From 94d1758a877c17a2caf639be527688f7a24b6048 Mon Sep 17 00:00:00 2001 From: Ho-Eun Ryu Date: Mon, 4 Jan 2010 15:50:48 +0900 Subject: initial commit --- README | 1 + 1 file changed, 1 insertion(+) create mode 100644 README diff --git a/README b/README new file mode 100644 index 0000000..825359e --- /dev/null +++ b/README @@ -0,0 +1 @@ +intel moorestown mix library -- cgit v1.2.3 From bd8388b4555645b3d29abc6a94c303638064d69a Mon Sep 17 00:00:00 2001 From: "wonjong.lee" Date: Mon, 8 Feb 2010 11:30:37 +0900 Subject: start new branch (intel-cdk-moblin-libmix-20100129) from: Ho-Eun, Ryu --- mix_audio/AUTHORS | 1 + mix_audio/COPYING | 26 + mix_audio/ChangeLog | 112 + mix_audio/INSTALL | 4 + mix_audio/Makefile.am | 7 + mix_audio/NEWS | 1 + mix_audio/README | 2 + mix_audio/autogen.sh | 19 + mix_audio/configure.ac | 137 + mix_audio/docs/Makefile.am | 4 + mix_audio/docs/reference/Makefile.am | 4 + mix_audio/docs/reference/MixAudio/Makefile.am | 96 + .../docs/reference/MixAudio/MixAudio-docs.sgml | 39 + .../docs/reference/MixAudio/MixAudio-sections.txt | 187 + mix_audio/docs/reference/MixAudio/MixAudio.types | 6 + .../html/MixAudio-MixAudioConfigParams.html | 689 ++++ .../html/MixAudio-MixAudioConfigParamsAAC.html | 823 ++++ .../html/MixAudio-MixAudioConfigParamsMP3.html | 221 ++ .../html/MixAudio-MixAudioConfigParamsWMA.html | 391 ++ .../MixAudio/html/MixAudio-MixAudioInitParams.html | 139 + .../MixAudio/html/MixAudio-mixaudiotypes.html | 94 + .../docs/reference/MixAudio/html/MixAudio.devhelp | 124 + .../docs/reference/MixAudio/html/MixAudio.devhelp2 | 186 + .../docs/reference/MixAudio/html/MixAudio.html | 1286 ++++++ .../reference/MixAudio/html/api-index-full.html | 259 ++ mix_audio/docs/reference/MixAudio/html/ch01.html | 56 + mix_audio/docs/reference/MixAudio/html/index.html | 60 + mix_audio/docs/reference/MixAudio/html/index.sgml | 134 + mix_audio/docs/reference/MixAudio/html/style.css | 167 + .../reference/MixAudio/html/tree-hierarchy.html | 37 + mix_audio/m4/Makefile.am | 1 + mix_audio/m4/as-mix-version.m4 | 35 + mix_audio/mixaudio.spec | 56 + mix_audio/pkgconfig/Makefile.am | 11 + mix_audio/pkgconfig/mixaudio.pc.in | 12 + mix_audio/src/Makefile.am | 61 + mix_audio/src/amhelper.c | 120 + mix_audio/src/amhelper.h | 25 + mix_audio/src/intel_sst_ioctl.h | 337 ++ mix_audio/src/mixacp.c | 322 ++ mix_audio/src/mixacp.h | 367 ++ mix_audio/src/mixacpaac.c | 360 ++ mix_audio/src/mixacpaac.h | 413 ++ mix_audio/src/mixacpmp3.c | 175 + mix_audio/src/mixacpmp3.h | 170 + mix_audio/src/mixacpwma.c | 205 + mix_audio/src/mixacpwma.h | 235 ++ mix_audio/src/mixaip.c | 167 + mix_audio/src/mixaip.h | 132 + mix_audio/src/mixaudio.c | 2092 ++++++++++ mix_audio/src/mixaudio.h | 574 +++ mix_audio/src/mixaudiotypes.h | 27 + mix_audio/src/pvt.h | 9 + mix_audio/src/sst_proxy.c | 435 ++ mix_audio/src/sst_proxy.h | 17 + mix_audio/tests/Makefile.am | 2 + mix_audio/tests/smoke/Makefile.am | 25 + mix_audio/tests/smoke/mixaudiosmoke.c | 77 + mix_common/AUTHORS | 1 + mix_common/COPYING | 26 + mix_common/ChangeLog | 28 + mix_common/INSTALL | 291 ++ mix_common/Makefile.am | 10 + mix_common/NEWS | 1 + mix_common/README | 1 + mix_common/autogen.sh | 8 + mix_common/configure.ac | 39 + mix_common/m4/as-mix-version.m4 | 35 + mix_common/mixcommon.pc.in | 11 + mix_common/mixcommon.spec | 43 + mix_common/src/Makefile.am | 23 + mix_common/src/mixdrmparams.c | 163 + mix_common/src/mixdrmparams.h | 123 + mix_common/src/mixlog.c | 257 ++ mix_common/src/mixlog.h | 47 + mix_common/src/mixparams.c | 274 ++ mix_common/src/mixparams.h | 202 + 
mix_common/src/mixresult.h | 90 + mix_vbp/AUTHORS | 1 + mix_vbp/COPYING | 26 + mix_vbp/ChangeLog | 2 + mix_vbp/INSTALL | 4 + mix_vbp/Makefile.am | 9 + mix_vbp/Merge_readme.txt | 2 + mix_vbp/NEWS | 2 + mix_vbp/README | 2 + mix_vbp/autogen.sh | 19 + mix_vbp/configure.ac | 77 + mix_vbp/m4/Makefile.am | 1 + mix_vbp/m4/as-mix-version.m4 | 35 + mix_vbp/mixvbp.pc.in | 11 + mix_vbp/mixvbp.spec | 52 + mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h | 1034 +++++ .../viddec_fw/fw/codecs/h264/include/h264parse.h | 172 + .../fw/codecs/h264/include/h264parse_dpb.h | 107 + .../fw/codecs/h264/include/h264parse_sei.h | 314 ++ .../viddec_fw/fw/codecs/h264/parser/h264parse.c | 786 ++++ .../fw/codecs/h264/parser/h264parse_bsd.c | 228 ++ .../fw/codecs/h264/parser/h264parse_dpb.c | 4171 ++++++++++++++++++++ .../fw/codecs/h264/parser/h264parse_math.c | 82 + .../fw/codecs/h264/parser/h264parse_mem.c | 198 + .../fw/codecs/h264/parser/h264parse_pps.c | 128 + .../fw/codecs/h264/parser/h264parse_sei.c | 1176 ++++++ .../viddec_fw/fw/codecs/h264/parser/h264parse_sh.c | 740 ++++ .../fw/codecs/h264/parser/h264parse_sps.c | 513 +++ .../fw/codecs/h264/parser/mix_vbp_h264_stubs.c | 575 +++ .../fw/codecs/h264/parser/viddec_h264_parse.c | 559 +++ .../fw/codecs/h264/parser/viddec_h264_workload.c | 1306 ++++++ mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h | 195 + .../viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h | 231 ++ .../fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c | 32 + .../fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c | 114 + .../fw/codecs/mp2/parser/viddec_mpeg2_metadata.c | 1039 +++++ .../fw/codecs/mp2/parser/viddec_mpeg2_parse.c | 380 ++ .../fw/codecs/mp2/parser/viddec_mpeg2_workload.c | 461 +++ .../fw/codecs/mp4/include/viddec_fw_mp4.h | 231 ++ .../fw/codecs/mp4/parser/viddec_fw_mp4_workload.c | 371 ++ .../mp4/parser/viddec_mp4_decodevideoobjectplane.c | 98 + .../mp4/parser/viddec_mp4_decodevideoobjectplane.h | 11 + .../fw/codecs/mp4/parser/viddec_mp4_parse.c | 278 ++ .../fw/codecs/mp4/parser/viddec_mp4_parse.h | 527 +++ .../fw/codecs/mp4/parser/viddec_mp4_shortheader.c | 134 + .../fw/codecs/mp4/parser/viddec_mp4_shortheader.h | 11 + .../mp4/parser/viddec_mp4_videoobjectlayer.c | 596 +++ .../mp4/parser/viddec_mp4_videoobjectlayer.h | 17 + .../mp4/parser/viddec_mp4_videoobjectplane.c | 422 ++ .../mp4/parser/viddec_mp4_videoobjectplane.h | 11 + .../fw/codecs/mp4/parser/viddec_mp4_visualobject.c | 287 ++ .../fw/codecs/mp4/parser/viddec_mp4_visualobject.h | 13 + .../fw/codecs/mp4/parser/viddec_parse_sc_mp4.c | 143 + .../viddec_fw/fw/codecs/vc1/include/vc1common.h | 111 + .../fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c | 16 + mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h | 224 ++ mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c | 557 +++ mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h | 136 + .../fw/codecs/vc1/parser/vc1parse_bitplane.c | 753 ++++ .../viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c | 100 + .../fw/codecs/vc1/parser/vc1parse_bpic_adv.c | 257 ++ .../fw/codecs/vc1/parser/vc1parse_common_defs.h | 608 +++ .../fw/codecs/vc1/parser/vc1parse_common_tables.c | 198 + .../fw/codecs/vc1/parser/vc1parse_huffman.c | 97 + .../viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c | 101 + .../fw/codecs/vc1/parser/vc1parse_ipic_adv.c | 257 ++ .../fw/codecs/vc1/parser/vc1parse_mv_com.c | 82 + .../fw/codecs/vc1/parser/vc1parse_pic_com.c | 101 + .../fw/codecs/vc1/parser/vc1parse_pic_com_adv.c | 403 ++ .../viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c | 149 + .../fw/codecs/vc1/parser/vc1parse_ppic_adv.c | 368 ++ 
.../fw/codecs/vc1/parser/vc1parse_vopdq.c | 130 + .../fw/codecs/vc1/parser/viddec_vc1_parse.c | 345 ++ .../fw/codecs/vc1/parser/viddec_vc1_workload.c | 691 ++++ mix_vbp/viddec_fw/fw/include/stdint.h | 23 + mix_vbp/viddec_fw/fw/include/viddec_debug.h | 31 + mix_vbp/viddec_fw/fw/include/viddec_fw_version.h | 7 + mix_vbp/viddec_fw/fw/parser/Makefile.am | 205 + mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c | 224 ++ mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h | 114 + .../viddec_fw/fw/parser/include/ipc_fw_custom.h | 87 + .../viddec_fw/fw/parser/include/viddec_emitter.h | 96 + .../viddec_fw/fw/parser/include/viddec_fw_debug.h | 80 + .../fw/parser/include/viddec_fw_parser_fw_ipc.h | 194 + .../fw/parser/include/viddec_h264_parse.h | 6 + .../viddec_fw/fw/parser/include/viddec_mp4_parse.h | 6 + .../fw/parser/include/viddec_mpeg2_parse.h | 6 + .../fw/parser/include/viddec_parser_ops.h | 106 + mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h | 95 + .../viddec_fw/fw/parser/include/viddec_pm_parse.h | 24 + .../viddec_fw/fw/parser/include/viddec_pm_tags.h | 17 + .../fw/parser/include/viddec_pm_utils_bstream.h | 81 + .../fw/parser/include/viddec_pm_utils_list.h | 51 + .../viddec_fw/fw/parser/include/viddec_vc1_parse.h | 6 + mix_vbp/viddec_fw/fw/parser/main.c | 608 +++ mix_vbp/viddec_fw/fw/parser/utils.c | 253 ++ mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 1568 ++++++++ mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h | 48 + mix_vbp/viddec_fw/fw/parser/vbp_loader.c | 162 + mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 318 ++ mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 1277 ++++++ mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h | 49 + mix_vbp/viddec_fw/fw/parser/vbp_trace.c | 28 + mix_vbp/viddec_fw/fw/parser/vbp_trace.h | 47 + mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 548 +++ mix_vbp/viddec_fw/fw/parser/vbp_utils.h | 106 + mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c | 1029 +++++ mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h | 54 + mix_vbp/viddec_fw/fw/parser/viddec_emit.c | 78 + mix_vbp/viddec_fw/fw/parser/viddec_intr.c | 56 + mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c | 119 + .../fw/parser/viddec_parse_sc_fast_loop.c | 190 + mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c | 6 + mix_vbp/viddec_fw/fw/parser/viddec_pm.c | 554 +++ mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c | 127 + mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c | 178 + mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c | 21 + mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c | 304 ++ .../viddec_fw/fw/parser/viddec_pm_utils_bstream.c | 472 +++ mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c | 221 ++ mix_vbp/viddec_fw/include/viddec_fw_common_defs.h | 200 + mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h | 242 ++ mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h | 281 ++ mix_vbp/viddec_fw/include/viddec_fw_item_types.h | 738 ++++ mix_vbp/viddec_fw/include/viddec_fw_parser_host.h | 237 ++ mix_vbp/viddec_fw/include/viddec_fw_workload.h | 152 + mix_video/AUTHORS | 1 + mix_video/COPYING | 26 + mix_video/ChangeLog | 2 + mix_video/INSTALL | 4 + mix_video/Makefile.am | 9 + mix_video/NEWS | 2 + mix_video/README | 2 + mix_video/autogen.sh | 19 + mix_video/configure.ac | 137 + mix_video/m4/Makefile.am | 1 + mix_video/m4/as-mix-version.m4 | 35 + mix_video/mixvideo.pc.in | 12 + mix_video/mixvideo.spec | 67 + mix_video/mixvideoint.pc.in | 12 + mix_video/src/Makefile.am | 136 + mix_video/src/Makefile.old | 40 + mix_video/src/mixbuffer.c | 220 ++ mix_video/src/mixbuffer.h | 130 + mix_video/src/mixbuffer_private.h | 39 + mix_video/src/mixbufferpool.c | 484 
+++ mix_video/src/mixbufferpool.h | 150 + mix_video/src/mixdisplay.c | 539 +++ mix_video/src/mixdisplay.h | 233 ++ mix_video/src/mixdisplayx11.c | 205 + mix_video/src/mixdisplayx11.h | 141 + mix_video/src/mixdrmparams.c | 189 + mix_video/src/mixdrmparams.h | 126 + mix_video/src/mixframemanager.c | 775 ++++ mix_video/src/mixframemanager.h | 164 + mix_video/src/mixsurfacepool.c | 652 +++ mix_video/src/mixsurfacepool.h | 158 + mix_video/src/mixvideo.c | 1638 ++++++++ mix_video/src/mixvideo.h | 208 + mix_video/src/mixvideo_private.h | 57 + mix_video/src/mixvideocaps.c | 261 ++ mix_video/src/mixvideocaps.h | 137 + mix_video/src/mixvideoconfigparams.c | 157 + mix_video/src/mixvideoconfigparams.h | 126 + mix_video/src/mixvideoconfigparamsdec.c | 534 +++ mix_video/src/mixvideoconfigparamsdec.h | 195 + mix_video/src/mixvideoconfigparamsdec_h264.c | 213 + mix_video/src/mixvideoconfigparamsdec_h264.h | 130 + mix_video/src/mixvideoconfigparamsdec_mp42.c | 244 ++ mix_video/src/mixvideoconfigparamsdec_mp42.h | 141 + mix_video/src/mixvideoconfigparamsdec_vc1.c | 188 + mix_video/src/mixvideoconfigparamsdec_vc1.h | 134 + mix_video/src/mixvideoconfigparamsenc.c | 688 ++++ mix_video/src/mixvideoconfigparamsenc.h | 254 ++ mix_video/src/mixvideoconfigparamsenc_h264.c | 322 ++ mix_video/src/mixvideoconfigparamsenc_h264.h | 160 + mix_video/src/mixvideoconfigparamsenc_mpeg4.c | 300 ++ mix_video/src/mixvideoconfigparamsenc_mpeg4.h | 152 + mix_video/src/mixvideoconfigparamsenc_preview.c | 222 ++ mix_video/src/mixvideoconfigparamsenc_preview.h | 124 + mix_video/src/mixvideodecodeparams.c | 204 + mix_video/src/mixvideodecodeparams.h | 139 + mix_video/src/mixvideodef.h | 114 + mix_video/src/mixvideoencodeparams.c | 204 + mix_video/src/mixvideoencodeparams.h | 140 + mix_video/src/mixvideoformat.c | 401 ++ mix_video/src/mixvideoformat.h | 160 + mix_video/src/mixvideoformat_h264.c | 1663 ++++++++ mix_video/src/mixvideoformat_h264.h | 129 + mix_video/src/mixvideoformat_mp42.c | 1416 +++++++ mix_video/src/mixvideoformat_mp42.h | 117 + mix_video/src/mixvideoformat_vc1.c | 1749 ++++++++ mix_video/src/mixvideoformat_vc1.h | 123 + mix_video/src/mixvideoformatenc.c | 502 +++ mix_video/src/mixvideoformatenc.h | 178 + mix_video/src/mixvideoformatenc_h264.c | 1954 +++++++++ mix_video/src/mixvideoformatenc_h264.h | 137 + mix_video/src/mixvideoformatenc_mpeg4.c | 1713 ++++++++ mix_video/src/mixvideoformatenc_mpeg4.h | 137 + mix_video/src/mixvideoformatenc_preview.c | 1187 ++++++ mix_video/src/mixvideoformatenc_preview.h | 133 + mix_video/src/mixvideoformatqueue.h | 24 + mix_video/src/mixvideoframe.c | 391 ++ mix_video/src/mixvideoframe.h | 144 + mix_video/src/mixvideoframe_private.h | 68 + mix_video/src/mixvideoinitparams.c | 219 + mix_video/src/mixvideoinitparams.h | 138 + mix_video/src/mixvideolog.h | 25 + mix_video/src/mixvideorenderparams.c | 420 ++ mix_video/src/mixvideorenderparams.h | 158 + mix_video/src/mixvideorenderparams_internal.h | 36 + mix_video/src/test.c | 87 + mix_video/test/Makefile.am | 2 + mix_video/test/autogen.sh | 1 + mix_video/test/configure.ac | 53 + mix_video/test/src/Makefile.am | 22 + mix_video/test/src/test_framemanager.c | 200 + 294 files changed, 74577 insertions(+) create mode 100644 mix_audio/AUTHORS create mode 100644 mix_audio/COPYING create mode 100644 mix_audio/ChangeLog create mode 100644 mix_audio/INSTALL create mode 100644 mix_audio/Makefile.am create mode 100644 mix_audio/NEWS create mode 100644 mix_audio/README create mode 100644 mix_audio/autogen.sh create mode 100644 mix_audio/configure.ac 
create mode 100644 mix_audio/docs/Makefile.am create mode 100644 mix_audio/docs/reference/Makefile.am create mode 100644 mix_audio/docs/reference/MixAudio/Makefile.am create mode 100644 mix_audio/docs/reference/MixAudio/MixAudio-docs.sgml create mode 100644 mix_audio/docs/reference/MixAudio/MixAudio-sections.txt create mode 100644 mix_audio/docs/reference/MixAudio/MixAudio.types create mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParams.html create mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsAAC.html create mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsMP3.html create mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsWMA.html create mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioInitParams.html create mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio-mixaudiotypes.html create mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp create mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp2 create mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio.html create mode 100644 mix_audio/docs/reference/MixAudio/html/api-index-full.html create mode 100644 mix_audio/docs/reference/MixAudio/html/ch01.html create mode 100644 mix_audio/docs/reference/MixAudio/html/index.html create mode 100644 mix_audio/docs/reference/MixAudio/html/index.sgml create mode 100644 mix_audio/docs/reference/MixAudio/html/style.css create mode 100644 mix_audio/docs/reference/MixAudio/html/tree-hierarchy.html create mode 100644 mix_audio/m4/Makefile.am create mode 100644 mix_audio/m4/as-mix-version.m4 create mode 100644 mix_audio/mixaudio.spec create mode 100644 mix_audio/pkgconfig/Makefile.am create mode 100644 mix_audio/pkgconfig/mixaudio.pc.in create mode 100644 mix_audio/src/Makefile.am create mode 100644 mix_audio/src/amhelper.c create mode 100644 mix_audio/src/amhelper.h create mode 100644 mix_audio/src/intel_sst_ioctl.h create mode 100644 mix_audio/src/mixacp.c create mode 100644 mix_audio/src/mixacp.h create mode 100644 mix_audio/src/mixacpaac.c create mode 100644 mix_audio/src/mixacpaac.h create mode 100644 mix_audio/src/mixacpmp3.c create mode 100644 mix_audio/src/mixacpmp3.h create mode 100644 mix_audio/src/mixacpwma.c create mode 100644 mix_audio/src/mixacpwma.h create mode 100644 mix_audio/src/mixaip.c create mode 100644 mix_audio/src/mixaip.h create mode 100644 mix_audio/src/mixaudio.c create mode 100644 mix_audio/src/mixaudio.h create mode 100644 mix_audio/src/mixaudiotypes.h create mode 100644 mix_audio/src/pvt.h create mode 100644 mix_audio/src/sst_proxy.c create mode 100644 mix_audio/src/sst_proxy.h create mode 100644 mix_audio/tests/Makefile.am create mode 100644 mix_audio/tests/smoke/Makefile.am create mode 100644 mix_audio/tests/smoke/mixaudiosmoke.c create mode 100644 mix_common/AUTHORS create mode 100644 mix_common/COPYING create mode 100644 mix_common/ChangeLog create mode 100644 mix_common/INSTALL create mode 100644 mix_common/Makefile.am create mode 100644 mix_common/NEWS create mode 100644 mix_common/README create mode 100644 mix_common/autogen.sh create mode 100644 mix_common/configure.ac create mode 100644 mix_common/m4/as-mix-version.m4 create mode 100644 mix_common/mixcommon.pc.in create mode 100644 mix_common/mixcommon.spec create mode 100644 mix_common/src/Makefile.am create mode 100644 mix_common/src/mixdrmparams.c create mode 100644 mix_common/src/mixdrmparams.h create mode 100644 
mix_common/src/mixlog.c create mode 100644 mix_common/src/mixlog.h create mode 100644 mix_common/src/mixparams.c create mode 100644 mix_common/src/mixparams.h create mode 100644 mix_common/src/mixresult.h create mode 100644 mix_vbp/AUTHORS create mode 100644 mix_vbp/COPYING create mode 100644 mix_vbp/ChangeLog create mode 100644 mix_vbp/INSTALL create mode 100644 mix_vbp/Makefile.am create mode 100644 mix_vbp/Merge_readme.txt create mode 100644 mix_vbp/NEWS create mode 100644 mix_vbp/README create mode 100644 mix_vbp/autogen.sh create mode 100644 mix_vbp/configure.ac create mode 100644 mix_vbp/m4/Makefile.am create mode 100644 mix_vbp/m4/as-mix-version.m4 create mode 100644 mix_vbp/mixvbp.pc.in create mode 100644 mix_vbp/mixvbp.spec create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h create mode 100644 
mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c create mode 100644 mix_vbp/viddec_fw/fw/include/stdint.h create mode 100644 mix_vbp/viddec_fw/fw/include/viddec_debug.h create mode 100644 mix_vbp/viddec_fw/fw/include/viddec_fw_version.h create mode 100644 mix_vbp/viddec_fw/fw/parser/Makefile.am create mode 100644 mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c create mode 100644 mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_mp4_parse.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_mpeg2_parse.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_pm_tags.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_vc1_parse.h create mode 100644 mix_vbp/viddec_fw/fw/parser/main.c create mode 100644 mix_vbp/viddec_fw/fw/parser/utils.c create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_loader.c create mode 100644 
mix_vbp/viddec_fw/fw/parser/vbp_loader.h create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_trace.c create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_trace.h create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_utils.c create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_utils.h create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h create mode 100644 mix_vbp/viddec_fw/fw/parser/viddec_emit.c create mode 100644 mix_vbp/viddec_fw/fw/parser/viddec_intr.c create mode 100644 mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c create mode 100644 mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c create mode 100644 mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c create mode 100644 mix_vbp/viddec_fw/fw/parser/viddec_pm.c create mode 100644 mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c create mode 100644 mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c create mode 100644 mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c create mode 100644 mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c create mode 100644 mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c create mode 100644 mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c create mode 100644 mix_vbp/viddec_fw/include/viddec_fw_common_defs.h create mode 100644 mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h create mode 100644 mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h create mode 100644 mix_vbp/viddec_fw/include/viddec_fw_item_types.h create mode 100644 mix_vbp/viddec_fw/include/viddec_fw_parser_host.h create mode 100644 mix_vbp/viddec_fw/include/viddec_fw_workload.h create mode 100644 mix_video/AUTHORS create mode 100644 mix_video/COPYING create mode 100644 mix_video/ChangeLog create mode 100644 mix_video/INSTALL create mode 100644 mix_video/Makefile.am create mode 100644 mix_video/NEWS create mode 100644 mix_video/README create mode 100644 mix_video/autogen.sh create mode 100644 mix_video/configure.ac create mode 100644 mix_video/m4/Makefile.am create mode 100644 mix_video/m4/as-mix-version.m4 create mode 100644 mix_video/mixvideo.pc.in create mode 100644 mix_video/mixvideo.spec create mode 100644 mix_video/mixvideoint.pc.in create mode 100644 mix_video/src/Makefile.am create mode 100644 mix_video/src/Makefile.old create mode 100644 mix_video/src/mixbuffer.c create mode 100644 mix_video/src/mixbuffer.h create mode 100644 mix_video/src/mixbuffer_private.h create mode 100644 mix_video/src/mixbufferpool.c create mode 100644 mix_video/src/mixbufferpool.h create mode 100644 mix_video/src/mixdisplay.c create mode 100644 mix_video/src/mixdisplay.h create mode 100644 mix_video/src/mixdisplayx11.c create mode 100644 mix_video/src/mixdisplayx11.h create mode 100644 mix_video/src/mixdrmparams.c create mode 100644 mix_video/src/mixdrmparams.h create mode 100644 mix_video/src/mixframemanager.c create mode 100644 mix_video/src/mixframemanager.h create mode 100644 mix_video/src/mixsurfacepool.c create mode 100644 mix_video/src/mixsurfacepool.h create mode 100644 mix_video/src/mixvideo.c create mode 100644 mix_video/src/mixvideo.h create mode 100644 mix_video/src/mixvideo_private.h create mode 100644 mix_video/src/mixvideocaps.c create mode 100644 mix_video/src/mixvideocaps.h create mode 100644 mix_video/src/mixvideoconfigparams.c create mode 100644 mix_video/src/mixvideoconfigparams.h create mode 100644 mix_video/src/mixvideoconfigparamsdec.c create mode 
100644 mix_video/src/mixvideoconfigparamsdec.h create mode 100644 mix_video/src/mixvideoconfigparamsdec_h264.c create mode 100644 mix_video/src/mixvideoconfigparamsdec_h264.h create mode 100644 mix_video/src/mixvideoconfigparamsdec_mp42.c create mode 100644 mix_video/src/mixvideoconfigparamsdec_mp42.h create mode 100644 mix_video/src/mixvideoconfigparamsdec_vc1.c create mode 100644 mix_video/src/mixvideoconfigparamsdec_vc1.h create mode 100644 mix_video/src/mixvideoconfigparamsenc.c create mode 100644 mix_video/src/mixvideoconfigparamsenc.h create mode 100644 mix_video/src/mixvideoconfigparamsenc_h264.c create mode 100644 mix_video/src/mixvideoconfigparamsenc_h264.h create mode 100644 mix_video/src/mixvideoconfigparamsenc_mpeg4.c create mode 100644 mix_video/src/mixvideoconfigparamsenc_mpeg4.h create mode 100644 mix_video/src/mixvideoconfigparamsenc_preview.c create mode 100644 mix_video/src/mixvideoconfigparamsenc_preview.h create mode 100644 mix_video/src/mixvideodecodeparams.c create mode 100644 mix_video/src/mixvideodecodeparams.h create mode 100644 mix_video/src/mixvideodef.h create mode 100644 mix_video/src/mixvideoencodeparams.c create mode 100644 mix_video/src/mixvideoencodeparams.h create mode 100644 mix_video/src/mixvideoformat.c create mode 100644 mix_video/src/mixvideoformat.h create mode 100644 mix_video/src/mixvideoformat_h264.c create mode 100644 mix_video/src/mixvideoformat_h264.h create mode 100644 mix_video/src/mixvideoformat_mp42.c create mode 100644 mix_video/src/mixvideoformat_mp42.h create mode 100644 mix_video/src/mixvideoformat_vc1.c create mode 100644 mix_video/src/mixvideoformat_vc1.h create mode 100644 mix_video/src/mixvideoformatenc.c create mode 100644 mix_video/src/mixvideoformatenc.h create mode 100644 mix_video/src/mixvideoformatenc_h264.c create mode 100644 mix_video/src/mixvideoformatenc_h264.h create mode 100644 mix_video/src/mixvideoformatenc_mpeg4.c create mode 100644 mix_video/src/mixvideoformatenc_mpeg4.h create mode 100644 mix_video/src/mixvideoformatenc_preview.c create mode 100644 mix_video/src/mixvideoformatenc_preview.h create mode 100644 mix_video/src/mixvideoformatqueue.h create mode 100644 mix_video/src/mixvideoframe.c create mode 100644 mix_video/src/mixvideoframe.h create mode 100644 mix_video/src/mixvideoframe_private.h create mode 100644 mix_video/src/mixvideoinitparams.c create mode 100644 mix_video/src/mixvideoinitparams.h create mode 100644 mix_video/src/mixvideolog.h create mode 100644 mix_video/src/mixvideorenderparams.c create mode 100644 mix_video/src/mixvideorenderparams.h create mode 100644 mix_video/src/mixvideorenderparams_internal.h create mode 100644 mix_video/src/test.c create mode 100644 mix_video/test/Makefile.am create mode 100644 mix_video/test/autogen.sh create mode 100644 mix_video/test/configure.ac create mode 100644 mix_video/test/src/Makefile.am create mode 100644 mix_video/test/src/test_framemanager.c diff --git a/mix_audio/AUTHORS b/mix_audio/AUTHORS new file mode 100644 index 0000000..d74d027 --- /dev/null +++ b/mix_audio/AUTHORS @@ -0,0 +1 @@ +echo.choi@intel.com diff --git a/mix_audio/COPYING b/mix_audio/COPYING new file mode 100644 index 0000000..a4f852c --- /dev/null +++ b/mix_audio/COPYING @@ -0,0 +1,26 @@ +INTEL SOFTWARE LICENSE AGREEMENT (Alpha, Beta, Prototype Site License) + +IMPORTANT - READ BEFORE COPYING, INSTALLING OR USING. +Do not use or load this software and any associated materials (collectively, the “Software”) until you have carefully read the following terms and conditions. 
By loading or using the Software, you agree to the terms of this Agreement. If you do not wish to so agree, do not install or use the Software. + + +LICENSE. Intel hereby grants you a limited, nontransferable, non-sublicenseable, nonexclusive, royalty-free, fully-paid license under Intel’s copyrights to use the Software on your organization’s computers solely for your organization’s internal evaluation and testing in connection with Intel products designed for the Software, and you may make a reasonable number of  copies of the Software for internal use only, subject to these conditions: +1. You may not copy, modify, rent, sell, distribute, externally display, externally perform or transfer any part of the Software except as provided in this Agreement, and you agree to prevent unauthorized copying of the Software. +2. You may not reverse engineer, decompile, or disassemble the Software. +3. You may not sublicense the Software. +4. The Software may include portions offered on terms in addition to those set out here, as set out in a license accompanying those portions. +5. You may not subject the Software, in whole or in part, to any license obligations of Open Source Software including without limitation combining or distributing the Software with Open Source Software in a manner that subjects the Software or any portion of the Software provided by Intel hereunder to any license obligations of such Open Source Software. "Open Source Software" means any software that requires as a condition of use, modification and/or distribution of such software that such software or other software incorporated into, derived from or distributed with such software (a) be disclosed or distributed in source code form; or (b) be licensed by the user to third parties for the purpose of making and/or distributing derivative works; or (c) be redistributable at no charge. Open Source Software includes, without limitation, software licensed or distributed under any of the following licenses or distribution models, or licenses or distribution models substantially similar to any of the following: (a) GNU’s General Public License (GPL) or Lesser/Library GPL (LGPL), (b) the Artistic License (e.g., PERL), (c) the Mozilla Public License, (d) the Netscape Public License, (e) the Sun Community Source License (SCSL), (f) the Sun Industry Source License (SISL), (g) the Apache Software license and (h) the Common Public License (CPL). +OWNERSHIP OF SOFTWARE AND COPYRIGHTS. Title to all copies of the Software remains with Intel or its suppliers. The Software is copyrighted and protected by the laws of the United States and other countries, and international treaty provisions. You may not remove any copyright notices from the Software. Intel may make changes to the Software, or to items referenced therein, at any time without notice, but is not obligated to support, update, upgrade or provide training for the Software. Except as otherwise expressly provided, Intel grants no express or implied right under Intel patents, copyrights, trademarks, or other intellectual property rights. +EXCLUSION OF WARRANTIES . THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY EXPRESS OR IMPLIED WARRANTY OF ANY KIND INCLUDING WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, OR FITNESS FOR A PARTICULAR PURPOSE. Intel does not warrant or assume responsibility for the accuracy or completeness of any information, text, graphics, links or other items contained within the Software. +LIMITATION OF LIABILITY. 
IN NO EVENT SHALL INTEL OR ITS SUPPLIERS BE LIABLE FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, LOST PROFITS, BUSINESS INTERRUPTION OR LOST INFORMATION) ARISING OUT OF THE USE OF OR INABILITY TO USE THE SOFTWARE, EVEN IF INTEL HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. SOME JURISDICTIONS PROHIBIT EXCLUSION OR LIMITATION OF LIABILITY FOR IMPLIED WARRANTIES OR CONSEQUENTIAL OR INCIDENTAL DAMAGES, SO THE ABOVE LIMITATION MAY NOT APPLY TO YOU. YOU MAY ALSO HAVE OTHER LEGAL RIGHTS THAT VARY FROM JURISDICTION TO JURISDICTION. THE SOFTWARE LICENSED HEREUNDER IS NOT DESIGNED OR INTENDED FOR USE IN ANY APPLICATION IN WHICH THE FAILURE OF THE SOFTWARE COULD LEAD TO PERSONAL INJURY OR DEATH. YOU SHALL INDEMNIFY AND HOLD INTEL AND THE INTEL PARTIES HARMLESS AGAINST ALL CLAIMS, COSTS, DAMAGES, AND EXPENSES, AND REASONABLE ATTORNEY FEES ARISING OUT OF, DIRECTLY OR INDIRECTLY, THE UNINTENDED USE OF THE SOFTWARE AND ANY CLAIM OF PRODUCT LIABILITY, PERSONAL INJURY OR DEATH ASSOCIATED WITH ANY UNINTENDED USE, EVEN IF SUCH CLAIM ALLEGES THAT AN INTEL PARTY WAS NEGLIGENT REGARDING THE DESIGN OR MANUFACTURE OF THE SOFTWARE. THE LIMITED REMEDIES, WARRANTY DISCLAIMER AND LIMITED LIABILITY ARE FUNDAMENTAL ELEMENTS OF THE BASIS OF THE BARGAIN BETWEEN INTEL AND YOU. INTEL WOULD NOT BE ABLE TO PROVIDE THE SOFTWARE WITHOUT SUCH LIMITATIONS. +TERMINATION OF THIS AGREEMENT. Intel may terminate this Agreement at any time if you violate its terms. Upon termination, you will immediately destroy the Software or return all copies of the Software to Intel (including providing certification of such destruction back to Intel). In the event of termination of this Agreement, all licenses granted to you hereunder shall immediately terminate. +APPLICABLE LAWS. Claims arising under this Agreement shall be governed by the laws of Delaware, excluding its principles of conflict of laws and the United Nations Convention on Contracts for the Sale of Goods. You may not export the Software in violation of applicable export laws and regulations. +GOVERNMENT RESTRICTED RIGHTS. The Software is provided with "RESTRICTED RIGHTS." Use, duplication or disclosure by the government is subject to restrictions as set forth in FAR52.227-14 and DFAR252.227-7013 et seq. or their successors. Use of the Software by the government constitutes acknowledgment of Intel's proprietary rights therein. Contractor or Manufacturer is Intel Corporation, 2200 Mission College Blvd., Santa Clara, CA 95052. +CONFIDENTIALITY. You shall not disclose the terms or existence of this Agreement or use Intel's name in any publications, advertisements, or other announcements without Intel's prior written consent. You do not have any rights to use any Intel trademarks or logos. +ASSIGNMENT. You may not delegate, assign or transfer this Agreement, the license(s) granted or any of your rights or duties hereunder, expressly, by implication, by operation of law, by way of merger (regardless of whether you are the surviving entity) or acquisition, or otherwise and any attempt to do so, without Intel’s express prior written consent, shall be null and void. Intel may assign this Agreement, and its rights and obligations hereunder, in its sole discretion. +ENTIRE AGREEMENT. The terms and conditions of this Agreement constitutes the entire agreement between the parties with respect to the subject matter hereof, and merges and supersedes all prior, contemporaneous agreements, understandings, negotiations and discussions. 
Neither of the parties hereto shall be bound by any conditions, definitions, warranties, understandings or representations with respect to the subject matter hereof other than as expressly provided for herein. Intel is not obligated under any other agreements unless they are in writing and signed by an authorized representative of Intel.
+NO AGENCY Nothing contained herein shall be construed as creating any agency, employment relationship, partnership, principal-agent or other form of joint enterprise between the parties.
+SEVERABILITY In the event that any provision of this Agreement shall be unenforceable or invalid under any applicable law or be so held by an applicable court decision, such unenforceability or invalidity shall not render this Agreement unenforceable or invalid as a whole, and, in such event, such provision shall be changed and interpreted so as to best accomplish the objectives of such unenforceable or invalid provision within the limits of applicable law or applicable court decisions.
+WAIVER The failure of either party to require performance by the other party of any provision hereof shall not affect the full right to require such performance at any time thereafter; nor shall the waiver by either party of a breach of any provision hereof be taken or held to be a waiver of the provision itself.
+CONTROLLING LANGUAGE. Translated versions of this Agreement may be provided in local languages for informational purposes only, provided however, that the English language version of this Agreement shall be controlling in all jurisdictions.
+
diff --git a/mix_audio/ChangeLog b/mix_audio/ChangeLog
new file mode 100644
index 0000000..3eb86f3
--- /dev/null
+++ b/mix_audio/ChangeLog
@@ -0,0 +1,112 @@
+2010-01-18 Echo Choi
+
+	* Updated version to 0.3.5 and submitted for build.
+	* Updated call to Audio Manager to use stream name.
+	* Removed the check to allow decode to be called during PAUSE.
+
+2010-01-11 Echo Choi
+
+	* Updated version to 0.3.4.
+	* Updated MixCommon dependency to v 0.1.6.
+	* Updated the parameter conversion code for AAC to detect codec value from parameters.
+	* Fixed and added more enum types for AAC parameter definitions.
+	* Added methods to replace direct access to AAC parameters.
+	* Added psPresentFlag for AAC param object.
+	* Updated gtk-doc documentation.
+	* Added get_stream_byte_decoded API.
+
+2010-01-04 Echo Choi
+
+	* Fixed code review issues: declare const for char*.
+	* Fixed code review issues: array size calculation.
+
+2009-12-23 Echo Choi
+
+	* Added AAC core operating frequency param for AAC param object. Needed to configure the HE-AAC decoder.
+	* Fixed the log message category for DRAIN debug log.
+
+2009-11-19 Echo Choi
+
+	* Added more utility functions to populate param objects.
+	* Added MixAudio API to read output configuration (get params).
+
+2009-11-18 Echo Choi
+
+	* Added return code that informs the caller to interpret errno on error.
+	* Fixed more error checks.
+
+2009-11-17 Echo Choi
+
+	* Added default invalid value for various enumerations.
+	* Fixed some bugs in type declarations.
+	* Cleaned up code. Added pointer checks, state checks.
+
+2009-11-15 Echo Choi
+
+	* Updated version to 0.3.3 and packaged for build.
+	* Fixed DRAIN state test condition.
+
+2009-11-13 Echo Choi
+
+	* Updated MixCommon version dependency as MixAudio is using new definitions from MixCommon.
+	* Fixed issues reported by Klocwork.
+
+2009-11-11 Echo Choi
+
+	* Fixed a mem leak in the stub code.
+
+2009-11-01 Echo Choi
+
+	* Increased version number to 0.3.2 and packaged for build.
+
+2009-10-28 Echo Choi
+
+	* Renamed MPEG_FORMAT member of AAC params to MPEG_ID.
+
+2009-10-23 Echo Choi
+
+	* Updated version to 0.3.1 for build.
+	* Added code to assign op_align to sst structure in deterministic case.
+	* Added stub code to write input bytes to file during dnr request.
+	* Fixed MixAudio::decode() method to use correct 64-bit type for decode consumed/produced.
+
+2009-10-18 Echo Choi
+
+	* Added dbus-glib dependency.
+	* Updated AAC param object to include additional fields for HE-AAC support.
+
+2009-10-16 Echo Choi
+
+	* Moved mixdrmparams.* to MixCommon package.
+	* Changed mix_audio_decode API to include output parameters for bytes consumed and produced.
+	* Updated version to 0.3.0 to reflect API change in mix_audio_decode.
+
+2009-10-08 Echo Choi
+
+	* Package for 0.2.6 build.
+
+2009-10-02 Echo Choi
+
+	* Updated version number to 0.2.6.
+	* Defined a new error code range that encapsulates errno when system calls to the SST API fail.
+	* Added internal states to track PAUSED_DRAINING, and added code to deal with this state.
+
+2009-08-17 Echo Choi
+
+	* Updated SST API struct to align with build 0.04.008.
+	* Added bit-mask based runtime log mechanism.
+
+2009-08-14 Echo Choi
+
+	* Fixed return value check after DROP call.
+	* Added method to dump status upon SST call failure.
+
+2009-08-13 Echo Choi
+
+	* Updated API definitions to sync with v0.5 documentation.
+
+2009-08-10 Echo Choi
+
+	* Fixed stop_drop so it is called even if the state is STOPPED.
+
+
diff --git a/mix_audio/INSTALL b/mix_audio/INSTALL
new file mode 100644
index 0000000..50e1648
--- /dev/null
+++ b/mix_audio/INSTALL
@@ -0,0 +1,4 @@
+Run the following to build and install:
+./autogen.sh
+./configure
+make
diff --git a/mix_audio/Makefile.am b/mix_audio/Makefile.am
new file mode 100644
index 0000000..2ed4bcd
--- /dev/null
+++ b/mix_audio/Makefile.am
@@ -0,0 +1,7 @@
+SUBDIRS = src tests pkgconfig
+
+#Uncomment the following line if building documentation using gtkdoc
+#SUBDIRS += docs
+
+EXTRA_DIST = autogen.sh m4
+DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc
diff --git a/mix_audio/NEWS b/mix_audio/NEWS
new file mode 100644
index 0000000..82302b4
--- /dev/null
+++ b/mix_audio/NEWS
@@ -0,0 +1 @@
+no.
diff --git a/mix_audio/README b/mix_audio/README
new file mode 100644
index 0000000..b4292a0
--- /dev/null
+++ b/mix_audio/README
@@ -0,0 +1,2 @@
+MIX Audio is a user library interface for the various hardware audio codecs available on the platform.
+
diff --git a/mix_audio/autogen.sh b/mix_audio/autogen.sh
new file mode 100644
index 0000000..13a1d76
--- /dev/null
+++ b/mix_audio/autogen.sh
@@ -0,0 +1,19 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+
+package=MixAudio
+
+#Uncomment the following line if building documentation using gtkdoc
+#gtkdocize --flavour no-tmpl || exit 1
+aclocal -I m4/ $ACLOCAL_FLAGS || exit 1
+libtoolize --copy --force || exit 1
+autoheader -v || exit 1
+autoconf -v || exit 1
+automake -a -c -v || exit 1
+
+echo "Now type ./configure to configure $package."
+exit 0
diff --git a/mix_audio/configure.ac b/mix_audio/configure.ac
new file mode 100644
index 0000000..bcbb4ab
--- /dev/null
+++ b/mix_audio/configure.ac
@@ -0,0 +1,137 @@
+AC_INIT("","",[echo.choi@intel.com])
+
+AC_CONFIG_MACRO_DIR(m4)
+
+AS_MIX_VERSION(mixaudio, MIXAUDIO, 0, 3, 5)
+
+dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode
+AM_MAINTAINER_MODE
+
+AM_INIT_AUTOMAKE($PACKAGE, $VERSION)
+
+MIXAUDIO_PKG_DEPS="glib-2.0, gobject-2.0, gthread-2.0 mixcommon"
+AC_SUBST(MIXAUDIO_PKG_DEPS)
+
+dnl make aclocal work in maintainer mode
+AC_SUBST(ACLOCAL_AMFLAGS, "-I m4")
+
+AM_CONFIG_HEADER(config.h)
+
+dnl check for tools (AC_PROG_CC must come before AC_PROG_CC_C_O)
+AC_PROG_CC
+AC_PROG_CC_C_O
+AC_PROG_LIBTOOL
+
+MIX_CFLAGS="-Wall -Werror -O"
+
+AC_ARG_ENABLE([lpestub],
+ [ --enable-lpestub Stub LPE methods],
+ [case "${enableval}" in
+ yes) lpestub=true ;;
+ no) lpestub=false ;;
+ *) AC_MSG_ERROR([bad value ${enableval} for --enable-lpestub]) ;;
+ esac],[lpestub=false])
+
+AM_CONDITIONAL([LPESTUB], [test x$lpestub = xtrue])
+
+AC_ARG_ENABLE([workaround],
+ [ --enable-workaround Enable workaround for LPE DROP],
+ [case "${enableval}" in
+ yes) workaround=true ;;
+ no) workaround=false ;;
+ *) AC_MSG_ERROR([bad value ${enableval} for --enable-workaround]) ;;
+ esac],[workaround=false])
+
+AM_CONDITIONAL([WORKAROUND], [test x$workaround = xtrue])
+
+AC_ARG_ENABLE([audiomanager],
+ [ --enable-audiomanager Audio Manager methods (default=enabled)],
+ [case "${enableval}" in
+ yes) audiomanager=true ;;
+ no) audiomanager=false ;;
+ *) AC_MSG_ERROR([bad value ${enableval} for --enable-audiomanager]) ;;
+ esac],[audiomanager=true])
+
+AM_CONDITIONAL([AUDIO_MANAGER], [test x$audiomanager = xtrue])
+
+dnl decide on error flags
+dnl AS_COMPILER_FLAG(-Wall, MIX_ERROR="$MIX_ERROR -Wall", MIX_ERROR="$MIX_ERROR")
+dnl AS_COMPILER_FLAG(-Werror,MIX_ERROR="$MIX_ERROR -Werror",MIX_ERROR="$MIX_ERROR")
+
+dnl Check for pkgconfig first
+AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes, no)
+
+dnl Give error and exit if we don't have pkgconfig
+if test "x$HAVE_PKGCONFIG" = "xno"; then
+ AC_MSG_ERROR(you need to have pkgconfig installed !)
+fi
+
+dnl GLib
+dnl FIXME: need to align with moblin glib version
+dnl FIXME: currently using an earlier version so it can be built on dev box.
+GLIB_REQ=2.18
+
+dnl Check for glib2 without extra fat, useful for the unversioned tool frontends
+dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+if test "x$HAVE_GLIB" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no)
+if test "x$HAVE_GOBJECT" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+PKG_CHECK_MODULES(GTHREAD, gthread-2.0 >= $GLIB_REQ,HAVE_GTHREAD=yes,HAVE_GTHREAD=no)
+if test "x$HAVE_GTHREAD" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+MIXCOMMON_REQUIRED=0.1.6
+PKG_CHECK_MODULES(MIXCOMMON, mixcommon >= $MIXCOMMON_REQUIRED , HAVE_MIXCOMMON=yes, HAVE_MIXCOMMON=no)
+if test "x$HAVE_MIXCOMMON" = "xno"; then
+ AC_MSG_ERROR(You need mixcommon development package $MIXCOMMON_REQUIRED installed !)
+fi
+
+if test "x$audiomanager" = "xtrue"; then
+ PKG_CHECK_MODULES(DBUS_GLIB, dbus-glib-1)
+fi
+
+dnl Check for documentation xrefs
+dnl GLIB_PREFIX="`$PKG_CONFIG --variable=prefix glib-2.0`"
+dnl AC_SUBST(GLIB_PREFIX)
+
+AC_SUBST(GLIB_CFLAGS)
+AC_SUBST(GLIB_LIBS)
+AC_SUBST(GOBJECT_CFLAGS)
+AC_SUBST(GOBJECT_LIBS)
+AC_SUBST(MIX_CFLAGS)
+AC_SUBST(GTHREAD_CFLAGS)
+AC_SUBST(GTHREAD_LIBS)
+AC_SUBST(MIXCOMMON_CFLAGS)
+AC_SUBST(MIXCOMMON_LIBS)
+
+dnl check for gtk-doc
+dnl GTK_DOC_CHECK(1.9)
+
+AC_CONFIG_FILES(
+Makefile
+src/Makefile
+tests/Makefile
+tests/smoke/Makefile
+pkgconfig/Makefile
+pkgconfig/mixaudio.pc
+)
+
+dnl Additional Makefiles if we are building documentation with gtkdoc.
+dnl Un-comment this section to enable building of documentation.
+dnl AC_CONFIG_FILES(
+dnl docs/Makefile
+dnl docs/reference/Makefile
+dnl docs/reference/MixAudio/Makefile
+dnl )
+
+AC_OUTPUT
+
+
diff --git a/mix_audio/docs/Makefile.am b/mix_audio/docs/Makefile.am
new file mode 100644
index 0000000..621e3f7
--- /dev/null
+++ b/mix_audio/docs/Makefile.am
@@ -0,0 +1,4 @@
+SUBDIRS = reference
+
+DIST_SUBDIRS = reference
+
diff --git a/mix_audio/docs/reference/Makefile.am b/mix_audio/docs/reference/Makefile.am
new file mode 100644
index 0000000..85bde95
--- /dev/null
+++ b/mix_audio/docs/reference/Makefile.am
@@ -0,0 +1,4 @@
+SUBDIRS = MixAudio
+
+DIST_SUBDIRS = MixAudio
+
diff --git a/mix_audio/docs/reference/MixAudio/Makefile.am b/mix_audio/docs/reference/MixAudio/Makefile.am
new file mode 100644
index 0000000..adf494c
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/Makefile.am
@@ -0,0 +1,96 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+
+
+## Process this file with automake to produce Makefile.in
+
+# We require automake 1.6 at least.
+AUTOMAKE_OPTIONS = 1.6
+
+# This is a blank Makefile.am for using gtk-doc.
+# Copy this to your project's API docs directory and modify the variables to
+# suit your project. See the GTK+ Makefiles in gtk+/docs/reference for examples
+# of using the various options.
+
+# The name of the module, e.g. 'glib'.
+DOC_MODULE=MixAudio
+
+# The top-level SGML file. You can change this if you want to.
+DOC_MAIN_SGML_FILE=$(DOC_MODULE)-docs.sgml
+
+# The directory containing the source code. Relative to $(srcdir).
+# gtk-doc will search all .c & .h files beneath here for inline comments
+# documenting the functions and macros.
+# e.g. DOC_SOURCE_DIR=../../../gtk
+DOC_SOURCE_DIR=$(top_srcdir)/src
+
+# Extra options to pass to gtkdoc-scangobj. Not normally needed.
+#SCANGOBJ_OPTIONS=--type-init-func="g_init(NULL,NULL)"
+
+# Extra options to supply to gtkdoc-scan.
+# e.g. SCAN_OPTIONS=--deprecated-guards="GTK_DISABLE_DEPRECATED"
+SCAN_OPTIONS=--rebuild-sections --rebuild-types
+#SCAN_OPTIONS=--rebuild-sections
+
+# Extra options to supply to gtkdoc-mkdb.
+# e.g. MKDB_OPTIONS=--sgml-mode --output-format=xml
+MKDB_OPTIONS=--sgml-mode --output-format=xml
+
+# Extra options to supply to gtkdoc-mktmpl
+# e.g. MKTMPL_OPTIONS=--only-section-tmpl
+MKTMPL_OPTIONS=
+
+# Extra options to supply to gtkdoc-fixref. Not normally needed.
+# e.g. FIXXREF_OPTIONS=--extra-dir=../gdk-pixbuf/html --extra-dir=../gdk/html
+FIXXREF_OPTIONS=
+
+# Used for dependencies. The docs will be rebuilt if any of these change.
+# e.g. HFILE_GLOB=$(top_srcdir)/gtk/*.h
+# e.g. CFILE_GLOB=$(top_srcdir)/gtk/*.c
+HFILE_GLOB=$(top_srcdir)/src/*.h
+CFILE_GLOB=$(top_srcdir)/src/*.c
+
+# Header files to ignore when scanning.
+# e.g. IGNORE_HFILES=gtkdebug.h gtkintl.h
+IGNORE_HFILES=*~ intel_sst_ioctl.h pvt.h sst_proxy.h amhelper.h
+
+# Images to copy into HTML directory.
+# e.g. HTML_IMAGES=$(top_srcdir)/gtk/stock-icons/stock_about_24.png
+HTML_IMAGES=
+
+# Extra SGML files that are included by $(DOC_MAIN_SGML_FILE).
+# e.g. content_files=running.sgml building.sgml changes-2.0.sgml
+content_files=
+
+# SGML files where gtk-doc abbreviations (#GtkWidget) are expanded
+# These files must be listed here *and* in content_files
+# e.g. expand_content_files=running.sgml
+expand_content_files=
+
+# CFLAGS and LDFLAGS for compiling gtkdoc-scangobj with your library.
+# Only needed if you are using gtkdoc-scangobj to dynamically query widget
+# signals and properties.
+# e.g. INCLUDES=-I$(top_srcdir) -I$(top_builddir) $(GTK_DEBUG_FLAGS)
+# e.g. GTKDOC_LIBS=$(top_builddir)/gtk/$(gtktargetlib)
+AM_CFLAGS=$(GLIB_CFLAGS) $(GOBJECT_CFLAGS) $(MIXCOMMON_CFLAGS)
+GTKDOC_LIBS=$(GLIB_LIBS) $(GOBJECT_LIBS) $(MIXCOMMON_LIBS) $(top_srcdir)/src/libmixaudio.la
+
+# This includes the standard gtk-doc make rules, copied by gtkdocize.
+include $(top_srcdir)/gtk-doc.make
+
+# Other files to distribute
+# e.g. EXTRA_DIST += version.xml.in
+EXTRA_DIST +=
+
+# Files not to distribute
+# for --rebuild-types in $(SCAN_OPTIONS), e.g. $(DOC_MODULE).types
+# for --rebuild-sections in $(SCAN_OPTIONS) e.g.
$(DOC_MODULE)-sections.txt +#DISTCLEANFILES = + +# Comment this out if you want your docs-status tested during 'make check' +#TESTS = $(GTKDOC_CHECK) + diff --git a/mix_audio/docs/reference/MixAudio/MixAudio-docs.sgml b/mix_audio/docs/reference/MixAudio/MixAudio-docs.sgml new file mode 100644 index 0000000..7627fe9 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/MixAudio-docs.sgml @@ -0,0 +1,39 @@ + + + + + MixAudio Reference Manual + + MixAudio version 0.3 + + + + + + Mix Audio API + + + + + + + + + + + + + + Object Hierarchy + + + + + API Index + + + + diff --git a/mix_audio/docs/reference/MixAudio/MixAudio-sections.txt b/mix_audio/docs/reference/MixAudio/MixAudio-sections.txt new file mode 100644 index 0000000..d96a685 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/MixAudio-sections.txt @@ -0,0 +1,187 @@ +
+<SECTION>
+<FILE>mixacpwma</FILE>
+MixAudioWMAVersion
+<TITLE>MixAudioConfigParamsWMA</TITLE>
+MixAudioConfigParamsWMA
+mix_acp_wma_new
+mix_acp_wma_ref
+mix_acp_wma_unref
+MIX_ACP_WMA_CHANNEL_MASK
+MIX_ACP_WMA_FORMAT_TAG
+MIX_ACP_WMA_BLOCK_ALIGN
+MIX_ACP_WMA_ENCODE_OPT
+MIX_ACP_WMA_PCM_BIT_WIDTH
+mix_acp_wma_get_version
+mix_acp_wma_set_version
+<SUBSECTION Standard>
+MIX_AUDIOCONFIGPARAMSWMA
+MIX_IS_AUDIOCONFIGPARAMSWMA
+MIX_TYPE_AUDIOCONFIGPARAMSWMA
+mix_acp_wma_get_type
+MIX_AUDIOCONFIGPARAMSWMA_CLASS
+MIX_IS_AUDIOCONFIGPARAMSWMA_CLASS
+MIX_AUDIOCONFIGPARAMSWMA_GET_CLASS
+</SECTION>
+
+<SECTION>
+<FILE>mixacp</FILE>
+MixACPOpAlign
+MixACPBPSType
+MixDecodeMode
+<TITLE>MixAudioConfigParams</TITLE>
+MixAudioConfigParams
+mix_acp_new
+mix_acp_ref
+mix_acp_unref
+MIX_ACP_DECODEMODE
+MIX_ACP_NUM_CHANNELS
+MIX_ACP_BITRATE
+MIX_ACP_SAMPLE_FREQ
+mix_acp_get_decodemode
+mix_acp_set_decodemode
+mix_acp_get_streamname
+mix_acp_set_streamname
+mix_acp_set_audio_manager
+mix_acp_get_audio_manager
+mix_acp_is_streamname_valid
+mix_acp_get_bps
+mix_acp_set_bps
+mix_acp_get_op_align
+mix_acp_set_op_align
+<SUBSECTION Standard>
+MIX_AUDIOCONFIGPARAMS
+MIX_IS_AUDIOCONFIGPARAMS
+MIX_TYPE_AUDIOCONFIGPARAMS
+mix_acp_get_type
+MIX_AUDIOCONFIGPARAMS_CLASS
+MIX_IS_AUDIOCONFIGPARAMS_CLASS
+MIX_AUDIOCONFIGPARAMS_GET_CLASS
+</SECTION>
+
+<SECTION>
+<FILE>mixacpaac</FILE>
+MixAACBitrateType
+MixAACBitstreamFormt
+MixAACProfile
+MixAACMpegID
+<TITLE>MixAudioConfigParamsAAC</TITLE>
+MixAudioConfigParamsAAC
+mix_acp_aac_new
+mix_acp_aac_ref
+mix_acp_aac_unref
+mix_acp_aac_set_mpeg_id
+mix_acp_aac_get_mpeg_id
+MIX_ACP_AAC_CRC
+mix_acp_aac_set_aot
+mix_acp_aac_get_aot
+MIX_ACP_AAC_SBR_FLAG
+MIX_ACP_AAC_PS_FLAG
+MIX_ACP_AAC_PCE_FLAG
+MIX_ACP_AAC_SAMPLE_RATE
+MIX_ACP_AAC_CHANNELS
+mix_acp_aac_get_bit_stream_format
+mix_acp_aac_set_bit_stream_format
+mix_acp_aac_get_aac_profile
+mix_acp_aac_set_aac_profile
+mix_acp_aac_get_bit_rate_type
+mix_acp_aac_set_bit_rate_type
+<SUBSECTION Standard>
+MIX_AUDIOCONFIGPARAMSAAC
+MIX_IS_AUDIOCONFIGPARAMSAAC
+MIX_TYPE_AUDIOCONFIGPARAMSAAC
+mix_acp_aac_get_type
+MIX_AUDIOCONFIGPARAMSAAC_CLASS
+MIX_IS_AUDIOCONFIGPARAMSAAC_CLASS
+MIX_AUDIOCONFIGPARAMSAAC_GET_CLASS
+</SECTION>
+
+<SECTION>
+<FILE>mixaudio</FILE>
+MixStreamState
+MixState
+MixCodecMode
+MixVolType
+MixVolRamp
+MixIOVec
+MixDeviceState
+<TITLE>MixAudio</TITLE>
+MixAudio
+mix_audio_new
+mix_audio_ref
+mix_audio_unref
+mix_audio_get_version
+mix_audio_initialize
+mix_audio_configure
+mix_audio_decode
+mix_audio_capture_encode
+mix_audio_start
+mix_audio_stop_drop
+mix_audio_stop_drain
+mix_audio_pause
+mix_audio_resume
+mix_audio_get_timestamp
+mix_audio_set_mute
+mix_audio_get_mute
+mix_audio_get_max_vol
+mix_audio_get_min_vol
+mix_audio_get_volume
+mix_audio_set_volume
+mix_audio_deinitialize
+mix_audio_get_stream_state
+mix_audio_get_state
+mix_audio_am_is_enabled
+mix_audio_is_am_available
+mix_audio_get_output_configuration
+mix_audio_get_stream_byte_decoded
+<SUBSECTION Standard>
+MIX_AUDIO
+MIX_IS_AUDIO
+MIX_TYPE_AUDIO
+mix_audio_get_type
+MIX_AUDIO_CLASS
+MIX_IS_AUDIO_CLASS
+MIX_AUDIO_GET_CLASS
+</SECTION>
+
+<SECTION>
+<FILE>mixaip</FILE>
+<TITLE>MixAudioInitParams</TITLE>
+MixAudioInitParams
+mix_aip_new
+mix_aip_ref
+mix_aip_unref
+<SUBSECTION Standard>
+MIX_AUDIOINITPARAMS
+MIX_IS_AUDIOINITPARAMS
+MIX_TYPE_AUDIOINITPARAMS
+mix_aip_get_type
+MIX_AUDIOINITPARAMS_CLASS
+MIX_IS_AUDIOINITPARAMS_CLASS
+MIX_AUDIOINITPARAMS_GET_CLASS
+</SECTION>
+
+<SECTION>
+<FILE>mixacpmp3</FILE>
+<TITLE>MixAudioConfigParamsMP3</TITLE>
+MixAudioConfigParamsMP3
+mix_acp_mp3_new
+mix_acp_mp3_ref
+mix_acp_mp3_unref
+MIX_ACP_MP3_CRC
+MIX_ACP_MP3_MPEG_FORMAT
+MIX_ACP_MP3_MPEG_LAYER
+<SUBSECTION Standard>
+MIX_AUDIOCONFIGPARAMSMP3
+MIX_IS_AUDIOCONFIGPARAMSMP3
+MIX_TYPE_AUDIOCONFIGPARAMSMP3
+mix_acp_mp3_get_type
+MIX_AUDIOCONFIGPARAMSMP3_CLASS
+MIX_IS_AUDIOCONFIGPARAMSMP3_CLASS
+MIX_AUDIOCONFIGPARAMSMP3_GET_CLASS
+</SECTION>
+
+<SECTION>
+<FILE>mixaudiotypes</FILE>
+MixAudioManager
+</SECTION>
+ diff --git a/mix_audio/docs/reference/MixAudio/MixAudio.types b/mix_audio/docs/reference/MixAudio/MixAudio.types new file mode 100644 index 0000000..0a80168 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/MixAudio.types @@ -0,0 +1,6 @@ +mix_acp_wma_get_type +mix_acp_get_type +mix_acp_aac_get_type +mix_audio_get_type +mix_aip_get_type +mix_acp_mp3_get_type diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParams.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParams.html new file mode 100644 index 0000000..1dd3b14 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParams.html @@ -0,0 +1,689 @@ + + + + +MixAudioConfigParams + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixAudioConfigParams

+

MixAudioConfigParams — MixAudio configuration parameters object.

+
+ +
+

Description

+

+MixAudio configuration parameters object, used to communicate audio-specific parameters. +

+

+This object should not be instantiated directly, as codec-specific parameters are defined in the individual derived classes.

+
+
+

Details

+
+

enum MixACPOpAlign

+
typedef enum {
+  MIX_ACP_OUTPUT_ALIGN_UNKNOWN=-1,
+  MIX_ACP_OUTPUT_ALIGN_16=0,
+  MIX_ACP_OUTPUT_ALIGN_MSB,
+  MIX_ACP_OUTPUT_ALIGN_LSB,
+  MIX_ACP_OUTPUT_ALIGN_LAST
+} MixACPOpAlign;
+
+

+Audio Output alignment.

+
++ + + + + + + + + + + + + + + + + + + + + + +

MIX_ACP_OUTPUT_ALIGN_UNKNOWN

Output alignment undefined. +

MIX_ACP_OUTPUT_ALIGN_16

Output is 16-bit aligned. +

MIX_ACP_OUTPUT_ALIGN_MSB

Output word is MSB aligned +

MIX_ACP_OUTPUT_ALIGN_LSB

Output word is LSB aligned +

MIX_ACP_OUTPUT_ALIGN_LAST

Last entry in list. +
+
+
+
+

enum MixACPBPSType

+
typedef enum {
+  MIX_ACP_BPS_UNKNOWN=0,
+  MIX_ACP_BPS_16=16,
+  MIX_ACP_BPS_24=24,
+} MixACPBPSType;
+
+

+Audio Output Size in bits per sample.

+
++ + + + + + + + + + + + + + +

MIX_ACP_BPS_UNKNOWN

Bit Per Sample undefined. +

MIX_ACP_BPS_16

Output bits per sample is 16 bits +

MIX_ACP_BPS_24

Output bits per sample is 24 bits +
+
+
+
+

enum MixDecodeMode

+
typedef enum {
+  MIX_DECODE_NULL=0,
+  MIX_DECODE_DIRECTRENDER,
+  MIX_DECODE_DECODERETURN,
+  MIX_DECODE_LAST
+} MixDecodeMode;
+
+

+Operation Mode for a MI-X session. See mix_audio_configure().

+
++ + + + + + + + + + + + + + + + + + +

MIX_DECODE_NULL

Undefined decode mode. +

MIX_DECODE_DIRECTRENDER

Stream is configured in Direct Render mode +

MIX_DECODE_DECODERETURN

Stream is configured in Decode Return mode +

MIX_DECODE_LAST

Last index in the enumeration. +
+
+
+
+

MixAudioConfigParams

+
typedef struct {
+  MixParams parent;
+
+  /* Audio Session Parameters */
+  MixDecodeMode decode_mode;
+  gchar *stream_name;
+  MixAudioManager audio_manager;
+
+  /* Audio Format Parameters */
+  gint num_channels;
+  gint bit_rate;
+  gint sample_freq;
+  MixACPBPSType bits_per_sample;
+  MixACPOpAlign op_align;
+} MixAudioConfigParams;
+
+

+MixAudio configuration parameters object.

+
++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

MixParams parent;

parent. +

MixDecodeMode decode_mode;

Decode Mode to use for current session. See mix_acp_set_decodemode +

gchar *stream_name;

Stream name. See mix_acp_set_streamname. This object will release the string upon destruction. +

MixAudioManager audio_manager;

Type of Audio Manager. See mix_acp_set_audio_manager. +

gint num_channels;

Number of output channels. See MIX_ACP_NUM_CHANNELS +

gint bit_rate;

Optional. See MIX_ACP_BITRATE +

gint sample_freq;

Output frequency. See MIX_ACP_SAMPLE_FREQ +

MixACPBPSType bits_per_sample;

Number of output bit per sample. See mix_acp_set_bps +

MixACPOpAlign op_align;

Output Byte Alignment. See mix_acp_set_op_align +
+
+
+
+

mix_acp_new ()

+
MixAudioConfigParams * mix_acp_new                      (void);
+

+Use this method to create a new instance of MixAudioConfigParams.

+
++ + + + +

returns :

A newly allocated instance of MixAudioConfigParams +
+
+
+
+

mix_acp_ref ()

+
MixAudioConfigParams * mix_acp_ref                      (MixAudioConfigParams *mix);
+

+Add reference count.

+
++ + + + + + + + + + +

mix :

object to add reference +

returns :

the MixAudioConfigParams instance where reference count has been increased. +
+
+
+
+

mix_acp_unref()

+
#define mix_acp_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

MIX_ACP_DECODEMODE()

+
#define MIX_ACP_DECODEMODE(obj) (MIX_AUDIOCONFIGPARAMS(obj)->decode_mode)
+
+

+MixAudioConfigParam.decode_mode accessor. +

+

+Configure the decode mode to one of the MixDecodeMode values.

+
++ + + + +

obj :

MixAudioConfigParams object +
+
+
+
+

MIX_ACP_NUM_CHANNELS()

+
#define MIX_ACP_NUM_CHANNELS(obj) (MIX_AUDIOCONFIGPARAMS(obj)->num_channels)
+
+

+MixAudioConfigParam.num_channels accessor. +

+

+Configure the number of output channels. This value must exactly match the number of channels in the audio, since down-mixing is not supported. +

+

+This value can be used during MIX_DECODE_DECODERETURN mode for buffer size/duration calculation. +

+

+In Moorestown, the number of channels must be 1 or 2.

+
++ + + + +

obj :

MixAudioConfigParams object +
+
+
+
+

MIX_ACP_BITRATE()

+
#define MIX_ACP_BITRATE(obj) (MIX_AUDIOCONFIGPARAMS(obj)->bit_rate)
+
+

+MixAudioConfigParam.bit_rate accessor. +

+

+Bit rate of the current audio. +

+

+Optional

+
++ + + + +

obj :

MixAudioConfigParams object +
+
+
+
+

MIX_ACP_SAMPLE_FREQ()

+
#define MIX_ACP_SAMPLE_FREQ(obj) (MIX_AUDIOCONFIGPARAMS(obj)->sample_freq)
+
+

+MixAudioConfigParam.sample_freq accessor. +

+

+Output sampling frequency. +

+

+This value can be used during MIX_DECODE_DECODERETURN mode for buffer size/duration calculation.

+
++ + + + +

obj :

MixAudioConfigParams object +
+
+
+
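+
+The accessor macros above expand to struct-member lvalues, so they can be used for both reading and assignment. A minimal sketch (not from the library sources; the values are arbitrary examples):
+
+MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new();
+MixAudioConfigParams *acp = MIX_AUDIOCONFIGPARAMS(mp3);
+
+MIX_ACP_DECODEMODE(acp) = MIX_DECODE_DIRECTRENDER;
+MIX_ACP_NUM_CHANNELS(acp) = 2;     /* 1 or 2 on Moorestown */
+MIX_ACP_SAMPLE_FREQ(acp) = 44100;  /* output sampling frequency in Hz */
+MIX_ACP_BITRATE(acp) = 128000;     /* optional */
+mix_acp_set_bps(acp, MIX_ACP_BPS_16);
+mix_acp_set_op_align(acp, MIX_ACP_OUTPUT_ALIGN_16);
+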
+

mix_acp_get_decodemode ()

+
MixDecodeMode       mix_acp_get_decodemode              (MixAudioConfigParams *obj);
+

+Retrieve currently configured MixDecodeMode.

+
++ + + + + + + + + + +

obj :

MixAudioConfigParams +

returns :

MixDecodeMode +
+
+
+
+

mix_acp_set_decodemode ()

+
MIX_RESULT          mix_acp_set_decodemode              (MixAudioConfigParams *obj,
+                                                         MixDecodeMode mode);
+

+Configure session for one of the MixDecodeMode.

+
++ + + + + + + + + + + + + + +

obj :

MixAudioConfigParams +

mode :

MixDecodeMode to set +

returns :

MIX_RESULT +
+
+
+
+

mix_acp_get_streamname ()

+
gchar *             mix_acp_get_streamname              (MixAudioConfigParams *obj);
+

+Returns a copy of the stream name. The caller must free it with g_free().

+
++ + + + + + + + + + +

obj :

MixAudioConfigParams +

returns :

pointer to a copy of the stream name. NULL if name is not available. +
+
+
+
+

mix_acp_set_streamname ()

+
MIX_RESULT          mix_acp_set_streamname              (MixAudioConfigParams *obj,
+                                                         const gchar *streamname);
+

+Set the stream name. The object will make a copy of the input stream name string.

+
++ + + + + + + + + + + + + + +

obj :

MixAudioConfigParams +

streamname :

Stream name to set +

returns :

MIX_RESULT +
+
+
+
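+
+A small usage sketch for the stream name pair (assumes an acp object as in the earlier sketch; the name is arbitrary):
+
+mix_acp_set_streamname(acp, "MyStream");    /* the object stores a copy */
+
+gchar *name = mix_acp_get_streamname(acp);  /* returns a copy */
+if (name != NULL)
+{
+  g_debug("stream name: %s", name);
+  g_free(name);                             /* caller frees the copy */
+}
+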
+

mix_acp_set_audio_manager ()

+
MIX_RESULT          mix_acp_set_audio_manager           (MixAudioConfigParams *obj,
+                                                         MixAudioManager am);
+

+Set the Audio Manager to one of the MixAudioManager.

+
++ + + + + + + + + + + + + + +

obj :

MixAudioConfigParams +

am :

MixAudioManager +

returns :

MIX_RESULT +
+
+
+
+

mix_acp_get_audio_manager ()

+
MixAudioManager     mix_acp_get_audio_manager           (MixAudioConfigParams *obj);
+

+Retrieve name of currently configured audio manager.

+
++ + + + + + + + + + +

obj :

MixAudioConfigParams +

returns :

MixAudioManager +
+
+
+
+

mix_acp_is_streamname_valid ()

+
gboolean            mix_acp_is_streamname_valid         (MixAudioConfigParams *obj);
+

+Check if stream name is valid considering the current Decode Mode.

+
++ + + + + + + + + + +

obj :

MixAudioConfigParams +

returns :

boolean indicates if stream name is valid. +
+
+
+
+

mix_acp_get_bps ()

+
MixACPBPSType       mix_acp_get_bps                     (MixAudioConfigParams *obj);
+

+Retrieve the currently configured bits-per-sample value.

+
++ + + + + + + + + + +

obj :

MixAudioConfigParams +

returns :

MixACPBPSType +
+
+
+
+

mix_acp_set_bps ()

+
MIX_RESULT          mix_acp_set_bps                     (MixAudioConfigParams *obj,
+                                                         MixACPBPSType type);
+

+Configure the bits per sample to one of the supported MixACPBPSType values.

+
++ + + + + + + + + + + + + + +

obj :

MixAudioConfigParams +

type :

MixACPBPSType to set +

returns :

MIX_RESULT +
+
+
+
+

mix_acp_get_op_align ()

+
MixACPOpAlign       mix_acp_get_op_align                (MixAudioConfigParams *obj);
+

+Get Output Alignment.

+
++ + + + + + + + + + +

obj :

MixAudioConfigParams object +

returns :

MixACPOpAlign +
+
+
+
+

mix_acp_set_op_align ()

+
MIX_RESULT          mix_acp_set_op_align                (MixAudioConfigParams *obj,
+                                                         MixACPOpAlign op_align);
+

+Set Output Alignment to one of the MixACPOpAlign value.

+
++ + + + + + + + + + + + + + +

obj :

MixAudioConfigParams object +

op_align :

One of the supported MixACPOpAlign +

returns :

MIX_RESULT +
+
+
+
+ + + diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsAAC.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsAAC.html new file mode 100644 index 0000000..46e4e8e --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsAAC.html @@ -0,0 +1,823 @@ + + + + +MixAudioConfigParamsAAC + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixAudioConfigParamsAAC

+

MixAudioConfigParamsAAC — Audio configuration parameters for the AAC-LC, HE-AAC v1, and HE-AAC v2 audio formats.

+
+ +
+

Description

+

+A data object which stores audio specific parameters for the following formats: +

+
    +
  • AAC-LC
  • +
  • HE-AAC v1
  • +
  • HE-AAC v2
  • +
+

+

+

+Additional parameters must be set in the parent object MixAudioConfigParams.

+
+
+

Details

+
+

enum MixAACBitrateType

+
typedef enum {
+  MIX_AAC_BR_NULL=-1,
+  MIX_AAC_BR_CONSTANT=0,
+  MIX_AAC_BR_VARIABLE,
+  MIX_AAC_BR_LAST
+} MixAACBitrateType;
+
+

+Types of bitrate in AAC.

+
++ + + + + + + + + + + + + + + + + + +

MIX_AAC_BR_NULL

Undefined bit rate type. +

MIX_AAC_BR_CONSTANT

Constant bit rate. +

MIX_AAC_BR_VARIABLE

Variable bit rate. +

MIX_AAC_BR_LAST

last entry. +
+
+
+
+

enum MixAACBitstreamFormt

+
typedef enum {
+  MIX_AAC_BS_NULL=-1,
+  MIX_AAC_BS_ADTS=0,
+  MIX_AAC_BS_ADIF,
+  MIX_AAC_BS_RAW,
+  MIX_AAC_BS_LAST
+} MixAACBitstreamFormt;
+
+

+AAC bitstream format.

+
++ + + + + + + + + + + + + + + + + + + + + + +

MIX_AAC_BS_NULL

Undefined bitstream format. +

MIX_AAC_BS_ADTS

Bitstream is in ADTS format. +

MIX_AAC_BS_ADIF

Bitstream is in ADIF format. +

MIX_AAC_BS_RAW

Bitstream is in raw format. +

MIX_AAC_BS_LAST

Last entry. +
+
+
+
+

enum MixAACProfile

+
typedef enum {
+  MIX_AAC_PROFILE_NULL=-1,
+  MIX_AAC_PROFILE_MAIN=0,
+  MIX_AAC_PROFILE_LC,
+  MIX_AAC_PROFILE_SSR,
+  MIX_AAC_PROFILE_LAST
+} MixAACProfile;
+
+

+AAC profiles definitions.

+
++ + + + + + + + + + + + + + + + + + + + + + +

MIX_AAC_PROFILE_NULL

Undefined profile. +

MIX_AAC_PROFILE_MAIN

AAC Main profile. Not supported. +

MIX_AAC_PROFILE_LC

AAC-LC profile, including support of SBR and PS tool. +

MIX_AAC_PROFILE_SSR

SSR profile. Not supported. +

MIX_AAC_PROFILE_LAST

Last entry. +
+
+
+
+

enum MixAACMpegID

+
typedef enum {
+  MIX_AAC_MPEG_ID_NULL=-1,
+  MIX_AAC_MPEG_2_ID = 0,
+  MIX_AAC_MPEG_4_ID = 1,
+  MIX_AAC_MPEG_LAST
+} MixAACMpegID;
+
+

+AAC MPEG ID.

+
++ + + + + + + + + + + + + + + + + + +

MIX_AAC_MPEG_ID_NULL

Undefined MPEG ID. +

MIX_AAC_MPEG_2_ID

Indicate MPEG 2 Audio. +

MIX_AAC_MPEG_4_ID

Indicate MPEG 4 Audio. +

MIX_AAC_MPEG_LAST

last entry. +
+
+
+
+

MixAudioConfigParamsAAC

+
typedef struct {
+  MixAudioConfigParams parent;
+
+  /* Audio Format Parameters */
+  MixAACMpegID MPEG_id;
+  MixAACBitstreamFormt bit_stream_format;
+  MixAACProfile aac_profile;
+  guint aot;
+  guint aac_sample_rate;      
+  guint aac_channels;  
+  MixAACBitrateType bit_rate_type;
+  gboolean CRC;
+  guint sbrPresentFlag;
+  guint psPresentFlag;
+  gboolean pce_present;
+  gint8 syntc_id[2]; 
+  gint8 syntc_tag[2]; 
+  gint num_syntc_elems;
+} MixAudioConfigParamsAAC;
+
+

+MixAudio Parameter object

+
++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

MixAudioConfigParams parent;

parent. +

MixAACMpegID MPEG_id;

MPEG ID. See mix_acp_aac_set_mpeg_id +

MixAACBitstreamFormt bit_stream_format;

Bitstream format. See mix_acp_aac_set_bit_stream_format. +

MixAACProfile aac_profile;

AAC profile. See mix_acp_aac_set_aac_profile. +

guint aot;

Audio object type. See mix_acp_aac_set_aot +

guint aac_sample_rate;

See MIX_ACP_AAC_SAMPLE_RATE macro. +

guint aac_channels;

See MIX_ACP_AAC_CHANNELS macro. +

MixAACBitrateType bit_rate_type;

Bitrate type. See mix_acp_aac_set_bit_rate_type +

gboolean CRC;

CRC check 0:disable, 1:enable. +

guint sbrPresentFlag;

See MIX_ACP_AAC_SBR_FLAG macro. +

guint psPresentFlag;

See MIX_ACP_AAC_PS_FLAG macro. +

gboolean pce_present;

Not Used. See MIX_ACP_AAC_PCE_FLAG +

gint8 syntc_id[2];

Not Used. 0 for ID_SCE (Dual Mono), -1 for raw. +

gint8 syntc_tag[2];

Not Used. -1 for raw. 0-16 for rest of the streams. +

gint num_syntc_elems;

Not Used. Number of syntactic elements. +
+
+
+
+

mix_acp_aac_new ()

+
MixAudioConfigParamsAAC * mix_acp_aac_new               (void);
+

+Use this method to create a new instance of MixAudioConfigParamsAAC.

+
++ + + + +

returns :

A newly allocated instance of MixAudioConfigParamsAAC +
+
+
+
+

mix_acp_aac_ref ()

+
MixAudioConfigParamsAAC * mix_acp_aac_ref               (MixAudioConfigParamsAAC *mix);
+

+Add reference count.

+
++ + + + + + + + + + +

mix :

object to add reference +

returns :

the MixAudioConfigParamsAAC instance where reference count has been increased. +
+
+
+
+

mix_acp_aac_unref()

+
#define mix_acp_aac_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

mix_acp_aac_set_mpeg_id ()

+
MIX_RESULT          mix_acp_aac_set_mpeg_id             (MixAudioConfigParamsAAC *obj,
+                                                         MixAACMpegID mpegid);
+

+Configure decoder to treat audio as MPEG 2 or MPEG 4.

+
++ + + + + + + + + + + + + + +

obj :

MixAudioConfigParamsAAC +

mpegid :

MPEG ID to set. +

returns :

MIX_RESULT +
+
+
+
+

mix_acp_aac_get_mpeg_id ()

+
MixAACMpegID        mix_acp_aac_get_mpeg_id             (MixAudioConfigParamsAAC *obj);
+

+Retrieve currently configured mpeg id value.

+
++ + + + + + + + + + +

obj :

MixAudioConfigParamsAAC object +

returns :

MPEG ID. +
+
+
+
+

MIX_ACP_AAC_CRC()

+
#define MIX_ACP_AAC_CRC(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->CRC)
+
+

+MixAudioConfigParamAAC.CRC accessor.

+
++ + + + +

obj :

MixAudioConfigParamsAAC object. +
+
+
+
+

mix_acp_aac_set_aot ()

+
MIX_RESULT          mix_acp_aac_set_aot                 (MixAudioConfigParamsAAC *obj,
+                                                         guint aot);
+

+Audio Object Type for the MPEG-4 audio stream. Valid values are: +

+

+2 - for AAC-LC +

+

+5 - for SBR +

+

+This method returns MIX_RESULT_NOT_SUPPORTED for unsupported values.

+
++ + + + + + + + + + +

obj :

MixAudioConfigParamsAAC +

aot :

Audio Object Type. +
+
+
+
+

mix_acp_aac_get_aot ()

+
guint               mix_acp_aac_get_aot                 (MixAudioConfigParamsAAC *obj);
+

+Retrieve the currently configured Audio Object Type.

+
++ + + + + + + + + + + + + + +

obj :

MixAudioConfigParamsAAC +

returns :

Currently configured Audio Object Type, or 0 if not yet specified. +
+
+
+
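+
+Pulling the setters together, a sketch for an AAC-LC stream in ADTS format might look as follows (illustrative values only; not from the library sources):
+
+MixAudioConfigParamsAAC *aac = mix_acp_aac_new();
+
+mix_acp_aac_set_mpeg_id(aac, MIX_AAC_MPEG_4_ID);
+mix_acp_aac_set_bit_stream_format(aac, MIX_AAC_BS_ADTS);
+mix_acp_aac_set_aac_profile(aac, MIX_AAC_PROFILE_LC);    /* only LC on Moorestown */
+mix_acp_aac_set_aot(aac, 2);                             /* 2 == AAC-LC */
+mix_acp_aac_set_bit_rate_type(aac, MIX_AAC_BR_CONSTANT);
+MIX_ACP_AAC_SAMPLE_RATE(aac) = 44100;
+MIX_ACP_AAC_CHANNELS(aac) = 2;
+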
+

MIX_ACP_AAC_SBR_FLAG()

+
#define MIX_ACP_AAC_SBR_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->sbrPresentFlag)
+
+

+MixAudioConfigParamAAC.sbrPresentFlag accessor. +

+

+Applicable only when bit_stream_format==MIX_AAC_BS_RAW. Indicates whether SBR data is present. +

+

+0: Absent +

+

+1: Present +

+

+-1 (0xffffffff): indicates implicit signalling.

+
++ + + + +

obj :

MixAudioConfigParamsAAC object +
+
+
+
+

MIX_ACP_AAC_PS_FLAG()

+
#define MIX_ACP_AAC_PS_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->psPresentFlag)
+
+

+MixAudioConfigParamAAC.psPresentFlag accessor. +

+

+Applicable only when bit_stream_format==MIX_AAC_BS_RAW. Indicates whether PS data is present. +

+

+0: Absent +

+

+1: Present +

+

+-1 (0xffffffff): indicates implicit signalling.

+
++ + + + +

obj :

MixAudioConfigParamsAAC object +
+
+
+
+

MIX_ACP_AAC_PCE_FLAG()

+
#define MIX_ACP_AAC_PCE_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->pce_present)
+
+

+MixAudioConfigParamAAC.pce_present accessor. +

+

+Applicable only when bit_stream_format==MIX_AAC_BS_RAW. Indicates PCE data presence. +

+

+1:present +

+

+0:absent. +

+

+Not Used on Moorestown.

+
++ + + + +

obj :

MixAudioConfigParamsAAC object. +
+
+
+
+

MIX_ACP_AAC_SAMPLE_RATE()

+
#define MIX_ACP_AAC_SAMPLE_RATE(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->aac_sample_rate)
+
+

+MixAudioConfigParamAAC.aac_sample_rate accessor. +

+

+Plain AAC decoder operating sample rate, which can differ from the output sampling rate with HE-AAC v1 and v2.

+
++ + + + +

obj :

MixAudioConfigParamsAAC object. +
+
+
+
+

MIX_ACP_AAC_CHANNELS()

+
#define MIX_ACP_AAC_CHANNELS(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->aac_channels)
+
+

+MixAudioConfigParamAAC.aac_channels accessor. +

+

+Indicates the number of output channels used by the AAC decoder before SBR or PS tools are applied.

+
++ + + + +

obj :

MixAudioConfigParamsAAC +
+
+
+
+

mix_acp_aac_get_bit_stream_format ()

+
MixAACBitstreamFormt  mix_acp_aac_get_bit_stream_format (MixAudioConfigParamsAAC *obj);
+

+Return the bitstream format currently configured.

+
++ + + + + + + + + + +

obj :

MixAudioConfigParamsAAC +

returns :

MixAACBitstreamFormt +
+
+
+
+

mix_acp_aac_set_bit_stream_format ()

+
MIX_RESULT          mix_acp_aac_set_bit_stream_format   (MixAudioConfigParamsAAC *obj,
+                                                         MixAACBitstreamFormt bit_stream_format);
+

+Set the type of bitstream format as specified in MixAACBitstreamFormt.

+
++ + + + + + + + + + + + + + +

obj :

MixAudioConfigParamsAAC +

bit_stream_format :

Bit stream format. +

returns :

MIX_RESULT +
+
+
+
+

mix_acp_aac_get_aac_profile ()

+
MixAACProfile       mix_acp_aac_get_aac_profile         (MixAudioConfigParamsAAC *obj);
+

+Retrieve the AAC profile currently configured.

+
++ + + + + + + + + + +

obj :

MixAudioConfigParamsAAC +

returns :

MixAACProfile +
+
+
+
+

mix_acp_aac_set_aac_profile ()

+
MIX_RESULT          mix_acp_aac_set_aac_profile         (MixAudioConfigParamsAAC *obj,
+                                                         MixAACProfile aac_profile);
+

+Configure AAC profile for current session. +

+

+Only MIX_AAC_PROFILE_LC is supported in Moorestown.

+
++ + + + + + + + + + + + + + +

obj :

MixAudioConfigParamsAAC +

aac_profile :

AAC profile to set. +

returns :

MIX_RESULT +
+
+
+
+

mix_acp_aac_get_bit_rate_type ()

+
MixAACBitrateType   mix_acp_aac_get_bit_rate_type       (MixAudioConfigParamsAAC *obj);
+

+Retrieve the bit rate type currently configured.

+
++ + + + + + + + + + +

obj :

MixAudioConfigParamsAAC +

returns :

MixAACBitrateType +
+
+
+
+

mix_acp_aac_set_bit_rate_type ()

+
MIX_RESULT          mix_acp_aac_set_bit_rate_type       (MixAudioConfigParamsAAC *obj,
+                                                         MixAACBitrateType bit_rate_type);
+

+Set the bit rate type used.

+
++ + + + + + + + + + + + + + +

obj :

MixAudioConfigParamsAAC +

bit_rate_type :

Bit rate type to set. +

returns :

MIX_RESULT +
+
+
+
+ + + diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsMP3.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsMP3.html new file mode 100644 index 0000000..8c97357 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsMP3.html @@ -0,0 +1,221 @@ + + + + +MixAudioConfigParamsMP3 + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixAudioConfigParamsMP3

+

MixAudioConfigParamsMP3 — Audio configuration parameters for MP3 audio.

+
+
+

Synopsis

+
+
+#include <mixacpmp3.h>
+
+                    MixAudioConfigParamsMP3;
+MixAudioConfigParamsMP3 * mix_acp_mp3_new               (void);
+MixAudioConfigParamsMP3 * mix_acp_mp3_ref               (MixAudioConfigParamsMP3 *mix);
+#define             mix_acp_mp3_unref                   (obj)
+#define             MIX_ACP_MP3_CRC                     (obj)
+#define             MIX_ACP_MP3_MPEG_FORMAT             (obj)
+#define             MIX_ACP_MP3_MPEG_LAYER              (obj)
+
+
+
+

Description

+

+A data object which stores audio specific parameters for MP3 audio. +

+

+Additional parameters must be set in the parent object MixAudioConfigParams.

+
+
+

Details

+
+

MixAudioConfigParamsMP3

+
typedef struct {
+  MixAudioConfigParams parent;
+
+  /* Audio Format Parameters */
+  gboolean CRC;
+  gint MPEG_format;
+  gint MPEG_layer;
+} MixAudioConfigParamsMP3;
+
+

+MI-X Audio Parameter object for MP3 Audio.

+
++ + + + + + + + + + + + + + + + + + +

MixAudioConfigParams parent;

parent. +

gboolean CRC;

CRC. See MIX_ACP_MP3_CRC +

gint MPEG_format;

Optional. MPEG format of the MPEG audio. See MIX_ACP_MP3_MPEG_FORMAT +

gint MPEG_layer;

Optional. MPEG layer of the MPEG audio. See MIX_ACP_MP3_MPEG_LAYER +
+
+
+
+

mix_acp_mp3_new ()

+
MixAudioConfigParamsMP3 * mix_acp_mp3_new               (void);
+

+Use this method to create a new instance of MixAudioConfigParamsMP3.

+
++ + + + +

returns :

A newly allocated instance of MixAudioConfigParamsMP3 +
+
+
+
+

mix_acp_mp3_ref ()

+
MixAudioConfigParamsMP3 * mix_acp_mp3_ref               (MixAudioConfigParamsMP3 *mix);
+

+Add reference count.

+
++ + + + + + + + + + +

mix :

object to add reference +

returns :

the MixAudioConfigParamsMP3 instance where reference count has been increased. +
+
+
+
+

mix_acp_mp3_unref()

+
#define mix_acp_mp3_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

MIX_ACP_MP3_CRC()

+
#define MIX_ACP_MP3_CRC(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->CRC)
+
+

+MixAudioConfigParamMP3.CRC accessor. +

+

+Optional

+
++ + + + +

obj :

MixAudioConfigParamsMP3 object. +
+
+
+
+

MIX_ACP_MP3_MPEG_FORMAT()

+
#define MIX_ACP_MP3_MPEG_FORMAT(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->MPEG_format)
+
+

+MixAudioConfigParamMP3.MPEG_format accessor. +

+

+The supported MPEG format values are 1 and 2.

+
++ + + + +

obj :

MixAudioConfigParamsMP3 object. +
+
+
+
+

MIX_ACP_MP3_MPEG_LAYER()

+
#define MIX_ACP_MP3_MPEG_LAYER(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->MPEG_layer)
+
+

+MixAudioConfigParamMP3.MPEG_layer accessor. +

+

+The supported MPEG layer values are 1, 2, and 3.

+
++ + + + +

obj :

MixAudioConfigParamsMP3 object. +
+
+
+
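+
+As a short sketch (illustrative values; not from the library sources), an MPEG-1 Layer III stream would be described as:
+
+MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new();
+
+MIX_ACP_MP3_CRC(mp3) = FALSE;      /* optional */
+MIX_ACP_MP3_MPEG_FORMAT(mp3) = 1;  /* MPEG-1 */
+MIX_ACP_MP3_MPEG_LAYER(mp3) = 3;   /* Layer III, i.e. MP3 */
+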
+ + + diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsWMA.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsWMA.html new file mode 100644 index 0000000..efd14ca --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsWMA.html @@ -0,0 +1,391 @@ + + + + +MixAudioConfigParamsWMA + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixAudioConfigParamsWMA

+

MixAudioConfigParamsWMA — Audio parameters for WMA audio.

+
+ +
+

Description

+

+A data object which stores audio specific parameters for WMA. +

+

+In Moorestown, only WMA2 is supported. +

+

+Additional parameters must be set in the parent object MixAudioConfigParams.

+
+
+

Details

+
+

enum MixAudioWMAVersion

+
typedef enum {
+  MIX_AUDIO_WMA_VUNKNOWN,
+  MIX_AUDIO_WMA_V9,
+  MIX_AUDIO_WMA_V10,
+  MIX_AUDIO_WMA_V10P,
+  MIX_AUDIO_WMA_LAST
+} MixAudioWMAVersion;
+
+

+WMA version.

+
++ + + + + + + + + + + + + + + + + + + + + + +

MIX_AUDIO_WMA_VUNKNOWN

WMA version undefined. +

MIX_AUDIO_WMA_V9

WMA 9 +

MIX_AUDIO_WMA_V10

WMA 10. Not supported. +

MIX_AUDIO_WMA_V10P

WMA 10 Pro. Not supported. +

MIX_AUDIO_WMA_LAST

last entry. +
+
+
+
+

MixAudioConfigParamsWMA

+
typedef struct {
+  MixAudioConfigParams parent;
+
+  /* Audio Format Parameters */
+  guint32 channel_mask;
+  guint16 format_tag;
+  guint16 block_align;
+  guint16 wma_encode_opt;/* Encoder option */
+  guint8 pcm_bit_width;  /* source pcm bit width */
+  MixAudioWMAVersion wma_version;
+} MixAudioConfigParamsWMA;
+
+

+MI-X Audio Parameter object

+
++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

MixAudioConfigParams parent;

parent. +

guint32 channel_mask;

Channel Mask. See MIX_ACP_WMA_CHANNEL_MASK +

guint16 format_tag;

Format tag. See MIX_ACP_WMA_FORMAT_TAG +

guint16 block_align;

Block alignment. See MIX_ACP_WMA_BLOCK_ALIGN +

guint16 wma_encode_opt;

Encoder option. See MIX_ACP_WMA_ENCODE_OPT +

guint8 pcm_bit_width;

Source pcm bit width. See MIX_ACP_WMA_PCM_BIT_WIDTH +

MixAudioWMAVersion wma_version;

WMA version. See mix_acp_wma_set_version +
+
+
+
+

mix_acp_wma_new ()

+
MixAudioConfigParamsWMA * mix_acp_wma_new               (void);
+

+Use this method to create a new instance of MixAudioConfigParamsWMA.

+
++ + + + +

returns :

A newly allocated instance of MixAudioConfigParamsWMA +
+
+
+
+

mix_acp_wma_ref ()

+
MixAudioConfigParamsWMA * mix_acp_wma_ref               (MixAudioConfigParamsWMA *mix);
+

+Add reference count.

+
++ + + + + + + + + + +

mix :

object to add reference +

returns :

the MixAudioConfigParamsWMA instance where reference count has been increased. +
+
+
+
+

mix_acp_wma_unref()

+
#define mix_acp_wma_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

MIX_ACP_WMA_CHANNEL_MASK()

+
#define MIX_ACP_WMA_CHANNEL_MASK(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->channel_mask)
+
+

+MixAudioConfigParamWMA.channel_mask accessor. +

+

+Channel mask must be one of the following: +

+

+4: For single (1) channel output. +

+

+3: For stereo (2) channels output. +

+

+Only 1 or 2 output channels are supported.

+
++ + + + +

obj :

MixAudioConfigParamsWMA object +
+
+
+
+

MIX_ACP_WMA_FORMAT_TAG()

+
#define MIX_ACP_WMA_FORMAT_TAG(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->format_tag)
+
+

+MixAudioConfigParamWMA.format_tag accessor. +

+

+In Moorestown, only value 0x0161 combined with use of MIX_AUDIO_WMA_V9 is supported.

+
++ + + + +

obj :

MixAudioConfigParamsWMA object +
+
+
+
+

MIX_ACP_WMA_BLOCK_ALIGN()

+
#define MIX_ACP_WMA_BLOCK_ALIGN(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->block_align)
+
+

+MixAudioConfigParamWMA.block_align accessor. +

+

+Block alignment indicates packet size. Available from ASF Header.

+
++ + + + +

obj :

MixAudioConfigParamsWMA object +
+
+
+
+

MIX_ACP_WMA_ENCODE_OPT()

+
#define MIX_ACP_WMA_ENCODE_OPT(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->wma_encode_opt)
+
+

+MixAudioConfigParamWMA.wma_encode_opt accessor. +

+

+Encoder option available from ASF header.

+
++ + + + +

obj :

MixAudioConfigParamsWMA object +
+
+
+
+

MIX_ACP_WMA_PCM_BIT_WIDTH()

+
#define MIX_ACP_WMA_PCM_BIT_WIDTH(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->pcm_bit_width)
+
+

+MixAudioConfigParamWMA.pcm_bit_width accessor. +

+

+Source pcm bit width available from ASF Header.

+
++ + + + +

obj :

MixAudioConfigParamsWMA object +
+
+
+
+

mix_acp_wma_get_version ()

+
MixAudioWMAVersion  mix_acp_wma_get_version             (MixAudioConfigParamsWMA *obj);
+

+Get WMA Version.

+
++ + + + + + + + + + +

obj :

MixAudioConfigParamsWMA object +

returns :

MixAudioWMAVersion +
+
+
+
+

mix_acp_wma_set_version ()

+
MIX_RESULT          mix_acp_wma_set_version             (MixAudioConfigParamsWMA *obj,
+                                                         MixAudioWMAVersion ver);
+

+Set WMA Version. +

+

+In Moorestown, only MIX_AUDIO_WMA_V9 is supported.

+
++ + + + + + + + + + + + + + +

obj :

MixAudioConfigParamsWMA object +

ver :

MixAudioWMAVersion to set. +

returns :

MIX_RESULT. +
+
+
+
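+
+A WMA configuration sketch (not from the library sources; the *_from_asf variables stand for values parsed from the ASF header):
+
+MixAudioConfigParamsWMA *wma = mix_acp_wma_new();
+
+mix_acp_wma_set_version(wma, MIX_AUDIO_WMA_V9);       /* only V9 on Moorestown */
+MIX_ACP_WMA_FORMAT_TAG(wma) = 0x0161;                 /* required with V9 */
+MIX_ACP_WMA_CHANNEL_MASK(wma) = 3;                    /* 3: stereo, 4: mono */
+MIX_ACP_WMA_BLOCK_ALIGN(wma) = block_align_from_asf;
+MIX_ACP_WMA_ENCODE_OPT(wma) = encode_opt_from_asf;
+MIX_ACP_WMA_PCM_BIT_WIDTH(wma) = pcm_bit_width_from_asf;
+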
+ + + diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioInitParams.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioInitParams.html new file mode 100644 index 0000000..5aa7e45 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioInitParams.html @@ -0,0 +1,139 @@ + + + + +MixAudioInitParams + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixAudioInitParams

+

MixAudioInitParams — Initialization parameters object.

+
+
+

Synopsis

+
+
+#include <mixacp.h>
+
+                    MixAudioInitParams;
+MixAudioInitParams * mix_aip_new                        (void);
+MixAudioInitParams * mix_aip_ref                        (MixAudioInitParams *mix);
+#define             mix_aip_unref                       (obj)
+
+
+
+

Description

+

+A data object which stores initialization specific parameters. +

+

+Not Implemented in Moorestown.

+
+
+

Details

+
+

MixAudioInitParams

+
typedef struct {
+  MixParams parent;
+} MixAudioInitParams;
+
+

+MixAudio initialization parameter object.

+
++ + + + +

MixParams parent;

Parent. +
+
+
+
+

mix_aip_new ()

+
MixAudioInitParams * mix_aip_new                        (void);
+

+Use this method to create a new instance of MixAudioInitParams.

+
++ + + + +

returns :

A newly allocated instance of MixAudioInitParams +
+
+
+
+

mix_aip_ref ()

+
MixAudioInitParams * mix_aip_ref                        (MixAudioInitParams *mix);
+

+Add reference count.

+
++ + + + + + + + + + +

mix :

object to add reference +

returns :

the MixAudioInitParams instance where reference count has been increased. +
+
+
+
+

mix_aip_unref()

+
#define mix_aip_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+ + + diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-mixaudiotypes.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-mixaudiotypes.html new file mode 100644 index 0000000..7166107 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/MixAudio-mixaudiotypes.html @@ -0,0 +1,94 @@ + + + + +Mix Audio Types + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

Mix Audio Types

+

Mix Audio Types — Miscellaneous types used by the MixAudio API.

+
+
+

Synopsis

+
+
+#include <mixaudiotypes.h>
+
+enum                MixAudioManager;
+
+
+
+

Description

+

+Miscellaneous types used by the MixAudio API.

+
+
+

Details

+
+

enum MixAudioManager

+
typedef enum {
+  MIX_AUDIOMANAGER_NONE = 0,
+  MIX_AUDIOMANAGER_INTELAUDIOMANAGER,
+  MIX_AUDIOMANAGER_LAST
+} MixAudioManager;
+
+

+Audio Manager enumerations.

+
++ + + + + + + + + + + + + + +

MIX_AUDIOMANAGER_NONE

No Audio Manager. +

MIX_AUDIOMANAGER_INTELAUDIOMANAGER

Intel Audio Manager. +

MIX_AUDIOMANAGER_LAST

Last index. +
+
+
+
+ + + diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp b/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp new file mode 100644 index 0000000..9063304 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp @@ -0,0 +1,124 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp2 b/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp2 new file mode 100644 index 0000000..f9e0358 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp2 @@ -0,0 +1,186 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio.html b/mix_audio/docs/reference/MixAudio/html/MixAudio.html new file mode 100644 index 0000000..2f53577 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/MixAudio.html @@ -0,0 +1,1286 @@ + + + + +MixAudio + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixAudio

+

MixAudio — Object to support single-stream playback using a hardware accelerated decoder.

+
+
+

Synopsis

+
+
+#include <mixaudio.h>
+
+enum                MixStreamState;
+enum                MixState;
+enum                MixCodecMode;
+enum                MixVolType;
+enum                MixVolRamp;
+                    MixIOVec;
+enum                MixDeviceState;
+                    MixAudio;
+MixAudio *          mix_audio_new                       (void);
+MixAudio *          mix_audio_ref                       (MixAudio *mix);
+#define             mix_audio_unref                     (obj)
+MIX_RESULT          mix_audio_get_version               (guint *major,
+                                                         guint *minor);
+MIX_RESULT          mix_audio_initialize                (MixAudio *mix,
+                                                         MixCodecMode mode,
+                                                         MixAudioInitParams *aip,
+                                                         MixDrmParams *drminitparams);
+MIX_RESULT          mix_audio_configure                 (MixAudio *mix,
+                                                         MixAudioConfigParams *audioconfigparams,
+                                                         MixDrmParams *drmparams);
+MIX_RESULT          mix_audio_decode                    (MixAudio *mix,
+                                                         const MixIOVec *iovin,
+                                                         gint iovincnt,
+                                                         guint64 *insize,
+                                                         MixIOVec *iovout,
+                                                         gint iovoutcnt,
+                                                         guint64 *outsize);
+MIX_RESULT          mix_audio_capture_encode            (MixAudio *mix,
+                                                         MixIOVec *iovout,
+                                                         gint iovoutcnt);
+MIX_RESULT          mix_audio_start                     (MixAudio *mix);
+MIX_RESULT          mix_audio_stop_drop                 (MixAudio *mix);
+MIX_RESULT          mix_audio_stop_drain                (MixAudio *mix);
+MIX_RESULT          mix_audio_pause                     (MixAudio *mix);
+MIX_RESULT          mix_audio_resume                    (MixAudio *mix);
+MIX_RESULT          mix_audio_get_timestamp             (MixAudio *mix,
+                                                         guint64 *msecs);
+MIX_RESULT          mix_audio_set_mute                  (MixAudio *mix,
+                                                         gboolean mute);
+MIX_RESULT          mix_audio_get_mute                  (MixAudio *mix,
+                                                         gboolean *muted);
+MIX_RESULT          mix_audio_get_max_vol               (MixAudio *mix,
+                                                         gint *maxvol);
+MIX_RESULT          mix_audio_get_min_vol               (MixAudio *mix,
+                                                         gint *minvol);
+MIX_RESULT          mix_audio_get_volume                (MixAudio *mix,
+                                                         gint *currvol,
+                                                         MixVolType type);
+MIX_RESULT          mix_audio_set_volume                (MixAudio *mix,
+                                                         gint currvol,
+                                                         MixVolType type,
+                                                         gulong msecs,
+                                                         MixVolRamp ramptype);
+MIX_RESULT          mix_audio_deinitialize              (MixAudio *mix);
+MIX_RESULT          mix_audio_get_stream_state          (MixAudio *mix,
+                                                         MixStreamState *streamState);
+MIX_RESULT          mix_audio_get_state                 (MixAudio *mix,
+                                                         MixState *state);
+gboolean            mix_audio_am_is_enabled             (MixAudio *mix);
+MIX_RESULT          mix_audio_is_am_available           (MixAudio *mix,
+                                                         MixAudioManager am,
+                                                         gboolean *avail);
+MIX_RESULT          mix_audio_get_output_configuration  (MixAudio *mix,
+                                                         MixAudioConfigParams **audioconfigparams);
+MIX_RESULT          mix_audio_get_stream_byte_decoded   (MixAudio *mix,
+                                                         guint64 *byte);
+
+
+
+

Object Hierarchy

+
+  GObject
+   +----MixAudio
+
+
+
+

Description

+

+The MixAudio object provides a thread-safe API for an application and/or multimedia framework to take advantage of the Intel Smart Sound Technology(TM) driver for hardware audio decode and render. +

+

+Each MixAudio object represents one streaming session with the Intel Smart Sound driver and provides configuration and control of the decoding and playback options. +

+

+The MixAudio object also supports integration with the Intel Audio Manager service. +

+

+An application can utilize the MixAudio object by calling the following sequence: +

+
    +
  1. +mix_audio_new() to create a MixAudio instance.
  2. +
  3. +mix_audio_initialize() to allocate Intel Smart Sound Technology resource.
  4. +
  5. +mix_audio_configure() to configure stream parameters.
  6. +
  7. +mix_audio_decode() can be called repeatedly for decoding and, optionally, rendering.
  8. +
  9. +mix_audio_start() is called after the 1st mix_audio_decode() method to start rendering.
  10. +
  11. +mix_audio_stop_drain() is called after the last buffer is passed for decoding in with mix_audio_decode().
  12. +
  13. +mix_audio_deinitialize() to free resource once playback is completed.
  14. +
+

+

+

+Since mix_audio_decode() is a blocking call during playback, the following methods are called in a separate thread to control progress: +

+ +
+
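+
+The sequence above can be illustrated with a sketch (not from the library sources). The helper read_mp3_data() is hypothetical: it fills buf with encoded MP3 data and returns the number of bytes read, or 0 at end of stream.
+
+#include <mixaudio.h>
+#include <mixacp.h>
+#include <mixacpmp3.h>
+
+extern gint read_mp3_data(guchar *buf, gsize len);  /* hypothetical */
+
+void play_mp3_sketch(void)
+{
+  MixAudio *audio = mix_audio_new();
+  MixAudioInitParams *aip = mix_aip_new();
+
+  if (mix_audio_initialize(audio, MIX_CODING_DECODE, aip, NULL) == MIX_RESULT_SUCCESS)
+  {
+    MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new();
+    MixAudioConfigParams *acp = MIX_AUDIOCONFIGPARAMS(mp3);
+
+    mix_acp_set_decodemode(acp, MIX_DECODE_DIRECTRENDER);
+    mix_acp_set_streamname(acp, "MP3Playback");  /* name is arbitrary */
+
+    if (mix_audio_configure(audio, acp, NULL) == MIX_RESULT_SUCCESS)
+    {
+      guchar buf[4096];
+      MixIOVec iovin = { buf, 0 };
+      guint64 consumed = 0, outsize = 0;
+      gboolean started = FALSE;
+
+      while ((iovin.size = read_mp3_data(buf, sizeof(buf))) > 0)
+      {
+        /* Direct-render: no output buffers; output params are ignored. */
+        mix_audio_decode(audio, &iovin, 1, &consumed, NULL, 0, &outsize);
+        if (!started) { mix_audio_start(audio); started = TRUE; }
+      }
+
+      mix_audio_stop_drain(audio);  /* block until the last frame renders */
+    }
+
+    mix_acp_mp3_unref(mp3);
+    mix_audio_deinitialize(audio);
+  }
+
+  mix_aip_unref(aip);
+  mix_audio_unref(audio);
+}
+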
+

Details

+
+

enum MixStreamState

+
typedef enum {
+  MIX_STREAM_NULL=0,
+  MIX_STREAM_STOPPED,
+  MIX_STREAM_PLAYING,
+  MIX_STREAM_PAUSED,
+  MIX_STREAM_DRAINING,
+  MIX_STREAM_LAST
+} MixStreamState;
+
+

+Stream State during Decode and Render or Encode mode. These states do not apply to Decode and Return mode.

+
++ + + + + + + + + + + + + + + + + + + + + + + + + + +

MIX_STREAM_NULL

Stream is not allocated. +

MIX_STREAM_STOPPED

Stream is in the STOP state. This is the only state in which DNR is allowed. +

MIX_STREAM_PLAYING

Stream is in the Playing state. +

MIX_STREAM_PAUSED

Stream is Paused. +

MIX_STREAM_DRAINING

Stream is draining -- the remaining buffers in the device are still playing. This state is special in that no other control operations are allowed while in it. The stream becomes MIX_STREAM_STOPPED automatically once draining has completed. +

MIX_STREAM_LAST

Last index in the enumeration. +
+
+
+
+

enum MixState

+
typedef enum {
+  MIX_STATE_NULL=0,
+  MIX_STATE_UNINITIALIZED,
+  MIX_STATE_INITIALIZED,
+  MIX_STATE_CONFIGURED,
+  MIX_STATE_LAST
+} MixState;
+
+

+The various states the device can be in.

+
++ + + + + + + + + + + + + + + + + + + + + + +

MIX_STATE_NULL

Undefined state. +

MIX_STATE_UNINITIALIZED

MIX is not initialized. +

MIX_STATE_INITIALIZED

MIX is initialized. +

MIX_STATE_CONFIGURED

MIX is configured successfully. +

MIX_STATE_LAST

Last index in the enumeration. +
+
+
+
+

enum MixCodecMode

+
typedef enum {
+  MIX_CODING_INVALID=0,
+  MIX_CODING_ENCODE,
+  MIX_CODING_DECODE,
+  MIX_CODING_LAST
+} MixCodecMode;
+
+

+Mode in which the device operates. See mix_audio_initialize().

+
++ + + + + + + + + + + + + + + + + + +

MIX_CODING_INVALID

Indicates the device is uninitialized for any mode. +

MIX_CODING_ENCODE

Indicates device is opened for encoding. +

MIX_CODING_DECODE

Indicates device is opened for decoding. +

MIX_CODING_LAST

Last index in the enumeration. +
+
+
+
+

enum MixVolType

+
typedef enum {
+  MIX_VOL_PERCENT=0,
+  MIX_VOL_DECIBELS,
+  MIX_VOL_LAST
+} MixVolType;
+
+

+See mix_audio_get_volume() and mix_audio_set_volume().

+
++ + + + + + + + + + + + + + +

MIX_VOL_PERCENT

volume is expressed in percentage. +

MIX_VOL_DECIBELS

volume is expressed in decibel. +

MIX_VOL_LAST

last entry. +
+
+
+
+

enum MixVolRamp

+
typedef enum 
+{
+  MIX_RAMP_LINEAR = 0,
+  MIX_RAMP_EXPONENTIAL,
+  MIX_RAMP_LAST
+} MixVolRamp;
+
+

+See mix_audio_get_volume() and mix_audio_set_volume().

+
++ + + + + + + + + + + + + + +

MIX_RAMP_LINEAR

Volume ramps linearly. +

MIX_RAMP_EXPONENTIAL

Volume ramps exponentially. +

MIX_RAMP_LAST

last entry. +
+
+
+
+

MixIOVec

+
typedef struct {
+  guchar *data;
+  gint size;
+} MixIOVec;
+
+

+Scatter-gather style structure, used by the mix_audio_decode() method for input and output buffers.

+
++ + + + + + + + + + +

guchar *data;

data pointer +

gint size;

size of buffer in data +
+
+
+
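+
+For illustration, an input vector over two caller-owned buffers can be built like this (a sketch; buffer sizes are arbitrary):
+
+guchar chunk_a[2048], chunk_b[2048];
+
+/* Two scatter/gather entries: data points at a buffer, size gives its
+ * length in bytes. */
+MixIOVec iovin[2] = {
+  { chunk_a, sizeof(chunk_a) },
+  { chunk_b, sizeof(chunk_b) },
+};
+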
+

enum MixDeviceState

+
typedef enum {
+  MIX_AUDIO_DEV_CLOSED=0,
+  MIX_AUDIO_DEV_OPENED,
+  MIX_AUDIO_DEV_ALLOCATED
+} MixDeviceState;
+
+

+Device state.

+
++ + + + + + + + + + + + + + +

MIX_AUDIO_DEV_CLOSED

TBD +

MIX_AUDIO_DEV_OPENED

TBD +

MIX_AUDIO_DEV_ALLOCATED

TBD +
+
+
+
+

MixAudio

+
typedef struct {
+  GObject parent;
+} MixAudio;
+
+

+MI-X Audio object

+
++ + + + +

GObject parent;

Parent object. +
+
+
+
+

mix_audio_new ()

+
MixAudio *          mix_audio_new                       (void);
+

+Use this method to create a new instance of MixAudio.

+
++ + + + +

returns :

A newly allocated instance of MixAudio +
+
+
+
+

mix_audio_ref ()

+
MixAudio *          mix_audio_ref                       (MixAudio *mix);
+

+Add reference count.

+
++ + + + + + + + + + +

mix :

object to add reference +

returns :

the MixAudio instance where reference count has been increased. +
+
+
+
+

mix_audio_unref()

+
#define mix_audio_unref(obj) g_object_unref (G_OBJECT(obj))
+
+

+Decrement reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

mix_audio_get_version ()

+
MIX_RESULT          mix_audio_get_version               (guint *major,
+                                                         guint *minor);
+

+Returns the version of the MI-X library.

+
++ + + + +

returns :

MIX_RESULT_SUCCESS +
+
+
+
+

mix_audio_initialize ()

+
MIX_RESULT          mix_audio_initialize                (MixAudio *mix,
+                                                         MixCodecMode mode,
+                                                         MixAudioInitParams *aip,
+                                                         MixDrmParams *drminitparams);
+

+This function will initialize an encode or decode session with this MixAudio instance. During this call, the device will be opened. If the device is not available, an error is returned to the caller so that an alternative (e.g. software decoding) can be configured instead. Use mix_audio_deinitialize() to close the device. +

+

+A previous initialized session must be de-initialized using mix_audio_deinitialize() before it can be initialized again.

+
++ + + + + + + + + + + + + + + + + + + + + + +

mix :

MixAudio object. +

mode :

Requested MixCodecMode. +

aip :

Audio initialization parameters. +

drminitparams :

Optional. DRM initialization param if applicable. +

returns :

MIX_RESULT_SUCCESS on successful initialization. MIX_RESULT_ALREADY_INIT if the session is already initialized. +
+
+
+
+

mix_audio_configure ()

+
MIX_RESULT          mix_audio_configure                 (MixAudio *mix,
+                                                         MixAudioConfigParams *audioconfigparams,
+                                                         MixDrmParams *drmparams);
+

+This function can be used to configure a stream for the current session. The caller can use this function to do the following: +

+

+

+
    +
  • Choose decoding mode (direct-render or decode-return)
  • +
  • Provide DRM parameters (using DRMparams object)
  • +
  • Provide stream parameters (using STRMparams objects)
  • +
  • Provide a stream name for the Intel Smart Sound Technology stream
  • +
+

+

+

+SST stream parameters will be set during this call, and stream resources allocated in SST. +

+

+

+
+

Intel Audio Manager support:

+

If Intel Audio Manager support is enabled, and if mode is specified to MIX_DECODE_DIRECTRENDER, the SST stream will be registered with Intel Audio Manager in the context of this call, using the stream name provided in streamname. Application will receive a notification from Intel Audio Manager that the stream has been created during or soon after this call. The application should be ready to handle either possibility. A stream ID (associated with the stream name) will be provided by Intel Audio Manager which will be used for subsequent notifications from Intel Audio Manager or calls to Intel Audio Manager for muting, pause and resume. See mix_audio_getstreamid()

+

If a stream is already registered with Intel Audio Manager, application must pass the same streamname argument to retain the session. Otherwise, the existing stream will be unregistered and a new stream will be registered with the new streamname. +

+
+

+

+

+If mode is specified as MIX_DECODE_DIRECTRENDER but direct-render mode is not available (due to end user use of an alternative output device), an error indication will be returned to the caller so that an alternate pipeline configuration can be created (e.g. including a Pulse Audio sink, and support for output buffers). In this case, the caller will need to call mix_audio_configure() again with mode specified as MIX_DECODE_DECODERETURN to request decode-return mode. +

+

+This method can be called multiple times if reconfiguration of the stream is needed. However, this method must be called when the stream is in MIX_STREAM_STOPPED state.

+
++ + + + + + + + + + + + + + + + + + +

mix :

MixAudio object. +

audioconfigparams :

a MixAudioConfigParams derived object containing information for the specific stream type. +

drmparams :

Optional. DRM initialization param if applicable. +

returns :

Result indicates successful or not. +
+
+
+
+

mix_audio_decode ()

+
MIX_RESULT          mix_audio_decode                    (MixAudio *mix,
+                                                         const MixIOVec *iovin,
+                                                         gint iovincnt,
+                                                         guint64 *insize,
+                                                         MixIOVec *iovout,
+                                                         gint iovoutcnt,
+                                                         guint64 *outsize);
+

+This function is used to initiate HW accelerated decoding of encoded data buffers. This function may be used in two major modes, direct-render or decode-return. +

+

+With direct-render, input buffers are provided by the caller which hold encoded audio data, and no output buffers are provided. The encoded data is decoded, and the decoded data is sent directly to the output speaker. This allows very low power audio rendering and is the best choice of operation for longer battery life. +

+

+

+
+

Intel Audio Manager Support

+However, if the user has connected a different target output device, such as Bluetooth headphones, this mode cannot be used as the decoded audio must be directed to the Pulse Audio stack where the output to Bluetooth device can be supported, per Intel Audio Manager guidelines. This mode is called decode-return, and requires the caller to provide output buffers for the decoded data. +
+

+

+

+Input buffers in both modes are one or more user space buffers using a scatter/gather style vector interface. +

+

+Output buffers for the decode-return mode are one or more user space buffers in a scatter style vector interface. Buffers will be filled in order and lengths of data filled will be returned. +

+

+This call will block until data has been completely copied or queued to the driver. All user space buffers may be used or released when this call returns. +

+

+Note: If the stream is configured as MIX_DECODE_DIRECTRENDER, then whenever the stream is in the MIX_STREAM_STOPPED state, a call to mix_audio_decode() will not start playback until mix_audio_start() is called. This behavior allows the application to queue up data but delay playback until the appropriate time.

+
++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

mix :

MixAudio object. +

iovin :

a pointer to an array of MixIOVec structures that contain the input buffers +

iovincnt :

the number of entry in the iovin array +

iovout :

a pointer to an array of MixIOVec structures that represent the output buffers. On input, each size in the MixIOVec array represents the available buffer size pointed to by data. Upon return, each size value will be updated to reflect how much data has been filled. This parameter is ignored if the stream is configured to MIX_DECODE_DIRECTRENDER. See mix_audio_configure() for more detail. +

iovoutcnt :

in/out parameter which, on input, contains the number of entries available in the iovout array. Upon return, this value will be updated to reflect how many entries in the iovout array have been populated with data. This parameter is ignored if the stream is configured to MIX_DECODE_DIRECTRENDER. See mix_audio_configure() for more detail. +

outsize :

Total number of bytes returned for the decode session. This parameter is ignored if stream is configured to MIX_DECODE_DIRECTRENDER. +

returns :

MIX_RESULT +
+
+
+
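+
+A decode-return sketch (assumes audio was configured with MIX_DECODE_DECODERETURN and that in_buf already holds a full buffer of encoded data; not from the library sources):
+
+guchar in_buf[4096], out_buf[16384];
+MixIOVec in  = { in_buf, sizeof(in_buf) };
+MixIOVec out = { out_buf, sizeof(out_buf) };
+guint64 insize = 0, outsize = 0;
+
+/* On return, out.size reflects the bytes filled and outsize the total. */
+if (mix_audio_decode(audio, &in, 1, &insize, &out, 1, &outsize) == MIX_RESULT_SUCCESS)
+  g_debug("decoded %" G_GUINT64_FORMAT " bytes", outsize);
+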
+

mix_audio_capture_encode ()

+
MIX_RESULT          mix_audio_capture_encode            (MixAudio *mix,
+                                                         MixIOVec *iovout,
+                                                         gint iovoutcnt);
+

+Read encoded data from the device. +

+

+ +NOTE: May need to rename to "read_encoded" or other name. Since "encode" seems to mean taking raw audio and convert to compressed audio. +

+
++ + + + + + + + + + + + + + + + + + +

mix :

MixAudio object. +

iovout :

Capture audio samples. +

iovoutcnt :

Number of entries in the vector iovout. +

returns :

MIX_RESULT +
+
+
+
+

mix_audio_start ()

+
MIX_RESULT          mix_audio_start                     (MixAudio *mix);
+

+If the stream is configured to MIX_DECODE_DIRECTRENDER, the application uses this call to change the stream out of the MIX_STREAM_STOPPED state. If mix_audio_decode() was called and is blocking in a separate thread prior to this call, this method causes the device to start rendering data. +

+

+In MIX_DECODE_DECODERETURN, this method is a no-op.

+
++ + + + + + + + + + +

mix :

MixAudio object. +

returns :

MIX_RESULT_SUCCESS if the resulting state is either MIX_STREAM_PLAYING or MIX_STREAM_PAUSED. Fail code otherwise. +
+
+
+
+

mix_audio_stop_drop ()

+
MIX_RESULT          mix_audio_stop_drop                 (MixAudio *mix);
+

+If the stream is configured to MIX_DECODE_DIRECTRENDER, application uses this function to stop the processing and playback of audio. +

+

+All remaining frames to be decoded or rendered will be discarded and playback will stop immediately, unblocking any pending mix_audio_decode(). +

+

+If MIX_STOP_DRAIN is requested, the call will block with the stream state set to MIX_STREAM_DRAINING, and return only after all remaining frames in previously submitted buffers are decoded and rendered. When MIX_STOP_DRAIN returns successfully, the stream will have reached MIX_STREAM_STOPPED. +

+

+After this call, the timestamp retrieved by mix_audio_get_timestamp() is reset to zero. +

+

+Note that this method returns MIX_RESULT_WRONG_STATE if the stream is in MIX_STREAM_DRAINING state.

+
++ + + + + + + + + + +

mix :

MixAudio object. +

returns :

MIX_RESULT_SUCCESS if the resulting state has successfully reached MIX_STREAM_STOPPED. Fail code otherwise. +
+
+
+
+

mix_audio_stop_drain ()

+
MIX_RESULT          mix_audio_stop_drain                (MixAudio *mix);
+

+If the stream is configured to MIX_DECODE_DIRECTRENDER, application uses this function to stop the processing and playback of audio. +

+

+The call will block with the stream state set to MIX_STREAM_DRAINING, and return only after all remaining frames in previously submitted buffers are decoded and rendered. +

+

+Note that this method blocks until MIX_STREAM_STOPPED is reached if it is called when the stream is already in MIX_STREAM_DRAINING state.

+
++ + + + + + + + + + +

mix :

MixAudio object. +

returns :

MIX_RESULT_SUCCESS if the resulting state has successfully reached MIX_STREAM_STOPPED. Fail code otherwise. +
+
+
+
+

mix_audio_pause ()

+
MIX_RESULT          mix_audio_pause                     (MixAudio *mix);
+

+If the stream is configured to MIX_DECODE_DIRECTRENDER, the application uses this call to change the stream state from MIX_STREAM_PLAYING to MIX_STREAM_PAUSED. Note that this method returns success only when the resulting state reaches MIX_STREAM_PAUSED; it will return a failure code if called in a state such as MIX_STREAM_STOPPED, from which transitioning to MIX_STREAM_PAUSED is not possible. +

+

+In some situations, where there is a potential race condition with the DRAINING operation, this method may return MIX_RESULT_NEED_RETRY to indicate that the last operation result is inconclusive, and the caller should call again.

+

mix :

MixAudio object. +

returns :

MIX_RESULT_SUCCESS if MIX_STREAM_PAUSED state is reached successfully. MIX_RESULT_WRONG_STATE if operation is not allowed with the current state. +
+
+
+
+

mix_audio_resume ()

+
MIX_RESULT          mix_audio_resume                    (MixAudio *mix);
+

+If the stream is configured to MIX_DECODE_DIRECTRENDER, the application uses this call to change the stream state to MIX_STREAM_PLAYING. Note that this method returns success only when the resulting state reaches MIX_STREAM_PLAYING; it will return a failure code if called in a state such as MIX_STREAM_DRAINING, from which transitioning to MIX_STREAM_PLAYING is not possible.

+

mix :

MixAudio object. +

returns :

MIX_RESULT_SUCCESS if MIX_STREAM_PLAYING state is reached successfully. MIX_RESULT_WRONG_STATE if operation is not allowed with the current state. +
+
+
+
+
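Since mix_audio_pause() may report MIX_RESULT_NEED_RETRY when racing a drain, a bounded retry loop is a reasonable calling pattern; the retry count here is an illustrative assumption:

MIX_RESULT res;
gint attempts = 3;                       /* retry bound is illustrative */

do {
  res = mix_audio_pause(mix);
} while (res == MIX_RESULT_NEED_RETRY && --attempts > 0);

if (res == MIX_RESULT_SUCCESS) {
  /* ... later, return to MIX_STREAM_PLAYING ... */
  res = mix_audio_resume(mix);
}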

mix_audio_get_timestamp ()

+
MIX_RESULT          mix_audio_get_timestamp             (MixAudio *mix,
+                                                         guint64 *msecs);
+

+This function can be used to retrieve the current timestamp for audio playback in milliseconds. The timestamp reflects the amount of audio data rendered since the start of the stream, or since the last stop. Note that the timestamp is always reset to zero when the stream enters the MIX_STREAM_STOPPED state. The timestamp is an unsigned long value, so it will wrap when it reaches ULONG_MAX. This function is only valid in direct-render mode.

+

mix :

MixAudio object. +

msecs :

play time in milliseconds. +

returns :

MIX_RESULT_SUCCESS if the timestamp is available. MIX_RESULT_WRONG_MODE if operation is not allowed with the current mode. +
+
+
+
+
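A small sketch of polling the render position, for example to drive a progress display (direct-render mode only):

guint64 msecs = 0;

if (mix_audio_get_timestamp(mix, &msecs) == MIX_RESULT_SUCCESS) {
  /* msecs: audio rendered since stream start (or the last stop);
   * resets to zero on MIX_STREAM_STOPPED and wraps at the type limit. */
  g_print("position: %" G_GUINT64_FORMAT " ms\n", msecs);
}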

mix_audio_set_mute ()

+
MIX_RESULT          mix_audio_set_mute                  (MixAudio *mix,
+                                                         gboolean mute);
+

+This function is used to mute and unmute audio playback. While muted, playback continues but silently. This function is only valid when the session is configured to MIX_DECODE_DIRECTRENDER mode. +

+

+Note that the playback volume may change due to a change of global settings while the stream is muted.

+

mix :

MixAudio object. +

mute :

Turn mute on/off. +

returns :

MIX_RESULT_SUCCESS on success or other fail code. +
+
+
+
+

mix_audio_get_mute ()

+
MIX_RESULT          mix_audio_get_mute                  (MixAudio *mix,
+                                                         gboolean *muted);
+

+Get the current mute state.

+

mix :

MixAudio object. +

muted :

current mute state. +

returns :

MIX_RESULT_SUCCESS on success or other fail code. +
+
+
+
+
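A mute toggle built from this call and mix_audio_set_mute() (direct-render mode assumed):

/* Toggle mute: read the current state, then write its inverse. */
gboolean muted = FALSE;

if (mix_audio_get_mute(mix, &muted) == MIX_RESULT_SUCCESS)
  mix_audio_set_mute(mix, !muted);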

mix_audio_get_max_vol ()

+
MIX_RESULT          mix_audio_get_max_vol               (MixAudio *mix,
+                                                         gint *maxvol);
+

+This function can be used if the application will be setting the audio volume using decibels instead of percentage. The maximum volume in decibels supported by the driver will be returned. This value can be used to determine the upper bound of the decibel range in calculating volume levels. This value is a signed integer. This function is only valid if stream is configured to MIX_DECODE_DIRECTRENDER mode.

+

mix :

MixAudio object. +

maxvol :

pointer to receive the maximum volume. +

returns :

MIX_RESULT_SUCCESS on success or other fail code. +
+
+
+
+

mix_audio_get_min_vol ()

+
MIX_RESULT          mix_audio_get_min_vol               (MixAudio *mix,
+                                                         gint *minvol);
+

+This function can be used if the application will be setting the audio volume using decibels instead of percentage. The minimum volume in decibels supported by the driver will be returned. This value can be used to determine the lower bound of the decibel range in calculating volume levels. This value is a signed integer. This function is only valid if stream is configured to MIX_DECODE_DIRECTRENDER mode.

+

mix :

MixAudio object. +

minvol :

pointer to receive the minimum volume. +

returns :

MIX_RESULT_SUCCESS on success or other fail code. +
+
+
+
+
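Together with mix_audio_get_max_vol(), this gives the device's decibel range. Below is a sketch mapping a 0-100 slider position onto that range; the linear mapping and the existence of a decibel member of MixVolType are illustrative assumptions:

gint maxvol = 0, minvol = 0;

if (mix_audio_get_max_vol(mix, &maxvol) == MIX_RESULT_SUCCESS &&
    mix_audio_get_min_vol(mix, &minvol) == MIX_RESULT_SUCCESS) {
  gint slider = 75;                              /* 0..100 UI position */
  gint db = minvol + (maxvol - minvol) * slider / 100;
  /* db can now be passed to mix_audio_set_volume() using the decibel
   * member of MixVolType (naming assumed, not shown in this manual). */
}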

mix_audio_get_volume ()

+
MIX_RESULT          mix_audio_get_volume                (MixAudio *mix,
+                                                         gint *currvol,
+                                                         MixVolType type);
+

+This function returns the current volume setting in either decibels or percentage. This function is only valid if stream is configured to MIX_DECODE_DIRECTRENDER mode.

+

mix :

MixAudio object. +

currvol :

Current volume. Note that if type equals MIX_VOL_PERCENT, this value will be returned within the range of 0 to 100 inclusive. +

type :

The type represented by currvol. +

returns :

MIX_RESULT_SUCCESS on success or other fail code. +
+
+
+
+

mix_audio_set_volume ()

+
MIX_RESULT          mix_audio_set_volume                (MixAudio *mix,
+                                                         gint currvol,
+                                                         MixVolType type,
+                                                         gulong msecs,
+                                                         MixVolRamp ramptype);
+

+This function sets the current volume setting in either decibels or percentage. This function is only valid if the stream is configured to MIX_DECODE_DIRECTRENDER mode.

+

mix :

MixAudio object. +

currvol :

Current volume. Note that if type equals MIX_VOL_PERCENT, this value will be truncated to within the range of 0 to 100 inclusive. +

type :

The type represented by currvol. +

msecs :

Duration of the volume ramp in milliseconds. +

ramptype :

Type of volume ramp to apply during the change. +

returns :

MIX_RESULT_SUCCESS on success or other fail code. +
+
+
+
+
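A hedged example of a ramped change to 80% volume; passing 0 for the ramp type follows the "Ramp type, default=0" note in the SST ioctl structures later in this document:

/* Ramp to 80% over 500 ms. MIX_VOL_PERCENT appears in the parameter
 * table above; ramp type 0 as default follows struct snd_sst_vol. */
MIX_RESULT res = mix_audio_set_volume(mix, 80, MIX_VOL_PERCENT,
                                      500, (MixVolRamp)0);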

mix_audio_deinitialize ()

+
MIX_RESULT          mix_audio_deinitialize              (MixAudio *mix);
+

+This function will uninitialize a session with this MI-X instance. During this call, the SST device will be closed and resources, including mmapped buffers, will be freed. This function should be called by the application once mix_audio_initialize() has been called. +

+

+

+
+

Intel Audio Manager Support

+The SST stream will be unregistered from the Intel Audio Manager if it was registered. +
+

+

+

+Note that this method should not fail normally. If it does return failure, the state of this object and the underlying mechanism is compromised and the application should not attempt to reuse this object.

+

mix :

MixAudio object. +

returns :

MIX_RESULT_SUCCESS on success or other fail code. +
+
+
+
+
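A teardown sketch; stopping before deinitializing is an assumption based on the state rules earlier in this section:

/* Teardown: stop the stream, release the session, drop the reference. */
mix_audio_stop_drop(mix);                /* result ignored at teardown */
if (mix_audio_deinitialize(mix) != MIX_RESULT_SUCCESS) {
  /* per the note above: object state is compromised, do not reuse */
}
mix_audio_unref(mix);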

mix_audio_get_stream_state ()

+
MIX_RESULT          mix_audio_get_stream_state          (MixAudio *mix,
+                                                         MixStreamState *streamState);
+

+Get the stream state of the current stream.

+

mix :

MixAudio object. +

streamState :

pointer to receive stream state. +

returns :

MIX_RESULT +
+
+
+
+

mix_audio_get_state ()

+
MIX_RESULT          mix_audio_get_state                 (MixAudio *mix,
+                                                         MixState *state);
+

+Get the device state of the audio session.

+

mix :

MixAudio object. +

state :

pointer to receive the device state. +

returns :

Current device state. +
+
+
+
+

mix_audio_am_is_enabled ()

+
gboolean            mix_audio_am_is_enabled             (MixAudio *mix);
+

+This method checks if the current session is configured to use the Intel Audio Manager. Note that the Intel Audio Manager is considered disabled if the stream has not been explicitly initialized to use the service.

+

mix :

MixAudio object. +

returns :

boolean indicating whether Intel Audio Manager is enabled for the current session. +
+
+
+
+

mix_audio_is_am_available ()

+
MIX_RESULT          mix_audio_is_am_available           (MixAudio *mix,
+                                                         MixAudioManager am,
+                                                         gboolean *avail);
+

+Check if the given Audio Manager is available.

+

mix :

MixAudio object. +

am :

The Audio Manager to check for availability. +

avail :

Pointer to receive the availability result. +

returns :

MIX_RESULT +
+
+
+
+

mix_audio_get_output_configuration ()

+
MIX_RESULT          mix_audio_get_output_configuration  (MixAudio *mix,
+                                                         MixAudioConfigParams **audioconfigparams);
+

+This method retrieves the current configuration. It can be called after initialization. If a stream has been configured, it returns the corresponding derived object of MixAudioConfigParams.

+

mix :

MixAudio object. +

audioconfigparams :

double pointer to hold output configuration. +

returns :

MIX_RESULT_SUCCESS on success or other fail code. +
+
+
+
+
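A sketch of reading back the active configuration using accessors defined elsewhere in this manual; the assumption that the caller owns (and must unref) the returned object is noted in the code:

MixAudioConfigParams *acp = NULL;

if (mix_audio_get_output_configuration(mix, &acp) == MIX_RESULT_SUCCESS &&
    MIX_IS_AUDIOCONFIGPARAMS(acp)) {
  g_print("channels=%d sample_freq=%d\n",
          MIX_ACP_NUM_CHANNELS(acp), MIX_ACP_SAMPLE_FREQ(acp));
  mix_acp_unref(acp);   /* assumption: caller owns the returned reference */
}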

mix_audio_get_stream_byte_decoded ()

+
MIX_RESULT          mix_audio_get_stream_byte_decoded   (MixAudio *mix,
+                                                         guint64 *byte);
+

+Retrieve the cumulative bytes decoded. +

+

+Not Implemented.

+

mix :

MixAudio object. +

byte :

pointer to receive the cumulative bytes decoded. +

returns :

MIX_RESULT_SUCCESS if the value is available. MIX_RESULT_WRONG_MODE if operation is not allowed with the current mode. +
+
+
+
+ + + diff --git a/mix_audio/docs/reference/MixAudio/html/api-index-full.html b/mix_audio/docs/reference/MixAudio/html/api-index-full.html new file mode 100644 index 0000000..99c830e --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/api-index-full.html @@ -0,0 +1,259 @@ + + + + +API Index + + + + + + + + + + + + + + + + + + +
+

+API Index

+
GObjectMixAudio 
+
+

M

+
+
MixAACBitrateType, enum MixAACBitrateType +
+
MixAACBitstreamFormt, enum MixAACBitstreamFormt +
+
MixAACMpegID, enum MixAACMpegID +
+
MixAACProfile, enum MixAACProfile +
+
MixACPBPSType, enum MixACPBPSType +
+
MixACPOpAlign, enum MixACPOpAlign +
+
MixAudio, MixAudio +
+
MixAudioConfigParams, MixAudioConfigParams +
+
MixAudioConfigParamsAAC, MixAudioConfigParamsAAC +
+
MixAudioConfigParamsMP3, MixAudioConfigParamsMP3 +
+
MixAudioConfigParamsWMA, MixAudioConfigParamsWMA +
+
MixAudioInitParams, MixAudioInitParams +
+
MixAudioManager, enum MixAudioManager +
+
MixAudioWMAVersion, enum MixAudioWMAVersion +
+
MixCodecMode, enum MixCodecMode +
+
MixDecodeMode, enum MixDecodeMode +
+
MixDeviceState, enum MixDeviceState +
+
MixIOVec, MixIOVec +
+
MixState, enum MixState +
+
MixStreamState, enum MixStreamState +
+
MixVolRamp, enum MixVolRamp +
+
MixVolType, enum MixVolType +
+
MIX_ACP_AAC_CHANNELS, MIX_ACP_AAC_CHANNELS() +
+
MIX_ACP_AAC_CRC, MIX_ACP_AAC_CRC() +
+
mix_acp_aac_get_aac_profile, mix_acp_aac_get_aac_profile () +
+
mix_acp_aac_get_aot, mix_acp_aac_get_aot () +
+
mix_acp_aac_get_bit_rate_type, mix_acp_aac_get_bit_rate_type () +
+
mix_acp_aac_get_bit_stream_format, mix_acp_aac_get_bit_stream_format () +
+
mix_acp_aac_get_mpeg_id, mix_acp_aac_get_mpeg_id () +
+
mix_acp_aac_new, mix_acp_aac_new () +
+
MIX_ACP_AAC_PCE_FLAG, MIX_ACP_AAC_PCE_FLAG() +
+
MIX_ACP_AAC_PS_FLAG, MIX_ACP_AAC_PS_FLAG() +
+
mix_acp_aac_ref, mix_acp_aac_ref () +
+
MIX_ACP_AAC_SAMPLE_RATE, MIX_ACP_AAC_SAMPLE_RATE() +
+
MIX_ACP_AAC_SBR_FLAG, MIX_ACP_AAC_SBR_FLAG() +
+
mix_acp_aac_set_aac_profile, mix_acp_aac_set_aac_profile () +
+
mix_acp_aac_set_aot, mix_acp_aac_set_aot () +
+
mix_acp_aac_set_bit_rate_type, mix_acp_aac_set_bit_rate_type () +
+
mix_acp_aac_set_bit_stream_format, mix_acp_aac_set_bit_stream_format () +
+
mix_acp_aac_set_mpeg_id, mix_acp_aac_set_mpeg_id () +
+
mix_acp_aac_unref, mix_acp_aac_unref() +
+
MIX_ACP_BITRATE, MIX_ACP_BITRATE() +
+
MIX_ACP_DECODEMODE, MIX_ACP_DECODEMODE() +
+
mix_acp_get_audio_manager, mix_acp_get_audio_manager () +
+
mix_acp_get_bps, mix_acp_get_bps () +
+
mix_acp_get_decodemode, mix_acp_get_decodemode () +
+
mix_acp_get_op_align, mix_acp_get_op_align () +
+
mix_acp_get_streamname, mix_acp_get_streamname () +
+
mix_acp_is_streamname_valid, mix_acp_is_streamname_valid () +
+
MIX_ACP_MP3_CRC, MIX_ACP_MP3_CRC() +
+
MIX_ACP_MP3_MPEG_FORMAT, MIX_ACP_MP3_MPEG_FORMAT() +
+
MIX_ACP_MP3_MPEG_LAYER, MIX_ACP_MP3_MPEG_LAYER() +
+
mix_acp_mp3_new, mix_acp_mp3_new () +
+
mix_acp_mp3_ref, mix_acp_mp3_ref () +
+
mix_acp_mp3_unref, mix_acp_mp3_unref() +
+
mix_acp_new, mix_acp_new () +
+
MIX_ACP_NUM_CHANNELS, MIX_ACP_NUM_CHANNELS() +
+
mix_acp_ref, mix_acp_ref () +
+
MIX_ACP_SAMPLE_FREQ, MIX_ACP_SAMPLE_FREQ() +
+
mix_acp_set_audio_manager, mix_acp_set_audio_manager () +
+
mix_acp_set_bps, mix_acp_set_bps () +
+
mix_acp_set_decodemode, mix_acp_set_decodemode () +
+
mix_acp_set_op_align, mix_acp_set_op_align () +
+
mix_acp_set_streamname, mix_acp_set_streamname () +
+
mix_acp_unref, mix_acp_unref() +
+
MIX_ACP_WMA_BLOCK_ALIGN, MIX_ACP_WMA_BLOCK_ALIGN() +
+
MIX_ACP_WMA_CHANNEL_MASK, MIX_ACP_WMA_CHANNEL_MASK() +
+
MIX_ACP_WMA_ENCODE_OPT, MIX_ACP_WMA_ENCODE_OPT() +
+
MIX_ACP_WMA_FORMAT_TAG, MIX_ACP_WMA_FORMAT_TAG() +
+
mix_acp_wma_get_version, mix_acp_wma_get_version () +
+
mix_acp_wma_new, mix_acp_wma_new () +
+
MIX_ACP_WMA_PCM_BIT_WIDTH, MIX_ACP_WMA_PCM_BIT_WIDTH() +
+
mix_acp_wma_ref, mix_acp_wma_ref () +
+
mix_acp_wma_set_version, mix_acp_wma_set_version () +
+
mix_acp_wma_unref, mix_acp_wma_unref() +
+
mix_aip_new, mix_aip_new () +
+
mix_aip_ref, mix_aip_ref () +
+
mix_aip_unref, mix_aip_unref() +
+
mix_audio_am_is_enabled, mix_audio_am_is_enabled () +
+
mix_audio_capture_encode, mix_audio_capture_encode () +
+
mix_audio_configure, mix_audio_configure () +
+
mix_audio_decode, mix_audio_decode () +
+
mix_audio_deinitialize, mix_audio_deinitialize () +
+
mix_audio_get_max_vol, mix_audio_get_max_vol () +
+
mix_audio_get_min_vol, mix_audio_get_min_vol () +
+
mix_audio_get_mute, mix_audio_get_mute () +
+
mix_audio_get_output_configuration, mix_audio_get_output_configuration () +
+
mix_audio_get_state, mix_audio_get_state () +
+
mix_audio_get_stream_byte_decoded, mix_audio_get_stream_byte_decoded () +
+
mix_audio_get_stream_state, mix_audio_get_stream_state () +
+
mix_audio_get_timestamp, mix_audio_get_timestamp () +
+
mix_audio_get_version, mix_audio_get_version () +
+
mix_audio_get_volume, mix_audio_get_volume () +
+
mix_audio_initialize, mix_audio_initialize () +
+
mix_audio_is_am_available, mix_audio_is_am_available () +
+
mix_audio_new, mix_audio_new () +
+
mix_audio_pause, mix_audio_pause () +
+
mix_audio_ref, mix_audio_ref () +
+
mix_audio_resume, mix_audio_resume () +
+
mix_audio_set_mute, mix_audio_set_mute () +
+
mix_audio_set_volume, mix_audio_set_volume () +
+
mix_audio_start, mix_audio_start () +
+
mix_audio_stop_drain, mix_audio_stop_drain () +
+
mix_audio_stop_drop, mix_audio_stop_drop () +
+
mix_audio_unref, mix_audio_unref() +
+
+
+
+ + + diff --git a/mix_audio/docs/reference/MixAudio/html/ch01.html b/mix_audio/docs/reference/MixAudio/html/ch01.html new file mode 100644 index 0000000..2ab25e8 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/ch01.html @@ -0,0 +1,56 @@ + + + + +Mix Audio API + + + + + + + + + + + + + + + + + + + +
+

+Mix Audio API

+
+
+MixAudioConfigParamsAAC — Audio configuration parameters for the AAC-LC, HEAAC v1, and HEAAC v2 audio formats. +
+
+MixAudioConfigParamsMP3 — Audio configuration parameters for MP3 audio. +
+
+MixAudioConfigParamsWMA — Audio parameters for WMA audio. +
+
+MixAudioConfigParams — MixAudio configuration parameters object. +
+
+MixAudioInitParams — Initialization parameters object. +
+
+MixAudio — Object to support single-stream playback using a hardware-accelerated decoder. +
+
+Mix Audio Types — Miscellaneous types used by the MixAudio API. +
+
+
+ + + diff --git a/mix_audio/docs/reference/MixAudio/html/index.html b/mix_audio/docs/reference/MixAudio/html/index.html new file mode 100644 index 0000000..ab60f03 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/index.html @@ -0,0 +1,60 @@ + + + + +MixAudio Reference Manual + + + + + + + + + + +
+
+
+
+

+ MixAudio version 0.3 + +

+
+
+
+
+
Mix Audio API
+
+
+MixAudioConfigParamsAAC — Audio configuration parameters for the AAC-LC, HEAAC v1, and HEAAC v2 audio formats. +
+
+MixAudioConfigParamsMP3 — Audio configuration parameters for MP3 audio. +
+
+MixAudioConfigParamsWMA — Audio parameters for WMA audio. +
+
+MixAudioConfigParams — MixAudio configuration parameters object. +
+
+MixAudioInitParams — Initialization parameters object. +
+
+MixAudio — Object to support single-stream playback using a hardware-accelerated decoder. +
+
+Mix Audio Types — Miscellaneous types used by the MixAudio API. +
+
+
Object Hierarchy
+
API Index
+
+
+ + + diff --git a/mix_audio/docs/reference/MixAudio/html/index.sgml b/mix_audio/docs/reference/MixAudio/html/index.sgml new file mode 100644 index 0000000..0cc1a2a --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/index.sgml @@ -0,0 +1,134 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/mix_audio/docs/reference/MixAudio/html/style.css b/mix_audio/docs/reference/MixAudio/html/style.css new file mode 100644 index 0000000..bb44c28 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/style.css @@ -0,0 +1,167 @@ +.synopsis, .classsynopsis +{ + background: #eeeeee; + border: solid 1px #aaaaaa; + padding: 0.5em; +} +.programlisting +{ + background: #eeeeff; + border: solid 1px #aaaaff; + padding: 0.5em; +} +.variablelist +{ + padding: 4px; + margin-left: 3em; +} +.variablelist td:first-child +{ + vertical-align: top; +} + +/* this is needed so that the local anchors are displayed below the naviagtion */ +@media screen { + sup a.footnote + { + position: relative; + top: 0em ! important; + } + div.refnamediv a[name], div.refsect1 a[name] + { + position: relative; + top: -4.5em; + } + table.navigation#top + { + background: #ffeeee; + border: solid 1px #ffaaaa; + margin-top: 0; + margin-bottom: 0; + position: fixed; + top: 0; + left: 0; + height: 2em; + z-index: 1; + } + .navigation a + { + color: #770000; + } + .navigation a:visited + { + color: #550000; + } + td.shortcuts + { + color: #770000; + font-size: 80%; + white-space: nowrap; + } + div.refentry, div.chapter, div.reference, div.part, div.book, div.glossary, div.sect1, div.appendix, div.preface + { + position: relative; + top: 3em; + z-index: 0; + } + div.glossary, div.index + { + position: relative; + top: 2em; + z-index: 0; + } + div.refnamediv + { + margin-top: 2em; + } + body + { + padding-bottom: 20em; + } +} +@media print { + table.navigation { + visibility: collapse; + display: none; + } + div.titlepage table.navigation { + visibility: visible; + display: table; + background: #ffeeee; + border: solid 1px #ffaaaa; + margin-top: 0; + margin-bottom: 0; + top: 0; + left: 0; + height: 2em; + } +} + +.navigation .title +{ + font-size: 200%; +} + + +div.gallery-float +{ + float: left; + padding: 10px; +} +div.gallery-float img +{ + border-style: none; +} +div.gallery-spacer +{ + clear: both; +} +a +{ + text-decoration: none; +} +a:hover +{ + text-decoration: underline; + color: #FF0000; +} + +div.table table +{ + border-collapse: collapse; + border-spacing: 0px; + border-style: solid; + border-color: #777777; + border-width: 1px; +} + +div.table table td, div.table table th +{ + border-style: solid; + border-color: #777777; + border-width: 1px; + padding: 3px; + vertical-align: top; +} + +div.table table th +{ + background-color: #eeeeee; +} + +hr +{ + color: #777777; + background: #777777; + border: 0; + height: 1px; + clear: both; +} + +.footer +{ + padding-top: 3.5em; + color: #777777; + text-align: center; + font-size: 80%; +} diff --git a/mix_audio/docs/reference/MixAudio/html/tree-hierarchy.html b/mix_audio/docs/reference/MixAudio/html/tree-hierarchy.html new file mode 100644 index 0000000..e6f8029 --- /dev/null +++ b/mix_audio/docs/reference/MixAudio/html/tree-hierarchy.html @@ -0,0 +1,37 @@ + + + + +Object Hierarchy + + + + + + + + + + + + + + + + + + + +
+

+Object Hierarchy

+
+    GObject
+        MixAudio
+
+
+ + + diff --git a/mix_audio/m4/Makefile.am b/mix_audio/m4/Makefile.am new file mode 100644 index 0000000..66381d4 --- /dev/null +++ b/mix_audio/m4/Makefile.am @@ -0,0 +1 @@ +EXTRA_DIST += diff --git a/mix_audio/m4/as-mix-version.m4 b/mix_audio/m4/as-mix-version.m4 new file mode 100644 index 0000000..8b09d7c --- /dev/null +++ b/mix_audio/m4/as-mix-version.m4 @@ -0,0 +1,35 @@ +dnl as-mix-version.m4 + +dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE) + +dnl example +dnl AS_MIX_VERSION(mixaudio,MIXAUDIO, 0, 3, 2,) +dnl for a 0.3.2 release version + +dnl this macro +dnl - defines [$PREFIX]_MAJOR, MINOR and REVISION, CURRENT, AGE +dnl - defines [$PREFIX], VERSION +dnl - AC_SUBST's all defined vars + +AC_DEFUN([AS_MIX_VERSION], +[ + PACKAGE=[$1] + [$2]_MAJOR=[$3] + [$2]_MINOR=[$4] + [$2]_REVISION=[$5] + [$2]_CURRENT=m4_eval([$3] + [$4]) + [$2]_AGE=[$4] + VERSION=[$3].[$4].[$5] + + AC_SUBST([$2]_MAJOR) + AC_SUBST([$2]_MINOR) + AC_SUBST([$2]_REVISION) + AC_SUBST([$2]_CURRENT) + AC_SUBST([$2]_AGE) + + AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Define the package name]) + AC_SUBST(PACKAGE) + AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Define the version]) + AC_SUBST(VERSION) + +]) diff --git a/mix_audio/mixaudio.spec b/mix_audio/mixaudio.spec new file mode 100644 index 0000000..e618d51 --- /dev/null +++ b/mix_audio/mixaudio.spec @@ -0,0 +1,56 @@ +# INTEL CONFIDENTIAL +# Copyright 2009 Intel Corporation All Rights Reserved. +# The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. +# +# No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + +Summary: MIX Audio +Name: mixaudio +Version: 0.3.5 +Release: 1 +Source0: %{name}-%{version}.tar.gz +NoSource: 0 +License: Intel Proprietary +Group: System Environment/Libraries +BuildRoot: %{_tmppath}/%{name}-root +ExclusiveArch: i586 i386 +BuildRequires: glib2-devel mixcommon-devel dbus-glib-devel + +%description +MIX Audio is an user library interface for various hardware audio codecs +available on the platform. + +%package devel +Summary: Libraries include files +Group: Development/Libraries +Requires: %{name} = %{version} + +%description devel +The %{name}-devel package contains the header files and static libraries +for building applications which use %{name}. 
+ +%prep +%setup -q + +%build +%autogen +%configure --prefix=%{_prefix} +make + +%install +%make_install + +%clean +rm -rf $RPM_BUILD_ROOT + +%files +%defattr(-,root,root) +%{_libdir}/libmixaudio.so.* + +%files devel +%defattr(-,root,root) +%{_libdir}/libmixaudio.so +%{_libdir}/libmixaudio.la +%{_libdir}/pkgconfig/mixaudio.pc +%{_includedir}/*.h +%doc COPYING diff --git a/mix_audio/pkgconfig/Makefile.am b/mix_audio/pkgconfig/Makefile.am new file mode 100644 index 0000000..ceea4fa --- /dev/null +++ b/mix_audio/pkgconfig/Makefile.am @@ -0,0 +1,11 @@ +### all of the standard pc files we need to generate +pcfiles = mixaudio.pc + +all-local: $(pcfiles) + +pkgconfigdir = $(libdir)/pkgconfig +pkgconfig_DATA = $(pcfiles) + +EXTRA_DIST = mixaudio.pc.in + +CLEANFILES = $(pcfiles) diff --git a/mix_audio/pkgconfig/mixaudio.pc.in b/mix_audio/pkgconfig/mixaudio.pc.in new file mode 100644 index 0000000..b521b5b --- /dev/null +++ b/mix_audio/pkgconfig/mixaudio.pc.in @@ -0,0 +1,12 @@ +prefix=@prefix@ +exec_prefix=@exec_prefix@ +libdir=@libdir@ +includedir=@includedir@ +toolsdir=${exec_prefix}/bin + +Name: MixAudio +Description: Intel MIX Audio +Requires: @MIXAUDIO_PKG_DEPS@ +Version: @VERSION@ +Libs: -L${libdir} -lmixaudio +Cflags: -I${includedir} diff --git a/mix_audio/src/Makefile.am b/mix_audio/src/Makefile.am new file mode 100644 index 0000000..b03751b --- /dev/null +++ b/mix_audio/src/Makefile.am @@ -0,0 +1,61 @@ +#INTEL CONFIDENTIAL +#Copyright 2009 Intel Corporation All Rights Reserved. +#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+# + +lib_LTLIBRARIES = libmixaudio.la +#noinst_LTLIBRARIES = libmixaudio_stub.la + +############################################################################## +# sources used to compile +libmixaudio_la_SOURCES = mixaudio.c \ + sst_proxy.c \ + mixaip.c \ + mixacp.c \ + mixacpmp3.c \ + mixacpwma.c \ + mixacpaac.c + +# flags used to compile this plugin +# add other _CFLAGS and _LIBS as needed +libmixaudio_la_CFLAGS = $(DBUS_GLIB_CFLAGS) $(GLIB_CFLAGS) $(MIX_CFLAGS) $(GOBJECT_CFLAGS) $(GTHREAD_CFLAGS) -DMIXAUDIO_CURRENT=@MIXAUDIO_CURRENT@ -DMIXAUDIO_AGE=@MIXAUDIO_AGE@ -DMIXAUDIO_REVISION=@MIXAUDIO_REVISION@ $(MIXCOMMON_CFLAGS) -DMIX_LOG_ENABLE +libmixaudio_la_LIBADD = $(DBUS_GLIB_LIBS) $(GLIB_LIBS) $(GOBJECT_LIBS) $(GTHREAD_LIBS) $(MIXCOMMON_LIBS) +libmixaudio_la_LDFLAGS = $(DBUS_GLIB_LIBS)$(GLIB_LIBS) $(GOBJECT_LIBS) $(GTHREAD_LIBS) -version-info @MIXAUDIO_CURRENT@:@MIXAUDIO_REVISION@:@MIXAUDIO_AGE@ $(MIXCOMMON_LIBS) + +libmixaudio_la_LIBTOOLFLAGS = --tag=disable-static + +# additional flags to enable backdoor or workaround +if LPESTUB +libmixaudio_la_CFLAGS += -DLPESTUB +endif + +if WORKAROUND +libmixaudio_la_CFLAGS += -DDROP_WORKAROUND +endif + +#libmixaudio_stub_la_SOURCES = $(libmixaudio_la_SOURCES) +#libmixaudio_stub_la_CFLAGS = $(libmixaudio_la_CFLAGS) -DLPESTUB +#libmixaudio_stub_la_LIBADD = $(libmixaudio_la_LIBADD) +#libmixaudio_stub_la_LDFLAGS = $(libmixaudio_la_LDFLAGS) +#libmixaudio_stub_la_LIBTOOLFLAGS = $(libmixaudio_la_LIBTOOLFLAGS) + +# headers we need but don't want installed +noinst_HEADERS = intel_sst_ioctl.h sst_proxy.h pvt.h amhelper.h + +# TODO: decide whehter a /usr/include/mix is needed for mix headers +include_HEADERS = mixaudio.h \ + mixaudiotypes.h \ + mixaip.h \ + mixacp.h \ + mixacpmp3.h \ + mixacpwma.h \ + mixacpaac.h + +if AUDIO_MANAGER +libmixaudio_la_CFLAGS += -DAUDIO_MANAGER +libmixaudio_la_SOURCES += amhelper.c +#include_HEADERS += amhelper.h +endif + diff --git a/mix_audio/src/amhelper.c b/mix_audio/src/amhelper.c new file mode 100644 index 0000000..501ece7 --- /dev/null +++ b/mix_audio/src/amhelper.c @@ -0,0 +1,120 @@ +#include "amhelper.h" +#include + +static DBusGConnection *connection; + +static DBusGProxy *proxy_lpe = NULL; + +static gboolean am_enable=FALSE; + +/* Connect to am dbus server + * return -1 means failed + * return 0 means succeeded + * */ +gint dbus_init() { + GError *error; + const char *name = "org.moblin.audiomanager"; + + const char *path_lpe = "/org/moblin/audiomanager/lpe"; + const char *interface_lpe = "org.moblin.audiomanager.lpe"; + + const gchar* env = g_getenv("MIX_AM"); + if (env && env[0] == '1') { + am_enable = TRUE; + } + else + am_enable = FALSE; + + if (am_enable) { + error = NULL; + connection = dbus_g_bus_get(DBUS_BUS_SESSION, &error); + + if (connection == NULL) { + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_WARNING, "Failed to open connection to bus: %s\n", + error->message); + g_error_free(error); + return -1; + } + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_VERBOSE, "Successfully get a dbus connection\n"); + + proxy_lpe = dbus_g_proxy_new_for_name(connection, name, + path_lpe, interface_lpe); + if (proxy_lpe == NULL) { + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_WARNING, "Failed to connect to AM dbus server\n"); + return -1; + } + else { + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_VERBOSE, "Successfully connected to AM dbus\npath: %s\ninterface: %s\n", + path_lpe, interface_lpe); + } + } + return 0; +} + +gint32 lpe_stream_register(guint32 lpe_stream_id, char* media_role, char* lpe_stream_name, guint32 stream_type) +{ + GError 
*error; + gint32 s_output = 0; + error = NULL; + + if (am_enable) { + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "lpe_stream_id: %d\n", lpe_stream_id); + + if (lpe_stream_id == 0) { + return 0; + } + if(!dbus_g_proxy_call (proxy_lpe, "LPEStreamRegister", &error, G_TYPE_UINT, + lpe_stream_id, G_TYPE_STRING, media_role, G_TYPE_STRING, lpe_stream_name, G_TYPE_UINT, stream_type, + G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID)) { + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_WARNING, "LPEStreamRegister failed: %s\n", error->message); + g_error_free(error); + return s_output; + } + + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "LPEStreamRegister returned am stream id %d\n", s_output); + } + + return s_output; +} + +gint32 lpe_stream_unregister(guint32 am_stream_id) +{ + GError *error; + gint32 s_output = 0; + + if (am_enable) { + error = NULL; + if(!dbus_g_proxy_call (proxy_lpe, "LPEStreamUnregister", &error, G_TYPE_UINT, am_stream_id, + G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID)){ + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_WARNING, "LPEStreamUnregister failed: %s\n", error->message); + g_error_free(error); + return s_output; + } + } + return s_output; +} + +gint32 lpe_stream_notify_pause(guint32 stream_id) +{ + GError *error; + gint32 s_output=0; + + if (am_enable) { + dbus_g_proxy_call (proxy_lpe, "LPEStreamNotifyPause", &error, G_TYPE_UINT, stream_id, G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID); + } + + return s_output; +} + +gint32 lpe_stream_notify_resume(guint32 stream_id) +{ + GError *error; + gint32 s_output=0; + + if (am_enable) { + dbus_g_proxy_call (proxy_lpe, "LPEStreamNotifyResume", &error, G_TYPE_UINT, stream_id, G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID); + } + + return s_output; +} + diff --git a/mix_audio/src/amhelper.h b/mix_audio/src/amhelper.h new file mode 100644 index 0000000..9ec115c --- /dev/null +++ b/mix_audio/src/amhelper.h @@ -0,0 +1,25 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+*/ + +#ifndef __MIX_AM_HELPER_H__ +#define __MIX_AM_HELPER_H__ + +#include +#include + +gint dbus_init(); + +gint32 lpe_stream_register(guint32 lpe_stream_id, char* media_role, char* lpe_stream_name, guint32 stream_type); + +gint32 lpe_stream_unregister(guint32 am_stream_id); + +gint32 lpe_stream_notify_pause(guint32 stream_id); + +gint32 lpe_stream_notify_resume(guint32 stream_id); + +#endif diff --git a/mix_audio/src/intel_sst_ioctl.h b/mix_audio/src/intel_sst_ioctl.h new file mode 100644 index 0000000..7fecf12 --- /dev/null +++ b/mix_audio/src/intel_sst_ioctl.h @@ -0,0 +1,337 @@ +#ifndef __INTEL_SST_IOCTL_H__ +#define __INTEL_SST_IOCTL_H__ + +enum sst_codec_types { +/* AUDIO/MUSIC CODEC Type Definitions */ + SST_CODEC_TYPE_UNKNOWN = 0, + SST_CODEC_TYPE_PCM, /* Pass through Audio codec */ + SST_CODEC_TYPE_MP3, + SST_CODEC_TYPE_MP24, + SST_CODEC_TYPE_AAC, + SST_CODEC_TYPE_AACP, + SST_CODEC_TYPE_eAACP, + SST_CODEC_TYPE_WMA9, + SST_CODEC_TYPE_WMA10, + SST_CODEC_TYPE_WMA10P, + SST_CODEC_TYPE_RA, + SST_CODEC_TYPE_DDAC3, + SST_CODEC_TYPE_STEREO_TRUE_HD, + SST_CODEC_TYPE_STEREO_HD_PLUS, + + /* VOICE CODEC Type Definitions */ + SST_CODEC_TYPE_VOICE_PCM = 0x21, /* Pass through voice codec */ + SST_CODEC_SRC = 0x64, + SST_CODEC_MIXER = 0x65, + SST_CODEC_DOWN_MIXER = 0x66, + SST_CODEC_VOLUME_CONTROL = 0x67, + SST_CODEC_OEM1 = 0xC8, + SST_CODEC_OEM2 = 0xC9, +}; + +enum snd_sst_stream_ops { + STREAM_OPS_PLAYBACK = 0, /* Decode */ + STREAM_OPS_CAPTURE, /* Encode */ + STREAM_OPS_PLAYBACK_DRM, /* Play Audio/Voice */ + STREAM_OPS_PLAYBACK_ALERT, /* Play Audio/Voice */ + STREAM_OPS_CAPTURE_VOICE_CALL, /* CSV Voice recording */ +}; + +enum stream_type { + STREAM_TYPE_MUSIC = 1, + STREAM_TYPE_VOICE +}; + +/* Firmware Version info */ +struct snd_sst_fw_version { + __u8 build; /* build number*/ + __u8 minor; /* minor number*/ + __u8 major; /* major number*/ + __u8 type; /* build type*/ +}; + +/* Port info structure */ +struct snd_sst_port_info { + __u16 port_type; + __u16 reserved; +}; + +/* Mixer info structure */ +struct snd_sst_mix_info { + __u16 max_streams; + __u16 reserved; +}; + +/* PCM Parameters */ +struct snd_pcm_params { + __u16 codec; /* codec type */ + __u8 num_chan; /* 1=Mono, 2=Stereo */ + __u8 pcm_wd_sz; /* 16/24 - bit*/ + __u32 brate; /* Bitrate in bits per second */ + __u32 sfreq; /* Sampling rate in Hz */ + __u16 frame_size; + __u16 samples_per_frame; /* Frame size num samples per frame */ + __u32 period_count; /* period elapsed time count, in samples,*/ +}; + +/* MP3 Music Parameters Message */ +struct snd_mp3_params { + __u16 codec; + __u8 num_chan; /* 1=Mono, 2=Stereo */ + __u8 pcm_wd_sz; /* 16/24 - bit*/ + __u32 brate; /* Use the hard coded value. */ + __u32 sfreq; /* Sampling freq eg. 8000, 441000, 48000 */ + __u8 crc_check; /* crc_check - disable (0) or enable (1) */ + __u8 op_align; /* op align 0- 16 bit, 1- MSB, 2 LSB*/ + __u16 reserved; /* Unused */ +}; + +#define AAC_BIT_STREAM_ADTS 0 +#define AAC_BIT_STREAM_ADIF 1 +#define AAC_BIT_STREAM_RAW 2 + +/* AAC Music Parameters Message */ +struct snd_aac_params { + __u16 codec; + __u8 num_chan; /* 1=Mono, 2=Stereo*/ + __u8 pcm_wd_sz; /* 16/24 - bit*/ + __u32 brate; + __u32 sfreq; /* Sampling freq eg. 
8000, 441000, 48000 */ + __u32 aac_srate; /* Plain AAC decoder operating sample rate */ + __u8 mpg_id; /* 0=MPEG-2, 1=MPEG-4 */ + __u8 bs_format; /* input bit stream format adts=0, adif=1, raw=2 */ + __u8 aac_profile; /* 0=Main Profile, 1=LC profile, 3=SSR profile */ + __u8 ext_chl; /* No.of external channels */ + __u8 aot; /* Audio object type. 1=Main , 2=LC , 3=SSR, 4=SBR*/ + __u8 op_align; /* output alignment 0=16 bit , 1=MSB, 2= LSB align */ + __u8 brate_type; /* 0=CBR, 1=VBR */ + __u8 crc_check; /* crc check 0= disable, 1=enable */ + __s8 bit_stream_format[8]; /* input bit stream format adts/adif/raw */ + __u8 jstereo; /* Joint stereo Flag */ + __u8 sbr_present; /* 1 = SBR Present, 0 = SBR absent, for RAW */ + __u8 downsample; /* 1 = Downsampling ON, 0 = Downsampling OFF */ + __u8 num_syntc_elems; /* 1- Mono/stereo, 0 - Dual Mono, 0 - for raw */ + __s8 syntc_id[2]; /* 0 for ID_SCE(Dula Mono), -1 for raw */ + __s8 syntc_tag[2]; /* raw - -1 and 0 -16 for rest of the streams */ + __u8 pce_present; /* Flag. 1- present 0 - not present, for RAW */ + __u8 reserved; + __u16 reserved1; + +}; + +/* WMA Music Parameters Message */ +struct snd_wma_params { + __u16 codec; + __u8 num_chan; /* 1=Mono, 2=Stereo */ + __u8 pcm_wd_sz; /* 16/24 - bit*/ + __u32 brate; /* Use the hard coded value. */ + __u32 sfreq; /* Sampling freq eg. 8000, 441000, 48000 */ + __u32 channel_mask; /* Channel Mask */ + __u16 format_tag; /* Format Tag */ + __u16 block_align; /* packet size */ + __u16 wma_encode_opt;/* Encoder option */ + __u8 op_align; /* op align 0- 16 bit, 1- MSB, 2 LSB*/ + __u8 pcm_src; /* input pcm bit width*/ +}; + +/* Pre processing param structure */ +struct snd_prp_params { + __u32 reserved; /* No pre-processing defined yet */ +}; + +/* Post processing Capability info structure */ +struct snd_sst_postproc_info { + __u32 src_min; /* Supported SRC Min sampling freq */ + __u32 src_max; /* Supported SRC Max sampling freq */ + __u8 src; /* 0=Not supported, 1=Supported */ + __u8 bass_boost; /* 0=Not Supported, 1=Supported */ + __u8 stereo_widening; /* 0=Not Supported, 1=Supported */ + __u8 volume_control; /* 0=Not Supported, 1=Supported */ + __s16 min_vol; /* Minimum value of Volume in dB */ + __s16 max_vol; /* Maximum value of Volume in dB */ + __u8 mute_control; /*0=No Mute, 1=Mute*/ + __u8 reserved1; + __u16 reserved2; +}; + +/* pre processing Capability info structure */ +struct snd_sst_prp_info { + __s16 min_vol; /* Minimum value of Volume in dB */ + __s16 max_vol; /* Maximum value of Volume in dB */ + __u8 volume_control; /* 0=Not Supported, 1=Supported */ + __u8 reserved1; /* for 32 bit alignment */ + __u16 reserved2; /* for 32 bit alignment */ +} __attribute__ ((packed)); + +/* Firmware capabilities info */ +struct snd_sst_fw_info { + struct snd_sst_fw_version fw_version; /* Firmware version */ + __u8 audio_codecs_supported[8]; /* Codecs supported by FW */ + __u32 recommend_min_duration; /* Min duration for Low power Playback*/ + __u8 max_pcm_streams_supported; /*Max number of PCM streams supported */ + __u8 max_enc_streams_supported; /*Max number of Encoded streams */ + __u16 reserved; /* 32 bit alignment*/ + struct snd_sst_postproc_info pop_info; /* Post processing capability*/ + struct snd_sst_prp_info prp_info; /* pre_processing mod cap info */ + struct snd_sst_port_info port_info[2]; /* Port info */ + struct snd_sst_mix_info mix_info; /* Mixer info */ + __u32 min_input_buf; /*minmum i/p buffer for decode*/ +}; + +/* Add the codec parameter structures for new codecs to be supported */ +#define 
CODEC_PARAM_STRUCTURES \ + struct snd_pcm_params pcm_params; \ + struct snd_mp3_params mp3_params; \ + struct snd_aac_params aac_params; \ + struct snd_wma_params wma_params; + +/* Pre and Post Processing param structures */ +#define PPP_PARAM_STRUCTURES \ + struct snd_prp_params prp_params; + +/* Codec params struture */ +union snd_sst_codec_params { + CODEC_PARAM_STRUCTURES; +}; + +/* Pre-processing params struture */ +union snd_sst_ppp_params{ + PPP_PARAM_STRUCTURES; +}; + +struct snd_sst_stream_params { + union snd_sst_codec_params uc; +} __attribute__ ((packed)); + +struct snd_sst_params { + __u32 result; + __u32 stream_id; + __u8 codec; + __u8 ops; + __u8 stream_type; + struct snd_sst_stream_params sparams; +}; + +/*ioctl related stuff here*/ +struct snd_sst_pmic_config { + __u32 sfreq; /* Sampling rate in Hz */ + __u16 num_chan; /* Mono =1 or Stereo =2 */ + __u16 pcm_wd_sz; /* Number of bits per sample */ +} __attribute__ ((packed)); + +struct snd_sst_get_stream_params { + struct snd_sst_params codec_params; + struct snd_sst_pmic_config pcm_params; +}; + +enum snd_sst_target_type { + SND_SST_TARGET_PMIC = 1, + SND_SST_TARGET_OTHER, +}; + +enum snd_sst_port_action { + SND_SST_PORT_PREPARE = 1, + SND_SST_PORT_ACTIVATE, +}; + +/* Target selection per device structure */ +struct snd_sst_slot_info { + __u8 mix_enable; /* Mixer enable or disable */ + __u8 device_type; + __u8 device_instance; /* 0, 1, 2 */ + __u8 target_type; + __u16 slot[2]; + __u8 master; + __u8 action; + __u16 reserved; + struct snd_sst_pmic_config pcm_params; +} __attribute__ ((packed)); + +/* Target device list structure */ +struct snd_sst_target_device { + __u32 device_route; + struct snd_sst_slot_info devices[2]; +} __attribute__ ((packed)); + +struct snd_sst_driver_info { + __u32 version; /* Version of the driver */ + __u32 active_pcm_streams; + __u32 active_enc_streams; + __u32 max_pcm_streams; + __u32 max_enc_streams; + __u32 buf_per_stream; +}; + +struct snd_sst_vol { + __u32 stream_id; + __s32 volume; + __u32 ramp_duration; + __u32 ramp_type; /* Ramp type, default=0 */ +}; + +struct snd_sst_mute { + __u32 stream_id; + __u32 mute; +}; + +enum snd_sst_buff_type { + SST_BUF_USER = 1, + SST_BUF_MMAP, + SST_BUF_RAR, +}; + +struct snd_sst_mmap_buff_entry { + unsigned int offset; + unsigned int size; +}; + +struct snd_sst_mmap_buffs { + unsigned int entries; + enum snd_sst_buff_type type; + struct snd_sst_mmap_buff_entry *buff; +}; + +struct snd_sst_buff_entry { + void *buffer; + unsigned int size; +}; + +struct snd_sst_buffs { + unsigned int entries; + __u8 type; + struct snd_sst_buff_entry *buff_entry; +}; + +struct snd_sst_dbufs { + unsigned long long input_bytes_consumed; + unsigned long long output_bytes_produced; + struct snd_sst_buffs *ibufs; + struct snd_sst_buffs *obufs; +}; + +/*IOCTL defined here*/ +/*SST MMF IOCTLS only*/ +#define SNDRV_SST_STREAM_SET_PARAMS _IOR('L', 0x00, \ + struct snd_sst_stream_params *) +#define SNDRV_SST_STREAM_GET_PARAMS _IOWR('L', 0x01, \ + struct snd_sst_get_stream_params *) +#define SNDRV_SST_STREAM_GET_TSTAMP _IOWR('L', 0x02, __u64 *) +#define SNDRV_SST_STREAM_DECODE _IOWR('L', 0x03, struct snd_sst_dbufs *) +#define SNDRV_SST_STREAM_BYTES_DECODED _IOWR('L', 0x04, __u64 *) +#define SNDRV_SST_STREAM_START _IO('A', 0x42) +#define SNDRV_SST_STREAM_DROP _IO('A', 0x43) +#define SNDRV_SST_STREAM_DRAIN _IO('A', 0x44) +#define SNDRV_SST_STREAM_PAUSE _IOW('A', 0x45, int) +#define SNDRV_SST_STREAM_RESUME _IO('A', 0x47) +#define SNDRV_SST_MMAP_PLAY _IOW('L', 0x05, struct snd_sst_mmap_buffs 
*) +#define SNDRV_SST_MMAP_CAPTURE _IOW('L', 0x06, struct snd_sst_mmap_buffs *) +/*SST common ioctls */ +#define SNDRV_SST_DRIVER_INFO _IOR('L', 0x10, struct snd_sst_driver_info *) +#define SNDRV_SST_SET_VOL _IOW('L', 0x11, struct snd_sst_vol *) +#define SNDRV_SST_GET_VOL _IOW('L', 0x12, struct snd_sst_vol *) +#define SNDRV_SST_MUTE _IOW('L', 0x13, struct snd_sst_mute *) +/*AM Ioctly only*/ +#define SNDRV_SST_FW_INFO _IOR('L', 0x20, struct snd_sst_fw_info *) +#define SNDRV_SST_SET_TARGET_DEVICE _IOW('L', 0x21, \ + struct snd_sst_target_device *) + +#endif /*__INTEL_SST_IOCTL_H__*/ diff --git a/mix_audio/src/mixacp.c b/mix_audio/src/mixacp.c new file mode 100644 index 0000000..e7ce507 --- /dev/null +++ b/mix_audio/src/mixacp.c @@ -0,0 +1,322 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixacp + * @short_description: MixAudio configuration parameters object. + * @include: mixacp.h + * + * #MixAudio configuration parameters object which is used to communicate audio specific parameters. + * + * This object is should not be instantiated as codec specific parameters are definied in individual derive classes. 
+ */ + +#include "mixacp.h" +#include + +static GType _mix_acp_type = 0; +static MixParamsClass *parent_class = NULL; + +#define _do_init { _mix_acp_type = g_define_type_id; } + +gboolean mix_acp_copy(MixParams* target, const MixParams *src); +MixParams* mix_acp_dup(const MixParams *obj); +gboolean mix_acp_equal(MixParams* first, MixParams *second); +static void mix_acp_finalize(MixParams *obj); + +G_DEFINE_TYPE_WITH_CODE(MixAudioConfigParams, mix_acp, MIX_TYPE_PARAMS, _do_init); + +void +_mix_acp_initialize (void) +{ + /* the MixParams types need to be class_ref'd once before it can be + * done from multiple threads; + * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ + g_type_class_ref (mix_acp_get_type ()); +} + +static void mix_acp_init (MixAudioConfigParams *self) +{ + self->decode_mode = MIX_DECODE_NULL; + self->stream_name = NULL; + self->audio_manager=MIX_AUDIOMANAGER_NONE; + self->num_channels = 0; + self->bit_rate = 0; + self->sample_freq = 0; + self->bits_per_sample = MIX_ACP_BPS_16; + self->op_align = MIX_ACP_OUTPUT_ALIGN_16; +} + +static void mix_acp_class_init(MixAudioConfigParamsClass *klass) +{ + MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); + + /* setup static parent class */ + parent_class = (MixParamsClass *) g_type_class_peek_parent (klass); + + mixparams_class->finalize = mix_acp_finalize; + mixparams_class->copy = (MixParamsCopyFunction)mix_acp_copy; + mixparams_class->dup = (MixParamsDupFunction)mix_acp_dup; + mixparams_class->equal = (MixParamsEqualFunction)mix_acp_equal; + + klass->print_params = NULL; +} + +MixAudioConfigParams *mix_acp_new(void) +{ + MixAudioConfigParams *ret = (MixAudioConfigParams *)g_type_create_instance (MIX_TYPE_AUDIOCONFIGPARAMS); + + return ret; +} + +void mix_acp_finalize(MixParams *obj) +{ + /* clean up here. */ + MixAudioConfigParams *acp = MIX_AUDIOCONFIGPARAMS(obj); + + if (acp->stream_name) { + g_free(acp->stream_name); + acp->stream_name = NULL; + } + + /* Chain up parent */ + if (parent_class->finalize) + parent_class->finalize(obj); +} + +MixAudioConfigParams *mix_acp_ref(MixAudioConfigParams *mix) +{ + return (MixAudioConfigParams*)mix_params_ref(MIX_PARAMS(mix)); +} + +/** + * mix_acp_dup: + * @obj: a #MixAudioConfigParams object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. + */ +MixParams* mix_acp_dup(const MixParams *obj) +{ + MixParams *ret = NULL; + + if (MIX_IS_AUDIOCONFIGPARAMS(obj)) + { + MixAudioConfigParams *duplicate = mix_acp_new(); + if (mix_acp_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) + { + ret = MIX_PARAMS(duplicate); + } + else + { + mix_acp_unref(duplicate); + } + } + + return ret; +} + +/** + * mix_acp_copy: + * @target: copy to target + * @src: copy from src + * @returns: boolean indicates if copy is successful. + * + * Copy instance data from @src to @target. 
+ */ +gboolean mix_acp_copy(MixParams* target, const MixParams *src) +{ + if (MIX_IS_AUDIOCONFIGPARAMS(target) && MIX_IS_AUDIOCONFIGPARAMS(src)) + { + MixAudioConfigParams *t = MIX_AUDIOCONFIGPARAMS(target); + MixAudioConfigParams *s = MIX_AUDIOCONFIGPARAMS(src); + + t->decode_mode = s->decode_mode; + t->stream_name = g_strdup(s->stream_name); + t->audio_manager=s->audio_manager; + t->num_channels = s->num_channels; + t->bit_rate = s->bit_rate; + t->sample_freq = s->sample_freq; + t->bits_per_sample = s->bits_per_sample; + t->op_align = s->op_align; + + // Now chainup base class + if (parent_class->copy) + { + return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src)); + } + else + return TRUE; + } + return FALSE; +} + +/** + * mix_acp_equal: + * @first: first object to compare + * @second: seond object to compare + * @returns: boolean indicates if instance are equal. + * + * Copy instance data from @src to @target. + */ +gboolean mix_acp_equal(MixParams* first, MixParams *second) +{ + gboolean ret = FALSE; + + if (first && second) + { + if (first == second) return TRUE; + } + else + { + // one of them is NULL. + return FALSE; + } + + // members within this scope equal. chaining up. + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->equal) + ret = parent_class->equal(first, second); + else + ret = TRUE; + + if (ret && MIX_IS_AUDIOCONFIGPARAMS(first) && MIX_IS_AUDIOCONFIGPARAMS(second)) + { + MixAudioConfigParams *acp1 = MIX_AUDIOCONFIGPARAMS(first); + MixAudioConfigParams *acp2 = MIX_AUDIOCONFIGPARAMS(second); + + ret = (acp1->decode_mode == acp2->decode_mode) && + (acp1->audio_manager == acp2->audio_manager) && + (acp1->num_channels == acp2->num_channels) && + (acp1->bit_rate == acp2->bit_rate) && + (acp1->sample_freq == acp2->sample_freq) && + (acp1->bits_per_sample == acp2->bits_per_sample) && + (acp1->op_align == acp2->op_align) && + (!g_strcmp0(acp1->stream_name, acp2->stream_name)); + //g_strcmp0 handles NULL gracefully + } + + return ret; +} + + +gboolean mix_acp_is_streamname_valid(MixAudioConfigParams *obj) +{ + if (MIX_IS_AUDIOCONFIGPARAMS(obj)) + if ((obj->stream_name) && (obj->stream_name[0] != 0)) return TRUE; + + return FALSE; +} + +gchar *mix_acp_get_streamname(MixAudioConfigParams *obj) +{ + gchar *ret = NULL; + if (G_LIKELY(MIX_IS_AUDIOCONFIGPARAMS(obj)) && obj->stream_name) + { + ret = g_strdup(obj->stream_name); + } + return ret; +} + +MIX_RESULT mix_acp_set_streamname(MixAudioConfigParams *obj, const gchar *streamname) +{ + MIX_RESULT ret = MIX_RESULT_FAIL; + + if (!obj) return MIX_RESULT_NULL_PTR; + + if (G_LIKELY(MIX_IS_AUDIOCONFIGPARAMS(obj))) + { + if (obj->stream_name) + { + g_free(obj->stream_name); + obj->stream_name = NULL; + } + + if (streamname) obj->stream_name = g_strdup(streamname); + + ret = MIX_RESULT_SUCCESS; + } + else + { + ret = MIX_RESULT_INVALID_PARAM; + } + + return ret; +} + +MixACPBPSType mix_acp_get_bps(MixAudioConfigParams *obj) +{ + if (G_LIKELY(obj)) + return obj->bits_per_sample; + else + return 0; +} + +MIX_RESULT mix_acp_set_bps(MixAudioConfigParams *obj, MixACPBPSType type) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (!obj) return MIX_RESULT_NULL_PTR; + + if (G_LIKELY(MIX_IS_AUDIOCONFIGPARAMS(obj))) + { + switch (type) + { + case MIX_ACP_BPS_UNKNOWN: + case MIX_ACP_BPS_16: + case MIX_ACP_BPS_24: + obj->bits_per_sample = type; + break; + default: + ret = MIX_RESULT_INVALID_PARAM; + break; + } + } + else + { + ret = MIX_RESULT_INVALID_PARAM; + } + + return ret; +} + + +MixACPOpAlign 
mix_acp_get_op_align(MixAudioConfigParams *obj) +{ + return (obj->op_align); +} + +MIX_RESULT mix_acp_set_op_align(MixAudioConfigParams *obj, MixACPOpAlign op_align) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if ((op_align >= MIX_ACP_OUTPUT_ALIGN_16) && (op_align < MIX_ACP_OUTPUT_ALIGN_LAST)) + obj->op_align = op_align; + else ret=MIX_RESULT_INVALID_PARAM; + + return ret; +} + +void mix_acp_print_params(MixAudioConfigParams *obj) +{ + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "decode_mode: %d\n", obj->decode_mode); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "stream_name: %s\n", obj->stream_name); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "audio_manager: %d\n", obj->audio_manager); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "num_channels: %d\n", obj->num_channels); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "bit_rate: %d\n", obj->bit_rate); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "sample_freq: %d\n", obj->sample_freq); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "bits_per_sample: %d\n", obj->bits_per_sample); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "op_align: %d\n", obj->op_align); + + MixAudioConfigParamsClass *klass = MIX_AUDIOCONFIGPARAMS_GET_CLASS(obj); + if (klass->print_params) + { + klass->print_params(obj); + } +} + diff --git a/mix_audio/src/mixacp.h b/mix_audio/src/mixacp.h new file mode 100644 index 0000000..0acd309 --- /dev/null +++ b/mix_audio/src/mixacp.h @@ -0,0 +1,367 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_AUDIOCONFIGPARAMS_H__ +#define __MIX_AUDIOCONFIGPARAMS_H__ + + +#include "mixparams.h" +#include "mixresult.h" +#include "mixaudiotypes.h" + +/** + * MixACPOpAlign: + * @MIX_ACP_OUTPUT_ALIGN_UNKNOWN: Output alignment undefined. + * @IX_ACP_OUTPUT_ALIGN_16: Output word is 16-bit aligned + * @MIX_ACP_OUTPUT_ALIGN_MSB: Output word is MSB aligned + * @MIX_ACP_OUTPUT_ALIGN_LSB: Output word is LSB aligned + * @MIX_ACP_OUTPUT_ALIGN_LAST: Last entry in list. + * + * Audio Output alignment. + * + */ +typedef enum { + MIX_ACP_OUTPUT_ALIGN_UNKNOWN=-1, + MIX_ACP_OUTPUT_ALIGN_16=0, + MIX_ACP_OUTPUT_ALIGN_MSB, + MIX_ACP_OUTPUT_ALIGN_LSB, + MIX_ACP_OUTPUT_ALIGN_LAST +} MixACPOpAlign; + +/** + * MixACPBPSType: + * @MIX_ACP_BPS_UNKNOWN: Bit Per Sample undefined. + * @MIX_ACP_BPS_16: Output bits per sample is 16 bits + * @MIX_ACP_BPS_24: Output bits per sample is 24 bits + * + * Audio Output Size in bits per sample. 
+ * + */ +typedef enum { + MIX_ACP_BPS_UNKNOWN=0, + MIX_ACP_BPS_16=16, + MIX_ACP_BPS_24=24, +} MixACPBPSType; + +/** + * MIX_TYPE_AUDIOCONFIGPARAMS: + * + * Get type of class. + */ +#define MIX_TYPE_AUDIOCONFIGPARAMS (mix_acp_get_type ()) + +/** + * MIX_AUDIOCONFIGPARAMS: + * @obj: object to be type-casted. + * + * Type casting. + */ +#define MIX_AUDIOCONFIGPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOCONFIGPARAMS, MixAudioConfigParams)) + +/** + * MIX_IS_AUDIOCONFIGPARAMS: + * @obj: an object. + * + * Checks if the given object is an instance of #MixAudioConfigParams + */ +#define MIX_IS_AUDIOCONFIGPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOCONFIGPARAMS)) + +/** + * MIX_AUDIOCONFIGPARAMS_CLASS: + * @klass: class to be type-casted. + * + * Type casting. + */ +#define MIX_AUDIOCONFIGPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOCONFIGPARAMS, MixAudioConfigParamsClass)) + +/** + * MIX_IS_AUDIOCONFIGPARAMS_CLASS: + * @klass: a class. + * + * Checks if the given class is #MixAudioConfigParamsClass + */ +#define MIX_IS_AUDIOCONFIGPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOCONFIGPARAMS)) + +/** + * MIX_AUDIOCONFIGPARAMS_GET_CLASS: + * @obj: a #MixParams object. + * + * Get the class instance of the object. + */ +#define MIX_AUDIOCONFIGPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOCONFIGPARAMS, MixAudioConfigParamsClass)) + +typedef struct _MixAudioConfigParams MixAudioConfigParams; +typedef struct _MixAudioConfigParamsClass MixAudioConfigParamsClass; + +/** + * MixDecodeMode: + * @MIX_DECODE_NULL: Undefined decode mode. + * @MIX_DECODE_DIRECTRENDER: Stream is configured in Direct Render mode + * @MIX_DECODE_DECODERETURN: Stream is configured in Decode Return mode + * @MIX_DECODE_LAST: Last index in the enumeration. + * + * Operation Mode for a MI-X session. See mix_audio_configure(). + * + */ +typedef enum { + MIX_DECODE_NULL=0, + MIX_DECODE_DIRECTRENDER, + MIX_DECODE_DECODERETURN, + MIX_DECODE_LAST +} MixDecodeMode; + +/** + * MixAudioConfigParams: + * @parent: parent. + * @decode_mode: Decode Mode to use for current session. See #mix_acp_set_decodemode + * @stream_name: Stream name. See #mix_acp_set_streamname. This object will release the string upon destruction. + * @audio_manager: Type of Audio Manager. See #mix_acp_set_audio_manager. + * @num_channels: Number of output channels. See #MIX_ACP_NUM_CHANNELS + * @bit_rate: Optional. See #MIX_ACP_BITRATE + * @sample_freq: Output frequency. See #MIX_ACP_SAMPLE_FREQ + * @bits_per_sample: Number of output bit per sample. See #mix_acp_set_bps + * @op_align: Output Byte Alignment. See #mix_acp_set_op_align + * + * @MixAudio configuration parameters object. 
+ */ +struct _MixAudioConfigParams +{ + /*< public >*/ + MixParams parent; + + /*< public >*/ + /* Audio Session Parameters */ + MixDecodeMode decode_mode; + gchar *stream_name; + MixAudioManager audio_manager; + + /*< public >*/ + /* Audio Format Parameters */ + gint num_channels; + gint bit_rate; + gint sample_freq; + MixACPBPSType bits_per_sample; + MixACPOpAlign op_align; + /*< private >*/ + void* reserved1; + void* reserved2; + void* reserved3; + void* reserved4; +}; + +/** + * MixAudioConfigParamsClass: + * + * MI-X Audio object class + */ +struct _MixAudioConfigParamsClass +{ + /*< public >*/ + MixParamsClass parent_class; + + /*< virtual public >*/ + void (*print_params) (MixAudioConfigParams *obj); + + /* class members */ + +}; + +/** + * mix_acp_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_acp_get_type (void); + +/** + * mix_acp_new: + * @returns: A newly allocated instance of #MixAudioConfigParams + * + * Use this method to create new instance of #MixAudioConfigParams + */ +MixAudioConfigParams *mix_acp_new(void); + +/** + * mix_acp_ref: + * @mix: object to add reference + * @returns: the MixAudioConfigParams instance where reference count has been increased. + * + * Add reference count. + */ +MixAudioConfigParams *mix_acp_ref(MixAudioConfigParams *mix); + +/** + * mix_acp_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +#define mix_acp_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +/** + * MIX_ACP_DECODEMODE: + * @obj: #MixAudioConfigParams object + * + * MixAudioConfigParam.decode_mode accessor. + * + * Configure the decode mode to one of the #MixDecodeMode values. +*/ +#define MIX_ACP_DECODEMODE(obj) (MIX_AUDIOCONFIGPARAMS(obj)->decode_mode) + +/** + * MIX_ACP_NUM_CHANNELS: + * @obj: #MixAudioConfigParams object + * + * MixAudioConfigParam.num_channels accessor. + * + * Configure the number of output channels. This value must match the number of channels in the audio stream exactly, since down-mixing is not supported. + * + * This value can be used during #MIX_DECODE_DECODERETURN mode for buffer size/duration calculation. + * + * In Moorestown, the number of channels must be 1 or 2. +*/ +#define MIX_ACP_NUM_CHANNELS(obj) (MIX_AUDIOCONFIGPARAMS(obj)->num_channels) + +/** + * MIX_ACP_BITRATE: + * @obj: #MixAudioConfigParams object + * + * MixAudioConfigParam.bit_rate accessor. + * + * Bit rate of the current audio. + * + * Optional +*/ +#define MIX_ACP_BITRATE(obj) (MIX_AUDIOCONFIGPARAMS(obj)->bit_rate) + +/** + * MIX_ACP_SAMPLE_FREQ: + * @obj: #MixAudioConfigParams object + * + * MixAudioConfigParam.sample_freq accessor. + * + * Output sampling frequency. + * + * This value can be used during #MIX_DECODE_DECODERETURN mode for buffer size/duration calculation. +*/ +#define MIX_ACP_SAMPLE_FREQ(obj) (MIX_AUDIOCONFIGPARAMS(obj)->sample_freq) + +/** + * mix_acp_get_decodemode: + * @obj: #MixAudioConfigParams + * @returns: #MixDecodeMode + * + * Retrieve currently configured #MixDecodeMode. + */ +MixDecodeMode mix_acp_get_decodemode(MixAudioConfigParams *obj); + +/** + * mix_acp_set_decodemode: + * @obj: #MixAudioConfigParams + * @mode: #MixDecodeMode to set + * @returns: #MIX_RESULT + * + * Configure session for one of the #MixDecodeMode. + */ +MIX_RESULT mix_acp_set_decodemode(MixAudioConfigParams *obj, MixDecodeMode mode); + +/** + * mix_acp_get_streamname: + * @obj: #MixAudioConfigParams + * @returns: pointer to a copy of the stream name. NULL if name is not available.
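+ * + * A minimal usage sketch (illustrative; @acp is an already-configured instance): + * |[ + * gchar *name = mix_acp_get_streamname(acp); + * if (name) g_free(name); + * ]|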
+ * + * Returns a copy of the stream name. The caller must free it with g_free(). + */ +gchar *mix_acp_get_streamname(MixAudioConfigParams *obj); + +/** + * mix_acp_set_streamname: + * @obj: #MixAudioConfigParams + * @streamname: Stream name to set + * @returns: #MIX_RESULT + * + * Set the stream name. The object will make a copy of the input stream name string. + * + */ +MIX_RESULT mix_acp_set_streamname(MixAudioConfigParams *obj, const gchar *streamname); + +/** + * mix_acp_set_audio_manager: + * @obj: #MixAudioConfigParams + * @am: #MixAudioManager + * @returns: #MIX_RESULT + * + * Set the Audio Manager to one of the #MixAudioManager. + */ +MIX_RESULT mix_acp_set_audio_manager(MixAudioConfigParams *obj, MixAudioManager am); + +/** + * mix_acp_get_audio_manager: + * @obj: #MixAudioConfigParams + * @returns: #MixAudioManager + * + * Retrieve the currently configured audio manager. + */ +MixAudioManager mix_acp_get_audio_manager(MixAudioConfigParams *obj); + +/** + * mix_acp_is_streamname_valid: + * @obj: #MixAudioConfigParams + * @returns: boolean indicates if stream name is valid. + * + * Check if stream name is valid considering the current Decode Mode. + */ +gboolean mix_acp_is_streamname_valid(MixAudioConfigParams *obj); + + +/** + * mix_acp_get_bps: + * @obj: #MixAudioConfigParams + * @returns: #MixACPBPSType + * + * Retrieve the currently configured bits-per-sample value. + */ +MixACPBPSType mix_acp_get_bps(MixAudioConfigParams *obj); + +/** + * mix_acp_set_bps: + * @obj: #MixAudioConfigParams + * @type: #MixACPBPSType to set + * @returns: #MIX_RESULT + * + * Configure bits per sample to one of the supported #MixACPBPSType values. + */ +MIX_RESULT mix_acp_set_bps(MixAudioConfigParams *obj, MixACPBPSType type); + +/** + * mix_acp_get_op_align: + * @obj: #MixAudioConfigParams object + * @returns: #MixACPOpAlign + * + * Get Output Alignment. + */ +MixACPOpAlign mix_acp_get_op_align(MixAudioConfigParams *obj); + +/** + * mix_acp_set_op_align: + * @obj: #MixAudioConfigParams object + * @op_align: One of the supported #MixACPOpAlign + * @returns: MIX_RESULT + * + * Set Output Alignment to one of the #MixACPOpAlign values. + */ +MIX_RESULT mix_acp_set_op_align(MixAudioConfigParams *obj, MixACPOpAlign op_align); + +/* void mix_acp_print_params(MixAudioConfigParams *obj); */ + + +#endif /* __MIX_AUDIOCONFIGPARAMS_H__ */ + diff --git a/mix_audio/src/mixacpaac.c b/mix_audio/src/mixacpaac.c new file mode 100644 index 0000000..4f83eb9 --- /dev/null +++ b/mix_audio/src/mixacpaac.c @@ -0,0 +1,360 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise.
Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixacpaac + * @short_description: Audio configuration parameters for AAC-LC, HE-AAC v1, and HE-AAC v2 audio formats. + * @include: mixacpaac.h + * + * A data object which stores audio specific parameters for the following formats: + * + * AAC-LC + * HE-AAC v1 + * HE-AAC v2 + * + * + * Additional parameters must be set in the parent object #MixAudioConfigParams + */ + +#include "mixacpaac.h" +#include +#include + +static GType _mix_acp_aac_type = 0; +static MixAudioConfigParamsClass *parent_class = NULL; + +#define _do_init { _mix_acp_aac_type = g_define_type_id; } + +gboolean mix_acp_aac_copy(MixParams* target, const MixParams *src); +MixParams* mix_acp_aac_dup(const MixParams *obj); +gboolean mix_acp_aac_equal(MixParams* first, MixParams *second); +static void mix_acp_aac_finalize(MixParams *obj); + +void mix_aac_print_params(MixAudioConfigParams *obj); + +G_DEFINE_TYPE_WITH_CODE(MixAudioConfigParamsAAC, mix_acp_aac, MIX_TYPE_AUDIOCONFIGPARAMS, _do_init); + +static void mix_acp_aac_init (MixAudioConfigParamsAAC *self) +{ + self->MPEG_id = MIX_AAC_MPEG_ID_NULL; + self->bit_stream_format= MIX_AAC_BS_NULL; + self->aac_profile=MIX_AAC_PROFILE_NULL; + self->aot=0; + self->bit_rate_type=MIX_AAC_BR_NULL; /* 0=CBR, 1=VBR */ + self->CRC=FALSE; + self->sbrPresentFlag = -1; + self->psPresentFlag = -1; + self->pce_present=FALSE; /* Flag. 1 - present, 0 - not present, for RAW */ + self->syntc_id[0] = self->syntc_id[1] = 0; /* 0 for ID_SCE (Dual Mono), -1 for raw */ + self->syntc_tag[0] = self->syntc_tag[1] = 0; /* -1 for raw, 0-16 for rest of the streams */ + self->num_syntc_elems = 0; + self->aac_sample_rate = 0; + self->aac_channels = 0; +} + +static void mix_acp_aac_class_init(MixAudioConfigParamsAACClass *klass) +{ + MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); + + /* setup static parent class */ + parent_class = (MixAudioConfigParamsClass *) g_type_class_peek_parent (klass); + + mixparams_class->finalize = mix_acp_aac_finalize; + mixparams_class->copy = (MixParamsCopyFunction)mix_acp_aac_copy; + mixparams_class->dup = (MixParamsDupFunction)mix_acp_aac_dup; + mixparams_class->equal = (MixParamsEqualFunction)mix_acp_aac_equal; + +// MixAudioConfigParamsClass *acp = MIX_AUDIOCONFIGPARAMS_GET_CLASS(klass); + MixAudioConfigParamsClass *acp = (MixAudioConfigParamsClass *)klass; + acp->print_params = mix_aac_print_params; +} + +MixAudioConfigParamsAAC *mix_acp_aac_new(void) +{ + MixAudioConfigParamsAAC *ret = (MixAudioConfigParamsAAC *)g_type_create_instance (MIX_TYPE_AUDIOCONFIGPARAMSAAC); + + return ret; +} + +void mix_acp_aac_finalize(MixParams *obj) +{ + /* clean up here. */ + + /* Chain up parent */ + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->finalize) + klass->finalize(obj); +} + +MixAudioConfigParamsAAC *mix_acp_aac_ref(MixAudioConfigParamsAAC *mix) +{ + return (MixAudioConfigParamsAAC*)mix_params_ref(MIX_PARAMS(mix)); +} + +/** + * mix_acp_aac_dup: + * @obj: a #MixAudioConfigParamsAAC object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object.
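+ * + * The returned duplicate should be released with mix_acp_aac_unref() when no longer needed.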
+ */ +MixParams* mix_acp_aac_dup(const MixParams *obj) +{ + MixParams *ret = NULL; + + if (MIX_IS_AUDIOCONFIGPARAMSAAC(obj)) + { + MixAudioConfigParamsAAC *duplicate = mix_acp_aac_new(); + if (mix_acp_aac_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) + { + ret = MIX_PARAMS(duplicate); + } + else + { + mix_acp_aac_unref(duplicate); + } + } + + return ret; +} + +/** + * mix_acp_aac_copy: + * @target: copy to target + * @src: copy from src + * @returns: boolean indicates if copy is successful. + * + * Copy instance data from @src to @target. + */ +gboolean mix_acp_aac_copy(MixParams* target, const MixParams *src) +{ + if (MIX_IS_AUDIOCONFIGPARAMSAAC(target) && MIX_IS_AUDIOCONFIGPARAMSAAC(src)) + { + MixAudioConfigParamsAAC *t = MIX_AUDIOCONFIGPARAMSAAC(target); + MixAudioConfigParamsAAC *s = MIX_AUDIOCONFIGPARAMSAAC(src); + + t->MPEG_id = s->MPEG_id; + t->bit_stream_format = s->bit_stream_format; + t->aac_profile = s->aac_profile; + t->aot = s->aot; + t->bit_rate_type = s->bit_rate_type; + t->CRC = s->CRC; + + // Now chainup base class + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->copy) + { + return klass->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src)); + } + else + return TRUE; + } + return FALSE; +} + +/** + * mix_acp_aac_equal: + * @first: first object to compare + * @second: second object to compare + * @returns: boolean indicates if instance are equal. + * + * Compare the two objects for equivalence. + */ +gboolean mix_acp_aac_equal(MixParams* first, MixParams *second) +{ + gboolean ret = FALSE; + + if (first && second) + { + if (first == second) return TRUE; + } + else + { + return FALSE; + } + + // members within this scope equal. chaining up. + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->equal) + ret = klass->equal(first, second); + else + ret = TRUE; + + if (ret && MIX_IS_AUDIOCONFIGPARAMSAAC(first) && MIX_IS_AUDIOCONFIGPARAMSAAC(second)) + { + + MixAudioConfigParamsAAC *acp1 = MIX_AUDIOCONFIGPARAMSAAC(first); + MixAudioConfigParamsAAC *acp2 = MIX_AUDIOCONFIGPARAMSAAC(second); + + ret = (acp1->MPEG_id == acp2->MPEG_id) && + (acp1->bit_stream_format == acp2->bit_stream_format) && + (acp1->aac_profile == acp2->aac_profile) && + (acp1->aot == acp2->aot) && + (acp1->bit_rate_type == acp2->bit_rate_type) && + (acp1->CRC == acp2->CRC) && + (acp1->sbrPresentFlag == acp2->sbrPresentFlag) && + (acp1->psPresentFlag == acp2->psPresentFlag) && + (acp1->pce_present == acp2->pce_present) && + (acp1->syntc_id[0] == acp2->syntc_id[0]) && + (acp1->syntc_id[1] == acp2->syntc_id[1]) && + (acp1->syntc_tag[0] == acp2->syntc_tag[0]) && + (acp1->syntc_tag[1] == acp2->syntc_tag[1]); + } + + return ret; +} + +MIX_RESULT mix_acp_aac_set_bit_stream_format(MixAudioConfigParamsAAC *obj, MixAACBitstreamFormt bit_stream_format) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (!obj) return MIX_RESULT_NULL_PTR; + + if ((bit_stream_format < MIX_AAC_BS_ADTS) || (bit_stream_format >= MIX_AAC_BS_LAST)) + { + ret = MIX_RESULT_INVALID_PARAM; + } + else + { + obj->bit_stream_format = bit_stream_format; + } + + return ret; +} +MixAACBitstreamFormt mix_acp_aac_get_bit_stream_format(MixAudioConfigParamsAAC *obj) +{ + if (obj) + return obj->bit_stream_format; + else + return MIX_AAC_BS_NULL; +} + +MIX_RESULT mix_acp_aac_set_aac_profile(MixAudioConfigParamsAAC *obj, MixAACProfile aac_profile) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (!obj) return MIX_RESULT_NULL_PTR; + + if (aac_profile < MIX_AAC_PROFILE_MAIN || aac_profile >= MIX_AAC_PROFILE_LAST) + { + ret =
MIX_RESULT_INVALID_PARAM; + } + else + { + obj->aac_profile = aac_profile; + } + + return ret; +} +MixAACProfile mix_acp_aac_get_aac_profile(MixAudioConfigParamsAAC *obj) +{ + if (obj) + return obj->aac_profile; + else + return MIX_AAC_PROFILE_NULL; +} + +MIX_RESULT mix_acp_aac_set_bit_rate_type(MixAudioConfigParamsAAC *obj, MixAACBitrateType bit_rate_type) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (!obj) return MIX_RESULT_NULL_PTR; + + if (bit_rate_type != MIX_AAC_BR_CONSTANT && bit_rate_type != MIX_AAC_BR_VARIABLE) + { + ret = MIX_RESULT_INVALID_PARAM; + } + else + { + obj->bit_rate_type = bit_rate_type; + } + + return ret; +} +MixAACBitrateType mix_acp_aac_get_bit_rate_type(MixAudioConfigParamsAAC *obj) +{ + if (obj) + return obj->bit_rate_type; + else + return MIX_AAC_BR_NULL; +} + +void mix_aac_print_params(MixAudioConfigParams *obj) +{ + MixAudioConfigParamsAAC *t = MIX_AUDIOCONFIGPARAMSAAC(obj); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "Mpeg ID: %d\n", t->MPEG_id); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "bit_stream_format: %d\n", t->bit_stream_format); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "aac_profile: %d\n", t->aac_profile); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "aot: %d\n", t->aot); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "bit_rate_type: %d\n", t->bit_rate_type); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "CRC: %d\n", t->CRC); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, " \n"); +} + + +MIX_RESULT mix_acp_aac_set_aot(MixAudioConfigParamsAAC *obj, guint aot) +{ + if (!obj) return MIX_RESULT_NULL_PTR; + + if (MIX_IS_AUDIOCONFIGPARAMSAAC(obj)) + { + if ((aot == 2) || (aot == 5)) + { + obj->aot=aot; + return MIX_RESULT_SUCCESS; + } + else + { + return MIX_RESULT_NOT_SUPPORTED; + } + } + else + { + return MIX_RESULT_INVALID_PARAM; + } +} + +guint mix_acp_aac_get_aot(MixAudioConfigParamsAAC *obj) +{ + if (MIX_IS_AUDIOCONFIGPARAMSAAC(obj)) + return obj->aot; + else + return 0; +} + + +MIX_RESULT mix_acp_aac_set_mpeg_id(MixAudioConfigParamsAAC *obj, MixAACMpegID mpegid) +{ + if (!obj) return MIX_RESULT_NULL_PTR; + + if (MIX_IS_AUDIOCONFIGPARAMSAAC(obj)) + { + if ((mpegid >= MIX_AAC_MPEG_ID_NULL) && (mpegid < MIX_AAC_MPEG_LAST)) + { + obj->MPEG_id=mpegid; + return MIX_RESULT_SUCCESS; + } + else + { + return MIX_RESULT_NOT_SUPPORTED; + } + } + else + { + return MIX_RESULT_INVALID_PARAM; + } +} + +MixAACMpegID mix_acp_aac_get_mpeg_id(MixAudioConfigParamsAAC *obj) +{ + if (MIX_IS_AUDIOCONFIGPARAMSAAC(obj)) + return obj->MPEG_id; + else + return MIX_AAC_MPEG_ID_NULL; +} + diff --git a/mix_audio/src/mixacpaac.h b/mix_audio/src/mixacpaac.h new file mode 100644 index 0000000..7de2d95 --- /dev/null +++ b/mix_audio/src/mixacpaac.h @@ -0,0 +1,413 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+ + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_ACP_AAC_H__ +#define __MIX_ACP_AAC_H__ + +#include "mixacp.h" + +/** + * MIX_TYPE_AUDIOCONFIGPARAMSAAC: + * + * Get type of class. + */ +#define MIX_TYPE_AUDIOCONFIGPARAMSAAC (mix_acp_aac_get_type ()) + +/** + * MIX_AUDIOCONFIGPARAMSAAC: + * @obj: object to be type-casted. + * + * Type casting. + */ +#define MIX_AUDIOCONFIGPARAMSAAC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOCONFIGPARAMSAAC, MixAudioConfigParamsAAC)) + +/** + * MIX_IS_AUDIOCONFIGPARAMSAAC: + * @obj: an object. + * + * Checks if the given object is an instance of #MixAudioConfigParamsAAC + */ +#define MIX_IS_AUDIOCONFIGPARAMSAAC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOCONFIGPARAMSAAC)) + +/** + * MIX_AUDIOCONFIGPARAMSAAC_CLASS: + * @klass: class to be type-casted. + * + * Type casting. + */ +#define MIX_AUDIOCONFIGPARAMSAAC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOCONFIGPARAMSAAC, MixAudioConfigParamsAACClass)) + +/** + * MIX_IS_AUDIOCONFIGPARAMSAAC_CLASS: + * @klass: a class. + * + * Checks if the given class is #MixAudioConfigParamsAACClass + */ +#define MIX_IS_AUDIOCONFIGPARAMSAAC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOCONFIGPARAMSAAC)) + +/** + * MIX_AUDIOCONFIGPARAMSAAC_GET_CLASS: + * @obj: a #MixAudioConfigParams object. + * + * Get the class instance of the object. + */ +#define MIX_AUDIOCONFIGPARAMSAAC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOCONFIGPARAMSAAC, MixAudioConfigParamsAACClass)) + +typedef struct _MixAudioConfigParamsAAC MixAudioConfigParamsAAC; +typedef struct _MixAudioConfigParamsAACClass MixAudioConfigParamsAACClass; + +/** + * MixAACBitrateType: + * @MIX_AAC_BR_NULL: Undefined bit rate type. + * @MIX_AAC_BR_CONSTANT: Constant bit rate. + * @MIX_AAC_BR_VARIABLE: Variable bit rate. + * @MIX_AAC_BR_LAST: last entry. + * + * Types of bitrate in AAC. + */ +typedef enum { + MIX_AAC_BR_NULL=-1, + MIX_AAC_BR_CONSTANT=0, + MIX_AAC_BR_VARIABLE, + MIX_AAC_BR_LAST +} MixAACBitrateType; + +/** + * MixAACBitstreamFormt: + * @MIX_AAC_BS_NULL: Undefined bitstream format. + * @MIX_AAC_BS_ADTS: Bitstream is in ADTS format. + * @MIX_AAC_BS_ADIF: Bitstream is in ADIF format. + * @MIX_AAC_BS_RAW: Bitstream is in raw format. + * @MIX_AAC_BS_LAST: Last entry. + * + * AAC bitstream format. + */ +typedef enum { + MIX_AAC_BS_NULL=-1, + MIX_AAC_BS_ADTS=0, + MIX_AAC_BS_ADIF, + MIX_AAC_BS_RAW, + MIX_AAC_BS_LAST +} MixAACBitstreamFormt; + +/** + * MixAACProfile: + * @MIX_AAC_PROFILE_NULL: Undefined profile. + * @MIX_AAC_PROFILE_MAIN: AAC Main profile (not supported). + * @MIX_AAC_PROFILE_LC: AAC-LC profile, including support of the SBR and PS tools. + * @MIX_AAC_PROFILE_SSR: SSR profile (not supported). + * @MIX_AAC_PROFILE_LAST: Last entry. + * + * AAC profiles definitions. + */ +typedef enum { + MIX_AAC_PROFILE_NULL=-1, + MIX_AAC_PROFILE_MAIN=0, + MIX_AAC_PROFILE_LC, + MIX_AAC_PROFILE_SSR, + MIX_AAC_PROFILE_LAST +} MixAACProfile; + +/* Using enumeration as this MPEG ID definition is specific to SST and different from + any MPEG/ADTS header. +*/ +/** + * MixAACMpegID: + * @MIX_AAC_MPEG_ID_NULL: Undefined MPEG ID. + * @MIX_AAC_MPEG_2_ID: Indicate MPEG 2 Audio.
+ * @MIX_AAC_MPEG_4_ID: Indicate MPEG 4 Audio. + * @MIX_AAC_MPEG_LAST: last entry. + * + * AAC MPEG ID. +*/ +typedef enum { + MIX_AAC_MPEG_ID_NULL=-1, + MIX_AAC_MPEG_2_ID = 0, + MIX_AAC_MPEG_4_ID = 1, + MIX_AAC_MPEG_LAST +} MixAACMpegID; + +/** + * MixAudioConfigParamsAAC: + * @parent: parent. + * @MPEG_id: MPEG ID. See #mix_acp_aac_set_mpeg_id + * @bit_stream_format: Bitstream format. See #mix_acp_aac_set_bit_stream_format. + * @aac_profile: AAC profile. See #mix_acp_aac_set_aac_profile. + * @aot: Audio object type. See #mix_acp_aac_set_aot + * @aac_sample_rate: See #MIX_ACP_AAC_SAMPLE_RATE macro. + * @aac_channels: See #MIX_ACP_AAC_CHANNELS macro. + * @bit_rate_type: Bitrate type. See #mix_acp_aac_set_bit_rate_type + * @sbrPresentFlag: See #MIX_ACP_AAC_SBR_FLAG macro. + * @psPresentFlag: See #MIX_ACP_AAC_PS_FLAG macro. + * @CRC: CRC check 0:disable, 1:enable. + * @pce_present: Not Used. See #MIX_ACP_AAC_PCE_FLAG + * @syntc_id: Not Used. 0 for ID_SCE (Dual Mono), -1 for raw. + * @syntc_tag: Not Used. -1 for raw. 0-16 for rest of the streams. + * @num_syntc_elems: Not Used. Number of syntactic elements. + * + * MixAudio Parameter object + */ +struct _MixAudioConfigParamsAAC +{ + /*< public >*/ + MixAudioConfigParams parent; + + /*< public >*/ + /* Audio Format Parameters */ + MixAACMpegID MPEG_id; + MixAACBitstreamFormt bit_stream_format; + MixAACProfile aac_profile; + guint aot; + guint aac_sample_rate; + guint aac_channels; + MixAACBitrateType bit_rate_type; + gboolean CRC; + guint sbrPresentFlag; + guint psPresentFlag; + gboolean pce_present; + gint8 syntc_id[2]; + gint8 syntc_tag[2]; + gint num_syntc_elems; + /*< private >*/ + void* reserved1; + void* reserved2; + void* reserved3; + void* reserved4; +}; + +/** + * MixAudioConfigParamsAACClass: + * + * MI-X Audio object class + */ +struct _MixAudioConfigParamsAACClass +{ + /*< public >*/ + MixAudioConfigParamsClass parent_class; + + /* class members */ +}; + +/** + * mix_acp_aac_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_acp_aac_get_type (void); + +/** + * mix_acp_aac_new: + * @returns: A newly allocated instance of #MixAudioConfigParamsAAC + * + * Use this method to create new instance of #MixAudioConfigParamsAAC + */ +MixAudioConfigParamsAAC *mix_acp_aac_new(void); + +/** + * mix_acp_aac_ref: + * @mix: object to add reference + * @returns: the MixAudioConfigParamsAAC instance where reference count has been increased. + * + * Add reference count. + */ +MixAudioConfigParamsAAC *mix_acp_aac_ref(MixAudioConfigParamsAAC *mix); + +/** + * mix_acp_aac_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +#define mix_acp_aac_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + + +/** + * mix_acp_aac_set_mpeg_id: + * @obj: #MixAudioConfigParamsAAC + * @mpegid: MPEG ID to set. + * @returns: MIX_RESULT + * + * Configure decoder to treat audio as MPEG 2 or MPEG 4. +*/ +MIX_RESULT mix_acp_aac_set_mpeg_id(MixAudioConfigParamsAAC *obj, MixAACMpegID mpegid); + +/** + * mix_acp_aac_get_mpeg_id: + * @obj: #MixAudioConfigParamsAAC object + * @returns: MPEG ID. + * + * Retrieve the currently configured MPEG ID value. +*/ +MixAACMpegID mix_acp_aac_get_mpeg_id(MixAudioConfigParamsAAC *obj); + +/** + * MIX_ACP_AAC_CRC: + * @obj: #MixAudioConfigParamsAAC object. + * + * #MixAudioConfigParamAAC.CRC accessor. +*/ +#define MIX_ACP_AAC_CRC(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->CRC) + +/** + * mix_acp_aac_set_aot: + * @obj: #MixAudioConfigParamsAAC + * @aot: Audio Object Type.
+ * + * Audio Object Type for the MPEG-4 audio stream. Valid values are: + * + * 2 - for AAC-LC + * + * 5 - for SBR + * + * The method returns MIX_RESULT_NOT_SUPPORTED for unsupported values. + * +*/ +MIX_RESULT mix_acp_aac_set_aot(MixAudioConfigParamsAAC *obj, guint aot); + +/** + * mix_acp_aac_get_aot: + * @obj: #MixAudioConfigParamsAAC + * @returns: Currently configured audio object type. Or 0 if not yet specified. + * + * Retrieve the currently configured audio object type. +*/ +guint mix_acp_aac_get_aot(MixAudioConfigParamsAAC *obj); + +/** + * MIX_ACP_AAC_SBR_FLAG: + * @obj: #MixAudioConfigParamsAAC object + * + * MixAudioConfigParamAAC.sbrPresentFlag accessor. + * + * Applicable only when @bit_stream_format==#MIX_AAC_BS_RAW. Indicates whether SBR data is present. + * + * 0: Absent + * + * 1: Present + * + * -1 (0xffffffff): indicates implicit signalling. + */ +#define MIX_ACP_AAC_SBR_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->sbrPresentFlag) + +/** + * MIX_ACP_AAC_PS_FLAG: + * @obj: #MixAudioConfigParamsAAC object + * + * MixAudioConfigParamAAC.psPresentFlag accessor. + * + * Applicable only when @bit_stream_format==#MIX_AAC_BS_RAW. Indicates whether PS data is present. + * + * 0: Absent + * + * 1: Present + * + * -1 (0xffffffff): indicates implicit signalling. + */ +#define MIX_ACP_AAC_PS_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->psPresentFlag) + +/** + * MIX_ACP_AAC_PCE_FLAG: + * @obj: #MixAudioConfigParamsAAC object. + * + * MixAudioConfigParamAAC.pce_present accessor. + * + * Applicable only when @bit_stream_format==#MIX_AAC_BS_RAW. Indicates PCE data presence. + * + * 1:present + * + * 0:absent. + * + * Not Used on Moorestown. + */ +#define MIX_ACP_AAC_PCE_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->pce_present) + +/** + * MIX_ACP_AAC_SAMPLE_RATE: + * @obj: #MixAudioConfigParamsAAC object. + * + * MixAudioConfigParamAAC.aac_sample_rate accessor. + * + * Plain AAC decoder operating sample rate, which could differ from the output sampling rate with HE-AAC v1 and v2. + */ +#define MIX_ACP_AAC_SAMPLE_RATE(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->aac_sample_rate) + +/** + * MIX_ACP_AAC_CHANNELS: + * @obj: #MixAudioConfigParamsAAC + * + * MixAudioConfigParamAAC.aac_channels accessor. + * + * Indicates the number of output channels used by AAC decoder before SBR or PS tools are applied. + * + */ +#define MIX_ACP_AAC_CHANNELS(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->aac_channels) + +/** + * mix_acp_aac_get_bit_stream_format: + * @obj: #MixAudioConfigParamsAAC + * @returns: #MixAACBitstreamFormt + * + * Return the bitstream format currently configured. + */ +MixAACBitstreamFormt mix_acp_aac_get_bit_stream_format(MixAudioConfigParamsAAC *obj); + +/** + * mix_acp_aac_set_bit_stream_format: + * @obj: #MixAudioConfigParamsAAC + * @bit_stream_format: Bit stream format. + * @returns: MIX_RESULT + * + * Set the type of bitstream format as specified in #MixAACBitstreamFormt. + */ +MIX_RESULT mix_acp_aac_set_bit_stream_format(MixAudioConfigParamsAAC *obj, MixAACBitstreamFormt bit_stream_format); + +/** + * mix_acp_aac_get_aac_profile: + * @obj: #MixAudioConfigParamsAAC + * @returns: #MixAACProfile + * + * Retrieve the AAC profile currently configured. + */ +MixAACProfile mix_acp_aac_get_aac_profile(MixAudioConfigParamsAAC *obj); + +/** + * mix_acp_aac_set_aac_profile: + * @obj: #MixAudioConfigParamsAAC + * @aac_profile: AAC profile to set. + * @returns: MIX_RESULT + * + * Configure AAC profile for current session.
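+ * + * A hypothetical call sequence (error handling elided; @acp_aac is an existing #MixAudioConfigParamsAAC): + * |[ + * if (mix_acp_aac_set_aac_profile(acp_aac, MIX_AAC_PROFILE_LC) != MIX_RESULT_SUCCESS) + * g_warning("AAC profile not accepted"); + * ]|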
+ * + * Only #MIX_AAC_PROFILE_LC is supported in Moorestown. + */ +MIX_RESULT mix_acp_aac_set_aac_profile(MixAudioConfigParamsAAC *obj, MixAACProfile aac_profile); + +/** + * mix_acp_aac_get_bit_rate_type: + * @obj: #MixAudioConfigParamsAAC + * @returns: #MixAACBitrateType + * + * Retrieve the bit rate type currently configured. + */ +MixAACBitrateType mix_acp_aac_get_bit_rate_type(MixAudioConfigParamsAAC *obj); + +/** + * mix_acp_aac_set_bit_rate_type: + * @obj: #MixAudioConfigParamsAAC + * @bit_rate_type: Bit rate type to set. + * @returns: MIX_RESULT + * + * Set the bit rate type used. + */ +MIX_RESULT mix_acp_aac_set_bit_rate_type(MixAudioConfigParamsAAC *obj, MixAACBitrateType bit_rate_type); + +#endif /* __MIX_ACP_AAC_H__ */ diff --git a/mix_audio/src/mixacpmp3.c b/mix_audio/src/mixacpmp3.c new file mode 100644 index 0000000..75ab8cb --- /dev/null +++ b/mix_audio/src/mixacpmp3.c @@ -0,0 +1,175 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixacpmp3 + * @short_description: Audio configuration parameters for MP3 audio. + * @include: mixacpmp3.h + * + * A data object which stores audio specific parameters for MP3 audio.
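+ * + * A typical setup only touches the MP3-specific accessor macros defined in mixacpmp3.h, e.g. (illustrative): + * |[ + * MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new(); + * MIX_ACP_MP3_MPEG_FORMAT(mp3) = 1; + * MIX_ACP_MP3_MPEG_LAYER(mp3) = 3; + * ]|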
+ * + * Additional parameters must be set in the parent object #MixAudioConfigParams + */ + +#include "mixacpmp3.h" + +static GType _mix_acp_mp3_type = 0; +static MixAudioConfigParamsClass *parent_class = NULL; + +#define _do_init { _mix_acp_mp3_type = g_define_type_id; } + +gboolean mix_acp_mp3_copy(MixParams* target, const MixParams *src); +MixParams* mix_acp_mp3_dup(const MixParams *obj); +gboolean mix_acp_mp3_equal(MixParams* first, MixParams *second); +static void mix_acp_mp3_finalize(MixParams *obj); + +G_DEFINE_TYPE_WITH_CODE(MixAudioConfigParamsMP3, mix_acp_mp3, MIX_TYPE_AUDIOCONFIGPARAMS, _do_init); + +static void mix_acp_mp3_init (MixAudioConfigParamsMP3 *self) +{ + self->CRC=FALSE; + self->MPEG_format=0; + self->MPEG_layer=0; +} + +static void mix_acp_mp3_class_init(MixAudioConfigParamsMP3Class *klass) +{ + MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); + + /* setup static parent class */ + parent_class = (MixAudioConfigParamsClass *) g_type_class_peek_parent (klass); + + mixparams_class->finalize = mix_acp_mp3_finalize; + mixparams_class->copy = (MixParamsCopyFunction)mix_acp_mp3_copy; + mixparams_class->dup = (MixParamsDupFunction)mix_acp_mp3_dup; + mixparams_class->equal = (MixParamsEqualFunction)mix_acp_mp3_equal; +} + +MixAudioConfigParamsMP3 *mix_acp_mp3_new(void) +{ + MixAudioConfigParamsMP3 *ret = (MixAudioConfigParamsMP3 *)g_type_create_instance (MIX_TYPE_AUDIOCONFIGPARAMSMP3); + + return ret; +} + +void mix_acp_mp3_finalize(MixParams *obj) +{ + /* clean up here. */ + + /* Chain up parent */ + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->finalize) + klass->finalize(obj); +} + +MixAudioConfigParamsMP3 *mix_acp_mp3_ref(MixAudioConfigParamsMP3 *mix) +{ + if (G_UNLIKELY(!mix)) return NULL; + return (MixAudioConfigParamsMP3*)mix_params_ref(MIX_PARAMS(mix)); +} + +/** + * mix_acp_mp3_dup: + * @obj: a #MixAudioConfigParamsMP3 object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. + */ +MixParams* mix_acp_mp3_dup(const MixParams *obj) +{ + MixParams *ret = NULL; + + if (MIX_IS_AUDIOCONFIGPARAMSMP3(obj)) + { + MixAudioConfigParamsMP3 *duplicate = mix_acp_mp3_new(); + if (mix_acp_mp3_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) + { + ret = MIX_PARAMS(duplicate); + } + else + { + mix_acp_mp3_unref(duplicate); + } + } + + return ret; +} + +/** + * mix_acp_mp3_copy: + * @target: copy to target + * @src: copy from src + * @returns: boolean indicates if copy is successful. + * + * Copy instance data from @src to @target. + */ +gboolean mix_acp_mp3_copy(MixParams* target, const MixParams *src) +{ + if (MIX_IS_AUDIOCONFIGPARAMSMP3(target) && MIX_IS_AUDIOCONFIGPARAMSMP3(src)) + { + MixAudioConfigParamsMP3 *t = MIX_AUDIOCONFIGPARAMSMP3(target); + MixAudioConfigParamsMP3 *s = MIX_AUDIOCONFIGPARAMSMP3(src); + + t->CRC = s->CRC; + t->MPEG_format = s->MPEG_format; + t->MPEG_layer = s->MPEG_layer; + + // Now chainup base class + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->copy) + { + return klass->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src)); + } + else + return TRUE; + } + return FALSE; +} + +/** + * mix_acp_mp3_equal: + * @first: first object to compare + * @second: second object to compare + * @returns: boolean indicates if instance are equal. + * + * Compare the two objects for equivalence.
+ */ +gboolean mix_acp_mp3_equal(MixParams* first, MixParams *second) +{ + gboolean ret = FALSE; + + if (first && second) + { + if (first == second) return TRUE; + } + else + { + return FALSE; + } + + // members within this scope equal. chaining up. + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->equal) + ret = klass->equal(first, second); + else + ret = TRUE; + + if (ret && MIX_IS_AUDIOCONFIGPARAMSMP3(first) && MIX_IS_AUDIOCONFIGPARAMSMP3(second)) + { + MixAudioConfigParamsMP3 *acp1 = MIX_AUDIOCONFIGPARAMSMP3(first); + MixAudioConfigParamsMP3 *acp2 = MIX_AUDIOCONFIGPARAMSMP3(second); + + ret = (acp1->CRC == acp2->CRC) && + (acp1->MPEG_format == acp2->MPEG_format) && + (acp1->MPEG_layer == acp2->MPEG_layer); + } + + return ret; +} + + diff --git a/mix_audio/src/mixacpmp3.h b/mix_audio/src/mixacpmp3.h new file mode 100644 index 0000000..e000b4f --- /dev/null +++ b/mix_audio/src/mixacpmp3.h @@ -0,0 +1,170 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_ACP_MP3_H__ +#define __MIX_ACP_MP3_H__ + + +#include "mixacp.h" + +/** + * MIX_TYPE_AUDIOCONFIGPARAMSMP3: + * + * Get type of class. + */ +#define MIX_TYPE_AUDIOCONFIGPARAMSMP3 (mix_acp_mp3_get_type ()) + +/** + * MIX_AUDIOCONFIGPARAMSMP3: + * @obj: object to be type-casted. + * + * Type casting. + */ +#define MIX_AUDIOCONFIGPARAMSMP3(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOCONFIGPARAMSMP3, MixAudioConfigParamsMP3)) + +/** + * MIX_IS_AUDIOCONFIGPARAMSMP3: + * @obj: an object. + * + * Checks if the given object is an instance of #MixAudioConfigParamsMP3 + */ +#define MIX_IS_AUDIOCONFIGPARAMSMP3(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOCONFIGPARAMSMP3)) + +/** + * MIX_AUDIOCONFIGPARAMSMP3_CLASS: + * @klass: class to be type-casted. + * + * Type casting. + */ +#define MIX_AUDIOCONFIGPARAMSMP3_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOCONFIGPARAMSMP3, MixAudioConfigParamsMP3Class)) + +/** + * MIX_IS_AUDIOCONFIGPARAMSMP3_CLASS: + * @klass: a class. + * + * Checks if the given class is #MixAudioConfigParamsMP3Class + */ +#define MIX_IS_AUDIOCONFIGPARAMSMP3_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOCONFIGPARAMSMP3)) + +/** + * MIX_AUDIOCONFIGPARAMSMP3_GET_CLASS: + * @obj: a #MixAudioConfigParams object. + * + * Get the class instance of the object. 
+ */ +#define MIX_AUDIOCONFIGPARAMSMP3_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOCONFIGPARAMSMP3, MixAudioConfigParamsMP3Class)) + +typedef struct _MixAudioConfigParamsMP3 MixAudioConfigParamsMP3; +typedef struct _MixAudioConfigParamsMP3Class MixAudioConfigParamsMP3Class; + +/** + * MixAudioConfigParamsMP3: + * @parent: parent. + * @CRC: CRC. See #MIX_ACP_MP3_CRC + * @MPEG_format: Optional. MPEG format of the MPEG audio. See #MIX_ACP_MP3_MPEG_FORMAT + * @MPEG_layer: Optional. MPEG layer of the MPEG audio. See #MIX_ACP_MP3_MPEG_LAYER + * + * MI-X Audio Parameter object for MP3 Audio. + */ +struct _MixAudioConfigParamsMP3 +{ + /*< public >*/ + MixAudioConfigParams parent; + + /*< public >*/ + /* Audio Format Parameters */ + gboolean CRC; + gint MPEG_format; + gint MPEG_layer; + + /*< private >*/ + void* reserved1; + void* reserved2; + void* reserved3; + void* reserved4; +}; + +/** + * MixAudioConfigParamsMP3Class: + * + * MI-X Audio object class + */ +struct _MixAudioConfigParamsMP3Class +{ + /*< public >*/ + MixAudioConfigParamsClass parent_class; + + /* class members */ +}; + +/** + * mix_acp_mp3_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_acp_mp3_get_type (void); + +/** + * mix_acp_mp3_new: + * @returns: A newly allocated instance of #MixAudioConfigParamsMP3 + * + * Use this method to create new instance of #MixAudioConfigParamsMP3 + */ +MixAudioConfigParamsMP3 *mix_acp_mp3_new(void); + +/** + * mix_acp_mp3_ref: + * @mix: object to add reference + * @returns: the MixAudioConfigParamsMP3 instance where reference count has been increased. + * + * Add reference count. + */ +MixAudioConfigParamsMP3 *mix_acp_mp3_ref(MixAudioConfigParamsMP3 *mix); + +/** + * mix_acp_mp3_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +#define mix_acp_mp3_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +/** + * MIX_ACP_MP3_CRC: + * @obj: #MixAudioConfigParamsMP3 object. + * + * MixAudioConfigParamMP3.CRC accessor. + * + * Optional +*/ +#define MIX_ACP_MP3_CRC(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->CRC) + +/** + * MIX_ACP_MP3_MPEG_FORMAT: + * @obj: #MixAudioConfigParamsMP3 object. + * + * MixAudioConfigParamMP3.MPEG_format accessor. + * + * Supported MPEG format should be 1 or 2. +*/ +#define MIX_ACP_MP3_MPEG_FORMAT(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->MPEG_format) + +/** + * MIX_ACP_MP3_MPEG_LAYER: + * @obj: #MixAudioConfigParamsMP3 object. + * + * MixAudioConfigParamMP3.MPEG_layer accessor. + * + * Supported layer should be 1, 2, or 3. +*/ +#define MIX_ACP_MP3_MPEG_LAYER(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->MPEG_layer) + +#endif /* __MIX_ACP_MP3_H__ */ diff --git a/mix_audio/src/mixacpwma.c b/mix_audio/src/mixacpwma.c new file mode 100644 index 0000000..cf2590f --- /dev/null +++ b/mix_audio/src/mixacpwma.c @@ -0,0 +1,205 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions.
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixacpwma + * @short_description: Audio parameters for WMA audio. + * @include: mixacpwma.h + * + * A data object which stores audio specific parameters for WMA. + * + * In Moorestown, only WMA2 is supported. + * + * Additional parameters must be set in the parent object #MixAudioConfigParams + */ + +#include "mixacpwma.h" + +static GType _mix_acp_wma_type = 0; +static MixAudioConfigParamsClass *parent_class = NULL; + +#define _do_init { _mix_acp_wma_type = g_define_type_id; } + +gboolean mix_acp_wma_copy(MixParams* target, const MixParams *src); +MixParams* mix_acp_wma_dup(const MixParams *obj); +gboolean mix_acp_wma_equal(MixParams* first, MixParams *second); +static void mix_acp_wma_finalize(MixParams *obj); + +G_DEFINE_TYPE_WITH_CODE(MixAudioConfigParamsWMA, mix_acp_wma, MIX_TYPE_AUDIOCONFIGPARAMS, _do_init); + +static void mix_acp_wma_init (MixAudioConfigParamsWMA *self) +{ + self->channel_mask = 0; + self->format_tag = 0; + self->block_align = 0; + self->wma_encode_opt = 0; + self->pcm_bit_width = 0; /* source pcm bit width */ + self->wma_version = MIX_AUDIO_WMA_VUNKNOWN; +} + +static void mix_acp_wma_class_init(MixAudioConfigParamsWMAClass *klass) +{ + MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); + + /* setup static parent class */ + parent_class = (MixAudioConfigParamsClass *) g_type_class_peek_parent (klass); + + mixparams_class->finalize = mix_acp_wma_finalize; + mixparams_class->copy = (MixParamsCopyFunction)mix_acp_wma_copy; + mixparams_class->dup = (MixParamsDupFunction)mix_acp_wma_dup; + mixparams_class->equal = (MixParamsEqualFunction)mix_acp_wma_equal; +} + +MixAudioConfigParamsWMA *mix_acp_wma_new(void) +{ + MixAudioConfigParamsWMA *ret = (MixAudioConfigParamsWMA *)g_type_create_instance (MIX_TYPE_AUDIOCONFIGPARAMSWMA); + + return ret; +} + +void mix_acp_wma_finalize(MixParams *obj) +{ + /* clean up here. */ + + /* Chain up parent */ + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->finalize) + klass->finalize(obj); +} + +MixAudioConfigParamsWMA *mix_acp_wma_ref(MixAudioConfigParamsWMA *obj) +{ + return (MixAudioConfigParamsWMA*)mix_params_ref(MIX_PARAMS(obj)); +} + +/** + * mix_acp_wma_dup: + * @obj: a #MixAudioConfigParamsWMA object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. + */ +MixParams* mix_acp_wma_dup(const MixParams *obj) +{ + MixParams *ret = NULL; + + if (MIX_IS_AUDIOCONFIGPARAMSWMA(obj)) + { + MixAudioConfigParamsWMA *duplicate = mix_acp_wma_new(); + if (mix_acp_wma_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) + { + ret = MIX_PARAMS(duplicate); + } + else + { + mix_acp_wma_unref(duplicate); + } + } + + return ret; +} + +/** + * mix_acp_wma_copy: + * @target: copy to target + * @src: copy from src + * @returns: boolean indicates if copy is successful. + * + * Copy instance data from @src to @target. 
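+ * + * The WMA-specific fields are copied here first and the base-class copy is then chained up, so the parent #MixAudioConfigParams fields are carried over as well.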
+ */ +gboolean mix_acp_wma_copy(MixParams* target, const MixParams *src) +{ + if (MIX_IS_AUDIOCONFIGPARAMSWMA(target) && MIX_IS_AUDIOCONFIGPARAMSWMA(src)) + { + MixAudioConfigParamsWMA *t = MIX_AUDIOCONFIGPARAMSWMA(target); + MixAudioConfigParamsWMA *s = MIX_AUDIOCONFIGPARAMSWMA(src); + + t->channel_mask = s->channel_mask; + t->format_tag = s->format_tag; + t->block_align = s->block_align; + t->wma_encode_opt = s->wma_encode_opt; + t->wma_version = s->wma_version; + t->pcm_bit_width = s->pcm_bit_width; + + // Now chainup base class + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->copy) + { + return klass->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src)); + } + else + return TRUE; + } + return FALSE; +} + +/** + * mix_acp_wma_equal: + * @first: first object to compare + * @second: second object to compare + * @returns: boolean indicates if instance are equal. + * + * Compare the two objects for equivalence. + */ +gboolean mix_acp_wma_equal(MixParams* first, MixParams *second) +{ + gboolean ret = FALSE; + + if (first && second) + { + if (first == second) return TRUE; + } + else + { + return FALSE; + } + + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->equal) + ret = klass->equal(first, second); + else + ret = TRUE; + + if (ret && MIX_IS_AUDIOCONFIGPARAMSWMA(first) && MIX_IS_AUDIOCONFIGPARAMSWMA(second)) + { + MixAudioConfigParamsWMA *acp1 = MIX_AUDIOCONFIGPARAMSWMA(first); + MixAudioConfigParamsWMA *acp2 = MIX_AUDIOCONFIGPARAMSWMA(second); + + ret = (acp1->channel_mask == acp2->channel_mask) && + (acp1->format_tag == acp2->format_tag) && + (acp1->block_align == acp2->block_align) && + (acp1->wma_encode_opt == acp2->wma_encode_opt) && + (acp1->pcm_bit_width == acp2->pcm_bit_width) && + (acp1->wma_version == acp2->wma_version); + } + + return ret; +} + +MixAudioWMAVersion mix_acp_wma_get_version(MixAudioConfigParamsWMA *obj) +{ + if (obj) + return (obj->wma_version); + else + return MIX_AUDIO_WMA_VUNKNOWN; +} + +MIX_RESULT mix_acp_wma_set_version(MixAudioConfigParamsWMA *obj, MixAudioWMAVersion ver) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (!obj) return MIX_RESULT_NULL_PTR; + + if ((ver > MIX_AUDIO_WMA_VUNKNOWN) && (ver < MIX_AUDIO_WMA_LAST)) + obj->wma_version = ver; + else + ret=MIX_RESULT_INVALID_PARAM; + + return ret; +} + diff --git a/mix_audio/src/mixacpwma.h b/mix_audio/src/mixacpwma.h new file mode 100644 index 0000000..8c617fd --- /dev/null +++ b/mix_audio/src/mixacpwma.h @@ -0,0 +1,235 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise.
Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_ACP_WMA_H__ +#define __MIX_ACP_WMA_H__ + + +#include "mixacp.h" + +/** + * MIX_TYPE_AUDIOCONFIGPARAMSWMA: + * + * Get type of class. + */ +#define MIX_TYPE_AUDIOCONFIGPARAMSWMA (mix_acp_wma_get_type ()) + +/** + * MIX_AUDIOCONFIGPARAMSWMA: + * @obj: object to be type-casted. + * + * Type casting. + */ +#define MIX_AUDIOCONFIGPARAMSWMA(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOCONFIGPARAMSWMA, MixAudioConfigParamsWMA)) + +/** + * MIX_IS_AUDIOCONFIGPARAMSWMA: + * @obj: an object. + * + * Checks if the given object is an instance of #MixAudioConfigParamsWMA + */ +#define MIX_IS_AUDIOCONFIGPARAMSWMA(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOCONFIGPARAMSWMA)) + +/** + * MIX_AUDIOCONFIGPARAMSWMA_CLASS: + * @klass: class to be type-casted. + * + * Type casting. + */ +#define MIX_AUDIOCONFIGPARAMSWMA_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOCONFIGPARAMSWMA, MixAudioConfigParamsWMAClass)) + +/** + * MIX_IS_AUDIOCONFIGPARAMSWMA_CLASS: + * @klass: a class. + * + * Checks if the given class is #MixAudioConfigParamsWMAClass + */ +#define MIX_IS_AUDIOCONFIGPARAMSWMA_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOCONFIGPARAMSWMA)) + +/** + * MIX_AUDIOCONFIGPARAMSWMA_GET_CLASS: + * @obj: a #MixAudioConfigParamsWMA object. + * + * Get the class instance of the object. + */ +#define MIX_AUDIOCONFIGPARAMSWMA_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOCONFIGPARAMSWMA, MixAudioConfigParamsWMAClass)) + +/** + * MixAudioWMAVersion: + * @MIX_AUDIO_WMA_VUNKNOWN: WMA version undefined. + * @MIX_AUDIO_WMA_V9: WMA 9 + * @MIX_AUDIO_WMA_V10: WMA 10 (not supported) + * @MIX_AUDIO_WMA_V10P: WMA 10 Pro (not supported) + * @MIX_AUDIO_WMA_LAST: last entry. + * + * WMA version. + */ +typedef enum { + MIX_AUDIO_WMA_VUNKNOWN, + MIX_AUDIO_WMA_V9, + MIX_AUDIO_WMA_V10, + MIX_AUDIO_WMA_V10P, + MIX_AUDIO_WMA_LAST +} MixAudioWMAVersion; + +typedef struct _MixAudioConfigParamsWMA MixAudioConfigParamsWMA; +typedef struct _MixAudioConfigParamsWMAClass MixAudioConfigParamsWMAClass; + +/** + * MixAudioConfigParamsWMA: + * @parent: parent. + * @channel_mask: Channel Mask. See #MIX_ACP_WMA_CHANNEL_MASK + * @format_tag: Format tag. See #MIX_ACP_WMA_FORMAT_TAG + * @block_align: Block alignment. See #MIX_ACP_WMA_BLOCK_ALIGN + * @wma_encode_opt: Encoder option. See #MIX_ACP_WMA_ENCODE_OPT + * @pcm_bit_width: Source pcm bit width. See #MIX_ACP_WMA_PCM_BIT_WIDTH + * @wma_version: WMA version. See #mix_acp_wma_set_version + * + * MI-X Audio Parameter object + */ +struct _MixAudioConfigParamsWMA +{ + /*< public >*/ + MixAudioConfigParams parent; + + /*< public >*/ + /* Audio Format Parameters */ + guint32 channel_mask; + guint16 format_tag; + guint16 block_align; + guint16 wma_encode_opt;/* Encoder option */ + guint8 pcm_bit_width; /* source pcm bit width */ + MixAudioWMAVersion wma_version; +}; + +/** + * MixAudioConfigParamsWMAClass: + * + * MI-X Audio object class + */ +struct _MixAudioConfigParamsWMAClass +{ + /*< public >*/ + MixAudioConfigParamsClass parent_class; + + /* class members */ +}; + +/** + * mix_acp_wma_get_type: + * @returns: type + * + * Get the type of object.
+ */ +GType mix_acp_wma_get_type (void); + +/** + * mix_acp_wma_new: + * @returns: A newly allocated instance of #MixAudioConfigParamsWMA + * + * Use this method to create new instance of #MixAudioConfigParamsWMA + */ +MixAudioConfigParamsWMA *mix_acp_wma_new(void); + +/** + * mix_acp_wma_ref: + * @mix: object to add reference + * @returns: the MixAudioConfigParamsWMA instance where reference count has been increased. + * + * Add reference count. + */ +MixAudioConfigParamsWMA *mix_acp_wma_ref(MixAudioConfigParamsWMA *mix); + +/** + * mix_acp_wma_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +#define mix_acp_wma_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/** + * MIX_ACP_WMA_CHANNEL_MASK: + * @obj: #MixAudioConfigParamsWMA object + * + * MixAudioConfigParamWMA.channel_mask accessor. + * + * Channel mask must be one of the following: + * + * 4: For single (1) channel output. + * + * 3: For stereo (2) channels output. + * + * Only 1 or 2 output channels are supported. + * +*/ +#define MIX_ACP_WMA_CHANNEL_MASK(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->channel_mask) + +/** + * MIX_ACP_WMA_FORMAT_TAG: + * @obj: #MixAudioConfigParamsWMA object + * + * MixAudioConfigParamWMA.format_tag accessor. + * + * In Moorestown, only value 0x0161 combined with use of #MIX_AUDIO_WMA_V9 is supported. +*/ +#define MIX_ACP_WMA_FORMAT_TAG(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->format_tag) + +/** + * MIX_ACP_WMA_BLOCK_ALIGN: + * @obj: #MixAudioConfigParamsWMA object + * + * MixAudioConfigParamWMA.block_align accessor. + * + * Block alignment indicates packet size. Available from ASF Header. +*/ +#define MIX_ACP_WMA_BLOCK_ALIGN(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->block_align) + +/** + * MIX_ACP_WMA_ENCODE_OPT: + * @obj: #MixAudioConfigParamsWMA object + * + * MixAudioConfigParamWMA.wma_encode_opt accessor. + * + * Encoder option available from ASF header. +*/ +#define MIX_ACP_WMA_ENCODE_OPT(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->wma_encode_opt) + +/** + * MIX_ACP_WMA_PCM_BIT_WIDTH: + * @obj: #MixAudioConfigParamsWMA object + * + * MixAudioConfigParamWMA.pcm_bit_width accessor. + * + * Source pcm bit width available from ASF Header. +*/ +#define MIX_ACP_WMA_PCM_BIT_WIDTH(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->pcm_bit_width) + +/* Class Methods */ +/** + * mix_acp_wma_get_version: + * @obj: #MixAudioConfigParamsWMA object + * @returns: MixAudioWMAVersion + * + * Get WMA Version. +*/ +MixAudioWMAVersion mix_acp_wma_get_version(MixAudioConfigParamsWMA *obj); + +/** + * mix_acp_wma_set_version: + * @obj: #MixAudioConfigParamsWMA object + * @ver: MixAudioWMAVersion to set. + * @returns: MIX_RESULT. + * + * Set WMA Version. + * + * In Moorestown, only #MIX_AUDIO_WMA_V9 is supported. +*/ +MIX_RESULT mix_acp_wma_set_version(MixAudioConfigParamsWMA *obj, MixAudioWMAVersion ver); + +#endif /* __MIX_ACP_WMA_H__ */ diff --git a/mix_audio/src/mixaip.c b/mix_audio/src/mixaip.c new file mode 100644 index 0000000..8ee0811 --- /dev/null +++ b/mix_audio/src/mixaip.c @@ -0,0 +1,167 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors.
The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixaip + * @short_description: Initialization parameters object. + * @include: mixaip.h + * + * A data object which stores initialization specific parameters. + * + * Not Implemented in Moorestown. + */ + +#include "mixaip.h" + +//static GType _mix_aip_type = 0; +static MixParamsClass *parent_class = NULL; + +// #define _do_init { _mix_aip_type = g_define_type_id; }; +#define _do_init + +gboolean mix_aip_copy(MixParams* target, const MixParams *src); +MixParams* mix_aip_dup(const MixParams *obj); +gboolean mix_aip_equal(MixParams* first, MixParams *second); +static void mix_aip_finalize(MixParams *obj); + +G_DEFINE_TYPE_WITH_CODE(MixAudioInitParams, mix_aip, MIX_TYPE_PARAMS, _do_init ); + +#if 0 +void _mix_aip_initialize (void) +{ + /* the MixParams types need to be class_ref'd once before it can be + * done from multiple threads; + * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ + g_type_class_ref (mix_aip_get_type ()); +} +#endif + +static void mix_aip_init (MixAudioInitParams *self) +{ + self->reserved1 = self->reserved2 = self->reserved3 = self->reserved4 = NULL; +} + +static void mix_aip_class_init(MixAudioInitParamsClass *klass) +{ + MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); + + /* setup static parent class */ + parent_class = (MixParamsClass *) g_type_class_peek_parent (klass); + + mixparams_class->finalize = mix_aip_finalize; + mixparams_class->copy = (MixParamsCopyFunction)mix_aip_copy; + mixparams_class->dup = (MixParamsDupFunction)mix_aip_dup; + mixparams_class->equal = (MixParamsEqualFunction)mix_aip_equal; +} + +MixAudioInitParams *mix_aip_new(void) +{ + MixAudioInitParams *ret = (MixAudioInitParams *)g_type_create_instance (MIX_TYPE_AUDIOINITPARAMS); + + return ret; +} + +void mix_aip_finalize(MixParams *obj) +{ + /* clean up here. */ + + /* Chain up parent */ + if (parent_class->finalize) + parent_class->finalize(obj); +} + +MixAudioInitParams *mix_aip_ref(MixAudioInitParams *mix) +{ + return (MixAudioInitParams*)mix_params_ref(MIX_PARAMS(mix)); +} + +/** + * mix_aip_dup: + * @obj: a #MixAudioInitParams object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. + */ +MixParams* mix_aip_dup(const MixParams *obj) +{ + MixParams *ret = NULL; + + if (MIX_IS_AUDIOINITPARAMS(obj)) + { + MixAudioInitParams *duplicate = mix_aip_new(); + if (mix_aip_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) + { + ret = MIX_PARAMS(duplicate); + } + else + { + mix_aip_unref(duplicate); + } + } + + return ret; +} + +/** + * mix_aip_copy: + * @target: copy to target + * @src: copy from src + * @returns: boolean indicates if copy is successful. + * + * Copy instance data from @src to @target. + */ +gboolean mix_aip_copy(MixParams* target, const MixParams *src) +{ + if (MIX_IS_AUDIOINITPARAMS(target) && MIX_IS_AUDIOINITPARAMS(src)) + { + // TODO perform copy.
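+ // (MixAudioInitParams currently carries only reserved members, so there is + // nothing to copy at this level; once real members are added, cast the two + // objects and copy the fields here before chaining up.)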
+ //
+ // Now chain up to the base class.
+ // Get the root class from the cached parent_class object. This cached parent_class object has not been overwritten by this current class.
+ // Using the cached parent_class object because this_class would have ->copy pointing to this method!
+ // Cached parent_class contains the class object before it is overwritten by this derived class.
+ // MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+ if (parent_class->copy)
+ {
+ return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src));
+ }
+ else
+ return TRUE;
+ }
+ return FALSE;
+}
+
+/**
+ * mix_aip_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: TRUE if the two instances are equal.
+ *
+ * Compare instance data of @first and @second for equivalence.
+ */
+gboolean mix_aip_equal(MixParams* first, MixParams *second)
+{
+ gboolean ret = FALSE;
+
+ if (MIX_IS_AUDIOINITPARAMS(first) && MIX_IS_AUDIOINITPARAMS(second))
+ {
+ // TODO: do deep compare
+ // No data members to compare yet (only reserved pointers), so treat
+ // the members at this level as equal and chain up to the base class.
+ ret = TRUE;
+
+ if (ret)
+ {
+ // members within this scope equal. chaining up.
+ MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+ if (klass->equal)
+ ret = parent_class->equal(first, second);
+ else
+ ret = TRUE;
+ }
+ }
+
+ return ret;
+}
diff --git a/mix_audio/src/mixaip.h b/mix_audio/src/mixaip.h
new file mode 100644
index 0000000..613ed54
--- /dev/null
+++ b/mix_audio/src/mixaip.h
@@ -0,0 +1,132 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_AUDIOINITPARAMS_H__
+#define __MIX_AUDIOINITPARAMS_H__
+
+
+#include
+
+/**
+ * MIX_TYPE_AUDIOINITPARAMS:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_AUDIOINITPARAMS (mix_aip_get_type ())
+
+/**
+ * MIX_AUDIOINITPARAMS:
+ * @obj: object to be type-casted.
+ *
+ * Type casting.
+ */
+#define MIX_AUDIOINITPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOINITPARAMS, MixAudioInitParams))
+
+/**
+ * MIX_IS_AUDIOINITPARAMS:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixAudioInitParams
+ */
+#define MIX_IS_AUDIOINITPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOINITPARAMS))
+
+/**
+ * MIX_AUDIOINITPARAMS_CLASS:
+ * @klass: class to be type-casted.
+ *
+ * Type casting.
+ */
+#define MIX_AUDIOINITPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOINITPARAMS, MixAudioInitParamsClass))
+
+/**
+ * MIX_IS_AUDIOINITPARAMS_CLASS:
+ * @klass: a class.
+ * + * Checks if the given class is #MixParamsClass + */ +#define MIX_IS_AUDIOINITPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOINITPARAMS)) + +/** + * MIX_AUDIOINITPARAMS_GET_CLASS: + * @obj: a #MixParams object. + * + * Get the class instance of the object. + */ +#define MIX_AUDIOINITPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOINITPARAMS, MixAudioInitParamsClass)) + +typedef struct _MixAudioInitParams MixAudioInitParams; +typedef struct _MixAudioInitParamsClass MixAudioInitParamsClass; + +/** + * MixAudioInitParams: + * @parent: Parent. + * + * @MixAudio initialization parameter object. + */ +struct _MixAudioInitParams +{ + /*< public >*/ + MixParams parent; + + /*< private >*/ + void* reserved1; + void* reserved2; + void* reserved3; + void* reserved4; +}; + +/** + * MixAudioInitParamsClass: + * @parent_class: Parent class. + * + * @MixAudio initialization parameter object class structure. + */ +struct _MixAudioInitParamsClass +{ + /*< public >*/ + MixParamsClass parent_class; + + /* class members */ +}; + +/** + * mix_aip_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_aip_get_type (void); + +/** + * mix_aip_new: + * @returns: A newly allocated instance of #MixAudioInitParams + * + * Use this method to create new instance of #MixAudioInitParams + */ +MixAudioInitParams *mix_aip_new(void); + +/** + * mix_aip_ref: + * @mix: object to add reference + * @returns: the MixAudioInitParams instance where reference count has been increased. + * + * Add reference count. + */ +MixAudioInitParams *mix_aip_ref(MixAudioInitParams *mix); + +/** + * mix_aip_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +#define mix_aip_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +#endif /* __MIX_AUDIOINITPARAMS_H__ */ diff --git a/mix_audio/src/mixaudio.c b/mix_audio/src/mixaudio.c new file mode 100644 index 0000000..6d41350 --- /dev/null +++ b/mix_audio/src/mixaudio.c @@ -0,0 +1,2092 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixaudio + * @short_description: Object to support a single stream playback using hardware accelerated decoder. + * @include: mixaudio.h + * + * #MixAudio object provide thread-safe API for application and/or multimedia framework to take advantage of Intel Smart Sound Technology(TM) driver for hardware audio decode and render. 
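+ *
+ * A minimal playback sketch (error checking omitted; acp stands in for a
+ * fully populated #MixAudioConfigParams, and the input vectors are assumed
+ * to be prepared by the caller; illustrative only):
+ * |[
+ * MixAudio *audio = mix_audio_new();
+ * MixAudioInitParams *aip = mix_aip_new();
+ * mix_audio_initialize(audio, MIX_CODING_DECODE, aip, NULL);
+ * mix_audio_configure(audio, acp, NULL);
+ * mix_audio_decode(audio, iovin, iovincnt, &insize, NULL, 0, NULL);
+ * mix_audio_start(audio);
+ * // ...more mix_audio_decode() calls...
+ * mix_audio_stop_drain(audio);
+ * mix_audio_deinitialize(audio);
+ * ]|
+ * (The full calling sequence is described below.)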
+ *
+ * Each #MixAudio object represents one streaming session with the Intel Smart Sound driver and provides configuration and control of the decoding and playback options.
+ *
+ * The #MixAudio object also supports integration with the Intel Audio Manager service.
+ *
+ * An application can utilize the #MixAudio object by calling the following sequence:
+ *
+ * mix_audio_new() to create a #MixAudio instance.
+ * mix_audio_initialize() to allocate Intel Smart Sound Technology resources.
+ * mix_audio_configure() to configure stream parameters.
+ * mix_audio_decode() can be called repeatedly for decoding and, optionally, rendering.
+ * mix_audio_start() is called after the first mix_audio_decode() call to start rendering.
+ * mix_audio_stop_drain() is called after the last buffer is passed for decoding with mix_audio_decode().
+ * mix_audio_deinitialize() to free resources once playback is completed.
+ *
+ *
+ * Since mix_audio_decode() is a blocking call during playback, the following methods are called in a separate thread to control progress:
+ *
+ * mix_audio_start()
+ * mix_audio_pause()
+ * mix_audio_resume()
+ * mix_audio_stop_drop()
+ *
+ */
+
+/**
+ * SECTION:mixaudiotypes
+ * @title: Mix Audio Types
+ * @short_description: Miscellaneous types used by the #MixAudio API.
+ * @include: mixaudiotypes.h
+ *
+ * Miscellaneous types used by the #MixAudio API.
+*/
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+#include
+#include "mixaudio.h"
+
+#ifdef AUDIO_MANAGER
+#include "amhelper.h"
+#endif
+
+#ifndef MIXAUDIO_CURRENT
+#define MIXAUDIO_CURRENT 0
+#endif
+#ifndef MIXAUDIO_AGE
+#define MIXAUDIO_AGE 0
+#endif
+
+/* Include this now but it will change when driver updates.
+ We would want to build against a kernel dev package if that
+ is available.
+*/
+#include
+#include "intel_sst_ioctl.h"
+#include "sst_proxy.h"
+
+#ifdef G_LOG_DOMAIN
+#undef G_LOG_DOMAIN
+#define G_LOG_DOMAIN ((gchar*)"mixaudio")
+#endif
+
+/**
+ * LPE_DEVICE:
+ *
+ * LPE Device location.
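+ * The location is fixed at build time. In LPESTUB builds the device is
+ * never opened; output goes to a regular file instead (see
+ * mix_audio_initialize_default()).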
+ */ +static const char* LPE_DEVICE="/dev/lpe"; +/* #define LPE_DEVICE "/dev/lpe" */ + +#define _LOCK(obj) g_static_rec_mutex_lock(obj); +#define _UNLOCK(obj) g_static_rec_mutex_unlock(obj); + +#define _UNLOCK_RETURN(obj, res) { _UNLOCK(obj); return res; } + +typedef enum { + MIX_STREAM_PAUSED_DRAINING = MIX_STREAM_LAST, + MIX_STREAM_INTERNAL_LAST +} MixStreamStateInternal; + + +MIX_RESULT mix_audio_initialize_default(MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams); +MIX_RESULT mix_audio_configure_default(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams); +MIX_RESULT mix_audio_decode_default(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize); +MIX_RESULT mix_audio_capture_encode_default(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt); +MIX_RESULT mix_audio_start_default(MixAudio *mix); +MIX_RESULT mix_audio_stop_drop_default(MixAudio *mix); +MIX_RESULT mix_audio_stop_drain_default(MixAudio *mix); +MIX_RESULT mix_audio_pause_default(MixAudio *mix); +MIX_RESULT mix_audio_resume_default(MixAudio *mix); +MIX_RESULT mix_audio_get_timestamp_default(MixAudio *mix, guint64 *msecs); +MIX_RESULT mix_audio_set_mute_default(MixAudio *mix, gboolean mute); +MIX_RESULT mix_audio_get_mute_default(MixAudio *mix, gboolean* muted); +MIX_RESULT mix_audio_get_max_vol_default(MixAudio *mix, gint *maxvol); +MIX_RESULT mix_audio_get_min_vol_default(MixAudio *mix, gint *minvol); +MIX_RESULT mix_audio_get_volume_default(MixAudio *mix, gint *currvol, MixVolType type); +MIX_RESULT mix_audio_set_volume_default(MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype); +MIX_RESULT mix_audio_deinitialize_default(MixAudio *mix); +MIX_RESULT mix_audio_get_stream_state_default(MixAudio *mix, MixStreamState *streamState); +MIX_RESULT mix_audio_get_state_default(MixAudio *mix, MixState *state); +MIX_RESULT mix_audio_is_am_available_default(MixAudio *mix, MixAudioManager am, gboolean *avail); +MIX_RESULT mix_audio_get_output_configuration_default(MixAudio *mix, MixAudioConfigParams **audioconfigparams); + +static gboolean g_IAM_available = FALSE; +MIX_RESULT mix_audio_am_unregister(MixAudio *mix, MixAudioConfigParams *audioconfigparams); +MIX_RESULT mix_audio_am_register(MixAudio *mix, MixAudioConfigParams *audioconfigparams); +MIX_RESULT mix_audio_AM_Change(MixAudioConfigParams *oldparams, MixAudioConfigParams *newparams); + +static void mix_audio_finalize(GObject *obj); +G_DEFINE_TYPE (MixAudio, mix_audio, G_TYPE_OBJECT); + +static gboolean has_FW_INFO = FALSE; +static struct snd_sst_fw_info cur_FW_INFO = {{0}}; + +static MIX_RESULT mix_audio_FW_INFO(MixAudio *mix); +static MIX_RESULT mix_audio_SST_SET_PARAMS(MixAudio *mix, MixAudioConfigParams *params); +static MIX_RESULT mix_audio_SST_writev(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize); +static MIX_RESULT mix_audio_SST_STREAM_DECODE(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize); +static void mix_audio_debug_dump(MixAudio *mix); + +static guint g_log_handler=0; +static void mix_audio_log(const gchar *log_domain, GLogLevelFlags log_level, const gchar *message, gpointer user_data); + +/** + * mix_acp_print_params: + * @obj: TBD + * + * This method is to print acp param. It is a hidden implementation within MixAudioConfigParams. 
+*/ +void mix_acp_print_params(MixAudioConfigParams *obj); + +static void mix_audio_init (MixAudio *self) +{ + self->useIAM = FALSE; + self->streamID = 0; // TODO: Find out the invalid value for stream ID when integrates with IAM. + self->amStreamID = 0; // TODO: as above + self->streamState = MIX_STREAM_NULL; + self->encoding = NULL; + self->fileDescriptor = -1; + self->state = MIX_STATE_UNINITIALIZED; + self->codecMode = MIX_CODING_INVALID; + self->am_registered = FALSE; + + /* private member initialization */ + g_static_rec_mutex_init (&self->streamlock); + g_static_rec_mutex_init (&self->controllock); + + self->audioconfigparams = NULL; + self->deviceState = MIX_AUDIO_DEV_CLOSED; + +#ifdef LPESTUB + g_message("MixAudio running in stub mode!"); + self->ts_last = 0; + self->ts_elapsed = 0; +#endif + + self->bytes_written=0; + +} + +void _mix_aip_initialize (void); + +static void mix_audio_class_init (MixAudioClass *klass) +{ + GObjectClass *gobject_class = (GObjectClass*)klass; + + gobject_class->finalize = mix_audio_finalize; + + // Init thread before any threads/sync object are used. + if (!g_thread_supported ()) g_thread_init (NULL); + + /* Init some global vars */ + g_IAM_available = FALSE; + + // base implementations + klass->initialize = mix_audio_initialize_default; + klass->configure = mix_audio_configure_default; + klass->decode = mix_audio_decode_default; + klass->capture_encode = mix_audio_capture_encode_default; + klass->start = mix_audio_start_default; + klass->stop_drop = mix_audio_stop_drop_default; + klass->stop_drain = mix_audio_stop_drain_default; + klass->pause = mix_audio_pause_default; + klass->resume = mix_audio_resume_default; + klass->get_timestamp = mix_audio_get_timestamp_default; + klass->set_mute = mix_audio_set_mute_default; + klass->get_mute = mix_audio_get_mute_default; + klass->get_max_vol = mix_audio_get_max_vol_default; + klass->get_min_vol = mix_audio_get_min_vol_default; + klass->get_volume = mix_audio_get_volume_default; + klass->set_volume = mix_audio_set_volume_default; + klass->deinitialize = mix_audio_deinitialize_default; + klass->get_stream_state = mix_audio_get_stream_state_default; + klass->get_state = mix_audio_get_state_default; + klass->is_am_available = mix_audio_is_am_available_default; + klass->get_output_configuration = mix_audio_get_output_configuration_default; + + // Set log handler... + if (!g_log_handler) + { + // Get Environment variable + // See mix_audio_log for details + const gchar* loglevel = g_getenv("MIX_AUDIO_DEBUG"); + guint64 ll = 0; + if (loglevel) + { + if (g_strstr_len(loglevel,-1, "0x") == loglevel) + { + // Hex string + ll = g_ascii_strtoull(loglevel+2, NULL, 16); + } + else + { + // Decimal string + ll = g_ascii_strtoull(loglevel, NULL, 10); + } + } + guint32 mask = (guint32)ll; + g_log_handler = g_log_set_handler(G_LOG_DOMAIN, 0xffffffff, mix_audio_log, (gpointer)mask); +/* + g_debug("DEBUG Enabled"); + g_log(G_LOG_DOMAIN, G_LOG_LEVEL_INFO, "%s", "LOG Enabled"); + g_message("MESSAGE Enabled"); + g_warning("WARNING Enabled"); + g_critical("CRITICAL Enabled"); + g_error("ERROR Enabled"); +*/ + } +} + +static void mix_audio_log(const gchar *log_domain, GLogLevelFlags log_level, const gchar *message, gpointer user_data) +{ + // Log message based on a mask. + // Mask could be read from MIX_AUDIO_DEBUG environment variable + // mask is a bit mask specifying the message to print. The lsb (0) is "ERROR" and graduating increasing + // value as describe in GLogLevelFlags structure. 
Not that lsb in GLogLevelFlags is not "ERROR" and + // here we shifted the log_level to ignore the first 2 values in GLogLevelFlags, making ERROR align to + // the lsb. + static const gchar* lognames[] = {"error", "critical", "warning", "message", "log", "debug"}; + guint32 mask = (guint32)user_data & ((G_LOG_LEVEL_MASK & log_level) >> 2); + gint index = 0; + + GTimeVal t = {0}; + + // convert bit mask back to index. + index = ffs(mask) - 1; + + if ((index<0) || (index >= (sizeof(lognames)/sizeof(lognames[0])))) return; + + g_get_current_time(&t); + g_printerr("%" G_GUINT64_FORMAT ":%s-%s: %s\n", + ((guint64)1000000 * t.tv_sec + (guint64)t.tv_usec), + log_domain?log_domain:G_LOG_DOMAIN, + lognames[index], + message?message:"NULL"); +} + +MixAudio *mix_audio_new(void) +{ + MixAudio *ret = g_object_new(MIX_TYPE_AUDIO, NULL); + + return ret; +} + +void mix_audio_finalize(GObject *obj) +{ + /* clean up here. */ + MixAudio *mix = MIX_AUDIO(obj); + + if (G_UNLIKELY(!mix)) return; + + /* + We are not going to check the thread lock anymore in this method. + If a thread is accessing the object it better still have a ref on this + object and in that case, this method won't be called. + + The application have to risk access violation if it calls the methods in + a thread without actually holding a reference. + */ + + g_debug("_finalized(). bytes written=%" G_GUINT64_FORMAT, mix->bytes_written); + + g_static_rec_mutex_free (&mix->streamlock); + g_static_rec_mutex_free (&mix->controllock); + + if (mix->audioconfigparams) + { + mix_acp_unref(mix->audioconfigparams); + mix->audioconfigparams = NULL; + } +} + +MixAudio *mix_audio_ref(MixAudio *mix) +{ + if (G_UNLIKELY(!mix)) return NULL; + + return (MixAudio*)g_object_ref(G_OBJECT(mix)); +} + +MIX_RESULT mix_audio_initialize_default(MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams) +{ + MIX_RESULT ret = MIX_RESULT_FAIL; + + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + // TODO: parse and process MixAudioInitParams. It is ignored for now. + + // initialized must be called with both thread-lock held, so no other operation is allowed. + + // try lock stream thread. If failed, a pending _decode/_encode/_drain is ongoing. + if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE; + + // also lock the control thread lock. + _LOCK(&mix->controllock); + + if (mix->state == MIX_STATE_UNINITIALIZED) + { + // Only allowed in uninitialized state. + switch (mode) + { + case MIX_CODING_DECODE: + case MIX_CODING_ENCODE: + { + // Open device. Same flags to open for decode and encode? 
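+ // Two open paths follow. With LPESTUB defined the "device" is a
+ // regular file (the MIX_AUDIO_OUTPUT env var if set, otherwise a
+ // mixaudio.XXXXXX temp file) so decoded output can be inspected;
+ // a real build opens LPE_DEVICE read/write.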
+#ifdef LPESTUB + //g_debug("Reading env var LPESTUB_FILE for data output file.\n"); + //const char* filename = g_getenv("LPESTUB_FILE"); + gchar *filename = NULL; + GError *err = NULL; + const gchar* fn = NULL; + fn = g_getenv("MIX_AUDIO_OUTPUT"); + if (fn) + mix->fileDescriptor = open(fn, O_RDWR|O_CREAT, S_IRUSR|S_IWUSR); + + if (mix->fileDescriptor == -1) + { + mix->fileDescriptor = g_file_open_tmp ("mixaudio.XXXXXX", &filename, &err); + + if (err) + { + g_warning("Oops, cannot open temp file: Error message: %s", err->message); + } + else + { + g_debug("Opening %s as output data file.\n", filename); + } + } + else + { + g_debug("Opening %s as output data file.\n", fn); + } + if (filename) g_free(filename); +#else + g_debug("Opening %s\n", LPE_DEVICE); + mix->fileDescriptor = open(LPE_DEVICE, O_RDWR); +#endif + if (mix->fileDescriptor != -1) + { + mix->codecMode = mode; + mix->state = MIX_STATE_INITIALIZED; + ret = MIX_RESULT_SUCCESS; + g_debug("open() succeeded. fd=%d", mix->fileDescriptor); + } + else + { + ret = MIX_RESULT_LPE_NOTAVAIL; + } + } + break; + default: + ret = MIX_RESULT_INVALID_PARAM; + break; + } + } + else + { + ret = MIX_RESULT_WRONG_STATE; + } + + _UNLOCK(&mix->controllock); + _UNLOCK(&mix->streamlock); + + return ret; +} + +gboolean mix_audio_am_is_available(void) +{ + // return FALSE for now until IAM is available for integration. + // TODO: Check IAM + return FALSE; +} + +gboolean mix_audio_base_am_is_enabled(MixAudio *mix) +{ + // TODO: Check IAM usage + return FALSE; +} + +/** + * mix_audio_SST_SET_PARAMS: + * @mix: #MixAudio object. + * @params: Audio parameter used to configure SST. + * @returns: #MIX_RESULT indicating configuration result. + * + * This method setup up a SST stream with the given parameters. Note that even though + * this method could succeed and SST stream is setup properly, client may still not be able + * to use the session if other condition are met, such as a successfully set-up IAM, if used. + */ +MIX_RESULT mix_audio_SST_SET_PARAMS(MixAudio *mix, MixAudioConfigParams *params) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + if (mix->state == MIX_STATE_UNINITIALIZED) return MIX_RESULT_NOT_INIT; + + if (!MIX_IS_AUDIOCONFIGPARAMS(params)) return MIX_RESULT_INVALID_PARAM; + + mix_acp_print_params(params); + + struct snd_sst_params sst_params = {0}; + + gboolean converted = mix_sst_params_convert(params, &sst_params); + + if (converted) + { + // Setup the driver structure + // We are assuming the configstream will always be called after open so the codec mode + // should already been setup. + sst_params.stream_id = mix->streamID; + // We are not checking the codecMODE here for out-of-range...assuming we check that + // during init... + if (mix->codecMode == MIX_CODING_ENCODE) + sst_params.ops = STREAM_OPS_CAPTURE; + else sst_params.ops = STREAM_OPS_PLAYBACK; + + // hard-coded to support music only. + sst_params.stream_type = 0x0; // stream_type 0x00 is STREAM_TYPE_MUSIC per SST doc. + + // SET_PARAMS + int retVal = 0; + +#ifdef LPESTUB + // Not calling the ioctl +#else + g_debug("Calling SNDRV_SST_STREAM_SET_PARAMS. fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_SET_PARAMS, &sst_params); + g_debug("_SET_PARAMS returned %d", retVal); +#endif + + if (!retVal) + { + // IOCTL success. + switch (sst_params.result) + { + // Please refers to SST API doc for return value definition. 
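+ // Result codes handled below, per the SST API doc referenced above:
+ //   0 = success, 5 = success with modified stream parameters,
+ //   1 = stream not available, 2 = codec not available,
+ //   3 = codec not supported, 4 = invalid stream parameters,
+ //   6 = invalid stream id; anything else is treated as failure.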
+ case 5: + g_debug("SET_PARAMS succeeded with Stream Parameter Modified."); + case 0: + // driver says ok, too. + ret = MIX_RESULT_SUCCESS; + mix->deviceState = MIX_AUDIO_DEV_ALLOCATED; + mix->streamState = MIX_STREAM_STOPPED; + mix->streamID = sst_params.stream_id; + // clear old params + if (MIX_IS_AUDIOCONFIGPARAMS(mix->audioconfigparams)) + { + mix_acp_unref(mix->audioconfigparams); + mix->audioconfigparams=NULL; + } + // replace with new one. + mix->audioconfigparams = MIX_AUDIOCONFIGPARAMS(mix_params_dup(MIX_PARAMS(params))); + // Note: do not set mix->state here because this state may rely op other than SET_PARAMS + g_debug("SET_PARAMS succeeded streamID=%d.", mix->streamID); + break; + case 1: + ret = MIX_RESULT_STREAM_NOTAVAIL; + g_debug("SET_PARAMS failed STREAM not available."); + break; + case 2: + ret = MIX_RESULT_CODEC_NOTAVAIL; + g_debug("SET_PARAMS failed CODEC not available."); + break; + case 3: + ret = MIX_RESULT_CODEC_NOTSUPPORTED; + g_debug("SET_PARAMS failed CODEC not supported."); + break; + case 4: + ret = MIX_RESULT_INVALID_PARAM; + g_debug("SET_PARAMS failed Invalid Stream Parameters."); + break; + case 6: + g_debug("SET_PARAMS failed Invalid Stream ID."); + default: + ret = MIX_RESULT_FAIL; + g_critical("SET_PARAMS failed unexpectedly. Result code: %u\n", sst_params.result); + break; + } + } + else + { + // log errors + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("Failed to SET_PARAMS. errno:0x%08x. %s\n", errno, strerror(errno)); + } + } + else + { + ret = MIX_RESULT_INVALID_PARAM; + } + + return ret; +} + +MIX_RESULT mix_audio_get_state_default(MixAudio *mix, MixState *state) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + if (state) + *state = mix->state; + else + ret = MIX_RESULT_NULL_PTR; + + return ret; +} + +MIX_RESULT mix_audio_decode_default(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize) +{ + MIX_RESULT ret = MIX_RESULT_FAIL; + + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE; + + if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->streamlock, MIX_RESULT_WRONG_STATE); + + if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DIRECTRENDER) + ret = mix_audio_SST_writev(mix, iovin, iovincnt, insize); + else + ret = mix_audio_SST_STREAM_DECODE(mix, iovin, iovincnt, insize, iovout, iovoutcnt, outsize); + + _UNLOCK(&mix->streamlock); + + return ret; +} + +MIX_RESULT mix_audio_deinitialize_default(MixAudio *mix) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE; + +#ifdef AUDIO_MANAGER + if (mix->amStreamID && (lpe_stream_unregister(mix->amStreamID) < 0)) { + g_debug("lpe_stream_unregister failed\n"); + //return MIX_RESULT_FAIL; // TODO: not sure what to do here + } +#endif + + _LOCK(&mix->controllock); + + if (mix->state == MIX_STATE_UNINITIALIZED) + ret = MIX_RESULT_SUCCESS; + else if ((mix->streamState != MIX_STREAM_STOPPED) && (mix->streamState != MIX_STREAM_NULL)) + ret = MIX_RESULT_WRONG_STATE; + else + { + if (mix->fileDescriptor != -1) + { + g_debug("Closing fd=%d\n", mix->fileDescriptor); + close(mix->fileDescriptor); + mix->fileDescriptor = -1; + mix->deviceState = MIX_AUDIO_DEV_CLOSED; + } + mix->state = MIX_STATE_UNINITIALIZED; + } + + mix->bytes_written = 0; + + _UNLOCK(&mix->controllock); + 
_UNLOCK(&mix->streamlock); + + return ret; +} + + +MIX_RESULT mix_audio_stop_drop_default(MixAudio *mix) +{ + MIX_RESULT ret = MIX_RESULT_FAIL; + + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + _LOCK(&mix->controllock); + + if (mix->state != MIX_STATE_CONFIGURED) + _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + + // Will call DROP even if we are already stopped. It is needed to unblock any pending write() call. +// if (mix->streamState == MIX_STREAM_DRAINING) +// ret = MIX_RESULT_WRONG_STATE; +// else + { + int retVal = 0; +#ifdef LPESTUB + // Not calling ioctl. +#else + g_debug("Calling SNDRV_SST_STREAM_DROP. fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_DROP); + g_debug("_DROP returned %d", retVal); +#endif + + if (!retVal) + { + mix->streamState = MIX_STREAM_STOPPED; + ret = MIX_RESULT_SUCCESS; + } + else + { + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("Failed to stop stream. Error:0x%08x. Unknown stream state.", errno); + } + } + + _UNLOCK(&mix->controllock); + + return ret; +} + +MIX_RESULT mix_audio_stop_drain_default(MixAudio *mix) +{ + MIX_RESULT ret = MIX_RESULT_FAIL; + int retVal = 0; + + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + // No need to lock to check vars that won't be changed in this function + + if (g_static_rec_mutex_trylock(&mix->streamlock)) + { + gboolean doDrain = FALSE; + + if (mix->state != MIX_STATE_CONFIGURED) + _UNLOCK_RETURN(&mix->streamlock, MIX_RESULT_NOT_CONFIGURED); + + _LOCK(&mix->controllock); + { + if (mix->streamState == MIX_STREAM_STOPPED) + ret = MIX_RESULT_SUCCESS; + else if ((mix->streamState == MIX_STREAM_DRAINING) || mix->streamState == MIX_STREAM_PAUSED_DRAINING) + ret = MIX_RESULT_WRONG_STATE; + else + { + doDrain = TRUE; + g_debug("MIX stream is DRAINING"); + mix->streamState = MIX_STREAM_DRAINING; + } + } + _UNLOCK(&mix->controllock); + + + if (doDrain) + { + // Calling the blocking DRAIN without holding the controllock + // TODO: remove this ifdef when API becomes available. + #ifdef LPESTUB + + #else + //g_debug("Calling SNDRV_SST_STREAM_DRAIN. fd=0x%08x", mix->fileDescriptor); + //retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_DRAIN); +// g_warning("Calling SNDRV_SST_STREAM_DROP instead of SNDRV_SST_STREAM_DRAIN here since DRAIN is not yet integrated. There may be data loss. fd=%d", mix->fileDescriptor); + g_debug("Calling SNDRV_SST_STREAM_DRAIN fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_DRAIN); + g_debug("_DRAIN returned %d", retVal); + #endif + + if (retVal) + { + _LOCK(&mix->controllock); + if (mix->streamState != MIX_STREAM_STOPPED) + { + // DRAIN could return failed if DROP is called during DRAIN. + // Any state resulting as a failed DRAIN would be error, execpt STOPPED. + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("Failed to drain stream. Error:0x%08x. Unknown stream state.", errno); + } + _UNLOCK(&mix->controllock); + } + else + { + _LOCK(&mix->controllock); + if ((mix->streamState != MIX_STREAM_DRAINING) && + (mix->streamState != MIX_STREAM_STOPPED)) + { + // State is changed while in DRAINING. This should not be allowed and is a bug. + g_warning("MIX Internal state error! DRAIN state(%u) changed!",mix->streamState); + ret = MIX_RESULT_FAIL; + } + else + { + mix->streamState = MIX_STREAM_STOPPED; + ret = MIX_RESULT_SUCCESS; + } + _UNLOCK(&mix->controllock); + } + } + + _UNLOCK(&mix->streamlock); + } + else + { + // Cannot obtain stream lock meaning there's a pending _decode/_encode. + // Will not proceed. 
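+ // The caller sees MIX_RESULT_WRONG_STATE and may retry the drain
+ // once the in-flight call returns.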
+ ret = MIX_RESULT_WRONG_STATE; + } + + return ret; +} + +MIX_RESULT mix_audio_start_default(MixAudio *mix) +{ + MIX_RESULT ret = MIX_RESULT_FAIL; + + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + _LOCK(&mix->controllock); + + if (mix->state != MIX_STATE_CONFIGURED) + _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + + if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) + _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_WRONGMODE); + + // Note this impl return success even if stream is already started. + switch (mix->streamState) + { + case MIX_STREAM_PLAYING: + case MIX_STREAM_PAUSED: + case MIX_STREAM_PAUSED_DRAINING: + ret = MIX_RESULT_SUCCESS; + break; + case MIX_STREAM_STOPPED: + { + int retVal = 0; +#ifdef LPESTUB + // Not calling ioctl. +#else + g_debug("Calling SNDRV_SST_STREAM_START. fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_START); + g_debug("_START returned %d", retVal); +#endif + if (retVal) + { + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("Fail to START. Error:0x%08x. Stream state unchanged.", errno); + mix_audio_debug_dump(mix); + } + else + { + mix->streamState = MIX_STREAM_PLAYING; + ret = MIX_RESULT_SUCCESS; + } + } + break; + case MIX_STREAM_DRAINING: + default: + ret = MIX_RESULT_WRONG_STATE; + break; + } + + _UNLOCK(&mix->controllock); + +#ifdef LPESTUB + if (MIX_SUCCEEDED(ret)) + { + if (mix->ts_last == 0) + { + GTimeVal tval = {0}; + g_get_current_time(&tval); + mix->ts_last = 1000ll * tval.tv_sec + tval.tv_usec / 1000; + } + } +#endif + return ret; +} + +MIX_RESULT mix_audio_get_version(guint* major, guint *minor) +{ + // simulate the way libtool generate version so the number synchronize with the filename. + if (major) + *major = MIXAUDIO_CURRENT-MIXAUDIO_AGE; + + if (minor) + *minor = MIXAUDIO_AGE; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_audio_configure_default(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + // param checks + if (!MIX_IS_AUDIOCONFIGPARAMS(audioconfigparams)) return MIX_RESULT_NOT_ACP; + if (MIX_ACP_DECODEMODE(audioconfigparams) >= MIX_DECODE_LAST) return MIX_RESULT_INVALID_DECODE_MODE; + if (!mix_acp_is_streamname_valid(audioconfigparams)) return MIX_RESULT_INVALID_STREAM_NAME; + + // If we cannot lock stream thread, data is flowing and we can't configure. + if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE; + + _LOCK(&mix->controllock); + + // Check all unallowed conditions + if (mix->state == MIX_STATE_UNINITIALIZED) + ret = MIX_RESULT_NOT_INIT; // Will not allowed if the state is still UNINITIALIZED + else if ((mix->codecMode != MIX_CODING_DECODE) && (mix->codecMode != MIX_CODING_ENCODE)) + ret = MIX_RESULT_WRONGMODE; // This configure is allowed only in DECODE mode. + else if ((mix->streamState != MIX_STREAM_STOPPED) && (mix->streamState != MIX_STREAM_NULL)) + ret = MIX_RESULT_WRONG_STATE; + + if (!MIX_SUCCEEDED(ret)) + { + // Some check failed. Unlock and return. + _UNLOCK(&mix->controllock); + _UNLOCK(&mix->streamlock); + return ret; + } + + if (audioconfigparams->audio_manager == MIX_AUDIOMANAGER_INTELAUDIOMANAGER) { + mix->useIAM = TRUE; + } + // now configure stream. 
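+ // Configuration is a three-step sequence; failure at any step drops
+ // the object back to MIX_STATE_INITIALIZED instead of
+ // MIX_STATE_CONFIGURED:
+ //   1. mix_audio_am_unregister()  - drop any stale AM registration
+ //   2. mix_audio_SST_SET_PARAMS() - program the SST stream
+ //   3. mix_audio_am_register()    - (re)register with the Audio Manager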
+
+ ret = mix_audio_am_unregister(mix, audioconfigparams);
+
+ if (MIX_SUCCEEDED(ret))
+ {
+ ret = mix_audio_SST_SET_PARAMS(mix, audioconfigparams);
+ }
+
+ if (MIX_SUCCEEDED(ret))
+ {
+ ret = mix_audio_am_register(mix, audioconfigparams);
+ }
+
+ if (MIX_SUCCEEDED(ret))
+ {
+ mix->state = MIX_STATE_CONFIGURED;
+ }
+ else
+ {
+ mix->state = MIX_STATE_INITIALIZED;
+ }
+
+ _UNLOCK(&mix->controllock);
+ _UNLOCK(&mix->streamlock);
+
+ return ret;
+}
+
+MIX_RESULT mix_audio_get_timestamp_default(MixAudio *mix, guint64 *msecs)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ if (!msecs) return MIX_RESULT_NULL_PTR;
+
+ _LOCK(&mix->controllock);
+
+ if (mix->state == MIX_STATE_CONFIGURED)
+ {
+ if ((mix->codecMode == MIX_CODING_DECODE) && (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN))
+ {
+ ret = MIX_RESULT_WRONGMODE;
+ }
+ else {
+
+ unsigned long long ts = 0;
+ int retVal = 0;
+
+#ifdef LPESTUB
+ // For stubbing, just get system clock.
+ if (MIX_ACP_BITRATE(mix->audioconfigparams) > 0)
+ {
+ // use bytes_written and bitrate
+ // to get times in msec.
+ ts = mix->bytes_written * 8000 / MIX_ACP_BITRATE(mix->audioconfigparams);
+ }
+ else if (mix->ts_last)
+ {
+ GTimeVal tval = {0};
+ g_get_current_time(&tval);
+ ts = 1000ll * tval.tv_sec + tval.tv_usec / 1000;
+ ts -= mix->ts_last;
+ ts += mix->ts_elapsed;
+ }
+ else
+ {
+ ts = 0;
+ }
+#else
+ g_debug("Calling SNDRV_SST_STREAM_GET_TSTAMP. fd=%d", mix->fileDescriptor);
+ // Capture the ioctl result so the error branch below can actually fire.
+ retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_GET_TSTAMP, &ts);
+#endif
+
+ if (retVal)
+ {
+ ret = MIX_RESULT_SYSTEM_ERRNO;
+ g_debug("_GET_TSTAMP failed. Error:0x%08x", errno);
+ //ret = MIX_RESULT_FAIL;
+ mix_audio_debug_dump(mix);
+ }
+ else
+ {
+ *msecs = ts;
+ g_debug("_GET_TSTAMP returned %" G_GUINT64_FORMAT, ts);
+ }
+ }
+ }
+ else
+ ret = MIX_RESULT_NOT_CONFIGURED;
+
+ _UNLOCK(&mix->controllock);
+
+ return ret;
+}
+
+gboolean mix_audio_AM_Change(MixAudioConfigParams *oldparams, MixAudioConfigParams *newparams)
+{
+ if (g_strcmp0(oldparams->stream_name, newparams->stream_name) == 0) {
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+MIX_RESULT mix_audio_am_unregister(MixAudio *mix, MixAudioConfigParams *audioconfigparams)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ if (mix->am_registered && MIX_IS_AUDIOCONFIGPARAMS(mix->audioconfigparams) && MIX_IS_AUDIOCONFIGPARAMS(audioconfigparams))
+ {
+ // we have 2 params. let's check
+ if ((MIX_ACP_DECODEMODE(mix->audioconfigparams) != MIX_ACP_DECODEMODE(audioconfigparams)) ||
+ mix_audio_AM_Change(mix->audioconfigparams, audioconfigparams)) //TODO: add checking for SST change
+ {
+ // decode mode change.
+ if (mix->amStreamID > 0) {
+ if (lpe_stream_unregister(mix->amStreamID) != 0) {
+ return MIX_RESULT_FAIL;
+ }
+ mix->am_registered = FALSE;
+ }
+ }
+ }
+
+ return ret;
+}
+
+MIX_RESULT mix_audio_am_register(MixAudio *mix, MixAudioConfigParams *audioconfigparams)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ gint32 codec_mode = -1;
+
+ if (mix->codecMode == MIX_CODING_DECODE)
+ codec_mode = 0;
+ else if (mix->codecMode == MIX_CODING_ENCODE)
+ codec_mode = 1;
+ else
+ return MIX_RESULT_FAIL; // TODO: what to do when fail?
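+ // lpe_stream_register() return convention assumed below: a
+ // non-negative value is the AM stream id, -1 is a general failure,
+ // and -2 means direct render is not available (per the AM spec).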
+ +#ifdef AUDIO_MANAGER + if (audioconfigparams->stream_name == NULL) + return MIX_RESULT_FAIL; + +// if AM is enable, and not_registered, then register + if (mix->useIAM && !mix->am_registered) { + gint32 amStreamID = lpe_stream_register(mix->streamID, "music", audioconfigparams->stream_name, codec_mode); + + if (amStreamID == -1){ + mix->amStreamID = 0; + return MIX_RESULT_FAIL; + } + else if (amStreamID == -2) { // -2: Direct render not avail, see AM spec + mix->amStreamID = 0; + return MIX_RESULT_DIRECT_NOTAVAIL; + } + mix->am_registered = TRUE; + mix->amStreamID = amStreamID; + } +#endif + + return ret; +} + +MIX_RESULT mix_audio_capture_encode_default(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt) +{ + struct iovec *vec; + gint bytes_read; + + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + // TODO: set count limit + if (iovoutcnt < 1) { + return MIX_RESULT_INVALID_COUNT; + } + + if (iovout == NULL) + return MIX_RESULT_NULL_PTR; + + vec = (struct iovec *) g_alloca(sizeof(struct iovec) * iovoutcnt); + if (!vec) return MIX_RESULT_NO_MEMORY; + + gint i; + for (i=0; i < iovoutcnt; i++) + { + vec[i].iov_base = iovout[i].data; + vec[i].iov_len = iovout[i].size; + } + + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "begin readv()\n"); + bytes_read = readv(mix->fileDescriptor, vec, iovoutcnt); + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "end readv(), return: %d\n", bytes_read); + if (bytes_read < 0) { // TODO: should not be 0, but driver return 0 right now + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_ERROR, "return: %d\n", bytes_read); + return MIX_RESULT_FAIL; + } +/* + gint bytes_count=0; + for (i=0; i < iovoutcnt; i++) + { + bytes_count += iovout[i].size; + } + iovout[i].size = bytes_read - bytes_count; +*/ + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_audio_get_max_vol_default(MixAudio *mix, gint *maxvol) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (!maxvol) return MIX_RESULT_NULL_PTR; + + _LOCK(&mix->controllock); + + if (!has_FW_INFO) + { + ret = mix_audio_FW_INFO(mix); + } + + if (MIX_SUCCEEDED(ret)) + { + *maxvol = (gint)cur_FW_INFO.pop_info.max_vol; + } + + _UNLOCK(&mix->controllock); + + return ret; +} + + +MIX_RESULT mix_audio_get_min_vol_default(MixAudio *mix, gint *minvol) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (!minvol) return MIX_RESULT_NULL_PTR; + + _LOCK(&mix->controllock); + + if (!has_FW_INFO) + { + ret = mix_audio_FW_INFO(mix); + } + + if (MIX_SUCCEEDED(ret)) + { + *minvol = (gint)cur_FW_INFO.pop_info.min_vol; + } + + _UNLOCK(&mix->controllock); + + return ret; +} + +MIX_RESULT mix_audio_get_stream_state_default(MixAudio *mix, MixStreamState *streamState) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + if (!streamState) return MIX_RESULT_NULL_PTR; + + _LOCK(&mix->controllock); + + if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + + // PAUSED_DRAINING is internal state. 
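+ // Map it to the public MIX_STREAM_PAUSED so callers never observe a
+ // value outside the published MixStreamState range.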
+ if (mix->streamState == MIX_STREAM_PAUSED_DRAINING) + *streamState = MIX_STREAM_PAUSED; + else + *streamState = mix->streamState; + + _UNLOCK(&mix->controllock); + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_audio_get_volume_default(MixAudio *mix, gint *currvol, MixVolType type) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + struct snd_sst_vol vol = {0}; + + if (!currvol) return MIX_RESULT_NULL_PTR; + if ((type != MIX_VOL_PERCENT) && (type != MIX_VOL_DECIBELS)) return MIX_RESULT_INVALID_PARAM; + + _LOCK(&mix->controllock); + + if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + + vol.stream_id = mix->streamID; + + int retVal = 0; + +#ifdef LPESTUB + // Not calling. +#else + g_debug("Calling SNDRV_SST_GET_VOL. fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_GET_VOL, &vol); + g_debug("SNDRV_SST_GET_VOL returned %d. vol=%d", retVal, vol.volume); +#endif + + if (retVal) + { + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("_GET_VOL failed. Error:0x%08x", errno); + mix_audio_debug_dump(mix); + } + else + { + gint maxvol = 0; + ret = mix_audio_get_max_vol(mix, &maxvol); + + if (MIX_SUCCEEDED(ret)) + { + if (type == MIX_VOL_PERCENT) + *currvol = (maxvol!=0)?((vol.volume * 100) / maxvol):0; + else + *currvol = vol.volume; + } + } + + _UNLOCK(&mix->controllock); + + return ret; +} + +MIX_RESULT mix_audio_get_mute_default(MixAudio *mix, gboolean* muted) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + return ret; +} + +MIX_RESULT mix_audio_set_mute_default(MixAudio *mix, gboolean mute) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + struct snd_sst_mute m = { 0 }; + + if (mute) m.mute = 1; + else m.mute = 0; + + _LOCK(&mix->controllock); + + if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + + m.stream_id = mix->streamID; + + int retVal = 0; + +#ifdef LPESTUB + // Not calling. +#else + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_MUTE, &m); +#endif + + if (retVal) + { + //ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("_MUTE failed. Error:0x%08x", errno); + mix_audio_debug_dump(mix); + } + + _UNLOCK(&mix->controllock); + + return ret; +} + +MIX_RESULT mix_audio_pause_default(MixAudio *mix) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + _LOCK(&mix->controllock); + + if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + + if (mix->streamState == MIX_STREAM_PAUSED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_SUCCESS); + + if ((mix->streamState != MIX_STREAM_PLAYING) && (mix->streamState != MIX_STREAM_DRAINING)) + _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_WRONG_STATE); + + int retVal = 0; + +#ifdef LPESTUB + // Not calling +#else + g_debug("Calling SNDRV_SST_STREAM_PAUSE. fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_PAUSE); + g_debug("_PAUSE returned %d", retVal); +#endif + + if (retVal) + { + if (mix->streamState == MIX_STREAM_DRAINING) + { + // if stream state has been DRAINING, DRAIN could become successful during the PAUSE call, but not yet have chance to update streamState since we now hold the lock. + // In this case, the mix_streamState becomes out-of-sync with the actual playback state. 
PAUSE failed due to stream already STOPPED but mix->streamState remains at "DRAINING"
+ // On the other hand, we can't let DRAIN hold the lock the entire time.
+ // We would not know if we fail PAUSE due to DRAINING, or a valid reason.
+ // Need a better mechanism to sync DRAINING.
+ // DRAINING is not likely a problem for resume, as long as the PAUSED state is set when stream is really PAUSED.
+ ret = MIX_RESULT_NEED_RETRY;
+ g_warning("PAUSE failed while DRAINING. Draining could be just completed. Retry needed.");
+ }
+ else
+ {
+ ret = MIX_RESULT_SYSTEM_ERRNO;
+ g_debug("_PAUSE failed. Error:0x%08x", errno);
+ mix_audio_debug_dump(mix);
+ }
+ }
+ else
+ {
+ if (mix->streamState == MIX_STREAM_DRAINING)
+ {
+ mix->streamState = MIX_STREAM_PAUSED_DRAINING;
+ }
+ else
+ {
+ mix->streamState = MIX_STREAM_PAUSED;
+ }
+ }
+
+ _UNLOCK(&mix->controllock);
+
+#ifdef LPESTUB
+ if (MIX_SUCCEEDED(ret))
+ {
+ GTimeVal tval = {0};
+ g_get_current_time(&tval);
+ guint64 ts = 1000ll * tval.tv_sec + tval.tv_usec / 1000;
+ mix->ts_elapsed += ts - mix->ts_last;
+ mix->ts_last = 0;
+ }
+#endif
+ return ret;
+}
+
+MIX_RESULT mix_audio_resume_default(MixAudio *mix)
+{
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ _LOCK(&mix->controllock);
+
+ if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED);
+
+ if ((mix->streamState == MIX_STREAM_PLAYING) || (mix->streamState == MIX_STREAM_DRAINING))
+ _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_SUCCESS);
+
+ if ((mix->streamState != MIX_STREAM_PAUSED_DRAINING) && (mix->streamState != MIX_STREAM_PAUSED))
+ _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_WRONG_STATE);
+
+ int retVal = 0;
+
+#ifdef LPESTUB
+ // Not calling
+#else
+ g_debug("Calling SNDRV_SST_STREAM_RESUME");
+ retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_RESUME);
+ g_debug("_STREAM_RESUME returned %d", retVal);
+#endif
+
+ if (retVal)
+ {
+ ret = MIX_RESULT_SYSTEM_ERRNO;
+ g_debug("_RESUME failed. Error:0x%08x", errno);
+ mix_audio_debug_dump(mix);
+ }
+ else // only advance the stream state when the RESUME actually succeeded
+ {
+ if (mix->streamState == MIX_STREAM_PAUSED_DRAINING)
+ mix->streamState = MIX_STREAM_DRAINING;
+ else
+ mix->streamState = MIX_STREAM_PLAYING;
+ }
+
+ _UNLOCK(&mix->controllock);
+
+#ifdef LPESTUB
+ if (MIX_SUCCEEDED(ret))
+ {
+ GTimeVal tval = {0};
+ g_get_current_time(&tval);
+ guint64 ts = 1000ll * tval.tv_sec + tval.tv_usec / 1000;
+ mix->ts_last = ts;
+ }
+#endif
+
+ return ret;
+}
+
+MIX_RESULT mix_audio_set_volume_default(MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype)
+{
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ struct snd_sst_vol vol = {0};
+
+ vol.ramp_duration = msecs;
+ vol.ramp_type = ramptype; // TODO: confirm the mappings between Mix and SST.
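+ // For MIX_VOL_PERCENT the request is rescaled against the firmware's
+ // maximum volume before the ioctl; e.g. with maxvol 255, a request of
+ // 50 percent becomes vol.volume = 50 * 255 / 100 = 127 (integer math).
+ // MIX_VOL_DECIBELS values are passed through unchanged.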
+ + if (!mix) return MIX_RESULT_NULL_PTR; + + if ((type != MIX_VOL_PERCENT) && (type != MIX_VOL_DECIBELS)) return MIX_RESULT_INVALID_PARAM; + + _LOCK(&mix->controllock); + + if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + + vol.stream_id = mix->streamID; + + if (type == MIX_VOL_DECIBELS) + { + vol.volume = currvol; + } + else + { + gint maxvol = 0; + ret = mix_audio_get_max_vol(mix, &maxvol); + + if (!maxvol) + g_critical("Max Vol is 0!"); + + if (MIX_SUCCEEDED(ret)) + { + vol.volume = currvol * maxvol / 100; + } + } + + int retVal = 0; + +#ifdef LPESTUB + // Not calling +#else + g_debug("calling SNDRV_SST_SET_VOL vol=%d", vol.volume); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_SET_VOL, &vol); + g_debug("SNDRV_SST_SET_VOL returned %d", retVal); +#endif + + if (retVal) + { + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("_SET_VOL failed. Error:0x%08x", errno); + mix_audio_debug_dump(mix); + } + + _UNLOCK(&mix->controllock); + + return ret; +} + +MIX_RESULT mix_audio_FW_INFO(MixAudio *mix) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + _LOCK(&mix->controllock); + + // This call always get the fw info. + int retVal = 0; + +#ifdef LPESTUB + // Not calling. +#else + g_debug("calling SNDRV_SST_FW_INFO fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_FW_INFO, &cur_FW_INFO); + g_debug("SNDRV_SST_FW_INFO returned %d", retVal); +#endif + + if (!retVal) + { + has_FW_INFO = TRUE; + } + else + { + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("_FW_INFO failed. Error:0x%08x", errno); + mix_audio_debug_dump(mix); + } + + _UNLOCK(&mix->controllock); + + return ret; +} + + +static MIX_RESULT mix_audio_SST_writev(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + +/* + definition of "struct iovec" used by writev: + struct iovec { + void *iov_base; + size_t iov_len; + }; +*/ + + if (!mix) return MIX_RESULT_NULL_PTR; + + size_t total_bytes = 0; + // NOTE: we may want to find a way to avoid this copy. + struct iovec *in = (struct iovec*)g_alloca(sizeof(struct iovec) * iovincnt); + if (!in) return MIX_RESULT_NO_MEMORY; + + int i; + for (i=0;iaudioconfigparams) > 0) + { + wait_time = total_bytes*8*1000*1000/MIX_ACP_BITRATE(mix->audioconfigparams); + // g_debug("To wait %lu usec for writev() to simulate blocking\n", wait_time); + } + GTimer *timer = g_timer_new(); + g_timer_start(timer); + + g_debug("calling writev(fd=%d)", mix->fileDescriptor); + written = writev(mix->fileDescriptor, in, iovincnt); + if (written >= 0) mix->bytes_written += written; + g_debug("writev() returned %d. Total %" G_GUINT64_FORMAT, written, mix->bytes_written); + /* Now since writing to file rarely block, we put timestamp there to block.*/ + g_timer_stop(timer); + gulong elapsed = 0; + g_timer_elapsed(timer, &elapsed); + g_timer_destroy(timer); + // g_debug("writev() returned in %lu usec\n", elapsed); + if ((MIX_ACP_BITRATE(mix->audioconfigparams) > 0) && (wait_time > elapsed)) + { + wait_time -= elapsed; + g_usleep(wait_time); + } +#else + g_debug("calling writev(fd=%d) with %d", mix->fileDescriptor, total_bytes); + written = writev(mix->fileDescriptor, in, iovincnt); + if (written > 0) mix->bytes_written += written; + g_debug("writev() returned %d. 
Total %" G_GUINT64_FORMAT, written, mix->bytes_written); +#endif + + if (written < 0) + { + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("writev() failed. Error:0x%08x", errno); + } + else + { + // guranttee written is positive value before sign extending it. + if (insize) *insize = (guint64)written; + if (written != total_bytes) + { + g_warning("writev() wrote only %d out of %d", written, total_bytes); + } + } + + return ret; +} + +static MIX_RESULT mix_audio_SST_STREAM_DECODE(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + int retVal = 0; + + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + if ((iovout == NULL) || (iovoutcnt <= 0)) + { + g_critical("Wrong mode. Please report a bug..."); + return MIX_RESULT_NULL_PTR; + } + + g_message("Input entries=%d. Output entries=%d", iovincnt, iovoutcnt); + + struct snd_sst_buff_entry *ientries = NULL; + struct snd_sst_buff_entry *oentries = NULL; + + ientries = (struct snd_sst_buff_entry*)g_alloca(sizeof(struct snd_sst_buff_entry) * iovincnt); + oentries = (struct snd_sst_buff_entry*)g_alloca(sizeof(struct snd_sst_buff_entry) * iovoutcnt); + + if (!ientries || !oentries) return MIX_RESULT_NO_MEMORY; + + struct snd_sst_dbufs dbufs = {0}; + + struct snd_sst_buffs ibuf = {0}; + struct snd_sst_buffs obuf = {0}; + + ibuf.entries = iovincnt; + ibuf.type = SST_BUF_USER; + ibuf.buff_entry = ientries; + + obuf.entries = iovoutcnt; + obuf.type = SST_BUF_USER; + obuf.buff_entry = oentries; + + dbufs.ibufs = &ibuf; + dbufs.obufs = &obuf; + + int i = 0; + for (i=0;i1) + { + for (i=0;ifileDescriptor); + written = writev(mix->fileDescriptor, in, iovincnt); + if (written >= 0) + { + mix->bytes_written += written; + dbufs.output_bytes_produced = written; + dbufs.input_bytes_consumed = written; + } + g_debug("stub STREAM_DECODE (writev) returned %d. Total %" G_GUINT64_FORMAT, written, mix->bytes_written); +#else + g_debug("calling SNDRV_SST_STREAM_DECODE fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_DECODE, &dbufs); + g_debug("SNDRV_SST_STREAM_DECODE returned %d", retVal); +#endif + + if (retVal) + { + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("_STREAM_DECODE failed. 
Error:0x%08x", errno); + mix_audio_debug_dump(mix); + } + else + { + if (insize) *insize = dbufs.input_bytes_consumed; + if (outsize) *outsize = dbufs.output_bytes_produced; + g_message("consumed=%" G_GUINT64_FORMAT " produced=%" G_GUINT64_FORMAT, dbufs.input_bytes_consumed, dbufs.output_bytes_produced); + } + + return ret; +} + +// Starting interface +//MIX_RESULT mix_audio_get_version(guint* major, guint *minor); + +MIX_RESULT mix_audio_initialize(MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_VERBOSE, "mix_audio_initialize\n"); + + if (!klass->initialize) + return MIX_RESULT_FAIL; // TODO: add more descriptive error + +#ifdef AUDIO_MANAGER + if (dbus_init() < 0) { + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_ERROR, "Failed to connect to dbus\n"); +// commented out, gracefully exit right now +// return MIX_RESULT_FAIL; // TODO: add more descriptive error + } +#endif + + return klass->initialize(mix, mode, aip, drminitparams); +} + +MIX_RESULT mix_audio_configure(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->configure) + return MIX_RESULT_FAIL; + + return klass->configure(mix, audioconfigparams, drmparams); +} + +MIX_RESULT mix_audio_decode(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->decode) + return MIX_RESULT_FAIL; + + return klass->decode(mix, iovin, iovincnt, insize, iovout, iovoutcnt, outsize); +} + +MIX_RESULT mix_audio_capture_encode(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->capture_encode) + return MIX_RESULT_FAIL; + + return klass->capture_encode(mix, iovout, iovoutcnt); +} + +MIX_RESULT mix_audio_start(MixAudio *mix) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->start) + return MIX_RESULT_FAIL; + + return klass->start(mix); +} + +MIX_RESULT mix_audio_stop_drop(MixAudio *mix) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->stop_drop) + return MIX_RESULT_FAIL; + + return klass->stop_drop(mix); +} + +MIX_RESULT mix_audio_stop_drain(MixAudio *mix) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->stop_drain) + return MIX_RESULT_FAIL; + + return klass->stop_drain(mix); +} + +MIX_RESULT mix_audio_pause(MixAudio *mix) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->pause) + return MIX_RESULT_FAIL; + + return klass->pause(mix); +} + +MIX_RESULT mix_audio_resume(MixAudio *mix) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->resume) + return MIX_RESULT_FAIL; + + return klass->resume(mix); +} + +MIX_RESULT mix_audio_get_timestamp(MixAudio *mix, guint64 *msecs) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = 
MIX_AUDIO_GET_CLASS(mix); + + if (!klass->get_timestamp) + return MIX_RESULT_FAIL; + + return klass->get_timestamp(mix, msecs); +} + +MIX_RESULT mix_audio_get_mute(MixAudio *mix, gboolean* muted) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->get_mute) + return MIX_RESULT_FAIL; + + return klass->get_mute(mix, muted); +} + +MIX_RESULT mix_audio_set_mute(MixAudio *mix, gboolean mute) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->set_mute) + return MIX_RESULT_FAIL; + + return klass->set_mute(mix, mute); +} + +MIX_RESULT mix_audio_get_max_vol(MixAudio *mix, gint *maxvol) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->get_max_vol) + return MIX_RESULT_FAIL; + + return klass->get_max_vol(mix, maxvol); +} + +MIX_RESULT mix_audio_get_min_vol(MixAudio *mix, gint *minvol) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->get_min_vol) + return MIX_RESULT_FAIL; + + return klass->get_min_vol(mix, minvol); +} + +MIX_RESULT mix_audio_get_volume(MixAudio *mix, gint *currvol, MixVolType type) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->get_volume) + return MIX_RESULT_FAIL; + + return klass->get_volume(mix, currvol, type); +} + +MIX_RESULT mix_audio_set_volume(MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->set_volume) + return MIX_RESULT_FAIL; + + return klass->set_volume(mix, currvol, type, msecs, ramptype); +} + +MIX_RESULT mix_audio_deinitialize(MixAudio *mix) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->deinitialize) + return MIX_RESULT_FAIL; + + return klass->deinitialize(mix); +} + +MIX_RESULT mix_audio_get_stream_state(MixAudio *mix, MixStreamState *streamState) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->get_stream_state) + return MIX_RESULT_FAIL; + + return klass->get_stream_state(mix, streamState); +} + +MIX_RESULT mix_audio_get_state(MixAudio *mix, MixState *state) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->get_state) + return MIX_RESULT_FAIL; + + return klass->get_state(mix, state); +} + +MIX_RESULT mix_audio_is_am_available_default(MixAudio *mix, MixAudioManager am, gboolean *avail) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (avail) + *avail = FALSE; + else + ret = MIX_RESULT_NULL_PTR; + + return ret; +} + +MIX_RESULT mix_audio_is_am_available(MixAudio *mix, MixAudioManager am, gboolean *avail) +{ + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->is_am_available) + return MIX_RESULT_FAIL; + + return klass->is_am_available(mix, am, avail); +} + +const gchar* dbgstr_UNKNOWN="UNKNOWN"; + +static const gchar* _mix_stream_state_get_name (MixStreamState s) +{ + static const gchar *MixStreamStateNames[] = { + "MIX_STREAM_NULL", + "MIX_STREAM_STOPPED", + "MIX_STREAM_PLAYING", + "MIX_STREAM_PAUSED", + "MIX_STREAM_DRAINING", + "MIX_STREAM_PAUSED_DRAINING", + "MIX_STREAM_INTERNAL_LAST" + }; + + const gchar *ret = 
dbgstr_UNKNOWN; + + if (s < sizeof(MixStreamStateNames)/sizeof(MixStreamStateNames[0])) + { + ret = MixStreamStateNames[s]; + } + + return ret; +} + +static const gchar* _mix_state_get_name(MixState s) +{ + static const gchar* MixStateNames[] = { + "MIX_STATE_NULL", + "MIX_STATE_UNINITIALIZED", + "MIX_STATE_INITIALIZED", + "MIX_STATE_CONFIGURED", + "MIX_STATE_LAST" + }; + + const gchar *ret = dbgstr_UNKNOWN; + + if (s < sizeof(MixStateNames)/sizeof(MixStateNames[0])) + { + ret = MixStateNames[s]; + } + + return ret; +} + +static const gchar* _mix_codec_mode_get_name(MixCodecMode s) +{ + static const gchar* MixCodecModeNames[] = { + "MIX_CODING_INVALID", + "MIX_CODING_ENCODE", + "MIX_CODING_DECODE", + "MIX_CODING_LAST" + }; + + const gchar *ret = dbgstr_UNKNOWN; + + if (s < sizeof(MixCodecModeNames)/sizeof(MixCodecModeNames[0])) + { + ret = MixCodecModeNames[s]; + } + + return ret; +} + +static const gchar* _mix_device_state_get_name(MixDeviceState s) +{ + static const gchar* MixDeviceStateNames[] = { + "MIX_AUDIO_DEV_CLOSED", + "MIX_AUDIO_DEV_OPENED", + "MIX_AUDIO_DEV_ALLOCATED" + }; + + const gchar *ret = dbgstr_UNKNOWN; + + if (s < sizeof(MixDeviceStateNames)/sizeof(MixDeviceStateNames[0])) + { + ret = MixDeviceStateNames[s]; + } + + return ret; +} + +void mix_audio_debug_dump(MixAudio *mix) +{ + const gchar* prefix="MixAudio:"; + + if (!MIX_IS_AUDIO(mix)) + { + g_debug("%s Not a valid MixAudio object.", prefix); + return; + } + + g_debug("%s streamState(%s)", prefix, _mix_stream_state_get_name(mix->streamState)); + g_debug("%s encoding(%s)", prefix, mix->encoding?mix->encoding:dbgstr_UNKNOWN); + g_debug("%s fileDescriptor(%d)", prefix, mix->fileDescriptor); + g_debug("%s state(%s)", prefix, _mix_state_get_name(mix->state)); + g_debug("%s codecMode(%s)", prefix, _mix_codec_mode_get_name(mix->codecMode)); + + // Private members + g_debug("%s streamID(%d)", prefix, mix->streamID); + //GStaticRecMutex streamlock; // lock that must be acquired to invoke stream method. + //GStaticRecMutex controllock; // lock that must be acquired to call control function. + if (MIX_IS_AUDIOCONFIGPARAMS(mix->audioconfigparams)) + { + // TODO: print audioconfigparams + } + else + { + g_debug("%s audioconfigparams(NULL)", prefix); + } + + g_debug("%s deviceState(%s)", prefix, _mix_device_state_get_name(mix->deviceState)); + + g_debug("%s ts_last(%" G_GUINT64_FORMAT ")", prefix, mix->ts_last); + g_debug("%s ts_elapsed(%" G_GUINT64_FORMAT ")", prefix, mix->ts_elapsed); + g_debug("%s bytes_written(%" G_GUINT64_FORMAT ")", prefix, mix->bytes_written); + + return; +} + +MIX_RESULT mix_audio_get_output_configuration(MixAudio *mix, MixAudioConfigParams **audioconfigparams) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->get_output_configuration) + return MIX_RESULT_FAIL; + + return klass->get_output_configuration(mix, audioconfigparams); +} + +MIX_RESULT mix_audio_get_output_configuration_default(MixAudio *mix, MixAudioConfigParams **audioconfigparams) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + struct snd_sst_get_stream_params stream_params = {{0}}; + MixAudioConfigParams *p = NULL; + int retVal = 0; + + if (G_UNLIKELY(!mix || !audioconfigparams)) return MIX_RESULT_NULL_PTR; + + _LOCK(&mix->controllock); + + if (mix->state <= MIX_STATE_UNINITIALIZED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_INIT); + +#ifdef LPESTUB +#else + // Check only if we are initialized. + g_debug("Calling SNDRV_SST_STREAM_GET_PARAMS. 
fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_GET_PARAMS, &stream_params); + g_debug("_GET_PARAMS returned %d", retVal); +#endif + + _UNLOCK(&mix->controllock); + + if (retVal) + { + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("Failed to GET_PARAMS. errno:0x%08x. %s\n", errno, strerror(errno)); + } + else + { + p = mix_sst_params_to_acp(&stream_params); + *audioconfigparams = p; + } + + return ret; +} + +MIX_RESULT mix_audio_get_stream_byte_decoded(MixAudio *mix, guint64 *byte) +{ + return MIX_RESULT_NOT_SUPPORTED; +} + diff --git a/mix_audio/src/mixaudio.h b/mix_audio/src/mixaudio.h new file mode 100644 index 0000000..a3cef5a --- /dev/null +++ b/mix_audio/src/mixaudio.h @@ -0,0 +1,574 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_AUDIO_H__ +#define __MIX_AUDIO_H__ + +#include +#include "mixacp.h" +#include "mixaip.h" +#include "mixdrmparams.h" +#include "mixresult.h" +#include "mixaudiotypes.h" + +/* + * Type macros. + */ +#define MIX_TYPE_AUDIO (mix_audio_get_type ()) +#define MIX_AUDIO(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIO, MixAudio)) +#define MIX_IS_AUDIO(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIO)) +#define MIX_AUDIO_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIO, MixAudioClass)) +#define MIX_IS_AUDIO_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIO)) +#define MIX_AUDIO_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIO, MixAudioClass)) + +typedef struct _MixAudio MixAudio; +typedef struct _MixAudioClass MixAudioClass; + +/** + * MixStreamState: + * @MIX_STREAM_NULL: Stream is not allocated. + * @MIX_STREAM_STOPPED: Stream is at STOP state. This is the only state DNR is allowed. + * @MIX_STREAM_PLAYING: Stream is at Playing state. + * @MIX_STREAM_PAUSED: Stream is Paused. + * @MIX_STREAM_DRAINING: Stream is draining -- remaining of the buffer in the device are playing. This state is special due to the limitation that no other control operations are allowed at this state. Stream will become @MIX_STREAM_STOPPED automatically when this data draining has completed. + * @MIX_STREAM_LAST: Last index in the enumeration. + * + * Stream State during Decode and Render or Encode mode. These states do not apply to Decode and Return mode. 
+ */ +typedef enum { + MIX_STREAM_NULL=0, + MIX_STREAM_STOPPED, + MIX_STREAM_PLAYING, + MIX_STREAM_PAUSED, + MIX_STREAM_DRAINING, + MIX_STREAM_LAST +} MixStreamState; + +/** + * MixState: + * @MIX_STATE_UNINITIALIZED: MIX is not initialized. + * @MIX_STATE_INITIALIZED: MIX is initialized. + * @MIX_STATE_CONFIGURED: MIX is configured successfully. + * @MIX_STATE_LAST: Last index in the enumeration. + * + * The various states the device can be in. + */ +typedef enum { + MIX_STATE_NULL=0, + MIX_STATE_UNINITIALIZED, + MIX_STATE_INITIALIZED, + MIX_STATE_CONFIGURED, + MIX_STATE_LAST +} MixState; + +/** + * MixCodecMode: + * @MIX_CODING_INVALID: Indicates the device is uninitialized for any mode. + * @MIX_CODING_ENCODE: Indicates the device is opened for encoding. + * @MIX_CODING_DECODE: Indicates the device is opened for decoding. + * @MIX_CODING_LAST: Last index in the enumeration. + * + * Mode in which the device is operating. See mix_audio_initialize(). + */ +typedef enum { + MIX_CODING_INVALID=0, + MIX_CODING_ENCODE, + MIX_CODING_DECODE, + MIX_CODING_LAST +} MixCodecMode; + +/** + * MixVolType: + * @MIX_VOL_PERCENT: volume is expressed in percentage. + * @MIX_VOL_DECIBELS: volume is expressed in decibels. + * @MIX_VOL_LAST: last entry. + * + * See mix_audio_get_volume() and mix_audio_set_volume(). + */ +typedef enum { + MIX_VOL_PERCENT=0, + MIX_VOL_DECIBELS, + MIX_VOL_LAST +} MixVolType; + +/** + * MixVolRamp: + * @MIX_RAMP_LINEAR: volume is ramped linearly. + * @MIX_RAMP_EXPONENTIAL: volume is ramped exponentially. + * @MIX_RAMP_LAST: last entry. + * + * See mix_audio_get_volume() and mix_audio_set_volume(). + */ +typedef enum +{ + MIX_RAMP_LINEAR = 0, + MIX_RAMP_EXPONENTIAL, + MIX_RAMP_LAST +} MixVolRamp; + +/** + * MixIOVec: + * @data: data pointer + * @size: size of buffer in @data + * + * Scatter-gather style structure. Used by the mix_audio_decode() method for input and output buffers. + */ +typedef struct { + guchar *data; + gint size; +} MixIOVec; + +/** + * MixDeviceState: + * @MIX_AUDIO_DEV_CLOSED: TBD + * @MIX_AUDIO_DEV_OPENED: TBD + * @MIX_AUDIO_DEV_ALLOCATED: TBD + * + * Device state.
+ */ +typedef enum { + MIX_AUDIO_DEV_CLOSED=0, + MIX_AUDIO_DEV_OPENED, + MIX_AUDIO_DEV_ALLOCATED +} MixDeviceState; + +/** + * MixAudioClass: + * @parent_class: Parent class. + * + * MI-X Audio object class + */ +struct _MixAudioClass +{ + /*< public >*/ + GObjectClass parent_class; + + /*< virtual public >*/ + MIX_RESULT (*initialize) (MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams); + MIX_RESULT (*configure) (MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams); + MIX_RESULT (*decode) (MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize); + MIX_RESULT (*capture_encode) (MixAudio *mix, MixIOVec *iovout, gint iovoutcnt); + MIX_RESULT (*start) (MixAudio *mix); + MIX_RESULT (*stop_drop) (MixAudio *mix); + MIX_RESULT (*stop_drain) (MixAudio *mix); + MIX_RESULT (*pause) (MixAudio *mix); + MIX_RESULT (*resume) (MixAudio *mix); + MIX_RESULT (*get_timestamp) (MixAudio *mix, guint64 *msecs); + MIX_RESULT (*set_mute) (MixAudio *mix, gboolean mute); + MIX_RESULT (*get_mute) (MixAudio *mix, gboolean* muted); + MIX_RESULT (*get_max_vol) (MixAudio *mix, gint *maxvol); + MIX_RESULT (*get_min_vol) (MixAudio *mix, gint *minvol); + MIX_RESULT (*get_volume) (MixAudio *mix, gint *currvol, MixVolType type); + MIX_RESULT (*set_volume) (MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype); + MIX_RESULT (*deinitialize) (MixAudio *mix); + MIX_RESULT (*get_stream_state) (MixAudio *mix, MixStreamState *streamState); + MIX_RESULT (*get_state) (MixAudio *mix, MixState *state); + MIX_RESULT (*is_am_available) (MixAudio *mix, MixAudioManager am, gboolean *avail); + MIX_RESULT (*get_output_configuration) (MixAudio *mix, MixAudioConfigParams **audioconfigparams); +}; + +/** + * MixAudio: + * @parent: Parent object. + * @streamState: Current state of the stream. + * @decodeMode: Current decode mode of the device. This value is valid only when @codecMode equals #MIX_CODING_DECODE. + * @fileDescriptor: File descriptor to the opened device. + * @state: State of the current #MixAudio session. + * @codecMode: Current codec mode of the session. + * @useIAM: Whether the current stream is configured to use Intel Audio Manager. + * @encoding: Not used. + * + * MI-X Audio object + */ +struct _MixAudio +{ + /*< public >*/ + GObject parent; + + /*< public >*/ + + /*< private >*/ + MixStreamState streamState; + gchar *encoding; + MixState state; + MixCodecMode codecMode; + gboolean useIAM; + int fileDescriptor; + gint streamID; + guint32 amStreamID; + GStaticRecMutex streamlock; // lock that must be acquired to invoke a stream method. + GStaticRecMutex controllock; // lock that must be acquired to call a control function. + MixAudioConfigParams *audioconfigparams; + gboolean am_registered; + MixDeviceState deviceState; + + guint64 ts_last; + guint64 ts_elapsed; + guint64 bytes_written; +}; + +/** + * mix_audio_get_type: + * @returns: type + * + * Get the type of the object. + */ +GType mix_audio_get_type (void); + +/** + * mix_audio_new: + * @returns: A newly allocated instance of #MixAudio + * + * Use this method to create a new instance of #MixAudio + */ +MixAudio *mix_audio_new(void); + +/** + * mix_audio_ref: + * @mix: object to add reference + * @returns: the #MixAudio instance with its reference count increased. + * + * Increment the reference count. + */ +MixAudio *mix_audio_ref(MixAudio *mix); + +/** + * mix_audio_unref: + * @obj: object to unref.
+ * + * Decrement the reference count of the object. + */ +#define mix_audio_unref(obj) g_object_unref (G_OBJECT(obj)) + +/* Class Methods */ + +/** + * mix_audio_get_version: + * @returns: #MIX_RESULT_SUCCESS + * + * Returns the version of the MI-X library. + * + */ +MIX_RESULT mix_audio_get_version(guint* major, guint *minor); + +/** + * mix_audio_initialize: + * @mix: #MixAudio object. + * @mode: Requested #MixCodecMode. + * @aip: Audio initialization parameters. + * @drminitparams: Optional. DRM initialization param if applicable. + * @returns: #MIX_RESULT_SUCCESS on successful initialization. #MIX_RESULT_ALREADY_INIT if the session is already initialized. + * + * This function will initialize an encode or decode session with this #MixAudio instance. During this call, the device will be opened. If the device is not available, an error is returned to the caller so that an alternative (e.g. software decoding) can be configured instead. Use mix_audio_deinitialize() to close the device. + * + * A previously initialized session must be de-initialized using mix_audio_deinitialize() before it can be initialized again. + */ +MIX_RESULT mix_audio_initialize(MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams); + +/** + * mix_audio_configure: + * @mix: #MixAudio object. + * @audioconfigparams: a #MixAudioConfigParams derived object containing information for the specific stream type. + * @drmparams: Optional. DRM initialization param if applicable. + * @returns: Result indicates successful or not. + * + * This function can be used to configure a stream for the current session. The caller can use this function to do the following: + * + * + * Choose decoding mode (direct-render or decode-return) + * Provide DRM parameters (using DRMparams object) + * Provide stream parameters (using STRMparams objects) + * Provide a stream name for the Intel Smart Sound Technology stream + * + * + * SST stream parameters will be set during this call, and stream resources allocated in SST. + * + * + * Intel Audio Manager support: + * If Intel Audio Manager support is enabled, and if @mode is specified to #MIX_DECODE_DIRECTRENDER, the SST stream will be registered with Intel Audio Manager in the context of this call, using the stream name provided in @streamname. The application will receive a notification from Intel Audio Manager that the stream has been created during or soon after this call. The application should be ready to handle either possibility. A stream ID (associated with the stream name) will be provided by Intel Audio Manager which will be used for subsequent notifications from Intel Audio Manager or calls to Intel Audio Manager for muting, pause and resume. See mix_audio_getstreamid() + * If a stream is already registered with Intel Audio Manager, the application must pass the same @streamname argument to retain the session. Otherwise, the existing stream will be unregistered and a new stream will be registered with the new @streamname. + * + * + * + * If @mode is specified to #MIX_DECODE_DIRECTRENDER but direct-render mode is not available (due to end user use of an alternative output device), an error indication will be returned to the caller so that an alternate pipeline configuration can be created (e.g. including a Pulse Audio sink, and support for output buffers). In this case, the caller will need to call mix_audio_configure() again with @mode specified as #MIX_DECODE_DECODERETURN to request decode-return mode.
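+ * + * Illustrative call sequence (a sketch only; it assumes an already initialized @mix, MP3 content, and that a NULL @drmparams is acceptable when DRM is not used): + * |[ + * MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new(); + * MIX_ACP_NUM_CHANNELS(MIX_AUDIOCONFIGPARAMS(mp3)) = 2; + * MIX_ACP_SAMPLE_FREQ(MIX_AUDIOCONFIGPARAMS(mp3)) = 44100; + * MIX_RESULT ret = mix_audio_configure(mix, MIX_AUDIOCONFIGPARAMS(mp3), NULL); + * ]|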
+ * + * This method can be called multiple times if reconfiguration of the stream is needed. However, this method must be called when the stream is in #MIX_STREAM_STOPPED state. + * + */ +MIX_RESULT mix_audio_configure(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams); + +/** + * mix_audio_decode: + * @mix: #MixAudio object. + * @iovin: a pointer to an array of #MixIOVec structures that contain the input buffers + * @iovincnt: the number of entries in the @iovin array + * @insize: Total number of input bytes consumed by this call. + * @iovout: a pointer to an array of #MixIOVec structures that represent the output buffers. On input, each size in the #MixIOVec array represents the available buffer size pointed to by data. Upon return, each size value will be updated to reflect how much data has been filled. This parameter is ignored if the stream is configured to #MIX_DECODE_DIRECTRENDER. See mix_audio_configure() for more detail. + * @iovoutcnt: in/out parameter which, on input, contains the number of entries available in the @iovout array. Upon return, this value will be updated to reflect how many entries in the @iovout array have been populated with data. This parameter is ignored if the stream is configured to #MIX_DECODE_DIRECTRENDER. See mix_audio_configure() for more detail. + * @outsize: Total number of bytes returned for the decode session. This parameter is ignored if the stream is configured to #MIX_DECODE_DIRECTRENDER. + * @returns: #MIX_RESULT + * + * This function is used to initiate HW accelerated decoding of encoded data buffers. This function may be used in two major modes, direct-render or decode-return. + * + * With direct-render, input buffers are provided by the caller which hold encoded audio data, and no output buffers are provided. The encoded data is decoded, and the decoded data is sent directly to the output speaker. This allows very low power audio rendering and is the best choice of operation for longer battery life. + * + * + * Intel Audio Manager Support + * However, if the user has connected a different target output device, such as Bluetooth headphones, this mode cannot be used as the decoded audio must be directed to the Pulse Audio stack where the output to Bluetooth device can be supported, per Intel Audio Manager guidelines. This mode is called decode-return, and requires the caller to provide output buffers for the decoded data. + * + * + * Input buffers in both modes are one or more user space buffers using a scatter/gather style vector interface. + * + * Output buffers for the decode-return mode are one or more user space buffers in a scatter style vector interface. Buffers will be filled in order and the lengths of data filled will be returned. + * + * This call will block until data has been completely copied or queued to the driver. All user space buffers may be used or released when this call returns. + * + * Note: If the stream is configured as #MIX_DECODE_DIRECTRENDER and the stream is in #MIX_STREAM_STOPPED state, a call to mix_audio_decode() will not start playback until mix_audio_start() is called. This behavior allows the application to queue up data but delay playback until the appropriate time. + * + */ +MIX_RESULT mix_audio_decode(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize); + +/** + * mix_audio_capture_encode: + * @mix: #MixAudio object. + * @iovout: Buffers to receive the captured audio samples. + * @iovoutcnt: Number of entries in the output vector @iovout.
+ * @returns: #MIX_RESULT + * + * Reads encoded data from the device. + * + * + * NOTE: May need to rename to "read_encoded" or another name, since "encode" usually means taking raw audio and converting it to compressed audio. + * + */ +MIX_RESULT mix_audio_capture_encode(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt); + +/** + * mix_audio_start: + * @mix: #MixAudio object. + * @returns: #MIX_RESULT_SUCCESS if the resulting state is either #MIX_STREAM_PLAYING or #MIX_STREAM_PAUSED. Fail code otherwise. + * + * If the stream is configured to #MIX_DECODE_DIRECTRENDER, the application uses this call to move the stream out of the #MIX_STREAM_STOPPED state. If mix_audio_decode() was called and is blocking in a separate thread prior to this call, this method causes the device to start rendering data. + * + * In #MIX_DECODE_DECODERETURN, this method is a no-op. + */ +MIX_RESULT mix_audio_start(MixAudio *mix); + +/** + * mix_audio_stop_drop: + * @mix: #MixAudio object. + * @returns: #MIX_RESULT_SUCCESS if the resulting state has successfully reached #MIX_STREAM_STOPPED. Fail code otherwise. + * + * If the stream is configured to #MIX_DECODE_DIRECTRENDER, the application uses this function to stop the processing and playback of audio. + * + * All remaining frames to be decoded or rendered will be discarded and playback will stop immediately; any pending mix_audio_decode() is unblocked. + * + * If #MIX_STOP_DRAIN is requested, the call will block with the stream state set to #MIX_STREAM_DRAINING, and return only when all remaining frames in previously submitted buffers have been decoded and rendered. When #MIX_STOP_DRAIN returns successfully, the stream will have reached #MIX_STREAM_STOPPED. + * + * After this call, the timestamp retrieved by mix_audio_get_timestamp() is reset to zero. + * + * Note that this method returns #MIX_RESULT_WRONG_STATE if the stream is in #MIX_STREAM_DRAINING state. + * + */ +MIX_RESULT mix_audio_stop_drop(MixAudio *mix); + +/** + * mix_audio_stop_drain: + * @mix: #MixAudio object. + * @returns: #MIX_RESULT_SUCCESS if the resulting state has successfully reached #MIX_STREAM_STOPPED. Fail code otherwise. + * + * If the stream is configured to #MIX_DECODE_DIRECTRENDER, the application uses this function to stop the processing and playback of audio. + * + * The call will block with the stream state set to #MIX_STREAM_DRAINING, and return only when all remaining frames in previously submitted buffers have been decoded and rendered. + * + * Note that this method blocks until #MIX_STREAM_STOPPED is reached if it is called when the stream is already in #MIX_STREAM_DRAINING state. + * + */ +MIX_RESULT mix_audio_stop_drain(MixAudio *mix); + +/** + * mix_audio_pause: + * @mix: #MixAudio object. + * @returns: #MIX_RESULT_SUCCESS if #MIX_STREAM_PAUSED state is reached successfully. #MIX_RESULT_WRONG_STATE if the operation is not allowed in the current state. + * + * If the stream is configured to #MIX_DECODE_DIRECTRENDER, the application uses this call to change the stream state from #MIX_STREAM_PLAYING to #MIX_STREAM_PAUSED. Note that this method returns success only when the resulting state is #MIX_STREAM_PAUSED, meaning it will return a failure code if it is called in a state such as #MIX_STREAM_STOPPED, from which transitioning to #MIX_STREAM_PAUSED is not possible. + * + * In some situations where there is a potential race condition with the DRAINING operation, this method may return #MIX_RESULT_NEED_RETRY to indicate that the last operation result is inconclusive, and the caller should call again.
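+ * + * A caller could handle #MIX_RESULT_NEED_RETRY with a bounded retry loop (an illustrative sketch only, not a required pattern): + * |[ + * gint tries = 5; + * MIX_RESULT ret; + * do { ret = mix_audio_pause(mix); } while ((ret == MIX_RESULT_NEED_RETRY) && (--tries > 0)); + * ]|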
+ */ +MIX_RESULT mix_audio_pause(MixAudio *mix); + +/** + * mix_audio_resume: + * @mix: #MixAudio object. + * @returns: #MIX_RESULT_SUCCESS if #MIX_STREAM_PLAYING state is reached successfully. #MIX_RESULT_WRONG_STATE if the operation is not allowed in the current state. + * + * If the stream is configured to #MIX_DECODE_DIRECTRENDER, the application uses this call to change the stream state to #MIX_STREAM_PLAYING. Note that this method returns success only when the resulting state is #MIX_STREAM_PLAYING, meaning it will return a failure code if it is called in a state such as #MIX_STREAM_DRAINING, from which transitioning to #MIX_STREAM_PLAYING is not possible. + * + */ +MIX_RESULT mix_audio_resume(MixAudio *mix); + + +/** + * mix_audio_get_timestamp: + * @mix: #MixAudio object. + * @msecs: play time in milliseconds. + * @returns: #MIX_RESULT_SUCCESS if the timestamp is available. #MIX_RESULT_WRONG_MODE if the operation is not allowed with the current mode. + * + * This function can be used to retrieve the current timestamp for audio playback in milliseconds. The timestamp will reflect the amount of audio data rendered since the start of the stream, or since the last stop. Note that the timestamp is always reset to zero when the stream enters #MIX_STREAM_STOPPED state. The timestamp is an unsigned long value, so the value will wrap when the timestamp reaches #ULONG_MAX. This function is only valid in direct-render mode. + */ +MIX_RESULT mix_audio_get_timestamp(MixAudio *mix, guint64 *msecs); + +/** + * mix_audio_set_mute: + * @mix: #MixAudio object. + * @mute: Turn mute on/off. + * @returns: #MIX_RESULT_SUCCESS on success or other fail code. + * + * This function is used to mute and unmute audio playback. While muted, playback continues, but silently. This function is only valid when the session is configured to #MIX_DECODE_DIRECTRENDER mode. + * + * Note that the playback volume may change due to changes in global settings while the stream is muted. + */ +MIX_RESULT mix_audio_set_mute(MixAudio *mix, gboolean mute); + +/** + * mix_audio_get_mute: + * @mix: #MixAudio object. + * @muted: current mute state. + * @returns: #MIX_RESULT_SUCCESS on success or other fail code. + * + * Get the current mute state. + */ +MIX_RESULT mix_audio_get_mute(MixAudio *mix, gboolean* muted); + +/** + * mix_audio_get_max_vol: + * @mix: #MixAudio object. + * @maxvol: pointer to receive the maximum volume. + * @returns: #MIX_RESULT_SUCCESS on success or other fail code. + * + * This function can be used if the application will be setting the audio volume using decibels instead of percentage. The maximum volume in decibels supported by the driver will be returned. This value can be used to determine the upper bound of the decibel range in calculating volume levels. This value is a signed integer. This function is only valid if the stream is configured to #MIX_DECODE_DIRECTRENDER mode. + * + */ +MIX_RESULT mix_audio_get_max_vol(MixAudio *mix, gint *maxvol); + +/** + * mix_audio_get_min_vol: + * @mix: #MixAudio object. + * @minvol: pointer to receive the minimum volume. + * @returns: #MIX_RESULT_SUCCESS on success or other fail code. + * + * This function can be used if the application will be setting the audio volume using decibels instead of percentage. The minimum volume in decibels supported by the driver will be returned. This value can be used to determine the lower bound of the decibel range in calculating volume levels. This value is a signed integer. This function is only valid if the stream is configured to #MIX_DECODE_DIRECTRENDER mode.
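+ * + * For example, a percentage p in the range 0 to 100 could be mapped onto the driver's decibel range as minvol + ((maxvol - minvol) * p) / 100; this is only an illustrative mapping, not one mandated by the API.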
+ * + */ +MIX_RESULT mix_audio_get_min_vol(MixAudio *mix, gint *minvol); + +/** + * mix_audio_get_volume: + * @mix: #MixAudio object. + * @currvol: Current volume. Note that if @type equals #MIX_VOL_PERCENT, this value will be returned within the range of 0 to 100 inclusive. + * @type: The type represented by @currvol. + * @returns: #MIX_RESULT_SUCCESS on success or other fail code. + * + * This function returns the current volume setting in either decibels or percentage. This function is only valid if the stream is configured to #MIX_DECODE_DIRECTRENDER mode. + * + */ +MIX_RESULT mix_audio_get_volume(MixAudio *mix, gint *currvol, MixVolType type); + +/** + * mix_audio_set_volume: + * @mix: #MixAudio object. + * @currvol: The volume to set. Note that if @type equals #MIX_VOL_PERCENT, this value will be truncated to within the range of 0 to 100 inclusive. + * @type: The type represented by @currvol. + * @msecs: duration of the volume ramp in milliseconds. + * @ramptype: the #MixVolRamp ramp type to use. + * @returns: #MIX_RESULT_SUCCESS on success or other fail code. + * + * This function sets the current volume setting in either decibels or percentage. This function is only valid if the stream is configured to #MIX_DECODE_DIRECTRENDER mode. + * + */ +MIX_RESULT mix_audio_set_volume(MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype); + +/** + * mix_audio_deinitialize: + * @mix: #MixAudio object. + * @returns: #MIX_RESULT_SUCCESS on success or other fail code. + * + * This function will uninitialize a session with this MI-X instance. During this call, the SST device will be closed and resources, including mmapped buffers, will be freed. This function should be called by the application once mix_audio_initialize() has been called. + * + * + * Intel Audio Manager Support + * The SST stream will be unregistered with Intel Audio Manager if it was registered. + * + * + * Note that this method should not fail normally. If it does return failure, the state of this object and the underlying mechanism is compromised, and the application should not attempt to reuse this object. + */ +MIX_RESULT mix_audio_deinitialize(MixAudio *mix); + +/** + * mix_audio_get_stream_state: + * @mix: #MixAudio object. + * @streamState: pointer to receive the stream state. + * @returns: #MIX_RESULT + * + * Get the stream state of the current stream. + */ +MIX_RESULT mix_audio_get_stream_state(MixAudio *mix, MixStreamState *streamState); + +/** + * mix_audio_get_state: + * @mix: #MixAudio object. + * @state: pointer to receive the state. + * @returns: #MIX_RESULT + * + * Get the device state of the audio session. + */ +MIX_RESULT mix_audio_get_state(MixAudio *mix, MixState *state); + +/** + * mix_audio_am_is_enabled: + * @mix: #MixAudio object. + * @returns: boolean indicating whether Intel Audio Manager is enabled with the current session. + * + * This method checks if the current session is configured to use Intel Audio Manager. Note that Intel Audio Manager is considered disabled if the stream has not been initialized to use the service explicitly. + */ +gboolean mix_audio_am_is_enabled(MixAudio *mix); + +// Real implementation for Base class +//MIX_RESULT mix_audio_get_version(guint* major, guint *minor); + +/** + * mix_audio_is_am_available: + * @mix: TBD + * @am: TBD + * @avail: TBD + * @returns: TBD + * + * Check if AM is available. + */ +MIX_RESULT mix_audio_is_am_available(MixAudio *mix, MixAudioManager am, gboolean *avail); + +/** + * mix_audio_get_output_configuration: + * @mix: #MixAudio object. + * @audioconfigparams: double pointer to hold the output configuration.
+ * @returns: #MIX_RESULT_SUCCESS on success or other fail code. + * + * This method retrieves the current configuration. It can be called after initialization. If a stream has been configured, it returns the corresponding derived object of MixAudioConfigParams. + */ +MIX_RESULT mix_audio_get_output_configuration(MixAudio *mix, MixAudioConfigParams **audioconfigparams); + +/** + * mix_audio_get_stream_byte_decoded: + * @mix: #MixAudio object. + * @byte: pointer to receive the bytes decoded. + * @returns: #MIX_RESULT_SUCCESS if the value is available. #MIX_RESULT_WRONG_MODE if the operation is not allowed with the current mode. + * + * Retrieve the cumulative bytes decoded. + * + * Not Implemented. + */ +MIX_RESULT mix_audio_get_stream_byte_decoded(MixAudio *mix, guint64 *byte); + +#endif /* __MIX_AUDIO_H__ */ diff --git a/mix_audio/src/mixaudiotypes.h b/mix_audio/src/mixaudiotypes.h new file mode 100644 index 0000000..1b4e085 --- /dev/null +++ b/mix_audio/src/mixaudiotypes.h @@ -0,0 +1,27 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_AUDIO_TYPES_H__ +#define __MIX_AUDIO_TYPES_H__ + +/** + * MixAudioManager: + * @MIX_AUDIOMANAGER_NONE: No Audio Manager. + * @MIX_AUDIOMANAGER_INTELAUDIOMANAGER: Intel Audio Manager. + * @MIX_AUDIOMANAGER_LAST: Last index. + * + * Audio Manager enumerations. + */ +typedef enum { + MIX_AUDIOMANAGER_NONE = 0, + MIX_AUDIOMANAGER_INTELAUDIOMANAGER, + MIX_AUDIOMANAGER_LAST +} MixAudioManager; + + +#endif diff --git a/mix_audio/src/pvt.h b/mix_audio/src/pvt.h new file mode 100644 index 0000000..f4be9e5 --- /dev/null +++ b/mix_audio/src/pvt.h @@ -0,0 +1,9 @@ + + +typedef unsigned short u16; +typedef unsigned long u32; +typedef unsigned char u8; +typedef signed char s8; +typedef signed short s16; +#define __user + diff --git a/mix_audio/src/sst_proxy.c b/mix_audio/src/sst_proxy.c new file mode 100644 index 0000000..438e06e --- /dev/null +++ b/mix_audio/src/sst_proxy.c @@ -0,0 +1,435 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions.
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + + +#include +#include +#include +#include "mixacpmp3.h" +#include "mixacpwma.h" +#include "mixacpaac.h" +#include "intel_sst_ioctl.h" +#include "mixacp.h" +#include "sst_proxy.h" + +#ifdef G_LOG_DOMAIN +#undef G_LOG_DOMAIN +#define G_LOG_DOMAIN ((gchar*)"mixaudio") +#endif + +gboolean mix_sst_params_convert_mp3(MixAudioConfigParamsMP3 *acp, struct snd_sst_params *s); +gboolean mix_sst_params_convert_wma(MixAudioConfigParamsWMA *acp, struct snd_sst_params *s); +gboolean mix_sst_params_convert_aac(MixAudioConfigParamsAAC *acp, struct snd_sst_params *s); +void mix_sst_params_to_mp3(MixAudioConfigParamsMP3 *acp, struct snd_mp3_params *params); +void mix_sst_params_to_wma(MixAudioConfigParamsWMA *acp, struct snd_wma_params *params); +void mix_sst_params_to_aac(MixAudioConfigParamsAAC *acp, struct snd_aac_params *params); +void mix_sst_set_bps(MixAudioConfigParams *acp, guchar pcm_wd_sz); +void mix_sst_set_op_align(MixAudioConfigParams *acp, guchar op_align); + +/* + * Utilities that convert param object to driver struct. + * No Mix Context needed. However, it knows about the driver's param structure. + */ +gboolean mix_sst_params_convert(MixAudioConfigParams *acp, struct snd_sst_params *s) +{ + gboolean ret = FALSE; + + if (!s) return FALSE; + + if (MIX_IS_AUDIOCONFIGPARAMSMP3(acp)) + ret = mix_sst_params_convert_mp3(MIX_AUDIOCONFIGPARAMSMP3(acp), s); + else if (MIX_IS_AUDIOCONFIGPARAMSWMA(acp)) + ret = mix_sst_params_convert_wma(MIX_AUDIOCONFIGPARAMSWMA(acp), s); + else if (MIX_IS_AUDIOCONFIGPARAMSAAC(acp)) + ret = mix_sst_params_convert_aac(MIX_AUDIOCONFIGPARAMSAAC(acp), s); + + return ret; +} + + +gboolean mix_sst_params_convert_mp3(MixAudioConfigParamsMP3 *acp, struct snd_sst_params *s) +{ + struct snd_mp3_params *p = &s->sparams.uc.mp3_params; + + s->codec = p->codec = SST_CODEC_TYPE_MP3; + p->num_chan = MIX_ACP_NUM_CHANNELS(acp); + p->brate = MIX_ACP_BITRATE(acp); + p->sfreq = MIX_ACP_SAMPLE_FREQ(acp); + p->crc_check = MIX_ACP_MP3_CRC(acp); + p->pcm_wd_sz = mix_acp_get_bps(MIX_AUDIOCONFIGPARAMS(acp)); + if (p->pcm_wd_sz == MIX_ACP_BPS_16) + p->op_align = MIX_ACP_OUTPUT_ALIGN_16; + else + p->op_align = mix_acp_get_op_align(MIX_AUDIOCONFIGPARAMS(acp)); + + return TRUE; +} + +gboolean mix_sst_params_convert_wma(MixAudioConfigParamsWMA *acp, struct snd_sst_params *s) +{ + struct snd_wma_params *p = &s->sparams.uc.wma_params; + + p->num_chan = MIX_ACP_NUM_CHANNELS(acp); + p->brate = MIX_ACP_BITRATE(acp); + p->sfreq = MIX_ACP_SAMPLE_FREQ(acp); + p->wma_encode_opt = MIX_ACP_WMA_ENCODE_OPT(acp); + p->block_align = MIX_ACP_WMA_BLOCK_ALIGN(acp); + p->channel_mask = MIX_ACP_WMA_CHANNEL_MASK(acp); + p->format_tag = MIX_ACP_WMA_FORMAT_TAG(acp); + p->pcm_src = MIX_ACP_WMA_PCM_BIT_WIDTH(acp); + p->pcm_wd_sz = mix_acp_get_bps(MIX_AUDIOCONFIGPARAMS(acp)); + if (p->pcm_wd_sz == MIX_ACP_BPS_16) + p->op_align = MIX_ACP_OUTPUT_ALIGN_16; + else + p->op_align = mix_acp_get_op_align(MIX_AUDIOCONFIGPARAMS(acp)); + + switch (mix_acp_wma_get_version(acp)) + { + case 
MIX_AUDIO_WMA_V9: + s->codec = p->codec = SST_CODEC_TYPE_WMA9; + break; + case MIX_AUDIO_WMA_V10: + s->codec = p->codec = SST_CODEC_TYPE_WMA10; + break; + case MIX_AUDIO_WMA_V10P: + s->codec = p->codec = SST_CODEC_TYPE_WMA10P; + break; + default: + break; + } + + return TRUE; +} + +#define AAC_DUMP(param) g_message("snd_aac_params.%s=%u", #param, p->param) +#define AAC_DUMP_I(param, idx) g_message("snd_aac_params.%s[%d]=%x", #param, idx, p->param[idx]) + +gboolean mix_sst_params_convert_aac(MixAudioConfigParamsAAC *acp, struct snd_sst_params *s) +{ + struct snd_aac_params *p = &s->sparams.uc.aac_params; + + // We only have the AOT here, while the tools involved are usually specified in eAOT. + // However, sometimes the AOT alone can tell us the tool involved, e.g. + // AOT==5 --> SBR + // AOT==29 --> PS + // AOT==2 --> AAC-LC + + // We know SBR is present only if its presence is explicitly indicated, or if the AOT says so. + guint aot = mix_acp_aac_get_aot(acp); + p->sbr_present = ((MIX_ACP_AAC_SBR_FLAG(acp) == 1) || + (aot == 5) || + (MIX_ACP_AAC_PS_FLAG(acp) == 1) || + (aot == 29))?1:0; + + // As far as we know, we should: + // set the sbr_present flag for SST in case of possible implicit signalling of SBR, and + // use the HEAACv2 decoder in case of possible implicit signalling of PS. + // Although we should theoretically select the HEAACv2 decoder for HEAACv1 and HEAAC, + // it is not advisable since the HEAACv2 decoder has more overhead, as per the SST team. + // So MixAudio implicitly selects the codec based on AOT, psPresentFlag and sbrPresentFlag. + // The application can override the selection by explicitly setting psPresentFlag and/or sbrPresentFlag. + if ((MIX_ACP_AAC_PS_FLAG(acp) == 1) || (aot == 29)) + { + // PS present. + s->codec = p->codec = SST_CODEC_TYPE_eAACP; + } + else if (p->sbr_present == 1) + { + s->codec = p->codec = SST_CODEC_TYPE_AACP; + } + else + { + s->codec = p->codec = SST_CODEC_TYPE_AAC; + } + + p->num_chan = MIX_ACP_AAC_CHANNELS(acp); // core/internal channels + p->ext_chl = MIX_ACP_NUM_CHANNELS(acp); // external channels + p->aac_srate = MIX_ACP_AAC_SAMPLE_RATE(acp); // aac decoder internal frequency + p->sfreq = MIX_ACP_SAMPLE_FREQ(acp); // output/external frequency + + p->brate = MIX_ACP_BITRATE(acp); + p->mpg_id = (guint)mix_acp_aac_get_mpeg_id(acp); + p->bs_format = mix_acp_aac_get_bit_stream_format(acp); + p->aac_profile = mix_acp_aac_get_aac_profile(acp); + // The AOT defined by the MPEG spec is 5 for SBR, but the SST definition is 4 for SBR.
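+ // (Illustrative summary of the mapping below: an MPEG AOT of 5 (SBR) is stored as SST aot 4; AOT 2 (AAC-LC) passes through unchanged; other AOT values leave p->aot unset.)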
+ if (aot == 5) + p->aot = 4; + else if (aot == 2) + p->aot = aot; + p->crc_check = MIX_ACP_AAC_CRC(acp); + p->brate_type = mix_acp_aac_get_bit_rate_type(acp); + p->pce_present = MIX_ACP_AAC_PCE_FLAG(acp); + p->pcm_wd_sz = mix_acp_get_bps(MIX_AUDIOCONFIGPARAMS(acp)); + + if (p->pcm_wd_sz == MIX_ACP_BPS_16) + p->op_align = MIX_ACP_OUTPUT_ALIGN_16; + else + p->op_align = mix_acp_get_op_align(MIX_AUDIOCONFIGPARAMS(acp)); + + //p->aac_srate = ; // __u32 aac_srate; /* Plain AAC decoder operating sample rate */ + //p->ext_chl = ; // __u8 ext_chl; /* No. of external channels */ + + switch (p->bs_format) + { + case MIX_AAC_BS_ADTS: + g_sprintf((gchar*)p->bit_stream_format, "adts"); + break; + case MIX_AAC_BS_ADIF: + g_sprintf((gchar*)p->bit_stream_format, "adif"); + break; + case MIX_AAC_BS_RAW: + g_sprintf((gchar*)p->bit_stream_format, "raw"); + p->num_syntc_elems = 0; + p->syntc_id[0] = (gint8)-1; /* 0 for ID_SCE (Dual Mono), -1 for raw */ + p->syntc_id[1] = (gint8)-1; + p->syntc_tag[0] = (gint8)-1; /* -1 for raw; 0-16 for the rest of the streams */ + p->syntc_tag[1] = (gint8)-1; + break; + default: + break; + } + + { + AAC_DUMP(codec); + AAC_DUMP(num_chan); /* 1=Mono, 2=Stereo */ + AAC_DUMP(pcm_wd_sz); /* 16/24-bit */ + AAC_DUMP(brate); + AAC_DUMP(sfreq); /* Sampling freq e.g. 8000, 44100, 48000 */ + AAC_DUMP(aac_srate); /* Plain AAC decoder operating sample rate */ + AAC_DUMP(mpg_id); /* 0=MPEG-2, 1=MPEG-4 */ + AAC_DUMP(bs_format); /* input bit stream format adts=0, adif=1, raw=2 */ + AAC_DUMP(aac_profile); /* 0=Main Profile, 1=LC profile, 3=SSR profile */ + AAC_DUMP(ext_chl); /* No. of external channels */ + AAC_DUMP(aot); /* Audio object type. 1=Main, 2=LC, 3=SSR, 4=SBR */ + AAC_DUMP(op_align); /* output alignment 0=16 bit, 1=MSB, 2=LSB align */ + AAC_DUMP(brate_type); /* 0=CBR, 1=VBR */ + AAC_DUMP(crc_check); /* crc check 0=disable, 1=enable */ + // AAC_DUMP(bit_stream_format[8]); /* input bit stream format adts/adif/raw */ + g_message("snd_aac_params.bit_stream_format=%s", p->bit_stream_format); + AAC_DUMP(jstereo); /* Joint stereo Flag */ + AAC_DUMP(sbr_present); /* 1 = SBR Present, 0 = SBR absent, for RAW */ + AAC_DUMP(downsample); /* 1 = Downsampling ON, 0 = Downsampling OFF */ + AAC_DUMP(num_syntc_elems); /* 1 - Mono/stereo, 0 - Dual Mono, 0 - for raw */ + g_message("snd_aac_params.syntc_id[0]=%x", p->syntc_id[0]); + g_message("snd_aac_params.syntc_id[1]=%x", p->syntc_id[1]); + g_message("snd_aac_params.syntc_tag[0]=%x", p->syntc_tag[0]); + g_message("snd_aac_params.syntc_tag[1]=%x", p->syntc_tag[1]); + //AAC_DUMP_I(syntc_id, 0); /* 0 for ID_SCE (Dual Mono), -1 for raw */ + //AAC_DUMP_I(syntc_id, 1); /* 0 for ID_SCE (Dual Mono), -1 for raw */ + //AAC_DUMP_I(syntc_tag, 0); /* -1 for raw; 0-16 for the rest of the streams */ + //AAC_DUMP_I(syntc_tag, 1); /* -1 for raw; 0-16 for the rest of the streams */ + AAC_DUMP(pce_present); /* Flag.
1 = present, 0 = not present, for RAW */ + AAC_DUMP(reserved); + AAC_DUMP(reserved1); + } + + return TRUE; +} + +MixAudioConfigParams *mix_sst_acp_from_codec(guint codec) +{ + MixAudioConfigParams *ret = NULL; + + // need stream specific ACP + switch (codec) + { + case SST_CODEC_TYPE_MP3: + case SST_CODEC_TYPE_MP24: + ret = (MixAudioConfigParams*)mix_acp_mp3_new(); + break; + case SST_CODEC_TYPE_AAC: + case SST_CODEC_TYPE_AACP: + case SST_CODEC_TYPE_eAACP: + ret = (MixAudioConfigParams*)mix_acp_aac_new(); + break; + case SST_CODEC_TYPE_WMA9: + case SST_CODEC_TYPE_WMA10: + case SST_CODEC_TYPE_WMA10P: + ret = (MixAudioConfigParams*)mix_acp_wma_new(); + break; + } + + return ret; +} + + + +MixAudioConfigParams *mix_sst_params_to_acp(struct snd_sst_get_stream_params *stream_params) +{ + MixAudioConfigParams *ret = NULL; + + gboolean allocated = FALSE; + // Examine stream_params->codec_params.result, which seems to return details specific to stream allocation. + switch (stream_params->codec_params.result) + { + // Please refer to the SST API doc for the return value definitions. + case 5: + g_debug("last SET_PARAMS succeeded with Stream Parameter Modified."); + // fall through + case 0: + allocated = TRUE; + break; + case 1: + // last SET_PARAMS failed: STREAM was not available. + case 2: + // last SET_PARAMS failed: CODEC was not available. + case 3: + // last SET_PARAMS failed: CODEC was not supported. + case 4: + // last SET_PARAMS failed: Invalid Stream Parameters. + case 6: + // last SET_PARAMS failed: Invalid Stream ID. + default: + // last SET_PARAMS failed unexpectedly. + break; + } + + if (allocated) + { + switch (stream_params->codec_params.codec) + { + case SST_CODEC_TYPE_MP3: + case SST_CODEC_TYPE_MP24: + ret = (MixAudioConfigParams*)mix_acp_mp3_new(); + mix_sst_params_to_mp3(MIX_AUDIOCONFIGPARAMSMP3(ret), &stream_params->codec_params.sparams.uc.mp3_params); + break; + case SST_CODEC_TYPE_AAC: + case SST_CODEC_TYPE_AACP: + case SST_CODEC_TYPE_eAACP: + ret = (MixAudioConfigParams*)mix_acp_aac_new(); + mix_sst_params_to_aac(MIX_AUDIOCONFIGPARAMSAAC(ret), &stream_params->codec_params.sparams.uc.aac_params); + break; + case SST_CODEC_TYPE_WMA9: + case SST_CODEC_TYPE_WMA10: + case SST_CODEC_TYPE_WMA10P: + ret = (MixAudioConfigParams*)mix_acp_wma_new(); + mix_sst_params_to_wma(MIX_AUDIOCONFIGPARAMSWMA(ret), &stream_params->codec_params.sparams.uc.wma_params); + break; + } + } + + if (!ret) ret = mix_acp_new(); + + if (ret) + { + // Be sure to update all vars that become available, since the ACP could have set defaults.
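+ // (Note: the PCM params below come from the device's output configuration and are applied to whichever ACP subtype was selected above.)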
+ MIX_ACP_SAMPLE_FREQ(ret) = stream_params->pcm_params.sfreq; + MIX_ACP_NUM_CHANNELS(ret) = stream_params->pcm_params.num_chan; + mix_sst_set_bps(MIX_AUDIOCONFIGPARAMS(ret), stream_params->pcm_params.pcm_wd_sz); + } + + return ret; +} + + +void mix_sst_params_to_mp3(MixAudioConfigParamsMP3 *acp, struct snd_mp3_params *params) +{ + if(!acp || !params) return; + + MIX_ACP_NUM_CHANNELS(MIX_AUDIOCONFIGPARAMS(acp)) = params->num_chan; + MIX_ACP_BITRATE(MIX_AUDIOCONFIGPARAMS(acp)) = params->brate; + MIX_ACP_SAMPLE_FREQ(MIX_AUDIOCONFIGPARAMS(acp)) = params->sfreq; + MIX_ACP_MP3_CRC(acp) = params->crc_check; + + mix_sst_set_bps(MIX_AUDIOCONFIGPARAMS(acp), params->pcm_wd_sz); + mix_sst_set_op_align(MIX_AUDIOCONFIGPARAMS(acp), params->op_align); +} + +void mix_sst_params_to_wma(MixAudioConfigParamsWMA *acp, struct snd_wma_params *params) +{ + if (!acp || !params) return; // NULL guard, mirroring mix_sst_params_to_mp3 + + MIX_ACP_BITRATE(acp) = params->brate; + MIX_ACP_SAMPLE_FREQ(acp) = params->sfreq; + MIX_ACP_WMA_ENCODE_OPT(acp) = params->wma_encode_opt; + MIX_ACP_WMA_BLOCK_ALIGN(acp) = params->block_align; + MIX_ACP_WMA_CHANNEL_MASK(acp) = params->channel_mask; + MIX_ACP_WMA_FORMAT_TAG(acp) = params->format_tag; + MIX_ACP_WMA_PCM_BIT_WIDTH(acp) = params->pcm_src; + + mix_sst_set_bps(MIX_AUDIOCONFIGPARAMS(acp), params->pcm_wd_sz); + mix_sst_set_op_align(MIX_AUDIOCONFIGPARAMS(acp), params->op_align); + + switch (params->codec) + { + case SST_CODEC_TYPE_WMA9: + mix_acp_wma_set_version(acp, MIX_AUDIO_WMA_V9); + break; + case SST_CODEC_TYPE_WMA10: + mix_acp_wma_set_version(acp, MIX_AUDIO_WMA_V10); + break; + case SST_CODEC_TYPE_WMA10P: + mix_acp_wma_set_version(acp, MIX_AUDIO_WMA_V10P); + break; + } +} + + +void mix_sst_params_to_aac(MixAudioConfigParamsAAC *acp, struct snd_aac_params *params) +{ + if (!acp || !params) return; // NULL guard, mirroring mix_sst_params_to_mp3 + + if (params->codec == SST_CODEC_TYPE_eAACP) + { + MIX_ACP_AAC_PS_FLAG(acp) = TRUE; + } + + MIX_ACP_NUM_CHANNELS(acp) = params->num_chan; + MIX_ACP_BITRATE(acp) = params->brate; + MIX_ACP_SAMPLE_FREQ(acp) = params->sfreq; + mix_acp_aac_set_mpeg_id(acp, params->mpg_id); + mix_acp_aac_set_bit_stream_format(acp, params->bs_format); + mix_acp_aac_set_aac_profile(acp, params->aac_profile); + + // The SST API uses 4 for SBR while the AOT definition in the MPEG-4 spec uses 5. + // Convert between them.
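+ // (Illustrative note: this is the reverse of the mapping in mix_sst_params_convert_aac: SST aot 4 maps back to MPEG AOT 5 (SBR), AOT 2 (AAC-LC) passes through, and other values leave the ACP's AOT untouched.)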
+ if (params->aot == 4) + mix_acp_aac_set_aot(acp, 5); + else if (params->aot == 2) + mix_acp_aac_set_aot(acp, params->aot); + + MIX_ACP_AAC_CRC(acp) = params->crc_check; + mix_acp_aac_set_bit_rate_type(acp, params->brate_type); + MIX_ACP_AAC_SBR_FLAG(acp) = params->sbr_present; + MIX_ACP_AAC_PCE_FLAG(acp) = params->pce_present; + + mix_sst_set_bps(MIX_AUDIOCONFIGPARAMS(acp), params->pcm_wd_sz); + mix_sst_set_op_align(MIX_AUDIOCONFIGPARAMS(acp), params->op_align); + + acp->num_syntc_elems = params->num_syntc_elems; + acp->syntc_id[0] = params->syntc_id[0]; + acp->syntc_id[1] = params->syntc_id[1]; + acp->syntc_tag[0] = params->syntc_tag[0]; + acp->syntc_tag[1] = params->syntc_tag[1]; +} + +void mix_sst_set_bps(MixAudioConfigParams *acp, guchar pcm_wd_sz) +{ + switch (pcm_wd_sz) + { + case MIX_ACP_BPS_16: + case MIX_ACP_BPS_24: + break; + default: + pcm_wd_sz = MIX_ACP_BPS_UNKNOWN; + break; + } + mix_acp_set_bps(MIX_AUDIOCONFIGPARAMS(acp), pcm_wd_sz); +} + +void mix_sst_set_op_align(MixAudioConfigParams *acp, guchar op_align) +{ + switch (op_align) + { + case MIX_ACP_OUTPUT_ALIGN_16: + case MIX_ACP_OUTPUT_ALIGN_MSB: + case MIX_ACP_OUTPUT_ALIGN_LSB: + break; + default: + op_align = MIX_ACP_OUTPUT_ALIGN_UNKNOWN; + break; + } + mix_acp_set_op_align(MIX_AUDIOCONFIGPARAMS(acp), op_align); +} + diff --git a/mix_audio/src/sst_proxy.h b/mix_audio/src/sst_proxy.h new file mode 100644 index 0000000..6ad69fe --- /dev/null +++ b/mix_audio/src/sst_proxy.h @@ -0,0 +1,17 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __SST_PROXY_H__ +#define __SST_PROXY_H__ + +// renaming the struct for easier update, and reference, in MixAudio code. + +gboolean mix_sst_params_convert(MixAudioConfigParams *params, struct snd_sst_params *s); +MixAudioConfigParams *mix_sst_params_to_acp(struct snd_sst_get_stream_params *stream_params); + +#endif diff --git a/mix_audio/tests/Makefile.am b/mix_audio/tests/Makefile.am new file mode 100644 index 0000000..372e488 --- /dev/null +++ b/mix_audio/tests/Makefile.am @@ -0,0 +1,2 @@ +SUBDIRS = smoke + diff --git a/mix_audio/tests/smoke/Makefile.am b/mix_audio/tests/smoke/Makefile.am new file mode 100644 index 0000000..0a373ec --- /dev/null +++ b/mix_audio/tests/smoke/Makefile.am @@ -0,0 +1,25 @@ +#INTEL CONFIDENTIAL +#Copyright 2009 Intel Corporation All Rights Reserved. +#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. 
Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +# + +noinst_PROGRAMS = mixaudiosmoke + +############################################################################## +# sources used to compile +mixaudiosmoke_SOURCES = mixaudiosmoke.c + +# flags used to compile this plugin +# add other _CFLAGS and _LIBS as needed +mixaudiosmoke_CFLAGS = -I$(top_srcdir)/src $(GLIB_CFLAGS) $(GOBJECT_CFLAGS) $(MIXCOMMON_CFLAGS) +mixaudiosmoke_LDADD = $(GLIB_LIBS) $(GOBJECT_LIBS) $(top_srcdir)/src/libmixaudio.la $(MIXCOMMON_LIBS) +#mixaudiosmoke_LDFLAGS = $(GLIB_LIBS) $(GOBJECT_LIBS) $(MIXCOMMON_LIBS) +mixaudiosmoke_LIBTOOLFLAGS = --tag=disable-static + +# headers we need but don't want installed +noinst_HEADERS = + + + diff --git a/mix_audio/tests/smoke/mixaudiosmoke.c b/mix_audio/tests/smoke/mixaudiosmoke.c new file mode 100644 index 0000000..8f81108 --- /dev/null +++ b/mix_audio/tests/smoke/mixaudiosmoke.c @@ -0,0 +1,77 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#include +#include "mixaudio.h" +#include "mixparams.h" +#include "mixacp.h" +#include "mixacpmp3.h" + +void test_getversion() +{ + g_printf("Calling mixaudio_getversion...\n"); + { + guint major = 0; + guint minor = 0; + MIX_RESULT ret = mix_audio_get_version(&major, &minor); + if (MIX_SUCCEEDED(ret)) + { + g_printf("MixAudio Version %u.%u\n", major, minor); + } + else + g_printf("mixaudio_getversion() failed! 
Ret code : 0x%08x\n", ret); + } +} + +int main (int argc, char **argv) +{ + g_type_init(); + + g_printf("Smoke test for MixAudio and structs\n"); + + test_getversion(); + + g_printf("Creating MixAudio...\n"); + MixAudio *ma = mix_audio_new(); + if (MIX_IS_AUDIO(ma)) + { + g_printf("Successful.\n"); + + } + else + { + g_printf("Failed.\n"); + } + + g_printf("Creating MixAudioConfigParams...\n"); + MixAudioConfigParams *map = mix_acp_new(); + if (MIX_IS_AUDIOCONFIGPARAMS(map)) + { + g_printf("Successful.\n"); + + g_printf("Destroying MixAudioConfigParams...\n"); + mix_acp_unref(map); + g_printf("Successful.\n"); + } + else + { + g_printf("Failed.\n"); + } + g_printf("Creating mp3 config params...\n"); + MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new(); + + mp3->CRC = 0; + + g_printf("Destroying MixAudio...\n"); + mix_audio_unref(ma); + g_printf("Successful.\n"); + + g_printf("Smoke completed.\n"); +} + + diff --git a/mix_common/AUTHORS b/mix_common/AUTHORS new file mode 100644 index 0000000..2175750 --- /dev/null +++ b/mix_common/AUTHORS @@ -0,0 +1 @@ +Khanh Nguyen diff --git a/mix_common/COPYING b/mix_common/COPYING new file mode 100644 index 0000000..a4f852c --- /dev/null +++ b/mix_common/COPYING @@ -0,0 +1,26 @@ +INTEL SOFTWARE LICENSE AGREEMENT (Alpha, Beta, Prototype Site License) + +IMPORTANT - READ BEFORE COPYING, INSTALLING OR USING. +Do not use or load this software and any associated materials (collectively, the “Software”) until you have carefully read the following terms and conditions. By loading or using the Software, you agree to the terms of this Agreement. If you do not wish to so agree, do not install or use the Software. + + +LICENSE. Intel hereby grants you a limited, nontransferable, non-sublicenseable, nonexclusive, royalty-free, fully-paid license under Intel’s copyrights to use the Software on your organization’s computers solely for your organization’s internal evaluation and testing in connection with Intel products designed for the Software, and you may make a reasonable number of  copies of the Software for internal use only, subject to these conditions: +1. You may not copy, modify, rent, sell, distribute, externally display, externally perform or transfer any part of the Software except as provided in this Agreement, and you agree to prevent unauthorized copying of the Software. +2. You may not reverse engineer, decompile, or disassemble the Software. +3. You may not sublicense the Software. +4. The Software may include portions offered on terms in addition to those set out here, as set out in a license accompanying those portions. +5. You may not subject the Software, in whole or in part, to any license obligations of Open Source Software including without limitation combining or distributing the Software with Open Source Software in a manner that subjects the Software or any portion of the Software provided by Intel hereunder to any license obligations of such Open Source Software. "Open Source Software" means any software that requires as a condition of use, modification and/or distribution of such software that such software or other software incorporated into, derived from or distributed with such software (a) be disclosed or distributed in source code form; or (b) be licensed by the user to third parties for the purpose of making and/or distributing derivative works; or (c) be redistributable at no charge. 
Open Source Software includes, without limitation, software licensed or distributed under any of the following licenses or distribution models, or licenses or distribution models substantially similar to any of the following: (a) GNU’s General Public License (GPL) or Lesser/Library GPL (LGPL), (b) the Artistic License (e.g., PERL), (c) the Mozilla Public License, (d) the Netscape Public License, (e) the Sun Community Source License (SCSL), (f) the Sun Industry Source License (SISL), (g) the Apache Software license and (h) the Common Public License (CPL). +OWNERSHIP OF SOFTWARE AND COPYRIGHTS. Title to all copies of the Software remains with Intel or its suppliers. The Software is copyrighted and protected by the laws of the United States and other countries, and international treaty provisions. You may not remove any copyright notices from the Software. Intel may make changes to the Software, or to items referenced therein, at any time without notice, but is not obligated to support, update, upgrade or provide training for the Software. Except as otherwise expressly provided, Intel grants no express or implied right under Intel patents, copyrights, trademarks, or other intellectual property rights. +EXCLUSION OF WARRANTIES . THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY EXPRESS OR IMPLIED WARRANTY OF ANY KIND INCLUDING WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, OR FITNESS FOR A PARTICULAR PURPOSE. Intel does not warrant or assume responsibility for the accuracy or completeness of any information, text, graphics, links or other items contained within the Software. +LIMITATION OF LIABILITY. IN NO EVENT SHALL INTEL OR ITS SUPPLIERS BE LIABLE FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, LOST PROFITS, BUSINESS INTERRUPTION OR LOST INFORMATION) ARISING OUT OF THE USE OF OR INABILITY TO USE THE SOFTWARE, EVEN IF INTEL HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. SOME JURISDICTIONS PROHIBIT EXCLUSION OR LIMITATION OF LIABILITY FOR IMPLIED WARRANTIES OR CONSEQUENTIAL OR INCIDENTAL DAMAGES, SO THE ABOVE LIMITATION MAY NOT APPLY TO YOU. YOU MAY ALSO HAVE OTHER LEGAL RIGHTS THAT VARY FROM JURISDICTION TO JURISDICTION. THE SOFTWARE LICENSED HEREUNDER IS NOT DESIGNED OR INTENDED FOR USE IN ANY APPLICATION IN WHICH THE FAILURE OF THE SOFTWARE COULD LEAD TO PERSONAL INJURY OR DEATH. YOU SHALL INDEMNIFY AND HOLD INTEL AND THE INTEL PARTIES HARMLESS AGAINST ALL CLAIMS, COSTS, DAMAGES, AND EXPENSES, AND REASONABLE ATTORNEY FEES ARISING OUT OF, DIRECTLY OR INDIRECTLY, THE UNINTENDED USE OF THE SOFTWARE AND ANY CLAIM OF PRODUCT LIABILITY, PERSONAL INJURY OR DEATH ASSOCIATED WITH ANY UNINTENDED USE, EVEN IF SUCH CLAIM ALLEGES THAT AN INTEL PARTY WAS NEGLIGENT REGARDING THE DESIGN OR MANUFACTURE OF THE SOFTWARE. THE LIMITED REMEDIES, WARRANTY DISCLAIMER AND LIMITED LIABILITY ARE FUNDAMENTAL ELEMENTS OF THE BASIS OF THE BARGAIN BETWEEN INTEL AND YOU. INTEL WOULD NOT BE ABLE TO PROVIDE THE SOFTWARE WITHOUT SUCH LIMITATIONS. +TERMINATION OF THIS AGREEMENT. Intel may terminate this Agreement at any time if you violate its terms. Upon termination, you will immediately destroy the Software or return all copies of the Software to Intel (including providing certification of such destruction back to Intel). In the event of termination of this Agreement, all licenses granted to you hereunder shall immediately terminate. +APPLICABLE LAWS. 
Claims arising under this Agreement shall be governed by the laws of Delaware, excluding its principles of conflict of laws and the United Nations Convention on Contracts for the Sale of Goods. You may not export the Software in violation of applicable export laws and regulations. +GOVERNMENT RESTRICTED RIGHTS. The Software is provided with "RESTRICTED RIGHTS." Use, duplication or disclosure by the government is subject to restrictions as set forth in FAR52.227-14 and DFAR252.227-7013 et seq. or their successors. Use of the Software by the government constitutes acknowledgment of Intel's proprietary rights therein. Contractor or Manufacturer is Intel Corporation, 2200 Mission College Blvd., Santa Clara, CA 95052. +CONFIDENTIALITY. You shall not disclose the terms or existence of this Agreement or use Intel's name in any publications, advertisements, or other announcements without Intel's prior written consent. You do not have any rights to use any Intel trademarks or logos. +ASSIGNMENT. You may not delegate, assign or transfer this Agreement, the license(s) granted or any of your rights or duties hereunder, expressly, by implication, by operation of law, by way of merger (regardless of whether you are the surviving entity) or acquisition, or otherwise and any attempt to do so, without Intel’s express prior written consent, shall be null and void. Intel may assign this Agreement, and its rights and obligations hereunder, in its sole discretion. +ENTIRE AGREEMENT. The terms and conditions of this Agreement constitutes the entire agreement between the parties with respect to the subject matter hereof, and merges and supersedes all prior, contemporaneous agreements, understandings, negotiations and discussions. Neither of the parties hereto shall be bound by any conditions, definitions, warranties, understandings or representations with respect to the subject matter hereof other than as expressly provided for herein. Intel is not obligated under any other agreements unless they are in writing and signed by an authorized representative of Intel. +NO AGENCY Nothing contained herein shall be construed as creating any agency, employment relationship, partnership, principal-agent or other form of joint enterprise between the parties. +SEVERABILITY In the event that any provision of this Agreement shall be unenforceable or invalid under any applicable law or be so held by an applicable court decision, such unenforceability or invalidity shall not render this Agreement unenforceable or invalid as a whole, and, in such event, such provision shall be changed and interpreted so as to best accomplish the objectives of such unenforceable or invalid provision within the limits of applicable law or applicable court decisions. +WAIVER The failure of either party to require performance by the other party of any provision hereof shall not affect the full right to require such performance at any time thereafter; nor shall the waiver by either party of a breach of any provision hereof be taken or held to be a waiver of the provision itself. +CONTROLLING LANGUAGE. Translated versions of this Agreement may be provided in local languages for informational purposes only, provided however, that the English language version of this Agreement shall be controlling in all jurisdictions. 
+ diff --git a/mix_common/ChangeLog b/mix_common/ChangeLog new file mode 100644 index 0000000..b3469f8 --- /dev/null +++ b/mix_common/ChangeLog @@ -0,0 +1,28 @@ +2010-01-11 Echo Choi + + * Updated version to 0.1.6 + * Added NEED_RETRY and ERRNO error code to support retry and errno. + +2009-11-18 Echo Choi + + * Fixed inclusion of m4 directory. + * Added return code. + +2009-11-13 Echo Choi + + * Updated version to 0.1.5 + * Added additional error codes. + +2009-10-16 Echo Choi + + * Included mixdrmparams.* + +2009-10-14 Echo Choi + + * Updated version to 0.1.4. + +2009-10-08 Echo Choi + + * Updated version to 0.1.3 and packaged for build. + +Initial version diff --git a/mix_common/INSTALL b/mix_common/INSTALL new file mode 100644 index 0000000..8b82ade --- /dev/null +++ b/mix_common/INSTALL @@ -0,0 +1,291 @@ +Installation Instructions +************************* + +Copyright (C) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005, +2006, 2007, 2008 Free Software Foundation, Inc. + + This file is free documentation; the Free Software Foundation gives +unlimited permission to copy, distribute and modify it. + +Basic Installation +================== + + Briefly, the shell commands `./configure; make; make install' should +configure, build, and install this package. The following +more-detailed instructions are generic; see the `README' file for +instructions specific to this package. + + The `configure' shell script attempts to guess correct values for +various system-dependent variables used during compilation. It uses +those values to create a `Makefile' in each directory of the package. +It may also create one or more `.h' files containing system-dependent +definitions. Finally, it creates a shell script `config.status' that +you can run in the future to recreate the current configuration, and a +file `config.log' containing compiler output (useful mainly for +debugging `configure'). + + It can also use an optional file (typically called `config.cache' +and enabled with `--cache-file=config.cache' or simply `-C') that saves +the results of its tests to speed up reconfiguring. Caching is +disabled by default to prevent problems with accidental use of stale +cache files. + + If you need to do unusual things to compile the package, please try +to figure out how `configure' could check whether to do them, and mail +diffs or instructions to the address given in the `README' so they can +be considered for the next release. If you are using the cache, and at +some point `config.cache' contains results you don't want to keep, you +may remove or edit it. + + The file `configure.ac' (or `configure.in') is used to create +`configure' by a program called `autoconf'. You need `configure.ac' if +you want to change it or regenerate `configure' using a newer version +of `autoconf'. + +The simplest way to compile this package is: + + 1. `cd' to the directory containing the package's source code and type + `./configure' to configure the package for your system. + + Running `configure' might take a while. While running, it prints + some messages telling which features it is checking for. + + 2. Type `make' to compile the package. + + 3. Optionally, type `make check' to run any self-tests that come with + the package. + + 4. Type `make install' to install the programs and any data files and + documentation. + + 5. You can remove the program binaries and object files from the + source code directory by typing `make clean'. 
To also remove the + files that `configure' created (so you can compile the package for + a different kind of computer), type `make distclean'. There is + also a `make maintainer-clean' target, but that is intended mainly + for the package's developers. If you use it, you may have to get + all sorts of other programs in order to regenerate files that came + with the distribution. + + 6. Often, you can also type `make uninstall' to remove the installed + files again. + +Compilers and Options +===================== + + Some systems require unusual options for compilation or linking that +the `configure' script does not know about. Run `./configure --help' +for details on some of the pertinent environment variables. + + You can give `configure' initial values for configuration parameters +by setting variables in the command line or in the environment. Here +is an example: + + ./configure CC=c99 CFLAGS=-g LIBS=-lposix + + *Note Defining Variables::, for more details. + +Compiling For Multiple Architectures +==================================== + + You can compile the package for more than one kind of computer at the +same time, by placing the object files for each architecture in their +own directory. To do this, you can use GNU `make'. `cd' to the +directory where you want the object files and executables to go and run +the `configure' script. `configure' automatically checks for the +source code in the directory that `configure' is in and in `..'. + + With a non-GNU `make', it is safer to compile the package for one +architecture at a time in the source code directory. After you have +installed the package for one architecture, use `make distclean' before +reconfiguring for another architecture. + + On MacOS X 10.5 and later systems, you can create libraries and +executables that work on multiple system types--known as "fat" or +"universal" binaries--by specifying multiple `-arch' options to the +compiler but only a single `-arch' option to the preprocessor. Like +this: + + ./configure CC="gcc -arch i386 -arch x86_64 -arch ppc -arch ppc64" \ + CXX="g++ -arch i386 -arch x86_64 -arch ppc -arch ppc64" \ + CPP="gcc -E" CXXCPP="g++ -E" + + This is not guaranteed to produce working output in all cases, you +may have to build one architecture at a time and combine the results +using the `lipo' tool if you have problems. + +Installation Names +================== + + By default, `make install' installs the package's commands under +`/usr/local/bin', include files under `/usr/local/include', etc. You +can specify an installation prefix other than `/usr/local' by giving +`configure' the option `--prefix=PREFIX'. + + You can specify separate installation prefixes for +architecture-specific files and architecture-independent files. If you +pass the option `--exec-prefix=PREFIX' to `configure', the package uses +PREFIX as the prefix for installing programs and libraries. +Documentation and other data files still use the regular prefix. + + In addition, if you use an unusual directory layout you can give +options like `--bindir=DIR' to specify different values for particular +kinds of files. Run `configure --help' for a list of the directories +you can set and what kinds of files go in them. + + If the package supports it, you can cause programs to be installed +with an extra prefix or suffix on their names by giving `configure' the +option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'. 
+
+Optional Features
+=================
+
+   Some packages pay attention to `--enable-FEATURE' options to
+`configure', where FEATURE indicates an optional part of the package.
+They may also pay attention to `--with-PACKAGE' options, where PACKAGE
+is something like `gnu-as' or `x' (for the X Window System).  The
+`README' should mention any `--enable-' and `--with-' options that the
+package recognizes.
+
+   For packages that use the X Window System, `configure' can usually
+find the X include and library files automatically, but if it doesn't,
+you can use the `configure' options `--x-includes=DIR' and
+`--x-libraries=DIR' to specify their locations.
+
+Particular systems
+==================
+
+   On HP-UX, the default C compiler is not ANSI C compatible.  If GNU
+CC is not installed, it is recommended to use the following options in
+order to use an ANSI C compiler:
+
+     ./configure CC="cc -Ae"
+
+and if that doesn't work, install pre-built binaries of GCC for HP-UX.
+
+   On OSF/1 a.k.a. Tru64, some versions of the default C compiler cannot
+parse its `<wchar.h>' header file.  The option `-nodtk' can be used as
+a workaround.  If GNU CC is not installed, it is therefore recommended
+to try
+
+     ./configure CC="cc"
+
+and if that doesn't work, try
+
+     ./configure CC="cc -nodtk"
+
+Specifying the System Type
+==========================
+
+   There may be some features `configure' cannot figure out
+automatically, but needs to determine by the type of machine the package
+will run on.  Usually, assuming the package is built to be run on the
+_same_ architectures, `configure' can figure that out, but if it prints
+a message saying it cannot guess the machine type, give it the
+`--build=TYPE' option.  TYPE can either be a short name for the system
+type, such as `sun4', or a canonical name which has the form:
+
+     CPU-COMPANY-SYSTEM
+
+where SYSTEM can have one of these forms:
+
+     OS KERNEL-OS
+
+   See the file `config.sub' for the possible values of each field.  If
+`config.sub' isn't included in this package, then this package doesn't
+need to know the machine type.
+
+   If you are _building_ compiler tools for cross-compiling, you should
+use the option `--target=TYPE' to select the type of system they will
+produce code for.
+
+   If you want to _use_ a cross compiler, that generates code for a
+platform different from the build platform, you should specify the
+"host" platform (i.e., that on which the generated programs will
+eventually be run) with `--host=TYPE'.
+
+Sharing Defaults
+================
+
+   If you want to set default values for `configure' scripts to share,
+you can create a site shell script called `config.site' that gives
+default values for variables like `CC', `cache_file', and `prefix'.
+`configure' looks for `PREFIX/share/config.site' if it exists, then
+`PREFIX/etc/config.site' if it exists.  Or, you can set the
+`CONFIG_SITE' environment variable to the location of the site script.
+A warning: not all `configure' scripts look for a site script.
+
+Defining Variables
+==================
+
+   Variables not defined in a site shell script can be set in the
+environment passed to `configure'.  However, some packages may run
+configure again during the build, and the customized values of these
+variables may be lost.  In order to avoid this problem, you should set
+them in the `configure' command line, using `VAR=value'.  For example:
+
+     ./configure CC=/usr/local2/bin/gcc
+
+causes the specified `gcc' to be used as the C compiler (unless it is
+overridden in the site shell script).
+ +Unfortunately, this technique does not work for `CONFIG_SHELL' due to +an Autoconf bug. Until the bug is fixed you can use this workaround: + + CONFIG_SHELL=/bin/bash /bin/bash ./configure CONFIG_SHELL=/bin/bash + +`configure' Invocation +====================== + + `configure' recognizes the following options to control how it +operates. + +`--help' +`-h' + Print a summary of all of the options to `configure', and exit. + +`--help=short' +`--help=recursive' + Print a summary of the options unique to this package's + `configure', and exit. The `short' variant lists options used + only in the top level, while the `recursive' variant lists options + also present in any nested packages. + +`--version' +`-V' + Print the version of Autoconf used to generate the `configure' + script, and exit. + +`--cache-file=FILE' + Enable the cache: use and save the results of the tests in FILE, + traditionally `config.cache'. FILE defaults to `/dev/null' to + disable caching. + +`--config-cache' +`-C' + Alias for `--cache-file=config.cache'. + +`--quiet' +`--silent' +`-q' + Do not print messages saying which checks are being made. To + suppress all normal output, redirect it to `/dev/null' (any error + messages will still be shown). + +`--srcdir=DIR' + Look for the package's source code in directory DIR. Usually + `configure' can determine that directory automatically. + +`--prefix=DIR' + Use DIR as the installation prefix. *Note Installation Names:: + for more details, including other options available for fine-tuning + the installation locations. + +`--no-create' +`-n' + Run the configure checks, but stop before creating any output + files. + +`configure' also accepts some other, not widely useful, options. Run +`configure --help' for more details. + diff --git a/mix_common/Makefile.am b/mix_common/Makefile.am new file mode 100644 index 0000000..f5b19ff --- /dev/null +++ b/mix_common/Makefile.am @@ -0,0 +1,10 @@ +SUBDIRS = src + +#ACLOCAL_AMFLAGS=-I m4 +#Uncomment the following line if building documentation using gtkdoc +#SUBDIRS += docs + +pkgconfigdir = $(libdir)/pkgconfig +pkgconfig_DATA=mixcommon.pc +EXTRA_DIST = autogen.sh m4 +DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc diff --git a/mix_common/NEWS b/mix_common/NEWS new file mode 100644 index 0000000..4327969 --- /dev/null +++ b/mix_common/NEWS @@ -0,0 +1 @@ +news diff --git a/mix_common/README b/mix_common/README new file mode 100644 index 0000000..8178c76 --- /dev/null +++ b/mix_common/README @@ -0,0 +1 @@ +readme diff --git a/mix_common/autogen.sh b/mix_common/autogen.sh new file mode 100644 index 0000000..e123d49 --- /dev/null +++ b/mix_common/autogen.sh @@ -0,0 +1,8 @@ +package=MixCommon + +aclocal -I m4/ $ACLOCAL_FLAGS || exit 1 +libtoolize --copy --force || exit 1 +autoheader -v || exit 1 +autoconf -v || exit 1 +automake -a -c -v || exit 1 +#autoreconf -v --install diff --git a/mix_common/configure.ac b/mix_common/configure.ac new file mode 100644 index 0000000..2165138 --- /dev/null +++ b/mix_common/configure.ac @@ -0,0 +1,39 @@ +AC_INIT("", "", [khanh.v.nguyen@intel.com]) + +AC_CONFIG_MACRO_DIR(m4) + +AS_MIX_VERSION(mixcommon, MIXCOMMON, 0, 1, 7) + +AM_INIT_AUTOMAKE($PACKAGE, $VERSION) +#AM_INIT_AUTOMAKE([-Wall -Werror foreign]) + +AC_PROG_CC +AC_PROG_LIBTOOL + +AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes , no) + +dnl Give error and exit if we don't have pkgconfig +if test "x$HAVE_PKGCONFIG" = "xno"; then + AC_MSG_ERROR(you need to have pkgconfig installed !) 
+fi + +GLIB_REQ=2.16 +dnl Check for glib2 without extra fat, useful for the unversioned tool frontends +dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) +PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) +if test "x$HAVE_GLIB" = "xno"; then + AC_MSG_ERROR(You need glib development packages installed !) +fi + +PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no) +if test "x$HAVE_GOBJECT" = "xno"; then + AC_MSG_ERROR(You need glib development packages installed !) +fi + +AC_CONFIG_HEADERS([config.h]) +AC_CONFIG_FILES([ + mixcommon.pc + Makefile + src/Makefile +]) +AC_OUTPUT diff --git a/mix_common/m4/as-mix-version.m4 b/mix_common/m4/as-mix-version.m4 new file mode 100644 index 0000000..8b09d7c --- /dev/null +++ b/mix_common/m4/as-mix-version.m4 @@ -0,0 +1,35 @@ +dnl as-mix-version.m4 + +dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE) + +dnl example +dnl AS_MIX_VERSION(mixaudio,MIXAUDIO, 0, 3, 2,) +dnl for a 0.3.2 release version + +dnl this macro +dnl - defines [$PREFIX]_MAJOR, MINOR and REVISION, CURRENT, AGE +dnl - defines [$PREFIX], VERSION +dnl - AC_SUBST's all defined vars + +AC_DEFUN([AS_MIX_VERSION], +[ + PACKAGE=[$1] + [$2]_MAJOR=[$3] + [$2]_MINOR=[$4] + [$2]_REVISION=[$5] + [$2]_CURRENT=m4_eval([$3] + [$4]) + [$2]_AGE=[$4] + VERSION=[$3].[$4].[$5] + + AC_SUBST([$2]_MAJOR) + AC_SUBST([$2]_MINOR) + AC_SUBST([$2]_REVISION) + AC_SUBST([$2]_CURRENT) + AC_SUBST([$2]_AGE) + + AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Define the package name]) + AC_SUBST(PACKAGE) + AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Define the version]) + AC_SUBST(VERSION) + +]) diff --git a/mix_common/mixcommon.pc.in b/mix_common/mixcommon.pc.in new file mode 100644 index 0000000..05ef285 --- /dev/null +++ b/mix_common/mixcommon.pc.in @@ -0,0 +1,11 @@ +prefix=@prefix@ +exec_prefix=@exec_prefix@ +libdir=@libdir@ +includedir=@includedir@ + +Name: MI-X Library - Common +Description: Common library for MI-X +Requires: +Version: @VERSION@ +Libs: -L${libdir} -l@PACKAGE@ +Cflags: -I${includedir} diff --git a/mix_common/mixcommon.spec b/mix_common/mixcommon.spec new file mode 100644 index 0000000..46f900a --- /dev/null +++ b/mix_common/mixcommon.spec @@ -0,0 +1,43 @@ +Summary: MIX Common +Name: mixcommon +Version: 0.1.7 +Release: 1 +Source0: %{name}-%{version}.tar.gz +NoSource: 0 +License: Proprietary +Group: System Environment/Libraries +BuildRoot: %{_tmppath}/%{name}-root +ExclusiveArch: i586 + +%description +MIX Common contains common classes, datatype, header files used by other MIX components + +%package devel +Summary: Libraries include files +Group: Development/Libraries +Requires: %{name} = %{version} + +%description devel +The %{name}-devel package contains the header files and static libraries for building applications which use %{name}. + +%prep +%setup -q +%build +./autogen.sh +./configure --prefix=%{_prefix} +make +%install +rm -rf $RPM_BUILD_ROOT +make DESTDIR=$RPM_BUILD_ROOT install +%clean +rm -rf $RPM_BUILD_ROOT +%files +%defattr(-,root,root) +%{_prefix}/lib/*.so* + +%files devel +%defattr(-,root,root) +%{_prefix}/include +%{_prefix}/lib/*.la +%{_prefix}/lib/pkgconfig/mixcommon.pc +%doc COPYING diff --git a/mix_common/src/Makefile.am b/mix_common/src/Makefile.am new file mode 100644 index 0000000..199c509 --- /dev/null +++ b/mix_common/src/Makefile.am @@ -0,0 +1,23 @@ +#INTEL CONFIDENTIAL +#Copyright 2009 Intel Corporation All Rights Reserved. 
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +# + +lib_LTLIBRARIES = libmixcommon.la + +############################################################################## +# sources used to compile +libmixcommon_la_SOURCES = mixparams.c mixlog.c mixdrmparams.c + +# flags used to compile this plugin +# add other _CFLAGS and _LIBS as needed +libmixcommon_la_CFLAGS = $(GLIB_CFLAGS) $(GOBJECT_CFLAGS) +libmixcommon_la_LIBADD = $(GLIB_LIBS) $(GOBJECT_LIBS) +libmixcommon_la_LDFLAGS = $(GLIB_LIBS) $(GOBJECT_LIBS) -version-info @MIXCOMMON_CURRENT@:@MIXCOMMON_REVISION@:@MIXCOMMON_AGE@ +libmixcommon_la_LIBTOOLFLAGS = --tag=disable-static + +include_HEADERS = mixparams.h mixresult.h mixlog.h mixdrmparams.h +#mixcommonincludedir = $(includedir) +#mixcommoninclude_HEADERS = mixparams.h mixresult.h diff --git a/mix_common/src/mixdrmparams.c b/mix_common/src/mixdrmparams.c new file mode 100644 index 0000000..82e3f39 --- /dev/null +++ b/mix_common/src/mixdrmparams.c @@ -0,0 +1,163 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixdrmparams + * @short_description: Drm parameters + * + * A data object which stores drm specific parameters. 
+ */
+
+#include "mixdrmparams.h"
+
+static GType _mix_drmparams_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_drmparams_type = g_define_type_id; }
+
+gboolean mix_drmparams_copy(MixParams* target, const MixParams *src);
+MixParams* mix_drmparams_dup(const MixParams *obj);
+gboolean mix_drmparams_equal(MixParams* first, MixParams *second);
+static void mix_drmparams_finalize(MixParams *obj);
+
+G_DEFINE_TYPE_WITH_CODE(MixDrmParams, mix_drmparams, MIX_TYPE_PARAMS, _do_init);
+
+void
+_mix_drmparams_initialize (void)
+{
+  /* the MixParams types need to be class_ref'd once before it can be
+   * done from multiple threads;
+   * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */
+  g_type_class_ref (mix_drmparams_get_type ());
+}
+
+static void mix_drmparams_init (MixDrmParams *self)
+{
+}
+
+static void mix_drmparams_class_init(MixDrmParamsClass *klass)
+{
+  MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+  /* setup static parent class */
+  parent_class = (MixParamsClass *) g_type_class_peek_parent (klass);
+
+  mixparams_class->finalize = mix_drmparams_finalize;
+  mixparams_class->copy = (MixParamsCopyFunction)mix_drmparams_copy;
+  mixparams_class->dup = (MixParamsDupFunction)mix_drmparams_dup;
+  mixparams_class->equal = (MixParamsEqualFunction)mix_drmparams_equal;
+}
+
+MixDrmParams *mix_drmparams_new(void)
+{
+  MixDrmParams *ret = (MixDrmParams *)g_type_create_instance (MIX_TYPE_DRMPARAMS);
+
+  return ret;
+}
+
+void mix_drmparams_finalize(MixParams *obj)
+{
+  /* clean up here. */
+
+  /* Chain up parent */
+  if (parent_class->finalize)
+    parent_class->finalize(obj);
+}
+
+MixDrmParams *mix_drmparams_ref(MixDrmParams *mix)
+{
+  return (MixDrmParams*)mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_drmparams_dup:
+ * @obj: a #MixDrmParams object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Creates a duplicate of the object.
+ */
+MixParams* mix_drmparams_dup(const MixParams *obj)
+{
+  MixParams *ret = NULL;
+
+  if (MIX_IS_DRMPARAMS(obj))
+  {
+    MixDrmParams *duplicate = mix_drmparams_new();
+    if (mix_drmparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj)))
+    {
+      ret = MIX_PARAMS(duplicate);
+    }
+    else
+    {
+      mix_drmparams_unref(duplicate);
+    }
+  }
+
+  return ret;
+}
+
+/**
+ * mix_drmparams_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicating whether the copy was successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_drmparams_copy(MixParams* target, const MixParams *src)
+{
+  if (MIX_IS_DRMPARAMS(target) && MIX_IS_DRMPARAMS(src))
+  {
+    // TODO perform copy.
+    //
+    // Now chain up to the base class.
+    // Get the root class from the cached parent_class object. This cached parent_class object has not been overwritten by this current class.
+    // Using the cached parent_class object because this_class would have ->copy pointing to this method!
+    // Cached parent_class contains the class object before it is overwritten by this derived class.
+    // MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+    if (parent_class->copy)
+    {
+      return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src));
+    }
+    else
+      return TRUE;
+  }
+  return FALSE;
+}
+
+/**
+ * mix_drmparams_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the two instances hold equal data.
+ *
+ * Compares @first and @second for equality.
+ */
+gboolean mix_drmparams_equal(MixParams* first, MixParams *second)
+{
+  /* objects of a different type are never equal */
+  gboolean ret = FALSE;
+
+  if (MIX_IS_DRMPARAMS(first) && MIX_IS_DRMPARAMS(second))
+  {
+    ret = TRUE;
+
+    // TODO: do deep compare
+
+    if (ret)
+    {
+      // members within this scope equal. chaining up.
+      MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+      if (klass->equal)
+        ret = parent_class->equal(first, second);
+      else
+        ret = TRUE;
+    }
+  }
+
+  return ret;
+}
+
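An illustrative sketch (not part of this commit) of the minimal calling pattern for the class above, using only the API declared in mixdrmparams.h; g_type_init() is the stock GObject type-system initializer required by GLib of this vintage (before 2.36):

    #include "mixdrmparams.h"

    int main (int argc, char **argv)
    {
      g_type_init();   /* initialize the GObject type system (GLib < 2.36) */

      MixDrmParams *drm = mix_drmparams_new();
      MixDrmParams *alias = mix_drmparams_ref(drm);  /* refcount: 1 -> 2 */

      mix_drmparams_unref(alias);   /* refcount: 2 -> 1 */
      mix_drmparams_unref(drm);     /* refcount: 1 -> 0, the finalize chain runs */

      return 0;
    }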
diff --git a/mix_common/src/mixdrmparams.h b/mix_common/src/mixdrmparams.h
new file mode 100644
index 0000000..7ef82fb
--- /dev/null
+++ b/mix_common/src/mixdrmparams.h
@@ -0,0 +1,123 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_DRMPARAMS_H__
+#define __MIX_DRMPARAMS_H__
+
+
+#include "mixparams.h"
+
+/**
+ * MIX_TYPE_DRMPARAMS:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_DRMPARAMS (mix_drmparams_get_type ())
+
+/**
+ * MIX_DRMPARAMS:
+ * @obj: object to be type-casted.
+ */
+#define MIX_DRMPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DRMPARAMS, MixDrmParams))
+
+/**
+ * MIX_IS_DRMPARAMS:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixDrmParams
+ */
+#define MIX_IS_DRMPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DRMPARAMS))
+
+/**
+ * MIX_DRMPARAMS_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_DRMPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DRMPARAMS, MixDrmParamsClass))
+
+/**
+ * MIX_IS_DRMPARAMS_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixDrmParamsClass
+ */
+#define MIX_IS_DRMPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DRMPARAMS))
+
+/**
+ * MIX_DRMPARAMS_GET_CLASS:
+ * @obj: a #MixDrmParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_DRMPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DRMPARAMS, MixDrmParamsClass))
+
+typedef struct _MixDrmParams MixDrmParams;
+typedef struct _MixDrmParamsClass MixDrmParamsClass;
+
+/**
+ * MixDrmParams:
+ *
+ * MI-X Drm Parameter object
+ */
+struct _MixDrmParams
+{
+  /*< public >*/
+  MixParams parent;
+
+  /*< public >*/
+};
+
+/**
+ * MixDrmParamsClass:
+ *
+ * MI-X Drm object class
+ */
+struct _MixDrmParamsClass
+{
+  /*< public >*/
+  MixParamsClass parent_class;
+
+  /* class members */
+};
+
+/**
+ * mix_drmparams_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */ +GType mix_drmparams_get_type (void); + +/** + * mix_drmparams_new: + * @returns: A newly allocated instance of #MixDrmParams + * + * Use this method to create new instance of #MixDrmParams + */ +MixDrmParams *mix_drmparams_new(void); + +/** + * mix_drmparams_ref: + * @mix: object to add reference + * @returns: the MixDrmParams instance where reference count has been increased. + * + * Add reference count. + */ +MixDrmParams *mix_drmparams_ref(MixDrmParams *mix); + +/** + * mix_drmparams_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +#define mix_drmparams_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + + +#endif /* __MIX_DRMPARAMS_H__ */ diff --git a/mix_common/src/mixlog.c b/mix_common/src/mixlog.c new file mode 100644 index 0000000..a9dd359 --- /dev/null +++ b/mix_common/src/mixlog.c @@ -0,0 +1,257 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include <glib/gprintf.h>
+#include "mixlog.h"
+
+#define MIX_DELOG_COMPS "MIX_DELOG_COMPS"
+#define MIX_DELOG_FILES "MIX_DELOG_FILES"
+#define MIX_DELOG_FUNCS "MIX_DELOG_FUNCS"
+#define MIX_LOG_ENABLE "MIX_LOG_ENABLE"
+#define MIX_DELOG_DELIMITERS " ,;"
+
+#define MIX_LOG_LEVEL "MIX_LOG_LEVEL"
+
+static GStaticMutex g_mutex = G_STATIC_MUTEX_INIT;
+
+#ifdef MIX_LOG_USE_HT
+static GHashTable *g_defile_ht = NULL, *g_defunc_ht = NULL, *g_decom_ht = NULL;
+static gint g_mix_log_level = MIX_LOG_LEVEL_VERBOSE;
+static gint g_refcount = 0;
+
+#define mix_log_destroy_ht(ht) if(ht) { g_hash_table_destroy(ht); ht = NULL; }
+
+void mix_log_get_ht(GHashTable **ht, const gchar *var) {
+
+    const char *delog_list = NULL;
+    char *list_copy = NULL;
+    char *item = NULL;
+    if (!ht || !var) {
+        return;
+    }
+
+    delog_list = g_getenv(var);
+    if (!delog_list) {
+        return;
+    }
+
+    if (*ht == NULL) {
+        *ht = g_hash_table_new(g_str_hash, g_str_equal);
+        if (*ht == NULL) {
+            return;
+        }
+    }
+
+    /* strtok() writes into the buffer it scans, and the string returned by
+     * g_getenv() must not be modified, so tokenize a private copy. The copy
+     * is intentionally never freed: the hash table keys point into it. */
+    list_copy = g_strdup(delog_list);
+    item = strtok(list_copy, MIX_DELOG_DELIMITERS);
+    while (item != NULL) {
+        g_hash_table_insert(*ht, item, "true");
+        item = strtok(NULL, MIX_DELOG_DELIMITERS);
+    }
+}
+
+void mix_log_initialize_func() {
+
+    const gchar *mix_log_level = NULL;
+    g_static_mutex_lock(&g_mutex);
+
+    if (g_refcount == 0) {
+
+        mix_log_level = g_getenv(MIX_LOG_LEVEL);
+        if (mix_log_level) {
+            g_mix_log_level = atoi(mix_log_level);
+        }
+
+        mix_log_get_ht(&g_decom_ht, MIX_DELOG_COMPS);
+        mix_log_get_ht(&g_defile_ht, MIX_DELOG_FILES);
+        mix_log_get_ht(&g_defunc_ht, MIX_DELOG_FUNCS);
+    }
+
+    g_refcount++;
+
+    g_static_mutex_unlock(&g_mutex);
+}
+
+void mix_log_finalize_func() {
+
+    g_static_mutex_lock(&g_mutex);
+
+    g_refcount--;
+
+    if (g_refcount == 0) {
+        mix_log_destroy_ht(g_decom_ht);
+        mix_log_destroy_ht(g_defile_ht);
+        mix_log_destroy_ht(g_defunc_ht);
+
+        g_mix_log_level = MIX_LOG_LEVEL_VERBOSE;
+    }
+
+    if (g_refcount < 0) {
+        g_refcount = 0;
+    }
+
+    g_static_mutex_unlock(&g_mutex);
+}
+
+void mix_log_func(const gchar* comp, gint level, const gchar *file,
+        const gchar *func, gint line, const gchar *format, ...) {
+
+    va_list args;
+    static gchar* loglevel[4] = {"**ERROR", "*WARNING", "INFO", "VERBOSE"};
+
+    if (!format) {
+        return;
+    }
+
+    g_static_mutex_lock(&g_mutex);
+
+    if (level > g_mix_log_level) {
+        goto exit;
+    }
+
+    if (g_decom_ht) {
+        if (g_hash_table_lookup(g_decom_ht, comp)) {
+            goto exit;
+        }
+    }
+
+    if (g_defile_ht) {
+        if (g_hash_table_lookup(g_defile_ht, file)) {
+            goto exit;
+        }
+    }
+
+    if (g_defunc_ht) {
+        if (g_hash_table_lookup(g_defunc_ht, func)) {
+            goto exit;
+        }
+    }
+
+    if(level > MIX_LOG_LEVEL_VERBOSE) {
+        level = MIX_LOG_LEVEL_VERBOSE;
+    }
+    if(level < MIX_LOG_LEVEL_ERROR) {
+        level = MIX_LOG_LEVEL_ERROR;
+    }
+
+    g_print("%s : %s : %s : ", loglevel[level - 1], file, func);
+
+    va_start(args, format);
+    g_vprintf(format, args);
+    va_end(args);
+
+exit:
+    g_static_mutex_unlock(&g_mutex);
+}
+
+#else /* MIX_LOG_USE_HT */
+
+gboolean mix_shall_delog(const gchar *name, const gchar *var) {
+
+    const char *delog_list = NULL;
+    char *list_copy = NULL;
+    char *item = NULL;
+    gboolean delog = FALSE;
+
+    if (!name || !var) {
+        return delog;
+    }
+
+    delog_list = g_getenv(var);
+    if (!delog_list) {
+        return delog;
+    }
+
+    /* strtok() modifies the string it scans; never run it directly on the
+     * environment's buffer, use a scratch duplicate instead. */
+    list_copy = g_strdup(delog_list);
+    item = strtok(list_copy, MIX_DELOG_DELIMITERS);
+    while (item != NULL) {
+        if (strcmp(item, name) == 0) {
+            delog = TRUE;
+            break;
+        }
+        item = strtok(NULL, MIX_DELOG_DELIMITERS);
+    }
+    g_free(list_copy);
+
+    return delog;
+}
+
+gboolean mix_log_enabled() {
+
+    const char *value = NULL;
+    value = g_getenv(MIX_LOG_ENABLE);
+    if(!value) {
+        return FALSE;
+    }
+
+    if(value[0] == '0') {
+        return FALSE;
+    }
+    return TRUE;
+}
+
+void mix_log_func(const gchar* comp, gint level, const gchar *file,
+        const gchar *func, gint line, const gchar *format, ...) {
+
+    va_list args;
+    static gchar* loglevel[4] = { "**ERROR", "*WARNING", "INFO", "VERBOSE" };
+
+    const gchar *env_mix_log_level = NULL;
+    gint mix_log_level_threshold = MIX_LOG_LEVEL_VERBOSE;
+
+    if(!mix_log_enabled()) {
+        return;
+    }
+
+    if (!format) {
+        return;
+    }
+
+    g_static_mutex_lock(&g_mutex);
+
+    /* log level */
+    env_mix_log_level = g_getenv(MIX_LOG_LEVEL);
+    if (env_mix_log_level) {
+        mix_log_level_threshold = atoi(env_mix_log_level);
+    }
+
+    if (level > mix_log_level_threshold) {
+        goto exit;
+    }
+
+    /* component */
+    if (mix_shall_delog(comp, MIX_DELOG_COMPS)) {
+        goto exit;
+    }
+
+    /* files */
+    if (mix_shall_delog(file, MIX_DELOG_FILES)) {
+        goto exit;
+    }
+
+    /* functions */
+    if (mix_shall_delog(func, MIX_DELOG_FUNCS)) {
+        goto exit;
+    }
+
+    if (level > MIX_LOG_LEVEL_VERBOSE) {
+        level = MIX_LOG_LEVEL_VERBOSE;
+    }
+    if (level < MIX_LOG_LEVEL_ERROR) {
+        level = MIX_LOG_LEVEL_ERROR;
+    }
+
+    g_print("%s : %s : %s : ", loglevel[level - 1], file, func);
+
+    va_start(args, format);
+    g_vprintf(format, args);
+    va_end(args);
+
+exit:
+    g_static_mutex_unlock(&g_mutex);
+}
+
+
+#endif /* MIX_LOG_USE_HT */
+
+
diff --git a/mix_common/src/mixlog.h b/mix_common/src/mixlog.h
new file mode 100644
index 0000000..2fe60fd
--- /dev/null
+++ b/mix_common/src/mixlog.h
@@ -0,0 +1,47 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#include <glib.h>
+
+#ifndef __MIX_LOG_H__
+#define __MIX_LOG_H__
+
+/* Warning: do not call these functions directly; use the mix_log() macro below */
+void mix_log_func(const gchar* comp, gint level, const gchar *file,
+        const gchar *func, gint line, const gchar *format, ...);
+
+/* Components */
+#define MIX_VIDEO_COMP "mixvideo"
+#define GST_MIX_VIDEO_DEC_COMP "gstmixvideodec"
+#define GST_MIX_VIDEO_SINK_COMP "gstmixvideosink"
+#define GST_MIX_VIDEO_ENC_COMP "gstmixvideoenc"
+
+#define MIX_AUDIO_COMP "mixaudio"
+#define GST_MIX_AUDIO_DEC_COMP "gstmixaudiodec"
+#define GST_MIX_AUDIO_SINK_COMP "gstmixaudiosink"
+
+/* log level */
+#define MIX_LOG_LEVEL_ERROR 1
+#define MIX_LOG_LEVEL_WARNING 2
+#define MIX_LOG_LEVEL_INFO 3
+#define MIX_LOG_LEVEL_VERBOSE 4
+
+
+/* MACROS for mixlog */
+#ifdef MIX_LOG_ENABLE
+
+#define mix_log(comp, level, format, ...) \
+    mix_log_func(comp, level, __FILE__, __FUNCTION__, __LINE__, format, ##__VA_ARGS__)
+
+#else
+
+#define mix_log(comp, level, format, ...)
+
+#endif
+
+#endif
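An illustrative sketch (not part of this commit) of how a client uses this header. It assumes the code is built with -DMIX_LOG_ENABLE; otherwise mix_log() expands to nothing. Note that the name MIX_LOG_ENABLE does double duty in the sources above: it is the compile-time switch tested by this header and also the name of the run-time environment variable read by mixlog.c.

    #include "mixlog.h"

    static void decode_frame(gint frame)
    {
        /* expands to mix_log_func(..., __FILE__, __FUNCTION__, __LINE__, ...) */
        mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "decoding frame %d\n", frame);
    }

Filtering is then controlled entirely from the environment: for the non-hash-table build, MIX_LOG_ENABLE=1 MIX_LOG_LEVEL=3 prints INFO and below while suppressing VERBOSE, and MIX_DELOG_COMPS=mixaudio silences this component entirely.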
diff --git a/mix_common/src/mixparams.c b/mix_common/src/mixparams.c
new file mode 100644
index 0000000..2f8f8f6
--- /dev/null
+++ b/mix_common/src/mixparams.c
@@ -0,0 +1,274 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+ * SECTION:mixparams
+ * @short_description: Lightweight base class for the MIX media params
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "mixparams.h"
+#include <glib-object.h>
+
+
+#define DEBUG_REFCOUNT
+
+static void mix_params_class_init (gpointer g_class, gpointer class_data);
+static void mix_params_init (GTypeInstance * instance, gpointer klass);
+
+static void mix_params_finalize(MixParams * obj);
+static gboolean mix_params_copy_default (MixParams *target, const MixParams *src);
+static MixParams *mix_params_dup_default(const MixParams *obj);
+static gboolean mix_params_equal_default (MixParams *first, MixParams *second);
+
+GType mix_params_get_type (void)
+{
+  static GType _mix_params_type = 0;
+
+  if (G_UNLIKELY (_mix_params_type == 0)) {
+
+    GTypeInfo info = {
+      sizeof (MixParamsClass),
+      NULL,
+      NULL,
+      mix_params_class_init,
+      NULL,
+      NULL,
+      sizeof (MixParams),
+      0,
+      (GInstanceInitFunc) mix_params_init,
+      NULL
+    };
+
+    static const GTypeFundamentalInfo fundamental_info = {
+      (G_TYPE_FLAG_CLASSED | G_TYPE_FLAG_INSTANTIATABLE |
+       G_TYPE_FLAG_DERIVABLE | G_TYPE_FLAG_DEEP_DERIVABLE)
+    };
+
+    info.value_table = NULL;
+
+    _mix_params_type = g_type_fundamental_next ();
+    g_type_register_fundamental (_mix_params_type, "MixParams", &info, &fundamental_info, G_TYPE_FLAG_ABSTRACT);
+
+  }
+
+  return _mix_params_type;
+}
+
+static void mix_params_class_init (gpointer g_class, gpointer class_data)
+{
+  MixParamsClass *klass = MIX_PARAMS_CLASS (g_class);
+
+  klass->dup = mix_params_dup_default;
+  klass->copy = mix_params_copy_default;
+  klass->finalize = mix_params_finalize;
+  klass->equal = mix_params_equal_default;
+}
+
+static void mix_params_init (GTypeInstance * instance, gpointer klass)
+{
+  MixParams *obj = MIX_PARAMS_CAST (instance);
+
+  obj->refcount = 1;
+}
+
+gboolean mix_params_copy (MixParams *target, const MixParams *src)
+{
+  /* Use the target object class. Because it knows what it is looking for. */
+  MixParamsClass *klass = MIX_PARAMS_GET_CLASS(target);
+  if (klass->copy)
+  {
+    return klass->copy(target, src);
+  }
+  else
+  {
+    return mix_params_copy_default(target, src);
+  }
+}
+
+/**
+ * mix_params_copy_default:
+ * @target: target
+ * @src: source
+ *
+ * The default copy method of this object. Performs the copy at this level.
+ * Assign this to the copy vmethod.
+ */
+static gboolean mix_params_copy_default (MixParams *target, const MixParams *src)
+{
+  if (MIX_IS_PARAMS(target) && MIX_IS_PARAMS(src))
+  {
+    // TODO perform deep copy.
+    return TRUE;
+  }
+  return FALSE;
+}
+
+static void mix_params_finalize (MixParams * obj)
+{
+  /* do nothing */
+}
+
+MixParams *mix_params_dup(const MixParams *obj)
+{
+  MixParamsClass *klass = MIX_PARAMS_GET_CLASS(obj);
+
+  if (klass->dup)
+  {
+    return klass->dup(obj);
+  }
+  else if (MIX_IS_PARAMS(obj))
+  {
+    return mix_params_dup_default(obj);
+  }
+  return NULL;
+}
+
+static MixParams *mix_params_dup_default(const MixParams *obj)
+{
+  /* create a fresh instance of the same concrete type; mix_params_new()
+   * expects the GType of the instance to allocate */
+  MixParams *ret = mix_params_new (G_TYPE_FROM_INSTANCE (obj));
+  if (mix_params_copy(ret, obj))
+  {
+    return ret;
+  }
+
+  return NULL;
+}
+
+MixParams* mix_params_new (GType type)
+{
+  MixParams *obj;
+
+  /* we don't support dynamic types because they really aren't useful,
+   * and could cause refcount problems */
+  obj = (MixParams *) g_type_create_instance (type);
+
+  return obj;
+}
+
+MixParams* mix_params_ref (MixParams *obj)
+{
+  g_return_val_if_fail(MIX_IS_PARAMS (obj), NULL);
+
+  g_atomic_int_inc(&obj->refcount);
+
+  return obj;
+}
+
+static void mix_params_free(MixParams *obj)
+{
+  MixParamsClass *klass = NULL;
+
+  klass = MIX_PARAMS_GET_CLASS(obj);
+  klass->finalize(obj);
+
+  /* Should we support recycling the object? */
+  /* If so, refcount handling is slightly different. */
+  /* i.e. If the refcount is still 0 we can really free the object, else the finalize method recycled the object -- but to where? */
+
+  if (g_atomic_int_get (&obj->refcount) == 0) {
+
+    g_type_free_instance ((GTypeInstance *) obj);
+  }
+}
+
+void mix_params_unref (MixParams *obj)
+{
+  g_return_if_fail (obj != NULL);
+  g_return_if_fail (obj->refcount > 0);
+
+  if (G_UNLIKELY (g_atomic_int_dec_and_test (&obj->refcount))) {
+    mix_params_free (obj);
+  }
+}
+
+/**
+ * mix_params_replace:
+ * @olddata: pointer to a pointer to an object to be replaced
+ * @newdata: pointer to new object
+ *
+ * Modifies a pointer to point to a new object. The modification
+ * is done atomically, and the reference counts are updated correctly.
+ * Either @newdata or the value pointed to by @olddata may be NULL.
+ */
+void mix_params_replace (MixParams **olddata, MixParams *newdata)
+{
+  MixParams *olddata_val;
+
+  g_return_if_fail (olddata != NULL);
+
+  olddata_val = g_atomic_pointer_get ((gpointer *) olddata);
+
+  if (olddata_val == newdata)
+    return;
+
+  if (newdata)
+    mix_params_ref (newdata);
+
+  while (!g_atomic_pointer_compare_and_exchange ((gpointer *) olddata, olddata_val, newdata))
+  {
+    olddata_val = g_atomic_pointer_get ((gpointer *) olddata);
+  }
+
+  if (olddata_val)
+    mix_params_unref (olddata_val);
+
+}
+
+gboolean mix_params_equal (MixParams *first, MixParams *second)
+{
+  if (MIX_IS_PARAMS(first))
+  {
+    MixParamsClass *klass = MIX_PARAMS_GET_CLASS(first);
+
+    if (klass->equal)
+    {
+      return klass->equal(first, second);
+    }
+    else
+    {
+      return mix_params_equal_default(first, second);
+    }
+  }
+  else
+    return FALSE;
+}
+
+static gboolean mix_params_equal_default (MixParams *first, MixParams *second)
+{
+  if (MIX_IS_PARAMS(first) && MIX_IS_PARAMS(second))
+  {
+    gboolean ret = TRUE;
+
+    // Do data comparison here.
+
+    return ret;
+  }
+  else
+    return FALSE;
+}
+
+/**
+ * mix_value_dup_params:
+ * @value: a valid #GValue of %MIX_TYPE_PARAMS derived type
+ * @returns: object contents of @value
+ *
+ * Get the contents of a #MIX_TYPE_PARAMS derived #GValue,
+ * increasing its reference count.
+ */
+MixParams* mix_value_dup_params (const GValue * value)
+{
+  g_return_val_if_fail (MIX_VALUE_HOLDS_PARAMS (value), NULL);
+
+  return mix_params_ref (value->data[0].v_pointer);
+}
+
+
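mix_params_replace() is the helper above whose intent is easiest to miss: it atomically swaps an object slot, referencing the new value and unreferencing the old one. A sketch of its intended use follows; it is illustrative only, with MixDrmParams standing in as a concrete subclass and current_params as a hypothetical shared slot, neither of which this commit prescribes:

    #include "mixdrmparams.h"

    static MixParams *current_params = NULL;  /* a slot other threads may read */

    static void set_current_params(MixDrmParams *p)
    {
        /* refs p, atomically stores it, then unrefs whatever was in the slot */
        mix_params_replace(&current_params, MIX_PARAMS(p));
    }

    static void clear_current_params(void)
    {
        mix_params_replace(&current_params, NULL);  /* drops the slot's reference */
    }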
diff --git a/mix_common/src/mixparams.h b/mix_common/src/mixparams.h
new file mode 100644
index 0000000..75d4051
--- /dev/null
+++ b/mix_common/src/mixparams.h
@@ -0,0 +1,202 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_PARAMS_H__
+#define __MIX_PARAMS_H__
+
+#include <glib-object.h>
+
+G_BEGIN_DECLS
+
+#define MIX_TYPE_PARAMS (mix_params_get_type())
+#define MIX_IS_PARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_PARAMS))
+#define MIX_IS_PARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_PARAMS))
+#define MIX_PARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_PARAMS, MixParamsClass))
+#define MIX_PARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_PARAMS, MixParams))
+#define MIX_PARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_PARAMS, MixParamsClass))
+#define MIX_PARAMS_CAST(obj) ((MixParams*)(obj))
+
+typedef struct _MixParams MixParams;
+typedef struct _MixParamsClass MixParamsClass;
+
+/**
+ * MixParamsDupFunction:
+ * @obj: Params to duplicate
+ * @returns: reference to cloned instance.
+ *
+ * Virtual function prototype for methods to create a duplicate of an instance.
+ *
+ */
+typedef MixParams * (*MixParamsDupFunction) (const MixParams *obj);
+
+/**
+ * MixParamsCopyFunction:
+ * @target: target of the copy
+ * @src: source of the copy
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Virtual function prototype for methods to create copies of an instance.
+ *
+ */
+typedef gboolean (*MixParamsCopyFunction) (MixParams* target, const MixParams *src);
+
+/**
+ * MixParamsFinalizeFunction:
+ * @obj: Params to finalize
+ *
+ * Virtual function prototype for methods to free resources used by the
+ * object.
+ */
+typedef void (*MixParamsFinalizeFunction) (MixParams *obj);
+
+/**
+ * MixParamsEqualFunction:
+ * @first: first object in the comparison
+ * @second: second object in the comparison
+ *
+ * Virtual function prototype for methods to compare 2 objects and check if they are equal.
+ */
+typedef gboolean (*MixParamsEqualFunction) (MixParams *first, MixParams *second);
+
+/**
+ * MIX_VALUE_HOLDS_PARAMS:
+ * @value: the #GValue to check
+ *
+ * Checks if the given #GValue contains a #MIX_TYPE_PARAMS value.
+ */
+#define MIX_VALUE_HOLDS_PARAMS(value) (G_VALUE_HOLDS(value, MIX_TYPE_PARAMS))
+
+/**
+ * MIX_PARAMS_REFCOUNT:
+ * @obj: a #MixParams
+ *
+ * Get access to the reference count field of the object.
+ */
+#define MIX_PARAMS_REFCOUNT(obj) ((MIX_PARAMS_CAST(obj))->refcount)
+/**
+ * MIX_PARAMS_REFCOUNT_VALUE:
+ * @obj: a #MixParams
+ *
+ * Get the reference count value of the object
+ */
+#define MIX_PARAMS_REFCOUNT_VALUE(obj) (g_atomic_int_get (&(MIX_PARAMS_CAST(obj))->refcount))
+
+/**
+ * MixParams:
+ * @instance: type instance
+ * @refcount: atomic refcount
+ *
+ * Base class for refcounted parameter objects.
+ */
+struct _MixParams {
+  GTypeInstance instance;
+  /*< public >*/
+  gint refcount;
+
+  /*< private >*/
+  gpointer _reserved;
+};
+
+/**
+ * MixParamsClass:
+ * @dup: method to duplicate the object.
+ * @copy: method to copy details in one object to the other.
+ * @finalize: destructor
+ * @equal: method to check if the content of two objects are equal.
+ *
+ * #MixParams class struct.
+ */
+struct _MixParamsClass {
+  GTypeClass type_class;
+
+  MixParamsDupFunction dup;
+  MixParamsCopyFunction copy;
+  MixParamsFinalizeFunction finalize;
+  MixParamsEqualFunction equal;
+
+  /*< private >*/
+  gpointer _mix_reserved;
+};
+
+/**
+ * mix_params_get_type:
+ * @returns: type of this object.
+ *
+ * Get type.
+ */
+GType mix_params_get_type(void);
+
+/**
+ * mix_params_new:
+ * @type: the #GType of the concrete params object to create.
+ * @returns: return a newly allocated object.
+ *
+ * Create a new instance of the given type.
+ */
+MixParams* mix_params_new(GType type);
+
+/**
+ * mix_params_copy:
+ * @target: copy to target
+ * @src: copy from source
+ * @returns: boolean indicating if copy is successful.
+ *
+ * Copy data from one instance to the other. This method internally invokes the #MixParams::copy method so that derived objects are copied correctly.
+ */
+gboolean mix_params_copy(MixParams *target, const MixParams *src);
+
+
+/**
+ * mix_params_ref:
+ * @obj: a #MixParams object.
+ * @returns: the object with reference count incremented.
+ *
+ * Increment reference count.
+ */
+MixParams* mix_params_ref(MixParams *obj);
+
+
+/**
+ * mix_params_unref:
+ * @obj: a #MixParams object.
+ *
+ * Decrement reference count.
+ */
+void mix_params_unref (MixParams *obj);
+
+/**
+ * mix_params_replace:
+ * @olddata: pointer to a pointer to the object to be replaced
+ * @newdata: pointer to the new object, or NULL
+ *
+ * Replace a pointer to an object with a new one, updating the reference counts atomically.
+ */
+void mix_params_replace(MixParams **olddata, MixParams *newdata);
+
+/**
+ * mix_params_dup:
+ * @obj: #MixParams object to duplicate.
+ * @returns: A newly allocated duplicate of the object, or NULL if failed.
+ *
+ * Duplicate the given #MixParams and allocate a new instance. This method chains up properly so that derived objects are duplicated correctly.
+ */
+MixParams *mix_params_dup(const MixParams *obj);
+
+/**
+ * mix_params_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the 2 objects contain the same data.
+ *
+ * Note that the comparison checks the values held inside the objects, not whether the two pointers refer to the same instance.
+ */
+gboolean mix_params_equal(MixParams *first, MixParams *second);
+
+G_END_DECLS
+
+#endif
+
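To make the value semantics of dup/equal concrete, a short sketch (illustrative only, assuming the type system is already initialized) using the MixDrmParams subclass introduced earlier in this commit:

    #include "mixdrmparams.h"

    void params_value_semantics(void)
    {
        MixDrmParams *a = mix_drmparams_new();

        /* dup allocates a new instance and runs the copy vfunc chain */
        MixParams *b = mix_params_dup(MIX_PARAMS(a));

        /* equal compares contents through the equal vfunc chain, not pointers */
        if (b && mix_params_equal(MIX_PARAMS(a), b))
            g_print("a and b hold the same values\n");

        if (b)
            mix_params_unref(b);
        mix_drmparams_unref(a);
    }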
+ */
+gboolean mix_params_equal(MixParams *first, MixParams *second);
+
+G_END_DECLS
+
+#endif
+
diff --git a/mix_common/src/mixresult.h b/mix_common/src/mixresult.h
new file mode 100644
index 0000000..9472a7e
--- /dev/null
+++ b/mix_common/src/mixresult.h
@@ -0,0 +1,90 @@
+/*************************************************************************************
+ * INTEL CONFIDENTIAL
+ * Copyright 2008-2009 Intel Corporation All Rights Reserved.
+ * The source code contained or described herein and all documents related
+ * to the source code ("Material") are owned by Intel Corporation or its
+ * suppliers or licensors. Title to the Material remains with Intel
+ * Corporation or its suppliers and licensors. The Material contains trade
+ * secrets and proprietary and confidential information of Intel or its
+ * suppliers and licensors. The Material is protected by worldwide copyright
+ * and trade secret laws and treaty provisions. No part of the Material may
+ * be used, copied, reproduced, modified, published, uploaded, posted,
+ * transmitted, distributed, or disclosed in any way without Intel’s prior
+ * express written permission.
+ *
+ * No license under any patent, copyright, trade secret or other intellectual
+ * property right is granted to or conferred upon you by disclosure or delivery
+ * of the Materials, either expressly, by implication, inducement, estoppel or
+ * otherwise. Any license under such intellectual property rights must be express
+ * and approved by Intel in writing.
+ ************************************************************************************/
+
+#ifndef MIX_RESULT_H
+#define MIX_RESULT_H
+
+#include <glib.h>
+
+typedef gint32 MIX_RESULT;
+
+#define MIX_SUCCEEDED(result_code) ((((MIX_RESULT)(result_code)) & 0x80000000) == 0)
+
+typedef enum {
+  /** General success */
+  MIX_RESULT_SUCCESS = (MIX_RESULT) 0x00000000,
+  MIX_RESULT_SUCCESS_CHG = (MIX_RESULT) 0x00000001,
+
+  /** Module specific success starting number */
+
+  /** Starting success number for Audio */
+  MIX_RESULT_SUCCESS_AUDIO_START = (MIX_RESULT) 0x00010000,
+  /** Starting success number for Video */
+  MIX_RESULT_SUCCESS_VIDEO_START = (MIX_RESULT) 0x00020000,
+  /** Starting success number for DRM */
+  MIX_RESULT_SUCCESS_DRM_START = (MIX_RESULT) 0x00030000
+} MIX_SUCCESS_COMMON;
+
+typedef enum {
+  /** General failure */
+  MIX_RESULT_FAIL = (MIX_RESULT) 0x80000000,
+  MIX_RESULT_NULL_PTR = (MIX_RESULT) 0x80000001,
+  MIX_RESULT_LPE_NOTAVAIL = (MIX_RESULT) 0x80000002,
+  MIX_RESULT_DIRECT_NOTAVAIL = (MIX_RESULT) 0x80000003,
+  MIX_RESULT_NOT_SUPPORTED = (MIX_RESULT) 0x80000004,
+  MIX_RESULT_CONF_MISMATCH = (MIX_RESULT) 0x80000005,
+  MIX_RESULT_RESUME_NEEDED = (MIX_RESULT) 0x80000007,
+  MIX_RESULT_WRONGMODE = (MIX_RESULT) 0x80000008,
+  MIX_RESULT_RESOURCES_NOTAVAIL = (MIX_RESULT) 0x80000009,
+  MIX_RESULT_INVALID_PARAM = (MIX_RESULT) 0x8000000a,
+  MIX_RESULT_ALREADY_INIT = (MIX_RESULT) 0x8000000b,
+  MIX_RESULT_WRONG_STATE = (MIX_RESULT) 0x8000000c,
+  MIX_RESULT_NOT_INIT = (MIX_RESULT) 0x8000000d,
+  MIX_RESULT_NOT_CONFIGURED = (MIX_RESULT) 0x8000000e,
+  MIX_RESULT_STREAM_NOTAVAIL = (MIX_RESULT) 0x8000000f,
+  MIX_RESULT_CODEC_NOTAVAIL = (MIX_RESULT) 0x80000010,
+  MIX_RESULT_CODEC_NOTSUPPORTED = (MIX_RESULT) 0x80000011,
+  MIX_RESULT_INVALID_COUNT = (MIX_RESULT) 0x80000012,
+  MIX_RESULT_NOT_ACP = (MIX_RESULT) 0x80000013,
+  MIX_RESULT_INVALID_DECODE_MODE = (MIX_RESULT) 0x80000014,
+  MIX_RESULT_INVALID_STREAM_NAME = (MIX_RESULT) 0x80000015,
+  MIX_RESULT_NO_MEMORY = (MIX_RESULT) 0x80000016,
+  MIX_RESULT_NEED_RETRY =
(MIX_RESULT)0x80000017, + MIX_RESULT_SYSTEM_ERRNO = (MIX_RESULT)0x80000018, + + /** Module specific errors starting number */ + + /** Starting error number for Audio */ + MIX_RESULT_ERROR_AUDIO_START = (MIX_RESULT) 0x80010000, + /** Starting error number for Video */ + MIX_RESULT_ERROR_VIDEO_START = (MIX_RESULT) 0x80020000, + /** Starting error number for DRM */ + MIX_RESULT_ERROR_DRM_START = (MIX_RESULT) 0x80030000 +} MIX_ERROR_COMMON; + + /* New success code should be added just above this line */ +// MIX_RESULT_IAM_DISABLED, /* 0x80000008 */ +// MIX_RESULT_IAM_NOTAVAIL, /* 0x80000009 */ +// MIX_RESULT_IAM_REG_FAILED, /* 0x8000000f */ + + + +#endif // MIX_RESULT_H diff --git a/mix_vbp/AUTHORS b/mix_vbp/AUTHORS new file mode 100644 index 0000000..db8081b --- /dev/null +++ b/mix_vbp/AUTHORS @@ -0,0 +1 @@ +linda.s.cline@intel.com diff --git a/mix_vbp/COPYING b/mix_vbp/COPYING new file mode 100644 index 0000000..a4f852c --- /dev/null +++ b/mix_vbp/COPYING @@ -0,0 +1,26 @@ +INTEL SOFTWARE LICENSE AGREEMENT (Alpha, Beta, Prototype Site License) + +IMPORTANT - READ BEFORE COPYING, INSTALLING OR USING. +Do not use or load this software and any associated materials (collectively, the “Software”) until you have carefully read the following terms and conditions. By loading or using the Software, you agree to the terms of this Agreement. If you do not wish to so agree, do not install or use the Software. + + +LICENSE. Intel hereby grants you a limited, nontransferable, non-sublicenseable, nonexclusive, royalty-free, fully-paid license under Intel’s copyrights to use the Software on your organization’s computers solely for your organization’s internal evaluation and testing in connection with Intel products designed for the Software, and you may make a reasonable number of  copies of the Software for internal use only, subject to these conditions: +1. You may not copy, modify, rent, sell, distribute, externally display, externally perform or transfer any part of the Software except as provided in this Agreement, and you agree to prevent unauthorized copying of the Software. +2. You may not reverse engineer, decompile, or disassemble the Software. +3. You may not sublicense the Software. +4. The Software may include portions offered on terms in addition to those set out here, as set out in a license accompanying those portions. +5. You may not subject the Software, in whole or in part, to any license obligations of Open Source Software including without limitation combining or distributing the Software with Open Source Software in a manner that subjects the Software or any portion of the Software provided by Intel hereunder to any license obligations of such Open Source Software. "Open Source Software" means any software that requires as a condition of use, modification and/or distribution of such software that such software or other software incorporated into, derived from or distributed with such software (a) be disclosed or distributed in source code form; or (b) be licensed by the user to third parties for the purpose of making and/or distributing derivative works; or (c) be redistributable at no charge. 
Open Source Software includes, without limitation, software licensed or distributed under any of the following licenses or distribution models, or licenses or distribution models substantially similar to any of the following: (a) GNU’s General Public License (GPL) or Lesser/Library GPL (LGPL), (b) the Artistic License (e.g., PERL), (c) the Mozilla Public License, (d) the Netscape Public License, (e) the Sun Community Source License (SCSL), (f) the Sun Industry Source License (SISL), (g) the Apache Software license and (h) the Common Public License (CPL). +OWNERSHIP OF SOFTWARE AND COPYRIGHTS. Title to all copies of the Software remains with Intel or its suppliers. The Software is copyrighted and protected by the laws of the United States and other countries, and international treaty provisions. You may not remove any copyright notices from the Software. Intel may make changes to the Software, or to items referenced therein, at any time without notice, but is not obligated to support, update, upgrade or provide training for the Software. Except as otherwise expressly provided, Intel grants no express or implied right under Intel patents, copyrights, trademarks, or other intellectual property rights. +EXCLUSION OF WARRANTIES . THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY EXPRESS OR IMPLIED WARRANTY OF ANY KIND INCLUDING WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, OR FITNESS FOR A PARTICULAR PURPOSE. Intel does not warrant or assume responsibility for the accuracy or completeness of any information, text, graphics, links or other items contained within the Software. +LIMITATION OF LIABILITY. IN NO EVENT SHALL INTEL OR ITS SUPPLIERS BE LIABLE FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, LOST PROFITS, BUSINESS INTERRUPTION OR LOST INFORMATION) ARISING OUT OF THE USE OF OR INABILITY TO USE THE SOFTWARE, EVEN IF INTEL HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. SOME JURISDICTIONS PROHIBIT EXCLUSION OR LIMITATION OF LIABILITY FOR IMPLIED WARRANTIES OR CONSEQUENTIAL OR INCIDENTAL DAMAGES, SO THE ABOVE LIMITATION MAY NOT APPLY TO YOU. YOU MAY ALSO HAVE OTHER LEGAL RIGHTS THAT VARY FROM JURISDICTION TO JURISDICTION. THE SOFTWARE LICENSED HEREUNDER IS NOT DESIGNED OR INTENDED FOR USE IN ANY APPLICATION IN WHICH THE FAILURE OF THE SOFTWARE COULD LEAD TO PERSONAL INJURY OR DEATH. YOU SHALL INDEMNIFY AND HOLD INTEL AND THE INTEL PARTIES HARMLESS AGAINST ALL CLAIMS, COSTS, DAMAGES, AND EXPENSES, AND REASONABLE ATTORNEY FEES ARISING OUT OF, DIRECTLY OR INDIRECTLY, THE UNINTENDED USE OF THE SOFTWARE AND ANY CLAIM OF PRODUCT LIABILITY, PERSONAL INJURY OR DEATH ASSOCIATED WITH ANY UNINTENDED USE, EVEN IF SUCH CLAIM ALLEGES THAT AN INTEL PARTY WAS NEGLIGENT REGARDING THE DESIGN OR MANUFACTURE OF THE SOFTWARE. THE LIMITED REMEDIES, WARRANTY DISCLAIMER AND LIMITED LIABILITY ARE FUNDAMENTAL ELEMENTS OF THE BASIS OF THE BARGAIN BETWEEN INTEL AND YOU. INTEL WOULD NOT BE ABLE TO PROVIDE THE SOFTWARE WITHOUT SUCH LIMITATIONS. +TERMINATION OF THIS AGREEMENT. Intel may terminate this Agreement at any time if you violate its terms. Upon termination, you will immediately destroy the Software or return all copies of the Software to Intel (including providing certification of such destruction back to Intel). In the event of termination of this Agreement, all licenses granted to you hereunder shall immediately terminate. +APPLICABLE LAWS. 
Claims arising under this Agreement shall be governed by the laws of Delaware, excluding its principles of conflict of laws and the United Nations Convention on Contracts for the Sale of Goods. You may not export the Software in violation of applicable export laws and regulations. +GOVERNMENT RESTRICTED RIGHTS. The Software is provided with "RESTRICTED RIGHTS." Use, duplication or disclosure by the government is subject to restrictions as set forth in FAR52.227-14 and DFAR252.227-7013 et seq. or their successors. Use of the Software by the government constitutes acknowledgment of Intel's proprietary rights therein. Contractor or Manufacturer is Intel Corporation, 2200 Mission College Blvd., Santa Clara, CA 95052. +CONFIDENTIALITY. You shall not disclose the terms or existence of this Agreement or use Intel's name in any publications, advertisements, or other announcements without Intel's prior written consent. You do not have any rights to use any Intel trademarks or logos. +ASSIGNMENT. You may not delegate, assign or transfer this Agreement, the license(s) granted or any of your rights or duties hereunder, expressly, by implication, by operation of law, by way of merger (regardless of whether you are the surviving entity) or acquisition, or otherwise and any attempt to do so, without Intel’s express prior written consent, shall be null and void. Intel may assign this Agreement, and its rights and obligations hereunder, in its sole discretion. +ENTIRE AGREEMENT. The terms and conditions of this Agreement constitutes the entire agreement between the parties with respect to the subject matter hereof, and merges and supersedes all prior, contemporaneous agreements, understandings, negotiations and discussions. Neither of the parties hereto shall be bound by any conditions, definitions, warranties, understandings or representations with respect to the subject matter hereof other than as expressly provided for herein. Intel is not obligated under any other agreements unless they are in writing and signed by an authorized representative of Intel. +NO AGENCY Nothing contained herein shall be construed as creating any agency, employment relationship, partnership, principal-agent or other form of joint enterprise between the parties. +SEVERABILITY In the event that any provision of this Agreement shall be unenforceable or invalid under any applicable law or be so held by an applicable court decision, such unenforceability or invalidity shall not render this Agreement unenforceable or invalid as a whole, and, in such event, such provision shall be changed and interpreted so as to best accomplish the objectives of such unenforceable or invalid provision within the limits of applicable law or applicable court decisions. +WAIVER The failure of either party to require performance by the other party of any provision hereof shall not affect the full right to require such performance at any time thereafter; nor shall the waiver by either party of a breach of any provision hereof be taken or held to be a waiver of the provision itself. +CONTROLLING LANGUAGE. Translated versions of this Agreement may be provided in local languages for informational purposes only, provided however, that the English language version of this Agreement shall be controlling in all jurisdictions. 
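
[Editor's note, illustrative and not part of the patch: mixparams.h and mixresult.h introduced above define the refcounted base object and the error-code convention used across the MIX libraries. A minimal consumer-side sketch of both APIs follows; mix_foo_call() is a hypothetical stand-in for any MIX entry point that returns a MIX_RESULT, and everything else is declared in the headers above.]

    #include <glib-object.h>
    #include "mixparams.h"
    #include "mixresult.h"

    /* Hypothetical MIX call, used only to illustrate the MIX_RESULT convention. */
    extern MIX_RESULT mix_foo_call(MixParams *params);

    int main(void)
    {
        g_type_init();                     /* needed on the GLib 2.16 era this targets */

        MixParams *p = mix_params_new();   /* starts life with one reference */
        MixParams *q = mix_params_dup(p);  /* deep copy via the dup vmethod */

        if (q != NULL && mix_params_equal(p, q)) {
            /* equal by value, even though p != q */
        }

        MIX_RESULT ret = mix_foo_call(p);
        if (!MIX_SUCCEEDED(ret)) {
            /* failure codes have bit 31 set, e.g. MIX_RESULT_INVALID_PARAM */
        }

        mix_params_replace(&p, q);         /* presumably refs q and unrefs the old p,
                                              following the usual GLib-style semantics */
        mix_params_unref(q);
        mix_params_unref(p);               /* last unref finalizes the object */
        return 0;
    }
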
+
diff --git a/mix_vbp/ChangeLog b/mix_vbp/ChangeLog
new file mode 100644
index 0000000..139597f
--- /dev/null
+++ b/mix_vbp/ChangeLog
@@ -0,0 +1,2 @@
+
+
diff --git a/mix_vbp/INSTALL b/mix_vbp/INSTALL
new file mode 100644
index 0000000..50e1648
--- /dev/null
+++ b/mix_vbp/INSTALL
@@ -0,0 +1,4 @@
+Run the following to build and install:
+./autogen.sh
+./configure
+make
diff --git a/mix_vbp/Makefile.am b/mix_vbp/Makefile.am
new file mode 100644
index 0000000..a8b59cd
--- /dev/null
+++ b/mix_vbp/Makefile.am
@@ -0,0 +1,9 @@
+SUBDIRS = viddec_fw/fw/parser
+
+#Uncomment the following line if building documentation using gtkdoc
+#SUBDIRS += docs
+
+pkgconfigdir = $(libdir)/pkgconfig
+pkgconfig_DATA=mixvbp.pc
+EXTRA_DIST = autogen.sh mixvbp.spec
+DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc
diff --git a/mix_vbp/Merge_readme.txt b/mix_vbp/Merge_readme.txt
new file mode 100644
index 0000000..90936bb
--- /dev/null
+++ b/mix_vbp/Merge_readme.txt
@@ -0,0 +1,2 @@
+DHG revision #218237
+
diff --git a/mix_vbp/NEWS b/mix_vbp/NEWS
new file mode 100644
index 0000000..139597f
--- /dev/null
+++ b/mix_vbp/NEWS
@@ -0,0 +1,2 @@
+
+
diff --git a/mix_vbp/README b/mix_vbp/README
new file mode 100644
index 0000000..2bcf017
--- /dev/null
+++ b/mix_vbp/README
@@ -0,0 +1,2 @@
+MIX VBP (MIX Video Bitstream Parser) is a user library interface for parsing bitstreams of various video formats.
+
diff --git a/mix_vbp/autogen.sh b/mix_vbp/autogen.sh
new file mode 100644
index 0000000..ed2c536
--- /dev/null
+++ b/mix_vbp/autogen.sh
@@ -0,0 +1,19 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+
+package=MIXVBP
+
+#Uncomment the following line if building documentation using gtkdoc
+#gtkdocize --flavour no-tmpl || exit 1
+aclocal -I m4/ $ACLOCAL_FLAGS || exit 1
+libtoolize --copy --force || exit 1
+autoheader -v || exit 1
+autoconf -v || exit 1
+automake -a -c -v || exit 1
+
+echo "Now type ./configure to configure $package."
+exit 0 diff --git a/mix_vbp/configure.ac b/mix_vbp/configure.ac new file mode 100644 index 0000000..93a9081 --- /dev/null +++ b/mix_vbp/configure.ac @@ -0,0 +1,77 @@ +AC_INIT("", "", [linda.s.cline@intel.com]) + +AC_CONFIG_MACRO_DIR(m4) + +AS_MIX_VERSION(mixvbp, MIXVBP, 0, 1, 15) + +dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode +AM_MAINTAINER_MODE + +AM_INIT_AUTOMAKE($PACKAGE, $VERSION) + +dnl make aclocal work in maintainer mode +AC_SUBST(ACLOCAL_AMFLAGS, "-I m4") + +AM_CONFIG_HEADER(config.h) + +dnl check for tools +AC_PROG_CC +AC_PROG_LIBTOOL + +MIX_CFLAGS="-Wall -Werror" + +dnl decide on error flags +dnl AS_COMPILER_FLAG(-Wall, MIX_ERROR="$MIX_ERROR -Wall", MIX_ERROR="$MIX_ERROR") +dnl AS_COMPILER_FLAG(-Werror,MIX_ERROR="$MIX_ERROR -Werror",MIX_ERROR="$MIX_ERROR") + +dnl Check for pkgconfig first +AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes, no) + +dnl Give error and exit if we don't have pkgconfig +if test "x$HAVE_PKGCONFIG" = "xno"; then + AC_MSG_ERROR(you need to have pkgconfig installed !) +fi + +dnl GLib +dnl FIXME: need to align with moblin glib version +dnl FIXME: currently using an earlier version so it can be built on dev box. +GLIB_REQ=2.16 + +dnl Check for glib2 without extra fat, useful for the unversioned tool frontends +dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) +PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) +if test "x$HAVE_GLIB" = "xno"; then + AC_MSG_ERROR(You need glib development packages installed !) +fi + +PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no) +if test "x$HAVE_GOBJECT" = "xno"; then + AC_MSG_ERROR(You need glib development packages installed !) +fi + +PKG_CHECK_MODULES(GTHREAD, gthread-2.0 >= $GLIB_REQ,HAVE_GTHREAD=yes,HAVE_GTHREAD=no) +if test "x$HAVE_GTHREAD" = "xno"; then + AC_MSG_ERROR(You need glib development packages installed !) 
+fi + +dnl Check for documentation xrefs +dnl GLIB_PREFIX="`$PKG_CONFIG --variable=prefix glib-2.0`" +dnl AC_SUBST(GLIB_PREFIX) + +AC_SUBST(GLIB_CFLAGS) +AC_SUBST(GLIB_LIBS) +AC_SUBST(GOBJECT_CFLAGS) +AC_SUBST(GOBJECT_LIBS) +AC_SUBST(MIX_CFLAGS) +AC_SUBST(GTHREAD_CFLAGS) +AC_SUBST(GTHREAD_LIBS) + +AC_CONFIG_FILES([ +mixvbp.pc +Makefile +viddec_fw/fw/parser/Makefile +]) + +AC_OUTPUT + + diff --git a/mix_vbp/m4/Makefile.am b/mix_vbp/m4/Makefile.am new file mode 100644 index 0000000..66381d4 --- /dev/null +++ b/mix_vbp/m4/Makefile.am @@ -0,0 +1 @@ +EXTRA_DIST += diff --git a/mix_vbp/m4/as-mix-version.m4 b/mix_vbp/m4/as-mix-version.m4 new file mode 100644 index 0000000..f0301b1 --- /dev/null +++ b/mix_vbp/m4/as-mix-version.m4 @@ -0,0 +1,35 @@ +dnl as-mix-version.m4 + +dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE) + +dnl example +dnl AS_MIX_VERSION(mixvideo,MIXVIDEO, 0, 3, 2,) +dnl for a 0.3.2 release version + +dnl this macro +dnl - defines [$PREFIX]_MAJOR, MINOR and REVISION, CURRENT, AGE +dnl - defines [$PREFIX], VERSION +dnl - AC_SUBST's all defined vars + +AC_DEFUN([AS_MIX_VERSION], +[ + PACKAGE=[$1] + [$2]_MAJOR=[$3] + [$2]_MINOR=[$4] + [$2]_REVISION=[$5] + [$2]_CURRENT=m4_eval([$3] + [$4]) + [$2]_AGE=[$4] + VERSION=[$3].[$4].[$5] + + AC_SUBST([$2]_MAJOR) + AC_SUBST([$2]_MINOR) + AC_SUBST([$2]_REVISION) + AC_SUBST([$2]_CURRENT) + AC_SUBST([$2]_AGE) + + AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Define the package name]) + AC_SUBST(PACKAGE) + AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Define the version]) + AC_SUBST(VERSION) + +]) diff --git a/mix_vbp/mixvbp.pc.in b/mix_vbp/mixvbp.pc.in new file mode 100644 index 0000000..a1ec3a0 --- /dev/null +++ b/mix_vbp/mixvbp.pc.in @@ -0,0 +1,11 @@ +prefix=@prefix@ +exec_prefix=@exec_prefix@ +libdir=@libdir@ +includedir=@includedir@ + +Name: MIX Parser +Description: MIX Video Parser Library +Version: @VERSION@ +Libs: -L${libdir} -l@PACKAGE@ +Cflags: -I${includedir}/mixvbp + diff --git a/mix_vbp/mixvbp.spec b/mix_vbp/mixvbp.spec new file mode 100644 index 0000000..da15b9d --- /dev/null +++ b/mix_vbp/mixvbp.spec @@ -0,0 +1,52 @@ +# INTEL CONFIDENTIAL +# Copyright 2009 Intel Corporation All Rights Reserved. +# The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. +# +# No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
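+# Editor's note (illustrative, not part of the original spec): once the
+# mixvbp.pc template above is expanded and installed under
+# %{_prefix}/lib/pkgconfig, consumers are expected to compile and link
+# against the library the usual pkg-config way, e.g. with a hypothetical
+# consumer.c:
+#
+#   gcc $(pkg-config --cflags mixvbp) consumer.c $(pkg-config --libs mixvbp)
+#
+# which expands to -I${includedir}/mixvbp and -L${libdir} -lmixvbp per the
+# Cflags:/Libs: lines of the template.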
+
+Summary: MIX Video Bitstream Parser
+Name: mixvbp
+Version: 0.1.15
+Release: 1
+Source0: %{name}-%{version}.tar.gz
+NoSource: 0
+License: Proprietary
+Group: System Environment/Libraries
+BuildRoot: %{_tmppath}/%{name}-root
+ExclusiveArch: i586
+
+%description
+MIX Video Bitstream Parser is a user library interface for parsing bitstreams of various video formats.
+
+%package devel
+Summary: Libraries and include files
+Group: Development/Libraries
+Requires: %{name} = %{version}
+
+%description devel
+The %{name}-devel package contains the header files and static libraries for building applications that use %{name}.
+
+%prep
+%setup -q
+%build
+./autogen.sh
+./configure --prefix=%{_prefix}
+make
+%install
+make DESTDIR=$RPM_BUILD_ROOT install
+rm -f $RPM_BUILD_ROOT/%{_prefix}/lib/libmixvbp_mpeg2*
+
+%clean
+rm -rf $RPM_BUILD_ROOT
+%files
+%defattr(-,root,root)
+%{_prefix}/lib/libmixvbp.so*
+%{_prefix}/lib/libmixvbp_vc1.so*
+%{_prefix}/lib/libmixvbp_h264.so*
+%{_prefix}/lib/libmixvbp_mpeg4.so*
+
+%files devel
+%defattr(-,root,root)
+%{_prefix}/include
+%{_prefix}/lib/*.la
+%{_prefix}/lib/pkgconfig/mixvbp.pc
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h
new file mode 100644
index 0000000..51f0602
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h
@@ -0,0 +1,1034 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: H.264 header.
+//
+*/
+
+
+#ifndef _H264_H_
+#define _H264_H_
+
+#ifdef HOST_ONLY
+#include
+#include
+#include
+#endif
+
+#include "stdint.h"
+#include "viddec_debug.h"
+
+#include "viddec_fw_workload.h"
+#include "h264parse_sei.h"
+
+
+#ifdef WIN32
+#define mfd_printf OS_INFO
+#endif
+
+#ifdef H264_VERBOSE
+#define PRINTF(format, args...) OS_INFO("%s: %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ , ## args )
+#else
+//#define PRINTF(args...)
+#endif
+
+//#pragma warning(disable : 4710) // function not inlined
+//#pragma warning(disable : 4514) // unreferenced inline function has been removed CL
+//#pragma warning(disable : 4100) // unreferenced formal parameter CL
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define MAX_INT32_VALUE 0x7fffffff
+
+#define MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE 256
+#define MAX_CPB_CNT 32
+#define MAX_NUM_SLICE_GRPS 1 // As per Annex A for high profile, num_slice_groups_minus1 is 0
+#define MAX_PIC_LIST_NUM 8
+
+//#define MAX_PIC_SIZE_IN_MAP_UNITS 1024 //0 ???????? Henry
+#define MAX_NUM_REF_IDX_L0_ACTIVE 32
+//#define STARTCODE_BUF_SIZE 2048+1024
+
+#define NUM_MMCO_OPERATIONS 17
+
+// Used to check whether the SEI RP is the only way for recovery (cisco contents)
+// This threshold will decide the interval of recovery even no error detected if no IDR during this time
+#define SEI_REC_CHECK_TH 8
+
+//SPS
+#define MAX_NUM_SPS 32
+#define SCL_DEFAULT 1
+
+//PPS
+#define MAX_PIC_PARAMS 255
+#define MAX_NUM_REF_FRAMES 32
+#define MAX_QP 51
+#define MAX_NUM_PPS 256
+
+#define PUT_FS_IDC_BITS(w) (w&0x1F)
+#define PUT_LIST_INDEX_FIELD_BIT(w) ((w&0x1)<<5)
+#define PUT_LIST_LONG_TERM_BITS(w) ((w&0x1)<<6)
+#define PUT_LIST_PTR_LIST_ID_BIT(id) (id<<5)
+
+
+// DPB
+#define FRAME_FLAG_DANGLING_TOP_FIELD ( 0x1 << 3 )
+#define FRAME_FLAG_DANGLING_BOTTOM_FIELD ( 0x1 << 4 )
+
+#define MPD_DPB_FS_NULL_IDC 31 // May need to be changed if we alter gaps_in_frame_num to use
+
+#define MFD_H264_MAX_FRAME_BUFFERS 17
+#define NUM_DPB_FRAME_STORES (MFD_H264_MAX_FRAME_BUFFERS + 1) // 1 extra for storing non-existent pictures.
+
+//Scaling Matrix Type
+#define PPS_QM 0
+#define SPS_QM 1
+#define FB_QM 2
+#define DEFAULT_QM 3
+
+//Frame Type
+#define FRAME_TYPE_IDR 0x00
+#define FRAME_TYPE_I 0x01
+#define FRAME_TYPE_P 0x02
+#define FRAME_TYPE_B 0x03
+#define FRAME_TYPE_INVALID 0x04
+
+
+#define FRAME_TYPE_FRAME_OFFSET 3
+#define FRAME_TYPE_TOP_OFFSET 3
+#define FRAME_TYPE_BOTTOM_OFFSET 0
+#define FRAME_TYPE_STRUCTRUE_OFFSET 6
+
+//// Error handling
+#define FIELD_ERR_OFFSET 17 // offset for the Field error flag -- refer to the structure definition of viddec_fw_workload_error_codes in viddec_fw_common_defs.h
+
+//// Bits Handling
+#define h264_bitfields_extract(x_32, start, mask) (((x_32) >> (start)) & (mask) )
+#define h264_bitfields_insert(x_32, val_32, start, mask) ((x_32) = (((x_32) & ~( (mask) << (start))) | (((val_32) & (mask)) << (start))))
+
+
+//// PIP
+typedef enum _pip_setting_t
+{
+  PIP_SCALER_DISABLED,
+  PIP_SCALE_FACTOR_1_BY_4,
+  PIP_SCALE_FACTOR_1_BY_2,
+  PIP_SCALER_INVALID,
+
+} pip_setting_t;
+
+
+#ifdef VERBOSE
+#define DEBUGGETBITS(args...) OS_INFO( args )
+#else
+//#define DEBUGGETBITS(args...)
+#endif
+
+/* status codes */
+typedef enum _h264_Status
+{
+  H264_STATUS_EOF = 1, // end of file
+  H264_STATUS_OK = 0, // no error
+  H264_STATUS_NO_MEM = 2, // out of memory
+  H264_STATUS_FILE_ERROR = 3, // file error
+  H264_STATUS_NOTSUPPORT = 4, // not supported mode
+  H264_STATUS_PARSE_ERROR = 5, // failure parsing the H.264 stream
+  H264_STATUS_ERROR = 6, // unknown/unspecified error
+  H264_NAL_ERROR,
+  H264_SPS_INVALID_PROFILE,
+  H264_SPS_INVALID_LEVEL,
+  H264_SPS_INVALID_SEQ_PARAM_ID,
+  H264_SPS_ERROR,
+  H264_PPS_INVALID_PIC_ID,
+  H264_PPS_INVALID_SEQ_ID,
+  H264_PPS_ERROR,
+  H264_SliceHeader_INVALID_MB,
+  H264_SliceHeader_ERROR,
+  H264_FRAME_DONE,
+  H264_SLICE_DONE,
+  H264_STATUS_POLL_ONCE_ERROR,
+  H264_STATUS_DEC_MEMINIT_ERROR,
+  H264_STATUS_NAL_UNIT_TYPE_ERROR,
+  H264_STATUS_SEI_ERROR,
+  H264_STATUS_SEI_DONE,
+} h264_Status;
+
+
+
+typedef enum _picture_structure_t
+{
+  TOP_FIELD = 1,
+  BOTTOM_FIELD = 2,
+  FRAME = 3,
+  INVALID = 4
+} picture_structure_t;
+
+///// Chroma format
+
+typedef enum _h264_chroma_format_t
+{
+  H264_CHROMA_MONOCHROME,
+  H264_CHROMA_420,
+  H264_CHROMA_422,
+  H264_CHROMA_444,
+} h264_chroma_format_t;
+
+/* H.264 NAL unit types */
+typedef enum _h264_nal_unit_type
+{
+  h264_NAL_UNIT_TYPE_unspecified = 0,
+  h264_NAL_UNIT_TYPE_SLICE,
+  h264_NAL_UNIT_TYPE_DPA,
+  h264_NAL_UNIT_TYPE_DPB,
+  h264_NAL_UNIT_TYPE_DPC,
+  h264_NAL_UNIT_TYPE_IDR,
+  h264_NAL_UNIT_TYPE_SEI,
+  h264_NAL_UNIT_TYPE_SPS,
+  h264_NAL_UNIT_TYPE_PPS,
+  h264_NAL_UNIT_TYPE_Acc_unit_delimiter,
+  h264_NAL_UNIT_TYPE_EOSeq,
+  h264_NAL_UNIT_TYPE_EOstream,
+  h264_NAL_UNIT_TYPE_filler_data,
+  h264_NAL_UNIT_TYPE_SPS_extension,
+  h264_NAL_UNIT_TYPE_Reserved1 = 14, /*14-18*/
+  h264_NAL_UNIT_TYPE_Reserved2 = 15, /*14-18*/
+  h264_NAL_UNIT_TYPE_Reserved3 = 16, /*14-18*/
+  h264_NAL_UNIT_TYPE_Reserved4 = 17, /*14-18*/
+  h264_NAL_UNIT_TYPE_Reserved5 = 18, /*14-18*/
+  h264_NAL_UNIT_TYPE_ACP = 19,
+  h264_NAL_UNIT_TYPE_Reserved6 = 20, /*20-23*/
+  h264_NAL_UNIT_TYPE_unspecified2 = 24, /*24-31*/
+} h264_nal_unit_type;
+
+#define h264_NAL_PRIORITY_HIGHEST 3
+#define h264_NAL_PRIORITY_HIGH 2
+#define h264_NAL_PRIRITY_LOW 1
+#define h264_NAL_PRIORITY_DISPOSABLE 0
+
+
+typedef enum _h264_Profile
+{
+  h264_ProfileBaseline = 66, /** Baseline profile */
+  h264_ProfileMain = 77, /** Main profile */
+  h264_ProfileExtended = 88, /** Extended profile */
+  h264_ProfileHigh = 100, /** High profile */
+  h264_ProfileHigh10 = 110, /** High 10 profile */
+  h264_ProfileHigh422 = 122, /** High profile 4:2:2 */
+  h264_ProfileHigh444 = 144, /** High profile 4:4:4 */
+} h264_Profile;
+
+
+typedef enum _h264_Level
+{
+  h264_Level1b = 9, /** Level 1b */
+  h264_Level1 = 10, /** Level 1 */
+  h264_Level11 = 11, /** Level 1.1 */
+  h264_Level12 = 12, /** Level 1.2 */
+  h264_Level13 = 13, /** Level 1.3 */
+  h264_Level2 = 20, /** Level 2 */
+  h264_Level21 = 21, /** Level 2.1 */
+  h264_Level22 = 22, /** Level 2.2 */
+  h264_Level3 = 30, /** Level 3 */
+  h264_Level31 = 31, /** Level 3.1 */
+  h264_Level32 = 32, /** Level 3.2 */
+  h264_Level4 = 40, /** Level 4 */
+  h264_Level41 = 41, /** Level 4.1 */
+  h264_Level42 = 42, /** Level 4.2 */
+  h264_Level5 = 50, /** Level 5 */
+  h264_Level51 = 51, /** Level 5.1 */
+  h264_LevelReserved = 255 /** Reserved/unknown level */
+} h264_Level;
+
+
+typedef enum _h264_video_format
+{
+  h264_Component = 0,
+  h264_PAL,
+  h264_NTSC,
+  h264_SECAM,
+  h264_MAC,
+  h264_unspecified,
+  h264_Reserved6,
+  h264_Reserved7
+} h264_video_format;
+
+
+typedef enum _h264_fcm
+{
+  h264_ProgressiveFrame = 0,
+  h264_InterlacedFrame = 1,
+  h264_InterlacedField =
3, + h264_PictureFormatNone +} h264_fcm; + + +///// Define the picture types [] +typedef enum _h264_ptype_t +{ + h264_PtypeP = 0, + h264_PtypeB = 1, + h264_PtypeI = 2, + h264_PtypeSP = 3, + h264_PtypeSI = 4, + h264_Ptype_unspecified, +} h264_ptype_t; + + +///// Aspect ratio +typedef enum _h264_aspect_ratio +{ + h264_AR_Unspecified = 0, + h264_AR_1_1 = 1, + h264_AR_12_11 = 2, + h264_AR_10_11 = 3, + h264_AR_16_11 = 4, + h264_AR_40_33 = 5, + h264_AR_24_11 = 6, + h264_AR_20_11 = 7, + h264_AR_32_11 = 8, + h264_AR_80_33 = 9, + h264_AR_18_11 = 10, + h264_AR_15_11 = 11, + h264_AR_64_33 = 12, + h264_AR_160_99 = 13, + h264_AR_4_3 = 14, + h264_AR_3_2 = 15, + h264_AR_2_1 = 16, + h264_AR_RESERVED = 17, + h264_AR_Extended_SAR = 255, +}h264_aspect_ratio; + + +////////////////////////////////////////////// + +////////////////////////////////////////////// +// storable_picture + +/* Structure details + If all members remain ints + Size = 11 ints, i.e. 44 bytes +*/ + +typedef struct +{ + int32_t poc; + int32_t pic_num; + + int32_t long_term_pic_num; + + uint8_t long_term_frame_idx; + uint8_t is_long_term; + uint8_t used_for_reference; + uint8_t pad_flag; // Used to indicate the status + +} storable_picture, *storable_picture_ptr; + +////////////////////////////////////////////// +// frame store + +/* Structure details + If all members remain ints + Size = 46 ints, i.e. 184 bytes +*/ + +typedef struct _frame_store +{ + storable_picture frame; + storable_picture top_field; + storable_picture bottom_field; + + int32_t frame_num; + + int32_t frame_num_wrap; + + + uint8_t fs_idc; + uint8_t pic_type; //bit7 structure: 1 frame , 0 field; + //bit4,5,6 top field (frame) pic type, 00 IDR 01 I 10 P 11 B 100 INVALID + //bit1,2,3 bottom pic type, 00 IDR 01 I 10 P 11 B 100 INVALID + uint8_t long_term_frame_idx; // No two frame stores may have the same long-term frame index + + #define viddec_h264_get_dec_structure(x) h264_bitfields_extract( (x)->fs_flag_1, 0, 0x03) + #define viddec_h264_set_dec_structure(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 0, 0x03) + #define viddec_h264_get_is_used(x) h264_bitfields_extract( (x)->fs_flag_1, 2, 0x03) + #define viddec_h264_set_is_frame_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x03) + #define viddec_h264_set_is_top_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x01) + #define viddec_h264_set_is_bottom_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 3, 0x01) + #define viddec_h264_get_is_skipped(x) h264_bitfields_extract( (x)->fs_flag_1, 4, 0x03) + #define viddec_h264_set_is_frame_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x03) + #define viddec_h264_set_is_top_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x01) + #define viddec_h264_set_is_bottom_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 5, 0x01) + #define viddec_h264_get_is_long_term(x) h264_bitfields_extract( (x)->fs_flag_1, 6, 0x03) + #define viddec_h264_set_is_frame_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x03) + #define viddec_h264_set_is_top_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x01) + #define viddec_h264_set_is_bottom_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 7, 0x01) + uint8_t fs_flag_1; + + + #define viddec_h264_get_is_non_existent(x) h264_bitfields_extract( (x)->fs_flag_2, 0, 0x01) + #define viddec_h264_set_is_non_existent(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 0, 0x01) + #define 
viddec_h264_get_is_output(x) h264_bitfields_extract( (x)->fs_flag_2, 1, 0x01) + #define viddec_h264_set_is_output(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 1, 0x01) + #define viddec_h264_get_is_dangling(x) h264_bitfields_extract( (x)->fs_flag_2, 2, 0x01) + #define viddec_h264_set_is_dangling(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 2, 0x01) + #define viddec_h264_get_recovery_pt_picture(x) h264_bitfields_extract( (x)->fs_flag_2, 3, 0x01) + #define viddec_h264_set_recovery_pt_picture(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 3, 0x01) + #define viddec_h264_get_broken_link_picture(x) h264_bitfields_extract( (x)->fs_flag_2, 4, 0x01) + #define viddec_h264_set_broken_link_picture(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 4, 0x01) + #define viddec_h264_get_open_gop_entry(x) h264_bitfields_extract( (x)->fs_flag_2, 5, 0x01) + #define viddec_h264_set_open_gop_entry(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 5, 0x01) + #define viddec_h264_get_first_field_intra(x) h264_bitfields_extract( (x)->fs_flag_2, 6, 0x01) + #define viddec_h264_set_first_field_intra(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 6, 0x01) + uint8_t fs_flag_2; + + uint8_t fs_flag_reserve_1; + uint8_t fs_flag_reserve_2; + uint8_t fs_flag_reserve_3; + + // If non-reference, may have skipped pixel decode + //uint8_t non_ref_skipped; +} frame_store, *frame_param_ptr; + +//! Decoded Picture Buffer +typedef struct _h264_decoded_picture_buffer +{ + /// + int32_t last_output_poc; + int32_t max_long_term_pic_idx; + + //// Resolutions + int32_t PicWidthInMbs; + int32_t FrameHeightInMbs; + + frame_store fs[NUM_DPB_FRAME_STORES]; + + uint8_t fs_ref_idc[16]; + uint8_t fs_ltref_idc[16]; + + uint8_t fs_dpb_idc[NUM_DPB_FRAME_STORES+2]; + + uint8_t listX_0[33+3]; // [bit5}:field_flag:0 for top, 1 for bottom, [bit4~0]:fs_idc + uint8_t listX_1[33+3]; + + uint8_t listXsize[2]; // 1 to 32 + uint8_t nInitListSize[2]; + + //uint32_t size; + uint8_t fs_dec_idc; + uint8_t fs_non_exist_idc; + uint8_t BumpLevel; + uint8_t used_size; + + uint8_t OutputLevel; + uint8_t OutputLevelValid; + uint8_t OutputCtrl; + uint8_t num_ref_frames; + + uint8_t ref_frames_in_buffer; + uint8_t ltref_frames_in_buffer; + uint8_t SuspendOutput; + uint8_t WaitSeiRecovery; + + + uint8_t frame_numbers_need_to_be_allocated; + uint8_t frame_id_need_to_be_allocated; + + //// frame list to release from dpb, need be displayed + uint8_t frame_numbers_need_to_be_removed; + uint8_t frame_id_need_to_be_removed[17]; + + //// frame list to removed from dpb but not display + uint8_t frame_numbers_need_to_be_dropped; + uint8_t frame_id_need_to_be_dropped[17]; + + //// frame list to display (in display order) + uint8_t frame_numbers_need_to_be_displayed; + uint8_t frame_id_need_to_be_displayed[17]; + + +} h264_DecodedPictureBuffer; + + +////////////////////////////////////////////// +// qm_matrix_set +typedef struct _qm_matrix_set +{ + // uint8_t scaling_default_vector; + uint8_t scaling_list[56]; // 0 to 23 for qm 0 to 5 (4x4), 24 to 55 for qm 6 & 7 (8x8) + +} qm_matrix_set, *qm_matrix_set_ptr; + +/* +///////// Currently not enabled in parser fw/////////////////// +typedef struct _h264_SPS_Extension_RBSP { + int32_t seq_parameter_set_id; //UE + int32_t aux_format_idc; //UE + int32_t bit_depth_aux_minus8; //UE + int32_t alpha_incr_flag; + int32_t alpha_opaque_value; + int32_t alpha_transparent_value; + int32_t additional_extension_flag; +// h264_rbsp_trail_set* rbsp_trail_ptr; +}h264_SPS_Extension_RBSP_t; +*/ + +typedef 
struct _h264_hrd_param_set { + int32_t bit_rate_value_minus1[MAX_CPB_CNT]; // ue(v), 0 to (2^32)-2 + int32_t cpb_size_value_minus1[MAX_CPB_CNT]; // ue(v), 0 to (2^32)-2 + + uint8_t cbr_flag[MAX_CPB_CNT]; // u(1) * 32 + +} h264_hrd_param_set, *h264_hrd_param_set_ptr; + +typedef struct _vui_seq_parameters_t_used +{ + uint32_t num_units_in_tick; // u(32) + uint32_t time_scale; // u(32) + + int32_t num_reorder_frames; // ue(v), 0 to max_dec_frame_buffering + int32_t max_dec_frame_buffering; // ue(v), 0 to MaxDpbSize, specified in subclause A.3 + + uint16_t sar_width; // u(16) + uint16_t sar_height; // u(16) + + uint8_t aspect_ratio_info_present_flag; // u(1) + uint8_t aspect_ratio_idc; // u(8) + uint8_t video_signal_type_present_flag; // u(1) + uint8_t video_format; // u(3) + + uint8_t colour_description_present_flag; // u(1) + uint8_t colour_primaries; // u(8) + uint8_t transfer_characteristics; // u(8) + uint8_t timing_info_present_flag; // u(1) + + uint8_t fixed_frame_rate_flag; // u(1) + uint8_t low_delay_hrd_flag; // u(1) + uint8_t bitstream_restriction_flag; // u(1) + uint8_t pic_struct_present_flag; + + uint8_t nal_hrd_parameters_present_flag; // u(1) + uint8_t nal_hrd_cpb_removal_delay_length_minus1; // u(5) + uint8_t nal_hrd_dpb_output_delay_length_minus1; // u(5) + uint8_t nal_hrd_time_offset_length; // u(5) + + uint8_t nal_hrd_cpb_cnt_minus1; // ue(v), 0 to 31 + uint8_t nal_hrd_initial_cpb_removal_delay_length_minus1; // u(5) + uint8_t vcl_hrd_parameters_present_flag; // u(1) + uint8_t vcl_hrd_cpb_removal_delay_length_minus1; // u(5) + + uint8_t vcl_hrd_dpb_output_delay_length_minus1; // u(5) + uint8_t vcl_hrd_time_offset_length; // u(5) + uint8_t vcl_hrd_cpb_cnt_minus1; // ue(v), 0 to 31 + uint8_t vcl_hrd_initial_cpb_removal_delay_length_minus1; // u(5) + + /////// Here should be kept as 32-bits aligned for next structures + /// 2 structures for NAL&VCL HRD + + +} vui_seq_parameters_t_used; + + +typedef struct _vui_seq_parameters_t_not_used +{ + int16_t chroma_sample_loc_type_top_field; // ue(v) + int16_t chroma_sample_loc_type_bottom_field; // ue(v) + + uint8_t overscan_info_present_flag; // u(1) + uint8_t overscan_appropriate_flag; // u(1) + + uint8_t video_full_range_flag; // u(1) + uint8_t matrix_coefficients; // u(8) + + uint8_t chroma_location_info_present_flag; // u(1) + uint8_t max_bytes_per_pic_denom; // ue(v), 0 to 16 + uint8_t max_bits_per_mb_denom; // ue(v), 0 to 16 + uint8_t log2_max_mv_length_vertical; // ue(v), 0 to 16, default to 16 + uint8_t log2_max_mv_length_horizontal; // ue(v), 0 to 16, default to 16 + + uint8_t motion_vectors_over_pic_boundaries_flag; // u(1) + + uint8_t nal_hrd_bit_rate_scale; // u(4) + uint8_t nal_hrd_cpb_size_scale; // u(4) + + uint8_t vcl_hrd_bit_rate_scale; // u(4) + uint8_t vcl_hrd_cpb_size_scale; // u(4) + + h264_hrd_param_set nal_hrd_parameters; + h264_hrd_param_set vcl_hrd_parameters; + + +} vui_seq_parameters_t_not_used, *vui_seq_parameters_t_not_used_ptr; + + +////////////////////////////////////////////// +// picture parameter set + +typedef struct _PPS_PAR +{ + //int32_t DOUBLE_ALIGN valid; // indicates the parameter set is valid + + int32_t pic_init_qp_minus26; // se(v), -26 to +25 + int32_t pic_init_qs_minus26; // se(v), -26 to +25 + int32_t chroma_qp_index_offset; // se(v), -12 to +12 + int32_t second_chroma_qp_index_offset; + + uint8_t pic_parameter_set_id; // ue(v), 0 to 255, restricted to 0 to 127 by MPD_CTRL_MAXPPS = 128 + uint8_t seq_parameter_set_id; // ue(v), 0 to 31 + uint8_t entropy_coding_mode_flag; // u(1) + uint8_t 
pic_order_present_flag; // u(1) + + uint8_t num_slice_groups_minus1; // ue(v), shall be 0 for MP + // Below are not relevant for main profile... + uint8_t slice_group_map_type; // ue(v), 0 to 6 + uint8_t num_ref_idx_l0_active; // ue(v), 0 to 31 + uint8_t num_ref_idx_l1_active; // ue(v), 0 to 31 + + uint8_t weighted_pred_flag; // u(1) + uint8_t weighted_bipred_idc; // u(2) + uint8_t deblocking_filter_control_present_flag; // u(1) + uint8_t constrained_intra_pred_flag; // u(1) + + uint8_t redundant_pic_cnt_present_flag; // u(1) + uint8_t transform_8x8_mode_flag; + uint8_t pic_scaling_matrix_present_flag; + uint8_t pps_status_flag; + + //// Keep here with 32-bits aligned + uint8_t pic_scaling_list_present_flag[MAX_PIC_LIST_NUM]; + + qm_matrix_set pps_qm; + + uint8_t ScalingList4x4[6][16]; + uint8_t ScalingList8x8[2][64]; + uint8_t UseDefaultScalingMatrix4x4Flag[6+2]; + uint8_t UseDefaultScalingMatrix8x8Flag[6+2]; + +} pic_param_set, *pic_param_set_ptr, h264_PicParameterSet_t; + +typedef union _list_reordering_num_t +{ + int32_t abs_diff_pic_num_minus1; + int32_t long_term_pic_num; +} list_reordering_num_t; + +typedef struct _h264_Ref_Pic_List_Reordering ////size = 8*33+ 1 + 33 +{ + list_reordering_num_t list_reordering_num[MAX_NUM_REF_FRAMES+1]; + + uint8_t ref_pic_list_reordering_flag; + uint8_t reordering_of_pic_nums_idc[MAX_NUM_REF_FRAMES+1]; //UE + +}h264_Ref_Pic_List_Reordering_t; + +typedef enum _H264_DANGLING_TYPE +{ + DANGLING_TYPE_LAST_FIELD, + DANGLING_TYPE_DPB_RESET, + DANGLING_TYPE_FIELD, + DANGLING_TYPE_FRAME, + DANGLING_TYPE_GAP_IN_FRAME + +} H264_DANGLING_TYPE; + + +typedef struct _h264_Dec_Ref_Pic_Marking //size = 17*4*2 + 17*3 + 4 + 1 +{ + int32_t difference_of_pic_num_minus1[NUM_MMCO_OPERATIONS]; + int32_t long_term_pic_num[NUM_MMCO_OPERATIONS]; + + /// MMCO + uint8_t memory_management_control_operation[NUM_MMCO_OPERATIONS]; + uint8_t max_long_term_frame_idx_plus1[NUM_MMCO_OPERATIONS]; + uint8_t long_term_frame_idx[NUM_MMCO_OPERATIONS]; + uint8_t long_term_reference_flag; + + uint8_t adaptive_ref_pic_marking_mode_flag; + uint8_t dec_ref_pic_marking_count; + uint8_t no_output_of_prior_pics_flag; + + uint8_t pad; +}h264_Dec_Ref_Pic_Marking_t; + + + +typedef struct old_slice_par +{ + int32_t frame_num; + int32_t pic_order_cnt_lsb; + int32_t delta_pic_order_cnt_bottom; + int32_t delta_pic_order_cnt[2]; + + uint8_t field_pic_flag; + uint8_t bottom_field_flag; + uint8_t nal_ref_idc; + uint8_t structure; + + uint8_t idr_flag; + uint8_t idr_pic_id; + uint8_t pic_parameter_id; + uint8_t status; +} OldSliceParams; + +#ifdef VBP +typedef struct _h264__pred_weight_table +{ + uint8_t luma_log2_weight_denom; + uint8_t chroma_log2_weight_denom; + uint8_t luma_weight_l0_flag; + int16_t luma_weight_l0[32]; + int8_t luma_offset_l0[32]; + uint8_t chroma_weight_l0_flag; + int16_t chroma_weight_l0[32][2]; + int8_t chroma_offset_l0[32][2]; + + uint8_t luma_weight_l1_flag; + int16_t luma_weight_l1[32]; + int8_t luma_offset_l1[32]; + uint8_t chroma_weight_l1_flag; + int16_t chroma_weight_l1[32][2]; + int8_t chroma_offset_l1[32][2]; +} h264_pred_weight_table; +#endif + +typedef struct _h264_Slice_Header +{ + int32_t first_mb_in_slice; //UE + int32_t frame_num; //UV + int32_t pic_order_cnt_lsb; //UV + int32_t delta_pic_order_cnt_bottom; //SE + int32_t delta_pic_order_cnt[2]; //SE + int32_t redundant_pic_cnt; //UE + + uint32_t num_ref_idx_l0_active; //UE + uint32_t num_ref_idx_l1_active; //UE + + int32_t slice_qp_delta; //SE + int32_t slice_qs_delta; //SE + int32_t slice_alpha_c0_offset_div2; //SE + 
int32_t slice_beta_offset_div2; //SE + int32_t slice_group_change_cycle; //UV + +#ifdef VBP + h264_pred_weight_table sh_predwttbl; +#endif + + ///// Flags or IDs + //h264_ptype_t slice_type; //UE + uint8_t slice_type; + uint8_t nal_ref_idc; + uint8_t structure; + uint8_t pic_parameter_id; //UE + + uint8_t field_pic_flag; + uint8_t bottom_field_flag; + uint8_t idr_flag; //UE + uint8_t idr_pic_id; //UE + + uint8_t sh_error; + uint8_t cabac_init_idc; //UE + uint8_t sp_for_switch_flag; + uint8_t disable_deblocking_filter_idc; //UE + + uint8_t direct_spatial_mv_pred_flag; + uint8_t num_ref_idx_active_override_flag; + int16_t current_slice_nr; + + //// For Ref list reordering + h264_Dec_Ref_Pic_Marking_t sh_dec_refpic; + h264_Ref_Pic_List_Reordering_t sh_refpic_l0; + h264_Ref_Pic_List_Reordering_t sh_refpic_l1; + +} h264_Slice_Header_t; + + +#define MAX_USER_DATA_SIZE 1024 +typedef struct _h264_user_data_t +{ + h264_sei_payloadtype user_data_type; + + int32_t user_data_id; + int32_t dsn; + int32_t user_data_size; + int32_t user_data[MAX_USER_DATA_SIZE>>2]; +} h264_user_data_t; + +// SPS DISPLAY parameters: seq_param_set_disp, *seq_param_set_disp_ptr; +typedef struct _SPS_DISP +{ + ///// VUI info + vui_seq_parameters_t_used vui_seq_parameters; //size = + + ///// Resolution + int16_t pic_width_in_mbs_minus1; + int16_t pic_height_in_map_units_minus1; + + ///// Cropping + int16_t frame_crop_rect_left_offset; + int16_t frame_crop_rect_right_offset; + + int16_t frame_crop_rect_top_offset; + int16_t frame_crop_rect_bottom_offset; + + ///// Flags + uint8_t frame_mbs_only_flag; + uint8_t mb_adaptive_frame_field_flag; + uint8_t direct_8x8_inference_flag; + uint8_t frame_cropping_flag; + + uint16_t vui_parameters_present_flag; + uint16_t chroma_format_idc; +} seq_param_set_disp, *seq_param_set_disp_ptr; + + +////SPS: seq_param_set, *seq_param_set_ptr; + +typedef struct _SPS_PAR_USED +{ + uint32_t is_updated; + + /////////// Required for display section ////////////////////////// + seq_param_set_disp sps_disp; + + int32_t expectedDeltaPerPOCCycle; + int32_t offset_for_non_ref_pic; // se(v), -2^31 to (2^31)-1, 32-bit integer + int32_t offset_for_top_to_bottom_field; // se(v), -2^31 to (2^31)-1, 32-bit integer + + /////////// IDC + uint8_t profile_idc; // u(8), 0x77 for MP + uint8_t constraint_set_flags; // bit 0 to 3 for set0 to set3 + uint8_t level_idc; // u(8) + uint8_t seq_parameter_set_id; // ue(v), 0 to 31 + + + uint8_t pic_order_cnt_type; // ue(v), 0 to 2 + uint8_t log2_max_frame_num_minus4; // ue(v), 0 to 12 + uint8_t log2_max_pic_order_cnt_lsb_minus4; // ue(v), 0 to 12 + uint8_t num_ref_frames_in_pic_order_cnt_cycle; // ue(v), 0 to 255 + + //int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE]; // se(v), -2^31 to (2^31)-1, 32-bit integer + uint8_t num_ref_frames; // ue(v), 0 to 16, + uint8_t gaps_in_frame_num_value_allowed_flag; // u(1) + // This is my addition, we should calculate this once and leave it with the sps + // as opposed to calculating it each time in h264_hdr_decoding_POC() + + uint8_t delta_pic_order_always_zero_flag; // u(1) + uint8_t residual_colour_transform_flag; + + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + uint8_t lossless_qpprime_y_zero_flag; + uint8_t seq_scaling_matrix_present_flag; + + uint8_t seq_scaling_list_present_flag[MAX_PIC_LIST_NUM]; //0-7 + + //// Combine the scaling matrix to word ( 24 + 32) + uint8_t ScalingList4x4[6][16]; + uint8_t ScalingList8x8[2][64]; + uint8_t UseDefaultScalingMatrix4x4Flag[6]; + uint8_t 
UseDefaultScalingMatrix8x8Flag[6]; + +} seq_param_set_used, *seq_param_set_used_ptr; + + +typedef struct _SPS_PAR_ALL +{ + + seq_param_set_used sps_par_used; + vui_seq_parameters_t_not_used sps_vui_par_not_used; + +}seq_param_set_all, *seq_param_set_all_ptr; + + +///// Image control parameter//////////// +typedef struct _h264_img_par +{ + int32_t frame_num; // decoding num of current frame + int32_t frame_count; // count of decoded frames + int32_t current_slice_num; + int32_t gaps_in_frame_num; + + // POC decoding + int32_t num_ref_frames_in_pic_order_cnt_cycle; + int32_t delta_pic_order_always_zero_flag; + int32_t offset_for_non_ref_pic; + int32_t offset_for_top_to_bottom_field; + + int32_t pic_order_cnt_lsb; + int32_t pic_order_cnt_msb; + int32_t delta_pic_order_cnt_bottom; + int32_t delta_pic_order_cnt[2]; + + int32_t PicOrderCntMsb; + int32_t CurrPicOrderCntMsb; + int32_t PrevPicOrderCntLsb; + + int32_t FrameNumOffset; + + int32_t PreviousFrameNum; + int32_t PreviousFrameNumOffset; + + int32_t toppoc; + int32_t bottompoc; + int32_t framepoc; + int32_t ThisPOC; + + //int32_t sei_freeze_this_image; + + ///////////////////// Resolutions + int32_t PicWidthInMbs; + int32_t FrameHeightInMbs; + + ///////////////////// MMCO + uint8_t last_has_mmco_5; + uint8_t curr_has_mmco_5; + + /////////////////// Flags + uint8_t g_new_frame; + uint8_t g_new_pic; + + uint8_t structure; + uint8_t second_field; // Set to one if this is the second field of a set of paired fields... + uint8_t field_pic_flag; + uint8_t last_pic_bottom_field; + + uint8_t bottom_field_flag; + uint8_t MbaffFrameFlag; + uint8_t no_output_of_prior_pics_flag; + uint8_t long_term_reference_flag; + + uint8_t skip_this_pic; + uint8_t pic_order_cnt_type; + // Recovery + uint8_t recovery_point_found; + uint8_t used_for_reference; +} h264_img_par; + + +typedef struct _h264_slice_reg_data +{ + uint32_t h264_bsd_slice_p1; // 0x150 + //uint32_t h264_mpr_list0[8]; // from 0x380 to 0x3BC + uint32_t h264_bsd_slice_p2; // 0x154 + uint32_t h264_bsd_slice_start; // 0x158 + +} h264_slice_data; + + +typedef struct _h264_pic_data +{ + uint32_t h264_dpb_init; // 0x40 + //info For current pic + uint32_t h264_cur_bsd_img_init; // 0x140 + uint32_t h264_cur_mpr_tf_poc; // 0x300 + uint32_t h264_cur_mpr_bf_poc; // 0x304 + + //info For framess in DPB + //uint32_t h264_dpb_bsd_img_init[16]; //0x140 + //uint32_t h264_dpb_mpr_tf_poc[16]; // 0x300 + //uint32_t h264_dpb_mpr_bf_poc[16]; // 0x304 +} h264_pic_data; + +enum h264_workload_item_type +{ + VIDDEC_WORKLOAD_H264_SLICE_REG = VIDDEC_WORKLOAD_DECODER_SPECIFIC, + VIDDEC_WORKLOAD_H264_PIC_REG, + VIDDEC_WORKLOAD_H264_DPB_FRAME_POC, + VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET, + VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET, + VIDDEC_WORKLOAD_H264_PWT_ES_BYTES, + VIDDEC_WORKLOAD_H264_SCALING_MATRIX, + VIDDEC_WORKLOAD_H264_DEBUG +}; + + + +//////////////////////////////////////////// +/* Full Info set*/ +//////////////////////////////////////////// +typedef struct _h264_Info +{ + + h264_DecodedPictureBuffer dpb; + + //// Structures + //// need to gurantee active_SPS and active_PPS start from 4-bytes alignment address + seq_param_set_used active_SPS; + pic_param_set active_PPS; + + + h264_Slice_Header_t SliceHeader; + OldSliceParams old_slice; + sei_info sei_information; + + h264_img_par img; + + uint32_t SPS_PADDR_GL; + uint32_t PPS_PADDR_GL; + uint32_t OFFSET_REF_FRAME_PADDR_GL; + uint32_t TMP_OFFSET_REFFRM_PADDR_GL; + + uint32_t h264_list_replacement; + + uint32_t h264_pwt_start_byte_offset; + uint32_t 
h264_pwt_start_bit_offset; + uint32_t h264_pwt_end_byte_offset; + uint32_t h264_pwt_end_bit_offset; + uint32_t h264_pwt_enabled; + + uint32_t sps_valid; + + uint8_t slice_ref_list0[32]; + uint8_t slice_ref_list1[32]; + + + uint8_t qm_present_list; + //h264_NAL_Unit_t + uint8_t nal_unit_type; + uint8_t old_nal_unit_type; + uint8_t got_start; + + //workload + uint8_t push_to_cur; + uint8_t Is_first_frame_in_stream; + uint8_t Is_SPS_updated; + uint8_t number_of_first_au_info_nal_before_first_slice; + + uint8_t is_frame_boundary_detected_by_non_slice_nal; + uint8_t is_frame_boundary_detected_by_slice_nal; + uint8_t is_current_workload_done; + uint8_t primary_pic_type_plus_one; //AUD---[0,7] + + //Error handling + uint8_t sei_rp_received; + uint8_t last_I_frame_idc; + uint8_t sei_b_state_ready; + uint8_t gop_err_flag; + + + uint32_t wl_err_curr; + uint32_t wl_err_next; + +} h264_Info; + + + +struct h264_viddec_parser +{ + uint32_t sps_pps_ddr_paddr; + h264_Info info; +}; + + + + + +#endif //_H264_H_ + + diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h new file mode 100644 index 0000000..c255980 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h @@ -0,0 +1,172 @@ +#ifndef __H264PARSE_H_ +#define __H264PARSE_H_ + +#include "h264.h" + +#ifndef MFD_FIRMWARE +#define true 1 +#define false 0 +#endif + +//////////////////////////////////////////////////////////////////// +// The following part is only for Parser Debug +/////////////////////////////////////////////////////////////////// + + + +enum h264_debug_point_id +{ + WARNING_H264_GENERAL = 0xff000000, + WARNING_H264_DPB, + WARNING_H264_REFLIST, + WARNING_H264_SPS, + WARNING_H264_PPS, + WARNING_H264_SEI, + WARNING_H264_VCL, + + ERROR_H264_GENERAL = 0xffff0000, + ERROR_H264_DPB, + ERROR_H264_REFLIST, + ERROR_H264_SPS, + ERROR_H264_PPS, + ERROR_H264_SEI, + ERROR_H264_VCL +}; + +static inline void MFD_PARSER_DEBUG(int debug_point_id) +{ +#ifdef H264_MFD_DEBUG + + int p1,p2,p3,p4,p5,p6; + + p1 = 0x0BAD; + p2 = 0xC0DE; + p3 = debug_point_id; + p4=p5=p6 = 0; + + DEBUG_WRITE(p1,p2,p3,p4,p5,p6); +#endif + + debug_point_id = debug_point_id; + + return; +} + + + + +//////////////////////////////////////////////////////////////////// +///////////////////////////// Init functions +//////////////////////////////////////////////////////////////////// +extern void h264_init_old_slice(h264_Info* pInfo); +extern void h264_init_img(h264_Info* pInfo); +extern void h264_init_Info(h264_Info* pInfo); +extern void h264_init_Info_under_sps_pps_level(h264_Info* pInfo); +extern void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem); + +extern void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader); +extern void h264_sei_stream_initialise (h264_Info* pInfo); +extern void h264_update_img_info(h264_Info * pInfo ); +extern void h264_update_frame_type(h264_Info * pInfo ); + +extern int32_t h264_check_previous_frame_end(h264_Info * pInfo); + + +//////////////////////////////////////////////////////////////////// +///////////////////////////// bsd functions +//////////////////////////////////////////////////////////////////// +extern uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo); +////// VLE and bit operation +extern uint32_t h264_get_codeNum(void *parent,h264_Info* pInfo); +extern int32_t h264_GetVLCElement(void *parent,h264_Info* pInfo, uint8_t bIsSigned); + + + 
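+/*
+ * Editor's note (illustrative, not part of the original patch):
+ * h264_get_codeNum() and h264_GetVLCElement() above are the Exp-Golomb
+ * readers (ue(v)/se(v) in the H.264 spec) used throughout the SPS/PPS and
+ * slice-header parsing code. A self-contained sketch of the same technique
+ * follows; the bit reader (bit_reader_t/read_bit) is hypothetical and stands
+ * in for the firmware's viddec_pm_get_bits() plumbing.
+ *
+ *   #include <stdint.h>
+ *
+ *   typedef struct { const uint8_t *buf; uint32_t pos; } bit_reader_t;
+ *
+ *   static int read_bit(bit_reader_t *b)  // next RBSP bit, MSB first
+ *   {
+ *       int bit = (b->buf[b->pos >> 3] >> (7 - (b->pos & 7))) & 1;
+ *       b->pos++;
+ *       return bit;
+ *   }
+ *
+ *   static uint32_t ue_v(bit_reader_t *b) // unsigned Exp-Golomb (codeNum)
+ *   {
+ *       int lzb = 0;
+ *       while (read_bit(b) == 0)          // count the leading-zero prefix
+ *           lzb++;
+ *       uint32_t code_num = (1u << lzb) - 1;
+ *       while (lzb--)                     // append the lzb suffix bits
+ *           code_num += (uint32_t)read_bit(b) << lzb;
+ *       return code_num;                  // analogue of h264_get_codeNum()
+ *   }
+ *
+ *   static int32_t se_v(bit_reader_t *b)  // signed Exp-Golomb
+ *   {
+ *       uint32_t k = ue_v(b);             // maps 0,1,2,3,4 -> 0,1,-1,2,-2
+ *       return (k & 1) ? (int32_t)((k + 1) >> 1) : -(int32_t)(k >> 1);
+ *   }
+ *
+ * h264_GetVLCElement() presumably applies the same signed/unsigned mapping,
+ * selected by its bIsSigned argument, on top of h264_get_codeNum().
+ */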
+//////////////////////////////////////////////////////////////////// +///////////////////////////// parse functions +//////////////////////////////////////////////////////////////////// + +//NAL +extern h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc); + +////// Slice header +extern h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); +extern h264_Status h264_Parse_Slice_Header_1(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); +extern h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); +extern h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + + +////// SPS +extern h264_Status h264_Parse_SeqParameterSet(void *parent, h264_Info * pInfo,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame); +//extern h264_Status h264_Parse_SeqParameterSet_Extension(void *parent, h264_Info * pInfo); +extern h264_Status h264_Parse_PicParameterSet(void *parent, h264_Info * pInfo,h264_PicParameterSet_t* PictureParameterSet); + +////// SEI functions +h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void *parent,h264_Info* pInfo); +h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtype payloadType, int32_t payloadSize); + +////// +extern h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t *UseDefaultScalingMatrix, h264_Info* pInfo); +extern h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); +extern h264_Status h264_Parse_Pred_Weight_Table(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); +extern h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); + + + +//////////////////////////////////////////////////////////////////// +///////////////////////////// utils functions +//////////////////////////////////////////////////////////////////// +extern int32_t h264_is_new_picture_start(h264_Info* pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice); +extern int32_t h264_is_second_field(h264_Info * pInfo); +///// Math functions +uint32_t ldiv_mod_u(uint32_t a, uint32_t b, uint32_t * mod); +uint32_t mult_u(uint32_t var1, uint32_t var2); +///// Mem functions +extern void* h264_memset( void* buf, uint32_t c, uint32_t num ); +extern void* h264_memcpy( void* dest, void* src, uint32_t num ); + +extern void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId); +extern void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId); + +extern void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId); +extern void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId); + +extern void h264_Parse_Copy_Offset_Ref_Frames_To_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId); +extern void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId); +extern uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId); +extern void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId); + + +//////////////////////////////////////////////////////////////////// 
+///////////////////////////// workload functions
+////////////////////////////////////////////////////////////////////
+
+extern void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo );
+
+extern void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo );
+
+extern void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo );
+extern void h264_parse_emit_eos( void *parent, h264_Info *pInfo );
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// utils functions outside h264
+////////////////////////////////////////////////////////////////////
+
+extern void *memset(void *s, int32_t c, uint32_t n);
+extern void *memcpy(void *dest, const void *src, uint32_t n);
+extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+extern int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits);
+extern int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// Second level parse functions
+////////////////////////////////////////////////////////////////////
+
+#endif ////__H264PARSE_H_
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h
new file mode 100644
index 0000000..2a19b5f
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h
@@ -0,0 +1,107 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: h264 decoded picture buffer (DPB) control header.
+// +*/ + + +#ifndef _H264_DPB_CTL_H_ +#define _H264_DPB_CTL_H_ + + +#include "h264.h" + +#ifdef __cplusplus +extern "C" { +#endif + +//////////////////////////////////////////////////////////////////// +///////////////////////////// Parser control functions +//////////////////////////////////////////////////////////////////// + +///// Reference list +extern void h264_dpb_update_ref_lists(h264_Info * pInfo); +extern void h264_dpb_reorder_lists(h264_Info * pInfo); + +extern void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb,int32_t NonExisting); + +///// POC +extern void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num); +extern void h264_hdr_post_poc(h264_Info* pInfo,int32_t NonExisting, int32_t frame_num, int32_t use_old); + +///// DPB buffer mangement +extern void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb); + +extern void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc); +extern void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc); +extern void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx); +extern void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx, int32_t fs_idc, int32_t polarity); +extern void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, int32_t picNumX); +extern void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc); + +extern void h264_dpb_update_queue_dangling_field(h264_Info * pInfo); +extern void h264_dpb_is_used_for_reference(int32_t * flag); + + +extern void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb,int32_t index); +extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames); + +extern void h264_dpb_idr_memory_management (h264_Info * pInfo, + seq_param_set_used_ptr active_sps, + int32_t no_output_of_prior_pics_flag); + +extern void h264_dpb_init_frame_store(h264_Info * pInfo); +extern void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHeightInMbs, + int32_t SizeChange, int32_t no_output_of_prior_pics_flag); + +extern void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo); + +extern int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting); + +extern void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, int32_t *pos); +extern void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t * flag); + +extern void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb, + int32_t NonExisting, + int32_t num_ref_frames); +extern int32_t h264_dpb_queue_update(h264_Info * pInfo, + int32_t push, + int32_t direct, + int32_t frame_request, + int32_t num_ref_frames); + +extern void h264_dpb_split_field (h264_Info * pInfo); +extern void h264_dpb_combine_field(int32_t use_old); + +extern void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo, + int32_t used_for_reference, + int32_t add2dpb, + int32_t NonExisting, + int32_t use_old); + +extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo, + int32_t NonExisting, + int32_t use_old); + +extern void h264_dpb_adaptive_memory_management (h264_Info * pInfo); + +extern int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo, + int32_t direct, int32_t request, int32_t num_ref_frames); + +extern void 
h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx);
+extern void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int32_t * existing);
+
+//////////////////////////////////////////////////////////// Globals
+extern frame_store *active_fs;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif //_H264_DPB_CTL_H_
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h
new file mode 100644
index 0000000..e5903cd
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h
@@ -0,0 +1,314 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: h264 SEI (supplemental enhancement information) header.
+//
+*/
+
+
+#ifndef _H264_SEI_H_
+#define _H264_SEI_H_
+
+#include "h264.h"
+
+
+//defines for SEI
+#define MAX_CPB_CNT          32
+#define MAX_NUM_CLOCK_TS     3
+#define MAX_PAN_SCAN_CNT     3
+#define MAX_NUM_SPARE_PICS   16
+#define MAX_SUB_SEQ_LAYERS   256
+#define MAX_SLICE_GRPS       1    // for high profile
+#define NUM_SPS              32
+#define MAX_NUM_REF_SUBSEQS  256
+
+
+#define SEI_SCAN_FORMAT_INTERLACED    0x1
+#define SEI_SCAN_FORMAT_PROGRESSIVE   0x3
+#define SEI_SCAN_FORMAT_VALID(r)  (r&0x1)
+#define SEI_SCAN_FORMAT(r)  ((r&0x2)>>1)
+
+typedef enum
+{
+   SEI_BUF_PERIOD = 0,
+   SEI_PIC_TIMING,
+   SEI_PAN_SCAN,
+   SEI_FILLER_PAYLOAD,
+   SEI_REG_USERDATA,
+   SEI_UNREG_USERDATA,
+   SEI_RECOVERY_POINT,
+   SEI_DEC_REF_PIC_MARKING_REP,
+   SEI_SPARE_PIC,
+   SEI_SCENE_INFO,
+   SEI_SUB_SEQ_INFO,
+   SEI_SUB_SEQ_LAYER,
+   SEI_SUB_SEQ,
+   SEI_FULL_FRAME_FREEZE,
+   SEI_FULL_FRAME_FREEZE_RELEASE,
+   SEI_FULL_FRAME_SNAPSHOT,
+   SEI_PROGRESSIVE_SEGMENT_START,
+   SEI_PROGRESSIVE_SEGMENT_END,
+   SEI_MOTION_CONSTRAINED_SLICE_GRP_SET,
+   SEI_FILM_GRAIN_CHARACTERISTICS,
+   SEI_DEBLK_FILTER_DISPLAY_PREFERENCE,
+   SEI_STEREO_VIDEO_INFO,
+   SEI_RESERVED,
+}h264_sei_payloadtype;
+
+
+
+typedef struct _h264_SEI_buffering_period
+{
+   int32_t seq_param_set_id;
+   int32_t initial_cpb_removal_delay_nal;
+   int32_t initial_cpb_removal_delay_offset_nal;
+   int32_t initial_cpb_removal_delay_vcl;
+   int32_t initial_cpb_removal_delay_offset_vcl;
+
+}h264_SEI_buffering_period_t;
+
+typedef struct _h264_SEI_pic_timing
+{
+   int32_t cpb_removal_delay;
+   int32_t dpb_output_delay;
+   int32_t pic_struct;
+}h264_SEI_pic_timing_t;
+
+#if 0
+int32_t clock_timestamp_flag[MAX_NUM_CLOCK_TS];
+int32_t ct_type[MAX_NUM_CLOCK_TS];
+int32_t nuit_field_based_flag[MAX_NUM_CLOCK_TS];
+int32_t counting_type[MAX_NUM_CLOCK_TS];
+int32_t full_timestamp_flag[MAX_NUM_CLOCK_TS];
+int32_t discontinuity_flag[MAX_NUM_CLOCK_TS];
+int32_t cnt_dropped_flag[MAX_NUM_CLOCK_TS];
+int32_t n_frames[MAX_NUM_CLOCK_TS];
+int32_t seconds_value[MAX_NUM_CLOCK_TS];
+int32_t minutes_value[MAX_NUM_CLOCK_TS];
+int32_t hours_value[MAX_NUM_CLOCK_TS];
+int32_t seconds_flag[MAX_NUM_CLOCK_TS];
+int32_t minutes_flag[MAX_NUM_CLOCK_TS];
+int32_t hours_flag[MAX_NUM_CLOCK_TS];
+int32_t time_offset[MAX_NUM_CLOCK_TS];
+
+#endif
+
+typedef struct _h264_SEI_pan_scan_rectangle
+{
+   int32_t pan_scan_rect_id;
+   int32_t pan_scan_rect_cancel_flag;
+   int32_t pan_scan_cnt_minus1;
+   int32_t pan_scan_rect_left_offset[MAX_PAN_SCAN_CNT];
+   int32_t pan_scan_rect_right_offset[MAX_PAN_SCAN_CNT];
+   int32_t
pan_scan_rect_top_offset[MAX_PAN_SCAN_CNT]; + int32_t pan_scan_rect_bottom_offset[MAX_PAN_SCAN_CNT]; + int32_t pan_scan_rect_repetition_period; +}h264_SEI_pan_scan_rectangle_t; + +typedef struct _h264_SEI_filler_payload +{ + int32_t ff_byte; +}h264_SEI_filler_payload_t; + +typedef struct _h264_SEI_userdata_registered +{ + int32_t itu_t_t35_country_code; + int32_t itu_t_t35_country_code_extension_byte; + int32_t itu_t_t35_payload_byte; +}h264_SEI_userdata_registered_t; + +typedef struct _h264_SEI_userdata_unregistered +{ + int32_t uuid_iso_iec_11578[4]; + int32_t user_data_payload_byte; +}h264_SEI_userdata_unregistered_t; + +typedef struct _h264_SEI_recovery_point +{ + int32_t recovery_frame_cnt; + int32_t exact_match_flag; + int32_t broken_link_flag; + int32_t changing_slice_group_idc; +}h264_SEI_recovery_point_t; + +typedef struct _h264_SEI_decoded_ref_pic_marking_repetition +{ + int32_t original_idr_flag; + int32_t original_frame_num; + int32_t orignal_field_pic_flag; + int32_t original_bottom_field_pic_flag; + int32_t no_output_of_prior_pics_flag; + int32_t long_term_reference_flag; + int32_t adaptive_ref_pic_marking_mode_flag; + int32_t memory_management_control_operation; //UE + int32_t difference_of_pics_num_minus1; //UE + int32_t long_term_pic_num; //UE + int32_t long_term_frame_idx; //UE + int32_t max_long_term_frame_idx_plus1; //UE +}h264_SEI_decoded_ref_pic_marking_repetition_t; + +typedef struct _h264_SEI_spare_picture +{ + int32_t target_frame_num; + int32_t spare_field_flag; + int32_t target_bottom_field_flag; + int32_t num_spare_pics_minus1; + int32_t delta_spare_frame_num[MAX_NUM_SPARE_PICS]; + int32_t spare_bottom_field_flag[MAX_NUM_SPARE_PICS]; + int32_t spare_area_idc[MAX_NUM_SPARE_PICS]; // not complete +}h264_SEI_spare_picture_t; + +typedef struct _h264_SEI_scene_info +{ + int32_t scene_info_present_flag; + int32_t scene_id; + int32_t scene_transitioning_type; + int32_t second_scene_id; +}h264_SEI_scene_info_t; + +typedef struct _h264_SEI_sub_sequence_info +{ + int32_t sub_seq_layer_num; + int32_t sub_seq_id; + int32_t first_ref_pic_flag; + int32_t leading_non_ref_pic_flag; + int32_t last_pic_flag; + int32_t sub_seq_frame_num_flag; + int32_t sub_seq_frame_num; +}h264_SEI_sub_sequence_info_t; + +typedef struct _h264_SEI_sub_sequence_layer +{ + int32_t num_sub_seq_layers_minus1; + int32_t accurate_statistics_flag[MAX_SUB_SEQ_LAYERS]; + int32_t average_bit_rate[MAX_SUB_SEQ_LAYERS]; + int32_t average_frame_rate[MAX_SUB_SEQ_LAYERS]; +}h264_SEI_sub_sequence_layer_t; + +typedef struct _h264_SEI_sub_sequence +{ + int32_t sub_seq_layer_num; + int32_t sub_seq_id; + int32_t duration_flag; + int32_t sub_seq_duration; + int32_t average_rate_flag; + int32_t average_statistics_flag; + int32_t average_bit_rate; + int32_t average_frame_rate; + int32_t num_referenced_subseqs; + int32_t ref_sub_seq_layer_num; + int32_t ref_sub_seq_id; + int32_t ref_sub_seq_direction; +}h264_SEI_sub_sequence_t; + +typedef struct _h264_SEI_full_frame_freeze +{ + int32_t full_frame_freeze_repetition_period; +}h264_SEI_full_frame_freeze_t; + +typedef struct _h264_SEI_full_frame_snapshot +{ + int32_t snapshot_id; +}h264_SEI_full_frame_snapshot_t; + +typedef struct _h264_SEI_progressive_segment_start +{ + int32_t progressive_refinement_id; + int32_t num_refinement_steps_minus1; +}h264_SEI_progressive_segment_start_t; + +typedef struct _h264_SEI_progressive_segment_end +{ + int32_t progressive_refinement_id; +}h264_SEI_progressive_segment_end_t; + +typedef struct _h264_SEI_motion_constrained_slice_group +{ + 
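/* motion-constrained slice group set SEI: slice_group_id[] below is
+    * sized by MAX_SLICE_GRPS (1 here, per the define above). */
+   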
int32_t num_slice_groups_in_set_minus1; + int32_t slice_group_id[MAX_SLICE_GRPS]; + int32_t exact_sample_value_match_flag; + int32_t pan_scan_rect_flag; + int32_t pan_scan_rect_id; +}h264_SEI_motion_constrained_slice_group_t; + +typedef struct _h264_SEI_deblocking_filter_display_pref +{ + int32_t devlocking_display_preference_cancel_flag; + int32_t display_prior_to_deblocking_preferred_flag; + int32_t dec_frame_buffering_constraint_flag; + int32_t deblocking_display_preference_repetition_period; +}h264_SEI_deblocking_filter_display_pref_t; + +typedef struct _h264_SEI_stereo_video_info +{ + int32_t field_views_flag; + int32_t top_field_is_left_view_flag; + int32_t curent_frame_is_left_view_flag; + int32_t next_frame_is_second_view_flag; + int32_t left_view_self_contained_flag; + int32_t right_view_self_contained_flag; +}h264_SEI_stereo_video_info_t; + +typedef struct _h264_SEI_reserved +{ + int32_t reserved_sei_message_payload_byte; +}h264_SEI_reserved_t; + + +//////////////////////////// +// SEI Info +///////////////////////////// + +typedef struct sei_info +{ + int32_t recovery_point; + int32_t recovery_frame_num; + + int32_t capture_POC; + int32_t freeze_POC; + int32_t release_POC; // The POC which when reached will allow display update to re-commence + int32_t disp_frozen; // Indicates display is currently frozen + int32_t freeze_rep_period; + int32_t recovery_frame_cnt; + int32_t capture_fn; + int32_t recovery_fn; + int32_t broken_link; + int32_t scan_format; + int32_t broken_link_pic; +}sei_info, *sei_info_ptr; + +/*typedef struct _h264_SEI +{ + h264_SEI_buffering_period_t buf_period; + h264_SEI_pic_timing_t pic_timing; + h264_SEI_pan_scan_rectangle_t pan_scan_timing; + h264_SEI_filler_payload_t filler_payload; + h264_SEI_userdata_registered_t userdata_reg; + h264_SEI_userdata_unregistered_t userdata_unreg; + h264_SEI_recovery_point_t recovery_point; + h264_SEI_decoded_ref_pic_marking_repetition_t dec_ref_pic_marking_rep; + h264_SEI_spare_picture_t spare_pic; + h264_SEI_scene_info_t scene_info; + h264_SEI_sub_sequence_info_t sub_sequence_info; + h264_SEI_sub_sequence_layer_t sub_sequence_layer; + h264_SEI_sub_sequence_t sub_sequence; + h264_SEI_full_frame_snapshot_t full_frame_snapshot; + h264_SEI_full_frame_t full_frame; + h264_SEI_progressive_segment_start_t progressive_segment_start; + h264_SEI_progressive_segment_end_t progressive_segment_end; + h264_SEI_motion_constrained_slice_group_t motion_constrained_slice_grp; + h264_SEI_deblocking_filter_display_pref_t deblk_filter_display_pref; + h264_SEI_stereo_video_info_t stereo_video_info; + h264_SEI_reserved_t reserved; +}h264_SEI_t; +*/ + + +#endif //_H264_SEI_H_ + + diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c new file mode 100644 index 0000000..a96285d --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c @@ -0,0 +1,786 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved. 
+//
+// Description: h264 parser
+//
+///////////////////////////////////////////////////////////////////////*/
+
+
+#include "h264.h"
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+
+h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t *UseDefaultScalingMatrix, h264_Info* pInfo)
+{
+   int32_t j, scanj;
+   int32_t delta_scale, lastScale, nextScale;
+
+#if 0
+   const uint8_t ZZ_SCAN[16] =
+   {  0,  1,  4,  8,  5,  2,  3,  6,  9, 12, 13, 10,  7, 11, 14, 15
+   };
+
+   const uint8_t ZZ_SCAN8[64] =
+   {  0,  1,  8, 16,  9,  2,  3, 10, 17, 24, 32, 25, 18, 11,  4,  5,
+     12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13,  6,  7, 14, 21, 28,
+     35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
+     58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63
+   };
+#endif
+
+   lastScale = 8;
+   nextScale = 8;
+   scanj = 0;
+
+   for(j=0; j<sizeOfScalingList; j++)
+   {
+      if(nextScale != 0)
+      {
+         delta_scale = h264_GetVLCElement(parent, pInfo, true);
+         nextScale = (lastScale + delta_scale + 256) % 256;
+         *UseDefaultScalingMatrix = (uint8_t)((scanj == 0) && (nextScale == 0));
+      }
+
+      scalingList[scanj] = (nextScale == 0) ? lastScale : nextScale;
+      lastScale = scalingList[scanj];
+      scanj++;
+   }
+
+   return H264_STATUS_OK;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+h264_Status h264_active_par_set(h264_Info* pInfo, h264_Slice_Header_t* SliceHeader)
+{
+   //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+
+   ///////////////////////////////////////////////////
+   // Reload SPS/PPS while
+   //  1) Start of Frame (in case of context switch)
+   //  2) PPS id changed
+   ///////////////////////////////////////////////////
+   if((SliceHeader->first_mb_in_slice == 0) || (SliceHeader->pic_parameter_id != pInfo->active_PPS.pic_parameter_set_id))
+   {
+#ifndef WIN32
+      h264_Parse_Copy_Pps_From_DDR(pInfo, &pInfo->active_PPS, SliceHeader->pic_parameter_id);
+
+      if(pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS)
+      {
+         return H264_PPS_INVALID_PIC_ID;   /// Invalid PPS detected
+      }
+
+      if(pInfo->active_PPS.seq_parameter_set_id != pInfo->active_SPS.seq_parameter_set_id)
+      {
+         pInfo->Is_SPS_updated = 1;
+         h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id);
+         h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id);
+      }
+      else
+      {
+         if(h264_Parse_Check_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id))
+         {
+            pInfo->Is_SPS_updated = 1;
+            h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id);
+            h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id);
+         }
+      }
+
+#else
+      pInfo->active_PPS = PPS_GL[SliceHeader->pic_parameter_id];
+      pInfo->active_SPS = SPS_GL[pInfo->active_PPS.seq_parameter_set_id];
+#endif
+
+      if(pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS)
+      {
+         return H264_PPS_INVALID_PIC_ID;   //// Invalid SPS detected
+      }
+   }
+   else {
+      if((pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS) || (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS))
+      {
+         return H264_PPS_INVALID_PIC_ID;   /// Invalid PPS detected
+      }
+   }
+
+
+   pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1);
+   //pInfo->img.PicHeightInMapUnits = (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1);
+   pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag?
\ + (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1): \ + ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1)<<1); + + + return H264_STATUS_OK; +}; //// End of h264_active_par_set + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +////////////////////////////////////////////////// +// Parse slice header info +////////////////////////////////////////////////// +h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +{ + h264_Status retStatus = H264_STATUS_ERROR; + + //////////////////////////////////////////////////// + //// Parse slice header info + //// Part1: not depend on the active PPS/SPS + //// Part2/3: depend on the active parset + ////////////////////////////////////////////////// + + //retStatus = h264_Parse_Slice_Header_1(pInfo); + + SliceHeader->sh_error = 0; + + if(h264_Parse_Slice_Header_1(parent, pInfo, SliceHeader) == H264_STATUS_OK) + { + ////////////////////////////////////////// + //// Active parameter set for this slice + ////////////////////////////////////////// + retStatus = h264_active_par_set(pInfo, SliceHeader); + } + + if(retStatus == H264_STATUS_OK) { + switch(pInfo->active_SPS.profile_idc) + { + case h264_ProfileBaseline: + case h264_ProfileMain: + case h264_ProfileExtended: + pInfo->active_PPS.transform_8x8_mode_flag=0; + pInfo->active_PPS.pic_scaling_matrix_present_flag =0; + pInfo->active_PPS.second_chroma_qp_index_offset = pInfo->active_PPS.chroma_qp_index_offset; + + default: + break; + } + + if( h264_Parse_Slice_Header_2(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + SliceHeader->sh_error |= 2; + } + else if( h264_Parse_Slice_Header_3(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + SliceHeader->sh_error |= 4; + } + + } else { + SliceHeader->sh_error |= 1; + } + + + //if(SliceHeader->sh_error) { + //pInfo->wl_err_flag |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + //} + + + + ////////////////////////////////// + //// Parse slice data (MB loop) + ////////////////////////////////// + //retStatus = h264_Parse_Slice_Data(pInfo); + { + //uint32_t data = 0; + //if( viddec_pm_peek_bits(parent, &data, 32) == -1) + //retStatus = H264_STATUS_ERROR; + } + //h264_Parse_rbsp_trailing_bits(pInfo); + + return retStatus; +} + + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc) +{ + h264_Status ret = H264_STATUS_ERROR; + + //h264_NAL_Unit_t* NAL = &pInfo->NAL; + uint32_t code; +#if 0 + viddec_pm_get_bits(void * parent,uint32_t * data,uint32_t num_bits)(parent, &code, 24); + viddec_pm_get_bits(parent, &code, 1); //forbidden_zero_bit + + viddec_pm_get_bits(parent, &code, 2); + SliceHeader->nal_ref_idc = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 5); + pInfo->nal_unit_type = (uint8_t)code; +#else +#ifdef VBP + if( viddec_pm_get_bits(parent, &code, 8) != -1) +#else + //// 24bit SC, 1 bit: forbidden_zero_bit, 2 bitrs: nal_ref_idc, 5 bits: nal_unit_type + if( viddec_pm_get_bits(parent, &code, 
32) != -1)
+#endif
+   {
+      *nal_ref_idc = (uint8_t)((code>>5)&0x3);
+      pInfo->nal_unit_type = (uint8_t)((code>>0)&0x1f);
+      ret = H264_STATUS_OK;
+   }
+#endif
+
+   return ret;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+/*!
+ ************************************************************************
+ * \brief
+ *    set defaults for old_slice, used when detecting the first slice
+ *    NAL unit of a new picture
+ ************************************************************************
+ */
+#ifndef INT_MAX
+#define INT_MAX 0x7FFFFFFF
+#endif
+
+#ifndef UINT_MAX
+#define UINT_MAX 0xFFFFFFFF
+#endif
+
+void h264_init_old_slice(h264_Info* pInfo)
+{
+   pInfo->SliceHeader.field_pic_flag = 0;
+
+   pInfo->SliceHeader.pic_parameter_id = 0xFF;
+
+   pInfo->SliceHeader.frame_num = INT_MAX;
+
+   pInfo->SliceHeader.nal_ref_idc = 0xFF;
+
+   pInfo->SliceHeader.idr_flag = 0;
+
+   pInfo->SliceHeader.pic_order_cnt_lsb = UINT_MAX;
+   pInfo->SliceHeader.delta_pic_order_cnt_bottom = INT_MAX;
+
+   pInfo->SliceHeader.delta_pic_order_cnt[0] = INT_MAX;
+   pInfo->SliceHeader.delta_pic_order_cnt[1] = INT_MAX;
+
+   return;
+}
+
+
+void h264_init_img(h264_Info* pInfo)
+{
+   h264_memset(&(pInfo->img), 0x0, sizeof(h264_img_par) );
+
+
+   return;
+}
+
+
+void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem)
+{
+   int32_t i;
+
+   h264_Info * pInfo = &(parser->info);
+
+   parser->sps_pps_ddr_paddr = (uint32_t)persist_mem;
+
+   pInfo->SPS_PADDR_GL = parser->sps_pps_ddr_paddr;
+   pInfo->PPS_PADDR_GL = pInfo->SPS_PADDR_GL + MAX_NUM_SPS * sizeof(seq_param_set_all);
+   pInfo->OFFSET_REF_FRAME_PADDR_GL = pInfo->PPS_PADDR_GL + MAX_NUM_PPS * sizeof(pic_param_set);
+   pInfo->TMP_OFFSET_REFFRM_PADDR_GL = pInfo->OFFSET_REF_FRAME_PADDR_GL +
+                                       MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+
+   h264_memset( &(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used) );
+   h264_memset( &(pInfo->active_PPS), 0x0, sizeof(pic_param_set) );
+
+   /* Global for SPS & PPS */
+   for(i=0;i<MAX_NUM_SPS;i++)
+   {
+      pInfo->active_SPS.seq_parameter_set_id = 0xff;
+      h264_Parse_Copy_Sps_To_DDR (pInfo, &(pInfo->active_SPS), i);
+   }
+   for(i=0;i<MAX_NUM_PPS;i++)
+   {
+      pInfo->active_PPS.seq_parameter_set_id = 0xff;
+      h264_Parse_Copy_Pps_To_DDR (pInfo, &(pInfo->active_PPS), i);
+   }
+
+   pInfo->active_SPS.seq_parameter_set_id = 0xff;
+   pInfo->sps_valid = 0;
+   pInfo->got_start = 0;
+
+   return;
+}
+
+
+void h264_init_Info_under_sps_pps_level(h264_Info* pInfo)
+{
+   int32_t i=0;
+
+   h264_memset( &(pInfo->dpb), 0x0, sizeof(h264_DecodedPictureBuffer) );
+   h264_memset( &(pInfo->SliceHeader), 0x0, sizeof(h264_Slice_Header_t) );
+   h264_memset( &(pInfo->old_slice), 0x0, sizeof(OldSliceParams) );
+   h264_memset( &(pInfo->sei_information), 0x0, sizeof(sei_info) );
+   h264_memset( &(pInfo->img), 0x0, sizeof(h264_img_par) );
+
+   pInfo->h264_list_replacement = 0;
+
+   pInfo->h264_pwt_start_byte_offset = 0;
+   pInfo->h264_pwt_start_bit_offset = 0;
+   pInfo->h264_pwt_end_byte_offset = 0;
+   pInfo->h264_pwt_end_bit_offset = 0;
+   pInfo->h264_pwt_enabled = 0;
+
+   for(i=0;i<32;i++)
+   {
+      pInfo->slice_ref_list0[i] = 0;
+      pInfo->slice_ref_list1[i] = 0;
+   }
+
+   pInfo->qm_present_list = 0;
+
+   pInfo->nal_unit_type = 0;
+   pInfo->old_nal_unit_type = 0xff;
+
+   pInfo->push_to_cur = 0;
+   pInfo->Is_first_frame_in_stream = 1;
+   pInfo->Is_SPS_updated = 0;
+   pInfo->number_of_first_au_info_nal_before_first_slice = 0;
+
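+   /* The two flags below record how the current access-unit boundary was
+    * detected: via a non-slice NAL (AUD/SPS/PPS/SEI/EOSeq, see
+    * h264_check_previous_frame_end) or via the first-slice comparison in
+    * h264_is_new_picture_start. */
+   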
pInfo->is_frame_boundary_detected_by_non_slice_nal = 0; + pInfo->is_frame_boundary_detected_by_slice_nal = 0; + pInfo->is_current_workload_done = 0; + + pInfo->sei_rp_received = 0; + pInfo->last_I_frame_idc = 255; + pInfo->wl_err_curr = 0; + pInfo->wl_err_next = 0; + + pInfo->primary_pic_type_plus_one = 0; + pInfo->sei_b_state_ready = 0; + + /* Init old slice structure */ + h264_init_old_slice(pInfo); + + /* init_dpb */ + h264_init_dpb(&(pInfo->dpb)); + + /* init_sei */ + h264_sei_stream_initialise(pInfo); + +} + +void h264_init_Info(h264_Info* pInfo) +{ + h264_memset(pInfo, 0x0, sizeof(h264_Info)); + + pInfo->old_nal_unit_type = 0xff; + + pInfo->Is_first_frame_in_stream =1; + pInfo->img.frame_count = 0; + pInfo->last_I_frame_idc = 255; + + return; +} + + /* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +///////////////////////////////////////////////////// +// +// Judge whether it is the first VCL of a new picture +// +///////////////////////////////////////////////////// + int32_t h264_is_second_field(h264_Info * pInfo) + { + h264_Slice_Header_t cur_slice = pInfo->SliceHeader; + OldSliceParams old_slice = pInfo->old_slice; + + int result = 0; + + //pInfo->img.second_field = 0; + + /// is it second field? + + //OS_INFO( "xxx is_used = %d\n", pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].is_used); + + if (cur_slice.structure != FRAME) + { + if( ( MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)&&(3 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) ) + &&(0 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) )) + { + if ((cur_slice.frame_num == old_slice.frame_num)||(cur_slice.idr_flag)) + { + + if(old_slice.structure != cur_slice.structure) + { + + if (((cur_slice.structure == TOP_FIELD &&old_slice.structure == BOTTOM_FIELD) || // Condition 1: + (old_slice.structure == TOP_FIELD && cur_slice.structure == BOTTOM_FIELD)) && \ + ((old_slice.nal_ref_idc ==0 && cur_slice.nal_ref_idc == 0) || // Condition 2: + (old_slice.nal_ref_idc !=0 &&cur_slice.nal_ref_idc != 0))) + { + //pInfo->img.second_field = 1; + result = 1; + } + } + } + + + } + + + } + + + + return result; + + } //// End of h264_is_second_field + + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +int32_t h264_is_new_picture_start(h264_Info * pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice) +{ + int result = 0; + + if(pInfo->number_of_first_au_info_nal_before_first_slice) + { + pInfo->number_of_first_au_info_nal_before_first_slice = 0; + return 1; + } + + + + result |= (old_slice.pic_parameter_id != cur_slice.pic_parameter_id); + result |= (old_slice.frame_num != cur_slice.frame_num); + result |= (old_slice.field_pic_flag != cur_slice.field_pic_flag); + if(cur_slice.field_pic_flag && old_slice.field_pic_flag) + { + result |= (old_slice.bottom_field_flag != cur_slice.bottom_field_flag); + } + + result |= (old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \ + ((old_slice.nal_ref_idc == 0) || (cur_slice.nal_ref_idc == 0)); + result |= ( old_slice.idr_flag != cur_slice.idr_flag); + + if (cur_slice.idr_flag && 
old_slice.idr_flag) + { + result |= (old_slice.idr_pic_id != cur_slice.idr_pic_id); + } + + if (pInfo->active_SPS.pic_order_cnt_type == 0) + { + result |= (old_slice.pic_order_cnt_lsb != cur_slice.pic_order_cnt_lsb); + result |= (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom); + } + + if (pInfo->active_SPS.pic_order_cnt_type == 1) + { + result |= (old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]); + result |= (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1]); + } + + return result; +} + + +int32_t h264_check_previous_frame_end(h264_Info * pInfo) +{ + int result = 0; + + if( (h264_NAL_UNIT_TYPE_SLICE==pInfo->old_nal_unit_type)||(h264_NAL_UNIT_TYPE_IDR==pInfo->old_nal_unit_type) ) + { + + switch ( pInfo->nal_unit_type ) + { + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + case h264_NAL_UNIT_TYPE_SPS: + case h264_NAL_UNIT_TYPE_PPS: + case h264_NAL_UNIT_TYPE_SEI: + case h264_NAL_UNIT_TYPE_EOSeq: + case h264_NAL_UNIT_TYPE_EOstream: + case h264_NAL_UNIT_TYPE_Reserved1: + case h264_NAL_UNIT_TYPE_Reserved2: + case h264_NAL_UNIT_TYPE_Reserved3: + case h264_NAL_UNIT_TYPE_Reserved4: + case h264_NAL_UNIT_TYPE_Reserved5: + { + pInfo->img.current_slice_num = 0; + + if((pInfo->img.structure == FRAME) || (pInfo->img.second_field)) { + pInfo->is_frame_boundary_detected_by_non_slice_nal =1; + pInfo->is_current_workload_done=1; + result=1; + } + break; + } + default: + break; + } + + } + + return result; + +} + + + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +////////////////////////////////////////////////////////////// +// 1) Update old slice structure for frame boundary detection +////////////////////////////////////////////////////////////// +void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader) +{ + pInfo->old_slice.pic_parameter_id = pInfo->SliceHeader.pic_parameter_id; + + pInfo->old_slice.frame_num = pInfo->SliceHeader.frame_num; + + pInfo->old_slice.field_pic_flag = pInfo->SliceHeader.field_pic_flag; + + if(pInfo->SliceHeader.field_pic_flag) + { + pInfo->old_slice.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag; + } + + pInfo->old_slice.nal_ref_idc = pInfo->SliceHeader.nal_ref_idc; + + pInfo->old_slice.structure = pInfo->SliceHeader.structure; + + pInfo->old_slice.idr_flag = pInfo->SliceHeader.idr_flag; + if (pInfo->SliceHeader.idr_flag) + { + pInfo->old_slice.idr_pic_id = pInfo->SliceHeader.idr_pic_id; + } + + if (pInfo->active_SPS.pic_order_cnt_type == 0) + { + pInfo->old_slice.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb; + pInfo->old_slice.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom; + } + + if (pInfo->active_SPS.pic_order_cnt_type == 1) + { + pInfo->old_slice.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0]; + pInfo->old_slice.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1]; + } + + ////////////////////////////// Next to current + memcpy(&pInfo->SliceHeader, &next_SliceHeader, sizeof(h264_Slice_Header_t)); + + return; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* 
------------------------------------------------------------------------------------------ */ + +////////////////////////////////////////////////////////////////////////////// +// Initialization for new picture +////////////////////////////////////////////////////////////////////////////// +void h264_update_img_info(h264_Info * pInfo ) +{ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + pInfo->img.frame_num = pInfo->SliceHeader.frame_num; + pInfo->img.structure = pInfo->SliceHeader.structure; + + pInfo->img.field_pic_flag = pInfo->SliceHeader.field_pic_flag; + pInfo->img.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag; + + pInfo->img.MbaffFrameFlag = pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag)); + pInfo->img.pic_order_cnt_type = pInfo->active_SPS.pic_order_cnt_type; + + if(pInfo->img.pic_order_cnt_type == 1) { + pInfo->img.num_ref_frames_in_pic_order_cnt_cycle = pInfo->active_SPS.num_ref_frames_in_pic_order_cnt_cycle; + pInfo->img.delta_pic_order_always_zero_flag = pInfo->active_SPS.delta_pic_order_always_zero_flag; + pInfo->img.offset_for_non_ref_pic = pInfo->active_SPS.offset_for_non_ref_pic; + pInfo->img.offset_for_top_to_bottom_field = pInfo->active_SPS.offset_for_top_to_bottom_field; + } + + pInfo->img.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb; + //pInfo->img.pic_order_cnt_msb = pInfo->SliceHeader.pic_order_cnt_msb; + pInfo->img.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom; + pInfo->img.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0]; + pInfo->img.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1]; + + + pInfo->img.PreviousFrameNum = pInfo->old_slice.frame_num; + + pInfo->img.no_output_of_prior_pics_flag = pInfo->SliceHeader.sh_dec_refpic.no_output_of_prior_pics_flag; + + ////////////////////////////////////////////////// Check SEI recovery point + if (pInfo->sei_information.recovery_point) { + int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); + pInfo->sei_information.recovery_frame_num = (pInfo->img.frame_num + pInfo->sei_information.recovery_frame_cnt) % MaxFrameNum; + } + + if (pInfo->SliceHeader.idr_flag) + pInfo->sei_information.recovery_frame_num = pInfo->img.frame_num; + + + + /////////////////////////////////////////////////Resolution Change + pInfo->img.curr_has_mmco_5 = 0; + + if ( (pInfo->img.PicWidthInMbs != p_dpb->PicWidthInMbs)|| + (pInfo->img.FrameHeightInMbs != p_dpb->FrameHeightInMbs) ) + { + int32_t no_output_old_pics = (pInfo->SliceHeader.idr_flag)? 
pInfo->img.no_output_of_prior_pics_flag : 0; + + // If resolution changed, reset the soft DPB here + h264_dpb_reset_dpb(pInfo, pInfo->img.PicWidthInMbs, pInfo->img.FrameHeightInMbs, 1, no_output_old_pics); + } + + return; + +} ///// End of init new frame + + +void h264_update_frame_type(h264_Info * pInfo ) +{ + +//update frame type + if(pInfo->img.structure == FRAME) + { + if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_IDR << FRAME_TYPE_FRAME_OFFSET); + //pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = 0xff; + //pInfo->dpb.fs[0].pic_type = pInfo->dpb.fs_dec_idc; + + } + else + { + #if 1 + switch(pInfo->SliceHeader.slice_type) + { + case h264_PtypeB: + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_B << FRAME_TYPE_FRAME_OFFSET); + break; + case h264_PtypeSP: + case h264_PtypeP: + if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET != FRAME_TYPE_B) + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_P << FRAME_TYPE_FRAME_OFFSET); + break; + case h264_PtypeI: + case h264_PtypeSI: + if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET == FRAME_TYPE_INVALID) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_I << FRAME_TYPE_FRAME_OFFSET); + } + pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc; + + break; + default: + break; + + } + #endif + + } + + } + else if(pInfo->img.structure == TOP_FIELD) + { + if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));; + } + else + { + switch(pInfo->SliceHeader.slice_type) + { + case h264_PtypeB: + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); + break; + case h264_PtypeSP: + case h264_PtypeP: + if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET != FRAME_TYPE_B) + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); + break; + case h264_PtypeI: + case h264_PtypeSI: + if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET == FRAME_TYPE_INVALID) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); + } + break; + default: + break; + + } + + } + + + }else if(pInfo->img.structure == BOTTOM_FIELD) + { + if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));; + } + else + { + switch(pInfo->SliceHeader.slice_type) + { + case h264_PtypeB: + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); + break; + case h264_PtypeSP: + case h264_PtypeP: + if( 
((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET != FRAME_TYPE_B) + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); + break; + case h264_PtypeI: + case h264_PtypeSI: + if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET == FRAME_TYPE_INVALID) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); + } + break; + default: + break; + + } + + } + + } + return; + +} + + +//////#endif ///////////// IFDEF H264_PARSE_C/////////////////// + diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c new file mode 100644 index 0000000..c4e00ee --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c @@ -0,0 +1,228 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved. +// +// Description: h264 bistream decoding +// +///////////////////////////////////////////////////////////////////////*/ + + +#include "h264.h" +#include "h264parse.h" +#include "viddec_parser_ops.h" + + + + + +/** + get_codeNum :Get codenum based on sec 9.1 of H264 spec. + @param cxt : Buffer adress & size are part inputs, the cxt is updated + with codeNum & sign on sucess. + Assumption: codeNum is a max of 32 bits + + @retval 1 : Sucessfuly found a code num, cxt is updated with codeNum, sign, and size of code. + @retval 0 : Couldn't find a code in the current buffer. + be freed. +*/ + +uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo) +{ + int32_t leadingZeroBits= 0; + uint32_t temp = 0, match = 0, noOfBits = 0, count = 0; + uint32_t codeNum =0; + uint32_t bits_offset =0, byte_offset =0; + uint8_t is_emul =0; + uint8_t is_first_byte = 1; + uint32_t length =0; + uint32_t bits_need_add_in_first_byte =0; + int32_t bits_operation_result=0; + + //remove warning + pInfo = pInfo; + + ////// Step 1: parse through zero bits until we find a bit with value 1. + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + + while(!match) + { + if ((bits_offset != 0) && ( is_first_byte == 1)) + { + //we handle byte at a time, if we have offset then for first + // byte handle only 8 - offset bits + noOfBits = (uint8_t)(8 - bits_offset); + bits_operation_result = viddec_pm_peek_bits(parent, &temp, noOfBits); + + + temp = (temp << bits_offset); + if(temp!=0) + { + bits_need_add_in_first_byte = bits_offset; + } + is_first_byte =0; + } + else + { + noOfBits = 8;/* always 8 bits as we read a byte at a time */ + bits_operation_result = viddec_pm_peek_bits(parent, &temp, 8); + + } + + if(-1==bits_operation_result) + { + return MAX_INT32_VALUE; + } + + if(temp != 0) + { + // if byte!=0 we have at least one bit with value 1. + count=1; + while(((temp & 0x80) != 0x80) && (count <= noOfBits)) + { + count++; + temp = temp <<1; + } + //At this point we get the bit position of 1 in current byte(count). 
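+ // Worked example (illustrative): for input bits 0001_0100..., the scan
+ // stops with count == 4 (three zeros plus the marker 1), so after the
+ // "leadingZeroBits--" in step 2 we read 3 info bits (010b = 2) and get
+ // codeNum = (1<<3) - 1 + 2 = 9.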
+ + match = 1; + leadingZeroBits += count; + } + else + { + // we don't have a 1 in current byte + leadingZeroBits += noOfBits; + } + + if(!match) + { + //actually move the bitoff by viddec_pm_get_bits + viddec_pm_get_bits(parent, &temp, noOfBits); + } + else + { + //actually move the bitoff by viddec_pm_get_bits + viddec_pm_get_bits(parent, &temp, count); + } + + } + ////// step 2: Now read the next (leadingZeroBits-1) bits to get the encoded value. + + + if(match) + { + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + /* bit position in current byte */ + //count = (uint8_t)((leadingZeroBits + bits_offset)& 0x7); + count = ((count + bits_need_add_in_first_byte)& 0x7); + + leadingZeroBits --; + length = leadingZeroBits; + codeNum = 0; + noOfBits = 8 - count; + + + while(leadingZeroBits > 0) + { + if(noOfBits < (uint32_t)leadingZeroBits) + { + viddec_pm_get_bits(parent, &temp, noOfBits); + + + codeNum = (codeNum << noOfBits) | temp; + leadingZeroBits -= noOfBits; + } + else + { + viddec_pm_get_bits(parent, &temp, leadingZeroBits); + + codeNum = (codeNum << leadingZeroBits) | temp; + leadingZeroBits = 0; + } + + + noOfBits = 8; + } + // update codeNum = 2 ** (leadingZeroBits) -1 + read_bits(leadingZeroBits). + codeNum = codeNum + (1 << length) -1; + + } + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + if(bits_offset!=0) + { + viddec_pm_peek_bits(parent, &temp, 8-bits_offset); + } + + return codeNum; +} + + +/*---------------------------------------*/ +/*---------------------------------------*/ +int32_t h264_GetVLCElement(void *parent, h264_Info* pInfo, uint8_t bIsSigned) +{ + int32_t sval = 0; + signed char sign; + + sval = h264_get_codeNum(parent , pInfo); + + if(bIsSigned) //get signed integer golomb code else the value is unsigned + { + sign = (sval & 0x1)?1:-1; + sval = (sval +1) >> 1; + sval = sval * sign; + } + + return sval; +} // Ipp32s H264Bitstream::GetVLCElement(bool bIsSigned) + +/// +/// Check whether more RBSP data left in current NAL +/// +uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo) +{ + uint8_t cnt = 0; + + uint8_t is_emul =0; + uint8_t cur_byte = 0; + int32_t shift_bits =0; + uint32_t ctr_bit = 0; + uint32_t bits_offset =0, byte_offset =0; + + //remove warning + pInfo = pInfo; + + if (!viddec_pm_is_nomoredata(parent)) + return 1; + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + shift_bits = 7-bits_offset; + + // read one byte + viddec_pm_get_cur_byte(parent, &cur_byte); + + ctr_bit = ((cur_byte)>> (shift_bits--)) & 0x01; + + // a stop bit has to be one + if (ctr_bit==0) + return 1; + + while (shift_bits>=0 && !cnt) + { + cnt |= (((cur_byte)>> (shift_bits--)) & 0x01); // set up control bit + } + + return (cnt); +} + + + +///////////// EOF///////////////////// + diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c new file mode 100644 index 0000000..d1b693b --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c @@ -0,0 +1,4171 @@ + +/*! 
+ ***********************************************************************
+ * \file: h264_dpb_ctl.c
+ *
+ ***********************************************************************
+ */
+
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+#include "viddec_h264_parse.h"
+
+
+
+//#include
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+//#include "h264_debug.h"
+
+#ifndef NULL
+#define NULL 0
+#endif
+//#ifndef USER_MODE
+//#define NULL 0
+//#endif
+
+////////////////////////// Declare Globals///////////////////////////////
+frame_store *active_fs;
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+///////////////////////// DPB init //////////////////////////////////////////
+//////////////////////////////////////////////////////////////////////////////
+// Init DPB
+// Description: init dpb, which should be called when the decoder is opened
+//
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb)
+{
+   int32_t i;
+
+   //// Init DPB to zero
+   //h264_memset(p_dpb, 0x0, sizeof(h264_DecodedPictureBuffer) );
+
+
+   for(i=0;i<NUM_DPB_FRAME_STORES;i++)
+   {
+      p_dpb->fs[i].fs_idc = MPD_DPB_FS_NULL_IDC;
+      p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+   }
+   p_dpb->used_size = 0;
+   p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+   p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC;
+
+   return;
+}
+
+
+///////////////////////// Reference list management //////////////////////////
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_add_ref_list ()
+//
+// Adds an idc to the short term reference list
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_add_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+   p_dpb->fs_ref_idc[p_dpb->ref_frames_in_buffer] = ref_idc;
+   p_dpb->ref_frames_in_buffer++;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_add_ltref_list ()
+//
+// Adds an idc to the long term reference list
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_add_ltref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+   p_dpb->fs_ltref_idc[p_dpb->ltref_frames_in_buffer] = ref_idc;
+   p_dpb->ltref_frames_in_buffer++;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// 
h264_dpb_update_all_ref_lists (h264_DecodedPictureBuffer * p_dpb,int32_t NonExisting) +// +// Decide whether the current picture needs to be added to the reference lists +// active_fs should be set-up prior to calling this function +// +// Check if we need to search the lists here +// or can we go straight to adding to ref lists.. +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb, int32_t NonExisting) +{ + if(NonExisting) + h264_dpb_set_active_fs(p_dpb,p_dpb->fs_non_exist_idc); + else + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + //if(active_fs->is_reference) + if(active_fs->frame.used_for_reference) + { + if(viddec_h264_get_is_long_term(active_fs)) + { + if(viddec_h264_get_dec_structure(active_fs) == FRAME) + h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc); + else + { + uint32_t found_in_list = 0, i = 0; + for (i = 0; (i < p_dpb->ltref_frames_in_buffer) && (found_in_list == 0); i++) { + if(p_dpb->fs_ltref_idc[i] == active_fs->fs_idc) found_in_list = 1; + } + + if(found_in_list == 0) h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc); + } + } + else + { + if(viddec_h264_get_dec_structure(active_fs) == FRAME) { + h264_dpb_add_ref_list(p_dpb, active_fs->fs_idc); + } else + { + uint32_t found_in_list = 0, i = 0; + + for (i = 0; (i < p_dpb->ref_frames_in_buffer) && (found_in_list == 0); i++) + { + if(p_dpb->fs_ref_idc[i] == active_fs->fs_idc) found_in_list = 1; + } + + if(found_in_list == 0) h264_dpb_add_ref_list(p_dpb, active_fs->fs_idc); + } + } + } + + return; + +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// Set active fs +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb, int32_t index) +{ + active_fs = &p_dpb->fs[index]; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// Sort reference list +////////////////////////////////////////////////////////////////////////////// + +void h264_list_sort(uint8_t *list, int32_t *sort_indices, int32_t size, int32_t desc) +{ + int32_t j, k, temp, idc; + + // Dodgy looking for embedded code here... 
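+ // (It is a plain bubble sort over (list, sort_indices) pairs: desc==1
+ // orders sort_indices descending, desc==0 ascending. The bitwise &/~
+ // selection below assumes desc is exactly 0 or 1.)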
+ if(size > 1) + { + for (j = 0; j < size-1; j = j + 1) { + for (k = j + 1; k < size; k = k + 1) { + if ((desc & (sort_indices[j] < sort_indices[k]))| + (~desc & (sort_indices[j] > sort_indices[k])) ) + { + temp = sort_indices[k]; + sort_indices[k] = sort_indices[j]; + sort_indices[j] = temp; + idc = list[k]; + list[k] = list[j]; + list[j] = idc; + } + } + } + } +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_pic_is_bottom_field_ref () +// +// Used to sort a list based on a corresponding sort indices +////////////////////////////////////////////////////////////////////////////// + +int32_t h264_dpb_pic_is_bottom_field_ref(int32_t long_term) +{ + int32_t temp; + if(long_term) temp = ((active_fs->bottom_field.used_for_reference) && (active_fs->bottom_field.is_long_term)) ? 1 : 0; + else temp = ((active_fs->bottom_field.used_for_reference) && !(active_fs->bottom_field.is_long_term)) ? 1 : 0; + + return temp; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_pic_is_top_field_ref () +// +// Used to sort a list based on a corresponding sort indices +////////////////////////////////////////////////////////////////////////////// + +int32_t h264_dpb_pic_is_top_field_ref(int32_t long_term) +{ + int32_t temp; + if(long_term) + temp = ((active_fs->top_field.used_for_reference) && (active_fs->top_field.is_long_term)) ? 1 : 0; + else + temp = ((active_fs->top_field.used_for_reference) && !(active_fs->top_field.is_long_term)) ? 1 : 0; + + return temp; +} + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_gen_pic_list_from_frame_list () +// +// Used to sort a list based on a corresponding sort indices +////////////////////////////////////////////////////////////////////////////// + +int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, uint8_t *pic_list, uint8_t *frame_list, int32_t currPicStructure, int32_t list_size, int32_t long_term) +{ + int32_t top_idx, bot_idx, got_pic, list_idx; + int32_t lterm; + + list_idx = 0; + lterm = (long_term)? 
1:0; + + if(list_size){ + + + top_idx = 0; + bot_idx = 0; + + if (currPicStructure == TOP_FIELD) { + while ((top_idx < list_size)||(bot_idx < list_size)) + { + /////////////////////////////////////////// ref Top Field + got_pic = 0; + while ((top_idx < list_size) & ~got_pic) + { + h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]); + if ((viddec_h264_get_is_used(active_fs))&0x1) + { + if(h264_dpb_pic_is_top_field_ref(long_term)) + { + pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field + list_idx++; + got_pic = 1; + } + } + top_idx++; + } + + /////////////////////////////////////////// ref Bottom Field + got_pic = 0; + while ((bot_idx < list_size) & ~got_pic) + { + h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]); + if ((viddec_h264_get_is_used(active_fs))&0x2) + { + if(h264_dpb_pic_is_bottom_field_ref(long_term)) + { + pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field + list_idx++; + got_pic = 1; + } + } + bot_idx++; + } + } + } + + /////////////////////////////////////////////// current Bottom Field + if (currPicStructure == BOTTOM_FIELD) { + while ((top_idx < list_size)||(bot_idx < list_size)) + { + /////////////////////////////////////////// ref Top Field + got_pic = 0; + while ((bot_idx < list_size) && (!(got_pic))) + { + h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]); + if ((viddec_h264_get_is_used(active_fs))&0x2) { + if(h264_dpb_pic_is_bottom_field_ref(long_term)) { + // short term ref pic + pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field + list_idx++; + got_pic = 1; + } + } + bot_idx++; + } + + /////////////////////////////////////////// ref Bottom Field + got_pic = 0; + while ((top_idx < list_size) && (!(got_pic))) + { + h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]); + if ((viddec_h264_get_is_used(active_fs))&0x1) { + if(h264_dpb_pic_is_top_field_ref(long_term)){ + // short term ref pic + pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field + list_idx++; + got_pic = 1; + } + } + top_idx++; + } + } + } + } + + return list_idx; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_remove_ref_list () +// +// Removes an idc from the refernce list and updates list after +// + +void h264_dpb_remove_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc) +{ + uint8_t idx = 0; + int32_t Found = 0; + + while ((idx < p_dpb->ref_frames_in_buffer) && (!(Found))) + { + if (p_dpb->fs_ref_idc[idx] == ref_idc) + Found = 1; + else + idx++; + } + + if (Found) + { + // Move the remainder of the list up one + while(idx < p_dpb->ref_frames_in_buffer - 1) { + p_dpb->fs_ref_idc[idx] = p_dpb->fs_ref_idc[idx + 1]; + idx ++; + } + + p_dpb->fs_ref_idc[idx] = MPD_DPB_FS_NULL_IDC; // Clear the last one + p_dpb->ref_frames_in_buffer--; + } + + return; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* 
------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_ltref_list ()
+//
+// Removes an idc from the long term reference list and updates the list
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_ltref_list(h264_DecodedPictureBuffer * p_dpb,int32_t ref_idc)
+{
+    uint8_t idx = 0;
+    int32_t Found = 0;
+
+    while ((idx < p_dpb->ltref_frames_in_buffer) && (!(Found)))
+    {
+        if (p_dpb->fs_ltref_idc[idx] == ref_idc) Found = 1;
+        else idx++;
+    }
+
+    if (Found)
+    {
+        // Move the remainder of the list up one
+        while(idx < (uint8_t)(p_dpb->ltref_frames_in_buffer - 1))
+        {
+            p_dpb->fs_ltref_idc[idx] = p_dpb->fs_ltref_idc[idx + 1];
+            idx ++;
+        }
+        p_dpb->fs_ltref_idc[idx] = MPD_DPB_FS_NULL_IDC; // Clear the last one
+
+        p_dpb->ltref_frames_in_buffer--;
+    }
+
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_update_ref_lists ()
+//
+// Used to initialise the reference lists
+// Also assigns picture numbers and long term picture numbers for P or B slices
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_update_ref_lists(h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer * p_dpb = &pInfo->dpb;
+
+    int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+
+    uint8_t list0idx, list0idx_1, listltidx;
+    uint8_t idx;
+
+    uint8_t add_top, add_bottom, diff;
+    uint8_t list_idc;
+    uint8_t check_non_existing, skip_picture;
+
+
+    uint8_t gen_pic_fs_list0[16];
+    uint8_t gen_pic_fs_list1[16];
+    uint8_t gen_pic_fs_listlt[16];
+    uint8_t gen_pic_pic_list[32]; // check out these sizes...
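+    // Note on list entries (a sketch of the assumed encoding, inferred from the
+    // PUT_* macros used below - not normative): each byte written to listX_0 /
+    // listX_1 packs the frame-store idc together with flag bits, e.g.
+    //
+    //     entry = PUT_LIST_LONG_TERM_BITS(lt)       // long-term reference flag
+    //           + PUT_FS_IDC_BITS(fs_idc)           // frame store index
+    //           + PUT_LIST_INDEX_FIELD_BIT(bottom); // field polarity (fields only)
+    //
+    // Under this assumption the literal (1<<6) added to long-term frame
+    // entries below is the same bit as PUT_LIST_LONG_TERM_BITS(1).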
+ + uint8_t sort_fs_idc[16]; + int32_t list_sort_number[16]; + +#ifdef DUMP_HEADER_INFO + static int cc1 = 0; + //OS_INFO("-------------cc1= %d\n",cc1); /////// DEBUG info + if(cc1 == 255) + idx = 0; +#endif + + list0idx = list0idx_1 = listltidx = 0; + + if (pInfo->SliceHeader.structure == FRAME) + { + ////////////////////////////////////////////////// short term handling + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if((viddec_h264_get_is_used(active_fs) == 3)&&(active_fs->frame.used_for_reference == 3)) + { + if (active_fs->frame_num > pInfo->img.frame_num) + active_fs->frame_num_wrap = active_fs->frame_num - MaxFrameNum; + else + active_fs->frame_num_wrap = active_fs->frame_num; + + active_fs->frame.pic_num = active_fs->frame_num_wrap; + + // Use this opportunity to sort list for a p-frame + if(pInfo->SliceHeader.slice_type == h264_PtypeP) + { + sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx] = active_fs->frame.pic_num; + list0idx++; + } + } + } + + if(pInfo->SliceHeader.slice_type == h264_PtypeP) + { + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); + for (idx = 0; idx < list0idx; idx++) + p_dpb->listX_0[idx] = (sort_fs_idc[idx]); // frame + + p_dpb->listXsize[0] = list0idx; + } + + ////////////////////////////////////////////////// long term handling + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + if ((viddec_h264_get_is_used(active_fs) == 3) && (viddec_h264_get_is_long_term(active_fs) == 3) && (active_fs->frame.used_for_reference == 3)) + { + active_fs->frame.long_term_pic_num = active_fs->frame.long_term_frame_idx; + + if(pInfo->SliceHeader.slice_type == h264_PtypeP) + { + sort_fs_idc[list0idx-p_dpb->listXsize[0]] = p_dpb->fs_ltref_idc[idx]; + list_sort_number[list0idx-p_dpb->listXsize[0]] = active_fs->frame.long_term_pic_num; + list0idx++; + } + } + } + + if(pInfo->SliceHeader.slice_type == h264_PtypeP) + { + h264_list_sort(sort_fs_idc, list_sort_number, list0idx-p_dpb->listXsize[0], 0); + for (idx = p_dpb->listXsize[0]; idx < list0idx; idx++) { + p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; + } + p_dpb->listXsize[0] = list0idx; + } + } + else /// Field base + { + if (pInfo->SliceHeader.structure == TOP_FIELD) + { + add_top = 1; + add_bottom = 0; + } + else + { + add_top = 0; + add_bottom = 1; + } + + ////////////////////////////////////////////P0: Short term handling + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + if (active_fs->frame.used_for_reference) + { + if(active_fs->frame_num > pInfo->SliceHeader.frame_num) { + active_fs->frame_num_wrap = active_fs->frame_num - MaxFrameNum; + } else { + active_fs->frame_num_wrap = active_fs->frame_num; + } + + if ((active_fs->frame.used_for_reference)&0x1) { + active_fs->top_field.pic_num = (active_fs->frame_num_wrap << 1) + add_top; + } + + if ((active_fs->frame.used_for_reference)&0x2) { + active_fs->bottom_field.pic_num = (active_fs->frame_num_wrap << 1) + add_bottom; + } + + if(pInfo->SliceHeader.slice_type == h264_PtypeP) { + sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx] = active_fs->frame_num_wrap; + list0idx++; + } + } + } + + if(pInfo->SliceHeader.slice_type == h264_PtypeP) + { + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); + for (idx = 0; idx < list0idx; idx++) { + gen_pic_fs_list0[idx] = 
sort_fs_idc[idx]; + } + + p_dpb->listXsize[0] = 0; + p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0); + + for (idx = 0; idx < p_dpb->listXsize[0]; idx++) + { + p_dpb->listX_0[idx] = gen_pic_pic_list[idx]; + } + } + + ////////////////////////////////////////////P0: long term handling + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + + if (viddec_h264_get_is_long_term(active_fs)&0x1) { + active_fs->top_field.long_term_pic_num = (active_fs->top_field.long_term_frame_idx << 1) + add_top; + } + + if (viddec_h264_get_is_long_term(active_fs)&0x2) { + active_fs->bottom_field.long_term_pic_num = (active_fs->bottom_field.long_term_frame_idx << 1) + add_bottom; + } + + if(pInfo->SliceHeader.slice_type == h264_PtypeP) + { + sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx]; + list_sort_number[listltidx] = active_fs->long_term_frame_idx; + listltidx++; + } + } + + if(pInfo->SliceHeader.slice_type == h264_PtypeP) + { + h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0); + for (idx = 0; idx < listltidx; idx++) { + gen_pic_fs_listlt[idx] = sort_fs_idc[idx]; + } + list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1); + + for (idx = 0; idx < list0idx_1; idx++) { + p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx]; + } + p_dpb->listXsize[0] += list0idx_1; + } + } + + + if (pInfo->SliceHeader.slice_type == h264_PtypeI) + { + p_dpb->listXsize[0] = 0; + p_dpb->listXsize[1] = 0; + return; + } + + if(pInfo->SliceHeader.slice_type == h264_PtypeP) + { + //// Forward done above + p_dpb->listXsize[1] = 0; + } + + + // B-Slice + // Do not include non-existing frames for B-pictures when cnt_type is zero + + if(pInfo->SliceHeader.slice_type == h264_PtypeB) + { + list0idx = list0idx_1 = listltidx = 0; + skip_picture = 0; + + if(pInfo->active_SPS.pic_order_cnt_type == 0) + check_non_existing = 1; + else + check_non_existing = 0; + + if (pInfo->SliceHeader.structure == FRAME) + { + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + if (viddec_h264_get_is_used(active_fs) == 3) + { + if(check_non_existing) + { + if(viddec_h264_get_is_non_existent(active_fs)) skip_picture = 1; + else skip_picture = 0; + } + + if(skip_picture == 0) + { + if ((active_fs->frame.used_for_reference==3) && (!(active_fs->frame.is_long_term))) + { + if (pInfo->img.framepoc >= active_fs->frame.poc) + { + sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx] = active_fs->frame.poc; + list0idx++; + } + } + } + } + } + + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); + for (idx = 0; idx < list0idx; idx++) { + p_dpb->listX_0[idx] = sort_fs_idc[idx]; + } + + list0idx_1 = list0idx; + + /////////////////////////////////////////B0: Short term handling + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if (viddec_h264_get_is_used(active_fs) == 3) + { + if(check_non_existing) + { + if(viddec_h264_get_is_non_existent(active_fs)) skip_picture = 1; + else skip_picture = 0; + } + + if(skip_picture == 0) + { + if ((active_fs->frame.used_for_reference) && (!(active_fs->frame.is_long_term))) + { + if (pInfo->img.framepoc < active_fs->frame.poc) + { + sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx]; + 
list_sort_number[list0idx-list0idx_1] = active_fs->frame.poc; + list0idx++; + } + } + } + } + } + + h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0); + for (idx = list0idx_1; idx < list0idx; idx++) { + p_dpb->listX_0[idx] = sort_fs_idc[idx-list0idx_1]; + } + + for (idx = 0; idx < list0idx_1; idx++) { + p_dpb->listX_1[list0idx-list0idx_1+idx] = p_dpb->listX_0[idx]; + } + + for (idx = list0idx_1; idx < list0idx; idx++) { + p_dpb->listX_1[idx-list0idx_1] = p_dpb->listX_0[idx]; + } + + p_dpb->listXsize[0] = list0idx; + p_dpb->listXsize[1] = list0idx; + + /////////////////////////////////////////B0: long term handling + list0idx = 0; + + // Can non-existent pics be set as long term?? + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + + if ((viddec_h264_get_is_used(active_fs) == 3) && (viddec_h264_get_is_long_term(active_fs) == 3)) + { + // if we have two fields, both must be long-term + sort_fs_idc[list0idx] = p_dpb->fs_ltref_idc[idx]; + list_sort_number[list0idx] = active_fs->frame.long_term_pic_num; + list0idx++; + } + } + + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 0); + for (idx = p_dpb->listXsize[0]; idx < (p_dpb->listXsize[0]+list0idx); idx = idx + 1) + { + p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; + p_dpb->listX_1[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; + } + + p_dpb->listXsize[0] += list0idx; + p_dpb->listXsize[1] += list0idx; + } + else // Field + { + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if (viddec_h264_get_is_used(active_fs)) { + if(check_non_existing) { + if(viddec_h264_get_is_non_existent(active_fs)) + skip_picture = 1; + else + skip_picture = 0; + } + + if(skip_picture == 0) { + if (pInfo->img.ThisPOC >= active_fs->frame.poc) { + sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx] = active_fs->frame.poc; + list0idx++; + } + } + } + } + + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); + for (idx = 0; idx < list0idx; idx = idx + 1) { + gen_pic_fs_list0[idx] = sort_fs_idc[idx]; + } + + list0idx_1 = list0idx; + + ///////////////////////////////////////////// B1: Short term handling + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + if (viddec_h264_get_is_used(active_fs)) + { + if(check_non_existing) { + if(viddec_h264_get_is_non_existent(active_fs)) + skip_picture = 1; + else + skip_picture = 0; + } + + if(skip_picture == 0) { + if (pInfo->img.ThisPOC < active_fs->frame.poc) { + sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx-list0idx_1] = active_fs->frame.poc; + list0idx++; + } + } + } + } + + ///// Generate frame list from sorted fs + ///// + h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0); + for (idx = list0idx_1; idx < list0idx; idx++) + gen_pic_fs_list0[idx] = sort_fs_idc[idx-list0idx_1]; + + for (idx = 0; idx < list0idx_1; idx++) + gen_pic_fs_list1[list0idx-list0idx_1+idx] = gen_pic_fs_list0[idx]; + + for (idx = list0idx_1; idx < list0idx; idx++) + gen_pic_fs_list1[idx-list0idx_1] = gen_pic_fs_list0[idx]; + + ///// Generate List_X0 + ///// + p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0); + + for (idx = 0; idx < p_dpb->listXsize[0]; idx++) + p_dpb->listX_0[idx] = gen_pic_pic_list[idx]; + + //// 
Generate List X1 + //// + p_dpb->listXsize[1] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list1, pInfo->img.structure, list0idx, 0); + + for (idx = 0; idx < p_dpb->listXsize[1]; idx++) + p_dpb->listX_1[idx] = gen_pic_pic_list[idx]; + + ///////////////////////////////////////////// B1: long term handling + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx]; + list_sort_number[listltidx] = active_fs->long_term_frame_idx; + listltidx++; + } + + h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0); + for (idx = 0; idx < listltidx; idx++) + gen_pic_fs_listlt[idx] = sort_fs_idc[idx]; + + list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1); + + for (idx = 0; idx < list0idx_1; idx++) + { + p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx]; + p_dpb->listX_1[p_dpb->listXsize[1]+idx] = gen_pic_pic_list[idx]; + } + + p_dpb->listXsize[0] += list0idx_1; + p_dpb->listXsize[1] += list0idx_1; + } + } + + // Setup initial list sizes at this point + p_dpb->nInitListSize[0] = p_dpb->listXsize[0]; + p_dpb->nInitListSize[1] = p_dpb->listXsize[1]; + if(pInfo->SliceHeader.slice_type != h264_PtypeI) + { + if ((p_dpb->listXsize[0]==p_dpb->listXsize[1]) && (p_dpb->listXsize[0] > 1)) + { + // check if lists are identical, if yes swap first two elements of listX[1] + diff = 0; + for (idx = 0; idx < p_dpb->listXsize[0]; idx = idx + 1) + { + if (p_dpb->listX_0[idx] != p_dpb->listX_1[idx]) diff = 1; + } + + + if (!(diff)) + { + list_idc = p_dpb->listX_1[0]; + p_dpb->listX_1[0] = p_dpb->listX_1[1]; + p_dpb->listX_1[1] = list_idc; + } + } + + // set max size + if (p_dpb->listXsize[0] > pInfo->SliceHeader.num_ref_idx_l0_active) + { + p_dpb->listXsize[0] = pInfo->SliceHeader.num_ref_idx_l0_active; + } + + + if (p_dpb->listXsize[1] > pInfo->SliceHeader.num_ref_idx_l1_active) + { + p_dpb->listXsize[1] = pInfo->SliceHeader.num_ref_idx_l1_active; + } + + + + } + + + + /// DPB reorder list + h264_dpb_reorder_lists(pInfo); + + return; +} //// End of init_dpb_list + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_get_short_term_pic () +// +// Sets active_fs to point to frame store containing picture with given picNum +// Sets field_flag, bottom_field and err_flag based on the picture and whether +// it is available or not... 
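+// NOTE: in this version availability is actually signalled through the return
+// value - NULL when no match is found - and through *bottom_field_bit, rather
+// than through field_flag/err_flag. A minimal usage sketch, assumed from
+// h264_dpb_reorder_ref_pic_list() further below:
+//
+//     int32_t bottom_bit;
+//     frame_param_ptr fs = h264_dpb_get_short_term_pic(pInfo, pic_num, &bottom_bit);
+//     if (fs) // build a packed list entry for the matching frame store
+//         entry = bottom_bit + PUT_FS_IDC_BITS(fs->fs_idc);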
+// +static frame_param_ptr h264_dpb_get_short_term_pic(h264_Info * pInfo,int32_t pic_num, int32_t *bottom_field_bit) +{ + register uint32_t idx; + register frame_param_ptr temp_fs; + + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + *bottom_field_bit = 0; + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + temp_fs = &p_dpb->fs[p_dpb->fs_ref_idc[idx]]; + if (pInfo->SliceHeader.structure == FRAME) + { + if(temp_fs->frame.used_for_reference == 3) + if (!(temp_fs->frame.is_long_term)) + if (temp_fs->frame.pic_num == pic_num) return temp_fs; + } + else // current picture is a field + { + if (temp_fs->frame.used_for_reference&0x1) + if (!(temp_fs->top_field.is_long_term)) + if (temp_fs->top_field.pic_num == pic_num) + { + return temp_fs; + } + + if (temp_fs->frame.used_for_reference&0x2) + if (!(temp_fs->bottom_field.is_long_term)) + if (temp_fs->bottom_field.pic_num == pic_num) + { + *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1); + return temp_fs; + } + } + } + return NULL; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_get_long_term_pic () +// +// Sets active_fs to point to frame store containing picture with given picNum +// + +static frame_param_ptr h264_dpb_get_long_term_pic(h264_Info * pInfo,int32_t long_term_pic_num, int32_t *bottom_field_bit) +{ + register uint32_t idx; + register frame_param_ptr temp_fs; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + *bottom_field_bit = 0; + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + temp_fs = &p_dpb->fs[p_dpb->fs_ltref_idc[idx]]; + if (pInfo->SliceHeader.structure == FRAME) + { + if (temp_fs->frame.used_for_reference == 3) + if (temp_fs->frame.is_long_term) + if (temp_fs->frame.long_term_pic_num == long_term_pic_num) + return temp_fs; + } + else + { + if (temp_fs->frame.used_for_reference&0x1) + if (temp_fs->top_field.is_long_term) + if (temp_fs->top_field.long_term_pic_num == long_term_pic_num) + return temp_fs; + + if (temp_fs->frame.used_for_reference&0x2) + if (temp_fs->bottom_field.is_long_term) + if (temp_fs->bottom_field.long_term_pic_num == long_term_pic_num) + { + *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1); + return temp_fs; + } + } + } + return NULL; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_reorder_ref_pic_list () +// +// Used to sort a list based on a corresponding sort indices +// + +struct list_value_t +{ + int32_t value; + struct list_value_t *next; +}; + +struct linked_list_t +{ + struct list_value_t *begin; + struct list_value_t *end; + struct list_value_t *entry; + struct list_value_t *prev_entry; + struct list_value_t list[32]; +}; + +static void linked_list_initialize (struct linked_list_t *lp, uint8_t *vp, int32_t size) +{ + struct list_value_t *lvp; + + lvp = lp->list; + lp->begin = lvp; + lp->entry = lvp; + lp->end = lvp + (size-1); + lp->prev_entry = NULL; + + while (lvp <= lp->end) + { + 
lvp->value = *(vp++); + lvp->next = lvp + 1; + lvp++; + } + lp->end->next = NULL; + return; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +static void linked_list_reorder (struct linked_list_t *lp, int32_t list_value) +{ + register struct list_value_t *lvp = lp->entry; + register struct list_value_t *lvp_prev; + + if (lvp == NULL) { + lp->end->value = list_value; // replace the end entry + } else if ((lp->begin==lp->end)||(lvp==lp->end)) // replece the begin/end entry and set the entry to NULL + { + lp->entry->value = list_value; + lp->prev_entry = lp->entry; + lp->entry = NULL; + } + else if (lvp->value==list_value) // the entry point matches + { + lp->prev_entry = lvp; + lp->entry = lvp->next; + } + else if (lvp->next == lp->end) // the entry is just before the end + { + // replace the end and swap the end and entry points + // lvp + // prev_entry => entry => old_end + // old_end & new_prev_entry => new_end & entry + lp->end->value = list_value; + + if (lp->prev_entry) + lp->prev_entry->next = lp->end; + else + lp->begin = lp->end; + + lp->prev_entry = lp->end; + lp->end->next = lvp; + lp->end = lvp; + lvp->next = NULL; + } + else + { + lvp_prev = NULL; + while (lvp->next) // do not check the end but we'll be in the loop at least once + { + if (lvp->value == list_value) break; + lvp_prev = lvp; + lvp = lvp->next; + } + lvp->value = list_value; // force end matches + + // remove lvp from the list + lvp_prev->next = lvp->next; + if (lvp==lp->end) lp->end = lvp_prev; + + // insert lvp in front of lp->entry + if (lp->entry==lp->begin) + { + lvp->next = lp->begin; + lp->begin = lvp; + } + else + { + lvp->next = lp->entry; + lp->prev_entry->next = lvp; + } + lp->prev_entry = lvp; + } + return; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +static void linked_list_output (struct linked_list_t *lp, int32_t *vp) +{ + register int32_t *ip1; + register struct list_value_t *lvp; + + lvp = lp->begin; + ip1 = vp; + while (lvp) + { + *(ip1++) = lvp->value; + lvp = lvp->next; + } + return; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +int32_t h264_dpb_reorder_ref_pic_list(h264_Info * pInfo,int32_t list_num, int32_t num_ref_idx_active) +{ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + uint8_t *remapping_of_pic_nums_idc; + list_reordering_num_t *list_reordering_num; + int32_t bottom_field_bit; + + int32_t maxPicNum, currPicNum, picNumLXNoWrap, picNumLXPred, pic_num; + int32_t refIdxLX; + int32_t i; + + int32_t PicList[32] = {0}; + struct linked_list_t ll; + struct linked_list_t *lp = ≪ // should consider use the scratch space + + // declare these below as registers gave me 23 cy/MB for the worst frames in Allegro_Combined_CABAC_07_HD, YHu + register frame_param_ptr temp_fs; + register int32_t temp; + register uint8_t *ip1; + + maxPicNum = 1 << 
(pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+
+
+    if (list_num == 0) // i.e. list 0
+    {
+        ip1 = p_dpb->listX_0;
+        remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l0.reordering_of_pic_nums_idc;
+        list_reordering_num = pInfo->SliceHeader.sh_refpic_l0.list_reordering_num;
+    }
+    else
+    {
+        ip1 = p_dpb->listX_1;
+        remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l1.reordering_of_pic_nums_idc;
+        list_reordering_num = pInfo->SliceHeader.sh_refpic_l1.list_reordering_num;
+    }
+
+
+    linked_list_initialize (lp, ip1, num_ref_idx_active);
+
+    currPicNum = pInfo->SliceHeader.frame_num;
+    if (pInfo->SliceHeader.structure != FRAME)
+    {
+
+        /* The reason for the +1, I think, is that field pic nums carry the
+           polarity in the LSB - expand later...
+        */
+        maxPicNum  <<= 1;
+        currPicNum <<= 1;
+        currPicNum++;
+    }
+
+    picNumLXPred = currPicNum;
+    refIdxLX = 0;
+
+    for (i = 0; remapping_of_pic_nums_idc[i] != 3; i++)
+    {
+        if(i > MAX_NUM_REF_FRAMES)
+        {
+            break;
+        }
+
+        if (remapping_of_pic_nums_idc[i] < 2) // - short-term re-ordering
+        {
+            temp = (list_reordering_num[i].abs_diff_pic_num_minus1 + 1);
+            if (remapping_of_pic_nums_idc[i] == 0)
+            {
+                temp = picNumLXPred - temp;
+                if (temp < 0 ) picNumLXNoWrap = temp + maxPicNum;
+                else           picNumLXNoWrap = temp;
+            }
+            else // (remapping_of_pic_nums_idc[i] == 1)
+            {
+                temp += picNumLXPred;
+                if (temp >= maxPicNum) picNumLXNoWrap = temp - maxPicNum;
+                else                   picNumLXNoWrap = temp;
+            }
+
+            // Updates for next iteration of the loop
+            picNumLXPred = picNumLXNoWrap;
+
+            if (picNumLXNoWrap > currPicNum ) pic_num = picNumLXNoWrap - maxPicNum;
+            else                              pic_num = picNumLXNoWrap;
+
+            temp_fs = h264_dpb_get_short_term_pic(pInfo, pic_num, &bottom_field_bit);
+            if (temp_fs)
+            {
+                temp = bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc);
+                linked_list_reorder (lp, temp);
+            }
+        }
+        else //(remapping_of_pic_nums_idc[i] == 2) long-term re-ordering
+        {
+            pic_num = list_reordering_num[i].long_term_pic_num;
+
+            temp_fs = h264_dpb_get_long_term_pic(pInfo, pic_num, &bottom_field_bit);
+            if (temp_fs)
+            {
+                temp = PUT_LIST_LONG_TERM_BITS(1) + bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc);
+                linked_list_reorder (lp, temp);
+            }
+        }
+    }
+
+    linked_list_output (lp, PicList);
+
+    if(0 == list_num )
+    {
+        for(i=0; i<num_ref_idx_active; i++)
+        {
+            pInfo->slice_ref_list0[i]=(uint8_t)PicList[i];
+        }
+    }
+    else
+    {
+        for(i=0; i<num_ref_idx_active; i++)
+        {
+            pInfo->slice_ref_list1[i]=(uint8_t)PicList[i];
+        }
+    }
+
+
+    // Instead of updating the now reordered list here, just write it down...
+    // This way, we can continue to hold the initialised list in p_dpb->listX_0
+    // and therefore not need to update it every slice
+
+    //h264_dpb_write_list(list_num, PicList, num_ref_idx_active);
+
+    return num_ref_idx_active;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+void h264_dpb_RP_check_list (h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    uint8_t *p_list = pInfo->slice_ref_list0;
+
+    //
+    // If decoding starts from a recovery point (RP) without an exact entry point,
+    // all B frames belonging to the previous GOP should be thrown away!
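+    // (sei_rp_received is presumably set when a recovery-point SEI message is
+    // parsed; sei_b_state_ready is set again further below once a P slice has
+    // been checked, so B slices arriving in between are flagged not decodable.)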
+    //
+
+    if((pInfo->SliceHeader.slice_type == h264_PtypeB)&&(pInfo->sei_b_state_ready ==0) && pInfo->sei_rp_received) {
+        pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+        pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+    }
+
+
+    //
+    // Repair the reference list if it was damaged - with RP recovery only
+    //
+    if((pInfo->SliceHeader.slice_type == h264_PtypeP) && pInfo->sei_rp_received)
+    {
+
+        int32_t idx, rp_found = 0;
+
+        if(pInfo->SliceHeader.num_ref_idx_l0_active == 1)
+        {
+            if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+            {
+                p_list = pInfo->slice_ref_list0;
+            }
+            else
+            {
+                p_list = pInfo->dpb.listX_0;
+                //pInfo->sei_rp_received = 0;
+                //return;
+            }
+
+
+            for(idx = 0; idx < p_dpb->used_size; idx++) {
+                if(p_dpb->fs_dpb_idc[idx] == pInfo->last_I_frame_idc) {
+                    rp_found = 1;
+                    break;
+                }
+            }
+            if(rp_found) {
+#if 0
+                int32_t poc;
+
+                ///// Clear long-term ref list
+                for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+                {
+                    h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ltref_idc[0]);
+                    h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]);
+                }
+
+                ///// Clear short-term ref list
+                //while(p_dpb->used_size>1)
+                for(idx = 0; idx < p_dpb->used_size; idx++)
+                {
+                    int32_t idx_pos;
+                    //// find smallest non-output POC
+                    h264_dpb_get_smallest_poc(p_dpb, &poc, &idx_pos);
+
+                    //// Remove all frames in previous GOP
+                    if ((idx_pos != MPD_DPB_FS_NULL_IDC) && (p_dpb->fs_dpb_idc[idx_pos] != pInfo->last_I_frame_idc))
+                    {
+                        // Remove from ref-list
+                        h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx_pos]);
+                        h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx_pos]);
+
+                        // Output from DPB
+                        //h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+                        //if((active_fs->is_output == 0) && (active_fs->is_non_existent == 0))
+                        {
+                            //int32_t existing;
+                            //h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[idx], 0, &existing);
+                            //p_dpb->last_output_poc = poc;
+                        }
+                        //h264_dpb_remove_frame_from_dpb(p_dpb, idx); // Remove dpb.fs_dpb_idc[pos]
+
+                    }
+                }
+#endif
+                ///// Set the reference to last I frame
+                if( (pInfo->last_I_frame_idc!=255)&&(pInfo->last_I_frame_idc!=p_list[0]))
+                {
+                    /// Repair the reference list now
+                    h264_dpb_unmark_for_reference(p_dpb, p_list[0]);
+                    h264_dpb_remove_ref_list(p_dpb, p_list[0]);
+                    p_list[0] = pInfo->last_I_frame_idc;
+                }
+
+            }
+        }
+
+        pInfo->sei_rp_received = 0;
+        pInfo->sei_b_state_ready = 1;
+
+    }
+
+
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_reorder_lists ()
+//
+// Reorders listX_0 and listX_1 according to the ref_pic_list_reordering
+// syntax in the slice header, then clips them to the active list sizes
+//
+
+void h264_dpb_reorder_lists(h264_Info * pInfo)
+{
+    int32_t currSliceType = pInfo->SliceHeader.slice_type;
+
+    if (currSliceType == h264_PtypeP )
+    {
+        /////////////////////////////////////////////// Reordering reference list for P slice
+        /// Forward reordering
+        if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+            h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active);
+        else
+        {
+
+        }
+        pInfo->dpb.listXsize[0]=pInfo->SliceHeader.num_ref_idx_l0_active;
+    } else if (currSliceType == h264_PtypeB)
+    {
+        /////////////////////////////////////////////// Reordering reference list for B slice
+        /// Forward reordering
+        if
(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) + h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active); + else + { + + } + pInfo->dpb.listXsize[0]=pInfo->SliceHeader.num_ref_idx_l0_active; + + /// Backward reordering + if (pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag) + h264_dpb_reorder_ref_pic_list(pInfo, 1, pInfo->SliceHeader.num_ref_idx_l1_active); + else + { + + } + pInfo->dpb.listXsize[1]=pInfo->SliceHeader.num_ref_idx_l1_active; + } + + //// Check if need recover reference list with previous recovery point + h264_dpb_RP_check_list(pInfo); + + + return; +} + +////////////////////////////////////////// DPB management ////////////////////// + +////////////////////////////////////////////////////////////////////////////// +// avc_dpb_get_non_output_frame_number () +// +// get total non output frame number in the DPB. +// +static int32_t avc_dpb_get_non_output_frame_number(h264_Info * pInfo) +{ + int32_t idx; + int32_t number=0; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + for (idx = 0; idx < p_dpb->used_size; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + + if (viddec_h264_get_is_output(active_fs) == 0) + { + (number)++; + } + } + + return number; +} + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +//// Store previous picture in DPB, and then update DPB queue, remove unused frames from DPB + +void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExisting, int32_t use_old) +{ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + int32_t used_for_reference; + int32_t is_direct_output; + int32_t second_field_stored = 0; + int32_t poc; + int32_t pos; + int32_t flag; + int32_t first_field_non_ref = 0; + int32_t idr_flag; + + if(NonExisting) { + if(p_dpb->fs_non_exist_idc == MPD_DPB_FS_NULL_IDC) + return; + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); + } else { + if(p_dpb->fs_dec_idc == MPD_DPB_FS_NULL_IDC) + return; + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + } + + if(NonExisting == 0) + { + //active_fs->sps_disp_index = (next_sps_disp_entry == 0)? 7 : next_sps_disp_entry - 1; + pInfo->img.last_has_mmco_5 = 0; + pInfo->img.last_pic_bottom_field = pInfo->img.bottom_field_flag; + + //used_for_reference = (use_old) ? !(old_pInfo->img.old_disposable_flag) : !(pInfo->img.disposable_flag); + used_for_reference = (use_old) ? !(pInfo->old_slice.nal_ref_idc==0) : !(pInfo->SliceHeader.nal_ref_idc==0); + + switch (viddec_h264_get_dec_structure(active_fs)) + { + case(TOP_FIELD) : { + active_fs->top_field.used_for_reference = used_for_reference; + viddec_h264_set_is_top_used(active_fs, 1); + //active_fs->crc_field_coded = 1; + }break; + case(BOTTOM_FIELD): { + active_fs->bottom_field.used_for_reference = used_for_reference << 1; + viddec_h264_set_is_bottom_used(active_fs, 1); + //active_fs->crc_field_coded = 1; + }break; + default: { + active_fs->frame.used_for_reference = used_for_reference?3:0; + viddec_h264_set_is_frame_used(active_fs, 3); + //if(pInfo->img.MbaffFrameFlag) active_fs->crc_field_coded = 1; + + }break; + } + + //freeze_assert = use_old ? old_pInfo->img.sei_freeze_this_image : pInfo->img.sei_freeze_this_image; + //if (freeze_assert) sei_information.disp_frozen = 1; + + idr_flag = use_old ? 
pInfo->old_slice.idr_flag : pInfo->SliceHeader.idr_flag; + if (idr_flag) { + h264_dpb_idr_memory_management (pInfo, &pInfo->active_SPS, pInfo->img.no_output_of_prior_pics_flag); + } else { + // adaptive memory management + if (used_for_reference & pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) { + h264_dpb_adaptive_memory_management(pInfo); + } + } + // Reset the active frame store - could have changed in mem management ftns + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + if ((viddec_h264_get_dec_structure(active_fs) == TOP_FIELD)||(viddec_h264_get_dec_structure(active_fs) == BOTTOM_FIELD)) + { + // check for frame store with same pic_number -- always true in my case, YH + // when we allocate frame store for the second field, we make sure the frame store for the second + // field is the one that contains the first field of the frame- see h264_dpb_init_frame_store() + // This is different from JM model. + // In this way we don't need to move image data around and can reduce memory bandwidth. + // simply check if the check if the other field has been decoded or not + + if (viddec_h264_get_is_used(active_fs) != 0) + { + if(pInfo->img.second_field) + { + h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 0, NonExisting, use_old); + second_field_stored = 1; + } + } + } + } + else + { // Set up locals for non-existing frames + used_for_reference = 1; + + active_fs->frame.used_for_reference = used_for_reference?3:0; + viddec_h264_set_is_frame_used(active_fs, 3); + viddec_h264_set_dec_structure(active_fs, FRAME); + pInfo->img.structure = FRAME; + } + + is_direct_output = 0; + if (NonExisting == 0) + { + if(p_dpb->used_size >= p_dpb->BumpLevel) + { + // non-reference frames may be output directly + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + if ((used_for_reference == 0) && (viddec_h264_get_is_used(active_fs) == 3)) + { + h264_dpb_get_smallest_poc (p_dpb, &poc, &pos); + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + if ((pos == MPD_DPB_FS_NULL_IDC) || (pInfo->img.ThisPOC < poc)) + { + is_direct_output = 1; + } + } + } + } + + if (NonExisting) { + h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames); + } else if(pInfo->SliceHeader.idr_flag == 0) { + if(used_for_reference){ + if(pInfo->img.second_field == 0) { + if (pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag == 0) { + h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames); + } + } + } + } + + h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + + //if (is_direct_output == 0) + { + if ((pInfo->img.second_field == 0) || (NonExisting)) + { + h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 1, NonExisting, use_old); + } + + // In an errored stream we saw a condition where + // p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer > p_dpb->BumpLevel, + // which in itself is an error, but this means first_field_non_ref will + // not get set and causes problems for h264_dpb_queue_update() + if((pInfo->img.structure != FRAME) && (pInfo->img.second_field == 0)) { + if(used_for_reference == 0) + if(p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer == p_dpb->BumpLevel) + first_field_non_ref = 1; + } + + } + + if(NonExisting) + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); + else + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + if(NonExisting == 0) + { + if((pInfo->img.second_field == 1) || (pInfo->img.structure == FRAME)) + { + //h264_send_new_decoded_frame(); 
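+            // Assumed semantics of the two output controls used below:
+            // OutputCtrl gates whether a frame may be bumped out of the DPB
+            // now, and OutputLevel (when OutputLevelValid) is the number of
+            // not-yet-output frames the DPB should accumulate before output
+            // begins - cf. the HSD 212625 note below.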
+            if((p_dpb->OutputCtrl) && (is_direct_output == 0))
+                h264_dpb_output_one_frame_from_dpb(pInfo, 0, 0,pInfo->active_SPS.num_ref_frames);
+
+            // Pictures inserted by this point - check if we have reached the specified output
+            // level (if one has been specified) so we can begin on next call
+
+            /*
+            Fixed HSD 212625---------------should compare OutputLevel with non-output frame number in dpb, not the used number in dpb
+            if((p_dpb->OutputLevelValid)&&(p_dpb->OutputCtrl == 0))
+            {
+                if(p_dpb->used_size == p_dpb->OutputLevel)
+                    p_dpb->OutputCtrl = 1;
+            }
+            */
+
+            if(p_dpb->OutputLevelValid)
+            {
+                int32_t non_output_frame_number=0;
+                non_output_frame_number = avc_dpb_get_non_output_frame_number(pInfo);
+
+                if(non_output_frame_number == p_dpb->OutputLevel)
+                    p_dpb->OutputCtrl = 1;
+                else
+                    p_dpb->OutputCtrl = 0;
+            }
+            else {
+                p_dpb->OutputCtrl = 0;
+            }
+        }
+    }
+
+    while(p_dpb->used_size > (p_dpb->BumpLevel + first_field_non_ref))
+    //while(p_dpb->used_size > p_dpb->BumpLevel)
+    {
+        h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
+        //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+    }
+
+    //
+    // Do not output "direct output" pictures until the semaphore has been set
+    // indicating that the pic is decoded!!
+    //
+    if(is_direct_output) {
+        h264_dpb_queue_update(pInfo, 1, 1, 0,pInfo->active_SPS.num_ref_frames);
+        //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+    }
+
+    //
+    // Add reference pictures into Reference list
+    //
+    if(used_for_reference) {
+        h264_dpb_insert_ref_lists(&pInfo->dpb, NonExisting);
+    }
+
+    h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+
+
+    return;
+} ////////////// End of DPB store pic
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_insert_picture_in_dpb ()
+//
+// Insert the decoded picture into the DPB. A free DPB position is necessary
+// for frames.
+// This ftn tends to fill out the framestore's top level parameters from the
+// storable picture's parameters within it. It is called from
+// h264_dpb_store_previous_picture_in_dpb()
+//
+// This function finishes by updating the reference lists - this means it must be called after
+// h264_dpb_sliding_window_memory_management()
+//
+// In the case of a frame it will call h264_dpb_split_field()
+// In the case of the second field of a complementary field pair it calls h264_dpb_combine_field()
+//
+
+void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference, int32_t add2dpb, int32_t NonExisting, int32_t use_old)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    if(NonExisting == 0) {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+        active_fs->frame_num = (use_old) ?
pInfo->old_slice.frame_num : pInfo->SliceHeader.frame_num; + } + else { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); + active_fs->frame_num = active_fs->frame.pic_num; + } + + if (add2dpb) { + p_dpb->fs_dpb_idc[p_dpb->used_size] = active_fs->fs_idc; + p_dpb->used_size++; + } + + + switch (viddec_h264_get_dec_structure(active_fs)) + { + case FRAME :{ + viddec_h264_set_is_frame_used(active_fs, 3); + active_fs->frame.used_for_reference = used_for_reference?3:0; + if (used_for_reference) + { + active_fs->frame.used_for_reference = 3; + if (active_fs->frame.is_long_term) + viddec_h264_set_is_frame_long_term(active_fs, 3); + } + // Split frame to 2 fields for prediction + h264_dpb_split_field(pInfo); + + }break; + case TOP_FIELD :{ + viddec_h264_set_is_top_used(active_fs, 1); + + active_fs->top_field.used_for_reference = used_for_reference; + if (used_for_reference) + { + active_fs->frame.used_for_reference |= 0x1; + if (active_fs->top_field.is_long_term) + { + viddec_h264_set_is_top_long_term(active_fs, 1); + active_fs->long_term_frame_idx = active_fs->top_field.long_term_frame_idx; + } + } + if (viddec_h264_get_is_used(active_fs) == 3) { + h264_dpb_combine_field(use_old); // generate frame view + } + else + { + active_fs->frame.poc = active_fs->top_field.poc; + } + + }break; + case BOTTOM_FIELD :{ + viddec_h264_set_is_bottom_used(active_fs, 1); + + active_fs->bottom_field.used_for_reference = (used_for_reference<<1); + if (used_for_reference) + { + active_fs->frame.used_for_reference |= 0x2; + if (active_fs->bottom_field.is_long_term) + { + viddec_h264_set_is_bottom_long_term(active_fs, 1); + active_fs->long_term_frame_idx = active_fs->bottom_field.long_term_frame_idx; + } + } + if (viddec_h264_get_is_used(active_fs) == 3) { + h264_dpb_combine_field(use_old); // generate frame view + } + else + { + active_fs->frame.poc = active_fs->bottom_field.poc; + } + + }break; + } +/* + if ( gRestartMode.LastRestartType == RESTART_SEI ) + { + if ( active_fs->open_gop_entry ) dpb.WaitSeiRecovery = 1; + } + + gRestartMode.LastRestartType = 0xFFFF; +*/ + + return; +} ////// End of insert picture in DPB + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_mm_unmark_short_term_for_reference () +// +// Adaptive Memory Management: Mark short term picture unused +// + +void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t difference_of_pic_nums_minus1) +{ + int32_t picNumX; + int32_t currPicNum; + uint32_t idx; + int32_t unmark_done; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + if (pInfo->img.structure == FRAME) + currPicNum = pInfo->img.frame_num; + else + currPicNum = (pInfo->img.frame_num << 1) + 1; + + picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1); + + unmark_done = 0; + + for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(unmark_done)); idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if (pInfo->img.structure == FRAME) + { + /* If all pic numbers in the list are different (and they should be) + we should terminate the for loop the moment we match pic numbers, + no need to continue to check - hence set unmark_done + */ + + if ((active_fs->frame.used_for_reference == 3) && 
(viddec_h264_get_is_long_term(active_fs) == 0) && + (active_fs->frame.pic_num == picNumX)) + { + h264_dpb_unmark_for_reference(p_dpb, active_fs->fs_idc); + h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); + unmark_done = 1; + } + } + else + { + /* + If we wish to unmark a short-term picture by picture number when the current picture + is a field, we have to unmark the corresponding field as unused for reference, + and also if it was part of a frame or complementary reference field pair, the + frame is to be marked as unused. However the opposite field may still be used as a + reference for future fields + + How will this affect the reference list update ftn coming after?? + + */ + if ((active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(active_fs)&0x01))&& + (active_fs->top_field.pic_num == picNumX) ) + { + active_fs->top_field.used_for_reference = 0; + active_fs->frame.used_for_reference &= 2; + + unmark_done = 1; + + //Check if other field is used for short-term reference, if not remove from list... + if(active_fs->bottom_field.used_for_reference == 0) + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); + } + if ((active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(active_fs)&0x2)) && + (active_fs->bottom_field.pic_num == picNumX) ) + { + active_fs->bottom_field.used_for_reference = 0; + active_fs->frame.used_for_reference &= 1; + + unmark_done = 1; + + //Check if other field is used for reference, if not remove from list... + if(active_fs->top_field.used_for_reference == 0) + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); + } + } + } + + return; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +//////////////////////////////////////////////////////////////////////////////////// +// h264_dpb_mm_unmark_long_term_for_reference () +// +// Adaptive Memory Management: Mark long term picture unused +// +// In a frame situation the long_term_pic_num will refer to another frame. 
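+// (for a frame, long_term_pic_num is the same as long_term_frame_idx and
+// covers both of its fields - see h264_dpb_update_ref_lists() above)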
+// Thus we can call h264_dpb_unmark_for_long_term_reference() and then remove the picture +// from the list +// +// If the current picture is a field, long_term_pic_num will refer to another field +// It is also the case that each individual field should have a unique picture number +// 8.2.5.4.2 suggests that when curr pic is a field, an mmco == 2 operation +// should be accompanied by a second op to unmark the other field as being unused +/////////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_mm_unmark_long_term_for_reference (h264_Info * pInfo, int32_t long_term_pic_num) +{ + uint32_t idx; + int32_t unmark_done; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + unmark_done = 0; + for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (!(unmark_done)); idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + + if (pInfo->img.structure == FRAME) + { + if ((active_fs->frame.used_for_reference==3) && (viddec_h264_get_is_long_term(active_fs)==3) && + (active_fs->frame.long_term_pic_num == long_term_pic_num)) + { + h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); + unmark_done = 1; + } + } + else + { + /// Check top field + if ((active_fs->frame.used_for_reference&0x1) && (viddec_h264_get_is_long_term(active_fs)&0x1) && + (active_fs->top_field.long_term_pic_num == long_term_pic_num) ) + { + active_fs->top_field.used_for_reference = 0; + active_fs->top_field.is_long_term = 0; + active_fs->frame.used_for_reference &= 2; + viddec_h264_set_is_frame_long_term(active_fs, 2); + + unmark_done = 1; + + //Check if other field is used for long term reference, if not remove from list... + if ((active_fs->bottom_field.used_for_reference == 0) || (active_fs->bottom_field.is_long_term == 0)) + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); + } + + /// Check Bottom field + if ((active_fs->frame.used_for_reference&0x2) && (viddec_h264_get_is_long_term(active_fs)&0x2) && + (active_fs->bottom_field.long_term_pic_num == long_term_pic_num) ) + { + active_fs->bottom_field.used_for_reference = 0; + active_fs->bottom_field.is_long_term = 0; + active_fs->frame.used_for_reference &= 1; + viddec_h264_set_is_frame_long_term(active_fs, 1); + + unmark_done = 1; + //Check if other field is used for long term reference, if not remove from list... 
+ if ((active_fs->top_field.used_for_reference == 0) || (active_fs->top_field.is_long_term == 0)) + { + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); + } + } + } // field structure + } //for(idx) + + return; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_get_pic_struct_by_pic_num +// +// Searches the fields appearing in short term reference list +// Returns the polarity of the field with pic_num = picNumX +////////////////////////////////////////////////////////////////////////////// + +int32_t h264_dpb_get_pic_struct_by_pic_num(h264_DecodedPictureBuffer *p_dpb, int32_t picNumX) +{ + uint32_t idx; + int32_t pic_struct = INVALID; + int32_t found = 0; + + for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(found)); idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if ((active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(active_fs)&0x01))&& + (active_fs->top_field.pic_num == picNumX) ) + { + found = 1; + pic_struct = TOP_FIELD; + + } + if ((active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(active_fs)&0x2)) && + (active_fs->bottom_field.pic_num == picNumX) ) + { + found = 1; + pic_struct = BOTTOM_FIELD; + + } + } + + return pic_struct; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_mm_assign_long_term_frame_idx () +// +// Assign a long term frame index to a short term picture +// Both lists must be updated as part of this process... 
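+// (i.e. the picture identified by picNumX leaves the short-term list and
+// joins the long-term list under long_term_frame_idx - the actual moves are
+// done in h264_dpb_mark_pic_long_term() via h264_dpb_add_ltref_list() /
+// h264_dpb_remove_ref_list())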
+////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_mm_assign_long_term_frame_idx(h264_Info * pInfo, int32_t difference_of_pic_nums_minus1, int32_t long_term_frame_idx) +{ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + int32_t picNumX; + int32_t currPicNum; + int32_t polarity = 0; + + if (pInfo->img.structure == FRAME) { + currPicNum = pInfo->img.frame_num; + } else { + currPicNum = (pInfo->img.frame_num << 1) + 1; + } + + picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1); + + // remove frames / fields with same long_term_frame_idx + if (pInfo->img.structure == FRAME) { + h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx); + } else { + polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX); + + if(polarity != INVALID) + h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, active_fs->fs_idc, polarity); + } + + h264_dpb_mark_pic_long_term(pInfo, long_term_frame_idx, picNumX); + + return; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_mm_update_max_long_term_frame_idx () +// +// Set new max long_term_frame_idx +// + +void h264_dpb_mm_update_max_long_term_frame_idx(h264_DecodedPictureBuffer *p_dpb,int32_t max_long_term_frame_idx_plus1) +{ + //h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + int32_t idx; + int32_t temp; + int32_t removed_count; + int32_t idx2 = 0; + + p_dpb->max_long_term_pic_idx = max_long_term_frame_idx_plus1 - 1; + + temp = p_dpb->ltref_frames_in_buffer; + removed_count = 0; + + // check for invalid frames + for (idx = 0; idx < temp; idx++) + { + idx2 = idx - removed_count; + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx2]); + + if (active_fs->long_term_frame_idx > p_dpb->max_long_term_pic_idx) + { + removed_count++; + h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx2]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx2]); + } + } + return; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_mm_unmark_all_short_term_for_reference () +// +// Unmark all short term refernce pictures +// + +void h264_dpb_mm_unmark_all_short_term_for_reference (h264_DecodedPictureBuffer *p_dpb) +{ + int32_t idx; + int32_t temp = p_dpb->ref_frames_in_buffer; + + for (idx = 0; idx < temp; idx++) + { + h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]); + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]); + } + return; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_mm_mark_current_picture_long_term () +// +// 
Marks the current picture as long term after unmarking any long term picture +// already assigned with the same long term frame index +// + +void h264_dpb_mm_mark_current_picture_long_term(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx) +{ + int32_t picNumX; + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + if (viddec_h264_get_dec_structure(active_fs) == FRAME) + { + h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx); + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + active_fs->frame.is_long_term = 1; + active_fs->frame.long_term_frame_idx = long_term_frame_idx; + active_fs->frame.long_term_pic_num = long_term_frame_idx; + } + else + { + if(viddec_h264_get_dec_structure(active_fs) == TOP_FIELD) + { + picNumX = (active_fs->top_field.pic_num << 1) + 1; + active_fs->top_field.is_long_term = 1; + active_fs->top_field.long_term_frame_idx = long_term_frame_idx; + + // Assign long-term pic num + active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + 1; + } + else + { + picNumX = (active_fs->bottom_field.pic_num << 1) + 1; + active_fs->bottom_field.is_long_term = 1; + active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; + + // Assign long-term pic num + active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + 1; + + } + h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, p_dpb->fs_dec_idc, viddec_h264_get_dec_structure(active_fs)); + } + // Add to long term list + //h264_dpb_add_ltref_list(p_dpb->fs_dec_idc); + + return; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx () +// +// Mark a long-term reference frame or complementary field pair unused for referemce +// NOTE: Obviously this ftn cannot be used to unmark individual fields... +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx) +{ + uint32_t idx; + for(idx =0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + + if (active_fs->long_term_frame_idx == long_term_frame_idx) + { + h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); + } + } + return; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_unmark_long_term_field_for_reference_by_frame_idx () +// +// Mark a long-term reference field unused for reference. 
However if it is the
+// complementary field (opposite polarity) of the picture stored in fs_idc,
+// we do not unmark it
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx, int32_t fs_idc, int32_t polarity)
+{
+    uint32_t idx;
+    int32_t found = 0;
+    int32_t is_complement = 0;
+
+    for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (found == 0); idx++)
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+        if (active_fs->long_term_frame_idx == long_term_frame_idx)
+        {
+            if(active_fs->fs_idc == fs_idc)
+            {
+                // Again these seem like redundant checks, but keep them for safety until JM is updated
+                if (polarity == TOP_FIELD)
+                    is_complement = (active_fs->bottom_field.is_long_term)? 1:0;
+                else if(polarity == BOTTOM_FIELD)
+                    is_complement = (active_fs->top_field.is_long_term) ? 1:0;
+            }
+            found = 1;
+        }
+    }
+
+    if(found) {
+        if(is_complement == 0)
+        {
+            // idx was post-incremented by the for loop, so the match is at idx-1
+            h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
+            h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
+        }
+    }
+
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mark_pic_long_term ()
+//
+// This is used on a picture already in the dpb - i.e. not for the current picture
+// dpb_split / dpb_combine field will perform the functionality in that case
+//
+// Marks a picture as used for long-term reference. Adds it to the long-term
+// reference list. Also removes it from the short term reference list if required
+//
+// Note: if the current picture is a frame, the picture to be marked will be a
+// short-term reference frame or short-term complementary reference field pair
+// We use the pic_num assigned to the frame part of the structure to locate it
+// Both its fields will have their long_term_frame_idx and long_term_pic_num
+// assigned to be equal to long_term_frame_idx
+//
+// If the current picture is a field, the picture to be marked will be a
+// short-term reference field. We use the pic_nums assigned to the field parts of
+// the structure to identify the appropriate field. We assign the long_term_frame_idx
+// of the field equal to long_term_frame_idx.
+//
+// We also check to see if this marking has resulted in both fields of the frame
+// becoming long_term.
If it has, we update the frame part of the structure by +// setting its long_term_frame_idx +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, int32_t picNumX) +{ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + uint32_t idx; + int32_t mark_done; + int32_t polarity = 0; + + mark_done = 0; + + if (pInfo->img.structure == FRAME) + { + for (idx = 0; (idx < p_dpb->ref_frames_in_buffer) && (!(mark_done)); idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if (active_fs->frame.used_for_reference == 3) + { + if ((!(active_fs->frame.is_long_term))&&(active_fs->frame.pic_num == picNumX)) + { + active_fs->long_term_frame_idx = long_term_frame_idx; + active_fs->frame.long_term_frame_idx = long_term_frame_idx; + active_fs->top_field.long_term_frame_idx = long_term_frame_idx; + active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; + + active_fs->frame.is_long_term = 1; + active_fs->top_field.is_long_term = 1; + active_fs->bottom_field.is_long_term = 1; + + viddec_h264_set_is_frame_long_term(active_fs, 3); + mark_done = 1; + + // Assign long-term pic num + active_fs->frame.long_term_pic_num = long_term_frame_idx; + active_fs->top_field.long_term_pic_num = long_term_frame_idx; + active_fs->bottom_field.long_term_pic_num = long_term_frame_idx; + // Add to long term list + h264_dpb_add_ltref_list(p_dpb, p_dpb->fs_ref_idc[idx]); + // Remove from short-term list + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); + } + } + } + } + else + { + polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX); + active_fs->long_term_frame_idx = long_term_frame_idx; /////BUG + + if(polarity == TOP_FIELD) + { + active_fs->top_field.long_term_frame_idx = long_term_frame_idx; + active_fs->top_field.is_long_term = 1; + viddec_h264_set_is_top_long_term(active_fs, 1); + + // Assign long-term pic num + active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == TOP_FIELD) ? 1 : 0); + + } + else if (polarity == BOTTOM_FIELD) + { + active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; + active_fs->bottom_field.is_long_term = 1; + viddec_h264_set_is_bottom_long_term(active_fs, 1); + + // Assign long-term pic num + active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == BOTTOM_FIELD) ? 1 : 0); + } + + if (viddec_h264_get_is_long_term(active_fs) == 3) + { + active_fs->frame.is_long_term = 1; + active_fs->frame.long_term_frame_idx = long_term_frame_idx; + h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); + } + else + { + // We need to add this idc to the long term ref list... + h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc); + + // If the opposite field is not a short term reference, remove it from the + // short term list. Since we know top field is a reference but both are not long term + // we can simply check that both fields are not references... 
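+            // (Illustration of the test below, inferred from h264_dpb_split_field():
+            // used_for_reference appears to act as a 2-bit mask, bit 0 = top field and
+            // bit 1 = bottom field, so a store whose top field alone is a reference
+            // has used_for_reference == 1, not 3, and is removed from the short-term
+            // list here.)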
+            if(active_fs->frame.used_for_reference != 3)
+                h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc);
+        }
+    }
+    return;
+} ///// End of mark pic long term
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_adaptive_memory_management ()
+//
+// Perform Adaptive memory control decoded reference picture marking process
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_adaptive_memory_management (h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    int32_t idx;
+
+    idx = 0;
+
+    while (idx < pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count)
+    {
+        switch(pInfo->SliceHeader.sh_dec_refpic.memory_management_control_operation[idx])
+        {
+            case 1:{ //Mark a short-term reference picture as "unused for reference"
+                h264_dpb_mm_unmark_short_term_for_reference(pInfo,
+                    pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx]);
+            } break;
+            case 2:{ //Mark a long-term reference picture as "unused for reference"
+                h264_dpb_mm_unmark_long_term_for_reference(pInfo,
+                    pInfo->SliceHeader.sh_dec_refpic.long_term_pic_num[idx]);
+            }break;
+            case 3:{ //Mark a short-term reference picture as "used for long-term reference" and assign a long-term frame index to it
+                h264_dpb_mm_assign_long_term_frame_idx(pInfo,
+                    pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx],
+                    pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
+            }break;
+            case 4:{ //Specify the maximum long-term frame index and
+                //mark all long-term reference pictures having long-term frame indices greater than
+                //the maximum value as "unused for reference"
+                h264_dpb_mm_update_max_long_term_frame_idx (&pInfo->dpb,
+                    pInfo->SliceHeader.sh_dec_refpic.max_long_term_frame_idx_plus1[idx]);
+            }break;
+            case 5:{ //Mark all reference pictures as "unused for reference" and set the MaxLongTermFrameIdx variable to
+                // "no long-term frame indices"
+                h264_dpb_mm_unmark_all_short_term_for_reference(&pInfo->dpb);
+                h264_dpb_mm_update_max_long_term_frame_idx(&pInfo->dpb, 0);
+                pInfo->img.last_has_mmco_5 = 1;
+            }break;
+            case 6:{ //Mark the current picture as "used for long-term reference" and assign a long-term frame index to it
+                h264_dpb_mm_mark_current_picture_long_term(&pInfo->dpb,
+                    pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
+            }break;
+        }
+        idx++;
+    }
+
+
+    if (pInfo->img.last_has_mmco_5)
+    {
+        pInfo->img.frame_num = 0;
+        pInfo->SliceHeader.frame_num=0;
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+        if (viddec_h264_get_dec_structure(active_fs) == FRAME)
+        {
+            pInfo->img.bottompoc -= active_fs->frame.poc;
+            pInfo->img.toppoc    -= active_fs->frame.poc;
+
+
+            active_fs->frame.poc = 0;
+            active_fs->frame.pic_num = 0;
+            active_fs->frame_num = 0;
+        }
+
+        else if (viddec_h264_get_dec_structure(active_fs) == TOP_FIELD)
+        {
+            active_fs->top_field.poc = active_fs->top_field.pic_num = 0;
+            pInfo->img.toppoc = active_fs->top_field.poc;
+        }
+        else if (viddec_h264_get_dec_structure(active_fs) == BOTTOM_FIELD)
+        {
+            active_fs->bottom_field.poc = active_fs->bottom_field.pic_num = 0;
+            pInfo->img.bottompoc = 0;
+        }
+
+        h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field,pInfo->active_SPS.num_ref_frames);
+    }
+    // Reset
the marking count operations for the current picture... + pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count = 0; + + return; +} ////// End of adaptive memory management + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_gaps_in_frame_num_mem_management () +// +// Produces a set of frame_nums pertaining to "non-existing" pictures +// Calls h264_dpb_store_picture_in_dpb +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) +{ + int32_t temp_frame_num = 0; + int32_t idx, prev_idc; + int32_t prev_frame_num_plus1_wrap; + uint32_t temp; + int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); + seq_param_set_used_ptr active_sps = &pInfo->active_SPS; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + pInfo->img.gaps_in_frame_num = 0; + + // pInfo->img.last_has_mmco_5 set thru store_picture_in_dpb + if (pInfo->img.last_has_mmco_5) + { + // If the previous picture was an unpaired field, mark it as a dangler + if(p_dpb->used_size) + { + idx = p_dpb->used_size-1; + prev_idc = p_dpb->fs_dpb_idc[idx]; + if (prev_idc != MPD_DPB_FS_NULL_IDC) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + active_fs->frame_num =0; + } + } + pInfo->img.PreviousFrameNumOffset = 0; + //CONFORMANCE_ISSUE + pInfo->img.PreviousFrameNum = 0; + + } + + // Check for gaps in frame_num + if(pInfo->SliceHeader.idr_flag) { + pInfo->img.PreviousFrameNum = pInfo->img.frame_num; + } + // Have we re-started following a recovery point message? +/* + else if(got_sei_recovery || aud_got_restart){ + pInfo->img.PreviousFrameNum = pInfo->img.frame_num; + //got_sei_recovery = 0; + //aud_got_restart = 0; + } +*/ + else if(pInfo->img.frame_num != pInfo->img.PreviousFrameNum) + { + if (MaxFrameNum) + ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp); + + prev_frame_num_plus1_wrap = temp; + if(pInfo->img.frame_num != prev_frame_num_plus1_wrap) + { + pInfo->img.gaps_in_frame_num = (pInfo->img.frame_num < pInfo->img.PreviousFrameNum)? 
((MaxFrameNum + pInfo->img.frame_num -1) - pInfo->img.PreviousFrameNum): (pInfo->img.frame_num - pInfo->img.PreviousFrameNum - 1); + // We should test for an error here - should infer an unintentional loss of pictures + } + } + + + //if(active_sps->gaps_in_frame_num_value_allowed_flag == 0) { + if(pInfo->img.gaps_in_frame_num && (active_sps->gaps_in_frame_num_value_allowed_flag == 0)) { + // infer an unintentional loss of pictures + // only invoke following process for a conforming bitstream + // when gaps_in_frame_num_value_allowed_flag is equal to 1 + pInfo->img.gaps_in_frame_num = 0; + + //mfd_printf("ERROR STREAM??\n"); + ////// Error handling here---- + } + + /////// Removed following OLO source (Sodaville H.D) + //else if (pInfo->img.gaps_in_frame_num > active_sps->num_ref_frames) { + // // No need to produce any more non-existent frames than the amount required to flush the dpb + // pInfo->img.gaps_in_frame_num = active_sps->num_ref_frames; + //mfd_printf("gaps in frame: %d\n", gaps_in_frame_num); + //} + + // If the previous picture was an unpaired field, mark it as a dangler + if(p_dpb->used_size) + { + idx = p_dpb->used_size-1; + prev_idc = p_dpb->fs_dpb_idc[idx]; + if (prev_idc != MPD_DPB_FS_NULL_IDC) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + if(viddec_h264_get_is_used(active_fs) != 3) { + h264_dpb_mark_dangling_field(p_dpb, active_fs->fs_idc); //, DANGLING_TYPE_GAP_IN_FRAME + } + } + } + + while(temp_frame_num < pInfo->img.gaps_in_frame_num) + { + h264_dpb_assign_frame_store(pInfo, 1); + + // Set up initial markings - not sure if all are needed + viddec_h264_set_dec_structure(active_fs, FRAME); + + if(MaxFrameNum) + ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp); + + active_fs->frame.pic_num = temp; + active_fs->long_term_frame_idx = 0; + active_fs->frame.long_term_pic_num = 0; + viddec_h264_set_is_frame_long_term(active_fs, 0); + + // Note the call below will overwrite some aspects of the img structure with info relating to the + // non-existent picture + // However, since this is called before h264_hdr_decoding_poc() for the current existing picture + // it should be o.k. + if(pInfo->img.pic_order_cnt_type) + h264_hdr_decoding_poc(pInfo, 1, temp); + + pInfo->img.structure = FRAME; + active_fs->frame.poc = pInfo->img.framepoc; + + // call store_picture_in_dpb + + h264_dpb_store_previous_picture_in_dpb(pInfo, 1, 0); + + h264_hdr_post_poc(pInfo, 1, temp, 0); + + temp_frame_num++; + } +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_unmark_for_reference () +// +// Mark FrameStore unused for reference. 
Removes it from the short term reference list
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+    h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+    if (viddec_h264_get_is_used(active_fs)&0x1) active_fs->top_field.used_for_reference = 0;
+    if (viddec_h264_get_is_used(active_fs)&0x2) active_fs->bottom_field.used_for_reference = 0;
+    if (viddec_h264_get_is_used(active_fs) == 3) active_fs->frame.used_for_reference = 0;
+
+    active_fs->frame.used_for_reference = 0;
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_for_long_term_reference ()
+//
+// mark FrameStore unused for reference and reset long term flags
+// This function does not remove it from the long term list
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+    h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+    if (viddec_h264_get_is_used(active_fs)&0x1)
+    {
+        active_fs->top_field.used_for_reference = 0;
+        active_fs->top_field.is_long_term = 0;
+    }
+
+    if (viddec_h264_get_is_used(active_fs)&0x2)
+    {
+        active_fs->bottom_field.used_for_reference = 0;
+        active_fs->bottom_field.is_long_term = 0;
+    }
+    if (viddec_h264_get_is_used(active_fs) == 3)
+    {
+        active_fs->frame.used_for_reference = 0;
+        active_fs->frame.is_long_term = 0;
+    }
+
+    active_fs->frame.used_for_reference = 0;
+    viddec_h264_set_is_frame_long_term(active_fs, 0);
+
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mark_dangling_field
+//
+// Tells HW the previous field was dangling
+// Marks it as such in SW
+// Takes appropriate actions - sys_data needs to be thought through...
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+
+    h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+    //PRINTF(MFD_NONE, " fs_idc = %d DANGLING_TYPE = %d \n", fs_idc, reason);
+    /*
+    Make the check that it has not already been marked
+    This covers the situation of a dangling field followed by a
+    frame which is direct output (i.e. never entered into the dpb).
+ In this case we could attempt to mark the prev unpaired field + as a dangler twice which would upset the HW dpb_disp_q count + */ + + if(viddec_h264_get_is_dangling(active_fs) == 0) + { + switch(viddec_h264_get_dec_structure(active_fs)) + { + case TOP_FIELD: + viddec_h264_set_is_dangling(active_fs, 1); + //PRINTF(MFD_NONE, "FN:%d fs_idc=%d FRAME_FLAG_DANGLING_TOP_FIELD\n ", (h264_frame_number+1), active_fs->fs_idc); + break; + case BOTTOM_FIELD: + //PRINTF(MFD_NONE, " FN:%d fs_idc=%d FRAME_FLAG_DANGLING_BOTTOM_FIELD \n ", (h264_frame_number+1), active_fs->fs_idc); + viddec_h264_set_is_dangling(active_fs, 1); + break; + default: + //PRINTF(MFD_NONE, "FN:%d fs_idc=%d DANGLING: FATAL_ERROR\n ", (h264_frame_number+1), active_fs->fs_idc); + break; + } + + //h264_send_new_decoded_frame(); + } + return; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + + +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_is_used_for_reference () +// +// Check if one of the frames/fields in active_fs is used for reference +// +void h264_dpb_is_used_for_reference(int32_t * flag) +{ + + /* Check out below for embedded */ + *flag = 0; + if (active_fs->frame.used_for_reference) + *flag = 1; + else if (viddec_h264_get_is_used(active_fs) ==3) // frame + *flag = active_fs->frame.used_for_reference; + else + { + if (viddec_h264_get_is_used(active_fs)&0x1) // top field + *flag = active_fs->top_field.used_for_reference; + if (viddec_h264_get_is_used(active_fs)&0x2) // bottom field + *flag = *flag || active_fs->bottom_field.used_for_reference; + } +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_idr_memory_management () +// +// Perform Memory management for idr pictures +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_idr_memory_management (h264_Info * pInfo,seq_param_set_used_ptr active_sps, int32_t no_output_of_prior_pics_flag) +{ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + uint32_t idx; + uint32_t i; + int32_t DPB_size; + int32_t FrameSizeInBytes, FrameSizeInMbs; + uint32_t data; + int32_t num_ref_frames = active_sps->num_ref_frames; + int32_t level_idc = active_sps->level_idc; + uint32_t temp_bump_level=0; + + + /// H.D----- + /// There are 2 kinds of dpb flush defined, one is with display, the other is without display + /// The function name dpb_flush actually is just the first, and the 2nd one is for error case or no_prior_output + /// We will rewrite the code below to make it clean and clear + /// + if (no_output_of_prior_pics_flag) + { + + // free all stored pictures + for (idx = 0; idx < p_dpb->used_size; idx = idx + 1) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + + //mfd_printf(" directly freeing fs_idc = %d DSN = 0x%x \n",active_fs->fs_idc, active_fs->first_dsn); + viddec_h264_set_is_frame_used(active_fs, 0); + //if( (active_fs->frame_sent == 0x01) && (active_fs->is_output == 0x0)) + { + //DECODED_FRAME sent but not 
DISPLAY_FRAME + h264_dpb_unmark_for_reference(p_dpb, active_fs->fs_idc); + h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); + //h264_send_new_display_frame(0x01); //send ignore_frame signal to Host + + /// Add into drop-out list for all frms in dpb without display + if(!(viddec_h264_get_is_non_existent(active_fs))) { + if( viddec_h264_get_is_output(&(p_dpb->fs[p_dpb->fs_dpb_idc[idx]])) ) { //// This frame has been displayed but not released + p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs_dpb_idc[idx]; + p_dpb->frame_numbers_need_to_be_removed ++; + } else { //// This frame will be removed without display + p_dpb->frame_id_need_to_be_dropped[p_dpb->frame_numbers_need_to_be_dropped] = p_dpb->fs_dpb_idc[idx]; + p_dpb->frame_numbers_need_to_be_dropped ++; + } + } + } + + } + + ////////////////////////////////////////// Reset Reference list + for (i = 0; i < p_dpb->ref_frames_in_buffer; i++) + p_dpb->fs_ref_idc[i] = MPD_DPB_FS_NULL_IDC; + + for (i = 0; i < p_dpb->ltref_frames_in_buffer; i++) + p_dpb->fs_ltref_idc[i] = MPD_DPB_FS_NULL_IDC; + + ////////////////////////////////////////// Reset DPB and dpb list + for (i = 0; i < p_dpb->used_size; i++) { + p_dpb->fs[p_dpb->fs_dpb_idc[i]].fs_idc = MPD_DPB_FS_NULL_IDC; + p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC; + } + + p_dpb->used_size = 0; + p_dpb->ref_frames_in_buffer = 0; + p_dpb->ltref_frames_in_buffer = 0; + + p_dpb->last_output_poc = 0x80000000; + } + else { + h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field, num_ref_frames); + } + + if (p_dpb->fs_dec_idc != MPD_DPB_FS_NULL_IDC) // added condition for use of DPB initialization + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + if (pInfo->img.long_term_reference_flag) + { + p_dpb->max_long_term_pic_idx = 0; + switch (viddec_h264_get_dec_structure(active_fs)) + { + case FRAME : active_fs->frame.is_long_term = 1; + case TOP_FIELD : active_fs->top_field.is_long_term = 1; + case BOTTOM_FIELD : active_fs->bottom_field.is_long_term = 1; + } + active_fs->long_term_frame_idx = 0; + } + else + { + p_dpb->max_long_term_pic_idx = MPD_DPB_FS_NULL_IDC; + viddec_h264_set_is_frame_long_term(active_fs, 0); + } + } + + p_dpb->OutputLevel = 0; + p_dpb->OutputLevelValid = 0; + p_dpb->OutputCtrl = 0; + + + // Set up bumping level - do this every time a parameters set is activated... + if(active_sps->sps_disp.vui_parameters_present_flag) + { + if(active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag) + { + //p_dpb->OutputLevel = active_sps->sps_disp.vui_seq_parameters.num_reorder_frames; + //p_dpb->OutputLevelValid = 1; + } + } + + // Set up bumping level - do this every time a parameters set is activated... 
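+    // The DPB_size values in the switch below are MaxDPB from Table A-1 of the
+    // H.264 spec, in units of 1024 bytes. Worked example (illustrative numbers):
+    // level 3.1 gives DPB_size = 6750; at 1280x720, FrameSizeInMbs = 80*45 = 3600
+    // and FrameSizeInBytes = 3600*384 = 1382400, so the bump level computed below
+    // is (6750 << 10) / 1382400 = 5 frames.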
+ switch(level_idc) + { + case h264_Level1b: + case h264_Level1: + { + if ((active_sps->profile_idc < 100) && ((active_sps->constraint_set_flags & 0x1) == 0)) { + DPB_size = 338; + } + else { + DPB_size = 149; + } + + break; + } + case h264_Level11: + { + DPB_size = 338; + break; + } + case h264_Level12: + case h264_Level13: + case h264_Level2: + { + DPB_size = 891; + break; + } + case h264_Level21: + { + DPB_size = 1782; + break; + } + case h264_Level22: + case h264_Level3: + { + DPB_size = 3038; + break; + } + case h264_Level31: + { + DPB_size = 6750; + break; + } + case h264_Level32: + { + DPB_size = 7680; + break; + } + case h264_Level4: + case h264_Level41: + { + DPB_size = 12288; + break; + } + case h264_Level42: + { + DPB_size = 13056; + break; + } + case h264_Level5: + { + DPB_size = 41400; + break; + } + case h264_Level51: + { + DPB_size = 69120; + break; + } + default : DPB_size = 69120; break; + } + + FrameSizeInMbs = pInfo->img.PicWidthInMbs * pInfo->img.FrameHeightInMbs; + FrameSizeInBytes = (FrameSizeInMbs << 8) + (FrameSizeInMbs << 7); + + if(FrameSizeInBytes) + { + + temp_bump_level = ldiv_mod_u((DPB_size << 10), FrameSizeInBytes, &data); + + if(temp_bump_level > 255) + { + p_dpb->BumpLevel = 255; + } + else + { + p_dpb->BumpLevel = (uint8_t)temp_bump_level; + } + } + + if (p_dpb->BumpLevel == 0) + p_dpb->BumpLevel = active_sps->num_ref_frames + 1; + + if (p_dpb->BumpLevel > 16) + p_dpb->BumpLevel = 16; + + + if(active_sps->sps_disp.vui_parameters_present_flag && active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag) { + + if (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > p_dpb->BumpLevel) { + //MFD_PARSER_DEBUG(ERROR_H264_DPB); + //// err handling here + } + else { + p_dpb->BumpLevel = (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > 1) ? + (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering) : 1; + } + } + + + // A new sequence means automatic frame release + //sei_information.disp_frozen = 0; + + return; +} //// End --- dpb_idr_memory_management + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_remove_frame_from_dpb () +// +// remove one frame from DPB +// The parameter index, is the location of the frame to be removed in the +// fs_dpb_idc list. 
The used size is decremented by one
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx)
+{
+    int32_t fs_idc;
+    uint32_t i;
+
+    fs_idc = p_dpb->fs_dpb_idc[idx];
+
+    h264_dpb_set_active_fs(p_dpb, fs_idc);
+    viddec_h264_set_is_frame_used(active_fs, 0);
+
+    //add to support frame relocation interface to host
+    if(!(viddec_h264_get_is_non_existent(active_fs)))
+    {
+        p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs[fs_idc].fs_idc;
+        p_dpb->frame_numbers_need_to_be_removed ++;
+    }
+
+    ///////////////////////////////////////// Reset FS
+    p_dpb->fs[fs_idc].fs_idc = MPD_DPB_FS_NULL_IDC;
+
+    /////Remove unused frame from dpb-list
+    i = idx;
+    while( (i + 1)< p_dpb->used_size)
+    {
+        p_dpb->fs_dpb_idc[i] = p_dpb->fs_dpb_idc[i + 1];
+        i ++;
+    }
+    p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+
+    ////////////////////////////
+    p_dpb->used_size--;
+
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_unused_frame_from_dpb ()
+//
+// Remove a picture from DPB which is no longer needed.
+// Search for a frame which is not used for reference and has previously been
+// placed in the output queue - if we find one, call
+// h264_dpb_remove_frame_from_dpb() and set flag to 1
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t * flag)
+{
+    uint32_t idx;
+    int32_t first_non_exist_valid, non_exist_idx;
+    int32_t used_for_reference = 0;
+
+    *flag = 0;
+    first_non_exist_valid = 0x0;
+    non_exist_idx = 0x0;
+
+    for (idx = 0; (idx < p_dpb->used_size) && (*flag == 0); idx++)
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+        h264_dpb_is_used_for_reference(&used_for_reference);
+
+        //if( (used_for_reference == 0x0 ) && active_fs->is_output && active_fs->is_non_existent == 0x0)
+        //{
+        //PRINTF(MFD_NONE, " requesting to send FREE: fs_idc = %d fb_id = %d \n", active_fs->fs_idc, active_fs->fb_id);
+        //dpb_release_fb(&h264_dpb, active_fs->fb_id, 1);
+        //}
+
+        if (viddec_h264_get_is_output(active_fs) && (used_for_reference == 0))
+        {
+            h264_dpb_remove_frame_from_dpb(p_dpb, idx);
+            *flag = 1;
+        }
+/*
+/////// Removed following OLO source (Sodaville H.D)
+        else if ( (first_non_exist_valid == 0x0) && active_fs->is_non_existent )
+        {
+            first_non_exist_valid = 0x01;
+            non_exist_idx = idx;
+        }
+*/
+    }
+/*
+/////// Removed following OLO source (Sodaville H.D)
+    if ( *flag == 0x0 && first_non_exist_valid) {
+        h264_dpb_remove_frame_from_dpb(p_dpb,non_exist_idx);
+        *flag = 1;
+    }
+*/
+    return;
+} //// End of h264_dpb_remove_unused_frame_from_dpb
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_smallest_poc ()
+//
+// find smallest POC in the DPB which has not as yet been output
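+// e.g. (illustrative numbers) three complete frames with POCs 8, 4 and 6 where
+// the POC-4 frame has already been output: the loop below skips the output one
+// and returns poc = 6 with pos set to that frame's index in fs_dpb_idc.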
+// This function only checks for frames and dangling fields... +// unless the dpb used size is one, in which case it will accept an unpaired field +////////////////////////////////////////////////////////////////////////////// +void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, int32_t *pos) +{ + int32_t poc_int; + uint32_t idx; + int32_t first_non_output = 1; + + *pos = MPD_DPB_FS_NULL_IDC; + + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[0]); + poc_int = active_fs->frame.poc; + + for (idx = 0; idx < p_dpb->used_size; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + + if (viddec_h264_get_is_output(active_fs) == 0) + { + //PRINTF(MFD_NONE, " active_fs->fs_idc = %d active_fs->is_used = %d, active_fs->is_dangling = %d , active_fs->poc = %d \n", active_fs->fs_idc, active_fs->is_used, active_fs->is_dangling, active_fs->poc); + if ((viddec_h264_get_is_used(active_fs) == 3) || (viddec_h264_get_is_dangling(active_fs))) + { + if (first_non_output) + { + *pos = idx; + first_non_output = 0; + poc_int = active_fs->frame.poc; + } + else if (poc_int > active_fs->frame.poc) + { + poc_int = active_fs->frame.poc; + *pos = idx; + } + } + else if (p_dpb->used_size == 1) + { + poc_int = active_fs->frame.poc; + *pos = idx; + } + } + } + + *poc = poc_int; + + return; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_split_field () +// +// Extract field information from a frame +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_split_field (h264_Info * pInfo) +{ + + //active_fs->frame.poc = active_fs->frame.poc; + // active_fs->top_field.poc = active_fs->frame.poc; + // This line changed on 11/05/05 KMc + active_fs->top_field.poc = pInfo->img.toppoc; + active_fs->bottom_field.poc = pInfo->img.bottompoc; + + active_fs->top_field.used_for_reference = active_fs->frame.used_for_reference & 1; + active_fs->bottom_field.used_for_reference = active_fs->frame.used_for_reference >> 1; + + active_fs->top_field.is_long_term = active_fs->frame.is_long_term; + active_fs->bottom_field.is_long_term = active_fs->frame.is_long_term; + + active_fs->long_term_frame_idx = active_fs->frame.long_term_frame_idx; + active_fs->top_field.long_term_frame_idx = active_fs->frame.long_term_frame_idx; + active_fs->bottom_field.long_term_frame_idx = active_fs->frame.long_term_frame_idx; + + + // Assign field mvs attached to MB-Frame buffer to the proper buffer + //! Generate field MVs from Frame MVs + // ... 
+    // these will be done in RTL by using proper memory mapping
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_combine_field (int32_t use_old)
+//
+// Generate a frame from top and bottom fields
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_combine_field(int32_t use_old)
+{
+
+    //remove warning
+    use_old = use_old;
+
+    active_fs->frame.poc = (active_fs->top_field.poc < active_fs->bottom_field.poc)?
+                            active_fs->top_field.poc: active_fs->bottom_field.poc;
+
+    //active_fs->frame.poc = active_fs->poc;
+
+
+    // used_for_reference is a 2-bit mask (bit 0 = top field, bit 1 = bottom field),
+    // so shift the bottom field flag up, mirroring h264_dpb_split_field() and the
+    // is_long_term combination below
+    active_fs->frame.used_for_reference = active_fs->top_field.used_for_reference |(active_fs->bottom_field.used_for_reference << 1);
+
+    active_fs->frame.is_long_term = active_fs->top_field.is_long_term |(active_fs->bottom_field.is_long_term <<1);
+
+    if (active_fs->frame.is_long_term)
+        active_fs->frame.long_term_frame_idx = active_fs->long_term_frame_idx;
+
+    return;
+
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_sliding_window_memory_management ()
+//
+// Perform Sliding window decoded reference picture marking process
+// It must be the reference frame, complementary reference field pair
+// or non-paired reference field that has the smallest value of
+// FrameNumWrap which is marked as unused for reference. Note : We CANNOT
+// simply use frame_num!!!!
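+// For example (illustrative numbers): with num_ref_frames = 4 and one picture
+// already marked long-term, the window is full once 3 short-term references
+// are buffered, which is the (num_ref_frames - ltref_frames_in_buffer) test below.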
+//
+// Although we hold frame_num_wrap in SW, currently, this is not
+// being updated for every picture (the b-picture parameter non-update
+// phenomenon of the reference software)
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb, int32_t NonExisting, int32_t num_ref_frames)
+{
+    // if this is a reference pic with sliding window, unmark first ref frame
+    // should this be (p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer)
+    // Rem: adaptive marking can be on a slice by slice basis so we
+    // could have pictures marked as long term reference in adaptive marking and then
+    // the marking mode changed back to sliding_window_memory_management
+    if (p_dpb->ref_frames_in_buffer >= (num_ref_frames - p_dpb->ltref_frames_in_buffer))
+    {
+        h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+        h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+
+        if(NonExisting == 0)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+            viddec_h264_set_is_frame_long_term(active_fs, 0);
+        }
+    }
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_store_picture_in_dpb ()
+//
+// First we run the marking procedure.
+// Then, before we add the current frame_store to the list of reference stores we run some checks
+// These include checking the number of existing reference frames
+// in DPB and if necessary, flushing frames.
+//
+// \param NonExisting
+// If non-zero this is called to store a non-existing frame resulting from gaps_in_frame_num
+//////////////////////////////////////////////////////////////////////////////
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_frame_output ()
+//
+// If direct == 1, Directly output a frame without storing it in the p_dpb->
+// Therefore we must set is_used to 0, which I guess means it will not appear
+// in the fs_dpb_idc list and is_output to 1 which means it should be in the
+// fs_output_idc list.
+//
+// If it is a non-existing picture we do not actually place it in the output queue
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int32_t * existing)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+    //h264_dpb_push_output_queue();
+    if(pInfo->sei_information.disp_frozen)
+    {
+        // check pocs
+        if(active_fs->top_field.poc >= pInfo->sei_information.freeze_POC)
+        {
+            if(active_fs->top_field.poc < pInfo->sei_information.release_POC)
+            {
+                viddec_h264_set_is_top_skipped(active_fs, 1);
+            }
+            else
+            {
+                pInfo->sei_information.disp_frozen = 0;
+            }
+        }
+
+        if(active_fs->bottom_field.poc >= pInfo->sei_information.freeze_POC)
+        {
+            if(active_fs->bottom_field.poc < pInfo->sei_information.release_POC)
+            {
+                viddec_h264_set_is_bottom_skipped(active_fs, 1);
+            }
+            else
+            {
+                pInfo->sei_information.disp_frozen = 0;
+            }
+        }
+    }
+
+    if ( viddec_h264_get_broken_link_picture(active_fs) )
+        pInfo->sei_information.broken_link = 1;
+
+    if( pInfo->sei_information.broken_link)
+    {
+        // Check if this was the recovery point picture - going to have recovery point on
+        // a frame basis
+        if(viddec_h264_get_recovery_pt_picture(active_fs))
+        {
+            pInfo->sei_information.broken_link = 0;
+            // Also reset wait on sei recovery point picture
+            p_dpb->WaitSeiRecovery = 0;
+        }
+        else
+        {
+            viddec_h264_set_is_frame_skipped(active_fs, 3);
+        }
+    }
+    else
+    {
+        // even if this is not a broken link, we need to follow SEI recovery point rules
+        // Did we use SEI recovery point for the last restart?
+        if ( p_dpb->WaitSeiRecovery )
+        {
+            if ( viddec_h264_get_recovery_pt_picture(active_fs) ) {
+                p_dpb->WaitSeiRecovery = 0;
+            } else {
+                viddec_h264_set_is_frame_skipped(active_fs, 3);
+            }
+        }
+    }
+
+    if ( p_dpb->SuspendOutput )
+    {
+        if ( viddec_h264_get_open_gop_entry(active_fs) ) {
+            p_dpb->SuspendOutput = 0;
+        } else {
+            viddec_h264_set_is_frame_skipped(active_fs, 3);
+        }
+    }
+
+    //h264_send_new_display_frame(0x0);
+    viddec_h264_set_is_output(active_fs, 1);
+
+    if(viddec_h264_get_is_non_existent(active_fs) == 0)
+    {
+        *existing = 1;
+        p_dpb->frame_id_need_to_be_displayed[p_dpb->frame_numbers_need_to_be_displayed]=active_fs->fs_idc;
+        p_dpb->frame_numbers_need_to_be_displayed++;
+
+        //if(direct)
+        //h264_dpb_remove_frame_from_dpb(p_dpb, active_fs->fs_idc); // Remove dpb.fs_dpb_idc[pos]
+    }
+    else
+    {
+        *existing = 0;
+    }
+
+    if(direct) {
+        viddec_h264_set_is_frame_used(active_fs, 0);
+        active_fs->frame.used_for_reference = 0;
+        active_fs->top_field.used_for_reference = 0;
+        active_fs->bottom_field.used_for_reference = 0;
+        active_fs->fs_idc = MPD_DPB_FS_NULL_IDC;
+    }
+    return;
+} ///////// End of dpb frame output
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_output_one_frame_from_dpb ()
+//
+// Output one frame stored in the DPB. Basically this results in its placement
+// in the fs_output_idc list.
+// Placement in the output queue should cause an automatic removal from the dpb
+// if the frame store is not being used as a reference
+// This may need another param for a frame request so that it definitely outputs one non-existing frame
+//////////////////////////////////////////////////////////////////////////////
+int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo,int32_t direct, int32_t request, int32_t num_ref_frames)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    int32_t poc;
+    int32_t pos;
+    int32_t used_for_reference;
+
+    int32_t existing = 0;
+    int32_t is_refused = 0;
+    int32_t is_pushed = 0;
+
+    //remove warning
+    request = request;
+
+    if(direct)
+    {
+        h264_dpb_frame_output(pInfo, p_dpb->fs_dec_idc, 1, &existing);
+    }
+    else
+    {
+        if(p_dpb->used_size != 0)
+        {
+            // Should this be dpb.not_as_yet_output_num > 0 ??
+            // There should maybe be a is_refused == 0 condition instead...
+            while ((p_dpb->used_size > 0) && (existing == 0) && (is_refused == 0))
+            {
+                // find smallest non-output POC
+                h264_dpb_get_smallest_poc(p_dpb, &poc, &pos);
+                if (pos != MPD_DPB_FS_NULL_IDC)
+                {
+                    // put it into the output queue
+                    h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing);
+
+                    p_dpb->last_output_poc = poc;
+                    if (existing) is_pushed = 1;
+                    // If non-reference, free frame store and move empty store to end of buffer
+
+                    h264_dpb_is_used_for_reference(&used_for_reference);
+                    if (!(used_for_reference))
+                        h264_dpb_remove_frame_from_dpb(p_dpb, pos); // Remove dpb.fs_dpb_idc[pos]
+                }
+                else
+                {
+                    int32_t flag;
+                    uint32_t idx;
+
+                    // This is basically an error condition caused by too many reference frames in the DPB.
+                    // It should only happen in errored streams, and can happen if this picture had an MMCO,
+                    // thus disabling h264_dpb_sliding_window_memory_management(), which would normally have
+                    // unmarked the oldest reference frame.
+ h264_dpb_sliding_window_memory_management(p_dpb, 0,num_ref_frames); + h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + + if (flag == 0) { + for (idx = 0; idx < p_dpb->used_size; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + h264_dpb_is_used_for_reference(&used_for_reference); + + if (used_for_reference) { + break; + } + } + + if (idx < p_dpb->used_size) { + // Short term + h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx]); + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx]); + + // Long term + h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_dpb_idc[idx]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_dpb_idc[idx]); + + // Remove from DPB + h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + } + } + return 1; + } + } + } + } + + return is_pushed; +} + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_queue_update +// +// This should be called anytime the output queue might be changed +////////////////////////////////////////////////////////////////////////////// + +int32_t h264_dpb_queue_update(h264_Info* pInfo,int32_t push, int32_t direct, int32_t frame_request, int32_t num_ref_frames) +{ + + int32_t frame_output = 0; + + if(push) + { + frame_output = h264_dpb_output_one_frame_from_dpb(pInfo, direct, 0, num_ref_frames); + } + else if(frame_request) + { + frame_output = h264_dpb_output_one_frame_from_dpb(pInfo, 0, 1,num_ref_frames); + } + + + return frame_output; + +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_flush_dpb () +// +// Unmarks all reference pictures in the short-term and long term lists and +// in doing so resets the lists. 
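+// (keep_complement leaves that many stores unflushed - the callers above pass
+// pInfo->img.second_field so that a just-started complementary field pair
+// survives the flush; keep_complement == 0 empties the dpb completely.)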
+// +// Flushing the dpb, adds all the current frames in the dpb, not already on the output list +// to the output list and removes them from the dpb (they will all be marked as unused for +// reference first) +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_flush_dpb (h264_Info* pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames) +{ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + int32_t idx, flag; + int32_t ref_frames_in_buffer; + + ref_frames_in_buffer = p_dpb->ref_frames_in_buffer; + + for (idx = 0; idx < ref_frames_in_buffer; idx++){ + h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]); + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]); + } + + ref_frames_in_buffer = p_dpb->ltref_frames_in_buffer; + + for (idx = 0; idx < ref_frames_in_buffer; idx++) + { + h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[0]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]); + } + + // output frames in POC order + if (output_all) { + while (p_dpb->used_size - keep_complement) { + h264_dpb_queue_update(pInfo, 1, 0, 0,num_ref_frames); + } + } + + flag = 1; + while (flag) { + h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + } + + return; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_reset_dpb () +// +// Used to reset the contents of dpb +// Must calculate memory (aligned) pointers for each of the possible frame stores +// +// Also want to calculate possible max dpb size in terms of frames +// We should have an active SPS when we call this ftn to calc bumping level +////////////////////////////////////////////////////////////////////////////// +void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHeightInMbs, int32_t SizeChange, int32_t no_output_of_prior_pics_flag) +{ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + int32_t num_ref_frames = pInfo->active_SPS.num_ref_frames; + + + // If half way through a frame then Frame in progress will still be high, + // so mark the previous field as a dangling field. This is also needed to + // keep cs7050_sif_dpb_disp_numb_ptr correct. Better to reset instead? + if(p_dpb->used_size) + { + int32_t idx; + idx = p_dpb->used_size-1; + if (p_dpb->fs_dpb_idc[idx] != MPD_DPB_FS_NULL_IDC) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + + if(viddec_h264_get_is_used(active_fs) != 3) + h264_dpb_mark_dangling_field(p_dpb, active_fs->fs_idc); //, DANGLING_TYPE_DPB_RESET + } + } + + // initialize software DPB + if(active_fs) { + viddec_h264_set_dec_structure(active_fs, INVALID); + } + h264_dpb_idr_memory_management(pInfo, &pInfo->active_SPS, no_output_of_prior_pics_flag); // implied no_output_of_prior_pics_flag==1 + + + // May always be a size change which calls this function now... 
+    // could eliminate below branch
+    if(SizeChange)
+    {
+
+        /***
+        Note : 21/03/2005 14:16
+        Danger associated with resetting curr_alloc_mem as it would allow the FW to reallocate
+        frame stores from 0 -> NUM_FRAME_STORES again - could lead to queue overflow and corruption
+
+        Placed in size change condition in the hope that this will only ensure dpb is empty
+        and thus this behaviour is valid before continuing again
+        ***/
+
+
+        p_dpb->PicWidthInMbs = PicWidthInMbs;
+        p_dpb->FrameHeightInMbs = FrameHeightInMbs;
+
+        p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+        //Flush the current DPB.
+        h264_dpb_flush_dpb(pInfo, 1,0,num_ref_frames);
+    }
+
+    return;
+} ///// End of reset DPB
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// ---------------------------------------------------------------------------
+// Note that if a 'missing_pip_fb' condition exists, the message will be
+// sent to the host each time setup_free_fb is called. However, since this
+// condition is not expected to happen if pre-defined steps are followed, we let
+// it be for now and will change it if required. Basically, as long as host
+// enables PiP after adding PiP buffers and disables PiP before removing buffers
+// and matches PiP fb_id's with normal decode fb_id's this condition should
+// not occur.
+// ---------------------------------------------------------------------------
+int32_t dpb_setup_free_fb( h264_DecodedPictureBuffer *p_dpb, uint8_t* fb_id, pip_setting_t* pip_setting )
+{
+    uint8_t idx;
+
+    //remove warning
+    pip_setting = pip_setting;
+
+
+    for (idx = 0; idx < NUM_DPB_FRAME_STORES; idx++)
+    {
+        if (p_dpb->fs[idx].fs_idc == MPD_DPB_FS_NULL_IDC)
+        {
+            *fb_id = idx;
+            break;
+        }
+    }
+
+    if(idx == NUM_DPB_FRAME_STORES)
+        return 1;
+
+    p_dpb->fs[idx].fs_idc = idx;
+
+    return 0;
+
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_assign_frame_store ()
+//
+// may need a non-existing option parameter
+//
+
+int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting)
+{
+    uint8_t idc = MPD_DPB_FS_NULL_IDC;
+    pip_setting_t pip_setting;
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+
+    while( dpb_setup_free_fb(p_dpb, &idc, &pip_setting) != 0 ) {
+        ///
+        /// Generally this is triggered in an error case: no more frame buffers are available for the next picture.
+        /// What we do here is just remove the one with the minimum POC before getting more info
+        ///
+
+        int32_t pos = 0, poc = 0, existing = 1;
+
+        // find smallest non-output POC
+        h264_dpb_get_smallest_poc(p_dpb, &poc, &pos);
+        if (pos != MPD_DPB_FS_NULL_IDC)
+        {
+            // put it into the output queue
+            h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing);
+            p_dpb->last_output_poc = poc;
+            h264_dpb_remove_frame_from_dpb(p_dpb, pos); // Remove dpb.fs_dpb_idc[pos]
+        }
+    }
+
+
+    if(NonExisting) {
+        p_dpb->fs_non_exist_idc = idc;
+    }else {
+        p_dpb->fs_dec_idc = idc;
+    }
+
+    //add to support frame relocation interface to host
+    if(!NonExisting)
+    {
+        p_dpb->frame_numbers_need_to_be_allocated = 1;
+        p_dpb->frame_id_need_to_be_allocated = p_dpb->fs_dec_idc;
+    }
+
+
+    ///////////////////////////////h264_dpb_reset_fs();
+    h264_dpb_set_active_fs(p_dpb, idc);
+    active_fs->fs_flag_1 = 0;
+    active_fs->fs_flag_2 = 0;
+    viddec_h264_set_is_non_existent(active_fs, NonExisting);
+    viddec_h264_set_is_output(active_fs, (NonExisting?1:0));
+
+    active_fs->pic_type = ((FRAME_TYPE_INVALID << FRAME_TYPE_TOP_OFFSET) | FRAME_TYPE_INVALID);
+
+    // ->is_used is reset on removal from dpb, no need for it here
+    // ->poc would only be changed when we overwrite on insert_Picture_in_dpb()
+    // but would be used by get_smallest_poc()
+    // ->top.poc would also not be overwritten until a new valid value comes along,
+    // but I don't think it is used before then so no need to reset
+    //active_fs->is_long_term = 0;
+    active_fs->frame.used_for_reference = 0;
+    active_fs->frame.poc = 0;
+
+    return 1;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_update_queue_dangling_field (h264_Info * pInfo)
+//
+// Update DPB for Dangling field special case
+//
+void h264_dpb_update_queue_dangling_field(h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb;
+    int32_t prev_pic_unpaired_field = 0;
+
+    if(dpb_ptr->used_size > dpb_ptr->BumpLevel)
+    {
+        if (dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1] != MPD_DPB_FS_NULL_IDC)
+        {
+            h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]);
+            if(viddec_h264_get_is_used(active_fs) != 3)
+            {
+                prev_pic_unpaired_field = 1;
+            }
+        }
+
+        if (pInfo->img.structure != FRAME)
+        {
+            // To prove this is the second field,
+            // 1) The previous picture is an (as yet) unpaired field
+            if(prev_pic_unpaired_field)
+            {
+                // If we establish the previous pic was an unpaired field and this picture is not
+                // its complement, the previous picture was a dangling field
+                if(pInfo->img.second_field == 0) {
+                    while(dpb_ptr->used_size > dpb_ptr->BumpLevel)
+                        h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
+                }
+            }
+        }
+        else if (prev_pic_unpaired_field) {
+            while(dpb_ptr->used_size > dpb_ptr->BumpLevel)
+                h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
+        }
+    }
+
+
+    return;
+} ///// End of h264_dpb_update_queue_dangling_field
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_init_frame_store (h264_Info * pInfo)
+//
+// Set the frame store to be used in decoding the picture
+//
+
+void h264_dpb_init_frame_store(h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb;
+
+    int32_t free_fs_found;
+    int32_t idx = 0;
+    int32_t prev_pic_unpaired_field = 0;
+    int32_t prev_idc = MPD_DPB_FS_NULL_IDC;
+    int32_t structure = pInfo->img.structure;
+
+    if(dpb_ptr->used_size)
+    {
+        idx = dpb_ptr->used_size-1;
+        prev_idc = dpb_ptr->fs_dpb_idc[idx];
+    }
+
+    if (prev_idc != MPD_DPB_FS_NULL_IDC)
+    {
+        h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]);
+
if(viddec_h264_get_is_used(active_fs) != 3) + { + //PRINTF(MFD_NONE, " FN: %d active_fs->is_used = %d \n", (h264_frame_number+1), active_fs->is_used); + prev_pic_unpaired_field = 1; + } + } + + //if ((pInfo->img.curr_has_mmco_5) || (pInfo->img.idr_flag)) curr_fld_not_prev_comp = 1; + + if (structure != FRAME) + { + + // To prove this is the second field, + // 1) The previous picture is an (as yet) unpaired field + if(prev_pic_unpaired_field) + { + // If we establish the previous pic was an unpaired field and this picture is not + // its complement, the previous picture was a dangling field + if(pInfo->img.second_field == 0) + h264_dpb_mark_dangling_field(dpb_ptr, active_fs->fs_idc); //, DANGLING_TYPE_FIELD + } + } + else if (prev_pic_unpaired_field) { + h264_dpb_mark_dangling_field(dpb_ptr, active_fs->fs_idc); //, DANGLING_TYPE_FRAME + } + + free_fs_found = 0; + + // If this is not a second field, we must find a free space for the current picture + if (!(pInfo->img.second_field)) + { + dpb_ptr->fs_dec_idc = MPD_DPB_FS_NULL_IDC; + free_fs_found = h264_dpb_assign_frame_store(pInfo, 0); + //h264_frame_number++; + //PRINTF(MFD_NONE, " FN: %d (inc) fs_idc = %d \n", (h264_frame_number+1), dpb.fs_dec_idc); + } + + h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dec_idc); + + ////////////// TODO: THe following init +#if 1 + if( pInfo->img.second_field) { + //active_fs->second_dsn = pInfo->img.dsn; + //active_fs->prev_dsn = pInfo->img.prev_dsn; + if (active_fs->pic_type == FRAME_TYPE_IDR || + active_fs->pic_type == FRAME_TYPE_I) { + + viddec_h264_set_first_field_intra(active_fs, 1); + } else { + viddec_h264_set_first_field_intra(active_fs, 0); + } + + } + else { + //active_fs->first_dsn = pInfo->img.dsn; + //active_fs->prev_dsn = pInfo->img.prev_dsn; + viddec_h264_set_first_field_intra(active_fs, 0); + } + + if (pInfo->img.structure == FRAME) { + //active_fs->second_dsn = 0x0; + } + + if ( pInfo->sei_information.broken_link_pic ) + { + viddec_h264_set_broken_link_picture(active_fs, 1); + pInfo->sei_information.broken_link_pic = 0; + } + + if ((pInfo->img.frame_num == pInfo->sei_information.recovery_frame_num)&&(pInfo->SliceHeader.nal_ref_idc != 0)) + viddec_h264_set_recovery_pt_picture(active_fs, 1); + + //if ((( gRestartMode.aud ) || ( gRestartMode.sei )) && ( !gRestartMode.idr)) + if(pInfo->img.recovery_point_found == 6) + { + viddec_h264_set_open_gop_entry(active_fs, 1); + pInfo->dpb.SuspendOutput = 1; + } +#endif + + if ((pInfo->img.second_field) || (free_fs_found)) + { + viddec_h264_set_dec_structure(active_fs, pInfo->img.structure); + viddec_h264_set_is_output(active_fs, 0); + + switch(pInfo->img.structure) + { + case (FRAME) :{ + active_fs->frame.pic_num = pInfo->img.frame_num; + active_fs->frame.long_term_frame_idx = 0; + active_fs->frame.long_term_pic_num = 0; + active_fs->frame.used_for_reference = 0; + active_fs->frame.is_long_term = 0; + //active_fs->frame.structure = pInfo->img.structure; + active_fs->frame.poc = pInfo->img.framepoc; + }break; + case (TOP_FIELD) :{ + active_fs->top_field.pic_num = pInfo->img.frame_num; + active_fs->top_field.long_term_frame_idx = 0; + active_fs->top_field.long_term_pic_num = 0; + active_fs->top_field.used_for_reference = 0; + active_fs->top_field.is_long_term = 0; + //active_fs->top_field.structure = pInfo->img.structure; + active_fs->top_field.poc = pInfo->img.toppoc; + }break; + case(BOTTOM_FIELD) :{ + active_fs->bottom_field.pic_num = pInfo->img.frame_num; + active_fs->bottom_field.long_term_frame_idx = 0; + active_fs->bottom_field.long_term_pic_num = 
0; + active_fs->bottom_field.used_for_reference = 0; + active_fs->bottom_field.is_long_term = 0; + //active_fs->bottom_field.structure = pInfo->img.structure; + active_fs->bottom_field.poc = pInfo->img.bottompoc; + }break; + } + } + else + { + // Need to drop a frame or something here + } + + return; +} ///// End of init Frame Store + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// Decoding POC for current Picture +// 1) pic_order_cnt_type (0, 1, 2) +// +////////////////////////////////////////////////////////////////////////////// + +void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num) +{ + int32_t MaxPicOrderCntLsb = (1<<(pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4)); + int32_t delta_pic_order_count[2]; + int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); + + int32_t AbsFrameNum =0; + int32_t ExpectedDeltaPerPicOrderCntCycle =0; + int32_t PicOrderCntCycleCnt = 0; + int32_t FrameNumInPicOrderCntCycle =0; + int32_t ExpectedPicOrderCnt =0; + + int32_t actual_frame_num =0; + + + + if(NonExisting) actual_frame_num = frame_num; + else actual_frame_num = pInfo->img.frame_num; + + switch (pInfo->active_SPS.pic_order_cnt_type) + { + case 0: + if(NonExisting != 0) break; + + if (pInfo->SliceHeader.idr_flag) + { + pInfo->img.PicOrderCntMsb = 0; + pInfo->img.PrevPicOrderCntLsb = 0; + } + else if (pInfo->img.last_has_mmco_5) + { + if (pInfo->img.last_pic_bottom_field) + { + pInfo->img.PicOrderCntMsb = 0; + pInfo->img.PrevPicOrderCntLsb = 0; + } + else + { + pInfo->img.PicOrderCntMsb = 0; + pInfo->img.PrevPicOrderCntLsb = pInfo->img.toppoc; + } + } + + // Calculate the MSBs of current picture + if((pInfo->img.pic_order_cnt_lsb < pInfo->img.PrevPicOrderCntLsb) && + ((pInfo->img.PrevPicOrderCntLsb - pInfo->img.pic_order_cnt_lsb )>=(MaxPicOrderCntLsb>>1)) ) + { + pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb + MaxPicOrderCntLsb; + } else if ((pInfo->img.pic_order_cnt_lsb > pInfo->img.PrevPicOrderCntLsb) && + ((pInfo->img.pic_order_cnt_lsb - pInfo->img.PrevPicOrderCntLsb ) > (MaxPicOrderCntLsb>>1)) ) + { + pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb - MaxPicOrderCntLsb; + } else + { + pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb; + } + + // 2nd + + if(pInfo->img.field_pic_flag==0) + { + //frame pix + pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb; + pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.delta_pic_order_cnt_bottom; + pInfo->img.ThisPOC = pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? 
pInfo->img.toppoc : pInfo->img.bottompoc; // POC200301 + } + else if (pInfo->img.bottom_field_flag==0) + { //top field + pInfo->img.ThisPOC= pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb; + } + else + { //bottom field + pInfo->img.ThisPOC= pInfo->img.bottompoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb; + } + pInfo->img.framepoc=pInfo->img.ThisPOC; + + if ( pInfo->img.frame_num != pInfo->old_slice.frame_num) + pInfo->img.PreviousFrameNum = pInfo->img.frame_num; + + if(pInfo->SliceHeader.nal_ref_idc) + { + pInfo->img.PrevPicOrderCntLsb = pInfo->img.pic_order_cnt_lsb; + pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb; + } + + break; + case 1: { + if(NonExisting) + { + delta_pic_order_count[0] = 0; + delta_pic_order_count[1] = 0; + } + else + { + delta_pic_order_count[0] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 0 : pInfo->img.delta_pic_order_cnt[0]; + delta_pic_order_count[1] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 0 : + ( (!pInfo->active_PPS.pic_order_present_flag) && (!(pInfo->img.field_pic_flag))) ? 0 : + pInfo->img.delta_pic_order_cnt[1]; + } + + // this if branch should not be taken during processing of a gap_in_frame_num pic since + // an IDR picture cannot produce non-existent frames... + if(pInfo->SliceHeader.idr_flag) + { + pInfo->img.FrameNumOffset = 0; + } + else + { + + if (actual_frame_num < pInfo->img.PreviousFrameNum) + { + pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum; + } + else + { + pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset; + } + } + + // pInfo->img.num_ref_frames_in_pic_order_cnt_cycle set from SPS + // so constant between existent and non-existent frames + if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle) + AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num; + else + AbsFrameNum = 0; + + // pInfo->img.disposable_flag should never be true for a non-existent frame since these are always + // references... 
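+  //
+  // Per the H.264 spec for pic_order_cnt_type == 1:
+  //   absFrameNum                = FrameNumOffset + frame_num (0 if the ref-frame cycle is empty)
+  //   picOrderCntCycleCnt        = (absFrameNum - 1) / num_ref_frames_in_pic_order_cnt_cycle
+  //   frameNumInPicOrderCntCycle = (absFrameNum - 1) % num_ref_frames_in_pic_order_cnt_cycle
+  //   expectedPicOrderCnt        = picOrderCntCycleCnt * ExpectedDeltaPerPicOrderCntCycle
+  //                              + sum of offset_for_ref_frame[0..frameNumInPicOrderCntCycle]
+  // with absFrameNum first decremented for non-reference pictures, as done below.
+  // The div/mod is computed with the add/subtract helpers from h264parse_math.c.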
+ if ((pInfo->SliceHeader.nal_ref_idc == 0) && (AbsFrameNum > 0)) AbsFrameNum = AbsFrameNum - 1; + + // 3rd + ExpectedDeltaPerPicOrderCntCycle = pInfo->active_SPS.expectedDeltaPerPOCCycle; + + if (AbsFrameNum) + { + // Rem: pInfo->img.num_ref_frames_in_pic_order_cnt_cycle takes max value of 255 (8 bit) + // Frame NUm may be 2^16 (17 bits) + // I guess we really have to treat AbsFrameNum as a 32 bit number + uint32_t temp = 0; + int32_t i=0; + int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE]; + + if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle) + PicOrderCntCycleCnt = ldiv_mod_u((uint32_t)(AbsFrameNum-1), (uint32_t)pInfo->img.num_ref_frames_in_pic_order_cnt_cycle, &temp); + + ExpectedPicOrderCnt = mult_u((uint32_t)PicOrderCntCycleCnt, (uint32_t)ExpectedDeltaPerPicOrderCntCycle); + + FrameNumInPicOrderCntCycle = temp; + + //ExpectedPicOrderCnt +=pInfo->active_SPS.expectedDeltaPerPOCCycle; +#ifndef USER_MODE + h264_Parse_Copy_Offset_Ref_Frames_From_DDR(pInfo, offset_for_ref_frame, pInfo->active_SPS.seq_parameter_set_id); + for (i = 0; i <= FrameNumInPicOrderCntCycle; i++) + ExpectedPicOrderCnt += offset_for_ref_frame[i]; +#else + for (i = 0; i <= FrameNumInPicOrderCntCycle; i++) + ExpectedPicOrderCnt += pInfo->active_SPS.offset_for_ref_frame[i]; +#endif + } + else { + ExpectedPicOrderCnt = 0; + } + + if (pInfo->SliceHeader.nal_ref_idc == 0) + ExpectedPicOrderCnt += pInfo->img.offset_for_non_ref_pic; + + if (!(pInfo->img.field_pic_flag)) + { + pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0]; + pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[1]; + pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? pInfo->img.toppoc : pInfo->img.bottompoc; + pInfo->img.ThisPOC = pInfo->img.framepoc; + } + else if (!(pInfo->img.bottom_field_flag)) + { + //top field + pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0]; + pInfo->img.ThisPOC = pInfo->img.toppoc; + pInfo->img.bottompoc = 0; + } + else + { + //bottom field + pInfo->img.toppoc = 0; + pInfo->img.bottompoc = ExpectedPicOrderCnt + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[0]; + pInfo->img.ThisPOC = pInfo->img.bottompoc; + } + + //CONFORMANCE_ISSUE + pInfo->img.framepoc=pInfo->img.ThisPOC; + + //CONFORMANCE_ISSUE + pInfo->img.PreviousFrameNum=pInfo->img.frame_num; + pInfo->img.PreviousFrameNumOffset=pInfo->img.FrameNumOffset; + + } + break; + case 2: { // POC MODE 2 + if (pInfo->SliceHeader.idr_flag) + { + pInfo->img.FrameNumOffset = 0; + pInfo->img.framepoc = 0; + pInfo->img.toppoc = 0; + pInfo->img.bottompoc = 0; + pInfo->img.ThisPOC = 0; + } + else + { + if (pInfo->img.last_has_mmco_5) + { + pInfo->img.PreviousFrameNum = 0; + pInfo->img.PreviousFrameNumOffset = 0; + } + if (actual_frame_num < pInfo->img.PreviousFrameNum) + pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum; + else + pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset; + + AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num; + if (pInfo->SliceHeader.nal_ref_idc == 0) pInfo->img.ThisPOC = (AbsFrameNum<<1) - 1; + else pInfo->img.ThisPOC = (AbsFrameNum<<1); + + if (!(pInfo->img.field_pic_flag)) + { + pInfo->img.toppoc = pInfo->img.ThisPOC; + pInfo->img.bottompoc = pInfo->img.ThisPOC; + pInfo->img.framepoc = pInfo->img.ThisPOC; + } + else if (!(pInfo->img.bottom_field_flag)) + { + pInfo->img.toppoc = pInfo->img.ThisPOC; + pInfo->img.framepoc = pInfo->img.ThisPOC; + } + else + { + 
+        pInfo->img.bottompoc = pInfo->img.ThisPOC;
+        pInfo->img.framepoc  = pInfo->img.ThisPOC;
+      }
+    }
+
+    //CONFORMANCE_ISSUE
+    pInfo->img.PreviousFrameNum       = pInfo->img.frame_num;
+    pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+    }
+    break;
+  default:
+    break;
+  }
+
+  return;
+} //// End of decoding_POC
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_hdr_post_poc ()
+//
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_hdr_post_poc(h264_Info* pInfo, int32_t NonExisting, int32_t frame_num, int32_t use_old)
+{
+  int32_t actual_frame_num = (NonExisting)? frame_num :
+                             (use_old)?     pInfo->old_slice.frame_num :
+                                            pInfo->img.frame_num;
+
+  int32_t disposable_flag = (use_old)? (pInfo->old_slice.nal_ref_idc == 0) :
+                                       (pInfo->SliceHeader.nal_ref_idc == 0);
+
+  switch(pInfo->img.pic_order_cnt_type)
+  {
+  case 0: {
+    pInfo->img.PreviousFrameNum = actual_frame_num;
+    if ((disposable_flag == 0) && (NonExisting == 0))
+    {
+      pInfo->img.PrevPicOrderCntLsb = (use_old)? pInfo->old_slice.pic_order_cnt_lsb :
+                                                 pInfo->SliceHeader.pic_order_cnt_lsb;
+      pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb;
+    }
+    }
+    break;
+  case 1: {
+    pInfo->img.PreviousFrameNum       = actual_frame_num;
+    pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+    }
+    break;
+  case 2: {
+    pInfo->img.PreviousFrameNum       = actual_frame_num;
+    pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+
+    }break;
+
+  default: {
+    }break;
+  }
+
+  return;
+} ///// End of h264_hdr_post_poc
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c
new file mode 100644
index 0000000..b5df6d9
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c
@@ -0,0 +1,82 @@
+//#include "math.h"
+// Arithmetic functions using add & subtract (no hardware multiply/divide assumed)
+
+unsigned long mult_u(register unsigned long var1, register unsigned long var2) // shift-and-add multiply
+{
+
+    register unsigned long var_out = 0;
+
+    while (var2 > 0)
+    {
+
+        if (var2 & 0x01)
+        {
+            var_out += var1;
+        }
+        var2 >>= 1;
+        var1 <<= 1;
+    }
+    return var_out;
+
+}// mult_u
+
+unsigned long ldiv_mod_u(register unsigned long a, register unsigned long b, unsigned long * mod)
+{
+    register unsigned long div = b;
+    register unsigned long res = 0;
+    register unsigned long bit = 0x1;
+
+    if (!div)
+    {
+        *mod = 0;
+        return 0xffffffff; // Div by 0
+    }
+
+    if (a < b)
+    {
+        *mod = a;
+        return 0; // It won't even go once
+    }
+
+    while(!(div & 0x80000000)) // align the divisor's MSB with bit 31
+    {
+        div <<= 1;
+        bit <<= 1;
+    }
+
+    while (bit) // shift-and-subtract long division: quotient in res, remainder left in a
+    {
+        if (div <= a)
+        {
+            res |= bit;
+            a -= div;
+        }
+        div >>= 1;
+        bit >>= 1;
+    }
+    *mod = a;
+    return res;
+}// ldiv_mod_u
+
+
+unsigned ldiv_u(register unsigned a, register unsigned b)
+{
+    register unsigned div = b << 16;
+    register unsigned res = 0;
+    register unsigned bit = 0x10000;
+
+    while (bit)
+    {
+        div >>= 1;
+        bit >>= 1;
+        if (div < a) // NB: strict < with a pre-shifted test, unlike ldiv_mod_u above; exact multiples fall through (unused by the POC code)
+        {
+            res |= bit;
+            a -= div;
+        }
+    }
+
+    return res;
+}
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c
new file mode 100644
index 0000000..a956607
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c
@@ -0,0 +1,198 @@
+
+/*!
+ ***********************************************************************
+ * \file: h264parse_mem.c
+ *
+ ***********************************************************************
+ */
+
+//#include
+
+#include "h264parse.h"
+
+
+// ---------------------------------------------------------------------------
+// IMPORTANT: in this implementation c is a 32-bit int, not a char, and only
+// the leading (num >> 2) whole words are written; trailing bytes are untouched
+// ---------------------------------------------------------------------------
+void* h264_memset( void* buf, uint32_t c, uint32_t num )
+{
+    uint32_t* buf32 = buf;
+    uint32_t size32 = ( num >> 2 );
+    uint32_t i;
+
+    for ( i = 0; i < size32; i++ )
+    {
+        *buf32++ = c;
+    }
+
+    return buf;
+}
+
+
+void* h264_memcpy( void* dest, void* src, uint32_t num ) // word-wise copy; same (num >> 2) truncation as h264_memset
+{
+    int32_t* dest32 = dest;
+    int32_t* src32 = src;
+    uint32_t size32 = ( num >> 2 );
+    uint32_t i;
+
+    for ( i = 0; i < size32; i++ )
+    {
+        *dest32++ = *src32++;
+    }
+
+    return dest;
+}
+
+
+#ifndef USER_MODE
+
+//h264_Parse_Copy_Pps_To_DDR () copy local pps to ddr mem
+void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId)
+{
+    uint32_t copy_size = sizeof(pic_param_set);
+    uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size;
+
+    if(nPPSId < MAX_NUM_PPS)
+    {
+        cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 1, 0);
+    }
+
+    return;
+
+}
+//end of h264_Parse_Copy_Pps_To_DDR
+
+
+// h264_Parse_Copy_Pps_From_DDR copy a pps with nPPSId from ddr mem to local PPS
+void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId)
+{
+
+    uint32_t copy_size= sizeof(pic_param_set);
+    uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size;
+
+    if( nPPSId < MAX_NUM_PPS)
+    {
+        cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 0, 0);
+    }
+
+    return;
+}
+//end of h264_Parse_Copy_Pps_From_DDR
+
+
+//h264_Parse_Copy_Sps_To_DDR () copy local sps to ddr mem with nSPSId
+void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId)
+{
+    uint32_t copy_size = sizeof(seq_param_set_used);
+    uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all); // table stride is the full struct; only the "used" part is copied
+
+    if(nSPSId < MAX_NUM_SPS)
+    {
+        cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 1, 0);
+    }
+
+    //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id);
+
+
+    return;
+}
+
+//end of h264_Parse_Copy_Sps_To_DDR
+
+
+// h264_Parse_Copy_Sps_From_DDR copy a sps with nSPSId from ddr mem to local SPS
+void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId)
+{
+    uint32_t copy_size= sizeof(seq_param_set_used);
+    uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all);
+
+    if(nSPSId < MAX_NUM_SPS)
+    {
+        cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 0, 0);
+    }
+
+    return;
+
+}
+//end of h264_Parse_Copy_Sps_From_DDR
+
+//h264_Parse_Copy_Offset_Ref_Frames_To_DDR () copy local offset_ref_frames to ddr mem with nSPSId
+void h264_Parse_Copy_Offset_Ref_Frames_To_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId)
+{
+    uint32_t copy_size = sizeof(int32_t)*MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+    uint32_t offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL+nSPSId*copy_size;
+
+    if(nSPSId < MAX_NUM_SPS)
+    {
+        //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 1, 0);
+        h264_memcpy((int32_t *)offset_ref_frames_entry_ptr,pOffset_ref_frames, copy_size);
+    }
+
+    return;
+}
+
+//end of h264_Parse_Copy_Offset_Ref_Frames_To_DDR
+
+
+// h264_Parse_Copy_Offset_Ref_Frames_From_DDR copy an offset_ref_frames with nSPSId from ddr mem to local offset_ref_frames
+void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId)
+{
+    uint32_t copy_size= sizeof(int32_t)*MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+    uint32_t offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL+nSPSId*copy_size;
+
+    if(nSPSId < MAX_NUM_SPS)
+    {
+        //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 0, 0);
+        h264_memcpy(pOffset_ref_frames, (int32_t *)offset_ref_frames_entry_ptr, copy_size);
+    }
+
+    return;
+
+}
+//end of h264_Parse_Copy_Offset_Ref_Frames_From_DDR
+
+
+//h264_Parse_Check_Sps_Updated_Flag () read back the SPS-updated flag for nSPSId from ddr mem
+uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId)
+{
+    uint32_t is_updated=0;
+    uint32_t copy_size = sizeof(uint32_t);
+    uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size;
+
+
+    if(nSPSId < MAX_NUM_SPS)
+    {
+        cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 1, 0); // NB: direction flag 1 is the "to DDR" value in the Copy_* helpers above -- looks swapped with Clear below
+    }
+
+    //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id);
+
+
+    return is_updated;
+}
+
+//end of h264_Parse_Check_Sps_Updated_Flag
+
+
+// h264_Parse_Clear_Sps_Updated_Flag clear the SPS-updated flag for nSPSId in ddr mem
+void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId)
+{
+    uint32_t is_updated=0;
+    uint32_t copy_size= sizeof(uint32_t);
+    uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size; // NB: 4-byte stride here, vs sizeof(seq_param_set_all) in the Copy_Sps_* helpers
+
+    if(nSPSId < MAX_NUM_SPS)
+    {
+        cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 0, 0);
+    }
+
+    return;
+
+}
+//end of h264_Parse_Clear_Sps_Updated_Flag
+
+
+#endif
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c
new file mode 100644
index 0000000..a1281c2
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c
@@ -0,0 +1,128 @@
+
+
+#include "h264.h"
+#include "h264parse.h"
+
+/*---------------------------------------------*/
+/*---------------------------------------------*/
+/*---------------------------------------------*/
+h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicParameterSet_t* PictureParameterSet)
+{
+    h264_Status ret = H264_PPS_ERROR;
+
+    //h264_PicParameterSet_t* PictureParameterSet = &pInfo->PictureParameterSet;
+    uint32_t code=0, i = 0;
+
+    do {
+        ///// PPS par1: pic_parameter_set_id & seq_parameter_set_id
+        code = h264_GetVLCElement(parent, pInfo, false);
+        if(code > MAX_PIC_PARAMS) {
+            break;
+        }
+        PictureParameterSet->pic_parameter_set_id = (uint8_t)code;
+
+
+        code = h264_GetVLCElement(parent, pInfo, false);
+        if(code > MAX_NUM_SPS-1) {
+            break;
+        }
+        PictureParameterSet->seq_parameter_set_id = (uint8_t)code;
+
+        ///// entropy_coding_mode_flag
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->entropy_coding_mode_flag = (uint8_t)code;
+        ///// pic_order_present_flag
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->pic_order_present_flag = (uint8_t)code;
+
+        PictureParameterSet->num_slice_groups_minus1 = h264_GetVLCElement(parent, pInfo, false);
+
+        //
+        // In main profile, FMO is excluded and num_slice_groups_minus1 should be 0
+        //
+        if(PictureParameterSet->num_slice_groups_minus1 > 0) //MAX_NUM_SLICE_GRPS)
+            break;
+
+        PictureParameterSet->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false)+1;
+        PictureParameterSet->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1;
+
+        //// PPS->num_ref_idx_l0_active --- [0,32]
+        if(((PictureParameterSet->num_ref_idx_l0_active) > MAX_NUM_REF_FRAMES) || ((PictureParameterSet->num_ref_idx_l1_active) > MAX_NUM_REF_FRAMES))
+        {
+            break;
+        }
+
+        //// weighted prediction
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->weighted_pred_flag = (uint8_t)code;
+
+        viddec_pm_get_bits(parent, &code, 2);
+        PictureParameterSet->weighted_bipred_idc = (uint8_t)code;
+
+        //// QP
+        PictureParameterSet->pic_init_qp_minus26 = h264_GetVLCElement(parent, pInfo, true);
+        PictureParameterSet->pic_init_qs_minus26 = h264_GetVLCElement(parent, pInfo, true);
+        if(((PictureParameterSet->pic_init_qp_minus26+26) > MAX_QP) || ((PictureParameterSet->pic_init_qs_minus26+26) > MAX_QP))
+            break;
+        PictureParameterSet->chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true);
+
+        //// Deblocking ctl parameters
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->deblocking_filter_control_present_flag = (uint8_t)code;
+
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->constrained_intra_pred_flag = (uint8_t)code;
+
+        if( viddec_pm_get_bits(parent, &code, 1) == -1)
+            break;
+        PictureParameterSet->redundant_pic_cnt_present_flag = (uint8_t)code;
+
+        //// Check whether more RBSP data is left for the optional trailing parameters
+        if(h264_More_RBSP_Data(parent, pInfo))
+        {
+            viddec_pm_get_bits(parent, &code, 1);
+            PictureParameterSet->transform_8x8_mode_flag = (uint8_t)code;
+
+            if( viddec_pm_get_bits(parent, &code, 1) == -1)
+                break;
+            PictureParameterSet->pic_scaling_matrix_present_flag = (uint8_t)code;
+
+            if(PictureParameterSet->pic_scaling_matrix_present_flag)
+            {
+                uint32_t n_ScalingList = 6 + (PictureParameterSet->transform_8x8_mode_flag << 1);
+                for(i=0; i<n_ScalingList; i++)
+                {
+                    viddec_pm_get_bits(parent, &code, 1);
+                    PictureParameterSet->pic_scaling_list_present_flag[i] = (uint8_t)code;
+
+                    if(PictureParameterSet->pic_scaling_list_present_flag[i])
+                    {
+                        if(i<6)
+                            h264_Scaling_List(parent, PictureParameterSet->ScalingList4x4[i], 16, &PictureParameterSet->UseDefaultScalingMatrix4x4Flag[i], pInfo);
+                        else
+                            h264_Scaling_List(parent, PictureParameterSet->ScalingList8x8[i-6], 64, &PictureParameterSet->UseDefaultScalingMatrix8x8Flag[i-6], pInfo);
+                    }
+                }
+            }
+
+            PictureParameterSet->second_chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true); //fix
+            //if((PictureParameterSet->second_chroma_qp_index_offset>12) || (PictureParameterSet->second_chroma_qp_index_offset < -12))
+            //    break;
+        }
+        else
+        {
+            PictureParameterSet->transform_8x8_mode_flag = 0;
+            PictureParameterSet->pic_scaling_matrix_present_flag = 0;
+            PictureParameterSet->second_chroma_qp_index_offset = PictureParameterSet->chroma_qp_index_offset;
+        }
+
+        ret = H264_STATUS_OK;
+    }while(0);
+
+    //h264_Parse_rbsp_trailing_bits(pInfo);
+    return ret;
+}
+
+////////// EOF///////////////
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c
new file mode 100644
index 0000000..829eb55
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c
@@ -0,0 +1,1176 @@
+#define H264_PARSE_SEI_C
+
+#ifdef H264_PARSE_SEI_C
+
+#include "h264.h"
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_item_types.h"
+#include "viddec_fw_workload.h"
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_sei_stream_initialise ()
+//
+//
+
+void h264_sei_stream_initialise (h264_Info* pInfo)
+{
+    pInfo->sei_information.capture_POC = 0;
pInfo->sei_information.disp_frozen = 0; + pInfo->sei_information.release_POC = 0; + pInfo->sei_information.capture_fn = 0; + pInfo->sei_information.recovery_fn = 0xFFFFFFFF; + pInfo->sei_information.scan_format = 0; + pInfo->sei_information.broken_link_pic = 0; + return; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_buffering_period(void *parent,h264_Info* pInfo) +{ + h264_Status ret = H264_STATUS_SEI_ERROR; + + h264_SEI_buffering_period_t* sei_msg_ptr; + h264_SEI_buffering_period_t sei_buffering_period; + int32_t SchedSelIdx; + int num_bits = 0; + + sei_msg_ptr = (h264_SEI_buffering_period_t *)(&sei_buffering_period); + + do{ + if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) + { + num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1; + } + else if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag) + { + num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1; + } + + sei_msg_ptr->seq_param_set_id = h264_GetVLCElement(parent, pInfo, false); + if(sei_msg_ptr->seq_param_set_id >= NUM_SPS) + break; + + //check if this id is same as the id of the current SPS //fix + + if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) + { + if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) + break; + + for(SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; SchedSelIdx++) + { + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_nal, num_bits); + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_nal, num_bits); + } + } + + if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1) + { + if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) + break; + + for(SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; SchedSelIdx++) + { + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_vcl, num_bits); + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_vcl, num_bits); + } + } + + ret = H264_STATUS_OK; + } while (0); + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_pic_timing(void *parent,h264_Info* pInfo) +{ + int32_t CpbDpbDelaysPresentFlag = 0; + h264_SEI_pic_timing_t* sei_msg_ptr; + h264_SEI_pic_timing_t sei_pic_timing; + int32_t num_bits_cpb = 0, num_bits_dpb = 0, time_offset_length = 0; + uint32_t code; + uint32_t clock_timestamp_flag = 0; + uint32_t full_timestamp_flag = 0; + uint32_t seconds_flag = 0; + uint32_t minutes_flag = 0; + uint32_t hours_flag = 0; + uint32_t time_offset = 0; + + + + + sei_msg_ptr = (h264_SEI_pic_timing_t *)(&sei_pic_timing); + + 
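+  // The bit widths of cpb_removal_delay / dpb_output_delay are not fixed: they
+  // come from the VUI HRD parameters (NAL HRD if present, otherwise VCL HRD).
+  // Note that time_offset_length is only picked up from the NAL HRD branch here.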
if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag) + { + num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 +1; + num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 + 1; + time_offset_length = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_time_offset_length; + } + else if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag) + { + num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 +1; + num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 + 1; + } + + + CpbDpbDelaysPresentFlag = 1; // as per amphion code + if(CpbDpbDelaysPresentFlag) + { + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->cpb_removal_delay, num_bits_cpb); + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->dpb_output_delay, num_bits_dpb); + } + + if(pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag) + { + int32_t i = 0, NumClockTS = 0; + + viddec_workload_item_t wi; + + wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0; + viddec_pm_get_bits(parent, &code , 4); + sei_msg_ptr->pic_struct = (uint8_t)code; + + + if((sei_msg_ptr->pic_struct == 0) || (sei_msg_ptr->pic_struct == 7) || (sei_msg_ptr->pic_struct == 8)) { + pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_PROGRESSIVE; + } else { + pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_INTERLACED; + } + + wi.vwi_type = VIDDEC_WORKLOAD_SEI_PIC_TIMING; + wi.h264_sei_pic_timing.pic_struct = sei_msg_ptr->pic_struct; + +#ifndef VBP + //Push to current if we are in first frame, or we do not detect previous frame end + if( (pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done) ) { + viddec_pm_append_workitem( parent, &wi ); + } else { + viddec_pm_append_workitem_next( parent, &wi ); + } +#endif + + if(sei_msg_ptr->pic_struct < 3) { + NumClockTS = 1; + } else if((sei_msg_ptr->pic_struct < 5) || (sei_msg_ptr->pic_struct == 7)) { + NumClockTS = 2; + } else { + NumClockTS = 3; + } + + for(i = 0; i < NumClockTS; i++) + { + viddec_pm_get_bits(parent, &code , 1); + clock_timestamp_flag = code; + //sei_msg_ptr->clock_timestamp_flag[i] = (uint8_t)code; + + if(clock_timestamp_flag) + { + viddec_pm_get_bits(parent, &code , 2); + //sei_msg_ptr->ct_type[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->nuit_field_based_flag[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 5); + //sei_msg_ptr->counting_type[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->full_timestamp_flag[i] = (uint8_t)code; + full_timestamp_flag = code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->discontinuity_flag[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->cnt_dropped_flag[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 8); + //sei_msg_ptr->n_frames[i] = (uint8_t)code; + + + if(full_timestamp_flag) + { + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->seconds_value[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->minutes_value[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 5); + //sei_msg_ptr->hours_value[i] = (uint8_t)code; + } + else + { + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->seconds_flag[i] = (uint8_t)code; + seconds_flag = code; + + if(seconds_flag) + { + viddec_pm_get_bits(parent, &code , 6); + 
//sei_msg_ptr->seconds_value[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->minutes_flag[i] = (uint8_t)code; + minutes_flag = code; + + if(minutes_flag) + { + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->minutes_value[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->hours_flag[i] = (uint8_t)code; + hours_flag = code; + + if(hours_flag){ + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->hours_value[i] = (uint8_t)code; + } + } + } + } + + if(time_offset_length > 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&time_offset, time_offset_length); + } + } + } + } + + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_pan_scan(void *parent,h264_Info* pInfo) +{ + h264_SEI_pan_scan_rectangle_t* sei_msg_ptr; + h264_SEI_pan_scan_rectangle_t sei_pan_scan; + uint32_t code; + + viddec_workload_item_t wi; + + h264_memset( &(sei_pan_scan), 0x0, sizeof(h264_SEI_pan_scan_rectangle_t) ); + + viddec_fw_reset_workload_item(&wi); + wi.vwi_type = VIDDEC_WORKLOAD_H264_PAN_SCAN; + + sei_msg_ptr = (h264_SEI_pan_scan_rectangle_t *)(&sei_pan_scan); + + sei_msg_ptr->pan_scan_rect_id = h264_GetVLCElement(parent, pInfo, false); + + wi.h264_sei_pan_scan.pan_scan_rect_id = sei_msg_ptr->pan_scan_rect_id; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->pan_scan_rect_cancel_flag = (uint8_t)code; + viddec_fw_h264_sei_pan_scan_set_cancel_flag(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_rect_cancel_flag); + + if(!sei_msg_ptr->pan_scan_rect_cancel_flag) + { + int32_t i; + sei_msg_ptr->pan_scan_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false); + + viddec_fw_h264_sei_pan_scan_set_cnt_minus1(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_cnt_minus1); + if(sei_msg_ptr->pan_scan_cnt_minus1 > MAX_PAN_SCAN_CNT -1) + { + return H264_STATUS_SEI_ERROR; + } + for(i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++) + { + sei_msg_ptr->pan_scan_rect_left_offset[i] = h264_GetVLCElement(parent, pInfo, true); + sei_msg_ptr->pan_scan_rect_right_offset[i] = h264_GetVLCElement(parent, pInfo, true); + sei_msg_ptr->pan_scan_rect_top_offset[i] = h264_GetVLCElement(parent, pInfo, true); + sei_msg_ptr->pan_scan_rect_bottom_offset[i] = h264_GetVLCElement(parent, pInfo, true); + } + sei_msg_ptr->pan_scan_rect_repetition_period = h264_GetVLCElement(parent, pInfo, false); + wi.h264_sei_pan_scan.pan_scan_rect_repetition_period = sei_msg_ptr->pan_scan_rect_repetition_period; + } + +#ifndef VBP + if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame + { + viddec_pm_append_workitem( parent, &wi ); + } + else + { + viddec_pm_append_workitem_next( parent, &wi ); + } +#endif + + if(!sei_msg_ptr->pan_scan_rect_cancel_flag) + { + int32_t i; + + viddec_fw_reset_workload_item(&wi); + wi.vwi_type = VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT; + + for(i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++) + { + viddec_fw_h264_pan_scan_set_left(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_left_offset[i]); + viddec_fw_h264_pan_scan_set_right(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_right_offset[i]); + viddec_fw_h264_pan_scan_set_top(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_top_offset[i]); + 
viddec_fw_h264_pan_scan_set_bottom(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_bottom_offset[i]); + +#ifndef VBP + if(pInfo->Is_first_frame_in_stream) { //cur is first frame + viddec_pm_append_workitem( parent, &wi ); + } else { + viddec_pm_append_workitem_next( parent, &wi ); + } +#endif + } + } + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_filler_payload(void *parent,h264_Info* pInfo, uint32_t payload_size) +{ + + h264_SEI_filler_payload_t* sei_msg_ptr; + h264_SEI_filler_payload_t sei_filler_payload; + uint32_t k; + uint32_t code; + + //remove warning + pInfo = pInfo; + + sei_msg_ptr = (h264_SEI_filler_payload_t *)(&sei_filler_payload); + for(k=0; k < payload_size; k++) + { + viddec_pm_get_bits(parent, &code , 8); + sei_msg_ptr->ff_byte = (uint8_t)code; + } + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_userdata_reg(void *parent,h264_Info* pInfo, uint32_t payload_size) +{ + + h264_SEI_userdata_registered_t* sei_msg_ptr; + h264_SEI_userdata_registered_t sei_userdata_registered; + uint32_t i; + int32_t byte = 0; + uint32_t code = 0; + viddec_workload_item_t wi; + + wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED; + wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0; + //remove warning + pInfo = pInfo; + + sei_msg_ptr = (h264_SEI_userdata_registered_t *)(&sei_userdata_registered); + + viddec_pm_get_bits(parent, &code , 8); + sei_msg_ptr->itu_t_t35_country_code = (uint8_t)code; + + if(sei_msg_ptr->itu_t_t35_country_code != 0xff) { + i = 1; + } else { + viddec_pm_get_bits(parent, &code , 8); + sei_msg_ptr->itu_t_t35_country_code_extension_byte = (uint8_t)code; + i = 2; + } + + + wi.user_data.size =0; + do + { + + viddec_pm_get_bits(parent, (uint32_t *)&byte, 8); + + wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; + wi.user_data.size++; + + if(11 == wi.user_data.size) + { + viddec_pm_setup_userdata(&wi); +#ifndef VBP + if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame + { + viddec_pm_append_workitem( parent, &wi ); + } + else + { + viddec_pm_append_workitem_next( parent, &wi ); + } +#endif + wi.user_data.size =0; + } + + i++; + }while(i < payload_size); + + if(0!=wi.user_data.size) + { + viddec_pm_setup_userdata(&wi); + +#ifndef VBP + if(pInfo->Is_first_frame_in_stream) //cur is first frame + { + viddec_pm_append_workitem( parent, &wi ); + } + else + { + viddec_pm_append_workitem_next( parent, &wi ); + } +#endif + } + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_userdata_unreg(void *parent, h264_Info* pInfo, uint32_t payload_size) +{ + + h264_SEI_userdata_unregistered_t* sei_msg_ptr; + h264_SEI_userdata_unregistered_t 
sei_userdata_unregistered; + uint32_t i; + int32_t byte = 0; + uint32_t code; + + viddec_workload_item_t wi; + + wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED; + + //remove warning + pInfo = pInfo; + + sei_msg_ptr = (h264_SEI_userdata_unregistered_t *)(&sei_userdata_unregistered); + + for (i = 0; i < 4; i++) + { + viddec_pm_get_bits(parent, &code , 32); + sei_msg_ptr->uuid_iso_iec_11578[i] = (uint8_t)code; + } + + wi.user_data.size =0; + for(i = 16; i < payload_size; i++) + { + + viddec_pm_get_bits(parent, (uint32_t *)&byte, 8); + + wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; + wi.user_data.size++; + + if(11 == wi.user_data.size) + { + viddec_pm_setup_userdata(&wi); + if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame + { + viddec_pm_append_workitem( parent, &wi ); + } + else + { + viddec_pm_append_workitem_next( parent, &wi ); + } + + wi.user_data.size =0; + } + } + + if(0!=wi.user_data.size) + { + viddec_pm_setup_userdata(&wi); + if(pInfo->Is_first_frame_in_stream) //cur is first frame + { + viddec_pm_append_workitem( parent, &wi ); + } + else + { + viddec_pm_append_workitem_next( parent, &wi ); + } + } + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_recovery_point(void *parent, h264_Info* pInfo) +{ + + h264_SEI_recovery_point_t* sei_msg_ptr; + h264_SEI_recovery_point_t sei_recovery_point; + uint32_t code; + viddec_workload_item_t wi; + + + sei_msg_ptr = (h264_SEI_recovery_point_t *)(&sei_recovery_point); + + sei_msg_ptr->recovery_frame_cnt = h264_GetVLCElement(parent, pInfo, false); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->exact_match_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->broken_link_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 2); + sei_msg_ptr->changing_slice_group_idc = (uint8_t)code; + + pInfo->sei_information.recovery_point = 1; + pInfo->sei_information.recovery_frame_cnt = (int32_t) sei_msg_ptr->recovery_frame_cnt; + pInfo->sei_information.capture_fn = 1; + pInfo->sei_information.broken_link_pic = sei_msg_ptr->broken_link_flag; + + if(pInfo->got_start) { + pInfo->img.recovery_point_found |= 2; + + //// Enable the RP recovery if no IDR ---Cisco + if((pInfo->img.recovery_point_found & 1)==0) + pInfo->sei_rp_received = 1; + } + + // + /// Append workload for SEI + // + viddec_fw_reset_workload_item(&wi); + wi.vwi_type = VIDDEC_WORKLOAD_SEI_RECOVERY_POINT; + wi.h264_sei_recovery_point.recovery_frame_cnt = sei_msg_ptr->recovery_frame_cnt; + viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->exact_match_flag); + viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->broken_link_flag); + wi.h264_sei_recovery_point.changing_slice_group_idc = sei_msg_ptr->changing_slice_group_idc; + + if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame + { + viddec_pm_append_workitem( parent, &wi ); + } + else + { + viddec_pm_append_workitem_next( parent, &wi ); + } + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* 
------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_dec_ref_pic_marking_rep(void *parent,h264_Info* pInfo) +{ + + h264_SEI_decoded_ref_pic_marking_repetition_t* sei_msg_ptr; + h264_SEI_decoded_ref_pic_marking_repetition_t sei_ref_pic; + uint32_t code; + + sei_msg_ptr = (h264_SEI_decoded_ref_pic_marking_repetition_t *)(&sei_ref_pic); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->original_idr_flag = (uint8_t)code; + + sei_msg_ptr->original_frame_num = h264_GetVLCElement(parent, pInfo, false); + + if(!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag)) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->orignal_field_pic_flag = (uint8_t)code; + + if(sei_msg_ptr->orignal_field_pic_flag) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->original_bottom_field_pic_flag = (uint8_t)code; + } + } + h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, &pInfo->SliceHeader); + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_spare_pic(void *parent,h264_Info* pInfo) +{ + + //h264_SEI_spare_picture_t* sei_msg_ptr; + + //remove warning + pInfo = pInfo; + parent = parent; + + //sei_msg_ptr = (h264_SEI_spare_picture_t *)(&user_data->user_data[0]); + + //OS_INFO("Not supported SEI\n"); + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_scene_info(void *parent,h264_Info* pInfo) +{ + + h264_SEI_scene_info_t* sei_msg_ptr; + h264_SEI_scene_info_t sei_scene_info; + uint32_t code; + + sei_msg_ptr = (h264_SEI_scene_info_t*)(&sei_scene_info); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->scene_info_present_flag = (uint8_t)code; + + if(sei_msg_ptr->scene_info_present_flag) + { + sei_msg_ptr->scene_id = h264_GetVLCElement(parent, pInfo, false); + sei_msg_ptr->scene_transitioning_type= h264_GetVLCElement(parent, pInfo, false); + if(sei_msg_ptr->scene_transitioning_type > 3) + { + sei_msg_ptr->second_scene_id = h264_GetVLCElement(parent, pInfo, false); + } + } + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_sub_seq_info(void *parent,h264_Info* pInfo) +{ + + h264_SEI_sub_sequence_info_t* sei_msg_ptr; + h264_SEI_sub_sequence_info_t sei_sub_sequence_info; + uint32_t code; + + sei_msg_ptr = (h264_SEI_sub_sequence_info_t *)(&sei_sub_sequence_info); + + sei_msg_ptr->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo,false); + sei_msg_ptr->sub_seq_id= h264_GetVLCElement(parent, pInfo,false); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->first_ref_pic_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->leading_non_ref_pic_flag = 
(uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->last_pic_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->sub_seq_frame_num_flag = (uint8_t)code; + + + if(sei_msg_ptr->sub_seq_frame_num_flag) + { + sei_msg_ptr->sub_seq_frame_num = h264_GetVLCElement(parent, pInfo,false); + } + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_sub_seq_layer(void *parent,h264_Info* pInfo) +{ + + h264_SEI_sub_sequence_layer_t* sei_msg_ptr; + h264_SEI_sub_sequence_layer_t sei_sub_sequence_layer; + int32_t layer; + uint32_t code; + + sei_msg_ptr = (h264_SEI_sub_sequence_layer_t *)(&sei_sub_sequence_layer); + sei_msg_ptr->num_sub_seq_layers_minus1 = h264_GetVLCElement(parent, pInfo,false); + + if(sei_msg_ptr->num_sub_seq_layers_minus1 >= MAX_SUB_SEQ_LAYERS) + { + return H264_STATUS_SEI_ERROR; + } + + for(layer = 0;layer <= sei_msg_ptr->num_sub_seq_layers_minus1; layer++) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->accurate_statistics_flag[layer] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 16); + sei_msg_ptr->average_bit_rate[layer] = (uint16_t)code; + + viddec_pm_get_bits(parent, &code , 16); + sei_msg_ptr->average_frame_rate[layer] = (uint16_t)code; + + } + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_sub_seq(void *parent,h264_Info* pInfo) +{ + int32_t n; + uint32_t code; + + h264_SEI_sub_sequence_t* sei_msg_ptr; + h264_SEI_sub_sequence_t sei_sub_sequence; + + sei_msg_ptr = (h264_SEI_sub_sequence_t *)(&sei_sub_sequence); + + sei_msg_ptr->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo, false); + sei_msg_ptr->sub_seq_id= h264_GetVLCElement(parent, pInfo, false); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->duration_flag = (uint8_t)code; + + if(sei_msg_ptr->duration_flag) + { + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->sub_seq_duration, 32); + } + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->average_rate_flag = (uint8_t)code; + + if(sei_msg_ptr->average_rate_flag) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->average_statistics_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 16); + sei_msg_ptr->average_bit_rate = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 16); + sei_msg_ptr->average_frame_rate = (uint8_t)code; + + } + sei_msg_ptr->num_referenced_subseqs = h264_GetVLCElement(parent, pInfo, false); + if(sei_msg_ptr->num_referenced_subseqs >= MAX_NUM_REF_SUBSEQS) + { + return H264_STATUS_SEI_ERROR; + } + + for(n = 0; n < sei_msg_ptr->num_referenced_subseqs; n++) + { + sei_msg_ptr->ref_sub_seq_layer_num= h264_GetVLCElement(parent, pInfo, false); + sei_msg_ptr->ref_sub_seq_id= h264_GetVLCElement(parent, pInfo, false); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->ref_sub_seq_direction = (uint8_t)code; + } + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* 
------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_full_frame_freeze(void *parent,h264_Info* pInfo) +{ + + h264_SEI_full_frame_freeze_t* sei_msg_ptr; + h264_SEI_full_frame_freeze_t sei_full_frame_freeze; + + sei_msg_ptr = (h264_SEI_full_frame_freeze_t *)(&sei_full_frame_freeze); + + sei_msg_ptr->full_frame_freeze_repetition_period= h264_GetVLCElement(parent, pInfo, false); + + pInfo->sei_information.capture_POC = 1; + pInfo->sei_information.freeze_rep_period = sei_msg_ptr->full_frame_freeze_repetition_period; + //pInfo->img.sei_freeze_this_image = 1; + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_full_frame_freeze_release(void *parent,h264_Info* pInfo) +{ + //remove warning + parent = parent; + pInfo = pInfo; + + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_full_frame_snapshot(void *parent,h264_Info* pInfo) +{ + + h264_SEI_full_frame_snapshot_t* sei_msg_ptr; + h264_SEI_full_frame_snapshot_t sei_full_frame_snapshot; + + sei_msg_ptr = (h264_SEI_full_frame_snapshot_t *)(&sei_full_frame_snapshot); + + sei_msg_ptr->snapshot_id = h264_GetVLCElement(parent, pInfo, false); + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_progressive_segement_start(void *parent,h264_Info* pInfo) +{ + + h264_SEI_progressive_segment_start_t* sei_msg_ptr; + h264_SEI_progressive_segment_start_t sei_progressive_segment_start; + + sei_msg_ptr = (h264_SEI_progressive_segment_start_t *)(&sei_progressive_segment_start); + + sei_msg_ptr->progressive_refinement_id= h264_GetVLCElement(parent, pInfo, false); + sei_msg_ptr->num_refinement_steps_minus1= h264_GetVLCElement(parent, pInfo, false); + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_progressive_segment_end(void *parent,h264_Info* pInfo) +{ + + h264_SEI_progressive_segment_end_t* sei_msg_ptr; + h264_SEI_progressive_segment_end_t sei_progressive_segment_end; + + sei_msg_ptr = (h264_SEI_progressive_segment_end_t *)(&sei_progressive_segment_end); + + sei_msg_ptr->progressive_refinement_id = h264_GetVLCElement(parent, pInfo, false); + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ 
*/ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_motion_constrained_slice_grp_set(void *parent, h264_Info* pInfo) +{ + int32_t i; + uint32_t code; + h264_SEI_motion_constrained_slice_group_t* sei_msg_ptr; + h264_SEI_motion_constrained_slice_group_t sei_motion_constrained_slice_group; + + sei_msg_ptr = (h264_SEI_motion_constrained_slice_group_t *)(&sei_motion_constrained_slice_group); + + sei_msg_ptr->num_slice_groups_in_set_minus1= h264_GetVLCElement(parent, pInfo, false); + if(sei_msg_ptr->num_slice_groups_in_set_minus1 >= MAX_NUM_SLICE_GRPS) + { + return H264_STATUS_SEI_ERROR; + } + + for(i=0; i<= sei_msg_ptr->num_slice_groups_in_set_minus1; i++) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->slice_group_id[i] = (uint8_t)code; + } + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->exact_sample_value_match_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->pan_scan_rect_flag = (uint8_t)code; + + + if(sei_msg_ptr->pan_scan_rect_flag) + { + sei_msg_ptr->pan_scan_rect_id= h264_GetVLCElement(parent, pInfo, false); + } + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_film_grain_characteristics(void *parent,h264_Info* pInfo) +{ + //OS_INFO("Not supported SEI\n"); + + //remove warning + parent = parent; + pInfo = pInfo; + + + + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_deblocking_filter_display_preferences(void *parent,h264_Info* pInfo) +{ + + //h264_SEI_deblocking_filter_display_pref_t* sei_msg_ptr; + + //remove warning + parent = parent; + pInfo = pInfo; + + //sei_msg_ptr = (h264_SEI_deblocking_filter_display_pref_t *)(&user_data->user_data[0]); + + //OS_INFO("Not supported SEI\n"); + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_stereo_video_info(void *parent,h264_Info* pInfo) +{ + + //h264_SEI_stereo_video_info_t* sei_msg_ptr; + + //remove warning + parent = parent; + pInfo = pInfo; + + + //sei_msg_ptr = (h264_SEI_stereo_video_info_t *)(&user_data->user_data[0]); + + //OS_INFO("Not supported SEI\n"); + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_reserved_sei_message(void *parent, h264_Info* pInfo, uint32_t payload_size) +{ + int32_t k, byte_index, user_data_byte_index; + uint32_t i; + int32_t word, bits; + uint32_t user_data; + //h264_SEI_reserved_t* sei_msg_ptr; + //h264_SEI_reserved_t sei_reserved; + + 
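+  // The loop below repacks the reserved-SEI payload bytes little-endian into
+  // 32-bit words (byte_index cycles 0..3), but the assembled words are then
+  // discarded: the function returns user_data_byte_index, which also leaves
+  // the trailing "return H264_STATUS_OK;" unreachable.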
//remove warning + pInfo = pInfo; + + //sei_msg_ptr = (h264_SEI_reserved_t *)(&sei_reserved); + + byte_index = 0; + word = 0; + user_data_byte_index = 0x0; + + for(i = 0, k = 0; i < payload_size; i++) + { + if(byte_index == 0) word = 0; + viddec_pm_get_bits(parent, (uint32_t *)&bits, 8); + + switch (byte_index) + { + case 1: + word = (bits << 8) | word; + break; + case 2: + word = (bits << 16) | word; + break; + case 3: + word = (bits << 24) | word; + break; + default : + word = bits; + break; + } + + if(byte_index == 3) + { + byte_index = 0; + user_data = word; + k++; + } + else + { + byte_index++; + } + + user_data_byte_index++; + if ( user_data_byte_index == MAX_USER_DATA_SIZE) + { + //user_data->user_data_size = user_data_byte_index; + //sei_msg_ptr = (h264_SEI_reserved_t *)(&user_data->user_data[0]); + byte_index = 0; + word = 0; + user_data_byte_index = 0x0; + } + } + + if(byte_index) + user_data = word; + + //user_data->user_data_size = user_data_byte_index; + + return user_data_byte_index; + + return H264_STATUS_OK; +} + +////// TODO +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtype payloadType, int32_t payloadSize) +{ + //int32_t bit_equal_to_zero; + h264_Status status = H264_STATUS_OK; + + //removing warning + payloadSize = payloadSize; + + switch(payloadType) + { + case SEI_BUF_PERIOD: + status = h264_sei_buffering_period(parent, pInfo); + break; + case SEI_PIC_TIMING: + status = h264_sei_pic_timing(parent, pInfo); + break; + case SEI_PAN_SCAN: + status = h264_sei_pan_scan(parent, pInfo); + break; + case SEI_FILLER_PAYLOAD: + status = h264_sei_filler_payload(parent, pInfo, payloadSize); + break; + case SEI_REG_USERDATA: + status = h264_sei_userdata_reg(parent, pInfo, payloadSize); + break; + case SEI_UNREG_USERDATA: + status = h264_sei_userdata_unreg(parent, pInfo, payloadSize); + break; + case SEI_RECOVERY_POINT: + h264_sei_recovery_point(parent, pInfo); + break; + case SEI_DEC_REF_PIC_MARKING_REP: + status = h264_sei_dec_ref_pic_marking_rep(parent, pInfo); + break; + case SEI_SPARE_PIC: + status = h264_sei_spare_pic(parent, pInfo); + break; + case SEI_SCENE_INFO: + status = h264_sei_scene_info(parent, pInfo); + break; + case SEI_SUB_SEQ_INFO: + status = h264_sei_sub_seq_info(parent, pInfo); + break; + case SEI_SUB_SEQ_LAYER: + status = h264_sei_sub_seq_layer(parent, pInfo); + break; + case SEI_SUB_SEQ: + status = h264_sei_sub_seq(parent, pInfo); + break; + case SEI_FULL_FRAME_FREEZE: + status = h264_sei_full_frame_freeze(parent, pInfo); + break; + case SEI_FULL_FRAME_FREEZE_RELEASE: + h264_sei_full_frame_freeze_release(parent, pInfo); + break; + case SEI_FULL_FRAME_SNAPSHOT: + status = h264_sei_full_frame_snapshot(parent, pInfo); + break; + case SEI_PROGRESSIVE_SEGMENT_START: + status = h264_sei_progressive_segement_start(parent, pInfo); + break; + case SEI_PROGRESSIVE_SEGMENT_END: + status = h264_sei_progressive_segment_end(parent, pInfo); + break; + case SEI_MOTION_CONSTRAINED_SLICE_GRP_SET: + status = h264_sei_motion_constrained_slice_grp_set(parent, pInfo); + break; + case SEI_FILM_GRAIN_CHARACTERISTICS: + status = h264_sei_film_grain_characteristics(parent, pInfo); + break; + case SEI_DEBLK_FILTER_DISPLAY_PREFERENCE: + status = 
h264_sei_deblocking_filter_display_preferences(parent, pInfo); + break; + case SEI_STEREO_VIDEO_INFO: + status = h264_sei_stereo_video_info(parent, pInfo); + break; + default: + status = h264_sei_reserved_sei_message(parent, pInfo, payloadSize); + break; + } + +/* + viddec_pm_get_bits(parent, (uint32_t *)&tmp, 1); + + if(tmp == 0x1) // if byte is not aligned + { + while(pInfo->bitoff != 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&bit_equal_to_zero, 1); + } + } +*/ + return status; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void *parent, h264_Info* pInfo) +{ + h264_Status status = H264_STATUS_OK; + int32_t payload_type, payload_size; + uint32_t next_8_bits = 0,bits_offset=0,byte_offset = 0; + uint8_t is_emul = 0; + int32_t bits_operation_result = 0; + + do { + //// payload_type + payload_type = 0; + viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + while (next_8_bits == 0xFF) + { + bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + if(-1 == bits_operation_result) + { + status = H264_STATUS_SEI_ERROR; + return status; + } + payload_type += 255; + + } + //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + payload_type += next_8_bits; + + //// payload_size + payload_size = 0; + viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + while (next_8_bits == 0xFF) + { + payload_size += 255; + bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + if(-1 == bits_operation_result) + { + status = H264_STATUS_SEI_ERROR; + return status; + } + } + //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + payload_size += next_8_bits; + + //PRINTF(MFD_NONE, " SEI: payload type = %d, payload size = %d \n", payload_type, payload_size); + + + ///////////////////////////////// + // Parse SEI payloads + ///////////////////////////////// + status = h264_SEI_payload(parent, pInfo, payload_type, payload_size); + if(status != H264_STATUS_OK) + break; + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + // OS_INFO("SEI byte_offset 3= %d, bits_offset=%d\n", byte_offset, bits_offset); + + if(bits_offset!=0) + { + viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8-bits_offset); + } + + bits_operation_result = viddec_pm_peek_bits(parent, (uint32_t *)&next_8_bits, 8); + if(-1 == bits_operation_result) + { + status = H264_STATUS_SEI_ERROR; + return status; + } + + // OS_INFO("next_8_bits = %08x\n", next_8_bits); + + }while(next_8_bits != 0x80); + + //} while (h264_More_RBSP_Data(parent, pInfo) && status == H264_STATUS_OK); + + return status; +} + +#endif + diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c new file mode 100644 index 0000000..3134ae0 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c @@ -0,0 +1,740 @@ +//#define H264_PARSE_SLICE_HDR +//#ifdef H264_PARSE_SLICE_HDR + +#include "h264.h" +#include "h264parse.h" + +extern int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, unsigned char *is_emul); + + +/*-----------------------------------------------------------------------------------------*/ +// Slice header 1---- +// 1) first_mb_in_slice, slice_type, 
pic_parameter_id +/*-----------------------------------------------------------------------------------------*/ +h264_Status h264_Parse_Slice_Header_1(void *parent,h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +{ + h264_Status ret = H264_STATUS_ERROR; + + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + int32_t slice_type =0; + uint32_t data =0; + + do { + ///// first_mb_in_slice + SliceHeader->first_mb_in_slice = h264_GetVLCElement(parent, pInfo, false); + + ///// slice_type + slice_type = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->slice_type = (slice_type%5); + + if(SliceHeader->slice_type > h264_PtypeI) { + ret = H264_STATUS_NOTSUPPORT; + break; + } + + + ////// pic_parameter_id + data = h264_GetVLCElement(parent, pInfo, false); + if(data > MAX_PIC_PARAMS) { + ret = H264_PPS_INVALID_PIC_ID; + break; + } + SliceHeader->pic_parameter_id = (uint8_t)data; + ret = H264_STATUS_OK; + }while(0); + + return ret; +} + +/*-----------------------------------------------------------------------------------------*/ +// slice header 2 +// frame_num +// field_pic_flag, structure +// idr_pic_id +// pic_order_cnt_lsb, delta_pic_order_cnt_bottom +/*-----------------------------------------------------------------------------------------*/ + +h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +{ + h264_Status ret = H264_SliceHeader_ERROR; + + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + uint32_t code; + int32_t max_mb_num=0; + + do { + //////////////////////////////////// Slice header part 2////////////////// + + /// Frame_num + viddec_pm_get_bits(parent, &code, pInfo->active_SPS.log2_max_frame_num_minus4+4); + SliceHeader->frame_num = (int32_t)code; + + /// Picture structure + SliceHeader->structure = FRAME; + SliceHeader->field_pic_flag = 0; + SliceHeader->bottom_field_flag = 0; + + if(!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag)) + { + /// field_pic_flag + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->field_pic_flag = (uint8_t)code; + + if(SliceHeader->field_pic_flag) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->bottom_field_flag = (uint8_t)code; + + SliceHeader->structure = SliceHeader->bottom_field_flag? 
BOTTOM_FIELD: TOP_FIELD; + } + } + + ////// Check valid or not of first_mb_in_slice + if(SliceHeader->structure == FRAME) { + max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs; + } else { + max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs/2; + } + + + ///if(pInfo->img.MbaffFrameFlag) + if(pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) { + SliceHeader->first_mb_in_slice <<=1; + } + + if(SliceHeader->first_mb_in_slice >= max_mb_num) + break; + + + if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + SliceHeader->idr_pic_id = h264_GetVLCElement(parent, pInfo, false); + } + + if(pInfo->active_SPS.pic_order_cnt_type == 0) + { + viddec_pm_get_bits(parent, &code , pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4); + SliceHeader->pic_order_cnt_lsb = (uint32_t)code; + + + if((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt_bottom = h264_GetVLCElement(parent, pInfo, true); + } + else + { + SliceHeader->delta_pic_order_cnt_bottom = 0; + } + } + + if((pInfo->active_SPS.pic_order_cnt_type == 1) && !(pInfo->active_SPS.delta_pic_order_always_zero_flag)) + { + SliceHeader->delta_pic_order_cnt[0] = h264_GetVLCElement(parent, pInfo, true); + if((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt[1] = h264_GetVLCElement(parent, pInfo, true); + } + } + + if(pInfo->active_PPS.redundant_pic_cnt_present_flag) + { + SliceHeader->redundant_pic_cnt = h264_GetVLCElement(parent, pInfo, false); + if(SliceHeader->redundant_pic_cnt > 127) + break; + } else { + SliceHeader->redundant_pic_cnt = 0; + } + + ret = H264_STATUS_OK; + } while (0); + + //////////// FMO is not supported curently, so comment out the following code + //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) ) + //{ + // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile + //} + + return ret; +} + +/*-----------------------------------------------------------------------------------------*/ +// slice header 3 +// (direct_spatial_mv_pred_flag, num_ref_idx, pic_list_reorder, PWT, ref_pic_remark, alpha, beta, etc) +/*-----------------------------------------------------------------------------------------*/ + +h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +{ + h264_Status ret = H264_SliceHeader_ERROR; + + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + int32_t slice_alpha_c0_offset, slice_beta_offset; + uint32_t code; + uint32_t bits_offset =0, byte_offset =0; + uint8_t is_emul =0; + + do { + /// direct_spatial_mv_pred_flag + if(SliceHeader->slice_type == h264_PtypeB) + { + viddec_pm_get_bits(parent, &code , 1); + SliceHeader->direct_spatial_mv_pred_flag = (uint8_t)code; + } + else + { + SliceHeader->direct_spatial_mv_pred_flag = 0; + } + + // + // Reset ref_idx and Overide it if exist + // + SliceHeader->num_ref_idx_l0_active = pInfo->active_PPS.num_ref_idx_l0_active; + SliceHeader->num_ref_idx_l1_active = pInfo->active_PPS.num_ref_idx_l1_active; + + if((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP) || (SliceHeader->slice_type == h264_PtypeB)) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->num_ref_idx_active_override_flag = (uint8_t)code; + + 
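+        // When num_ref_idx_active_override_flag is set, the slice carries its own
+        // num_ref_idx_l0/l1_active_minus1 values, replacing the defaults taken
+        // from the active PPS above.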
if(SliceHeader->num_ref_idx_active_override_flag) + { + SliceHeader->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false) + 1; + if(SliceHeader->slice_type == h264_PtypeB) + { + SliceHeader->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1; + } + } + } + + if(SliceHeader->slice_type != h264_PtypeB) { + SliceHeader->num_ref_idx_l1_active = 0; + } + + if((SliceHeader->num_ref_idx_l0_active > MAX_NUM_REF_FRAMES) || (SliceHeader->num_ref_idx_l1_active > MAX_NUM_REF_FRAMES)) + { + break; + } + + if(h264_Parse_Ref_Pic_List_Reordering(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + break; + } + + + //// + //// Parse Pred_weight_table but not store it becasue it will be reparsed in HW + //// + if(((pInfo->active_PPS.weighted_pred_flag) && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) + { + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + pInfo->h264_pwt_enabled = 1; + pInfo->h264_pwt_start_byte_offset = byte_offset; + pInfo->h264_pwt_start_bit_offset = bits_offset; + + if(h264_Parse_Pred_Weight_Table(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + break; + } + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + if(0 == bits_offset) + { + pInfo->h264_pwt_end_byte_offset = byte_offset-1; + pInfo->h264_pwt_end_bit_offset = 8; + } + else + { + pInfo->h264_pwt_end_byte_offset = byte_offset; + pInfo->h264_pwt_end_bit_offset = bits_offset; + } + + } + + + + //// + //// Parse Ref_pic marking if there + //// + if(SliceHeader->nal_ref_idc != 0) + { + if(h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + break; + } + } + + if((pInfo->active_PPS.entropy_coding_mode_flag) && (SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) + { + SliceHeader->cabac_init_idc = h264_GetVLCElement(parent, pInfo, false); + } + else + { + SliceHeader->cabac_init_idc = 0; + } + + if(SliceHeader->cabac_init_idc > 2) + { + break; + } + + SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true); + if( (SliceHeader->slice_qp_delta > (25-pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26+pInfo->active_PPS.pic_init_qp_minus26))) + break; + + + if((SliceHeader->slice_type == h264_PtypeSP)|| (SliceHeader->slice_type == h264_PtypeSI) ) + { + if(SliceHeader->slice_type == h264_PtypeSP) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->sp_for_switch_flag = (uint8_t)code; + + } + SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true); + + if( (SliceHeader->slice_qs_delta > (25-pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) + break; + } + + if(pInfo->active_PPS.deblocking_filter_control_present_flag) + { + SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false); + if(SliceHeader->disable_deblocking_filter_idc != 1) + { + SliceHeader->slice_alpha_c0_offset_div2 = h264_GetVLCElement(parent, pInfo, true); + slice_alpha_c0_offset = SliceHeader->slice_alpha_c0_offset_div2 << 1; + if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12) { + break; + } + + SliceHeader->slice_beta_offset_div2 = h264_GetVLCElement(parent, pInfo, true); + slice_beta_offset = SliceHeader->slice_beta_offset_div2 << 1; + if (slice_beta_offset < -12 || slice_beta_offset > 12) { + break; + } + } + else + { + 
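+            // disable_deblocking_filter_idc == 1: the loop filter is off for this
+            // slice, so no offsets are coded and both default to zero.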
SliceHeader->slice_alpha_c0_offset_div2 = 0; + SliceHeader->slice_beta_offset_div2 = 0; + } + } + + ret = H264_STATUS_OK; + } while (0); + + //////////// FMO is not supported curently, so comment out the following code + //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) ) + //{ + // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile + //} + + return ret; +} + + +/*--------------------------------------------------------------------------------------------------*/ +// +// The syntax elements reordering_of_pic_nums_idc, abs_diff_pic_num_minus1, and long_term_pic_num +// specify the change from the initial reference picture lists to the reference picture lists to be used +// for decoding the slice + +// reordering_of_pic_nums_idc: +// 0: abs_diff_pic_num_minus1 is present and corresponds to a difference to subtract from a picture number prediction value +// 1: abs_diff_pic_num_minus1 is present and corresponds to a difference to add to a picture number prediction value +// 2: long_term_pic_num is present and specifies the long-term picture number for a reference picture +// 3: End loop for reordering of the initial reference picture list +// +/*--------------------------------------------------------------------------------------------------*/ + +h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) +{ + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + int32_t reorder= -1; + uint32_t code; + + + if((SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag = (uint8_t)code; + + if(SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag) + { + + reorder= -1; + do + { + reorder++; + + if(reorder > MAX_NUM_REF_FRAMES) + { + return H264_SliceHeader_ERROR; + } + + SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); + if((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1)) + { + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); + } + else if (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 2) + { + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); + } + + }while(SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] != 3); + } + } + + if(SliceHeader->slice_type == h264_PtypeB) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag = (uint8_t)code; + + if(SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag) + { + + reorder = -1; + do + { + reorder++; + if(reorder > MAX_NUM_REF_FRAMES) + { + return H264_SliceHeader_ERROR; + } + SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); + if((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1)) + { + SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); + } + else if (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 2) + { + 
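+                    // reordering_of_pic_nums_idc == 2: the entry names a long-term
+                    // reference directly via long_term_pic_num.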
SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); + } + }while(SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] != 3); + } + } + + //currently just two reference frames but in case mroe than two, then should use an array for the above structures that is why reorder + return H264_STATUS_OK; + +} + +#ifdef VBP +h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) +{ + uint32_t i =0, j=0; + uint32_t flag; + + SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(parent, pInfo, false); + + if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(parent,pInfo, false); + } + + for(i=0; i< SliceHeader->num_ref_idx_l0_active; i++) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.luma_weight_l0_flag = flag; + + if(SliceHeader->sh_predwttbl.luma_weight_l0_flag) + { + SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(parent, pInfo, true); + } + else + { + SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0; + } + + if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.chroma_weight_l0_flag = flag; + + if(SliceHeader->sh_predwttbl.chroma_weight_l0_flag) + { + for(j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for(j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; + } + } + } + + } + + if(SliceHeader->slice_type == h264_PtypeB) + { + for(i=0; i< SliceHeader->num_ref_idx_l1_active; i++) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.luma_weight_l1_flag = flag; + + if(SliceHeader->sh_predwttbl.luma_weight_l1_flag) + { + SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(parent, pInfo, true); + } + else + { + SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; + } + + if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.chroma_weight_l1_flag = flag; + + if(SliceHeader->sh_predwttbl.chroma_weight_l1_flag) + { + for(j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for(j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; + } + } + } + + } + } + + return H264_STATUS_OK; +} ///// End of h264_Parse_Pred_Weight_Table + +#else + +/*--------------------------------------------------------------------------------------------------*/ 
+// +// Parse Prediction weight table +// Note: This table will be reparsed in HW Accelerator, so needn't keep it in parser +// +/*--------------------------------------------------------------------------------------------------*/ + + +h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) +{ + uint32_t i =0, j=0; + uint32_t flag, val; + //h264_Slice_Header_t* SliceHeader = &pInfo->SPS.SliceHeader; + + //SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "luma_log2_weight_denom"); + val = h264_GetVLCElement(parent, pInfo, false); + + if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + //SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "chroma_log2_weight_denom"); + val = h264_GetVLCElement(parent,pInfo, false); + } + + for(i=0; i< SliceHeader->num_ref_idx_l0_active; i++) + { + //SliceHeader->sh_predwttbl.luma_weight_l0_flag = h264_GetBits(pInfo, 1, "luma_weight_l0_flag"); + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + + //if(SliceHeader->sh_predwttbl.luma_weight_l0_flag) + if(flag) + { + //SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l0"); + val = h264_GetVLCElement(parent, pInfo, true); + //SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l0"); + val = h264_GetVLCElement(parent, pInfo, true); + } + else + { + //SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + //SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0; + } + + if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + //SliceHeader->sh_predwttbl.chroma_weight_l0_flag = h264_GetBits(pInfo, 1, "chroma_weight_l0_flag"); + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + if(flag) + { + for(j=0; j <2; j++) + { + //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l0"); + val = h264_GetVLCElement(parent, pInfo, true); + //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l0"); + val = h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for(j=0; j <2; j++) + { + //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; + } + } + } + + } + + if(SliceHeader->slice_type == h264_PtypeB) + { + for(i=0; i< SliceHeader->num_ref_idx_l1_active; i++) + { + //SliceHeader->sh_predwttbl.luma_weight_l1_flag = h264_GetBits(pInfo, 1, "luma_weight_l1_flag"); + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + if(flag) + { + //SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l1"); + val = h264_GetVLCElement(parent, pInfo, true); + //SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l1"); + val = h264_GetVLCElement(parent, pInfo, true); + } + else + { + //SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + //SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; + } + + if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + //SliceHeader->sh_predwttbl.chroma_weight_l1_flag = h264_GetBits(pInfo, 1, "chroma_weight_l1_flag"); + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + if(flag) + { + for(j=0; j <2; j++) + { + //SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l1"); + val = 
h264_GetVLCElement(parent, pInfo, true);
+                    //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l1");
+                    val = h264_GetVLCElement(parent, pInfo, true);
+                }
+            }
+            else
+            {
+                for(j=0; j <2; j++)
+                {
+                    //SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom);
+                    //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0;
+                }
+            }
+        }
+
+    }
+  }
+
+  return H264_STATUS_OK;
+} ///// End of h264_Parse_Pred_Weight_Table
+
+#endif
+
+/*--------------------------------------------------------------------------------------------------*/
+// The syntax elements specify marking of the reference pictures.
+// 1)IDR:    no_output_of_prior_pics_flag,
+//           long_term_reference_flag,
+// 2)NonIDR: adaptive_ref_pic_marking_mode_flag,
+//           memory_management_control_operation,
+//           difference_of_pic_nums_minus1,
+//           long_term_frame_idx,
+//           long_term_pic_num, and
+//           max_long_term_frame_idx_plus1
+//
+// The marking of a reference picture can be "unused for reference", "used for short-term reference",
+// or "used for long-term reference", but only one among these three.
+/*--------------------------------------------------------------------------------------------------*/
+
+h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader)
+{
+    //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+    uint8_t i = 0;
+    uint32_t code = 0;
+
+    if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+    {
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_dec_refpic.no_output_of_prior_pics_flag = (uint8_t)code;
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_dec_refpic.long_term_reference_flag = (uint8_t)code;
+        pInfo->img.long_term_reference_flag = (uint8_t)code;
+    }
+    else
+    {
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag = (uint8_t)code;
+
+        ///////////////////////////////////////////////////////////////////////////////////////
+        //adaptive_ref_pic_marking_mode_flag   Reference picture marking mode specified
+        //  0   Sliding window reference picture marking mode: A marking mode
+        //      providing a first-in first-out mechanism for short-term reference pictures.
+        //  1   Adaptive reference picture marking mode: A reference picture
+        //      marking mode providing syntax elements to specify marking of
+        //      reference pictures as "unused for reference" and to assign long-term
+        //      frame indices.
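+        //
+        //  memory_management_control_operation values (H.264 clause 7.4.3.3):
+        //  1: mark a short-term picture (difference_of_pic_nums_minus1) as unused for reference
+        //  2: mark a long-term picture (long_term_pic_num) as unused for reference
+        //  3: turn a short-term picture into a long-term one with long_term_frame_idx
+        //  4: set max_long_term_frame_idx_plus1 (0 disables long-term indices)
+        //  5: clear the whole DPB and reset frame_num/POC state
+        //  6: mark the current picture as long-term with long_term_frame_idx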
+ /////////////////////////////////////////////////////////////////////////////////////// + + if(SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) + { + do + { + SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = h264_GetVLCElement(parent, pInfo, false); + if((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3)) + { + SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + } + + if(SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2) + { + SliceHeader->sh_dec_refpic.long_term_pic_num[i] = h264_GetVLCElement(parent, pInfo, false); + } + + if((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6)) + { + SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = h264_GetVLCElement(parent, pInfo, false); + } + + if(SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4) + { + SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = h264_GetVLCElement(parent, pInfo, false); + } + + if(SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5) + { + pInfo->img.curr_has_mmco_5 = 1; + } + + if(i>NUM_MMCO_OPERATIONS) { + return H264_STATUS_ERROR; + } + + }while(SliceHeader->sh_dec_refpic.memory_management_control_operation[i++] != 0); + } + } + + + + SliceHeader->sh_dec_refpic.dec_ref_pic_marking_count = i; + + return H264_STATUS_OK; +} + + + +//#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c new file mode 100644 index 0000000..29ef54d --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c @@ -0,0 +1,513 @@ +//#define H264_PARSE_SPS_C +//#ifdef H264_PARSE_SPS_C + +#include "h264.h" +#include "h264parse.h" + + +/// SPS extension unit (unit_type = 13) +/// +#if 0 +h264_Status h264_Parse_SeqParameterSet_Extension(void *parent,h264_Info * pInfo) +{ + /*h264_SPS_Extension_RBSP_t* SPS_ext = pInfo->p_active_SPS_ext; + + SPS_ext->seq_parameter_set_id = h264_GetVLCElement(pInfo, false); + if(SPS_ext->seq_parameter_set_id > MAX_SEQ_PARAMS-1) + { + return H264_SPS_ERROR; + } + SPS_ext->aux_format_idc = h264_GetVLCElement(pInfo, false); + if(SPS_ext->aux_format_idc > 3) + { + return H264_SPS_ERROR; + } + if(SPS_ext->aux_format_idc != 0) + { + SPS_ext->bit_depth_aux_minus8 = h264_GetVLCElement(pInfo, false); + if(SPS_ext->bit_depth_aux_minus8 + 8 > 12) + { + return H264_SPS_ERROR; + } + + SPS_ext->alpha_incr_flag = h264_GetBits(pInfo, 1, "alpha_incr_flag"); + if(SPS_ext->alpha_incr_flag > 1) + { + return H264_SPS_ERROR; + } + + SPS_ext->alpha_opaque_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_opaque_value"); //+8 to get the bit_depth value + SPS_ext->alpha_transparent_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_transparent_value"); //+8 to get the bit_depth value + } + SPS_ext->additional_extension_flag = h264_GetBits(pInfo, 1, "additional_extension_flag"); +*/ + return H264_STATUS_OK; +} +#endif + + +h264_Status h264_Parse_HRD_Parameters(void *parent, h264_Info* pInfo, int nal_hrd,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used) +{ + //seq_param_set_ptr SPS = pInfo->p_active_SPS; + int32_t i = 0; + uint32_t code; + + + if(nal_hrd) + { + SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 = 
h264_GetVLCElement(parent, pInfo, false); + + if(SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) + { + return H264_SPS_ERROR; + } + + viddec_pm_get_bits(parent, &code, 8); + pVUI_Seq_Not_Used->nal_hrd_bit_rate_scale = (uint8_t)(code>>4); + pVUI_Seq_Not_Used->nal_hrd_cpb_size_scale = (uint8_t)(code & 0xf); + + for(i=0; i<=SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; i++) + { + pVUI_Seq_Not_Used->nal_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->nal_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->nal_hrd_parameters.cbr_flag[i] = (uint8_t)code; + } + + if( viddec_pm_get_bits(parent, &code, 20) == -1) + return H264_SPS_ERROR; + + SPS->sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f); + SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);; + SPS->sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);; + SPS->sps_disp.vui_seq_parameters.nal_hrd_time_offset_length = (uint8_t)(code&0x1f);; + + } + else + { + SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false); + + if(SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) + { + return H264_SPS_ERROR; + } + + viddec_pm_get_bits(parent, &code, 8); + pVUI_Seq_Not_Used->vcl_hrd_bit_rate_scale = (uint8_t)(code>>4); + pVUI_Seq_Not_Used->vcl_hrd_cpb_size_scale = (uint8_t)(code&0xf); + + for(i=0; i<=SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; i++) + { + pVUI_Seq_Not_Used->vcl_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->vcl_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->vcl_hrd_parameters.cbr_flag[i] = (uint8_t)code; + } + + if( viddec_pm_get_bits(parent, &code, 20) == -1) + return H264_SPS_ERROR; + + SPS->sps_disp.vui_seq_parameters.vcl_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f); + SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);; + SPS->sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);; + SPS->sps_disp.vui_seq_parameters.vcl_hrd_time_offset_length = (uint8_t)(code&0x1f);; + } + + return H264_STATUS_OK; +} + + + +h264_Status h264_Parse_Vui_Parameters(void *parent, h264_Info* pInfo, seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used) +{ + h264_Status ret = H264_STATUS_OK; + //seq_param_set_ptr SPS = pInfo->p_active_SPS; + int32_t nal_hrd = 0; + uint32_t code; + + do { + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag = (uint8_t)code; + + + if(SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag) + { + viddec_pm_get_bits(parent, &code, 8); + SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc = (uint8_t)code; + + if(SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc == h264_AR_Extended_SAR) + { + viddec_pm_get_bits(parent, &code, 16); + SPS->sps_disp.vui_seq_parameters.sar_width = (uint16_t)code; + + viddec_pm_get_bits(parent, &code, 16); + SPS->sps_disp.vui_seq_parameters.sar_height = (uint16_t)code; + + } + } + + viddec_pm_get_bits(parent, &code, 1); + 
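+        // overscan_info_present_flag; when set, overscan_appropriate_flag follows
+        // and indicates whether the picture is suited for display with overscan cropping.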
pVUI_Seq_Not_Used->overscan_info_present_flag = (uint8_t)code; + + if(pVUI_Seq_Not_Used->overscan_info_present_flag) + { + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->overscan_appropriate_flag = (uint8_t)code; + } + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag = (uint8_t)code; + + if(SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag) + { + viddec_pm_get_bits(parent, &code, 3); + SPS->sps_disp.vui_seq_parameters.video_format = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->video_full_range_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.colour_description_present_flag = (uint8_t)code; + + if(SPS->sps_disp.vui_seq_parameters.colour_description_present_flag) + { + viddec_pm_get_bits(parent, &code, 8); + SPS->sps_disp.vui_seq_parameters.colour_primaries = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 8); + SPS->sps_disp.vui_seq_parameters.transfer_characteristics = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 8); + pVUI_Seq_Not_Used->matrix_coefficients = (uint8_t)code; + } + } + + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->chroma_location_info_present_flag = (uint8_t)code; + + if(pVUI_Seq_Not_Used->chroma_location_info_present_flag) + { + pVUI_Seq_Not_Used->chroma_sample_loc_type_top_field = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->chroma_sample_loc_type_bottom_field = h264_GetVLCElement(parent, pInfo, false); + } + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.timing_info_present_flag = (uint8_t)code; + + if(SPS->sps_disp.vui_seq_parameters.timing_info_present_flag == 1) + { + viddec_pm_get_bits(parent, &code, 32); + SPS->sps_disp.vui_seq_parameters.num_units_in_tick = (uint32_t)code; + + viddec_pm_get_bits(parent, &code, 32); + SPS->sps_disp.vui_seq_parameters.time_scale = (uint32_t)code; + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.fixed_frame_rate_flag = (uint8_t)code; + } + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag = (uint8_t)code; + + if(SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) + { + nal_hrd = 1; + ret |= h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used); + } + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag = (uint8_t)code; + + if(SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1) + { + nal_hrd = 0; + ret |= h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used); + } + + if((SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) || (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)) + { + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.low_delay_hrd_flag = (uint8_t)code; + } + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.pic_struct_present_flag = (uint8_t)code; + + if(viddec_pm_get_bits(parent, &code, 1) == -1) { + ret = H264_STATUS_ERROR; + break; + } + SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag = (uint8_t)code; + + if(SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag) + { + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->motion_vectors_over_pic_boundaries_flag = (uint8_t)code; + + pVUI_Seq_Not_Used->max_bytes_per_pic_denom = h264_GetVLCElement(parent, pInfo, false); 
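+            // All of the ue(v)/se(v) fields in this parser go through h264_GetVLCElement.
+            // A minimal sketch of the underlying Exp-Golomb decode, assuming a
+            // hypothetical read_bit()/read_bits() reader (illustrative only, not this
+            // project's API):
+#if 0
+            static int32_t expgolomb_read(struct bitreader *br, int is_signed)
+            {
+                int leading_zeros = 0;
+                uint32_t code;
+
+                while (read_bit(br) == 0)        /* count zeros up to the marker '1' bit */
+                    leading_zeros++;
+
+                code = read_bits(br, leading_zeros);   /* suffix bits */
+                code += (1 << leading_zeros) - 1;      /* ue(v) value */
+
+                if (is_signed)                         /* se(v) maps 0,1,2,3,... to 0,1,-1,2,... */
+                    return (code & 1) ? (int32_t)((code + 1) >> 1) : -(int32_t)(code >> 1);
+                return (int32_t)code;
+            }
+#endif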
+ pVUI_Seq_Not_Used->max_bits_per_mb_denom = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->log2_max_mv_length_horizontal = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->log2_max_mv_length_vertical = h264_GetVLCElement(parent, pInfo, false); + SPS->sps_disp.vui_seq_parameters.num_reorder_frames = h264_GetVLCElement(parent, pInfo, false); + SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering = h264_GetVLCElement(parent, pInfo, false); + + if(SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering == MAX_INT32_VALUE) + ret = H264_STATUS_ERROR; + } + }while (0); + + return ret; +} + + +h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame) +{ + h264_Status ret = H264_SPS_ERROR; + + int32_t i = 0, tmp = 0; + int32_t PicWidthInMbs, PicHeightInMapUnits, FrameHeightInMbs; + uint32_t code = 0; + uint32_t data = 0; + + //SPS->profile_idc = h264_GetBits(pInfo, 8, "Profile"); + viddec_pm_get_bits(parent, &code, 8); + SPS->profile_idc = (uint8_t)code; + + switch(SPS->profile_idc) + { + case h264_ProfileBaseline: + case h264_ProfileMain: + case h264_ProfileExtended: + case h264_ProfileHigh10: + case h264_ProfileHigh422: + case h264_ProfileHigh444: + case h264_ProfileHigh: + break; + default: + return H264_SPS_INVALID_PROFILE; + break; + } + + //SPS->constraint_set0_flag = h264_GetBits(pInfo, 1, "constraint_set0_flag"); + //SPS->constraint_set1_flag = h264_GetBits(pInfo, 1, "constraint_set1_flag"); //should be 1 + //SPS->constraint_set2_flag = h264_GetBits(pInfo, 1, "constraint_set2_flag"); + //SPS->constraint_set3_flag = h264_GetBits(pInfo, 1, "constraint_set3_flag"); + + viddec_pm_get_bits(parent, &code, 4); + SPS->constraint_set_flags = (uint8_t)code; + + //// reserved_zero_4bits + viddec_pm_get_bits(parent, (uint32_t *)&code, 4); + + viddec_pm_get_bits(parent, &code, 8); + SPS->level_idc = (uint8_t)code; + + switch(SPS->level_idc) + { + case h264_Level1b: + case h264_Level1: + case h264_Level11: + case h264_Level12: + case h264_Level13: + case h264_Level2: + case h264_Level21: + case h264_Level22: + case h264_Level3: + case h264_Level31: + case h264_Level32: + case h264_Level4: + case h264_Level41: + case h264_Level42: + case h264_Level5: + case h264_Level51: + break; + default: + return H264_SPS_INVALID_LEVEL; + } + + do { + SPS->seq_parameter_set_id = h264_GetVLCElement(parent, pInfo, false); + + //// seq_parameter_set_id ---[0,31] + if(SPS->seq_parameter_set_id > MAX_NUM_SPS -1) + break; + + if((SPS->profile_idc == h264_ProfileHigh) || (SPS->profile_idc == h264_ProfileHigh10) || + (SPS->profile_idc == h264_ProfileHigh422) || (SPS->profile_idc == h264_ProfileHigh444) ) + { + //// chroma_format_idc ---[0,3], currently we don't support 444, so [0,2] + data = h264_GetVLCElement(parent, pInfo, false); + if( data > H264_CHROMA_422) + break; + SPS->sps_disp.chroma_format_idc = (uint8_t)data; + //if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) {} + + //// bit_depth_luma_minus8 ---[0,4], -----only support 8-bit pixel + data = h264_GetVLCElement(parent, pInfo, false); + if( data) + break; + SPS->bit_depth_luma_minus8 = (uint8_t)data; + + //// bit_depth_chroma_minus8 ---[0,4] + data = h264_GetVLCElement(parent, pInfo, false); + if( data ) + break; + SPS->bit_depth_chroma_minus8 = (uint8_t)data; + + + viddec_pm_get_bits(parent, &code, 1); + SPS->lossless_qpprime_y_zero_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 1); + 
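+        // seq_scaling_matrix_present_flag: when set, one present flag per list
+        // follows; each list that is present is parsed with h264_Scaling_List,
+        // the rest keep the spec's fall-back scaling lists.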
SPS->seq_scaling_matrix_present_flag = (uint8_t)code;
+
+        if(SPS->seq_scaling_matrix_present_flag == 1)
+        {
+            //int n_ScalingList = (SPS->sps_disp.chroma_format_idc != H264_CHROMA_444) ? 8 : 12;
+            int n_ScalingList = 8;          /// We do not support 444 currently
+
+            for(i=0; i<n_ScalingList; i++)
+            {
+                viddec_pm_get_bits(parent, &code, 1);
+                SPS->seq_scaling_list_present_flag[i] = (uint8_t)code;
+
+                if(SPS->seq_scaling_list_present_flag[i])
+                {
+                    if(i<6)
+                        h264_Scaling_List(parent, SPS->ScalingList4x4[i], 16, &SPS->UseDefaultScalingMatrix4x4Flag[i], pInfo);
+                    else
+                        h264_Scaling_List(parent, SPS->ScalingList8x8[i-6], 64, &SPS->UseDefaultScalingMatrix8x8Flag[i-6], pInfo);
+                }
+            }
+        }
+    }
+    else
+    {
+        SPS->sps_disp.chroma_format_idc = 1;
+        SPS->seq_scaling_matrix_present_flag = 0;
+
+        SPS->bit_depth_luma_minus8 = 0;
+        SPS->bit_depth_chroma_minus8 = 0;
+        //h264_SetDefaultScalingLists(pInfo);
+    }
+
+    //// log2_max_frame_num_minus4 ---[0,12]
+    data = (h264_GetVLCElement(parent, pInfo, false));
+    if( data > 12)
+        break;
+    SPS->log2_max_frame_num_minus4 = (uint8_t)data;
+
+    //// pic_order_cnt_type ---- [0,2]
+    data = h264_GetVLCElement(parent, pInfo, false);
+    if( data > 2)
+        break;
+    SPS->pic_order_cnt_type = (uint8_t)data;
+
+    SPS->expectedDeltaPerPOCCycle = 0;
+    if(SPS->pic_order_cnt_type == 0) {
+        SPS->log2_max_pic_order_cnt_lsb_minus4 = h264_GetVLCElement(parent, pInfo, false);
+    } else if(SPS->pic_order_cnt_type == 1){
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->delta_pic_order_always_zero_flag = (uint8_t)code;
+
+        SPS->offset_for_non_ref_pic = h264_GetVLCElement(parent, pInfo, true);
+        SPS->offset_for_top_to_bottom_field = h264_GetVLCElement(parent, pInfo, true);
+
+        //// num_ref_frames_in_pic_order_cnt_cycle ---- [0,255]
+        data = h264_GetVLCElement(parent, pInfo, false);
+        if( data > 255)
+            break;
+        SPS->num_ref_frames_in_pic_order_cnt_cycle = (uint8_t)data;
+
+        //Alloc memory for frame offset -- FIXME
+        for(i=0; i< SPS->num_ref_frames_in_pic_order_cnt_cycle; i++)
+        {
+            /////SPS->offset_for_ref_frame[i] could be removed from SPS
+#ifndef USER_MODE
+            tmp = h264_GetVLCElement(parent, pInfo, true);
+            pOffset_ref_frame[i]=tmp;
+            SPS->expectedDeltaPerPOCCycle += tmp;
+#else
+            tmp = h264_GetVLCElement(parent, pInfo, true);
+            SPS->offset_for_ref_frame[i]=tmp;
+            SPS->expectedDeltaPerPOCCycle += tmp;
+#endif
+        }
+    }
+
+    //// num_ref_frames ---[0,16]
+    data = h264_GetVLCElement(parent, pInfo, false);
+    if( data > 16)
+        break;
+    SPS->num_ref_frames = (uint8_t)data;
+
+    viddec_pm_get_bits(parent, &code, 1);
+    SPS->gaps_in_frame_num_value_allowed_flag = (uint8_t)code;
+
+    SPS->sps_disp.pic_width_in_mbs_minus1 = h264_GetVLCElement(parent, pInfo, false);
+    SPS->sps_disp.pic_height_in_map_units_minus1 = h264_GetVLCElement(parent, pInfo, false);
+    viddec_pm_get_bits(parent, &code, 1);
+    SPS->sps_disp.frame_mbs_only_flag = (uint8_t)code;
+
+    /// err check for size
+    PicWidthInMbs = (SPS->sps_disp.pic_width_in_mbs_minus1 + 1);
+    PicHeightInMapUnits = (SPS->sps_disp.pic_height_in_map_units_minus1 + 1);
+    FrameHeightInMbs = SPS->sps_disp.frame_mbs_only_flag? PicHeightInMapUnits: (PicHeightInMapUnits<<1);
+    if((PicWidthInMbs < 2) || (PicWidthInMbs > 128) || (FrameHeightInMbs < 2) || (FrameHeightInMbs>128))
+        break;
+
+    if(!SPS->sps_disp.frame_mbs_only_flag)
+    {
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.mb_adaptive_frame_field_flag = (uint8_t)code;
+    }
+
+    //SPS->frame_height_in_mbs = (2-SPS->sps_disp.frame_mbs_only_flag)*(SPS->sps_disp.pic_height_in_map_units_minus1+1);
+    //SPS->pic_size_in_map_units = (SPS->sps_disp.pic_width_in_mbs_minus1+1)*SPS->sps_disp.frame_height_in_mbs;
+
+    viddec_pm_get_bits(parent, &code, 1);
+    SPS->sps_disp.direct_8x8_inference_flag = (uint8_t)code;
+
+    viddec_pm_get_bits(parent, &code, 1);
+    SPS->sps_disp.frame_cropping_flag = (uint8_t)code;
+
+    if(SPS->sps_disp.frame_cropping_flag)
+    {
+        SPS->sps_disp.frame_crop_rect_left_offset = h264_GetVLCElement(parent, pInfo, false);
+        SPS->sps_disp.frame_crop_rect_right_offset = h264_GetVLCElement(parent, pInfo, false);
+        SPS->sps_disp.frame_crop_rect_top_offset = h264_GetVLCElement(parent, pInfo, false);
+        SPS->sps_disp.frame_crop_rect_bottom_offset = h264_GetVLCElement(parent, pInfo, false);
+    }
+
+    //// when frame_mbs_only_flag is equal to 0, direct_8x8_inference_flag shall be equal to 1
+    if (SPS->sps_disp.frame_mbs_only_flag == 0 && SPS->sps_disp.direct_8x8_inference_flag == 0){
+        break;
+    }
+
+    ////// vui_parameters
+    if(viddec_pm_get_bits(parent, &code, 1) == -1)
+        break;
+    SPS->sps_disp.vui_parameters_present_flag = (uint8_t)code;
+    ret = H264_STATUS_OK;
+
+    if(SPS->sps_disp.vui_parameters_present_flag)
+    {
+#ifndef VBP // Ignore VUI parsing result
+        ret =
+#endif
+        h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used);
+    }
+
+    }while(0);
+
+    //h264_Parse_rbsp_trailing_bits(pInfo);
+
+    return ret;
+}
+
+//#endif
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c
new file mode 100644
index 0000000..87959f3
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c
@@ -0,0 +1,575 @@
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "h264.h"
+#include "h264parse.h"
+#include "viddec_fw_item_types.h"
+#include "h264parse_dpb.h"
+#include <glib.h>
+
+extern void* h264_memcpy( void* dest, void* src, uint32_t num );
+
+uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap)
+{
+    if (swap != 0)
+    {
+        g_warning("swap copying is not implemented.");
+    }
+
+    if (to_ddr)
+    {
+        memcpy((void*)ddr_addr, (void*)local_addr, size);
+    }
+    else
+    {
+        memcpy((void*)local_addr, (void*)ddr_addr, size);
+    }
+
+    return (0);
+}
+
+#if 0
+void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo )
+{
+    if(pInfo->Is_first_frame_in_stream) //new stream, fill new frame in cur
+    {
+        pInfo->img.g_new_frame = 0;
+        pInfo->Is_first_frame_in_stream =0;
+        pInfo->push_to_cur = 1;
+    }
+    else // move to next for new frame
+    {
+        pInfo->push_to_cur = 0;
+    }
+
+    //fill dpb management info
+
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+    pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+}
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
+{
+    ////
+    //// Now we can flush out all frames in DPB for display
+    if(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].is_used != 3)
+    {
+        h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc);  //, DANGLING_TYPE_GAP_IN_FRAME
+    }
+
+    h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
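+    // Dangling field closed and the last picture stored above; flushing the
+    // DPB now emits every remaining frame for display.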
+    h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+}
+
+void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo )
+{
+    pInfo->qm_present_list=0;
+}
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{
+#if 1
+    uint32_t i, nitems=0;
+
+    if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+    {
+        if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+            for(i=0; i<nitems; i++)
+            {
+                if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+        else
+        {
+            nitems = pInfo->dpb.listXsize[0];
+
+            for(i=0; i<nitems; i++)
+            {
+                if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+    }
+    else
+    {
+        nitems =0;
+    }
+#endif
+}
+#else
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{
+    viddec_workload_item_t wi;
+    h264_slice_data slice_data;
+
+    uint32_t i=0, nitems=0, data=0;
+    uint32_t bits_offset =0, byte_offset =0;
+    uint8_t is_emul =0;
+
+    ////////////////////// Update Reference list //////////////////
+    if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+    {
+        if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+            for(i=0; i<nitems; i++)
+            {
+                if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+        else
+        {
+            nitems = pInfo->dpb.listXsize[0];
+
+            for(i=0; i<nitems; i++)
+            {
+                if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+    }
+    else
+    {
+        nitems =0;
+    }
+
+    ///// fill ref list 0
+    // h264_parse_emit_ref_list(parent, pInfo, 0);
+
+    ///// fill ref list 1
+    //h264_parse_emit_ref_list(parent, pInfo, 1);
+
+    ///////////////////////////////////// Slice Data ////////////////////////////////
+    // h264_fill_slice_data(pInfo, &slice_data);
+
+    wi.vwi_type = VIDDEC_WORKLOAD_H264_SLICE_REG;
+
+    wi.data.data_offset = slice_data.h264_bsd_slice_start;
+    wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1;
+    wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2;
+
+    if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+    {
+        // viddec_pm_append_workitem( parent , &wi);
+    }
+    else
+    {
+        // viddec_pm_append_workitem_next( parent , &wi);
+    }
+
+    /////////////////////////// predict weight table item and data, if present ///////////////////////////
+    if(pInfo->h264_pwt_enabled)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET;
+        wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1;
+        wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset;
+        wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            // viddec_pm_append_workitem( parent , &wi);
+
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+            wi.es.es_flags = 0;
+            // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1);
+        }
+        else
+        {
+            // viddec_pm_append_workitem_next( parent , &wi);
+
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+            wi.es.es_flags = 0;
+            // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0);
+        }
+    }
+
+    ////////////////////////////////// Update ES Buffer for Slice ///////////////////////
+    viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+    //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset);
+
+    if(pInfo->active_PPS.entropy_coding_mode_flag)
+    {
+        if(0!=bits_offset) {
+            data = data; // fix compilation warning
+            // don't skip byte-aligned bits as those bits are actually
+            // part of slice_data
+            //viddec_pm_get_bits(parent, &data, 8-bits_offset);
+        }
+    }
+    else
+    {
+        if(0!=bits_offset) {
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET;
+            wi.data.data_offset = bits_offset;
+            wi.data.data_payload[0]=0;
+            wi.data.data_payload[1]=0;
+
+            if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+                // viddec_pm_append_workitem( parent , &wi);
+            }
+            else {
+                //viddec_pm_append_workitem_next( parent , &wi);
+            }
+        }
+    }
+
+    if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+    {
+        //viddec_pm_append_pixeldata( parent );
+    }
+    else
+    {
+        //viddec_pm_append_pixeldata_next( parent);
+    }
+
+    return;
+}
+
+void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo )
+{
+    viddec_workload_item_t wi;
+
+    const uint32_t *pl;
+    uint32_t i=0,nitems=0;
+
+    h264_pic_data pic_data;
+
+    pInfo->qm_present_list=0;
+
+    //h264_parse_emit_4X4_scaling_matrix(parent, pInfo);
+    // h264_parse_emit_8X8_scaling_matrix(parent, pInfo);
+
+    // h264_fill_pic_data(pInfo, &pic_data);
+
+    // How many payloads must be generated
+    nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up
+
+    pl = (const uint32_t *) &pic_data;
+
+    // Dump pic data to an array of workitems (TODO: pl access of non-valid mem)
+    for( i = 0; i < nitems; i++ )
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_PIC_REG;
+        wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct
+        wi.data.data_payload[0] = pl[0];
+        wi.data.data_payload[1] = pl[1];
+        pl += 2;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            // viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            //viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+
+    return;
+}
+
+void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo )
+{
+    viddec_workload_item_t wi;
+    uint32_t i=0,nitems=0;
+
+    ///////////////////////// Frame attributes//////////////////////////
+
+    //Push data into current workload if first frame or frame_boundary already detected by non slice nal
+    if( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal))
+    {
+        //viddec_workload_t *wl_cur = viddec_pm_get_header( parent );
+        //pInfo->img.g_new_frame = 0;
+        pInfo->Is_first_frame_in_stream =0;
+        pInfo->is_frame_boundary_detected_by_non_slice_nal=0;
+        pInfo->push_to_cur = 1;
+        //h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo);
+    }
+    else // move to cur if frame boundary detected by previous non slice nal, or move to next if not
+    {
+        //viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+
+        pInfo->push_to_cur = 0;
+        //h264_translate_parser_info_to_frame_attributes(wl_next, pInfo);
+
+        pInfo->is_current_workload_done=1;
+    }
+
+    ///////////////////// SPS/////////////////////
+    // h264_parse_emit_sps(parent, pInfo);
+
+    /////////////////////display frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for(i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
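+        // The display-frame id appears to be folded into the workitem type
+        // itself; reference_id below repeats it for the workload consumer.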
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            // viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            // viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for(i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            //viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            // viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    /////////////////////flush frames (do not display)/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_dropped;
+
+    for(i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 + pInfo->dpb.frame_id_need_to_be_dropped[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            //viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            // viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_dropped =0;
+
+    /////////////////////update DPB frames/////////////////////
+    nitems = pInfo->dpb.used_size;
+    for(i=0; i<nitems; i++)
+    {
+        uint32_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id;
+            wi.ref_frame.reference_id = fs_id;
+            wi.ref_frame.luma_phys_addr = 0;
+            wi.ref_frame.chroma_phys_addr = 0;
+
+            if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+            {
+                // viddec_pm_append_workitem( parent, &wi );
+            }
+            else
+            {
+                //viddec_pm_append_workitem_next( parent, &wi );
+            }
+        }
+    }
+
+    /////////////////////update dpb frames info (poc)/////////////////////
+    nitems = pInfo->dpb.used_size;
+    for(i=0; i<nitems; i++)
+    {
+        uint32_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_DPB_FRAME_POC;
+            wi.data.data_offset = fs_id;
+            //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc);
+
+            switch(viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id])))
+            {
+                case (FRAME):{
+                    wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                    wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                    break;
+                };
+
+                case (TOP_FIELD):{
+                    wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                    wi.data.data_payload[1] = 0;
+                    break;
+                };
+
+                case (BOTTOM_FIELD):{
+                    wi.data.data_payload[0] = 0;
+                    wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                    break;
+                };
+
+                default : {
+                    wi.data.data_payload[0] = 0;
+                    wi.data.data_payload[1] = 0;
+                    break;
+                };
+            }
+
+            if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+            {
+                // viddec_pm_append_workitem( parent, &wi );
+            }
+            else
+            {
+                //viddec_pm_append_workitem_next( parent, &wi );
+            }
+        }
+    }
+
+    /////////////////////Alloc buffer for current Existing frame/////////////////////
+    if(0!=pInfo->dpb.frame_numbers_need_to_be_allocated)
+    {
+        if(pInfo->push_to_cur)
+        {
+            // viddec_workload_t *wl_cur = viddec_pm_get_header (parent);
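+            // For an "existing" frame that still needs physical storage, the
+            // commented-out path ORs its id into is_reference_frame together
+            // with WORKLOAD_REFERENCE_FRAME so the decode side can allocate a buffer.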
+            // wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+        else
+        {
+            // viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+            //wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+    return;
+}
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
+{
+    uint32_t nitems=0, i=0;
+    viddec_workload_item_t wi;
+
+    ////
+    //// Now we can flush out all frames in DPB for display
+    if(viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3)
+    {
+        h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc);  //, DANGLING_TYPE_GAP_IN_FRAME
+    }
+
+    h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+    h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+    /////////////////////display frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for(i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            //viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            //viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for(i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            // viddec_pm_append_workitem( parent, &wi );
+            viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+        }
+        else
+        {
+            // viddec_pm_append_workitem_next( parent, &wi );
+            viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    return;
+}
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c
new file mode 100644
index 0000000..9388d81
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c
@@ -0,0 +1,559 @@
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+
+#include "h264.h"
+#include "h264parse.h"
+
+#include "viddec_h264_parse.h"
+#include "h264parse_dpb.h"
+
+/* Init function which can be called to initialize local context on open, flush and preserve */
+#ifdef VBP
+void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#else
+static void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#endif
+{
+    struct h264_viddec_parser* parser = ctxt;
+    h264_Info * pInfo = &(parser->info);
+
+    if(!preserve)
+    {
+        /* we don't initialize this data if we want to preserve
+           sequence and gop information */
+        h264_init_sps_pps(parser,persist_mem);
+    }
+    /* picture level info which will always be initialized */
+    h264_init_Info_under_sps_pps_level(pInfo);
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/*
------------------------------------------------------------------------------------------ */ +#ifdef VBP +uint32_t viddec_h264_parse(void *parent, void *ctxt) +#else +static uint32_t viddec_h264_parse(void *parent, void *ctxt) +#endif +{ + struct h264_viddec_parser* parser = ctxt; + + h264_Info * pInfo = &(parser->info); + + h264_Status status = H264_STATUS_ERROR; + + + uint8_t nal_ref_idc = 0; + + ///// Parse NAL Unit header + pInfo->img.g_new_frame = 0; + pInfo->push_to_cur = 1; + pInfo->is_current_workload_done =0; + pInfo->nal_unit_type = 0; + + h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); + + ///// Check frame bounday for non-vcl elimitter + h264_check_previous_frame_end(pInfo); + + //OS_INFO("========================nal_type: %d=================\n", pInfo->nal_unit_type); + //DEBUG_WRITE(pInfo->nal_unit_type, pInfo->got_start, pInfo->wl_err_flag, pInfo->is_current_workload_done, 0, 0); +#if 0 + devh_SVEN_WriteModuleEvent( NULL, + SVEN_MODULE_EVENT_GV_FW_PARSER_DEBUG_P0, + pInfo->got_start,pInfo->nal_unit_type, pInfo->wl_err_curr, pInfo->is_current_workload_done, 0, pInfo->img.frame_num); +#endif + + //////// Parse valid NAL unit + switch ( pInfo->nal_unit_type ) + { + case h264_NAL_UNIT_TYPE_IDR: + if(pInfo->got_start) { + pInfo->img.recovery_point_found |= 1; + } + + pInfo->sei_rp_received = 0; + + case h264_NAL_UNIT_TYPE_SLICE: + //////////////////////////////////////////////////////////////////////////// + // Step 1: Check start point + //////////////////////////////////////////////////////////////////////////// + // + /// Slice parsing must start from the valid start point( SPS, PPS, IDR or recovery point or primary_I) + /// 1) No start point reached, append current ES buffer to workload and release it + /// 2) else, start parsing + // + //if(pInfo->got_start && ((pInfo->sei_information.recovery_point) || (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR))) + //{ + //pInfo->img.recovery_point_found = 1; + //} + { + + h264_Slice_Header_t next_SliceHeader; + + /// Reset next slice header + h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t)); + next_SliceHeader.nal_ref_idc = nal_ref_idc; + + if( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start)) + { + pInfo->img.recovery_point_found |=4; + } + pInfo->primary_pic_type_plus_one = 0; + + + + if(pInfo->img.recovery_point_found == 0) { + pInfo->img.structure = FRAME; + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + break; + } + + //////////////////////////////////////////////////////////////////////////// + // Step 2: Parsing slice header + //////////////////////////////////////////////////////////////////////////// + /// PWT + pInfo->h264_pwt_start_byte_offset=0; + pInfo->h264_pwt_start_bit_offset=0; + pInfo->h264_pwt_end_byte_offset=0; + pInfo->h264_pwt_end_bit_offset=0; + pInfo->h264_pwt_enabled =0; + /// IDR flag + next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); + + + /// Pass slice header + status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP(parent, pInfo, &next_SliceHeader); + + pInfo->sei_information.recovery_point = 0; + + if(next_SliceHeader.sh_error & 3) { + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + + // Error type definition, refer to viddec_fw_common_defs.h + // if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17) + // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18) + // if this is frame based, both 2 bits should be set + pInfo->wl_err_curr |= 
(FRAME << FIELD_ERR_OFFSET); + + break; + } + pInfo->img.current_slice_num++; + + +#ifdef DUMP_HEADER_INFO +dump_slice_header(pInfo, &next_SliceHeader); +////h264_print_decoder_values(pInfo); +#endif + + + //////////////////////////////////////////////////////////////////////////// + // Step 3: Processing if new picture coming + // 1) if it's the second field + // 2) if it's a new frame + //////////////////////////////////////////////////////////////////////////// + //AssignQuantParam(pInfo); + if(h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader)) + { + // + ///----------------- New Picture.boundary detected-------------------- + // + pInfo->img.g_new_pic++; + + // + // Complete previous picture + h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old + //h264_hdr_post_poc(0, 0, use_old); + + // + // Update slice structures: + h264_update_old_slice(pInfo, next_SliceHeader); //cur->old; next->cur; + + // + // 1) if resolution change: reset dpb + // 2) else: init frame store + h264_update_img_info(pInfo); //img, dpb + + // + ///----------------- New frame.boundary detected-------------------- + // + pInfo->img.second_field = h264_is_second_field(pInfo); + if(pInfo->img.second_field == 0) + { + pInfo->img.g_new_frame = 1; + h264_dpb_update_queue_dangling_field(pInfo); + + // + /// DPB management + /// 1) check the gaps + /// 2) assign fs for non-exist frames + /// 3) fill the gaps + /// 4) store frame into DPB if ... + // + //if(pInfo->SliceHeader.redundant_pic_cnt) + { + h264_dpb_gaps_in_frame_num_mem_management(pInfo); + } + +#ifdef DUMP_HEADER_INFO + dump_new_picture_attr(pInfo, pInfo->SliceHeader.frame_num); +#endif + } + // + /// Decoding POC + h264_hdr_decoding_poc (pInfo, 0, 0); + + // + /// Init Frame Store for next frame + h264_dpb_init_frame_store (pInfo); + pInfo->img.current_slice_num = 1; + + if(pInfo->SliceHeader.first_mb_in_slice != 0) + { + ////Come here means we have slice lost at the beginning, since no FMO support + pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17); + } + + // + /// Emit out the New Frame + if(pInfo->img.g_new_frame) + { + h264_parse_emit_start_new_frame(parent, pInfo); + } + + h264_parse_emit_current_pic(parent, pInfo); + } + else ///////////////////////////////////////////////////// If Not a picture start + { + // + /// Update slice structures: cur->old; next->cur; + h264_update_old_slice(pInfo, next_SliceHeader); + + // + /// 1) if resolution change: reset dpb + /// 2) else: update img info + h264_update_img_info(pInfo); + } + + + ////////////////////////////////////////////////////////////// + // Step 4: DPB reference list init and reordering + ////////////////////////////////////////////////////////////// + + //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field + h264_update_frame_type(pInfo); + + + h264_dpb_update_ref_lists( pInfo); + +#ifdef DUMP_HEADER_INFO + dump_ref_list(pInfo); +#endif + /// Emit out the current "good" slice + h264_parse_emit_current_slice(parent, pInfo); + + } + break; + + ///// * Main profile doesn't support Data Partition, skipped.... 
*//// + case h264_NAL_UNIT_TYPE_DPA: + case h264_NAL_UNIT_TYPE_DPB: + case h264_NAL_UNIT_TYPE_DPC: + //OS_INFO("***********************DP feature, not supported currently*******************\n"); + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + status = H264_STATUS_NOTSUPPORT; + break; + + //// * Parsing SEI info *//// + case h264_NAL_UNIT_TYPE_SEI: + status = H264_STATUS_OK; + + //OS_INFO("*****************************SEI**************************************\n"); + if(pInfo->sps_valid){ + //h264_user_data_t user_data; /// Replace with tmp buffer while porting to FW + pInfo->number_of_first_au_info_nal_before_first_slice++; + /// parsing the SEI info + status = h264_Parse_Supplemental_Enhancement_Information_Message(parent, pInfo); + } + + //h264_rbsp_trailing_bits(pInfo); + break; + case h264_NAL_UNIT_TYPE_SPS: + { + //OS_INFO("*****************************SPS**************************************\n"); + /// + /// Can not define local SPS since the Current local stack size limitation! + /// Could be changed after the limitation gone + /// + uint8_t old_sps_id=0; + vui_seq_parameters_t_not_used vui_seq_not_used; + + old_sps_id = pInfo->active_SPS.seq_parameter_set_id; + h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used)); + + + status = h264_Parse_SeqParameterSet(parent, pInfo, &(pInfo->active_SPS), &vui_seq_not_used, (int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL); + if(status == H264_STATUS_OK) { + h264_Parse_Copy_Sps_To_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_SPS.seq_parameter_set_id); + pInfo->sps_valid = 1; + + if(1==pInfo->active_SPS.pic_order_cnt_type) { + h264_Parse_Copy_Offset_Ref_Frames_To_DDR(pInfo,(int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL,pInfo->active_SPS.seq_parameter_set_id); + } + +#ifdef DUMP_HEADER_INFO + dump_sps(&(pInfo->active_SPS)); +#endif + + } + ///// Restore the active SPS if new arrival's id changed + if(old_sps_id>=MAX_NUM_SPS) { + h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used)); + pInfo->active_SPS.seq_parameter_set_id = 0xff; + } + else { + if(old_sps_id!=pInfo->active_SPS.seq_parameter_set_id) { + h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); + } + else { + //h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set)); + pInfo->active_SPS.seq_parameter_set_id = 0xff; + } + } + + pInfo->number_of_first_au_info_nal_before_first_slice++; + } + break; + case h264_NAL_UNIT_TYPE_PPS: + { + //OS_INFO("*****************************PPS**************************************\n"); + + uint32_t old_sps_id = pInfo->active_SPS.seq_parameter_set_id; + uint32_t old_pps_id = pInfo->active_PPS.pic_parameter_set_id; + + h264_memset(&pInfo->active_PPS, 0x0, sizeof(pic_param_set)); + pInfo->number_of_first_au_info_nal_before_first_slice++; + + if (h264_Parse_PicParameterSet(parent, pInfo, &pInfo->active_PPS)== H264_STATUS_OK) + { + h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_PPS.seq_parameter_set_id); + if(old_sps_id != pInfo->active_SPS.seq_parameter_set_id) + { + pInfo->Is_SPS_updated = 1; + } + if(pInfo->active_SPS.seq_parameter_set_id != 0xff) { + h264_Parse_Copy_Pps_To_DDR(pInfo, &pInfo->active_PPS, pInfo->active_PPS.pic_parameter_set_id); + pInfo->got_start = 1; + if(pInfo->sei_information.recovery_point) + { + pInfo->img.recovery_point_found |= 2; + + //// Enable the RP recovery if no IDR ---Cisco + if((pInfo->img.recovery_point_found & 1)==0) + pInfo->sei_rp_received = 1; + } + } + else + { + h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); + 
+            }
+            #ifdef DUMP_HEADER_INFO
+            dump_pps(&(pInfo->active_PPS));
+            #endif
+        } else {
+            if(old_sps_id<MAX_NUM_SPS)
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            if(old_pps_id<MAX_NUM_PPS)
+                h264_Parse_Copy_Pps_From_DDR(pInfo, &(pInfo->active_PPS), old_pps_id);
+        }
+
+    } //// End of PPS parsing
+    break;
+
+
+    case h264_NAL_UNIT_TYPE_EOSeq:
+    case h264_NAL_UNIT_TYPE_EOstream:
+
+        h264_parse_emit_eos(parent, pInfo);
+        h264_init_dpb(&(pInfo->dpb));
+
+        /* picture level info which will always be initialized */
+        //h264_init_Info_under_sps_pps_level(pInfo);
+
+        ////reset the pInfo here
+        //viddec_h264_init(ctxt, (uint32_t *)parser->sps_pps_ddr_paddr, false);
+
+
+        status = H264_STATUS_OK;
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+        break;
+
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+#if 1
+        ///// primary_pic_type
+        {
+            uint32_t code = 0xff;
+            int32_t ret = 0;
+            ret = viddec_pm_get_bits(parent, (uint32_t *)&(code), 3);
+
+            if(ret != -1) {
+                //if(pInfo->got_start && (code == 0))
+                //{
+                //pInfo->img.recovery_point_found |= 4;
+                //}
+                pInfo->primary_pic_type_plus_one = (uint8_t)(code)+1;
+                status = H264_STATUS_OK;
+            }
+            pInfo->number_of_first_au_info_nal_before_first_slice++;
+            break;
+        }
+#endif
+
+    case h264_NAL_UNIT_TYPE_Reserved1:
+    case h264_NAL_UNIT_TYPE_Reserved2:
+    case h264_NAL_UNIT_TYPE_Reserved3:
+    case h264_NAL_UNIT_TYPE_Reserved4:
+    case h264_NAL_UNIT_TYPE_Reserved5:
+        status = H264_STATUS_OK;
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+        break;
+
+    case h264_NAL_UNIT_TYPE_filler_data:
+        status = H264_STATUS_OK;
+        break;
+    case h264_NAL_UNIT_TYPE_ACP:
+        break;
+    case h264_NAL_UNIT_TYPE_SPS_extension:
+    case h264_NAL_UNIT_TYPE_unspecified:
+    case h264_NAL_UNIT_TYPE_unspecified2:
+        status = H264_STATUS_OK;
+        //nothing
+        break;
+    default:
+        status = H264_STATUS_OK;
+        break;
+    }
+
+    //pInfo->old_nal_unit_type = pInfo->nal_unit_type;
+    switch ( pInfo->nal_unit_type )
+    {
+        case h264_NAL_UNIT_TYPE_IDR:
+        case h264_NAL_UNIT_TYPE_SLICE:
+        case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+        case h264_NAL_UNIT_TYPE_SPS:
+        case h264_NAL_UNIT_TYPE_PPS:
+        case h264_NAL_UNIT_TYPE_SEI:
+        case h264_NAL_UNIT_TYPE_EOSeq:
+        case h264_NAL_UNIT_TYPE_EOstream:
+        case h264_NAL_UNIT_TYPE_Reserved1:
+        case h264_NAL_UNIT_TYPE_Reserved2:
+        case h264_NAL_UNIT_TYPE_Reserved3:
+        case h264_NAL_UNIT_TYPE_Reserved4:
+        case h264_NAL_UNIT_TYPE_Reserved5:
+        {
+            pInfo->old_nal_unit_type = pInfo->nal_unit_type;
+            break;
+        }
+        default:
+            break;
+    }
+
+    return status;
+}
+
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+static uint32_t viddec_h264_is_frame_start(void *ctxt)
+{
+    struct h264_viddec_parser* parser = ctxt;
+    uint32_t ret = 0;
+
+    h264_Info * pInfo = &(parser->info);
+
+    if(pInfo->img.g_new_frame) {
+        ret = 1;
+    }
+
+    return ret;
+}
+
+#ifdef VBP
+uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int next_sc,
+                               uint32_t *codec_specific_errors)
+#else
+static uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors)
+#endif
+{
+    struct h264_viddec_parser* parser = ctxt;
+    uint32_t ret = VIDDEC_PARSE_SUCESS;
+    h264_Info * pInfo = &(parser->info);
+    uint8_t is_stream_forced_to_complete=false;
+
+    is_stream_forced_to_complete = (VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc);
+
+    if(is_stream_forced_to_complete ||
(pInfo->is_current_workload_done)) + { + viddec_workload_t *wl; + viddec_frame_attributes_t *attrs; + + wl = viddec_pm_get_header( parent ); + attrs = &wl->attrs; + + if((attrs->cont_size.width < 32) || (attrs->cont_size.width > 2048) || (attrs->cont_size.height < 32) || (attrs->cont_size.height>2048)) + { + attrs->cont_size.width = 32; + attrs->cont_size.height = 32; + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + } + + *codec_specific_errors = pInfo->wl_err_curr; + pInfo->wl_err_curr = pInfo->wl_err_next; + pInfo->wl_err_next = 0; + + if(is_stream_forced_to_complete) + { + h264_parse_emit_eos(parent, pInfo); + } + ret = VIDDEC_PARSE_FRMDONE; + } + + return ret; +} + +#ifdef VBP +void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) +#else +static void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) +#endif +{ + /* Should return size of my structure */ + size->context_size = sizeof(struct h264_viddec_parser); + size->persist_size = MAX_NUM_SPS * sizeof(seq_param_set_all) + + MAX_NUM_PPS * sizeof(pic_param_set) + + MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE + + sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; +} + +void viddec_h264_get_ops(viddec_parser_ops_t *ops) +{ + ops->init = viddec_h264_init; + + ops->parse_syntax = viddec_h264_parse; + ops->get_cxt_size = viddec_h264_get_context_size; + ops->is_wkld_done = viddec_h264_wkld_done; + ops->is_frame_start = viddec_h264_is_frame_start; + return; +} + diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c new file mode 100644 index 0000000..4fc2f1a --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c @@ -0,0 +1,1306 @@ +/* Any workload management goes in this file */ + +#include "viddec_fw_debug.h" +#include "viddec_parser_ops.h" +#include "h264.h" +#include "h264parse.h" +#include "viddec_fw_item_types.h" +#include "h264parse_dpb.h" + + +#include "viddec_fw_workload.h" +#include +#include "viddec_pm_utils_bstream.h" + +// picture parameter 1 +#define PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT(w) (((uint32_t)w)&0x1) +#define PUT_BSD_PP1_SLICE_TYPE_BITS(w) ((((uint32_t)w)&0x7)<<1) +#define PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(w) ((((uint32_t)w)&0x3)<<4) +#define PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<6) +#define PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(w) ((((uint32_t)w)&0x3F)<<8) +#define PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(w) ((((uint32_t)w)&0x3F)<<16) + +// picture parameter 2 +#define PUT_BSD_PP2_CABAC_INIT_IDC_BITS(w) (((uint32_t)w)&0x3) +#define PUT_BSD_PP2_QP_BITS(w) ((((uint32_t)w)&0x3F)<<2) +#define PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(w) ((((uint32_t)w)&0x3)<<8) +#define PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(w) ((((uint32_t)w)&0xF)<<10) +#define PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(w) ((((uint32_t)w)&0xF)<<14) +#define PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(w) ((((uint32_t)w)&0x1)<<18) +#define PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(w) ((((uint32_t)w)&0x1F)<<19) +#define PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(w) ((((uint32_t)w)&0x1F)<<24) + + +// slice start parameter +#define PUT_BSD_SS_START_ADDR_BITS(w) (((uint32_t)w)&0x7fff) // 14:0 current slice start address +#define PUT_BSD_SS_SKIP_FS_IDC_BITS(w) ((((uint32_t)w)&0x3f)<<16) // [5:0], [4:0] frame store idc, [5] - 0: top-filed, 1: bottom field +#define PUT_BSD_SS_SKIP_TYPE_BIT(w) ((((uint32_t)w)&0x1)<<24) // 0: P-skip, 1: I-skip +#define 
PUT_BSD_SS_SKIP_REWIND_BITS(w) ((((uint32_t)w)&0xf)<<28) // number of MB or MBAFF pairs to rewind before skip + +//h264_dpb_init +#define PUT_FRAME_WIDTH_MB_BITS(w) (((uint32_t)w)&0x7F) +#define PUT_FRAME_HEIGHT_MB_BITS(w) ((((uint32_t)w)&0x7F)<<16) + +//dpb lut table init +//#define PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(w) ((((uint32_t)w)&0x1F)<<8) + +//h264 img init +#define PUT_BSD_IMAGE_STRUCTURE_BITS(w) (((uint32_t)w)&0x3) +#define PUT_BSD_IMAGE_IDR_BIT(w) ((((uint32_t)w)&0x1)<<2) +#define PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<3) +#define PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<4) +#define PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<5) +#define PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<6) +#define PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<7) +#define PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(w) ((((uint32_t)w)&0x1F)<<8) + +#define PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<13) +#define PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<14) +#define PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<15) +#define PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<16) +#define PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(w) ((((uint32_t)w)&0xFF)<<17) +#define PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<25) + + +extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo, + int32_t NonExisting, + int32_t use_old); + +extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames); + + + +void h264_translate_parser_info_to_frame_attributes(viddec_workload_t *wl, h264_Info *pInfo) +{ + + viddec_frame_attributes_t *attrs = &wl->attrs; + + + + //// Cont_size + attrs->cont_size.height = pInfo->img.FrameHeightInMbs*16; + attrs->cont_size.width = pInfo->img.PicWidthInMbs*16; + + //// The following attributes will be updated in slice level + attrs->h264.used_for_reference = 0; + attrs->h264.top_field_first = 0; + attrs->h264.top_field_poc = 0; + attrs->h264.bottom_field_poc = 0; + attrs->h264.field_pic_flag = 0; + +#if 1 +/// Double check the size late!!!!! 
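/*
 * A minimal standalone sketch of the CropUnit arithmetic used in the block
 * below (illustrative only; not from the original patch). For 4:2:0 content
 * (chroma_format_idc == 1, SubWidthC == SubHeightC == 2) coded as 68
 * macroblock rows (1088 luma lines) with frame_mbs_only_flag == 1, a
 * frame_crop_rect_bottom_offset of 4 gives 1088 - 4*2 = 1080 display lines:
 *
 *   #include <stdint.h>
 *   #include <stdio.h>
 *
 *   // Display height after bottom cropping; CropUnitY follows the spec
 *   // formula SubHeightC * (2 - frame_mbs_only_flag) for chroma_format_idc > 0.
 *   static int32_t cropped_height(int32_t frame_height_in_mbs,
 *                                 int32_t sub_height_c,
 *                                 int32_t frame_mbs_only_flag,
 *                                 int32_t crop_bottom_offset)
 *   {
 *       int32_t crop_unit_y = sub_height_c * (2 - frame_mbs_only_flag);
 *       return frame_height_in_mbs * 16 - crop_bottom_offset * crop_unit_y;
 *   }
 *
 *   int main(void)
 *   {
 *       printf("%d\n", cropped_height(68, 2, 1, 4));   // prints 1080
 *       return 0;
 *   }
 */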
+ //attrs->h264.cropped_size.width = pInfo->img.PicWidthInMbs*16; + //attrs->h264.cropped_size.height = pInfo->img.PicWidthInMbs*16; + + if( (pInfo->active_SPS.sps_disp.frame_cropping_flag) && + (pInfo->active_SPS.sps_disp.chroma_format_idc < 4)) + { + int32_t CropUnitX, CropUnitY; + int32_t SubWidthC, SubHeightC; + + if(pInfo->active_SPS.sps_disp.chroma_format_idc == 0) + { + CropUnitX = 1; + CropUnitY = 2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag; + } + else + { + SubWidthC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >> 1); + SubHeightC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >>1) + - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) & 0x1); + CropUnitX = SubWidthC; + CropUnitY = SubHeightC * (2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag); + } + + if ((int32_t)attrs->cont_size.height >(pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY)) + { + attrs->cont_size.height -= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY); + //attrs->h264.cropped_size.height-= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY); + } + } +/// Pan-Scan Info + +#endif + +} + + +static void h264_parse_update_frame_attributes(void *parent, h264_Info *pInfo) +{ + viddec_workload_t *wl_cur, *wl_next; + viddec_frame_attributes_t *attrs; + uint8_t frame_type=0; + + + if(pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + wl_cur = viddec_pm_get_header( parent ); + attrs = &wl_cur->attrs; + } + else + { + wl_next = viddec_pm_get_next_header (parent); + attrs = &wl_next->attrs; + } + + /////////update frame type + if((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&(0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)) + { + frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_FRAME_OFFSET)) )>> FRAME_TYPE_FRAME_OFFSET; + switch(frame_type) + { + case FRAME_TYPE_IDR: attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; break; + case FRAME_TYPE_I: attrs->frame_type = VIDDEC_FRAME_TYPE_I; break; + case FRAME_TYPE_P: attrs->frame_type = VIDDEC_FRAME_TYPE_P; break; + case FRAME_TYPE_B: attrs->frame_type = VIDDEC_FRAME_TYPE_B; break; + default: attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; break; + } + + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; + } + else + { + frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_TOP_OFFSET)) )>> FRAME_TYPE_TOP_OFFSET; + switch(frame_type) + { + case FRAME_TYPE_IDR: attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; break; + case FRAME_TYPE_I: attrs->frame_type = VIDDEC_FRAME_TYPE_I; break; + case FRAME_TYPE_P: attrs->frame_type = VIDDEC_FRAME_TYPE_P; break; + case FRAME_TYPE_B: attrs->frame_type = VIDDEC_FRAME_TYPE_B; break; + default: attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; break; + + } + + frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_BOTTOM_OFFSET)) )>> FRAME_TYPE_BOTTOM_OFFSET; + switch(frame_type) + { + case FRAME_TYPE_IDR: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_IDR; break; + case FRAME_TYPE_I: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_I; break; + case FRAME_TYPE_P: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_P; break; + case FRAME_TYPE_B: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_B; break; + default: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; break; + + } + } + + /////////update is_referece flag + attrs->h264.used_for_reference |= (pInfo->SliceHeader.nal_ref_idc == 0)? 
0: 1; + + /////////update POC + attrs->h264.top_field_poc = pInfo->img.toppoc; + attrs->h264.bottom_field_poc = pInfo->img.bottompoc; + + //////// update TFF + if(attrs->h264.top_field_poc <= attrs->h264.bottom_field_poc) { + attrs->h264.top_field_first = 1; + } else { + attrs->h264.top_field_first = 0; + } + + /////// update field_pic_flag + //attrs->h264.field_pic_flag |= (pInfo->SliceHeader.field_pic_flag << pInfo->SliceHeader.bottom_field_flag); + attrs->h264.field_pic_flag |= pInfo->SliceHeader.field_pic_flag; + + return; +} + + +static void h264_fill_slice_data(h264_Info *pInfo, h264_slice_data * p_slice_data) +{ + uint32_t data=0; + uint32_t first_mb_in_slice =0; + + + + ////////////fill pic parameters 1 + data = PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT( (pInfo->SliceHeader.nal_ref_idc == 0) ) + + PUT_BSD_PP1_SLICE_TYPE_BITS(pInfo->SliceHeader.slice_type) + + PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(pInfo->active_PPS.weighted_bipred_idc) + + PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(pInfo->active_PPS.weighted_pred_flag) + + PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(pInfo->SliceHeader.num_ref_idx_l0_active) + + PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(pInfo->SliceHeader.num_ref_idx_l1_active); + p_slice_data->h264_bsd_slice_p1 = data; + + + ///////////fill pic parameters 2 + data = PUT_BSD_PP2_CABAC_INIT_IDC_BITS(pInfo->SliceHeader.cabac_init_idc) + + PUT_BSD_PP2_QP_BITS( (pInfo->SliceHeader.slice_qp_delta + pInfo->active_PPS.pic_init_qp_minus26+26) ) + + PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(pInfo->SliceHeader.disable_deblocking_filter_idc) + + PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_alpha_c0_offset_div2) + + PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_beta_offset_div2) + + PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(pInfo->SliceHeader.direct_spatial_mv_pred_flag) + + PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(pInfo->active_PPS.chroma_qp_index_offset) + + PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(pInfo->active_PPS.second_chroma_qp_index_offset); + + p_slice_data->h264_bsd_slice_p2 = data; + + /////////fill slice start + first_mb_in_slice = pInfo->SliceHeader.first_mb_in_slice; + + data = PUT_BSD_SS_START_ADDR_BITS(first_mb_in_slice); + data |= PUT_BSD_SS_SKIP_FS_IDC_BITS( pInfo->h264_list_replacement) | + PUT_BSD_SS_SKIP_TYPE_BIT(0) | + PUT_BSD_SS_SKIP_REWIND_BITS((pInfo->img.MbaffFrameFlag? 
+                                                  2: 3));
+
+    p_slice_data->h264_bsd_slice_start = data;
+
+}
+
+
+static void h264_parse_emit_4X4_scaling_matrix( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t wi;
+
+    uint32_t i=0, n_items=0;
+    uint32_t qm_type=0;
+
+
+    for( i = 0; i < 6; i++ )
+    {
+        qm_type = FB_QM;
+        if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first
+        {
+            if (pInfo->active_SPS.seq_scaling_list_present_flag[i])
+            {
+                pInfo->qm_present_list |= ((0x1)<<i);
+
+                if (pInfo->active_SPS.UseDefaultScalingMatrix4x4Flag[i]) {
+                    qm_type = DEFAULT_QM;
+                } else {
+                    qm_type = SPS_QM;
+                }
+            }
+        }
+
+        if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps
+        {
+            if (pInfo->active_PPS.pic_scaling_list_present_flag[i])
+            {
+                pInfo->qm_present_list |= ((0x1)<<i);
+                if (pInfo->active_PPS.UseDefaultScalingMatrix4x4Flag[i]) {
+                    qm_type = DEFAULT_QM;
+                } else {
+                    qm_type = PPS_QM;
+                }
+            }
+            else
+            {
+                if ((i != 0) && (i != 3) && (i < 6)) {
+                    pInfo->qm_present_list &= ~((0x1)<<i);
+                    qm_type = FB_QM;
+                }
+            }
+        }
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_SCALING_MATRIX;
+
+        // data_offset 0x aa bb cc dd
+        //      bb is the workload item offset
+        //      cc is the qm_type
+        //      dd is the matrix number
+        //
+        switch (qm_type)
+        {
+            case (SPS_QM):{
+
+                for(n_items =0; n_items<2; n_items++)
+                {
+                    wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8);
+                    wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+0]))+
+                                              (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+1]))<<8)+
+                                              (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+2]))<<16)+
+                                              (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+3]))<<24);
+                    wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+4]))+
+                                              (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+5]))<<8)+
+                                              (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+6]))<<16)+
+                                              (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+7]))<<24);
+
+                    if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+                    {
+                        viddec_pm_append_workitem( parent, &wi );
+                    }
+                    else
+                    {
+                        viddec_pm_append_workitem_next( parent, &wi );
+                    }
+
+                }
+
+                break;
+            }
+            case (PPS_QM):{
+
+                for(n_items =0; n_items<2; n_items++)
+                {
+                    wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8);
+                    wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+0]))+
+                                              (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+1]))<<8)+
+                                              (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+2]))<<16)+
+                                              (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+3]))<<24);
+                    wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+4]))+
+                                              (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+5]))<<8)+
+                                              (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+6]))<<16)+
+                                              (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+7]))<<24);
+
+                    if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+                    {
+                        viddec_pm_append_workitem( parent, &wi );
+                    }
+                    else
+                    {
+                        viddec_pm_append_workitem_next( parent, &wi );
+                    }
+                }
+
+                break;
+            }
+            case (DEFAULT_QM):
+            {
+
+                wi.data.data_offset = i + (DEFAULT_QM << 4);
+                wi.data.data_payload[0] = 0;
+                wi.data.data_payload[1] = 0;
+                if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+                {
+                    viddec_pm_append_workitem( parent, &wi );
+                }
+                else
+                {
+                    viddec_pm_append_workitem_next( parent, &wi );
+                }
+                break;
+            }
+            default:
+            {
+                break;
+            }
+        }
+    }
+
+}
+
+static void h264_parse_emit_8X8_scaling_matrix( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t wi;
+
+    uint32_t i=0, n_items=0;
+    uint32_t qm_type=0;
+
+    for( i = 6; i < 8; i++ )
+    {
+        qm_type = FB_QM;
+        if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first
+        {
+            if (pInfo->active_SPS.seq_scaling_list_present_flag[i])
+            {
+                pInfo->qm_present_list |= ((0x1)<<i);
+
+                if (pInfo->active_SPS.UseDefaultScalingMatrix8x8Flag[i-6])
+                {
+                    qm_type = DEFAULT_QM;
+                }
+                else
+                {
+                    qm_type = SPS_QM;
+                }
+            }
+        }
+
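/*
 * Both the 4x4 and 8x8 emitters pack scaling-list coefficients eight at a
 * time: four 8-bit values per 32-bit payload word, low byte first. A small
 * self-contained sketch of that packing (illustrative; not from the
 * original patch):
 *
 *   #include <stdint.h>
 *
 *   // Packs src[0..3] into one little-endian word, matching the
 *   // c0 + (c1<<8) + (c2<<16) + (c3<<24) pattern used in the cases here.
 *   static uint32_t pack4(const uint8_t *src)
 *   {
 *       return ((uint32_t)src[0]) |
 *              (((uint32_t)src[1]) << 8) |
 *              (((uint32_t)src[2]) << 16) |
 *              (((uint32_t)src[3]) << 24);
 *   }
 *
 *   // A 4x4 list (16 coefficients) needs 2 items (n_items < 2); an 8x8
 *   // list (64 coefficients) needs 8 (n_items < 8), two words per item:
 *   //   payload[0] = pack4(&list[n*8]);  payload[1] = pack4(&list[n*8 + 4]);
 */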
+        if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps
+        {
+            if (pInfo->active_PPS.pic_scaling_list_present_flag[i])
+            {
+                pInfo->qm_present_list |= ((0x1)<<i);
+
+                if (pInfo->active_PPS.UseDefaultScalingMatrix8x8Flag[i-6])
+                {
+                    qm_type = DEFAULT_QM;
+                }
+                else
+                {
+                    qm_type = PPS_QM;
+                }
+            }
+        }
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_SCALING_MATRIX;
+
+        // data_offset 0x aa bb cc dd
+        //      bb is the workload item offset
+        //      cc is the qm_type
+        //      dd is the matrix number
+        //
+        switch (qm_type)
+        {
+            case (SPS_QM):
+            {
+                for(n_items =0; n_items<8; n_items++)
+                {
+                    wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8);
+                    wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+0]))+
+                                              (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+
+                                              (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+
+                                              (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+3]))<<24);
+                    wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+4]))+
+                                              (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+
+                                              (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+
+                                              (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+7]))<<24);
+
+                    if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+                        viddec_pm_append_workitem( parent, &wi );
+                    } else {
+                        viddec_pm_append_workitem_next( parent, &wi );
+                    }
+                }
+                break;
+            }
+            case (PPS_QM):
+            {
+                for(n_items =0; n_items<8; n_items++)
+                {
+                    wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8);
+                    wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+0]))+
+                                              (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+
+                                              (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+
+                                              (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+3]))<<24);
+                    wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+4]))+
+                                              (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+
+                                              (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+
+                                              (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+7]))<<24);
+
+                    if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+                        viddec_pm_append_workitem( parent, &wi );
+                    } else {
+                        viddec_pm_append_workitem_next( parent, &wi );
+                    }
+                }
+
+                break;
+            }
+            case (DEFAULT_QM):
+            {
+                wi.data.data_offset = i + (DEFAULT_QM << 4);
+                wi.data.data_payload[0] = 0;
+                wi.data.data_payload[1] = 0;
+                if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+                    viddec_pm_append_workitem( parent, &wi );
+                } else {
+                    viddec_pm_append_workitem_next( parent, &wi );
+                }
+
+                break;
+            }
+            default:{
+                break;
+            }
+        }
+    }
+
+}
+
+
+
+static void h264_fill_pic_data(h264_Info *pInfo, h264_pic_data * p_pic_data)
+{
+    uint32_t data=0;
+    uint32_t dec_idc =0;
+    uint32_t frame_structure =0;
+
+    //fill h264_dpb_init
+    data = PUT_FRAME_WIDTH_MB_BITS(pInfo->dpb.PicWidthInMbs) +
+           PUT_FRAME_HEIGHT_MB_BITS(pInfo->dpb.FrameHeightInMbs);
+
+    p_pic_data->h264_dpb_init = data;
+
+    //////////////////////////////// fill current pic info
+    data = 0;
+    dec_idc = pInfo->dpb.fs_dec_idc;
+    frame_structure = pInfo->img.structure;
+    if(frame_structure == FRAME)
+        frame_structure=0;
+    //data = PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc);
+
+    //p_pic_data->h264_cur_bsd_img_init= data;
+
+    data = PUT_BSD_IMAGE_STRUCTURE_BITS(frame_structure) +
+           PUT_BSD_IMAGE_IDR_BIT(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) +
PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(pInfo->img.MbaffFrameFlag) + + PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(pInfo->active_PPS.entropy_coding_mode_flag) + + PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(pInfo->active_PPS.constrained_intra_pred_flag) + + PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(pInfo->active_SPS.sps_disp.frame_mbs_only_flag) + + PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(pInfo->active_SPS.sps_disp.direct_8x8_inference_flag) + + PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(pInfo->active_PPS.transform_8x8_mode_flag) + + PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(((pInfo->active_SPS.sps_disp.chroma_format_idc==0)? 0x1: 0x0)) + + PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(0x0) + + PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT((pInfo->active_PPS.pic_scaling_matrix_present_flag||pInfo->active_SPS.seq_scaling_matrix_present_flag)) + + PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->qm_present_list) + + PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(0x1) + + PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc); + + p_pic_data->h264_cur_bsd_img_init= data; + + //to do: add qm list + //PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->img.q .qm_present_list) + + //printf("structure = %d, tpoc = %d, bpoc = %d\n", pInfo->img.structure, pInfo->img.toppoc, pInfo->img.bottompoc); + + if(pInfo->img.structure == FRAME) + { + // Write down POC + p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc; + p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc; + }else if (pInfo->img.structure == TOP_FIELD) + { + // Write down POC + p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc; + p_pic_data->h264_cur_mpr_bf_poc = 0; + } + else if (pInfo->img.structure == BOTTOM_FIELD) + { + // Write down POC + p_pic_data->h264_cur_mpr_tf_poc = 0; + p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc; + } + else + { + // Write down POC + p_pic_data->h264_cur_mpr_tf_poc = 0; + p_pic_data->h264_cur_mpr_bf_poc = 0; + } + + return; +} + +static void h264_parse_emit_sps(void *parent, h264_Info *pInfo) +{ + viddec_workload_item_t wi; + + if(pInfo->Is_SPS_updated) + { + viddec_fw_reset_workload_item(&wi); + wi.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO; + + viddec_fw_h264_sps_set_profile_idc(&(wi.h264_sps), pInfo->active_SPS.profile_idc); + viddec_fw_h264_sps_set_level_idc(&(wi.h264_sps), pInfo->active_SPS.level_idc); + viddec_fw_h264_sps_set_chroma_format_idc(&(wi.h264_sps), pInfo->active_SPS.sps_disp.chroma_format_idc); + viddec_fw_h264_sps_set_num_ref_frames(&(wi.h264_sps), pInfo->active_SPS.num_ref_frames); + viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(&(wi.h264_sps), pInfo->active_SPS.gaps_in_frame_num_value_allowed_flag); + viddec_fw_h264_sps_set_frame_mbs_only_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_mbs_only_flag); + viddec_fw_h264_sps_set_frame_cropping_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_cropping_flag); + viddec_fw_h264_sps_set_vui_parameters_present_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.vui_parameters_present_flag); + wi.h264_sps.pic_width_in_mbs_minus1 = pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1; + wi.h264_sps.pic_height_in_map_units_minus1 = pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1; + + if(pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + viddec_pm_append_workitem( parent, &wi ); + } + else + { + viddec_pm_append_workitem_next( parent, &wi ); + } + + viddec_fw_reset_workload_item(&wi); + if(pInfo->active_SPS.sps_disp.frame_cropping_flag) + { + wi.vwi_type = VIDDEC_WORKLOAD_H264_CROPPING; + viddec_fw_h264_cropping_set_left(&(wi.h264_cropping), 
pInfo->active_SPS.sps_disp.frame_crop_rect_left_offset); + viddec_fw_h264_cropping_set_right(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_right_offset); + viddec_fw_h264_cropping_set_top(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_top_offset); + viddec_fw_h264_cropping_set_bottom(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset); + + if(pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + viddec_pm_append_workitem( parent, &wi ); + } + else + { + viddec_pm_append_workitem_next( parent, &wi ); + } + } + viddec_fw_reset_workload_item(&wi); + if(pInfo->active_SPS.sps_disp.vui_parameters_present_flag == 1) + { + wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO; + viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag); + viddec_fw_h264_vui_set_video_signal_type_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag); + viddec_fw_h264_vui_set_pic_struct_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag); + viddec_fw_h264_vui_set_timing_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag); + viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag); + viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag); + + if(pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag == 1) + { + viddec_fw_h264_vui_set_aspect_ratio_idc(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc); + if(h264_AR_Extended_SAR == pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc) + { + viddec_fw_h264_vui_set_sar_width(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_width); + viddec_fw_h264_vui_set_sar_height(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_height); + } + } + + + if(pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag) + { + viddec_fw_h264_vui_set_colour_description_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag); + if(pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag) + { + viddec_fw_h264_vui_set_colour_primaries(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_primaries); + viddec_fw_h264_vui_set_transfer_characteristics(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.transfer_characteristics); + } + viddec_fw_h264_vui_set_video_format(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_format); + } + + if(pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1) + { + viddec_fw_h264_vui_set_fixed_frame_rate_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.fixed_frame_rate_flag); + } + + if( (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) + || (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)) + { + viddec_fw_h264_vui_set_low_delay_hrd_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.low_delay_hrd_flag); + } + + if(pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + viddec_pm_append_workitem( parent, &wi ); + } + else + { 
+                viddec_pm_append_workitem_next( parent, &wi );
+            }
+        }
+
+        viddec_fw_reset_workload_item(&wi);
+
+        if(pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO;
+
+            wi.h264_vui_time_info.num_units_in_tick = pInfo->active_SPS.sps_disp.vui_seq_parameters.num_units_in_tick;
+            wi.h264_vui_time_info.time_scale = pInfo->active_SPS.sps_disp.vui_seq_parameters.time_scale;
+            if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+            {
+                viddec_pm_append_workitem( parent, &wi );
+            }
+            else
+            {
+                viddec_pm_append_workitem_next( parent, &wi );
+            }
+        }
+
+
+        pInfo->Is_SPS_updated =0;
+
+    }
+
+    return;
+}
+
+
+
+
+static void h264_parse_emit_ref_list( void *parent, h264_Info *pInfo, uint32_t list_id)
+{
+    uint32_t i=0, nitems=0, byte_index=0, data=0, data_writed=0;
+    uint8_t *p_list;
+    viddec_workload_item_t wi;
+
+    if(0 == list_id)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_0;
+
+        if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+            if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+            {
+                p_list = pInfo->slice_ref_list0;
+            }
+            else
+            {
+                p_list = pInfo->dpb.listX_0;
+            }
+        }
+        else
+        {
+            nitems =0;
+            p_list = pInfo->dpb.listX_0;
+        }
+    }
+    else
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_1;
+
+        if( h264_PtypeB==pInfo->SliceHeader.slice_type)
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l1_active;
+            if(pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag)
+            {
+                p_list = pInfo->slice_ref_list1;
+            }
+            else
+            {
+                p_list = pInfo->dpb.listX_1;
+            }
+        }
+        else
+        {
+            nitems = 0;
+            p_list = pInfo->dpb.listX_1;
+        }
+
+    }
+
+    if(0 == nitems)
+    {
+        return;
+    }
+
+    byte_index =0;
+    data_writed=0;
+
+
+    for (i=0; i < 32; i++)
+    {
+        if(byte_index == 0) data = 0;
+
+        if(i<nitems)
+        {
+            if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[ (p_list[i]&0x1f) ])))
+            {
+                data |= (pInfo->h264_list_replacement) << byte_index;
+            }
+            else
+            {
+                data |= (p_list[i] & 0x7f) << byte_index;
+            }
+        }
+        else
+        {
+            data |= (0x80) << byte_index;
+        }
+
+
+        if(byte_index == 24)
+        {
+            byte_index = 0;
+            wi.data.data_offset = data_writed&(~0x1);
+            wi.data.data_payload[data_writed&0x1]=data;
+
+            data =0;
+
+            if(data_writed&0x1)
+            {
+                if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+                {
+                    viddec_pm_append_workitem( parent, &wi );
+                }
+                else
+                {
+                    viddec_pm_append_workitem_next( parent, &wi );
+                }
+            }
+            data_writed ++;
+        }
+        else
+        {
+            byte_index += 8;
+        }
+    }
+
+}
+
+
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t wi;
+    h264_slice_data slice_data;
+
+    uint32_t i=0, nitems=0, data=0;
+    uint32_t bits_offset =0, byte_offset =0;
+    uint8_t is_emul =0;
+
+    ////////////////////// Update frame attributes/////////////////
+    h264_parse_update_frame_attributes(parent,pInfo);
+
+
+    if(pInfo->SliceHeader.sh_error) {
+        // Error type definition, refer to viddec_fw_common_defs.h
+        //      if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17)
+        //      if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18)
+        //      if this is frame based, both 2 bits should be set
+
+        if(pInfo->push_to_cur) {
+            pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+            pInfo->wl_err_curr |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET);
+        } else {
+            pInfo->wl_err_next |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+            pInfo->wl_err_next |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET);
+        }
+    }
+
+
+    ////////////////////// Update Reference list //////////////////
+    if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+    {
+        if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+            for(i=0; i<nitems; i++)
+            {
+                if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+        else
+        {
+            nitems = pInfo->dpb.listXsize[0];
+
+            for(i=0; i<nitems; i++)
+            {
+                if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+
+    }
+    else
+    {
+        nitems =0;
+    }
+    ///// fill ref list 0
+    h264_parse_emit_ref_list(parent, pInfo, 0);
+
+    ///// fill ref list 1
+    h264_parse_emit_ref_list(parent, pInfo, 1);
+
+    ///////////////////////////////////// Slice Data ////////////////////////////////
+    h264_fill_slice_data(pInfo, &slice_data);
+
+    wi.vwi_type = VIDDEC_WORKLOAD_H264_SLICE_REG;
+
+    wi.data.data_offset = slice_data.h264_bsd_slice_start;
+    wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1;
+    wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2;
+
+    if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+    {
+        viddec_pm_append_workitem( parent , &wi);
+    }
+    else
+    {
+        viddec_pm_append_workitem_next( parent , &wi);
+    }
+
+
+    ///////////////////////////predict weight table item and data if have///////////////////////////
+    if(pInfo->h264_pwt_enabled)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET;
+        wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1;
+        wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset;
+        wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            viddec_pm_append_workitem( parent , &wi);
+
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+            wi.es.es_flags = 0;
+            viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1);
+        }
+        else
+        {
+            viddec_pm_append_workitem_next( parent , &wi);
+
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+            wi.es.es_flags = 0;
+            viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0);
+        }
+    }
+
+
+    ////////////////////////////////// Update ES Buffer for Slice ///////////////////////
+    viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+    //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset);
+
+    if(pInfo->active_PPS.entropy_coding_mode_flag)
+    {
+        if(0!=bits_offset)  {
+            viddec_pm_get_bits(parent, &data, 8-bits_offset);
+        }
+    }
+    else
+    {
+        if(0!=bits_offset)  {
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET;
+            wi.data.data_offset = bits_offset;
+            wi.data.data_payload[0]=0;
+            wi.data.data_payload[1]=0;
+
+            if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+                viddec_pm_append_workitem( parent , &wi);
+            }
+            else {
+                viddec_pm_append_workitem_next( parent , &wi);
+            }
+        }
+    }
+
+    if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+    {
+        viddec_pm_append_pixeldata( parent );
+    }
+    else
+    {
+        viddec_pm_append_pixeldata_next( parent);
+    }
+
+    return;
+}
+
+
+void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t wi;
+
+    const uint32_t *pl;
+    uint32_t i=0,nitems=0;
+
+    h264_pic_data pic_data;
+
+    pInfo->qm_present_list=0;
+
+    h264_parse_emit_4X4_scaling_matrix(parent, pInfo);
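/*
 * The scaling-matrix emitters called on either side of this point maintain
 * pInfo->qm_present_list as an 8-bit mask: bits 0-5 flag the six 4x4 lists,
 * bits 6-7 the two 8x8 lists. h264_fill_pic_data() later folds the mask into
 * the picture register through PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS. An
 * illustrative accessor (not from the original patch):
 *
 *   #include <stdint.h>
 *
 *   // Nonzero when scaling list 'idx' (0..7) was emitted for this picture.
 *   static uint32_t qm_list_present(uint32_t qm_present_list, uint32_t idx)
 *   {
 *       return qm_present_list & (1u << idx);
 *   }
 */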
+    h264_parse_emit_8X8_scaling_matrix(parent, pInfo);
+
+    h264_fill_pic_data(pInfo, &pic_data);
+
+    // How many payloads must be generated
+    nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up
+
+    pl = (const uint32_t *) &pic_data;
+
+    // Dump pic data to an array of workitems (TODO: pl can read past the end
+    // of pic_data when its size is not a multiple of 8 bytes)
+    for( i = 0; i < nitems; i++ )
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_PIC_REG;
+        wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct
+        wi.data.data_payload[0] = pl[0];
+        wi.data.data_payload[1] = pl[1];
+        pl += 2;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+
+    return;
+}
+
+void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t wi;
+    uint32_t i=0,nitems=0;
+
+    ///////////////////////// Frame attributes//////////////////////////
+
+    //Push data into current workload if first frame or frame_boundary already detected by non slice nal
+    if( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal))
+    {
+        viddec_workload_t *wl_cur = viddec_pm_get_header( parent );
+        //pInfo->img.g_new_frame = 0;
+        pInfo->Is_first_frame_in_stream =0;
+        pInfo->is_frame_boundary_detected_by_non_slice_nal=0;
+        pInfo->push_to_cur = 1;
+        h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo);
+    }
+    else // move to cur if frame boundary detected by previous non slice nal, or move to next if not
+    {
+        viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+
+        pInfo->push_to_cur = 0;
+        h264_translate_parser_info_to_frame_attributes(wl_next, pInfo);
+
+        pInfo->is_current_workload_done=1;
+    }
+
+    ///////////////////// SPS/////////////////////
+    h264_parse_emit_sps(parent, pInfo);
+
+    /////////////////////display frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for(i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for(i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            viddec_pm_append_workitem_next( parent, &wi );
+        }
+
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    /////////////////////flush frames (do not display)/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_dropped;
+
+    for(i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 + pInfo->dpb.frame_id_need_to_be_dropped[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            viddec_pm_append_workitem_next( parent, &wi );
+        }
+
+    }
+    pInfo->dpb.frame_numbers_need_to_be_dropped =0;
+
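/*
 * The display/release/dropout loops above all emit the same shape of item:
 * a base type plus a frame-store id, zeroed physical addresses, appended to
 * either the current or the next workload depending on push_to_cur. A hedged
 * refactoring sketch of that shared pattern (hypothetical helper; not in the
 * original patch):
 *
 *   // 'type_base' would be one of VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0,
 *   // _RELEASE_0 or _DROPOUT_0; fs_id selects the frame store.
 *   static void emit_ref_frame_item(void *parent, uint32_t type_base,
 *                                   uint32_t fs_id, uint32_t push_to_cur)
 *   {
 *       viddec_workload_item_t wi;
 *       wi.vwi_type = type_base + fs_id;
 *       wi.ref_frame.reference_id = fs_id;
 *       wi.ref_frame.luma_phys_addr = 0;
 *       wi.ref_frame.chroma_phys_addr = 0;
 *       if (push_to_cur)
 *           viddec_pm_append_workitem(parent, &wi);
 *       else
 *           viddec_pm_append_workitem_next(parent, &wi);
 *   }
 */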
+    /////////////////////update DPB frames/////////////////////
+    nitems = pInfo->dpb.used_size;
+    for(i=0; i<nitems; i++)
+    {
+        int32_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id;
+            wi.ref_frame.reference_id = fs_id;
+            wi.ref_frame.luma_phys_addr = 0;
+            wi.ref_frame.chroma_phys_addr = 0;
+
+            if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+            {
+                viddec_pm_append_workitem( parent, &wi );
+            }
+            else
+            {
+                viddec_pm_append_workitem_next( parent, &wi );
+            }
+        }
+    }
+
+
+    /////////////////////update dpb frames info (poc)/////////////////////
+    nitems = pInfo->dpb.used_size;
+    for(i=0; i<nitems; i++)
+    {
+        int32_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_DPB_FRAME_POC;
+            wi.data.data_offset = fs_id;
+            //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc);
+
+            switch(viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id])))
+            {
+                case (FRAME):{
+                    wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                    wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                    break;
+                };
+
+                case (TOP_FIELD):{
+                    wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                    wi.data.data_payload[1] = 0;
+                    break;
+                };
+
+                case (BOTTOM_FIELD):{
+                    wi.data.data_payload[0] = 0;
+                    wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                    break;
+                };
+
+                default : {
+                    wi.data.data_payload[0] = 0;
+                    wi.data.data_payload[1] = 0;
+                    break;
+                };
+            }
+
+
+            if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+            {
+                viddec_pm_append_workitem( parent, &wi );
+            }
+            else
+            {
+                viddec_pm_append_workitem_next( parent, &wi );
+            }
+
+        }
+    }
+
+    /////////////////////Alloc buffer for current Existing frame/////////////////////
+    if(0!=pInfo->dpb.frame_numbers_need_to_be_allocated)
+    {
+        if(pInfo->push_to_cur)
+        {
+            viddec_workload_t *wl_cur = viddec_pm_get_header (parent);
+            wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+        else
+        {
+            viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+            wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+    return;
+}
+
+
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
+{
+
+    uint32_t nitems=0, i=0;
+    viddec_workload_item_t wi;
+
+
+    wi.vwi_type = VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY;
+    wi.ref_frame.reference_id = 0;
+    wi.ref_frame.luma_phys_addr = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+
+    if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+    {
+        viddec_pm_append_workitem( parent, &wi );
+    }
+    else
+    {
+        viddec_pm_append_workitem_next( parent, &wi );
+    }
+
+    ////
+    //// Now we can flush out all frames in DPB for display
+
+    if(MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)
+    {
+        if(viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3)
+        {
+            h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc);  //, DANGLING_TYPE_GAP_IN_FRAME
+        }
+    }
+
+
+    h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+    h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+    /////////////////////display frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for(i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for(i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            viddec_pm_append_workitem( parent, &wi );
+            viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+        }
+        else
+        {
+            viddec_pm_append_workitem_next( parent, &wi );
+            viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    return;
+}
+
+
+
+
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h b/mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h
new file mode 100644
index 0000000..aa2a712
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h
@@ -0,0 +1,195 @@
+#ifndef _MPEG2_H
+#define _MPEG2_H
+
+/**
+ * mpeg2.h
+ * -------
+ * This file contains all the necessary enumerations and structures needed from
+ * the MPEG-2 Specification.
+ */
+
+/* Max Pan-Scan offsets */
+#define MPEG2_MAX_VID_OFFSETS 3
+
+/* Quantization matrix size */
+#define MPEG2_QUANT_MAT_SIZE 64
+
+/* MPEG2 Start Code Values */
+typedef enum {
+    MPEG2_SC_PICTURE   = 0x00,
+    MPEG2_SC_SLICE_HDR = 0x01,
+    MPEG2_SC_SLICE_MIN = 0x01,
+    MPEG2_SC_SLICE_MAX = 0xAF,
+    MPEG2_SC_USER_DATA = 0xB2,
+    MPEG2_SC_SEQ_HDR   = 0xB3,
+    MPEG2_SC_SEQ_ERR   = 0xB4,
+    MPEG2_SC_EXT       = 0xB5,
+    MPEG2_SC_SEQ_END   = 0xB7,
+    MPEG2_SC_GROUP     = 0xB8,
+    MPEG2_SC_SYS_MIN   = 0xB9,
+    MPEG2_SC_SYS_MAX   = 0xFF,
+    MPEG2_SC_ALL       = 0xFF
+} mpeg2_start_codes;
+
+/* MPEG2 Extension Start Code ID */
+typedef enum {
+    MPEG2_EXT_SEQ           = 1,
+    MPEG2_EXT_SEQ_DISP      = 2,
+    MPEG2_EXT_QUANT_MAT     = 3,
+    MPEG2_EXT_COPYRIGHT     = 4,
+    MPEG2_EXT_SEQ_SCAL      = 5,
+    MPEG2_EXT_PIC_DISP      = 7,
+    MPEG2_EXT_PIC_CODING    = 8,
+    MPEG2_EXT_PIC_SPA_SCAL  = 9,
+    MPEG2_EXT_PIC_TEMP_SCAL = 10,
+    MPEG2_EXT_ALL           = 11
+} mpeg2_ext_start_codes;
+
+/* MPEG2 Picture Coding Type Values */
+typedef enum {
+    MPEG2_PC_TYPE_FORBIDDEN = 0,
+    MPEG2_PC_TYPE_I         = 1,
+    MPEG2_PC_TYPE_P         = 2,
+    MPEG2_PC_TYPE_B         = 3
+} mpeg2_picture_type;
+
+/* MPEG2 Picture Structure Type Values */
+typedef enum {
+    MPEG2_PIC_STRUCT_RESERVED = 0,
+    MPEG2_PIC_STRUCT_TOP      = 1,
+    MPEG2_PIC_STRUCT_BOTTOM   = 2,
+    MPEG2_PIC_STRUCT_FRAME    = 3
+} mpeg2_picture_structure;
+
+/* MPEG2 Chroma Format Values */
+typedef enum {
+    MPEG2_CF_RESERVED = 0,
+    MPEG2_CF_420      = 1,
+    MPEG2_CF_422      = 2,
+    MPEG2_CF_444      = 3
+} mpeg2_chroma_format;
+
+/* MPEG2 Parser Structures */
+/* Sequence Header Info */
+struct mpeg2_sequence_hdr_info
+{
+    uint32_t horizontal_size_value;
+    uint32_t vertical_size_value;
+    uint32_t aspect_ratio_information;
+    uint32_t frame_rate_code;
+    uint32_t bit_rate_value;
+    uint32_t vbv_buffer_size_value;
+    uint32_t constrained_parameters_flag;
+};
+
+/* Group of Pictures Header Info */
+struct mpeg2_gop_hdr_info
+{
+    uint32_t closed_gop;
+    uint32_t broken_link;
+};
+
+/* Picture Header */
+struct mpeg2_picture_hdr_info
+{
+    uint32_t temporal_reference;
+    uint32_t picture_coding_type;
+    uint32_t
full_pel_forward_vect; + uint32_t forward_f_code; + uint32_t full_pel_backward_vect; + uint32_t backward_f_code; +}; + +/* Sequence Extension Info */ +struct mpeg2_sequence_ext_info +{ + uint32_t profile_and_level_indication; + uint32_t progressive_sequence; + uint32_t chroma_format; + uint32_t horizontal_size_extension; + uint32_t vertical_size_extension; + uint32_t bit_rate_extension; + uint32_t vbv_buffer_size_extension; + uint32_t frame_rate_extension_n; + uint32_t frame_rate_extension_d; +}; + +/* Sequence Display Extension Info */ +struct mpeg2_sequence_disp_ext_info +{ + uint32_t video_format; + uint32_t colour_description; + uint32_t colour_primaries; + uint32_t transfer_characteristics; + uint32_t display_horizontal_size; + uint32_t display_vertical_size; +}; + +/* Sequence scalable extension Info */ +struct mpeg2_sequence_scal_ext_info +{ + uint32_t scalable_mode; +}; + +/* Picture Coding Extension */ +struct mpeg2_picture_coding_ext_info +{ + uint32_t fcode00; + uint32_t fcode01; + uint32_t fcode10; + uint32_t fcode11; + uint32_t intra_dc_precision; + uint32_t picture_structure; + uint32_t top_field_first; + uint32_t frame_pred_frame_dct; + uint32_t concealment_motion_vectors; + uint32_t q_scale_type; + uint32_t intra_vlc_format; + uint32_t alternate_scan; + uint32_t repeat_first_field; + uint32_t chroma_420_type; + uint32_t progressive_frame; + uint32_t composite_display_flag; +}; + +/* Picture Display Extension */ +struct mpeg2_picture_disp_ext_info +{ + uint32_t frame_center_horizontal_offset[MPEG2_MAX_VID_OFFSETS]; + uint32_t frame_center_vertical_offset[MPEG2_MAX_VID_OFFSETS]; +}; + +/* Quantization Matrix Extension */ +struct mpeg2_quant_ext_info +{ + uint32_t load_intra_quantiser_matrix; + uint32_t load_non_intra_quantiser_matrix; + uint32_t load_chroma_intra_quantiser_matrix; + uint32_t load_chroma_non_intra_quantiser_matrix; +}; + +/* Quantization Matrices */ +struct mpeg2_quant_matrices +{ + uint8_t intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE]; + uint8_t non_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE]; + uint8_t chroma_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE]; + uint8_t chroma_non_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE]; +}; + +/* MPEG2 Info */ +struct mpeg2_info +{ + struct mpeg2_sequence_hdr_info seq_hdr; + struct mpeg2_gop_hdr_info gop_hdr; + struct mpeg2_picture_hdr_info pic_hdr; + struct mpeg2_sequence_ext_info seq_ext; + struct mpeg2_sequence_disp_ext_info seq_disp_ext; + struct mpeg2_sequence_scal_ext_info seq_scal_ext; + struct mpeg2_picture_coding_ext_info pic_cod_ext; + struct mpeg2_picture_disp_ext_info pic_disp_ext; + struct mpeg2_quant_ext_info qnt_ext; + struct mpeg2_quant_matrices qnt_mat; +}; + +#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h b/mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h new file mode 100644 index 0000000..a6d8c2c --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h @@ -0,0 +1,231 @@ +#ifndef _VIDDEC_MPEG2_H +#define _VIDDEC_MPEG2_H + +/** + * viddec_mpeg2.h + * -------------- + * This header file contains all the necessary state information and function + * prototypes for the MPEG2 parser. This header also defines the debug macros + * used by the MPEG2 parser to emit debug messages in host mode. + */ + +#include "viddec_fw_debug.h" +#include "viddec_parser_ops.h" +#include "mpeg2.h" + +/* Debug Print Macros */ +#define MPEG2_DEB(x...) DEB("MPEG2_Parser: "x) +#define MPEG2_FA_DEB(x...) 
DEB("MPEG2_Frame_attribute: "x) + +/* Bit masks */ +#define MPEG2_BIT_MASK_11 0x7ff /* Used for masking Height and Width */ +#define MPEG2_BIT_MASK_8 0xff /* Used fro masking start code byte */ +#define MPEG2_BIT_MASK_4 0xf /* Used for masking Level */ +#define MPEG2_BIT_MASK_3 0x7 /* Used for masking Profile */ + +/* MPEG2 Start code and prefix size */ +#define MPEG2_SC_AND_PREFIX_SIZE 32 + +/* Number of DMEM Workload Items */ +#define MPEG2_NUM_DMEM_WL_ITEMS 2 + +/* Number of Quantization Matrix Workload Items */ +#define MPEG2_NUM_QMAT_WL_ITEMS 32 + +/* Maximum supported content size */ +#define MPEG2_MAX_CONTENT_WIDTH 2048 +#define MPEG2_MAX_CONTENT_HEIGHT 2048 + +/* Others */ +#define MPEG2_BITS_EIGHT 8 + + +/* MPEG2 Stream Levels */ +typedef enum { + MPEG2_LEVEL_SEQ = 0, + MPEG2_LEVEL_GOP, + MPEG2_LEVEL_PIC +} mpeg2_stream_levels; + +/* MPEG2 Headers and Extensions */ +typedef enum { + MPEG2_HEADER_NONE = 0, + MPEG2_HEADER_SEQ = 1 << 0, + MPEG2_HEADER_SEQ_EXT = 1 << 1, + MPEG2_HEADER_SEQ_DISP_EXT = 1 << 2, + MPEG2_HEADER_GOP = 1 << 3, + MPEG2_HEADER_PIC = 1 << 4, + MPEG2_HEADER_PIC_COD_EXT = 1 << 5, + MPEG2_HEADER_PIC_DISP_EXT = 1 << 6, + MPEG2_HEADER_SEQ_SCAL_EXT = 1 << 7 +} mpeg2_headers; + +/* MPEG2 Parser Status Codes */ +typedef enum { + MPEG2_SUCCESS = 0, /* No error */ + MPEG2_FRAME_COMPLETE = 1, /* Frame parsing complete found */ + MPEG2_PARSE_ERROR = 2, /* Failure in parsing */ +} mpeg2_status; + +/* MPEG2 Current Workload Status Codes */ +typedef enum { + MPEG2_WL_EMPTY = 0, + MPEG2_WL_DMEM_DATA = (1 << 0), + MPEG2_WL_REF_INFO = (1 << 1), + MPEG2_WL_PARTIAL_SLICE = (1 << 2), + MPEG2_WL_DANGLING_FIELD = (1 << 3), + MPEG2_WL_COMPLETE = (1 << 4), + MPEG2_WL_MISSING_TF = (1 << 5), + MPEG2_WL_MISSING_BF = (1 << 6), + MPEG2_WL_UNSUPPORTED = (1 << 7), + /* Error codes */ + MPEG2_WL_CORRUPTED_SEQ_HDR = (1 << 8), + MPEG2_WL_CORRUPTED_SEQ_EXT = (1 << 9), + MPEG2_WL_CORRUPTED_SEQ_DISP_EXT = (1 << 10), + MPEG2_WL_CORRUPTED_GOP_HDR = (1 << 11), + MPEG2_WL_CORRUPTED_PIC_HDR = (1 << 12), + MPEG2_WL_CORRUPTED_PIC_COD_EXT = (1 << 13), + MPEG2_WL_CORRUPTED_PIC_DISP_EXT = (1 << 14), + MPEG2_WL_CORRUPTED_QMAT_EXT = (1 << 15), + /* Error concealment codes */ + MPEG2_WL_CONCEALED_PIC_COD_TYPE = (1 << 16), + MPEG2_WL_CONCEALED_PIC_STRUCT = (1 << 17), + MPEG2_WL_CONCEALED_CHROMA_FMT = (1 << 18), + /* Type of dangling field */ + MPEG2_WL_DANGLING_FIELD_TOP = (1 << 24), + MPEG2_WL_DANGLING_FIELD_BOTTOM = (1 << 25), + MPEG2_WL_REPEAT_FIELD = (1 << 26), +} mpeg2_wl_status_codes; + +/* MPEG2 Parser Workload types */ +typedef enum +{ + /* MPEG2 Decoder Specific data */ + VIDDEC_WORKLOAD_MPEG2_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC, + + /* MPEG2 Quantization Matrix data */ + VIDDEC_WORKLOAD_MPEG2_QMAT, + + /* Past reference frame */ + VIDDEC_WORKLOAD_MPEG2_REF_PAST = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0, + + /* Future reference frame */ + VIDDEC_WORKLOAD_MPEG2_REF_FUTURE, + + /* Use current frame as reference */ + VIDDEC_WORKLOAD_MPEG2_REF_CURRENT_FRAME, + + /* User Data */ + VIDDEC_WORKLOAD_MPEG2_USERDATA = VIDDEC_WORKLOAD_USERDATA +} viddec_mpeg2_workloads; + +/* MPEG2 Decoder Specific Workitems */ +struct mpeg2_workitems +{ + /* Core Sequence Info 1 */ + uint32_t csi1; + + /* Core Sequence Info 2 */ + uint32_t csi2; + + /* Core Picture Info 1 */ + uint32_t cpi1; + + /* Core Picture Coding Extension Info 1 */ + uint32_t cpce1; + + /* Quantization Matrices */ + /* 0-15: Intra Quantization Matrix */ + /* 16-31: Non-Intra Quantization Matrix */ + /* 32-47: Chroma Intra Quantization Matrix */ + /* 48-63: 
Chroma Non-Intra Quantization Matrix */ + uint32_t qmat[MPEG2_QUANT_MAT_SIZE]; +}; + +/* MPEG2 Video Parser Context */ +struct viddec_mpeg2_parser +{ + /* MPEG2 Metadata Structure */ + struct mpeg2_info info; + + /* MPEG2 Workitems */ + struct mpeg2_workitems wi; + + /* Workload Status */ + uint32_t mpeg2_wl_status; + + /* Last parsed start code */ + int32_t mpeg2_last_parsed_sc; + + /* Last parsed slice start code. Used to start emitting workload items. */ + int32_t mpeg2_last_parsed_slice_sc; + + /* Current sequence headers parsed */ + uint8_t mpeg2_curr_seq_headers; + + /* Current frame headers parsed */ + uint8_t mpeg2_curr_frame_headers; + + /* Flag to indicate a valid sequence header was successfully parsed for */ + /* the current stream. */ + uint8_t mpeg2_valid_seq_hdr_parsed; + + /* Flag to indicate if quantization matrices are updated */ + uint8_t mpeg2_custom_qmat_parsed; + + /* Flag to indicate if reference table is updated with an entry */ + uint8_t mpeg2_ref_table_updated; + + /* Flag to indicate if the stream is MPEG2 */ + uint8_t mpeg2_stream; + + /* Flag to indicate if the previous picture metadata is parsed */ + uint8_t mpeg2_pic_metadata_complete; + + /* Number of active pan scan offsets */ + uint8_t mpeg2_num_pan_scan_offsets; + + /* Indicates the current stream level (Sequence/GOP/Picture) */ + /* Used for identifying the level for User Data */ + uint8_t mpeg2_stream_level; + + /* Flag to indicate if the current picture is interlaced or not */ + uint8_t mpeg2_picture_interlaced; + + /* Flag to indicate if the current field for interlaced picture is first */ + /* field or not. This flag is used only when mpeg2_picture_interlaced is */ + /* set to 1. */ + uint8_t mpeg2_first_field; + + /* Flag to indicate if the current parsed data has start of a frame */ + uint8_t mpeg2_frame_start; + + /* Temporal reference of the previous picture - Used to detect dangling fields */ + uint32_t mpeg2_prev_temp_ref; + + /* Previous picture structure - Used to identify the type of missing field */ + uint8_t mpeg2_prev_picture_structure; + + /* Flag to decide whether to use the current or next workload to dump workitems */ + uint8_t mpeg2_use_next_workload; + uint8_t mpeg2_first_slice_flag; +}; + +/* External Function Declarations */ +extern void *memset(void *s, int32_t c, uint32_t n); + +/* MPEG2 Parser Function Prototypes */ +void viddec_mpeg2_translate_attr (void *parent, void *ctxt); +void viddec_mpeg2_emit_workload (void *parent, void *ctxt); +void viddec_mpeg2_parse_seq_hdr (void *parent, void *ctxt); +void viddec_mpeg2_parse_gop_hdr (void *parent, void *ctxt); +void viddec_mpeg2_parse_pic_hdr (void *parent, void *ctxt); +void viddec_mpeg2_parse_and_append_user_data(void *parent, void *ctxt); +void viddec_mpeg2_parse_and_append_slice_data(void *parent, void *ctxt); +void viddec_mpeg2_parse_ext (void *parent, void *ctxt); + +/* MPEG2 wrapper functions for workload operations */ +void viddec_mpeg2_append_workitem (void *parent, viddec_workload_item_t *wi, uint8_t flag); +void viddec_mpeg2_append_pixeldata (void *parent, uint8_t flag); +viddec_workload_t* viddec_mpeg2_get_header (void *parent, uint8_t flag); +#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c new file mode 100644 index 0000000..6aa6120 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c @@ -0,0 +1,32 @@ +#include "viddec_mpeg2.h" +#include "viddec_fw_item_types.h" + + +void 
viddec_mpeg2_append_workitem(void *parent, viddec_workload_item_t *wi, uint8_t flag) +{ + return; +} + +void viddec_mpeg2_emit_workload(void *parent, void *ctxt) +{ + return; +} + +void viddec_mpeg2_append_pixeldata(void *parent, uint8_t flag) +{ + return; +} + +viddec_workload_t* viddec_mpeg2_get_header (void *parent, uint8_t flag) +{ + viddec_workload_t *ret; + if (flag) + { + ret = viddec_pm_get_next_header(parent); + } + else + { + ret = viddec_pm_get_header(parent); + } + return ret; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c new file mode 100644 index 0000000..e33a6d6 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c @@ -0,0 +1,114 @@ +/** + * viddec_mpeg2_frame_attr.c + * ------------------------- + * This is a helper file for viddec_mpeg2_workload.c to translate the data + * stored in the parser context into frame attributes in the workload. + */ + +#include "viddec_mpeg2.h" + +/* viddec_mpeg2_print_attr() - Prints collected frame attributes */ +static inline void viddec_mpeg2_print_attr(viddec_frame_attributes_t *attr) +{ + unsigned int index = 0; + + MPEG2_FA_DEB("Content_Size=%dx%d\n", attr->cont_size.width, + attr->cont_size.height); + MPEG2_FA_DEB("Repeat=%d\n", attr->mpeg2.repeat_first_field); + MPEG2_FA_DEB("Frame_Type=%d\n", attr->frame_type); + MPEG2_FA_DEB("Temporal_Reference=%d\n", attr->mpeg2.temporal_ref); + MPEG2_FA_DEB("Top_Field_First=%d\n", attr->mpeg2.top_field_first); + MPEG2_FA_DEB("Progressive_Frame=%d\n", attr->mpeg2.progressive_frame); + MPEG2_FA_DEB("Picture_Struct=%d\n", attr->mpeg2.picture_struct); + MPEG2_FA_DEB("Pan_Scan_Offsets=%d\n", attr->mpeg2.number_of_frame_center_offsets); + + for (index = 0; index < attr->mpeg2.number_of_frame_center_offsets; index++) + { + MPEG2_FA_DEB("\tPan_Scan_Offset_%d= %dx%d\n", index, + attr->mpeg2.frame_center_offset[index].horz, + attr->mpeg2.frame_center_offset[index].vert); + } + + return; +} + +/* viddec_mpeg2_set_default_values() - Resets attributes that are optional */ +/* in the bitstream to their default values. 
*/ +static inline void viddec_mpeg2_set_default_values(viddec_frame_attributes_t *attrs) +{ + unsigned int index = 0; + + attrs->mpeg2.number_of_frame_center_offsets = 0; + for (index = 0; index < MPEG2_MAX_VID_OFFSETS ; index++) + { + attrs->mpeg2.frame_center_offset[index].horz = 0; + attrs->mpeg2.frame_center_offset[index].vert = 0; + } + + return; +} + +/* viddec_mpeg2_translate_attr() - Translates metadata parsed into frame */ +/* attributes in the workload */ +void viddec_mpeg2_translate_attr(void *parent, void *ctxt) +{ + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Get workload */ + viddec_workload_t *wl = viddec_pm_get_header( parent ); + + /* Get attributes in workload */ + viddec_frame_attributes_t *attrs = &wl->attrs; + + /* Get the default values for optional attributes */ + viddec_mpeg2_set_default_values(attrs); + + /* Populate attributes from parser context */ + /* Content Size */ + attrs->cont_size.height = ((parser->info.seq_ext.vertical_size_extension << 12) + | parser->info.seq_hdr.vertical_size_value); + attrs->cont_size.width = ((parser->info.seq_ext.horizontal_size_extension << 12) + | parser->info.seq_hdr.horizontal_size_value); + + /* Repeat field */ + attrs->mpeg2.repeat_first_field = parser->info.pic_cod_ext.repeat_first_field; + + /* Temporal Reference */ + attrs->mpeg2.temporal_ref = parser->info.pic_hdr.temporal_reference; + + /* Top field first */ + attrs->mpeg2.top_field_first = parser->info.pic_cod_ext.top_field_first; + + /* Progressive frame */ + attrs->mpeg2.progressive_frame = parser->info.pic_cod_ext.progressive_frame; + + /* Picture Structure */ + attrs->mpeg2.picture_struct = parser->info.pic_cod_ext.picture_structure; + + /* Populate the frame type */ + switch (parser->info.pic_hdr.picture_coding_type) + { + case MPEG2_PC_TYPE_I: attrs->frame_type = VIDDEC_FRAME_TYPE_I; break; + case MPEG2_PC_TYPE_P: attrs->frame_type = VIDDEC_FRAME_TYPE_P; break; + case MPEG2_PC_TYPE_B: attrs->frame_type = VIDDEC_FRAME_TYPE_B; break; + default: attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; + } + + /* Update PanScan data */ + if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_PIC_DISP_EXT) + { + unsigned int index = 0; + attrs->mpeg2.number_of_frame_center_offsets = parser->mpeg2_num_pan_scan_offsets; + for (index = 0; index < parser->mpeg2_num_pan_scan_offsets; index++) + { + attrs->mpeg2.frame_center_offset[index].horz = parser->info.pic_disp_ext.frame_center_horizontal_offset[index]; + attrs->mpeg2.frame_center_offset[index].vert = parser->info.pic_disp_ext.frame_center_vertical_offset[index]; + } + } + + /* Print frame attributes */ + viddec_mpeg2_print_attr(attrs); + + return; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c new file mode 100644 index 0000000..56604a4 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c @@ -0,0 +1,1039 @@ +/** + * viddec_mpeg2_metadata.c + * ----------------------- + * This file contains all the routines to parse the information from MPEG2 + * elementary stream and store it in the parser context. Based on the data + * parsed, the state information in the context is updated. 
+ * + * Headers currently parsed from MPEG2 stream include: + * - Sequence Header + * - Sequence Extension + * - Sequence Display Extension + * - GOP Header + * - Picture Header + * - Picture Coding Extension + * - Quantization Matrix Extension + * - Picture Display Extension + * + * The slice data is parsed and appended into workload in viddec_mpeg2_parse.c + */ + +#include "viddec_mpeg2.h" + +/* Default quantization matrix values */ +const uint8_t mpeg2_default_intra_quant_matrix[MPEG2_QUANT_MAT_SIZE] = { + 8, 16, 19, 22, 26, 27, 29, 34, + 16, 16, 22, 24, 27, 29, 34, 37, + 19, 22, 26, 27, 29, 34, 34, 38, + 22, 22, 26, 27, 29, 34, 37, 40, + 22, 26, 27, 29, 32, 35, 40, 48, + 26, 27, 29, 32, 35, 40, 48, 58, + 26, 27, 29, 34, 38, 46, 56, 69, + 27, 29, 35, 38, 46, 56, 69, 83 +}; +const uint8_t mpeg2_default_non_intra_quant_matrix[MPEG2_QUANT_MAT_SIZE] = { + 16, 16, 16, 16, 16, 16, 16, 16, + 16, 16, 16, 16, 16, 16, 16, 16, + 16, 16, 16, 16, 16, 16, 16, 16, + 16, 16, 16, 16, 16, 16, 16, 16, + 16, 16, 16, 16, 16, 16, 16, 16, + 16, 16, 16, 16, 16, 16, 16, 16, + 16, 16, 16, 16, 16, 16, 16, 16, + 16, 16, 16, 16, 16, 16, 16, 16 +}; + +/* Matrix for converting scan order */ +const uint8_t mpeg2_classic_scan[MPEG2_QUANT_MAT_SIZE] = { + 0, 1, 8, 16, 9, 2, 3, 10, + 17, 24, 32, 25, 18, 11, 4, 5, + 12, 19, 26, 33, 40, 48, 41, 34, + 27, 20, 13, 6, 7, 14, 21, 28, + 35, 42, 49, 56, 57, 50, 43, 36, + 29, 22, 15, 23, 30, 37, 44, 51, + 58, 59, 52, 45, 38, 31, 39, 46, + 53, 60, 61, 54, 47, 55, 62, 63 +}; +const uint8_t mpeg2_alternate_scan[MPEG2_QUANT_MAT_SIZE] = { + 0, 8, 16, 24, 1, 9, 2, 10, + 17, 25, 32, 40, 48, 56, 57, 49, + 41, 33, 26, 18, 3, 11, 4, 12, + 19, 27, 34, 42, 50, 58, 35, 43, + 51, 59, 20, 28, 5, 13, 6, 14, + 21, 29, 36, 44, 52, 60, 37, 45, + 53, 61, 22, 30, 7, 15, 23, 31, + 38, 46, 54, 62, 39, 47, 55, 63 +}; + +/* Look-up tables for macro block address increment VLC */ +const uint8_t mb_addr_inc_tab1[16] = { + 0, 0, 7, 6, 5, 5, 4, 4, + 3, 3, 3, 3, 2, 2, 2, 2 +}; +const uint8_t mb_addr_inc_tab2[8] = { + 13, 12, 11, 10, 9, 9, 8, 8 +}; +const uint8_t mb_addr_inc_tab3[40] = { + 33, 32, 31, 30, 29, 28, 27, 26, + 25, 24, 23, 22, 21, 21, 20, 20, + 19, 19, 18, 18, 17, 17, 16, 16, + 15, 15, 15, 15, 15, 15, 15, 15, + 14, 14, 14, 14, 14, 14, 14, 14 +}; + +/* mpeg2_copy_matrix() - Copies quantization matrix from src */ +/* to dst */ +static inline void mpeg2_copy_matrix(const uint8_t *src, uint8_t *dst) +{ + register uint32_t index = 0; + for(index=0; index < MPEG2_QUANT_MAT_SIZE; index++) + dst[index] = src[index]; +}
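mpeg2_copy_matrix() above is a plain 64-byte copy; the interesting step is in the function that follows, which writes each stream byte through a scan table to undo the zigzag coding order. A compilable sketch of that inverse-zigzag placement, reusing the mpeg2_classic_scan values from this file (the 0..63 input sequence is made up for illustration):

```c
#include <stdio.h>
#include <stdint.h>

static const uint8_t classic_scan[64] = {
     0,  1,  8, 16,  9,  2,  3, 10,
    17, 24, 32, 25, 18, 11,  4,  5,
    12, 19, 26, 33, 40, 48, 41, 34,
    27, 20, 13,  6,  7, 14, 21, 28,
    35, 42, 49, 56, 57, 50, 43, 36,
    29, 22, 15, 23, 30, 37, 44, 51,
    58, 59, 52, 45, 38, 31, 39, 46,
    53, 60, 61, 54, 47, 55, 62, 63
};

int main(void)
{
    uint8_t matrix[64];
    int i;
    /* Pretend the bitstream carried the values 0..63 in zigzag coding order;
     * writing value i to matrix[scan[i]] lands it at its raster position. */
    for (i = 0; i < 64; i++)
        matrix[classic_scan[i]] = (uint8_t)i;

    /* matrix[] is now in raster order; the first row reads 0 1 5 6 14 15 27 28 */
    for (i = 0; i < 8; i++)
        printf("%2u ", (unsigned)matrix[i]);
    printf("\n");
    return 0;
}
```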
+ +/* mpeg2_get_quant_matrix() - Copies the next 64 bytes in the stream into the */ +/* given matrix in inverse zigzag scan order */ +static inline int32_t mpeg2_get_quant_matrix(void *parent, uint8_t *matrix, uint32_t alternate_scan) +{ + int32_t ret = 1; + uint32_t index = 0, code = 0; + const uint8_t *zigzag_scan = (const uint8_t *) mpeg2_classic_scan; + + if (alternate_scan) + { + zigzag_scan = (const uint8_t *) mpeg2_alternate_scan; + } + + /* Start extracting matrix co-efficients and copy them in */ + /* inverse zigzag scan order */ + for (index = 0; index < MPEG2_QUANT_MAT_SIZE; index++) + { + ret = viddec_pm_get_bits(parent, &code, MPEG2_BITS_EIGHT); + /* Quantization values cannot be zero. If a zero value is found, */ + /* further parsing is stopped and the existing values are used.*/ + if ((ret != 1) || (code == 0)) + { + ret = -1; + break; + } + matrix[zigzag_scan[index]] = (uint8_t)(code & 0xFF); + } + + return ret; +} + +/* viddec_mpeg2_parse_seq_hdr() - Parse sequence header metadata and store */ +/* in parser context */ +void viddec_mpeg2_parse_seq_hdr(void *parent, void *ctxt) +{ + int32_t ret_code = 0; + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Get Horizontal Frame Size */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.horizontal_size_value, 12); + + /* Get Vertical Frame Size */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.vertical_size_value, 12); + + /* Get Frame Aspect Ratio */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.aspect_ratio_information, 4); + + /* Get Frame Rate */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.frame_rate_code, 4); + + /* Get Bit Rate */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.bit_rate_value, 18); + + /* Skip Marker bit */ + ret_code |= viddec_pm_skip_bits(parent, 1); + + /* Get VBV Buffer Size Value */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.vbv_buffer_size_value, 10); + + /* Get Constrained Parameters Flag */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.constrained_parameters_flag, 1); + + /* Quantization Matrix Support */ + /* Get Intra Quantizer matrix, if available or use default values */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_intra_quantiser_matrix, 1); + if (parser->info.qnt_ext.load_intra_quantiser_matrix) + { + ret_code |= mpeg2_get_quant_matrix(parent, parser->info.qnt_mat.intra_quantiser_matrix, 0); + mpeg2_copy_matrix(parser->info.qnt_mat.intra_quantiser_matrix, parser->info.qnt_mat.chroma_intra_quantiser_matrix); + } + else + { + if (!parser->mpeg2_custom_qmat_parsed) + { + mpeg2_copy_matrix(mpeg2_default_intra_quant_matrix, parser->info.qnt_mat.intra_quantiser_matrix); + mpeg2_copy_matrix(mpeg2_default_intra_quant_matrix, parser->info.qnt_mat.chroma_intra_quantiser_matrix); + } + } + + /* Get Non-Intra Quantizer matrix, if available or use default values */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_non_intra_quantiser_matrix, 1); + if (parser->info.qnt_ext.load_non_intra_quantiser_matrix) + { + ret_code |= mpeg2_get_quant_matrix(parent, parser->info.qnt_mat.non_intra_quantiser_matrix, 0); + mpeg2_copy_matrix(parser->info.qnt_mat.non_intra_quantiser_matrix, parser->info.qnt_mat.chroma_non_intra_quantiser_matrix); + } + else + { + if (!parser->mpeg2_custom_qmat_parsed) + { + mpeg2_copy_matrix(mpeg2_default_non_intra_quant_matrix, parser->info.qnt_mat.non_intra_quantiser_matrix); + mpeg2_copy_matrix(mpeg2_default_non_intra_quant_matrix, parser->info.qnt_mat.chroma_non_intra_quantiser_matrix); + } + } + + /* Error handling */ + /* The return value from get_bits() function is accumulated. If the return value is not 1, */ + /* then there was an error getting the required information from the stream and the status */ + /* is updated for the current workload. */ + if (ret_code == 1) + { + /* This flag indicates a valid sequence header has been parsed and so even if */ + /* a sequence header is corrupted in the future, this valid sequence header */ + /* could be reused. */ + parser->mpeg2_valid_seq_hdr_parsed = true; + /* This flag indicates a valid custom quantization matrix has been parsed. */ + /* So, if in the future, there is an error parsing quantization matrix, the */ + /* parser will use the previously parsed custom values. */ + if ((parser->info.qnt_ext.load_intra_quantiser_matrix) + || (parser->info.qnt_ext.load_non_intra_quantiser_matrix)) + { + parser->mpeg2_custom_qmat_parsed = true; + } + MPEG2_DEB("Sequence header parsed successfully.\n"); + } + else + { + /* Setting status to mark parser error while emitting the current workload. */ + parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_HDR; + MPEG2_DEB("Sequence header corrupted.\n"); + } + + parser->mpeg2_stream = false; + parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ; + parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ; + parser->mpeg2_stream_level = MPEG2_LEVEL_SEQ; + + return; +}
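Every header parser in this file leans on the same idiom seen above: OR together the return codes of all viddec_pm_get_bits()/viddec_pm_skip_bits() calls and compare the accumulator against 1 at the end. Because the readers return 1 on success and -1 on failure, the accumulator stays exactly 1 only when every read succeeded. A self-contained sketch with a mock reader (mock_get_bits is hypothetical):

```c
#include <stdio.h>
#include <stdint.h>

/* Returns 1 on success, -1 on failure, like viddec_pm_get_bits(). */
static int32_t mock_get_bits(uint32_t *out, uint32_t nbits, int fail)
{
    (void)nbits;
    *out = 0;                  /* a real reader would consume the stream */
    return fail ? -1 : 1;
}

int main(void)
{
    uint32_t v;
    int32_t ret_code = 0;
    ret_code |= mock_get_bits(&v, 12, 0);   /* ok                        */
    ret_code |= mock_get_bits(&v, 4,  1);   /* fails: 1 | -1 == -1       */
    ret_code |= mock_get_bits(&v, 18, 0);   /* accumulator stays -1      */
    printf(ret_code == 1 ? "header ok\n" : "header corrupted\n");
    return 0;
}
```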
+ +/* viddec_mpeg2_parse_gop_hdr() - Parse group of pictures header info and */ +/* store it in parser context */ +void viddec_mpeg2_parse_gop_hdr(void *parent, void *ctxt) +{ + int32_t ret_code = 0; + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Skip first 25 bits */ + /* Skip time_code */ + ret_code |= viddec_pm_skip_bits(parent, 25); + + /* Get closed gop info */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.gop_hdr.closed_gop, 1); + + /* Get broken link info */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.gop_hdr.broken_link, 1); + + if (ret_code == 1) + { + MPEG2_DEB("GOP Header parsed successfully.\n"); + } + else + { + parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_GOP_HDR; + MPEG2_DEB("GOP header corrupted.\n"); + } + + parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_GOP; + parser->mpeg2_stream_level = MPEG2_LEVEL_GOP; + + return; +}
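The 25 bits skipped at the top of the GOP parser are the time_code, which this parser never consumes. For reference, a hedged sketch of how those bits would unpack if a caller wanted them; the field layout follows ISO/IEC 13818-2 section 6.2.2.6, and the struct/function names here are illustrative:

```c
#include <stdio.h>
#include <stdint.h>

typedef struct { uint8_t drop, hh, mm, ss, pic; } gop_time_t;

static gop_time_t unpack_time_code(uint32_t tc /* 25-bit value */)
{
    gop_time_t t;
    t.drop = (tc >> 24) & 0x01;   /* drop_frame_flag          */
    t.hh   = (tc >> 19) & 0x1F;   /* time_code_hours (5 bits) */
    t.mm   = (tc >> 13) & 0x3F;   /* time_code_minutes        */
    /* bit 12 is a marker bit                                  */
    t.ss   = (tc >> 6)  & 0x3F;   /* time_code_seconds        */
    t.pic  = tc         & 0x3F;   /* time_code_pictures       */
    return t;
}

int main(void)
{
    /* 01:02:03, picture 4, no drop frame, marker bit set */
    uint32_t tc = (0u << 24) | (1u << 19) | (2u << 13) | (1u << 12) | (3u << 6) | 4u;
    gop_time_t t = unpack_time_code(tc);
    printf("%02u:%02u:%02u.%02u drop=%u\n",
           (unsigned)t.hh, (unsigned)t.mm, (unsigned)t.ss, (unsigned)t.pic, (unsigned)t.drop);
    return 0;
}
```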
+ +/* viddec_mpeg2_parse_pic_hdr() - Parse picture header info and store it in */ +/* parser context */ +void viddec_mpeg2_parse_pic_hdr(void *parent, void *ctxt) +{ + int32_t ret_code = 0, found_error = 0; + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Get Temporal Reference info */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.temporal_reference, 10); + + /* Get Picture Coding type and skip the following byte */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.picture_coding_type, 3); + + /* Error Handling and Concealment */ + /* Picture coding type should be one of I, P or B */ + if ((parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_I) && + (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_P) && + (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_B)) + { + found_error = 1; + } + /* The first frame after a gop header should be a coded I picture as per */ + /* section 6.3.1 in MPEG2 Specification. */ + else if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_GOP) + { + if (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_I) + { + found_error = 1; + } + } + /* The first frame after a sequence header cannot be a coded B picture as per */ + /* section 6.1.1.6 in MPEG2 Specification. */ + else if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ) + { + if (parser->info.pic_hdr.picture_coding_type == MPEG2_PC_TYPE_B) + { + found_error = 1; + } + } + + /* If there is an error parsing picture coding type, do error concealment and continue. */ + if ((ret_code != 1) || (found_error)) + { + if (found_error) + { + /* Setting status to mark parser error while emitting the current workload. */ + parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_HDR; + MPEG2_DEB("Picture header corrupted.\n"); + } + + /* Error concealment for picture coding type - Default to I picture. */ + parser->info.pic_hdr.picture_coding_type = MPEG2_PC_TYPE_I; + parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_COD_TYPE; + MPEG2_DEB("Picture Coding Type corrupted. Concealing to I type.\n"); + } + + /* Skip next 16 bits */ + /* Skip vbv_delay */ + ret_code |= viddec_pm_skip_bits(parent, 16); + + /* If Picture Coding type is either P or B then */ + /* Get forward vector code */ + if ((MPEG2_PC_TYPE_P == parser->info.pic_hdr.picture_coding_type) || + (MPEG2_PC_TYPE_B == parser->info.pic_hdr.picture_coding_type)) + { + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.full_pel_forward_vect, 1); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.forward_f_code, 3); + } + else + { + parser->info.pic_hdr.full_pel_forward_vect = 0; + parser->info.pic_hdr.forward_f_code = 0; + } + + /* If Picture coding type is B then */ + /* Get backward vector code */ + if (MPEG2_PC_TYPE_B == parser->info.pic_hdr.picture_coding_type) + { + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.full_pel_backward_vect, 1); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.backward_f_code, 3); + } + else + { + parser->info.pic_hdr.full_pel_backward_vect = 0; + parser->info.pic_hdr.backward_f_code = 0; + } + + if (ret_code == 1) + { + MPEG2_DEB("Picture header parsed successfully.\n"); + } + else + { + /* Setting status to mark parser error while emitting the current workload. */ + parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_HDR; + MPEG2_DEB("Picture header corrupted.\n"); + } + + parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC; + parser->mpeg2_stream_level = MPEG2_LEVEL_PIC; + + return; +}
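The concealment logic above boils down to three checks on picture_coding_type. A standalone sketch of the same predicate; the constants mirror mpeg2_picture_type and mpeg2_headers from viddec_mpeg2.h, and unlike the parser's else-if chain (where only the first matching rule fires) this version applies each rule independently:

```c
#include <stdio.h>
#include <stdint.h>

enum { PC_I = 1, PC_P = 2, PC_B = 3 };
enum { HDR_SEQ = 1 << 0, HDR_GOP = 1 << 3 };

static int pic_type_is_legal(uint32_t type, uint8_t frame_headers)
{
    if (type != PC_I && type != PC_P && type != PC_B) return 0;
    if ((frame_headers & HDR_GOP) && type != PC_I)    return 0;  /* 6.3.1: I after GOP   */
    if ((frame_headers & HDR_SEQ) && type == PC_B)    return 0;  /* 6.1.1.6: no B first  */
    return 1;
}

int main(void)
{
    printf("%d\n", pic_type_is_legal(PC_P, HDR_GOP));           /* 0 */
    printf("%d\n", pic_type_is_legal(PC_B, HDR_SEQ));           /* 0 */
    printf("%d\n", pic_type_is_legal(PC_I, HDR_GOP | HDR_SEQ)); /* 1 */
    return 0;
}
```

When a rule fails, the parser does not abort: it conceals by forcing the type to I and flags MPEG2_WL_CONCEALED_PIC_COD_TYPE so the workload error code reflects the guess.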
+ +/* viddec_mpeg2_parse_ext_seq() - Parse Sequence extension metadata and */ +/* store in parser context */ +void viddec_mpeg2_parse_ext_seq(void *parent, void *ctxt) +{ + int32_t ret_code = 0; + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Get Profile and Level info */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.profile_and_level_indication, 8); + + /* Get Progressive Sequence Flag */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.progressive_sequence, 1); + + /* Get Chroma Format */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.chroma_format, 2); + + /* Error Concealment */ + /* If there is an error parsing chroma format, do error concealment and continue. */ + if ((ret_code != 1) || (parser->info.seq_ext.chroma_format == MPEG2_CF_RESERVED)) + { + if (parser->info.seq_ext.chroma_format == MPEG2_CF_RESERVED) + { + /* Setting status to mark parser error while emitting the current workload. */ + parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_EXT; + MPEG2_DEB("Sequence extension corrupted.\n"); + } + + /* Error concealment for chroma format - Default to 4:2:0 */ + parser->info.seq_ext.chroma_format = MPEG2_CF_420; + parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_CHROMA_FMT; + MPEG2_DEB("Chroma Format corrupted. Concealing to 4:2:0.\n"); + } + + /* Get Content Size Extension Data */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.horizontal_size_extension, 2); + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.vertical_size_extension, 2); + + /* Get Bit Rate Extension */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.bit_rate_extension, 12); + + /* Skip Marker bit */ + ret_code |= viddec_pm_skip_bits(parent, 1); + + /* Get VBV Buffer Size Extension Data */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.vbv_buffer_size_extension, 8); + + /* Skip 1 bit */ + /* Skip low_delay */ + ret_code |= viddec_pm_skip_bits(parent, 1); + + /* Get Frame Rate extension data */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.frame_rate_extension_n, 2); + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.frame_rate_extension_d, 5); + + if (ret_code == 1) + { + MPEG2_DEB("Sequence extension header parsed successfully.\n"); + } + else + { + /* Setting status to mark parser error while emitting the current workload. */ + parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_EXT; + MPEG2_DEB("Sequence extension corrupted.\n"); + } + + /* Check if the last parsed start code was that of sequence header. */ + /* If true, seq extension followed seq header => MPEG2 Stream */ + parser->mpeg2_stream = (parser->mpeg2_last_parsed_sc == MPEG2_SC_SEQ_HDR) ? true:false; + parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_EXT; + parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_EXT; + + return; +}
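The two 2-bit size extensions read above widen the 12-bit sequence-header dimensions to 14 bits; viddec_mpeg2_translate_attr() earlier in this patch combines them exactly this way when filling the workload's content size. A tiny sketch of the combination:

```c
#include <stdio.h>
#include <stdint.h>

/* (extension << 12) | value, as done in viddec_mpeg2_frame_attr.c */
static uint32_t full_size(uint32_t base12, uint32_t ext2)
{
    return (ext2 << 12) | (base12 & 0xFFF);
}

int main(void)
{
    printf("%u\n", (unsigned)full_size(1920, 0)); /* 1920: fits in 12 bits        */
    printf("%u\n", (unsigned)full_size(0, 1));    /* 4096: needs the extension bit */
    return 0;
}
```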
+ +/* viddec_mpeg2_parse_ext_seq_disp() - Parse Sequence Display extension */ +/* metadata and store in parser context */ +void viddec_mpeg2_parse_ext_seq_disp(void *parent, void *ctxt) +{ + int32_t ret_code = 0; + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Get video format */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.video_format, 3); + + /* Check if color description info is present */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.colour_description, 1); + + /* If color description is found, get color primaries info */ + /* and transfer characteristics */ + if (parser->info.seq_disp_ext.colour_description) + { + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.colour_primaries, 8); + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.transfer_characteristics, 8); + ret_code |= viddec_pm_skip_bits(parent, 8); + } + + /* Get Display Horizontal Size */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.display_horizontal_size, 14); + ret_code |= viddec_pm_skip_bits(parent, 1); + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.display_vertical_size, 14); + + if (ret_code == 1) + { + MPEG2_DEB("Sequence display extension parsed successfully.\n"); + } + else + { + /* Setting status to mark parser error while emitting the current workload. */ + parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_DISP_EXT; + MPEG2_DEB("Sequence display extension corrupted.\n"); + } + + /* Set flag to indicate Sequence Display Extension is present */ + parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_DISP_EXT; + parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_DISP_EXT; + + return; +} + +/* viddec_mpeg2_parse_ext_seq_scal() - Parse Sequence Scalable extension */ +/* metadata and store in parser context */ +void viddec_mpeg2_parse_ext_seq_scal(void *parent, void *ctxt) +{ + int32_t ret_code = 0; + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Get scalable mode */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_scal_ext.scalable_mode, 2); + + if (ret_code == 1) + { + MPEG2_DEB("Sequence scalable extension parsed successfully.\n"); + } + + /* Set flag to indicate Sequence Scalable Extension is present */ + parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_SCAL_EXT; + parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_SCAL_EXT; + + return; +} + +/* viddec_mpeg2_parse_ext_pic() - Parse Picture Coding extension */ +/* metadata and store in parser context */ +void viddec_mpeg2_parse_ext_pic(void *parent, void *ctxt) +{ + int32_t ret_code = 0, found_error = 0; + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Get Forward/Backward, Horizontal/Vertical codes */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode00, 4); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode01, 4); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode10, 4); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode11, 4); + + /* Get Intra DC Precision */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.intra_dc_precision, 2); + + /* Get Picture Structure */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.picture_structure, 2); + + /* Error Handling and Concealment */ + /* Picture structure should be frame, top field or bottom field */ + if (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_RESERVED) + { + found_error = 1; + } + /* All pictures in progressive sequence should be frame picture */ + else if (parser->info.seq_ext.progressive_sequence) + { + if (parser->info.pic_cod_ext.picture_structure != MPEG2_PIC_STRUCT_FRAME) + { + found_error = 1; + } + } + + /* If there is an error parsing picture structure, do error concealment and continue. */ + if ((ret_code != 1) || (found_error)) + { + if (found_error) + { + /* Setting status to mark parser error while emitting the current workload. */ + parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_COD_EXT; + MPEG2_DEB("Picture coding extension corrupted.\n"); + } + + /* Error concealment for picture structure - Default to frame picture. */ + parser->info.pic_cod_ext.picture_structure = MPEG2_PIC_STRUCT_FRAME; + parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_STRUCT; + MPEG2_DEB("Picture Structure corrupted. 
Concealing to Frame picture.\n"); + } + + /* Get flags */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.top_field_first, 1); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.frame_pred_frame_dct, 1); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.concealment_motion_vectors, 1); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.q_scale_type, 1); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.intra_vlc_format, 1); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.alternate_scan, 1); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.repeat_first_field, 1); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.chroma_420_type, 1); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.progressive_frame, 1); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.composite_display_flag, 1); + + /* Error concealment for frame picture */ + if ((parser->info.pic_cod_ext.top_field_first) + || (parser->info.pic_cod_ext.frame_pred_frame_dct) + || (parser->info.pic_cod_ext.repeat_first_field) + || (parser->info.pic_cod_ext.progressive_frame)) + { + if (parser->info.pic_cod_ext.picture_structure != MPEG2_PIC_STRUCT_FRAME) + { + parser->info.pic_cod_ext.picture_structure = MPEG2_PIC_STRUCT_FRAME; + parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_STRUCT; + MPEG2_DEB("Picture Structure corrupted. Concealing to Frame picture.\n"); + } + } + + if (ret_code == 1) + { + MPEG2_DEB("Picture coding extension parsed successfully.\n"); + } + else + { + /* Setting status to mark parser error while emitting the current workload. */ + parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_COD_EXT; + MPEG2_DEB("Picture coding extension corrupted.\n"); + } + + /* Dangling field detection */ + /* If the previous picture is the first field, then the temporal reference number */ + /* should match with the second field. Otherwise, one of the fields in the previous */ + /* picture is missing and dangling field error is marked. The workload containing */ + /* the previous picture is emitted out and current picture data is added to the next */ + /* workload. The mpeg2_use_next_workload variable is used as a flag to direct the */ + /* items into the current/next workload. */ + if ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field)) + { + if (parser->mpeg2_prev_temp_ref != parser->info.pic_hdr.temporal_reference) + { + /* Mark dangling field info in workload status */ + parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD; + if (parser->mpeg2_prev_picture_structure == MPEG2_PIC_STRUCT_BOTTOM) + { + parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD_TOP; + } + else + { + parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD_BOTTOM; + } + /* Set flag stating current workload is done */ + parser->mpeg2_pic_metadata_complete = true; + /* Set flag to use the next workload for adding workitems for */ + /* the current frame */ + parser->mpeg2_use_next_workload = true; + /* Toggle first field flag to compensate for missing field */ + parser->mpeg2_first_field = (parser->mpeg2_first_field) ? 
false : true; + } + else + { + /* Same field repeated */ + if (parser->mpeg2_prev_picture_structure == parser->info.pic_cod_ext.picture_structure) + { + /* Mark unsupported in workload status */ + parser->mpeg2_wl_status |= MPEG2_WL_REPEAT_FIELD; + } + } + } + + /* Set context variables for interlaced picture handling */ + if (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_FRAME) + { + /* Frame picture found. Reset variables used for interlaced fields picture. */ + parser->mpeg2_picture_interlaced = false; + parser->mpeg2_first_field = false; + parser->mpeg2_use_next_workload = false; + } + else + { + /* Interlaced fields picture found. */ + parser->mpeg2_picture_interlaced = true; + parser->mpeg2_first_field = (parser->mpeg2_first_field) ? false : true; + } + + /* Set flags */ + parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_COD_EXT; + parser->mpeg2_prev_temp_ref = parser->info.pic_hdr.temporal_reference; + parser->mpeg2_prev_picture_structure = parser->info.pic_cod_ext.picture_structure; + if ((!parser->mpeg2_picture_interlaced) + || ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field))) + { + parser->mpeg2_frame_start = true; + } + + return; +} + +/* viddec_mpeg2_parse_ext_pic_disp() - Parse Picture Display extension */ +/* metadata and store in parser context */ +void viddec_mpeg2_parse_ext_pic_disp(void *parent, void *ctxt) +{ + int32_t ret_code = 0; + uint32_t index = 0; + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Determine number of offsets */ + if (parser->info.seq_ext.progressive_sequence) + { + if (parser->info.pic_cod_ext.repeat_first_field) + { + parser->mpeg2_num_pan_scan_offsets = + (parser->info.pic_cod_ext.top_field_first) ? 3 : 2; + } + else /* Not repeat field */ + parser->mpeg2_num_pan_scan_offsets = 1; + } + else /* Not progressive sequence */ + { + /* Check if picture structure is a field */ + if ((parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_TOP) || + (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_BOTTOM)) + { + parser->mpeg2_num_pan_scan_offsets = 1; + } + else + { + parser->mpeg2_num_pan_scan_offsets = + (parser->info.pic_cod_ext.repeat_first_field) ? 3 : 2; + } + } + + /* Get the offsets */ + for (index = 0; index < parser->mpeg2_num_pan_scan_offsets; index++) + { + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_disp_ext.frame_center_horizontal_offset[index], 16); + ret_code |= viddec_pm_skip_bits(parent, 1); + ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_disp_ext.frame_center_vertical_offset[index], 16); + ret_code |= viddec_pm_skip_bits(parent, 1); + } + + if (ret_code == 1) + { + MPEG2_DEB("Picture display extension parsed successfully.\n"); + } + else + { + /* Setting status to mark parser error while emitting the current workload. 
*/ + parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_DISP_EXT; + MPEG2_DEB("Picture display extension corrupted.\n"); + } + + /* Set flag to indicate picture display extension is found */ + parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_DISP_EXT; + return; +}
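A compilable restatement of the offset-count decision just made in viddec_mpeg2_parse_ext_pic_disp(), i.e. the number_of_frame_centre_offsets rule from ISO/IEC 13818-2 (constants mirror mpeg2_picture_structure):

```c
#include <stdio.h>
#include <stdint.h>

enum { PIC_TOP = 1, PIC_BOTTOM = 2, PIC_FRAME = 3 };

static uint8_t num_pan_scan_offsets(int progressive_seq, int repeat_first_field,
                                    int top_field_first, int picture_structure)
{
    if (progressive_seq)
        return repeat_first_field ? (top_field_first ? 3 : 2) : 1;
    /* Interlaced sequence: a field picture carries one offset */
    if (picture_structure == PIC_TOP || picture_structure == PIC_BOTTOM)
        return 1;
    return repeat_first_field ? 3 : 2;
}

int main(void)
{
    printf("%u\n", (unsigned)num_pan_scan_offsets(1, 1, 1, PIC_FRAME)); /* 3 */
    printf("%u\n", (unsigned)num_pan_scan_offsets(0, 0, 0, PIC_TOP));   /* 1 */
    return 0;
}
```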
+ +/* viddec_mpeg2_parse_ext_quant() - Parse Quantization Matrix extension */ +/* metadata and store in parser context */ +void viddec_mpeg2_parse_ext_quant(void *parent, void *ctxt) +{ + int32_t ret_code = 0; + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Quantization Matrix Support */ + /* Get Intra Quantizer matrix, if available or use default values */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_intra_quantiser_matrix, 1); + if (parser->info.qnt_ext.load_intra_quantiser_matrix) + { + ret_code |= mpeg2_get_quant_matrix(parent, + parser->info.qnt_mat.intra_quantiser_matrix, + parser->info.pic_cod_ext.alternate_scan); + mpeg2_copy_matrix(parser->info.qnt_mat.intra_quantiser_matrix, + parser->info.qnt_mat.chroma_intra_quantiser_matrix); + } + + /* Get Non-Intra Quantizer matrix, if available */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_non_intra_quantiser_matrix, 1); + if (parser->info.qnt_ext.load_non_intra_quantiser_matrix) + { + ret_code |= mpeg2_get_quant_matrix(parent, + parser->info.qnt_mat.non_intra_quantiser_matrix, + parser->info.pic_cod_ext.alternate_scan); + mpeg2_copy_matrix(parser->info.qnt_mat.non_intra_quantiser_matrix, + parser->info.qnt_mat.chroma_non_intra_quantiser_matrix); + } + + /* Get Chroma Intra Quantizer matrix, if available */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_chroma_intra_quantiser_matrix, 1); + if (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) + { + ret_code |= mpeg2_get_quant_matrix(parent, + parser->info.qnt_mat.chroma_intra_quantiser_matrix, + parser->info.pic_cod_ext.alternate_scan); + } + + /* Get Chroma Non-Intra Quantizer matrix, if available */ + ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix, 1); + if (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) + { + ret_code |= mpeg2_get_quant_matrix(parent, + parser->info.qnt_mat.chroma_non_intra_quantiser_matrix, + parser->info.pic_cod_ext.alternate_scan); + } + + if (ret_code == 1) + { + MPEG2_DEB("Quantization matrix extension parsed successfully.\n"); + } + else + { + /* Setting status to mark parser error while emitting the current workload. */ + parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_QMAT_EXT; + MPEG2_DEB("Quantization matrix extension corrupted.\n"); + } + + /* Set quantization matrices updated flag */ + if ( (parser->info.qnt_ext.load_intra_quantiser_matrix) || + (parser->info.qnt_ext.load_non_intra_quantiser_matrix) || + (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) || + (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) ) + { + MPEG2_DEB("Custom quantization matrix found.\n"); + } + + return; +} + +/* viddec_mpeg2_parse_ext() - Parse extension metadata and store in parser */ +/* context */ +void viddec_mpeg2_parse_ext(void *parent, void *ctxt) +{ + uint32_t ext_code = 0; + + /* Get extension start code */ + viddec_pm_get_bits(parent, &ext_code, 4); + + /* Switch on extension type */ + switch ( ext_code ) + { + /* Sequence Extension Info */ + case MPEG2_EXT_SEQ: + viddec_mpeg2_parse_ext_seq(parent, ctxt); + break; + + /* Sequence Display Extension info */ + case MPEG2_EXT_SEQ_DISP: + viddec_mpeg2_parse_ext_seq_disp(parent, ctxt); + break; + + /* Sequence Scalable Extension info */ + case MPEG2_EXT_SEQ_SCAL: + viddec_mpeg2_parse_ext_seq_scal(parent, ctxt); + break; + + /* Picture Coding Extension */ + case MPEG2_EXT_PIC_CODING: + viddec_mpeg2_parse_ext_pic(parent, ctxt); + break; + + /* Picture Display Extension */ + case MPEG2_EXT_PIC_DISP: + viddec_mpeg2_parse_ext_pic_disp(parent, ctxt); + break; + + /* Quantization Extension */ + case MPEG2_EXT_QUANT_MAT: + viddec_mpeg2_parse_ext_quant(parent, ctxt); + break; + + default: + break; + } /* Switch, on extension type */ + + return; +}
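The switch above maps the 4-bit extension_start_code_identifier to a handler. An equivalent, hedged alternative using a lookup table (the handlers here are stubs and the IDs mirror mpeg2_ext_start_codes), shown only to illustrate the design trade-off; the switch form keeps the fall-through for reserved IDs explicit:

```c
#include <stdio.h>
#include <stdint.h>

static void ext_seq(void)      { puts("seq ext"); }
static void ext_seq_disp(void) { puts("seq disp ext"); }
static void ext_pic_cod(void)  { puts("pic coding ext"); }

/* Indexed by extension ID; reserved/unhandled IDs stay NULL. */
static void (*const ext_handlers[11])(void) = {
    [1] = ext_seq,        /* MPEG2_EXT_SEQ        */
    [2] = ext_seq_disp,   /* MPEG2_EXT_SEQ_DISP   */
    [8] = ext_pic_cod,    /* MPEG2_EXT_PIC_CODING */
};

static void dispatch_ext(uint32_t ext_code)
{
    if (ext_code < 11 && ext_handlers[ext_code])
        ext_handlers[ext_code]();   /* unknown codes are ignored, as in the switch default */
}

int main(void)
{
    dispatch_ext(1);  /* seq ext        */
    dispatch_ext(8);  /* pic coding ext */
    dispatch_ext(6);  /* reserved: ignored */
    return 0;
}
```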
+ +/* viddec_mpeg2_parse_and_append_user_data() - Parse user data and append */ +/* to workload. */ +void viddec_mpeg2_parse_and_append_user_data(void *parent, void *ctxt) +{ + uint32_t user_data = 0; + viddec_workload_item_t wi; + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Set the user data level (SEQ/GOP/PIC) in the workitem type. */ + switch (parser->mpeg2_stream_level) + { + case MPEG2_LEVEL_SEQ: + { + wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA; + break; + } + case MPEG2_LEVEL_GOP: + { + wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA; + break; + } + case MPEG2_LEVEL_PIC: + { + wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA; + break; + } + default: + { + wi.vwi_type = VIDDEC_WORKLOAD_INVALID; + break; + } + } + + /* Read 1 byte of user data and store it in workitem for the current */ + /* stream level (SEQ/GOP/PIC). Keep adding data payloads till it reaches */ + /* size 11. When it is 11, the maximum user data payload size, append the */ + /* workitem. This loop is repeated till all user data is extracted and */ + /* appended. */ + wi.user_data.size = 0; + memset(&(wi.user_data), 0, sizeof(wi.user_data)); + while(viddec_pm_get_bits(parent, &user_data, MPEG2_BITS_EIGHT) != -1) + { + /* Store the valid byte in data payload */ + wi.user_data.data_payload[wi.user_data.size] = user_data; + wi.user_data.size++; + + /* When size exceeds payload size, append workitem and continue */ + if (wi.user_data.size >= 11) + { + viddec_pm_setup_userdata(&wi); + viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload); + viddec_fw_reset_workload_item(&wi); + wi.user_data.size = 0; + } + } + /* If size is not 0, append remaining user data. */ + if (wi.user_data.size > 0) + { + viddec_pm_setup_userdata(&wi); + viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload); + wi.user_data.size = 0; + } + + MPEG2_DEB("User data @ Level %d found.\n", parser->mpeg2_stream_level); + return; +} + +static inline uint32_t get_mb_addr_increment(uint32_t *data) +{ + if (*data >= 1024) + { + return 1; + } + else if (*data >= 128) + { + *data >>= 6; + return mb_addr_inc_tab1[*data]; + } + else if (*data >= 64) + { + *data >>= 3; + *data -= 8; + return mb_addr_inc_tab2[*data]; + } + else + { + *data -= 24; + return mb_addr_inc_tab3[*data]; + } +} + +static void viddec_mpeg2_get_first_mb_number(void *parent, void *ctxt, uint32_t *first_mb) +{ + uint32_t mb_row = 0, mb_width = 0, prev_mb_addr = 0; + uint32_t temp = 0; + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + *first_mb = 0; + mb_row = ((parser->mpeg2_last_parsed_slice_sc & 0xFF) - 1); + mb_width = parser->info.seq_hdr.horizontal_size_value >> 4; + prev_mb_addr = (mb_row * mb_width) - 1; + + /* Skip slice start code */ + viddec_pm_skip_bits(parent, 32); + + if (parser->info.seq_hdr.vertical_size_value > 2800) + { + /* Get 3 bits of slice_vertical_position_extension */ + viddec_pm_get_bits(parent, &temp, 3); + mb_row += (temp << 7); + } + + /* Skip priority_breakpoint if sequence scalable extension is present */ + if (parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ_SCAL_EXT) + { + /* Skip 7 bits if scalable mode is 00 (Data partition) */ + if (parser->info.seq_scal_ext.scalable_mode == 0) + { + viddec_pm_skip_bits(parent, 7); + } + } + + /* Skip quantizer_scale */ + viddec_pm_skip_bits(parent, 5); + + /* Skip a few bits with slice information */ + temp = 0; + viddec_pm_peek_bits(parent, &temp, 1); + if (temp == 0x1) + { + /* Skip intra_slice_flag(1), intra_slice(1) and reserved_bits(7) */ + viddec_pm_skip_bits(parent, 9); + temp=0; + viddec_pm_peek_bits(parent, &temp, 1); + while (temp == 0x1) + { + /* Skip extra_bit_slice(1) and extra_information_slice(8) */ + viddec_pm_skip_bits(parent, 9); + temp=0; + viddec_pm_peek_bits(parent, &temp, 1); + } + } + + /* Skip extra_bit_slice flag */ + viddec_pm_skip_bits(parent, 1); + + /* Increment prev_mb_addr by 33 for every 11 bits of macroblock_escape string */ + temp=0; + viddec_pm_peek_bits(parent, &temp, 11); + while (temp == 0x8) + { + viddec_pm_skip_bits(parent, 11); + prev_mb_addr += 33; + temp=0; + viddec_pm_peek_bits(parent, &temp, 11); + } + + /* Get the mb_addr_increment and add it to prev_mb_addr to get the current mb number. */ + *first_mb = prev_mb_addr + get_mb_addr_increment(&temp); + MPEG2_DEB("First MB number in slice is 0x%08X.\n", *first_mb); + + return; +}
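viddec_mpeg2_get_first_mb_number() above peeks an 11-bit window, adds 33 per macroblock_escape code (0x008), and finishes with get_mb_addr_increment(). A standalone sketch of that table decode, with the tables copied from the top of this file (the sample input values are illustrative):

```c
#include <stdio.h>
#include <stdint.h>

static const uint8_t tab1[16] = { 0, 0, 7, 6, 5, 5, 4, 4, 3, 3, 3, 3, 2, 2, 2, 2 };
static const uint8_t tab2[8]  = { 13, 12, 11, 10, 9, 9, 8, 8 };
static const uint8_t tab3[40] = {
    33, 32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 21, 20, 20,
    19, 19, 18, 18, 17, 17, 16, 16, 15, 15, 15, 15, 15, 15, 15, 15,
    14, 14, 14, 14, 14, 14, 14, 14
};

/* data is the 11-bit lookahead window; valid VLC codes only (data >= 24). */
static uint32_t mb_addr_increment(uint32_t data)
{
    if (data >= 1024) return 1;               /* VLC '1'                    */
    if (data >= 128)  return tab1[data >> 6];
    if (data >= 64)   return tab2[(data >> 3) - 8];
    return tab3[data - 24];
}

int main(void)
{
    printf("%u\n", (unsigned)mb_addr_increment(0x400)); /* 1: code '1'   */
    printf("%u\n", (unsigned)mb_addr_increment(0x300)); /* 2: code '011' */
    /* 0x008 is macroblock_escape: the caller adds 33 and re-peeks instead. */
    return 0;
}
```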
+ +/* Parse slice data to get the number of macroblocks in the current slice and then */ +/* append as pixel data. */ +void viddec_mpeg2_parse_and_append_slice_data(void *parent, void *ctxt) +{ + uint32_t bit_off=0, start_byte=0, first_mb = 0; + uint8_t is_emul=0; + viddec_workload_item_t wi; + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Get current byte position */ + viddec_pm_get_au_pos(parent, &bit_off, &start_byte, &is_emul); + + /* Populate wi type */ + viddec_mpeg2_get_first_mb_number(parent, ctxt, &first_mb); + wi.vwi_type = VIDDEC_WORKLOAD_PIXEL_ES; + wi.es.es_flags = (first_mb << 16); + + /* Append data from given byte position as pixel data */ + viddec_pm_append_misc_tags(parent, start_byte, (unsigned int) -1, &wi, !parser->mpeg2_use_next_workload); + return; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c new file mode 100644 index 0000000..a7b6ef7 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c @@ -0,0 +1,380 @@ +/** + * viddec_mpeg2_parse.c + * -------------------- + * This file acts as the main interface between the parser manager and MPEG2 + * parser. All the operations done by the MPEG2 parser are defined here and + * function pointers for each operation are returned to the parser manager. + */ + +#include "viddec_mpeg2.h" + +/* viddec_mpeg2_parser_init() - Initializes parser context. */ +static void viddec_mpeg2_parser_init +( + void *ctxt, + uint32_t *persist_mem, + uint32_t preserve +) +{ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Avoid compiler warning */ + persist_mem = persist_mem; + + /* Initialize state variables */ + parser->mpeg2_pic_metadata_complete = false; + parser->mpeg2_picture_interlaced = false; + parser->mpeg2_first_field = false; + parser->mpeg2_frame_start = false; + parser->mpeg2_ref_table_updated = false; + parser->mpeg2_use_next_workload = false; + parser->mpeg2_first_slice_flag = false; + parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE; + parser->mpeg2_last_parsed_sc = MPEG2_SC_ALL; + parser->mpeg2_last_parsed_slice_sc = MPEG2_SC_SLICE_MAX; + parser->mpeg2_wl_status = MPEG2_WL_EMPTY; + parser->mpeg2_prev_picture_structure = MPEG2_PIC_STRUCT_FRAME; + parser->mpeg2_prev_temp_ref = 0; + parser->mpeg2_num_pan_scan_offsets = 0; + + if(preserve) + { + /* Init all picture level header info */ + memset(&parser->info.pic_hdr, 0, sizeof(struct mpeg2_picture_hdr_info)); + memset(&parser->info.pic_cod_ext, 0, sizeof(struct mpeg2_picture_coding_ext_info)); + memset(&parser->info.pic_disp_ext, 0, sizeof(struct mpeg2_picture_disp_ext_info)); + } + else + { + /* Init all header info */ + memset(&parser->info, 0, sizeof(struct mpeg2_info)); + + parser->mpeg2_stream = false; + parser->mpeg2_custom_qmat_parsed = false; + parser->mpeg2_valid_seq_hdr_parsed = false; + parser->mpeg2_curr_seq_headers = MPEG2_HEADER_NONE; + } + + MPEG2_DEB("MPEG2 Parser: Context Initialized.\n"); + + return; +} + +/* viddec_mpeg2_get_context_size() - Returns the memory size required by the */ +/* MPEG2 parser. */ +static void viddec_mpeg2_get_context_size +( + viddec_parser_memory_sizes_t *size +) +{ + /* Should return size of my structure */ + size->context_size = sizeof(struct viddec_mpeg2_parser); + size->persist_size = 0; +}
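The size query above is how the parser manager learns how much context memory to hand this codec before calling the init op. A hedged sketch of host-side usage; the types and the 1024-byte figure are stand-ins for viddec_parser_memory_sizes_t and the real sizeof:

```c
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>

typedef struct { uint32_t context_size; uint32_t persist_size; } mem_sizes_t;

static void get_context_size(mem_sizes_t *size)
{
    size->context_size = 1024;  /* sizeof(struct viddec_mpeg2_parser) in the real code */
    size->persist_size = 0;
}

int main(void)
{
    mem_sizes_t sz;
    get_context_size(&sz);
    void *ctxt = calloc(1, sz.context_size);   /* the parser manager owns this block */
    if (ctxt == NULL)
        return 1;
    printf("allocated %u-byte parser context\n", (unsigned)sz.context_size);
    free(ctxt);
    return 0;
}
```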
+ +/* viddec_mpeg2_get_error_code() - Returns the error code for the current */ +/* workload. */ +static void viddec_mpeg2_get_error_code +( + struct viddec_mpeg2_parser *parser, + viddec_workload_t *wl, + uint32_t *error_code +) +{ + *error_code = 0; + + /* Dangling field error */ + if (parser->mpeg2_wl_status & MPEG2_WL_DANGLING_FIELD) + { + *error_code |= VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD; + if (parser->mpeg2_wl_status & MPEG2_WL_DANGLING_FIELD_TOP) + { + *error_code |= VIDDEC_FW_WORKLOAD_ERR_TOPFIELD; + } + else + { + *error_code |= VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD; + } + } + + /* Repeated same field */ + if (parser->mpeg2_wl_status & MPEG2_WL_REPEAT_FIELD) + { + *error_code |= (VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD + | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE); + } + + /* If workload is not complete, set non-decodable flag */ + if (!(parser->mpeg2_wl_status & MPEG2_WL_COMPLETE)) + { + *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + } + + /* If reference info is not updated, set missing reference flag */ + if (!(parser->mpeg2_wl_status & MPEG2_WL_REF_INFO)) + { + *error_code |= VIDDEC_FW_WORKLOAD_ERR_MISSING_REFERENCE; + } + + /* Missing DMEM data - missing DMEM and irrecoverable flags are set */ + if (!(parser->mpeg2_wl_status & MPEG2_WL_DMEM_DATA)) + { + *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM + | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ; + } + + /* Missing sequence header - missing sequence info and irrecoverable flags are set */ + if ((!(parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ)) + && (!parser->mpeg2_valid_seq_hdr_parsed)) + { + *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_MISSING_SEQ_INFO + | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ; + } + + /* Unsupported features found in stream */ + if (parser->mpeg2_wl_status & MPEG2_WL_UNSUPPORTED) + { + *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED + | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ; + } + + /* If frame type is unknown, default to I frame. */ + if ((wl->attrs.frame_type != VIDDEC_FRAME_TYPE_I) + && (wl->attrs.frame_type != VIDDEC_FRAME_TYPE_P) + && (wl->attrs.frame_type != VIDDEC_FRAME_TYPE_B)) + { + wl->attrs.frame_type = VIDDEC_FRAME_TYPE_I; + } + + /* If there is a mismatch between the frame type and reference information */ + /* then mark the workload as not decodable */ + if (wl->attrs.frame_type == VIDDEC_FRAME_TYPE_B) + { + if (wl->is_reference_frame != 0) *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + } + else + { + if (wl->is_reference_frame == 0) *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + } + + /* For non-decodable frames, do not set reference info so that the workload */ + /* manager does not increment ref count. 
*/ + if (*error_code & VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE) + { + wl->is_reference_frame = 0; + } + + /* Corrupted header notification */ + if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_HDR) + *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_HDR; + if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_EXT) + *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_EXT; + if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_DISP_EXT) + *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_DISP_EXT; + if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_GOP_HDR) + *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_GOP_HDR; + if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_HDR) + *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_HDR; + if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_COD_EXT) + *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_COD_EXT; + if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_DISP_EXT) + *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_DISP_EXT; + if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_QMAT_EXT) + *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT; + + MPEG2_DEB("Workload error code: 0x%8X.\n", *error_code); + return; +} + +/* viddec_mpeg2_is_start_frame() - Returns whether the current chunk of */ +/* parsed data contains the start of a frame. */ +static uint32_t viddec_mpeg2_is_start_frame +( + void *ctxt +) +{ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + return (parser->mpeg2_frame_start); +} + +/* viddec_mpeg2_is_workload_done() - Returns current frame parsing status */ +/* to the parser manager. */ +static uint32_t viddec_mpeg2_is_workload_done +( + void *parent, + void *ctxt, + unsigned int next_sc, + uint32_t *codec_specific_errors +) +{ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + viddec_workload_t *wl = viddec_pm_get_header(parent); + uint32_t ret = VIDDEC_PARSE_SUCESS; + uint32_t frame_boundary = 0; + uint8_t force_frame_complete = 0; + parent = parent; + + /* Detect Frame Boundary */ + frame_boundary = ((MPEG2_SC_PICTURE == next_sc) || (MPEG2_SC_SEQ_HDR == next_sc) || (MPEG2_SC_GROUP == next_sc)); + if (frame_boundary) + { + parser->mpeg2_first_slice_flag = false; + } + + force_frame_complete = ((VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc)); + + if (force_frame_complete || (frame_boundary && (parser->mpeg2_pic_metadata_complete))) + { + if(!force_frame_complete) + { + parser->mpeg2_wl_status |= MPEG2_WL_COMPLETE; + parser->mpeg2_last_parsed_slice_sc = MPEG2_SC_SLICE_MAX; + parser->mpeg2_pic_metadata_complete = false; + parser->mpeg2_first_slice_flag = false; + } + + viddec_mpeg2_get_error_code(parser, wl, codec_specific_errors); + parser->mpeg2_wl_status = MPEG2_WL_EMPTY; + parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE; + /* Reset mpeg2_use_next_workload flag if it is set */ + if (parser->mpeg2_use_next_workload) + { + viddec_pm_set_late_frame_detect(parent); + parser->mpeg2_use_next_workload = false; + } + ret = VIDDEC_PARSE_FRMDONE; + } + return ret; +}
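A compilable restatement of the completion predicate in viddec_mpeg2_is_workload_done() above: a frame ends when the next start code opens a new picture, sequence, or GOP and the current picture's metadata is complete, or unconditionally on EOS/discontinuity. The EOS and discontinuity constants below are stand-ins for VIDDEC_PARSE_EOS and VIDDEC_PARSE_DISCONTINUITY, whose real values live in the parser-manager headers:

```c
#include <stdio.h>
#include <stdint.h>

enum { SC_PICTURE = 0x00, SC_SEQ_HDR = 0xB3, SC_GROUP = 0xB8,
       SC_EOS = 0x101, SC_DISCONTINUITY = 0x102 };   /* last two are stand-ins */

static int frame_done(uint32_t next_sc, int metadata_complete)
{
    int boundary = (next_sc == SC_PICTURE) || (next_sc == SC_SEQ_HDR) || (next_sc == SC_GROUP);
    int forced   = (next_sc == SC_EOS) || (next_sc == SC_DISCONTINUITY);
    return forced || (boundary && metadata_complete);
}

int main(void)
{
    printf("%d\n", frame_done(SC_PICTURE, 1));  /* 1                          */
    printf("%d\n", frame_done(SC_PICTURE, 0));  /* 0: metadata not complete   */
    printf("%d\n", frame_done(SC_EOS, 0));      /* 1: forced on end of stream */
    return 0;
}
```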
For MPEG-1 we want to set the frame start flag after
+       parsing the picture header, since there is no picture coding extension. */
+    parser->mpeg2_frame_start = (!parser->mpeg2_stream) && (parser->mpeg2_last_parsed_sc == MPEG2_SC_PICTURE);
+
+    /* Peek at the current start code - first 32 bits of the stream */
+    ret = viddec_pm_peek_bits(parent, &current_sc, sc_bits);
+    if (ret == -1)
+    {
+        MPEG2_DEB("Unable to get start code.\n");
+        return MPEG2_PARSE_ERROR;
+    }
+    current_sc &= MPEG2_BIT_MASK_8;
+    MPEG2_DEB("Start Code found = 0x%.8X\n", current_sc);
+
+    /* Get rid of the start code prefix for all start codes except slice */
+    /* start codes.                                                      */
+    if ((current_sc < MPEG2_SC_SLICE_MIN) || (current_sc > MPEG2_SC_SLICE_MAX))
+    {
+        viddec_pm_skip_bits(parent, sc_bits);
+    }
+
+    /* Parse metadata based on the start code found */
+    switch( current_sc )
+    {
+        /* Sequence Start Code */
+        case MPEG2_SC_SEQ_HDR:
+        {
+            parser->mpeg2_curr_seq_headers = MPEG2_HEADER_NONE;
+            viddec_mpeg2_parse_seq_hdr(parent, ctxt);
+        }
+        break;
+
+        /* Picture Start Code */
+        case MPEG2_SC_PICTURE:
+        {
+            viddec_mpeg2_parse_pic_hdr(parent, ctxt);
+        }
+        break;
+
+        /* Extension Code */
+        case MPEG2_SC_EXT:
+        {
+            viddec_mpeg2_parse_ext(parent, ctxt);
+        }
+        break;
+
+        /* Group of Pictures Header */
+        case MPEG2_SC_GROUP:
+        {
+            viddec_mpeg2_parse_gop_hdr(parent, ctxt);
+        }
+        break;
+
+        /* Unused Start Codes */
+        case MPEG2_SC_SEQ_END:
+        case MPEG2_SC_SEQ_ERR:
+        break;
+
+        /* User Data */
+        case MPEG2_SC_USER_DATA:
+        {
+            viddec_mpeg2_parse_and_append_user_data(parent, ctxt);
+        }
+        break;
+
+        default:
+        {
+            /* Slice Data - Append slice data to the workload */
+            if ((current_sc >= MPEG2_SC_SLICE_MIN) &&
+                (current_sc <= MPEG2_SC_SLICE_MAX))
+            {
+                if (!parser->mpeg2_first_slice_flag)
+                {
+                    /* At this point, all the metadata required by the MPEG2 */
+                    /* hardware for decoding is extracted and stored. So the */
+                    /* metadata can be packed into workitems and emitted out.*/
+                    viddec_mpeg2_emit_workload(parent, ctxt);
+
+                    /* If the current picture is progressive, or it is the   */
+                    /* second field of an interlaced field picture, then set */
+                    /* the workload done flag.                               */
+                    if ((!parser->mpeg2_picture_interlaced)
+                        || ((parser->mpeg2_picture_interlaced) && (!parser->mpeg2_first_field)))
+                    {
+                        parser->mpeg2_pic_metadata_complete = true;
+                    }
+                    else if ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field))
+                    {
+                        parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE;
+                    }
+
+                    parser->mpeg2_first_slice_flag = true;
+                }
+                parser->mpeg2_last_parsed_slice_sc = current_sc;
+                viddec_mpeg2_parse_and_append_slice_data(parent, ctxt);
+                parser->mpeg2_wl_status |= MPEG2_WL_PARTIAL_SLICE;
+            }
+        }
+    } /* Switch */
+
+    /* Save last parsed start code */
+    parser->mpeg2_last_parsed_sc = current_sc;
+    return ret;
+}
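+
+/* Illustrative sketch (not part of this patch): the order in which a parser
+ * manager is expected to drive the entry points above. The function below is
+ * documentation only -- the guard macro is hypothetical and never defined --
+ * but the call signatures match viddec_mpeg2_parse() and
+ * viddec_mpeg2_is_workload_done() as registered in viddec_mpeg2_get_ops(). */
+#ifdef VIDDEC_MPEG2_EXAMPLE_DRIVER
+static void viddec_mpeg2_example_driver(void *parent, void *ctxt, unsigned int next_sc)
+{
+    uint32_t errors = 0;
+
+    /* Parse the chunk belonging to the previously found start code... */
+    (void) viddec_mpeg2_parse(parent, ctxt);
+
+    /* ...then ask whether the next start code closes the current frame. */
+    if (VIDDEC_PARSE_FRMDONE ==
+        viddec_mpeg2_is_workload_done(parent, ctxt, next_sc, &errors))
+    {
+        /* errors now carries the VIDDEC_FW_WORKLOAD_ERR_* bits composed */
+        /* by viddec_mpeg2_get_error_code().                             */
+    }
+}
+#endif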
+/* viddec_mpeg2_get_ops() - Registers parser ops with the parser manager.  */
+void viddec_mpeg2_get_ops
+(
+    viddec_parser_ops_t *ops
+)
+{
+    ops->init           = viddec_mpeg2_parser_init;
+    ops->parse_syntax   = viddec_mpeg2_parse;
+    ops->get_cxt_size   = viddec_mpeg2_get_context_size;
+    ops->is_wkld_done   = viddec_mpeg2_is_workload_done;
+    ops->is_frame_start = viddec_mpeg2_is_start_frame;
+    return;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c
new file mode 100644
index 0000000..503ded5
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c
@@ -0,0 +1,461 @@
+/**
+ * viddec_mpeg2_workload.c
+ * -----------------------
+ * This file packs the data parsed and stored in the context into a workload
+ * and emits it out. The current list of workitems emitted into the workload
+ * includes:
+ *
+ * - DMEM - Register Data
+ * - Past and Future picture references
+ * - Quantization matrix data
+ *
+ * Slice data gets appended into the workload in viddec_mpeg2_parse.c
+ *
+ * Also, the frame attributes are updated in the workload.
+ */
+
+#include "viddec_mpeg2.h"
+#include "viddec_fw_item_types.h"
+
+void viddec_mpeg2_append_workitem(void *parent, viddec_workload_item_t *wi, uint8_t next_wl)
+{
+    if (next_wl)
+    {
+        viddec_pm_append_workitem_next(parent, wi);
+    }
+    else
+    {
+        viddec_pm_append_workitem(parent, wi);
+    }
+    return;
+}
+
+viddec_workload_t* viddec_mpeg2_get_header(void *parent, uint8_t next_wl)
+{
+    viddec_workload_t *ret;
+    if (next_wl)
+    {
+        ret = viddec_pm_get_next_header(parent);
+    }
+    else
+    {
+        ret = viddec_pm_get_header(parent);
+    }
+    return ret;
+}
+
+/* viddec_mpeg2_set_seq_ext_defaults() - Sets non-zero default values for  */
+/* sequence extension items in case the sequence extension is not present. */
+static void viddec_mpeg2_set_seq_ext_defaults(struct viddec_mpeg2_parser *parser)
+{
+    parser->info.seq_ext.progressive_sequence = true;
+    parser->info.seq_ext.chroma_format        = MPEG2_CF_420;
+}
+
+/* viddec_mpeg2_set_pic_cod_ext_defaults() - Sets non-zero default values  */
+/* for picture coding extension items in case the picture coding extension */
+/* is not present.                                                         */
+static void viddec_mpeg2_set_pic_cod_ext_defaults(struct viddec_mpeg2_parser *parser)
+{
+    parser->info.pic_cod_ext.picture_structure    = MPEG2_PIC_STRUCT_FRAME;
+    parser->info.pic_cod_ext.frame_pred_frame_dct = true;
+    parser->info.pic_cod_ext.progressive_frame    = true;
+}
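+
+/* Worked example (illustrative only, not used by the decoder): how four
+ * 8-bit quantiser values pack into one big-endian DWORD, mirroring the
+ * little-endian branch of viddec_mpeg2_pack_qmat() below. Input bytes
+ * 0x08 0x10 0x13 0x16 become 0x08101316. The guard macro is hypothetical
+ * and never defined, so this is documentation only. */
+#ifdef VIDDEC_MPEG2_QMAT_EXAMPLE
+static uint32_t viddec_mpeg2_example_pack_four(const uint8_t *q)
+{
+    return (((uint32_t) q[0]) << 24) |
+           (((uint32_t) q[1]) << 16) |
+           (((uint32_t) q[2]) << 8)  |
+           ((uint32_t)  q[3]);
+}
+#endif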
+/* viddec_mpeg2_pack_qmat() - Packs the 256 byte quantization matrix data  */
+/* into 64 32-bit values.                                                  */
+#ifdef MFDBIGENDIAN
+static void viddec_mpeg2_pack_qmat(struct viddec_mpeg2_parser *parser)
+{
+    /* Quantization Matrix Support */
+    /* Populate Quantization Matrices */
+    uint32_t index = 0;
+    uint32_t *qmat_packed, *qmat_unpacked;
+
+    /* When transferring the quantization matrix data from the parser */
+    /* context into workload items, we are packing four 8 bit         */
+    /* quantization values into one DWORD (32 bits). To do this, the  */
+    /* array of values of type uint8_t, is typecast as uint32 * and   */
+    /* read.                                                          */
+    qmat_packed   = (uint32_t *) parser->wi.qmat;
+    qmat_unpacked = (uint32_t *) &parser->info.qnt_mat;
+
+    for (index = 0; index < MPEG2_QMAT_SIZE; index++)
+    {
+        qmat_packed[index] = qmat_unpacked[index];
+    }
+    return;
+}
+#else
+static void viddec_mpeg2_pack_qmat(struct viddec_mpeg2_parser *parser)
+{
+    /* Quantization Matrix Support */
+    /* Populate Quantization Matrices */
+    uint32_t index = 0;
+    uint32_t *qmat_packed;
+    uint8_t  *qmat_unpacked;
+
+    /* On little-endian builds, the four 8 bit quantization values are */
+    /* assembled into each big-endian DWORD explicitly.                */
+    qmat_packed   = (uint32_t *) parser->wi.qmat;
+    qmat_unpacked = (uint8_t *) &parser->info.qnt_mat;
+
+    for (index = 0; index < MPEG2_QMAT_SIZE; index++)
+    {
+        qmat_packed[index] =
+            (((uint32_t) qmat_unpacked[(index << 2) + 0]) << 24) |
+            (((uint32_t) qmat_unpacked[(index << 2) + 1]) << 16) |
+            (((uint32_t) qmat_unpacked[(index << 2) + 2]) << 8)  |
+            ((uint32_t)  qmat_unpacked[(index << 2) + 3]);
+    }
+    return;
+}
+#endif
+
+/* viddec_mpeg2_trans_metadata_workitems() - Transfers the metadata stored */
+/* in the parser context into the DMEM workitem registers.                 */
+static void viddec_mpeg2_trans_metadata_workitems(void *ctxt)
+{
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Reset the workitem registers */
+    parser->wi.csi1  = 0x0;
+    parser->wi.csi2  = 0x0;
+    parser->wi.cpi1  = 0x0;
+    parser->wi.cpce1 = 0x0;
+
+    /* Set defaults for missing fields */
+    if (!(parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ_EXT))
+    {
+        viddec_mpeg2_set_seq_ext_defaults(parser);
+    }
+    if (!(parser->mpeg2_curr_frame_headers & MPEG2_HEADER_PIC_COD_EXT))
+    {
+        viddec_mpeg2_set_pic_cod_ext_defaults(parser);
+    }
+
+    /* Populate Core Sequence Info 1 */
+    parser->wi.csi1 |= (parser->mpeg2_stream) << 1;
+    parser->wi.csi1 |= (parser->info.seq_hdr.constrained_parameters_flag) << 2;
+    parser->wi.csi1 |= (parser->info.seq_ext.progressive_sequence) << 3;
+    parser->wi.csi1 |= (parser->info.seq_ext.chroma_format) << 16;
+    parser->wi.csi1 |= (parser->info.qnt_ext.load_intra_quantiser_matrix) << 19;
+    parser->wi.csi1 |= (parser->info.qnt_ext.load_non_intra_quantiser_matrix) << 20;
+    parser->wi.csi1 |= (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) << 21;
+    parser->wi.csi1 |= (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) << 22;
+    MPEG2_DEB("Core Sequence Info 1: 0x%.8X\n", parser->wi.csi1);
+
+    /* Populate Core Sequence Info 2 */
+    parser->wi.csi2 |= (parser->info.seq_hdr.horizontal_size_value & MPEG2_BIT_MASK_11);
+    parser->wi.csi2 |= (parser->info.seq_hdr.vertical_size_value & MPEG2_BIT_MASK_11) << 14;
+    MPEG2_DEB("Core Sequence Info 2: 0x%.8X\n", parser->wi.csi2);
+
+    /* Populate Core Picture Info */
+    parser->wi.cpi1 |= (parser->info.pic_hdr.full_pel_forward_vect);
+    parser->wi.cpi1 |= (parser->info.pic_hdr.forward_f_code) << 1;
+    parser->wi.cpi1 |= (parser->info.pic_hdr.full_pel_backward_vect) << 4;
+    parser->wi.cpi1 |= (parser->info.pic_hdr.backward_f_code) << 5;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode00) << 8;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode01) << 12;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode10) << 16;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode11) << 20;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.intra_dc_precision) << 24;
+    parser->wi.cpi1 |= (parser->info.pic_hdr.picture_coding_type-1) << 26;
+    MPEG2_DEB("Core Picture Info 1: 0x%.8X\n", parser->wi.cpi1);
+
+    /* Populate Core Picture Extension Info */
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.composite_display_flag);
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.progressive_frame) << 1;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.chroma_420_type) << 2;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.repeat_first_field) << 3;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.alternate_scan) << 4;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.intra_vlc_format) << 5;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.q_scale_type) << 6;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.concealment_motion_vectors) << 7;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.frame_pred_frame_dct) << 8;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.top_field_first) << 9;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.picture_structure) << 10;
+    MPEG2_DEB("Core Picture Ext Info 1: 0x%.8X\n", parser->wi.cpce1);
+
+    return;
+}
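+
+/* Worked example (illustrative): for an MPEG-2 (not MPEG-1) stream with a
+ * progressive sequence, 4:2:0 chroma -- chroma_format == 1 per ISO/IEC
+ * 13818-2 -- and no quantiser matrix loads, the packing above yields
+ *     csi1 = (1 << 1) | (1 << 3) | (1 << 16) = 0x0001000A.
+ */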
+/* mpeg2_emit_frameid() - Sends the frame id as a workload item.           */
+static inline void mpeg2_emit_frameid(void *parent, int32_t wl_type, uint8_t flag)
+{
+    viddec_workload_item_t wi;
+    wi.vwi_type = wl_type;
+
+    wi.ref_frame.reference_id     = 0;
+    wi.ref_frame.luma_phys_addr   = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    viddec_mpeg2_append_workitem( parent, &wi, flag );
+}
+
+/* mpeg2_send_ref_reorder() - Reorders reference frames                     */
+static inline void mpeg2_send_ref_reorder(void *parent, uint8_t flag)
+{
+    viddec_workload_item_t wi;
+
+    wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER;
+    wi.ref_reorder.ref_table_offset = 0;
+    /* Reorder index 1 to index 0 only */
+    wi.ref_reorder.ref_reorder_00010203 = 0x01010203;
+    wi.ref_reorder.ref_reorder_04050607 = 0x04050607;
+    viddec_mpeg2_append_workitem( parent, &wi, flag );
+}
+
+/* viddec_mpeg2_manage_ref() - Manages frame references by inserting the   */
+/* past and future references (if any) for every frame inserted in the     */
+/* workload.                                                               */
+static void viddec_mpeg2_manage_ref(void *parent, void *ctxt)
+{
+    int32_t frame_id = 1;
+    int32_t frame_type;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+    viddec_workload_t *wl = viddec_mpeg2_get_header( parent, parser->mpeg2_use_next_workload );
+    wl->is_reference_frame = 0;
+
+    /* Identify the frame type (I, P or B) */
+    frame_type = parser->info.pic_hdr.picture_coding_type;
+
+    /* Send reference frame information based on whether the picture is a */
+    /* frame picture or field picture.                                    */
+    if ((!parser->mpeg2_picture_interlaced)
+        || ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field)))
+    {
+        /* Check if we need to reorder frame references/send frame for display */
+        /* in case of I or P type */
+        if (frame_type != MPEG2_PC_TYPE_B)
+        {
+            /* Reorder the reference table if it was updated */
+            if (parser->mpeg2_ref_table_updated)
+            {
+                mpeg2_send_ref_reorder(parent, parser->mpeg2_use_next_workload);
+            }
+        }
+
+        /* Send reference frame workitems */
+        switch(frame_type)
+        {
+            case MPEG2_PC_TYPE_I:
+            {
+                break;
+            }
+            case MPEG2_PC_TYPE_P:
+            {
+                mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_PAST, parser->mpeg2_use_next_workload);
+                break;
+            }
+            case MPEG2_PC_TYPE_B:
+            {
+                mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_PAST, parser->mpeg2_use_next_workload);
+                mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_FUTURE, parser->mpeg2_use_next_workload);
+            }
+        }
+
+        /* Set reference information updated flag */
+        if (!parser->mpeg2_picture_interlaced)
+        {
+            parser->mpeg2_wl_status |= MPEG2_WL_REF_INFO;
+        }
+    }
+    else
+    {
+        /* Set reference information updated flag for the second field */
+        parser->mpeg2_wl_status |= MPEG2_WL_REF_INFO;
+    }
+
+    /* Set the reference frame flags for I and P types */
+    if (frame_type != MPEG2_PC_TYPE_B)
+    {
+        wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (frame_id & WORKLOAD_REFERENCE_FRAME_BMASK);
+        parser->mpeg2_ref_table_updated = true;
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_check_unsupported() - Checks for unsupported features in   */
+/* the stream.                                                             */
+static void viddec_mpeg2_check_unsupported(void *parent, void *ctxt)
+{
+    unsigned int unsupported_feature_found = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get workload */
+    viddec_workload_t *wl = viddec_mpeg2_get_header( parent, parser->mpeg2_use_next_workload );
+
+    /* Get attributes in workload */
+    viddec_frame_attributes_t *attrs = &wl->attrs;
+
+    /* Check for unsupported content size */
+    unsupported_feature_found |= (attrs->cont_size.height >
MPEG2_MAX_CONTENT_HEIGHT); + unsupported_feature_found |= (attrs->cont_size.width > MPEG2_MAX_CONTENT_WIDTH); + + /* Update parser status, if found */ + if (unsupported_feature_found) + { + parser->mpeg2_wl_status |= MPEG2_WL_UNSUPPORTED; + } + + return; +} + +/* viddec_mpeg2_append_metadata() - Appends meta data from the stream. */ +void viddec_mpeg2_append_metadata(void *parent, void *ctxt) +{ + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + viddec_workload_item_t wi; + + /* Append sequence info, if found with current frame */ + if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ) + { + memset(&wi, 0, sizeof(viddec_workload_item_t)); + wi.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO; + + viddec_fw_mp2_sh_set_horizontal_size_value ( &(wi.mp2_sh) , parser->info.seq_hdr.horizontal_size_value); + viddec_fw_mp2_sh_set_vertical_size_value ( &(wi.mp2_sh) , parser->info.seq_hdr.vertical_size_value); + viddec_fw_mp2_sh_set_aspect_ratio_information ( &(wi.mp2_sh) , parser->info.seq_hdr.aspect_ratio_information); + viddec_fw_mp2_sh_set_frame_rate_code ( &(wi.mp2_sh) , parser->info.seq_hdr.frame_rate_code); + viddec_fw_mp2_sh_set_bit_rate_value ( &(wi.mp2_sh) , parser->info.seq_hdr.bit_rate_value); + viddec_fw_mp2_sh_set_vbv_buffer_size_value ( &(wi.mp2_sh) , parser->info.seq_hdr.vbv_buffer_size_value); + + viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload); + } + + /* Append sequence extension info, if found with current frame */ + if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ_EXT) + { + memset(&wi, 0, sizeof(viddec_workload_item_t)); + wi.vwi_type = VIDDEC_WORKLOAD_MPEG2_SEQ_EXT; + + viddec_fw_mp2_se_set_profile_and_level_indication( &(wi.mp2_se) , parser->info.seq_ext.profile_and_level_indication); + viddec_fw_mp2_se_set_progressive_sequence ( &(wi.mp2_se) , parser->info.seq_ext.progressive_sequence); + viddec_fw_mp2_se_set_chroma_format ( &(wi.mp2_se) , parser->info.seq_ext.chroma_format); + viddec_fw_mp2_se_set_horizontal_size_extension ( &(wi.mp2_se) , parser->info.seq_ext.horizontal_size_extension); + viddec_fw_mp2_se_set_vertical_size_extension ( &(wi.mp2_se) , parser->info.seq_ext.vertical_size_extension); + viddec_fw_mp2_se_set_bit_rate_extension ( &(wi.mp2_se) , parser->info.seq_ext.bit_rate_extension); + viddec_fw_mp2_se_set_vbv_buffer_size_extension ( &(wi.mp2_se) , parser->info.seq_ext.vbv_buffer_size_extension); + viddec_fw_mp2_se_set_frame_rate_extension_n ( &(wi.mp2_se) , parser->info.seq_ext.frame_rate_extension_n); + viddec_fw_mp2_se_set_frame_rate_extension_d ( &(wi.mp2_se) , parser->info.seq_ext.frame_rate_extension_d); + + viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload); + } + + /* Append Display info, if present */ + if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ_DISP_EXT) + { + memset(&wi, 0, sizeof(viddec_workload_item_t)); + wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO; + + viddec_fw_mp2_sde_set_video_format ( &(wi.mp2_sde) , parser->info.seq_disp_ext.video_format); + viddec_fw_mp2_sde_set_color_description ( &(wi.mp2_sde) , parser->info.seq_disp_ext.colour_description); + viddec_fw_mp2_sde_set_color_primaries ( &(wi.mp2_sde) , parser->info.seq_disp_ext.colour_primaries); + viddec_fw_mp2_sde_set_transfer_characteristics( &(wi.mp2_sde) , parser->info.seq_disp_ext.transfer_characteristics); + viddec_fw_mp2_sde_set_display_horizontal_size ( &(wi.mp2_sde) , parser->info.seq_disp_ext.display_horizontal_size); + 
viddec_fw_mp2_sde_set_display_vertical_size ( &(wi.mp2_sde) , parser->info.seq_disp_ext.display_vertical_size); + + viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload); + } + + /* Append GOP info, if present */ + if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_GOP) + { + memset(&wi, 0, sizeof(viddec_workload_item_t)); + wi.vwi_type = VIDDEC_WORKLOAD_GOP_INFO; + + viddec_fw_mp2_gop_set_closed_gop ( &(wi.mp2_gop) , parser->info.gop_hdr.closed_gop); + viddec_fw_mp2_gop_set_broken_link( &(wi.mp2_gop) , parser->info.gop_hdr.broken_link); + + viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload); + } + + return; +} + +/* viddec_mpeg2_append_workitems() - Appends decoder specific workitems */ +/* to the workload starting at the address and length specified. */ +static void viddec_mpeg2_append_workitems +( + void *parent, + uint32_t* address, + int workitem_type, + int num_items, + uint8_t flag +) +{ + int32_t index=0; + const uint32_t* initial_address = address; + viddec_workload_item_t wi; + + for (index=0; index < num_items; index++) + { + wi.vwi_type = workitem_type; + wi.data.data_offset = (char *) address - (const char *) initial_address; + wi.data.data_payload[0] = address[0]; + wi.data.data_payload[1] = address[1]; + address += 2; + + viddec_mpeg2_append_workitem(parent, &wi, flag); + } + + return; +} + +/* viddec_mpeg2_emit_workload() - Emits MPEG2 parser generated work load */ +/* items. */ +/* Items include: MPEG2 DMEM Data, Quantization Matrices. */ +/* Pixel ES data sent separately whenever parser sees slice data */ +void viddec_mpeg2_emit_workload(void *parent, void *ctxt) +{ + MPEG2_DEB("Emitting workloads.\n"); + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Append meta data workitems */ + viddec_mpeg2_append_metadata(parent, ctxt); + + /* Transfer metadata into attributes */ + viddec_mpeg2_translate_attr(parent, ctxt); + + /* Check for unsupported features in the stream and update parser status */ + viddec_mpeg2_check_unsupported(parent, ctxt); + + /* Transfer all stored metadata into MPEG2 Hardware Info */ + viddec_mpeg2_trans_metadata_workitems(parser); + + /* Send MPEG2 DMEM workitems */ + viddec_mpeg2_append_workitems(parent, + (uint32_t *) &parser->wi, + VIDDEC_WORKLOAD_MPEG2_DMEM, + MPEG2_NUM_DMEM_WL_ITEMS, + parser->mpeg2_use_next_workload); + parser->mpeg2_wl_status |= MPEG2_WL_DMEM_DATA; + MPEG2_DEB("Adding %d items as DMEM Data.\n", MPEG2_NUM_DMEM_WL_ITEMS); + + /* Send MPEG2 Quantization Matrix workitems, if updated */ + viddec_mpeg2_pack_qmat(parser); + viddec_mpeg2_append_workitems(parent, + (uint32_t *) parser->wi.qmat, + VIDDEC_WORKLOAD_MPEG2_QMAT, + MPEG2_NUM_QMAT_WL_ITEMS, + parser->mpeg2_use_next_workload); + MPEG2_DEB("Adding %d items as QMAT Data.\n", MPEG2_NUM_QMAT_WL_ITEMS); + + /* Manage reference frames */ + viddec_mpeg2_manage_ref(parent, ctxt); + + return; +} + diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h b/mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h new file mode 100644 index 0000000..7084161 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h @@ -0,0 +1,231 @@ +#ifndef VIDDEC_FW_MP4_H +#define VIDDEC_FW_MP4_H + +#include "viddec_fw_workload.h" + +enum viddec_fw_mp4_ref_frame_id +{ + VIDDEC_MP4_FRAME_CURRENT = 0, + VIDDEC_MP4_FRAME_PAST = 1, + VIDDEC_MP4_FRAME_FUTURE = 2, + VIDDEC_MP4_FRAME_MAX = 3, +}; + +enum mp4_workload_item_type +{ + VIDDEC_WORKLOAD_MP4_PAST_FRAME = 
VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0, + VIDDEC_WORKLOAD_MP4_FUTURE_FRAME, + VIDDEC_WORKLOAD_MP4_VOL_INFO = VIDDEC_WORKLOAD_DECODER_SPECIFIC, + VIDDEC_WORKLOAD_MP4_VOP_INFO, + VIDDEC_WORKLOAD_MP4_BVOP_INFO, + VIDDEC_WORKLOAD_MP4_SPRT_TRAJ, + VIDDEC_WORKLOAD_MP4_IQUANT, + VIDDEC_WORKLOAD_MP4_NIQUANT, + VIDDEC_WORKLOAD_MP4_SVH, +}; + +enum viddec_fw_mp4_vop_coding_type_t +{ + VIDDEC_MP4_VOP_TYPE_I = 0, + VIDDEC_MP4_VOP_TYPE_P, + VIDDEC_MP4_VOP_TYPE_B, + VIDDEC_MP4_VOP_TYPE_S +}; + +// This structure contains the information extracted from the Video Object Layer. +// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOL_INFO, using +// the "vwi_payload" array in viddec_workload_item_t. +// TODO: Add default values in the comments for each item +typedef struct +{ + // Flags extracted from the Video Object Layer + // 0:0 - short_video_header + // 1:2 - vol_shape + // 3:3 - interlaced + // 4:4 - obmc_disable + // 5:5 - quarter_sample + // 6:6 - resync_marker_disable + // 7:7 - data_partitioned + // 8:8 - reversible_vlc + #define viddec_fw_mp4_get_reversible_vlc(x) viddec_fw_bitfields_extract((x)->vol_flags, 8, 0x1) + #define viddec_fw_mp4_set_reversible_vlc(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 8, 0x1) + #define viddec_fw_mp4_get_data_partitioned(x) viddec_fw_bitfields_extract((x)->vol_flags, 7, 0x1) + #define viddec_fw_mp4_set_data_partitioned(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 7, 0x1) + #define viddec_fw_mp4_get_resync_marker_disable(x) viddec_fw_bitfields_extract((x)->vol_flags, 6, 0x1) + #define viddec_fw_mp4_set_resync_marker_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 6, 0x1) + #define viddec_fw_mp4_get_quarter_sample(x) viddec_fw_bitfields_extract((x)->vol_flags, 5, 0x1) + #define viddec_fw_mp4_set_quarter_sample(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 5, 0x1) + #define viddec_fw_mp4_get_obmc_disable(x) viddec_fw_bitfields_extract((x)->vol_flags, 4, 0x1) + #define viddec_fw_mp4_set_obmc_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 4, 0x1) + #define viddec_fw_mp4_get_interlaced(x) viddec_fw_bitfields_extract((x)->vol_flags, 3, 0x1) + #define viddec_fw_mp4_set_interlaced(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 3, 0x1) + #define viddec_fw_mp4_get_vol_shape(x) viddec_fw_bitfields_extract((x)->vol_flags, 1, 0x3) + #define viddec_fw_mp4_set_vol_shape(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 1, 0x3) + #define viddec_fw_mp4_get_short_video_header_flag(x) viddec_fw_bitfields_extract((x)->vol_flags, 0, 0x1) + #define viddec_fw_mp4_set_short_video_header_flag(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 0, 0x1) + unsigned int vol_flags; + + // Size extracted from the Video Object Layer + // 0:12 - width + // 13:25 - height + // MFD_MPG4VD_MB_PER_ROW can be calculated as (width+15) >> 4 + // MFD_MPG4VD_MB_ROWS can be calculated as (height+15) >> 4 + #define viddec_fw_mp4_get_vol_width(x) viddec_fw_bitfields_extract((x)->vol_size, 13, 0x1FFF) + #define viddec_fw_mp4_set_vol_width(x, val) viddec_fw_bitfields_insert((x)->vol_size, val, 13, 0x1FFF) + #define viddec_fw_mp4_get_vol_height(x) viddec_fw_bitfields_extract((x)->vol_size, 0, 0x1FFF) + #define viddec_fw_mp4_set_vol_height(x, val) viddec_fw_bitfields_insert((x)->vol_size, val, 0, 0x1FFF) + unsigned int vol_size; + + // Sprite, time increments and quantization details from the Video Object Layer + // 0:15 - vop_time_increment_resolution + // 16:17 - sprite_enable + // 18:23 - 
sprite_warping_points
+    // 24:25 - sprite_warping_accuracy
+    // 26:29 - quant_precision
+    // 30:30 - quant_type
+    #define viddec_fw_mp4_get_quant_type(x)                         viddec_fw_bitfields_extract((x)->vol_item, 30, 0x1)
+    #define viddec_fw_mp4_set_quant_type(x, val)                    viddec_fw_bitfields_insert((x)->vol_item, val, 30, 0x1)
+    #define viddec_fw_mp4_get_quant_precision(x)                    viddec_fw_bitfields_extract((x)->vol_item, 26, 0xF)
+    #define viddec_fw_mp4_set_quant_precision(x, val)               viddec_fw_bitfields_insert((x)->vol_item, val, 26, 0xF)
+    #define viddec_fw_mp4_get_sprite_warping_accuracy(x)            viddec_fw_bitfields_extract((x)->vol_item, 24, 0x3)
+    #define viddec_fw_mp4_set_sprite_warping_accuracy(x, val)       viddec_fw_bitfields_insert((x)->vol_item, val, 24, 0x3)
+    #define viddec_fw_mp4_get_sprite_warping_points(x)              viddec_fw_bitfields_extract((x)->vol_item, 18, 0x3F)
+    #define viddec_fw_mp4_set_sprite_warping_points(x, val)         viddec_fw_bitfields_insert((x)->vol_item, val, 18, 0x3F)
+    #define viddec_fw_mp4_get_sprite_enable(x)                      viddec_fw_bitfields_extract((x)->vol_item, 16, 0x3)
+    #define viddec_fw_mp4_set_sprite_enable(x, val)                 viddec_fw_bitfields_insert((x)->vol_item, val, 16, 0x3)
+    #define viddec_fw_mp4_get_vop_time_increment_resolution(x)      viddec_fw_bitfields_extract((x)->vol_item, 0, 0xFFFF)
+    #define viddec_fw_mp4_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 0, 0xFFFF)
+    unsigned int vol_item;
+
+} viddec_fw_mp4_vol_info_t;
+
+// This structure contains the information extracted from the Video Object Plane.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOP_INFO, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Frame Info - to populate register MFD_MPG4VD_BSP_FRAME_INFO
+    // 0:4   - current_frame_id
+    // 5:5   - current_field_frame
+    // 6:10  - future_frame_id
+    // 11:11 - future_field_frame
+    // 12:16 - past_frame_id
+    // 17:17 - past_field_frame
+    #define viddec_fw_mp4_get_past_field_frame(x)         viddec_fw_bitfields_extract((x)->frame_info, 17, 0x1)
+    #define viddec_fw_mp4_set_past_field_frame(x, val)    viddec_fw_bitfields_insert((x)->frame_info, val, 17, 0x1)
+    #define viddec_fw_mp4_get_past_frame_id(x)            viddec_fw_bitfields_extract((x)->frame_info, 12, 0x1F)
+    #define viddec_fw_mp4_set_past_frame_id(x, val)       viddec_fw_bitfields_insert((x)->frame_info, val, 12, 0x1F)
+    #define viddec_fw_mp4_get_future_field_frame(x)       viddec_fw_bitfields_extract((x)->frame_info, 11, 0x1)
+    #define viddec_fw_mp4_set_future_field_frame(x, val)  viddec_fw_bitfields_insert((x)->frame_info, val, 11, 0x1)
+    #define viddec_fw_mp4_get_future_frame_id(x)          viddec_fw_bitfields_extract((x)->frame_info, 6, 0x1F)
+    #define viddec_fw_mp4_set_future_frame_id(x, val)     viddec_fw_bitfields_insert((x)->frame_info, val, 6, 0x1F)
+    #define viddec_fw_mp4_get_current_field_frame(x)      viddec_fw_bitfields_extract((x)->frame_info, 5, 0x1)
+    #define viddec_fw_mp4_set_current_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 5, 0x1)
+    #define viddec_fw_mp4_get_current_frame_id(x)         viddec_fw_bitfields_extract((x)->frame_info, 0, 0x1F)
+    #define viddec_fw_mp4_set_current_frame_id(x, val)    viddec_fw_bitfields_insert((x)->frame_info, val, 0, 0x1F)
+    unsigned int frame_info;
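+
+    // Usage example (illustrative): marking the past reference as a field
+    // picture with frame id 1 would be written as
+    //     viddec_fw_mp4_set_past_frame_id(&vop_info, 1);
+    //     viddec_fw_mp4_set_past_field_frame(&vop_info, 1);
+    // which inserts 1 into bits 12:16 and 1 into bit 17 of frame_info.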
+    // Video Object Plane Info
+    // 0:1   - vop_coding_type
+    // 2:2   - vop_rounding_type
+    // 3:5   - intra_dc_vlc_thr
+    // 6:6   - top_field_first
+    // 7:7   - alternate_vertical_scan_flag
+    // 8:16  - vop_quant
+    // 17:19 - vop_fcode_forward
+    // 20:22 - vop_fcode_backward
+    // 23:31 - quant_scale
+    #define viddec_fw_mp4_get_vop_quant_scale(x)                   viddec_fw_bitfields_extract((x)->vop_data, 23, 0x1FF)
+    #define viddec_fw_mp4_set_vop_quant_scale(x, val)              viddec_fw_bitfields_insert((x)->vop_data, val, 23, 0x1FF)
+    #define viddec_fw_mp4_get_vop_fcode_backward(x)                viddec_fw_bitfields_extract((x)->vop_data, 20, 0x7)
+    #define viddec_fw_mp4_set_vop_fcode_backward(x, val)           viddec_fw_bitfields_insert((x)->vop_data, val, 20, 0x7)
+    #define viddec_fw_mp4_get_vop_fcode_forward(x)                 viddec_fw_bitfields_extract((x)->vop_data, 17, 0x7)
+    #define viddec_fw_mp4_set_vop_fcode_forward(x, val)            viddec_fw_bitfields_insert((x)->vop_data, val, 17, 0x7)
+    #define viddec_fw_mp4_get_vop_quant(x)                         viddec_fw_bitfields_extract((x)->vop_data, 8, 0x1FF)
+    #define viddec_fw_mp4_set_vop_quant(x, val)                    viddec_fw_bitfields_insert((x)->vop_data, val, 8, 0x1FF)
+    #define viddec_fw_mp4_get_alternate_vertical_scan_flag(x)      viddec_fw_bitfields_extract((x)->vop_data, 7, 0x1)
+    #define viddec_fw_mp4_set_alternate_vertical_scan_flag(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 7, 0x1)
+    #define viddec_fw_mp4_get_top_field_first(x)                   viddec_fw_bitfields_extract((x)->vop_data, 6, 0x1)
+    #define viddec_fw_mp4_set_top_field_first(x, val)              viddec_fw_bitfields_insert((x)->vop_data, val, 6, 0x1)
+    #define viddec_fw_mp4_get_intra_dc_vlc_thr(x)                  viddec_fw_bitfields_extract((x)->vop_data, 3, 0x7)
+    #define viddec_fw_mp4_set_intra_dc_vlc_thr(x, val)             viddec_fw_bitfields_insert((x)->vop_data, val, 3, 0x7)
+    #define viddec_fw_mp4_get_vop_rounding_type(x)                 viddec_fw_bitfields_extract((x)->vop_data, 2, 0x1)
+    #define viddec_fw_mp4_set_vop_rounding_type(x, val)            viddec_fw_bitfields_insert((x)->vop_data, val, 2, 0x1)
+    #define viddec_fw_mp4_get_vop_coding_type(x)                   viddec_fw_bitfields_extract((x)->vop_data, 0, 0x3)
+    #define viddec_fw_mp4_set_vop_coding_type(x, val)              viddec_fw_bitfields_insert((x)->vop_data, val, 0, 0x3)
+    unsigned int vop_data;
+
+    // No of bits used in first byte of MB data
+    unsigned int bit_offset;
+
+} viddec_fw_mp4_vop_info_t;
+
+// This structure contains the B-VOP timing information derived by the parser.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_BVOP_INFO, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Frame period = T(first B-VOP after VOL) - T(past reference of first B-VOP after VOL)
+    unsigned int Tframe;
+
+    // TRD is the difference in temporal reference of the temporally next reference VOP with
+    // the temporally previous reference VOP, assuming B-VOPs or skipped VOPs in between.
+    unsigned int TRD;
+
+    // TRB is the difference in temporal reference of the B-VOP and the previous reference VOP.
+    unsigned int TRB;
+
+} viddec_fw_mp4_bvop_info_t;
+
+// This structure contains the information extracted from the sprite trajectory.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SPRT_TRAJ,
+// using the fields vwi_payload in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Sprite Trajectory can have dmv_codes for each warping point.
+    // 0:13  - warping_mv_code_dv
+    // 14:27 - warping_mv_code_du
+    // 28:31 - warping_point_index - identifies which warping point the warping code refers to.
+    //         The default value for index is 0xF which should be treated as invalid.
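+    // Worked example (illustrative, assuming two's-complement storage of
+    // the dmv codes): warping point 0 with du = 5 and dv = -3 (0x3FFD in
+    // 14 bits) packs as (0 << 28) | (5 << 14) | 0x3FFD = 0x00017FFD.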
+ #define viddec_fw_mp4_get_warping_point_index(x) viddec_fw_bitfields_extract((x), 28, 0xF) + #define viddec_fw_mp4_set_warping_point_index(x, val) viddec_fw_bitfields_insert((x), val, 28, 0xF) + #define viddec_fw_mp4_get_warping_mv_code_du(x) viddec_fw_bitfields_extract((x), 14, 0x3FFF) + #define viddec_fw_mp4_set_warping_mv_code_du(x, val) viddec_fw_bitfields_insert((x), val, 14, 0x3FFF) + #define viddec_fw_mp4_get_warping_mv_code_dv(x) viddec_fw_bitfields_extract((x), 0, 0x3FFF) + #define viddec_fw_mp4_set_warping_mv_code_dv(x, val) viddec_fw_bitfields_insert((x), val, 0, 0x3FFF) + unsigned int warping_mv_code[3]; +} viddec_fw_mp4_sprite_trajectory_t; + +// IQUANT entries will be populated in the workload using items of type VIDDEC_WORKLOAD_MP4_IQUANT and the +// vwi_payload array. The entries will be in the order in which they need to be programmed in the registers. +// There is no need for a separate structure for these values. + +// This structure contains the information extracted from the Video Plane with Short Header. +// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SVH, using +// the "vwi_payload" array in viddec_workload_item_t. +// TODO: Add default values in the comments for each item +typedef struct +{ + // Video Plane with Short Header + // 0:7 - temporal_reference + // 8:19 - num_macroblocks_in_gob + // 20:24 - num_gobs_in_vop + // 25:27 - num_rows_in_gob + #define viddec_fw_mp4_get_num_rows_in_gob(x) viddec_fw_bitfields_extract((x)->svh_data, 25, 0x7) + #define viddec_fw_mp4_set_num_rows_in_gob(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 25, 0x7) + #define viddec_fw_mp4_get_num_gobs_in_vop(x) viddec_fw_bitfields_extract((x)->svh_data, 20, 0x1F) + #define viddec_fw_mp4_set_num_gobs_in_vop(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 20, 0x1F) + #define viddec_fw_mp4_get_num_macroblocks_in_gob(x) viddec_fw_bitfields_extract((x)->svh_data, 8, 0xFFF) + #define viddec_fw_mp4_set_num_macroblocks_in_gob(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 8, 0xFFF) + #define viddec_fw_mp4_get_temporal_reference(x) viddec_fw_bitfields_extract((x)->svh_data, 0, 0xFF) + #define viddec_fw_mp4_set_temporal_reference(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 0, 0xFF) + unsigned int svh_data; + + unsigned int pad1; + unsigned int pad2; +} viddec_fw_mp4_svh_t; + +#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c new file mode 100644 index 0000000..f595c91 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c @@ -0,0 +1,371 @@ +#include "viddec_fw_workload.h" +#include "viddec_parser_ops.h" +#include "viddec_fw_mp4.h" +#include "viddec_mp4_parse.h" + +uint32_t viddec_fw_mp4_populate_attr(viddec_workload_t *wl, viddec_mp4_parser_t *parser) +{ + uint32_t result = MP4_STATUS_OK; + viddec_frame_attributes_t *attr = &(wl->attrs); + mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject); + + memset(attr, 0, sizeof(viddec_frame_attributes_t)); + + attr->cont_size.width = vol->video_object_layer_width; + attr->cont_size.height = vol->video_object_layer_height; + + // Translate vop_coding_type + switch(vol->VideoObjectPlane.vop_coding_type) + { + case MP4_VOP_TYPE_B: + attr->frame_type = VIDDEC_FRAME_TYPE_B; + break; + case MP4_VOP_TYPE_P: + attr->frame_type = VIDDEC_FRAME_TYPE_P; + break; + case MP4_VOP_TYPE_S: + attr->frame_type = VIDDEC_FRAME_TYPE_S; + break; + case MP4_VOP_TYPE_I: + 
attr->frame_type = VIDDEC_FRAME_TYPE_I; + break; + default: + break; + } // switch on vop_coding_type + + attr->mpeg4.top_field_first = vol->VideoObjectPlane.top_field_first; + + return result; +} // viddec_fw_mp4_populate_attr + +uint32_t viddec_fw_mp4_insert_vol_workitem(void *parent, viddec_mp4_parser_t *parser) +{ + uint32_t result = MP4_STATUS_OK; + viddec_workload_item_t wi; + viddec_fw_mp4_vol_info_t vol_info; + mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject); + + memset(&vol_info, 0, sizeof(viddec_fw_mp4_vol_info_t)); + + // Get vol_flags + viddec_fw_mp4_set_reversible_vlc(&vol_info, vol->reversible_vlc); + viddec_fw_mp4_set_data_partitioned(&vol_info, vol->data_partitioned); + viddec_fw_mp4_set_resync_marker_disable(&vol_info, vol->resync_marker_disable); + viddec_fw_mp4_set_quarter_sample(&vol_info, vol->quarter_sample); + viddec_fw_mp4_set_obmc_disable(&vol_info, vol->obmc_disable); + viddec_fw_mp4_set_interlaced(&vol_info, vol->interlaced); + viddec_fw_mp4_set_vol_shape(&vol_info, vol->video_object_layer_shape); + viddec_fw_mp4_set_short_video_header_flag(&vol_info, vol->short_video_header); + + // Get vol_size + viddec_fw_mp4_set_vol_width(&vol_info, vol->video_object_layer_width); + viddec_fw_mp4_set_vol_height(&vol_info, vol->video_object_layer_height); + + // Get vol_item + viddec_fw_mp4_set_quant_type(&vol_info, vol->quant_type); + viddec_fw_mp4_set_quant_precision(&vol_info, vol->quant_precision); + viddec_fw_mp4_set_sprite_warping_accuracy(&vol_info, vol->sprite_info.sprite_warping_accuracy); + viddec_fw_mp4_set_sprite_warping_points(&vol_info, vol->sprite_info.no_of_sprite_warping_points); + viddec_fw_mp4_set_sprite_enable(&vol_info, vol->sprite_enable); + viddec_fw_mp4_set_vop_time_increment_resolution(&vol_info, vol->vop_time_increment_resolution); + + + wi.vwi_type = VIDDEC_WORKLOAD_MP4_VOL_INFO; + wi.vwi_payload[0] = vol_info.vol_flags; + wi.vwi_payload[1] = vol_info.vol_size; + wi.vwi_payload[2] = vol_info.vol_item; + + result = viddec_pm_append_workitem(parent, &wi); + + return result; +} // viddec_fw_mp4_insert_vol_workitem + +uint32_t viddec_fw_mp4_insert_vop_workitem(void *parent, viddec_mp4_parser_t *parser) +{ + uint32_t result = MP4_STATUS_OK; + viddec_workload_item_t wi; + viddec_fw_mp4_vop_info_t vop_info; + mp4_VideoObjectPlane_t *vop = &(parser->info.VisualObject.VideoObject.VideoObjectPlane); + uint32_t byte = 0; + unsigned char is_emul; + + memset(&vop_info, 0, sizeof(viddec_fw_mp4_vop_info_t)); + + // Get frame_info + viddec_fw_mp4_set_past_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_2].is_field); + viddec_fw_mp4_set_past_frame_id(&vop_info, VIDDEC_MP4_FRAME_PAST); + viddec_fw_mp4_set_future_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_1].is_field); + viddec_fw_mp4_set_future_frame_id(&vop_info, VIDDEC_MP4_FRAME_FUTURE); + viddec_fw_mp4_set_current_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_0].is_field); + viddec_fw_mp4_set_current_frame_id(&vop_info, VIDDEC_MP4_FRAME_CURRENT); + + // HW has a limitation that the enums for PAST(1), FUTURE(2) and CURRENT(0) cannot be changed and + // the spec does not support field pictures. Hence the field_frame bits are always zero. + // This gives us the constant 0x10200. 
+ vop_info.frame_info = 0x10200; + + // Get vop_data + // Quant scale is in the video_packet_header or the gob_layer - both of which are parsed by the BSP + viddec_fw_mp4_set_vop_quant_scale(&vop_info, 0); + viddec_fw_mp4_set_vop_fcode_backward(&vop_info, vop->vop_fcode_backward); + viddec_fw_mp4_set_vop_fcode_forward(&vop_info, vop->vop_fcode_forward); + viddec_fw_mp4_set_vop_quant(&vop_info, vop->vop_quant); + viddec_fw_mp4_set_alternate_vertical_scan_flag(&vop_info, vop->alternate_vertical_scan_flag); + viddec_fw_mp4_set_top_field_first(&vop_info, vop->top_field_first); + viddec_fw_mp4_set_intra_dc_vlc_thr(&vop_info, vop->intra_dc_vlc_thr); + viddec_fw_mp4_set_vop_rounding_type(&vop_info, vop->vop_rounding_type); + viddec_fw_mp4_set_vop_coding_type(&vop_info, vop->vop_coding_type); + + // Get vol_item + result = viddec_pm_get_au_pos(parent, &vop_info.bit_offset, &byte, &is_emul); + + wi.vwi_type = VIDDEC_WORKLOAD_MP4_VOP_INFO; + wi.vwi_payload[0] = vop_info.frame_info; + wi.vwi_payload[1] = vop_info.vop_data; + wi.vwi_payload[2] = vop_info.bit_offset; + + result = viddec_pm_append_workitem(parent, &wi); + + return result; +} // viddec_fw_mp4_insert_vop_workitem + +uint32_t viddec_fw_mp4_insert_vpsh_workitem(void *parent, viddec_mp4_parser_t *parser) +{ + uint32_t result = MP4_STATUS_OK; + viddec_workload_item_t wi; + viddec_fw_mp4_svh_t svh_info; + mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263); + + memset(&svh_info, 0, sizeof(viddec_fw_mp4_svh_t)); + + // Get svh_data + viddec_fw_mp4_set_temporal_reference(&svh_info, svh->temporal_reference); + viddec_fw_mp4_set_num_macroblocks_in_gob(&svh_info, svh->num_macroblocks_in_gob); + viddec_fw_mp4_set_num_gobs_in_vop(&svh_info, svh->num_gobs_in_vop); + viddec_fw_mp4_set_num_rows_in_gob(&svh_info, svh->num_rows_in_gob); + + wi.vwi_type = VIDDEC_WORKLOAD_MP4_SVH; + wi.vwi_payload[0] = svh_info.svh_data; + wi.vwi_payload[1] = svh_info.pad1; + wi.vwi_payload[2] = svh_info.pad2; + + result = viddec_pm_append_workitem(parent, &wi); + + return result; +} // viddec_fw_mp4_insert_vpsh_workitem + +uint32_t viddec_fw_mp4_insert_sprite_workitem(void *parent, viddec_mp4_parser_t *parser) +{ + uint32_t result = MP4_STATUS_OK; + viddec_workload_item_t wi; + viddec_fw_mp4_sprite_trajectory_t sprite_info; + mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject); + mp4_VideoObjectPlane_t *vop = &(parser->info.VisualObject.VideoObject.VideoObjectPlane); + uint8_t no_of_entries_per_item = 3; + uint8_t no_of_sprite_workitems = 0; + uint8_t warp_index = 0; + int i, j; + + if(!vol->sprite_info.no_of_sprite_warping_points) + return result; + + no_of_sprite_workitems = (vol->sprite_info.no_of_sprite_warping_points > 3) ? 
2 : 1;
+
+    for(i=0; i<no_of_sprite_workitems; i++)
+    {
+        for(j=0; j<no_of_entries_per_item; j++)
+        {
+            if(warp_index < vol->sprite_info.no_of_sprite_warping_points)
+            {
+                viddec_fw_mp4_set_warping_point_index(sprite_info.warping_mv_code[j], warp_index);
+                viddec_fw_mp4_set_warping_mv_code_du(sprite_info.warping_mv_code[j], vop->warping_mv_code_du[warp_index]);
+                viddec_fw_mp4_set_warping_mv_code_dv(sprite_info.warping_mv_code[j], vop->warping_mv_code_dv[warp_index]);
+            }
+            else
+            {
+                sprite_info.warping_mv_code[j] = 0xF << 28;
+            }
+            warp_index++;
+        }
+
+        wi.vwi_type = VIDDEC_WORKLOAD_MP4_SPRT_TRAJ;
+        wi.vwi_payload[0] = sprite_info.warping_mv_code[0];
+        wi.vwi_payload[1] = sprite_info.warping_mv_code[1];
+        wi.vwi_payload[2] = sprite_info.warping_mv_code[2];
+
+        result = viddec_pm_append_workitem(parent, &wi);
+    }
+
+    return result;
+} // viddec_fw_mp4_insert_sprite_workitem
+
+uint32_t viddec_fw_mp4_insert_bvop_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+
+    wi.vwi_type = VIDDEC_WORKLOAD_MP4_BVOP_INFO;
+    wi.vwi_payload[0] = vol->Tframe;
+    wi.vwi_payload[1] = vol->TRD;
+    wi.vwi_payload[2] = vol->TRB;
+
+    result = viddec_pm_append_workitem(parent, &wi);
+
+    return result;
+} // viddec_fw_mp4_insert_bvop_workitem
+
+uint32_t viddec_fw_mp4_insert_qmat(void *parent, uint8_t intra_quant_flag, uint32_t *qmat)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    uint8_t i;
+
+    // No of items = (64/4 DWORDS / 3 entries per workload item)
+    // 64 8b entries => 64 * 8 / 32 DWORDS => 64/4 DWORDS => 16 DWORDS
+    // Each item can store 3 DWORDS, 16 DWORDS => 16/3 items => 6 items
+    for(i=0; i<6; i++)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+
+        if(intra_quant_flag)
+            wi.vwi_type = VIDDEC_WORKLOAD_MP4_IQUANT;
+        else
+            wi.vwi_type = VIDDEC_WORKLOAD_MP4_NIQUANT;
+
+        if(i == 5)
+        {
+            // The last item carries only the one remaining DWORD (16 = 5*3 + 1),
+            // so do not read past the end of the 16-DWORD matrix.
+            wi.vwi_payload[0] = qmat[0];
+            wi.vwi_payload[1] = 0;
+            wi.vwi_payload[2] = 0;
+        }
+        else
+        {
+            wi.vwi_payload[0] = qmat[0];
+            wi.vwi_payload[1] = qmat[1];
+            wi.vwi_payload[2] = qmat[2];
+        }
+
+        qmat += 3;
+
+        result = viddec_pm_append_workitem(parent, &wi);
+    }
+
+    return result;
+} // viddec_fw_mp4_insert_qmat
+
+uint32_t viddec_fw_mp4_insert_inversequant_workitem(void *parent, mp4_VOLQuant_mat_t *qmat)
+{
+    uint32_t result = MP4_STATUS_OK;
+
+    if(qmat->load_intra_quant_mat)
+    {
+        result = viddec_fw_mp4_insert_qmat(parent, true, (uint32_t *) &(qmat->intra_quant_mat));
+    }
+
+    if(qmat->load_nonintra_quant_mat)
+    {
+        result = viddec_fw_mp4_insert_qmat(parent, false, (uint32_t *) &(qmat->nonintra_quant_mat));
+    }
+
+    return result;
+} // viddec_fw_mp4_insert_inversequant_workitem
+
+uint32_t viddec_fw_mp4_insert_past_frame_workitem(void *parent)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+
+    wi.vwi_type = VIDDEC_WORKLOAD_MP4_PAST_FRAME;
+    wi.ref_frame.reference_id     = 0;
+    wi.ref_frame.luma_phys_addr   = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    result = viddec_pm_append_workitem(parent, &wi);
+
+    return result;
+} // viddec_fw_mp4_insert_past_frame_workitem
+
+uint32_t viddec_fw_mp4_insert_future_frame_workitem(void *parent)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+
+    wi.vwi_type = VIDDEC_WORKLOAD_MP4_FUTURE_FRAME;
+    wi.ref_frame.reference_id     = 0;
+    wi.ref_frame.luma_phys_addr   = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    result = viddec_pm_append_workitem(parent, &wi);
+
+    return result;
+} // viddec_fw_mp4_insert_future_frame_workitem
+
+uint32_t viddec_fw_mp4_insert_reorder_workitem(void *parent)
+{
uint32_t result = MP4_STATUS_OK; + viddec_workload_item_t wi; + + // Move frame at location 1 of the reference table to location 0 + wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER; + wi.ref_reorder.ref_table_offset = 0; + wi.ref_reorder.ref_reorder_00010203 = 0x01010203; + wi.ref_reorder.ref_reorder_04050607 = 0x04050607; + + result = viddec_pm_append_workitem(parent, &wi); + + return result; +} // viddec_fw_mp4_insert_reorder_workitem + +uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt) +{ + uint32_t result = 0; + viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt; + viddec_workload_t *wl = viddec_pm_get_header(parent); + + result = viddec_fw_mp4_populate_attr(wl, parser); + result = viddec_fw_mp4_insert_vol_workitem(parent, parser); + result = viddec_fw_mp4_insert_vop_workitem(parent, parser); + result = viddec_fw_mp4_insert_sprite_workitem(parent, parser); + result = viddec_fw_mp4_insert_inversequant_workitem(parent, &(parser->info.VisualObject.VideoObject.quant_mat_info)); + + if(parser->info.VisualObject.VideoObject.short_video_header) + result = viddec_fw_mp4_insert_vpsh_workitem(parent, parser); + + if(!parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded) + wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME; + + // Send reference re-order tag for all reference frame types + if (parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type != MP4_VOP_TYPE_B) + { + result = viddec_fw_mp4_insert_reorder_workitem(parent); + } + + // Handle vop_coding_type based information + switch(parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type) + { + case MP4_VOP_TYPE_B: + result = viddec_fw_mp4_insert_bvop_workitem(parent, parser); + result = viddec_fw_mp4_insert_past_frame_workitem(parent); + result = viddec_fw_mp4_insert_future_frame_workitem(parent); + break; + case MP4_VOP_TYPE_P: + case MP4_VOP_TYPE_S: + result = viddec_fw_mp4_insert_past_frame_workitem(parent); + // Deliberate fall-thru to type I + case MP4_VOP_TYPE_I: + wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (1 & WORKLOAD_REFERENCE_FRAME_BMASK); + // Swap reference information + parser->ref_frame[VIDDEC_MP4_INDX_2] = parser->ref_frame[VIDDEC_MP4_INDX_1]; + parser->ref_frame[VIDDEC_MP4_INDX_1] = parser->ref_frame[VIDDEC_MP4_INDX_0]; + break; + break; + default: + break; + } // switch on vop_coding_type + + result = viddec_pm_append_pixeldata(parent); + + return result; +} // viddec_fw_mp4_emit_workload + diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c new file mode 100644 index 0000000..4ae9135 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c @@ -0,0 +1,98 @@ +#include "viddec_mp4_decodevideoobjectplane.h" + +mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t* pInfo) +{ + mp4_Status_t status = MP4_STATUS_OK; + uint32_t vop_time=0; +// mp4_VisualObject_t *vo = &(pInfo->VisualObject); + mp4_VideoObjectLayer_t *vol = &(pInfo->VisualObject.VideoObject); + mp4_GroupOfVideoObjectPlane_t *gvop = &(pInfo->VisualObject.VideoObject.GroupOfVideoObjectPlane); + mp4_VideoObjectPlane_t *vop = &(pInfo->VisualObject.VideoObject.VideoObjectPlane); + + // set VOP time + if (vol->short_video_header) + { + vop_time = vol->vop_sync_time + + pInfo->VisualObject.VideoObject.VideoObjectPlaneH263.temporal_reference * 1001; + +// if (vo->currentFrame.time > vop_time) + { + vol->vop_sync_time += 256 * 1001; + vop_time += 256 * 
1001; + } + } + else + { + if (vop->vop_coding_type == MP4_VOP_TYPE_B) + { + vop_time = vol->vop_sync_time_b + vop->modulo_time_base * vol->vop_time_increment_resolution + vop->vop_time_increment; + } + else + { + if (gvop->time_base > vol->vop_sync_time) + vol->vop_sync_time = gvop->time_base; + + vop_time = vol->vop_sync_time + vop->modulo_time_base * vol->vop_time_increment_resolution + vop->vop_time_increment; + + if (vol->vop_sync_time_b < vol->vop_sync_time) + vol->vop_sync_time_b = vol->vop_sync_time; + + if (vop->modulo_time_base != 0) + vol->vop_sync_time = vop_time - vop->vop_time_increment; + } + } + + if(vop->vop_coded) + { + switch (vop->vop_coding_type) + { + case MP4_VOP_TYPE_S: + if (vol->sprite_enable != MP4_SPRITE_GMC) + break; + // Deliberate fall-through from this case + case MP4_VOP_TYPE_I: + case MP4_VOP_TYPE_P: + // set past and future time for B-VOP + vol->pastFrameTime = vol->futureFrameTime; + vol->futureFrameTime = vop_time; + break; + default: + break; + } + } + + if (vop->vop_coded) +// || (vop_time != vo->currentFrame.time && vop_time != vo->pastFrame.time && vop_time != vo->futureFrame.time) ) + { + if(vop->vop_coding_type == MP4_VOP_TYPE_B) + { + if (!vol->Tframe) + vol->Tframe = (int) (vop_time); // - vo->pastFrame.time); + + if (vop->vop_coded) + { + vol->TRB = (int) (vop_time - vol->pastFrameTime); + vol->TRD = (int) (vol->futureFrameTime - vol->pastFrameTime); + + // defense from bad streams when B-VOPs are before Past and/or Future + if (vol->TRB <= 0) + vol->TRB = 1; + + if (vol->TRD <= 0) + vol->TRD = 2; + + if (vol->TRD <= vol->TRB) + { + vol->TRB = 1; + vol->TRD = 2; + } + + if (vol->Tframe >= vol->TRD) + vol->Tframe = vol->TRB; + } + } + } + + return status; +} // mp4_DecodeVideoObjectPlane + diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h new file mode 100644 index 0000000..2cb3c87 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h @@ -0,0 +1,11 @@ +#ifndef VIDDEC_MP4_DECODEVIDEOOBJECTPLANE_H +#define VIDDEC_MP4_DECODEVIDEOOBJECTPLANE_H +#include "viddec_fw_debug.h" +#include "viddec_parser_ops.h" +#include "viddec_mp4_parse.h" + +mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t *pInfo); + +//void mp4_copy_info_to_dmem(mp4_Info_t *pInfo, mp4_MBHWInterface *ptr_parameters); + +#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c new file mode 100644 index 0000000..b4cc302 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c @@ -0,0 +1,278 @@ +#include "viddec_fw_debug.h" +#include "viddec_parser_ops.h" +#include "viddec_mp4_parse.h" +#include "viddec_mp4_decodevideoobjectplane.h" +#include "viddec_mp4_shortheader.h" +#include "viddec_mp4_videoobjectlayer.h" +#include "viddec_mp4_videoobjectplane.h" +#include "viddec_mp4_visualobject.h" + +extern uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state); + +void viddec_mp4_get_context_size(viddec_parser_memory_sizes_t *size) +{ + /* Should return size of my structure */ + size->context_size = sizeof(viddec_mp4_parser_t); + size->persist_size = 0; + return; +} // viddec_mp4_get_context_size + +uint32_t viddec_mp4_wkld_done(void *parent, void *ctxt, uint32_t next_sc, uint32_t *codec_specific_errors) +{ + viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt; + int result = VIDDEC_PARSE_SUCESS; + uint8_t frame_boundary = 
false; + uint8_t force_frame_complete = false; + + //DEB("entering is_wkld_done: next_sc: 0x%x, sc_seen: %d\n", next_sc, parser->sc_seen); + + parent = parent; + + // VS, VO, VOL, VOP or GVOP start codes indicate frame boundary. + frame_boundary = ( (MP4_SC_VISUAL_OBJECT_SEQUENCE == next_sc) || + (MP4_SC_VISUAL_OBJECT == next_sc) || + ((MP4_SC_VIDEO_OBJECT_LAYER_MIN <= next_sc) && (next_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) || + (next_sc <= MP4_SC_VIDEO_OBJECT_MAX) || + (MP4_SC_VIDEO_OBJECT_PLANE == next_sc) || + ((SHORT_THIRD_STARTCODE_BYTE & 0xFC) == (next_sc & 0xFC)) || + (MP4_SC_GROUP_OF_VOP == next_sc) ); + + // EOS and discontinuity should force workload completion. + force_frame_complete = ((VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc)); + + if(frame_boundary | force_frame_complete) + { + *codec_specific_errors = 0; + + // Frame is considered complete and without errors, if a VOL was received since startup and + // if a VOP was received for this workload. + if (!((parser->sc_seen & MP4_SC_SEEN_VOL) && (parser->sc_seen & MP4_SC_SEEN_VOP)) && !(parser->sc_seen & MP4_SC_SEEN_SVH)) + *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + + /* + This is the strategy for error detection. + Errors in any field needed by the firmware (parser/decoder) are treated as non-decodable. + Errors in other fields will be considered decodable. + Defaults/alternate strategies will be considered on a case-by-case basis as customer content is seen. + + ERROR_TYPE | PARSING | INVALID/UNSUPPORTED | BS = Bitstream error + ----------------------------------------------------------------- UNSUP = Un-supported + DFLT_PRESENT | YES | NO | YES | NO | ND = Non-decodable + COMPONENT USED | | | | | DFLT = Populate defaults + ----------------------------------------------------------------- + FIRMWARE | BS+ND | BS+ND | UNSUP+ND | UNSUP+ND | + DRIVER/USER | BS+DFLT | BS | UNSUP | UNSUP | + NONE | BS | BS | UNSUP | UNSUP | + | | | Continue Parsing | + */ + if((parser->bitstream_error & MP4_BS_ERROR_HDR_NONDEC) || (parser->bitstream_error & MP4_BS_ERROR_FRM_NONDEC)) + *codec_specific_errors |= (VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE | VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM); + + if((parser->bitstream_error & MP4_BS_ERROR_HDR_UNSUP) || (parser->bitstream_error & MP4_BS_ERROR_FRM_UNSUP)) + *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED; + + if((parser->bitstream_error & MP4_BS_ERROR_HDR_PARSE) || (parser->bitstream_error & MP4_BS_ERROR_FRM_PARSE)) + *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_BITSTREAM_ERROR; + + parser->bitstream_error &= MP4_HDR_ERROR_MASK; + parser->sc_seen &= MP4_SC_SEEN_VOL; + result = VIDDEC_PARSE_FRMDONE; + } + //DEB("exiting is_wkld_done: next_sc: 0x%x, sc_seen: %d, err: %d, fr_bnd:%d, force:%d\n", + // next_sc, parser->sc_seen, *codec_specific_errors, frame_boundary, force_frame_complete); + + return result; +} // viddec_mp4_wkld_done + +void viddec_mp4_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) +{ + viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt; + + persist_mem = persist_mem; + parser->is_frame_start = false; + parser->prev_sc = MP4_SC_INVALID; + parser->current_sc = MP4_SC_INVALID; + parser->cur_sc_prefix = false; + parser->next_sc_prefix = false; + parser->ignore_scs = false; + + if(preserve) + { + // Need to maintain information till VOL + parser->sc_seen &= MP4_SC_SEEN_VOL; + parser->bitstream_error &= MP4_HDR_ERROR_MASK; + + // Reset only frame related data + 
memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlane), 0, sizeof(mp4_VideoObjectPlane_t)); + memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263), 0, sizeof(mp4_VideoObjectPlaneH263)); + } + else + { + parser->sc_seen = MP4_SC_SEEN_INVALID; + parser->bitstream_error = MP4_BS_ERROR_NONE; + memset(&(parser->info), 0, sizeof(mp4_Info_t)); + } + + return; +} // viddec_mp4_init + +static uint32_t viddec_mp4_decodevop_and_emitwkld(void *parent, void *ctxt) +{ + int status = MP4_STATUS_OK; + viddec_mp4_parser_t *cxt = (viddec_mp4_parser_t *)ctxt; + + status = mp4_DecodeVideoObjectPlane(&(cxt->info)); + +#ifndef VBP + status = viddec_fw_mp4_emit_workload(parent, ctxt); +#endif + + return status; +} // viddec_mp4_decodevop_and_emitwkld + +uint32_t viddec_mp4_parse(void *parent, void *ctxt) +{ + uint32_t sc=0; + viddec_mp4_parser_t *cxt; + uint8_t is_svh=0; + int32_t getbits=0; + int32_t status = 0; + + cxt = (viddec_mp4_parser_t *)ctxt; + is_svh = (cxt->cur_sc_prefix) ? false: true; + if((getbits = viddec_pm_peek_bits(parent, &sc, 32)) == -1) + { + DEB("Start code not found\n"); + return VIDDEC_PARSE_ERROR; + } + + if(!is_svh) + { + viddec_pm_get_bits(parent, &sc, 32); + sc = sc & 0xFF; + cxt->current_sc = sc; + cxt->current_sc |= 0x100; + DEB("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc); + + switch(sc) + { + case MP4_SC_VISUAL_OBJECT_SEQUENCE: + { + status = mp4_Parse_VisualSequence(parent, cxt); + cxt->prev_sc = MP4_SC_VISUAL_OBJECT_SEQUENCE; + DEB("MP4_VISUAL_OBJECT_SEQUENCE_SC: \n"); + break; + } + case MP4_SC_VISUAL_OBJECT_SEQUENCE_EC: + {/* Not required to do anything */ + break; + } + case MP4_SC_USER_DATA: + { /* Copy userdata to user-visible buffer (EMIT) */ + status = mp4_Parse_UserData(parent, cxt); + DEB("MP4_USER_DATA_SC: \n"); + break; + } + case MP4_SC_GROUP_OF_VOP: + { + status = mp4_Parse_GroupOfVideoObjectPlane(parent, cxt); + cxt->prev_sc = MP4_SC_GROUP_OF_VOP; + DEB("MP4_GROUP_OF_VOP_SC:0x%.8X\n", status); + break; + } + case MP4_SC_VIDEO_SESSION_ERROR: + {/* Not required to do anything?? */ + break; + } + case MP4_SC_VISUAL_OBJECT: + { + status = mp4_Parse_VisualObject(parent, cxt); + cxt->prev_sc = MP4_SC_VISUAL_OBJECT; + DEB("MP4_VISUAL_OBJECT_SC: status=%.8X\n", status); + break; + } + case MP4_SC_VIDEO_OBJECT_PLANE: + { + /* We must decode the VOP Header information, it does not end on a byte boundary, so we need to emit + a starting bit offset after parsing the header. 
*/ + status = mp4_Parse_VideoObjectPlane(parent, cxt); + status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); + // TODO: Fix this for interlaced + cxt->is_frame_start = true; + cxt->sc_seen |= MP4_SC_SEEN_VOP; + + DEB("MP4_VIDEO_OBJECT_PLANE_SC: status=0x%.8X\n", status); + break; + } + case MP4_SC_STUFFING: + { + break; + } + default: + { + if( (sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX) ) + { + status = mp4_Parse_VideoObjectLayer(parent, cxt); + cxt->sc_seen = MP4_SC_SEEN_VOL; + cxt->prev_sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN; + DEB("MP4_VIDEO_OBJECT_LAYER_MIN_SC:status=0x%.8X\n", status); + sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN; + } + // sc is unsigned and will be >= 0, so no check needed for sc >= MP4_SC_VIDEO_OBJECT_MIN + else if(sc <= MP4_SC_VIDEO_OBJECT_MAX) + { + // If there is more data, it is short video header, else the next start code is expected to be VideoObjectLayer + getbits = viddec_pm_get_bits(parent, &sc, 22); + if(getbits != -1) + { + cxt->current_sc = sc; + status = mp4_Parse_VideoObject_svh(parent, cxt); + status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); + cxt->sc_seen = MP4_SC_SEEN_SVH; + cxt->is_frame_start = true; + DEB("MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc); + DEB("MP4_VIDEO_OBJECT_MIN_SC:status=0x%.8X\n", status); + } + } + else + { + DEB("UNKWON Cod:0x%08X\n", sc); + } + } + break; + } + } + else + { + viddec_pm_get_bits(parent, &sc, 22); + cxt->current_sc = sc; + DEB("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc); + status = mp4_Parse_VideoObject_svh(parent, cxt); + status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); + cxt->sc_seen = MP4_SC_SEEN_SVH; + cxt->is_frame_start = true; + DEB("SVH: MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc); + } + + // Current sc becomes the previous sc + cxt->prev_sc = sc; + + return VIDDEC_PARSE_SUCESS; +} // viddec_mp4_parse + +uint32_t viddec_mp4_is_frame_start(void *ctxt) +{ + viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *)ctxt; + return parser->is_frame_start; +} // viddec_mp4_is_frame_start + +void viddec_mp4_get_ops(viddec_parser_ops_t *ops) +{ + ops->parse_syntax = viddec_mp4_parse; + ops->get_cxt_size = viddec_mp4_get_context_size; + ops->is_wkld_done = viddec_mp4_wkld_done; + ops->parse_sc = viddec_parse_sc_mp4; + ops->is_frame_start = viddec_mp4_is_frame_start; + ops->init = viddec_mp4_init; + return; +} // viddec_mp4_get_ops diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h new file mode 100644 index 0000000..12447a4 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h @@ -0,0 +1,527 @@ +#ifndef VIDDEC_MP4_PARSE_H +#define VIDDEC_MP4_PARSE_H + +#include "viddec_fw_debug.h" +#include "viddec_fw_mp4.h" + +/* Macros for MP4 start code detection */ +#define FIRST_STARTCODE_BYTE 0x00 +#define SECOND_STARTCODE_BYTE 0x00 +#define THIRD_STARTCODE_BYTE 0x01 +#define SHORT_THIRD_STARTCODE_BYTE 0x80 +#define SC_BYTE_MASK0 0x00ff0000 +#define SC_BYTE_MASK1 0x000000ff + +/* status codes */ +typedef enum +{ + MP4_STATUS_OK = 0, /* Success */ + MP4_STATUS_PARSE_ERROR = (1 << 0), /* Invalid syntax */ + MP4_STATUS_NOTSUPPORT = (1 << 1), /* unsupported feature */ + MP4_STATUS_REQD_DATA_ERROR = (1 << 2), /* supported data either invalid or missing */ +} mp4_Status_t; + +/* feature codes */ +typedef enum +{ + MP4_VOP_FEATURE_DEFAULT = 0, // Default VOP features, no code image update needed + MP4_VOP_FEATURE_SVH = 1, 
// VOP has Short Video Header + MP4_VOP_FEATURE_DP = 2 // VOP is Data Partitioned +} mp4_Vop_feature; + +/* MPEG-4 start code values: Table 6-3 */ +typedef enum +{ + MP4_SC_VIDEO_OBJECT_MIN = 0x00, + MP4_SC_VIDEO_OBJECT_MAX = 0x1F, + MP4_SC_VIDEO_OBJECT_LAYER_MIN = 0x20, + MP4_SC_VIDEO_OBJECT_LAYER_MAX = 0x2F, + MP4_SC_FGS_BP_MIN = 0x40, /* Unsupported */ + MP4_SC_FGS_BP_MAX = 0x5F, /* Unsupported */ + MP4_SC_VISUAL_OBJECT_SEQUENCE = 0xB0, + MP4_SC_VISUAL_OBJECT_SEQUENCE_EC = 0xB1, + MP4_SC_USER_DATA = 0xB2, + MP4_SC_GROUP_OF_VOP = 0xB3, + MP4_SC_VIDEO_SESSION_ERROR = 0xB4, + MP4_SC_VISUAL_OBJECT = 0xB5, + MP4_SC_VIDEO_OBJECT_PLANE = 0xB6, + MP4_SC_SLICE = 0xB7, /* Unsupported */ + MP4_SC_EXTENSION = 0xB8, /* Unsupported */ + MP4_SC_FGS_VOP = 0xB9, /* Unsupported */ + MP4_SC_FBA_OBJECT = 0xBA, /* Unsupported */ + MP4_SC_FBA_OBJECT_PLANE = 0xBB, /* Unsupported */ + MP4_SC_MESH_OBJECT = 0xBC, /* Unsupported */ + MP4_SC_MESH_OBJECT_PLANE = 0xBD, /* Unsupported */ + MP4_SC_STILL_TEXTURE_OBJECT = 0xBE, /* Unsupported */ + MP4_SC_TEXTURE_SPATIAL_LAYER = 0xBF, /* Unsupported */ + MP4_SC_TEXTURE_SNR_LAYER = 0xC0, /* Unsupported */ + MP4_SC_TEXTURE_TILE = 0xC1, /* Unsupported */ + MP4_SC_TEXTURE_SHAPE_LAYER = 0xC2, /* Unsupported */ + MP4_SC_STUFFING = 0xC3, + MP4_SC_SYTEM_MIN = 0xC6, /* Unsupported */ + MP4_SC_SYTEM_MAX = 0xFF, /* Unsupported */ + MP4_SC_INVALID = 0x100, /* Invalid */ +}mp4_start_code_values_t; + +/* MPEG-4 code values + ISO/IEC 14496-2:2004 table 6-6 */ +enum +{ + MP4_VISUAL_OBJECT_TYPE_VIDEO = 1, + MP4_VISUAL_OBJECT_TYPE_TEXTURE = 2, + MP4_VISUAL_OBJECT_TYPE_MESH = 3, + MP4_VISUAL_OBJECT_TYPE_FBA = 4, + MP4_VISUAL_OBJECT_TYPE_3DMESH = 5 +}; + +/* ISO/IEC 14496-2:2004 table 6-7 */ +enum +{ + MP4_VIDEO_FORMAT_COMPONENT = 0, + MP4_VIDEO_FORMAT_PAL = 1, + MP4_VIDEO_FORMAT_NTSC = 2, + MP4_VIDEO_FORMAT_SECAM = 3, + MP4_VIDEO_FORMAT_MAC = 4, + MP4_VIDEO_FORMAT_UNSPECIFIED = 5 +}; + +/* ISO/IEC 14496-2:2004 table 6-8..10 */ +enum +{ + MP4_VIDEO_COLORS_FORBIDDEN = 0, + MP4_VIDEO_COLORS_ITU_R_BT_709 = 1, + MP4_VIDEO_COLORS_UNSPECIFIED = 2, + MP4_VIDEO_COLORS_RESERVED = 3, + MP4_VIDEO_COLORS_ITU_R_BT_470_2_M = 4, + MP4_VIDEO_COLORS_ITU_R_BT_470_2_BG = 5, + MP4_VIDEO_COLORS_SMPTE_170M = 6, + MP4_VIDEO_COLORS_SMPTE_240M = 7, + MP4_VIDEO_COLORS_GENERIC_FILM = 8 +}; + +/* ISO/IEC 14496-2:2004 table 6-11 */ +enum +{ + MP4_VIDEO_OBJECT_TYPE_SIMPLE = 1, + MP4_VIDEO_OBJECT_TYPE_SIMPLE_SCALABLE = 2, + MP4_VIDEO_OBJECT_TYPE_CORE = 3, + MP4_VIDEO_OBJECT_TYPE_MAIN = 4, + MP4_VIDEO_OBJECT_TYPE_NBIT = 5, + MP4_VIDEO_OBJECT_TYPE_2DTEXTURE = 6, + MP4_VIDEO_OBJECT_TYPE_2DMESH = 7, + MP4_VIDEO_OBJECT_TYPE_SIMPLE_FACE = 8, + MP4_VIDEO_OBJECT_TYPE_STILL_SCALABLE_TEXTURE = 9, + MP4_VIDEO_OBJECT_TYPE_ADVANCED_REAL_TIME_SIMPLE = 10, + MP4_VIDEO_OBJECT_TYPE_CORE_SCALABLE = 11, + MP4_VIDEO_OBJECT_TYPE_ADVANCED_CODING_EFFICIENCY = 12, + MP4_VIDEO_OBJECT_TYPE_ADVANCED_SCALABLE_TEXTURE = 13, + MP4_VIDEO_OBJECT_TYPE_SIMPLE_FBA = 14, + MP4_VIDEO_OBJECT_TYPE_SIMPLE_STUDIO = 15, + MP4_VIDEO_OBJECT_TYPE_CORE_STUDIO = 16, + MP4_VIDEO_OBJECT_TYPE_ADVANCED_SIMPLE = 17, + MP4_VIDEO_OBJECT_TYPE_FINE_GRANULARITY_SCALABLE = 18 +}; + +/* ISO/IEC 14496-2:2004 table 6.17 (maximum defined video_object_layer_shape_extension) */ +#define MP4_SHAPE_EXT_NUM 13 + +/* ISO/IEC 14496-2:2004 table 6-14 */ +enum +{ + MP4_ASPECT_RATIO_FORBIDDEN = 0, + MP4_ASPECT_RATIO_1_1 = 1, + MP4_ASPECT_RATIO_12_11 = 2, + MP4_ASPECT_RATIO_10_11 = 3, + MP4_ASPECT_RATIO_16_11 = 4, + MP4_ASPECT_RATIO_40_33 = 5, + MP4_ASPECT_RATIO_EXTPAR = 15 +}; + +/* ISO/IEC 
14496-2:2004 table 6-15 */ +#define MP4_CHROMA_FORMAT_420 1 + +/* ISO/IEC 14496-2:2004 table 6-16 */ +enum +{ + MP4_SHAPE_TYPE_RECTANGULAR = 0, + MP4_SHAPE_TYPE_BINARY = 1, + MP4_SHAPE_TYPE_BINARYONLY = 2, + MP4_SHAPE_TYPE_GRAYSCALE = 3 +}; + +/* ISO/IEC 14496-2:2004 table 6-19 */ +#define MP4_SPRITE_STATIC 1 +#define MP4_SPRITE_GMC 2 + +/* ISO/IEC 14496-2:2004 table 6-24 */ +enum +{ + MP4_VOP_TYPE_I = 0, + MP4_VOP_TYPE_P = 1, + MP4_VOP_TYPE_B = 2, + MP4_VOP_TYPE_S = 3, +}; + +/* ISO/IEC 14496-2:2004 table 6-26 */ +enum +{ + MP4_SPRITE_TRANSMIT_MODE_STOP = 0, + MP4_SPRITE_TRANSMIT_MODE_PIECE = 1, + MP4_SPRITE_TRANSMIT_MODE_UPDATE = 2, + MP4_SPRITE_TRANSMIT_MODE_PAUSE = 3 +}; + +/* ISO/IEC 14496-2:2004 table 7-3 */ +enum +{ + MP4_BAB_TYPE_MVDSZ_NOUPDATE = 0, + MP4_BAB_TYPE_MVDSNZ_NOUPDATE = 1, + MP4_BAB_TYPE_TRANSPARENT = 2, + MP4_BAB_TYPE_OPAQUE = 3, + MP4_BAB_TYPE_INTRACAE = 4, + MP4_BAB_TYPE_MVDSZ_INTERCAE = 5, + MP4_BAB_TYPE_MVDSNZ_INTERCAE = 6 +}; + +#define MP4_DC_MARKER 0x6B001 // 110 1011 0000 0000 0001 +#define MP4_MV_MARKER 0x1F001 // 1 1111 0000 0000 0001 + + +/* ISO/IEC 14496-2:2004 table G.1 */ +enum +{ + MP4_SIMPLE_PROFILE_LEVEL_1 = 0x01, + MP4_SIMPLE_PROFILE_LEVEL_2 = 0x02, + MP4_SIMPLE_PROFILE_LEVEL_3 = 0x03, + MP4_SIMPLE_PROFILE_LEVEL_4a = 0x04, + MP4_SIMPLE_PROFILE_LEVEL_5 = 0x05, + MP4_SIMPLE_PROFILE_LEVEL_6 = 0x06, + MP4_SIMPLE_PROFILE_LEVEL_0 = 0x08, + MP4_CORE_PROFILE_LEVEL_1 = 0x21, + MP4_CORE_PROFILE_LEVEL_2 = 0x22, + MP4_MAIN_PROFILE_LEVEL_2 = 0x32, + MP4_MAIN_PROFILE_LEVEL_3 = 0x33, + MP4_MAIN_PROFILE_LEVEL_4 = 0x34, + MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_1 = 0x91, + MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_2 = 0x92, + MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_3 = 0x93, + MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_4 = 0x94, + MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_1 = 0xB1, + MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_2 = 0xB2, + MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_3 = 0xB3, + MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_4 = 0xB4, + MP4_ADVANCED_CORE_PROFILE_LEVEL_1 = 0xC1, + MP4_ADVANCED_CORE_PROFILE_LEVEL_2 = 0xC2, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_0 = 0xF0, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_1 = 0xF1, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_2 = 0xF2, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3 = 0xF3, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_4 = 0xF4, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_5 = 0xF5, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3B = 0xF7 +}; + +/* Group Of Video Object Plane Info */ +typedef struct +{ + uint8_t closed_gov; + uint8_t broken_link; + uint8_t time_code_hours; + uint8_t time_code_minutes; + uint8_t time_code_seconds; + uint8_t dummy1; + uint16_t dummy2; + uint32_t time_base; +} mp4_GroupOfVideoObjectPlane_t; + + +/* Video Object Plane Info */ +typedef struct +{ + uint8_t vop_coding_type; + uint32_t modulo_time_base; + uint16_t vop_time_increment; + uint8_t vop_coded; + + uint16_t vop_id; + uint16_t vop_id_for_prediction; + uint8_t is_vop_id_for_prediction_indication; + uint8_t vop_rounding_type; + uint8_t vop_reduced_resolution; + uint8_t align_dummy; + + uint16_t vop_width; + uint16_t vop_height; + uint16_t vop_horizontal_mc_spatial_ref; + uint16_t vop_vertical_mc_spatial_ref; + + uint8_t background_composition; + uint8_t change_conv_ratio_disable; + uint8_t is_vop_constant_alpha; + uint8_t vop_constant_alpha_value; + uint8_t intra_dc_vlc_thr; + uint8_t top_field_first; + uint8_t alternate_vertical_scan_flag; + uint8_t sprite_transmit_mode; + + int32_t brightness_change_factor; + uint16_t vop_quant; + uint8_t vop_fcode_forward; 
+ uint8_t vop_fcode_backward; + + uint16_t warping_mv_code_du[4]; + uint16_t warping_mv_code_dv[4]; + +} mp4_VideoObjectPlane_t; + +/* VOLControlParameters Info */ +typedef struct +{ + uint8_t chroma_format; + uint8_t low_delay; + uint8_t vbv_parameters; + uint8_t align_dummy1; + uint32_t bit_rate; + uint32_t vbv_buffer_size; + uint32_t vbv_occupancy; +} mp4_VOLControlParameters_t; + +/* Video Object Plane with short header Info */ +typedef struct _mp4_VideoObjectPlaneH263 +{ + uint8_t temporal_reference; + uint8_t split_screen_indicator; + uint8_t document_camera_indicator; + uint8_t full_picture_freeze_release; + uint8_t source_format; + uint8_t picture_coding_type; + uint8_t vop_quant; + uint16_t num_gobs_in_vop; + uint16_t num_macroblocks_in_gob; + uint8_t num_rows_in_gob; + +#if 0 + uint8_t gob_number; + int gob_header_empty; + int gob_frame_id; + int quant_scale; +#endif +} mp4_VideoObjectPlaneH263; + +typedef struct +{ + uint16_t sprite_width; + uint16_t sprite_height; + uint16_t sprite_left_coordinate; + uint16_t sprite_top_coordinate; + uint16_t no_of_sprite_warping_points; + uint16_t sprite_warping_accuracy; + uint16_t sprite_brightness_change; + uint16_t low_latency_sprite_enable; +}mp4_VOLSpriteInfo_t; + +typedef struct +{ + uint8_t load_intra_quant_mat; + uint8_t load_nonintra_quant_mat; + uint16_t align_dummy1; + uint8_t intra_quant_mat[64]; + uint8_t nonintra_quant_mat[64]; +}mp4_VOLQuant_mat_t; + +/* Video Object Layer Info */ +typedef struct +{ + uint8_t video_object_layer_id; /* Last 4 bits of start code. */ + uint8_t short_video_header; + uint8_t random_accessible_vol; + uint8_t video_object_type_indication; + + uint8_t is_object_layer_identifier; + uint8_t video_object_layer_verid; + uint8_t video_object_layer_priority; + uint8_t aspect_ratio_info; + + uint8_t aspect_ratio_info_par_width; + uint8_t aspect_ratio_info_par_height; + uint8_t align_dummy1; + uint8_t is_vol_control_parameters; + + mp4_VOLControlParameters_t VOLControlParameters; + + uint8_t video_object_layer_shape; + uint16_t vop_time_increment_resolution; + uint8_t vop_time_increment_resolution_bits; + + uint8_t fixed_vop_rate; + uint16_t fixed_vop_time_increment; + uint16_t video_object_layer_width; + uint16_t video_object_layer_height; + uint8_t interlaced; + + uint8_t obmc_disable; + uint8_t sprite_enable; + mp4_VOLSpriteInfo_t sprite_info; + uint8_t not_8_bit; + uint8_t quant_precision; + + uint8_t bits_per_pixel; + uint8_t quant_type; + mp4_VOLQuant_mat_t quant_mat_info; + uint8_t quarter_sample; + uint8_t complexity_estimation_disable; + + uint8_t resync_marker_disable; + uint8_t data_partitioned; + uint8_t reversible_vlc; + uint8_t newpred_enable; + + uint8_t reduced_resolution_vop_enable; // verid != 1 + uint8_t scalability; + uint8_t low_latency_sprite_enable; + + mp4_GroupOfVideoObjectPlane_t GroupOfVideoObjectPlane; + mp4_VideoObjectPlane_t VideoObjectPlane; + mp4_VideoObjectPlaneH263 VideoObjectPlaneH263; + + // for interlaced B-VOP direct mode + uint32_t Tframe; + // for B-VOP direct mode + uint32_t TRB, TRD; + // time increment of past and future VOP for B-VOP + uint32_t pastFrameTime, futureFrameTime; + // VOP global time + uint32_t vop_sync_time, vop_sync_time_b; + +} mp4_VideoObjectLayer_t; + +/* video_signal_type Info */ +typedef struct +{ + uint8_t is_video_signal_type; + uint8_t video_format; + uint8_t video_range; + uint8_t is_colour_description; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coefficients; +} mp4_VideoSignalType_t; + +typedef struct 
_mp4_Frame { + long long int time; +} mp4_Frame; + +/* Visual Object Info */ +typedef struct +{ + uint8_t is_visual_object_identifier; + uint8_t visual_object_verid; + uint8_t visual_object_priority; + uint8_t visual_object_type; + mp4_VideoSignalType_t VideoSignalType; + mp4_VideoObjectLayer_t VideoObject; + + mp4_Frame currentFrame; // current + mp4_Frame pastFrame; // reference in past + mp4_Frame futureFrame; // reference in future +} mp4_VisualObject_t; + +/* Full Info */ +typedef struct +{ + mp4_VisualObject_t VisualObject; + uint8_t profile_and_level_indication; +} mp4_Info_t; + +enum +{ + MP4_SC_SEEN_INVALID = 0x0, + MP4_SC_SEEN_VOL = 0x1, + MP4_SC_SEEN_VOP = 0x2, + MP4_SC_SEEN_SVH = 0x4, +}; + +enum +{ + MP4_BS_ERROR_NONE = (0 << 0), + MP4_BS_ERROR_HDR_PARSE = (1 << 0), + MP4_BS_ERROR_HDR_NONDEC = (1 << 1), + MP4_BS_ERROR_HDR_UNSUP = (1 << 2), + MP4_BS_ERROR_FRM_PARSE = (1 << 3), + MP4_BS_ERROR_FRM_NONDEC = (1 << 4), + MP4_BS_ERROR_FRM_UNSUP = (1 << 5), +}; + +#define MP4_HDR_ERROR_MASK (MP4_BS_ERROR_HDR_PARSE | MP4_BS_ERROR_HDR_NONDEC | MP4_BS_ERROR_HDR_UNSUP) + +typedef enum +{ + VIDDEC_MP4_INDX_0 = 0, + VIDDEC_MP4_INDX_1 = 1, + VIDDEC_MP4_INDX_2 = 2, + VIDDEC_MP4_INDX_MAX = 3, +} viddec_fw_mp4_ref_index_t; + +typedef struct +{ + uint8_t is_field; +} viddec_mp4_ref_info_t; + +typedef struct +{ + // The relevant bitstream data for current stream + mp4_Info_t info; + + // The previous start code (without the prefix) + uint32_t prev_sc; + + // The current start code (without the prefix) + // TODO: Revisit for SVH + uint32_t current_sc; + + // Indicates if we look for both short and long video header or just the long video header + // If false, sc detection looks for both short and long video headers. + // If true, long video header has been seen and sc detection does not look for short video header any more. + uint8_t ignore_scs; + + // Indicates if the current start code prefix is long (if true). + uint8_t cur_sc_prefix; + + // Indicates if the next start code prefix is long (if true). 
+ uint8_t next_sc_prefix; + + // Indicates start of a frame + uint8_t is_frame_start; + + // Indicates which start codes were seen for this workload + uint8_t sc_seen; + + // Indicates bitstream errors if any + uint16_t bitstream_error; + + // Reference frame information + viddec_mp4_ref_info_t ref_frame[VIDDEC_MP4_INDX_MAX]; + +}viddec_mp4_parser_t; + +#define BREAK_GETBITS_FAIL(x, ret) { \ + if(x == -1){ \ + FWTRACE; \ + ret = MP4_STATUS_PARSE_ERROR; \ + break;} \ + } + +#define BREAK_GETBITS_REQD_MISSING(x, ret) { \ + if(x == -1){ \ + FWTRACE; \ + ret = MP4_STATUS_REQD_DATA_ERROR; \ + break;} \ + } + +extern void *memset(void *s, int32_t c, uint32_t n); + +uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt); + +void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status); + +#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c new file mode 100644 index 0000000..a3d894d --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c @@ -0,0 +1,134 @@ +#include "viddec_mp4_shortheader.h" + +typedef struct +{ + uint16_t vop_width; + uint16_t vop_height; + uint16_t num_macroblocks_in_gob; + uint16_t num_gobs_in_vop; + uint8_t num_rows_in_gob; +} svh_src_fmt_params_t; + +const svh_src_fmt_params_t svh_src_fmt_defaults[5] = +{ + {128, 96, 8, 6, 1}, + {176, 144, 11, 9, 1}, + {352, 288, 22, 18, 1}, + {704, 576, 88, 18, 2}, + {1408, 1152, 352, 18, 4}, +}; + +mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *parser) +{ + mp4_Status_t ret = MP4_STATUS_OK; + unsigned int data; + mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263); + int32_t getbits = 0; + + do + { + getbits = viddec_pm_get_bits(parent, &data, 27); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + + data = data >> 1; // zero_bit + svh->vop_quant = (data & 0x1F); + data = data >> 9; // vop_quant + four_reserved_zero_bits + svh->picture_coding_type = (data & 0x1); + data = data >> 1; // vop_quant + four_reserved_zero_bits + svh->source_format = (data & 0x7); + data = data >> 8; // source_format + full_picture_freeze_release + document_camera_indicator + split_screen_indicator + zero_bit + marker_bit + svh->temporal_reference = data; + + if (svh->source_format == 0 || svh->source_format > 5) + { + DEB("Error: Bad value for VideoPlaneWithShortHeader.source_format\n"); + ret = MP4_STATUS_NOTSUPPORT; + break; + } + + for (;;) + { + getbits = viddec_pm_get_bits(parent, &data, 1); // pei + BREAK_GETBITS_FAIL(getbits, ret); + if (!data) + break; + getbits = viddec_pm_get_bits(parent, &data, 8); // psupp + BREAK_GETBITS_FAIL(getbits, ret); + } + + // Anything after this needs to be fed to the decoder as PIXEL_ES + } while(0); + + return ret; +} + +mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser) +{ + mp4_Status_t ret=MP4_STATUS_OK; + mp4_Info_t *pInfo = &(parser->info); + mp4_VideoSignalType_t *vst = &(pInfo->VisualObject.VideoSignalType); + mp4_VideoObjectLayer_t *vol = &(pInfo->VisualObject.VideoObject); + mp4_VideoObjectPlane_t *vop = &(pInfo->VisualObject.VideoObject.VideoObjectPlane); + mp4_VideoObjectPlaneH263 *svh = &(pInfo->VisualObject.VideoObject.VideoObjectPlaneH263); + uint8_t index = 0; + + ret = mp4_Parse_VideoObjectPlane_svh(parent, parser); + if(ret == MP4_STATUS_OK) + { + // Populate defaults for the svh + vol->short_video_header = 1; + 
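mp4_Parse_VideoObjectPlane_svh above recovers five short-header fields from a single 27-bit read by repeated shift-and-mask steps. A standalone sketch of the same unpack may help when auditing the shift amounts; the bit layout is taken from the comments in that function, while svh_unpack and the test value are hypothetical and not part of the patch:

#include <stdio.h>
#include <stdint.h>

/* Unpacks the 27 bits that follow the 22-bit short-video-header start code.
   Assumed layout, MSB first: temporal_reference(8) marker(1) zero(1)
   split_screen(1) document_camera(1) freeze_release(1) source_format(3)
   picture_coding_type(1) reserved(4) vop_quant(5) zero_bit(1). */
static void svh_unpack(uint32_t data, uint8_t *tr, uint8_t *fmt,
                       uint8_t *ptype, uint8_t *quant)
{
    data >>= 1;              /* zero_bit */
    *quant = data & 0x1F;
    data >>= 9;              /* vop_quant(5) + reserved(4) */
    *ptype = data & 0x1;
    data >>= 1;
    *fmt = data & 0x7;
    data >>= 8;              /* source_format(3) + five flag/marker bits */
    *tr = (uint8_t)data;
}

int main(void)
{
    /* Fabricated example: temporal_reference=42, marker=1, source_format=2
       (QCIF per svh_src_fmt_defaults), P-picture, vop_quant=10. */
    uint32_t hdr = (42u << 19) | (1u << 18) | (2u << 11) | (1u << 10) | (10u << 1);
    uint8_t tr, fmt, ptype, quant;
    svh_unpack(hdr, &tr, &fmt, &ptype, &quant);
    printf("tr=%d fmt=%d type=%d quant=%d\n", tr, fmt, ptype, quant); /* 42 2 1 10 */
    return 0;
}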
vol->video_object_layer_shape = MP4_SHAPE_TYPE_RECTANGULAR; + vol->obmc_disable = 1; + vol->quant_type = 0; + vol->resync_marker_disable = 1; + vol->data_partitioned = 0; + vol->reversible_vlc = 0; + vol->interlaced = 0; + vol->complexity_estimation_disable = 1; + vol->scalability = 0; + vol->not_8_bit = 0; + vol->bits_per_pixel = 8; + vol->quant_precision = 5; + vol->vop_time_increment_resolution = 30000; + vol->fixed_vop_time_increment = 1001; + vol->aspect_ratio_info = MP4_ASPECT_RATIO_12_11; + + vop->vop_rounding_type = 0; + vop->vop_fcode_forward = 1; + vop->vop_coded = 1; + vop->vop_coding_type = svh->picture_coding_type ? MP4_VOP_TYPE_P: MP4_VOP_TYPE_I; + vop->vop_quant = svh->vop_quant; + + vst->colour_primaries = 1; + vst->transfer_characteristics = 1; + vst->matrix_coefficients = 6; + + index = svh->source_format - 1; + vol->video_object_layer_width = svh_src_fmt_defaults[index].vop_width; + vol->video_object_layer_height = svh_src_fmt_defaults[index].vop_height; + svh->num_macroblocks_in_gob = svh_src_fmt_defaults[index].num_macroblocks_in_gob; + svh->num_gobs_in_vop = svh_src_fmt_defaults[index].num_gobs_in_vop; + svh->num_rows_in_gob = svh_src_fmt_defaults[index].num_rows_in_gob; + } + + mp4_set_hdr_bitstream_error(parser, false, ret); + + // POPULATE WORKLOAD ITEM + { + viddec_workload_item_t wi; + + wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT; + + wi.mp4_vpsh.info = 0; + wi.mp4_vpsh.pad1 = 0; + wi.mp4_vpsh.pad2 = 0; + + viddec_fw_mp4_vpsh_set_source_format(&wi.mp4_vpsh, svh->source_format); + + ret = viddec_pm_append_workitem(parent, &wi); + if(ret == 1) + ret = MP4_STATUS_OK; + } + + return ret; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.h new file mode 100644 index 0000000..e2ecaaa --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.h @@ -0,0 +1,11 @@ +#ifndef VIDDEC_MP4_SHORTHEADER_H +#define VIDDEC_MP4_SHORTHEADER_H +#include "viddec_fw_debug.h" +#include "viddec_parser_ops.h" +#include "viddec_mp4_parse.h" + +mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *cxt); + +mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *cxt); + +#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c new file mode 100644 index 0000000..6df06b6 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c @@ -0,0 +1,596 @@ +#include "viddec_mp4_videoobjectlayer.h" + +const unsigned char mp4_DefaultIntraQuantMatrix[64] = { + 8, 17, 18, 19, 21, 23, 25, 27, + 17, 18, 19, 21, 23, 25, 27, 28, + 20, 21, 22, 23, 24, 26, 28, 30, + 21, 22, 23, 24, 26, 28, 30, 32, + 22, 23, 24, 26, 28, 30, 32, 35, + 23, 24, 26, 28, 30, 32, 35, 38, + 25, 26, 28, 30, 32, 35, 38, 41, + 27, 28, 30, 32, 35, 38, 41, 45 +}; +const unsigned char mp4_DefaultNonIntraQuantMatrix[64] = { + 16, 17, 18, 19, 20, 21, 22, 23, + 17, 18, 19, 20, 21, 22, 23, 24, + 18, 19, 20, 21, 22, 23, 24, 25, + 19, 20, 21, 22, 23, 24, 26, 27, + 20, 21, 22, 23, 25, 26, 27, 28, + 21, 22, 23, 24, 26, 27, 28, 30, + 22, 23, 24, 26, 27, 28, 30, 31, + 23, 24, 25, 27, 28, 30, 31, 33 +}; +const unsigned char mp4_ClassicalZigzag[64] = { + 0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5, + 12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28, + 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51, + 58, 59, 52, 45, 38, 
31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63 +}; + +static inline int mp4_GetMacroBlockNumberSize(int nmb) +{ + int nb = 0; + nmb --; + do { + nmb >>= 1; + nb ++; + } while (nmb); + return nb; +} + +static inline void mp4_copy_default_table(const uint8_t *src, uint8_t *dst, uint32_t len) +{ + uint32_t i; + for(i=0; i< len; i++) + dst[i] = src[i]; +} + + +static inline mp4_Status_t mp4_Parse_QuantMatrix(void *parent, uint8_t *pQM) +{ + uint32_t i,code=0; + uint8_t last=0; + int32_t getbits=0; + mp4_Status_t ret = MP4_STATUS_OK; + + for (i = 0; i < 64; i ++) + { + getbits = viddec_pm_get_bits(parent, &code, 8); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if (code == 0) break; + pQM[mp4_ClassicalZigzag[i]] = (uint8_t)(code & 0xFF); + } + if (i == 0) + { /* No coefficient was read: nothing to replicate, and i-1 below would underflow the table index */ + return ret; + } + last = pQM[mp4_ClassicalZigzag[i-1]]; + for (; i < 64; i ++) + { + pQM[mp4_ClassicalZigzag[i]] = last; + } + return ret; +} + +static inline uint8_t mp4_pvt_valid_object_type_indication(uint8_t val) +{ + return ((1 <= val) && (val <= 18)); /* valid object types per table 6-11 are [1,18] */ +} + +static inline uint8_t mp4_pvt_valid_object_layer_verid(uint8_t val) +{ + uint8_t ret=false; + switch(val) + { + case 1: + case 2: + case 4: + case 5: + { + ret = true; + break; + } + default: + { + break; + } + } + return ret; +} + +static mp4_Status_t +mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser) +{ + mp4_VOLControlParameters_t *cxt = &(parser->info.VisualObject.VideoObject.VOLControlParameters); + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + int32_t getbits=0; + uint32_t code=0; + + do + { + getbits = viddec_pm_get_bits(parent, &(code), 4); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + cxt->chroma_format = (code >> 2) & 0x3; + cxt->low_delay = ((code & 0x2) > 0); + cxt->vbv_parameters = code & 0x1; + + if (cxt->chroma_format != MP4_CHROMA_FORMAT_420) + { + DEB("Warning: mp4_Parse_VideoObject:vol_control_parameters.chroma_format != 4:2:0\n"); + cxt->chroma_format= MP4_CHROMA_FORMAT_420; + parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; + ret = MP4_STATUS_NOTSUPPORT; + } + + if(cxt->vbv_parameters) + {/* TODO: Check for validity of marker bits */ + getbits = viddec_pm_get_bits(parent, &(code), 32); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + /* 32 bits= firsthalf(15) + M + LatterHalf(15) + M */ + cxt->bit_rate = (code & 0xFFFE) >> 1; // Get rid of 1 marker bit + cxt->bit_rate |= ((code & 0xFFFE0000) >> 2); // Get rid of 2 marker bits + + if(cxt->bit_rate == 0) + { + DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.bit_rate = 0\n"); + parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; + ret = MP4_STATUS_NOTSUPPORT; + // Do we need to really break here? Why not just set an error and proceed + //break; + } + + getbits = viddec_pm_get_bits(parent, &(code), 19); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + /* 19 bits= firsthalf(15) + M + LatterHalf(3)*/ + cxt->vbv_buffer_size = code & 0x7; + cxt->vbv_buffer_size |= (((code >> 4) & 0x7FFF) << 3); // first half fills the upper 15 bits of the 18-bit value + if(cxt->vbv_buffer_size == 0) + { + DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.vbv_buffer_size = 0\n"); + parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; + ret = MP4_STATUS_NOTSUPPORT; + // Do we need to really break here?
Why not just set an error and proceed + //break; + } + + getbits = viddec_pm_get_bits(parent, &(code), 28); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + /* 28 bits= firsthalf(11) + M + LatterHalf(15) + M */ + code = code >>1; + cxt->vbv_occupancy = code & 0x7FFF; + code = code >>16; + cxt->vbv_occupancy |= ((code & 0x07FF) << 15); // first half fills the upper 11 bits of the 26-bit value + } + ret = MP4_STATUS_OK; + } while(0); + + return ret; +} + +static uint32_t mp4_pvt_count_number_of_bits(uint32_t val) +{ + uint32_t num_bits=0; + do{ + val >>= 1; + num_bits++; + }while(val); + return num_bits; +} + +static mp4_Status_t +mp4_Parse_VOL_sprite(void *parent, viddec_mp4_parser_t *parser) +{ + mp4_VideoObjectLayer_t *vidObjLay = (&parser->info.VisualObject.VideoObject); + mp4_VOLSpriteInfo_t *cxt = &(vidObjLay->sprite_info); + uint32_t sprite_enable = vidObjLay->sprite_enable; + uint32_t code; + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + int32_t getbits=0; + + do{ + if ((sprite_enable == MP4_SPRITE_STATIC) || + (sprite_enable == MP4_SPRITE_GMC)) + { + if (sprite_enable != MP4_SPRITE_GMC) + { + /* This is not a supported type by HW */ + DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + + getbits = viddec_pm_get_bits(parent, &(code), 9); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + cxt->sprite_brightness_change = code & 0x1; + cxt->sprite_warping_accuracy = (code >> 1) & 0x3; + cxt->no_of_sprite_warping_points = code >> 3; + if(cxt->no_of_sprite_warping_points > 1) + { + DEB("Error: mp4_Parse_VideoObject:bad no_of_sprite_warping_points %d\n", + cxt->no_of_sprite_warping_points); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + + if((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && (cxt->sprite_brightness_change)) + { + DEB("Error: mp4_Parse_VideoObject:sprite_brightness_change should be 0 for GMC sprites\n"); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + + if (vidObjLay->sprite_enable != MP4_SPRITE_GMC) + { + DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + } + ret = MP4_STATUS_OK; + }while(0); + + return ret; +} + +static mp4_Status_t mp4_Parse_VOL_quant_mat(void *parent, mp4_VideoObjectLayer_t *vidObjLay) +{ + uint32_t code; + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + int32_t getbits=0; + mp4_VOLQuant_mat_t *quant = &(vidObjLay->quant_mat_info); + + do{ + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + quant->load_intra_quant_mat = code; + if (quant->load_intra_quant_mat) + { + mp4_Parse_QuantMatrix(parent, &(quant->intra_quant_mat[0])); + } + else + { + mp4_copy_default_table((const uint8_t *)&mp4_DefaultIntraQuantMatrix[0], (uint8_t *)&(quant->intra_quant_mat[0]), 64); + } + + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + quant->load_nonintra_quant_mat = code; + if (quant->load_nonintra_quant_mat) + { + mp4_Parse_QuantMatrix(parent, &(quant->nonintra_quant_mat[0])); + } + else + { + mp4_copy_default_table((const uint8_t *)&mp4_DefaultNonIntraQuantMatrix[0], (uint8_t *)&(quant->nonintra_quant_mat[0]), 64); + } + ret = MP4_STATUS_OK; + }while(0); + return ret; +} + +static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_t *parser) +{ + uint32_t code; + mp4_Info_t *pInfo = &(parser->info); + mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject); + mp4_Status_t
ret = MP4_STATUS_PARSE_ERROR; + int32_t getbits=0; + + do{ + if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) + { + /* TODO: check for validity of marker bits */ + getbits = viddec_pm_get_bits(parent, &(code), 29); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->video_object_layer_height = (code >> 1) & 0x1FFF; + vidObjLay->video_object_layer_width = (code >> 15) & 0x1FFF; + } + + getbits = viddec_pm_get_bits(parent, &(code), 2); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->interlaced = ((code & 0x2) > 0); + vidObjLay->obmc_disable = ((code & 0x1) > 0); + + { + uint32_t num_bits=1; + if(vidObjLay->video_object_layer_verid != 1) num_bits=2; + getbits = viddec_pm_get_bits(parent, &(code), num_bits); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->sprite_enable = code; + } + + ret = mp4_Parse_VOL_sprite(parent, parser); + if(ret != MP4_STATUS_OK) + { + break; + } + + if ((vidObjLay->video_object_layer_verid != 1) && + (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)) + { + /* not supported shape*/ + DEB("Error: mp4_Parse_VideoObject: sadct_disable, not supp\n"); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_FAIL(getbits, ret); + vidObjLay->not_8_bit = (code > 0 ); + if(vidObjLay->not_8_bit) + { + /* 8 bit is only supported mode*/ + DEB("Error: mp4_Parse_VideoObject: not_8_bit, not supp\n"); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + else + {/* We use default values since only 8 bit mode is supported */ + vidObjLay->quant_precision = 5; + vidObjLay->bits_per_pixel = 8; + } + + if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE) + { + /* Should not get here as shape is checked earlier */ + DEB("Error: mp4_Parse_VideoObject: GRAYSCALE, not supp\n"); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->quant_type = code; + if (vidObjLay->quant_type) + { + ret = mp4_Parse_VOL_quant_mat(parent, vidObjLay); + if(ret != MP4_STATUS_OK) + { + break; + } + } + + if (vidObjLay->video_object_layer_verid != 1) + { + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->quarter_sample = code; + } + + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->complexity_estimation_disable = code; + if(!vidObjLay->complexity_estimation_disable) + {/* complexity estimation not supported */ + DEB("Error: mp4_Parse_VideoObject: vidObjLay->complexity_estimation_disable, not supp\n"); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + + getbits = viddec_pm_get_bits(parent, &(code), 2); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->resync_marker_disable = ((code & 0x2) > 0); + vidObjLay->data_partitioned = code & 0x1; + if(vidObjLay->data_partitioned) + { + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->reversible_vlc = code; + } + + if (vidObjLay->video_object_layer_verid != 1) + { + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_FAIL(getbits, ret); + vidObjLay->newpred_enable = code; + if(vidObjLay->newpred_enable) + { + DEB("Error: NEWPRED mode is not supported\n"); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + getbits = 
viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_FAIL(getbits, ret); + vidObjLay->reduced_resolution_vop_enable = code; + } + + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_FAIL(getbits, ret); + vidObjLay->scalability = code; + if(vidObjLay->scalability) + { + DEB("Error: VOL scalability is not supported\n"); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + + // No need to parse further - none of the fields are interesting to parser/decoder/user + ret = MP4_STATUS_OK; + }while(0); + return ret; +} + +mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parser) +{ + uint32_t code; + mp4_Info_t *pInfo = &(parser->info); + mp4_VisualObject_t *visObj = &(pInfo->VisualObject); + mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject); + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + int32_t getbits=0; + +//DEB("entering mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret); + do{ + vidObjLay->VideoObjectPlane.sprite_transmit_mode = MP4_SPRITE_TRANSMIT_MODE_PIECE; + + vidObjLay->short_video_header = 0; + vidObjLay->video_object_layer_id = (parser->current_sc & 0xF); + + getbits = viddec_pm_get_bits(parent, &code, 9); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->video_object_type_indication = code & 0xFF; + vidObjLay->random_accessible_vol = ((code & 0x100) > 0); + + if(!mp4_pvt_valid_object_type_indication(vidObjLay->video_object_type_indication)) + { /* Streams with "unknown" type mismatch with ref */ + DEB("Warning: video_object_type_indication = %d, forcing to 1\n", + vidObjLay->video_object_type_indication); + vidObjLay->video_object_type_indication = 1; + } + + if(vidObjLay->video_object_type_indication == MP4_VIDEO_OBJECT_TYPE_FINE_GRANULARITY_SCALABLE) + {/* This is not a supported type by HW */ + DEB("ERROR: mp4_Parse_VideoObject:video_object_type_indication = %.2X\n", + vidObjLay->video_object_type_indication); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + else + { + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->is_object_layer_identifier = code; + vidObjLay->video_object_layer_verid = + (mp4_pvt_valid_object_layer_verid(visObj->visual_object_verid)) ? 
visObj->visual_object_verid : 1; + + if (vidObjLay->is_object_layer_identifier) + { + getbits = viddec_pm_get_bits(parent, &(code), 7); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->video_object_layer_priority = code & 0x7; + vidObjLay->video_object_layer_verid = (code >> 3) & 0xF; + if(!mp4_pvt_valid_object_layer_verid(vidObjLay->video_object_layer_verid)) + { + DEB("Error: mp4_Parse_VideoObject:video_object_layer_verid = %d, expected [1,5]\n", + vidObjLay->video_object_layer_verid); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + /* Video object layer ID supersedes visual object ID */ + visObj->visual_object_verid = vidObjLay->video_object_layer_verid; + } + + getbits = viddec_pm_get_bits(parent, &(code), 4); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->aspect_ratio_info = code & 0xF; + if(vidObjLay->aspect_ratio_info == MP4_ASPECT_RATIO_EXTPAR) + { + getbits = viddec_pm_get_bits(parent, &(code), 16); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->aspect_ratio_info_par_width = (code >> 8) & 0xFF; + vidObjLay->aspect_ratio_info_par_height = code & 0xFF; + } + + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->is_vol_control_parameters = code; + if(vidObjLay->is_vol_control_parameters) + { + ret = mp4_pvt_VOL_volcontrolparameters(parent, parser); + if(ret != MP4_STATUS_OK) + { + break; + } + } + + getbits = viddec_pm_get_bits(parent, &(code), 2); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->video_object_layer_shape = code; + /* If shape is not rectangular exit early without parsing */ + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) + { + DEB("Error: mp4_Parse_VideoObject: shape not rectangular(%d):%d\n", + MP4_SHAPE_TYPE_RECTANGULAR, vidObjLay->video_object_layer_shape); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + + if ((vidObjLay->video_object_layer_verid != 1) && + (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE)) + {/* Grayscale not supported */ + DEB("Error: MP4_SHAPE_TYPE_GRAYSCALE not supported\n"); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + + getbits = viddec_pm_get_bits(parent, &(code), 19); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + /* TODO: check validity of marker */ + vidObjLay->vop_time_increment_resolution = (code >> 2) & 0xFFFF; + vidObjLay->fixed_vop_rate = code & 0x1; + + if(vidObjLay->vop_time_increment_resolution == 0) + { + DEB("Error: 0 value for vop_time_increment_resolution\n"); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + /* calculate the number of bits in vop_time_increment_resolution */ + vidObjLay->vop_time_increment_resolution_bits = (uint8_t)mp4_pvt_count_number_of_bits( + (uint32_t)(vidObjLay->vop_time_increment_resolution -1)); + + if(vidObjLay->fixed_vop_rate) + { + getbits = viddec_pm_get_bits(parent, &(code), vidObjLay->vop_time_increment_resolution_bits); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->fixed_vop_time_increment = code; + } + + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) + { + ret = mp4_Parse_VOL_notbinaryonly(parent, parser); + if(ret != MP4_STATUS_OK) + { + break; + } + } + else + { + DEB("Error: MP4_SHAPE_TYPE_BINARYONLY not supported\n"); + ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + break; + } + } + + vidObjLay->VideoObjectPlane.sprite_transmit_mode = MP4_SPRITE_TRANSMIT_MODE_PIECE; + ret = MP4_STATUS_OK; + } while(0); + + 
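The helper mp4_pvt_count_number_of_bits used above sizes the vop_time_increment field: the VOP header is later read with exactly count(resolution - 1) bits. A standalone restatement with a few worked values; the count_bits name and the sample resolutions are illustrative only, not from the patch:

#include <stdio.h>
#include <stdint.h>

/* Same loop as mp4_pvt_count_number_of_bits above: number of bits needed
   to represent val, with the quirk that val == 0 still reports 1 bit. */
static uint32_t count_bits(uint32_t val)
{
    uint32_t n = 0;
    do { val >>= 1; n++; } while (val);
    return n;
}

int main(void)
{
    /* vop_time_increment is coded in count_bits(resolution - 1) bits. */
    unsigned res[] = { 1, 25, 30, 30000, 65535 };
    unsigned i;
    for (i = 0; i < sizeof(res)/sizeof(res[0]); i++)
        printf("resolution %5u -> %2u bit(s) per vop_time_increment\n",
               res[i], count_bits(res[i] - 1));
    /* 1 -> 1, 25 -> 5, 30 -> 5, 30000 -> 15, 65535 -> 16 */
    return 0;
}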
mp4_set_hdr_bitstream_error(parser, true, ret); + if(ret != MP4_STATUS_OK) + parser->bitstream_error |= MP4_BS_ERROR_HDR_NONDEC; +//DEB("before wkld mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret); + + // POPULATE WORKLOAD ITEM + { + viddec_workload_item_t wi; + viddec_workload_t *wl = viddec_pm_get_header(parent); + + wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ; + + wi.mp4_vol.vol_aspect_ratio = 0; + wi.mp4_vol.vol_bit_rate = 0; + wi.mp4_vol.vol_frame_rate = 0; + + viddec_fw_mp4_vol_set_aspect_ratio_info(&wi.mp4_vol, vidObjLay->aspect_ratio_info); + viddec_fw_mp4_vol_set_par_width(&wi.mp4_vol, vidObjLay->aspect_ratio_info_par_width); + viddec_fw_mp4_vol_set_par_height(&wi.mp4_vol, vidObjLay->aspect_ratio_info_par_height); + viddec_fw_mp4_vol_set_control_param(&wi.mp4_vol, vidObjLay->is_vol_control_parameters); + viddec_fw_mp4_vol_set_chroma_format(&wi.mp4_vol, vidObjLay->VOLControlParameters.chroma_format); + viddec_fw_mp4_vol_set_interlaced(&wi.mp4_vol, vidObjLay->interlaced); + viddec_fw_mp4_vol_set_fixed_vop_rate(&wi.mp4_vol, vidObjLay->fixed_vop_rate); + + viddec_fw_mp4_vol_set_vbv_param(&wi.mp4_vol, vidObjLay->VOLControlParameters.vbv_parameters); + viddec_fw_mp4_vol_set_bit_rate(&wi.mp4_vol, vidObjLay->VOLControlParameters.bit_rate); + + viddec_fw_mp4_vol_set_fixed_vop_time_increment(&wi.mp4_vol, vidObjLay->fixed_vop_time_increment); + viddec_fw_mp4_vol_set_vop_time_increment_resolution(&wi.mp4_vol, vidObjLay->vop_time_increment_resolution); + + ret = viddec_pm_append_workitem(parent, &wi); + if(ret == 1) + ret = MP4_STATUS_OK; + + memset(&(wl->attrs), 0, sizeof(viddec_frame_attributes_t)); + + wl->attrs.cont_size.width = vidObjLay->video_object_layer_width; + wl->attrs.cont_size.height = vidObjLay->video_object_layer_height; + } + + return ret; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h new file mode 100644 index 0000000..4540b6b --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h @@ -0,0 +1,17 @@ +#ifndef VIDDEC_MP4_VIDEOOBJECTLAYER_H +#define VIDDEC_MP4_VIDEOOBJECTLAYER_H +#include "viddec_fw_debug.h" +#include "viddec_parser_ops.h" +#include "viddec_mp4_parse.h" + +void mp4_ResetVOL(mp4_Info_t *pInfo); + +mp4_Status_t mp4_InitVOL(mp4_Info_t *pInfo); + +mp4_Status_t mp4_FreeVOL(mp4_Info_t *pInfo); + +mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *cxt); + + + +#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c new file mode 100644 index 0000000..9840af4 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c @@ -0,0 +1,422 @@ +#include "viddec_mp4_videoobjectplane.h" + +mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t *parser) +{ + mp4_Info_t* pInfo = &(parser->info); + uint32_t code; + int32_t getbits=0; + mp4_Status_t ret = MP4_STATUS_REQD_DATA_ERROR; + mp4_GroupOfVideoObjectPlane_t *data; + uint32_t time_code = 0; + + data = &(pInfo->VisualObject.VideoObject.GroupOfVideoObjectPlane); + + do + { + getbits = viddec_pm_get_bits(parent, &code, 20); + BREAK_GETBITS_FAIL(getbits, ret); + ret = MP4_STATUS_OK; + + data->broken_link = ((code & 0x1) > 0); + data->closed_gov = ((code & 0x2) > 0); + time_code = code = code >> 2; + data->time_code_seconds = code & 0x3F; + code = code >> 6; + if((code & 1) == 
0) + {/* SGA:Should we ignore marker bit? */ + DEB("Error:mp4_Parse_GroupOfVideoObjectPlane: Invalid marker\n"); + } + code = code >>1; + data->time_code_minutes = code & 0x3F; + code = code >> 6; + data->time_code_hours = code & 0x1F; + + // This is the timebase in full second units + data->time_base = data->time_code_seconds + (60*data->time_code_minutes) + (3600*data->time_code_hours); + // Need to convert this into no. of ticks + data->time_base *= pInfo->VisualObject.VideoObject.vop_time_increment_resolution; + + } while(0); + + mp4_set_hdr_bitstream_error(parser, true, ret); + + // POPULATE WORKLOAD ITEM + { + viddec_workload_item_t wi; + + wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ; + + wi.mp4_gvop.gvop_info = 0; + wi.mp4_gvop.pad1 = 0; + wi.mp4_gvop.pad2 = 0; + + viddec_fw_mp4_gvop_set_broken_link(&wi.mp4_gvop, data->broken_link); + viddec_fw_mp4_gvop_set_closed_gov(&wi.mp4_gvop, data->closed_gov); + viddec_fw_mp4_gvop_set_time_code(&wi.mp4_gvop, time_code); + + ret = viddec_pm_append_workitem(parent, &wi); + if(ret == 1) + ret = MP4_STATUS_OK; + } + + return ret; +} + +static inline mp4_Status_t mp4_brightness_change(void *parent, int32_t *b_change) +{ + uint32_t code; + int32_t getbits=0; + + *b_change = 0; + getbits = viddec_pm_peek_bits(parent, &code, 4); + if (code == 15) + { + getbits = viddec_pm_skip_bits(parent, 4); + getbits = viddec_pm_get_bits(parent, &code, 10); + *b_change = 625 + code; + } + else if (code == 14) + { + getbits = viddec_pm_skip_bits(parent, 4); + getbits = viddec_pm_get_bits(parent, &code, 9); + *b_change = 113 + code; + } + else if (code >= 12) + { + getbits = viddec_pm_skip_bits(parent, 3); + getbits = viddec_pm_get_bits(parent, &code, 7); + *b_change = (code < 64) ? ((int32_t)code - 112) : ((int32_t)code - 15); + } + else if (code >= 8) + { + getbits = viddec_pm_skip_bits(parent, 2); + getbits = viddec_pm_get_bits(parent, &code, 6); + *b_change = (code < 32) ? ((int32_t)code - 48) : ((int32_t)code - 15); + } + else + { + getbits = viddec_pm_skip_bits(parent, 1); + getbits = viddec_pm_get_bits(parent, &code, 5); + *b_change = (code < 16) ? ((int32_t)code - 16) : ((int32_t)code - 15); + } + + return ( (getbits == -1) ? MP4_STATUS_PARSE_ERROR: MP4_STATUS_OK); +} +static inline int32_t mp4_Sprite_dmv_length(void * parent, int32_t *dmv_length) +{ + uint32_t code, skip; + int32_t getbits=0; + mp4_Status_t ret= MP4_STATUS_PARSE_ERROR; + *dmv_length=0; + skip=3; + do{ + getbits = viddec_pm_peek_bits(parent, &code, skip); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + + if(code == 7) + { + viddec_pm_skip_bits(parent, skip); + getbits = viddec_pm_peek_bits(parent, &code, 9); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + + skip=1; + while((code & 256) != 0) + {/* count number of 1 bits */ + code <<=1; + skip++; + } + *dmv_length = 5 + skip; + } + else + { + skip=(code <= 1) ? 
2 : 3; + *dmv_length = code - 1; + } + viddec_pm_skip_bits(parent, skip); + ret= MP4_STATUS_OK; + + }while(0); + return ret; +} + +static inline mp4_Status_t +mp4_Sprite_Trajectory(void *parent, mp4_VideoObjectLayer_t *vidObjLay, mp4_VideoObjectPlane_t *vidObjPlane) +{ + uint32_t code, i; + int32_t dmv_length=0, dmv_code=0, getbits=0; + mp4_Status_t ret = MP4_STATUS_OK; + for(i=0; i < (uint32_t)vidObjLay->sprite_info.no_of_sprite_warping_points; i++ ) + { + ret = mp4_Sprite_dmv_length(parent, &dmv_length); + if(ret != MP4_STATUS_OK) + { + break; + } + if(dmv_length <= 0) + { + dmv_code = 0; + } + else + { + getbits = viddec_pm_get_bits(parent, &code, (uint32_t)dmv_length); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + dmv_code = (int32_t)code; + if ((dmv_code & (1 << (dmv_length - 1))) == 0) + { + dmv_code -= (1 << dmv_length) - 1; + } + } + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if(code != 1) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + vidObjPlane->warping_mv_code_du[i] = dmv_code; + /* TODO: create another inline function to avoid code duplication */ + ret = mp4_Sprite_dmv_length(parent, &dmv_length); + if(ret != MP4_STATUS_OK) + { + break; + } + if(dmv_length <= 0) + { + dmv_code = 0; + } + else + { + getbits = viddec_pm_get_bits(parent, &code, (uint32_t)dmv_length); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + dmv_code = (int32_t)code; + if ((dmv_code & (1 << (dmv_length - 1))) == 0) + { + dmv_code -= (1 << dmv_length) - 1; + } + } + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if(code != 1) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + vidObjPlane->warping_mv_code_dv[i] = dmv_code; + + } + return ret; +} + +static inline mp4_Status_t mp4_pvt_extract_modulotimebase_from_VideoObjectPlane(void *parent, uint32_t *base) +{ + mp4_Status_t ret= MP4_STATUS_OK; + int32_t getbits=0; + uint32_t code = 0; + + *base = 0; + do + { + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + *base += code; + }while(code != 0); + return ret; +} + +mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parser) +{ + uint32_t code; + mp4_Info_t *pInfo = &(parser->info); + mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject); + mp4_VideoObjectPlane_t *vidObjPlane = &(pInfo->VisualObject.VideoObject.VideoObjectPlane); + int32_t getbits=0; + mp4_Status_t ret= MP4_STATUS_PARSE_ERROR; + + do + { + getbits = viddec_pm_get_bits(parent, &code, 2); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_coding_type = code & 0x3; + if( mp4_pvt_extract_modulotimebase_from_VideoObjectPlane(parent, + &(vidObjPlane->modulo_time_base)) == MP4_STATUS_REQD_DATA_ERROR) + { + break; + } + + getbits = viddec_pm_get_bits(parent, &code, 1); + /* TODO: check for marker bit validity */ + { + uint32_t numbits=0; + numbits = vidObjLay->vop_time_increment_resolution_bits; + if(numbits == 0) numbits=1; /*TODO:check if its greater than 16 bits ?? 
*/ + getbits = viddec_pm_get_bits(parent, &code, numbits); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_time_increment = code; + } + + getbits = viddec_pm_get_bits(parent, &code, 2); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + + vidObjPlane->vop_coded = code & 0x1; + if(vidObjPlane->vop_coded == 0) + { + ret = MP4_STATUS_OK;/* Exit point 1 */ + break; + } + + if(vidObjLay->newpred_enable) + { + /* New pred mode not supported in HW */ + DEB("Error: mp4_Parse_VideoObjectPlane: New pred in vidObjPlane is not supported\n"); + ret = MP4_STATUS_NOTSUPPORT; + break; + } + + if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) && + ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P) || + ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S) && + (vidObjLay->sprite_enable == MP4_SPRITE_GMC)))) + { + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_rounding_type = code; + } + + if (vidObjLay->reduced_resolution_vop_enable && + (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) && + ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) || + (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P))) + { + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_reduced_resolution = code; + if (vidObjPlane->vop_reduced_resolution) + { + DEB("Error: mp4_Parse_VideoObjectPlane: Reduced Resolution vidObjPlane is not supported\n"); + ret = MP4_STATUS_NOTSUPPORT; + break; + } + } + + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) + { + /* we support only rectangular shapes so the following logic is not required */ + ret = MP4_STATUS_NOTSUPPORT; + break; + } + + if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) && + (!vidObjLay->complexity_estimation_disable)) + { + /* Not required according to DE team */ + //read_vop_complexity_estimation_header(); + ret = MP4_STATUS_NOTSUPPORT; + break; + } + + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) + { + getbits = viddec_pm_get_bits(parent, &code, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->intra_dc_vlc_thr = code; + if (vidObjLay->interlaced) + { + getbits = viddec_pm_get_bits(parent, &code, 2); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->top_field_first = ((code & 0x2) > 0); + vidObjPlane->alternate_vertical_scan_flag = code & 0x1; + } + } + + if (((vidObjLay->sprite_enable == MP4_SPRITE_STATIC) || (vidObjLay->sprite_enable == MP4_SPRITE_GMC)) && + (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S)) + { + if (vidObjLay->sprite_info.no_of_sprite_warping_points > 0){ + if (mp4_Sprite_Trajectory(parent, vidObjLay, vidObjPlane) != MP4_STATUS_OK){ + break; + } + } + vidObjPlane->brightness_change_factor = 0; + if (vidObjLay->sprite_info.sprite_brightness_change) + { + int32_t change=0; + if(mp4_brightness_change(parent, &change) == MP4_STATUS_PARSE_ERROR) + { + break; + } + vidObjPlane->brightness_change_factor = change; + } + + if (vidObjLay->sprite_enable == MP4_SPRITE_STATIC) + { + /* SGA: IS decode sprite not required. 
Is static even supported */ + ret = MP4_STATUS_OK;/* Exit point 2 */ + break; + } + } + + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) + { + // Length of vop_quant is specified by quant_precision + getbits = viddec_pm_get_bits(parent, &code, vidObjLay->quant_precision); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_quant = code; + if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + if (vidObjPlane->vop_coding_type != MP4_VOP_TYPE_I) + { + vidObjPlane->vop_fcode_forward = 0; + getbits = viddec_pm_get_bits(parent, &code, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_fcode_forward = code & 0x7; + if (vidObjPlane->vop_fcode_forward == 0) + { + DEB("Error: vop_fcode_forward == 0\n"); + break; + } + } + if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B) + { + vidObjPlane->vop_fcode_backward = 0; + getbits = viddec_pm_get_bits(parent, &code, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_fcode_backward = code &0x7; + if (vidObjPlane->vop_fcode_backward == 0) + { + DEB("Error: vop_fcode_backward == 0\n"); + break; + } + } + if (!vidObjLay->scalability) + { + if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) && + (vidObjPlane->vop_coding_type != MP4_VOP_TYPE_I)) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + // The remaining data contains the macroblock information that is handled by the BSP + // The offsets to be sent to the BSP are obtained in the workload population + } + else + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + } + else + {/* Binary Not supported */ + ret = MP4_STATUS_NOTSUPPORT; + break; + } + /* Since we made it all the way here it a success condition */ + ret = MP4_STATUS_OK; /* Exit point 3 */ + }while(0); + + mp4_set_hdr_bitstream_error(parser, false, ret); + + return ret; +} // mp4_Parse_VideoObjectPlane diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h new file mode 100644 index 0000000..b54f642 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h @@ -0,0 +1,11 @@ +#ifndef VIDDEC_MP4_VIDEOOBJECTPLANE_H +#define VIDDEC_MP4_VIDEOOBJECTPLANE_H +#include "viddec_fw_debug.h" +#include "viddec_parser_ops.h" +#include "viddec_mp4_parse.h" + +mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t *parser); + +mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parser); + +#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c new file mode 100644 index 0000000..36c0b29 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c @@ -0,0 +1,287 @@ +#include "viddec_mp4_visualobject.h" + +static inline uint8_t mp4_pvt_isValid_verID(uint8_t id) +{ + uint8_t ret=true; + switch(id) + { + case 1: + case 2: + case 4: + case 5: + { + break; + } + default: + { + ret = false; + break; + } + } + return ret; +} // mp4_pvt_isValid_verID + +static mp4_Status_t mp4_Parse_video_signal_type(void *parent, mp4_VideoSignalType_t *vidSignal) +{ + uint32_t data=0; + int32_t getbits=0; + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + + /* Set default values defined in spec first */ + vidSignal->video_format = 5; + vidSignal->video_range = 0; + vidSignal->colour_primaries = 1; + vidSignal->transfer_characteristics = 1; + 
vidSignal->matrix_coefficients = 1; + do + { + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_FAIL(getbits, ret); + vidSignal->is_video_signal_type = (data > 0); + if(vidSignal->is_video_signal_type) + { + getbits = viddec_pm_get_bits(parent, &data, 5); + BREAK_GETBITS_FAIL(getbits, ret); + vidSignal->is_colour_description = data & 0x1; + vidSignal->video_range = ((data & 0x2) > 0); + data = data >> 2; + vidSignal->video_format = data & 0x7; + if(vidSignal->is_colour_description) + { + getbits = viddec_pm_get_bits(parent, &data, 24); + BREAK_GETBITS_FAIL(getbits, ret); + vidSignal->colour_primaries = (data >> 16) & 0xFF; + vidSignal->transfer_characteristics = (data >> 8) & 0xFF; + vidSignal->matrix_coefficients = data & 0xFF; + } + } + ret = MP4_STATUS_OK; + }while(0); + + return ret; +} // mp4_Parse_video_signal_type + +void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status) +{ + //DEB("Entering mp4_set_hdr_bitstream_error: bs_err: 0x%x, hdr: %d, parse_status: %d\n", + // parser->bitstream_error, hdr_flag, parse_status); + + if(hdr_flag) + { + if(parse_status & MP4_STATUS_NOTSUPPORT) + parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; + if(parse_status & MP4_STATUS_PARSE_ERROR) + parser->bitstream_error |= MP4_BS_ERROR_HDR_PARSE; + if(parse_status & MP4_STATUS_REQD_DATA_ERROR) + parser->bitstream_error |= MP4_BS_ERROR_HDR_NONDEC; + parser->bitstream_error &= MP4_HDR_ERROR_MASK; + } + else + { + if(parse_status & MP4_STATUS_NOTSUPPORT) + parser->bitstream_error |= MP4_BS_ERROR_FRM_UNSUP; + if(parse_status & MP4_STATUS_PARSE_ERROR) + parser->bitstream_error |= MP4_BS_ERROR_FRM_PARSE; + if(parse_status & MP4_STATUS_REQD_DATA_ERROR) + parser->bitstream_error |= MP4_BS_ERROR_FRM_NONDEC; + } + + //DEB("Exiting mp4_set_hdr_bitstream_error: bs_err: 0x%x\n", parser->bitstream_error); + + return; +} // mp4_set_hdr_bitstream_error + +mp4_Status_t mp4_Parse_VisualSequence(void *parent, viddec_mp4_parser_t *parser) +{ + uint32_t data=0; + int32_t getbits=0; + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + + getbits = viddec_pm_get_bits(parent, &data, 8); + if(getbits != -1) + { + parser->info.profile_and_level_indication = data & 0xFF; + // If present, check for validity + switch(parser->info.profile_and_level_indication) + { + case MP4_SIMPLE_PROFILE_LEVEL_0: + case MP4_SIMPLE_PROFILE_LEVEL_1: + case MP4_SIMPLE_PROFILE_LEVEL_2: + case MP4_SIMPLE_PROFILE_LEVEL_3: + case MP4_SIMPLE_PROFILE_LEVEL_4a: + case MP4_SIMPLE_PROFILE_LEVEL_5: + case MP4_SIMPLE_PROFILE_LEVEL_6: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_0: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_1: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_2: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_4: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_5: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3B: + parser->bitstream_error = MP4_BS_ERROR_NONE; + ret = MP4_STATUS_OK; + break; + default: + parser->bitstream_error = MP4_BS_ERROR_HDR_UNSUP | MP4_BS_ERROR_HDR_NONDEC; + break; + } + } + else + { + parser->bitstream_error = MP4_BS_ERROR_HDR_PARSE | MP4_BS_ERROR_HDR_NONDEC; + } + + return ret; +} // mp4_Parse_VisualSequence + +mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser) +{ + mp4_Info_t *pInfo = &(parser->info); + mp4_VisualObject_t *visObj = &(pInfo->VisualObject); + uint32_t data=0; + int32_t getbits=0; + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + + do + { + getbits = viddec_pm_get_bits(parent, &data, 1); + 
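mp4_Parse_video_signal_type above unpacks a 5-bit group holding, from high to low, video_format(3), video_range(1) and is_colour_description(1), optionally followed by a 24-bit triple of colour_primaries, transfer_characteristics and matrix_coefficients. A standalone sketch of the same unpacking with fabricated input words; video_format code 1 means PAL per table 6-7 in viddec_mp4_parse.h, everything else here is illustrative:

#include <stdio.h>

int main(void)
{
    unsigned group  = 0x07;      /* fabricated: format=1 (PAL), range=1, colour desc present */
    unsigned triple = 0x010106;  /* fabricated: primaries=1, transfer=1, matrix=6 */

    /* Same shift/mask order as the parser above. */
    printf("is_colour_description=%u\n", group & 0x1);
    printf("video_range=%u\n", (group & 0x2) > 0);
    printf("video_format=%u\n", (group >> 2) & 0x7);

    printf("colour_primaries=%u\n", (triple >> 16) & 0xFF);
    printf("transfer_characteristics=%u\n", (triple >> 8) & 0xFF);
    printf("matrix_coefficients=%u\n", triple & 0xFF);
    return 0;
}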
BREAK_GETBITS_FAIL(getbits, ret); + visObj->is_visual_object_identifier = (data > 0); + + visObj->visual_object_verid = 1; /* Default value as per spec */ + if (visObj->is_visual_object_identifier) + { + viddec_pm_get_bits(parent, &data, 7); + visObj->visual_object_priority = data & 0x7; + data = data >> 3; + if(mp4_pvt_isValid_verID(data & 0xF)) + { + visObj->visual_object_verid = data & 0xF; + } + else + { + DEB("Warning: Unsupported visual_object_verid\n"); + parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; + // Continue parsing as it is not a required field for decoder + } + } + + getbits = viddec_pm_get_bits(parent, &data, 4); + BREAK_GETBITS_FAIL(getbits, ret); + visObj->visual_object_type = data; + if (visObj->visual_object_type != MP4_VISUAL_OBJECT_TYPE_VIDEO) + { + /* VIDEO is the only supported type */ + DEB("Error: Unsupported object: visual_object_type != video ID\n"); + parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; + break; + } + + /* Not required to check for visual_object_type as we already handle it above */ + ret = mp4_Parse_video_signal_type(parent, &(visObj->VideoSignalType)); + + // No need to check for user data or visual object layer because they have a different start code + // and will not be part of this header + + } while(0); + + mp4_set_hdr_bitstream_error(parser, true, ret); + + // POPULATE WORKLOAD ITEM + { + viddec_workload_item_t wi; + mp4_VideoSignalType_t *vst = &(visObj->VideoSignalType); + + wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ; + + wi.mp4_vs_vo.vs_item = 0; + wi.mp4_vs_vo.video_signal_type = 0; + wi.mp4_vs_vo.color_desc = 0; + + viddec_fw_mp4_vs_set_profile_and_level_indication(&wi.mp4_vs_vo, pInfo->profile_and_level_indication); + + viddec_fw_mp4_vo_set_video_signal_type(&wi.mp4_vs_vo, vst->is_video_signal_type); + if(vst->is_video_signal_type) + { + viddec_fw_mp4_vo_set_video_range(&wi.mp4_vs_vo, vst->video_range); + viddec_fw_mp4_vo_set_video_format(&wi.mp4_vs_vo, vst->video_format); + viddec_fw_mp4_vo_set_colour_description(&wi.mp4_vs_vo, vst->is_colour_description); + if(vst->is_colour_description) + { + viddec_fw_mp4_vo_set_transfer_char(&wi.mp4_vs_vo, vst->transfer_characteristics); + viddec_fw_mp4_vo_set_color_primaries(&wi.mp4_vs_vo, vst->colour_primaries); + } + } + + ret = viddec_pm_append_workitem(parent, &wi); + if(ret == 1) + ret = MP4_STATUS_OK; + } + + return ret; +} // mp4_Parse_VisualObject + +mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser) +{ + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + uint32_t user_data; + viddec_workload_item_t wi; + + DEB("ParseUser-prev_sc: 0x%x\n", parser->prev_sc); + + /* find the scope based on start code sc */ + switch(parser->prev_sc) { + case MP4_SC_VISUAL_OBJECT_SEQUENCE: + wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA; + break; + case MP4_SC_VISUAL_OBJECT: + wi.vwi_type = VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA; + break; + case MP4_SC_GROUP_OF_VOP: + wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA; + break; + case MP4_SC_VIDEO_OBJECT_LAYER_MIN: + wi.vwi_type = VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA; + break; + default: + wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen + break; + } + + /* Read 1 byte of user data and store it in workitem for the current stream level (VS/VO/VOL/GVOP). + Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size, + append the workitem. This loop is repeated till all user data is extracted and appended. 
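+ As an illustration (fabricated sizes, not from the patch): a 25-byte user_data stream produces three work items - bytes 0-10 in the first, + bytes 11-21 in the second, and bytes 22-24 in the third, with the final payload zero-padded out to 11 bytes before it is appended.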
 */
+    wi.user_data.size = 0;
+    while(viddec_pm_get_bits(parent, &user_data, 8) != -1)
+    {
+        /* Store the valid byte in the data payload */
+        wi.user_data.data_payload[wi.user_data.size] = user_data;
+        wi.user_data.size++;
+
+        /* When size reaches the maximum payload size, append the workitem and continue */
+        if (wi.user_data.size >= 11)
+        {
+            viddec_pm_setup_userdata(&wi);
+            ret = viddec_pm_append_workitem(parent, &wi);
+            wi.user_data.size = 0;
+        }
+    }
+    /* If size is not 0, zero-pad and append the remaining user data. */
+    if (wi.user_data.size > 0)
+    {
+        int i;
+        for(i=wi.user_data.size;i<11;i++)
+        {
+            wi.user_data.data_payload[i] = 0;
+        }
+        viddec_pm_setup_userdata(&wi);
+        ret = viddec_pm_append_workitem(parent, &wi);
+        wi.user_data.size = 0;
+    }
+
+    if(ret == 1)
+        ret = MP4_STATUS_OK;
+
+    return ret;
+} // mp4_Parse_UserData
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.h
new file mode 100644
index 0000000..0aec9ad
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.h
@@ -0,0 +1,13 @@
+#ifndef VIDDEC_MP4_VISUALOBJECT_H
+#define VIDDEC_MP4_VISUALOBJECT_H
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+
+mp4_Status_t mp4_Parse_VisualSequence(void *parent, viddec_mp4_parser_t *parser);
+
+mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser);
+
+mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c
new file mode 100644
index 0000000..6a34500
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c
@@ -0,0 +1,143 @@
+#include "viddec_pm_parse.h"
+#include "viddec_fw_debug.h"
+#include "viddec_mp4_parse.h"
+
+/* Parse for a start code of the pattern 0x00 0x00 0xXX in the current buffer. Returns 1 once
+   a start code has been found, 0 otherwise (more data is needed). The context is updated with
+   the current phase and the sc_code position in the buffer.
+
+   What is phase?: phase is a value between [0-4]; we track consecutive '0' bytes with it.
+   Any time a '0' is found it is incremented by 1 (up to 2) and reset to 0 when a non-zero
+   byte is found. If the 0xXX code is found while the current phase is 2, it is changed to 3,
+   which means we found the pattern we are looking for. It is incremented to 4 once we see a
+   byte after this pattern.
+
+   For MP4 there are two start-code patterns, LVH & SVH. LVH is the same as other codecs
+   (00 00 01); SVH, a.k.a. H.263, is (00 00 8X). So we have to look for both kinds of start
+   codes. The spec does not explicitly say whether both of them can exist in a stream, so the
+   current implementation assumes that only one of them is present in a given stream to keep
+   the implementation simple. The reason it can get complicated is that a resync marker in
+   LVH can potentially be (00 00 8x), which would cause a false detect of an SVH start code.
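+
+   Worked example, assuming an LVH (00 00 01) stream: for input bytes 00 00 01 B6 the phase
+   goes 0->1 on the first 00, 1->2 on the second 00, 2->3 when 01 arrives while phase is 2,
+   and 3->4 once B6 (the byte after the prefix, here the VOP start-code value) is available;
+   at that point sc_end_pos indexes B6 and the function returns 1.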
+*/ + +uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state) +{ + uint8_t *ptr; + uint32_t size; + uint32_t data_left=0, phase = 0, ret = 0; + viddec_sc_parse_cubby_cxt_t *cxt; + viddec_mp4_parser_t *p_info; + + cxt = ( viddec_sc_parse_cubby_cxt_t *)in; + viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state; + size = 0; + data_left = cxt->size; + ptr = cxt->buf; + phase = cxt->phase; + cxt->sc_end_pos = -1; + p_info = (viddec_mp4_parser_t *)pcxt; + + /* parse until there is more data and start code not found */ + while((data_left > 0) &&(phase < 3)) + { + /* Check if we are byte aligned & phase=0, if thats the case we can check + work at a time instead of byte*/ + if(((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) + { + while(data_left > 3) + { + uint32_t data; + char mask1 = 0, mask2=0; + + data = *((uint32_t *)ptr); +#ifndef MFDBIGENDIAN + data = SWAP_WORD(data); +#endif + mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0)); + mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1)); + /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need + two consecutive zero bytes for a start code pattern */ + if(mask1 && mask2) + {/* Success so skip 4 bytes and start over */ + ptr+=4;size+=4;data_left-=4; + continue; + } + else + { + break; + } + } + } + + /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected + two zero bytes in the word so we look one byte at a time*/ + if(data_left > 0) + { + if(*ptr == FIRST_STARTCODE_BYTE) + {/* Phase can be 3 only if third start code byte is found */ + phase++; + ptr++;size++;data_left--; + if(phase > 2) + { + phase = 2; + + if ( (((uint32_t)ptr) & 0x3) == 0 ) + { + while( data_left > 3 ) + { + if(*((uint32_t *)ptr) != 0) + { + break; + } + ptr+=4;size+=4;data_left-=4; + } + } + } + } + else + { + uint8_t normal_sc=0, short_sc=0; + if(phase == 2) + { + normal_sc = (*ptr == THIRD_STARTCODE_BYTE); + short_sc = (p_info->ignore_scs == 0) && (SHORT_THIRD_STARTCODE_BYTE == ( *ptr & 0xFC)); + } + + if(!(normal_sc | short_sc)) + { + phase = 0; + } + else + {/* Match for start code so update context with byte position */ + cxt->sc_end_pos = size; + phase = 3; + p_info->cur_sc_prefix = p_info->next_sc_prefix; + p_info->next_sc_prefix = (normal_sc) ? 1: 0; + if(normal_sc) + { + p_info->ignore_scs=1; + } + else + { + /* For short start code since start code is in one nibble just return at this point */ + phase += 1; + state->next_sc = *ptr; + state->second_scprfx_length = 2; + ret=1; + break; + } + } + ptr++;size++;data_left--; + } + } + } + if((data_left > 0) && (phase == 3)) + { + cxt->sc_end_pos++; + state->next_sc = cxt->buf[cxt->sc_end_pos]; + state->second_scprfx_length = 3; + phase++; + ret = 1; + } + cxt->phase = phase; + /* Return SC found only if phase is 4, else always success */ + return ret; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h b/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h new file mode 100644 index 0000000..d57a9bf --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h @@ -0,0 +1,111 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2001-2006 Intel Corporation. 
All Rights Reserved. +// +// Description: VC1 header. +// +*/ + +#ifndef _VC1_COMMON_H_ +#define _VC1_COMMON_H_ + +/* If the pixel data is left near an emulation prevention sequence, the decoder will be unaware + unless we send some previous bytes */ +//#define PADDING_FOR_EMUL 3 +#define PADDING_FOR_EMUL 0 + +#define GET_BLSB( name, bitf ) BLSB_MFD_##name##_##bitf +#define GET_BMSK( name, bitf ) BMSK_MFD_##name##_##bitf + +#define BF_READ( name, bitf, value ) ((value & GET_BMSK(name, bitf) ) >> GET_BLSB(name, bitf) ) +#define BF_WRITE( name, bitf, value, data ) value = ((value & ~GET_BMSK(name, bitf)) | ((data) << GET_BLSB(name, bitf))) + +enum vc1_workload_item_type +{ + VIDDEC_WORKLOAD_VC1_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC, + VIDDEC_WORKLOAD_VC1_BITOFFSET, + VIDDEC_WORKLOAD_VC1_BITPLANE0, + VIDDEC_WORKLOAD_VC1_BITPLANE1, + VIDDEC_WORKLOAD_VC1_BITPLANE2, + VIDDEC_WORKLOAD_VC1_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0, + VIDDEC_WORKLOAD_VC1_FUTURE_FRAME, +}; + +typedef enum +{ + vc1_ProgressiveFrame = 0, + vc1_InterlacedFrame = 2, + vc1_InterlacedField = 3, + vc1_PictureFormatNone +} vc1_fcm; + +/** This enumeration defines the various frame types as defined in PTYPE syntax +element. +PTYPE interpretation depends on bitstream profile. The value that needs to get +programmed in the frame_type register 0x2218 is this generic enum obtained +from Canmore code. +Changing this enum to match the spec for each profile caused md5 mismatches. +TODO: Why are these the values to program - is this the case with reference decoder? +*/ +enum +{ + VC1_I_FRAME = (1 << 0), + VC1_P_FRAME = (1 << 1), + VC1_B_FRAME = (1 << 2), + VC1_BI_FRAME = VC1_I_FRAME | VC1_B_FRAME, + VC1_SKIPPED_FRAME = (1 << 3) | VC1_P_FRAME +}; + +enum { + vc1_FrameDone = 1 << 0, + vc1_FieldDone = 1 << 1, + vc1_SliceDone = 1 << 2, + vc1_Field1Done = 1 << 3, + vc1_Field2Done = 1 << 4, + vc1_FrameError = 1 << 8, +}; + +typedef struct { + /* 0x00 */ uint32_t general; + /* 0x04 */ uint32_t stream_format1; + /* 0x08 */ uint32_t coded_size; + /* 0x0c */ uint32_t stream_format2; + /* 0x10 */ uint32_t entrypoint1; + /* 0x14 */ uint32_t range_map; + /* 0x18 */ uint32_t frame_type; + /* 0x1c */ uint32_t recon_control; + /* 0x20 */ uint32_t mv_control; + /* 0x24 */ uint32_t intcomp_fwd_top; + /* 0x28 */ uint32_t ref_bfraction; + /* 0x2c */ uint32_t blk_control; + /* 0x30 */ uint32_t trans_data; + /* 0x34 */ uint32_t vop_dquant; +#define NUM_REF_ID 4 + /* 0x38-0x48 */ uint32_t ref_frm_id[NUM_REF_ID]; + /* 0x48 */ uint32_t fieldref_ctrl_id; + /* 0x4c */ uint32_t auxfrmctrl; + /* 0x50 */ uint32_t imgstruct; + /* 0x54 */ uint32_t alt_frame_type; + /* 0x58 */ uint32_t intcomp_fwd_bot; + /* 0x5c */ uint32_t intcomp_bwd_top; + /* 0x60 */ uint32_t intcomp_bwd_bot; + /* 0x64 */ uint32_t _stuffing; +} VC1D_SPR_REGS; + +/* +In VC1, past reference is the fwd reference and future reference is the backward reference +i.e. P frame has only a forward reference and B frame has both a forward and a backward reference. 
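+For example, while decoding a B frame, the slot tagged VC1_FRAME_PAST below holds the
+temporally earlier anchor (the forward reference) and VC1_FRAME_FUTURE the later anchor
+(the backward reference); a P frame only needs VC1_FRAME_PAST.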
+*/ +enum { + VC1_FRAME_CURRENT_REF = 0, + VC1_FRAME_CURRENT_DIS, + VC1_FRAME_PAST, + VC1_FRAME_FUTURE, +}; + +#endif //_VC1_COMMON_H_ + diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c new file mode 100644 index 0000000..a2d6721 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c @@ -0,0 +1,16 @@ +#include "vc1.h" + +void vc1_start_new_frame (void *parent, vc1_viddec_parser_t *parser ) +{ + return; +} + +void vc1_end_frame (vc1_viddec_parser_t *parser) +{ + return; +} + +int32_t vc1_parse_emit_current_frame( void *parent, vc1_viddec_parser_t *parser ) +{ + return(0); +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h new file mode 100644 index 0000000..8416b24 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h @@ -0,0 +1,224 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved. +// +// Description: VC1 header. +// +*/ + +#ifndef _VC1_H_ +#define _VC1_H_ + +#ifdef MFD_FIRMWARE + typedef unsigned int size_t; + #define LOG(...) +#else + #include + #include + #include + enum { + NONE = 0, + CRITICAL, + WARNING, + INFO, + DEBUG, + } log_level; + + #define vc1_log_level DEBUG + + #define LOG( log_lev, format, args ... ) \ + if (vc1_log_level >= log_lev) { OS_INFO("%s[%d]:: " format "\n", __FUNCTION__ , __LINE__ , ## args ); } +#endif + +#include "viddec_fw_workload.h" +#include "vc1parse_common_defs.h" +#include "vc1common.h" + +#ifdef __cplusplus +extern "C" { +#endif + +#define LOG_CRIT(format, args ... ) LOG( CRITICAL, format, ## args) +#define LOG_WARN(format, args ... ) LOG( WARNING, format, ## args) +#define LOG_INFO(format, args ... ) LOG( INFO, format, ## args) +#define LOG_DEBUG(format, args ... 
) LOG( DEBUG, format, ## args) + +// Seems to be hardware bug: DO NOT TRY TO SWAP BITPLANE0 and BITPLANE2 +// Block Control Register at offset 222C uses Bitplane_raw_ID0 to indicate directmb/fieldtx while +// and Bitplane_raw_ID2 for acpred/mvtypemb/forwardmb +// but when we send bitplane index 0 for directmb/fieldtx and bitplane index 2 for acpred/mvtypemb/forwardmb +// md5 mismatches are seen +typedef enum +{ + BPP_FORWARDMB = VIDDEC_WORKLOAD_VC1_BITPLANE0, + BPP_ACPRED = VIDDEC_WORKLOAD_VC1_BITPLANE0, + BPP_MVTYPEMB = VIDDEC_WORKLOAD_VC1_BITPLANE0, + BPP_OVERFLAGS = VIDDEC_WORKLOAD_VC1_BITPLANE1, + BPP_SKIPMB = VIDDEC_WORKLOAD_VC1_BITPLANE1, + BPP_DIRECTMB = VIDDEC_WORKLOAD_VC1_BITPLANE2, + BPP_FIELDTX = VIDDEC_WORKLOAD_VC1_BITPLANE2, +} vc1_bpp_type_t; + +/* status codes */ +typedef enum { + VC1_STATUS_EOF = 1, // end of file + VC1_STATUS_OK = 0, // no error + VC1_STATUS_NO_MEM = 2, // out of memory + VC1_STATUS_FILE_ERROR = 2, // file error + VC1_STATUS_NOTSUPPORT = 2, // not supported mode + VC1_STATUS_PARSE_ERROR = 2, // fail in parse MPEG-4 stream + VC1_STATUS_ERROR = 2 // unknown/unspecified error +} vc1_Status; + +/* VC1 start code values */ +typedef enum { + vc1_Forbidden = 0x80,/*0x80-0xFF*/ + vc1_Reserved1 = 0x09,/*0x00-0x09*/ + vc1_Reserved2 = 0x10, + vc1_Reserved3 = 0x1A, + vc1_Reserved4 = 0x20,/*0x20-0x7F*/ + vc1_SCEndOfSequence = 0x0A, + vc1_SCSlice = 0x0B, + vc1_SCField = 0x0C, + vc1_SCFrameHeader = 0x0D, + vc1_SCEntryPointHeader = 0x0E, + vc1_SCSequenceHeader = 0x0F, + vc1_SCSliceUser = 0x1B, + vc1_SCFieldUser = 0x1C, + vc1_SCFrameUser = 0x1D, + vc1_SCEntryPointUser = 0x1E, + vc1_SCSequenceUser = 0x1F +} vc1_sc; + +#if 0 +typedef enum +{ + vc1_ProfileSimple = 0, /** Simple profile */ + vc1_ProfileMain, /** Main profile */ + vc1_ProfileReserved, /** Reserved */ + vc1_ProfileAdvanced /** Advanced profile */ +} vc1_Profile; +#endif + +typedef enum +{ + vc1_PtypeI = 1, + vc1_PtypeP = 2, + vc1_PtypeB = 4, + vc1_PtypeBI = 5, + vc1_PtypeSkipped = 8|2, +} vc1_ptype; + +typedef enum +{ + vc1_PtypeII = 0, + vc1_PtypeIP = 1, + vc1_PtypePI = 2, + vc1_PtypePP = 3, + vc1_PtypeBB = 4, + vc1_PtypeBBI = 5, + vc1_PtypeBIB = 6, + vc1_PtypeBIBI = 7 +} vc1_fptype; + +typedef enum +{ + vc1_Imode_Raw = 0, //0x0000 + vc1_Imode_Norm2, //0x10 + vc1_Imode_Diff2, //0x001 + vc1_Imode_Norm6, //0x11 + vc1_Imode_Diff6, //0x0001 + vc1_Imode_Rowskip, //0x010 + vc1_Imode_Colskip, //0x011 +} vc1_Imode; + +/* calculation of MAX_BITPLANE_SZ 2048/16x1088/16 pel= 128x68 bit used for bitplane + * as rows are packed in DWORDS + * we have (128)/32 * 68 Dwords needed for bitplane storage + */ +#define MAX_BITPLANE_SZ 272 + +/* Full Info */ +typedef struct { + unsigned char* bufptr; /* current frame, point to header or data */ + int bitoff; /* mostly point to next frame header or PSC */ + int picture_info_has_changed; + vc1_metadata_t metadata; + vc1_PictureLayerHeader picLayerHeader; + uint32_t bitplane[MAX_BITPLANE_SZ]; +} vc1_Info; + +#ifdef __cplusplus +} +#endif + +enum { + VC1_REF_FRAME_T_MINUS_1 = 0, + VC1_REF_FRAME_T_MINUS_2, + VC1_REF_FRAME_T_MINUS_0, + VC1_NUM_REFERENCE_FRAMES, +}; + +enum vc1_sc_seen_flags +{ + VC1_SC_INVALID = 0 << 0, + VC1_SC_SEQ = 1 << 0, + VC1_SC_EP = 1 << 1, + VC1_SC_FRM = 1 << 2, + VC1_SC_FLD = 1 << 3, + VC1_SC_SLC = 1 << 4, + VC1_SC_UD = 1 << 5, +}; +#define VC1_SEQ_MASK VC1_SC_SEQ +#define VC1_EP_MASK VC1_SC_SEQ | VC1_SC_EP +#define VC1_FRM_MASK VC1_SC_SEQ | VC1_SC_EP | VC1_SC_FRM +#define VC1_FLD_MASK VC1_SC_SEQ | VC1_SC_EP | VC1_SC_FRM | VC1_SC_FLD + +typedef struct { + int id; 
+    uint32_t intcomp_top;
+    uint32_t intcomp_bot;
+    int fcm;        /* frame coding mode */
+    int type;
+    int anchor[2];  /* one per field */
+    int rr_en;      /* range reduction enable flag at sequence layer */
+    int rr_frm;     /* range reduction flag at picture layer */
+} ref_frame_t;
+
+typedef struct
+{
+    uint32_t sc_seen_since_last_wkld;
+    uint32_t sc_seen;
+    uint32_t is_frame_start;
+    uint8_t  is_reference_picture;
+    uint32_t intcomp_last[4]; /* for B frames */
+    uint32_t intcomp_top[2];
+    uint32_t intcomp_bot[2];
+    vc1_Info info;
+    VC1D_SPR_REGS spr;
+    ref_frame_t ref_frame[VC1_NUM_REFERENCE_FRAMES];
+#ifdef VBP
+    /* A storage area is provided for each type of bit plane. Only one of    */
+    /* each type will ever be used for a picture, never more than three      */
+    /* bit-planes per picture, and often only one is used. We never clear    */
+    /* this data and write into it only when we need to. vc1parse_bitplane.c */
+    /* fills these areas and points the corresponding bitplane entries of    */
+    /* the picture header structure at them. Those structures are set every  */
+    /* time a picture parse begins. */
+    uint32_t bp_forwardmb[4096];
+    uint32_t bp_acpred[4096];
+    uint32_t bp_mvtypemb[4096];
+    uint32_t bp_overflags[4096];
+    uint32_t bp_skipmb[4096];
+    uint32_t bp_directmb[4096];
+    uint32_t bp_fieldtx[4096];
+    uint32_t start_code;
+#endif
+} vc1_viddec_parser_t;
+
+#endif //_VC1_H_
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c
new file mode 100644
index 0000000..a033385
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c
@@ -0,0 +1,557 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//  Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VC-1 bitstream layers down to but not including
+//  the macroblock layer.
+//
+*/
+
+#include "viddec_fw_debug.h"
+#include "vc1parse.h"
+
+#define VC1_PIXEL_IN_LUMA 16
+
+/*------------------------------------------------------------------------------
+ * Parse a modified rcv file; start codes are inserted using rcv2vc1.c.
+ * The source is in
+ * http://svn.jf.intel.com/svn/DHG_Src/CESWE_Src/DEV/trunk/sv/mfd/tools/utils.
+ * Assume an rcv file width < 90,112 pixels to differentiate it from a real VC1
+ * advanced profile header.
+ * The original rcv description is in annex L,
+ * Table 263 of SMPTE 421M.
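+ * Worked example of the conversions below: a 352x240 RCV clip yields
+ * widthMB = (352+15)/16 = 22 and heightMB = (240+15)/16 = 15, and the workload
+ * resolution fields become width = 352/2 - 1 = 175 and height = 240/2 - 1 = 119
+ * (2-pel units per sec. 6.2.13.1).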
+ */ +vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo) +{ + uint32_t result; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_RcvSequenceHeader rcv; + + memset(&rcv, 0, sizeof(vc1_RcvSequenceHeader)); + + result = viddec_pm_get_bits(ctxt, &rcv.struct_a_rcv, 32); + md->width = rcv.struct_a.HORIZ_SIZE; + md->height = rcv.struct_a.VERT_SIZE; + + result = viddec_pm_get_bits(ctxt, &rcv.struct_c_rcv, 32); + md->PROFILE = rcv.struct_c.PROFILE >> 2; + md->LOOPFILTER = rcv.struct_c.LOOPFILTER; + md->MULTIRES = rcv.struct_c.MULTIRES; + md->FASTUVMC = rcv.struct_c.FASTUVMC; + md->EXTENDED_MV = rcv.struct_c.EXTENDED_MV; + md->DQUANT = rcv.struct_c.DQUANT; + md->VSTRANSFORM = rcv.struct_c.VSTRANSFORM; + md->OVERLAP = rcv.struct_c.OVERLAP; + md->RANGERED = rcv.struct_c.RANGERED; + md->MAXBFRAMES = rcv.struct_c.MAXBFRAMES; + md->QUANTIZER = rcv.struct_c.QUANTIZER; + md->FINTERPFLAG = rcv.struct_c.FINTERPFLAG; +#ifdef VBP + md->SYNCMARKER = rcv.struct_c.SYNCMARKER; +#endif + + if ((md->PROFILE == VC1_PROFILE_SIMPLE) || + (md->MULTIRES && md->PROFILE == VC1_PROFILE_MAIN)) + { + md->DQUANT = 0; + } + // TODO: NEED TO CHECK RESERVED BITS ARE 0 + + md->widthMB = (md->width + 15 ) / VC1_PIXEL_IN_LUMA; + md->heightMB = (md->height + 15) / VC1_PIXEL_IN_LUMA; + + DEB("rcv: beforemod: res: %dx%d\n", md->width, md->height); + + /* WL takes resolution in unit of 2 pel - sec. 6.2.13.1 */ + md->width = md->width/2 -1; + md->height = md->height/2 -1; + + DEB("rcv: res: %dx%d\n", md->width, md->height); + + // POPULATE WORKLOAD ITEM + { + viddec_workload_item_t wi; + + wi.vwi_type = VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C; + + wi.vc1_sh_struct_a_c.size = 0; + wi.vc1_sh_struct_a_c.flags = 0; + wi.vc1_sh_struct_a_c.pad = 0; + + viddec_fw_vc1_set_rcv_horiz_size(&wi.vc1_sh_struct_a_c, rcv.struct_a.HORIZ_SIZE); + viddec_fw_vc1_set_rcv_vert_size(&wi.vc1_sh_struct_a_c, rcv.struct_a.VERT_SIZE); + + viddec_fw_vc1_set_rcv_bitrtq_postproc(&wi.vc1_sh_struct_a_c, rcv.struct_c.BITRTQ_POSTPROC); + viddec_fw_vc1_set_rcv_frmrtq_postproc(&wi.vc1_sh_struct_a_c, rcv.struct_c.FRMRTQ_POSTPROC); + viddec_fw_vc1_set_rcv_profile(&wi.vc1_sh_struct_a_c, rcv.struct_c.PROFILE); + viddec_fw_vc1_set_rcv_level(&wi.vc1_sh_struct_a_c, 0); + viddec_fw_vc1_set_rcv_cbr(&wi.vc1_sh_struct_a_c, 0); + viddec_fw_vc1_set_rcv_rangered(&wi.vc1_sh_struct_a_c, rcv.struct_c.RANGERED); + viddec_fw_vc1_set_rcv_maxbframes(&wi.vc1_sh_struct_a_c, rcv.struct_c.MAXBFRAMES); + viddec_fw_vc1_set_rcv_finterpflag(&wi.vc1_sh_struct_a_c, rcv.struct_c.FINTERPFLAG); + + result = viddec_pm_append_workitem(ctxt, &wi); + } + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse sequence layer. This function is only applicable to advanced profile + * as simple and main profiles use other mechanisms to communicate these + * metadata. + * Table 3 of SMPTE 421M. + * Table 13 of SMPTE 421M for HRD_PARAM(). 
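+ * Note that MAX_CODED_WIDTH/HEIGHT are coded in 2-pel units minus one, so a
+ * 1920x1080 sequence carries MAX_CODED_WIDTH=959 and MAX_CODED_HEIGHT=539, and the
+ * macroblock math below gives widthMB = ((959+1)*2+15)/16 = 120 and
+ * heightMB = ((539+1)*2+15)/16 = 68.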
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) +{ + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_SequenceLayerHeader sh; + uint32_t result; + + memset(&sh, 0, sizeof(vc1_SequenceLayerHeader)); + + // PARSE SEQUENCE HEADER + result = viddec_pm_get_bits(ctxt, &sh.flags, 15); + if(result == 1) + { + md->PROFILE = sh.seq_flags.PROFILE; +#ifdef VBP + md->LEVEL = sh.seq_flags.LEVEL; +#endif + } + + result = viddec_pm_get_bits(ctxt, &sh.max_size, 32); + if(result == 1) + { + md->POSTPROCFLAG = sh.seq_max_size.POSTPROCFLAG; + md->width = sh.seq_max_size.MAX_CODED_WIDTH; + md->height = sh.seq_max_size.MAX_CODED_HEIGHT; + md->PULLDOWN = sh.seq_max_size.PULLDOWN; + md->INTERLACE = sh.seq_max_size.INTERLACE; + md->TFCNTRFLAG = sh.seq_max_size.TFCNTRFLAG; + md->FINTERPFLAG = sh.seq_max_size.FINTERPFLAG; + md->PSF = sh.seq_max_size.PSF; + } + + if (sh.seq_max_size.DISPLAY_EXT == 1) + { + result = viddec_pm_get_bits(ctxt, &sh.disp_size, 29); + if(result == 1) + { + if (sh.seq_disp_size.ASPECT_RATIO_FLAG == 1) + { + result = viddec_pm_get_bits(ctxt, &tempValue, 4); + sh.ASPECT_RATIO = tempValue; + if (sh.ASPECT_RATIO == 15) + { + result = viddec_pm_get_bits(ctxt, &sh.aspect_size, 16); + } + } + + result = viddec_pm_get_bits(ctxt, &tempValue, 1); + sh.FRAMERATE_FLAG = tempValue; + if (sh.FRAMERATE_FLAG == 1) + { + result = viddec_pm_get_bits(ctxt, &tempValue, 1); + sh.FRAMERATEIND = tempValue; + if (sh.FRAMERATEIND == 0) + { + result = viddec_pm_get_bits(ctxt, &sh.framerate_fraction, 12); + } + else + { + result = viddec_pm_get_bits(ctxt, &tempValue, 16); + sh.FRAMERATEEXP = tempValue; + } + } + + result = viddec_pm_get_bits(ctxt, &tempValue, 1); + sh.COLOR_FORMAT_FLAG = tempValue; + if (sh.COLOR_FORMAT_FLAG == 1) + { + result = viddec_pm_get_bits(ctxt, &sh.color_format, 24); + } + } // Successful get of display size + } // DISPLAY_EXT is 1 + + result = viddec_pm_get_bits(ctxt, &tempValue, 1); + sh.HRD_PARAM_FLAG = tempValue; + if (sh.HRD_PARAM_FLAG == 1) + { + /* HRD_PARAM(). 
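 The 5-bit count read below is kept in the metadata so that
+           vc1_ParseEntryPointLayer() can later skip the 8-bit HRD_FULLNESS field sent
+           for each leaky bucket.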
*/ + result = viddec_pm_get_bits(ctxt, &tempValue, 5); + sh.HRD_NUM_LEAKY_BUCKETS = tempValue; + md->HRD_NUM_LEAKY_BUCKETS = sh.HRD_NUM_LEAKY_BUCKETS; + // Skip the rest of the parsing - hrdinfo is not required for decode or for attributes + } + else + { + md->HRD_NUM_LEAKY_BUCKETS = 0; + } + + md->widthMB = (((md->width + 1) * 2) + 15) / VC1_PIXEL_IN_LUMA; + md->heightMB = (((md->height + 1) * 2) + 15) / VC1_PIXEL_IN_LUMA; + + DEB("md: res: %dx%d\n", md->width, md->height); + DEB("sh: dispres: %dx%d\n", sh.seq_disp_size.DISP_HORIZ_SIZE, sh.seq_disp_size.DISP_VERT_SIZE); + + // POPULATE WORKLOAD ITEM + { + viddec_workload_item_t wi_sl, wi_de; + + wi_sl.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO; + + wi_sl.vc1_sl.size = 0; + wi_sl.vc1_sl.flags = 0; + wi_sl.vc1_sl.pad = 0; + + viddec_fw_vc1_set_profile(&wi_sl.vc1_sl, sh.seq_flags.PROFILE); + viddec_fw_vc1_set_level(&wi_sl.vc1_sl, sh.seq_flags.LEVEL); + viddec_fw_vc1_set_colordiff_format(&wi_sl.vc1_sl, sh.seq_flags.COLORDIFF_FORMAT); + viddec_fw_vc1_set_pulldown(&wi_sl.vc1_sl, sh.seq_max_size.PULLDOWN); + viddec_fw_vc1_set_max_coded_width(&wi_sl.vc1_sl, sh.seq_max_size.MAX_CODED_WIDTH); + viddec_fw_vc1_set_max_coded_height(&wi_sl.vc1_sl, sh.seq_max_size.MAX_CODED_HEIGHT); + + viddec_fw_vc1_set_bitrtq_postproc(&wi_sl.vc1_sl, sh.seq_flags.BITRTQ_POSTPROC); + viddec_fw_vc1_set_frmrtq_postproc(&wi_sl.vc1_sl, sh.seq_flags.FRMRTQ_POSTPROC); + viddec_fw_vc1_set_interlace(&wi_sl.vc1_sl, sh.seq_max_size.INTERLACE); + viddec_fw_vc1_set_tfcntrflag(&wi_sl.vc1_sl, sh.seq_max_size.TFCNTRFLAG); + viddec_fw_vc1_set_finterpflag(&wi_sl.vc1_sl, sh.seq_max_size.FINTERPFLAG); + viddec_fw_vc1_set_psf(&wi_sl.vc1_sl, sh.seq_max_size.PSF); + viddec_fw_vc1_set_display_ext(&wi_sl.vc1_sl, sh.seq_max_size.DISPLAY_EXT); + + result = viddec_pm_append_workitem(ctxt, &wi_sl); + + // send DISPLAY EXTENSION metadata if present + if (sh.seq_max_size.DISPLAY_EXT) + { + wi_de.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO; + + wi_de.vc1_sl_de.size = 0; + wi_de.vc1_sl_de.framerate = 0; + wi_de.vc1_sl_de.aspectsize = 0; + + viddec_fw_vc1_set_disp_horiz_size(&wi_de.vc1_sl_de, sh.seq_disp_size.DISP_HORIZ_SIZE); + viddec_fw_vc1_set_disp_vert_size(&wi_de.vc1_sl_de, sh.seq_disp_size.DISP_VERT_SIZE); + viddec_fw_vc1_set_disp_aspect_ratio_flag(&wi_de.vc1_sl_de, sh.seq_disp_size.ASPECT_RATIO_FLAG); + viddec_fw_vc1_set_disp_color_format_flag(&wi_de.vc1_sl_de, sh.COLOR_FORMAT_FLAG); + viddec_fw_vc1_set_disp_framerate_flag(&wi_de.vc1_sl_de, sh.FRAMERATE_FLAG); + viddec_fw_vc1_set_disp_framerateind(&wi_de.vc1_sl_de, sh.FRAMERATEIND); + + viddec_fw_vc1_set_disp_aspect_ratio(&wi_de.vc1_sl_de, sh.ASPECT_RATIO); + viddec_fw_vc1_set_disp_frameratenr(&wi_de.vc1_sl_de, sh.seq_framerate_fraction.FRAMERATENR); + viddec_fw_vc1_set_disp_frameratedr(&wi_de.vc1_sl_de, sh.seq_framerate_fraction.FRAMERATEDR); + viddec_fw_vc1_set_disp_framerateexp(&wi_de.vc1_sl_de, sh.FRAMERATEEXP); + + viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(&wi_de.vc1_sl_de, sh.seq_aspect_size.ASPECT_HORIZ_SIZE); + viddec_fw_vc1_set_disp_aspect_ratio_vert_size(&wi_de.vc1_sl_de, sh.seq_aspect_size.ASPECT_VERT_SIZE); + viddec_fw_vc1_set_disp_color_prim(&wi_de.vc1_sl_de, sh.seq_color_format.COLOR_PRIM); + viddec_fw_vc1_set_disp_transfer_char(&wi_de.vc1_sl_de, sh.seq_color_format.TRANSFER_CHAR); + + result = viddec_pm_append_workitem(ctxt, &wi_de); + } + } + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse entry point layer. 
This function is only applicable for advanced + * profile and is used to signal a random access point and changes in coding + * control parameters. + * Table 14 of SMPTE 421M. + * Table 15 of SMPTE 421M for HRD_FULLNESS(). + *------------------------------------------------------------------------------ + */ +vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_EntryPointHeader ep; + uint32_t result; + uint32_t temp; + + memset(&ep, 0, sizeof(vc1_EntryPointHeader)); + + // PARSE ENTRYPOINT HEADER + result = viddec_pm_get_bits(ctxt, &ep.flags, 13); + if(result == 1) + { + // Skip the flags already peeked at (13) and the unneeded hrd_full data + // NOTE: HRD_NUM_LEAKY_BUCKETS is initialized to 0 when HRD_PARAM_FLAG is not present + int hrd_bits = md->HRD_NUM_LEAKY_BUCKETS * 8; + while(hrd_bits >= 32) + { + result = viddec_pm_skip_bits(ctxt, 32); + hrd_bits -= 32; + } + result = viddec_pm_skip_bits(ctxt, hrd_bits); + + md->REFDIST = 0; + md->PANSCAN_FLAG = ep.ep_flags.PANSCAN_FLAG; + md->REFDIST_FLAG = ep.ep_flags.REFDIST_FLAG; + md->LOOPFILTER = ep.ep_flags.LOOPFILTER; + md->FASTUVMC = ep.ep_flags.FASTUVMC; + md->EXTENDED_MV = ep.ep_flags.EXTENDED_MV; + md->DQUANT = ep.ep_flags.DQUANT; + md->VSTRANSFORM = ep.ep_flags.VSTRANSFORM; + md->OVERLAP = ep.ep_flags.OVERLAP; + md->QUANTIZER = ep.ep_flags.QUANTIZER; + + result = viddec_pm_get_bits(ctxt, &temp, 1); + if(result == 1) + { + ep.CODED_SIZE_FLAG = temp; + if(ep.CODED_SIZE_FLAG) + { + result = viddec_pm_get_bits(ctxt, &ep.size, 24); + md->width = ep.ep_size.CODED_WIDTH; + md->height = ep.ep_size.CODED_HEIGHT; + } + } + if(ep.ep_flags.EXTENDED_MV) + { + result = viddec_pm_get_bits(ctxt, &temp, 1); + md->EXTENDED_DMV = ep.EXTENDED_DMV = temp; + } + + result = viddec_pm_get_bits(ctxt, &temp, 1); + if(result == 1) + { + md->RANGE_MAPY_FLAG = ep.RANGE_MAPY_FLAG = temp; + if(ep.RANGE_MAPY_FLAG) + { + result = viddec_pm_get_bits(ctxt, &temp, 3); + md->RANGE_MAPY = ep.RANGE_MAPY = temp; + } + } + + result = viddec_pm_get_bits(ctxt, &temp, 1); + if(result == 1) + { + md->RANGE_MAPUV_FLAG = ep.RANGE_MAPUV_FLAG = temp; + if(ep.RANGE_MAPUV_FLAG) + { + result = viddec_pm_get_bits(ctxt, &temp, 3); + md->RANGE_MAPUV = ep.RANGE_MAPUV = temp; + } + } + } + + // POPULATE WORKLOAD ITEM + { + viddec_workload_item_t wi; + + wi.vwi_type = VIDDEC_WORKLOAD_GOP_INFO; + + wi.vc1_ep.size = 0; + wi.vc1_ep.flags = 0; + wi.vc1_ep.pad = 0; + + viddec_fw_vc1_set_ep_size_flag(&wi.vc1_ep, ep.CODED_SIZE_FLAG); + viddec_fw_vc1_set_ep_horiz_size(&wi.vc1_ep, ep.ep_size.CODED_WIDTH); + viddec_fw_vc1_set_ep_vert_size(&wi.vc1_ep, ep.ep_size.CODED_HEIGHT); + + viddec_fw_vc1_set_ep_broken_link(&wi.vc1_ep, ep.ep_flags.BROKEN_LINK); + viddec_fw_vc1_set_ep_closed_entry(&wi.vc1_ep, ep.ep_flags.CLOSED_ENTRY); + viddec_fw_vc1_set_ep_panscan_flag(&wi.vc1_ep, ep.ep_flags.PANSCAN_FLAG); + viddec_fw_vc1_set_ep_range_mapy_flag(&wi.vc1_ep, ep.RANGE_MAPY_FLAG); + viddec_fw_vc1_set_ep_range_mapy(&wi.vc1_ep, ep.RANGE_MAPY); + viddec_fw_vc1_set_ep_range_mapuv_flag(&wi.vc1_ep, ep.RANGE_MAPUV_FLAG); + viddec_fw_vc1_set_ep_range_mapuv(&wi.vc1_ep, ep.RANGE_MAPUV); + + result = viddec_pm_append_workitem(ctxt, &wi); + } + +#ifdef VBP + md->BROKEN_LINK = ep.ep_flags.BROKEN_LINK; + md->CLOSED_ENTRY = ep.ep_flags.CLOSED_ENTRY; +#endif + + DEB("ep: res: %dx%d\n", ep.ep_size.CODED_WIDTH, ep.ep_size.CODED_HEIGHT); + DEB("md: after ep: res: %dx%d\n", md->width, md->height); + return status; +} + 
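+
+/* Illustrative sketch only (vc1_skip_hrd_fullness is a hypothetical helper, not part of
+ * the change above): it restates the HRD skip done in vc1_ParseEntryPointLayer(), which
+ * drops one 8-bit HRD_FULLNESS field per leaky bucket announced by the sequence header,
+ * consuming 32-bit chunks first and then the remainder. With HRD_NUM_LEAKY_BUCKETS = 5
+ * this skips 40 bits: one 32-bit chunk plus 8 bits.
+ */
+static inline void vc1_skip_hrd_fullness(void* ctxt, uint32_t num_leaky_buckets)
+{
+    int32_t hrd_bits = num_leaky_buckets * 8; /* 8 bits of fullness per bucket */
+    while (hrd_bits >= 32)
+    {
+        viddec_pm_skip_bits(ctxt, 32); /* word-sized chunks first */
+        hrd_bits -= 32;
+    }
+    viddec_pm_skip_bits(ctxt, hrd_bits); /* remainder, possibly zero */
+}
+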
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function dispatches to the appropriate picture
+ * header parser for the profile in use.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureLayer(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    uint32_t temp;
+    int i;
+
+    /* default the three bitplane slots to raw until a picture header is parsed */
+    for(i = 0; i < 3; i++)
+    {
+        pInfo->metadata.bp_raw[i] = true;
+    }
+
+    if (pInfo->metadata.PROFILE == VC1_PROFILE_ADVANCED)
+    {
+        VC1_PEEK_BITS(2, temp); /* fcm */
+        if( (pInfo->metadata.INTERLACE == 1) && (temp == VC1_FCM_FIELD_INTERLACE))
+        {
+            status = vc1_ParseFieldHeader_Adv(ctxt, pInfo);
+        }
+        else
+        {
+            status = vc1_ParsePictureHeader_Adv(ctxt, pInfo);
+        }
+    }
+    else
+    {
+        status = vc1_ParsePictureHeader(ctxt, pInfo);
+    }
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse field picture layer (advanced profile only). This function selects the
+ * picture type and bottom-field flag for the current field, then parses its
+ * header.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldLayer(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_PARSE_ERROR;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    if (pInfo->metadata.PROFILE == VC1_PROFILE_ADVANCED) {
+        if (picLayerHeader->CurrField == 0)
+        {
+            picLayerHeader->PTYPE = picLayerHeader->PTypeField1;
+            picLayerHeader->BottomField = (uint8_t) (1 - picLayerHeader->TFF);
+        }
+        else
+        {
+            picLayerHeader->BottomField = (uint8_t) (picLayerHeader->TFF);
+            picLayerHeader->PTYPE = picLayerHeader->PTypeField2;
+        }
+        status = vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo);
+    }
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse slice layer. This function parses the slice layer, which is only
+ * supported by the advanced profile.
+ * Table 26 of SMPTE 421M, but skipping parsing of the macroblock layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t tempValue;
+    uint32_t SLICE_ADDR;
+    vc1_Status status = VC1_STATUS_OK;
+
+    VC1_GET_BITS9(9, SLICE_ADDR);
+    VC1_GET_BITS9(1, tempValue); /* PIC_HEADER_FLAG. */
+    if (tempValue == 1) {
+        uint8_t *last_bufptr = pInfo->bufptr;
+        uint32_t last_bitoff = pInfo->bitoff;
+        status = vc1_ParsePictureLayer(ctxt, pInfo);
+        pInfo->picture_info_has_changed = 1;
+        if( status ) {
+            /* FIXME - is this a good way of handling this? The picture-layer
+               parse failed, so rewind and see if it is a field header instead. */
+            pInfo->bufptr = last_bufptr;
+            pInfo->bitoff = last_bitoff;
+            status = vc1_ParseFieldHeader_Adv(ctxt, pInfo);
+        }
+    } else
+        pInfo->picture_info_has_changed = 0;
+
+    pInfo->picLayerHeader.SLICE_ADDR = SLICE_ADDR;
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * This function parses the user data information as defined in SMPTE 421M annex F.
+ * It then appends that data to the workload.
+ * Assume the flush byte 0x80 is within the 3 bytes before the next start code.
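+ * For example, the four bytes 61 62 63 80 ("abc" plus the flush byte) all land in one
+ * workitem payload; the read loop below stops right after storing the 0x80.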
+ * let's put 1 byte per item first + *------------------------------------------------------------------------------ + */ +vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc) +{ + vc1_Status status = VC1_STATUS_OK; + uint32_t user_data; + viddec_workload_item_t wi; + uint32_t ud_id; + + /* find the scope based on start code sc */ + switch(sc) { + case vc1_SCSequenceUser: + wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA; + break; + case vc1_SCEntryPointUser: + wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA; + break; + case vc1_SCFrameUser: + wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA; + break; + case vc1_SCFieldUser: + wi.vwi_type = VIDDEC_WORKLOAD_FLD_USER_DATA; + break; + case vc1_SCSliceUser: + wi.vwi_type = VIDDEC_WORKLOAD_SLC_USER_DATA; + break; + default: + wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen + break; + } + + /* get identifier - 4 bytes*/ + // Extract this information but discard it for now + VC1_GET_BITS(32, ud_id); + + /* Read 1 byte of user data and store it in workitem for the current stream level (SEQ/GOP/PIC). + Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size, + append the workitem. This loop is repeated till all user data is extracted and appended. */ + wi.user_data.size = 0; + while(viddec_pm_get_bits(ctxt, &user_data, 8) != -1) + { + /* Store the valid byte in data payload */ + wi.user_data.data_payload[wi.user_data.size] = user_data; + wi.user_data.size++; + + /* When size exceeds payload size, append workitem and continue */ + if (wi.user_data.size >= 11) + { + viddec_pm_setup_userdata(&wi); + viddec_pm_append_workitem(ctxt, &wi); + wi.user_data.size = 0; + } + if(user_data == 0x80) // flushing byte + break; + } + /* If size is not 0, append remaining user data. */ + if (wi.user_data.size > 0) + { + int i; + for(i=wi.user_data.size;i<11;i++) + { + wi.user_data.data_payload[i] = 0; + } + viddec_pm_setup_userdata(&wi); + viddec_pm_append_workitem(ctxt, &wi); + wi.user_data.size = 0; + } + + return(status); +} // vc1_ParseAndAppendUserData diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h new file mode 100644 index 0000000..d0e2f00 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h @@ -0,0 +1,136 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Common functions for parsing VC-1 bitstreams. +// +*/ + +#ifndef _VC1PARSE_H_ +#define _VC1PARSE_H_ + +#include "viddec_parser_ops.h" +#include "vc1.h" + +/** @weakgroup vc1parse_defs VC-1 Parse Definitions */ +/** @ingroup vc1parse_defs */ +/*@{*/ + +/* This macro gets the next less-than-nine bits from the bitstream. It is +assumed that numBits is less than ten. */ +#ifdef VC1_VERBOSE +#include +#define AUTO_TRACE OS_INFO("trace: %s\n", __FUNCTION__) +#define DEBUGBITS(arg1, args ...) OS_INFO( arg1, ## args) +#else +#define AUTO_TRACE +#define DEBUGBITS(...) +#endif + +extern void *memset(void *s, int32_t c, uint32_t n); + +/* This macro gets the next numBits from the bitstream. 
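 Usage example from the
+picture-layer parsers: VC1_GET_BITS9(5, picLayerHeader->PQINDEX) reads the 5-bit PQINDEX
+field, while VC1_PEEK_BITS(2, temp) inspects the 2-bit FCM field without consuming it.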
 */
+#define VC1_GET_BITS VC1_GET_BITS9
+#define VC1_GET_BITS9(numBits, value) \
+{ uint32_t __tmp__; \
+    viddec_pm_get_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \
+    value = __tmp__;\
+    DEBUGBITS("BIT:%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \
+}
+
+#define VC1_PEEK_BITS(numBits, value) \
+{ uint32_t __tmp__; \
+    viddec_pm_peek_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \
+    value = __tmp__;\
+    DEBUGBITS("PEEK%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \
+}
+
+/* This macro reports a failure if the condition is not true (verbose builds only). */
+#ifdef VC1_VERBOSE
+#define VC1_ASSERT(condition) \
+{ \
+    if (! (condition)) \
+        OS_INFO("Failed " #condition "!\n"); \
+}
+#else
+#define VC1_ASSERT(condition)
+#endif
+
+/*@}*/
+
+/** @weakgroup vc1parse VC-1 Parse Functions */
+/** @ingroup vc1parse */
+/*@{*/
+
+extern const uint8_t VC1_MVMODE_LOW_TBL[];
+extern const uint8_t VC1_MVMODE_HIGH_TBL[];
+extern const int32_t VC1_BITPLANE_IMODE_TBL[];
+extern const int32_t VC1_BITPLANE_K_TBL[];
+extern const int32_t VC1_BFRACTION_TBL[];
+extern const int32_t VC1_REFDIST_TBL[];
+
+void vc1_end_frame(vc1_viddec_parser_t *parser);
+
+/* Top-level function to parse the bitstream layers of the RCV format. */
+vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse bitstream layers for the various profiles. */
+vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldLayer(void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse the headers of the various picture layers for the
+simple and main profiles. */
+vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse the common part of the headers of the various
+picture layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldHeader_Adv (void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse the remaining part of the headers of the various progressive
+picture layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse the remaining part of the headers of the various interlace
+frame layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse the remaining part of the headers of the various interlace
+field layers for the advanced profile.
*/ +vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo); + +/* Functions to parse syntax element in bitstream. */ +vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_CalculatePQuant(vc1_Info *pInfo); +vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, uint32_t width, uint32_t height, vc1_bpp_type_t bptype); +vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable); +vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable, int8_t *pFirst, int16_t *pSecond); + +void vc1_start_new_frame(void *parent, vc1_viddec_parser_t *parser); +int32_t vc1_parse_emit_current_frame(void *parent, vc1_viddec_parser_t *parser); + +/* function to handle user data */ +vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc); + +/*@}*/ + +#endif /* _VC1PARSE_H_. */ diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c new file mode 100644 index 0000000..5ee9e18 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c @@ -0,0 +1,753 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 bitstreams. 
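+//  (Specifically, the per-macroblock bitplanes carried in picture headers; the imode
+//  variants handled below are described in SMPTE 421M sec. 8.7.)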
+// +*/ + +#include "vc1parse.h" + +#ifdef VBP +#include "viddec_pm.h" +#endif + +/*----------------------------------------------------------------------------*/ + + +/* put one bit into a buffer + * used for bitplane decoding, each bit correspond to a MB + * HW requires row to start at DW (32 bits) boundary + * input: value - bit value + * mbx - image width in MB + * mby - image height in MB + * x - x location (column) of MB in MB unit + * y - y location (row) of MB in MB unit + * output: outp - buffer to fill + */ +//#define put_bit(value,x,y,mbx,mby,invert,outp) +static inline void put_bit( uint32_t value, int x, int y, int mbx, int mby, uint8_t invert, uint32_t* outp) +{ + int bit; + uint32_t *out; + + bit = mby; + + value ^= invert; + if (!value) return; /* assume buffer is initialized with zeros */ + + out = outp; + /* go to corresponding row location in DW unit */ + out += (( mbx + 31 ) >> 5) * y; + out += x >> 5; /* go to corresponding column location in DW unit */ + bit = x & 0x1f; /* compute remaining bits */ + *out |= 1 << bit; /* put bit */ +} + +/* if b is the bit at location (x,y) + * b = b^invert + * used for bitplane decoding, each bit correspond to a MB + * HW requires row to start at DW (32 bits) boundary + * input: value - bit value + * x - x location (column) of MB in MB unit + * y - y location (row) of MB in MB unit + * mbx - image width in MB + * output: outp - buffer to fill + * returns bit value + */ +static inline int xor_bit( int x, int y, int mbx, uint32_t invert, uint32_t* outp) +{ + int bit; + uint32_t *out; + uint8_t value; + //if (invert == 0) return; /* do nothing if XOR with 0 */ + + out = outp; + out += (( mbx + 31 ) >> 5) * y; /* go to corresponding row location in DW unit */ + out += x >> 5; /* go to corresponding row location in DW unit */ + bit = x & 0x1f; /* compute remaining bits */ + + if (invert == 1) + *out ^= (1 << bit); /* put XOR bit */ + value = (*out & (1 << bit)) >> bit; /* return bit value */ + + return(value); + +} + +/* get bit at location (x,y) + * used for bitplane decoding, each bit correspond to a MB + * HW requires row to start at DW (32 bits) boundary + * input: value - bit value + * x - x location (column) of MB in MB unit + * y - y location (row) of MB in MB unit + * mbx - image width in MB + * outp - bit buffer in dwords + * returns bit value + */ +static inline int get_bit( int x, int y, int mbx, uint32_t* outp) +{ + int bit; + uint32_t *out; + uint8_t value; + + out = outp; + out += (( mbx + 31 ) >> 5) * y; /* go to corresponding row location in DW unit */ + out += x >> 5; /* go to corresponding row location in DW unit */ + bit = x & 0x1f; /* compute remaining bits */ + value = (*out & (1 << bit)) >> bit; /* return bit value */ + + return(value); + +} + +static void vc1_InverseDiff(vc1_Bitplane *pBitplane, int32_t widthMB, int32_t heightMB) +{ + int32_t i, j, previousBit=0, temp; + + for (i = 0; i < heightMB; i++) + { + for (j = 0; j < widthMB; j++) + { + if ((i == 0 && j == 0)) + { + previousBit=xor_bit(j, i, widthMB, pBitplane->invert, + pBitplane->databits); + } + else if (j == 0) /* XOR with TOP */ + { + previousBit = get_bit(0, i-1, widthMB, pBitplane->databits); + temp=xor_bit(j, i, widthMB, previousBit, + pBitplane->databits); + previousBit = temp; + } + //TODO isSameAsTop can be optimized + else if (((i > 0) && (previousBit != + get_bit(j, i-1, widthMB, pBitplane->databits)))) + { + temp=xor_bit(j, i, widthMB, pBitplane->invert, + pBitplane->databits); + previousBit = temp; + } + else + { + temp=xor_bit(j, i, widthMB, 
previousBit, + pBitplane->databits); + previousBit = temp; + } + } + } +} + + +/*----------------------------------------------------------------------------*/ +/* implement normal 2 mode bitplane decoding, SMPTE 412M 8.7.3.2 + * width, height are in MB unit. + */ +static void vc1_Norm2ModeDecode(void* ctxt, vc1_Bitplane *pBitplane, + int32_t width, int32_t height) +{ + int32_t i; + int32_t tmp_databits = 0; + + int32_t row[2], col[2]; + int8_t tmp=0; + + /* disable pBitplane->invert in the Norm2 decode stage of + VC1_BITPLANE_DIFF2_MODE */ + if (pBitplane->imode == VC1_BITPLANE_DIFF2_MODE) + { + tmp = pBitplane->invert; + pBitplane->invert=0; + } + + // By default, initialize the values for the even case + col[0] = 0; /* i%width; */ + row[0] = 0; /* i/width; */ + col[1] = 1; /* (i+1)%width; */ + row[1] = 0; /* (i+1)/width; */ + + // If width*height is odd, the first bit is the value of the bitplane + // for the first macroblock + if ((width*height) & 1) /* first bit if size is odd */ + { + VC1_GET_BITS(1, tmp_databits); + put_bit(tmp_databits, 0, 0, width, height, pBitplane->invert, + pBitplane->databits); + + // Modify initialization for odd sizes + col[0] = 1; /* i%width; */ + col[1] = 2; /* (i+1)%width; */ + + // Consider special case where width is 1 + if(width == 1) + { + col[0] = 0; /* i%width; */ + row[0] = 1; /* i/width; */ + col[1] = 0; /* (i+1)%width; */ + row[1] = 2; /* (i+1)/width; */ + } + } + + /* decode every pair of bits in natural scan order */ + for (i = (width*height) & 1; i < (width*height/2)*2; i += 2) + { + int32_t tmp = 0; + + //col[0]=i%width; + //row[0]=i/width; + //col[1]=(i+1)%width; + //row[1]=(i+1)/width; + + VC1_GET_BITS(1, tmp); + if (tmp == 0) + { + put_bit(0, col[0],row[0], width, height, pBitplane->invert, + pBitplane->databits); + put_bit(0, col[1],row[1], width, height, pBitplane->invert, + pBitplane->databits); + } + else + { + VC1_GET_BITS(1, tmp); + if (tmp == 1) + { + put_bit(1, col[0],row[0], width, height, pBitplane->invert, + pBitplane->databits); + put_bit(1, col[1],row[1], width, height, pBitplane->invert, + pBitplane->databits); + } + else + { + VC1_GET_BITS(1, tmp); + if (tmp == 0) + { + put_bit(1, col[0],row[0], width, height, pBitplane->invert, + pBitplane->databits); + put_bit(0, col[1],row[1], width, height, pBitplane->invert, + pBitplane->databits); + } + else + { + put_bit(0, col[0],row[0], width, height, pBitplane->invert, + pBitplane->databits); + put_bit(1, col[1],row[1], width, height, pBitplane->invert, + pBitplane->databits); + } + } + } + + // Consider special case where width is 1 + if(width == 1) + { + row[0] += 2; + row[1] += 2; + } + else + { + col[0] += 2; /* i%width; */ + if ( col[0] >= width ) + { + // For odd sizes, col[0] can alternatively start at 0 and 1 + col[0] -= width; + row[0]++; + } + + col[1] += 2; /* (i+1)%width; */ + if ( col[1] >= width ) + { + // For odd sizes, col[1] can alternatively start at 0 and 1 + col[1] -= width; + row[1]++; + } + } + } + + /* restore value */ + pBitplane->invert=tmp; +} + +/*----------------------------------------------------------------------------*/ +/* compute Normal-6 mode bitplane decoding + * algorithm is described in SMPTE 421M 8.7.3.4 + * width, height are in MB unit. 
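+ * Example: a 22x15 MB bitplane has height divisible by 3 but not width, so 2x3 tiles
+ * are used: sizeW = 22/2 = 11 by sizeH = 15/3 = 5 tiles, each filled from one VLC-decoded
+ * value k whose six bits map to the tile's macroblocks; width is even, so no residual
+ * column remains.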
+ */ +static void vc1_Norm6ModeDecode(void* ctxt, vc1_Bitplane *pBitplane, + int32_t width, int32_t height) +{ + vc1_Status status; + int32_t i, j, k; + int32_t ResidualX = 0; + int32_t ResidualY = 0; + uint8_t _2x3tiled = (((width%3)!=0)&&((height%3)==0)); + + int32_t row, col; + int8_t tmp=0; + + /* disable pBitplane->invert in the Norm2 decode stage of + VC1_BITPLANE_DIFF2_MODE */ + if (pBitplane->imode == VC1_BITPLANE_DIFF6_MODE) + { + tmp = pBitplane->invert; + pBitplane->invert=0; + } + + if (_2x3tiled) + { + int32_t sizeW = width/2; + int32_t sizeH = height/3; + + for (i = 0; i < sizeH; i++) + { + row = 3*i; /* compute row location for tile */ + + for (j = 0; j < sizeW; j++) + { + col = 2*j + (width & 1); /* compute column location for tile */ + + /* get k=sum(bi2^i) were i is the ith bit of the tile */ + status = vc1_DecodeHuffmanOne(ctxt, &k, VC1_BITPLANE_K_TBL); + VC1_ASSERT(status == VC1_STATUS_OK); + + /* put bits in tile */ + put_bit(k&1, col, row, width, height, pBitplane->invert, + pBitplane->databits); + put_bit(((k&2)>>1), col+1, row, width, height, + pBitplane->invert,pBitplane->databits); + + put_bit(((k&4)>>2), col, row+1, width, height, + pBitplane->invert,pBitplane->databits); + put_bit(((k&8)>>3), col+1, row+1, width, height, + pBitplane->invert,pBitplane->databits); + + put_bit(((k&16)>>4), col, row+2, width, height, + pBitplane->invert,pBitplane->databits); + put_bit(((k&32)>>5), col+1, row+2, width, + height,pBitplane->invert, pBitplane->databits); + } + } + ResidualX = width & 1; + ResidualY = 0; + } + else /* 3x2 tile */ + { + int32_t sizeW = width/3; + int32_t sizeH = height/2; + + for (i = 0; i < sizeH; i++) + { + row = 2*i + (height&1) ; /* compute row location for tile */ + + for (j = 0; j < sizeW; j++) + { + col = 3*j + (width%3); /* compute column location for tile */ + + /* get k=sum(bi2^i) were i is the ith bit of the tile */ + status = vc1_DecodeHuffmanOne(ctxt, &k, VC1_BITPLANE_K_TBL); + VC1_ASSERT(status == VC1_STATUS_OK); + + put_bit(k&1, col, row, width, height,pBitplane->invert, + pBitplane->databits); + put_bit((k&2)>>1, col+1, row, width, height, pBitplane->invert, + pBitplane->databits); + put_bit((k&4)>>2, col+2, row, width, height, pBitplane->invert, + pBitplane->databits); + + put_bit((k&8)>>3, col, row+1, width, height,pBitplane->invert, + pBitplane->databits); + put_bit((k&16)>>4, col+1, row+1, width, + height,pBitplane->invert, pBitplane->databits); + put_bit((k&32)>>5, col+2, row+1, width, + height,pBitplane->invert, pBitplane->databits); + } + } + ResidualX = width % 3; + ResidualY = height & 1; + } + +#ifndef VBP + for (i = 0; i < ResidualX; i++) + { + int32_t ColSkip; + VC1_GET_BITS(1, ColSkip); + + if (1 == ColSkip) + { + for(j = 0; j < height; j++) + { + int32_t Value = 0; + VC1_GET_BITS(1, Value); + put_bit(Value, i, j, width, height,pBitplane->invert, + pBitplane->databits); + } + } + } + + for (j = 0; j < ResidualY; j++) + { + int32_t RowSkip; + VC1_GET_BITS(1, RowSkip); + if (1 == RowSkip) + { + for (i = ResidualX; i < width; i++) + { + int32_t Value = 0; + VC1_GET_BITS(1, Value); + put_bit(Value, i, j, width, height,pBitplane->invert, + pBitplane->databits); + } + } + } + #else + int32_t Value = 0; + for (i = 0; i < ResidualX; i++) + { + int32_t ColSkip; + VC1_GET_BITS(1, ColSkip); + Value = 0; + for(j = 0; j < height; j++) + { + if (1 == ColSkip) + { + VC1_GET_BITS(1, Value); + } + put_bit(Value, i, j, width, height,pBitplane->invert, + pBitplane->databits); + } + } + + for (j = 0; j < ResidualY; j++) + { + int32_t RowSkip; + 
VC1_GET_BITS(1, RowSkip);
+        Value = 0;
+        for (i = ResidualX; i < width; i++)
+        {
+            if (1 == RowSkip)
+            {
+                VC1_GET_BITS(1, Value);
+            }
+            put_bit(Value, i, j, width, height, pBitplane->invert,
+                    pBitplane->databits);
+        }
+    }
+    #endif
+
+    /* restore value */
+    pBitplane->invert = tmp;
+}
+
+/*----------------------------------------------------------------------------*/
+/* initialize bitplane to an array of zeros
+ * each row begins on a dword boundary
+ * input:
+ *    width: width in MB unit
+ *    height: height in MB unit
+ * returns even bitplane size in dwords
+ */
+int initBitplane(vc1_Bitplane *pBitplane, uint32_t width, uint32_t height)
+{
+    int i;
+    int numDword = 0;
+
+    numDword = ((width + 31)>>5) * height;
+    numDword += numDword & 1; /* add 1 in case numDword is odd */
+
+    /* clear the dwords backing the bitplane */
+    for (i = 0; i < numDword; i++) pBitplane->databits[i] = 0;
+    return(numDword);
+}
+
+/*----------------------------------------------------------------------------*/
+/* modified IPP code for bitplane decoding
+ * width: width in MB unit
+ * height: height in MB unit
+ */
+vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo,
+                              uint32_t width, uint32_t height, vc1_bpp_type_t bpnum)
+{
+    uint32_t i, j;
+    uint32_t tempValue;
+    vc1_Status status = VC1_STATUS_OK;
+    uint32_t biplaneSz; /* bitplane sz in dwords */
+    vc1_Bitplane bp;
+    vc1_Bitplane *bpp = &bp;
+
+    // By default, set imode to raw
+    pInfo->metadata.bp_raw[bpnum - VIDDEC_WORKLOAD_VC1_BITPLANE0] = true;
+
+    // bitplane data is temporarily stored in the vc1 context
+    bpp->databits = pInfo->bitplane;
+
+    /* init bitplane to zero; the function returns the bitplane buffer size in dwords */
+    biplaneSz = initBitplane(bpp, width, height);
+
+    VC1_GET_BITS(1, tempValue);
+    bpp->invert = (uint8_t) tempValue;
+
+    if ((status = vc1_DecodeHuffmanOne(ctxt, &bpp->imode,
+                                       VC1_BITPLANE_IMODE_TBL)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    // If the imode is VC1_BITPLANE_RAW_MODE, the bitplane information is in the MB layer,
+    // so there is no need to parse for bitplane information in the picture layer;
+    // only bits need to be appropriately set in the block control register.
+    // In all other modes, bitplane information follows and needs to be parsed and sent to the decoder.
+
+    if (bpp->imode == VC1_BITPLANE_NORM2_MODE)
+    {
+        vc1_Norm2ModeDecode(ctxt, bpp, width, height);
+    }
+    else if (bpp->imode == VC1_BITPLANE_DIFF2_MODE)
+    {
+        vc1_Norm2ModeDecode(ctxt, bpp, width, height);
+        vc1_InverseDiff(bpp, width, height);
+    }
+    else if (bpp->imode == VC1_BITPLANE_NORM6_MODE)
+    {
+        vc1_Norm6ModeDecode(ctxt, bpp, width, height);
+    }
+    else if (bpp->imode == VC1_BITPLANE_DIFF6_MODE)
+    {
+        vc1_Norm6ModeDecode(ctxt, bpp, width, height);
+        vc1_InverseDiff(bpp, width, height);
+    }
+    else if (bpp->imode == VC1_BITPLANE_ROWSKIP_MODE)
+    {
+        for (i = 0; i < height; i++)
+        {
+            VC1_GET_BITS(1, tempValue);
+            /* if tempValue == 0, the row stays as a row of zero dwords */
+            if (tempValue == 1)
+            {
+                for (j = 0; j < width; j++)
+                {
+                    VC1_GET_BITS(1, tempValue);
+                    put_bit(tempValue, j, i, width, height, bpp->invert,
+                            bpp->databits);
+                }
+            }
+            else if (bpp->invert) { //TO TEST
+                for (j = 0; j < width; j++) {
+                    put_bit(0, j, i, width, height, bpp->invert,
+                            bpp->databits);
+                }
+            }
+        }
+    }
+    else if (bpp->imode == VC1_BITPLANE_COLSKIP_MODE)
+    {
+        for (i = 0; i < width; i++)
+        {
+            VC1_GET_BITS(1, tempValue);
+            /* if tempValue == 0 and invert == 0, the column stays filled with zeros */
+            if (tempValue == 1)
+            {
+                for (j = 0; j < height; j++)
+                {
+                    VC1_GET_BITS(1, tempValue);
+                    put_bit(tempValue, i, j, width, height, bpp->invert,
+                            bpp->databits);
+                }
+            }
+            else if
(bpp->invert) { // fill column with ones + for (j = 0; j < height; j++) { + put_bit( 0, i, j, width, height, bpp->invert, + bpp->databits); + } + }//end for else + } + } + + if(bpp->imode != VC1_BITPLANE_RAW_MODE) + { + uint32_t* pl; + int sizeinbytes,nitems,i; + viddec_workload_item_t wi; + uint32_t *bit_dw; + + pInfo->metadata.bp_raw[bpnum - VIDDEC_WORKLOAD_VC1_BITPLANE0] = false; + + sizeinbytes = ((( width + 31 ) / 32)) * (height) * 4; + + pl = bpp->databits; + bit_dw = bpp->databits; + + // How many payloads must be generated + nitems = (sizeinbytes + (sizeof(wi.data.data_payload) - 1)) / + sizeof(wi.data.data_payload); + + // Dump DMEM to an array of workitems + for( i = 0; i < nitems; i++ ) + { + wi.vwi_type = bpnum; + wi.data.data_offset = (char *)pl - (char *)bit_dw; // offset within struct + wi.data.data_payload[0] = pl[0]; + wi.data.data_payload[1] = pl[1]; + pl += 2; + + viddec_pm_append_workitem( ctxt, &wi ); + } + } + +#ifdef VBP + { + viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)ctxt; + vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)(cxt->codec_data); + + if (biplaneSz > 4096) + { + /* bigger than we got, so let's bail with a non meaningful error. */ + return VC1_STATUS_ERROR; + } + + /* At this point bp contains the information we need for the bit-plane */ + /* bpnum is the enumeration that tells us which bitplane this is for. */ + /* pInfo->picLayerHeader.ACPRED is one of the bitplanes I need to fill.*/ + switch (bpnum) + { + case VIDDEC_WORKLOAD_VC1_BITPLANE0: + if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) + { + if(bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.FORWARDMB.invert = bp.invert; + pInfo->picLayerHeader.FORWARDMB.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_forwardmb[i] = bp.databits[i]; + } + pInfo->picLayerHeader.FORWARDMB.databits = parser->bp_forwardmb; + } + else + { + pInfo->picLayerHeader.raw_FORWARDMB = 1; + } + } + if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) ) + { + if(bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.ACPRED.invert = bp.invert; + pInfo->picLayerHeader.ACPRED.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_acpred[i] = bp.databits[i]; + } + pInfo->picLayerHeader.ACPRED.databits = parser->bp_acpred; + } + else + { + pInfo->picLayerHeader.raw_ACPRED = 1; + } + } + if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + { + if(bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.MVTYPEMB.invert = bp.invert; + pInfo->picLayerHeader.MVTYPEMB.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_mvtypemb[i] = bp.databits[i]; + } + pInfo->picLayerHeader.MVTYPEMB.databits = parser->bp_mvtypemb; + } + else + { + pInfo->picLayerHeader.raw_MVTYPEMB = 1; + } + } + break; + case VIDDEC_WORKLOAD_VC1_BITPLANE1: + if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) ) + { + if(bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.OVERFLAGS.invert = bp.invert; + pInfo->picLayerHeader.OVERFLAGS.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_overflags[i] = bp.databits[i]; + } + pInfo->picLayerHeader.OVERFLAGS.databits = parser->bp_overflags; + } + else + { + pInfo->picLayerHeader.raw_OVERFLAGS = 1; + } + } + if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) ) + { + if(bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.SKIPMB.invert = bp.invert; + 
pInfo->picLayerHeader.SKIPMB.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_skipmb[i] = bp.databits[i]; + } + pInfo->picLayerHeader.SKIPMB.databits = parser->bp_skipmb; + } + else + { + pInfo->picLayerHeader.raw_SKIPMB = 1; + } + } + break; + case VIDDEC_WORKLOAD_VC1_BITPLANE2: + if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) ) + { + if(bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.DIRECTMB.invert = bp.invert; + pInfo->picLayerHeader.DIRECTMB.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_directmb[i] = bp.databits[i]; + } + pInfo->picLayerHeader.DIRECTMB.databits = parser->bp_directmb; + } + else + { + pInfo->picLayerHeader.raw_DIRECTMB = 1; + } + } + if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) ) + { + if(bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.FIELDTX.invert = bp.invert; + pInfo->picLayerHeader.FIELDTX.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_fieldtx[i] = bp.databits[i]; + } + pInfo->picLayerHeader.FIELDTX.databits = parser->bp_fieldtx; + } + else + { + pInfo->picLayerHeader.raw_FIELDTX = 1; + } + } + break; + } + } +#endif + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c new file mode 100644 index 0000000..e73cde3 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c @@ -0,0 +1,100 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for progressive B picture in simple +// or main profile bitstream. +// +*/ + +#include "vc1parse.h" +#include "viddec_fw_debug.h" // For DEB + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses progressive B picture for main + * profile bitstream. This parser starts after PTYPE was parsed but stops + * before parsing of macroblock layer. + * Table 21 of SMPTE 421M after processing up to PTYPE for B picture. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != + VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else picLayerHeader->HALFQP=0; + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + } + + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ? 
+ VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV;
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ VC1_GET_BITS9(2, picLayerHeader->MVTAB);
+ VC1_GET_BITS9(2, picLayerHeader->CBPTAB);
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c
new file mode 100644
index 0000000..4074309
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c
@@ -0,0 +1,257 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for progressive B picture in advanced
+// profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+#include "viddec_fw_debug.h" // For DEB
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses progressive B picture for advanced
+ * profile bitstream.
+ * Table 22 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ?
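+ /* For B pictures MVMODE is a single bit: 1 selects 1MV and 0 selects
+ half-pel bilinear 1MV. */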
+ VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) + { + return status; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(2, picLayerHeader->MVTAB); + VC1_GET_BITS9(2, picLayerHeader->CBPTAB); + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace B frame for advanced + * profile bitstream. + * Table 84 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != + VC1_STATUS_OK) + { + return status; + } + + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + VC1_GET_BITS9(1, picLayerHeader->INTCOMP); + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) + { + return status; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + // EPC picLayerHeader->MVMODE = VC1_MVMODE_1MV; + VC1_GET_BITS9(2, picLayerHeader->MBMODETAB); + VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */ + VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ + VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */ + VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace B field for advanced + * profile bitstream. + * Table 89 of SMPTE 421M after processing up to BFRACTION by + * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock + * layer. 
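+ * The MVMODE element read below is a truncated unary code that indexes
+ * VC1_MVMODE_LOW_TBL (PQUANT > 12) or VC1_MVMODE_HIGH_TBL:
+ * '1' -> table[0], '01' -> table[1], '001' -> table[2], '000' -> table[3].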
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ uint8_t bit_count;
+ const uint8_t *table;
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader* picLayerHeader = &pInfo->picLayerHeader;
+
+ VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+
+ if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (picLayerHeader->PQINDEX <= 8)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+ }
+ else
+ picLayerHeader->HALFQP = 0;
+
+ if (md->QUANTIZER == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+ picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+ }
+
+ if (md->POSTPROCFLAG == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->POSTPROC);
+ }
+
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (picLayerHeader->PQUANT > 12)
+ table = VC1_MVMODE_LOW_TBL;
+ else
+ table = VC1_MVMODE_HIGH_TBL;
+
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ while ((picLayerHeader->MVMODE == 0) && (bit_count < 2))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ bit_count++;
+ }
+ if ((bit_count == 2) && (picLayerHeader->MVMODE == 0))
+ bit_count++;
+ picLayerHeader->MVMODE = table[bit_count];
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, (md->heightMB+1)/2, BPP_FORWARDMB)) !=
+ VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ VC1_GET_BITS9(3, picLayerHeader->MBMODETAB);
+ VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. */
+ VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */
+
+ if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */
+ }
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h
new file mode 100644
index 0000000..9e621fc
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h
@@ -0,0 +1,608 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Common definitions for parsing VC-1 bitstreams.
+//
+*/
+
+#ifndef _VC1PARSE_COMMON_DEFS_H_
+#define _VC1PARSE_COMMON_DEFS_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <stdint.h>
+
+/** @weakgroup vc1parse_common_defs VC-1 Common Definitions */
+/** @ingroup vc1parse_common_defs */
+/*@{*/
+
+/** This defines the maximum number of horizontal macroblocks in a picture.
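+For the 2048-pixel maximum coded width this evaluates to (2048+15)/16 = 128;
+VC1_HEIGHT_MB_MAX below is likewise (1088+15)/16 = 68.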
*/
+#define VC1_WIDTH_MB_MAX ((2048+15)/16)
+
+/** This defines the maximum number of vertical macroblocks in a picture. */
+#define VC1_HEIGHT_MB_MAX ((1088+15)/16)
+
+/** This defines the maximum number of bitplane chunks stored per picture. */
+#define VC1_MAX_BITPLANE_CHUNKS 3
+
+/** This defines the value for an invalid BFRACTION syntax element. */
+#define VC1_BFRACTION_INVALID 0
+
+/** This defines the value of the BFRACTION syntax element that signals a BI
+picture. */
+#define VC1_BFRACTION_BI 9
+
+/** This enumeration defines the various supported profiles as defined in
+PROFILE syntax element. */
+enum
+{
+ VC1_PROFILE_SIMPLE,
+ VC1_PROFILE_MAIN,
+ VC1_PROFILE_RESERVED,
+ VC1_PROFILE_ADVANCED
+};
+
+/** This enumeration defines the frame coding mode as defined in FCM syntax
+element. */
+enum
+{
+ VC1_FCM_PROGRESSIVE,
+ VC1_FCM_FRAME_INTERLACE = 2,
+ VC1_FCM_FIELD_INTERLACE = 3
+};
+
+/** This enumeration defines the various bitplane types as defined in IMODE
+syntax element. */
+enum
+{
+ VC1_BITPLANE_RAW_MODE,
+ VC1_BITPLANE_NORM2_MODE,
+ VC1_BITPLANE_DIFF2_MODE,
+ VC1_BITPLANE_NORM6_MODE,
+ VC1_BITPLANE_DIFF6_MODE,
+ VC1_BITPLANE_ROWSKIP_MODE,
+ VC1_BITPLANE_COLSKIP_MODE
+};
+
+/** This enumeration defines the various motion vector modes as defined in
+MVMODE or MVMODE2 syntax element. */
+enum
+{
+ VC1_MVMODE_1MV,
+#ifdef VBP
+ VC1_MVMODE_HPELBI_1MV,
+ VC1_MVMODE_HPEL_1MV,
+#else
+ VC1_MVMODE_HPEL_1MV,
+ VC1_MVMODE_HPELBI_1MV,
+#endif
+ VC1_MVMODE_MIXED_MV,
+ VC1_MVMODE_INTENSCOMP
+};
+
+/** This enumeration defines the extended differential motion vector range flag
+as defined in DMVRANGE syntax element. */
+enum
+{
+ VC1_DMVRANGE_NONE,
+ VC1_DMVRANGE_HORIZONTAL_RANGE,
+ VC1_DMVRANGE_VERTICAL_RANGE,
+ VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE
+};
+
+/** This enumeration defines the intensity compensation field as defined in
+INTCOMPFIELD syntax element. */
+enum
+{
+ VC1_INTCOMP_TOP_FIELD = 1,
+ VC1_INTCOMP_BOTTOM_FIELD = 2,
+ VC1_INTCOMP_BOTH_FIELD = 3
+};
+
+/** This enumeration defines the differential quantizer profiles as defined in
+DQPROFILE syntax element. */
+enum
+{
+ VC1_DQPROFILE_ALL4EDGES,
+ VC1_DQPROFILE_DBLEDGES,
+ VC1_DQPROFILE_SNGLEDGES,
+ VC1_DQPROFILE_ALLMBLKS
+};
+
+/** This enumeration defines the conditional overlap flag as defined in CONDOVER
+syntax element. */
+enum
+{
+ VC1_CONDOVER_FLAG_NONE = 0,
+ VC1_CONDOVER_FLAG_ALL = 2,
+ VC1_CONDOVER_FLAG_SOME = 3
+};
+
+/** This enumeration defines the type of quantizer to be used and is derived
+from bitstream syntax. */
+enum
+{
+ VC1_QUANTIZER_NONUNIFORM,
+ VC1_QUANTIZER_UNIFORM
+};
+
+/** This structure represents the various bitplanes within VC-1 bitstream. */
+typedef struct
+{
+ uint8_t invert;
+ int32_t imode;
+ uint32_t *databits;
+} vc1_Bitplane;
+
+/** This structure represents all bitstream metadata needed for register programming. */
+typedef struct
+{
+ // From Sequence Layer for Advanced Profile
+ uint8_t PROFILE; /** 2 bit(s). */
+#ifdef VBP
+ uint8_t LEVEL;
+#endif
+ uint8_t POSTPROCFLAG; /** 1 bit(s). */
+ uint8_t PULLDOWN; /** 1 bit(s). */
+ uint8_t INTERLACE; /** 1 bit(s). */
+ uint8_t TFCNTRFLAG; /** 1 bit(s). */
+ uint8_t FINTERPFLAG; /** 1 bit(s). */
+ uint8_t PSF; /** 1 bit(s). */
+ uint8_t HRD_NUM_LEAKY_BUCKETS; /** 5 bit(s). */
+
+ // From STRUCT_C
+ uint8_t MAXBFRAMES; /** 3 bit(s). */
+ uint8_t MULTIRES; /** 1 bit(s).
*/ + + // From EntryPoint Layer for Advanced Profile + uint8_t PANSCAN_FLAG; + uint8_t REFDIST_FLAG; + uint8_t LOOPFILTER; + uint8_t FASTUVMC; + uint8_t EXTENDED_MV; + uint8_t DQUANT; + uint8_t VSTRANSFORM; + uint8_t OVERLAP; + uint8_t QUANTIZER; + uint8_t EXTENDED_DMV; + uint8_t RANGE_MAPY_FLAG; + uint8_t RANGE_MAPY; + uint8_t RANGE_MAPUV_FLAG; + uint8_t RANGE_MAPUV; + + // From Picture Header + uint8_t RANGERED; /** 1 bit(s). */ + uint8_t RNDCTRL; /** 1 bit(s), rcv specific. */ + + // REFDIST is present only in field-interlaced mode on I/I, I/P, P/I, P/P frames + // From Canmore, looks like this needs to be propagated to following B frames + uint8_t REFDIST; + uint8_t INTCOMPFIELD; /** ? bit(s)? */ + uint8_t LUMSCALE2; /** 6 bit(s). */ + uint8_t LUMSHIFT2; /** 6 bit(s). */ + uint8_t bp_raw[VC1_MAX_BITPLANE_CHUNKS]; + + // From SequenceLayerHeader, EntryPointHeader or Struct_A + uint16_t width; + uint16_t height; + uint16_t widthMB; + uint16_t heightMB; + +#ifdef VBP + uint8_t CLOSED_ENTRY; + uint8_t BROKEN_LINK; + uint8_t SYNCMARKER; +#endif + +} vc1_metadata_t; + +/** This structure represents the sequence header for advanced profile. */ +typedef struct +{ + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned BITRTQ_POSTPROC:5; + unsigned FRMRTQ_POSTPROC:3; + unsigned COLORDIFF_FORMAT:2; + unsigned LEVEL:3; + unsigned PROFILE:2; + unsigned pad:17; + } seq_flags; +#else + struct + { + unsigned pad:17; + unsigned PROFILE:2; + unsigned LEVEL:3; + unsigned COLORDIFF_FORMAT:2; + unsigned FRMRTQ_POSTPROC:3; + unsigned BITRTQ_POSTPROC:5; + } seq_flags; +#endif + uint32_t flags; + }; + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned DISPLAY_EXT:1; + unsigned PSF:1; + unsigned RESERVED:1; + unsigned FINTERPFLAG:1; + unsigned TFCNTRFLAG:1; + unsigned INTERLACE:1; + unsigned PULLDOWN:1; + unsigned MAX_CODED_HEIGHT:12; + unsigned MAX_CODED_WIDTH:12; + unsigned POSTPROCFLAG:1; + } seq_max_size; +#else + struct + { + unsigned POSTPROCFLAG:1; + unsigned MAX_CODED_WIDTH:12; + unsigned MAX_CODED_HEIGHT:12; + unsigned PULLDOWN:1; + unsigned INTERLACE:1; + unsigned TFCNTRFLAG:1; + unsigned FINTERPFLAG:1; + unsigned RESERVED:1; + unsigned PSF:1; + unsigned DISPLAY_EXT:1; + } seq_max_size; +#endif + uint32_t max_size; + }; + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned ASPECT_RATIO_FLAG:1; + unsigned DISP_VERT_SIZE:14; + unsigned DISP_HORIZ_SIZE:14; + unsigned pad:3; + } seq_disp_size; +#else + struct + { + unsigned pad:3; + unsigned DISP_HORIZ_SIZE:14; + unsigned DISP_VERT_SIZE:14; + unsigned ASPECT_RATIO_FLAG:1; + } seq_disp_size; +#endif + uint32_t disp_size; + }; + + uint8_t ASPECT_RATIO; // 4 bits + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned ASPECT_VERT_SIZE:8; + unsigned ASPECT_HORIZ_SIZE:8; + unsigned pad:16; + } seq_aspect_size; +#else + struct + { + unsigned pad:16; + unsigned ASPECT_HORIZ_SIZE:8; + unsigned ASPECT_VERT_SIZE:8; + } seq_aspect_size; +#endif + uint32_t aspect_size; + }; + + uint8_t FRAMERATE_FLAG; // 1b + uint8_t FRAMERATEIND; // 1b + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned FRAMERATEDR:4; + unsigned FRAMERATENR:8; + unsigned pad:20; + } seq_framerate_fraction; +#else + struct + { + unsigned pad:20; + unsigned FRAMERATENR:8; + unsigned FRAMERATEDR:4; + } seq_framerate_fraction; +#endif + uint32_t framerate_fraction; + }; + + uint16_t FRAMERATEEXP; // 16b + uint8_t COLOR_FORMAT_FLAG; // 1b + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned MATRIX_COEF:8; + unsigned TRANSFER_CHAR:8; + unsigned COLOR_PRIM:8; + unsigned 
pad:8; + } seq_color_format; +#else + struct + { + unsigned pad:8; + unsigned COLOR_PRIM:8; + unsigned TRANSFER_CHAR:8; + unsigned MATRIX_COEF:8; + } seq_color_format; +#endif + uint32_t color_format; + }; + + uint8_t HRD_PARAM_FLAG; // 1b + uint8_t HRD_NUM_LEAKY_BUCKETS; // 5b + // No need to parse remaining items - not needed so far +} vc1_SequenceLayerHeader; + +/** This structure represents metadata for struct c. */ +typedef struct +{ + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned res6:1; + unsigned FINTERPFLAG:1; + unsigned QUANTIZER:2; + unsigned MAXBFRAMES:3; + unsigned RANGERED:1; + unsigned SYNCMARKER:1; + unsigned OVERLAP:1; + unsigned res5:1; + unsigned VSTRANSFORM:1; + unsigned DQUANT:2; + unsigned EXTENDED_MV:1; + unsigned FASTUVMC:1; + unsigned res4:1; + unsigned MULTIRES:1; + unsigned res3:1; + unsigned LOOPFILTER:1; + unsigned BITRTQ_POSTPROC:5; + unsigned FRMRTQ_POSTPROC:3; + unsigned PROFILE:4; + } struct_c; +#else + struct + { + unsigned PROFILE:4; + unsigned FRMRTQ_POSTPROC:3; + unsigned BITRTQ_POSTPROC:5; + unsigned LOOPFILTER:1; + unsigned res3:1; + unsigned MULTIRES:1; + unsigned res4:1; + unsigned FASTUVMC:1; + unsigned EXTENDED_MV:1; + unsigned DQUANT:2; + unsigned VSTRANSFORM:1; + unsigned res5:1; + unsigned OVERLAP:1; + unsigned SYNCMARKER:1; + unsigned RANGERED:1; + unsigned MAXBFRAMES:3; + unsigned QUANTIZER:2; + unsigned FINTERPFLAG:1; + unsigned res6:1; + } struct_c; +#endif + uint32_t struct_c_rcv; + }; + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned VERT_SIZE:16; + unsigned HORIZ_SIZE:16; + } struct_a; +#else + struct + { + unsigned HORIZ_SIZE:16; + unsigned VERT_SIZE:16; + } struct_a; +#endif + uint32_t struct_a_rcv; + }; + +} vc1_RcvSequenceHeader; + +/** This structure represents metadata for entry point layers. */ +typedef struct +{ + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned QUANTIZER:2; + unsigned OVERLAP:1; + unsigned VSTRANSFORM:1; + unsigned DQUANT:2; + unsigned EXTENDED_MV:1; + unsigned FASTUVMC:1; + unsigned LOOPFILTER:1; + unsigned REFDIST_FLAG:1; + unsigned PANSCAN_FLAG:1; + unsigned CLOSED_ENTRY:1; + unsigned BROKEN_LINK:1; + unsigned pad1:19; + } ep_flags; +#else + struct + { + unsigned pad1:19; + unsigned BROKEN_LINK:1; + unsigned CLOSED_ENTRY:1; + unsigned PANSCAN_FLAG:1; + unsigned REFDIST_FLAG:1; + unsigned LOOPFILTER:1; + unsigned FASTUVMC:1; + unsigned EXTENDED_MV:1; + unsigned DQUANT:2; + unsigned VSTRANSFORM:1; + unsigned OVERLAP:1; + unsigned QUANTIZER:2; + } ep_flags; +#endif + uint32_t flags; + }; + + // Skipping HRD data because it is not needed for our processing + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned CODED_HEIGHT:12; + unsigned CODED_WIDTH:12; + unsigned pad2:8; + } ep_size; +#else + struct + { + unsigned pad2:8; + unsigned CODED_WIDTH:12; + unsigned CODED_HEIGHT:12; + } ep_size; +#endif + uint32_t size; + }; + + uint8_t CODED_SIZE_FLAG; /** 1 bit(s). */ + uint8_t EXTENDED_DMV; /** 1 bit(s). */ + uint8_t RANGE_MAPY_FLAG; /** 1 bit(s). */ + uint8_t RANGE_MAPY; /** 3 bit(s). */ + uint8_t RANGE_MAPUV_FLAG; /** 1 bit(s). */ + uint8_t RANGE_MAPUV; /** 3 bit(s). */ +} vc1_EntryPointHeader; + +/** This structure represents metadata for slice and picture layers. */ +typedef struct +{ + /* Slice layer. */ + uint16_t SLICE_ADDR; /** 9 bit(s). */ + + /* Picture layer for simple or main profile. */ + uint8_t RANGEREDFRM; /** 1 bit(s). */ + uint8_t PTYPE; /** 4 bit(s)? */ + int8_t BFRACTION_NUM; /** ? bit(s). */ + int16_t BFRACTION_DEN; /** ? bit(s). 
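+Together with BFRACTION_NUM this holds the pair decoded from the BFRACTION
+VLC; the denominator doubles as a marker, with VC1_BFRACTION_BI flagging a
+BI picture and VC1_BFRACTION_INVALID a reserved codeword.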
*/ + uint8_t PQINDEX; /** 5 bit(s). */ + uint8_t HALFQP; /** 1 bit(s). */ + uint8_t PQUANTIZER; /** 1 bit(s). */ + uint8_t MVRANGE; /** 3 bit(s)? */ + uint8_t MVMODE; /** 4 bit(s)? */ + uint8_t MVMODE2; /** 3 bit(s)? */ + uint8_t LUMSCALE; /** 6 bit(s). */ + uint8_t LUMSHIFT; /** 6 bit(s). */ + uint8_t MVTAB; /** 2 bit(s). */ + uint8_t CBPTAB; /** 2 bit(s). */ + uint8_t TTMBF; /** 1 bit(s). */ + uint8_t TTFRM; /** 2 bit(s). */ + uint8_t TRANSACFRM; /** 2 bit(s)? */ + uint8_t TRANSACFRM2; /** 2 bit(s)? */ + uint8_t TRANSDCTAB; /** 1 bit(s). */ + + /* Picture layer for advanced profile. */ + uint8_t FCM; /** 2 bit(s)? */ + uint8_t FPTYPE; /** 3 bit(s). */ + uint8_t TFCNTR; /** 8 bit(s) */ + uint8_t RPTFRM; /** 2 bit(s) */ + uint8_t TFF; /** 1 bit(s). */ + uint8_t RFF; /** 1 bit(s) */ + uint8_t RNDCTRL; /** 1 bit(s). */ + uint8_t UVSAMP; /** 1 bit(s). */ + uint8_t POSTPROC; /** 2 bit(s). */ + uint8_t CONDOVER; /** 2 bit(s)? */ + uint8_t DMVRANGE; /** ? bit(s)? */ + uint8_t MV4SWITCH; /** 1 bit(s). */ + uint8_t INTCOMP; /** 1 bit(s). */ + uint8_t MBMODETAB; /** 2 bit(s). */ + uint8_t MV2BPTAB; /** 2 bit(s). */ + uint8_t MV4BPTAB; /** 2 bit(s). */ + uint8_t NUMREF; /** 1 bit(s). */ + uint8_t REFFIELD; /** 1 bit(s). */ + + /* PAN SCAN */ + uint8_t PS_PRESENT; /** 1 bit(s). */ + uint8_t number_of_pan_scan_window; /** 4 max. */ + viddec_vc1_pan_scan_window_t PAN_SCAN_WINDOW[VIDDEC_PANSCAN_MAX_OFFSETS]; + + /* VOPDQUANT. */ + uint8_t PQDIFF; /** 3 bit(s). */ + uint8_t ABSPQ; /** 5 bit(s). */ + uint8_t DQUANTFRM; /** 1 bit(s). */ + uint8_t DQPROFILE; /** 2 bit(s). */ + uint8_t DQSBEDGE; /** 2 bit(s). */ + uint8_t DQBILEVEL; /** 1 bit(s). */ + + /* Others. */ + uint8_t PTypeField1; + uint8_t PTypeField2; + uint32_t PQUANT; + uint8_t CurrField; + uint8_t BottomField; + uint32_t UniformQuant; + +#ifdef VBP + uint8_t raw_MVTYPEMB; + uint8_t raw_DIRECTMB; + uint8_t raw_SKIPMB; + uint8_t raw_ACPRED; + uint8_t raw_FIELDTX; + uint8_t raw_OVERFLAGS; + uint8_t raw_FORWARDMB; + + vc1_Bitplane MVTYPEMB; + vc1_Bitplane DIRECTMB; + vc1_Bitplane SKIPMB; + vc1_Bitplane ACPRED; + vc1_Bitplane FIELDTX; + vc1_Bitplane OVERFLAGS; + vc1_Bitplane FORWARDMB; + uint32_t ALTPQUANT; + uint8_t DQDBEDGE; +#endif + +} vc1_PictureLayerHeader; + +/*@}*/ + +#ifdef __cplusplus +} +#endif /* __cplusplus. */ + +#endif /* _VC1PARSE_COMMON_DEFS_H_. */ diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c new file mode 100644 index 0000000..6fec35a --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c @@ -0,0 +1,198 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Contains tables for VLC decoding of syntax elements in simple +// or main profile of VC-1 bitstream. 
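+//
+// All tables below share the layout consumed by vc1_DecodeHuffmanOne() and
+// vc1_DecodeHuffmanPair() in vc1parse_huffman.c: entry [0] is the maximum
+// codeword length in bits, entry [1] the number of sub-tables, followed by
+// one size per sub-table (the sizes must sum to entry [0]). Then, for each
+// code length from 1 bit upward, a count is stored, followed by that many
+// (codeword, value) pairs -- (codeword, first, second) triplets for the
+// pair tables -- and a -1 terminates the table.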
+// +*/ + +#include "vc1parse.h" + +const uint8_t VC1_MVMODE_LOW_TBL[] = +{ + VC1_MVMODE_HPELBI_1MV, + VC1_MVMODE_1MV, + VC1_MVMODE_HPEL_1MV, + VC1_MVMODE_MIXED_MV, + VC1_MVMODE_INTENSCOMP +}; + +const uint8_t VC1_MVMODE_HIGH_TBL[] = +{ + VC1_MVMODE_1MV, + VC1_MVMODE_MIXED_MV, + VC1_MVMODE_HPEL_1MV, + VC1_MVMODE_HPELBI_1MV, + VC1_MVMODE_INTENSCOMP +}; + +const int32_t VC1_BITPLANE_IMODE_TBL[] = +{ + 4, /* max bits */ + 1, /* total subtables */ + 4, /* subtable sizes */ + + 0, /* 1-bit codes */ + 2, /* 2-bit codes */ + 2, VC1_BITPLANE_NORM2_MODE, + 3, VC1_BITPLANE_NORM6_MODE, + 3, /* 3-bit codes */ + 1, VC1_BITPLANE_DIFF2_MODE, + 2, VC1_BITPLANE_ROWSKIP_MODE, + 3, VC1_BITPLANE_COLSKIP_MODE, + 2, /* 4-bit codes */ + 0, VC1_BITPLANE_RAW_MODE, + 1, VC1_BITPLANE_DIFF6_MODE, +-1 +}; + +/* This VLC table is used for decoding of k in bitplane. */ +const int32_t VC1_BITPLANE_K_TBL[] = +{ + 13, /* max bits */ + 2, /* total subtables */ + 6,7,/* subtable sizes */ + + 1, /* 1-bit codes */ + 1, 0 , + 0, /* 2-bit codes */ + 0, /* 3-bit codes */ + 6, /* 4-bit codes */ + 2, 1, 3, 2, 4, 4, 5, 8, + 6, 16, 7, 32, + 0, /* 5-bit codes */ + 1, /* 6-bit codes */ + (3 << 1)| 1, 63, + 0, /* 7-bit codes */ + 15, /* 8-bit codes */ + 0, 3, 1, 5, 2, 6, 3, 9, + 4, 10, 5, 12, 6, 17, 7, 18, + 8, 20, 9, 24, 10, 33, 11, 34, + 12, 36, 13, 40, 14, 48, + 6, /* 9-bit codes */ + (3 << 4)| 7, 31, + (3 << 4)| 6, 47, + (3 << 4)| 5, 55, + (3 << 4)| 4, 59, + + (3 << 4)| 3, 61, + (3 << 4)| 2, 62, + 20, /* 10-bit codes */ + (1 << 6)| 11, 11, + (1 << 6)| 7, 7 , + (1 << 6)| 13, 13, + (1 << 6)| 14, 14, + + (1 << 6)| 19, 19, + (1 << 6)| 21, 21, + (1 << 6)| 22, 22, + (1 << 6)| 25, 25, + + (1 << 6)| 26, 26, + (1 << 6)| 28, 28, + (1 << 6)| 3, 35, + (1 << 6)| 5, 37, + + (1 << 6)| 6, 38, + (1 << 6)| 9, 41, + (1 << 6)| 10, 42, + (1 << 6)| 12, 44, + + (1 << 6)| 17, 49, + (1 << 6)| 18, 50, + (1 << 6)| 20, 52, + (1 << 6)| 24, 56, + 0, /* 11-bit codes */ + 0, /* 12-bit codes */ + 15, /* 13-bit codes */ + (3 << 8)| 14, 15, + (3 << 8)| 13, 23, + (3 << 8)| 12, 27, + (3 << 8)| 11, 29, + + (3 << 8)| 10, 30, + (3 << 8)| 9, 39, + (3 << 8)| 8, 43, + (3 << 8)| 7, 45, + + (3 << 8)| 6, 46, + (3 << 8)| 5, 51, + (3 << 8)| 4, 53, + (3 << 8)| 3, 54, + + (3 << 8)| 2, 57, + (3 << 8)| 1, 58, + (3 << 8)| 0, 60, + -1 +}; + +/* This VLC table is used for decoding of BFRACTION. */ +const int32_t VC1_BFRACTION_TBL[] = +{ + 7, /* max bits */ + 2, /* total subtables */ + 3,4, /* subtable sizes */ + 0, /* 1-bit codes */ + 0, /* 2-bit codes */ + 7, /* 3-bit codes */ + 0x00,1,2, 0x01,1,3, 0x02,2,3, 0x03,1,4, + 0x04,3,4, 0x05,1,5, 0x06,2,5, + 0, /* 4-bit codes */ + 0, /* 5-bit codes */ + 0, /* 6-bit codes */ + 16, /* 7-bit codes */ + 0x70, 3,5, 0x71, 4,5, 0x72, 1,6, 0x73, 5,6, + 0x74, 1,7, 0x75, 2,7, 0x76, 3,7, 0x77, 4,7, + 0x78, 5,7, 0x79, 6,7, 0x7A, 1,8, 0x7B, 3,8, + 0x7C, 5,8, 0x7D, 7,8, + 0x7E, VC1_BFRACTION_INVALID,VC1_BFRACTION_INVALID, + 0x7F, VC1_BFRACTION_BI, VC1_BFRACTION_BI, + + -1 +}; + +/* This table is used for VLC decoding of REFDIST. */ +const int32_t VC1_REFDIST_TBL[] = +{ + 16, /* Max bits. */ + 3, /* Total sub-tables. */ + 5, 6, 5, /* Sub-table sizes. */ + + 0, /* 1-bit codes. */ + 3, /* 2-bit codes. */ + 0, 0, 1, 1, 2, 2, + 1, /* 3-bit codes. */ + 6, 3, + 1, /* 4-bit codes. */ + 14, 4, + 1, /* 5-bit codes. */ + 30, 5, + 1, /* 6-bit codes. */ + 62, 6, + 1, /* 7-bit codes. */ + 126, 7, + 1, /* 8-bit codes. */ + 254, 8, + 1, /* 9-bit codes. */ + 510, 9, + 1, /* 10-bit codes. */ + 1022, 10, + 1, /* 11-bit codes. */ + 2046, 11, + 1, /* 12-bit codes. 
*/ + 4094, 12, + 1, /* 13-bit codes. */ + 8190, 13, + 1, /* 14-bit codes. */ + 16382, 14, + 1, /* 15-bit codes. */ + 32766, 15, + 1, /* 16-bit codes. */ + 65534, 16, + -1 /* end of table. */ +}; diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c new file mode 100644 index 0000000..c2f5985 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c @@ -0,0 +1,97 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VLC syntax elements within VC-1 bitstream. +// +*/ + +#include "vc1parse.h" + +/*----------------------------------------------------------------------------*/ + +vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable) +{ + uint32_t tempValue; + const int32_t *pTable = pDecodeTable; + vc1_Status status = VC1_STATUS_OK; + int32_t i, j, maxBits, loopCount, totalBits, value; + + maxBits = *pTable++; + loopCount = *pTable++; + totalBits = 0; + for (i = 0; i < loopCount; i++) + totalBits += *pTable++; + + if (totalBits != maxBits) + return VC1_STATUS_PARSE_ERROR; + + value = 0; + for (i = 0; i < maxBits; i++) + { + VC1_GET_BITS9(1, tempValue); + value = (value << 1) | tempValue; + loopCount = *pTable++; + if (loopCount == -1) + break; + for (j = 0; j < loopCount; j++) + { + if (value == *pTable++) + { + *pDst = *pTable; + return status; + } + else + pTable++; + } + } + + return status; +} + +/*----------------------------------------------------------------------------*/ + +vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable, + int8_t *pFirst, int16_t *pSecond) +{ + uint32_t tempValue; + const int32_t *pTable = pDecodeTable; + vc1_Status status = VC1_STATUS_OK; + int32_t i, j, maxBits, loopCount, totalBits, value; + + maxBits = *pTable++; + loopCount = *pTable++; + totalBits = 0; + for (i = 0; i < loopCount; i++) + totalBits += *pTable++; + + if (totalBits != maxBits) + return VC1_STATUS_PARSE_ERROR; + + value = 0; + for (i = 0; i < maxBits; i++) + { + VC1_GET_BITS9(1, tempValue); + value = (value << 1) | tempValue; + loopCount = *pTable++; + if (loopCount == -1) + break; + for (j = 0; j < loopCount; j++) + { + if (value == *pTable++) + { + *pFirst = *pTable++; + *pSecond = *pTable; + return status; + } + else + pTable += 2; + } + } + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c new file mode 100644 index 0000000..1a37929 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c @@ -0,0 +1,101 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. 
+// +// Description: Parses VC-1 picture layer for progressive I picture in simple +// or main profile bitstream or progressive BI picture in main profile +// bitstream. +// +*/ + +#include "vc1parse.h" + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses progressive I picture for simple + * or main profile bitstream or progressive BI picture in main profile + * bitstream. This parser starts after PTYPE was parsed but stops before + * parsing of macroblock layer. + * Table 16 of SMPTE 421M after processing up to PTYPE for I picture. + * Table 17 of SMPTE 421M after processing up to PTYPE for BI picture. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture(void* ctxt, vc1_Info *pInfo) +{ + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + /* rounding control is implied for simple and main profile, SMPTE 421M 8.3.7. + For each I or BI frame, RND shall be set to 1 */ + if (md->PROFILE != VC1_PROFILE_ADVANCED) + { + picLayerHeader->RNDCTRL = md->RNDCTRL | 1 ; + md->RNDCTRL = picLayerHeader->RNDCTRL; + } + + + if (picLayerHeader->PTYPE == VC1_BI_FRAME) + { + if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) + != VC1_STATUS_OK) + { + return status; + } + if (picLayerHeader->BFRACTION_DEN != VC1_BFRACTION_BI) + return VC1_STATUS_PARSE_ERROR; + } + + VC1_GET_BITS9(7, tempValue); /* BF. */ + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else picLayerHeader->HALFQP=0; + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + /* MVRANGE but only for main profile. */ + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->MULTIRES == 1 && picLayerHeader->PTYPE != VC1_BI_FRAME) + { + VC1_GET_BITS9(2, tempValue); /* RESPIC. */ + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + if (picLayerHeader->TRANSACFRM2) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + picLayerHeader->TRANSACFRM2 += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c new file mode 100644 index 0000000..03aeb79 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c @@ -0,0 +1,257 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. 
+// +// Description: Parses VC-1 picture layer for progressive I or BI picture in +// advanced profile bitstream. +// +*/ + +#include "vc1parse.h" +#include "viddec_fw_debug.h" +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses progressive I or BI picture for + * advanced profile bitstream. + * Table 18 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK) + { + return status; + } + + if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8)) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (picLayerHeader->CONDOVER) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (! picLayerHeader->CONDOVER) + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL; + else + { + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, + md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK) + { + return status; + } + } + } + else + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + if (picLayerHeader->TRANSACFRM2) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + picLayerHeader->TRANSACFRM2 += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + status = vc1_VOPDQuant(ctxt, pInfo); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace I or BI frame for + * advanced profile bitstream. + * Table 82 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_FIELDTX)) != VC1_STATUS_OK) + { + return status; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK) + { + return status; + } + + if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8)) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (picLayerHeader->CONDOVER) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (! 
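+ /* CONDOVER is truncated unary: '0' -> no conditional overlap,
+ '10' -> overlap-smooth every macroblock, '11' -> a per-MB OVERFLAGS
+ bitplane follows. */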
picLayerHeader->CONDOVER) + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL; + else + { + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, + md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK) + { + return status; + } + } + } + else + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + if (picLayerHeader->TRANSACFRM2) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + picLayerHeader->TRANSACFRM2 += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + status = vc1_VOPDQuant(ctxt, pInfo); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace I or BI field for + * advanced profile bitstream. + * Table 87 of SMPTE 421M after processing up to BFRACTION by + * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + // Reset MVMODE when the second field is an I picture + // to avoid carrying forward the mvmode values from previous field + // especially the intensity compensation value + picLayerHeader->MVMODE = 0; + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) { + DEB("Error parsing I field \n"); + return status; + } + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else + picLayerHeader->HALFQP = 0; + + if (md->QUANTIZER == 1) { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + if (md->POSTPROCFLAG == 1) + VC1_GET_BITS9(2, tempValue); /* POSTPROC. */ + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, (md->heightMB+1)/2, BPP_ACPRED)) != + VC1_STATUS_OK) + { + DEB("Error parsing I field \n"); + return status; + } + + if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8)) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (picLayerHeader->CONDOVER) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (! 
picLayerHeader->CONDOVER) + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL; + else + { + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, + (md->heightMB+1)/2, BPP_OVERFLAGS)) != + VC1_STATUS_OK) + { + DEB("Error parsing I field \n"); + return status; + } + } + } + else + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + if (picLayerHeader->TRANSACFRM2) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + picLayerHeader->TRANSACFRM2 += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + status = vc1_VOPDQuant(ctxt, pInfo); + if (status != VC1_STATUS_OK) { + DEB("Error parsing I field \n"); + return status; + } + + /* Skip parsing of macroblock layer. */ + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c new file mode 100644 index 0000000..7cbcc34 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c @@ -0,0 +1,82 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 syntax elements MVRANGE and DMVRANGE. +// +*/ + +#include "vc1parse.h" + +/*------------------------------------------------------------------------------ + * Parse syntax element MVRANGE, which exists for main and advanced profiles. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if (md->EXTENDED_MV == 1) + { + VC1_GET_BITS9(1, picLayerHeader->MVRANGE); + if (picLayerHeader->MVRANGE) + { + VC1_GET_BITS9(1, picLayerHeader->MVRANGE); + if (picLayerHeader->MVRANGE) + { + VC1_GET_BITS9(1, picLayerHeader->MVRANGE); + picLayerHeader->MVRANGE += 1; + } + picLayerHeader->MVRANGE += 1; + } + } + else + picLayerHeader->MVRANGE = 0; + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse syntax element DMVRANGE. 
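+ * DMVRANGE is truncated unary: '0' -> no extension, '10' -> extended
+ * horizontal range, '110' -> extended vertical range, '111' -> both.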
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if (md->EXTENDED_DMV == 1) + { + VC1_GET_BITS9(1, picLayerHeader->DMVRANGE); + if (picLayerHeader->DMVRANGE == 0) + picLayerHeader->DMVRANGE = VC1_DMVRANGE_NONE; + else + { + VC1_GET_BITS9(1, picLayerHeader->DMVRANGE); + if (picLayerHeader->DMVRANGE == 0) + picLayerHeader->DMVRANGE = VC1_DMVRANGE_HORIZONTAL_RANGE; + else + { + VC1_GET_BITS9(1, picLayerHeader->DMVRANGE); + if (picLayerHeader->DMVRANGE == 0) + picLayerHeader->DMVRANGE = VC1_DMVRANGE_VERTICAL_RANGE; + else + { + picLayerHeader->DMVRANGE = + VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE; + } + } + } + } + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c new file mode 100644 index 0000000..c363456 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c @@ -0,0 +1,101 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for simple and main profiles. +// +*/ + +#include "vc1parse.h" + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses the picture header for simple or + * main profile down to macroblock layer. + * Table 16 of SMPTE 421M after processing up to PTYPE for I picture. + * Table 17 of SMPTE 421M after processing up to PTYPE for BI picture. + * Table 19 of SMPTE 421M after processing up to PTYPE for P picture. + * Table 21 of SMPTE 421M after processing up to PTYPE for B picture. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo) +{ + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + int32_t result; + + if (md->PROFILE != VC1_PROFILE_ADVANCED) + { + // As per spec, for main/simple profile, if the size of the coded picture is <= 1B, + // it shall be treated as a skipped frame. + // In content with skipped frames, the data is "00". + // rcv to vc1 conversion process adds an additional byte (0x80) to the picture, hence + // the data looks like "00 80" + // Hence if data is <= 2B, we will consider it skipped (check for 16+1b, if it fails, the frame is skipped). + result = viddec_pm_peek_bits(ctxt, &tempValue, 17); + if(result == -1) + { + picLayerHeader->PTYPE = VC1_SKIPPED_FRAME; + return status; + } + } + + if (md->FINTERPFLAG == 1) + { + VC1_GET_BITS9(1, tempValue); /* INTERPFRM. */ + } + + VC1_GET_BITS9(2, tempValue); /* FRMCNT. 
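+ The two FRMCNT bits are read and discarded. The PTYPE element parsed
+ below is variable-length coded: with MAXBFRAMES == 0, '0' -> I and
+ '1' -> P; otherwise '1' -> P, '01' -> I and '00' -> B, where a B is
+ promoted to BI if the next seven bits peek as 1111111.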
*/ + + if (md->RANGERED == 1) + { + VC1_GET_BITS9(1, picLayerHeader->RANGEREDFRM); + } + + if (md->MAXBFRAMES == 0) + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE == 0) + picLayerHeader->PTYPE = VC1_I_FRAME; + else + picLayerHeader->PTYPE = VC1_P_FRAME; + } + else + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE == 0) + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE == 0) { + picLayerHeader->PTYPE = VC1_B_FRAME; /* Or VC1_BI_FRAME. */ + /* if peek(7) = 0b1111111 then ptype = bi */ + VC1_PEEK_BITS( 7, tempValue ); + if ( tempValue == 0x7f ) + picLayerHeader->PTYPE = VC1_BI_FRAME; + } else + picLayerHeader->PTYPE = VC1_I_FRAME; + } + else + picLayerHeader->PTYPE = VC1_P_FRAME; + } + + if (picLayerHeader->PTYPE == VC1_I_FRAME || + picLayerHeader->PTYPE == VC1_BI_FRAME) + { + status = vc1_ParsePictureHeader_ProgressiveIpicture(ctxt, pInfo); + } + else if (picLayerHeader->PTYPE == VC1_P_FRAME) + status = vc1_ParsePictureHeader_ProgressivePpicture(ctxt, pInfo); + else if (picLayerHeader->PTYPE == VC1_B_FRAME) + status = vc1_ParsePictureHeader_ProgressiveBpicture(ctxt, pInfo); + else + status = VC1_STATUS_PARSE_ERROR; + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c new file mode 100644 index 0000000..fa9c3c7 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c @@ -0,0 +1,403 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for advanced profile. +// +*/ + +#include "vc1parse.h" +#include "viddec_fw_debug.h" + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses the picture header for advanced + * profile down to POSTPROC syntax element. + * Table 18 of SMPTE 421M for progressive I or BI picture. + * Table 20 of SMPTE 421M for progressive P picture. + * Table 22 of SMPTE 421M for progressive B picture. + * Table 23 of SMPTE 421M for skipped picture. + * Table 82 of SMPTE 421M for interlace I or BI frame. + * Table 83 of SMPTE 421M for interlace P frame. + * Table 84 of SMPTE 421M for interlace B frame. 
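+ * When INTERLACE is set, FCM is parsed first as '0' -> progressive,
+ * '10' -> frame interlace, '11' -> field interlace; field pictures are
+ * rejected here and handled by vc1_ParseFieldHeader_Adv() instead.
+ * PTYPE is likewise truncated unary: '0' -> P, '10' -> B, '110' -> I,
+ * '1110' -> BI, '1111' -> skipped.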
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint32_t i = 0; + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + uint32_t number_of_pan_scan_window; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if (md->INTERLACE == 1) + { + VC1_GET_BITS9(1, picLayerHeader->FCM); + if (picLayerHeader->FCM) + { + VC1_GET_BITS9(1, picLayerHeader->FCM); + if (picLayerHeader->FCM) + { + picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE; + return VC1_STATUS_PARSE_ERROR; + } + else + picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE; + } + else + picLayerHeader->FCM = VC1_FCM_PROGRESSIVE; + } + else + picLayerHeader->FCM = VC1_FCM_PROGRESSIVE; + + + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE) + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE) + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE) + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE) + picLayerHeader->PTYPE = VC1_SKIPPED_FRAME; + else + picLayerHeader->PTYPE = VC1_BI_FRAME; + } + else + picLayerHeader->PTYPE = VC1_I_FRAME; + } + else + picLayerHeader->PTYPE = VC1_B_FRAME; + } + else + picLayerHeader->PTYPE = VC1_P_FRAME; + + if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME) + { + if (md->TFCNTRFLAG) + { + VC1_GET_BITS9(8, picLayerHeader->TFCNTR); /* TFCNTR. */ + } + } + + if (md->PULLDOWN) + { + if ((md->INTERLACE == 0) || (md->PSF == 1)) + { + VC1_GET_BITS9(2, picLayerHeader->RPTFRM); + } + else + { + VC1_GET_BITS9(1, picLayerHeader->TFF); + VC1_GET_BITS9(1, picLayerHeader->RFF); + } + } + + if (md->PANSCAN_FLAG == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT); /* PS_PRESENT. */ + if (picLayerHeader->PS_PRESENT == 1) + { + if ((md->INTERLACE == 1) && + (md->PSF == 0)) + { + if (md->PULLDOWN == 1) + number_of_pan_scan_window = 2 + picLayerHeader->RFF; + else + number_of_pan_scan_window = 2; + } + else + { + if (md->PULLDOWN == 1) + number_of_pan_scan_window = 1 + picLayerHeader->RPTFRM; + else + number_of_pan_scan_window = 1; + } + picLayerHeader->number_of_pan_scan_window = number_of_pan_scan_window; + + for (i = 0; i < number_of_pan_scan_window; i++) + { + VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */ + VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */ + VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */ + VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. */ + } + } + } + + if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME) + { + VC1_GET_BITS9(1, picLayerHeader->RNDCTRL); + md->RNDCTRL = picLayerHeader->RNDCTRL; + + if ((md->INTERLACE == 1) || + (picLayerHeader->FCM != VC1_FCM_PROGRESSIVE)) + { + VC1_GET_BITS9(1, picLayerHeader->UVSAMP); + } + + if ((md->FINTERPFLAG == 1) && + (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE)) + { + VC1_GET_BITS9(1, tempValue); /* INTERPFRM. 
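+ For progressive B frames the BFRACTION VLC parsed below yields the
+ NUM/DEN pair, e.g. '000' -> 1/2, '001' -> 1/3 and '110' -> 2/5; the
+ seven-bit escape codes cover 3/5 through 7/8 plus the invalid and BI
+ markers.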
*/ + } + + if ((picLayerHeader->PTYPE == VC1_B_FRAME) && + (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE)) + { + if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) + != VC1_STATUS_OK) + { + return status; + } + } + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else + picLayerHeader->HALFQP = 0; + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + if (md->POSTPROCFLAG == 1) + { + VC1_GET_BITS9(2, picLayerHeader->POSTPROC); + } + } + + return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo); +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses the picture header for advanced + * profile down to BFRACTION syntax element. + * Table 85 of SMPTE 421M. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParseFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint32_t i = 0; + vc1_Status status = VC1_STATUS_OK; + uint32_t number_of_pan_scan_window; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + VC1_GET_BITS9(1, picLayerHeader->FCM); + if (picLayerHeader->FCM) + { + VC1_GET_BITS9(1, picLayerHeader->FCM); + if (picLayerHeader->FCM) + picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE; + else + picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE; + } + else + picLayerHeader->FCM = VC1_FCM_PROGRESSIVE; + if (picLayerHeader->FCM != VC1_FCM_FIELD_INTERLACE) + return VC1_STATUS_PARSE_ERROR; + + VC1_GET_BITS9(3, picLayerHeader->FPTYPE); + if (picLayerHeader->FPTYPE == 0) + { + picLayerHeader->PTypeField1 = VC1_I_FRAME; + picLayerHeader->PTypeField2 = VC1_I_FRAME; + } + else if (picLayerHeader->FPTYPE == 1) + { + picLayerHeader->PTypeField1 = VC1_I_FRAME; + picLayerHeader->PTypeField2 = VC1_P_FRAME; + } + else if (picLayerHeader->FPTYPE == 2) + { + picLayerHeader->PTypeField1 = VC1_P_FRAME; + picLayerHeader->PTypeField2 = VC1_I_FRAME; + } + else if (picLayerHeader->FPTYPE == 3) + { + picLayerHeader->PTypeField1 = VC1_P_FRAME; + picLayerHeader->PTypeField2 = VC1_P_FRAME; + } + else if (picLayerHeader->FPTYPE == 4) + { + picLayerHeader->PTypeField1 = VC1_B_FRAME; + picLayerHeader->PTypeField2 = VC1_B_FRAME; + } + else if (picLayerHeader->FPTYPE == 5) + { + picLayerHeader->PTypeField1 = VC1_B_FRAME; + picLayerHeader->PTypeField2 = VC1_BI_FRAME; + } + else if (picLayerHeader->FPTYPE == 6) + { + picLayerHeader->PTypeField1 = VC1_BI_FRAME; + picLayerHeader->PTypeField2 = VC1_B_FRAME; + } + else if (picLayerHeader->FPTYPE == 7) + { + picLayerHeader->PTypeField1 = VC1_BI_FRAME; + picLayerHeader->PTypeField2 = VC1_BI_FRAME; + } + + if (md->TFCNTRFLAG) + { + VC1_GET_BITS9(8, picLayerHeader->TFCNTR); + } + + if (md->PULLDOWN == 1) + { + if (md->PSF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->RPTFRM); + } + else + { + VC1_GET_BITS9(1, picLayerHeader->TFF); + VC1_GET_BITS9(1, picLayerHeader->RFF); + } + } else + picLayerHeader->TFF = 1; + + if (md->PANSCAN_FLAG == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT); + if (picLayerHeader->PS_PRESENT) + { + if (md->PULLDOWN) + number_of_pan_scan_window = 2 + picLayerHeader->RFF; + else + number_of_pan_scan_window = 2; + 
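+ /* An interlaced field pair always carries two pan-scan windows, plus
+ one more when pulldown repeats a field (RFF). */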
picLayerHeader->number_of_pan_scan_window =number_of_pan_scan_window; + + for (i = 0; i < number_of_pan_scan_window; i++) + { + VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */ + VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */ + VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */ + VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. */ + } + } + } + VC1_GET_BITS9(1, md->RNDCTRL); + +#ifdef VBP + picLayerHeader->RNDCTRL = md->RNDCTRL; +#endif + + VC1_GET_BITS9(1, picLayerHeader->UVSAMP); + + if ((md->REFDIST_FLAG == 1) && (picLayerHeader->FPTYPE <= 3)) + { + int32_t tmp; + if ((status = vc1_DecodeHuffmanOne(ctxt, &tmp, + VC1_REFDIST_TBL)) != VC1_STATUS_OK) + { + return status; + } + md->REFDIST = tmp; + } + + if ((picLayerHeader->FPTYPE >= 4) && (picLayerHeader->FPTYPE <= 7)) + { + if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != + VC1_STATUS_OK) + { + return status; + } + } + + if (picLayerHeader->CurrField == 0) + { + picLayerHeader->PTYPE = picLayerHeader->PTypeField1; + picLayerHeader->BottomField = (uint8_t) (1 - picLayerHeader->TFF); + } + else + { + picLayerHeader->BottomField = (uint8_t) (picLayerHeader->TFF); + picLayerHeader->PTYPE = picLayerHeader->PTypeField2; + } + + return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo); +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function calls the appropriate function to further + * parse the picture header for advanced profile down to macroblock layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_PARSE_ERROR; + + if (pInfo->picLayerHeader.FCM == VC1_FCM_PROGRESSIVE) + { + if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) || + (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME)) + { + status = vc1_ParsePictureHeader_ProgressiveIpicture_Adv(ctxt, pInfo); + } + else if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + status = vc1_ParsePictureHeader_ProgressivePpicture_Adv(ctxt, pInfo); + else if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) + status = vc1_ParsePictureHeader_ProgressiveBpicture_Adv(ctxt, pInfo); + else if (pInfo->picLayerHeader.PTYPE == VC1_SKIPPED_FRAME) + status = VC1_STATUS_OK; + } + else if (pInfo->picLayerHeader.FCM == VC1_FCM_FRAME_INTERLACE) + { + if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) || + (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME)) + { + status = vc1_ParsePictureHeader_InterlaceIpicture_Adv(ctxt, pInfo); + } + else if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + status = vc1_ParsePictureHeader_InterlacePpicture_Adv(ctxt, pInfo); + else if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) + status = vc1_ParsePictureHeader_InterlaceBpicture_Adv(ctxt, pInfo); + else if (pInfo->picLayerHeader.PTYPE == VC1_SKIPPED_FRAME) + status = VC1_STATUS_OK; + } + else if (pInfo->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) + { + int ptype; + if( pInfo->picLayerHeader.CurrField == 0) + ptype = pInfo->picLayerHeader.PTypeField1; + else + ptype = pInfo->picLayerHeader.PTypeField2; + + if ((ptype == VC1_I_FRAME) || + (ptype == VC1_BI_FRAME)) + { + status = vc1_ParseFieldHeader_InterlaceIpicture_Adv(ctxt, pInfo); + } + else if (ptype == VC1_P_FRAME) + status = vc1_ParseFieldHeader_InterlacePpicture_Adv(ctxt, pInfo); + else if 
(ptype == VC1_B_FRAME) + status = vc1_ParseFieldHeader_InterlaceBpicture_Adv(ctxt, pInfo); + else if (ptype == VC1_SKIPPED_FRAME) + status = VC1_STATUS_OK; + } + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c new file mode 100644 index 0000000..ba9c756 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c @@ -0,0 +1,149 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for progressive P picture in simple +// or main profile bitstream. +// +*/ + +#include "vc1parse.h" + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses progressive P picture for simple + * or main profile bitstream. This parser starts after PTYPE was parsed but + * stops before parsing of macroblock layer. + * Table 19 of SMPTE 421M after processing up to PTYPE for P picture. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInfo) +{ + uint8_t bit_count; + const uint8_t *table; + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + /* rounding control is implied for simple and main profile, SMPTE 421M 8.3.7. + It toggles back and forth between 0 and 1 for P frames */ + if (md->PROFILE != VC1_PROFILE_ADVANCED) + { + picLayerHeader->RNDCTRL = md->RNDCTRL ^ 1 ; + md->RNDCTRL = picLayerHeader->RNDCTRL; + } + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else picLayerHeader->HALFQP=0; + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + /* MVRANGE. */ + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->MULTIRES == 1) + VC1_GET_BITS9(2, tempValue); /* RESPIC. 
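                                       (two-bit MULTIRES resolution index; only the bits are consumed here)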
*/ + + if (picLayerHeader->PQUANT > 12) + table = VC1_MVMODE_LOW_TBL; + else + table = VC1_MVMODE_HIGH_TBL; + + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + while ((picLayerHeader->MVMODE == 0) && (bit_count < 3)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + bit_count++; + } + if (bit_count == 3) + bit_count += picLayerHeader->MVMODE; + picLayerHeader->MVMODE = table[bit_count]; + + if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) + { + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + bit_count++; + } + if (bit_count == 2 && picLayerHeader->MVMODE2 == 0) + bit_count++; + picLayerHeader->MVMODE2 = table[bit_count]; + VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); + VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); + } +#ifdef VBP + else + picLayerHeader->MVMODE2 = 0; +#else + else + picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; +#endif + + if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) || + ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) && + (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV))) + { + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_MVTYPEMB)) + != VC1_STATUS_OK) + { + return status; + } + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(2, picLayerHeader->MVTAB); + VC1_GET_BITS9(2, picLayerHeader->CBPTAB); + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c new file mode 100644 index 0000000..144c138 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c @@ -0,0 +1,368 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for progressive P picture in advanced +// profile bitstream. +// +*/ + +#include "vc1parse.h" +#include "viddec_fw_debug.h" +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses progressive P picture for advanced + * profile bitstream. + * Table 20 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. 
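+ * MVMODE below is a truncated-unary code, up to four bits, indexing a table
+ * selected by PQUANT: bits "1" -> table[0], "01" -> table[1], "001" ->
+ * table[2], "0000" -> table[3], "0001" -> table[4].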
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint8_t bit_count; + const uint8_t *table; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + /* MVRANGE. */ + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQUANT > 12) + table = VC1_MVMODE_LOW_TBL; + else + table = VC1_MVMODE_HIGH_TBL; + + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + while ((picLayerHeader->MVMODE == 0) && (bit_count < 3)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + bit_count++; + } + if (bit_count == 3) + bit_count += picLayerHeader->MVMODE; + picLayerHeader->MVMODE = table[bit_count]; + + if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) + { + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + bit_count++; + } + if (bit_count == 2 && picLayerHeader->MVMODE2 == 0) + bit_count++; + picLayerHeader->MVMODE2 = table[bit_count]; + VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); + VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); + md->LUMSCALE2 = picLayerHeader->LUMSCALE; + md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; + } + else +#ifdef VBP + picLayerHeader->MVMODE2 = 0; +#else + picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; +#endif + + if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) || + ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) && + (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV))) + { + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_MVTYPEMB)) != + VC1_STATUS_OK) + { + return status; + } + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(2, picLayerHeader->MVTAB); + VC1_GET_BITS9(2, picLayerHeader->CBPTAB); + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace P frame for advanced + * profile bitstream. + * Table 83 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + /* MVRANGE. */ + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + /* DMVRANGE. 
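      Read by vc1_DMVRangeDecode() only when EXTENDED_DMV was set at the entry point.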
*/ + if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + VC1_GET_BITS9(1, picLayerHeader->MV4SWITCH); + + VC1_GET_BITS9(1, picLayerHeader->INTCOMP); + if (picLayerHeader->INTCOMP) + { + VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); + VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); + md->LUMSCALE2 = picLayerHeader->LUMSCALE; + md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(2, picLayerHeader->MBMODETAB); + VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */ + VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ + VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */ + + if (picLayerHeader->MV4SWITCH == 1) + { + VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ + } + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace P field for advanced + * profile bitstream. + * Table 88 of SMPTE 421M after processing up to BFRACTION by + * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint8_t bit_count; + const uint8_t *table; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else + picLayerHeader->HALFQP = 0; + + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + if (md->POSTPROCFLAG == 1) + { + VC1_GET_BITS9(2, picLayerHeader->POSTPROC); + } + + VC1_GET_BITS9(1, picLayerHeader->NUMREF); + + if (picLayerHeader->NUMREF == 0) + { + VC1_GET_BITS9(1, picLayerHeader->REFFIELD); + } + + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) { + DEB("Error in vc1_MVRangeDecode \n"); + return status; + } + + if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQUANT > 12) + table = VC1_MVMODE_LOW_TBL; + else + table = VC1_MVMODE_HIGH_TBL; + + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + while ((picLayerHeader->MVMODE == 0) && (bit_count < 2)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + bit_count++; + } + if (bit_count == 2 && picLayerHeader->MVMODE == 0) { + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + + if ( picLayerHeader->MVMODE == 1) + bit_count ++; + + bit_count++; + } + picLayerHeader->MVMODE = table[bit_count]; + + if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) + { + bit_count = 0; + 
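/* MVMODE2 reuses the truncated-unary scheme and PQUANT-selected table from MVMODE. */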
VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + bit_count++; + } + if (bit_count == 2 && picLayerHeader->MVMODE2 == 0) + bit_count++; + picLayerHeader->MVMODE2 = table[bit_count]; + + VC1_GET_BITS9(1, md->INTCOMPFIELD); + if (md->INTCOMPFIELD == 1) + md->INTCOMPFIELD = VC1_INTCOMP_BOTH_FIELD; + else + { + VC1_GET_BITS9(1, md->INTCOMPFIELD); + if(md->INTCOMPFIELD == 1) + md->INTCOMPFIELD = VC1_INTCOMP_BOTTOM_FIELD; + else + md->INTCOMPFIELD = VC1_INTCOMP_TOP_FIELD; + } + VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); /* LUMSCALE1. */ + VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); /* LUMSHIFT1. */ + if ( md->INTCOMPFIELD == VC1_INTCOMP_BOTTOM_FIELD ) { + md->LUMSCALE2 = picLayerHeader->LUMSCALE; + md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; + } + if (md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD) + { + VC1_GET_BITS9(6, md->LUMSCALE2); + VC1_GET_BITS9(6, md->LUMSHIFT2); + } + } + else +#ifdef VBP + picLayerHeader->MVMODE2 = 0; +#else + picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; +#endif + + VC1_GET_BITS9(3, picLayerHeader->MBMODETAB); + + if (picLayerHeader->NUMREF) + { + VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. */ + } + else + { + VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */ + } + + VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ + +#ifdef VBP + if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) +#else + if (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV) +#endif + { + VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ + } + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + picLayerHeader->TRANSACFRM2 = 0; + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c new file mode 100644 index 0000000..559a0dd --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c @@ -0,0 +1,130 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 syntax elements VOPDQUANT and DQUANT. +// +*/ + +#include "vc1parse.h" + +#define VC1_UNDEF_PQUANT 0 + +static const uint8_t MapPQIndToQuant_Impl[] = +{ + VC1_UNDEF_PQUANT, + 1, 2, 3, 4, 5, 6, 7, 8, + 6, 7, 8, 9, 10, 11, 12, 13, + 14, 15, 16, 17, 18, 19, 20, 21, + 22, 23, 24, 25, 27, 29, 31 +}; + +/*------------------------------------------------------------------------------ + * Parse syntax element VOPDQuant as defined in Table 24 of SMPTE 421M. 
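+ * With DQUANT == 2, PQDIFF (plus ABSPQ when PQDIFF == 7) is coded directly;
+ * with DQUANT == 1, DQUANTFRM and DQPROFILE select which edges or macroblocks
+ * may switch to the alternate step. E.g. PQDIFF == 3 yields
+ * ALTPQUANT = PQUANT + 3 + 1, while PQDIFF == 7 escapes to the five-bit ABSPQ.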
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if (md->DQUANT == 0) + return status; + + if (md->DQUANT == 2) + { + VC1_GET_BITS9(3, picLayerHeader->PQDIFF); + if (picLayerHeader->PQDIFF == 7) + { + VC1_GET_BITS9(5, picLayerHeader->ABSPQ); + } + } + else + { + VC1_GET_BITS9(1, picLayerHeader->DQUANTFRM); + if (picLayerHeader->DQUANTFRM == 1) + { + VC1_GET_BITS9(2, picLayerHeader->DQPROFILE); + if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_SNGLEDGES) + { + VC1_GET_BITS9(2, picLayerHeader->DQSBEDGE); + } + else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_DBLEDGES) + { +#ifdef VBP + VC1_GET_BITS9(2, picLayerHeader->DQDBEDGE); +#else + VC1_GET_BITS9(2, picLayerHeader->DQSBEDGE); /* DQDBEDGE. */ +#endif + } + else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_ALLMBLKS) + { + VC1_GET_BITS9(1, picLayerHeader->DQBILEVEL); + } + if (! (picLayerHeader->DQPROFILE == VC1_DQPROFILE_ALLMBLKS && + picLayerHeader->DQBILEVEL == 0)) + { + VC1_GET_BITS9(3, picLayerHeader->PQDIFF); + if (picLayerHeader->PQDIFF == 7) + { + VC1_GET_BITS9(5, picLayerHeader->ABSPQ); + } + } + } + } +#ifdef VBP + if ((picLayerHeader->DQUANTFRM == 1 && md->DQUANT == 1) || (md->DQUANT == 2)) + { + if (picLayerHeader->PQDIFF == 7) + { + picLayerHeader->ALTPQUANT = picLayerHeader->ABSPQ; + } + else + { + picLayerHeader->ALTPQUANT = picLayerHeader->PQUANT + picLayerHeader->PQDIFF + 1; + } + } +#endif + return status; +} + +/*------------------------------------------------------------------------------ + * Compute value for PQUANT syntax element that does not exist in bitstreams for + * progressive I and BI pictures. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_CalculatePQuant(vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + picLayerHeader->PQUANT = picLayerHeader->PQINDEX; + picLayerHeader->UniformQuant = VC1_QUANTIZER_UNIFORM; + + if (md->QUANTIZER == 0) + { + if (picLayerHeader->PQINDEX < 9) + picLayerHeader->UniformQuant = VC1_QUANTIZER_UNIFORM; + else + { + picLayerHeader->UniformQuant = VC1_QUANTIZER_NONUNIFORM; + picLayerHeader->PQUANT = + MapPQIndToQuant_Impl[picLayerHeader->PQINDEX]; + } + } + else + { + if (md->QUANTIZER == 2) + picLayerHeader->UniformQuant = VC1_QUANTIZER_NONUNIFORM; + } + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c new file mode 100644 index 0000000..6af6f09 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c @@ -0,0 +1,345 @@ +#include "viddec_fw_debug.h" // For DEB +#include "viddec_parser_ops.h" // For parser helper functions +#include "vc1.h" // For the parser structure +#include "vc1parse.h" // For vc1 parser helper functions +#ifdef VBP +#include "viddec_pm.h" +#endif +#define vc1_is_frame_start_code( ch ) \ + (( vc1_SCField == ch ||vc1_SCSlice == ch || vc1_SCFrameHeader == ch ) ? 
1 : 0)

+/* init function */
+#ifdef VBP
+void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#else
+static void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#endif
+{
+    vc1_viddec_parser_t *parser = ctxt;
+    int i;
+
+    persist_mem = persist_mem;
+
+    for(i=0; i<VC1_NUM_REFERENCE_FRAMES; i++)
+    {
+        parser->ref_frame[i].id   = -1; /* first I frame checks that value */
+        parser->ref_frame[i].anchor[0] = 1;
+        parser->ref_frame[i].anchor[1] = 1;
+        parser->ref_frame[i].intcomp_top = 0;
+        parser->ref_frame[i].intcomp_bot = 0;
+    }
+
+    parser->intcomp_top[0] = 0;
+    parser->intcomp_bot[0] = 0;
+    parser->intcomp_top[1] = 0;
+    parser->intcomp_bot[1] = 0;
+    parser->is_reference_picture = false;
+
+    memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader));
+
+    if(preserve)
+    {
+        parser->sc_seen &= VC1_EP_MASK;
+        parser->sc_seen_since_last_wkld &= VC1_EP_MASK;
+    }
+    else
+    {
+        parser->sc_seen = VC1_SC_INVALID;
+        parser->sc_seen_since_last_wkld = VC1_SC_INVALID;
+        memset(&parser->info.metadata, 0, sizeof(parser->info.metadata));
+    }
+
+    return;
+} // viddec_vc1_init
+
+static void vc1_swap_intcomp(vc1_viddec_parser_t *parser)
+{
+    parser->intcomp_top[1] = parser->intcomp_top[0];
+    parser->intcomp_bot[1] = parser->intcomp_bot[0];
+    parser->intcomp_top[0] = 0;
+    parser->intcomp_bot[0] = 0;
+
+    return;
+} // vc1_swap_intcomp
+
+#ifdef VBP
+uint32_t viddec_vc1_parse(void *parent, void *ctxt)
+#else
+static uint32_t viddec_vc1_parse(void *parent, void *ctxt)
+#endif
+{
+    vc1_viddec_parser_t *parser = ctxt;
+    uint32_t sc=0x0;
+    int32_t ret=0, status=0;
+
+#ifdef VBP
+    /* This works only if there is one slice and no start codes */
+    /* A better fix would be to insert start codes if there aren't any. */
+    ret = viddec_pm_peek_bits(parent, &sc, 32);
+    if ((sc > 0x0100) && (sc < 0x0200)) /* a start code will be in this range. */
+    {
+        ret = viddec_pm_get_bits(parent, &sc, 32);
+    }
+    else
+    {
+        /* In cases where we get a buffer with no start codes, we assume */
+        /* that this is a frame of data. We may have to fix this later.
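           (VBP callers are expected to hand over one whole frame per buffer,
           hence the frame-header default below.)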
*/ + sc = vc1_SCFrameHeader; + } +#else + ret = viddec_pm_get_bits(parent, &sc, 32); +#endif + sc = sc & 0xFF; + parser->is_frame_start = (sc == vc1_SCFrameHeader); + DEB("START_CODE = %02x\n", sc); + switch( sc ) + { + case vc1_SCSequenceHeader: + { + uint32_t data=0; + parser->ref_frame[0].anchor[0] = 1; + parser->ref_frame[0].anchor[1] = 1; + parser->ref_frame[1].anchor[0] = 1; + parser->ref_frame[1].anchor[1] = 1; + memset( &parser->info.metadata, 0, sizeof(parser->info.metadata)); + /* look if we have a rcv header for main or simple profile */ + ret = viddec_pm_peek_bits(parent,&data ,2); + + if (data == 3) + { + status = vc1_ParseSequenceLayer(parent, &parser->info); + } + else + { + status = vc1_ParseRCVSequenceLayer(parent, &parser->info); + } + parser->sc_seen = VC1_SC_SEQ; + parser->sc_seen_since_last_wkld |= VC1_SC_SEQ; +#ifdef VBP + parser->start_code = VC1_SC_SEQ; +#endif + break; + } + + case vc1_SCEntryPointHeader: + { + status = vc1_ParseEntryPointLayer(parent, &parser->info); + parser->sc_seen |= VC1_SC_EP; + // Clear all bits indicating data below ep header + parser->sc_seen &= VC1_EP_MASK; + parser->sc_seen_since_last_wkld |= VC1_SC_EP; +#ifdef VBP + parser->start_code = VC1_SC_EP; +#endif + break; + } + + case vc1_SCFrameHeader: + { + memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader)); + status = vc1_ParsePictureLayer(parent, &parser->info); + if((parser->info.picLayerHeader.PTypeField1 == VC1_I_FRAME) || + (parser->info.picLayerHeader.PTypeField1 == VC1_P_FRAME) || + (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) || + (parser->info.picLayerHeader.PTYPE == VC1_P_FRAME)) + { + vc1_swap_intcomp(parser); + } + parser->sc_seen |= VC1_SC_FRM; + // Clear all bits indicating data below frm header + parser->sc_seen &= VC1_FRM_MASK; + parser->sc_seen_since_last_wkld |= VC1_SC_FRM; + vc1_start_new_frame ( parent, parser ); +#ifdef VBP + parser->start_code = VC1_SC_FRM; +#endif + break; + } + + case vc1_SCSlice: + { + status = vc1_ParseSliceLayer(parent, &parser->info); + parser->sc_seen_since_last_wkld |= VC1_SC_SLC; +#ifdef VBP + parser->start_code = VC1_SC_SLC; +#endif + break; + } + + case vc1_SCField: + { + parser->info.picLayerHeader.SLICE_ADDR = 0; + parser->info.picLayerHeader.CurrField = 1; + parser->info.picLayerHeader.REFFIELD = 0; + parser->info.picLayerHeader.NUMREF = 0; + parser->info.picLayerHeader.MBMODETAB = 0; + parser->info.picLayerHeader.MV4SWITCH = 0; + parser->info.picLayerHeader.DMVRANGE = 0; + parser->info.picLayerHeader.MVTAB = 0; + parser->info.picLayerHeader.MVMODE = 0; + parser->info.picLayerHeader.MVRANGE = 0; +#ifdef VBP + parser->info.picLayerHeader.raw_MVTYPEMB = 0; + parser->info.picLayerHeader.raw_DIRECTMB = 0; + parser->info.picLayerHeader.raw_SKIPMB = 0; + parser->info.picLayerHeader.raw_ACPRED = 0; + parser->info.picLayerHeader.raw_FIELDTX = 0; + parser->info.picLayerHeader.raw_OVERFLAGS = 0; + parser->info.picLayerHeader.raw_FORWARDMB = 0; + + memset(&(parser->info.picLayerHeader.MVTYPEMB), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.DIRECTMB), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.SKIPMB), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.ACPRED), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.FIELDTX), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.OVERFLAGS), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.FORWARDMB), 0, sizeof(vc1_Bitplane)); + + parser->info.picLayerHeader.ALTPQUANT = 0; + 
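/* The second field reuses this picLayerHeader, so clear anything its header may not re-code. */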
parser->info.picLayerHeader.DQDBEDGE = 0; + #endif + + status = vc1_ParseFieldLayer(parent, &parser->info); + if((parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME) || + (parser->info.picLayerHeader.PTypeField2 == VC1_P_FRAME)) + { + vc1_swap_intcomp(parser); + } + + parser->sc_seen |= VC1_SC_FLD; + parser->sc_seen_since_last_wkld |= VC1_SC_FLD; +#ifdef VBP + parser->start_code = VC1_SC_FLD; +#endif + break; + } + + case vc1_SCSequenceUser: + case vc1_SCEntryPointUser: + case vc1_SCFrameUser: + case vc1_SCSliceUser: + case vc1_SCFieldUser: + {/* Handle user data */ + status = vc1_ParseAndAppendUserData(parent, sc); //parse and add items + parser->sc_seen_since_last_wkld |= VC1_SC_UD; +#ifdef VBP + parser->start_code = VC1_SC_UD; +#endif + break; + } + + case vc1_SCEndOfSequence: + { + parser->sc_seen = VC1_SC_INVALID; + parser->sc_seen_since_last_wkld |= VC1_SC_INVALID; +#ifdef VBP + parser->start_code = VC1_SC_INVALID; +#endif + break; + } + default: /* Any other SC that is not handled */ + { + DEB("SC = %02x - unhandled\n", sc ); +#ifdef VBP + parser->start_code = VC1_SC_INVALID; +#endif + break; + } + } + + if( vc1_is_frame_start_code( sc ) ) { + vc1_parse_emit_current_frame( parent, parser ); + } + + return VIDDEC_PARSE_SUCESS; +} // viddec_vc1_parse + +/** + If a picture header was seen and the next start code is a sequence header, entrypoint header, + end of sequence or another frame header, this api returns frame done. + If a sequence header and a frame header was not seen before this point, all the + information needed for decode is not present and parser errors are reported. +*/ +#ifdef VBP +uint32_t viddec_vc1_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors) +#else +static uint32_t viddec_vc1_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors) +#endif +{ + vc1_viddec_parser_t *parser = ctxt; + int ret = VIDDEC_PARSE_SUCESS; + parent = parent; + switch (next_sc) + { + case vc1_SCFrameHeader: + if(((parser->sc_seen_since_last_wkld & VC1_SC_EP) || + (parser->sc_seen_since_last_wkld & VC1_SC_SEQ)) && + (!(parser->sc_seen_since_last_wkld & VC1_SC_FRM))) + { + break; + } + // Deliberate fall-thru case + case vc1_SCEntryPointHeader: + if((next_sc == vc1_SCEntryPointHeader) && + (parser->sc_seen_since_last_wkld & VC1_SC_SEQ) && + (!(parser->sc_seen_since_last_wkld & VC1_SC_EP))) + { + break; + } + // Deliberate fall-thru case + case vc1_SCSequenceHeader: + case vc1_SCEndOfSequence: + case VIDDEC_PARSE_EOS: + case VIDDEC_PARSE_DISCONTINUITY: + ret = VIDDEC_PARSE_FRMDONE; + // Set errors for progressive + if((parser->sc_seen & VC1_SC_SEQ) && (parser->sc_seen & VC1_SC_FRM)) + *codec_specific_errors = 0; + else + *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + vc1_end_frame(parser); + parser->sc_seen_since_last_wkld = VC1_SC_INVALID; + // TODO: Need to check for interlaced + break; + default: + ret = VIDDEC_PARSE_SUCESS; + break; + } //switch + DEB("sc: 0x%x, sc_seen: 0x%x, sc_since_last_wkld:%d, error:%d, ret: %d\n", + next_sc, parser->sc_seen, parser->sc_seen_since_last_wkld, + *codec_specific_errors, ret); + + return ret; +} // viddec_vc1_wkld_done + +#ifdef VBP +void viddec_vc1_get_context_size(viddec_parser_memory_sizes_t *size) +#else +static void viddec_vc1_get_context_size(viddec_parser_memory_sizes_t *size) +#endif +{ + size->context_size = sizeof(vc1_viddec_parser_t); + size->persist_size = 0; + return; +} // viddec_vc1_get_context_size + +#ifdef VBP +uint32_t 
viddec_vc1_is_start_frame(void *ctxt)
+#else
+static uint32_t viddec_vc1_is_start_frame(void *ctxt)
+#endif
+{
+    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *) ctxt;
+    return parser->is_frame_start;
+} // viddec_vc1_is_start_frame
+
+void viddec_vc1_get_ops(viddec_parser_ops_t *ops)
+{
+    ops->init = viddec_vc1_init;
+    ops->parse_syntax = viddec_vc1_parse;
+    ops->get_cxt_size = viddec_vc1_get_context_size;
+    ops->is_wkld_done = viddec_vc1_wkld_done;
+    ops->is_frame_start = viddec_vc1_is_start_frame;
+    return;
+} // viddec_vc1_get_ops
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c
new file mode 100644
index 0000000..b787831
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c
@@ -0,0 +1,691 @@
+/* Any workload management goes in this file */
+
+#include "viddec_fw_debug.h"
+#include "vc1.h"
+#include "vc1parse.h"
+#include "viddec_fw_workload.h"
+#include
+#include "viddec_pm_utils_bstream.h"
+
+/* this function returns workload frame types corresponding to VC1 PTYPES (frame types)
+ * VC1 frame types: can be found in vc1parse_common_defs.h
+ * workload frame types are in viddec_workload.h
+*/
+static inline uint32_t vc1_populate_frame_type(uint32_t vc1_frame_type)
+{
+    uint32_t viddec_frame_type;
+
+    switch(vc1_frame_type)
+    {
+    case VC1_I_FRAME:
+        viddec_frame_type = VIDDEC_FRAME_TYPE_I;
+        break;
+    case VC1_P_FRAME:
+        viddec_frame_type = VIDDEC_FRAME_TYPE_P;
+        break;
+    case VC1_B_FRAME:
+        viddec_frame_type = VIDDEC_FRAME_TYPE_B;
+        break;
+    case VC1_BI_FRAME:
+        viddec_frame_type = VIDDEC_FRAME_TYPE_BI;
+        break;
+    case VC1_SKIPPED_FRAME :
+        viddec_frame_type = VIDDEC_FRAME_TYPE_SKIP;
+        break;
+    default:
+        viddec_frame_type = VIDDEC_FRAME_TYPE_INVALID;
+        break;
+    } // switch on vc1 frame type
+
+    return(viddec_frame_type);
+} // vc1_populate_frame_type
+
+static void translate_parser_info_to_frame_attributes(void *parent, vc1_viddec_parser_t *parser)
+{
+    viddec_workload_t *wl = viddec_pm_get_header( parent );
+    viddec_frame_attributes_t *attrs = &wl->attrs;
+    vc1_Info *info = &parser->info;
+    unsigned i;
+
+    /* typical sequence layer and entry_point data */
+    attrs->cont_size.height = info->metadata.height * 2 + 2;
+    attrs->cont_size.width = info->metadata.width * 2 + 2;
+
+    /* frame type */
+    /* we can have two fields with different types for field interlace coding mode */
+    if (info->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) {
+        attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField1);
+        attrs->bottom_field_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField2);
+    } else {
+        attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTYPE);
+        attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; //unknown
+    }
+
+    /* frame counter */
+    attrs->vc1.tfcntr = info->picLayerHeader.TFCNTR;
+
+    /* TFF, repeat frame, field */
+    attrs->vc1.tff = info->picLayerHeader.TFF;
+    attrs->vc1.rptfrm = info->picLayerHeader.RPTFRM;
+    attrs->vc1.rff = info->picLayerHeader.RFF;
+
+    /* PAN Scan */
+    attrs->vc1.ps_present = info->picLayerHeader.PS_PRESENT;
+    attrs->vc1.num_of_pan_scan_windows = info->picLayerHeader.number_of_pan_scan_window;
+    for (i=0;i<attrs->vc1.num_of_pan_scan_windows;i++) {
+        attrs->vc1.pan_scan_window[i].hoffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].hoffset;
+        attrs->vc1.pan_scan_window[i].voffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].voffset;
+        attrs->vc1.pan_scan_window[i].width = 
info->picLayerHeader.PAN_SCAN_WINDOW[i].width; + attrs->vc1.pan_scan_window[i].height = info->picLayerHeader.PAN_SCAN_WINDOW[i].height; + } //end for i + + return; +} // translate_parser_info_to_frame_attributes + +void vc1_intcomp(vc1_viddec_parser_t *parser, vc1_Info *pInfo, VC1D_SPR_REGS *spr) +{ + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader; + uint32_t intcomp1 = 1; + uint32_t intcomp2 = 0; + + // Get the intensity compensation from the bitstream + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp1, pic->LUMSCALE); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp1, pic->LUMSHIFT); + + if(md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD) + { + intcomp2 = 1; + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp2, md->LUMSCALE2); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp2, md->LUMSHIFT2); + } + + switch(md->INTCOMPFIELD) + { + case VC1_INTCOMP_TOP_FIELD: + if(pic->CurrField == 0) // First field decoded + { + if(pic->TFF) + { + //parser->intcomp_bot[0] = intcomp1 << 13; + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp1); + } + else + { + parser->intcomp_top[0] = intcomp1; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp1; + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_top, intcomp1); + } + } + else // Second field + { + if(pic->TFF) + { + parser->intcomp_top[0] = intcomp1; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp1; + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_top, intcomp1); + } + else + { + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp1); + } + } + break; + case VC1_INTCOMP_BOTTOM_FIELD: + if(pic->CurrField == 0) // First field decoded + { + if(pic->TFF) + { + parser->intcomp_bot[0] = intcomp1; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp1; + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_bot, intcomp1); + } + else + { + parser->intcomp_bot[0] = intcomp1 << 13; + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp1); + } + } + else // Second field + { + if(pic->TFF) + { + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp1); + } + else + { + parser->intcomp_bot[0] = intcomp1; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp1; + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_bot, intcomp1); + } + } + break; + case VC1_INTCOMP_BOTH_FIELD: + if(pic->CurrField == 0) // First field decoded + { + if(pic->TFF) + { + parser->intcomp_bot[0] = intcomp2; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp2; + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_bot, intcomp2); + } + else + { + parser->intcomp_top[0] = intcomp2; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp2; + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_top, intcomp2); + } + } + else // Second field + { + if(pic->TFF) + { + parser->intcomp_top[0] = intcomp1; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp1; + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, 
spr->intcomp_bwd_top, intcomp1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp2); + } + else + { + parser->intcomp_bot[0] = intcomp1; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp1; + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_bot, intcomp1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp2); + } + } + break; + default: + break; + } // switch on INTCOMPFIELD + + return; +} // vc1_intcomp + +static void handle_intensity_compensation(vc1_viddec_parser_t *parser, vc1_Info *pInfo, VC1D_SPR_REGS *spr) +{ + vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader; + uint8_t intcomp_present = false; + + if((pic->MVMODE == VC1_MVMODE_INTENSCOMP) || (pic->INTCOMP)) + { + intcomp_present = true; + if(pic->FCM == VC1_FCM_FIELD_INTERLACE) + { + vc1_intcomp(parser, pInfo, spr); + } + else + { + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_top, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, spr->intcomp_fwd_top, pic->LUMSCALE); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, spr->intcomp_fwd_top, pic->LUMSHIFT); + + if(parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm == VC1_FCM_FIELD_INTERLACE) + { + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, spr->intcomp_fwd_bot, pic->LUMSCALE); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, spr->intcomp_fwd_bot, pic->LUMSHIFT); + } + + parser->intcomp_top[0] = spr->intcomp_fwd_top; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = spr->intcomp_fwd_top; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = spr->intcomp_fwd_top; + } + } + + // Propagate the previous picture's intensity compensation + if(pic->FCM == VC1_FCM_FIELD_INTERLACE) + { + if( (pic->CurrField) || + ((pic->CurrField == 0) && (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm == VC1_FCM_FIELD_INTERLACE))) + { + spr->intcomp_fwd_top |= parser->intcomp_top[1]; + spr->intcomp_fwd_bot |= parser->intcomp_bot[1]; + } + } + if(pic->FCM == VC1_FCM_FRAME_INTERLACE) + { + if( (pic->CurrField) || + ((pic->CurrField == 0) && (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm == VC1_FCM_FIELD_INTERLACE))) + { + spr->intcomp_fwd_bot |= parser->intcomp_bot[1]; + } + } + + switch(pic->PTYPE) + { + case VC1_B_FRAME: + spr->intcomp_fwd_top = parser->intcomp_last[0]; + spr->intcomp_fwd_bot = parser->intcomp_last[1]; + spr->intcomp_bwd_top = parser->intcomp_last[2]; + spr->intcomp_bwd_bot = parser->intcomp_last[3]; + break; + case VC1_P_FRAME: + // If first field, store the intcomp values to propagate. + // If second field has valid intcomp values, store them + // to propagate. 
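+            // (Second fields OR into intcomp_last below rather than overwrite,
+            //  so a field pair accumulates both fields' scale/shift values.)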
+ if(pic->CurrField == 0) // first field + { + parser->intcomp_last[0] = spr->intcomp_fwd_top; + parser->intcomp_last[1] = spr->intcomp_fwd_bot; + parser->intcomp_last[2] = spr->intcomp_bwd_top; + parser->intcomp_last[3] = spr->intcomp_bwd_bot; + } + else // Second field + { + parser->intcomp_last[0] |= spr->intcomp_fwd_top; + parser->intcomp_last[1] |= spr->intcomp_fwd_bot; + parser->intcomp_last[2] |= spr->intcomp_bwd_top; + parser->intcomp_last[3] |= spr->intcomp_bwd_bot; + } + break; + case VC1_I_FRAME: + case VC1_BI_FRAME: + break; + default: + break; + } + + return; +} // handle_intensity_compensation + +/** + * This function populates the registers for range reduction (main profile) + * This function assumes pInfo->metadata.RANGERED is ON at the sequence layer (J.1.17) + * A frame is marked as range reduced by the RANGEREDFRM flag at the picture layer, + * and the output of the decoded range reduced frame needs to be scaled up (8.1.1.4). + * Previous reference frame needs be upscaled or downscaled based on the RR status of + * current and previous frame (8.3.4.11) + */ +static inline void vc1_fill_RR_hw_struct(vc1_viddec_parser_t *parser, vc1_Info *pInfo, VC1D_SPR_REGS *spr) +{ + vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader; + int is_previous_ref_rr=0; + + /* range reduction applies to luma and chroma component + which are the same register bit as RANGE_MAPY_FLAG, RANGE_MAPUV_FLAG */ + BF_WRITE(VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, spr->range_map, pic->RANGEREDFRM); + BF_WRITE(VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, spr->range_map, pic->RANGEREDFRM); + + /* Get the range reduced status of the previous frame */ + switch (pic->PTYPE) + { + case VC1_P_FRAME: + { + is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].rr_frm; + break; + } + case VC1_B_FRAME: + { + is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].rr_frm; + break; + } + default: + { + break; + } + } + + /* if current frame is RR and previous frame is not + donwscale the reference pixel ( RANGE_REF_RED_TYPE =1 in register) */ + if(pic->RANGEREDFRM) + { + if(!is_previous_ref_rr) + { + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, spr->recon_control, 1); + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, spr->recon_control, 1); + } + } + else + { + /* if current frame is not RR but previous was RR, scale up the reference frame ( RANGE_REF_RED_TYPE = 0) */ + if(is_previous_ref_rr) + { + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, spr->recon_control, 1); + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, spr->recon_control, 0); + } + } // end for RR upscale + +} // vc1_fill_RR_hw_struct + +/** + * fill workload items that will load registers for HW decoder + */ +static void vc1_fill_hw_struct(vc1_viddec_parser_t *parser, vc1_Info* pInfo, VC1D_SPR_REGS *spr) +{ + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader; + int field = pic->CurrField; + int ptype; + + ptype = pic->PTYPE; + + LOG_CRIT("ptype = %d, field = %d, topfield = %d, slice = %d", ptype, pic->CurrField, pic->BottomField, pic->SLICE_ADDR); + + /* Common to both fields */ + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, PROFILE, spr->stream_format1, md->PROFILE); + + BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, WIDTH, spr->coded_size, md->width); + BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, HEIGHT, spr->coded_size, md->height); + + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, INTERLACE, spr->stream_format2, md->INTERLACE); + + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, LOOPFILTER, 
spr->entrypoint1, md->LOOPFILTER); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, FASTUVMC, spr->entrypoint1, md->FASTUVMC); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_MV, spr->entrypoint1, md->EXTENDED_MV); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, DQUANT, spr->entrypoint1, md->DQUANT); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, VS_TRANSFORM, spr->entrypoint1, md->VSTRANSFORM); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, OVERLAP, spr->entrypoint1, md->OVERLAP); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, QUANTIZER, spr->entrypoint1, md->QUANTIZER); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_DMV, spr->entrypoint1, md->EXTENDED_DMV); + + /* if range reduction is indicated at seq. layer, populate range reduction registers for the frame*/ + if(md->RANGERED) + { + vc1_fill_RR_hw_struct(parser, pInfo, spr ); + } + else + { //range mapping + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, spr->range_map, md->RANGE_MAPY_FLAG); + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y, spr->range_map, md->RANGE_MAPY); + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, spr->range_map, md->RANGE_MAPUV_FLAG); + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV, spr->range_map, md->RANGE_MAPUV); + } + + BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, FCM, spr->frame_type, pic->FCM); + BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, PTYPE, spr->frame_type, pic->PTYPE); + + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, RNDCTRL, spr->recon_control, md->RNDCTRL); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UVSAMP, spr->recon_control, pic->UVSAMP); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQUANT, spr->recon_control, pic->PQUANT); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, HALFQP, spr->recon_control, pic->HALFQP); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UNIFORM_QNT, spr->recon_control, pic->UniformQuant); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, POSTPROC, spr->recon_control, pic->POSTPROC); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, CONDOVER, spr->recon_control, pic->CONDOVER); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQINDEX_LE8, spr->recon_control, (pic->PQINDEX <= 8)); + + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVRANGE, spr->mv_control, pic->MVRANGE); + if ( pic->MVMODE == VC1_MVMODE_INTENSCOMP) + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, spr->mv_control, pic->MVMODE2); + else + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, spr->mv_control, pic->MVMODE); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVTAB, spr->mv_control, pic->MVTAB); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, DMVRANGE, spr->mv_control, pic->DMVRANGE); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MV4SWITCH, spr->mv_control, pic->MV4SWITCH); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MBMODETAB, spr->mv_control, pic->MBMODETAB); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, NUMREF, spr->mv_control, + pic->NUMREF || ((pic->PTYPE == VC1_B_FRAME) && ( pic->FCM == VC1_FCM_FIELD_INTERLACE ) )); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, REFFIELD, spr->mv_control, pic->REFFIELD); + + handle_intensity_compensation(parser, pInfo, spr); + + BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_DEN, spr->ref_bfraction, pic->BFRACTION_DEN); + BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_NUM, spr->ref_bfraction, pic->BFRACTION_NUM); + BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, REFDIST, spr->ref_bfraction, md->REFDIST); + + // BLOCK CONTROL REGISTER Offset 0x2C + BF_WRITE( VC1_0_SEQPIC_BLOCK_CONTROL, CBPTAB, spr->blk_control, pic->CBPTAB); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTMFB, spr->blk_control, pic->TTMBF); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTFRM, 
spr->blk_control, pic->TTFRM); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV2BPTAB, spr->blk_control, pic->MV2BPTAB); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV4BPTAB, spr->blk_control, pic->MV4BPTAB); + if((field == 1) && (pic->SLICE_ADDR)) + { + int mby = md->height * 2 + 2; + mby = (mby + 15 ) / 16; + pic->SLICE_ADDR -= (mby/2); + } + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, INITIAL_MV_Y, spr->blk_control, pic->SLICE_ADDR); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID2, spr->blk_control, md->bp_raw[0]); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID1, spr->blk_control, md->bp_raw[1]); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID0, spr->blk_control, md->bp_raw[2]); + + BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM, spr->trans_data, pic->TRANSACFRM); + BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM2, spr->trans_data, pic->TRANSACFRM2); + BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSDCTAB, spr->trans_data, pic->TRANSDCTAB); + + // When DQUANT is 1 or 2, we have the VOPDQUANT structure in the bitstream that + // controls the value calculated for ALTPQUANT + // ALTPQUANT must be in the range of 1 and 31 for it to be valid + // DQUANTFRM is present only when DQUANT is 1 and ALTPQUANT setting should be dependent on DQUANT instead + if(md->DQUANT) + { + if(pic->PQDIFF == 7) + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, spr->vop_dquant, pic->ABSPQ); + else if (pic->DQUANTFRM == 1) + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, spr->vop_dquant, pic->PQUANT + pic->PQDIFF + 1); + } + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQUANTFRM, spr->vop_dquant, pic->DQUANTFRM); + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQPROFILE, spr->vop_dquant, pic->DQPROFILE); + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQES, spr->vop_dquant, pic->DQSBEDGE); + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQBILEVEL, spr->vop_dquant, pic->DQBILEVEL); + + BF_WRITE(VC1_0_SEQPIC_CURR_FRAME_ID,FCM, spr->ref_frm_id[VC1_FRAME_CURRENT_REF], pic->FCM ); + + if ( ptype == VC1_B_FRAME) { + // Forward reference is past reference and is the second temporally closest reference - hence minus_2 + BF_WRITE(VC1_0_SEQPIC_FWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_PAST], parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].fcm ); + // Backward reference is future reference frame and is temporally the closest - hence minus_1 + BF_WRITE(VC1_0_SEQPIC_BWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_FUTURE], parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm ); + } else { + // Only Forward reference is valid and is the temporally closest reference - hence minus_1, backward is set same as forward + BF_WRITE(VC1_0_SEQPIC_FWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_PAST], parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm ); + BF_WRITE(VC1_0_SEQPIC_BWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_FUTURE], parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm ); + } + + BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, TOP_FIELD, spr->fieldref_ctrl_id, pic->BottomField); + BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, SECOND_FIELD, spr->fieldref_ctrl_id, pic->CurrField); + if(parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) + { + BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, spr->fieldref_ctrl_id, 1); + } + else + { + BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, spr->fieldref_ctrl_id, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[pic->CurrField]); + } + + if( pic->FCM == VC1_FCM_FIELD_INTERLACE ) { + BF_WRITE(VC1_0_SEQPIC_IMAGE_STRUCTURE, IMG_STRUC, spr->imgstruct, (pic->BottomField) ? 
2 : 1); + } + + return; +} // vc1_fill_hw_struct + +int32_t vc1_parse_emit_current_frame(void *parent, vc1_viddec_parser_t *parser) +{ + viddec_workload_item_t wi; + const uint32_t *pl; + int i; + int nitems; + + if( parser->info.picLayerHeader.PTYPE == VC1_SKIPPED_FRAME ) { + translate_parser_info_to_frame_attributes( parent, parser ); + return 0; + } + + translate_parser_info_to_frame_attributes( parent, parser ); + memset(&parser->spr, 0, sizeof(VC1D_SPR_REGS)); + vc1_fill_hw_struct( parser, &parser->info, &parser->spr ); + + /* STUFF BSP Data Memory it into a variety of workload items */ + + pl = (const uint32_t *) &parser->spr; + + // How many payloads must be generated + nitems = (sizeof(parser->spr) + 7) / 8; /* In QWORDs rounded up */ + + + // Dump DMEM to an array of workitems + for( i = 0; (i < nitems) && ( (parser->info.picLayerHeader.SLICE_ADDR == 0) || parser->info.picture_info_has_changed ); i++ ) + { + wi.vwi_type = VIDDEC_WORKLOAD_DECODER_SPECIFIC; + wi.data.data_offset = (unsigned int)pl - (unsigned int)&parser->spr; // offset within struct + wi.data.data_payload[0] = pl[0]; + wi.data.data_payload[1] = pl[1]; + pl += 2; + + viddec_pm_append_workitem( parent, &wi ); + } + + { + uint32_t bit, byte; + uint8_t is_emul; + viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul); + // Send current bit offset and current slice + wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET; + // If slice data starts in the middle of the emulation prevention sequence - + // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data + // to the decoder starting at the first byte of 0s so that the decoder can detect the + // emulation prevention. But the actual data starts are offset 8 in this bit sequence. + wi.vwi_payload[0] = bit + (is_emul*8); + wi.vwi_payload[1] = parser->info.picLayerHeader.SLICE_ADDR; + wi.vwi_payload[2] = 0xdeaddead; + viddec_pm_append_workitem( parent, &wi ); + } + + viddec_pm_append_pixeldata( parent ); + + return(0); +} + +/* sends VIDDEC_WORKLOAD_VC1_PAST_FRAME item */ +static inline void vc1_send_past_ref_items(void *parent) +{ + viddec_workload_item_t wi; + wi.vwi_type = VIDDEC_WORKLOAD_VC1_PAST_FRAME; + wi.ref_frame.reference_id = 0; + wi.ref_frame.luma_phys_addr = 0; + wi.ref_frame.chroma_phys_addr = 0; + viddec_pm_append_workitem( parent, &wi ); + return; +} + +/* send future frame item */ +static inline void vc1_send_future_ref_items(void *parent) +{ + viddec_workload_item_t wi; + wi.vwi_type = VIDDEC_WORKLOAD_VC1_FUTURE_FRAME; + wi.ref_frame.reference_id = 0; + wi.ref_frame.luma_phys_addr = 0; + wi.ref_frame.chroma_phys_addr = 0; + viddec_pm_append_workitem( parent, &wi ); + return; +} + +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_reorder_ref_items(void *parent) +{ + viddec_workload_item_t wi; + wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER; + wi.ref_reorder.ref_table_offset = 0; + wi.ref_reorder.ref_reorder_00010203 = 0x01010203; //put reference frame index 1 as reference index 0 + wi.ref_reorder.ref_reorder_04050607 = 0x04050607; // index 4,5,6,7 stay the same + viddec_pm_append_workitem( parent, &wi ); + return; +} // send_reorder_ref_items + +/** update workload with more workload items for ref and update values to store... 
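+ * The reorder item is sent before any reference-frame items so the host
+ * rotates the previous future frame into the past slot first. Per picture,
+ * the parser drives (names as defined in this file and viddec_vc1_parse.c):
+ *
+ *     vc1_start_new_frame(parent, parser);          // reorder + ref items
+ *     vc1_parse_emit_current_frame(parent, parser); // registers + pixel data
+ *     // ...and vc1_end_frame(parser) from viddec_vc1_wkld_done() at the
+ *     // next frame boundary.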
+ */
+void vc1_start_new_frame(void *parent, vc1_viddec_parser_t *parser)
+{
+    vc1_metadata_t *md = &(parser->info.metadata);
+    viddec_workload_t *wl = viddec_pm_get_header(parent);
+    int frame_type = parser->info.picLayerHeader.PTYPE;
+    int frame_id = 1; // new reference frame is assigned index 1
+
+    /* init */
+    memset(&parser->spr, 0, sizeof(parser->spr));
+    wl->is_reference_frame = 0;
+
+    /* set flag - extra output frame needed for range adjustment (range mapping or range reduction) */
+    if (parser->info.metadata.RANGE_MAPY_FLAG ||
+        parser->info.metadata.RANGE_MAPUV_FLAG ||
+        parser->info.picLayerHeader.RANGEREDFRM)
+    {
+        wl->is_reference_frame |= WORKLOAD_FLAGS_RA_FRAME;
+    }
+
+    LOG_CRIT("vc1_start_new_frame: frame_type=%d \n",frame_type);
+
+    parser->is_reference_picture = ((VC1_B_FRAME != frame_type) && (VC1_BI_FRAME != frame_type));
+
+    /* reference / anchor frames processing
+     * we need to send reorder before reference frames */
+    if (parser->is_reference_picture)
+    {
+        /* one frame has been sent */
+        if (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != -1)
+        {
+            /* there is a frame in the reference buffer, move it to the past */
+            send_reorder_ref_items(parent);
+        }
+    }
+
+    /* send workitems for reference frames */
+    switch( frame_type )
+    {
+    case VC1_B_FRAME:
+    {
+        vc1_send_past_ref_items(parent);
+        vc1_send_future_ref_items(parent);
+        break;
+    }
+    case VC1_SKIPPED_FRAME:
+    {
+        wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME;
+        vc1_send_past_ref_items(parent);
+        break;
+    }
+    case VC1_P_FRAME:
+    {
+        vc1_send_past_ref_items( parent);
+        break;
+    }
+    default:
+        break;
+    }
+
+    /* reference / anchor frames from previous code
+     * we may need it for range reduction */
+    if (parser->is_reference_picture)
+    {
+        wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (frame_id & WORKLOAD_REFERENCE_FRAME_BMASK);
+
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].id = frame_id;
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].fcm = parser->info.picLayerHeader.FCM;
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[0] = (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME);
+        if(parser->info.picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE)
+        {
+            parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[1] = (parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME);
+        }
+        else
+        {
+            parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[1] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[0];
+        }
+
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].type = parser->info.picLayerHeader.PTYPE;
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_en = md->RANGERED;
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_frm = parser->info.picLayerHeader.RANGEREDFRM;
+
+        LOG_CRIT("anchor[0] = %d, anchor[1] = %d",
+                 parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[0],
+                 parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[1] );
+    }
+
+    return;
+} // vc1_start_new_frame
+
+void vc1_end_frame(vc1_viddec_parser_t *parser)
+{
+    /* update status of reference frames */
+    if(parser->is_reference_picture)
+    {
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_2] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1];
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_1] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_0];
+    }
+
+    return;
+} // vc1_end_frame
+
diff --git a/mix_vbp/viddec_fw/fw/include/stdint.h b/mix_vbp/viddec_fw/fw/include/stdint.h
new file mode 100644
index 0000000..885cfe1
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/include/stdint.h
@@ -0,0 +1,23 @@
+#ifndef __STDINT_H
+#define __STDINT_H
+
+typedef unsigned char uint8_t;
+typedef unsigned short uint16_t;
+typedef unsigned 
int uint32_t; +typedef unsigned long long uint64_t; + +//#ifndef _MACHTYPES_H_ +typedef signed char int8_t; +typedef signed short int16_t; +typedef signed int int32_t; +typedef signed long long int64_t; +//#endif + +#ifndef NULL +#define NULL (void*)0x0 +#endif + +#define true 1 +#define false 0 + +#endif diff --git a/mix_vbp/viddec_fw/fw/include/viddec_debug.h b/mix_vbp/viddec_fw/fw/include/viddec_debug.h new file mode 100644 index 0000000..23db98f --- /dev/null +++ b/mix_vbp/viddec_fw/fw/include/viddec_debug.h @@ -0,0 +1,31 @@ +#ifndef VIDDEC_DEBUG_H +#define VIDDEC_DEBUG_H + +#ifndef VBP + +#ifdef HOST_ONLY + #include + #include + #define DEB OS_PRINT + #define FWTRACE OS_PRINT("trace:%s %d\n", __FUNCTION__, __LINE__ ); +// #define DEB(format, args...) +// #define FWTRACE + #define DEB_FNAME(format, args...) OS_PRINT("%s: %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ , ## args ) + #define CDEB(a, format, args...) if(a != 0) {DEB(format, ##args);} +#else + #define DEB(format, args...) + #define FWTRACE + #define CDEB(a, format, args...) + #define DEB_FNAME(format, args...) +#endif + +#else // VBP is defined + +#define DEB(format, args...) +#define FWTRACE +#define CDEB(a, format, args...) +#define DEB_FNAME(format, args...) + +#endif // end of VBP + +#endif diff --git a/mix_vbp/viddec_fw/fw/include/viddec_fw_version.h b/mix_vbp/viddec_fw/fw/include/viddec_fw_version.h new file mode 100644 index 0000000..099be69 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/include/viddec_fw_version.h @@ -0,0 +1,7 @@ +#ifndef VIDDEC_FW_VERSION_H +#define VIDDEC_FW_VERSION_H + +#define VIDDEC_FW_MAJOR_NUM 0 +#define VIDDEC_FW_MINOR_NUM 8 +#define VIDDEC_FW_BUILD_NUM 11 +#endif diff --git a/mix_vbp/viddec_fw/fw/parser/Makefile.am b/mix_vbp/viddec_fw/fw/parser/Makefile.am new file mode 100644 index 0000000..c94b935 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/Makefile.am @@ -0,0 +1,205 @@ +#INTEL CONFIDENTIAL +#Copyright 2009 Intel Corporation All Rights Reserved. +#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
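
A usage note on the DEB/FWTRACE/CDEB macros defined in viddec_debug.h above: they only print on HOST_ONLY builds without VBP; in every other configuration they expand to nothing, so call sites need no #ifdef guards of their own. A minimal sketch of the intended call pattern follows — the parse_slice helper and its argument are hypothetical, shown only to illustrate the macros:

    #include "viddec_debug.h"

    /* Hypothetical parser helper, for illustration only. */
    static void parse_slice(int mb_count)
    {
        FWTRACE;  /* prints "trace:<function> <line>" on HOST_ONLY builds */
        DEB("parsing slice, %d macroblocks\n", mb_count);
        CDEB(mb_count == 0, "empty slice\n");          /* prints only when the guard is non-zero */
        DEB_FNAME("leaving, mb_count=%d\n", mb_count); /* prefixes file, function and line */
    }
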
+# +VC1PATH=./../codecs/vc1/parser +MP2PATH=./../codecs/mp2/parser +MP4PATH=./../codecs/mp4/parser +H264PATH=./../codecs/h264/parser + +PARSER_INCLUDE_PATH=-I./include \ + -I../include \ + -I../../include \ + -I./vc1/include \ + -I../codecs/vc1/include \ + -I../codecs/mp2/include \ + -I../codecs/mp4/include \ + -I../codecs/h264/include \ + -I../codecs/vc1/parser + + +PARSER_MACROS= -DVBP \ + -DHOST_ONLY \ + -DG_LOG_DOMAIN=\"vbp\" + + +la_CFLAGS = $(GLIB_CFLAGS) \ + $(MIX_CFLAGS) \ + $(GOBJECT_CFLAGS) \ + $(GTHREAD_CFLAGS) \ + $(PARSER_INCLUDE_PATH) \ + $(PARSER_MACROS) \ + -DMIXVBP_CURRENT=@MIXVBP_CURRENT@ \ + -DMIXVBP_AGE=@MIXVBP_AGE@ \ + -DMIXVBP_REVISION=@MIXVBP_REVISION@ + +la_LIBADD = $(GLIB_LIBS) \ + $(GOBJECT_LIBS) \ + $(GTHREAD_LIBS) + +la_LDFLAGS = $(GLIB_LIBS) \ + $(GOBJECT_LIBS) \ + $(GTHREAD_LIBS) \ + -version-info @MIXVBP_CURRENT@:@MIXVBP_REVISION@:@MIXVBP_AGE@ + +lib_LTLIBRARIES = libmixvbp.la \ + libmixvbp_vc1.la \ + libmixvbp_mpeg2.la \ + libmixvbp_mpeg4.la \ + libmixvbp_h264.la + + +###################################### vbp loader ######################################## + +# sources used to compile +libmixvbp_la_SOURCES = vbp_loader.c \ + vbp_utils.c \ + vbp_trace.c \ + vbp_h264_parser.c \ + vbp_vc1_parser.c \ + vbp_mp42_parser.c \ + viddec_pm.c \ + viddec_pm_stubs.c \ + viddec_pm_parser_ops.c \ + viddec_pm_utils_bstream.c \ + viddec_pm_tags.c \ + viddec_emit.c \ + viddec_pm_utils_list.c \ + viddec_parse_sc.c \ + viddec_parse_sc_stub.c + +libmixvbp_la_CFLAGS = $(la_CFLAGS) +libmixvbp_la_LIBADD = $(la_LIBADD) +libmixvbp_la_LDFLAGS = $(la_LDFLAGS) +libmixvbp_la_LIBTOOLFLAGS = --tag=disable-static + +###################################### VC-1 parser ######################################## + +libmixvbp_vc1_la_SOURCES = $(VC1PATH)/vc1parse.c \ + $(VC1PATH)/vc1parse_bitplane.c \ + $(VC1PATH)/vc1parse_bpic.c \ + $(VC1PATH)/vc1parse_bpic_adv.c \ + $(VC1PATH)/vc1parse_common_tables.c \ + $(VC1PATH)/vc1parse_huffman.c \ + $(VC1PATH)/vc1parse_ipic.c \ + $(VC1PATH)/vc1parse_ipic_adv.c \ + $(VC1PATH)/vc1parse_mv_com.c \ + $(VC1PATH)/vc1parse_pic_com.c \ + $(VC1PATH)/vc1parse_pic_com_adv.c \ + $(VC1PATH)/vc1parse_ppic.c \ + $(VC1PATH)/vc1parse_ppic_adv.c \ + $(VC1PATH)/vc1parse_vopdq.c \ + $(VC1PATH)/viddec_vc1_parse.c \ + $(VC1PATH)/mix_vbp_vc1_stubs.c + +libmixvbp_vc1_la_CFLAGS = $(la_CFLAGS) +libmixvbp_vc1_la_LIBADD = $(la_LIBADD) libmixvbp.la +libmixvbp_vc1_la_LDFLAGS = $(la_LDFLAGS) +libmixvbp_vc1_la_LIBTOOLFLAGS = --tag=disable-static + +###################################### MPEG-2 parser ######################################## + +libmixvbp_mpeg2_la_SOURCES = $(MP2PATH)/viddec_mpeg2_metadata.c \ + $(MP2PATH)/viddec_mpeg2_parse.c \ + $(MP2PATH)/mix_vbp_mpeg2_stubs.c + +libmixvbp_mpeg2_la_CFLAGS = $(la_CFLAGS) +libmixvbp_mpeg2_la_LIBADD = $(la_LIBADD) libmixvbp.la +libmixvbp_mpeg2_la_LDFLAGS = $(la_LDFLAGS) +libmixvbp_mpeg2_la_LIBTOOLFLAGS = --tag=disable-static + +###################################### MPEG-4 parser ######################################## + +libmixvbp_mpeg4_la_SOURCES = $(MP4PATH)/viddec_mp4_parse.c \ + $(MP4PATH)/viddec_parse_sc_mp4.c \ + $(MP4PATH)/viddec_mp4_visualobject.c \ + $(MP4PATH)/viddec_mp4_videoobjectplane.c \ + $(MP4PATH)/viddec_mp4_shortheader.c \ + $(MP4PATH)/viddec_mp4_videoobjectlayer.c \ + $(MP4PATH)/viddec_mp4_decodevideoobjectplane.c + +libmixvbp_mpeg4_la_CFLAGS = $(la_CFLAGS) +libmixvbp_mpeg4_la_LIBADD = $(la_LIBADD) libmixvbp.la +libmixvbp_mpeg4_la_LDFLAGS = $(la_LDFLAGS) +libmixvbp_mpeg4_la_LIBTOOLFLAGS = --tag=disable-static + 
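
Each codec parser here is built as its own libtool library linked back against the core libmixvbp.la (the H.264 section below completes the set), and the single entry point each codec exports is a viddec_<codec>_get_ops() hook; the declarations appear in the viddec_*_parse.h headers later in this patch. A hedged sketch of how a caller could populate a per-codec ops table — the codec-id constants and the dispatch wrapper are illustrative, not part of this patch:

    #include "viddec_parser_ops.h"
    #include "viddec_vc1_parse.h"
    #include "viddec_mpeg2_parse.h"
    #include "viddec_mp4_parse.h"
    #include "viddec_h264_parse.h"

    /* Hypothetical codec ids; the real ids live in the stream configuration headers. */
    enum { DEMO_VC1, DEMO_MPEG2, DEMO_MP4, DEMO_H264 };

    static void demo_get_parser_ops(unsigned int codec, viddec_parser_ops_t *ops)
    {
        switch (codec)
        {
            case DEMO_VC1:   viddec_vc1_get_ops(ops);   break;
            case DEMO_MPEG2: viddec_mpeg2_get_ops(ops); break;
            case DEMO_MP4:   viddec_mp4_get_ops(ops);   break;
            case DEMO_H264:  viddec_h264_get_ops(ops);  break;
            default: break; /* unknown codec: leave ops untouched */
        }
    }

This mirrors the build split: the core stays codec-agnostic and reaches each parser only through the ops function pointers.
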
+###################################### H.264 parser ######################################## + +libmixvbp_h264_la_SOURCES = $(H264PATH)/h264parse.c \ + $(H264PATH)/h264parse_bsd.c \ + $(H264PATH)/h264parse_math.c \ + $(H264PATH)/h264parse_mem.c \ + $(H264PATH)/h264parse_sei.c \ + $(H264PATH)/h264parse_sh.c \ + $(H264PATH)/h264parse_pps.c \ + $(H264PATH)/h264parse_sps.c \ + $(H264PATH)/h264parse_dpb.c \ + $(H264PATH)/viddec_h264_parse.c \ + $(H264PATH)/mix_vbp_h264_stubs.c + +libmixvbp_h264_la_CFLAGS = $(la_CFLAGS) +libmixvbp_h264_la_LIBADD = $(la_LIBADD) libmixvbp.la +libmixvbp_h264_la_LDFLAGS = $(la_LDFLAGS) +libmixvbp_h264_la_LIBTOOLFLAGS = --tag=disable-static + +############################################################################################## + +# headers we need but don't want installed +noinst_HEADERS = ./vbp_h264_parser.h \ + ./vbp_mp42_parser.h \ + ./vbp_vc1_parser.h \ + ./vbp_trace.h \ + ./vbp_loader.h \ + ./vbp_utils.h \ + ./include/fw_pvt.h \ + ./include/ipc_fw_custom.h \ + ./include/viddec_emitter.h \ + ./include/viddec_fw_debug.h \ + ./include/viddec_fw_parser_fw_ipc.h \ + ./include/viddec_h264_parse.h \ + ./include/viddec_mp4_parse.h \ + ./include/viddec_mpeg2_parse.h \ + ./include/viddec_parser_ops.h \ + ./include/viddec_pm.h \ + ./include/viddec_pm_parse.h \ + ./include/viddec_pm_tags.h \ + ./include/viddec_pm_utils_bstream.h \ + ./include/viddec_pm_utils_list.h \ + ./include/viddec_vc1_parse.h \ + ../include/stdint.h \ + ../include/viddec_debug.h \ + ../include/viddec_fw_version.h \ + ../../include/viddec_fw_common_defs.h \ + ../../include/viddec_fw_decoder_host.h \ + ../../include/viddec_fw_frame_attr.h \ + ../../include/viddec_fw_item_types.h \ + ../../include/viddec_fw_parser_host.h \ + ../../include/viddec_fw_workload.h \ + ../../fw/include/stdint.h \ + ../../fw/include/viddec_debug.h \ + ../../fw/include/viddec_fw_version.h \ + ../../fw/codecs/h264/include/h264.h \ + ../../fw/codecs/h264/include/h264parse.h \ + ../../fw/codecs/h264/include/h264parse_dpb.h \ + ../../fw/codecs/h264/include/h264parse_sei.h \ + ../../fw/codecs/mp2/include/mpeg2.h \ + ../../fw/codecs/mp2/include/viddec_mpeg2.h \ + ../../fw/codecs/mp4/include/viddec_fw_mp4.h \ + ../../fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h \ + ../../fw/codecs/mp4/parser/viddec_mp4_parse.h \ + ../../fw/codecs/mp4/parser/viddec_mp4_shortheader.h \ + ../../fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h \ + ../../fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h \ + ../../fw/codecs/mp4/parser/viddec_mp4_visualobject.h \ + ../../fw/codecs/vc1/include/vc1common.h \ + ../../fw/codecs/vc1/parser/vc1.h \ + ../../fw/codecs/vc1/parser/vc1parse.h \ + ../../fw/codecs/vc1/parser/vc1parse_common_defs.h + + +mixincludedir=$(includedir)/mixvbp +mixinclude_HEADERS = vbp_loader.h + +############################################################################################## diff --git a/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c b/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c new file mode 100644 index 0000000..299dbce --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c @@ -0,0 +1,224 @@ +/* + + This file is provided under a dual BSD/GPLv2 license. When using or + redistributing this file, you may do so under either license. + + GPL LICENSE SUMMARY + + Copyright(c) 2005-2008 Intel Corporation. All rights reserved. 
+ + This program is free software; you can redistribute it and/or modify + it under the terms of version 2 of the GNU General Public License as + published by the Free Software Foundation. + + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. + The full GNU General Public License is included in this distribution + in the file called LICENSE.GPL. + + Contact Information: + Intel Corporation + 2200 Mission College Blvd. + Santa Clara, CA 97052 + + BSD LICENSE + + Copyright(c) 2005-2008 Intel Corporation. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +*/ + +#ifndef SVEN_FW_H +#include "sven_fw.h" +#endif + +#define _OSAL_IO_MEMMAP_H /* to prevent errors when including sven_devh.h */ +#define _OSAL_ASSERT_H /* to prevent errors when including sven_devh.h */ +#include "sven_devh.h" + +#include "fw_pvt.h" + +static os_devhandle_t g_svenh; + +#define FW_SVEN_DEVH_DISABLE_SVEN_REGISTER_IO +//#define SVEN_DEVH_DISABLE_SVEN + +extern int sven_fw_is_tx_enabled( + struct SVENHandle *svenh ); + +#ifndef SVEN_DEVH_DISABLE_SVEN +static void sven_write_event( + struct SVENHandle *svenh, + struct SVENEvent *ev ) +{ + if ( NULL == svenh ) + svenh = &g_svenh.devh_svenh; + + if ( NULL != svenh->phot ) + sven_fw_write_event(svenh,ev); +} + +static void sven_fw_initialize_event_top( + struct SVENEvent *ev, + int module, + int unit, + int event_type, + int event_subtype ) +{ + ev->se_et.et_gencount = 0; + ev->se_et.et_module = module; + ev->se_et.et_unit = unit; + ev->se_et.et_type = event_type; + ev->se_et.et_subtype = event_subtype; +} +#endif + +uint32_t sven_get_timestamp() +{ + uint32_t value = 0; + + if ( NULL != g_svenh.devh_svenh.ptime ) + { + value = sven_fw_read_external_register( &g_svenh.devh_svenh, g_svenh.devh_svenh.ptime ); + } + + return(value); +} + +/* ---------------------------------------------------------------------- */ +/* ---------------------------------------------------------------------- */ + +void devh_SVEN_SetModuleUnit( + os_devhandle_t *devh, + int sven_module, + int sven_unit ) +{ +#ifndef SVEN_DEVH_DISABLE_SVEN + if ( NULL == devh ) + devh = &g_svenh; + devh->devh_sven_module = sven_module; + devh->devh_sven_unit = sven_unit; +#endif +} + +os_devhandle_t *devhandle_factory( const char *desc ) +{ + /* pointer to global vsparc local registers */ + g_svenh.devh_regs_ptr = (void *) 0x10000000; /* firmware address to Local (GV) registers */ + + return( &g_svenh ); +} + +int devhandle_connect_name( + os_devhandle_t *devh, + const char *devname ) +{ + return(1); +} + +/* ---------------------------------------------------------------------- */ +/* ---------------------------------------------------------------------- */ + +void devh_SVEN_WriteModuleEvent( + os_devhandle_t *devh, + int module_event_subtype, + unsigned int payload0, + unsigned int payload1, + unsigned int payload2, + unsigned int payload3, + unsigned int payload4, + unsigned int payload5 ) +{ +#ifndef SVEN_DEVH_DISABLE_SVEN + struct SVENEvent ev __attribute__ ((aligned(8))); + + devh = (NULL != devh) ? devh : &g_svenh; + + if ( ! 
sven_fw_is_tx_enabled( &devh->devh_svenh ) )
+        return;
+
+    sven_fw_initialize_event_top( &ev,
+            devh->devh_sven_module,
+            1 /* devh->devh_sven_unit */,
+            SVEN_event_type_module_specific,
+            module_event_subtype );
+
+    ev.u.se_uint[0] = payload0;
+    ev.u.se_uint[1] = payload1;
+    ev.u.se_uint[2] = payload2;
+    ev.u.se_uint[3] = payload3;
+    ev.u.se_uint[4] = payload4;
+    ev.u.se_uint[5] = payload5;
+
+    sven_write_event( &devh->devh_svenh, &ev );
+#endif
+}
+
+/* ---------------------------------------------------------------------- */
+/* SVEN FW TX: Required custom routines to enable FW TX                   */
+/* ---------------------------------------------------------------------- */
+int sven_fw_set_globals(
+    struct SVEN_FW_Globals *fw_globals )
+{
+    sven_fw_attach( &g_svenh.devh_svenh, fw_globals );
+    devh_SVEN_SetModuleUnit( &g_svenh, SVEN_module_GEN4_GV, 1 );
+    return(0);
+}
+
+uint32_t cp_using_dma_phys(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
+unsigned int sven_fw_read_external_register(
+    struct SVENHandle *svenh,
+    volatile unsigned int *preg )
+{
+    unsigned int reg __attribute__ ((aligned(8)));
+
+    (void)svenh; // argument unused
+
+    cp_using_dma_phys( (uint32_t) preg, (uint32_t) &reg, 4, 0, 0 );
+
+    return( reg );
+}
+
+void sven_fw_copy_event_to_host_mem(
+    struct SVENHandle *svenh,
+    volatile struct SVENEvent *to,
+    const struct SVENEvent *from )
+{
+    (void)svenh; // argument unused
+
+    cp_using_dma_phys( (uint32_t) to, (uint32_t) from, sizeof(*to), 1, 0 );
+}
+/* ---------------------------------------------------------------------- */
+/* ---------------------------------------------------------------------- */
diff --git a/mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h b/mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h
new file mode 100644
index 0000000..0928ad3
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h
@@ -0,0 +1,114 @@
+#ifndef FW_PVT_H
+#define FW_PVT_H
+
+#include
+#include "viddec_fw_parser_fw_ipc.h"
+#include "viddec_fw_parser_ipclib_config.h"
+#include "viddec_emitter.h"
+#include "viddec_pm.h"
+#include "viddec_fw_debug.h"
+
+#define GET_IPC_HANDLE(x) (FW_IPC_Handle *)&(x.fwIpc)
+#define GV_DDR_MEM_MASK 0x80000000
+/* Macros for Interrupts */
+#define TRAPS_ENABLE __asm__ volatile ("mov %%psr, %%l0; or %%l0, 0x20, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0")
+#define TRAPS_DISABLE __asm__ volatile ("mov %%psr, %%l0; and %%l0, ~0x20, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0")
+
+#define TRAPS_INT_ENABLE __asm__ volatile ("mov %%psr, %%l0; and %%l0, ~0xF00, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0")
+#define TRAPS_INT_DISABLE __asm__ volatile ("mov %%psr, %%l0; or %%l0, 0xF00, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0")
+
+#define TRAPS_ENABLED(enabled) __asm__ volatile ("mov %%psr, %0; and %0, 0x20, %0": "=r" (enabled):)
+
+#define TRAPS_INT_DISABLED(enabled) __asm__ volatile ("mov %%psr, %0; and %0, 0xF00, %0": "=r" (enabled):)
+
+#define VIDDEC_WATCHDOG_COUNTER_MAX (0x000FFFFF)
+
+/* Synchronous message buffer, which is shared by both Host and Fw for handling synchronous messages */
+typedef struct
+{
+    uint8_t data[CONFIG_IPC_SYNC_MESSAGE_BUF_SIZE];
+}mfd_sync_msg_t;
+
+/* Required Information needed by Parser Kernel for each stream */
+typedef struct
+{
+    uint32_t ddr_cxt;      /* phys addr of swap space where Parser kernel stores pvt information */
+    uint32_t cxt_size;     /* size of context buffer */
+    uint32_t strm_type;    /* Current stream information*/
+    uint32_t wl_time;      /* ticks for processing current workload */
+    uint32_t es_time;      /* ticks for processing current ES data */
+    uint32_t low_watermark; /* On crossing this value we generate low watermark interrupt */
+    uint8_t  state;        /* Current state of stream ... start(1), stop(0).. */
+    uint8_t  priority;     /* Priority of current stream Real time or Non real time */
+    uint8_t  buffered_data;/* Do we have data from past buffer */
+    uint8_t  pending_interrupt;/* Whether an Interrupt needs to be generated for this stream */
+}mfd_stream_info;
+
+/* Global data for Parser kernel */
+typedef struct
+{
+    int32_t low_id; /* last scheduled low priority stream id */
+    int32_t high_id;/* last scheduled high priority stream id */
+    uint32_t g_parser_tables; /* should point to global_parser_table in DDR */
+}mfd_pk_data_t;
+
+typedef struct
+{
+    ipc_msg_data input;
+    ipc_msg_data wkld1;
+    ipc_msg_data wkld2;
+    viddec_pm_cxt_t pm;
+}mfd_pk_strm_cxt;
+
+/* This structure defines the layout of local memory */
+typedef struct
+{
+    mfd_sync_msg_t buf;
+    _IPC_int_state_t int_status[FW_SUPPORTED_STREAMS];
+    FW_IPC_Handle fwIpc;
+    mfd_stream_info stream_info[FW_SUPPORTED_STREAMS];
+    mfd_pk_data_t g_pk_data;
+    mfd_pk_strm_cxt srm_cxt;
+}dmem_t;
+
+/* Pvt Functions which will be used by multiple modules */
+
+static inline void reg_write(uint32_t offset, uint32_t value)
+{
+    *((volatile uint32_t*) (GV_SI_MMR_BASE_ADDRESS + offset)) = value;
+}
+
+static inline uint32_t reg_read(uint32_t offset)
+{
+    uint32_t value=0;
+    value = *((volatile uint32_t*) (GV_SI_MMR_BASE_ADDRESS + offset));
+    return value;
+}
+
+
+static inline void DEBUG(uint32_t print, uint32_t code, uint32_t val)
+{
+    if(print > 0)
+    {
+        DUMP_TO_MEM(code);
+        DUMP_TO_MEM(val);
+        dump_ptr = (dump_ptr + 7) & ~0x7;
+    }
+}
+
+void *memcpy(void *dest, const void *src, uint32_t n);
+
+void *memset(void *s, int32_t c, uint32_t n);
+
+uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
+uint32_t set_wdog(uint32_t offset);
+
+void get_wdog(uint32_t *value);
+
+void enable_intr(void);
+
+uint32_t get_total_ticks(uint32_t start, uint32_t end);
+
+void viddec_fw_init_swap_memory(unsigned int stream_id, unsigned int swap, unsigned int clean);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h b/mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h
new file mode 100644
index 0000000..adfdabf
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h
@@ -0,0 +1,87 @@
+/*
+
+This file is provided under a dual BSD/GPLv2 license.  When using or
+redistributing this file, you may do so under either license.
+
+GPL LICENSE SUMMARY
+
+Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+This program is free software; you can redistribute it and/or modify
+it under the terms of version 2 of the GNU General Public License as
+published by the Free Software Foundation.
+
+This program is distributed in the hope that it will be useful, but
+WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+The full GNU General Public License is included in this distribution
+in the file called LICENSE.GPL.
+
+Contact Information:
+Intel Corporation
+2200 Mission College Blvd.
+Santa Clara, CA 97052
+
+BSD LICENSE
+
+Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+* Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in
+the documentation and/or other materials provided with the
+distribution.
+* Neither the name of Intel Corporation nor the names of its
+contributors may be used to endorse or promote products derived
+from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef IPC_FW_CUSTOM_H
+#define IPC_FW_CUSTOM_H 1
+
+#include "viddec_fw_parser_fw_ipc.h"
+
+extern void custom_ipclib_firmware_out_of_reset(void);
+
+extern struct FW_IPC_Handler *custom_ipclib_get_fwipc(void);
+extern void *custom_ipclib_get_sync_message_area(void);
+
+extern void custom_ipclib_firmware_setup(void);
+extern void custom_ipclib_firmware_ready(void);
+
+extern int custom_ipclib_firmware_is_sync_command_requested(void);
+extern void custom_ipclib_firmware_ack_sync_command(void);
+
+void custom_ipclib_memcpy_to_host_mem(
+    void *to,
+    const void *from,
+    int size );
+
+void custom_ipclib_memcpy_from_host_mem(
+    void *to,
+    const void *from,
+    int size );
+
+#endif /* IPC_FW_CUSTOM_H */
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h
new file mode 100644
index 0000000..bb96bab
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h
@@ -0,0 +1,96 @@
+#ifndef VIDDEC_EMITTER_H
+#define VIDDEC_EMITTER_H
+
+#include
+#ifndef HOST_ONLY
+#define DDR_MEM_MASK 0x80000000
+#else
+#define DDR_MEM_MASK 0x0
+#endif
+#include "viddec_fw_workload.h"
+#include "viddec_fw_common_defs.h"
+#include "viddec_fw_debug.h"
+
+typedef struct
+{
+    viddec_workload_t *data;
+    uint32_t max_items;
+    uint32_t num_items;
+    uint32_t result;
+}viddec_emitter_wkld;
+
+typedef struct
+{
+    viddec_emitter_wkld cur;
+    viddec_emitter_wkld next;
+}viddec_emitter;
+
+/*
+   Emits the association tag for the current ES buffer.
+*/
+int32_t viddec_emit_assoc_tag(viddec_emitter *emit, uint32_t id, uint32_t using_next);
+
+int32_t viddec_emit_contr_tag(viddec_emitter *emit, viddec_input_buffer_t *ibuf, uint8_t incomplete, uint32_t using_next);
+
+int32_t viddec_emit_flush_current_wkld(viddec_emitter *emit);
+
+int32_t viddec_emit_append(viddec_emitter_wkld *cxt, viddec_workload_item_t *item);
+
+/*
+   Init function for setting up emitter context.
+*/ +static inline void viddec_emit_init(viddec_emitter *cxt) +{ + cxt->cur.data = cxt->next.data = 0; + cxt->cur.max_items = cxt->next.max_items = 0; + cxt->cur.num_items = cxt->next.num_items = 0; + cxt->cur.result = cxt->next.result = VIDDEC_FW_WORKLOAD_SUCCESS; +} + +static inline void viddec_emit_update(viddec_emitter *cxt, uint32_t cur, uint32_t next, uint32_t cur_size, uint32_t next_size) +{ + cxt->cur.data = (cur != 0) ? (viddec_workload_t *)(cur | DDR_MEM_MASK) : NULL; + cxt->next.data = (next != 0) ? (viddec_workload_t *)(next | DDR_MEM_MASK): NULL; + cxt->cur.max_items = (cur_size - sizeof(viddec_workload_t))/sizeof(viddec_workload_item_t); + cxt->next.max_items = (next_size - sizeof(viddec_workload_t))/sizeof(viddec_workload_item_t); +} + +static inline void viddec_emit_time(viddec_emitter *cxt, uint32_t time) +{ + viddec_emitter_wkld *cur; + cur = &(cxt->cur); + cur->data->time = time; +} + +static inline void viddec_emit_set_codec(viddec_emitter *emit, uint32_t codec_type) +{ + emit->cur.data->codec = codec_type; +} + +static inline void viddec_emit_set_codec_errors(viddec_emitter *emit, uint32_t codec_error) +{ + emit->cur.result |= codec_error; + WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PM_WORKLOAD_STATUS, (int)emit->cur.result, (int)emit->cur.data, + (int)emit->cur.num_items, 0, 0, 0); +} + +static inline void viddec_emit_set_workload_error(viddec_emitter *emit, uint32_t error, uint32_t using_next) +{ + viddec_emitter_wkld *cur_wkld; + cur_wkld = (using_next == false)? &(emit->cur):&(emit->next); + cur_wkld->result |= error; + WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PM_WORKLOAD_STATUS, (int)cur_wkld->result, (int)cur_wkld->data, + (int)cur_wkld->num_items, using_next, 0, 0); +} + +static inline void viddec_emit_set_inband_tag(viddec_emitter *emit, uint32_t type, uint32_t using_next) +{ + viddec_emitter_wkld *cur_wkld; + viddec_workload_item_t item; + cur_wkld = (using_next == false)? 
&(emit->cur):&(emit->next); + item.vwi_type = type; + item.vwi_payload[0] = item.vwi_payload[1] = item.vwi_payload[2] = 0; + viddec_emit_append(cur_wkld, &item); +} + +#endif /* VIDDEC_EMITTER_H */ diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h new file mode 100644 index 0000000..cccc437 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h @@ -0,0 +1,80 @@ +#ifndef VIDDEC_FW_DEBUG_H +#define VIDDEC_FW_DEBUG_H + +//#define SWAP_BYTE(x,y,z) (( ( (x)>>(8*y))& 0xFF) << (8*z)) +#define SWAP_BYTE(x,y,z) (( ( (x) >> ((y) << 3))& 0xFF) << ((z) << 3)) +#define SWAP_WORD(x) ( SWAP_BYTE((x),0,3) | SWAP_BYTE((x),1,2) |SWAP_BYTE((x),2,1) |SWAP_BYTE((x),3,0)) + +#ifndef VBP + +#ifndef HOST_ONLY +#define _OSAL_IO_MEMMAP_H /* to prevent errors when including sven_devh.h */ +#define _OSAL_ASSERT_H /* to prevent errors when including sven_devh.h */ +#endif +#include +#include "viddec_debug.h" +#include "sven_devh.h" +#include "auto_eas/gen4_gv.h" + +#ifdef HOST_ONLY +#define DUMP_TO_MEM(x) DEB("0x%.08X ",x); +#define WRITE_SVEN(event, p1, p2, p3, p4, p5, p6) DEB("Sven evnt=0x%.8X p1=%d p2=%d p3=%d p4=%d p5=%d p6=%d\n",event, p1, p2, p3, p4, p5, p6) +#define read_ret(x) +#define read_fp(x) +#define read_sp(x) +#define read_wim(x) +#define read_psr(x) +#else +extern uint32_t dump_ptr; +/* Macros for Dumping data to DDR */ +#define DUMP_TO_MEM(x) ((volatile unsigned int *)0x8F000000)[dump_ptr++] = SWAP_WORD(x); +#define read_ret(x) asm("mov %%i7, %0\n":"=r" (x)) +#define read_fp(x) asm("mov %%i6, %0\n":"=r" (x)) +#define read_sp(x) asm("mov %%sp, %0\n":"=r" (x)) +#define read_wim(x) asm("mov %%wim, %0\n":"=r" (x)) +#define read_psr(x) asm("mov %%psr, %0\n":"=r" (x)) +#define WRITE_SVEN(event, p1, p2, p3, p4, p5, p6) devh_SVEN_WriteModuleEvent( NULL, event, p1, p2, p3, p4, p5, p6) +#endif + +#else // VBP is defined + +#include +#include "viddec_debug.h" +#define DUMP_TO_MEM(x) +#define WRITE_SVEN(event, p1, p2, p3, p4, p5, p6) +#define read_ret(x) +#define read_fp(x) +#define read_sp(x) +#define read_wim(x) +#define read_psr(x) + + +#endif + +static inline void DEBUG_WRITE(uint32_t p1, uint32_t p2, uint32_t p3, uint32_t p4, uint32_t p5, uint32_t p6) +{ + //uint32_t temp; + DUMP_TO_MEM(0xCACAFEED); + DUMP_TO_MEM(p1); + DUMP_TO_MEM(p2); + DUMP_TO_MEM(p3); + DUMP_TO_MEM(p4); + DUMP_TO_MEM(p5); + DUMP_TO_MEM(p6); + DUMP_TO_MEM(0xCACA0000); + //temp = dump_ptr; + //DUMP_TO_MEM(temp); +} +static inline void DUMP_SPARC_REG(void) +{ + uint32_t ret1, fp, sp, wim, psr; + read_ret(ret1); + read_fp(fp); + read_sp(sp); + read_wim(wim); + read_psr(psr); + //crash = (uint32_t *)0x1000bf0c; + //DEBUG_WRITE(sp, wim, fp, ret1, (*crash), 0xFED); + DEBUG_WRITE(sp, wim, fp, ret1, psr, 0xFFFFFFFF); +} +#endif diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h new file mode 100644 index 0000000..a77b645 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h @@ -0,0 +1,194 @@ +/* + + This file is provided under a dual BSD/GPLv2 license. When using or + redistributing this file, you may do so under either license. + + GPL LICENSE SUMMARY + + Copyright(c) 2005-2008 Intel Corporation. All rights reserved. + + This program is free software; you can redistribute it and/or modify + it under the terms of version 2 of the GNU General Public License as + published by the Free Software Foundation. 
+ + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. + The full GNU General Public License is included in this distribution + in the file called LICENSE.GPL. + + Contact Information: + Intel Corporation + 2200 Mission College Blvd. + Santa Clara, CA 97052 + + BSD LICENSE + + Copyright(c) 2005-2008 Intel Corporation. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+
+*/
+#ifndef VIDDEC_FW_PARSER_FW_IPC_H
+#define VIDDEC_FW_PARSER_FW_IPC_H 1
+
+#include "viddec_fw_parser_ipclib.h"
+
+/** Generic Firmware-to-host Message Send Queue */
+typedef struct
+{
+    struct IPC_MsgQueue         mq;  /* local MSGQueue handle */
+} FW_IPC_SendQue;
+
+/** Generic Host-to-Firmware Message Receive Queue */
+typedef struct
+{
+    struct IPC_MsgQueue         mq;  /* local MSGQueue handle */
+} FW_IPC_ReceiveQue;
+
+typedef struct
+{
+    unsigned int state;
+    unsigned int priority;
+}FW_IPC_stream_info;
+
+/* ---------------------------------------------------------------------- */
+/* ---------------------------------------------------------------------- */
+
+typedef struct
+{
+    /** Synchronous Message Buffer, shared between host and firmware */
+    volatile char *sync_msg_buf;
+
+    /** WARNING: EACH OF THESE STRUCTS MUST BE 8 BYTE ALIGNED */
+    FW_IPC_SendQue    snd_q[CONFIG_IPC_HOST_MAX_RX_QUEUES];
+
+    /** WARNING: EACH OF THESE STRUCTS MUST BE 8 BYTE ALIGNED */
+    FW_IPC_ReceiveQue rcv_q[CONFIG_IPC_FW_MAX_RX_QUEUES];
+    /** WARNING: EACH OF THESE STRUCTS MUST BE 8 BYTE ALIGNED */
+    FW_IPC_ReceiveQue wkld_q[CONFIG_IPC_FW_MAX_RX_QUEUES];
+
+    /** FIRMWARE_TO_HOST Message Queues (outbound) */
+    struct _IPC_QueueHeader *snd_q_shared[CONFIG_IPC_HOST_MAX_RX_QUEUES];
+    /** HOST_TO_FIRMWARE Message Queues (inbound) */
+    struct _IPC_QueueHeader *rcv_q_shared[CONFIG_IPC_FW_MAX_RX_QUEUES];
+    /** HOST_TO_FIRMWARE Message Queues (inbound) */
+    struct _IPC_QueueHeader *wkld_q_shared[CONFIG_IPC_FW_MAX_RX_QUEUES];
+    /** Actual qheaders allocated in FW memory */
+    struct _IPC_QueueHeader snd_qh[CONFIG_IPC_HOST_MAX_RX_QUEUES];
+    struct _IPC_QueueHeader rcv_qh[CONFIG_IPC_FW_MAX_RX_QUEUES];
+    struct _IPC_QueueHeader wkld_qh[CONFIG_IPC_FW_MAX_RX_QUEUES];
+
+    /** Stream related info like priority */
+    FW_IPC_stream_info strm_info[CONFIG_IPC_FW_MAX_RX_QUEUES];
+
+    unsigned int one_msg_size;
+    unsigned char one_msg[CONFIG_IPC_MESSAGE_MAX_SIZE];
+} FW_IPC_Handle;
+
+/*@}*/
+
+/** @weakgroup Host IPC Functions */
+/** @ingroup fw_ipc */
+/*@{*/
+
+/**
+This function allows us to check and see if there's space available on the send queue (output) of the fw
+for a message of size message_size. It also provides us the amount of space available.
+@param[in] fwipc : Ipc handle.
+@param[in] snd_q : Send queue to check for space.
+@param[in] message_size : size of message that we want to write.
+@param[out] bytes : returns the amount of space available for writing.
+@retval 0 : if space is not available for current message.
+@retval 1 : if space is available for current message.
+*/
+int FwIPC_SpaceAvailForMessage(FW_IPC_Handle *fwipc, FW_IPC_SendQue *snd_q, unsigned int message_size, unsigned int *bytes);
+
+/**
+This function writes the message of message_size into queue(host_rx_queue).
+@param[in] fwipc : Ipc handle.
+@param[in] host_rx_queue : id of the queue that needs to be written.
+@param[in] message : Message that we want to write.
+@param[in] message_size : size of message that we want to write.
+@retval 0 : if write fails.
+@retval 1 : if write succeeds.
+*/
+int FwIPC_SendMessage(FW_IPC_Handle *fwipc, unsigned int host_rx_queue, const char *message, unsigned int message_size );
+
+/**
+This function reads a message(which is <= max_message_size) from rcv_queue of firmware into input parameter message.
+@param[in] fwipc : Ipc handle.
+@param[in] rcv_q : Receive queue to read from.
+@param[out] message : Message that we want to read.
+@param[in] max_message_size : max possible size of the message.
+@retval : The size of message that was read.
+*/
+int FwIPC_ReadMessage(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, char *message, unsigned int max_message_size );
+
+/**
+This function initialises the shared queue headers and the sync command buffer for IPC.
+@param[in] fwipc : Ipc handle.
+@param[in] synchronous_command_buffer : update handle with pointer to shared memory
+           between host and FW.
+@retval 0 : if initialization succeeds.
+*/
+int FwIPC_Initialize(FW_IPC_Handle *fwipc, volatile char *synchronous_command_buffer );
+
+/**
+This function initialises the send queue with the circular buffer which holds the actual data.
+@param[in] fwipc : Ipc handle.
+@param[in] snd_q : Send queue that needs to be initialized.
+@param[in] snd_circbuf : Address of circular buffer.
+*/
+void FWIPC_SendQueue_Init(FW_IPC_Handle *fwipc, FW_IPC_SendQue *snd_q, void *snd_circbuf );
+
+/**
+This function initialises the receive queue with the circular buffer which holds the actual data.
+@param[in] fwipc : Ipc handle.
+@param[in] rcv_q : Receive queue that needs to be initialized.
+@param[in] rcv_circbuf : Address of circular buffer.
+*/
+void FwIPC_ReceiveQueue_Init(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, void *rcv_circbuf );
+
+/**
+This function reads the nth(index) message(which is <= max_message_size ) from rcv_queue of firmware into input parameter message
+by peeking the queue.
+@param[in] fwipc : Ipc handle.
+@param[in] rcv_q : Receive queue to read from.
+@param[out] message : Message that we want to read.
+@param[in] max_message_size : max possible size of the message.
+@param[in] index : nth message(index >=0).
+@retval : The size of message that was read.
+*/
+int FwIPC_PeekReadMessage(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, char *message, unsigned int max_message_size, unsigned int index );
+
+/*@}*/
+#endif /* VIDDEC_FW_PARSER_FW_IPC_H */
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h
new file mode 100644
index 0000000..4712be7
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h
@@ -0,0 +1,6 @@
+#ifndef VIDDEC_H264_PARSE_H
+#define VIDDEC_H264_PARSE_H
+
+void viddec_h264_get_ops(viddec_parser_ops_t *ops);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_mp4_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_mp4_parse.h
new file mode 100644
index 0000000..e3e795a
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_mp4_parse.h
@@ -0,0 +1,6 @@
+#ifndef VIDDEC_MP4_PARSE_H
+#define VIDDEC_MP4_PARSE_H
+
+void viddec_mp4_get_ops(viddec_parser_ops_t *ops);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_mpeg2_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_mpeg2_parse.h
new file mode 100644
index 0000000..7c0efea
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_mpeg2_parse.h
@@ -0,0 +1,6 @@
+#ifndef VIDDEC_MPEG2_PARSE_H
+#define VIDDEC_MPEG2_PARSE_H
+
+void viddec_mpeg2_get_ops(viddec_parser_ops_t *ops);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h
new file mode 100644
index 0000000..a61e340
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h
@@ -0,0 +1,106 @@
+#ifndef VIDDEC_PARSER_OPS_H
+#define VIDDEC_PARSER_OPS_H
+
+#include "viddec_fw_workload.h"
+
+#define VIDDEC_PARSE_INVALID_POS 0xFFFFFFFF
+
+typedef enum
+{
+    VIDDEC_PARSE_EOS = 0x0FFF, /* Dummy start code to force EOS */
+    VIDDEC_PARSE_DISCONTINUITY, /* Dummy start code to force completion and flush */
+}viddec_parser_inband_messages_t;
+
+typedef struct
+{
+    uint32_t context_size;
+    uint32_t persist_size;
+}viddec_parser_memory_sizes_t;
+
+typedef struct
+{
+    void     (*init)(void *ctxt, uint32_t *persist, uint32_t preserve);
+    uint32_t (*parse_sc) (void *ctxt, void *pcxt, void *sc_state);
+    uint32_t (*parse_syntax) (void *parent, void *ctxt);
+    void     (*get_cxt_size) (viddec_parser_memory_sizes_t *size);
+    uint32_t (*is_wkld_done)(void *parent, void *ctxt, uint32_t next_sc, uint32_t *codec_specific_errors);
+    uint32_t (*is_frame_start)(void *ctxt);
+    uint32_t (*gen_contrib_tags)(void *parent, uint32_t ignore_partial);
+    uint32_t (*gen_assoc_tags)(void *parent);
+}viddec_parser_ops_t;
+
+
+typedef enum
+{
+    VIDDEC_PARSE_ERROR   = 0xF0,
+    VIDDEC_PARSE_SUCESS  = 0xF1,
+    VIDDEC_PARSE_FRMDONE = 0xF2,
+}viddec_parser_error_t;
+
+/*
+ *
+ *Functions used by Parsers
+ *
+ */
+
+/* This function returns the requested number of bits(<=32) and increments au byte position.
+ */
+int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+/* This function returns the requested number of bits(<=32) without incrementing au byte position.
+ */
+int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+/* This function skips the requested number of bits(<=32) by incrementing au byte position.
+ */
+int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits);
+
+/* This function appends a work item to the current workload.
+ */
+int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item);
+
+/* This function appends a work item to the next workload.
+ */
+int32_t viddec_pm_append_workitem_next(void *parent, viddec_workload_item_t *item);
+
+/* This function gets current byte and bit positions and information on whether an emulation byte is present after
+the current byte.
+ */
+int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, unsigned char *is_emul);
+
+/* This function appends a Pixel tag to the current workload starting from the current position to the end of the au unit.
+ */
+int32_t viddec_pm_append_pixeldata(void *parent);
+
+/* This function appends a Pixel tag to the next workload starting from the current position to the end of the au unit.
+ */
+int32_t viddec_pm_append_pixeldata_next(void *parent);
+
+/* This function provides the workload header for parsers to fill in attribute values
+ */
+viddec_workload_t* viddec_pm_get_header(void *parent);
+
+/* This function provides the next workload header for parsers to fill in attribute values
+ */
+viddec_workload_t* viddec_pm_get_next_header(void *parent);
+
+/* Returns the byte value at the current offset */
+uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte);
+
+/* Tells us if there is more data that needs to be parsed */
+int32_t viddec_pm_is_nomoredata(void *parent);
+
+/* This function appends a misc tag to the workload starting from the start position to the end position of the au unit */
+int32_t viddec_pm_append_misc_tags(void *parent, uint32_t start, uint32_t end, viddec_workload_item_t *wi, uint32_t using_next);
+
+void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error);
+
+void viddec_pm_set_late_frame_detect(void *parent);
+
+static inline void viddec_fw_reset_workload_item(viddec_workload_item_t *wi)
+{
+    wi->vwi_payload[0] = wi->vwi_payload[1] = wi->vwi_payload[2] = 0;
+}
+
+void viddec_pm_setup_userdata(viddec_workload_item_t *wi);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h
new file mode 100644
index 0000000..6d1d2be
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h
@@ -0,0 +1,95 @@
+#ifndef VIDDEC_PM_H
+#define VIDDEC_PM_H
+
+#include
+#include "viddec_emitter.h"
+#include "viddec_pm_utils_list.h"
+#include "viddec_pm_utils_bstream.h"
+#include "viddec_pm_parse.h"
+#include "viddec_parser_ops.h"
+
+#define SC_DETECT_BUF_SIZE 1024
+#define MAX_CODEC_CXT_SIZE 4096
+
+typedef enum
+{
+    PM_SUCCESS = 0,
+    /* Messages to indicate more ES data */
+    PM_NO_DATA = 0x100,
+    /* Messages to indicate SC found */
+    PM_SC_FOUND = 0x200,
+    PM_FIRST_SC_FOUND = 0x201,
+    /* Messages to indicate Frame done */
+    PM_WKLD_DONE = 0x300,
+    /* Messages to indicate Error conditions */
+    PM_OVERFLOW = 0x400,
+    /* Messages to indicate inband conditions */
+    PM_INBAND_MESSAGES = 0x500,
+    PM_EOS = 0x501,
+    PM_DISCONTINUITY = 0x502,
+}pm_parse_state_t;
+
+/* This is a temporary structure for first pass sc parsing. index tells us where we are in the list of es buffers;
+   cur_es points to the current es buffer we are parsing. */
+typedef struct
+{
+    int32_t list_index; /* current index of list */
+    uint32_t cur_offset;
+    uint32_t cur_size;
+    viddec_input_buffer_t *cur_es;
+}viddec_pm_sc_cur_buf_t;
+
+typedef struct
+{
+    uint32_t pending_tags[MAX_IBUFS_PER_SC];
+    uint8_t dummy;
+    uint8_t frame_done;
+    uint8_t first_buf_aligned;
+    uint8_t using_next;
+}vidded_pm_pending_tags_t;
+
+/* This structure holds all necessary data required by parser manager for stream parsing.
+ */
+typedef struct
+{
+    /* Actual buffer where data gets DMA'd. 8 padding bytes for alignment */
+    uint8_t scbuf[SC_DETECT_BUF_SIZE + 8];
+    viddec_sc_parse_cubby_cxt_t parse_cubby;
+    viddec_pm_utils_list_t list;
+    /* Place to store tags to be added to the next-to-next workload */
+    viddec_pm_sc_cur_buf_t cur_buf;
+    viddec_emitter emitter;
+    viddec_pm_utils_bstream_cxt_t getbits;
+    viddec_sc_prefix_state_t sc_prefix_info;
+    vidded_pm_pending_tags_t pending_tags;
+    uint8_t word_align_dummy;
+    uint8_t late_frame_detect;
+    uint8_t frame_start_found;
+    uint8_t found_fm_st_in_current_au;
+    uint32_t next_workload_error_eos;
+    uint32_t pending_inband_tags;
+#ifdef VBP
+    uint32_t codec_data[MAX_CODEC_CXT_SIZE<<3];
+#else
+    uint32_t codec_data[MAX_CODEC_CXT_SIZE>>2];
+#endif
+}viddec_pm_cxt_t;
+
+/*
+ *
+ * Functions used by Parser kernel
+ *
+ */
+
+/* This is for initialising parser manager context to default values */
+void viddec_pm_init_context(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t *persist_mem, uint32_t clean);
+
+/* This is the main parse function which returns state information that parser kernel can understand.*/
+uint32_t viddec_pm_parse_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf);
+
+void viddec_pm_init_ops();
+
+void viddec_pm_update_time(viddec_pm_cxt_t *cxt, uint32_t time);
+
+uint32_t viddec_pm_get_parser_sizes(uint32_t codec_type, viddec_parser_memory_sizes_t *size);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h
new file mode 100644
index 0000000..703d65d
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h
@@ -0,0 +1,24 @@
+#ifndef VIDDEC_PM_PARSE_H
+#define VIDDEC_PM_PARSE_H
+
+#include
+/* This structure is used by first pass parsing (sc detect); the pm passes information on the number of bytes
+   that need to be parsed, and if a start code is found then sc_end_pos contains the index of the last sc code byte
+   in the current buffer */
+typedef struct
+{
+    uint32_t size; /* size pointed to by buf */
+    uint8_t *buf;  /* ptr to data */
+    int32_t sc_end_pos; /* return value end position of sc */
+    uint32_t phase; /* phase information(state) for sc */
+}viddec_sc_parse_cubby_cxt_t;
+
+typedef struct
+{
+    uint16_t next_sc;
+    uint8_t second_scprfx_length;
+    uint8_t first_sc_detect;
+}viddec_sc_prefix_state_t;
+
+uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_tags.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_tags.h
new file mode 100644
index 0000000..f035e53
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_tags.h
@@ -0,0 +1,17 @@
+#ifndef VIDDEC_PM_TAGS_H
+#define VIDDEC_PM_TAGS_H
+
+#include "viddec_pm.h"
+#include "viddec_emitter.h"
+
+/* Define to initialize temporary association list */
+#define INVALID_ENTRY ((uint32_t) -1)
+
+void viddec_pm_generate_tags_for_unused_buffers_to_flush(viddec_pm_cxt_t *cxt);
+uint32_t viddec_generic_add_association_tags(void *parent);
+uint32_t viddec_h264_add_association_tags(void *parent);
+uint32_t viddec_mpeg2_add_association_tags(void *parent);
+uint32_t viddec_pm_lateframe_generate_contribution_tags(void *parent, uint32_t ignore_partial);
+uint32_t viddec_pm_generic_generate_contribution_tags(void *parent, uint32_t ignore_partial);
+uint32_t viddec_pm_generate_missed_association_tags(viddec_pm_cxt_t *cxt, uint32_t using_next);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h
new file mode 100644
index 0000000..1971a36
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h
@@ -0,0 +1,81 @@
+#ifndef VIDDEC_PM_UTILS_BSTREAM_H
+#define VIDDEC_PM_UTILS_BSTREAM_H
+
+#include "viddec_pm_utils_list.h"
+
+#define CUBBY_SIZE 1024
+//#define CUBBY_SIZE 512
+#define SCRATCH_SIZE 20
+#define MIN_DATA     8
+
+typedef struct
+{
+#ifdef VBP
+    uint8_t *buf;
+#else
+    uint8_t buf[CUBBY_SIZE + 8 + MIN_DATA];/* extra 8 bytes for alignment, extra 8 bytes for old data */
+#endif
+    uint32_t buf_st; /* start pos in buf */
+    uint32_t buf_end; /* first invalid byte in buf */
+    uint32_t buf_index; /* current index in buf */
+    uint32_t buf_bitoff; /* bit offset in current index position */
+}viddec_pm_utils_bstream_buf_cxt_t;
+
+typedef struct
+{
+    uint8_t buf_scratch[SCRATCH_SIZE];/* scratch for boundary reads*/
+    uint32_t st; /* start index of valid byte */
+    uint32_t size;/* Total number of bytes in current buffer */
+    uint32_t bitoff; /* bit offset in first valid byte */
+}viddec_pm_utils_bstream_scratch_cxt_t;
+
+typedef struct
+{
+#ifdef VBP
+    /* counter of emulation prevention bytes */
+    uint32_t emulation_byte_counter;
+#endif
+    /* After the first pass of the scan we figure out how many bytes are in the current access unit (N bytes). We store
+       the bstream buffer's first valid byte index relative to the access unit in this variable */
+    uint32_t au_pos;
+    /* This is for keeping track of which list item was used to load data last */
+    uint32_t list_off;
+    /* This is for tracking emulation prevention bytes */
+    uint32_t phase;
+    /* This flag tells us whether to look for emulation prevention or not */
+    uint32_t is_emul_reqd;
+    /* A pointer to list of es buffers which contribute to current access unit */
+    viddec_pm_utils_list_t *list;
+    /* scratch buffer to stage data on boundaries and reloads */
+    viddec_pm_utils_bstream_scratch_cxt_t scratch;
+    /* Actual context which has valid data for get bits functionality */
+    viddec_pm_utils_bstream_buf_cxt_t bstrm_buf;
+}viddec_pm_utils_bstream_cxt_t;
+
+void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul);
+
+int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits);
+
+int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip);
+
+int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t *cxt, uint8_t *byte);
+
+uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt);
+
+uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cxt);
+
+static inline void viddec_pm_utils_bstream_get_au_offsets(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *bit, uint32_t *byte, uint8_t *is_emul)
+{
+    uint32_t phase=cxt->phase;
+
+    *bit  = cxt->bstrm_buf.buf_bitoff;
+    *byte = cxt->au_pos + (cxt->bstrm_buf.buf_index - cxt->bstrm_buf.buf_st);
+    if(cxt->phase > 0)
+    {
+        phase = phase - ((cxt->bstrm_buf.buf_bitoff != 0)? 1: 0 );
+    }
+    *is_emul = (cxt->is_emul_reqd) && (phase > 0) &&
+               (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index] == 0) &&
+               (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x3);
+}
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h
new file mode 100644
index 0000000..98f2d46
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h
@@ -0,0 +1,51 @@
+#ifndef VIDDEC_PM_COMMON_LIST_H
+#define VIDDEC_PM_COMMON_LIST_H
+
+#include "viddec_emitter.h"
+
+/* Limitation: this is the maximum number of es buffers between start codes. Needs to change if we encounter
+   a case where this is not sufficient */
+#ifdef VBP
+#define MAX_IBUFS_PER_SC 512
+#else
+#define MAX_IBUFS_PER_SC 64
+#endif
+
+/* This structure is for storing information on byte position in the current access unit.
+   stpos is the au byte index of the first byte in the current es buffer. edpos is the au byte index+1 of the last
+   valid byte in the current es buffer.*/
+typedef struct
+{
+    uint32_t stpos;
+    uint32_t edpos;
+}viddec_pm_utils_au_bytepos_t;
+
+/* this structure is for storing all necessary information for list handling */
+typedef struct
+{
+    uint16_t num_items;            /* Number of buffers in List */
+    uint16_t first_scprfx_length;  /* Length of first sc prefix in this list */
+    int32_t start_offset;          /* starting offset of unused data including sc prefix in first buffer */
+    int32_t end_offset;            /* Offset of unused data in last buffer including 2nd sc prefix */
+    viddec_input_buffer_t sc_ibuf[MAX_IBUFS_PER_SC]; /* Place to store buffer descriptors */
+    viddec_pm_utils_au_bytepos_t data[MAX_IBUFS_PER_SC]; /* place to store au byte positions */
+    int32_t total_bytes;           /* total bytes for current access unit including first sc prefix*/
+}viddec_pm_utils_list_t;
+
+/* This function initialises the list to default values */
+void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt);
+
+/* This function adds a new entry to the list and will emit tags if needed */
+uint32_t viddec_pm_utils_list_addbuf(viddec_pm_utils_list_t *list, viddec_input_buffer_t *es_buf);
+
+/* This function updates au byte position of the current list. This should be called after sc codes are detected and before
+   syntax parsing as get bits requires this to be initialized. */
+void viddec_pm_utils_list_updatebytepos(viddec_pm_utils_list_t *list, uint8_t sc_prefix_length);
+
+/* This function walks through the list and removes consumed buffers based on total bytes. It then moves
+   unused entries to the top of the list. */
+void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint32_t length);
+
+/* This function returns 1 if the requested byte is not found. If found, it returns the list index and offset into the list */
+uint32_t viddec_pm_utils_list_getbyte_position(viddec_pm_utils_list_t *list, uint32_t byte, uint32_t *list_index, uint32_t *offset);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_vc1_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_vc1_parse.h
new file mode 100644
index 0000000..c77aed1
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_vc1_parse.h
@@ -0,0 +1,6 @@
+#ifndef VIDDEC_VC1_PARSE_H
+#define VIDDEC_VC1_PARSE_H
+
+void viddec_vc1_get_ops(viddec_parser_ops_t *ops);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/main.c b/mix_vbp/viddec_fw/fw/parser/main.c
new file mode 100644
index 0000000..1bb368a
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/main.c
@@ -0,0 +1,608 @@
+#include "fw_pvt.h"
+#include "viddec_fw_parser_ipclib_config.h"
+#include "viddec_fw_common_defs.h"
+#include "viddec_fw_parser.h"
+#include "viddec_fw_debug.h"
+
+/* This define makes sure that the structure is stored in Local memory.
+   This is shared memory between host and FW.*/
+volatile dmem_t _dmem __attribute__ ((section (".exchange")));
+/* Debug index should be disabled for Production FW */
+uint32_t dump_ptr=0;
+uint32_t timer=0;
+
+/* Auto Api definitions */
+ismd_api_group viddec_fw_api_array[2];
+
+extern void viddec_fw_parser_register_callbacks(void);
+
+/*------------------------------------------------------------------------------
+ * Function: initialize firmware SVEN TX Output
+ *------------------------------------------------------------------------------
+ */
+int SMDEXPORT viddec_fw_parser_sven_init(struct SVEN_FW_Globals *sven_fw_globals )
+{
+    extern int sven_fw_set_globals(struct SVEN_FW_Globals *fw_globals );
+    return(sven_fw_set_globals(sven_fw_globals));
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_check_watermark_boundary
+ * This function figures out if we crossed the watermark boundary on input data.
+ * before represents the ES Queue data when we started and current represents ES Queue data
+ * when we are ready to swap. Threshold is the amount of data specified by the driver to trigger an
+ * interrupt.
+ * We return true if threshold is between before and current.
+ *------------------------------------------------------------------------------
+ */
+static inline uint32_t viddec_fw_check_watermark_boundary(uint32_t before, uint32_t current, uint32_t threshold)
+{
+    return ((before >= threshold) && (current < threshold));
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_get_total_input_Q_data
+ * This function figures out how much data is available in the input queue of the FW
+ *------------------------------------------------------------------------------
+ */
+static uint32_t viddec_fw_get_total_input_Q_data(uint32_t indx)
+{
+    FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+    uint32_t ret;
+    int32_t pos=0;
+    FW_IPC_ReceiveQue *rcv_q;
+
+    rcv_q = &fwipc->rcv_q[indx];
+    /* count the cubby buffer which we already read if present */
+    ret = (_dmem.stream_info[indx].buffered_data) ? CONFIG_IPC_MESSAGE_MAX_SIZE:0;
+    ret += ipc_mq_read_avail(&rcv_q->mq, (int32_t *)&pos);
+    return ret;
+}
+
+/*------------------------------------------------------------------------------
+ * Function: mfd_round_robin
+ * Params:
+ *        [in]  pri: Priority of the stream
+ *        [in] indx: stream id number of the last stream that was scheduled.
+ *        [out]qnum: Stream id of priority(pri) which has data.
+ * This function is responsible for figuring out which stream needs to be scheduled next.
+ * It starts after the last scheduled stream and walks through all streams until it finds
+ * a stream which is of the required priority, is in start state, and has space on output and data on
+ * input.
+ * If no such stream is found, qnum is not updated and the return value is 0.
+ * If a stream is found, qnum is updated with that id and the function returns 1.
+ *------------------------------------------------------------------------------
+ */
+
+uint32_t mfd_round_robin(uint32_t pri, int32_t *qnum, int32_t indx)
+{
+    FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+    int32_t i = CONFIG_IPC_FW_MAX_RX_QUEUES;
+    uint32_t ret = 0;
+    /* Go through all queues until we find a valid queue of the required priority */
+    while(i>0)
+    {
+        indx++;
+        if(indx >= CONFIG_IPC_FW_MAX_RX_QUEUES) indx = 0;
+
+        /* We should look only at queues which match the priority and
+           are in running state */
+        if( (_dmem.stream_info[indx].state == 1)
+            && (_dmem.stream_info[indx].priority == pri))
+        {
+            uint32_t inpt_avail=0, output_avail=0, wklds_avail =0 , pos;
+            FW_IPC_ReceiveQue *rcv_q;
+            rcv_q = &fwipc->rcv_q[indx];
+            inpt_avail = (_dmem.stream_info[indx].buffered_data > 0) || (ipc_mq_read_avail(&rcv_q->mq, (int32_t *)&pos) > 0);
+            /* We have to check for two workloads to protect against error cases where we might have to push both current and next workloads */
+            output_avail = FwIPC_SpaceAvailForMessage(fwipc, &fwipc->snd_q[indx], CONFIG_IPC_MESSAGE_MAX_SIZE, &pos) >= 2;
+            pos = 0;
+            /* Need at least current and next to proceed */
+            wklds_avail = (ipc_mq_read_avail(&fwipc->wkld_q[indx].mq, (int32_t *)&pos) >= (CONFIG_IPC_MESSAGE_MAX_SIZE << 1));
+            if(inpt_avail && output_avail && wklds_avail)
+            {/* Success condition: we have some data on input and enough space on the output queue */
+                *qnum = indx;
+                ret = 1;
+                break;
+            }
+        }
+        i--;
+    }
+    return ret;
+}
+static inline void mfd_setup_emitter(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, mfd_pk_strm_cxt *cxt)
+{
+    int32_t ret1=0,ret=0;
+    /* We don't check return values for the peeks as round robin guarantees that we have the required free workloads */
+    ret = FwIPC_PeekReadMessage(fwipc, rcv_q, (char *)&(cxt->wkld1), sizeof(ipc_msg_data), 0);
+    ret1 = FwIPC_PeekReadMessage(fwipc, rcv_q, (char *)&(cxt->wkld2), sizeof(ipc_msg_data), 1);
+    viddec_emit_update(&(cxt->pm.emitter), cxt->wkld1.phys, cxt->wkld2.phys, cxt->wkld1.len, cxt->wkld2.len);
+}
+
+static inline void mfd_init_swap_memory(viddec_pm_cxt_t *pm, uint32_t codec_type, uint32_t start_addr, uint32_t clean)
+{
+    uint32_t *persist_mem;
+    persist_mem = (uint32_t *)(start_addr | GV_DDR_MEM_MASK);
+    viddec_pm_init_context(pm,codec_type, persist_mem, clean);
+    pm->sc_prefix_info.first_sc_detect = 1;
+    viddec_emit_init(&(pm->emitter));
+}
+
+void output_omar_wires( unsigned int value )
+{
+#ifdef RTL_SIMULATION
+    reg_write(CONFIG_IPC_ROFF_HOST_DOORBELL, value );
+#endif
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_init_swap_memory
+ * This function is responsible for setting the swap memory to a good state for the current stream.
+ * The swap parameter tells us whether we need to DMA the context to local memory.
+ * We call init on the emitter and the parser manager, which in turn calls init of the codec we are opening the stream for.
+ *------------------------------------------------------------------------------
+ */
+
+void viddec_fw_init_swap_memory(unsigned int stream_id, unsigned int swap, unsigned int clean)
+{
+    mfd_pk_strm_cxt *cxt;
+    mfd_stream_info *cxt_swap;
+    cxt = (mfd_pk_strm_cxt *)&(_dmem.srm_cxt);
+    cxt_swap = (mfd_stream_info *)&(_dmem.stream_info[stream_id]);
+
+    if(swap)
+    {/* Swap context into local memory */
+        cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), false, false);
+    }
+
+    {
+        mfd_init_swap_memory(&(cxt->pm), cxt_swap->strm_type, cxt_swap->ddr_cxt+cxt_swap->cxt_size, clean);
+        cxt_swap->wl_time = 0;
+        cxt_swap->es_time = 0;
+    }
+    if(swap)
+    {/* Swap context into DDR */
+        cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), true, false);
+    }
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_push_current_frame_to_output
+ * This is a helper function to read a workload from the input queue and push it to the output queue.
+ * This is called when we are done with a frame.
+ *------------------------------------------------------------------------------
+ */
+static inline void viddec_fw_push_current_frame_to_output(FW_IPC_Handle *fwipc, uint32_t cur)
+{
+    ipc_msg_data wkld_to_push;
+    FwIPC_ReadMessage(fwipc, &fwipc->wkld_q[cur], (char *)&(wkld_to_push), sizeof(ipc_msg_data));
+    FwIPC_SendMessage(fwipc, cur, (char *)&(wkld_to_push), sizeof(ipc_msg_data));
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_get_next_stream_to_schedule
+ * This is a helper function to figure out which active stream needs to be scheduled next.
+ * If none of the streams are active it returns -1.
+ *------------------------------------------------------------------------------
+ */
+static inline int viddec_fw_get_next_stream_to_schedule(void)
+{
+    int32_t cur = -1;
+
+    if(mfd_round_robin(viddec_stream_priority_REALTIME, &cur, _dmem.g_pk_data.high_id))
+    {
+        /* On success store the stream id */
+        _dmem.g_pk_data.high_id = cur;
+    }
+    else
+    {
+        /* Check the low priority queues, since we couldn't find a valid realtime stream */
+        if(mfd_round_robin(viddec_stream_priority_BACKGROUND, &cur, _dmem.g_pk_data.low_id))
+        {
+            _dmem.g_pk_data.low_id = cur;
+        }
+    }
+
+    return cur;
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_update_pending_interrupt_flag
+ * This is a helper function to figure out if we need to mark an interrupt pending for this stream.
+ * We update the status value here if we find that any of the interrupt conditions are true.
+ * If this stream has an interrupt pending which we could not send to the host, we don't overwrite past status info.
+ *------------------------------------------------------------------------------ + */ +static inline void viddec_fw_update_pending_interrupt_flag(int32_t cur, mfd_stream_info *cxt_swap, uint8_t pushed_a_workload, + uint32_t es_Q_data_at_start) +{ + if(_dmem.int_status[cur].mask) + { + if(!cxt_swap->pending_interrupt) + { + uint32_t es_Q_data_now; + uint8_t wmark_boundary_reached=false; + es_Q_data_now = viddec_fw_get_total_input_Q_data((uint32_t)cur); + wmark_boundary_reached = viddec_fw_check_watermark_boundary(es_Q_data_at_start, es_Q_data_now, cxt_swap->low_watermark); + _dmem.int_status[cur].status = 0; + if(pushed_a_workload) + { + _dmem.int_status[cur].status |= VIDDEC_FW_WKLD_DATA_AVAIL; + } + if(wmark_boundary_reached) + { + _dmem.int_status[cur].status |= VIDDEC_FW_INPUT_WATERMARK_REACHED; + } + cxt_swap->pending_interrupt = ( _dmem.int_status[cur].status != 0); + } + } + else + { + cxt_swap->pending_interrupt = false; + } +} + +static inline void viddec_fw_handle_error_and_inband_messages(int32_t cur, uint32_t pm_ret) +{ + FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem); + + viddec_fw_push_current_frame_to_output(fwipc, cur); + switch(pm_ret) + { + case PM_EOS: + case PM_OVERFLOW: + { + viddec_fw_init_swap_memory(cur, false, true); + } + break; + case PM_DISCONTINUITY: + { + viddec_fw_init_swap_memory(cur, false, false); + } + break; + default: + break; + } +} + +void viddec_fw_debug_scheduled_stream_state(int32_t indx, int32_t start) +{ + FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem); + uint32_t inpt_avail=0, output_avail=0, wklds_avail =0 , pos; + FW_IPC_ReceiveQue *rcv_q; + uint32_t message; + + message = (start) ? SVEN_MODULE_EVENT_GV_FW_PK_SCHDL_STRM_START: SVEN_MODULE_EVENT_GV_FW_PK_SCHDL_STRM_END; + rcv_q = &fwipc->rcv_q[indx]; + inpt_avail = ipc_mq_read_avail(&rcv_q->mq, (int32_t *)&pos); + inpt_avail += ((_dmem.stream_info[indx].buffered_data > 0) ? CONFIG_IPC_MESSAGE_MAX_SIZE: 0); + inpt_avail = inpt_avail >> 4; + pos = 0; + output_avail = ipc_mq_read_avail(&fwipc->snd_q[indx].mq, (int32_t *)&pos); + output_avail = output_avail >> 4; + pos = 0; + wklds_avail = ipc_mq_read_avail(&fwipc->wkld_q[indx].mq, (int32_t *)&pos); + wklds_avail = wklds_avail >> 4; + WRITE_SVEN(message, (int)indx, (int)inpt_avail, (int)output_avail, + (int)wklds_avail, 0, 0); +} + +/*------------------------------------------------------------------------------ + * Function: viddec_fw_process_async_queues(A.K.A -> Parser Kernel) + * This function is responsible for handling the asynchronous queues. + * + * The first step is to figure out which stream to run. The current algorithm + * will go through all high priority queues for a valid stream, if not found we + * go through lower priority queues. + * + * If a valid stream is found we swap the required context from DDR to DMEM and do all necessary + * things to setup the stream. + * Once a stream is setup we call the parser manager and wait until a wrkld is created or no more input + * data left. + * Once we find a wkld we push it to host and save the current context to DDR. 
+ *------------------------------------------------------------------------------ + */ + +static inline int32_t viddec_fw_process_async_queues() +{ + int32_t cur = -1; + + cur = viddec_fw_get_next_stream_to_schedule(); + + if(cur != -1) + { + FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem); + FW_IPC_ReceiveQue *rcv_q; + /* bits captured by OMAR */ + output_omar_wires( 0x0 ); + rcv_q = &fwipc->rcv_q[cur]; + { + mfd_pk_strm_cxt *cxt; + mfd_stream_info *cxt_swap; + cxt = (mfd_pk_strm_cxt *)&(_dmem.srm_cxt); + cxt_swap = (mfd_stream_info *)&(_dmem.stream_info[cur]); + + /* Step 1: Swap rodata to local memory. Not doing this currently as all the rodata fits in local memory. */ + {/* Step 2: Swap context into local memory */ + cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), false, false); + } + /* Step 3:setup emitter by reading input data and workloads and initialising it */ + mfd_setup_emitter(fwipc, &fwipc->wkld_q[cur], cxt); + viddec_fw_debug_scheduled_stream_state(cur, true); + /* Step 4: Call Parser Manager until workload done or No more ES buffers */ + { + ipc_msg_data *data = 0; + uint8_t stream_active = true, pushed_a_workload=false; + uint32_t pm_ret = PM_SUCCESS, es_Q_data_at_start; + uint32_t start_time, time=0; + + start_time = set_wdog(VIDDEC_WATCHDOG_COUNTER_MAX); + timer=0; + es_Q_data_at_start = viddec_fw_get_total_input_Q_data((uint32_t)cur); + do + { + output_omar_wires( 0x1 ); + { + uint32_t es_t0,es_t1; + get_wdog(&es_t0); + pm_ret = viddec_pm_parse_es_buffer(&(cxt->pm), cxt_swap->strm_type, data); + get_wdog(&es_t1); + cxt_swap->es_time += get_total_ticks(es_t0, es_t1); + } + switch(pm_ret) + { + case PM_EOS: + case PM_WKLD_DONE: + case PM_OVERFLOW: + case PM_DISCONTINUITY: + {/* Finished a frame worth of data or encountered fatal error*/ + stream_active = false; + } + break; + case PM_NO_DATA: + { + uint32_t next_ret=0; + if ( (NULL != data) && (0 != cxt_swap->es_time) ) + { + /* print performance info for this buffer */ + WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PK_ES_DONE, (int)cur, (int)cxt_swap->es_time, (int)cxt->input.phys, + (int)cxt->input.len, (int)cxt->input.id, (int)cxt->input.flags ); + cxt_swap->es_time = 0; + } + + next_ret = FwIPC_ReadMessage(fwipc, rcv_q, (char *)&(cxt->input), sizeof(ipc_msg_data)); + if(next_ret != 0) + { + data = &(cxt->input); + WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PK_ES_START, (int)cur, (int)cxt_swap->wl_time, + (int)cxt->input.phys, (int)cxt->input.len, (int)cxt->input.id, (int)cxt->input.flags ); + } + else + {/* No data on input queue */ + cxt_swap->buffered_data = 0; + stream_active = false; + } + } + break; + default: + {/* Not done with current buffer */ + data = NULL; + } + break; + } + }while(stream_active); + get_wdog(&time); + cxt_swap->wl_time += get_total_ticks(start_time, time); + /* Step 5: If workload done push workload out */ + switch(pm_ret) + { + case PM_EOS: + case PM_WKLD_DONE: + case PM_OVERFLOW: + case PM_DISCONTINUITY: + {/* Push current workload as we are done with the frame */ + cxt_swap->buffered_data = (PM_WKLD_DONE == pm_ret) ? 
true: false;
+                        viddec_pm_update_time(&(cxt->pm), cxt_swap->wl_time);
+
+                        /* xmit performance info for this workload output */
+                        WRITE_SVEN( SVEN_MODULE_EVENT_GV_FW_PK_WL_DONE, (int)cur, (int)cxt_swap->wl_time, (int)cxt->wkld1.phys,
+                                    (int)cxt->wkld1.len, (int)cxt->wkld1.id, (int)cxt->wkld1.flags );
+                        cxt_swap->wl_time = 0;
+
+                        viddec_fw_push_current_frame_to_output(fwipc, cur);
+                        if(pm_ret != PM_WKLD_DONE)
+                        {
+                            viddec_fw_handle_error_and_inband_messages(cur, pm_ret);
+                        }
+                        pushed_a_workload = true;
+                    }
+                    break;
+                    default:
+                    break;
+                }
+                /* Update information on whether we have an active interrupt for this stream */
+                viddec_fw_update_pending_interrupt_flag(cur, cxt_swap, pushed_a_workload, es_Q_data_at_start);
+            }
+            viddec_fw_debug_scheduled_stream_state(cur, false);
+            /* Step 6: swap context into DDR */
+            {
+                cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), true, false);
+            }
+        }
+
+    }
+    return cur;
+}
+
+
+/*------------------------------------------------------------------------------
+ * Function: process_command
+ * This magic function figures out which function to execute based on the auto API.
+ *------------------------------------------------------------------------------
+ */
+
+static inline void process_command(uint32_t cmd_id, unsigned char *command)
+{
+    int32_t groupid = ((cmd_id >> 24) - 13) & 0xff;
+    int32_t funcid = cmd_id & 0xffffff;
+    /* writing func pointer to host doorbell */
+    output_omar_wires( (int) viddec_fw_api_array[groupid].unmarshal[funcid] );
+    WRITE_SVEN( SVEN_MODULE_EVENT_GV_FW_AUTOAPI_CMD,(int) cmd_id, (int) command, ((int *)command)[0],
+                ((int *)command)[1], ((int *)command)[2], ((int *)command)[3] );
+
+    viddec_fw_api_array[groupid].unmarshal[funcid](0, command);
+
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_process_sync_queues (A.K.A. the auto API)
+ * Params:
+ *        [in] msg: common sync structure where all required parameters are present for the auto API.
+ *
+ * This function is responsible for handling synchronous messages. All synchronous messages
+ * are handled through the auto API.
+ * What are synchronous messages? Anything related to teardown or opening a stream, e.g. open, close, flush etc.
+ *
+ * Only one synchronous message is handled at a time. When a synchronous message arrives, its id is in the CP doorbell.
+ * Once we are done handling the synchronous message through the auto API we release the doorbell to let the host write
+ * the next message.
+ *------------------------------------------------------------------------------
+ */
+
+static inline int32_t viddec_fw_process_sync_queues(unsigned char *msg)
+{
+    int32_t ret = -1;
+
+    if(0 == reg_read(CONFIG_IPC_ROFF_RISC_DOORBELL_STATUS))
+    {
+        uint32_t command1=0;
+        command1 = reg_read(CONFIG_IPC_ROFF_RISC_RX_DOORBELL);
+        process_command(command1, msg);
+        reg_write(CONFIG_IPC_ROFF_RISC_DOORBELL_STATUS, 0x2); /* Inform the host that we are done with this message */
+        ret = 0;
+    }
+    return ret;
+}
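A worked decode of the dispatch arithmetic above; the sample cmd_id is made up for illustration, while the constant 13 and the array layout come from the code:

    /* Illustrative only: a hypothetical cmd_id of 0x0E000005 gives
       groupid = ((0x0E000005 >> 24) - 13) & 0xff = (14 - 13) & 0xff = 1
       funcid  =   0x0E000005 & 0xffffff                            = 5
       so dispatch would call viddec_fw_api_array[1].unmarshal[5](0, command). */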
+ *------------------------------------------------------------------------------ + */ +static inline uint32_t viddec_fw_check_for_pending_int(void) +{ + uint32_t i=0, ret=false; + /* start from 0 to max streams that fw can handle*/ + while(i < FW_SUPPORTED_STREAMS) + { + if(_dmem.stream_info[i].state == 1) + { + if((_dmem.stream_info[i].pending_interrupt) && _dmem.int_status[i].mask) + { + ret = true; + } + else + {/* If this is not in INT state clear the status before sending it to host */ + _dmem.int_status[i].status = 0; + } + } + i++; + } + return ret; +} + +/*------------------------------------------------------------------------------ + * Function: viddec_fw_clear_processed_int + * This function walks through all active streams to clear pending interrupt state.This is + * called after a INT was issued. + *------------------------------------------------------------------------------ + */ +static inline void viddec_fw_clear_processed_int(void) +{ + uint32_t i=0; + /* start from 0 to max streams that fw can handle*/ + while(i < FW_SUPPORTED_STREAMS) + { + //if(_dmem.stream_info[i].state == 1) + _dmem.stream_info[i].pending_interrupt = false; + i++; + } + return; +} + +/*------------------------------------------------------------------------------ + * Function: viddec_fw_int_host + * This function interrupts host if data is available for host or any other status + * is valid which the host configures the FW to. + * There is only one interrupt line so this is a shared Int for all streams, Host should + * look at status of all streams when it receives a Int. + * The FW will interrupt the host only if host doorbell is free, in other words the host + * should always make the doorbell free at the End of its ISR. + *------------------------------------------------------------------------------ + */ + +static inline int32_t viddec_fw_int_host() +{ + /* We Interrupt the host only if host is ready to receive an interrupt */ + if((reg_read(CONFIG_IPC_ROFF_HOST_DOORBELL_STATUS) & GV_DOORBELL_STATS) == GV_DOORBELL_STATS) + { + if(viddec_fw_check_for_pending_int()) + { + /* If a pending interrupt is found trigger INT */ + reg_write(CONFIG_IPC_ROFF_HOST_DOORBELL, VIDDEC_FW_PARSER_IPC_HOST_INT); + /* Clear all stream's pending Interrupt info since we use a global INT for all streams */ + viddec_fw_clear_processed_int(); + } + } + return 1; +} +volatile unsigned int stack_corrupted __attribute__ ((section (".stckovrflwchk"))); +/*------------------------------------------------------------------------------ + * Function: main + * This function is the main firmware function. Its a infinite loop where it polls + * for messages and processes them if they are available. Currently we ping pong between + * synchronous and asynchronous messages one at a time. If we have multiple aysnchronous + * queues we always process only one between synchronous messages. + * + * For multiple asynchronous queues we round robin through the high priorities first and pick + * the first one available. Next time when we come around for asynchronous message we start + * from the next stream onwards so this guarantees that we give equal time slices for same + * priority queues. If no high priority queues are active we go to low priority queues and repeat + * the same process. 
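The doorbell contract above implies a host-side ISR of roughly the following shape. This is a sketch under assumed names only; none of these host symbols exist in this commit:

    /* Hypothetical host-side ISR matching the contract described above:
       one shared INT, so poll every stream's status, then free the
       doorbell last so the FW can interrupt again. */
    void host_parser_isr(void)
    {
        int i;
        for (i = 0; i < HOST_MAX_STREAMS; i++)   /* shared INT: check all streams */
            host_handle_stream_status(i);        /* consume per-stream status     */
        host_free_fw_doorbell();                 /* must be last: re-arms the FW  */
    }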
+/*------------------------------------------------------------------------------
+ * Function: main
+ * This function is the main firmware function. It is an infinite loop where it polls
+ * for messages and processes them if they are available. Currently we ping-pong between
+ * synchronous and asynchronous messages one at a time. If we have multiple asynchronous
+ * queues we always process only one between synchronous messages.
+ *
+ * For multiple asynchronous queues we round robin through the high priorities first and pick
+ * the first one available. Next time when we come around for an asynchronous message we start
+ * from the next stream onwards, so this guarantees that we give equal time slices to same
+ * priority queues. If no high priority queues are active we go to the low priority queues and
+ * repeat the same process.
+ *------------------------------------------------------------------------------
+ */
+
+int main(void)
+{
+    unsigned char *msg = (uint8_t *)&(_dmem.buf.data[0]);
+
+    /* We wait until the host reads the sync message */
+    reg_write(CONFIG_IPC_ROFF_HOST_RX_DOORBELL, GV_FW_IPC_HOST_SYNC);
+
+    while ( GV_DOORBELL_STATS != reg_read(CONFIG_IPC_ROFF_HOST_DOORBELL_STATUS) )
+    { /* poll the register until the done bit is set */
+        /* Host re-writes Vsparc DRAM (BSS) in this loop and will hit the DONE bit when complete */
+    }
+    enable_intr();
+    /* Initialize state for queues */
+    viddec_fw_parser_register_callbacks();
+    FwIPC_Initialize(GET_IPC_HANDLE(_dmem), (volatile char *)msg);
+    _dmem.g_pk_data.high_id = _dmem.g_pk_data.low_id = -1;
+    viddec_pm_init_ops();
+    stack_corrupted = 0xDEADBEEF;
+    while(1)
+    {
+        viddec_fw_process_sync_queues(msg);
+        viddec_fw_process_async_queues();
+        viddec_fw_int_host();
+#if 0
+        if(stack_corrupted != 0xDEADBEEF)
+        {
+            WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_FATAL_STACK_CORRPON, 0, 0, 0, 0, 0, 0);
+            while(1);
+        }
+#endif
+    }
+    return 1;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/utils.c b/mix_vbp/viddec_fw/fw/parser/utils.c
new file mode 100644
index 0000000..5a22e5b
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/utils.c
@@ -0,0 +1,253 @@
+#include "fw_pvt.h"
+#include "viddec_fw_parser_ipclib_config.h"
+
+extern uint32_t timer;
+
+/*------------------------------------------------------------------------------
+ * Function: memcpy
+ * This is a memory-copy function.
+ *------------------------------------------------------------------------------
+ */
+/* NOTE: we provide our own memcpy since we don't want to include string libs as part of the FW due to size limitations */
+void *memcpy(void *dest, const void *src, uint32_t n)
+{
+    uint8_t *ptr8_frm, *ptr8_to;
+    uint32_t *ptr32_frm, *ptr32_to;
+    uint32_t bytes_left=n,trail = 0;
+    uint32_t align=0;
+
+    ptr8_frm = (uint8_t *)src;
+    ptr8_to = (uint8_t *)dest;
+
+    trail = ((uint32_t)ptr8_frm) & 0x3;
+    if((trail == (((uint32_t)ptr8_to) & 0x3)) && (n > 4))
+    {
+        /* byte-copy the head bytes needed to bring both pointers to word alignment */
+        align = (4 - trail) & 0x3;
+        bytes_left -= align;
+        while(align > 0){
+            *ptr8_to ++ = *ptr8_frm ++;
+            align--;
+        }
+        /* check to see if the rest of the bytes is a multiple of 4 */
+        trail = bytes_left & 0x3;
+        bytes_left = (bytes_left >> 2) << 2;
+        ptr32_to = (uint32_t *)ptr8_to;
+        ptr32_frm = (uint32_t *)ptr8_frm;
+        /* copy word by word */
+        while(bytes_left > 0){
+            *ptr32_to ++ = *ptr32_frm ++;
+            bytes_left -= 4;
+        }
+        /* If there are any trailing bytes do a byte copy */
+        ptr8_to = (uint8_t *)ptr32_to;
+        ptr8_frm = (uint8_t *)ptr32_frm;
+        while(trail > 0){
+            *ptr8_to ++ = *ptr8_frm ++;
+            trail--;
+        }
+    }
+    else
+    {/* case when src and dest addr are not on the same alignment.
+        Just do a byte copy */
+        while(bytes_left > 0){
+            *ptr8_to ++ = *ptr8_frm ++;
+            bytes_left -= 1;
+        }
+    }
+    return dest;
+}
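A worked trace of the three copy paths above (head alignment, word copy, trailing bytes); the addresses and length are made up for illustration:

    /* Illustrative only: memcpy(dst, src, 11) with both pointers at byte
       offset 1 within a word (same misalignment, n > 4):
         trail = 1  -> align = (4 - 1) & 3 = 3 head bytes are byte-copied,
         bytes_left = 11 - 3 = 8 -> two 32-bit word copies,
         trail = 8 & 3 = 0 -> no tail bytes remain (3 + 8 = 11 bytes total). */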
+/*------------------------------------------------------------------------------
+ * Function: memset
+ * This is a function to copy a specified value into a memory array.
+ *------------------------------------------------------------------------------
+ */
+/* NOTE: we provide our own memset since we don't want to include string libs as part of the FW due to size limitations */
+void *memset(void *s, int32_t c, uint32_t n)
+{
+    uint8_t *ptr8 = (uint8_t *)s;
+    uint32_t *ptr32, data;
+    uint32_t mask = 0, bytes_left = n;
+
+    mask = c & 0xFF;
+    mask |= (mask << 8);
+    mask |= (mask << 16);
+    if(n >= 4)
+    {
+        uint32_t trail=0;
+        trail = 4 - (((uint32_t)ptr8) & 0x3);
+        if(trail < 4)
+        {
+            ptr32 = (uint32_t *)(((uint32_t)ptr8) & ~0x3);
+            data = (*ptr32 >> (8*trail)) << (8*trail);
+            data |= (mask >> (32 - (8*trail)));
+            *ptr32 = data;
+            bytes_left -= trail;
+            ptr8 += trail;
+        }
+        ptr32 = (uint32_t *)((uint32_t)ptr8);
+        while(bytes_left >= 4)
+        {
+            *ptr32 = mask;
+            ptr32++;
+            bytes_left -=4;
+        }
+        if(bytes_left > 0)
+        {
+            data = (*ptr32 << (8*bytes_left)) >> (8*bytes_left);
+            data |= (mask << (32 - (8*bytes_left)));
+            *ptr32=data;
+        }
+    }
+
+    return s;
+}
+
+/*------------------------------------------------------------------------------
+ * Function: cp_using_dma
+ * This is a function to copy data from local memory to/from system memory.
+ * Params:
+ *        [in] ddr_addr  : Word aligned DDR address.
+ *        [in] local_addr: Word aligned local address.
+ *        [in] size      : Number of bytes to transfer.
+ *        [in] to_ddr    : Direction of copy; if true copy to DDR, else copy to local memory.
+ *        [in] swap      : Enable or disable byte swap (endian).
+ *        [out] return   : Actual number of bytes copied, which can be more than what was requested
+ *                         since we can only copy words at a time.
+ * Limitations: DMA can transfer words only; local addr & DDR addr should be word aligned.
+ *------------------------------------------------------------------------------
+ */
+uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap)
+{
+    uint32_t val=0, wrote = size;
+
+    while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_BUSY) != 0)
+    {
+        /* wait if DMA is busy with a transaction. Error condition?? */
+    }
+
+    reg_write(DMA_SYSTEM_ADDRESS, (ddr_addr & ~3) & ~GV_DDR_MEM_MASK);
+    reg_write(DMA_LOCAL_ADDRESS, (local_addr & 0xfffc));
+    //wrote += (ddr_addr & 0x3);
+    wrote = (wrote+3)>>2;/* round the byte count up to whole words */
+    val=(wrote & 0xffff) << 2;
+    reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE);
+    val |= DMA_CTRL_STATUS_START;
+    /* If size > 64 words use 128 byte burst speed */
+    if(wrote > 64)
+        val |= (1<<18);
+    if(swap) /* Endian swap if needed */
+        val |= DMA_CTRL_STATUS_SWAP;
+    if(to_ddr)
+        val = val | DMA_CTRL_STATUS_DIRCN;
+    reg_write(DMA_CONTROL_STATUS, val);
+    while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_DONE) == 0)
+    {
+        /* wait till DMA is done */
+    }
+    reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE);
+
+    return (wrote << 2);
+}
+
+/*------------------------------------------------------------------------------
+ * Function: cp_using_dma_phys
+ * This is a function to copy data from local memory to/from system memory, using a physical DDR address.
+ * Params:
+ *        [in] ddr_addr  : Word aligned DDR address.
+ *        [in] local_addr: Word aligned local address.
+ *        [in] size      : Number of bytes to transfer.
+ *        [in] to_ddr    : Direction of copy; if true copy to DDR, else copy to local memory.
+ *        [in] swap      : Enable or disable byte swap (endian).
+ *        [out] return   : Actual number of bytes copied, which can be more than what was requested
+ *                         since we can only copy words at a time.
+ * Limitations: DMA can transfer words only; local addr & DDR addr should be word aligned.
+ *------------------------------------------------------------------------------ + */ +uint32_t cp_using_dma_phys(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap) +{ + uint32_t val=0, wrote = size; + + while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_BUSY) != 0) + { + /* wait if DMA is busy with a transcation Error condition??*/ + } + + reg_write(DMA_SYSTEM_ADDRESS, (ddr_addr & ~3)); + reg_write(DMA_LOCAL_ADDRESS, (local_addr & 0xfffc)); + //wrote += (ddr_addr & 0x3); + wrote = (wrote+3)>>2;/* make number of bytes multiple of 4 */ + val=(wrote & 0xffff) << 2; + reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE); + val |= DMA_CTRL_STATUS_START; + /* If size > 64 use 128 byte burst speed */ + if(wrote > 64) + val |= (1<<18); + if(swap) /* Endian swap if needed */ + val |= DMA_CTRL_STATUS_SWAP; + if(to_ddr) + val = val | DMA_CTRL_STATUS_DIRCN; + reg_write(DMA_CONTROL_STATUS, val); + while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_DONE) == 0) + { + /* wait till DMA is done */ + } + reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE); + + return (wrote << 2); +} + +void update_ctrl_reg(uint8_t enable, uint32_t mask) +{ + uint32_t read_val = 0; + read_val = reg_read(CONFIG_CP_CONTROL_REG); + if(enable) + { + read_val = read_val | mask; + } + else + { + read_val = read_val & ~mask; + } + reg_write(CONFIG_CP_CONTROL_REG, read_val); + return; + +} + +extern uint32_t sven_get_timestamp(); + +uint32_t set_wdog(uint32_t offset) +{ +#ifdef B0_TIMER_FIX + update_ctrl_reg(0, WATCH_DOG_ENABLE); + reg_write(INT_REG, INT_WDOG_ENABLE); + reg_write(WATCH_DOG_COUNTER, offset & WATCH_DOG_MASK); + update_ctrl_reg(1, WATCH_DOG_ENABLE); + return offset & WATCH_DOG_MASK; +#else + return sven_get_timestamp(); +#endif +} + +void get_wdog(uint32_t *value) +{ +#ifdef B0_TIMER_FIX + *value = reg_read(WATCH_DOG_COUNTER) & WATCH_DOG_MASK; + reg_write(INT_REG, ~INT_WDOG_ENABLE); + update_ctrl_reg(0, WATCH_DOG_ENABLE); +#else + *value = sven_get_timestamp(); +#endif +} + +uint32_t get_total_ticks(uint32_t start, uint32_t end) +{ + uint32_t value; +#ifdef B0_TIMER_FIX + value = (start-end) + (start*timer); + timer=0; +#else + value = end-start;/* convert to 1 MHz clocks */ +#endif + return value; +} diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c new file mode 100644 index 0000000..033f6b6 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -0,0 +1,1568 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. 
Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + + +#include +#include + +#include "h264.h" +#include "vbp_loader.h" +#include "vbp_utils.h" +#include "vbp_h264_parser.h" + + +/* number of bytes used to encode length of NAL payload. Default is 4 bytes. */ +static int NAL_length_size = 4; + +/* default scaling list table */ +unsigned char Default_4x4_Intra[16] = +{ + 6,13,20,28, + 13,20,28,32, + 20,28,32,37, + 28,32,37,42 +}; + +unsigned char Default_4x4_Inter[16] = +{ + 10,14,20,24, + 14,20,24,27, + 20,24,27,30, + 24,27,30,34 +}; + +unsigned char Default_8x8_Intra[64] = +{ + 6,10,13,16,18,23,25,27, + 10,11,16,18,23,25,27,29, + 13,16,18,23,25,27,29,31, + 16,18,23,25,27,29,31,33, + 18,23,25,27,29,31,33,36, + 23,25,27,29,31,33,36,38, + 25,27,29,31,33,36,38,40, + 27,29,31,33,36,38,40,42 +}; + +unsigned char Default_8x8_Inter[64] = +{ + 9,13,15,17,19,21,22,24, + 13,13,17,19,21,22,24,25, + 15,17,19,21,22,24,25,27, + 17,19,21,22,24,25,27,28, + 19,21,22,24,25,27,28,30, + 21,22,24,25,27,28,30,32, + 22,24,25,27,28,30,32,33, + 24,25,27,28,30,32,33,35 +}; + +unsigned char quant_flat[16] = +{ + 16,16,16,16, + 16,16,16,16, + 16,16,16,16, + 16,16,16,16 +}; + +unsigned char quant8_flat[64] = +{ + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16 +}; + +unsigned char* UseDefaultList[8] = +{ + Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Intra, + Default_4x4_Inter, Default_4x4_Inter, Default_4x4_Inter, + Default_8x8_Intra, + Default_8x8_Inter +}; + +/** + * + */ +uint32 vbp_init_parser_entries_h264(vbp_context *pcontext) +{ + if (NULL == pcontext->parser_ops) + { + return VBP_PARM; + } + pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_h264_init"); + if (NULL == pcontext->parser_ops->init) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->parse_sc = viddec_parse_sc; + + pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264_parse"); + if (NULL == pcontext->parser_ops->parse_syntax) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264_get_context_size"); + if (NULL == pcontext->parser_ops->get_cxt_size) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_h264_wkld_done"); + if (NULL == pcontext->parser_ops->is_wkld_done) + { + ETRACE ("Failed to set entry point." 
); + return VBP_LOAD; + } + + /* entry point not needed */ + pcontext->parser_ops->is_frame_start = NULL; + return VBP_OK; +} + + +/** + * + */ +uint32 vbp_allocate_query_data_h264(vbp_context *pcontext) +{ + if (NULL != pcontext->query_data) + { + return VBP_PARM; + } + + pcontext->query_data = NULL; + vbp_data_h264 *query_data = NULL; + + query_data = g_try_new0(vbp_data_h264, 1); + if (NULL == query_data) + { + goto cleanup; + } + + /* assign the pointer */ + pcontext->query_data = (void *)query_data; + + query_data->pic_data = g_try_new0(vbp_picture_data_h264, MAX_NUM_PICTURES); + if (NULL == query_data->pic_data) + { + goto cleanup; + } + + int i; + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].pic_parms = g_try_new0(VAPictureParameterBufferH264, 1); + if (NULL == query_data->pic_data[i].pic_parms) + { + goto cleanup; + } + query_data->pic_data[i].num_slices = 0; + query_data->pic_data[i].slc_data = g_try_new0(vbp_slice_data_h264, MAX_NUM_SLICES); + if (NULL == query_data->pic_data[i].slc_data) + { + goto cleanup; + } + } + + + query_data->IQ_matrix_buf = g_try_new0(VAIQMatrixBufferH264, 1); + if (NULL == query_data->IQ_matrix_buf) + { + goto cleanup; + } + + query_data->codec_data = g_try_new0(vbp_codec_data_h264, 1); + if (NULL == query_data->codec_data) + { + goto cleanup; + } + + return VBP_OK; + +cleanup: + vbp_free_query_data_h264(pcontext); + + return VBP_MEM; +} + +uint32 vbp_free_query_data_h264(vbp_context *pcontext) +{ + if (NULL == pcontext->query_data) + { + return VBP_OK; + } + + int i; + vbp_data_h264 *query_data; + query_data = (vbp_data_h264 *)pcontext->query_data; + + if (query_data->pic_data) + { + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + g_free(query_data->pic_data[i].slc_data); + g_free(query_data->pic_data[i].pic_parms); + } + g_free(query_data->pic_data); + } + + g_free(query_data->IQ_matrix_buf); + g_free(query_data->codec_data); + g_free(query_data); + + pcontext->query_data = NULL; + + return VBP_OK; +} + + +static inline uint16_t vbp_utils_ntohs(uint8_t* p) +{ + uint16_t i = ((*p) << 8) + ((*(p+1))); + return i; +} + +static inline uint32_t vbp_utils_ntohl(uint8_t* p) +{ + uint32_t i = ((*p) << 24) + ((*(p+1)) << 16) + ((*(p+2)) << 8) + ((*(p+3))); + return i; +} + + +static inline void vbp_set_VAPicture_h264( + int curr_picture_structure, + int bottom_field, + frame_store* store, + VAPictureH264* pic) +{ + if (FRAME == curr_picture_structure) + { + if (FRAME != viddec_h264_get_dec_structure(store)) + { + WTRACE("Reference picture structure is not frame for current frame picture!"); + } + pic->flags = 0; + pic->TopFieldOrderCnt = store->frame.poc; + pic->BottomFieldOrderCnt = store->frame.poc; + } + else + { + if (FRAME == viddec_h264_get_dec_structure(store)) + { + WTRACE("reference picture structure is frame for current field picture!"); + } + if (bottom_field) + { + pic->flags = VA_PICTURE_H264_BOTTOM_FIELD; + pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; + } + else + { + pic->flags = VA_PICTURE_H264_TOP_FIELD; + pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; + } + } +} + +static inline void vbp_set_slice_ref_list_h264( + struct h264_viddec_parser* h264_parser, + VASliceParameterBufferH264 *slc_parms) +{ + int i, j; + int num_ref_idx_active = 0; + h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader); + uint8_t* p_list = NULL; + VAPictureH264* refPicListX = NULL; + frame_store* fs = NULL; + + /* initialize 
ref picutre list, set picture id and flags to invalid. */ + + for (i = 0; i < 2; i++) + { + refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]); + for (j = 0; j < 32; j++) + { + refPicListX->picture_id = VA_INVALID_SURFACE; + refPicListX->frame_idx = 0; + refPicListX->flags = VA_PICTURE_H264_INVALID; + refPicListX->TopFieldOrderCnt = 0; + refPicListX->BottomFieldOrderCnt = 0; + refPicListX++; + } + } + + for (i = 0; i < 2; i++) + { + refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]); + + if ((i == 0) && + ((h264_PtypeB == slice_header->slice_type) || + (h264_PtypeP == slice_header->slice_type))) + { + num_ref_idx_active = slice_header->num_ref_idx_l0_active; + if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag) + { + p_list = h264_parser->info.slice_ref_list0; + } + else + { + p_list = h264_parser->info.dpb.listX_0; + } + } + else if((i == 1) && (h264_PtypeB == slice_header->slice_type)) + { + num_ref_idx_active = slice_header->num_ref_idx_l1_active; + if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag) + { + p_list = h264_parser->info.slice_ref_list1; + } + else + { + p_list = h264_parser->info.dpb.listX_1; + } + } + else + { + num_ref_idx_active = 0; + p_list = NULL; + } + + + for (j = 0; j < num_ref_idx_active; j++) + { + fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]); + + /* bit 5 indicates if reference picture is bottom field */ + vbp_set_VAPicture_h264( + h264_parser->info.img.structure, + (p_list[j] & 0x20) >> 5, + fs, + refPicListX); + + refPicListX->frame_idx = fs->frame_num; + refPicListX->flags |= viddec_h264_get_is_long_term(fs) ? VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE; + refPicListX++; + } + } +} + +static inline void vbp_set_pre_weight_table_h264( + struct h264_viddec_parser* h264_parser, + VASliceParameterBufferH264 *slc_parms) +{ + h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader); + int i, j; + + if ((((h264_PtypeP == slice_header->slice_type) || + (h264_PtypeB == slice_header->slice_type)) && + h264_parser->info.active_PPS.weighted_pred_flag) || + ((h264_PtypeB == slice_header->slice_type) && + (1 == h264_parser->info.active_PPS.weighted_bipred_idc))) + { + slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom; + slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom; + slc_parms->luma_weight_l0_flag = slice_header->sh_predwttbl.luma_weight_l0_flag; + slc_parms->chroma_weight_l0_flag = slice_header->sh_predwttbl.chroma_weight_l0_flag; + slc_parms->luma_weight_l1_flag = slice_header->sh_predwttbl.luma_weight_l1_flag; + slc_parms->chroma_weight_l1_flag = slice_header->sh_predwttbl.chroma_weight_l1_flag; + + for (i = 0; i < 32; i++) + { + slc_parms->luma_weight_l0[i] = slice_header->sh_predwttbl.luma_weight_l0[i]; + slc_parms->luma_offset_l0[i] = slice_header->sh_predwttbl.luma_offset_l0[i]; + slc_parms->luma_weight_l1[i] = slice_header->sh_predwttbl.luma_weight_l1[i]; + slc_parms->luma_offset_l1[i] = slice_header->sh_predwttbl.luma_offset_l1[i]; + + for (j = 0; j < 2; j++) + { + slc_parms->chroma_weight_l0[i][j] = slice_header->sh_predwttbl.chroma_weight_l0[i][j]; + slc_parms->chroma_offset_l0[i][j] = slice_header->sh_predwttbl.chroma_offset_l0[i][j]; + slc_parms->chroma_weight_l1[i][j] = slice_header->sh_predwttbl.chroma_weight_l1[i][j]; + slc_parms->chroma_offset_l1[i][j] = slice_header->sh_predwttbl.chroma_offset_l1[i][j]; + } + } + } + else + { + /* default 
weight table */ + slc_parms->luma_log2_weight_denom = 5; + slc_parms->chroma_log2_weight_denom = 5; + slc_parms->luma_weight_l0_flag = 0; + slc_parms->luma_weight_l1_flag = 0; + slc_parms->chroma_weight_l0_flag = 0; + slc_parms->chroma_weight_l1_flag = 0; + for (i = 0; i < 32; i++) + { + slc_parms->luma_weight_l0[i] = 0; + slc_parms->luma_offset_l0[i] = 0; + slc_parms->luma_weight_l1[i] = 0; + slc_parms->luma_offset_l1[i] = 0; + + for (j = 0; j < 2; j++) + { + slc_parms->chroma_weight_l0[i][j] = 0; + slc_parms->chroma_offset_l0[i][j] = 0; + slc_parms->chroma_weight_l1[i][j] = 0; + slc_parms->chroma_offset_l1[i][j] = 0; + } + } + } +} + + +static inline void vbp_set_reference_frames_h264( + struct h264_viddec_parser *parser, + VAPictureParameterBufferH264* pic_parms) +{ + int buffer_idx; + int frame_idx; + frame_store* store = NULL; + h264_DecodedPictureBuffer* dpb = &(parser->info.dpb); + /* initialize reference frames */ + for (frame_idx = 0; frame_idx < 16; frame_idx++) + { + pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE; + pic_parms->ReferenceFrames[frame_idx].frame_idx = 0; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID; + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0; + } + pic_parms->num_ref_frames = 0; + + frame_idx = 0; + + /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer); */ + /* set short term reference frames */ + for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++) + { + if (frame_idx >= 16) + { + WTRACE("Frame index is out of bound."); + break; + } + + store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]]; + /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */ + if (viddec_h264_get_is_used(store)) + { + pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; + if (FRAME == parser->info.img.structure) + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc; + } + else + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) + { + /* if both fields are used for reference, just set flag to be frame (0) */ + } + else + { + if (store->top_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; + if (store->bottom_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; + } + } + } + frame_idx++; + } + + /* set long term reference frames */ + for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++) + { + if (frame_idx >= 16) + { + WTRACE("Frame index is out of bound."); + break; + } + store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]]; + if (!viddec_h264_get_is_long_term(store)) + { + WTRACE("long term frame is not marked as long term."); + } + /*if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */ + if (viddec_h264_get_is_used(store)) + { + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE; + if (FRAME == parser->info.img.structure) + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc; + 
pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc; + } + else + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) + { + /* if both fields are used for reference, just set flag to be frame (0)*/ + } + else + { + if (store->top_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; + if (store->bottom_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; + } + } + } + frame_idx++; + } + + pic_parms->num_ref_frames = frame_idx; + + if (frame_idx > parser->info.active_SPS.num_ref_frames) + { + WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).", + frame_idx, parser->info.active_SPS.num_ref_frames); + } +} + + +static inline void vbp_set_scaling_list_h264( + struct h264_viddec_parser *parser, + VAIQMatrixBufferH264* IQ_matrix_buf) +{ + int i; + if (parser->info.active_PPS.pic_scaling_matrix_present_flag) + { + for (i = 0; i < 6 + 2 * parser->info.active_PPS.transform_8x8_mode_flag; i++) + { + if (parser->info.active_PPS.pic_scaling_list_present_flag[i]) + { + if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) || + ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6])) + { + /* use default scaling list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + } + } + else + { + /* use PPS list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64); + } + } + } + else /* pic_scaling_list not present */ + { + if (parser->info.active_SPS.seq_scaling_matrix_present_flag) + { + /* SPS matrix present - use fallback rule B */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i], + 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], + parser->info.active_SPS.seq_scaling_list_present_flag[i] ? 
parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i], + 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + g_warning("invalid scaling list index."); + break; + } + } + else /* seq_scaling_matrix not present */ + { + /* SPS matrix not present - use fallback rule A */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + WTRACE("invalid scaling list index."); + break; + } + } /* end of seq_scaling_matrix not present */ + } /* end of pic_scaling_list not present */ + } /* for loop for each index from 0 to 7 */ + } /* end of pic_scaling_matrix present */ + else + { + /* PPS matrix not present, use SPS information */ + if (parser->info.active_SPS.seq_scaling_matrix_present_flag) + { + for (i = 0; i < 6 + 2 * parser->info.active_PPS.transform_8x8_mode_flag; i++) + { + if (parser->info.active_SPS.seq_scaling_list_present_flag[i]) + { + if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) || + ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6])) + { + /* use default scaling list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + } + } + else + { + /* use SPS list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64); + } + } + } + else + { + /* SPS list not present - use fallback rule A */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + WTRACE("invalid scaling list index."); + break; + } + } + } + } + else + { + /* SPS matrix not present - use flat lists */ + for (i = 0; i < 6; i++) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16); + } + for (i = 0; i < 2; i++) + { + memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64); + } + } + } + + if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) && + (parser->info.active_PPS.pic_scaling_matrix_present_flag || + parser->info.active_SPS.seq_scaling_matrix_present_flag)) + { + for (i = 0; i < 2; i++) + { + memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64); + } + } +} + +static void vbp_set_codec_data_h264( + struct h264_viddec_parser *parser, + vbp_codec_data_h264* codec_data) +{ + /* parameter id */ + codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id; + codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id; + + /* profile and level */ + codec_data->profile_idc = parser->info.active_SPS.profile_idc; + codec_data->level_idc = parser->info.active_SPS.level_idc; + + + codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2; + + + /* reference 
frames */ + codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames; + + if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag && + !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag) + { + /* no longer necessary: two fields share the same interlaced surface */ + /* codec_data->num_ref_frames *= 2; */ + } + + codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; + + /* frame coding */ + codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; + + /* frame dimension */ + codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1 ) * 16; + + codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16; + + /* frame cropping */ + codec_data->frame_cropping_flag = + parser->info.active_SPS.sps_disp.frame_cropping_flag; + + codec_data->frame_crop_rect_left_offset = + parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset; + + codec_data->frame_crop_rect_right_offset = + parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset; + + codec_data->frame_crop_rect_top_offset = + parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset; + + codec_data->frame_crop_rect_bottom_offset = + parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset; + + /* aspect ratio */ + codec_data->aspect_ratio_info_present_flag = + parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag; + + codec_data->aspect_ratio_idc = + parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc; + + codec_data->sar_width = + parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width; + + codec_data->sar_height = + parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height; + + /* video format */ + codec_data->video_format = + parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format; + + codec_data->video_format = + parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag; +} + + +static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + + vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; + struct h264_viddec_parser* parser = NULL; + vbp_picture_data_h264* pic_data = NULL; + VAPictureParameterBufferH264* pic_parms = NULL; + + parser = (struct h264_viddec_parser *)cxt->codec_data; + + if (0 == parser->info.SliceHeader.first_mb_in_slice) + { + /* a new picture is parsed */ + query_data->num_pictures++; + } + + if (query_data->num_pictures > MAX_NUM_PICTURES) + { + ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES); + return VBP_DATA; + } + + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + WTRACE("MB address does not start from 0!"); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + pic_parms = pic_data->pic_parms; + + if (parser->info.SliceHeader.first_mb_in_slice == 0) + { + /** + * picture parameter only needs to be set once, + * even multiple slices may be encoded + */ + + /* VAPictureParameterBufferH264 */ + pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE; + pic_parms->CurrPic.frame_idx = 0; + if (parser->info.img.field_pic_flag == 1) + { + if (parser->info.img.bottom_field_flag) + { + pic_parms->CurrPic.flags = 
VA_PICTURE_H264_BOTTOM_FIELD; + } + else + { + /* also OK set to 0 (from test suite) */ + pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD; + } + } + else + { + pic_parms->CurrPic.flags = 0; /* frame picture */ + } + pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc; + pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc; + pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num; + + /* don't care if current frame is used as long term reference */ + if (parser->info.SliceHeader.nal_ref_idc != 0) + { + pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE; + } + + pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1; + + /* frame height in MBS */ + pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1; + + pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8; + pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8; + + + pic_parms->seq_fields.value = 0; + pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc; + pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag; + pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; + pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag; + + /* new fields in libva 0.31 */ + pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; + pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4; + pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type; + pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; + pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag =parser->info.active_SPS.delta_pic_order_always_zero_flag; + + + /* referened from UMG_Moorstown_TestSuites */ + pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 
1 : 0; + + pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1; + pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type; + pic_parms->slice_group_change_rate_minus1 = 0; + pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26; + pic_parms->pic_init_qs_minus26 = 0; + pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset; + pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset; + + pic_parms->pic_fields.value = 0; + pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag; + pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag; + pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc; + pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag; + + /* new LibVA fields in v0.31*/ + pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag; + pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag; + pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag; + pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0; + + /* all slices in the pciture have the same field_pic_flag */ + pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag; + pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag; + + pic_parms->frame_num = parser->info.SliceHeader.frame_num; + } + + + /* set reference frames, and num_ref_frames */ + vbp_set_reference_frames_h264(parser, pic_parms); + if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + /* num of reference frame is 0 if current picture is IDR */ + pic_parms->num_ref_frames = 0; + } + else + { + /* actual num_ref_frames is set in vbp_set_reference_frames_h264 */ + } + + return VBP_OK; +} + +#if 0 +static inline void vbp_update_reference_frames_h264_methodA(vbp_picture_data_h264* pic_data) +{ + VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms; + + char is_used[16]; + memset(is_used, 0, sizeof(is_used)); + + int ref_list; + int slice_index; + int i, j; + VAPictureH264* pRefList = NULL; + + for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++) + { + VASliceParameterBufferH264* slice_parms = + &(pic_data->slc_data[slice_index].slc_parms); + + for (ref_list = 0; ref_list < 2; ref_list++) + { + if (0 == ref_list) + pRefList = slice_parms->RefPicList0; + else + pRefList = slice_parms->RefPicList1; + + for (i = 0; i < 32; i++, pRefList++) + { + if (VA_PICTURE_H264_INVALID == pRefList->flags) + break; + + for (j = 0; j < 16; j++) + { + if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt == + pRefList->TopFieldOrderCnt) + { + is_used[j] = 1; + break; + } + } + } + } + } + + int frame_idx = 0; + VAPictureH264* pRefFrame = pic_parms->ReferenceFrames; + for (i = 0; i < 16; i++) + { + if (is_used[i]) + { + memcpy(pRefFrame, + &(pic_parms->ReferenceFrames[i]), + sizeof(VAPictureH264)); + + pRefFrame++; + frame_idx++; + } + } + pic_parms->num_ref_frames = frame_idx; + + for (; frame_idx < 16; frame_idx++) + { + pRefFrame->picture_id = VA_INVALID_SURFACE; + pRefFrame->frame_idx = -1; + pRefFrame->flags = VA_PICTURE_H264_INVALID; + 
pRefFrame->TopFieldOrderCnt = -1; + pRefFrame->BottomFieldOrderCnt = -1; + pRefFrame++; + } +} +#endif + +#if 0 +static inline void vbp_update_reference_frames_h264_methodB(vbp_picture_data_h264* pic_data) +{ + VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms; + int i; + VAPictureH264* pRefFrame = pic_parms->ReferenceFrames; + for (i = 0; i < 16; i++) + { + pRefFrame->picture_id = VA_INVALID_SURFACE; + pRefFrame->frame_idx = -1; + pRefFrame->flags = VA_PICTURE_H264_INVALID; + pRefFrame->TopFieldOrderCnt = -1; + pRefFrame->BottomFieldOrderCnt = -1; + pRefFrame++; + } + + pic_parms->num_ref_frames = 0; + + + int ref_list; + int slice_index; + int j; + VAPictureH264* pRefList = NULL; + + for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++) + { + VASliceParameterBufferH264* slice_parms = + &(pic_data->slc_data[slice_index].slc_parms); + + for (ref_list = 0; ref_list < 2; ref_list++) + { + if (0 == ref_list) + pRefList = slice_parms->RefPicList0; + else + pRefList = slice_parms->RefPicList1; + + for (i = 0; i < 32; i++, pRefList++) + { + if (VA_PICTURE_H264_INVALID == pRefList->flags) + break; + + for (j = 0; j < 16; j++) + { + if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt == + pRefList->TopFieldOrderCnt) + { + pic_parms->ReferenceFrames[j].flags |= + pRefList->flags; + + if ((pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_TOP_FIELD) && + (pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_BOTTOM_FIELD)) + { + pic_parms->ReferenceFrames[j].flags = 0; + } + break; + } + } + if (j == 16) + { + memcpy(&(pic_parms->ReferenceFrames[pic_parms->num_ref_frames++]), + pRefList, + sizeof(VAPictureH264)); + } + + } + } + } +} +#endif + + +static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint32 bit, byte; + uint8 is_emul; + + vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; + VASliceParameterBufferH264 *slc_parms = NULL; + vbp_slice_data_h264 *slc_data = NULL; + struct h264_viddec_parser* h264_parser = NULL; + h264_Slice_Header_t* slice_header = NULL; + vbp_picture_data_h264* pic_data = NULL; + + + h264_parser = (struct h264_viddec_parser *)cxt->codec_data; + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + ETRACE("invalid picture data index."); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + + slc_data = &(pic_data->slc_data[pic_data->num_slices]); + slc_data->buffer_addr = cxt->parse_cubby.buf; + slc_parms = &(slc_data->slc_parms); + + /* byte: how many bytes have been parsed */ + /* bit: bits parsed within the current parsing position */ + viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul); + + +#if 0 + /* add 4 bytes of start code prefix */ + slc_parms->slice_data_size = slc_data->slice_size = + pcontext->parser_cxt->list.data[index].edpos - + pcontext->parser_cxt->list.data[index].stpos + 4; + + slc_data->slice_offset = pcontext->parser_cxt->list.data[index].stpos - 4; + + /* overwrite the "length" bytes to start code (0x00000001) */ + *(slc_data->buffer_addr + slc_data->slice_offset) = 0; + *(slc_data->buffer_addr + slc_data->slice_offset + 1) = 0; + *(slc_data->buffer_addr + slc_data->slice_offset + 2) = 0; + *(slc_data->buffer_addr + slc_data->slice_offset + 3) = 1; + + + /* the offset to the NAL start code for this slice */ + slc_parms->slice_data_offset = 0; + + /* whole slice is in this buffer */ + slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + + /* bit offset from NAL start code to 
the beginning of slice data */ + /* slc_parms->slice_data_bit_offset = bit;*/ + slc_parms->slice_data_bit_offset = (byte + 4)* 8 + bit; + +#else + slc_parms->slice_data_size = slc_data->slice_size = + pcontext->parser_cxt->list.data[index].edpos - + pcontext->parser_cxt->list.data[index].stpos; + + /* the offset to the NAL start code for this slice */ + slc_data->slice_offset = cxt->list.data[index].stpos; + slc_parms->slice_data_offset = 0; + + /* whole slice is in this buffer */ + slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + + /* bit offset from NAL start code to the beginning of slice data */ + slc_parms->slice_data_bit_offset = bit + byte * 8; +#endif + + if (is_emul) + { + WTRACE("next byte is emulation prevention byte."); + /*slc_parms->slice_data_bit_offset += 8; */ + } + + if (cxt->getbits.emulation_byte_counter != 0) + { + slc_parms->slice_data_bit_offset -= cxt->getbits.emulation_byte_counter * 8; + } + + slice_header = &(h264_parser->info.SliceHeader); + slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice; + + if(h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag & + (!(h264_parser->info.SliceHeader.field_pic_flag))) + { + slc_parms->first_mb_in_slice /= 2; + } + + slc_parms->slice_type = slice_header->slice_type; + + slc_parms->direct_spatial_mv_pred_flag = slice_header->direct_spatial_mv_pred_flag; + + slc_parms->num_ref_idx_l0_active_minus1 = 0; + slc_parms->num_ref_idx_l1_active_minus1 = 0; + if (slice_header->slice_type == h264_PtypeI) + { + } + else if (slice_header->slice_type == h264_PtypeP) + { + slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; + } + else if (slice_header->slice_type == h264_PtypeB) + { + slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; + slc_parms->num_ref_idx_l1_active_minus1 = slice_header->num_ref_idx_l1_active - 1; + } + else + { + WTRACE("slice type %d is not supported.", slice_header->slice_type); + } + + slc_parms->cabac_init_idc = slice_header->cabac_init_idc; + slc_parms->slice_qp_delta = slice_header->slice_qp_delta; + slc_parms->disable_deblocking_filter_idc = slice_header->disable_deblocking_filter_idc; + slc_parms->slice_alpha_c0_offset_div2 = slice_header->slice_alpha_c0_offset_div2; + slc_parms->slice_beta_offset_div2 = slice_header->slice_beta_offset_div2; + + + vbp_set_pre_weight_table_h264(h264_parser, slc_parms); + vbp_set_slice_ref_list_h264(h264_parser, slc_parms); + + + pic_data->num_slices++; + + //vbp_update_reference_frames_h264_methodB(pic_data); + if (pic_data->num_slices > MAX_NUM_SLICES) + { + ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES); + return VBP_DATA; + } + return VBP_OK; +} + +/** +* parse decoder configuration data +*/ +uint32 vbp_parse_init_data_h264(vbp_context* pcontext) +{ + /* parsing AVCDecoderConfigurationRecord structure (see MPEG-4 part 15 spec) */ + + uint8 configuration_version = 0; + uint8 AVC_profile_indication = 0; + uint8 profile_compatibility = 0; + uint8 AVC_level_indication = 0; + uint8 length_size_minus_one = 0; + uint8 num_of_sequence_parameter_sets = 0; + uint8 num_of_picture_parameter_sets = 0; + uint16 sequence_parameter_set_length = 0; + uint16 picture_parameter_set_length = 0; + + int i = 0; + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint8* cur_data = cxt->parse_cubby.buf; + + + if (cxt->parse_cubby.size < 6) + { + /* need at least 6 bytes to start parsing the structure, see spec 15 */ + return VBP_DATA; + } + + configuration_version = *cur_data++; 
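+
+ /*
+ * AVCDecoderConfigurationRecord layout (ISO/IEC 14496-15), as consumed below:
+ * configurationVersion (8), AVCProfileIndication (8),
+ * profile_compatibility (8), AVCLevelIndication (8),
+ * reserved (6) + lengthSizeMinusOne (2),
+ * reserved (3) + numOfSequenceParameterSets (5),
+ * then for each SPS: sequenceParameterSetLength (16) + SPS NAL unit,
+ * then numOfPictureParameterSets (8),
+ * then for each PPS: pictureParameterSetLength (16) + PPS NAL unit.
+ */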
+ AVC_profile_indication = *cur_data++;
+
+ /* ITRACE("Profile indication: %d", AVC_profile_indication); */
+
+ profile_compatibility = *cur_data++;
+ AVC_level_indication = *cur_data++;
+
+ /* ITRACE("Level indication: %d", AVC_level_indication); */
+ /* 2 bits of length_size_minus_one, 6 bits of reserved (111111) */
+ length_size_minus_one = (*cur_data) & 0x3;
+
+ if (length_size_minus_one != 3)
+ {
+ WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1);
+ }
+
+ NAL_length_size = length_size_minus_one + 1;
+
+ cur_data++;
+
+ /* 3 bits of reserved (111) and 5 bits of num_of_sequence_parameter_sets */
+ num_of_sequence_parameter_sets = (*cur_data) & 0x1f;
+ if (num_of_sequence_parameter_sets > 1)
+ {
+ WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets);
+ }
+ if (num_of_sequence_parameter_sets > MAX_NUM_SPS)
+ {
+ /* this would never happen as MAX_NUM_SPS = 32 */
+ WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS);
+ }
+ cur_data++;
+
+ cxt->list.num_items = 0;
+ for (i = 0; i < num_of_sequence_parameter_sets; i++)
+ {
+ if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+ {
+ /* need at least 2 bytes to parse sequence_parameter_set_length */
+ return VBP_DATA;
+ }
+
+ /* 16 bits */
+ sequence_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+ cur_data += 2;
+
+ if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size)
+ {
+ /* need at least sequence_parameter_set_length bytes for SPS */
+ return VBP_DATA;
+ }
+
+ cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+ /* end pos is exclusive */
+ cxt->list.data[cxt->list.num_items].edpos =
+ cxt->list.data[cxt->list.num_items].stpos + sequence_parameter_set_length;
+
+ cxt->list.num_items++;
+
+ cur_data += sequence_parameter_set_length;
+ }
+
+ if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size)
+ {
+ /* need at least one more byte to parse num_of_picture_parameter_sets */
+ return VBP_DATA;
+ }
+
+ num_of_picture_parameter_sets = *cur_data++;
+ if (num_of_picture_parameter_sets > 1)
+ {
+ /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */
+ }
+
+ for (i = 0; i < num_of_picture_parameter_sets; i++)
+ {
+ if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+ {
+ /* need at least 2 bytes to parse picture_parameter_set_length */
+ return VBP_DATA;
+ }
+
+ /* 16 bits */
+ picture_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+ cur_data += 2;
+
+ if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size)
+ {
+ /* need at least picture_parameter_set_length bytes for PPS */
+ return VBP_DATA;
+ }
+
+ cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+ /* end pos is exclusive */
+ cxt->list.data[cxt->list.num_items].edpos =
+ cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length;
+
+ cxt->list.num_items++;
+
+ cur_data += picture_parameter_set_length;
+ }
+
+ if ((cur_data - cxt->parse_cubby.buf) != cxt->parse_cubby.size)
+ {
+ WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.",
+ cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf));
+ }
+
+ return VBP_OK;
+}
+
+static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p)
+{
+ switch (NAL_length_size)
+ {
+ case 4:
+ return vbp_utils_ntohl(p);
+
+ case 3:
+ {
+ uint32_t i = ((*p) << 16) + ((*(p+1)) << 8) + ((*(p+2)));
+ return i;
+ }
+
+ case 2:
+ return vbp_utils_ntohs(p);
+
+ case 1:
+ return *p;
+
+ default:
+ WTRACE("invalid NAL_length_size: %d.", NAL_length_size);
+ /* default to 4 bytes for length */
+ NAL_length_size = 4;
+ return vbp_utils_ntohl(p);
+ }
+}
+
+/**
+* An H.264 elementary stream in this format has no start codes;
+* instead, each NAL unit is prefixed with its size. See MPEG-4
+* Part 15 (sample format).
+*/
+uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
+{
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ int32_t size_left = 0;
+ int32_t size_parsed = 0;
+ int32_t NAL_length = 0;
+ viddec_sc_parse_cubby_cxt_t* cubby = NULL;
+
+ /* reset query data for the new sample buffer */
+ vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
+ int i;
+
+ for (i = 0; i < MAX_NUM_PICTURES; i++)
+ {
+ query_data->pic_data[i].num_slices = 0;
+ }
+ query_data->num_pictures = 0;
+
+
+ cubby = &(cxt->parse_cubby);
+
+ cxt->list.num_items = 0;
+
+ /* start code emulation prevention byte is present in NAL */
+ cxt->getbits.is_emul_reqd = 1;
+
+ size_left = cubby->size;
+
+ while (size_left >= NAL_length_size)
+ {
+ NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed);
+
+ size_parsed += NAL_length_size;
+ cxt->list.data[cxt->list.num_items].stpos = size_parsed;
+ size_parsed += NAL_length; /* skip NAL bytes */
+ /* end position is exclusive */
+ cxt->list.data[cxt->list.num_items].edpos = size_parsed;
+ cxt->list.num_items++;
+ if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+ {
+ ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC);
+ break;
+ }
+
+ size_left = cubby->size - size_parsed;
+ }
+
+ if (size_left != 0)
+ {
+ WTRACE("Elementary stream is not aligned (%d).", size_left);
+ }
+ return VBP_OK;
+}
+
+/**
+*
+* process parsing result after a NAL unit is parsed
+*
+*/
+uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i)
+{
+ if (i >= MAX_NUM_SLICES)
+ {
+ return VBP_PARM;
+ }
+
+ uint32 error = VBP_OK;
+
+ struct h264_viddec_parser* parser = NULL;
+ parser = (struct h264_viddec_parser *)&( pcontext->parser_cxt->codec_data[0]);
+ switch (parser->info.nal_unit_type)
+ {
+ case h264_NAL_UNIT_TYPE_SLICE:
+ /* ITRACE("slice header is parsed."); */
+ error = vbp_add_pic_data_h264(pcontext, i);
+ if (VBP_OK == error)
+ {
+ error = vbp_add_slice_data_h264(pcontext, i);
+ }
+ break;
+
+ case h264_NAL_UNIT_TYPE_IDR:
+ /* ITRACE("IDR header is parsed."); */
+ error = vbp_add_pic_data_h264(pcontext, i);
+ if (VBP_OK == error)
+ {
+ error = vbp_add_slice_data_h264(pcontext, i);
+ }
+ break;
+
+ case h264_NAL_UNIT_TYPE_SEI:
+ /* ITRACE("SEI header is parsed."); */
+ break;
+
+ case h264_NAL_UNIT_TYPE_SPS:
+ /* ITRACE("SPS header is parsed."); */
+ break;
+
+ case h264_NAL_UNIT_TYPE_PPS:
+ /* ITRACE("PPS header is parsed."); */
+ break;
+
+ case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+ /* ITRACE("Access unit delimiter is parsed."); */
+ break;
+
+ case h264_NAL_UNIT_TYPE_EOSeq:
+ /* ITRACE("EOSeq is parsed."); */
+ break;
+
+ case h264_NAL_UNIT_TYPE_EOstream:
+ /* ITRACE("EOStream is parsed."); */
+ break;
+
+ default:
+ WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type);
+ break;
+ }
+ return error;
+}
+
+/*
+*
+* fill the query data structure after the sample buffer is parsed
+*
+*/
+uint32 vbp_populate_query_data_h264(vbp_context *pcontext)
+{
+ vbp_data_h264 *query_data = NULL;
+ struct h264_viddec_parser *parser = NULL;
+
+ parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data;
+ query_data = (vbp_data_h264 *)pcontext->query_data;
+
+ vbp_set_codec_data_h264(parser, query_data->codec_data);
+
+ /* buffer number */
+ query_data->buf_number = buffer_counter;
+
+ /* VAIQMatrixBufferH264 */
+ vbp_set_scaling_list_h264(parser, query_data->IQ_matrix_buf);
+
+ if (query_data->num_pictures > 0)
+ {
+ /*
+ * picture parameter buffer and slice parameter buffer have been populated
+ */
+ }
+ else
+ {
+ /**
+ * add a dummy picture that contains picture parameters parsed
+ * from SPS and PPS.
+ */
+ vbp_add_pic_data_h264(pcontext, 0);
+ }
+ return VBP_OK;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h
new file mode 100644
index 0000000..6ed4499
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h
@@ -0,0 +1,48 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef VBP_H264_PARSER_H
+#define VBP_H264_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+uint32 vbp_init_parser_entries_h264(vbp_context *pcontext);
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_h264(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_h264(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_h264(vbp_context *pcontext);
+
+/*
+ * parse start code. Only length-prefixed mode is supported;
+ * start-code-prefixed streams are not.
+ */
+uint32 vbp_parse_start_code_h264(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_h264(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_h264(vbp_context *pcontext);
+
+#endif /*VBP_H264_PARSER_H*/
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c
new file mode 100644
index 0000000..27a2dd0
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c
@@ -0,0 +1,162 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors.
Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#include + +#include "vbp_loader.h" +#include "vbp_utils.h" + +/** + * + */ +uint32 vbp_open(uint32 parser_type, Handle *hcontext) +{ + vbp_context **ppcontext; + uint32 error; + + if (NULL == hcontext) + { + return VBP_PARM; + } + + *hcontext = NULL; /* prepare for failure. */ + + ppcontext = (vbp_context **)hcontext; + + /** + * TO DO: + * check if vbp context has been created. + */ + + error = vbp_utils_create_context(parser_type, ppcontext); + if (VBP_OK != error) + { + ETRACE("Failed to create context: %d.", error); + } + + return error; +} + +/** + * + */ +uint32 vbp_close(Handle hcontext) +{ + uint32 error; + + if (NULL == hcontext) + { + return VBP_PARM; + } + + vbp_context *pcontext = (vbp_context *)hcontext; + + if (MAGIC_NUMBER != pcontext->identifier) + { + /* not a valid vbp context. */ + ETRACE("context is not initialized"); + return VBP_INIT; + } + error = vbp_utils_destroy_context(pcontext); + if (VBP_OK != error) + { + ETRACE("Failed to destroy context: %d.", error); + } + + return error; +} + + +/** + * + */ +uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag) +{ + vbp_context *pcontext; + uint32 error = VBP_OK; + + if ((NULL == hcontext) || (NULL == data) || (0 == size)) + { + ETRACE("Invalid input parameters."); + return VBP_PARM; + } + + pcontext = (vbp_context *)hcontext; + + if (MAGIC_NUMBER != pcontext->identifier) + { + ETRACE("context is not initialized"); + return VBP_INIT; + } + + error = vbp_utils_parse_buffer(pcontext, data, size, init_data_flag); + + if (VBP_OK != error) + { + ETRACE("Failed to parse buffer: %d.", error); + } + return error; +} + +/** + * + */ +uint32 vbp_query(Handle hcontext, void **data) +{ + vbp_context *pcontext; + uint32 error = VBP_OK; + + if ((NULL == hcontext) || (NULL == data)) + { + ETRACE("Invalid input parameters."); + return VBP_PARM; + } + + pcontext = (vbp_context *)hcontext; + + if (MAGIC_NUMBER != pcontext->identifier) + { + ETRACE("context is not initialized"); + return VBP_INIT; + } + + error = vbp_utils_query(pcontext, data); + + if (VBP_OK != error) + { + ETRACE("Failed to query parsing result: %d.", error); + } + return error; +} + +/** + * + */ +uint32 vbp_flush(Handle hcontext) +{ + vbp_context *pcontext; + uint32 error = VBP_OK; + + if (NULL == hcontext) + { + ETRACE("Invalid input parameters."); + return VBP_PARM; + } + + pcontext = (vbp_context *)hcontext; + + if (MAGIC_NUMBER != pcontext->identifier) + { + ETRACE("context is not initialized"); + return VBP_INIT; + } + + error = vbp_utils_flush(pcontext); + + return error; +} diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h new file mode 100644 index 
0000000..66169dd
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
@@ -0,0 +1,318 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef VBP_LOADER_H
+#define VBP_LOADER_H
+
+#include <va/va.h>
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+
+#ifndef uint8
+typedef unsigned char uint8;
+#endif
+#ifndef uint16
+typedef unsigned short uint16;
+#endif
+#ifndef uint32
+typedef unsigned int uint32;
+#endif
+#ifndef bool
+typedef int bool;
+#endif
+
+typedef void *Handle;
+
+/*
+ * MPEG-4 Part 2 data structure
+ */
+
+typedef struct _vbp_codec_data_mp42
+{
+ uint8 profile_and_level_indication;
+} vbp_codec_data_mp42;
+
+typedef struct _vbp_slice_data_mp42
+{
+ uint8* buffer_addr;
+ uint32 slice_offset;
+ uint32 slice_size;
+ VASliceParameterBufferMPEG4 slice_param;
+} vbp_slice_data_mp42;
+
+typedef struct _vbp_picture_data_mp42
+{
+ uint8 vop_coded;
+ VAPictureParameterBufferMPEG4 picture_param;
+ VAIQMatrixBufferMPEG4 iq_matrix_buffer;
+
+ uint32 number_slices;
+ vbp_slice_data_mp42 *slice_data;
+
+} vbp_picture_data_mp42;
+
+typedef struct _vbp_data_mp42
+{
+ vbp_codec_data_mp42 codec_data;
+
+ uint32 number_pictures;
+ vbp_picture_data_mp42 *picture_data;
+
+} vbp_data_mp42;
+
+/*
+ * H.264 data structure
+ */
+
+typedef struct _vbp_codec_data_h264
+{
+ uint8 pic_parameter_set_id;
+ uint8 seq_parameter_set_id;
+
+ uint8 profile_idc;
+ uint8 level_idc;
+ uint8 constraint_set1_flag;
+
+ uint8 num_ref_frames;
+ uint8 gaps_in_frame_num_value_allowed_flag;
+
+ uint8 frame_mbs_only_flag;
+ uint8 mb_adaptive_frame_field_flag;
+
+ int frame_width;
+ int frame_height;
+
+ uint8 frame_cropping_flag;
+ int frame_crop_rect_left_offset;
+ int frame_crop_rect_right_offset;
+ int frame_crop_rect_top_offset;
+ int frame_crop_rect_bottom_offset;
+
+ uint8 vui_parameters_present_flag;
+ /* aspect ratio */
+ uint8 aspect_ratio_info_present_flag;
+ uint8 aspect_ratio_idc;
+ uint16 sar_width;
+ uint16 sar_height;
+
+ /* video format */
+ uint8 video_signal_type_present_flag;
+ uint8 video_format;
+
+} vbp_codec_data_h264;
+
+typedef struct _vbp_slice_data_h264
+{
+ uint8* buffer_addr;
+
+ uint32 slice_offset; /* slice data offset */
+
+ uint32 slice_size; /* slice data size */
+
+ VASliceParameterBufferH264 slc_parms;
+
+} vbp_slice_data_h264;
+
+
+typedef struct _vbp_picture_data_h264
+{
+ VAPictureParameterBufferH264* pic_parms;
+
+ uint32 num_slices;
+
+ vbp_slice_data_h264* slc_data;
+
+} vbp_picture_data_h264;
+
+
+typedef struct _vbp_data_h264
+{
+ /* rolling counter of buffers sent by vbp_parse */
+ uint32 buf_number;
+
+ uint32 num_pictures;
+
+ vbp_picture_data_h264* pic_data;
+
+ /**
+ * do we need to send matrix to VA for each picture? If not, we need
+ * a flag indicating whether it is updated.
+ */
+ VAIQMatrixBufferH264* IQ_matrix_buf;
+
+ vbp_codec_data_h264* codec_data;
+
+} vbp_data_h264;
+
+/*
+ * vc1 data structure
+ */
+typedef struct _vbp_codec_data_vc1
+{
+ /* Sequence layer. */
+ uint8 PROFILE;
+ uint8 LEVEL;
+ uint8 POSTPROCFLAG;
+ uint8 PULLDOWN;
+ uint8 INTERLACE;
+ uint8 TFCNTRFLAG;
+ uint8 FINTERPFLAG;
+ uint8 PSF;
+
+ /* Entry point layer. */
+ uint8 BROKEN_LINK;
+ uint8 CLOSED_ENTRY;
+ uint8 PANSCAN_FLAG;
+ uint8 REFDIST_FLAG;
+ uint8 LOOPFILTER;
+ uint8 FASTUVMC;
+ uint8 EXTENDED_MV;
+ uint8 DQUANT;
+ uint8 VSTRANSFORM;
+ uint8 OVERLAP;
+ uint8 QUANTIZER;
+ uint16 CODED_WIDTH;
+ uint16 CODED_HEIGHT;
+ uint8 EXTENDED_DMV;
+ uint8 RANGE_MAPY_FLAG;
+ uint8 RANGE_MAPY;
+ uint8 RANGE_MAPUV_FLAG;
+ uint8 RANGE_MAPUV;
+
+ /* Others. */
+ uint8 RANGERED;
+ uint8 MAXBFRAMES;
+ uint8 MULTIRES;
+ uint8 SYNCMARKER;
+ uint8 RNDCTRL;
+ uint8 REFDIST;
+ uint16 widthMB;
+ uint16 heightMB;
+
+ uint8 INTCOMPFIELD;
+ uint8 LUMSCALE2;
+ uint8 LUMSHIFT2;
+} vbp_codec_data_vc1;
+
+typedef struct _vbp_slice_data_vc1
+{
+ uint8 *buffer_addr;
+ uint32 slice_offset;
+ uint32 slice_size;
+ VASliceParameterBufferVC1 slc_parms; /* slice parameters */
+} vbp_slice_data_vc1;
+
+
+typedef struct _vbp_picture_data_vc1
+{
+ uint32 picture_is_skipped; /* VC1_PTYPE_SKIPPED if the picture is skipped. */
+ VAPictureParameterBufferVC1 *pic_parms; /* current parsed picture header */
+ uint32 size_bitplanes; /* based on number of MBs */
+ uint8 *packed_bitplanes; /* contains up to three bitplanes packed for libVA */
+ uint32 num_slices; /* number of slices. always at least one */
+ vbp_slice_data_vc1 *slc_data; /* pointer to array of slice data */
+} vbp_picture_data_vc1;
+
+typedef struct _vbp_data_vc1
+{
+ uint32 buf_number; /* rolling counter of buffers sent by vbp_parse */
+ vbp_codec_data_vc1 *se_data; /* parsed SH/EPs */
+
+ uint32 num_pictures;
+
+ vbp_picture_data_vc1* pic_data;
+} vbp_data_vc1;
+
+enum _picture_type
+{
+ VC1_PTYPE_I,
+ VC1_PTYPE_P,
+ VC1_PTYPE_B,
+ VC1_PTYPE_BI,
+ VC1_PTYPE_SKIPPED
+};
+
+enum _vbp_parser_error
+{
+ VBP_OK,
+ VBP_TYPE,
+ VBP_LOAD,
+ VBP_UNLOAD,
+ VBP_INIT,
+ VBP_DATA,
+ VBP_DONE,
+ VBP_GLIB,
+ VBP_MEM,
+ VBP_PARM,
+ VBP_CXT,
+ VBP_IMPL
+};
+
+enum _vbp_parser_type
+{
+ VBP_VC1,
+ VBP_MPEG2,
+ VBP_MPEG4,
+ VBP_H264
+};
+
+/*
+ * open video bitstream parser to parse a specific media type.
+ * @param parser_type: one of the types defined in #vbp_parser_type
+ * @param hcontext: pointer to hold returned VBP context handle.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_open(uint32 parser_type, Handle *hcontext);
+
+/*
+ * close video bitstream parser.
+ * @param hcontext: VBP context handle.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_close(Handle hcontext);
+
+/*
+ * parse bitstream.
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to bitstream buffer.
+ * @param size: size of bitstream buffer.
+ * @param init_data_flag: 1 if buffer contains bitstream configuration data, 0 otherwise.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag);
+
+/*
+ * query parsing result.
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to hold a data blob that contains parsing result.
+ * Structure of data blob is determined by the media type.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_query(Handle hcontext, void **data);
+
+
+/*
+ * flush any unparsed bitstream.
+ * @param hcontext: handle to VBP context.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_flush(Handle hcontext);
+
+#endif /* VBP_LOADER_H */
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
new file mode 100644
index 0000000..87beca4
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
@@ -0,0 +1,1277 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include <glib.h>
+#include <dlfcn.h>
+
+#include <string.h>
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_mp42_parser.h"
+#include "../codecs/mp4/parser/viddec_mp4_parse.h"
+
+#define MIX_VBP_COMP "mixvbp"
+
+/*
+ * Some DivX AVI files contain two frames in one gstbuffer.
+ */
+#define MAX_NUM_PICTURES_MP42 8
+
+uint32 vbp_get_sc_pos_mp42(uint8 *buf, uint32 length,
+ uint32* sc_phase, uint32 *sc_end_pos, uint8 *is_normal_sc);
+
+void vbp_on_vop_mp42(vbp_context *pcontext, int list_index);
+void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index);
+void vbp_dump_query_data(vbp_context *pcontext, int list_index);
+
+uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index);
+uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index);
+
+/* This is copied from the DHG mp42 parser */
+static inline mp4_Status_t
+vbp_sprite_trajectory_mp42(void *parent, mp4_VideoObjectLayer_t *vidObjLay,
+ mp4_VideoObjectPlane_t *vidObjPlane);
+
+/* This is copied from the DHG mp42 parser */
+static inline int32_t vbp_sprite_dmv_length_mp42(void * parent,
+ int32_t *dmv_length);
+
+/**
+ *
+ */
+uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext)
+{
+ if (NULL == pcontext->parser_ops)
+ {
+ /* absolutely impossible, just sanity check */
+ return VBP_PARM;
+ }
+ pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_mp4_init");
+ if (pcontext->parser_ops->init == NULL)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->parse_sc = dlsym(pcontext->fd_parser, "viddec_parse_sc_mp4");
+ if (pcontext->parser_ops->parse_sc == NULL)
+ {
+ ETRACE ("Failed to set entry point."
); + return VBP_LOAD; + } + + pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_mp4_parse"); + if (pcontext->parser_ops->parse_syntax == NULL) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_mp4_get_context_size"); + if (pcontext->parser_ops->get_cxt_size == NULL) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_mp4_wkld_done"); + if (pcontext->parser_ops->is_wkld_done == NULL) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + return VBP_OK; +} + + +/* + * For the codec_data passed by gstreamer + */ +uint32 vbp_parse_init_data_mp42(vbp_context *pcontext) +{ + VTRACE ("begin\n"); + vbp_parse_start_code_mp42(pcontext); + VTRACE ("end\n"); + + return VBP_OK; +} + +uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index) +{ + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + viddec_mp4_parser_t *parser = + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + + uint8 is_svh = 0; + uint32 current_sc = parser->current_sc; + is_svh = parser->cur_sc_prefix ? false : true; + + VTRACE ("begin\n"); + + VTRACE ("current_sc = 0x%x profile_and_level_indication = 0x%x\n", + parser->current_sc, parser->info.profile_and_level_indication); + + if (!is_svh) + { + /* remove prefix from current_sc */ + current_sc &= 0x0FF; + switch (current_sc) + { + case MP4_SC_VISUAL_OBJECT_SEQUENCE: + VTRACE ("MP4_SC_VISUAL_OBJECT_SEQUENCE\n"); + + query_data->codec_data.profile_and_level_indication + = parser->info.profile_and_level_indication; + + break; + case MP4_SC_VIDEO_OBJECT_PLANE: + VTRACE ("MP4_SC_VIDEO_OBJECT_PLANE\n"); + vbp_on_vop_mp42(pcontext, list_index); + break; + default: { + if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (current_sc + <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) { + query_data->codec_data.profile_and_level_indication + = parser->info.profile_and_level_indication; + } else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX) { + if (parser->sc_seen == MP4_SC_SEEN_SVH) { + VTRACE ("parser->sc_seen == MP4_SC_SEEN_SVH\n"); + vbp_on_vop_svh_mp42(pcontext, list_index); + } + } + } + break; + } + + } else { + if (parser->sc_seen == MP4_SC_SEEN_SVH) { + VTRACE ("parser->sc_seen == MP4_SC_SEEN_SVH\n"); + vbp_on_vop_svh_mp42(pcontext, list_index); + } + } + + VTRACE ("End\n"); + + return VBP_OK; +} + +/* + * This function fills viddec_pm_cxt_t by start codes + * I may change the codes to make it more efficient later + */ + +uint32 vbp_parse_start_code_mp42(vbp_context *pcontext) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + /*viddec_parser_ops_t *func = pcontext->parser_ops; */ + uint8 *buf = NULL; + uint32 size = 0; + uint32 sc_phase = 0; + uint32 sc_end_pos = -1; + + uint32 bytes_parsed = 0; + + viddec_mp4_parser_t *pinfo = NULL; + + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + /* reset query data for the new sample buffer */ + query_data->number_pictures = 0; + + /* emulation prevention byte is always present */ + cxt->getbits.is_emul_reqd = 1; + + cxt->list.num_items = 0; + cxt->list.data[0].stpos = 0; + cxt->list.data[0].edpos = cxt->parse_cubby.size; + + buf = cxt->parse_cubby.buf; + size = cxt->parse_cubby.size; + + pinfo = (viddec_mp4_parser_t *) &(cxt->codec_data[0]); + + uint8 is_normal_sc = 0; + + uint32 found_sc = 0; + + VTRACE ("begin cxt->parse_cubby.size= %d\n", size); + + while 
(1) { + + sc_phase = 0; + + found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size + - bytes_parsed, &sc_phase, &sc_end_pos, &is_normal_sc); + + if (found_sc) { + + VTRACE ("sc_end_pos = %d\n", sc_end_pos); + + cxt->list.data[cxt->list.num_items].stpos = bytes_parsed + + sc_end_pos - 3; + if (cxt->list.num_items != 0) { + cxt->list.data[cxt->list.num_items - 1].edpos = bytes_parsed + + sc_end_pos - 3; + } + bytes_parsed += sc_end_pos; + + cxt->list.num_items++; + pinfo->cur_sc_prefix = is_normal_sc; + + } else { + + if (cxt->list.num_items != 0) { + cxt->list.data[cxt->list.num_items - 1].edpos + = cxt->parse_cubby.size; + break; + } else { + + VTRACE ("I didn't find any sc in cubby buffer! The size of cubby is %d\n", + size); + + cxt->list.num_items = 1; + cxt->list.data[0].stpos = 0; + cxt->list.data[0].edpos = cxt->parse_cubby.size; + break; + } + } + } + + return VBP_OK; +} + +uint32 vbp_populate_query_data_mp42(vbp_context *pcontext) +{ +#if 0 + vbp_dump_query_data(pcontext); +#endif + return VBP_OK; +} + +void vbp_fill_codec_data(vbp_context *pcontext, int list_index) +{ + + /* fill vbp_codec_data_mp42 data */ + viddec_mp4_parser_t *parser = + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + query_data->codec_data.profile_and_level_indication + = parser->info.profile_and_level_indication; +} + +void vbp_fill_slice_data(vbp_context *pcontext, int list_index) +{ + + viddec_mp4_parser_t *parser = + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + + if (!parser->info.VisualObject.VideoObject.short_video_header) { + vbp_process_slices_mp42(pcontext, list_index); + } else { + vbp_process_slices_svh_mp42(pcontext, list_index); + } +} + +void vbp_fill_picture_param(vbp_context *pcontext, int list_index) +{ + + viddec_mp4_parser_t *parser = + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + + vbp_picture_data_mp42 *picture_data = NULL; + VAPictureParameterBufferMPEG4 *picture_param = NULL; + + picture_data = &(query_data->picture_data[query_data->number_pictures]); + + picture_param = &(picture_data->picture_param); + + uint8 idx = 0; + + picture_data->vop_coded + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded; + VTRACE ("vop_coded = %d\n", picture_data->vop_coded); + + /* + * fill picture_param + */ + + /* NOTE: for short video header, the parser saves vop_width and vop_height + * to VOL->video_object_layer_width and VOL->video_object_layer_height + */ + picture_param->vop_width + = parser->info.VisualObject.VideoObject.video_object_layer_width; + picture_param->vop_height + = parser->info.VisualObject.VideoObject.video_object_layer_height; + + picture_param->forward_reference_picture = VA_INVALID_SURFACE; + picture_param->backward_reference_picture = VA_INVALID_SURFACE; + + /* + * VAPictureParameterBufferMPEG4::vol_fields + */ + picture_param->vol_fields.bits.short_video_header + = parser->info.VisualObject.VideoObject.short_video_header; + picture_param->vol_fields.bits.chroma_format + = parser->info.VisualObject.VideoObject.VOLControlParameters.chroma_format; + + /* TODO: find out why testsuite always set this value to be 0 */ + // picture_param->vol_fields.bits.chroma_format = 0; + + picture_param->vol_fields.bits.interlaced + = parser->info.VisualObject.VideoObject.interlaced; + picture_param->vol_fields.bits.obmc_disable + = parser->info.VisualObject.VideoObject.obmc_disable; + 
picture_param->vol_fields.bits.sprite_enable + = parser->info.VisualObject.VideoObject.sprite_enable; + picture_param->vol_fields.bits.sprite_warping_accuracy + = parser->info.VisualObject.VideoObject.sprite_info.sprite_warping_accuracy; + picture_param->vol_fields.bits.quant_type + = parser->info.VisualObject.VideoObject.quant_type; + picture_param->vol_fields.bits.quarter_sample + = parser->info.VisualObject.VideoObject.quarter_sample; + picture_param->vol_fields.bits.data_partitioned + = parser->info.VisualObject.VideoObject.data_partitioned; + picture_param->vol_fields.bits.reversible_vlc + = parser->info.VisualObject.VideoObject.reversible_vlc; + picture_param->vol_fields.bits.resync_marker_disable + = parser->info.VisualObject.VideoObject.resync_marker_disable; + + picture_param->no_of_sprite_warping_points + = parser->info.VisualObject.VideoObject.sprite_info.no_of_sprite_warping_points; + + for (idx = 0; idx < 3; idx++) { + picture_param->sprite_trajectory_du[idx] + = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_du[idx]; + picture_param->sprite_trajectory_dv[idx] + = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_dv[idx]; + } + + picture_param->quant_precision + = parser->info.VisualObject.VideoObject.quant_precision; + + /* + * VAPictureParameterBufferMPEG4::vop_fields + */ + + if (!parser->info.VisualObject.VideoObject.short_video_header) { + picture_param->vop_fields.bits.vop_coding_type + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type; + } else { + picture_param->vop_fields.bits.vop_coding_type + = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.picture_coding_type; + } + + /* TODO: + * fill picture_param->vop_fields.bits.backward_reference_vop_coding_type + * This shall be done in mixvideoformat_mp42. 
See M42 spec 7.6.7 + */ + + if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B) { + picture_param->vop_fields.bits.backward_reference_vop_coding_type + = picture_param->vop_fields.bits.vop_coding_type; + } + + picture_param->vop_fields.bits.vop_rounding_type + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_rounding_type; + picture_param->vop_fields.bits.intra_dc_vlc_thr + = parser->info.VisualObject.VideoObject.VideoObjectPlane.intra_dc_vlc_thr; + picture_param->vop_fields.bits.top_field_first + = parser->info.VisualObject.VideoObject.VideoObjectPlane.top_field_first; + picture_param->vop_fields.bits.alternate_vertical_scan_flag + = parser->info.VisualObject.VideoObject.VideoObjectPlane.alternate_vertical_scan_flag; + + picture_param->vop_fcode_forward + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_forward; + picture_param->vop_fcode_backward + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_backward; + picture_param->vop_time_increment_resolution + = parser->info.VisualObject.VideoObject.vop_time_increment_resolution; + + /* short header related */ + picture_param->num_gobs_in_vop + = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_gobs_in_vop; + picture_param->num_macroblocks_in_gob + = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_macroblocks_in_gob; + + /* for direct mode prediction */ + picture_param->TRB = parser->info.VisualObject.VideoObject.TRB; + picture_param->TRD = parser->info.VisualObject.VideoObject.TRD; + +#if 0 + printf( + "parser->info.VisualObject.VideoObject.reduced_resolution_vop_enable = %d\n", + parser->info.VisualObject.VideoObject.reduced_resolution_vop_enable); + + printf("parser->info.VisualObject.VideoObject.data_partitioned = %d\n", + parser->info.VisualObject.VideoObject.data_partitioned); + + printf( + "####parser->info.VisualObject.VideoObject.resync_marker_disable = %d####\n", + parser->info.VisualObject.VideoObject.resync_marker_disable); +#endif +} + +void vbp_fill_iq_matrix_buffer(vbp_context *pcontext, int list_index) +{ + + viddec_mp4_parser_t *parser = + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + + mp4_VOLQuant_mat_t *quant_mat_info = + &(parser->info.VisualObject.VideoObject.quant_mat_info); + + vbp_picture_data_mp42 *picture_data = NULL; + VAIQMatrixBufferMPEG4 *iq_matrix = NULL; + + picture_data = &(query_data->picture_data[query_data->number_pictures]); + iq_matrix = &(picture_data->iq_matrix_buffer); + + iq_matrix->load_intra_quant_mat = quant_mat_info->load_intra_quant_mat; + iq_matrix->load_non_intra_quant_mat + = quant_mat_info->load_nonintra_quant_mat; + memcpy(iq_matrix->intra_quant_mat, quant_mat_info->intra_quant_mat, 64); + memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat, + 64); +} + +void vbp_on_vop_mp42(vbp_context *pcontext, int list_index) +{ + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + + vbp_fill_codec_data(pcontext, list_index); + + vbp_fill_picture_param(pcontext, list_index); + vbp_fill_iq_matrix_buffer(pcontext, list_index); + vbp_fill_slice_data(pcontext, list_index); + + query_data->number_pictures++; +} + +void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index) +{ + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + + vbp_fill_codec_data(pcontext, list_index); + + vbp_fill_picture_param(pcontext, list_index); + vbp_fill_iq_matrix_buffer(pcontext, 
list_index);
+ vbp_fill_slice_data(pcontext, list_index);
+
+ query_data->number_pictures++;
+}
+
+uint32 vbp_get_sc_pos_mp42(
+ uint8 *buf,
+ uint32 length,
+ uint32* sc_phase,
+ uint32 *sc_end_pos,
+ uint8 *is_normal_sc)
+{
+ uint8 *ptr = buf;
+ uint32 size;
+ uint32 data_left = 0, phase = 0, ret = 0;
+ size = 0;
+
+ data_left = length;
+ phase = *sc_phase;
+ *sc_end_pos = -1;
+
+ /* parse until there is more data and start code not found */
+ while ((data_left > 0) && (phase < 3)) {
+ /* Check if we are byte aligned & phase=0; if that's the case we can check
+ a word at a time instead of a byte */
+ if (((((uint32) ptr) & 0x3) == 0) && (phase == 0)) {
+ while (data_left > 3) {
+ uint32 data;
+ char mask1 = 0, mask2 = 0;
+
+ data = *((uint32 *) ptr);
+#ifndef MFDBIGENDIAN
+ data = SWAP_WORD(data);
+#endif
+ mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+ mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
+ /* If the second and fourth bytes are not zeros then we cannot have a start code here, as we need
+ two consecutive zero bytes for a start code pattern */
+ if (mask1 && mask2) {/* Success so skip 4 bytes and start over */
+ ptr += 4;
+ size += 4;
+ data_left -= 4;
+ continue;
+ } else {
+ break;
+ }
+ }
+ }
+
+ /* At this point either data is not on a word boundary, or phase > 0, or we are on a word boundary
+ but detected two zero bytes in the word, so we look one byte at a time */
+ if (data_left > 0) {
+ if (*ptr == FIRST_STARTCODE_BYTE) {/* Phase can be 3 only if third start code byte is found */
+ phase++;
+ ptr++;
+ size++;
+ data_left--;
+ if (phase > 2) {
+ phase = 2;
+
+ if ((((uint32) ptr) & 0x3) == 0) {
+ while (data_left > 3) {
+ if (*((uint32 *) ptr) != 0) {
+ break;
+ }
+ ptr += 4;
+ size += 4;
+ data_left -= 4;
+ }
+ }
+ }
+ } else {
+ uint8 normal_sc = 0, short_sc = 0;
+ if (phase == 2) {
+ normal_sc = (*ptr == THIRD_STARTCODE_BYTE);
+ short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC));
+
+ VTRACE ("short_sc = %d\n", short_sc);
+
+ *is_normal_sc = normal_sc;
+ }
+
+ if (!(normal_sc | short_sc)) {
+ phase = 0;
+ } else {/* Match for start code so update context with byte position */
+ *sc_end_pos = size;
+ phase = 3;
+
+ if (normal_sc) {
+ } else {
+ /* For short start code since start code is in one nibble just return at this point */
+ phase += 1;
+ ret = 1;
+ break;
+ }
+ }
+ ptr++;
+ size++;
+ data_left--;
+ }
+ }
+ }
+ if ((data_left > 0) && (phase == 3)) {
+ (*sc_end_pos)++;
+ phase++;
+ ret = 1;
+ }
+ *sc_phase = phase;
+ /* Return SC found only if phase is 4, else always success */
+ return ret;
+}
+
+uint32 vbp_macroblock_number_length_mp42(uint32 numOfMbs)
+{
+ uint32 length = 0;
+ numOfMbs--;
+ do {
+ numOfMbs >>= 1;
+ length++;
+ } while (numOfMbs);
+ return length;
+}
+
+mp4_Status_t vbp_video_packet_header_mp42(
+ void *parent,
+ viddec_mp4_parser_t *parser_cxt,
+ uint16_t *quant_scale,
+ uint32 *macroblock_number)
+{
+
+ mp4_Status_t ret = MP4_STATUS_OK;
+ mp4_Info_t *pInfo = &(parser_cxt->info);
+ mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
+ mp4_VideoObjectPlane_t *vidObjPlane =
+ &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+
+ uint32 code = 0;
+ int32_t getbits = 0;
+
+ uint16_t _quant_scale = 0;
+ uint32 _macroblock_number = 0;
+ uint32 header_extension_codes = 0;
+ uint8 vop_coding_type = vidObjPlane->vop_coding_type;
+
+ do {
+ if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) {
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+
+ /* get macroblock_number */
+ {
+ uint16_t mbs_x = (vidObjLay->video_object_layer_width + 15) >> 4;
+ uint16_t mbs_y = (vidObjLay->video_object_layer_height + 15) >> 4;
+ uint32 length = vbp_macroblock_number_length_mp42(mbs_x
+ * mbs_y);
+
+ getbits = viddec_pm_get_bits(parent, &code, length);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ _macroblock_number = code;
+ }
+
+ /* quant_scale */
+ if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) {
+ getbits = viddec_pm_get_bits(parent, &code,
+ vidObjLay->quant_precision);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ _quant_scale = code;
+ }
+
+ /* header_extension_codes */
+ if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) {
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ header_extension_codes = code;
+ }
+
+ if (header_extension_codes) {
+ do {
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ } while (code);
+
+ /* marker_bit */
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ /* vop_time_increment */
+ {
+ uint32 numbits = 0;
+ numbits = vidObjLay->vop_time_increment_resolution_bits;
+ if (numbits == 0) {
+ numbits = 1;
+ }
+ getbits = viddec_pm_get_bits(parent, &code, numbits);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ }
+ /* marker_bit */
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ /* vop_coding_type */
+ getbits = viddec_pm_get_bits(parent, &code, 2);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ vop_coding_type = code & 0x3;
+
+ /* Fixed Klocwork issue: the code was unreachable.
+ * The following check is commented out because
+ * video_object_layer_shape has already been checked above.
+ */
+ /* if (vidObjLay->video_object_layer_shape
+ != MP4_SHAPE_TYPE_RECTANGULAR) {
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+ */
+ if (vidObjLay->video_object_layer_shape
+ != MP4_SHAPE_TYPE_BINARYONLY) {
+ /* intra_dc_vlc_thr */
+ getbits = viddec_pm_get_bits(parent, &code, 3);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC)
+ && (vop_coding_type == MP4_VOP_TYPE_S)
+ && (vidObjLay->sprite_info.no_of_sprite_warping_points
+ > 0)) {
+ if (vbp_sprite_trajectory_mp42(parent, vidObjLay,
+ vidObjPlane) != MP4_STATUS_OK) {
+ break;
+ }
+ }
+
+ if (vidObjLay->reduced_resolution_vop_enable
+ && (vidObjLay->video_object_layer_shape
+ == MP4_SHAPE_TYPE_RECTANGULAR)
+ && ((vop_coding_type == MP4_VOP_TYPE_I)
+ || (vop_coding_type == MP4_VOP_TYPE_P))) {
+ /* vop_reduced_resolution */
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ }
+
+ if (vop_coding_type == MP4_VOP_TYPE_I) {
+ /* vop_fcode_forward */
+ getbits = viddec_pm_get_bits(parent, &code, 3);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ }
+
+ if (vop_coding_type == MP4_VOP_TYPE_B) {
+ /* vop_fcode_backward */
+ getbits = viddec_pm_get_bits(parent, &code, 3);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ }
+ }
+ }
+
+ if (vidObjLay->newpred_enable) {
+ /* New pred mode not supported in HW, but does libva support this?
*/ + ret = MP4_STATUS_NOTSUPPORT; + break; + } + + *quant_scale = _quant_scale; + *macroblock_number = _macroblock_number; + } while (0); + return ret; +} + +uint32 vbp_resync_marker_Length_mp42(viddec_mp4_parser_t *parser_cxt) +{ + + mp4_Info_t *pInfo = &(parser_cxt->info); + mp4_VideoObjectPlane_t *vidObjPlane = + &(pInfo->VisualObject.VideoObject.VideoObjectPlane); + + uint32 resync_marker_length = 0; + if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) { + resync_marker_length = 17; + } else if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B) { + uint8 fcode_max = vidObjPlane->vop_fcode_forward; + if (fcode_max < vidObjPlane->vop_fcode_backward) { + fcode_max = vidObjPlane->vop_fcode_backward; + } + resync_marker_length = 16 + fcode_max; + } else { + resync_marker_length = 16 + vidObjPlane->vop_fcode_forward; + } + return resync_marker_length; +} + +uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index) +{ + uint32 ret = MP4_STATUS_OK; + + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + viddec_pm_cxt_t *parent = pcontext->parser_cxt; + viddec_mp4_parser_t *parser_cxt = + (viddec_mp4_parser_t *) &(parent->codec_data[0]); + + VTRACE ("begin\n"); + + vbp_picture_data_mp42 *picture_data = + &(query_data->picture_data[query_data->number_pictures]); + vbp_slice_data_mp42 *slice_data = &(picture_data->slice_data[0]); + VASliceParameterBufferMPEG4* slice_param = &(slice_data->slice_param); + + picture_data->number_slices = 1; + + uint8 is_emul = 0; + uint32 bit_offset = 0; + uint32 byte_offset = 0; + + /* The offsets are relative to parent->parse_cubby.buf */ + viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); + + slice_data->buffer_addr = parent->parse_cubby.buf; + + slice_data->slice_offset = byte_offset + + parent->list.data[list_index].stpos; + slice_data->slice_size = parent->list.data[list_index].edpos + - parent->list.data[list_index].stpos - byte_offset; + + slice_param->slice_data_size = slice_data->slice_size; + slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + slice_param->slice_data_offset = 0; + slice_param->macroblock_offset = bit_offset; + slice_param->macroblock_number = 0; + slice_param->quant_scale + = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant; + + VTRACE ("end\n"); + + return ret; +} + +mp4_Status_t vbp_process_slices_mp42(vbp_context *pcontext, int list_index) +{ + + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + viddec_pm_cxt_t *parent = pcontext->parser_cxt; + viddec_mp4_parser_t *parser_cxt = + (viddec_mp4_parser_t *) &(parent->codec_data[0]); + + vbp_picture_data_mp42 *picture_data = NULL; + vbp_slice_data_mp42 *slice_data = NULL; + VASliceParameterBufferMPEG4* slice_param = NULL; + + uint32 ret = MP4_STATUS_OK; + + uint8 is_emul = 0; + uint32 bit_offset = 0; + uint32 byte_offset = 0; + + uint32 code = 0; + int32_t getbits = 0; + uint32 resync_marker_length = 0; + + uint32 slice_index = 0; + +#ifdef VBP_TRACE + uint32 list_size_at_index = parent->list.data[list_index].edpos + - parent->list.data[list_index].stpos; +#endif + + VTRACE ("list_index = %d list_size_at_index = %d\n", list_index, + list_size_at_index); + + VTRACE ("list_index = %d edpos = %d stpos = %d\n", list_index, + parent->list.data[list_index].edpos, + parent->list.data[list_index].stpos); + + /* The offsets are relative to parent->parse_cubby.buf */ + viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); + +#if 0 + if (is_emul) { + g_print("*** emul != 0\n"); + 
/*byte_offset += 1;*/
+ }
+#endif
+
+ picture_data = &(query_data->picture_data[query_data->number_pictures]);
+ slice_data = &(picture_data->slice_data[slice_index]);
+ slice_param = &(slice_data->slice_param);
+
+ slice_data->buffer_addr = parent->parse_cubby.buf;
+
+ slice_data->slice_offset = byte_offset
+ + parent->list.data[list_index].stpos;
+ slice_data->slice_size = parent->list.data[list_index].edpos
+ - parent->list.data[list_index].stpos - byte_offset;
+
+ slice_param->slice_data_size = slice_data->slice_size;
+ slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+ slice_param->slice_data_offset = 0;
+ slice_param->macroblock_offset = bit_offset;
+ slice_param->macroblock_number = 0;
+ slice_param->quant_scale
+ = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane.vop_quant;
+
+ slice_index++;
+ picture_data->number_slices = slice_index;
+
+ /*
+ * scan for resync_marker
+ */
+
+ if (!parser_cxt->info.VisualObject.VideoObject.resync_marker_disable) {
+
+ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+ if (bit_offset) {
+ getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset);
+ if (getbits == -1) {
+ ret = MP4_STATUS_PARSE_ERROR;
+ return ret;
+ }
+ }
+
+ /*
+ * get resync_marker_length
+ */
+ resync_marker_length = vbp_resync_marker_Length_mp42(parser_cxt);
+
+ while (1) {
+
+ uint16_t quant_scale = 0;
+ uint32 macroblock_number = 0;
+
+ getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ if (code != 1) {
+ getbits = viddec_pm_get_bits(parent, &code, 8);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ continue;
+ }
+
+ /*
+ * We found a resync_marker
+ */
+
+ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+ slice_data->slice_size -= (parent->list.data[list_index].edpos
+ - parent->list.data[list_index].stpos - byte_offset);
+ slice_param->slice_data_size = slice_data->slice_size;
+
+ slice_data = &(picture_data->slice_data[slice_index]);
+ slice_param = &(slice_data->slice_param);
+
+ /*
+ * parse video_packet_header
+ */
+ getbits = viddec_pm_get_bits(parent, &code, resync_marker_length);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ vbp_video_packet_header_mp42(parent, parser_cxt,
+ &quant_scale, &macroblock_number);
+
+ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+ slice_data->buffer_addr = parent->parse_cubby.buf;
+
+ slice_data->slice_offset = byte_offset
+ + parent->list.data[list_index].stpos;
+ slice_data->slice_size = parent->list.data[list_index].edpos
+ - parent->list.data[list_index].stpos - byte_offset;
+
+ slice_param->slice_data_size = slice_data->slice_size;
+ slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+ slice_param->slice_data_offset = 0;
+ slice_param->macroblock_offset = bit_offset;
+ slice_param->macroblock_number = macroblock_number;
+ slice_param->quant_scale = quant_scale;
+
+ slice_index++;
+
+ if (slice_index >= MAX_NUM_SLICES) {
+ ret = MP4_STATUS_PARSE_ERROR;
+ break;
+ }
+
+ picture_data->number_slices = slice_index;
+ }
+ }
+ return ret;
+}
+
+/* This is copied from the DHG MP42 parser */
+static inline int32_t vbp_sprite_dmv_length_mp42(
+ void * parent,
+ int32_t *dmv_length)
+{
+ uint32 code, skip;
+ int32_t getbits = 0;
+ mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+ *dmv_length = 0;
+ skip = 3;
+ do {
+ getbits = viddec_pm_peek_bits(parent, &code, skip);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ if (code == 7) {
+ viddec_pm_skip_bits(parent, skip);
+ getbits = viddec_pm_peek_bits(parent, &code, 9);
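+ /* The 3-bit prefix was all 1s, so the length code continues:
+ * the loop below counts the run of leading 1 bits in the next
+ * 9 bits and derives dmv_length from it (sprite-trajectory VLC,
+ * presumably per the MPEG-4 Part 2 table). */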
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            skip = 1;
+            while ((code & 256) != 0) {/* count number of 1 bits */
+                code <<= 1;
+                skip++;
+            }
+            *dmv_length = 5 + skip;
+        } else {
+            skip = (code <= 1) ? 2 : 3;
+            *dmv_length = code - 1;
+        }
+        viddec_pm_skip_bits(parent, skip);
+        ret = MP4_STATUS_OK;
+
+    } while (0);
+    return ret;
+}
+
+/* This is copied from DHG MP42 parser */
+static inline mp4_Status_t vbp_sprite_trajectory_mp42(
+        void *parent,
+        mp4_VideoObjectLayer_t *vidObjLay,
+        mp4_VideoObjectPlane_t *vidObjPlane)
+{
+    uint32 code, i;
+    int32_t dmv_length = 0, dmv_code = 0, getbits = 0;
+    mp4_Status_t ret = MP4_STATUS_OK;
+    for (i = 0; i
+            < (uint32) vidObjLay->sprite_info.no_of_sprite_warping_points; i++) {
+        ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length);
+        if (ret != MP4_STATUS_OK) {
+            break;
+        }
+        if (dmv_length <= 0) {
+            dmv_code = 0;
+        } else {
+            getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            dmv_code = (int32_t) code;
+            if ((dmv_code & (1 << (dmv_length - 1))) == 0) {
+                dmv_code -= (1 << dmv_length) - 1;
+            }
+        }
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        if (code != 1) {
+            ret = MP4_STATUS_PARSE_ERROR;
+            break;
+        }
+        vidObjPlane->warping_mv_code_du[i] = dmv_code;
+        /* TODO: create another inline function to avoid code duplication */
+        ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length);
+        if (ret != MP4_STATUS_OK) {
+            break;
+        }
+        if (dmv_length <= 0) {
+            dmv_code = 0;
+        } else {
+            getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            dmv_code = (int32_t) code;
+            if ((dmv_code & (1 << (dmv_length - 1))) == 0) {
+                dmv_code -= (1 << dmv_length) - 1;
+            }
+        }
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        if (code != 1) {
+            ret = MP4_STATUS_PARSE_ERROR;
+            break;
+        }
+        vidObjPlane->warping_mv_code_dv[i] = dmv_code;
+
+    }
+    return ret;
+}
+
+/*
+ * free memory of vbp_data_mp42 structure and its members
+ */
+uint32 vbp_free_query_data_mp42(vbp_context *pcontext)
+{
+
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    gint idx = 0;
+
+    if (query_data) {
+        if (query_data->picture_data) {
+            for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) {
+                g_free(query_data->picture_data[idx].slice_data);
+            }
+            g_free(query_data->picture_data);
+        }
+
+        g_free(query_data);
+    }
+
+    pcontext->query_data = NULL;
+    return VBP_OK;
+}
+
+/*
+ * Allocate memory for vbp_data_mp42 structure and all its members.
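+ * The layout is fixed-size: one vbp_data_mp42 holding MAX_NUM_PICTURES_MP42
+ * vbp_picture_data_mp42 entries, each of which holds MAX_NUM_SLICES
+ * vbp_slice_data_mp42 entries; any allocation failure unwinds via cleanup.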
+ */ +uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext) +{ + + gint idx = 0; + vbp_data_mp42 *query_data; + pcontext->query_data = NULL; + + query_data = g_try_new0(vbp_data_mp42, 1); + if (query_data == NULL) { + goto cleanup; + } + + query_data->picture_data = g_try_new0(vbp_picture_data_mp42, + MAX_NUM_PICTURES_MP42); + if (NULL == query_data->picture_data) { + goto cleanup; + } + + for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) { + query_data->picture_data[idx].number_slices = 0; + query_data->picture_data[idx].slice_data = g_try_new0( + vbp_slice_data_mp42, MAX_NUM_SLICES); + + if (query_data->picture_data[idx].slice_data == NULL) { + goto cleanup; + } + } + + pcontext->query_data = (void *) query_data; + return VBP_OK; + + cleanup: + + if (query_data) { + if (query_data->picture_data) { + for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) { + g_free(query_data->picture_data[idx].slice_data); + } + g_free(query_data->picture_data); + } + + g_free(query_data); + } + + return VBP_MEM; +} + +void vbp_dump_query_data(vbp_context *pcontext, int list_index) +{ + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + + vbp_picture_data_mp42 *picture_data = NULL; + VAPictureParameterBufferMPEG4 *picture_param = NULL; + vbp_slice_data_mp42 *slice_data = NULL; + + uint32 idx = 0, jdx = 0; + + for (idx = 0; idx < query_data->number_pictures; idx++) { + + picture_data = &(query_data->picture_data[idx]); + picture_param = &(picture_data->picture_param); + slice_data = &(picture_data->slice_data[0]); + + g_print("======================= dump_begin ======================\n\n"); + g_print("======================= codec_data ======================\n"); + + /* codec_data */ + g_print("codec_data.profile_and_level_indication = 0x%x\n", + query_data->codec_data.profile_and_level_indication); + + g_print("==================== picture_param =======================\n"); + + /* picture_param */ + g_print("picture_param->vop_width = %d\n", picture_param->vop_width); + g_print("picture_param->vop_height = %d\n", picture_param->vop_height); + + g_print("picture_param->vol_fields.bits.short_video_header = %d\n", + picture_param->vol_fields.bits.short_video_header); + g_print("picture_param->vol_fields.bits.chroma_format = %d\n", + picture_param->vol_fields.bits.chroma_format); + g_print("picture_param->vol_fields.bits.interlaced = %d\n", + picture_param->vol_fields.bits.interlaced); + g_print("picture_param->vol_fields.bits.obmc_disable = %d\n", + picture_param->vol_fields.bits.obmc_disable); + g_print("picture_param->vol_fields.bits.sprite_enable = %d\n", + picture_param->vol_fields.bits.sprite_enable); + g_print( + "picture_param->vol_fields.bits.sprite_warping_accuracy = %d\n", + picture_param->vol_fields.bits.sprite_warping_accuracy); + g_print("picture_param->vol_fields.bits.quant_type = %d\n", + picture_param->vol_fields.bits.quant_type); + g_print("picture_param->vol_fields.bits.quarter_sample = %d\n", + picture_param->vol_fields.bits.quarter_sample); + g_print("picture_param->vol_fields.bits.data_partitioned = %d\n", + picture_param->vol_fields.bits.data_partitioned); + g_print("picture_param->vol_fields.bits.reversible_vlc = %d\n", + picture_param->vol_fields.bits.reversible_vlc); + + g_print("picture_param->no_of_sprite_warping_points = %d\n", + picture_param->no_of_sprite_warping_points); + g_print("picture_param->quant_precision = %d\n", + picture_param->quant_precision); + g_print("picture_param->sprite_trajectory_du = %d, %d, %d\n", + 
picture_param->sprite_trajectory_du[0], + picture_param->sprite_trajectory_du[1], + picture_param->sprite_trajectory_du[2]); + g_print("picture_param->sprite_trajectory_dv = %d, %d, %d\n", + picture_param->sprite_trajectory_dv[0], + picture_param->sprite_trajectory_dv[1], + picture_param->sprite_trajectory_dv[2]); + + g_print("picture_param->vop_fields.bits.vop_coding_type = %d\n", + picture_param->vop_fields.bits.vop_coding_type); + g_print( + "picture_param->vop_fields.bits.backward_reference_vop_coding_type = %d\n", + picture_param->vop_fields.bits.backward_reference_vop_coding_type); + g_print("picture_param->vop_fields.bits.vop_rounding_type = %d\n", + picture_param->vop_fields.bits.vop_rounding_type); + g_print("picture_param->vop_fields.bits.intra_dc_vlc_thr = %d\n", + picture_param->vop_fields.bits.intra_dc_vlc_thr); + g_print("picture_param->vop_fields.bits.top_field_first = %d\n", + picture_param->vop_fields.bits.top_field_first); + g_print( + "picture_param->vop_fields.bits.alternate_vertical_scan_flag = %d\n", + picture_param->vop_fields.bits.alternate_vertical_scan_flag); + + g_print("picture_param->vop_fcode_forward = %d\n", + picture_param->vop_fcode_forward); + g_print("picture_param->vop_fcode_backward = %d\n", + picture_param->vop_fcode_backward); + g_print("picture_param->num_gobs_in_vop = %d\n", + picture_param->num_gobs_in_vop); + g_print("picture_param->num_macroblocks_in_gob = %d\n", + picture_param->num_macroblocks_in_gob); + g_print("picture_param->TRB = %d\n", picture_param->TRB); + g_print("picture_param->TRD = %d\n", picture_param->TRD); + + g_print("==================== slice_data ==========================\n"); + + g_print("slice_data.buffer_addr = 0x%x\n", + (unsigned int) slice_data->buffer_addr); + g_print("slice_data.slice_offset = 0x%x\n", slice_data->slice_offset); + g_print("slice_data.slice_size = 0x%x\n", slice_data->slice_size); + + g_print("slice_data.slice_param.macroblock_number = %d\n", + slice_data->slice_param.macroblock_number); + g_print("slice_data.slice_param.macroblock_offset = 0x%x\n", + slice_data->slice_param.macroblock_offset); + g_print("slice_data.slice_param.quant_scale = %d\n", + slice_data->slice_param.quant_scale); + g_print("slice_data.slice_param.slice_data_flag = %d\n", + slice_data->slice_param.slice_data_flag); + g_print("slice_data.slice_param.slice_data_offset = %d\n", + slice_data->slice_param.slice_data_offset); + g_print("slice_data.slice_param.slice_data_size = %d\n", + slice_data->slice_param.slice_data_size); + + g_print("================= iq_matrix_buffer ======================\n"); + g_print("iq_matrix_buffer.load_intra_quant_mat = %d\n", + picture_data->iq_matrix_buffer.load_intra_quant_mat); + g_print("iq_matrix_buffer.load_non_intra_quant_mat = %d\n", + picture_data->iq_matrix_buffer.load_non_intra_quant_mat); + + g_print("------- iq_matrix_buffer.intra_quant_mat ----------\n"); + for (jdx = 0; jdx < 64; jdx++) { + + g_print("%02x ", + picture_data->iq_matrix_buffer.intra_quant_mat[jdx]); + + if ((jdx + 1) % 8 == 0) { + g_print("\n"); + } + } + + g_print("----- iq_matrix_buffer.non_intra_quant_mat --------\n"); + for (jdx = 0; jdx < 64; jdx++) { + + g_print("%02x ", + picture_data->iq_matrix_buffer.non_intra_quant_mat[jdx]); + + if ((jdx + 1) % 8 == 0) { + g_print("\n"); + } + } + + g_print("-------- slice buffer begin ------------\n"); + + for (jdx = 0; jdx < 64; jdx++) { + g_print("%02x ", *(slice_data->buffer_addr + + slice_data->slice_offset + jdx)); + if ((jdx + 1) % 8 == 0) { + g_print("\n"); + } + } + 
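+        /* only the first 64 bytes of the slice payload are dumped above */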
g_print("-------- slice buffer begin ------------\n"); + + g_print("\n\n============== dump_end ==========================\n\n"); + + } +} + diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h new file mode 100644 index 0000000..c0deaa4 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h @@ -0,0 +1,49 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef VBP_MP42_PARSER_H +#define VBP_MP42_PARSER_H + +/* + * setup parser's entry points + */ + +uint32 vbp_init_parser_entries_mp42(vbp_context *pcontext); + + +/* + * allocate query data + */ +uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext); + +/* + * free query data + */ +uint32 vbp_free_query_data_mp42(vbp_context *pcontext); + +/* + * parse initialization data + */ +uint32 vbp_parse_init_data_mp42(vbp_context *pcontext); + +/* + * parse start code. + */ +uint32 vbp_parse_start_code_mp42(vbp_context *pcontext); + +/* + * process parsing result + */ +uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index); + +/* + * query parsing result + */ +uint32 vbp_populate_query_data_mp42(vbp_context *pcontext); + +#endif /*VBP_MP42_PARSER_H*/ diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.c b/mix_vbp/viddec_fw/fw/parser/vbp_trace.c new file mode 100644 index 0000000..d87bfd8 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.c @@ -0,0 +1,28 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. 
Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include "vbp_trace.h"
+
+#ifdef VBP_TRACE
+
+void vbp_trace_util(const char* cat, const char* fun, int line, const char* format, ...)
+{
+    if (NULL == cat || NULL == fun || NULL == format)
+        return;
+
+    printf("%s %s(#%d): ", cat, fun, line);
+    va_list args;
+    va_start(args, format);
+    vprintf(format, args);
+    va_end(args);
+    printf("\n");
+}
+
+#endif
+
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h
new file mode 100644
index 0000000..9f2a21c
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h
@@ -0,0 +1,47 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#ifndef VBP_TRACE_H_
+#define VBP_TRACE_H_
+
+
+
+//#define VBP_TRACE
+
+
+#ifdef VBP_TRACE /* if VBP_TRACE is defined */
+
+#include <stdio.h>
+#include <stdarg.h>
+
+extern void vbp_trace_util(const char* cat, const char* fun, int line, const char* format, ...);
+
+#define VBP_TRACE_UTIL(cat, format, ...) \
+vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__)
+
+
+#define ETRACE(format, ...) VBP_TRACE_UTIL("ERROR: ", format, ##__VA_ARGS__)
+#define WTRACE(format, ...) VBP_TRACE_UTIL("WARNING: ", format, ##__VA_ARGS__)
+#define ITRACE(format, ...) VBP_TRACE_UTIL("INFO: ", format, ##__VA_ARGS__)
+#define VTRACE(format, ...) VBP_TRACE_UTIL("VERBOSE: ", format, ##__VA_ARGS__)
+
+#else /* if VBP_TRACE is not defined */
+
+#define ETRACE(format, ...)
+#define WTRACE(format, ...)
+#define ITRACE(format, ...)
+#define VTRACE(format, ...)
+
+
+#endif /* VBP_TRACE */
+
+
+#endif /*VBP_TRACE_H_*/
+
+
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
new file mode 100644
index 0000000..651b801
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
@@ -0,0 +1,548 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions.
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include <glib.h>
+#include <dlfcn.h>
+
+#include "vc1.h"
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vc1_parser.h"
+#include "vbp_h264_parser.h"
+#include "vbp_mp42_parser.h"
+
+
+
+/* buffer counter */
+uint32 buffer_counter = 0;
+
+
+/**
+ *
+ * uninitialize parser context
+ *
+ */
+static uint32 vbp_utils_uninitialize_context(vbp_context *pcontext)
+{
+    uint32 error = VBP_OK;
+
+    if (NULL == pcontext)
+    {
+        return error;
+    }
+
+    /* no need to reset parser entry points. */
+
+    g_free(pcontext->parser_ops);
+    pcontext->parser_ops = NULL;
+
+
+    if (pcontext->fd_parser)
+    {
+        dlclose(pcontext->fd_parser);
+        pcontext->fd_parser = NULL;
+    }
+
+    return error;
+}
+
+/**
+ *
+ * initialize parser context
+ *
+ */
+static uint32 vbp_utils_initialize_context(vbp_context *pcontext)
+{
+    uint32 error = VBP_OK;
+    char *parser_name;
+
+    switch (pcontext->parser_type)
+    {
+        case VBP_VC1:
+            parser_name = "libmixvbp_vc1.so.0";
+            break;
+
+        /* MPEG-2 parser is not supported. */
+
+        /* case VBP_MPEG2:
+            parser_name = "libmixvbp_mpeg2.so.0";
+            break;*/
+
+        case VBP_MPEG4:
+            parser_name = "libmixvbp_mpeg4.so.0";
+            break;
+
+        case VBP_H264:
+            parser_name = "libmixvbp_h264.so.0";
+            break;
+
+        default:
+            g_warning("Warning! Unsupported parser type!");
+            return VBP_TYPE;
+    }
+
+    pcontext->fd_parser = dlopen(parser_name, RTLD_LAZY);
+    if (NULL == pcontext->fd_parser)
+    {
+        ETRACE("Failed to load parser %s.", parser_name);
+        error = VBP_LOAD;
+        goto cleanup;
+    }
+
+    pcontext->parser_ops = g_try_new(viddec_parser_ops_t, 1);
+    if (NULL == pcontext->parser_ops)
+    {
+        ETRACE("Failed to allocate memory");
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+#define SET_FUNC_POINTER(X, Y)\
+    case X:\
+        pcontext->func_init_parser_entries = vbp_init_parser_entries_##Y;\
+        pcontext->func_allocate_query_data = vbp_allocate_query_data_##Y;\
+        pcontext->func_free_query_data = vbp_free_query_data_##Y;\
+        pcontext->func_parse_init_data = vbp_parse_init_data_##Y;\
+        pcontext->func_parse_start_code = vbp_parse_start_code_##Y;\
+        pcontext->func_process_parsing_result = vbp_process_parsing_result_##Y;\
+        pcontext->func_populate_query_data = vbp_populate_query_data_##Y;\
+        break;
+
+    switch (pcontext->parser_type)
+    {
+        SET_FUNC_POINTER(VBP_VC1, vc1);
+        SET_FUNC_POINTER(VBP_MPEG4, mp42);
+        SET_FUNC_POINTER(VBP_H264, h264);
+    }
+
+    /* set entry points for parser operations:
+        init
+        parse_sc
+        parse_syntax
+        get_cxt_size
+        is_wkld_done
+        is_frame_start
+    */
+    error = pcontext->func_init_parser_entries(pcontext);
+
+cleanup:
+
+    if (VBP_OK != error)
+    {
+        /* no need to log error. the loader would have done so already. */
+        vbp_utils_uninitialize_context(pcontext);
+    }
+
+    return error;
+}
+
+/**
+*
+* free allocated memory.
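+* Releases, in this order: the format-specific query data, both workloads,
+* the persistent parser memory, and the parser context itself.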
+*
+*/
+static uint32 vbp_utils_free_parser_memory(vbp_context *pcontext)
+{
+    if (NULL == pcontext)
+    {
+        return VBP_OK;
+    }
+
+    if (pcontext->func_free_query_data)
+    {
+        pcontext->func_free_query_data(pcontext);
+    }
+
+    g_free(pcontext->workload2);
+    pcontext->workload2 = NULL;
+
+    g_free(pcontext->workload1);
+    pcontext->workload1 = NULL;
+
+    g_free(pcontext->persist_mem);
+    pcontext->persist_mem = NULL;
+
+    g_free(pcontext->parser_cxt);
+    pcontext->parser_cxt = NULL;
+
+    return VBP_OK;
+}
+
+
+/**
+ *
+ * allocate memory
+ *
+ */
+static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext)
+{
+    /* pcontext is guaranteed to be valid input. */
+    uint32 error = VBP_OK;
+    viddec_parser_memory_sizes_t sizes;
+
+    pcontext->parser_cxt = g_try_new(viddec_pm_cxt_t, 1);
+    if (NULL == pcontext->parser_cxt)
+    {
+        ETRACE("Failed to allocate memory");
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    /* invoke parser entry to get context size */
+    /* no return value, should always succeed. */
+    pcontext->parser_ops->get_cxt_size(&sizes);
+
+    /* allocate persistent memory for parser */
+    if (sizes.persist_size)
+    {
+        pcontext->persist_mem = g_try_malloc(sizes.persist_size);
+        if (NULL == pcontext->persist_mem)
+        {
+            ETRACE("Failed to allocate memory");
+            error = VBP_MEM;
+            goto cleanup;
+        }
+    }
+    else
+    {
+        /* OK for VC-1, MPEG2 and MPEG4. */
+        if ((VBP_VC1 == pcontext->parser_type) ||
+            (VBP_MPEG2 == pcontext->parser_type) ||
+            (VBP_MPEG4 == pcontext->parser_type))
+        {
+            pcontext->persist_mem = NULL;
+        }
+        else
+        {
+            /* mandatory for H.264 */
+            ETRACE("Failed to allocate memory");
+            error = VBP_CXT;
+            goto cleanup;
+        }
+    }
+
+    /* allocate a new workload with 1000 items. */
+    pcontext->workload1 = g_try_malloc(sizeof(viddec_workload_t) +
+            (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t)));
+    if (NULL == pcontext->workload1)
+    {
+        ETRACE("Failed to allocate memory");
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    /* allocate a second workload with 1000 items. */
+    pcontext->workload2 = g_try_malloc(sizeof(viddec_workload_t) +
+            (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t)));
+    if (NULL == pcontext->workload2)
+    {
+        ETRACE("Failed to allocate memory");
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    /* allocate format-specific query data */
+    error = pcontext->func_allocate_query_data(pcontext);
+
+cleanup:
+    if (error != VBP_OK)
+    {
+        vbp_utils_free_parser_memory(pcontext);
+    }
+    return error;
+}
+
+
+
+/**
+ *
+ * parse the elementary sample buffer or codec configuration data
+ *
+ */
+static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_flag)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    uint32 error = VBP_OK;
+    int i;
+
+    /* reset list number. func_parse_init_data or func_parse_start_code will
+     * set it equal to the number of sequence headers, picture headers or slice
+     * headers found in the sample buffer
+     */
+    cxt->list.num_items = 0;
+
+    /**
+     * READ THIS NOTE: cxt->getbits.is_emul_reqd must be set to 1
+     * for H.264, MPEG-4 and VC1 advanced profile, and set to 0
+     * for VC1 simple or main profile when parsing the frame
+     * buffer. When parsing the sequence header, it must be set to 1
+     * always.
+     *
+     * PARSER IMPLEMENTOR: set this flag in the parser.
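+     * Getting this flag wrong may cause the bitstream reader to mis-handle
+     * emulation-prevention bytes and return corrupted syntax values.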
+     */
+
+    /*
+    if ((codec_type == VBP_H264) || (codec_type == VBP_MPEG4))
+    {
+        cxt->getbits.is_emul_reqd = 1;
+    }
+    */
+
+
+    /* populate the list. */
+    if (init_data_flag)
+    {
+        error = pcontext->func_parse_init_data(pcontext);
+    }
+    else
+    {
+        error = pcontext->func_parse_start_code(pcontext);
+    }
+
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to parse the start code!");
+        return error;
+    }
+
+    /* set up bitstream buffer */
+    cxt->getbits.list = &(cxt->list);
+
+    /* setup buffer pointer */
+    cxt->getbits.bstrm_buf.buf = cxt->parse_cubby.buf;
+
+    /*
+     * TO DO:
+     * check if cxt->getbits.is_emul_reqd is set properly
+     */
+
+    for (i = 0; i < cxt->list.num_items; i++)
+    {
+        /* setup bitstream parser */
+        cxt->getbits.bstrm_buf.buf_index = cxt->list.data[i].stpos;
+        cxt->getbits.bstrm_buf.buf_st = cxt->list.data[i].stpos;
+        cxt->getbits.bstrm_buf.buf_end = cxt->list.data[i].edpos;
+
+        /* It is possible to end up with buf_offset not equal zero. */
+        cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+        cxt->getbits.au_pos = 0;
+        cxt->getbits.list_off = 0;
+        cxt->getbits.phase = 0;
+        cxt->getbits.emulation_byte_counter = 0;
+
+        cxt->list.start_offset = cxt->list.data[i].stpos;
+        cxt->list.end_offset = cxt->list.data[i].edpos;
+        cxt->list.total_bytes = cxt->list.data[i].edpos - cxt->list.data[i].stpos;
+
+        /* invoke parse entry point to parse the buffer */
+        error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
+
+        /* can't return error for now. Needs further investigation */
+
+        /*if (0 != error)
+        {
+            ETRACE("failed to parse the syntax: %d!", error);
+            return error;
+        }*/
+
+        /*
+         * process parsing result
+         */
+        error = pcontext->func_process_parsing_result(pcontext, i);
+
+        if (0 != error)
+        {
+            ETRACE("Failed to process parsing result.");
+            return error;
+        }
+    }
+
+    /* currently always assume a complete frame is supplied for parsing, so
+     * there is no need to check if workload is done
+     */
+
+    /*
+    uint32_t codec_errors = 0;
+    uint32_t state;
+
+    error = ops->is_wkld_done(
+        (void *)cxt,
+        (void *)&(cxt->codec_data[0]),
+        (uint32_t)cxt->sc_prefix_info.next_sc,
+        &codec_errors);
+    state = (error == VIDDEC_PARSE_FRMDONE) ? VBP_DONE : VBP_OK;
+    return state;
+    */
+
+    return VBP_OK;
+}
+
+
+/**
+ *
+ * create the parser context
+ *
+ */
+uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext)
+{
+    uint32 error = VBP_OK;
+    vbp_context *pcontext = NULL;
+
+    /* guard against failure */
+    *ppcontext = NULL;
+
+    pcontext = g_try_new0(vbp_context, 1);
+    if (NULL == pcontext)
+    {
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    pcontext->parser_type = parser_type;
+
+    /* load parser, initialize parser operators and entry points */
+    error = vbp_utils_initialize_context(pcontext);
+    if (VBP_OK != error)
+    {
+        goto cleanup;
+    }
+
+    /* allocate parser context, persistent memory, query data and workload */
+    error = vbp_utils_allocate_parser_memory(pcontext);
+    if (VBP_OK != error)
+    {
+        goto cleanup;
+    }
+
+    viddec_pm_utils_list_init(&(pcontext->parser_cxt->list));
+    viddec_pm_utils_bstream_init(&(pcontext->parser_cxt->getbits), NULL, 0);
+    pcontext->parser_cxt->cur_buf.list_index = -1;
+    pcontext->parser_cxt->parse_cubby.phase = 0;
+
+    /* invoke the entry point to initialize the parser. */
+    pcontext->parser_ops->init(
+        (void *)pcontext->parser_cxt->codec_data,
+        (void *)pcontext->persist_mem,
+        FALSE);
+
+    viddec_emit_init(&(pcontext->parser_cxt->emitter));
+
+    /* overwrite init with our number of items.
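+     * The two assignments below raise both the current and the next workload
+     * limits to MAX_WORKLOAD_ITEMS, overriding whatever viddec_emit_init() set.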
+     */
+    pcontext->parser_cxt->emitter.cur.max_items = MAX_WORKLOAD_ITEMS;
+    pcontext->parser_cxt->emitter.next.max_items = MAX_WORKLOAD_ITEMS;
+
+    /* set up to find the first start code. */
+    pcontext->parser_cxt->sc_prefix_info.first_sc_detect = 1;
+
+    /* indicates initialized OK. */
+    pcontext->identifier = MAGIC_NUMBER;
+    *ppcontext = pcontext;
+    error = VBP_OK;
+
+cleanup:
+
+    if (VBP_OK != error)
+    {
+        vbp_utils_free_parser_memory(pcontext);
+        vbp_utils_uninitialize_context(pcontext);
+        g_free(pcontext);
+        pcontext = NULL;
+    }
+
+    return error;
+}
+
+/**
+ *
+ * destroy the context.
+ *
+ */
+uint32 vbp_utils_destroy_context(vbp_context *pcontext)
+{
+    /* entry point, no need to validate input parameters. */
+    vbp_utils_free_parser_memory(pcontext);
+    vbp_utils_uninitialize_context(pcontext);
+    g_free(pcontext);
+    pcontext = NULL;
+
+    return VBP_OK;
+}
+
+
+/**
+ *
+ * parse the sample buffer or parser configuration data.
+ *
+ */
+uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, uint8 init_data_flag)
+{
+    /* entry point, no need to validate input parameters. */
+
+    uint32 error = VBP_OK;
+
+    /* ITRACE("buffer counter: %d",buffer_counter); */
+
+    /* set up emitter. */
+    pcontext->parser_cxt->emitter.cur.data = pcontext->workload1;
+    pcontext->parser_cxt->emitter.next.data = pcontext->workload2;
+
+    /* reset bit offset */
+    pcontext->parser_cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+
+    /* set up cubby. */
+    pcontext->parser_cxt->parse_cubby.buf = data;
+    pcontext->parser_cxt->parse_cubby.size = size;
+    pcontext->parser_cxt->parse_cubby.phase = 0;
+
+    error = vbp_utils_parse_es_buffer(pcontext, init_data_flag);
+
+    /* rolling count of buffers. */
+    if (0 == init_data_flag)
+    {
+        buffer_counter++;
+    }
+    return error;
+}
+
+/**
+ *
+ * provide query data back to the consumer
+ *
+ */
+uint32 vbp_utils_query(vbp_context *pcontext, void **data)
+{
+    /* entry point, no need to validate input parameters. */
+    uint32 error = VBP_OK;
+
+    error = pcontext->func_populate_query_data(pcontext);
+    if (VBP_OK == error)
+    {
+        *data = pcontext->query_data;
+    }
+    else
+    {
+        *data = NULL;
+    }
+    return error;
+}
+
+/**
+ *
+ * flush parsing buffer. Currently it is a no-op.
+ *
+ */
+uint32 vbp_utils_flush(vbp_context *pcontext)
+{
+    return VBP_IMPL;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
new file mode 100644
index 0000000..67ff3e8
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
@@ -0,0 +1,106 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise.
Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef VBP_UTILS_H +#define VBP_UTILS_H + +#include "viddec_parser_ops.h" +#include "viddec_pm_parse.h" +#include "viddec_pm.h" +#include "vbp_trace.h" + +#define MAGIC_NUMBER 0x0DEADBEEF +#define MAX_WORKLOAD_ITEMS 1000 + +/* maximum 256 slices per sample buffer */ +#define MAX_NUM_SLICES 256 + +/* maximum two pictures per sample buffer */ +#define MAX_NUM_PICTURES 2 + + +extern uint32 viddec_parse_sc(void *in, void *pcxt, void *sc_state); + +/* rolling counter of sample buffer */ +extern uint32 buffer_counter; + +typedef struct vbp_context_t vbp_context; + +typedef uint32 (*function_init_parser_entries)(vbp_context* cxt); +typedef uint32 (*function_allocate_query_data)(vbp_context* cxt); +typedef uint32 (*function_free_query_data)(vbp_context* cxt); +typedef uint32 (*function_parse_init_data)(vbp_context* cxt); +typedef uint32 (*function_parse_start_code)(vbp_context* cxt); +typedef uint32 (*function_process_parsing_result)(vbp_context* cxt, int i); +typedef uint32 (*function_populate_query_data)(vbp_context* cxt); + + + +struct vbp_context_t +{ + /* magic number */ + uint32 identifier; + + /* parser type, eg, MPEG-2, MPEG-4, H.264, VC1 */ + uint32 parser_type; + + /* handle to parser (shared object) */ + void *fd_parser; + + /* parser (shared object) entry points */ + viddec_parser_ops_t *parser_ops; + + /* parser context */ + viddec_pm_cxt_t *parser_cxt; + + /* work load */ + viddec_workload_t *workload1, *workload2; + + /* persistent memory for parser */ + uint32 *persist_mem; + + /* format specific query data */ + void *query_data; + + + function_init_parser_entries func_init_parser_entries; + function_allocate_query_data func_allocate_query_data; + function_free_query_data func_free_query_data; + function_parse_init_data func_parse_init_data; + function_parse_start_code func_parse_start_code; + function_process_parsing_result func_process_parsing_result; + function_populate_query_data func_populate_query_data; + +}; + +/** + * create VBP context + */ +uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext); + +/* + * destroy VBP context + */ +uint32 vbp_utils_destroy_context(vbp_context *pcontext); + +/* + * parse bitstream + */ +uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, uint8 init_data_flag); + +/* + * query parsing result + */ +uint32 vbp_utils_query(vbp_context *pcontext, void **data); + +/* + * flush un-parsed bitstream + */ +uint32 vbp_utils_flush(vbp_context *pcontext); + +#endif /* VBP_UTILS_H */ diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c new file mode 100644 index 0000000..502cdc6 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c @@ -0,0 +1,1029 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include <string.h>
+#include <dlfcn.h>
+#include <glib.h>
+
+#include "vc1.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vc1_parser.h"
+
+/* maximum number of Macroblocks divided by 2, see va.h */
+#define MAX_BITPLANE_SIZE 16384
+
+/* Start code prefix is 001 which is 3 bytes. */
+#define PREFIX_SIZE 3
+
+static uint32 b_fraction_table[][9] = {
+    /* num 0  1  2  3  4  5   6   7   8  den */
+    /* 0 */ { 0, 0, 0, 0, 0, 0,  0,  0,  0 },
+    /* 1 */ { 0, 0, 0, 1, 3, 5,  9, 11, 17 },
+    /* 2 */ { 0, 0, 0, 2, 0, 6,  0, 12,  0 },
+    /* 3 */ { 0, 0, 0, 0, 4, 7,  0, 13, 18 },
+    /* 4 */ { 0, 0, 0, 0, 0, 8,  0, 14,  0 },
+    /* 5 */ { 0, 0, 0, 0, 0, 0, 10, 15, 19 },
+    /* 6 */ { 0, 0, 0, 0, 0, 0,  0, 16,  0 },
+    /* 7 */ { 0, 0, 0, 0, 0, 0,  0,  0, 20 }
+};
+
+
+
+/**
+ * set parser entry points
+ */
+uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        /* impossible, just sanity check */
+        return VBP_PARM;
+    }
+
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_vc1_init");
+    if (NULL == pcontext->parser_ops->init)
+    {
+        ETRACE("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->parse_sc = viddec_parse_sc;
+
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_vc1_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax)
+    {
+        ETRACE("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_vc1_get_context_size");
+    if (NULL == pcontext->parser_ops->get_cxt_size)
+    {
+        ETRACE("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_vc1_wkld_done");
+    if (NULL == pcontext->parser_ops->is_wkld_done)
+    {
+        ETRACE("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->is_frame_start = dlsym(pcontext->fd_parser, "viddec_vc1_is_start_frame");
+    if (NULL == pcontext->parser_ops->is_frame_start)
+    {
+        ETRACE("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    return VBP_OK;
+}
+
+/**
+ * allocate query data structure
+ */
+uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext)
+{
+    if (NULL != pcontext->query_data)
+    {
+        /* impossible, just sanity check */
+        return VBP_PARM;
+    }
+
+    pcontext->query_data = NULL;
+
+    vbp_data_vc1 *query_data = NULL;
+    query_data = g_try_new0(vbp_data_vc1, 1);
+    if (NULL == query_data)
+    {
+        return VBP_MEM;
+    }
+
+    /* assign the pointer */
+    pcontext->query_data = (void *)query_data;
+
+    query_data->se_data = g_try_new0(vbp_codec_data_vc1, 1);
+    if (NULL == query_data->se_data)
+    {
+        goto cleanup;
+    }
+    query_data->pic_data = g_try_new0(vbp_picture_data_vc1, MAX_NUM_PICTURES);
+    if (NULL == query_data->pic_data)
+    {
+        goto cleanup;
+    }
+
+    int i;
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].pic_parms = g_try_new0(VAPictureParameterBufferVC1, 1);
+        if (NULL == query_data->pic_data[i].pic_parms)
+        {
+            goto cleanup;
+        }
+
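+        /* each picture also carries a packed bit-plane buffer (one byte covers
+         * two macroblocks, hence MAX_BITPLANE_SIZE) and room for MAX_NUM_SLICES
+         * slice-data entries, allocated below */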
+        query_data->pic_data[i].packed_bitplanes = g_try_malloc0(MAX_BITPLANE_SIZE);
+        if (NULL == query_data->pic_data[i].packed_bitplanes)
+        {
+            goto cleanup;
+        }
+
+        query_data->pic_data[i].slc_data = g_try_malloc0(MAX_NUM_SLICES * sizeof(vbp_slice_data_vc1));
+        if (NULL == query_data->pic_data[i].slc_data)
+        {
+            goto cleanup;
+        }
+    }
+
+    return VBP_OK;
+
+cleanup:
+    vbp_free_query_data_vc1(pcontext);
+
+    return VBP_MEM;
+}
+
+
+/**
+ * free query data structure
+ */
+uint32 vbp_free_query_data_vc1(vbp_context *pcontext)
+{
+    vbp_data_vc1 *query_data = NULL;
+
+    if (NULL == pcontext->query_data)
+    {
+        return VBP_OK;
+    }
+
+    query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+    if (query_data->pic_data)
+    {
+        int i = 0;
+        for (i = 0; i < MAX_NUM_PICTURES; i++)
+        {
+            g_free(query_data->pic_data[i].slc_data);
+            g_free(query_data->pic_data[i].packed_bitplanes);
+            g_free(query_data->pic_data[i].pic_parms);
+        }
+    }
+
+    g_free(query_data->pic_data);
+
+    g_free(query_data->se_data);
+
+    g_free(query_data);
+
+    pcontext->query_data = NULL;
+
+    return VBP_OK;
+}
+
+
+/**
+ * We want to create a list of buffer segments where each segment is a start
+ * code followed by all the data up to the next start code or to the end of
+ * the buffer. In VC-1, it is common to get buffers with no start codes. The
+ * parser proper doesn't really handle the situation where there are no SCs.
+ * In this case, I will bypass the stripping of the SC code and assume a frame.
+ */
+static uint32 vbp_parse_start_code_helper_vc1(
+    viddec_pm_cxt_t *cxt,
+    viddec_parser_ops_t *ops,
+    int init_data_flag)
+{
+    uint32_t ret = VBP_OK;
+    viddec_sc_parse_cubby_cxt_t cubby;
+
+    /* make copy of cubby */
+    /* this doesn't copy the buffer, merely the structure that holds the buffer */
+    /* pointer. Below, where we call parse_sc() the code starts the search for */
+    /* SCs at the beginning of the buffer pointed to by the cubby, so in our */
+    /* cubby copy we increment the pointer as we move through the buffer. If */
+    /* you think of each start code followed either by another start code or the */
+    /* end of the buffer, then parse_sc() is returning information relative to */
+    /* the current segment. */
+
+    cubby = cxt->parse_cubby;
+
+    cxt->list.num_items = 0;
+    cxt->list.data[0].stpos = 0;
+    cxt->getbits.is_emul_reqd = 1;
+
+    /* codec initialization data is always start code prefixed. (may not start at position 0)
+     * sample buffer for AP has three start code patterns here:
+     * pattern 0: no start code at all, the whole buffer is a single segment item
+     * pattern 1: start codes for all segment items
+     * pattern 2: no start code for the first segment item, start codes for the rest segment items
+     */
+
+    gboolean is_pattern_two = FALSE;
+
+    unsigned char start_code = 0;
+
+    while (1)
+    {
+        /* parse the created buffer for sc */
+        ret = ops->parse_sc((void *)&cubby, (void *)&(cxt->codec_data[0]), &(cxt->sc_prefix_info));
+        if (ret == 1)
+        {
+            cubby.phase = 0;
+            start_code = *(unsigned char*)(cubby.buf + cubby.sc_end_pos);
+#if 1
+            if (0 == init_data_flag &&
+                PREFIX_SIZE != cubby.sc_end_pos &&
+                0 == cxt->list.num_items)
+            {
+                /* buffer does not have start code at the beginning */
+                vc1_viddec_parser_t *parser = NULL;
+                vc1_metadata_t *seqLayerHeader = NULL;
+
+                parser = (vc1_viddec_parser_t *)cxt->codec_data;
+                seqLayerHeader = &(parser->info.metadata);
+                if (1 == seqLayerHeader->INTERLACE)
+                {
+                    /* this is a hack for interlaced field coding */
+                    /* handle field interlace coding.
One sample contains two fields, where:
+                     * the first field does not have start code prefix,
+                     * the second field has start code prefix.
+                     */
+                    cxt->list.num_items = 1;
+                    cxt->list.data[0].stpos = 0;
+                    is_pattern_two = TRUE;
+                }
+            }
+#endif
+            if (cxt->list.num_items == 0) /* found first SC. */
+            {
+                /* sc_end_pos gets us to the SC type. We need to back up to the first zero */
+                cxt->list.data[0].stpos = cubby.sc_end_pos - PREFIX_SIZE;
+            }
+            else
+            {
+                /* First we set the end position of the last segment. */
+                /* Since the SC parser searches from SC type to SC type and the */
+                /* sc_end_pos is relative to this segment only, we merely add */
+                /* sc_end_pos to the start to find the end. */
+                cxt->list.data[cxt->list.num_items - 1].edpos =
+                    cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos;
+
+                /* Then we set the start position of the current segment. */
+                /* So I need to subtract 1 ??? */
+                cxt->list.data[cxt->list.num_items].stpos =
+                    cxt->list.data[cxt->list.num_items - 1].edpos;
+
+                if (is_pattern_two)
+                {
+                    cxt->list.data[cxt->list.num_items].stpos -= PREFIX_SIZE;
+                    /* restore to normal pattern */
+                    is_pattern_two = FALSE;
+                }
+            }
+            /* We need to set up the cubby buffer for the next time through parse_sc(). */
+            /* But even though we want the list to contain a segment as described */
+            /* above, we want the cubby buffer to start just past the prefix, or it will */
+            /* find the same SC again. So I bump the cubby buffer past the prefix. */
+            cubby.buf = cubby.buf +
+                cxt->list.data[cxt->list.num_items].stpos +
+                PREFIX_SIZE;
+
+            cubby.size = cxt->parse_cubby.size -
+                cxt->list.data[cxt->list.num_items].stpos -
+                PREFIX_SIZE;
+
+            if (start_code >= 0x0A && start_code <= 0x0F)
+            {
+                /* only put known start code to the list
+                 * 0x0A: end of sequence
+                 * 0x0B: slice header
+                 * 0x0C: frame header
+                 * 0x0D: field header
+                 * 0x0E: entry point header
+                 * 0x0F: sequence header
+                 */
+                cxt->list.num_items++;
+            }
+            else
+            {
+                ITRACE("skipping unknown start code :%d", start_code);
+            }
+
+            if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+            {
+                WTRACE("Num items exceeds the limit!");
+                /* not fatal, just stop parsing */
+                break;
+            }
+        }
+        else
+        {
+            /* we get here if we reach the end of the buffer while looking for a SC. */
+            /* If we never found a SC, then num_items will never get incremented. */
+            if (cxt->list.num_items == 0)
+            {
+                /* If we don't find a SC we probably still have a frame of data. */
+                /* So let's bump the num_items or else later we will not parse the */
+                /* frame. */
+                cxt->list.num_items = 1;
+            }
+            /* now we can set the end position of the last segment. */
+            cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
+            break;
+        }
+    }
+    return VBP_OK;
+}
+
+/*
+* parse initialization data (decoder configuration data)
+* for VC1 advanced profile, data is sequence header and
+* entry point header.
+* for VC1 main/simple profile, data format
+* is defined in VC1 spec: Annex J, (Decoder initialization metadata
+* structure 1 and structure 3)
+*/
+uint32 vbp_parse_init_data_vc1(vbp_context *pcontext)
+{
+    /**
+     * init data (aka decoder configuration data) must
+     * be start-code prefixed
+     */
+
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    return vbp_parse_start_code_helper_vc1(cxt, ops, 1);
+}
+
+
+
+/**
+* Parse start codes; VC1 main/simple profile does not have start codes, and
+* VC1 advanced may not have start codes either.
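+* When no start code is found, the whole sample buffer is treated as a single
+* segment item so that the frame still gets parsed.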
+*/
+uint32_t vbp_parse_start_code_vc1(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+
+    vc1_viddec_parser_t *parser = NULL;
+    vc1_metadata_t *seqLayerHeader = NULL;
+
+    vbp_data_vc1 *query_data = (vbp_data_vc1 *) pcontext->query_data;
+
+    /* Reset query data for the new sample buffer */
+    int i = 0;
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->num_pictures = 0;
+        query_data->pic_data[i].num_slices = 0;
+        query_data->pic_data[i].picture_is_skipped = 0;
+    }
+
+    parser = (vc1_viddec_parser_t *)cxt->codec_data;
+    seqLayerHeader = &(parser->info.metadata);
+
+
+    /* WMV codec data will have a start code, but the WMV picture data won't. */
+    if (VC1_PROFILE_ADVANCED == seqLayerHeader->PROFILE)
+    {
+        return vbp_parse_start_code_helper_vc1(cxt, ops, 0);
+    }
+    else
+    {
+        /* WMV: vc1 simple or main profile. No start code present.
+         */
+
+        /* must set is_emul_reqd to 0! */
+        cxt->getbits.is_emul_reqd = 0;
+        cxt->list.num_items = 1;
+        cxt->list.data[0].stpos = 0;
+        cxt->list.data[0].edpos = cxt->parse_cubby.size;
+    }
+
+    return VBP_OK;
+}
+
+
+/**
+ *
+ */
+static inline uint8 vbp_get_bit_vc1(uint32 *data, uint32 *current_word, uint32 *current_bit)
+{
+    uint8 value;
+
+    value = (data[*current_word] >> *current_bit) & 1;
+
+    /* Fix up bit/byte offsets. endianness?? */
+    if (*current_bit < 31)
+    {
+        ++(*current_bit);
+    }
+    else
+    {
+        ++(*current_word);
+        *current_bit = 0;
+    }
+
+    return value;
+}
+
+
+/**
+ *
+ */
+static uint32 vbp_pack_bitplane_vc1(
+    uint32 *from_plane,
+    uint8 *to_plane,
+    uint32 width,
+    uint32 height,
+    uint32 nibble_shift)
+{
+    uint32 error = VBP_OK;
+    uint32 current_word = 0;
+    uint32 current_bit = 0; /* must agree with number in vbp_get_bit_vc1 */
+    uint32 i, j, n;
+    uint8 value;
+    uint32 stride = 0;
+
+    stride = 32 * ((width + 31) / 32);
+
+    for (i = 0, n = 0; i < height; i++)
+    {
+        for (j = 0; j < stride; j++)
+        {
+            if (j < width)
+            {
+                value = vbp_get_bit_vc1(
+                    from_plane,
+                    &current_word,
+                    &current_bit);
+
+                to_plane[n / 2] |= value << (nibble_shift + ((n % 2) ?
0 : 4)); + n++; + } + else + { + break; + } + } + if (stride > width) + { + current_word++; + current_bit = 0; + } + } + + return error; +} + + +/** + * + */ +static inline uint32 vbp_map_bfraction(uint32 numerator, uint32 denominator) +{ + uint32 b_fraction = 0; + + if ((numerator < 8) && (denominator < 9)) + { + b_fraction = b_fraction_table[numerator][denominator]; + } + + return b_fraction; +} + +/** + * + */ +static uint32 vbp_pack_bitplanes_vc1( + vbp_context *pcontext, + int index, + vbp_picture_data_vc1* pic_data) +{ + uint32 error = VBP_OK; + if (0 == pic_data->pic_parms->bitplane_present.value) + { + /* return if bitplane is not present */ + pic_data->size_bitplanes = 0; + memset(pic_data->packed_bitplanes, 0, MAX_BITPLANE_SIZE); + return error; + } + + vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data; + vc1_metadata_t *seqLayerHeader = &(parser->info.metadata); + vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader); + + + /* set bit plane size */ + pic_data->size_bitplanes = ((seqLayerHeader->widthMB * seqLayerHeader->heightMB) + 1) / 2; + + + memset(pic_data->packed_bitplanes, 0, pic_data->size_bitplanes); + + /* see libva library va.h for nibble bit */ + switch (picLayerHeader->PTYPE) + { + case VC1_I_FRAME: + case VC1_BI_FRAME: + if (picLayerHeader->OVERFLAGS.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->OVERFLAGS.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 2); + } + if (picLayerHeader->ACPRED.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->ACPRED.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 1); + } + if (picLayerHeader->FIELDTX.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->FIELDTX.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 0); + } + /* sanity check */ + if (picLayerHeader->MVTYPEMB.imode || + picLayerHeader->DIRECTMB.imode || + picLayerHeader->SKIPMB.imode || + picLayerHeader->FORWARDMB.imode) + { + ETRACE("Unexpected bit-plane type."); + error = VBP_TYPE; + } + break; + + case VC1_P_FRAME: + if (picLayerHeader->MVTYPEMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->MVTYPEMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 2); + } + if (picLayerHeader->SKIPMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->SKIPMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 1); + } + if (picLayerHeader->DIRECTMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->DIRECTMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 0); + } + /* sanity check */ + if (picLayerHeader->FIELDTX.imode || + picLayerHeader->FORWARDMB.imode || + picLayerHeader->ACPRED.imode || + picLayerHeader->OVERFLAGS.imode ) + { + ETRACE("Unexpected bit-plane type."); + error = VBP_TYPE; + } + break; + + case VC1_B_FRAME: + if (picLayerHeader->FORWARDMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->FORWARDMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 2); + } + if (picLayerHeader->SKIPMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->SKIPMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 1); + } + if (picLayerHeader->DIRECTMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->DIRECTMB.databits, + 
pic_data->packed_bitplanes,
+                seqLayerHeader->widthMB,
+                seqLayerHeader->heightMB,
+                0);
+        }
+        /* sanity check */
+        if (picLayerHeader->MVTYPEMB.imode ||
+            picLayerHeader->FIELDTX.imode ||
+            picLayerHeader->ACPRED.imode ||
+            picLayerHeader->OVERFLAGS.imode)
+        {
+            ETRACE("Unexpected bit-plane type.");
+            error = VBP_TYPE;
+        }
+        break;
+    }
+    return error;
+}
+
+
+/**
+ * fill the query data structure after sequence header, entry point header
+ * or a complete frame is parsed.
+ * NOTE: currently partial frame is not handled properly
+ */
+uint32 vbp_populate_query_data_vc1(vbp_context *pcontext)
+{
+    uint32 error = VBP_OK;
+
+    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data;
+    vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
+
+    vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+    /* first we get the SH/EP data. Can we cut down on this? */
+    vbp_codec_data_vc1 *se_data = query_data->se_data;
+    se_data->PROFILE = seqLayerHeader->PROFILE;
+    se_data->LEVEL = seqLayerHeader->LEVEL;
+    se_data->POSTPROCFLAG = seqLayerHeader->POSTPROCFLAG;
+    se_data->PULLDOWN = seqLayerHeader->PULLDOWN;
+    se_data->INTERLACE = seqLayerHeader->INTERLACE;
+    se_data->TFCNTRFLAG = seqLayerHeader->TFCNTRFLAG;
+    se_data->FINTERPFLAG = seqLayerHeader->FINTERPFLAG;
+    se_data->PSF = seqLayerHeader->PSF;
+    se_data->BROKEN_LINK = seqLayerHeader->BROKEN_LINK;
+    se_data->CLOSED_ENTRY = seqLayerHeader->CLOSED_ENTRY;
+    se_data->PANSCAN_FLAG = seqLayerHeader->PANSCAN_FLAG;
+    se_data->REFDIST_FLAG = seqLayerHeader->REFDIST_FLAG;
+    se_data->LOOPFILTER = seqLayerHeader->LOOPFILTER;
+    se_data->FASTUVMC = seqLayerHeader->FASTUVMC;
+    se_data->EXTENDED_MV = seqLayerHeader->EXTENDED_MV;
+    se_data->DQUANT = seqLayerHeader->DQUANT;
+    se_data->VSTRANSFORM = seqLayerHeader->VSTRANSFORM;
+    se_data->OVERLAP = seqLayerHeader->OVERLAP;
+    se_data->QUANTIZER = seqLayerHeader->QUANTIZER;
+    se_data->CODED_WIDTH = (seqLayerHeader->width + 1) << 1;
+    se_data->CODED_HEIGHT = (seqLayerHeader->height + 1) << 1;
+    se_data->EXTENDED_DMV = seqLayerHeader->EXTENDED_DMV;
+    se_data->RANGE_MAPY_FLAG = seqLayerHeader->RANGE_MAPY_FLAG;
+    se_data->RANGE_MAPY = seqLayerHeader->RANGE_MAPY;
+    se_data->RANGE_MAPUV_FLAG = seqLayerHeader->RANGE_MAPUV_FLAG;
+    se_data->RANGE_MAPUV = seqLayerHeader->RANGE_MAPUV;
+    se_data->RANGERED = seqLayerHeader->RANGERED;
+    se_data->MAXBFRAMES = seqLayerHeader->MAXBFRAMES;
+    se_data->MULTIRES = seqLayerHeader->MULTIRES;
+    se_data->SYNCMARKER = seqLayerHeader->SYNCMARKER;
+    se_data->RNDCTRL = seqLayerHeader->RNDCTRL;
+    se_data->REFDIST = seqLayerHeader->REFDIST;
+    se_data->widthMB = seqLayerHeader->widthMB;
+    se_data->heightMB = seqLayerHeader->heightMB;
+    se_data->INTCOMPFIELD = seqLayerHeader->INTCOMPFIELD;
+    se_data->LUMSCALE2 = seqLayerHeader->LUMSCALE2;
+    se_data->LUMSHIFT2 = seqLayerHeader->LUMSHIFT2;
+
+    /* update buffer number */
+    query_data->buf_number = buffer_counter;
+
+    if (query_data->num_pictures > 2)
+    {
+        WTRACE("sample buffer contains %d pictures", query_data->num_pictures);
+    }
+    return error;
+}
+
+
+
+static void vbp_pack_picture_params_vc1(
+    vbp_context *pcontext,
+    int index,
+    vbp_picture_data_vc1* pic_data)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
+    vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
+    vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader);
+
+
+    VAPictureParameterBufferVC1 *pic_parms = pic_data->pic_parms;
+
+    /* Then we get the
picture header data. Picture type needs translation. */
+    pic_parms->forward_reference_picture = VA_INVALID_SURFACE;
+    pic_parms->backward_reference_picture = VA_INVALID_SURFACE;
+    pic_parms->inloop_decoded_picture = VA_INVALID_SURFACE;
+
+    pic_parms->sequence_fields.value = 0;
+    pic_parms->sequence_fields.bits.interlace = seqLayerHeader->INTERLACE;
+    pic_parms->sequence_fields.bits.syncmarker = seqLayerHeader->SYNCMARKER;
+    pic_parms->sequence_fields.bits.overlap = seqLayerHeader->OVERLAP;
+
+    pic_parms->coded_width = (seqLayerHeader->width + 1) << 1;
+    pic_parms->coded_height = (seqLayerHeader->height + 1) << 1;
+
+    pic_parms->entrypoint_fields.value = 0;
+    pic_parms->entrypoint_fields.bits.closed_entry = seqLayerHeader->CLOSED_ENTRY;
+    pic_parms->entrypoint_fields.bits.broken_link = seqLayerHeader->BROKEN_LINK;
+    pic_parms->entrypoint_fields.bits.loopfilter = seqLayerHeader->LOOPFILTER;
+
+    pic_parms->conditional_overlap_flag = picLayerHeader->CONDOVER;
+    pic_parms->fast_uvmc_flag = seqLayerHeader->FASTUVMC;
+
+    pic_parms->range_mapping_fields.value = 0;
+    pic_parms->range_mapping_fields.bits.luma_flag = seqLayerHeader->RANGE_MAPY_FLAG;
+    pic_parms->range_mapping_fields.bits.luma = seqLayerHeader->RANGE_MAPY;
+    pic_parms->range_mapping_fields.bits.chroma_flag = seqLayerHeader->RANGE_MAPUV_FLAG;
+    pic_parms->range_mapping_fields.bits.chroma = seqLayerHeader->RANGE_MAPUV;
+
+    pic_parms->b_picture_fraction =
+        vbp_map_bfraction(picLayerHeader->BFRACTION_NUM, picLayerHeader->BFRACTION_DEN);
+
+    pic_parms->cbp_table = picLayerHeader->CBPTAB;
+    pic_parms->mb_mode_table = picLayerHeader->MBMODETAB;
+    pic_parms->range_reduction_frame = picLayerHeader->RANGEREDFRM;
+    pic_parms->rounding_control = picLayerHeader->RNDCTRL;
+    pic_parms->post_processing = picLayerHeader->POSTPROC;
+    /* fix this. Add RESPIC to parser. */
+    pic_parms->picture_resolution_index = 0;
+    pic_parms->luma_scale = picLayerHeader->LUMSCALE;
+    pic_parms->luma_shift = picLayerHeader->LUMSHIFT;
+
+    pic_parms->picture_fields.value = 0;
+    switch (picLayerHeader->PTYPE)
+    {
+        case VC1_I_FRAME:
+            pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_I;
+            break;
+
+        case VC1_P_FRAME:
+            pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_P;
+            break;
+
+        case VC1_B_FRAME:
+            pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_B;
+            break;
+
+        case VC1_BI_FRAME:
+            pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_BI;
+            break;
+
+        case VC1_SKIPPED_FRAME:
+            pic_data->picture_is_skipped = VC1_PTYPE_SKIPPED;
+            break;
+
+        default:
+            /* to do: handle this case */
+            break;
+    }
+    pic_parms->picture_fields.bits.frame_coding_mode = picLayerHeader->FCM;
+    if (0 == seqLayerHeader->PROFILE || 1 == seqLayerHeader->PROFILE)
+    {
+        /* simple or main profile, top field flag is not present, default to 1. */
+        pic_parms->picture_fields.bits.top_field_first = 1;
+    }
+    else
+    {
+        pic_parms->picture_fields.bits.top_field_first = picLayerHeader->TFF;
+    }
+
+    pic_parms->picture_fields.bits.is_first_field = !(picLayerHeader->CurrField);
+    /* This seems to be set based on the MVMODE and MVMODE2 syntax. */
+    /* This is a hack. Probably will need refining. */
+    if ((VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE) ||
+        (VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE2))
+    {
+        pic_parms->picture_fields.bits.intensity_compensation = 1;
+    }
+    else
+    {
+        pic_parms->picture_fields.bits.intensity_compensation = picLayerHeader->INTCOMP;
+    }
+
+    /* Let's store the raw-mode BP bits.
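+     * An imode of "raw" means the bit-plane is signalled per macroblock in the
+     * macroblock layer instead of being coded in the picture header.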
*/ + pic_parms->raw_coding.value = 0; + pic_parms->raw_coding.flags.mv_type_mb = picLayerHeader->raw_MVTYPEMB; + pic_parms->raw_coding.flags.direct_mb = picLayerHeader->raw_DIRECTMB; + pic_parms->raw_coding.flags.skip_mb = picLayerHeader->raw_SKIPMB; + pic_parms->raw_coding.flags.field_tx = picLayerHeader->raw_FIELDTX; + pic_parms->raw_coding.flags.forward_mb = picLayerHeader->raw_FORWARDMB; + pic_parms->raw_coding.flags.ac_pred = picLayerHeader->raw_ACPRED; + pic_parms->raw_coding.flags.overflags = picLayerHeader->raw_OVERFLAGS; + + /* imode 1/0 indicates bitmap presence in Pic Hdr. */ + pic_parms->bitplane_present.value = 0; + + pic_parms->bitplane_present.flags.bp_mv_type_mb = + pic_parms->raw_coding.flags.mv_type_mb ? 1 : + (picLayerHeader->MVTYPEMB.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_direct_mb = + pic_parms->raw_coding.flags.direct_mb ? 1 : + (picLayerHeader->DIRECTMB.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_skip_mb = + pic_parms->raw_coding.flags.skip_mb ? 1 : + (picLayerHeader->SKIPMB.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_field_tx = + pic_parms->raw_coding.flags.field_tx ? 1 : + (picLayerHeader->FIELDTX.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_forward_mb = + pic_parms->raw_coding.flags.forward_mb ? 1 : + (picLayerHeader->FORWARDMB.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_ac_pred = + pic_parms->raw_coding.flags.ac_pred ? 1 : + (picLayerHeader->ACPRED.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_overflags = + pic_parms->raw_coding.flags.overflags ? 1 : + (picLayerHeader->OVERFLAGS.imode ? 1: 0); + + pic_parms->reference_fields.value = 0; + pic_parms->reference_fields.bits.reference_distance_flag = + seqLayerHeader->REFDIST_FLAG; + + pic_parms->reference_fields.bits.reference_distance = + seqLayerHeader->REFDIST; + + pic_parms->reference_fields.bits.num_reference_pictures = + picLayerHeader->NUMREF; + + pic_parms->reference_fields.bits.reference_field_pic_indicator = + picLayerHeader->REFFIELD; + + pic_parms->mv_fields.value = 0; + pic_parms->mv_fields.bits.mv_mode = picLayerHeader->MVMODE; + pic_parms->mv_fields.bits.mv_mode2 = picLayerHeader->MVMODE2; + + pic_parms->mv_fields.bits.mv_table = picLayerHeader->MVTAB; + pic_parms->mv_fields.bits.two_mv_block_pattern_table = picLayerHeader->MV2BPTAB; + pic_parms->mv_fields.bits.four_mv_switch = picLayerHeader->MV4SWITCH; + pic_parms->mv_fields.bits.four_mv_block_pattern_table = picLayerHeader->MV4BPTAB; + pic_parms->mv_fields.bits.extended_mv_flag = seqLayerHeader->EXTENDED_MV; + pic_parms->mv_fields.bits.extended_mv_range = picLayerHeader->MVRANGE; + pic_parms->mv_fields.bits.extended_dmv_flag = seqLayerHeader->EXTENDED_DMV; + pic_parms->mv_fields.bits.extended_dmv_range = picLayerHeader->DMVRANGE; + + pic_parms->pic_quantizer_fields.value = 0; + pic_parms->pic_quantizer_fields.bits.dquant = seqLayerHeader->DQUANT; + pic_parms->pic_quantizer_fields.bits.quantizer = seqLayerHeader->QUANTIZER; + pic_parms->pic_quantizer_fields.bits.half_qp = picLayerHeader->HALFQP; + pic_parms->pic_quantizer_fields.bits.pic_quantizer_scale = picLayerHeader->PQUANT; + pic_parms->pic_quantizer_fields.bits.pic_quantizer_type = picLayerHeader->UniformQuant; + pic_parms->pic_quantizer_fields.bits.dq_frame = picLayerHeader->DQUANTFRM; + pic_parms->pic_quantizer_fields.bits.dq_profile = picLayerHeader->DQPROFILE; + pic_parms->pic_quantizer_fields.bits.dq_sb_edge = picLayerHeader->DQSBEDGE; + pic_parms->pic_quantizer_fields.bits.dq_db_edge = picLayerHeader->DQDBEDGE; + 
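+    /* dq_binary_level and alt_pic_quantizer below complete the VOPDQUANT-derived
+     * quantizer fields (DQBILEVEL, ALTPQUANT in the VC-1 picture layer). */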
pic_parms->pic_quantizer_fields.bits.dq_binary_level = picLayerHeader->DQBILEVEL; + pic_parms->pic_quantizer_fields.bits.alt_pic_quantizer = picLayerHeader->ALTPQUANT; + + pic_parms->transform_fields.value = 0; + pic_parms->transform_fields.bits.variable_sized_transform_flag = + seqLayerHeader->VSTRANSFORM; + + pic_parms->transform_fields.bits.mb_level_transform_type_flag = picLayerHeader->TTMBF; + pic_parms->transform_fields.bits.frame_level_transform_type = picLayerHeader->TTFRM; + + pic_parms->transform_fields.bits.transform_ac_codingset_idx1 = + (picLayerHeader->TRANSACFRM > 0) ? picLayerHeader->TRANSACFRM - 1 : 0; + + pic_parms->transform_fields.bits.transform_ac_codingset_idx2 = + (picLayerHeader->TRANSACFRM2 > 0) ? picLayerHeader->TRANSACFRM2 - 1 : 0; + + pic_parms->transform_fields.bits.intra_transform_dc_table = picLayerHeader->TRANSDCTAB; +} + + +static void vbp_pack_slice_data_vc1( + vbp_context *pcontext, + int index, + vbp_picture_data_vc1* pic_data) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint32 slice_size = cxt->list.data[index].edpos - cxt->list.data[index].stpos; + uint32 bit; + uint32 byte; + uint8 is_emul; + viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul); + + vbp_slice_data_vc1 *slc_data = &(pic_data->slc_data[pic_data->num_slices]); + VASliceParameterBufferVC1 *slc_parms = &(slc_data->slc_parms); + + /*uint32 data_offset = byte - cxt->list.data[index].stpos;*/ + + slc_data->buffer_addr = cxt->parse_cubby.buf + cxt->list.data[index].stpos; + slc_data->slice_size = slice_size - byte; + slc_data->slice_offset = byte; + + slc_parms->slice_data_size = slc_data->slice_size; + slc_parms->slice_data_offset = 0; + + /* fix this. we need to be able to handle partial slices. */ + slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + + slc_parms->macroblock_offset = bit; + + /* fix this. we need to get the slice_vertical_position from the code */ + slc_parms->slice_vertical_position = pic_data->num_slices; + + pic_data->num_slices++; +} + +/** + * process parsing result + */ +uint32_t vbp_process_parsing_result_vc1(vbp_context *pcontext, int index) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint32 error = VBP_OK; + + vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data; + if (parser->start_code != VC1_SC_FRM && parser->start_code != VC1_SC_FLD && + parser->start_code != VC1_SC_SLC) + { + /* only handle frame data, field data and slice data here + */ + return VBP_OK; + } + vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data; + + if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD) + { + query_data->num_pictures++; + } + + if (query_data->num_pictures > MAX_NUM_PICTURES) + { + ETRACE("Num of pictures per sample buffer exceeds the limit (%d).", MAX_NUM_PICTURES); + return VBP_DATA; + } + + if (query_data->num_pictures == 0) + { + ETRACE("Unexpected num of pictures."); + return VBP_DATA; + } + + /* start packing data */ + int picture_index = query_data->num_pictures - 1; + vbp_picture_data_vc1* pic_data = &(query_data->pic_data[picture_index]); + + if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD) + { + /* setup picture parameter first*/ + vbp_pack_picture_params_vc1(pcontext, index, pic_data); + + /* setup bitplane after setting up picture parameter (so that bitplane_present is updated) */ + error = vbp_pack_bitplanes_vc1(pcontext, index, pic_data); + if (VBP_OK != error) + { + ETRACE("Failed to pack bitplane."); + return error; + } + + } + + /* Always pack slice parameter.
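+ * (VC-1 slice headers exist only in Advanced profile per SMPTE 421M, so a picture may carry no slice header at all and is then packed as one slice here.)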
The first macroblock in the picture CANNOT + * be preceded by a slice header, so the first slice is always parsed. + * + */ + + if (pic_data->num_slices >= MAX_NUM_SLICES) + { + ETRACE("Num of slices exceeds the limit (%d).", MAX_NUM_SLICES); + return VBP_DATA; + } + + /* set up slice parameter */ + vbp_pack_slice_data_vc1(pcontext, index, pic_data); + + + return VBP_OK; +} diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h new file mode 100644 index 0000000..510e16c --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h @@ -0,0 +1,54 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef VBP_VC1_PARSER_H +#define VBP_VC1_PARSER_H + + +/* + * set up the parser's entry points + */ +uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext); + +/* + * allocate query data structure - vbp_vc1_data + */ +uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext); + +/* + * free query data structure + */ +uint32 vbp_free_query_data_vc1(vbp_context *pcontext); + +/* + * parse bitstream configuration data + */ +uint32 vbp_parse_init_data_vc1(vbp_context *pcontext); + +/* + * parse bitstream start code and fill the viddec_input_buffer_t list. + * WMV has no start code so the whole buffer will be treated as a single frame. + * For VC1 progressive, if start code is not found, the whole buffer will be treated as a + * single frame as well. + * For VC1 interlace, the first field is not start code prefixed, but the second field + * is always start code prefixed.
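+ * (Illustrative, per SMPTE 421M Annex E: a start code is the byte sequence 0x00 0x00 0x01 followed by a BDU type byte, e.g. 0x0D for a frame, 0x0C for a field, 0x0B for a slice.)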
+ */ +uint32 vbp_parse_start_code_vc1(vbp_context *pcontext); + +/* + * process parsing result + */ +uint32 vbp_process_parsing_result_vc1(vbp_context *pcontext, int list_index); + +/* + * populate query data structure + */ +uint32 vbp_populate_query_data_vc1(vbp_context *pcontext); + + +#endif /*VBP_VC1_PARSER_H*/ diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_emit.c b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c new file mode 100644 index 0000000..f6e6a8a --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c @@ -0,0 +1,78 @@ +#include "viddec_emitter.h" +#include "viddec_fw_workload.h" +#include "viddec_fw_debug.h" + +int32_t viddec_emit_flush_current_wkld(viddec_emitter *emit) +{ + if(emit->cur.data != NULL) + { + emit->cur.data->num_items = emit->cur.num_items; + } + if(emit->next.data != NULL) + { + emit->next.data->num_items = emit->next.num_items; + } + emit->cur.num_items = emit->next.num_items; + emit->next.num_items = 0; + if(emit->cur.data != NULL) + { + emit->cur.data->result = emit->cur.result; + } + if(emit->next.data != NULL) + { + emit->next.data->result = emit->next.result; + } + emit->cur.result = emit->next.result; + emit->next.result = 0; + return 1; +} + +int32_t viddec_emit_append(viddec_emitter_wkld *cxt, viddec_workload_item_t *item) +{ + int32_t ret =0; + if((cxt->num_items < cxt->max_items) && (cxt->data != NULL)) + { + cxt->data->item[cxt->num_items] = *item; + cxt->num_items++; + ret = 1; + CDEB(0, "%s: item(%02d) = [%08x %08x %08x %08x]\n",__FUNCTION__, cxt->num_items - 1, item->vwi_type, item->vwi_payload[0], item->vwi_payload[1], item->vwi_payload[2]); + } + else + { + cxt->result |= (VIDDEC_FW_WORKLOAD_ERR_ITEMS_OVERFLOW | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE); + WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_FATAL_WKLD_OVERLFOW, (int)item->vwi_type, (int)(cxt->data), 0, 0, 0, 0); + } + return ret; +} + +int32_t viddec_emit_contr_tag(viddec_emitter *emit, viddec_input_buffer_t *ibuf, uint8_t incomplete, uint32_t using_next) +{ + viddec_workload_item_t item; + viddec_emitter_wkld *cur_wkld; + + cur_wkld = (using_next == 0)? &(emit->cur):&(emit->next); + + if(!incomplete) + item.vwi_type = VIDDEC_WORKLOAD_IBUF_DONE; + else + item.vwi_type = VIDDEC_WORKLOAD_IBUF_CONTINUED; + item.tag.tag_phys_addr = ibuf->phys; + item.tag.tag_phys_len = ibuf->len; + item.tag.tag_value = ibuf->id; + + return viddec_emit_append(cur_wkld, &item); +} + +int32_t viddec_emit_assoc_tag(viddec_emitter *emit, uint32_t id, uint32_t using_next) +{ + viddec_workload_item_t item; + viddec_emitter_wkld *cur_wkld; + + cur_wkld = (using_next == false)? &(emit->cur):&(emit->next); + item.vwi_type = VIDDEC_WORKLOAD_TAG; + item.tag.tag_phys_addr = -1; + item.tag.tag_phys_len = -1; + item.tag.tag_value = id; + return viddec_emit_append(cur_wkld, &item); +} + diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_intr.c b/mix_vbp/viddec_fw/fw/parser/viddec_intr.c new file mode 100644 index 0000000..fa6c1f2 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/viddec_intr.c @@ -0,0 +1,56 @@ +#include "fw_pvt.h" +#include "viddec_fw_parser_ipclib_config.h" +#include "viddec_fw_debug.h" + +extern uint32_t timer; + +void enable_intr(void) +{ + TRAPS_ENABLE; + TRAPS_INT_ENABLE; + //reg_write(INT_REG, 0); +} + +/*------------------------------------------------------------------------------ + * Function: mfd_trap_handler + * This is the FW's ISR. Currently we don't support any INT as we are running parsers only on GV, which + * are pure SW modules.
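+ * In practice only the watchdog branch below does real work (bump the global timer and re-arm the counter via set_wdog()); the remaining branches simply acknowledge their status bits by clearing them in INT_REG.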
+ *------------------------------------------------------------------------------ + */ +void mfd_trap_handler() +{ + uint32_t reg=0, temp=0; + temp = reg_read(INT_STATUS); + //DEBUG_WRITE(0xff, temp, timer, 0, 0, 0); + if(temp & INT_WDOG_ENABLE) + { + timer++; + set_wdog(VIDDEC_WATCHDOG_COUNTER_MAX); + reg = reg_read(INT_STATUS); + } + if(temp & 0x4) + { + + temp = temp & (~0x4); + reg_write(INT_REG, temp); + //val = reg_read(DMA_CONTROL_STATUS); + //val |=DMA_CTRL_STATUS_DONE; + //reg_write(DMA_CONTROL_STATUS, val); + //reg = reg_read(INT_STATUS); + } + if(temp & 0x2) + { + + temp = temp & (~0x2); + reg_write(INT_REG, temp); + } + + if(temp & 0x1) + { + temp = temp & (~0x1); + reg_write(INT_REG, temp); + } + //DEBUG_WRITE(0xff, timer, temp, reg, 0, val); + __asm__("nop"); + +} diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c new file mode 100644 index 0000000..85b6b8e --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c @@ -0,0 +1,119 @@ +#include "viddec_pm_parse.h" +#include "viddec_fw_debug.h" + +#define FIRST_STARTCODE_BYTE 0x00 +#define SECOND_STARTCODE_BYTE 0x00 +#define THIRD_STARTCODE_BYTE 0x01 + +/* BIG ENDIAN: Must be the second and fourth byte of the bytestream for this to work */ +/* LITTLE ENDIAN: Must be the second and fourth byte of the bytestream for this to work */ +/* these are little-endian defines */ +#define SC_BYTE_MASK0 0x00ff0000 /* little-endian */ +#define SC_BYTE_MASK1 0x000000ff /* little-endian */ + +/* Parse for an SC prefix of pattern 0x00 0x00 0xXX in the current buffer. Returns 1 when a start code is found, 0 (success, no SC yet) otherwise. + The context is updated with the current phase and the SC position in the buffer. +*/ +uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state) +{ + uint8_t *ptr; + uint32_t size; + uint32_t data_left=0, phase = 0, ret = 0; + viddec_sc_parse_cubby_cxt_t *cxt; + /* What is phase?: phase is a value in [0-4]; we track consecutive '0' bytes with it. + Any time a '0' is found it is incremented by 1 (up to 2) and reset to 0 if a non-zero byte breaks the run. + If the 0xXX code is found while the current phase is 2, phase changes to 3, which means we found the pattern + we are looking for.
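+ For example, the byte sequence 00 00 01 0D drives phase 0->1->2->3, while a lone 00 01 resets phase back to 0.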
It is incremented to 4 once we see a byte after this pattern */ + cxt = ( viddec_sc_parse_cubby_cxt_t *)in; + size = 0; + data_left = cxt->size; + ptr = cxt->buf; + phase = cxt->phase; + cxt->sc_end_pos = -1; + pcxt=pcxt; + + /* parse while there is more data and the start code has not been found */ + while((data_left > 0) &&(phase < 3)) + { + /* Check if we are byte aligned & phase=0; if that's the case we can check + a word at a time instead of a byte*/ + if(((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) + { + while(data_left > 3) + { + uint32_t data; + char mask1 = 0, mask2=0; + + data = *((uint32_t *)ptr); +#ifndef MFDBIGENDIAN + data = SWAP_WORD(data); +#endif + mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0)); + mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1)); + /* If second byte and fourth byte are not zeros then we cannot have a start code here as we need + two consecutive zero bytes for a start code pattern */ + if(mask1 && mask2) + {/* Success so skip 4 bytes and start over */ + ptr+=4;size+=4;data_left-=4; + continue; + } + else + { + break; + } + } + } + + /* At this point either data is not on a word boundary, or phase > 0, or we are on a word boundary but we detected + two zero bytes in the word, so we look one byte at a time*/ + if(data_left > 0) + { + if(*ptr == FIRST_STARTCODE_BYTE) + {/* Phase can be 3 only if third start code byte is found */ + phase++; + ptr++;size++;data_left--; + if(phase > 2) + { + phase = 2; + + if ( (((uint32_t)ptr) & 0x3) == 0 ) + { + while( data_left > 3 ) + { + if(*((uint32_t *)ptr) != 0) + { + break; + } + ptr+=4;size+=4;data_left-=4; + } + } + } + } + else + { + if((*ptr == THIRD_STARTCODE_BYTE) && (phase == 2)) + {/* Match for start code so update context with byte position */ + phase = 3; + cxt->sc_end_pos = size; + } + else + { + phase = 0; + } + ptr++;size++;data_left--; + } + } + } + if((data_left > 0) && (phase == 3)) + { + viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state; + cxt->sc_end_pos++; + state->next_sc = cxt->buf[cxt->sc_end_pos]; + state->second_scprfx_length = 3; + phase++; + ret = 1; + } + cxt->phase = phase; + /* Return SC found only if phase is 4, else always success */ + return ret; +} diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c new file mode 100644 index 0000000..6f00d27 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c @@ -0,0 +1,190 @@ +#include "viddec_pm_parse.h" +#include "viddec_fw_debug.h" + +#define FIRST_STARTCODE_BYTE 0x00 +#define SECOND_STARTCODE_BYTE 0x00 +#define THIRD_STARTCODE_BYTE 0x01 + +/* BIG ENDIAN: Must be the second and fourth byte of the bytestream for this to work */ +/* LITTLE ENDIAN: Must be the second and fourth byte of the bytestream for this to work */ +/* these are little-endian defines */ +#define SC_BYTE_MASK0 0x00ff0000 /* little-endian */ +#define SC_BYTE_MASK1 0x000000ff /* little-endian */ + +// This is the 2.25 clocks per byte loop +#define USE_2p25_CLOCK_PER_BYTE_LOOP + +#ifdef USE_2p25_CLOCK_PER_BYTE_LOOP +static int parser_find_next_startcode( + const unsigned char *buf, + int i, + int len, + unsigned int *pphase ) +{ + int sc_pos = -1; + int in_slow_loop; + register unsigned int scphase; + + scphase = *pphase; + + in_slow_loop = 1; + if ( (0 == (0x3 & i)) && /* dword aligned */ + (0 == scphase) && /* no "potential" SC detected */ + ((len - i) >= 4) ) /* more than four bytes left */ + { + in_slow_loop = 0; /* go to fast loop */ + } + + while( i < len ) + { + if (
in_slow_loop ) + { +/* ------- slow SC Detect Loop, used when 0 detected in stream --------*/ +sc_detect_slow_loop: + + while ( i < len ) + { + unsigned char ch; + + ch = buf[i]; + + /* searching for a zero, ignore phase for now */ + if ( FIRST_STARTCODE_BYTE == ch ) + { + /* if we've already got two zeros, hold at phase == 2 */ + if ( scphase < 2 ) + { + scphase++; + } + else if ( scphase > 2 ) + { + /* RARE Valid Condition, SC == 00 00 01 00 */ + /* if we've already got two zeros hold at phase == 2 + * we also enter here if we're at phase 3 + * meaning we've got 00 00 01 00 which is a valid SC + */ + /* 00 00 01 00 */ + sc_pos = i; + *pphase = scphase; + return(sc_pos); + } + else /* implies scphase == 2, holding while receiving 0's */ + { + } + } + else if ( THIRD_STARTCODE_BYTE == ch ) + { + if ( 2 == scphase ) + { + /* next byte is the SC */ + scphase++; + } + else if ( scphase < 2 ) + { + scphase = 0; /* start over */ + } + else if ( scphase > 2 ) + { + /* RARE Valid Condition, SC == 00 00 01 01 */ + sc_pos = i; + *pphase = scphase; + return(sc_pos); + } + } + else if ( 3 == scphase ) + { + /* Valid Condition, SC == 00 00 01 xx */ + sc_pos = i; + *pphase = scphase; + return(sc_pos); + } + else + { + scphase = 0; + + if ( (3 == (0x3 & i)) && /* dword aligned? */ + ((len - i) > 4) ) /* more than four bytes left */ + { + i++; + in_slow_loop = 0; /* go to fast loop */ + + /* WARNING: Performance GoTo */ + goto sc_detect_fast_loop; + } + } + + i++; + } + } + else /* we're in the fast loop */ + { +/* ------- FAST SC Detect Loop, used to skip at high bandwidth --------*/ +sc_detect_fast_loop: + + /* FAST start-code scanning loop (Krebs Algorithm) */ + while ( i <= (len - 4) ) + { + register unsigned int dw; + + dw = *((unsigned int *)&buf[i]); +#ifndef MFDBIGENDIAN + dw = SWAP_WORD(dw); +#endif + if ( 0 != (dw & SC_BYTE_MASK0) ) + { + if ( 0 != (dw & SC_BYTE_MASK1) ) + { + /* most common code path */ + i += 4; + continue; + } + } + + break; + } + /* potential SC detected or at end of loop */ + in_slow_loop = 1; + + /* WARNING: performance goto */ + goto sc_detect_slow_loop; + } + } + + *pphase = scphase; + return(sc_pos); +} +unsigned int viddec_parse_sc(void *in, void *pcxt) +{ + viddec_sc_parse_cubby_cxt_t *cxt; + int boff; + int retval=0; + + cxt = (viddec_sc_parse_cubby_cxt_t *)in; + + /* get to four-byte alignment */ + boff = (int)cxt->buf & 0x3; + + cxt->sc_end_pos = parser_find_next_startcode( + (const unsigned char *)cxt->buf - boff, + boff, + cxt->size + boff, + &cxt->phase ); + + if ( (int)cxt->sc_end_pos >= 0 ) + { + cxt->sc_end_pos -= boff; + + /* have not fully finished the buffer */ + if ( cxt->sc_end_pos < cxt->size ) + cxt->phase++; + + retval = 1; + } + else + { + /* No startcode found */ + } + + return(retval); +} +#endif diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c new file mode 100644 index 0000000..5aa2e9c --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c @@ -0,0 +1,6 @@ +#include <stdint.h> + +uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap) +{ + return (0); +} diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c new file mode 100644 index 0000000..ffcff11 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c @@ -0,0 +1,554 @@ +#include "viddec_pm.h" +#include "viddec_fw_debug.h" +#include "viddec_fw_common_defs.h" +#include "viddec_pm_tags.h" +#include "viddec_parser_ops.h" +#include
"viddec_vc1_parse.h" +#include "viddec_mp4_parse.h" +#include "viddec_mpeg2_parse.h" +#include "viddec_h264_parse.h" +/* + Overview of Parser manager: + Parser manager is the glue between Kernel(main.c) and actual codecs. We abstract common functionality as much as we can + in this module. The parser Manager context allocates memory for Parsers. At any point in time there is only one active stream. + During open stream we setup all necessary initialisation for the codec we are handling. The parser manager context is + stored on DDR when the current stream gets swapped out by the kernel. When the next stream comes in it has it's own + version of parser manager. + Parser manager is reponsible for providing information on when its a good time to swap a stream. + High level algorithm of parser Manager once a stream is opened and active(RET's are returns to Kernel): + + 1. create a list data structure to hold any incoming ES descriptors. + 2. Check to see if any of the ES buffers Desc in current list has data to be processed. If not request kernel(RET) for a buffer. + 3. If data is present parse until a scprefix+sc is found. If not goto step2. + 4. If startcode detected update list state to make ES data look like Linear buffer. + 5. Setup required state to provide getbits interface for codecs to access bit stream maximum 32bits at a time. + 6. Setup Current & Next workloads provided by Kernel. + 7. Call the codec to parse the data we collected between start codes. + 8. Query to see if we parsed frame worth of data. + 9. Do necessary TAG association and remove used buffers from List. + 10. Send information to kernel on whether workload is done or Not.(RET). When kernel reschedules start from step2. + + Kernel can swap current stream at RET points described above. + + Other additional things supported: + - Generic start code detect function which is same for most of codecs. + - Memory Management. + - Flush of stream. + - Emulation prevention. + - Interface to emit necessary tags for codec specific types. +*/ + + +/* check to see if codec needs emulation prevention */ +#define EMUL_REQD(codec) ((codec == MFD_STREAM_FORMAT_VC1) || (codec_type == MFD_STREAM_FORMAT_H264) ? 
1: 0) + +#ifdef RTL_SIMULATION +extern void output_omar_wires( unsigned int value ); +#else +#define output_omar_wires(x) +#endif + +/* Place to store Function pointers for all supported interfaces for each codec */ +viddec_parser_ops_t parser_ops[MFD_STREAM_FORMAT_MAX]; + + + +/* we need to define as external function so that for host mode we can use the same code without + modifications by overloading dma function with a copy function +*/ +extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap); + +void viddec_pm_init_ops() +{ + viddec_vc1_get_ops(&parser_ops[MFD_STREAM_FORMAT_VC1]); + parser_ops[MFD_STREAM_FORMAT_VC1].parse_sc = viddec_parse_sc; + parser_ops[MFD_STREAM_FORMAT_VC1].gen_contrib_tags = viddec_pm_generic_generate_contribution_tags; + parser_ops[MFD_STREAM_FORMAT_VC1].gen_assoc_tags = viddec_generic_add_association_tags; + + viddec_mpeg2_get_ops(&parser_ops[MFD_STREAM_FORMAT_MPEG]); + parser_ops[MFD_STREAM_FORMAT_MPEG].parse_sc = viddec_parse_sc; + parser_ops[MFD_STREAM_FORMAT_MPEG].gen_contrib_tags = viddec_pm_generic_generate_contribution_tags; + parser_ops[MFD_STREAM_FORMAT_MPEG].gen_assoc_tags = viddec_mpeg2_add_association_tags; + + viddec_h264_get_ops(&parser_ops[MFD_STREAM_FORMAT_H264]); + parser_ops[MFD_STREAM_FORMAT_H264].parse_sc = viddec_parse_sc; + parser_ops[MFD_STREAM_FORMAT_H264].gen_contrib_tags = viddec_pm_lateframe_generate_contribution_tags; + parser_ops[MFD_STREAM_FORMAT_H264].gen_assoc_tags = viddec_h264_add_association_tags; + + viddec_mp4_get_ops(&parser_ops[MFD_STREAM_FORMAT_MPEG42]); + parser_ops[MFD_STREAM_FORMAT_MPEG42].gen_contrib_tags = viddec_pm_generic_generate_contribution_tags; + parser_ops[MFD_STREAM_FORMAT_MPEG42].gen_assoc_tags = viddec_generic_add_association_tags; +} + +/* + Returns size of persistent DDR memory required for the codec. If the required memory is less than max allocated + scratch memory in FW we always give the max scratch size. +*/ +uint32_t viddec_pm_get_parser_sizes(uint32_t codec_type, viddec_parser_memory_sizes_t *size) +{ + parser_ops[codec_type].get_cxt_size(size); + if(size->context_size > MAX_CODEC_CXT_SIZE) + { + DEB("ERROR: size(%d) of context for codec=%d is greater than max=%d\n",size->context_size,codec_type,MAX_CODEC_CXT_SIZE); + } + size->context_size = sizeof(viddec_pm_cxt_t); + return 1; +} + +/* + Initialize the scratch memory allocated to the stream based on clean: if clean is true initialize to + start state, if not then preserve stream information. +*/ +void viddec_pm_init_context(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t *persist_mem, uint32_t clean) +{ + int i; + + for(i=0; i<MAX_IBUFS_PER_SC; i++) + { + cxt->pending_tags.pending_tags[i] = INVALID_ENTRY; + } + cxt->frame_start_found = false; + cxt->found_fm_st_in_current_au = false; + cxt->late_frame_detect = (MFD_STREAM_FORMAT_H264 == codec_type) ?
true:false; + cxt->pending_tags.first_buf_aligned = cxt->pending_tags.using_next = cxt->pending_tags.frame_done =false; + cxt->next_workload_error_eos = VIDDEC_FW_WORKLOAD_ERR_FLUSHED_FRAME | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + viddec_pm_utils_list_init(&(cxt->list)); + cxt->cur_buf.list_index = -1; + cxt->parse_cubby.phase=0; + parser_ops[codec_type].init((void *)&(cxt->codec_data[0]), persist_mem, !clean); + if(clean) + { + cxt->pending_inband_tags = 0; + } + else + { + /* TODO: Enable this once codecs support this function */ + //parser_ops[codec_type].flush_preserve((void *)&(cxt->codec_data[0]), persist_mem); + } + +} + +void viddec_pm_update_time(viddec_pm_cxt_t *cxt, uint32_t time) +{ + viddec_emit_time(&(cxt->emitter), time); +} + +/* add an esbuffer to list */ +static inline uint32_t viddec_pm_add_es_buf_to_list(viddec_pm_cxt_t *cxt, viddec_input_buffer_t *es_buf) +{ + uint32_t val , ret = PM_OVERFLOW; + + val = viddec_pm_utils_list_addbuf(&(cxt->list), es_buf); + if(val == 1) ret = PM_SUCCESS; + return ret; +} + +static inline uint32_t viddec_pm_check_inband_messages(viddec_pm_sc_cur_buf_t *cur_buf, uint32_t *type) +{ + uint32_t ret=false; + if(cur_buf->cur_es->flags != 0) + { + /* update offset to point to next position for loading data */ + cur_buf->cur_offset +=(cur_buf->cur_size); + cur_buf->cur_size = 0; + switch(cur_buf->cur_es->flags) + { + case VIDDEC_STREAM_EOS: + { + *type = PM_EOS; + } + break; + case VIDDEC_STREAM_DISCONTINUITY: + { + *type = PM_DISCONTINUITY; + } + default: + break; + } + ret =true; + } + return ret; +} + +/* creates an ibuf from the current position in list. Fills sc_parse_cubby_cxt */ +uint32_t viddec_pm_create_ibuf(viddec_pm_cxt_t *cxt) +{ + uint32_t ret = PM_NO_DATA; +#ifndef VBP + viddec_sc_parse_cubby_cxt_t *cubby = &(cxt->parse_cubby); +#endif + viddec_pm_sc_cur_buf_t *cur_buf = &(cxt->cur_buf); + viddec_pm_utils_list_t *list = &(cxt->list); + + /* Step1: check if list is Empty, If yes return No data */ + if(list->num_items > 0) + { + /* Step 2: Check to see If current index into list is empty & we have data in list, + if so increment index and initialise it*/ + if(cur_buf->list_index == -1) + { + if(viddec_pm_utils_list_getbyte_position(list, + list->first_scprfx_length+1, + (uint32_t *)&(cur_buf->list_index), + &(cur_buf->cur_offset)) != 1) + {/* This return's offset and index from where we have to start for sc detect */ + cur_buf->cur_size = 0; + cur_buf->cur_es = &(list->sc_ibuf[cur_buf->list_index]); + } + else + { + return PM_NO_DATA; + } + } + + /* Step3: If we are done with current buffer then try to go to next item in list */ + if((cur_buf->cur_offset + cur_buf->cur_size) >= cur_buf->cur_es->len) + { + /* Need to handle In band messages before going to next buffer */ + //if(viddec_pm_check_inband_messages(cur_buf)) + if(viddec_pm_check_inband_messages(cur_buf, &ret)) + { + return ret; + } + /* If no items in list after the current buffer return no data */ + if((uint32_t)(cur_buf->list_index + 1) >= list->num_items) + { + return PM_NO_DATA; + } + cur_buf->list_index++; + cur_buf->cur_es = &(list->sc_ibuf[cur_buf->list_index]); + cur_buf->cur_offset = cur_buf->cur_size = 0; + } + /* Step4: Fill the cubby with data to send to parser sc code function */ + { + int32_t data_left; + /* data left is the leftout size in current ES buffer */ + data_left = cur_buf->cur_es->len - (cur_buf->cur_offset + cur_buf->cur_size); + + /* update offset to point to next position for loading data */ + cur_buf->cur_offset +=(cur_buf->cur_size); + +#ifndef 
VBP + /* Load maximum of array size */ + if(data_left >= SC_DETECT_BUF_SIZE) + { + data_left = SC_DETECT_BUF_SIZE; + } + /* can be zero if we have zero sized buffers in our list. EX: NEW segment */ + if(data_left > 0) + {/* do a copy using Linear Dma */ + uint32_t size , ddr_addr = 0, ddr_mask=0; + /* get ddr address of current offset in ES buffer */ +#ifdef HOST_ONLY + ddr_addr = cur_buf->cur_offset + (uint32_t)cur_buf->cur_es->buf; +#else + ddr_addr = cur_buf->cur_offset + cur_buf->cur_es->phys; +#endif + ddr_mask = (ddr_addr & 3); + ddr_addr = ddr_addr & ~3; + /* return from this function can be more bytes based on input buf alignment. + The address of the local memory we pass is on a DWORD boundary so it should be safe. + */ + + size = cp_using_dma(ddr_addr, (uint32_t)&(cxt->scbuf[0]), data_left+ddr_mask, 0,1);//false, true); + cubby->size = data_left; + + /* point to actual memory location which has the data (skip alignment bytes) */ + cubby->buf = &(cxt->scbuf[ddr_mask]); + cur_buf->cur_size = data_left; + ret = PM_SUCCESS; + } + else + { + /* If we completely consumed this buffer or this is a zero sized buffer we want to check inband messages */ + //if(viddec_pm_check_inband_messages(cur_buf)) + if(viddec_pm_check_inband_messages(cur_buf, &ret)) + { + return ret; + } + } +#else + ret = PM_SUCCESS; +#endif + } + } + + return ret; +} + +/* + Read data from esbuffer list and parse for start codes or EOS. If we consumed all the data we return no data left. +*/ +static inline uint32_t viddec_pm_parse_for_sccode(viddec_pm_cxt_t *cxt, viddec_parser_ops_t *func) +{ + uint32_t ret = PM_NO_DATA; + uint32_t sc_boundary_found = 0; + + while(!sc_boundary_found) + { + /* Create a buffer from list to parse */ + ret = viddec_pm_create_ibuf(cxt); + switch(ret) + { + case PM_NO_DATA: + {/* No data in esbuffer list for parsing sc */ + sc_boundary_found = 1; + } + break; + case PM_EOS: + case PM_DISCONTINUITY: + { + sc_boundary_found = 1; + cxt->list.end_offset = cxt->cur_buf.cur_offset+1; + cxt->parse_cubby.phase = 0; + /* we didn't find a start code so second start code length would be 0 */ + cxt->sc_prefix_info.second_scprfx_length = 0; + //cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_EOS; + if(ret == PM_EOS) + { + cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_EOS; + } + if(ret == PM_DISCONTINUITY) + { + cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_DISCONTINUITY; + } + } + break; + case PM_SUCCESS: + default: + { + /* parse the created buffer for sc */ + ret = func->parse_sc((void *)&(cxt->parse_cubby), (void *)&(cxt->codec_data[0]), &(cxt->sc_prefix_info)); + if(ret == 1) + { + cxt->list.end_offset = cxt->parse_cubby.sc_end_pos + cxt->cur_buf.cur_offset; + cxt->parse_cubby.phase = 0; + cxt->list.total_bytes+=cxt->parse_cubby.sc_end_pos; + ret = PM_SC_FOUND; + sc_boundary_found = 1; + break; + } + else + { + cxt->list.total_bytes+=cxt->cur_buf.cur_size; + } + } + break; + } + } + + return ret; +} + +/* + Once we are ready to flush the current workload, we update the current workload on DDR with our internal information + that was not written before, like the num of items in the workload, errors in the stream, etc.
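+ For example, the num_items counters accumulated in the emitter are copied into the workload headers here through viddec_emit_flush_current_wkld().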
+*/ +void viddec_pm_finalize_workload(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t codec_errors) +{ + viddec_emit_set_codec(&(cxt->emitter), codec_type); + viddec_emit_set_codec_errors(&(cxt->emitter), codec_errors); + viddec_emit_flush_current_wkld(&(cxt->emitter)); + output_omar_wires( 0x5 ); + output_omar_wires( 0x1 ); +} + +/* + After parsing between start codes we clean up our list so that it has only buffers that are not consumed yet. +*/ +uint32_t viddec_pm_finalize_list(viddec_pm_cxt_t *cxt) +{ + uint32_t ret=1; + + viddec_pm_utils_list_remove_used_entries(&(cxt->list), cxt->sc_prefix_info.second_scprfx_length); + cxt->cur_buf.list_index = -1; + cxt->list.first_scprfx_length = cxt->sc_prefix_info.second_scprfx_length; + return ret; +} + +/* Case to handle if we encounter list overflow without seeing second start code */ +void viddec_pm_handle_buffer_overflow(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf) +{ + uint32_t indx=0; + while(indx< (uint32_t)cxt->list.num_items) + {/* Dump tags for all entries in list to prevent buffer leak */ + viddec_emit_contr_tag(&(cxt->emitter), &(cxt->list.sc_ibuf[indx]), false, true); + viddec_emit_assoc_tag(&(cxt->emitter), cxt->list.sc_ibuf[indx].id, true); + indx++; + } + /* Dump tags for the new buffer that was received */ + viddec_emit_contr_tag(&(cxt->emitter), es_buf, 0, true); + viddec_emit_assoc_tag(&(cxt->emitter), es_buf->id, true); + /* Set errors on both current and next as both can be invalid */ + viddec_emit_set_workload_error(&(cxt->emitter), + (VIDDEC_FW_WORKLOAD_ERR_BUFFERS_OVERFLOW | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE), + true); + viddec_emit_set_workload_error(&(cxt->emitter), + (VIDDEC_FW_WORKLOAD_ERR_BUFFERS_OVERFLOW | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE), + false); + /* cleanup the pending tags */ + viddec_pm_generate_missed_association_tags(cxt, true); + viddec_pm_finalize_workload(cxt, codec_type, 0); + WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_FATAL_BUFFER_OVERLFOW, (int)es_buf->phys, (int)es_buf->len, 0, 0, 0, 0); +} + +static inline void viddec_pm_handle_post_inband_messages(viddec_pm_cxt_t *cxt, uint32_t m_type) +{ + if((m_type & ~(0xFF))== PM_INBAND_MESSAGES) + { + /* If EOS, also set the error on the next workload */ + viddec_emit_set_workload_error(&(cxt->emitter), cxt->next_workload_error_eos, true); + if(m_type == PM_EOS) + { + viddec_emit_set_inband_tag(&(cxt->emitter), VIDDEC_WORKLOAD_IBUF_EOS, true); + } + if(m_type == PM_DISCONTINUITY) + { + cxt->pending_inband_tags = PM_DISCONTINUITY; + } + } +} + +static inline uint32_t viddec_pm_handle_new_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf) +{ + uint32_t state = PM_SUCCESS; + if(es_buf != NULL) + { + state = viddec_pm_add_es_buf_to_list(cxt, es_buf); + if(state == PM_OVERFLOW) + { + viddec_pm_handle_buffer_overflow(cxt, codec_type, es_buf); + } + } + return state; +} + +static inline void viddec_pm_handle_pre_inband_messages(viddec_pm_cxt_t *cxt) +{ + if(cxt->pending_inband_tags == PM_DISCONTINUITY) + { + viddec_emit_set_inband_tag(&(cxt->emitter), VIDDEC_WORKLOAD_IBUF_DISCONTINUITY, false); + cxt->pending_inband_tags = 0; + } +} + +/* + Main function of parser manager. + It searches the list until start codes are found; if none are found, the return type indicates that the kernel should provide more buffers. + If a start code is found it calls the codec to parse the syntax data it accumulated so far. + If the codec says a frame is not done then it continues to find the next start code.
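+ (i.e. steps 2-3 of the overview at the top of this file repeat until a second start code, EOS or discontinuity is seen).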
+ If the codec says the frame is done it does tag association and indicates to the kernel that a frame is done. +*/ +uint32_t viddec_pm_parse_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf) +{ + uint32_t state = PM_SUCCESS; + + /* Step1: Append Es buffer to list */ + viddec_pm_handle_pre_inband_messages(cxt); + state = viddec_pm_handle_new_es_buffer(cxt, codec_type, es_buf); + if(state == PM_SUCCESS) + { + uint32_t scdetect_ret; + output_omar_wires( 0x3 ); + /* Step2: Phase1 of parsing, parse until a sc is found */ + scdetect_ret = viddec_pm_parse_for_sccode(cxt,&parser_ops[codec_type]); + switch(scdetect_ret) + { + case PM_NO_DATA: + { + /* Step3: If we consumed all the data indicate we need more buffers */ + state = PM_NO_DATA; + break; + } + case PM_EOS: + case PM_DISCONTINUITY: + case PM_SC_FOUND: + { + uint32_t codec_errors=0; + /* Create necessary state information to make the ES buffers look like linear data */ + viddec_pm_utils_list_updatebytepos(&(cxt->list), cxt->sc_prefix_info.second_scprfx_length); + if(cxt->sc_prefix_info.first_sc_detect != 1) + { + /* Step4: If we saw two start codes init state and call codec to parse */ + uint32_t codec_ret; + /* Initialise the state to provide get bits for codecs */ + viddec_pm_utils_bstream_init(&(cxt->getbits), &(cxt->list), EMUL_REQD(codec_type)); + output_omar_wires( 0x1 ); + /* call the codec to do syntax parsing */ + parser_ops[codec_type].parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0])); + /* Check and see if frame start was detected. If we did update frame start in current au */ + if(parser_ops[codec_type].is_frame_start((void *)&(cxt->codec_data[0])) == true) + { + cxt->frame_start_found += 1; + cxt->found_fm_st_in_current_au = true; + } + /* Query to see if we reached end of current frame */ + codec_ret = parser_ops[codec_type].is_wkld_done((void *)cxt, + (void *)&(cxt->codec_data[0]), + (uint32_t)(cxt->sc_prefix_info.next_sc), + &codec_errors); + + state = (codec_ret == VIDDEC_PARSE_FRMDONE) ? PM_WKLD_DONE : PM_SUCCESS; + /* generate contribution and association tags */ + cxt->pending_tags.frame_done = (codec_ret == VIDDEC_PARSE_FRMDONE); + parser_ops[codec_type].gen_assoc_tags(cxt); + parser_ops[codec_type].gen_contrib_tags(cxt, (state != PM_WKLD_DONE)); + } + else + { + /* Step4: If this is the first start code in this stream, clean up and return */ + if(cxt->list.total_bytes != 0) + { + viddec_pm_generic_generate_contribution_tags(cxt, true); + viddec_generic_add_association_tags(cxt); + } + else + { + if(cxt->list.num_items >= 1) + { + uint32_t indx=0; + while((indx< (uint32_t)cxt->list.num_items) && (cxt->list.sc_ibuf[indx].len == 0)) + {/* Dump all zero sized buffers until we see a buffer with valid data */ + viddec_emit_contr_tag(&(cxt->emitter), &(cxt->list.sc_ibuf[indx]), false, false); + viddec_emit_assoc_tag(&(cxt->emitter), cxt->list.sc_ibuf[indx].id, false); + indx++; + } + } + } + if((scdetect_ret & ~(0xFF))!= PM_INBAND_MESSAGES) + { + state = PM_SUCCESS;//state = PM_FIRST_SC_FOUND; + cxt->sc_prefix_info.first_sc_detect = 0; + } + else + { + state = PM_WKLD_DONE; + } + } + + viddec_pm_handle_post_inband_messages(cxt, scdetect_ret); + + /* Step 5: If current frame is done, finalise the workload state with necessary information */ + if(state == PM_WKLD_DONE) + { + DEB("\nFRAME ... DONE\n"); + /* we decrement frame start. This can be 0 in cases like sending junk data with EOS */ + cxt->frame_start_found -= (cxt->frame_start_found)?
1: 0; + if((scdetect_ret & ~(0xFF))== PM_INBAND_MESSAGES) + {/* If EOS dump pending tags and set state */ + viddec_pm_generate_missed_association_tags(cxt, false); + state = scdetect_ret; + } + /* Write back stored state of workloads to memory to prepare for pushing to output queue */ + viddec_pm_finalize_workload(cxt, codec_type, codec_errors); + } + /* Step 6: Reset the list to prepare for next iteration */ + viddec_pm_finalize_list(cxt); + break; + } + default: + break; + } + }//if(state == PM_SUCCESS) + return state; +} // viddec_pm_parse_es_buffer diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c new file mode 100644 index 0000000..f16fbcd --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c @@ -0,0 +1,127 @@ +#include "fw_pvt.h" +#include "viddec_fw_parser_ipclib_config.h" +#include "viddec_fw_common_defs.h" +#include "viddec_pm_tags.h" +#include "viddec_fw_parser.h" + +extern dmem_t _dmem; +extern viddec_parser_ops_t parser_ops[MFD_STREAM_FORMAT_MAX]; + +static void viddec_fw_parser_peekmessages(viddec_pm_cxt_t *pm, ipc_msg_data *wkld_cur, ipc_msg_data *wkld_next, int32_t *ret_cur, int32_t *ret_next, uint32_t stream_id) +{ + FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem); + wkld_cur->phys = wkld_next->phys = 0; + /* read current and next workloads by peeking to free wkld queue. This would only give us a copy + of the message but won't actually pull it out of the queue*/ + + *ret_cur = FwIPC_PeekReadMessage(fwipc, &(fwipc->wkld_q[stream_id]), (char *)wkld_cur, sizeof(ipc_msg_data), 0); + *ret_next = FwIPC_PeekReadMessage(fwipc, &(fwipc->wkld_q[stream_id]), (char *)wkld_next, sizeof(ipc_msg_data), 1); + /* NOTE: I am passing length of current workload as size for next, since next workload might not exist. This is safe since in flush we always append to current workload */ + viddec_emit_update(&(pm->emitter), wkld_cur->phys, wkld_next->phys, wkld_cur->len, wkld_cur->len); +} + +static void viddec_fw_parser_push_error_workload(viddec_pm_cxt_t *pm, ipc_msg_data *wkld_cur, uint32_t stream_id) +{ + FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem); + /* Push the current wkld */ + viddec_emit_set_workload_error(&(pm->emitter), + (VIDDEC_FW_WORKLOAD_ERR_FLUSHED_FRAME | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE), + false); + viddec_emit_flush_current_wkld(&(pm->emitter)); + FwIPC_SendMessage(fwipc, stream_id, (char *)wkld_cur, sizeof(ipc_msg_data)); + FwIPC_ReadMessage(fwipc, &(fwipc->wkld_q[stream_id]), (char *)wkld_cur, sizeof(ipc_msg_data)); +} + +int viddec_fw_parser_flush(unsigned int stream_id, unsigned int flush_type) +{ + FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem); + mfd_pk_strm_cxt *cxt; + mfd_stream_info *cxt_swap; + viddec_pm_cxt_t *pm; + int32_t pos=0, ret = VIDDEC_FW_SUCCESS;/* success */ + uint32_t workloads_in_input_q = 0; + cxt = (mfd_pk_strm_cxt *)&(_dmem.srm_cxt); + cxt_swap = (mfd_stream_info *)&(_dmem.stream_info[stream_id]); + pm = &(cxt->pm); + + workloads_in_input_q = ipc_mq_read_avail(&fwipc->wkld_q[stream_id].mq, (int32_t *)&pos); + pos = 0; + /* Check to see if output queue has space for next message */ + if(ipc_mq_write_avail(&fwipc->snd_q[stream_id].mq,&pos) >= workloads_in_input_q) + { + /* Check how many free workloads are available.
Need at least 1 */ + if(workloads_in_input_q >= CONFIG_IPC_MESSAGE_MAX_SIZE) + { + ipc_msg_data wkld_cur, wkld_next, cur_es; + int32_t ret_cur=0,ret_next=0; + + {/* Swap context into local memory */ + cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) pm, sizeof(viddec_pm_cxt_t), false, false); + } + + viddec_fw_parser_peekmessages(pm, &wkld_cur, &wkld_next, &ret_cur, &ret_next, stream_id); + if(workloads_in_input_q >= (CONFIG_IPC_MESSAGE_MAX_SIZE << 1)) + {/* If we have more than 2 workloads, most likely the current workload has partial data. To avoid overflow + let's push current and use next, which is most likely empty. If there's only one workload it was + next for the previous frame, so it is most likely empty, in which case we skip this logic*/ + viddec_fw_parser_push_error_workload(pm, &wkld_cur, stream_id); + viddec_fw_parser_peekmessages(pm, &wkld_cur, &wkld_next, &ret_cur, &ret_next, stream_id); + } + /* Empty current es buffers in list */ + /* TODO(Assumption): we have to make sure that list flush is really successful by checking return values. + If our workload size is big enough to accommodate buf done tags then it's not necessary + since writes are then guaranteed to succeed for all es buffers */ + viddec_pm_generate_tags_for_unused_buffers_to_flush(pm); + /* Check the number of ES buffers and append them to current wkld */ + while(FwIPC_ReadMessage(fwipc, &(fwipc->rcv_q[stream_id]), (char *)&cur_es, sizeof(ipc_msg_data)) != 0) + { + /* NOTE(Assumption): Again we have to define workload size to be big enough to make sure we can fit + all the es buffers into current workload */ + viddec_emit_contr_tag(&(pm->emitter), &cur_es, 0, false); + viddec_emit_assoc_tag(&(pm->emitter), cur_es.id, false); + } + viddec_fw_parser_push_error_workload(pm, &wkld_cur, stream_id); + do + {/* Read until no workloads left */ + viddec_fw_parser_peekmessages(pm, &wkld_cur, &wkld_next, &ret_cur, &ret_next, stream_id); + if(ret_cur == 0) + { + break; + } + viddec_fw_parser_push_error_workload(pm, &wkld_cur, stream_id); + }while(1); + switch(flush_type) + { + case VIDDEC_STREAM_FLUSH_DISCARD: + { + /* Reset pm_context */ + viddec_fw_init_swap_memory(stream_id, 0, 1); + } + break; + case VIDDEC_STREAM_FLUSH_PRESERVE: + { + /* Reset just stream information */ + viddec_fw_init_swap_memory(stream_id, 0, 0); + } + default: + break; + } + {/* swap context into DDR */ + cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) pm, sizeof(viddec_pm_cxt_t), true, false); + } + } + else + { + pos = 0; + /* check to see if I have any es buffers on input queue. If none are present we don't have to do anything */ + if(ipc_mq_read_avail(&fwipc->rcv_q[stream_id].mq, (int32_t *)&pos) != 0) + ret = VIDDEC_FW_NEED_FREE_WKLD; + } + } + else + { + /* data present in output queue.
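+ The host is presumably expected to drain the send queue and retry the flush.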
*/ + ret =VIDDEC_FW_PORT_FULL; + } + return ret; +} diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c new file mode 100644 index 0000000..9a7d828 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c @@ -0,0 +1,178 @@ +#include "viddec_pm.h" +#include "viddec_fw_debug.h" +#include "viddec_parser_ops.h" +#include "viddec_emitter.h" +#include "viddec_fw_workload.h" +#include "viddec_pm_utils_bstream.h" + +extern void viddec_pm_utils_list_emit_pixel_tags(viddec_pm_utils_list_t *list, uint32_t start, viddec_emitter *emitter, uint32_t using_next); +extern void viddec_pm_utils_list_emit_slice_tags(viddec_pm_utils_list_t *list, uint32_t start, uint32_t end, viddec_emitter *emitter, uint32_t is_cur_wkld, viddec_workload_item_t *wi); + +int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits) +{ + int32_t ret = 1; + viddec_pm_cxt_t *cxt; + + cxt = (viddec_pm_cxt_t *)parent; + ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits, 1); + if(ret == -1) + {DEB("FAILURE!!!! getbits returned %d\n", ret);} + + return ret; +} + +int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits) +{ + int32_t ret = 1; + viddec_pm_cxt_t *cxt; + + cxt = (viddec_pm_cxt_t *)parent; + ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits, 0); + return ret; +} + +int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits) +{ + int32_t ret = 1; + viddec_pm_cxt_t *cxt; + + cxt = (viddec_pm_cxt_t *)parent; + ret = viddec_pm_utils_bstream_skipbits(&(cxt->getbits), num_bits); + return ret; +} + +int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item) +{ + int32_t ret = 1; + viddec_pm_cxt_t *cxt; + + cxt = (viddec_pm_cxt_t *)parent; + ret = viddec_emit_append(&(cxt->emitter.cur), item); + return ret; +} + +int32_t viddec_pm_append_workitem_next(void *parent, viddec_workload_item_t *item) +{ + int32_t ret = 1; + viddec_pm_cxt_t *cxt; + + cxt = (viddec_pm_cxt_t *)parent; + ret = viddec_emit_append(&(cxt->emitter.next), item); + return ret; +} + +int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, uint8_t *is_emul) +{ + int32_t ret = 1; + viddec_pm_cxt_t *cxt; + + cxt = (viddec_pm_cxt_t *)parent; + viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), bit, byte, is_emul); + + return ret; + +} + +static inline int32_t viddec_pm_append_restof_pixel_data(void *parent, uint32_t cur_wkld) +{ + int32_t ret = 1; + viddec_pm_cxt_t *cxt; + uint32_t start=0, b_off=0; + uint8_t emul=0; + viddec_workload_item_t wi; + + cxt = (viddec_pm_cxt_t *)parent; + viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), &b_off, &start, &emul); + if(emul) start--; + + wi.vwi_type = VIDDEC_WORKLOAD_PIXEL_ES; + wi.es.es_flags = 0; + viddec_pm_utils_list_emit_slice_tags(&(cxt->list), start, cxt->list.total_bytes -1, &(cxt->emitter), cur_wkld, &wi); + return ret; +} + +int32_t viddec_pm_append_pixeldata(void *parent) +{ + return viddec_pm_append_restof_pixel_data(parent, 1); +} + +int32_t viddec_pm_append_pixeldata_next(void *parent) +{ + return viddec_pm_append_restof_pixel_data(parent, 0); +} + +viddec_workload_t* viddec_pm_get_header(void *parent) +{ + viddec_pm_cxt_t *cxt; + + cxt = (viddec_pm_cxt_t *)parent; + + return cxt->emitter.cur.data; +} + +viddec_workload_t* viddec_pm_get_next_header(void *parent) +{ + viddec_pm_cxt_t *cxt; + + cxt = (viddec_pm_cxt_t *)parent; + + return cxt->emitter.next.data; +} + +int32_t viddec_pm_is_nomoredata(void *parent) +{ 
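+ /* Thin wrapper over viddec_pm_utils_bstream_nomorerbspdata(): returns nonzero when no more RBSP payload is left in the current access unit. */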
+ int32_t ret=0; + viddec_pm_cxt_t *cxt; + + cxt = (viddec_pm_cxt_t *)parent; + ret = viddec_pm_utils_bstream_nomorerbspdata(&(cxt->getbits)); + return ret; +} + +uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte) +{ + int32_t ret=-1; + viddec_pm_cxt_t *cxt; + + cxt = (viddec_pm_cxt_t *)parent; + ret = viddec_pm_utils_bstream_get_current_byte(&(cxt->getbits), byte); + return ret; +} + +int32_t viddec_pm_append_misc_tags(void *parent, uint32_t start, uint32_t end, viddec_workload_item_t *wi, uint32_t using_next) +{ + int32_t ret = 1; + viddec_pm_cxt_t *cxt; + + cxt = (viddec_pm_cxt_t *)parent; + if (end == VIDDEC_PARSE_INVALID_POS) end = (cxt->list.total_bytes -1); + viddec_pm_utils_list_emit_slice_tags(&(cxt->list), start, end, &(cxt->emitter), using_next, wi); + + return ret; + +} + +void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error) +{ + viddec_pm_cxt_t *cxt; + cxt = (viddec_pm_cxt_t *)parent; + cxt->next_workload_error_eos = error; +} + +void viddec_pm_set_late_frame_detect(void *parent) +{ + viddec_pm_cxt_t *cxt; + cxt = (viddec_pm_cxt_t *)parent; + cxt->late_frame_detect = true; +} + +void viddec_pm_setup_userdata(viddec_workload_item_t *wi) +{ +#ifdef MFDBIGENDIAN + wi->vwi_payload[0] = SWAP_WORD(wi->vwi_payload[0]); + wi->vwi_payload[1] = SWAP_WORD(wi->vwi_payload[1]); + wi->vwi_payload[2] = SWAP_WORD(wi->vwi_payload[2]); +#else + wi=wi; +#endif +} diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c new file mode 100644 index 0000000..0a6f09b --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c @@ -0,0 +1,21 @@ +#include "viddec_parser_ops.h" + +void viddec_vc1_get_ops(viddec_parser_ops_t *ops) +{ + return; +} + +void viddec_mpeg2_get_ops(viddec_parser_ops_t *ops) +{ + return; +} + +void viddec_mp4_get_ops(viddec_parser_ops_t *ops) +{ + return; +} + +void viddec_h264_get_ops(viddec_parser_ops_t *ops) +{ + return; +} diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c new file mode 100644 index 0000000..b0d8842 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c @@ -0,0 +1,304 @@ +#include "viddec_pm.h" +#include "viddec_fw_debug.h" +#include "viddec_fw_common_defs.h" +#include "viddec_pm_tags.h" +/* + Overview of tag association: + + Contribution flags: + The current list has all the buffers which contribute to this particular workload. So we walkthrough the + list and throw buf done for all the buffers which were consumed. This can be deduced from total bytes we + in list which represents the bytes that were used for this acces unit. + For buffers which were partially used and this can only be the last buffer we throw continued tag. The + Parser manager tells us when to throw a continued tag. This will only happen when parser Manager detects + that we reached end of current frame. + + Association Tags: + These are the tags that FW generates which indicates how to associate metadata with Frames. + The policy to determine which tag belongs to which frame is based on sc prefix position. If ES buffer starts with + or has a sc prefix its associated to next decodable frame(based on first slice or header depending on codec). + We use three state variables to determine where the frame starts and ends. + frame_start_found: Indicates we saw the beggining of frame in current list of ES buffers(which represent current acces unit). + This is decremented on workload done since it normally means we detected frame end. 
+ found_fm_st_in_current_au:Indicates we saw the first slice in current access unit. Its mainly used to decide whether the first buffer + belongs to current frame or next frame. Its reset after its use. + Frame Done: Indicates we detected end of frame pointed by current workload. + + Basic algo: + If we find frame start and if first buffer doesn't start with SC prefix Every consumed buffer belongs to Next frame. If first buffer + starts with SC prefix on that buffer belongs to Current frame. + If we haven't found frame start every buffer belongs to current frame. + + TODO: Check for return codes from emitter +*/ + + +/* + This function generates contribution tags current workload by walking through list of consumed buffers. + If frame is done(ignore_partial is false) we generate continue tags for the last item in list(if its not completely consumed). + This is used for all codecs except H264. + */ +uint32_t viddec_pm_generic_generate_contribution_tags(void *parent, uint32_t ignore_partial) +{ + uint32_t ret = PM_SUCCESS; + viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent; + viddec_pm_utils_list_t *list = &(cxt->list); + + if(list->num_items != 0) + { + if(!cxt->late_frame_detect) + { + uint32_t num_items = 0; + while((num_items < list->num_items) && (list->data[num_items].edpos <= (uint32_t)list->total_bytes)) + {/* Walkthrough Consumed buffers and dump the tags */ + viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), false, false); + num_items++; + } + /* Dump incomplete tags if required */ + if(!ignore_partial) + {/* check to see if last item is not consumed and dump continued flag */ + if((num_items < list->num_items) + && (list->data[num_items].edpos >= (uint32_t)list->total_bytes)) + { + viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), true, false); + } + } + } + else + { + /* Only happens for dangling fields in MP2 Field pictures, in which case we find out the current frame was done in + last access unit, which is similar to H264 */ + ret = viddec_pm_lateframe_generate_contribution_tags(parent, ignore_partial); + cxt->late_frame_detect = false; + } + } + return ret; +} + +/* + For H264 when a frame is done it really means current frame was done in last access unit. The current access unit represnted + by list belongs to next frame. ignore_partial is false for frame done. + When frame is not done we dump all consumed buffers into next workload else they go to current workload. + If frame is done we throw a continued flag for first buffer in current workload if it was used in last access unit. + */ +uint32_t viddec_pm_lateframe_generate_contribution_tags(void *parent, uint32_t ignore_partial) +{ + uint32_t ret = PM_SUCCESS; + viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent; + viddec_pm_utils_list_t *list = &(cxt->list); + + if(list->num_items != 0) + { + uint32_t num_items = 0; + /* If start offset is not 0 then it was partially used in last access unit. !ignore_partial means frame done*/ + if((list->start_offset!= 0) && !ignore_partial) + {/* Emit continue in current if necessary. 
*/ + viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), true, false); + } + + while((num_items < list->num_items) && (list->data[num_items].edpos <= (uint32_t)list->total_bytes)) + { /* Walkthrough Consumed buffers and dump the tags to current or Next*/ + viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), false, !ignore_partial); + num_items++; + } + } + return ret; +} + +/* + This function dumps tags from temporary array into a workload(we indicate either current or next from using_next). +*/ +uint32_t viddec_pm_generate_missed_association_tags(viddec_pm_cxt_t *cxt, uint32_t using_next) +{ + uint32_t i=0, ret = PM_SUCCESS; + + while((i < MAX_IBUFS_PER_SC) && (cxt->pending_tags.pending_tags[i] != INVALID_ENTRY)) + { + viddec_emit_assoc_tag(&(cxt->emitter), cxt->pending_tags.pending_tags[i], using_next); + cxt->pending_tags.pending_tags[i] = INVALID_ENTRY; + i++; + } + return ret; +} + +/* This function adds current list of es buffer to pending list. ignore_first when set tells us to ignore the first + buffer in list. +*/ +void viddec_pm_add_tags_to_pendinglist(viddec_pm_cxt_t *cxt, uint32_t ignore_first) +{ + viddec_pm_utils_list_t *list = &(cxt->list); + vidded_pm_pending_tags_t *pend = &(cxt->pending_tags); + uint32_t index=0, t_index=0; + + if(!ignore_first && (list->start_offset == 0)) + {/* If start offset is 0 we are saying that first buffer in list starts with start code */ + pend->first_buf_aligned = true; + } + else + {/* We are ignoring first item in list since we already threw a tag for this buffer */ + index++; + pend->first_buf_aligned = false; + } + + while( (index < list->num_items) && (list->data[index].edpos <= (uint32_t)list->total_bytes)) + {/* walk through consumed buffers and buffer id's in pending list */ + pend->pending_tags[t_index] = list->sc_ibuf[index].id; + index++;t_index++; + } + if( (index < list->num_items) && (list->data[index].stpos < (uint32_t)list->total_bytes)) + {/* If last item is partially consumed still add it to pending tags since tag association is based on start of ES buffer */ + pend->pending_tags[t_index] = list->sc_ibuf[index].id; + } +} + +/* Helper function to emit a association tag from pending list and resetting the value to invalid entry */ +static inline void viddec_pm_emit_pending_tag_item(viddec_emitter *emit, vidded_pm_pending_tags_t *pend, uint32_t index, uint32_t using_next) +{ + viddec_emit_assoc_tag(emit, pend->pending_tags[index], using_next); + pend->pending_tags[index] = INVALID_ENTRY; +} + +/* + Tag association for mpeg2: + start frame is detected in pict header extension, but pict header represents start of frame. + To handle this we always store current AU list in temporary pending list. At the start of function + we look to see if a frame start was found, if we did we start dumping items from pending list based + on byte position of sc in first buffer of pending list. At the end we copy current list items to + pending list. + Limitation With Dangling fields: If we have AF1 AF2 BF1 CF1 CF2 as the sequence of fields + Tag assocaiation will be fine for A & B, However the first buffer tag on C will fall into B + We donot want to fix this issue right now as it means doubling size of pending list which + increases memory usage. Normally dangling fields are thrown away so worst case we will miss + one original PTS, So its OK not to fix it right now. 
+/*
+ Tag association for mpeg2:
+ Frame start is detected in the picture header extension, but the picture header itself represents the start of the frame.
+ To handle this we always store the current AU list in a temporary pending list. At the start of the function
+ we check whether a frame start was found; if it was, we start dumping items from the pending list based
+ on the byte position of the SC in the first buffer of the pending list. At the end we copy the current list items to the
+ pending list.
+ Limitation with dangling fields: if we have AF1 AF2 BF1 CF1 CF2 as the sequence of fields,
+ tag association will be fine for A & B; however, the first buffer tag on C will fall into B.
+ We do not want to fix this issue right now as it means doubling the size of the pending list, which
+ increases memory usage. Normally dangling fields are thrown away, so in the worst case we miss
+ one original PTS; it's OK not to fix it right now.
+ */
+uint32_t viddec_mpeg2_add_association_tags(void *parent)
+{
+ uint32_t ret = PM_SUCCESS;
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent;
+ vidded_pm_pending_tags_t *pend = &(cxt->pending_tags);
+ uint32_t first_slice = false, index = 0;
+ /* check to see if we found a frame start in the current access unit */
+ first_slice = cxt->frame_start_found && cxt->found_fm_st_in_current_au;
+ cxt->found_fm_st_in_current_au = false;
+ /* If we found a frame start and the first item in pending tags starts with a start code,
+ then it needs to go to the current frame. */
+ if(first_slice && pend->first_buf_aligned && (pend->pending_tags[index] != INVALID_ENTRY))
+ {
+ viddec_pm_emit_pending_tag_item(&(cxt->emitter), pend, index, false);
+ index++;
+ }
+ /* The rest of the list goes to current if frame start is not found, else to the next frame */
+ while((index < MAX_IBUFS_PER_SC) && (pend->pending_tags[index] != INVALID_ENTRY))
+ {
+ viddec_pm_emit_pending_tag_item(&(cxt->emitter), pend, index, cxt->frame_start_found);
+ index++;
+ }
+ /* Copy items to the temporary list */
+ viddec_pm_add_tags_to_pendinglist(cxt, false);
+ return ret;
+}
+
+/*
+ Tag association for h264:
+ In this case, when we get frame done it means the current frame was done in the last access unit. The data in the current list belongs
+ to the next frame. To handle this we always dump the buffered tags from the last list and throw them into the current/next frame based on pend state.
+ If the first item in the current list is on an SC boundary, it has to go into next, so we always throw that tag into next.
+ For the rest of the items we store them in the pending tags array and record where these stored tags should go on the
+ next run. This is determined by frame start. We do this because at this point our next should be current, and "next next" should
+ be next.
+ */
+uint32_t viddec_h264_add_association_tags(void *parent)
+{
+ uint32_t ret = PM_SUCCESS;
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent;
+ viddec_pm_utils_list_t *list = &(cxt->list);
+ vidded_pm_pending_tags_t *pend = &(cxt->pending_tags);
+ uint32_t first_slice = false, index = 0;
+
+ /* Throw tags for items from the pending list based on the stored state from the last run */
+ viddec_pm_generate_missed_association_tags(cxt, pend->using_next);
+ first_slice = cxt->frame_start_found && cxt->found_fm_st_in_current_au;
+ cxt->found_fm_st_in_current_au = false;
+ /* If we saw a frame start and the first buffer is aligned to a start code, throw it into next */
+ if(first_slice && (list->start_offset == 0))
+ {
+ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, cxt->frame_start_found && cxt->pending_tags.frame_done);
+ index++;
+ }
+ /* add tags to the pending list */
+ viddec_pm_add_tags_to_pendinglist(cxt, (index != 0));
+ /* We want to figure out where these buffers should go. There are three possible cases:
+ current: If no frame start is found these should go into current.
+ next: If one frame start is found and the frame is not done then they should go to next.
+ If a frame is done then pm will push current out, and the next time we come here the previous next is current.
+ next next: If two frame starts are found then we want them in the next next workload, which is what next will be
+ when we get called next time.
+ */
+ pend->using_next = (!cxt->pending_tags.frame_done && (cxt->frame_start_found == 1)) || (cxt->frame_start_found > 1);
+ return ret;
+}
+
+/*
+ Tag association for vc1:
+ Frame header represents the start of a new frame.
If we saw a frame start in current access unit and the buffer starts + with start code it needs to go to current frame. Rest of items go to next if frame start found else current frame. + */ +uint32_t viddec_generic_add_association_tags(void *parent) +{ + uint32_t ret = PM_SUCCESS; + viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent; + viddec_pm_utils_list_t *list = &(cxt->list); + uint32_t not_first_slice = false, index = 0; + + /* We check to see if this access unit is not the first one with frame start. This evaluates to true in that case */ + not_first_slice = cxt->frame_start_found && !cxt->found_fm_st_in_current_au; + cxt->found_fm_st_in_current_au = false; + if(list->start_offset == 0) + {/* If start offset is 0, we have start code at beggining of buffer. If frame start was detected in this + access unit we put the tag in current else it goes to next */ + viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, not_first_slice); + } + /* Skip first item always, for start_offset=0 its already been handled above*/ + index++; + while( (index < list->num_items) && (list->data[index].edpos <= (uint32_t)list->total_bytes)) + {/* Walkthrough Consumed buffers and dump the tags to current or next*/ + viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, cxt->frame_start_found); + index++; + } + if( (index < list->num_items) && (list->data[index].stpos < (uint32_t)list->total_bytes)) + {/* Dump last item if it was partially consumed */ + viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, cxt->frame_start_found); + } + return ret; +} + +/* + This function throws tags for buffers which were not used yet during flush. + */ +void viddec_pm_generate_tags_for_unused_buffers_to_flush(viddec_pm_cxt_t *cxt) +{ + viddec_pm_utils_list_t *list; + uint32_t index=0; + + list = &(cxt->list); + /* Generate association tags from temporary pending array */ + viddec_pm_generate_missed_association_tags(cxt, false); + if(list->num_items > 0) + { + /* Throw contribution flag for first item as done */ + viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[index]), false, false); + if(cxt->list.start_offset == 0) + {/* Throw association for first item if it was not done already */ + viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, false); + } + index++; + while(index < list->num_items) + {/* Walk through list and throw contribution and association flags */ + viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[index]), false, false); + viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, false); + index++; + } + } + /* Not required to re init list structure as flush takes care of it */ +} + diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c new file mode 100644 index 0000000..8d3f329 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c @@ -0,0 +1,472 @@ +#include "viddec_pm_utils_bstream.h" +#include "viddec_fw_debug.h" + +/* Internal data structure for calculating required bits. 
*/
+typedef union
+{
+ uint8_t byte[8];
+ uint32_t word[2];
+}viddec_pm_utils_getbits_t;
+
+void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt);
+uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index);
+extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
+/* Bytes left in the cubby buffer which were not consumed yet */
+static inline uint32_t viddec_pm_utils_bstream_bytesincubby(viddec_pm_utils_bstream_buf_cxt_t *cxt)
+{
+ return (cxt->buf_end - cxt->buf_index);
+}
+
+/*
+ This function checks to see if we are at the last valid byte of the current access unit.
+*/
+uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+ uint32_t data_remaining = 0;
+ uint8_t ret = false;
+
+ /* How much data is remaining, including the current byte to be processed. */
+ data_remaining = cxt->list->total_bytes - (cxt->au_pos + (cxt->bstrm_buf.buf_index - cxt->bstrm_buf.buf_st));
+
+ /* Start code prefix can be 000001 or 0000001. We always only check for 000001.
+ data_remaining should be 1 for 000001, as we don't count the SC prefix and 1 represents the current byte.
+ data_remaining should be 2 for 00000001, as we don't count the SC prefix, its current byte and the extra 00, since we check for 000001.
+ NOTE: This is used for H264 only.
+ */
+ switch(data_remaining)
+ {
+ case 2:
+ /* If the next byte is 0 and it's the last byte in the access unit */
+ ret = (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x0);
+ break;
+ case 1:
+ /* if the current byte is the last byte */
+ ret = true;
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
+
+/*
+ This function returns true if the cubby buffer has the last byte of the access unit.
+*/
+uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+ uint32_t last_byte_offset_plus_one=0;
+ uint8_t ret = false;
+ /* Check to see if the last byte's access-unit offset is the last byte of the current access unit.
+ End represents the first invalid byte, so (end - st) gives the number of bytes. */
+ last_byte_offset_plus_one = cxt->au_pos + (cxt->bstrm_buf.buf_end - cxt->bstrm_buf.buf_st);
+ if((int32_t)last_byte_offset_plus_one >= cxt->list->total_bytes)
+ {
+ ret = true;
+ }
+ return ret;
+}
+
+/* This function initializes the scratch buffer, which is used for staging already read data, due to DMA limitations */
+static inline void viddec_pm_utils_bstream_scratch_init(viddec_pm_utils_bstream_scratch_cxt_t *cxt)
+{
+ cxt->st = cxt->size = cxt->bitoff=0;
+}
+
+/* This function tells us how much more data is in the current ES buffer from the current position. It's used to figure out if
+ we need to go to the next ES buffer */
+static inline uint32_t viddec_pm_utils_bstream_datafromindex(viddec_pm_utils_list_t *list, uint32_t index, uint32_t offset)
+{
+ uint32_t ret=0;
+ int32_t val=0;
+ val = (list->data[index].edpos <= (uint32_t)list->total_bytes) ? list->data[index].edpos: (uint32_t)list->total_bytes;
+ val = val - (int32_t)offset;
+ if(val > 0) ret = (uint32_t)val;
+ return ret;
+}
+
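+/* The byte math in viddec_pm_utils_bstream_datafromindex() can be sanity-checked in
+   isolation (the edpos, total_bytes and offset values are made up): clamp the buffer
+   end to the access-unit size, subtract the current offset, and report a
+   non-positive result as zero bytes remaining:
+
+       #include <stdio.h>
+       #include <stdint.h>
+       int main(void)
+       {
+           uint32_t edpos = 400, total_bytes = 300, offset = 280;
+           uint32_t end = (edpos <= total_bytes) ? edpos : total_bytes; // clamp to AU
+           int32_t val = (int32_t)end - (int32_t)offset;
+           uint32_t remaining = (val > 0) ? (uint32_t)val : 0;
+           printf("%u bytes left in this ES buffer\n", (unsigned)remaining); // 20
+           return 0;
+       }
+*/
+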
+/* This function seeks to the byte offset position starting from lst_index. If more data is present in the current ES buffer pointed to by
+ lst_index, it returns the remaining data in the current buffer along with the physical address of the byte offset. On return, the lst_index
+ parameter holds the index of the ES buffer in the list which contains byte_offset */
+static inline uint32_t viddec_pm_utils_bstream_maxbytes_from_index(viddec_pm_utils_bstream_cxt_t *cxt,
+ uint32_t *lst_index,
+ uint32_t byte_offset,
+ uint32_t *physaddr)
+{
+ viddec_pm_utils_list_t *list;
+ uint32_t last_byte_offst=0, bytes_left=0;/* default return value is 0 bytes */
+
+ list = cxt->list;
+ while(*lst_index < list->num_items)
+ {
+ /* Check to see if we reached the buffer with the last valid byte of the current access unit; the list can have data beyond the current access unit */
+ last_byte_offst = (list->data[*lst_index].edpos <= (uint32_t)list->total_bytes) ? list->data[*lst_index].edpos: (uint32_t)list->total_bytes;
+ if(byte_offset < last_byte_offst)
+ {/* Found a match so return with data remaining */
+#if 1
+ int32_t val=0;
+ val = last_byte_offst - (int32_t)byte_offset;
+ if(val > 0) bytes_left = (uint32_t)val;
+#else
+ bytes_left = viddec_pm_utils_bstream_datafromindex(list, *lst_index, byte_offset);
+#endif
+ *physaddr = viddec_pm_utils_bstream_getphys(cxt, byte_offset, *lst_index);
+ break;
+ }
+ *lst_index+=1;
+ }
+ return bytes_left;
+}
+
+/* This function is for copying trailing bytes of the cubby bitstream buffer to the scratch buffer */
+static inline void viddec_pm_utils_bstream_scratch_copyto(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data, uint32_t num_bytes)
+{
+ uint32_t i=0;
+ for(i=0; i<num_bytes; i++)
+ {
+ cxt->buf_scratch[i] = *data;
+ data++;cxt->size++;
+ }
+}
+
+/* This function is for copying trailing bytes from the scratch buffer to the bitstream buffer */
+static inline void viddec_pm_utils_bstream_scratch_copyfrom(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data)
+{
+ uint32_t i=0;
+ for(i=0; i<cxt->size; i++)
+ {
+ *data = cxt->buf_scratch[i];
+ data++;
+ }
+}
+
+/* This function populates the requested number of bytes into the data parameter and skips emulation prevention bytes if needed */
+static inline int32_t viddec_pm_utils_getbytes(viddec_pm_utils_bstream_buf_cxt_t *bstream,
+ viddec_pm_utils_getbits_t *data,/* gets populated with read bytes */
+ uint32_t *act_bytes, /* actual number of bytes read; can be more due to emulation prevention bytes */
+ uint32_t *phase, /* Phase for emulation */
+ uint32_t num_bytes,/* requested number of bytes */
+ uint32_t emul_reqd, /* On true we look for emulation prevention */
+ uint8_t is_offset_zero /* Are we on an aligned byte position for the first byte? */
+ )
+{
+ int32_t ret = 1;
+ uint8_t cur_byte = 0, valid_bytes_read = 0;
+ *act_bytes = 0;
+
+ while(valid_bytes_read < num_bytes)
+ {
+ cur_byte = bstream->buf[bstream->buf_index + *act_bytes];
+ if((cur_byte == 0x3) &&(*phase == 2))
+ {/* skip the emulation byte. we update the phase only if emulation prevention is enabled */
+ *phase = 0;
+ }
+ else
+ {
+ data->byte[valid_bytes_read] = cur_byte;
+ /*
+ We only update the phase for the first byte if the bit offset is 0. If it's not 0 then it was already accounted for in the past.
+ From the second byte onwards we always look to update the phase.
+ */
+ if((*act_bytes != 0) || (is_offset_zero))
+ {
+ if(cur_byte == 0)
+ {
+ /* Update the phase only if emulation prevention is required */
+ *phase +=( ((*phase < 2) && emul_reqd ) ? 1: 0 );
+ }
+ else
+ {
+ *phase=0;
+ }
+ }
+ valid_bytes_read++;
+ }
+ *act_bytes +=1;
+ }
+ /* Check to see if we reached the end during the above operation. We might be out of range, but it's safe since our array
+ has at least MIN_DATA extra bytes and the maximum out of bounds we will go is 5 bytes */
+ if((bstream->buf_index + *act_bytes -1) >= bstream->buf_end)
+ {
+ ret = -1;
+ }
+ return ret;
+}
+
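+/* The emulation-prevention phase logic above is easiest to see on a concrete byte
+   string. A stand-alone sketch (the input bytes are made up and emulation prevention
+   is assumed enabled, i.e. emul_reqd true): two consecutive zero bytes drive the
+   phase to 2, and a following 0x03 byte is then dropped instead of returned:
+
+       #include <stdio.h>
+       #include <stdint.h>
+       int main(void)
+       {
+           uint8_t in[6] = {0x00, 0x00, 0x03, 0x01, 0x00, 0x42};
+           uint32_t phase = 0, i;
+           for (i = 0; i < 6; i++)
+           {
+               if ((in[i] == 0x03) && (phase == 2))
+               {
+                   phase = 0; // emulation prevention byte: skip it
+                   continue;
+               }
+               phase = (in[i] == 0) ? ((phase < 2) ? phase + 1 : phase) : 0;
+               printf("%02x ", in[i]); // kept RBSP byte
+           }
+           printf("\n"); // prints: 00 00 01 00 42
+           return 0;
+       }
+*/
+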
+/*
+ This function checks to see if we have the minimum amount of data, else it tries to reload as much as it can.
+ It always returns the data left in the current buffer through the data_left parameter.
+*/
+static inline void viddec_pm_utils_check_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *data_left)
+{
+#ifdef VBP
+ *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+#else
+ uint8_t isReload=0;
+
+ *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+ /* If we have the minimum data we should continue, else try to read more data */
+ if(*data_left < MIN_DATA)
+ {
+ isReload = 1;
+ while(isReload)
+ {
+ viddec_pm_utils_bstream_reload(cxt);
+ *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+ /* Break out of the loop if we reached the last byte or we have enough data */
+ isReload = !((*data_left >= MIN_DATA) || (viddec_pm_utils_bstream_nomoredata(cxt) == 1));
+ }
+ }
+#endif
+}
+/*
+ This function moves the stream position by N bits (the bits parameter). The bytes parameter tells us how many bytes were
+ read for these N bits (can be different due to emulation bytes).
+*/
+static inline void viddec_pm_utils_update_skipoffsets(viddec_pm_utils_bstream_buf_cxt_t *bstream, uint32_t bits, uint32_t bytes)
+{
+ if((bits & 0x7) == 0)
+ {
+ bstream->buf_bitoff = 0;
+ bstream->buf_index +=bytes;
+ }
+ else
+ {
+ bstream->buf_bitoff = bits & 0x7;
+ bstream->buf_index +=(bytes - 1);
+ }
+}
+
+/*
+ This function gets the physical address of the requested AU offset (pos).
+*/
+
+uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index)
+{
+ uint32_t ret = 0, last_byte_offst=0;
+ viddec_pm_utils_list_t *list;
+
+ list = cxt->list;
+ while(lst_index < list->num_items)
+ {
+ last_byte_offst = (list->data[lst_index].edpos <= (uint32_t)list->total_bytes) ? list->data[lst_index].edpos: (uint32_t)list->total_bytes;
+ if(pos < last_byte_offst)
+ {
+#ifndef MFDBIGENDIAN
+ ret = (uint32_t)list->sc_ibuf[lst_index].buf;
+#else
+ ret = list->sc_ibuf[lst_index].phys;
+#endif
+ ret +=(pos - list->data[lst_index].stpos);
+ if(lst_index == 0) ret+=list->start_offset;
+ break;
+ }
+ lst_index++;
+ }
+ return ret;
+}
+
+/*
+ Actual reload function which uses DMA to refill the bitstream buffer.
+*/ +void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt) +{ + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + + /* Update current offset positions */ + cxt->au_pos += (bstream->buf_index - bstream->buf_st); + bstream->buf_st = bstream->buf_index; + /* copy leftover bytes into scratch */ + { + int32_t cur_bytes=0; + viddec_pm_utils_bstream_scratch_init(&(cxt->scratch)); + cur_bytes = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf)); + if(cur_bytes > 0) + { + viddec_pm_utils_bstream_scratch_copyto(&(cxt->scratch), &(bstream->buf[bstream->buf_index]), cur_bytes); + cxt->scratch.bitoff = bstream->buf_bitoff; + } + } + /* Initiate DMA and copyback scratch data */ + { + uint32_t data_left = 0, ddr_mask=0; + /* calculate necesary aligmnets and copy data */ + { + uint32_t ddr_addr=0, data_wrote=0; + uint32_t byte_pos; + /* byte pos points to the position from where we want to read data.*/ + byte_pos = cxt->au_pos + cxt->scratch.size; + data_left = viddec_pm_utils_bstream_maxbytes_from_index(cxt, &(cxt->list_off), byte_pos, &ddr_addr); + if(data_left > CUBBY_SIZE) + { + data_left = CUBBY_SIZE; + } + if(data_left != 0) + { + ddr_mask = ddr_addr & 0x3; + ddr_addr = ddr_addr & ~0x3; + data_wrote = cp_using_dma(ddr_addr, (uint32_t)&(bstream->buf[MIN_DATA]), (data_left + ddr_mask), 0, 1); + } + } + /* copy scratch data back to buffer and update offsets */ + { + uint32_t index=0; + index = MIN_DATA + ddr_mask; + index -= cxt->scratch.size; + viddec_pm_utils_bstream_scratch_copyfrom(&(cxt->scratch), &(bstream->buf[index])); + bstream->buf_st = bstream->buf_index = index; + bstream->buf_end = data_left + cxt->scratch.size + bstream->buf_st; + bstream->buf_bitoff = cxt->scratch.bitoff; + } + } +} + +/* + Init function called by parser manager after sc code detected. +*/ +void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul) +{ +#ifdef VBP + cxt->emulation_byte_counter = 0; +#endif + + cxt->au_pos = 0; + cxt->list = list; + cxt->list_off = 0; + cxt->phase = 0; + cxt->is_emul_reqd = is_emul; + cxt->bstrm_buf.buf_st = cxt->bstrm_buf.buf_end = cxt->bstrm_buf.buf_index = cxt->bstrm_buf.buf_bitoff = 0; +} + +/* Get the requested byte position. If the byte is already present in cubby its returned + else we seek forward and get the requested byte. + Limitation:Once we seek forward we can't return back. +*/ +int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t *cxt, uint8_t *byte) +{ + int32_t ret = -1; + uint32_t data_left=0; + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + viddec_pm_utils_check_bstream_reload(cxt, &data_left); + if(data_left != 0) + { + *byte = bstream->buf[bstream->buf_index]; + ret = 1; + } + return ret; +} + +/* + Function to skip N bits ( N<= 32). 
+*/ +int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits) +{ + int32_t ret = -1; + uint32_t data_left=0; + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + viddec_pm_utils_check_bstream_reload(cxt, &data_left); + if((num_bits <= 32) && (num_bits > 0) && (data_left != 0)) + { + uint8_t bytes_required=0; + + bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3; + if(bytes_required <= data_left) + { + viddec_pm_utils_getbits_t data; + uint32_t act_bytes =0; + if(viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &(cxt->phase), bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1) + { + uint32_t total_bits=0; + total_bits=num_bits+bstream->buf_bitoff; + viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes); + ret=1; + + if (act_bytes > bytes_required) + { + cxt->emulation_byte_counter = act_bytes - bytes_required; + } + } + } + } + return ret; +} + +/* + Function to get N bits ( N<= 32). +*/ +int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip) +{ + uint32_t data_left=0; + int32_t ret = -1; + /* STEP 1: Make sure that we have at least minimum data before we calculate bits */ + viddec_pm_utils_check_bstream_reload(cxt, &data_left); + + if((num_bits <= 32) && (num_bits > 0) && (data_left != 0)) + { + uint32_t bytes_required=0; + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3; + + /* Step 2: Make sure we have bytes for requested bits */ + if(bytes_required <= data_left) + { + uint32_t act_bytes, phase; + viddec_pm_utils_getbits_t data; + phase = cxt->phase; + /* Step 3: Due to emualtion prevention bytes sometimes the bytes_required > actual_required bytes */ + if(viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &phase, bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1) + { + uint32_t total_bits=0; + uint32_t shift_by=0; + /* zero out upper bits */ + /* LIMITATION:For some reason compiler is optimizing it to NOP if i do both shifts + in single statement */ + data.byte[0] <<= bstream->buf_bitoff; + data.byte[0] >>= bstream->buf_bitoff; + +#ifndef MFDBIGENDIAN + data.word[0] = SWAP_WORD(data.word[0]); + data.word[1] = SWAP_WORD(data.word[1]); +#endif + total_bits = num_bits+bstream->buf_bitoff; + if(total_bits > 32) + { + /* We have to use both the words to get required data */ + shift_by = total_bits - 32; + data.word[0] = (data.word[0] << shift_by) | ( data.word[1] >> (32 - shift_by)); + //total_bits -= shift_by;/* BUG */ + } + else + { + shift_by = 32 - total_bits; + data.word[0] = data.word[0] >> shift_by; + } + *out = data.word[0]; + if(skip) + { + /* update au byte position if needed */ + viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes); + cxt->phase = phase; + + if (act_bytes > bytes_required) + { + cxt->emulation_byte_counter += act_bytes - bytes_required; + } + } + + ret =1; + } + } + } + return ret; +} diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c new file mode 100644 index 0000000..ccc83b3 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c @@ -0,0 +1,221 @@ +#include "viddec_pm_utils_list.h" +#include "viddec_fw_debug.h" + +/* + Initialize list. 
+ */ +void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt) +{ + cxt->num_items = 0; + cxt->start_offset = 0; + cxt->end_offset = -1; + cxt->total_bytes = 0; + cxt->first_scprfx_length = 0; +} + +/* + Add a new ES buffer to list. If not succesful returns 0. + */ +uint32_t viddec_pm_utils_list_addbuf(viddec_pm_utils_list_t *list, viddec_input_buffer_t *es_buf) +{ + uint32_t ret = 0; + if((list->num_items + 1) <= MAX_IBUFS_PER_SC) + { + list->num_items +=1; + list->sc_ibuf[list->num_items - 1] = *es_buf; + ret = 1; + } + return ret; +} + +/* + We return the index of es buffer and the offset into it for the requested byte offset. + EX: if byte=4, and the first es buffer in list is of length 100, we return lis_index=0, offset=3. + byte value should range from [1-N]. + */ +uint32_t viddec_pm_utils_list_getbyte_position(viddec_pm_utils_list_t *list, uint32_t byte, uint32_t *list_index, uint32_t *offset) +{ + uint32_t index = 0, accumulated_size=0; + + /* First buffer in list is always special case, since start offset is tied to it */ + accumulated_size = list->sc_ibuf[index].len - list->start_offset; + if( accumulated_size >= byte) + { + /* we found a match in first buffer itself */ + *offset = list->start_offset + byte - 1; + *list_index = index; + return 0; + } + index++; + /* walkthrough the list until we find the byte */ + while(index < list->num_items) + { + if((accumulated_size + list->sc_ibuf[index].len) >= byte) + { + *offset = byte - accumulated_size - 1; + *list_index = index; + return 0; + } + accumulated_size += list->sc_ibuf[index].len; + index++; + } + return 1; +} + +/* + Since the stream data can span multiple ES buffers on different DDR locations, for our purpose + we store start and end position on each ES buffer to make the data look linear. + The start represents the linear offset of the first byte in list. + end-1 represents linear offset of last byte in list. + */ +void viddec_pm_utils_list_updatebytepos(viddec_pm_utils_list_t *list, uint8_t sc_prefix_length) +{ + uint32_t items=0; + uint32_t start=0, end=0; + + if(list->num_items != 0) + { + end = list->sc_ibuf[0].len - list->start_offset; + if((int32_t)end >= list->total_bytes) end = list->total_bytes; + list->data[items].stpos = start; + list->data[items].edpos = end; + items++; + while((int32_t)end < list->total_bytes) + { + start = end; + end += list->sc_ibuf[items].len; + if((int32_t)end >= list->total_bytes) end = list->total_bytes; + list->data[items].stpos = start; + list->data[items].edpos = end; + items++; + } + while(items < list->num_items) + { + if(sc_prefix_length != 0) + { + start = end = list->total_bytes+1; + } + else + { + start = end = list->total_bytes; + } + list->data[items].stpos = start; + list->data[items].edpos = end; + items++; + } + /* Normal access unit sequence is SC+data+SC. We read SC+data+SC bytes so far. + but the current access unit should be SC+data, the Second SC belongs to next access unit. + So we subtract SC length to reflect that */ + list->total_bytes -= sc_prefix_length; + } +} + +static inline void viddec_pm_utils_list_emit_slice_tags_append(viddec_emitter_wkld *cur_wkld, viddec_workload_item_t *wi) +{ + /* + Most of the time len >0. However we can have a condition on EOS where the last buffer can be + zero sized in which case we want to make sure that we emit END of SLICE information. 
+ */ + if((wi->es.es_phys_len != 0) || (wi->es.es_flags&VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE)) + { + viddec_emit_append(cur_wkld, wi); + } +} + +/* + Emit requested tags for data from start to end position. The tags should include end byte too. + */ +void viddec_pm_utils_list_emit_slice_tags(viddec_pm_utils_list_t *list, uint32_t start, uint32_t end, viddec_emitter *emitter, uint32_t is_cur_wkld, viddec_workload_item_t *wi) +{ + if((list->num_items != 0) && ((int32_t)start < (list->total_bytes)) && ((int32_t)end <= (list->total_bytes))) + { + uint32_t flags=0, items=0; + viddec_emitter_wkld *cur_wkld; + + flags = wi->es.es_flags; + cur_wkld = (is_cur_wkld != 0) ? &(emitter->cur):&(emitter->next); + /* Seek until we find a ES buffer entry which has the start position */ + while(start >= list->data[items].edpos) items++; + + if(end < list->data[items].edpos) + { /* One ES buffer has both start and end in it. So dump a single entry */ + wi->es.es_phys_len = end - start + 1; + wi->es.es_phys_addr = list->sc_ibuf[items].phys + start - list->data[items].stpos; + /* Account for start_offset if its the first buffer in List */ + if(items == 0) wi->es.es_phys_addr += list->start_offset; + + wi->es.es_flags = flags | VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE | VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE; + viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi); + } + else + { + /* We know that there are at least two buffers for the requested data. Dump the first item */ + wi->es.es_phys_len = list->data[items].edpos - start; + wi->es.es_phys_addr = list->sc_ibuf[items].phys + start - list->data[items].stpos; + if(items == 0) wi->es.es_phys_addr += list->start_offset; + wi->es.es_flags = flags | VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE; + viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi); + items++; + /* Dump everything in between if any until the last buffer */ + while(end >= list->data[items].edpos) + { + wi->es.es_phys_len = list->data[items].edpos - list->data[items].stpos; + wi->es.es_phys_addr = list->sc_ibuf[items].phys; + wi->es.es_flags = flags; + viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi); + items++; + } + /* Dump ES buffer which has end in it along with end slice flag */ + wi->es.es_phys_len = end - list->data[items].stpos + 1; + wi->es.es_phys_addr = list->sc_ibuf[items].phys; + wi->es.es_flags = flags | VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE; + viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi); + } + } +} + +/* + We delete the consumed buffers in our list. If there are any buffers left over which have more data + the get moved to the top of the list array. 
+ */ +void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint32_t length) +{ + list->end_offset = -1; + + if(list->num_items != 0) + { + if(length != 0) + { + uint32_t items = list->num_items-1, byte_pos; + uint32_t index=0; + viddec_input_buffer_t *es_buf; + byte_pos = list->total_bytes; + while((list->data[items].edpos > byte_pos) && (list->data[items].stpos > byte_pos)) + { + items--; + } + if(items != 0) + { + list->start_offset = byte_pos - list->data[items].stpos; + while(items < list->num_items) + { + es_buf = &(list->sc_ibuf[items]); + list->sc_ibuf[index] = *es_buf; + index++; + items++; + } + list->num_items = index; + } + else + { + list->start_offset += (byte_pos - list->data[items].stpos); + } + } + else + { + list->num_items = 0; + list->start_offset = 0; + } + list->total_bytes = length; + } +} diff --git a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h new file mode 100644 index 0000000..bc2c239 --- /dev/null +++ b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h @@ -0,0 +1,200 @@ +/* + This file is provided under a dual BSD/GPLv2 license. When using or + redistributing this file, you may do so under either license. + + GPL LICENSE SUMMARY + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + + This program is free software; you can redistribute it and/or modify + it under the terms of version 2 of the GNU General Public License as + published by the Free Software Foundation. + + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. + The full GNU General Public License is included in this distribution + in the file called LICENSE.GPL. + + Contact Information: + + BSD LICENSE + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +*/ +#ifndef VIDDEC_FW_COMMON_DEFS_H +#define VIDDEC_FW_COMMON_DEFS_H + +#define VIDDEC_FW_PARSER_IPC_HOST_INT 0x87654321 +#define EMITTER_WORKLOAD_ENTRIES 2048 + +/* This enum defines priority level for opening a stream */ +enum viddec_stream_priority +{ + viddec_stream_priority_BACKGROUND, /* Lowest priority stream */ + viddec_stream_priority_REALTIME, /* Real time highest priority stream */ + viddec_stream_priority_INVALID, +}; + +/* This enum defines supported flush types */ +enum viddec_stream_flushtype +{ + VIDDEC_STREAM_FLUSH_DISCARD, /* Reinitialise to start state */ + VIDDEC_STREAM_FLUSH_PRESERVE, /* Reinitialise to start state by preserving sequence info*/ +}; + +enum viddec_stream_inband_flags +{ + VIDDEC_STREAM_DEFAULT_FLAG=0, /* Default value for flags */ + VIDDEC_STREAM_EOS, /* End of stream message */ + VIDDEC_STREAM_DISCONTINUITY, /* new segment which forces flush and preserve */ +}; + +/* Message descriptor for Parser's Input and output queues. needs to be 8 byte aligned */ +typedef struct viddec_input_buffer +{ + unsigned int flags; /* Flags for Inband messages like EOS, valid range defined in viddec_stream_inband_flags */ + unsigned int phys;/* DDR addr of where ES/WKLD is at. */ + unsigned int len;/* size of buffer at phys_addr */ + unsigned int id;/* A id for the buffer which is not used or modified by the FW. */ +#ifdef HOST_ONLY + unsigned char *buf; /* virt pointer to buffer. 
This is a don't care for FW */
+#endif
+}ipc_msg_data;
+
+typedef ipc_msg_data viddec_input_buffer_t;
+typedef ipc_msg_data viddec_ipc_msg_data;
+
+/* Return types for interface functions */
+typedef enum
+{
+ VIDDEC_FW_SUCCESS, /* Successful with the current operation */
+ VIDDEC_FW_NORESOURCES, /* No resources to execute the requested functionality */
+ VIDDEC_FW_FAILURE, /* Failed for an unknown reason */
+ VIDDEC_FW_INVALID_PARAM, /* The parameters that were passed are invalid */
+ VIDDEC_FW_PORT_FULL, /* The operation failed since the queue is full */
+ VIDDEC_FW_PORT_EMPTY, /* The operation failed since the queue is empty */
+ VIDDEC_FW_NEED_FREE_WKLD, /* The operation failed since a free wkld is not available */
+}viddec_fw_return_types_t;
+
+/* Defines for Interrupt mask and status */
+typedef enum
+{
+ VIDDEC_FW_WKLD_DATA_AVAIL=1, /* A processed workload is available */
+ VIDDEC_FW_INPUT_WATERMARK_REACHED=2, /* The input path is below the set watermark for the current stream */
+}viddec_fw_parser_int_status_t;
+
+/* Defines for attributes on a stream; if not set explicitly they will have default values */
+typedef enum
+{
+ VIDDEC_FW_INPUT_Q_WATERMARK, /* Define for setting Input queue watermarks */
+ VIDDEC_FW_STREAM_PRIORITY, /* Define for setting stream priority */
+}viddec_fw_stream_attributes_t;
+
+typedef struct
+{
+ unsigned int input_q_space; /* Num of messages that can be written to input queue */
+ unsigned int output_q_data; /* Num of messages in output queue */
+ unsigned int workload_q_status; /* Number of free wklds available to parser */
+}viddec_fw_q_status_t;
+
+typedef struct
+{
+ unsigned int to_fw_q_space; /* Num of messages that can be written to input queue */
+ unsigned int from_fw_q_data; /* Num of messages in output queue */
+}viddec_fw_decoder_q_status_t;
+
+enum viddec_fw_decoder_int_status
+{
+ VIDDEC_FW_DECODER_INT_STATUS_STREAM_0 = (1<< 0), /* Decoder Stream 0 Requires Service */
+ VIDDEC_FW_DECODER_INT_STATUS_STREAM_1 = (1<< 1), /* Decoder Stream 1 Requires Service */
+ VIDDEC_FW_DECODER_INT_STATUS_STREAM_2 = (1<< 2), /* Decoder Stream 2 Requires Service */
+
+
+ VIDDEC_FW_DECODER_INT_STATUS_STREAM_HIGH = (1<<30), /* Any Decoder Stream >= 30 Requires Service */
+ VIDDEC_FW_DECODER_INT_STATUS_AUTO_API = (1<<31) /* An Auto-API Function has completed */
+};
+
+/** Hardware Accelerated stream formats */
+enum viddec_stream_format
+{
+ MFD_STREAM_FORMAT_MPEG=1,
+ MFD_STREAM_FORMAT_H264,
+ MFD_STREAM_FORMAT_VC1,
+ MFD_STREAM_FORMAT_MPEG42,
+
+ MFD_STREAM_FORMAT_MAX, /* must be last */
+ MFD_STREAM_FORMAT_INVALID
+};
+
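+/* The workload error codes below are single-bit flags OR'd into one 32-bit error
+   word, so a host tests them with bitwise AND rather than equality. A minimal
+   sketch (the err value is made up; the 0xFF mask follows the "first 8 bits
+   reserved for non-decodable errors" note below):
+
+       #include <stdio.h>
+       #include <stdint.h>
+       int main(void)
+       {
+           uint32_t err = (1u << 0) | (1u << 9); // NOTDECODABLE | CONCEALED
+           if (err & 0xFF)
+               printf("workload not decodable\n");
+           if (err & (1u << 9))
+               printf("decoder concealed some errors\n");
+           return 0;
+       }
+*/
+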
+/* Workload specific error codes */
+enum viddec_fw_workload_error_codes
+{
+ VIDDEC_FW_WORKLOAD_SUCCESS = 0,
+ VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE = (1 << 0),/* Parser/Decoder detected a non-decodable error with this workload */
+ VIDDEC_FW_WORKLOAD_ERR_BUFFERS_OVERFLOW = (1 << 1),/* Parser detected more than 64 buffers between two start codes */
+ VIDDEC_FW_WORKLOAD_ERR_ITEMS_OVERFLOW = (1 << 2),/* Parser detected overflow of currently allocated workload memory */
+ VIDDEC_FW_WORKLOAD_ERR_FLUSHED_FRAME = (1 << 3),/* This is a partial or empty frame which was flushed by Parser/Decoder */
+ VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM = (1 << 4),/* This is a partial or empty frame from Parser/Decoder */
+ VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED = (1 << 5),/* Parser detected an unsupported feature in the stream */
+ /* First 8 bits reserved for Non Decodable errors */
+ VIDDEC_FW_WORKLOAD_ERR_CONCEALED = (1 << 9),/* The decoder concealed some errors in this frame */
+ VIDDEC_FW_WORKLOAD_ERR_MISSING_REFERENCE = (1 << 10),/* Decoder/parser detected that at least one of the required reference frames is missing */
+ VIDDEC_FW_WORKLOAD_ERR_IN_REFERENCE = (1 << 11),/* Decoder/parser detected that at least one of the reference frames has errors in it */
+ VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD = (1 << 12),/* Parser detected that at least one of the fields is missing */
+ VIDDEC_FW_WORKLOAD_ERR_PARTIAL_SLICE = (1 << 13),/* Decoder detected at least one partial slice */
+ VIDDEC_FW_WORKLOAD_ERR_MACROBLOCK = (1 << 14),/* Decoder detected macroblock errors */
+ VIDDEC_FW_WORKLOAD_ERR_MISSING_SEQ_INFO = (1 << 16),/* Parser detected that sequence information is missing */
+
+ VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17),/* Decoder/Parser detected errors in "top field" or "frame" */
+ VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18),/* Decoder/Parser detected errors in "bottom field" or "frame" */
+ VIDDEC_FW_WORKLOAD_ERR_BITSTREAM_ERROR = (1 << 19),/* Parser detected errors */
+
+};
+
+enum viddec_fw_mpeg2_error_codes
+{
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_HDR = (1 << 24),/* Parser detected corruption in sequence header. Will use the previous good sequence info, if found. */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_EXT = (1 << 25),/* Parser detected corruption in sequence extension. */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_DISP_EXT = (1 << 26),/* Parser detected corruption in sequence display extension. */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_GOP_HDR = (1 << 27),/* Parser detected corruption in GOP header. */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_HDR = (1 << 26),/* Parser detected corruption in picture header. */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_COD_EXT = (1 << 27),/* Parser detected corruption in picture coding extension. */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_DISP_EXT = (1 << 28),/* Parser detected corruption in picture display extension. */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT = (1 << 29),/* Parser detected corruption in quantization matrix extension. */
+};
+
+#endif
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h b/mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h
new file mode 100644
index 0000000..3a07af0
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h
@@ -0,0 +1,242 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+ + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +*/ + +#ifndef VIDDEC_FW_DECODER_HOST_H +#define VIDDEC_FW_DECODER_HOST_H + +#ifdef __cplusplus +extern "C" { +#endif + +#include "viddec_fw_common_defs.h" + +/** @weakgroup viddec Fw Decoder interface Functions */ +/** @ingroup viddec_fw_decoder */ +/*@{*/ + +/** + This function returns the size required for loading fw. + @retval size : Required size. +*/ + uint32_t viddec_fw_decoder_query_fwsize(void); + +/** + This function loads Decoder Firmware and initialises necessary state information. + @param[in] phys : Physical address on where firmware should be loaded. + @param[in] len : Length of data allocated at phys. + @retval VIDDEC_FW_SUCCESS : Successfully loaded firmware. + @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. + @retval VIDDEC_FW_NORESOURCES : Failed to allocate resources for Loading firmware. + @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. +*/ + uint32_t viddec_fw_decoder_loadfw(uint32_t phys, uint32_t len); + +/** + This function returns required size for global memory for all supported decoders. This is a synchronous message to FW. + @param[out] size : returns the size required. + @retval VIDDEC_FW_SUCCESS : Successfuly got required information from FW. + @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. +*/ + uint32_t viddec_fw_decoder_query_fwsize_scratchmem(uint32_t *size); + +/** + This function sets global memory for the firmware to use.This is a synchronous message to FW. + @param[in] phys : Physical address on where global memory starts. + @param[in] len : Length of data allocated at phys. + @retval VIDDEC_FW_SUCCESS : Successfully setup global memory. + @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. +*/ + uint32_t viddec_fw_decoder_set_fw_scratchmem(uint32_t phys, uint32_t len); + +/** + This function returns the size required opening a stream. This a synchronous message to FW. + @param[in] codec_type : Type of codec that we want information about. + @param[out] size : Size of memory required for opening a stream. 
+ @retval VIDDEC_FW_SUCCESS : Successfully talked to FW and got the required size.
+ @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+*/
+ uint32_t viddec_fw_decoder_query_streamsize(uint32_t codec_type, uint32_t *size);
+
+/**
+ This function opens the requested codec. This is a synchronous message to FW.
+ @param[in] codec_type : Type of codec that we want to open.
+ @param[in] phys : Physical address of allocated memory for this codec.
+ @param[in] priority : Priority of stream. 1 for realtime and 0 for background.
+ @param[out] strm_handle : Handle of the opened stream.
+ @retval VIDDEC_FW_SUCCESS : Successfully opened the stream.
+ @retval VIDDEC_FW_FAILURE : Failed to open a stream.
+*/
+ uint32_t viddec_fw_decoder_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority);
+
+
+/**
+ This function closes a stream. This is a synchronous message to FW.
+ @param[in] strm_handle : Handle of the stream to close.
+*/
+ void viddec_fw_decoder_closestream(uint32_t strm_handle);
+
+/**
+ This function gets the current status of the decoder workload queues. If the current stream is active we return the
+ number of input messages that can be written to the input queue and the number of messages in the output queue of the stream.
+
+ Normally this is called when the Host receives an interrupt from the decoder, in which case, before releasing the INT,
+ the Host will try its best to keep the FW busy. Normally when an interrupt is received it means at least one workload is
+ written into the output queue of a stream.
+ @param[in] strm_handle : The handle of the stream whose queue status we want to get.
+ @param[out] status : The status of each queue gets updated in here.
+ @retval VIDDEC_FW_SUCCESS : Successfully got the status information.
+ @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter, in this case an inactive stream.
+*/
+ uint32_t viddec_fw_decoder_get_queue_status(uint32_t strm_handle, viddec_fw_decoder_q_status_t *status);
+
+/**
+ This function flushes the current stream. This is a synchronous message to FW.
+ Before calling this function the host has to make sure the output queue of the firmware
+ is empty. After this function is executed the FW will read all entries in the input
+ wkld buffer queue into the output queue. After this operation the host has to read all entries
+ in the output queue again to finish the flush operation.
+ @param[in] flush_type : Type of flush we want to perform, e.g. flush and discard.
+ @param[in] strm_handle : Handle of the stream we want to flush.
+ @retval VIDDEC_FW_SUCCESS : Successfully flushed the stream.
+ @retval VIDDEC_FW_FAILURE : Failed to flush a stream.
+*/
+ uint32_t viddec_fw_decoder_flushstream(uint32_t strm_handle, uint32_t flush_type);
+
+/**
+ This function sends an input workload buffer. The host should provide the required frame buffers in this workload before
+ sending it to fw.
+ @param[in] strm_handle : The handle of the stream that we want to send the workload buffer to.
+ @param[in] cur_wkld : The workload buffer we want to send.
+ @retval VIDDEC_FW_SUCCESS : Successfully sent the message.
+ @retval VIDDEC_FW_PORT_FULL : Port to fw is full; unsuccessful in sending the message.
+*/
+ uint32_t viddec_fw_decoder_send(uint32_t strm_handle, ipc_msg_data *cur_wkld);
+
+/**
+ This function gets the decoded workload from fw.
+ @param[in] strm_handle : The handle of the stream that we want to read a workload from.
+ @param[out] cur_wkld : The workload descriptor.
+ @retval VIDDEC_FW_SUCCESS : Successfully received the message.
+ @retval VIDDEC_FW_PORT_EMPTY : Workload port is empty; unsuccessful in reading a wkld.
+*/
+ uint32_t viddec_fw_decoder_recv(uint32_t strm_handle, ipc_msg_data *cur_wkld);
+
+/**
+ This function unloads the Decoder Firmware and frees the resources allocated in load fw.
+ If this function is called before load fw it will crash with a segmentation fault.
+*/
+ void viddec_fw_decoder_deinit(void);
+
+/**
+ This function gets the major and minor revision numbers of the loaded firmware.
+ @param[out] major : The major revision number.
+ @param[out] minor : The minor revision number.
+ @param[out] build : The internal build number.
+*/
+ void viddec_fw_decoder_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build);
+
+/**
+ This function returns the interrupt status of all streams which need to be processed. A value of zero
+ means there are no active streams which generated this interrupt.
+*/
+ uint32_t viddec_fw_decoder_active_pending_interrupts(void);
+
+/**
+ This function clears the interrupts for all active streams represented by the status input parameter.
+ The status should always be a value that was returned by viddec_fw_decoder_active_pending_interrupts().
+ @param[in] status : The status value that was returned by viddec_fw_decoder_active_pending_interrupts().
+*/
+ void viddec_fw_decoder_clear_all_pending_interrupts(uint32_t status);
+
+/**
+ This function enables/disables interrupts for the specified stream.
+ @param[in] strm_handle : The handle of the stream that we want to enable or disable interrupts for.
+ @param[in] enable : Boolean value; if ==0 it means disable interrupts, else enable.
+ @retval VIDDEC_FW_SUCCESS : Successfully sent the message.
+ @retval VIDDEC_FW_INVALID_PARAM: An invalid stream handle was passed.
+*/
+ uint32_t viddec_fw_decoder_set_stream_interrupt_mask(uint32_t stream_handle, uint32_t enable);
+
+/**
+ This function returns which stream interrupted in the past based on status, which is a snapshot of
+ interrupt status that was cleared in the past. The host has to call clear with the status information
+ before calling this function again with the status value. The Host should do this operation until this function
+ returns 0, which means all the streams that generated the interrupt have been processed.
+ @param[out] strm_handle : The handle of a stream that generated an interrupt.
+ @param[in] status : Snapshot of interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts().
+ @retval 1 : A valid stream handle was found.
+ @retval 0 : No more streams from the status which caused the interrupt.
+*/
+ uint32_t viddec_fw_decoder_get_next_stream_from_interrupt_status(uint32_t status, uint32_t *stream_handle);
+
+/**
+ This function clears the stream_handle from the status snapshot that we got from viddec_fw_decoder_active_pending_interrupts().
+ This should be called after the host performs all necessary actions for the stream.
+ @param[in] strm_handle : The handle of a stream that we want to clear to indicate we handled it.
+ @param[in] status : Snapshot of interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts().
+ @retval 1 : Operation was successful.
+ @retval 0 : An invalid stream handle was passed.
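+
+ A sketch of the drain loop that this call forms together with
+ viddec_fw_decoder_active_pending_interrupts(), viddec_fw_decoder_get_next_stream_from_interrupt_status()
+ and viddec_fw_decoder_clear_all_pending_interrupts(), as described above (an assumed
+ host-side flow pieced together from these descriptions; handle_stream() is a
+ hypothetical host callback):
+
+ uint32_t snapshot = viddec_fw_decoder_active_pending_interrupts();
+ uint32_t status = snapshot, strm;
+ while (viddec_fw_decoder_get_next_stream_from_interrupt_status(status, &strm))
+ {
+ handle_stream(strm);
+ viddec_fw_decoder_clear_stream_from_interrupt_status(&status, strm);
+ }
+ viddec_fw_decoder_clear_all_pending_interrupts(snapshot);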
+*/ + uint32_t viddec_fw_decoder_clear_stream_from_interrupt_status(uint32_t *status, uint32_t stream_handle); + +/*@}*/ +#ifdef __cplusplus +} +#endif + +#endif//#ifndef VIDDEC_FW_DECODER_HOST_H diff --git a/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h b/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h new file mode 100644 index 0000000..a816dd4 --- /dev/null +++ b/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h @@ -0,0 +1,281 @@ +/* + This file is provided under a dual BSD/GPLv2 license. When using or + redistributing this file, you may do so under either license. + + GPL LICENSE SUMMARY + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + + This program is free software; you can redistribute it and/or modify + it under the terms of version 2 of the GNU General Public License as + published by the Free Software Foundation. + + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. + The full GNU General Public License is included in this distribution + in the file called LICENSE.GPL. + + Contact Information: + + BSD LICENSE + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +*/ +#ifndef VIDDEC_FW_FRAME_ATTR_H +#define VIDDEC_FW_FRAME_ATTR_H + +#define VIDDEC_PANSCAN_MAX_OFFSETS 4 +#define VIDDEC_MAX_CPB_CNT 32 + +/** +This enumeration lists all the frame types defined by the MPEG, VC1 and H264 specifications. +Frame types applicable to a single codec are specified in the comments. 
+*/ +typedef enum +{ + VIDDEC_FRAME_TYPE_INVALID=0, /** Unknown type - default value */ + VIDDEC_FRAME_TYPE_IDR=0x1, /** IDR frame - h264 only */ + VIDDEC_FRAME_TYPE_I=0x2, /** I frame */ + VIDDEC_FRAME_TYPE_P=0x3, /** P frame */ + VIDDEC_FRAME_TYPE_B=0x4, /** B frame */ + VIDDEC_FRAME_TYPE_BI=0x5, /** BI frame - Intracoded B frame - vc1 only */ + VIDDEC_FRAME_TYPE_SKIP=0x6, /** Skipped frame - vc1 only */ + VIDDEC_FRAME_TYPE_D=0x7, /** D frame - mpeg1 only */ + VIDDEC_FRAME_TYPE_S=0x8, /** SVOP frame - mpeg4 only - sprite encoded frame - treat as P */ + VIDDEC_FRAME_TYPE_MAX, +} viddec_frame_type_t; + +/** +This structure contains the content size info extracted from the stream. +*/ +typedef struct viddec_rect_size +{ + unsigned int width; + unsigned int height; +}viddec_rect_size_t; + +/** +This structure contains MPEG2 specific pan scan offsets extracted from the stream. +*/ +typedef struct viddec_mpeg2_frame_center_offset +{ + int horz; + int vert; +}viddec_mpeg2_frame_center_offset_t; + +/** +This structure contains the MPEG2 specific frame attributes. +*/ +typedef struct viddec_mpeg2_frame_attributes +{ + /** + 10 bit unsigned integer corresponding to the display order of each coded picture + in the stream (or gop if gop header is present). + Refer to "temporal_reference" of the picture header in ITU-T H.262 Specification. + */ + unsigned int temporal_ref; + + /** + Pan/Scan rectangle info + Refer to the picture display extension in ITU-T H.262 Specification. + */ + viddec_mpeg2_frame_center_offset_t frame_center_offset[VIDDEC_PANSCAN_MAX_OFFSETS]; + unsigned int number_of_frame_center_offsets; + + /** + Top-Field first flag + Refer to "top_field_first" of the picture coding extension in ITU-T H.262 Specification. + */ + unsigned int top_field_first; + + /** + Progressive frame flag - Indicates if current frame is progressive or not. + Refer to "progressive_frame" of the picture coding extension in ITU-T H.262 Specification. + */ + unsigned int progressive_frame; + + /** + Frame/field polarity for each coded picture. + Refer to Table 6-14 in ITU-T H.262 Specification. + */ + unsigned int picture_struct; + + /** + Repeat field/frame flag. + Refer to "repeat_first_field" of the picture coding extension in ITU-T H.262 Specification. + */ + unsigned int repeat_first_field; + +}viddec_mpeg2_frame_attributes_t; + +/** +This structure contains MPEG2 specific pan scan offsets extracted from the stream. +*/ +typedef struct viddec_vc1_pan_scan_window +{ + unsigned int hoffset; + unsigned int voffset; + unsigned int width; + unsigned int height; +}viddec_vc1_pan_scan_window_t; + +/** +This structure contains the VC1 specific frame attributes. +*/ +typedef struct viddec_vc1_frame_attributes +{ + /** + Temporal Reference of frame/field. + Refer to "TFCNTR" in the picture layer of the SMPTE VC1 Specification. + */ + unsigned int tfcntr; + + /** + Frame/field repeat information in the bitstream. + Refer to "RPTFRM", "TFF", "BFF" in the picture layer + of the SMPTE VC1 Specification. + */ + unsigned int rptfrm; + unsigned int tff; + unsigned int rff; + + /** + Pan-scan information in the bitstream. + Refer to "PANSCAN_FLAG" in the entrypoint layer, "PS_PRESENT", "PS_HOFFSET", "PS_VOFFSET", + "PS_WIDTH" and "PS_HEIGHT" in the picture layer of the SMPTE VC1 Specification. 
+ */ + unsigned int panscan_flag; + unsigned int ps_present; + unsigned int num_of_pan_scan_windows; + viddec_vc1_pan_scan_window_t pan_scan_window[VIDDEC_PANSCAN_MAX_OFFSETS]; + +}viddec_vc1_frame_attributes_t; + +/** +This structure contains the H264 specific frame attributes. +*/ +typedef struct viddec_h264_frame_attributes +{ + /** + used_for_reference : 1 means this frame is used as ref frame of others. 0 means no any frame ref to this frame + */ + ///// This flag hasn't been enable so far + unsigned int used_for_reference; + + + /** - + Picture Order Count for the current frame/field.- + This value is computed using information from the bitstream.- + Refer to Section 8.2.1, function 8-1 of the ITU-T H.264 Specification.- + */ + // These fileds will be supported in future + int top_field_poc; + int bottom_field_poc; + + /** + Display size, which is cropped from content size. + Currently, the cont_size is cropped, so this paramter is redundant, but in future, cont_size may be changed + */ + viddec_rect_size_t cropped_size; + + /** + top_field_first: 0 means bottom_field_POC is smaller than top_field_POC, else 1 + */ + unsigned int top_field_first; + + /** + field_frame_flag: 0 means all slice of this frame are frame-base encoded, else 1 + */ + unsigned int field_pic_flag; + +}viddec_h264_frame_attributes_t; + +/** +This structure contains the MPEG4 specific frame attributes. +*/ +typedef struct viddec_mpeg4_frame_attributes +{ + /** + Top-Field first flag + Refer to "top_field_first" of the Video Object Plane of the MPEG4 Spec. + */ + unsigned int top_field_first; + +}viddec_mpeg4_frame_attributes_t; + +/** +This structure groups all the frame attributes that are exported by the firmware. +The frame attributes are split into attributes that are common to all codecs and +that are specific to codec type. +As of this release, it is populated only for mpeg2 only. +*/ +typedef struct viddec_frame_attributes +{ + /** + Content size specified in the stream. + For MPEG2, refer to "horizontal_size_value, vertical_size_value" of the sequence header and + "horizontal_size_extension, vertical_size_extension" of the sequence extension in ITU-T H.262 Specification. + For H264, refer to "pic_width_in_mbs_minus1" and "pic_height_in_map_units_minus1" of the + sequence parameter set in ITU-T H.264 Specification. + For VC1, refer to "MAX_CODED_WIDTH" and "MAX_CODED_HEIGHT" in the sequence layer, + "CODED_SIZE_FLAG", "CODED_WIDTH" and "CODED_HEIGHT" in the entrypoint layer of the SMPTE VC1 Specification. + */ + viddec_rect_size_t cont_size; + + /** + Type of frame populated in the workload. + frame_type contains the frame type for progressive frame and the field type for the top field for interlaced frames. + bottom_field_type contains the field type for the bottom field for interlaced frames. + For MPEG2, refer to "picture_coding_type" in picture header (Table 6-12) in ITU-T H.262 Specification. + For H264, refer to "slice_type" in slice header (Table 7-6) in ITU-T H.264 Specification. + For VC1, refer to "PTYPE" and FPTYPE in the picture layer (Tables 33, 34, 35, 105) in SMPTE VC1 Specification. 
+ */ + viddec_frame_type_t frame_type; + viddec_frame_type_t bottom_field_type; + + /** Codec specific attributes */ + union + { + viddec_mpeg2_frame_attributes_t mpeg2; + viddec_vc1_frame_attributes_t vc1; + viddec_h264_frame_attributes_t h264; + viddec_mpeg4_frame_attributes_t mpeg4; + }; + +}viddec_frame_attributes_t; + +#endif /* VIDDEC_FRAME_ATTR_H */ diff --git a/mix_vbp/viddec_fw/include/viddec_fw_item_types.h b/mix_vbp/viddec_fw/include/viddec_fw_item_types.h new file mode 100644 index 0000000..66e5f59 --- /dev/null +++ b/mix_vbp/viddec_fw/include/viddec_fw_item_types.h @@ -0,0 +1,738 @@ +/* + This file is provided under a dual BSD/GPLv2 license. When using or + redistributing this file, you may do so under either license. + + GPL LICENSE SUMMARY + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + + This program is free software; you can redistribute it and/or modify + it under the terms of version 2 of the GNU General Public License as + published by the Free Software Foundation. + + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. + The full GNU General Public License is included in this distribution + in the file called LICENSE.GPL. + + Contact Information: + + BSD LICENSE + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +*/ +#ifndef VIDDEC_FW_ITEM_TYPES_H +#define VIDDEC_FW_ITEM_TYPES_H + + +/* The following macros are defined to pack data into 32 bit words. + mask: A 32 bit value of N 1 bits starting from lsb where N represents the length of data we are packing. + start: Bit start position of data we want. 
+      ex: If we want to pack height (16 bits) and width (16 bits), where width occupies bits (1:16) and height bits (17:32), these are
+          the start and mask values for width and height:
+          width:  start = 0   mask = 0xFFFF
+          height: start = 16  mask = 0xFFFF
+
+   extract: Extracts an unsigned-integer value from x_32, starting at bit offset start (0 to 31), with the length given by mask, and
+            returns it as an unsigned integer.
+   insert:  Inserts N bits from val_32, where N is the length given by mask, into x_32 at the bit offset given by start. val_32 is
+            expected to be an unsigned int of N bits starting with the lsb.
+*/
+
+#define viddec_fw_bitfields_extract(x_32, start, mask)          (((x_32) >> (start)) & (mask) )
+#define viddec_fw_bitfields_insert(x_32, val_32, start, mask)   ((x_32) = (((x_32) & ~( (mask) << (start))) | (((val_32) & (mask)) << (start))))
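+
+/* Illustrative use of the two macros above (an editorial sketch, not part of the
+   original header; the variable names are made up):
+
+       unsigned int dims = 0;
+       viddec_fw_bitfields_insert(dims, 720, 0, 0xFFFF);   // pack width into bits 0..15
+       viddec_fw_bitfields_insert(dims, 480, 16, 0xFFFF);  // pack height into bits 16..31
+       unsigned int width  = viddec_fw_bitfields_extract(dims, 0, 0xFFFF);   // 720
+       unsigned int height = viddec_fw_bitfields_extract(dims, 16, 0xFFFF);  // 480
+*/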
+
+/* Workload item types. Each item here represents data that the parser detected, e.g. slice data, which
+   is used either by the host or the decoder. */
+enum workload_item_type
+{
+    VIDDEC_WORKLOAD_INVALID=0x0,                        /* Unknown type */
+    VIDDEC_WORKLOAD_PIXEL_ES=0x100,                     /* Slice data tag */
+    VIDDEC_WORKLOAD_TAG=0x200,                          /* Frame association tag */
+    VIDDEC_WORKLOAD_USERDATA=0x300,                     /* user data tag */
+    VIDDEC_WORKLOAD_DECODER_INFO=0x400,                 /* decoder specific data tag which the decoder module understands */
+    VIDDEC_WORKLOAD_IBUF_DONE=0x500,                    /* Es buffer completely used tag */
+    VIDDEC_WORKLOAD_IBUF_CONTINUED=0x600,               /* Es buffer partially used tag */
+    VIDDEC_WORKLOAD_TAG_BUFFER_LOOSE_START=0x700,       /* ??? */
+    VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER=0x800,      /* Reorder frames in DPB tag */
+    VIDDEC_WORKLOAD_DISPLAY_FRAME=0x900,                /* Display order in DPB tag, for H264 NOT required??? */
+
+    VIDDEC_WORKLOAD_SEQUENCE_INFO=0xa00,                /* MPEG2 Seq Hdr, H264 SPS, VC1 SeqLayer */
+    VIDDEC_WORKLOAD_DISPLAY_INFO=0xb00,                 /* MPEG2 Seq Disp Ext, H264 VUI */
+    VIDDEC_WORKLOAD_GOP_INFO=0xc00,                     /* MPEG2 GOP, VC1 Entrypoint */
+    VIDDEC_WORKLOAD_SEQ_USER_DATA=0xd00,                /* MPEG2, VC1 Sequence Level User data */
+    VIDDEC_WORKLOAD_GOP_USER_DATA=0xe00,                /* MPEG2, VC1 Gop Level User data */
+    VIDDEC_WORKLOAD_FRM_USER_DATA=0xf00,                /* MPEG2 Picture User data, VC1 Frame User data */
+    VIDDEC_WORKLOAD_FLD_USER_DATA=0x1000,               /* MPEG2, VC1 Field User data */
+    VIDDEC_WORKLOAD_SLC_USER_DATA=0x1100,               /* VC1 Slice User data */
+    VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA=0x1200,        /* MPEG4 Visual Object User data */
+    VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA=0x1300,         /* MPEG4 Video Object Layer User data */
+
+    VIDDEC_WORKLOAD_MPEG2_SEQ_EXT=0x1150,               /* MPEG2 Only - Sequence Extension */
+    VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C=0x1200,      /* VC1 Only */
+
+    VIDDEC_WORKLOAD_H264_CROPPING=0x1400,               /* H264 only */
+    VIDDEC_WORKLOAD_H264_PAN_SCAN=0x1500,               /* H264 only */
+    VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO=0x2100,        /* H264 only */
+    VIDDEC_WORKLOAD_SEI_PIC_TIMING=0x1600,              /* H264 only */
+    VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT=0x1700,           /* H264 only */
+    VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED=0x1800,    /* H264 only */
+    VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED=0x1900,  /* H264 only */
+    VIDDEC_WORKLOAD_SEI_RECOVERY_POINT=0x1a00,          /* H264 only */
+    VIDDEC_WORKLOAD_IBUF_EOS=0x1b00,                    /* EOS tag on the last workload used for the current stream */
+    VIDDEC_WORKLOAD_IBUF_DISCONTINUITY=0x1c00,          /* Discontinuity tag on the first workload after a discontinuity */
+
+    VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ=0x1d00,        /* MPEG4 Only - Visual Sequence */
+    VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ=0x1e00,             /* MPEG4 Only - Video Object Layer */
+    VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ=0x1f00,         /* MPEG4 Only - Group of Video Object Planes */
+    VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT=0x2000,     /* MPEG4 Only - Video Plane with Short Header */
+
+    VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0 = 0x10000,       /* required reference frames tag; the last eight bits give the id of the frame in the dpb */
+    VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 = 0x20000,      /* release frames tag; the last eight bits give the id of the frame in the dpb */
+    VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 = 0x30000,      /* Display order in DPB tag, for H264 */
+    VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 = 0x40000,      /* Release frames but do not display, for H264 */
+    VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 = 0x50000,      /* Release list at EOS; the last eight bits give the id of the frame in the dpb */
+    VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 = 0x60000,      /* Display list at EOS; the last eight bits give the id of the frame in the dpb */
+
+    VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0 = 0x70000,       /* required for H264 as it needs the whole DPB for each frame */
+    VIDDEC_WORKLOAD_H264_REFR_LIST_0 = 0x80000,         /* ref list 0 for H264 */
+    VIDDEC_WORKLOAD_H264_REFR_LIST_1 = 0x90000,         /* ref list 1 for H264 */
+    VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY = 0xa0000,       /* eos items begin after this */
+    VIDDEC_WORKLOAD_DECODER_SPECIFIC = 0x100000,        /* pvt info for decoder tags */
+    VIDDEC_WORKLOAD_MAX,
+};
+
+/* 16-byte workload item */
+typedef struct viddec_workload_item
+{
+    enum workload_item_type vwi_type;
+    union
+    {
+        struct
+        {
+            unsigned int es_phys_addr;
+            unsigned int es_phys_len;
+            unsigned int es_flags;
+        }es;
+        struct
+        {
+            unsigned int tag_phys_addr;
+            unsigned int tag_phys_len;
+            unsigned int tag_value;
+        }tag;
+        struct
+        {
+            unsigned int data_offset;
+            unsigned int data_payload[2];
+        }data;
+        struct
+        {
+            signed int   reference_id;     /* Assigned by parser */
+            unsigned int luma_phys_addr;   /* assigned by host, for DM */
+            unsigned int chroma_phys_addr; /* assigned by host, for DM */
+        }ref_frame;
+        struct /* when vwi_type == VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER */
+        {
+            signed int   ref_table_offset; /* Index of first "reordered" */
+            /* index from Current[] for Next[offset+0], Ref[offset+1], Ref[offset+2], Ref[offset+3] */
+            unsigned int ref_reorder_00010203;
+            /* index from Current[] for Next[offset+4], Ref[offset+5], Ref[offset+6], Ref[offset+7] */
+            unsigned int ref_reorder_04050607;
+        } ref_reorder;
+        struct
+        {
+            /* we pack a maximum of 11 bytes of user data and 1 byte for size */
+            /* TODO: we can pack 12 bytes and use the bottom 8 bits of type to indicate the size */
+#define viddec_fw_get_user_data_size(x)  ((x)->user_data.size)
+#define viddec_fw_get_user_data(x)       (unsigned char *)&((x)->user_data.data_payload[0])
+            unsigned char size;
+            unsigned char data_payload[11];
+            /*
+              ITEM TYPES WHICH use this:
+              VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED, VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED,
+              VIDDEC_WORKLOAD_SEQ_USER_DATA, VIDDEC_WORKLOAD_GOP_USER_DATA,
+              VIDDEC_WORKLOAD_FRM_USER_DATA, VIDDEC_WORKLOAD_FLD_USER_DATA, VIDDEC_WORKLOAD_SLC_USER_DATA,
+            */
+        }user_data;
+        struct
+        {
+            // Sequence Header Item I (From LSB):
+            //    - horizontal_size_value    - 12 bits
+            //    - vertical_size_value      - 12 bits
+            //    - aspect_ratio_information - 4 bits
+            //    - frame_rate_code          - 4 bits
+            #define viddec_fw_mp2_sh_get_horizontal_size_value(x)     viddec_fw_bitfields_extract( (x)->seq_hdr_item_1,  0, 0xFFF)
+            #define viddec_fw_mp2_sh_get_vertical_size_value(x)       viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 12, 0xFFF)
+            #define viddec_fw_mp2_sh_get_aspect_ratio_information(x)  viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 24, 0xF)
+            #define viddec_fw_mp2_sh_get_frame_rate_code(x)           viddec_fw_bitfields_extract(
(x)->seq_hdr_item_1, 28, 0xF) + #define viddec_fw_mp2_sh_set_horizontal_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 0, 0xFFF) + #define viddec_fw_mp2_sh_set_vertical_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 12, 0xFFF) + #define viddec_fw_mp2_sh_set_aspect_ratio_information(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 24, 0xF) + #define viddec_fw_mp2_sh_set_frame_rate_code(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 28, 0xF) + unsigned int seq_hdr_item_1; + + // Sequence Header Item II (From LSB): + // - bit_rate_value - 18 bits + // - vbv_buffer_size_value - 10 bits + // - remaining pad bits + #define viddec_fw_mp2_sh_get_bit_rate_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 0, 0x3FFFF) + #define viddec_fw_mp2_sh_get_vbv_buffer_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 18, 0x3FF) + #define viddec_fw_mp2_sh_set_bit_rate_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 0, 0x3FFFF) + #define viddec_fw_mp2_sh_set_vbv_buffer_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 18, 0x3FF) + unsigned int seq_hdr_item_2; + + unsigned int pad; + } mp2_sh; // mp2 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO + struct + { + // Sequence Extension Item I (From LSB): + // - profile_and_level_indication - 8 bits + // - progressive_sequence - 1 bit + // - chroma_format - 2 bits + // - horizontal_size_extension - 2 bits + // - vertical_size_extension - 2 bits + // - bit_rate_extension - 12 bits + // - remaining pad bits + #define viddec_fw_mp2_se_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 0, 0xFF) + #define viddec_fw_mp2_se_get_progressive_sequence(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 8, 0x1) + #define viddec_fw_mp2_se_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 9, 0x3) + #define viddec_fw_mp2_se_get_horizontal_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 11, 0x3) + #define viddec_fw_mp2_se_get_vertical_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 13, 0x3) + #define viddec_fw_mp2_se_get_bit_rate_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 15, 0xFFF) + #define viddec_fw_mp2_se_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 0, 0xFF) + #define viddec_fw_mp2_se_set_progressive_sequence(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 8, 0x1) + #define viddec_fw_mp2_se_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 9, 0x3) + #define viddec_fw_mp2_se_set_horizontal_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 11, 0x3) + #define viddec_fw_mp2_se_set_vertical_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 13, 0x3) + #define viddec_fw_mp2_se_set_bit_rate_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 15, 0xFFF) + unsigned int seq_ext_item_1; + + // Sequence Extension Item II (From LSB): + // - vbv_buffer_size_extension - 8 bits + // - frame_rate_extension_n - 2 bits + // - frame_rate_extension_d - 5 bits + // - remaining pad bits + #define viddec_fw_mp2_se_get_vbv_buffer_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 0, 0xFF) + #define viddec_fw_mp2_se_get_frame_rate_extension_n(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 8, 0x3) + #define 
viddec_fw_mp2_se_get_frame_rate_extension_d(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 10, 0x1F) + #define viddec_fw_mp2_se_set_vbv_buffer_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 0, 0xFF) + #define viddec_fw_mp2_se_set_frame_rate_extension_n(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 8, 0x3) + #define viddec_fw_mp2_se_set_frame_rate_extension_d(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 10, 0x1F) + unsigned int seq_ext_item_2; + + unsigned int pad; + } mp2_se; // mp2 item of type VIDDEC_WORKLOAD_MPEG2_SEQ_EXT + struct + { + // Sequence Display Extension Item I (From LSB): + // - display_horizontal_size - 14 bits + // - display_vertical_size - 14 bits + // - video_format - 3 bits + // - color_description - 1 bit + #define viddec_fw_mp2_sde_get_display_horizontal_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 0, 0x3FFF) + #define viddec_fw_mp2_sde_get_display_vertical_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 14, 0x3FFF) + #define viddec_fw_mp2_sde_get_video_format(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 28, 0x7) + #define viddec_fw_mp2_sde_get_color_description(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 31, 0x1) + #define viddec_fw_mp2_sde_set_display_horizontal_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 0, 0x3FFF) + #define viddec_fw_mp2_sde_set_display_vertical_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 14, 0x3FFF) + #define viddec_fw_mp2_sde_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 28, 0x7) + #define viddec_fw_mp2_sde_set_color_description(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 31, 0x1) + unsigned int seq_disp_ext_item_1; + + // Sequence Display Extension II (From LSB): + // - color_primaries - 8 bits + // - transfer_characteristics - 8 bits + // - remaining pad bits + #define viddec_fw_mp2_sde_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 0, 0xFF) + #define viddec_fw_mp2_sde_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 8, 0xFF) + #define viddec_fw_mp2_sde_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 0, 0xFF) + #define viddec_fw_mp2_sde_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 8, 0xFF) + unsigned int seq_disp_ext_item_2; + + unsigned int pad; + } mp2_sde; // mp2 item of type VIDDEC_WORKLOAD_DISPLAY_INFO + struct + { + // Group of Pictures Header Item I (From LSB): + // - closed_gop - 1 bit + // - broken_link - 1 bit + // - remaining pad bits + #define viddec_fw_mp2_gop_get_closed_gop(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 0, 0x1) + #define viddec_fw_mp2_gop_get_broken_link(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 1, 0x1) + #define viddec_fw_mp2_gop_set_closed_gop(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 0, 0x1) + #define viddec_fw_mp2_gop_set_broken_link(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 1, 0x1) + unsigned int gop_hdr_item_1; + + unsigned int pad1; + unsigned int pad2; + } mp2_gop; // mp2 item of type VIDDEC_WORKLOAD_GOP_INFO + struct + { + #define viddec_fw_vc1_get_profile(x) viddec_fw_bitfields_extract((x)->size, 30, 0x3) + #define viddec_fw_vc1_set_profile(x, val) viddec_fw_bitfields_insert((x)->size, val, 30, 0x3) + + #define viddec_fw_vc1_get_level(x) 
viddec_fw_bitfields_extract((x)->size, 27, 0x7) + #define viddec_fw_vc1_set_level(x, val) viddec_fw_bitfields_insert((x)->size, val, 27, 0x7) + + #define viddec_fw_vc1_get_colordiff_format(x) viddec_fw_bitfields_extract((x)->size, 25, 0x3) + #define viddec_fw_vc1_set_colordiff_format(x, val) viddec_fw_bitfields_insert((x)->size, val, 25, 0x3) + + #define viddec_fw_vc1_get_pulldown(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1) + #define viddec_fw_vc1_set_pulldown(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1) + + #define viddec_fw_vc1_get_max_coded_width(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF) + #define viddec_fw_vc1_set_max_coded_width(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF) + + #define viddec_fw_vc1_get_max_coded_height(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF) + #define viddec_fw_vc1_set_max_coded_height(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF) + + #define viddec_fw_vc1_get_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1F) + #define viddec_fw_vc1_set_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1F) + + #define viddec_fw_vc1_get_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x7) + #define viddec_fw_vc1_set_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x7) + + #define viddec_fw_vc1_get_interlace(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1) + #define viddec_fw_vc1_set_interlace(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1) + + #define viddec_fw_vc1_get_tfcntrflag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1) + #define viddec_fw_vc1_set_tfcntrflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1) + + #define viddec_fw_vc1_get_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 2, 0x1) + #define viddec_fw_vc1_set_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 2, 0x1) + + #define viddec_fw_vc1_get_psf(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x1) + #define viddec_fw_vc1_set_psf(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x1) + + #define viddec_fw_vc1_get_display_ext(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1) + #define viddec_fw_vc1_set_display_ext(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1) + + uint32_t size; // profile:2, level:3, colordiff_format:2, pulldown:1, max_coded_width:12, max_coded_height:12 + uint32_t flags; // bitrtq_postproc:5, frmrtq_postproc:3, interlace:1, tfcntrflag:1, finterpflag:1, psf:1, display_ext:1 + uint32_t pad; + } vc1_sl; // vc1 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO + struct + { + // This item is populated when display_ext flag is set in the sequence layer + // therefore, no need to provide this flag + #define viddec_fw_vc1_get_disp_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 18, 0x3FFF) + #define viddec_fw_vc1_set_disp_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 18, 0x3FFF) + + #define viddec_fw_vc1_get_disp_vert_size(x) viddec_fw_bitfields_extract((x)->size, 4, 0x3FFF) + #define viddec_fw_vc1_set_disp_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 4, 0x3FFF) + + #define viddec_fw_vc1_get_disp_aspect_ratio_flag(x) viddec_fw_bitfields_extract((x)->size, 3, 0x1) + #define viddec_fw_vc1_set_disp_aspect_ratio_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 3, 0x1) + + #define viddec_fw_vc1_get_disp_color_format_flag(x) viddec_fw_bitfields_extract((x)->size, 2, 0x1) + #define viddec_fw_vc1_set_disp_color_format_flag(x, val) 
viddec_fw_bitfields_insert((x)->size, val, 2, 0x1) + + #define viddec_fw_vc1_get_disp_framerate_flag(x) viddec_fw_bitfields_extract((x)->size, 1, 0x1) + #define viddec_fw_vc1_set_disp_framerate_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 1, 0x1) + + #define viddec_fw_vc1_get_disp_framerateind(x) viddec_fw_bitfields_extract((x)->size, 0, 0x1) + #define viddec_fw_vc1_set_disp_framerateind(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0x1) + + #define viddec_fw_vc1_get_disp_aspect_ratio(x) viddec_fw_bitfields_extract((x)->framerate, 28, 0xF) + #define viddec_fw_vc1_set_disp_aspect_ratio(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 28, 0xF) + + #define viddec_fw_vc1_get_disp_frameratenr(x) viddec_fw_bitfields_extract((x)->framerate, 20, 0xFF) + #define viddec_fw_vc1_set_disp_frameratenr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 20, 0xFF) + + #define viddec_fw_vc1_get_disp_frameratedr(x) viddec_fw_bitfields_extract((x)->framerate, 16, 0xF) + #define viddec_fw_vc1_set_disp_frameratedr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 16, 0xF) + + #define viddec_fw_vc1_get_disp_framerateexp(x) viddec_fw_bitfields_extract((x)->framerate, 0, 0xFFFF) + #define viddec_fw_vc1_set_disp_framerateexp(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 0, 0xFFFF) + + #define viddec_fw_vc1_get_disp_aspect_ratio_horiz_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 24, 0xFF) + #define viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 24, 0xFF) + + #define viddec_fw_vc1_get_disp_aspect_ratio_vert_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 16, 0xFF) + #define viddec_fw_vc1_set_disp_aspect_ratio_vert_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 16, 0xFF) + + #define viddec_fw_vc1_get_disp_color_prim(x) viddec_fw_bitfields_extract((x)->aspectsize, 8, 0xFF) + #define viddec_fw_vc1_set_disp_color_prim(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 8, 0xFF) + + #define viddec_fw_vc1_get_disp_transfer_char(x) viddec_fw_bitfields_extract((x)->aspectsize, 0, 0xFF) + #define viddec_fw_vc1_set_disp_transfer_char(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 0, 0xFF) + + uint32_t size; // disp_horiz_size:14, disp_vert_size:14, aspect_ratio_flag:1, color_format_flag:1, framerate_flag:1, framerateind:1 + uint32_t framerate; // aspect_ratio:4, frameratenr:8, frameratedr:4, framerateexp:16 + uint32_t aspectsize; // aspect_ratio_horiz_size:8, aspect_ratio_vert_size:8, color_prim:8, transfer_char:8 + } vc1_sl_de; // vc1 item of type VIDDEC_WORKLOAD_DISPLAY_INFO + struct + { + #define viddec_fw_vc1_get_rcv_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 16, 0xFFFF) + #define viddec_fw_vc1_set_rcv_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 16, 0xFFFF) + + #define viddec_fw_vc1_get_rcv_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFFF) + #define viddec_fw_vc1_set_rcv_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFFF) + + #define viddec_fw_vc1_get_rcv_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 16, 0x1F) + #define viddec_fw_vc1_set_rcv_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 16, 0x1F) + + #define viddec_fw_vc1_get_rcv_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 13, 0x7) + #define viddec_fw_vc1_set_rcv_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 13, 0x7) + + #define viddec_fw_vc1_get_rcv_profile(x) 
viddec_fw_bitfields_extract((x)->flags, 9, 0xF) + #define viddec_fw_vc1_set_rcv_profile(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0xF) + + #define viddec_fw_vc1_get_rcv_level(x) viddec_fw_bitfields_extract((x)->flags, 6, 0x7) + #define viddec_fw_vc1_set_rcv_level(x, val) viddec_fw_bitfields_insert((x)->flags, val, 6, 0x7) + + #define viddec_fw_vc1_get_rcv_cbr(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x1) + #define viddec_fw_vc1_set_rcv_cbr(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x1) + + #define viddec_fw_vc1_get_rcv_rangered(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1) + #define viddec_fw_vc1_set_rcv_rangered(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1) + + #define viddec_fw_vc1_get_rcv_maxbframes(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x7) + #define viddec_fw_vc1_set_rcv_maxbframes(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x7) + + #define viddec_fw_vc1_get_rcv_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1) + #define viddec_fw_vc1_set_rcv_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1) + + uint32_t size; // horiz_size:16, vert_size:16 + uint32_t flags; // bitrtq_postproc:5, frmrtq_postproc:3, profile:4, level:3, cbr:1, rangered:1, maxbframes:3, finterpflag:1 + uint32_t pad; + } vc1_sh_struct_a_c; // vc1 item of type VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C + struct + { + #define viddec_fw_vc1_get_ep_size_flag(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1) + #define viddec_fw_vc1_set_ep_size_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1) + + #define viddec_fw_vc1_get_ep_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF) + #define viddec_fw_vc1_set_ep_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF) + + #define viddec_fw_vc1_get_ep_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF) + #define viddec_fw_vc1_set_ep_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF) + + #define viddec_fw_vc1_get_ep_broken_link(x) viddec_fw_bitfields_extract((x)->flags, 10, 0x1) + #define viddec_fw_vc1_set_ep_broken_link(x, val) viddec_fw_bitfields_insert((x)->flags, val, 10, 0x1) + + #define viddec_fw_vc1_get_ep_closed_entry(x) viddec_fw_bitfields_extract((x)->flags, 9, 0x1) + #define viddec_fw_vc1_set_ep_closed_entry(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0x1) + + #define viddec_fw_vc1_get_ep_panscan_flag(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1) + #define viddec_fw_vc1_set_ep_panscan_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1) + + #define viddec_fw_vc1_get_ep_range_mapy_flag(x) viddec_fw_bitfields_extract((x)->flags, 7, 0x1) + #define viddec_fw_vc1_set_ep_range_mapy_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 7, 0x1) + + #define viddec_fw_vc1_get_ep_range_mapy(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x7) + #define viddec_fw_vc1_set_ep_range_mapy(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x7) + + #define viddec_fw_vc1_get_ep_range_mapuv_flag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1) + #define viddec_fw_vc1_set_ep_range_mapuv_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1) + + #define viddec_fw_vc1_get_ep_range_mapuv(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x7) + #define viddec_fw_vc1_set_ep_range_mapuv(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x7) + + uint32_t size; // coded_size_flag:1, coded_width:12, coded_height:12 + uint32_t flags; // broken_link:1, closed_entry:1, panscan_flag:1, 
range_mapy_flag:1, range_mapy:3, range_mapuv_flag:1, range_mapuv:3 + uint32_t pad; + } vc1_ep; // vc1 item of type VIDDEC_WORKLOAD_GOP_INFO + struct + { + /* + 0-7 bits for profile_idc. + 8-15 bits for level_idc. + 16-17 bits for chroma_format_idc. + 18-22 bits for num_ref_frames. + 23 for gaps_in_frame_num_value_allowed_flag. + 24 for frame_mbs_only_flag. + 25 for frame_cropping_flag. + 26 for vui_parameters_present_flag. + */ +#define viddec_fw_h264_sps_get_profile_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 0, 0xFF) +#define viddec_fw_h264_sps_set_profile_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 0, 0xFF) +#define viddec_fw_h264_sps_get_level_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 8, 0xFF) +#define viddec_fw_h264_sps_set_level_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 8, 0xFF) +#define viddec_fw_h264_sps_get_chroma_format_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 16, 0x3) +#define viddec_fw_h264_sps_set_chroma_format_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 16, 0x3) +#define viddec_fw_h264_sps_get_num_ref_frames(x) viddec_fw_bitfields_extract( (x)->sps_messages, 18, 0x1F) +#define viddec_fw_h264_sps_set_num_ref_frames(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 18, 0x1F) +#define viddec_fw_h264_sps_get_gaps_in_frame_num_value_allowed_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 23, 0x1) +#define viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 23, 0x1) +#define viddec_fw_h264_sps_get_frame_mbs_only_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 24, 0x1) +#define viddec_fw_h264_sps_set_frame_mbs_only_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 24, 0x1) +#define viddec_fw_h264_sps_get_frame_cropping_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 25, 0x1) +#define viddec_fw_h264_sps_set_frame_cropping_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 25, 0x1) +#define viddec_fw_h264_sps_get_vui_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 26, 0x1) +#define viddec_fw_h264_sps_set_vui_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 26, 0x1) + unsigned int sps_messages; + unsigned int pic_width_in_mbs_minus1; + unsigned int pic_height_in_map_units_minus1; + } h264_sps; // h264 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO + + struct + { +#define viddec_fw_h264_cropping_get_left(x) viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF) +#define viddec_fw_h264_cropping_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF) +#define viddec_fw_h264_cropping_set_left(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF) +#define viddec_fw_h264_cropping_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF) + unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */ +#define viddec_fw_h264_cropping_get_top(x) viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF) +#define viddec_fw_h264_cropping_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF) +#define viddec_fw_h264_cropping_set_top(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF) +#define viddec_fw_h264_cropping_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF) + unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */ + unsigned int pad; + } 
h264_cropping; // h264 item of type VIDDEC_WORKLOAD_H264_CROPPING + + struct + { + /* 0 bit for aspect_ratio_info_present_flag + 1 st bit for video_signal_type_present_flag + 2 nd bit for colour_description_present_flag + 3 rd bit for timing_info_present_flag + 4 th bit for nal_hrd_parameters_present_flag + 5 th bit for vcl_hrd_parameters_present_flag + 6 th bit for fixed_frame_rate_flag + 7 th bit for pic_struct_present_flag + 8 th bit for low_delay_hrd_flag + 9,10,11 bits for video_format + */ +#define viddec_fw_h264_vui_get_aspect_ratio_info_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 0, 0x1) +#define viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 0, 0x1) +#define viddec_fw_h264_vui_get_video_signal_type_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 1, 0x1) +#define viddec_fw_h264_vui_set_video_signal_type_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 1, 0x1) +#define viddec_fw_h264_vui_get_colour_description_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 2, 0x1) +#define viddec_fw_h264_vui_set_colour_description_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 2, 0x1) +#define viddec_fw_h264_vui_get_timing_info_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 3, 0x1) +#define viddec_fw_h264_vui_set_timing_info_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 3, 0x1) +#define viddec_fw_h264_vui_get_nal_hrd_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 4, 0x1) +#define viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 4, 0x1) +#define viddec_fw_h264_vui_get_vcl_hrd_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 5, 0x1) +#define viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 5, 0x1) +#define viddec_fw_h264_vui_get_fixed_frame_rate_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 6, 0x1) +#define viddec_fw_h264_vui_set_fixed_frame_rate_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 6, 0x1) +#define viddec_fw_h264_vui_get_pic_struct_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 7, 0x1) +#define viddec_fw_h264_vui_set_pic_struct_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 7, 0x1) +#define viddec_fw_h264_vui_get_low_delay_hrd_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 8, 0x1) +#define viddec_fw_h264_vui_set_low_delay_hrd_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 8, 0x1) +#define viddec_fw_h264_vui_get_video_format(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 9, 0x7) +#define viddec_fw_h264_vui_set_video_format(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 9, 0x7) + unsigned int vui_flags_and_format; + +#define viddec_fw_h264_vui_get_aspect_ratio_idc(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 0, 0xFF) +#define viddec_fw_h264_vui_set_aspect_ratio_idc(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 0, 0xFF) +#define viddec_fw_h264_vui_get_colour_primaries(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 8, 0xFF) +#define viddec_fw_h264_vui_set_colour_primaries(x, val) 
viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 8, 0xFF) +#define viddec_fw_h264_vui_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 16, 0xFF) +#define viddec_fw_h264_vui_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 16, 0xFF) + /* lower 8 bits for aspect_ratio, next 8bits for color primaries and next 8 bits for transfer characteristics */ + unsigned int aspc_color_transfer; + +#define viddec_fw_h264_vui_get_sar_width(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 16, 0xFFFF) +#define viddec_fw_h264_vui_get_sar_height(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 0, 0xFFFF) +#define viddec_fw_h264_vui_set_sar_width(x, val) viddec_fw_bitfields_insert( (x)->sar_width_height, val, 16, 0xFFFF) +#define viddec_fw_h264_vui_set_sar_height(x, val) viddec_fw_bitfields_insert( (x)->sar_width_height, val, 0, 0xFFFF) + unsigned int sar_width_height; /* Lower 16 for height upper 16 for width */ + } h264_vui; // h264 item of type VIDDEC_WORKLOAD_DISPLAY_INFO + struct + { +#define viddec_fw_h264_vui_get_num_units_in_tick_flag(x) viddec_fw_bitfields_extract( (x)->num_units_in_tick, 0, 0xFFFFFFFF) +#define viddec_fw_h264_vui_set_num_units_in_tick_flag(x, val) viddec_fw_bitfields_insert( (x)->num_units_in_tick, val, 0, 0xFFFFFFFF) +#define viddec_fw_h264_vui_get_time_scale_flag(x) viddec_fw_bitfields_extract( (x)->time_scale, 0, 0xFFFFFFFF) +#define viddec_fw_h264_vui_set_time_scale_flag(x, val) viddec_fw_bitfields_insert( (x)->time_scale, val, 0, 0xFFFFFFFF) + unsigned int num_units_in_tick; + unsigned int time_scale; + unsigned int pad1; + } h264_vui_time_info; // VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO + struct + { + unsigned int pic_struct; /* 4 bit length */ + unsigned int pad1; + unsigned int pad2; + } h264_sei_pic_timing; // h264 item of type VIDDEC_WORKLOAD_SEI_PIC_TIMING + struct + { + unsigned int pan_scan_rect_id; + +#define viddec_fw_h264_sei_pan_scan_get_cancel_flag(x) viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 0, 0x1) +#define viddec_fw_h264_sei_pan_scan_get_cnt_minus1(x) viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 1, 0x3) +#define viddec_fw_h264_sei_pan_scan_set_cancel_flag(x, val) viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 0, 0x1) +#define viddec_fw_h264_sei_pan_scan_set_cnt_minus1(x, val) viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 1, 0x3) + unsigned int pan_scan_cancel_and_cnt; /* 0 bit for cancel flag and 2 bits for cnt_minus1 */ + unsigned int pan_scan_rect_repetition_period; + } h264_sei_pan_scan; // h264 item of type VIDDEC_WORKLOAD_H264_PAN_SCAN + + struct + { + +#define viddec_fw_h264_pan_scan_get_left(x) viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF) +#define viddec_fw_h264_pan_scan_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF) +#define viddec_fw_h264_pan_scan_set_left(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF) +#define viddec_fw_h264_pan_scan_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF) + unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */ + +#define viddec_fw_h264_pan_scan_get_top(x) viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF) +#define viddec_fw_h264_pan_scan_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF) +#define viddec_fw_h264_pan_scan_set_top(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF) +#define 
viddec_fw_h264_pan_scan_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF) + unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */ + + unsigned int pad; + } h264_pan_scan_rect; // h264 item of type VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT + struct + { + unsigned int recovery_frame_cnt; +#define viddec_fw_h264_h264_sei_recovery_get_exact_match_flag(x) viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 0, 0x1) +#define viddec_fw_h264_h264_sei_recovery_get_broken_link_flag(x) viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 1, 0x1) +#define viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(x, val) viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 0, 0x1) +#define viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(x, val) viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 1, 0x1) + unsigned int broken_and_exctmatch_flags; /* 0 bit for exact match, 1 bit for brokenlink */ + + unsigned int changing_slice_group_idc; /* 2bit value for slice_group idc */ + + } h264_sei_recovery_point; // h264 item of type VIDDEC_WORKLOAD_SEI_RECOVERY_POINT + + struct + { + // Visual Sequence (From LSB): + // - profile_and_level_indication - 8 bits + #define viddec_fw_mp4_vs_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->vs_item, 0, 0xFF) + #define viddec_fw_mp4_vs_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->vs_item, val, 0, 0xFF) + unsigned int vs_item; + + // Visual Object - video_signal_type + // - video_signal_type - 1b + // - video_format - 3b + // - video_range - 1b + // - colour_description - 1b + #define viddec_fw_mp4_vo_get_colour_description(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 5, 0x1) + #define viddec_fw_mp4_vo_set_colour_description(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 5, 0x1) + #define viddec_fw_mp4_vo_get_video_range(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 4, 0x1) + #define viddec_fw_mp4_vo_set_video_range(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 4, 0x1) + #define viddec_fw_mp4_vo_get_video_format(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 1, 0x7) + #define viddec_fw_mp4_vo_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 1, 0x7) + #define viddec_fw_mp4_vo_get_video_signal_type(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 0, 0x1) + #define viddec_fw_mp4_vo_set_video_signal_type(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 0, 0x1) + unsigned int video_signal_type; + + // Visual Object - video_signal_type + // - color_primaries - 8 bits + // - transfer_characteristics - 8 bits + #define viddec_fw_mp4_vo_get_transfer_char(x) viddec_fw_bitfields_extract( (x)->color_desc, 8, 0xFF) + #define viddec_fw_mp4_vo_set_transfer_char(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 8, 0xFF) + #define viddec_fw_mp4_vo_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->color_desc, 0, 0xFF) + #define viddec_fw_mp4_vo_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 0, 0xFF) + unsigned int color_desc; + } mp4_vs_vo; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ + + struct + { + // Video Object Layer(From LSB): + // - aspect_ratio_info - 4b + // - par_width - 8b + // - par_height - 8b + // - vol_control_param - 1b + // - chroma_format - 2b + // - interlaced - 1b + // - fixed_vop_rate - 1b + #define viddec_fw_mp4_vol_get_fixed_vop_rate(x) 
viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 24, 0x1) + #define viddec_fw_mp4_vol_set_fixed_vop_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 24, 0x1) + #define viddec_fw_mp4_vol_get_interlaced(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 23, 0x1) + #define viddec_fw_mp4_vol_set_interlaced(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 23, 0x1) + #define viddec_fw_mp4_vol_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 21, 0x3) + #define viddec_fw_mp4_vol_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 21, 0x3) + #define viddec_fw_mp4_vol_get_control_param(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 20, 0x1) + #define viddec_fw_mp4_vol_set_control_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 20, 0x1) + #define viddec_fw_mp4_vol_get_par_height(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 12, 0xFF) + #define viddec_fw_mp4_vol_set_par_height(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 12, 0xFF) + #define viddec_fw_mp4_vol_get_par_width(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 4, 0xFF) + #define viddec_fw_mp4_vol_set_par_width(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 4, 0xFF) + #define viddec_fw_mp4_vol_get_aspect_ratio_info(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 0, 0xF) + #define viddec_fw_mp4_vol_set_aspect_ratio_info(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 0, 0xF) + unsigned int vol_aspect_ratio; + + // Video Object Layer(From LSB): + // - vbv_parameters - 1b + // - bit_rate - 30b + #define viddec_fw_mp4_vol_get_bit_rate(x) viddec_fw_bitfields_extract( (x)->vol_bit_rate, 1, 0x3FFFFFFF) + #define viddec_fw_mp4_vol_set_bit_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 1, 0x3FFFFFFF) + #define viddec_fw_mp4_vol_get_vbv_param(x) viddec_fw_bitfields_extract( (x)->vol_bit_rate, 0, 0x1) + #define viddec_fw_mp4_vol_set_vbv_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 0, 0x1) + unsigned int vol_bit_rate; + + // Video Object Layer(From LSB): + // - fixed_vop_time_increment - 16b + // - vop_time_increment_resolution - 16b + #define viddec_fw_mp4_vol_get_vop_time_increment_resolution(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 16, 0xFFFF) + #define viddec_fw_mp4_vol_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 16, 0xFFFF) + #define viddec_fw_mp4_vol_get_fixed_vop_time_increment(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 1, 0xFFFF) + #define viddec_fw_mp4_vol_set_fixed_vop_time_increment(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 1, 0xFFFF) + unsigned int vol_frame_rate; + } mp4_vol; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ + + struct + { + // Group of Video Object Planes(From LSB): + // - time_code - 18b + // - closed_gov - 1b + // - broken_link - 1b + #define viddec_fw_mp4_gvop_get_broken_link(x) viddec_fw_bitfields_extract((x)->gvop_info, 19, 0x1) + #define viddec_fw_mp4_gvop_set_broken_link(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 19, 0x1) + #define viddec_fw_mp4_gvop_get_closed_gov(x) viddec_fw_bitfields_extract((x)->gvop_info, 18, 0x1) + #define viddec_fw_mp4_gvop_set_closed_gov(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 18, 0x1) + #define viddec_fw_mp4_gvop_get_time_code(x) viddec_fw_bitfields_extract((x)->gvop_info, 0, 0x3FFFF) + #define 
viddec_fw_mp4_gvop_set_time_code(x, val)    viddec_fw_bitfields_insert((x)->gvop_info, val, 0, 0x3FFFF)
+            unsigned int gvop_info;
+
+            unsigned int pad1;
+            unsigned int pad2;
+        } mp4_gvop; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ
+
+        struct
+        {
+            // Video Plane with Short Header (From LSB):
+            //    - source_format - 3b
+            #define viddec_fw_mp4_vpsh_get_source_format(x)       viddec_fw_bitfields_extract((x)->info, 0, 0x7)
+            #define viddec_fw_mp4_vpsh_set_source_format(x, val)  viddec_fw_bitfields_insert((x)->info, val, 0, 0x7)
+            unsigned int info;
+
+            unsigned int pad1;
+            unsigned int pad2;
+        } mp4_vpsh; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT
+
+        unsigned int vwi_payload[3];
+    };
+}viddec_workload_item_t;
+
+#endif /* VIDDEC_FW_ITEM_TYPES_H */
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_parser_host.h b/mix_vbp/viddec_fw/include/viddec_fw_parser_host.h
new file mode 100644
index 0000000..6d26555
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_parser_host.h
@@ -0,0 +1,237 @@
+/*
+  This file is provided under a dual BSD/GPLv2 license. When using or
+  redistributing this file, you may do so under either license.
+
+  GPL LICENSE SUMMARY
+
+  Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+  This program is free software; you can redistribute it and/or modify
+  it under the terms of version 2 of the GNU General Public License as
+  published by the Free Software Foundation.
+
+  This program is distributed in the hope that it will be useful, but
+  WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+  General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program; if not, write to the Free Software
+  Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+  The full GNU General Public License is included in this distribution
+  in the file called LICENSE.GPL.
+
+  Contact Information:
+
+  BSD LICENSE
+
+  Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+  All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions
+  are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+      notice, this list of conditions and the following disclaimer in
+      the documentation and/or other materials provided with the
+      distribution.
+    * Neither the name of Intel Corporation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT
+  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+
+#ifndef VIDDEC_FW_PARSER_HOST_H
+#define VIDDEC_FW_PARSER_HOST_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include "viddec_fw_common_defs.h"
+
+/** @weakgroup viddec Fw Parser interface Functions */
+/** @ingroup viddec_fw_parser */
+/*@{*/
+
+/**
+   This function returns the size required for loading the fw.
+   @retval  size : Required size.
+*/
+    uint32_t viddec_fw_parser_query_fwsize(void);
+
+/**
+   This function loads the Parser Firmware and initialises necessary state information. This is a synchronous message to the FW.
+   @param[in] phys                : Physical address where the firmware should be loaded.
+   @param[in] len                 : Length of data allocated at phys.
+   @retval VIDDEC_FW_SUCCESS      : Successfully loaded firmware.
+   @retval VIDDEC_FW_FAILURE      : Failed to communicate with firmware.
+   @retval VIDDEC_FW_NORESOURCES  : Failed to allocate resources for loading firmware.
+   @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+    uint32_t viddec_fw_parser_loadfw(uint32_t phys, uint32_t len);
+
+/**
+   This function returns the size required for opening a stream. This is a synchronous message to the FW.
+   @param[in] codec_type          : Type of codec that we want information about.
+   @param[out] num_wklds          : Number of wklds required for initialisation.
+   @param[out] size               : Size of memory required for opening a stream.
+*/
+    void viddec_fw_parser_query_streamsize(uint32_t codec_type, uint32_t *num_wklds, uint32_t *size);
+
+/**
+   This function opens the requested codec. This is a synchronous message to the FW.
+   @param[in] codec_type          : Type of codec that we want to open.
+   @param[in] phys                : Physical address of allocated memory for this codec.
+   @param[in] priority            : Priority of stream. 1 for realtime and 0 for background.
+   @param[out] strm_handle        : Handle of the opened stream.
+   @retval VIDDEC_FW_SUCCESS      : Successfully opened the stream.
+   @retval VIDDEC_FW_FAILURE      : Failed to open a stream.
+   @retval VIDDEC_FW_NORESOURCES  : Failed to open a stream as we are out of resources.
+*/
+    uint32_t viddec_fw_parser_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority);
+
+/**
+   This function closes a stream. This is a synchronous message to the FW.
+   For the close to be effective, the host has to flush with discard first and then close the stream.
+   @param[in] strm_handle         : Handle of the stream to close.
+*/
+    void viddec_fw_parser_closestream(uint32_t strm_handle);
+
+/**
+   This function flushes the current stream. This is a synchronous message to the FW.
+   Before calling this function the host has to make sure the output queue of the firmware
+   is empty. After this function is executed the FW will read all entries in the input
+   es buffer queue into a free or partial workload and push it into the output queue.
+   After this operation the host has to read all entries in the output queue again to
+   finish the flush operation.
+   @param[in] flush_type          : Type of flush we want to perform, e.g. flush and discard.
+   @param[in] strm_handle         : Handle of the stream we want to flush.
+   @retval VIDDEC_FW_SUCCESS      : Successfully flushed the stream.
+   @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+   @retval VIDDEC_FW_NEED_FREE_WKLD : Failed to flush since a free wkld was not available.
+*/
+    uint32_t viddec_fw_parser_flushstream(uint32_t strm_handle, uint32_t flush_type);
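+
+/*
+   Illustrative host-side flush sequence (an editorial sketch, not from the
+   original header; "handle" and "msg" are made-up names and error handling is
+   omitted):
+
+       while (viddec_fw_parser_recv(handle, &msg) == VIDDEC_FW_SUCCESS)
+           ;  // drain the output queue, as required before flushing
+       viddec_fw_parser_flushstream(handle, flush_type);
+       while (viddec_fw_parser_recv(handle, &msg) == VIDDEC_FW_SUCCESS)
+           ;  // read back the workloads produced by the flush
+*/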
+
+/**
+   This function sends an input es buffer.
+   @param[in] strm_handle         : The handle of the stream that we want to send the es buffer to.
+   @param[in] message             : The es buffer we want to send.
+   @retval VIDDEC_FW_SUCCESS      : Successfully sent the message.
+   @retval VIDDEC_FW_PORT_FULL    : Port to the FW is full; unsuccessful in sending the message.
+   @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+    uint32_t viddec_fw_parser_send(uint32_t strm_handle, ipc_msg_data *message);
+
+/**
+   This function gets the next processed workload. The host is required to add free workloads
+   to keep the parser busy. The FW will stall when it doesn't have enough workloads (2) to continue.
+   @param[in] strm_handle         : The handle of the stream that we want to read a workload from.
+   @param[out] message            : The workload descriptor.
+   @retval VIDDEC_FW_SUCCESS      : Successfully received the message.
+   @retval VIDDEC_FW_PORT_EMPTY   : Workload port is empty; unsuccessful in reading a wkld.
+   @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+    uint32_t viddec_fw_parser_recv(uint32_t strm_handle, ipc_msg_data *message);
+
+/**
+   This function adds a free workload to the current stream.
+   @param[in] strm_handle         : The handle of the stream that we want to write the workload to.
+   @param[out] message            : The workload descriptor.
+   @retval VIDDEC_FW_SUCCESS      : Successfully sent the message.
+   @retval VIDDEC_FW_PORT_FULL    : Workload port is full; unsuccessful in writing the wkld.
+   @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+    uint32_t viddec_fw_parser_addwkld(uint32_t strm_handle, ipc_msg_data *message);
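+
+/*
+   Illustrative steady-state pump (an editorial sketch, not from the original
+   header): since the FW stalls with fewer than two workloads, the host can
+   recycle every consumed workload back as a free one:
+
+       if (viddec_fw_parser_recv(handle, &wkld_msg) == VIDDEC_FW_SUCCESS)
+       {
+           // ... consume the workload items, then return it to the parser ...
+           viddec_fw_parser_addwkld(handle, &wkld_msg);
+       }
+*/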
+
+/**
+   This function enables or disables interrupts for a stream. By default the FW will always enable interrupts.
+   The driver can disable/enable interrupts as needed for this particular stream.
+
+   @param[in] strm_handle         : The handle of the stream that we want to set the mask for.
+   @param[in] mask                : This is read as a boolean variable: true to enable, false to disable.
+   @retval VIDDEC_FW_SUCCESS      : Successfully set the mask.
+   @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+    uint32_t viddec_fw_parser_set_interruptmask(uint32_t strm_handle, uint32_t mask);
+/**
+   This function gets the interrupt status for the current stream.
+   Since the interrupt is global, when the host gets interrupted it is expected to look at all active streams
+   by calling this function. The status is what the FW thinks the current state of the stream is. The status
+   information that the FW provides is complete information on all possible events that are defined. The host
+   should only access this information in its ISR, during which the FW does not modify it.
+
+   @param[in] strm_handle         : The handle of the stream that we want to get the status for.
+   @param[out] status             : The status of the stream based on the viddec_fw_parser_int_status_t enum.
+   @retval VIDDEC_FW_SUCCESS      : Successfully read the status.
+   @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+    uint32_t viddec_fw_parser_getstatus(uint32_t strm_handle, uint32_t *status);
+
+/**
+   This function sets stream attributes that are supported.
+   @param[in] strm_handle         : The handle of the stream that we want to set the attribute on.
+   @param[in] type                : The type of attribute we want to set; this should be one of the items in viddec_fw_stream_attributes_t.
+   @param[in] value               : The value of the type that we want to set.
+   @retval VIDDEC_FW_SUCCESS      : Successfully set the attribute.
+   @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+    uint32_t viddec_fw_parser_set_stream_attributes(uint32_t strm_handle, uint32_t type, uint32_t value);
+
+/**
+   This function gets the current status of all the parser queues. If the current stream is active, we return
+   the number of input messages that can be written to the input queue, the number of messages in the output
+   queue, and the number of free workloads the stream has available.
+   Normally this is called when the host receives an interrupt from the parser, in which case, before releasing
+   the INT, the host will try its best to keep the FW busy. We always get an interrupt if we passed the watermark
+   on the input or a workload was pushed into the output and the INT line is free. If the host holds on to the INT
+   while the firmware tries to raise one, the FW will send the interrupt after the host releases the INT. Since we
+   have EDGE-triggered interrupts we cannot guarantee one interrupt per frame; e.g., if three frames are generated
+   and after the first frame the FW was able to deliver an INT to the host, but the host held on to the INT while
+   the FW finished the next two frames, then after the host releases the INT the FW will give only one INT, and the
+   host should try to empty the output queue.
+   @param[in] strm_handle         : The handle of the stream whose queue status we want.
+   @param[out] status             : The status of each queue gets updated in here.
+   @retval VIDDEC_FW_SUCCESS      : Successfully got the status information.
+   @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter, in this case an inactive stream.
+*/
+    uint32_t viddec_fw_parser_get_queue_status(uint32_t strm_handle, viddec_fw_q_status_t *status);
+
+/**
+   This function unloads the Parser Firmware and frees the resources allocated when the fw was loaded.
+   If this function is called before the fw is loaded, it will crash with a segmentation fault.
+*/
+    void viddec_fw_parser_deinit(void);
+
+/**
+   This function gets the major and minor revision numbers of the loaded firmware.
+   @param[out] major              : The major revision number.
+   @param[out] minor              : The minor revision number.
+   @param[out] build              : The internal build number.
+*/
+    void viddec_fw_parser_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build);
+
+/**
+   This function clears the global interrupt. This is the last thing the host calls before exiting its ISR.
+*/
+    void viddec_fw_parser_clear_global_interrupt(void);
+
+/*@}*/
+#ifdef __cplusplus
+}
+#endif
+
+#endif//#ifndef VIDDEC_FW_PARSER_HOST_H
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_workload.h b/mix_vbp/viddec_fw/include/viddec_fw_workload.h
new file mode 100644
index 0000000..73c5ab3
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_workload.h
@@ -0,0 +1,152 @@
+/*
+  This file is provided under a dual BSD/GPLv2 license. When using or
+  redistributing this file, you may do so under either license.
+
+  GPL LICENSE SUMMARY
+
+  Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+  This program is free software; you can redistribute it and/or modify
+  it under the terms of version 2 of the GNU General Public License as
+  published by the Free Software Foundation.
+
+  This program is distributed in the hope that it will be useful, but
+  WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_workload.h b/mix_vbp/viddec_fw/include/viddec_fw_workload.h
new file mode 100644
index 0000000..73c5ab3
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_workload.h
@@ -0,0 +1,152 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+   * Redistributions of source code must retain the above copyright
+     notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above copyright
+     notice, this list of conditions and the following disclaimer in
+     the documentation and/or other materials provided with the
+     distribution.
+   * Neither the name of Intel Corporation nor the names of its
+     contributors may be used to endorse or promote products derived
+     from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_WORKLOAD_H
+#define VIDDEC_FW_WORKLOAD_H
+
+#include
+#include "viddec_fw_item_types.h"
+#include "viddec_fw_frame_attr.h"
+#include "viddec_fw_common_defs.h"
+
+#define VIDDEC_WORKLOAD_FLAGS_ES_START_FRAME (1 << 0)
+#define VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE (1 << 1)
+#define VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE   (1 << 2)
+#define VIDDEC_WORKLOAD_FLAGS_ES_END_FRAME   (1 << 3)
+
+#define VIDDEC_FRAME_REFERENCE_IS_VALID (0x1<<1)
+// PIP Output Frame request bits
+#define BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE 24
+#define BMSK_VIDDEC_FRAME_REFERENCE_PIP_MODE (0x3<<BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE)
+PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+if test "x$HAVE_GLIB" = "xno"; then
+  AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no)
+if test "x$HAVE_GOBJECT" = "xno"; then
+  AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+PKG_CHECK_MODULES(GTHREAD, gthread-2.0 >= $GLIB_REQ,HAVE_GTHREAD=yes,HAVE_GTHREAD=no)
+if test "x$HAVE_GTHREAD" = "xno"; then
+  AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+LIBVA_REQ=0.31
+PKG_CHECK_MODULES(LIBVA, libva >= $LIBVA_REQ,HAVE_LIBVA=yes,HAVE_LIBVA=no)
+if test "x$HAVE_LIBVA" = "xno"; then
+  AC_MSG_ERROR(You need libva development package installed !)
+fi +#LIBVA_CFLAGS="-I/usr/local/include" +#LIBVA_LIBS="-lva" + + +LIBVA_X11_REQ=0.31 +PKG_CHECK_MODULES(LIBVA_X11, libva-x11 >= $LIBVA_X11_REQ,HAVE_LIBVA_X11=yes,HAVE_LIBVA_X11=no) +if test "x$HAVE_LIBVA_X11" = "xno"; then + AC_MSG_ERROR(You need libva development package installed !) +fi +#LIBVA_X11_CFLAGS="-I/usr/local/include" +#LIBVA_X11LIBS="-lva-x11" + + +MIXCOMMON_REQ=0.1 +PKG_CHECK_MODULES(MIXCOMMON, mixcommon >= $MIXCOMMON_REQ, HAVE_MIXCOMMON=yes, HAVE_MIXCOMMON=no) +if test "x$HAVE_MIXCOMMON" = "xno"; then + AC_MSG_ERROR(You need mixcommon development package installed !) +fi + +MIXVBP_REQ=0.1 +PKG_CHECK_MODULES(MIXVBP, mixvbp >= $MIXVBP_REQ, HAVE_MIXVBP=yes, HAVE_MIXVBP=no) +if test "x$HAVE_MIXVBP" = "xno"; then + AC_MSG_ERROR(You need mixvbp development package installed !) +fi + +dnl Check for documentation xrefs +dnl GLIB_PREFIX="`$PKG_CONFIG --variable=prefix glib-2.0`" +dnl AC_SUBST(GLIB_PREFIX) + +AC_SUBST(GLIB_CFLAGS) +AC_SUBST(GLIB_LIBS) +AC_SUBST(GOBJECT_CFLAGS) +AC_SUBST(GOBJECT_LIBS) +AC_SUBST(MIX_CFLAGS) +AC_SUBST(GTHREAD_CFLAGS) +AC_SUBST(GTHREAD_LIBS) +AC_SUBST(LIBVA_CFLAGS) +AC_SUBST(LIBVA_LIBS) +AC_SUBST(MIXCOMMON_CFLAGS) +AC_SUBST(MIXCOMMON_LIBS) +AC_SUBST(MIXVBP_CFLAGS) +AC_SUBST(MIXVBP_LIBS) + + +#check for gtk-doc +#GTK_DOC_CHECK(1.9) + +AC_CONFIG_FILES([ +mixvideo.pc +mixvideoint.pc +Makefile +src/Makefile +]) + +dnl Additional Makefiles if we are building document with gtkdoc. +dnl Un-comment this section to enable building of documentation. +dnl AC_CONFIG_FILES( +dnl docs/Makefile +dnl docs/reference/Makefile +#dnl docs/reference/MixVideo/Makefile +dnl ) + +AC_OUTPUT + + diff --git a/mix_video/m4/Makefile.am b/mix_video/m4/Makefile.am new file mode 100644 index 0000000..66381d4 --- /dev/null +++ b/mix_video/m4/Makefile.am @@ -0,0 +1 @@ +EXTRA_DIST += diff --git a/mix_video/m4/as-mix-version.m4 b/mix_video/m4/as-mix-version.m4 new file mode 100644 index 0000000..f0301b1 --- /dev/null +++ b/mix_video/m4/as-mix-version.m4 @@ -0,0 +1,35 @@ +dnl as-mix-version.m4 + +dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE) + +dnl example +dnl AS_MIX_VERSION(mixvideo,MIXVIDEO, 0, 3, 2,) +dnl for a 0.3.2 release version + +dnl this macro +dnl - defines [$PREFIX]_MAJOR, MINOR and REVISION, CURRENT, AGE +dnl - defines [$PREFIX], VERSION +dnl - AC_SUBST's all defined vars + +AC_DEFUN([AS_MIX_VERSION], +[ + PACKAGE=[$1] + [$2]_MAJOR=[$3] + [$2]_MINOR=[$4] + [$2]_REVISION=[$5] + [$2]_CURRENT=m4_eval([$3] + [$4]) + [$2]_AGE=[$4] + VERSION=[$3].[$4].[$5] + + AC_SUBST([$2]_MAJOR) + AC_SUBST([$2]_MINOR) + AC_SUBST([$2]_REVISION) + AC_SUBST([$2]_CURRENT) + AC_SUBST([$2]_AGE) + + AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Define the package name]) + AC_SUBST(PACKAGE) + AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Define the version]) + AC_SUBST(VERSION) + +]) diff --git a/mix_video/mixvideo.pc.in b/mix_video/mixvideo.pc.in new file mode 100644 index 0000000..8666d24 --- /dev/null +++ b/mix_video/mixvideo.pc.in @@ -0,0 +1,12 @@ +prefix=@prefix@ +exec_prefix=@exec_prefix@ +libdir=@libdir@ +includedir=@includedir@ + +Name: MI-X Video +Description: MI-X Video Library +Requires: libva >= 0.30 +Version: @VERSION@ +Libs: -L${libdir} -l@PACKAGE@ +Cflags: -I${includedir}/mix + diff --git a/mix_video/mixvideo.spec b/mix_video/mixvideo.spec new file mode 100644 index 0000000..7be66bb --- /dev/null +++ b/mix_video/mixvideo.spec @@ -0,0 +1,67 @@ +# INTEL CONFIDENTIAL +# Copyright 2009 Intel Corporation All Rights Reserved. 
+# The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+#
+# No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+
+Summary: MIX Video
+Name: mixvideo
+Version: 0.1.14
+Release: 1
+Source0: %{name}-%{version}.tar.gz
+NoSource: 0
+License: Proprietary
+Group: System Environment/Libraries
+BuildRoot: %{_tmppath}/%{name}-root
+ExclusiveArch: i586
+Requires: glib2 , mixcommon, mixvbp
+
+%description
+MIX Video is a user library interface for various video codecs available on the platform.
+
+## devel package ##
+
+%package devel
+Summary: Library include files
+Group: Development/Libraries
+Requires: %{name} = %{version}, mixcommon-devel , glib2-devel, mixvbp-devel
+
+%description devel
+The %{name}-devel package contains the header files and static libraries for building applications which use %{name}.
+
+## internal devel package ##
+
+%package int-devel
+Summary: Library include files
+Group: Development/Libraries
+Requires: %{name} = %{version}, mixcommon-devel , glib2-devel, mixvbp-devel
+
+%description int-devel
+The %{name}-int-devel package contains the header files and static libraries for building applications which use %{name}.
+
+%prep
+%setup -q
+%build
+./autogen.sh
+./configure --prefix=%{_prefix}
+make
+%install
+make DESTDIR=$RPM_BUILD_ROOT install
+%clean
+rm -rf $RPM_BUILD_ROOT
+%files
+%defattr(-,root,root)
+%{_prefix}/lib/libmixvideo.so*
+
+%files devel
+%defattr(-,root,root)
+%{_prefix}/include/mix
+%{_prefix}/lib/*.la
+%{_prefix}/lib/pkgconfig/mixvideo.pc
+
+%files int-devel
+%defattr(-,root,root)
+%{_prefix}/include/mixvideoint
+%{_prefix}/lib/pkgconfig/mixvideoint.pc
+
+
diff --git a/mix_video/mixvideoint.pc.in b/mix_video/mixvideoint.pc.in
new file mode 100644
index 0000000..f1ff2d1
--- /dev/null
+++ b/mix_video/mixvideoint.pc.in
@@ -0,0 +1,12 @@
+prefix=@prefix@
+exec_prefix=@exec_prefix@
+libdir=@libdir@
+includedir=@includedir@
+
+Name: MI-X Video Internal
+Description: MI-X Video Library for internal development
+Requires: libva >= 0.30
+Version: @VERSION@
+Libs: -L${libdir} -l@PACKAGE@
+Cflags: -I${includedir}/mixvideoint
+
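For reference, the versioning pieces fit together as follows. Assuming mix_video's configure.ac invokes the AS_MIX_VERSION macro from m4/as-mix-version.m4 (shown earlier) with the 0.1.14 version that mixvideo.spec declares, the values flow into the libtool -version-info flag used in src/Makefile.am below:

    AS_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 14)
    # defines MIXVIDEO_MAJOR=0, MIXVIDEO_MINOR=1, MIXVIDEO_REVISION=14,
    #         MIXVIDEO_CURRENT=m4_eval(0+1)=1, MIXVIDEO_AGE=1, VERSION=0.1.14
    # src/Makefile.am passes -version-info 1:14:1 (current:revision:age);
    # on Linux libtool names the library lib<name>.so.(current-age).(age).(revision),
    # i.e. libmixvideo.so.0.1.14 with SONAME libmixvideo.so.0, which is what the
    # libmixvideo.so* glob in the spec's %files section picks up.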
diff --git a/mix_video/src/Makefile.am b/mix_video/src/Makefile.am
new file mode 100644
index 0000000..20c601b
--- /dev/null
+++ b/mix_video/src/Makefile.am
@@ -0,0 +1,136 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+
+lib_LTLIBRARIES = libmixvideo.la
+
+##############################################################################
+# sources used to compile
+libmixvideo_la_SOURCES = mixdisplay.c \
+	mixdrmparams.c \
+	mixvideo.c \
+	mixvideoconfigparams.c \
+	mixvideoconfigparamsdec.c \
+	mixvideoconfigparamsdec_vc1.c \
+	mixvideoconfigparamsdec_h264.c \
+	mixvideoconfigparamsdec_mp42.c \
+	mixvideoframe.c \
+	mixvideorenderparams.c \
+	mixdisplayx11.c \
+	mixvideocaps.c \
+	mixvideodecodeparams.c \
+	mixvideoinitparams.c \
+	mixvideoformat.c \
+	mixvideoformat_h264.c \
+	mixvideoformat_vc1.c \
+	mixvideoformat_mp42.c \
+	mixsurfacepool.c \
+	mixframemanager.c \
+	mixbufferpool.c \
+	mixbuffer.c \
+	mixvideoformatenc.c \
+	mixvideoformatenc_h264.c \
+	mixvideoformatenc_mpeg4.c \
+	mixvideoformatenc_preview.c \
+	mixvideoconfigparamsenc.c \
+	mixvideoconfigparamsenc_h264.c \
+	mixvideoconfigparamsenc_mpeg4.c \
+	mixvideoconfigparamsenc_preview.c \
+	mixvideoencodeparams.c
+
+if MIXLOG_ENABLED
+MIXLOG_CFLAGS = -DMIX_LOG_ENABLE
+else
+MIXLOG_CFLAGS =
+endif
+
+
+# flags used to compile this plugin
+# add other _CFLAGS and _LIBS as needed
+libmixvideo_la_CFLAGS = $(GLIB_CFLAGS) \
+	$(MIX_CFLAGS) \
+	$(MIXLOG_CFLAGS) \
+	$(GOBJECT_CFLAGS) \
+	$(GTHREAD_CFLAGS) \
+	$(LIBVA_CFLAGS) \
+	$(LIBVA_X11_CFLAGS) \
+	$(MIXCOMMON_CFLAGS) \
+	$(MIXVBP_CFLAGS) \
+	-DMIXVIDEO_CURRENT=@MIXVIDEO_CURRENT@ \
+	-DMIXVIDEO_AGE=@MIXVIDEO_AGE@ \
+	-DMIXVIDEO_REVISION=@MIXVIDEO_REVISION@
+
+libmixvideo_la_LIBADD = $(GLIB_LIBS) \
+	$(GOBJECT_LIBS) \
+	$(GTHREAD_LIBS) \
+	$(LIBVA_LIBS) \
+	$(LIBVA_X11_LIBS) \
+	$(MIXCOMMON_LIBS) \
+	$(MIXVBP_LIBS)
+
+libmixvideo_la_LDFLAGS = $(GLIB_LIBS) \
+	$(GOBJECT_LIBS) \
+	$(GTHREAD_LIBS) \
+	$(LIBVA_LIBS) \
+	$(LIBVA_X11_LIBS) \
+	$(MIXCOMMON_LIBS) \
+	$(MIXVBP_LIBS) \
+	-version-info @MIXVIDEO_CURRENT@:@MIXVIDEO_REVISION@:@MIXVIDEO_AGE@
+
+libmixvideo_la_LIBTOOLFLAGS = --tag=disable-static
+
+# headers we need but don't want installed
+noinst_HEADERS = mixvideoformat.h \
+	mixvideoformat_h264.h \
+	mixvideoformat_vc1.h \
+	mixvideoformat_mp42.h \
+	mixsurfacepool.h \
+	mixvideoframe_private.h \
+	mixbuffer_private.h \
+	mixframemanager.h \
+	mixbufferpool.h \
+	mixvideoformatqueue.h \
+	mixvideo_private.h \
+	mixvideorenderparams_internal.h \
+	mixvideoformatenc_h264.h \
+	mixvideoformatenc_mpeg4.h \
+	mixvideoformatenc_preview.h \
+	mixvideoformatenc.h \
+	mixvideolog.h
+
+# TODO: decide whether a /usr/include/mix is needed for mix headers
+mixincludedir=$(includedir)/mix
+mixinclude_HEADERS = mixvideodef.h \
+	mixdisplayx11.h \
+	mixvideoconfigparams.h \
+	mixvideoconfigparamsdec.h \
+	mixvideoconfigparamsdec_vc1.h \
+	mixvideoconfigparamsdec_h264.h \
+	mixvideoconfigparamsdec_mp42.h \
+	mixvideoframe.h \
+	mixvideoinitparams.h \
+	mixdisplay.h \
+	mixdrmparams.h \
+	mixvideocaps.h \
+	mixvideodecodeparams.h \
+	mixvideoencodeparams.h \
+	mixvideo.h \
+	mixvideorenderparams.h \
+	mixbuffer.h \
+	mixvideoconfigparamsenc_h264.h \
+	mixvideoconfigparamsenc_mpeg4.h \
+	mixvideoconfigparamsenc_preview.h \
+	mixvideoconfigparamsenc.h
+
+
+mixintincludedir=$(includedir)/mixvideoint
+mixintinclude_HEADERS = mixvideoformat.h \
+	mixframemanager.h \
+	mixsurfacepool.h \
+	mixbufferpool.h \
+	mixvideoformatqueue.h \
+	mixvideoframe_private.h \
+	mixvideoformat_vc1.h
diff --git a/mix_video/src/Makefile.old b/mix_video/src/Makefile.old
new file mode 100644
index 0000000..2bc3f79
--- /dev/null
+++ b/mix_video/src/Makefile.old
@@ -0,0 +1,40 @@
+INCL_CONFIG = `pkg-config --cflags glib-2.0 gobject-2.0` -g
+LIB_CONFIG = `pkg-config --libs glib-2.0 gobject-2.0` -lgthread-2.0 -g
+
+all:
+	gcc -c mixparams.c -o mixparams.o $(INCL_CONFIG)
+	gcc -c mixdrmparams.c -o mixdrmparams.o $(INCL_CONFIG)
+	gcc -c mixvideocaps.c -o mixvideocaps.o $(INCL_CONFIG)
+	gcc -c mixdisplay.c -o mixdisplay.o $(INCL_CONFIG)
+	gcc -c mixdisplayx11.c -o mixdisplayx11.o $(INCL_CONFIG)
+	gcc -c mixvideoinitparams.c -o mixvideoinitparams.o $(INCL_CONFIG)
+	gcc -c mixvideoconfigparams.c -o mixvideoconfigparams.o $(INCL_CONFIG)
+	gcc -c mixvideoconfigparams_h264.c -o mixvideoconfigparams_h264.o $(INCL_CONFIG)
+	gcc -c mixvideoconfigparams_vc1.c -o mixvideoconfigparams_vc1.o $(INCL_CONFIG)
+	gcc -c mixvideodecodeparams.c -o mixvideodecodeparams.o $(INCL_CONFIG)
+	gcc -c mixvideorenderparams.c -o mixvideorenderparams.o $(INCL_CONFIG)
+	gcc -c mixvideoframe.c -o mixvideoframe.o $(INCL_CONFIG)
+	gcc -c mixvideo.c -o mixvideo.o $(INCL_CONFIG)
+	gcc -c test.c -o test.o $(INCL_CONFIG)
+	gcc test.o \
+	mixvideo.o \
+	mixparams.o \
+	mixdrmparams.o \
+	mixvideorenderparams.o \
+	mixvideodecodeparams.o \
+	mixvideoconfigparams.o \
+	mixvideoconfigparams_vc1.o \
+	mixvideoconfigparams_h264.o \
+	mixvideoinitparams.o \
+	mixdisplay.o \
+	mixdisplayx11.o \
+	mixvideocaps.o \
+	mixvideoframe.o \
+	-o test $(LIB_CONFIG)
+
+clean:
+	rm *~
+	rm *.o
+	rm test
+
+
diff --git a/mix_video/src/mixbuffer.c b/mix_video/src/mixbuffer.c
new file mode 100644
index 0000000..6d51966
--- /dev/null
+++ b/mix_video/src/mixbuffer.c
@@ -0,0 +1,220 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixbuffer
+ * @short_description: MI-X video buffer object
+ *
+ * A data object which stores an input buffer of compressed video data.
+ */
+
+#include "mixvideolog.h"
+#include "mixbuffer.h"
+#include "mixbuffer_private.h"
+
+#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; }
+
+static GType _mix_buffer_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_buffer_type = g_define_type_id; }
+
+gboolean mix_buffer_copy(MixParams * target, const MixParams * src);
+MixParams *mix_buffer_dup(const MixParams * obj);
+gboolean mix_buffer_equal(MixParams * first, MixParams * second);
+static void mix_buffer_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixBuffer, mix_buffer, MIX_TYPE_PARAMS,
+		_do_init);
+
+static void mix_buffer_init(MixBuffer * self) {
+	/* initialize properties here */
+
+	MixBufferPrivate *priv = MIX_BUFFER_GET_PRIVATE(self);
+	self->reserved = priv;
+
+	priv->pool = NULL;
+
+	self->data = NULL;
+	self->size = 0;
+	self->token = 0;
+	self->callback = NULL;
+}
+
+static void mix_buffer_class_init(MixBufferClass * klass) {
+	MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+	/* setup static parent class */
+	parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+	mixparams_class->finalize = mix_buffer_finalize;
+	mixparams_class->copy = (MixParamsCopyFunction) mix_buffer_copy;
+	mixparams_class->dup = (MixParamsDupFunction) mix_buffer_dup;
+	mixparams_class->equal = (MixParamsEqualFunction) mix_buffer_equal;
+
+	/* Register and allocate space for the private structure of this object */
+	g_type_class_add_private(mixparams_class, sizeof(MixBufferPrivate));
+}
+
+MixBuffer *
+mix_buffer_new(void) {
+	MixBuffer *ret = (MixBuffer *) g_type_create_instance(MIX_TYPE_BUFFER);
+	return ret;
+}
+
+void mix_buffer_finalize(MixParams * obj) {
+	/* clean up here. */
+
+	/* MixBuffer *self = MIX_BUFFER(obj); */
+
+	/* Chain up parent */
+	if (parent_class->finalize) {
+		parent_class->finalize(obj);
+	}
+}
+
+MixBuffer *
+mix_buffer_ref(MixBuffer * mix) {
+	return (MixBuffer *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_buffer_dup:
+ * @obj: a #MixBuffer object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Create a duplicate of the object.
+ */
+MixParams *
+mix_buffer_dup(const MixParams * obj) {
+	MixParams *ret = NULL;
+
+	if (MIX_IS_BUFFER(obj)) {
+		MixBuffer *duplicate = mix_buffer_new();
+		if (mix_buffer_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+			ret = MIX_PARAMS(duplicate);
+		} else {
+			mix_buffer_unref(duplicate);
+		}
+	}
+	return ret;
+}
+
+/**
+ * mix_buffer_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicating whether the copy was successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_buffer_copy(MixParams * target, const MixParams * src) {
+	MixBuffer *this_target, *this_src;
+
+	if (MIX_IS_BUFFER(target) && MIX_IS_BUFFER(src)) {
+		// Cast the base object to this child object
+		this_target = MIX_BUFFER(target);
+		this_src = MIX_BUFFER(src);
+
+		// Copy the scalar members (the data pointer is shared, not duplicated)
+		this_target->data = this_src->data;
+		this_target->size = this_src->size;
+		this_target->token = this_src->token;
+		this_target->callback = this_src->callback;
+
+		// Now chainup base class
+		if (parent_class->copy) {
+			return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+					src));
+		} else {
+			return TRUE;
+		}
+	}
+	return FALSE;
+}
+
+/**
+ * mix_buffer_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the instances are equal.
+ *
+ * Compare the two objects for equivalence.
+ */
+gboolean mix_buffer_equal(MixParams * first, MixParams * second) {
+	gboolean ret = FALSE;
+	MixBuffer *this_first, *this_second;
+
+	if (MIX_IS_BUFFER(first) && MIX_IS_BUFFER(second)) {
+		// Deep compare
+		// Cast the base object to this child object
+
+		this_first = MIX_BUFFER(first);
+		this_second = MIX_BUFFER(second);
+
+		if (this_first->data == this_second->data && this_first->size
+				== this_second->size && this_first->token == this_second->token
+				&& this_first->callback == this_second->callback) {
+			// members within this scope equal. chaining up.
+			MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+			if (klass->equal)
+				ret = klass->equal(first, second);
+			else
+				ret = TRUE;
+		}
+	}
+
+	return ret;
+}
+
+#define MIX_BUFFER_SETTER_CHECK_INPUT(obj) \
+	if(!obj) return MIX_RESULT_NULL_PTR; \
+	if(!MIX_IS_BUFFER(obj)) return MIX_RESULT_FAIL; \
+
+
+MIX_RESULT mix_buffer_set_data(MixBuffer * obj, guchar *data, guint size,
+		gulong token, MixBufferCallback callback) {
+	MIX_BUFFER_SETTER_CHECK_INPUT (obj);
+
+	obj->data = data;
+	obj->size = size;
+	obj->token = token;
+	obj->callback = callback;
+
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_buffer_set_pool(MixBuffer *obj, MixBufferPool *pool) {
+
+	MIX_BUFFER_SETTER_CHECK_INPUT (obj);
+	MixBufferPrivate *priv = (MixBufferPrivate *) obj->reserved;
+	priv->pool = pool;
+
+	return MIX_RESULT_SUCCESS;
+}
+
+void mix_buffer_unref(MixBuffer * obj) {
+
+	// Unref through base class
+	mix_params_unref(MIX_PARAMS(obj));
+
+	LOG_I( "refcount = %d\n", MIX_PARAMS(obj)->refcount);
+
+	// Check if we have reduced to 1, in which case we add ourselves to free pool
+	if (MIX_PARAMS(obj)->refcount == 1) {
+		MixBufferPrivate *priv = (MixBufferPrivate *) obj->reserved;
+		g_return_if_fail(priv->pool != NULL);
+
+		if (obj->callback) {
+			obj->callback(obj->token, obj->data);
+		}
+		mix_bufferpool_put(priv->pool, obj);
+	}
+}
+
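A sketch of the calling pattern the implementation above is designed for (informational only; the pool variable and free_es_chunk() are stand-ins, while the mix_buffer_* and mix_bufferpool_get() calls are the real API, the latter declared in mixbufferpool.h later in this patch):

    static void free_es_chunk(gulong token, guchar *data)
    {
        /* Invoked from mix_buffer_unref() when the refcount drops back to 1. */
        g_free(data);
    }

    /* ... given a MixBufferPool *pool that has been initialized ... */
    MixBuffer *buf = NULL;
    if (mix_bufferpool_get(pool, &buf) == MIX_RESULT_SUCCESS) {
        guchar *chunk = g_malloc(4096);   /* compressed ES data goes here */
        mix_buffer_set_data(buf, chunk, 4096, 0 /* token */, free_es_chunk);
        /* ... hand buf to the decode path, which refs/unrefs it ... */
        mix_buffer_unref(buf);            /* at refcount 1 the callback fires
                                             and the buffer returns to its pool */
    }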
diff --git a/mix_video/src/mixbuffer.h b/mix_video/src/mixbuffer.h
new file mode 100644
index 0000000..53d2e1c
--- /dev/null
+++ b/mix_video/src/mixbuffer.h
@@ -0,0 +1,130 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_BUFFER_H__
+#define __MIX_BUFFER_H__
+
+#include
+#include "mixvideodef.h"
+
+/**
+ * MIX_TYPE_BUFFER:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_BUFFER (mix_buffer_get_type ())
+
+/**
+ * MIX_BUFFER:
+ * @obj: object to be type-casted.
+ */
+#define MIX_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_BUFFER, MixBuffer))
+
+/**
+ * MIX_IS_BUFFER:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixBuffer
+ */
+#define MIX_IS_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_BUFFER))
+
+/**
+ * MIX_BUFFER_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_BUFFER, MixBufferClass))
+
+/**
+ * MIX_IS_BUFFER_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixBufferClass
+ */
+#define MIX_IS_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_BUFFER))
+
+/**
+ * MIX_BUFFER_GET_CLASS:
+ * @obj: a #MixBuffer object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_BUFFER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_BUFFER, MixBufferClass))
+
+typedef void (*MixBufferCallback)(gulong token, guchar *data);
+
+typedef struct _MixBuffer MixBuffer;
+typedef struct _MixBufferClass MixBufferClass;
+
+/**
+ * MixBuffer:
+ *
+ * MI-X video buffer object
+ */
+struct _MixBuffer {
+	/*< public > */
+	MixParams parent;
+
+	/*< public > */
+	guchar *data;
+	guint size;
+	gulong token;
+	MixBufferCallback callback;
+
+	gpointer reserved;
+};
+
+/**
+ * MixBufferClass:
+ *
+ * MI-X video buffer object class
+ */
+struct _MixBufferClass {
+	/*< public > */
+	MixParamsClass parent_class;
+
+	/* class members */
+};
+
+/**
+ * mix_buffer_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_buffer_get_type(void);
+
+/**
+ * mix_buffer_new:
+ * @returns: A newly allocated instance of #MixBuffer
+ *
+ * Use this method to create new instance of #MixBuffer
+ */
+MixBuffer *mix_buffer_new(void);
+/**
+ * mix_buffer_ref:
+ * @mix: object to add reference
+ * @returns: the MixBuffer instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixBuffer *mix_buffer_ref(MixBuffer * mix);
+
+/**
+ * mix_buffer_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+void mix_buffer_unref(MixBuffer * obj);
+
+/* Class Methods */
+
+MIX_RESULT mix_buffer_set_data(MixBuffer * obj, guchar *data, guint size,
+		gulong token, MixBufferCallback callback);
+
+#endif /* __MIX_BUFFER_H__ */
diff --git a/mix_video/src/mixbuffer_private.h b/mix_video/src/mixbuffer_private.h
new file mode 100644
index 0000000..87c9c07
--- /dev/null
+++ b/mix_video/src/mixbuffer_private.h
@@ -0,0 +1,39 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise.
Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_BUFFER_PRIVATE_H__
+#define __MIX_BUFFER_PRIVATE_H__
+
+#include "mixbuffer.h"
+#include "mixbufferpool.h"
+
+typedef struct _MixBufferPrivate MixBufferPrivate;
+
+struct _MixBufferPrivate
+{
+  /*< private > */
+  MixBufferPool *pool;
+
+};
+
+/**
+* MIX_BUFFER_GET_PRIVATE:
+* @obj: object for which to get private data.
+*
+* Get the private structure of this class.
+*/
+#define MIX_BUFFER_GET_PRIVATE(obj) \
+   (G_TYPE_INSTANCE_GET_PRIVATE ((obj), MIX_TYPE_BUFFER, MixBufferPrivate))
+
+
+/* Private functions */
+MIX_RESULT
+mix_buffer_set_pool (MixBuffer *obj, MixBufferPool *pool);
+
+
+#endif /* __MIX_BUFFER_PRIVATE_H__ */
diff --git a/mix_video/src/mixbufferpool.c b/mix_video/src/mixbufferpool.c
new file mode 100644
index 0000000..9d9ad56
--- /dev/null
+++ b/mix_video/src/mixbufferpool.c
@@ -0,0 +1,484 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixbufferpool
+ * @short_description: MI-X Input Buffer Pool
+ *
+ * A data object which stores and manipulates a pool of compressed video buffers.
+ */
+
+#include "mixvideolog.h"
+#include "mixbufferpool.h"
+#include "mixbuffer_private.h"
+
+#define MIX_LOCK(lock) g_mutex_lock(lock);
+#define MIX_UNLOCK(lock) g_mutex_unlock(lock);
+
+#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; }
+
+static GType _mix_bufferpool_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_bufferpool_type = g_define_type_id; }
+
+gboolean mix_bufferpool_copy(MixParams * target, const MixParams * src);
+MixParams *mix_bufferpool_dup(const MixParams * obj);
+gboolean mix_bufferpool_equal(MixParams * first, MixParams * second);
+static void mix_bufferpool_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixBufferPool, mix_bufferpool, MIX_TYPE_PARAMS,
+		_do_init);
+
+static void mix_bufferpool_init(MixBufferPool * self) {
+	/* initialize properties here */
+	self->free_list = NULL;
+	self->in_use_list = NULL;
+	self->free_list_max_size = 0;
+	self->high_water_mark = 0;
+
+	self->reserved1 = NULL;
+	self->reserved2 = NULL;
+	self->reserved3 = NULL;
+	self->reserved4 = NULL;
+
+	// TODO: relocate this mutex allocation - we can't communicate failure in the ctor.
+	// Note that g_thread_init() has already been called by mix_video_init()
+	self->objectlock = g_mutex_new();
+
+}
+
+static void mix_bufferpool_class_init(MixBufferPoolClass * klass) {
+	MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+	/* setup static parent class */
+	parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+	mixparams_class->finalize = mix_bufferpool_finalize;
+	mixparams_class->copy = (MixParamsCopyFunction) mix_bufferpool_copy;
+	mixparams_class->dup = (MixParamsDupFunction) mix_bufferpool_dup;
+	mixparams_class->equal = (MixParamsEqualFunction) mix_bufferpool_equal;
+}
+
+MixBufferPool *
+mix_bufferpool_new(void) {
+	MixBufferPool *ret = (MixBufferPool *) g_type_create_instance(
+			MIX_TYPE_BUFFERPOOL);
+	return ret;
+}
+
+void mix_bufferpool_finalize(MixParams * obj) {
+	/* clean up here. */
+
+	MixBufferPool *self = MIX_BUFFERPOOL(obj);
+
+	if (self->objectlock) {
+		g_mutex_free(self->objectlock);
+		self->objectlock = NULL;
+	}
+
+	/* Chain up parent */
+	if (parent_class->finalize) {
+		parent_class->finalize(obj);
+	}
+}
+
+MixBufferPool *
+mix_bufferpool_ref(MixBufferPool * mix) {
+	return (MixBufferPool *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_bufferpool_dup:
+ * @obj: a #MixBufferPool object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Create a duplicate of the object.
+ */
+MixParams *
+mix_bufferpool_dup(const MixParams * obj) {
+	MixParams *ret = NULL;
+
+	if (MIX_IS_BUFFERPOOL(obj)) {
+
+		MIX_LOCK(MIX_BUFFERPOOL(obj)->objectlock);
+
+		MixBufferPool *duplicate = mix_bufferpool_new();
+		if (mix_bufferpool_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+			ret = MIX_PARAMS(duplicate);
+		} else {
+			mix_bufferpool_unref(duplicate);
+		}
+
+		MIX_UNLOCK(MIX_BUFFERPOOL(obj)->objectlock);
+
+	}
+	return ret;
+}
+
+/**
+ * mix_bufferpool_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicating whether the copy was successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_bufferpool_copy(MixParams * target, const MixParams * src) {
+	MixBufferPool *this_target, *this_src;
+
+	if (MIX_IS_BUFFERPOOL(target) && MIX_IS_BUFFERPOOL(src)) {
+
+		MIX_LOCK(MIX_BUFFERPOOL(src)->objectlock);
+		MIX_LOCK(MIX_BUFFERPOOL(target)->objectlock);
+
+		// Cast the base object to this child object
+		this_target = MIX_BUFFERPOOL(target);
+		this_src = MIX_BUFFERPOOL(src);
+
+		// Free the existing properties
+
+		// Copy the members (the lists are shared, not duplicated)
+		this_target->free_list = this_src->free_list;
+		this_target->in_use_list = this_src->in_use_list;
+		this_target->free_list_max_size = this_src->free_list_max_size;
+		this_target->high_water_mark = this_src->high_water_mark;
+
+		MIX_UNLOCK(MIX_BUFFERPOOL(src)->objectlock);
+		MIX_UNLOCK(MIX_BUFFERPOOL(target)->objectlock);
+
+		// Now chainup base class
+		if (parent_class->copy) {
+			return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+					src));
+		} else {
+			return TRUE;
+		}
+	}
+	return FALSE;
+}
+
+/**
+ * mix_bufferpool_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the instances are equal.
+ *
+ * Compare the two objects for equivalence.
+ */
+gboolean mix_bufferpool_equal(MixParams * first, MixParams * second) {
+	gboolean ret = FALSE;
+	MixBufferPool *this_first, *this_second;
+
+	if (MIX_IS_BUFFERPOOL(first) && MIX_IS_BUFFERPOOL(second)) {
+		// Deep compare
+		// Cast the base object to this child object
+
+		MIX_LOCK(MIX_BUFFERPOOL(first)->objectlock);
+		MIX_LOCK(MIX_BUFFERPOOL(second)->objectlock);
+
+		this_first = MIX_BUFFERPOOL(first);
+		this_second = MIX_BUFFERPOOL(second);
+
+		/* TODO: add comparison for other properties */
+		if (this_first->free_list == this_second->free_list
+				&& this_first->in_use_list == this_second->in_use_list
+				&& this_first->free_list_max_size
+						== this_second->free_list_max_size
+				&& this_first->high_water_mark == this_second->high_water_mark) {
+			// members within this scope equal. chaining up.
+			MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+			if (klass->equal)
+				ret = klass->equal(first, second);
+			else
+				ret = TRUE;
+		}
+
+		// Release the locks taken above
+		MIX_UNLOCK(MIX_BUFFERPOOL(first)->objectlock);
+		MIX_UNLOCK(MIX_BUFFERPOOL(second)->objectlock);
+
+	}
+
+	return ret;
+}
+
+/* Class Methods */
+
+/**
+ * mix_bufferpool_initialize:
+ * @returns: MIX_RESULT_SUCCESS if successful in creating the buffer pool
+ *
+ * Use this method to create a new buffer pool, consisting of a GSList of
+ * buffer objects that represents a pool of buffers.
+ */
+MIX_RESULT mix_bufferpool_initialize(MixBufferPool * obj, guint num_buffers) {
+
+	LOG_V( "Begin\n");
+
+	if (obj == NULL)
+		return MIX_RESULT_NULL_PTR;
+
+	MIX_LOCK(obj->objectlock);
+
+	if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) {
+		//buffer pool is in use; return error; need proper cleanup
+		//TODO need cleanup here?
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_ALREADY_INIT;
+	}
+
+	if (num_buffers == 0) {
+		obj->free_list = NULL;
+
+		obj->in_use_list = NULL;
+
+		obj->free_list_max_size = num_buffers;
+
+		obj->high_water_mark = 0;
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_SUCCESS;
+	}
+
+	// Initialize the free pool with MixBuffer objects
+
+	guint i = 0;
+	MixBuffer *buffer = NULL;
+
+	for (; i < num_buffers; i++) {
+
+		buffer = mix_buffer_new();
+
+		if (buffer == NULL) {
+			//TODO need to log an error here and do cleanup
+
+			MIX_UNLOCK(obj->objectlock);
+
+			return MIX_RESULT_NO_MEMORY;
+		}
+
+		// Set the pool reference in the private data of the MixBuffer object
+		mix_buffer_set_pool(buffer, obj);
+
+		//Add each MixBuffer object to the pool list
+		obj->free_list = g_slist_append(obj->free_list, buffer);
+
+	}
+
+	obj->in_use_list = NULL;
+
+	obj->free_list_max_size = num_buffers;
+
+	obj->high_water_mark = 0;
+
+	MIX_UNLOCK(obj->objectlock);
+
+	LOG_V( "End\n");
+
+	return MIX_RESULT_SUCCESS;
+}
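+
+/*
+ * Example (informational): the pool lifetime the functions in this file
+ * assume. Error handling is elided and NUM_BUFFERS is a stand-in constant;
+ * every call shown is implemented in this file or in mixbuffer.c.
+ *
+ *     MixBufferPool *pool = mix_bufferpool_new();
+ *     mix_bufferpool_initialize(pool, NUM_BUFFERS); // populates free_list
+ *
+ *     MixBuffer *buf = NULL;
+ *     mix_bufferpool_get(pool, &buf);    // moves buf to in_use_list, refs it
+ *     // ... fill buf via mix_buffer_set_data() and use it ...
+ *     mix_buffer_unref(buf);             // at refcount 1, mix_bufferpool_put()
+ *                                        // moves buf back to free_list
+ *
+ *     mix_bufferpool_deinitialize(pool); // all buffers must be back first
+ *     mix_bufferpool_unref(pool);
+ */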
+
+/**
+ * mix_bufferpool_put:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to return a buffer to the free pool
+ */
+MIX_RESULT mix_bufferpool_put(MixBufferPool * obj, MixBuffer * buffer) {
+
+	if (obj == NULL || buffer == NULL)
+		return MIX_RESULT_NULL_PTR;
+
+	MIX_LOCK(obj->objectlock);
+
+	if (obj->in_use_list == NULL) {
+		//in use list cannot be empty if a buffer is in use
+		//TODO need better error code for this
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_FAIL;
+	}
+
+	GSList *element = g_slist_find(obj->in_use_list, buffer);
+	if (element == NULL) {
+		//Integrity error; buffer not found in in use list
+		//TODO need better error code and handling for this
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_FAIL;
+	} else {
+		//Remove this element from the in_use_list
+		obj->in_use_list = g_slist_remove_link(obj->in_use_list, element);
+
+		//Concat the element to the free_list
+		obj->free_list = g_slist_concat(obj->free_list, element);
+	}
+
+	//Note that we do nothing with the ref count for this. We want it to
+	//stay at 1, which is what triggered it to be added back to the free list.
+
+	MIX_UNLOCK(obj->objectlock);
+
+	return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_bufferpool_get:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to get a buffer from the free pool
+ */
+MIX_RESULT mix_bufferpool_get(MixBufferPool * obj, MixBuffer ** buffer) {
+
+	if (obj == NULL || buffer == NULL)
+		return MIX_RESULT_NULL_PTR;
+
+	MIX_LOCK(obj->objectlock);
+
+	if (obj->free_list == NULL) {
+		//We are out of buffers
+		//TODO need to log this as well
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_POOLEMPTY;
+	}
+
+	//Remove a buffer from the free pool
+
+	//We just remove the one at the head, since it's convenient
+	GSList *element = obj->free_list;
+	if (element == NULL) {
+		//Unexpected behavior; the NULL check on free_list above makes this unreachable
+		//TODO need better error code and handling for this
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_FAIL;
+	} else {
+		obj->free_list = g_slist_remove_link(obj->free_list, element);
+
+		//Concat the element to the in_use_list
+		obj->in_use_list = g_slist_concat(obj->in_use_list, element);
+
+		//TODO replace with proper logging
+
+		LOG_I( "buffer refcount %d\n",
+				MIX_PARAMS(element->data)->refcount);
+
+		//Set the out buffer pointer
+		*buffer = (MixBuffer *) element->data;
+
+		//Check the high water mark for buffer use
+		guint size = g_slist_length(obj->in_use_list);
+		if (size > obj->high_water_mark)
+			obj->high_water_mark = size;
+		//TODO Log this high water mark
+	}
+
+	//Increment the reference count for the buffer
+	mix_buffer_ref(*buffer);
+
+	MIX_UNLOCK(obj->objectlock);
+
+	return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_bufferpool_deinitialize:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to teardown a buffer pool
+ */
+MIX_RESULT mix_bufferpool_deinitialize(MixBufferPool * obj) {
+	if (obj == NULL)
+		return MIX_RESULT_NULL_PTR;
+
+	MIX_LOCK(obj->objectlock);
+
+	if ((obj->in_use_list != NULL) || (g_slist_length(obj->free_list)
+			!= obj->free_list_max_size)) {
+		//TODO better error code
+		//We have outstanding buffer objects in use and they need to be
+		//freed before we can deinitialize.
+ + MIX_UNLOCK(obj->objectlock); + + return MIX_RESULT_FAIL; + } + + //Now remove buffer objects from the list + + MixBuffer *buffer = NULL; + + while (obj->free_list != NULL) { + //Get the buffer object from the head of the list + buffer = obj->free_list->data; + //buffer = g_slist_nth_data(obj->free_list, 0); + + //Release it + mix_buffer_unref(buffer); + + //Delete the head node of the list and store the new head + obj->free_list = g_slist_delete_link(obj->free_list, obj->free_list); + + //Repeat until empty + } + + obj->free_list_max_size = 0; + + //May want to log this information for tuning + obj->high_water_mark = 0; + + MIX_UNLOCK(obj->objectlock); + + return MIX_RESULT_SUCCESS; +} + +#define MIX_BUFFERPOOL_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_BUFFERPOOL(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_BUFFERPOOL_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_BUFFERPOOL(obj)) return MIX_RESULT_FAIL; \ + + +MIX_RESULT +mix_bufferpool_dumpbuffer(MixBuffer *buffer) +{ + LOG_I( "\tBuffer %x, ptr %x, refcount %d\n", (guint)buffer, + (guint)buffer->data, MIX_PARAMS(buffer)->refcount); + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT +mix_bufferpool_dumpprint (MixBufferPool * obj) +{ + //TODO replace this with proper logging later + + LOG_I( "BUFFER POOL DUMP:\n"); + LOG_I( "Free list size is %d\n", g_slist_length(obj->free_list)); + LOG_I( "In use list size is %d\n", g_slist_length(obj->in_use_list)); + LOG_I( "High water mark is %lu\n", obj->high_water_mark); + + //Walk the free list and report the contents + LOG_I( "Free list contents:\n"); + g_slist_foreach(obj->free_list, (GFunc) mix_bufferpool_dumpbuffer, NULL); + + //Walk the in_use list and report the contents + LOG_I( "In Use list contents:\n"); + g_slist_foreach(obj->in_use_list, (GFunc) mix_bufferpool_dumpbuffer, NULL); + + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixbufferpool.h b/mix_video/src/mixbufferpool.h new file mode 100644 index 0000000..bf32d0d --- /dev/null +++ b/mix_video/src/mixbufferpool.h @@ -0,0 +1,150 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_BUFFERPOOL_H__ +#define __MIX_BUFFERPOOL_H__ + +#include +#include "mixvideodef.h" +#include "mixbuffer.h" + +#include + +G_BEGIN_DECLS + +/** +* MIX_TYPE_BUFFERPOOL: +* +* Get type of class. +*/ +#define MIX_TYPE_BUFFERPOOL (mix_bufferpool_get_type ()) + +/** +* MIX_BUFFERPOOL: +* @obj: object to be type-casted. 
+*/ +#define MIX_BUFFERPOOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_BUFFERPOOL, MixBufferPool)) + +/** +* MIX_IS_BUFFERPOOL: +* @obj: an object. +* +* Checks if the given object is an instance of #MixBufferPool +*/ +#define MIX_IS_BUFFERPOOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_BUFFERPOOL)) + +/** +* MIX_BUFFERPOOL_CLASS: +* @klass: class to be type-casted. +*/ +#define MIX_BUFFERPOOL_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_BUFFERPOOL, MixBufferPoolClass)) + +/** +* MIX_IS_BUFFERPOOL_CLASS: +* @klass: a class. +* +* Checks if the given class is #MixBufferPoolClass +*/ +#define MIX_IS_BUFFERPOOL_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_BUFFERPOOL)) + +/** +* MIX_BUFFERPOOL_GET_CLASS: +* @obj: a #MixBufferPool object. +* +* Get the class instance of the object. +*/ +#define MIX_BUFFERPOOL_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_BUFFERPOOL, MixBufferPoolClass)) + +typedef struct _MixBufferPool MixBufferPool; +typedef struct _MixBufferPoolClass MixBufferPoolClass; + +/** +* MixBufferPool: +* +* MI-X Video Buffer Pool object +*/ +struct _MixBufferPool +{ + /*< public > */ + MixParams parent; + + /*< public > */ + GSList *free_list; /* list of free buffers */ + GSList *in_use_list; /* list of buffers in use */ + gulong free_list_max_size; /* initial size of the free list */ + gulong high_water_mark; /* most buffers in use at one time */ + + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; + + /*< private > */ + GMutex *objectlock; + +}; + +/** +* MixBufferPoolClass: +* +* MI-X Video Buffer Pool object class +*/ +struct _MixBufferPoolClass +{ + /*< public > */ + MixParamsClass parent_class; + + /* class members */ +}; + +/** +* mix_bufferpool_get_type: +* @returns: type +* +* Get the type of object. +*/ +GType mix_bufferpool_get_type (void); + +/** +* mix_bufferpool_new: +* @returns: A newly allocated instance of #MixBufferPool +* +* Use this method to create new instance of #MixBufferPool +*/ +MixBufferPool *mix_bufferpool_new (void); +/** +* mix_bufferpool_ref: +* @mix: object to add reference +* @returns: the MixBufferPool instance where reference count has been increased. +* +* Add reference count. +*/ +MixBufferPool *mix_bufferpool_ref (MixBufferPool * mix); + +/** +* mix_bufferpool_unref: +* @obj: object to unref. +* +* Decrement reference count of the object. +*/ +#define mix_bufferpool_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +MIX_RESULT mix_bufferpool_initialize (MixBufferPool * obj, + guint num_buffers); +MIX_RESULT mix_bufferpool_put (MixBufferPool * obj, + MixBuffer * buffer); + +MIX_RESULT mix_bufferpool_get (MixBufferPool * obj, + MixBuffer ** buffer); +MIX_RESULT mix_bufferpool_deinitialize (MixBufferPool * obj); + +G_END_DECLS + +#endif /* __MIX_BUFFERPOOL_H__ */ diff --git a/mix_video/src/mixdisplay.c b/mix_video/src/mixdisplay.c new file mode 100644 index 0000000..d6da0e9 --- /dev/null +++ b/mix_video/src/mixdisplay.c @@ -0,0 +1,539 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. 
The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** +* SECTION:mixdisplay +* @short_description: Lightweight base class for the MIX media display +* +*/ +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "mixdisplay.h" +#include + +#define DEBUG_REFCOUNT + +static void mix_display_class_init (gpointer g_class, gpointer class_data); +static void mix_display_init (GTypeInstance * instance, gpointer klass); + +static void mix_value_display_init (GValue * value); +static void mix_value_display_free (GValue * value); +static void mix_value_display_copy (const GValue * src_value, + GValue * dest_value); +static gpointer mix_value_display_peek_pointer (const GValue * value); +static gchar *mix_value_display_collect (GValue * value, + guint n_collect_values, + GTypeCValue * collect_values, + guint collect_flags); +static gchar *mix_value_display_lcopy (const GValue * value, + guint n_collect_values, + GTypeCValue * collect_values, + guint collect_flags); + +static void mix_display_finalize (MixDisplay * obj); +static gboolean mix_display_copy_default (MixDisplay * target, + const MixDisplay * src); +static MixDisplay *mix_display_dup_default (const MixDisplay * obj); +static gboolean mix_display_equal_default (MixDisplay * first, + MixDisplay * second); + +GType +mix_display_get_type (void) +{ + static GType _mix_display_type = 0; + + if (G_UNLIKELY (_mix_display_type == 0)) + { + + GTypeValueTable value_table = { + mix_value_display_init, + mix_value_display_free, + mix_value_display_copy, + mix_value_display_peek_pointer, + "p", + mix_value_display_collect, + "p", + mix_value_display_lcopy + }; + + GTypeInfo info = { + sizeof (MixDisplayClass), + NULL, + NULL, + mix_display_class_init, + NULL, + NULL, + sizeof (MixDisplay), + 0, + (GInstanceInitFunc) mix_display_init, + NULL + }; + + static const GTypeFundamentalInfo fundamental_info = { + (G_TYPE_FLAG_CLASSED | G_TYPE_FLAG_INSTANTIATABLE | + G_TYPE_FLAG_DERIVABLE | G_TYPE_FLAG_DEEP_DERIVABLE) + }; + + info.value_table = &value_table; + + _mix_display_type = g_type_fundamental_next (); + g_type_register_fundamental (_mix_display_type, "MixDisplay", + &info, &fundamental_info, + G_TYPE_FLAG_ABSTRACT); + + } + + return _mix_display_type; +} + +static void +mix_display_class_init (gpointer g_class, gpointer class_data) +{ + MixDisplayClass *klass = MIX_DISPLAY_CLASS (g_class); + + klass->dup = mix_display_dup_default; + klass->copy = mix_display_copy_default; + klass->finalize = mix_display_finalize; + klass->equal = mix_display_equal_default; +} + +static void +mix_display_init (GTypeInstance * instance, gpointer klass) +{ + MixDisplay *obj = MIX_DISPLAY_CAST (instance); + + obj->refcount = 1; +} + +gboolean +mix_display_copy (MixDisplay * target, const MixDisplay * src) +{ + /* Use the target object class. Because it knows what it is looking for. 
*/
+  MixDisplayClass *klass = MIX_DISPLAY_GET_CLASS (target);
+  if (klass->copy)
+    {
+      return klass->copy (target, src);
+    }
+  else
+    {
+      return mix_display_copy_default (target, src);
+    }
+}
+
+/**
+* mix_display_copy_default:
+* @target:
+* @src:
+*
+* The default copy method of this object. Perhaps copy at this level.
+* Assign this to the copy vmethod.
+*/
+static gboolean
+mix_display_copy_default (MixDisplay * target, const MixDisplay * src)
+{
+  if (MIX_IS_DISPLAY (target) && MIX_IS_DISPLAY (src))
+    {
+      // TODO perform deep copy.
+      return TRUE;
+    }
+  return FALSE;
+}
+
+static void
+mix_display_finalize (MixDisplay * obj)
+{
+  /* do nothing */
+}
+
+MixDisplay *
+mix_display_dup (const MixDisplay * obj)
+{
+  MixDisplayClass *klass = MIX_DISPLAY_GET_CLASS (obj);
+
+  if (klass->dup)
+    {
+      return klass->dup (obj);
+    }
+  else if (MIX_IS_DISPLAY (obj))
+    {
+      return mix_display_dup_default (obj);
+    }
+  return NULL;
+}
+
+static MixDisplay *
+mix_display_dup_default (const MixDisplay * obj)
+{
+  /* Instantiate the same concrete type as @obj; mix_display_new() requires
+     the GType of the instance to create. */
+  MixDisplay *ret = mix_display_new (G_TYPE_FROM_INSTANCE (obj));
+  if (mix_display_copy (ret, obj))
+    {
+      return ret;
+    }
+
+  return NULL;
+}
+
+MixDisplay *
+mix_display_new (GType type)
+{
+  MixDisplay *obj;
+
+  /* we don't support dynamic types because they really aren't useful,
+   * and could cause refcount problems */
+  obj = (MixDisplay *) g_type_create_instance (type);
+
+  return obj;
+}
+
+MixDisplay *
+mix_display_ref (MixDisplay * obj)
+{
+  g_return_val_if_fail (MIX_IS_DISPLAY (obj), NULL);
+
+  g_atomic_int_inc (&obj->refcount);
+
+  return obj;
+}
+
+static void
+mix_display_free (MixDisplay * obj)
+{
+  MixDisplayClass *klass = NULL;
+
+  klass = MIX_DISPLAY_GET_CLASS (obj);
+  klass->finalize (obj);
+
+  /* Should we support recycling the object? */
+  /* If so, refcount handling is slightly different. */
+  /* i.e. If the refcount is still 0 we can really free the object, else the finalize method recycled the object -- but to where?
*/ + + if (g_atomic_int_get (&obj->refcount) == 0) + { + + g_type_free_instance ((GTypeInstance *) obj); + } +} + +void +mix_display_unref (MixDisplay * obj) +{ + g_return_if_fail (obj != NULL); + g_return_if_fail (obj->refcount > 0); + + if (G_UNLIKELY (g_atomic_int_dec_and_test (&obj->refcount))) + { + mix_display_free (obj); + } +} + +static void +mix_value_display_init (GValue * value) +{ + value->data[0].v_pointer = NULL; +} + +static void +mix_value_display_free (GValue * value) +{ + if (value->data[0].v_pointer) + { + mix_display_unref (MIX_DISPLAY_CAST (value->data[0].v_pointer)); + } +} + +static void +mix_value_display_copy (const GValue * src_value, GValue * dest_value) +{ + if (src_value->data[0].v_pointer) + { + dest_value->data[0].v_pointer = + mix_display_ref (MIX_DISPLAY_CAST (src_value->data[0].v_pointer)); + } + else + { + dest_value->data[0].v_pointer = NULL; + } +} + +static gpointer +mix_value_display_peek_pointer (const GValue * value) +{ + return value->data[0].v_pointer; +} + +static gchar * +mix_value_display_collect (GValue * value, guint n_collect_values, + GTypeCValue * collect_values, guint collect_flags) +{ + mix_value_set_display (value, collect_values[0].v_pointer); + + return NULL; +} + +static gchar * +mix_value_display_lcopy (const GValue * value, + guint n_collect_values, + GTypeCValue * collect_values, guint collect_flags) +{ + gpointer *obj_p = collect_values[0].v_pointer; + + if (!obj_p) + { + return g_strdup_printf ("value location for '%s' passed as NULL", + G_VALUE_TYPE_NAME (value)); + } + + if (!value->data[0].v_pointer) + *obj_p = NULL; + else if (collect_flags & G_VALUE_NOCOPY_CONTENTS) + *obj_p = value->data[0].v_pointer; + else + *obj_p = mix_display_ref (value->data[0].v_pointer); + + return NULL; +} + +/** +* mix_value_set_display: +* @value: a valid #GValue of %MIX_TYPE_DISPLAY derived type +* @obj: object value to set +* +* Set the contents of a %MIX_TYPE_DISPLAY derived #GValue to +* @obj. +* The caller retains ownership of the reference. +*/ +void +mix_value_set_display (GValue * value, MixDisplay * obj) +{ + gpointer *pointer_p; + + g_return_if_fail (MIX_VALUE_HOLDS_DISPLAY (value)); + g_return_if_fail (obj == NULL || MIX_IS_DISPLAY (obj)); + + pointer_p = &value->data[0].v_pointer; + mix_display_replace ((MixDisplay **) pointer_p, obj); +} + +/** +* mix_value_take_display: +* @value: a valid #GValue of #MIX_TYPE_DISPLAY derived type +* @obj: object value to take +* +* Set the contents of a #MIX_TYPE_DISPLAY derived #GValue to +* @obj. +* Takes over the ownership of the caller's reference to @obj; +* the caller doesn't have to unref it any more. +*/ +void +mix_value_take_display (GValue * value, MixDisplay * obj) +{ + gpointer *pointer_p; + + g_return_if_fail (MIX_VALUE_HOLDS_DISPLAY (value)); + g_return_if_fail (obj == NULL || MIX_IS_DISPLAY (obj)); + + pointer_p = &value->data[0].v_pointer; + mix_display_replace ((MixDisplay **) pointer_p, obj); + if (obj) + mix_display_unref (obj); +} + +/** +* mix_value_get_display: +* @value: a valid #GValue of #MIX_TYPE_DISPLAY derived type +* @returns:object contents of @value +* +* refcount of the MixDisplay is not increased. +*/ +MixDisplay * +mix_value_get_display (const GValue * value) +{ + g_return_val_if_fail (MIX_VALUE_HOLDS_DISPLAY (value), NULL); + + return value->data[0].v_pointer; +} + +/** +* mix_value_dup_display: +* @value: a valid #GValue of %MIX_TYPE_DISPLAY derived type +* @returns: object contents of @value +* +* refcount of MixDisplay is increased. 
+*/
+MixDisplay *
+mix_value_dup_display (const GValue * value)
+{
+  g_return_val_if_fail (MIX_VALUE_HOLDS_DISPLAY (value), NULL);
+
+  return mix_display_ref (value->data[0].v_pointer);
+}
+
+
+static void
+param_display_init (GParamSpec * pspec)
+{
+  /* GParamSpecDisplay *ospec = G_PARAM_SPEC_DISPLAY (pspec); */
+}
+
+static void
+param_display_set_default (GParamSpec * pspec, GValue * value)
+{
+  value->data[0].v_pointer = NULL;
+}
+
+static gboolean
+param_display_validate (GParamSpec * pspec, GValue * value)
+{
+  gboolean validated = FALSE;
+  MixParamSpecDisplay *ospec = MIX_PARAM_SPEC_DISPLAY (pspec);
+  MixDisplay *obj = value->data[0].v_pointer;
+
+  if (obj && !g_value_type_compatible (G_OBJECT_TYPE (obj), G_PARAM_SPEC_VALUE_TYPE (ospec)))
+    {
+      mix_display_unref (obj);
+      value->data[0].v_pointer = NULL;
+      validated = TRUE;
+    }
+
+  return validated;
+}
+
+static gint
+param_display_values_cmp (GParamSpec * pspec,
+			  const GValue * value1, const GValue * value2)
+{
+  guint8 *p1 = value1->data[0].v_pointer;
+  guint8 *p2 = value2->data[0].v_pointer;
+
+  return p1 < p2 ? -1 : p1 > p2;
+}
+
+GType
+mix_param_spec_display_get_type (void)
+{
+  static GType type;
+
+  if (G_UNLIKELY (type == 0))
+    {
+      static const GParamSpecTypeInfo pspec_info = {
+	sizeof (MixParamSpecDisplay),	/* instance_size */
+	16,			/* n_preallocs */
+	param_display_init,	/* instance_init */
+	G_TYPE_OBJECT,		/* value_type */
+	NULL,			/* finalize */
+	param_display_set_default,	/* value_set_default */
+	param_display_validate,	/* value_validate */
+	param_display_values_cmp,	/* values_cmp */
+      };
+      /* FIXME 0.11: Should really be MixParamSpecDisplay */
+      type = g_param_type_register_static ("GParamSpecDisplay", &pspec_info);
+    }
+
+  return type;
+}
+
+/**
+* mix_param_spec_display:
+* @name: the canonical name of the property
+* @nick: the nickname of the property
+* @blurb: a short description of the property
+* @object_type: the #MixDisplayType for the property
+* @flags: a combination of #GParamFlags
+* @returns: a newly allocated #GParamSpec instance
+*
+* Creates a new #GParamSpec instance that holds #MixDisplay references.
+*
+*/
+GParamSpec *
+mix_param_spec_display (const char *name, const char *nick,
+			const char *blurb, GType object_type,
+			GParamFlags flags)
+{
+  MixParamSpecDisplay *ospec;
+
+  g_return_val_if_fail (g_type_is_a (object_type, MIX_TYPE_DISPLAY), NULL);
+
+  ospec = g_param_spec_internal (MIX_TYPE_PARAM_DISPLAY,
+				 name, nick, blurb, flags);
+  G_PARAM_SPEC (ospec)->value_type = object_type;
+
+  return G_PARAM_SPEC (ospec);
+}
+
+/**
+* mix_display_replace:
+* @olddata: pointer to a pointer to an object to be replaced
+* @newdata: pointer to new object
+*
+* Modifies a pointer to point to a new object. The modification
+* is done atomically, and the reference counts are updated correctly.
+* Either @newdata or the value pointed to by @olddata may be NULL.
+*/ +void +mix_display_replace (MixDisplay ** olddata, MixDisplay * newdata) +{ + MixDisplay *olddata_val; + + g_return_if_fail (olddata != NULL); + + olddata_val = g_atomic_pointer_get ((gpointer *) olddata); + + if (olddata_val == newdata) + return; + + if (newdata) + mix_display_ref (newdata); + + while (!g_atomic_pointer_compare_and_exchange + ((gpointer *) olddata, olddata_val, newdata)) + { + olddata_val = g_atomic_pointer_get ((gpointer *) olddata); + } + + if (olddata_val) + mix_display_unref (olddata_val); + +} + +gboolean +mix_display_equal (MixDisplay * first, MixDisplay * second) +{ + if (MIX_IS_DISPLAY (first)) + { + MixDisplayClass *klass = MIX_DISPLAY_GET_CLASS (first); + + if (klass->equal) + { + return klass->equal (first, second); + } + else + { + return mix_display_equal_default (first, second); + } + } + else + return FALSE; +} + +static gboolean +mix_display_equal_default (MixDisplay * first, MixDisplay * second) +{ + if (MIX_IS_DISPLAY (first) && MIX_IS_DISPLAY (second)) + { + gboolean ret = TRUE; + + // Do data comparison here. + + return ret; + } + else + return FALSE; +} diff --git a/mix_video/src/mixdisplay.h b/mix_video/src/mixdisplay.h new file mode 100644 index 0000000..daaa5ed --- /dev/null +++ b/mix_video/src/mixdisplay.h @@ -0,0 +1,233 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_DISPLAY_H__ +#define __MIX_DISPLAY_H__ + +#include + +G_BEGIN_DECLS +#define MIX_TYPE_DISPLAY (mix_display_get_type()) +#define MIX_IS_DISPLAY(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DISPLAY)) +#define MIX_IS_DISPLAY_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DISPLAY)) +#define MIX_DISPLAY_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DISPLAY, MixDisplayClass)) +#define MIX_DISPLAY(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DISPLAY, MixDisplay)) +#define MIX_DISPLAY_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DISPLAY, MixDisplayClass)) +#define MIX_DISPLAY_CAST(obj) ((MixDisplay*)(obj)) +typedef struct _MixDisplay MixDisplay; +typedef struct _MixDisplayClass MixDisplayClass; + +/** +* MixDisplayDupFunction: +* @obj: Display to duplicate +* @returns: reference to cloned instance. +* +* Virtual function prototype for methods to create duplicate of instance. +* +*/ +typedef MixDisplay *(*MixDisplayDupFunction) (const MixDisplay * obj); + +/** +* MixDisplayCopyFunction: +* @target: target of the copy +* @src: source of the copy +* @returns: boolean indicates if copy is successful. 
+* +* Virtual function prototype for methods to create copies of instance. +* +*/ +typedef gboolean (*MixDisplayCopyFunction) (MixDisplay * target, + const MixDisplay * src); + +/** +* MixDisplayFinalizeFunction: +* @obj: Display to finalize +* +* Virtual function prototype for methods to free ressources used by +* object. +*/ +typedef void (*MixDisplayFinalizeFunction) (MixDisplay * obj); + +/** +* MixDisplayEqualsFunction: +* @first: first object in the comparison +* @second: second object in the comparison +* +* Virtual function prototype for methods to compare 2 objects and check if they are equal. +*/ +typedef gboolean (*MixDisplayEqualFunction) (MixDisplay * first, + MixDisplay * second); + +/** +* MIX_VALUE_HOLDS_DISPLAY: +* @value: the #GValue to check +* +* Checks if the given #GValue contains a #MIX_TYPE_PARAM value. +*/ +#define MIX_VALUE_HOLDS_DISPLAY(value) (G_VALUE_HOLDS(value, MIX_TYPE_DISPLAY)) + +/** +* MIX_DISPLAY_REFCOUNT: +* @obj: a #MixDisplay +* +* Get access to the reference count field of the object. +*/ +#define MIX_DISPLAY_REFCOUNT(obj) ((MIX_DISPLAY_CAST(obj))->refcount) +/** +* MIX_DISPLAY_REFCOUNT_VALUE: +* @obj: a #MixDisplay +* +* Get the reference count value of the object +*/ +#define MIX_DISPLAY_REFCOUNT_VALUE(obj) (g_atomic_int_get (&(MIX_DISPLAY_CAST(obj))->refcount)) + +/** +* MixDisplay: +* @instance: type instance +* @refcount: atomic refcount +* +* Base class for a refcounted parameter objects. +*/ +struct _MixDisplay +{ + GTypeInstance instance; + /*< public > */ + gint refcount; + + /*< private > */ + gpointer _reserved; +}; + +/** +* MixDisplayClass: +* @dup: method to duplicate the object. +* @copy: method to copy details in one object to the other. +* @finalize: destructor +* @equal: method to check if the content of two objects are equal. +* +* #MixDisplay class strcut. +*/ +struct _MixDisplayClass +{ + GTypeClass type_class; + + MixDisplayDupFunction dup; + MixDisplayCopyFunction copy; + MixDisplayFinalizeFunction finalize; + MixDisplayEqualFunction equal; + + /*< private > */ + gpointer _mix_reserved; +}; + +/** +* mix_display_get_type: +* @returns: type of this object. +* +* Get type. +*/ +GType mix_display_get_type (void); + +/** +* mix_display_new: +* @returns: return a newly allocated object. +* +* Create new instance of the object. +*/ +MixDisplay *mix_display_new (); + +/** +* mix_display_copy: +* @target: copy to target +* @src: copy from source +* @returns: boolean indicating if copy is successful. +* +* Copy data from one instance to the other. This method internally invoked the #MixDisplay::copy method such that derived object will be copied correctly. +*/ +gboolean mix_display_copy (MixDisplay * target, const MixDisplay * src); + +/** +* mix_display_ref: +* @obj: a #MixDisplay object. +* @returns: the object with reference count incremented. +* +* Increment reference count. +*/ +MixDisplay *mix_display_ref (MixDisplay * obj); + +/** +* mix_display_unref: +* @obj: a #MixDisplay object. +* +* Decrement reference count. +*/ +void mix_display_unref (MixDisplay * obj); + +/** +* mix_display_replace: +* @olddata: +* @newdata: +* +* Replace a pointer of the object with the new one. +*/ +void mix_display_replace (MixDisplay ** olddata, MixDisplay * newdata); + +/** +* mix_display_dup: +* @obj: #MixDisplay object to duplicate. +* @returns: A newly allocated duplicate of the object, or NULL if failed. +* +* Duplicate the given #MixDisplay and allocate a new instance. 
This method is chained up properly and derive object will be dupped properly. +*/ +MixDisplay *mix_display_dup (const MixDisplay * obj); + +/** +* mix_display_equal: +* @first: first object to compare +* @second: second object to compare +* @returns: boolean indicates if the 2 object contains same data. +* +* Note that the parameter comparison compares the values that are hold inside the object, not for checking if the 2 pointers are of the same instance. +*/ +gboolean mix_display_equal (MixDisplay * first, MixDisplay * second); + +/* GParamSpec */ + +#define MIX_TYPE_PARAM_DISPLAY (mix_param_spec_display_get_type()) +#define MIX_IS_PARAM_SPEC_DISPLAY(pspec) (G_TYPE_CHECK_INSTANCE_TYPE ((pspec), MIX_TYPE_PARAM_DISPLAY)) +#define MIX_PARAM_SPEC_DISPLAY(pspec) (G_TYPE_CHECK_INSTANCE_CAST ((pspec), MIX_TYPE_PARAM_DISPLAY, MixParamSpecDisplay)) + +typedef struct _MixParamSpecDisplay MixParamSpecDisplay; + +/** +* MixParamSpecDisplay: +* @parent: #GParamSpec portion +* +* A #GParamSpec derived structure that contains the meta data +* for #MixDisplay properties. +*/ +struct _MixParamSpecDisplay +{ + GParamSpec parent; +}; + +GType mix_param_spec_display_get_type (void); + +GParamSpec *mix_param_spec_display (const char *name, const char *nick, + const char *blurb, GType object_type, + GParamFlags flags); + +/* GValue methods */ + +void mix_value_set_display (GValue * value, MixDisplay * obj); +void mix_value_take_display (GValue * value, MixDisplay * obj); +MixDisplay *mix_value_get_display (const GValue * value); +MixDisplay *mix_value_dup_display (const GValue * value); + +G_END_DECLS +#endif diff --git a/mix_video/src/mixdisplayx11.c b/mix_video/src/mixdisplayx11.c new file mode 100644 index 0000000..60eb3e4 --- /dev/null +++ b/mix_video/src/mixdisplayx11.c @@ -0,0 +1,205 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixdisplayx11 + * @short_description: VideoInit parameters + * + * A data object which stores videoinit specific parameters. 
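+ *
+ * A minimal construction sketch (the XOpenDisplay() call and the xwindow
+ * id are assumptions for illustration; error checking omitted):
+ * |[
+ * Display *xdpy = XOpenDisplay (NULL);
+ * MixDisplayX11 *mixdpy = mix_displayx11_new ();
+ *
+ * mix_displayx11_set_display (mixdpy, xdpy);
+ * mix_displayx11_set_drawable (mixdpy, (Drawable) xwindow);
+ * // ... hand mixdpy to the video pipeline ...
+ * mix_displayx11_unref (mixdpy);
+ * ]|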
+ */
+
+#include "mixdisplayx11.h"
+
+#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; }
+
+static GType _mix_displayx11_type = 0;
+static MixDisplayClass *parent_class = NULL;
+
+#define _do_init { _mix_displayx11_type = g_define_type_id; }
+
+gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src);
+MixDisplay *mix_displayx11_dup(const MixDisplay * obj);
+gboolean mix_displayx11_equal(MixDisplay * first, MixDisplay * second);
+static void mix_displayx11_finalize(MixDisplay * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixDisplayX11, mix_displayx11,
+		MIX_TYPE_DISPLAY, _do_init);
+
+static void mix_displayx11_init(MixDisplayX11 * self) {
+
+	/* Initialize member variables */
+	self->display = NULL;
+	self->drawable = 0;
+}
+
+static void mix_displayx11_class_init(MixDisplayX11Class * klass) {
+	MixDisplayClass *mixdisplay_class = MIX_DISPLAY_CLASS(klass);
+
+	/* setup static parent class */
+	parent_class = (MixDisplayClass *) g_type_class_peek_parent(klass);
+
+	mixdisplay_class->finalize = mix_displayx11_finalize;
+	mixdisplay_class->copy = (MixDisplayCopyFunction) mix_displayx11_copy;
+	mixdisplay_class->dup = (MixDisplayDupFunction) mix_displayx11_dup;
+	mixdisplay_class->equal = (MixDisplayEqualFunction) mix_displayx11_equal;
+}
+
+MixDisplayX11 *
+mix_displayx11_new(void) {
+	MixDisplayX11 *ret = (MixDisplayX11 *) g_type_create_instance(
+			MIX_TYPE_DISPLAYX11);
+
+	return ret;
+}
+
+void mix_displayx11_finalize(MixDisplay * obj) {
+	/* clean up here. */
+	/* MixDisplayX11 *self = MIX_DISPLAYX11 (obj); */
+
+	/* NOTE: we don't need to do anything
+	 * with display and drawable */
+
+	/* Chain up parent */
+	if (parent_class->finalize)
+		parent_class->finalize(obj);
+}
+
+MixDisplayX11 *
+mix_displayx11_ref(MixDisplayX11 * mix) {
+	return (MixDisplayX11 *) mix_display_ref(MIX_DISPLAY(mix));
+}
+
+/**
+ * mix_displayx11_dup:
+ * @obj: a #MixDisplayX11 object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Create a duplicate of the object.
+ */
+MixDisplay *
+mix_displayx11_dup(const MixDisplay * obj) {
+	MixDisplay *ret = NULL;
+
+	if (MIX_IS_DISPLAYX11(obj)) {
+		MixDisplayX11 *duplicate = mix_displayx11_new();
+		if (mix_displayx11_copy(MIX_DISPLAY(duplicate), MIX_DISPLAY(obj))) {
+			ret = MIX_DISPLAY(duplicate);
+		} else {
+			mix_displayx11_unref(duplicate);
+		}
+	}
+	return ret;
+}
+
+/**
+ * mix_displayx11_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicating whether the copy was successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src) {
+	MixDisplayX11 *this_target, *this_src;
+
+	if (MIX_IS_DISPLAYX11(target) && MIX_IS_DISPLAYX11(src)) {
+		// Cast the base object to this child object
+		this_target = MIX_DISPLAYX11(target);
+		this_src = MIX_DISPLAYX11(src);
+
+		// Copy properties from source to target.
+
+		this_target->display = this_src->display;
+		this_target->drawable = this_src->drawable;
+
+		// Now chainup base class
+		if (parent_class->copy) {
+			return parent_class->copy(MIX_DISPLAY_CAST(target),
+					MIX_DISPLAY_CAST(src));
+		} else {
+			return TRUE;
+		}
+	}
+	return FALSE;
+}
+
+/**
+ * mix_displayx11_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the instances are equal.
+ *
+ * Compare two #MixDisplayX11 objects for equality.
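+ * Note that, as written, the comparison is shallow: the objects are equal
+ * when they hold the same X11 Display pointer and the same Drawable id,
+ * mirroring the pointer copy done in mix_displayx11_copy().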
+ */ +gboolean mix_displayx11_equal(MixDisplay * first, MixDisplay * second) { + gboolean ret = FALSE; + + MixDisplayX11 *this_first, *this_second; + + this_first = MIX_DISPLAYX11(first); + this_second = MIX_DISPLAYX11(second); + + if (MIX_IS_DISPLAYX11(first) && MIX_IS_DISPLAYX11(second)) { + // Compare member variables + + // TODO: if in the copy method we just copy the pointer of display, the comparison + // below is enough. But we need to decide how to copy! + + if (this_first->display == this_second->display && this_first->drawable + == this_second->drawable) { + // members within this scope equal. chaining up. + MixDisplayClass *klass = MIX_DISPLAY_CLASS(parent_class); + if (klass->equal) + ret = parent_class->equal(first, second); + else + ret = TRUE; + } + } + return ret; +} + +#define MIX_DISPLAYX11_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_DISPLAYX11(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_DISPLAYX11_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_DISPLAYX11(obj)) return MIX_RESULT_FAIL; \ + +MIX_RESULT mix_displayx11_set_display(MixDisplayX11 * obj, Display * display) { + MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj); + + // TODO: needs to decide to clone or just copy pointer + obj->display = display; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_displayx11_get_display(MixDisplayX11 * obj, Display ** display) { + MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, display); + + // TODO: needs to decide to clone or just copy pointer + *display = obj->display; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_displayx11_set_drawable(MixDisplayX11 * obj, Drawable drawable) { + MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj); + + // TODO: needs to decide to clone or just copy pointer + obj->drawable = drawable; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_displayx11_get_drawable(MixDisplayX11 * obj, Drawable * drawable) { + MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, drawable); + + // TODO: needs to decide to clone or just copy pointer + *drawable = obj->drawable; + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixdisplayx11.h b/mix_video/src/mixdisplayx11.h new file mode 100644 index 0000000..4a14c9f --- /dev/null +++ b/mix_video/src/mixdisplayx11.h @@ -0,0 +1,141 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+*/ + +#ifndef __MIX_DISPLAYX11_H__ +#define __MIX_DISPLAYX11_H__ + +#include "mixdisplay.h" +#include "mixvideodef.h" +#include + +/** +* MIX_TYPE_DISPLAYX11: +* +* Get type of class. +*/ +#define MIX_TYPE_DISPLAYX11 (mix_displayx11_get_type ()) + +/** +* MIX_DISPLAYX11: +* @obj: object to be type-casted. +*/ +#define MIX_DISPLAYX11(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DISPLAYX11, MixDisplayX11)) + +/** +* MIX_IS_DISPLAYX11: +* @obj: an object. +* +* Checks if the given object is an instance of #MixDisplay +*/ +#define MIX_IS_DISPLAYX11(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DISPLAYX11)) + +/** +* MIX_DISPLAYX11_CLASS: +* @klass: class to be type-casted. +*/ +#define MIX_DISPLAYX11_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DISPLAYX11, MixDisplayX11Class)) + +/** +* MIX_IS_DISPLAYX11_CLASS: +* @klass: a class. +* +* Checks if the given class is #MixDisplayClass +*/ +#define MIX_IS_DISPLAYX11_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DISPLAYX11)) + +/** +* MIX_DISPLAYX11_GET_CLASS: +* @obj: a #MixDisplay object. +* +* Get the class instance of the object. +*/ +#define MIX_DISPLAYX11_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DISPLAYX11, MixDisplayX11Class)) + +typedef struct _MixDisplayX11 MixDisplayX11; +typedef struct _MixDisplayX11Class MixDisplayX11Class; + +/** +* MixDisplayX11: +* +* MI-X VideoInit Parameter object +*/ +struct _MixDisplayX11 +{ + /*< public > */ + MixDisplay parent; + + /*< public > */ + + Display *display; + Drawable drawable; +}; + +/** +* MixDisplayX11Class: +* +* MI-X VideoInit object class +*/ +struct _MixDisplayX11Class +{ + /*< public > */ + MixDisplayClass parent_class; + + /* class members */ +}; + +/** +* mix_displayx11_get_type: +* @returns: type +* +* Get the type of object. +*/ +GType mix_displayx11_get_type (void); + +/** +* mix_displayx11_new: +* @returns: A newly allocated instance of #MixDisplayX11 +* +* Use this method to create new instance of #MixDisplayX11 +*/ +MixDisplayX11 *mix_displayx11_new (void); +/** +* mix_displayx11_ref: +* @mix: object to add reference +* @returns: the MixDisplayX11 instance where reference count has been increased. +* +* Add reference count. +*/ +MixDisplayX11 *mix_displayx11_ref (MixDisplayX11 * mix); + +/** +* mix_displayx11_unref: +* @obj: object to unref. +* +* Decrement reference count of the object. +*/ +#define mix_displayx11_unref(obj) mix_display_unref(MIX_DISPLAY(obj)) + +/* Class Methods */ + +/* +TO DO: Add documents +*/ + +MIX_RESULT mix_displayx11_set_display (MixDisplayX11 * obj, + Display * display); + +MIX_RESULT mix_displayx11_get_display (MixDisplayX11 * obj, + Display ** dislay); + +MIX_RESULT mix_displayx11_set_drawable (MixDisplayX11 * obj, + Drawable drawable); + +MIX_RESULT mix_displayx11_get_drawable (MixDisplayX11 * obj, + Drawable * drawable); + +#endif /* __MIX_DISPLAYX11_H__ */ diff --git a/mix_video/src/mixdrmparams.c b/mix_video/src/mixdrmparams.c new file mode 100644 index 0000000..336393b --- /dev/null +++ b/mix_video/src/mixdrmparams.c @@ -0,0 +1,189 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. 
The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** +* SECTION:mixdrmparams +* @short_description: Drm parameters +* +* A data object which stores drm specific parameters. +*/ + +#include "mixdrmparams.h" + +static GType _mix_drmparams_type = 0; +static MixParamsClass *parent_class = NULL; + +#define _do_init { _mix_drmparams_type = g_define_type_id; } + +gboolean mix_drmparams_copy (MixParams * target, const MixParams * src); +MixParams *mix_drmparams_dup (const MixParams * obj); +gboolean mix_drmparams_equal (MixParams * first, MixParams * second); +static void mix_drmparams_finalize (MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixDrmParams, mix_drmparams, MIX_TYPE_PARAMS, + _do_init); + +static void +mix_drmparams_init (MixDrmParams * self) +{ + /* initialize properties here */ + + /* TODO: initialize properties */ +} + +static void +mix_drmparams_class_init (MixDrmParamsClass * klass) +{ + MixParamsClass *mixparams_class = MIX_PARAMS_CLASS (klass); + + /* setup static parent class */ + parent_class = (MixParamsClass *) g_type_class_peek_parent (klass); + + mixparams_class->finalize = mix_drmparams_finalize; + mixparams_class->copy = (MixParamsCopyFunction) mix_drmparams_copy; + mixparams_class->dup = (MixParamsDupFunction) mix_drmparams_dup; + mixparams_class->equal = (MixParamsEqualFunction) mix_drmparams_equal; +} + +MixDrmParams * +mix_drmparams_new (void) +{ + MixDrmParams *ret = + (MixDrmParams *) g_type_create_instance (MIX_TYPE_DRMPARAMS); + + return ret; +} + +void +mix_drmparams_finalize (MixParams * obj) +{ + /* clean up here. */ + /* TODO: cleanup resources allocated */ + + /* Chain up parent */ + if (parent_class->finalize) + { + parent_class->finalize (obj); + } +} + +MixDrmParams * +mix_drmparams_ref (MixDrmParams * mix) +{ + return (MixDrmParams *) mix_params_ref (MIX_PARAMS (mix)); +} + +/** +* mix_drmparams_dup: +* @obj: a #MixDrmParams object +* @returns: a newly allocated duplicate of the object. +* +* Copy duplicate of the object. +*/ +MixParams * +mix_drmparams_dup (const MixParams * obj) +{ + MixParams *ret = NULL; + + if (MIX_IS_DRMPARAMS (obj)) + { + MixDrmParams *duplicate = mix_drmparams_new (); + if (mix_drmparams_copy (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) + { + ret = MIX_PARAMS (duplicate); + } + else + { + mix_drmparams_unref (duplicate); + } + } + return ret; +} + +/** +* mix_drmparams_copy: +* @target: copy to target +* @src: copy from src +* @returns: boolean indicates if copy is successful. +* +* Copy instance data from @src to @target. 
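+*
+* A usage sketch (illustration only; since the object is still a skeleton,
+* the copy currently just chains up to the #MixParams base class):
+* |[
+* MixDrmParams *src = mix_drmparams_new ();
+* MixDrmParams *dst = mix_drmparams_new ();
+*
+* gboolean ok = mix_drmparams_copy (MIX_PARAMS (dst), MIX_PARAMS (src));
+*
+* mix_drmparams_unref (src);
+* mix_drmparams_unref (dst);
+* ]|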
+*/ +gboolean +mix_drmparams_copy (MixParams * target, const MixParams * src) +{ + MixDrmParams *this_target, *this_src; + + if (MIX_IS_DRMPARAMS (target) && MIX_IS_DRMPARAMS (src)) + { + // Cast the base object to this child object + this_target = MIX_DRMPARAMS (target); + this_src = MIX_DRMPARAMS (src); + + // TODO: copy properties */ + + // Now chainup base class + if (parent_class->copy) + { + return parent_class->copy (MIX_PARAMS_CAST (target), + MIX_PARAMS_CAST (src)); + } + else + { + return TRUE; + } + } + return FALSE; +} + +/** +* mix_drmparams_: +* @first: first object to compare +* @second: seond object to compare +* @returns: boolean indicates if instance are equal. +* +* Copy instance data from @src to @target. +*/ +gboolean +mix_drmparams_equal (MixParams * first, MixParams * second) +{ + gboolean ret = FALSE; + MixDrmParams *this_first, *this_second; + + if (MIX_IS_DRMPARAMS (first) && MIX_IS_DRMPARAMS (second)) + { + // Deep compare + // Cast the base object to this child object + + this_first = MIX_DRMPARAMS (first); + this_second = MIX_DRMPARAMS (second); + + /* TODO: add comparison for properties */ + /* if ( first properties == sencod properties) */ + { + // members within this scope equal. chaining up. + MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); + if (klass->equal) + ret = parent_class->equal (first, second); + else + ret = TRUE; + } + } + + return ret; +} + +#define MIX_DRMPARAMS_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_DRMPARAMS(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_DRMPARAMS_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_DRMPARAMS(obj)) return MIX_RESULT_FAIL; \ + + +/* TODO: Add getters and setters for properties. */ diff --git a/mix_video/src/mixdrmparams.h b/mix_video/src/mixdrmparams.h new file mode 100644 index 0000000..d5ffdbe --- /dev/null +++ b/mix_video/src/mixdrmparams.h @@ -0,0 +1,126 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_DRMPARAMS_H__ +#define __MIX_DRMPARAMS_H__ + +#include +#include "mixvideodef.h" + +/** +* MIX_TYPE_DRMPARAMS: +* +* Get type of class. +*/ +#define MIX_TYPE_DRMPARAMS (mix_drmparams_get_type ()) + +/** +* MIX_DRMPARAMS: +* @obj: object to be type-casted. +*/ +#define MIX_DRMPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DRMPARAMS, MixDrmParams)) + +/** +* MIX_IS_DRMPARAMS: +* @obj: an object. 
+* +* Checks if the given object is an instance of #MixParams +*/ +#define MIX_IS_DRMPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DRMPARAMS)) + +/** +* MIX_DRMPARAMS_CLASS: +* @klass: class to be type-casted. +*/ +#define MIX_DRMPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DRMPARAMS, MixDrmParamsClass)) + +/** +* MIX_IS_DRMPARAMS_CLASS: +* @klass: a class. +* +* Checks if the given class is #MixParamsClass +*/ +#define MIX_IS_DRMPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DRMPARAMS)) + +/** +* MIX_DRMPARAMS_GET_CLASS: +* @obj: a #MixParams object. +* +* Get the class instance of the object. +*/ +#define MIX_DRMPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DRMPARAMS, MixDrmParamsClass)) + +typedef struct _MixDrmParams MixDrmParams; +typedef struct _MixDrmParamsClass MixDrmParamsClass; + +/** +* MixDrmParams: +* +* MI-X Drm Parameter object +*/ +struct _MixDrmParams +{ + /*< public > */ + MixParams parent; + + /*< public > */ + + /* TODO: Add properties */ + +}; + +/** +* MixDrmParamsClass: +* +* MI-X Drm object class +*/ +struct _MixDrmParamsClass +{ + /*< public > */ + MixParamsClass parent_class; + + /* class members */ +}; + +/** +* mix_drmparams_get_type: +* @returns: type +* +* Get the type of object. +*/ +GType mix_drmparams_get_type (void); + +/** +* mix_drmparams_new: +* @returns: A newly allocated instance of #MixDrmParams +* +* Use this method to create new instance of #MixDrmParams +*/ +MixDrmParams *mix_drmparams_new (void); +/** +* mix_drmparams_ref: +* @mix: object to add reference +* @returns: the MixDrmParams instance where reference count has been increased. +* +* Add reference count. +*/ +MixDrmParams *mix_drmparams_ref (MixDrmParams * mix); + +/** +* mix_drmparams_unref: +* @obj: object to unref. +* +* Decrement reference count of the object. +*/ +#define mix_drmparams_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +/* TODO: Add getters and setters for properties */ + +#endif /* __MIX_DRMPARAMS_H__ */ diff --git a/mix_video/src/mixframemanager.c b/mix_video/src/mixframemanager.c new file mode 100644 index 0000000..4cb24e8 --- /dev/null +++ b/mix_video/src/mixframemanager.c @@ -0,0 +1,775 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+ */ +#include + +#include "mixvideolog.h" +#include "mixframemanager.h" +#include "mixvideoframe_private.h" + +#define INITIAL_FRAME_ARRAY_SIZE 16 +#define MIX_SECOND (G_USEC_PER_SEC * G_GINT64_CONSTANT (1000)) + +static GObjectClass *parent_class = NULL; + +static void mix_framemanager_finalize(GObject * obj); +G_DEFINE_TYPE( MixFrameManager, mix_framemanager, G_TYPE_OBJECT); + +static void mix_framemanager_init(MixFrameManager * self) { + /* TODO: public member initialization */ + + /* TODO: private member initialization */ + + if (!g_thread_supported()) { + g_thread_init(NULL); + } + + self->lock = g_mutex_new(); + + self->flushing = FALSE; + self->eos = FALSE; + self->frame_array = NULL; + self->frame_queue = NULL; + self->initialized = FALSE; + + self->mode = MIX_FRAMEORDER_MODE_DISPLAYORDER; + self->framerate_numerator = 30; + self->framerate_denominator = 1; + + self->is_first_frame = TRUE; + + /* for vc1 in asf */ + self->p_frame = NULL; + self->prev_timestamp = 0; +} + +static void mix_framemanager_class_init(MixFrameManagerClass * klass) { + GObjectClass *gobject_class = (GObjectClass *) klass; + + /* parent class for later use */ + parent_class = g_type_class_peek_parent(klass); + + gobject_class->finalize = mix_framemanager_finalize; +} + +MixFrameManager *mix_framemanager_new(void) { + MixFrameManager *ret = g_object_new(MIX_TYPE_FRAMEMANAGER, NULL); + + return ret; +} + +void mix_framemanager_finalize(GObject * obj) { + /* clean up here. */ + + MixFrameManager *fm = MIX_FRAMEMANAGER(obj); + + /* cleanup here */ + mix_framemanager_deinitialize(fm); + + if (fm->lock) { + g_mutex_free(fm->lock); + fm->lock = NULL; + } + + /* Chain up parent */ + if (parent_class->finalize) { + parent_class->finalize(obj); + } +} + +MixFrameManager *mix_framemanager_ref(MixFrameManager * fm) { + return (MixFrameManager *) g_object_ref(G_OBJECT(fm)); +} + +/* MixFrameManager class methods */ + +MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, + MixFrameOrderMode mode, gint framerate_numerator, + gint framerate_denominator, gboolean timebased_ordering) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + + if (!MIX_IS_FRAMEMANAGER(fm) || (mode != MIX_FRAMEORDER_MODE_DISPLAYORDER + && mode != MIX_FRAMEORDER_MODE_DECODEORDER) || framerate_numerator + <= 0 || framerate_denominator <= 0) { + return MIX_RESULT_INVALID_PARAM; + } + + if (fm->initialized) { + return MIX_RESULT_ALREADY_INIT; + } + + if (!g_thread_supported()) { + g_thread_init(NULL); + } + + ret = MIX_RESULT_NO_MEMORY; + if (!fm->lock) { + fm->lock = g_mutex_new(); + if (!fm->lock) { + goto cleanup; + } + } + + if (mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) { + fm->frame_array = g_ptr_array_sized_new(INITIAL_FRAME_ARRAY_SIZE); + if (!fm->frame_array) { + goto cleanup; + } + } + + fm->frame_queue = g_queue_new(); + if (!fm->frame_queue) { + goto cleanup; + } + + fm->framerate_numerator = framerate_numerator; + fm->framerate_denominator = framerate_denominator; + fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND + / fm->framerate_numerator; + + fm->mode = mode; + + fm->timebased_ordering = timebased_ordering; + + fm->initialized = TRUE; + + ret = MIX_RESULT_SUCCESS; + + cleanup: + + if (ret != MIX_RESULT_SUCCESS) { + if (fm->frame_array) { + g_ptr_array_free(fm->frame_array, TRUE); + fm->frame_array = NULL; + } + if (fm->frame_queue) { + g_queue_free(fm->frame_queue); + fm->frame_queue = NULL; + } + } + return ret; +} +MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm) { + + if (!MIX_IS_FRAMEMANAGER(fm)) { + 
return MIX_RESULT_INVALID_PARAM; + } + + if (!fm->lock) { + return MIX_RESULT_FAIL; + } + + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } + + mix_framemanager_flush(fm); + + g_mutex_lock(fm->lock); + + if (fm->frame_array) { + g_ptr_array_free(fm->frame_array, TRUE); + fm->frame_array = NULL; + } + if (fm->frame_queue) { + g_queue_free(fm->frame_queue); + fm->frame_queue = NULL; + } + + fm->initialized = FALSE; + + g_mutex_unlock(fm->lock); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_framemanager_set_framerate(MixFrameManager *fm, + gint framerate_numerator, gint framerate_denominator) { + + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } + + if (!fm->lock) { + return MIX_RESULT_FAIL; + } + + if (framerate_numerator <= 0 || framerate_denominator <= 0) { + return MIX_RESULT_INVALID_PARAM; + } + + g_mutex_lock(fm->lock); + + fm->framerate_numerator = framerate_numerator; + fm->framerate_denominator = framerate_denominator; + fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND + / fm->framerate_numerator; + + g_mutex_unlock(fm->lock); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_framemanager_get_framerate(MixFrameManager *fm, + gint *framerate_numerator, gint *framerate_denominator) { + + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } + + if (!fm->lock) { + return MIX_RESULT_FAIL; + } + + if (!framerate_numerator || !framerate_denominator) { + return MIX_RESULT_INVALID_PARAM; + } + + g_mutex_lock(fm->lock); + + *framerate_numerator = fm->framerate_numerator; + *framerate_denominator = fm->framerate_denominator; + + g_mutex_unlock(fm->lock); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_framemanager_get_frame_order_mode(MixFrameManager *fm, + MixFrameOrderMode *mode) { + + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } + + if (!fm->lock) { + return MIX_RESULT_FAIL; + } + + if (!mode) { + return MIX_RESULT_INVALID_PARAM; + } + + /* no need to use lock */ + *mode = fm->mode; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) { + + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } + + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } + + g_mutex_lock(fm->lock); + + /* flush frame_array */ + if (fm->frame_array) { + guint len = fm->frame_array->len; + if (len) { + guint idx = 0; + MixVideoFrame *frame = NULL; + for (idx = 0; idx < len; idx++) { + frame = (MixVideoFrame *) g_ptr_array_index(fm->frame_array, + idx); + if (frame) { + mix_videoframe_unref(frame); + g_ptr_array_index(fm->frame_array, idx) = NULL; + } + } + /* g_ptr_array_remove_range(fm->frame_array, 0, len); */ + } + } + + if (fm->frame_queue) { + guint len = fm->frame_queue->length; + if (len) { + MixVideoFrame *frame = NULL; + while ((frame = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue))) { + mix_videoframe_unref(frame); + } + } + } + + if(fm->p_frame) { + mix_videoframe_unref(fm->p_frame); + fm->p_frame = NULL; + } + fm->prev_timestamp = 0; + + fm->eos = FALSE; + + fm->is_first_frame = TRUE; + + g_mutex_unlock(fm->lock); + + return MIX_RESULT_SUCCESS; +} + +MixVideoFrame *get_expected_frame_from_array(GPtrArray *array, + guint64 expected, guint64 tolerance, guint64 *frametimestamp) { + + guint idx = 0; + guint len = 0; + guint64 timestamp = 0; + guint64 lowest_timestamp = (guint64)-1; + guint lowest_timestamp_idx = -1; + + MixVideoFrame *frame = NULL; + + if (!array || !expected || !tolerance || !frametimestamp || expected < tolerance) { + 
+ return NULL; + } + + len = array->len; + if (!len) { + return NULL; + } + + for (idx = 0; idx < len; idx++) { + MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, idx); + if (_frame) { + + if (mix_videoframe_get_timestamp(_frame, ×tamp) + != MIX_RESULT_SUCCESS) { + + /* + * Oops, this shall never happen! + * In case it heppens, release the frame! + */ + + mix_videoframe_unref(_frame); + + /* make an available slot */ + g_ptr_array_index(array, idx) = NULL; + + break; + } + + if (lowest_timestamp > timestamp) + { + lowest_timestamp = timestamp; + lowest_timestamp_idx = idx; + } + } + } + + if (lowest_timestamp == (guint64)-1) + { + return NULL; + } + + + /* check if this is the expected next frame */ + if (lowest_timestamp <= expected + tolerance) + { + MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, lowest_timestamp_idx); + /* make this slot available */ + g_ptr_array_index(array, lowest_timestamp_idx) = NULL; + + *frametimestamp = lowest_timestamp; + frame = _frame; + } + + return frame; +} + +void add_frame_into_array(GPtrArray *array, MixVideoFrame *mvf) { + + gboolean found_slot = FALSE; + guint len = 0; + + if (!array || !mvf) { + return; + } + + /* do we have slot for this frame? */ + len = array->len; + if (len) { + guint idx = 0; + gpointer frame = NULL; + for (idx = 0; idx < len; idx++) { + frame = g_ptr_array_index(array, idx); + if (!frame) { + found_slot = TRUE; + g_ptr_array_index(array, idx) = (gpointer) mvf; + break; + } + } + } + + if (!found_slot) { + g_ptr_array_add(array, (gpointer) mvf); + } + +} + +MIX_RESULT mix_framemanager_timestamp_based_enqueue(MixFrameManager *fm, + MixVideoFrame *mvf) { + /* + * display order mode. + * + * if this is the first frame, we always push it into + * output queue, if it is not, check if it is the one + * expected, if yes, push it into the output queue. + * if not, put it into waiting list. + * + * while the expected frame is pushed into output queue, + * the expected next timestamp is also updated. with this + * updated expected next timestamp, we search for expected + * frame from the waiting list, if found, repeat the process. + * + */ + + MIX_RESULT ret = MIX_RESULT_FAIL; + guint64 timestamp = 0; + + first_frame: + + ret = mix_videoframe_get_timestamp(mvf, ×tamp); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + if (fm->is_first_frame) { + + /* + * for the first frame, we can always put it into the output queue + */ + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + + /* + * what timestamp of next frame shall be? + */ + fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta; + + fm->is_first_frame = FALSE; + + } else { + + /* + * is this the next frame expected? + */ + + /* calculate tolerance */ + guint64 tolerance = fm->frame_timestamp_delta / 4; + MixVideoFrame *frame_from_array = NULL; + guint64 timestamp_frame_array = 0; + + /* + * timestamp may be associated with the second field, which + * will not fall between the tolerance range. 
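+		 *
+		 * Worked example (numbers assumed for illustration): at 30/1 fps,
+		 * frame_timestamp_delta = 1 * MIX_SECOND / 30, roughly 33.3 ms,
+		 * and tolerance = delta / 4, roughly 8.3 ms. A second field
+		 * stamped ~16.7 ms after the first frame still satisfies
+		 * timestamp <= next_frame_timestamp + tolerance and is queued,
+		 * but because it lies below next_frame_timestamp - tolerance it
+		 * must not advance next_frame_timestamp; hence the range checks
+		 * below.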
+ */ + + if (timestamp <= fm->next_frame_timestamp + tolerance) { + + /* + * ok, this is the frame expected, push it into output queue + */ + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + + /* + * update next_frame_timestamp only if it falls within the tolerance range + */ + if (timestamp >= fm->next_frame_timestamp - tolerance) + { + fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta; + } + + /* + * since we updated next_frame_timestamp, there might be a frame + * in the frame_array that satisfying this new next_frame_timestamp + */ + + while ((frame_from_array = get_expected_frame_from_array( + fm->frame_array, fm->next_frame_timestamp, tolerance, + ×tamp_frame_array))) { + + g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array); + + /* + * update next_frame_timestamp only if it falls within the tolerance range + */ + if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance) + { + fm->next_frame_timestamp = timestamp_frame_array + + fm->frame_timestamp_delta; + } + } + + } else { + + /* + * is discontinuity flag set for this frame ? + */ + gboolean discontinuity = FALSE; + ret = mix_videoframe_get_discontinuity(mvf, &discontinuity); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + /* + * If this is a frame with discontinuity flag set, clear frame_array + * and treat the frame as the first frame. + */ + if (discontinuity) { + + guint len = fm->frame_array->len; + if (len) { + guint idx = 0; + MixVideoFrame *frame = NULL; + for (idx = 0; idx < len; idx++) { + frame = (MixVideoFrame *) g_ptr_array_index( + fm->frame_array, idx); + if (frame) { + mix_videoframe_unref(frame); + g_ptr_array_index(fm->frame_array, idx) = NULL; + } + } + } + + fm->is_first_frame = TRUE; + goto first_frame; + } + + /* + * handle variable frame rate: + * display any frame which time stamp is less than current one. + * + */ + guint64 tolerance = fm->frame_timestamp_delta / 4; + MixVideoFrame *frame_from_array = NULL; + guint64 timestamp_frame_array = 0; + + while ((frame_from_array = get_expected_frame_from_array( + fm->frame_array, timestamp, tolerance, + ×tamp_frame_array))) + { + g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array); + + /* + * update next_frame_timestamp only if it falls within the tolerance range + */ + if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance) + { + fm->next_frame_timestamp = timestamp_frame_array + + fm->frame_timestamp_delta; + } + } + /* + * this is not the expected frame, put it into frame_array + */ + + add_frame_into_array(fm->frame_array, mvf); + } + } + cleanup: + + return ret; +} + +MIX_RESULT mix_framemanager_frametype_based_enqueue(MixFrameManager *fm, + MixVideoFrame *mvf) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixFrameType frame_type; + guint64 timestamp = 0; + + ret = mix_videoframe_get_frame_type(mvf, &frame_type); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + ret = mix_videoframe_get_timestamp(mvf, ×tamp); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } + +#ifdef MIX_LOG_ENABLE + if (frame_type == TYPE_I) { + LOG_I( "TYPE_I %"G_GINT64_FORMAT"\n", timestamp); + } else if (frame_type == TYPE_P) { + LOG_I( "TYPE_P %"G_GINT64_FORMAT"\n", timestamp); + } else if (frame_type == TYPE_B) { + LOG_I( "TYPE_B %"G_GINT64_FORMAT"\n", timestamp); + } else { + LOG_I( "TYPE_UNKNOWN %"G_GINT64_FORMAT"\n", timestamp); + } +#endif + + if (fm->is_first_frame) { + /* + * The first frame is not a I frame, unexpected! 
+ */ + if (frame_type != TYPE_I) { + goto cleanup; + } + + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + fm->is_first_frame = FALSE; + } else { + + /* + * I P B B P B B ... + */ + if (frame_type == TYPE_I || frame_type == TYPE_P) { + + if (fm->p_frame) { + + ret = mix_videoframe_set_timestamp(fm->p_frame, + fm->prev_timestamp); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + g_queue_push_tail(fm->frame_queue, (gpointer) fm->p_frame); + fm->p_frame = NULL; + } + + /* it is an I frame, push it into the out queue */ + /*if (frame_type == TYPE_I) { + + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + + } else*/ + { + /* it is a P frame, we can not push it to the out queue yet, save it */ + fm->p_frame = mvf; + fm->prev_timestamp = timestamp; + } + + ret = MIX_RESULT_SUCCESS; + + } else { + /* it is a B frame, replace the timestamp with the previous one */ + if (timestamp > fm->prev_timestamp) { + ret = mix_videoframe_set_timestamp(mvf, fm->prev_timestamp); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + /* save the timestamp */ + fm->prev_timestamp = timestamp; + } + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + ret = MIX_RESULT_SUCCESS; + } + } + + cleanup: + + return ret; +} + +MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + + /*fm->mode = MIX_FRAMEORDER_MODE_DECODEORDER;*/ + + if (!mvf) { + return MIX_RESULT_INVALID_PARAM; + } + + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } + + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } + + /* + * This should never happen! + */ + if (fm->mode != MIX_FRAMEORDER_MODE_DISPLAYORDER && fm->mode + != MIX_FRAMEORDER_MODE_DECODEORDER) { + return MIX_RESULT_FAIL; + } + + g_mutex_lock(fm->lock); + + ret = MIX_RESULT_SUCCESS; + if (fm->mode == MIX_FRAMEORDER_MODE_DECODEORDER) { + /* + * decode order mode, push the frame into output queue + */ + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + + } else { + + if (fm->timebased_ordering) { + ret = mix_framemanager_timestamp_based_enqueue(fm, mvf); + } else { + ret = mix_framemanager_frametype_based_enqueue(fm, mvf); + } + } + + g_mutex_unlock(fm->lock); + + return ret; +} + +MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } + + if (!mvf) { + return MIX_RESULT_INVALID_PARAM; + } + + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } + + g_mutex_lock(fm->lock); + + ret = MIX_RESULT_FRAME_NOTAVAIL; + *mvf = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue); + if (*mvf) { + ret = MIX_RESULT_SUCCESS; + } else if (fm->eos) { + ret = MIX_RESULT_EOS; + } + + g_mutex_unlock(fm->lock); + + return ret; +} + +MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } + + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } + + g_mutex_lock(fm->lock); + + fm->eos = TRUE; + + g_mutex_unlock(fm->lock); + + return ret; +} + diff --git a/mix_video/src/mixframemanager.h b/mix_video/src/mixframemanager.h new file mode 100644 index 0000000..5dc663a --- /dev/null +++ b/mix_video/src/mixframemanager.h @@ -0,0 +1,164 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. 
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_FRAMEMANAGER_H__ +#define __MIX_FRAMEMANAGER_H__ + +#include +#include "mixvideodef.h" +#include "mixvideoframe.h" + +/* + * Type macros. + */ +#define MIX_TYPE_FRAMEMANAGER (mix_framemanager_get_type ()) +#define MIX_FRAMEMANAGER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_FRAMEMANAGER, MixFrameManager)) +#define MIX_IS_FRAMEMANAGER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_FRAMEMANAGER)) +#define MIX_FRAMEMANAGER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_FRAMEMANAGER, MixFrameManagerClass)) +#define MIX_IS_FRAMEMANAGER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_FRAMEMANAGER)) +#define MIX_FRAMEMANAGER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_FRAMEMANAGER, MixFrameManagerClass)) + +typedef struct _MixFrameManager MixFrameManager; +typedef struct _MixFrameManagerClass MixFrameManagerClass; + +struct _MixFrameManager { + /*< public > */ + GObject parent; + + /*< public > */ + + /*< private > */ + gboolean initialized; + gboolean flushing; + gboolean eos; + + GMutex *lock; + GPtrArray *frame_array; + GQueue *frame_queue; + + gint framerate_numerator; + gint framerate_denominator; + guint64 frame_timestamp_delta; + + MixFrameOrderMode mode; + + gboolean is_first_frame; + guint64 next_frame_timestamp; + + /* + * For VC-1 in ASF. + */ + + MixVideoFrame *p_frame; + guint64 prev_timestamp; + + gboolean timebased_ordering; +}; + +/** + * MixFrameManagerClass: + * + * MI-X Video object class + */ +struct _MixFrameManagerClass { + /*< public > */ + GObjectClass parent_class; + +/* class members */ + +/*< public > */ +}; + +/** + * mix_framemanager_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_framemanager_get_type(void); + +/** + * mix_framemanager_new: + * @returns: A newly allocated instance of #MixFrameManager + * + * Use this method to create new instance of #MixFrameManager + */ +MixFrameManager *mix_framemanager_new(void); + +/** + * mix_framemanager_ref: + * @mix: object to add reference + * @returns: the MixFrameManager instance where reference count has been increased. + * + * Add reference count. + */ +MixFrameManager *mix_framemanager_ref(MixFrameManager * mix); + +/** + * mix_framemanager_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. 
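+ *
+ * A typical lifecycle ends here (sketch only; the framerate values and the
+ * two threads are assumptions for illustration):
+ * |[
+ * MixFrameManager *fm = mix_framemanager_new ();
+ * mix_framemanager_initialize (fm, MIX_FRAMEORDER_MODE_DISPLAYORDER,
+ *                              30, 1, TRUE);
+ *
+ * // decoder side: mix_framemanager_enqueue (fm, frame);
+ * // render side:  mix_framemanager_dequeue (fm, &frame);
+ *
+ * mix_framemanager_eos (fm);
+ * mix_framemanager_deinitialize (fm);
+ * mix_framemanager_unref (fm);
+ * ]|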
+ */ +#define mix_framemanager_unref(obj) g_object_unref (G_OBJECT(obj)) + +/* Class Methods */ + +/* + * Initialize FM + */ +MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, + MixFrameOrderMode mode, gint framerate_numerator, + gint framerate_denominator, gboolean timebased_ordering); +/* + * Deinitialize FM + */ +MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm); + +/* + * Set new framerate + */ +MIX_RESULT mix_framemanager_set_framerate(MixFrameManager *fm, + gint framerate_numerator, gint framerate_denominator); + +/* + * Get framerate + */ +MIX_RESULT mix_framemanager_get_framerate(MixFrameManager *fm, + gint *framerate_numerator, gint *framerate_denominator); + + +/* + * Get Frame Order Mode + */ +MIX_RESULT mix_framemanager_get_frame_order_mode(MixFrameManager *fm, + MixFrameOrderMode *mode); + +/* + * For discontiunity, reset FM + */ +MIX_RESULT mix_framemanager_flush(MixFrameManager *fm); + +/* + * Enqueue MixVideoFrame + */ +MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf); + +/* + * Dequeue MixVideoFrame in proper order depends on MixFrameOrderMode value + * during initialization. + */ +MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf); + +/* + * End of stream. + */ +MIX_RESULT mix_framemanager_eos(MixFrameManager *fm); + + +#endif /* __MIX_FRAMEMANAGER_H__ */ diff --git a/mix_video/src/mixsurfacepool.c b/mix_video/src/mixsurfacepool.c new file mode 100644 index 0000000..f7672c8 --- /dev/null +++ b/mix_video/src/mixsurfacepool.c @@ -0,0 +1,652 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixsurfacepool + * @short_description: MI-X Video Surface Pool + * + * A data object which stores and manipulates a pool of video surfaces. 
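+ *
+ * A usage sketch (the VASurfaceID array would normally come from
+ * vaCreateSurfaces(); NUM_SURFACES is a placeholder, error checks omitted):
+ * |[
+ * VASurfaceID surfaces[NUM_SURFACES]; // filled in by libva
+ * MixSurfacePool *pool = mix_surfacepool_new ();
+ * MixVideoFrame *frame = NULL;
+ *
+ * mix_surfacepool_initialize (pool, surfaces, NUM_SURFACES);
+ * mix_surfacepool_get (pool, &frame);  // frame wraps a free surface
+ * // ... decode into the surface, render it ...
+ * mix_surfacepool_put (pool, frame);   // back to the free list
+ * ]|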
+ */ + +#include "mixvideolog.h" +#include "mixsurfacepool.h" +#include "mixvideoframe_private.h" + +#define MIX_LOCK(lock) g_mutex_lock(lock); +#define MIX_UNLOCK(lock) g_mutex_unlock(lock); + +#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } + +static GType _mix_surfacepool_type = 0; +static MixParamsClass *parent_class = NULL; + +#define _do_init { _mix_surfacepool_type = g_define_type_id; } + +gboolean mix_surfacepool_copy(MixParams * target, const MixParams * src); +MixParams *mix_surfacepool_dup(const MixParams * obj); +gboolean mix_surfacepool_equal(MixParams * first, MixParams * second); +static void mix_surfacepool_finalize(MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixSurfacePool, mix_surfacepool, MIX_TYPE_PARAMS, + _do_init); + +static void mix_surfacepool_init(MixSurfacePool * self) { + /* initialize properties here */ + self->free_list = NULL; + self->in_use_list = NULL; + self->free_list_max_size = 0; + self->free_list_cur_size = 0; + self->high_water_mark = 0; + + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; + + // TODO: relocate this mutex allocation -we can't communicate failure in ctor. + // Note that g_thread_init() has already been called by mix_video_init() + self->objectlock = g_mutex_new(); + +} + +static void mix_surfacepool_class_init(MixSurfacePoolClass * klass) { + MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); + + /* setup static parent class */ + parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); + + mixparams_class->finalize = mix_surfacepool_finalize; + mixparams_class->copy = (MixParamsCopyFunction) mix_surfacepool_copy; + mixparams_class->dup = (MixParamsDupFunction) mix_surfacepool_dup; + mixparams_class->equal = (MixParamsEqualFunction) mix_surfacepool_equal; +} + +MixSurfacePool * +mix_surfacepool_new(void) { + MixSurfacePool *ret = (MixSurfacePool *) g_type_create_instance( + MIX_TYPE_SURFACEPOOL); + return ret; +} + +void mix_surfacepool_finalize(MixParams * obj) { + /* clean up here. */ + + MixSurfacePool *self = MIX_SURFACEPOOL(obj); + + if (self->objectlock) { + g_mutex_free(self->objectlock); + self->objectlock = NULL; + } + + /* Chain up parent */ + if (parent_class->finalize) { + parent_class->finalize(obj); + } +} + +MixSurfacePool * +mix_surfacepool_ref(MixSurfacePool * mix) { + return (MixSurfacePool *) mix_params_ref(MIX_PARAMS(mix)); +} + +/** + * mix_surfacepool_dup: + * @obj: a #MixSurfacePool object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. + */ +MixParams * +mix_surfacepool_dup(const MixParams * obj) { + MixParams *ret = NULL; + + if (MIX_IS_SURFACEPOOL(obj)) { + + MIX_LOCK(MIX_SURFACEPOOL(obj)->objectlock); + + MixSurfacePool *duplicate = mix_surfacepool_new(); + if (mix_surfacepool_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { + ret = MIX_PARAMS(duplicate); + } else { + mix_surfacepool_unref(duplicate); + } + + MIX_UNLOCK(MIX_SURFACEPOOL(obj)->objectlock); + + } + return ret; +} + +/** + * mix_surfacepool_copy: + * @target: copy to target + * @src: copy from src + * @returns: boolean indicates if copy is successful. + * + * Copy instance data from @src to @target. 
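+ * Note: as implemented below, the GSList heads and the counters are copied
+ * by pointer (a shallow copy), so after the copy both pools reference the
+ * very same free and in-use lists.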
+ */
+gboolean mix_surfacepool_copy(MixParams * target, const MixParams * src) {
+	MixSurfacePool *this_target, *this_src;
+
+	if (MIX_IS_SURFACEPOOL(target) && MIX_IS_SURFACEPOOL(src)) {
+
+		MIX_LOCK(MIX_SURFACEPOOL(src)->objectlock);
+		MIX_LOCK(MIX_SURFACEPOOL(target)->objectlock);
+
+		// Cast the base object to this child object
+		this_target = MIX_SURFACEPOOL(target);
+		this_src = MIX_SURFACEPOOL(src);
+
+		// Copy the list heads and counters (a shallow copy: both pools
+		// will reference the same lists)
+		this_target->free_list = this_src->free_list;
+		this_target->in_use_list = this_src->in_use_list;
+		this_target->free_list_max_size = this_src->free_list_max_size;
+		this_target->free_list_cur_size = this_src->free_list_cur_size;
+		this_target->high_water_mark = this_src->high_water_mark;
+
+		MIX_UNLOCK(MIX_SURFACEPOOL(src)->objectlock);
+		MIX_UNLOCK(MIX_SURFACEPOOL(target)->objectlock);
+
+		// Now chainup base class
+		if (parent_class->copy) {
+			return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+					src));
+		} else {
+			return TRUE;
+		}
+	}
+	return FALSE;
+}
+
+/**
+ * mix_surfacepool_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the instances are equal.
+ *
+ * Compare the two surface pools for equality.
+ */
+gboolean mix_surfacepool_equal(MixParams * first, MixParams * second) {
+	gboolean ret = FALSE;
+	MixSurfacePool *this_first, *this_second;
+
+	if (MIX_IS_SURFACEPOOL(first) && MIX_IS_SURFACEPOOL(second)) {
+		// Deep compare
+		// Cast the base object to this child object
+
+		MIX_LOCK(MIX_SURFACEPOOL(first)->objectlock);
+		MIX_LOCK(MIX_SURFACEPOOL(second)->objectlock);
+
+		this_first = MIX_SURFACEPOOL(first);
+		this_second = MIX_SURFACEPOOL(second);
+
+		/* TODO: add comparison for other properties */
+		if (this_first->free_list == this_second->free_list
+				&& this_first->in_use_list == this_second->in_use_list
+				&& this_first->free_list_max_size
+						== this_second->free_list_max_size
+				&& this_first->free_list_cur_size
+						== this_second->free_list_cur_size
+				&& this_first->high_water_mark == this_second->high_water_mark) {
+			// members within this scope equal. chaining up.
+			MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+			if (klass->equal)
+				ret = klass->equal(first, second);
+			else
+				ret = TRUE;
+		}
+
+		MIX_UNLOCK(MIX_SURFACEPOOL(first)->objectlock);
+		MIX_UNLOCK(MIX_SURFACEPOOL(second)->objectlock);
+
+	}
+
+	return ret;
+}
+
+/* Class Methods */
+
+/**
+ * mix_surfacepool_initialize:
+ * @returns: MIX_RESULT_SUCCESS if successful in creating the surface pool
+ *
+ * Use this method to create a new surface pool, consisting of a GSList of
+ * frame objects that represents a pool of surfaces.
+ */
+MIX_RESULT mix_surfacepool_initialize(MixSurfacePool * obj,
+		VASurfaceID *surfaces, guint num_surfaces) {
+
+	LOG_V( "Begin\n");
+
+	if (obj == NULL || surfaces == NULL) {
+
+		LOG_E(
+				"Error NULL ptrs, obj %x, surfaces %x\n", (guint) obj,
+				(guint) surfaces);
+
+		return MIX_RESULT_NULL_PTR;
+	}
+
+	MIX_LOCK(obj->objectlock);
+
+	if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) {
+		//surface pool is in use; return error; need proper cleanup
+		//TODO need cleanup here?
+
+/* Class Methods */
+
+/**
+ * mix_surfacepool_initialize:
+ * @returns: MIX_RESULT_SUCCESS if successful in creating the surface pool
+ *
+ * Use this method to create a new surface pool, consisting of a GSList of
+ * frame objects that represents a pool of surfaces.
+ */
+MIX_RESULT mix_surfacepool_initialize(MixSurfacePool * obj,
+        VASurfaceID *surfaces, guint num_surfaces) {
+
+    LOG_V( "Begin\n");
+
+    if (obj == NULL || surfaces == NULL) {
+
+        LOG_E(
+                "Error NULL ptrs, obj %x, surfaces %x\n", (guint) obj,
+                (guint) surfaces);
+
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    MIX_LOCK(obj->objectlock);
+
+    if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) {
+        //surface pool is in use; return error; need proper cleanup
+        //TODO need cleanup here?
+
+        MIX_UNLOCK(obj->objectlock);
+
+        return MIX_RESULT_ALREADY_INIT;
+    }
+
+    if (num_surfaces == 0) {
+        obj->free_list = NULL;
+
+        obj->in_use_list = NULL;
+
+        obj->free_list_max_size = num_surfaces;
+
+        obj->free_list_cur_size = num_surfaces;
+
+        obj->high_water_mark = 0;
+
+        MIX_UNLOCK(obj->objectlock);
+
+        return MIX_RESULT_SUCCESS;
+    }
+
+    // Initialize the free pool with frame objects
+
+    guint i = 0;
+    MixVideoFrame *frame = NULL;
+
+    for (; i < num_surfaces; i++) {
+
+        //Create a frame object for each surface ID
+        frame = mix_videoframe_new();
+
+        if (frame == NULL) {
+            //TODO need to log an error here and do cleanup
+
+            MIX_UNLOCK(obj->objectlock);
+
+            return MIX_RESULT_NO_MEMORY;
+        }
+
+        // Set the frame ID to the surface ID
+        mix_videoframe_set_frame_id(frame, surfaces[i]);
+        // Set the CI frame index to the array index
+        mix_videoframe_set_ci_frame_idx (frame, i);
+        // Leave timestamp for each frame object as zero
+        // Set the pool reference in the private data of the frame object
+        mix_videoframe_set_pool(frame, obj);
+
+        //Add each frame object to the pool list
+        obj->free_list = g_slist_append(obj->free_list, frame);
+
+    }
+
+    obj->in_use_list = NULL;
+
+    obj->free_list_max_size = num_surfaces;
+
+    obj->free_list_cur_size = num_surfaces;
+
+    obj->high_water_mark = 0;
+
+    MIX_UNLOCK(obj->objectlock);
+
+    LOG_V( "End\n");
+
+    return MIX_RESULT_SUCCESS;
+}
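A minimal initialization sketch, assuming the VASurfaceID array was filled in by an earlier vaCreateSurfaces() call (POOL_SIZE and va_surfaces are illustrative names, not part of this patch):

    #define POOL_SIZE 8
    VASurfaceID va_surfaces[POOL_SIZE]; /* populated by vaCreateSurfaces() elsewhere */
    MixSurfacePool *pool = mix_surfacepool_new();
    if (mix_surfacepool_initialize(pool, va_surfaces, POOL_SIZE) != MIX_RESULT_SUCCESS) {
        /* MIX_RESULT_ALREADY_INIT means the pool already holds a surface list */
        mix_surfacepool_unref(pool);
    }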
+
+/**
+ * mix_surfacepool_put:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to return a surface to the free pool
+ */
+MIX_RESULT mix_surfacepool_put(MixSurfacePool * obj, MixVideoFrame * frame) {
+
+    LOG_V( "Begin\n");
+    if (obj == NULL || frame == NULL)
+        return MIX_RESULT_NULL_PTR;
+
+    LOG_V( "Frame id: %d\n", frame->frame_id);
+    MIX_LOCK(obj->objectlock);
+
+    if (obj->in_use_list == NULL) {
+        //in use list cannot be empty if a frame is in use
+        //TODO need better error code for this
+
+        MIX_UNLOCK(obj->objectlock);
+
+        return MIX_RESULT_FAIL;
+    }
+
+    GSList *element = g_slist_find(obj->in_use_list, frame);
+    if (element == NULL) {
+        //Integrity error; frame not found in in use list
+        //TODO need better error code and handling for this
+
+        MIX_UNLOCK(obj->objectlock);
+
+        return MIX_RESULT_FAIL;
+    } else {
+        //Remove this element from the in_use_list
+        obj->in_use_list = g_slist_remove_link(obj->in_use_list, element);
+
+        //Concat the element to the free_list and reset the timestamp of the frame
+        //Note that the surface ID stays valid
+        mix_videoframe_set_timestamp(frame, 0);
+        obj->free_list = g_slist_concat(obj->free_list, element);
+
+        //increment the free list count
+        obj->free_list_cur_size++;
+    }
+
+    //Note that we do nothing with the ref count for this. We want it to
+    //stay at 1, which is what triggered it to be added back to the free list.
+
+    MIX_UNLOCK(obj->objectlock);
+
+    LOG_V( "End\n");
+    return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_surfacepool_get:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to get a surface from the free pool
+ */
+MIX_RESULT mix_surfacepool_get(MixSurfacePool * obj, MixVideoFrame ** frame) {
+
+    LOG_V( "Begin\n");
+
+    if (obj == NULL || frame == NULL)
+        return MIX_RESULT_NULL_PTR;
+
+    MIX_LOCK(obj->objectlock);
+
+#if 0
+    if (obj->free_list == NULL) {
+#else
+    if (obj->free_list_cur_size <= 1) { //Keep one surface free at all times for VBLANK bug
+#endif
+        //We are out of surfaces
+        //TODO need to log this as well
+
+        MIX_UNLOCK(obj->objectlock);
+
+        LOG_E( "out of surfaces\n");
+
+        return MIX_RESULT_NO_MEMORY;
+    }
+
+    //Remove a frame from the free pool
+
+    //We just remove the one at the head, since it's convenient
+    GSList *element = obj->free_list;
+    obj->free_list = g_slist_remove_link(obj->free_list, element);
+    if (element == NULL) {
+        //Unexpected behavior
+        //TODO need better error code and handling for this
+
+        MIX_UNLOCK(obj->objectlock);
+
+        LOG_E( "Element is null\n");
+
+        return MIX_RESULT_FAIL;
+    } else {
+        //Concat the element to the in_use_list
+        obj->in_use_list = g_slist_concat(obj->in_use_list, element);
+
+        //TODO replace with proper logging
+
+        LOG_I( "frame refcount %d\n",
+                MIX_PARAMS(element->data)->refcount);
+
+        //Set the out frame pointer
+        *frame = (MixVideoFrame *) element->data;
+
+        LOG_V( "Frame id: %d\n", (*frame)->frame_id);
+
+        //decrement the free list count
+        obj->free_list_cur_size--;
+
+        //Check the high water mark for surface use
+        guint size = g_slist_length(obj->in_use_list);
+        if (size > obj->high_water_mark)
+            obj->high_water_mark = size;
+        //TODO Log this high water mark
+    }
+
+    //Increment the reference count for the frame
+    mix_videoframe_ref(*frame);
+
+    MIX_UNLOCK(obj->objectlock);
+
+    LOG_V( "End\n");
+
+    return MIX_RESULT_SUCCESS;
+}
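As the comments above describe, callers never invoke mix_surfacepool_put() directly: mix_surfacepool_get() takes an extra reference, and dropping that reference hands the surface back to the pool. A minimal sketch, assuming `pool` is an initialized MixSurfacePool:

    MixVideoFrame *frame = NULL;
    if (mix_surfacepool_get(pool, &frame) == MIX_RESULT_SUCCESS) {
        /* frame->frame_id carries the VASurfaceID to decode into or render from */
        mix_videoframe_unref(frame); /* returns the surface to the free list */
    }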
+
+
+gint mixframe_compare_index (MixVideoFrame * a, MixVideoFrame * b)
+{
+    if (a == NULL || b == NULL)
+        return -1;
+    if (a->ci_frame_idx == b->ci_frame_idx)
+        return 0;
+    else
+        return -1;
+}
+
+/**
+ * mix_surfacepool_get_frame_with_ci_frameidx:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to get a surface from the free pool according to the CI frame idx
+ */
+
+MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool * obj, MixVideoFrame ** frame, MixVideoFrame *in_frame) {
+
+    LOG_V( "Begin\n");
+
+    if (obj == NULL || frame == NULL)
+        return MIX_RESULT_NULL_PTR;
+
+    MIX_LOCK(obj->objectlock);
+
+    if (obj->free_list == NULL) {
+        //We are out of surfaces
+        //TODO need to log this as well
+
+        MIX_UNLOCK(obj->objectlock);
+
+        LOG_E( "out of surfaces\n");
+
+        return MIX_RESULT_NO_MEMORY;
+    }
+
+    //Remove a frame from the free pool
+
+    //We just remove the one at the head, since it's convenient
+    GSList *element = g_slist_find_custom (obj->free_list, in_frame, (GCompareFunc) mixframe_compare_index);
+    obj->free_list = g_slist_remove_link(obj->free_list, element);
+    if (element == NULL) {
+        //Unexpected behavior
+        //TODO need better error code and handling for this
+
+        MIX_UNLOCK(obj->objectlock);
+
+        LOG_E( "Element is null\n");
+
+        return MIX_RESULT_FAIL;
+    } else {
+        //Concat the element to the in_use_list
+        obj->in_use_list = g_slist_concat(obj->in_use_list, element);
+
+        //TODO replace with proper logging
+
+        LOG_I( "frame refcount %d\n",
+                MIX_PARAMS(element->data)->refcount);
+
+        //Set the out frame pointer
+        *frame = (MixVideoFrame *) element->data;
+
+        //Check the high water mark for surface use
+        guint size = g_slist_length(obj->in_use_list);
+        if (size > obj->high_water_mark)
+            obj->high_water_mark = size;
+        //TODO Log this high water mark
+    }
+
+    //Increment the reference count for the frame
+    mix_videoframe_ref(*frame);
+
+    MIX_UNLOCK(obj->objectlock);
+
+    LOG_V( "End\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+/**
+ * mix_surfacepool_check_available:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to check availability of getting a surface from the free pool
+ */
+MIX_RESULT mix_surfacepool_check_available(MixSurfacePool * obj) {
+
+    LOG_V( "Begin\n");
+
+    if (obj == NULL)
+        return MIX_RESULT_NULL_PTR;
+
+    MIX_LOCK(obj->objectlock);
+
+#if 0
+    if (obj->free_list == NULL) {
+#else
+    if (obj->free_list_cur_size <= 1) { //Keep one surface free at all times for VBLANK bug
+#endif
+        //We are out of surfaces
+
+        MIX_UNLOCK(obj->objectlock);
+
+        LOG_W(
+                "Returning MIX_RESULT_POOLEMPTY because out of surfaces\n");
+
+        return MIX_RESULT_POOLEMPTY;
+    } else {
+        //Pool is not empty
+
+        MIX_UNLOCK(obj->objectlock);
+
+        LOG_I(
+                "Returning MIX_RESULT_SUCCESS because surfaces are available\n");
+
+        return MIX_RESULT_SUCCESS;
+    }
+
+}
+
+/**
+ * mix_surfacepool_deinitialize:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to teardown a surface pool
+ */
+MIX_RESULT mix_surfacepool_deinitialize(MixSurfacePool * obj) {
+    if (obj == NULL)
+        return MIX_RESULT_NULL_PTR;
+
+    MIX_LOCK(obj->objectlock);
+
+    if ((obj->in_use_list != NULL) || (g_slist_length(obj->free_list)
+            != obj->free_list_max_size)) {
+        //TODO better error code
+        //We have outstanding frame objects in use and they need to be
+        //freed before we can deinitialize.
+
+        MIX_UNLOCK(obj->objectlock);
+
+        return MIX_RESULT_FAIL;
+    }
+
+    //Now remove frame objects from the list
+
+    MixVideoFrame *frame = NULL;
+
+    while (obj->free_list != NULL) {
+        //Get the frame object from the head of the list
+        frame = obj->free_list->data;
+        //frame = g_slist_nth_data(obj->free_list, 0);
+
+        //Release it
+        mix_videoframe_unref(frame);
+
+        //Delete the head node of the list and store the new head
+        obj->free_list = g_slist_delete_link(obj->free_list, obj->free_list);
+
+        //Repeat until empty
+    }
+
+    obj->free_list_max_size = 0;
+    obj->free_list_cur_size = 0;
+
+    //May want to log this information for tuning
+    obj->high_water_mark = 0;
+
+    MIX_UNLOCK(obj->objectlock);
+
+    return MIX_RESULT_SUCCESS;
+}
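Because mix_surfacepool_get() reserves one surface for the VBLANK workaround, callers are expected to poll availability first, as mix_video_decode() does later in mixvideo.c. A short sketch under that assumption:

    if (mix_surfacepool_check_available(pool) == MIX_RESULT_POOLEMPTY) {
        /* back off until the renderer releases a frame */
    } else {
        /* safe to call mix_surfacepool_get() now */
    }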
+
+#define MIX_SURFACEPOOL_SETTER_CHECK_INPUT(obj) \
+    if(!obj) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_SURFACEPOOL(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_SURFACEPOOL_GETTER_CHECK_INPUT(obj, prop) \
+    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_SURFACEPOOL(obj)) return MIX_RESULT_FAIL; \
+
+
+MIX_RESULT
+mix_surfacepool_dumpframe(MixVideoFrame *frame)
+{
+    LOG_I( "\tFrame %x, id %lu, refcount %d, ts %lu\n", (guint)frame,
+            frame->frame_id, MIX_PARAMS(frame)->refcount, (gulong) frame->timestamp);
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT
+mix_surfacepool_dumpprint (MixSurfacePool * obj)
+{
+    //TODO replace this with proper logging later
+
+    LOG_I( "SURFACE POOL DUMP:\n");
+    LOG_I( "Free list size is %lu\n", obj->free_list_cur_size);
+    LOG_I( "In use list size is %u\n", g_slist_length(obj->in_use_list));
+    LOG_I( "High water mark is %lu\n", obj->high_water_mark);
+
+    //Walk the free list and report the contents
+    LOG_I( "Free list contents:\n");
+    g_slist_foreach(obj->free_list, (GFunc) mix_surfacepool_dumpframe, NULL);
+
+    //Walk the in_use list and report the contents
+    LOG_I( "In Use list contents:\n");
+    g_slist_foreach(obj->in_use_list, (GFunc) mix_surfacepool_dumpframe, NULL);
+
+    return MIX_RESULT_SUCCESS;
+}
diff --git a/mix_video/src/mixsurfacepool.h b/mix_video/src/mixsurfacepool.h
new file mode 100644
index 0000000..6468ebe
--- /dev/null
+++ b/mix_video/src/mixsurfacepool.h
@@ -0,0 +1,158 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_SURFACEPOOL_H__
+#define __MIX_SURFACEPOOL_H__
+
+#include <glib-object.h>
+#include "mixvideodef.h"
+#include "mixvideoframe.h"
+
+#include <va/va.h>
+
+G_BEGIN_DECLS
+
+/**
+* MIX_TYPE_SURFACEPOOL:
+*
+* Get type of class.
+*/
+#define MIX_TYPE_SURFACEPOOL (mix_surfacepool_get_type ())
+
+/**
+* MIX_SURFACEPOOL:
+* @obj: object to be type-casted.
+*/
+#define MIX_SURFACEPOOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_SURFACEPOOL, MixSurfacePool))
+
+/**
+* MIX_IS_SURFACEPOOL:
+* @obj: an object.
+*
+* Checks if the given object is an instance of #MixSurfacePool
+*/
+#define MIX_IS_SURFACEPOOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_SURFACEPOOL))
+
+/**
+* MIX_SURFACEPOOL_CLASS:
+* @klass: class to be type-casted.
+*/
+#define MIX_SURFACEPOOL_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_SURFACEPOOL, MixSurfacePoolClass))
+
+/**
+* MIX_IS_SURFACEPOOL_CLASS:
+* @klass: a class.
+*
+* Checks if the given class is #MixSurfacePoolClass
+*/
+#define MIX_IS_SURFACEPOOL_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_SURFACEPOOL))
+
+/**
+* MIX_SURFACEPOOL_GET_CLASS:
+* @obj: a #MixSurfacePool object.
+*
+* Get the class instance of the object.
+*/
+#define MIX_SURFACEPOOL_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_SURFACEPOOL, MixSurfacePoolClass))
+
+typedef struct _MixSurfacePool MixSurfacePool;
+typedef struct _MixSurfacePoolClass MixSurfacePoolClass;
+
+/**
+* MixSurfacePool:
+*
+* MI-X Video Surface Pool object
+*/
+struct _MixSurfacePool
+{
+  /*< public > */
+  MixParams parent;
+
+  /*< public > */
+  GSList *free_list;            /* list of free surfaces */
+  GSList *in_use_list;          /* list of surfaces in use */
+  gulong free_list_max_size;    /* initial size of the free list */
+  gulong free_list_cur_size;    /* current size of the free list */
+  gulong high_water_mark;       /* most surfaces in use at one time */
+//  guint64 timestamp;
+
+  void *reserved1;
+  void *reserved2;
+  void *reserved3;
+  void *reserved4;
+
+  /*< private > */
+  GMutex *objectlock;
+
+};
+
+/**
+* MixSurfacePoolClass:
+*
+* MI-X Video Surface Pool object class
+*/
+struct _MixSurfacePoolClass
+{
+  /*< public > */
+  MixParamsClass parent_class;
+
+  /* class members */
+};
+
+/**
+* mix_surfacepool_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_surfacepool_get_type (void);
+
+/**
+* mix_surfacepool_new:
+* @returns: A newly allocated instance of #MixSurfacePool
+*
+* Use this method to create new instance of #MixSurfacePool
+*/
+MixSurfacePool *mix_surfacepool_new (void);
+/**
+* mix_surfacepool_ref:
+* @mix: object to add reference
+* @returns: the MixSurfacePool instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixSurfacePool *mix_surfacepool_ref (MixSurfacePool * mix);
+
+/**
+* mix_surfacepool_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_surfacepool_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+MIX_RESULT mix_surfacepool_initialize (MixSurfacePool * obj,
+        VASurfaceID *surfaces, guint num_surfaces);
+MIX_RESULT mix_surfacepool_put (MixSurfacePool * obj,
+        MixVideoFrame * frame);
+
+MIX_RESULT mix_surfacepool_get (MixSurfacePool * obj,
+        MixVideoFrame ** frame);
+
+MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool * obj,
+        MixVideoFrame ** frame, MixVideoFrame *in_frame);
+
+MIX_RESULT mix_surfacepool_check_available (MixSurfacePool * obj);
+
+MIX_RESULT mix_surfacepool_deinitialize (MixSurfacePool * obj);
+
+G_END_DECLS
+
+#endif /* __MIX_SURFACEPOOL_H__ */
diff --git a/mix_video/src/mixvideo.c b/mix_video/src/mixvideo.c
new file mode 100644
index 0000000..e9cba0a
--- /dev/null
+++ b/mix_video/src/mixvideo.c
@@ -0,0 +1,1638 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#include <va/va.h>     /* libVA */
+#include <X11/Xlib.h>
+#include <va/va_x11.h>
+
+#include "mixvideolog.h"
+
+#include "mixdisplayx11.h"
+#include "mixvideoframe.h"
+
+#include "mixframemanager.h"
+#include "mixvideorenderparams.h"
+#include "mixvideorenderparams_internal.h"
+
+#include "mixvideoformat.h"
+#include "mixvideoformat_vc1.h"
+#include "mixvideoformat_h264.h"
+#include "mixvideoformat_mp42.h"
+
+#include "mixvideoconfigparamsdec_vc1.h"
+#include "mixvideoconfigparamsdec_h264.h"
+#include "mixvideoconfigparamsdec_mp42.h"
+
+#include "mixvideoformatenc.h"
+#include "mixvideoformatenc_h264.h"
+#include "mixvideoformatenc_mpeg4.h"
+#include "mixvideoformatenc_preview.h"
+
+#include "mixvideoconfigparamsenc_h264.h"
+#include "mixvideoconfigparamsenc_mpeg4.h"
+#include "mixvideoconfigparamsenc_preview.h"
+
+
+#include "mixvideo.h"
+#include "mixvideo_private.h"
+
+#define USE_OPAQUE_POINTER
+
+#ifdef USE_OPAQUE_POINTER
+#define MIX_VIDEO_PRIVATE(mix) (MixVideoPrivate *)(mix->context)
+#else
+#define MIX_VIDEO_PRIVATE(mix) MIX_VIDEO_GET_PRIVATE(mix)
+#endif
+
+#define CHECK_INIT(mix, priv) \
+    if (!mix) { \
+        return MIX_RESULT_NULL_PTR; \
+    } \
+    if (!MIX_IS_VIDEO(mix)) { \
+        LOG_E( "Not MixVideo\n"); \
+        return MIX_RESULT_INVALID_PARAM; \
+    } \
+    priv = MIX_VIDEO_PRIVATE(mix); \
+    if (!priv->initialized) { \
+        LOG_E( "Not initialized\n"); \
+        return MIX_RESULT_NOT_INIT; \
+    }
+
+#define CHECK_INIT_CONFIG(mix, priv) \
+    CHECK_INIT(mix, priv); \
+    if (!priv->configured) { \
+        LOG_E( "Not configured\n"); \
+        return MIX_RESULT_NOT_CONFIGURED; \
+    }
+
+/*
+ * default implementation of virtual methods
+ */
+
+MIX_RESULT mix_video_get_version_default(MixVideo * mix, guint * major,
+        guint * minor);
+
+MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode,
+        MixVideoInitParams * init_params, MixDrmParams * drm_init_params);
+
+MIX_RESULT mix_video_deinitialize_default(MixVideo * mix);
+
+MIX_RESULT mix_video_configure_default(MixVideo * mix,
+        MixVideoConfigParams * config_params, MixDrmParams * drm_config_params);
+
+MIX_RESULT mix_video_get_config_default(MixVideo * mix,
+        MixVideoConfigParams ** config_params);
+
+MIX_RESULT mix_video_decode_default(MixVideo * mix, MixBuffer * bufin[],
+        gint bufincnt, MixVideoDecodeParams * decode_params);
+
+MIX_RESULT mix_video_get_frame_default(MixVideo * mix, MixVideoFrame ** frame);
+
+MIX_RESULT mix_video_release_frame_default(MixVideo * mix,
+        MixVideoFrame * frame);
+
+MIX_RESULT mix_video_render_default(MixVideo * mix,
+        MixVideoRenderParams * render_params, MixVideoFrame *frame);
+
+MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[],
+        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params);
+
+MIX_RESULT mix_video_flush_default(MixVideo * mix);
+
+MIX_RESULT mix_video_eos_default(MixVideo * mix);
+
+MIX_RESULT mix_video_get_state_default(MixVideo * mix, MixState * state);
+
+MIX_RESULT mix_video_get_mixbuffer_default(MixVideo * mix, MixBuffer ** buf);
+
+MIX_RESULT mix_video_release_mixbuffer_default(MixVideo * mix, MixBuffer * buf);
+
+MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, guint *max_size);
+
+
+static void mix_video_finalize(GObject * obj);
+MIX_RESULT mix_video_configure_decode(MixVideo * mix,
+        MixVideoConfigParamsDec * config_params_dec,
+        MixDrmParams * drm_config_params);
+
+MIX_RESULT mix_video_configure_encode(MixVideo * mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixDrmParams * drm_config_params);
+
+G_DEFINE_TYPE( MixVideo, mix_video, G_TYPE_OBJECT);
+
+static void mix_video_init(MixVideo * self) {
+
+    MixVideoPrivate *priv = MIX_VIDEO_GET_PRIVATE(self);
+
+#ifdef USE_OPAQUE_POINTER
+    self->context = priv;
+#else
+    self->context = NULL;
+#endif
+
+    /* private structure initialization */
+
+    mix_video_private_initialize(priv);
+}
+
+static void mix_video_class_init(MixVideoClass * klass) {
+    GObjectClass *gobject_class = (GObjectClass *) klass;
+
+    gobject_class->finalize = mix_video_finalize;
+
+    /* Register and allocate space for the private structure of this object */
+    g_type_class_add_private(gobject_class, sizeof(MixVideoPrivate));
+
+    klass->get_version_func = mix_video_get_version_default;
+    klass->initialize_func = mix_video_initialize_default;
+    klass->deinitialize_func = mix_video_deinitialize_default;
+    klass->configure_func = mix_video_configure_default;
+    klass->get_config_func = mix_video_get_config_default;
+    klass->decode_func = mix_video_decode_default;
+    klass->get_frame_func = mix_video_get_frame_default;
+    klass->release_frame_func = mix_video_release_frame_default;
+    klass->render_func = mix_video_render_default;
+    klass->encode_func = mix_video_encode_default;
+    klass->flush_func = mix_video_flush_default;
+    klass->eos_func = mix_video_eos_default;
+    klass->get_state_func = mix_video_get_state_default;
+    klass->get_mix_buffer_func = mix_video_get_mixbuffer_default;
+    klass->release_mix_buffer_func = mix_video_release_mixbuffer_default;
+    klass->get_max_coded_buffer_size_func = mix_video_get_max_coded_buffer_size_default;
+}
+
+MixVideo *mix_video_new(void) {
+
+    MixVideo *ret = g_object_new(MIX_TYPE_VIDEO, NULL);
+
+    return ret;
+}
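MixVideo follows plain GObject reference counting; a minimal lifetime sketch (illustrative only, using the ref/unref helpers defined in this file and in mixvideo.h):

    MixVideo *video = mix_video_new();       /* refcount 1 */
    MixVideo *extra = mix_video_ref(video);  /* refcount 2 */
    mix_video_unref(extra);
    mix_video_unref(video);                  /* finalize runs, calling mix_video_deinitialize() */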
+
+void mix_video_finalize(GObject * obj) {
+
+    /* clean up here. */
+
+    MixVideo *mix = MIX_VIDEO(obj);
+    mix_video_deinitialize(mix);
+
+    /* Chain up to the parent class */
+    if (G_OBJECT_CLASS(mix_video_parent_class)->finalize) {
+        G_OBJECT_CLASS(mix_video_parent_class)->finalize(obj);
+    }
+}
+
+MixVideo *
+mix_video_ref(MixVideo * mix) {
+    return (MixVideo *) g_object_ref(G_OBJECT(mix));
+}
+
+/* private methods */
+
+#define MIXUNREF(obj, unref) if(obj) { unref(obj); obj = NULL; }
+
+void mix_video_private_initialize(MixVideoPrivate* priv) {
+    priv->objlock = NULL;
+    priv->initialized = FALSE;
+    priv->configured = FALSE;
+
+    /* libVA */
+    priv->va_display = NULL;
+    priv->va_major_version = -1;
+    priv->va_minor_version = -1;
+
+    /* mix objects */
+    priv->frame_manager = NULL;
+    priv->video_format = NULL;
+    priv->video_format_enc = NULL; //for encoding
+    priv->surface_pool = NULL;
+    priv->buffer_pool = NULL;
+
+    priv->codec_mode = MIX_CODEC_MODE_DECODE;
+    priv->init_params = NULL;
+    priv->drm_params = NULL;
+    priv->config_params = NULL;
+}
+
+void mix_video_private_cleanup(MixVideoPrivate* priv) {
+
+    VAStatus va_status;
+
+    if (!priv) {
+        return;
+    }
+
+    if (priv->video_format_enc) {
+        mix_videofmtenc_deinitialize(priv->video_format_enc);
+    }
+
+    MIXUNREF(priv->frame_manager, mix_framemanager_unref)
+    MIXUNREF(priv->video_format, mix_videoformat_unref)
+    MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref)
+    //for encoding
+    MIXUNREF(priv->buffer_pool, mix_bufferpool_unref)
+    MIXUNREF(priv->surface_pool, mix_surfacepool_unref)
+/*  MIXUNREF(priv->init_params, mix_videoinitparams_unref) */
+    MIXUNREF(priv->drm_params, mix_drmparams_unref)
+    MIXUNREF(priv->config_params, mix_videoconfigparams_unref)
+
+    /* terminate libVA */
+    if (priv->va_display) {
+        va_status = vaTerminate(priv->va_display);
+        LOG_V( "vaTerminate\n");
+        if (va_status != VA_STATUS_SUCCESS) {
+            LOG_W( "Failed vaTerminate\n");
+        } else {
+            priv->va_display = NULL;
+        }
+    }
+
+    MIXUNREF(priv->init_params, mix_videoinitparams_unref)
+
+    priv->va_major_version = -1;
+    priv->va_minor_version = -1;
+
+    if (priv->objlock) {
+        g_mutex_free(priv->objlock);
+        priv->objlock = NULL;
+    }
+
+    priv->codec_mode = MIX_CODEC_MODE_DECODE;
+    priv->initialized = FALSE;
+    priv->configured = FALSE;
+}
+
+/* The following methods are defined in MI-X API */
+
+MIX_RESULT mix_video_get_version_default(MixVideo * mix, guint * major,
+        guint * minor) {
+    if (!mix || !major || !minor) {
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    if (!MIX_IS_VIDEO(mix)) {
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+    *major = MIXVIDEO_CURRENT - MIXVIDEO_AGE;
+    *minor = MIXVIDEO_AGE;
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode,
+        MixVideoInitParams * init_params, MixDrmParams * drm_init_params) {
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+    MixDisplay *mix_display = NULL;
+
+    LOG_V( "Begin\n");
+
+    if (!mix || !init_params) {
+        LOG_E( "!mix || !init_params\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    if (mode >= MIX_CODEC_MODE_LAST) {
+        LOG_E("mode >= MIX_CODEC_MODE_LAST\n");
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+#if 0 //we have encoding support
+    /* TODO: We need to support encoding in the future */
+    if (mode == MIX_CODEC_MODE_ENCODE) {
+        LOG_E("mode == MIX_CODEC_MODE_ENCODE\n");
+        return MIX_RESULT_NOTIMPL;
+    }
+#endif
+
+    if (!MIX_IS_VIDEO(mix)) {
+        LOG_E( "!MIX_IS_VIDEO(mix)\n");
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+    if (!MIX_IS_VIDEOINITPARAMS(init_params)) {
+        LOG_E("!MIX_IS_VIDEOINITPARAMS(init_params)\n");
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+    priv = MIX_VIDEO_PRIVATE(mix);
+
+    if (priv->initialized) {
+        LOG_W( "priv->initialized\n");
+        return MIX_RESULT_ALREADY_INIT;
+    }
+
+    /*
+     * Init threading before any threads/sync objects are used.
+     * TODO: If threading is not supported, what do we do?
+     */
+
+    if (!g_thread_supported()) {
+        LOG_W("!g_thread_supported()\n");
+        g_thread_init(NULL);
+    }
+
+    /* create object lock */
+    priv->objlock = g_mutex_new();
+    if (!priv->objlock) {
+        ret = MIX_RESULT_NO_MEMORY;
+        LOG_E( "!priv->objlock\n");
+        goto cleanup;
+    }
+
+    /* save the codec mode */
+    priv->codec_mode = mode;
+
+    /* ref init_params */
+    priv->init_params = (MixVideoInitParams *) mix_params_ref(MIX_PARAMS(
+            init_params));
+    if (!priv->init_params) {
+        ret = MIX_RESULT_NO_MEMORY;
+        LOG_E( "!priv->init_params\n");
+        goto cleanup;
+    }
+
+    /* NOTE: we don't do anything with drm_init_params */
+
+    /* libVA initialization */
+
+    {
+        VAStatus va_status;
+        Display *display = NULL;
+        ret = mix_videoinitparams_get_display(priv->init_params, &mix_display);
+        if (ret != MIX_RESULT_SUCCESS) {
+            LOG_E("Failed to get display 1\n");
+            goto cleanup;
+        }
+
+        if (MIX_IS_DISPLAYX11(mix_display)) {
+            MixDisplayX11 *mix_displayx11 = MIX_DISPLAYX11(mix_display);
+            ret = mix_displayx11_get_display(mix_displayx11, &display);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed to get display 2\n");
+                goto cleanup;
+            }
+        } else {
+
+            /* TODO: add support to other MixDisplay type. For now, just return error!*/
+            LOG_E("It is not display x11\n");
+            ret = MIX_RESULT_FAIL;
+            goto cleanup;
+        }
+
+        /* Now, we can initialize libVA */
+        priv->va_display = vaGetDisplay(display);
+
+        /* Oops! Fail to get VADisplay */
+        if (!priv->va_display) {
+            ret = MIX_RESULT_FAIL;
+            LOG_E("Fail to get VADisplay\n");
+            goto cleanup;
+        }
+
+        /* Initialize libVA */
+        va_status = vaInitialize(priv->va_display, &priv->va_major_version,
+                &priv->va_minor_version);
+
+        /* Oops! Fail to initialize libVA */
+        if (va_status != VA_STATUS_SUCCESS) {
+            ret = MIX_RESULT_FAIL;
+            LOG_E("Fail to initialize libVA\n");
+            goto cleanup;
+        }
+
+        /* TODO: check the version numbers of libVA */
+
+        priv->initialized = TRUE;
+        ret = MIX_RESULT_SUCCESS;
+    }
+
+    cleanup:
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        mix_video_private_cleanup(priv);
+    }
+
+    MIXUNREF(mix_display, mix_display_unref);
+
+    LOG_V( "End\n");
+
+    return ret;
+}
+
+MIX_RESULT mix_video_deinitialize_default(MixVideo * mix) {
+
+    MixVideoPrivate *priv = NULL;
+
+    LOG_V( "Begin\n");
+
+    CHECK_INIT(mix, priv);
+
+    mix_video_private_cleanup(priv);
+
+    LOG_V( "End\n");
+    return MIX_RESULT_SUCCESS;
+}
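The initialize/deinitialize pair above brackets a whole session. A hedged sketch of the call order; mix_videoinitparams_new() is assumed from the MixParams constructor naming convention used throughout this library and is not shown in this patch:

    MixVideo *video = mix_video_new();
    MixVideoInitParams *init_params = mix_videoinitparams_new(); /* assumed ctor */
    /* ...attach a MixDisplayX11 to init_params before this point... */
    if (mix_video_initialize(video, MIX_CODEC_MODE_DECODE, init_params, NULL)
            == MIX_RESULT_SUCCESS) {
        /* configure / decode / render here */
        mix_video_deinitialize(video);
    }
    mix_video_unref(video);

Passing NULL for drm_init_params matches the note above that the parameter is currently unused.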
+
+MIX_RESULT mix_video_configure_decode(MixVideo * mix,
+        MixVideoConfigParamsDec * config_params_dec, MixDrmParams * drm_config_params) {
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+    MixVideoConfigParamsDec *priv_config_params_dec = NULL;
+
+    gchar *mime_type = NULL;
+    guint fps_n, fps_d;
+    guint bufpoolsize = 0;
+
+    MixFrameOrderMode frame_order_mode = MIX_FRAMEORDER_MODE_DISPLAYORDER;
+
+    LOG_V( "Begin\n");
+
+    CHECK_INIT(mix, priv);
+
+    if (!config_params_dec) {
+        LOG_E( "!config_params_dec\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    if (!MIX_IS_VIDEOCONFIGPARAMSDEC(config_params_dec)) {
+        LOG_E("Not a MixVideoConfigParamsDec\n");
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+    /* ---------------------- begin lock --------------------- */
+    g_mutex_lock(priv->objlock);
+
+    /*
+     * If MixVideo has already been configured, it would need to be
+     * re-configured, which is not supported yet.
+     *
+     * TODO: Allow MixVideo re-configuration
+     */
+    if (priv->configured) {
+        ret = MIX_RESULT_SUCCESS;
+        LOG_W( "Already configured\n");
+        goto cleanup;
+    }
+
+    /* Make a copy of config_params */
+    priv->config_params = (MixVideoConfigParams *) mix_params_dup(MIX_PARAMS(
+            config_params_dec));
+    if (!priv->config_params) {
+        ret = MIX_RESULT_NO_MEMORY;
+        LOG_E("Fail to duplicate config_params\n");
+        goto cleanup;
+    }
+
+    priv_config_params_dec = (MixVideoConfigParamsDec *)priv->config_params;
+
+    /* Get fps, frame order mode and mime type from config_params */
+    ret = mix_videoconfigparamsdec_get_mime_type(priv_config_params_dec, &mime_type);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to get mime type\n");
+        goto cleanup;
+    }
+
+    LOG_I( "mime : %s\n", mime_type);
+
+#ifdef MIX_LOG_ENABLE
+    if (g_strcmp0(mime_type, "video/x-wmv") == 0) {
+
+        LOG_I( "mime : video/x-wmv\n");
+        if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) {
+            LOG_I( "VC1 config_param\n");
+        } else {
+            LOG_E("Not VC1 config_param\n");
+        }
+    }
+#endif
+
+    ret = mix_videoconfigparamsdec_get_frame_order_mode(priv_config_params_dec,
+            &frame_order_mode);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to get frame order mode\n");
+        goto cleanup;
+    }
+
+    ret = mix_videoconfigparamsdec_get_frame_rate(priv_config_params_dec, &fps_n,
+            &fps_d);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to get frame rate\n");
+        goto cleanup;
+    }
+
+    if (!fps_n) {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "fps_n is 0\n");
+        goto cleanup;
+    }
+
+    ret = mix_videoconfigparamsdec_get_buffer_pool_size(priv_config_params_dec,
+            &bufpoolsize);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to get buffer pool size\n");
+        goto cleanup;
+    }
+
+    /* create frame manager */
+    priv->frame_manager = mix_framemanager_new();
+    if (!priv->frame_manager) {
+        ret = MIX_RESULT_NO_MEMORY;
+        LOG_E("Failed to create frame manager\n");
+        goto cleanup;
+    }
+
+    /* initialize frame manager */
+
+    if (g_strcmp0(mime_type, "video/x-wmv") == 0 || g_strcmp0(mime_type,
+            "video/mpeg") == 0 || g_strcmp0(mime_type, "video/x-divx") == 0) {
+        ret = mix_framemanager_initialize(priv->frame_manager,
+                frame_order_mode, fps_n, fps_d, FALSE);
+    } else {
+        ret = mix_framemanager_initialize(priv->frame_manager,
+                frame_order_mode, fps_n, fps_d, TRUE);
+    }
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to initialize frame manager\n");
+        goto cleanup;
+    }
+
+    /* create buffer pool */
+    priv->buffer_pool = mix_bufferpool_new();
+    if (!priv->buffer_pool) {
+        ret = MIX_RESULT_NO_MEMORY;
+        LOG_E("Failed to create buffer pool\n");
+        goto cleanup;
+    }
+
+    ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to initialize buffer pool\n");
+        goto cleanup;
+    }
+
+    /* Finally, we can create MixVideoFormat */
+    /* Which type of MixVideoFormat do we need to create? */
+
+    if (g_strcmp0(mime_type, "video/x-wmv") == 0
+            && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) {
+
+        MixVideoFormat_VC1 *video_format = mix_videoformat_vc1_new();
+        if (!video_format) {
+            ret = MIX_RESULT_NO_MEMORY;
+            LOG_E("Failed to create VC-1 video format\n");
+            goto cleanup;
+        }
+
+        /* TODO: work specific to VC-1 */
+
+        priv->video_format = MIX_VIDEOFORMAT(video_format);
+
+    } else if (g_strcmp0(mime_type, "video/x-h264") == 0
+            && MIX_IS_VIDEOCONFIGPARAMSDEC_H264(priv_config_params_dec)) {
+
+        MixVideoFormat_H264 *video_format = mix_videoformat_h264_new();
+        if (!video_format) {
+            ret = MIX_RESULT_NO_MEMORY;
+            LOG_E("Failed to create H.264 video format\n");
+            goto cleanup;
+        }
+
+        /* TODO: work specific to H.264 */
+
+        priv->video_format = MIX_VIDEOFORMAT(video_format);
+
+    } else if (g_strcmp0(mime_type, "video/mpeg") == 0 || g_strcmp0(mime_type,
+            "video/x-divx") == 0) {
+
+        guint version = 0;
+
+        /* Is this MPEG-4:2 ? */
+        if (g_strcmp0(mime_type, "video/mpeg") == 0) {
+
+            /*
+             * we don't support MPEG other than MPEG version 4
+             */
+            if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) {
+                ret = MIX_RESULT_NOT_SUPPORTED;
+                goto cleanup;
+            }
+
+            /* what is the mpeg version ? */
+            ret = mix_videoconfigparamsdec_mp42_get_mpegversion(
+                    MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed to get mpeg version\n");
+                goto cleanup;
+            }
+
+            /* if it is not MPEG4 */
+            if (version != 4) {
+                ret = MIX_RESULT_NOT_SUPPORTED;
+                goto cleanup;
+            }
+
+        } else {
+
+            /* config_param shall be MixVideoConfigParamsDecMP42 */
+            if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) {
+                ret = MIX_RESULT_NOT_SUPPORTED;
+                goto cleanup;
+            }
+
+            /* what is the divx version ? */
+            ret = mix_videoconfigparamsdec_mp42_get_divxversion(
+                    MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed to get divx version\n");
+                goto cleanup;
+            }
+
+            /* if it is not divx 4 or 5 */
+            if (version != 4 && version != 5) {
+                ret = MIX_RESULT_NOT_SUPPORTED;
+                goto cleanup;
+            }
+        }
+
+        MixVideoFormat_MP42 *video_format = mix_videoformat_mp42_new();
+        if (!video_format) {
+            ret = MIX_RESULT_NO_MEMORY;
+            LOG_E("Failed to create MPEG-4:2 video format\n");
+            goto cleanup;
+        }
+
+        /* TODO: work specific to MPEG-4:2 */
+        priv->video_format = MIX_VIDEOFORMAT(video_format);
+
+    } else {
+
+        /* Oops! A format we don't know */
+
+        ret = MIX_RESULT_FAIL;
+        LOG_E("Unknown format, we can't handle it\n");
+        goto cleanup;
+    }
+
+    /* initialize MixVideoFormat */
+    ret = mix_videofmt_initialize(priv->video_format, priv_config_params_dec,
+            priv->frame_manager, priv->buffer_pool, &priv->surface_pool,
+            priv->va_display);
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed initialize video format\n");
+        goto cleanup;
+    }
+
+    mix_surfacepool_ref(priv->surface_pool);
+
+    /* decide MixVideoFormat from mime_type*/
+
+    priv->configured = TRUE;
+    ret = MIX_RESULT_SUCCESS;
+
+    cleanup:
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        MIXUNREF(priv->config_params, mix_videoconfigparams_unref);
+        MIXUNREF(priv->frame_manager, mix_framemanager_unref);
+        MIXUNREF(priv->buffer_pool, mix_bufferpool_unref);
+        MIXUNREF(priv->video_format, mix_videoformat_unref);
+    }
+
+    if (mime_type) {
+        g_free(mime_type);
+    }
+
+    g_mutex_unlock(priv->objlock);
+    /* ---------------------- end lock --------------------- */
+
+    LOG_V( "End\n");
+
+    return ret;
+}
+
+MIX_RESULT mix_video_configure_encode(MixVideo * mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixDrmParams * drm_config_params) {
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+    MixVideoConfigParamsEnc *priv_config_params_enc = NULL;
+
+
+    gchar *mime_type = NULL;
+    MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264;
+    guint bufpoolsize = 0;
+
+    MixFrameOrderMode frame_order_mode = MIX_FRAMEORDER_MODE_DECODEORDER;
+
+
+    LOG_V( "Begin\n");
+
+    CHECK_INIT(mix, priv);
+
+    if (!config_params_enc) {
+        LOG_E("!config_params_enc\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+    if (!MIX_IS_VIDEOCONFIGPARAMSENC(config_params_enc)) {
+        LOG_E("Not a MixVideoConfigParamsEnc\n");
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+    /* ---------------------- begin lock --------------------- */
+    g_mutex_lock(priv->objlock);
+
+    /*
+     * If MixVideo has already been configured, it would need to be
+     * re-configured, which is not supported yet.
+     *
+     * TODO: Allow MixVideo re-configuration
+     */
+    if (priv->configured) {
+        ret = MIX_RESULT_SUCCESS;
+        LOG_W( "Already configured\n");
+        goto cleanup;
+    }
+
+    /* Make a copy of config_params */
+    priv->config_params = (MixVideoConfigParams *) mix_params_dup(
+            MIX_PARAMS(config_params_enc));
+    if (!priv->config_params) {
+        ret = MIX_RESULT_NO_MEMORY;
+        LOG_E("Fail to duplicate config_params\n");
+        goto cleanup;
+    }
+
+    priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params;
+
+    /* Get fps, frame order mode and mime type from config_params */
+    ret = mix_videoconfigparamsenc_get_mime_type(priv_config_params_enc,
+            &mime_type);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to get mime type\n");
+        goto cleanup;
+    }
+
+    LOG_I( "mime : %s\n", mime_type);
+
+    ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc,
+            &encode_format);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to get target format\n");
+        goto cleanup;
+    }
+
+    LOG_I( "encode_format : %d\n",
+            encode_format);
+
+    ret = mix_videoconfigparamsenc_get_buffer_pool_size(
+            priv_config_params_enc, &bufpoolsize);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to get buffer pool size\n");
+        goto cleanup;
+    }
+
+    /* create frame manager */
+    priv->frame_manager = mix_framemanager_new();
+    if (!priv->frame_manager) {
+        ret = MIX_RESULT_NO_MEMORY;
+        LOG_E("Failed to create frame manager\n");
+        goto cleanup;
+    }
+
+    /* initialize frame manager */
+    /* frame rate can be any value for encoding. */
+    ret = mix_framemanager_initialize(priv->frame_manager, frame_order_mode,
+            1, 1, FALSE);
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to initialize frame manager\n");
+        goto cleanup;
+    }
+
+    /* create buffer pool */
+    priv->buffer_pool = mix_bufferpool_new();
+    if (!priv->buffer_pool) {
+        ret = MIX_RESULT_NO_MEMORY;
+        LOG_E("Failed to create buffer pool\n");
+        goto cleanup;
+    }
+
+    ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to initialize buffer pool\n");
+        goto cleanup;
+    }
+
+    /* Finally, we can create MixVideoFormatEnc */
+    /* Which type of MixVideoFormatEnc do we need to create? */
+
+    if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264
+            && MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) {
+
+        MixVideoFormatEnc_H264 *video_format_enc =
+                mix_videoformatenc_h264_new();
+        if (!video_format_enc) {
+            ret = MIX_RESULT_NO_MEMORY;
+            LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n");
+            goto cleanup;
+        }
+
+        /* TODO: work specific to h264 encode */
+
+        priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+
+    }
+    else if (encode_format == MIX_ENCODE_TARGET_FORMAT_MPEG4
+            && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) {
+
+        MixVideoFormatEnc_MPEG4 *video_format_enc = mix_videoformatenc_mpeg4_new();
+        if (!video_format_enc) {
+            ret = MIX_RESULT_NO_MEMORY;
+            LOG_E("mix_video_configure_encode: Failed to create mpeg-4:2 video format\n");
+            goto cleanup;
+        }
+
+        /* TODO: work specific to mpeg4 */
+
+        priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+
+    }
+    else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW
+            && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) {
+
+        MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new();
+        if (!video_format_enc) {
+            ret = MIX_RESULT_NO_MEMORY;
+            LOG_E( "mix_video_configure_encode: Failed to create preview video format\n");
+            goto cleanup;
+        }
+
+        priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+
+    }
+    else {
+
+        /* unsupported format */
+        ret = MIX_RESULT_NOT_SUPPORTED;
+        LOG_E("Unknown format, we can't handle it\n");
+        goto cleanup;
+    }
+
+    /* initialize MixVideoFormatEnc */
+    ret = mix_videofmtenc_initialize(priv->video_format_enc,
+            priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool,
+            priv->va_display);
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed initialize video format\n");
+        goto cleanup;
+    }
+
+    mix_surfacepool_ref(priv->surface_pool);
+
+    priv->configured = TRUE;
+    ret = MIX_RESULT_SUCCESS;
+
+    cleanup:
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        MIXUNREF(priv->frame_manager, mix_framemanager_unref);
+        MIXUNREF(priv->config_params, mix_videoconfigparams_unref);
+        MIXUNREF(priv->buffer_pool, mix_bufferpool_unref);
+        MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref);
+    }
+
+    if (mime_type) {
+        g_free(mime_type);
+    }
+
+    g_mutex_unlock(priv->objlock);
+    /* ---------------------- end lock --------------------- */
+
+    LOG_V( "End\n");
+
+    return ret;
+}
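mix_video_configure() below dispatches to one of the two helpers above based on the codec mode and the params subclass. A sketch of the decode side; mix_videoconfigparamsdec_h264_new() is assumed to exist, following the constructor pattern of the headers included at the top of this file:

    MixVideoConfigParamsDec *cfg = mix_videoconfigparamsdec_h264_new(); /* assumed ctor */
    /* ...set mime type, frame rate and buffer pool size on cfg... */
    if (mix_video_configure(video, MIX_VIDEOCONFIGPARAMS(cfg), NULL) != MIX_RESULT_SUCCESS) {
        /* a params subclass that doesn't match the codec mode ends up here */
    }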
+
+MIX_RESULT mix_video_configure_default(MixVideo * mix,
+        MixVideoConfigParams * config_params,
+        MixDrmParams * drm_config_params) {
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+
+    LOG_V( "Begin\n");
+
+    CHECK_INIT(mix, priv);
+    if(!config_params) {
+        LOG_E("!config_params\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    /* Decoder mode or Encoder mode */
+    if (priv->codec_mode == MIX_CODEC_MODE_DECODE && MIX_IS_VIDEOCONFIGPARAMSDEC(config_params)) {
+        ret = mix_video_configure_decode(mix, (MixVideoConfigParamsDec*)config_params, NULL);
+    } else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE && MIX_IS_VIDEOCONFIGPARAMSENC(config_params)) {
+        ret = mix_video_configure_encode(mix, (MixVideoConfigParamsEnc*)config_params, NULL);
+    } else {
+        LOG_E("Codec mode not supported\n");
+    }
+
+    LOG_V( "End\n");
+
+    return ret;
+}
+
+MIX_RESULT mix_video_get_config_default(MixVideo * mix,
+        MixVideoConfigParams ** config_params) {
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+
+    CHECK_INIT_CONFIG(mix, priv);
+
+    if (!config_params) {
+        LOG_E( "!config_params\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    /* ---------------------- begin lock --------------------- */
+    g_mutex_lock(priv->objlock);
+
+    *config_params = MIX_VIDEOCONFIGPARAMS(mix_params_dup(MIX_PARAMS(priv->config_params)));
+    if(!*config_params) {
+        ret = MIX_RESULT_NO_MEMORY;
+        LOG_E("Failed to duplicate MixVideoConfigParams\n");
+        goto cleanup;
+    }
+
+    ret = MIX_RESULT_SUCCESS;
+
+    cleanup:
+
+    /* ---------------------- end lock --------------------- */
+    g_mutex_unlock(priv->objlock);
+
+    LOG_V( "End\n");
+
+    return ret;
+
+}
+
+MIX_RESULT mix_video_decode_default(MixVideo * mix, MixBuffer * bufin[],
+        gint bufincnt, MixVideoDecodeParams * decode_params) {
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+
+    LOG_V( "Begin\n");
+
+    CHECK_INIT_CONFIG(mix, priv);
+    if(!bufin || !bufincnt || !decode_params) {
+        LOG_E( "!bufin || !bufincnt || !decode_params\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    //First check that we have surfaces available for decode
+    ret = mix_surfacepool_check_available(priv->surface_pool);
+
+    if (ret == MIX_RESULT_POOLEMPTY) {
+        LOG_I( "Out of surfaces\n");
+        return MIX_RESULT_OUTOFSURFACES;
+    }
+
+    g_mutex_lock(priv->objlock);
+
+    ret = mix_videofmt_decode(priv->video_format, bufin, bufincnt, decode_params);
+
+    g_mutex_unlock(priv->objlock);
+
+    LOG_V( "End\n");
+
+    return ret;
+}
+
+MIX_RESULT mix_video_get_frame_default(MixVideo * mix, MixVideoFrame ** frame) {
+
+    LOG_V( "Begin\n");
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+
+    CHECK_INIT_CONFIG(mix, priv);
+
+    if (!frame) {
+        LOG_E( "!frame\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    /* ---------------------- begin lock --------------------- */
+    g_mutex_lock(priv->objlock);
+
+    LOG_V("Calling frame manager dequeue\n");
+
+    ret = mix_framemanager_dequeue(priv->frame_manager, frame);
+
+    /* ---------------------- end lock --------------------- */
+    g_mutex_unlock(priv->objlock);
+
+    LOG_V( "End\n");
+
+    return ret;
+}
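Decoded frames are pulled, rendered, and returned through the three entry points that follow. A minimal loop sketch (render_params is assumed to have been prepared elsewhere; error handling elided):

    MixVideoFrame *frame = NULL;
    while (mix_video_get_frame(video, &frame) == MIX_RESULT_SUCCESS) {
        mix_video_render(video, render_params, frame);
        mix_video_release_frame(video, frame);
        frame = NULL;
    }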
+
+MIX_RESULT mix_video_release_frame_default(MixVideo * mix,
+        MixVideoFrame * frame) {
+
+    LOG_V( "Begin\n");
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+
+    CHECK_INIT_CONFIG(mix, priv);
+
+    if (!frame) {
+        LOG_E( "!frame\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    /*
+     * We don't need a lock here. MixVideoFrame has its own lock to
+     * protect itself.
+     */
+#if 0
+    /* ---------------------- begin lock --------------------- */
+    g_mutex_lock(priv->objlock);
+#endif
+
+    LOG_I("Releasing reference frame %x\n", (guint) frame);
+    mix_videoframe_unref(frame);
+
+    ret = MIX_RESULT_SUCCESS;
+
+#if 0
+    /* ---------------------- end lock --------------------- */
+    g_mutex_unlock(priv->objlock);
+#endif
+
+    LOG_V( "End\n");
+
+    return ret;
+
+}
+
+MIX_RESULT mix_video_render_default(MixVideo * mix,
+        MixVideoRenderParams * render_params, MixVideoFrame *frame) {
+
+    LOG_V( "Begin\n");
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+
+    MixDisplay *mix_display = NULL;
+    MixDisplayX11 *mix_display_x11 = NULL;
+
+    Display *display = NULL;
+    Drawable drawable = 0;
+
+    MixRect src_rect, dst_rect;
+
+    VARectangle *va_cliprects = NULL;
+    guint number_of_cliprects = 0;
+
+    /* VASurfaceID va_surface_id; */
+    gulong va_surface_id;
+    VAStatus va_status;
+
+    CHECK_INIT_CONFIG(mix, priv);
+
+    if (!render_params || !frame) {
+        LOG_E( "!render_params || !frame\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    /* Is this render param valid? */
+    if (!MIX_IS_VIDEORENDERPARAMS(render_params)) {
+        LOG_E("Not MixVideoRenderParams\n");
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+    /*
+     * We don't need a lock here. priv->va_display seems to be the only
+     * variable that needs protection. But priv->va_display is initialized
+     * when the mixvideo object is initialized, and it keeps
+     * the same value throughout the life of mixvideo.
+     */
+#if 0
+    /* ---------------------- begin lock --------------------- */
+    g_mutex_lock(priv->objlock);
+#endif
+
+    /* get MixDisplay prop from render param */
+    ret = mix_videorenderparams_get_display(render_params, &mix_display);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to get mix_display\n");
+        goto cleanup;
+    }
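+    /*
+     * From here on, this function unpacks render_params step by step:
+     * the Drawable and Display from the MixDisplayX11, the source and
+     * destination rectangles, the clip rectangles, and finally the
+     * VASurfaceID held in the frame, which is handed to vaPutSurface().
+     */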
+    /* Is this MixDisplayX11 ? */
+    /* TODO: we shall also support MixDisplay other than MixDisplayX11 */
+    if (!MIX_IS_DISPLAYX11(mix_display)) {
+        ret = MIX_RESULT_INVALID_PARAM;
+        LOG_E( "Not MixDisplayX11\n");
+        goto cleanup;
+    }
+
+    /* cast MixDisplay to MixDisplayX11 */
+    mix_display_x11 = MIX_DISPLAYX11(mix_display);
+
+    /* Get Drawable */
+    ret = mix_displayx11_get_drawable(mix_display_x11, &drawable);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E( "Failed to get drawable\n");
+        goto cleanup;
+    }
+
+    /* Get Display */
+    ret = mix_displayx11_get_display(mix_display_x11, &display);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E( "Failed to get display\n");
+        goto cleanup;
+    }
+
+    /* get src_rect */
+    ret = mix_videorenderparams_get_src_rect(render_params, &src_rect);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to get src_rect\n");
+        goto cleanup;
+    }
+
+    /* get dst_rect */
+    ret = mix_videorenderparams_get_dest_rect(render_params, &dst_rect);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E( "Failed to get dst_rect\n");
+        goto cleanup;
+    }
+
+    /* get va_cliprects */
+    ret = mix_videorenderparams_get_cliprects_internal(render_params,
+            &va_cliprects, &number_of_cliprects);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to get va_cliprects\n");
+        goto cleanup;
+    }
+
+    /* get surface id from frame */
+    ret = mix_videoframe_get_frame_id(frame, &va_surface_id);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed to get va_surface_id\n");
+        goto cleanup;
+    }
+    guint64 timestamp = 0;
+    mix_videoframe_get_timestamp(frame, &timestamp);
+    LOG_V( "Displaying surface ID %d, timestamp %"G_GINT64_FORMAT"\n", (int)va_surface_id, timestamp);
+
+    guint32 frame_structure = 0;
+    mix_videoframe_get_frame_structure(frame, &frame_structure);
+    /* TODO: the last param of vaPutSurface() holds the de-interlacing flags;
+       what should its value be? */
+    va_status = vaPutSurface(priv->va_display, (VASurfaceID) va_surface_id,
+            drawable, src_rect.x, src_rect.y, src_rect.width, src_rect.height,
+            dst_rect.x, dst_rect.y, dst_rect.width, dst_rect.height,
+            va_cliprects, number_of_cliprects, frame_structure);
+
+    if (va_status != VA_STATUS_SUCCESS) {
+        ret = MIX_RESULT_FAIL;
+        LOG_E("Failed vaPutSurface() : va_status = %d\n", va_status);
+        goto cleanup;
+    }
+
+    /* TODO: Is this only for X11? */
+    XSync(display, FALSE);
+
+    ret = MIX_RESULT_SUCCESS;
+
+    cleanup:
+
+    MIXUNREF(mix_display, mix_display_unref)
+    /* MIXUNREF(render_params, mix_videorenderparams_unref)*/
+
+#if 0
+    /* ---------------------- end lock --------------------- */
+    g_mutex_unlock(priv->objlock);
+#endif
+
+    LOG_V( "End\n");
+
+    return ret;
+
+}
+
+MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[],
+        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params) {
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+
+    LOG_V( "Begin\n");
+
+    CHECK_INIT_CONFIG(mix, priv);
+    if(!bufin || !bufincnt) { //we won't check encode_params here, it's just a placeholder
+        LOG_E( "!bufin || !bufincnt\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    g_mutex_lock(priv->objlock);
+
+    ret = mix_videofmtenc_encode(priv->video_format_enc, bufin, bufincnt,
+            iovout, iovoutcnt, encode_params);
+
+    g_mutex_unlock(priv->objlock);
+
+    LOG_V( "End\n");
+    return ret;
+}
+
+MIX_RESULT mix_video_flush_default(MixVideo * mix) {
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+
+    LOG_V( "Begin\n");
+
+    CHECK_INIT_CONFIG(mix, priv);
+
+    /* ---------------------- begin lock --------------------- */
+    g_mutex_lock(priv->objlock);
+
+    if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) {
+        ret = mix_videofmt_flush(priv->video_format);
+
+        ret = mix_framemanager_flush(priv->frame_manager);
+    } else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE
+            && priv->video_format_enc != NULL) {
+        /*No framemanager for encoder now*/
+        ret = mix_videofmtenc_flush(priv->video_format_enc);
+    } else {
+        g_mutex_unlock(priv->objlock);
+        LOG_E("Invalid video_format/video_format_enc Pointer\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    /* ---------------------- end lock --------------------- */
+    g_mutex_unlock(priv->objlock);
+
+    LOG_V( "End\n");
+
+    return ret;
+
+}
+
+MIX_RESULT mix_video_eos_default(MixVideo * mix) {
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+
+    LOG_V( "Begin\n");
+
+    CHECK_INIT_CONFIG(mix, priv);
+
+    /* ---------------------- begin lock --------------------- */
+    g_mutex_lock(priv->objlock);
+
+    if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) {
+        ret = mix_videofmt_eos(priv->video_format);
+
+        /* frame manager will set EOS flag to be TRUE */
+        ret = mix_framemanager_eos(priv->frame_manager);
+    } else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE
+            && priv->video_format_enc != NULL) {
+        /*No framemanager now*/
+        ret = mix_videofmtenc_eos(priv->video_format_enc);
+    } else {
+        g_mutex_unlock(priv->objlock);
+        LOG_E("Invalid video_format/video_format_enc Pointer\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    /* ---------------------- end lock --------------------- */
+    g_mutex_unlock(priv->objlock);
+
+    LOG_V( "End\n");
+
+    return ret;
+}
+
+MIX_RESULT mix_video_get_state_default(MixVideo * mix, MixState * state) {
+
+    MixVideoPrivate *priv = NULL;
+
+    LOG_V( "Begin\n");
+
+    CHECK_INIT_CONFIG(mix, priv);
+
+    if (!state) {
+        LOG_E( "!state\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    *state = MIX_STATE_CONFIGURED;
+
+    LOG_V( "End\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_video_get_mixbuffer_default(MixVideo * mix, MixBuffer ** buf) {
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+
+    LOG_V( "Begin\n");
+
+    CHECK_INIT_CONFIG(mix, priv);
+
+    if (!buf) {
+        LOG_E( "!buf\n");
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+    /* ---------------------- begin lock --------------------- */
+    g_mutex_lock(priv->objlock);
+
+    ret = mix_bufferpool_get(priv->buffer_pool, buf);
+
+    /* ---------------------- end lock --------------------- */
+    g_mutex_unlock(priv->objlock);
+
+    LOG_V( "End ret = 0x%x\n", ret);
+
+    return ret;
+
+}
+
+MIX_RESULT mix_video_release_mixbuffer_default(MixVideo * mix, MixBuffer * buf) {
+
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+
+    LOG_V( "Begin\n");
+
+    CHECK_INIT_CONFIG(mix, priv);
+
+    if (!buf) {
+        LOG_E( "!buf\n");
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+    /* ---------------------- begin lock --------------------- */
+    g_mutex_lock(priv->objlock);
+
+    mix_buffer_unref(buf);
+    ret = MIX_RESULT_SUCCESS;
+
+    /* ---------------------- end lock --------------------- */
+    g_mutex_unlock(priv->objlock);
+
+    LOG_V( "End\n");
+    return ret;
+
+}
+
+MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, guint *max_size)
+{
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+
+    LOG_V( "Begin\n");
+
+    if (!mix || !max_size) /* TODO: add other parameter NULL checking */
+    {
+        LOG_E( "!mix || !max_size\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    CHECK_INIT_CONFIG(mix, priv);
+
+    g_mutex_lock(priv->objlock);
+
+    ret = mix_videofmtenc_get_max_coded_buffer_size(priv->video_format_enc, max_size);
+
+    g_mutex_unlock(priv->objlock);
+
+    LOG_V( "End\n");
+    return ret;
+}
+
+/*
+ * API functions
+ */
+
+#define CHECK_AND_GET_MIX_CLASS(mix, klass) \
+    if (!mix) { \
+        return MIX_RESULT_NULL_PTR; \
+    } \
+    if (!MIX_IS_VIDEO(mix)) { \
+        LOG_E( "Not MixVideo\n"); \
+        return MIX_RESULT_INVALID_PARAM; \
+    } \
+    klass = MIX_VIDEO_GET_CLASS(mix);
+
+
+MIX_RESULT mix_video_get_version(MixVideo * mix, guint * major, guint * minor) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->get_version_func) {
+        return klass->get_version_func(mix, major, minor);
+    }
+    return MIX_RESULT_NOTIMPL;
+
+}
+
+MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode,
+        MixVideoInitParams * init_params, MixDrmParams * drm_init_params) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->initialize_func) {
+        return klass->initialize_func(mix, mode, init_params, drm_init_params);
+    }
+    return MIX_RESULT_NOTIMPL;
+
+}
+
+MIX_RESULT mix_video_deinitialize(MixVideo * mix) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->deinitialize_func) {
+        return klass->deinitialize_func(mix);
+    }
+    return MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_configure(MixVideo * mix,
+        MixVideoConfigParams * config_params,
+        MixDrmParams * drm_config_params) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->configure_func) {
+        return klass->configure_func(mix, config_params, drm_config_params);
+    }
+    return MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_get_config(MixVideo * mix,
+        MixVideoConfigParams ** config_params_dec) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->get_config_func) {
+        return klass->get_config_func(mix, config_params_dec);
+    }
+    return MIX_RESULT_NOTIMPL;
+
+}
+
+MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt,
+        MixVideoDecodeParams * decode_params) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->decode_func) {
+        return klass->decode_func(mix, bufin, bufincnt,
+                decode_params);
+    }
+    return MIX_RESULT_NOTIMPL;
+
+}
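Every public wrapper here follows the same vtable dispatch shape, so a subclass can override individual operations from its class_init. A purely hypothetical sketch; MyVideoClass and my_get_frame are not part of this library:

    static MIX_RESULT my_get_frame(MixVideo *mix, MixVideoFrame **frame);

    static void my_video_class_init(MyVideoClass *klass) {
        MixVideoClass *video_class = MIX_VIDEO_CLASS(klass);
        video_class->get_frame_func = my_get_frame; /* hypothetical override */
    }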
+
+MIX_RESULT mix_video_get_frame(MixVideo * mix, MixVideoFrame ** frame) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->get_frame_func) {
+        return klass->get_frame_func(mix, frame);
+    }
+    return MIX_RESULT_NOTIMPL;
+
+}
+
+MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->release_frame_func) {
+        return klass->release_frame_func(mix, frame);
+    }
+    return MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_render(MixVideo * mix,
+        MixVideoRenderParams * render_params, MixVideoFrame *frame) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->render_func) {
+        return klass->render_func(mix, render_params, frame);
+    }
+    return MIX_RESULT_NOTIMPL;
+
+}
+
+MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt,
+        MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->encode_func) {
+        return klass->encode_func(mix, bufin, bufincnt, iovout, iovoutcnt,
+                encode_params);
+    }
+    return MIX_RESULT_NOTIMPL;
+
+}
+
+MIX_RESULT mix_video_flush(MixVideo * mix) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->flush_func) {
+        return klass->flush_func(mix);
+    }
+    return MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_eos(MixVideo * mix) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->eos_func) {
+        return klass->eos_func(mix);
+    }
+    return MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->get_state_func) {
+        return klass->get_state_func(mix, state);
+    }
+    return MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_get_mixbuffer(MixVideo * mix, MixBuffer ** buf) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->get_mix_buffer_func) {
+        return klass->get_mix_buffer_func(mix, buf);
+    }
+    return MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->release_mix_buffer_func) {
+        return klass->release_mix_buffer_func(mix, buf);
+    }
+    return MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize) {
+
+    MixVideoClass *klass = NULL;
+    CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+    if (klass->get_max_coded_buffer_size_func) {
+        return klass->get_max_coded_buffer_size_func(mix, bufsize);
+    }
+    return MIX_RESULT_NOTIMPL;
+}
diff --git a/mix_video/src/mixvideo.h b/mix_video/src/mixvideo.h
new file mode 100644
index 0000000..0b50cf6
--- /dev/null
+++ b/mix_video/src/mixvideo.h
@@ -0,0 +1,208 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEO_H__
+#define __MIX_VIDEO_H__
+
+#include <glib-object.h>
+
+#include "mixdrmparams.h"
+#include "mixvideoinitparams.h"
+#include "mixvideoconfigparamsdec.h"
+#include "mixvideoconfigparamsenc.h"
+#include "mixvideodecodeparams.h"
+#include "mixvideoencodeparams.h"
+#include "mixvideorenderparams.h"
+#include "mixvideocaps.h"
+#include "mixbuffer.h"
+
+/*
+ * Type macros.
+ */
+#define MIX_TYPE_VIDEO (mix_video_get_type ())
+#define MIX_VIDEO(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEO, MixVideo))
+#define MIX_IS_VIDEO(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEO))
+#define MIX_VIDEO_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEO, MixVideoClass))
+#define MIX_IS_VIDEO_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEO))
+#define MIX_VIDEO_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEO, MixVideoClass))
+
+typedef struct _MixVideo MixVideo;
+typedef struct _MixVideoClass MixVideoClass;
+
+/*
+ * Virtual methods typedef
+ */
+
+typedef MIX_RESULT (*MixVideoGetVersionFunc)(MixVideo * mix, guint * major,
+        guint * minor);
+
+typedef MIX_RESULT (*MixVideoInitializeFunc)(MixVideo * mix, MixCodecMode mode,
+        MixVideoInitParams * init_params, MixDrmParams * drm_init_params);
+
+typedef MIX_RESULT (*MixVideoDeinitializeFunc)(MixVideo * mix);
+
+typedef MIX_RESULT (*MixVideoConfigureFunc)(MixVideo * mix,
+        MixVideoConfigParams * config_params,
+        MixDrmParams * drm_config_params);
+
+typedef MIX_RESULT (*MixVideoGetConfigFunc)(MixVideo * mix,
+        MixVideoConfigParams ** config_params);
+
+typedef MIX_RESULT (*MixVideoDecodeFunc)(MixVideo * mix, MixBuffer * bufin[],
+        gint bufincnt, MixVideoDecodeParams * decode_params);
+
+typedef MIX_RESULT (*MixVideoGetFrameFunc)(MixVideo * mix,
+        MixVideoFrame ** frame);
+
+typedef MIX_RESULT (*MixVideoReleaseFrameFunc)(MixVideo * mix,
+        MixVideoFrame * frame);
+
+typedef MIX_RESULT (*MixVideoRenderFunc)(MixVideo * mix,
+        MixVideoRenderParams * render_params, MixVideoFrame *frame);
+
+typedef MIX_RESULT (*MixVideoEncodeFunc)(MixVideo * mix, MixBuffer * bufin[],
+        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params);
+
+typedef MIX_RESULT (*MixVideoFlushFunc)(MixVideo * mix);
+
+typedef MIX_RESULT (*MixVideoEOSFunc)(MixVideo * mix);
+
+typedef MIX_RESULT (*MixVideoGetStateFunc)(MixVideo * mix, MixState * state);
+
+typedef MIX_RESULT
+(*MixVideoGetMixBufferFunc)(MixVideo * mix, MixBuffer ** buf);
+
+typedef MIX_RESULT (*MixVideoReleaseMixBufferFunc)(MixVideo * mix,
+        MixBuffer * buf);
+
+typedef MIX_RESULT (*MixVideoGetMaxCodedBufferSizeFunc) (MixVideo * mix,
+        guint *max_size);
+
+/**
+ * MixVideo:
+ * @parent: Parent object.
+ * + * MI-X Video object + */ +struct _MixVideo { + /*< public > */ + GObject parent; + + /*< public > */ + + /*< private > */ + gpointer context; +}; + +/** + * MixVideoClass: + * + * MI-X Video object class + */ +struct _MixVideoClass { + /*< public > */ + GObjectClass parent_class; + + /* class members */ + + MixVideoGetVersionFunc get_version_func; + MixVideoInitializeFunc initialize_func; + MixVideoDeinitializeFunc deinitialize_func; + MixVideoConfigureFunc configure_func; + MixVideoGetConfigFunc get_config_func; + MixVideoDecodeFunc decode_func; + MixVideoGetFrameFunc get_frame_func; + MixVideoReleaseFrameFunc release_frame_func; + MixVideoRenderFunc render_func; + MixVideoEncodeFunc encode_func; + MixVideoFlushFunc flush_func; + MixVideoEOSFunc eos_func; + MixVideoGetStateFunc get_state_func; + MixVideoGetMixBufferFunc get_mix_buffer_func; + MixVideoReleaseMixBufferFunc release_mix_buffer_func; + MixVideoGetMaxCodedBufferSizeFunc get_max_coded_buffer_size_func; +}; + +/** + * mix_video_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_video_get_type(void); + +/** + * mix_video_new: + * @returns: A newly allocated instance of #MixVideo + * + * Use this method to create new instance of #MixVideo + */ +MixVideo *mix_video_new(void); + +/** + * mix_video_ref: + * @mix: object to add reference + * @returns: the MixVideo instance where reference count has been increased. + * + * Add reference count. + */ +MixVideo *mix_video_ref(MixVideo * mix); + +/** + * mix_video_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +#define mix_video_unref(obj) g_object_unref (G_OBJECT(obj)) + +/* Class Methods */ + +MIX_RESULT mix_video_get_version(MixVideo * mix, guint * major, guint * minor); + +MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode, + MixVideoInitParams * init_params, MixDrmParams * drm_init_params); + +MIX_RESULT mix_video_deinitialize(MixVideo * mix); + +MIX_RESULT mix_video_configure(MixVideo * mix, + MixVideoConfigParams * config_params, + MixDrmParams * drm_config_params); + +MIX_RESULT mix_video_get_config(MixVideo * mix, + MixVideoConfigParams ** config_params); + +MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, + MixVideoDecodeParams * decode_params); + +MIX_RESULT mix_video_get_frame(MixVideo * mix, MixVideoFrame ** frame); + +MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame); + +MIX_RESULT mix_video_render(MixVideo * mix, + MixVideoRenderParams * render_params, MixVideoFrame *frame); + +MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, + MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params); + +MIX_RESULT mix_video_flush(MixVideo * mix); + +MIX_RESULT mix_video_eos(MixVideo * mix); + +MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state); + +MIX_RESULT mix_video_get_mixbuffer(MixVideo * mix, MixBuffer ** buf); + +MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf); + +#endif /* __MIX_VIDEO_H__ */ diff --git a/mix_video/src/mixvideo_private.h b/mix_video/src/mixvideo_private.h new file mode 100644 index 0000000..806d249 --- /dev/null +++ b/mix_video/src/mixvideo_private.h @@ -0,0 +1,57 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. 
Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEO_PRIVATE_H__
+#define __MIX_VIDEO_PRIVATE_H__
+
+
+typedef struct _MixVideoPrivate MixVideoPrivate;
+
+struct _MixVideoPrivate {
+    /*< private > */
+
+    GMutex *objlock;
+    gboolean initialized;
+    gboolean configured;
+
+    VADisplay va_display;
+
+    int va_major_version;
+    int va_minor_version;
+
+    MixCodecMode codec_mode;
+
+    MixVideoInitParams *init_params;
+    MixDrmParams *drm_params;
+
+    MixVideoConfigParams *config_params;
+
+    MixFrameManager *frame_manager;
+    MixVideoFormat *video_format;
+    MixVideoFormatEnc *video_format_enc;
+
+    MixSurfacePool *surface_pool;
+    MixBufferPool *buffer_pool;
+
+};
+
+/**
+ * MIX_VIDEO_GET_PRIVATE:
+ * @obj: class object for which to get private data.
+ *
+ * Get private structure of this class.
+ */
+#define MIX_VIDEO_GET_PRIVATE(obj) \
+    (G_TYPE_INSTANCE_GET_PRIVATE ((obj), MIX_TYPE_VIDEO, MixVideoPrivate))
+
+/* Private functions */
+void mix_video_private_initialize(MixVideoPrivate* priv);
+void mix_video_private_cleanup(MixVideoPrivate* priv);
+
+
+#endif /* __MIX_VIDEO_PRIVATE_H__ */
diff --git a/mix_video/src/mixvideocaps.c b/mix_video/src/mixvideocaps.c
new file mode 100644
index 0000000..3a41c47
--- /dev/null
+++ b/mix_video/src/mixvideocaps.c
@@ -0,0 +1,261 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+* SECTION:mixvideocaps
+* @short_description: Video capabilities
+*
+* A data object which stores video capability information.
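+*
+* Illustrative usage (sketch only; these setters and getters are still
+* marked TODO below, and error checking is elided):
+* |[
+* MixVideoCaps *caps = mix_videocaps_new ();
+* gchar *hw_caps = NULL;
+* mix_videocaps_get_video_hw_caps (caps, &hw_caps);
+* g_free (hw_caps);
+* mix_videocaps_unref (caps);
+* ]|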
+*/
+
+#include "mixvideocaps.h"
+
+#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; }
+
+static GType _mix_videocaps_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_videocaps_type = g_define_type_id; }
+
+gboolean mix_videocaps_copy (MixParams * target, const MixParams * src);
+MixParams *mix_videocaps_dup (const MixParams * obj);
+gboolean mix_videocaps_equal (MixParams * first, MixParams * second);
+static void mix_videocaps_finalize (MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoCaps, mix_videocaps, MIX_TYPE_PARAMS,
+    _do_init);
+
+static void
+mix_videocaps_init (MixVideoCaps * self)
+{
+  /* initialize properties here */
+  self->mix_caps = NULL;
+  self->video_hw_caps = NULL;
+
+  self->reserved1 = NULL;
+  self->reserved2 = NULL;
+  self->reserved3 = NULL;
+  self->reserved4 = NULL;
+
+}
+
+static void
+mix_videocaps_class_init (MixVideoCapsClass * klass)
+{
+  MixParamsClass *mixparams_class = MIX_PARAMS_CLASS (klass);
+
+  /* setup static parent class */
+  parent_class = (MixParamsClass *) g_type_class_peek_parent (klass);
+
+  mixparams_class->finalize = mix_videocaps_finalize;
+  mixparams_class->copy = (MixParamsCopyFunction) mix_videocaps_copy;
+  mixparams_class->dup = (MixParamsDupFunction) mix_videocaps_dup;
+  mixparams_class->equal = (MixParamsEqualFunction) mix_videocaps_equal;
+}
+
+MixVideoCaps *
+mix_videocaps_new (void)
+{
+  MixVideoCaps *ret =
+    (MixVideoCaps *) g_type_create_instance (MIX_TYPE_VIDEOCAPS);
+  return ret;
+}
+
+void
+mix_videocaps_finalize (MixParams * obj)
+{
+  /* clean up here. */
+
+  MixVideoCaps *self = MIX_VIDEOCAPS (obj);
+  SAFE_FREE (self->mix_caps);
+  SAFE_FREE (self->video_hw_caps);
+
+  /* Chain up parent */
+  if (parent_class->finalize)
+    {
+      parent_class->finalize (obj);
+    }
+}
+
+MixVideoCaps *
+mix_videocaps_ref (MixVideoCaps * mix)
+{
+  return (MixVideoCaps *) mix_params_ref (MIX_PARAMS (mix));
+}
+
+/**
+* mix_videocaps_dup:
+* @obj: a #MixVideoCaps object
+* @returns: a newly allocated duplicate of the object.
+*
+* Copy duplicate of the object.
+*/
+MixParams *
+mix_videocaps_dup (const MixParams * obj)
+{
+  MixParams *ret = NULL;
+
+  if (MIX_IS_VIDEOCAPS (obj))
+    {
+      MixVideoCaps *duplicate = mix_videocaps_new ();
+      if (mix_videocaps_copy (MIX_PARAMS (duplicate), MIX_PARAMS (obj)))
+        {
+          ret = MIX_PARAMS (duplicate);
+        }
+      else
+        {
+          mix_videocaps_unref (duplicate);
+        }
+    }
+  return ret;
+}
+
+/**
+* mix_videocaps_copy:
+* @target: copy to target
+* @src: copy from src
+* @returns: boolean indicates if copy is successful.
+*
+* Copy instance data from @src to @target.
+*/
+gboolean
+mix_videocaps_copy (MixParams * target, const MixParams * src)
+{
+  MixVideoCaps *this_target, *this_src;
+
+  if (MIX_IS_VIDEOCAPS (target) && MIX_IS_VIDEOCAPS (src))
+    {
+      // Cast the base object to this child object
+      this_target = MIX_VIDEOCAPS (target);
+      this_src = MIX_VIDEOCAPS (src);
+
+      // Free the existing properties
+      SAFE_FREE (this_target->mix_caps);
+      SAFE_FREE (this_target->video_hw_caps);
+
+      // Duplicate string
+      this_target->mix_caps = g_strdup (this_src->mix_caps);
+      this_target->video_hw_caps = g_strdup (this_src->video_hw_caps);
+
+      // Now chainup base class
+      if (parent_class->copy)
+        {
+          return parent_class->copy (MIX_PARAMS_CAST (target),
+                                     MIX_PARAMS_CAST (src));
+        }
+      else
+        {
+          return TRUE;
+        }
+    }
+  return FALSE;
+}
+
+/**
+* mix_videocaps_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicates if instances are equal.
+*
+* Check the two objects for equality.
+*/
+gboolean
+mix_videocaps_equal (MixParams * first, MixParams * second)
+{
+  gboolean ret = FALSE;
+  MixVideoCaps *this_first, *this_second;
+
+  if (MIX_IS_VIDEOCAPS (first) && MIX_IS_VIDEOCAPS (second))
+    {
+      // Deep compare
+      // Cast the base object to this child object
+
+      this_first = MIX_VIDEOCAPS (first);
+      this_second = MIX_VIDEOCAPS (second);
+
+      /* TODO: add comparison for other properties */
+      if (g_strcmp0 (this_first->mix_caps, this_second->mix_caps) == 0
+          && g_strcmp0 (this_first->video_hw_caps,
+                        this_second->video_hw_caps) == 0)
+        {
+          // members within this scope equal. chaining up.
+          MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class);
+          if (klass->equal)
+            ret = klass->equal (first, second);
+          else
+            ret = TRUE;
+        }
+    }
+
+  return ret;
+}
+
+#define MIX_VIDEOCAPS_SETTER_CHECK_INPUT(obj) \
+  if(!obj) return MIX_RESULT_NULL_PTR; \
+  if(!MIX_IS_VIDEOCAPS(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOCAPS_GETTER_CHECK_INPUT(obj, prop) \
+  if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+  if(!MIX_IS_VIDEOCAPS(obj)) return MIX_RESULT_FAIL; \
+
+
+/* TODO: Add getters and setters for other properties. The following is just an example, not implemented yet. */
+MIX_RESULT
+mix_videocaps_set_mix_caps (MixVideoCaps * obj, gchar * mix_caps)
+{
+  MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj);
+
+  SAFE_FREE (obj->mix_caps);
+  obj->mix_caps = g_strdup (mix_caps);
+  if (mix_caps != NULL && obj->mix_caps == NULL)
+    {
+      return MIX_RESULT_NO_MEMORY;
+    }
+
+  return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT
+mix_videocaps_get_mix_caps (MixVideoCaps * obj, gchar ** mix_caps)
+{
+  MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, mix_caps);
+  *mix_caps = g_strdup (obj->mix_caps);
+  if (*mix_caps == NULL && obj->mix_caps)
+    {
+      return MIX_RESULT_NO_MEMORY;
+    }
+  return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT
+mix_videocaps_set_video_hw_caps (MixVideoCaps * obj, gchar * video_hw_caps)
+{
+  MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj);
+
+  SAFE_FREE (obj->video_hw_caps);
+  obj->video_hw_caps = g_strdup (video_hw_caps);
+  if (video_hw_caps != NULL && obj->video_hw_caps == NULL)
+    {
+      return MIX_RESULT_NO_MEMORY;
+    }
+
+  return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT
+mix_videocaps_get_video_hw_caps (MixVideoCaps * obj, gchar ** video_hw_caps)
+{
+  MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, video_hw_caps);
+
+  *video_hw_caps = g_strdup (obj->video_hw_caps);
+  if (*video_hw_caps == NULL && obj->video_hw_caps)
+    {
+      return MIX_RESULT_NO_MEMORY;
+    }
+  return MIX_RESULT_SUCCESS;
+}
diff --git a/mix_video/src/mixvideocaps.h b/mix_video/src/mixvideocaps.h
new file mode 100644
index 0000000..6630c19
--- /dev/null
+++ b/mix_video/src/mixvideocaps.h
@@ -0,0 +1,137 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_VIDEOCAPS_H__
+#define __MIX_VIDEOCAPS_H__
+
+#include "mixparams.h"
+#include "mixvideodef.h"
+
+/**
+* MIX_TYPE_VIDEOCAPS:
+*
+* Get type of class.
+*/
+#define MIX_TYPE_VIDEOCAPS (mix_videocaps_get_type ())
+
+/**
+* MIX_VIDEOCAPS:
+* @obj: object to be type-casted.
+*/
+#define MIX_VIDEOCAPS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCAPS, MixVideoCaps))
+
+/**
+* MIX_IS_VIDEOCAPS:
+* @obj: an object.
+*
+* Checks if the given object is an instance of #MixVideoCaps
+*/
+#define MIX_IS_VIDEOCAPS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCAPS))
+
+/**
+* MIX_VIDEOCAPS_CLASS:
+* @klass: class to be type-casted.
+*/
+#define MIX_VIDEOCAPS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCAPS, MixVideoCapsClass))
+
+/**
+* MIX_IS_VIDEOCAPS_CLASS:
+* @klass: a class.
+*
+* Checks if the given class is #MixVideoCapsClass
+*/
+#define MIX_IS_VIDEOCAPS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCAPS))
+
+/**
+* MIX_VIDEOCAPS_GET_CLASS:
+* @obj: a #MixVideoCaps object.
+*
+* Get the class instance of the object.
+*/
+#define MIX_VIDEOCAPS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCAPS, MixVideoCapsClass))
+
+typedef struct _MixVideoCaps MixVideoCaps;
+typedef struct _MixVideoCapsClass MixVideoCapsClass;
+
+/**
+* MixVideoCaps:
+*
+* MI-X Video Capabilities object
+*/
+struct _MixVideoCaps
+{
+  /*< public > */
+  MixParams parent;
+
+  /*< public > */
+  gchar *mix_caps;
+  gchar *video_hw_caps;
+
+  void *reserved1;
+  void *reserved2;
+  void *reserved3;
+  void *reserved4;
+};
+
+/**
+* MixVideoCapsClass:
+*
+* MI-X Video Capabilities object class
+*/
+struct _MixVideoCapsClass
+{
+  /*< public > */
+  MixParamsClass parent_class;
+
+  /* class members */
+};
+
+/**
+* mix_videocaps_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_videocaps_get_type (void);
+
+/**
+* mix_videocaps_new:
+* @returns: A newly allocated instance of #MixVideoCaps
+*
+* Use this method to create new instance of #MixVideoCaps
+*/
+MixVideoCaps *mix_videocaps_new (void);
+/**
+* mix_videocaps_ref:
+* @mix: object to add reference
+* @returns: the MixVideoCaps instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixVideoCaps *mix_videocaps_ref (MixVideoCaps * mix);
+
+/**
+* mix_videocaps_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_videocaps_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+MIX_RESULT mix_videocaps_set_mix_caps (MixVideoCaps * obj, gchar * mix_caps);
+MIX_RESULT mix_videocaps_get_mix_caps (MixVideoCaps * obj,
+                                       gchar ** mix_caps);
+
+MIX_RESULT mix_videocaps_set_video_hw_caps (MixVideoCaps * obj,
+                                            gchar * video_hw_caps);
+MIX_RESULT mix_videocaps_get_video_hw_caps (MixVideoCaps * obj,
+                                            gchar ** video_hw_caps);
+
+#endif /* __MIX_VIDEOCAPS_H__ */
diff --git a/mix_video/src/mixvideoconfigparams.c b/mix_video/src/mixvideoconfigparams.c
new file mode 100644
index 0000000..c355a4c
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparams.c
@@ -0,0 +1,157 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideoconfigparams + * @short_description: VideoConfig parameters + * + * A data object which stores videoconfig specific parameters. + */ + +#include +#include "mixvideolog.h" +#include "mixvideoconfigparams.h" + +static GType _mix_videoconfigparams_type = 0; +static MixParamsClass *parent_class = NULL; + +#define _do_init { _mix_videoconfigparams_type = g_define_type_id; } + +gboolean mix_videoconfigparams_copy(MixParams * target, const MixParams * src); +MixParams *mix_videoconfigparams_dup(const MixParams * obj); +gboolean mix_videoconfigparams_equal(MixParams * first, MixParams * second); +static void mix_videoconfigparams_finalize(MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParams, mix_videoconfigparams, + MIX_TYPE_PARAMS, _do_init); + +static void mix_videoconfigparams_init(MixVideoConfigParams * self) { + + /* initialize properties here */ + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; +} + +static void mix_videoconfigparams_class_init(MixVideoConfigParamsClass * klass) { + MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); + + /* setup static parent class */ + parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); + + mixparams_class->finalize = mix_videoconfigparams_finalize; + mixparams_class->copy = (MixParamsCopyFunction) mix_videoconfigparams_copy; + mixparams_class->dup = (MixParamsDupFunction) mix_videoconfigparams_dup; + mixparams_class->equal + = (MixParamsEqualFunction) mix_videoconfigparams_equal; +} + +MixVideoConfigParams * +mix_videoconfigparams_new(void) { + MixVideoConfigParams *ret = + (MixVideoConfigParams *) g_type_create_instance( + MIX_TYPE_VIDEOCONFIGPARAMS); + + return ret; +} + +void mix_videoconfigparams_finalize(MixParams * obj) { + + /* clean up here. */ + /* MixVideoConfigParams *self = MIX_VIDEOCONFIGPARAMS(obj); */ + + /* Chain up parent */ + if (parent_class->finalize) { + parent_class->finalize(obj); + } +} + +MixVideoConfigParams * +mix_videoconfigparams_ref(MixVideoConfigParams * mix) { + return (MixVideoConfigParams *) mix_params_ref(MIX_PARAMS(mix)); +} + +/** + * mix_videoconfigparams_dup: + * @obj: a #MixVideoConfigParams object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. 
+ */
+MixParams *
+mix_videoconfigparams_dup(const MixParams * obj) {
+    MixParams *ret = NULL;
+
+    if (MIX_IS_VIDEOCONFIGPARAMS(obj)) {
+        MixVideoConfigParams *duplicate = mix_videoconfigparams_new();
+        if (mix_videoconfigparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+            ret = MIX_PARAMS(duplicate);
+        } else {
+            mix_videoconfigparams_unref(duplicate);
+        }
+    }
+
+    return ret;
+}
+
+/**
+ * mix_videoconfigparams_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_videoconfigparams_copy(MixParams * target, const MixParams * src) {
+
+    LOG_V( "Begin\n");
+
+    if (MIX_IS_VIDEOCONFIGPARAMS(target) && MIX_IS_VIDEOCONFIGPARAMS(src)) {
+
+        /* TODO: copy other properties if there's any */
+
+        /* Now chainup base class */
+        if (parent_class->copy) {
+            LOG_V( "parent_class->copy != NULL\n");
+            return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+                    src));
+        } else {
+            LOG_V( "parent_class->copy == NULL\n");
+            return TRUE;
+        }
+    }
+
+    LOG_V( "End\n");
+    return FALSE;
+}
+
+/**
+ * mix_videoconfigparams_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instances are equal.
+ *
+ * Check the two objects for equality.
+ */
+gboolean mix_videoconfigparams_equal(MixParams * first, MixParams * second) {
+
+    gboolean ret = FALSE;
+
+    if (MIX_IS_VIDEOCONFIGPARAMS(first) && MIX_IS_VIDEOCONFIGPARAMS(second)) {
+
+        /* chaining up. */
+        MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+        if (klass->equal)
+            ret = klass->equal(first, second);
+        else
+            ret = TRUE;
+    }
+
+    return ret;
+}
diff --git a/mix_video/src/mixvideoconfigparams.h b/mix_video/src/mixvideoconfigparams.h
new file mode 100644
index 0000000..b2bac41
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparams.h
@@ -0,0 +1,126 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOCONFIGPARAMS_H__
+#define __MIX_VIDEOCONFIGPARAMS_H__
+
+#include "mixparams.h"
+#include "mixvideodef.h"
+
+/**
+ * MIX_TYPE_VIDEOCONFIGPARAMS:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_VIDEOCONFIGPARAMS (mix_videoconfigparams_get_type ())
+
+/**
+ * MIX_VIDEOCONFIGPARAMS:
+ * @obj: object to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMS, MixVideoConfigParams))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMS:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixVideoConfigParams
+ */
+#define MIX_IS_VIDEOCONFIGPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMS))
+
+/**
+ * MIX_VIDEOCONFIGPARAMS_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMS, MixVideoConfigParamsClass))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMS_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixVideoConfigParamsClass
+ */
+#define MIX_IS_VIDEOCONFIGPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMS))
+
+/**
+ * MIX_VIDEOCONFIGPARAMS_GET_CLASS:
+ * @obj: a #MixVideoConfigParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_VIDEOCONFIGPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMS, MixVideoConfigParamsClass))
+
+typedef struct _MixVideoConfigParams MixVideoConfigParams;
+typedef struct _MixVideoConfigParamsClass MixVideoConfigParamsClass;
+
+/**
+ * MixVideoConfigParams:
+ *
+ * MI-X VideoConfig Parameter object
+ */
+struct _MixVideoConfigParams {
+    /*< public > */
+    MixParams parent;
+
+    /*< public > */
+
+    void *reserved1;
+    void *reserved2;
+    void *reserved3;
+    void *reserved4;
+};
+
+/**
+ * MixVideoConfigParamsClass:
+ *
+ * MI-X VideoConfig object class
+ */
+struct _MixVideoConfigParamsClass {
+    /*< public > */
+    MixParamsClass parent_class;
+
+    /* class members */
+};
+
+/**
+ * mix_videoconfigparams_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoconfigparams_get_type(void);
+
+/**
+ * mix_videoconfigparams_new:
+ * @returns: A newly allocated instance of #MixVideoConfigParams
+ *
+ * Use this method to create new instance of #MixVideoConfigParams
+ */
+MixVideoConfigParams *mix_videoconfigparams_new(void);
+/**
+ * mix_videoconfigparams_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoConfigParams instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoConfigParams *mix_videoconfigparams_ref(MixVideoConfigParams * mix);
+
+/**
+ * mix_videoconfigparams_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoconfigparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/* TODO: Add getters and setters for other properties */
+
+#endif /* __MIX_VIDEOCONFIGPARAMS_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsdec.c b/mix_video/src/mixvideoconfigparamsdec.c
new file mode 100644
index 0000000..751b124
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsdec.c
@@ -0,0 +1,534 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideoconfigparamsdec
+ * @short_description: VideoConfig parameters for decoding
+ *
+ * A data object which stores videoconfig specific parameters for decoding.
+ */
+
+#include <string.h>
+#include "mixvideolog.h"
+#include "mixvideoconfigparamsdec.h"
+
+static GType _mix_videoconfigparamsdec_type = 0;
+static MixVideoConfigParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_videoconfigparamsdec_type = g_define_type_id; }
+
+gboolean mix_videoconfigparamsdec_copy(MixParams * target, const MixParams * src);
+MixParams *mix_videoconfigparamsdec_dup(const MixParams * obj);
+gboolean mix_videoconfigparamsdec_equal(MixParams * first, MixParams * second);
+static void mix_videoconfigparamsdec_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsDec, mix_videoconfigparamsdec,
+        MIX_TYPE_VIDEOCONFIGPARAMS, _do_init);
+
+static void mix_videoconfigparamsdec_init(MixVideoConfigParamsDec * self) {
+
+    /* initialize properties here */
+
+    self->frame_order_mode = MIX_FRAMEORDER_MODE_DISPLAYORDER;
+    memset(&self->header, 0, sizeof(self->header));
+
+    self->mime_type = NULL;
+
+    self->frame_rate_num = 0;
+    self->frame_rate_denom = 0;
+
+    self->picture_width = 0;
+    self->picture_height = 0;
+
+    self->raw_format = 0;
+    self->rate_control = 0;
+    self->mixbuffer_pool_size = 0;
+    self->extra_surface_allocation = 0;
+
+    /* TODO: initialize other properties */
+    self->reserved1 = NULL;
+    self->reserved2 = NULL;
+    self->reserved3 = NULL;
+    self->reserved4 = NULL;
+}
+
+static void mix_videoconfigparamsdec_class_init(MixVideoConfigParamsDecClass * klass) {
+    MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+    /* setup static parent class */
+    parent_class = (MixVideoConfigParamsClass *) g_type_class_peek_parent(klass);
+
+    mixparams_class->finalize = mix_videoconfigparamsdec_finalize;
+    mixparams_class->copy = (MixParamsCopyFunction) mix_videoconfigparamsdec_copy;
+    mixparams_class->dup = (MixParamsDupFunction) mix_videoconfigparamsdec_dup;
+    mixparams_class->equal
+            = (MixParamsEqualFunction) mix_videoconfigparamsdec_equal;
+}
+
+MixVideoConfigParamsDec *
+mix_videoconfigparamsdec_new(void) {
+    MixVideoConfigParamsDec *ret =
+            (MixVideoConfigParamsDec *) g_type_create_instance(
+                    MIX_TYPE_VIDEOCONFIGPARAMSDEC);
+
+    return ret;
+}
+
+void mix_videoconfigparamsdec_finalize(MixParams * obj) {
+
+    /* clean up here. */
+    MixVideoConfigParamsDec *self = MIX_VIDEOCONFIGPARAMSDEC(obj);
+    MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class);
+
+
+    /* free header */
+    if (self->header.data) {
+        g_free(self->header.data);
+        memset(&self->header, 0, sizeof(self->header));
+    }
+
+    /* free mime_type; guard against NULL since mime_type may never
+     * have been set, and g_string_free(str, TRUE) releases both the
+     * GString and its character data */
+    if (self->mime_type) {
+        g_string_free(self->mime_type, TRUE);
+        self->mime_type = NULL;
+    }
+
+    /* Chain up parent */
+    if (root_class->finalize) {
+        root_class->finalize(obj);
+    }
+}
+
+MixVideoConfigParamsDec *
+mix_videoconfigparamsdec_ref(MixVideoConfigParamsDec * mix) {
+    return (MixVideoConfigParamsDec *) mix_params_ref(MIX_PARAMS(mix));
+}
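+
+/* Ownership sketch for the codec-data header (illustrative only;
+ * error checking elided): set_header() below deep-copies the caller's
+ * MixIOVec, so the caller keeps ownership of what it passed in, while
+ * get_header() hands back a freshly allocated copy that the caller
+ * must release itself:
+ *
+ *   MixIOVec *hdr = NULL;
+ *   mix_videoconfigparamsdec_get_header(cfg, &hdr);
+ *   if (hdr) {
+ *       g_free(hdr->data);
+ *       g_free(hdr);
+ *   }
+ */
+
+/**
+ * mix_videoconfigparamsdec_dup:
+ * @obj: a #MixVideoConfigParamsDec object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.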
+ */
+MixParams *
+mix_videoconfigparamsdec_dup(const MixParams * obj) {
+    MixParams *ret = NULL;
+
+    if (MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) {
+        MixVideoConfigParamsDec *duplicate = mix_videoconfigparamsdec_new();
+        if (mix_videoconfigparamsdec_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+            ret = MIX_PARAMS(duplicate);
+        } else {
+            mix_videoconfigparamsdec_unref(duplicate);
+        }
+    }
+
+    return ret;
+}
+
+/**
+ * mix_videoconfigparamsdec_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_videoconfigparamsdec_copy(MixParams * target, const MixParams * src) {
+
+    MixVideoConfigParamsDec *this_target, *this_src;
+    MIX_RESULT mix_result = MIX_RESULT_FAIL;
+    MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class);
+
+    LOG_V( "Begin\n");
+
+    if (MIX_IS_VIDEOCONFIGPARAMSDEC(target) && MIX_IS_VIDEOCONFIGPARAMSDEC(src)) {
+
+        /* Cast the base object to this child object */
+        this_target = MIX_VIDEOCONFIGPARAMSDEC(target);
+        this_src = MIX_VIDEOCONFIGPARAMSDEC(src);
+
+        /* copy properties of primitive type */
+
+        this_target->frame_rate_num = this_src->frame_rate_num;
+        this_target->frame_rate_denom = this_src->frame_rate_denom;
+        this_target->picture_width = this_src->picture_width;
+        this_target->picture_height = this_src->picture_height;
+        this_target->raw_format = this_src->raw_format;
+        this_target->rate_control = this_src->rate_control;
+        this_target->mixbuffer_pool_size = this_src->mixbuffer_pool_size;
+        this_target->extra_surface_allocation = this_src->extra_surface_allocation;
+
+        /* copy properties of non-primitive */
+
+        /* copy header */
+        mix_result = mix_videoconfigparamsdec_set_header(this_target,
+                &this_src->header);
+
+        if (mix_result != MIX_RESULT_SUCCESS) {
+
+            LOG_E( "set_header failed: mix_result = 0x%x\n", mix_result);
+            return FALSE;
+        }
+
+        /* copy mime_type */
+        if (this_src->mime_type) {
+
+            mix_result = mix_videoconfigparamsdec_set_mime_type(this_target,
+                    this_src->mime_type->str);
+        } else {
+            mix_result = mix_videoconfigparamsdec_set_mime_type(this_target, NULL);
+        }
+
+        if (mix_result != MIX_RESULT_SUCCESS) {
+            LOG_E( "set_mime_type failed: mix_result = 0x%x\n", mix_result);
+            return FALSE;
+        }
+
+        /* TODO: copy other properties if there's any */
+
+        /* Now chainup base class */
+        if (root_class->copy) {
+            LOG_V( "root_class->copy != NULL\n");
+            return root_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+                    src));
+        } else {
+            LOG_V( "root_class->copy == NULL\n");
+            return TRUE;
+        }
+    }
+
+    LOG_V( "End\n");
+
+    return FALSE;
+}
+
+/**
+ * mix_videoconfigparamsdec_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instances are equal.
+ *
+ * Check the two objects for equality.
+ */
+gboolean mix_videoconfigparamsdec_equal(MixParams * first, MixParams * second) {
+
+    gboolean ret = FALSE;
+
+    MixVideoConfigParamsDec *this_first, *this_second;
+    MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class);
+
+
+    if (MIX_IS_VIDEOCONFIGPARAMSDEC(first) && MIX_IS_VIDEOCONFIGPARAMSDEC(second)) {
+
+        // Deep compare
+        // Cast the base object to this child object
+        this_first = MIX_VIDEOCONFIGPARAMSDEC(first);
+        this_second = MIX_VIDEOCONFIGPARAMSDEC(second);
+
+        /* check the equality of the primitive type properties;
+         * a difference in either member makes the objects unequal */
+        if (this_first->frame_order_mode != this_second->frame_order_mode) {
+            goto not_equal;
+        }
+
+        if (this_first->frame_rate_num != this_second->frame_rate_num
+                || this_first->frame_rate_denom
+                        != this_second->frame_rate_denom) {
+            goto not_equal;
+        }
+
+        if (this_first->picture_width != this_second->picture_width
+                || this_first->picture_height != this_second->picture_height) {
+            goto not_equal;
+        }
+
+        if (this_first->raw_format != this_second->raw_format) {
+            goto not_equal;
+        }
+
+        if (this_first->rate_control != this_second->rate_control) {
+            goto not_equal;
+        }
+
+        if (this_first->mixbuffer_pool_size != this_second->mixbuffer_pool_size) {
+            goto not_equal;
+        }
+
+        if (this_first->extra_surface_allocation != this_second->extra_surface_allocation) {
+            goto not_equal;
+        }
+
+        /* check the equality of the non-primitive type properties */
+
+        /* MixIOVec header */
+
+        if (this_first->header.data_size != this_second->header.data_size) {
+            goto not_equal;
+        }
+
+        if (this_first->header.buffer_size != this_second->header.buffer_size) {
+            goto not_equal;
+        }
+
+        if (this_first->header.data && this_second->header.data) {
+            if (memcmp(this_first->header.data, this_second->header.data,
+                    this_first->header.data_size) != 0) {
+                goto not_equal;
+            }
+        } else if (!(!this_first->header.data && !this_second->header.data)) {
+            goto not_equal;
+        }
+
+        /* compare mime_type */
+
+        if (this_first->mime_type && this_second->mime_type) {
+            if (g_string_equal(this_first->mime_type, this_second->mime_type)
+                    != TRUE) {
+                goto not_equal;
+            }
+        } else if (!(!this_first->mime_type && !this_second->mime_type)) {
+            goto not_equal;
+        }
+
+        ret = TRUE;
+
+        not_equal:
+
+        if (ret != TRUE) {
+            return ret;
+        }
+
+        /* chaining up. */
+        if (root_class->equal)
+            ret = root_class->equal(first, second);
+        else
+            ret = TRUE;
+    }
+
+    return ret;
+}
+
+#define MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT(obj) \
+    if(!obj) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT(obj, prop) \
+    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR(obj, prop, prop2) \
+    if(!obj || !prop || !prop2 ) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \
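+
+/* A hypothetical additional setter following the same pattern as the
+ * accessors below (sketch only; "profile" is NOT a real member of
+ * this object, it merely illustrates how the check macros are used):
+ *
+ *   MIX_RESULT mix_videoconfigparamsdec_set_profile(
+ *           MixVideoConfigParamsDec * obj, guint profile) {
+ *       MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj);
+ *       obj->profile = profile;
+ *       return MIX_RESULT_SUCCESS;
+ *   }
+ */
+
+/* TODO: Add getters and setters for other properties.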
+   The following is incomplete */
+
+MIX_RESULT mix_videoconfigparamsdec_set_frame_order_mode(
+        MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode) {
+    MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj);
+    obj->frame_order_mode = frame_order_mode;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_frame_order_mode(
+        MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode) {
+    MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, frame_order_mode);
+    *frame_order_mode = obj->frame_order_mode;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_header(MixVideoConfigParamsDec * obj,
+        MixIOVec * header) {
+
+    MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj);
+
+    if (!header) {
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    if (header->data && header->buffer_size) {
+        /* release any previously set header data to avoid leaking it */
+        if (obj->header.data) {
+            g_free(obj->header.data);
+        }
+        obj->header.data = g_memdup(header->data, header->buffer_size);
+        if (!obj->header.data) {
+            return MIX_RESULT_NO_MEMORY;
+        }
+        obj->header.buffer_size = header->buffer_size;
+        obj->header.data_size = header->data_size;
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_header(MixVideoConfigParamsDec * obj,
+        MixIOVec ** header) {
+
+    MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, header);
+
+    if (obj->header.data && obj->header.buffer_size) {
+
+        *header = g_malloc(sizeof(MixIOVec));
+
+        if (*header == NULL) {
+            return MIX_RESULT_NO_MEMORY;
+        }
+
+        (*header)->data = g_memdup(obj->header.data, obj->header.buffer_size);
+        (*header)->buffer_size = obj->header.buffer_size;
+        (*header)->data_size = obj->header.data_size;
+
+    } else {
+        *header = NULL;
+    }
+    return MIX_RESULT_SUCCESS;
+
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_mime_type(MixVideoConfigParamsDec * obj,
+        const gchar * mime_type) {
+
+    MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj);
+
+    if (!mime_type) {
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    /* free any previously set mime_type; g_string_free(str, TRUE)
+     * releases the GString together with its character data */
+    if (obj->mime_type) {
+        g_string_free(obj->mime_type, TRUE);
+        obj->mime_type = NULL;
+    }
+
+    obj->mime_type = g_string_new(mime_type);
+    if (!obj->mime_type) {
+        return MIX_RESULT_NO_MEMORY;
+    }
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_mime_type(MixVideoConfigParamsDec * obj,
+        gchar ** mime_type) {
+    MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, mime_type);
+
+    if (!obj->mime_type) {
+        *mime_type = NULL;
+        return MIX_RESULT_SUCCESS;
+    }
+    *mime_type = g_strdup(obj->mime_type->str);
+    if (!*mime_type) {
+        return MIX_RESULT_NO_MEMORY;
+    }
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_frame_rate(MixVideoConfigParamsDec * obj,
+        guint frame_rate_num, guint frame_rate_denom) {
+    MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj);
+    obj->frame_rate_num = frame_rate_num;
+    obj->frame_rate_denom = frame_rate_denom;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_frame_rate(MixVideoConfigParamsDec * obj,
+        guint * frame_rate_num, guint * frame_rate_denom) {
+    MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, frame_rate_num, frame_rate_denom);
+    *frame_rate_num = obj->frame_rate_num;
+    *frame_rate_denom = obj->frame_rate_denom;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_picture_res(MixVideoConfigParamsDec * obj,
+        guint picture_width, guint picture_height) {
+    MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj);
+    obj->picture_width = picture_width;
+    obj->picture_height = picture_height;
+    return MIX_RESULT_SUCCESS;
+}
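+
+/* Illustrative configuration of a decoder params object using the
+ * setters above (sketch only; the mime-type string and the numeric
+ * values are arbitrary examples, and error checking is elided):
+ *
+ *   MixVideoConfigParamsDec *cfg = mix_videoconfigparamsdec_new();
+ *   mix_videoconfigparamsdec_set_mime_type(cfg, "video/x-h264");
+ *   mix_videoconfigparamsdec_set_picture_res(cfg, 1280, 720);
+ *   mix_videoconfigparamsdec_set_frame_rate(cfg, 30, 1);
+ *   mix_videoconfigparamsdec_set_buffer_pool_size(cfg, 8);
+ *   // ... pass cfg to mix_video_configure() ...
+ *   mix_videoconfigparamsdec_unref(cfg);
+ */
+
+MIX_RESULT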
mix_videoconfigparamsdec_get_picture_res(MixVideoConfigParamsDec * obj, + guint * picture_width, guint * picture_height) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, picture_width, picture_height); + *picture_width = obj->picture_width; + *picture_height = obj->picture_height; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_raw_format(MixVideoConfigParamsDec * obj, + guint raw_format) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + + /* TODO: check if the value of raw_format is valid */ + obj->raw_format = raw_format; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_raw_format(MixVideoConfigParamsDec * obj, + guint *raw_format) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, raw_format); + *raw_format = obj->raw_format; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_rate_control(MixVideoConfigParamsDec * obj, + guint rate_control) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + + /* TODO: check if the value of rate_control is valid */ + obj->rate_control = rate_control; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_rate_control(MixVideoConfigParamsDec * obj, + guint *rate_control) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, rate_control); + *rate_control = obj->rate_control; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_buffer_pool_size( + MixVideoConfigParamsDec * obj, guint bufpoolsize) { + + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + + obj->mixbuffer_pool_size = bufpoolsize; + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videoconfigparamsdec_get_buffer_pool_size( + MixVideoConfigParamsDec * obj, guint *bufpoolsize) { + + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, bufpoolsize); + *bufpoolsize = obj->mixbuffer_pool_size; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation( + MixVideoConfigParamsDec * obj, + guint extra_surface_allocation) { + + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + + obj->extra_surface_allocation = extra_surface_allocation; + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation( + MixVideoConfigParamsDec * obj, + guint *extra_surface_allocation) { + + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, extra_surface_allocation); + *extra_surface_allocation = obj->extra_surface_allocation; + return MIX_RESULT_SUCCESS; + +} + + + + diff --git a/mix_video/src/mixvideoconfigparamsdec.h b/mix_video/src/mixvideoconfigparamsdec.h new file mode 100644 index 0000000..6aa9047 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsdec.h @@ -0,0 +1,195 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOCONFIGPARAMSDEC_H__
+#define __MIX_VIDEOCONFIGPARAMSDEC_H__
+
+#include "mixvideoconfigparams.h"
+#include "mixvideodef.h"
+
+/**
+ * MIX_TYPE_VIDEOCONFIGPARAMSDEC:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_VIDEOCONFIGPARAMSDEC (mix_videoconfigparamsdec_get_type ())
+
+/**
+ * MIX_VIDEOCONFIGPARAMSDEC:
+ * @obj: object to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMSDEC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC, MixVideoConfigParamsDec))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMSDEC:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixVideoConfigParamsDec
+ */
+#define MIX_IS_VIDEOCONFIGPARAMSDEC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC))
+
+/**
+ * MIX_VIDEOCONFIGPARAMSDEC_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMSDEC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC, MixVideoConfigParamsDecClass))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMSDEC_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixVideoConfigParamsDecClass
+ */
+#define MIX_IS_VIDEOCONFIGPARAMSDEC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC))
+
+/**
+ * MIX_VIDEOCONFIGPARAMSDEC_GET_CLASS:
+ * @obj: a #MixVideoConfigParamsDec object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_VIDEOCONFIGPARAMSDEC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC, MixVideoConfigParamsDecClass))
+
+typedef struct _MixVideoConfigParamsDec MixVideoConfigParamsDec;
+typedef struct _MixVideoConfigParamsDecClass MixVideoConfigParamsDecClass;
+
+/**
+ * MixVideoConfigParamsDec:
+ *
+ * MI-X VideoConfig Parameter object
+ */
+struct _MixVideoConfigParamsDec {
+    /*< public > */
+    MixVideoConfigParams parent;
+
+    /*< public > */
+    MixFrameOrderMode frame_order_mode;
+    MixIOVec header;
+
+    /* the type of the following members will be changed after MIX API doc is ready */
+    GString * mime_type;
+    guint frame_rate_num;
+    guint frame_rate_denom;
+    gulong picture_width;
+    gulong picture_height;
+    guint raw_format;
+    guint rate_control;
+
+    guint mixbuffer_pool_size;
+    guint extra_surface_allocation;
+
+    void *reserved1;
+    void *reserved2;
+    void *reserved3;
+    void *reserved4;
+};
+
+/**
+ * MixVideoConfigParamsDecClass:
+ *
+ * MI-X VideoConfig object class
+ */
+struct _MixVideoConfigParamsDecClass {
+    /*< public > */
+    MixVideoConfigParamsClass parent_class;
+
+    /* class members */
+};
+
+/**
+ * mix_videoconfigparamsdec_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoconfigparamsdec_get_type(void);
+
+/**
+ * mix_videoconfigparamsdec_new:
+ * @returns: A newly allocated instance of #MixVideoConfigParamsDec
+ *
+ * Use this method to create new instance of #MixVideoConfigParamsDec
+ */
+MixVideoConfigParamsDec *mix_videoconfigparamsdec_new(void);
+/**
+ * mix_videoconfigparamsdec_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoConfigParamsDec instance where reference count has been increased.
+ *
+ * Add reference count.
+ */ +MixVideoConfigParamsDec *mix_videoconfigparamsdec_ref(MixVideoConfigParamsDec * mix); + +/** + * mix_videoconfigparamsdec_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +#define mix_videoconfigparamsdec_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +MIX_RESULT mix_videoconfigparamsdec_set_frame_order_mode( + MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode); + +MIX_RESULT mix_videoconfigparamsdec_get_frame_order_mode( + MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode); + +MIX_RESULT mix_videoconfigparamsdec_set_header(MixVideoConfigParamsDec * obj, + MixIOVec *header); + +/* caller is responsible to g_free MixIOVec::data field */ +MIX_RESULT mix_videoconfigparamsdec_get_header(MixVideoConfigParamsDec * obj, + MixIOVec ** header); + +MIX_RESULT mix_videoconfigparamsdec_set_mime_type(MixVideoConfigParamsDec * obj, + const gchar * mime_type); + +MIX_RESULT mix_videoconfigparamsdec_get_mime_type(MixVideoConfigParamsDec * obj, + gchar ** mime_type); + +MIX_RESULT mix_videoconfigparamsdec_set_frame_rate(MixVideoConfigParamsDec * obj, + guint frame_rate_num, guint frame_rate_denom); + +MIX_RESULT mix_videoconfigparamsdec_get_frame_rate(MixVideoConfigParamsDec * obj, + guint * frame_rate_num, guint * frame_rate_denom); + +MIX_RESULT mix_videoconfigparamsdec_set_picture_res(MixVideoConfigParamsDec * obj, + guint picture_width, guint picture_height); + +MIX_RESULT mix_videoconfigparamsdec_get_picture_res(MixVideoConfigParamsDec * obj, + guint * picture_width, guint * picture_height); + +MIX_RESULT mix_videoconfigparamsdec_set_raw_format(MixVideoConfigParamsDec * obj, + guint raw_format); + +MIX_RESULT mix_videoconfigparamsdec_get_raw_format(MixVideoConfigParamsDec * obj, + guint *raw_format); + +MIX_RESULT mix_videoconfigparamsdec_set_rate_control(MixVideoConfigParamsDec * obj, + guint rate_control); + +MIX_RESULT mix_videoconfigparamsdec_get_rate_control(MixVideoConfigParamsDec * obj, + guint *rate_control); + +MIX_RESULT mix_videoconfigparamsdec_set_buffer_pool_size(MixVideoConfigParamsDec * obj, + guint bufpoolsize); + +MIX_RESULT mix_videoconfigparamsdec_get_buffer_pool_size(MixVideoConfigParamsDec * obj, + guint *bufpoolsize); + +MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation(MixVideoConfigParamsDec * obj, + guint extra_surface_allocation); + +MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation(MixVideoConfigParamsDec * obj, + guint *extra_surface_allocation); + +/* TODO: Add getters and setters for other properties */ + +#endif /* __MIX_VIDEOCONFIGPARAMSDEC_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.c b/mix_video/src/mixvideoconfigparamsdec_h264.c new file mode 100644 index 0000000..271cbf7 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsdec_h264.c @@ -0,0 +1,213 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** +* SECTION:mixvideoconfigparamsdec_h264 +* @short_description: VideoConfig parameters +* +* A data object which stores videoconfig specific parameters. +*/ + +#include "mixvideoconfigparamsdec_h264.h" + +static GType _mix_videoconfigparamsdec_h264_type = 0; +static MixVideoConfigParamsDecClass *parent_class = NULL; + +#define _do_init { _mix_videoconfigparamsdec_h264_type = g_define_type_id; } + +gboolean mix_videoconfigparamsdec_h264_copy (MixParams * target, + const MixParams * src); +MixParams *mix_videoconfigparamsdec_h264_dup (const MixParams * obj); +gboolean mix_videoconfigparamsdec_h264_equal (MixParams * first, + MixParams * second); +static void mix_videoconfigparamsdec_h264_finalize (MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsDecH264, /* The name of the new type, in Camel case */ + mix_videoconfigparamsdec_h264, /* The name of the new type in lowercase */ + MIX_TYPE_VIDEOCONFIGPARAMSDEC, /* The GType of the parent type */ + _do_init); + +void +_mix_videoconfigparamsdec_h264_initialize (void) +{ + /* the MixParams types need to be class_ref'd once before it can be + * done from multiple threads; + * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ + g_type_class_ref (mix_videoconfigparamsdec_h264_get_type ()); +} + +static void +mix_videoconfigparamsdec_h264_init (MixVideoConfigParamsDecH264 * self) +{ + /* initialize properties here */ + /* TODO: initialize properties */ + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; +} + +static void +mix_videoconfigparamsdec_h264_class_init (MixVideoConfigParamsDecH264Class * klass) +{ + MixVideoConfigParamsDecClass *this_parent_class = + MIX_VIDEOCONFIGPARAMSDEC_CLASS (klass); + MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); + + /* setup static parent class */ + parent_class = + (MixVideoConfigParamsDecClass *) g_type_class_peek_parent (klass); + + this_root_class->finalize = mix_videoconfigparamsdec_h264_finalize; + this_root_class->copy = + (MixParamsCopyFunction) mix_videoconfigparamsdec_h264_copy; + this_root_class->dup = + (MixParamsDupFunction) mix_videoconfigparamsdec_h264_dup; + this_root_class->equal = + (MixParamsEqualFunction) mix_videoconfigparamsdec_h264_equal; +} + +MixVideoConfigParamsDecH264 * +mix_videoconfigparamsdec_h264_new (void) +{ + MixVideoConfigParamsDecH264 *ret = (MixVideoConfigParamsDecH264 *) + g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264); + + return ret; +} + +void +mix_videoconfigparamsdec_h264_finalize (MixParams * obj) +{ + /* MixVideoConfigParamsDecH264 *this_obj = MIX_VIDEOCONFIGPARAMSDEC_H264 (obj); */ + MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); + + /* TODO: cleanup resources allocated */ + + /* Chain up parent */ + + if (root_class->finalize) + { + root_class->finalize (obj); + } +} + +MixVideoConfigParamsDecH264 + * mix_videoconfigparamsdec_h264_ref (MixVideoConfigParamsDecH264 * mix) +{ + return 
(MixVideoConfigParamsDecH264 *) mix_params_ref (MIX_PARAMS (mix));
+}
+
+/**
+* mix_videoconfigparamsdec_h264_dup:
+* @obj: a #MixVideoConfigParamsDec object
+* @returns: a newly allocated duplicate of the object.
+*
+* Copy duplicate of the object.
+*/
+MixParams *
+mix_videoconfigparamsdec_h264_dup (const MixParams * obj)
+{
+  MixParams *ret = NULL;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (obj))
+    {
+      MixVideoConfigParamsDecH264 *duplicate = mix_videoconfigparamsdec_h264_new ();
+      if (mix_videoconfigparamsdec_h264_copy
+          (MIX_PARAMS (duplicate), MIX_PARAMS (obj)))
+        {
+          ret = MIX_PARAMS (duplicate);
+        }
+      else
+        {
+          mix_videoconfigparamsdec_h264_unref (duplicate);
+        }
+    }
+  return ret;
+}
+
+/**
+* mix_videoconfigparamsdec_h264_copy:
+* @target: copy to target
+* @src: copy from src
+* @returns: boolean indicating whether the copy succeeded.
+*
+* Copy instance data from @src to @target.
+*/
+gboolean
+mix_videoconfigparamsdec_h264_copy (MixParams * target, const MixParams * src)
+{
+  MixVideoConfigParamsDecH264 *this_target, *this_src;
+  MixParamsClass *root_class;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (target)
+      && MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (src))
+    {
+      // Cast the base object to this child object
+      this_target = MIX_VIDEOCONFIGPARAMSDEC_H264 (target);
+      this_src = MIX_VIDEOCONFIGPARAMSDEC_H264 (src);
+
+      /* TODO: copy properties */
+
+      // Now chainup base class
+      root_class = MIX_PARAMS_CLASS (parent_class);
+
+      if (root_class->copy)
+        {
+          return root_class->copy (MIX_PARAMS_CAST (target),
+                                   MIX_PARAMS_CAST (src));
+        }
+      else
+        {
+          return TRUE;
+        }
+    }
+  return FALSE;
+}
+
+/**
+* mix_videoconfigparamsdec_h264_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicating whether the instances are equal.
+*
+* Compare the two objects for equality.
+*/
+gboolean
+mix_videoconfigparamsdec_h264_equal (MixParams * first, MixParams * second)
+{
+  gboolean ret = FALSE;
+  MixVideoConfigParamsDecH264 *this_first, *this_second;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (first)
+      && MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (second))
+    {
+      // Cast the base object to this child object
+
+      this_first = MIX_VIDEOCONFIGPARAMSDEC_H264 (first);
+      this_second = MIX_VIDEOCONFIGPARAMSDEC_H264 (second);
+
+      /* TODO: add comparison for properties */
+      {
+        // members within this scope equal. chaining up.
+        MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class);
+        if (klass->equal)
+          {
+            ret = klass->equal (first, second);
+          }
+        else
+          {
+            ret = TRUE;
+          }
+      }
+    }
+
+  return ret;
+}
+
+/* TODO: Add getters and setters for properties if any */
diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.h b/mix_video/src/mixvideoconfigparamsdec_h264.h
new file mode 100644
index 0000000..6d5f97d
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsdec_h264.h
@@ -0,0 +1,130 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions.
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_VIDEOCONFIGPARAMSDEC_H264_H__ +#define __MIX_VIDEOCONFIGPARAMSDEC_H264_H__ + +#include "mixvideoconfigparamsdec.h" +#include "mixvideodef.h" + +/** +* MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264: +* +* Get type of class. +*/ +#define MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264 (mix_videoconfigparamsdec_h264_get_type ()) + +/** +* MIX_VIDEOCONFIGPARAMSDEC_H264: +* @obj: object to be type-casted. +*/ +#define MIX_VIDEOCONFIGPARAMSDEC_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264, MixVideoConfigParamsDecH264)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSDEC_H264: +* @obj: an object. +* +* Checks if the given object is an instance of #MixVideoConfigParamsDecH264 +*/ +#define MIX_IS_VIDEOCONFIGPARAMSDEC_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264)) + +/** +* MIX_VIDEOCONFIGPARAMSDEC_H264_CLASS: +* @klass: class to be type-casted. +*/ +#define MIX_VIDEOCONFIGPARAMSDEC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264, MixVideoConfigParamsDecH264Class)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSDEC_H264_CLASS: +* @klass: a class. +* +* Checks if the given class is #MixVideoConfigParamsDecH264Class +*/ +#define MIX_IS_VIDEOCONFIGPARAMSDEC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264)) + +/** +* MIX_VIDEOCONFIGPARAMSDEC_H264_GET_CLASS: +* @obj: a #MixParams object. +* +* Get the class instance of the object. +*/ +#define MIX_VIDEOCONFIGPARAMSDEC_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264, MixVideoConfigParamsDecH264Class)) + +typedef struct _MixVideoConfigParamsDecH264 MixVideoConfigParamsDecH264; +typedef struct _MixVideoConfigParamsDecH264Class MixVideoConfigParamsDecH264Class; + +/** +* MixVideoConfigParamsDecH264: +* +* MI-X VideoConfig Parameter object +*/ +struct _MixVideoConfigParamsDecH264 +{ + /*< public > */ + MixVideoConfigParamsDec parent; + + /*< public > */ + + /* TODO: Add H.264 configuration paramters */ + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; +}; + +/** +* MixVideoConfigParamsDecH264Class: +* +* MI-X VideoConfig object class +*/ +struct _MixVideoConfigParamsDecH264Class +{ + /*< public > */ + MixVideoConfigParamsDecClass parent_class; + + /* class members */ +}; + +/** +* mix_videoconfigparamsdec_h264_get_type: +* @returns: type +* +* Get the type of object. +*/ +GType mix_videoconfigparamsdec_h264_get_type (void); + +/** +* mix_videoconfigparamsdec_h264_new: +* @returns: A newly allocated instance of #MixVideoConfigParamsDecH264 +* +* Use this method to create new instance of #MixVideoConfigParamsDecH264 +*/ +MixVideoConfigParamsDecH264 *mix_videoconfigparamsdec_h264_new (void); +/** +* mix_videoconfigparamsdec_h264_ref: +* @mix: object to add reference +* @returns: the MixVideoConfigParamsDecH264 instance where reference count has been increased. +* +* Add reference count. 
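+*
+* A minimal sketch of the ref/unref pairing (illustrative only; error
+* handling omitted):
+* |[
+* MixVideoConfigParamsDecH264 *config = mix_videoconfigparamsdec_h264_new ();
+* MixVideoConfigParamsDecH264 *config_ref = mix_videoconfigparamsdec_h264_ref (config);
+* mix_videoconfigparamsdec_h264_unref (config_ref);
+* mix_videoconfigparamsdec_h264_unref (config);
+* ]|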
+*/ +MixVideoConfigParamsDecH264 + * mix_videoconfigparamsdec_h264_ref (MixVideoConfigParamsDecH264 * mix); + +/** +* mix_videoconfigparamsdec_h264_unref: +* @obj: object to unref. +* +* Decrement reference count of the object. +*/ +#define mix_videoconfigparamsdec_h264_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +/* TODO: Add getters and setters for other properties */ + +#endif /* __MIX_VIDEOCONFIGPARAMSDEC_H264_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.c b/mix_video/src/mixvideoconfigparamsdec_mp42.c new file mode 100644 index 0000000..17329e1 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsdec_mp42.c @@ -0,0 +1,244 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideoconfigparamsdec_mp42 + * @short_description: VideoConfig parameters + * + * A data object which stores videoconfig specific parameters. 
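+ *
+ * A minimal configuration sketch (the version numbers are illustrative;
+ * error handling omitted):
+ * |[
+ * MixVideoConfigParamsDecMP42 *config = mix_videoconfigparamsdec_mp42_new ();
+ * mix_videoconfigparamsdec_mp42_set_mpegversion (config, 4);
+ * mix_videoconfigparamsdec_mp42_set_divxversion (config, 5);
+ * mix_videoconfigparamsdec_mp42_unref (config);
+ * ]|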
+ */ + +#include "mixvideolog.h" +#include "mixvideoconfigparamsdec_mp42.h" + +static GType _mix_videoconfigparamsdec_mp42_type = 0; +static MixVideoConfigParamsDecClass *parent_class = NULL; + +#define _do_init { _mix_videoconfigparamsdec_mp42_type = g_define_type_id; } + +gboolean mix_videoconfigparamsdec_mp42_copy(MixParams * target, + const MixParams * src); +MixParams *mix_videoconfigparamsdec_mp42_dup(const MixParams * obj); +gboolean + mix_videoconfigparamsdec_mp42_equal(MixParams * first, MixParams * second); +static void mix_videoconfigparamsdec_mp42_finalize(MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsDecMP42, /* The name of the new type, in Camel case */ + mix_videoconfigparamsdec_mp42, /* The name of the new type in lowercase */ + MIX_TYPE_VIDEOCONFIGPARAMSDEC, /* The GType of the parent type */ + _do_init); + +void _mix_videoconfigparamsdec_mp42_initialize(void) { + /* the MixParams types need to be class_ref'd once before it can be + * done from multiple threads; + * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ + g_type_class_ref(mix_videoconfigparamsdec_mp42_get_type()); +} + +static void mix_videoconfigparamsdec_mp42_init(MixVideoConfigParamsDecMP42 * self) { + /* initialize properties here */ + /* TODO: initialize properties */ + + self->mpegversion = 0; + self->divxversion = 0; + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; + +} + +static void mix_videoconfigparamsdec_mp42_class_init( + MixVideoConfigParamsDecMP42Class * klass) { + MixVideoConfigParamsDecClass *this_parent_class = MIX_VIDEOCONFIGPARAMSDEC_CLASS( + klass); + MixParamsClass *this_root_class = MIX_PARAMS_CLASS(this_parent_class); + + /* setup static parent class */ + parent_class + = (MixVideoConfigParamsDecClass *) g_type_class_peek_parent(klass); + + this_root_class->finalize = mix_videoconfigparamsdec_mp42_finalize; + this_root_class->copy + = (MixParamsCopyFunction) mix_videoconfigparamsdec_mp42_copy; + this_root_class->dup + = (MixParamsDupFunction) mix_videoconfigparamsdec_mp42_dup; + this_root_class->equal + = (MixParamsEqualFunction) mix_videoconfigparamsdec_mp42_equal; +} + +MixVideoConfigParamsDecMP42 * +mix_videoconfigparamsdec_mp42_new(void) { + MixVideoConfigParamsDecMP42 *ret = + (MixVideoConfigParamsDecMP42 *) g_type_create_instance( + MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42); + + return ret; +} + +void mix_videoconfigparamsdec_mp42_finalize(MixParams * obj) { + /* MixVideoConfigParamsDecMP42 *this_obj = MIX_VIDEOCONFIGPARAMSDEC_MP42 (obj); */ + MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class); + + /* TODO: cleanup resources allocated */ + + /* Chain up parent */ + + if (root_class->finalize) { + root_class->finalize(obj); + } +} + +MixVideoConfigParamsDecMP42 * +mix_videoconfigparamsdec_mp42_ref(MixVideoConfigParamsDecMP42 * mix) { + return (MixVideoConfigParamsDecMP42 *) mix_params_ref(MIX_PARAMS(mix)); +} + +/** + * mix_videoconfigparamsdec_mp42_dup: + * @obj: a #MixVideoConfigParamsDec object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. 
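+ *
+ * A short usage sketch, assuming the mix_params_dup() and mix_params_unref()
+ * helpers declared in mixparams.h (error handling omitted):
+ * |[
+ * MixVideoConfigParamsDecMP42 *config = mix_videoconfigparamsdec_mp42_new ();
+ * MixParams *clone = mix_params_dup (MIX_PARAMS (config));
+ * if (clone)
+ *     mix_params_unref (clone);
+ * mix_videoconfigparamsdec_mp42_unref (config);
+ * ]|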
+ */
+MixParams *
+mix_videoconfigparamsdec_mp42_dup(const MixParams * obj) {
+    MixParams *ret = NULL;
+
+    LOG_V( "Begin\n");
+    if (MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) {
+        MixVideoConfigParamsDecMP42 *duplicate = mix_videoconfigparamsdec_mp42_new();
+        LOG_V( "duplicate = %p\n", duplicate);
+        if (mix_videoconfigparamsdec_mp42_copy(MIX_PARAMS(duplicate),
+                MIX_PARAMS(obj))) {
+            ret = MIX_PARAMS(duplicate);
+        } else {
+            mix_videoconfigparamsdec_mp42_unref(duplicate);
+        }
+    }
+    LOG_V( "End\n");
+    return ret;
+}
+
+/**
+ * mix_videoconfigparamsdec_mp42_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicating whether the copy succeeded.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_videoconfigparamsdec_mp42_copy(MixParams * target,
+        const MixParams * src) {
+    MixVideoConfigParamsDecMP42 *this_target, *this_src;
+    MixParamsClass *root_class;
+
+    LOG_V( "Begin\n");
+    if (MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(target)
+            && MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(src)) {
+        // Cast the base object to this child object
+        this_target = MIX_VIDEOCONFIGPARAMSDEC_MP42(target);
+        this_src = MIX_VIDEOCONFIGPARAMSDEC_MP42(src);
+
+        /* copy properties */
+        this_target->mpegversion = this_src->mpegversion;
+        this_target->divxversion = this_src->divxversion;
+
+        // Now chainup base class
+        root_class = MIX_PARAMS_CLASS(parent_class);
+
+        if (root_class->copy) {
+            LOG_V( "root_class->copy != NULL\n");
+            return root_class->copy(MIX_PARAMS_CAST(target),
+                    MIX_PARAMS_CAST(src));
+        } else {
+            LOG_V( "root_class->copy == NULL\n\n");
+            return TRUE;
+        }
+    }
+    LOG_V( "End\n");
+    return FALSE;
+}
+
+/**
+ * mix_videoconfigparamsdec_mp42_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the instances are equal.
+ *
+ * Compare the two objects for equality.
+ */
+gboolean mix_videoconfigparamsdec_mp42_equal(MixParams * first, MixParams * second) {
+    gboolean ret = FALSE;
+    MixVideoConfigParamsDecMP42 *this_first, *this_second;
+
+    if (MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(first)
+            && MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(second)) {
+        // Deep compare
+        // Cast the base object to this child object
+
+        this_first = MIX_VIDEOCONFIGPARAMSDEC_MP42(first);
+        this_second = MIX_VIDEOCONFIGPARAMSDEC_MP42(second);
+
+        /* compare properties; copy() handles these, so equal() must too */
+        if (this_first->mpegversion == this_second->mpegversion
+                && this_first->divxversion == this_second->divxversion) {
+            // members within this scope equal. chaining up.
+ MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->equal) { + ret = klass->equal(first, second); + } else { + ret = TRUE; + } + } + } + + return ret; +} + +/* TODO: Add getters and setters for properties if any */ + +#define MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) return MIX_RESULT_FAIL; \ + + +MIX_RESULT mix_videoconfigparamsdec_mp42_set_mpegversion( + MixVideoConfigParamsDecMP42 *obj, guint version) { + MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT (obj); + obj->mpegversion = version; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_mp42_get_mpegversion( + MixVideoConfigParamsDecMP42 *obj, guint *version) { + MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT (obj, version); + *version = obj->mpegversion; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion( + MixVideoConfigParamsDecMP42 *obj, guint version) { + + MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT (obj); + obj->divxversion = version; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion( + MixVideoConfigParamsDecMP42 *obj, guint *version) { + + MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT (obj, version); + *version = obj->divxversion; + return MIX_RESULT_SUCCESS; + +} + diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.h b/mix_video/src/mixvideoconfigparamsdec_mp42.h new file mode 100644 index 0000000..5f68b42 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsdec_mp42.h @@ -0,0 +1,141 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEOCONFIGPARAMSDEC_MP42_H__ +#define __MIX_VIDEOCONFIGPARAMSDEC_MP42_H__ + +#include "mixvideoconfigparamsdec.h" +#include "mixvideodef.h" + +/** + * MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42: + * + * Get type of class. + */ +#define MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42 (mix_videoconfigparamsdec_mp42_get_type ()) + +/** + * MIX_VIDEOCONFIGPARAMSDEC_MP42: + * @obj: object to be type-casted. + */ +#define MIX_VIDEOCONFIGPARAMSDEC_MP42(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42, MixVideoConfigParamsDecMP42)) + +/** + * MIX_IS_VIDEOCONFIGPARAMSDEC_MP42: + * @obj: an object. 
+ * + * Checks if the given object is an instance of #MixVideoConfigParamsDecMP42 + */ +#define MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42)) + +/** + * MIX_VIDEOCONFIGPARAMSDEC_MP42_CLASS: + * @klass: class to be type-casted. + */ +#define MIX_VIDEOCONFIGPARAMSDEC_MP42_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42, MixVideoConfigParamsDecMP42Class)) + +/** + * MIX_IS_VIDEOCONFIGPARAMSDEC_MP42_CLASS: + * @klass: a class. + * + * Checks if the given class is #MixVideoConfigParamsDecMP42Class + */ +#define MIX_IS_VIDEOCONFIGPARAMSDEC_MP42_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42)) + +/** + * MIX_VIDEOCONFIGPARAMSDEC_MP42_GET_CLASS: + * @obj: a #MixParams object. + * + * Get the class instance of the object. + */ +#define MIX_VIDEOCONFIGPARAMSDEC_MP42_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42, MixVideoConfigParamsDecMP42Class)) + +typedef struct _MixVideoConfigParamsDecMP42 MixVideoConfigParamsDecMP42; +typedef struct _MixVideoConfigParamsDecMP42Class MixVideoConfigParamsDecMP42Class; + +/** + * MixVideoConfigParamsDecMP42: + * + * MI-X VideoConfig Parameter object + */ +struct _MixVideoConfigParamsDecMP42 { + /*< public > */ + MixVideoConfigParamsDec parent; + + /*< public > */ + + guint mpegversion; + guint divxversion; + + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; +}; + +/** + * MixVideoConfigParamsDecMP42Class: + * + * MI-X VideoConfig object class + */ +struct _MixVideoConfigParamsDecMP42Class { + /*< public > */ + MixVideoConfigParamsDecClass parent_class; + +/* class members */ +}; + +/** + * mix_videoconfigparamsdec_mp42_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videoconfigparamsdec_mp42_get_type(void); + +/** + * mix_videoconfigparamsdec_mp42_new: + * @returns: A newly allocated instance of #MixVideoConfigParamsDecMP42 + * + * Use this method to create new instance of #MixVideoConfigParamsDecMP42 + */ +MixVideoConfigParamsDecMP42 *mix_videoconfigparamsdec_mp42_new(void); +/** + * mix_videoconfigparamsdec_mp42_ref: + * @mix: object to add reference + * @returns: the MixVideoConfigParamsDecMP42 instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoConfigParamsDecMP42 +* mix_videoconfigparamsdec_mp42_ref(MixVideoConfigParamsDecMP42 * mix); + +/** + * mix_videoconfigparamsdec_mp42_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. 
+ */ +#define mix_videoconfigparamsdec_mp42_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +/* TODO: Add getters and setters for other properties */ +MIX_RESULT mix_videoconfigparamsdec_mp42_set_mpegversion( + MixVideoConfigParamsDecMP42 *obj, guint version); + +MIX_RESULT mix_videoconfigparamsdec_mp42_get_mpegversion( + MixVideoConfigParamsDecMP42 *obj, guint *version); + +MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion( + MixVideoConfigParamsDecMP42 *obj, guint version); + +MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion( + MixVideoConfigParamsDecMP42 *obj, guint *version); + +#endif /* __MIX_VIDEOCONFIGPARAMSDEC_MP42_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.c b/mix_video/src/mixvideoconfigparamsdec_vc1.c new file mode 100644 index 0000000..fdce4f3 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsdec_vc1.c @@ -0,0 +1,188 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideoconfigparamsdec_vc1 + * @short_description: VideoConfig parameters + * + * A data object which stores videoconfig specific parameters. 
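+ *
+ * VC-1 specific setters are still TODO (see below), so a typical setup
+ * currently goes through the #MixVideoConfigParamsDec base class. A sketch
+ * (the mime type string and resolution are illustrative; error handling
+ * omitted):
+ * |[
+ * MixVideoConfigParamsDecVC1 *config = mix_videoconfigparamsdec_vc1_new ();
+ * MixVideoConfigParamsDec *base = MIX_VIDEOCONFIGPARAMSDEC (config);
+ * mix_videoconfigparamsdec_set_mime_type (base, "video/x-wmv");
+ * mix_videoconfigparamsdec_set_picture_res (base, 1280, 720);
+ * mix_videoconfigparamsdec_vc1_unref (config);
+ * ]|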
+ */ + +#include "mixvideoconfigparamsdec_vc1.h" + +static GType _mix_videoconfigparamsdec_vc1_type = 0; +static MixVideoConfigParamsDecClass *parent_class = NULL; + +#define _do_init { _mix_videoconfigparamsdec_vc1_type = g_define_type_id; } + +gboolean mix_videoconfigparamsdec_vc1_copy(MixParams * target, + const MixParams * src); +MixParams *mix_videoconfigparamsdec_vc1_dup(const MixParams * obj); +gboolean mix_videoconfigparamsdec_vc1_equal(MixParams * first, MixParams * second); +static void mix_videoconfigparamsdec_vc1_finalize(MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsDecVC1, /* The name of the new type, in Camel case */ + mix_videoconfigparamsdec_vc1, /* The name of the new type in lowercase */ + MIX_TYPE_VIDEOCONFIGPARAMSDEC, /* The GType of the parent type */ + _do_init); + +void _mix_videoconfigparamsdec_vc1_initialize(void) { + /* the MixParams types need to be class_ref'd once before it can be + * done from multiple threads; + * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ + g_type_class_ref(mix_videoconfigparamsdec_vc1_get_type()); +} + +static void mix_videoconfigparamsdec_vc1_init(MixVideoConfigParamsDecVC1 * self) { + /* initialize properties here */ + /* TODO: initialize properties */ + + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; +} + +static void mix_videoconfigparamsdec_vc1_class_init( + MixVideoConfigParamsDecVC1Class * klass) { + MixVideoConfigParamsDecClass *this_parent_class = MIX_VIDEOCONFIGPARAMSDEC_CLASS( + klass); + MixParamsClass *this_root_class = MIX_PARAMS_CLASS(this_parent_class); + + /* setup static parent class */ + parent_class + = (MixVideoConfigParamsDecClass *) g_type_class_peek_parent(klass); + + this_root_class->finalize = mix_videoconfigparamsdec_vc1_finalize; + this_root_class->copy + = (MixParamsCopyFunction) mix_videoconfigparamsdec_vc1_copy; + this_root_class->dup = (MixParamsDupFunction) mix_videoconfigparamsdec_vc1_dup; + this_root_class->equal + = (MixParamsEqualFunction) mix_videoconfigparamsdec_vc1_equal; +} + +MixVideoConfigParamsDecVC1 * +mix_videoconfigparamsdec_vc1_new(void) { + MixVideoConfigParamsDecVC1 *ret = + (MixVideoConfigParamsDecVC1 *) g_type_create_instance( + MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1); + + return ret; +} + +void mix_videoconfigparamsdec_vc1_finalize(MixParams * obj) { + /* MixVideoConfigParamsDecVC1 *this_obj = MIX_VIDEOCONFIGPARAMSDEC_VC1 (obj); */ + MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class); + + /* TODO: cleanup resources allocated */ + + /* Chain up parent */ + + if (root_class->finalize) { + root_class->finalize(obj); + } +} + +MixVideoConfigParamsDecVC1 * +mix_videoconfigparamsdec_vc1_ref(MixVideoConfigParamsDecVC1 * mix) { + return (MixVideoConfigParamsDecVC1 *) mix_params_ref(MIX_PARAMS(mix)); +} + +/** + * mix_videoconfigparamsdec_vc1_dup: + * @obj: a #MixVideoConfigParamsDec object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. 
+ */
+MixParams *
+mix_videoconfigparamsdec_vc1_dup(const MixParams * obj) {
+    MixParams *ret = NULL;
+
+    if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(obj)) {
+        MixVideoConfigParamsDecVC1 *duplicate = mix_videoconfigparamsdec_vc1_new();
+        if (mix_videoconfigparamsdec_vc1_copy(MIX_PARAMS(duplicate),
+                MIX_PARAMS(obj))) {
+            ret = MIX_PARAMS(duplicate);
+        } else {
+            mix_videoconfigparamsdec_vc1_unref(duplicate);
+        }
+    }
+    return ret;
+}
+
+/**
+ * mix_videoconfigparamsdec_vc1_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicating whether the copy succeeded.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_videoconfigparamsdec_vc1_copy(MixParams * target,
+        const MixParams * src) {
+    MixVideoConfigParamsDecVC1 *this_target, *this_src;
+    MixParamsClass *root_class;
+
+    if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(target)
+            && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(src)) {
+        // Cast the base object to this child object
+        this_target = MIX_VIDEOCONFIGPARAMSDEC_VC1(target);
+        this_src = MIX_VIDEOCONFIGPARAMSDEC_VC1(src);
+
+        /* TODO: copy properties */
+
+        // Now chainup base class
+        root_class = MIX_PARAMS_CLASS(parent_class);
+
+        if (root_class->copy) {
+            return root_class->copy(MIX_PARAMS_CAST(target),
+                    MIX_PARAMS_CAST(src));
+        } else {
+            return TRUE;
+        }
+    }
+    return FALSE;
+}
+
+/**
+ * mix_videoconfigparamsdec_vc1_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the instances are equal.
+ *
+ * Compare the two objects for equality.
+ */
+gboolean mix_videoconfigparamsdec_vc1_equal(MixParams * first, MixParams * second) {
+    gboolean ret = FALSE;
+    MixVideoConfigParamsDecVC1 *this_first, *this_second;
+
+    if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(first)
+            && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(second)) {
+        // Deep compare
+        // Cast the base object to this child object
+
+        this_first = MIX_VIDEOCONFIGPARAMSDEC_VC1(first);
+        this_second = MIX_VIDEOCONFIGPARAMSDEC_VC1(second);
+
+        /* TODO: add comparison for properties */
+        {
+            // members within this scope equal. chaining up.
+            MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+            if (klass->equal) {
+                ret = klass->equal(first, second);
+            } else {
+                ret = TRUE;
+            }
+        }
+    }
+
+    return ret;
+}
+
+/* TODO: Add getters and setters for properties if any */
diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.h b/mix_video/src/mixvideoconfigparamsdec_vc1.h
new file mode 100644
index 0000000..ecf90e5
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsdec_vc1.h
@@ -0,0 +1,134 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise.
Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_VIDEOCONFIGPARAMSDEC_VC1_H__ +#define __MIX_VIDEOCONFIGPARAMSDEC_VC1_H__ + +#include "mixvideoconfigparamsdec.h" +#include "mixvideodef.h" + +/** +* MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1: +* +* Get type of class. +*/ +#define MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1 (mix_videoconfigparamsdec_vc1_get_type ()) + +/** +* MIX_VIDEOCONFIGPARAMSDEC_VC1: +* @obj: object to be type-casted. +*/ +#define MIX_VIDEOCONFIGPARAMSDEC_VC1(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1, MixVideoConfigParamsDecVC1)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSDEC_VC1: +* @obj: an object. +* +* Checks if the given object is an instance of #MixVideoConfigParamsDecVC1 +*/ +#define MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1)) + +/** +* MIX_VIDEOCONFIGPARAMSDEC_VC1_CLASS: +* @klass: class to be type-casted. +*/ +#define MIX_VIDEOCONFIGPARAMSDEC_VC1_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1, MixVideoConfigParamsDecVC1Class)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSDEC_VC1_CLASS: +* @klass: a class. +* +* Checks if the given class is #MixVideoConfigParamsDecVC1Class +*/ +#define MIX_IS_VIDEOCONFIGPARAMSDEC_VC1_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1)) + +/** +* MIX_VIDEOCONFIGPARAMSDEC_VC1_GET_CLASS: +* @obj: a #MixParams object. +* +* Get the class instance of the object. +*/ +#define MIX_VIDEOCONFIGPARAMSDEC_VC1_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1, MixVideoConfigParamsDecVC1Class)) + +typedef struct _MixVideoConfigParamsDecVC1 MixVideoConfigParamsDecVC1; +typedef struct _MixVideoConfigParamsDecVC1Class MixVideoConfigParamsDecVC1Class; + +/** +* MixVideoConfigParamsDecVC1: +* +* MI-X VideoConfig Parameter object +*/ +struct _MixVideoConfigParamsDecVC1 +{ + /*< public > */ + MixVideoConfigParamsDec parent; + + /*< public > */ + + /* TODO: Add VC1 configuration paramters */ + /* TODO: wmv_version and fourcc type might be changed later */ + guint wmv_version; + guint fourcc; + + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; +}; + +/** +* MixVideoConfigParamsDecVC1Class: +* +* MI-X VideoConfig object class +*/ +struct _MixVideoConfigParamsDecVC1Class +{ + /*< public > */ + MixVideoConfigParamsDecClass parent_class; + + /* class members */ +}; + +/** +* mix_videoconfigparamsdec_vc1_get_type: +* @returns: type +* +* Get the type of object. +*/ +GType mix_videoconfigparamsdec_vc1_get_type (void); + +/** +* mix_videoconfigparamsdec_vc1_new: +* @returns: A newly allocated instance of #MixVideoConfigParamsDecVC1 +* +* Use this method to create new instance of #MixVideoConfigParamsDecVC1 +*/ +MixVideoConfigParamsDecVC1 *mix_videoconfigparamsdec_vc1_new (void); +/** +* mix_videoconfigparamsdec_vc1_ref: +* @mix: object to add reference +* @returns: the MixVideoConfigParamsDecVC1 instance where reference count has been increased. +* +* Add reference count. +*/ +MixVideoConfigParamsDecVC1 + * mix_videoconfigparamsdec_vc1_ref (MixVideoConfigParamsDecVC1 * mix); + +/** +* mix_videoconfigparamsdec_vc1_unref: +* @obj: object to unref. +* +* Decrement reference count of the object. 
+*/
+#define mix_videoconfigparamsdec_vc1_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/* TODO: Add getters and setters for other properties */
+
+#endif /* __MIX_VIDEOCONFIGPARAMSDEC_VC1_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsenc.c b/mix_video/src/mixvideoconfigparamsenc.c
new file mode 100644
index 0000000..44a31ce
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsenc.c
@@ -0,0 +1,688 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideoconfigparamsenc
+ * @short_description: VideoConfig parameters
+ *
+ * A data object which stores videoconfig specific parameters.
+ */
+
+#include <string.h>	/* for memcpy */
+#include "mixvideolog.h"
+#include "mixvideoconfigparamsenc.h"
+
+static GType _mix_videoconfigparamsenc_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define MDEBUG
+
+#define _do_init { _mix_videoconfigparamsenc_type = g_define_type_id; }
+
+gboolean mix_videoconfigparamsenc_copy(MixParams * target, const MixParams * src);
+MixParams *mix_videoconfigparamsenc_dup(const MixParams * obj);
+gboolean mix_videoconfigparamsenc_equal(MixParams * first, MixParams * second);
+static void mix_videoconfigparamsenc_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEnc, mix_videoconfigparamsenc,
+        MIX_TYPE_VIDEOCONFIGPARAMS, _do_init);
+
+static void mix_videoconfigparamsenc_init(MixVideoConfigParamsEnc * self) {
+    /* initialize properties here */
+    self->bitrate = 0;
+    self->frame_rate_num = 30;
+    self->frame_rate_denom = 1;
+    self->initial_qp = 15;
+    self->min_qp = 0;
+
+    self->picture_width = 0;
+    self->picture_height = 0;
+
+    self->mime_type = NULL;
+    self->encode_format = 0;
+    self->intra_period = 30;
+
+    self->mixbuffer_pool_size = 0;
+
+    self->share_buf_mode = FALSE;
+
+    self->ci_frame_id = NULL;
+    self->ci_frame_num = 0;
+
+    self->need_display = TRUE;
+
+    self->rate_control = MIX_RATE_CONTROL_NONE;
+    self->raw_format = MIX_RAW_TARGET_FORMAT_YUV420;
+    self->profile = MIX_PROFILE_H264BASELINE;
+
+    /* TODO: initialize other properties */
+    self->reserved1 = NULL;
+    self->reserved2 = NULL;
+    self->reserved3 = NULL;
+    self->reserved4 = NULL;
+}
+
+static void mix_videoconfigparamsenc_class_init(MixVideoConfigParamsEncClass * klass) {
+    MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+    /* setup static parent class */
+    parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+    mixparams_class->finalize = mix_videoconfigparamsenc_finalize;
+    mixparams_class->copy = (MixParamsCopyFunction) mix_videoconfigparamsenc_copy;
+    mixparams_class->dup = (MixParamsDupFunction) mix_videoconfigparamsenc_dup;
+    mixparams_class->equal
+            = (MixParamsEqualFunction) mix_videoconfigparamsenc_equal;
+}
+
+MixVideoConfigParamsEnc *
+mix_videoconfigparamsenc_new(void) {
+    MixVideoConfigParamsEnc *ret =
+            (MixVideoConfigParamsEnc *) g_type_create_instance(
+                    MIX_TYPE_VIDEOCONFIGPARAMSENC);
+
+    return ret;
+}
+
+void mix_videoconfigparamsenc_finalize(MixParams * obj) {
+
+    /* clean up here. */
+    MixVideoConfigParamsEnc *self = MIX_VIDEOCONFIGPARAMSENC(obj);
+
+    /* free mime_type; it is still NULL if it was never set */
+    if (self->mime_type)
+        g_string_free(self->mime_type, TRUE);
+
+    if (self->ci_frame_id)
+        g_free (self->ci_frame_id);
+
+    /* Chain up parent */
+    if (parent_class->finalize) {
+        parent_class->finalize(obj);
+    }
+}
+
+MixVideoConfigParamsEnc *
+mix_videoconfigparamsenc_ref(MixVideoConfigParamsEnc * mix) {
+    return (MixVideoConfigParamsEnc *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_videoconfigparamsenc_dup:
+ * @obj: a #MixVideoConfigParamsEnc object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams *
+mix_videoconfigparamsenc_dup(const MixParams * obj) {
+    MixParams *ret = NULL;
+
+    LOG_V( "Begin\n");
+
+    if (MIX_IS_VIDEOCONFIGPARAMSENC(obj)) {
+        MixVideoConfigParamsEnc *duplicate = mix_videoconfigparamsenc_new();
+        if (mix_videoconfigparamsenc_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+
+            ret = MIX_PARAMS(duplicate);
+        } else {
+            mix_videoconfigparamsenc_unref(duplicate);
+        }
+    }
+    return ret;
+}
+
+/**
+ * mix_videoconfigparamsenc_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicating whether the copy succeeded.
+ *
+ * Copy instance data from @src to @target.
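+ *
+ * A short sketch, assuming the mix_params_copy() helper declared in
+ * mixparams.h (values are illustrative; error handling omitted):
+ * |[
+ * MixVideoConfigParamsEnc *src = mix_videoconfigparamsenc_new ();
+ * MixVideoConfigParamsEnc *dst = mix_videoconfigparamsenc_new ();
+ * mix_videoconfigparamsenc_set_bit_rate (src, 4000000);
+ * mix_params_copy (MIX_PARAMS (dst), MIX_PARAMS (src));
+ * mix_videoconfigparamsenc_unref (src);
+ * mix_videoconfigparamsenc_unref (dst);
+ * ]|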
+ */
+gboolean mix_videoconfigparamsenc_copy(MixParams * target, const MixParams * src) {
+
+    MixVideoConfigParamsEnc *this_target, *this_src;
+    MIX_RESULT mix_result = MIX_RESULT_FAIL;
+
+    LOG_V( "Begin\n");
+
+    if (MIX_IS_VIDEOCONFIGPARAMSENC(target) && MIX_IS_VIDEOCONFIGPARAMSENC(src)) {
+
+        /* Cast the base object to this child object */
+        this_target = MIX_VIDEOCONFIGPARAMSENC(target);
+        this_src = MIX_VIDEOCONFIGPARAMSENC(src);
+
+        /* copy properties of primitive type */
+
+        this_target->bitrate = this_src->bitrate;
+        this_target->frame_rate_num = this_src->frame_rate_num;
+        this_target->frame_rate_denom = this_src->frame_rate_denom;
+        this_target->initial_qp = this_src->initial_qp;
+        this_target->min_qp = this_src->min_qp;
+        this_target->intra_period = this_src->intra_period;
+        this_target->picture_width = this_src->picture_width;
+        this_target->picture_height = this_src->picture_height;
+        this_target->mixbuffer_pool_size = this_src->mixbuffer_pool_size;
+        this_target->share_buf_mode = this_src->share_buf_mode;
+        this_target->encode_format = this_src->encode_format;
+        this_target->ci_frame_num = this_src->ci_frame_num;
+        this_target->draw = this_src->draw;
+        this_target->need_display = this_src->need_display;
+        this_target->rate_control = this_src->rate_control;
+        this_target->raw_format = this_src->raw_format;
+        this_target->profile = this_src->profile;
+
+        /* copy properties of non-primitive type */
+
+        /* copy mime_type */
+
+        if (this_src->mime_type) {
+#ifdef MDEBUG
+            if (this_src->mime_type->str) {
+
+                LOG_I( "this_src->mime_type->str = %s %x\n",
+                        this_src->mime_type->str, (unsigned int)this_src->mime_type->str);
+            }
+#endif
+
+            mix_result = mix_videoconfigparamsenc_set_mime_type(this_target,
+                    this_src->mime_type->str);
+        } else {
+
+            LOG_I( "this_src->mime_type = NULL\n");
+
+            /* the setter rejects NULL, so clear the target's mime_type directly */
+            if (this_target->mime_type) {
+                g_string_free(this_target->mime_type, TRUE);
+                this_target->mime_type = NULL;
+            }
+            mix_result = MIX_RESULT_SUCCESS;
+        }
+
+        if (mix_result != MIX_RESULT_SUCCESS) {
+
+            LOG_E( "Failed to mix_videoconfigparamsenc_set_mime_type\n");
+            return FALSE;
+        }
+
+        mix_result = mix_videoconfigparamsenc_set_ci_frame_info (this_target, this_src->ci_frame_id,
+                this_src->ci_frame_num);
+
+        /* a failed allocation here must not be ignored */
+        if (mix_result != MIX_RESULT_SUCCESS) {
+
+            LOG_E( "Failed to mix_videoconfigparamsenc_set_ci_frame_info\n");
+            return FALSE;
+        }
+
+        /* TODO: copy other properties if there are any */
+
+        /* Now chainup base class */
+        if (parent_class->copy) {
+            return parent_class->copy(MIX_PARAMS_CAST(target),
+                    MIX_PARAMS_CAST(src));
+        } else {
+            return TRUE;
+        }
+    }
+
+    return FALSE;
+}
+
+
+/**
+ * mix_videoconfigparamsenc_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the instances are equal.
+ *
+ * Compare the two objects for equality.
+ */
+gboolean mix_videoconfigparamsenc_equal(MixParams * first, MixParams * second) {
+
+    gboolean ret = FALSE;
+
+    MixVideoConfigParamsEnc *this_first, *this_second;
+
+    if (MIX_IS_VIDEOCONFIGPARAMSENC(first) && MIX_IS_VIDEOCONFIGPARAMSENC(second)) {
+
+        // Deep compare
+        // Cast the base object to this child object
+        this_first = MIX_VIDEOCONFIGPARAMSENC(first);
+        this_second = MIX_VIDEOCONFIGPARAMSENC(second);
+
+        /* check the equality of the primitive type properties */
+        if (this_first->bitrate != this_second->bitrate) {
+            goto not_equal;
+        }
+
+        if (this_first->frame_rate_num != this_second->frame_rate_num) {
+            goto not_equal;
+        }
+
+        if (this_first->frame_rate_denom != this_second->frame_rate_denom) {
+            goto not_equal;
+        }
+
+        if (this_first->initial_qp != this_second->initial_qp) {
+            goto not_equal;
+        }
+
+        if (this_first->min_qp != this_second->min_qp) {
+            goto not_equal;
+        }
+
+        if (this_first->intra_period != this_second->intra_period) {
+            goto not_equal;
+        }
+
+        /* either dimension differing makes the objects unequal, so ||, not && */
+        if (this_first->picture_width != this_second->picture_width
+                || this_first->picture_height != this_second->picture_height) {
+            goto not_equal;
+        }
+
+        if (this_first->encode_format != this_second->encode_format) {
+            goto not_equal;
+        }
+
+        if (this_first->mixbuffer_pool_size != this_second->mixbuffer_pool_size) {
+            goto not_equal;
+        }
+
+        if (this_first->share_buf_mode != this_second->share_buf_mode) {
+            goto not_equal;
+        }
+
+        if (this_first->ci_frame_num != this_second->ci_frame_num) {
+            goto not_equal;
+        }
+
+        /* compare the ci_frame_id arrays by content, not just by pointer */
+        if (this_first->ci_frame_id != this_second->ci_frame_id) {
+            if (!this_first->ci_frame_id || !this_second->ci_frame_id) {
+                goto not_equal;
+            }
+            if (memcmp(this_first->ci_frame_id, this_second->ci_frame_id,
+                    this_first->ci_frame_num * sizeof(gulong)) != 0) {
+                goto not_equal;
+            }
+        }
+
+        if (this_first->draw != this_second->draw) {
+            goto not_equal;
+        }
+
+        if (this_first->need_display != this_second->need_display) {
+            goto not_equal;
+        }
+
+        if (this_first->rate_control != this_second->rate_control) {
+            goto not_equal;
+        }
+
+        if (this_first->raw_format != this_second->raw_format) {
+            goto not_equal;
+        }
+
+        if (this_first->profile != this_second->profile) {
+            goto not_equal;
+        }
+
+        /* check the equality of the non-primitive type properties */
+
+        /* compare mime_type */
+
+        if (this_first->mime_type && this_second->mime_type) {
+            if (g_string_equal(this_first->mime_type, this_second->mime_type)
+                    != TRUE) {
+                goto not_equal;
+            }
+        } else if (!(!this_first->mime_type && !this_second->mime_type)) {
+            goto not_equal;
+        }
+
+        ret = TRUE;
+
+        not_equal:
+
+        if (ret != TRUE) {
+            return ret;
+        }
+
+        /* chaining up. */
+        MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+        if (klass->equal)
+            ret = klass->equal(first, second);
+        else
+            ret = TRUE;
+    }
+
+    return ret;
+}
+
+#define MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT(obj) \
+    if(!obj) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT(obj, prop) \
+    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR(obj, prop, prop2) \
+    if(!obj || !prop || !prop2 ) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \
+
+/* TODO: Add getters and setters for other properties.
The following is incomplete */ + + +MIX_RESULT mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj, + const gchar * mime_type) { + + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + + if (!mime_type) { + return MIX_RESULT_NULL_PTR; + } + + LOG_I( "mime_type = %s %x\n", + mime_type, (unsigned int)mime_type); + + if (obj->mime_type) { + if (obj->mime_type->str) + g_string_free(obj->mime_type, TRUE); + else + g_string_free(obj->mime_type, FALSE); + } + + + LOG_I( "mime_type = %s %x\n", + mime_type, (unsigned int)mime_type); + + obj->mime_type = g_string_new(mime_type); + if (!obj->mime_type) { + return MIX_RESULT_NO_MEMORY; + } + + + LOG_I( "mime_type = %s obj->mime_type->str = %s\n", + mime_type, obj->mime_type->str); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_mime_type(MixVideoConfigParamsEnc * obj, + gchar ** mime_type) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, mime_type); + + if (!obj->mime_type) { + *mime_type = NULL; + return MIX_RESULT_SUCCESS; + } + *mime_type = g_strdup(obj->mime_type->str); + if (!*mime_type) { + return MIX_RESULT_NO_MEMORY; + } + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_frame_rate(MixVideoConfigParamsEnc * obj, + guint frame_rate_num, guint frame_rate_denom) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->frame_rate_num = frame_rate_num; + obj->frame_rate_denom = frame_rate_denom; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_frame_rate(MixVideoConfigParamsEnc * obj, + guint * frame_rate_num, guint * frame_rate_denom) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, frame_rate_num, frame_rate_denom); + *frame_rate_num = obj->frame_rate_num; + *frame_rate_denom = obj->frame_rate_denom; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_picture_res(MixVideoConfigParamsEnc * obj, + guint picture_width, guint picture_height) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->picture_width = picture_width; + obj->picture_height = picture_height; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_picture_res(MixVideoConfigParamsEnc * obj, + guint * picture_width, guint * picture_height) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, picture_width, picture_height); + *picture_width = obj->picture_width; + *picture_height = obj->picture_height; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_encode_format(MixVideoConfigParamsEnc * obj, + MixEncodeTargetFormat encode_format) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->encode_format = encode_format; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_encode_format (MixVideoConfigParamsEnc * obj, + MixEncodeTargetFormat* encode_format) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, encode_format); + *encode_format = obj->encode_format; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc * obj, + guint bitrate) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->bitrate= bitrate; + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc * obj, + guint *bitrate) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, bitrate); + *bitrate = obj->bitrate; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj, + guint initial_qp) { + 
MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->initial_qp = initial_qp;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc * obj,
+        guint *initial_qp) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, initial_qp);
+    *initial_qp = obj->initial_qp;
+    return MIX_RESULT_SUCCESS;
+
+}
+
+MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj,
+        guint min_qp) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->min_qp = min_qp;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj,
+        guint *min_qp) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, min_qp);
+    *min_qp = obj->min_qp;
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc * obj,
+        guint intra_period) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->intra_period = intra_period;
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_intra_period (MixVideoConfigParamsEnc * obj,
+        guint *intra_period) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, intra_period);
+    *intra_period = obj->intra_period;
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size(
+        MixVideoConfigParamsEnc * obj, guint bufpoolsize) {
+
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+
+    obj->mixbuffer_pool_size = bufpoolsize;
+    return MIX_RESULT_SUCCESS;
+
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size(
+        MixVideoConfigParamsEnc * obj, guint *bufpoolsize) {
+
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, bufpoolsize);
+    *bufpoolsize = obj->mixbuffer_pool_size;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode (
+        MixVideoConfigParamsEnc * obj, gboolean share_buf_mod) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+
+    obj->share_buf_mode = share_buf_mod;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode(MixVideoConfigParamsEnc * obj,
+        gboolean *share_buf_mod) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, share_buf_mod);
+
+    *share_buf_mod = obj->share_buf_mode;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj,
+        gulong * ci_frame_id, guint ci_frame_num) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+
+    /* free any previously stored array first, so that clearing the
+     * info below does not leak it */
+    if (obj->ci_frame_id)
+        g_free (obj->ci_frame_id);
+
+    if (!ci_frame_id || !ci_frame_num) {
+        obj->ci_frame_id = NULL;
+        obj->ci_frame_num = 0;
+        return MIX_RESULT_SUCCESS;
+    }
+
+    guint size = ci_frame_num * sizeof (gulong);
+    obj->ci_frame_num = ci_frame_num;
+
+    obj->ci_frame_id = g_malloc (size);
+    if (!(obj->ci_frame_id)) {
+        return MIX_RESULT_NO_MEMORY;
+    }
+
+    memcpy (obj->ci_frame_id, ci_frame_id, size);
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * obj,
+        gulong * *ci_frame_id, guint *ci_frame_num) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, ci_frame_id, ci_frame_num);
+
+    *ci_frame_num = obj->ci_frame_num;
+
+    if (!obj->ci_frame_id) {
+        *ci_frame_id = NULL;
+        return MIX_RESULT_SUCCESS;
+    }
+
+    if (obj->ci_frame_num) {
+        *ci_frame_id = g_malloc (obj->ci_frame_num * sizeof (gulong));
+
+        if (!*ci_frame_id) {
+            return MIX_RESULT_NO_MEMORY;
+        }
+
+        memcpy (*ci_frame_id, obj->ci_frame_id, obj->ci_frame_num * sizeof (gulong));
+
+    } else {
+
*ci_frame_id = NULL; + } + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj, + gulong draw) { + + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->draw = draw; + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videoconfigparamsenc_get_drawable (MixVideoConfigParamsEnc * obj, + gulong *draw) { + + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, draw); + *draw = obj->draw; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_need_display ( + MixVideoConfigParamsEnc * obj, gboolean need_display) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + + obj->need_display = need_display; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * obj, + gboolean *need_display) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, need_display); + + *need_display = obj->need_display; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj, + MixRateControl rate_control) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->rate_control = rate_control; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * obj, + MixRateControl * rate_control) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, rate_control); + *rate_control = obj->rate_control; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj, + MixRawTargetFormat raw_format) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->raw_format = raw_format; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * obj, + MixRawTargetFormat * raw_format) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, raw_format); + *raw_format = obj->raw_format; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj, + MixProfile profile) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->profile = profile; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj, + MixProfile * profile) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, profile); + *profile = obj->profile; + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoconfigparamsenc.h b/mix_video/src/mixvideoconfigparamsenc.h new file mode 100644 index 0000000..6a1dfff --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsenc.h @@ -0,0 +1,254 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOCONFIGPARAMSENC_H__
+#define __MIX_VIDEOCONFIGPARAMSENC_H__
+
+#include "mixvideoconfigparams.h"	/* for the MixVideoConfigParams base class */
+#include "mixvideodef.h"
+
+/**
+ * MIX_TYPE_VIDEOCONFIGPARAMSENC:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_VIDEOCONFIGPARAMSENC (mix_videoconfigparamsenc_get_type ())
+
+/**
+ * MIX_VIDEOCONFIGPARAMSENC:
+ * @obj: object to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMSENC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEnc))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMSENC:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixVideoConfigParamsEnc
+ */
+#define MIX_IS_VIDEOCONFIGPARAMSENC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC))
+
+/**
+ * MIX_VIDEOCONFIGPARAMSENC_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMSENC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEncClass))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMSENC_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixVideoConfigParamsEncClass
+ */
+#define MIX_IS_VIDEOCONFIGPARAMSENC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC))
+
+/**
+ * MIX_VIDEOCONFIGPARAMSENC_GET_CLASS:
+ * @obj: a #MixParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_VIDEOCONFIGPARAMSENC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEncClass))
+
+typedef struct _MixVideoConfigParamsEnc MixVideoConfigParamsEnc;
+typedef struct _MixVideoConfigParamsEncClass MixVideoConfigParamsEncClass;
+
+/**
+ * MixVideoConfigParamsEnc:
+ *
+ * MI-X VideoConfig Parameter object
+ */
+struct _MixVideoConfigParamsEnc {
+    /*< public > */
+    MixVideoConfigParams parent;
+
+    /*< public > */
+    //MixIOVec header;
+
+    /* the type of the following members will be changed after MIX API doc is ready */
+
+    MixProfile profile;
+    MixRawTargetFormat raw_format;
+    MixRateControl rate_control;
+
+    guint bitrate;
+    guint frame_rate_num;
+    guint frame_rate_denom;
+    guint initial_qp;
+    guint min_qp;
+    guint intra_period;
+    guint16 picture_width;
+    guint16 picture_height;
+
+    GString * mime_type;
+    MixEncodeTargetFormat encode_format;
+
+    guint mixbuffer_pool_size;
+
+    gboolean share_buf_mode;
+
+    gulong * ci_frame_id;
+    guint ci_frame_num;
+
+    gulong draw;
+    gboolean need_display;
+
+    void *reserved1;
+    void *reserved2;
+    void *reserved3;
+    void *reserved4;
+};
+
+/**
+ * MixVideoConfigParamsEncClass:
+ *
+ * MI-X VideoConfig object class
+ */
+struct _MixVideoConfigParamsEncClass {
+    /*< public > */
+    MixVideoConfigParamsClass parent_class;
+
+    /* class members */
+};
+
+/**
+ * mix_videoconfigparamsenc_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */ +GType mix_videoconfigparamsenc_get_type(void); + +/** + * mix_videoconfigparamsenc_new: + * @returns: A newly allocated instance of #MixVideoConfigParamsEnc + * + * Use this method to create new instance of #MixVideoConfigParamsEnc + */ +MixVideoConfigParamsEnc *mix_videoconfigparamsenc_new(void); +/** + * mix_videoconfigparamsenc_ref: + * @mix: object to add reference + * @returns: the MixVideoConfigParamsEnc instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoConfigParamsEnc *mix_videoconfigparamsenc_ref(MixVideoConfigParamsEnc * mix); + +/** + * mix_videoconfigparamsenc_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +#define mix_videoconfigparamsenc_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + + +MIX_RESULT mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj, + const gchar * mime_type); + +MIX_RESULT mix_videoconfigparamsenc_get_mime_type(MixVideoConfigParamsEnc * obj, + gchar ** mime_type); + +MIX_RESULT mix_videoconfigparamsenc_set_frame_rate(MixVideoConfigParamsEnc * obj, + guint frame_rate_num, guint frame_rate_denom); + +MIX_RESULT mix_videoconfigparamsenc_get_frame_rate(MixVideoConfigParamsEnc * obj, + guint * frame_rate_num, guint * frame_rate_denom); + +MIX_RESULT mix_videoconfigparamsenc_set_picture_res(MixVideoConfigParamsEnc * obj, + guint picture_width, guint picture_height); + +MIX_RESULT mix_videoconfigparamsenc_get_picture_res(MixVideoConfigParamsEnc * obj, + guint * picture_width, guint * picture_height); + +MIX_RESULT mix_videoconfigparamsenc_set_encode_format (MixVideoConfigParamsEnc * obj, + MixEncodeTargetFormat encode_format); + +MIX_RESULT mix_videoconfigparamsenc_get_encode_format (MixVideoConfigParamsEnc * obj, + MixEncodeTargetFormat * encode_format); + +MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc * obj, + guint bps); + +MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc * obj, + guint *bps); + +MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj, + guint initial_qp); + +MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc * obj, + guint *initial_qp); + +MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj, + guint min_qp); + +MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj, + guint *min_qp); + +MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc * obj, + guint intra_period); + +MIX_RESULT mix_videoconfigparamsenc_get_intra_period (MixVideoConfigParamsEnc * obj, + guint *intra_period); + +MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size(MixVideoConfigParamsEnc * obj, + guint bufpoolsize); + +MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size(MixVideoConfigParamsEnc * obj, + guint *bufpoolsize); + +MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode (MixVideoConfigParamsEnc * obj, + gboolean share_buf_mod); + +MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode(MixVideoConfigParamsEnc * obj, + gboolean *share_buf_mod); + +MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj, + gulong * ci_frame_id, guint ci_frame_num); + +MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * obj, + gulong * *ci_frame_id, guint *ci_frame_num); + +MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj, + gulong draw); + +MIX_RESULT mix_videoconfigparamsenc_get_drawable 
(MixVideoConfigParamsEnc * obj, + gulong *draw); + +MIX_RESULT mix_videoconfigparamsenc_set_need_display ( + MixVideoConfigParamsEnc * obj, gboolean need_display); + +MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * obj, + gboolean *need_display); + + +MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj, + MixRateControl rcmode); + +MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * obj, + MixRateControl * rcmode); + +MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj, + MixRawTargetFormat raw_format); + +MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * obj, + MixRawTargetFormat * raw_format); + +MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj, + MixProfile profile); + +MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj, + MixProfile * profile); + +/* TODO: Add getters and setters for other properties */ + +#endif /* __MIX_VIDEOCONFIGPARAMSENC_H__ */ + diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.c b/mix_video/src/mixvideoconfigparamsenc_h264.c new file mode 100644 index 0000000..3bfa52e --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsenc_h264.c @@ -0,0 +1,322 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** +* SECTION:mixvideoconfigparamsenc_h264 +* @short_description: VideoConfig parameters +* +* A data object which stores videoconfig specific parameters. 
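+*
+* A minimal usage sketch, added here for illustration (it is not part of
+* the original sources; error handling omitted):
+* |[
+* MixVideoConfigParamsEncH264 *params = mix_videoconfigparamsenc_h264_new ();
+* mix_videoconfigparamsenc_h264_set_slice_num (params, 4);
+* mix_videoconfigparamsenc_h264_set_dlk (params, 0);
+* mix_videoconfigparamsenc_h264_set_delimiter_type (params, MIX_DELIMITER_ANNEXB);
+* mix_videoconfigparamsenc_h264_unref (params);
+* ]|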
+*/ + +#include "mixvideolog.h" +#include "mixvideoconfigparamsenc_h264.h" + +#define MDEBUG + + +static GType _mix_videoconfigparamsenc_h264_type = 0; +static MixVideoConfigParamsEncClass *parent_class = NULL; + +#define _do_init { _mix_videoconfigparamsenc_h264_type = g_define_type_id; } + +gboolean mix_videoconfigparamsenc_h264_copy (MixParams * target, + const MixParams * src); +MixParams *mix_videoconfigparamsenc_h264_dup (const MixParams * obj); +gboolean mix_videoconfigparamsencenc_h264_equal (MixParams * first, + MixParams * second); +static void mix_videoconfigparamsenc_h264_finalize (MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncH264, /* The name of the new type, in Camel case */ + mix_videoconfigparamsenc_h264, /* The name of the new type in lowercase */ + MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ + _do_init); + +void +_mix_videoconfigparamsenc_h264_initialize (void) +{ + /* the MixParams types need to be class_ref'd once before it can be + * done from multiple threads; + * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ + g_type_class_ref (mix_videoconfigparamsenc_h264_get_type ()); +} + +static void +mix_videoconfigparamsenc_h264_init (MixVideoConfigParamsEncH264 * self) +{ + /* initialize properties here */ + /* TODO: initialize properties */ + self->basic_unit_size = 0; + self->slice_num = 1; + self->disable_deblocking_filter_idc = 0; + + self->delimiter_type = MIX_DELIMITER_LENGTHPREFIX; + + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; +} + +static void +mix_videoconfigparamsenc_h264_class_init (MixVideoConfigParamsEncH264Class * klass) +{ + MixVideoConfigParamsEncClass *this_parent_class = + MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); + MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); + + /* setup static parent class */ + parent_class = + (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); + + this_root_class->finalize = mix_videoconfigparamsenc_h264_finalize; + this_root_class->copy = + (MixParamsCopyFunction) mix_videoconfigparamsenc_h264_copy; + this_root_class->dup = + (MixParamsDupFunction) mix_videoconfigparamsenc_h264_dup; + this_root_class->equal = + (MixParamsEqualFunction) mix_videoconfigparamsencenc_h264_equal; +} + +MixVideoConfigParamsEncH264 * +mix_videoconfigparamsenc_h264_new (void) +{ + MixVideoConfigParamsEncH264 *ret = (MixVideoConfigParamsEncH264 *) + g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_H264); + + return ret; +} + +void +mix_videoconfigparamsenc_h264_finalize (MixParams * obj) +{ + /* MixVideoConfigParamsEncH264 *this_obj = MIX_VIDEOCONFIGPARAMSENC_H264 (obj); */ + MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); + + /* TODO: cleanup resources allocated */ + + /* Chain up parent */ + + if (root_class->finalize) + { + root_class->finalize (obj); + } +} + +MixVideoConfigParamsEncH264 + * mix_videoconfigparamsenc_h264_ref (MixVideoConfigParamsEncH264 * mix) +{ + return (MixVideoConfigParamsEncH264 *) mix_params_ref (MIX_PARAMS (mix)); +} + +/** +* mix_videoconfigparamsenc_h264_dup: +* @obj: a #MixVideoConfigParams object +* @returns: a newly allocated duplicate of the object. +* +* Copy duplicate of the object. 
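+*
+* A hypothetical round trip (illustration only): duplicating through the
+* base-class pointer and releasing the copy with mix_params_unref().
+* |[
+* MixParams *clone = mix_videoconfigparamsenc_h264_dup (MIX_PARAMS (params));
+* if (clone != NULL)
+*   mix_params_unref (clone);
+* ]|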
+*/
+MixParams *
+mix_videoconfigparamsenc_h264_dup (const MixParams * obj)
+{
+  MixParams *ret = NULL;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (obj))
+  {
+    MixVideoConfigParamsEncH264 *duplicate = mix_videoconfigparamsenc_h264_new ();
+    if (mix_videoconfigparamsenc_h264_copy (MIX_PARAMS (duplicate), MIX_PARAMS (obj)))
+    {
+      ret = MIX_PARAMS (duplicate);
+    }
+    else
+    {
+      mix_videoconfigparamsenc_h264_unref (duplicate);
+    }
+  }
+  return ret;
+}
+
+/**
+* mix_videoconfigparamsenc_h264_copy:
+* @target: copy to target
+* @src: copy from src
+* @returns: boolean indicating whether the copy succeeded.
+*
+* Copy instance data from @src to @target.
+*/
+gboolean
+mix_videoconfigparamsenc_h264_copy (MixParams * target, const MixParams * src)
+{
+  MixVideoConfigParamsEncH264 *this_target, *this_src;
+  MixParamsClass *root_class;
+
+  LOG_V( "Begin\n");
+
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (target)
+      && MIX_IS_VIDEOCONFIGPARAMSENC_H264 (src))
+  {
+    // Cast the base object to this child object
+    this_target = MIX_VIDEOCONFIGPARAMSENC_H264 (target);
+    this_src = MIX_VIDEOCONFIGPARAMSENC_H264 (src);
+
+    // Copy the H.264-specific properties
+    this_target->basic_unit_size = this_src->basic_unit_size;
+    this_target->slice_num = this_src->slice_num;
+    this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc;
+    this_target->delimiter_type = this_src->delimiter_type;
+
+    // Now chain up to the base class
+    root_class = MIX_PARAMS_CLASS (parent_class);
+
+    if (root_class->copy)
+    {
+      return root_class->copy (MIX_PARAMS_CAST (target),
+          MIX_PARAMS_CAST (src));
+    }
+    else
+    {
+      return TRUE;
+    }
+  }
+  return FALSE;
+}
+
+/**
+* mix_videoconfigparamsencenc_h264_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicating whether the instances are equal.
+*
+* Compare instance data of @first and @second for equality.
+*/
+gboolean
+mix_videoconfigparamsencenc_h264_equal (MixParams * first, MixParams * second)
+{
+  gboolean ret = FALSE;
+  MixVideoConfigParamsEncH264 *this_first, *this_second;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (first)
+      && MIX_IS_VIDEOCONFIGPARAMSENC_H264 (second))
+  {
+    // Cast the base object to this child object
+    this_first = MIX_VIDEOCONFIGPARAMSENC_H264 (first);
+    this_second = MIX_VIDEOCONFIGPARAMSENC_H264 (second);
+
+    if (this_first->basic_unit_size != this_second->basic_unit_size) {
+      goto not_equal;
+    }
+
+    if (this_first->slice_num != this_second->slice_num) {
+      goto not_equal;
+    }
+
+    if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) {
+      goto not_equal;
+    }
+
+    if (this_first->delimiter_type != this_second->delimiter_type) {
+      goto not_equal;
+    }
+
+    ret = TRUE;
+
+  not_equal:
+
+    if (ret != TRUE) {
+      return ret;
+    }
+
+    {
+      // members within this scope equal. chaining up.
+ MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); + if (klass->equal) + { + ret = klass->equal (first, second); + } + else + { + ret = TRUE; + } + } + } + + return ret; +} + +/* TODO: Add getters and setters for properties if any */ + +#define MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj)) return MIX_RESULT_FAIL; \ + + +MIX_RESULT mix_videoconfigparamsenc_h264_set_bus (MixVideoConfigParamsEncH264 * obj, + guint basic_unit_size) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->basic_unit_size = basic_unit_size; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_h264_get_bus (MixVideoConfigParamsEncH264 * obj, + guint * basic_unit_size) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, basic_unit_size); + *basic_unit_size = obj->basic_unit_size; + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (MixVideoConfigParamsEncH264 * obj, + guint disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk (MixVideoConfigParamsEncH264 * obj, + guint * disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); + *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(MixVideoConfigParamsEncH264 * obj, + guint slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->slice_num = slice_num; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num(MixVideoConfigParamsEncH264 * obj, + guint * slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, slice_num); + *slice_num = obj->slice_num; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParamsEncH264 * obj, + MixDelimiterType delimiter_type) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->delimiter_type = delimiter_type; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type (MixVideoConfigParamsEncH264 * obj, + MixDelimiterType * delimiter_type) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, delimiter_type); + *delimiter_type = obj->delimiter_type; + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.h b/mix_video/src/mixvideoconfigparamsenc_h264.h new file mode 100644 index 0000000..c2359dd --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsenc_h264.h @@ -0,0 +1,160 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. 
The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_VIDEOCONFIGPARAMSENC_H264_H__ +#define __MIX_VIDEOCONFIGPARAMSENC_H264_H__ + +#include "mixvideoconfigparamsenc.h" +#include "mixvideodef.h" + +/** +* MIX_TYPE_VIDEOCONFIGPARAMSENC_H264: +* +* Get type of class. +*/ +#define MIX_TYPE_VIDEOCONFIGPARAMSENC_H264 (mix_videoconfigparamsenc_h264_get_type ()) + +/** +* MIX_VIDEOCONFIGPARAMSENC_H264: +* @obj: object to be type-casted. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSENC_H264: +* @obj: an object. +* +* Checks if the given object is an instance of #MixVideoConfigParamsEncH264 +*/ +#define MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264)) + +/** +* MIX_VIDEOCONFIGPARAMSENC_H264_CLASS: +* @klass: class to be type-casted. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264Class)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS: +* @klass: a class. +* +* Checks if the given class is #MixVideoConfigParamsEncH264Class +*/ +#define MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264)) + +/** +* MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS: +* @obj: a #MixParams object. +* +* Get the class instance of the object. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264Class)) + +typedef struct _MixVideoConfigParamsEncH264 MixVideoConfigParamsEncH264; +typedef struct _MixVideoConfigParamsEncH264Class MixVideoConfigParamsEncH264Class; + +/** +* MixVideoConfigParamsEncH264: +* +* MI-X VideoConfig Parameter object +*/ +struct _MixVideoConfigParamsEncH264 +{ + /*< public > */ + MixVideoConfigParamsEnc parent; + + /*< public > */ + + /* TODO: Add H.264 configuration paramters */ + guint basic_unit_size; + guint slice_num; + guint8 disable_deblocking_filter_idc; + + MixDelimiterType delimiter_type; + + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; +}; + +/** +* MixVideoConfigParamsEncH264Class: +* +* MI-X VideoConfig object class +*/ +struct _MixVideoConfigParamsEncH264Class +{ + /*< public > */ + MixVideoConfigParamsEncClass parent_class; + + /* class members */ +}; + +/** +* mix_videoconfigparamsenc_h264_get_type: +* @returns: type +* +* Get the type of object. 
+*/ +GType mix_videoconfigparamsenc_h264_get_type (void); + +/** +* mix_videoconfigparamsenc_h264_new: +* @returns: A newly allocated instance of #MixVideoConfigParamsEncH264 +* +* Use this method to create new instance of #MixVideoConfigParamsEncH264 +*/ +MixVideoConfigParamsEncH264 *mix_videoconfigparamsenc_h264_new (void); +/** +* mix_videoconfigparamsenc_h264_ref: +* @mix: object to add reference +* @returns: the MixVideoConfigParamsEncH264 instance where reference count has been increased. +* +* Add reference count. +*/ +MixVideoConfigParamsEncH264 + * mix_videoconfigparamsenc_h264_ref (MixVideoConfigParamsEncH264 * mix); + +/** +* mix_videoconfigparamsenc_h264_unref: +* @obj: object to unref. +* +* Decrement reference count of the object. +*/ +#define mix_videoconfigparamsenc_h264_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +/* TODO: Add getters and setters for other properties */ +MIX_RESULT mix_videoconfigparamsenc_h264_set_bus (MixVideoConfigParamsEncH264 * obj, + guint basic_unit_size); + +MIX_RESULT mix_videoconfigparamsenc_h264_get_bus (MixVideoConfigParamsEncH264 * obj, + guint * basic_unit_size); + +MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (MixVideoConfigParamsEncH264 * obj, + guint disable_deblocking_filter_idc); + +MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk (MixVideoConfigParamsEncH264 * obj, + guint * disable_deblocking_filter_idc); + +MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(MixVideoConfigParamsEncH264 * obj, + guint slice_num); + +MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num(MixVideoConfigParamsEncH264 * obj, + guint * slice_num); + +MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParamsEncH264 * obj, + MixDelimiterType delimiter_type); + +MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type (MixVideoConfigParamsEncH264 * obj, + MixDelimiterType * delimiter_type); + +#endif /* __MIX_VIDEOCONFIGPARAMSENC_H264_H__ */ + diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.c b/mix_video/src/mixvideoconfigparamsenc_mpeg4.c new file mode 100644 index 0000000..54e47a9 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsenc_mpeg4.c @@ -0,0 +1,300 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** +* SECTION:mixvideoconfigparamsenc_mpeg4 +* @short_description: VideoConfig parameters +* +* A data object which stores videoconfig specific parameters. 
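+*
+* A minimal usage sketch, added for illustration (not part of the
+* original sources). The value 3 for the profile/level byte corresponds
+* to Simple Profile, Level 3 in the MPEG-4 profile_and_level_indication
+* coding:
+* |[
+* MixVideoConfigParamsEncMPEG4 *params = mix_videoconfigparamsenc_mpeg4_new ();
+* mix_videoconfigparamsenc_mpeg4_set_profile_level (params, 3);
+* mix_videoconfigparamsenc_mpeg4_set_fixed_vti (params, 3);
+* mix_videoconfigparamsenc_mpeg4_unref (params);
+* ]|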
+*/ + +#include "mixvideolog.h" +#include "mixvideoconfigparamsenc_mpeg4.h" + +#define MDEBUG + + +static GType _mix_videoconfigparamsenc_mpeg4_type = 0; +static MixVideoConfigParamsEncClass *parent_class = NULL; + +#define _do_init { _mix_videoconfigparamsenc_mpeg4_type = g_define_type_id; } + +gboolean mix_videoconfigparamsenc_mpeg4_copy (MixParams * target, + const MixParams * src); +MixParams *mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj); +gboolean mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, + MixParams * second); +static void mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncMPEG4, /* The name of the new type, in Camel case */ + mix_videoconfigparamsenc_mpeg4, /* The name of the new type in lowercase */ + MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ + _do_init); + +void +_mix_videoconfigparamsenc_mpeg4_initialize (void) +{ + /* the MixParams types need to be class_ref'd once before it can be + * done from multiple threads; + * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ + g_type_class_ref (mix_videoconfigparamsenc_mpeg4_get_type ()); +} + +static void +mix_videoconfigparamsenc_mpeg4_init (MixVideoConfigParamsEncMPEG4 * self) +{ + /* initialize properties here */ + /* TODO: initialize properties */ + + self->fixed_vop_time_increment = 3; + self->profile_and_level_indication = 3; + self->disable_deblocking_filter_idc = 0; + + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; +} + +static void +mix_videoconfigparamsenc_mpeg4_class_init (MixVideoConfigParamsEncMPEG4Class * klass) +{ + MixVideoConfigParamsEncClass *this_parent_class = + MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); + MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); + + /* setup static parent class */ + parent_class = + (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); + + this_root_class->finalize = mix_videoconfigparamsenc_mpeg4_finalize; + this_root_class->copy = + (MixParamsCopyFunction) mix_videoconfigparamsenc_mpeg4_copy; + this_root_class->dup = + (MixParamsDupFunction) mix_videoconfigparamsenc_mpeg4_dup; + this_root_class->equal = + (MixParamsEqualFunction) mix_videoconfigparamsencenc_mpeg4_equal; +} + +MixVideoConfigParamsEncMPEG4 * +mix_videoconfigparamsenc_mpeg4_new (void) +{ + MixVideoConfigParamsEncMPEG4 *ret = (MixVideoConfigParamsEncMPEG4 *) + g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4); + + return ret; +} + +void +mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj) +{ + /* MixVideoConfigParamsEncMPEG4 *this_obj = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (obj); */ + MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); + + /* TODO: cleanup resources allocated */ + + /* Chain up parent */ + + if (root_class->finalize) + { + root_class->finalize (obj); + } +} + +MixVideoConfigParamsEncMPEG4 + * mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix) +{ + return (MixVideoConfigParamsEncMPEG4 *) mix_params_ref (MIX_PARAMS (mix)); +} + +/** +* mix_videoconfigparamsenc_mpeg4_dup: +* @obj: a #MixVideoConfigParams object +* @returns: a newly allocated duplicate of the object. +* +* Copy duplicate of the object. 
+*/
+MixParams *
+mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj)
+{
+  MixParams *ret = NULL;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (obj))
+  {
+    MixVideoConfigParamsEncMPEG4 *duplicate = mix_videoconfigparamsenc_mpeg4_new ();
+    if (mix_videoconfigparamsenc_mpeg4_copy (MIX_PARAMS (duplicate), MIX_PARAMS (obj)))
+    {
+      ret = MIX_PARAMS (duplicate);
+    }
+    else
+    {
+      mix_videoconfigparamsenc_mpeg4_unref (duplicate);
+    }
+  }
+  return ret;
+}
+
+/**
+* mix_videoconfigparamsenc_mpeg4_copy:
+* @target: copy to target
+* @src: copy from src
+* @returns: boolean indicating whether the copy succeeded.
+*
+* Copy instance data from @src to @target.
+*/
+gboolean
+mix_videoconfigparamsenc_mpeg4_copy (MixParams * target, const MixParams * src)
+{
+  MixVideoConfigParamsEncMPEG4 *this_target, *this_src;
+  MixParamsClass *root_class;
+
+  LOG_V( "Begin\n");
+
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (target)
+      && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (src))
+  {
+    // Cast the base object to this child object
+    this_target = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (target);
+    this_src = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (src);
+
+    // Copy the MPEG-4-specific properties
+    this_target->profile_and_level_indication = this_src->profile_and_level_indication;
+    this_target->fixed_vop_time_increment = this_src->fixed_vop_time_increment;
+    this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc;
+
+    // Now chain up to the base class
+    root_class = MIX_PARAMS_CLASS (parent_class);
+
+    if (root_class->copy)
+    {
+      return root_class->copy (MIX_PARAMS_CAST (target),
+          MIX_PARAMS_CAST (src));
+    }
+    else
+    {
+      return TRUE;
+    }
+  }
+  return FALSE;
+}
+
+/**
+* mix_videoconfigparamsencenc_mpeg4_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicating whether the instances are equal.
+*
+* Compare instance data of @first and @second for equality.
+*/
+gboolean
+mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, MixParams * second)
+{
+  gboolean ret = FALSE;
+  MixVideoConfigParamsEncMPEG4 *this_first, *this_second;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (first)
+      && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (second))
+  {
+    // Cast the base object to this child object
+    this_first = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (first);
+    this_second = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (second);
+
+    if (this_first->profile_and_level_indication != this_second->profile_and_level_indication) {
+      goto not_equal;
+    }
+
+    if (this_first->fixed_vop_time_increment != this_second->fixed_vop_time_increment) {
+      goto not_equal;
+    }
+
+    if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) {
+      goto not_equal;
+    }
+
+    ret = TRUE;
+
+  not_equal:
+
+    if (ret != TRUE) {
+      return ret;
+    }
+
+    {
+      // members within this scope equal. chaining up.
+ MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); + if (klass->equal) + { + ret = klass->equal (first, second); + } + else + { + ret = TRUE; + } + } + } + + return ret; +} + +/* TODO: Add getters and setters for properties if any */ + +#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \ + + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParamsEncMPEG4 * obj, + guchar profile_and_level_indication) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); + obj->profile_and_level_indication = profile_and_level_indication; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (MixVideoConfigParamsEncMPEG4 * obj, + guchar * profile_and_level_indication) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, profile_and_level_indication); + *profile_and_level_indication = obj->profile_and_level_indication; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, + guint fixed_vop_time_increment) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); + obj->fixed_vop_time_increment = fixed_vop_time_increment; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, + guint * fixed_vop_time_increment) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, fixed_vop_time_increment); + *fixed_vop_time_increment = obj->fixed_vop_time_increment; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 * obj, + guint disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); + obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (MixVideoConfigParamsEncMPEG4 * obj, + guint * disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); + *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h new file mode 100644 index 0000000..e6322d5 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h @@ -0,0 +1,152 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
+ +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ +#define __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ + +#include "mixvideoconfigparamsenc.h" +#include "mixvideodef.h" + +/** +* MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4: +* +* Get type of class. +*/ +#define MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4 (mix_videoconfigparamsenc_mpeg4_get_type ()) + +/** +* MIX_VIDEOCONFIGPARAMSENC_MPEG4: +* @obj: object to be type-casted. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4: +* @obj: an object. +* +* Checks if the given object is an instance of #MixVideoConfigParamsEncMPEG4 +*/ +#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4)) + +/** +* MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS: +* @klass: class to be type-casted. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS: +* @klass: a class. +* +* Checks if the given class is #MixVideoConfigParamsEncMPEG4Class +*/ +#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4)) + +/** +* MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS: +* @obj: a #MixParams object. +* +* Get the class instance of the object. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class)) + +typedef struct _MixVideoConfigParamsEncMPEG4 MixVideoConfigParamsEncMPEG4; +typedef struct _MixVideoConfigParamsEncMPEG4Class MixVideoConfigParamsEncMPEG4Class; + +/** +* MixVideoConfigParamsEncMPEG4: +* +* MI-X VideoConfig Parameter object +*/ +struct _MixVideoConfigParamsEncMPEG4 +{ + /*< public > */ + MixVideoConfigParamsEnc parent; + + /*< public > */ + + /* TODO: Add MPEG-4 configuration paramters */ + guchar profile_and_level_indication; + guint fixed_vop_time_increment; + guint disable_deblocking_filter_idc; + + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; +}; + +/** +* MixVideoConfigParamsEncMPEG4Class: +* +* MI-X VideoConfig object class +*/ +struct _MixVideoConfigParamsEncMPEG4Class +{ + /*< public > */ + MixVideoConfigParamsEncClass parent_class; + + /* class members */ +}; + +/** +* mix_videoconfigparamsenc_mpeg4_get_type: +* @returns: type +* +* Get the type of object. +*/ +GType mix_videoconfigparamsenc_mpeg4_get_type (void); + +/** +* mix_videoconfigparamsenc_mpeg4_new: +* @returns: A newly allocated instance of #MixVideoConfigParamsEncMPEG4 +* +* Use this method to create new instance of #MixVideoConfigParamsEncMPEG4 +*/ +MixVideoConfigParamsEncMPEG4 *mix_videoconfigparamsenc_mpeg4_new (void); +/** +* mix_videoconfigparamsenc_mpeg4_ref: +* @mix: object to add reference +* @returns: the MixVideoConfigParamsEncMPEG4 instance where reference count has been increased. +* +* Add reference count. 
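+*
+* A hypothetical ref/unref pairing (illustration only):
+* |[
+* MixVideoConfigParamsEncMPEG4 *shared = mix_videoconfigparamsenc_mpeg4_ref (params);
+* mix_videoconfigparamsenc_mpeg4_unref (shared);
+* ]|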
+*/ +MixVideoConfigParamsEncMPEG4 + * mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix); + +/** +* mix_videoconfigparamsenc_mpeg4_unref: +* @obj: object to unref. +* +* Decrement reference count of the object. +*/ +#define mix_videoconfigparamsenc_mpeg4_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +/* TODO: Add getters and setters for other properties */ + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 * obj, + guint disable_deblocking_filter_idc); + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (MixVideoConfigParamsEncMPEG4 * obj, + guint * disable_deblocking_filter_idc); + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParamsEncMPEG4 * obj, + guchar profile_and_level_indication); + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (MixVideoConfigParamsEncMPEG4 * obj, + guchar * profile_and_level_indication); + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, + guint fixed_vop_time_increment); + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, + guint * fixed_vop_time_increment); + +#endif /* __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.c b/mix_video/src/mixvideoconfigparamsenc_preview.c new file mode 100644 index 0000000..ea0aaa1 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsenc_preview.c @@ -0,0 +1,222 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** +* SECTION:mixvideoconfigparamsenc_preview +* @short_description: VideoConfig parameters +* +* A data object which stores videoconfig specific parameters. 
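+*
+* The preview variant adds no properties of its own, so an illustrative
+* sketch (not from the original sources) only exercises the inherited
+* #MixVideoConfigParamsEnc accessors:
+* |[
+* MixVideoConfigParamsEncPreview *params = mix_videoconfigparamsenc_preview_new ();
+* mix_videoconfigparamsenc_set_picture_res (MIX_VIDEOCONFIGPARAMSENC (params), 640, 480);
+* mix_videoconfigparamsenc_set_frame_rate (MIX_VIDEOCONFIGPARAMSENC (params), 30, 1);
+* mix_videoconfigparamsenc_preview_unref (params);
+* ]|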
+*/ + +#include "mixvideolog.h" +#include "mixvideoconfigparamsenc_preview.h" + +#define MDEBUG + + +static GType _mix_videoconfigparamsenc_preview_type = 0; +static MixVideoConfigParamsEncClass *parent_class = NULL; + +#define _do_init { _mix_videoconfigparamsenc_preview_type = g_define_type_id; } + +gboolean mix_videoconfigparamsenc_preview_copy (MixParams * target, + const MixParams * src); +MixParams *mix_videoconfigparamsenc_preview_dup (const MixParams * obj); +gboolean mix_videoconfigparamsencenc_preview_equal (MixParams * first, + MixParams * second); +static void mix_videoconfigparamsenc_preview_finalize (MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncPreview, /* The name of the new type, in Camel case */ + mix_videoconfigparamsenc_preview, /* The name of the new type in lowercase */ + MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ + _do_init); + +void +_mix_videoconfigparamsenc_preview_initialize (void) +{ + /* the MixParams types need to be class_ref'd once before it can be + * done from multiple threads; + * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ + g_type_class_ref (mix_videoconfigparamsenc_preview_get_type ()); +} + +static void +mix_videoconfigparamsenc_preview_init (MixVideoConfigParamsEncPreview * self) +{ + /* initialize properties here */ + /* TODO: initialize properties */ + + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; +} + +static void +mix_videoconfigparamsenc_preview_class_init (MixVideoConfigParamsEncPreviewClass * klass) +{ + MixVideoConfigParamsEncClass *this_parent_class = + MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); + MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); + + /* setup static parent class */ + parent_class = + (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); + + this_root_class->finalize = mix_videoconfigparamsenc_preview_finalize; + this_root_class->copy = + (MixParamsCopyFunction) mix_videoconfigparamsenc_preview_copy; + this_root_class->dup = + (MixParamsDupFunction) mix_videoconfigparamsenc_preview_dup; + this_root_class->equal = + (MixParamsEqualFunction) mix_videoconfigparamsencenc_preview_equal; +} + +MixVideoConfigParamsEncPreview * +mix_videoconfigparamsenc_preview_new (void) +{ + MixVideoConfigParamsEncPreview *ret = (MixVideoConfigParamsEncPreview *) + g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW); + + return ret; +} + +void +mix_videoconfigparamsenc_preview_finalize (MixParams * obj) +{ + /* MixVideoConfigParamsEncPreview *this_obj = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (obj); */ + MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); + + /* TODO: cleanup resources allocated */ + + /* Chain up parent */ + + if (root_class->finalize) + { + root_class->finalize (obj); + } +} + +MixVideoConfigParamsEncPreview + * mix_videoconfigparamsenc_preview_ref (MixVideoConfigParamsEncPreview * mix) +{ + return (MixVideoConfigParamsEncPreview *) mix_params_ref (MIX_PARAMS (mix)); +} + +/** +* mix_videoconfigparamsenc_preview_dup: +* @obj: a #MixVideoConfigParams object +* @returns: a newly allocated duplicate of the object. +* +* Copy duplicate of the object. 
+*/
+MixParams *
+mix_videoconfigparamsenc_preview_dup (const MixParams * obj)
+{
+  MixParams *ret = NULL;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (obj))
+  {
+    MixVideoConfigParamsEncPreview *duplicate = mix_videoconfigparamsenc_preview_new ();
+    if (mix_videoconfigparamsenc_preview_copy (MIX_PARAMS (duplicate), MIX_PARAMS (obj)))
+    {
+      ret = MIX_PARAMS (duplicate);
+    }
+    else
+    {
+      mix_videoconfigparamsenc_preview_unref (duplicate);
+    }
+  }
+  return ret;
+}
+
+/**
+* mix_videoconfigparamsenc_preview_copy:
+* @target: copy to target
+* @src: copy from src
+* @returns: boolean indicating whether the copy succeeded.
+*
+* Copy instance data from @src to @target.
+*/
+gboolean
+mix_videoconfigparamsenc_preview_copy (MixParams * target, const MixParams * src)
+{
+  MixVideoConfigParamsEncPreview *this_target, *this_src;
+  MixParamsClass *root_class;
+
+  LOG_V( "Begin\n");
+
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (target)
+      && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (src))
+  {
+    // Cast the base object to this child object
+    // (the preview subclass adds no members of its own to copy)
+    this_target = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (target);
+    this_src = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (src);
+
+    // Now chain up to the base class
+    root_class = MIX_PARAMS_CLASS (parent_class);
+
+    if (root_class->copy)
+    {
+      return root_class->copy (MIX_PARAMS_CAST (target),
+          MIX_PARAMS_CAST (src));
+    }
+    else
+    {
+      return TRUE;
+    }
+  }
+  return FALSE;
+}
+
+/**
+* mix_videoconfigparamsencenc_preview_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicating whether the instances are equal.
+*
+* Compare instance data of @first and @second for equality.
+*/
+gboolean
+mix_videoconfigparamsencenc_preview_equal (MixParams * first, MixParams * second)
+{
+  gboolean ret = FALSE;
+  MixVideoConfigParamsEncPreview *this_first, *this_second;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (first)
+      && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (second))
+  {
+    // Cast the base object to this child object
+    this_first = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (first);
+    this_second = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (second);
+
+    ret = TRUE;
+
+    /* the preview subclass has no members of its own to compare. chaining up. */
+    {
+      MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class);
+      if (klass->equal)
+      {
+        ret = klass->equal (first, second);
+      }
+      else
+      {
+        ret = TRUE;
+      }
+    }
+  }
+
+  return ret;
+}
+
+/* TODO: Add getters and setters for properties if any */
diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.h b/mix_video/src/mixvideoconfigparamsenc_preview.h
new file mode 100644
index 0000000..f9d3fbe
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsenc_preview.h
@@ -0,0 +1,124 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+ +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_VIDEOCONFIGPARAMSENC_PREVIEW_H__ +#define __MIX_VIDEOCONFIGPARAMSENC_PREVIEW_H__ + +#include "mixvideoconfigparamsenc.h" +#include "mixvideodef.h" + +/** +* MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW: +* +* Get type of class. +*/ +#define MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW (mix_videoconfigparamsenc_preview_get_type ()) + +/** +* MIX_VIDEOCONFIGPARAMSENC_PREVIEW: +* @obj: object to be type-casted. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_PREVIEW(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW, MixVideoConfigParamsEncPreview)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW: +* @obj: an object. +* +* Checks if the given object is an instance of #MixVideoConfigParamsEncPreview +*/ +#define MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW)) + +/** +* MIX_VIDEOCONFIGPARAMSENC_PREVIEW_CLASS: +* @klass: class to be type-casted. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_PREVIEW_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW, MixVideoConfigParamsEncPreviewClass)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW_CLASS: +* @klass: a class. +* +* Checks if the given class is #MixVideoConfigParamsEncPreviewClass +*/ +#define MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW)) + +/** +* MIX_VIDEOCONFIGPARAMSENC_PREVIEW_GET_CLASS: +* @obj: a #MixParams object. +* +* Get the class instance of the object. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_PREVIEW_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW, MixVideoConfigParamsEncPreviewClass)) + +typedef struct _MixVideoConfigParamsEncPreview MixVideoConfigParamsEncPreview; +typedef struct _MixVideoConfigParamsEncPreviewClass MixVideoConfigParamsEncPreviewClass; + +/** +* MixVideoConfigParamsEncPreview: +* +* MI-X VideoConfig Parameter object +*/ +struct _MixVideoConfigParamsEncPreview +{ + /*< public > */ + MixVideoConfigParamsEnc parent; + + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; +}; + +/** +* MixVideoConfigParamsEncPreviewClass: +* +* MI-X VideoConfig object class +*/ +struct _MixVideoConfigParamsEncPreviewClass +{ + /*< public > */ + MixVideoConfigParamsEncClass parent_class; + + /* class members */ +}; + +/** +* mix_videoconfigparamsenc_preview_get_type: +* @returns: type +* +* Get the type of object. +*/ +GType mix_videoconfigparamsenc_preview_get_type (void); + +/** +* mix_videoconfigparamsenc_preview_new: +* @returns: A newly allocated instance of #MixVideoConfigParamsEncPreview +* +* Use this method to create new instance of #MixVideoConfigParamsEncPreview +*/ +MixVideoConfigParamsEncPreview *mix_videoconfigparamsenc_preview_new (void); +/** +* mix_videoconfigparamsenc_preview_ref: +* @mix: object to add reference +* @returns: the MixVideoConfigParamsEncPreview instance where reference count has been increased. +* +* Add reference count. +*/ +MixVideoConfigParamsEncPreview + * mix_videoconfigparamsenc_preview_ref (MixVideoConfigParamsEncPreview * mix); + +/** +* mix_videoconfigparamsenc_preview_unref: +* @obj: object to unref. 
+* +* Decrement reference count of the object. +*/ +#define mix_videoconfigparamsenc_preview_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +#endif /* __MIX_VIDEOCONFIGPARAMSENC_PREVIEW_H__ */ + diff --git a/mix_video/src/mixvideodecodeparams.c b/mix_video/src/mixvideodecodeparams.c new file mode 100644 index 0000000..0bb06e8 --- /dev/null +++ b/mix_video/src/mixvideodecodeparams.c @@ -0,0 +1,204 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideodecodeparams + * @short_description: VideoDecode parameters + * + * A data object which stores videodecode specific parameters. + */ + +#include "mixvideodecodeparams.h" + +static GType _mix_videodecodeparams_type = 0; +static MixParamsClass *parent_class = NULL; + +#define _do_init { _mix_videodecodeparams_type = g_define_type_id; } + +gboolean mix_videodecodeparams_copy(MixParams * target, const MixParams * src); +MixParams *mix_videodecodeparams_dup(const MixParams * obj); +gboolean mix_videodecodeparams_equal(MixParams * first, MixParams * second); +static void mix_videodecodeparams_finalize(MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoDecodeParams, mix_videodecodeparams, + MIX_TYPE_PARAMS, _do_init); + +static void mix_videodecodeparams_init(MixVideoDecodeParams * self) { + /* initialize properties here */ + + /* TODO: initialize properties */ + + self->timestamp = 0; + self->discontinuity = FALSE; + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; +} + +static void mix_videodecodeparams_class_init(MixVideoDecodeParamsClass * klass) { + MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); + + /* setup static parent class */ + parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); + + mixparams_class->finalize = mix_videodecodeparams_finalize; + mixparams_class->copy = (MixParamsCopyFunction) mix_videodecodeparams_copy; + mixparams_class->dup = (MixParamsDupFunction) mix_videodecodeparams_dup; + mixparams_class->equal + = (MixParamsEqualFunction) mix_videodecodeparams_equal; +} + +MixVideoDecodeParams * +mix_videodecodeparams_new(void) { + MixVideoDecodeParams *ret = + (MixVideoDecodeParams *) g_type_create_instance( + MIX_TYPE_VIDEODECODEPARAMS); + + return ret; +} + +void mix_videodecodeparams_finalize(MixParams * obj) { + /* clean up here. 
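   MixVideoDecodeParams currently owns no heap-allocated members, so finalize only needs to chain up.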
*/
+	/* TODO: cleanup resources allocated */
+
+	/* Chain up parent */
+	if (parent_class->finalize) {
+		parent_class->finalize(obj);
+	}
+}
+
+MixVideoDecodeParams *
+mix_videodecodeparams_ref(MixVideoDecodeParams * mix) {
+	return (MixVideoDecodeParams *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_videodecodeparams_dup:
+ * @obj: a #MixVideoDecodeParams object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams *
+mix_videodecodeparams_dup(const MixParams * obj) {
+	MixParams *ret = NULL;
+
+	if (MIX_IS_VIDEODECODEPARAMS(obj)) {
+		MixVideoDecodeParams *duplicate = mix_videodecodeparams_new();
+		if (mix_videodecodeparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+			ret = MIX_PARAMS(duplicate);
+		} else {
+			mix_videodecodeparams_unref(duplicate);
+		}
+	}
+	return ret;
+}
+
+/**
+ * mix_videodecodeparams_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicating whether the copy succeeded.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_videodecodeparams_copy(MixParams * target, const MixParams * src) {
+	MixVideoDecodeParams *this_target, *this_src;
+
+	if (MIX_IS_VIDEODECODEPARAMS(target) && MIX_IS_VIDEODECODEPARAMS(src)) {
+		// Cast the base object to this child object
+		this_target = MIX_VIDEODECODEPARAMS(target);
+		this_src = MIX_VIDEODECODEPARAMS(src);
+
+		// Copy properties
+		this_target->timestamp = this_src->timestamp;
+		this_target->discontinuity = this_src->discontinuity;
+
+		// Now chain up to the base class
+		if (parent_class->copy) {
+			return parent_class->copy(MIX_PARAMS_CAST(target),
+					MIX_PARAMS_CAST(src));
+		} else {
+			return TRUE;
+		}
+	}
+	return FALSE;
+}
+
+/**
+ * mix_videodecodeparams_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the instances are equal.
+ *
+ * Compare instance data of @first and @second for equality.
+ */
+gboolean mix_videodecodeparams_equal(MixParams * first, MixParams * second) {
+	gboolean ret = FALSE;
+	MixVideoDecodeParams *this_first, *this_second;
+
+	if (MIX_IS_VIDEODECODEPARAMS(first) && MIX_IS_VIDEODECODEPARAMS(second)) {
+		// Deep compare
+		// Cast the base object to this child object
+		this_first = MIX_VIDEODECODEPARAMS(first);
+		this_second = MIX_VIDEODECODEPARAMS(second);
+
+		// Compare properties
+		if (this_first->timestamp == this_second->timestamp
+				&& this_first->discontinuity == this_second->discontinuity) {
+			// members within this scope equal. chaining up.
+			MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+			if (klass->equal)
+				ret = klass->equal(first, second);
+			else
+				ret = TRUE;
+		}
+	}
+
+	return ret;
+}
+
+#define MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT(obj) \
+	if(!obj) return MIX_RESULT_NULL_PTR; \
+	if(!MIX_IS_VIDEODECODEPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT(obj, prop) \
+	if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+	if(!MIX_IS_VIDEODECODEPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+
+/* TODO: Add getters and setters for properties.
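   The accessors below follow the standard checked pattern: validate via the
   CHECK_INPUT macro, assign or read the field, and return MIX_RESULT_SUCCESS.
   A hypothetical caller (illustration only, not from the original sources):

       MixVideoDecodeParams *params = mix_videodecodeparams_new();
       mix_videodecodeparams_set_timestamp(params, 33366667);
       mix_videodecodeparams_set_discontinuity(params, FALSE);
       mix_videodecodeparams_unref(params);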
*/ + +MIX_RESULT mix_videodecodeparams_set_timestamp(MixVideoDecodeParams * obj, + guint64 timestamp) { + MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->timestamp = timestamp; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videodecodeparams_get_timestamp(MixVideoDecodeParams * obj, + guint64 * timestamp) { + MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, timestamp); + *timestamp = obj->timestamp; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videodecodeparams_set_discontinuity(MixVideoDecodeParams * obj, + gboolean discontinuity) { + MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->discontinuity = discontinuity; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videodecodeparams_get_discontinuity(MixVideoDecodeParams * obj, + gboolean *discontinuity) { + MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity); + *discontinuity = obj->discontinuity; + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideodecodeparams.h b/mix_video/src/mixvideodecodeparams.h new file mode 100644 index 0000000..34f1a22 --- /dev/null +++ b/mix_video/src/mixvideodecodeparams.h @@ -0,0 +1,139 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEODECODEPARAMS_H__ +#define __MIX_VIDEODECODEPARAMS_H__ + +#include +#include "mixvideodef.h" + +/** + * MIX_TYPE_VIDEODECODEPARAMS: + * + * Get type of class. + */ +#define MIX_TYPE_VIDEODECODEPARAMS (mix_videodecodeparams_get_type ()) + +/** + * MIX_VIDEODECODEPARAMS: + * @obj: object to be type-casted. + */ +#define MIX_VIDEODECODEPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEODECODEPARAMS, MixVideoDecodeParams)) + +/** + * MIX_IS_VIDEODECODEPARAMS: + * @obj: an object. + * + * Checks if the given object is an instance of #MixParams + */ +#define MIX_IS_VIDEODECODEPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEODECODEPARAMS)) + +/** + * MIX_VIDEODECODEPARAMS_CLASS: + * @klass: class to be type-casted. + */ +#define MIX_VIDEODECODEPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEODECODEPARAMS, MixVideoDecodeParamsClass)) + +/** + * MIX_IS_VIDEODECODEPARAMS_CLASS: + * @klass: a class. + * + * Checks if the given class is #MixParamsClass + */ +#define MIX_IS_VIDEODECODEPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEODECODEPARAMS)) + +/** + * MIX_VIDEODECODEPARAMS_GET_CLASS: + * @obj: a #MixParams object. + * + * Get the class instance of the object. 
+ */ +#define MIX_VIDEODECODEPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEODECODEPARAMS, MixVideoDecodeParamsClass)) + +typedef struct _MixVideoDecodeParams MixVideoDecodeParams; +typedef struct _MixVideoDecodeParamsClass MixVideoDecodeParamsClass; + +/** + * MixVideoDecodeParams: + * + * MI-X VideoDecode Parameter object + */ +struct _MixVideoDecodeParams { + /*< public > */ + MixParams parent; + + /*< public > */ + + /* TODO: Add properties */ + guint64 timestamp; + gboolean discontinuity; + + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; +}; + +/** + * MixVideoDecodeParamsClass: + * + * MI-X VideoDecode object class + */ +struct _MixVideoDecodeParamsClass { + /*< public > */ + MixParamsClass parent_class; + + /* class members */ +}; + +/** + * mix_videodecodeparams_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videodecodeparams_get_type(void); + +/** + * mix_videodecodeparams_new: + * @returns: A newly allocated instance of #MixVideoDecodeParams + * + * Use this method to create new instance of #MixVideoDecodeParams + */ +MixVideoDecodeParams *mix_videodecodeparams_new(void); +/** + * mix_videodecodeparams_ref: + * @mix: object to add reference + * @returns: the MixVideoDecodeParams instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoDecodeParams *mix_videodecodeparams_ref(MixVideoDecodeParams * mix); + +/** + * mix_videodecodeparams_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +#define mix_videodecodeparams_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +/* TODO: Add getters and setters for properties */ +MIX_RESULT mix_videodecodeparams_set_timestamp(MixVideoDecodeParams * obj, + guint64 timestamp); +MIX_RESULT mix_videodecodeparams_get_timestamp(MixVideoDecodeParams * obj, + guint64 * timestamp); + +MIX_RESULT mix_videodecodeparams_set_discontinuity(MixVideoDecodeParams * obj, + gboolean discontinuity); +MIX_RESULT mix_videodecodeparams_get_discontinuity(MixVideoDecodeParams * obj, + gboolean *discontinuity); + +#endif /* __MIX_VIDEODECODEPARAMS_H__ */ diff --git a/mix_video/src/mixvideodef.h b/mix_video/src/mixvideodef.h new file mode 100644 index 0000000..bb80987 --- /dev/null +++ b/mix_video/src/mixvideodef.h @@ -0,0 +1,114 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+ */ + +#ifndef __MIX_VIDEO_DEF_H__ +#define __MIX_VIDEO_DEF_H__ +#include + +/* + * MI-X video error code + */ +typedef enum { + MIX_RESULT_FRAME_NOTAVAIL = MIX_RESULT_ERROR_VIDEO_START + 1, + MIX_RESULT_EOS, + MIX_RESULT_POOLEMPTY, + MIX_RESULT_OUTOFSURFACES, + MIX_RESULT_DROPFRAME, + MIX_RESULT_NOTIMPL, + MIX_RESULT_VIDEO_LAST +} MIX_VIDEO_ERROR_CODE; + +/* + MixCodecMode + */ +typedef enum { + MIX_CODEC_MODE_ENCODE = 0, + MIX_CODEC_MODE_DECODE, + MIX_CODEC_MODE_LAST +} MixCodecMode; + +typedef enum { + MIX_FRAMEORDER_MODE_DISPLAYORDER = 0, + MIX_FRAMEORDER_MODE_DECODEORDER, + MIX_FRAMEORDER_MODE_LAST +} MixFrameOrderMode; + +typedef struct _MixIOVec { + guchar *data; + gint buffer_size; + gint data_size; +} MixIOVec; + +typedef struct _MixRect { + gshort x; + gshort y; + gushort width; + gushort height; +} MixRect; + +typedef enum { + MIX_STATE_UNINITIALIZED = 0, + MIX_STATE_INITIALIZED, + MIX_STATE_CONFIGURED, + MIX_STATE_LAST +} MixState; + + +typedef enum +{ + MIX_RAW_TARGET_FORMAT_NONE = 0, + MIX_RAW_TARGET_FORMAT_YUV420 = 1, + MIX_RAW_TARGET_FORMAT_YUV422 = 2, + MIX_RAW_TARGET_FORMAT_YUV444 = 4, + MIX_RAW_TARGET_FORMAT_PROTECTED = 0x80000000, + MIX_RAW_TARGET_FORMAT_LAST +} MixRawTargetFormat; + + +typedef enum +{ + MIX_ENCODE_TARGET_FORMAT_MPEG4 = 0, + MIX_ENCODE_TARGET_FORMAT_H263 = 2, + MIX_ENCODE_TARGET_FORMAT_H264 = 4, + MIX_ENCODE_TARGET_FORMAT_PREVIEW = 8, + MIX_ENCODE_TARGET_FORMAT_LAST +} MixEncodeTargetFormat; + + +typedef enum +{ + MIX_RATE_CONTROL_NONE = 1, + MIX_RATE_CONTROL_CBR = 2, + MIX_RATE_CONTROL_VBR = 4, + MIX_RATE_CONTROL_LAST +} MixRateControl; + +typedef enum +{ + MIX_PROFILE_MPEG2SIMPLE = 0, + MIX_PROFILE_MPEG2MAIN, + MIX_PROFILE_MPEG4SIMPLE, + MIX_PROFILE_MPEG4ADVANCEDSIMPLE, + MIX_PROFILE_MPEG4MAIN, + MIX_PROFILE_H264BASELINE, + MIX_PROFILE_H264MAIN, + MIX_PROFILE_H264HIGH, + MIX_PROFILE_VC1SIMPLE, + MIX_PROFILE_VC1MAIN, + MIX_PROFILE_VC1ADVANCED, + MIX_PROFILE_H263BASELINE +} MixProfile; + +typedef enum +{ + MIX_DELIMITER_LENGTHPREFIX = 0, + MIX_DELIMITER_ANNEXB +} MixDelimiterType; + + +#endif /* __MIX_VIDEO_DEF_H__ */ diff --git a/mix_video/src/mixvideoencodeparams.c b/mix_video/src/mixvideoencodeparams.c new file mode 100644 index 0000000..809004d --- /dev/null +++ b/mix_video/src/mixvideoencodeparams.c @@ -0,0 +1,204 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideoencodeparams + * @short_description: VideoDecode parameters + * + * A data object which stores videodecode specific parameters. 
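Note on the enums in mixvideodef.h above: MixRawTargetFormat and MixRateControl use power-of-two values, so several of them can be OR-ed into one mask, while MixCodecMode, MixProfile and the remaining enums are plain sequential values. A minimal sketch (fmt_mask is a hypothetical capability word, not an API in this patch):

    guint fmt_mask = MIX_RAW_TARGET_FORMAT_YUV420 | MIX_RAW_TARGET_FORMAT_YUV422;
    if (fmt_mask & MIX_RAW_TARGET_FORMAT_YUV420) {
        /* 4:2:0 path supported */
    }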
+ */ + +#include "mixvideoencodeparams.h" + +static GType _mix_videoencodeparams_type = 0; +static MixParamsClass *parent_class = NULL; + +#define _do_init { _mix_videoencodeparams_type = g_define_type_id; } + +gboolean mix_videoencodeparams_copy(MixParams * target, const MixParams * src); +MixParams *mix_videoencodeparams_dup(const MixParams * obj); +gboolean mix_videoencodeparams_equal(MixParams * first, MixParams * second); +static void mix_videoencodeparams_finalize(MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoEncodeParams, mix_videoencodeparams, + MIX_TYPE_PARAMS, _do_init); + +static void mix_videoencodeparams_init(MixVideoEncodeParams * self) { + /* initialize properties here */ + + /* TODO: initialize properties */ + + self->timestamp = 0; + self->discontinuity = FALSE; + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; +} + +static void mix_videoencodeparams_class_init(MixVideoEncodeParamsClass * klass) { + MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); + + /* setup static parent class */ + parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); + + mixparams_class->finalize = mix_videoencodeparams_finalize; + mixparams_class->copy = (MixParamsCopyFunction) mix_videoencodeparams_copy; + mixparams_class->dup = (MixParamsDupFunction) mix_videoencodeparams_dup; + mixparams_class->equal + = (MixParamsEqualFunction) mix_videoencodeparams_equal; +} + +MixVideoEncodeParams * +mix_videoencodeparams_new(void) { + MixVideoEncodeParams *ret = + (MixVideoEncodeParams *) g_type_create_instance( + MIX_TYPE_VIDEOENCODEPARAMS); + + return ret; +} + +void mix_videoencodeparams_finalize(MixParams * obj) { + /* clean up here. */ + /* TODO: cleanup resources allocated */ + + /* Chain up parent */ + if (parent_class->finalize) { + parent_class->finalize(obj); + } +} + +MixVideoEncodeParams * +mix_videoencodeparams_ref(MixVideoEncodeParams * mix) { + return (MixVideoEncodeParams *) mix_params_ref(MIX_PARAMS(mix)); +} + +/** + * mix_videoencodeparams_dup: + * @obj: a #MixVideoEncodeParams object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. + */ +MixParams * +mix_videoencodeparams_dup(const MixParams * obj) { + MixParams *ret = NULL; + + if (MIX_IS_VIDEOENCODEPARAMS(obj)) { + MixVideoEncodeParams *duplicate = mix_videoencodeparams_new(); + if (mix_videoencodeparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { + ret = MIX_PARAMS(duplicate); + } else { + mix_videoencodeparams_unref(duplicate); + } + } + return ret; +} + +/** + * mix_videoencodeparams_copy: + * @target: copy to target + * @src: copy from src + * @returns: boolean indicates if copy is successful. + * + * Copy instance data from @src to @target. + */ +gboolean mix_videoencodeparams_copy(MixParams * target, const MixParams * src) { + MixVideoEncodeParams *this_target, *this_src; + + if (MIX_IS_VIDEOENCODEPARAMS(target) && MIX_IS_VIDEOENCODEPARAMS(src)) { + // Cast the base object to this child object + this_target = MIX_VIDEOENCODEPARAMS(target); + this_src = MIX_VIDEOENCODEPARAMS(src); + + // TODO: copy properties */ + + // Now chainup base class + if (parent_class->copy) { + return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( + src)); + } else { + return TRUE; + } + } + return FALSE; +} + +/** + * mix_videoencodeparams_: + * @first: first object to compare + * @second: seond object to compare + * @returns: boolean indicates if instance are equal. 
+ * + * Copy instance data from @src to @target. + */ +gboolean mix_videoencodeparams_equal(MixParams * first, MixParams * second) { + gboolean ret = FALSE; + MixVideoEncodeParams *this_first, *this_second; + + if (MIX_IS_VIDEOENCODEPARAMS(first) && MIX_IS_VIDEOENCODEPARAMS(second)) { + // Deep compare + // Cast the base object to this child object + + this_first = MIX_VIDEOENCODEPARAMS(first); + this_second = MIX_VIDEOENCODEPARAMS(second); + + /* TODO: add comparison for properties */ + /* if ( first properties == sencod properties) */ + { + // members within this scope equal. chaining up. + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->equal) + ret = parent_class->equal(first, second); + else + ret = TRUE; + } + } + + return ret; +} + +#define MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \ + + +/* TODO: Add getters and setters for properties. */ + +MIX_RESULT mix_videoencodeparams_set_timestamp(MixVideoEncodeParams * obj, + guint64 timestamp) { + MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->timestamp = timestamp; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoencodeparams_get_timestamp(MixVideoEncodeParams * obj, + guint64 * timestamp) { + MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, timestamp); + *timestamp = obj->timestamp; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoencodeparams_set_discontinuity(MixVideoEncodeParams * obj, + gboolean discontinuity) { + MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->discontinuity = discontinuity; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoencodeparams_get_discontinuity(MixVideoEncodeParams * obj, + gboolean *discontinuity) { + MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity); + *discontinuity = obj->discontinuity; + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoencodeparams.h b/mix_video/src/mixvideoencodeparams.h new file mode 100644 index 0000000..8709cb9 --- /dev/null +++ b/mix_video/src/mixvideoencodeparams.h @@ -0,0 +1,140 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
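The SETTER/GETTER_CHECK_INPUT macros above expand to exactly two guard statements; written out inside mix_videoencodeparams_set_timestamp() they would read:

    if (!obj) return MIX_RESULT_NULL_PTR;
    if (!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL;

so every setter and getter rejects a NULL argument with MIX_RESULT_NULL_PTR and a wrongly typed object with MIX_RESULT_FAIL before touching any member.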
+ */ + +#ifndef __MIX_VIDEOENCODEPARAMS_H__ +#define __MIX_VIDEOENCODEPARAMS_H__ + +#include +#include "mixvideodef.h" + +/** + * MIX_TYPE_VIDEOENCODEPARAMS: + * + * Get type of class. + */ +#define MIX_TYPE_VIDEOENCODEPARAMS (mix_videoencodeparams_get_type ()) + +/** + * MIX_VIDEOENCODEPARAMS: + * @obj: object to be type-casted. + */ +#define MIX_VIDEOENCODEPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOENCODEPARAMS, MixVideoEncodeParams)) + +/** + * MIX_IS_VIDEOENCODEPARAMS: + * @obj: an object. + * + * Checks if the given object is an instance of #MixParams + */ +#define MIX_IS_VIDEOENCODEPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOENCODEPARAMS)) + +/** + * MIX_VIDEOENCODEPARAMS_CLASS: + * @klass: class to be type-casted. + */ +#define MIX_VIDEOENCODEPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOENCODEPARAMS, MixVideoEncodeParamsClass)) + +/** + * MIX_IS_VIDEOENCODEPARAMS_CLASS: + * @klass: a class. + * + * Checks if the given class is #MixParamsClass + */ +#define MIX_IS_VIDEOENCODEPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOENCODEPARAMS)) + +/** + * MIX_VIDEOENCODEPARAMS_GET_CLASS: + * @obj: a #MixParams object. + * + * Get the class instance of the object. + */ +#define MIX_VIDEOENCODEPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOENCODEPARAMS, MixVideoEncodeParamsClass)) + +typedef struct _MixVideoEncodeParams MixVideoEncodeParams; +typedef struct _MixVideoEncodeParamsClass MixVideoEncodeParamsClass; + +/** + * MixVideoEncodeParams: + * + * MI-X VideoDecode Parameter object + */ +struct _MixVideoEncodeParams { + /*< public > */ + MixParams parent; + + /*< public > */ + + /* TODO: Add properties */ + guint64 timestamp; + gboolean discontinuity; + + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; +}; + +/** + * MixVideoEncodeParamsClass: + * + * MI-X VideoDecode object class + */ +struct _MixVideoEncodeParamsClass { + /*< public > */ + MixParamsClass parent_class; + + /* class members */ +}; + +/** + * mix_videoencodeparams_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videoencodeparams_get_type(void); + +/** + * mix_videoencodeparams_new: + * @returns: A newly allocated instance of #MixVideoEncodeParams + * + * Use this method to create new instance of #MixVideoEncodeParams + */ +MixVideoEncodeParams *mix_videoencodeparams_new(void); +/** + * mix_videoencodeparams_ref: + * @mix: object to add reference + * @returns: the MixVideoEncodeParams instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoEncodeParams *mix_videoencodeparams_ref(MixVideoEncodeParams * mix); + +/** + * mix_videoencodeparams_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. 
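A short reference-counting sketch for the API above (the _ref function is a thin wrapper over mix_params_ref, and _unref is the macro defined below; every ref must be balanced by an unref):

    MixVideoEncodeParams *p = mix_videoencodeparams_new();    /* refcount 1 */
    MixVideoEncodeParams *q = mix_videoencodeparams_ref(p);   /* refcount 2, q == p */
    mix_videoencodeparams_unref(q);
    mix_videoencodeparams_unref(p);                           /* object freed */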
+ */ +#define mix_videoencodeparams_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +/* TODO: Add getters and setters for properties */ +MIX_RESULT mix_videoencodeparams_set_timestamp(MixVideoEncodeParams * obj, + guint64 timestamp); +MIX_RESULT mix_videoencodeparams_get_timestamp(MixVideoEncodeParams * obj, + guint64 * timestamp); + +MIX_RESULT mix_videoencodeparams_set_discontinuity(MixVideoEncodeParams * obj, + gboolean discontinuity); +MIX_RESULT mix_videoencodeparams_get_discontinuity(MixVideoEncodeParams * obj, + gboolean *discontinuity); + +#endif /* __MIX_VIDEOENCODEPARAMS_H__ */ + diff --git a/mix_video/src/mixvideoformat.c b/mix_video/src/mixvideoformat.c new file mode 100644 index 0000000..fa601cb --- /dev/null +++ b/mix_video/src/mixvideoformat.c @@ -0,0 +1,401 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+ */ +#include +#include "mixvideolog.h" + +#include "mixvideoformat.h" + +#define MIXUNREF(obj, unref) if(obj) { unref(obj); obj = NULL; } + + +/* Default vmethods implementation */ +static MIX_RESULT mix_videofmt_getcaps_default(MixVideoFormat *mix, + GString *msg); +static MIX_RESULT mix_videofmt_initialize_default(MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay vadisplay); +static MIX_RESULT + mix_videofmt_decode_default(MixVideoFormat *mix, + MixBuffer * bufin[], gint bufincnt, + MixVideoDecodeParams * decode_params); +static MIX_RESULT mix_videofmt_flush_default(MixVideoFormat *mix); +static MIX_RESULT mix_videofmt_eos_default(MixVideoFormat *mix); +static MIX_RESULT mix_videofmt_deinitialize_default(MixVideoFormat *mix); + +static GObjectClass *parent_class = NULL; + +static void mix_videoformat_finalize(GObject * obj); +G_DEFINE_TYPE (MixVideoFormat, mix_videoformat, G_TYPE_OBJECT); + +static void mix_videoformat_init(MixVideoFormat * self) { + + /* public member initialization */ + /* These are all public because MixVideoFormat objects are completely internal to MixVideo, + no need for private members */ + + self->initialized = FALSE; + self->framemgr = NULL; + self->surfacepool = NULL; + self->inputbufpool = NULL; + self->inputbufqueue = NULL; + self->va_display = NULL; + self->va_context = VA_INVALID_ID; + self->va_config = VA_INVALID_ID; + self->va_surfaces = NULL; + self->va_num_surfaces = 0; + self->mime_type = NULL; + self->frame_rate_num = 0; + self->frame_rate_denom = 0; + self->picture_width = 0; + self->picture_height = 0; + self->parse_in_progress = FALSE; + self->current_timestamp = 0; +} + +static void mix_videoformat_class_init(MixVideoFormatClass * klass) { + GObjectClass *gobject_class = (GObjectClass *) klass; + + /* parent class for later use */ + parent_class = g_type_class_peek_parent(klass); + + gobject_class->finalize = mix_videoformat_finalize; + + /* setup vmethods with base implementation */ + klass->getcaps = mix_videofmt_getcaps_default; + klass->initialize = mix_videofmt_initialize_default; + klass->decode = mix_videofmt_decode_default; + klass->flush = mix_videofmt_flush_default; + klass->eos = mix_videofmt_eos_default; + klass->deinitialize = mix_videofmt_deinitialize_default; +} + +MixVideoFormat * +mix_videoformat_new(void) { + MixVideoFormat *ret = g_object_new(MIX_TYPE_VIDEOFORMAT, NULL); + + return ret; +} + +void mix_videoformat_finalize(GObject * obj) { + /* clean up here. 
*/ + VAStatus va_status; + + MixVideoFormat *mix = MIX_VIDEOFORMAT(obj); + MixInputBufferEntry *buf_entry = NULL; + + if(mix->objectlock) { + g_mutex_free(mix->objectlock); + mix->objectlock = NULL; + } + + if (mix->mime_type) + { + if (mix->mime_type->str) + g_string_free(mix->mime_type, TRUE); + else + g_string_free(mix->mime_type, FALSE); + } + + //MixVideo object calls the _deinitialize() for frame manager + MIXUNREF(mix->framemgr, mix_framemanager_unref); + + if (mix->surfacepool) + { + mix_surfacepool_deinitialize(mix->surfacepool); + MIXUNREF(mix->surfacepool, mix_surfacepool_unref); + } + + //libVA cleanup (vaTerminate is called from MixVideo object) + if (mix->va_display) { + if (mix->va_config != VA_INVALID_ID) + { + va_status = vaDestroyConfig(mix->va_display, mix->va_config); + if (va_status != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaDestroyConfig\n"); + } + mix->va_config = VA_INVALID_ID; + } + if (mix->va_context != VA_INVALID_ID) + { + va_status = vaDestroyContext(mix->va_display, mix->va_context); + if (va_status != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaDestroyContext\n"); + } + mix->va_context = VA_INVALID_ID; + } + if (mix->va_surfaces) + { + va_status = vaDestroySurfaces(mix->va_display, mix->va_surfaces, mix->va_num_surfaces); + if (va_status != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaDestroySurfaces\n"); + } + g_free(mix->va_surfaces); + mix->va_surfaces = NULL; + mix->va_num_surfaces = 0; + } + } + + + //Deinit input buffer queue + + while (!g_queue_is_empty(mix->inputbufqueue)) + { + buf_entry = g_queue_pop_head(mix->inputbufqueue); + mix_buffer_unref(buf_entry->buf); + g_free(buf_entry); + } + + g_queue_free(mix->inputbufqueue); + + //MixBuffer pool is deallocated in MixVideo object + mix->inputbufpool = NULL; + + /* Chain up parent */ + if (parent_class->finalize) { + parent_class->finalize(obj); + } +} + +MixVideoFormat * +mix_videoformat_ref(MixVideoFormat * mix) { + return (MixVideoFormat *) g_object_ref(G_OBJECT(mix)); +} + +/* Default vmethods implementation */ +static MIX_RESULT mix_videofmt_getcaps_default(MixVideoFormat *mix, + GString *msg) { + g_print("mix_videofmt_getcaps_default\n"); + return MIX_RESULT_SUCCESS; +} + +static MIX_RESULT mix_videofmt_initialize_default(MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display) { + + LOG_V( "Begin\n"); + + MIX_RESULT res = MIX_RESULT_SUCCESS; + MixInputBufferEntry *buf_entry = NULL; + + if (!mix || !config_params || !frame_mgr || !input_buf_pool || !surface_pool || !va_display) + { + LOG_E( "Null pointer passed in\n"); + return (MIX_RESULT_NULL_PTR); + } + + // Create object lock + // Note that g_thread_init() has already been called by mix_video_init() + if (mix->objectlock) //If already exists, then deallocate old one (we are being re-initialized) + { + g_mutex_free(mix->objectlock); + mix->objectlock = NULL; + } + mix->objectlock = g_mutex_new(); + if (!mix->objectlock) { + LOG_E( "!mix->objectlock\n"); + return (MIX_RESULT_NO_MEMORY); + } + + g_mutex_lock(mix->objectlock); + + //Clean up any previous framemgr + MIXUNREF(mix->framemgr, mix_framemanager_unref); + mix->framemgr = frame_mgr; + mix_framemanager_ref(mix->framemgr); + + mix->va_display = va_display; + + if (mix->mime_type) //Clean up any previous mime_type + { + if (mix->mime_type->str) + g_string_free(mix->mime_type, TRUE); + else + g_string_free(mix->mime_type, FALSE); + } + gchar *mime_tmp = NULL; + res =
 mix_videoconfigparamsdec_get_mime_type(config_params, &mime_tmp); + if (mime_tmp) + { + mix->mime_type = g_string_new(mime_tmp); + g_free(mime_tmp); + if (!mix->mime_type) //new failed + { + res = MIX_RESULT_NO_MEMORY; + LOG_E( "Could not duplicate mime_type\n"); + goto cleanup; + } + } //else there is no mime_type; leave as NULL + + res = mix_videoconfigparamsdec_get_frame_rate(config_params, &(mix->frame_rate_num), &(mix->frame_rate_denom)); + if (res != MIX_RESULT_SUCCESS) + { + LOG_E( "Error getting frame_rate\n"); + goto cleanup; + } + res = mix_videoconfigparamsdec_get_picture_res(config_params, &(mix->picture_width), &(mix->picture_height)); + if (res != MIX_RESULT_SUCCESS) + { + LOG_E( "Error getting picture_res\n"); + goto cleanup; + } + + if (mix->inputbufqueue) + { + //Deinit previous input buffer queue + + while (!g_queue_is_empty(mix->inputbufqueue)) + { + buf_entry = g_queue_pop_head(mix->inputbufqueue); + mix_buffer_unref(buf_entry->buf); + g_free(buf_entry); + } + + g_queue_free(mix->inputbufqueue); + } + + //MixBuffer pool is cleaned up in MixVideo object + mix->inputbufpool = NULL; + + mix->inputbufpool = input_buf_pool; + mix->inputbufqueue = g_queue_new(); + if (!mix->inputbufqueue) //New failed + { + res = MIX_RESULT_NO_MEMORY; + LOG_E( "Could not create input buffer queue\n"); + goto cleanup; + } + + // surface pool, VA context/config and parser handle are initialized by + // derived classes + + + cleanup: + if (res != MIX_RESULT_SUCCESS) { + + MIXUNREF(mix->framemgr, mix_framemanager_unref); + if (mix->mime_type) + { + if (mix->mime_type->str) + g_string_free(mix->mime_type, TRUE); + else + g_string_free(mix->mime_type, FALSE); + mix->mime_type = NULL; + } + + if (mix->objectlock) + g_mutex_unlock(mix->objectlock); + g_mutex_free(mix->objectlock); + mix->objectlock = NULL; + mix->frame_rate_num = 0; + mix->frame_rate_denom = 1; + mix->picture_width = 0; + mix->picture_height = 0; + + } else { + //Normal unlock + if (mix->objectlock) + g_mutex_unlock(mix->objectlock); + } + + LOG_V( "End\n"); + + return res; +} + +static MIX_RESULT mix_videofmt_decode_default(MixVideoFormat *mix, + MixBuffer * bufin[], gint bufincnt, + MixVideoDecodeParams * decode_params) { + return MIX_RESULT_SUCCESS; +} + +static MIX_RESULT mix_videofmt_flush_default(MixVideoFormat *mix) { + return MIX_RESULT_SUCCESS; +} + +static MIX_RESULT mix_videofmt_eos_default(MixVideoFormat *mix) { + return MIX_RESULT_SUCCESS; +} + +static MIX_RESULT mix_videofmt_deinitialize_default(MixVideoFormat *mix) { + + //All teardown is being done in _finalize() + + return MIX_RESULT_SUCCESS; +} + +/* mixvideoformat class methods implementation */ + +MIX_RESULT mix_videofmt_getcaps(MixVideoFormat *mix, GString *msg) { + MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix); + g_print("mix_videofmt_getcaps\n"); + if (klass->getcaps) { + return klass->getcaps(mix, msg); + } + return MIX_RESULT_NOTIMPL; +} + +MIX_RESULT mix_videofmt_initialize(MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display) { + MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix); + + if (klass->initialize) { + return klass->initialize(mix, config_params, frame_mgr, + input_buf_pool, surface_pool, va_display); + } + + return MIX_RESULT_FAIL; + +} + +MIX_RESULT mix_videofmt_decode(MixVideoFormat *mix, MixBuffer * bufin[], + gint bufincnt, MixVideoDecodeParams * decode_params) { + + MixVideoFormatClass *klass =
MIX_VIDEOFORMAT_GET_CLASS(mix); + if (klass->decode) { + return klass->decode(mix, bufin, bufincnt, decode_params); + } + + return MIX_RESULT_FAIL; +} + +MIX_RESULT mix_videofmt_flush(MixVideoFormat *mix) { + MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix); + if (klass->flush) { + return klass->flush(mix); + } + + return MIX_RESULT_FAIL; +} + +MIX_RESULT mix_videofmt_eos(MixVideoFormat *mix) { + MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix); + if (klass->eos) { + return klass->eos(mix); + } + + return MIX_RESULT_FAIL; +} + +MIX_RESULT mix_videofmt_deinitialize(MixVideoFormat *mix) { + MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix); + if (klass->deinitialize) { + return klass->deinitialize(mix); + } + + return MIX_RESULT_FAIL; +} diff --git a/mix_video/src/mixvideoformat.h b/mix_video/src/mixvideoformat.h new file mode 100644 index 0000000..c2e4769 --- /dev/null +++ b/mix_video/src/mixvideoformat.h @@ -0,0 +1,160 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEOFORMAT_H__ +#define __MIX_VIDEOFORMAT_H__ + +#include +#include +#include "vbp_loader.h" +#include "mixvideodef.h" +#include "mixdrmparams.h" +#include "mixvideoconfigparamsdec.h" +#include "mixvideodecodeparams.h" +#include "mixvideoframe.h" +#include "mixframemanager.h" +#include "mixsurfacepool.h" +#include "mixbuffer.h" +#include "mixbufferpool.h" +#include "mixvideoformatqueue.h" + +// Redefine the Handle defined in vbp_loader.h +#define VBPhandle Handle + +/* + * Type macros. 
+ */ +#define MIX_TYPE_VIDEOFORMAT (mix_videoformat_get_type ()) +#define MIX_VIDEOFORMAT(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMAT, MixVideoFormat)) +#define MIX_IS_VIDEOFORMAT(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMAT)) +#define MIX_VIDEOFORMAT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMAT, MixVideoFormatClass)) +#define MIX_IS_VIDEOFORMAT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMAT)) +#define MIX_VIDEOFORMAT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMAT, MixVideoFormatClass)) + +typedef struct _MixVideoFormat MixVideoFormat; +typedef struct _MixVideoFormatClass MixVideoFormatClass; + +/* vmethods typedef */ + +typedef MIX_RESULT (*MixVideoFmtGetCapsFunc)(MixVideoFormat *mix, GString *msg); +typedef MIX_RESULT (*MixVideoFmtInitializeFunc)(MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); +typedef MIX_RESULT (*MixVideoFmtDecodeFunc)(MixVideoFormat *mix, + MixBuffer * bufin[], gint bufincnt, + MixVideoDecodeParams * decode_params); +typedef MIX_RESULT (*MixVideoFmtFlushFunc)(MixVideoFormat *mix); +typedef MIX_RESULT (*MixVideoFmtEndOfStreamFunc)(MixVideoFormat *mix); +typedef MIX_RESULT (*MixVideoFmtDeinitializeFunc)(MixVideoFormat *mix); + +struct _MixVideoFormat { + /*< public > */ + GObject parent; + + /*< public > */ + + /*< private > */ + GMutex *objectlock; + gboolean initialized; + MixFrameManager *framemgr; + MixSurfacePool *surfacepool; + VADisplay va_display; + VAContextID va_context; + VAConfigID va_config; + VASurfaceID *va_surfaces; + guint va_num_surfaces; + VBPhandle parser_handle; + GString *mime_type; + guint frame_rate_num; + guint frame_rate_denom; + guint picture_width; + guint picture_height; + gboolean parse_in_progress; + gboolean discontinuity_frame_in_progress; + guint64 current_timestamp; + MixBufferPool *inputbufpool; + GQueue *inputbufqueue; +}; + +/** + * MixVideoFormatClass: + * + * MI-X Video object class + */ +struct _MixVideoFormatClass { + /*< public > */ + GObjectClass parent_class; + + /* class members */ + + /*< public > */ + MixVideoFmtGetCapsFunc getcaps; + MixVideoFmtInitializeFunc initialize; + MixVideoFmtDecodeFunc decode; + MixVideoFmtFlushFunc flush; + MixVideoFmtEndOfStreamFunc eos; + MixVideoFmtDeinitializeFunc deinitialize; +}; + +/** + * mix_videoformat_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videoformat_get_type(void); + +/** + * mix_videoformat_new: + * @returns: A newly allocated instance of #MixVideoFormat + * + * Use this method to create new instance of #MixVideoFormat + */ +MixVideoFormat *mix_videoformat_new(void); + +/** + * mix_videoformat_ref: + * @mix: object to add reference + * @returns: the MixVideoFormat instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoFormat *mix_videoformat_ref(MixVideoFormat * mix); + +/** + * mix_videoformat_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. 
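The class struct above is a classic GObject vtable: the mix_videofmt_*() wrappers declared below dispatch through MIX_VIDEOFORMAT_GET_CLASS(mix), so a derived format overrides entries in its class_init. A hypothetical sketch of the pattern (the MyFormat names are illustrative; mixvideoformat_h264.c below does exactly this):

    static MIX_RESULT my_format_flush(MixVideoFormat *mix)
    {
        /* format-specific flushing here; optionally chain up afterwards */
        return MIX_RESULT_SUCCESS;
    }

    static void my_format_class_init(MyFormatClass *klass)
    {
        MixVideoFormatClass *fmt_class = MIX_VIDEOFORMAT_CLASS(klass);
        fmt_class->flush = my_format_flush;  /* replaces mix_videofmt_flush_default */
    }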
+ */ +#define mix_videoformat_unref(obj) g_object_unref (G_OBJECT(obj)) + +/* Class Methods */ + +MIX_RESULT mix_videofmt_getcaps(MixVideoFormat *mix, GString *msg); + +MIX_RESULT mix_videofmt_initialize(MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); + +MIX_RESULT mix_videofmt_decode(MixVideoFormat *mix, MixBuffer * bufin[], + gint bufincnt, MixVideoDecodeParams * decode_params); + +MIX_RESULT mix_videofmt_flush(MixVideoFormat *mix); + +MIX_RESULT mix_videofmt_eos(MixVideoFormat *mix); + +MIX_RESULT mix_videofmt_deinitialize(MixVideoFormat *mix); + +#endif /* __MIX_VIDEOFORMAT_H__ */ diff --git a/mix_video/src/mixvideoformat_h264.c b/mix_video/src/mixvideoformat_h264.c new file mode 100644 index 0000000..9e81cbf --- /dev/null +++ b/mix_video/src/mixvideoformat_h264.c @@ -0,0 +1,1663 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include +#include + +#include "mixvideolog.h" +#include "mixvideoformat_h264.h" + +#ifdef MIX_LOG_ENABLE +static int mix_video_h264_counter = 0; +#endif /* MIX_LOG_ENABLE */ + +/* The parent class. The pointer will be saved + * in this class's initialization. The pointer + * can be used for chaining method call if needed. + */ +static MixVideoFormatClass *parent_class = NULL; + +static void mix_videoformat_h264_finalize(GObject * obj); + +/* + * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMAT + */ +G_DEFINE_TYPE (MixVideoFormat_H264, mix_videoformat_h264, MIX_TYPE_VIDEOFORMAT); + +static void mix_videoformat_h264_init(MixVideoFormat_H264 * self) { + MixVideoFormat *parent = MIX_VIDEOFORMAT(self); + + /* public member initialization */ + /* These are all public because MixVideoFormat objects are completely internal to MixVideo, + no need for private members */ + self->dpb_surface_table = NULL; + + /* NOTE: we don't need to do this here. 
+ * This just demonstrates how to access + * member variables belonging to the parent + */ + parent->initialized = FALSE; +} + +static void mix_videoformat_h264_class_init( + MixVideoFormat_H264Class * klass) { + + /* root class */ + GObjectClass *gobject_class = (GObjectClass *) klass; + + /* direct parent class */ + MixVideoFormatClass *video_format_class = + MIX_VIDEOFORMAT_CLASS(klass); + + /* parent class for later use */ + parent_class = g_type_class_peek_parent(klass); + + /* setup finalizer */ + gobject_class->finalize = mix_videoformat_h264_finalize; + + /* setup vmethods with base implementation */ + /* This is where we can override base class methods if needed */ + video_format_class->getcaps = mix_videofmt_h264_getcaps; + video_format_class->initialize = mix_videofmt_h264_initialize; + video_format_class->decode = mix_videofmt_h264_decode; + video_format_class->flush = mix_videofmt_h264_flush; + video_format_class->eos = mix_videofmt_h264_eos; + video_format_class->deinitialize = mix_videofmt_h264_deinitialize; +} + +MixVideoFormat_H264 * +mix_videoformat_h264_new(void) { + MixVideoFormat_H264 *ret = + g_object_new(MIX_TYPE_VIDEOFORMAT_H264, NULL); + + return ret; +} + +void mix_videoformat_h264_finalize(GObject * obj) { + gint32 pret = VBP_OK; + + /* clean up here. */ + + MixVideoFormat *parent = NULL; + MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(obj); + GObjectClass *root_class = (GObjectClass *) parent_class; + + parent = MIX_VIDEOFORMAT(self); + + //surfacepool is deallocated by parent + //inputbufqueue is deallocated by parent + //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces + + //Free the DPB surface table + //First remove all the entries (frames will be unrefed) + g_hash_table_remove_all(self->dpb_surface_table); + //Then unref the table + g_hash_table_unref(self->dpb_surface_table); + self->dpb_surface_table = NULL; + + g_mutex_lock(parent->objectlock); + parent->initialized = FALSE; //object is being torn down + parent->parse_in_progress = FALSE; + parent->current_timestamp = 0; + + //Close the parser + pret = vbp_close(parent->parser_handle); + parent->parser_handle = NULL; + if (pret != VBP_OK) + { + LOG_E( "Error closing parser\n"); + } + + g_mutex_unlock(parent->objectlock); + + /* Chain up parent */ + if (root_class->finalize) { + root_class->finalize(obj); + } +} + +MixVideoFormat_H264 * +mix_videoformat_h264_ref(MixVideoFormat_H264 * mix) { + return (MixVideoFormat_H264 *) g_object_ref(G_OBJECT(mix)); +} + +/* H.264 vmethods implementation */ +MIX_RESULT mix_videofmt_h264_getcaps(MixVideoFormat *mix, GString *msg) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (mix == NULL || msg == NULL) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + /* Chain up parent method.
+ */ + + if (parent_class->getcaps) { + ret = parent_class->getcaps(mix, msg); + } + + LOG_V( "End\n"); + + return ret; +} + +MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display ) { + + uint32 pret = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + enum _vbp_parser_type ptype = VBP_H264; + vbp_data_h264 *data = NULL; + MixVideoFormat *parent = NULL; + MixIOVec *header = NULL; + gint numprofs = 0, numactualprofs = 0; + gint numentrypts = 0, numactualentrypts = 0; + VADisplay vadisplay = NULL; + VAProfile *profiles = NULL; + VAEntrypoint *entrypts = NULL; + VAConfigAttrib attrib; + VAStatus vret = VA_STATUS_SUCCESS; + guint extra_surfaces = 0; + VASurfaceID *surfaces = NULL; + guint numSurfaces = 0; + + //TODO Partition this method into smaller methods + + if (mix == NULL || config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL) + { + LOG_E( "NUll pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + /* Chainup parent method. */ + + if (parent_class->initialize) { + ret = parent_class->initialize(mix, config_params, + frame_mgr, input_buf_pool, surface_pool, + va_display); + } + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error initializing\n"); + return ret; + } + + if (!MIX_IS_VIDEOFORMAT_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMAT(mix); + MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + + LOG_V( "Locking\n"); + //From now on, we exit this function through cleanup: + g_mutex_lock(parent->objectlock); + + LOG_V( "Before vbp_open\n"); + //Load the bitstream parser + pret = vbp_open(ptype, &(parent->parser_handle)); + + LOG_V( "After vbp_open\n"); + if (!(pret == VBP_OK)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error opening parser\n"); + goto cleanup; + } + LOG_V( "Opened parser\n"); + + ret = mix_videoconfigparamsdec_get_header(config_params, + &header); + + if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get header data\n"); + goto cleanup; + } + + ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, + &extra_surfaces); + + if (ret != MIX_RESULT_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get extra surface allocation setting\n"); + goto cleanup; + } + + LOG_V( "Calling parse on header data, handle %d\n", (int)parent->parser_handle); + + pret = vbp_parse(parent->parser_handle, header->data, + header->data_size, TRUE); + + if (!((pret == VBP_OK) || (pret == VBP_DONE))) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error parsing header data\n"); + goto cleanup; + } + + LOG_V( "Parsed header\n"); + + //Get the header data and save + pret = vbp_query(parent->parser_handle, (void *)&data); + + if ((pret != VBP_OK) || (data == NULL)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error reading parsed header data\n"); + goto cleanup; + } + + LOG_V( "Queried parser for header data\n"); + + //Time for libva initialization + + vadisplay = parent->va_display; + + numprofs = vaMaxNumProfiles(vadisplay); + profiles = g_malloc(numprofs*sizeof(VAProfile)); + + if (!profiles) + { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating memory\n"); + goto cleanup; + } + + vret = vaQueryConfigProfiles(vadisplay, profiles, + &numactualprofs); + if (!(vret == VA_STATUS_SUCCESS)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + goto cleanup; + 
} + + //check the desired profile support + gint vaprof = 0; + + //TODO Need to cover more cases + switch (data->codec_data->profile_idc) + { +#if 1 +//TODO Reinstate this once constraint_set1 flag has been added to codec_data + case 66: //Baseline profile + + LOG_V( "mix_videofmt_h264_initialize: Baseline profile\n"); + if (data->codec_data->constraint_set1_flag == 0) + { + for (; vaprof < numactualprofs; vaprof++) + { + if (profiles[vaprof] == VAProfileH264Baseline) + break; + } + } else + { + for (; vaprof < numactualprofs; vaprof++) + { + if (profiles[vaprof] == VAProfileH264High) + break; + } + } + if ((vaprof >= numprofs) || ((profiles[vaprof] != VAProfileH264Baseline) && (profiles[vaprof] != VAProfileH264High))) + //Did not get the profile we wanted + { + ret = MIX_RESULT_FAIL; + LOG_E( "Profile not supported by driver\n"); + goto cleanup; + } + break; +#endif + +#if 0 +//Code left in place in case bug is fixed in libva + case 77: //Main profile (need to set to High for libva bug) + LOG_V( "mix_videofmt_h264_initialize: Main profile\n"); + + for (; vaprof < numactualprofs; vaprof++) + { + if (profiles[vaprof] == VAProfileH264Main) + break; + } + if (vaprof >= numprofs || profiles[vaprof] != VAProfileH264Main) + //Did not get the profile we wanted + { + ret = MIX_RESULT_FAIL; + LOG_E( "Profile not supported by driver\n"); + goto cleanup; + } + break; +#endif + + case 100: //High profile + default: //Set to High as default + + LOG_V( "High profile\n"); + + for (; vaprof < numactualprofs; vaprof++) + { + if (profiles[vaprof] == VAProfileH264High) + break; + } + if (vaprof >= numprofs || profiles[vaprof] != VAProfileH264High) + //Did not get the profile we wanted + { + ret = MIX_RESULT_FAIL; + LOG_E( "Profile not supported by driver\n"); + goto cleanup; + } + break; + + + } + + numentrypts = vaMaxNumEntrypoints(vadisplay); + entrypts = g_malloc(numentrypts*sizeof(VAEntrypoint)); + + if (!entrypts) + { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating memory\n"); + goto cleanup; + } + + vret = vaQueryConfigEntrypoints(vadisplay, profiles[vaprof], + entrypts, &numactualentrypts); + if (!(vret == VA_STATUS_SUCCESS)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing driver\n"); + goto cleanup; + } + + gint vaentrypt = 0; + for (; vaentrypt < numactualentrypts; vaentrypt++) + { + if (entrypts[vaentrypt] == VAEntrypointVLD) + break; + } + if (vaentrypt >= numentrypts || entrypts[vaentrypt] != VAEntrypointVLD) + //Did not get the entrypt we wanted + { + ret = MIX_RESULT_FAIL; + LOG_E( "Entry point not supported by driver\n"); + goto cleanup; + } + + //We are requesting RT attributes + attrib.type = VAConfigAttribRTFormat; + + vret = vaGetConfigAttributes(vadisplay, profiles[vaprof], + entrypts[vaentrypt], &attrib, 1); + + //TODO Handle other values returned for RT format + // and check with requested format provided in config params + //Right now only YUV 4:2:0 is supported by libva + // and this is our default + if (((attrib.value & VA_RT_FORMAT_YUV420) == 0) || + vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing driver\n"); + goto cleanup; + } + + //Initialize and save the VA config ID + vret = vaCreateConfig(vadisplay, profiles[vaprof], + entrypts[vaentrypt], &attrib, 1, &(parent->va_config)); + + if (!(vret == VA_STATUS_SUCCESS)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing driver\n"); + goto cleanup; + } + + LOG_V( "Created libva config with profile %d\n", vaprof); + + + //Initialize the surface pool + + LOG_V( "Codec data 
says num_ref_frames is %d\n", data->codec_data->num_ref_frames); + + + // handle both frame and field coding for interlaced content + int num_ref_pictures = data->codec_data->num_ref_frames; + if (!data->codec_data->frame_mbs_only_flag && + !data->codec_data->mb_adaptive_frame_field_flag) + { + + // field coding, two fields share the same surface. + //num_ref_pictures *= 2; + } + + //Adding 1 to work around VBLANK issue + parent->va_num_surfaces = 1 + extra_surfaces + (((num_ref_pictures + 3) < + MIX_VIDEO_H264_SURFACE_NUM) ? + (num_ref_pictures + 3) + : MIX_VIDEO_H264_SURFACE_NUM); + + numSurfaces = parent->va_num_surfaces; + + parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); + + surfaces = parent->va_surfaces; + + if (surfaces == NULL) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot allocate temporary data\n"); + goto cleanup; + } + + LOG_V( "Codec data says picture size is %d x %d\n", (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16); + LOG_V( "getcaps says picture size is %d x %d\n", parent->picture_width, parent->picture_height); + + vret = vaCreateSurfaces(vadisplay, (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, + (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16, entrypts[vaentrypt], + numSurfaces, surfaces); + + if (!(vret == VA_STATUS_SUCCESS)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error allocating surfaces\n"); + goto cleanup; + } + + parent->surfacepool = mix_surfacepool_new(); + *surface_pool = parent->surfacepool; + + if (parent->surfacepool == NULL) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing surface pool\n"); + goto cleanup; + } + + + ret = mix_surfacepool_initialize(parent->surfacepool, + surfaces, numSurfaces); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing. 
+ default: + ret = MIX_RESULT_ALREADY_INIT; + LOG_E( "Error init failure\n"); + goto cleanup; + break; + } + + LOG_V( "Created %d libva surfaces\n", numSurfaces); + + //Initialize and save the VA context ID + //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 + vret = vaCreateContext(vadisplay, parent->va_config, + parent->picture_width, parent->picture_height, + 0, surfaces, numSurfaces, + &(parent->va_context)); + if (!(vret == VA_STATUS_SUCCESS)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + goto cleanup; + } + + LOG_V( "Created libva context width %d, height %d\n", parent->picture_width, parent->picture_height); + + //Create our table of Decoded Picture Buffer "in use" surfaces + self->dpb_surface_table = g_hash_table_new_full(NULL, NULL, mix_videofmt_h264_destroy_DPB_key, mix_videofmt_h264_destroy_DPB_value); + + if (self->dpb_surface_table == NULL) + { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating dbp surface table\n"); + goto cleanup; //leave this goto here in case other code is added between here and cleanup label + } + + cleanup: + if (ret != MIX_RESULT_SUCCESS) { + pret = vbp_close(parent->parser_handle); + parent->parser_handle = NULL; + parent->initialized = FALSE; + + } else { + parent->initialized = TRUE; + } + + if (header != NULL) + { + if (header->data != NULL) + g_free(header->data); + g_free(header); + header = NULL; + } + + g_free(profiles); + g_free(entrypts); + + LOG_V( "Unlocking\n"); + g_mutex_unlock(parent->objectlock); + + + return ret; +} + +MIX_RESULT mix_videofmt_h264_decode(MixVideoFormat *mix, MixBuffer * bufin[], + gint bufincnt, MixVideoDecodeParams * decode_params) { + + uint32 pret = 0; + int i = 0; + MixVideoFormat *parent = NULL; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + guint64 ts = 0; + vbp_data_h264 *data = NULL; + gboolean discontinuity = FALSE; + MixInputBufferEntry *bufentry = NULL; + + LOG_V( "Begin\n"); + + if (mix == NULL || bufin == NULL || decode_params == NULL ) + { + LOG_E( "NUll pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + /* Chainup parent method. + We are not chaining up to parent method for now. 
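A worked example of the va_num_surfaces arithmetic above, with illustrative numbers (MIX_VIDEO_H264_SURFACE_NUM is a compile-time cap defined elsewhere in this patch; its value is not shown here): with num_ref_frames = 4 from codec_data and extra_surfaces = 3 from the config params,

    /* va_num_surfaces = 1 + extra_surfaces
                           + MIN(num_ref_frames + 3, MIX_VIDEO_H264_SURFACE_NUM)
                       = 1 + 3 + MIN(7, cap)
                       = 11 whenever the cap is at least 7 */

where the leading 1 is the VBLANK workaround called out in the comment.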
+ */ + +#if 0 + if (parent_class->decode) { + return parent_class->decode(mix, bufin, bufincnt, + decode_params); + } +#endif + + if (!MIX_IS_VIDEOFORMAT_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMAT(mix); + + + ret = mix_videodecodeparams_get_timestamp(decode_params, + &ts); + if (ret != MIX_RESULT_SUCCESS) + { + return MIX_RESULT_FAIL; + } + + ret = mix_videodecodeparams_get_discontinuity(decode_params, + &discontinuity); + if (ret != MIX_RESULT_SUCCESS) + { + return MIX_RESULT_FAIL; + } + + //From now on, we exit this function through cleanup: + + LOG_V( "Locking\n"); + g_mutex_lock(parent->objectlock); + + LOG_V( "parse in progress is %d\n", parent->parse_in_progress); + //If this is a new frame and we haven't retrieved parser + // workload data from previous frame yet, do so + if ((ts != parent->current_timestamp) && + (parent->parse_in_progress)) + { + + //query for data + pret = vbp_query(parent->parser_handle, + (void *) &data); + + if ((pret != VBP_OK) || (data == NULL)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing parser\n"); + goto cleanup; + } + + LOG_V( "Queried for last frame data\n"); + + //process and decode data + ret = mix_videofmt_h264_process_decode(mix, + data, parent->current_timestamp, + parent->discontinuity_frame_in_progress); + + if (ret != MIX_RESULT_SUCCESS) + { + //We log this but need to process the new frame data, so do not return + LOG_E( "Process_decode failed.\n"); + } + + LOG_V( "Called process and decode for last frame\n"); + + parent->parse_in_progress = FALSE; + + } + + parent->current_timestamp = ts; + parent->discontinuity_frame_in_progress = discontinuity; + + LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_h264_counter++, ts); + + for (i = 0; i < bufincnt; i++) + { + + LOG_V( "Calling parse for current frame, parse handle %d, buf %x, size %d\n", (int)parent->parser_handle, (guint)bufin[i]->data, bufin[i]->size); + + pret = vbp_parse(parent->parser_handle, + bufin[i]->data, + bufin[i]->size, + FALSE); + + LOG_V( "Called parse for current frame\n"); + + if ((pret == VBP_DONE) || (pret == VBP_OK)) + { + //query for data + pret = vbp_query(parent->parser_handle, + (void *) &data); + + if ((pret != VBP_OK) || (data == NULL)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error getting parser data\n"); + goto cleanup; + } + + LOG_V( "Called query for current frame\n"); + + //Increase the ref count of this input buffer + mix_buffer_ref(bufin[i]); + + //Create a new MixInputBufferEntry + //TODO make this from a pool to optimize + bufentry = g_malloc(sizeof( + MixInputBufferEntry)); + if (bufentry == NULL) + { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating bufentry\n"); + goto cleanup; + } + + bufentry->buf = bufin[i]; + LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"G_GINT64_FORMAT"\n", (guint)bufentry, (guint)bufentry->buf, ts); + bufentry->timestamp = ts; + + LOG_V( "Enqueue this input buffer for current frame\n"); + LOG_V( "bufentry->timestamp %"G_GINT64_FORMAT"\n", bufentry->timestamp); + + //Enqueue this input buffer + g_queue_push_tail(parent->inputbufqueue, + (gpointer)bufentry); + + //process and decode data + ret = mix_videofmt_h264_process_decode(mix, + data, ts, discontinuity); + + if (ret != MIX_RESULT_SUCCESS) + { + //We log this but continue since we need to complete our processing of input buffers + LOG_E( "Process_decode failed.\n"); + } + + LOG_V( "Called process and decode for current frame\n"); + + parent->parse_in_progress = FALSE; + } + else if (pret != 
VBP_OK) + { + //We log this but continue since we need to complete our processing of input buffers + LOG_E( "Parsing failed.\n"); + ret = MIX_RESULT_FAIL; + } + else + { + + LOG_V( "Enqueuing buffer and going on to next (if any) for this frame\n"); + + //Increase the ref count of this input buffer + mix_buffer_ref(bufin[i]); + + //Create a new MixInputBufferEntry + //TODO make this from a pool to optimize + bufentry = g_malloc(sizeof + (MixInputBufferEntry)); + if (bufentry == NULL) + { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating bufentry\n"); + goto cleanup; + } + bufentry->buf = bufin[i]; + LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"G_GINT64_FORMAT"\n", (guint)bufentry, (guint)bufentry->buf, ts); + bufentry->timestamp = ts; + + LOG_V( "Enqueue this input buffer for current frame\n"); + LOG_V( "bufentry->timestamp %"G_GINT64_FORMAT"\n", bufentry->timestamp); + + //Enqueue this input buffer + g_queue_push_tail(parent->inputbufqueue, + (gpointer)bufentry); + LOG_V( "Setting parse_in_progress to TRUE\n"); + parent->parse_in_progress = TRUE; + } + + } + + + cleanup: + + LOG_V( "Unlocking\n"); + g_mutex_unlock(parent->objectlock); + + LOG_V( "End\n"); + + return ret; +} + +MIX_RESULT mix_videofmt_h264_flush(MixVideoFormat *mix) { + +MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (mix == NULL) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + uint32 pret = 0; + MixInputBufferEntry *bufentry = NULL; + + + /* Chainup parent method. + We are not chaining up to parent method for now. + */ + +#if 0 + if (parent_class->flush) { + return parent_class->flush(mix, msg); + } +#endif + + MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + + g_mutex_lock(mix->objectlock); + + //Clear the contents of inputbufqueue + while (!g_queue_is_empty(mix->inputbufqueue)) + { + bufentry = (MixInputBufferEntry *) g_queue_pop_head( + mix->inputbufqueue); + if (bufentry == NULL) continue; + + mix_buffer_unref(bufentry->buf); + g_free(bufentry); + } + + //Clear parse_in_progress flag and current timestamp + mix->parse_in_progress = FALSE; + mix->discontinuity_frame_in_progress = FALSE; + mix->current_timestamp = 0; + + //Clear the DPB surface table + g_hash_table_remove_all(self->dpb_surface_table); + + //Call parser flush + pret = vbp_flush(mix->parser_handle); + if (pret != VBP_OK) + ret = MIX_RESULT_FAIL; + + g_mutex_unlock(mix->objectlock); + + LOG_V( "End\n"); + + return ret; +} + +MIX_RESULT mix_videofmt_h264_eos(MixVideoFormat *mix) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + vbp_data_h264 *data = NULL; + uint32 pret = 0; + + LOG_V( "Begin\n"); + + if (mix == NULL) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + /* Chainup parent method. + We are not chaining up to parent method for now. 
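Taken together, decode()/flush()/eos() above imply a driving order from the caller; a hypothetical client sequence (names from this patch, control flow abbreviated):

    /* per coded frame, possibly several input buffers each: */
    mix_videofmt_decode(fmt, bufs, nbufs, dec_params);
    /* on seek or reset: drop queued input buffers and the DPB surface table */
    mix_videofmt_flush(fmt);
    /* at end of stream: drain the last parsed frame, then signal the frame manager */
    mix_videofmt_eos(fmt);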
+ */ + +#if 0 + if (parent_class->eos) { + return parent_class->eos(mix, msg); + } +#endif + + g_mutex_lock(mix->objectlock); + + //if a frame is in progress, process the frame + if (mix->parse_in_progress) + { + //query for data + pret = vbp_query(mix->parser_handle, + (void *) &data); + + if ((pret != VBP_OK) || (data == NULL)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error getting last parse data\n"); + goto cleanup; + } + + //process and decode data + ret = mix_videofmt_h264_process_decode(mix, + data, mix->current_timestamp, + mix->discontinuity_frame_in_progress); + mix->parse_in_progress = FALSE; + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error processing last frame\n"); + goto cleanup; + } + + } + +cleanup: + + g_mutex_unlock(mix->objectlock); + + //Call Frame Manager with _eos() + ret = mix_framemanager_eos(mix->framemgr); + + LOG_V( "End\n"); + + return ret; + + +} + +MIX_RESULT mix_videofmt_h264_deinitialize(MixVideoFormat *mix) { + +//Note this method is not called; may remove in future + + LOG_V( "Begin\n"); + + if (mix == NULL) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + /* Chainup parent method. + */ + + if (parent_class->deinitialize) { + return parent_class->deinitialize(mix); + } + + //Most stuff is cleaned up in parent_class->finalize() and in _finalize + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} +#define HACK_DPB +#ifdef HACK_DPB +static inline void mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, + vbp_picture_data_h264* pic_data + ) +{ + + gboolean found = FALSE; + guint tflags = 0; + VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms; + VAPictureH264 *pRefList = NULL; + int i = 0, j = 0, k = 0, list = 0; + + MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + + //Set the surface ID for everything in the parser DPB to INVALID + for (i = 0; i < 16; i++) + { + pic_params->ReferenceFrames[i].picture_id = VA_INVALID_SURFACE; + pic_params->ReferenceFrames[i].frame_idx = -1; + pic_params->ReferenceFrames[i].TopFieldOrderCnt = -1; + pic_params->ReferenceFrames[i].BottomFieldOrderCnt = -1; + pic_params->ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID; //assuming we don't need to OR with existing flags + } + + pic_params->num_ref_frames = 0; + + for (i = 0; i < pic_data->num_slices; i++) + { + + //Copy from the List0 and List1 surface IDs + pRefList = pic_data->slc_data[i].slc_parms.RefPicList0; + for (list = 0; list < 2; list++) + { + for (j = 0; j < 32; j++) + { + if (pRefList[j].flags & VA_PICTURE_H264_INVALID) + { + break; //no more valid reference frames in this list + } + found = FALSE; + for (k = 0; k < pic_params->num_ref_frames; k++) + { + if (pic_params->ReferenceFrames[k].TopFieldOrderCnt == pRefList[j].TopFieldOrderCnt) + { + //check for complementary field + tflags = pic_params->ReferenceFrames[k].flags | pRefList[j].flags; + //If both TOP and BOTTOM are set, we'll clear those flags + if ((tflags & VA_PICTURE_H264_TOP_FIELD) && + (tflags & VA_PICTURE_H264_BOTTOM_FIELD)) + pic_params->ReferenceFrames[k].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; + found = TRUE; //already in the DPB; will not add this one + break; + } + } + if (!found) + { + guint poc = mix_videofmt_h264_get_poc(&(pRefList[j])); + gpointer video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc); + pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = + ((MixVideoFrame *)video_frame)->frame_id; + + LOG_V( "Inserting frame id %d into DPB\n", 
pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); + + pic_params->ReferenceFrames[pic_params->num_ref_frames].flags = + pRefList[j].flags; + pic_params->ReferenceFrames[pic_params->num_ref_frames].frame_idx = + pRefList[j].frame_idx; + pic_params->ReferenceFrames[pic_params->num_ref_frames].TopFieldOrderCnt = + pRefList[j].TopFieldOrderCnt; + pic_params->ReferenceFrames[pic_params->num_ref_frames++].BottomFieldOrderCnt = + pRefList[j].BottomFieldOrderCnt; + } + + } + pRefList = pic_data->slc_data[i].slc_parms.RefPicList1; + } + + } +} +#endif + + +MIX_RESULT mix_videofmt_h264_process_decode_picture(MixVideoFormat *mix, + vbp_data_h264 *data, + guint64 timestamp, + gboolean discontinuity, + int pic_index, + MixVideoFrame *frame) +{ + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + VADisplay vadisplay = NULL; + VAContextID vacontext; + guint buffer_id_cnt = 0; + VABufferID *buffer_ids = NULL; + + //TODO Partition this method into smaller methods + + LOG_V( "Begin\n"); + + if ((mix == NULL) || (data == NULL) || (data->pic_data == NULL) || (frame == NULL)) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + vbp_picture_data_h264* pic_data = &(data->pic_data[pic_index]); + + + //After this point, all exits from this function are through cleanup: + + if (!MIX_IS_VIDEOFORMAT_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + + VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms; + + if (pic_params == NULL) + { + ret = MIX_RESULT_NULL_PTR; + LOG_E( "Error reading parser data\n"); + goto cleanup; + } + + //TODO + //Check for frame gaps and repeat frames if necessary + + LOG_V( "num_slices is %d, allocating %d buffer_ids\n", pic_data->num_slices, (pic_data->num_slices * 2) + 2); + + buffer_ids = g_malloc(sizeof(VABufferID) * + ((pic_data->num_slices * 2) + 2)); + + if (buffer_ids == NULL) + { + LOG_E( "Cannot allocate buffer IDs\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + //Set up reference frames for the picture parameter buffer + + //Set the picture type (I, B or P frame) + //For H.264 we use the first encountered slice type for this (check - may need to change later to search all slices for B type) + MixFrameType frame_type = TYPE_INVALID; + + switch (pic_data->slc_data->slc_parms.slice_type) + { + case 0: + case 3: + case 5: + case 8: + frame_type = TYPE_P; + break; + case 1: + case 6: + frame_type = TYPE_B; + break; + case 2: + case 4: + case 7: + case 9: + frame_type = TYPE_I; + break; + default: + break; + } + + //Do not have to check for B frames after a seek + //Note: Demux should seek to IDR (instantaneous decoding refresh) frame, otherwise + // DPB will not be correct and frames may come in with invalid references + // This will be detected when DPB is checked for valid mapped surfaces and + // error returned from there. 
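+ + //Reminder of the layout of buffer_ids allocated above: one picture parameter + // buffer and one IQ matrix buffer, plus a slice parameter buffer and a slice + // data buffer for each slice, hence the (num_slices * 2) + 2 entries.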
+ + LOG_V( "Getting a new surface for frame_num %d\n", pic_params->frame_num); + LOG_V( "frame type is %d\n", frame_type); + + + + //Set the frame type for the frame object (used in reordering by frame manager) + ret = mix_videoframe_set_frame_type(frame, frame_type); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error setting frame type on frame\n"); + goto cleanup; + } + + LOG_V( "Updating DPB for libva\n"); + + //Now handle the reference frames and surface IDs for DPB and current frame + mix_videofmt_h264_handle_ref_frames(mix, pic_params, frame); + +#ifdef HACK_DPB + //We have to provide a hacked DPB rather than complete DPB for libva as workaround + mix_videofmt_h264_hack_dpb(mix, pic_data); +#endif + + //Libva buffer set up + + vadisplay = mix->va_display; + vacontext = mix->va_context; + + LOG_V( "Creating libva picture parameter buffer\n"); + LOG_V( "picture parameter buffer shows num_ref_frames is %d\n", pic_params->num_ref_frames); + + //First the picture parameter buffer + vret = vaCreateBuffer(vadisplay, vacontext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferH264), + 1, + pic_params, + &buffer_ids[buffer_id_cnt]); + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto cleanup; + } + + LOG_V( "Creating libva IQMatrix buffer\n"); + + + //Then the IQ matrix buffer + vret = vaCreateBuffer(vadisplay, vacontext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferH264), + 1, + data->IQ_matrix_buf, + &buffer_ids[buffer_id_cnt]); + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto cleanup; + } + + + //Now for slices + int i = 0; + gpointer video_frame; + for (;i < pic_data->num_slices; i++) + { + + LOG_V( "Creating libva slice parameter buffer, for slice %d\n", i); + + //Do slice parameters + + //First patch up the List0 and List1 surface IDs + int j = 0; + guint poc = 0; + for (; j <= pic_data->slc_data[i].slc_parms.num_ref_idx_l0_active_minus1; j++) + { + if (!(pic_data->slc_data[i].slc_parms.RefPicList0[j].flags & VA_PICTURE_H264_INVALID)) + { + poc = mix_videofmt_h264_get_poc(&(pic_data->slc_data[i].slc_parms.RefPicList0[j])); + video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc); + if (video_frame == NULL) + { + LOG_E( "unable to find surface of picture %d (current picture %d).", poc, mix_videofmt_h264_get_poc(&pic_params->CurrPic)); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + else + { + pic_data->slc_data[i].slc_parms.RefPicList0[j].picture_id = + ((MixVideoFrame *)video_frame)->frame_id; + } + } + + } + + if ((pic_data->slc_data->slc_parms.slice_type == 1) || (pic_data->slc_data->slc_parms.slice_type == 6)) + { + for (j = 0; j <= pic_data->slc_data[i].slc_parms.num_ref_idx_l1_active_minus1; j++) + { + if (!(pic_data->slc_data[i].slc_parms.RefPicList1[j].flags & VA_PICTURE_H264_INVALID)) + { + poc = mix_videofmt_h264_get_poc(&(pic_data->slc_data[i].slc_parms.RefPicList1[j])); + video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc); + if (video_frame == NULL) + { + LOG_E( "unable to find surface of picture %d (current picture %d).", poc, mix_videofmt_h264_get_poc(&pic_params->CurrPic)); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + else + { + pic_data->slc_data[i].slc_parms.RefPicList1[j].picture_id = + ((MixVideoFrame *)video_frame)->frame_id; + } + } + } + } + + + //Then do the libva setup + + vret = 
vaCreateBuffer(vadisplay, vacontext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264), + 1, + &(pic_data->slc_data[i].slc_parms), + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto cleanup; + } + + buffer_id_cnt++; + + + LOG_V( "Creating libva slice data buffer for slice %d, using slice address %x, with offset %d and size %u\n", i, (guint)pic_data->slc_data[i].buffer_addr, pic_data->slc_data[i].slc_parms.slice_data_offset, pic_data->slc_data[i].slice_size); + + + //Do slice data + + vret = vaCreateBuffer(vadisplay, vacontext, + VASliceDataBufferType, + //size + pic_data->slc_data[i].slice_size, + //num_elements + 1, + //slice data buffer pointer + //Note that this is the original data buffer ptr; + // offset to the actual slice data is provided in + // slice_data_offset in VASliceParameterBufferH264 + pic_data->slc_data[i].buffer_addr + pic_data->slc_data[i].slice_offset, + &buffer_ids[buffer_id_cnt]); + + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto cleanup; + } + + } + + gulong surface = 0; + + //Get our surface ID from the frame object + ret = mix_videoframe_get_frame_id(frame, &surface); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error getting surface ID from frame object\n"); + goto cleanup; + } + + LOG_V( "Calling vaBeginPicture\n"); + + //Now we can begin the picture + vret = vaBeginPicture(vadisplay, vacontext, surface); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaBeginPicture\n"); + goto cleanup; + } + + LOG_V( "Calling vaRenderPicture\n"); + + //Render the picture + vret = vaRenderPicture(vadisplay, vacontext, + buffer_ids, + buffer_id_cnt); + + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaRenderPicture\n"); + goto cleanup; + } + + LOG_V( "Calling vaEndPicture\n"); + + //End picture + vret = vaEndPicture(vadisplay, vacontext); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaEndPicture\n"); + goto cleanup; + } + + LOG_V( "Calling vaSyncSurface\n"); + + //Decode the picture + vret = vaSyncSurface(vadisplay, surface); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaSyncSurface\n"); + goto cleanup; + } + + + if (pic_index == 0) + { + //Set the discontinuity flag + mix_videoframe_set_discontinuity(frame, discontinuity); + + //Set the timestamp + mix_videoframe_set_timestamp(frame, timestamp); + + guint32 frame_structure = VA_FRAME_PICTURE; + if (pic_params->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD) + { + frame_structure = VA_TOP_FIELD; + } + else if (pic_params->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD) + { + frame_structure = VA_BOTTOM_FIELD; + } + mix_videoframe_set_frame_structure(frame, frame_structure); + } + else + { + // frame must be field-coded, no need to set + // discontinuity flag and timestamp again + mix_videoframe_set_frame_structure(frame, VA_BOTTOM_FIELD | VA_TOP_FIELD); + } + + //TODO need to save off frame when handling is added for repeat frames? 
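+ + //Both fields of a field pair are decoded into the same surface (the same + // frame object), which is why the timestamp and discontinuity flag are set + // only once, on the first field (pic_index 0).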
+ +//TODO Complete YUVDUMP code and move into base class +#ifdef YUVDUMP + if (mix_video_h264_counter < 10) + ret = GetImageFromSurface (mix, frame); +// g_usleep(5000000); +#endif /* YUVDUMP */ + + LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", timestamp); + + + cleanup: + + if (NULL != buffer_ids) + g_free(buffer_ids); + + + LOG_V( "End\n"); + + return ret; + +} + + +MIX_RESULT mix_videofmt_h264_process_decode(MixVideoFormat *mix, + vbp_data_h264 *data, + guint64 timestamp, + gboolean discontinuity) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + int i = 0; + + if ((mix == NULL) || (data == NULL)) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + //Get a frame from the surface pool + MixVideoFrame *frame = NULL; + + ret = mix_surfacepool_get(mix->surfacepool, &frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error getting frame from surfacepool\n"); + return MIX_RESULT_FAIL; + } + + + for (i = 0; i < data->num_pictures; i++) + { + ret = mix_videofmt_h264_process_decode_picture(mix, data, timestamp, discontinuity, i, frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Failed to process decode picture %d, error = %#X.", data->buf_number, ret); + break; + } + } + + if (ret == MIX_RESULT_SUCCESS) + { + //Enqueue the decoded frame using frame manager + ret = mix_framemanager_enqueue(mix->framemgr, frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error enqueuing frame object\n"); + mix_videoframe_unref(frame); + } + + } + else + { + mix_videoframe_unref(frame); + } + mix_videofmt_h264_release_input_buffers(mix, timestamp); + + return ret; +} + +MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix, + VAPictureParameterBufferH264* pic_params, + MixVideoFrame * current_frame + ) { + + guint poc = 0; + + LOG_V( "Begin\n"); + + if (mix == NULL || current_frame == NULL || pic_params == NULL) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + + LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d. 
Surface ID is %d\n", pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, pic_params->CurrPic.BottomFieldOrderCnt, (gint) current_frame->frame_id); + +#ifdef MIX_LOG_ENABLE + if (pic_params->CurrPic.flags & VA_PICTURE_H264_INVALID) + LOG_V( "Flags show VA_PICTURE_H264_INVALID\n"); + + if (pic_params->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD) + LOG_V( "Flags show VA_PICTURE_H264_TOP_FIELD\n"); + + if (pic_params->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD) + LOG_V( "Flags show VA_PICTURE_H264_BOTTOM_FIELD\n"); + + if (pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) + LOG_V( "Flags show VA_PICTURE_H264_SHORT_TERM_REFERENCE\n"); + + if (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE) + LOG_V( "Flags show VA_PICTURE_H264_LONG_TERM_REFERENCE\n"); +#endif + + MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + + + //First we need to check the parser DBP against our DPB table + //So for each item in our DBP table, we look to see if it is in the parser DPB + //If it is not, it gets unrefed and removed +#ifdef MIX_LOG_ENABLE + guint num_removed = +#endif + g_hash_table_foreach_remove(self->dpb_surface_table, mix_videofmt_h264_check_in_DPB, pic_params); + + LOG_V( "%d entries removed from DPB surface table at this frame\n", num_removed); + + + MixVideoFrame *mvf = NULL; + gboolean found = FALSE; + //Set the surface ID for everything in the parser DPB + int i = 0; + for (; i < 16; i++) + { + if (!(pic_params->ReferenceFrames[i].flags & VA_PICTURE_H264_INVALID)) + { + + poc = mix_videofmt_h264_get_poc(&(pic_params->ReferenceFrames[i])); + LOG_V( "Looking up poc %d in dpb table\n", poc); + found = g_hash_table_lookup_extended(self->dpb_surface_table, (gpointer)poc, NULL, (gpointer)&mvf); + + if (found) + { + pic_params->ReferenceFrames[i].picture_id = mvf->frame_id; + LOG_V( "Looked up poc %d in dpb table found frame ID %d\n", poc, (gint)mvf->frame_id); + } else { + LOG_V( "Looking up poc %d in dpb table did not find value\n", poc); + } + LOG_V( "For poc %d, set surface id for DPB index %d to %d\n", poc, i, (gint)pic_params->ReferenceFrames[i].picture_id); + } + + } + + + //Set picture_id for current picture + pic_params->CurrPic.picture_id = current_frame->frame_id; + + //Check to see if current frame is a reference frame + if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) + { + //Get current frame's POC + poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); + + //Increment the reference count for this frame + mix_videoframe_ref(current_frame); + + LOG_V( "Inserting poc %d, surfaceID %d\n", poc, (gint)current_frame->frame_id); + //Add this frame to the DPB surface table + g_hash_table_insert(self->dpb_surface_table, (gpointer)poc, current_frame); + } + + + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} + +guint mix_videofmt_h264_get_poc(VAPictureH264 *pic) +{ + + if (pic == NULL) + return 0; + + if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) + return pic->BottomFieldOrderCnt; + + + if (pic->flags & VA_PICTURE_H264_TOP_FIELD) + return pic->TopFieldOrderCnt; + + return pic->TopFieldOrderCnt; + +} + + +gboolean mix_videofmt_h264_check_in_DPB(gpointer key, gpointer value, gpointer user_data) +{ + gboolean ret = TRUE; + + if ((value == NULL) || (user_data == NULL)) //Note that 0 is valid value for key + return FALSE; + + VAPictureH264* vaPic = NULL; + int i = 0; + for (; i < 16; i++) + { + vaPic = 
&(((VAPictureParameterBufferH264*)user_data)->ReferenceFrames[i]); + if (vaPic->flags & VA_PICTURE_H264_INVALID) + continue; + + if ((guint)key == vaPic->TopFieldOrderCnt || + (guint)key == vaPic->BottomFieldOrderCnt) + { + ret = FALSE; + break; + } + } + + return ret; +} + +void mix_videofmt_h264_destroy_DPB_key(gpointer data) +{ +//TODO remove this method and don't register it with the hash table foreach call; it is no longer needed + LOG_V( "Begin, poc of %d\n", (guint)data); + LOG_V( "End\n"); + + return; +} + +void mix_videofmt_h264_destroy_DPB_value(gpointer data) +{ + LOG_V( "Begin\n"); + if (data == NULL) + return ; + mix_videoframe_unref((MixVideoFrame *)data); + + return; +} + + +MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix, + guint64 timestamp + ) { + + MixInputBufferEntry *bufentry = NULL; + gboolean done = FALSE; + + LOG_V( "Begin\n"); + + if (mix == NULL) + return MIX_RESULT_NULL_PTR; + + //Dequeue and release all input buffers for this frame + + LOG_V( "Releasing all the MixBuffers for this frame\n"); + + //While the head of the queue has timestamp == current ts + //dequeue the entry, unref the MixBuffer, and free the struct + done = FALSE; + while (!done) + { + bufentry = (MixInputBufferEntry *) g_queue_peek_head( + mix->inputbufqueue); + if (bufentry == NULL) break; + LOG_V( "head of queue buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)bufentry->buf, timestamp, bufentry->timestamp); + + if (bufentry->timestamp != timestamp) + { + LOG_V( "buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)bufentry->buf, timestamp, bufentry->timestamp); + done = TRUE; + break; + } + + bufentry = (MixInputBufferEntry *) g_queue_pop_head( + mix->inputbufqueue); + LOG_V( "Unref this MixBuffer %x\n", (guint)bufentry->buf); + mix_buffer_unref(bufentry->buf); + g_free(bufentry); + } + + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} + + + diff --git a/mix_video/src/mixvideoformat_h264.h b/mix_video/src/mixvideoformat_h264.h new file mode 100644 index 0000000..a04048c --- /dev/null +++ b/mix_video/src/mixvideoformat_h264.h @@ -0,0 +1,129 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEOFORMAT_H264_H__ +#define __MIX_VIDEOFORMAT_H264_H__ + +#include "mixvideoformat.h" +#include "mixvideoframe_private.h" + +#define MIX_VIDEO_H264_SURFACE_NUM 20 + +/* + * Type macros. 
+ */ +#define MIX_TYPE_VIDEOFORMAT_H264 (mix_videoformat_h264_get_type ()) +#define MIX_VIDEOFORMAT_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMAT_H264, MixVideoFormat_H264)) +#define MIX_IS_VIDEOFORMAT_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMAT_H264)) +#define MIX_VIDEOFORMAT_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMAT_H264, MixVideoFormat_H264Class)) +#define MIX_IS_VIDEOFORMAT_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMAT_H264)) +#define MIX_VIDEOFORMAT_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMAT_H264, MixVideoFormat_H264Class)) + +typedef struct _MixVideoFormat_H264 MixVideoFormat_H264; +typedef struct _MixVideoFormat_H264Class MixVideoFormat_H264Class; + +struct _MixVideoFormat_H264 { + /*< public > */ + MixVideoFormat parent; + + /*< public > */ + + /*< private > */ + GHashTable *dpb_surface_table; +}; + +/** + * MixVideoFormat_H264Class: + * + * MI-X Video object class + */ +struct _MixVideoFormat_H264Class { + /*< public > */ + MixVideoFormatClass parent_class; + + /* class members */ + + /*< public > */ +}; + +/** + * mix_videoformat_h264_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videoformat_h264_get_type(void); + +/** + * mix_videoformat_h264_new: + * @returns: A newly allocated instance of #MixVideoFormat_H264 + * + * Use this method to create new instance of #MixVideoFormat_H264 + */ +MixVideoFormat_H264 *mix_videoformat_h264_new(void); + +/** + * mix_videoformat_h264_ref: + * @mix: object to add reference + * @returns: the MixVideoFormat_H264 instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoFormat_H264 *mix_videoformat_h264_ref(MixVideoFormat_H264 * mix); + +/** + * mix_videoformat_h264_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. 
+ */ +#define mix_videoformat_h264_unref(obj) g_object_unref (G_OBJECT(obj)) + +/* Class Methods */ + +/* H.264 vmethods */ +MIX_RESULT mix_videofmt_h264_getcaps(MixVideoFormat *mix, GString *msg); +MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); +MIX_RESULT mix_videofmt_h264_decode(MixVideoFormat *mix, MixBuffer * bufin[], + gint bufincnt, MixVideoDecodeParams * decode_params); +MIX_RESULT mix_videofmt_h264_flush(MixVideoFormat *mix); +MIX_RESULT mix_videofmt_h264_eos(MixVideoFormat *mix); +MIX_RESULT mix_videofmt_h264_deinitialize(MixVideoFormat *mix); + +/* Local Methods */ + +MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix, + VAPictureParameterBufferH264* pic_params, + MixVideoFrame * current_frame); + + +MIX_RESULT mix_videofmt_h264_process_decode(MixVideoFormat *mix, + vbp_data_h264 *data, + guint64 timestamp, + gboolean discontinuity); + + +MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix, + guint64 timestamp); + + +/* Helper functions to manage the DPB table */ +gboolean mix_videofmt_h264_check_in_DPB(gpointer key, gpointer value, gpointer user_data); +void mix_videofmt_h264_destroy_DPB_key(gpointer data); +void mix_videofmt_h264_destroy_DPB_value(gpointer data); +guint mix_videofmt_h264_get_poc(VAPictureH264 *pic); + + + + +#endif /* __MIX_VIDEOFORMAT_H264_H__ */ diff --git a/mix_video/src/mixvideoformat_mp42.c b/mix_video/src/mixvideoformat_mp42.c new file mode 100644 index 0000000..3aae249 --- /dev/null +++ b/mix_video/src/mixvideoformat_mp42.c @@ -0,0 +1,1416 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+ */ +#include <glib.h> +#include <string.h> +#include "mixvideolog.h" +#include "mixvideoformat_mp42.h" + +enum { + MP4_VOP_TYPE_I = 0, + MP4_VOP_TYPE_P = 1, + MP4_VOP_TYPE_B = 2, + MP4_VOP_TYPE_S = 3, +}; + +/* + * This is for DivX packed stream + */ +typedef struct _PackedStream PackedStream; +struct _PackedStream { + vbp_picture_data_mp42 *picture_data; + MixBuffer *mix_buffer; +}; + +/* + * Clone and destroy vbp_picture_data_mp42 + */ +static vbp_picture_data_mp42 *mix_videoformat_mp42_clone_picture_data( + vbp_picture_data_mp42 *picture_data); +static void mix_videoformat_mp42_free_picture_data( + vbp_picture_data_mp42 *picture_data); +static void mix_videoformat_mp42_flush_packed_stream_queue( + GQueue *packed_stream_queue); + +/* The parent class. The pointer will be saved + * in this class's initialization. The pointer + * can be used for chaining method call if needed. + */ +static MixVideoFormatClass *parent_class = NULL; + +static void mix_videoformat_mp42_finalize(GObject * obj); + +/* + * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMAT + */ +G_DEFINE_TYPE( MixVideoFormat_MP42, mix_videoformat_mp42, MIX_TYPE_VIDEOFORMAT); + +static void mix_videoformat_mp42_init(MixVideoFormat_MP42 * self) { + MixVideoFormat *parent = MIX_VIDEOFORMAT(self); + + self->reference_frames[0] = NULL; + self->reference_frames[1] = NULL; + + self->last_frame = NULL; + self->last_vop_coding_type = -1; + + self->packed_stream_queue = NULL; + + /* NOTE: we don't need to do this here. + * This just demonstrates how to access + * member variables belonging to the parent + */ + parent->initialized = FALSE; +} + +static void mix_videoformat_mp42_class_init(MixVideoFormat_MP42Class * klass) { + + /* root class */ + GObjectClass *gobject_class = (GObjectClass *) klass; + + /* direct parent class */ + MixVideoFormatClass *video_format_class = MIX_VIDEOFORMAT_CLASS(klass); + + /* parent class for later use */ + parent_class = g_type_class_peek_parent(klass); + + /* setup finalizer */ + gobject_class->finalize = mix_videoformat_mp42_finalize; + + /* setup vmethods with base implementation */ + video_format_class->getcaps = mix_videofmt_mp42_getcaps; + video_format_class->initialize = mix_videofmt_mp42_initialize; + video_format_class->decode = mix_videofmt_mp42_decode; + video_format_class->flush = mix_videofmt_mp42_flush; + video_format_class->eos = mix_videofmt_mp42_eos; + video_format_class->deinitialize = mix_videofmt_mp42_deinitialize; +} + +MixVideoFormat_MP42 *mix_videoformat_mp42_new(void) { + MixVideoFormat_MP42 *ret = g_object_new(MIX_TYPE_VIDEOFORMAT_MP42, NULL); + + return ret; +} + +void mix_videoformat_mp42_finalize(GObject * obj) { + /* clean up here. 
*/ + + /* MixVideoFormat_MP42 *mix = MIX_VIDEOFORMAT_MP42(obj); */ + GObjectClass *root_class = (GObjectClass *) parent_class; + MixVideoFormat *parent = NULL; + gint32 vbp_ret = VBP_OK; + MixVideoFormat_MP42 *self = NULL; + + LOG_V("Begin\n"); + + if (obj == NULL) { + LOG_E("obj is NULL\n"); + return; + } + + if (!MIX_IS_VIDEOFORMAT_MP42(obj)) { + LOG_E("obj is not mixvideoformat_mp42\n"); + return; + } + + self = MIX_VIDEOFORMAT_MP42(obj); + parent = MIX_VIDEOFORMAT(self); + + //surfacepool is deallocated by parent + //inputbufqueue is deallocated by parent + //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces + + g_mutex_lock(parent->objectlock); + + /* unref reference frames */ + { + gint idx = 0; + for (idx = 0; idx < 2; idx++) { + if (self->reference_frames[idx] != NULL) { + mix_videoframe_unref(self->reference_frames[idx]); + self->reference_frames[idx] = NULL; + } + } + } + + + /* Reset state */ + parent->initialized = FALSE; + parent->parse_in_progress = FALSE; + parent->discontinuity_frame_in_progress = FALSE; + parent->current_timestamp = 0; + + /* Close the parser */ + vbp_ret = vbp_close(parent->parser_handle); + parent->parser_handle = NULL; + + if (self->packed_stream_queue) { + mix_videoformat_mp42_flush_packed_stream_queue(self->packed_stream_queue); + g_queue_free(self->packed_stream_queue); + } + self->packed_stream_queue = NULL; + + g_mutex_unlock(parent->objectlock); + + /* Chain up parent */ + if (root_class->finalize) { + root_class->finalize(obj); + } + + LOG_V("End\n"); +} + +MixVideoFormat_MP42 * +mix_videoformat_mp42_ref(MixVideoFormat_MP42 * mix) { + return (MixVideoFormat_MP42 *) g_object_ref(G_OBJECT(mix)); +} + +/* MP42 vmethods implementation */ +MIX_RESULT mix_videofmt_mp42_getcaps(MixVideoFormat *mix, GString *msg) { + +//This method is reserved for future use + + LOG_V("Begin\n"); + if (parent_class->getcaps) { + return parent_class->getcaps(mix, msg); + } + + LOG_V("End\n"); + return MIX_RESULT_NOTIMPL; +} + +MIX_RESULT mix_videofmt_mp42_initialize(MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, MixSurfacePool ** surface_pool, + VADisplay va_display) { + uint32 vbp_ret = 0; + MIX_RESULT ret = MIX_RESULT_FAIL; + + vbp_data_mp42 *data = NULL; + MixVideoFormat *parent = NULL; + MixIOVec *header = NULL; + + VAProfile va_profile = VAProfileMPEG4AdvancedSimple; + VAConfigAttrib attrib; + + VAStatus va_ret = VA_STATUS_SUCCESS; + guint number_extra_surfaces = 0; + VASurfaceID *surfaces = NULL; + guint numSurfaces = 0; + + MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); + + if (mix == NULL || config_params == NULL || frame_mgr == NULL) { + return MIX_RESULT_NULL_PTR; + } + + if (!MIX_IS_VIDEOFORMAT_MP42(mix)) { + return MIX_RESULT_INVALID_PARAM; + } + + LOG_V("begin\n"); + + if (parent_class->initialize) { + ret = parent_class->initialize(mix, config_params, frame_mgr, + input_buf_pool, surface_pool, va_display); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to initialize parent!\n"); + return ret; + } + } + + parent = MIX_VIDEOFORMAT(mix); + + g_mutex_lock(parent->objectlock); + + parent->initialized = FALSE; + + vbp_ret = vbp_open(VBP_MPEG4, &(parent->parser_handle)); + + if (vbp_ret != VBP_OK) { + LOG_E("Failed to call vbp_open()\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + /* + * avidemux doesn't pass codec_data, we need to handle this. 
+ */ + + LOG_V("Try to get header data from config_param\n"); + + ret = mix_videoconfigparamsdec_get_header(config_params, &header); + if (ret == MIX_RESULT_SUCCESS && header != NULL) { + + LOG_V("Found header data from config_param\n"); + vbp_ret = vbp_parse(parent->parser_handle, header->data, header->data_size, + TRUE); + + LOG_V("vbp_parse() returns 0x%x\n", vbp_ret); + + g_free(header->data); + g_free(header); + + if (!((vbp_ret == VBP_OK) || (vbp_ret == VBP_DONE))) { + LOG_E("Failed to call vbp_parse() to parse header data!\n"); + goto cleanup; + } + + /* Get the header data and save */ + + LOG_V("Call vbp_query()\n"); + vbp_ret = vbp_query(parent->parser_handle, (void *) &data); + LOG_V("vbp_query() returns 0x%x\n", vbp_ret); + + if ((vbp_ret != VBP_OK) || (data == NULL)) { + LOG_E("Failed to call vbp_query() to query header data parsing result\n"); + goto cleanup; + } + + if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) { + va_profile = VAProfileMPEG4AdvancedSimple; + LOG_V("The profile is VAProfileMPEG4AdvancedSimple from header data\n"); + } else { + va_profile = VAProfileMPEG4Simple; + LOG_V("The profile is VAProfileMPEG4Simple from header data\n"); + } + } + + va_display = parent->va_display; + + /* We are requesting RT attributes */ + attrib.type = VAConfigAttribRTFormat; + + va_ret = vaGetConfigAttributes(va_display, va_profile, VAEntrypointVLD, + &attrib, 1); + if (va_ret != VA_STATUS_SUCCESS) { + LOG_E("Failed to call vaGetConfigAttributes()\n"); + goto cleanup; + } + + if ((attrib.value & VA_RT_FORMAT_YUV420) == 0) { + LOG_E("The attrib.value is wrong!\n"); + goto cleanup; + } + + va_ret = vaCreateConfig(va_display, va_profile, VAEntrypointVLD, &attrib, + 1, &(parent->va_config)); + + if (va_ret != VA_STATUS_SUCCESS) { + LOG_E("Failed to call vaCreateConfig()!\n"); + goto cleanup; + } + + ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, + &number_extra_surfaces); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to call mix_videoconfigparams_get_extra_surface_allocation()!\n"); + goto cleanup; + } + + parent->va_num_surfaces = number_extra_surfaces + 4; + if (parent->va_num_surfaces > MIX_VIDEO_MP42_SURFACE_NUM) { + parent->va_num_surfaces = MIX_VIDEO_MP42_SURFACE_NUM; + } + + numSurfaces = parent->va_num_surfaces; + + parent->va_surfaces = g_malloc(sizeof(VASurfaceID) * numSurfaces); + if (!parent->va_surfaces) { + LOG_E("Not enough memory to allocate surfaces!\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + surfaces = parent->va_surfaces; + + va_ret = vaCreateSurfaces(va_display, parent->picture_width, + parent->picture_height, VA_RT_FORMAT_YUV420, numSurfaces, + surfaces); + if (va_ret != VA_STATUS_SUCCESS) { + LOG_E("Failed to call vaCreateSurfaces()!\n"); + goto cleanup; + } + + parent->surfacepool = mix_surfacepool_new(); + if (parent->surfacepool == NULL) { + LOG_E("Not enough memory to create surface pool!\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + *surface_pool = parent->surfacepool; + + ret = mix_surfacepool_initialize(parent->surfacepool, surfaces, + numSurfaces); + + /* Initialize and save the VA context ID + * Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 + */ + va_ret = vaCreateContext(va_display, parent->va_config, + parent->picture_width, parent->picture_height, 0, surfaces, + numSurfaces, &(parent->va_context)); + + if (va_ret != VA_STATUS_SUCCESS) { + LOG_E("Failed to call vaCreateContext()!\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + /* + * Packed stream 
queue + */ + + self->packed_stream_queue = g_queue_new(); + if (!self->packed_stream_queue) { + LOG_E("Failed to create packed stream queue!\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + self->last_frame = NULL; + self->last_vop_coding_type = -1; + parent->initialized = TRUE; + ret = MIX_RESULT_SUCCESS; + + cleanup: + + g_mutex_unlock(parent->objectlock); + + LOG_V("End\n"); + + return ret; +} + +MIX_RESULT mix_videofmt_mp42_decode(MixVideoFormat *mix, MixBuffer * bufin[], + gint bufincnt, MixVideoDecodeParams * decode_params) { + uint32 vbp_ret = 0; + MixVideoFormat *parent = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + guint64 ts = 0; + vbp_data_mp42 *data = NULL; + gboolean discontinuity = FALSE; + MixInputBufferEntry *bufentry = NULL; + gint i = 0; + + LOG_V("Begin\n"); + + if (mix == NULL || bufin == NULL || decode_params == NULL) { + return MIX_RESULT_NULL_PTR; + } + + if (!MIX_IS_VIDEOFORMAT_MP42(mix)) { + return MIX_RESULT_INVALID_PARAM; + } + + parent = MIX_VIDEOFORMAT(mix); + + g_mutex_lock(parent->objectlock); + + ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get timestamp\n"); + goto cleanup; + } + + LOG_I("ts after mix_videodecodeparams_get_timestamp() = %"G_GINT64_FORMAT"\n", ts); + + ret + = mix_videodecodeparams_get_discontinuity(decode_params, + &discontinuity); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get discontinuity\n"); + goto cleanup; + } + + /* If this is a new frame and we haven't retrieved parser + * workload data from previous frame yet, do so + */ + + if ((ts != parent->current_timestamp) && (parent->parse_in_progress)) { + + LOG_V("timestamp changed and parsing is still in progress\n"); + + /* this is new data and the old data parsing is not complete, continue + * to parse the old data + */ + vbp_ret = vbp_query(parent->parser_handle, (void *) &data); + LOG_V("vbp_query() returns 0x%x\n", vbp_ret); + + if ((vbp_ret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E("vbp_ret != VBP_OK || data == NULL\n"); + goto cleanup; + } + + ret = mix_videofmt_mp42_process_decode(mix, data, + parent->current_timestamp, + parent->discontinuity_frame_in_progress); + + if (ret != MIX_RESULT_SUCCESS) { + /* We log this but need to process + * the new frame data, so do not return + */ + LOG_W("process_decode failed.\n"); + } + + /* we are done parsing for old data */ + parent->parse_in_progress = FALSE; + } + + parent->current_timestamp = ts; + parent->discontinuity_frame_in_progress = discontinuity; + + /* we parse data buffers one by one */ + for (i = 0; i < bufincnt; i++) { + + LOG_V( + "Calling parse for current frame, parse handle %d, buf %x, size %d\n", + (int) parent->parser_handle, (guint) bufin[i]->data, + bufin[i]->size); + + vbp_ret = vbp_parse(parent->parser_handle, bufin[i]->data, + bufin[i]->size, FALSE); + + LOG_V("vbp_parse() returns 0x%x\n", vbp_ret); + + /* The parser failed to parse */ + if (vbp_ret != VBP_DONE && vbp_ret != VBP_OK) { + LOG_E("vbp_parse() ret = %d\n", vbp_ret); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V("vbp_parse() ret = %d\n", vbp_ret); + + if (vbp_ret == VBP_OK || vbp_ret == VBP_DONE) { + + LOG_V("Now, parsing is done (VBP_DONE)!\n"); + + vbp_ret = vbp_query(parent->parser_handle, (void *) &data); + LOG_V("vbp_query() returns 0x%x\n", vbp_ret); + + if ((vbp_ret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + /* Increase the ref count of this input buffer */ + mix_buffer_ref(bufin[i]); + + 
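/* This extra reference keeps the MixBuffer alive while the entry + * created below sits in inputbufqueue; + * mix_videofmt_mp42_release_input_buffers() drops it once the frame + * with this timestamp has been decoded. + */ +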
/* Create a new MixInputBufferEntry + * TODO: make this from a pool later + */ + bufentry = g_malloc(sizeof(MixInputBufferEntry)); + if (bufentry == NULL) { + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + bufentry->buf = bufin[i]; + bufentry->timestamp = ts; + + LOG_I("bufentry->buf = %x bufentry->timestamp FOR VBP_DONE = %"G_GINT64_FORMAT"\n", bufentry->buf, bufentry->timestamp); + + /* Enqueue this input buffer */ + g_queue_push_tail(parent->inputbufqueue, (gpointer) bufentry); + + /* process and decode data */ + ret + = mix_videofmt_mp42_process_decode(mix, data, ts, + discontinuity); + + if (ret != MIX_RESULT_SUCCESS) { + /* We log this but continue since we need + * to complete our processing + */ + LOG_W("process_decode failed.\n"); + } + + LOG_V("Called process and decode for current frame\n"); + + parent->parse_in_progress = FALSE; + + } +#if 0 + /* + * The DHG parser checks for next_sc; if next_sc is a start code, it thinks the current parsing is done: VBP_DONE. + * For our situation, this is not the case. The start code always comes at the beginning of the gstbuffer. At the end of a frame, + * the next start code is never found. + */ + + else if (vbp_ret == VBP_OK) { + + LOG_V("Now, parsing is not done (VBP_OK)!\n"); + + LOG_V( + "Enqueuing buffer and going on to next (if any) for this frame\n"); + + /* Increase the ref count of this input buffer */ + mix_buffer_ref(bufin[i]); + + /* Create a new MixInputBufferEntry + * TODO make this from a pool later + */ + bufentry = g_malloc(sizeof(MixInputBufferEntry)); + if (bufentry == NULL) { + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + bufentry->buf = bufin[i]; + bufentry->timestamp = ts; + LOG_I("bufentry->buf = %x bufentry->timestamp FOR VBP_OK = %"G_GINT64_FORMAT"\n", bufentry->buf, bufentry->timestamp); + + /* Enqueue this input buffer */ + g_queue_push_tail(parent->inputbufqueue, (gpointer) bufentry); + parent->parse_in_progress = TRUE; + } +#endif + } + + cleanup: + + g_mutex_unlock(parent->objectlock); + + LOG_V("End\n"); + + return ret; +} + +MIX_RESULT mix_videofmt_mp42_process_decode(MixVideoFormat *mix, + vbp_data_mp42 *data, guint64 timestamp, gboolean discontinuity) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus va_ret = VA_STATUS_SUCCESS; + VADisplay va_display = NULL; + VAContextID va_context; + + MixVideoFormat_MP42 *self = NULL; + vbp_picture_data_mp42 *picture_data = NULL; + VAPictureParameterBufferMPEG4 *picture_param = NULL; + VAIQMatrixBufferMPEG4 *iq_matrix_buffer = NULL; + vbp_slice_data_mp42 *slice_data = NULL; + VASliceParameterBufferMPEG4 *slice_param = NULL; + + gint frame_type = -1; + guint buffer_id_number = 0; + guint buffer_id_cnt = 0; + VABufferID *buffer_ids = NULL; + MixVideoFrame *frame = NULL; + + gint idx = 0, jdx = 0; + gulong surface = 0; + + MixBuffer *mix_buffer = NULL; + gboolean is_from_queued_data = FALSE; + + LOG_V("Begin\n"); + + if ((mix == NULL) || (data == NULL)) { + return MIX_RESULT_NULL_PTR; + } + + if (!MIX_IS_VIDEOFORMAT_MP42(mix)) { + return MIX_RESULT_INVALID_PARAM; + } + + self = MIX_VIDEOFORMAT_MP42(mix); + + LOG_V("data->number_pictures = %d\n", data->number_pictures); + + if (data->number_pictures == 0) { + LOG_W("data->number_pictures == 0\n"); + mix_videofmt_mp42_release_input_buffers(mix, timestamp); + return ret; + } + + is_from_queued_data = FALSE; + + /* Do we have packed frames? */ + if (data->number_pictures > 1) { + + /* + + Assumption: + + 1. In one packed frame, there's only one P or I frame and the + reference frame will be the first one in the packed frame + 2. 
In a packed frame, there's no skipped frame (vop_coded = 0) + 3. In one packed frame, if there are n B frames, there will be + n N-VOP frames to follow the packed frame. + The timestamp of each N-VOP frame will be used for each B frame + in the packed frame + 4. N-VOP frame is the frame with vop_coded = 0. + + {P, B, B, B }, N, N, N, P, P, P, I, ... + + */ + + MixInputBufferEntry *bufentry = NULL; + PackedStream *packed_stream = NULL; + vbp_picture_data_mp42 *cloned_picture_data = NULL; + + LOG_V("This is packed frame\n"); + + /* + * Is the packed_frame_queue empty? If not, how come + * a packed frame can follow another packed frame without + * the necessary number of N-VOP frames between them? + */ + + if (!g_queue_is_empty(self->packed_stream_queue)) { + ret = MIX_RESULT_FAIL; + LOG_E("The previous packed frame is not fully processed yet!\n"); + goto cleanup; + } + + /* Packed frame shall be something like this {P, B, B, B, ... B } */ + for (idx = 0; idx < data->number_pictures; idx++) { + picture_data = &(data->picture_data[idx]); + picture_param = &(picture_data->picture_param); + frame_type = picture_param->vop_fields.bits.vop_coding_type; + + /* Is the first frame in the packed frames a reference frame? */ + if (idx == 0 && frame_type != MP4_VOP_TYPE_I && frame_type + != MP4_VOP_TYPE_P) { + ret = MIX_RESULT_FAIL; + LOG_E("The first frame in packed frame is not I or P\n"); + goto cleanup; + } + + if (idx != 0 && frame_type != MP4_VOP_TYPE_B) { + ret = MIX_RESULT_FAIL; + LOG_E("The frame other than the first one in packed frame is not B\n"); + goto cleanup; + } + + if (picture_data->vop_coded == 0) { + ret = MIX_RESULT_FAIL; + LOG_E("In packed frame, there's unexpected skipped frame\n"); + goto cleanup; + } + } + + LOG_V("The packed frame looks valid\n"); + + /* Okay, the packed-frame looks ok. Now, we enqueue all the B frames */ + bufentry + = (MixInputBufferEntry *) g_queue_peek_head(mix->inputbufqueue); + if (bufentry == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E("There's no data in inputbufqueue\n"); + goto cleanup; + } + + LOG_V("Enqueue all B frames in the packed frame\n"); + + mix_buffer = bufentry->buf; + for (idx = 1; idx < data->number_pictures; idx++) { + picture_data = &(data->picture_data[idx]); + cloned_picture_data = mix_videoformat_mp42_clone_picture_data( + picture_data); + if (!cloned_picture_data) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to allocate memory for cloned picture_data\n"); + goto cleanup; + } + + packed_stream = g_malloc(sizeof(PackedStream)); + if (packed_stream == NULL) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to allocate memory for packed_stream\n"); + goto cleanup; + } + + packed_stream->mix_buffer = mix_buffer_ref(mix_buffer); + packed_stream->picture_data = cloned_picture_data; + + g_queue_push_tail(self->packed_stream_queue, + (gpointer) packed_stream); + } + + LOG_V("Prepare to decode the first frame in the packed frame\n"); + + /* we are going to process the first frame */ + picture_data = &(data->picture_data[0]); + + } else { + + LOG_V("This is a single frame\n"); + + /* Okay, we only have one frame */ + if (g_queue_is_empty(self->packed_stream_queue)) { + /* If the packed_stream_queue is empty, everything is fine */ + picture_data = &(data->picture_data[0]); + + LOG_V("There's no packed frame not processed yet\n"); + + } else { + /* The packed_stream_queue is not empty, is this frame N-VOP? 
*/ + picture_data = &(data->picture_data[0]); + if (picture_data->vop_coded != 0) { + + LOG_V("The packed frame queue is not empty, we will flush it\n"); + + /* + * Unexpected! We flush the packed_stream_queue and begin to process the + * current frame if it is not a B frame + */ + mix_videoformat_mp42_flush_packed_stream_queue( + self->packed_stream_queue); + + picture_param = &(picture_data->picture_param); + frame_type = picture_param->vop_fields.bits.vop_coding_type; + + if (frame_type == MP4_VOP_TYPE_B) { + ret = MIX_RESULT_FAIL; + LOG_E("The frame right after a packed frame is a B frame!\n"); + goto cleanup; + } + + } else { + /* This is N-VOP, process B frame from the packed_stream_queue */ + PackedStream *packed_stream = NULL; + + LOG_V("N-VOP found, we ignore it and start to process the B frame from the packed frame queue\n"); + + packed_stream = (PackedStream *) g_queue_pop_head( + self->packed_stream_queue); + picture_data = packed_stream->picture_data; + mix_buffer = packed_stream->mix_buffer; + g_free(packed_stream); + is_from_queued_data = TRUE; + } + } + } + + picture_param = &(picture_data->picture_param); + iq_matrix_buffer = &(picture_data->iq_matrix_buffer); + + if (picture_param == NULL) { + ret = MIX_RESULT_NULL_PTR; + LOG_E("picture_param == NULL\n"); + goto cleanup; + } + + /* If the frame type is not I, P or B */ + frame_type = picture_param->vop_fields.bits.vop_coding_type; + if (frame_type != MP4_VOP_TYPE_I && frame_type != MP4_VOP_TYPE_P + && frame_type != MP4_VOP_TYPE_B) { + ret = MIX_RESULT_FAIL; + LOG_E("frame_type is not I, P or B. frame_type = %d\n", frame_type); + goto cleanup; + } + + /* + * This is a skipped frame (vop_coded = 0) + * Please note that this is not an N-VOP (DivX). + */ + if (picture_data->vop_coded == 0) { + + MixVideoFrame *skip_frame = NULL; + gulong frame_id = VA_INVALID_SURFACE; + + LOG_V("vop_coded == 0\n"); + if (self->last_frame == NULL) { + LOG_W("Previous frame is NULL\n"); + + /* + * We shouldn't get a skipped frame + * before we are able to get a real frame + */ + ret = MIX_RESULT_DROPFRAME; + goto cleanup; + } + + skip_frame = mix_videoframe_new(); + ret = mix_videoframe_set_is_skipped(skip_frame, TRUE); + mix_videoframe_ref(self->last_frame); + + ret = mix_videoframe_get_frame_id(self->last_frame, &frame_id); + ret = mix_videoframe_set_frame_id(skip_frame, frame_id); + ret = mix_videoframe_set_frame_type(skip_frame, MP4_VOP_TYPE_P); + ret = mix_videoframe_set_real_frame(skip_frame, self->last_frame); + ret = mix_videoframe_set_timestamp(skip_frame, timestamp); + ret = mix_videoframe_set_discontinuity(skip_frame, FALSE); + + LOG_V("Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n", + (guint)skip_frame, (guint)frame_id, timestamp); + + /* Release our input buffers */ + ret = mix_videofmt_mp42_release_input_buffers(mix, timestamp); + + /* Enqueue the skipped frame using frame manager */ + ret = mix_framemanager_enqueue(mix->framemgr, skip_frame); + goto cleanup; + } + + /* + * Decide the number of buffers to use + */ + + buffer_id_number = picture_data->number_slices * 2 + 2; + LOG_V("number_slices is %d, allocating %d buffer_ids\n", + picture_data->number_slices, buffer_id_number); + + /* + * Check for B frames after a seek + * We need to have both reference frames in hand before we can decode a B frame + * If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME + */ + if (frame_type == MP4_VOP_TYPE_B) { + + if (self->reference_frames[1] == NULL) { + LOG_W("Insufficient reference frames for B 
frame\n"); + ret = MIX_RESULT_DROPFRAME; + goto cleanup; + } + } + + buffer_ids = g_malloc(sizeof(VABufferID) * buffer_id_number); + if (buffer_ids == NULL) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to allocate buffer_ids!\n"); + goto cleanup; + } + + LOG_V("Getting a new surface\n");LOG_V("frame type is %d\n", frame_type); + + /* Get a frame from the surface pool */ + ret = mix_surfacepool_get(mix->surfacepool, &frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get frame from surface pool!\n"); + goto cleanup; + } + + /* + * Set the frame type for the frame object (used in reordering by frame manager) + */ + ret = mix_videoframe_set_frame_type(frame, frame_type); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set frame type!\n"); + goto cleanup; + } + + /* If I or P frame, update the reference array */ + if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) { + LOG_V("Updating forward/backward references for libva\n"); + + self->last_vop_coding_type = frame_type; + mix_videofmt_mp42_handle_ref_frames(mix, frame_type, frame); + } + + LOG_V("Setting reference frames in picparams, frame_type = %d\n", + frame_type); + + switch (frame_type) { + case MP4_VOP_TYPE_I: + picture_param->forward_reference_picture = VA_INVALID_SURFACE; + picture_param->backward_reference_picture = VA_INVALID_SURFACE; + LOG_V("I frame, surface ID %u\n", (guint) frame->frame_id); + break; + case MP4_VOP_TYPE_P: + picture_param-> forward_reference_picture + = self->reference_frames[0]->frame_id; + picture_param-> backward_reference_picture = VA_INVALID_SURFACE; + + LOG_V("P frame, surface ID %u, forw ref frame is %u\n", + (guint) frame->frame_id, + (guint) self->reference_frames[0]->frame_id); + break; + case MP4_VOP_TYPE_B: + + picture_param->vop_fields.bits.backward_reference_vop_coding_type + = self->last_vop_coding_type; + + picture_param->forward_reference_picture + = self->reference_frames[1]->frame_id; + picture_param->backward_reference_picture + = self->reference_frames[0]->frame_id; + + LOG_V("B frame, surface ID %u, forw ref %d, back ref %d\n", + (guint) frame->frame_id, + (guint) picture_param->forward_reference_picture, + (guint) picture_param->backward_reference_picture); + break; + case MP4_VOP_TYPE_S: + LOG_W("MP4_VOP_TYPE_S, Will never reach here\n"); + break; + + default: + LOG_W("default, Will never reach here\n"); + break; + + } + + /* Libva buffer set up */ + va_display = mix->va_display; + va_context = mix->va_context; + + LOG_V("Creating libva picture parameter buffer\n"); + + /* First the picture parameter buffer */ + buffer_id_cnt = 0; + va_ret = vaCreateBuffer(va_display, va_context, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferMPEG4), 1, picture_param, + &buffer_ids[buffer_id_cnt]); + buffer_id_cnt++; + + if (va_ret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Failed to create va buffer of type VAPictureParameterBufferMPEG4!\n"); + goto cleanup; + } + + LOG_V("Creating libva VAIQMatrixBufferMPEG4 buffer\n"); + + if (picture_param->vol_fields.bits.quant_type) { + va_ret = vaCreateBuffer(va_display, va_context, VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferMPEG4), 1, iq_matrix_buffer, + &buffer_ids[buffer_id_cnt]); + + if (va_ret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Failed to create va buffer of type VAIQMatrixBufferType!\n"); + goto cleanup; + } + buffer_id_cnt++; + } + + /* Now for slices */ + for (jdx = 0; jdx < picture_data->number_slices; jdx++) { + + slice_data = 
&(picture_data->slice_data[jdx]); + slice_param = &(slice_data->slice_param); + + LOG_V( + "Creating libva slice parameter buffer, for slice %d\n", + jdx); + + /* Do slice parameters */ + va_ret = vaCreateBuffer(va_display, va_context, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferMPEG4), 1, slice_param, + &buffer_ids[buffer_id_cnt]); + if (va_ret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Failed to create va buffer of type VASliceParameterBufferMPEG4!\n"); + goto cleanup; + } + buffer_id_cnt++; + + /* Do slice data */ + va_ret = vaCreateBuffer(va_display, va_context, VASliceDataBufferType, + slice_data->slice_size, 1, slice_data->buffer_addr + + slice_data->slice_offset, &buffer_ids[buffer_id_cnt]); + if (va_ret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Failed to create va buffer of type VASliceDataBufferType!\n"); + goto cleanup; + } + buffer_id_cnt++; + } + + /* Get our surface ID from the frame object */ + ret = mix_videoframe_get_frame_id(frame, &surface); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get frame id: ret = 0x%x\n", ret); + goto cleanup; + } + + LOG_V("Calling vaBeginPicture\n"); + + /* Now we can begin the picture */ + va_ret = vaBeginPicture(va_display, va_context, surface); + if (va_ret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Failed to vaBeginPicture(): va_ret = 0x%x\n", va_ret); + goto cleanup; + } + + LOG_V("Calling vaRenderPicture\n"); + + /* Render the picture */ + va_ret = vaRenderPicture(va_display, va_context, buffer_ids, buffer_id_cnt); + if (va_ret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Failed to vaRenderPicture(): va_ret = 0x%x\n", va_ret); + goto cleanup; + } + + LOG_V("Calling vaEndPicture\n"); + + /* End picture */ + va_ret = vaEndPicture(va_display, va_context); + if (va_ret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Failed to vaEndPicture(): va_ret = 0x%x\n", va_ret); + goto cleanup; + } + + LOG_V("Calling vaSyncSurface\n"); + + /* Decode the picture */ + va_ret = vaSyncSurface(va_display, surface); + if (va_ret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Failed to vaSyncSurface(): va_ret = 0x%x\n", va_ret); + goto cleanup; + } + + /* Set the discontinuity flag */ + mix_videoframe_set_discontinuity(frame, discontinuity); + + /* Set the timestamp */ + mix_videoframe_set_timestamp(frame, timestamp); + + LOG_V("Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", timestamp); + + /* Enqueue the decoded frame using frame manager */ + ret = mix_framemanager_enqueue(mix->framemgr, frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to mix_framemanager_enqueue()!\n"); + goto cleanup; + } + + /* For I or P frames, save this frame off for skipped frame handling */ + if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) { + if (self->last_frame != NULL) { + mix_videoframe_unref(self->last_frame); + } + self->last_frame = frame; + mix_videoframe_ref(frame); + } + + ret = MIX_RESULT_SUCCESS; + + cleanup: + + if (ret != MIX_RESULT_SUCCESS && frame != NULL) { + mix_videoframe_unref(frame); + } + + if (ret != MIX_RESULT_SUCCESS) { + mix_videoformat_mp42_flush_packed_stream_queue( + self->packed_stream_queue); + } + + g_free(buffer_ids); + mix_videofmt_mp42_release_input_buffers(mix, timestamp); + + if (is_from_queued_data) { + if (mix_buffer) { + mix_buffer_unref(mix_buffer); + } + mix_videoformat_mp42_free_picture_data(picture_data); + } + + LOG_V("End\n"); + + return ret; +} + +MIX_RESULT 
mix_videofmt_mp42_flush(MixVideoFormat *mix) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); + MixInputBufferEntry *bufentry = NULL; + + LOG_V("Begin\n"); + + g_mutex_lock(mix->objectlock); + + mix_videoformat_mp42_flush_packed_stream_queue(self->packed_stream_queue); + + /* + * Clear the contents of inputbufqueue + */ + while (!g_queue_is_empty(mix->inputbufqueue)) { + bufentry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue); + if (bufentry == NULL) { + continue; + } + + mix_buffer_unref(bufentry->buf); + g_free(bufentry); + } + + /* + * Clear parse_in_progress flag and current timestamp + */ + mix->parse_in_progress = FALSE; + mix->discontinuity_frame_in_progress = FALSE; + mix->current_timestamp = 0; + + { + gint idx = 0; + for (idx = 0; idx < 2; idx++) { + if (self->reference_frames[idx] != NULL) { + mix_videoframe_unref(self->reference_frames[idx]); + self->reference_frames[idx] = NULL; + } + } + } + + /* Call parser flush */ + vbp_flush(mix->parser_handle); + + g_mutex_unlock(mix->objectlock); + + LOG_V("End\n"); + + return ret; +} + +MIX_RESULT mix_videofmt_mp42_eos(MixVideoFormat *mix) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + vbp_data_mp42 *data = NULL; + uint32 vbp_ret = 0; + + LOG_V("Begin\n"); + + if (mix == NULL) { + return MIX_RESULT_NULL_PTR; + } + + if (!MIX_IS_VIDEOFORMAT_MP42(mix)) { + return MIX_RESULT_INVALID_PARAM; + } + + g_mutex_lock(mix->objectlock); + + /* if a frame is in progress, process the frame */ + if (mix->parse_in_progress) { + /* query for data */ + vbp_ret = vbp_query(mix->parser_handle, (void *) &data); + LOG_V("vbp_query() returns 0x%x\n", vbp_ret); + + if ((vbp_ret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E("vbp_ret != VBP_OK || data == NULL\n"); + goto cleanup; + } + + /* process and decode data */ + ret = mix_videofmt_mp42_process_decode(mix, data, + mix->current_timestamp, mix->discontinuity_frame_in_progress); + mix->parse_in_progress = FALSE; + + } + + ret = mix_framemanager_eos(mix->framemgr); + + cleanup: + + g_mutex_unlock(mix->objectlock); + + LOG_V("End\n"); + + return ret; +} + +MIX_RESULT mix_videofmt_mp42_deinitialize(MixVideoFormat *mix) { + + /* + * We do all the cleanup in _finalize + */ + + MIX_RESULT ret = MIX_RESULT_FAIL; + + LOG_V("Begin\n"); + + if (mix == NULL) { + LOG_V("mix is NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if (!MIX_IS_VIDEOFORMAT_MP42(mix)) { + LOG_V("mix is not mixvideoformat_mp42\n"); + return MIX_RESULT_INVALID_PARAM; + } + + if (parent_class->deinitialize) { + ret = parent_class->deinitialize(mix); + } + + LOG_V("End\n"); + return ret; +} + +MIX_RESULT mix_videofmt_mp42_handle_ref_frames(MixVideoFormat *mix, + enum _picture_type frame_type, MixVideoFrame * current_frame) { + + MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); + + LOG_V("Begin\n"); + + if (mix == NULL || current_frame == NULL) { + return MIX_RESULT_NULL_PTR; + } + + switch (frame_type) { + case MP4_VOP_TYPE_I: + case MP4_VOP_TYPE_P: + LOG_V("Refing reference frame %x\n", (guint) current_frame); + + mix_videoframe_ref(current_frame); + + /* should only happen on first frame */ + if (self->reference_frames[0] == NULL) { + self->reference_frames[0] = current_frame; + /* should only happen on second frame */ + } else if (self->reference_frames[1] == NULL) { + self->reference_frames[1] = current_frame; + } else { + LOG_V("Releasing reference frame %x\n", + (guint) self->reference_frames[0]); + mix_videoframe_unref(self->reference_frames[0]); + 
self->reference_frames[0] = self->reference_frames[1]; + self->reference_frames[1] = current_frame; + } + break; + case MP4_VOP_TYPE_B: + case MP4_VOP_TYPE_S: + default: + break; + + } + + LOG_V("End\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmt_mp42_release_input_buffers(MixVideoFormat *mix, + guint64 timestamp) { + + MixInputBufferEntry *bufentry = NULL; + gboolean done = FALSE; + + LOG_V("Begin\n"); + + if (mix == NULL) { + return MIX_RESULT_NULL_PTR; + } + + /* Dequeue and release all input buffers for this frame */ + LOG_V("Releasing all the MixBuffers for this frame\n"); + + /* + * While the head of the queue has timestamp == current ts + * dequeue the entry, unref the MixBuffer, and free the struct + */ + done = FALSE; + while (!done) { + bufentry + = (MixInputBufferEntry *) g_queue_peek_head(mix->inputbufqueue); + if (bufentry == NULL) { + break; + } + + LOG_V("head of queue buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", + (guint)bufentry->buf, timestamp, bufentry->timestamp); + + if (bufentry->timestamp != timestamp) { + LOG_V("buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", + (guint)bufentry->buf, timestamp, bufentry->timestamp); + + done = TRUE; + break; + } + + bufentry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue); + LOG_V("Unref this MixBuffers %x\n", (guint) bufentry->buf); + + mix_buffer_unref(bufentry->buf); + g_free(bufentry); + } + + LOG_V("End\n"); + + return MIX_RESULT_SUCCESS; +} + +vbp_picture_data_mp42 *mix_videoformat_mp42_clone_picture_data( + vbp_picture_data_mp42 *picture_data) { + + gboolean succ = FALSE; + + if (!picture_data) { + return NULL; + } + + if (picture_data->number_slices == 0) { + return NULL; + } + + vbp_picture_data_mp42 *cloned_picture_data = g_try_new0( + vbp_picture_data_mp42, 1); + if (cloned_picture_data == NULL) { + goto cleanup; + } + + memcpy(cloned_picture_data, picture_data, sizeof(vbp_picture_data_mp42)); + + cloned_picture_data->number_slices = picture_data->number_slices; + cloned_picture_data->slice_data = g_try_new0(vbp_slice_data_mp42, + picture_data->number_slices); + if (cloned_picture_data->slice_data == NULL) { + goto cleanup; + } + + memcpy(cloned_picture_data->slice_data, picture_data->slice_data, + sizeof(vbp_slice_data_mp42) * (picture_data->number_slices)); + + succ = TRUE; + + cleanup: + + if (!succ) { + mix_videoformat_mp42_free_picture_data(cloned_picture_data); + return NULL; + } + + return cloned_picture_data; +} + +void mix_videoformat_mp42_free_picture_data(vbp_picture_data_mp42 *picture_data) { + if (picture_data) { + if (picture_data->slice_data) { + g_free(picture_data->slice_data); + } + g_free(picture_data); + } +} + +void mix_videoformat_mp42_flush_packed_stream_queue(GQueue *packed_stream_queue) { + + PackedStream *packed_stream = NULL; + + if (packed_stream_queue == NULL) { + return; + } + while (!g_queue_is_empty(packed_stream_queue)) { + packed_stream = (PackedStream *) g_queue_pop_head(packed_stream_queue); + if (packed_stream == NULL) { + continue; + } + + if (packed_stream->picture_data) { + mix_videoformat_mp42_free_picture_data(packed_stream->picture_data); + } + + if (packed_stream->mix_buffer) { + mix_buffer_unref(packed_stream->mix_buffer); + } + g_free(packed_stream); + } +} diff --git a/mix_video/src/mixvideoformat_mp42.h b/mix_video/src/mixvideoformat_mp42.h new file mode 100644 index 0000000..67ee210 --- /dev/null +++ b/mix_video/src/mixvideoformat_mp42.h @@ -0,0 +1,117 @@ +/* + INTEL CONFIDENTIAL 
+ Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEOFORMAT_MP42_H__ +#define __MIX_VIDEOFORMAT_MP42_H__ + +#include "mixvideoformat.h" +#include "mixvideoframe_private.h" + +//Note: this is only a max limit. Real number of surfaces allocated is calculated in mix_videoformat_mp42_initialize() +#define MIX_VIDEO_MP42_SURFACE_NUM 8 + +/* + * Type macros. + */ +#define MIX_TYPE_VIDEOFORMAT_MP42 (mix_videoformat_mp42_get_type ()) +#define MIX_VIDEOFORMAT_MP42(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMAT_MP42, MixVideoFormat_MP42)) +#define MIX_IS_VIDEOFORMAT_MP42(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMAT_MP42)) +#define MIX_VIDEOFORMAT_MP42_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMAT_MP42, MixVideoFormat_MP42Class)) +#define MIX_IS_VIDEOFORMAT_MP42_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMAT_MP42)) +#define MIX_VIDEOFORMAT_MP42_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMAT_MP42, MixVideoFormat_MP42Class)) + +typedef struct _MixVideoFormat_MP42 MixVideoFormat_MP42; +typedef struct _MixVideoFormat_MP42Class MixVideoFormat_MP42Class; + +struct _MixVideoFormat_MP42 { + /*< public > */ + MixVideoFormat parent; + + /*< public > */ + + /*< private > */ + MixVideoFrame * reference_frames[2]; + MixVideoFrame * last_frame; + gint last_vop_coding_type; + + GQueue *packed_stream_queue; +}; + +/** + * MixVideoFormat_MP42Class: + * + * MI-X Video object class + */ +struct _MixVideoFormat_MP42Class { + /*< public > */ + MixVideoFormatClass parent_class; + +/* class members */ + +/*< public > */ +}; + +/** + * mix_videoformat_mp42_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videoformat_mp42_get_type(void); + +/** + * mix_videoformat_mp42_new: + * @returns: A newly allocated instance of #MixVideoFormat_MP42 + * + * Use this method to create new instance of #MixVideoFormat_MP42 + */ +MixVideoFormat_MP42 *mix_videoformat_mp42_new(void); + +/** + * mix_videoformat_mp42_ref: + * @mix: object to add reference + * @returns: the MixVideoFormat_MP42 instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoFormat_MP42 *mix_videoformat_mp42_ref(MixVideoFormat_MP42 * mix); + +/** + * mix_videoformat_mp42_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. 
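+ * When the count reaches zero the object is finalized.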
 + */ +#define mix_videoformat_mp42_unref(obj) g_object_unref (G_OBJECT(obj)) + +/* Class Methods */ + +/* MP42 vmethods */ +MIX_RESULT mix_videofmt_mp42_getcaps(MixVideoFormat *mix, GString *msg); +MIX_RESULT mix_videofmt_mp42_initialize(MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, MixSurfacePool ** surface_pool, + VADisplay va_display); +MIX_RESULT mix_videofmt_mp42_decode(MixVideoFormat *mix, MixBuffer * bufin[], + gint bufincnt, MixVideoDecodeParams * decode_params); +MIX_RESULT mix_videofmt_mp42_flush(MixVideoFormat *mix); +MIX_RESULT mix_videofmt_mp42_eos(MixVideoFormat *mix); +MIX_RESULT mix_videofmt_mp42_deinitialize(MixVideoFormat *mix); + +/* Local Methods */ + +MIX_RESULT mix_videofmt_mp42_handle_ref_frames(MixVideoFormat *mix, + enum _picture_type frame_type, MixVideoFrame * current_frame); + +MIX_RESULT mix_videofmt_mp42_process_decode(MixVideoFormat *mix, + vbp_data_mp42 *data, guint64 timestamp, gboolean discontinuity); + +MIX_RESULT mix_videofmt_mp42_release_input_buffers(MixVideoFormat *mix, + guint64 timestamp); + +#endif /* __MIX_VIDEOFORMAT_MP42_H__ */ diff --git a/mix_video/src/mixvideoformat_vc1.c b/mix_video/src/mixvideoformat_vc1.c new file mode 100644 index 0000000..ec09985 --- /dev/null +++ b/mix_video/src/mixvideoformat_vc1.c @@ -0,0 +1,1749 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include <glib.h> +#include "mixvideolog.h" + +#include "mixvideoformat_vc1.h" +#include <va/va.h> + +#ifdef YUVDUMP +//TODO Complete YUVDUMP code and move into base class +#include <stdio.h> +#endif /* YUVDUMP */ + +#include <string.h> + + +#ifdef MIX_LOG_ENABLE +static int mix_video_vc1_counter = 0; +#endif + +/* The parent class. The pointer will be saved + * in this class's initialization. The pointer + * can be used for chaining method calls if needed. 
 + */ +static MixVideoFormatClass *parent_class = NULL; + +static void mix_videoformat_vc1_finalize(GObject * obj); + +/* + * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMAT + */ +G_DEFINE_TYPE (MixVideoFormat_VC1, mix_videoformat_vc1, MIX_TYPE_VIDEOFORMAT); + +static void mix_videoformat_vc1_init(MixVideoFormat_VC1 * self) { + MixVideoFormat *parent = MIX_VIDEOFORMAT(self); + + /* public member initialization */ + /* These are all public because MixVideoFormat objects are completely internal to MixVideo, + no need for private members */ + self->reference_frames[0] = NULL; + self->reference_frames[1] = NULL; + + /* NOTE: we don't need to do this here. + * This just demonstrates how to access + * member variables belonging to the parent + */ + parent->initialized = FALSE; +} + +static void mix_videoformat_vc1_class_init( + MixVideoFormat_VC1Class * klass) { + + /* root class */ + GObjectClass *gobject_class = (GObjectClass *) klass; + + /* direct parent class */ + MixVideoFormatClass *video_format_class = + MIX_VIDEOFORMAT_CLASS(klass); + + /* parent class for later use */ + parent_class = g_type_class_peek_parent(klass); + + /* setup finalizer */ + gobject_class->finalize = mix_videoformat_vc1_finalize; + + /* setup vmethods with base implementation */ + /* This is where we can override base class methods if needed */ + video_format_class->getcaps = mix_videofmt_vc1_getcaps; + video_format_class->initialize = mix_videofmt_vc1_initialize; + video_format_class->decode = mix_videofmt_vc1_decode; + video_format_class->flush = mix_videofmt_vc1_flush; + video_format_class->eos = mix_videofmt_vc1_eos; + video_format_class->deinitialize = mix_videofmt_vc1_deinitialize; +} + +MixVideoFormat_VC1 * +mix_videoformat_vc1_new(void) { + MixVideoFormat_VC1 *ret = + g_object_new(MIX_TYPE_VIDEOFORMAT_VC1, NULL); + + return ret; +} + +void mix_videoformat_vc1_finalize(GObject * obj) { + gint32 pret = VBP_OK; + + /* clean up here. */ + + MixVideoFormat *parent = NULL; + MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(obj); + GObjectClass *root_class = (GObjectClass *) parent_class; + + parent = MIX_VIDEOFORMAT(self); + + g_mutex_lock(parent->objectlock); + + //surfacepool is deallocated by parent + //inputbufqueue is deallocated by parent + //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces + + //Unref our reference frames + int i = 0; + for (; i < 2; i++) + { + if (self->reference_frames[i] != NULL) + { + mix_videoframe_unref(self->reference_frames[i]); + self->reference_frames[i] = NULL; + } + } + + //Reset state + parent->initialized = FALSE; + parent->parse_in_progress = FALSE; + parent->discontinuity_frame_in_progress = FALSE; + parent->current_timestamp = 0; + + //Close the parser + pret = vbp_close(parent->parser_handle); + parent->parser_handle = NULL; + if (pret != VBP_OK) + { + LOG_E( "Error closing parser\n"); + } + + g_mutex_unlock(parent->objectlock); + + /* Chain up parent */ + if (root_class->finalize) { + root_class->finalize(obj); + } +} + +MixVideoFormat_VC1 * +mix_videoformat_vc1_ref(MixVideoFormat_VC1 * mix) { + return (MixVideoFormat_VC1 *) g_object_ref(G_OBJECT(mix)); +} + +/* VC1 vmethods implementation */ +MIX_RESULT mix_videofmt_vc1_getcaps(MixVideoFormat *mix, GString *msg) { + + MIX_RESULT ret = MIX_RESULT_NOTIMPL; + +//This method is reserved for future use + + if (mix == NULL || msg == NULL) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + /* Chainup parent method. 
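+ This method is reserved for future use, so we simply defer to the base class implementation.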
 + */ + + if (parent_class->getcaps) { + ret = parent_class->getcaps(mix, msg); + } + + LOG_V( "End\n"); + + return ret; +} + +MIX_RESULT mix_videofmt_vc1_update_seq_header( + MixVideoConfigParamsDec* config_params, + MixIOVec *header) +{ + guint width = 0; + guint height = 0; + + guint i = 0; + guchar* p = NULL; + MIX_RESULT res = MIX_RESULT_SUCCESS; + + if (!config_params || !header) + { + LOG_E( "Null pointer passed in\n"); + return (MIX_RESULT_NULL_PTR); + } + + p = header->data; + + res = mix_videoconfigparamsdec_get_picture_res( + config_params, + &width, + &height); + + if (MIX_RESULT_SUCCESS != res) + { + return res; + } + + /* Check for start codes. If one exists, then this is VC-1 and not WMV. */ + while (i + 2 < header->data_size) + { + if ((p[i] == 0) && + (p[i + 1] == 0) && + (p[i + 2] == 1)) + { + return MIX_RESULT_SUCCESS; + } + i++; + } + + p = g_malloc0(header->data_size + 9); + + if (!p) + { + LOG_E( "Cannot allocate memory\n"); + return MIX_RESULT_NO_MEMORY; + } + + /* If we get here we have 4+ bytes of codec data that must be formatted */ + /* to pass through as an RCV sequence header. */ + p[0] = 0; + p[1] = 0; + p[2] = 1; + p[3] = 0x0f; /* Start code. */ + + p[4] = (width >> 8) & 0x0ff; + p[5] = width & 0x0ff; + p[6] = (height >> 8) & 0x0ff; + p[7] = height & 0x0ff; + + memcpy(p + 8, header->data, header->data_size); + *(p + header->data_size + 8) = 0x80; + + g_free(header->data); + header->data = p; + header->data_size = header->data_size + 9; + + return MIX_RESULT_SUCCESS; +} + + + +MIX_RESULT mix_videofmt_vc1_initialize(MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display) { + + uint32 pret = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + enum _vbp_parser_type ptype = VBP_VC1; + vbp_data_vc1 *data = NULL; + MixVideoFormat *parent = NULL; + MixVideoFormat_VC1 *self = NULL; + MixIOVec *header = NULL; + gint numprofs = 0, numactualprofs = 0; + gint numentrypts = 0, numactualentrypts = 0; + VADisplay vadisplay = NULL; + VAProfile *profiles = NULL; + VAEntrypoint *entrypts = NULL; + VAConfigAttrib attrib; + VAStatus vret = VA_STATUS_SUCCESS; + guint extra_surfaces = 0; + VASurfaceID *surfaces = NULL; + guint numSurfaces = 0; + + //TODO Partition this method into smaller methods + + if (mix == NULL || config_params == NULL || frame_mgr == NULL || !input_buf_pool || !surface_pool || !va_display) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + /* Chainup parent method. 
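+ The base class performs the common initialization first; the VC1-specific setup below only runs if it succeeds.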
+ */ + + if (parent_class->initialize) { + ret = parent_class->initialize(mix, config_params, + frame_mgr, input_buf_pool, surface_pool, + va_display); + } + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + + if (!MIX_IS_VIDEOFORMAT_VC1(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMAT(mix); + self = MIX_VIDEOFORMAT_VC1(mix); + + LOG_V( "Locking\n"); + //From now on, we exit this function through cleanup: + g_mutex_lock(parent->objectlock); + + //Load the bitstream parser + pret = vbp_open(ptype, &(parent->parser_handle)); + + if (!(pret == VBP_OK)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error opening parser\n"); + goto cleanup; + } + + LOG_V( "Opened parser\n"); + + ret = mix_videoconfigparamsdec_get_header(config_params, + &header); + + if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get header data\n"); + goto cleanup; + } + + ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, + &extra_surfaces); + + if (ret != MIX_RESULT_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get extra surface allocation setting\n"); + goto cleanup; + } + + LOG_V( "Calling parse on header data, handle %d\n", (int)parent->parser_handle); + + ret = mix_videofmt_vc1_update_seq_header( + config_params, + header); + if (ret != MIX_RESULT_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error updating sequence header\n"); + goto cleanup; + } + + pret = vbp_parse(parent->parser_handle, header->data, + header->data_size, TRUE); + + if (!((pret == VBP_OK) || (pret == VBP_DONE))) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error parsing header data, size %d\n", header->data_size); + goto cleanup; + } + + + LOG_V( "Parsed header\n"); + //Get the header data and save + pret = vbp_query(parent->parser_handle, (void *)&data); + + if ((pret != VBP_OK) || (data == NULL)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error reading parsed header data\n"); + goto cleanup; + } + + LOG_V( "Queried parser for header data\n"); + + //Time for libva initialization + + vadisplay = parent->va_display; + + numprofs = vaMaxNumProfiles(vadisplay); + profiles = g_malloc(numprofs*sizeof(VAProfile)); + + if (!profiles) + { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating memory\n"); + goto cleanup; + } + + vret = vaQueryConfigProfiles(vadisplay, profiles, + &numactualprofs); + if (!(vret == VA_STATUS_SUCCESS)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + goto cleanup; + } + + //check the desired profile support + gint vaprof = 0; + + VAProfile profile; + switch (data->se_data->PROFILE) + { + case 0: + profile = VAProfileVC1Simple; + break; + + case 1: + profile = VAProfileVC1Main; + break; + + default: + profile = VAProfileVC1Advanced; + break; + } + + for (; vaprof < numactualprofs; vaprof++) + { + if (profiles[vaprof] == profile) + break; + } + if (vaprof >= numprofs || profiles[vaprof] != profile) + //Did not get the profile we wanted + { + ret = MIX_RESULT_FAIL; + LOG_E( "Profile not supported by driver\n"); + goto cleanup; + } + + numentrypts = vaMaxNumEntrypoints(vadisplay); + entrypts = g_malloc(numentrypts*sizeof(VAEntrypoint)); + + if (!entrypts) + { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating memory\n"); + goto cleanup; + } + + vret = vaQueryConfigEntrypoints(vadisplay, profiles[vaprof], + entrypts, &numactualentrypts); + if (!(vret == VA_STATUS_SUCCESS)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing driver\n"); + goto cleanup; + } + + gint vaentrypt = 0; + for (; 
vaentrypt < numactualentrypts; vaentrypt++) + { + if (entrypts[vaentrypt] == VAEntrypointVLD) + break; + } + if (vaentrypt >= numentrypts || entrypts[vaentrypt] != VAEntrypointVLD) + //Did not get the entrypt we wanted + { + ret = MIX_RESULT_FAIL; + LOG_E( "Entry point not supported by driver\n"); + goto cleanup; + } + + //We are requesting RT attributes + attrib.type = VAConfigAttribRTFormat; + + vret = vaGetConfigAttributes(vadisplay, profiles[vaprof], + entrypts[vaentrypt], &attrib, 1); + + //TODO Handle other values returned for RT format + // and check with requested format provided in config params + //Right now only YUV 4:2:0 is supported by libva + // and this is our default + if (((attrib.value & VA_RT_FORMAT_YUV420) == 0) || + vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing driver\n"); + goto cleanup; + } + + //Initialize and save the VA config ID + vret = vaCreateConfig(vadisplay, profiles[vaprof], + entrypts[vaentrypt], &attrib, 1, &(parent->va_config)); + + if (!(vret == VA_STATUS_SUCCESS)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing driver\n"); + goto cleanup; + } + + LOG_V( "Created libva config with profile %d\n", vaprof); + + //Check for loop filtering + if (data->se_data->LOOPFILTER == 1) + self->loopFilter = TRUE; + else + self->loopFilter = FALSE; + + LOG_V( "loop filter is %d, TFCNTRFLAG is %d\n", data->se_data->LOOPFILTER, data->se_data->TFCNTRFLAG); + + //Initialize the surface pool + + + if ((data->se_data->MAXBFRAMES > 0) || (data->se_data->PROFILE == 3) || (data->se_data->PROFILE == 1)) + //If Advanced profile, have to assume B frames may be present, since MAXBFRAMES is not valid for this prof + self->haveBframes = TRUE; + else + self->haveBframes = FALSE; + + //Calculate VC1 numSurfaces based on max number of B frames or + // MIX_VIDEO_VC1_SURFACE_NUM, whichever is less + + //Adding 1 to work around VBLANK issue + parent->va_num_surfaces = 1 + extra_surfaces + ((3 + (self->haveBframes ? 1 : 0) < + MIX_VIDEO_VC1_SURFACE_NUM) ? + (3 + (self->haveBframes ? 
1 : 0)) + : MIX_VIDEO_VC1_SURFACE_NUM); + + numSurfaces = parent->va_num_surfaces; + + parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); + + surfaces = parent->va_surfaces; + + if (surfaces == NULL) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot allocate temporary data\n"); + goto cleanup; + } + + vret = vaCreateSurfaces(vadisplay, parent->picture_width, + parent->picture_height, entrypts[vaentrypt], + numSurfaces, surfaces); + if (!(vret == VA_STATUS_SUCCESS)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error allocating surfaces\n"); + goto cleanup; + } + + parent->surfacepool = mix_surfacepool_new(); + *surface_pool = parent->surfacepool; + + if (parent->surfacepool == NULL) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing surface pool\n"); + goto cleanup; + } + + + ret = mix_surfacepool_initialize(parent->surfacepool, + surfaces, numSurfaces); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + default: + ret = MIX_RESULT_ALREADY_INIT; + LOG_E( "Error init failure\n"); + goto cleanup; + break; + } + + LOG_V( "Created %d libva surfaces, MAXBFRAMES is %d\n", numSurfaces, data->se_data->MAXBFRAMES); + + //Initialize and save the VA context ID + //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 + vret = vaCreateContext(vadisplay, parent->va_config, + parent->picture_width, parent->picture_height, + 0, surfaces, numSurfaces, + &(parent->va_context)); + if (!(vret == VA_STATUS_SUCCESS)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + goto cleanup; + } + + LOG_V( "Created libva context width %d, height %d\n", parent->picture_width, parent->picture_height); + + LOG_V( "mix_video vinfo: Content type %s, %s\n", (header->data_size > 8) ? "VC-1" : "WMV", (data->se_data->INTERLACE) ? "interlaced" : "progressive"); + LOG_V( "mix_video vinfo: Content width %d, height %d\n", parent->picture_width, parent->picture_height); + LOG_V( "mix_video vinfo: MAXBFRAMES %d (note that for Advanced profile, MAXBFRAMES can be zero and there still can be B frames in the content)\n", data->se_data->MAXBFRAMES); + LOG_V( "mix_video vinfo: PROFILE %d, LEVEL %d\n", data->se_data->PROFILE, data->se_data->LEVEL); + + + cleanup: + if (ret != MIX_RESULT_SUCCESS) { + pret = vbp_close(parent->parser_handle); + parent->parser_handle = NULL; + parent->initialized = FALSE; + + } else { + parent->initialized = TRUE; + } + + if (header != NULL) + { + if (header->data != NULL) + g_free(header->data); + g_free(header); + header = NULL; + } + + g_free(profiles); + g_free(entrypts); + + self->lastFrame = NULL; + + + LOG_V( "Unlocking\n"); + g_mutex_unlock(parent->objectlock); + + LOG_V( "End\n"); + + return ret; +} + +MIX_RESULT mix_videofmt_vc1_decode(MixVideoFormat *mix, + MixBuffer * bufin[], gint bufincnt, + MixVideoDecodeParams * decode_params) { + + uint32 pret = 0; + int i = 0; + MixVideoFormat *parent = NULL; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + guint64 ts = 0; + vbp_data_vc1 *data = NULL; + gboolean discontinuity = FALSE; + MixInputBufferEntry *bufentry = NULL; + + if (mix == NULL || bufin == NULL || decode_params == NULL ) + { + LOG_E( "NUll pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + //TODO remove iovout and iovoutcnt; they are not used (need to remove from MixVideo/MI-X API too) + + LOG_V( "Begin\n"); + + /* Chainup parent method. + We are not chaining up to parent method for now. 
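+ (The #if 0 block below is kept as a template for how the chain-up would look.)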
+ */ + +#if 0 + if (parent_class->decode) { + return parent_class->decode(mix, bufin, bufincnt, + decode_params); + } +#endif + + if (!MIX_IS_VIDEOFORMAT_VC1(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMAT(mix); + + + ret = mix_videodecodeparams_get_timestamp(decode_params, + &ts); + if (ret != MIX_RESULT_SUCCESS) + { + return MIX_RESULT_FAIL; + } + + ret = mix_videodecodeparams_get_discontinuity(decode_params, + &discontinuity); + if (ret != MIX_RESULT_SUCCESS) + { + return MIX_RESULT_FAIL; + } + + //From now on, we exit this function through cleanup: + + LOG_V( "Locking\n"); + g_mutex_lock(parent->objectlock); + + //If this is a new frame and we haven't retrieved parser + // workload data from previous frame yet, do so + if ((ts != parent->current_timestamp) && + (parent->parse_in_progress)) + { + + //query for data + pret = vbp_query(parent->parser_handle, + (void *) &data); + + if ((pret != VBP_OK) || (data == NULL)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing parser\n"); + goto cleanup; + } + + LOG_V( "Queried for last frame data\n"); + + //process and decode data + ret = mix_videofmt_vc1_process_decode(mix, + data, parent->current_timestamp, + parent->discontinuity_frame_in_progress); + + if (ret != MIX_RESULT_SUCCESS) + { + //We log this but need to process the new frame data, so do not return + LOG_E( "process_decode failed.\n"); + } + + LOG_V( "Called process and decode for last frame\n"); + + parent->parse_in_progress = FALSE; + + } + + parent->current_timestamp = ts; + parent->discontinuity_frame_in_progress = discontinuity; + + LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_vc1_counter++, ts); + + for (i = 0; i < bufincnt; i++) + { + + LOG_V( "Calling parse for current frame, parse handle %d, buf %x, size %d\n", (int)parent->parser_handle, (guint)bufin[i]->data, bufin[i]->size); + + pret = vbp_parse(parent->parser_handle, + bufin[i]->data, + bufin[i]->size, + FALSE); + + LOG_V( "Called parse for current frame\n"); + + if (pret == VBP_DONE) + { + //query for data + pret = vbp_query(parent->parser_handle, + (void *) &data); + + if ((pret != VBP_OK) || (data == NULL)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error getting parser data\n"); + goto cleanup; + } + + LOG_V( "Called query for current frame\n"); + + //Increase the ref count of this input buffer + mix_buffer_ref(bufin[i]); + + //Create a new MixInputBufferEntry + //TODO make this from a pool to optimize + bufentry = g_malloc(sizeof( + MixInputBufferEntry)); + if (bufentry == NULL) + { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating bufentry\n"); + goto cleanup; + } + + bufentry->buf = bufin[i]; + LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"G_GINT64_FORMAT"\n", (guint)bufentry, (guint)bufentry->buf, ts); + bufentry->timestamp = ts; + + LOG_V( "Enqueue this input buffer for current frame\n"); + LOG_V( "bufentry->timestamp %"G_GINT64_FORMAT"\n", bufentry->timestamp); + + //Enqueue this input buffer + g_queue_push_tail(parent->inputbufqueue, + (gpointer)bufentry); + + //process and decode data + ret = mix_videofmt_vc1_process_decode(mix, + data, ts, discontinuity); + + if (ret != MIX_RESULT_SUCCESS) + { + //We log this but continue since we need to complete our processing of input buffers + LOG_E( "Process_decode failed.\n"); + } + + LOG_V( "Called process and decode for current frame\n"); + + parent->parse_in_progress = FALSE; + } + else if (pret != VBP_OK) + { + //We log this but continue since we need to complete our processing of input 
buffers + LOG_E( "Parsing failed.\n"); + ret = MIX_RESULT_FAIL; + } + else + { + + LOG_V( "Enqueuing buffer and going on to next (if any) for this frame\n"); + + //Increase the ref count of this input buffer + mix_buffer_ref(bufin[i]); + + //Create a new MixInputBufferEntry + //TODO make this from a pool to optimize + bufentry = g_malloc(sizeof + (MixInputBufferEntry)); + if (bufentry == NULL) + { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating bufentry\n"); + goto cleanup; + } + bufentry->buf = bufin[i]; + bufentry->timestamp = ts; + + //Enqueue this input buffer + g_queue_push_tail(parent->inputbufqueue, + (gpointer)bufentry); + parent->parse_in_progress = TRUE; + } + + } + + + cleanup: + + LOG_V( "Unlocking\n"); + g_mutex_unlock(parent->objectlock); + + + LOG_V( "End\n"); + + return ret; +} + +#ifdef YUVDUMP +//TODO Complete this YUVDUMP code and move into base class + +MIX_RESULT GetImageFromSurface (MixVideoFormat *mix, MixVideoFrame * frame) + +{ + + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAImageFormat va_image_format; + VAImage va_image; + + unsigned char* pBuffer; + unsigned int ui32SrcWidth = mix->picture_width; + unsigned int ui32SrcHeight = mix->picture_height; + unsigned int ui32Stride; + unsigned int ui32ChromaOffset; + FILE *fp = NULL; + int r = 0; + + int i; + + g_print ("GetImageFromSurface \n"); + + if ((mix == NULL) || (frame == NULL)) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + fp = fopen("yuvdump.yuv", "a+"); + + static int have_va_image = 0; + + if (!have_va_image) + { + va_image_format.fourcc = VA_FOURCC_NV12; +// va_image_format.fourcc = VA_FOURCC_YV12; + + vaStatus = vaCreateImage(mix->va_display, &va_image_format, ui32SrcWidth, ui32SrcHeight, &va_image); + have_va_image = 1; + } + + vaStatus = vaGetImage( mix->va_display, frame->frame_id, 0, 0, ui32SrcWidth, ui32SrcHeight, va_image.image_id ); + vaStatus = vaMapBuffer( mix->va_display, va_image.buf, (void **) &pBuffer); + ui32ChromaOffset = va_image.offsets[1]; + ui32Stride = va_image.pitches[0]; + + if (VA_STATUS_SUCCESS != vaStatus) + { + g_print ("VideoProcessBlt: Unable to copy surface\n\r"); + return vaStatus; + } + + { + g_print ("before copy memory....\n"); + g_print ("width = %d, height = %d\n", ui32SrcWidth, ui32SrcHeight); + g_print ("data_size = %d\n", va_image.data_size); + g_print ("num_planes = %d\n", va_image.num_planes); + g_print ("va_image.pitches[0] = %d\n", va_image.pitches[0]); + g_print ("va_image.pitches[1] = %d\n", va_image.pitches[1]); + g_print ("va_image.pitches[2] = %d\n", va_image.pitches[2]); + g_print ("va_image.offsets[0] = %d\n", va_image.offsets[0]); + g_print ("va_image.offsets[1] = %d\n", va_image.offsets[1]); + g_print ("va_image.offsets[2] = %d\n", va_image.offsets[2]); +// r = fwrite (pBuffer, 1, va_image.offsets[1], fp); + + r = fwrite (pBuffer, va_image.offsets[1], 1, fp); + + for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2) + r = fwrite (pBuffer + va_image.offsets[1] + i / 2, 1, 1, fp); + + for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2) + r = fwrite (pBuffer + va_image.offsets[1] + i / 2 + 1, 1, 1, fp); + + g_print ("ui32ChromaOffset = %d, ui32Stride = %d\n", ui32ChromaOffset, ui32Stride); + + } + + vaStatus = vaUnmapBuffer( mix->va_display, va_image.buf); + + return vaStatus; + +} +#endif /* YUVDUMP */ + + +MIX_RESULT mix_videofmt_vc1_decode_a_picture( + MixVideoFormat* mix, + vbp_data_vc1 *data, + int pic_index, + MixVideoFrame *frame) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = 
VA_STATUS_SUCCESS; + VADisplay vadisplay = NULL; + VAContextID vacontext; + guint buffer_id_cnt = 0; + VABufferID *buffer_ids = NULL; + MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(mix); + + vbp_picture_data_vc1* pic_data = &(data->pic_data[pic_index]); + VAPictureParameterBufferVC1 *pic_params = pic_data->pic_parms; + + if (pic_params == NULL) + { + ret = MIX_RESULT_NULL_PTR; + LOG_E( "Error reading parser data\n"); + goto cleanup; + } + + LOG_V( "num_slices is %d, allocating %d buffer_ids\n", pic_data->num_slices, (pic_data->num_slices * 2) + 2); + + //Set up reference frames for the picture parameter buffer + + //Set the picture type (I, B or P frame) + enum _picture_type frame_type = pic_params->picture_fields.bits.picture_type; + + + //Check for B frames after a seek + //We need to have both reference frames in hand before we can decode a B frame + //If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME + //Note: demuxer should do the right thing and only seek to I frame, so we should + // not get P frame first, but may get B frames after the first I frame + if (frame_type == VC1_PTYPE_B) + { + if (self->reference_frames[1] == NULL) + { + LOG_E( "Insufficient reference frames for B frame\n"); + ret = MIX_RESULT_DROPFRAME; + goto cleanup; + } + } + + buffer_ids = g_malloc(sizeof(VABufferID) * ((pic_data->num_slices * 2) + 2)); + if (buffer_ids == NULL) + { + LOG_E( "Cannot allocate buffer IDs\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + LOG_V( "Getting a new surface\n"); + LOG_V( "frame type is %d\n", frame_type); + + gulong surface = 0; + + //Get our surface ID from the frame object + ret = mix_videoframe_get_frame_id(frame, &surface); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error getting surface ID from frame object\n"); + goto cleanup; + } + + //Get a frame from the surface pool + + if (0 == pic_index) + { + //Set the frame type for the frame object (used in reordering by frame manager) + switch (frame_type) + { + case VC1_PTYPE_I: // I frame type + case VC1_PTYPE_P: // P frame type + case VC1_PTYPE_B: // B frame type + ret = mix_videoframe_set_frame_type(frame, frame_type); + break; + case VC1_PTYPE_BI: // BI frame type + ret = mix_videoframe_set_frame_type(frame, TYPE_I); + break; + //Not indicated here case VC1_PTYPE_SKIPPED: + default: + break; + } + } + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error setting frame type on frame\n"); + goto cleanup; + } + + LOG_V( "Setting reference frames in picparams, frame_type = %d\n", frame_type); + + //TODO Check if we need to add more handling of B or P frames when reference frames are not set up (such as after flush/seek) + + switch (frame_type) + { + case VC1_PTYPE_I: // I frame type + /* forward and backward reference pictures are not used but just set to current + surface to be in consistence with test suite + */ + pic_params->forward_reference_picture = surface; + pic_params->backward_reference_picture = surface; + LOG_V( "I frame, surface ID %u\n", (guint)frame->frame_id); + LOG_V( "mix_video vinfo: Frame type is I\n"); + break; + case VC1_PTYPE_P: // P frame type + + // check REFDIST in the picture parameter buffer + if (0 != pic_params->reference_fields.bits.reference_distance_flag && + 0 != pic_params->reference_fields.bits.reference_distance) + { + /* The previous decoded frame (distance is up to 16 but not 0) is used + for reference, as we don't allocate that many surfaces so the reference picture + could have been overwritten and hence not avaiable for reference. 
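+ Returning MIX_RESULT_FAIL below drops this frame rather than decoding against a surface that may already have been overwritten.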
+ */ + LOG_E( "reference distance is not 0!"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + if (1 == pic_index) + { + // handle interlace field coding case + if (1 == pic_params->reference_fields.bits.num_reference_pictures || + 1 == pic_params->reference_fields.bits.reference_field_pic_indicator) + { + /* two reference fields or the second closest I/P field is used for + prediction. Set forward reference picture to INVALID so it will be + updated to a valid previous reconstructed reference frame later. + */ + pic_params->forward_reference_picture = VA_INVALID_SURFACE; + } + else + { + /* the closest I/P is used for reference so it must be the + complementary field in the same surface. + */ + pic_params->forward_reference_picture = surface; + } + } + if (VA_INVALID_SURFACE == pic_params->forward_reference_picture) + { + if (self->reference_frames[1]) + { + pic_params->forward_reference_picture = self->reference_frames[1]->frame_id; + } + else if (self->reference_frames[0]) + { + pic_params->forward_reference_picture = self->reference_frames[0]->frame_id; + } + else + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error could not find reference frames for P frame\n"); + goto cleanup; + } + } + pic_params->backward_reference_picture = VA_INVALID_SURFACE; + + LOG_V( "P frame, surface ID %u, forw ref frame is %u\n", (guint)frame->frame_id, (guint)self->reference_frames[0]->frame_id); + LOG_V( "mix_video vinfo: Frame type is P\n"); + break; + + case VC1_PTYPE_B: // B frame type + LOG_V( "B frame, forw ref %d, back ref %d\n", (guint)self->reference_frames[0]->frame_id, (guint)self->reference_frames[1]->frame_id); + + if (!self->haveBframes) //We don't expect B frames and have not allocated a surface + // for the extra ref frame so this is an error + { + ret = MIX_RESULT_FAIL; + LOG_E( "Unexpected B frame, cannot process\n"); + goto cleanup; + } + + pic_params->forward_reference_picture = self->reference_frames[0]->frame_id; + pic_params->backward_reference_picture = self->reference_frames[1]->frame_id; + + LOG_V( "B frame, surface ID %u, forw ref %d, back ref %d\n", (guint)frame->frame_id, (guint)self->reference_frames[0]->frame_id, (guint)self->reference_frames[1]->frame_id); + LOG_V( "mix_video vinfo: Frame type is B\n"); + break; + + case VC1_PTYPE_BI: + pic_params->forward_reference_picture = VA_INVALID_SURFACE; + pic_params->backward_reference_picture = VA_INVALID_SURFACE; + LOG_V( "BI frame\n"); + LOG_V( "mix_video vinfo: Frame type is BI\n"); + break; + + case VC1_PTYPE_SKIPPED: + //Will never happen here + break; + + default: + LOG_V( "Hit default\n"); + break; + + } + + //Loop filter handling + if (self->loopFilter) + { + LOG_V( "Setting in loop decoded picture to current frame\n"); + LOG_V( "Double checking picparams inloop filter is %d\n", pic_params->entrypoint_fields.bits.loopfilter); + pic_params->inloop_decoded_picture = frame->frame_id; + } + else + { + LOG_V( "Setting in loop decoded picture to invalid\n"); + pic_params->inloop_decoded_picture = VA_INVALID_SURFACE; + } + + //Libva buffer set up + + vadisplay = mix->va_display; + vacontext = mix->va_context; + + LOG_V( "Creating libva picture parameter buffer\n"); + + //First the picture parameter buffer + vret = vaCreateBuffer( + vadisplay, + vacontext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferVC1), + 1, + pic_params, + &buffer_ids[buffer_id_cnt]); + + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto cleanup; + } 
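+ /* buffer_ids was sized above for (num_slices * 2) + 2 entries: one picture parameter buffer, at most one bitplane buffer, and a parameter buffer plus a data buffer per slice, so buffer_id_cnt cannot overrun it. */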
+ + LOG_V( "Creating libva bitplane buffer\n"); + + if (pic_params->bitplane_present.value) + { + //Then the bitplane buffer + vret = vaCreateBuffer( + vadisplay, + vacontext, + VABitPlaneBufferType, + pic_data->size_bitplanes, + 1, + pic_data->packed_bitplanes, + &buffer_ids[buffer_id_cnt]); + + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto cleanup; + } + } + + //Now for slices + int i = 0; + for (; i < pic_data->num_slices; i++) + { + LOG_V( "Creating libva slice parameter buffer, for slice %d\n", i); + + //Do slice parameters + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferVC1), + 1, + &(pic_data->slc_data[i].slc_parms), + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto cleanup; + } + + buffer_id_cnt++; + + LOG_V( "Creating libva slice data buffer for slice %d, using slice address %x, with offset %d and size %u\n", i, (guint)pic_data->slc_data[i].buffer_addr, pic_data->slc_data[i].slc_parms.slice_data_offset, pic_data->slc_data[i].slice_size); + + + //Do slice data + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceDataBufferType, + //size + pic_data->slc_data[i].slice_size, + //num_elements + 1, + //slice data buffer pointer + //Note that this is the original data buffer ptr; + // offset to the actual slice data is provided in + // slice_data_offset in VASliceParameterBufferVC1 + pic_data->slc_data[i].buffer_addr + pic_data->slc_data[i].slice_offset, + &buffer_ids[buffer_id_cnt]); + + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto cleanup; + } + } + + + LOG_V( "Calling vaBeginPicture\n"); + + //Now we can begin the picture + vret = vaBeginPicture(vadisplay, vacontext, surface); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaBeginPicture\n"); + goto cleanup; + } + + LOG_V( "Calling vaRenderPicture\n"); + + //Render the picture + vret = vaRenderPicture( + vadisplay, + vacontext, + buffer_ids, + buffer_id_cnt); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaRenderPicture\n"); + goto cleanup; + } + + LOG_V( "Calling vaEndPicture\n"); + + //End picture + vret = vaEndPicture(vadisplay, vacontext); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaEndPicture\n"); + goto cleanup; + } + + LOG_V( "Calling vaSyncSurface\n"); + + //Decode the picture + vret = vaSyncSurface(vadisplay, surface); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaSyncSurface\n"); + goto cleanup; + } + +cleanup: + if (NULL != buffer_ids) + g_free(buffer_ids); + + return ret; +} + + +MIX_RESULT mix_videofmt_vc1_process_decode( + MixVideoFormat *mix, + vbp_data_vc1 *data, + guint64 timestamp, + gboolean discontinuity) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + gboolean unrefVideoFrame = FALSE; + MixVideoFrame *frame = NULL; + + //TODO Partition this method into smaller methods + + LOG_V( "Begin\n"); + + if ((mix == NULL) || (data == NULL)) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + if (0 == data->num_pictures || NULL == data->pic_data) + { + return 
MIX_RESULT_INVALID_PARAM; + } + + if (!MIX_IS_VIDEOFORMAT_VC1(mix)) + { + return MIX_RESULT_INVALID_PARAM; + } + + //After this point, all exits from this function are through cleanup: + MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(mix); + + //Check for skipped frame + //For skipped frames, we will reuse the last P or I frame surface and treat as P frame + if (data->pic_data[0].picture_is_skipped == VC1_PTYPE_SKIPPED) + { + + LOG_V( "mix_video vinfo: Frame type is SKIPPED\n"); + if (self->lastFrame == NULL) + { + //we shouldn't get a skipped frame before we are able to get a real frame + LOG_E( "Error for skipped frame, prev frame is NULL\n"); + ret = MIX_RESULT_DROPFRAME; + goto cleanup; + } + + //We don't worry about this memory allocation because SKIPPED is not a common case + //Doing the allocation on the fly is a more efficient choice than trying to manage yet another pool + MixVideoFrame *skip_frame = mix_videoframe_new(); + if (skip_frame == NULL) + { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating new video frame object for skipped frame\n"); + goto cleanup; + } + + mix_videoframe_set_is_skipped(skip_frame, TRUE); +// mix_videoframe_ref(skip_frame); + mix_videoframe_ref(self->lastFrame); + gulong frameid = VA_INVALID_SURFACE; + mix_videoframe_get_frame_id(self->lastFrame, &frameid); + mix_videoframe_set_frame_id(skip_frame, frameid); + mix_videoframe_set_frame_type(skip_frame, VC1_PTYPE_P); + mix_videoframe_set_real_frame(skip_frame, self->lastFrame); + mix_videoframe_set_timestamp(skip_frame, timestamp); + mix_videoframe_set_discontinuity(skip_frame, FALSE); + LOG_V( "Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n", (guint)skip_frame, (guint)frameid, timestamp); + + //Process reference frames + LOG_V( "Updating skipped frame forward/backward references for libva\n"); + mix_videofmt_vc1_handle_ref_frames(mix, + VC1_PTYPE_P, + skip_frame); + + //Enqueue the skipped frame using frame manager + ret = mix_framemanager_enqueue(mix->framemgr, skip_frame); + + goto cleanup; + + } + + ret = mix_surfacepool_get(mix->surfacepool, &frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error getting frame from surfacepool\n"); + goto cleanup; + + } + unrefVideoFrame = TRUE; + + // TO DO: handle multiple frames parsed from a sample buffer + int index; + int num_pictures = (data->num_pictures > 1) ? 
2 : 1; + + for (index = 0; index < num_pictures; index++) + { + ret = mix_videofmt_vc1_decode_a_picture(mix, data, index, frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Failed to decode a picture.\n"); + goto cleanup; + } + } + + //Set the discontinuity flag + mix_videoframe_set_discontinuity(frame, discontinuity); + + //Set the timestamp + mix_videoframe_set_timestamp(frame, timestamp); + + // setup frame structure + if (data->num_pictures > 1) + { + if (data->pic_data[0].pic_parms->picture_fields.bits.is_first_field) + mix_videoframe_set_frame_structure(frame, VA_TOP_FIELD); + else + mix_videoframe_set_frame_structure(frame, VA_BOTTOM_FIELD); + } + else + { + mix_videoframe_set_frame_structure(frame, VA_FRAME_PICTURE); + } + + enum _picture_type frame_type = data->pic_data[0].pic_parms->picture_fields.bits.picture_type; + + //For I or P frames + //Save this frame off for skipped frame handling + if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P)) + { + if (self->lastFrame != NULL) + { + mix_videoframe_unref(self->lastFrame); + } + self->lastFrame = frame; + mix_videoframe_ref(frame); + } + + //Update the references frames for the current frame + if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P)) //If I or P frame, update the reference array + { + LOG_V( "Updating forward/backward references for libva\n"); + mix_videofmt_vc1_handle_ref_frames(mix, + frame_type, + frame); + } + +//TODO Complete YUVDUMP code and move into base class +#ifdef YUVDUMP + if (mix_video_vc1_counter < 10) + ret = GetImageFromSurface (mix, frame); +// g_usleep(5000000); +#endif /* YUVDUMP */ + + LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", timestamp); + + //Enqueue the decoded frame using frame manager + ret = mix_framemanager_enqueue(mix->framemgr, frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error enqueuing frame object\n"); + goto cleanup; + } + unrefVideoFrame = FALSE; + + +cleanup: + + mix_videofmt_vc1_release_input_buffers(mix, timestamp); + if (unrefVideoFrame) + mix_videoframe_unref(frame); + + + LOG_V( "End\n"); + + return ret; +} + +MIX_RESULT mix_videofmt_vc1_flush(MixVideoFormat *mix) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (mix == NULL) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + uint32 pret = 0; + MixInputBufferEntry *bufentry = NULL; + + /* Chainup parent method. + We are not chaining up to parent method for now. 
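+ (The flush work is done locally below: drain the input buffer queue, reset the parser state and release the reference frames.)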
+ */ + +#if 0 + if (parent_class->flush) + { + return parent_class->flush(mix, msg); + } +#endif + + MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(mix); + + g_mutex_lock(mix->objectlock); + + //Clear the contents of inputbufqueue + while (!g_queue_is_empty(mix->inputbufqueue)) + { + bufentry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue); + if (bufentry == NULL) + continue; + + mix_buffer_unref(bufentry->buf); + g_free(bufentry); + } + + //Clear parse_in_progress flag and current timestamp + mix->parse_in_progress = FALSE; + mix->discontinuity_frame_in_progress = FALSE; + mix->current_timestamp = 0; + + int i = 0; + for (; i < 2; i++) + { + if (self->reference_frames[i] != NULL) + { + mix_videoframe_unref(self->reference_frames[i]); + self->reference_frames[i] = NULL; + } + } + + //Call parser flush + pret = vbp_flush(mix->parser_handle); + if (pret != VBP_OK) + ret = MIX_RESULT_FAIL; + + g_mutex_unlock(mix->objectlock); + + LOG_V( "End\n"); + + return ret; +} + +MIX_RESULT mix_videofmt_vc1_eos(MixVideoFormat *mix) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + vbp_data_vc1 *data = NULL; + uint32 pret = 0; + + if (mix == NULL) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + + /* Chainup parent method. + We are not chaining up to parent method for now. + */ + +#if 0 + if (parent_class->eos) + { + return parent_class->eos(mix, msg); + } +#endif + + g_mutex_lock(mix->objectlock); + + //if a frame is in progress, process the frame + if (mix->parse_in_progress) + { + //query for data + pret = vbp_query(mix->parser_handle, (void *) &data); + + if ((pret != VBP_OK) || (data == NULL)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error getting last parse data\n"); + goto cleanup; + } + + //process and decode data + ret = mix_videofmt_vc1_process_decode(mix, + data, mix->current_timestamp, + mix->discontinuity_frame_in_progress); + mix->parse_in_progress = FALSE; + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error processing last frame\n"); + goto cleanup; + } + } + +cleanup: + + g_mutex_unlock(mix->objectlock); + + //Call Frame Manager with _eos() + ret = mix_framemanager_eos(mix->framemgr); + + LOG_V( "End\n"); + + return ret; +} + +MIX_RESULT mix_videofmt_vc1_deinitialize(MixVideoFormat *mix) +{ + //Note this method is not called; may remove in future + if (mix == NULL) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + /* Chainup parent method. 
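+ Unlike the other vmethods, here we do chain up, since the base class owns most of the teardown.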
+ */ + + if (parent_class->deinitialize) + { + return parent_class->deinitialize(mix); + } + + //Most stuff is cleaned up in parent_class->finalize() and in _finalize + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmt_vc1_handle_ref_frames( + MixVideoFormat *mix, + enum _picture_type frame_type, + MixVideoFrame * current_frame) +{ + + LOG_V( "Begin\n"); + + if (mix == NULL || current_frame == NULL) + { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(mix); + + + switch (frame_type) + { + case VC1_PTYPE_I: // I frame type + case VC1_PTYPE_P: // P frame type + LOG_V( "Refing reference frame %x\n", (guint) current_frame); + mix_videoframe_ref(current_frame); + + //If we have B frames, we need to keep forward and backward reference frames + if (self->haveBframes) + { + if (self->reference_frames[0] == NULL) //should only happen on first frame + { + self->reference_frames[0] = current_frame; +// self->reference_frames[1] = NULL; + } + else if (self->reference_frames[1] == NULL) //should only happen on second frame + { + self->reference_frames[1] = current_frame; + } + else + { + LOG_V( "Releasing reference frame %x\n", (guint) self->reference_frames[0]); + mix_videoframe_unref(self->reference_frames[0]); + self->reference_frames[0] = self->reference_frames[1]; + self->reference_frames[1] = current_frame; + } + } + else //No B frames in this content, only need to keep the forward reference frame + { + LOG_V( "Releasing reference frame %x\n", (guint) self->reference_frames[0]); + if (self->reference_frames[0] != NULL) + mix_videoframe_unref(self->reference_frames[0]); + self->reference_frames[0] = current_frame; + + } + break; + case VC1_PTYPE_B: // B or BI frame type (should not happen) + case VC1_PTYPE_BI: + default: + LOG_E( "Wrong frame type for handling reference frames\n"); + return MIX_RESULT_FAIL; + break; + + } + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmt_vc1_release_input_buffers( + MixVideoFormat *mix, + guint64 timestamp) +{ + MixInputBufferEntry *bufentry = NULL; + gboolean done = FALSE; + + LOG_V( "Begin\n"); + + if (mix == NULL) + return MIX_RESULT_NULL_PTR; + + //Dequeue and release all input buffers for this frame + + LOG_V( "Releasing all the MixBuffers for this frame\n"); + + //While the head of the queue has timestamp == current ts + //dequeue the entry, unref the MixBuffer, and free the struct + done = FALSE; + while (!done) + { + bufentry = (MixInputBufferEntry *) g_queue_peek_head(mix->inputbufqueue); + if (bufentry == NULL) + break; + + LOG_V( "head of queue buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)bufentry->buf, timestamp, bufentry->timestamp); + + if (bufentry->timestamp != timestamp) + { + LOG_V( "buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)bufentry->buf, timestamp, bufentry->timestamp); + done = TRUE; + break; + } + + bufentry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue); + + LOG_V( "Unref this MixBuffers %x\n", (guint)bufentry->buf); + mix_buffer_unref(bufentry->buf); + g_free(bufentry); + } + + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} + + diff --git a/mix_video/src/mixvideoformat_vc1.h b/mix_video/src/mixvideoformat_vc1.h new file mode 100644 index 0000000..9af8a8d --- /dev/null +++ b/mix_video/src/mixvideoformat_vc1.h @@ -0,0 +1,123 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights 
Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEOFORMAT_VC1_H__ +#define __MIX_VIDEOFORMAT_VC1_H__ + +#include "mixvideoformat.h" +#include "mixvideoframe_private.h" + +//Note: this is only a max limit. Actual number of surfaces allocated is calculated in mix_videoformat_vc1_initialize() +#define MIX_VIDEO_VC1_SURFACE_NUM 8 + +/* + * Type macros. + */ +#define MIX_TYPE_VIDEOFORMAT_VC1 (mix_videoformat_vc1_get_type ()) +#define MIX_VIDEOFORMAT_VC1(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMAT_VC1, MixVideoFormat_VC1)) +#define MIX_IS_VIDEOFORMAT_VC1(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMAT_VC1)) +#define MIX_VIDEOFORMAT_VC1_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMAT_VC1, MixVideoFormat_VC1Class)) +#define MIX_IS_VIDEOFORMAT_VC1_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMAT_VC1)) +#define MIX_VIDEOFORMAT_VC1_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMAT_VC1, MixVideoFormat_VC1Class)) + +typedef struct _MixVideoFormat_VC1 MixVideoFormat_VC1; +typedef struct _MixVideoFormat_VC1Class MixVideoFormat_VC1Class; + +struct _MixVideoFormat_VC1 { + /*< public > */ + MixVideoFormat parent; + + /*< public > */ + + /*< private > */ + MixVideoFrame * reference_frames[2]; + gboolean haveBframes; + gboolean loopFilter; + MixVideoFrame * lastFrame; +}; + +/** + * MixVideoFormat_VC1Class: + * + * MI-X Video object class + */ +struct _MixVideoFormat_VC1Class { + /*< public > */ + MixVideoFormatClass parent_class; + + /* class members */ + + /*< public > */ +}; + +/** + * mix_videoformat_vc1_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videoformat_vc1_get_type(void); + +/** + * mix_videoformat_vc1_new: + * @returns: A newly allocated instance of #MixVideoFormat_VC1 + * + * Use this method to create new instance of #MixVideoFormat_VC1 + */ +MixVideoFormat_VC1 *mix_videoformat_vc1_new(void); + +/** + * mix_videoformat_vc1_ref: + * @mix: object to add reference + * @returns: the MixVideoFormat_VC1 instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoFormat_VC1 *mix_videoformat_vc1_ref(MixVideoFormat_VC1 * mix); + +/** + * mix_videoformat_vc1_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. 
+ */ +#define mix_videoformat_vc1_unref(obj) g_object_unref (G_OBJECT(obj)) + +/* Class Methods */ + +/* VC1 vmethods */ +MIX_RESULT mix_videofmt_vc1_getcaps(MixVideoFormat *mix, GString *msg); +MIX_RESULT mix_videofmt_vc1_initialize(MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); +MIX_RESULT mix_videofmt_vc1_decode(MixVideoFormat *mix, MixBuffer * bufin[], + gint bufincnt, MixVideoDecodeParams * decode_params); +MIX_RESULT mix_videofmt_vc1_flush(MixVideoFormat *mix); +MIX_RESULT mix_videofmt_vc1_eos(MixVideoFormat *mix); +MIX_RESULT mix_videofmt_vc1_deinitialize(MixVideoFormat *mix); + +/* Local Methods */ + +MIX_RESULT mix_videofmt_vc1_handle_ref_frames(MixVideoFormat *mix, + enum _picture_type frame_type, + MixVideoFrame * current_frame); + + +MIX_RESULT mix_videofmt_vc1_process_decode(MixVideoFormat *mix, + vbp_data_vc1 *data, + guint64 timestamp, + gboolean discontinuity); + + +MIX_RESULT mix_videofmt_vc1_release_input_buffers(MixVideoFormat *mix, + guint64 timestamp); + +#endif /* __MIX_VIDEOFORMAT_VC1_H__ */ diff --git a/mix_video/src/mixvideoformatenc.c b/mix_video/src/mixvideoformatenc.c new file mode 100644 index 0000000..e7d1e8e --- /dev/null +++ b/mix_video/src/mixvideoformatenc.c @@ -0,0 +1,502 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+ */
+#include <glib.h>
+#include "mixvideolog.h"
+#include "mixvideoformatenc.h"
+
+//#define MDEBUG
+
+/* Default vmethods implementation */
+static MIX_RESULT mix_videofmtenc_getcaps_default(MixVideoFormatEnc *mix,
+        GString *msg);
+static MIX_RESULT mix_videofmtenc_initialize_default(MixVideoFormatEnc *mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay vadisplay);
+
+static MIX_RESULT
+mix_videofmtenc_encode_default(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params);
+static MIX_RESULT mix_videofmtenc_flush_default(MixVideoFormatEnc *mix);
+static MIX_RESULT mix_videofmtenc_eos_default(MixVideoFormatEnc *mix);
+static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix);
+static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default(
+        MixVideoFormatEnc *mix, guint *max_size);
+
+
+static GObjectClass *parent_class = NULL;
+
+static void mix_videoformatenc_finalize(GObject * obj);
+G_DEFINE_TYPE (MixVideoFormatEnc, mix_videoformatenc, G_TYPE_OBJECT);
+
+static void mix_videoformatenc_init(MixVideoFormatEnc * self) {
+    /* TODO: public member initialization */
+
+    /* TODO: private member initialization */
+
+    self->objectlock = g_mutex_new();
+
+    self->initialized = FALSE;
+    self->framemgr = NULL;
+    self->surfacepool = NULL;
+    self->inputbufpool = NULL;
+    self->inputbufqueue = NULL;
+    self->va_display = NULL;
+    self->va_context = 0;
+    self->va_config = 0;
+    self->mime_type = NULL;
+    self->frame_rate_num = 0;
+    self->frame_rate_denom = 1;
+    self->picture_width = 0;
+    self->picture_height = 0;
+    self->initial_qp = 0;
+    self->min_qp = 0;
+    self->intra_period = 0;
+    self->bitrate = 0;
+    self->share_buf_mode = FALSE;
+    self->ci_frame_id = NULL;
+    self->ci_frame_num = 0;
+    self->drawable = 0x0;
+    self->need_display = TRUE;
+
+    self->va_rcmode = VA_RC_NONE;
+    self->va_format = VA_RT_FORMAT_YUV420;
+    self->va_entrypoint = VAEntrypointEncSlice;
+    self->va_profile = VAProfileH264Baseline;
+
+    //add more properties here
+}
+
+static void mix_videoformatenc_class_init(MixVideoFormatEncClass * klass) {
+    GObjectClass *gobject_class = (GObjectClass *) klass;
+
+    /* parent class for later use */
+    parent_class = g_type_class_peek_parent(klass);
+
+    gobject_class->finalize = mix_videoformatenc_finalize;
+
+    /* setup vmethods with base implementation */
+    klass->getcaps = mix_videofmtenc_getcaps_default;
+    klass->initialize = mix_videofmtenc_initialize_default;
+    klass->encode = mix_videofmtenc_encode_default;
+    klass->flush = mix_videofmtenc_flush_default;
+    klass->eos = mix_videofmtenc_eos_default;
+    klass->deinitialize = mix_videofmtenc_deinitialize_default;
+    klass->getmaxencodedbufsize = mix_videofmtenc_get_max_coded_buffer_size_default;
+}
+
+MixVideoFormatEnc *
+mix_videoformatenc_new(void) {
+    MixVideoFormatEnc *ret = g_object_new(MIX_TYPE_VIDEOFORMATENC, NULL);
+
+    return ret;
+}
+
+void mix_videoformatenc_finalize(GObject * obj) {
+    /* clean up here. */
+
+    if (obj == NULL) {
+        LOG_E( "obj == NULL\n");
+        return;
+    }
+
+    MixVideoFormatEnc *mix = MIX_VIDEOFORMATENC(obj);
+
+    LOG_V( "\n");
+
+    if(mix->objectlock) {
+        g_mutex_free(mix->objectlock);
+        mix->objectlock = NULL;
+    }
+
+    //MixVideo object calls the _deinitialize() for the frame manager
+    if (mix->framemgr)
+    {
+        mix_framemanager_unref(mix->framemgr);
+        mix->framemgr = NULL;
+    }
+
+    if (mix->mime_type)
+    {
+        if (mix->mime_type->str)
+            g_string_free(mix->mime_type, TRUE);
+        else
+            g_string_free(mix->mime_type, FALSE);
+    }
+
+    if (mix->ci_frame_id)
+        g_free (mix->ci_frame_id);
+
+
+    if (mix->surfacepool)
+    {
+        mix_surfacepool_deinitialize(mix->surfacepool);
+        mix_surfacepool_unref(mix->surfacepool);
+        mix->surfacepool = NULL;
+    }
+
+
+    /* TODO: cleanup here */
+
+    /* Chain up parent */
+    if (parent_class->finalize) {
+        parent_class->finalize(obj);
+    }
+}
+
+MixVideoFormatEnc *
+mix_videoformatenc_ref(MixVideoFormatEnc * mix) {
+    return (MixVideoFormatEnc *) g_object_ref(G_OBJECT(mix));
+}
+
+/* Default vmethods implementation */
+static MIX_RESULT mix_videofmtenc_getcaps_default(MixVideoFormatEnc *mix,
+        GString *msg) {
+    LOG_V( "Begin\n");
+    return MIX_RESULT_SUCCESS;
+}
+
+static MIX_RESULT mix_videofmtenc_initialize_default(MixVideoFormatEnc *mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay va_display) {
+
+    LOG_V( "Begin\n");
+
+    if (mix == NULL || config_params_enc == NULL) {
+        LOG_E(
+                "!mix || config_params_enc == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    //TODO check return values of getter fns for config_params
+
+    g_mutex_lock(mix->objectlock);
+
+    mix->framemgr = frame_mgr;
+    mix_framemanager_ref(mix->framemgr);
+
+    mix->va_display = va_display;
+
+    LOG_V(
+            "Start to get properties from parent params\n");
+
+    /* get properties from param (parent) Object*/
+    ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc,
+            &(mix->bitrate));
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_bit_rate\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_frame_rate (config_params_enc,
+            &(mix->frame_rate_num), &(mix->frame_rate_denom));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_frame_rate\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc,
+            &(mix->initial_qp));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_init_qp\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+
+    ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc,
+            &(mix->min_qp));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_min_qp\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc,
+            &(mix->intra_period));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_intra_period\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc,
+            &(mix->picture_width), &(mix->picture_height));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_picture_res\n");
+ "Failed to mix_videoconfigparamsenc_get_picture_res\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + ret = mix_videoconfigparamsenc_get_share_buf_mode (config_params_enc, + &(mix->share_buf_mode)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_share_buf_mode\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + + ret = mix_videoconfigparamsenc_get_ci_frame_info (config_params_enc, + &(mix->ci_frame_id), &(mix->ci_frame_num)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_ci_frame_info\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + + ret = mix_videoconfigparamsenc_get_drawable (config_params_enc, + &(mix->drawable)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_drawable\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + ret = mix_videoconfigparamsenc_get_need_display (config_params_enc, + &(mix->need_display)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_drawable\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc, + &(mix->va_rcmode)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_rc_mode\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + ret = mix_videoconfigparamsenc_get_raw_format (config_params_enc, + &(mix->va_format)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_format\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + ret = mix_videoconfigparamsenc_get_profile (config_params_enc, + (MixProfile *) &(mix->va_profile)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_profile\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + + LOG_V( + "======Video Encode Parent Object properities======:\n"); + + LOG_I( "mix->bitrate = %d\n", + mix->bitrate); + LOG_I( "mix->frame_rate = %d\n", + mix->frame_rate_denom / mix->frame_rate_denom); + LOG_I( "mix->initial_qp = %d\n", + mix->initial_qp); + LOG_I( "mix->min_qp = %d\n", + mix->min_qp); + LOG_I( "mix->intra_period = %d\n", + mix->intra_period); + LOG_I( "mix->picture_width = %d\n", + mix->picture_width); + LOG_I( "mix->picture_height = %d\n", + mix->picture_height); + LOG_I( "mix->share_buf_mode = %d\n", + mix->share_buf_mode); + LOG_I( "mix->ci_frame_id = 0x%08x\n", + mix->ci_frame_id); + LOG_I( "mix->ci_frame_num = %d\n", + mix->ci_frame_num); + LOG_I( "mix->drawable = 0x%08x\n", + mix->drawable); + LOG_I( "mix->need_display = %d\n", + mix->need_display); + LOG_I( "mix->va_format = %d\n", + mix->va_format); + LOG_I( "mix->va_profile = %d\n", + mix->va_profile); + LOG_I( "mix->va_rcmode = %d\n\n", + mix->va_rcmode); + + g_mutex_unlock(mix->objectlock); + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +static MIX_RESULT mix_videofmtenc_encode_default (MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params) { + return MIX_RESULT_SUCCESS; +} + +static MIX_RESULT mix_videofmtenc_flush_default(MixVideoFormatEnc *mix) { + return MIX_RESULT_SUCCESS; +} + +static MIX_RESULT 
+static MIX_RESULT
+mix_videofmtenc_eos_default(MixVideoFormatEnc *mix) {
+    return MIX_RESULT_SUCCESS;
+}
+
+static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix) {
+
+    //TODO decide whether to put any of the teardown from _finalize() here
+
+    return MIX_RESULT_SUCCESS;
+}
+
+static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default(
+        MixVideoFormatEnc *mix, guint *max_size) {
+
+
+    return MIX_RESULT_SUCCESS;
+}
+
+/* mixvideoformatenc class methods implementation */
+
+MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg) {
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+
+    LOG_V( "Begin\n");
+
+    if (klass->getcaps) {
+        return klass->getcaps(mix, msg);
+    }
+    return MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay va_display) {
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+
+    /*frame_mgr and input_buf_pool are reserved for future use*/
+    if (klass->initialize) {
+        return klass->initialize(mix, config_params_enc, frame_mgr,
+                input_buf_pool, surface_pool, va_display);
+    }
+
+    return MIX_RESULT_FAIL;
+
+}
+
+MIX_RESULT mix_videofmtenc_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params) {
+
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+    if (klass->encode) {
+        return klass->encode(mix, bufin, bufincnt, iovout, iovoutcnt, encode_params);
+    }
+
+    return MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_videofmtenc_flush(MixVideoFormatEnc *mix) {
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+    if (klass->flush) {
+        return klass->flush(mix);
+    }
+
+    return MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix) {
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+    if (klass->eos) {
+        return klass->eos(mix);
+    }
+
+    return MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix) {
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+    if (klass->deinitialize) {
+        return klass->deinitialize(mix);
+    }
+
+    return MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, guint * max_size) {
+
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+    if (klass->getmaxencodedbufsize) {
+        return klass->getmaxencodedbufsize(mix, max_size);
+    }
+
+    return MIX_RESULT_FAIL;
+}
diff --git a/mix_video/src/mixvideoformatenc.h b/mix_video/src/mixvideoformatenc.h
new file mode 100644
index 0000000..0e1c07a
--- /dev/null
+++ b/mix_video/src/mixvideoformatenc.h
@@ -0,0 +1,178 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOFORMATENC_H__
+#define __MIX_VIDEOFORMATENC_H__
+
+#include <glib-object.h>
+#include <va/va.h>
+#include "mixvideodef.h"
+#include "mixdrmparams.h"
+#include "mixvideoconfigparamsenc.h"
+#include "mixvideoframe.h"
+#include "mixframemanager.h"
+#include "mixsurfacepool.h"
+#include "mixbuffer.h"
+#include "mixbufferpool.h"
+#include "mixvideoformatqueue.h"
+#include "mixvideoencodeparams.h"
+
+/*
+ * Type macros.
+ */
+#define MIX_TYPE_VIDEOFORMATENC (mix_videoformatenc_get_type ())
+#define MIX_VIDEOFORMATENC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC, MixVideoFormatEnc))
+#define MIX_IS_VIDEOFORMATENC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC))
+#define MIX_VIDEOFORMATENC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC, MixVideoFormatEncClass))
+#define MIX_IS_VIDEOFORMATENC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC))
+#define MIX_VIDEOFORMATENC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC, MixVideoFormatEncClass))
+
+typedef struct _MixVideoFormatEnc MixVideoFormatEnc;
+typedef struct _MixVideoFormatEncClass MixVideoFormatEncClass;
+
+/* vmethods typedef */
+
+/* TODO: change return type and method parameters */
+typedef MIX_RESULT (*MixVideoFmtEncGetCapsFunc)(MixVideoFormatEnc *mix, GString *msg);
+typedef MIX_RESULT (*MixVideoFmtEncInitializeFunc)(MixVideoFormatEnc *mix,
+        MixVideoConfigParamsEnc* config_params_enc,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay va_display);
+typedef MIX_RESULT (*MixVideoFmtEncodeFunc)(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params);
+typedef MIX_RESULT (*MixVideoFmtEncFlushFunc)(MixVideoFormatEnc *mix);
+typedef MIX_RESULT (*MixVideoFmtEncEndOfStreamFunc)(MixVideoFormatEnc *mix);
+typedef MIX_RESULT (*MixVideoFmtEncDeinitializeFunc)(MixVideoFormatEnc *mix);
+typedef MIX_RESULT (*MixVideoFmtEncGetMaxEncodedBufSizeFunc) (MixVideoFormatEnc *mix, guint *max_size);
+
+struct _MixVideoFormatEnc {
+    /*< public > */
+    GObject parent;
+
+    /*< public > */
+
+    /*< private > */
+    GMutex *objectlock;
+    gboolean initialized;
+    MixFrameManager *framemgr;
+    MixSurfacePool *surfacepool;
+    VADisplay va_display;
+    VAContextID va_context;
+    VAConfigID va_config;
+    GString *mime_type;
+
+    guint frame_rate_num;
+    guint frame_rate_denom;
+    guint picture_width;
+    guint picture_height;
+
+    guint initial_qp;
+    guint min_qp;
+    guint intra_period;
+    guint bitrate;
+
+    gboolean share_buf_mode;
+    gulong * ci_frame_id;
+    guint ci_frame_num;
+
+    gulong drawable;
+    gboolean need_display;
+
+    VAProfile va_profile;
+    VAEntrypoint va_entrypoint;
+    guint va_format;
+    guint va_rcmode;
+
+
+    MixBufferPool *inputbufpool;
+    GQueue *inputbufqueue;
+};
+
+/**
+ * MixVideoFormatEncClass:
+ *
+ * MI-X Video object class
+ */
+struct _MixVideoFormatEncClass {
+    /*< public > */
+
GObjectClass parent_class; + + /* class members */ + + /*< public > */ + MixVideoFmtEncGetCapsFunc getcaps; + MixVideoFmtEncInitializeFunc initialize; + MixVideoFmtEncodeFunc encode; + MixVideoFmtEncFlushFunc flush; + MixVideoFmtEncEndOfStreamFunc eos; + MixVideoFmtEncDeinitializeFunc deinitialize; + MixVideoFmtEncGetMaxEncodedBufSizeFunc getmaxencodedbufsize; +}; + +/** + * mix_videoformatenc_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videoformatenc_get_type(void); + +/** + * mix_videoformatenc_new: + * @returns: A newly allocated instance of #MixVideoFormatEnc + * + * Use this method to create new instance of #MixVideoFormatEnc + */ +MixVideoFormatEnc *mix_videoformatenc_new(void); + +/** + * mix_videoformatenc_ref: + * @mix: object to add reference + * @returns: the MixVideoFormatEnc instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoFormatEnc *mix_videoformatenc_ref(MixVideoFormatEnc * mix); + +/** + * mix_videoformatenc_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +#define mix_videoformatenc_unref(obj) g_object_unref (G_OBJECT(obj)) + +/* Class Methods */ + +/* TODO: change method parameter list */ +MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg); + +MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * enc_config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); + +MIX_RESULT mix_videofmtenc_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params); + +MIX_RESULT mix_videofmtenc_flush(MixVideoFormatEnc *mix); + +MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix); + +MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix); + +MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, guint *max_size); + + +#endif /* __MIX_VIDEOFORMATENC_H__ */ diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c new file mode 100644 index 0000000..8472e93 --- /dev/null +++ b/mix_video/src/mixvideoformatenc_h264.c @@ -0,0 +1,1954 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+ */
+#include <glib.h>
+#include <string.h>
+#include <stdlib.h>
+
+#include "mixvideolog.h"
+
+#include "mixvideoformatenc_h264.h"
+#include "mixvideoconfigparamsenc_h264.h"
+
+#define MDEBUG
+#undef SHOW_SRC
+
+#ifdef SHOW_SRC
+Window win = 0;
+#endif /* SHOW_SRC */
+
+
+/* The parent class. The pointer will be saved
+ * in this class's initialization. The pointer
+ * can be used for chaining method call if needed.
+ */
+static MixVideoFormatEncClass *parent_class = NULL;
+
+static void mix_videoformatenc_h264_finalize(GObject * obj);
+
+/*
+ * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC
+ */
+G_DEFINE_TYPE (MixVideoFormatEnc_H264, mix_videoformatenc_h264, MIX_TYPE_VIDEOFORMATENC);
+
+static void mix_videoformatenc_h264_init(MixVideoFormatEnc_H264 * self) {
+    MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self);
+
+    /* TODO: public member initialization */
+
+    /* TODO: private member initialization */
+    self->encoded_frames = 0;
+    self->pic_skipped = FALSE;
+    self->is_intra = TRUE;
+    self->cur_fame = NULL;
+    self->ref_fame = NULL;
+    self->rec_fame = NULL;
+
+    self->ci_shared_surfaces = NULL;
+    self->surfaces = NULL;
+    self->surface_num = 0;
+
+    parent->initialized = FALSE;
+}
+
+static void mix_videoformatenc_h264_class_init(
+        MixVideoFormatEnc_H264Class * klass) {
+
+    /* root class */
+    GObjectClass *gobject_class = (GObjectClass *) klass;
+
+    /* direct parent class */
+    MixVideoFormatEncClass *video_formatenc_class =
+            MIX_VIDEOFORMATENC_CLASS(klass);
+
+    /* parent class for later use */
+    parent_class = g_type_class_peek_parent(klass);
+
+    /* set up finalizer */
+    gobject_class->finalize = mix_videoformatenc_h264_finalize;
+
+    /* setup vmethods with base implementation */
+    /* TODO: decide if we need to override the parent's methods */
+    video_formatenc_class->getcaps = mix_videofmtenc_h264_getcaps;
+    video_formatenc_class->initialize = mix_videofmtenc_h264_initialize;
+    video_formatenc_class->encode = mix_videofmtenc_h264_encode;
+    video_formatenc_class->flush = mix_videofmtenc_h264_flush;
+    video_formatenc_class->eos = mix_videofmtenc_h264_eos;
+    video_formatenc_class->deinitialize = mix_videofmtenc_h264_deinitialize;
+    video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_h264_get_max_encoded_buf_size;
+}
+
+MixVideoFormatEnc_H264 *
+mix_videoformatenc_h264_new(void) {
+    MixVideoFormatEnc_H264 *ret =
+            g_object_new(MIX_TYPE_VIDEOFORMATENC_H264, NULL);
+
+    return ret;
+}
+
+void mix_videoformatenc_h264_finalize(GObject * obj) {
+    /* clean up here. */
+
+    /*MixVideoFormatEnc_H264 *mix = MIX_VIDEOFORMATENC_H264(obj); */
+    GObjectClass *root_class = (GObjectClass *) parent_class;
+
+    LOG_V( "\n");
+
+    /* Chain up parent */
+    if (root_class->finalize) {
+        root_class->finalize(obj);
+    }
+}
+
+MixVideoFormatEnc_H264 *
+mix_videoformatenc_h264_ref(MixVideoFormatEnc_H264 * mix) {
+    return (MixVideoFormatEnc_H264 *) g_object_ref(G_OBJECT(mix));
+}
+
+/*H.264 vmethods implementation */
+MIX_RESULT mix_videofmtenc_h264_getcaps(MixVideoFormatEnc *mix, GString *msg) {
+
+    /* TODO: add codes for H.264 */
+
+    /* TODO: decide if we need to chainup parent method.
+ * if we do, the following is the code:
+ */
+
+    LOG_V( "mix_videofmtenc_h264_getcaps\n");
+
+    if (mix == NULL) {
+        LOG_E( "mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+
+    if (parent_class->getcaps) {
+        return parent_class->getcaps(mix, msg);
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay va_display) {
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    MixVideoFormatEnc *parent = NULL;
+    MixVideoConfigParamsEncH264 * config_params_enc_h264;
+
+    VAStatus va_status = VA_STATUS_SUCCESS;
+    VASurfaceID * surfaces;
+
+    gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs;
+    gint va_num_profiles, va_num_entrypoints;
+
+    VAProfile *va_profiles = NULL;
+    VAEntrypoint *va_entrypoints = NULL;
+    VAConfigAttrib va_attrib[2];
+    guint index;
+
+
+    /*frame_mgr and input_buf_pool are reserved for future use*/
+
+    if (mix == NULL || config_params_enc == NULL || va_display == NULL) {
+        LOG_E(
+                "mix == NULL || config_params_enc == NULL || va_display == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "begin\n");
+
+
+    //TODO additional parameter checking
+
+    /* Chainup parent method. */
+#if 1
+    if (parent_class->initialize) {
+        ret = parent_class->initialize(mix, config_params_enc,
+                frame_mgr, input_buf_pool, surface_pool,
+                va_display);
+    }
+
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        return ret;
+    }
+
+#endif //set to 0 to disable chaining up
+
+    if (MIX_IS_VIDEOFORMATENC_H264(mix))
+    {
+        parent = MIX_VIDEOFORMATENC(&(mix->parent));
+        MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix);
+
+        if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) {
+            config_params_enc_h264 =
+                    MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc);
+        } else {
+            LOG_V(
+                    "mix_videofmtenc_h264_initialize: no h264 config params found\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        g_mutex_lock(parent->objectlock);
+
+        LOG_V(
+                "Start to get properties from h.264 params\n");
+
+        /* get properties from H264 params Object, which is special to H264 format*/
+        ret = mix_videoconfigparamsenc_h264_get_bus (config_params_enc_h264,
+                &self->basic_unit_size);
+
+        if (ret != MIX_RESULT_SUCCESS) {
+            //TODO cleanup
+            LOG_E(
+                    "Failed to mix_videoconfigparamsenc_h264_get_bus\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+
+        ret = mix_videoconfigparamsenc_h264_get_dlk (config_params_enc_h264,
+                &self->disable_deblocking_filter_idc);
+
+        if (ret != MIX_RESULT_SUCCESS) {
+            //TODO cleanup
+            LOG_E(
+                    "Failed to mix_videoconfigparamsenc_h264_get_dlk\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+
+        ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264,
+                &self->slice_num);
+
+        if (ret != MIX_RESULT_SUCCESS) {
+            //TODO cleanup
+            LOG_E(
+                    "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        ret = mix_videoconfigparamsenc_h264_get_delimiter_type (config_params_enc_h264,
+                &self->delimiter_type);
+
+        if (ret != MIX_RESULT_SUCCESS) {
+            //TODO cleanup
+            LOG_E (
+                    "Failed to mix_videoconfigparamsenc_h264_get_delimiter_type\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        LOG_V(
+                "======H264 Encode Object properties======:\n");
+
+        LOG_I( "self->basic_unit_size = %d\n",
+                self->basic_unit_size);
+        LOG_I( "self->disable_deblocking_filter_idc = %d\n",
+                self->disable_deblocking_filter_idc);
%d\n", + self->disable_deblocking_filter_idc); + LOG_I( "self->slice_num = %d\n", + self->slice_num); + LOG_I ("self->delimiter_type = %d\n", + self->delimiter_type); + + LOG_V( + "Get properities from params done\n"); + + + //display = XOpenDisplay(NULL); + //va_display = vaGetDisplay (videoencobj->display); + + parent->va_display = va_display; + + LOG_V( "Get Display\n"); + LOG_I( "Display = 0x%08x\n", + (guint)va_display); + + //va_status = vaInitialize(va_display, &va_major_ver, &va_minor_ver); + //g_print ("vaInitialize va_status = %d\n", va_status); + + +#if 0 + /* query the vender information, can ignore*/ + va_vendor = vaQueryVendorString (va_display); + LOG_I( "Vendor = %s\n", + va_vendor); +#endif + + /*get the max number for profiles/entrypoints/attribs*/ + va_max_num_profiles = vaMaxNumProfiles(va_display); + LOG_I( "va_max_num_profiles = %d\n", + va_max_num_profiles); + + va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); + LOG_I( "va_max_num_entrypoints = %d\n", + va_max_num_entrypoints); + + va_max_num_attribs = vaMaxNumConfigAttributes(va_display); + LOG_I( "va_max_num_attribs = %d\n", + va_max_num_attribs); + + va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); + va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); + + if (va_profiles == NULL || va_entrypoints ==NULL) + { + LOG_E( + "!va_profiles || !va_entrypoints\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_NO_MEMORY; + } + + LOG_I( + "va_profiles = 0x%08x\n", (guint)va_profiles); + + LOG_V( "vaQueryConfigProfiles\n"); + + + va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigProfiles\n"); + g_free(va_profiles); + g_free (va_entrypoints); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + LOG_V( "vaQueryConfigProfiles Done\n"); + + + + /*check whether profile is supported*/ + for(index= 0; index < va_num_profiles; index++) { + if(parent->va_profile == va_profiles[index]) + break; + } + + if(index == va_num_profiles) + { + LOG_E( "Profile not supported\n"); + g_free(va_profiles); + g_free (va_entrypoints); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; //Todo, add error handling here + } + + LOG_V( "vaQueryConfigEntrypoints\n"); + + + /*Check entry point*/ + va_status = vaQueryConfigEntrypoints(va_display, + parent->va_profile, + va_entrypoints, &va_num_entrypoints); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigEntrypoints\n"); + g_free(va_profiles); + g_free (va_entrypoints); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + for (index = 0; index < va_num_entrypoints; index ++) { + if (va_entrypoints[index] == VAEntrypointEncSlice) { + break; + } + } + + if (index == va_num_entrypoints) { + LOG_E( "Entrypoint not found\n"); + g_free(va_profiles); + g_free (va_entrypoints); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; //Todo, add error handling here + } + + + /*free profiles and entrypoints*/ + g_free(va_profiles); + g_free (va_entrypoints); + + va_attrib[0].type = VAConfigAttribRTFormat; + va_attrib[1].type = VAConfigAttribRateControl; + + LOG_V( "vaGetConfigAttributes\n"); + + va_status = vaGetConfigAttributes(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaGetConfigAttributes\n"); + g_mutex_unlock(parent->objectlock); + return 
+
+        if ((va_attrib[0].value & parent->va_format) == 0) {
+            LOG_E( "Matched format not found\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;  //Todo, add error handling here
+        }
+
+
+        if ((va_attrib[1].value & parent->va_rcmode) == 0) {
+            LOG_E( "RC mode not found\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;  //Todo, add error handling here
+        }
+
+        va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420;
+        va_attrib[1].value = parent->va_rcmode;
+
+        LOG_V( "======VA Configuration======\n");
+
+        LOG_I( "profile = %d\n",
+                parent->va_profile);
+        LOG_I( "va_entrypoint = %d\n",
+                parent->va_entrypoint);
+        LOG_I( "va_attrib[0].type = %d\n",
+                va_attrib[0].type);
+        LOG_I( "va_attrib[1].type = %d\n",
+                va_attrib[1].type);
+        LOG_I( "va_attrib[0].value (Format) = %d\n",
+                va_attrib[0].value);
+        LOG_I( "va_attrib[1].value (RC mode) = %d\n",
+                va_attrib[1].value);
+
+        LOG_V( "vaCreateConfig\n");
+
+        va_status = vaCreateConfig(va_display, parent->va_profile,
+                parent->va_entrypoint,
+                &va_attrib[0], 2, &(parent->va_config));
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaCreateConfig\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        /*TODO: compute the surface number*/
+        int numSurfaces;
+
+        if (parent->share_buf_mode) {
+            numSurfaces = 2;
+        }
+        else {
+            numSurfaces = 8;
+            parent->ci_frame_num = 0;
+        }
+
+        self->surface_num = numSurfaces + parent->ci_frame_num;
+
+        surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces);
+
+        if (surfaces == NULL)
+        {
+            LOG_E(
+                    "Failed to allocate surfaces\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_NO_MEMORY;
+        }
+
+        LOG_V( "vaCreateSurfaces\n");
+
+        va_status = vaCreateSurfaces(va_display, parent->picture_width,
+                parent->picture_height, parent->va_format,
+                numSurfaces, surfaces);
+        //TODO check vret and return fail if needed
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed vaCreateSurfaces\n");
+            g_free (surfaces);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        if (parent->share_buf_mode) {
+
+            LOG_V(
+                    "We are in share buffer mode!\n");
+            self->ci_shared_surfaces =
+                    g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num);
+
+            if (self->ci_shared_surfaces == NULL)
+            {
+                LOG_E(
+                        "Failed to allocate shared surfaces\n");
+                g_free (surfaces);
+                g_mutex_unlock(parent->objectlock);
+                return MIX_RESULT_NO_MEMORY;
+            }
+
+            guint index;
+            for(index = 0; index < parent->ci_frame_num; index++) {
+
+                LOG_I( "ci_frame_id = %lu\n",
+                        parent->ci_frame_id[index]);
+
+                LOG_V(
+                        "vaCreateSurfaceFromCIFrame\n");
+
+                va_status = vaCreateSurfaceFromCIFrame(va_display,
+                        (gulong) (parent->ci_frame_id[index]),
+                        &self->ci_shared_surfaces[index]);
+                if (va_status != VA_STATUS_SUCCESS)
+                {
+                    LOG_E(
+                            "Failed to vaCreateSurfaceFromCIFrame\n");
+                    g_free (surfaces);
+                    g_mutex_unlock(parent->objectlock);
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            LOG_V(
+                    "vaCreateSurfaceFromCIFrame Done\n");
+
+        }// if (parent->share_buf_mode)
+
+        self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num);
+
+        if (self->surfaces == NULL)
+        {
+            LOG_E(
+                    "Failed to allocate private surface array\n");
+            g_free (surfaces);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_NO_MEMORY;
+        }
+
+        if (parent->share_buf_mode) {
+            /*shared surfaces should be put in the pool first,
+             * because we will fetch them according to the CI index*/
+            for(index = 0; index < parent->ci_frame_num; index++)
+                self->surfaces[index] = self->ci_shared_surfaces[index];
+        }
+
+        for(index = 0; index < numSurfaces; index++) {
+            self->surfaces[index + parent->ci_frame_num] = surfaces[index];
+        }
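+
+        /* The surface array now holds the CI shared surfaces at indices
+         * [0, ci_frame_num) followed by the locally created surfaces at
+         * [ci_frame_num, ci_frame_num + numSurfaces). */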
+
+        LOG_V( "assign surface Done\n");
+        LOG_I( "Created %d libva surfaces\n",
+                numSurfaces + parent->ci_frame_num);
+
+#if 0  //currently put this in gst
+        images = g_malloc(sizeof(VAImage)*numSurfaces);
+        if (images == NULL)
+        {
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        for (index = 0; index < numSurfaces; index++) {
+            //Derive an VAImage from an existing surface.
+            //The image buffer can then be mapped/unmapped for CPU access
+            va_status = vaDeriveImage(va_display, surfaces[index],
+                    &images[index]);
+        }
+#endif
+
+        LOG_V( "mix_surfacepool_new\n");
+
+        parent->surfacepool = mix_surfacepool_new();
+        if (surface_pool)
+            *surface_pool = parent->surfacepool;
+        //which is useful to check before encode
+
+        if (parent->surfacepool == NULL)
+        {
+            LOG_E(
+                    "Failed to mix_surfacepool_new\n");
+            g_free (surfaces);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        LOG_V(
+                "mix_surfacepool_initialize\n");
+
+        ret = mix_surfacepool_initialize(parent->surfacepool,
+                self->surfaces, parent->ci_frame_num + numSurfaces);
+
+        switch (ret)
+        {
+            case MIX_RESULT_SUCCESS:
+                break;
+            case MIX_RESULT_ALREADY_INIT:
+                //TODO cleanup and/or retry
+                g_free (surfaces);
+                g_mutex_unlock(parent->objectlock);
+                return MIX_RESULT_FAIL;
+            default:
+                break;
+        }
+
+
+        //Initialize and save the VA context ID
+        LOG_V( "vaCreateContext\n");
+
+        va_status = vaCreateContext(va_display, parent->va_config,
+                parent->picture_width, parent->picture_height,
+                0, self->surfaces, parent->ci_frame_num + numSurfaces,
+                &(parent->va_context));
+
+        LOG_I(
+                "Created libva context width %d, height %d\n",
+                parent->picture_width, parent->picture_height);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaCreateContext\n");
+            LOG_I( "va_status = %d\n",
+                    (guint)va_status);
+            g_free (surfaces);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        guint max_size = 0;
+        ret = mix_videofmtenc_h264_get_max_encoded_buf_size (parent, &max_size);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to mix_videofmtenc_h264_get_max_encoded_buf_size\n");
+            g_free (surfaces);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+
+        }
+
+        /*Create coded buffer for output*/
+        va_status = vaCreateBuffer (va_display, parent->va_context,
+                VAEncCodedBufferType,
+                self->coded_buf_size,  //
+                1, NULL,
+                &self->coded_buf);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaCreateBuffer: VAEncCodedBufferType\n");
+            g_free (surfaces);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+#ifdef SHOW_SRC
+        Display * display = XOpenDisplay (NULL);
+
+        LOG_I( "display = 0x%08x\n",
+                (guint) display);
+        win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0,
+                parent->picture_width, parent->picture_height, 0, 0,
+                WhitePixel(display, 0));
+        XMapWindow(display, win);
+        XSelectInput(display, win, KeyPressMask | StructureNotifyMask);
+
+        XSync(display, False);
+        LOG_I( "va_display = 0x%08x\n",
+                (guint) va_display);
+
+#endif /* SHOW_SRC */
+
+        parent->initialized = TRUE;
+
+        g_mutex_unlock(parent->objectlock);
+        g_free (surfaces);
+
+    }
+    else
+    {
+        LOG_E(
+                "not H264 video encode Object\n");
+        return MIX_RESULT_FAIL;
+
+    }
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
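+
+/* Encode path: each call maps one raw input MixBuffer (or a CI frame index
+ * in shared-buffer mode) onto a VA surface, then submits sequence, picture
+ * and slice parameters before kicking off and syncing the hardware encode. */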
+MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params) {
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    MixVideoFormatEnc *parent = NULL;
+
+    LOG_V( "Begin\n");
+
+    /*currently only support one input and one output buffer*/
+    //TODO: params i
+
+    if (bufincnt != 1 || iovoutcnt != 1) {
+        LOG_E(
+                "buffer count not equal to 1\n");
+        LOG_E(
+                "maybe some exception occurred\n");
+    }
+
+    if (mix == NULL || bufin[0] == NULL || iovout[0] == NULL) {
+        LOG_E(
+                "!mix || !bufin[0] || !iovout[0]\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    //TODO: encode_params is reserved here for future usage.
+
+    /* TODO: decide if we need to chainup parent method.
+     * if we do, the following is the code:
+     */
+
+#if 0
+    if (parent_class->encode) {
+        return parent_class->encode(mix, bufin, bufincnt, iovout,
+                iovoutcnt, encode_params);
+    }
+#endif
+
+    if (MIX_IS_VIDEOFORMATENC_H264(mix))
+    {
+
+        parent = MIX_VIDEOFORMATENC(&(mix->parent));
+        MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264 (mix);
+
+        LOG_V( "Locking\n");
+        g_mutex_lock(parent->objectlock);
+
+
+        //TODO: also we could move some encode preparation work to here
+
+        LOG_V(
+                "mix_videofmtenc_h264_process_encode\n");
+
+        ret = mix_videofmtenc_h264_process_encode (self,
+                bufin[0], iovout[0]);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E(
+                    "Failed mix_videofmtenc_h264_process_encode\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+
+        LOG_V( "UnLocking\n");
+
+        g_mutex_unlock(parent->objectlock);
+    }
+    else
+    {
+        LOG_E(
+                "not H264 video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) {
+
+    //MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    LOG_V( "Begin\n");
+
+    if (mix == NULL) {
+        LOG_E( "mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+
+    /*not chaining to parent flush func*/
+#if 0
+    if (parent_class->flush) {
+        return parent_class->flush(mix, msg);
+    }
+#endif
+
+    MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix);
+
+    g_mutex_lock(mix->objectlock);
+
+#if 0
+    /*unref the current source surface*/
+    if (self->cur_fame != NULL)
+    {
+        mix_videoframe_unref (self->cur_fame);
+        self->cur_fame = NULL;
+    }
+#endif
+
+    /*unref the reconstructed surface*/
+    if (self->rec_fame != NULL)
+    {
+        mix_videoframe_unref (self->rec_fame);
+        self->rec_fame = NULL;
+    }
+
+    /*unref the reference surface*/
+    if (self->ref_fame != NULL)
+    {
+        mix_videoframe_unref (self->ref_fame);
+        self->ref_fame = NULL;
+    }
+
+    /*reset the properties*/
+    self->encoded_frames = 0;
+    self->pic_skipped = FALSE;
+    self->is_intra = TRUE;
+
+    g_mutex_unlock(mix->objectlock);
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix) {
+
+    /* TODO: add codes for H.264 */
+
+    /* TODO: decide if we need to chainup parent method.
+ * if we do, the following is the code:
+ */
+
+    LOG_V( "\n");
+
+    if (mix == NULL) {
+        LOG_E( "mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    if (parent_class->eos) {
+        return parent_class->eos(mix);
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) {
+
+    MixVideoFormatEnc *parent = NULL;
+    VAStatus va_status;
+
+    LOG_V( "Begin\n");
+
+    if (mix == NULL) {
+        LOG_E( "mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    parent = MIX_VIDEOFORMATENC(&(mix->parent));
+    MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix);
+
+    LOG_V( "Release frames\n");
+
+    g_mutex_lock(parent->objectlock);
+
+#if 0
+    /*unref the current source surface*/
+    if (self->cur_fame != NULL)
+    {
+        mix_videoframe_unref (self->cur_fame);
+        self->cur_fame = NULL;
+    }
+#endif
+
+    /*unref the reconstructed surface*/
+    if (self->rec_fame != NULL)
+    {
+        mix_videoframe_unref (self->rec_fame);
+        self->rec_fame = NULL;
+    }
+
+    /*unref the reference surface*/
+    if (self->ref_fame != NULL)
+    {
+        mix_videoframe_unref (self->ref_fame);
+        self->ref_fame = NULL;
+    }
+
+    LOG_V( "Release surfaces\n");
+
+    if (self->ci_shared_surfaces)
+    {
+        g_free (self->ci_shared_surfaces);
+        self->ci_shared_surfaces = NULL;
+    }
+
+    if (self->surfaces)
+    {
+        g_free (self->surfaces);
+        self->surfaces = NULL;
+    }
+
+    LOG_V( "vaDestroyContext\n");
+
+    va_status = vaDestroyContext (parent->va_display, parent->va_context);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed vaDestroyContext\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "vaDestroyConfig\n");
+
+    va_status = vaDestroyConfig (parent->va_display, parent->va_config);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed vaDestroyConfig\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    parent->initialized = FALSE;
+
+    g_mutex_unlock(parent->objectlock);
+
+#if 1
+    if (parent_class->deinitialize) {
+        return parent_class->deinitialize(mix);
+    }
+#endif
+
+    //Most stuff is cleaned up in parent_class->finalize()
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
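+
+/* The sequence parameter buffer carries the stream-level settings (level,
+ * GOP length, frame rate, QP and bitrate targets) to the VA driver; it is
+ * submitted once, before the first frame is encoded. */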
+MIX_RESULT mix_videofmtenc_h264_send_seq_params (MixVideoFormatEnc_H264 *mix)
+{
+
+    VAStatus va_status;
+    VAEncSequenceParameterBufferH264 h264_seq_param;
+
+    MixVideoFormatEnc *parent = NULL;
+
+    if (mix == NULL)
+        return MIX_RESULT_NULL_PTR;
+
+    LOG_V( "Begin\n\n");
+
+    if (MIX_IS_VIDEOFORMATENC_H264(mix))
+    {
+        parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+        /*zero-init so fields we do not set (e.g. vui_flag,
+         * seq_parameter_set_id) have defined values*/
+        memset(&h264_seq_param, 0, sizeof(h264_seq_param));
+
+        /*set up the sequence params for HW*/
+        h264_seq_param.level_idc = 30;  //TODO, hard code now
+        h264_seq_param.intra_period = parent->intra_period;
+        h264_seq_param.picture_width_in_mbs = parent->picture_width / 16;
+        h264_seq_param.picture_height_in_mbs = parent->picture_height / 16;
+        h264_seq_param.bits_per_second = parent->bitrate;
+        /*round to the nearest integer frame rate*/
+        h264_seq_param.frame_rate =
+                (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom / 2) / parent->frame_rate_denom;
+        h264_seq_param.initial_qp = parent->initial_qp;
+        h264_seq_param.min_qp = parent->min_qp;
+        h264_seq_param.basic_unit_size = mix->basic_unit_size;  //for rate control usage
+        //h264_seq_param.vui_flag = 248;
+        //h264_seq_param.seq_parameter_set_id = 176;
+
+        LOG_V(
+                "===h264 sequence params===\n");
+
+        LOG_I( "seq_parameter_set_id = %d\n",
+                (guint)h264_seq_param.seq_parameter_set_id);
+        LOG_I( "level_idc = %d\n",
+                (guint)h264_seq_param.level_idc);
+        LOG_I( "intra_period = %d\n",
+                h264_seq_param.intra_period);
+        LOG_I( "picture_width_in_mbs = %d\n",
+                h264_seq_param.picture_width_in_mbs);
+        LOG_I( "picture_height_in_mbs = %d\n",
+                h264_seq_param.picture_height_in_mbs);
+        LOG_I( "bitrate = %d\n",
+                h264_seq_param.bits_per_second);
+        LOG_I( "frame_rate = %d\n",
+                h264_seq_param.frame_rate);
+        LOG_I( "initial_qp = %d\n",
+                h264_seq_param.initial_qp);
+        LOG_I( "min_qp = %d\n",
+                h264_seq_param.min_qp);
+        LOG_I( "basic_unit_size = %d\n",
+                h264_seq_param.basic_unit_size);
+        LOG_I( "vui_flag = %d\n\n",
+                h264_seq_param.vui_flag);
+
+        va_status = vaCreateBuffer(parent->va_display, parent->va_context,
+                VAEncSequenceParameterBufferType,
+                sizeof(h264_seq_param),
+                1, &h264_seq_param,
+                &mix->seq_param_buf);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaCreateBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        va_status = vaRenderPicture(parent->va_display, parent->va_context,
+                &mix->seq_param_buf, 1);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaRenderPicture\n");
+            return MIX_RESULT_FAIL;
+        }
+    }
+    else
+    {
+        LOG_E(
+                "not H264 video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+
+
+}
+
+MIX_RESULT mix_videofmtenc_h264_send_picture_parameter (MixVideoFormatEnc_H264 *mix)
+{
+    VAStatus va_status;
+    VAEncPictureParameterBufferH264 h264_pic_param;
+    MixVideoFormatEnc *parent = NULL;
+
+    if (mix == NULL)
+        return MIX_RESULT_NULL_PTR;
+
+    LOG_V( "Begin\n\n");
+
+#if 0  //not needed currently
+    MixVideoConfigParamsEncH264 * params_h264
+            = MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc);
+#endif
+
+    if (MIX_IS_VIDEOFORMATENC_H264(mix)) {
+
+        parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+        /*set picture params for HW*/
+        h264_pic_param.reference_picture = mix->ref_fame->frame_id;
+        h264_pic_param.reconstructed_picture = mix->rec_fame->frame_id;
+        h264_pic_param.coded_buf = mix->coded_buf;
+        h264_pic_param.picture_width = parent->picture_width;
+        h264_pic_param.picture_height = parent->picture_height;
+        h264_pic_param.last_picture = 0;
+
+
+        LOG_V(
+                "======h264 picture params======\n");
+        LOG_I( "reference_picture = 0x%08x\n",
+                h264_pic_param.reference_picture);
+        LOG_I( "reconstructed_picture = 0x%08x\n",
+                h264_pic_param.reconstructed_picture);
+        LOG_I( "coded_buf = 0x%08x\n",
+                h264_pic_param.coded_buf);
+        LOG_I( "picture_width = %d\n",
+                h264_pic_param.picture_width);
+        LOG_I( "picture_height = %d\n\n",
+                h264_pic_param.picture_height);
+
+        va_status = vaCreateBuffer(parent->va_display, parent->va_context,
+                VAEncPictureParameterBufferType,
+                sizeof(h264_pic_param),
+                1, &h264_pic_param,
+                &mix->pic_param_buf);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaCreateBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+
+
+        va_status = vaRenderPicture(parent->va_display, parent->va_context,
+                &mix->pic_param_buf, 1);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaRenderPicture\n");
+            return MIX_RESULT_FAIL;
+        }
+    }
+    else
+    {
+        LOG_E(
+                "not H264 video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "end\n");
+    return MIX_RESULT_SUCCESS;
+
+}
+
+
+MIX_RESULT mix_videofmtenc_h264_send_slice_parameter (MixVideoFormatEnc_H264 *mix)
+{
+    VAStatus va_status;
+
+    guint slice_num;
+    guint slice_height;
+    guint slice_index;
+    guint slice_height_in_mb;
+
+    if (mix == NULL)
+        return MIX_RESULT_NULL_PTR;
+
+    LOG_V( "Begin\n\n");
+
+
+    MixVideoFormatEnc *parent = NULL;
+
+    if (MIX_IS_VIDEOFORMATENC_H264(mix))
+    {
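+        /* The picture is split into slice_num horizontal bands; each band's
+         * height is rounded up to a whole number of 16-pixel macroblock rows,
+         * and the last slice simply takes whatever rows remain. */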
+        parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+        slice_num = mix->slice_num;
+        slice_height = parent->picture_height / slice_num;
+
+        //round the slice height up to a multiple of 16 (whole macroblocks)
+        slice_height += 15;
+        slice_height &= (~15);
+
+#if 1
+        va_status = vaCreateBuffer (parent->va_display, parent->va_context,
+                VAEncSliceParameterBufferType,
+                sizeof(VAEncSliceParameterBuffer),
+                slice_num, NULL,
+                &mix->slice_param_buf);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaCreateBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        VAEncSliceParameterBuffer *slice_param, *current_slice;
+
+        va_status = vaMapBuffer(parent->va_display,
+                mix->slice_param_buf,
+                (void **)&slice_param);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaMapBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        current_slice = slice_param;
+
+
+        for (slice_index = 0; slice_index < slice_num; slice_index++) {
+            current_slice = slice_param + slice_index;
+            slice_height_in_mb =
+                    min (slice_height, parent->picture_height
+                            - slice_index * slice_height) / 16;
+
+            // starting MB row number for this slice
+            current_slice->start_row_number = slice_index * slice_height / 16;
+            // slice height measured in MB
+            current_slice->slice_height = slice_height_in_mb;
+            current_slice->slice_flags.bits.is_intra = mix->is_intra;
+            current_slice->slice_flags.bits.disable_deblocking_filter_idc
+                    = mix->disable_deblocking_filter_idc;
+
+            LOG_V(
+                    "======h264 slice params======\n");
+
+            LOG_I( "slice_index = %d\n",
+                    (gint) slice_index);
+            LOG_I( "start_row_number = %d\n",
+                    (gint) current_slice->start_row_number);
+            LOG_I( "slice_height_in_mb = %d\n",
+                    (gint) current_slice->slice_height);
+            LOG_I( "slice.is_intra = %d\n",
+                    (gint) current_slice->slice_flags.bits.is_intra);
+            LOG_I(
+                    "disable_deblocking_filter_idc = %d\n\n",
+                    (gint) mix->disable_deblocking_filter_idc);
+
+        }
+
+        va_status = vaUnmapBuffer(parent->va_display, mix->slice_param_buf);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaUnmapBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+#endif
+
+#if 0
+        VAEncSliceParameterBuffer slice_param;
+        slice_index = 0;
+        slice_height_in_mb = slice_height / 16;
+        slice_param.start_row_number = 0;
+        slice_param.slice_height = slice_height / 16;
+        slice_param.slice_flags.bits.is_intra = mix->is_intra;
+        slice_param.slice_flags.bits.disable_deblocking_filter_idc
+                = mix->disable_deblocking_filter_idc;
+
+        va_status = vaCreateBuffer (parent->va_display, parent->va_context,
+                VAEncSliceParameterBufferType,
+                sizeof(slice_param),
+                slice_num, &slice_param,
+                &mix->slice_param_buf);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaCreateBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+#endif
+
+        va_status = vaRenderPicture(parent->va_display, parent->va_context,
+                &mix->slice_param_buf, 1);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaRenderPicture\n");
+            return MIX_RESULT_FAIL;
+        }
+
+    }
+    else
+    {
+        LOG_E(
+                "not H264 video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix,
+        MixBuffer * bufin, MixIOVec * iovout)
+{
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    VAStatus va_status = VA_STATUS_SUCCESS;
+    VADisplay va_display = NULL;
+    VAContextID va_context;
+    gulong surface = 0;
+    guint16 width, height;
+
+    MixVideoFrame * tmp_fame;
+    guint8 *buf;
+
+    if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) {
+        LOG_E(
+                "mix == NULL || bufin == NULL || iovout == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
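+
+    /* Per-frame pipeline: pick source/reference/reconstructed surfaces,
+     * upload (or CI-map) the raw frame, then vaBeginPicture ->
+     * send seq/pic/slice params -> vaEndPicture -> vaSyncSurface ->
+     * map the coded buffer to copy out the bitstream. */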
} + + LOG_V( "Begin\n"); + + if (MIX_IS_VIDEOFORMATENC_H264(mix)) + { + + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + va_display = parent->va_display; + va_context = parent->va_context; + width = parent->picture_width; + height = parent->picture_height; + + + LOG_I( "encoded_frames = %d\n", + mix->encoded_frames); + LOG_I( "is_intra = %d\n", + mix->is_intra); + LOG_I( "ci_frame_id = 0x%08x\n", + (guint) parent->ci_frame_id); + + /* determine the picture type*/ + if ((mix->encoded_frames % parent->intra_period) == 0) { + mix->is_intra = TRUE; + } else { + mix->is_intra = FALSE; + } + + LOG_I( "is_intra_picture = %d\n", + mix->is_intra); + + LOG_V( + "Get Surface from the pool\n"); + + /*current we use one surface for source data, + * one for reference and one for reconstructed*/ + /*TODO, could be refine here*/ + + if (!parent->share_buf_mode) { + LOG_V( + "We are NOT in share buffer mode\n"); + + if (mix->ref_fame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_fame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + return MIX_RESULT_FAIL; + } + } + + if (mix->rec_fame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_fame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + return MIX_RESULT_FAIL; + } + } + + if (parent->need_display) { + mix->cur_fame = NULL; + } + + if (mix->cur_fame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_fame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + return MIX_RESULT_FAIL; + } + } + + LOG_V( "Get Surface Done\n"); + + + VAImage src_image; + guint8 *pvbuf; + guint8 *dst_y; + guint8 *dst_uv; + int i,j; + + LOG_V( + "map source data to surface\n"); + + ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videoframe_get_frame_id\n"); + return MIX_RESULT_FAIL; + } + + + LOG_I( + "surface id = 0x%08x\n", (guint) surface); + + va_status = vaDeriveImage(va_display, surface, &src_image); + //need to destroy + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDeriveImage\n"); + return MIX_RESULT_FAIL; + } + + VAImage *image = &src_image; + + LOG_V( "vaDeriveImage Done\n"); + + + va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed to vaMapBuffer\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( + "vaImage information\n"); + LOG_I( + "image->pitches[0] = %d\n", image->pitches[0]); + LOG_I( + "image->pitches[1] = %d\n", image->pitches[1]); + LOG_I( + "image->offsets[0] = %d\n", image->offsets[0]); + LOG_I( + "image->offsets[1] = %d\n", image->offsets[1]); + LOG_I( + "image->num_planes = %d\n", image->num_planes); + LOG_I( + "image->width = %d\n", image->width); + LOG_I( + "image->height = %d\n", image->height); + + LOG_I( + "input buf size = %d\n", bufin->size); + + guint8 *inbuf = bufin->data; + + /*need to convert YUV420 to NV12*/ + dst_y = pvbuf +image->offsets[0]; + + for (i = 0; i < height; i ++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + + dst_uv = pvbuf + image->offsets[1]; + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; + dst_uv [j + 1] = + inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; + } + dst_uv += image->pitches[1]; + } + + 
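/* Note: the two loops above convert planar YUV 4:2:0 (I420) input into the
+ * surface's NV12 layout: the Y plane is copied row by row, then the separate
+ * U and V planes are interleaved into a single UV plane (U in even bytes,
+ * V in odd bytes), using the pitches reported by vaDeriveImage. */
+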
va_status = vaUnmapBuffer(va_display, image->buf);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaUnmapBuffer\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ va_status = vaDestroyImage(va_display, src_image.image_id);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaDestroyImage\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V(
+ "Map source data to surface done\n");
+
+ }
+
+ else {//if (!parent->share_buf_mode)
+
+ MixVideoFrame * frame = mix_videoframe_new();
+
+ if (mix->ref_fame == NULL)
+ {
+ ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1);
+
+ ret = mix_surfacepool_get_frame_with_ci_frameidx
+ (parent->surfacepool, &mix->ref_fame, frame);
+ if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used
+ {
+ LOG_E(
+ "get reference surface from pool failed\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ if (mix->rec_fame == NULL)
+ {
+ ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2);
+
+ ret = mix_surfacepool_get_frame_with_ci_frameidx
+ (parent->surfacepool, &mix->rec_fame, frame);
+
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "get reconstructed surface from pool failed\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ //mix_videoframe_unref (mix->cur_fame);
+
+ if (parent->need_display) {
+ mix->cur_fame = NULL;
+ }
+
+ if (mix->cur_fame == NULL)
+ {
+ guint ci_idx;
+ memcpy (&ci_idx, bufin->data, bufin->size);
+
+ LOG_I(
+ "surface_num = %d\n", mix->surface_num);
+ LOG_I(
+ "ci_frame_idx = %d\n", ci_idx);
+
+ if (ci_idx > mix->surface_num - 2) {
+ LOG_E(
+ "the CI frame idx is larger than the CI frame number\n");
+ return MIX_RESULT_FAIL;
+ }
+
+
+ ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx);
+
+ ret = mix_surfacepool_get_frame_with_ci_frameidx
+ (parent->surfacepool, &mix->cur_fame, frame);
+
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "get current working surface from pool failed\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface);
+
+ }
+
+ LOG_V( "vaBeginPicture\n");
+ LOG_I( "va_context = 0x%08x\n",(guint)va_context);
+ LOG_I( "surface = 0x%08x\n",(guint)surface);
+ LOG_I( "va_display = 0x%08x\n",(guint)va_display);
+
+ va_status = vaBeginPicture(va_display, va_context, surface);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E( "Failed vaBeginPicture\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V( "mix_videofmtenc_h264_send_seq_params\n");
+
+ if (mix->encoded_frames == 0) {
+ ret = mix_videofmtenc_h264_send_seq_params (mix);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "Failed mix_videofmtenc_h264_send_seq_params\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ ret = mix_videofmtenc_h264_send_picture_parameter (mix);
+
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "Failed mix_videofmtenc_h264_send_picture_parameter\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videofmtenc_h264_send_slice_parameter (mix);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "Failed mix_videofmtenc_h264_send_slice_parameter\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V( "before vaEndPicture\n");
+
+ va_status = vaEndPicture (va_display, va_context);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E( "Failed vaEndPicture\n");
+ return MIX_RESULT_FAIL;
+ }
+
+
+ LOG_V( "vaSyncSurface\n");
+
+ va_status = vaSyncSurface(va_display, surface);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E( "Failed vaSyncSurface\n");
+ return MIX_RESULT_FAIL;
+ }
+
+
+ LOG_V(
+ "Start to get encoded data\n");
+
+ /*get encoded data from the VA buffer*/
+ va_status = vaMapBuffer (va_display, mix->coded_buf, (void
**)&buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaMapBuffer\n"); + return MIX_RESULT_FAIL; + } + + // first 4 bytes is the size of the buffer + memcpy (&(iovout->data_size), (void*)buf, 4); + //size = (guint*) buf; + + guint size = iovout->data_size + 100; + + iovout->buffer_size = size; + + //We will support two buffer mode, one is application allocates the buffer and passes to encode, + //the other is encode allocate memory + + if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. + iovout->data = g_malloc (size); // In case we have lots of 0x000001 start code, and we replace them with 4 bytes length prefixed + if (iovout->data == NULL) { + return MIX_RESULT_NO_MEMORY; + } + } + + if (mix->delimiter_type == MIX_DELIMITER_ANNEXB) { + memcpy (iovout->data, buf + 16, iovout->data_size); //parload is started from 17th byte + size = iovout->data_size; + } else { + + guint pos = 0; + guint zero_byte_count = 0; + guint prefix_length = 0; + guint8 nal_unit_type = 0; + guint8 * payload = buf + 16; + + while ((payload[pos++] == 0x00)) { + zero_byte_count ++; + if (pos >= iovout->data_size) //to make sure the buffer to be accessed is valid + break; + } + + nal_unit_type = (guint8)(payload[pos] & 0x1f); + prefix_length = zero_byte_count + 1; + + LOG_I ("nal_unit_type = %d\n", nal_unit_type); + LOG_I ("zero_byte_count = %d\n", zero_byte_count); + + if ((payload [pos - 1] & 0x01) && mix->slice_num == 1 && nal_unit_type == 1) { + size = iovout->data_size; + iovout->data[0] = ((size - prefix_length) >> 24) & 0xff; + iovout->data[1] = ((size - prefix_length) >> 16) & 0xff; + iovout->data[2] = ((size - prefix_length) >> 8) & 0xff; + iovout->data[3] = (size - prefix_length) & 0xff; + // use 4 bytes to indicate the NALU length + memcpy (iovout->data + 4, buf + 16 + prefix_length, size - prefix_length); + LOG_V ("We only have one start code, copy directly\n"); + } + else { + ret = mix_videofmtenc_h264_AnnexB_to_length_prefixed (buf + 16, iovout->data_size, iovout->data, &size); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_AnnexB_to_length_prefixed\n"); + return MIX_RESULT_FAIL; + } + } + } + + iovout->data_size = size; + LOG_I( + "out size is = %d\n", iovout->data_size); + + va_status = vaUnmapBuffer (va_display, mix->coded_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( "get encoded data done\n"); +#if 0 + if (parent->drawable) { + va_status = vaPutSurface(va_display, surface, (Drawable)parent->drawable, + 0,0, width, height, + 0,0, width, height, + NULL,0,0); + } + +#ifdef SHOW_SRC + else { + + va_status = vaPutSurface(va_display, surface, win, + 0,0, width, height, + 0,0, width, height, + NULL,0,0); + } +#endif //SHOW_SRC +#endif + + VASurfaceStatus status; + + /*query the status of current surface*/ + va_status = vaQuerySurfaceStatus(va_display, surface, &status); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaQuerySurfaceStatus\n"); + return MIX_RESULT_FAIL; + } + mix->pic_skipped = status & VASurfaceSkipped; + + if (parent->need_display) { + ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_fame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_framemanager_enqueue\n"); + return MIX_RESULT_FAIL; + } + } + + + /*update the reference surface and reconstructed surface */ + if (!mix->pic_skipped) { + tmp_fame = mix->rec_fame; + mix->rec_fame= mix->ref_fame; + mix->ref_fame = tmp_fame; + } + +#if 0 + 
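/* Disabled alternative for managing the reference chain: instead of swapping
+ * the reference and reconstructed frames as done above, this path would drop
+ * the old reference frame and promote the reconstructed frame in its place. */
+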
if (mix->ref_fame != NULL)
+ mix_videoframe_unref (mix->ref_fame);
+ mix->ref_fame = mix->rec_fame;
+
+ mix_videoframe_unref (mix->cur_fame);
+#endif
+
+ if (!(parent->need_display)) {
+ mix_videoframe_unref (mix->cur_fame);
+ mix->cur_fame = NULL;
+ }
+
+ mix->encoded_frames ++;
+ }
+ else
+ {
+ LOG_E(
+ "not H264 video encode Object\n");
+ return MIX_RESULT_FAIL;
+ }
+
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size (
+ MixVideoFormatEnc *mix, guint *max_size)
+{
+
+ MixVideoFormatEnc *parent = NULL;
+
+ if (mix == NULL || max_size == NULL)
+ {
+ LOG_E(
+ "mix == NULL || max_size == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ parent = MIX_VIDEOFORMATENC(mix);
+ MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix);
+
+ LOG_V( "Begin\n");
+
+ if (MIX_IS_VIDEOFORMATENC_H264(self)) {
+
+ if (self->coded_buf_size > 0) {
+ *max_size = self->coded_buf_size;
+ LOG_V ("Max encoded size already calculated, returning the cached value");
+ return MIX_RESULT_SUCCESS;
+ }
+
+ /*based on the rate control mode, calculate the default encoded buffer size*/
+ if (self->va_rcmode == VA_RC_NONE) {
+ self->coded_buf_size =
+ (parent->picture_width* parent->picture_height * 400) / (16 * 16);
+ // set to value according to QP
+ }
+ else {
+ self->coded_buf_size = parent->bitrate/ 4;
+ }
+
+ self->coded_buf_size =
+ max (self->coded_buf_size ,
+ (parent->picture_width* parent->picture_height * 400) / (16 * 16));
+
+ /*in case we got a very large user input bit rate value*/
+ self->coded_buf_size =
+ min(self->coded_buf_size,
+ (parent->picture_width * parent->picture_height * 1.5 * 8));
+ self->coded_buf_size = (self->coded_buf_size + 15) &(~15);
+ }
+ else
+ {
+ LOG_E(
+ "not H264 video encode Object\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ *max_size = self->coded_buf_size;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed (
+ guint8 * bufin, guint bufin_len, guint8* bufout, guint * bufout_len)
+{
+
+ guint pos = 0;
+ guint last_pos = 0;
+
+ guint zero_byte_count = 0;
+ guint nal_size = 0;
+ guint prefix_length = 0;
+ guint size_copied = 0;
+ guint leading_zero_count = 0;
+
+ if (bufin == NULL || bufout == NULL || bufout_len == NULL) {
+
+ LOG_E(
+ "bufin == NULL || bufout == NULL || bufout_len == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ if (bufin_len <= 0 || *bufout_len <= 0) {
+ LOG_E(
+ "bufin_len <= 0 || *bufout_len <= 0\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V ("Begin\n");
+
+ while ((bufin[pos++] == 0x00)) {
+ zero_byte_count ++;
+ if (pos >= bufin_len) //to make sure the buffer to be accessed is valid
+ break;
+ }
+
+ if (bufin[pos - 1] != 0x01 || zero_byte_count < 2)
+ {
+ LOG_E("The stream is not in AnnexB format\n");
+ return MIX_RESULT_FAIL; //not AnnexB, we won't process it
+ }
+
+ zero_byte_count = 0;
+ last_pos = pos;
+
+ while (pos < bufin_len) {
+
+ while (bufin[pos++] == 0) {
+ zero_byte_count ++;
+ if (pos >= bufin_len) //to make sure the buffer to be accessed is valid
+ break;
+ }
+
+ if (bufin[pos - 1] == 0x01 && zero_byte_count >= 2) {
+ if (zero_byte_count == 2) {
+ prefix_length = 3;
+ }
+ else {
+ prefix_length = 4;
+ leading_zero_count = zero_byte_count - 3;
+ }
+
+ LOG_I("leading_zero_count = %d\n", leading_zero_count);
+
+ //nal_size is unsigned, so check before subtracting instead of testing for a negative result
+ if (pos - last_pos < prefix_length + leading_zero_count) {
+ LOG_E ("something wrong in the stream\n");
+ return MIX_RESULT_FAIL; //not AnnexB, we won't process it
+ }
+ nal_size = pos - last_pos - prefix_length - leading_zero_count;
+
+ if (*bufout_len < (size_copied + nal_size
+ 4)) { + LOG_E ("The length of destination buffer is too small\n"); + return MIX_RESULT_FAIL; + } + + LOG_I ("nal_size = %d\n", nal_size); + + /*We use 4 bytes length prefix*/ + bufout [size_copied] = nal_size >> 24 & 0xff; + bufout [size_copied + 1] = nal_size >> 16 & 0xff; + bufout [size_copied + 2] = nal_size >> 8 & 0xff; + bufout [size_copied + 3] = nal_size & 0xff; + + size_copied += 4; //4 bytes length prefix + memcpy (bufout + size_copied, bufin + last_pos, nal_size); + size_copied += nal_size; + + LOG_I ("size_copied = %d\n", size_copied); + + zero_byte_count = 0; + leading_zero_count = 0; + last_pos = pos; + } + + else if (pos == bufin_len) { + + LOG_V ("Last NALU in this frame\n"); + + nal_size = pos - last_pos; + + if (*bufout_len < (size_copied + nal_size + 4)) { + LOG_E ("The length of destination buffer is too small\n"); + return MIX_RESULT_FAIL; + } + + /*We use 4 bytes length prefix*/ + bufout [size_copied] = nal_size >> 24 & 0xff; + bufout [size_copied + 1] = nal_size >> 16 & 0xff; + bufout [size_copied + 2] = nal_size >> 8 & 0xff; + bufout [size_copied + 3] = nal_size & 0xff; + + size_copied += 4; //4 bytes length prefix + memcpy (bufout + size_copied, bufin + last_pos, nal_size); + size_copied += nal_size; + + LOG_I ("size_copied = %d\n", size_copied); + } + + else { + zero_byte_count = 0; + leading_zero_count = 0; + } + + } + + if (size_copied != *bufout_len) { + *bufout_len = size_copied; + } + + LOG_V ("End\n"); + + return MIX_RESULT_SUCCESS; + +} + diff --git a/mix_video/src/mixvideoformatenc_h264.h b/mix_video/src/mixvideoformatenc_h264.h new file mode 100644 index 0000000..eeef2d9 --- /dev/null +++ b/mix_video/src/mixvideoformatenc_h264.h @@ -0,0 +1,137 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEOFORMATENC_H264_H__ +#define __MIX_VIDEOFORMATENC_H264_H__ + +#include "mixvideoformatenc.h" +#include "mixvideoframe_private.h" + +#define MIX_VIDEO_ENC_H264_SURFACE_NUM 20 + +#define min(X,Y) (((X) < (Y)) ? (X) : (Y)) +#define max(X,Y) (((X) > (Y)) ? (X) : (Y)) + +/* + * Type macros. 
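+ * These macros are the standard GObject boilerplate for run-time type checking and casting.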
+ */ +#define MIX_TYPE_VIDEOFORMATENC_H264 (mix_videoformatenc_h264_get_type ()) +#define MIX_VIDEOFORMATENC_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC_H264, MixVideoFormatEnc_H264)) +#define MIX_IS_VIDEOFORMATENC_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC_H264)) +#define MIX_VIDEOFORMATENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC_H264, MixVideoFormatEnc_H264Class)) +#define MIX_IS_VIDEOFORMATENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC_H264)) +#define MIX_VIDEOFORMATENC_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC_H264, MixVideoFormatEnc_H264Class)) + +typedef struct _MixVideoFormatEnc_H264 MixVideoFormatEnc_H264; +typedef struct _MixVideoFormatEnc_H264Class MixVideoFormatEnc_H264Class; + +struct _MixVideoFormatEnc_H264 { + /*< public > */ + MixVideoFormatEnc parent; + + VABufferID coded_buf; + VABufferID seq_param_buf; + VABufferID pic_param_buf; + VABufferID slice_param_buf; + VASurfaceID * ci_shared_surfaces; + VASurfaceID * surfaces; + guint surface_num; + + MixVideoFrame *cur_fame; //current input frame to be encoded; + MixVideoFrame *ref_fame; //reference frame + MixVideoFrame *rec_fame; //reconstructed frame; + + guint basic_unit_size; //for rate control + guint disable_deblocking_filter_idc; + MixDelimiterType delimiter_type; + guint slice_num; + guint va_rcmode; + + guint encoded_frames; + gboolean pic_skipped; + + gboolean is_intra; + + guint coded_buf_size; + + /*< public > */ +}; + +/** + * MixVideoFormatEnc_H264Class: + * + * MI-X Video object class + */ +struct _MixVideoFormatEnc_H264Class { + /*< public > */ + MixVideoFormatEncClass parent_class; + + /* class members */ + + /*< public > */ +}; + +/** + * mix_videoformatenc_h264_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videoformatenc_h264_get_type(void); + +/** + * mix_videoformatenc_h264_new: + * @returns: A newly allocated instance of #MixVideoFormatEnc_H264 + * + * Use this method to create new instance of #MixVideoFormatEnc_H264 + */ +MixVideoFormatEnc_H264 *mix_videoformatenc_h264_new(void); + +/** + * mix_videoformatenc_h264_ref: + * @mix: object to add reference + * @returns: the MixVideoFormatEnc_H264 instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoFormatEnc_H264 *mix_videoformatenc_h264_ref(MixVideoFormatEnc_H264 * mix); + +/** + * mix_videoformatenc_h264_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. 
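+ * The object is destroyed once its reference count drops to zero.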
+ */ +#define mix_videoformatenc_h264_unref(obj) g_object_unref (G_OBJECT(obj)) + +/* Class Methods */ + +/* H.264 vmethods */ +MIX_RESULT mix_videofmtenc_h264_getcaps(MixVideoFormatEnc *mix, GString *msg); +MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); +MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params); +MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint * max_size); + +/* Local Methods */ + +MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, MixBuffer * bufin, + MixIOVec * iovout); +MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed ( + guint8 * bufin, guint bufin_len, guint8* bufout, guint *bufout_len); + +#endif /* __MIX_VIDEOFORMATENC_H264_H__ */ diff --git a/mix_video/src/mixvideoformatenc_mpeg4.c b/mix_video/src/mixvideoformatenc_mpeg4.c new file mode 100644 index 0000000..e58976b --- /dev/null +++ b/mix_video/src/mixvideoformatenc_mpeg4.c @@ -0,0 +1,1713 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include +#include +#include + +#include "mixvideolog.h" + +#include "mixvideoformatenc_mpeg4.h" +#include "mixvideoconfigparamsenc_mpeg4.h" + +#define MDEBUG +#undef SHOW_SRC + +#ifdef SHOW_SRC +Window win = 0; +#endif /* SHOW_SRC */ + + +/* The parent class. The pointer will be saved + * in this class's initialization. The pointer + * can be used for chaining method call if needed. 
+ */ +static MixVideoFormatEncClass *parent_class = NULL; + +static void mix_videoformatenc_mpeg4_finalize(GObject * obj); + +/* + * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC + */ +G_DEFINE_TYPE (MixVideoFormatEnc_MPEG4, mix_videoformatenc_mpeg4, MIX_TYPE_VIDEOFORMATENC); + +static void mix_videoformatenc_mpeg4_init(MixVideoFormatEnc_MPEG4 * self) { + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); + + /* TODO: public member initialization */ + + /* TODO: private member initialization */ + self->encoded_frames = 0; + self->pic_skipped = FALSE; + self->is_intra = TRUE; + self->cur_fame = NULL; + self->ref_fame = NULL; + self->rec_fame = NULL; + + self->ci_shared_surfaces = NULL; + self->surfaces= NULL; + self->surface_num = 0; + + parent->initialized = FALSE; +} + +static void mix_videoformatenc_mpeg4_class_init( + MixVideoFormatEnc_MPEG4Class * klass) { + + /* root class */ + GObjectClass *gobject_class = (GObjectClass *) klass; + + /* direct parent class */ + MixVideoFormatEncClass *video_formatenc_class = + MIX_VIDEOFORMATENC_CLASS(klass); + + /* parent class for later use */ + parent_class = g_type_class_peek_parent(klass); + + /* setup finializer */ + gobject_class->finalize = mix_videoformatenc_mpeg4_finalize; + + /* setup vmethods with base implementation */ + /* TODO: decide if we need to override the parent's methods */ + video_formatenc_class->getcaps = mix_videofmtenc_mpeg4_getcaps; + video_formatenc_class->initialize = mix_videofmtenc_mpeg4_initialize; + video_formatenc_class->encode = mix_videofmtenc_mpeg4_encode; + video_formatenc_class->flush = mix_videofmtenc_mpeg4_flush; + video_formatenc_class->eos = mix_videofmtenc_mpeg4_eos; + video_formatenc_class->deinitialize = mix_videofmtenc_mpeg4_deinitialize; + video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_mpeg4_get_max_encoded_buf_size; +} + +MixVideoFormatEnc_MPEG4 * +mix_videoformatenc_mpeg4_new(void) { + MixVideoFormatEnc_MPEG4 *ret = + g_object_new(MIX_TYPE_VIDEOFORMATENC_MPEG4, NULL); + + return ret; +} + +void mix_videoformatenc_mpeg4_finalize(GObject * obj) { + /* clean up here. */ + + /*MixVideoFormatEnc_MPEG4 *mix = MIX_VIDEOFORMATENC_MPEG4(obj); */ + GObjectClass *root_class = (GObjectClass *) parent_class; + + LOG_V( "\n"); + + /* Chain up parent */ + if (root_class->finalize) { + root_class->finalize(obj); + } +} + +MixVideoFormatEnc_MPEG4 * +mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix) { + return (MixVideoFormatEnc_MPEG4 *) g_object_ref(G_OBJECT(mix)); +} + +/*MPEG-4:2 vmethods implementation */ +MIX_RESULT mix_videofmtenc_mpeg4_getcaps(MixVideoFormatEnc *mix, GString *msg) { + + /* TODO: add codes for MPEG-4:2 */ + + /* TODO: decide if we need to chainup parent method. 
+ * if we do, the following is the code: + */ + + LOG_V( "mix_videofmtenc_mpeg4_getcaps\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + + if (parent_class->getcaps) { + return parent_class->getcaps(mix, msg); + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display ) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + MixVideoConfigParamsEncMPEG4 * config_params_enc_mpeg4; + + VAStatus va_status = VA_STATUS_SUCCESS; + VASurfaceID * surfaces; + + gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; + gint va_num_profiles, va_num_entrypoints; + + VAProfile *va_profiles = NULL; + VAEntrypoint *va_entrypoints = NULL; + VAConfigAttrib va_attrib[2]; + guint index; + + + /*frame_mgr and input_buf_pool is reservered for future use*/ + + if (mix == NULL || config_params_enc == NULL || va_display == NULL) { + LOG_E( + "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "begin\n"); + + + //TODO additional parameter checking + + /* Chainup parent method. */ +#if 1 + if (parent_class->initialize) { + ret = parent_class->initialize(mix, config_params_enc, + frame_mgr, input_buf_pool, surface_pool, + va_display); + } + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + +#endif //disable it currently + + if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + { + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc)) { + config_params_enc_mpeg4 = + MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc); + } else { + LOG_V( + "mix_videofmtenc_mpeg4_initialize: no mpeg4 config params found\n"); + return MIX_RESULT_FAIL; + } + + g_mutex_lock(parent->objectlock); + + LOG_V( + "Start to get properities from MPEG-4:2 params\n"); + + /* get properties from MPEG4 params Object, which is special to MPEG4 format*/ + + ret = mix_videoconfigparamsenc_mpeg4_get_profile_level (config_params_enc_mpeg4, + &self->profile_and_level_indication); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + LOG_E( + "Failed to mix_videoconfigparamsenc_mpeg4_get_profile_level\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + ret = mix_videoconfigparamsenc_mpeg4_get_fixed_vti (config_params_enc_mpeg4, + &(self->fixed_vop_time_increment)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + LOG_E( + "Failed to mix_videoconfigparamsenc_mpeg4_get_fixed_vti\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + ret = mix_videoconfigparamsenc_mpeg4_get_dlk (config_params_enc_mpeg4, + &(self->disable_deblocking_filter_idc)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + LOG_E( + "Failed to config_params_enc_mpeg4\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + + LOG_V( + "======MPEG4 Encode Object properities======:\n"); + + LOG_I( "self->profile_and_level_indication = %d\n", + self->profile_and_level_indication); + LOG_I( "self->fixed_vop_time_increment = %d\n\n", + self->fixed_vop_time_increment); + + LOG_V( + "Get properities from params done\n"); + + + //display = XOpenDisplay(NULL); + //va_display = vaGetDisplay (videoencobj->display); + + parent->va_display = va_display; + + 
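/* The VADisplay handle is supplied by the caller, so this object neither opens
+ * an X display nor calls vaInitialize() itself (see the commented-out calls). */
+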
LOG_V( "Get Display\n"); + LOG_I( "Display = 0x%08x\n", + (guint)va_display); + + //va_status = vaInitialize(va_display, &va_major_ver, &va_minor_ver); + //g_print ("vaInitialize va_status = %d\n", va_status); + + +#if 0 + /* query the vender information, can ignore*/ + va_vendor = vaQueryVendorString (va_display); + LOG_I( "Vendor = %s\n", + va_vendor); +#endif + + /*get the max number for profiles/entrypoints/attribs*/ + va_max_num_profiles = vaMaxNumProfiles(va_display); + LOG_I( "va_max_num_profiles = %d\n", + va_max_num_profiles); + + va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); + LOG_I( "va_max_num_entrypoints = %d\n", + va_max_num_entrypoints); + + va_max_num_attribs = vaMaxNumConfigAttributes(va_display); + LOG_I( "va_max_num_attribs = %d\n", + va_max_num_attribs); + + va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); + va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); + + if (va_profiles == NULL || va_entrypoints ==NULL) + { + LOG_E( + "!va_profiles || !va_entrypoints\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_NO_MEMORY; + } + + LOG_I( + "va_profiles = 0x%08x\n", (guint)va_profiles); + + LOG_V( "vaQueryConfigProfiles\n"); + + + va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigProfiles\n"); + g_free(va_profiles); + g_free (va_entrypoints); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + LOG_V( "vaQueryConfigProfiles Done\n"); + + + + /*check whether profile is supported*/ + for(index= 0; index < va_num_profiles; index++) { + if(parent->va_profile == va_profiles[index]) + break; + } + + if(index == va_num_profiles) + { + LOG_E( "Profile not supported\n"); + g_free(va_profiles); + g_free (va_entrypoints); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; //Todo, add error handling here + } + + LOG_V( "vaQueryConfigEntrypoints\n"); + + + /*Check entry point*/ + va_status = vaQueryConfigEntrypoints(va_display, + parent->va_profile, + va_entrypoints, &va_num_entrypoints); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigEntrypoints\n"); + g_free(va_profiles); + g_free (va_entrypoints); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + for (index = 0; index < va_num_entrypoints; index ++) { + if (va_entrypoints[index] == VAEntrypointEncSlice) { + break; + } + } + + if (index == va_num_entrypoints) { + LOG_E( "Entrypoint not found\n"); + g_free(va_profiles); + g_free (va_entrypoints); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; //Todo, add error handling here + } + + + /*free profiles and entrypoints*/ + g_free(va_profiles); + g_free (va_entrypoints); + + va_attrib[0].type = VAConfigAttribRTFormat; + va_attrib[1].type = VAConfigAttribRateControl; + + LOG_V( "vaGetConfigAttributes\n"); + + va_status = vaGetConfigAttributes(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaGetConfigAttributes\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + if ((va_attrib[0].value & parent->va_format) == 0) { + LOG_E( "Matched format not found\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; //Todo, add error handling here + } + + + if ((va_attrib[1].value & parent->va_rcmode) == 0) { + LOG_E( "RC mode not found\n"); + g_mutex_unlock(parent->objectlock); + return 
MIX_RESULT_FAIL; //Todo, add error handling here + } + + va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; + va_attrib[1].value = parent->va_rcmode; + + LOG_V( "======VA Configuration======\n"); + + LOG_I( "profile = %d\n", + parent->va_profile); + LOG_I( "va_entrypoint = %d\n", + parent->va_entrypoint); + LOG_I( "va_attrib[0].type = %d\n", + va_attrib[0].type); + LOG_I( "va_attrib[1].type = %d\n", + va_attrib[1].type); + LOG_I( "va_attrib[0].value (Format) = %d\n", + va_attrib[0].value); + LOG_I( "va_attrib[1].value (RC mode) = %d\n", + va_attrib[1].value); + + LOG_V( "vaCreateConfig\n"); + + va_status = vaCreateConfig(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2, &(parent->va_config)); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaCreateConfig\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + /*TODO: compute the surface number*/ + int numSurfaces; + + if (parent->share_buf_mode) { + numSurfaces = 2; + } + else { + numSurfaces = 8; + parent->ci_frame_num = 0; + } + + self->surface_num = numSurfaces + parent->ci_frame_num; + + surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); + + if (surfaces == NULL) + { + LOG_E( + "Failed allocate surface\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_NO_MEMORY; + } + + LOG_V( "vaCreateSurfaces\n"); + + va_status = vaCreateSurfaces(va_display, parent->picture_width, + parent->picture_height, parent->va_format, + numSurfaces, surfaces); + //TODO check vret and return fail if needed + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaCreateSurfaces\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + if (parent->share_buf_mode) { + + LOG_V( + "We are in share buffer mode!\n"); + self->ci_shared_surfaces = + g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); + + if (self->ci_shared_surfaces == NULL) + { + LOG_E( + "Failed allocate shared surface\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_NO_MEMORY; + } + + guint index; + for(index = 0; index < parent->ci_frame_num; index++) { + + LOG_I( "ci_frame_id = %lu\n", + parent->ci_frame_id[index]); + + LOG_V( + "vaCreateSurfaceFromCIFrame\n"); + + va_status = vaCreateSurfaceFromCIFrame(va_display, + (gulong) (parent->ci_frame_id[index]), + &self->ci_shared_surfaces[index]); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateSurfaceFromCIFrame\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + } + + LOG_V( + "vaCreateSurfaceFromCIFrame Done\n"); + + }// if (parent->share_buf_mode) + + self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); + + if (self->surfaces == NULL) + { + LOG_E( + "Failed allocate private surface\n"); + g_free (surfaces); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_NO_MEMORY; + } + + if (parent->share_buf_mode) { + /*shared surfaces should be put in pool first, + * because we will get it accoring to CI index*/ + for(index = 0; index < parent->ci_frame_num; index++) + self->surfaces[index] = self->ci_shared_surfaces[index]; + } + + for(index = 0; index < numSurfaces; index++) { + self->surfaces[index + parent->ci_frame_num] = surfaces[index]; + } + + LOG_V( "assign surface Done\n"); + LOG_I( "Created %d libva surfaces\n", + numSurfaces + parent->ci_frame_num); + +#if 0 //current put this in gst + images = g_malloc(sizeof(VAImage)*numSurfaces); + if (images == NULL) + { + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + for (index = 0; index 
< numSurfaces; index++) { + //Derive an VAImage from an existing surface. + //The image buffer can then be mapped/unmapped for CPU access + va_status = vaDeriveImage(va_display, surfaces[index], + &images[index]); + } +#endif + + LOG_V( "mix_surfacepool_new\n"); + + parent->surfacepool = mix_surfacepool_new(); + if (surface_pool) + *surface_pool = parent->surfacepool; + //which is useful to check before encode + + if (parent->surfacepool == NULL) + { + LOG_E( + "Failed to mix_surfacepool_new\n"); + g_free (surfaces); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + LOG_V( + "mix_surfacepool_initialize\n"); + + ret = mix_surfacepool_initialize(parent->surfacepool, + self->surfaces, parent->ci_frame_num + numSurfaces); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + //TODO cleanup and/or retry + g_free (surfaces); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + default: + break; + } + + + //Initialize and save the VA context ID + LOG_V( "vaCreateContext\n"); + + va_status = vaCreateContext(va_display, parent->va_config, + parent->picture_width, parent->picture_height, + VA_PROGRESSIVE, self->surfaces, parent->ci_frame_num + numSurfaces, + &(parent->va_context)); + + LOG_I( + "Created libva context width %d, height %d\n", + parent->picture_width, parent->picture_height); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateContext\n"); + LOG_I( "va_status = %d\n", + (guint)va_status); + g_free (surfaces); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + guint max_size = 0; + ret = mix_videofmtenc_mpeg4_get_max_encoded_buf_size (parent, &max_size); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videofmtenc_mpeg4_get_max_encoded_buf_size\n"); + g_free (surfaces); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + + } + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &self->coded_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + g_free (surfaces); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + +#ifdef SHOW_SRC + Display * display = XOpenDisplay (NULL); + + LOG_I( "display = 0x%08x\n", + (guint) display); + win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, + parent->picture_width, parent->picture_height, 0, 0, + WhitePixel(display, 0)); + XMapWindow(display, win); + XSelectInput(display, win, KeyPressMask | StructureNotifyMask); + + XSync(display, False); + LOG_I( "va_display = 0x%08x\n", + (guint) va_display); + +#endif /* SHOW_SRC */ + + parent->initialized = TRUE; + + g_mutex_unlock(parent->objectlock); + g_free (surfaces); + + } + else + { + LOG_E( + "not MPEG4 video encode Object\n"); + return MIX_RESULT_FAIL; + + } + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_mpeg4_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + + LOG_V( "Begin\n"); + + /*currenly only support one input and output buffer*/ + //TODO: params i + + if (bufincnt != 1 || iovoutcnt != 1) { + LOG_E( + "buffer count not equel to 1\n"); + LOG_E( + "maybe some exception occurs\n"); + } + + if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { 
+ LOG_E(
+ "!mix || !bufin[0] ||!iovout[0]\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ //TODO: encode_params is reserved here for future usage.
+
+ /* TODO: decide if we need to chainup parent method.
+ * if we do, the following is the code:
+ */
+
+#if 0
+ if (parent_class->encode) {
+ return parent_class->encode(mix, bufin, bufincnt, iovout,
+ iovoutcnt, encode_params);
+ }
+#endif
+
+ if (MIX_IS_VIDEOFORMATENC_MPEG4(mix))
+ {
+
+ parent = MIX_VIDEOFORMATENC(&(mix->parent));
+ MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix);
+
+ LOG_V( "Locking\n");
+ g_mutex_lock(parent->objectlock);
+
+
+ //TODO: we could also move some encode preparation work here
+
+ LOG_V(
+ "mix_videofmtenc_mpeg4_process_encode\n");
+
+ ret = mix_videofmtenc_mpeg4_process_encode (self,
+ bufin[0], iovout[0]);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "Failed mix_videofmtenc_mpeg4_process_encode\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+
+ LOG_V( "Unlocking\n");
+
+ g_mutex_unlock(parent->objectlock);
+ }
+ else
+ {
+ LOG_E(
+ "not MPEG4 video encode Object\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix) {
+
+ //MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ LOG_V( "Begin\n");
+
+ if (mix == NULL) {
+ LOG_E( "mix == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+
+ /*do not chain to parent flush func*/
+#if 0
+ if (parent_class->flush) {
+ return parent_class->flush(mix, msg);
+ }
+#endif
+
+ MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix);
+
+ g_mutex_lock(mix->objectlock);
+
+ /*unref the current source surface*/
+ if (self->cur_fame != NULL)
+ {
+ mix_videoframe_unref (self->cur_fame);
+ self->cur_fame = NULL;
+ }
+
+ /*unref the reconstructed surface*/
+ if (self->rec_fame != NULL)
+ {
+ mix_videoframe_unref (self->rec_fame);
+ self->rec_fame = NULL;
+ }
+
+ /*unref the reference surface*/
+ if (self->ref_fame != NULL)
+ {
+ mix_videoframe_unref (self->ref_fame);
+ self->ref_fame = NULL;
+ }
+
+ /*reset the properties*/
+ self->encoded_frames = 0;
+ self->pic_skipped = FALSE;
+ self->is_intra = TRUE;
+
+ g_mutex_unlock(mix->objectlock);
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_mpeg4_eos(MixVideoFormatEnc *mix) {
+
+ /* TODO: add codes for MPEG-4:2 */
+
+ /* TODO: decide if we need to chainup parent method.
+ * if we do, the following is the code: + */ + + LOG_V( "\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if (parent_class->eos) { + return parent_class->eos(mix); + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_mpeg4_deinitialize(MixVideoFormatEnc *mix) { + + MixVideoFormatEnc *parent = NULL; + VAStatus va_status; + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix); + + LOG_V( "Release frames\n"); + + g_mutex_lock(parent->objectlock); + +#if 0 + /*unref the current source surface*/ + if (self->cur_fame != NULL) + { + mix_videoframe_unref (self->cur_fame); + self->cur_fame = NULL; + } +#endif + + /*unref the reconstructed surface*/ + if (self->rec_fame != NULL) + { + mix_videoframe_unref (self->rec_fame); + self->rec_fame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_fame != NULL) + { + mix_videoframe_unref (self->ref_fame); + self->ref_fame = NULL; + } + + LOG_V( "Release surfaces\n"); + + if (self->ci_shared_surfaces) + { + g_free (self->ci_shared_surfaces); + self->ci_shared_surfaces = NULL; + } + + if (self->surfaces) + { + g_free (self->surfaces); + self->surfaces = NULL; + } + + LOG_V( "vaDestroyContext\n"); + + va_status = vaDestroyContext (parent->va_display, parent->va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyContext\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + LOG_V( "vaDestroyConfig\n"); + + va_status = vaDestroyConfig (parent->va_display, parent->va_config); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyConfig\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + parent->initialized = TRUE; + + g_mutex_unlock(parent->objectlock); + +#if 1 + if (parent_class->deinitialize) { + return parent_class->deinitialize(mix); + } +#endif + + //Most stuff is cleaned up in parent_class->finalize() + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_mpeg4_send_seq_params (MixVideoFormatEnc_MPEG4 *mix) +{ + + VAStatus va_status; + VAEncSequenceParameterBufferMPEG4 mpeg4_seq_param; + VABufferID seq_para_buf_id; + + + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) + return MIX_RESULT_NULL_PTR; + + LOG_V( "Begin\n\n"); + + if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + { + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set up the sequence params for HW*/ + mpeg4_seq_param.profile_and_level_indication = mix->profile_and_level_indication; //TODO, hard code now + mpeg4_seq_param.video_object_layer_width= parent->picture_width; + mpeg4_seq_param.video_object_layer_height= parent->picture_height; + mpeg4_seq_param.vop_time_increment_resolution = + (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; + mpeg4_seq_param.fixed_vop_time_increment= mix->fixed_vop_time_increment; + mpeg4_seq_param.bits_per_second= parent->bitrate; + mpeg4_seq_param.frame_rate = + (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; + mpeg4_seq_param.initial_qp = parent->initial_qp; + mpeg4_seq_param.min_qp = parent->min_qp; + mpeg4_seq_param.intra_period = parent->intra_period; + + + //mpeg4_seq_param.fixed_vop_rate = 30; + + + + LOG_V( + "===mpeg4 sequence params===\n"); + + LOG_I( "profile_and_level_indication = %d\n", + 
(guint)mpeg4_seq_param.profile_and_level_indication); + LOG_I( "intra_period = %d\n", + mpeg4_seq_param.intra_period); + LOG_I( "video_object_layer_width = %d\n", + mpeg4_seq_param.video_object_layer_width); + LOG_I( "video_object_layer_height = %d\n", + mpeg4_seq_param.video_object_layer_height); + LOG_I( "vop_time_increment_resolution = %d\n", + mpeg4_seq_param.vop_time_increment_resolution); + LOG_I( "fixed_vop_rate = %d\n", + mpeg4_seq_param.fixed_vop_rate); + LOG_I( "fixed_vop_time_increment = %d\n", + mpeg4_seq_param.fixed_vop_time_increment); + LOG_I( "bitrate = %d\n", + mpeg4_seq_param.bits_per_second); + LOG_I( "frame_rate = %d\n", + mpeg4_seq_param.frame_rate); + LOG_I( "initial_qp = %d\n", + mpeg4_seq_param.initial_qp); + LOG_I( "min_qp = %d\n", + mpeg4_seq_param.min_qp); + LOG_I( "intra_period = %d\n\n", + mpeg4_seq_param.intra_period); + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncSequenceParameterBufferType, + sizeof(mpeg4_seq_param), + 1, &mpeg4_seq_param, + &seq_para_buf_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &seq_para_buf_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); + return MIX_RESULT_FAIL; + } + } + else + { + LOG_E( + "not MPEG4 video encode Object\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; + + +} + +MIX_RESULT mix_videofmtenc_mpeg4_send_picture_parameter (MixVideoFormatEnc_MPEG4 *mix) +{ + VAStatus va_status; + VAEncPictureParameterBufferMPEG4 mpeg4_pic_param; + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) + return MIX_RESULT_NULL_PTR; + + LOG_V( "Begin\n\n"); + +#if 0 //not needed currently + MixVideoConfigParamsEncMPEG4 * params_mpeg4 + = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc); +#endif + + if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) { + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set picture params for HW*/ + mpeg4_pic_param.reference_picture = mix->ref_fame->frame_id; + mpeg4_pic_param.reconstructed_picture = mix->rec_fame->frame_id; + mpeg4_pic_param.coded_buf = mix->coded_buf; + mpeg4_pic_param.picture_width = parent->picture_width; + mpeg4_pic_param.picture_height = parent->picture_height; + mpeg4_pic_param.vop_time_increment= mix->encoded_frames; + mpeg4_pic_param.picture_type = mix->is_intra ? 
VAEncPictureTypeIntra : VAEncPictureTypePredictive; + + + + LOG_V( + "======mpeg4 picture params======\n"); + LOG_I( "reference_picture = 0x%08x\n", + mpeg4_pic_param.reference_picture); + LOG_I( "reconstructed_picture = 0x%08x\n", + mpeg4_pic_param.reconstructed_picture); + LOG_I( "coded_buf = 0x%08x\n", + mpeg4_pic_param.coded_buf); + LOG_I( "picture_width = %d\n", + mpeg4_pic_param.picture_width); + LOG_I( "picture_height = %d\n", + mpeg4_pic_param.picture_height); + LOG_I( "vop_time_increment = %d\n", + mpeg4_pic_param.vop_time_increment); + LOG_I( "picture_type = %d\n\n", + mpeg4_pic_param.picture_type); + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncPictureParameterBufferType, + sizeof(mpeg4_pic_param), + 1,&mpeg4_pic_param, + &mix->pic_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->pic_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); + return MIX_RESULT_FAIL; + } + } + else + { + LOG_E( + "not MPEG4 video encode Object\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + return MIX_RESULT_SUCCESS; + +} + + +MIX_RESULT mix_videofmtenc_mpeg4_send_slice_parameter (MixVideoFormatEnc_MPEG4 *mix) +{ + VAStatus va_status; + + guint slice_height; + guint slice_index; + guint slice_height_in_mb; + + if (mix == NULL) + return MIX_RESULT_NULL_PTR; + + LOG_V( "Begin\n\n"); + + + MixVideoFormatEnc *parent = NULL; + + if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + { + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + slice_height = parent->picture_height; + + slice_height += 15; + slice_height &= (~15); + + VAEncSliceParameterBuffer slice_param; + slice_index = 0; + slice_height_in_mb = slice_height / 16; + slice_param.start_row_number = 0; + slice_param.slice_height = slice_height / 16; + slice_param.slice_flags.bits.is_intra = mix->is_intra; + slice_param.slice_flags.bits.disable_deblocking_filter_idc + = mix->disable_deblocking_filter_idc; + + LOG_V( + "======mpeg4 slice params======\n"); + + LOG_I( "start_row_number = %d\n", + (gint) slice_param.start_row_number); + LOG_I( "slice_height_in_mb = %d\n", + (gint) slice_param.slice_height); + LOG_I( "slice.is_intra = %d\n", + (gint) slice_param.slice_flags.bits.is_intra); + LOG_I( + "disable_deblocking_filter_idc = %d\n\n", + (gint) mix->disable_deblocking_filter_idc); + + va_status = vaCreateBuffer (parent->va_display, parent->va_context, + VAEncSliceParameterBufferType, + sizeof(VAEncSliceParameterBuffer), + 1, &slice_param, + &mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->slice_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + } + else + { + LOG_E( + "not MPEG4 video encode Object\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, + MixBuffer * bufin, MixIOVec * iovout) +{ + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus va_status = VA_STATUS_SUCCESS; + VADisplay va_display = NULL; + VAContextID va_context; + gulong surface = 0; + guint16 width, height; + + MixVideoFrame * tmp_fame; + 
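/* tmp_fame is only used to swap the reference and reconstructed
+ * frames once a picture has been encoded. */
+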
guint8 *buf; + + if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { + LOG_E( + "mix == NUL) || bufin == NULL || iovout == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + { + + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + va_display = parent->va_display; + va_context = parent->va_context; + width = parent->picture_width; + height = parent->picture_height; + + + LOG_I( "encoded_frames = %d\n", + mix->encoded_frames); + LOG_I( "is_intra = %d\n", + mix->is_intra); + LOG_I( "ci_frame_id = 0x%08x\n", + (guint) parent->ci_frame_id); + + /* determine the picture type*/ + if ((mix->encoded_frames % parent->intra_period) == 0) { + mix->is_intra = TRUE; + } else { + mix->is_intra = FALSE; + } + + LOG_I( "is_intra_picture = %d\n", + mix->is_intra); + + LOG_V( + "Get Surface from the pool\n"); + + /*current we use one surface for source data, + * one for reference and one for reconstructed*/ + /*TODO, could be refine here*/ + + if (!parent->share_buf_mode) { + LOG_V( + "We are NOT in share buffer mode\n"); + + if (mix->ref_fame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_fame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + return MIX_RESULT_FAIL; + } + } + + if (mix->rec_fame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_fame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + return MIX_RESULT_FAIL; + } + } + + if (parent->need_display) { + mix->cur_fame = NULL; + } + + if (mix->cur_fame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_fame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + return MIX_RESULT_FAIL; + } + } + + LOG_V( "Get Surface Done\n"); + + + VAImage src_image; + guint8 *pvbuf; + guint8 *dst_y; + guint8 *dst_uv; + int i,j; + + LOG_V( + "map source data to surface\n"); + + ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videoframe_get_frame_id\n"); + return MIX_RESULT_FAIL; + } + + + LOG_I( + "surface id = 0x%08x\n", (guint) surface); + + va_status = vaDeriveImage(va_display, surface, &src_image); + //need to destroy + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDeriveImage\n"); + return MIX_RESULT_FAIL; + } + + VAImage *image = &src_image; + + LOG_V( "vaDeriveImage Done\n"); + + + va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed to vaMapBuffer\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( + "vaImage information\n"); + LOG_I( + "image->pitches[0] = %d\n", image->pitches[0]); + LOG_I( + "image->pitches[1] = %d\n", image->pitches[1]); + LOG_I( + "image->offsets[0] = %d\n", image->offsets[0]); + LOG_I( + "image->offsets[1] = %d\n", image->offsets[1]); + LOG_I( + "image->num_planes = %d\n", image->num_planes); + LOG_I( + "image->width = %d\n", image->width); + LOG_I( + "image->height = %d\n", image->height); + + LOG_I( + "input buf size = %d\n", bufin->size); + + guint8 *inbuf = bufin->data; + + /*need to convert YUV420 to NV12*/ + dst_y = pvbuf +image->offsets[0]; + + for (i = 0; i < height; i ++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + + dst_uv = pvbuf + image->offsets[1]; + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dst_uv [j] = 
inbuf [width * height + i * width / 2 + j / 2];
+ dst_uv [j + 1] =
+ inbuf [width * height * 5 / 4 + i * width / 2 + j / 2];
+ }
+ dst_uv += image->pitches[1];
+ }
+
+ va_status = vaUnmapBuffer(va_display, image->buf);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaUnmapBuffer\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ va_status = vaDestroyImage(va_display, src_image.image_id);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaDestroyImage\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V(
+ "Map source data to surface done\n");
+
+ }
+
+ else {//if (!parent->share_buf_mode)
+
+ MixVideoFrame * frame = mix_videoframe_new();
+
+ if (mix->ref_fame == NULL)
+ {
+ ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1);
+
+ ret = mix_surfacepool_get_frame_with_ci_frameidx
+ (parent->surfacepool, &mix->ref_fame, frame);
+ if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used
+ {
+ LOG_E(
+ "get reference surface from pool failed\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ if (mix->rec_fame == NULL)
+ {
+ ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2);
+
+ ret = mix_surfacepool_get_frame_with_ci_frameidx
+ (parent->surfacepool, &mix->rec_fame, frame);
+
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "get reconstructed surface from pool failed\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ if (parent->need_display) {
+ mix->cur_fame = NULL;
+ }
+
+ if (mix->cur_fame == NULL)
+ {
+ guint ci_idx;
+ memcpy (&ci_idx, bufin->data, bufin->size);
+
+ LOG_I(
+ "surface_num = %d\n", mix->surface_num);
+ LOG_I(
+ "ci_frame_idx = %d\n", ci_idx);
+
+ if (ci_idx > mix->surface_num - 2) {
+ LOG_E(
+ "the CI frame idx is larger than the CI frame number\n");
+ return MIX_RESULT_FAIL;
+ }
+
+
+ ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx);
+
+ ret = mix_surfacepool_get_frame_with_ci_frameidx
+ (parent->surfacepool, &mix->cur_fame, frame);
+
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "get current working surface from pool failed\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface);
+
+ }
+
+ LOG_V( "vaBeginPicture\n");
+ LOG_I( "va_context = 0x%08x\n",(guint)va_context);
+ LOG_I( "surface = 0x%08x\n",(guint)surface);
+ LOG_I( "va_display = 0x%08x\n",(guint)va_display);
+
+ va_status = vaBeginPicture(va_display, va_context, surface);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E( "Failed vaBeginPicture\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V( "mix_videofmtenc_mpeg4_send_seq_params\n");
+
+ if (mix->encoded_frames == 0) {
+ ret = mix_videofmtenc_mpeg4_send_seq_params (mix);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "Failed mix_videofmtenc_mpeg4_send_seq_params\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ ret = mix_videofmtenc_mpeg4_send_picture_parameter (mix);
+
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "Failed mix_videofmtenc_mpeg4_send_picture_parameter\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videofmtenc_mpeg4_send_slice_parameter (mix);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "Failed mix_videofmtenc_mpeg4_send_slice_parameter\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V( "before vaEndPicture\n");
+
+ va_status = vaEndPicture (va_display, va_context);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E( "Failed vaEndPicture\n");
+ return MIX_RESULT_FAIL;
+ }
+
+
+ LOG_V( "vaSyncSurface\n");
+
+ va_status = vaSyncSurface(va_display, surface);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E( "Failed vaSyncSurface\n");
+ return MIX_RESULT_FAIL;
+ }
+
+
+ LOG_V(
+ "Start to
+ + /*get encoded data from the VA buffer*/ + va_status = vaMapBuffer (va_display, mix->coded_buf, (void **)&buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaMapBuffer\n"); + return MIX_RESULT_FAIL; + } + + // the first 4 bytes hold the size of the coded data + memcpy (&(iovout->data_size), (void*)buf, 4); + //size = (guint*) buf; + + if (iovout->data == NULL) { //the app didn't allocate the buffer, so _encode allocates it. + + iovout->data = g_malloc (iovout->data_size); + if (iovout->data == NULL) { + return MIX_RESULT_NO_MEMORY; + } + } + + memcpy (iovout->data, buf + 16, iovout->data_size); + + iovout->buffer_size = iovout->data_size; + + LOG_I( + "out size = %d\n", iovout->data_size); + + va_status = vaUnmapBuffer (va_display, mix->coded_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( "get encoded data done\n"); + +#if 0 + if (parent->drawable) { + va_status = vaPutSurface(va_display, surface, (Drawable)parent->drawable, + 0,0, width, height, + 0,0, width, height, + NULL,0,0); + } + +#ifdef SHOW_SRC + else { + + va_status = vaPutSurface(va_display, surface, win, + 0,0, width, height, + 0,0, width, height, + NULL,0,0); + } +#endif //SHOW_SRC +#endif + + VASurfaceStatus status; + + /*query the status of current surface*/ + va_status = vaQuerySurfaceStatus(va_display, surface, &status); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaQuerySurfaceStatus\n"); + return MIX_RESULT_FAIL; + } + mix->pic_skipped = status & VASurfaceSkipped; + + //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_fame); + + if (parent->need_display) { + ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_fame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_framemanager_enqueue\n"); + return MIX_RESULT_FAIL; + } + } + + + /*update the reference surface and reconstructed surface */ + if (!mix->pic_skipped) { + tmp_fame = mix->rec_fame; + mix->rec_fame = mix->ref_fame; + mix->ref_fame = tmp_fame; + } + + +#if 0 + if (mix->ref_fame != NULL) + mix_videoframe_unref (mix->ref_fame); + mix->ref_fame = mix->rec_fame; + + mix_videoframe_unref (mix->cur_fame); +#endif + + if (!(parent->need_display)) { + mix_videoframe_unref (mix->cur_fame); + mix->cur_fame = NULL; + } + + mix->encoded_frames ++; + } + else + { + LOG_E( + "not MPEG4 video encode Object\n"); + return MIX_RESULT_FAIL; + } + + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size ( + MixVideoFormatEnc *mix, guint * max_size) +{ + + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) + { + LOG_E( + "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + parent = MIX_VIDEOFORMATENC(mix); + MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix); + + if (MIX_IS_VIDEOFORMATENC_MPEG4(self)) { + + if (self->coded_buf_size > 0) { + *max_size = self->coded_buf_size; + LOG_V ("Already calculated the max encoded size, return the cached value directly"); + return MIX_RESULT_SUCCESS; + } + + /*based on the rate control mode, calculate the default encoded buffer size*/ + if (self->va_rcmode == VA_RC_NONE) { + self->coded_buf_size = + (parent->picture_width * parent->picture_height * 400) / (16 * 16); + // set to value according to QP + } + else { + self->coded_buf_size = parent->bitrate / 4; + } + + self->coded_buf_size = + max (self->coded_buf_size, + (parent->picture_width * parent->picture_height * 400) / (16 * 16)); + + /*in case
got a very large user input bit rate value*/ + self->coded_buf_size = + max(self->coded_buf_size, + (parent->picture_width * parent->picture_height * 1.5 * 8)); + self->coded_buf_size = (self->coded_buf_size + 15) &(~15); + } + else + { + LOG_E( + "not MPEG4 video encode Object\n"); + return MIX_RESULT_FAIL; + } + + *max_size = self->coded_buf_size; + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixvideoformatenc_mpeg4.h b/mix_video/src/mixvideoformatenc_mpeg4.h new file mode 100644 index 0000000..dc26efe --- /dev/null +++ b/mix_video/src/mixvideoformatenc_mpeg4.h @@ -0,0 +1,137 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEOFORMATENC_MPEG4_H__ +#define __MIX_VIDEOFORMATENC_MPEG4_H__ + +#include "mixvideoformatenc.h" +#include "mixvideoframe_private.h" + +#define MIX_VIDEO_ENC_MPEG4_SURFACE_NUM 20 + +#define min(X,Y) (((X) < (Y)) ? (X) : (Y)) +#define max(X,Y) (((X) > (Y)) ? (X) : (Y)) + +/* + * Type macros. 
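+ * (standard GObject boilerplate: instance/class cast, type-check and + * class-accessor macros for MixVideoFormatEnc_MPEG4)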
+ */ +#define MIX_TYPE_VIDEOFORMATENC_MPEG4 (mix_videoformatenc_mpeg4_get_type ()) +#define MIX_VIDEOFORMATENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4)) +#define MIX_IS_VIDEOFORMATENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4)) +#define MIX_VIDEOFORMATENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4Class)) +#define MIX_IS_VIDEOFORMATENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC_MPEG4)) +#define MIX_VIDEOFORMATENC_MPEG4_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4Class)) + +typedef struct _MixVideoFormatEnc_MPEG4 MixVideoFormatEnc_MPEG4; +typedef struct _MixVideoFormatEnc_MPEG4Class MixVideoFormatEnc_MPEG4Class; + +struct _MixVideoFormatEnc_MPEG4 { + /*< public > */ + MixVideoFormatEnc parent; + + + VABufferID coded_buf; + VABufferID seq_param_buf; + VABufferID pic_param_buf; + VABufferID slice_param_buf; + VASurfaceID * ci_shared_surfaces; + VASurfaceID * surfaces; + guint surface_num; + + MixVideoFrame *cur_fame; //current input frame to be encoded; + MixVideoFrame *ref_fame; //reference frame + MixVideoFrame *rec_fame; //reconstructed frame; + + guchar profile_and_level_indication; + guint fixed_vop_time_increment; + guint disable_deblocking_filter_idc; + + guint va_rcmode; + + guint encoded_frames; + gboolean pic_skipped; + + gboolean is_intra; + + guint coded_buf_size; + + /*< public > */ +}; + +/** + * MixVideoFormatEnc_MPEG4Class: + * + * MI-X Video object class + */ +struct _MixVideoFormatEnc_MPEG4Class { + /*< public > */ + MixVideoFormatEncClass parent_class; + + /* class members */ + + /*< public > */ +}; + +/** + * mix_videoformatenc_mpeg4_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videoformatenc_mpeg4_get_type(void); + +/** + * mix_videoformatenc_mpeg4_new: + * @returns: A newly allocated instance of #MixVideoFormatEnc_MPEG4 + * + * Use this method to create new instance of #MixVideoFormatEnc_MPEG4 + */ +MixVideoFormatEnc_MPEG4 *mix_videoformatenc_mpeg4_new(void); + +/** + * mix_videoformatenc_mpeg4_ref: + * @mix: object to add reference + * @returns: the MixVideoFormatEnc_MPEG4 instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoFormatEnc_MPEG4 *mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix); + +/** + * mix_videoformatenc_mpeg4_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. 
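+ * + * A minimal usage sketch (illustrative only; assumes creation succeeds): + * |[ + * MixVideoFormatEnc_MPEG4 *enc = mix_videoformatenc_mpeg4_new (); + * enc = mix_videoformatenc_mpeg4_ref (enc); // refcount: 2 + * mix_videoformatenc_mpeg4_unref (enc); // refcount: 1 + * mix_videoformatenc_mpeg4_unref (enc); // object disposed + * ]|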
+ */ +#define mix_videoformatenc_mpeg4_unref(obj) g_object_unref (G_OBJECT(obj)) + +/* Class Methods */ + +/* MPEG-4:2 vmethods */ +MIX_RESULT mix_videofmtenc_mpeg4_getcaps(MixVideoFormatEnc *mix, GString *msg); +MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); +MIX_RESULT mix_videofmtenc_mpeg4_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params); +MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_mpeg4_eos(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_mpeg4_deinitialize(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint * max_size); + +/* Local Methods */ + +MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, MixBuffer * bufin, + MixIOVec * iovout); + +#endif /* __MIX_VIDEOFORMATENC_MPEG4_H__ */ + diff --git a/mix_video/src/mixvideoformatenc_preview.c b/mix_video/src/mixvideoformatenc_preview.c new file mode 100644 index 0000000..17b9a4b --- /dev/null +++ b/mix_video/src/mixvideoformatenc_preview.c @@ -0,0 +1,1187 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include +#include +#include + +#include "mixvideolog.h" + +#include "mixvideoformatenc_preview.h" +#include "mixvideoconfigparamsenc_preview.h" + +#define MDEBUG +#undef SHOW_SRC + +#ifdef SHOW_SRC +Window win = 0; +#endif /* SHOW_SRC */ + + +/* The parent class. The pointer will be saved + * in this class's initialization. The pointer + * can be used for chaining method call if needed. 
+ */ +static MixVideoFormatEncClass *parent_class = NULL; + +static void mix_videoformatenc_preview_finalize(GObject * obj); + +/* + * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC + */ +G_DEFINE_TYPE (MixVideoFormatEnc_Preview, mix_videoformatenc_preview, MIX_TYPE_VIDEOFORMATENC); + +static void mix_videoformatenc_preview_init(MixVideoFormatEnc_Preview * self) { + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); + + /* TODO: public member initialization */ + + /* TODO: private member initialization */ + self->encoded_frames = 0; + self->pic_skipped = FALSE; + self->is_intra = TRUE; + self->cur_fame = NULL; + self->ref_fame = NULL; + self->rec_fame = NULL; + + self->ci_shared_surfaces = NULL; + self->surfaces= NULL; + self->surface_num = 0; + + parent->initialized = FALSE; +} + +static void mix_videoformatenc_preview_class_init( + MixVideoFormatEnc_PreviewClass * klass) { + + /* root class */ + GObjectClass *gobject_class = (GObjectClass *) klass; + + /* direct parent class */ + MixVideoFormatEncClass *video_formatenc_class = + MIX_VIDEOFORMATENC_CLASS(klass); + + /* parent class for later use */ + parent_class = g_type_class_peek_parent(klass); + + /* setup finializer */ + gobject_class->finalize = mix_videoformatenc_preview_finalize; + + /* setup vmethods with base implementation */ + /* TODO: decide if we need to override the parent's methods */ + video_formatenc_class->getcaps = mix_videofmtenc_preview_getcaps; + video_formatenc_class->initialize = mix_videofmtenc_preview_initialize; + video_formatenc_class->encode = mix_videofmtenc_preview_encode; + video_formatenc_class->flush = mix_videofmtenc_preview_flush; + video_formatenc_class->eos = mix_videofmtenc_preview_eos; + video_formatenc_class->deinitialize = mix_videofmtenc_preview_deinitialize; +} + +MixVideoFormatEnc_Preview * +mix_videoformatenc_preview_new(void) { + MixVideoFormatEnc_Preview *ret = + g_object_new(MIX_TYPE_VIDEOFORMATENC_PREVIEW, NULL); + + return ret; +} + +void mix_videoformatenc_preview_finalize(GObject * obj) { + /* clean up here. */ + + /*MixVideoFormatEnc_Preview *mix = MIX_VIDEOFORMATENC_PREVIEW(obj); */ + GObjectClass *root_class = (GObjectClass *) parent_class; + + LOG_V( "\n"); + + /* Chain up parent */ + if (root_class->finalize) { + root_class->finalize(obj); + } +} + +MixVideoFormatEnc_Preview * +mix_videoformatenc_preview_ref(MixVideoFormatEnc_Preview * mix) { + return (MixVideoFormatEnc_Preview *) g_object_ref(G_OBJECT(mix)); +} + +/*Preview vmethods implementation */ +MIX_RESULT mix_videofmtenc_preview_getcaps(MixVideoFormatEnc *mix, GString *msg) { + + /* TODO: add codes for Preview format */ + + /* TODO: decide if we need to chainup parent method. 
+ * if we do, the following is the code: + */ + + LOG_V( "mix_videofmtenc_preview_getcaps\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + + if (parent_class->getcaps) { + return parent_class->getcaps(mix, msg); + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_preview_initialize(MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display ) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + MixVideoConfigParamsEncPreview * config_params_enc_preview; + + VAStatus va_status = VA_STATUS_SUCCESS; + VASurfaceID * surfaces; + + gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; + gint va_num_profiles, va_num_entrypoints; + + VAProfile *va_profiles = NULL; + VAEntrypoint *va_entrypoints = NULL; + VAConfigAttrib va_attrib[2]; + guint index; + + + /*frame_mgr and input_buf_pool is reservered for future use*/ + + if (mix == NULL || config_params_enc == NULL || va_display == NULL) { + LOG_E( + "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "begin\n"); + + + //TODO additional parameter checking + + /* Chainup parent method. */ +#if 1 + if (parent_class->initialize) { + ret = parent_class->initialize(mix, config_params_enc, + frame_mgr, input_buf_pool, surface_pool, + va_display); + } + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + +#endif //disable it currently + + if (MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) + { + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc)) { + config_params_enc_preview = + MIX_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc); + } else { + LOG_V( + "mix_videofmtenc_preview_initialize: no preview config params found\n"); + return MIX_RESULT_FAIL; + } + + g_mutex_lock(parent->objectlock); + + + LOG_V( + "Get properities from params done\n"); + + + //display = XOpenDisplay(NULL); + //va_display = vaGetDisplay (videoencobj->display); + + parent->va_display = va_display; + + LOG_V( "Get Display\n"); + LOG_I( "Display = 0x%08x\n", + (guint)va_display); + + //va_status = vaInitialize(va_display, &va_major_ver, &va_minor_ver); + //g_print ("vaInitialize va_status = %d\n", va_status); + + +#if 0 + /* query the vender information, can ignore*/ + va_vendor = vaQueryVendorString (va_display); + LOG_I( "Vendor = %s\n", + va_vendor); +#endif + + /*get the max number for profiles/entrypoints/attribs*/ + va_max_num_profiles = vaMaxNumProfiles(va_display); + LOG_I( "va_max_num_profiles = %d\n", + va_max_num_profiles); + + va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); + LOG_I( "va_max_num_entrypoints = %d\n", + va_max_num_entrypoints); + + va_max_num_attribs = vaMaxNumConfigAttributes(va_display); + LOG_I( "va_max_num_attribs = %d\n", + va_max_num_attribs); + + va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); + va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); + + if (va_profiles == NULL || va_entrypoints ==NULL) + { + LOG_E( + "!va_profiles || !va_entrypoints\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_NO_MEMORY; + } + + LOG_I( + "va_profiles = 0x%08x\n", (guint)va_profiles); + + LOG_V( "vaQueryConfigProfiles\n"); + + + va_status = vaQueryConfigProfiles (va_display, va_profiles, 
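+ /* on return, va_num_profiles holds the number of profile entries the driver actually filled in */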
&va_num_profiles); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigProfiles\n"); + g_free(va_profiles); + g_free (va_entrypoints); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + LOG_V( "vaQueryConfigProfiles Done\n"); + + + + /*check whether profile is supported*/ + for(index= 0; index < va_num_profiles; index++) { + if(parent->va_profile == va_profiles[index]) + break; + } + + if(index == va_num_profiles) + { + LOG_E( "Profile not supported\n"); + g_free(va_profiles); + g_free (va_entrypoints); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; //Todo, add error handling here + } + + LOG_V( "vaQueryConfigEntrypoints\n"); + + + /*Check entry point*/ + va_status = vaQueryConfigEntrypoints(va_display, + parent->va_profile, + va_entrypoints, &va_num_entrypoints); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigEntrypoints\n"); + g_free(va_profiles); + g_free (va_entrypoints); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + for (index = 0; index < va_num_entrypoints; index ++) { + if (va_entrypoints[index] == VAEntrypointEncSlice) { + break; + } + } + + if (index == va_num_entrypoints) { + LOG_E( "Entrypoint not found\n"); + g_free(va_profiles); + g_free (va_entrypoints); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; //Todo, add error handling here + } + + + /*free profiles and entrypoints*/ + g_free(va_profiles); + g_free (va_entrypoints); + + va_attrib[0].type = VAConfigAttribRTFormat; + va_attrib[1].type = VAConfigAttribRateControl; + + LOG_V( "vaGetConfigAttributes\n"); + + va_status = vaGetConfigAttributes(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaGetConfigAttributes\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + if ((va_attrib[0].value & parent->va_format) == 0) { + LOG_E( "Matched format not found\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; //Todo, add error handling here + } + + + if ((va_attrib[1].value & parent->va_rcmode) == 0) { + LOG_E( "RC mode not found\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; //Todo, add error handling here + } + + va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; + va_attrib[1].value = parent->va_rcmode; + + LOG_V( "======VA Configuration======\n"); + + LOG_I( "profile = %d\n", + parent->va_profile); + LOG_I( "va_entrypoint = %d\n", + parent->va_entrypoint); + LOG_I( "va_attrib[0].type = %d\n", + va_attrib[0].type); + LOG_I( "va_attrib[1].type = %d\n", + va_attrib[1].type); + LOG_I( "va_attrib[0].value (Format) = %d\n", + va_attrib[0].value); + LOG_I( "va_attrib[1].value (RC mode) = %d\n", + va_attrib[1].value); + + LOG_V( "vaCreateConfig\n"); + + va_status = vaCreateConfig(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2, &(parent->va_config)); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaCreateConfig\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + /*TODO: compute the surface number*/ + int numSurfaces; + + if (parent->share_buf_mode) { + numSurfaces = 2; + } + else { + numSurfaces = 8; + parent->ci_frame_num = 0; + } + + self->surface_num = numSurfaces + parent->ci_frame_num; + + surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); + + if (surfaces == NULL) + { + LOG_E( + "Failed allocate surface\n"); + 
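+ /* every error path taken after objectlock was acquired must release it + * before returning, or a later call on this object would deadlock */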
g_mutex_unlock(parent->objectlock); + return MIX_RESULT_NO_MEMORY; + } + + LOG_V( "vaCreateSurfaces\n"); + + va_status = vaCreateSurfaces(va_display, parent->picture_width, + parent->picture_height, parent->va_format, + numSurfaces, surfaces); + //TODO check vret and return fail if needed + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaCreateSurfaces\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + if (parent->share_buf_mode) { + + LOG_V( + "We are in share buffer mode!\n"); + self->ci_shared_surfaces = + g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); + + if (self->ci_shared_surfaces == NULL) + { + LOG_E( + "Failed allocate shared surface\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_NO_MEMORY; + } + + guint index; + for(index = 0; index < parent->ci_frame_num; index++) { + + LOG_I( "ci_frame_id = %lu\n", + parent->ci_frame_id[index]); + + LOG_V( + "vaCreateSurfaceFromCIFrame\n"); + + va_status = vaCreateSurfaceFromCIFrame(va_display, + (gulong) (parent->ci_frame_id[index]), + &self->ci_shared_surfaces[index]); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateSurfaceFromCIFrame\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + } + + LOG_V( + "vaCreateSurfaceFromCIFrame Done\n"); + + }// if (parent->share_buf_mode) + + self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); + + if (self->surfaces == NULL) + { + LOG_E( + "Failed allocate private surface\n"); + g_free (surfaces); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_NO_MEMORY; + } + + if (parent->share_buf_mode) { + /*shared surfaces should be put in pool first, + * because we will get it accoring to CI index*/ + for(index = 0; index < parent->ci_frame_num; index++) + self->surfaces[index] = self->ci_shared_surfaces[index]; + } + + for(index = 0; index < numSurfaces; index++) { + self->surfaces[index + parent->ci_frame_num] = surfaces[index]; + } + + LOG_V( "assign surface Done\n"); + LOG_I( "Created %d libva surfaces\n", + numSurfaces + parent->ci_frame_num); + +#if 0 //current put this in gst + images = g_malloc(sizeof(VAImage)*numSurfaces); + if (images == NULL) + { + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + for (index = 0; index < numSurfaces; index++) { + //Derive an VAImage from an existing surface. 
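+ //(unlike vaCreateImage followed by vaGetImage, vaDeriveImage exposes the + //surface memory directly, so no extra copy of the frame data is made)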
+ //The image buffer can then be mapped/unmapped for CPU access + va_status = vaDeriveImage(va_display, surfaces[index], + &images[index]); + } +#endif + + LOG_V( "mix_surfacepool_new\n"); + + parent->surfacepool = mix_surfacepool_new(); + if (surface_pool) + *surface_pool = parent->surfacepool; + //which is useful to check before encode + + if (parent->surfacepool == NULL) + { + LOG_E( + "Failed to mix_surfacepool_new\n"); + g_free (surfaces); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + LOG_V( + "mix_surfacepool_initialize\n"); + + ret = mix_surfacepool_initialize(parent->surfacepool, + self->surfaces, parent->ci_frame_num + numSurfaces); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + //TODO cleanup and/or retry + g_free (surfaces); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + default: + break; + } + + + //Initialize and save the VA context ID + LOG_V( "vaCreateContext\n"); + + va_status = vaCreateContext(va_display, parent->va_config, + parent->picture_width, parent->picture_height, + 0, self->surfaces, parent->ci_frame_num + numSurfaces, + &(parent->va_context)); + + LOG_I( + "Created libva context width %d, height %d\n", + parent->picture_width, parent->picture_height); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateContext\n"); + LOG_I( "va_status = %d\n", + (guint)va_status); + g_free (surfaces); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + self->coded_buf_size = 4; + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &self->coded_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + g_free (surfaces); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + +#ifdef SHOW_SRC + Display * display = XOpenDisplay (NULL); + + LOG_I( "display = 0x%08x\n", + (guint) display); + win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, + parent->picture_width, parent->picture_height, 0, 0, + WhitePixel(display, 0)); + XMapWindow(display, win); + XSelectInput(display, win, KeyPressMask | StructureNotifyMask); + + XSync(display, False); + LOG_I( "va_display = 0x%08x\n", + (guint) va_display); + +#endif /* SHOW_SRC */ + + parent->initialized = TRUE; + + g_mutex_unlock(parent->objectlock); + g_free (surfaces); + + } + else + { + LOG_E( + "not Preview video encode Object\n"); + return MIX_RESULT_FAIL; + + } + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_preview_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + + LOG_V( "Begin\n"); + + /*currenly only support one input and output buffer*/ + //TODO: params i + + if (bufincnt != 1 || iovoutcnt != 1) { + LOG_E( + "buffer count not equel to 1\n"); + LOG_E( + "maybe some exception occurs\n"); + } + + if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { + LOG_E( + "!mix || !bufin[0] ||!iovout[0]\n"); + return MIX_RESULT_NULL_PTR; + } + + //TODO: encode_params is reserved here for future usage. + + /* TODO: decide if we need to chainup parent method. 
+ * * * if we do, the following is the code: + * */ + +#if 0 + if (parent_class->encode) { + return parent_class->encode(mix, bufin, bufincnt, iovout, + iovoutcnt, encode_params); + } +#endif + + if (MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) + { + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW (mix); + + LOG_V( "Locking\n"); + g_mutex_lock(parent->objectlock); + + + //TODO: also we could move some encode Preparation work to here + + LOG_V( + "mix_videofmtenc_preview_process_encode\n"); + + ret = mix_videofmtenc_preview_process_encode (self, + bufin[0], iovout[0]); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_preview_process_encode\n"); + return MIX_RESULT_FAIL; + } + + + LOG_V( "UnLocking\n"); + + g_mutex_unlock(parent->objectlock); + } + else + { + LOG_E( + "not Preview video encode Object\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_preview_flush(MixVideoFormatEnc *mix) { + + //MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + + /*not chain to parent flush func*/ +#if 0 + if (parent_class->flush) { + return parent_class->flush(mix, msg); + } +#endif + + MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); + + g_mutex_lock(mix->objectlock); + +#if 0 + /*unref the current source surface*/ + if (self->cur_fame != NULL) + { + mix_videoframe_unref (self->cur_fame); + self->cur_fame = NULL; + } +#endif + + /*unref the reconstructed surface*/ + if (self->rec_fame != NULL) + { + mix_videoframe_unref (self->rec_fame); + self->rec_fame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_fame != NULL) + { + mix_videoframe_unref (self->ref_fame); + self->ref_fame = NULL; + } + + /*reset the properities*/ + self->encoded_frames = 0; + self->pic_skipped = FALSE; + self->is_intra = TRUE; + + g_mutex_unlock(mix->objectlock); + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_preview_eos(MixVideoFormatEnc *mix) { + + /* TODO: add codes for preview */ + + /* TODO: decide if we need to chainup parent method. 
+ * if we do, the following is the code: + */ + + LOG_V( "\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if (parent_class->eos) { + return parent_class->eos(mix); + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix) { + + MixVideoFormatEnc *parent = NULL; + VAStatus va_status; + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); + + LOG_V( "Release frames\n"); + + g_mutex_lock(parent->objectlock); + +#if 0 + /*unref the current source surface*/ + if (self->cur_fame != NULL) + { + mix_videoframe_unref (self->cur_fame); + self->cur_fame = NULL; + } +#endif + + /*unref the reconstructed surface*/ + if (self->rec_fame != NULL) + { + mix_videoframe_unref (self->rec_fame); + self->rec_fame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_fame != NULL) + { + mix_videoframe_unref (self->ref_fame); + self->ref_fame = NULL; + } + + LOG_V( "Release surfaces\n"); + + if (self->ci_shared_surfaces) + { + g_free (self->ci_shared_surfaces); + self->ci_shared_surfaces = NULL; + } + + if (self->surfaces) + { + g_free (self->surfaces); + self->surfaces = NULL; + } + + LOG_V( "vaDestroyContext\n"); + + va_status = vaDestroyContext (parent->va_display, parent->va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyContext\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + LOG_V( "vaDestroyConfig\n"); + + va_status = vaDestroyConfig (parent->va_display, parent->va_config); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyConfig\n"); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + parent->initialized = FALSE; + + g_mutex_unlock(parent->objectlock); + +#if 1 + if (parent_class->deinitialize) { + return parent_class->deinitialize(mix); + } +#endif + + //Most stuff is cleaned up in parent_class->finalize() + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mix, + MixBuffer * bufin, MixIOVec * iovout) +{ + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus va_status = VA_STATUS_SUCCESS; + VADisplay va_display = NULL; + VAContextID va_context; + gulong surface = 0; + guint16 width, height; + + //MixVideoFrame * tmp_fame; + //guint8 *buf; + + if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { + LOG_E( + "mix == NULL || bufin == NULL || iovout == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + if (MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) + { + + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + va_display = parent->va_display; + va_context = parent->va_context; + width = parent->picture_width; + height = parent->picture_height; + + + LOG_I( "encoded_frames = %d\n", + mix->encoded_frames); + LOG_I( "is_intra = %d\n", + mix->is_intra); + LOG_I( "ci_frame_id = 0x%08x\n", + (guint) parent->ci_frame_id); + + LOG_V( + "Get Surface from the pool\n"); + + /*currently we use one surface for source data, + * one for reference and one for reconstructed*/ + /*TODO: could be refined here*/ + + if (!parent->share_buf_mode) { + LOG_V( + "We are NOT in share buffer mode\n"); + + if (mix->ref_fame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_fame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used
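+ /* mix_surfacepool_get hands out a free surface from the pool filled during + * initialize; the ref/rec/cur frames are fetched once and then reused across frames */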
+ { + LOG_E( + "Failed to mix_surfacepool_get\n"); + return MIX_RESULT_FAIL; + } + } + + if (mix->rec_fame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_fame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + return MIX_RESULT_FAIL; + } + } + + if (parent->need_display) { + mix->cur_fame = NULL; + } + + if (mix->cur_fame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_fame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + return MIX_RESULT_FAIL; + } + } + + LOG_V( "Get Surface Done\n"); + + + VAImage src_image; + guint8 *pvbuf; + guint8 *dst_y; + guint8 *dst_uv; + int i,j; + + LOG_V( + "map source data to surface\n"); + + ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videoframe_get_frame_id\n"); + return MIX_RESULT_FAIL; + } + + + LOG_I( + "surface id = 0x%08x\n", (guint) surface); + + va_status = vaDeriveImage(va_display, surface, &src_image); + //need to destroy + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDeriveImage\n"); + return MIX_RESULT_FAIL; + } + + VAImage *image = &src_image; + + LOG_V( "vaDeriveImage Done\n"); + + + va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed to vaMapBuffer\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( + "vaImage information\n"); + LOG_I( + "image->pitches[0] = %d\n", image->pitches[0]); + LOG_I( + "image->pitches[1] = %d\n", image->pitches[1]); + LOG_I( + "image->offsets[0] = %d\n", image->offsets[0]); + LOG_I( + "image->offsets[1] = %d\n", image->offsets[1]); + LOG_I( + "image->num_planes = %d\n", image->num_planes); + LOG_I( + "image->width = %d\n", image->width); + LOG_I( + "image->height = %d\n", image->height); + + LOG_I( + "input buf size = %d\n", bufin->size); + + guint8 *inbuf = bufin->data; + + /*need to convert YUV420 to NV12*/ + dst_y = pvbuf + image->offsets[0]; + + for (i = 0; i < height; i ++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + + dst_uv = pvbuf + image->offsets[1]; + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; + dst_uv [j + 1] = + inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; + } + dst_uv += image->pitches[1]; + } + + va_status = vaUnmapBuffer(va_display, image->buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaDestroyImage(va_display, src_image.image_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDestroyImage\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( + "Map source data to surface done\n"); + + } + + else {//if (!parent->share_buf_mode) + + MixVideoFrame * frame = mix_videoframe_new(); + + if (mix->ref_fame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->ref_fame, frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used + { + LOG_E( + "get reference surface from pool failed\n"); + return MIX_RESULT_FAIL; + } + } + + if (mix->rec_fame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->rec_fame, frame); + + if (ret !=
MIX_RESULT_SUCCESS) + { + LOG_E( + "get recontructed surface from pool failed\n"); + return MIX_RESULT_FAIL; + } + } + + //mix_videoframe_unref (mix->cur_fame); + + if (parent->need_display) { + mix->cur_fame = NULL; + } + + if (mix->cur_fame == NULL) + { + guint ci_idx; + memcpy (&ci_idx, bufin->data, bufin->size); + + LOG_I( + "surface_num = %d\n", mix->surface_num); + LOG_I( + "ci_frame_idx = %d\n", ci_idx); + + if (ci_idx > mix->surface_num - 2) { + LOG_E( + "the CI frame idx is too bigger than CI frame number\n"); + return MIX_RESULT_FAIL; + } + + + ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->cur_fame, frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get current working surface from pool failed\n"); + return MIX_RESULT_FAIL; + } + } + + ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface); + + } + + LOG_V( "vaBeginPicture\n"); + LOG_I( "va_context = 0x%08x\n",(guint)va_context); + LOG_I( "surface = 0x%08x\n",(guint)surface); + LOG_I( "va_display = 0x%08x\n",(guint)va_display); + + iovout->data_size = 4; + iovout->data = g_malloc (iovout->data_size); + if (iovout->data == NULL) { + return MIX_RESULT_NO_MEMORY; + } + + memset (iovout->data, 0, iovout->data_size); + + iovout->buffer_size = iovout->data_size; + + + if (parent->need_display) { + ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_fame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_framemanager_enqueue\n"); + return MIX_RESULT_FAIL; + } + } + + + if (!(parent->need_display)) { + mix_videoframe_unref (mix->cur_fame); + mix->cur_fame = NULL; + } + + mix->encoded_frames ++; + } + else + { + LOG_E( + "not Preview video encode Object\n"); + return MIX_RESULT_FAIL; + } + + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixvideoformatenc_preview.h b/mix_video/src/mixvideoformatenc_preview.h new file mode 100644 index 0000000..dd404e2 --- /dev/null +++ b/mix_video/src/mixvideoformatenc_preview.h @@ -0,0 +1,133 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEOFORMATENC_PREVIEW_H__ +#define __MIX_VIDEOFORMATENC_PREVIEW_H__ + +#include "mixvideoformatenc.h" +#include "mixvideoframe_private.h" + +#define MIX_VIDEO_ENC_PREVIEW_SURFACE_NUM 20 + +#define min(X,Y) (((X) < (Y)) ? (X) : (Y)) +#define max(X,Y) (((X) > (Y)) ? (X) : (Y)) + +/* + * Type macros. 
+ */ +#define MIX_TYPE_VIDEOFORMATENC_PREVIEW (mix_videoformatenc_preview_get_type ()) +#define MIX_VIDEOFORMATENC_PREVIEW(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC_PREVIEW, MixVideoFormatEnc_Preview)) +#define MIX_IS_VIDEOFORMATENC_PREVIEW(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC_PREVIEW)) +#define MIX_VIDEOFORMATENC_PREVIEW_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC_PREVIEW, MixVideoFormatEnc_PreviewClass)) +#define MIX_IS_VIDEOFORMATENC_PREVIEW_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC_PREVIEW)) +#define MIX_VIDEOFORMATENC_PREVIEW_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC_PREVIEW, MixVideoFormatEnc_PreviewClass)) + +typedef struct _MixVideoFormatEnc_Preview MixVideoFormatEnc_Preview; +typedef struct _MixVideoFormatEnc_PreviewClass MixVideoFormatEnc_PreviewClass; + +struct _MixVideoFormatEnc_Preview { + /*< public > */ + MixVideoFormatEnc parent; + + VABufferID coded_buf; + VABufferID seq_param_buf; + VABufferID pic_param_buf; + VABufferID slice_param_buf; + VASurfaceID * ci_shared_surfaces; + VASurfaceID * surfaces; + guint surface_num; + + MixVideoFrame *cur_fame; //current input frame to be encoded; + MixVideoFrame *ref_fame; //reference frame + MixVideoFrame *rec_fame; //reconstructed frame; + + guint basic_unit_size; //for rate control + guint disable_deblocking_filter_idc; + guint slice_num; + guint va_rcmode; + + + guint encoded_frames; + gboolean pic_skipped; + + gboolean is_intra; + + guint coded_buf_size; + + /*< public > */ +}; + +/** + * MixVideoFormatEnc_PreviewClass: + * + * MI-X Video object class + */ +struct _MixVideoFormatEnc_PreviewClass { + /*< public > */ + MixVideoFormatEncClass parent_class; + + /* class members */ + + /*< public > */ +}; + +/** + * mix_videoformatenc_preview_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videoformatenc_preview_get_type(void); + +/** + * mix_videoformatenc_preview_new: + * @returns: A newly allocated instance of #MixVideoFormatEnc_Preview + * + * Use this method to create new instance of #MixVideoFormatEnc_Preview + */ +MixVideoFormatEnc_Preview *mix_videoformatenc_preview_new(void); + +/** + * mix_videoformatenc_preview_ref: + * @mix: object to add reference + * @returns: the MixVideoFormatEnc_Preview instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoFormatEnc_Preview *mix_videoformatenc_preview_ref(MixVideoFormatEnc_Preview * mix); + +/** + * mix_videoformatenc_preview_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. 
+ */ +#define mix_videoformatenc_preview_unref(obj) g_object_unref (G_OBJECT(obj)) + +/* Class Methods */ + +/* Pure preview vmethods */ +MIX_RESULT mix_videofmtenc_preview_getcaps(MixVideoFormatEnc *mix, GString *msg); +MIX_RESULT mix_videofmtenc_preview_initialize(MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); +MIX_RESULT mix_videofmtenc_preview_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params); +MIX_RESULT mix_videofmtenc_preview_flush(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_preview_eos(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix); + +/* Local Methods */ +MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mix, MixBuffer * bufin, + MixIOVec * iovout); + +#endif /* __MIX_VIDEOFORMATENC_PREVIEW_H__ */ diff --git a/mix_video/src/mixvideoformatqueue.h b/mix_video/src/mixvideoformatqueue.h new file mode 100644 index 0000000..5594aba --- /dev/null +++ b/mix_video/src/mixvideoformatqueue.h @@ -0,0 +1,24 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_VIDEOFORMATQUEUE_H__ +#define __MIX_VIDEOFORMATQUEUE_H__ + +#include "mixbuffer.h" + +typedef struct _MixInputBufferEntry MixInputBufferEntry; + +struct _MixInputBufferEntry +{ + /*< private > */ + MixBuffer *buf; + guint64 timestamp; + +}; + +#endif /* __MIX_VIDEOFORMATQUEUE_H__ */ diff --git a/mix_video/src/mixvideoframe.c b/mix_video/src/mixvideoframe.c new file mode 100644 index 0000000..2bea5d0 --- /dev/null +++ b/mix_video/src/mixvideoframe.c @@ -0,0 +1,391 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideoframe + * @short_description: VideoConfig parameters + * + * A data object which stores videoconfig specific parameters. + */ + + +#include +#include +#include "mixvideolog.h" +#include "mixvideoframe.h" +#include "mixvideoframe_private.h" + +#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } + +static GType _mix_videoframe_type = 0; +static MixParamsClass *parent_class = NULL; + +#define _do_init { _mix_videoframe_type = g_define_type_id; } + +gboolean mix_videoframe_copy(MixParams * target, const MixParams * src); +MixParams *mix_videoframe_dup(const MixParams * obj); +gboolean mix_videoframe_equal(MixParams * first, MixParams * second); +static void mix_videoframe_finalize(MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoFrame, mix_videoframe, MIX_TYPE_PARAMS, + _do_init); + +#define VIDEOFRAME_PRIVATE(self) ((MixVideoFramePrivate *)((self)->reserved1)) +static void mix_videoframe_init(MixVideoFrame * self) { + /* initialize properties here */ + self->frame_id = VA_INVALID_SURFACE; + self->timestamp = 0; + self->discontinuity = FALSE; + self->frame_structure = VA_FRAME_PICTURE; + + MixVideoFramePrivate *priv = MIX_VIDEOFRAME_GET_PRIVATE(self); + self->reserved1 = priv; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; + + /* set pool pointer in private structure to NULL */ + priv -> pool = NULL; + + /* set stuff for skipped frames */ + priv -> is_skipped = FALSE; + priv -> real_frame = NULL; + + g_static_rec_mutex_init (&priv -> lock); + +} + +static void mix_videoframe_class_init(MixVideoFrameClass * klass) { + MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); + + /* setup static parent class */ + parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); + + mixparams_class->finalize = mix_videoframe_finalize; + mixparams_class->copy = (MixParamsCopyFunction) mix_videoframe_copy; + mixparams_class->dup = (MixParamsDupFunction) mix_videoframe_dup; + mixparams_class->equal = (MixParamsEqualFunction) mix_videoframe_equal; + + /* Register and allocate the space the private structure for this object */ + g_type_class_add_private(mixparams_class, sizeof(MixVideoFramePrivate)); + +} + +MixVideoFrame * +mix_videoframe_new(void) { + MixVideoFrame *ret = (MixVideoFrame *) g_type_create_instance( + MIX_TYPE_VIDEOFRAME); + return ret; +} + +void mix_videoframe_finalize(MixParams * obj) { + /* clean up here. 
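+ * The recursive mutex created in mix_videoframe_init must be freed here + * before chaining up to the parent finalizer.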
*/ + MixVideoFrame *self = MIX_VIDEOFRAME (obj); + MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(self); + + g_static_rec_mutex_free (&priv->lock); + + /* Chain up parent */ + if (parent_class->finalize) { + parent_class->finalize(obj); + } +} + +MixVideoFrame * +mix_videoframe_ref(MixVideoFrame * obj) { + + MixVideoFrame *ret = NULL; + MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); + g_static_rec_mutex_lock(&priv->lock); + LOG_I("obj %x, new refcount is %d\n", (guint) obj, + MIX_PARAMS(obj)->refcount + 1); + + ret = (MixVideoFrame *) mix_params_ref(MIX_PARAMS(obj)); + g_static_rec_mutex_unlock (&priv->lock); + return ret; +} + +void mix_videoframe_unref(MixVideoFrame * obj) { + + if(obj == NULL) { + LOG_E("obj is NULL\n"); + return; + } + + MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); + g_static_rec_mutex_lock(&priv->lock); + + LOG_I("obj %x, frame id %d, new refcount is %d\n", (guint) obj, + (guint) obj->frame_id, MIX_PARAMS(obj)->refcount - 1); + + // Check if we have reduced to 1, in which case we add ourselves to free pool + // but only do this for real frames, not skipped frames + if (((MIX_PARAMS(obj)->refcount - 1) == 1) && (!(priv -> is_skipped))) { + + LOG_I("Adding obj %x, frame id %d back to pool\n", (guint) obj, + (guint) obj->frame_id); + + MixSurfacePool *pool = NULL; + pool = priv -> pool; + if(pool == NULL) { + LOG_E("pool is NULL\n"); + g_static_rec_mutex_unlock (&priv->lock); + return; + } + mix_surfacepool_put(pool, obj); + } + + //If this is a skipped frame that is being deleted, release the real frame + if (((MIX_PARAMS(obj)->refcount - 1) == 0) && (priv -> is_skipped)) { + + LOG_I("skipped frame obj %x, releasing real frame %x \n", + (guint) obj, (guint) priv->real_frame); + + mix_videoframe_unref(priv -> real_frame); + } + + // Unref through base class + mix_params_unref(MIX_PARAMS(obj)); + g_static_rec_mutex_unlock (&priv->lock); +} + +/** + * mix_videoframe_dup: + * @obj: a #MixVideoFrame object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. + */ +MixParams * +mix_videoframe_dup(const MixParams * obj) { + MixParams *ret = NULL; + + if (MIX_IS_VIDEOFRAME(obj)) { + MixVideoFrame *duplicate = mix_videoframe_new(); + if (mix_videoframe_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { + ret = MIX_PARAMS(duplicate); + } else { + mix_videoframe_unref(duplicate); + } + } + return ret; +} + +/** + * mix_videoframe_copy: + * @target: copy to target + * @src: copy from src + * @returns: boolean indicates if copy is successful. + * + * Copy instance data from @src to @target. + */ +gboolean mix_videoframe_copy(MixParams * target, const MixParams * src) { + MixVideoFrame *this_target, *this_src; + + if (MIX_IS_VIDEOFRAME(target) && MIX_IS_VIDEOFRAME(src)) { + // Cast the base object to this child object + this_target = MIX_VIDEOFRAME(target); + this_src = MIX_VIDEOFRAME(src); + + // Free the existing properties + + // Duplicate string + this_target->frame_id = this_src->frame_id; + this_target->timestamp = this_src->timestamp; + this_target->discontinuity = this_src->discontinuity; + this_target->frame_structure = this_src->frame_structure; + + // Now chainup base class + if (parent_class->copy) { + return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( + src)); + } else { + return TRUE; + } + } + return FALSE; +} + +/** + * mix_videoframe_equal: + * @first: first object to compare + * @second: seond object to compare + * @returns: boolean indicates if instance are equal. 
+ * + * Copy instance data from @src to @target. + */ +gboolean mix_videoframe_equal(MixParams * first, MixParams * second) { + gboolean ret = FALSE; + MixVideoFrame *this_first, *this_second; + + if (MIX_IS_VIDEOFRAME(first) && MIX_IS_VIDEOFRAME(second)) { + // Deep compare + // Cast the base object to this child object + + this_first = MIX_VIDEOFRAME(first); + this_second = MIX_VIDEOFRAME(second); + + /* TODO: add comparison for other properties */ + if (this_first->frame_id == this_second->frame_id + && this_first->timestamp == this_second->timestamp + && this_first->discontinuity == this_second->discontinuity + && this_first->frame_structure == this_second->frame_structure) { + // members within this scope equal. chaining up. + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->equal) + ret = klass->equal(first, second); + else + ret = TRUE; + } + } + + return ret; +} + +#define MIX_VIDEOFRAME_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOFRAME(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOFRAME(obj)) return MIX_RESULT_FAIL; \ + + +/* TODO: Add getters and setters for other properties. The following is just an exmaple, not implemented yet. */ +MIX_RESULT mix_videoframe_set_frame_id(MixVideoFrame * obj, gulong frame_id) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->frame_id = frame_id; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_frame_id(MixVideoFrame * obj, gulong * frame_id) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_id); + *frame_id = obj->frame_id; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_ci_frame_idx (MixVideoFrame * obj, guint ci_frame_idx) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->ci_frame_idx = ci_frame_idx; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_ci_frame_idx (MixVideoFrame * obj, guint * ci_frame_idx) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, ci_frame_idx); + *ci_frame_idx = obj->ci_frame_idx; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_timestamp(MixVideoFrame * obj, guint64 timestamp) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + + obj->timestamp = timestamp; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_timestamp(MixVideoFrame * obj, + guint64 * timestamp) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, timestamp); + *timestamp = obj->timestamp; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_discontinuity(MixVideoFrame * obj, + gboolean discontinuity) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->discontinuity = discontinuity; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_discontinuity(MixVideoFrame * obj, + gboolean * discontinuity) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, discontinuity); + *discontinuity = obj->discontinuity; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_frame_structure(MixVideoFrame * obj, + guint32 frame_structure) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->frame_structure = frame_structure; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, + guint32* frame_structure) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_structure); + *frame_structure = obj->frame_structure; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_pool(MixVideoFrame * obj, MixSurfacePool * pool) { + + /* set pool pointer in private structure 
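+ * (this back-pointer is what lets mix_videoframe_unref return the frame to + * the free surface pool once its refcount drops back to 1)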
*/ + VIDEOFRAME_PRIVATE(obj) -> pool = pool; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_frame_type(MixVideoFrame *obj, + MixFrameType frame_type) { + + VIDEOFRAME_PRIVATE(obj) -> frame_type = frame_type; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_frame_type(MixVideoFrame *obj, + MixFrameType *frame_type) { + + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, frame_type); + + *frame_type = VIDEOFRAME_PRIVATE(obj) -> frame_type; + + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videoframe_set_is_skipped(MixVideoFrame *obj, + gboolean is_skipped) { + + VIDEOFRAME_PRIVATE(obj) -> is_skipped = is_skipped; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_is_skipped(MixVideoFrame *obj, + gboolean *is_skipped) { + + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, is_skipped); + + *is_skipped = VIDEOFRAME_PRIVATE(obj) -> is_skipped; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_real_frame(MixVideoFrame *obj, + MixVideoFrame *real) { + + VIDEOFRAME_PRIVATE(obj) -> real_frame = real; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_real_frame(MixVideoFrame *obj, + MixVideoFrame **real) { + + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, real); + + *real = VIDEOFRAME_PRIVATE(obj) -> real_frame; + + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoframe.h b/mix_video/src/mixvideoframe.h new file mode 100644 index 0000000..02338dd --- /dev/null +++ b/mix_video/src/mixvideoframe.h @@ -0,0 +1,144 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEOFRAME_H__ +#define __MIX_VIDEOFRAME_H__ + +#include +#include "mixvideodef.h" + +/** + * MIX_TYPE_VIDEOFRAME: + * + * Get type of class. + */ +#define MIX_TYPE_VIDEOFRAME (mix_videoframe_get_type ()) + +/** + * MIX_VIDEOFRAME: + * @obj: object to be type-casted. + */ +#define MIX_VIDEOFRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFRAME, MixVideoFrame)) + +/** + * MIX_IS_VIDEOFRAME: + * @obj: an object. + * + * Checks if the given object is an instance of #MixVideoFrame + */ +#define MIX_IS_VIDEOFRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFRAME)) + +/** + * MIX_VIDEOFRAME_CLASS: + * @klass: class to be type-casted. + */ +#define MIX_VIDEOFRAME_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFRAME, MixVideoFrameClass)) + +/** + * MIX_IS_VIDEOFRAME_CLASS: + * @klass: a class. 
+ * + * Checks if the given class is #MixVideoFrameClass + */ +#define MIX_IS_VIDEOFRAME_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFRAME)) + +/** + * MIX_VIDEOFRAME_GET_CLASS: + * @obj: a #MixVideoFrame object. + * + * Get the class instance of the object. + */ +#define MIX_VIDEOFRAME_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFRAME, MixVideoFrameClass)) + +typedef struct _MixVideoFrame MixVideoFrame; +typedef struct _MixVideoFrameClass MixVideoFrameClass; + +/** + * MixVideoFrame: + * + * MI-X VideoConfig Parameter object + */ +struct _MixVideoFrame { + /*< public > */ + MixParams parent; + + /*< public > */ + gulong frame_id; + guint ci_frame_idx; + guint64 timestamp; + gboolean discontinuity; + guint32 frame_structure; // 0: frame, 1: top field, 2: bottom field + + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; +}; + +/** + * MixVideoFrameClass: + * + * MI-X VideoConfig object class + */ +struct _MixVideoFrameClass { + /*< public > */ + MixParamsClass parent_class; + + /* class members */ +}; + +/** + * mix_videoframe_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videoframe_get_type(void); + +/** + * mix_videoframe_new: + * @returns: A newly allocated instance of #MixVideoFrame + * + * Use this method to create new instance of #MixVideoFrame + */ +MixVideoFrame *mix_videoframe_new(void); +/** + * mix_videoframe_ref: + * @mix: object to add reference + * @returns: the MixVideoFrame instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoFrame *mix_videoframe_ref(MixVideoFrame * obj); + +/** + * mix_videoframe_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +void mix_videoframe_unref(MixVideoFrame * obj); + +/* Class Methods */ + +MIX_RESULT mix_videoframe_set_frame_id(MixVideoFrame * obj, gulong frame_id); +MIX_RESULT mix_videoframe_get_frame_id(MixVideoFrame * obj, gulong * frame_id); + +MIX_RESULT mix_videoframe_set_ci_frame_idx(MixVideoFrame * obj, guint ci_frame_idx); +MIX_RESULT mix_videoframe_get_ci_frame_idx(MixVideoFrame * obj, guint * ci_frame_idx); + +MIX_RESULT mix_videoframe_set_timestamp(MixVideoFrame * obj, guint64 timestamp); +MIX_RESULT mix_videoframe_get_timestamp(MixVideoFrame * obj, guint64 * timestamp); + +MIX_RESULT mix_videoframe_set_discontinuity(MixVideoFrame * obj, gboolean discontinuity); +MIX_RESULT mix_videoframe_get_discontinuity(MixVideoFrame * obj, gboolean * discontinuity); + +MIX_RESULT mix_videoframe_set_frame_structure(MixVideoFrame * obj, guint32 frame_structure); +MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, guint32* frame_structure); + +#endif /* __MIX_VIDEOFRAME_H__ */ diff --git a/mix_video/src/mixvideoframe_private.h b/mix_video/src/mixvideoframe_private.h new file mode 100644 index 0000000..5d4b894 --- /dev/null +++ b/mix_video/src/mixvideoframe_private.h @@ -0,0 +1,68 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_VIDEOFRAME_PRIVATE_H__ +#define __MIX_VIDEOFRAME_PRIVATE_H__ + +#include "mixvideoframe.h" +#include "mixsurfacepool.h" + +typedef enum _MixFrameType +{ + TYPE_I, + TYPE_P, + TYPE_B, + TYPE_INVALID +} MixFrameType; + +typedef struct _MixVideoFramePrivate MixVideoFramePrivate; + +struct _MixVideoFramePrivate +{ + /*< private > */ + MixSurfacePool *pool; + MixFrameType frame_type; + gboolean is_skipped; + MixVideoFrame *real_frame; + GStaticRecMutex lock; +}; + +/** +* MIX_VIDEOFRAME_PRIVATE: +* +* Get private structure of this class. +* @obj: class object for which to get private data. +*/ +#define MIX_VIDEOFRAME_GET_PRIVATE(obj) \ + (G_TYPE_INSTANCE_GET_PRIVATE ((obj), MIX_TYPE_VIDEOFRAME, MixVideoFramePrivate)) + + +/* Private functions */ +MIX_RESULT +mix_videoframe_set_pool (MixVideoFrame *obj, MixSurfacePool *pool); + +MIX_RESULT +mix_videoframe_set_frame_type (MixVideoFrame *obj, MixFrameType frame_type); + +MIX_RESULT +mix_videoframe_get_frame_type (MixVideoFrame *obj, MixFrameType *frame_type); + +MIX_RESULT +mix_videoframe_set_is_skipped (MixVideoFrame *obj, gboolean is_skipped); + +MIX_RESULT +mix_videoframe_get_is_skipped (MixVideoFrame *obj, gboolean *is_skipped); + +MIX_RESULT +mix_videoframe_set_real_frame (MixVideoFrame *obj, MixVideoFrame *real); + +MIX_RESULT +mix_videoframe_get_real_frame (MixVideoFrame *obj, MixVideoFrame **real); + + +#endif /* __MIX_VIDEOFRAME_PRIVATE_H__ */ diff --git a/mix_video/src/mixvideoinitparams.c b/mix_video/src/mixvideoinitparams.c new file mode 100644 index 0000000..ac58548 --- /dev/null +++ b/mix_video/src/mixvideoinitparams.c @@ -0,0 +1,219 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideoinitparams + * @short_description: VideoInit parameters + * + * A data object which stores videoinit specific parameters. 
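+ *
+ * A minimal usage sketch (illustrative; the #MixDisplayX11 object, the
+ * MixVideo instance and the MixDrmParams are created and owned by the
+ * caller):
+ * |[
+ * MixVideoInitParams *init_params = mix_videoinitparams_new ();
+ * mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11));
+ * mix_video_initialize (video, MIX_CODEC_MODE_DECODE, init_params, drm);
+ * mix_videoinitparams_unref (init_params);
+ * ]|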
+ */ + +#include "mixvideoinitparams.h" + +#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } + +static GType _mix_videoinitparams_type = 0; +static MixParamsClass *parent_class = NULL; + +#define _do_init { _mix_videoinitparams_type = g_define_type_id; } + +gboolean mix_videoinitparams_copy(MixParams * target, const MixParams * src); +MixParams *mix_videoinitparams_dup(const MixParams * obj); +gboolean mix_videoinitparams_equal(MixParams * first, MixParams * second); +static void mix_videoinitparams_finalize(MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoInitParams, mix_videoinitparams, + MIX_TYPE_PARAMS, _do_init); + +static void mix_videoinitparams_init(MixVideoInitParams * self) { + + /* Initialize member varibles */ + self->display = NULL; + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; +} + +static void mix_videoinitparams_class_init(MixVideoInitParamsClass * klass) { + MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); + + /* setup static parent class */ + parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); + + mixparams_class->finalize = mix_videoinitparams_finalize; + mixparams_class->copy = (MixParamsCopyFunction) mix_videoinitparams_copy; + mixparams_class->dup = (MixParamsDupFunction) mix_videoinitparams_dup; + mixparams_class->equal = (MixParamsEqualFunction) mix_videoinitparams_equal; +} + +MixVideoInitParams * +mix_videoinitparams_new(void) { + MixVideoInitParams *ret = (MixVideoInitParams *) g_type_create_instance( + MIX_TYPE_VIDEOINITPARAMS); + + return ret; +} + +void mix_videoinitparams_finalize(MixParams * obj) { + /* clean up here. */ + + MixVideoInitParams *self = MIX_VIDEOINITPARAMS(obj); + + /* unref display */ + if (self->display) { + mix_display_unref(self->display); + self->display = NULL; + } + + /* Chain up parent */ + if (parent_class->finalize) { + parent_class->finalize(obj); + } +} + +MixVideoInitParams * +mix_videoinitparams_ref(MixVideoInitParams * mix) { + return (MixVideoInitParams *) mix_params_ref(MIX_PARAMS(mix)); +} + +/** + * mix_videoinitparams_dup: + * @obj: a #MixVideoInitParams object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. + */ +MixParams * +mix_videoinitparams_dup(const MixParams * obj) { + MixParams *ret = NULL; + if (MIX_IS_VIDEOINITPARAMS(obj)) { + MixVideoInitParams *duplicate = mix_videoinitparams_new(); + if (mix_videoinitparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { + ret = MIX_PARAMS(duplicate); + } else { + mix_videoinitparams_unref(duplicate); + } + } + return ret; +} + +/** + * mix_videoinitparams_copy: + * @target: copy to target + * @src: copy from src + * @returns: boolean indicates if copy is successful. + * + * Copy instance data from @src to @target. + */ +gboolean mix_videoinitparams_copy(MixParams * target, const MixParams * src) { + MixVideoInitParams *this_target, *this_src; + if (MIX_IS_VIDEOINITPARAMS(target) && MIX_IS_VIDEOINITPARAMS(src)) { + /* Cast the base object to this child object */ + this_target = MIX_VIDEOINITPARAMS(target); + this_src = MIX_VIDEOINITPARAMS(src); + /* Copy properties from source to target. 
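+       Note that the display member, when present, is duplicated rather
+       than shared, so the copy owns its own reference.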
+     */
+
+        /* duplicate display, if the source has one */
+        if (this_src->display) {
+            this_target->display = mix_display_dup(this_src->display);
+        } else {
+            this_target->display = NULL;
+        }
+
+        /* Now chainup base class */
+        if (parent_class->copy) {
+            return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+                    src));
+        } else {
+            return TRUE;
+        }
+    }
+    return FALSE;
+}
+
+/**
+ * mix_videoinitparams_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instances are equal.
+ *
+ * Compare instance data of @first and @second for equality.
+ */
+gboolean mix_videoinitparams_equal(MixParams * first, MixParams * second) {
+    gboolean ret = FALSE;
+    MixVideoInitParams *this_first, *this_second;
+    if (MIX_IS_VIDEOINITPARAMS(first) && MIX_IS_VIDEOINITPARAMS(second)) {
+        /* cast only after the type checks have passed */
+        this_first = MIX_VIDEOINITPARAMS(first);
+        this_second = MIX_VIDEOINITPARAMS(second);
+        // Compare member variables
+        if (!this_first->display && !this_second->display) {
+            ret = TRUE;
+        } else if (this_first->display && this_second->display) {
+
+            /* compare MixDisplay */
+            ret = mix_display_equal(this_first->display, this_second->display);
+        }
+
+        if (ret == FALSE) {
+            return FALSE;
+        }
+        // members within this scope are equal. chain up.
+        MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+        if (klass->equal)
+            ret = klass->equal(first, second);
+        else
+            ret = TRUE;
+    }
+    return ret;
+}
+
+#define MIX_VIDEOINITPARAMS_SETTER_CHECK_INPUT(obj) \
+    if(!obj) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEOINITPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOINITPARAMS_GETTER_CHECK_INPUT(obj, prop) \
+    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEOINITPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+MIX_RESULT mix_videoinitparams_set_display(MixVideoInitParams * obj,
+        MixDisplay * display) {
+    MIX_VIDEOINITPARAMS_SETTER_CHECK_INPUT (obj);
+
+    if(obj->display) {
+        mix_display_unref(obj->display);
+    }
+    obj->display = NULL;
+
+    if(display) {
+        /* obj->display = mix_display_dup(display);
+         if(!obj->display) {
+         return MIX_RESULT_NO_MEMORY;
+         }*/
+
+        /* take a shared reference rather than a deep copy */
+        obj->display = mix_display_ref(display);
+    }
+
+    return MIX_RESULT_SUCCESS;
+}
+
+/*
+ Caller is responsible for releasing the returned display with
+ mix_display_unref
+ */
+MIX_RESULT mix_videoinitparams_get_display(MixVideoInitParams * obj,
+        MixDisplay ** display) {
+    MIX_VIDEOINITPARAMS_GETTER_CHECK_INPUT (obj, display);
+
+    *display = NULL;
+    if(obj->display) {
+        /* *display = mix_display_dup(obj->display);
+         if(!*display) {
+         return MIX_RESULT_NO_MEMORY;
+         }*/
+        *display = mix_display_ref(obj->display);
+    }
+
+    return MIX_RESULT_SUCCESS;
+}
diff --git a/mix_video/src/mixvideoinitparams.h b/mix_video/src/mixvideoinitparams.h
new file mode 100644
index 0000000..eb7c118
--- /dev/null
+++ b/mix_video/src/mixvideoinitparams.h
@@ -0,0 +1,138 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+ + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_VIDEOINITPARAMS_H__ +#define __MIX_VIDEOINITPARAMS_H__ + +#include +#include "mixdisplay.h" +#include "mixvideodef.h" + +/** + * MIX_TYPE_VIDEOINITPARAMS: + * + * Get type of class. + */ +#define MIX_TYPE_VIDEOINITPARAMS (mix_videoinitparams_get_type ()) + +/** + * MIX_VIDEOINITPARAMS: + * @obj: object to be type-casted. + */ +#define MIX_VIDEOINITPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOINITPARAMS, MixVideoInitParams)) + +/** + * MIX_IS_VIDEOINITPARAMS: + * @obj: an object. + * + * Checks if the given object is an instance of #MixParams + */ +#define MIX_IS_VIDEOINITPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOINITPARAMS)) + +/** + * MIX_VIDEOINITPARAMS_CLASS: + * @klass: class to be type-casted. + */ +#define MIX_VIDEOINITPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOINITPARAMS, MixVideoInitParamsClass)) + +/** + * MIX_IS_VIDEOINITPARAMS_CLASS: + * @klass: a class. + * + * Checks if the given class is #MixParamsClass + */ +#define MIX_IS_VIDEOINITPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOINITPARAMS)) + +/** + * MIX_VIDEOINITPARAMS_GET_CLASS: + * @obj: a #MixParams object. + * + * Get the class instance of the object. + */ +#define MIX_VIDEOINITPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOINITPARAMS, MixVideoInitParamsClass)) + +typedef struct _MixVideoInitParams MixVideoInitParams; +typedef struct _MixVideoInitParamsClass MixVideoInitParamsClass; + +/** + * MixVideoInitParams: + * + * MI-X VideoInit Parameter object + */ +struct _MixVideoInitParams +{ + /*< public > */ + MixParams parent; + + /*< public > */ + + MixDisplay *display; + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; +}; + +/** + * MixVideoInitParamsClass: + * + * MI-X VideoInit object class + */ +struct _MixVideoInitParamsClass +{ + /*< public > */ + MixParamsClass parent_class; + + /* class members */ +}; + +/** + * mix_videoinitparams_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videoinitparams_get_type (void); + +/** + * mix_videoinitparams_new: + * @returns: A newly allocated instance of #MixVideoInitParams + * + * Use this method to create new instance of #MixVideoInitParams + */ +MixVideoInitParams *mix_videoinitparams_new (void); +/** + * mix_videoinitparams_ref: + * @mix: object to add reference + * @returns: the MixVideoInitParams instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoInitParams *mix_videoinitparams_ref (MixVideoInitParams * mix); + +/** + * mix_videoinitparams_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. 
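+ * (Implemented as a macro that forwards to mix_params_unref().)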
+ */
+#define mix_videoinitparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/*
+ TODO: Add documentation
+*/
+
+MIX_RESULT mix_videoinitparams_set_display (MixVideoInitParams * obj,
+        MixDisplay * display);
+
+MIX_RESULT mix_videoinitparams_get_display (MixVideoInitParams * obj,
+        MixDisplay ** display);
+
+#endif /* __MIX_VIDEOINITPARAMS_H__ */
diff --git a/mix_video/src/mixvideolog.h b/mix_video/src/mixvideolog.h
new file mode 100644
index 0000000..89a8827
--- /dev/null
+++ b/mix_video/src/mixvideolog.h
@@ -0,0 +1,25 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEO_LOG_H__
+#define __MIX_VIDEO_LOG_H__
+#include <mixlog.h>
+
+#ifdef MIX_LOG_ENABLE
+#define LOG_V(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__)
+#define LOG_I(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_INFO, format, ##__VA_ARGS__)
+#define LOG_W(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_WARNING, format, ##__VA_ARGS__)
+#define LOG_E(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, format, ##__VA_ARGS__)
+#else
+#define LOG_V(format, ...)
+#define LOG_I(format, ...)
+#define LOG_W(format, ...)
+#define LOG_E(format, ...)
+#endif
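+
+/*
+ * Usage sketch (illustrative): the macros take printf-style arguments,
+ * for example
+ *
+ *     LOG_I("mime : %s\n", mime_type);
+ *     LOG_E("Failed to get display\n");
+ *
+ * When MIX_LOG_ENABLE is not defined they expand to nothing, so logging
+ * has no runtime cost in release builds.
+ */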
+
+#endif /* __MIX_VIDEO_LOG_H__ */
diff --git a/mix_video/src/mixvideorenderparams.c b/mix_video/src/mixvideorenderparams.c
new file mode 100644
index 0000000..0dc8be7
--- /dev/null
+++ b/mix_video/src/mixvideorenderparams.c
@@ -0,0 +1,420 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideorenderparams
+ * @short_description: VideoRender parameters
+ *
+ * A data object which stores videorender specific parameters.
+ */
+#include <va/va.h>		/* libVA */
+#include <glib.h>
+
+#include "mixvideorenderparams.h"
+#include "mixvideorenderparams_internal.h"
+
+#include <string.h>
+
+static GType _mix_videorenderparams_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_videorenderparams_type = g_define_type_id; }
+
+gboolean mix_videorenderparams_copy(MixParams * target, const MixParams * src);
+MixParams *mix_videorenderparams_dup(const MixParams * obj);
+gboolean mix_videorenderparams_equal(MixParams * first, MixParams * second);
+static void mix_videorenderparams_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoRenderParams, mix_videorenderparams,
+		MIX_TYPE_PARAMS, _do_init);
+
+static void mix_videorenderparams_init(MixVideoRenderParams * self) {
+
+    MixVideoRenderParamsPrivate *priv = MIX_VIDEORENDERPARAMS_GET_PRIVATE(self);
+    priv->va_cliprects = NULL;
+    self->reserved = priv;
+
+    /* initialize properties here */
+    self->display = NULL;
+    memset(&(self->src_rect), 0, sizeof(MixRect));
+    memset(&(self->dst_rect), 0, sizeof(MixRect));
+
+    self->clipping_rects = NULL;
+    self->number_of_clipping_rects = 0;
+
+    /* TODO: initialize other properties */
+    self->reserved1 = NULL;
+    self->reserved2 = NULL;
+    self->reserved3 = NULL;
+    self->reserved4 = NULL;
+}
+
+static void mix_videorenderparams_class_init(MixVideoRenderParamsClass * klass) {
+    MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+    /* setup static parent class */
+    parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+    mixparams_class->finalize = mix_videorenderparams_finalize;
+    mixparams_class->copy = (MixParamsCopyFunction) mix_videorenderparams_copy;
+    mixparams_class->dup = (MixParamsDupFunction) mix_videorenderparams_dup;
+    mixparams_class->equal
+            = (MixParamsEqualFunction) mix_videorenderparams_equal;
+
+    /* Register and allocate the space the private structure for this object */
+    g_type_class_add_private(mixparams_class, sizeof(MixVideoRenderParamsPrivate));
+}
+
+MixVideoRenderParams *
+mix_videorenderparams_new(void) {
+    MixVideoRenderParams *ret =
+            (MixVideoRenderParams *) g_type_create_instance(
+                    MIX_TYPE_VIDEORENDERPARAMS);
+
+    return ret;
+}
+
+void mix_videorenderparams_finalize(MixParams * obj) {
+    /* clean up here. */
+
+    MixVideoRenderParams *self = MIX_VIDEORENDERPARAMS(obj);
+    MixVideoRenderParamsPrivate *priv =
+            (MixVideoRenderParamsPrivate *) self->reserved;
+
+    if (self->clipping_rects) {
+        g_free(self->clipping_rects);
+        self->clipping_rects = NULL;
+    }
+
+    if (priv->va_cliprects) {
+        g_free(priv->va_cliprects);
+        priv->va_cliprects = NULL;
+    }
+
+    self->number_of_clipping_rects = 0;
+
+    if (self->display) {
+        mix_display_unref(self->display);
+        self->display = NULL;
+    }
+
+    /* TODO: cleanup other resources allocated */
+
+    /* Chain up parent */
+    if (parent_class->finalize) {
+        parent_class->finalize(obj);
+    }
+}
+
+MixVideoRenderParams *
+mix_videorenderparams_ref(MixVideoRenderParams * mix) {
+    return (MixVideoRenderParams *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_videorenderparams_dup:
+ * @obj: a #MixVideoRenderParams object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams *
+mix_videorenderparams_dup(const MixParams * obj) {
+    MixParams *ret = NULL;
+
+    if (MIX_IS_VIDEORENDERPARAMS(obj)) {
+        MixVideoRenderParams *duplicate = mix_videorenderparams_new();
+        if (mix_videorenderparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+            ret = MIX_PARAMS(duplicate);
+        } else {
+            mix_videorenderparams_unref(duplicate);
+        }
+    }
+    return ret;
+}
+
+/**
+ * mix_videorenderparams_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_videorenderparams_copy(MixParams * target, const MixParams * src) {
+
+    MixVideoRenderParams *this_target, *this_src;
+    MIX_RESULT mix_result = MIX_RESULT_FAIL;
+
+    if (target == src) {
+        return TRUE;
+    }
+
+    if (MIX_IS_VIDEORENDERPARAMS(target) && MIX_IS_VIDEORENDERPARAMS(src)) {
+
+        // Cast the base object to this child object
+        this_target = MIX_VIDEORENDERPARAMS(target);
+        this_src = MIX_VIDEORENDERPARAMS(src);
+
+        mix_result = mix_videorenderparams_set_display(this_target,
+                this_src->display);
+        if (mix_result != MIX_RESULT_SUCCESS) {
+            return FALSE;
+        }
+
+        mix_result = mix_videorenderparams_set_clipping_rects(this_target,
+                this_src->clipping_rects, this_src->number_of_clipping_rects);
+
+        if (mix_result != MIX_RESULT_SUCCESS) {
+            return FALSE;
+        }
+
+        this_target->src_rect = this_src->src_rect;
+        this_target->dst_rect = this_src->dst_rect;
+
+        // Now chainup base class
+        if (parent_class->copy) {
+            return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+                    src));
+        } else {
+            return TRUE;
+        }
+    }
+    return FALSE;
+}
+
+gboolean mix_rect_equal(MixRect rc1, MixRect rc2) {
+
+    if (rc1.x == rc2.x && rc1.y == rc2.y && rc1.width == rc2.width
+            && rc1.height == rc2.height) {
+        return TRUE;
+    }
+
+    return FALSE;
+}
+
+/**
+ * mix_videorenderparams_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instances are equal.
+ *
+ * Compare instance data of @first and @second for equality.
+ */
+gboolean mix_videorenderparams_equal(MixParams * first, MixParams * second) {
+    gboolean ret = FALSE;
+    MixVideoRenderParams *this_first, *this_second;
+
+    if (MIX_IS_VIDEORENDERPARAMS(first) && MIX_IS_VIDEORENDERPARAMS(second)) {
+        // Deep compare
+        // Cast the base object to this child object
+
+        this_first = MIX_VIDEORENDERPARAMS(first);
+        this_second = MIX_VIDEORENDERPARAMS(second);
+
+        /* compare the clipping-rect arrays themselves, not the counts
+           reinterpreted as pointers */
+        if (mix_display_equal(MIX_DISPLAY(this_first->display), MIX_DISPLAY(
+                this_second->display)) && mix_rect_equal(this_first->src_rect,
+                this_second->src_rect) && mix_rect_equal(this_first->dst_rect,
+                this_second->dst_rect) && this_first->number_of_clipping_rects
+                == this_second->number_of_clipping_rects
+                && (this_first->number_of_clipping_rects == 0 || memcmp(
+                        (guchar *) this_first->clipping_rects,
+                        (guchar *) this_second->clipping_rects,
+                        this_first->number_of_clipping_rects
+                                * sizeof(MixRect)) == 0)) {
+            // members within this scope are equal. chain up.
+            MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+            if (klass->equal)
+                ret = klass->equal(first, second);
+            else
+                ret = TRUE;
+        }
+    }
+
+    return ret;
+}
+
+#define MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT(obj) \
+    if(!obj) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEORENDERPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT(obj, prop) \
+    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEORENDERPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+
+/* TODO: Add getters and setters for other properties. The following is just an example, not implemented yet. */
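+
+/*
+ * Usage sketch for the clipping-rectangle setter (illustrative; the
+ * values and variable names are made up):
+ *
+ *     MixRect rc;
+ *     MixVideoRenderParams *rp = mix_videorenderparams_new();
+ *
+ *     rc.x = 0;
+ *     rc.y = 0;
+ *     rc.width = 320;
+ *     rc.height = 240;
+ *     mix_videorenderparams_set_clipping_rects(rp, &rc, 1);
+ *     ...
+ *     mix_videorenderparams_unref(rp);
+ *
+ * The setter keeps its own copy of the array and also pre-builds the
+ * equivalent VARectangle list used by the render path.
+ */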
+
+MIX_RESULT mix_videorenderparams_set_display(MixVideoRenderParams * obj,
+        MixDisplay * display) {
+
+    MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj);
+
+    if (obj->display) {
+        mix_display_unref(obj->display);
+        obj->display = NULL;
+    }
+
+    /* store a deep copy of the caller's display */
+    if (display) {
+        obj->display = mix_display_dup(display);
+    }
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_get_display(MixVideoRenderParams * obj,
+        MixDisplay ** display) {
+
+    MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, display);
+
+    /* return a duplicate that the caller must unref; NULL if none is set */
+    *display = NULL;
+    if (obj->display) {
+        *display = mix_display_dup(obj->display);
+    }
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_set_src_rect(MixVideoRenderParams * obj,
+        MixRect src_rect) {
+
+    MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj);
+
+    obj->src_rect = src_rect;
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_get_src_rect(MixVideoRenderParams * obj,
+        MixRect * src_rect) {
+
+    MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, src_rect);
+
+    *src_rect = obj->src_rect;
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_set_dest_rect(MixVideoRenderParams * obj,
+        MixRect dst_rect) {
+
+    MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj);
+
+    obj->dst_rect = dst_rect;
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_get_dest_rect(MixVideoRenderParams * obj,
+        MixRect * dst_rect) {
+
+    MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, dst_rect);
+
+    *dst_rect = obj->dst_rect;
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_set_clipping_rects(MixVideoRenderParams * obj,
+        MixRect* clipping_rects, guint number_of_clipping_rects) {
+
+    MixVideoRenderParamsPrivate *priv = NULL;
+    MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj);
+
+    priv = (MixVideoRenderParamsPrivate *) obj->reserved;
+
+    if (obj->clipping_rects) {
+        g_free(obj->clipping_rects);
+        obj->clipping_rects = NULL;
+        obj->number_of_clipping_rects = 0;
+    }
+
+    if(priv->va_cliprects) {
+        g_free(priv->va_cliprects);
+        priv->va_cliprects = NULL;
+    }
+
+    if (clipping_rects && number_of_clipping_rects) {
+
+        guint idx = 0;
+
+        obj->clipping_rects = g_memdup(clipping_rects, number_of_clipping_rects
+                * sizeof(MixRect));
+        if (!obj->clipping_rects) {
+            return MIX_RESULT_NO_MEMORY;
+        }
+
+        obj->number_of_clipping_rects = number_of_clipping_rects;
+
+        /* create VARectangle list */
+        priv->va_cliprects = g_malloc(number_of_clipping_rects * sizeof(VARectangle));
+        if (!priv->va_cliprects) {
+            return MIX_RESULT_NO_MEMORY;
+        }
+
+        for (idx = 0; idx < number_of_clipping_rects; idx++) {
+            priv->va_cliprects[idx].x = clipping_rects[idx].x;
+            priv->va_cliprects[idx].y = clipping_rects[idx].y;
+            priv->va_cliprects[idx].width = clipping_rects[idx].width;
+            priv->va_cliprects[idx].height = clipping_rects[idx].height;
+        }
+    }
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_get_clipping_rects(MixVideoRenderParams * obj,
+        MixRect ** clipping_rects, guint* number_of_clipping_rects) {
+
+    MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, clipping_rects);
+    if (!number_of_clipping_rects) {
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    *clipping_rects = NULL;
+    *number_of_clipping_rects = 0;
+
+    if (obj->clipping_rects && obj->number_of_clipping_rects) {
+        *clipping_rects = g_memdup(obj->clipping_rects,
+                obj->number_of_clipping_rects * sizeof(MixRect));
+        if (!*clipping_rects) {
+            return MIX_RESULT_NO_MEMORY;
+        }
+
+        *number_of_clipping_rects = obj->number_of_clipping_rects;
+    }
+ + return MIX_RESULT_SUCCESS; +} + +/* The mixvideo internal method */ +MIX_RESULT mix_videorenderparams_get_cliprects_internal( + MixVideoRenderParams * obj, VARectangle ** va_cliprects, + guint* number_of_cliprects) { + + MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, va_cliprects); + if (!number_of_cliprects) { + return MIX_RESULT_NULL_PTR; + } + MixVideoRenderParamsPrivate *priv = + (MixVideoRenderParamsPrivate *) obj->reserved; + + *va_cliprects = NULL; + *number_of_cliprects = 0; + + if (priv->va_cliprects && obj->number_of_clipping_rects) { + *va_cliprects = priv->va_cliprects; + *number_of_cliprects = obj->number_of_clipping_rects; + } + + return MIX_RESULT_SUCCESS; + +} + +/* TODO: implement properties' setters and getters */ diff --git a/mix_video/src/mixvideorenderparams.h b/mix_video/src/mixvideorenderparams.h new file mode 100644 index 0000000..f6148e7 --- /dev/null +++ b/mix_video/src/mixvideorenderparams.h @@ -0,0 +1,158 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEORENDERPARAMS_H__ +#define __MIX_VIDEORENDERPARAMS_H__ + +#include +#include "mixvideodef.h" +#include "mixdisplay.h" +#include "mixvideoframe.h" + +/** + * MIX_TYPE_VIDEORENDERPARAMS: + * + * Get type of class. + */ +#define MIX_TYPE_VIDEORENDERPARAMS (mix_videorenderparams_get_type ()) + +/** + * MIX_VIDEORENDERPARAMS: + * @obj: object to be type-casted. + */ +#define MIX_VIDEORENDERPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEORENDERPARAMS, MixVideoRenderParams)) + +/** + * MIX_IS_VIDEORENDERPARAMS: + * @obj: an object. + * + * Checks if the given object is an instance of #MixParams + */ +#define MIX_IS_VIDEORENDERPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEORENDERPARAMS)) + +/** + * MIX_VIDEORENDERPARAMS_CLASS: + * @klass: class to be type-casted. + */ +#define MIX_VIDEORENDERPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEORENDERPARAMS, MixVideoRenderParamsClass)) + +/** + * MIX_IS_VIDEORENDERPARAMS_CLASS: + * @klass: a class. + * + * Checks if the given class is #MixParamsClass + */ +#define MIX_IS_VIDEORENDERPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEORENDERPARAMS)) + +/** + * MIX_VIDEORENDERPARAMS_GET_CLASS: + * @obj: a #MixParams object. + * + * Get the class instance of the object. 
+ */ +#define MIX_VIDEORENDERPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEORENDERPARAMS, MixVideoRenderParamsClass)) + +typedef struct _MixVideoRenderParams MixVideoRenderParams; +typedef struct _MixVideoRenderParamsClass MixVideoRenderParamsClass; + +/** + * MixVideoRenderParams: + * + * MI-X VideoRender Parameter object + */ +struct _MixVideoRenderParams { + /*< public > */ + MixParams parent; + + /*< public > */ + MixDisplay *display; + + MixRect src_rect; + MixRect dst_rect; + + MixRect *clipping_rects; + guint number_of_clipping_rects; + + guint post_proc; + + gpointer reserved; + gpointer reserved1; + gpointer reserved2; + gpointer reserved3; + gpointer reserved4; +}; + +/** + * MixVideoRenderParamsClass: + * + * MI-X VideoRender object class + */ +struct _MixVideoRenderParamsClass { + /*< public > */ + MixParamsClass parent_class; + + /* class members */ +}; + +/** + * mix_videorenderparams_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videorenderparams_get_type(void); + +/** + * mix_videorenderparams_new: + * @returns: A newly allocated instance of #MixVideoRenderParams + * + * Use this method to create new instance of #MixVideoRenderParams + */ +MixVideoRenderParams *mix_videorenderparams_new(void); +/** + * mix_videorenderparams_ref: + * @mix: object to add reference + * @returns: the MixVideoRenderParams instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoRenderParams *mix_videorenderparams_ref(MixVideoRenderParams * mix); + +/** + * mix_videorenderparams_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +#define mix_videorenderparams_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +MIX_RESULT mix_videorenderparams_set_display(MixVideoRenderParams * obj, + MixDisplay * display); +MIX_RESULT mix_videorenderparams_get_display(MixVideoRenderParams * obj, + MixDisplay ** display); + +MIX_RESULT mix_videorenderparams_set_src_rect(MixVideoRenderParams * obj, + MixRect src_rect); +MIX_RESULT mix_videorenderparams_get_src_rect(MixVideoRenderParams * obj, + MixRect * src_rect); + +MIX_RESULT mix_videorenderparams_set_dest_rect(MixVideoRenderParams * obj, + MixRect dst_rect); +MIX_RESULT mix_videorenderparams_get_dest_rect(MixVideoRenderParams * obj, + MixRect * dst_rect); + +MIX_RESULT mix_videorenderparams_set_clipping_rects(MixVideoRenderParams * obj, + MixRect* clipping_rects, guint number_of_clipping_rects); +MIX_RESULT mix_videorenderparams_get_clipping_rects(MixVideoRenderParams * obj, + MixRect ** clipping_rects, guint* number_of_clipping_rects); + +/* TODO: Add getters and setters for other properties */ + +#endif /* __MIX_VIDEORENDERPARAMS_H__ */ diff --git a/mix_video/src/mixvideorenderparams_internal.h b/mix_video/src/mixvideorenderparams_internal.h new file mode 100644 index 0000000..8619173 --- /dev/null +++ b/mix_video/src/mixvideorenderparams_internal.h @@ -0,0 +1,36 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEORENDERPARAMS_PRIVATE_H__ +#define __MIX_VIDEORENDERPARAMS_PRIVATE_H__ + +typedef struct _MixVideoRenderParamsPrivate MixVideoRenderParamsPrivate; + +struct _MixVideoRenderParamsPrivate { + /*< private > */ + + VARectangle *va_cliprects; +}; + +/** + * MIX_VIDEO_PRIVATE: + * + * Get private structure of this class. + * @obj: class object for which to get private data. + */ +#define MIX_VIDEORENDERPARAMS_GET_PRIVATE(obj) \ + (G_TYPE_INSTANCE_GET_PRIVATE ((obj), MIX_TYPE_VIDEORENDERPARAMS, MixVideoRenderParamsPrivate)) + +/* Internal function */ + +MIX_RESULT mix_videorenderparams_get_cliprects_internal( + MixVideoRenderParams * obj, + VARectangle ** va_cliprects, + guint* number_of_cliprects); + +#endif /* __MIX_VIDEORENDERPARAMS_PRIVATE_H__ */ diff --git a/mix_video/src/test.c b/mix_video/src/test.c new file mode 100644 index 0000000..8f9aee5 --- /dev/null +++ b/mix_video/src/test.c @@ -0,0 +1,87 @@ +#include +#include +#include +#include "mixvideo.h" +#include "mixdisplayx11.h" + +int +main (int argc, char **argv) +{ + MIX_RESULT ret; + + g_type_init (); + +/* test MixDisplay */ + { + + MixDisplayX11 *x11_clone = NULL; + MixDisplayX11 *x11 = mix_displayx11_new (); + + MixDisplay *base = MIX_DISPLAY (x11); + + gboolean flag = MIX_IS_DISPLAYX11 (base); + + Drawable drawable = 1024; + + mix_displayx11_set_drawable (x11, drawable); + +/* clone x11 */ + + x11_clone = (MixDisplayX11 *) mix_display_dup (MIX_DISPLAY (x11)); + + base = MIX_DISPLAY (x11_clone); + + flag = MIX_IS_DISPLAYX11 (base); + + mix_displayx11_get_drawable (x11_clone, &drawable); + +/* TODO: add more test cases */ + +/* release */ + mix_display_unref (MIX_DISPLAY (x11)); + mix_display_unref (MIX_DISPLAY (x11_clone)); + g_print ("MixDisplayX11 test is done!\n"); + } + +/* test MixVideoInitParams */ + { + MixVideoInitParams *init_params = mix_videoinitparams_new (); + + MixDisplayX11 *x11 = mix_displayx11_new (); + mix_displayx11_set_drawable (x11, 1024); + + mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11)); + +/* release */ + mix_params_unref (MIX_PARAMS (init_params)); + mix_display_unref (MIX_DISPLAY (x11)); + + g_print ("MixVideoInitParams test is done!\n"); + } + +/* test MixVideo */ + + { + MixVideo *video = mix_video_new (); + MixVideoInitParams *init_params = mix_videoinitparams_new (); + MixDisplayX11 *x11 = mix_displayx11_new (); + MixDrmParams *drm = mix_drmparams_new (); + MixCodecMode mode = MIX_CODEC_MODE_DECODE; + + mix_displayx11_set_drawable (x11, 1024); + mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11)); + + mix_video_initialize (video, mode, init_params, drm); + +/* TODO: add more test cases */ + +/* unref the objects. 
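+   (init_params holds its own reference on the display, taken in
+   mix_videoinitparams_set_display, so both init_params and x11 must be
+   unreffed here.)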
+*/
+
+    mix_params_unref (MIX_PARAMS (init_params));
+    mix_params_unref (MIX_PARAMS (drm));
+    mix_display_unref (MIX_DISPLAY (x11));
+    g_object_unref (G_OBJECT (video));
+
+    g_print ("MixVideo test is done!\n");
+  }
+}
diff --git a/mix_video/test/Makefile.am b/mix_video/test/Makefile.am
new file mode 100644
index 0000000..aa58280
--- /dev/null
+++ b/mix_video/test/Makefile.am
@@ -0,0 +1,2 @@
+SUBDIRS = src
+EXTRA_DIST = autogen.sh
diff --git a/mix_video/test/autogen.sh b/mix_video/test/autogen.sh
new file mode 100644
index 0000000..79033fb
--- /dev/null
+++ b/mix_video/test/autogen.sh
@@ -0,0 +1 @@
+autoreconf
diff --git a/mix_video/test/configure.ac b/mix_video/test/configure.ac
new file mode 100644
index 0000000..4e3a279
--- /dev/null
+++ b/mix_video/test/configure.ac
@@ -0,0 +1,53 @@
+
+AC_INIT([testmixvideo], [0.1], [tao.q.tao@intel.com])
+
+dnl AC_CONFIG_MACRO_DIR([m4])
+
+dnl AM_INIT_AUTOMAKE may only be expanded once, and the two-argument form
+dnl is obsolete (package/version already come from AC_INIT)
+AM_INIT_AUTOMAKE([-Wall -Werror foreign])
+
+AC_PROG_CC
+AC_PROG_LIBTOOL
+
+AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes , no)
+
+dnl Give error and exit if we don't have pkgconfig
+if test "x$HAVE_PKGCONFIG" = "xno"; then
+  AC_MSG_ERROR(you need to have pkgconfig installed !)
+fi
+
+GLIB_REQ=2.18
+dnl Check for glib2 without extra fat, useful for the unversioned tool frontends
+dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+if test "x$HAVE_GLIB" = "xno"; then
+  AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no)
+if test "x$HAVE_GOBJECT" = "xno"; then
+  AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+MIXVIDEO_REQ=0.5
+PKG_CHECK_MODULES(MIXVIDEO, mixvideo >= $MIXVIDEO_REQ,HAVE_MIXVIDEO=yes,HAVE_MIXVIDEO=no)
+if test "x$HAVE_MIXVIDEO" = "xno"; then
+  AC_MSG_ERROR(You need mixvideo development packages installed !)
+fi
+
+AC_ARG_ENABLE(optimization, AC_HELP_STRING([ --disable-optimization], [Do not optimize the library for speed. Might be required for debugging.]))
+AC_ARG_ENABLE(debuginfo, AC_HELP_STRING([ --enable-debuginfo ], [add -g to the compiler flags (to create debug information)]))
+
+if test "$enable_optimization" = "no" ; then
+   DEBUG=true
+else
+   DEBUG=false
+fi
+
+
+AC_CONFIG_HEADERS([config.h])
+AC_CONFIG_FILES([
+  Makefile
+  src/Makefile
+])
+AC_OUTPUT
diff --git a/mix_video/test/src/Makefile.am b/mix_video/test/src/Makefile.am
new file mode 100644
index 0000000..2c98fa4
--- /dev/null
+++ b/mix_video/test/src/Makefile.am
@@ -0,0 +1,22 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+ +#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +# + +noinst_PROGRAMS = test_framemanager + +############################################################################## +# sources used to compile +test_framemanager_SOURCES = test_framemanager.c + +test_framemanager_CFLAGS = $(GLIB_CFLAGS) $(GOBJECT_CFLAGS) $(MIXVIDEO_CFLAGS) +test_framemanager_LDADD = $(GLIB_LIBS) $(GOBJECT_LIBS) $(MIXVIDEO_LIBS) +test_framemanager_LIBTOOLFLAGS = --tag=disable-static + +# headers we need but don't want installed +noinst_HEADERS = + + + diff --git a/mix_video/test/src/test_framemanager.c b/mix_video/test/src/test_framemanager.c new file mode 100644 index 0000000..f4b8be9 --- /dev/null +++ b/mix_video/test/src/test_framemanager.c @@ -0,0 +1,200 @@ +#include "../../src/mixframemanager.h" + +gboolean stop_thread = FALSE; +GCond* data_cond = NULL; +GMutex* data_mutex = NULL; + + +void *deque_function(void *data) { + + MixFrameManager *fm = (MixFrameManager *) data; + MIX_RESULT mixresult; + MixVideoFrame *mvf = NULL; + guint64 pts; + while(!stop_thread) { + + g_mutex_lock (data_mutex); + + mixresult = mix_framemanager_dequeue(fm, &mvf); + if(mixresult == MIX_RESULT_SUCCESS) { + mixresult = mix_videoframe_get_timestamp(mvf, &pts); + g_print("dequeued timestamp = %"G_GINT64_FORMAT"\n", pts); + /* mix_videoframe_unref(mvf); */ + } else if(mixresult == MIX_RESULT_FRAME_NOTAVAIL) { + g_print("mixresult == MIX_RESULT_FRAME_NOTAVAIL\n"); + g_cond_wait (data_cond, data_mutex); + } + + g_mutex_unlock (data_mutex); + + } +} + +void shuffle(GPtrArray *list) { + guint idx, jdx; + guint len = list->len; + for (idx = 0; idx < len - 1; idx++) { + jdx = rand() % len; + if (idx != jdx) { + gpointer tmp = g_ptr_array_index(list, jdx); + g_ptr_array_index(list, jdx) = g_ptr_array_index(list, idx); + g_ptr_array_index(list, idx) = tmp; + } + } +} + +int main() { + MIX_RESULT mixresult; + + gint fps_n = 24000; + gint fps_d = 1001; + +/* + gint fps_n = 2500000; + gint fps_d = 104297; +*/ + GPtrArray *fa = NULL; + MixFrameManager *fm = NULL; + MixVideoFrame *mvf = NULL; + MixVideoFrame *mvf_1st = NULL; + + gint idx = 0; + guint64 pts = 0; + + GThread *deque_thread = NULL; + GError *deque_thread_error = NULL; + + /* first ting first */ + g_type_init(); + + /* create frame manager */ + fm = mix_framemanager_new(); + if (!fm) { + goto cleanup; + } + + /* initialize frame manager */ + mixresult = mix_framemanager_initialize(fm, + MIX_FRAMEORDER_MODE_DISPLAYORDER, fps_n, fps_d); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + /* create frame_array */ + fa = g_ptr_array_sized_new(64); + if (!fa) { + goto cleanup; + } + + for (idx = 0; idx < 16; idx++) { + /* generate MixVideoFrame */ + mvf = mix_videoframe_new(); + if (!mvf) { + goto cleanup; + } + + pts = idx * G_USEC_PER_SEC * G_GINT64_CONSTANT(1000) * fps_d / fps_n; + mixresult = mix_videoframe_set_timestamp(mvf, pts); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + g_print("original timestamp = %"G_GINT64_FORMAT"\n", pts); + + if (idx == 0) { + mvf_1st = mvf; + } else { + g_ptr_array_add(fa, (gpointer) mvf); + } + } + + /* shuffle the array */ + shuffle( fa); + + data_mutex = g_mutex_new (); + if(!data_mutex) { + goto cleanup; + } + + data_cond = 
g_cond_new(); + if(!data_cond) { + goto cleanup; + } + + + /* create another thread to dequeue */ + deque_thread = g_thread_create((GThreadFunc) deque_function, (void *) fm, + TRUE, &deque_thread_error); + if (!deque_thread) { + goto cleanup; + } + + /* enqueue */ + mixresult = mix_framemanager_enqueue(fm, mvf_1st); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + mixresult = mix_videoframe_get_timestamp(mvf_1st, &pts); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } + g_print("shuffled timestamp = %"G_GINT64_FORMAT"\n", pts); + + for (idx = 0; idx < fa->len; idx++) { + + g_mutex_lock (data_mutex); + + /* wait for 100ms to enqueue another frame */ + g_usleep(G_USEC_PER_SEC / 10 ); + + mvf = (MixVideoFrame *) g_ptr_array_index(fa, idx); + mixresult = mix_framemanager_enqueue(fm, mvf); + + /* wake up deque thread */ + g_cond_signal (data_cond); + + + g_mutex_unlock (data_mutex); + + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + mixresult = mix_videoframe_get_timestamp(mvf, &pts); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + g_print("shuffled timestamp = %"G_GINT64_FORMAT"\n", pts); + } + + getchar(); + + stop_thread = TRUE; + + /* wake up deque thread */ + g_cond_signal (data_cond); + + g_thread_join(deque_thread); + +cleanup: + + if(data_mutex) { + g_mutex_free(data_mutex); + } + + if(data_cond) { + g_cond_free(data_cond); + } + + if (fm) { + mix_framemanager_unref(fm); + } + + if (fa) { + g_ptr_array_free(fa, TRUE); + } + + return 0; +} -- cgit v1.2.3 From 54c721978b1d03fb2315f6eac42362a97b7d8712 Mon Sep 17 00:00:00 2001 From: "wonjong.lee" Date: Mon, 8 Feb 2010 11:32:38 +0900 Subject: start new branch (wrs-cdk-android-libmix-20100129) from: Ho-Eun, Ryu base branch: intel-cdk-moblin-libmix-20100129 --- Android.mk | 11 ++ mix_audio/src/Android.mk | 45 +++++ mix_audio/src/mixacp.c | 3 +- mix_audio/src/mixaudio.c | 14 +- mix_common/src/Android.mk | 32 ++++ mix_vbp/Android.mk | 7 + mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk | 40 +++++ mix_vbp/viddec_fw/fw/include/stdint.h | 4 + mix_vbp/viddec_fw/fw/parser/Android.mk | 54 ++++++ .../viddec_fw/fw/parser/include/viddec_emitter.h | 2 +- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 6 + mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 2 + mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 12 ++ mix_video/src/Android.mk | 107 ++++++++++++ mix_video/src/mixdisplayx11.c | 6 + mix_video/src/mixdisplayx11.h | 9 + mix_video/src/mixvideo.c | 192 ++++++++++++++++++++- mix_video/src/mixvideo.h | 9 + mix_video/src/mixvideocaps.c | 4 +- 19 files changed, 545 insertions(+), 14 deletions(-) create mode 100644 Android.mk create mode 100644 mix_audio/src/Android.mk create mode 100644 mix_common/src/Android.mk create mode 100644 mix_vbp/Android.mk create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk create mode 100644 mix_vbp/viddec_fw/fw/parser/Android.mk create mode 100644 mix_video/src/Android.mk diff --git a/Android.mk b/Android.mk new file mode 100644 index 0000000..d0deed6 --- /dev/null +++ b/Android.mk @@ -0,0 +1,11 @@ +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +VENDORS_INTEL_MRST_LIBMIX_ROOT := $(LOCAL_PATH) + +GLIB_TOP := external/glib + +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_common/src/Android.mk +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_audio/src/Android.mk +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_video/src/Android.mk +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_vbp/Android.mk diff --git a/mix_audio/src/Android.mk b/mix_audio/src/Android.mk new 
file mode 100644 index 0000000..c108526 --- /dev/null +++ b/mix_audio/src/Android.mk @@ -0,0 +1,45 @@ +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + mixaip.c \ + mixacp.c \ + mixacpmp3.c \ + mixacpwma.c \ + mixacpaac.c \ + mixaudio.c \ + sst_proxy.c + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH) \ + $(GLIB_TOP) \ + $(GLIB_TOP)/android \ + $(GLIB_TOP)/glib \ + $(GLIB_TOP)/gobject \ + $(TARGET_OUT_HEADERS)/libmixcommon + +LOCAL_SHARED_LIBRARIES := \ + libglib-2.0 \ + libgobject-2.0 \ + libgthread-2.0 \ + libgmodule-2.0 \ + libmixcommon + +LOCAL_COPY_HEADERS_TO := libmixaudio + +LOCAL_COPY_HEADERS := \ + amhelper.h \ + intel_sst_ioctl.h \ + mixacp.h \ + mixacpaac.h \ + mixacpmp3.h \ + mixacpwma.h \ + mixaip.h \ + mixaudio.h \ + mixaudiotypes.h \ + pvt.h \ + sst_proxy.h + +LOCAL_MODULE := libmixaudio + +include $(BUILD_SHARED_LIBRARY) diff --git a/mix_audio/src/mixacp.c b/mix_audio/src/mixacp.c index e7ce507..d66ee3c 100644 --- a/mix_audio/src/mixacp.c +++ b/mix_audio/src/mixacp.c @@ -197,8 +197,7 @@ gboolean mix_acp_equal(MixParams* first, MixParams *second) (acp1->sample_freq == acp2->sample_freq) && (acp1->bits_per_sample == acp2->bits_per_sample) && (acp1->op_align == acp2->op_align) && - (!g_strcmp0(acp1->stream_name, acp2->stream_name)); - //g_strcmp0 handles NULL gracefully + (!strcmp(acp1->stream_name, acp2->stream_name)); } return ret; diff --git a/mix_audio/src/mixaudio.c b/mix_audio/src/mixaudio.c index 6d41350..196a0b0 100644 --- a/mix_audio/src/mixaudio.c +++ b/mix_audio/src/mixaudio.c @@ -89,7 +89,7 @@ * * LPE Device location. */ -static const char* LPE_DEVICE="/dev/lpe"; +static const char* LPE_DEVICE="/dev/sst"; /* #define LPE_DEVICE "/dev/lpe" */ #define _LOCK(obj) g_static_rec_mutex_lock(obj); @@ -858,7 +858,11 @@ MIX_RESULT mix_audio_configure_default(MixAudio *mix, MixAudioConfigParams *audi } // now configure stream. 
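+  // Builds without an audio manager (AUDIO_MANAGER undefined) skip the
+  // AM unregister/register steps and report success instead.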
+#ifdef AUDIO_MANAGER ret = mix_audio_am_unregister(mix, audioconfigparams); +#else + ret = MIX_RESULT_SUCCESS; +#endif if (MIX_SUCCEEDED(ret)) { @@ -867,7 +871,11 @@ MIX_RESULT mix_audio_configure_default(MixAudio *mix, MixAudioConfigParams *audi if (MIX_SUCCEEDED(ret)) { +#ifdef AUDIO_MANAGER ret = mix_audio_am_register(mix, audioconfigparams); +#else + ret = MIX_RESULT_SUCCESS; +#endif } if (MIX_SUCCEEDED(ret)) @@ -953,9 +961,10 @@ MIX_RESULT mix_audio_get_timestamp_default(MixAudio *mix, guint64 *msecs) return ret; } +#ifdef AUDIO_MANAGER gboolean mix_audio_AM_Change(MixAudioConfigParams *oldparams, MixAudioConfigParams *newparams) { - if (g_strcmp0(oldparams->stream_name, newparams->stream_name) == 0) { + if (strcmp(oldparams->stream_name, newparams->stream_name) == 0) { return FALSE; } @@ -1025,6 +1034,7 @@ MIX_RESULT mix_audio_am_register(MixAudio *mix, MixAudioConfigParams *audioconfi return ret; } +#endif /* AUDIO_MANAGER */ MIX_RESULT mix_audio_capture_encode_default(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt) { diff --git a/mix_common/src/Android.mk b/mix_common/src/Android.mk new file mode 100644 index 0000000..7f2bc52 --- /dev/null +++ b/mix_common/src/Android.mk @@ -0,0 +1,32 @@ +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + mixlog.c \ + mixparams.c \ + mixdrmparams.c \ + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH) \ + $(GLIB_TOP) \ + $(GLIB_TOP)/android \ + $(GLIB_TOP)/glib \ + $(GLIB_TOP)/gobject + +LOCAL_SHARED_LIBRARIES := \ + libglib-2.0 \ + libgobject-2.0 \ + libgthread-2.0 \ + libgmodule-2.0 + +LOCAL_COPY_HEADERS_TO := libmixcommon + +LOCAL_COPY_HEADERS := \ + mixlog.h \ + mixresult.h \ + mixparams.h \ + mixdrmparams.h + +LOCAL_MODULE := libmixcommon + +include $(BUILD_SHARED_LIBRARY) diff --git a/mix_vbp/Android.mk b/mix_vbp/Android.mk new file mode 100644 index 0000000..9f286ec --- /dev/null +++ b/mix_vbp/Android.mk @@ -0,0 +1,7 @@ +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +VENDORS_INTEL_MRST_MIXVBP_ROOT := $(LOCAL_PATH) + +include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/Android.mk +include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/h264/parser/Android.mk diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk b/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk new file mode 100644 index 0000000..37dfdcf --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk @@ -0,0 +1,40 @@ +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + h264parse.c \ + h264parse_bsd.c \ + h264parse_math.c \ + h264parse_mem.c \ + h264parse_sei.c \ + h264parse_sh.c \ + h264parse_pps.c \ + h264parse_sps.c \ + h264parse_dpb.c \ + viddec_h264_parse.c \ + mix_vbp_h264_stubs.c + +LOCAL_CFLAGS := -DVBP -DHOST_ONLY + +LOCAL_C_INCLUDES := \ + $(GLIB_TOP) \ + $(GLIB_TOP)/android \ + $(GLIB_TOP)/glib \ + $(GLIB_TOP)/gobject \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/h264/include \ + $(TARGET_OUT_HEADERS)/libmixcommon + +LOCAL_MODULE := libmixvbp_h264 + +LOCAL_SHARED_LIBRARIES := \ + libglib-2.0 \ + libgobject-2.0 \ + libgthread-2.0 \ + libgmodule-2.0 \ + libmixvbp + +include $(BUILD_SHARED_LIBRARY) diff --git a/mix_vbp/viddec_fw/fw/include/stdint.h b/mix_vbp/viddec_fw/fw/include/stdint.h index 885cfe1..cf23208 100644 --- a/mix_vbp/viddec_fw/fw/include/stdint.h +++ 
b/mix_vbp/viddec_fw/fw/include/stdint.h @@ -1,6 +1,8 @@ #ifndef __STDINT_H #define __STDINT_H +#ifndef ANDROID + typedef unsigned char uint8_t; typedef unsigned short uint16_t; typedef unsigned int uint32_t; @@ -13,6 +15,8 @@ typedef signed int int32_t; typedef signed long long int64_t; //#endif +#endif + #ifndef NULL #define NULL (void*)0x0 #endif diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk new file mode 100644 index 0000000..1cdf50d --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -0,0 +1,54 @@ +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + vbp_h264_parser.c \ + vbp_vc1_parser.c \ + vbp_loader.c \ + vbp_mp42_parser.c \ + vbp_utils.c \ + viddec_emit.c \ + viddec_parse_sc.c \ + viddec_parse_sc_stub.c \ + viddec_pm.c \ + viddec_pm_parser_ops.c \ + viddec_pm_stubs.c \ + viddec_pm_tags.c \ + viddec_pm_utils_bstream.c \ + viddec_pm_utils_list.c + +LOCAL_CFLAGS := -DVBP -DHOST_ONLY + +LOCAL_C_INCLUDES += \ + $(GLIB_TOP) \ + $(GLIB_TOP)/glib \ + $(GLIB_TOP)/android \ + $(GLIB_TOP)/gobject \ + $(LOCAL_PATH)/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/h264/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp2/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vc1/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vc1/parser \ + $(TARGET_OUT_HEADERS)/libmixcommon \ + $(TARGET_OUT_HEADERS)/libva + +LOCAL_COPY_HEADERS_TO := libmixvbp + +LOCAL_COPY_HEADERS := \ + vbp_loader.h + +LOCAL_MODULE := libmixvbp + +LOCAL_SHARED_LIBRARIES := \ + libdl \ + libcutils \ + libglib-2.0 \ + libgobject-2.0 \ + libgthread-2.0 \ + libgmodule-2.0 + +include $(BUILD_SHARED_LIBRARY) diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h index bb96bab..d887501 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h @@ -1,7 +1,7 @@ #ifndef VIDDEC_EMITTER_H #define VIDDEC_EMITTER_H -#include +#include "../../include/stdint.h" #ifndef HOST_ONLY #define DDR_MEM_MASK 0x80000000 #else diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index 033f6b6..7a056df 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -1436,10 +1436,16 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) size_left = cubby->size; +#ifndef ANDROID while (size_left >= NAL_length_size) { NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed); +#else + while (size_left > 0) + { + NAL_length = size_left; +#endif size_parsed += NAL_length_size; cxt->list.data[cxt->list.num_items].stpos = size_parsed; size_parsed += NAL_length; /* skip NAL bytes */ diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index 66169dd..b57821c 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -29,9 +29,11 @@ typedef unsigned short uint16; #ifndef uint32 typedef unsigned int uint32; #endif +#ifndef ANDROID #ifndef bool typedef int bool; #endif +#endif typedef void *Handle; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c index 
651b801..c350342 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c @@ -66,7 +66,11 @@ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) switch (pcontext->parser_type) { case VBP_VC1: +#ifndef ANDROID parser_name = "libmixvbp_vc1.so.0"; +#else + parser_name = "libmixvbp_vc1.so"; +#endif break; /* MPEG-2 parser is not supported. */ @@ -76,11 +80,19 @@ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) break;*/ case VBP_MPEG4: +#ifndef ANDROID parser_name = "libmixvbp_mpeg4.so.0"; +#else + parser_name = "libmixvbp_mpeg4.so"; +#endif break; case VBP_H264: +#ifndef ANDROID parser_name = "libmixvbp_h264.so.0"; +#else + parser_name = "libmixvbp_h264.so"; +#endif break; default: diff --git a/mix_video/src/Android.mk b/mix_video/src/Android.mk new file mode 100644 index 0000000..614ac4b --- /dev/null +++ b/mix_video/src/Android.mk @@ -0,0 +1,107 @@ +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + mixbuffer.c \ + mixbufferpool.c \ + mixdisplay.c \ + mixdisplayx11.c \ + mixdrmparams.c \ + mixframemanager.c \ + mixsurfacepool.c \ + mixvideo.c \ + mixvideocaps.c \ + mixvideoconfigparams.c \ + mixvideoconfigparamsdec.c \ + mixvideoconfigparamsdec_h264.c \ + mixvideoconfigparamsdec_mp42.c \ + mixvideoconfigparamsdec_vc1.c \ + mixvideoconfigparamsenc.c \ + mixvideoconfigparamsenc_h264.c \ + mixvideoconfigparamsenc_mpeg4.c \ + mixvideoconfigparamsenc_preview.c \ + mixvideodecodeparams.c \ + mixvideoencodeparams.c \ + mixvideoformat.c \ + mixvideoformat_h264.c \ + mixvideoformat_mp42.c \ + mixvideoformat_vc1.c \ + mixvideoformatenc.c \ + mixvideoformatenc_h264.c \ + mixvideoformatenc_mpeg4.c \ + mixvideoformatenc_preview.c \ + mixvideoframe.c \ + mixvideoinitparams.c \ + mixvideorenderparams.c + +LOCAL_CFLAGS := \ + -DMIXVIDEO_AGE=1 \ + -DMIXVIDEO_CURRENT=1 \ + -DMIXVIDEO_MAJOR=0 \ + -DMIXVIDEO_MINOR=1 \ + -DMIXVIDEO_REVISION=8 + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH) \ + $(GLIB_TOP) \ + $(GLIB_TOP)/android \ + $(GLIB_TOP)/glib \ + $(GLIB_TOP)/gobject \ + $(TARGET_OUT_HEADERS)/libmixcommon \ + $(TARGET_OUT_HEADERS)/libmixvbp \ + $(TARGET_OUT_HEADERS)/libva + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + libglib-2.0 \ + libgobject-2.0 \ + libgthread-2.0 \ + libgmodule-2.0 \ + libmixcommon \ + libmixvbp \ + libva + +LOCAL_COPY_HEADERS_TO := libmixvideo + +LOCAL_COPY_HEADERS := \ + mixbuffer.h \ + mixbuffer_private.h \ + mixbufferpool.h \ + mixdisplay.h \ + mixdisplayx11.h \ + mixdrmparams.h \ + mixframemanager.h \ + mixsurfacepool.h \ + mixvideo.h \ + mixvideodef.h \ + mixvideo_private.h \ + mixvideocaps.h \ + mixvideoconfigparams.h \ + mixvideoconfigparamsdec.h \ + mixvideoconfigparamsdec_h264.h \ + mixvideoconfigparamsdec_mp42.h \ + mixvideoconfigparamsdec_vc1.h \ + mixvideoconfigparamsenc.h \ + mixvideoconfigparamsenc_h264.h \ + mixvideoconfigparamsenc_mpeg4.h \ + mixvideoconfigparamsenc_preview.h \ + mixvideodecodeparams.h \ + mixvideoencodeparams.h \ + mixvideoformat.h \ + mixvideoformat_h264.h \ + mixvideoformat_mp42.h \ + mixvideoformat_vc1.h \ + mixvideoformatenc.h \ + mixvideoformatenc_h264.h \ + mixvideoformatenc_mpeg4.h \ + mixvideoformatenc_preview.h \ + mixvideoformatqueue.h \ + mixvideoframe.h \ + mixvideoframe_private.h \ + mixvideoinitparams.h \ + mixvideorenderparams.h \ + mixvideorenderparams_internal.h + +LOCAL_MODULE := libmixvideo + +include $(BUILD_SHARED_LIBRARY) diff --git a/mix_video/src/mixdisplayx11.c b/mix_video/src/mixdisplayx11.c index 60eb3e4..467bde2 100644 --- 
a/mix_video/src/mixdisplayx11.c +++ b/mix_video/src/mixdisplayx11.c @@ -22,6 +22,12 @@ static MixDisplayClass *parent_class = NULL; #define _do_init { _mix_displayx11_type = g_define_type_id; } +#ifdef ANDROID +int XSync(Display* display, Bool bvalue) { + return 0; +} +#endif + gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src); MixDisplay *mix_displayx11_dup(const MixDisplay * obj); gboolean mix_displayx11_equal(MixDisplay * first, MixDisplay * second); diff --git a/mix_video/src/mixdisplayx11.h b/mix_video/src/mixdisplayx11.h index 4a14c9f..d6db5bd 100644 --- a/mix_video/src/mixdisplayx11.h +++ b/mix_video/src/mixdisplayx11.h @@ -11,7 +11,16 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixdisplay.h" #include "mixvideodef.h" +#ifndef ANDROID #include +#else +#define Display unsigned int +#define Drawable unsigned int +#define Bool int +#define True 1 +#define False 0 +int XSync(Display* display, Bool bvalue); +#endif /** * MIX_TYPE_DISPLAYX11: diff --git a/mix_video/src/mixvideo.c b/mix_video/src/mixvideo.c index e9cba0a..ef74184 100644 --- a/mix_video/src/mixvideo.c +++ b/mix_video/src/mixvideo.c @@ -7,7 +7,9 @@ */ #include /* libVA */ +#ifndef ANDROID #include +#endif #include #include "mixvideolog.h" @@ -99,6 +101,10 @@ MIX_RESULT mix_video_release_frame_default(MixVideo * mix, MIX_RESULT mix_video_render_default(MixVideo * mix, MixVideoRenderParams * render_params, MixVideoFrame *frame); +MIX_RESULT mix_video_get_decoded_data_default(MixVideo * mix, MixIOVec * iovout, + MixVideoRenderParams * render_params, MixVideoFrame *frame); + + MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, MixVideoEncodeParams * encode_params); @@ -159,6 +165,7 @@ static void mix_video_class_init(MixVideoClass * klass) { klass->get_frame_func = mix_video_get_frame_default; klass->release_frame_func = mix_video_release_frame_default; klass->render_func = mix_video_render_default; + klass->get_decoded_data_func = mix_video_get_decoded_data_default; klass->encode_func = mix_video_encode_default; klass->flush_func = mix_video_flush_default; klass->eos_func = mix_video_eos_default; @@ -370,6 +377,12 @@ MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode, if (MIX_IS_DISPLAYX11(mix_display)) { MixDisplayX11 *mix_displayx11 = MIX_DISPLAYX11(mix_display); +#if 1 + mix_displayx11->display = g_malloc(sizeof(Display)); + *(mix_displayx11->display) = 0x18c34078; +#else + //mix_displayx11->display = 1; +#endif ret = mix_displayx11_get_display(mix_displayx11, &display); if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed to get display 2\n"); @@ -500,7 +513,7 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, LOG_I( "mime : %s\n", mime_type); #ifdef MIX_LOG_ENABLE - if (g_strcmp0(mime_type, "video/x-wmv") == 0) { + if (strcmp(mime_type, "video/x-wmv") == 0) { LOG_I( "mime : video/x-wmv\n"); if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { @@ -548,8 +561,8 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, /* initialize frame manager */ - if (g_strcmp0(mime_type, "video/x-wmv") == 0 || g_strcmp0(mime_type, - "video/mpeg") == 0 || g_strcmp0(mime_type, "video/x-divx") == 0) { + if (strcmp(mime_type, "video/x-wmv") == 0 || strcmp(mime_type, + "video/mpeg") == 0 || strcmp(mime_type, "video/x-divx") == 0) { ret = mix_framemanager_initialize(priv->frame_manager, frame_order_mode, fps_n, fps_d, FALSE); } else { @@ -579,7 +592,7 @@ MIX_RESULT 
mix_video_configure_decode(MixVideo * mix, /* Finally, we can create MixVideoFormat */ /* What type of MixVideoFormat we need create? */ - if (g_strcmp0(mime_type, "video/x-wmv") == 0 + if (strcmp(mime_type, "video/x-wmv") == 0 && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { MixVideoFormat_VC1 *video_format = mix_videoformat_vc1_new(); @@ -593,7 +606,7 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, priv->video_format = MIX_VIDEOFORMAT(video_format); - } else if (g_strcmp0(mime_type, "video/x-h264") == 0 + } else if (strcmp(mime_type, "video/x-h264") == 0 && MIX_IS_VIDEOCONFIGPARAMSDEC_H264(priv_config_params_dec)) { MixVideoFormat_H264 *video_format = mix_videoformat_h264_new(); @@ -607,13 +620,13 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, priv->video_format = MIX_VIDEOFORMAT(video_format); - } else if (g_strcmp0(mime_type, "video/mpeg") == 0 || g_strcmp0(mime_type, + } else if (strcmp(mime_type, "video/mpeg") == 0 || strcmp(mime_type, "video/x-divx") == 0) { guint version = 0; /* Is this mpeg4:2 ? */ - if (g_strcmp0(mime_type, "video/mpeg") == 0) { + if (strcmp(mime_type, "video/mpeg") == 0) { /* * we don't support mpeg other than mpeg verion 4 @@ -1234,6 +1247,171 @@ MIX_RESULT mix_video_render_default(MixVideo * mix, } +#ifdef ANDROID +MIX_RESULT mix_video_get_decoded_data_default(MixVideo * mix, MixIOVec * iovout, + MixVideoRenderParams * render_params, MixVideoFrame *frame) { + + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_VERBOSE, "Begin\n"); + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + MixDisplay *mix_display = NULL; + MixDisplayX11 *mix_display_x11 = NULL; + + Display *display = NULL; + Drawable drawable = 0; + + MixRect src_rect, dst_rect; + + VARectangle *va_cliprects = NULL; + guint number_of_cliprects = 0; + + /* VASurfaceID va_surface_id; */ + gulong va_surface_id; + VAStatus va_status; + + if (!mix || !render_params) { + return MIX_RESULT_NULL_PTR; + } + + if (!MIX_IS_VIDEO(mix)) { + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Not MixVideo\n"); + return MIX_RESULT_INVALID_PARAM; + } + + /* Is this render param valid? */ + if (!MIX_IS_VIDEORENDERPARAMS(render_params)) { + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, + "Not MixVideoRenderParams\n"); + return MIX_RESULT_INVALID_PARAM; + } + + priv = MIX_VIDEO_PRIVATE(mix); + + if (!priv->initialized) { + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Not initialized\n"); + return MIX_RESULT_NOT_INIT; + } + + if (!priv->configured) { + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Not configured\n"); + return MIX_RESULT_NOT_CONFIGURED; + } + + /* ---------------------- begin lock --------------------- */ + g_mutex_lock(priv->objlock); + + /* get MixDisplay prop from render param */ + ret = mix_videorenderparams_get_display(render_params, &mix_display); + if (ret != MIX_RESULT_SUCCESS) { + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, + "Failed to get mix_display\n"); + goto cleanup; + } + + /* Is this MixDisplayX11 ? 
*/ + /* TODO: we shall also support MixDisplay other than MixDisplayX11 */ + if (!MIX_IS_DISPLAYX11(mix_display)) { + ret = MIX_RESULT_INVALID_PARAM; + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Not MixDisplayX11\n"); + goto cleanup; + } + + /* cast MixDisplay to MixDisplayX11 */ + mix_display_x11 = MIX_DISPLAYX11(mix_display); + + /* Get Drawable */ + ret = mix_displayx11_get_drawable(mix_display_x11, &drawable); + if (ret != MIX_RESULT_SUCCESS) { + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Failed to get drawable\n"); + goto cleanup; + } + + /* Get Display */ + ret = mix_displayx11_get_display(mix_display_x11, &display); + if (ret != MIX_RESULT_SUCCESS) { + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Failed to get display\n"); + goto cleanup; + } + + /* get src_rect */ + ret = mix_videorenderparams_get_src_rect(render_params, &src_rect); + if (ret != MIX_RESULT_SUCCESS) { + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, + "Failed to get SOURCE src_rect\n"); + goto cleanup; + } + + /* get dst_rect */ + ret = mix_videorenderparams_get_dest_rect(render_params, &dst_rect); + if (ret != MIX_RESULT_SUCCESS) { + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Failed to get dst_rect\n"); + goto cleanup; + } + /* get va_cliprects */ + ret = mix_videorenderparams_get_cliprects_internal(render_params, + &va_cliprects, &number_of_cliprects); + if (ret != MIX_RESULT_SUCCESS) { + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, + "Failed to get va_cliprects\n"); + goto cleanup; + } + + /* get surface id from frame */ + ret = mix_videoframe_get_frame_id(frame, &va_surface_id); + if (ret != MIX_RESULT_SUCCESS) { + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, + "Failed to get va_surface_id\n"); + goto cleanup; + } + guint64 timestamp = 0; + mix_videoframe_get_timestamp(frame, ×tamp); + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_VERBOSE, "Displaying surface ID %d, timestamp %"G_GINT64_FORMAT"\n", (int)va_surface_id, timestamp); + + guint32 frame_structure = 0; + mix_videoframe_get_frame_structure(frame, &frame_structure); + /* TODO: the last param of vaPutSurface is de-interlacing flags, + what is value shall be*/ + va_status = vaPutSurfaceBuf(priv->va_display, (VASurfaceID) va_surface_id, + drawable, iovout->data, &iovout->data_size, src_rect.x, src_rect.y, src_rect.width, src_rect.height, + dst_rect.x, dst_rect.y, dst_rect.width, dst_rect.height, + va_cliprects, number_of_cliprects, frame_structure); + + if (va_status != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, + "Failed vaPutSurface() : va_status = %d\n", va_status); + goto cleanup; + } + + ret = MIX_RESULT_SUCCESS; + +cleanup: + + MIXUNREF(mix_display, mix_display_unref) + /* MIXUNREF(render_params, mix_videorenderparams_unref)*/ + + /* ---------------------- end lock --------------------- */ + g_mutex_unlock(priv->objlock); + + mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_VERBOSE, "End\n"); + + return ret; +} +#endif + +MIX_RESULT mix_video_get_decoded_data(MixVideo * mix, MixIOVec * iovout, + MixVideoRenderParams * render_params, MixVideoFrame *frame) { + MixVideoClass *klass = MIX_VIDEO_GET_CLASS(mix); + + if (klass->get_decoded_data_func) { + return klass->get_decoded_data_func(mix, iovout, render_params, frame); + } + return MIX_RESULT_NOTIMPL; +} + + MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, MixVideoEncodeParams * encode_params) { diff --git a/mix_video/src/mixvideo.h b/mix_video/src/mixvideo.h index 0b50cf6..73c6b71 100644 
--- a/mix_video/src/mixvideo.h +++ b/mix_video/src/mixvideo.h @@ -65,6 +65,11 @@ typedef MIX_RESULT (*MixVideoReleaseFrameFunc)(MixVideo * mix, typedef MIX_RESULT (*MixVideoRenderFunc)(MixVideo * mix, MixVideoRenderParams * render_params, MixVideoFrame *frame); +typedef MIX_RESULT (*MixVideoGetDecodedDataFunc)(MixVideo * mix, + MixIOVec * iovout, MixVideoRenderParams * render_params, + MixVideoFrame *frame); + + typedef MIX_RESULT (*MixVideoEncodeFunc)(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, MixVideoEncodeParams * encode_params); @@ -123,6 +128,7 @@ struct _MixVideoClass { MixVideoGetFrameFunc get_frame_func; MixVideoReleaseFrameFunc release_frame_func; MixVideoRenderFunc render_func; + MixVideoGetDecodedDataFunc get_decoded_data_func; MixVideoEncodeFunc encode_func; MixVideoFlushFunc flush_func; MixVideoEOSFunc eos_func; @@ -191,6 +197,9 @@ MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame); MIX_RESULT mix_video_render(MixVideo * mix, MixVideoRenderParams * render_params, MixVideoFrame *frame); +MIX_RESULT mix_video_get_decoded_data(MixVideo * mix, MixIOVec * iovout, + MixVideoRenderParams * render_params, MixVideoFrame *frame); + MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, MixVideoEncodeParams * encode_params); diff --git a/mix_video/src/mixvideocaps.c b/mix_video/src/mixvideocaps.c index 3a41c47..2c98ab3 100644 --- a/mix_video/src/mixvideocaps.c +++ b/mix_video/src/mixvideocaps.c @@ -179,8 +179,8 @@ mix_videocaps_equal (MixParams * first, MixParams * second) this_second = MIX_VIDEOCAPS (second); /* TODO: add comparison for other properties */ - if (g_strcmp0 (this_first->mix_caps, this_second->mix_caps) == 0 - && g_strcmp0 (this_first->video_hw_caps, + if (strcmp (this_first->mix_caps, this_second->mix_caps) == 0 + && strcmp (this_first->video_hw_caps, this_second->video_hw_caps) == 0) { // members within this scope equal. chaining up. 
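A caveat on the g_strcmp0() -> strcmp() substitutions in this port: GLib's g_strcmp0() is NULL-safe (NULL sorts before any non-NULL string), whereas plain strcmp() has undefined behavior if either argument is NULL. The mime_type and stream_name values compared above come from caller-supplied params, so after this change callers must guarantee they are non-NULL. A minimal NULL-safe wrapper is easy to sketch; the helper below is illustrative only and not part of this patch:

    #include <string.h>

    /* Hypothetical helper mirroring g_strcmp0() semantics without GLib. */
    static int null_safe_strcmp(const char *a, const char *b)
    {
        if (a == b) return 0;       /* both NULL, or the same pointer */
        if (a == NULL) return -1;   /* NULL sorts before any string */
        if (b == NULL) return 1;
        return strcmp(a, b);
    }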
-- cgit v1.2.3 From 22741fadfb8c0b46ac741785c00f107081668974 Mon Sep 17 00:00:00 2001 From: Ho-Eun Ryu Date: Wed, 10 Feb 2010 12:56:43 +0900 Subject: log: fix for log redirection to logcat you can see the libmix log through logcat by declaring MIX*_LOG_ENABLE := true in Android.mk for audio MIXAUDIO_LOG_ENABLE := true for video MIXVIDEO_LOG_ENABLE := true for vbp MIXVBP_LOG_ENABLE := true --- mix_audio/src/Android.mk | 7 +++++++ mix_common/src/mixlog.c | 4 +++- mix_common/src/mixlog.h | 23 ++++++++++++++++++++++- mix_vbp/viddec_fw/fw/parser/Android.mk | 7 +++++++ mix_vbp/viddec_fw/fw/parser/vbp_trace.h | 22 +++++++++++++++++++++- mix_video/src/Android.mk | 7 +++++++ 6 files changed, 67 insertions(+), 3 deletions(-) diff --git a/mix_audio/src/Android.mk b/mix_audio/src/Android.mk index c108526..466e3ce 100644 --- a/mix_audio/src/Android.mk +++ b/mix_audio/src/Android.mk @@ -1,6 +1,8 @@ LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) +#MIXAUDIO_LOG_ENABLE := true + LOCAL_SRC_FILES := \ mixaip.c \ mixacp.c \ @@ -25,6 +27,11 @@ LOCAL_SHARED_LIBRARIES := \ libgmodule-2.0 \ libmixcommon +ifeq ($(strip $(MIXAUDIO_LOG_ENABLE)),true) +LOCAL_CFLAGS += -DMIX_LOG_ENABLE +LOCAL_SHARED_LIBRARIES += liblog +endif + LOCAL_COPY_HEADERS_TO := libmixaudio LOCAL_COPY_HEADERS := \ diff --git a/mix_common/src/mixlog.c b/mix_common/src/mixlog.c index a9dd359..0d306e4 100644 --- a/mix_common/src/mixlog.c +++ b/mix_common/src/mixlog.c @@ -18,6 +18,8 @@ #define MIX_LOG_LEVEL "MIX_LOG_LEVEL" +#ifndef ANDROID + static GStaticMutex g_mutex = G_STATIC_MUTEX_INIT; #ifdef MIX_LOG_USE_HT @@ -254,4 +256,4 @@ exit: #endif /* MIX_LOG_USE_HT */ - +#endif /* !ANDROID */ diff --git a/mix_common/src/mixlog.h b/mix_common/src/mixlog.h index 2fe60fd..906e6c9 100644 --- a/mix_common/src/mixlog.h +++ b/mix_common/src/mixlog.h @@ -31,13 +31,34 @@ void mix_log_func(const gchar* comp, gint level, const gchar *file, #define MIX_LOG_LEVEL_INFO 3 #define MIX_LOG_LEVEL_VERBOSE 4 - /* MACROS for mixlog */ #ifdef MIX_LOG_ENABLE +#ifdef ANDROID + +#include + +#undef MIX_LOG_LEVEL_ERROR +#undef MIX_LOG_LEVEL_WARNING +#undef MIX_LOG_LEVEL_INFO +#undef MIX_LOG_LEVEL_VERBOSE + +#define MIX_LOG_LEVEL_ERROR ANDROID_LOG_ERROR +#define MIX_LOG_LEVEL_WARNING ANDROID_LOG_WARN +#define MIX_LOG_LEVEL_INFO ANDROID_LOG_INFO +#define MIX_LOG_LEVEL_VERBOSE ANDROID_LOG_VERBOSE + +#define mix_log(comp, level, format, ...) \ + __android_log_print(level, comp, "%s():%d: "format, \ + __FUNCTION__, __LINE__, ##__VA_ARGS__) + +#else + #define mix_log(comp, level, format, ...) \ mix_log_func(comp, level, __FILE__, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) +#endif /* ANDROID */ + #else #define mix_log(comp, level, format, ...) 
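With MIX_LOG_ENABLE defined (set MIXAUDIO_LOG_ENABLE := true and friends, as above), the Android branch of mix_log() goes straight to logcat, using the component string as the tag. A sketch of what the macro does at an existing call site (expansion shown for illustration; MIX_LOG_LEVEL_INFO is redefined to ANDROID_LOG_INFO on Android):

    /* A call such as this one from mixaudio.c ... */
    mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "begin readv()\n");

    /* ... expands to roughly: */
    __android_log_print(ANDROID_LOG_INFO, MIX_AUDIO_COMP,
                        "%s():%d: " "begin readv()\n",
                        __FUNCTION__, __LINE__);

The message then appears under the component tag in adb logcat.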
diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk index 1cdf50d..797c640 100644 --- a/mix_vbp/viddec_fw/fw/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -2,6 +2,8 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) +#MIXVBP_LOG_ENABLE := true + LOCAL_SRC_FILES := \ vbp_h264_parser.c \ vbp_vc1_parser.c \ @@ -51,4 +53,9 @@ LOCAL_SHARED_LIBRARIES := \ libgthread-2.0 \ libgmodule-2.0 +ifeq ($(strip $(MIXVBP_LOG_ENABLE)),true) +LOCAL_CFLAGS += -DVBP_TRACE +LOCAL_SHARED_LIBRARIES += liblog +endif + include $(BUILD_SHARED_LIBRARY) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h index 9f2a21c..c532b67 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h @@ -17,6 +17,25 @@ #ifdef VBP_TRACE /* if VBP_TRACE is defined*/ +#ifdef ANDROID + +#include + +#define ETRACE(format, ...) \ + __android_log_print(ANDROID_LOG_ERROR, "mixvbp", "%s():%d: "format, \ + __FUNCTION__, __LINE__, ##__VA_ARGS__) +#define WTRACE(format, ...) \ + __android_log_print(ANDROID_LOG_WARN, "mixvbp", "%s():%d: "format, \ + __FUNCTION__, __LINE__, ##__VA_ARGS__) +#define ITRACE(format, ...) \ + __android_log_print(ANDROID_LOG_INFO, "mixvbp", "%s():%d: "format, \ + __FUNCTION__, __LINE__, ##__VA_ARGS__) +#define VTRACE(format, ...) \ + __android_log_print(ANDROID_LOG_VERBOSE, "mixvbp", "%s():%d: "format, \ + __FUNCTION__, __LINE__, ##__VA_ARGS__) + +#else + #include #include @@ -31,6 +50,8 @@ vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) #define ITRACE(format, ...) VBP_TRACE_UTIL("INFO: ", format, ##__VA_ARGS__) #define VTRACE(format, ...) VBP_TRACE_UTIL("VERBOSE: ", format, ##__VA_ARGS__) +#endif /* ANDROID */ + #else /* if VBP_TRACE is not defined */ #define ETRACE(format, ...) @@ -38,7 +59,6 @@ vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) #define ITRACE(format, ...) #define VTRACE(format, ...) 
- #endif /* VBP_TRACE*/ diff --git a/mix_video/src/Android.mk b/mix_video/src/Android.mk index 614ac4b..d60919c 100644 --- a/mix_video/src/Android.mk +++ b/mix_video/src/Android.mk @@ -1,6 +1,8 @@ LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) +#MIXVIDEO_LOG_ENABLE := true + LOCAL_SRC_FILES := \ mixbuffer.c \ mixbufferpool.c \ @@ -61,6 +63,11 @@ LOCAL_SHARED_LIBRARIES := \ libmixvbp \ libva +ifeq ($(strip $(MIXVIDEO_LOG_ENABLE)),true) +LOCAL_CFLAGS += -DMIX_LOG_ENABLE +LOCAL_SHARED_LIBRARIES += liblog +endif + LOCAL_COPY_HEADERS_TO := libmixvideo LOCAL_COPY_HEADERS := \ -- cgit v1.2.3 From 22a94266bf975836273acbfd496712ede09c01ed Mon Sep 17 00:00:00 2001 From: Ho-Eun Ryu Date: Wed, 10 Feb 2010 21:32:36 +0900 Subject: mixvbp: we don't use 4 bytes-sized NAL header anymore --- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index 7a056df..e82d7b5 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -1441,12 +1441,12 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) { NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed); + size_parsed += NAL_length_size; #else while (size_left > 0) { NAL_length = size_left; #endif - size_parsed += NAL_length_size; cxt->list.data[cxt->list.num_items].stpos = size_parsed; size_parsed += NAL_length; /* skip NAL bytes */ /* end position is exclusive */ -- cgit v1.2.3 From b25222d8338f314e0fb26b07e8ecb34d3aa24e54 Mon Sep 17 00:00:00 2001 From: Chang-Man Lee Date: Tue, 23 Feb 2010 17:29:55 +0900 Subject: mix_vbp: Added libmixvbp_mpeg4 --- mix_vbp/Android.mk | 1 + mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk | 37 ++++++++++++++++++++++ .../fw/codecs/mp4/parser/viddec_fw_mp4_workload.c | 1 + .../fw/codecs/mp4/parser/viddec_mp4_parse.c | 1 + .../fw/codecs/mp4/parser/viddec_mp4_shortheader.c | 1 + .../mp4/parser/viddec_mp4_videoobjectlayer.c | 1 + .../mp4/parser/viddec_mp4_videoobjectplane.c | 1 + .../fw/codecs/mp4/parser/viddec_mp4_visualobject.c | 1 + .../viddec_fw/fw/codecs/mp4/parser/viddec_types.h | 7 ++++ 9 files changed, 51 insertions(+) create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk create mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_types.h diff --git a/mix_vbp/Android.mk b/mix_vbp/Android.mk index 9f286ec..7d5d2a8 100644 --- a/mix_vbp/Android.mk +++ b/mix_vbp/Android.mk @@ -5,3 +5,4 @@ VENDORS_INTEL_MRST_MIXVBP_ROOT := $(LOCAL_PATH) include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/Android.mk include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/h264/parser/Android.mk +include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/parser/Android.mk diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk new file mode 100644 index 0000000..c539d61 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk @@ -0,0 +1,37 @@ +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + viddec_mp4_visualobject.c \ + viddec_mp4_decodevideoobjectplane.c \ + viddec_mp4_parse.c \ + viddec_fw_mp4_workload.c \ + viddec_mp4_videoobjectplane.c \ + viddec_parse_sc_mp4.c \ + viddec_mp4_shortheader.c \ + viddec_mp4_videoobjectlayer.c + +LOCAL_CFLAGS := -DVBP -DHOST_ONLY + +LOCAL_C_INCLUDES := \ + $(GLIB_TOP) \ + $(GLIB_TOP)/android \ + $(GLIB_TOP)/glib \ + $(GLIB_TOP)/gobject \ + 
$(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/include \ + $(TARGET_OUT_HEADERS)/libmixcommon + +LOCAL_MODULE := libmixvbp_mpeg4 + +LOCAL_SHARED_LIBRARIES := \ + libglib-2.0 \ + libgobject-2.0 \ + libgthread-2.0 \ + libgmodule-2.0 \ + libmixvbp + +include $(BUILD_SHARED_LIBRARY) diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c index f595c91..5632728 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c @@ -2,6 +2,7 @@ #include "viddec_parser_ops.h" #include "viddec_fw_mp4.h" #include "viddec_mp4_parse.h" +#include "viddec_types.h" uint32_t viddec_fw_mp4_populate_attr(viddec_workload_t *wl, viddec_mp4_parser_t *parser) { diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c index b4cc302..17db475 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c @@ -6,6 +6,7 @@ #include "viddec_mp4_videoobjectlayer.h" #include "viddec_mp4_videoobjectplane.h" #include "viddec_mp4_visualobject.h" +#include "viddec_types.h" extern uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state); diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c index a3d894d..7e17984 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c @@ -1,4 +1,5 @@ #include "viddec_mp4_shortheader.h" +#include "viddec_types.h" typedef struct { diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c index 6df06b6..9145342 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c @@ -1,4 +1,5 @@ #include "viddec_mp4_videoobjectlayer.h" +#include "viddec_types.h" const unsigned char mp4_DefaultIntraQuantMatrix[64] = { 8, 17, 18, 19, 21, 23, 25, 27, diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c index 9840af4..cf761e3 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c @@ -1,4 +1,5 @@ #include "viddec_mp4_videoobjectplane.h" +#include "viddec_types.h" mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t *parser) { diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c index 36c0b29..49e7887 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c @@ -1,4 +1,5 @@ #include "viddec_mp4_visualobject.h" +#include "viddec_types.h" static inline uint8_t mp4_pvt_isValid_verID(uint8_t id) { diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_types.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_types.h new file mode 100644 
index 0000000..98d4ce8 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_types.h @@ -0,0 +1,7 @@ +#ifndef __VIDDEC_TYPES_H__ +#define __VIDDEC_TYPES_H__ + +#define true 1 +#define false 0 + +#endif //__VIDDEC_TYPES_H__ -- cgit v1.2.3 From 8558cf880a31b4b8ee7c76dbd64822942f88cfec Mon Sep 17 00:00:00 2001 From: "Khanh V. Nguyen" Date: Tue, 20 Apr 2010 13:37:17 -0700 Subject: sync up with WRS 0329 build Change-Id: I8cb84e48333f621fedc81a855a615485febaf3a8 --- mix_audio/ChangeLog | 27 + mix_audio/configure.ac | 4 +- mix_audio/mixaudio.spec | 2 +- mix_audio/src/Android.mk | 2 +- mix_audio/src/Makefile.am | 2 +- mix_audio/src/amhelper.c | 12 +- mix_audio/src/amhelper.h | 6 +- mix_audio/src/mixacp.c | 34 ++ mix_audio/src/mixacpaac.c | 6 +- mix_audio/src/mixacpmp3.c | 5 +- mix_audio/src/mixacpwma.c | 5 +- mix_audio/src/mixaudio.c | 1181 +++++++++++++++++++++++++++++--------------- mix_audio/src/mixaudio.h | 23 +- mix_audio/src/sst_proxy.c | 6 +- mix_common/ChangeLog | 4 + mix_common/configure.ac | 2 +- mix_common/mixcommon.spec | 2 +- mix_common/src/mixlog.c | 1 + mix_common/src/mixlog.h | 1 + mix_common/src/mixresult.h | 4 + 20 files changed, 911 insertions(+), 418 deletions(-) diff --git a/mix_audio/ChangeLog b/mix_audio/ChangeLog index 3eb86f3..00fa650 100644 --- a/mix_audio/ChangeLog +++ b/mix_audio/ChangeLog @@ -1,3 +1,30 @@ +2010-01-31 Echo Choi + + * Reverted to use num_chan as output number of channel for AAC. + +2010-01-29 Echo Choi + + * Fixed Audio Manager setting. + * Updated version to 0.4.1 since API changes since 0.3.5. + +2010-01-25 Echo Choi + + * Updated MixCommon dependency to 0.1.8. + * Updated version to 0.3.6. + +2010-01-24 Echo Choi + + * Sync MixIOVec between capture and decode. + +2010-01-22 Echo Choi + + * Updated MixIOVec definition. + * Updated API sync with 0.79 doc. + +2010-01-20 Echo Choi + + * Updated API doc 0.79 sync up. + 2010-01-18 Echo Choi * Updated version to 0.3.5 and submit for build. diff --git a/mix_audio/configure.ac b/mix_audio/configure.ac index bcbb4ab..01c84a9 100644 --- a/mix_audio/configure.ac +++ b/mix_audio/configure.ac @@ -2,7 +2,7 @@ AC_INIT("","",[echo.choi@intel.com]) AC_CONFIG_MACRO_DIR(m4) -AS_MIX_VERSION(mixaudio, MIXAUDIO, 0, 3, 5) +AS_MIX_VERSION(mixaudio, MIXAUDIO, 0, 4, 1) dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode AM_MAINTAINER_MODE @@ -88,7 +88,7 @@ if test "x$HAVE_GTRHEAD" = "xno"; then AC_MSG_ERROR(You need glib development packages installed !) fi -MIXCOMMON_REQUIRED=0.1.6 +MIXCOMMON_REQUIRED=0.1.8 PKG_CHECK_MODULES(MIXCOMMON, mixcommon >= $MIXCOMMON_REQUIRED , HAVE_MIXCOMMON=yes, HAVE_MIXCOMMON=no) if test "x$HAVE_MIXCOMMON" = "xno"; then AC_MSG_ERROR(You need mixcommon development package $MIXCOMMON_REQUIRED installed !) 
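The PKG_CHECK_MODULES test above enforces the new MixCommon floor through pkg-config at configure time. Assuming a standard pkg-config install, the requirement can be verified by hand before building, for example:

    pkg-config --modversion mixcommon
    pkg-config --atleast-version=0.1.8 mixcommon && echo "mixcommon >= 0.1.8"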
diff --git a/mix_audio/mixaudio.spec b/mix_audio/mixaudio.spec index e618d51..54e658b 100644 --- a/mix_audio/mixaudio.spec +++ b/mix_audio/mixaudio.spec @@ -6,7 +6,7 @@ Summary: MIX Audio Name: mixaudio -Version: 0.3.5 +Version: 0.4.1 Release: 1 Source0: %{name}-%{version}.tar.gz NoSource: 0 diff --git a/mix_audio/src/Android.mk b/mix_audio/src/Android.mk index 466e3ce..08f3566 100644 --- a/mix_audio/src/Android.mk +++ b/mix_audio/src/Android.mk @@ -44,8 +44,8 @@ LOCAL_COPY_HEADERS := \ mixaip.h \ mixaudio.h \ mixaudiotypes.h \ - pvt.h \ sst_proxy.h +# pvt.h \ LOCAL_MODULE := libmixaudio diff --git a/mix_audio/src/Makefile.am b/mix_audio/src/Makefile.am index b03751b..4a4cd36 100644 --- a/mix_audio/src/Makefile.am +++ b/mix_audio/src/Makefile.am @@ -42,7 +42,7 @@ endif #libmixaudio_stub_la_LIBTOOLFLAGS = $(libmixaudio_la_LIBTOOLFLAGS) # headers we need but don't want installed -noinst_HEADERS = intel_sst_ioctl.h sst_proxy.h pvt.h amhelper.h +noinst_HEADERS = intel_sst_ioctl.h sst_proxy.h amhelper.h # TODO: decide whehter a /usr/include/mix is needed for mix headers include_HEADERS = mixaudio.h \ diff --git a/mix_audio/src/amhelper.c b/mix_audio/src/amhelper.c index 501ece7..5124a6a 100644 --- a/mix_audio/src/amhelper.c +++ b/mix_audio/src/amhelper.c @@ -11,7 +11,7 @@ static gboolean am_enable=FALSE; * return -1 means failed * return 0 means succeeded * */ -gint dbus_init() { +gint32 dbus_init() { GError *error; const char *name = "org.moblin.audiomanager"; @@ -25,7 +25,7 @@ gint dbus_init() { else am_enable = FALSE; - if (am_enable) { + if (am_enable && (proxy_lpe == NULL)) { error = NULL; connection = dbus_g_bus_get(DBUS_BUS_SESSION, &error); @@ -94,25 +94,25 @@ gint32 lpe_stream_unregister(guint32 am_stream_id) return s_output; } -gint32 lpe_stream_notify_pause(guint32 stream_id) +gint32 lpe_stream_notify_pause(guint32 am_stream_id) { GError *error; gint32 s_output=0; if (am_enable) { - dbus_g_proxy_call (proxy_lpe, "LPEStreamNotifyPause", &error, G_TYPE_UINT, stream_id, G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID); + dbus_g_proxy_call (proxy_lpe, "LPEStreamNotifyPause", &error, G_TYPE_UINT, am_stream_id, G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID); } return s_output; } -gint32 lpe_stream_notify_resume(guint32 stream_id) +gint32 lpe_stream_notify_resume(guint32 am_stream_id) { GError *error; gint32 s_output=0; if (am_enable) { - dbus_g_proxy_call (proxy_lpe, "LPEStreamNotifyResume", &error, G_TYPE_UINT, stream_id, G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID); + dbus_g_proxy_call (proxy_lpe, "LPEStreamNotifyResume", &error, G_TYPE_UINT, am_stream_id, G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID); } return s_output; diff --git a/mix_audio/src/amhelper.h b/mix_audio/src/amhelper.h index 9ec115c..8a00681 100644 --- a/mix_audio/src/amhelper.h +++ b/mix_audio/src/amhelper.h @@ -12,14 +12,14 @@ #include #include -gint dbus_init(); +gint32 dbus_init(); gint32 lpe_stream_register(guint32 lpe_stream_id, char* media_role, char* lpe_stream_name, guint32 stream_type); gint32 lpe_stream_unregister(guint32 am_stream_id); -gint32 lpe_stream_notify_pause(guint32 stream_id); +gint32 lpe_stream_notify_pause(guint32 am_stream_id); -gint32 lpe_stream_notify_resume(guint32 stream_id); +gint32 lpe_stream_notify_resume(guint32 am_stream_id); #endif diff --git a/mix_audio/src/mixacp.c b/mix_audio/src/mixacp.c index d66ee3c..3478bf3 100644 --- a/mix_audio/src/mixacp.c +++ b/mix_audio/src/mixacp.c @@ -319,3 +319,37 @@ void mix_acp_print_params(MixAudioConfigParams *obj) } } 
+ +MIX_RESULT mix_acp_set_audio_manager(MixAudioConfigParams *obj, MixAudioManager am) +{ + if (obj == NULL) return MIX_RESULT_NULL_PTR; + + if (!G_UNLIKELY(MIX_IS_AUDIOCONFIGPARAMS(obj))) + { + return MIX_RESULT_INVALID_PARAM; + } + + if ((am >= MIX_AUDIOMANAGER_LAST) || (am < MIX_AUDIOMANAGER_NONE)) + { + return MIX_RESULT_INVALID_PARAM; + } + else + { + obj->audio_manager = am; + } + + return MIX_RESULT_SUCCESS; + +} + +MixAudioManager mix_acp_get_audio_manager(MixAudioConfigParams *obj) +{ + if (G_LIKELY(MIX_IS_AUDIOCONFIGPARAMS(obj))) + { + return obj->audio_manager; + } + else + return MIX_AUDIOMANAGER_NONE; +} + + diff --git a/mix_audio/src/mixacpaac.c b/mix_audio/src/mixacpaac.c index 4f83eb9..4b47c3d 100644 --- a/mix_audio/src/mixacpaac.c +++ b/mix_audio/src/mixacpaac.c @@ -178,6 +178,10 @@ gboolean mix_acp_aac_equal(MixParams* first, MixParams *second) return FALSE; } + // If either one of the input is not the same class then forget it. + if (!MIX_IS_AUDIOCONFIGPARAMSAAC(first) || !MIX_IS_AUDIOCONFIGPARAMSAAC(second)) + return FALSE; + // members within this scope equal. chaining up. MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); if (klass->equal) @@ -185,7 +189,7 @@ gboolean mix_acp_aac_equal(MixParams* first, MixParams *second) else ret = TRUE; - if (ret && MIX_IS_AUDIOCONFIGPARAMSAAC(first) && MIX_IS_AUDIOCONFIGPARAMSAAC(second)) + if (ret) { MixAudioConfigParamsAAC *acp1 = MIX_AUDIOCONFIGPARAMSAAC(first); diff --git a/mix_audio/src/mixacpmp3.c b/mix_audio/src/mixacpmp3.c index 75ab8cb..5514a24 100644 --- a/mix_audio/src/mixacpmp3.c +++ b/mix_audio/src/mixacpmp3.c @@ -152,6 +152,9 @@ gboolean mix_acp_mp3_equal(MixParams* first, MixParams *second) return FALSE; } + if (!MIX_IS_AUDIOCONFIGPARAMSMP3(first) || !MIX_IS_AUDIOCONFIGPARAMSMP3(second)) + return FALSE; + // members within this scope equal. chaining up. MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); if (klass->equal) @@ -159,7 +162,7 @@ gboolean mix_acp_mp3_equal(MixParams* first, MixParams *second) else ret = TRUE; - if (ret && MIX_IS_AUDIOCONFIGPARAMSMP3(first) && MIX_IS_AUDIOCONFIGPARAMSMP3(second)) + if (ret) { MixAudioConfigParamsMP3 *acp1 = MIX_AUDIOCONFIGPARAMSMP3(first); MixAudioConfigParamsMP3 *acp2 = MIX_AUDIOCONFIGPARAMSMP3(second); diff --git a/mix_audio/src/mixacpwma.c b/mix_audio/src/mixacpwma.c index cf2590f..9e0db82 100644 --- a/mix_audio/src/mixacpwma.c +++ b/mix_audio/src/mixacpwma.c @@ -159,13 +159,16 @@ gboolean mix_acp_wma_equal(MixParams* first, MixParams *second) return FALSE; } + if (!MIX_IS_AUDIOCONFIGPARAMSWMA(first) || !MIX_IS_AUDIOCONFIGPARAMSWMA(second)) + return FALSE; + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); if (klass->equal) ret = klass->equal(first, second); else ret = TRUE; - if (ret && MIX_IS_AUDIOCONFIGPARAMSWMA(first) && MIX_IS_AUDIOCONFIGPARAMSWMA(second)) + if (ret) { MixAudioConfigParamsWMA *acp1 = MIX_AUDIOCONFIGPARAMSWMA(first); MixAudioConfigParamsWMA *acp2 = MIX_AUDIOCONFIGPARAMSWMA(second); diff --git a/mix_audio/src/mixaudio.c b/mix_audio/src/mixaudio.c index 196a0b0..53d61a5 100644 --- a/mix_audio/src/mixaudio.c +++ b/mix_audio/src/mixaudio.c @@ -59,6 +59,7 @@ #include #include #include "mixaudio.h" +#include "mixacpaac.h" #ifdef AUDIO_MANAGER #include "amhelper.h" @@ -105,7 +106,7 @@ typedef enum { MIX_RESULT mix_audio_initialize_default(MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams); MIX_RESULT mix_audio_configure_default(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams); -MIX_RESULT mix_audio_decode_default(MixAudio *mix, const MixIOVec *iovin, gint 
iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize); +MIX_RESULT mix_audio_decode_default(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt); MIX_RESULT mix_audio_capture_encode_default(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt); MIX_RESULT mix_audio_start_default(MixAudio *mix); MIX_RESULT mix_audio_stop_drop_default(MixAudio *mix); @@ -122,13 +123,13 @@ MIX_RESULT mix_audio_set_volume_default(MixAudio *mix, gint currvol, MixVolType MIX_RESULT mix_audio_deinitialize_default(MixAudio *mix); MIX_RESULT mix_audio_get_stream_state_default(MixAudio *mix, MixStreamState *streamState); MIX_RESULT mix_audio_get_state_default(MixAudio *mix, MixState *state); -MIX_RESULT mix_audio_is_am_available_default(MixAudio *mix, MixAudioManager am, gboolean *avail); -MIX_RESULT mix_audio_get_output_configuration_default(MixAudio *mix, MixAudioConfigParams **audioconfigparams); +MIX_RESULT mix_audio_get_config_default(MixAudio *mix, MixAudioConfigParams **audioconfigparams); static gboolean g_IAM_available = FALSE; MIX_RESULT mix_audio_am_unregister(MixAudio *mix, MixAudioConfigParams *audioconfigparams); MIX_RESULT mix_audio_am_register(MixAudio *mix, MixAudioConfigParams *audioconfigparams); MIX_RESULT mix_audio_AM_Change(MixAudioConfigParams *oldparams, MixAudioConfigParams *newparams); +static MIX_RESULT mix_audio_verify_params(MixAudio *mix, const MixAudioConfigParams *audioconfigparams); static void mix_audio_finalize(GObject *obj); G_DEFINE_TYPE (MixAudio, mix_audio, G_TYPE_OBJECT); @@ -139,8 +140,9 @@ static struct snd_sst_fw_info cur_FW_INFO = {{0}}; static MIX_RESULT mix_audio_FW_INFO(MixAudio *mix); static MIX_RESULT mix_audio_SST_SET_PARAMS(MixAudio *mix, MixAudioConfigParams *params); static MIX_RESULT mix_audio_SST_writev(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize); -static MIX_RESULT mix_audio_SST_STREAM_DECODE(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize); +static MIX_RESULT mix_audio_SST_STREAM_DECODE(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt); static void mix_audio_debug_dump(MixAudio *mix); +static MIX_RESULT mix_audio_is_stream_changed(MixAudio *mix, MixAudioConfigParams *new_params, gboolean *pChanged); static guint g_log_handler=0; static void mix_audio_log(const gchar *log_domain, GLogLevelFlags log_level, const gchar *message, gpointer user_data); @@ -155,7 +157,6 @@ void mix_acp_print_params(MixAudioConfigParams *obj); static void mix_audio_init (MixAudio *self) { - self->useIAM = FALSE; self->streamID = 0; // TODO: Find out the invalid value for stream ID when integrates with IAM. 
self->amStreamID = 0; // TODO: as above self->streamState = MIX_STREAM_NULL; @@ -164,6 +165,7 @@ static void mix_audio_init (MixAudio *self) self->state = MIX_STATE_UNINITIALIZED; self->codecMode = MIX_CODING_INVALID; self->am_registered = FALSE; + self->stream_muted = FALSE; /* private member initialization */ g_static_rec_mutex_init (&self->streamlock); @@ -216,8 +218,7 @@ static void mix_audio_class_init (MixAudioClass *klass) klass->deinitialize = mix_audio_deinitialize_default; klass->get_stream_state = mix_audio_get_stream_state_default; klass->get_state = mix_audio_get_state_default; - klass->is_am_available = mix_audio_is_am_available_default; - klass->get_output_configuration = mix_audio_get_output_configuration_default; + klass->get_config = mix_audio_get_config_default; // Set log handler... if (!g_log_handler) @@ -268,7 +269,7 @@ static void mix_audio_log(const gchar *log_domain, GLogLevelFlags log_level, con // convert bit mask back to index. index = ffs(mask) - 1; - + if ((index<0) || (index >= (sizeof(lognames)/sizeof(lognames[0])))) return; g_get_current_time(&t); @@ -323,88 +324,80 @@ MixAudio *mix_audio_ref(MixAudio *mix) MIX_RESULT mix_audio_initialize_default(MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams) { - MIX_RESULT ret = MIX_RESULT_FAIL; + // API version 0.79. 1/19/2009 + MIX_RESULT ret = MIX_RESULT_SUCCESS; - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - // TODO: parse and process MixAudioInitParams. It is ignored for now. + if ((mode <= MIX_CODING_INVALID) || (mode >= MIX_CODING_LAST)) return MIX_RESULT_INVALID_PARAM; - // initialized must be called with both thread-lock held, so no other operation is allowed. - - // try lock stream thread. If failed, a pending _decode/_encode/_drain is ongoing. - if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE; + // initialized must be called with both thread-lock held, so no other operation is allowed. + + // try lock stream thread. If failed, a pending _decode/_encode/_drain is ongoing. + if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE; - // also lock the control thread lock. - _LOCK(&mix->controllock); + // also lock the control thread lock. + _LOCK(&mix->controllock); - if (mix->state == MIX_STATE_UNINITIALIZED) - { - // Only allowed in uninitialized state. - switch (mode) + if (mix->state != MIX_STATE_UNINITIALIZED) + ret = MIX_RESULT_ALREADY_INIT; + + if (!MIX_SUCCEEDED(ret)) { - case MIX_CODING_DECODE: - case MIX_CODING_ENCODE: - { + _UNLOCK(&mix->controllock); + _UNLOCK(&mix->streamlock); + return ret; + } + // Open device. Same flags to open for decode and encode? 
#ifdef LPESTUB - //g_debug("Reading env var LPESTUB_FILE for data output file.\n"); - //const char* filename = g_getenv("LPESTUB_FILE"); - gchar *filename = NULL; - GError *err = NULL; - const gchar* fn = NULL; - fn = g_getenv("MIX_AUDIO_OUTPUT"); - if (fn) - mix->fileDescriptor = open(fn, O_RDWR|O_CREAT, S_IRUSR|S_IWUSR); - - if (mix->fileDescriptor == -1) - { - mix->fileDescriptor = g_file_open_tmp ("mixaudio.XXXXXX", &filename, &err); - + { + gchar *filename = NULL; + GError *err = NULL; + const gchar* fn = NULL; + fn = g_getenv("MIX_AUDIO_OUTPUT"); + if (fn) mix->fileDescriptor = open(fn, O_RDWR|O_CREAT, S_IRUSR|S_IWUSR); + if (mix->fileDescriptor == -1) + { + mix->fileDescriptor = g_file_open_tmp ("mixaudio.XXXXXX", &filename, &err); if (err) { - g_warning("Oops, cannot open temp file: Error message: %s", err->message); + g_warning("Oops, cannot open temp file: Error message: %s", err->message); } else { - g_debug("Opening %s as output data file.\n", filename); + g_debug("Opening %s instead of %s as output data file.\n", filename, LPE_DEVICE); } - } - else - { + } + else + { g_debug("Opening %s as output data file.\n", fn); - } - if (filename) g_free(filename); + } + if (filename) g_free(filename); + } #else - g_debug("Opening %s\n", LPE_DEVICE); - mix->fileDescriptor = open(LPE_DEVICE, O_RDWR); + g_debug("Calling open(%s)\n", LPE_DEVICE); + mix->fileDescriptor = open(LPE_DEVICE, O_RDWR); + g_debug("open returned %d", mix->fileDescriptor); #endif - if (mix->fileDescriptor != -1) - { - mix->codecMode = mode; - mix->state = MIX_STATE_INITIALIZED; - ret = MIX_RESULT_SUCCESS; - g_debug("open() succeeded. fd=%d", mix->fileDescriptor); - } - else - { - ret = MIX_RESULT_LPE_NOTAVAIL; - } - } - break; - default: - ret = MIX_RESULT_INVALID_PARAM; - break; + + if (mix->fileDescriptor != -1) + { + mix->codecMode = mode; + mix->state = MIX_STATE_INITIALIZED; + ret = MIX_RESULT_SUCCESS; + mix->deviceState = MIX_AUDIO_DEV_OPENED; + } + else + { + ret = MIX_RESULT_LPE_NOTAVAIL; + g_warning("open() failed. Error(0x%08x): %s", errno, strerror(errno)); } - } - else - { - ret = MIX_RESULT_WRONG_STATE; - } - _UNLOCK(&mix->controllock); - _UNLOCK(&mix->streamlock); + _UNLOCK(&mix->controllock); + _UNLOCK(&mix->streamlock); - return ret; + return ret; } gboolean mix_audio_am_is_available(void) @@ -538,18 +531,18 @@ MIX_RESULT mix_audio_SST_SET_PARAMS(MixAudio *mix, MixAudioConfigParams *params) MIX_RESULT mix_audio_get_state_default(MixAudio *mix, MixState *state) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - if (state) + // API version 0.79. 
1/22/2009 + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (G_UNLIKELY(mix == NULL)) return MIX_RESULT_NULL_PTR; + if (state == NULL) return MIX_RESULT_NULL_PTR; + *state = mix->state; - else - ret = MIX_RESULT_NULL_PTR; - return ret; + return ret; } -MIX_RESULT mix_audio_decode_default(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize) +MIX_RESULT mix_audio_decode_default(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt) { MIX_RESULT ret = MIX_RESULT_FAIL; @@ -562,7 +555,7 @@ MIX_RESULT mix_audio_decode_default(MixAudio *mix, const MixIOVec *iovin, gint i if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DIRECTRENDER) ret = mix_audio_SST_writev(mix, iovin, iovincnt, insize); else - ret = mix_audio_SST_STREAM_DECODE(mix, iovin, iovincnt, insize, iovout, iovoutcnt, outsize); + ret = mix_audio_SST_STREAM_DECODE(mix, iovin, iovincnt, insize, iovout, iovoutcnt); _UNLOCK(&mix->streamlock); @@ -599,6 +592,7 @@ MIX_RESULT mix_audio_deinitialize_default(MixAudio *mix) mix->deviceState = MIX_AUDIO_DEV_CLOSED; } mix->state = MIX_STATE_UNINITIALIZED; + mix->stream_muted = FALSE; } mix->bytes_written = 0; @@ -612,21 +606,29 @@ MIX_RESULT mix_audio_deinitialize_default(MixAudio *mix) MIX_RESULT mix_audio_stop_drop_default(MixAudio *mix) { - MIX_RESULT ret = MIX_RESULT_FAIL; + // API version 0.79. 1/22/2009 + MIX_RESULT ret = MIX_RESULT_SUCCESS; + int retVal = 0; - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - _LOCK(&mix->controllock); + _LOCK(&mix->controllock); - if (mix->state != MIX_STATE_CONFIGURED) - _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + // ret should be SUCCESS when this sequence starts... + if (mix->state != MIX_STATE_CONFIGURED) + { + // Not allowing control operation if it is not configured. + ret = MIX_RESULT_NOT_CONFIGURED; + } + else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) + { + // Not allowing control operation if it is configured as DNR. + ret = MIX_RESULT_WRONGMODE; + } - // Will call DROP even if we are already stopped. It is needed to unblock any pending write() call. -// if (mix->streamState == MIX_STREAM_DRAINING) -// ret = MIX_RESULT_WRONG_STATE; -// else - { - int retVal = 0; + if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); + + // Will call DROP even if we are already stopped. It is needed to unblock any pending readv()/write() call that is made after the last STOP_x and before the next START. #ifdef LPESTUB // Not calling ioctl. #else @@ -635,124 +637,146 @@ MIX_RESULT mix_audio_stop_drop_default(MixAudio *mix) g_debug("_DROP returned %d", retVal); #endif - if (!retVal) - { + if (retVal != 0) + { + ret = MIX_RESULT_SYSTEM_ERRNO; + g_warning("Failed to stop stream. Error(0x%08x): %s", errno, strerror(errno)); + } + else + { mix->streamState = MIX_STREAM_STOPPED; ret = MIX_RESULT_SUCCESS; - } - else - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("Failed to stop stream. Error:0x%08x. Unknown stream state.", errno); - } - } + } - _UNLOCK(&mix->controllock); + _UNLOCK(&mix->controllock); - return ret; + return ret; } MIX_RESULT mix_audio_stop_drain_default(MixAudio *mix) { - MIX_RESULT ret = MIX_RESULT_FAIL; - int retVal = 0; + // API version 0.79. 
1/22/2009 + MIX_RESULT ret = MIX_RESULT_SUCCESS; + int retVal = 0; + gboolean doDrain = FALSE; + + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + // No need to lock to check vars that won't be changed in this function - // No need to lock to check vars that won't be changed in this function + // If cannot obtain stream lock meaning there's a pending _decode/_encode and will not proceed. + if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE; - if (g_static_rec_mutex_trylock(&mix->streamlock)) - { - gboolean doDrain = FALSE; + _LOCK(&mix->controllock); - if (mix->state != MIX_STATE_CONFIGURED) - _UNLOCK_RETURN(&mix->streamlock, MIX_RESULT_NOT_CONFIGURED); + // Check unallowed condition - _LOCK(&mix->controllock); + // ret should be SUCCESS when this sequence starts... + if (mix->codecMode != MIX_CODING_DECODE) + { + // Not allowing control operation if it is configure for decode. + ret = MIX_RESULT_WRONGMODE; + } + else if (mix->state != MIX_STATE_CONFIGURED) { - if (mix->streamState == MIX_STREAM_STOPPED) + // Not allowing control operation if it is not configured. + ret = MIX_RESULT_NOT_CONFIGURED; + } + else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) + { + // Not allowing control operation if it is configured as DNR. + ret = MIX_RESULT_WRONGMODE; + } + + // Now check if we need to exit. + if (!MIX_SUCCEEDED(ret)) + { + _UNLOCK(&mix->controllock); + _UNLOCK(&mix->streamlock); + return ret; + } + + if (mix->streamState == MIX_STREAM_STOPPED) + // no need to drain and we already stopped. ret = MIX_RESULT_SUCCESS; - else if ((mix->streamState == MIX_STREAM_DRAINING) || mix->streamState == MIX_STREAM_PAUSED_DRAINING) + else if ((mix->streamState == MIX_STREAM_DRAINING) || mix->streamState == MIX_STREAM_PAUSED_DRAINING) + // Not allowed if we are already draining or PAUSED in draining state. ret = MIX_RESULT_WRONG_STATE; - else - { + else + { doDrain = TRUE; - g_debug("MIX stream is DRAINING"); + g_debug("MIX stream needs DRAINING"); mix->streamState = MIX_STREAM_DRAINING; - } + // Set state to MIX_STREAM_DRAINING and other operations that may change teh streamState has to + // be careful when handling stream with this state. } + + // release the control lock. we only hold the stream lock during this blocking DRAIN call. _UNLOCK(&mix->controllock); if (doDrain) { - // Calling the blocking DRAIN without holding the controllock - // TODO: remove this ifdef when API becomes available. - #ifdef LPESTUB + // Calling the blocking DRAIN holding just the stream lock, without the control lock + +#ifdef LPESTUB - #else - //g_debug("Calling SNDRV_SST_STREAM_DRAIN. fd=0x%08x", mix->fileDescriptor); - //retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_DRAIN); -// g_warning("Calling SNDRV_SST_STREAM_DROP instead of SNDRV_SST_STREAM_DRAIN here since DRAIN is not yet integrated. There may be data loss. fd=%d", mix->fileDescriptor); +#else g_debug("Calling SNDRV_SST_STREAM_DRAIN fd=%d", mix->fileDescriptor); retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_DRAIN); g_debug("_DRAIN returned %d", retVal); - #endif +#endif - if (retVal) - { - _LOCK(&mix->controllock); - if (mix->streamState != MIX_STREAM_STOPPED) - { - // DRAIN could return failed if DROP is called during DRAIN. - // Any state resulting as a failed DRAIN would be error, execpt STOPPED. - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("Failed to drain stream. Error:0x%08x. 
Unknown stream state.", errno); - } - _UNLOCK(&mix->controllock); - } - else - { + // obtain control lock and update state. _LOCK(&mix->controllock); - if ((mix->streamState != MIX_STREAM_DRAINING) && - (mix->streamState != MIX_STREAM_STOPPED)) + + if (retVal != 0) { - // State is changed while in DRAINING. This should not be allowed and is a bug. - g_warning("MIX Internal state error! DRAIN state(%u) changed!",mix->streamState); - ret = MIX_RESULT_FAIL; + if (mix->streamState != MIX_STREAM_STOPPED) + { + // DRAIN could return failed if DROP is called during DRAIN. + // Any state resulting as a failed DRAIN would be error, except STOPPED. + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("Failed to drain stream. Error(0x%08x): %s", errno, strerror(errno)); + } } else { - mix->streamState = MIX_STREAM_STOPPED; - ret = MIX_RESULT_SUCCESS; + if ((mix->streamState != MIX_STREAM_DRAINING) && + (mix->streamState != MIX_STREAM_STOPPED)) + { + // State is changed while in DRAINING. This should not be allowed and is a bug. + g_warning("MIX Internal state error! DRAIN state(%u) changed!",mix->streamState); + ret = MIX_RESULT_FAIL; + } + else + { + mix->streamState = MIX_STREAM_STOPPED; + ret = MIX_RESULT_SUCCESS; + } } + _UNLOCK(&mix->controllock); - } } _UNLOCK(&mix->streamlock); - } - else - { - // Cannot obtain stream lock meaning there's a pending _decode/_encode. - // Will not proceed. - ret = MIX_RESULT_WRONG_STATE; - } return ret; } MIX_RESULT mix_audio_start_default(MixAudio *mix) { + // API version 0.79. 1/22/2009 MIX_RESULT ret = MIX_RESULT_FAIL; - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + if (G_UNLIKELY(mix==NULL)) return MIX_RESULT_NULL_PTR; _LOCK(&mix->controllock); if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + // Not allowed if in DNR mode. if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_WRONGMODE); @@ -774,7 +798,8 @@ MIX_RESULT mix_audio_start_default(MixAudio *mix) retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_START); g_debug("_START returned %d", retVal); #endif - if (retVal) + + if (retVal != 0) { ret = MIX_RESULT_SYSTEM_ERRNO; g_debug("Fail to START. Error:0x%08x. Stream state unchanged.", errno); @@ -823,15 +848,31 @@ MIX_RESULT mix_audio_get_version(guint* major, guint *minor) MIX_RESULT mix_audio_configure_default(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; + // API version 0.79. 1/19/2009 + + MIX_RESULT ret = MIX_RESULT_FAIL; + gboolean changed = FALSE; + MixParams *new_params = NULL; if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - // param checks + g_message("_configure() starts."); + + // input param checks if (!MIX_IS_AUDIOCONFIGPARAMS(audioconfigparams)) return MIX_RESULT_NOT_ACP; if (MIX_ACP_DECODEMODE(audioconfigparams) >= MIX_DECODE_LAST) return MIX_RESULT_INVALID_DECODE_MODE; if (!mix_acp_is_streamname_valid(audioconfigparams)) return MIX_RESULT_INVALID_STREAM_NAME; + // dup a copy. we will need it when configure is successful. + // don't want to find out we can't dup it only after everything is set. + new_params = mix_params_dup(MIX_PARAMS(audioconfigparams)); + if (!MIX_IS_AUDIOCONFIGPARAMS(new_params)) + { + if (MIX_IS_PARAMS(new_params)) mix_params_unref(new_params); + g_error("Failed to duplicate input MixAudioConfigParams!"); + return MIX_RESULT_FAIL; + } + // If we cannot lock stream thread, data is flowing and we can't configure. 
if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE; @@ -839,23 +880,20 @@ MIX_RESULT mix_audio_configure_default(MixAudio *mix, MixAudioConfigParams *audi // Check all unallowed conditions if (mix->state == MIX_STATE_UNINITIALIZED) + { ret = MIX_RESULT_NOT_INIT; // Will not allowed if the state is still UNINITIALIZED - else if ((mix->codecMode != MIX_CODING_DECODE) && (mix->codecMode != MIX_CODING_ENCODE)) - ret = MIX_RESULT_WRONGMODE; // This configure is allowed only in DECODE mode. - else if ((mix->streamState != MIX_STREAM_STOPPED) && (mix->streamState != MIX_STREAM_NULL)) - ret = MIX_RESULT_WRONG_STATE; + goto _configure_done; + } - if (!MIX_SUCCEEDED(ret)) + if ((mix->streamState != MIX_STREAM_STOPPED) && (mix->streamState != MIX_STREAM_NULL)) { - // Some check failed. Unlock and return. - _UNLOCK(&mix->controllock); - _UNLOCK(&mix->streamlock); - return ret; + ret = MIX_RESULT_WRONG_STATE; + goto _configure_done; } - if (audioconfigparams->audio_manager == MIX_AUDIOMANAGER_INTELAUDIOMANAGER) { - mix->useIAM = TRUE; - } + ret = mix_audio_verify_params(mix, audioconfigparams); + if (!MIX_SUCCEEDED(ret)) goto _configure_done; + // now configure stream. #ifdef AUDIO_MANAGER @@ -863,102 +901,131 @@ MIX_RESULT mix_audio_configure_default(MixAudio *mix, MixAudioConfigParams *audi #else ret = MIX_RESULT_SUCCESS; #endif + if (!MIX_SUCCEEDED(ret)) goto _configure_failed; - if (MIX_SUCCEEDED(ret)) + ret = mix_audio_is_stream_changed(mix, audioconfigparams, &changed); + if (!MIX_SUCCEEDED(ret)) goto _configure_failed; + + if (changed) { ret = mix_audio_SST_SET_PARAMS(mix, audioconfigparams); + if (!MIX_SUCCEEDED(ret)) goto _configure_failed; } - - if (MIX_SUCCEEDED(ret)) + else { + g_message("No stream change is detected. Not calling SET_PARAMS."); + } + #ifdef AUDIO_MANAGER - ret = mix_audio_am_register(mix, audioconfigparams); + ret = mix_audio_am_register(mix, audioconfigparams); #else ret = MIX_RESULT_SUCCESS; #endif - } + if (!MIX_SUCCEEDED(ret)) goto _configure_failed; - if (MIX_SUCCEEDED(ret)) - { - mix->state = MIX_STATE_CONFIGURED; - } - else - { - mix->state = MIX_STATE_INITIALIZED; - } + ret = MIX_RESULT_SUCCESS; - _UNLOCK(&mix->controllock); - _UNLOCK(&mix->streamlock); +_configure_failed: - return ret; + if (mix->audioconfigparams) + { + mix_acp_unref(mix->audioconfigparams); + mix->audioconfigparams=NULL; + } + + if (MIX_SUCCEEDED(ret)) + { + mix->state = MIX_STATE_CONFIGURED; + mix->audioconfigparams = MIX_AUDIOCONFIGPARAMS(new_params); + new_params = NULL; + } + else + { + // attempt to configure has failed. Revert state back to INITIALIZED only. + mix->state = MIX_STATE_INITIALIZED; + mix->streamState = MIX_STREAM_STOPPED; + // No need to un-set SST PARAM. not supported really. + } + +_configure_done: + + _UNLOCK(&mix->controllock); + _UNLOCK(&mix->streamlock); + + if (new_params) + { + mix_params_unref(new_params); + } + + g_message("_configure() done returning 0x%08x.", ret); + + return ret; } MIX_RESULT mix_audio_get_timestamp_default(MixAudio *mix, guint64 *msecs) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + // API version 0.79. 1/22/2009 + MIX_RESULT ret = MIX_RESULT_SUCCESS; + unsigned long long ts = 0; + int retVal = 0; + + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + if (!msecs) return MIX_RESULT_NULL_PTR; - if (!msecs) return MIX_RESULT_NULL_PTR; + _LOCK(&mix->controllock); - _LOCK(&mix->controllock); + // ret should be SUCCESS when this sequence starts... 
+ if (mix->codecMode != MIX_CODING_DECODE) + ret = MIX_RESULT_WRONGMODE; + else if (mix->state != MIX_STATE_CONFIGURED) + ret = MIX_RESULT_NOT_CONFIGURED; + else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) + ret = MIX_RESULT_WRONGMODE; - if (mix->state == MIX_STATE_CONFIGURED) - { - if ((mix->codecMode == MIX_CODING_DECODE) && (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN)) - { - ret = MIX_RESULT_WRONGMODE; - } - else { + // Now check if we need to exit. + if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - unsigned long long ts = 0; - int retVal = 0; #ifdef LPESTUB - // For stubbing, just get system clock. - if (MIX_ACP_BITRATE(mix->audioconfigparams) > 0) - { + // For stubbing, just get system clock. + if (MIX_ACP_BITRATE(mix->audioconfigparams) > 0) + { // use bytes_written and bitrate // to get times in msec. ts = mix->bytes_written * 8000 / MIX_ACP_BITRATE(mix->audioconfigparams); - } - else if (mix->ts_last) - { + } + else if (mix->ts_last) + { GTimeVal tval = {0}; g_get_current_time(&tval); ts = 1000ll * tval.tv_sec + tval.tv_usec / 1000; ts -= mix->ts_last; ts += mix->ts_elapsed; - } - else - { + } + else + { ts = 0; - } + } #else - g_debug("Calling SNDRV_SST_STREAM_GET_TSTAMP. fd=%d", mix->fileDescriptor); - ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_GET_TSTAMP, &ts); + g_debug("Calling _GET_TSTAMP. fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_GET_TSTAMP, &ts); + g_debug("_GET_TSTAMP returned %d. timestamp=%" G_GUINT64_FORMAT, retVal, ts); #endif - if (retVal) - { + if (retVal != 0) + { ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("_GET_TSTAMP failed. Error:0x%08x", errno); - //ret = MIX_RESULT_FAIL; + g_debug("_GET_TSTAMP failed. Error(0x%08x): %s", errno, strerror(errno)); mix_audio_debug_dump(mix); - } - else - { + } + else + { *msecs = ts; - g_debug("_GET_TSTAMP returned %" G_GUINT64_FORMAT, ts); - } } - } - else - ret = MIX_RESULT_NOT_CONFIGURED; - _UNLOCK(&mix->controllock); + _UNLOCK(&mix->controllock); - return ret; + return ret; } #ifdef AUDIO_MANAGER @@ -986,8 +1053,9 @@ MIX_RESULT mix_audio_am_unregister(MixAudio *mix, MixAudioConfigParams *audiocon // decode mode change. 
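/*
 * Worked example for the stubbed timestamp estimate in _get_timestamp()
 * above (ts = bytes_written * 8000 / bitrate): at 128000 bps, having
 * written 1600000 bytes gives 1600000 * 8000 / 128000 = 100000 msec,
 * i.e. 100 seconds of audio. The single factor of 8000 converts bytes
 * to bits (x8) and seconds to milliseconds (x1000).
 */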
if (mix->amStreamID > 0) { if (lpe_stream_unregister(mix->amStreamID) != 0) { - return MIX_RESULT_FAIL; + return MIX_RESULT_AM_UNREGISTER_FAIL; } + mix->amStreamID = 0; mix->am_registered = FALSE; } } @@ -1016,15 +1084,19 @@ MIX_RESULT mix_audio_am_register(MixAudio *mix, MixAudioConfigParams *audioconfi return MIX_RESULT_FAIL; // if AM is enable, and not_registered, then register - if (mix->useIAM && !mix->am_registered) { + if ((audioconfigparams->audio_manager == MIX_AUDIOMANAGER_INTELAUDIOMANAGER) && !mix->am_registered) { +// if (!mix->am_registered) { // TODO: remove this and uncomment line above + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "audio_manager=MIX_AUDIOMANAGER_INTELAUDIOMANAGER and !am_registered\n"); gint32 amStreamID = lpe_stream_register(mix->streamID, "music", audioconfigparams->stream_name, codec_mode); if (amStreamID == -1){ mix->amStreamID = 0; - return MIX_RESULT_FAIL; + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "AM register failed: general error\n"); + return MIX_RESULT_AM_REGISTER_FAIL; } - else if (amStreamID == -2) { // -2: Direct render not avail, see AM spec + else if (amStreamID == -2) { // -2: Direct render not avail, see AM spec mix->amStreamID = 0; + mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "AM register failed: Direct render not available\n"); return MIX_RESULT_DIRECT_NOTAVAIL; } mix->am_registered = TRUE; @@ -1058,33 +1130,41 @@ MIX_RESULT mix_audio_capture_encode_default(MixAudio *mix, MixIOVec *iovout, gin for (i=0; i < iovoutcnt; i++) { vec[i].iov_base = iovout[i].data; - vec[i].iov_len = iovout[i].size; + vec[i].iov_len = iovout[i].buffer_size; + iovout[i].data_size = 0; } mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "begin readv()\n"); bytes_read = readv(mix->fileDescriptor, vec, iovoutcnt); mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "end readv(), return: %d\n", bytes_read); - if (bytes_read < 0) { // TODO: should not be 0, but driver return 0 right now + if (bytes_read < 1) { mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_ERROR, "return: %d\n", bytes_read); return MIX_RESULT_FAIL; } -/* - gint bytes_count=0; - for (i=0; i < iovoutcnt; i++) + + i=0; + while (bytes_read > 0) { - bytes_count += iovout[i].size; + if (bytes_read > iovout[i].buffer_size) { + iovout[i].data_size = iovout[i].buffer_size; + bytes_read = bytes_read - iovout[i].buffer_size; + } + else { + iovout[i].data_size = bytes_read; + bytes_read = 0; + } + i++; } - iovout[i].size = bytes_read - bytes_count; -*/ + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_audio_get_max_vol_default(MixAudio *mix, gint *maxvol) { - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - + // API version 0.79. 1/22/2009 MIX_RESULT ret = MIX_RESULT_SUCCESS; + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; if (!maxvol) return MIX_RESULT_NULL_PTR; _LOCK(&mix->controllock); @@ -1106,11 +1186,11 @@ MIX_RESULT mix_audio_get_max_vol_default(MixAudio *mix, gint *maxvol) MIX_RESULT mix_audio_get_min_vol_default(MixAudio *mix, gint *minvol) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - +{ + // API version 0.79. 1/22/2009 MIX_RESULT ret = MIX_RESULT_SUCCESS; + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; if (!minvol) return MIX_RESULT_NULL_PTR; _LOCK(&mix->controllock); @@ -1132,57 +1212,73 @@ MIX_RESULT mix_audio_get_min_vol_default(MixAudio *mix, gint *minvol) MIX_RESULT mix_audio_get_stream_state_default(MixAudio *mix, MixStreamState *streamState) { - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + // API version 0.79. 
1/22/2009 + MIX_RESULT ret = MIX_RESULT_SUCCESS; - if (!streamState) return MIX_RESULT_NULL_PTR; + if (G_UNLIKELY(mix==NULL)) return MIX_RESULT_NULL_PTR; + if (streamState == NULL) return MIX_RESULT_NULL_PTR; - _LOCK(&mix->controllock); + _LOCK(&mix->controllock); - if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + if (mix->state != MIX_STATE_CONFIGURED) + ret = MIX_RESULT_NOT_CONFIGURED; + else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) + ret = MIX_RESULT_WRONGMODE; - // PAUSED_DRAINING is internal state. - if (mix->streamState == MIX_STREAM_PAUSED_DRAINING) - *streamState = MIX_STREAM_PAUSED; - else - *streamState = mix->streamState; + // Now check if we need to exit. + if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - _UNLOCK(&mix->controllock); + // PAUSED_DRAINING is internal state. + if (mix->streamState == MIX_STREAM_PAUSED_DRAINING) + *streamState = MIX_STREAM_PAUSED; + else + *streamState = mix->streamState; - return MIX_RESULT_SUCCESS; + _UNLOCK(&mix->controllock); + + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_audio_get_volume_default(MixAudio *mix, gint *currvol, MixVolType type) { - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - struct snd_sst_vol vol = {0}; + // API version 0.79. 1/22/2009 + MIX_RESULT ret = MIX_RESULT_SUCCESS; + struct snd_sst_vol vol = {0}; + int retVal = 0; + + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + if (!currvol) return MIX_RESULT_NULL_PTR; + if ((type != MIX_VOL_PERCENT) && (type != MIX_VOL_DECIBELS)) return MIX_RESULT_INVALID_PARAM; - if (!currvol) return MIX_RESULT_NULL_PTR; - if ((type != MIX_VOL_PERCENT) && (type != MIX_VOL_DECIBELS)) return MIX_RESULT_INVALID_PARAM; + _LOCK(&mix->controllock); - _LOCK(&mix->controllock); + // ret should be SUCCESS when this sequence starts... + if (mix->codecMode != MIX_CODING_DECODE) + ret = MIX_RESULT_WRONGMODE; + else if (mix->state != MIX_STATE_CONFIGURED) + ret = MIX_RESULT_NOT_CONFIGURED; + else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) + ret = MIX_RESULT_WRONGMODE; - if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + // Now check if we need to exit. + if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - vol.stream_id = mix->streamID; + vol.stream_id = mix->streamID; - int retVal = 0; #ifdef LPESTUB // Not calling. #else - g_debug("Calling SNDRV_SST_GET_VOL. fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_GET_VOL, &vol); - g_debug("SNDRV_SST_GET_VOL returned %d. vol=%d", retVal, vol.volume); + g_debug("Calling _GET_VOL. fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_GET_VOL, &vol); + g_debug("_GET_VOL returned %d. vol=%d", retVal, vol.volume); #endif - if (retVal) + if (retVal != 0) { ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("_GET_VOL failed. Error:0x%08x", errno); + g_debug("_GET_VOL failed. Error(0x%08x): %s", errno, strerror(errno)); mix_audio_debug_dump(mix); } else @@ -1206,42 +1302,77 @@ MIX_RESULT mix_audio_get_volume_default(MixAudio *mix, gint *currvol, MixVolType MIX_RESULT mix_audio_get_mute_default(MixAudio *mix, gboolean* muted) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - return ret; + // API version 0.79. 
1/22/2009 + MIX_RESULT ret = MIX_RESULT_SUCCESS; + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + if (muted == NULL) return MIX_RESULT_NULL_PTR; + + _LOCK(&mix->controllock); + + // ret should be SUCCESS when this sequence starts... + if (mix->codecMode != MIX_CODING_DECODE) + ret = MIX_RESULT_WRONGMODE; + else if (mix->state != MIX_STATE_CONFIGURED) + ret = MIX_RESULT_NOT_CONFIGURED; + else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) + ret = MIX_RESULT_WRONGMODE; + + // Now check if we need to exit. + if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); + + *muted = mix->stream_muted; + + _UNLOCK(&mix->controllock); + + return ret; } MIX_RESULT mix_audio_set_mute_default(MixAudio *mix, gboolean mute) { - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + // API version 0.79. 1/22/2009 + MIX_RESULT ret = MIX_RESULT_SUCCESS; + int retVal = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - struct snd_sst_mute m = { 0 }; + struct snd_sst_mute m = { 0 }; - if (mute) m.mute = 1; - else m.mute = 0; + if (mute) m.mute = 1; + else m.mute = 0; - _LOCK(&mix->controllock); + _LOCK(&mix->controllock); - if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + // ret should be SUCCESS when this sequence starts... + if (mix->codecMode != MIX_CODING_DECODE) + ret = MIX_RESULT_WRONGMODE; + else if (mix->state != MIX_STATE_CONFIGURED) + ret = MIX_RESULT_NOT_CONFIGURED; + else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) + ret = MIX_RESULT_WRONGMODE; - m.stream_id = mix->streamID; + // Now check if we need to exit. + if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - int retVal = 0; + m.stream_id = mix->streamID; #ifdef LPESTUB // Not calling. #else - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_MUTE, &m); + g_debug("Calling _MUTE. fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_MUTE, &m); + g_debug("_MUTE returned %d", retVal); #endif - if (retVal) - { - //ret = MIX_RESULT_FAIL; - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("_MUTE failed. Error:0x%08x", errno); - mix_audio_debug_dump(mix); - } + if (retVal != 0) + { + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("_MUTE failed. Error(0x%08x): %s", errno, strerror(errno)); + mix_audio_debug_dump(mix); + } + else + { + mix->stream_muted = mute; + } _UNLOCK(&mix->controllock); @@ -1250,38 +1381,51 @@ MIX_RESULT mix_audio_set_mute_default(MixAudio *mix, gboolean mute) MIX_RESULT mix_audio_pause_default(MixAudio *mix) { - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MIX_RESULT ret = MIX_RESULT_SUCCESS; + // API version 0.79. 1/22/2009 + MIX_RESULT ret = MIX_RESULT_SUCCESS; + int retVal = 0; - _LOCK(&mix->controllock); + if (G_UNLIKELY(mix==NULL)) return MIX_RESULT_NULL_PTR; - if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + _LOCK(&mix->controllock); - if (mix->streamState == MIX_STREAM_PAUSED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_SUCCESS); + // ret should be SUCCESS when this sequence starts... 
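/*
 * Stream state transitions handled by _pause()/_resume() below:
 *
 *   PLAYING  --pause--> PAUSED           --resume--> PLAYING
 *   DRAINING --pause--> PAUSED_DRAINING  --resume--> DRAINING
 *
 * PAUSED_DRAINING is internal only; _get_stream_state() above reports
 * it to callers as PAUSED.
 */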
+ if (mix->codecMode != MIX_CODING_DECODE) + ret = MIX_RESULT_WRONGMODE; + else if (mix->state != MIX_STATE_CONFIGURED) + ret = MIX_RESULT_NOT_CONFIGURED; + else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) + ret = MIX_RESULT_WRONGMODE; + else if ((mix->streamState != MIX_STREAM_PLAYING) && (mix->streamState != MIX_STREAM_DRAINING)) + ret = MIX_RESULT_WRONG_STATE; - if ((mix->streamState != MIX_STREAM_PLAYING) && (mix->streamState != MIX_STREAM_DRAINING)) - _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_WRONG_STATE); + // Now check if we need to exit. + if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - int retVal = 0; + // If stream is paused, return success. + if ((mix->streamState == MIX_STREAM_PAUSED) || + (mix->streamState == MIX_STREAM_PAUSED_DRAINING)) + { + g_debug("Stream already paused."); + _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_SUCCESS); + } #ifdef LPESTUB // Not calling #else - g_debug("Calling SNDRV_SST_STREAM_PAUSE. fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_PAUSE); - g_debug("_PAUSE returned %d", retVal); + g_debug("Calling SNDRV_SST_STREAM_PAUSE. fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_PAUSE); + g_debug("_PAUSE returned %d", retVal); #endif - if (retVal) + if (retVal != 0) { if (mix->streamState == MIX_STREAM_DRAINING) { - // if stream state has been DRAINING, DRAIN could become successful during the PAUSE call, but not yet have chance to update streamState since we now hold the lock. + // if stream state has been DRAINING, DRAIN could become successful during the PAUSE call, but not yet have chance to update streamState since we now hold the control lock. // In this case, the mix_streamState becomes out-of-sync with the actual playback state. PAUSE failed due to stream already STOPPED but mix->streamState remains at "DRAINING" // On the other hand, we can't let DRAIN hold the lock the entire time. // We would not know if we fail PAUSE due to DRAINING, or a valid reason. - // Need a better mechanism to sync DRAINING. // DRAINING is not likely problem for resume, as long as the PAUSED state is set when stream is really PAUSED. ret = MIX_RESULT_NEED_RETRY; g_warning("PAUSE failed while DRAINING. Draining could be just completed. Retry needed."); @@ -1296,13 +1440,16 @@ MIX_RESULT mix_audio_pause_default(MixAudio *mix) else { if (mix->streamState == MIX_STREAM_DRAINING) - { mix->streamState = MIX_STREAM_PAUSED_DRAINING; - } else - { mix->streamState = MIX_STREAM_PAUSED; + +#ifdef AUDIO_MANAGER + if (lpe_stream_notify_pause(mix->amStreamID) != 0) + { + ret = MIX_RESULT_AM_NOTIFY_PAUSE_FAIL; } +#endif } _UNLOCK(&mix->controllock); @@ -1322,41 +1469,61 @@ MIX_RESULT mix_audio_pause_default(MixAudio *mix) MIX_RESULT mix_audio_resume_default(MixAudio *mix) { - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + // API version 0.79. 1/22/2009 + MIX_RESULT ret = MIX_RESULT_SUCCESS; + int retVal = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - _LOCK(&mix->controllock); + _LOCK(&mix->controllock); - if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + // ret should be SUCCESS when this sequence starts... 
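/*
 * Caller-side handling of the PAUSE/DRAIN race described in _pause()
 * above: MIX_RESULT_NEED_RETRY means the drain may have just completed,
 * so the call is simply retried. A minimal sketch (hypothetical caller
 * code, not part of this patch):
 *
 *   MIX_RESULT r;
 *   int tries = 3;
 *   do {
 *       r = mix_audio_pause(mix);
 *   } while ((r == MIX_RESULT_NEED_RETRY) && (--tries > 0));
 */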
+ if (mix->codecMode != MIX_CODING_DECODE) + ret = MIX_RESULT_WRONGMODE; + else if (mix->state != MIX_STATE_CONFIGURED) + ret = MIX_RESULT_NOT_CONFIGURED; + else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) + ret = MIX_RESULT_WRONGMODE; + else if ((mix->streamState != MIX_STREAM_PAUSED) && (mix->streamState != MIX_STREAM_PAUSED_DRAINING)) + ret = MIX_RESULT_WRONG_STATE; - if ((mix->streamState == MIX_STREAM_PLAYING) || (mix->streamState == MIX_STREAM_DRAINING)) - _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_SUCCESS); - - if ((mix->streamState != MIX_STREAM_PAUSED_DRAINING) && (mix->streamState != MIX_STREAM_PAUSED)) - _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_WRONG_STATE); + // Now check if we need to exit. + if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - int retVal = 0; + // If stream is paused, return success. + if ((mix->streamState == MIX_STREAM_PLAYING) || + (mix->streamState == MIX_STREAM_DRAINING)) + { + g_debug("Stream already playing."); + _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_SUCCESS); + } #ifdef LPESTUB // Not calling #else - g_debug("Calling SNDRV_SST_STREAM_RESUME"); + g_debug("Calling SNDRV_SST_STREAM_RESUME. fd=%d", mix->fileDescriptor); retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_RESUME); g_debug("_STREAM_RESUME returned %d", retVal); #endif - if (retVal) + if (retVal != 0) { ret = MIX_RESULT_SYSTEM_ERRNO; g_debug("_PAUSE failed. Error:0x%08x", errno); mix_audio_debug_dump(mix); } + else { if (mix->streamState == MIX_STREAM_PAUSED_DRAINING) mix->streamState = MIX_STREAM_DRAINING; else mix->streamState = MIX_STREAM_PLAYING; + +#ifdef AUDIO_MANAGER + if (lpe_stream_notify_resume(mix->amStreamID) != 0) { + ret = MIX_RESULT_AM_NOTIFY_RESUME_FAIL; + } +#endif } _UNLOCK(&mix->controllock); @@ -1376,57 +1543,63 @@ MIX_RESULT mix_audio_resume_default(MixAudio *mix) MIX_RESULT mix_audio_set_volume_default(MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype) { - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - struct snd_sst_vol vol = {0}; - - vol.ramp_duration = msecs; - vol.ramp_type = ramptype; // TODO: confirm the mappings between Mix and SST. - - if (!mix) return MIX_RESULT_NULL_PTR; - - if ((type != MIX_VOL_PERCENT) && (type != MIX_VOL_DECIBELS)) return MIX_RESULT_INVALID_PARAM; - - _LOCK(&mix->controllock); + // API version 0.79. 1/22/2009 + MIX_RESULT ret = MIX_RESULT_SUCCESS; + int retVal = 0; + struct snd_sst_vol vol = { + .ramp_duration = msecs, + .ramp_type = ramptype + }; + + if (G_UNLIKELY(mix == NULL)) return MIX_RESULT_NULL_PTR; + if ((type != MIX_VOL_PERCENT) && (type != MIX_VOL_DECIBELS)) return MIX_RESULT_INVALID_PARAM; + + _LOCK(&mix->controllock); - if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); + // ret should be SUCCESS when this sequence starts... + if (mix->codecMode != MIX_CODING_DECODE) + ret = MIX_RESULT_WRONGMODE; + else if (mix->state != MIX_STATE_CONFIGURED) + ret = MIX_RESULT_NOT_CONFIGURED; + else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) + ret = MIX_RESULT_WRONGMODE; - vol.stream_id = mix->streamID; + // Now check if we need to exit. 
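/*
 * Percent-to-raw conversion used below: vol.volume = currvol * maxvol / 100.
 * For example, with a hypothetical maxvol of 96 reported by
 * mix_audio_get_max_vol(), requesting 50 percent yields 50 * 96 / 100 = 48.
 * MIX_VOL_DECIBELS values are passed through to the driver unscaled.
 */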
+ if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - if (type == MIX_VOL_DECIBELS) - { - vol.volume = currvol; - } - else - { - gint maxvol = 0; - ret = mix_audio_get_max_vol(mix, &maxvol); - - if (!maxvol) - g_critical("Max Vol is 0!"); + vol.stream_id = mix->streamID; - if (MIX_SUCCEEDED(ret)) + if (type == MIX_VOL_DECIBELS) { - vol.volume = currvol * maxvol / 100; + vol.volume = currvol; + } + else + { + gint maxvol = 0; + ret = mix_audio_get_max_vol(mix, &maxvol); + + if (!maxvol) + g_critical("Max Vol is 0!"); + + if (MIX_SUCCEEDED(ret)) + { + vol.volume = currvol * maxvol / 100; + } } - } - int retVal = 0; #ifdef LPESTUB // Not calling #else - g_debug("calling SNDRV_SST_SET_VOL vol=%d", vol.volume); + g_debug("calling _SET_VOL vol=%d", vol.volume); retVal = ioctl(mix->fileDescriptor, SNDRV_SST_SET_VOL, &vol); - g_debug("SNDRV_SST_SET_VOL returned %d", retVal); + g_debug("_SET_VOL returned %d", retVal); #endif - if (retVal) + if (retVal != 0) { ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("_SET_VOL failed. Error:0x%08x", errno); + g_debug("_SET_VOL failed. Error(0x%08x): %s", errno, strerror(errno)); mix_audio_debug_dump(mix); } @@ -1496,7 +1669,7 @@ static MIX_RESULT mix_audio_SST_writev(MixAudio *mix, const MixIOVec *iovin, gin for (i=0;i iovout[i].buffer_size) + { + iovout[i].data_size = iovout[i].buffer_size; + produced -= iovout[i].data_size; + } + else + { + iovout[i].data_size = produced; + produced = 0; + } + } + else + { + iovout[i].data_size = 0; + } + } + produced = 0; + for (i=0;iconfigure(mix, audioconfigparams, drmparams); } -MIX_RESULT mix_audio_decode(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize) +MIX_RESULT mix_audio_decode(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt) { if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; @@ -1710,7 +1911,7 @@ MIX_RESULT mix_audio_decode(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, if (!klass->decode) return MIX_RESULT_FAIL; - return klass->decode(mix, iovin, iovincnt, insize, iovout, iovoutcnt, outsize); + return klass->decode(mix, iovin, iovincnt, insize, iovout, iovoutcnt); } MIX_RESULT mix_audio_capture_encode(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt) @@ -1905,7 +2106,7 @@ MIX_RESULT mix_audio_get_state(MixAudio *mix, MixState *state) return klass->get_state(mix, state); } -MIX_RESULT mix_audio_is_am_available_default(MixAudio *mix, MixAudioManager am, gboolean *avail) +MIX_RESULT mix_audio_is_am_available(MixAudioManager am, gboolean *avail) { MIX_RESULT ret = MIX_RESULT_SUCCESS; @@ -1917,16 +2118,6 @@ MIX_RESULT mix_audio_is_am_available_default(MixAudio *mix, MixAudioManager am, return ret; } -MIX_RESULT mix_audio_is_am_available(MixAudio *mix, MixAudioManager am, gboolean *avail) -{ - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->is_am_available) - return MIX_RESULT_FAIL; - - return klass->is_am_available(mix, am, avail); -} - const gchar* dbgstr_UNKNOWN="UNKNOWN"; static const gchar* _mix_stream_state_get_name (MixStreamState s) @@ -2046,20 +2237,21 @@ void mix_audio_debug_dump(MixAudio *mix) return; } -MIX_RESULT mix_audio_get_output_configuration(MixAudio *mix, MixAudioConfigParams **audioconfigparams) +MIX_RESULT mix_audio_get_config(MixAudio *mix, MixAudioConfigParams **audioconfigparams) { if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - if (!klass->get_output_configuration) + if 
(!klass->get_config) return MIX_RESULT_FAIL; - return klass->get_output_configuration(mix, audioconfigparams); + return klass->get_config(mix, audioconfigparams); } -MIX_RESULT mix_audio_get_output_configuration_default(MixAudio *mix, MixAudioConfigParams **audioconfigparams) +MIX_RESULT mix_audio_get_config_default(MixAudio *mix, MixAudioConfigParams **audioconfigparams) { + // API version 0.79. 1/22/2009 MIX_RESULT ret = MIX_RESULT_SUCCESS; struct snd_sst_get_stream_params stream_params = {{0}}; MixAudioConfigParams *p = NULL; @@ -2069,7 +2261,8 @@ MIX_RESULT mix_audio_get_output_configuration_default(MixAudio *mix, MixAudioCon _LOCK(&mix->controllock); - if (mix->state <= MIX_STATE_UNINITIALIZED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_INIT); + // _get_config is called only after it is configured. + if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); #ifdef LPESTUB #else @@ -2079,9 +2272,7 @@ MIX_RESULT mix_audio_get_output_configuration_default(MixAudio *mix, MixAudioCon g_debug("_GET_PARAMS returned %d", retVal); #endif - _UNLOCK(&mix->controllock); - - if (retVal) + if (retVal != 0) { ret = MIX_RESULT_SYSTEM_ERRNO; g_debug("Failed to GET_PARAMS. errno:0x%08x. %s\n", errno, strerror(errno)); @@ -2089,14 +2280,232 @@ MIX_RESULT mix_audio_get_output_configuration_default(MixAudio *mix, MixAudioCon else { p = mix_sst_params_to_acp(&stream_params); + if (p != NULL) + { + if (mix->audioconfigparams != NULL) + { + // Complete the return structure with fields that are not returned from the SST. + MIX_ACP_DECODEMODE(p) = MIX_ACP_DECODEMODE(mix->audioconfigparams); + + gchar *sn = mix_acp_get_streamname(mix->audioconfigparams); + if (sn != NULL) + { + if (!MIX_SUCCEEDED(mix_acp_set_streamname(MIX_AUDIOCONFIGPARAMS(p), sn))) + { + g_error("Failed to set stream name!"); + } + g_free(sn); + } + + if (!MIX_SUCCEEDED(mix_acp_set_audio_manager(MIX_AUDIOCONFIGPARAMS(p), mix_acp_get_audio_manager(mix->audioconfigparams)))) + { + g_error("Failed to set audio manager!"); + } + + // remove existing copy of audioconfigparams and copy this return struct. + mix_acp_unref(mix->audioconfigparams); + mix->audioconfigparams = NULL; + } + + mix->audioconfigparams = MIX_AUDIOCONFIGPARAMS(mix_params_dup(MIX_PARAMS(p))); + } *audioconfigparams = p; } + _UNLOCK(&mix->controllock); + + return ret; +} + +MIX_RESULT mix_audio_get_bytes_decoded(MixAudio *mix, guint64 *byte) +{ + if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; + + MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); + + if (!klass->get_bytes_decoded) + return MIX_RESULT_FAIL; + + return klass->get_bytes_decoded(mix, byte); +} + +MIX_RESULT mix_audio_get_bytes_decoded_default(MixAudio *mix, guint64 *bytecount) +{ + // API version 0.79. 1/22/2009 + MIX_RESULT ret = MIX_RESULT_SUCCESS; + unsigned long long bytes_decoded = 0; + int retVal = 0; + + if ((G_UNLIKELY(!mix)) || (bytecount == NULL)) return MIX_RESULT_NULL_PTR; + + _LOCK(&mix->controllock); + + // ret should be SUCCESS when this sequence starts... + if (mix->codecMode != MIX_CODING_DECODE) + ret = MIX_RESULT_WRONGMODE; + else if (mix->state != MIX_STATE_CONFIGURED) + ret = MIX_RESULT_NOT_CONFIGURED; + else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) + ret = MIX_RESULT_WRONGMODE; + + // Now check if we need to exit. + if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); + + +#ifdef LPESTUB +#else + g_debug("Calling _STREAM_BYTES_DECODED. 
fd=%d", mix->fileDescriptor); + retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_BYTES_DECODED, &bytes_decoded); + g_debug("_STREAM_BYTES_DECODED returned %d. Bytes decoded=%" G_GUINT64_FORMAT, retVal, bytes_decoded); +#endif + + if (retVal != 0) + { + ret = MIX_RESULT_SYSTEM_ERRNO; + g_debug("_STREAM_BYTES_DECODED failed. Error(0x%08x): %s", errno, strerror(errno)); + mix_audio_debug_dump(mix); + } + else + { + *bytecount = bytes_decoded; + } + + _UNLOCK(&mix->controllock); + return ret; } -MIX_RESULT mix_audio_get_stream_byte_decoded(MixAudio *mix, guint64 *byte) +static MIX_RESULT mix_audio_verify_params(MixAudio *mix, const MixAudioConfigParams *audioconfigparams) { - return MIX_RESULT_NOT_SUPPORTED; + MIX_RESULT ret = MIX_RESULT_FAIL; + + if (G_UNLIKELY(!mix)) + { + g_error("Null pointer passed to internal function!"); + return MIX_RESULT_NULL_PTR; + } + + g_message("_verify_params() starts."); + + switch (mix->codecMode) + { + case MIX_CODING_DECODE: + { + if ((MIX_ACP_DECODEMODE(audioconfigparams) != MIX_DECODE_DIRECTRENDER) && + (MIX_ACP_DECODEMODE(audioconfigparams) != MIX_DECODE_DECODERETURN)) + { + ret = MIX_RESULT_CONF_MISMATCH; + goto verify_params_done; + } + break; + } + case MIX_CODING_ENCODE: + { + if (!MIX_IS_AUDIOCONFIGPARAMSAAC(audioconfigparams)) + { + ret = MIX_RESULT_CODEC_NOTSUPPORTED; + goto verify_params_done; + } + } + default: + break; + } + + ret = MIX_RESULT_SUCCESS; + +verify_params_done: + + g_message("_verify_params() done. Returning 0x%08x.", ret); + + return ret; } +static MIX_RESULT mix_audio_is_stream_changed(MixAudio *mix, MixAudioConfigParams *new_params, gboolean *pChanged) +{ + MIX_RESULT ret = MIX_RESULT_FAIL; + + gboolean changed = FALSE; + MixParams *old_mask = NULL; + MixParams *new_mask = NULL; + + g_message("is_stream_changed() starts"); + + if ((mix == NULL) || (new_params == NULL) || (pChanged == NULL)) + { + g_error("Null pointer passed to internal function!"); + return MIX_RESULT_NULL_PTR; + } + + changed = (mix->audioconfigparams == NULL); + if (changed) + { + ret = MIX_RESULT_SUCCESS; + goto stream_changed_done; + } + + old_mask = mix_params_dup(MIX_PARAMS(mix->audioconfigparams)); + if (MIX_IS_AUDIOCONFIGPARAMS(old_mask)) + { + MIX_ACP_DECODEMODE(old_mask) = MIX_DECODE_NULL; + if (!MIX_SUCCEEDED(mix_acp_set_streamname(MIX_AUDIOCONFIGPARAMS(old_mask), NULL))) + { + g_error("Failed to set stream name!"); + goto stream_changed_done; + } + if (!MIX_SUCCEEDED(mix_acp_set_audio_manager(MIX_AUDIOCONFIGPARAMS(old_mask), MIX_AUDIOMANAGER_NONE))) + { + g_error("Failed to set audio manager!"); + goto stream_changed_done; + } + } + else + { + g_error("Failed to duplicate param!"); + goto stream_changed_done; + } + + new_mask = mix_params_dup(MIX_PARAMS(new_params)); + if (MIX_IS_AUDIOCONFIGPARAMS(new_mask)) + { + MIX_ACP_DECODEMODE(new_mask) = MIX_DECODE_NULL; + if (!MIX_SUCCEEDED(mix_acp_set_streamname(MIX_AUDIOCONFIGPARAMS(new_mask), NULL))) + { + g_error("Failed to set stream name!"); + goto stream_changed_done; + } + if (!MIX_SUCCEEDED(mix_acp_set_audio_manager(MIX_AUDIOCONFIGPARAMS(new_mask), MIX_AUDIOMANAGER_NONE))) + { + g_error("Failed to set audio manager!"); + goto stream_changed_done; + } + } + else + { + g_error("Failed to duplicate param!"); + goto stream_changed_done; + } + + + changed = !mix_params_equal(old_mask, new_mask); + + ret = MIX_RESULT_SUCCESS; + +stream_changed_done: + + if (old_mask) + { + mix_params_unref(old_mask); + old_mask=NULL; + } + if (new_mask) + { + mix_params_unref(new_mask); + new_mask=NULL; + } + + 
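/*
 * Change detection above works by masking: both the saved and the incoming
 * params are duplicated, the identity-only fields (decode mode, stream name,
 * audio manager) are blanked in each copy, and the masked copies are compared
 * with mix_params_equal(). Only a difference in the remaining codec fields
 * counts as a stream change, which lets _configure() skip the SET_PARAMS
 * ioctl when only the stream identity changed.
 */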
*pChanged = changed; + + g_message("is_stream_changed() done returning 0x%08x, changed=%d", ret, changed); + + return ret; +} diff --git a/mix_audio/src/mixaudio.h b/mix_audio/src/mixaudio.h index a3cef5a..ca7e353 100644 --- a/mix_audio/src/mixaudio.h +++ b/mix_audio/src/mixaudio.h @@ -120,7 +120,8 @@ typedef enum */ typedef struct { guchar *data; - gint size; + gint32 buffer_size; + gint32 data_size; } MixIOVec; /** @@ -151,7 +152,7 @@ struct _MixAudioClass /*< virtual public >*/ MIX_RESULT (*initialize) (MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams); MIX_RESULT (*configure) (MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams); - MIX_RESULT (*decode) (MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize); + MIX_RESULT (*decode) (MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt); MIX_RESULT (*capture_encode) (MixAudio *mix, MixIOVec *iovout, gint iovoutcnt); MIX_RESULT (*start) (MixAudio *mix); MIX_RESULT (*stop_drop) (MixAudio *mix); @@ -168,8 +169,8 @@ struct _MixAudioClass MIX_RESULT (*deinitialize) (MixAudio *mix); MIX_RESULT (*get_stream_state) (MixAudio *mix, MixStreamState *streamState); MIX_RESULT (*get_state) (MixAudio *mix, MixState *state); - MIX_RESULT (*is_am_available) (MixAudio *mix, MixAudioManager am, gboolean *avail); - MIX_RESULT (*get_output_configuration) (MixAudio *mix, MixAudioConfigParams **audioconfigparams); + MIX_RESULT (*get_config) (MixAudio *mix, MixAudioConfigParams **audioconfigparams); + MIX_RESULT (*get_bytes_decoded) (MixAudio *mix, guint64 *byte); }; /** @@ -197,7 +198,6 @@ struct _MixAudio gchar *encoding; MixState state; MixCodecMode codecMode; - gboolean useIAM; int fileDescriptor; gint streamID; guint32 amStreamID; @@ -206,6 +206,7 @@ struct _MixAudio MixAudioConfigParams *audioconfigparams; gboolean am_registered; MixDeviceState deviceState; + gboolean stream_muted; guint64 ts_last; guint64 ts_elapsed; @@ -330,7 +331,7 @@ MIX_RESULT mix_audio_configure(MixAudio *mix, MixAudioConfigParams *audioconfigp * Note: If the stream is configured as #MIX_DECODE_DIRECTRENDER, and whenever the stream is in the #MIX_STREAM_STOPPED state, the call to mix_audio_decode() will not start the playback until mix_audio_start() is called. This behavior allows the application to queue up data but delay playback until an appropriate time. * */ -MIX_RESULT mix_audio_decode(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize); +MIX_RESULT mix_audio_decode(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt); /** * mix_audio_capture_encode: @@ -547,20 +548,20 @@ gboolean mix_audio_am_is_enabled(MixAudio *mix); * * Check if AM is available. */ -MIX_RESULT mix_audio_is_am_available(MixAudio *mix, MixAudioManager am, gboolean *avail); +MIX_RESULT mix_audio_is_am_available(MixAudioManager am, gboolean *avail); /** - * mix_audio_get_output_configuration: + * mix_audio_get_config: * @mix: #MixAudio object. * @audioconfigparams: double pointer to hold output configuration. * @returns: #MIX_RESULT_SUCCESS on success or other fail code. * * This method retrieves the current configuration. This can be called after initialization. If a stream has been configured, it returns the corresponding derived object of MixAudioConfigParams. 
*/ -MIX_RESULT mix_audio_get_output_configuration(MixAudio *mix, MixAudioConfigParams **audioconfigparams); +MIX_RESULT mix_audio_get_config(MixAudio *mix, MixAudioConfigParams **audioconfigparams); /** - * mix_audio_get_stream_byte_decoded: + * mix_audio_get_bytes_decoded: * @mix: #MixAudio object. * @msecs: stream byte decoded.. * @returns: #MIX_RESULT_SUCCESS if the value is available. #MIX_RESULT_WRONG_MODE if operation is not allowed with the current mode. @@ -569,6 +570,6 @@ MIX_RESULT mix_audio_get_output_configuration(MixAudio *mix, MixAudioConfigParam * * Not Implemented. */ -MIX_RESULT mix_audio_get_stream_byte_decoded(MixAudio *mix, guint64 *byte); +MIX_RESULT mix_audio_get_bytes_decoded(MixAudio *mix, guint64 *byte); #endif /* __MIX_AUDIO_H__ */ diff --git a/mix_audio/src/sst_proxy.c b/mix_audio/src/sst_proxy.c index 438e06e..de7f7a4 100644 --- a/mix_audio/src/sst_proxy.c +++ b/mix_audio/src/sst_proxy.c @@ -147,8 +147,10 @@ gboolean mix_sst_params_convert_aac(MixAudioConfigParamsAAC *acp, struct snd_sst s->codec = p->codec = SST_CODEC_TYPE_AAC; } - p->num_chan = MIX_ACP_AAC_CHANNELS(acp); // core/internal channels - p->ext_chl = MIX_ACP_NUM_CHANNELS(acp); // external channels + p->num_chan = MIX_ACP_NUM_CHANNELS(acp); // external channels + p->ext_chl = MIX_ACP_AAC_CHANNELS(acp); // extension channel configuration. + //p->ext_chl = MIX_ACP_AAC_CHANNELS(acp); // core/internal channels + if (p->num_chan <= 0) p->num_chan = p->ext_chl; p->aac_srate = MIX_ACP_AAC_SAMPLE_RATE(acp); // aac decoder internal frequency p->sfreq = MIX_ACP_SAMPLE_FREQ(acp); // output/external frequency diff --git a/mix_common/ChangeLog b/mix_common/ChangeLog index b3469f8..d5dceab 100644 --- a/mix_common/ChangeLog +++ b/mix_common/ChangeLog @@ -1,3 +1,7 @@ +2010-01-25 Echo Choi + + * Updated version to 0.1.8 + 2010-01-11 Echo Choi * Updated version to 0.1.6 diff --git a/mix_common/configure.ac b/mix_common/configure.ac index 2165138..2dfa6aa 100644 --- a/mix_common/configure.ac +++ b/mix_common/configure.ac @@ -2,7 +2,7 @@ AC_INIT("", "", [khanh.v.nguyen@intel.com]) AC_CONFIG_MACRO_DIR(m4) -AS_MIX_VERSION(mixcommon, MIXCOMMON, 0, 1, 7) +AS_MIX_VERSION(mixcommon, MIXCOMMON, 0, 1, 8) AM_INIT_AUTOMAKE($PACKAGE, $VERSION) #AM_INIT_AUTOMAKE([-Wall -Werror foreign]) diff --git a/mix_common/mixcommon.spec b/mix_common/mixcommon.spec index 46f900a..be17602 100644 --- a/mix_common/mixcommon.spec +++ b/mix_common/mixcommon.spec @@ -1,6 +1,6 @@ Summary: MIX Common Name: mixcommon -Version: 0.1.7 +Version: 0.1.8 Release: 1 Source0: %{name}-%{version}.tar.gz NoSource: 0 diff --git a/mix_common/src/mixlog.c b/mix_common/src/mixlog.c index 0d306e4..239920e 100644 --- a/mix_common/src/mixlog.c +++ b/mix_common/src/mixlog.c @@ -257,3 +257,4 @@ exit: #endif /* MIX_LOG_USE_HT */ #endif /* !ANDROID */ + diff --git a/mix_common/src/mixlog.h b/mix_common/src/mixlog.h index 906e6c9..99ab4e2 100644 --- a/mix_common/src/mixlog.h +++ b/mix_common/src/mixlog.h @@ -31,6 +31,7 @@ void mix_log_func(const gchar* comp, gint level, const gchar *file, #define MIX_LOG_LEVEL_INFO 3 #define MIX_LOG_LEVEL_VERBOSE 4 + /* MACROS for mixlog */ #ifdef MIX_LOG_ENABLE diff --git a/mix_common/src/mixresult.h b/mix_common/src/mixresult.h index 9472a7e..0559bc2 100644 --- a/mix_common/src/mixresult.h +++ b/mix_common/src/mixresult.h @@ -69,6 +69,10 @@ typedef enum { MIX_RESULT_NO_MEMORY = (MIX_RESULT)0x80000016, MIX_RESULT_NEED_RETRY = (MIX_RESULT)0x80000017, MIX_RESULT_SYSTEM_ERRNO = (MIX_RESULT)0x80000018, + MIX_RESULT_AM_REGISTER_FAIL = 
(MIX_RESULT)0x80000019, + MIX_RESULT_AM_UNREGISTER_FAIL = (MIX_RESULT)0x80000020, + MIX_RESULT_AM_NOTIFY_PAUSE_FAIL = (MIX_RESULT)0x80000021, + MIX_RESULT_AM_NOTIFY_RESUME_FAIL = (MIX_RESULT)0x80000022, /** Module specific errors starting number */ -- cgit v1.2.3 From 21a0e0ac15a96c3ff07eefa1f9686967341b3663 Mon Sep 17 00:00:00 2001 From: Tao Tao Date: Fri, 28 May 2010 17:12:14 -0700 Subject: Merge of 0427 GAID fixes and improve HW Decode for H.264 (overlay) Change-Id: I5f4c77c123200e11d10468142a3b929778a434db --- mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk | 15 +- mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk | 15 +- mix_vbp/viddec_fw/fw/parser/Android.mk | 7 +- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 7 +- mix_vbp/viddec_fw/fw/parser/vbp_trace.h | 22 +- mix_video/configure.ac | 2 +- mix_video/mixvideo.spec | 2 +- mix_video/src/mixdisplayx11.c | 4 +- mix_video/src/mixsurfacepool.c | 4 +- mix_video/src/mixsurfacepool.h | 2 +- mix_video/src/mixvideo.c | 46 ++- mix_video/src/mixvideo.h | 1 - mix_video/src/mixvideoformat.c | 16 +- mix_video/src/mixvideoformat_h264.c | 24 +- mix_video/src/mixvideoformat_mp42.c | 14 +- mix_video/src/mixvideoformat_vc1.c | 21 +- mix_video/src/mixvideoformatenc_h264.c | 386 ++++++++++++++------- mix_video/src/mixvideoformatenc_h264.h | 15 +- mix_video/src/mixvideoformatenc_mpeg4.c | 332 +++++++++++------- mix_video/src/mixvideoformatenc_mpeg4.h | 14 +- mix_video/src/mixvideoformatenc_preview.c | 93 ++--- mix_video/src/mixvideoformatenc_preview.h | 6 +- mix_video/src/mixvideoframe.c | 108 +++++- mix_video/src/mixvideoframe.h | 101 +++++- mix_video/src/mixvideoframe_private.h | 18 + mix_video/src/mixvideorenderparams.c | 4 +- 26 files changed, 860 insertions(+), 419 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk b/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk index 37dfdcf..b7c15d6 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk @@ -17,24 +17,19 @@ LOCAL_SRC_FILES := \ LOCAL_CFLAGS := -DVBP -DHOST_ONLY -LOCAL_C_INCLUDES := \ - $(GLIB_TOP) \ - $(GLIB_TOP)/android \ - $(GLIB_TOP)/glib \ - $(GLIB_TOP)/gobject \ +LOCAL_C_INCLUDES := \ + $(GLIB_TOP) \ + $(GLIB_TOP)/android \ + $(GLIB_TOP)/glib \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/include \ - $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/h264/include \ - $(TARGET_OUT_HEADERS)/libmixcommon + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/h264/include LOCAL_MODULE := libmixvbp_h264 LOCAL_SHARED_LIBRARIES := \ libglib-2.0 \ - libgobject-2.0 \ - libgthread-2.0 \ - libgmodule-2.0 \ libmixvbp include $(BUILD_SHARED_LIBRARY) diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk index c539d61..23c0c52 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk @@ -14,24 +14,19 @@ LOCAL_SRC_FILES := \ LOCAL_CFLAGS := -DVBP -DHOST_ONLY -LOCAL_C_INCLUDES := \ - $(GLIB_TOP) \ - $(GLIB_TOP)/android \ - $(GLIB_TOP)/glib \ - $(GLIB_TOP)/gobject \ +LOCAL_C_INCLUDES := \ + $(GLIB_TOP) \ + $(GLIB_TOP)/android \ + $(GLIB_TOP)/glib \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/include \ - 
$(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/include \ - $(TARGET_OUT_HEADERS)/libmixcommon + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/include LOCAL_MODULE := libmixvbp_mpeg4 LOCAL_SHARED_LIBRARIES := \ libglib-2.0 \ - libgobject-2.0 \ - libgthread-2.0 \ - libgmodule-2.0 \ libmixvbp include $(BUILD_SHARED_LIBRARY) diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk index 797c640..cc9ba5a 100644 --- a/mix_vbp/viddec_fw/fw/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -26,7 +26,6 @@ LOCAL_C_INCLUDES += \ $(GLIB_TOP) \ $(GLIB_TOP)/glib \ $(GLIB_TOP)/android \ - $(GLIB_TOP)/gobject \ $(LOCAL_PATH)/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/include \ @@ -35,7 +34,6 @@ LOCAL_C_INCLUDES += \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vc1/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vc1/parser \ - $(TARGET_OUT_HEADERS)/libmixcommon \ $(TARGET_OUT_HEADERS)/libva LOCAL_COPY_HEADERS_TO := libmixvbp @@ -48,10 +46,7 @@ LOCAL_MODULE := libmixvbp LOCAL_SHARED_LIBRARIES := \ libdl \ libcutils \ - libglib-2.0 \ - libgobject-2.0 \ - libgthread-2.0 \ - libgmodule-2.0 + libglib-2.0 ifeq ($(strip $(MIXVBP_LOG_ENABLE)),true) LOCAL_CFLAGS += -DVBP_TRACE diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index e82d7b5..8695ef7 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -1440,13 +1440,14 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) while (size_left >= NAL_length_size) { NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed); - - size_parsed += NAL_length_size; + + size_parsed += NAL_length_size; #else while (size_left > 0) { - NAL_length = size_left; + NAL_length = size_left; #endif + cxt->list.data[cxt->list.num_items].stpos = size_parsed; size_parsed += NAL_length; /* skip NAL bytes */ /* end position is exclusive */ diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h index c532b67..9f2a21c 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h @@ -17,25 +17,6 @@ #ifdef VBP_TRACE /* if VBP_TRACE is defined*/ -#ifdef ANDROID - -#include - -#define ETRACE(format, ...) \ - __android_log_print(ANDROID_LOG_ERROR, "mixvbp", "%s():%d: "format, \ - __FUNCTION__, __LINE__, ##__VA_ARGS__) -#define WTRACE(format, ...) \ - __android_log_print(ANDROID_LOG_WARN, "mixvbp", "%s():%d: "format, \ - __FUNCTION__, __LINE__, ##__VA_ARGS__) -#define ITRACE(format, ...) \ - __android_log_print(ANDROID_LOG_INFO, "mixvbp", "%s():%d: "format, \ - __FUNCTION__, __LINE__, ##__VA_ARGS__) -#define VTRACE(format, ...) \ - __android_log_print(ANDROID_LOG_VERBOSE, "mixvbp", "%s():%d: "format, \ - __FUNCTION__, __LINE__, ##__VA_ARGS__) - -#else - #include #include @@ -50,8 +31,6 @@ vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) #define ITRACE(format, ...) VBP_TRACE_UTIL("INFO: ", format, ##__VA_ARGS__) #define VTRACE(format, ...) VBP_TRACE_UTIL("VERBOSE: ", format, ##__VA_ARGS__) -#endif /* ANDROID */ - #else /* if VBP_TRACE is not defined */ #define ETRACE(format, ...) @@ -59,6 +38,7 @@ vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) #define ITRACE(format, ...) #define VTRACE(format, ...) 
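/*
 * With VBP_TRACE undefined the macros above compile to nothing; with it
 * defined each call prints the function name and line. Illustrative call
 * site (hypothetical, not part of this patch):
 *
 *   ETRACE("failed to parse start code: 0x%x", error);
 */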
+ #endif /* VBP_TRACE*/ diff --git a/mix_video/configure.ac b/mix_video/configure.ac index 8605a92..6baf0ed 100644 --- a/mix_video/configure.ac +++ b/mix_video/configure.ac @@ -2,7 +2,7 @@ AC_INIT("", "", [linda.s.cline@intel.com]) AC_CONFIG_MACRO_DIR(m4) -AS_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 14) +AS_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 15) dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode AM_MAINTAINER_MODE diff --git a/mix_video/mixvideo.spec b/mix_video/mixvideo.spec index 7be66bb..8e5efb4 100644 --- a/mix_video/mixvideo.spec +++ b/mix_video/mixvideo.spec @@ -6,7 +6,7 @@ Summary: MIX Video Name: mixvideo -Version: 0.1.14 +Version: 0.1.15 Release: 1 Source0: %{name}-%{version}.tar.gz NoSource: 0 diff --git a/mix_video/src/mixdisplayx11.c b/mix_video/src/mixdisplayx11.c index 467bde2..7398234 100644 --- a/mix_video/src/mixdisplayx11.c +++ b/mix_video/src/mixdisplayx11.c @@ -20,14 +20,14 @@ static GType _mix_displayx11_type = 0; static MixDisplayClass *parent_class = NULL; -#define _do_init { _mix_displayx11_type = g_define_type_id; } - #ifdef ANDROID int XSync(Display* display, Bool bvalue) { return 0; } #endif +#define _do_init { _mix_displayx11_type = g_define_type_id; } + gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src); MixDisplay *mix_displayx11_dup(const MixDisplay * obj); gboolean mix_displayx11_equal(MixDisplay * first, MixDisplay * second); diff --git a/mix_video/src/mixsurfacepool.c b/mix_video/src/mixsurfacepool.c index f7672c8..0c778af 100644 --- a/mix_video/src/mixsurfacepool.c +++ b/mix_video/src/mixsurfacepool.c @@ -221,7 +221,7 @@ gboolean mix_surfacepool_equal(MixParams * first, MixParams * second) { * frame objects that represents a pool of surfaces. */ MIX_RESULT mix_surfacepool_initialize(MixSurfacePool * obj, - VASurfaceID *surfaces, guint num_surfaces) { + VASurfaceID *surfaces, guint num_surfaces, VADisplay va_display) { LOG_V( "Begin\n"); @@ -287,6 +287,8 @@ MIX_RESULT mix_surfacepool_initialize(MixSurfacePool * obj, // Set the pool reference in the private data of the frame object mix_videoframe_set_pool(frame, obj); + mix_videoframe_set_vadisplay(frame, va_display); + //Add each frame object to the pool list obj->free_list = g_slist_append(obj->free_list, frame); diff --git a/mix_video/src/mixsurfacepool.h b/mix_video/src/mixsurfacepool.h index 6468ebe..d475792 100644 --- a/mix_video/src/mixsurfacepool.h +++ b/mix_video/src/mixsurfacepool.h @@ -139,7 +139,7 @@ MixSurfacePool *mix_surfacepool_ref (MixSurfacePool * mix); /* Class Methods */ MIX_RESULT mix_surfacepool_initialize (MixSurfacePool * obj, - VASurfaceID *surfaces, guint num_surfaces); + VASurfaceID *surfaces, guint num_surfaces, VADisplay va_display); MIX_RESULT mix_surfacepool_put (MixSurfacePool * obj, MixVideoFrame * frame); diff --git a/mix_video/src/mixvideo.c b/mix_video/src/mixvideo.c index ef74184..ffd72d5 100644 --- a/mix_video/src/mixvideo.c +++ b/mix_video/src/mixvideo.c @@ -12,6 +12,8 @@ #endif #include +#include + #include "mixvideolog.h" #include "mixdisplayx11.h" @@ -104,7 +106,6 @@ MIX_RESULT mix_video_render_default(MixVideo * mix, MIX_RESULT mix_video_get_decoded_data_default(MixVideo * mix, MixIOVec * iovout, MixVideoRenderParams * render_params, MixVideoFrame *frame); - MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, MixVideoEncodeParams * encode_params); @@ -165,7 +166,7 @@ static void mix_video_class_init(MixVideoClass * klass) { klass->get_frame_func = 
mix_video_get_frame_default; klass->release_frame_func = mix_video_release_frame_default; klass->render_func = mix_video_render_default; - klass->get_decoded_data_func = mix_video_get_decoded_data_default; + klass->get_decoded_data_func = mix_video_get_decoded_data_default; klass->encode_func = mix_video_encode_default; klass->flush_func = mix_video_flush_default; klass->eos_func = mix_video_eos_default; @@ -377,12 +378,10 @@ MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode, if (MIX_IS_DISPLAYX11(mix_display)) { MixDisplayX11 *mix_displayx11 = MIX_DISPLAYX11(mix_display); -#if 1 - mix_displayx11->display = g_malloc(sizeof(Display)); - *(mix_displayx11->display) = 0x18c34078; -#else - //mix_displayx11->display = 1; -#endif + + /* XXX NOTE: This must be fixed in all clients */ + mix_displayx11->display = 0x18c34078; + ret = mix_displayx11_get_display(mix_displayx11, &display); if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed to get display 2\n"); @@ -1123,6 +1122,8 @@ MIX_RESULT mix_video_render_default(MixVideo * mix, gulong va_surface_id; VAStatus va_status; + gboolean sync_flag = FALSE; + CHECK_INIT_CONFIG(mix, priv); if (!render_params || !frame) { @@ -1213,6 +1214,29 @@ MIX_RESULT mix_video_render_default(MixVideo * mix, guint32 frame_structure = 0; mix_videoframe_get_frame_structure(frame, &frame_structure); + + ret = mix_videoframe_get_sync_flag(frame, &sync_flag); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get sync_flag\n"); + goto cleanup; + } + + if (!sync_flag) { + ret = mix_videoframe_set_sync_flag(frame, TRUE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + goto cleanup; + } + + va_status = vaSyncSurface(priv->va_display, va_surface_id); + if (va_status != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Failed vaSyncSurface() : va_status = 0x%x\n", va_status); + goto cleanup; + } + } + + /* TODO: the last param of vaPutSurface is de-interlacing flags, what is value shall be*/ va_status = vaPutSurface(priv->va_display, (VASurfaceID) va_surface_id, @@ -1222,13 +1246,10 @@ MIX_RESULT mix_video_render_default(MixVideo * mix, if (va_status != VA_STATUS_SUCCESS) { ret = MIX_RESULT_FAIL; - LOG_E("Failed vaPutSurface() : va_status = %d\n", va_status); + LOG_E("Failed vaPutSurface() : va_status = 0x%x\n", va_status); goto cleanup; } - /* TODO: Is this only for X11? 
*/ - XSync(display, FALSE); - ret = MIX_RESULT_SUCCESS; cleanup: @@ -1411,7 +1432,6 @@ MIX_RESULT mix_video_get_decoded_data(MixVideo * mix, MixIOVec * iovout, return MIX_RESULT_NOTIMPL; } - MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, MixVideoEncodeParams * encode_params) { diff --git a/mix_video/src/mixvideo.h b/mix_video/src/mixvideo.h index 73c6b71..91e9011 100644 --- a/mix_video/src/mixvideo.h +++ b/mix_video/src/mixvideo.h @@ -69,7 +69,6 @@ typedef MIX_RESULT (*MixVideoGetDecodedDataFunc)(MixVideo * mix, MixIOVec * iovout, MixVideoRenderParams * render_params, MixVideoFrame *frame); - typedef MIX_RESULT (*MixVideoEncodeFunc)(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, MixVideoEncodeParams * encode_params); diff --git a/mix_video/src/mixvideoformat.c b/mix_video/src/mixvideoformat.c index fa601cb..f446651 100644 --- a/mix_video/src/mixvideoformat.c +++ b/mix_video/src/mixvideoformat.c @@ -115,14 +115,6 @@ void mix_videoformat_finalize(GObject * obj) { //libVA cleanup (vaTerminate is called from MixVideo object) if (mix->va_display) { - if (mix->va_context != VA_INVALID_ID) - { - va_status = vaDestroyConfig(mix->va_display, mix->va_config); - if (va_status != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaDestroyConfig\n"); - } - mix->va_config = VA_INVALID_ID; - } if (mix->va_context != VA_INVALID_ID) { va_status = vaDestroyContext(mix->va_display, mix->va_context); @@ -131,6 +123,14 @@ void mix_videoformat_finalize(GObject * obj) { } mix->va_context = VA_INVALID_ID; } + if (mix->va_config != VA_INVALID_ID) + { + va_status = vaDestroyConfig(mix->va_display, mix->va_config); + if (va_status != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaDestroyConfig\n"); + } + mix->va_config = VA_INVALID_ID; + } if (mix->va_surfaces) { va_status = vaDestroySurfaces(mix->va_display, mix->va_surfaces, mix->va_num_surfaces); diff --git a/mix_video/src/mixvideoformat_h264.c b/mix_video/src/mixvideoformat_h264.c index 9e81cbf..1f57019 100644 --- a/mix_video/src/mixvideoformat_h264.c +++ b/mix_video/src/mixvideoformat_h264.c @@ -493,7 +493,7 @@ MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, ret = mix_surfacepool_initialize(parent->surfacepool, - surfaces, numSurfaces); + surfaces, numSurfaces, vadisplay); switch (ret) { @@ -932,7 +932,7 @@ MIX_RESULT mix_videofmt_h264_deinitialize(MixVideoFormat *mix) { } #define HACK_DPB #ifdef HACK_DPB -static inline void mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, +static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, vbp_picture_data_h264* pic_data ) { @@ -942,6 +942,7 @@ static inline void mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms; VAPictureH264 *pRefList = NULL; int i = 0, j = 0, k = 0, list = 0; + MIX_RESULT ret = MIX_RESULT_FAIL; MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); @@ -989,6 +990,9 @@ static inline void mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, { guint poc = mix_videofmt_h264_get_poc(&(pRefList[j])); gpointer video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc); + + if (!video_frame) return MIX_RESULT_DROPFRAME; //return non-fatal error + pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = ((MixVideoFrame *)video_frame)->frame_id; @@ -1009,6 +1013,7 @@ static inline void mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, } } + return MIX_RESULT_SUCCESS; } #endif @@ -1127,7 +1132,12 @@ 
MIX_RESULT mix_videofmt_h264_process_decode_picture(MixVideoFormat *mix, #ifdef HACK_DPB //We have to provide a hacked DPB rather than complete DPB for libva as workaround - mix_videofmt_h264_hack_dpb(mix, pic_data); + ret = mix_videofmt_h264_hack_dpb(mix, pic_data); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error reference frame not found\n"); + goto cleanup; + } #endif //Libva buffer set up @@ -1196,7 +1206,7 @@ MIX_RESULT mix_videofmt_h264_process_decode_picture(MixVideoFormat *mix, if (video_frame == NULL) { LOG_E( "unable to find surface of picture %d (current picture %d).", poc, mix_videofmt_h264_get_poc(&pic_params->CurrPic)); - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_DROPFRAME; //return non-fatal error goto cleanup; } else @@ -1219,7 +1229,7 @@ MIX_RESULT mix_videofmt_h264_process_decode_picture(MixVideoFormat *mix, if (video_frame == NULL) { LOG_E( "unable to find surface of picture %d (current picture %d).", poc, mix_videofmt_h264_get_poc(&pic_params->CurrPic)); - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_DROPFRAME; //return non-fatal error goto cleanup; } else @@ -1330,6 +1340,8 @@ MIX_RESULT mix_videofmt_h264_process_decode_picture(MixVideoFormat *mix, goto cleanup; } +#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ + LOG_V( "Calling vaSyncSurface\n"); //Decode the picture @@ -1341,7 +1353,7 @@ MIX_RESULT mix_videofmt_h264_process_decode_picture(MixVideoFormat *mix, LOG_E( "Video driver returned error from vaSyncSurface\n"); goto cleanup; } - +#endif if (pic_index == 0) { diff --git a/mix_video/src/mixvideoformat_mp42.c b/mix_video/src/mixvideoformat_mp42.c index 3aae249..c6c7b30 100644 --- a/mix_video/src/mixvideoformat_mp42.c +++ b/mix_video/src/mixvideoformat_mp42.c @@ -347,7 +347,7 @@ MIX_RESULT mix_videofmt_mp42_initialize(MixVideoFormat *mix, *surface_pool = parent->surfacepool; ret = mix_surfacepool_initialize(parent->surfacepool, surfaces, - numSurfaces); + numSurfaces, va_display); /* Initialize and save the VA context ID * Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 @@ -665,7 +665,7 @@ MIX_RESULT mix_videofmt_mp42_process_decode(MixVideoFormat *mix, */ if (!g_queue_is_empty(self->packed_stream_queue)) { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_DROPFRAME; LOG_E("The previous packed frame is not fully processed yet!\n"); goto cleanup; } @@ -679,19 +679,19 @@ MIX_RESULT mix_videofmt_mp42_process_decode(MixVideoFormat *mix, /* Is the first frame in the packed frames a reference frame? 
*/ if (idx == 0 && frame_type != MP4_VOP_TYPE_I && frame_type != MP4_VOP_TYPE_P) { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_DROPFRAME; LOG_E("The first frame in packed frame is not I or P\n"); goto cleanup; } if (idx != 0 && frame_type != MP4_VOP_TYPE_B) { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_DROPFRAME; LOG_E("The frame other than the first one in packed frame is not B\n"); goto cleanup; } if (picture_data->vop_coded == 0) { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_DROPFRAME; LOG_E("In packed frame, there's an unexpected skipped frame\n"); goto cleanup; } @@ -769,7 +769,7 @@ MIX_RESULT mix_videofmt_mp42_process_decode(MixVideoFormat *mix, frame_type = picture_param->vop_fields.bits.vop_coding_type; if (frame_type == MP4_VOP_TYPE_B) { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_DROPFRAME; LOG_E("The frame right after packed frame is B frame!\n"); goto cleanup; } @@ -1055,6 +1055,7 @@ MIX_RESULT mix_videofmt_mp42_process_decode(MixVideoFormat *mix, goto cleanup; } +#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ LOG_V("Calling vaSyncSurface\n"); /* Decode the picture */ @@ -1064,6 +1065,7 @@ MIX_RESULT mix_videofmt_mp42_process_decode(MixVideoFormat *mix, goto cleanup; } +#endif /* Set the discontinuity flag */ mix_videoframe_set_discontinuity(frame, discontinuity); diff --git a/mix_video/src/mixvideoformat_vc1.c b/mix_video/src/mixvideoformat_vc1.c index ec09985..0702c85 100644 --- a/mix_video/src/mixvideoformat_vc1.c +++ b/mix_video/src/mixvideoformat_vc1.c @@ -546,7 +546,7 @@ MIX_RESULT mix_videofmt_vc1_initialize(MixVideoFormat *mix, ret = mix_surfacepool_initialize(parent->surfacepool, - surfaces, numSurfaces); + surfaces, numSurfaces, vadisplay); switch (ret) { @@ -995,7 +995,7 @@ MIX_RESULT mix_videofmt_vc1_decode_a_picture( ret = mix_videoframe_set_frame_type(frame, frame_type); break; case VC1_PTYPE_BI: // BI frame type - ret = mix_videoframe_set_frame_type(frame, TYPE_I); + ret = mix_videoframe_set_frame_type(frame, TYPE_B); break; //Not indicated here case VC1_PTYPE_SKIPPED: default: @@ -1035,7 +1035,7 @@ could have been overwritten and hence not available for reference. 
*/ LOG_E( "reference distance is not 0!"); - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_DROPFRAME; goto cleanup; } if (1 == pic_index) @@ -1070,14 +1070,19 @@ MIX_RESULT mix_videofmt_vc1_decode_a_picture( } else { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_DROPFRAME; LOG_E( "Error could not find reference frames for P frame\n"); goto cleanup; } } pic_params->backward_reference_picture = VA_INVALID_SURFACE; - LOG_V( "P frame, surface ID %u, forw ref frame is %u\n", (guint)frame->frame_id, (guint)self->reference_frames[0]->frame_id); +#ifdef MIX_LOG_ENABLE /* this is to fix a crash when MIX_LOG_ENABLE is set */ + if(self->reference_frames[0] && frame) { + LOG_V( "P frame, surface ID %u, forw ref frame is %u\n", + (guint)frame->frame_id, (guint)self->reference_frames[0]->frame_id); + } +#endif LOG_V( "mix_video vinfo: Frame type is P\n"); break; @@ -1087,7 +1092,7 @@ MIX_RESULT mix_videofmt_vc1_decode_a_picture( if (!self->haveBframes) //We don't expect B frames and have not allocated a surface // for the extra ref frame so this is an error { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_DROPFRAME; LOG_E( "Unexpected B frame, cannot process\n"); goto cleanup; } @@ -1099,7 +1104,7 @@ MIX_RESULT mix_videofmt_vc1_decode_a_picture( LOG_V( "mix_video vinfo: Frame type is B\n"); break; - case VC1_PTYPE_BI: + case VC1_PTYPE_BI: pic_params->forward_reference_picture = VA_INVALID_SURFACE; pic_params->backward_reference_picture = VA_INVALID_SURFACE; LOG_V( "BI frame\n"); @@ -1274,6 +1279,7 @@ MIX_RESULT mix_videofmt_vc1_decode_a_picture( goto cleanup; } +#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ LOG_V( "Calling vaSyncSurface\n"); //Decode the picture @@ -1285,6 +1291,7 @@ MIX_RESULT mix_videofmt_vc1_decode_a_picture( LOG_E( "Video driver returned error from vaSyncSurface\n"); goto cleanup; } +#endif cleanup: if (NULL != buffer_ids) diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c index 8472e93..bf25304 100644 --- a/mix_video/src/mixvideoformatenc_h264.c +++ b/mix_video/src/mixvideoformatenc_h264.c @@ -44,14 +44,15 @@ static void mix_videoformatenc_h264_init(MixVideoFormatEnc_H264 * self) { self->encoded_frames = 0; self->pic_skipped = FALSE; self->is_intra = TRUE; - self->cur_fame = NULL; - self->ref_fame = NULL; - self->rec_fame = NULL; + self->cur_frame = NULL; + self->ref_frame = NULL; + self->rec_frame = NULL; self->ci_shared_surfaces = NULL; self->surfaces= NULL; self->surface_num = 0; + self->coded_buf_index = 0; parent->initialized = FALSE; } @@ -585,7 +586,7 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, "mix_surfacepool_initialize\n"); ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces); + self->surfaces, parent->ci_frame_num + numSurfaces, va_display); switch (ret) { @@ -641,7 +642,23 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, VAEncCodedBufferType, self->coded_buf_size, // 1, NULL, - &self->coded_buf); + &(self->coded_buf[0])); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + g_free (surfaces); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &(self->coded_buf[1])); if (va_status != VA_STATUS_SUCCESS) { @@ -793,25 +810,25 @@ MIX_RESULT 
mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { #if 0 /*unref the current source surface*/ - if (self->cur_fame != NULL) + if (self->cur_frame != NULL) { - mix_videoframe_unref (self->cur_fame); - self->cur_fame = NULL; + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; } #endif /*unref the reconstructed surface*/ - if (self->rec_fame != NULL) + if (self->rec_frame != NULL) { - mix_videoframe_unref (self->rec_fame); - self->rec_fame = NULL; + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; } /*unref the reference surface*/ - if (self->ref_fame != NULL) + if (self->ref_frame != NULL) { - mix_videoframe_unref (self->ref_fame); - self->ref_fame = NULL; + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; } /*reset the properities*/ @@ -868,25 +885,25 @@ MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) { #if 0 /*unref the current source surface*/ - if (self->cur_fame != NULL) + if (self->cur_frame != NULL) { - mix_videoframe_unref (self->cur_fame); - self->cur_fame = NULL; + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; } #endif /*unref the reconstructed surface*/ - if (self->rec_fame != NULL) + if (self->rec_frame != NULL) { - mix_videoframe_unref (self->rec_fame); - self->rec_fame = NULL; + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; } /*unref the reference surface*/ - if (self->ref_fame != NULL) + if (self->ref_frame != NULL) { - mix_videoframe_unref (self->ref_fame); - self->ref_fame = NULL; + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; } LOG_V( "Release surfaces\n"); @@ -1045,20 +1062,15 @@ MIX_RESULT mix_videofmtenc_h264_send_picture_parameter (MixVideoFormatEnc_H264 * return MIX_RESULT_NULL_PTR; LOG_V( "Begin\n\n"); - -#if 0 //not needed currently - MixVideoConfigParamsEncH264 * params_h264 - = MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc); -#endif - + if (MIX_IS_VIDEOFORMATENC_H264(mix)) { parent = MIX_VIDEOFORMATENC(&(mix->parent)); /*set picture params for HW*/ - h264_pic_param.reference_picture = mix->ref_fame->frame_id; - h264_pic_param.reconstructed_picture = mix->rec_fame->frame_id; - h264_pic_param.coded_buf = mix->coded_buf; + h264_pic_param.reference_picture = mix->ref_frame->frame_id; + h264_pic_param.reconstructed_picture = mix->rec_frame->frame_id; + h264_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; h264_pic_param.picture_width = parent->picture_width; h264_pic_param.picture_height = parent->picture_height; h264_pic_param.last_picture = 0; @@ -1070,8 +1082,10 @@ MIX_RESULT mix_videofmtenc_h264_send_picture_parameter (MixVideoFormatEnc_H264 * h264_pic_param.reference_picture); LOG_I( "reconstructed_picture = 0x%08x\n", h264_pic_param.reconstructed_picture); + LOG_I( "coded_buf_index = %d\n", + mix->coded_buf_index); LOG_I( "coded_buf = 0x%08x\n", - h264_pic_param.coded_buf); + h264_pic_param.coded_buf); LOG_I( "picture_width = %d\n", h264_pic_param.picture_width); LOG_I( "picture_height = %d\n\n", @@ -1107,7 +1121,7 @@ MIX_RESULT mix_videofmtenc_h264_send_picture_parameter (MixVideoFormatEnc_H264 * "not H264 video encode Object\n"); return MIX_RESULT_FAIL; } - + LOG_V( "end\n"); return MIX_RESULT_SUCCESS; @@ -1270,7 +1284,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, gulong surface = 0; guint16 width, height; - MixVideoFrame * tmp_fame; + MixVideoFrame * tmp_frame; guint8 *buf; if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { @@ -1320,9 +1334,9 @@ MIX_RESULT 
mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, LOG_V( "We are NOT in share buffer mode\n"); - if (mix->ref_fame == NULL) + if (mix->ref_frame == NULL) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_fame); + ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used { LOG_E( @@ -1331,9 +1345,9 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, } } - if (mix->rec_fame == NULL) + if (mix->rec_frame == NULL) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_fame); + ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); if (ret != MIX_RESULT_SUCCESS) { LOG_E( @@ -1343,12 +1357,12 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, } if (parent->need_display) { - mix->cur_fame = NULL; + mix->cur_frame = NULL; } - if (mix->cur_fame == NULL) + if (mix->cur_frame == NULL) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_fame); + ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); if (ret != MIX_RESULT_SUCCESS) { LOG_E( @@ -1369,7 +1383,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, LOG_V( "map source data to surface\n"); - ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface); + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); if (ret != MIX_RESULT_SUCCESS) { LOG_E( @@ -1425,6 +1439,12 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, guint8 *inbuf = bufin->data; +/* mutually exclusive */ +//#define USE_SRC_FMT_YUV420 +//#define USE_SRC_FMT_NV12 +#define USE_SRC_FMT_NV21 + +#ifdef USE_SRC_FMT_YUV420 /*need to convert YUV420 to NV12*/ dst_y = pvbuf +image->offsets[0]; @@ -1443,6 +1463,36 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, } dst_uv += image->pitches[1]; } +#else //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 + int offset_uv = width * height; + guint8 *inbuf_uv = inbuf + offset_uv; + int height_uv = height / 2; + int width_uv = width; + + dst_y = pvbuf + image->offsets[0]; + for (i = 0; i < height; i++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + +#ifdef USE_SRC_FMT_NV12 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i++) { + memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); + dst_uv += image->pitches[1]; + } +#else //USE_SRC_FMT_NV21 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i ++) { + for (j = 0; j < width_uv; j += 2) { + dst_uv[j] = inbuf_uv[j+1]; //u + dst_uv[j+1] = inbuf_uv[j]; //v + } + dst_uv += image->pitches[1]; + inbuf_uv += width_uv; + } +#endif +#endif //USE_SRC_FMT_YUV420 vaUnmapBuffer(va_display, image->buf); if (va_status != VA_STATUS_SUCCESS) @@ -1469,12 +1519,12 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, MixVideoFrame * frame = mix_videoframe_new(); - if (mix->ref_fame == NULL) + if (mix->ref_frame == NULL) { ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_fame, frame); + (parent->surfacepool, &mix->ref_frame, frame); if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used { LOG_E( @@ -1483,12 +1533,12 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, } } - if (mix->rec_fame == NULL) + if (mix->rec_frame == NULL) { ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); ret = 
mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_fame, frame); + (parent->surfacepool, &mix->rec_frame, frame); if (ret != MIX_RESULT_SUCCESS) { @@ -1498,13 +1548,13 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, } } - //mix_videoframe_unref (mix->cur_fame); + //mix_videoframe_unref (mix->cur_frame); if (parent->need_display) { - mix->cur_fame = NULL; + mix->cur_frame = NULL; } - if (mix->cur_fame == NULL) + if (mix->cur_frame == NULL) { guint ci_idx; memcpy (&ci_idx, bufin->data, bufin->size); @@ -1524,7 +1574,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_fame, frame); + (parent->surfacepool, &mix->cur_frame, frame); if (ret != MIX_RESULT_SUCCESS) { @@ -1534,7 +1584,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, } } - ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface); + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); } @@ -1542,6 +1592,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, LOG_I( "va_context = 0x%08x\n",(guint)va_context); LOG_I( "surface = 0x%08x\n",(guint)surface); LOG_I( "va_display = 0x%08x\n",(guint)va_display); + va_status = vaBeginPicture(va_display, va_context, surface); if (va_status != VA_STATUS_SUCCESS) @@ -1549,67 +1600,71 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, LOG_E( "Failed vaBeginPicture\n"); return MIX_RESULT_FAIL; } - - LOG_V( "mix_videofmtenc_h264_send_seq_params\n"); - - if (mix->encoded_frames == 0) { - mix_videofmtenc_h264_send_seq_params (mix); - if (ret != MIX_RESULT_SUCCESS) + + ret = mix_videofmtenc_h264_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); + return MIX_RESULT_FAIL; + } + + + if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed mix_videofmtenc_h264_send_seq_params\n"); + LOG_E( "Failed vaEndPicture\n"); return MIX_RESULT_FAIL; } } - ret = mix_videofmtenc_h264_send_picture_parameter (mix); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_picture_parameter\n"); - return MIX_RESULT_FAIL; + if (mix->encoded_frames == 0) { + mix->encoded_frames ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + + mix->last_frame = mix->cur_frame; + + + /* determine the picture type*/ + if ((mix->encoded_frames % parent->intra_period) == 0) { + mix->is_intra = TRUE; + } else { + mix->is_intra = FALSE; + } + + tmp_frame = mix->rec_frame; + mix->rec_frame= mix->ref_frame; + mix->ref_frame = tmp_frame; + + } - ret = mix_videofmtenc_h264_send_slice_parameter (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_slice_parameter\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "before vaEndPicture\n"); - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - return MIX_RESULT_FAIL; - } - LOG_V( "vaSyncSurface\n"); - va_status = vaSyncSurface(va_display, surface); + va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed 
vaSyncSurface\n"); - return MIX_RESULT_FAIL; - } - + + //return MIX_RESULT_FAIL; + } LOG_V( "Start to get encoded data\n"); - + /*get encoded data from the VA buffer*/ - va_status = vaMapBuffer (va_display, mix->coded_buf, (void **)&buf); + va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaMapBuffer\n"); return MIX_RESULT_FAIL; - } - + } + // first 4 bytes is the size of the buffer memcpy (&(iovout->data_size), (void*)buf, 4); //size = (guint*) buf; @@ -1620,6 +1675,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, //We will support two buffer mode, one is application allocates the buffer and passes to encode, //the other is encode allocate memory + if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. iovout->data = g_malloc (size); // In case we have lots of 0x000001 start code, and we replace them with 4 bytes length prefixed @@ -1637,15 +1693,15 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, guint zero_byte_count = 0; guint prefix_length = 0; guint8 nal_unit_type = 0; - guint8 * payload = buf + 16; + guint8 * payload = buf + 16; while ((payload[pos++] == 0x00)) { zero_byte_count ++; if (pos >= iovout->data_size) //to make sure the buffer to be accessed is valid break; } - - nal_unit_type = (guint8)(payload[pos] & 0x1f); + + nal_unit_type = (guint8)(payload[pos] & 0x1f); prefix_length = zero_byte_count + 1; LOG_I ("nal_unit_type = %d\n", nal_unit_type); @@ -1675,34 +1731,51 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, iovout->data_size = size; LOG_I( "out size is = %d\n", iovout->data_size); - - va_status = vaUnmapBuffer (va_display, mix->coded_buf); + + va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaUnmapBuffer\n"); return MIX_RESULT_FAIL; } - LOG_V( "get encoded data done\n"); -#if 0 - if (parent->drawable) { - va_status = vaPutSurface(va_display, surface, (Drawable)parent->drawable, - 0,0, width, height, - 0,0, width, height, - NULL,0,0); - } - -#ifdef SHOW_SRC - else { - - va_status = vaPutSurface(va_display, surface, win, - 0,0, width, height, - 0,0, width, height, - NULL,0,0); - } -#endif //SHOW_SRC -#endif + LOG_V( "get encoded data done\n"); + if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + return MIX_RESULT_FAIL; + } + } + + if (mix->encoded_frames == 1) { + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaBeginPicture\n"); + return MIX_RESULT_FAIL; + } + + ret = mix_videofmtenc_h264_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + return MIX_RESULT_FAIL; + } + + } + VASurfaceStatus status; /*query the status of current surface*/ @@ -1716,7 +1789,13 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, mix->pic_skipped = status & VASurfaceSkipped; if (parent->need_display) { - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_fame); + ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); 
+ if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + return ret; + } + + ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); if (ret != MIX_RESULT_SUCCESS) { LOG_E( @@ -1728,25 +1807,30 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, /*update the reference surface and reconstructed surface */ if (!mix->pic_skipped) { - tmp_fame = mix->rec_fame; - mix->rec_fame= mix->ref_fame; - mix->ref_fame = tmp_fame; + tmp_frame = mix->rec_frame; + mix->rec_frame= mix->ref_frame; + mix->ref_frame = tmp_frame; } #if 0 - if (mix->ref_fame != NULL) - mix_videoframe_unref (mix->ref_fame); - mix->ref_fame = mix->rec_fame; + if (mix->ref_frame != NULL) + mix_videoframe_unref (mix->ref_frame); + mix->ref_frame = mix->rec_frame; - mix_videoframe_unref (mix->cur_fame); + mix_videoframe_unref (mix->cur_frame); #endif + + mix->encoded_frames ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + mix->last_frame = mix->cur_frame; + if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_fame); - mix->cur_fame = NULL; + mix_videoframe_unref (mix->cur_frame); + mix->cur_frame = NULL; } - - mix->encoded_frames ++; } else { @@ -1754,8 +1838,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, "not H264 video encode Object\n"); return MIX_RESULT_FAIL; } - - + LOG_V( "end\n"); return MIX_RESULT_SUCCESS; @@ -1952,3 +2035,46 @@ MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed ( } +MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (MIX_IS_VIDEOFORMATENC_H264(mix)) + { + if (mix->encoded_frames == 0) { + mix_videofmtenc_h264_send_seq_params (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_seq_params\n"); + return MIX_RESULT_FAIL; + } + } + + ret = mix_videofmtenc_h264_send_picture_parameter (mix); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_picture_parameter\n"); + return MIX_RESULT_FAIL; + } + + ret = mix_videofmtenc_h264_send_slice_parameter (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_slice_parameter\n"); + return MIX_RESULT_FAIL; + } + + } + + LOG_V( "End\n"); + + + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoformatenc_h264.h b/mix_video/src/mixvideoformatenc_h264.h index eeef2d9..0ef0e18 100644 --- a/mix_video/src/mixvideoformatenc_h264.h +++ b/mix_video/src/mixvideoformatenc_h264.h @@ -34,7 +34,8 @@ struct _MixVideoFormatEnc_H264 { /*< public > */ MixVideoFormatEnc parent; - VABufferID coded_buf; + VABufferID coded_buf[2]; + VABufferID last_coded_buf; VABufferID seq_param_buf; VABufferID pic_param_buf; VABufferID slice_param_buf; @@ -42,9 +43,10 @@ struct _MixVideoFormatEnc_H264 { VASurfaceID * surfaces; guint surface_num; - MixVideoFrame *cur_fame; //current input frame to be encoded; - MixVideoFrame *ref_fame; //reference frame - MixVideoFrame *rec_fame; //reconstructed frame; + MixVideoFrame *cur_frame; //current input frame to be encoded; + MixVideoFrame *ref_frame; //reference frame + MixVideoFrame *rec_frame; //reconstructed frame; + MixVideoFrame *last_frame; //last frame; guint basic_unit_size; //for rate control guint disable_deblocking_filter_idc; @@ -58,6 +60,7 @@ struct _MixVideoFormatEnc_H264 { gboolean is_intra; guint coded_buf_size; + guint coded_buf_index; /*< public > */ }; @@ 
-125,13 +128,15 @@ MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix); MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix); MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint * max_size); /* Local Methods */ +MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint *max_size); MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, MixBuffer * bufin, MixIOVec * iovout); MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed ( guint8 * bufin, guint bufin_len, guint8* bufout, guint *bufout_len); +MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix); + #endif /* __MIX_VIDEOFORMATENC_H264_H__ */ diff --git a/mix_video/src/mixvideoformatenc_mpeg4.c b/mix_video/src/mixvideoformatenc_mpeg4.c index e58976b..25b3b3e 100644 --- a/mix_video/src/mixvideoformatenc_mpeg4.c +++ b/mix_video/src/mixvideoformatenc_mpeg4.c @@ -44,15 +44,17 @@ static void mix_videoformatenc_mpeg4_init(MixVideoFormatEnc_MPEG4 * self) { self->encoded_frames = 0; self->pic_skipped = FALSE; self->is_intra = TRUE; - self->cur_fame = NULL; - self->ref_fame = NULL; - self->rec_fame = NULL; + self->cur_frame = NULL; + self->ref_frame = NULL; + self->rec_frame = NULL; self->ci_shared_surfaces = NULL; self->surfaces= NULL; self->surface_num = 0; + self->coded_buf_index = 0; parent->initialized = FALSE; + } static void mix_videoformatenc_mpeg4_class_init( @@ -570,7 +572,7 @@ MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix, "mix_surfacepool_initialize\n"); ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces); + self->surfaces, parent->ci_frame_num + numSurfaces, va_display); switch (ret) { @@ -626,7 +628,7 @@ MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix, VAEncCodedBufferType, self->coded_buf_size, // 1, NULL, - &self->coded_buf); + &self->coded_buf[0]); if (va_status != VA_STATUS_SUCCESS) { @@ -636,7 +638,24 @@ MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix, g_mutex_unlock(parent->objectlock); return MIX_RESULT_FAIL; } + + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &(self->coded_buf[1])); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + g_free (surfaces); + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + #ifdef SHOW_SRC Display * display = XOpenDisplay (NULL); @@ -776,24 +795,24 @@ MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix) { g_mutex_lock(mix->objectlock); /*unref the current source surface*/ - if (self->cur_fame != NULL) + if (self->cur_frame != NULL) { - mix_videoframe_unref (self->cur_fame); - self->cur_fame = NULL; + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; } /*unref the reconstructed surface*/ - if (self->rec_fame != NULL) + if (self->rec_frame != NULL) { - mix_videoframe_unref (self->rec_fame); - self->rec_fame = NULL; + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; } /*unref the reference surface*/ - if (self->ref_fame != NULL) + if (self->ref_frame != NULL) { - mix_videoframe_unref (self->ref_fame); - self->ref_fame = NULL; + mix_videoframe_unref (self->ref_frame); + 
self->ref_frame = NULL; } /*reset the properities*/ @@ -850,25 +869,25 @@ MIX_RESULT mix_videofmtenc_mpeg4_deinitialize(MixVideoFormatEnc *mix) { #if 0 /*unref the current source surface*/ - if (self->cur_fame != NULL) + if (self->cur_frame != NULL) { - mix_videoframe_unref (self->cur_fame); - self->cur_fame = NULL; + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; } #endif /*unref the reconstructed surface*/ - if (self->rec_fame != NULL) + if (self->rec_frame != NULL) { - mix_videoframe_unref (self->rec_fame); - self->rec_fame = NULL; + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; } /*unref the reference surface*/ - if (self->ref_fame != NULL) + if (self->ref_frame != NULL) { - mix_videoframe_unref (self->ref_fame); - self->ref_fame = NULL; + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; } LOG_V( "Release surfaces\n"); @@ -1047,9 +1066,9 @@ MIX_RESULT mix_videofmtenc_mpeg4_send_picture_parameter (MixVideoFormatEnc_MPEG4 parent = MIX_VIDEOFORMATENC(&(mix->parent)); /*set picture params for HW*/ - mpeg4_pic_param.reference_picture = mix->ref_fame->frame_id; - mpeg4_pic_param.reconstructed_picture = mix->rec_fame->frame_id; - mpeg4_pic_param.coded_buf = mix->coded_buf; + mpeg4_pic_param.reference_picture = mix->ref_frame->frame_id; + mpeg4_pic_param.reconstructed_picture = mix->rec_frame->frame_id; + mpeg4_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; mpeg4_pic_param.picture_width = parent->picture_width; mpeg4_pic_param.picture_height = parent->picture_height; mpeg4_pic_param.vop_time_increment= mix->encoded_frames; @@ -1065,6 +1084,8 @@ MIX_RESULT mix_videofmtenc_mpeg4_send_picture_parameter (MixVideoFormatEnc_MPEG4 mpeg4_pic_param.reconstructed_picture); LOG_I( "coded_buf = 0x%08x\n", mpeg4_pic_param.coded_buf); + LOG_I( "coded_buf_index = %d\n", + mix->coded_buf_index); LOG_I( "picture_width = %d\n", mpeg4_pic_param.picture_width); LOG_I( "picture_height = %d\n", @@ -1206,7 +1227,7 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, gulong surface = 0; guint16 width, height; - MixVideoFrame * tmp_fame; + MixVideoFrame * tmp_frame; guint8 *buf; if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { @@ -1256,9 +1277,9 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, LOG_V( "We are NOT in share buffer mode\n"); - if (mix->ref_fame == NULL) + if (mix->ref_frame == NULL) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_fame); + ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used { LOG_E( @@ -1267,9 +1288,9 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, } } - if (mix->rec_fame == NULL) + if (mix->rec_frame == NULL) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_fame); + ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); if (ret != MIX_RESULT_SUCCESS) { LOG_E( @@ -1279,12 +1300,12 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, } if (parent->need_display) { - mix->cur_fame = NULL; + mix->cur_frame = NULL; } - if (mix->cur_fame == NULL) + if (mix->cur_frame == NULL) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_fame); + ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); if (ret != MIX_RESULT_SUCCESS) { LOG_E( @@ -1305,7 +1326,7 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, LOG_V( "map source data to surface\n"); 
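/* Editor's note: "map source data to surface" follows the same pattern in
 * every encoder here: derive a VAImage from the target surface, map its
 * buffer, and copy the input row by row, since the surface pitch is usually
 * wider than the picture width. A condensed sketch of the luma-plane copy
 * (the vaDeriveImage call sits outside the visible hunks, so this is a
 * reconstruction, not a verbatim excerpt; error handling omitted):
 *
 *     VAImage image;
 *     guint8 *pvbuf, *dst_y;
 *     int i;
 *
 *     vaDeriveImage(va_display, surface, &image);
 *     vaMapBuffer(va_display, image.buf, (void **)&pvbuf);
 *     dst_y = pvbuf + image.offsets[0];
 *     for (i = 0; i < height; i++) {
 *         memcpy(dst_y, inbuf + i * width, width); // one row of Y
 *         dst_y += image.pitches[0];               // advance by surface pitch
 *     }
 *     vaUnmapBuffer(va_display, image.buf);
 *     vaDestroyImage(va_display, image.image_id);
 */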
- ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface); + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); if (ret != MIX_RESULT_SUCCESS) { LOG_E( @@ -1405,12 +1426,12 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, MixVideoFrame * frame = mix_videoframe_new(); - if (mix->ref_fame == NULL) + if (mix->ref_frame == NULL) { ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_fame, frame); + (parent->surfacepool, &mix->ref_frame, frame); if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used { LOG_E( @@ -1419,12 +1440,12 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, } } - if (mix->rec_fame == NULL) + if (mix->rec_frame == NULL) { ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_fame, frame); + (parent->surfacepool, &mix->rec_frame, frame); if (ret != MIX_RESULT_SUCCESS) { @@ -1435,10 +1456,10 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, } if (parent->need_display) { - mix->cur_fame = NULL; + mix->cur_frame = NULL; } - if (mix->cur_fame == NULL) + if (mix->cur_frame == NULL) { guint ci_idx; memcpy (&ci_idx, bufin->data, bufin->size); @@ -1458,7 +1479,7 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_fame, frame); + (parent->surfacepool, &mix->cur_frame, frame); if (ret != MIX_RESULT_SUCCESS) { @@ -1468,7 +1489,7 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, } } - ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface); + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); } @@ -1476,60 +1497,64 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, LOG_I( "va_context = 0x%08x\n",(guint)va_context); LOG_I( "surface = 0x%08x\n",(guint)surface); LOG_I( "va_display = 0x%08x\n",(guint)va_display); - + + va_status = vaBeginPicture(va_display, va_context, surface); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaBeginPicture\n"); return MIX_RESULT_FAIL; } - - LOG_V( "mix_videofmtenc_mpeg4_send_seq_params\n"); - - if (mix->encoded_frames == 0) { - mix_videofmtenc_mpeg4_send_seq_params (mix); - if (ret != MIX_RESULT_SUCCESS) + + ret = mix_videofmtenc_mpeg4_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); + return MIX_RESULT_FAIL; + } + + + if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed mix_videofmtenc_mpeg4_send_seq_params\n"); + LOG_E( "Failed vaEndPicture\n"); return MIX_RESULT_FAIL; } } - ret = mix_videofmtenc_mpeg4_send_picture_parameter (mix); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_mpeg4_send_picture_parameter\n"); - return MIX_RESULT_FAIL; + if (mix->encoded_frames == 0) { + mix->encoded_frames ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + + mix->last_frame = mix->cur_frame; + + + /* determine the picture type*/ + if ((mix->encoded_frames % parent->intra_period) == 0) { + mix->is_intra = TRUE; + 
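/* Editor's note: the last_coded_buf/coded_buf_index bookkeeping above is a
 * two-entry ping-pong: frame N is submitted into coded_buf[index] while the
 * bitstream of frame N-1 is drained from last_coded_buf, letting the CPU
 * readback overlap the hardware encode. The rotation, restated compactly
 * (same fields this patch adds; sketch only):
 *
 *     mix->last_coded_buf  = mix->coded_buf[mix->coded_buf_index];
 *     mix->coded_buf_index = (mix->coded_buf_index + 1) % 2;
 *     mix->last_frame      = mix->cur_frame;
 *     // later: vaSyncSurface(va_display, mix->last_frame->frame_id);
 *     //        vaMapBuffer(va_display, mix->last_coded_buf, (void **)&buf);
 */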
} else { + mix->is_intra = FALSE; + } + + tmp_frame = mix->rec_frame; + mix->rec_frame= mix->ref_frame; + mix->ref_frame = tmp_frame; + + } - ret = mix_videofmtenc_mpeg4_send_slice_parameter (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_mpeg4_send_slice_parameter\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "before vaEndPicture\n"); - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "vaSyncSurface\n"); - va_status = vaSyncSurface(va_display, surface); + va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaSyncSurface\n"); - return MIX_RESULT_FAIL; + //return MIX_RESULT_FAIL; } @@ -1537,7 +1562,7 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, "Start to get encoded data\n"); /*get encoded data from the VA buffer*/ - va_status = vaMapBuffer (va_display, mix->coded_buf, (void **)&buf); + va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaMapBuffer\n"); @@ -1563,7 +1588,7 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, LOG_I( "out size is = %d\n", iovout->data_size); - va_status = vaUnmapBuffer (va_display, mix->coded_buf); + va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaUnmapBuffer\n"); @@ -1572,24 +1597,40 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, LOG_V( "get encoded data done\n"); -#if 0 - if (parent->drawable) { - va_status = vaPutSurface(va_display, surface, (Drawable)parent->drawable, - 0,0, width, height, - 0,0, width, height, - NULL,0,0); - } - -#ifdef SHOW_SRC - else { - - va_status = vaPutSurface(va_display, surface, win, - 0,0, width, height, - 0,0, width, height, - NULL,0,0); - } -#endif //SHOW_SRC -#endif + if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + return MIX_RESULT_FAIL; + } + } + + if (mix->encoded_frames == 1) { + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaBeginPicture\n"); + return MIX_RESULT_FAIL; + } + + ret = mix_videofmtenc_mpeg4_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + return MIX_RESULT_FAIL; + } + + } VASurfaceStatus status; @@ -1603,41 +1644,48 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, } mix->pic_skipped = status & VASurfaceSkipped; - //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_fame); + //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame); if (parent->need_display) { - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_fame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_framemanager_enqueue\n"); - return MIX_RESULT_FAIL; - } - } - - + ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + return ret; + } + + ret = 
mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_framemanager_enqueue\n"); + return MIX_RESULT_FAIL; + } + } + /*update the reference surface and reconstructed surface */ if (!mix->pic_skipped) { - tmp_fame = mix->rec_fame; - mix->rec_fame= mix->ref_fame; - mix->ref_fame = tmp_fame; + tmp_frame = mix->rec_frame; + mix->rec_frame= mix->ref_frame; + mix->ref_frame = tmp_frame; } #if 0 - if (mix->ref_fame != NULL) - mix_videoframe_unref (mix->ref_fame); - mix->ref_fame = mix->rec_fame; + if (mix->ref_frame != NULL) + mix_videoframe_unref (mix->ref_frame); + mix->ref_frame = mix->rec_frame; - mix_videoframe_unref (mix->cur_fame); + mix_videoframe_unref (mix->cur_frame); #endif + + mix->encoded_frames ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + mix->last_frame = mix->cur_frame; if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_fame); - mix->cur_fame = NULL; + mix_videoframe_unref (mix->cur_frame); + mix->cur_frame = NULL; } - - mix->encoded_frames ++; } else { @@ -1711,3 +1759,45 @@ MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size ( return MIX_RESULT_SUCCESS; } + +MIX_RESULT mix_videofmtenc_mpeg4_send_encode_command (MixVideoFormatEnc_MPEG4 *mix) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + { + if (mix->encoded_frames == 0) { + mix_videofmtenc_mpeg4_send_seq_params (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_mpeg4_send_seq_params\n"); + return MIX_RESULT_FAIL; + } + } + + ret = mix_videofmtenc_mpeg4_send_picture_parameter (mix); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_mpeg4_send_picture_parameter\n"); + return MIX_RESULT_FAIL; + } + + ret = mix_videofmtenc_mpeg4_send_slice_parameter (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_mpeg4_send_slice_parameter\n"); + return MIX_RESULT_FAIL; + } + + } + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixvideoformatenc_mpeg4.h b/mix_video/src/mixvideoformatenc_mpeg4.h index dc26efe..4a7deb1 100644 --- a/mix_video/src/mixvideoformatenc_mpeg4.h +++ b/mix_video/src/mixvideoformatenc_mpeg4.h @@ -35,7 +35,8 @@ struct _MixVideoFormatEnc_MPEG4 { MixVideoFormatEnc parent; - VABufferID coded_buf; + VABufferID coded_buf[2]; + VABufferID last_coded_buf; VABufferID seq_param_buf; VABufferID pic_param_buf; VABufferID slice_param_buf; @@ -43,9 +44,11 @@ struct _MixVideoFormatEnc_MPEG4 { VASurfaceID * surfaces; guint surface_num; - MixVideoFrame *cur_fame; //current input frame to be encoded; - MixVideoFrame *ref_fame; //reference frame - MixVideoFrame *rec_fame; //reconstructed frame; + MixVideoFrame *cur_frame; //current input frame to be encoded; + MixVideoFrame *ref_frame; //reference frame + MixVideoFrame *rec_frame; //reconstructed frame; + MixVideoFrame *last_frame; //last frame; + guchar profile_and_level_indication; guint fixed_vop_time_increment; @@ -59,6 +62,8 @@ struct _MixVideoFormatEnc_MPEG4 { gboolean is_intra; guint coded_buf_size; + guint coded_buf_index; + /*< public > */ }; @@ -132,6 +137,7 @@ MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size (MixVideoFormatEnc *mi MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, MixBuffer * bufin, MixIOVec * iovout); +MIX_RESULT mix_videofmtenc_mpeg4_send_encode_command (MixVideoFormatEnc_MPEG4 *mix); #endif /*
__MIX_VIDEOFORMATENC_MPEG4_H__ */ diff --git a/mix_video/src/mixvideoformatenc_preview.c b/mix_video/src/mixvideoformatenc_preview.c index 17b9a4b..6aeeb9e 100644 --- a/mix_video/src/mixvideoformatenc_preview.c +++ b/mix_video/src/mixvideoformatenc_preview.c @@ -44,9 +44,9 @@ static void mix_videoformatenc_preview_init(MixVideoFormatEnc_Preview * self) { self->encoded_frames = 0; self->pic_skipped = FALSE; self->is_intra = TRUE; - self->cur_fame = NULL; - self->ref_fame = NULL; - self->rec_fame = NULL; + self->cur_frame = NULL; + self->ref_frame = NULL; + self->rec_frame = NULL; self->ci_shared_surfaces = NULL; self->surfaces= NULL; @@ -523,7 +523,7 @@ MIX_RESULT mix_videofmtenc_preview_initialize(MixVideoFormatEnc *mix, "mix_surfacepool_initialize\n"); ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces); + self->surfaces, parent->ci_frame_num + numSurfaces, va_display); switch (ret) { @@ -720,25 +720,25 @@ MIX_RESULT mix_videofmtenc_preview_flush(MixVideoFormatEnc *mix) { #if 0 /*unref the current source surface*/ - if (self->cur_fame != NULL) + if (self->cur_frame != NULL) { - mix_videoframe_unref (self->cur_fame); - self->cur_fame = NULL; + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; } #endif /*unref the reconstructed surface*/ - if (self->rec_fame != NULL) + if (self->rec_frame != NULL) { - mix_videoframe_unref (self->rec_fame); - self->rec_fame = NULL; + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; } /*unref the reference surface*/ - if (self->ref_fame != NULL) + if (self->ref_frame != NULL) { - mix_videoframe_unref (self->ref_fame); - self->ref_fame = NULL; + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; } /*reset the properities*/ @@ -795,25 +795,25 @@ MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix) { #if 0 /*unref the current source surface*/ - if (self->cur_fame != NULL) + if (self->cur_frame != NULL) { - mix_videoframe_unref (self->cur_fame); - self->cur_fame = NULL; + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; } #endif /*unref the reconstructed surface*/ - if (self->rec_fame != NULL) + if (self->rec_frame != NULL) { - mix_videoframe_unref (self->rec_fame); - self->rec_fame = NULL; + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; } /*unref the reference surface*/ - if (self->ref_fame != NULL) + if (self->ref_frame != NULL) { - mix_videoframe_unref (self->ref_fame); - self->ref_fame = NULL; + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; } LOG_V( "Release surfaces\n"); @@ -881,7 +881,7 @@ MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mi gulong surface = 0; guint16 width, height; - //MixVideoFrame * tmp_fame; + //MixVideoFrame * tmp_frame; //guint8 *buf; if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { @@ -921,9 +921,9 @@ MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mi LOG_V( "We are NOT in share buffer mode\n"); - if (mix->ref_fame == NULL) + if (mix->ref_frame == NULL) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_fame); + ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used { LOG_E( @@ -932,9 +932,9 @@ MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mi } } - if (mix->rec_fame == NULL) + if (mix->rec_frame == NULL) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_fame); + 
ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); if (ret != MIX_RESULT_SUCCESS) { LOG_E( @@ -944,12 +944,12 @@ MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mi } if (parent->need_display) { - mix->cur_fame = NULL; + mix->cur_frame = NULL; } - if (mix->cur_fame == NULL) + if (mix->cur_frame == NULL) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_fame); + ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); if (ret != MIX_RESULT_SUCCESS) { LOG_E( @@ -970,7 +970,7 @@ MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mi LOG_V( "map source data to surface\n"); - ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface); + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); if (ret != MIX_RESULT_SUCCESS) { LOG_E( @@ -1070,12 +1070,12 @@ MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mi MixVideoFrame * frame = mix_videoframe_new(); - if (mix->ref_fame == NULL) + if (mix->ref_frame == NULL) { ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_fame, frame); + (parent->surfacepool, &mix->ref_frame, frame); if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used { LOG_E( @@ -1084,12 +1084,12 @@ MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mi } } - if (mix->rec_fame == NULL) + if (mix->rec_frame == NULL) { ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_fame, frame); + (parent->surfacepool, &mix->rec_frame, frame); if (ret != MIX_RESULT_SUCCESS) { @@ -1099,13 +1099,13 @@ MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mi } } - //mix_videoframe_unref (mix->cur_fame); + //mix_videoframe_unref (mix->cur_frame); if (parent->need_display) { - mix->cur_fame = NULL; + mix->cur_frame = NULL; } - if (mix->cur_fame == NULL) + if (mix->cur_frame == NULL) { guint ci_idx; memcpy (&ci_idx, bufin->data, bufin->size); @@ -1125,7 +1125,7 @@ MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mi ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_fame, frame); + (parent->surfacepool, &mix->cur_frame, frame); if (ret != MIX_RESULT_SUCCESS) { @@ -1135,7 +1135,7 @@ MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mi } } - ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface); + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); } @@ -1156,7 +1156,14 @@ MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mi if (parent->need_display) { - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_fame); + + ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + return ret; + } + + ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); if (ret != MIX_RESULT_SUCCESS) { LOG_E( @@ -1167,8 +1174,8 @@ MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mi if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_fame); - mix->cur_fame = NULL; + mix_videoframe_unref (mix->cur_frame); + mix->cur_frame = NULL; } mix->encoded_frames ++; diff --git a/mix_video/src/mixvideoformatenc_preview.h 
b/mix_video/src/mixvideoformatenc_preview.h index dd404e2..45ae101 100644 --- a/mix_video/src/mixvideoformatenc_preview.h +++ b/mix_video/src/mixvideoformatenc_preview.h @@ -42,9 +42,9 @@ struct _MixVideoFormatEnc_Preview { VASurfaceID * surfaces; guint surface_num; - MixVideoFrame *cur_fame; //current input frame to be encoded; - MixVideoFrame *ref_fame; //reference frame - MixVideoFrame *rec_fame; //reconstructed frame; + MixVideoFrame *cur_frame; //current input frame to be encoded; + MixVideoFrame *ref_frame; //reference frame + MixVideoFrame *rec_frame; //reconstructed frame; guint basic_unit_size; //for rate control guint disable_deblocking_filter_idc; diff --git a/mix_video/src/mixvideoframe.c b/mix_video/src/mixvideoframe.c index 2bea5d0..64d7831 100644 --- a/mix_video/src/mixvideoframe.c +++ b/mix_video/src/mixvideoframe.c @@ -8,9 +8,24 @@ /** * SECTION:mixvideoframe - * @short_description: VideoConfig parameters - * - * A data object which stores videoconfig specific parameters. + * @short_description: MI-X Video Frame Object + * + * + * The MixVideoFrame object will be created by + * MixVideo and provided to the MMF/App in the + * MixVideo mix_video_get_frame() function. + * + * + * mix_video_release_frame() must be used + * to release frame object returned from + * mix_video_get_frame(). Caller must not + * use mix_videoframe_ref() or mix_videoframe_unref() + * or adjust the reference count directly in any way. + * This object can be supplied in the mix_video_render() + * function to render the associated video frame. + * The MMF/App can release this object when it no longer + * needs to display/re-display this frame. + * */ @@ -41,7 +56,6 @@ static void mix_videoframe_init(MixVideoFrame * self) { self->frame_id = VA_INVALID_SURFACE; self->timestamp = 0; self->discontinuity = FALSE; - self->frame_structure = VA_FRAME_PICTURE; MixVideoFramePrivate *priv = MIX_VIDEOFRAME_GET_PRIVATE(self); self->reserved1 = priv; @@ -55,9 +69,12 @@ static void mix_videoframe_init(MixVideoFrame * self) { /* set stuff for skipped frames */ priv -> is_skipped = FALSE; priv -> real_frame = NULL; + priv -> sync_flag = FALSE; + priv -> frame_structure = VA_FRAME_PICTURE; - g_static_rec_mutex_init (&priv -> lock); + priv -> va_display = NULL; + g_static_rec_mutex_init (&priv -> lock); } static void mix_videoframe_class_init(MixVideoFrameClass * klass) { @@ -137,6 +154,8 @@ void mix_videoframe_unref(MixVideoFrame * obj) { g_static_rec_mutex_unlock (&priv->lock); return; } + + mix_videoframe_reset(obj); mix_surfacepool_put(pool, obj); } @@ -198,7 +217,6 @@ gboolean mix_videoframe_copy(MixParams * target, const MixParams * src) { this_target->frame_id = this_src->frame_id; this_target->timestamp = this_src->timestamp; this_target->discontinuity = this_src->discontinuity; - this_target->frame_structure = this_src->frame_structure; // Now chainup base class if (parent_class->copy) { @@ -233,8 +251,7 @@ gboolean mix_videoframe_equal(MixParams * first, MixParams * second) { /* TODO: add comparison for other properties */ if (this_first->frame_id == this_second->frame_id && this_first->timestamp == this_second->timestamp - && this_first->discontinuity == this_second->discontinuity - && this_first->frame_structure == this_second->frame_structure) { + && this_first->discontinuity == this_second->discontinuity) { // members within this scope equal. chaining up. 
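/* Editor's note: frame_structure is no longer compared (or copied) here
 * because this patch moves it into MixVideoFramePrivate. What remains is the
 * standard MixParams virtual-method shape: compare your own public members,
 * then chain up so the parent class handles inherited ones. Sketched pattern,
 * not a verbatim excerpt:
 *
 *     // given MixVideoFrame *f, *s cast from the two MixParams arguments
 *     if (f->frame_id != s->frame_id)
 *         return FALSE;                        // own members differ
 *     klass = MIX_PARAMS_CLASS(parent_class);  // then chain up
 *     return klass->equal ? klass->equal(first, second) : TRUE;
 */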
MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); if (klass->equal) @@ -315,14 +332,16 @@ MIX_RESULT mix_videoframe_get_discontinuity(MixVideoFrame * obj, MIX_RESULT mix_videoframe_set_frame_structure(MixVideoFrame * obj, guint32 frame_structure) { MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->frame_structure = frame_structure; + MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); + priv->frame_structure = frame_structure; return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, guint32* frame_structure) { MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_structure); - *frame_structure = obj->frame_structure; + MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); + *frame_structure = priv->frame_structure; return MIX_RESULT_SUCCESS; } @@ -356,6 +375,7 @@ MIX_RESULT mix_videoframe_get_frame_type(MixVideoFrame *obj, MIX_RESULT mix_videoframe_set_is_skipped(MixVideoFrame *obj, gboolean is_skipped) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); VIDEOFRAME_PRIVATE(obj) -> is_skipped = is_skipped; return MIX_RESULT_SUCCESS; @@ -374,6 +394,7 @@ MIX_RESULT mix_videoframe_get_is_skipped(MixVideoFrame *obj, MIX_RESULT mix_videoframe_set_real_frame(MixVideoFrame *obj, MixVideoFrame *real) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); VIDEOFRAME_PRIVATE(obj) -> real_frame = real; return MIX_RESULT_SUCCESS; @@ -389,3 +410,70 @@ MIX_RESULT mix_videoframe_get_real_frame(MixVideoFrame *obj, return MIX_RESULT_SUCCESS; } +MIX_RESULT mix_videoframe_reset(MixVideoFrame *obj) { + + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); + + obj->timestamp = 0; + obj->discontinuity = FALSE; + + priv -> is_skipped = FALSE; + priv -> real_frame = NULL; + priv -> sync_flag = FALSE; + priv -> frame_structure = VA_FRAME_PICTURE; + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoframe_set_sync_flag(MixVideoFrame *obj, gboolean sync_flag) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); + + priv -> sync_flag = sync_flag; + if (priv->real_frame && priv->real_frame != obj) { + mix_videoframe_set_sync_flag(priv->real_frame, sync_flag); + } + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_sync_flag(MixVideoFrame *obj, gboolean *sync_flag) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, sync_flag); + MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); + if (priv->real_frame && priv->real_frame != obj) { + return mix_videoframe_get_sync_flag(priv->real_frame, sync_flag); + } else { + *sync_flag = priv -> sync_flag; + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_vadisplay(MixVideoFrame * obj, void *va_display) { + + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); + + priv -> va_display = va_display; + if (priv->real_frame && priv->real_frame != obj) { + mix_videoframe_set_vadisplay(priv->real_frame, va_display); + } + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_vadisplay(MixVideoFrame * obj, void **va_display) { + + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, va_display); + MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); + + if (priv->real_frame && priv->real_frame != obj) { + return mix_videoframe_get_vadisplay(priv->real_frame, va_display); + } else { + *va_display = priv -> va_display; + } + + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoframe.h b/mix_video/src/mixvideoframe.h index 02338dd..1178d25 100644 --- a/mix_video/src/mixvideoframe.h +++ 
b/mix_video/src/mixvideoframe.h @@ -68,15 +68,36 @@ struct _MixVideoFrame { MixParams parent; /*< public > */ + + /* ID associated with the decoded frame */ gulong frame_id; + + /* ID associated with the CI frame + * (used for encode only) */ guint ci_frame_idx; + + /* 64 bit timestamp. For decode, + * this is preserved from the corresponding + * MixVideoDecodeParams field. For encode, + * this is created during encoding. */ guint64 timestamp; + + /* Flag indicating whether there + * is a discontinuity. For decode, + * this is preserved from the corresponding + * MixVideoDecodeParams field. */ gboolean discontinuity; - guint32 frame_structure; // 0: frame, 1: top field, 2: bottom field + /* Reserved for future use */ void *reserved1; + + /* Reserved for future use */ void *reserved2; + + /* Reserved for future use */ void *reserved3; + + /* Reserved for future use */ void *reserved4; }; @@ -110,7 +131,7 @@ MixVideoFrame *mix_videoframe_new(void); /** * mix_videoframe_ref: * @mix: object to add reference - * @returns: the MixVideoFrame instance where reference count has been increased. + * @returns: the #MixVideoFrame instance where reference count has been increased. * * Add reference count. */ @@ -126,19 +147,91 @@ void mix_videoframe_unref(MixVideoFrame * obj); /* Class Methods */ +/** + * mix_videoframe_set_frame_id: + * @obj: #MixVideoFrame object + * @frame_id: ID associated with the decoded frame + * @returns: Common Video Error Return Codes + * + * Set Frame ID + */ MIX_RESULT mix_videoframe_set_frame_id(MixVideoFrame * obj, gulong frame_id); + +/** + * mix_videoframe_get_frame_id: + * @obj: #MixVideoFrame object + * @frame_id: frame ID to be returned + * @returns: Common Video Error Return Codes + * + * Get Frame ID + */ MIX_RESULT mix_videoframe_get_frame_id(MixVideoFrame * obj, gulong * frame_id); +/** + * mix_videoframe_set_ci_frame_idx: + * @obj: #MixVideoFrame object + * @ci_frame_idx: ID associated with the CI frame (used for encode only) + * @returns: Common Video Error Return Codes + * + * Set CI Frame ID + */ MIX_RESULT mix_videoframe_set_ci_frame_idx(MixVideoFrame * obj, guint ci_frame_idx); + +/** + * mix_videoframe_get_ci_frame_idx: + * @obj: #MixVideoFrame object + * @ci_frame_idx: CI Frame ID to be returned + * @returns: Common Video Error Return Codes + * + * Get CI Frame ID + */ MIX_RESULT mix_videoframe_get_ci_frame_idx(MixVideoFrame * obj, guint * ci_frame_idx); +/** + * mix_videoframe_set_timestamp: + * @obj: #MixVideoFrame object + * @timestamp: Frame timestamp + * @returns: Common Video Error Return Codes + * + * Set Frame timestamp + */ MIX_RESULT mix_videoframe_set_timestamp(MixVideoFrame * obj, guint64 timestamp); + +/** + * mix_videoframe_get_timestamp: + * @obj: #MixVideoFrame object + * @timestamp: Frame timestamp to be returned + * @returns: Common Video Error Return Codes + * + * Get Frame timestamp + */ MIX_RESULT mix_videoframe_get_timestamp(MixVideoFrame * obj, guint64 * timestamp); +/** + * mix_videoframe_set_discontinuity: + * @obj: #MixVideoFrame object + * @discontinuity: Discontinuity flag + * @returns: Common Video Error Return Codes + * + * Get discontinuity flag + */ MIX_RESULT mix_videoframe_set_discontinuity(MixVideoFrame * obj, gboolean discontinuity); + +/** + * mix_videoframe_get_discontinuity: + * @obj: #MixVideoFrame object + * @discontinuity: Discontinuity flag to be returned + * @returns: Common Video Error Return Codes + * + * Get discontinuity flag + */ MIX_RESULT mix_videoframe_get_discontinuity(MixVideoFrame * obj, gboolean * 
discontinuity); -MIX_RESULT mix_videoframe_set_frame_structure(MixVideoFrame * obj, guint32 frame_structure); -MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, guint32* frame_structure); +/** + * TODO: Add document the following 2 functions + * + */ +MIX_RESULT mix_videoframe_set_vadisplay(MixVideoFrame * obj, void *va_display); +MIX_RESULT mix_videoframe_get_vadisplay(MixVideoFrame * obj, void **va_display); #endif /* __MIX_VIDEOFRAME_H__ */ diff --git a/mix_video/src/mixvideoframe_private.h b/mix_video/src/mixvideoframe_private.h index 5d4b894..d49fe38 100644 --- a/mix_video/src/mixvideoframe_private.h +++ b/mix_video/src/mixvideoframe_private.h @@ -30,6 +30,9 @@ struct _MixVideoFramePrivate gboolean is_skipped; MixVideoFrame *real_frame; GStaticRecMutex lock; + gboolean sync_flag; + guint32 frame_structure; // 0: frame, 1: top field, 2: bottom field + void *va_display; }; /** @@ -64,5 +67,20 @@ mix_videoframe_set_real_frame (MixVideoFrame *obj, MixVideoFrame *real); MIX_RESULT mix_videoframe_get_real_frame (MixVideoFrame *obj, MixVideoFrame **real); +MIX_RESULT +mix_videoframe_reset(MixVideoFrame *obj); + +MIX_RESULT +mix_videoframe_set_sync_flag(MixVideoFrame *obj, gboolean sync_flag); + +MIX_RESULT +mix_videoframe_get_sync_flag(MixVideoFrame *obj, gboolean *sync_flag); + +MIX_RESULT +mix_videoframe_set_frame_structure(MixVideoFrame * obj, guint32 frame_structure); + +MIX_RESULT +mix_videoframe_get_frame_structure(MixVideoFrame * obj, guint32* frame_structure); + #endif /* __MIX_VIDEOFRAME_PRIVATE_H__ */ diff --git a/mix_video/src/mixvideorenderparams.c b/mix_video/src/mixvideorenderparams.c index 0dc8be7..12a711c 100644 --- a/mix_video/src/mixvideorenderparams.c +++ b/mix_video/src/mixvideorenderparams.c @@ -260,7 +260,7 @@ MIX_RESULT mix_videorenderparams_set_display(MixVideoRenderParams * obj, /* dup */ if (display) { - obj->display = mix_display_dup(display); + obj->display = mix_display_ref(display); } return MIX_RESULT_SUCCESS; @@ -273,7 +273,7 @@ MIX_RESULT mix_videorenderparams_get_display(MixVideoRenderParams * obj, /* dup? 
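(Editor's note: the dup-to-ref change in this file means render params now
share one reference-counted MixDisplay instead of deep-copying it, so getter
and setter hand back the same object and the same underlying native display
handle. Assumed usage, inferred from the ref/unref naming convention used
throughout this library rather than from documented API:

    MixDisplay *d = NULL;
    mix_videorenderparams_get_display(params, &d); // takes a reference
    // ... use d ...
    mix_display_unref(d);                          // release when done
)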
From 747c064f02ed298956e750bb5d5eb908f4ba7088 Mon Sep 17 00:00:00 2001 From: Tao Tao Date: Thu, 3 Jun 2010 17:05:22 -0700 Subject: fix frame reordering for H.264 B frames Change-Id: I8d9a2f618f0684efdf5655ae3996bea7cdb8c587 --- mix_video/src/Android.mk | 8 +- mix_video/src/mixframemanager.c | 1343 ++++++++++++++++++++------------- mix_video/src/mixframemanager.h | 2 + mix_video/src/mixvideoformat_h264.c | 7 + mix_video/src/mixvideoframe.c | 19 + mix_video/src/mixvideoframe.h | 5 + mix_video/src/mixvideoframe_private.h | 12 +- 7 files changed, 858 insertions(+), 538 deletions(-) diff --git a/mix_video/src/Android.mk b/mix_video/src/Android.mk index d60919c..0e6cb3d 100644 --- a/mix_video/src/Android.mk +++ b/mix_video/src/Android.mk @@ -63,10 +63,10 @@ LOCAL_SHARED_LIBRARIES := \ libmixvbp \ libva -ifeq ($(strip $(MIXVIDEO_LOG_ENABLE)),true) -LOCAL_CFLAGS += -DMIX_LOG_ENABLE -LOCAL_SHARED_LIBRARIES += liblog -endif +#ifeq ($(strip $(MIXVIDEO_LOG_ENABLE)),true) +#LOCAL_CFLAGS += -DMIX_LOG_ENABLE +#LOCAL_SHARED_LIBRARIES += liblog +#endif LOCAL_COPY_HEADERS_TO := libmixvideo diff --git a/mix_video/src/mixframemanager.c b/mix_video/src/mixframemanager.c index 4cb24e8..f67137f 100644 --- a/mix_video/src/mixframemanager.c +++ b/mix_video/src/mixframemanager.c @@ -1,10 +1,10 @@ /* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise.
Any license under such intellectual property rights must be express and approved by Intel in writing. + */ #include #include "mixvideolog.h" @@ -20,756 +20,1035 @@ static void mix_framemanager_finalize(GObject * obj); G_DEFINE_TYPE( MixFrameManager, mix_framemanager, G_TYPE_OBJECT); static void mix_framemanager_init(MixFrameManager * self) { - /* TODO: public member initialization */ + /* TODO: public member initialization */ - /* TODO: private member initialization */ + /* TODO: private member initialization */ - if (!g_thread_supported()) { - g_thread_init(NULL); - } + if (!g_thread_supported()) { + g_thread_init(NULL); + } - self->lock = g_mutex_new(); + self->lock = g_mutex_new(); - self->flushing = FALSE; - self->eos = FALSE; - self->frame_array = NULL; - self->frame_queue = NULL; - self->initialized = FALSE; + self->flushing = FALSE; + self->eos = FALSE; + self->frame_array = NULL; + self->frame_queue = NULL; + self->initialized = FALSE; - self->mode = MIX_FRAMEORDER_MODE_DISPLAYORDER; - self->framerate_numerator = 30; - self->framerate_denominator = 1; + self->mode = MIX_FRAMEORDER_MODE_DISPLAYORDER; + self->framerate_numerator = 30; + self->framerate_denominator = 1; - self->is_first_frame = TRUE; + self->is_first_frame = TRUE; - /* for vc1 in asf */ - self->p_frame = NULL; - self->prev_timestamp = 0; + /* for vc1 in asf */ + self->p_frame = NULL; + self->prev_timestamp = 0; } static void mix_framemanager_class_init(MixFrameManagerClass * klass) { - GObjectClass *gobject_class = (GObjectClass *) klass; + GObjectClass *gobject_class = (GObjectClass *) klass; - /* parent class for later use */ - parent_class = g_type_class_peek_parent(klass); + /* parent class for later use */ + parent_class = g_type_class_peek_parent(klass); - gobject_class->finalize = mix_framemanager_finalize; + gobject_class->finalize = mix_framemanager_finalize; } MixFrameManager *mix_framemanager_new(void) { - MixFrameManager *ret = g_object_new(MIX_TYPE_FRAMEMANAGER, NULL); + MixFrameManager *ret = g_object_new(MIX_TYPE_FRAMEMANAGER, NULL); - return ret; + return ret; } void mix_framemanager_finalize(GObject * obj) { - /* clean up here. */ + /* clean up here. 
*/ - MixFrameManager *fm = MIX_FRAMEMANAGER(obj); + MixFrameManager *fm = MIX_FRAMEMANAGER(obj); - /* cleanup here */ - mix_framemanager_deinitialize(fm); + /* cleanup here */ + mix_framemanager_deinitialize(fm); - if (fm->lock) { - g_mutex_free(fm->lock); - fm->lock = NULL; - } + if (fm->lock) { + g_mutex_free(fm->lock); + fm->lock = NULL; + } - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } + /* Chain up parent */ + if (parent_class->finalize) { + parent_class->finalize(obj); + } } MixFrameManager *mix_framemanager_ref(MixFrameManager * fm) { - return (MixFrameManager *) g_object_ref(G_OBJECT(fm)); + return (MixFrameManager *) g_object_ref(G_OBJECT(fm)); } /* MixFrameManager class methods */ MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, - MixFrameOrderMode mode, gint framerate_numerator, - gint framerate_denominator, gboolean timebased_ordering) { + MixFrameOrderMode mode, gint framerate_numerator, + gint framerate_denominator, gboolean timebased_ordering) { - MIX_RESULT ret = MIX_RESULT_FAIL; + MIX_RESULT ret = MIX_RESULT_FAIL; - if (!MIX_IS_FRAMEMANAGER(fm) || (mode != MIX_FRAMEORDER_MODE_DISPLAYORDER - && mode != MIX_FRAMEORDER_MODE_DECODEORDER) || framerate_numerator - <= 0 || framerate_denominator <= 0) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm) || (mode != MIX_FRAMEORDER_MODE_DISPLAYORDER + && mode != MIX_FRAMEORDER_MODE_DECODEORDER) || framerate_numerator + <= 0 || framerate_denominator <= 0) { + return MIX_RESULT_INVALID_PARAM; + } - if (fm->initialized) { - return MIX_RESULT_ALREADY_INIT; - } + if (fm->initialized) { + return MIX_RESULT_ALREADY_INIT; + } - if (!g_thread_supported()) { - g_thread_init(NULL); - } + if (!g_thread_supported()) { + g_thread_init(NULL); + } - ret = MIX_RESULT_NO_MEMORY; + ret = MIX_RESULT_NO_MEMORY; + if (!fm->lock) { + fm->lock = g_mutex_new(); if (!fm->lock) { - fm->lock = g_mutex_new(); - if (!fm->lock) { - goto cleanup; - } + goto cleanup; } + } - if (mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) { - fm->frame_array = g_ptr_array_sized_new(INITIAL_FRAME_ARRAY_SIZE); - if (!fm->frame_array) { - goto cleanup; - } + if (mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) { + fm->frame_array = g_ptr_array_sized_new(INITIAL_FRAME_ARRAY_SIZE); + if (!fm->frame_array) { + goto cleanup; } + } - fm->frame_queue = g_queue_new(); - if (!fm->frame_queue) { - goto cleanup; - } + fm->frame_queue = g_queue_new(); + if (!fm->frame_queue) { + goto cleanup; + } - fm->framerate_numerator = framerate_numerator; - fm->framerate_denominator = framerate_denominator; - fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND - / fm->framerate_numerator; + fm->framerate_numerator = framerate_numerator; + fm->framerate_denominator = framerate_denominator; + fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND + / fm->framerate_numerator; - fm->mode = mode; + fm->mode = mode; - fm->timebased_ordering = timebased_ordering; + fm->timebased_ordering = timebased_ordering; - fm->initialized = TRUE; + fm->initialized = TRUE; - ret = MIX_RESULT_SUCCESS; + ret = MIX_RESULT_SUCCESS; - cleanup: +cleanup: - if (ret != MIX_RESULT_SUCCESS) { - if (fm->frame_array) { - g_ptr_array_free(fm->frame_array, TRUE); - fm->frame_array = NULL; - } - if (fm->frame_queue) { - g_queue_free(fm->frame_queue); - fm->frame_queue = NULL; - } + if (ret != MIX_RESULT_SUCCESS) { + if (fm->frame_array) { + g_ptr_array_free(fm->frame_array, TRUE); + fm->frame_array = NULL; + } + if (fm->frame_queue) { + 
g_queue_free(fm->frame_queue); + fm->frame_queue = NULL; } - return ret; + } + return ret; } MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm) { - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->lock) { - return MIX_RESULT_FAIL; - } + if (!fm->lock) { + return MIX_RESULT_FAIL; + } - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } - mix_framemanager_flush(fm); + mix_framemanager_flush(fm); - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); - if (fm->frame_array) { - g_ptr_array_free(fm->frame_array, TRUE); - fm->frame_array = NULL; - } - if (fm->frame_queue) { - g_queue_free(fm->frame_queue); - fm->frame_queue = NULL; - } + if (fm->frame_array) { + g_ptr_array_free(fm->frame_array, TRUE); + fm->frame_array = NULL; + } + if (fm->frame_queue) { + g_queue_free(fm->frame_queue); + fm->frame_queue = NULL; + } - fm->initialized = FALSE; + fm->initialized = FALSE; - g_mutex_unlock(fm->lock); + g_mutex_unlock(fm->lock); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_set_framerate(MixFrameManager *fm, - gint framerate_numerator, gint framerate_denominator) { + gint framerate_numerator, gint framerate_denominator) { - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->lock) { - return MIX_RESULT_FAIL; - } + if (!fm->lock) { + return MIX_RESULT_FAIL; + } - if (framerate_numerator <= 0 || framerate_denominator <= 0) { - return MIX_RESULT_INVALID_PARAM; - } + if (framerate_numerator <= 0 || framerate_denominator <= 0) { + return MIX_RESULT_INVALID_PARAM; + } - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); - fm->framerate_numerator = framerate_numerator; - fm->framerate_denominator = framerate_denominator; - fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND - / fm->framerate_numerator; + fm->framerate_numerator = framerate_numerator; + fm->framerate_denominator = framerate_denominator; + fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND + / fm->framerate_numerator; - g_mutex_unlock(fm->lock); + g_mutex_unlock(fm->lock); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_get_framerate(MixFrameManager *fm, - gint *framerate_numerator, gint *framerate_denominator) { + gint *framerate_numerator, gint *framerate_denominator) { - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->lock) { - return MIX_RESULT_FAIL; - } + if (!fm->lock) { + return MIX_RESULT_FAIL; + } - if (!framerate_numerator || !framerate_denominator) { - return MIX_RESULT_INVALID_PARAM; - } + if (!framerate_numerator || !framerate_denominator) { + return MIX_RESULT_INVALID_PARAM; + } - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); - *framerate_numerator = fm->framerate_numerator; - *framerate_denominator = fm->framerate_denominator; + *framerate_numerator = fm->framerate_numerator; + *framerate_denominator = fm->framerate_denominator; - g_mutex_unlock(fm->lock); + g_mutex_unlock(fm->lock); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_get_frame_order_mode(MixFrameManager *fm, - MixFrameOrderMode *mode) { + MixFrameOrderMode *mode) { - if (!MIX_IS_FRAMEMANAGER(fm)) { - return 
MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->lock) { - return MIX_RESULT_FAIL; - } + if (!fm->lock) { + return MIX_RESULT_FAIL; + } - if (!mode) { - return MIX_RESULT_INVALID_PARAM; - } + if (!mode) { + return MIX_RESULT_INVALID_PARAM; + } - /* no need to use lock */ - *mode = fm->mode; + /* no need to use lock */ + *mode = fm->mode; - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) { - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); - /* flush frame_array */ - if (fm->frame_array) { - guint len = fm->frame_array->len; - if (len) { - guint idx = 0; - MixVideoFrame *frame = NULL; - for (idx = 0; idx < len; idx++) { - frame = (MixVideoFrame *) g_ptr_array_index(fm->frame_array, - idx); - if (frame) { - mix_videoframe_unref(frame); - g_ptr_array_index(fm->frame_array, idx) = NULL; - } - } - /* g_ptr_array_remove_range(fm->frame_array, 0, len); */ + /* flush frame_array */ + if (fm->frame_array) { + guint len = fm->frame_array->len; + if (len) { + guint idx = 0; + MixVideoFrame *frame = NULL; + for (idx = 0; idx < len; idx++) { + frame = (MixVideoFrame *) g_ptr_array_index(fm->frame_array, + idx); + if (frame) { + mix_videoframe_unref(frame); + g_ptr_array_index(fm->frame_array, idx) = NULL; } + } + /* g_ptr_array_remove_range(fm->frame_array, 0, len); */ } + } - if (fm->frame_queue) { - guint len = fm->frame_queue->length; - if (len) { - MixVideoFrame *frame = NULL; - while ((frame = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue))) { - mix_videoframe_unref(frame); - } - } + if (fm->frame_queue) { + guint len = fm->frame_queue->length; + if (len) { + MixVideoFrame *frame = NULL; + while ((frame = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue))) { + mix_videoframe_unref(frame); + } } + } - if(fm->p_frame) { - mix_videoframe_unref(fm->p_frame); - fm->p_frame = NULL; - } - fm->prev_timestamp = 0; + if(fm->p_frame) { + mix_videoframe_unref(fm->p_frame); + fm->p_frame = NULL; + } + fm->prev_timestamp = 0; - fm->eos = FALSE; + fm->eos = FALSE; - fm->is_first_frame = TRUE; + fm->is_first_frame = TRUE; - g_mutex_unlock(fm->lock); + g_mutex_unlock(fm->lock); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MixVideoFrame *get_expected_frame_from_array(GPtrArray *array, - guint64 expected, guint64 tolerance, guint64 *frametimestamp) { + guint64 expected, guint64 tolerance, guint64 *frametimestamp) { - guint idx = 0; - guint len = 0; - guint64 timestamp = 0; - guint64 lowest_timestamp = (guint64)-1; - guint lowest_timestamp_idx = -1; - - MixVideoFrame *frame = NULL; + guint idx = 0; + guint len = 0; + guint64 timestamp = 0; + guint64 lowest_timestamp = (guint64)-1; + guint lowest_timestamp_idx = -1; - if (!array || !expected || !tolerance || !frametimestamp || expected < tolerance) { + MixVideoFrame *frame = NULL; - return NULL; - } + if (!array || !expected || !tolerance || !frametimestamp || expected < tolerance) { - len = array->len; - if (!len) { - return NULL; - } + return NULL; + } - for (idx = 0; idx < len; idx++) { - MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, idx); - if (_frame) { + len = array->len; + if (!len) { + return NULL; + } - if 
(mix_videoframe_get_timestamp(_frame, ×tamp) - != MIX_RESULT_SUCCESS) { + for (idx = 0; idx < len; idx++) { + MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, idx); + if (_frame) { - /* - * Oops, this shall never happen! - * In case it heppens, release the frame! - */ + if (mix_videoframe_get_timestamp(_frame, ×tamp) + != MIX_RESULT_SUCCESS) { - mix_videoframe_unref(_frame); + /* + * Oops, this shall never happen! + * In case it heppens, release the frame! + */ - /* make an available slot */ - g_ptr_array_index(array, idx) = NULL; + mix_videoframe_unref(_frame); - break; - } - - if (lowest_timestamp > timestamp) - { - lowest_timestamp = timestamp; - lowest_timestamp_idx = idx; - } - } - } - - if (lowest_timestamp == (guint64)-1) - { - return NULL; + /* make an available slot */ + g_ptr_array_index(array, idx) = NULL; + + break; + } + + if (lowest_timestamp > timestamp) + { + lowest_timestamp = timestamp; + lowest_timestamp_idx = idx; + } } - + } + + if (lowest_timestamp == (guint64)-1) + { + return NULL; + } + + + /* check if this is the expected next frame */ + if (lowest_timestamp <= expected + tolerance) + { + MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, lowest_timestamp_idx); + /* make this slot available */ + g_ptr_array_index(array, lowest_timestamp_idx) = NULL; + + *frametimestamp = lowest_timestamp; + frame = _frame; + } + + return frame; +} + + +MixVideoFrame *get_expected_frame_from_array_DO(GPtrArray *array, + guint32 expected, guint32 *framedisplayorder) { + + guint idx = 0; + guint len = 0; + guint32 displayorder = 0; + guint32 lowest_displayorder = (guint32)-1; + guint lowest_displayorder_idx = -1; + + MixVideoFrame *frame = NULL; + + if (!array || !expected || !framedisplayorder) { - /* check if this is the expected next frame */ - if (lowest_timestamp <= expected + tolerance) - { - MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, lowest_timestamp_idx); - /* make this slot available */ - g_ptr_array_index(array, lowest_timestamp_idx) = NULL; + return NULL; + } - *frametimestamp = lowest_timestamp; - frame = _frame; + len = array->len; + if (!len) { + return NULL; + } + + for (idx = 0; idx < len; idx++) { + MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, idx); + if (_frame) { + + if (mix_videoframe_get_displayorder(_frame, &displayorder) + != MIX_RESULT_SUCCESS) { + + /* + * Oops, this shall never happen! + * In case it heppens, release the frame! + */ + + mix_videoframe_unref(_frame); + + /* make an available slot */ + g_ptr_array_index(array, idx) = NULL; + + break; + } + + if (lowest_displayorder > displayorder) + { + lowest_displayorder = displayorder; + lowest_displayorder_idx = idx; + } } - - return frame; + } + + if (lowest_displayorder == (guint32)-1) + { + return NULL; + } + + + /* check if this is the expected next frame */ + if (lowest_displayorder <= expected) + { + MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, lowest_displayorder_idx); + /* make this slot available */ + g_ptr_array_index(array, lowest_displayorder_idx) = NULL; + + *framedisplayorder = lowest_displayorder; + frame = _frame; + } + + return frame; } + void add_frame_into_array(GPtrArray *array, MixVideoFrame *mvf) { - gboolean found_slot = FALSE; - guint len = 0; + gboolean found_slot = FALSE; + guint len = 0; - if (!array || !mvf) { - return; - } + if (!array || !mvf) { + return; + } - /* do we have slot for this frame? 
*/ - len = array->len; - if (len) { - guint idx = 0; - gpointer frame = NULL; - for (idx = 0; idx < len; idx++) { - frame = g_ptr_array_index(array, idx); - if (!frame) { - found_slot = TRUE; - g_ptr_array_index(array, idx) = (gpointer) mvf; - break; - } - } + /* do we have slot for this frame? */ + len = array->len; + if (len) { + guint idx = 0; + gpointer frame = NULL; + for (idx = 0; idx < len; idx++) { + frame = g_ptr_array_index(array, idx); + if (!frame) { + found_slot = TRUE; + g_ptr_array_index(array, idx) = (gpointer) mvf; + break; + } } + } - if (!found_slot) { - g_ptr_array_add(array, (gpointer) mvf); - } + if (!found_slot) { + g_ptr_array_add(array, (gpointer) mvf); + } } -MIX_RESULT mix_framemanager_timestamp_based_enqueue(MixFrameManager *fm, - MixVideoFrame *mvf) { + +gint frame_sorting_func_DO(gconstpointer a, gconstpointer b) { + + MixVideoFrame *fa = *((MixVideoFrame **) a); + MixVideoFrame *fb = *((MixVideoFrame **) b); + + guint32 ta, tb; + + if (!fa && !fb) { + return 0; + } + + if (fa && !fb) { + return 1; + } + + if (!fa && fb) { + return -1; + } + + mix_videoframe_get_displayorder(fa, &ta); + mix_videoframe_get_displayorder(fb, &tb); + + if (ta > tb) { + return 1; + } + + if (ta == tb) { + return 0; + } + + return -1; +} + + +MIX_RESULT mix_framemanager_displayorder_based_enqueue(MixFrameManager *fm, + MixVideoFrame *mvf) { + /* + * display order mode. + * + * if this is the first frame, we always push it into + * output queue, if it is not, check if it is the one + * expected, if yes, push it into the output queue. + * if not, put it into waiting list. + * + * while the expected frame is pushed into output queue, + * the expected next timestamp is also updated. with this + * updated expected next timestamp, we search for expected + * frame from the waiting list, if found, repeat the process. + * + */ + + MIX_RESULT ret = MIX_RESULT_FAIL; + guint32 displayorder = 0; + +first_frame: + + ret = mix_videoframe_get_displayorder(mvf, &displayorder); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + if (fm->is_first_frame) { + /* - * display order mode. - * - * if this is the first frame, we always push it into - * output queue, if it is not, check if it is the one - * expected, if yes, push it into the output queue. - * if not, put it into waiting list. - * - * while the expected frame is pushed into output queue, - * the expected next timestamp is also updated. with this - * updated expected next timestamp, we search for expected - * frame from the waiting list, if found, repeat the process. - * + * for the first frame, we can always put it into the output queue */ + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - MIX_RESULT ret = MIX_RESULT_FAIL; - guint64 timestamp = 0; + /* + * what displayorder of next frame shall be? 
+ */ + fm->next_displayorder = displayorder + 1; - first_frame: + fm->is_first_frame = FALSE; - ret = mix_videoframe_get_timestamp(mvf, ×tamp); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } + } else { - if (fm->is_first_frame) { + /* + * If displayorder is 0, send all the frames in the array to the queue + */ + if(displayorder == 0) { + if (fm->frame_array) { + guint len = fm->frame_array->len; + if (len) { + + /* sorting frames in the array by displayorder */ + g_ptr_array_sort(fm->frame_array, frame_sorting_func_DO); + + guint idx = 0; + MixVideoFrame *frame = NULL; + for (idx = 0; idx < len; idx++) { + frame = (MixVideoFrame *) g_ptr_array_index( + fm->frame_array, idx); + if (frame) { + g_ptr_array_index(fm->frame_array, idx) = NULL; + g_queue_push_tail(fm->frame_queue, (gpointer) frame); + } + } + } + } + + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + + /* + * what displayorder of next frame shall be? + */ + fm->next_displayorder = displayorder + 1; + + } else { + + /* + * is this the next frame expected? + */ + + /* calculate tolerance */ + MixVideoFrame *frame_from_array = NULL; + guint32 displayorder_frame_array = 0; + + /* + * timestamp may be associated with the second field, which + * will not fall between the tolerance range. + */ + + if (displayorder <= fm->next_displayorder) { /* - * for the first frame, we can always put it into the output queue + * ok, this is the frame expected, push it into output queue */ g_queue_push_tail(fm->frame_queue, (gpointer) mvf); /* - * what timestamp of next frame shall be? + * update next_frame_timestamp only if it falls within the tolerance range */ - fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta; + if (displayorder == fm->next_displayorder) + { + fm->next_displayorder = displayorder + 1; + } - fm->is_first_frame = FALSE; + /* + * since we updated next_displayorder, there might be a frame + * in the frame_array that satisfying this new next_displayorder + */ - } else { + while ((frame_from_array = get_expected_frame_from_array_DO( + fm->frame_array, fm->next_displayorder, + &displayorder_frame_array))) { + + g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array); + + /* + * update next_frame_timestamp only if it falls within the tolerance range + */ + if (displayorder_frame_array >= fm->next_displayorder) + { + fm->next_displayorder = displayorder_frame_array + 1; + } + } + + } else { /* - * is this the next frame expected? + * is discontinuity flag set for this frame ? */ - - /* calculate tolerance */ - guint64 tolerance = fm->frame_timestamp_delta / 4; - MixVideoFrame *frame_from_array = NULL; - guint64 timestamp_frame_array = 0; + gboolean discontinuity = FALSE; + ret = mix_videoframe_get_discontinuity(mvf, &discontinuity); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } /* - * timestamp may be associated with the second field, which - * will not fall between the tolerance range. 
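Taken together, mix_framemanager_displayorder_based_enqueue() and get_expected_frame_from_array_DO() implement a small reorder buffer: frames arrive in decode order, each carrying a display-order number (the H.264 path later in this patch derives it from the picture order count, TopFieldOrderCnt / 2), and a frame is released only when its display order is due; frames decoded early are parked in frame_array until the frames they were decoded ahead of have gone out. A condensed sketch of that core, using plain GLib containers and omitting the locking, discontinuity handling, and the displayorder == 0 flush of the real code:

    #include <glib.h>

    typedef struct {
        guint32 displayorder;
        /* ... frame payload ... */
    } Frame;

    /* push 'f' to 'out' in display order; '*expected' plays the role of
     * fm->next_displayorder in the real implementation */
    static void enqueue_by_displayorder(GPtrArray *waiting, GQueue *out,
                                        Frame *f, guint32 *expected)
    {
        if (f->displayorder > *expected) {
            g_ptr_array_add(waiting, f); /* decoded early (e.g. a future P
                                            before its B frames): park it */
            return;
        }

        g_queue_push_tail(out, f);
        if (f->displayorder == *expected)
            (*expected)++;

        /* releasing one frame may unblock parked ones: repeatedly take
         * the parked frame with the lowest display order while it is due */
        for (;;) {
            Frame *best = NULL;
            guint best_idx = 0, i;
            for (i = 0; i < waiting->len; i++) {
                Frame *w = g_ptr_array_index(waiting, i);
                if (w && (best == NULL || w->displayorder < best->displayorder)) {
                    best = w;
                    best_idx = i;
                }
            }
            if (best == NULL || best->displayorder > *expected)
                break;
            g_ptr_array_index(waiting, best_idx) = NULL; /* free the slot */
            g_queue_push_tail(out, best);
            if (best->displayorder >= *expected)
                *expected = best->displayorder + 1;
        }
    }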
- */ - - if (timestamp <= fm->next_frame_timestamp + tolerance) { - - /* - * ok, this is the frame expected, push it into output queue - */ - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - - /* - * update next_frame_timestamp only if it falls within the tolerance range - */ - if (timestamp >= fm->next_frame_timestamp - tolerance) - { - fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta; - } - - /* - * since we updated next_frame_timestamp, there might be a frame - * in the frame_array that satisfying this new next_frame_timestamp - */ - - while ((frame_from_array = get_expected_frame_from_array( - fm->frame_array, fm->next_frame_timestamp, tolerance, - ×tamp_frame_array))) { - - g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array); - - /* - * update next_frame_timestamp only if it falls within the tolerance range - */ - if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance) - { - fm->next_frame_timestamp = timestamp_frame_array - + fm->frame_timestamp_delta; - } + * If this is a frame with discontinuity flag set, clear frame_array + * and treat the frame as the first frame. + */ + if (discontinuity) { + + guint len = fm->frame_array->len; + if (len) { + guint idx = 0; + MixVideoFrame *frame = NULL; + for (idx = 0; idx < len; idx++) { + frame = (MixVideoFrame *) g_ptr_array_index( + fm->frame_array, idx); + if (frame) { + mix_videoframe_unref(frame); + g_ptr_array_index(fm->frame_array, idx) = NULL; + } } + } - } else { + fm->is_first_frame = TRUE; + goto first_frame; + } - /* - * is discontinuity flag set for this frame ? - */ - gboolean discontinuity = FALSE; - ret = mix_videoframe_get_discontinuity(mvf, &discontinuity); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } + /* + * this is not the expected frame, put it into frame_array + */ + add_frame_into_array(fm->frame_array, mvf); + } + } + } +cleanup: - /* - * If this is a frame with discontinuity flag set, clear frame_array - * and treat the frame as the first frame. - */ - if (discontinuity) { - - guint len = fm->frame_array->len; - if (len) { - guint idx = 0; - MixVideoFrame *frame = NULL; - for (idx = 0; idx < len; idx++) { - frame = (MixVideoFrame *) g_ptr_array_index( - fm->frame_array, idx); - if (frame) { - mix_videoframe_unref(frame); - g_ptr_array_index(fm->frame_array, idx) = NULL; - } - } - } - - fm->is_first_frame = TRUE; - goto first_frame; - } + return ret; +} + + +MIX_RESULT mix_framemanager_timestamp_based_enqueue(MixFrameManager *fm, + MixVideoFrame *mvf) { + /* + * display order mode. + * + * if this is the first frame, we always push it into + * output queue, if it is not, check if it is the one + * expected, if yes, push it into the output queue. + * if not, put it into waiting list. + * + * while the expected frame is pushed into output queue, + * the expected next timestamp is also updated. with this + * updated expected next timestamp, we search for expected + * frame from the waiting list, if found, repeat the process. + * + */ + + MIX_RESULT ret = MIX_RESULT_FAIL; + guint64 timestamp = 0; + +first_frame: + + ret = mix_videoframe_get_timestamp(mvf, ×tamp); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + if (fm->is_first_frame) { + + /* + * for the first frame, we can always put it into the output queue + */ + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + + /* + * what timestamp of next frame shall be? 
+ */ + fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta; + + fm->is_first_frame = FALSE; + + } else { + + /* + * is this the next frame expected? + */ + + /* calculate tolerance */ + guint64 tolerance = fm->frame_timestamp_delta / 4; + MixVideoFrame *frame_from_array = NULL; + guint64 timestamp_frame_array = 0; + + /* + * timestamp may be associated with the second field, which + * will not fall between the tolerance range. + */ + + if (timestamp <= fm->next_frame_timestamp + tolerance) { + + /* + * ok, this is the frame expected, push it into output queue + */ + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + + /* + * update next_frame_timestamp only if it falls within the tolerance range + */ + if (timestamp >= fm->next_frame_timestamp - tolerance) + { + fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta; + } + + /* + * since we updated next_frame_timestamp, there might be a frame + * in the frame_array that satisfying this new next_frame_timestamp + */ + + while ((frame_from_array = get_expected_frame_from_array( + fm->frame_array, fm->next_frame_timestamp, tolerance, + ×tamp_frame_array))) { - /* - * handle variable frame rate: - * display any frame which time stamp is less than current one. - * - */ - guint64 tolerance = fm->frame_timestamp_delta / 4; - MixVideoFrame *frame_from_array = NULL; - guint64 timestamp_frame_array = 0; - - while ((frame_from_array = get_expected_frame_from_array( - fm->frame_array, timestamp, tolerance, - ×tamp_frame_array))) - { - g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array); - - /* - * update next_frame_timestamp only if it falls within the tolerance range - */ - if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance) - { - fm->next_frame_timestamp = timestamp_frame_array - + fm->frame_timestamp_delta; - } + g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array); + + /* + * update next_frame_timestamp only if it falls within the tolerance range + */ + if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance) + { + fm->next_frame_timestamp = timestamp_frame_array + + fm->frame_timestamp_delta; + } + } + + } else { + + /* + * is discontinuity flag set for this frame ? + */ + gboolean discontinuity = FALSE; + ret = mix_videoframe_get_discontinuity(mvf, &discontinuity); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + /* + * If this is a frame with discontinuity flag set, clear frame_array + * and treat the frame as the first frame. + */ + if (discontinuity) { + + guint len = fm->frame_array->len; + if (len) { + guint idx = 0; + MixVideoFrame *frame = NULL; + for (idx = 0; idx < len; idx++) { + frame = (MixVideoFrame *) g_ptr_array_index( + fm->frame_array, idx); + if (frame) { + mix_videoframe_unref(frame); + g_ptr_array_index(fm->frame_array, idx) = NULL; } - /* - * this is not the expected frame, put it into frame_array - */ + } + } - add_frame_into_array(fm->frame_array, mvf); + fm->is_first_frame = TRUE; + goto first_frame; + } + + /* + * handle variable frame rate: + * display any frame which time stamp is less than current one. 
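The tolerance used throughout this function is a quarter of the nominal frame period, where the period comes from the frame rate set at initialization (fm->frame_timestamp_delta = framerate_denominator * MIX_SECOND / framerate_numerator). The arithmetic below, assuming for illustration that MIX_SECOND counts nanoseconds (the real constant is defined elsewhere in the tree; only the ratios matter), shows why a second field's timestamp is treated the way the comments above describe: it passes the one-sided acceptance test and is emitted, but it lands outside the +/- tolerance window and therefore must not advance next_frame_timestamp.

    #include <glib.h>

    /* tolerance math for 29.97 fps (framerate 30000/1001) */
    static void tolerance_example(void)
    {
        const guint64 mix_second = G_GUINT64_CONSTANT(1000000000); /* assumed */

        guint64 delta     = 1001 * mix_second / 30000; /* ~33.4 ms per frame */
        guint64 tolerance = delta / 4;                 /* ~8.3 ms            */
        guint64 expected  = 10 * delta;                /* some expected time */

        /* a second field is stamped roughly half a frame period early */
        guint64 second_field = expected - delta / 2;   /* ~16.7 ms early     */

        /* acceptance test (one-sided): passes, so the field is emitted */
        g_assert(second_field <= expected + tolerance);

        /* update test (lower bound of the window): fails, so the expected
         * timestamp for the next full frame is left untouched */
        g_assert(!(second_field >= expected - tolerance));
    }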
+ * + */ + guint64 tolerance = fm->frame_timestamp_delta / 4; + MixVideoFrame *frame_from_array = NULL; + guint64 timestamp_frame_array = 0; + + while ((frame_from_array = get_expected_frame_from_array( + fm->frame_array, timestamp, tolerance, + ×tamp_frame_array))) + { + g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array); + + /* + * update next_frame_timestamp only if it falls within the tolerance range + */ + if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance) + { + fm->next_frame_timestamp = timestamp_frame_array + + fm->frame_timestamp_delta; } + } + /* + * this is not the expected frame, put it into frame_array + */ + + add_frame_into_array(fm->frame_array, mvf); } - cleanup: + } +cleanup: - return ret; + return ret; } MIX_RESULT mix_framemanager_frametype_based_enqueue(MixFrameManager *fm, - MixVideoFrame *mvf) { + MixVideoFrame *mvf) { - MIX_RESULT ret = MIX_RESULT_FAIL; - MixFrameType frame_type; - guint64 timestamp = 0; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixFrameType frame_type; + guint64 timestamp = 0; - ret = mix_videoframe_get_frame_type(mvf, &frame_type); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } + ret = mix_videoframe_get_frame_type(mvf, &frame_type); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } - ret = mix_videoframe_get_timestamp(mvf, ×tamp); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } + ret = mix_videoframe_get_timestamp(mvf, ×tamp); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } #ifdef MIX_LOG_ENABLE - if (frame_type == TYPE_I) { - LOG_I( "TYPE_I %"G_GINT64_FORMAT"\n", timestamp); - } else if (frame_type == TYPE_P) { - LOG_I( "TYPE_P %"G_GINT64_FORMAT"\n", timestamp); - } else if (frame_type == TYPE_B) { - LOG_I( "TYPE_B %"G_GINT64_FORMAT"\n", timestamp); - } else { - LOG_I( "TYPE_UNKNOWN %"G_GINT64_FORMAT"\n", timestamp); - } + if (frame_type == TYPE_I) { + LOG_I( "TYPE_I %"G_GINT64_FORMAT"\n", timestamp); + } else if (frame_type == TYPE_P) { + LOG_I( "TYPE_P %"G_GINT64_FORMAT"\n", timestamp); + } else if (frame_type == TYPE_B) { + LOG_I( "TYPE_B %"G_GINT64_FORMAT"\n", timestamp); + } else { + LOG_I( "TYPE_UNKNOWN %"G_GINT64_FORMAT"\n", timestamp); + } #endif - if (fm->is_first_frame) { - /* - * The first frame is not a I frame, unexpected! - */ - if (frame_type != TYPE_I) { - goto cleanup; - } + if (fm->is_first_frame) { + /* + * The first frame is not a I frame, unexpected! + */ + if (frame_type != TYPE_I) { + goto cleanup; + } - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - fm->is_first_frame = FALSE; - } else { + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + fm->is_first_frame = FALSE; + } else { - /* - * I P B B P B B ... - */ - if (frame_type == TYPE_I || frame_type == TYPE_P) { - - if (fm->p_frame) { + /* + * I P B B P B B ... 
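When timestamps cannot drive reordering (fm->timebased_ordering is FALSE, e.g. VC-1 in ASF, where the container stamps frames in decode order), the frame-type path below restores display order purely from the I/P/B pattern "I P B B P B B ...": each reference frame (I or P) is held back, every B frame is emitted immediately with the held frame's earlier timestamp, and the held frame finally goes out carrying the timestamp of the frame that arrived after it. The function below is a condensed restatement of that rule, using the same field and function names as the real code but omitting the first-frame, error, and logging paths:

    static MIX_RESULT frametype_reorder_step(MixFrameManager *fm,
                                             MixVideoFrame *mvf,
                                             MixFrameType frame_type,
                                             guint64 timestamp)
    {
        if (frame_type == TYPE_I || frame_type == TYPE_P) {
            if (fm->p_frame) {
                /* the reference frame held back earlier is released with
                 * the timestamp of the frame that arrived after it */
                mix_videoframe_set_timestamp(fm->p_frame, fm->prev_timestamp);
                g_queue_push_tail(fm->frame_queue, (gpointer) fm->p_frame);
            }
            fm->p_frame = mvf;               /* hold this reference frame */
            fm->prev_timestamp = timestamp;
        } else {
            /* B frame: displayed before the held reference frame, so it
             * takes the earlier timestamp and leaves its own (later) one
             * for the held frame */
            if (timestamp > fm->prev_timestamp) {
                mix_videoframe_set_timestamp(mvf, fm->prev_timestamp);
                fm->prev_timestamp = timestamp;
            }
            g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
        }
        return MIX_RESULT_SUCCESS;
    }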
+ */ + if (frame_type == TYPE_I || frame_type == TYPE_P) { - ret = mix_videoframe_set_timestamp(fm->p_frame, - fm->prev_timestamp); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } + if (fm->p_frame) { - g_queue_push_tail(fm->frame_queue, (gpointer) fm->p_frame); - fm->p_frame = NULL; - } + ret = mix_videoframe_set_timestamp(fm->p_frame, + fm->prev_timestamp); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } - /* it is an I frame, push it into the out queue */ - /*if (frame_type == TYPE_I) { + g_queue_push_tail(fm->frame_queue, (gpointer) fm->p_frame); + fm->p_frame = NULL; + } - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + /* it is an I frame, push it into the out queue */ + /*if (frame_type == TYPE_I) { - } else*/ - { - /* it is a P frame, we can not push it to the out queue yet, save it */ - fm->p_frame = mvf; - fm->prev_timestamp = timestamp; - } + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - ret = MIX_RESULT_SUCCESS; + } else*/ + { + /* it is a P frame, we can not push it to the out queue yet, save it */ + fm->p_frame = mvf; + fm->prev_timestamp = timestamp; + } - } else { - /* it is a B frame, replace the timestamp with the previous one */ - if (timestamp > fm->prev_timestamp) { - ret = mix_videoframe_set_timestamp(mvf, fm->prev_timestamp); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } + ret = MIX_RESULT_SUCCESS; - /* save the timestamp */ - fm->prev_timestamp = timestamp; - } - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - ret = MIX_RESULT_SUCCESS; + } else { + /* it is a B frame, replace the timestamp with the previous one */ + if (timestamp > fm->prev_timestamp) { + ret = mix_videoframe_set_timestamp(mvf, fm->prev_timestamp); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; } + + /* save the timestamp */ + fm->prev_timestamp = timestamp; + } + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + ret = MIX_RESULT_SUCCESS; } + } - cleanup: +cleanup: - return ret; + return ret; } MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { - MIX_RESULT ret = MIX_RESULT_FAIL; + MIX_RESULT ret = MIX_RESULT_FAIL; - /*fm->mode = MIX_FRAMEORDER_MODE_DECODEORDER;*/ + /*fm->mode = MIX_FRAMEORDER_MODE_DECODEORDER;*/ - if (!mvf) { - return MIX_RESULT_INVALID_PARAM; - } + if (!mvf) { + return MIX_RESULT_INVALID_PARAM; + } - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } + + /* + * This should never happen! + */ + if (fm->mode != MIX_FRAMEORDER_MODE_DISPLAYORDER && fm->mode + != MIX_FRAMEORDER_MODE_DECODEORDER) { + return MIX_RESULT_FAIL; + } + + g_mutex_lock(fm->lock); + ret = MIX_RESULT_SUCCESS; + if (fm->mode == MIX_FRAMEORDER_MODE_DECODEORDER) { /* - * This should never happen! 
+ * decode order mode, push the frame into output queue */ - if (fm->mode != MIX_FRAMEORDER_MODE_DISPLAYORDER && fm->mode - != MIX_FRAMEORDER_MODE_DECODEORDER) { - return MIX_RESULT_FAIL; - } - - g_mutex_lock(fm->lock); + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - ret = MIX_RESULT_SUCCESS; - if (fm->mode == MIX_FRAMEORDER_MODE_DECODEORDER) { - /* - * decode order mode, push the frame into output queue - */ - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + } else { + if (fm->timebased_ordering) { + // ret = mix_framemanager_timestamp_based_enqueue(fm, mvf); + ret = mix_framemanager_displayorder_based_enqueue(fm, mvf); } else { - - if (fm->timebased_ordering) { - ret = mix_framemanager_timestamp_based_enqueue(fm, mvf); - } else { - ret = mix_framemanager_frametype_based_enqueue(fm, mvf); - } + ret = mix_framemanager_frametype_based_enqueue(fm, mvf); } + } - g_mutex_unlock(fm->lock); + g_mutex_unlock(fm->lock); - return ret; + return ret; } MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { - MIX_RESULT ret = MIX_RESULT_FAIL; + MIX_RESULT ret = MIX_RESULT_FAIL; - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!mvf) { - return MIX_RESULT_INVALID_PARAM; - } + if (!mvf) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); - ret = MIX_RESULT_FRAME_NOTAVAIL; - *mvf = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue); - if (*mvf) { - ret = MIX_RESULT_SUCCESS; - } else if (fm->eos) { - ret = MIX_RESULT_EOS; - } + ret = MIX_RESULT_FRAME_NOTAVAIL; + *mvf = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue); + if (*mvf) { + ret = MIX_RESULT_SUCCESS; + } else if (fm->eos) { + ret = MIX_RESULT_EOS; + } - g_mutex_unlock(fm->lock); + g_mutex_unlock(fm->lock); - return ret; + return ret; } MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) { - MIX_RESULT ret = MIX_RESULT_FAIL; + MIX_RESULT ret = MIX_RESULT_FAIL; - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); - fm->eos = TRUE; + fm->eos = TRUE; - g_mutex_unlock(fm->lock); + g_mutex_unlock(fm->lock); - return ret; + return ret; } diff --git a/mix_video/src/mixframemanager.h b/mix_video/src/mixframemanager.h index 5dc663a..cd33038 100644 --- a/mix_video/src/mixframemanager.h +++ b/mix_video/src/mixframemanager.h @@ -58,6 +58,8 @@ struct _MixFrameManager { guint64 prev_timestamp; gboolean timebased_ordering; + + guint32 next_displayorder; }; /** diff --git a/mix_video/src/mixvideoformat_h264.c b/mix_video/src/mixvideoformat_h264.c index 1f57019..e43648d 100644 --- a/mix_video/src/mixvideoformat_h264.c +++ b/mix_video/src/mixvideoformat_h264.c @@ -1115,6 +1115,13 @@ MIX_RESULT mix_videofmt_h264_process_decode_picture(MixVideoFormat *mix, LOG_V( "frame type is %d\n", frame_type); + // Set displayorder + ret = mix_videoframe_set_displayorder(frame, pic_params->CurrPic.TopFieldOrderCnt / 2); + if(ret != MIX_RESULT_SUCCESS) + { + LOG_E("Error setting displayorder\n"); + goto cleanup; + } //Set the frame type for the frame object (used in reordering by frame manager) ret = 
mix_videoframe_set_frame_type(frame, frame_type); diff --git a/mix_video/src/mixvideoframe.c b/mix_video/src/mixvideoframe.c index 64d7831..c06ff5e 100644 --- a/mix_video/src/mixvideoframe.c +++ b/mix_video/src/mixvideoframe.c @@ -477,3 +477,22 @@ MIX_RESULT mix_videoframe_get_vadisplay(MixVideoFrame * obj, void **va_display) return MIX_RESULT_SUCCESS; } +MIX_RESULT mix_videoframe_set_displayorder(MixVideoFrame *obj, guint32 displayorder) { + + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); + + priv -> displayorder = displayorder; + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoframe_get_displayorder(MixVideoFrame *obj, guint32 *displayorder) { + + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, displayorder); + MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); + + *displayorder = priv -> displayorder; + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoframe.h b/mix_video/src/mixvideoframe.h index 1178d25..8e5637f 100644 --- a/mix_video/src/mixvideoframe.h +++ b/mix_video/src/mixvideoframe.h @@ -234,4 +234,9 @@ MIX_RESULT mix_videoframe_get_discontinuity(MixVideoFrame * obj, gboolean * disc MIX_RESULT mix_videoframe_set_vadisplay(MixVideoFrame * obj, void *va_display); MIX_RESULT mix_videoframe_get_vadisplay(MixVideoFrame * obj, void **va_display); +MIX_RESULT mix_videoframe_set_frame_structure(MixVideoFrame * obj, guint32 frame_structure); +MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, guint32* frame_structure); + + + #endif /* __MIX_VIDEOFRAME_H__ */ diff --git a/mix_video/src/mixvideoframe_private.h b/mix_video/src/mixvideoframe_private.h index d49fe38..72aeaa1 100644 --- a/mix_video/src/mixvideoframe_private.h +++ b/mix_video/src/mixvideoframe_private.h @@ -32,7 +32,8 @@ struct _MixVideoFramePrivate GStaticRecMutex lock; gboolean sync_flag; guint32 frame_structure; // 0: frame, 1: top field, 2: bottom field - void *va_display; + void *va_display; + guint32 displayorder; }; /** @@ -76,11 +77,18 @@ mix_videoframe_set_sync_flag(MixVideoFrame *obj, gboolean sync_flag); MIX_RESULT mix_videoframe_get_sync_flag(MixVideoFrame *obj, gboolean *sync_flag); +MIX_RESULT +mix_videoframe_set_displayorder(MixVideoFrame *obj, guint32 displayorder); + +MIX_RESULT +mix_videoframe_get_displayorder(MixVideoFrame *obj, guint32 *displayorder); + +/* MIX_RESULT mix_videoframe_set_frame_structure(MixVideoFrame * obj, guint32 frame_structure); MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, guint32* frame_structure); - +*/ #endif /* __MIX_VIDEOFRAME_PRIVATE_H__ */ -- cgit v1.2.3 From 35aa7d1e2fe86858bd5381d3f1bb7a6883e8df7f Mon Sep 17 00:00:00 2001 From: Tao Tao Date: Wed, 9 Jun 2010 20:28:28 -0700 Subject: Changed mixvideo to work with new libva Change-Id: Ibeb2cf0996b33eb5ffccf8ebbbf5176147135ab4 --- mix_video/ChangeLog | 43 + mix_video/configure.ac | 8 +- mix_video/docs/Makefile.am | 4 + mix_video/docs/readme | 17 + mix_video/docs/reference/Makefile.am | 4 + .../reference/MixVideo/BackupMixVideo-docs.sgml | 53 + mix_video/docs/reference/MixVideo/Makefile.am | 116 + .../docs/reference/MixVideo/MixVideo-sections.txt | 452 +++ mix_video/docs/reference/MixVideo/MixVideo.types | 22 + .../docs/reference/MixVideo/html/MixBuffer.html | 227 ++ .../docs/reference/MixVideo/html/MixDisplay.html | 591 ++++ .../reference/MixVideo/html/MixDisplayX11.html | 271 ++ .../docs/reference/MixVideo/html/MixDrmParams.html | 137 + .../MixVideo/html/MixVideo-mixvideodef.html | 221 ++ 
.../docs/reference/MixVideo/html/MixVideo.devhelp | 244 ++ .../docs/reference/MixVideo/html/MixVideo.devhelp2 | 244 ++ .../docs/reference/MixVideo/html/MixVideo.html | 958 ++++++ .../MixVideo/html/MixVideoConfigParams.html | 162 + .../MixVideo/html/MixVideoConfigParamsDec.html | 797 +++++ .../MixVideo/html/MixVideoConfigParamsDecH264.html | 130 + .../MixVideo/html/MixVideoConfigParamsDecMP42.html | 240 ++ .../MixVideo/html/MixVideoConfigParamsDecVC1.html | 137 + .../MixVideo/html/MixVideoConfigParamsEnc.html | 1245 +++++++ .../MixVideo/html/MixVideoConfigParamsEncH264.html | 398 +++ .../html/MixVideoConfigParamsEncMPEG4.html | 319 ++ .../MixVideo/html/MixVideoDecodeParams.html | 281 ++ .../MixVideo/html/MixVideoEncodeParams.html | 155 + .../reference/MixVideo/html/MixVideoFrame.html | 423 +++ .../MixVideo/html/MixVideoInitParams.html | 214 ++ .../MixVideo/html/MixVideoRenderParams.html | 418 +++ .../reference/MixVideo/html/api-index-full.html | 466 +++ mix_video/docs/reference/MixVideo/html/ch01.html | 92 + mix_video/docs/reference/MixVideo/html/home.png | Bin 0 -> 654 bytes mix_video/docs/reference/MixVideo/html/index.html | 96 + mix_video/docs/reference/MixVideo/html/index.sgml | 307 ++ mix_video/docs/reference/MixVideo/html/left.png | Bin 0 -> 459 bytes .../docs/reference/MixVideo/html/object-tree.html | 55 + mix_video/docs/reference/MixVideo/html/right.png | Bin 0 -> 472 bytes mix_video/docs/reference/MixVideo/html/style.css | 167 + mix_video/docs/reference/MixVideo/html/up.png | Bin 0 -> 406 bytes mix_video/mixvideo.spec | 4 +- mix_video/src/Android.mk | 21 +- mix_video/src/Makefile.am | 15 +- mix_video/src/mixbuffer.c | 17 +- mix_video/src/mixbuffer.h | 31 +- mix_video/src/mixdisplay.c | 6 +- mix_video/src/mixdisplay.h | 4 +- mix_video/src/mixdisplayandroid.c | 197 ++ mix_video/src/mixdisplayandroid.h | 172 + mix_video/src/mixdisplayx11.c | 15 +- mix_video/src/mixdisplayx11.h | 52 +- mix_video/src/mixdrmparams.c | 189 - mix_video/src/mixdrmparams.h | 126 - mix_video/src/mixframemanager.c | 1164 ++++--- mix_video/src/mixframemanager.h | 2 +- mix_video/src/mixvideo.c | 322 +- mix_video/src/mixvideo.h | 363 +- mix_video/src/mixvideocaps.c | 12 +- mix_video/src/mixvideoconfigparams.c | 23 +- mix_video/src/mixvideoconfigparams.h | 5 +- mix_video/src/mixvideoconfigparamsdec.c | 6 +- mix_video/src/mixvideoconfigparamsdec.h | 189 +- mix_video/src/mixvideoconfigparamsdec_h264.c | 10 +- mix_video/src/mixvideoconfigparamsdec_h264.h | 10 +- mix_video/src/mixvideoconfigparamsdec_mp42.c | 5 +- mix_video/src/mixvideoconfigparamsdec_mp42.h | 46 +- mix_video/src/mixvideoconfigparamsdec_vc1.c | 5 +- mix_video/src/mixvideoconfigparamsdec_vc1.h | 13 +- mix_video/src/mixvideoconfigparamsenc.c | 5 +- mix_video/src/mixvideoconfigparamsenc.h | 329 +- mix_video/src/mixvideoconfigparamsenc_h263.c | 281 ++ mix_video/src/mixvideoconfigparamsenc_h263.h | 188 + mix_video/src/mixvideoconfigparamsenc_h264.c | 11 +- mix_video/src/mixvideoconfigparamsenc_h264.h | 82 +- mix_video/src/mixvideoconfigparamsenc_mpeg4.c | 601 ++-- mix_video/src/mixvideoconfigparamsenc_mpeg4.h | 375 +- mix_video/src/mixvideoconfigparamsenc_preview.c | 5 - mix_video/src/mixvideoconfigparamsenc_preview.h | 6 +- mix_video/src/mixvideodecodeparams.c | 5 +- mix_video/src/mixvideodecodeparams.h | 51 +- mix_video/src/mixvideodef.h | 21 + mix_video/src/mixvideoencodeparams.c | 9 +- mix_video/src/mixvideoencodeparams.h | 11 + mix_video/src/mixvideoformat.h | 2 +- mix_video/src/mixvideoformat_h264.c | 17 +- mix_video/src/mixvideoformat_vc1.c | 2 + 
mix_video/src/mixvideoformatenc.h | 2 +- mix_video/src/mixvideoformatenc_h263.c | 1806 ++++++++++ mix_video/src/mixvideoformatenc_h263.h | 141 + mix_video/src/mixvideoformatenc_h264.c | 2588 +++++++------- mix_video/src/mixvideoformatenc_mpeg4.c | 3626 ++++++++++---------- mix_video/src/mixvideoformatenc_mpeg4.h | 286 +- mix_video/src/mixvideoformatenc_preview.c | 1434 ++++---- mix_video/src/mixvideoframe.c | 4 + mix_video/src/mixvideoframe.h | 4 - mix_video/src/mixvideoframe_private.h | 13 +- mix_video/src/mixvideoinitparams.c | 7 +- mix_video/src/mixvideoinitparams.h | 31 +- mix_video/src/mixvideorenderparams.c | 6 +- mix_video/src/mixvideorenderparams.h | 98 + 100 files changed, 19171 insertions(+), 5614 deletions(-) create mode 100644 mix_video/docs/Makefile.am create mode 100644 mix_video/docs/readme create mode 100644 mix_video/docs/reference/Makefile.am create mode 100644 mix_video/docs/reference/MixVideo/BackupMixVideo-docs.sgml create mode 100644 mix_video/docs/reference/MixVideo/Makefile.am create mode 100644 mix_video/docs/reference/MixVideo/MixVideo-sections.txt create mode 100644 mix_video/docs/reference/MixVideo/MixVideo.types create mode 100644 mix_video/docs/reference/MixVideo/html/MixBuffer.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixDisplay.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixDisplayX11.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixDrmParams.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideo-mixvideodef.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideo.devhelp create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideo.devhelp2 create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideo.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParams.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDec.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecH264.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecMP42.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecVC1.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEnc.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncH264.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncMPEG4.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoDecodeParams.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoEncodeParams.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoFrame.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoInitParams.html create mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoRenderParams.html create mode 100644 mix_video/docs/reference/MixVideo/html/api-index-full.html create mode 100644 mix_video/docs/reference/MixVideo/html/ch01.html create mode 100644 mix_video/docs/reference/MixVideo/html/home.png create mode 100644 mix_video/docs/reference/MixVideo/html/index.html create mode 100644 mix_video/docs/reference/MixVideo/html/index.sgml create mode 100644 mix_video/docs/reference/MixVideo/html/left.png create mode 100644 mix_video/docs/reference/MixVideo/html/object-tree.html create mode 100644 mix_video/docs/reference/MixVideo/html/right.png create mode 100644 mix_video/docs/reference/MixVideo/html/style.css create mode 100644 
mix_video/docs/reference/MixVideo/html/up.png create mode 100644 mix_video/src/mixdisplayandroid.c create mode 100644 mix_video/src/mixdisplayandroid.h delete mode 100644 mix_video/src/mixdrmparams.c delete mode 100644 mix_video/src/mixdrmparams.h create mode 100644 mix_video/src/mixvideoconfigparamsenc_h263.c create mode 100644 mix_video/src/mixvideoconfigparamsenc_h263.h create mode 100644 mix_video/src/mixvideoformatenc_h263.c create mode 100644 mix_video/src/mixvideoformatenc_h263.h diff --git a/mix_video/ChangeLog b/mix_video/ChangeLog index 139597f..903e037 100644 --- a/mix_video/ChangeLog +++ b/mix_video/ChangeLog @@ -1,2 +1,45 @@ +2010-06-04 Tao Tao + * Rolled version number to 0.1.17 + +2010-06-01 Tao Tao + + * Added va_display to MixVideoFrame + * Made changes in several MI-X video source files to reflect the MixVideoFrame change. + +2010-05-28 Weian Chen + + * Make changes to H.264/H.263/MPEG-4 encode to align with the data structure change of coded buffer in LibVA + * Link encode to libva-tpi + +2010-05-07 Tao Tao + + * Added gtk-doc support to the project + +2010-05-03 Tao Tao + + * Removed mixdrmparams.h/c files and made related changes. + +2010-04-21 Chris Pearson + + * Added null-pointer check to mixvideoformat_h264 to fix shutdown crash. + +2010-03-25 Tao Tao + + * Changed mixvideo and mixframemanger to handle last frame correctly. + +2010-03-22 Tao Tao + + * Rolled version number to 0.1.16 + + +2010-03-09 Weian Chen + + * lots of defects fix for mixvideo encode + + +2010-03-09 Weian Chen + + * Change to use Cleanup convention for mixvideoenc (all mixvideformatenc derived Object, H.264/MPEG-4:2/preview) + * remove some unnecessary comments diff --git a/mix_video/configure.ac b/mix_video/configure.ac index 6baf0ed..8768cee 100644 --- a/mix_video/configure.ac +++ b/mix_video/configure.ac @@ -2,7 +2,7 @@ AC_INIT("", "", [linda.s.cline@intel.com]) AC_CONFIG_MACRO_DIR(m4) -AS_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 15) +AS_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 17) dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode AM_MAINTAINER_MODE @@ -114,8 +114,8 @@ AC_SUBST(MIXVBP_CFLAGS) AC_SUBST(MIXVBP_LIBS) -#check for gtk-doc -#GTK_DOC_CHECK(1.9) +dnl check for gtkdoc. Uncomment the following line to build gtkdoc +dnl GTK_DOC_CHECK(1.9) AC_CONFIG_FILES([ mixvideo.pc @@ -129,7 +129,7 @@ dnl Un-comment this section to enable building of documentation. dnl AC_CONFIG_FILES( dnl docs/Makefile dnl docs/reference/Makefile -#dnl docs/reference/MixVideo/Makefile +dnl docs/reference/MixVideo/Makefile dnl ) AC_OUTPUT diff --git a/mix_video/docs/Makefile.am b/mix_video/docs/Makefile.am new file mode 100644 index 0000000..621e3f7 --- /dev/null +++ b/mix_video/docs/Makefile.am @@ -0,0 +1,4 @@ +SUBDIRS = reference + +DIST_SUBDIRS = reference + diff --git a/mix_video/docs/readme b/mix_video/docs/readme new file mode 100644 index 0000000..56b1785 --- /dev/null +++ b/mix_video/docs/readme @@ -0,0 +1,17 @@ +How to generate gtk-doc? + + +By default, gtk-doc generation is disabled in mix_video. To generate gtk-docs + + +01. in mix_video/autogen.sh file, uncomment line #gtkdocize ... +02. in mix_video/Makefile.am, uncomment line #SUBDIRS += docs +03. in mix_video/configure.ac, uncomment #GTK_DOC_CHECK(1.9) and the commented block AC_CONFIG_FILES ... of gtkdoc +04. sh autogen.sh +05. ./configure --enable-gtk-doc +06. make +07. go to directory mix_video/docs/reference/MixVideo, type command +08. cp BackupMixVideo-docs.sgml MixVideo-docs.sgml +09. make +10. 
use browser to open mix_video/docs/reference/MixVideo/html/index.html + diff --git a/mix_video/docs/reference/Makefile.am b/mix_video/docs/reference/Makefile.am new file mode 100644 index 0000000..39b3000 --- /dev/null +++ b/mix_video/docs/reference/Makefile.am @@ -0,0 +1,4 @@ +SUBDIRS = MixVideo + +DIST_SUBDIRS = MixVideo + diff --git a/mix_video/docs/reference/MixVideo/BackupMixVideo-docs.sgml b/mix_video/docs/reference/MixVideo/BackupMixVideo-docs.sgml new file mode 100644 index 0000000..2f23d4e --- /dev/null +++ b/mix_video/docs/reference/MixVideo/BackupMixVideo-docs.sgml @@ -0,0 +1,53 @@ + + +]> + + + Mi-X Video Reference Manual + + for MI-X Video 0.1 + + + + + + MI-X Video API + + + + + + + + + + + + + + + + + + + + + + + Object Hierarchy + + + + API Index + + + + diff --git a/mix_video/docs/reference/MixVideo/Makefile.am b/mix_video/docs/reference/MixVideo/Makefile.am new file mode 100644 index 0000000..def9d68 --- /dev/null +++ b/mix_video/docs/reference/MixVideo/Makefile.am @@ -0,0 +1,116 @@ +#INTEL CONFIDENTIAL +#Copyright 2009 Intel Corporation All Rights Reserved. +#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +# + + +## Process this file with automake to produce Makefile.in + +# We require automake 1.6 at least. +AUTOMAKE_OPTIONS = 1.6 + +# This is a blank Makefile.am for using gtk-doc. +# Copy this to your project's API docs directory and modify the variables to +# suit your project. See the GTK+ Makefiles in gtk+/docs/reference for examples +# of using the various options. + +# The name of the module, e.g. 'glib'. +DOC_MODULE=MixVideo + +# The top-level SGML file. You can change this if you want to. +DOC_MAIN_SGML_FILE=$(DOC_MODULE)-docs.sgml + +# The directory containing the source code. Relative to $(srcdir). +# gtk-doc will search all .c & .h files beneath here for inline comments +# documenting the functions and macros. +# e.g. DOC_SOURCE_DIR=../../../gtk +DOC_SOURCE_DIR=$(top_srcdir)/src + +# Extra options to pass to gtkdoc-scangobj. Not normally needed. +#SCANGOBJ_OPTIONS=--type-init-func="g_init(NULL,NULL)" + +# Extra options to supply to gtkdoc-scan. +# e.g. SCAN_OPTIONS=--deprecated-guards="GTK_DISABLE_DEPRECATED" +#SCAN_OPTIONS=--rebuild-sections --rebuild-types +#SCAN_OPTIONS=--rebuild-sections +#SCAN_OPTIONS=--rebuild-types + +# Extra options to supply to gtkdoc-mkdb. +# e.g. MKDB_OPTIONS=--sgml-mode --output-format=xml +MKDB_OPTIONS=--sgml-mode --output-format=xml + +# Extra options to supply to gtkdoc-mktmpl +# e.g. MKTMPL_OPTIONS=--only-section-tmpl +MKTMPL_OPTIONS= + +# Extra options to supply to gtkdoc-fixref. 
+# e.g. FIXXREF_OPTIONS=--extra-dir=../gdk-pixbuf/html --extra-dir=../gdk/html
+FIXXREF_OPTIONS=
+
+# Used for dependencies. The docs will be rebuilt if any of these change.
+# e.g. HFILE_GLOB=$(top_srcdir)/gtk/*.h
+# e.g. CFILE_GLOB=$(top_srcdir)/gtk/*.c
+HFILE_GLOB=$(top_srcdir)/src/*.h
+CFILE_GLOB=$(top_srcdir)/src/*.c
+
+# Header files to ignore when scanning.
+# e.g. IGNORE_HFILES=gtkdebug.h gtkintl.h
+IGNORE_HFILES=*~ \
+	mixbufferpool.h \
+	mixbuffer_private.h \
+	mixframemanager.h \
+	mixsurfacepool.h \
+	mixvideocaps.h \
+	mixvideoformatenc.h \
+	mixvideoformatenc_h264.h \
+	mixvideoformatenc_mpeg4.h \
+	mixvideoformatenc_preview.h \
+	mixvideoformat.h \
+	mixvideoformat_h264.h \
+	mixvideoformat_mp42.h \
+	mixvideoformatqueue.h \
+	mixvideoformat_vc1.h \
+	mixvideoframe_private.h \
+	mixvideolog.h \
+	mixvideo_private.h \
+	mixvideorenderparams_internal.h \
+	mixvideoconfigparamsenc_preview.h
+
+# Images to copy into HTML directory.
+# e.g. HTML_IMAGES=$(top_srcdir)/gtk/stock-icons/stock_about_24.png
+HTML_IMAGES=
+
+# Extra SGML files that are included by $(DOC_MAIN_SGML_FILE).
+# e.g. content_files=running.sgml building.sgml changes-2.0.sgml
+content_files=
+
+# SGML files where gtk-doc abbreviations (#GtkWidget) are expanded
+# These files must be listed here *and* in content_files
+# e.g. expand_content_files=running.sgml
+expand_content_files=
+
+# CFLAGS and LDFLAGS for compiling gtkdoc-scangobj with your library.
+# Only needed if you are using gtkdoc-scangobj to dynamically query widget
+# signals and properties.
+# e.g. INCLUDES=-I$(top_srcdir) -I$(top_builddir) $(GTK_DEBUG_FLAGS)
+# e.g. GTKDOC_LIBS=$(top_builddir)/gtk/$(gtktargetlib)
+AM_CFLAGS=$(GLIB_CFLAGS) $(GOBJECT_CFLAGS) $(MIXCOMMON_CFLAGS)
+GTKDOC_LIBS=$(GLIB_LIBS) $(GOBJECT_LIBS) $(MIXCOMMON_LIBS) $(top_srcdir)/src/libmixvideo.la
+
+# This includes the standard gtk-doc make rules, copied by gtkdocize.
+include $(top_srcdir)/gtk-doc.make
+
+# Other files to distribute
+# e.g. EXTRA_DIST += version.xml.in
+EXTRA_DIST +=
+
+# Files not to distribute
+# for --rebuild-types in $(SCAN_OPTIONS), e.g. $(DOC_MODULE).types
+# for --rebuild-sections in $(SCAN_OPTIONS) e.g. $(DOC_MODULE)-sections.txt
+#DISTCLEANFILES =
+
+# Comment this out if you want your docs-status tested during 'make check'
+#TESTS = $(GTKDOC_CHECK)
+

diff --git a/mix_video/docs/reference/MixVideo/MixVideo-sections.txt b/mix_video/docs/reference/MixVideo/MixVideo-sections.txt
new file mode 100644
index 0000000..dc55540
--- /dev/null
+++ b/mix_video/docs/reference/MixVideo/MixVideo-sections.txt
@@ -0,0 +1,452 @@
+
+mixvideo +MixVideo +MixVideo +mix_video_new +mix_video_ref +mix_video_unref +mix_video_get_version +mix_video_initialize +mix_video_deinitialize +mix_video_configure +mix_video_get_config +mix_video_decode +mix_video_get_frame +mix_video_release_frame +mix_video_render +mix_video_encode +mix_video_flush +mix_video_eos +mix_video_get_state +mix_video_get_mixbuffer +mix_video_release_mixbuffer +mix_video_get_max_coded_buffer_size + +MIX_VIDEO +MIX_IS_VIDEO +MIX_TYPE_VIDEO +mix_video_get_type +MIX_VIDEO_CLASS +MIX_IS_VIDEO_CLASS +MIX_VIDEO_GET_CLASS +
+ +
+mixvideoinitparams +MixVideoInitParams +MixVideoInitParams +mix_videoinitparams_new +mix_videoinitparams_ref +mix_videoinitparams_unref +mix_videoinitparams_set_display +mix_videoinitparams_get_display + +MIX_VIDEOINITPARAMS +MIX_IS_VIDEOINITPARAMS +MIX_TYPE_VIDEOINITPARAMS +mix_videoinitparams_get_type +MIX_VIDEOINITPARAMS_CLASS +MIX_IS_VIDEOINITPARAMS_CLASS +MIX_VIDEOINITPARAMS_GET_CLASS +
+ +
+mixdrmparams +MixDrmParams +MixDrmParams +mix_drmparams_new +mix_drmparams_ref +mix_drmparams_unref + +MIX_DRMPARAMS +MIX_IS_DRMPARAMS +MIX_TYPE_DRMPARAMS +mix_drmparams_get_type +MIX_DRMPARAMS_CLASS +MIX_IS_DRMPARAMS_CLASS +MIX_DRMPARAMS_GET_CLASS +
+ +
+mixdisplay +MIX_DISPLAY_CLASS +MIX_DISPLAY_CAST +MixDisplayDupFunction +MixDisplayCopyFunction +MixDisplayFinalizeFunction +MixDisplayEqualFunction +MIX_VALUE_HOLDS_DISPLAY +MIX_DISPLAY_REFCOUNT +MIX_DISPLAY_REFCOUNT_VALUE +MixDisplay +MixDisplay +mix_display_new +mix_display_copy +mix_display_ref +mix_display_unref +mix_display_replace +mix_display_dup +mix_display_equal +MIX_TYPE_PARAM_DISPLAY +MIX_IS_PARAM_SPEC_DISPLAY +MIX_PARAM_SPEC_DISPLAY +MixParamSpecDisplay +mix_param_spec_display_get_type +mix_param_spec_display +mix_value_set_display +mix_value_take_display +mix_value_get_display +mix_value_dup_display + +MIX_DISPLAY +MIX_IS_DISPLAY +MIX_TYPE_DISPLAY +mix_display_get_type +MIX_IS_DISPLAY_CLASS +MIX_DISPLAY_GET_CLASS +
+ +
+mixdisplayx11 +MixDisplayX11 +MixDisplayX11 +mix_displayx11_new +mix_displayx11_ref +mix_displayx11_unref +mix_displayx11_set_display +mix_displayx11_get_display +mix_displayx11_set_drawable +mix_displayx11_get_drawable + +MIX_DISPLAYX11 +MIX_IS_DISPLAYX11 +MIX_TYPE_DISPLAYX11 +mix_displayx11_get_type +MIX_DISPLAYX11_CLASS +MIX_IS_DISPLAYX11_CLASS +MIX_DISPLAYX11_GET_CLASS +
+ +
+mixbuffer +MixBufferCallback +MixBuffer +MixBuffer +mix_buffer_new +mix_buffer_ref +mix_buffer_unref +mix_buffer_set_data + +MIX_BUFFER +MIX_IS_BUFFER +MIX_TYPE_BUFFER +mix_buffer_get_type +MIX_BUFFER_CLASS +MIX_IS_BUFFER_CLASS +MIX_BUFFER_GET_CLASS +
+ +
+mixvideoframe +MixVideoFrame +MixVideoFrame +mix_videoframe_new +mix_videoframe_ref +mix_videoframe_unref +mix_videoframe_set_frame_id +mix_videoframe_get_frame_id +mix_videoframe_set_ci_frame_idx +mix_videoframe_get_ci_frame_idx +mix_videoframe_set_timestamp +mix_videoframe_get_timestamp +mix_videoframe_set_discontinuity +mix_videoframe_get_discontinuity + +MIX_VIDEOFRAME +MIX_IS_VIDEOFRAME +MIX_TYPE_VIDEOFRAME +mix_videoframe_get_type +MIX_VIDEOFRAME_CLASS +MIX_IS_VIDEOFRAME_CLASS +MIX_VIDEOFRAME_GET_CLASS +
+ +
+mixvideoconfigparams +MixVideoConfigParams +MixVideoConfigParams +mix_videoconfigparams_new +mix_videoconfigparams_ref +mix_videoconfigparams_unref + +MIX_VIDEOCONFIGPARAMS +MIX_IS_VIDEOCONFIGPARAMS +MIX_TYPE_VIDEOCONFIGPARAMS +mix_videoconfigparams_get_type +MIX_VIDEOCONFIGPARAMS_CLASS +MIX_IS_VIDEOCONFIGPARAMS_CLASS +MIX_VIDEOCONFIGPARAMS_GET_CLASS +
+ +
+mixvideoconfigparamsdec +MixVideoConfigParamsDec +MixVideoConfigParamsDec +mix_videoconfigparamsdec_new +mix_videoconfigparamsdec_ref +mix_videoconfigparamsdec_unref +mix_videoconfigparamsdec_set_frame_order_mode +mix_videoconfigparamsdec_get_frame_order_mode +mix_videoconfigparamsdec_set_header +mix_videoconfigparamsdec_get_header +mix_videoconfigparamsdec_set_mime_type +mix_videoconfigparamsdec_get_mime_type +mix_videoconfigparamsdec_set_frame_rate +mix_videoconfigparamsdec_get_frame_rate +mix_videoconfigparamsdec_set_picture_res +mix_videoconfigparamsdec_get_picture_res +mix_videoconfigparamsdec_set_raw_format +mix_videoconfigparamsdec_get_raw_format +mix_videoconfigparamsdec_set_rate_control +mix_videoconfigparamsdec_get_rate_control +mix_videoconfigparamsdec_set_buffer_pool_size +mix_videoconfigparamsdec_get_buffer_pool_size +mix_videoconfigparamsdec_set_extra_surface_allocation +mix_videoconfigparamsdec_get_extra_surface_allocation + +MIX_VIDEOCONFIGPARAMSDEC +MIX_IS_VIDEOCONFIGPARAMSDEC +MIX_TYPE_VIDEOCONFIGPARAMSDEC +mix_videoconfigparamsdec_get_type +MIX_VIDEOCONFIGPARAMSDEC_CLASS +MIX_IS_VIDEOCONFIGPARAMSDEC_CLASS +MIX_VIDEOCONFIGPARAMSDEC_GET_CLASS +
+ +
+mixvideoconfigparamsdec_vc1 +MixVideoConfigParamsDecVC1 +MixVideoConfigParamsDecVC1 +mix_videoconfigparamsdec_vc1_new +mix_videoconfigparamsdec_vc1_unref + +MIX_VIDEOCONFIGPARAMSDEC_VC1 +MIX_IS_VIDEOCONFIGPARAMSDEC_VC1 +MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1 +mix_videoconfigparamsdec_vc1_get_type +MIX_VIDEOCONFIGPARAMSDEC_VC1_CLASS +MIX_IS_VIDEOCONFIGPARAMSDEC_VC1_CLASS +MIX_VIDEOCONFIGPARAMSDEC_VC1_GET_CLASS +
+ +
+mixvideoconfigparamsdec_h264 +MixVideoConfigParamsDecH264 +MixVideoConfigParamsDecH264 +mix_videoconfigparamsdec_h264_new +mix_videoconfigparamsdec_h264_unref + +MIX_VIDEOCONFIGPARAMSDEC_H264 +MIX_IS_VIDEOCONFIGPARAMSDEC_H264 +MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264 +mix_videoconfigparamsdec_h264_get_type +MIX_VIDEOCONFIGPARAMSDEC_H264_CLASS +MIX_IS_VIDEOCONFIGPARAMSDEC_H264_CLASS +MIX_VIDEOCONFIGPARAMSDEC_H264_GET_CLASS +
+ +
+mixvideoconfigparamsdec_mp42 +MixVideoConfigParamsDecMP42 +MixVideoConfigParamsDecMP42 +mix_videoconfigparamsdec_mp42_new +mix_videoconfigparamsdec_mp42_unref +mix_videoconfigparamsdec_mp42_set_mpegversion +mix_videoconfigparamsdec_mp42_get_mpegversion +mix_videoconfigparamsdec_mp42_set_divxversion +mix_videoconfigparamsdec_mp42_get_divxversion + +MIX_VIDEOCONFIGPARAMSDEC_MP42 +MIX_IS_VIDEOCONFIGPARAMSDEC_MP42 +MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42 +mix_videoconfigparamsdec_mp42_get_type +MIX_VIDEOCONFIGPARAMSDEC_MP42_CLASS +MIX_IS_VIDEOCONFIGPARAMSDEC_MP42_CLASS +MIX_VIDEOCONFIGPARAMSDEC_MP42_GET_CLASS +
+ + +
+mixvideoconfigparamsenc +MixVideoConfigParamsEnc +MixVideoConfigParamsEnc +mix_videoconfigparamsenc_new +mix_videoconfigparamsenc_ref +mix_videoconfigparamsenc_unref +mix_videoconfigparamsenc_set_mime_type +mix_videoconfigparamsenc_get_mime_type +mix_videoconfigparamsenc_set_frame_rate +mix_videoconfigparamsenc_get_frame_rate +mix_videoconfigparamsenc_set_picture_res +mix_videoconfigparamsenc_get_picture_res +mix_videoconfigparamsenc_set_encode_format +mix_videoconfigparamsenc_get_encode_format +mix_videoconfigparamsenc_set_bit_rate +mix_videoconfigparamsenc_get_bit_rate +mix_videoconfigparamsenc_set_init_qp +mix_videoconfigparamsenc_get_init_qp +mix_videoconfigparamsenc_set_min_qp +mix_videoconfigparamsenc_get_min_qp +mix_videoconfigparamsenc_set_intra_period +mix_videoconfigparamsenc_get_intra_period +mix_videoconfigparamsenc_set_buffer_pool_size +mix_videoconfigparamsenc_get_buffer_pool_size +mix_videoconfigparamsenc_set_share_buf_mode +mix_videoconfigparamsenc_get_share_buf_mode +mix_videoconfigparamsenc_set_ci_frame_info +mix_videoconfigparamsenc_get_ci_frame_info +mix_videoconfigparamsenc_set_drawable +mix_videoconfigparamsenc_get_drawable +mix_videoconfigparamsenc_set_need_display +mix_videoconfigparamsenc_get_need_display +mix_videoconfigparamsenc_set_rate_control +mix_videoconfigparamsenc_get_rate_control +mix_videoconfigparamsenc_set_raw_format +mix_videoconfigparamsenc_get_raw_format +mix_videoconfigparamsenc_set_profile +mix_videoconfigparamsenc_get_profile + +MIX_VIDEOCONFIGPARAMSENC +MIX_IS_VIDEOCONFIGPARAMSENC +MIX_TYPE_VIDEOCONFIGPARAMSENC +mix_videoconfigparamsenc_get_type +MIX_VIDEOCONFIGPARAMSENC_CLASS +MIX_IS_VIDEOCONFIGPARAMSENC_CLASS +MIX_VIDEOCONFIGPARAMSENC_GET_CLASS +
+ + +
+mixvideoconfigparamsenc_h264 +MixVideoConfigParamsEncH264 +MixVideoConfigParamsEncH264 +mix_videoconfigparamsenc_h264_new +mix_videoconfigparamsenc_h264_unref +mix_videoconfigparamsenc_h264_set_bus +mix_videoconfigparamsenc_h264_get_bus +mix_videoconfigparamsenc_h264_set_dlk +mix_videoconfigparamsenc_h264_get_dlk +mix_videoconfigparamsenc_h264_set_slice_num +mix_videoconfigparamsenc_h264_get_slice_num +mix_videoconfigparamsenc_h264_set_delimiter_type +mix_videoconfigparamsenc_h264_get_delimiter_type + +MIX_VIDEOCONFIGPARAMSENC_H264 +MIX_IS_VIDEOCONFIGPARAMSENC_H264 +MIX_TYPE_VIDEOCONFIGPARAMSENC_H264 +mix_videoconfigparamsenc_h264_get_type +MIX_VIDEOCONFIGPARAMSENC_H264_CLASS +MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS +MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS +
+ +
+mixvideoconfigparamsenc_mpeg4 +MixVideoConfigParamsEncMPEG4 +MixVideoConfigParamsEncMPEG4 +mix_videoconfigparamsenc_mpeg4_new +mix_videoconfigparamsenc_mpeg4_unref +mix_videoconfigparamsenc_mpeg4_set_dlk +mix_videoconfigparamsenc_mpeg4_get_dlk +mix_videoconfigparamsenc_mpeg4_set_profile_level +mix_videoconfigparamsenc_mpeg4_get_profile_level +mix_videoconfigparamsenc_mpeg4_set_fixed_vti +mix_videoconfigparamsenc_mpeg4_get_fixed_vti + +MIX_VIDEOCONFIGPARAMSENC_MPEG4 +MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 +MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4 +mix_videoconfigparamsenc_mpeg4_get_type +MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS +MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS +MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS +
+ +
+mixvideodecodeparams +MixVideoDecodeParams +MixVideoDecodeParams +mix_videodecodeparams_new +mix_videodecodeparams_ref +mix_videodecodeparams_unref +mix_videodecodeparams_set_timestamp +mix_videodecodeparams_get_timestamp +mix_videodecodeparams_set_discontinuity +mix_videodecodeparams_get_discontinuity + +MIX_VIDEODECODEPARAMS +MIX_IS_VIDEODECODEPARAMS +MIX_TYPE_VIDEODECODEPARAMS +mix_videodecodeparams_get_type +MIX_VIDEODECODEPARAMS_CLASS +MIX_IS_VIDEODECODEPARAMS_CLASS +MIX_VIDEODECODEPARAMS_GET_CLASS +
+ +
+mixvideoencodeparams +MixVideoEncodeParams +MixVideoEncodeParams +mix_videoencodeparams_new +mix_videoencodeparams_ref +mix_videoencodeparams_unref + +MIX_VIDEOENCODEPARAMS +MIX_IS_VIDEOENCODEPARAMS +MIX_TYPE_VIDEOENCODEPARAMS +mix_videoencodeparams_get_type +MIX_VIDEOENCODEPARAMS_CLASS +MIX_IS_VIDEOENCODEPARAMS_CLASS +MIX_VIDEOENCODEPARAMS_GET_CLASS +
+ +
+mixvideorenderparams +MixVideoRenderParams +MixVideoRenderParams +mix_videorenderparams_new +mix_videorenderparams_ref +mix_videorenderparams_unref +mix_videorenderparams_set_display +mix_videorenderparams_get_display +mix_videorenderparams_set_src_rect +mix_videorenderparams_get_src_rect +mix_videorenderparams_set_dest_rect +mix_videorenderparams_get_dest_rect +mix_videorenderparams_set_clipping_rects +mix_videorenderparams_get_clipping_rects + +MIX_VIDEORENDERPARAMS +MIX_IS_VIDEORENDERPARAMS +MIX_TYPE_VIDEORENDERPARAMS +mix_videorenderparams_get_type +MIX_VIDEORENDERPARAMS_CLASS +MIX_IS_VIDEORENDERPARAMS_CLASS +MIX_VIDEORENDERPARAMS_GET_CLASS +
+ +
+mixvideodef +MIX_VIDEO_ERROR_CODE +MixCodecMode +MixFrameOrderMode +MixIOVec +MixRect +MixState +MixRawTargetFormat +MixEncodeTargetFormat +MixRateControl +MixProfile +MixDelimiterType +
+ diff --git a/mix_video/docs/reference/MixVideo/MixVideo.types b/mix_video/docs/reference/MixVideo/MixVideo.types new file mode 100644 index 0000000..da3937b --- /dev/null +++ b/mix_video/docs/reference/MixVideo/MixVideo.types @@ -0,0 +1,22 @@ +#include +#include +mix_video_get_type +mix_videoconfigparamsenc_mpeg4_get_type +mix_videoconfigparams_get_type +mix_videoconfigparamsenc_h264_get_type +mix_videoconfigparamsdec_get_type +mix_videoconfigparamsenc_get_type +mix_videorenderparams_get_type +mix_videoframe_get_type +mix_videodecodeparams_get_type +mix_videoconfigparamsdec_mp42_get_type +mix_buffer_get_type +mix_displayx11_get_type +mix_videoencodeparams_get_type +mix_videoconfigparamsdec_vc1_get_type +mix_videoinitparams_get_type +mix_drmparams_get_type +mix_display_get_type +mix_param_spec_display_get_type +mix_videoconfigparamsdec_h264_get_type +mix_params_get_type diff --git a/mix_video/docs/reference/MixVideo/html/MixBuffer.html b/mix_video/docs/reference/MixVideo/html/MixBuffer.html new file mode 100644 index 0000000..1183b43 --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/MixBuffer.html @@ -0,0 +1,227 @@ + + + + +MixBuffer + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixBuffer

+

MixBuffer — MI-X Video Buffer Parameters

+
+
+

Synopsis

+
+void                (*MixBufferCallback)                (gulong token,
+                                                         guchar *data);
+                    MixBuffer;
+MixBuffer *         mix_buffer_new                      (void);
+MixBuffer *         mix_buffer_ref                      (MixBuffer *mix);
+void                mix_buffer_unref                    (MixBuffer *mix);
+MIX_RESULT          mix_buffer_set_data                 (MixBuffer *obj,
+                                                         guchar *data,
+                                                         guint size,
+                                                         gulong token,
+                                                         MixBufferCallback callback);
+
+
+
+

Object Hierarchy

+
+  MixParams
+   +----MixBuffer
+
+
+
+

Description

+

+

+

+MixBuffer objects are used to wrap input data buffers in a reference-counted
+object, as described in the buffer model section. The data buffers themselves
+are allocated by the App/MMF. MixBuffer objects are allocated by MixVideo in
+a pool and retrieved by the application using mix_video_get_mixbuffer(). The
+application wraps a data buffer in a MixBuffer object and passes it to
+mix_video_decode() or mix_video_encode().
+

+

+

+

+The MixBuffer objects will be released by MixVideo when they are no longer
+needed for the decode or encode operation. The App/MMF will also release the
+MixBuffer object after use. When the MixBuffer is completely released, the
+callback function registered in the MixBuffer will be called (allowing the
+App/MMF to release data buffers as necessary).
+

+
+
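To make the buffer model above concrete, here is a minimal sketch of submitting one coded frame, assuming an already initialized and configured MixVideo instance. The helper name, the token choice, and the guint64 argument of the timestamp setter are assumptions, not confirmed by this page:

#include <mixvideo.h>

/* Called once both the application and MixVideo have released the
 * MixBuffer; the application may now recycle or free `data`. */
static void on_buffer_released(gulong token, guchar *data)
{
    /* application-specific buffer management */
}

/* Hypothetical helper: wrap one frame of coded data and decode it. */
static MIX_RESULT submit_coded_frame(MixVideo *video, guchar *data,
                                     guint size, guint64 timestamp)
{
    MixBuffer *buf = NULL;
    MixVideoDecodeParams *dp = mix_videodecodeparams_new();
    MIX_RESULT ret = mix_video_get_mixbuffer(video, &buf);

    if (ret == MIX_RESULT_SUCCESS)
        ret = mix_buffer_set_data(buf, data, size,
                                  (gulong) data, on_buffer_released);
    if (ret == MIX_RESULT_SUCCESS) {
        mix_videodecodeparams_set_timestamp(dp, timestamp);
        ret = mix_video_decode(video, &buf, 1, dp);
    }
    if (buf != NULL)
        mix_video_release_mixbuffer(video, buf);  /* the app's release */
    mix_videodecodeparams_unref(dp);
    return ret;
}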
+

Details

+
+

MixBufferCallback ()

+
void                (*MixBufferCallback)                (gulong token,
+                                                         guchar *data);
+
+
+
+

MixBuffer

+
typedef struct {
+	MixParams parent;
+
+	
+	/* Pointer to coded data buffer */
+	guchar *data;
+	
+	/* Size of coded data buffer */
+	guint size;
+	
+	/* Token that will be passed to 
+	 * the callback function. Can be 
+	 * used by the application for 
+	 * any information to be associated 
+	 * with this coded data buffer, 
+	 * such as a pointer to a structure 
+	 * belonging to the application. */
+	gulong token;
+	
+	/* callback function pointer */
+	MixBufferCallback callback;
+} MixBuffer;
+
+

+MI-X Buffer Parameter object

+
+
+
+

mix_buffer_new ()

+
MixBuffer *         mix_buffer_new                      (void);
+

+Use this method to create a new instance of MixBuffer.

+
++ + + + +

returns :

A newly allocated instance of MixBuffer +
+
+
+
+

mix_buffer_ref ()

+
MixBuffer *         mix_buffer_ref                      (MixBuffer *mix);
+

+Increment the reference count of the object.

+
++ + + + + + + + + + +

mix :

object to add reference +

returns :

the MixBuffer instance where reference count has been increased. +
+
+
+
+

mix_buffer_unref ()

+
void                mix_buffer_unref                    (MixBuffer *mix);
+

+Decrement the reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

mix_buffer_set_data ()

+
MIX_RESULT          mix_buffer_set_data                 (MixBuffer *obj,
+                                                         guchar *data,
+                                                         guint size,
+                                                         gulong token,
+                                                         MixBufferCallback callback);
+

+Set the data buffer, size, token, and callback function.

+
++ + + + + + + + + + + + + + + + + + + + + + + + + + +

obj :

MixBuffer object +

data :

data buffer +

size :

data buffer size +

token :

token +

callback :

callback function pointer +

returns :

Common Video Error Return Codes +
+
+
+
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/MixDisplay.html b/mix_video/docs/reference/MixVideo/html/MixDisplay.html new file mode 100644 index 0000000..04a75c6 --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/MixDisplay.html @@ -0,0 +1,591 @@ + + + + +MixDisplay + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixDisplay

+

MixDisplay — Lightweight Base Object for MI-X Video Display

+
+
+

Synopsis

+
+#define             MIX_DISPLAY_CLASS                   (klass)
+#define             MIX_DISPLAY_CAST                    (obj)
+MixDisplay *        (*MixDisplayDupFunction)            (const MixDisplay *obj);
+gboolean            (*MixDisplayCopyFunction)           (MixDisplay *target,
+                                                         const MixDisplay *src);
+void                (*MixDisplayFinalizeFunction)       (MixDisplay *obj);
+gboolean            (*MixDisplayEqualFunction)          (MixDisplay *first,
+                                                         MixDisplay *second);
+#define             MIX_VALUE_HOLDS_DISPLAY             (value)
+#define             MIX_DISPLAY_REFCOUNT                (obj)
+#define             MIX_DISPLAY_REFCOUNT_VALUE          (obj)
+                    MixDisplay;
+MixDisplay *        mix_display_new                     ();
+gboolean            mix_display_copy                    (MixDisplay *target,
+                                                         const MixDisplay *src);
+MixDisplay *        mix_display_ref                     (MixDisplay *obj);
+void                mix_display_unref                   (MixDisplay *obj);
+void                mix_display_replace                 (MixDisplay **olddata,
+                                                         MixDisplay *newdata);
+MixDisplay *        mix_display_dup                     (const MixDisplay *obj);
+gboolean            mix_display_equal                   (MixDisplay *first,
+                                                         MixDisplay *second);
+#define             MIX_TYPE_PARAM_DISPLAY
+#define             MIX_IS_PARAM_SPEC_DISPLAY           (pspec)
+#define             MIX_PARAM_SPEC_DISPLAY              (pspec)
+                    MixParamSpecDisplay;
+GType               mix_param_spec_display_get_type     (void);
+GParamSpec *        mix_param_spec_display              (const char *name,
+                                                         const char *nick,
+                                                         const char *blurb,
+                                                         GType object_type,
+                                                         GParamFlags flags);
+void                mix_value_set_display               (GValue *value,
+                                                         MixDisplay *obj);
+void                mix_value_take_display              (GValue *value,
+                                                         MixDisplay *obj);
+MixDisplay *        mix_value_get_display               (const GValue *value);
+MixDisplay *        mix_value_dup_display               (const GValue *value);
+
+
+
+

Object Hierarchy

+
+  MixDisplay
+   +----MixDisplayX11
+
+
+
+

Description

+

+

+
+
+

Details

+
+

MIX_DISPLAY_CLASS()

+
#define MIX_DISPLAY_CLASS(klass)  (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DISPLAY, MixDisplayClass))
+
+
+
+
+

MIX_DISPLAY_CAST()

+
#define MIX_DISPLAY_CAST(obj)     ((MixDisplay*)(obj))
+
+
+
+
+

MixDisplayDupFunction ()

+
MixDisplay *        (*MixDisplayDupFunction)            (const MixDisplay *obj);
+

+Virtual function prototype for methods that create a duplicate of an instance.

+
++ + + + + + + + + + +

obj :

Display to duplicate +

returns :

reference to cloned instance. +
+
+
+
+

MixDisplayCopyFunction ()

+
gboolean            (*MixDisplayCopyFunction)           (MixDisplay *target,
+                                                         const MixDisplay *src);
+

+Virtual function prototype for methods that copy one instance to another.

+
++ + + + + + + + + + + + + + +

target :

target of the copy +

src :

source of the copy +

returns :

boolean indicating whether the copy succeeded. +
+
+
+
+

MixDisplayFinalizeFunction ()

+
void                (*MixDisplayFinalizeFunction)       (MixDisplay *obj);
+

+Virtual function prototype for methods that free the resources used by an
+object.

+
++ + + + +

obj :

Display to finalize +
+
+
+
+

MixDisplayEqualFunction ()

+
gboolean            (*MixDisplayEqualFunction)          (MixDisplay *first,
+                                                         MixDisplay *second);
+
+
+
+

MIX_VALUE_HOLDS_DISPLAY()

+
#define MIX_VALUE_HOLDS_DISPLAY(value)  (G_VALUE_HOLDS(value, MIX_TYPE_DISPLAY))
+
+

+Checks if the given GValue contains a MIX_TYPE_DISPLAY value.

+
++ + + + +

value :

the GValue to check +
+
+
+
+

MIX_DISPLAY_REFCOUNT()

+
#define MIX_DISPLAY_REFCOUNT(obj)           ((MIX_DISPLAY_CAST(obj))->refcount)
+
+

+Get access to the reference count field of the object.

+
++ + + + +

obj :

a MixDisplay +
+
+
+
+

MIX_DISPLAY_REFCOUNT_VALUE()

+
#define MIX_DISPLAY_REFCOUNT_VALUE(obj)     (g_atomic_int_get (&(MIX_DISPLAY_CAST(obj))->refcount))
+
+

+Get the reference count value of the object.

+
++ + + + +

obj :

a MixDisplay +
+
+
+
+

MixDisplay

+
typedef struct {
+  gint refcount;
+} MixDisplay;
+
+

+Base class for refcounted display objects.

+
++ + + + +

gint refcount;

atomic refcount +
+
+
+
+

mix_display_new ()

+
MixDisplay *        mix_display_new                     ();
+

+Create a new instance of the object.

+
++ + + + +

returns :

return a newly allocated object. +
+
+
+
+

mix_display_copy ()

+
gboolean            mix_display_copy                    (MixDisplay *target,
+                                                         const MixDisplay *src);
+

+Copy data from one instance to the other. This method internally invokes the "copy" virtual method, so derived objects are copied correctly.

+
++ + + + + + + + + + + + + + +

target :

copy to target +

src :

copy from source +

returns :

boolean indicating if copy is successful. +
+
+
+
+

mix_display_ref ()

+
MixDisplay *        mix_display_ref                     (MixDisplay *obj);
+

+Increment reference count.

+
++ + + + + + + + + + +

obj :

a MixDisplay object. +

returns :

the object with reference count incremented. +
+
+
+
+

mix_display_unref ()

+
void                mix_display_unref                   (MixDisplay *obj);
+

+Decrement reference count.

+
++ + + + +

obj :

a MixDisplay object. +
+
+
+
+

mix_display_replace ()

+
void                mix_display_replace                 (MixDisplay **olddata,
+                                                         MixDisplay *newdata);
+

+Replace the object pointed to by olddata with newdata.

+
++ + + + + + + + + + +

olddata :

old data +

newdata :

new data +
+
+
+
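As a short illustration of the helper above, a sketch of caching a display object with mix_display_replace(); the exact semantics (reference the new object, unreference the old one) are assumed from the usual GLib convention and are not confirmed by this page:

static MixDisplay *cached_display = NULL;

/* Keep one reference to the most recent display object. */
void set_cached_display(MixDisplay *display)
{
    mix_display_replace(&cached_display, display);  /* assumed ref/unref */
}

void clear_cached_display(void)
{
    mix_display_replace(&cached_display, NULL);     /* drops our reference */
}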
+

mix_display_dup ()

+
MixDisplay *        mix_display_dup                     (const MixDisplay *obj);
+

+Duplicate the given MixDisplay into a newly allocated instance. This method chains up properly, so derived objects are duplicated correctly.

+
++ + + + + + + + + + +

obj :

MixDisplay object to duplicate. +

returns :

A newly allocated duplicate of the object, or NULL if failed. +
+
+
+
+

mix_display_equal ()

+
gboolean            mix_display_equal                   (MixDisplay *first,
+                                                         MixDisplay *second);
+

+Note that the comparison is of the values held inside the two objects; it does not merely check whether the two pointers refer to the same instance.

+
++ + + + + + + + + + + + + + +

first :

first object to compare +

second :

second object to compare +

returns :

boolean indicating whether the two objects contain the same data. +
+
+
+
+

MIX_TYPE_PARAM_DISPLAY

+
#define MIX_TYPE_PARAM_DISPLAY (mix_param_spec_display_get_type())
+
+
+
+
+

MIX_IS_PARAM_SPEC_DISPLAY()

+
#define MIX_IS_PARAM_SPEC_DISPLAY(pspec) (G_TYPE_CHECK_INSTANCE_TYPE ((pspec), MIX_TYPE_PARAM_DISPLAY))
+
+
+
+
+

MIX_PARAM_SPEC_DISPLAY()

+
#define MIX_PARAM_SPEC_DISPLAY(pspec) (G_TYPE_CHECK_INSTANCE_CAST ((pspec), MIX_TYPE_PARAM_DISPLAY, MixParamSpecDisplay))
+
+
+
+
+

MixParamSpecDisplay

+
typedef struct {
+  GParamSpec parent;
+} MixParamSpecDisplay;
+
+

+A GParamSpec-derived structure that contains the metadata
+for MixDisplay properties.

+
++ + + + +

GParamSpec parent;

GParamSpec portion +
+
+
+
+

mix_param_spec_display_get_type ()

+
GType               mix_param_spec_display_get_type     (void);
+
+
+
+

mix_param_spec_display ()

+
GParamSpec *        mix_param_spec_display              (const char *name,
+                                                         const char *nick,
+                                                         const char *blurb,
+                                                         GType object_type,
+                                                         GParamFlags flags);
+

+Creates a new GParamSpec instance that holds MixDisplay references.

+
++ + + + + + + + + + + + + + + + + + + + + + + + + + +

name :

the canonical name of the property +

nick :

the nickname of the property +

blurb :

a short description of the property +

object_type :

the MixDisplayType for the property +

flags :

a combination of GParamFlags +

returns :

a newly allocated GParamSpec instance +
+
+
+
+

mix_value_set_display ()

+
void                mix_value_set_display               (GValue *value,
+                                                         MixDisplay *obj);
+

+Set the contents of a MIX_TYPE_DISPLAY derived GValue to +obj. +The caller retains ownership of the reference.

+
++ + + + + + + + + + +

value :

a valid GValue of MIX_TYPE_DISPLAY derived type +

obj :

object value to set +
+
+
+
+

mix_value_take_display ()

+
void                mix_value_take_display              (GValue *value,
+                                                         MixDisplay *obj);
+

+Set the contents of a MIX_TYPE_DISPLAY derived GValue to +obj. +Takes over the ownership of the caller's reference to obj; +the caller doesn't have to unref it any more.

+
++ + + + + + + + + + +

value :

a valid GValue of MIX_TYPE_DISPLAY derived type +

obj :

object value to take +
+
+
+
+

mix_value_get_display ()

+
MixDisplay *        mix_value_get_display               (const GValue *value);
+

+Get the MixDisplay contents of the value; the refcount of the MixDisplay is not increased.

+
++ + + + + + + + + + +

value :

a valid GValue of MIX_TYPE_DISPLAY derived type +

returns :

object contents of value +
+
+
+
+

mix_value_dup_display ()

+
MixDisplay *        mix_value_dup_display               (const GValue *value);
+

+Get the MixDisplay contents of the value; the refcount of the MixDisplay is increased.

+
++ + + + + + + + + + +

value :

a valid GValue of MIX_TYPE_DISPLAY derived type +

returns :

object contents of value +
+
+
+
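A brief sketch of the GValue helpers above, contrasting the borrowing get with the reference-taking dup; it assumes `disp` is a valid MixDisplay and that MIX_TYPE_DISPLAY can be handed to g_value_init() directly:

void value_helpers_sketch(MixDisplay *disp)
{
    GValue value = { 0, };
    g_value_init(&value, MIX_TYPE_DISPLAY);

    mix_value_set_display(&value, disp);  /* value refs disp; we keep ours */

    MixDisplay *borrowed = mix_value_get_display(&value);  /* no new ref */
    MixDisplay *owned    = mix_value_dup_display(&value);  /* new ref    */
    (void) borrowed;  /* borrowed pointer is valid while value holds it */

    mix_display_unref(owned);
    g_value_unset(&value);  /* releases the reference held inside value */
}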
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/MixDisplayX11.html b/mix_video/docs/reference/MixVideo/html/MixDisplayX11.html new file mode 100644 index 0000000..c858e79 --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/MixDisplayX11.html @@ -0,0 +1,271 @@ + + + + +MixDisplayX11 + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixDisplayX11

+

MixDisplayX11 — MI-X Video X11 Display

+
+
+

Synopsis

+
+                    MixDisplayX11;
+MixDisplayX11 *     mix_displayx11_new                  (void);
+MixDisplayX11 *     mix_displayx11_ref                  (MixDisplayX11 *mix);
+#define             mix_displayx11_unref                (obj)
+MIX_RESULT          mix_displayx11_set_display          (MixDisplayX11 *obj,
+                                                         Display *display);
+MIX_RESULT          mix_displayx11_get_display          (MixDisplayX11 *obj,
+                                                         Display **display);
+MIX_RESULT          mix_displayx11_set_drawable         (MixDisplayX11 *obj,
+                                                         Drawable drawable);
+MIX_RESULT          mix_displayx11_get_drawable         (MixDisplayX11 *obj,
+                                                         Drawable *drawable);
+
+
+
+

Object Hierarchy

+
+  MixDisplay
+   +----MixDisplayX11
+
+
+
+

Description

+

+A data object which stores X11-specific parameters.
+

+

+

+
+

Data Structures Used in MixDisplayX11 Fields:

+See X11/Xlib.h for Display and Drawable definitions. +
+
+
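A minimal sketch of building the display object described above for use with mix_video_initialize(); `app_window` is a hypothetical, application-owned X11 Window, and error checks are omitted:

#include <X11/Xlib.h>

MixDisplayX11 *make_x11_display(Window app_window)
{
    Display *xdisplay = XOpenDisplay(NULL);

    MixDisplayX11 *x11 = mix_displayx11_new();
    mix_displayx11_set_display(x11, xdisplay);
    mix_displayx11_set_drawable(x11, app_window);  /* unused at initialize time */

    /* Pass as MIX_DISPLAY(x11), e.g. to mix_videoinitparams_set_display(),
     * then release with mix_displayx11_unref() when done. */
    return x11;
}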
+

Details

+
+

MixDisplayX11

+
typedef struct {
+  MixDisplay parent;
+
+
+  /* Pointer to a X Window Display structure */
+  Display *display;
+  
+  /* An X Window Drawable that is either a Window 
+   * or a Pixmap. This field is not used in 
+   * mix_video_initialize(). 
+   * See X11/Xlib.h for Display and Drawable definitions.*/
+  Drawable drawable;
+} MixDisplayX11;
+
+

+MI-X Video X11 Display object

+
+
+
+

mix_displayx11_new ()

+
MixDisplayX11 *     mix_displayx11_new                  (void);
+

+Use this method to create a new instance of MixDisplayX11.

+
++ + + + +

returns :

A newly allocated instance of MixDisplayX11 +
+
+
+
+

mix_displayx11_ref ()

+
MixDisplayX11 *     mix_displayx11_ref                  (MixDisplayX11 *mix);
+

+Increment the reference count of the object.

+
++ + + + + + + + + + +

mix :

object to add reference +

returns :

the MixDisplayX11 instance where reference count has been increased. +
+
+
+
+

mix_displayx11_unref()

+
#define mix_displayx11_unref(obj) mix_display_unref(MIX_DISPLAY(obj))
+
+

+Decrement the reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

mix_displayx11_set_display ()

+
MIX_RESULT          mix_displayx11_set_display          (MixDisplayX11 *obj,
+                                                         Display *display);
+

+Set Display

+
++ + + + + + + + + + + + + + +

obj :

MixDisplayX11 object +

display :

Pointer to a X Window Display structure +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_displayx11_get_display ()

+
MIX_RESULT          mix_displayx11_get_display          (MixDisplayX11 *obj,
+                                                         Display **display);
+

+Get Display

+
++ + + + + + + + + + + + + + +

obj :

MixDisplayX11 object +

display :

Pointer to pointer of X Window Display structure +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_displayx11_set_drawable ()

+
MIX_RESULT          mix_displayx11_set_drawable         (MixDisplayX11 *obj,
+                                                         Drawable drawable);
+

+Set drawable

+
++ + + + + + + + + + + + + + +

obj :

MixDisplayX11 object +

drawable :

An X Window Drawable that is either a Window or a Pixmap. +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_displayx11_get_drawable ()

+
MIX_RESULT          mix_displayx11_get_drawable         (MixDisplayX11 *obj,
+                                                         Drawable *drawable);
+

+Get drawable

+
++ + + + + + + + + + + + + + +

obj :

MixDisplayX11 object +

drawable :

An X Window Drawable that is either a Window or a Pixmap to be returned. +

returns :

Common Video Error Return Codes +
+
+
+
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/MixDrmParams.html b/mix_video/docs/reference/MixVideo/html/MixDrmParams.html new file mode 100644 index 0000000..c5f77f9 --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/MixDrmParams.html @@ -0,0 +1,137 @@ + + + + +MixDrmParams + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixDrmParams

+

MixDrmParams — DRM Parameters Base Object

+
+ +
+

Object Hierarchy

+
+  MixParams
+   +----MixDrmParams
+
+
+
+

Description

+

+A data object which stores DRM-specific parameters.

+
+
+

Details

+
+

MixDrmParams

+
typedef struct {
+  MixParams parent;
+
+
+  /* TODO: Add properties */
+} MixDrmParams;
+
+

+MI-X DRM Parameter object

+
+
+
+

mix_drmparams_new ()

+
MixDrmParams *      mix_drmparams_new                   (void);
+

+Use this method to create a new instance of MixDrmParams.

+
++ + + + +

returns :

A newly allocated instance of MixDrmParams +
+
+
+
+

mix_drmparams_ref ()

+
MixDrmParams *      mix_drmparams_ref                   (MixDrmParams *mix);
+

+Increment the reference count of the object.

+
++ + + + + + + + + + +

mix :

object to add reference +

returns :

the MixDrmParams instance where reference count has been increased. +
+
+
+
+

mix_drmparams_unref()

+
#define mix_drmparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement the reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+
+
+
diff --git a/mix_video/docs/reference/MixVideo/html/MixVideo-mixvideodef.html b/mix_video/docs/reference/MixVideo/html/MixVideo-mixvideodef.html
new file mode 100644
index 0000000..3250169
--- /dev/null
+++ b/mix_video/docs/reference/MixVideo/html/MixVideo-mixvideodef.html
@@ -0,0 +1,221 @@
+
+
+
+
+MI-X Video Data Definitions And Common Error Codes
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ + +
+

MI-X Video Data Definitions And Common Error Codes

+

MI-X Video Data Definitions And Common Error Codes — MI-X Video data definitions and common error codes

+
+
+

Synopsis

+
+
+#include <mixvideodef.h>
+
+enum                MIX_VIDEO_ERROR_CODE;
+enum                MixCodecMode;
+enum                MixFrameOrderMode;
+                    MixIOVec;
+                    MixRect;
+enum                MixState;
+enum                MixRawTargetFormat;
+enum                MixEncodeTargetFormat;
+enum                MixRateControl;
+enum                MixProfile;
+enum                MixDelimiterType;
+
+
+
+

Description

+

+This section includes the definitions of the enums and structs used by MI-X Video, together with the common error return codes listed below.

+
+

Common Video Error Return Codes of MI-X video functions

+
    +
• +MIX_RESULT_SUCCESS, The operation completed successfully.
  • +
  • MIX_RESULT_NULL_PTR, The pointer passed to the function was null.
  • +
  • MIX_RESULT_NO_MEMORY, Memory needed for the operation could not be allocated.
  • +
  • MIX_RESULT_INVALID_PARAM, An argument passed to the function was invalid.
  • +
  • MIX_RESULT_NOT_INIT, MixVideo object has not been initialized yet.
  • +
  • MIX_RESULT_NOT_CONFIGURED, MixVideo object has not been configured yet.
  • +
  • MIX_RESULT_FAIL, For any failure.
  • +
+
+
+
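For illustration, a sketch of how these return codes are typically checked around mix_video_get_frame():

void get_frame_sketch(MixVideo *video)
{
    MixVideoFrame *frame = NULL;
    MIX_RESULT ret = mix_video_get_frame(video, &frame);

    if (ret == MIX_RESULT_SUCCESS) {
        /* render, then mix_video_release_frame(video, frame) */
    } else if (ret == MIX_RESULT_FRAME_NOTAVAIL) {
        /* no decoded frame ready yet; try again later */
    } else if (ret == MIX_RESULT_EOS) {
        /* end of stream reached */
    } else {
        /* one of the Common Video Error Return Codes */
    }
}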
+

Details

+
+

enum MIX_VIDEO_ERROR_CODE

+
typedef enum {
+	MIX_RESULT_FRAME_NOTAVAIL = MIX_RESULT_ERROR_VIDEO_START + 1,
+	MIX_RESULT_EOS,
+	MIX_RESULT_POOLEMPTY,
+	MIX_RESULT_OUTOFSURFACES,
+	MIX_RESULT_DROPFRAME,
+	MIX_RESULT_NOTIMPL,
+	MIX_RESULT_VIDEO_LAST
+} MIX_VIDEO_ERROR_CODE;
+
+
+
+
+

enum MixCodecMode

+
typedef enum {
+	MIX_CODEC_MODE_ENCODE = 0,
+	MIX_CODEC_MODE_DECODE,
+	MIX_CODEC_MODE_LAST
+} MixCodecMode;
+
+
+
+
+

enum MixFrameOrderMode

+
typedef enum {
+	MIX_FRAMEORDER_MODE_DISPLAYORDER = 0,
+	MIX_FRAMEORDER_MODE_DECODEORDER,
+	MIX_FRAMEORDER_MODE_LAST
+} MixFrameOrderMode;
+
+
+
+
+

MixIOVec

+
typedef struct {
+	guchar *data;
+	gint buffer_size;
+    gint data_size;
+} MixIOVec;
+
+
+
+
+

MixRect

+
typedef struct {
+	gshort x;
+	gshort y;
+	gushort width;
+	gushort height;
+} MixRect;
+
+
+
+
+

enum MixState

+
typedef enum {
+	MIX_STATE_UNINITIALIZED = 0,
+	MIX_STATE_INITIALIZED,
+	MIX_STATE_CONFIGURED,
+	MIX_STATE_LAST
+} MixState;
+
+
+
+
+

enum MixRawTargetFormat

+
typedef enum
+{
+    MIX_RAW_TARGET_FORMAT_NONE = 0,
+    MIX_RAW_TARGET_FORMAT_YUV420 = 1,
+    MIX_RAW_TARGET_FORMAT_YUV422 = 2,
+    MIX_RAW_TARGET_FORMAT_YUV444 = 4,
+    MIX_RAW_TARGET_FORMAT_PROTECTED = 0x80000000,    
+    MIX_RAW_TARGET_FORMAT_LAST
+} MixRawTargetFormat;
+
+
+
+
+

enum MixEncodeTargetFormat

+
typedef enum
+{
+    MIX_ENCODE_TARGET_FORMAT_MPEG4 = 0,
+    MIX_ENCODE_TARGET_FORMAT_H263 = 2,
+    MIX_ENCODE_TARGET_FORMAT_H264 = 4,
+    MIX_ENCODE_TARGET_FORMAT_PREVIEW = 8,
+    MIX_ENCODE_TARGET_FORMAT_LAST
+} MixEncodeTargetFormat;
+
+
+
+
+

enum MixRateControl

+
typedef enum
+{
+    MIX_RATE_CONTROL_NONE = 1,
+    MIX_RATE_CONTROL_CBR = 2,
+    MIX_RATE_CONTROL_VBR = 4,
+    MIX_RATE_CONTROL_LAST
+} MixRateControl;
+
+
+
+
+

enum MixProfile

+
typedef enum
+{
+    MIX_PROFILE_MPEG2SIMPLE = 0,
+    MIX_PROFILE_MPEG2MAIN,
+    MIX_PROFILE_MPEG4SIMPLE,
+    MIX_PROFILE_MPEG4ADVANCEDSIMPLE,
+    MIX_PROFILE_MPEG4MAIN,
+    MIX_PROFILE_H264BASELINE,
+    MIX_PROFILE_H264MAIN,
+    MIX_PROFILE_H264HIGH,
+    MIX_PROFILE_VC1SIMPLE,
+    MIX_PROFILE_VC1MAIN,
+    MIX_PROFILE_VC1ADVANCED,
+    MIX_PROFILE_H263BASELINE
+} MixProfile;
+
+
+
+
+

enum MixDelimiterType

+
typedef enum
+{
+    MIX_DELIMITER_LENGTHPREFIX = 0,
+    MIX_DELIMITER_ANNEXB
+} MixDelimiterType;
+
+
+
+
+
+
+
diff --git a/mix_video/docs/reference/MixVideo/html/MixVideo.devhelp b/mix_video/docs/reference/MixVideo/html/MixVideo.devhelp
new file mode 100644
index 0000000..977309b
--- /dev/null
+++ b/mix_video/docs/reference/MixVideo/html/MixVideo.devhelp
@@ -0,0 +1,244 @@
[244 lines of devhelp index XML; only markup, no text, survived extraction]
diff --git a/mix_video/docs/reference/MixVideo/html/MixVideo.devhelp2 b/mix_video/docs/reference/MixVideo/html/MixVideo.devhelp2
new file mode 100644
index 0000000..5655e98
--- /dev/null
+++ b/mix_video/docs/reference/MixVideo/html/MixVideo.devhelp2
@@ -0,0 +1,244 @@
[244 lines of devhelp2 index XML; only markup, no text, survived extraction]
diff --git a/mix_video/docs/reference/MixVideo/html/MixVideo.html b/mix_video/docs/reference/MixVideo/html/MixVideo.html
new file mode 100644
index 0000000..f0fb27c
--- /dev/null
+++ b/mix_video/docs/reference/MixVideo/html/MixVideo.html
@@ -0,0 +1,958 @@
+
+
+
+
+MixVideo
+
+
+
+
+ + +
+

MixVideo

+

MixVideo — Object supporting decoding or encoding of a single stream using a hardware-accelerated decoder/encoder.

+
+
+

Synopsis

+
+
+#include <mixvideo.h>
+
+                    MixVideo;
+MixVideo *          mix_video_new                       (void);
+MixVideo *          mix_video_ref                       (MixVideo *mix);
+#define             mix_video_unref                     (obj)
+MIX_RESULT          mix_video_get_version               (MixVideo *mix,
+                                                         guint *major,
+                                                         guint *minor);
+MIX_RESULT          mix_video_initialize                (MixVideo *mix,
+                                                         MixCodecMode mode,
+                                                         MixVideoInitParams *init_params,
+                                                         MixDrmParams *drm_init_params);
+MIX_RESULT          mix_video_deinitialize              (MixVideo *mix);
+MIX_RESULT          mix_video_configure                 (MixVideo *mix,
+                                                         MixVideoConfigParams *config_params,
+                                                         MixDrmParams *drm_config_params);
+MIX_RESULT          mix_video_get_config                (MixVideo *mix,
+                                                         MixVideoConfigParams **config_params);
+MIX_RESULT          mix_video_decode                    (MixVideo *mix,
+                                                         MixBuffer *bufin[],
+                                                         gint bufincnt,
+                                                         MixVideoDecodeParams *decode_params);
+MIX_RESULT          mix_video_get_frame                 (MixVideo *mix,
+                                                         MixVideoFrame **frame);
+MIX_RESULT          mix_video_release_frame             (MixVideo *mix,
+                                                         MixVideoFrame *frame);
+MIX_RESULT          mix_video_render                    (MixVideo *mix,
+                                                         MixVideoRenderParams *render_params,
+                                                         MixVideoFrame *frame);
+MIX_RESULT          mix_video_encode                    (MixVideo *mix,
+                                                         MixBuffer *bufin[],
+                                                         gint bufincnt,
+                                                         MixIOVec *iovout[],
+                                                         gint iovoutcnt,
+                                                         MixVideoEncodeParams *encode_params);
+MIX_RESULT          mix_video_flush                     (MixVideo *mix);
+MIX_RESULT          mix_video_eos                       (MixVideo *mix);
+MIX_RESULT          mix_video_get_state                 (MixVideo *mix,
+                                                         MixState *state);
+MIX_RESULT          mix_video_get_mixbuffer             (MixVideo *mix,
+                                                         MixBuffer **buf);
+MIX_RESULT          mix_video_release_mixbuffer         (MixVideo *mix,
+                                                         MixBuffer *buf);
+MIX_RESULT          mix_video_get_max_coded_buffer_size (MixVideo *mix,
+                                                         guint *bufsize);
+
+
+
+

Object Hierarchy

+
+  GObject
+   +----MixVideo
+
+
+
+

Description

+

+MixVideo objects are created by the MMF/App and provide the main MI-X API functionality for video.
+

+

+The MixVideo object handles any of the video formats internally. +The App/MMF will pass a MixVideoConfigParamsDecH264/MixVideoConfigParamsDecVC1/ +MixVideoConfigParamsEncH264/etc object to MixVideo in the mix_video_configure() +call. MixVideoInitParams, MixVideoDecodeParams, MixVideoEncodeParams, and +MixVideoRenderParams objects will be passed in the mix_video_initialize(), +mix_video_decode(), mix_video_encode() and mix_video_render() calls respectively. +

+

+The application can take the following steps to decode video; the overall
+sequence is sketched below:
+
+
+
+For encoding, the application can take a similar sequence of steps to encode
+video; see mix_video_encode() below for details and an output-buffer sketch.
+
+For encoding, the application can take the following steps to encode video: +

+
+
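A hedged sketch of the decode sequence implied by the function descriptions in this section; every *_params object is assumed to be created and populated by the caller, and error handling is omitted:

void decode_stream_sketch(MixVideoInitParams *init_params,
                          MixVideoConfigParams *config_params,
                          MixVideoDecodeParams *decode_params,
                          MixVideoRenderParams *render_params,
                          MixBuffer *bufin[], gint bufincnt)
{
    MixVideo *video = mix_video_new();

    mix_video_initialize(video, MIX_CODEC_MODE_DECODE, init_params, NULL);
    mix_video_configure(video, config_params, NULL);  /* e.g. a MixVideoConfigParamsDecH264 */

    /* Per chunk of coded input (looped by the application's demuxer): */
    mix_video_decode(video, bufin, bufincnt, decode_params);

    MixVideoFrame *frame = NULL;
    if (mix_video_get_frame(video, &frame) == MIX_RESULT_SUCCESS) {
        mix_video_render(video, render_params, frame);
        mix_video_release_frame(video, frame);
    }

    mix_video_eos(video);  /* at end of stream */
    mix_video_deinitialize(video);
    mix_video_unref(video);
}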
+
+

Details

+
+

MixVideo

+
typedef struct {
+	GObject parent;
+} MixVideo;
+
+

+MI-X Video object

+
++ + + + +

GObject parent;

Parent object. +
+
+
+
+

mix_video_new ()

+
MixVideo *          mix_video_new                       (void);
+

+Use this method to create a new instance of MixVideo.

+
++ + + + +

returns :

A newly allocated instance of MixVideo +
+
+
+
+

mix_video_ref ()

+
MixVideo *          mix_video_ref                       (MixVideo *mix);
+

+Increment the reference count of the object.

+
++ + + + + + + + + + +

mix :

object to add reference +

returns :

the MixVideo instance where reference count has been increased. +
+
+
+
+

mix_video_unref()

+
#define mix_video_unref(obj) g_object_unref (G_OBJECT(obj))
+
+

+Decrement the reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

mix_video_get_version ()

+
MIX_RESULT          mix_video_get_version               (MixVideo *mix,
+                                                         guint *major,
+                                                         guint *minor);
+

+This function will return the major and minor version numbers of the library.

+
++ + + + + + + + + + + + + + + + + + +

mix :

MixVideo object. +

major :

Pointer to an unsigned integer indicating the major version number of this MI-X Video library +

minor :

Pointer to an unsigned integer indicating the minor version number of this MI-X Video library +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_video_initialize ()

+
MIX_RESULT          mix_video_initialize                (MixVideo *mix,
+                                                         MixCodecMode mode,
+                                                         MixVideoInitParams *init_params,
+                                                         MixDrmParams *drm_init_params);
+

+This function will initialize a decode or encode session with this MI-X instance.

+
++ + + + + + + + + + + + + + + + + + + + + + +

mix :

MixVideo object. +

mode :

Enum value to indicate encode or decode mode +

init_params :

MixVideoInitParams object which includes display type and pointer to display, encode or decode mode +

drm_init_params :

MixDrmParams defined in Moorestown MI-X DRM API. + This can be null if content is not protected. +

returns :

In addition to the Common Video Error Return Codes, + the following error codes may be returned. +
+
+
+
+
+

mix_video_deinitialize ()

+
MIX_RESULT          mix_video_deinitialize              (MixVideo *mix);
+

+This function will un-initialize a session with this MI-X instance. During
+this call, the LibVA session is closed and all resources, including surface
+buffers, MixBuffers, and MixVideoFrame objects, are freed. The application
+calls this function before exiting, once mix_video_initialize() has been
+called.

+
++ + + + + + + + + + +

mix :

MixVideo object. +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_video_configure ()

+
MIX_RESULT          mix_video_configure                 (MixVideo *mix,
+                                                         MixVideoConfigParams *config_params,
+                                                         MixDrmParams *drm_config_params);
+

+This function can be used to configure a stream for the current session. + The caller can use this function to do the following: +

+
    +
  • Choose frame ordering mode (display order or decode order)
  • +
  • Choose encode or decode mode
  • +
  • Choose whether display frames are enqueued for encode mode
  • +
  • Provide stream parameters
  • +
+

+

+

+This function can only be called after mix_video_initialize() has been called.

+
++ + + + + + + + + + + + + + + + + + +

mix :

MixVideo object. +

config_params :

Pointer to MixVideoConfigParams object (either MixVideoConfigParamsDec or + MixVideoConfigParamsEnc for specific media type) +

drm_config_params :

Pointer to MixDrmParams defined in Moorestown MI-X DRM API. + This can be null if content is not protected. +

returns :

In addition to the Common Video Error Return Codes, + the following error codes may be returned. +
    +
  • MIX_RESULT_RESOURCES_NOTAVAIL, HW accelerated decoding is not available.
  • +
  • MIX_RESULT_NOTSUPPORTED, A requested parameter is not supported or not available.
  • +
+
+
+
+
+

mix_video_get_config ()

+
MIX_RESULT          mix_video_get_config                (MixVideo *mix,
+                                                         MixVideoConfigParams **config_params);
+

+This function can be used to get the current configuration of a stream for
+the current session. A MixVideoConfigParams object will be returned, which
+can be used to get the current value of each parameter. The caller will need
+to release this object when it is no longer needed.
+

+

+This function can only be called once mix_video_configure() has been called. +

+

+

+
+

Note

See description of mix_video_configure() for MixVideoConfigParams object details. +For mix_video_get_config(), all input parameter fields become OUT parameters. +
+
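A short sketch of the query-and-release pattern described above:

void query_config_sketch(MixVideo *video)
{
    MixVideoConfigParams *config = NULL;

    if (mix_video_get_config(video, &config) == MIX_RESULT_SUCCESS) {
        /* ... read back the OUT parameter fields ... */
        mix_videoconfigparams_unref(config);  /* caller releases the object */
    }
}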
++ + + + + + + + + + + + + + +

mix :

MixVideo object. +

config_params :

Pointer to pointer to MixVideoConfigParams object defined in + description of mix_video_configure() +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_video_decode ()

+
MIX_RESULT          mix_video_decode                    (MixVideo *mix,
+                                                         MixBuffer *bufin[],
+                                                         gint bufincnt,
+                                                         MixVideoDecodeParams *decode_params);
+

+

+

+This function initiates HW accelerated decoding of encoded data buffers. It
+decodes to a surface buffer, which can then be rendered using
+mix_video_render().
+Video data input buffers are provided in a scatter/gather list of reference
+counted MixBuffers. The input MixBuffers are retained until a full frame of
+coded data is accumulated, at which point it is decoded and the input buffers
+released. The decoded data is stored in a surface buffer until it is rendered.
+The caller must provide, in the MixVideoDecodeParams object, the presentation
+timestamp and any stream discontinuity for the video frame carried by the
+encoded data. These are preserved and provided in the MixVideoFrame object
+that contains the decoded data for this frame.
+

+

+

+

+

+

+As only one timestamp is passed in for the buffer, there should be no more than one +video frame included in the encoded data buffer provided in a single call to +mix_video_decode(). If partial frame data is passed in over multiple calls to +mix_video_decode(), the same timestamp should be provided with each call having +data associated with the same frame. +

+

+

+

+

+

+The application should request a MixBuffer object using mix_video_get_mixbuffer(), +initialize the MixBuffer with the data pointer to the coded input data, along with the +size of the input data buffer, and optionally can provide a token value and a callback +function pointer. When the MixBuffer is released by both the application and MixVideo, +the callback will be called and passed the token value and the input data buffer +pointer for any buffer management processing that the application needs or wants to +perform (such as releasing the actual coded data buffer that was assigned to that +MixBuffer). MixBuffers are allocated in a pool, and the application determines the size +of this pool, which is passed to mix_video_configure() in the MixVideoConfigParams object. +

+
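As a sketch of the timestamp rule above, every mix_video_decode() call that carries data for the same frame supplies the same presentation timestamp (the variable names here are illustrative):

/* Frame n split across two submissions: one timestamp, set once. */
mix_videodecodeparams_set_timestamp(decode_params, frame_n_pts);
mix_video_decode(video, bufs_first_part,  cnt_first,  decode_params);
mix_video_decode(video, bufs_second_part, cnt_second, decode_params);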
++ + + + + + + + + + + + + + + + + + + + + + +

mix :

MixVideo object. +

bufin :

Array of pointers to MixBuffer objects, described in mix_video_get_mixbuffer() * +

bufincnt :

Number of MixBuffer objects +

decode_params :

MixVideoDecodeParams object +

returns :

In addition to the Common Video Error Return Codes, + the following error codes may be returned. +
  • + MIX_RESULT_OUTOFSURFACES, No surfaces available for decoding. Nothing will be done. + Caller can try again with the same MixBuffers later when surfaces may have been freed. +
+
+
+
+
+

mix_video_get_frame ()

+
MIX_RESULT          mix_video_get_frame                 (MixVideo *mix,
+                                                         MixVideoFrame **frame);
+

+

+

+This function returns a frame object that represents the next frame ID and includes +timestamp and discontinuity information. If display frame ordering has been +configured, it is the next frame displayed. If decode order frame ordering has been +configured, it is the next frame decoded. In both cases the timestamp reflects the +presentation timestamp. For encode mode the frame order is always display order. +

+

+

+

+

+

+The frame object is a reference counted object that represents the frame. The +application can retain this frame object as long as needed to display the frame and +redisplay as needed. At presentation time, the application can call mix_video_render() +with this frame object to display the frame immediately. When the application no +longer needs to display this frame, it should release the object by calling +mix_video_release_frame(). The application should not modify the reference count or +delete this object directly. +

+
++ + + + + + + + + + + + + + +

mix :

MixVideo object. +

frame :

A pointer to a pointer to a MixVideoFrame object +

returns :

In addition to the Common Video Error Return Codes, + the following error codes may be returned. +
    +
  • + MIX_RESULT_FRAME_NOTAVAIL, No decoded frames are available. +
  • +
  • + MIX_RESULT_EOS, No more decoded frames are available, + since end of stream has been encountered. +
  • +
+
+
+
+
+

mix_video_release_frame ()

+
MIX_RESULT          mix_video_release_frame             (MixVideo *mix,
+                                                         MixVideoFrame *frame);
+

+This function releases a frame object that was acquired from mix_video_get_frame().

+
++ + + + + + + + + + + + + + +

mix :

MixVideo object. +

frame :

A pointer to a MixVideoFrame object, described in mix_video_get_frame() +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_video_render ()

+
MIX_RESULT          mix_video_render                    (MixVideo *mix,
+                                                         MixVideoRenderParams *render_params,
+                                                         MixVideoFrame *frame);
+

+This function renders a video frame associated with a MixVideoFrame object to the display. +The display is either an X11 Pixmap or an X11 Window using the overlay.

+
++ + + + + + + + + + + + + + + + + + +

mix :

MixVideo object. +

render_params :

MixVideoRenderParams object defined below, + which includes the display window and type, + src and dest image sizes, deinterlace info, clipping rectangles, + some post processing parameters, and so forth. +

frame :

Pointer to a MixVideoFrame object returned from mix_video_get_frame(). +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_video_encode ()

+
MIX_RESULT          mix_video_encode                    (MixVideo *mix,
+                                                         MixBuffer *bufin[],
+                                                         gint bufincnt,
+                                                         MixIOVec *iovout[],
+                                                         gint iovoutcnt,
+                                                         MixVideoEncodeParams *encode_params);
+

+

+

+This function is used to initiate HW accelerated encoding of uncompressed video input +buffers. The input buffers may either be uncompressed video in user space buffers, or +CI frame indexes from libCI captured frames. In order to use CI frame indexes, the +shared buffer mode should be indicated in the MixVideoConfigParamsEnc object +provided to mix_video_configure(). +

+

+

+

+

+

+Video uncompressed data input buffers are provided in a scatter/gather list of +reference counted MixBuffers. The input MixBuffers are considered a complete frame +of data, and are used for encoding before the input buffers are released. LibCI frame +indices may also be provided in MixBuffers. +

+

+

+

+

+

+The encoded data will be copied to the output buffers provided in the array of +MixIOVec structures, also in a scatter/gather list. These output buffers are allocated +by the application. The application can query for the proper size of buffer to allocate +for this, using mix_video_get_max_coded_buffer_size(). It is suggested that the +application create a pool of these buffers to pass in, for efficiency. The application will +also set the buffer_size field in the MixIOVec structures to the allocated buffer size. +When the buffers are filled with encoded data by MixVideo, the data_size will be set to +the encoded data size placed in the buffer. For any buffer not used for encoded data, +the data_size will be set to zero. +

+

+

+

+

+

+Alternatively, if the application does not allocate the output buffers, the data pointers +in the MixIOVec structures (still provided by the application) can be set to NULL, +whereupon MixVideo will allocate a data buffer for each frame and set the data, +buffer_size and data_size pointers in the MixIOVec structures accordingly. +

+

+

+

+

+
+

Note

+This is not an efficient method to handle these buffers and it is preferred that +the application provide pre-allocated buffers. +
+

+

+

+

+

+The application should request a MixBuffer object using mix_video_get_mixbuffer(), +initialize the MixBuffer with the data pointer to the uncompressed input data or a LibCI +frame index, along with the size of the input data buffer, and optionally can provide a +token value and a callback function pointer. When the MixBuffer is released by both +the application and MixVideo, the callback will be called and passed the token value +and the input data buffer pointer for any buffer management processing that the +application needs or wants to perform (such as releasing the actual data buffer that +was assigned to that MixBuffer). MixBuffers are allocated in a pool, and the application +determines the size of this pool, which is passed to mix_video_configure() in the +MixVideoConfigParams object. +

+

+

+

+

+

+The application can choose to enable or disable display of the uncompressed video
+frames using the need_display field of the MixVideoConfigParamsEnc object in
+mix_video_configure(). If display is enabled, MixVideoFrames are enqueued by
+MixVideo, to be requested by the application with mix_video_get_frame() and
+provided to mix_video_render() for rendering before being released with
+mix_video_release_frame(). If display is disabled, no MixVideoFrames will be
+enqueued. +

+
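+
+A sketch of a single encode call under these conventions (bufin, bufincnt and
+encode_params are assumed to have been prepared as described above; field names
+follow the MixIOVec usage on this page, and error handling is omitted):
+
+guint max_size = 0;
+MixIOVec iov;
+MixIOVec *iovout[1] = { &iov };
+
+/* Query the worst-case coded buffer size; valid after mix_video_configure() */
+mix_video_get_max_coded_buffer_size(mix, &max_size);
+
+/* Application-allocated output buffer; a pool of these is recommended */
+iov.data = g_malloc(max_size);
+iov.buffer_size = max_size;
+iov.data_size = 0;    /* set by MixVideo to the encoded data size */
+
+mix_video_encode(mix, bufin, bufincnt, iovout, 1, encode_params);
+/* On return, iov.data holds iov.data_size bytes of encoded data */
+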
++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

mix :

MixVideo object. +

bufin :

Array of pointers to MixBuffer objects, described in mix_video_decode() +

bufincnt :

Number of MixBuffer objects +

iovout :

Array of MixIOVec structures, pointing to buffers allocated by the application +

iovoutcnt :

Number of items in iovout array +

encode_params :

MixVideoEncodeParams object +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_video_flush ()

+
MIX_RESULT          mix_video_flush                     (MixVideo *mix);
+

+This function will flush all encoded and decoded buffers that are currently enqueued or +in the process of decoding. After this call, decoding can commence again, but would +need to start at the beginning of a sequence (for example, with no dependencies on +previously decoded reference frames).

+
++ + + + + + + + + + +

mix :

MixVideo object. +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_video_eos ()

+
MIX_RESULT          mix_video_eos                       (MixVideo *mix);
+

+This function will signal end of stream to MixVideo. This can be used to finalize
+decoding of the last frame and other end of stream processing. MixVideo will complete
+the decoding of all buffers received, and will continue to provide the decoded frame
+objects by means of mix_video_get_frame() until all frames have been provided,
+at which point mix_video_get_frame() will return MIX_RESULT_EOS.

+
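+
+A simplified drain sketch under these semantics (transient returns such as
+MIX_RESULT_FRAME_NOTAVAIL and error handling are ignored for brevity):
+
+MixVideoFrame *frame = NULL;
+
+mix_video_eos(mix);
+
+/* Collect the remaining decoded frames until end of stream */
+while (mix_video_get_frame(mix, &frame) != MIX_RESULT_EOS) {
+        mix_video_render(mix, render_params, frame);
+        mix_video_release_frame(mix, frame);
+        frame = NULL;
+}
+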
++ + + + + + + + + + +

mix :

MixVideo object. +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_video_get_state ()

+
MIX_RESULT          mix_video_get_state                 (MixVideo *mix,
+                                                         MixState *state);
+

+This function returns the current state of the MI-X session.

+
++ + + + + + + + + + + + + + +

mix :

MixVideo object. +

state :

Current state of MI-X session. +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_video_get_mixbuffer ()

+
MIX_RESULT          mix_video_get_mixbuffer             (MixVideo *mix,
+                                                         MixBuffer **buf);
+

+

+

+This function returns a MixBuffer object from the pool of MixBuffers allocated by
+MixVideo at configuration time. The application initializes the MixBuffer with the data
+pointer to the uncompressed input data or a LibCI frame index, along with the size of
+the input data buffer, before passing it to mix_video_decode() or mix_video_encode(). +

+

+

+

+

+

+The MixBuffer object is a reference counted object. When the application no longer
+needs the buffer, it should release the object by calling mix_video_release_mixbuffer().
+When the MixBuffer is released by both the application and MixVideo, the optional
+callback will be called with the token value and the input data buffer pointer, so that
+the application can perform any buffer management processing it requires. The
+application should not modify the reference count or delete this object directly. +

+
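+
+A sketch of typical usage; the mix_buffer_set() initializer shown here is an assumed
+MixBuffer API that is not documented on this page, and data, data_size, token and
+buffer_released_cb are hypothetical application values:
+
+MixBuffer *buf = NULL;
+
+mix_video_get_mixbuffer(mix, &buf);
+
+/* Assumed initializer: data pointer, size, and an optional
+ * token/callback pair used for buffer management notification */
+mix_buffer_set(buf, data, data_size, token, buffer_released_cb);
+
+/* ... pass buf to mix_video_decode() or mix_video_encode() ... */
+
+mix_video_release_mixbuffer(mix, buf);
+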
++ + + + + + + + + + + + + + +

mix :

MixVideo object. +

buf :

A pointer to a pointer to a MixBuffer object +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_video_release_mixbuffer ()

+
MIX_RESULT          mix_video_release_mixbuffer         (MixVideo *mix,
+                                                         MixBuffer *buf);
+

+This function releases a MixBuffer object that was acquired from mix_video_get_mixbuffer().

+
++ + + + + + + + + + + + + + +

mix :

MixVideo object. +

buf :

A pointer to a MixBuffer object, described in mix_video_get_mixbuffer(). +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_video_get_max_coded_buffer_size ()

+
MIX_RESULT          mix_video_get_max_coded_buffer_size (MixVideo *mix,
+                                                         guint *bufsize);
+

+

+

+This function can be used to get the maximum size of encoded data buffer needed for +the mix_video_encode() call. +

+

+

+

+This function can only be called once mix_video_configure() has been called. +

+
++ + + + + + + + + + + + + + +

mix :

MixVideo object. +

bufsize :

Pointer to guint. +

returns :

Common Video Error Return Codes +
+
+
+
+

diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParams.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParams.html
new file mode 100644
index 0000000..c8c9a02
--- /dev/null
+++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParams.html
@@ -0,0 +1,162 @@
+
+
+ + +
+

MixVideoConfigParams

+

MixVideoConfigParams — MI-X Video Configuration Parameter Base Object

+
+ +
+

Object Hierarchy

+
+  MixParams
+   +----MixVideoConfigParams
+         +----MixVideoConfigParamsEnc
+         +----MixVideoConfigParamsDec
+
+
+
+

Description

+

+

+

+A base object of MI-X video configuration parameter objects. +

+

+

+

+The derived MixVideoConfigParams object is created by the MMF/App
+and provided in the MixVideo mix_video_configure() function. The get and set
+methods for the properties will be available for the caller to set and get information at
+configuration time. It will also be created by MixVideo and returned from the
+mix_video_get_config() function, whereupon the MMF/App can use the get methods to
+obtain current configuration information. +

+

+

+

+There are decode mode objects (for example, MixVideoConfigParamsDec) and encode
+mode objects (for example, MixVideoConfigParamsEnc). Each of these types is refined
+further with media-specific objects. The application should create the correct type of
+object to match the media format of the stream to be handled, e.g., if the media
+format of the stream to be decoded is H.264, the application would create a
+MixVideoConfigParamsDecH264 object for the mix_video_configure() call, as in the
+sketch below. +

+
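+
+For example, a minimal sketch for configuring an H.264 decode session; the
+mix_video_configure() argument list, the MIX_VIDEOCONFIGPARAMS() cast macro and
+drm_params are assumed here, as they are documented elsewhere:
+
+MixVideoConfigParamsDecH264 *config = mix_videoconfigparamsdec_h264_new();
+
+/* ... populate the decode parameters via the setter methods ... */
+
+mix_video_configure(mix, MIX_VIDEOCONFIGPARAMS(config), drm_params);
+
+mix_videoconfigparamsdec_h264_unref(config);
+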
+
+

Details

+
+

MixVideoConfigParams

+
typedef struct {
+	MixParams parent;
+} MixVideoConfigParams;
+
+

+MI-X VideoConfig Parameter object

+
+
+
+

mix_videoconfigparams_new ()

+
MixVideoConfigParams * mix_videoconfigparams_new        (void);
+

+Use this method to create a new instance of MixVideoConfigParams

+
++ + + + +

returns :

A newly allocated instance of MixVideoConfigParams +
+
+
+
+

mix_videoconfigparams_ref ()

+
MixVideoConfigParams * mix_videoconfigparams_ref        (MixVideoConfigParams *mix);
+

+Increment reference count of the object.

+
++ + + + + + + + + + +

mix :

object to add reference +

returns :

the MixVideoConfigParams instance where reference count has been increased. +
+
+
+
+

mix_videoconfigparams_unref()

+
#define mix_videoconfigparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDec.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDec.html
new file mode 100644
index 0000000..91ca416
--- /dev/null
+++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDec.html
@@ -0,0 +1,797 @@
+
+
+ + +
+

MixVideoConfigParamsDec

+

MixVideoConfigParamsDec — MI-X Video Decode Configuration Parameter Base Object

+
+
+

Synopsis

+
+                    MixVideoConfigParamsDec;
+MixVideoConfigParamsDec * mix_videoconfigparamsdec_new  (void);
+MixVideoConfigParamsDec * mix_videoconfigparamsdec_ref  (MixVideoConfigParamsDec *mix);
+#define             mix_videoconfigparamsdec_unref      (obj)
+MIX_RESULT          mix_videoconfigparamsdec_set_frame_order_mode
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         MixFrameOrderMode frame_order_mode);
+MIX_RESULT          mix_videoconfigparamsdec_get_frame_order_mode
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         MixFrameOrderMode *frame_order_mode);
+MIX_RESULT          mix_videoconfigparamsdec_set_header (MixVideoConfigParamsDec *obj,
+                                                         MixIOVec *header);
+MIX_RESULT          mix_videoconfigparamsdec_get_header (MixVideoConfigParamsDec *obj,
+                                                         MixIOVec **header);
+MIX_RESULT          mix_videoconfigparamsdec_set_mime_type
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         const gchar *mime_type);
+MIX_RESULT          mix_videoconfigparamsdec_get_mime_type
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         gchar **mime_type);
+MIX_RESULT          mix_videoconfigparamsdec_set_frame_rate
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint frame_rate_num,
+                                                         guint frame_rate_denom);
+MIX_RESULT          mix_videoconfigparamsdec_get_frame_rate
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint *frame_rate_num,
+                                                         guint *frame_rate_denom);
+MIX_RESULT          mix_videoconfigparamsdec_set_picture_res
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint picture_width,
+                                                         guint picture_height);
+MIX_RESULT          mix_videoconfigparamsdec_get_picture_res
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint *picture_width,
+                                                         guint *picture_height);
+MIX_RESULT          mix_videoconfigparamsdec_set_raw_format
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint raw_format);
+MIX_RESULT          mix_videoconfigparamsdec_get_raw_format
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint *raw_format);
+MIX_RESULT          mix_videoconfigparamsdec_set_rate_control
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint rate_control);
+MIX_RESULT          mix_videoconfigparamsdec_get_rate_control
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint *rate_control);
+MIX_RESULT          mix_videoconfigparamsdec_set_buffer_pool_size
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint bufpoolsize);
+MIX_RESULT          mix_videoconfigparamsdec_get_buffer_pool_size
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint *bufpoolsize);
+MIX_RESULT          mix_videoconfigparamsdec_set_extra_surface_allocation
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint extra_surface_allocation);
+MIX_RESULT          mix_videoconfigparamsdec_get_extra_surface_allocation
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint *extra_surface_allocation);
+
+
+
+

Object Hierarchy

+
+  MixParams
+   +----MixVideoConfigParams
+         +----MixVideoConfigParamsDec
+               +----MixVideoConfigParamsDecMP42
+               +----MixVideoConfigParamsDecVC1
+               +----MixVideoConfigParamsDecH264
+
+
+
+

Description

+

+A base object of MI-X video decode configuration parameter objects.

+
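+
+A sketch of populating the common decode parameters through the setter methods
+documented below (the mime type string and all values are illustrative only):
+
+MixVideoConfigParamsDec *obj = mix_videoconfigparamsdec_new();
+
+mix_videoconfigparamsdec_set_mime_type(obj, "video/h264");
+mix_videoconfigparamsdec_set_frame_rate(obj, 30000, 1001);   /* 29.97 fps */
+mix_videoconfigparamsdec_set_picture_res(obj, 1280, 720);
+mix_videoconfigparamsdec_set_buffer_pool_size(obj, 8);
+mix_videoconfigparamsdec_set_extra_surface_allocation(obj, 4);
+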
+
+

Details

+
+

MixVideoConfigParamsDec

+
typedef struct {
+	MixVideoConfigParams parent;
+
+	
+	/* Frame re-ordering mode */
+	MixFrameOrderMode frame_order_mode;
+	
+	/* Stream header information, such as 
+	 * codec_data in GStreamer pipelines */ 
+	MixIOVec header;
+
+	/* Mime type */
+	GString * mime_type;
+	
+	/* Frame rate numerator value */
+	guint frame_rate_num;
+	
+	/* Frame rate denominator value */	
+	guint frame_rate_denom;
+	
+	/* Picture width */
+	gulong picture_width;
+	
+	/* Picture height */
+	gulong picture_height;
+	
+	/* Render target format */
+	guint raw_format;
+	
+	/* Rate control: CBR, VBR, none. Only valid for encoding.
+	 * This should be set to none for decoding. */ 
+	guint rate_control;
+
+	/* Size of pool of MixBuffers to allocate */
+	guint mixbuffer_pool_size;
+	
+	/* Extra surfaces for MixVideoFrame objects to be allocated */
+	guint extra_surface_allocation;
+	
+	/* Reserved for future use */
+	void *reserved1;
+	
+	/* Reserved for future use */
+	void *reserved2;
+	
+	/* Reserved for future use */
+	void *reserved3;
+	
+	/* Reserved for future use */
+	void *reserved4;
+} MixVideoConfigParamsDec;
+
+

+MI-X VideoConfig Parameter object

+
+
+
+

mix_videoconfigparamsdec_new ()

+
MixVideoConfigParamsDec * mix_videoconfigparamsdec_new  (void);
+

+Use this method to create a new instance of MixVideoConfigParamsDec

+
++ + + + +

returns :

A newly allocated instance of MixVideoConfigParamsDec +
+
+
+
+

mix_videoconfigparamsdec_ref ()

+
MixVideoConfigParamsDec * mix_videoconfigparamsdec_ref  (MixVideoConfigParamsDec *mix);
+

+Increment reference count of the object.

+
++ + + + + + + + + + +

mix :

object to add reference +

returns :

the MixVideoConfigParamsDec instance where reference count has been increased. +
+
+
+
+

mix_videoconfigparamsdec_unref()

+
#define mix_videoconfigparamsdec_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

mix_videoconfigparamsdec_set_frame_order_mode ()

+
MIX_RESULT          mix_videoconfigparamsdec_set_frame_order_mode
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         MixFrameOrderMode frame_order_mode);
+

+Set frame order mode.

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

frame_order_mode :

Frame re-ordering mode +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_get_frame_order_mode ()

+
MIX_RESULT          mix_videoconfigparamsdec_get_frame_order_mode
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         MixFrameOrderMode *frame_order_mode);
+

+Get frame order mode.

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

frame_order_mode :

pointer to frame re-ordering mode +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_set_header ()

+
MIX_RESULT          mix_videoconfigparamsdec_set_header (MixVideoConfigParamsDec *obj,
+                                                         MixIOVec *header);
+

+Set stream header information.

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

header :

Stream header information, such as codec_data in GStreamer pipelines +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_get_header ()

+
MIX_RESULT          mix_videoconfigparamsdec_get_header (MixVideoConfigParamsDec *obj,
+                                                         MixIOVec **header);
+

+Get stream header information. +

+
+

Note

+Caller is responsible for freeing the (*header)->data field and *header itself with g_free() +
+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

header :

Pointer to pointer of Stream header information +

returns :

Common Video Error Return Codes +
+
+
+
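+
+A sketch of retrieving the header and releasing it as the note above requires
+(error handling otherwise omitted; MIX_RESULT_SUCCESS and the MixIOVec field names
+follow their usage elsewhere in this document):
+
+MixIOVec *header = NULL;
+
+if (mix_videoconfigparamsdec_get_header(obj, &header) == MIX_RESULT_SUCCESS) {
+        /* ... use header->data and header->data_size ... */
+
+        g_free(header->data);   /* free the data field first */
+        g_free(header);         /* then the MixIOVec itself */
+}
+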
+

mix_videoconfigparamsdec_set_mime_type ()

+
MIX_RESULT          mix_videoconfigparamsdec_set_mime_type
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         const gchar *mime_type);
+

+Set stream mime type

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

mime_type :

mime type +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_get_mime_type ()

+
MIX_RESULT          mix_videoconfigparamsdec_get_mime_type
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         gchar **mime_type);
+

+Get mime type +

+
+

Note

+Caller is responsible for freeing *mime_type with g_free() +
+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

mime_type :

Pointer to pointer of type gchar +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_set_frame_rate ()

+
MIX_RESULT          mix_videoconfigparamsdec_set_frame_rate
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint frame_rate_num,
+                                                         guint frame_rate_denom);
+

+Set frame rate

+
++ + + + + + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

frame_rate_num :

Frame rate numerator value +

frame_rate_denom :

Frame rate denominator value +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_get_frame_rate ()

+
MIX_RESULT          mix_videoconfigparamsdec_get_frame_rate
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint *frame_rate_num,
+                                                         guint *frame_rate_denom);
+

+Get frame rate

+
++ + + + + + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

frame_rate_num :

Frame rate numerator value to be returned +

frame_rate_denom :

Frame rate denominator value to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_set_picture_res ()

+
MIX_RESULT          mix_videoconfigparamsdec_set_picture_res
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint picture_width,
+                                                         guint picture_height);
+

+Set video resolution

+
++ + + + + + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

picture_width :

Picture width +

picture_height :

Picture height +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_get_picture_res ()

+
MIX_RESULT          mix_videoconfigparamsdec_get_picture_res
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint *picture_width,
+                                                         guint *picture_height);
+

+Get video resolution

+
++ + + + + + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

picture_width :

Picture width to be returned +

picture_height :

Picture height to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_set_raw_format ()

+
MIX_RESULT          mix_videoconfigparamsdec_set_raw_format
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint raw_format);
+

+Set Render target format

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

raw_format :

Render target format +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_get_raw_format ()

+
MIX_RESULT          mix_videoconfigparamsdec_get_raw_format
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint *raw_format);
+

+Get Render target format

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

raw_format :

Render target format to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_set_rate_control ()

+
MIX_RESULT          mix_videoconfigparamsdec_set_rate_control
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint rate_control);
+

+Set rate control

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

rate_control :

Rate control: CBR, VBR, none. Only valid for encoding. + This should be set to none for decoding. +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_get_rate_control ()

+
MIX_RESULT          mix_videoconfigparamsdec_get_rate_control
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint *rate_control);
+

+Get rate control

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

rate_control :

Rate control to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_set_buffer_pool_size ()

+
MIX_RESULT          mix_videoconfigparamsdec_set_buffer_pool_size
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint bufpoolsize);
+

+Set buffer pool size

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

bufpoolsize :

Size of pool of MixBuffers to allocate +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_get_buffer_pool_size ()

+
MIX_RESULT          mix_videoconfigparamsdec_get_buffer_pool_size
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint *bufpoolsize);
+

+Get buffer pool size

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

bufpoolsize :

Size of pool of MixBuffers to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_set_extra_surface_allocation ()

+
MIX_RESULT          mix_videoconfigparamsdec_set_extra_surface_allocation
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint extra_surface_allocation);
+

+Set extra surface allocation

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

extra_surface_allocation :

Extra surfaces for MixVideoFrame objects to be allocated +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_get_extra_surface_allocation ()

+
MIX_RESULT          mix_videoconfigparamsdec_get_extra_surface_allocation
+                                                        (MixVideoConfigParamsDec *obj,
+                                                         guint *extra_surface_allocation);
+

+Get extra surface allocation

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDec object +

extra_surface_allocation :

Extra surfaces for MixVideoFrame objects to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecH264.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecH264.html
new file mode 100644
index 0000000..eb76a0e
--- /dev/null
+++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecH264.html
@@ -0,0 +1,130 @@
+
+
+ + +
+

MixVideoConfigParamsDecH264

+

MixVideoConfigParamsDecH264 — MI-X Video H.264 Decode Configuration Parameter

+
+ +
+

Object Hierarchy

+
+  MixParams
+   +----MixVideoConfigParams
+         +----MixVideoConfigParamsDec
+               +----MixVideoConfigParamsDecH264
+
+
+
+

Description

+

+MI-X video H.264 decode configuration parameter objects.

+
+
+

Details

+
+

MixVideoConfigParamsDecH264

+
typedef struct {
+  MixVideoConfigParamsDec parent;
+
+
+  /* TODO: Add H.264 configuration parameters */
+  
+  /* Reserved for future use */
+  void *reserved1;
+  
+  /* Reserved for future use */  
+  void *reserved2;
+  
+  /* Reserved for future use */  
+  void *reserved3;
+  
+  /* Reserved for future use */  
+  void *reserved4;
+} MixVideoConfigParamsDecH264;
+
+

+MI-X VideoConfig Parameter object

+
+
+
+

mix_videoconfigparamsdec_h264_new ()

+
MixVideoConfigParamsDecH264 * mix_videoconfigparamsdec_h264_new
+                                                        (void);
+

+Use this method to create a new instance of MixVideoConfigParamsDecH264

+
++ + + + +

returns :

A newly allocated instance of MixVideoConfigParamsDecH264 +
+
+
+
+

mix_videoconfigparamsdec_h264_unref()

+
#define mix_videoconfigparamsdec_h264_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecMP42.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecMP42.html
new file mode 100644
index 0000000..9ef4860
--- /dev/null
+++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecMP42.html
@@ -0,0 +1,240 @@
+
+
+ + +
+

MixVideoConfigParamsDecMP42

+

MixVideoConfigParamsDecMP42 — MI-X Video MPEG 4:2 Decode Configuration Parameter

+
+ +
+

Object Hierarchy

+
+  MixParams
+   +----MixVideoConfigParams
+         +----MixVideoConfigParamsDec
+               +----MixVideoConfigParamsDecMP42
+
+
+
+

Description

+

+MI-X video MPEG 4:2 decode configuration parameter objects.

+
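+
+A short sketch (the version numbers are illustrative only):
+
+MixVideoConfigParamsDecMP42 *obj = mix_videoconfigparamsdec_mp42_new();
+
+mix_videoconfigparamsdec_mp42_set_mpegversion(obj, 4);
+mix_videoconfigparamsdec_mp42_set_divxversion(obj, 5);   /* e.g. DivX 5 content */
+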
+
+

Details

+
+

MixVideoConfigParamsDecMP42

+
typedef struct {
+	MixVideoConfigParamsDec parent;
+
+
+	/* MPEG version */
+	guint mpegversion;
+	
+	/* DivX version */
+	guint divxversion;
+
+	/* Reserved for future use */
+	void *reserved1;
+	
+	/* Reserved for future use */
+	void *reserved2;
+	
+	/* Reserved for future use */	
+	void *reserved3;
+	
+	/* Reserved for future use */	
+	void *reserved4;
+} MixVideoConfigParamsDecMP42;
+
+

+MI-X VideoConfig Parameter object

+
+
+
+

mix_videoconfigparamsdec_mp42_new ()

+
MixVideoConfigParamsDecMP42 * mix_videoconfigparamsdec_mp42_new
+                                                        (void);
+

+Use this method to create a new instance of MixVideoConfigParamsDecMP42

+
++ + + + +

returns :

A newly allocated instance of MixVideoConfigParamsDecMP42 +
+
+
+
+

mix_videoconfigparamsdec_mp42_unref()

+
#define mix_videoconfigparamsdec_mp42_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

mix_videoconfigparamsdec_mp42_set_mpegversion ()

+
MIX_RESULT          mix_videoconfigparamsdec_mp42_set_mpegversion
+                                                        (MixVideoConfigParamsDecMP42 *obj,
+                                                         guint version);
+

+Set MPEG version

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDecMP42 object +

version :

MPEG version +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_mp42_get_mpegversion ()

+
MIX_RESULT          mix_videoconfigparamsdec_mp42_get_mpegversion
+                                                        (MixVideoConfigParamsDecMP42 *obj,
+                                                         guint *version);
+

+Get MPEG version

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDecMP42 object +

version :

MPEG version to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_mp42_set_divxversion ()

+
MIX_RESULT          mix_videoconfigparamsdec_mp42_set_divxversion
+                                                        (MixVideoConfigParamsDecMP42 *obj,
+                                                         guint version);
+

+Set DivX version

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsDecMP42 object +

version :

DivX version +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsdec_mp42_get_divxversion ()

+
MIX_RESULT          mix_videoconfigparamsdec_mp42_get_divxversion
+                                                        (MixVideoConfigParamsDecMP42 *obj,
+                                                         guint *version);
+
+
+Get DivX version
+
+

diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecVC1.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecVC1.html
new file mode 100644
index 0000000..063ae2b
--- /dev/null
+++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecVC1.html
@@ -0,0 +1,137 @@
+
+
+ + +
+

MixVideoConfigParamsDecVC1

+

MixVideoConfigParamsDecVC1 — MI-X Video VC-1 Decode Configuration Parameter

+
+ +
+

Object Hierarchy

+
+  MixParams
+   +----MixVideoConfigParams
+         +----MixVideoConfigParamsDec
+               +----MixVideoConfigParamsDecVC1
+
+
+
+

Description

+

+MI-X video VC-1 decode configuration parameter objects.

+
+
+

Details

+
+

MixVideoConfigParamsDecVC1

+
typedef struct {
+  MixVideoConfigParamsDec parent;
+
+
+  /* TODO: Add VC1 configuration parameters */
+  /* TODO: wmv_version and fourcc type might be changed later */
+  
+  /* WMV version */
+  guint wmv_version;
+  
+  /* FourCC code */
+  guint fourcc;
+
+  /* Reserved for future use */
+  void *reserved1;
+  
+  /* Reserved for future use */  
+  void *reserved2;
+  
+  /* Reserved for future use */  
+  void *reserved3;
+  
+  /* Reserved for future use */  
+  void *reserved4;
+} MixVideoConfigParamsDecVC1;
+
+

+MI-X VideoConfig Parameter object

+
+
+
+

mix_videoconfigparamsdec_vc1_new ()

+
MixVideoConfigParamsDecVC1 * mix_videoconfigparamsdec_vc1_new
+                                                        (void);
+

+Use this method to create a new instance of MixVideoConfigParamsDecVC1

+
++ + + + +

returns :

A newly allocated instance of MixVideoConfigParamsDecVC1 +
+
+
+
+

mix_videoconfigparamsdec_vc1_unref()

+
#define mix_videoconfigparamsdec_vc1_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEnc.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEnc.html
new file mode 100644
index 0000000..f02e153
--- /dev/null
+++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEnc.html
@@ -0,0 +1,1245 @@
+
+
+ + +
+

MixVideoConfigParamsEnc

+

MixVideoConfigParamsEnc — MI-X Video Encode Configuration Parameter Base Object

+
+
+

Synopsis

+
+                    MixVideoConfigParamsEnc;
+MixVideoConfigParamsEnc * mix_videoconfigparamsenc_new  (void);
+MixVideoConfigParamsEnc * mix_videoconfigparamsenc_ref  (MixVideoConfigParamsEnc *mix);
+#define             mix_videoconfigparamsenc_unref      (obj)
+MIX_RESULT          mix_videoconfigparamsenc_set_mime_type
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         const gchar *mime_type);
+MIX_RESULT          mix_videoconfigparamsenc_get_mime_type
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gchar **mime_type);
+MIX_RESULT          mix_videoconfigparamsenc_set_frame_rate
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint frame_rate_num,
+                                                         guint frame_rate_denom);
+MIX_RESULT          mix_videoconfigparamsenc_get_frame_rate
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint *frame_rate_num,
+                                                         guint *frame_rate_denom);
+MIX_RESULT          mix_videoconfigparamsenc_set_picture_res
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint picture_width,
+                                                         guint picture_height);
+MIX_RESULT          mix_videoconfigparamsenc_get_picture_res
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint *picture_width,
+                                                         guint *picture_height);
+MIX_RESULT          mix_videoconfigparamsenc_set_encode_format
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixEncodeTargetFormat encode_format);
+MIX_RESULT          mix_videoconfigparamsenc_get_encode_format
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixEncodeTargetFormat *encode_format);
+MIX_RESULT          mix_videoconfigparamsenc_set_bit_rate
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint bps);
+MIX_RESULT          mix_videoconfigparamsenc_get_bit_rate
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint *bps);
+MIX_RESULT          mix_videoconfigparamsenc_set_init_qp
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint initial_qp);
+MIX_RESULT          mix_videoconfigparamsenc_get_init_qp
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint *initial_qp);
+MIX_RESULT          mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc *obj,
+                                                         guint min_qp);
+MIX_RESULT          mix_videoconfigparamsenc_get_min_qp (MixVideoConfigParamsEnc *obj,
+                                                         guint *min_qp);
+MIX_RESULT          mix_videoconfigparamsenc_set_intra_period
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint intra_period);
+MIX_RESULT          mix_videoconfigparamsenc_get_intra_period
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint *intra_period);
+MIX_RESULT          mix_videoconfigparamsenc_set_buffer_pool_size
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint bufpoolsize);
+MIX_RESULT          mix_videoconfigparamsenc_get_buffer_pool_size
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint *bufpoolsize);
+MIX_RESULT          mix_videoconfigparamsenc_set_share_buf_mode
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gboolean share_buf_mod);
+MIX_RESULT          mix_videoconfigparamsenc_get_share_buf_mode
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gboolean *share_buf_mod);
+MIX_RESULT          mix_videoconfigparamsenc_set_ci_frame_info
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gulong *ci_frame_id,
+                                                         guint ci_frame_num);
+MIX_RESULT          mix_videoconfigparamsenc_get_ci_frame_info
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gulong * *ci_frame_id,
+                                                         guint *ci_frame_num);
+MIX_RESULT          mix_videoconfigparamsenc_set_drawable
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gulong draw);
+MIX_RESULT          mix_videoconfigparamsenc_get_drawable
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gulong *draw);
+MIX_RESULT          mix_videoconfigparamsenc_set_need_display
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gboolean need_display);
+MIX_RESULT          mix_videoconfigparamsenc_get_need_display
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gboolean *need_display);
+MIX_RESULT          mix_videoconfigparamsenc_set_rate_control
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixRateControl rcmode);
+MIX_RESULT          mix_videoconfigparamsenc_get_rate_control
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixRateControl *rcmode);
+MIX_RESULT          mix_videoconfigparamsenc_set_raw_format
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixRawTargetFormat raw_format);
+MIX_RESULT          mix_videoconfigparamsenc_get_raw_format
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixRawTargetFormat *raw_format);
+MIX_RESULT          mix_videoconfigparamsenc_set_profile
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixProfile profile);
+MIX_RESULT          mix_videoconfigparamsenc_get_profile
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixProfile *profile);
+
+
+
+

Object Hierarchy

+
+  MixParams
+   +----MixVideoConfigParams
+         +----MixVideoConfigParamsEnc
+               +----MixVideoConfigParamsEncMPEG4
+               +----MixVideoConfigParamsEncH264
+
+
+
+

Description

+

+A base object of MI-X video encode configuration parameter objects.

+
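+
+A sketch of populating the common encode parameters through the setter methods
+documented below (all values are illustrative only):
+
+MixVideoConfigParamsEnc *obj = mix_videoconfigparamsenc_new();
+
+mix_videoconfigparamsenc_set_bit_rate(obj, 4000000);        /* 4 Mbps */
+mix_videoconfigparamsenc_set_frame_rate(obj, 30, 1);
+mix_videoconfigparamsenc_set_picture_res(obj, 1280, 720);
+mix_videoconfigparamsenc_set_intra_period(obj, 30);         /* key frame every second */
+mix_videoconfigparamsenc_set_init_qp(obj, 26);
+mix_videoconfigparamsenc_set_min_qp(obj, 1);
+mix_videoconfigparamsenc_set_need_display(obj, FALSE);
+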
+
+

Details

+
+

MixVideoConfigParamsEnc

+
typedef struct {
+	MixVideoConfigParams parent;
+
+	//MixIOVec header;
+
+	/* the type of the following members will be changed after MIX API doc is ready */
+
+	/* Encoding profile */
+	MixProfile profile;
+
+	/* Raw format to be encoded */
+	MixRawTargetFormat raw_format;
+
+	/* Rate control mode */
+	MixRateControl rate_control;  	
+
+	/* Bitrate when rate control is used */
+	guint bitrate;
+	
+	/* Numerator of frame rate */
+	guint frame_rate_num;
+	
+	/* Denominator of frame rate */
+	guint frame_rate_denom;
+	
+	/* The initial QP value */
+	guint initial_qp;
+	
+	/* The minimum QP value */
+	guint min_qp;
+	
+	/* Number of frames between key frames (GOP size) */
+	guint intra_period;
+	
+	/* Width of video frame */
+	guint16 picture_width;
+	
+	/* Height of the video frame */
+	guint16 picture_height;	
+
+	/* Mime type, reserved */
+	GString * mime_type;
+	
+	/* Encode target format */
+	MixEncodeTargetFormat encode_format;
+
+	/* Size of the pool of MixBuffer objects */
+	guint mixbuffer_pool_size;
+
+	/* Are buffers shared between capture and encoding drivers */
+	gboolean share_buf_mode;	
+
+	/* Array of frame IDs created by capture library */
+	gulong *	ci_frame_id;
+	
+	/* Size of the array ci_frame_id */
+	guint	ci_frame_num;
+	
+	
+	/* Indicates whether MixVideoFrames suitable for displaying 
+	 * need to be enqueued for retrieval using mix_video_get_frame() */
+	gboolean need_display;
+	
+	/* Reserved for future use */
+	void *reserved1;
+	
+	/* Reserved for future use */	
+	void *reserved2;
+	
+	/* Reserved for future use */	
+	void *reserved3;
+	
+	/* Reserved for future use */	
+	void *reserved4;
+} MixVideoConfigParamsEnc;
+
+

+MI-X VideoConfig Parameter object

+
+
+
+

mix_videoconfigparamsenc_new ()

+
MixVideoConfigParamsEnc * mix_videoconfigparamsenc_new  (void);
+

+Use this method to create a new instance of MixVideoConfigParamsEnc

+
++ + + + +

returns :

A newly allocated instance of MixVideoConfigParamsEnc +
+
+
+
+

mix_videoconfigparamsenc_ref ()

+
MixVideoConfigParamsEnc * mix_videoconfigparamsenc_ref  (MixVideoConfigParamsEnc *mix);
+

+Increment reference count of the object.

+
++ + + + + + + + + + +

mix :

object to add reference +

returns :

the MixVideoConfigParamsEnc instance where reference count has been increased. +
+
+
+
+

mix_videoconfigparamsenc_unref()

+
#define mix_videoconfigparamsenc_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+
++ + + + +

obj :

object to unref. +
+
+
+
+

mix_videoconfigparamsenc_set_mime_type ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_mime_type
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         const gchar *mime_type);
+

+Set mime type

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

mime_type :

Mime type +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_mime_type ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_mime_type
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gchar **mime_type);
+

+Get mime type +

+

+

+
+

Note

+Caller is responsible for freeing *mime_type with g_free() +
+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

mime_type :

Mime type to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_set_frame_rate ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_frame_rate
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint frame_rate_num,
+                                                         guint frame_rate_denom);
+

+Set frame rate

+
++ + + + + + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

frame_rate_num :

Numerator of frame rate +

frame_rate_denom :

Denominator of frame rate +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_frame_rate ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_frame_rate
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint *frame_rate_num,
+                                                         guint *frame_rate_denom);
+

+Get frame rate

+
++ + + + + + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

frame_rate_num :

Numerator of frame rate to be returned +

frame_rate_denom :

Denominator of frame rate to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_set_picture_res ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_picture_res
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint picture_width,
+                                                         guint picture_height);
+

+Set width and height of video frame

+
++ + + + + + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

picture_width :

Width of video frame +

picture_height :

Height of the video frame +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_picture_res ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_picture_res
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint *picture_width,
+                                                         guint *picture_height);
+

+Get width and height of video frame

+
++ + + + + + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

picture_width :

Width of video frame to be returned +

picture_height :

Height of the video frame to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_set_encode_format ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_encode_format
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixEncodeTargetFormat encode_format);
+

+Set Encode target format

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

encode_format :

Encode target format +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_encode_format ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_encode_format
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixEncodeTargetFormat *encode_format);
+

+Get Encode target format

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

encode_format :

Encode target format to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_set_bit_rate ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_bit_rate
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint bps);
+

+Set bitrate

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

bps :

bitrate +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_bit_rate ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_bit_rate
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint *bps);
+

+Get bitrate

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

bps :

bitrate to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_set_init_qp ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_init_qp
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint initial_qp);
+

+Set the initial QP value

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

initial_qp :

The initial QP value +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_init_qp ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_init_qp
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint *initial_qp);
+

+Get the initial QP value

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

initial_qp :

The initial QP value to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_set_min_qp ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc *obj,
+                                                         guint min_qp);
+

+Set the minimum QP value

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

min_qp :

The minimum QP value +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_min_qp ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_min_qp (MixVideoConfigParamsEnc *obj,
+                                                         guint *min_qp);
+

+Get the minimum QP value

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

min_qp :

The minimum QP value to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_set_intra_period ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_intra_period
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint intra_period);
+

+Set the number of frames between key frames (GOP size)

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

intra_period :

Number of frames between key frames (GOP size) +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_intra_period ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_intra_period
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint *intra_period);
+

+Get the number of frames between key frames (GOP size)

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

intra_period :

Number of frames between key frames (GOP size) to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_set_buffer_pool_size ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_buffer_pool_size
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint bufpoolsize);
+

+Set the size of the pool of MixBuffer objects

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

bufpoolsize :

Size of the pool of MixBuffer objects to allocate +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_buffer_pool_size ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_buffer_pool_size
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         guint *bufpoolsize);
+
+
+Get the size of the pool of MixBuffer objects
+

mix_videoconfigparamsenc_set_share_buf_mode ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_share_buf_mode
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gboolean share_buf_mod);
+

+Set the flag that indicates whether buffers are shared between capture and encoding drivers or not

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

share_buf_mod :

A flag to indicate whether buffers are shared + between capture and encoding drivers or not +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_share_buf_mode ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_share_buf_mode
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gboolean *share_buf_mod);
+

+Get the flag that indicates whether buffers are shared between capture and encoding drivers or not

+
++ + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

share_buf_mod :

the flag to be returned that indicates whether buffers + are shared between capture and encoding drivers or not +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_set_ci_frame_info ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_ci_frame_info
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gulong *ci_frame_id,
+                                                         guint ci_frame_num);
+

+Set CI frame information

+
++ + + + + + + + + + + + + + + + + + +

obj :

MixVideoConfigParamsEnc object +

ci_frame_id :

Array of frame IDs created by capture library +

ci_frame_num :

Size of the array ci_frame_id +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_ci_frame_info ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_ci_frame_info
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gulong **ci_frame_id,
+                                                         guint *ci_frame_num);
+

+Get CI frame information +

+
+

Note

+Caller is responsible for calling g_free() on *ci_frame_id +
+

obj :

MixVideoConfigParamsEnc object +

ci_frame_id :

Array of frame IDs created by capture library to be returned +

ci_frame_num :

Size of the array ci_frame_id to be returned +

returns :

Common Video Error Return Codes +
+
+
+
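A short sketch of the ownership contract above (the g_print() loop and MIX_RESULT_SUCCESS are illustrative assumptions):

static void dump_ci_frames(MixVideoConfigParamsEnc *params)
{
    gulong *ids = NULL;
    guint num = 0, i;

    if (mix_videoconfigparamsenc_get_ci_frame_info(params, &ids, &num)
            == MIX_RESULT_SUCCESS) {
        for (i = 0; i < num; i++)
            g_print("CI frame %u: id %lu\n", i, ids[i]);
        g_free(ids);  /* caller-owned, per the note above */
    }
}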
+

mix_videoconfigparamsenc_set_drawable ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_drawable
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gulong draw);
+

+Set drawable

+

obj :

MixVideoConfigParamsEnc object +

draw :

drawable +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_drawable ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_drawable
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gulong *draw);
+

+Get drawable

+

obj :

MixVideoConfigParamsEnc object +

draw :

drawable to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_set_need_display ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_need_display
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gboolean need_display);
+

+Set the flag used to indicate whether MixVideoFrames suitable for displaying +need to be enqueued for retrieval using mix_video_get_frame()

+

obj :

MixVideoConfigParamsEnc object +

need_display :

Flag to indicate whether MixVideoFrames suitable for displaying need to be enqueued for retrieval using mix_video_get_frame() +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_need_display ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_need_display
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         gboolean *need_display);
+

+Get the flag used to indicate whether MixVideoFrames suitable for displaying +need to be enqueued for retrieval using mix_video_get_frame()

+

obj :

MixVideoConfigParamsEnc object +

need_display :

A flag to be returned that indicates whether MixVideoFrames suitable for displaying need to be enqueued for retrieval using mix_video_get_frame() +

returns :

Common Video Error Return Codes +
+
+
+
+
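A one-call sketch for an encode-only pipeline (the FALSE value reflects typical usage and is an assumption, not a requirement stated here):

/* Sketch: an encode-only pipeline that never displays frames */
static void disable_display_queue(MixVideoConfigParamsEnc *params)
{
    mix_videoconfigparamsenc_set_need_display(params, FALSE);
}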

mix_videoconfigparamsenc_set_rate_control ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_rate_control
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixRateControl rcmode);
+

+Set Rate control mode

+

obj :

MixVideoConfigParamsEnc object +

rcmode :

Rate control mode +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_rate_control ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_rate_control
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixRateControl *rcmode);
+
+Get Rate control mode
+
+

mix_videoconfigparamsenc_set_raw_format ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_raw_format
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixRawTargetFormat raw_format);
+

+Set Raw format to be encoded

+

obj :

MixVideoConfigParamsEnc object +

raw_format :

Raw format to be encoded +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_raw_format ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_raw_format
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixRawTargetFormat *raw_format);
+

+Get Raw format

+

obj :

MixVideoConfigParamsEnc object +

raw_format :

Raw format to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_set_profile ()

+
MIX_RESULT          mix_videoconfigparamsenc_set_profile
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixProfile profile);
+

+Set Encoding profile

+

obj :

MixVideoConfigParamsEnc object +

profile :

Encoding profile +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_get_profile ()

+
MIX_RESULT          mix_videoconfigparamsenc_get_profile
+                                                        (MixVideoConfigParamsEnc *obj,
+                                                         MixProfile *profile);
+

+Get Encoding profile

+

obj :

MixVideoConfigParamsEnc object +

profile :

Encoding profile to be returned +

returns :

Common Video Error Return Codes +
+
+
+
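The rate control, raw format and profile setters are usually applied together. A sketch follows; the enum members of MixRateControl, MixRawTargetFormat and MixProfile are not listed in this section, so they are taken here as caller-supplied values:

/* Sketch: apply caller-chosen rate control, input format and profile */
static MIX_RESULT configure_enc_mode(MixVideoConfigParamsEnc *params,
                                     MixRateControl rcmode,
                                     MixRawTargetFormat raw_format,
                                     MixProfile profile)
{
    MIX_RESULT ret;

    ret = mix_videoconfigparamsenc_set_rate_control(params, rcmode);
    if (ret != MIX_RESULT_SUCCESS)
        return ret;

    ret = mix_videoconfigparamsenc_set_raw_format(params, raw_format);
    if (ret != MIX_RESULT_SUCCESS)
        return ret;

    return mix_videoconfigparamsenc_set_profile(params, profile);
}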
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncH264.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncH264.html new file mode 100644 index 0000000..524f115 --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncH264.html @@ -0,0 +1,398 @@ + + + + +MixVideoConfigParamsEncH264 + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixVideoConfigParamsEncH264

+

MixVideoConfigParamsEncH264 — MI-X Video H.264 Encode Configuration Parameter

+
+ +
+

Object Hierarchy

+
+  MixParams
+   +----MixVideoConfigParams
+         +----MixVideoConfigParamsEnc
+               +----MixVideoConfigParamsEncH264
+
+
+
+

Description

+

+MI-X video H.264 encode configuration parameter objects.

+
+
+

Details

+
+

MixVideoConfigParamsEncH264

+
typedef struct {
+  MixVideoConfigParamsEnc parent;
+
+
+  /* TODO: Add H.264 configuration parameters */
+  
+  /* The basic unit size used by rate control */  
+  guint basic_unit_size;
+  
+  /* Number of slices in one frame */
+  guint slice_num;
+  
+  /* enable/disable deblocking */
+  guint8 disable_deblocking_filter_idc;	
+
+  /* delimiter_type */
+  MixDelimiterType delimiter_type;
+  
+  /* Reserved for future use */  
+  void *reserved1;
+  
+  /* Reserved for future use */  
+  void *reserved2;
+  
+  /* Reserved for future use */  
+  void *reserved3;
+  
+  /* Reserved for future use */  
+  void *reserved4;
+} MixVideoConfigParamsEncH264;
+
+

+MI-X VideoConfig Parameter object

+
+
+
+

mix_videoconfigparamsenc_h264_new ()

+
MixVideoConfigParamsEncH264 * mix_videoconfigparamsenc_h264_new
+                                                        (void);
+

+Use this method to create a new instance of MixVideoConfigParamsEncH264

+

returns :

A newly allocated instance of MixVideoConfigParamsEncH264 +
+
+
+
+

mix_videoconfigparamsenc_h264_unref()

+
#define mix_videoconfigparamsenc_h264_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+

obj :

object to unref. +
+
+
+
+

mix_videoconfigparamsenc_h264_set_bus ()

+
MIX_RESULT          mix_videoconfigparamsenc_h264_set_bus
+                                                        (MixVideoConfigParamsEncH264 *obj,
+                                                         guint basic_unit_size);
+

+Set the basic unit size used by rate control

+

obj :

MixVideoConfigParamsEncH264 object +

basic_unit_size :

The basic unit size used by rate control +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_h264_get_bus ()

+
MIX_RESULT          mix_videoconfigparamsenc_h264_get_bus
+                                                        (MixVideoConfigParamsEncH264 *obj,
+                                                         guint *basic_unit_size);
+

+Get the basic unit size used by rate control

+

obj :

MixVideoConfigParamsEncH264 object +

basic_unit_size :

The basic unit size to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_h264_set_dlk ()

+
MIX_RESULT          mix_videoconfigparamsenc_h264_set_dlk
+                                                        (MixVideoConfigParamsEncH264 *obj,
+                                                         guint disable_deblocking_filter_idc);
+

+Set the flag to enable/disable deblocking

+

obj :

MixVideoConfigParamsEncH264 object +

disable_deblocking_filter_idc :

The flag to enable/disable deblocking +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_h264_get_dlk ()

+
MIX_RESULT          mix_videoconfigparamsenc_h264_get_dlk
+                                                        (MixVideoConfigParamsEncH264 *obj,
+                                                         guint *disable_deblocking_filter_idc);
+

+Get the flag to enable/disable deblocking

+

obj :

MixVideoConfigParamsEncH264 object +

disable_deblocking_filter_idc :

deblocking flag to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_h264_set_slice_num ()

+
MIX_RESULT          mix_videoconfigparamsenc_h264_set_slice_num
+                                                        (MixVideoConfigParamsEncH264 *obj,
+                                                         guint slice_num);
+

+Set the Number of slices in one frame

+

obj :

MixVideoConfigParamsEncH264 object +

slice_num :

Number of slices in one frame +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_h264_get_slice_num ()

+
MIX_RESULT          mix_videoconfigparamsenc_h264_get_slice_num
+                                                        (MixVideoConfigParamsEncH264 *obj,
+                                                         guint *slice_num);
+

+Get the Number of slices in one frame

+

obj :

MixVideoConfigParamsEncH264 object +

slice_num :

Number of slices in one frame to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_h264_set_delimiter_type ()

+
MIX_RESULT          mix_videoconfigparamsenc_h264_set_delimiter_type
+                                                        (MixVideoConfigParamsEncH264 *obj,
+                                                         MixDelimiterType delimiter_type);
+

+Set Delimiter type

+

obj :

MixVideoConfigParamsEncH264 object +

delimiter_type :

Delimiter type +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_h264_get_delimiter_type ()

+
MIX_RESULT          mix_videoconfigparamsenc_h264_get_delimiter_type
+                                                        (MixVideoConfigParamsEncH264 *obj,
+                                                         MixDelimiterType *delimiter_type);
+

+Get Delimiter type

+

obj :

MixVideoConfigParamsEncH264 object +

delimiter_type :

Delimiter type to be returned +

returns :

Common Video Error Return Codes +
+
+
+
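A configuration sketch for this object. Assumptions: reading idc value 0 as "deblocking enabled" follows the H.264 convention and is not stated in this document, and the MixDelimiterType members are not listed here, so that setter is omitted.

static void configure_h264(void)
{
    MixVideoConfigParamsEncH264 *h264 = mix_videoconfigparamsenc_h264_new();

    if (h264 != NULL) {
        mix_videoconfigparamsenc_h264_set_bus(h264, 0);        /* basic unit size for rate control */
        mix_videoconfigparamsenc_h264_set_slice_num(h264, 1);  /* one slice per frame */
        mix_videoconfigparamsenc_h264_set_dlk(h264, 0);        /* 0 = deblocking enabled (assumed) */
        /* ... hand the object to the encoder configuration call ... */
        mix_videoconfigparamsenc_h264_unref(h264);
    }
}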
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncMPEG4.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncMPEG4.html new file mode 100644 index 0000000..25a310e --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncMPEG4.html @@ -0,0 +1,319 @@ + + + + +MixVideoConfigParamsEncMPEG4 + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixVideoConfigParamsEncMPEG4

+

MixVideoConfigParamsEncMPEG4 — MI-X Video MPEG 4:2 Encode Configuration Parameter

+
+
+

Synopsis

+
+                    MixVideoConfigParamsEncMPEG4;
+MixVideoConfigParamsEncMPEG4 * mix_videoconfigparamsenc_mpeg4_new
+                                                        (void);
+#define             mix_videoconfigparamsenc_mpeg4_unref(obj)
+MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_dlk
+                                                        (MixVideoConfigParamsEncMPEG4 *obj,
+                                                         guint disable_deblocking_filter_idc);
+MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_dlk
+                                                        (MixVideoConfigParamsEncMPEG4 *obj,
+                                                         guint *disable_deblocking_filter_idc);
+MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_profile_level
+                                                        (MixVideoConfigParamsEncMPEG4 *obj,
+                                                         guchar profile_and_level_indication);
+MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_profile_level
+                                                        (MixVideoConfigParamsEncMPEG4 *obj,
+                                                         guchar *profile_and_level_indication);
+MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_fixed_vti
+                                                        (MixVideoConfigParamsEncMPEG4 *obj,
+                                                         guint fixed_vop_time_increment);
+MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_fixed_vti
+                                                        (MixVideoConfigParamsEncMPEG4 *obj,
+                                                         guint *fixed_vop_time_increment);
+
+
+
+

Object Hierarchy

+
+  MixParams
+   +----MixVideoConfigParams
+         +----MixVideoConfigParamsEnc
+               +----MixVideoConfigParamsEncMPEG4
+
+
+
+

Description

+

+MI-X video MPEG 4:2 encode configuration parameter objects.

+
+
+

Details

+
+

MixVideoConfigParamsEncMPEG4

+
typedef struct {
+  MixVideoConfigParamsEnc parent;
+
+
+  /* TODO: Add MPEG-4 configuration parameters */
+  
+  /* Indicate profile and level. 
+   * Default value is 3. 
+   * Can be ignored (refer to encoding 
+   * specification for more info). */
+  guchar  profile_and_level_indication;
+  
+  /* Number of ticks between two successive VOPs 
+   * in display order. Default value is 3. 
+   * Can be ignored (refer to encoding specification 
+   * for more info) */
+  guint fixed_vop_time_increment;
+  
+  /* enable/disable deblocking */
+  guint disable_deblocking_filter_idc;
+  
+  /* Reserved for future use */
+  void *reserved1;
+  
+  /* Reserved for future use */  
+  void *reserved2;
+  
+  /* Reserved for future use */  
+  void *reserved3;
+  
+  /* Reserved for future use */  
+  void *reserved4;
+} MixVideoConfigParamsEncMPEG4;
+
+

+MI-X VideoConfig Parameter object

+
+
+
+

mix_videoconfigparamsenc_mpeg4_new ()

+
MixVideoConfigParamsEncMPEG4 * mix_videoconfigparamsenc_mpeg4_new
+                                                        (void);
+

+Use this method to create a new instance of MixVideoConfigParamsEncMPEG4

+

returns :

A newly allocated instance of MixVideoConfigParamsEncMPEG4 +
+
+
+
+

mix_videoconfigparamsenc_mpeg4_unref()

+
#define mix_videoconfigparamsenc_mpeg4_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+

obj :

object to unref. +
+
+
+
+

mix_videoconfigparamsenc_mpeg4_set_dlk ()

+
MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_dlk
+                                                        (MixVideoConfigParamsEncMPEG4 *obj,
+                                                         guint disable_deblocking_filter_idc);
+

+Set the flag to enable/disable deblocking

+

obj :

MixVideoConfigParamsEncMPEG4 object +

disable_deblocking_filter_idc :

The flag to enable/disable deblocking +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_mpeg4_get_dlk ()

+
MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_dlk
+                                                        (MixVideoConfigParamsEncMPEG4 *obj,
+                                                         guint *disable_deblocking_filter_idc);
+

+Get the flag to enable/disable deblocking

+

obj :

MixVideoConfigParamsEncMPEG4 object +

disable_deblocking_filter_idc :

deblocking flag to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_mpeg4_set_profile_level ()

+
MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_profile_level
+                                                        (MixVideoConfigParamsEncMPEG4 *obj,
+                                                         guchar profile_and_level_indication);
+

+Set profile_and_level_indication

+

obj :

MixVideoConfigParamsEncMPEG4 object +

profile_and_level_indication :

Indicate profile and level. Default value is 3. + Can be ignored (refer to encoding specification + for more info). +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_mpeg4_get_profile_level ()

+
MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_profile_level
+                                                        (MixVideoConfigParamsEncMPEG4 *obj,
+                                                         guchar *profile_and_level_indication);
+

+Get profile_and_level_indication

+

obj :

MixVideoConfigParamsEncMPEG4 object +

profile_and_level_indication :

profile_and_level_indication to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoconfigparamsenc_mpeg4_set_fixed_vti ()

+
MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_fixed_vti
+                                                        (MixVideoConfigParamsEncMPEG4 *obj,
+                                                         guint fixed_vop_time_increment);
+
+
+Set fixed_vop_time_increment
+

mix_videoconfigparamsenc_mpeg4_get_fixed_vti ()

+
MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_fixed_vti
+                                                        (MixVideoConfigParamsEncMPEG4 *obj,
+                                                         guint *fixed_vop_time_increment);
+

+Get fixed_vop_time_increment

+

obj :

MixVideoConfigParamsEncMPEG4 object +

fixed_vop_time_increment :

fixed_vop_time_increment to be returned +

returns :

Common Video Error Return Codes +
+
+
+
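A configuration sketch using the defaults documented in the struct comments above (both values default to 3); the deblocking value 0 meaning "enabled" is an assumption:

static void configure_mpeg4(void)
{
    MixVideoConfigParamsEncMPEG4 *mp4 = mix_videoconfigparamsenc_mpeg4_new();

    if (mp4 != NULL) {
        mix_videoconfigparamsenc_mpeg4_set_profile_level(mp4, 3); /* documented default */
        mix_videoconfigparamsenc_mpeg4_set_fixed_vti(mp4, 3);     /* documented default */
        mix_videoconfigparamsenc_mpeg4_set_dlk(mp4, 0);           /* 0 = deblocking enabled (assumed) */
        mix_videoconfigparamsenc_mpeg4_unref(mp4);
    }
}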
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoDecodeParams.html b/mix_video/docs/reference/MixVideo/html/MixVideoDecodeParams.html new file mode 100644 index 0000000..d6b8394 --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/MixVideoDecodeParams.html @@ -0,0 +1,281 @@ + + + + +MixVideoDecodeParams + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixVideoDecodeParams

+

MixVideoDecodeParams — MI-X Video Decode Parameters

+
+ +
+

Object Hierarchy

+
+  MixParams
+   +----MixVideoDecodeParams
+
+
+
+

Description

+

+The MixVideoDecodeParams object will be created by the MMF/App +and provided to MixVideo in the MixVideo mix_video_decode() function.

+
+
+

Details

+
+

MixVideoDecodeParams

+
typedef struct {
+	MixParams parent;
+
+
+	/* TODO: Add properties */
+	
+	/* Presentation timestamp for the video 
+	 * frame data, in milliseconds */
+	guint64 timestamp;
+	
+	/* Indicates a discontinuity in the stream */
+	gboolean discontinuity;
+
+	/* Reserved for future use */	
+	void *reserved1;
+	
+	/* Reserved for future use */	
+	void *reserved2;
+	
+	/* Reserved for future use */	
+	void *reserved3;
+	
+	/* Reserved for future use */	
+	void *reserved4;
+} MixVideoDecodeParams;
+
+

+MI-X VideoDecode Parameter object

+
+
+
+

mix_videodecodeparams_new ()

+
MixVideoDecodeParams * mix_videodecodeparams_new        (void);
+

+Use this method to create a new instance of MixVideoDecodeParams

+

returns :

A newly allocated instance of MixVideoDecodeParams +
+
+
+
+

mix_videodecodeparams_ref ()

+
MixVideoDecodeParams * mix_videodecodeparams_ref        (MixVideoDecodeParams *mix);
+

+Add reference count.

+

mix :

object to add reference +

returns :

the MixVideoDecodeParams instance where reference count has been increased. +
+
+
+
+

mix_videodecodeparams_unref()

+
#define mix_videodecodeparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+

obj :

object to unref. +
+
+
+
+

mix_videodecodeparams_set_timestamp ()

+
MIX_RESULT          mix_videodecodeparams_set_timestamp (MixVideoDecodeParams *obj,
+                                                         guint64 timestamp);
+

+Set Presentation timestamp

+

obj :

MixVideoDecodeParams object +

timestamp :

Presentation timestamp for the video frame data, in milliseconds +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videodecodeparams_get_timestamp ()

+
MIX_RESULT          mix_videodecodeparams_get_timestamp (MixVideoDecodeParams *obj,
+                                                         guint64 *timestamp);
+

+Get Presentation timestamp

+

obj :

MixVideoDecodeParams object +

timestamp :

Presentation timestamp for the video frame data, in milliseconds to be returned. +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videodecodeparams_set_discontinuity ()

+
MIX_RESULT          mix_videodecodeparams_set_discontinuity
+                                                        (MixVideoDecodeParams *obj,
+                                                         gboolean discontinuity);
+

+Set discontinuity flag

+

obj :

MixVideoDecodeParams object +

discontinuity :

Flag that indicates a discontinuity in the stream. +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videodecodeparams_get_discontinuity ()

+
MIX_RESULT          mix_videodecodeparams_get_discontinuity
+                                                        (MixVideoDecodeParams *obj,
+                                                         gboolean *discontinuity);
+

+Get discontinuity flag

+

obj :

MixVideoDecodeParams object +

discontinuity :

Discontinuity flag to be returned +

returns :

Common Video Error Return Codes +
+
+
+
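A sketch preparing decode parameters before a mix_video_decode() call (the 40 ms timestamp is illustrative):

static void make_decode_params(void)
{
    MixVideoDecodeParams *dp = mix_videodecodeparams_new();

    if (dp != NULL) {
        mix_videodecodeparams_set_timestamp(dp, 40);        /* PTS in milliseconds (illustrative) */
        mix_videodecodeparams_set_discontinuity(dp, FALSE); /* continuous stream */
        /* ... pass dp to mix_video_decode() together with the input buffers ... */
        mix_videodecodeparams_unref(dp);
    }
}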
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoEncodeParams.html b/mix_video/docs/reference/MixVideo/html/MixVideoEncodeParams.html new file mode 100644 index 0000000..e84b412 --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/MixVideoEncodeParams.html @@ -0,0 +1,155 @@ + + + + +MixVideoEncodeParams + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixVideoEncodeParams

+

MixVideoEncodeParams — MI-X Video Encode Parameters

+
+ +
+

Object Hierarchy

+
+  MixParams
+   +----MixVideoEncodeParams
+
+
+
+

Description

+

+The MixVideoEncodeParams object will be created by +the MMF/App and provided to MixVideo in the MixVideo +mix_video_encode() function. Get methods for the +properties will be available for the caller to +retrieve configuration information. Currently this +object is reserved for future use.

+
+
+

Details

+
+

MixVideoEncodeParams

+
typedef struct {
+	MixParams parent;
+
+
+	/* TODO: Add properties */
+	
+	
+	/* Reserved for future use */ 
+	void *reserved1;
+	
+	/* Reserved for future use */	
+	void *reserved2;
+	
+	/* Reserved for future use */	
+	void *reserved3;
+	
+	/* Reserved for future use */	
+	void *reserved4;
+} MixVideoEncodeParams;
+
+

+MI-X VideoEncode Parameter object

+
+
+
+

mix_videoencodeparams_new ()

+
MixVideoEncodeParams * mix_videoencodeparams_new        (void);
+

+Use this method to create a new instance of MixVideoEncodeParams

+

returns :

A newly allocated instance of MixVideoEncodeParams +
+
+
+
+

mix_videoencodeparams_ref ()

+
MixVideoEncodeParams * mix_videoencodeparams_ref        (MixVideoEncodeParams *mix);
+

+Add reference count.

+

mix :

object to add reference +

returns :

the MixVideoEncodeParams instance where reference count has been increased. +
+
+
+
+

mix_videoencodeparams_unref()

+
#define mix_videoencodeparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+

obj :

object to unref. +
+
+
+
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoFrame.html b/mix_video/docs/reference/MixVideo/html/MixVideoFrame.html new file mode 100644 index 0000000..5147c84 --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/MixVideoFrame.html @@ -0,0 +1,423 @@ + + + + +MixVideoFrame + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixVideoFrame

+

MixVideoFrame — MI-X Video Frame Object

+
+
+

Synopsis

+
+                    MixVideoFrame;
+MixVideoFrame *     mix_videoframe_new                  (void);
+MixVideoFrame *     mix_videoframe_ref                  (MixVideoFrame *obj);
+void                mix_videoframe_unref                (MixVideoFrame *obj);
+MIX_RESULT          mix_videoframe_set_frame_id         (MixVideoFrame *obj,
+                                                         gulong frame_id);
+MIX_RESULT          mix_videoframe_get_frame_id         (MixVideoFrame *obj,
+                                                         gulong *frame_id);
+MIX_RESULT          mix_videoframe_set_ci_frame_idx     (MixVideoFrame *obj,
+                                                         guint ci_frame_idx);
+MIX_RESULT          mix_videoframe_get_ci_frame_idx     (MixVideoFrame *obj,
+                                                         guint *ci_frame_idx);
+MIX_RESULT          mix_videoframe_set_timestamp        (MixVideoFrame *obj,
+                                                         guint64 timestamp);
+MIX_RESULT          mix_videoframe_get_timestamp        (MixVideoFrame *obj,
+                                                         guint64 *timestamp);
+MIX_RESULT          mix_videoframe_set_discontinuity    (MixVideoFrame *obj,
+                                                         gboolean discontinuity);
+MIX_RESULT          mix_videoframe_get_discontinuity    (MixVideoFrame *obj,
+                                                         gboolean *discontinuity);
+
+
+
+

Object Hierarchy

+
+  MixParams
+   +----MixVideoFrame
+
+
+
+

Description

+

+

+

+The MixVideoFrame object will be created by +MixVideo and provided to the MMF/App in the +MixVideo mix_video_get_frame() function. +

+

+

+

+mix_video_release_frame() must be used +to release frame object returned from +mix_video_get_frame(). Caller must not +use mix_videoframe_ref() or mix_videoframe_unref() +or adjust the reference count directly in any way. +This object can be supplied in the mix_video_render() +function to render the associated video frame. +The MMF/App can release this object when it no longer +needs to display/re-display this frame. +

+
+
+

Details

+
+

MixVideoFrame

+
typedef struct {
+	MixParams parent;
+
+	
+	/* ID associated with the decoded frame */
+	gulong frame_id;
+	
+	/* ID associated with the CI frame 
+	 * (used for encode only) */	
+	guint ci_frame_idx;	
+	
+	/* 64 bit timestamp. For decode, 
+	 * this is preserved from the corresponding 
+	 * MixVideoDecodeParams field. For encode, 
+	 * this is created during encoding. */
+	guint64 timestamp;
+	
+	/* Flag indicating whether there 
+	 * is a discontinuity. For decode, 
+	 * this is preserved from the corresponding 
+	 * MixVideoDecodeParams field. */
+	gboolean discontinuity;
+
+	/* Reserved for future use */ 
+	void *reserved1;
+	
+	/* Reserved for future use */ 
+	void *reserved2;
+	
+	/* Reserved for future use */ 
+	void *reserved3;
+	
+	/* Reserved for future use */ 
+	void *reserved4;
+} MixVideoFrame;
+
+

+MI-X VideoFrame object

+
+
+
+

mix_videoframe_new ()

+
MixVideoFrame *     mix_videoframe_new                  (void);
+

+Use this method to create a new instance of MixVideoFrame

+

returns :

A newly allocated instance of MixVideoFrame +
+
+
+
+

mix_videoframe_ref ()

+
MixVideoFrame *     mix_videoframe_ref                  (MixVideoFrame *obj);
+

+Add reference count.

+

obj :

object to add reference +

returns :

the MixVideoFrame instance where reference count has been increased. +
+
+
+
+

mix_videoframe_unref ()

+
void                mix_videoframe_unref                (MixVideoFrame *obj);
+

+Decrement reference count of the object.

+

obj :

object to unref. +
+
+
+
+

mix_videoframe_set_frame_id ()

+
MIX_RESULT          mix_videoframe_set_frame_id         (MixVideoFrame *obj,
+                                                         gulong frame_id);
+

+Set Frame ID

+

obj :

MixVideoFrame object +

frame_id :

ID associated with the decoded frame +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoframe_get_frame_id ()

+
MIX_RESULT          mix_videoframe_get_frame_id         (MixVideoFrame *obj,
+                                                         gulong *frame_id);
+

+Get Frame ID

+

obj :

MixVideoFrame object +

frame_id :

frame ID to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoframe_set_ci_frame_idx ()

+
MIX_RESULT          mix_videoframe_set_ci_frame_idx     (MixVideoFrame *obj,
+                                                         guint ci_frame_idx);
+

+Set CI Frame ID

+

obj :

MixVideoFrame object +

ci_frame_idx :

ID associated with the CI frame (used for encode only) +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoframe_get_ci_frame_idx ()

+
MIX_RESULT          mix_videoframe_get_ci_frame_idx     (MixVideoFrame *obj,
+                                                         guint *ci_frame_idx);
+

+Get CI Frame ID

+

obj :

MixVideoFrame object +

ci_frame_idx :

CI Frame ID to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoframe_set_timestamp ()

+
MIX_RESULT          mix_videoframe_set_timestamp        (MixVideoFrame *obj,
+                                                         guint64 timestamp);
+

+Set Frame timestamp

+

obj :

MixVideoFrame object +

timestamp :

Frame timestamp +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoframe_get_timestamp ()

+
MIX_RESULT          mix_videoframe_get_timestamp        (MixVideoFrame *obj,
+                                                         guint64 *timestamp);
+

+Get Frame timestamp

+

obj :

MixVideoFrame object +

timestamp :

Frame timestamp to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoframe_set_discontinuity ()

+
MIX_RESULT          mix_videoframe_set_discontinuity    (MixVideoFrame *obj,
+                                                         gboolean discontinuity);
+

+Set discontinuity flag

+

obj :

MixVideoFrame object +

discontinuity :

Discontinuity flag +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoframe_get_discontinuity ()

+
MIX_RESULT          mix_videoframe_get_discontinuity    (MixVideoFrame *obj,
+                                                         gboolean *discontinuity);
+

+Get discontinuity flag

+

obj :

MixVideoFrame object +

discontinuity :

Discontinuity flag to be returned +

returns :

Common Video Error Return Codes +
+
+
+
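A lifecycle sketch of the release contract described above. The exact mix_video_get_frame() and mix_video_release_frame() signatures are assumed; both appear in the API index but are not defined in this section.

static void show_one_frame(MixVideo *video)  /* MixVideo type assumed from mixvideo.h */
{
    MixVideoFrame *frame = NULL;
    guint64 ts = 0;

    if (mix_video_get_frame(video, &frame) == MIX_RESULT_SUCCESS) { /* signature assumed */
        mix_videoframe_get_timestamp(frame, &ts);
        /* ... optionally render via mix_video_render() ... */
        mix_video_release_frame(video, frame); /* never mix_videoframe_unref() here */
    }
}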
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoInitParams.html b/mix_video/docs/reference/MixVideo/html/MixVideoInitParams.html new file mode 100644 index 0000000..79cb486 --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/MixVideoInitParams.html @@ -0,0 +1,214 @@ + + + + +MixVideoInitParams + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixVideoInitParams

+

MixVideoInitParams — MI-X Video Initialization Parameters

+
+ +
+

Object Hierarchy

+
+  MixParams
+   +----MixVideoInitParams
+
+
+
+

Description

+

+The MixVideoInitParams object will be created by the MMF/App +and provided in the mix_video_initialize() function. +The get and set methods for the properties will be available for +the caller to set and get information used at initialization time.

+
+
+

Details

+
+

MixVideoInitParams

+
typedef struct {
+  MixParams parent;
+
+
+  /* Pointer to a MixDisplay object 
+   * such as MixDisplayX11 */
+  MixDisplay *display;
+  
+  /* Reserved for future use */
+  void *reserved1;
+  
+  /* Reserved for future use */  
+  void *reserved2;
+  
+  /* Reserved for future use */  
+  void *reserved3;
+  
+  /* Reserved for future use */  
+  void *reserved4;
+} MixVideoInitParams;
+
+

+MI-X VideoInit Parameter object

+
+
+
+

mix_videoinitparams_new ()

+
MixVideoInitParams * mix_videoinitparams_new            (void);
+

+Use this method to create a new instance of MixVideoInitParams

+

returns :

A newly allocated instance of MixVideoInitParams +
+
+
+
+

mix_videoinitparams_ref ()

+
MixVideoInitParams * mix_videoinitparams_ref            (MixVideoInitParams *mix);
+

+Add reference count.

+

mix :

object to add reference +

returns :

the MixVideoInitParams instance where reference count has been increased. +
+
+
+
+

mix_videoinitparams_unref()

+
#define mix_videoinitparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+

obj :

object to unref. +
+
+
+
+

mix_videoinitparams_set_display ()

+
MIX_RESULT          mix_videoinitparams_set_display     (MixVideoInitParams *obj,
+                                                         MixDisplay *display);
+

+Set MixDisplay object

+

obj :

MixVideoInitParams object +

display :

Pointer to a MixDisplay object such as MixDisplayX11 +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videoinitparams_get_display ()

+
MIX_RESULT          mix_videoinitparams_get_display     (MixVideoInitParams *obj,
+                                                         MixDisplay **display);
+

+Get MixDisplay object

+

obj :

MixVideoInitParams object +

display :

Pointer to a pointer to a MixDisplay object such as MixDisplayX11 +

returns :

Common Video Error Return Codes +
+
+
+
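A sketch of typical use (passing a concrete display object such as MixDisplayX11 as a MixDisplay* is an assumption based on the struct comment above):

static void make_init_params(MixDisplay *display)  /* e.g. a MixDisplayX11 */
{
    MixVideoInitParams *init = mix_videoinitparams_new();

    if (init != NULL) {
        mix_videoinitparams_set_display(init, display);
        /* ... pass init to mix_video_initialize() ... */
        mix_videoinitparams_unref(init);
    }
}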
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoRenderParams.html b/mix_video/docs/reference/MixVideo/html/MixVideoRenderParams.html new file mode 100644 index 0000000..6d998bf --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/MixVideoRenderParams.html @@ -0,0 +1,418 @@ + + + + +MixVideoRenderParams + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+

MixVideoRenderParams

+

MixVideoRenderParams — MI-X Video Render Parameters

+
+ +
+

Object Hierarchy

+
+  MixParams
+   +----MixVideoRenderParams
+
+
+
+

Description

+

+The MixVideoRenderParams object will be created by the MMF/App +and provided to MixVideo in the MixVideo mix_video_render() function.

+
+
+

Details

+
+

MixVideoRenderParams

+
typedef struct {
+	MixParams parent;
+
+	
+	/* Pointer to a MixDisplay object 
+	 * such as MixDisplayX11 */
+	MixDisplay *display;
+
+	/* MixRect object to define offset, 
+	 * height and width of source image */
+	MixRect src_rect;
+	
+	/* MixRect object to define offset, 
+	 * height and width of the display 
+	 * destination */
+	MixRect dst_rect;
+
+	/* Array of clipping rectangles 
+	 * to be applied */
+	MixRect *clipping_rects;
+	
+	/* Number of clipping rectangles 
+	 * in clipping_rects */
+	guint number_of_clipping_rects;
+
+	/* Post processing parameters */
+	guint post_proc;
+
+	/* Reserved */
+	gpointer reserved;
+	
+	/* Reserved for future use */
+	gpointer reserved1;
+	
+	/* Reserved for future use */	
+	gpointer reserved2;
+	
+	/* Reserved for future use */	
+	gpointer reserved3;
+	
+	/* Reserved for future use */	
+	gpointer reserved4;
+} MixVideoRenderParams;
+
+

+MI-X VideoRender Parameter object

+
+
+
+

mix_videorenderparams_new ()

+
MixVideoRenderParams * mix_videorenderparams_new        (void);
+

+Use this method to create a new instance of MixVideoRenderParams

+

returns :

A newly allocated instance of MixVideoRenderParams +
+
+
+
+

mix_videorenderparams_ref ()

+
MixVideoRenderParams * mix_videorenderparams_ref        (MixVideoRenderParams *mix);
+

+Add reference count.

+

mix :

object to add reference +

returns :

the MixVideoRenderParams instance where reference count has been increased. +
+
+
+
+

mix_videorenderparams_unref()

+
#define mix_videorenderparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+

+Decrement reference count of the object.

+

obj :

object to unref. +
+
+
+
+

mix_videorenderparams_set_display ()

+
MIX_RESULT          mix_videorenderparams_set_display   (MixVideoRenderParams *obj,
+                                                         MixDisplay *display);
+

+Set MixDisplay Object

+

obj :

MixVideoRenderParams object +

display :

MixDisplay object +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videorenderparams_get_display ()

+
MIX_RESULT          mix_videorenderparams_get_display   (MixVideoRenderParams *obj,
+                                                         MixDisplay **display);
+

+Get MixDisplay Object

+

obj :

MixVideoRenderParams object +

display :

pointer to MixDisplay object +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videorenderparams_set_src_rect ()

+
MIX_RESULT          mix_videorenderparams_set_src_rect  (MixVideoRenderParams *obj,
+                                                         MixRect src_rect);
+

+Set source rectangle

+

obj :

MixVideoRenderParams object +

src_rect :

MixRect object to define offset, height and width of source image +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videorenderparams_get_src_rect ()

+
MIX_RESULT          mix_videorenderparams_get_src_rect  (MixVideoRenderParams *obj,
+                                                         MixRect *src_rect);
+

+Get source rectangle

+

obj :

MixVideoRenderParams object +

src_rect :

Source rectangle to be returned +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videorenderparams_set_dest_rect ()

+
MIX_RESULT          mix_videorenderparams_set_dest_rect (MixVideoRenderParams *obj,
+                                                         MixRect dst_rect);
+

+Set destination rectangle

+

obj :

MixVideoRenderParams object +

dst_rect :

MixRect object to define offset, height and width of the display destination +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videorenderparams_get_dest_rect ()

+
MIX_RESULT          mix_videorenderparams_get_dest_rect (MixVideoRenderParams *obj,
+                                                         MixRect *dst_rect);
+
+
+Get destination rectangle
+

mix_videorenderparams_set_clipping_rects ()

+
MIX_RESULT          mix_videorenderparams_set_clipping_rects
+                                                        (MixVideoRenderParams *obj,
+                                                         MixRect *clipping_rects,
+                                                         guint number_of_clipping_rects);
+

+Set clipping rectangles

+

obj :

MixVideoRenderParams object +

clipping_rects :

Array of clipping rectangles to be applied +

number_of_clipping_rects :

Number of clipping rectangles in clipping_rects +

returns :

Common Video Error Return Codes +
+
+
+
+

mix_videorenderparams_get_clipping_rects ()

+
MIX_RESULT          mix_videorenderparams_get_clipping_rects
+                                                        (MixVideoRenderParams *obj,
+                                                         MixRect **clipping_rects,
+                                                         guint *number_of_clipping_rects);
+

+Get clipping rectangles +

+

+

+
+

Note

+DO NOT free clipping_rects! +
+

obj :

MixVideoRenderParams object +

clipping_rects :

Array of clipping rectangles returned +

number_of_clipping_rects :

Number of clipping rectangles in clipping_rects returned +

returns :

Common Video Error Return Codes +
+
+
+
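A sketch combining the setters with the get_clipping_rects() ownership note. MixRect values are prepared by the caller, since the MixRect field layout is not shown in this section:

static void make_render_params(MixDisplay *display, MixRect src_rect, MixRect dst_rect)
{
    MixVideoRenderParams *rp = mix_videorenderparams_new();
    MixRect *clips = NULL;
    guint n = 0;

    if (rp == NULL)
        return;

    mix_videorenderparams_set_display(rp, display);
    mix_videorenderparams_set_src_rect(rp, src_rect);   /* caller-prepared MixRect values */
    mix_videorenderparams_set_dest_rect(rp, dst_rect);

    /* Reading back: per the note above, do NOT free the returned array */
    mix_videorenderparams_get_clipping_rects(rp, &clips, &n);

    /* ... pass rp to mix_video_render() ... */
    mix_videorenderparams_unref(rp);
}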
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/api-index-full.html b/mix_video/docs/reference/MixVideo/html/api-index-full.html new file mode 100644 index 0000000..29a702d --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/api-index-full.html @@ -0,0 +1,466 @@ + + + + +API Index + + + + + + + + + + + + + + + + + + +
+

+API Index

+
+
+

M

+
+
MixBuffer
+
+
MixBufferCallback
+
+
MixCodecMode
+
+
MixDelimiterType
+
+
MixDisplay
+
+
MixDisplayCopyFunction
+
+
MixDisplayDupFunction
+
+
MixDisplayEqualFunction
+
+
MixDisplayFinalizeFunction
+
+
MixDisplayX11
+
+
MixDrmParams
+
+
MixEncodeTargetFormat
+
+
MixFrameOrderMode
+
+
MixIOVec
+
+
MixParamSpecDisplay
+
+
MixProfile
+
+
MixRateControl
+
+
MixRawTargetFormat
+
+
MixRect
+
+
MixState
+
+
MixVideo
+
+
MixVideoConfigParams
+
+
MixVideoConfigParamsDec
+
+
MixVideoConfigParamsDecH264
+
+
MixVideoConfigParamsDecMP42
+
+
MixVideoConfigParamsDecVC1
+
+
MixVideoConfigParamsEnc
+
+
MixVideoConfigParamsEncH264
+
+
MixVideoConfigParamsEncMPEG4
+
+
MixVideoDecodeParams
+
+
MixVideoEncodeParams
+
+
MixVideoFrame
+
+
MixVideoInitParams
+
+
MixVideoRenderParams
+
+
mix_buffer_new
+
+
mix_buffer_ref
+
+
mix_buffer_set_data
+
+
mix_buffer_unref
+
+
mix_displayx11_get_display
+
+
mix_displayx11_get_drawable
+
+
mix_displayx11_new
+
+
mix_displayx11_ref
+
+
mix_displayx11_set_display
+
+
mix_displayx11_set_drawable
+
+
mix_displayx11_unref
+
+
MIX_DISPLAY_CAST
+
+
MIX_DISPLAY_CLASS
+
+
mix_display_copy
+
+
mix_display_dup
+
+
mix_display_equal
+
+
mix_display_new
+
+
mix_display_ref
+
+
MIX_DISPLAY_REFCOUNT
+
+
MIX_DISPLAY_REFCOUNT_VALUE
+
+
mix_display_replace
+
+
mix_display_unref
+
+
mix_drmparams_new
+
+
mix_drmparams_ref
+
+
mix_drmparams_unref
+
+
MIX_IS_PARAM_SPEC_DISPLAY
+
+
MIX_PARAM_SPEC_DISPLAY
+
+
mix_param_spec_display
+
+
mix_param_spec_display_get_type
+
+
MIX_TYPE_PARAM_DISPLAY
+
+
mix_value_dup_display
+
+
mix_value_get_display
+
+
MIX_VALUE_HOLDS_DISPLAY
+
+
mix_value_set_display
+
+
mix_value_take_display
+
+
mix_videoconfigparamsdec_get_buffer_pool_size
+
+
mix_videoconfigparamsdec_get_extra_surface_allocation
+
+
mix_videoconfigparamsdec_get_frame_order_mode
+
+
mix_videoconfigparamsdec_get_frame_rate
+
+
mix_videoconfigparamsdec_get_header
+
+
mix_videoconfigparamsdec_get_mime_type
+
+
mix_videoconfigparamsdec_get_picture_res
+
+
mix_videoconfigparamsdec_get_rate_control
+
+
mix_videoconfigparamsdec_get_raw_format
+
+
mix_videoconfigparamsdec_h264_new
+
+
mix_videoconfigparamsdec_h264_unref
+
+
mix_videoconfigparamsdec_mp42_get_divxversion
+
+
mix_videoconfigparamsdec_mp42_get_mpegversion
+
+
mix_videoconfigparamsdec_mp42_new
+
+
mix_videoconfigparamsdec_mp42_set_divxversion
+
+
mix_videoconfigparamsdec_mp42_set_mpegversion
+
+
mix_videoconfigparamsdec_mp42_unref
+
+
mix_videoconfigparamsdec_new
+
+
mix_videoconfigparamsdec_ref
+
+
mix_videoconfigparamsdec_set_buffer_pool_size
+
+
mix_videoconfigparamsdec_set_extra_surface_allocation
+
+
mix_videoconfigparamsdec_set_frame_order_mode
+
+
mix_videoconfigparamsdec_set_frame_rate
+
+
mix_videoconfigparamsdec_set_header
+
+
mix_videoconfigparamsdec_set_mime_type
+
+
mix_videoconfigparamsdec_set_picture_res
+
+
mix_videoconfigparamsdec_set_rate_control
+
+
mix_videoconfigparamsdec_set_raw_format
+
+
mix_videoconfigparamsdec_unref
+
+
mix_videoconfigparamsdec_vc1_new
+
+
mix_videoconfigparamsdec_vc1_unref
+
+
mix_videoconfigparamsenc_get_bit_rate
+
+
mix_videoconfigparamsenc_get_buffer_pool_size
+
+
mix_videoconfigparamsenc_get_ci_frame_info
+
+
mix_videoconfigparamsenc_get_drawable
+
+
mix_videoconfigparamsenc_get_encode_format
+
+
mix_videoconfigparamsenc_get_frame_rate
+
+
mix_videoconfigparamsenc_get_init_qp
+
+
mix_videoconfigparamsenc_get_intra_period
+
+
mix_videoconfigparamsenc_get_mime_type
+
+
mix_videoconfigparamsenc_get_min_qp
+
+
mix_videoconfigparamsenc_get_need_display
+
+
mix_videoconfigparamsenc_get_picture_res
+
+
mix_videoconfigparamsenc_get_profile
+
+
mix_videoconfigparamsenc_get_rate_control
+
+
mix_videoconfigparamsenc_get_raw_format
+
+
mix_videoconfigparamsenc_get_share_buf_mode
+
+
mix_videoconfigparamsenc_h264_get_bus
+
+
mix_videoconfigparamsenc_h264_get_delimiter_type
+
+
mix_videoconfigparamsenc_h264_get_dlk
+
+
mix_videoconfigparamsenc_h264_get_slice_num
+
+
mix_videoconfigparamsenc_h264_new
+
+
mix_videoconfigparamsenc_h264_set_bus
+
+
mix_videoconfigparamsenc_h264_set_delimiter_type
+
+
mix_videoconfigparamsenc_h264_set_dlk
+
+
mix_videoconfigparamsenc_h264_set_slice_num
+
+
mix_videoconfigparamsenc_h264_unref
+
+
mix_videoconfigparamsenc_mpeg4_get_dlk
+
+
mix_videoconfigparamsenc_mpeg4_get_fixed_vti
+
+
mix_videoconfigparamsenc_mpeg4_get_profile_level
+
+
mix_videoconfigparamsenc_mpeg4_new
+
+
mix_videoconfigparamsenc_mpeg4_set_dlk
+
+
mix_videoconfigparamsenc_mpeg4_set_fixed_vti
+
+
mix_videoconfigparamsenc_mpeg4_set_profile_level
+
+
mix_videoconfigparamsenc_mpeg4_unref
+
+
mix_videoconfigparamsenc_new
+
+
mix_videoconfigparamsenc_ref
+
+
mix_videoconfigparamsenc_set_bit_rate
+
+
mix_videoconfigparamsenc_set_buffer_pool_size
+
+
mix_videoconfigparamsenc_set_ci_frame_info
+
+
mix_videoconfigparamsenc_set_drawable
+
+
mix_videoconfigparamsenc_set_encode_format
+
+
mix_videoconfigparamsenc_set_frame_rate
+
+
mix_videoconfigparamsenc_set_init_qp
+
+
mix_videoconfigparamsenc_set_intra_period
+
+
mix_videoconfigparamsenc_set_mime_type
+
+
mix_videoconfigparamsenc_set_min_qp
+
+
mix_videoconfigparamsenc_set_need_display
+
+
mix_videoconfigparamsenc_set_picture_res
+
+
mix_videoconfigparamsenc_set_profile
+
+
mix_videoconfigparamsenc_set_rate_control
+
+
mix_videoconfigparamsenc_set_raw_format
+
+
mix_videoconfigparamsenc_set_share_buf_mode
+
+
mix_videoconfigparamsenc_unref
+
+
mix_videoconfigparams_new
+
+
mix_videoconfigparams_ref
+
+
mix_videoconfigparams_unref
+
+
mix_videodecodeparams_get_discontinuity
+
+
mix_videodecodeparams_get_timestamp
+
+
mix_videodecodeparams_new
+
+
mix_videodecodeparams_ref
+
+
mix_videodecodeparams_set_discontinuity
+
+
mix_videodecodeparams_set_timestamp
+
+
mix_videodecodeparams_unref
+
+
mix_videoencodeparams_new
+
+
mix_videoencodeparams_ref
+
+
mix_videoencodeparams_unref
+
+
mix_videoframe_get_ci_frame_idx
+
+
mix_videoframe_get_discontinuity
+
+
mix_videoframe_get_frame_id
+
+
mix_videoframe_get_timestamp
+
+
mix_videoframe_new
+
+
mix_videoframe_ref
+
+
mix_videoframe_set_ci_frame_idx
+
+
mix_videoframe_set_discontinuity
+
+
mix_videoframe_set_frame_id
+
+
mix_videoframe_set_timestamp
+
+
mix_videoframe_unref
+
+
mix_videoinitparams_get_display
+
+
mix_videoinitparams_new
+
+
mix_videoinitparams_ref
+
+
mix_videoinitparams_set_display
+
+
mix_videoinitparams_unref
+
+
mix_videorenderparams_get_clipping_rects
+
+
mix_videorenderparams_get_dest_rect
+
+
mix_videorenderparams_get_display
+
+
mix_videorenderparams_get_src_rect
+
+
mix_videorenderparams_new
+
+
mix_videorenderparams_ref
+
+
mix_videorenderparams_set_clipping_rects
+
+
mix_videorenderparams_set_dest_rect
+
+
mix_videorenderparams_set_display
+
+
mix_videorenderparams_set_src_rect
+
+
mix_videorenderparams_unref
+
+
mix_video_configure
+
+
mix_video_decode
+
+
mix_video_deinitialize
+
+
mix_video_encode
+
+
mix_video_eos
+
+
MIX_VIDEO_ERROR_CODE
+
+
mix_video_flush
+
+
mix_video_get_config
+
+
mix_video_get_frame
+
+
mix_video_get_max_coded_buffer_size
+
+
mix_video_get_mixbuffer
+
+
mix_video_get_state
+
+
mix_video_get_version
+
+
mix_video_initialize
+
+
mix_video_new
+
+
mix_video_ref
+
+
mix_video_release_frame
+
+
mix_video_release_mixbuffer
+
+
mix_video_render
+
+
mix_video_unref
+
+
+
+
+
+
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/ch01.html b/mix_video/docs/reference/MixVideo/html/ch01.html new file mode 100644 index 0000000..d3b325c --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/ch01.html @@ -0,0 +1,92 @@ + + + + +MI-X Video API + + + + + + + + + + + + + + + + + + + +
+

+MI-X Video API

+
+
+MixVideo — Object to support a single stream decoding or encoding using hardware accelerated decoder/encoder. +
+
+MixVideoInitParams — MI-X Video Initialization Parameters +
+
+MixDrmParams — Drm Parameters Base Object +
+
+MixDisplay — Lightweight Base Object for MI-X Video Display +
+
+MixDisplayX11 — MI-X Video X11 Display +
+
+MixBuffer — MI-X Video Buffer Parameters +
+
+MixVideoFrame — MI-X Video Frame Object +
+
+MixVideoConfigParams — MI-X Video Configuration Parameter Base Object +
+
+MixVideoConfigParamsDec — MI-X Video Decode Configuration Parameter Base Object +
+
+MixVideoConfigParamsDecVC1 — MI-X Video VC-1 Decode Configuration Parameter +
+
+MixVideoConfigParamsDecH264 — MI-X Video H.264 Decode Configuration Parameter +
+
+MixVideoConfigParamsDecMP42 — MI-X Video MPEG 4:2 Decode Configuration Parameter +
+
+MixVideoConfigParamsEnc — MI-X Video Encode Configuration Parameter Base Object +
+
+MixVideoConfigParamsEncH264 — MI-X Video H.264 Encode Configuration Parameter +
+
+MixVideoConfigParamsEncMPEG4 — MI-X Video MPEG 4:2 Encode Configuration Parameter +
+
+MixVideoDecodeParams — MI-X Video Decode Parameters +
+
+MixVideoEncodeParams — MI-X Video Encode Parameters +
+
+MixVideoRenderParams — MI-X Video Render Parameters +
+
+MI-X Video Data Definitions And Common Error Code — MI-X Video data definitions and common error code +
+
+
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/home.png b/mix_video/docs/reference/MixVideo/html/home.png new file mode 100644 index 0000000..1700361 Binary files /dev/null and b/mix_video/docs/reference/MixVideo/html/home.png differ diff --git a/mix_video/docs/reference/MixVideo/html/index.html b/mix_video/docs/reference/MixVideo/html/index.html new file mode 100644 index 0000000..4924a06 --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/index.html @@ -0,0 +1,96 @@ + + + + +Mi-X Video Reference Manual + + + + + + + + + + +
+
+
+
+

+ for MI-X Video 0.1 + +

+
+
+
+
+
MI-X Video API
+
+
+MixVideo — Object to support a single stream decoding or encoding using hardware accelerated decoder/encoder. +
+
+MixVideoInitParams — MI-X Video Initialization Parameters +
+
+MixDrmParams — Drm Parameters Base Object +
+
+MixDisplay — Lightweight Base Object for MI-X Video Display +
+
+MixDisplayX11 — MI-X Video X11 Display +
+
+MixBuffer — MI-X Video Buffer Parameters +
+
+MixVideoFrame — MI-X Video Frame Object +
+
+MixVideoConfigParams — MI-X Video Configuration Parameter Base Object +
+
+MixVideoConfigParamsDec — MI-X Video Decode Configuration Parameter Base Object +
+
+MixVideoConfigParamsDecVC1 — MI-X Video VC-1 Decode Configuration Parameter +
+
+MixVideoConfigParamsDecH264 — MI-X Video H.264 Decode Configuration Parameter +
+
+MixVideoConfigParamsDecMP42 — MI-X Video MPEG 4:2 Decode Configuration Parameter +
+
+MixVideoConfigParamsEnc — MI-X Video Encode Configuration Parameter Base Object +
+
+MixVideoConfigParamsEncH264 — MI-X Video H.264 Encode Configuration Parameter +
+
+MixVideoConfigParamsEncMPEG4 — MI-X Video MPEG 4:2 Encode Configuration Parameter +
+
+MixVideoDecodeParams — MI-X Video Decode Paramters +
+
+MixVideoEncodeParams — MI-X Video Encode Parameters +
+
+MixVideoRenderParams — MI-X Video Render Parameters +
+
+MI-X Video Data Definitions And Common Error Code — MI-X Video data definitions and common error code +
+
+
Object Hierarchy
+
API Index
+
+
+ + + diff --git a/mix_video/docs/reference/MixVideo/html/index.sgml b/mix_video/docs/reference/MixVideo/html/index.sgml new file mode 100644 index 0000000..7a6764e --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/index.sgml @@ -0,0 +1,307 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/mix_video/docs/reference/MixVideo/html/left.png b/mix_video/docs/reference/MixVideo/html/left.png new file mode 100644 index 0000000..2d05b3d Binary files /dev/null and b/mix_video/docs/reference/MixVideo/html/left.png differ diff --git a/mix_video/docs/reference/MixVideo/html/object-tree.html b/mix_video/docs/reference/MixVideo/html/object-tree.html new file mode 100644 index 0000000..31a1ca9 --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/object-tree.html @@ -0,0 +1,55 @@ + + + + +Object Hierarchy + + + + + + + + + + + + + + + + + + + + + + + diff --git a/mix_video/docs/reference/MixVideo/html/right.png b/mix_video/docs/reference/MixVideo/html/right.png new file mode 100644 index 0000000..92832e3 Binary files /dev/null and b/mix_video/docs/reference/MixVideo/html/right.png differ diff --git a/mix_video/docs/reference/MixVideo/html/style.css b/mix_video/docs/reference/MixVideo/html/style.css new file mode 100644 index 0000000..bb44c28 --- /dev/null +++ b/mix_video/docs/reference/MixVideo/html/style.css @@ -0,0 +1,167 @@ +.synopsis, .classsynopsis +{ + background: #eeeeee; + border: solid 1px #aaaaaa; + padding: 0.5em; +} +.programlisting +{ + background: #eeeeff; + border: solid 1px #aaaaff; + padding: 0.5em; +} +.variablelist +{ + padding: 4px; + margin-left: 3em; +} +.variablelist td:first-child +{ + vertical-align: top; +} + +/* this is needed so that the local anchors are displayed below the naviagtion */ +@media screen { + sup a.footnote + { + position: relative; + top: 0em ! 
important; + } + div.refnamediv a[name], div.refsect1 a[name] + { + position: relative; + top: -4.5em; + } + table.navigation#top + { + background: #ffeeee; + border: solid 1px #ffaaaa; + margin-top: 0; + margin-bottom: 0; + position: fixed; + top: 0; + left: 0; + height: 2em; + z-index: 1; + } + .navigation a + { + color: #770000; + } + .navigation a:visited + { + color: #550000; + } + td.shortcuts + { + color: #770000; + font-size: 80%; + white-space: nowrap; + } + div.refentry, div.chapter, div.reference, div.part, div.book, div.glossary, div.sect1, div.appendix, div.preface + { + position: relative; + top: 3em; + z-index: 0; + } + div.glossary, div.index + { + position: relative; + top: 2em; + z-index: 0; + } + div.refnamediv + { + margin-top: 2em; + } + body + { + padding-bottom: 20em; + } +} +@media print { + table.navigation { + visibility: collapse; + display: none; + } + div.titlepage table.navigation { + visibility: visible; + display: table; + background: #ffeeee; + border: solid 1px #ffaaaa; + margin-top: 0; + margin-bottom: 0; + top: 0; + left: 0; + height: 2em; + } +} + +.navigation .title +{ + font-size: 200%; +} + + +div.gallery-float +{ + float: left; + padding: 10px; +} +div.gallery-float img +{ + border-style: none; +} +div.gallery-spacer +{ + clear: both; +} +a +{ + text-decoration: none; +} +a:hover +{ + text-decoration: underline; + color: #FF0000; +} + +div.table table +{ + border-collapse: collapse; + border-spacing: 0px; + border-style: solid; + border-color: #777777; + border-width: 1px; +} + +div.table table td, div.table table th +{ + border-style: solid; + border-color: #777777; + border-width: 1px; + padding: 3px; + vertical-align: top; +} + +div.table table th +{ + background-color: #eeeeee; +} + +hr +{ + color: #777777; + background: #777777; + border: 0; + height: 1px; + clear: both; +} + +.footer +{ + padding-top: 3.5em; + color: #777777; + text-align: center; + font-size: 80%; +} diff --git a/mix_video/docs/reference/MixVideo/html/up.png b/mix_video/docs/reference/MixVideo/html/up.png new file mode 100644 index 0000000..85b3e2a Binary files /dev/null and b/mix_video/docs/reference/MixVideo/html/up.png differ diff --git a/mix_video/mixvideo.spec b/mix_video/mixvideo.spec index 8e5efb4..50752eb 100644 --- a/mix_video/mixvideo.spec +++ b/mix_video/mixvideo.spec @@ -6,7 +6,7 @@ Summary: MIX Video Name: mixvideo -Version: 0.1.15 +Version: 0.1.17 Release: 1 Source0: %{name}-%{version}.tar.gz NoSource: 0 @@ -24,7 +24,7 @@ MIX Video is an user library interface for various video codecs available on the %package devel Summary: Libraries include files Group: Development/Libraries -Requires: %{name} = %{version}, mixcommon-devel , glib2-devel, mixvbp-devel +Requires: %{name} = %{version}, mixcommon-devel , glib2-devel %description devel The %{name}-devel package contains the header files and static libraries for building applications which use %{name}. 
diff --git a/mix_video/src/Android.mk b/mix_video/src/Android.mk
index 0e6cb3d..718fddf 100644
--- a/mix_video/src/Android.mk
+++ b/mix_video/src/Android.mk
@@ -7,8 +7,7 @@ LOCAL_SRC_FILES := \
 	mixbuffer.c \
 	mixbufferpool.c \
 	mixdisplay.c \
-	mixdisplayx11.c \
-	mixdrmparams.c \
+	mixdisplayandroid.c \
 	mixframemanager.c \
 	mixsurfacepool.c \
 	mixvideo.c \
@@ -20,6 +19,7 @@ LOCAL_SRC_FILES := \
 	mixvideoconfigparamsdec_vc1.c \
 	mixvideoconfigparamsenc.c \
 	mixvideoconfigparamsenc_h264.c \
+	mixvideoconfigparamsenc_h263.c \
 	mixvideoconfigparamsenc_mpeg4.c \
 	mixvideoconfigparamsenc_preview.c \
 	mixvideodecodeparams.c \
@@ -30,6 +30,7 @@ LOCAL_SRC_FILES := \
 	mixvideoformat_vc1.c \
 	mixvideoformatenc.c \
 	mixvideoformatenc_h264.c \
+	mixvideoformatenc_h263.c \
 	mixvideoformatenc_mpeg4.c \
 	mixvideoformatenc_preview.c \
 	mixvideoframe.c \
@@ -61,12 +62,15 @@ LOCAL_SHARED_LIBRARIES := \
 	libgmodule-2.0 \
 	libmixcommon \
 	libmixvbp \
-	libva
+	libva \
+	libva-android \
+	libva-tpi
 
-#ifeq ($(strip $(MIXVIDEO_LOG_ENABLE)),true)
-#LOCAL_CFLAGS += -DMIX_LOG_ENABLE
-#LOCAL_SHARED_LIBRARIES += liblog
-#endif
+LOCAL_CFLAGS += -DANDROID
+ifeq ($(strip $(MIXVIDEO_LOG_ENABLE)),true)
+LOCAL_CFLAGS += -DMIX_LOG_ENABLE
+LOCAL_SHARED_LIBRARIES += liblog
+endif
 
 LOCAL_COPY_HEADERS_TO := libmixvideo
 
@@ -75,8 +79,7 @@ LOCAL_COPY_HEADERS := \
 	mixbuffer_private.h \
 	mixbufferpool.h \
 	mixdisplay.h \
-	mixdisplayx11.h \
-	mixdrmparams.h \
+	mixdisplayandroid.h \
 	mixframemanager.h \
 	mixsurfacepool.h \
 	mixvideo.h \
diff --git a/mix_video/src/Makefile.am b/mix_video/src/Makefile.am
index 20c601b..cbbe071 100644
--- a/mix_video/src/Makefile.am
+++ b/mix_video/src/Makefile.am
@@ -10,7 +10,6 @@ lib_LTLIBRARIES = libmixvideo.la
 ##############################################################################
 # sources used to compile
 libmixvideo_la_SOURCES = mixdisplay.c \
-	mixdrmparams.c \
 	mixvideo.c \
 	mixvideoconfigparams.c \
 	mixvideoconfigparamsdec.c \
@@ -35,11 +34,13 @@ libmixvideo_la_SOURCES = mixdisplay.c \
 	mixvideoformatenc_h264.c \
 	mixvideoformatenc_mpeg4.c \
 	mixvideoformatenc_preview.c \
-	mixvideoconfigparamsenc.c \
+	mixvideoformatenc_h263.c \
+	mixvideoconfigparamsenc.c \
 	mixvideoconfigparamsenc_h264.c \
 	mixvideoconfigparamsenc_mpeg4.c \
 	mixvideoconfigparamsenc_preview.c \
-	mixvideoencodeparams.c
+	mixvideoconfigparamsenc_h263.c \
+	mixvideoencodeparams.c
 
 if MIXLOG_ENABLED
 MIXLOG_CFLAGS = -DMIX_LOG_ENABLE
@@ -78,6 +79,7 @@ libmixvideo_la_LDFLAGS = $(GLIB_LIBS) \
 	$(LIBVA_X11_LIBS) \
 	$(MIXCOMMON_LIBS) \
 	$(MIXVBP_LIBS) \
+	-lva-tpi \
 	-version-info @MIXVIDEO_CURRENT@:@MIXVIDEO_REVISION@:@MIXVIDEO_AGE@
 
 libmixvideo_la_LIBTOOLFLAGS = --tag=disable-static
@@ -98,7 +100,8 @@ noinst_HEADERS = mixvideoformat.h \
 	mixvideoformatenc_h264.h \
 	mixvideoformatenc_mpeg4.h \
 	mixvideoformatenc_preview.h \
-	mixvideoformatenc.h \
+	mixvideoformatenc_h263.h \
+	mixvideoformatenc.h \
 	mixvideolog.h
 
 # TODO: decide whether a /usr/include/mix is needed for mix headers
@@ -113,7 +116,6 @@ mixinclude_HEADERS = mixvideodef.h \
 	mixvideoframe.h \
 	mixvideoinitparams.h \
 	mixdisplay.h \
-	mixdrmparams.h \
 	mixvideocaps.h \
 	mixvideodecodeparams.h \
 	mixvideoencodeparams.h \
@@ -123,7 +125,8 @@ mixinclude_HEADERS = mixvideodef.h \
 	mixvideoconfigparamsenc_h264.h \
 	mixvideoconfigparamsenc_mpeg4.h \
 	mixvideoconfigparamsenc_preview.h \
-	mixvideoconfigparamsenc.h
+	mixvideoconfigparamsenc_h263.h \
+	mixvideoconfigparamsenc.h
 
 mixintincludedir=$(includedir)/mixvideoint
 
diff --git a/mix_video/src/mixbuffer.c b/mix_video/src/mixbuffer.c
index 6d51966..3a19a5b 100644
--- a/mix_video/src/mixbuffer.c
+++ b/mix_video/src/mixbuffer.c
@@ -8,9 +8,22 @@
 
 /**
  * SECTION:mixbuffer
- * @short_description: VideoConfig parameters
+ * @short_description: MI-X Video Buffer Parameters
  *
- * A data object which stores videoconfig specific parameters.
+ *
+ * #MixBuffer objects are used to wrap input data buffers in a reference counted object as
+ * described in the buffer model section. Data buffers themselves are allocated by the
+ * App/MMF. #MixBuffer objects are allocated by #MixVideo in a pool and retrieved by the
+ * application using mix_video_get_mixbuffer(). The application will wrap a data buffer
+ * in a #MixBuffer object and pass it into mix_video_decode() or mix_video_encode().
+ *
+ *
+ * The #MixBuffer objects will be released by #MixVideo when they are no longer needed
+ * for the decode or encode operation. The App/MMF will also release the #MixBuffer
+ * object after use. When the #MixBuffer is completely released, the callback to the
+ * function registered in the #MixBuffer will be called (allowing the App/MMF to release
+ * data buffers as necessary).
+ *
  */
 
 #include "mixvideolog.h"
diff --git a/mix_video/src/mixbuffer.h b/mix_video/src/mixbuffer.h
index 53d2e1c..6977e92 100644
--- a/mix_video/src/mixbuffer.h
+++ b/mix_video/src/mixbuffer.h
@@ -63,18 +63,34 @@ typedef struct _MixBufferClass MixBufferClass;
 /**
  * MixBuffer:
  *
- * MI-X VideoConfig Parameter object
+ * MI-X Buffer Parameter object
  */
 struct _MixBuffer {
 	/*< public > */
 	MixParams parent;
 
 	/*< public > */
+
+	/* Pointer to coded data buffer */
 	guchar *data;
+
+	/* Size of coded data buffer */
 	guint size;
+
+	/* Token that will be passed to
+	 * the callback function. Can be
+	 * used by the application for
+	 * any information to be associated
+	 * with this coded data buffer,
+	 * such as a pointer to a structure
+	 * belonging to the application. */
 	gulong token;
+
+	/* callback function pointer */
 	MixBufferCallback callback;
 
+	/* < private > */
+	/* reserved */
 	gpointer reserved;
 };
 
@@ -108,7 +124,7 @@ MixBuffer *mix_buffer_new(void);
 /**
  * mix_buffer_ref:
  * @mix: object to add reference
- * @returns: the MixBuffer instance where reference count has been increased.
+ * @returns: the #MixBuffer instance where reference count has been increased.
  *
  * Add reference count.
  */
@@ -124,6 +140,17 @@ void mix_buffer_unref(MixBuffer * mix);
 
 /* Class Methods */
 
+/**
+ * mix_buffer_set_data:
+ * @obj: #MixBuffer object
+ * @data: data buffer
+ * @size: data buffer size
+ * @token: token
+ * @callback: callback function pointer
+ * @returns: Common Video Error Return Codes
+ *
+ * Set data buffer, size, token and callback function
+ */
 MIX_RESULT mix_buffer_set_data(MixBuffer * obj, guchar *data, guint size,
 		gulong token, MixBufferCallback callback);
 
diff --git a/mix_video/src/mixdisplay.c b/mix_video/src/mixdisplay.c
index d6da0e9..cd12846 100644
--- a/mix_video/src/mixdisplay.c
+++ b/mix_video/src/mixdisplay.c
@@ -8,7 +8,7 @@ No license under any patent, copyright, trade secret or other intellectual prope
 
 /**
 * SECTION:mixdisplay
-* @short_description: Lightweight base class for the MIX media display
+* @short_description: Lightweight Base Object for MI-X Video Display
 *
 */
 #ifdef HAVE_CONFIG_H
@@ -129,8 +129,8 @@ mix_display_copy (MixDisplay * target, const MixDisplay * src)
 
 /**
 * mix_display_copy_default:
-* @target:
-* @src:
+* @target: target
+* @src: source
 *
 * The default copy method of this object. Perhaps copy at this level.
 * Assign this to the copy vmethod.
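[Editorial aside, not part of the patch: the buffer model documented in the mixbuffer SECTION above lends itself to a short usage sketch. The MixBufferCallback shape and the mix_video_get_mixbuffer()/mix_video_decode() prototypes below are assumptions inferred from the surrounding comments; consult mixvideo.h and mixbuffer.h for the real declarations.]

#include <glib.h>
#include "mixvideo.h"	/* sketch assumes this pulls in mixbuffer.h */

/* Assumed callback shape: invoked once the MixBuffer is fully released. */
static void release_coded_buffer(gulong token, guchar *data)
{
	g_free(data);	/* the App/MMF may now free or recycle its data buffer */
}

static MIX_RESULT feed_decoder(MixVideo *video, guchar *data, guint size,
		MixVideoDecodeParams *params)
{
	MixBuffer *buf = NULL;

	/* MixBuffer objects come from MixVideo's pool, not from the app. */
	MIX_RESULT ret = mix_video_get_mixbuffer(video, &buf);
	if (ret != MIX_RESULT_SUCCESS)
		return ret;

	/* Wrap the App/MMF-owned coded data; token can carry app context. */
	ret = mix_buffer_set_data(buf, data, size, (gulong) data,
			release_coded_buffer);
	if (ret == MIX_RESULT_SUCCESS)
		ret = mix_video_decode(video, &buf, 1, params);	/* assumed prototype */

	/* Drop the app reference; MixVideo holds its own until it is done. */
	mix_buffer_unref(buf);
	return ret;
}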
diff --git a/mix_video/src/mixdisplay.h b/mix_video/src/mixdisplay.h
index daaa5ed..04c8637 100644
--- a/mix_video/src/mixdisplay.h
+++ b/mix_video/src/mixdisplay.h
@@ -170,8 +170,8 @@ void mix_display_unref (MixDisplay * obj);
 
 /**
 * mix_display_replace:
-* @olddata:
-* @newdata:
+* @olddata: old data
+* @newdata: new data
 *
 * Replace a pointer of the object with the new one.
 */
diff --git a/mix_video/src/mixdisplayandroid.c b/mix_video/src/mixdisplayandroid.c
new file mode 100644
index 0000000..08bec6d
--- /dev/null
+++ b/mix_video/src/mixdisplayandroid.c
@@ -0,0 +1,197 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixdisplayandroid
+ * @short_description: MI-X Video Android Display
+ *
+ * A data object which stores Android-specific parameters.
+ *
+ *
+ * Data Structures Used in MixDisplayAndroid Fields:
+ *
+ */
+
+#ifdef ANDROID
+
+#include "mixdisplayandroid.h"
+
+#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; }
+
+static GType _mix_displayandroid_type = 0;
+static MixDisplayClass *parent_class = NULL;
+
+#define _do_init { _mix_displayandroid_type = g_define_type_id; }
+
+gboolean mix_displayandroid_copy(MixDisplay * target, const MixDisplay * src);
+MixDisplay *mix_displayandroid_dup(const MixDisplay * obj);
+gboolean mix_displayandroid_equal(MixDisplay * first, MixDisplay * second);
+static void mix_displayandroid_finalize(MixDisplay * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixDisplayAndroid, mix_displayandroid,
+		MIX_TYPE_DISPLAY, _do_init);
+
+static void mix_displayandroid_init(MixDisplayAndroid * self) {
+
+	/* Initialize member variables */
+	self->display = NULL;
+//	self->drawable = 0;
+}
+
+static void mix_displayandroid_class_init(MixDisplayAndroidClass * klass) {
+	MixDisplayClass *mixdisplay_class = MIX_DISPLAY_CLASS(klass);
+
+	/* setup static parent class */
+	parent_class = (MixDisplayClass *) g_type_class_peek_parent(klass);
+
+	mixdisplay_class->finalize = mix_displayandroid_finalize;
+	mixdisplay_class->copy = (MixDisplayCopyFunction) mix_displayandroid_copy;
+	mixdisplay_class->dup = (MixDisplayDupFunction) mix_displayandroid_dup;
+	mixdisplay_class->equal = (MixDisplayEqualFunction) mix_displayandroid_equal;
+}
+
+MixDisplayAndroid *
+mix_displayandroid_new(void) {
+	MixDisplayAndroid *ret = (MixDisplayAndroid *) g_type_create_instance(
+			MIX_TYPE_DISPLAYANDROID);
+
+	return ret;
+}
+
+void mix_displayandroid_finalize(MixDisplay * obj) {
+	/* clean up here. */
+	/* MixDisplayAndroid *self = MIX_DISPLAYANDROID (obj); */
+
+	/* NOTE: we don't need to do anything
+	 * with display and drawable */
+
+	/* Chain up parent */
+	if (parent_class->finalize)
+		parent_class->finalize(obj);
+}
+
+MixDisplayAndroid *
+mix_displayandroid_ref(MixDisplayAndroid * mix) {
+	return (MixDisplayAndroid *) mix_display_ref(MIX_DISPLAY(mix));
+}
+
+/**
+ * mix_displayandroid_dup:
+ * @obj: a #MixDisplayAndroid object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Create a duplicate of the object.
+ */
+MixDisplay *
+mix_displayandroid_dup(const MixDisplay * obj) {
+	MixDisplay *ret = NULL;
+
+	if (MIX_IS_DISPLAYANDROID(obj)) {
+		MixDisplayAndroid *duplicate = mix_displayandroid_new();
+		if (mix_displayandroid_copy(MIX_DISPLAY(duplicate), MIX_DISPLAY(obj))) {
+			ret = MIX_DISPLAY(duplicate);
+		} else {
+			mix_displayandroid_unref(duplicate);
+		}
+	}
+	return ret;
+}
+
+/**
+ * mix_displayandroid_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicating if the copy was successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_displayandroid_copy(MixDisplay * target, const MixDisplay * src) {
+	MixDisplayAndroid *this_target, *this_src;
+
+	if (MIX_IS_DISPLAYANDROID(target) && MIX_IS_DISPLAYANDROID(src)) {
+		// Cast the base object to this child object
+		this_target = MIX_DISPLAYANDROID(target);
+		this_src = MIX_DISPLAYANDROID(src);
+
+		// Copy properties from source to target.
+
+		this_target->display = this_src->display;
+//		this_target->drawable = this_src->drawable;
+
+		// Now chainup base class
+		if (parent_class->copy) {
+			return parent_class->copy(MIX_DISPLAY_CAST(target),
+					MIX_DISPLAY_CAST(src));
+		} else {
+			return TRUE;
+		}
+	}
+	return FALSE;
+}
+
+/**
+ * mix_displayandroid_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating if the instances are equal.
+ *
+ * Compare instance data of @first and @second.
+ */
+gboolean mix_displayandroid_equal(MixDisplay * first, MixDisplay * second) {
+	gboolean ret = FALSE;
+
+	MixDisplayAndroid *this_first, *this_second;
+
+	this_first = MIX_DISPLAYANDROID(first);
+	this_second = MIX_DISPLAYANDROID(second);
+
+	if (MIX_IS_DISPLAYANDROID(first) && MIX_IS_DISPLAYANDROID(second)) {
+		// Compare member variables
+
+		// TODO: if in the copy method we just copy the pointer of display, the comparison
+		// below is enough. But we need to decide how to copy!
+
+		if (this_first->display == this_second->display /*&& this_first->drawable
+				== this_second->drawable*/) {
+			// members within this scope equal. chaining up.
+ MixDisplayClass *klass = MIX_DISPLAY_CLASS(parent_class); + if (klass->equal) + ret = parent_class->equal(first, second); + else + ret = TRUE; + } + } + return ret; +} + +#define MIX_DISPLAYANDROID_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_DISPLAYANDROID(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_DISPLAYANDROID_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || prop == NULL) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_DISPLAYANDROID(obj)) return MIX_RESULT_FAIL; \ + +MIX_RESULT mix_displayandroid_set_display(MixDisplayAndroid * obj, void * display) { + MIX_DISPLAYANDROID_SETTER_CHECK_INPUT (obj); + + // TODO: needs to decide to clone or just copy pointer + obj->display = display; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_displayandroid_get_display(MixDisplayAndroid * obj, void ** display) { + MIX_DISPLAYANDROID_GETTER_CHECK_INPUT (obj, display); + + // TODO: needs to decide to clone or just copy pointer + *display = obj->display; + + return MIX_RESULT_SUCCESS; +} + +#endif /* ANDROID */ diff --git a/mix_video/src/mixdisplayandroid.h b/mix_video/src/mixdisplayandroid.h new file mode 100644 index 0000000..95fe951 --- /dev/null +++ b/mix_video/src/mixdisplayandroid.h @@ -0,0 +1,172 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_DISPLAYANDROID_H__ +#define __MIX_DISPLAYANDROID_H__ + +#include "mixdisplay.h" +#include "mixvideodef.h" + +//#ifdef ANDROID +//#include +//using namespace android; +//#endif + +//#ifdef __cplusplus +//extern "C" { +//#endif + +#ifdef ANDROID + +/** +* MIX_TYPE_DISPLAYANDROID: +* +* Get type of class. +*/ +#define MIX_TYPE_DISPLAYANDROID (mix_displayandroid_get_type ()) + +/** +* MIX_DISPLAYANDROID: +* @obj: object to be type-casted. +*/ +#define MIX_DISPLAYANDROID(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DISPLAYANDROID, MixDisplayAndroid)) + +/** +* MIX_IS_DISPLAYANDROID: +* @obj: an object. +* +* Checks if the given object is an instance of #MixDisplay +*/ +#define MIX_IS_DISPLAYANDROID(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DISPLAYANDROID)) + +/** +* MIX_DISPLAYANDROID_CLASS: +* @klass: class to be type-casted. +*/ +#define MIX_DISPLAYANDROID_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DISPLAYANDROID, MixDisplayAndroidClass)) + +/** +* MIX_IS_DISPLAYANDROID_CLASS: +* @klass: a class. 
+*
+* Checks if the given class is #MixDisplayAndroidClass
+*/
+#define MIX_IS_DISPLAYANDROID_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DISPLAYANDROID))
+
+/**
+* MIX_DISPLAYANDROID_GET_CLASS:
+* @obj: a #MixDisplayAndroid object.
+*
+* Get the class instance of the object.
+*/
+#define MIX_DISPLAYANDROID_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DISPLAYANDROID, MixDisplayAndroidClass))
+
+typedef struct _MixDisplayAndroid MixDisplayAndroid;
+typedef struct _MixDisplayAndroidClass MixDisplayAndroidClass;
+
+/**
+* MixDisplayAndroid:
+*
+* MI-X Video Android Display object
+*/
+struct _MixDisplayAndroid
+{
+  /*< public > */
+  MixDisplay parent;
+
+  /*< public > */
+
+  /* Pointer to a Android specific display */
+  void *display;
+
+  /* An Android drawable that is a smart pointer
+   * of ISurface. This field is not used in
+   * mix_video_initialize().
+   */
+  // sp<ISurface> drawable;
+};
+
+/**
+* MixDisplayAndroidClass:
+*
+* MI-X Video Android Display object class
+*/
+struct _MixDisplayAndroidClass
+{
+  /*< public > */
+  MixDisplayClass parent_class;
+
+  /* class members */
+};
+
+/**
+* mix_displayandroid_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_displayandroid_get_type (void);
+
+/**
+* mix_displayandroid_new:
+* @returns: A newly allocated instance of #MixDisplayAndroid
+*
+* Use this method to create new instance of #MixDisplayAndroid
+*/
+MixDisplayAndroid *mix_displayandroid_new (void);
+/**
+* mix_displayandroid_ref:
+* @mix: object to add reference
+* @returns: the #MixDisplayAndroid instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixDisplayAndroid *mix_displayandroid_ref (MixDisplayAndroid * mix);
+
+/**
+* mix_displayandroid_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_displayandroid_unref(obj) mix_display_unref(MIX_DISPLAY(obj))
+
+/* Class Methods */
+
+
+/**
+ * mix_displayandroid_set_display:
+ * @obj: #MixDisplayAndroid object
+ * @display: Pointer to Android specific display
+ * @returns: Common Video Error Return Codes
+ *
+ * Set Display
+ */
+MIX_RESULT mix_displayandroid_set_display (MixDisplayAndroid * obj,
+					void * display);
+
+/**
+ * mix_displayandroid_get_display:
+ * @obj: #MixDisplayAndroid object
+ * @display: Pointer to pointer of Android specific display
+ * @returns: Common Video Error Return Codes
+ *
+ * Get Display
+ */
+MIX_RESULT mix_displayandroid_get_display (MixDisplayAndroid * obj,
+					void ** display);
+
+
+#endif /* ANDROID */
+
+//#ifdef __cplusplus
+//}
+//#endif
+
+#endif /* __MIX_DISPLAYANDROID_H__ */
+
diff --git a/mix_video/src/mixdisplayx11.c b/mix_video/src/mixdisplayx11.c
index 7398234..43839ab 100644
--- a/mix_video/src/mixdisplayx11.c
+++ b/mix_video/src/mixdisplayx11.c
@@ -8,9 +8,14 @@
 
 /**
  * SECTION:mixdisplayx11
- * @short_description: VideoInit parameters
+ * @short_description: MI-X Video X11 Display
  *
- * A data object which stores videoinit specific parameters.
+ * A data object which stores X11-specific parameters.
+ *
+ *
+ * Data Structures Used in MixDisplayX11 Fields:
+ * See X11/Xlib.h for Display and Drawable definitions.
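+ *
+ * Editorial illustration (not part of the original patch): a minimal
+ * sketch of how an application might populate this object, using only
+ * the setters declared in mixdisplayx11.h; the XOpenDisplay() call is
+ * standard Xlib and shown for context, and xwindow stands for an
+ * already-created, mapped X11 window:
+ * |[
+ * Display *xdisplay = XOpenDisplay(NULL);
+ * MixDisplayX11 *mix_display = mix_displayx11_new();
+ * mix_displayx11_set_display(mix_display, xdisplay);
+ * mix_displayx11_set_drawable(mix_display, xwindow);
+ * ]|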
+ * */ #include "mixdisplayx11.h" @@ -20,12 +25,6 @@ static GType _mix_displayx11_type = 0; static MixDisplayClass *parent_class = NULL; -#ifdef ANDROID -int XSync(Display* display, Bool bvalue) { - return 0; -} -#endif - #define _do_init { _mix_displayx11_type = g_define_type_id; } gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src); diff --git a/mix_video/src/mixdisplayx11.h b/mix_video/src/mixdisplayx11.h index d6db5bd..8b1788d 100644 --- a/mix_video/src/mixdisplayx11.h +++ b/mix_video/src/mixdisplayx11.h @@ -11,16 +11,7 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixdisplay.h" #include "mixvideodef.h" -#ifndef ANDROID #include -#else -#define Display unsigned int -#define Drawable unsigned int -#define Bool int -#define True 1 -#define False 0 -int XSync(Display* display, Bool bvalue); -#endif /** * MIX_TYPE_DISPLAYX11: @@ -80,7 +71,13 @@ struct _MixDisplayX11 /*< public > */ + /* Pointer to a X Window Display structure */ Display *display; + + /* An X Window Drawable that is either a Window + * or a Pixmap. This field is not used in + * mix_video_initialize(). + * See X11/Xlib.h for Display and Drawable definitions.*/ Drawable drawable; }; @@ -115,7 +112,7 @@ MixDisplayX11 *mix_displayx11_new (void); /** * mix_displayx11_ref: * @mix: object to add reference -* @returns: the MixDisplayX11 instance where reference count has been increased. +* @returns: the #MixDisplayX11 instance where reference count has been increased. * * Add reference count. */ @@ -131,19 +128,48 @@ MixDisplayX11 *mix_displayx11_ref (MixDisplayX11 * mix); /* Class Methods */ -/* -TO DO: Add documents -*/ +/** + * mix_displayx11_set_display: + * @obj: #MixDisplayX11 object + * @display: Pointer to a X Window Display structure + * @returns: Common Video Error Return Codes + * + * Set Display + */ MIX_RESULT mix_displayx11_set_display (MixDisplayX11 * obj, Display * display); +/** + * mix_displayx11_get_display: + * @obj: #MixDisplayX11 object + * @display: Pointer to pointer of X Window Display structure + * @returns: Common Video Error Return Codes + * + * Get Display + */ MIX_RESULT mix_displayx11_get_display (MixDisplayX11 * obj, Display ** dislay); +/** + * mix_displayx11_set_drawable: + * @obj: #MixDisplayX11 object + * @drawable: An X Window Drawable that is either a Window or a Pixmap. + * @returns: Common Video Error Return Codes + * + * Set drawable + */ MIX_RESULT mix_displayx11_set_drawable (MixDisplayX11 * obj, Drawable drawable); +/** + * mix_displayx11_get_drawable: + * @obj: #MixDisplayX11 object + * @drawable: An X Window Drawable that is either a Window or a Pixmap to be returned. + * @returns: Common Video Error Return Codes + * + * Get drawable + */ MIX_RESULT mix_displayx11_get_drawable (MixDisplayX11 * obj, Drawable * drawable); diff --git a/mix_video/src/mixdrmparams.c b/mix_video/src/mixdrmparams.c deleted file mode 100644 index 336393b..0000000 --- a/mix_video/src/mixdrmparams.c +++ /dev/null @@ -1,189 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. 
The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** -* SECTION:mixdrmparams -* @short_description: Drm parameters -* -* A data object which stores drm specific parameters. -*/ - -#include "mixdrmparams.h" - -static GType _mix_drmparams_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_drmparams_type = g_define_type_id; } - -gboolean mix_drmparams_copy (MixParams * target, const MixParams * src); -MixParams *mix_drmparams_dup (const MixParams * obj); -gboolean mix_drmparams_equal (MixParams * first, MixParams * second); -static void mix_drmparams_finalize (MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixDrmParams, mix_drmparams, MIX_TYPE_PARAMS, - _do_init); - -static void -mix_drmparams_init (MixDrmParams * self) -{ - /* initialize properties here */ - - /* TODO: initialize properties */ -} - -static void -mix_drmparams_class_init (MixDrmParamsClass * klass) -{ - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS (klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent (klass); - - mixparams_class->finalize = mix_drmparams_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_drmparams_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_drmparams_dup; - mixparams_class->equal = (MixParamsEqualFunction) mix_drmparams_equal; -} - -MixDrmParams * -mix_drmparams_new (void) -{ - MixDrmParams *ret = - (MixDrmParams *) g_type_create_instance (MIX_TYPE_DRMPARAMS); - - return ret; -} - -void -mix_drmparams_finalize (MixParams * obj) -{ - /* clean up here. */ - /* TODO: cleanup resources allocated */ - - /* Chain up parent */ - if (parent_class->finalize) - { - parent_class->finalize (obj); - } -} - -MixDrmParams * -mix_drmparams_ref (MixDrmParams * mix) -{ - return (MixDrmParams *) mix_params_ref (MIX_PARAMS (mix)); -} - -/** -* mix_drmparams_dup: -* @obj: a #MixDrmParams object -* @returns: a newly allocated duplicate of the object. -* -* Copy duplicate of the object. -*/ -MixParams * -mix_drmparams_dup (const MixParams * obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_DRMPARAMS (obj)) - { - MixDrmParams *duplicate = mix_drmparams_new (); - if (mix_drmparams_copy (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) - { - ret = MIX_PARAMS (duplicate); - } - else - { - mix_drmparams_unref (duplicate); - } - } - return ret; -} - -/** -* mix_drmparams_copy: -* @target: copy to target -* @src: copy from src -* @returns: boolean indicates if copy is successful. -* -* Copy instance data from @src to @target. 
-*/ -gboolean -mix_drmparams_copy (MixParams * target, const MixParams * src) -{ - MixDrmParams *this_target, *this_src; - - if (MIX_IS_DRMPARAMS (target) && MIX_IS_DRMPARAMS (src)) - { - // Cast the base object to this child object - this_target = MIX_DRMPARAMS (target); - this_src = MIX_DRMPARAMS (src); - - // TODO: copy properties */ - - // Now chainup base class - if (parent_class->copy) - { - return parent_class->copy (MIX_PARAMS_CAST (target), - MIX_PARAMS_CAST (src)); - } - else - { - return TRUE; - } - } - return FALSE; -} - -/** -* mix_drmparams_: -* @first: first object to compare -* @second: seond object to compare -* @returns: boolean indicates if instance are equal. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_drmparams_equal (MixParams * first, MixParams * second) -{ - gboolean ret = FALSE; - MixDrmParams *this_first, *this_second; - - if (MIX_IS_DRMPARAMS (first) && MIX_IS_DRMPARAMS (second)) - { - // Deep compare - // Cast the base object to this child object - - this_first = MIX_DRMPARAMS (first); - this_second = MIX_DRMPARAMS (second); - - /* TODO: add comparison for properties */ - /* if ( first properties == sencod properties) */ - { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); - if (klass->equal) - ret = parent_class->equal (first, second); - else - ret = TRUE; - } - } - - return ret; -} - -#define MIX_DRMPARAMS_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_DRMPARAMS(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_DRMPARAMS_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_DRMPARAMS(obj)) return MIX_RESULT_FAIL; \ - - -/* TODO: Add getters and setters for properties. */ diff --git a/mix_video/src/mixdrmparams.h b/mix_video/src/mixdrmparams.h deleted file mode 100644 index d5ffdbe..0000000 --- a/mix_video/src/mixdrmparams.h +++ /dev/null @@ -1,126 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_DRMPARAMS_H__ -#define __MIX_DRMPARAMS_H__ - -#include -#include "mixvideodef.h" - -/** -* MIX_TYPE_DRMPARAMS: -* -* Get type of class. -*/ -#define MIX_TYPE_DRMPARAMS (mix_drmparams_get_type ()) - -/** -* MIX_DRMPARAMS: -* @obj: object to be type-casted. -*/ -#define MIX_DRMPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DRMPARAMS, MixDrmParams)) - -/** -* MIX_IS_DRMPARAMS: -* @obj: an object. 
-* -* Checks if the given object is an instance of #MixParams -*/ -#define MIX_IS_DRMPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DRMPARAMS)) - -/** -* MIX_DRMPARAMS_CLASS: -* @klass: class to be type-casted. -*/ -#define MIX_DRMPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DRMPARAMS, MixDrmParamsClass)) - -/** -* MIX_IS_DRMPARAMS_CLASS: -* @klass: a class. -* -* Checks if the given class is #MixParamsClass -*/ -#define MIX_IS_DRMPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DRMPARAMS)) - -/** -* MIX_DRMPARAMS_GET_CLASS: -* @obj: a #MixParams object. -* -* Get the class instance of the object. -*/ -#define MIX_DRMPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DRMPARAMS, MixDrmParamsClass)) - -typedef struct _MixDrmParams MixDrmParams; -typedef struct _MixDrmParamsClass MixDrmParamsClass; - -/** -* MixDrmParams: -* -* MI-X Drm Parameter object -*/ -struct _MixDrmParams -{ - /*< public > */ - MixParams parent; - - /*< public > */ - - /* TODO: Add properties */ - -}; - -/** -* MixDrmParamsClass: -* -* MI-X Drm object class -*/ -struct _MixDrmParamsClass -{ - /*< public > */ - MixParamsClass parent_class; - - /* class members */ -}; - -/** -* mix_drmparams_get_type: -* @returns: type -* -* Get the type of object. -*/ -GType mix_drmparams_get_type (void); - -/** -* mix_drmparams_new: -* @returns: A newly allocated instance of #MixDrmParams -* -* Use this method to create new instance of #MixDrmParams -*/ -MixDrmParams *mix_drmparams_new (void); -/** -* mix_drmparams_ref: -* @mix: object to add reference -* @returns: the MixDrmParams instance where reference count has been increased. -* -* Add reference count. -*/ -MixDrmParams *mix_drmparams_ref (MixDrmParams * mix); - -/** -* mix_drmparams_unref: -* @obj: object to unref. -* -* Decrement reference count of the object. -*/ -#define mix_drmparams_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/* Class Methods */ - -/* TODO: Add getters and setters for properties */ - -#endif /* __MIX_DRMPARAMS_H__ */ diff --git a/mix_video/src/mixframemanager.c b/mix_video/src/mixframemanager.c index f67137f..e940262 100644 --- a/mix_video/src/mixframemanager.c +++ b/mix_video/src/mixframemanager.c @@ -1,10 +1,10 @@ /* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ #include #include "mixvideolog.h" @@ -20,372 +20,371 @@ static void mix_framemanager_finalize(GObject * obj); G_DEFINE_TYPE( MixFrameManager, mix_framemanager, G_TYPE_OBJECT); static void mix_framemanager_init(MixFrameManager * self) { - /* TODO: public member initialization */ + /* TODO: public member initialization */ - /* TODO: private member initialization */ + /* TODO: private member initialization */ - if (!g_thread_supported()) { - g_thread_init(NULL); - } + if (!g_thread_supported()) { + g_thread_init(NULL); + } - self->lock = g_mutex_new(); + self->lock = g_mutex_new(); - self->flushing = FALSE; - self->eos = FALSE; - self->frame_array = NULL; - self->frame_queue = NULL; - self->initialized = FALSE; + self->flushing = FALSE; + self->eos = FALSE; + self->frame_array = NULL; + self->frame_queue = NULL; + self->initialized = FALSE; - self->mode = MIX_FRAMEORDER_MODE_DISPLAYORDER; - self->framerate_numerator = 30; - self->framerate_denominator = 1; + self->mode = MIX_FRAMEORDER_MODE_DISPLAYORDER; + self->framerate_numerator = 30; + self->framerate_denominator = 1; - self->is_first_frame = TRUE; + self->is_first_frame = TRUE; - /* for vc1 in asf */ - self->p_frame = NULL; - self->prev_timestamp = 0; + /* for vc1 in asf */ + self->p_frame = NULL; + self->prev_timestamp = 0; } static void mix_framemanager_class_init(MixFrameManagerClass * klass) { - GObjectClass *gobject_class = (GObjectClass *) klass; + GObjectClass *gobject_class = (GObjectClass *) klass; - /* parent class for later use */ - parent_class = g_type_class_peek_parent(klass); + /* parent class for later use */ + parent_class = g_type_class_peek_parent(klass); - gobject_class->finalize = mix_framemanager_finalize; + gobject_class->finalize = mix_framemanager_finalize; } MixFrameManager *mix_framemanager_new(void) { - MixFrameManager *ret = g_object_new(MIX_TYPE_FRAMEMANAGER, NULL); + MixFrameManager *ret = g_object_new(MIX_TYPE_FRAMEMANAGER, NULL); - return ret; + return ret; } void mix_framemanager_finalize(GObject * obj) { - /* clean up here. */ + /* clean up here. 
*/ - MixFrameManager *fm = MIX_FRAMEMANAGER(obj); + MixFrameManager *fm = MIX_FRAMEMANAGER(obj); - /* cleanup here */ - mix_framemanager_deinitialize(fm); + /* cleanup here */ + mix_framemanager_deinitialize(fm); - if (fm->lock) { - g_mutex_free(fm->lock); - fm->lock = NULL; - } + if (fm->lock) { + g_mutex_free(fm->lock); + fm->lock = NULL; + } - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } + /* Chain up parent */ + if (parent_class->finalize) { + parent_class->finalize(obj); + } } MixFrameManager *mix_framemanager_ref(MixFrameManager * fm) { - return (MixFrameManager *) g_object_ref(G_OBJECT(fm)); + return (MixFrameManager *) g_object_ref(G_OBJECT(fm)); } /* MixFrameManager class methods */ MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, - MixFrameOrderMode mode, gint framerate_numerator, - gint framerate_denominator, gboolean timebased_ordering) { + MixFrameOrderMode mode, gint framerate_numerator, + gint framerate_denominator, gboolean timebased_ordering) { - MIX_RESULT ret = MIX_RESULT_FAIL; + MIX_RESULT ret = MIX_RESULT_FAIL; - if (!MIX_IS_FRAMEMANAGER(fm) || (mode != MIX_FRAMEORDER_MODE_DISPLAYORDER - && mode != MIX_FRAMEORDER_MODE_DECODEORDER) || framerate_numerator - <= 0 || framerate_denominator <= 0) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm) || (mode != MIX_FRAMEORDER_MODE_DISPLAYORDER + && mode != MIX_FRAMEORDER_MODE_DECODEORDER) || framerate_numerator + <= 0 || framerate_denominator <= 0) { + return MIX_RESULT_INVALID_PARAM; + } - if (fm->initialized) { - return MIX_RESULT_ALREADY_INIT; - } + if (fm->initialized) { + return MIX_RESULT_ALREADY_INIT; + } - if (!g_thread_supported()) { - g_thread_init(NULL); - } + if (!g_thread_supported()) { + g_thread_init(NULL); + } - ret = MIX_RESULT_NO_MEMORY; - if (!fm->lock) { - fm->lock = g_mutex_new(); + ret = MIX_RESULT_NO_MEMORY; if (!fm->lock) { - goto cleanup; + fm->lock = g_mutex_new(); + if (!fm->lock) { + goto cleanup; + } } - } - if (mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) { - fm->frame_array = g_ptr_array_sized_new(INITIAL_FRAME_ARRAY_SIZE); - if (!fm->frame_array) { - goto cleanup; + if (mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) { + fm->frame_array = g_ptr_array_sized_new(INITIAL_FRAME_ARRAY_SIZE); + if (!fm->frame_array) { + goto cleanup; + } } - } - fm->frame_queue = g_queue_new(); - if (!fm->frame_queue) { - goto cleanup; - } + fm->frame_queue = g_queue_new(); + if (!fm->frame_queue) { + goto cleanup; + } - fm->framerate_numerator = framerate_numerator; - fm->framerate_denominator = framerate_denominator; - fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND - / fm->framerate_numerator; + fm->framerate_numerator = framerate_numerator; + fm->framerate_denominator = framerate_denominator; + fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND + / fm->framerate_numerator; - fm->mode = mode; + fm->mode = mode; - fm->timebased_ordering = timebased_ordering; + fm->timebased_ordering = timebased_ordering; - fm->initialized = TRUE; + fm->initialized = TRUE; - ret = MIX_RESULT_SUCCESS; + ret = MIX_RESULT_SUCCESS; -cleanup: + cleanup: - if (ret != MIX_RESULT_SUCCESS) { - if (fm->frame_array) { - g_ptr_array_free(fm->frame_array, TRUE); - fm->frame_array = NULL; - } - if (fm->frame_queue) { - g_queue_free(fm->frame_queue); - fm->frame_queue = NULL; + if (ret != MIX_RESULT_SUCCESS) { + if (fm->frame_array) { + g_ptr_array_free(fm->frame_array, TRUE); + fm->frame_array = NULL; + } + if (fm->frame_queue) { + 
g_queue_free(fm->frame_queue); + fm->frame_queue = NULL; + } } - } - return ret; + return ret; } MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm) { - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->lock) { - return MIX_RESULT_FAIL; - } + if (!fm->lock) { + return MIX_RESULT_FAIL; + } - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } - mix_framemanager_flush(fm); + mix_framemanager_flush(fm); - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); - if (fm->frame_array) { - g_ptr_array_free(fm->frame_array, TRUE); - fm->frame_array = NULL; - } - if (fm->frame_queue) { - g_queue_free(fm->frame_queue); - fm->frame_queue = NULL; - } + if (fm->frame_array) { + g_ptr_array_free(fm->frame_array, TRUE); + fm->frame_array = NULL; + } + if (fm->frame_queue) { + g_queue_free(fm->frame_queue); + fm->frame_queue = NULL; + } - fm->initialized = FALSE; + fm->initialized = FALSE; - g_mutex_unlock(fm->lock); + g_mutex_unlock(fm->lock); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_set_framerate(MixFrameManager *fm, - gint framerate_numerator, gint framerate_denominator) { + gint framerate_numerator, gint framerate_denominator) { - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->lock) { - return MIX_RESULT_FAIL; - } + if (!fm->lock) { + return MIX_RESULT_FAIL; + } - if (framerate_numerator <= 0 || framerate_denominator <= 0) { - return MIX_RESULT_INVALID_PARAM; - } + if (framerate_numerator <= 0 || framerate_denominator <= 0) { + return MIX_RESULT_INVALID_PARAM; + } - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); - fm->framerate_numerator = framerate_numerator; - fm->framerate_denominator = framerate_denominator; - fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND - / fm->framerate_numerator; + fm->framerate_numerator = framerate_numerator; + fm->framerate_denominator = framerate_denominator; + fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND + / fm->framerate_numerator; - g_mutex_unlock(fm->lock); + g_mutex_unlock(fm->lock); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_get_framerate(MixFrameManager *fm, - gint *framerate_numerator, gint *framerate_denominator) { + gint *framerate_numerator, gint *framerate_denominator) { - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->lock) { - return MIX_RESULT_FAIL; - } + if (!fm->lock) { + return MIX_RESULT_FAIL; + } - if (!framerate_numerator || !framerate_denominator) { - return MIX_RESULT_INVALID_PARAM; - } + if (!framerate_numerator || !framerate_denominator) { + return MIX_RESULT_INVALID_PARAM; + } - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); - *framerate_numerator = fm->framerate_numerator; - *framerate_denominator = fm->framerate_denominator; + *framerate_numerator = fm->framerate_numerator; + *framerate_denominator = fm->framerate_denominator; - g_mutex_unlock(fm->lock); + g_mutex_unlock(fm->lock); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_get_frame_order_mode(MixFrameManager *fm, - MixFrameOrderMode *mode) { + MixFrameOrderMode *mode) { - if (!MIX_IS_FRAMEMANAGER(fm)) { - return 
MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->lock) { - return MIX_RESULT_FAIL; - } + if (!fm->lock) { + return MIX_RESULT_FAIL; + } - if (!mode) { - return MIX_RESULT_INVALID_PARAM; - } + if (!mode) { + return MIX_RESULT_INVALID_PARAM; + } - /* no need to use lock */ - *mode = fm->mode; + /* no need to use lock */ + *mode = fm->mode; - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) { - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); - /* flush frame_array */ - if (fm->frame_array) { - guint len = fm->frame_array->len; - if (len) { - guint idx = 0; - MixVideoFrame *frame = NULL; - for (idx = 0; idx < len; idx++) { - frame = (MixVideoFrame *) g_ptr_array_index(fm->frame_array, - idx); - if (frame) { - mix_videoframe_unref(frame); - g_ptr_array_index(fm->frame_array, idx) = NULL; + /* flush frame_array */ + if (fm->frame_array) { + guint len = fm->frame_array->len; + if (len) { + guint idx = 0; + MixVideoFrame *frame = NULL; + for (idx = 0; idx < len; idx++) { + frame = (MixVideoFrame *) g_ptr_array_index(fm->frame_array, + idx); + if (frame) { + mix_videoframe_unref(frame); + g_ptr_array_index(fm->frame_array, idx) = NULL; + } + } + /* g_ptr_array_remove_range(fm->frame_array, 0, len); */ } - } - /* g_ptr_array_remove_range(fm->frame_array, 0, len); */ } - } - if (fm->frame_queue) { - guint len = fm->frame_queue->length; - if (len) { - MixVideoFrame *frame = NULL; - while ((frame = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue))) { - mix_videoframe_unref(frame); - } + if (fm->frame_queue) { + guint len = fm->frame_queue->length; + if (len) { + MixVideoFrame *frame = NULL; + while ((frame = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue))) { + mix_videoframe_unref(frame); + } + } } - } - if(fm->p_frame) { - mix_videoframe_unref(fm->p_frame); - fm->p_frame = NULL; - } - fm->prev_timestamp = 0; + if(fm->p_frame) { + mix_videoframe_unref(fm->p_frame); + fm->p_frame = NULL; + } + fm->prev_timestamp = 0; - fm->eos = FALSE; + fm->eos = FALSE; - fm->is_first_frame = TRUE; + fm->is_first_frame = TRUE; - g_mutex_unlock(fm->lock); + g_mutex_unlock(fm->lock); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MixVideoFrame *get_expected_frame_from_array(GPtrArray *array, - guint64 expected, guint64 tolerance, guint64 *frametimestamp) { - - guint idx = 0; - guint len = 0; - guint64 timestamp = 0; - guint64 lowest_timestamp = (guint64)-1; - guint lowest_timestamp_idx = -1; + guint64 expected, guint64 tolerance, guint64 *frametimestamp) { - MixVideoFrame *frame = NULL; + guint idx = 0; + guint len = 0; + guint64 timestamp = 0; + guint64 lowest_timestamp = (guint64)-1; + guint lowest_timestamp_idx = -1; + + MixVideoFrame *frame = NULL; - if (!array || !expected || !tolerance || !frametimestamp || expected < tolerance) { + if (!array || !expected || !tolerance || !frametimestamp || expected < tolerance) { - return NULL; - } + return NULL; + } - len = array->len; - if (!len) { - return NULL; - } + len = array->len; + if (!len) { + return NULL; + } - for (idx = 0; idx < len; idx++) { - MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, idx); - if (_frame) { + for (idx = 0; idx < 
len; idx++) { + MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, idx); + if (_frame) { - if (mix_videoframe_get_timestamp(_frame, ×tamp) - != MIX_RESULT_SUCCESS) { + if (mix_videoframe_get_timestamp(_frame, ×tamp) + != MIX_RESULT_SUCCESS) { - /* - * Oops, this shall never happen! - * In case it heppens, release the frame! - */ + /* + * Oops, this shall never happen! + * In case it heppens, release the frame! + */ - mix_videoframe_unref(_frame); + mix_videoframe_unref(_frame); - /* make an available slot */ - g_ptr_array_index(array, idx) = NULL; + /* make an available slot */ + g_ptr_array_index(array, idx) = NULL; - break; - } - - if (lowest_timestamp > timestamp) - { - lowest_timestamp = timestamp; - lowest_timestamp_idx = idx; - } + break; + } + + if (lowest_timestamp > timestamp) + { + lowest_timestamp = timestamp; + lowest_timestamp_idx = idx; + } + } } - } - - if (lowest_timestamp == (guint64)-1) - { - return NULL; - } - + + if (lowest_timestamp == (guint64)-1) + { + return NULL; + } + - /* check if this is the expected next frame */ - if (lowest_timestamp <= expected + tolerance) - { - MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, lowest_timestamp_idx); - /* make this slot available */ - g_ptr_array_index(array, lowest_timestamp_idx) = NULL; + /* check if this is the expected next frame */ + if (lowest_timestamp <= expected + tolerance) + { + MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, lowest_timestamp_idx); + /* make this slot available */ + g_ptr_array_index(array, lowest_timestamp_idx) = NULL; - *frametimestamp = lowest_timestamp; - frame = _frame; - } - - return frame; + *frametimestamp = lowest_timestamp; + frame = _frame; + } + + return frame; } - MixVideoFrame *get_expected_frame_from_array_DO(GPtrArray *array, guint32 expected, guint32 *framedisplayorder) { @@ -455,38 +454,36 @@ MixVideoFrame *get_expected_frame_from_array_DO(GPtrArray *array, return frame; } - void add_frame_into_array(GPtrArray *array, MixVideoFrame *mvf) { - gboolean found_slot = FALSE; - guint len = 0; + gboolean found_slot = FALSE; + guint len = 0; - if (!array || !mvf) { - return; - } + if (!array || !mvf) { + return; + } - /* do we have slot for this frame? */ - len = array->len; - if (len) { - guint idx = 0; - gpointer frame = NULL; - for (idx = 0; idx < len; idx++) { - frame = g_ptr_array_index(array, idx); - if (!frame) { - found_slot = TRUE; - g_ptr_array_index(array, idx) = (gpointer) mvf; - break; - } + /* do we have slot for this frame? */ + len = array->len; + if (len) { + guint idx = 0; + gpointer frame = NULL; + for (idx = 0; idx < len; idx++) { + frame = g_ptr_array_index(array, idx); + if (!frame) { + found_slot = TRUE; + g_ptr_array_index(array, idx) = (gpointer) mvf; + break; + } + } } - } - if (!found_slot) { - g_ptr_array_add(array, (gpointer) mvf); - } + if (!found_slot) { + g_ptr_array_add(array, (gpointer) mvf); + } } - gint frame_sorting_func_DO(gconstpointer a, gconstpointer b) { MixVideoFrame *fa = *((MixVideoFrame **) a); @@ -520,7 +517,6 @@ gint frame_sorting_func_DO(gconstpointer a, gconstpointer b) { return -1; } - MIX_RESULT mix_framemanager_displayorder_based_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { /* @@ -692,363 +688,443 @@ cleanup: return ret; } - MIX_RESULT mix_framemanager_timestamp_based_enqueue(MixFrameManager *fm, - MixVideoFrame *mvf) { - /* - * display order mode. 
- * - * if this is the first frame, we always push it into - * output queue, if it is not, check if it is the one - * expected, if yes, push it into the output queue. - * if not, put it into waiting list. - * - * while the expected frame is pushed into output queue, - * the expected next timestamp is also updated. with this - * updated expected next timestamp, we search for expected - * frame from the waiting list, if found, repeat the process. - * - */ - - MIX_RESULT ret = MIX_RESULT_FAIL; - guint64 timestamp = 0; - -first_frame: - - ret = mix_videoframe_get_timestamp(mvf, ×tamp); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - if (fm->is_first_frame) { - + MixVideoFrame *mvf) { /* - * for the first frame, we can always put it into the output queue + * display order mode. + * + * if this is the first frame, we always push it into + * output queue, if it is not, check if it is the one + * expected, if yes, push it into the output queue. + * if not, put it into waiting list. + * + * while the expected frame is pushed into output queue, + * the expected next timestamp is also updated. with this + * updated expected next timestamp, we search for expected + * frame from the waiting list, if found, repeat the process. + * */ - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - /* - * what timestamp of next frame shall be? - */ - fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta; - - fm->is_first_frame = FALSE; - - } else { - - /* - * is this the next frame expected? - */ + MIX_RESULT ret = MIX_RESULT_FAIL; + guint64 timestamp = 0; - /* calculate tolerance */ - guint64 tolerance = fm->frame_timestamp_delta / 4; - MixVideoFrame *frame_from_array = NULL; - guint64 timestamp_frame_array = 0; + first_frame: - /* - * timestamp may be associated with the second field, which - * will not fall between the tolerance range. - */ + ret = mix_videoframe_get_timestamp(mvf, ×tamp); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } - if (timestamp <= fm->next_frame_timestamp + tolerance) { + if (fm->is_first_frame) { - /* - * ok, this is the frame expected, push it into output queue - */ - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + /* + * for the first frame, we can always put it into the output queue + */ + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - /* - * update next_frame_timestamp only if it falls within the tolerance range - */ - if (timestamp >= fm->next_frame_timestamp - tolerance) - { + /* + * what timestamp of next frame shall be? + */ fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta; - } - /* - * since we updated next_frame_timestamp, there might be a frame - * in the frame_array that satisfying this new next_frame_timestamp - */ - - while ((frame_from_array = get_expected_frame_from_array( - fm->frame_array, fm->next_frame_timestamp, tolerance, - ×tamp_frame_array))) { + fm->is_first_frame = FALSE; - g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array); + } else { /* - * update next_frame_timestamp only if it falls within the tolerance range - */ - if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance) - { - fm->next_frame_timestamp = timestamp_frame_array - + fm->frame_timestamp_delta; - } - } + * is this the next frame expected? + */ - } else { + /* calculate tolerance */ + guint64 tolerance = fm->frame_timestamp_delta / 4; + MixVideoFrame *frame_from_array = NULL; + guint64 timestamp_frame_array = 0; - /* - * is discontinuity flag set for this frame ? 
- */ - gboolean discontinuity = FALSE; - ret = mix_videoframe_get_discontinuity(mvf, &discontinuity); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } + /* + * timestamp may be associated with the second field, which + * will not fall between the tolerance range. + */ + + if (timestamp <= fm->next_frame_timestamp + tolerance) { + + /* + * ok, this is the frame expected, push it into output queue + */ + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + + /* + * update next_frame_timestamp only if it falls within the tolerance range + */ + if (timestamp >= fm->next_frame_timestamp - tolerance) + { + fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta; + } + + /* + * since we updated next_frame_timestamp, there might be a frame + * in the frame_array that satisfying this new next_frame_timestamp + */ + + while ((frame_from_array = get_expected_frame_from_array( + fm->frame_array, fm->next_frame_timestamp, tolerance, + ×tamp_frame_array))) { + + g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array); + + /* + * update next_frame_timestamp only if it falls within the tolerance range + */ + if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance) + { + fm->next_frame_timestamp = timestamp_frame_array + + fm->frame_timestamp_delta; + } + } - /* - * If this is a frame with discontinuity flag set, clear frame_array - * and treat the frame as the first frame. - */ - if (discontinuity) { + } else { - guint len = fm->frame_array->len; - if (len) { - guint idx = 0; - MixVideoFrame *frame = NULL; - for (idx = 0; idx < len; idx++) { - frame = (MixVideoFrame *) g_ptr_array_index( - fm->frame_array, idx); - if (frame) { - mix_videoframe_unref(frame); - g_ptr_array_index(fm->frame_array, idx) = NULL; + /* + * is discontinuity flag set for this frame ? + */ + gboolean discontinuity = FALSE; + ret = mix_videoframe_get_discontinuity(mvf, &discontinuity); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; } - } - } - - fm->is_first_frame = TRUE; - goto first_frame; - } - /* - * handle variable frame rate: - * display any frame which time stamp is less than current one. - * - */ - guint64 tolerance = fm->frame_timestamp_delta / 4; - MixVideoFrame *frame_from_array = NULL; - guint64 timestamp_frame_array = 0; + /* + * If this is a frame with discontinuity flag set, clear frame_array + * and treat the frame as the first frame. + */ + if (discontinuity) { + + guint len = fm->frame_array->len; + if (len) { + guint idx = 0; + MixVideoFrame *frame = NULL; + for (idx = 0; idx < len; idx++) { + frame = (MixVideoFrame *) g_ptr_array_index( + fm->frame_array, idx); + if (frame) { + mix_videoframe_unref(frame); + g_ptr_array_index(fm->frame_array, idx) = NULL; + } + } + } + + fm->is_first_frame = TRUE; + goto first_frame; + } - while ((frame_from_array = get_expected_frame_from_array( - fm->frame_array, timestamp, tolerance, - ×tamp_frame_array))) - { - g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array); + /* + * handle variable frame rate: + * display any frame which time stamp is less than current one. 
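+	 *
+	 * Editorial illustration (not from the original change), a worked
+	 * example of the arithmetic above: at 30/1 fps the nominal delta is
+	 * frame_timestamp_delta = 1 * MIX_SECOND / 30 (~33.3 ms) and the
+	 * tolerance is a quarter of that (~8.3 ms). If next_frame_timestamp
+	 * is 100 ms and a frame stamped 167 ms arrives, 167 > 100 + 8.3, so
+	 * every parked frame stamped at or below 167 + 8.3 is drained from
+	 * frame_array into the output queue in timestamp order, and the
+	 * 167 ms frame itself is then parked in frame_array.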
+	 *
+	 */
+	guint64 tolerance = fm->frame_timestamp_delta / 4;
+	MixVideoFrame *frame_from_array = NULL;
+	guint64 timestamp_frame_array = 0;
+
+	while ((frame_from_array = get_expected_frame_from_array(
+			fm->frame_array, timestamp, tolerance,
+			&timestamp_frame_array)))
+	{
+		g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array);
+
+		/*
+		 * update next_frame_timestamp only if it falls within the tolerance range
+		 */
+		if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance)
+		{
+			fm->next_frame_timestamp = timestamp_frame_array
+					+ fm->frame_timestamp_delta;
+		}
+	}
+	/*
+	 * this is not the expected frame, put it into frame_array
+	 */
-			/*
-			 * update next_frame_timestamp only if it falls within the tolerance range
-			 */
-			if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance)
-			{
-				fm->next_frame_timestamp = timestamp_frame_array
-						+ fm->frame_timestamp_delta;
+	add_frame_into_array(fm->frame_array, mvf);
 			}
-		}
-		/*
-		 * this is not the expected frame, put it into frame_array
-		 */
-
-		add_frame_into_array(fm->frame_array, mvf);
 	}
-	}
-cleanup:
+	cleanup:

-	return ret;
+	return ret;
 }

 MIX_RESULT mix_framemanager_frametype_based_enqueue(MixFrameManager *fm,
-		MixVideoFrame *mvf) {
+	MixVideoFrame *mvf) {

-	MIX_RESULT ret = MIX_RESULT_FAIL;
-	MixFrameType frame_type;
-	guint64 timestamp = 0;
+	MIX_RESULT ret = MIX_RESULT_FAIL;
+	MixFrameType frame_type;
+	guint64 timestamp = 0;

-	ret = mix_videoframe_get_frame_type(mvf, &frame_type);
-	if (ret != MIX_RESULT_SUCCESS) {
-		goto cleanup;
-	}
+	ret = mix_videoframe_get_frame_type(mvf, &frame_type);
+	if (ret != MIX_RESULT_SUCCESS) {
+		goto cleanup;
+	}

-	ret = mix_videoframe_get_timestamp(mvf, &timestamp);
-	if (ret != MIX_RESULT_SUCCESS) {
-		goto cleanup;
-	}
+	ret = mix_videoframe_get_timestamp(mvf, &timestamp);
+	if (ret != MIX_RESULT_SUCCESS) {
+		goto cleanup;
+	}

 #ifdef MIX_LOG_ENABLE
-	if (frame_type == TYPE_I) {
-		LOG_I( "TYPE_I %"G_GINT64_FORMAT"\n", timestamp);
-	} else if (frame_type == TYPE_P) {
-		LOG_I( "TYPE_P %"G_GINT64_FORMAT"\n", timestamp);
-	} else if (frame_type == TYPE_B) {
-		LOG_I( "TYPE_B %"G_GINT64_FORMAT"\n", timestamp);
-	} else {
-		LOG_I( "TYPE_UNKNOWN %"G_GINT64_FORMAT"\n", timestamp);
-	}
+	if (frame_type == TYPE_I) {
+		LOG_I( "TYPE_I %"G_GINT64_FORMAT"\n", timestamp);
+	} else if (frame_type == TYPE_P) {
+		LOG_I( "TYPE_P %"G_GINT64_FORMAT"\n", timestamp);
+	} else if (frame_type == TYPE_B) {
+		LOG_I( "TYPE_B %"G_GINT64_FORMAT"\n", timestamp);
+	} else {
+		LOG_I( "TYPE_UNKNOWN %"G_GINT64_FORMAT"\n", timestamp);
+	}
 #endif

-	if (fm->is_first_frame) {
-		/*
-		 * The first frame is not a I frame, unexpected!
-		 */
-		if (frame_type != TYPE_I) {
-			goto cleanup;
-		}
+	if (fm->is_first_frame) {
+		/*
+		 * The first frame is not an I frame, unexpected!
+		 */
+		if (frame_type != TYPE_I) {
+			goto cleanup;
+		}

-		g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
-		fm->is_first_frame = FALSE;
-	} else {
+		g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
+		fm->is_first_frame = FALSE;
+	} else {

-		/*
-		 * I P B B P B B ...
-		 */
-		if (frame_type == TYPE_I || frame_type == TYPE_P) {
+		/*
+		 * I P B B P B B ...
+ */ + if (frame_type == TYPE_I || frame_type == TYPE_P) { - if (fm->p_frame) { + if (fm->p_frame) { - ret = mix_videoframe_set_timestamp(fm->p_frame, - fm->prev_timestamp); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } + ret = mix_videoframe_set_timestamp(fm->p_frame, + fm->prev_timestamp); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } - g_queue_push_tail(fm->frame_queue, (gpointer) fm->p_frame); - fm->p_frame = NULL; - } + g_queue_push_tail(fm->frame_queue, (gpointer) fm->p_frame); + fm->p_frame = NULL; + } - /* it is an I frame, push it into the out queue */ - /*if (frame_type == TYPE_I) { + /* it is an I frame, push it into the out queue */ + /*if (frame_type == TYPE_I) { - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - } else*/ - { - /* it is a P frame, we can not push it to the out queue yet, save it */ - fm->p_frame = mvf; - fm->prev_timestamp = timestamp; - } + } else*/ + { + /* it is a P frame, we can not push it to the out queue yet, save it */ + fm->p_frame = mvf; + fm->prev_timestamp = timestamp; + } - ret = MIX_RESULT_SUCCESS; + ret = MIX_RESULT_SUCCESS; - } else { - /* it is a B frame, replace the timestamp with the previous one */ - if (timestamp > fm->prev_timestamp) { - ret = mix_videoframe_set_timestamp(mvf, fm->prev_timestamp); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } + } else { + /* it is a B frame, replace the timestamp with the previous one */ + if (timestamp > fm->prev_timestamp) { + ret = mix_videoframe_set_timestamp(mvf, fm->prev_timestamp); + if (ret != MIX_RESULT_SUCCESS) { + goto cleanup; + } - /* save the timestamp */ - fm->prev_timestamp = timestamp; - } - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - ret = MIX_RESULT_SUCCESS; + /* save the timestamp */ + fm->prev_timestamp = timestamp; + } + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + ret = MIX_RESULT_SUCCESS; + } } - } -cleanup: + cleanup: - return ret; + return ret; } MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { - MIX_RESULT ret = MIX_RESULT_FAIL; - - /*fm->mode = MIX_FRAMEORDER_MODE_DECODEORDER;*/ - - if (!mvf) { - return MIX_RESULT_INVALID_PARAM; - } + MIX_RESULT ret = MIX_RESULT_FAIL; - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + /*fm->mode = MIX_FRAMEORDER_MODE_DECODEORDER;*/ - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + if (!mvf) { + return MIX_RESULT_INVALID_PARAM; + } - /* - * This should never happen! - */ - if (fm->mode != MIX_FRAMEORDER_MODE_DISPLAYORDER && fm->mode - != MIX_FRAMEORDER_MODE_DECODEORDER) { - return MIX_RESULT_FAIL; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - g_mutex_lock(fm->lock); + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } - ret = MIX_RESULT_SUCCESS; - if (fm->mode == MIX_FRAMEORDER_MODE_DECODEORDER) { /* - * decode order mode, push the frame into output queue + * This should never happen! 
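[Illustrative note, not part of the patch.] A worked example of the frame-type path above: with decode-order input I@0, P@3, B@1, B@2, P@6, the manager pushes I@0 immediately, parks P@3 as p_frame with prev_timestamp = 3, passes B@1 and B@2 straight through (their timestamps do not exceed prev_timestamp, so no swap occurs), and releases the parked P with timestamp 3 only when P@6 arrives and is parked in its place. The output queue thus carries I@0, B@1, B@2, P@3 in increasing presentation time, which is exactly the display-order re-stamping the I P B B comment describes.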
*/ - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); + if (fm->mode != MIX_FRAMEORDER_MODE_DISPLAYORDER && fm->mode + != MIX_FRAMEORDER_MODE_DECODEORDER) { + return MIX_RESULT_FAIL; + } - } else { + g_mutex_lock(fm->lock); + + ret = MIX_RESULT_SUCCESS; + if (fm->mode == MIX_FRAMEORDER_MODE_DECODEORDER) { + /* + * decode order mode, push the frame into output queue + */ + g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - if (fm->timebased_ordering) { - // ret = mix_framemanager_timestamp_based_enqueue(fm, mvf); - ret = mix_framemanager_displayorder_based_enqueue(fm, mvf); } else { - ret = mix_framemanager_frametype_based_enqueue(fm, mvf); + + if (fm->timebased_ordering) { +#ifndef ANDROID + ret = mix_framemanager_timestamp_based_enqueue(fm, mvf); +#else + ret = mix_framemanager_displayorder_based_enqueue(fm, mvf); +#endif + + } else { + ret = mix_framemanager_frametype_based_enqueue(fm, mvf); + } } - } - g_mutex_unlock(fm->lock); + g_mutex_unlock(fm->lock); - return ret; + return ret; } MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { - MIX_RESULT ret = MIX_RESULT_FAIL; + MIX_RESULT ret = MIX_RESULT_FAIL; - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!mvf) { - return MIX_RESULT_INVALID_PARAM; - } + if (!mvf) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); - ret = MIX_RESULT_FRAME_NOTAVAIL; - *mvf = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue); - if (*mvf) { - ret = MIX_RESULT_SUCCESS; - } else if (fm->eos) { - ret = MIX_RESULT_EOS; - } + ret = MIX_RESULT_FRAME_NOTAVAIL; + *mvf = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue); + if (*mvf) { + ret = MIX_RESULT_SUCCESS; + } else if (fm->eos) { + ret = MIX_RESULT_EOS; + } - g_mutex_unlock(fm->lock); + g_mutex_unlock(fm->lock); - return ret; + return ret; +} + +gint frame_sorting_func(gconstpointer a, gconstpointer b) { + + MixVideoFrame *fa = *((MixVideoFrame **) a); + MixVideoFrame *fb = *((MixVideoFrame **) b); + + guint64 ta, tb; + + if (!fa && !fb) { + return 0; + } + + if (fa && !fb) { + return 1; + } + + if (!fa && fb) { + return -1; + } + + mix_videoframe_get_timestamp(fa, &ta); + mix_videoframe_get_timestamp(fb, &tb); + + if (ta > tb) { + return 1; + } + + if (ta == tb) { + return 0; + } + + return -1; } MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) { - MIX_RESULT ret = MIX_RESULT_FAIL; + MIX_RESULT ret = MIX_RESULT_FAIL; - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); + + if (fm->mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) { - fm->eos = TRUE; + /* Do we have frames that are not in the output queue? + * MixVideoFormat* must guarantee that when this + * function called, the last frame is already enqueued! 
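[Illustrative note, not part of the patch.] One subtlety in frame_sorting_func() above: g_ptr_array_sort() hands the comparator pointers to the array slots, not to the elements themselves, so the gconstpointer arguments must be dereferenced as MixVideoFrame ** before comparing, and the NULL checks order already-cleared slots ahead of real frames. The EOS drain below relies on this when it sorts the pending array before flushing it. Note also that the ANDROID branch below calls frame_sorting_func_DO(), which is not defined in this hunk and is assumed to be supplied elsewhere in the series. A minimal usage sketch:

    #include <glib.h>
    #include "mixframemanager.h"

    /* Sketch only: sort pending frames by timestamp, as the EOS drain does. */
    static void sort_pending_frames_example(MixFrameManager *fm)
    {
        /* slots hold MixVideoFrame *, so the comparator sees MixVideoFrame ** */
        g_ptr_array_sort(fm->frame_array, frame_sorting_func);
        /* ... then walk the array and g_queue_push_tail() each non-NULL slot */
    }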
+ */ - g_mutex_unlock(fm->lock); + /* In case it is frame type based enqueue, p_frame is the + * only frame that is not in the output queue + */ + if (fm->p_frame && fm->frame_queue) { + g_queue_push_tail(fm->frame_queue, (gpointer) fm->p_frame); + fm->p_frame = NULL; + } + + /* In case it is timestamp based enqueue, throw all the frames + * in the array into the output queue by the order of timestamp + */ + if (fm->frame_array) { + guint len = fm->frame_array->len; + if (len) { +#ifndef ANDROID + /* sorting frames in the array by timestamp */ + g_ptr_array_sort(fm->frame_array, frame_sorting_func); +#else + /* sorting frames is the array by displayorder */ + g_ptr_array_sort(fm->frame_array, frame_sorting_func_DO); +#endif + + guint idx = 0; + MixVideoFrame *frame = NULL; + for (idx = 0; idx < len; idx++) { + frame = (MixVideoFrame *) g_ptr_array_index( + fm->frame_array, idx); + if (frame) { + g_ptr_array_index(fm->frame_array, idx) = NULL; + g_queue_push_tail(fm->frame_queue, (gpointer) frame); + } + } + } + } + } + + + fm->eos = TRUE; - return ret; + g_mutex_unlock(fm->lock); + + return ret; } diff --git a/mix_video/src/mixframemanager.h b/mix_video/src/mixframemanager.h index cd33038..43343d5 100644 --- a/mix_video/src/mixframemanager.h +++ b/mix_video/src/mixframemanager.h @@ -59,7 +59,7 @@ struct _MixFrameManager { gboolean timebased_ordering; - guint32 next_displayorder; + guint32 next_displayorder; }; /** diff --git a/mix_video/src/mixvideo.c b/mix_video/src/mixvideo.c index ffd72d5..063f641 100644 --- a/mix_video/src/mixvideo.c +++ b/mix_video/src/mixvideo.c @@ -6,17 +6,68 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ +/** + * SECTION:mixvideo + * @short_description: Object to support a single stream decoding or encoding using hardware accelerated decoder/encoder. + * @include: mixvideo.h + * + * #MixVideo objects are created by the MMF/App and utilized for main MI-X API functionality for video. + * + * The MixVideo object handles any of the video formats internally. + * The App/MMF will pass a MixVideoConfigParamsDecH264/MixVideoConfigParamsDecVC1/ + * MixVideoConfigParamsEncH264/etc object to MixVideo in the mix_video_configure() + * call. MixVideoInitParams, MixVideoDecodeParams, MixVideoEncodeParams, and + * MixVideoRenderParams objects will be passed in the mix_video_initialize(), + * mix_video_decode(), mix_video_encode() and mix_video_render() calls respectively. + * + * The application can take the following steps to decode video: + * + * Create a mix_video object using mix_video_new() + * Initialize the object using mix_video_initialize() + * Configure the stream using mix_video_configure() + * Decode frames using mix_video_decode() + * Retrieve the decoded frames using mix_video_get_frame(). The decoded frames can be retrieved in decode order or display order. + * At the presentation time, using the timestamp provided with the decoded frame, render the frame to an X11 Window using mix_video_render(). The frame can be retained for redrawing until the next frame is retrieved. + * When the frame is no longer needed for redrawing, release the frame using mix_video_release_frame(). 
+ * + * + * For encoding, the application can take the following steps to encode video: + * + * Create a mix_video object using mix_video_new() + * Initialize the object using mix_video_initialize() + * Configure the stream using mix_video_configure() + * Encode frames using mix_video_encode() + * Use the encoded data buffers as desired; for example, forward to a muxing component for saving to a file. + * Retrieve the uncompressed frames for display using mix_video_get_frame(). + * At the presentation time, using the timestamp provided with the decoded frame, render the frame to an X11 Window using mix_video_render(). For encode, the frame should not be retained for redrawing after the initial rendering, due to resource limitations. + * Release the frame using mix_video_release_frame(). + * + * + */ + #include /* libVA */ + #ifndef ANDROID #include -#endif #include +#else +#define Display unsigned int +//#include "mix_vagetdisplay.h" + +VADisplay vaGetDisplay ( + void *android_dpy +); -#include + +#endif #include "mixvideolog.h" +#ifndef ANDROID #include "mixdisplayx11.h" +#else +#include "mixdisplayandroid.h" +#endif #include "mixvideoframe.h" #include "mixframemanager.h" @@ -36,15 +87,22 @@ #include "mixvideoformatenc_h264.h" #include "mixvideoformatenc_mpeg4.h" #include "mixvideoformatenc_preview.h" +#include "mixvideoformatenc_h263.h" #include "mixvideoconfigparamsenc_h264.h" #include "mixvideoconfigparamsenc_mpeg4.h" #include "mixvideoconfigparamsenc_preview.h" - +#include "mixvideoconfigparamsenc_h263.h" #include "mixvideo.h" #include "mixvideo_private.h" +#ifdef ANDROID +#define mix_strcmp strcmp +#else +#define mix_strcmp g_strcmp0 +#endif + #define USE_OPAQUE_POINTER #ifdef USE_OPAQUE_POINTER @@ -103,9 +161,6 @@ MIX_RESULT mix_video_release_frame_default(MixVideo * mix, MIX_RESULT mix_video_render_default(MixVideo * mix, MixVideoRenderParams * render_params, MixVideoFrame *frame); -MIX_RESULT mix_video_get_decoded_data_default(MixVideo * mix, MixIOVec * iovout, - MixVideoRenderParams * render_params, MixVideoFrame *frame); - MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, MixVideoEncodeParams * encode_params); @@ -166,7 +221,6 @@ static void mix_video_class_init(MixVideoClass * klass) { klass->get_frame_func = mix_video_get_frame_default; klass->release_frame_func = mix_video_release_frame_default; klass->render_func = mix_video_render_default; - klass->get_decoded_data_func = mix_video_get_decoded_data_default; klass->encode_func = mix_video_encode_default; klass->flush_func = mix_video_flush_default; klass->eos_func = mix_video_eos_default; @@ -178,7 +232,7 @@ static void mix_video_class_init(MixVideoClass * klass) { MixVideo *mix_video_new(void) { - MixVideo *ret = g_object_new(MIX_TYPE_VIDEO, NULL); + MixVideo *ret = (MixVideo *)g_object_new(MIX_TYPE_VIDEO, NULL); return ret; } @@ -375,27 +429,40 @@ MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode, LOG_E("Failed to get display 1\n"); goto cleanup; } - +#ifndef ANDROID if (MIX_IS_DISPLAYX11(mix_display)) { MixDisplayX11 *mix_displayx11 = MIX_DISPLAYX11(mix_display); - - /* XXX NOTE: This must be fixed in all clients */ - mix_displayx11->display = 0x18c34078; - ret = mix_displayx11_get_display(mix_displayx11, &display); if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed to get display 2\n"); goto cleanup; + } } else { - /* TODO: add support to other MixDisplay type. 
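[Illustrative note, not part of the patch.] The SECTION:mixvideo documentation added above lists the decode call sequence; condensed into code it looks roughly like the sketch below. This is illustrative only: construction of the parameter objects and input MixBuffers is assumed to happen elsewhere, DRM parameters are passed as NULL for unprotected content, and error handling is reduced to early returns.

    #include "mixvideo.h"

    MIX_RESULT decode_stream_example(MixVideoInitParams *init_params,
            MixVideoConfigParams *config_params,
            MixBuffer *bufin[], gint bufincnt,
            MixVideoDecodeParams *decode_params,
            MixVideoRenderParams *render_params)
    {
        MixVideoFrame *frame = NULL;
        MixVideo *mix = mix_video_new();
        MIX_RESULT ret;

        ret = mix_video_initialize(mix, MIX_CODEC_MODE_DECODE, init_params, NULL);
        if (ret != MIX_RESULT_SUCCESS)
            return ret;

        ret = mix_video_configure(mix, config_params, NULL);
        if (ret != MIX_RESULT_SUCCESS)
            return ret;

        /* one coded frame per call; its timestamp travels in decode_params */
        ret = mix_video_decode(mix, bufin, bufincnt, decode_params);
        if (ret != MIX_RESULT_SUCCESS)
            return ret;

        /* pull decoded frames until none are pending */
        while (mix_video_get_frame(mix, &frame) == MIX_RESULT_SUCCESS) {
            mix_video_render(mix, render_params, frame);
            mix_video_release_frame(mix, frame);
        }

        mix_video_deinitialize(mix);
        return MIX_RESULT_SUCCESS;
    }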
For now, just return error!*/ LOG_E("It is not display x11\n"); ret = MIX_RESULT_FAIL; goto cleanup; } - +#else + if (MIX_IS_DISPLAYANDROID(mix_display)) { + MixDisplayAndroid *mix_displayandroid = MIX_DISPLAYANDROID(mix_display); + ret = mix_displayandroid_get_display(mix_displayandroid, &display); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get display 2\n"); + goto cleanup; + + } + } else { + /* TODO: add support to other MixDisplay type. For now, just return error!*/ + LOG_E("It is not display android\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } +#endif /* Now, we can initialize libVA */ + + LOG_V("Try to get vaDisplay : display = %x\n", display); priv->va_display = vaGetDisplay(display); /* Oops! Fail to get VADisplay */ @@ -435,6 +502,7 @@ MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode, return ret; } + MIX_RESULT mix_video_deinitialize_default(MixVideo * mix) { MixVideoPrivate *priv = NULL; @@ -512,7 +580,7 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, LOG_I( "mime : %s\n", mime_type); #ifdef MIX_LOG_ENABLE - if (strcmp(mime_type, "video/x-wmv") == 0) { + if (mix_strcmp(mime_type, "video/x-wmv") == 0) { LOG_I( "mime : video/x-wmv\n"); if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { @@ -560,8 +628,9 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, /* initialize frame manager */ - if (strcmp(mime_type, "video/x-wmv") == 0 || strcmp(mime_type, - "video/mpeg") == 0 || strcmp(mime_type, "video/x-divx") == 0) { + if (mix_strcmp(mime_type, "video/x-wmv") == 0 || mix_strcmp(mime_type, + "video/mpeg") == 0 || mix_strcmp(mime_type, "video/x-divx") == 0 + || mix_strcmp(mime_type, "video/x-h263") == 0) { ret = mix_framemanager_initialize(priv->frame_manager, frame_order_mode, fps_n, fps_d, FALSE); } else { @@ -591,7 +660,7 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, /* Finally, we can create MixVideoFormat */ /* What type of MixVideoFormat we need create? */ - if (strcmp(mime_type, "video/x-wmv") == 0 + if (mix_strcmp(mime_type, "video/x-wmv") == 0 && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { MixVideoFormat_VC1 *video_format = mix_videoformat_vc1_new(); @@ -605,7 +674,7 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, priv->video_format = MIX_VIDEOFORMAT(video_format); - } else if (strcmp(mime_type, "video/x-h264") == 0 + } else if (mix_strcmp(mime_type, "video/x-h264") == 0 && MIX_IS_VIDEOCONFIGPARAMSDEC_H264(priv_config_params_dec)) { MixVideoFormat_H264 *video_format = mix_videoformat_h264_new(); @@ -619,13 +688,13 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, priv->video_format = MIX_VIDEOFORMAT(video_format); - } else if (strcmp(mime_type, "video/mpeg") == 0 || strcmp(mime_type, - "video/x-divx") == 0) { + } else if (mix_strcmp(mime_type, "video/mpeg") == 0 || mix_strcmp(mime_type, + "video/x-divx") == 0 || mix_strcmp(mime_type, "video/x-h263") == 0 ) { guint version = 0; /* Is this mpeg4:2 ? 
*/ - if (strcmp(mime_type, "video/mpeg") == 0) { + if (mix_strcmp(mime_type, "video/mpeg") == 0 || mix_strcmp(mime_type, "video/x-h263") == 0 ) { /* * we don't support mpeg other than mpeg verion 4 @@ -857,7 +926,7 @@ MIX_RESULT mix_video_configure_encode(MixVideo * mix, goto cleanup; } - /* TODO: work specific to h264 encode */ + /* work specific to h264 encode */ priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); @@ -872,12 +941,29 @@ MIX_RESULT mix_video_configure_encode(MixVideo * mix, goto cleanup; } - /* TODO: work specific to mpeg4 */ + /* work specific to mpeg4 */ + + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + + } + + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263 + && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) { + + MixVideoFormatEnc_H263 *video_format_enc = mix_videoformatenc_h263_new(); + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("mix_video_configure_encode: Failed to create h.263 video format\n"); + goto cleanup; + } + + /* work specific to h.263 */ priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); } - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW + + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) { MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new(); @@ -1099,6 +1185,15 @@ MIX_RESULT mix_video_release_frame_default(MixVideo * mix, } +#ifdef ANDROID + +MIX_RESULT mix_video_render_default(MixVideo * mix, + MixVideoRenderParams * render_params, MixVideoFrame *frame) { + + return MIX_RESULT_NOTIMPL; +} + +#else MIX_RESULT mix_video_render_default(MixVideo * mix, MixVideoRenderParams * render_params, MixVideoFrame *frame) { @@ -1111,8 +1206,8 @@ MIX_RESULT mix_video_render_default(MixVideo * mix, MixDisplayX11 *mix_display_x11 = NULL; Display *display = NULL; - Drawable drawable = 0; + Drawable drawable = 0; MixRect src_rect, dst_rect; VARectangle *va_cliprects = NULL; @@ -1267,170 +1362,7 @@ MIX_RESULT mix_video_render_default(MixVideo * mix, return ret; } - -#ifdef ANDROID -MIX_RESULT mix_video_get_decoded_data_default(MixVideo * mix, MixIOVec * iovout, - MixVideoRenderParams * render_params, MixVideoFrame *frame) { - - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_VERBOSE, "Begin\n"); - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - MixDisplay *mix_display = NULL; - MixDisplayX11 *mix_display_x11 = NULL; - - Display *display = NULL; - Drawable drawable = 0; - - MixRect src_rect, dst_rect; - - VARectangle *va_cliprects = NULL; - guint number_of_cliprects = 0; - - /* VASurfaceID va_surface_id; */ - gulong va_surface_id; - VAStatus va_status; - - if (!mix || !render_params) { - return MIX_RESULT_NULL_PTR; - } - - if (!MIX_IS_VIDEO(mix)) { - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Not MixVideo\n"); - return MIX_RESULT_INVALID_PARAM; - } - - /* Is this render param valid? 
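[Illustrative note, not part of the patch.] These hunks switch string comparisons to the mix_strcmp macro introduced near the top of this file's changes. The practical difference is NULL handling: g_strcmp0() tolerates NULL arguments (sorting NULL before any string), while plain strcmp(), the ANDROID fallback, has undefined behavior on NULL. A defensive sketch, with is_wmv_example invented for illustration:

    #include <glib.h>
    #include <string.h>

    #ifdef ANDROID
    #define mix_strcmp strcmp
    #else
    #define mix_strcmp g_strcmp0
    #endif

    static gboolean is_wmv_example(const gchar *mime_type)
    {
        /* explicit NULL guard keeps the strcmp fallback safe on Android */
        return mime_type != NULL && mix_strcmp(mime_type, "video/x-wmv") == 0;
    }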
*/ - if (!MIX_IS_VIDEORENDERPARAMS(render_params)) { - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, - "Not MixVideoRenderParams\n"); - return MIX_RESULT_INVALID_PARAM; - } - - priv = MIX_VIDEO_PRIVATE(mix); - - if (!priv->initialized) { - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Not initialized\n"); - return MIX_RESULT_NOT_INIT; - } - - if (!priv->configured) { - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Not configured\n"); - return MIX_RESULT_NOT_CONFIGURED; - } - - /* ---------------------- begin lock --------------------- */ - g_mutex_lock(priv->objlock); - - /* get MixDisplay prop from render param */ - ret = mix_videorenderparams_get_display(render_params, &mix_display); - if (ret != MIX_RESULT_SUCCESS) { - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, - "Failed to get mix_display\n"); - goto cleanup; - } - - /* Is this MixDisplayX11 ? */ - /* TODO: we shall also support MixDisplay other than MixDisplayX11 */ - if (!MIX_IS_DISPLAYX11(mix_display)) { - ret = MIX_RESULT_INVALID_PARAM; - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Not MixDisplayX11\n"); - goto cleanup; - } - - /* cast MixDisplay to MixDisplayX11 */ - mix_display_x11 = MIX_DISPLAYX11(mix_display); - - /* Get Drawable */ - ret = mix_displayx11_get_drawable(mix_display_x11, &drawable); - if (ret != MIX_RESULT_SUCCESS) { - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Failed to get drawable\n"); - goto cleanup; - } - - /* Get Display */ - ret = mix_displayx11_get_display(mix_display_x11, &display); - if (ret != MIX_RESULT_SUCCESS) { - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Failed to get display\n"); - goto cleanup; - } - - /* get src_rect */ - ret = mix_videorenderparams_get_src_rect(render_params, &src_rect); - if (ret != MIX_RESULT_SUCCESS) { - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, - "Failed to get SOURCE src_rect\n"); - goto cleanup; - } - - /* get dst_rect */ - ret = mix_videorenderparams_get_dest_rect(render_params, &dst_rect); - if (ret != MIX_RESULT_SUCCESS) { - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, "Failed to get dst_rect\n"); - goto cleanup; - } - /* get va_cliprects */ - ret = mix_videorenderparams_get_cliprects_internal(render_params, - &va_cliprects, &number_of_cliprects); - if (ret != MIX_RESULT_SUCCESS) { - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, - "Failed to get va_cliprects\n"); - goto cleanup; - } - - /* get surface id from frame */ - ret = mix_videoframe_get_frame_id(frame, &va_surface_id); - if (ret != MIX_RESULT_SUCCESS) { - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, - "Failed to get va_surface_id\n"); - goto cleanup; - } - guint64 timestamp = 0; - mix_videoframe_get_timestamp(frame, ×tamp); - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_VERBOSE, "Displaying surface ID %d, timestamp %"G_GINT64_FORMAT"\n", (int)va_surface_id, timestamp); - - guint32 frame_structure = 0; - mix_videoframe_get_frame_structure(frame, &frame_structure); - /* TODO: the last param of vaPutSurface is de-interlacing flags, - what is value shall be*/ - va_status = vaPutSurfaceBuf(priv->va_display, (VASurfaceID) va_surface_id, - drawable, iovout->data, &iovout->data_size, src_rect.x, src_rect.y, src_rect.width, src_rect.height, - dst_rect.x, dst_rect.y, dst_rect.width, dst_rect.height, - va_cliprects, number_of_cliprects, frame_structure); - - if (va_status != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, - "Failed vaPutSurface() : va_status = %d\n", va_status); - goto cleanup; - } - - ret = MIX_RESULT_SUCCESS; - -cleanup: - - MIXUNREF(mix_display, 
mix_display_unref)
-	/* MIXUNREF(render_params, mix_videorenderparams_unref)*/
-
-	/* ---------------------- end lock --------------------- */
-	g_mutex_unlock(priv->objlock);
-
-	mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_VERBOSE, "End\n");
-
-	return ret;
-}
-#endif
-
-MIX_RESULT mix_video_get_decoded_data(MixVideo * mix, MixIOVec * iovout,
-		MixVideoRenderParams * render_params, MixVideoFrame *frame) {
-	MixVideoClass *klass = MIX_VIDEO_GET_CLASS(mix);
-
-	if (klass->get_decoded_data_func) {
-		return klass->get_decoded_data_func(mix, iovout, render_params, frame);
-	}
-	return MIX_RESULT_NOTIMPL;
-}
+#endif /* ANDROID */

 MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[],
 		gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
@@ -1508,8 +1440,12 @@ MIX_RESULT mix_video_eos_default(MixVideo * mix) {
 	if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format
 			!= NULL) {
 		ret = mix_videofmt_eos(priv->video_format);
-		/* frame manager will set EOS flag to be TRUE */
-		ret = mix_framemanager_eos(priv->frame_manager);
+		/* We should not call mix_framemanager_eos() here.
+		 * MixVideoFormat* is responsible for calling this function.
+		 * Comment the function call out here!
+		 */
+		/* frame manager will set EOS flag to be TRUE */
+		/* ret = mix_framemanager_eos(priv->frame_manager); */
 	} else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE
 			&& priv->video_format_enc != NULL) {
 		/*No framemanager now*/

diff --git a/mix_video/src/mixvideo.h b/mix_video/src/mixvideo.h
index 91e9011..1184d05 100644
--- a/mix_video/src/mixvideo.h
+++ b/mix_video/src/mixvideo.h
@@ -11,7 +11,7 @@

 #include

-#include "mixdrmparams.h"
+#include
 #include "mixvideoinitparams.h"
 #include "mixvideoconfigparamsdec.h"
 #include "mixvideoconfigparamsenc.h"
@@ -65,10 +65,6 @@ typedef MIX_RESULT (*MixVideoReleaseFrameFunc)(MixVideo * mix,
 typedef MIX_RESULT (*MixVideoRenderFunc)(MixVideo * mix,
 		MixVideoRenderParams * render_params, MixVideoFrame *frame);

-typedef MIX_RESULT (*MixVideoGetDecodedDataFunc)(MixVideo * mix,
-		MixIOVec * iovout, MixVideoRenderParams * render_params,
-		MixVideoFrame *frame);
-
 typedef MIX_RESULT (*MixVideoEncodeFunc)(MixVideo * mix, MixBuffer * bufin[],
 		gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
 		MixVideoEncodeParams * encode_params);
@@ -79,8 +75,7 @@ typedef MIX_RESULT (*MixVideoEOSFunc)(MixVideo * mix);

 typedef MIX_RESULT (*MixVideoGetStateFunc)(MixVideo * mix, MixState * state);

-typedef MIX_RESULT
-(*MixVideoGetMixBufferFunc)(MixVideo * mix, MixBuffer ** buf);
+typedef MIX_RESULT (*MixVideoGetMixBufferFunc)(MixVideo * mix, MixBuffer ** buf);

 typedef MIX_RESULT (*MixVideoReleaseMixBufferFunc)(MixVideo * mix,
 		MixBuffer * buf);
@@ -91,9 +86,6 @@ typedef MIX_RESULT (*MixVideoGetMaxCodedBufferSizeFunc) (MixVideo * mix,
 /**
  * MixVideo:
  * @parent: Parent object.
- * @streamState: Current state of the stream
- * @decodeMode: Current decode mode of the device. This value is valid only when @codingMode equals #MIX_CODING_ENCODE.
- * @encoding: TBD...
* * MI-X Video object */ @@ -101,8 +93,6 @@ struct _MixVideo { /*< public > */ GObject parent; - /*< public > */ - /*< private > */ gpointer context; }; @@ -116,8 +106,7 @@ struct _MixVideoClass { /*< public > */ GObjectClass parent_class; - /* class members */ - + /*< virtual public >*/ MixVideoGetVersionFunc get_version_func; MixVideoInitializeFunc initialize_func; MixVideoDeinitializeFunc deinitialize_func; @@ -127,7 +116,6 @@ struct _MixVideoClass { MixVideoGetFrameFunc get_frame_func; MixVideoReleaseFrameFunc release_frame_func; MixVideoRenderFunc render_func; - MixVideoGetDecodedDataFunc get_decoded_data_func; MixVideoEncodeFunc encode_func; MixVideoFlushFunc flush_func; MixVideoEOSFunc eos_func; @@ -172,45 +160,386 @@ MixVideo *mix_video_ref(MixVideo * mix); /* Class Methods */ +/** + * mix_video_get_version: + * @mix: #MixVideo object. + * @major: Pointer to an unsigned integer indicating the major version number of this MI-X Video library + * @minor: Pointer to an unsigned integer indicating the minor version number of this MI-X Video library + * @returns: Common Video Error Return Codes + * + * This function will return the major and minor version numbers of the library. + */ MIX_RESULT mix_video_get_version(MixVideo * mix, guint * major, guint * minor); + + +/** + * mix_video_initialize: + * @mix: #MixVideo object. + * @mode: Enum value to indicate encode or decode mode + * @init_params: MixVideoInitParams object which includes display type and pointer to display, encode or decode mode + * @drm_init_params: MixDrmParams defined in Moorestown MI-X DRM API. + * This can be null if content is not protected. + * @returns: In addition to the Common Video Error Return Codes, + * the following error codes may be returned. + * + * MIX_RESULT_ALREADY_INIT, mix_video_initialize() has already been called. + * + * + * This function will return the major and minor version numbers of the library. + */ MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode, MixVideoInitParams * init_params, MixDrmParams * drm_init_params); +/** + * mix_video_deinitialize: + * @mix: #MixVideo object. + * @returns: Common Video Error Return Codes + * + * This function will un-initialize a session with this MI-X instance. During this call, the + * LibVA session is closed and all resources including surface buffers, #MixBuffers and + * #MixVideoFrame objects are freed. This function is called by the application once + * mix_video_initialize() is called, before exiting. + */ MIX_RESULT mix_video_deinitialize(MixVideo * mix); + +/** + * mix_video_configure: + * @mix: #MixVideo object. + * @config_params: Pointer to #MixVideoConfigParams object (either #MixVideoConfigParamsDec or + * #MixVideoConfigParamsEnc for specific media type) + * @drm_config_params: Pointer to #MixDrmParams defined in Moorestown MI-X DRM API. + * This can be null if content is not protected. + * @returns: In addition to the Common Video Error Return Codes, + * the following error codes may be returned. + * + * MIX_RESULT_RESOURCES_NOTAVAIL, HW accelerated decoding is not available. + * MIX_RESULT_NOTSUPPORTED, A requested parameter is not supported or not available. + * + * + * This function can be used to configure a stream for the current session. 
+ * The caller can use this function to do the following: + * + * Choose frame ordering mode (display order or decode order) + * Choose encode or decode mode + * Choose whether display frames are enqueued for encode mode + * Provide stream parameters + * + + * This function can only be called after mix_video_initialize() has been called + */ MIX_RESULT mix_video_configure(MixVideo * mix, MixVideoConfigParams * config_params, MixDrmParams * drm_config_params); + +/** + * mix_video_get_config: + * @mix: #MixVideo object. + * @config_params: Pointer to pointer to #MixVideoConfigParams object defined in + * description of mix_video_configure() + * @returns: Common Video Error Return Codes + * + * This function can be used to get the current configuration of a stream for the current session. + * A #MixVideoConfigParams object will be returned, which can be used to get each of the + * parameter current values. The caller will need to release this object when it is no + * longer needed. + * + * This function can only be called once mix_video_configure() has been called. + * + * See description of mix_video_configure() for #MixVideoConfigParams object details. + * For mix_video_get_config(), all input parameter fields become OUT parameters. + * + */ MIX_RESULT mix_video_get_config(MixVideo * mix, MixVideoConfigParams ** config_params); +/** + * mix_video_decode: + * @mix: #MixVideo object. + * @bufin: Array of pointers to #MixBuffer objects, described in mix_video_get_mixbuffer() * + * @bufincnt: Number of #MixBuffer objects + * @decode_params: #MixVideoDecodeParams object + * @returns: In addition to the Common Video Error Return Codes, + * the following error codes may be returned. + * + * + * MIX_RESULT_OUTOFSURFACES, No surfaces available for decoding. Nothing will be done. + * Caller can try again with the same MixBuffers later when surfaces may have been freed. + * + * + * + * + * This function is used to initiate HW accelerated decoding of encoded data buffers. This + * function is used to decode to a surface buffer, which can then be rendered using + * mix_video_render(). + * Video data input buffers are provided in a scatter/gather list of reference counted + * #MixBuffers. The input #MixBuffers are retained until a full frame of coded data is + * accumulated, at which point it will be decoded and the input buffers released. The + * decoded data will be stored in a surface buffer until it is rendered. The caller must + * provide the presentation timestamp and any stream discontinuity for the video frame + * for the encoded data, in the #MixVideoDecodeParams object. These will be preserved + * and provided for the #MixVideoFrame object that contains the decoded data for this + * frame data. + * + * + * + * As only one timestamp is passed in for the buffer, there should be no more than one + * video frame included in the encoded data buffer provided in a single call to + * mix_video_decode(). If partial frame data is passed in over multiple calls to + * mix_video_decode(), the same timestamp should be provided with each call having + * data associated with the same frame. + * + * + * + * The application should request a #MixBuffer object using mix_video_get_mixbuffer(), + * initialize the #MixBuffer with the data pointer to the coded input data, along with the + * size of the input data buffer, and optionally can provide a token value and a callback + * function pointer. 
When the MixBuffer is released by both the application and #MixVideo, + * the callback will be called and passed the token value and the input data buffer + * pointer for any buffer management processing that the application needs or wants to + * perform (such as releasing the actual coded data buffer that was assigned to that + * #MixBuffer). MixBuffers are allocated in a pool, and the application determines the size + * of this pool, which is passed to mix_video_configure() in #the MixVideoConfigParams object. + * + */ MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, MixVideoDecodeParams * decode_params); + +/** + * mix_video_get_frame: + * @mix: #MixVideo object. + * @frame: A pointer to a pointer to a #MixVideoFrame object + * @returns: In addition to the Common Video Error Return Codes, + * the following error codes may be returned. + * + * + * MIX_RESULT_FRAME_NOTAVAIL, No decoded frames are available. + * + * + * MIX_RESULT_EOS, No more decoded frames are available, + * since end of stream has been encountered. + * + * + * + * + * This function returns a frame object that represents the next frame ID and includes + * timestamp and discontinuity information. If display frame ordering has been + * configured, it is the next frame displayed. If decode order frame ordering has been + * configured, it is the next frame decoded. In both cases the timestamp reflects the + * presentation timestamp. For encode mode the frame order is always display order. + * + * + * + * The frame object is a reference counted object that represents the frame. The + * application can retain this frame object as long as needed to display the frame and + * redisplay as needed. At presentation time, the application can call mix_video_render() + * with this frame object to display the frame immediately. When the application no + * longer needs to display this frame, it should release the object by calling + * mix_video_release_frame(). The application should not modify the reference count or + * delete this object directly. + * + */ MIX_RESULT mix_video_get_frame(MixVideo * mix, MixVideoFrame ** frame); + + +/** + * mix_video_release_frame: + * @mix: #MixVideo object. + * @frame: A pointer to a #MixVideoFrame object, described in mix_video_get_frame() + * @returns: Common Video Error Return Codes + * + * This function releases a frame object that was acquired from mix_video_get_frame(). + */ MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame); + +/** + * mix_video_render: + * @mix: #MixVideo object. + * @render_params: #MixVideoRenderParams object defined below, + * which includes the display window and type, + * src and dest image sizes, deinterlace info, clipping rectangles, + * some post processing parameters, and so forth. + * @frame: Pointer to a #MixVideoFrame object returned from mix_video_get_frame(). + * @returns: Common Video Error Return Codes + * + * This function renders a video frame associated with a MixVideoFrame object to the display. + * The display is either an X11 Pixmap or an X11 Window using the overlay. + */ MIX_RESULT mix_video_render(MixVideo * mix, MixVideoRenderParams * render_params, MixVideoFrame *frame); -MIX_RESULT mix_video_get_decoded_data(MixVideo * mix, MixIOVec * iovout, - MixVideoRenderParams * render_params, MixVideoFrame *frame); +/** + * mix_video_encode: + * @mix: #MixVideo object. 
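[Illustrative note, not part of the patch.] The MixBuffer handshake described in the mix_video_decode() documentation above can be sketched as follows. Only mix_video_get_mixbuffer(), mix_video_decode() and mix_video_release_mixbuffer() are documented entry points; the initializer mix_buffer_set_data_example() and the callback signature are hypothetical stand-ins for the real MixBuffer setter, which this patch does not show.

    #include "mixvideo.h"

    /* hypothetical callback: invoked once both the app and MixVideo release
     * the buffer, receiving the token and the coded-data pointer */
    static void coded_buffer_done_example(gulong token, guchar *data)
    {
        /* free or recycle the coded-data allocation tracked by 'token' */
    }

    MIX_RESULT submit_coded_frame_example(MixVideo *mix, guchar *coded,
            guint size, MixVideoDecodeParams *decode_params)
    {
        MixBuffer *buf = NULL;
        MIX_RESULT ret = mix_video_get_mixbuffer(mix, &buf);
        if (ret != MIX_RESULT_SUCCESS)
            return ret;

        /* hypothetical setter: data pointer, size, token, callback */
        mix_buffer_set_data_example(buf, coded, size, 0, coded_buffer_done_example);

        MixBuffer *bufin[1] = { buf };
        ret = mix_video_decode(mix, bufin, 1, decode_params);

        mix_video_release_mixbuffer(mix, buf);
        return ret;
    }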
+ * @bufin: Array of pointers to #MixBuffer objects, structure defined in mix_video_decode() + * @bufincnt: Number of #MixBuffer objects + * @iovout: Array of #MixIOVec structures, pointing to buffers allocated by the application + * @iovoutcnt: Number of items in iovout array + * @encode_params: #MixVideoEncodeParams object + * @returns: Common Video Error Return Codes + * + * + * This function is used to initiate HW accelerated encoding of uncompressed video input + * buffers. The input buffers may either be uncompressed video in user space buffers, or + * CI frame indexes from libCI captured frames. In order to use CI frame indexes, the + * shared buffer mode should be indicated in the #MixVideoConfigParamsEnc object + * provided to mix_video_configure(). + * + * + * + * Video uncompressed data input buffers are provided in a scatter/gather list of + * reference counted MixBuffers. The input #MixBuffers are considered a complete frame + * of data, and are used for encoding before the input buffers are released. LibCI frame + * indices may also be provided in MixBuffers. + * + * + * + * The encoded data will be copied to the output buffers provided in the array of + * #MixIOVec structures, also in a scatter/gather list. These output buffers are allocated + * by the application. The application can query for the proper size of buffer to allocate + * for this, using mix_video_get_max_coded_buffer_size(). It is suggested that the + * application create a pool of these buffers to pass in, for efficiency. The application will + * also set the buffer_size field in the #MixIOVec structures to the allocated buffer size. + * When the buffers are filled with encoded data by #MixVideo, the data_size will be set to + * the encoded data size placed in the buffer. For any buffer not used for encoded data, + * the data_size will be set to zero. + * + * + * + * Alternatively, if the application does not allocate the output buffers, the data pointers + * in the #MixIOVec structures (still provided by the application) can be set to NULL, + * whereupon #MixVideo will allocate a data buffer for each frame and set the data, + * buffer_size and data_size pointers in the #MixIOVec structures accordingly. + * + * + * + * This is not an efficient method to handle these buffers and it is preferred that + * the application provide pre-allocated buffers. + * + * + * + * The application should request a #MixBuffer object using mix_video_get_mixbuffer(), + * initialize the #MixBuffer with the data pointer to the uncompressed input data or a LibCI + * frame index, along with the size of the input data buffer, and optionally can provide a + * token value and a callback function pointer. When the #MixBuffer is released by both + * the application and #MixVideo, the callback will be called and passed the token value + * and the input data buffer pointer for any buffer management processing that the + * application needs or wants to perform (such as releasing the actual data buffer that + * was assigned to that #MixBuffer). #MixBuffers are allocated in a pool, and the application + * determines the size of this pool, which is passed to mix_video_configure() in the + * #MixVideoConfigParams object. + * + * + * + * The application can choose to enable or disable display of the uncompressed video + * frames using the need_display of the #MixVideoConfigParamsEnc object in + * mix_video_configure(). 
If display is enabled, #MixVideoFrames are enqueued by + * #MixVideo, to be requested by the application with mix_video_get_frame() and used to + * provide to mix_video_render() for rendering before releasing with + * mix_video_release_frame(). If display is disabled, no #MixVideoFrames will be + * enqueued. + * + * + */ MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, MixVideoEncodeParams * encode_params); +/** + * mix_video_flush: + * @mix: #MixVideo object. + * @returns: Common Video Error Return Codes + * + * This function will flush all encoded and decoded buffers that are currently enqueued or + * in the process of decoding. After this call, decoding can commence again, but would + * need to start at the beginning of a sequence (for example, with no dependencies on + * previously decoded reference frames). + */ MIX_RESULT mix_video_flush(MixVideo * mix); +/** + * mix_video_eos: + * @mix: #MixVideo object. + * @returns: Common Video Error Return Codes + * + * This function will signal end of stream to #MixVideo. This can be used to finalize + * decoding of the last frame and other end of stream processing. #MixVideo will complete + * the decoding of all buffers received, and will continue to provide the decoded frame + * objects by means of the mix_video_get_frame() until all frames have been provided, + * at which point mix_video_get_frame() will return MIX_RESULT_EOS. + */ MIX_RESULT mix_video_eos(MixVideo * mix); + +/** + * mix_video_get_state: + * @mix: #MixVideo object. + * @state: Current state of MI-X session. + * @returns: Common Video Error Return Codes + * + * This function returns the current state of the MI-X session. + */ MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state); +/** + * mix_video_get_mixbuffer: + * @mix: #MixVideo object. + * @buf: A pointer to a pointer to a #MixBuffer object + * @returns: Common Video Error Return Codes + * + * + * This function returns a frame object that represents the next frame ID and includes + * timestamp and discontinuity information. If display frame ordering has been + * configured, it is the next frame displayed. If decode order frame ordering has been + * configured, it is the next frame decoded. In both cases the timestamp reflects the + * presentation timestamp. + * + * + * + * The frame object is a reference counted object that represents the frame. The + * application can retain this frame object as long as needed to display the frame and + * redisplay as needed. At presentation time, the application can call mix_video_render() + * with this frame object to display the frame immediately. When the application no + * longer needs to display this frame, it should release the object by calling + * mix_video_release_frame(). The application should not modify the reference count or + * delete this object directly. + * + * + */ MIX_RESULT mix_video_get_mixbuffer(MixVideo * mix, MixBuffer ** buf); + +/** + * mix_video_release_mixbuffer: + * @mix: #MixVideo object. + * @buf: A pointer to a #MixBuffer object, described in mix_video_get_mixbuffer(). + * @returns: Common Video Error Return Codes + * + * This function releases a frame object that was acquired from mix_video_get_mixbuffer(). + */ MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf); + +/** + * mix_video_get_max_coded_buffer_size: + * @mix: #MixVideo object. + * @bufsize: Pointer to guint. 
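[Illustrative note, not part of the patch.] For the output side of mix_video_encode(), the documentation above says the application sizes its buffers with mix_video_get_max_coded_buffer_size() and reads back data_size after the call. A sketch under those assumptions (the MixIOVec field names data, buffer_size and data_size follow the documentation; error handling trimmed):

    #include <glib.h>
    #include "mixvideo.h"

    MIX_RESULT encode_one_frame_example(MixVideo *mix, MixBuffer *bufin[],
            gint bufincnt, MixVideoEncodeParams *encode_params)
    {
        guint max_size = 0;
        MIX_RESULT ret = mix_video_get_max_coded_buffer_size(mix, &max_size);
        if (ret != MIX_RESULT_SUCCESS)
            return ret;

        MixIOVec iov = { 0 };
        MixIOVec *iovout[1] = { &iov };

        iov.data = g_malloc(max_size);   /* application-owned output buffer */
        iov.buffer_size = max_size;      /* MixVideo sets iov.data_size on return */

        ret = mix_video_encode(mix, bufin, bufincnt, iovout, 1, encode_params);

        /* ... hand iov.data / iov.data_size to a muxer here ... */
        g_free(iov.data);
        return ret;
    }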
+ * @returns: Common Video Error Return Codes + * + * + * This function can be used to get the maximum size of encoded data buffer needed for + * the mix_video_encode() call. + * + * + * This function can only be called once mix_video_configure() has been called. + * + */ +MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize); + #endif /* __MIX_VIDEO_H__ */ diff --git a/mix_video/src/mixvideocaps.c b/mix_video/src/mixvideocaps.c index 2c98ab3..7e378a9 100644 --- a/mix_video/src/mixvideocaps.c +++ b/mix_video/src/mixvideocaps.c @@ -15,6 +15,13 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideocaps.h" +#ifdef ANDROID +#define mix_strcmp strcmp +#else +#define mix_strcmp g_strcmp0 +#endif + + #define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } static GType _mix_videocaps_type = 0; @@ -179,8 +186,8 @@ mix_videocaps_equal (MixParams * first, MixParams * second) this_second = MIX_VIDEOCAPS (second); /* TODO: add comparison for other properties */ - if (strcmp (this_first->mix_caps, this_second->mix_caps) == 0 - && strcmp (this_first->video_hw_caps, + if (mix_strcmp (this_first->mix_caps, this_second->mix_caps) == 0 + && mix_strcmp (this_first->video_hw_caps, this_second->video_hw_caps) == 0) { // members within this scope equal. chaining up. @@ -236,7 +243,6 @@ MIX_RESULT mix_videocaps_set_video_hw_caps (MixVideoCaps * obj, gchar * video_hw_caps) { MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj); - SAFE_FREE (obj->video_hw_caps); obj->video_hw_caps = g_strdup (video_hw_caps); if (video_hw_caps != NULL && obj->video_hw_caps == NULL) diff --git a/mix_video/src/mixvideoconfigparams.c b/mix_video/src/mixvideoconfigparams.c index c355a4c..6ec641b 100644 --- a/mix_video/src/mixvideoconfigparams.c +++ b/mix_video/src/mixvideoconfigparams.c @@ -8,9 +8,28 @@ /** * SECTION:mixvideoconfigparams - * @short_description: VideoConfig parameters + * @short_description: MI-X Video Configuration Parameter Base Object + * @include: mixvideoconfigparams.h * - * A data object which stores videoconfig specific parameters. + * + * A base object of MI-X video configuration parameter objects. + * + * + * The derived MixVideoConfigParams object is created by the MMF/App + * and provided in the MixVideo mix_video_configure() function. The get and set + * methods for the properties will be available for the caller to set and get information at + * configuration time. It will also be created by MixVideo and returned from the + * mix_video_get_config() function, whereupon the MMF/App can get the get methods to + * obtain current configuration information. + * + * + * There are decode mode objects (for example, MixVideoConfigParamsDec) and encode + * mode objects (for example, MixVideoConfigParamsEnc). Each of these types is refined + * further with media specific objects. The application should create the correct type of + * object to match the media format of the stream to be handled, e.g. if the media + * format of the stream to be decoded is H.264, the application would create a + * MixVideoConfigParamsDecH264 object for the mix_video_configure() call. 
+ * */ #include diff --git a/mix_video/src/mixvideoconfigparams.h b/mix_video/src/mixvideoconfigparams.h index b2bac41..acfa595 100644 --- a/mix_video/src/mixvideoconfigparams.h +++ b/mix_video/src/mixvideoconfigparams.h @@ -67,7 +67,7 @@ struct _MixVideoConfigParams { /*< public > */ MixParams parent; - /*< public > */ + /*< private > */ void *reserved1; void *reserved2; @@ -102,10 +102,11 @@ GType mix_videoconfigparams_get_type(void); * Use this method to create new instance of #MixVideoConfigParams */ MixVideoConfigParams *mix_videoconfigparams_new(void); + /** * mix_videoconfigparams_ref: * @mix: object to add reference - * @returns: the MixVideoConfigParams instance where reference count has been increased. + * @returns: the #MixVideoConfigParams instance where reference count has been increased. * * Add reference count. */ diff --git a/mix_video/src/mixvideoconfigparamsdec.c b/mix_video/src/mixvideoconfigparamsdec.c index 751b124..5491f00 100644 --- a/mix_video/src/mixvideoconfigparamsdec.c +++ b/mix_video/src/mixvideoconfigparamsdec.c @@ -7,10 +7,10 @@ */ /** - * SECTION:mixvideoconfigparams - * @short_description: VideoConfig parameters + * SECTION:mixvideoconfigparamsdec + * @short_description: MI-X Video Decode Configuration Parameter Base Object * - * A data object which stores videoconfig specific parameters. + * A base object of MI-X video decode configuration parameter objects. */ #include diff --git a/mix_video/src/mixvideoconfigparamsdec.h b/mix_video/src/mixvideoconfigparamsdec.h index 6aa9047..fba4b78 100644 --- a/mix_video/src/mixvideoconfigparamsdec.h +++ b/mix_video/src/mixvideoconfigparamsdec.h @@ -68,24 +68,52 @@ struct _MixVideoConfigParamsDec { MixVideoConfigParams parent; /*< public > */ + + /* Frame re-ordering mode */ MixFrameOrderMode frame_order_mode; + + /* Stream header information, such as + * codec_data in GStreamer pipelines */ MixIOVec header; - /* the type of the following members will be changed after MIX API doc is ready */ + /* Mime type */ GString * mime_type; + + /* Frame rate numerator value */ guint frame_rate_num; + + /* Frame rate denominator value */ guint frame_rate_denom; + + /* Picture width */ gulong picture_width; + + /* Picture height */ gulong picture_height; + + /* Render target format */ guint raw_format; + + /* Rate control: CBR, VBR, none. Only valid for encoding. + * This should be set to none for decoding. */ guint rate_control; + /* Size of pool of MixBuffers to allocate */ guint mixbuffer_pool_size; + + /* Extra surfaces for MixVideoFrame objects to be allocated */ guint extra_surface_allocation; + /* Reserved for future use */ void *reserved1; + + /* Reserved for future use */ void *reserved2; + + /* Reserved for future use */ void *reserved3; + + /* Reserved for future use */ void *reserved4; }; @@ -119,7 +147,7 @@ MixVideoConfigParamsDec *mix_videoconfigparamsdec_new(void); /** * mix_videoconfigparamsdec_ref: * @mix: object to add reference - * @returns: the MixVideoConfigParamsDec instance where reference count has been increased. + * @returns: the #MixVideoConfigParamsDec instance where reference count has been increased. * * Add reference count. */ @@ -135,58 +163,213 @@ MixVideoConfigParamsDec *mix_videoconfigparamsdec_ref(MixVideoConfigParamsDec * /* Class Methods */ + +/** + * mix_videoconfigparamsdec_set_frame_order_mode: + * @obj: #MixVideoConfigParamsDec object + * @frame_order_mode: Frame re-ordering mode + * @returns: Common Video Error Return Codes + * + * Set frame order mode. 
+ */ MIX_RESULT mix_videoconfigparamsdec_set_frame_order_mode( MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode); +/** + * mix_videoconfigparamsdec_get_frame_order_mode: + * @obj: #MixVideoConfigParamsDec object + * @frame_order_mode: pointer to frame re-ordering mode + * @returns: Common Video Error Return Codes + * + * Get frame order mode. + */ MIX_RESULT mix_videoconfigparamsdec_get_frame_order_mode( MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode); +/** + * mix_videoconfigparamsdec_set_header: + * @obj: #MixVideoConfigParamsDec object + * @header: Stream header information, such as codec_data in GStreamer pipelines + * @returns: Common Video Error Return Codes + * + * Set stream header information. + */ MIX_RESULT mix_videoconfigparamsdec_set_header(MixVideoConfigParamsDec * obj, MixIOVec *header); -/* caller is responsible to g_free MixIOVec::data field */ +/** + * mix_videoconfigparamsdec_get_header: + * @obj: #MixVideoConfigParamsDec object + * @header: Pointer to pointer of Stream header information + * @returns: Common Video Error Return Codes + * + * Get stream header information. + * + * Caller is responsible to g_free (*header)->data field and *header + * + */ MIX_RESULT mix_videoconfigparamsdec_get_header(MixVideoConfigParamsDec * obj, MixIOVec ** header); +/** + * mix_videoconfigparamsdec_set_mime_type: + * @obj: #MixVideoConfigParamsDec object + * @mime_type: mime type + * @returns: Common Video Error Return Codes + * + * Set stream mime type + */ MIX_RESULT mix_videoconfigparamsdec_set_mime_type(MixVideoConfigParamsDec * obj, const gchar * mime_type); +/** + * mix_videoconfigparamsdec_get_mime_type: + * @obj: #MixVideoConfigParamsDec object + * @mime_type: Pointer to pointer of type gchar + * @returns: Common Video Error Return Codes + * + * Get mime type + * + * Caller is responsible to g_free *mime_type + * + */ MIX_RESULT mix_videoconfigparamsdec_get_mime_type(MixVideoConfigParamsDec * obj, gchar ** mime_type); +/** + * mix_videoconfigparamsdec_set_frame_rate: + * @obj: #MixVideoConfigParamsDec object + * @frame_rate_num: Frame rate numerator value + * @frame_rate_denom: Frame rate denominator value * + * @returns: Common Video Error Return Codes + * + * Set frame rate + */ MIX_RESULT mix_videoconfigparamsdec_set_frame_rate(MixVideoConfigParamsDec * obj, guint frame_rate_num, guint frame_rate_denom); +/** + * mix_videoconfigparamsdec_get_frame_rate: + * @obj: #MixVideoConfigParamsDec object + * @frame_rate_num: Frame rate numerator value to be returned + * @frame_rate_denom: Frame rate denominator value to be returned + * @returns: Common Video Error Return Codes + * + * Get frame rate + */ MIX_RESULT mix_videoconfigparamsdec_get_frame_rate(MixVideoConfigParamsDec * obj, guint * frame_rate_num, guint * frame_rate_denom); +/** + * mix_videoconfigparamsdec_set_picture_res: + * @obj: #MixVideoConfigParamsDec object + * @picture_width: Picture width + * @picture_height: Picture height + * @returns: Common Video Error Return Codes + * + * Set video resolution + */ MIX_RESULT mix_videoconfigparamsdec_set_picture_res(MixVideoConfigParamsDec * obj, guint picture_width, guint picture_height); +/** + * mix_videoconfigparamsdec_get_picture_res: + * @obj: #MixVideoConfigParamsDec object + * @picture_width: Picture width to be returned + * @picture_height: Picture height to be returned + * @returns: Common Video Error Return Codes + * + * Get video resolution + */ MIX_RESULT mix_videoconfigparamsdec_get_picture_res(MixVideoConfigParamsDec * obj, 
guint * picture_width, guint * picture_height); +/** + * mix_videoconfigparamsdec_set_raw_format: + * @obj: #MixVideoConfigParamsDec object + * @raw_format: Render target format + * @returns: Common Video Error Return Codes + * + * Set Render target format + */ MIX_RESULT mix_videoconfigparamsdec_set_raw_format(MixVideoConfigParamsDec * obj, guint raw_format); +/** + * mix_videoconfigparamsdec_get_raw_format: + * @obj: #MixVideoConfigParamsDec object + * @raw_format: Render target format to be returned + * @returns: Common Video Error Return Codes + * + * Get Render target format + */ MIX_RESULT mix_videoconfigparamsdec_get_raw_format(MixVideoConfigParamsDec * obj, guint *raw_format); +/** + * mix_videoconfigparamsdec_set_rate_control: + * @obj: #MixVideoConfigParamsDec object + * @rate_control: Rate control: CBR, VBR, none. Only valid for encoding. + * This should be set to none for decoding. + * @returns: Common Video Error Return Codes + * + * Set rate control + */ MIX_RESULT mix_videoconfigparamsdec_set_rate_control(MixVideoConfigParamsDec * obj, guint rate_control); +/** + * mix_videoconfigparamsdec_get_rate_control: + * @obj: #MixVideoConfigParamsDec object + * @rate_control: Rate control to be returned + * @returns: Common Video Error Return Codes + * + * Get rate control + */ MIX_RESULT mix_videoconfigparamsdec_get_rate_control(MixVideoConfigParamsDec * obj, guint *rate_control); +/** + * mix_videoconfigparamsdec_set_buffer_pool_size: + * @obj: #MixVideoConfigParamsDec object + * @bufpoolsize: Size of pool of #MixBuffers to allocate + * @returns: Common Video Error Return Codes + * + * Set buffer pool size + */ MIX_RESULT mix_videoconfigparamsdec_set_buffer_pool_size(MixVideoConfigParamsDec * obj, guint bufpoolsize); +/** + * mix_videoconfigparamsdec_get_buffer_pool_size: + * @obj: #MixVideoConfigParamsDec object + * @bufpoolsize: Size of pool of #MixBuffers to be returned + * @returns: Common Video Error Return Codes + * + * Get buffer pool size + */ MIX_RESULT mix_videoconfigparamsdec_get_buffer_pool_size(MixVideoConfigParamsDec * obj, guint *bufpoolsize); +/** + * mix_videoconfigparamsdec_set_extra_surface_allocation: + * @obj: #MixVideoConfigParamsDec object + * @extra_surface_allocation: Extra surfaces for #MixVideoFrame objects to be allocated + * @returns: Common Video Error Return Codes + * + * Set extra surface allocation + */ MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation(MixVideoConfigParamsDec * obj, guint extra_surface_allocation); +/** + * mix_videoconfigparamsdec_get_extra_surface_allocation: + * @obj: #MixVideoConfigParamsDec object + * @extra_surface_allocation: Extra surfaces for #MixVideoFrame objects to be retuned + * @returns: Common Video Error Return Codes + * + * Get extra surface allocation + */ MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation(MixVideoConfigParamsDec * obj, guint *extra_surface_allocation); diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.c b/mix_video/src/mixvideoconfigparamsdec_h264.c index 271cbf7..8047171 100644 --- a/mix_video/src/mixvideoconfigparamsdec_h264.c +++ b/mix_video/src/mixvideoconfigparamsdec_h264.c @@ -7,11 +7,11 @@ No license under any patent, copyright, trade secret or other intellectual prope */ /** -* SECTION:mixvideoconfigparamsdec_h264 -* @short_description: VideoConfig parameters -* -* A data object which stores videoconfig specific parameters. 
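[Illustrative note, not part of the patch.] Pulled together, the decode-configuration setters documented above are typically used like this (values illustrative; error checks elided):

    #include "mixvideoconfigparamsdec.h"

    MixVideoConfigParamsDec *make_dec_config_example(void)
    {
        MixVideoConfigParamsDec *cfg = mix_videoconfigparamsdec_new();

        mix_videoconfigparamsdec_set_mime_type(cfg, "video/x-h264");
        mix_videoconfigparamsdec_set_frame_rate(cfg, 30000, 1001); /* 29.97 fps */
        mix_videoconfigparamsdec_set_picture_res(cfg, 1280, 720);
        mix_videoconfigparamsdec_set_buffer_pool_size(cfg, 8);
        mix_videoconfigparamsdec_set_extra_surface_allocation(cfg, 4);

        return cfg;
    }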
-*/ + * SECTION:mixvideoconfigparamsdec_h264 + * @short_description: MI-X Video H.264 Decode Configuration Parameter + * + * MI-X video H.264 decode configuration parameter objects. + */ #include "mixvideoconfigparamsdec_h264.h" diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.h b/mix_video/src/mixvideoconfigparamsdec_h264.h index 6d5f97d..f1d6e16 100644 --- a/mix_video/src/mixvideoconfigparamsdec_h264.h +++ b/mix_video/src/mixvideoconfigparamsdec_h264.h @@ -71,9 +71,17 @@ struct _MixVideoConfigParamsDecH264 /*< public > */ /* TODO: Add H.264 configuration paramters */ + + /* Reserved for future use */ void *reserved1; + + /* Reserved for future use */ void *reserved2; + + /* Reserved for future use */ void *reserved3; + + /* Reserved for future use */ void *reserved4; }; @@ -108,7 +116,7 @@ MixVideoConfigParamsDecH264 *mix_videoconfigparamsdec_h264_new (void); /** * mix_videoconfigparamsdec_h264_ref: * @mix: object to add reference -* @returns: the MixVideoConfigParamsDecH264 instance where reference count has been increased. +* @returns: #the MixVideoConfigParamsDecH264 instance where reference count has been increased. * * Add reference count. */ diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.c b/mix_video/src/mixvideoconfigparamsdec_mp42.c index 17329e1..fe7efc6 100644 --- a/mix_video/src/mixvideoconfigparamsdec_mp42.c +++ b/mix_video/src/mixvideoconfigparamsdec_mp42.c @@ -8,11 +8,12 @@ /** * SECTION:mixvideoconfigparamsdec_mp42 - * @short_description: VideoConfig parameters + * @short_description: MI-X Video MPEG 4:2 Decode Configuration Parameter * - * A data object which stores videoconfig specific parameters. + * MI-X video MPEG 4:2 decode configuration parameter objects. */ + #include "mixvideolog.h" #include "mixvideoconfigparamsdec_mp42.h" diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.h b/mix_video/src/mixvideoconfigparamsdec_mp42.h index 5f68b42..3ac3b8c 100644 --- a/mix_video/src/mixvideoconfigparamsdec_mp42.h +++ b/mix_video/src/mixvideoconfigparamsdec_mp42.h @@ -69,12 +69,22 @@ struct _MixVideoConfigParamsDecMP42 { /*< public > */ + /* MPEG version */ guint mpegversion; + + /* DivX version */ guint divxversion; + /* Reserved for future use */ void *reserved1; + + /* Reserved for future use */ void *reserved2; + + /* Reserved for future use */ void *reserved3; + + /* Reserved for future use */ void *reserved4; }; @@ -108,7 +118,7 @@ MixVideoConfigParamsDecMP42 *mix_videoconfigparamsdec_mp42_new(void); /** * mix_videoconfigparamsdec_mp42_ref: * @mix: object to add reference - * @returns: the MixVideoConfigParamsDecMP42 instance where reference count has been increased. + * @returns: the #MixVideoConfigParamsDecMP42 instance where reference count has been increased. * * Add reference count. 
*/ @@ -126,15 +136,49 @@ MixVideoConfigParamsDecMP42 /* Class Methods */ /* TODO: Add getters and setters for other properties */ + + +/** + * mix_videoconfigparamsdec_mp42_set_mpegversion: + * @obj: #MixVideoConfigParamsDecMP42 object + * @version: MPEG version + * @returns: Common Video Error Return Codes + * + * Set MPEG version + */ MIX_RESULT mix_videoconfigparamsdec_mp42_set_mpegversion( MixVideoConfigParamsDecMP42 *obj, guint version); +/** + * mix_videoconfigparamsdec_mp42_get_mpegversion: + * @obj: #MixVideoConfigParamsDecMP42 object + * @version: MPEG version to be returned + * @returns: Common Video Error Return Codes + * + * Get MPEG version + */ MIX_RESULT mix_videoconfigparamsdec_mp42_get_mpegversion( MixVideoConfigParamsDecMP42 *obj, guint *version); +/** + * mix_videoconfigparamsdec_mp42_set_divxversion: + * @obj: #MixVideoConfigParamsDecMP42 object + * @version: DivX version + * @returns: Common Video Error Return Codes + * + * Set DivX version + */ MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion( MixVideoConfigParamsDecMP42 *obj, guint version); +/** + * mix_videoconfigparamsdec_mp42_set_divxversion: + * @obj: #MixVideoConfigParamsDecMP42 object + * @version: DivX version to be returned + * @returns: Common Video Error Return Codes + * + * Get DivX version + */ MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion( MixVideoConfigParamsDecMP42 *obj, guint *version); diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.c b/mix_video/src/mixvideoconfigparamsdec_vc1.c index fdce4f3..635487c 100644 --- a/mix_video/src/mixvideoconfigparamsdec_vc1.c +++ b/mix_video/src/mixvideoconfigparamsdec_vc1.c @@ -8,11 +8,12 @@ /** * SECTION:mixvideoconfigparamsdec_vc1 - * @short_description: VideoConfig parameters + * @short_description: MI-X Video VC-1 Decode Configuration Parameter * - * A data object which stores videoconfig specific parameters. + * MI-X video VC-1 decode configuration parameter objects. */ + #include "mixvideoconfigparamsdec_vc1.h" static GType _mix_videoconfigparamsdec_vc1_type = 0; diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.h b/mix_video/src/mixvideoconfigparamsdec_vc1.h index ecf90e5..9d0744b 100644 --- a/mix_video/src/mixvideoconfigparamsdec_vc1.h +++ b/mix_video/src/mixvideoconfigparamsdec_vc1.h @@ -72,12 +72,23 @@ struct _MixVideoConfigParamsDecVC1 /* TODO: Add VC1 configuration paramters */ /* TODO: wmv_version and fourcc type might be changed later */ + + /* WMV version */ guint wmv_version; + + /* FourCC code */ guint fourcc; + /* Reserved for future use */ void *reserved1; + + /* Reserved for future use */ void *reserved2; + + /* Reserved for future use */ void *reserved3; + + /* Reserved for future use */ void *reserved4; }; @@ -112,7 +123,7 @@ MixVideoConfigParamsDecVC1 *mix_videoconfigparamsdec_vc1_new (void); /** * mix_videoconfigparamsdec_vc1_ref: * @mix: object to add reference -* @returns: the MixVideoConfigParamsDecVC1 instance where reference count has been increased. +* @returns: the #MixVideoConfigParamsDecVC1 instance where reference count has been increased. * * Add reference count. 
*/ diff --git a/mix_video/src/mixvideoconfigparamsenc.c b/mix_video/src/mixvideoconfigparamsenc.c index 44a31ce..28cd288 100644 --- a/mix_video/src/mixvideoconfigparamsenc.c +++ b/mix_video/src/mixvideoconfigparamsenc.c @@ -8,11 +8,12 @@ /** * SECTION:mixvideoconfigparamsenc - * @short_description: VideoConfig parameters + * @short_description: MI-X Video Encode Configuration Parameter Base Object * - * A data object which stores videoconfig specific parameters. + * A base object of MI-X video encode configuration parameter objects. */ + #include #include "mixvideolog.h" #include "mixvideoconfigparamsenc.h" diff --git a/mix_video/src/mixvideoconfigparamsenc.h b/mix_video/src/mixvideoconfigparamsenc.h index 6a1dfff..be7ec9f 100644 --- a/mix_video/src/mixvideoconfigparamsenc.h +++ b/mix_video/src/mixvideoconfigparamsenc.h @@ -72,35 +72,76 @@ struct _MixVideoConfigParamsEnc { /* the type of the following members will be changed after MIX API doc is ready */ - MixProfile profile; - MixRawTargetFormat raw_format; - MixRateControl rate_control; + /* Encoding profile */ + MixProfile profile; + /* Raw format to be encoded */ + MixRawTargetFormat raw_format; + + /* Rate control mode */ + MixRateControl rate_control; + + /* Bitrate when rate control is used */ guint bitrate; + + /* Numerator of frame rate */ guint frame_rate_num; + + /* Denominator of frame rate */ guint frame_rate_denom; + + /* The initial QP value */ guint initial_qp; + + /* The minimum QP value */ guint min_qp; + + /* Number of frames between key frames (GOP size) */ guint intra_period; + + /* Width of video frame */ guint16 picture_width; + + /* Height of the video frame */ guint16 picture_height; + /* Mime type, reserved */ GString * mime_type; + + /* Encode target format */ MixEncodeTargetFormat encode_format; + /* Size of the pool of MixBuffer objects */ guint mixbuffer_pool_size; + /* Are buffers shared between capture and encoding drivers */ gboolean share_buf_mode; + /* Array of frame IDs created by capture library */ gulong * ci_frame_id; + + /* Size of the array ci_frame_id */ guint ci_frame_num; + /* < private > */ gulong draw; + + /*< public > */ + + /* Indicates whether MixVideoFrames suitable for displaying + * need to be enqueued for retrieval using mix_video_get_frame() */ gboolean need_display; + /* Reserved for future use */ void *reserved1; + + /* Reserved for future use */ void *reserved2; + + /* Reserved for future use */ void *reserved3; + + /* Reserved for future use */ void *reserved4; }; @@ -134,7 +175,7 @@ MixVideoConfigParamsEnc *mix_videoconfigparamsenc_new(void); /** * mix_videoconfigparamsenc_ref: * @mix: object to add reference - * @returns: the MixVideoConfigParamsEnc instance where reference count has been increased. + * @returns: the #MixVideoConfigParamsEnc instance where reference count has been increased. * * Add reference count. 
*/ @@ -150,101 +191,377 @@ MixVideoConfigParamsEnc *mix_videoconfigparamsenc_ref(MixVideoConfigParamsEnc * /* Class Methods */ - +/** + * mix_videoconfigparamsenc_set_mime_type: + * @obj: #MixVideoConfigParamsEnc object + * @mime_type: Mime type + * @returns: Common Video Error Return Codes + * + * Set mime type + */ MIX_RESULT mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj, const gchar * mime_type); +/** + * mix_videoconfigparamsenc_get_mime_type: + * @obj: #MixVideoConfigParamsEnc object + * @mime_type: Mime type to be returned + * @returns: Common Video Error Return Codes + * + * Get mime type + * + * + * Caller is responsible to g_free *mime_type + * + */ MIX_RESULT mix_videoconfigparamsenc_get_mime_type(MixVideoConfigParamsEnc * obj, gchar ** mime_type); + +/** + * mix_videoconfigparamsenc_set_frame_rate: + * @obj: #MixVideoConfigParamsEnc object + * @frame_rate_num: Numerator of frame rate + * @frame_rate_denom: Denominator of frame rate + * @returns: Common Video Error Return Codes + * + * Set frame rate + */ MIX_RESULT mix_videoconfigparamsenc_set_frame_rate(MixVideoConfigParamsEnc * obj, guint frame_rate_num, guint frame_rate_denom); +/** + * mix_videoconfigparamsenc_get_frame_rate: + * @obj: #MixVideoConfigParamsEnc object + * @frame_rate_num: Numerator of frame rate to be returned + * @frame_rate_denom: Denominator of frame rate to be returned + * @returns: Common Video Error Return Codes + * + * Get frame rate + */ MIX_RESULT mix_videoconfigparamsenc_get_frame_rate(MixVideoConfigParamsEnc * obj, guint * frame_rate_num, guint * frame_rate_denom); +/** + * mix_videoconfigparamsenc_set_picture_res: + * @obj: #MixVideoConfigParamsEnc object + * @picture_width: Width of video frame + * @picture_height: Height of the video frame + * @returns: Common Video Error Return Codes + * + * Set width and height of video frame + */ MIX_RESULT mix_videoconfigparamsenc_set_picture_res(MixVideoConfigParamsEnc * obj, guint picture_width, guint picture_height); +/** + * mix_videoconfigparamsenc_get_picture_res: + * @obj: #MixVideoConfigParamsEnc object + * @picture_width: Width of video frame to be returned + * @picture_height: Height of the video frame to be returned + * @returns: Common Video Error Return Codes + * + * Get width and height of video frame + */ MIX_RESULT mix_videoconfigparamsenc_get_picture_res(MixVideoConfigParamsEnc * obj, guint * picture_width, guint * picture_height); +/** + * mix_videoconfigparamsenc_set_encode_format: + * @obj: #MixVideoConfigParamsEnc object + * @encode_format: Encode target format + * @returns: Common Video Error Return Codes + * + * Set Encode target format + */ MIX_RESULT mix_videoconfigparamsenc_set_encode_format (MixVideoConfigParamsEnc * obj, MixEncodeTargetFormat encode_format); +/** + * mix_videoconfigparamsenc_get_encode_format: + * @obj: #MixVideoConfigParamsEnc object + * @encode_format: Encode target format to be returned + * @returns: Common Video Error Return Codes + * + * Get Encode target format + */ MIX_RESULT mix_videoconfigparamsenc_get_encode_format (MixVideoConfigParamsEnc * obj, MixEncodeTargetFormat * encode_format); +/** + * mix_videoconfigparamsenc_set_bit_rate: + * @obj: #MixVideoConfigParamsEnc object + * @bps: bitrate + * @returns: Common Video Error Return Codes + * + * Set bitrate + */ MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc * obj, guint bps); +/** + * mix_videoconfigparamsenc_get_bit_rate: + * @obj: #MixVideoConfigParamsEnc object + * @bps: bitrate to be returned + * 
@returns: Common Video Error Return Codes + * + * Get bitrate + */ MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc * obj, guint *bps); +/** + * mix_videoconfigparamsenc_set_init_qp: + * @obj: #MixVideoConfigParamsEnc object + * @initial_qp: The initial QP value + * @returns: Common Video Error Return Codes + * + * Set The initial QP value + */ MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj, guint initial_qp); +/** + * mix_videoconfigparamsenc_get_init_qp: + * @obj: #MixVideoConfigParamsEnc object + * @initial_qp: The initial QP value to be returned + * @returns: Common Video Error Return Codes + * + * Get The initial QP value + */ MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc * obj, guint *initial_qp); +/** + * mix_videoconfigparamsenc_set_min_qp: + * @obj: #MixVideoConfigParamsEnc object + * @min_qp: The minimum QP value + * @returns: Common Video Error Return Codes + * + * Set The minimum QP value + */ MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj, guint min_qp); +/** + * mix_videoconfigparamsenc_get_min_qp: + * @obj: #MixVideoConfigParamsEnc object + * @min_qp: The minimum QP value to be returned + * @returns: Common Video Error Return Codes + * + * Get The minimum QP value + */ MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj, guint *min_qp); +/** + * mix_videoconfigparamsenc_set_intra_period: + * @obj: #MixVideoConfigParamsEnc object + * @intra_period: Number of frames between key frames (GOP size) + * @returns: Common Video Error Return Codes + * + * Set Number of frames between key frames (GOP size) + */ MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc * obj, guint intra_period); +/** + * mix_videoconfigparamsenc_get_intra_period: + * @obj: #MixVideoConfigParamsEnc object + * @intra_period: Number of frames between key frames (GOP size) to be returned + * @returns: Common Video Error Return Codes + * + * Get Number of frames between key frames (GOP size) + */ MIX_RESULT mix_videoconfigparamsenc_get_intra_period (MixVideoConfigParamsEnc * obj, guint *intra_period); +/** + * mix_videoconfigparamsenc_set_buffer_pool_size: + * @obj: #MixVideoConfigParamsEnc object + * @bufpoolsize: Size of the pool of #MixBuffer objects + * @returns: Common Video Error Return Codes + * + * Set Size of the pool of #MixBuffer objects + */ MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size(MixVideoConfigParamsEnc * obj, guint bufpoolsize); +/** + * mix_videoconfigparamsenc_set_buffer_pool_size: + * @obj: #MixVideoConfigParamsEnc object + * @bufpoolsize: Size of the pool of #MixBuffer objects to be returned + * @returns: Common Video Error Return Codes + * + * Get Size of the pool of #MixBuffer objects + */ MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size(MixVideoConfigParamsEnc * obj, guint *bufpoolsize); +/** + * mix_videoconfigparamsenc_set_share_buf_mode: + * @obj: #MixVideoConfigParamsEnc object + * @share_buf_mod: A flag to indicate whether buffers are shared + * between capture and encoding drivers or not + * @returns: Common Video Error Return Codes + * + * Set the flag that indicates whether buffers are shared between capture and encoding drivers or not + */ MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode (MixVideoConfigParamsEnc * obj, gboolean share_buf_mod); +/** + * mix_videoconfigparamsenc_get_share_buf_mode: + * @obj: #MixVideoConfigParamsEnc object + * @share_buf_mod: the flag to be returned that 
indicates whether buffers + * are shared between capture and encoding drivers or not + * @returns: Common Video Error Return Codes + * + * Get the flag that indicates whether buffers are shared between capture and encoding drivers or not + */ MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode(MixVideoConfigParamsEnc * obj, gboolean *share_buf_mod); +/** + * mix_videoconfigparamsenc_set_ci_frame_info: + * @obj: #MixVideoConfigParamsEnc object + * @ci_frame_id: Array of frame IDs created by capture library * + * @ci_frame_num: Size of the array ci_frame_id + * @returns: Common Video Error Return Codes + * + * Set CI frame information + */ MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj, gulong * ci_frame_id, guint ci_frame_num); +/** + * mix_videoconfigparamsenc_get_ci_frame_info: + * @obj: #MixVideoConfigParamsEnc object + * @ci_frame_id: Array of frame IDs created by capture library to be returned + * @ci_frame_num: Size of the array ci_frame_id to be returned + * @returns: Common Video Error Return Codes + * + * Get CI frame information + * + * Caller is responsible to g_free *ci_frame_id + * + */ MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * obj, gulong * *ci_frame_id, guint *ci_frame_num); + +/** + * mix_videoconfigparamsenc_set_drawable: + * @obj: #MixVideoConfigParamsEnc object + * @draw: drawable + * @returns: Common Video Error Return Codes + * + * Set drawable + */ MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj, gulong draw); +/** + * mix_videoconfigparamsenc_get_drawable: + * @obj: #MixVideoConfigParamsEnc object + * @draw: drawable to be returned + * @returns: Common Video Error Return Codes + * + * Get drawable + */ MIX_RESULT mix_videoconfigparamsenc_get_drawable (MixVideoConfigParamsEnc * obj, gulong *draw); +/** + * mix_videoconfigparamsenc_set_need_display: + * @obj: #MixVideoConfigParamsEnc object + * @need_display: Flag to indicates whether MixVideoFrames suitable for displaying + * need to be enqueued for retrieval using mix_video_get_frame() + * @returns: Common Video Error Return Codes + * + * Set the flag used to indicate whether MixVideoFrames suitable for displaying + * need to be enqueued for retrieval using mix_video_get_frame() + */ MIX_RESULT mix_videoconfigparamsenc_set_need_display ( MixVideoConfigParamsEnc * obj, gboolean need_display); + +/** + * mix_videoconfigparamsenc_get_need_display: + * @obj: #MixVideoConfigParamsEnc object + * @need_display: A flag to be returned to indicates whether MixVideoFrames suitable for displaying + * need to be enqueued for retrieval using mix_video_get_frame() + * @returns: Common Video Error Return Codes + * + * Get the flag used to indicate whether MixVideoFrames suitable for displaying + * need to be enqueued for retrieval using mix_video_get_frame() + */ MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * obj, gboolean *need_display); - +/** + * mix_videoconfigparamsenc_set_rate_control: + * @obj: #MixVideoConfigParamsEnc object + * @rcmode: Rate control mode + * @returns: Common Video Error Return Codes + * + * Set Rate control mode + */ MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj, MixRateControl rcmode); +/** + * mix_videoconfigparamsenc_set_rate_control: + * @obj: #MixVideoConfigParamsEnc object + * @rcmode: Rate control mode to be returned + * @returns: Common Video Error Return Codes + * + * Get Rate control mode + */ MIX_RESULT 
mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * obj, MixRateControl * rcmode); +/** + * mix_videoconfigparamsenc_set_raw_format: + * @obj: #MixVideoConfigParamsEnc object + * @raw_format: Raw format to be encoded + * @returns: Common Video Error Return Codes + * + * Set Raw format to be encoded + */ MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj, MixRawTargetFormat raw_format); +/** + * mix_videoconfigparamsenc_get_raw_format: + * @obj: #MixVideoConfigParamsEnc object + * @raw_format: Raw format to be returned + * @returns: Common Video Error Return Codes + * + * Get Raw format + */ MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * obj, MixRawTargetFormat * raw_format); +/** + * mix_videoconfigparamsenc_set_profile: + * @obj: #MixVideoConfigParamsEnc object + * @profile: Encoding profile + * @returns: Common Video Error Return Codes + * + * Set Encoding profile + */ MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj, MixProfile profile); +/** + * mix_videoconfigparamsenc_get_profile: + * @obj: #MixVideoConfigParamsEnc object + * @profile: Encoding profile to be returned + * @returns: Common Video Error Return Codes + * + * Get Encoding profile + */ MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj, MixProfile * profile); diff --git a/mix_video/src/mixvideoconfigparamsenc_h263.c b/mix_video/src/mixvideoconfigparamsenc_h263.c new file mode 100644 index 0000000..d0fb4d8 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsenc_h263.c @@ -0,0 +1,281 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixvideoconfigparamsenc_h263 + * @short_description: MI-X Video H.263 Eecode Configuration Parameter + * + * MI-X video H.263 eecode configuration parameter objects. 
+ */ + + +#include "mixvideolog.h" +#include "mixvideoconfigparamsenc_h263.h" + +#define MDEBUG + + +static GType _mix_videoconfigparamsenc_h263_type = 0; +static MixVideoConfigParamsEncClass *parent_class = NULL; + +#define _do_init { _mix_videoconfigparamsenc_h263_type = g_define_type_id; } + +gboolean mix_videoconfigparamsenc_h263_copy (MixParams * target, + const MixParams * src); +MixParams *mix_videoconfigparamsenc_h263_dup (const MixParams * obj); +gboolean mix_videoconfigparamsencenc_h263_equal (MixParams * first, + MixParams * second); +static void mix_videoconfigparamsenc_h263_finalize (MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncH263, /* The name of the new type, in Camel case */ + mix_videoconfigparamsenc_h263, /* The name of the new type in lowercase */ + MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ + _do_init); + +void +_mix_videoconfigparamsenc_h263_initialize (void) +{ + /* the MixParams types need to be class_ref'd once before it can be + * done from multiple threads; + * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ + g_type_class_ref (mix_videoconfigparamsenc_h263_get_type ()); +} + +static void +mix_videoconfigparamsenc_h263_init (MixVideoConfigParamsEncH263 * self) +{ + /* initialize properties here */ + /* TODO: initialize properties */ + + self->slice_num = 1; + self->disable_deblocking_filter_idc = 0; + + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; +} + +static void +mix_videoconfigparamsenc_h263_class_init (MixVideoConfigParamsEncH263Class * klass) +{ + MixVideoConfigParamsEncClass *this_parent_class = + MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); + MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); + + /* setup static parent class */ + parent_class = + (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); + + this_root_class->finalize = mix_videoconfigparamsenc_h263_finalize; + this_root_class->copy = + (MixParamsCopyFunction) mix_videoconfigparamsenc_h263_copy; + this_root_class->dup = + (MixParamsDupFunction) mix_videoconfigparamsenc_h263_dup; + this_root_class->equal = + (MixParamsEqualFunction) mix_videoconfigparamsencenc_h263_equal; +} + +MixVideoConfigParamsEncH263 * +mix_videoconfigparamsenc_h263_new (void) +{ + MixVideoConfigParamsEncH263 *ret = (MixVideoConfigParamsEncH263 *) + g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_H263); + + return ret; +} + +void +mix_videoconfigparamsenc_h263_finalize (MixParams * obj) +{ + /* MixVideoConfigParamsEncH263 *this_obj = MIX_VIDEOCONFIGPARAMSENC_H263 (obj); */ + MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); + + /* TODO: cleanup resources allocated */ + + /* Chain up parent */ + + if (root_class->finalize) + { + root_class->finalize (obj); + } +} + +MixVideoConfigParamsEncH263 + * mix_videoconfigparamsenc_h263_ref (MixVideoConfigParamsEncH263 * mix) +{ + return (MixVideoConfigParamsEncH263 *) mix_params_ref (MIX_PARAMS (mix)); +} + +/** +* mix_videoconfigparamsenc_h263_dup: +* @obj: a #MixVideoConfigParams object +* @returns: a newly allocated duplicate of the object. +* +* Copy duplicate of the object. 
+*/ +MixParams * +mix_videoconfigparamsenc_h263_dup (const MixParams * obj) +{ + MixParams *ret = NULL; + + if (MIX_IS_VIDEOCONFIGPARAMSENC_H263 (obj)) + { + MixVideoConfigParamsEncH263 *duplicate = mix_videoconfigparamsenc_h263_new (); + if (mix_videoconfigparamsenc_h263_copy + (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) + { + ret = MIX_PARAMS (duplicate); + } + else + { + mix_videoconfigparamsenc_h263_unref (duplicate); + } + } + return ret; +} + +/** +* mix_videoconfigparamsenc_h263_copy: +* @target: copy to target +* @src: copy from src +* @returns: boolean indicates if copy is successful. +* +* Copy instance data from @src to @target. +*/ +gboolean +mix_videoconfigparamsenc_h263_copy (MixParams * target, const MixParams * src) +{ + MixVideoConfigParamsEncH263 *this_target, *this_src; + MixParamsClass *root_class; + + LOG_V( "Begin\n"); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_H263 (target) + && MIX_IS_VIDEOCONFIGPARAMSENC_H263 (src)) + { + // Cast the base object to this child object + this_target = MIX_VIDEOCONFIGPARAMSENC_H263 (target); + this_src = MIX_VIDEOCONFIGPARAMSENC_H263 (src); + + //add properties + this_target->slice_num = this_src->slice_num; + this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc; + + // Now chainup base class + root_class = MIX_PARAMS_CLASS (parent_class); + + if (root_class->copy) + { + return root_class->copy (MIX_PARAMS_CAST (target), + MIX_PARAMS_CAST (src)); + } + else + { + return TRUE; + } + } + return FALSE; +} + +/** +* mix_videoconfigparamsenc_h263: +* @first: first object to compare +* @second: seond object to compare +* @returns: boolean indicates if instance are equal. +* +* Copy instance data from @src to @target. +*/ +gboolean +mix_videoconfigparamsencenc_h263_equal (MixParams * first, MixParams * second) +{ + gboolean ret = FALSE; + MixVideoConfigParamsEncH263 *this_first, *this_second; + + if (MIX_IS_VIDEOCONFIGPARAMSENC_H263 (first) + && MIX_IS_VIDEOCONFIGPARAMSENC_H263 (second)) + { + // Cast the base object to this child object + + this_first = MIX_VIDEOCONFIGPARAMSENC_H263 (first); + this_second = MIX_VIDEOCONFIGPARAMSENC_H263 (second); + + if (this_first->slice_num != this_second->slice_num) { + goto not_equal; + } + + if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) { + goto not_equal; + } + + + ret = TRUE; + + not_equal: + + if (ret != TRUE) { + return ret; + } + + /* TODO: add comparison for properties */ + { + // members within this scope equal. chaining up. 
+ MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); + if (klass->equal) + { + ret = klass->equal (first, second); + } + else + { + ret = TRUE; + } + } + } + + return ret; +} + +/* TODO: Add getters and setters for properties if any */ + +#define MIX_VIDEOCONFIGPARAMSENC_H263_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOCONFIGPARAMSENC_H263_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj)) return MIX_RESULT_FAIL; \ + + +MIX_RESULT mix_videoconfigparamsenc_h263_set_slice_num (MixVideoConfigParamsEncH263 * obj, + guint slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H263_SETTER_CHECK_INPUT (obj); + obj->slice_num = slice_num; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_h263_get_slice_num (MixVideoConfigParamsEncH263 * obj, + guint * slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H263_GETTER_CHECK_INPUT (obj, slice_num); + *slice_num = obj->slice_num; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_h263_set_dlk (MixVideoConfigParamsEncH263 * obj, + guint disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_H263_SETTER_CHECK_INPUT (obj); + obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_h263_get_dlk (MixVideoConfigParamsEncH263 * obj, + guint * disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_H263_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); + *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoconfigparamsenc_h263.h b/mix_video/src/mixvideoconfigparamsenc_h263.h new file mode 100644 index 0000000..8355207 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsenc_h263.h @@ -0,0 +1,188 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_VIDEOCONFIGPARAMSENC_H263_H__ +#define __MIX_VIDEOCONFIGPARAMSENC_H263_H__ + +#include "mixvideoconfigparamsenc.h" +#include "mixvideodef.h" + +/** +* MIX_TYPE_VIDEOCONFIGPARAMSENC_H263: +* +* Get type of class. +*/ +#define MIX_TYPE_VIDEOCONFIGPARAMSENC_H263 (mix_videoconfigparamsenc_h263_get_type ()) + +/** +* MIX_VIDEOCONFIGPARAMSENC_H263: +* @obj: object to be type-casted. 
+*/ +#define MIX_VIDEOCONFIGPARAMSENC_H263(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263, MixVideoConfigParamsEncH263)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSENC_H263: +* @obj: an object. +* +* Checks if the given object is an instance of #MixVideoConfigParamsEncH263 +*/ +#define MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263)) + +/** +* MIX_VIDEOCONFIGPARAMSENC_H263_CLASS: +* @klass: class to be type-casted. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_H263_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263, MixVideoConfigParamsEncH263Class)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSENC_H263_CLASS: +* @klass: a class. +* +* Checks if the given class is #MixVideoConfigParamsEncH263Class +*/ +#define MIX_IS_VIDEOCONFIGPARAMSENC_H263_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263)) + +/** +* MIX_VIDEOCONFIGPARAMSENC_H263_GET_CLASS: +* @obj: a #MixParams object. +* +* Get the class instance of the object. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_H263_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263, MixVideoConfigParamsEncH263Class)) + +typedef struct _MixVideoConfigParamsEncH263 MixVideoConfigParamsEncH263; +typedef struct _MixVideoConfigParamsEncH263Class MixVideoConfigParamsEncH263Class; + +/** +* MixVideoConfigParamsEncH263: +* +* MI-X VideoConfig Parameter object +*/ +struct _MixVideoConfigParamsEncH263 +{ + /*< public > */ + MixVideoConfigParamsEnc parent; + + /*< public > */ + + /* TODO: Add H.263 configuration paramters */ + + /* slice number in one picture */ + guint slice_num; + + /* enable/disable deblocking */ + guint disable_deblocking_filter_idc; + + /* Reserved for future use */ + void *reserved1; + + /* Reserved for future use */ + void *reserved2; + + /* Reserved for future use */ + void *reserved3; + + /* Reserved for future use */ + void *reserved4; +}; + +/** +* MixVideoConfigParamsEncH263Class: +* +* MI-X VideoConfig object class +*/ +struct _MixVideoConfigParamsEncH263Class +{ + /*< public > */ + MixVideoConfigParamsEncClass parent_class; + + /* class members */ +}; + +/** +* mix_videoconfigparamsenc_h263_get_type: +* @returns: type +* +* Get the type of object. +*/ +GType mix_videoconfigparamsenc_h263_get_type (void); + +/** +* mix_videoconfigparamsenc_h263_new: +* @returns: A newly allocated instance of #MixVideoConfigParamsEncH263 +* +* Use this method to create new instance of #MixVideoConfigParamsEncH263 +*/ +MixVideoConfigParamsEncH263 *mix_videoconfigparamsenc_h263_new (void); +/** +* mix_videoconfigparamsenc_h263_ref: +* @mix: object to add reference +* @returns: the #MixVideoConfigParamsEncH263 instance where reference count has been increased. +* +* Add reference count. +*/ +MixVideoConfigParamsEncH263 + * mix_videoconfigparamsenc_h263_ref (MixVideoConfigParamsEncH263 * mix); + +/** +* mix_videoconfigparamsenc_h263_unref: +* @obj: object to unref. +* +* Decrement reference count of the object. 
+*/ +#define mix_videoconfigparamsenc_h263_unref(obj) mix_params_unref(MIX_PARAMS(obj)) + +/* Class Methods */ + +/* TODO: Add getters and setters for other properties */ + +/** + * mix_videoconfigparamsenc_h263_set_dlk: + * @obj: #MixVideoConfigParamsEncH263 object + * @disable_deblocking_filter_idc: The flag to enable/disable deblocking + * @returns: Common Video Error Return Codes + * + * Set the The flag to enable/disable deblocking + */ +MIX_RESULT mix_videoconfigparamsenc_h263_set_dlk (MixVideoConfigParamsEncH263 * obj, + guint disable_deblocking_filter_idc); + +/** + * mix_videoconfigparamsenc_h263_get_dlk: + * @obj: #MixVideoConfigParamsEncH263 object + * @disable_deblocking_filter_idc: deblocking flag to be returned + * @returns: Common Video Error Return Codes + * + * Get the The flag to enable/disable deblocking + */ +MIX_RESULT mix_videoconfigparamsenc_h263_get_dlk (MixVideoConfigParamsEncH263 * obj, + guint * disable_deblocking_filter_idc); + +/** + * mix_videoconfigparamsenc_h263_set_slice_num: + * @obj: #MixVideoConfigParamsEncH263 object + * @slice_num: Number of slice in one picture encoded. + * @returns: Common Video Error Return Codes + * + * Set slice_num + */ +MIX_RESULT mix_videoconfigparamsenc_h263_set_slice_num (MixVideoConfigParamsEncH263 * obj, + guint slice_num); + +/** + * mix_videoconfigparamsenc_h263_get_slice_num: + * @obj: #MixVideoConfigParamsEncH263 object + * @slice_num: Number of slice in one picture encoded. + * @returns: Common Video Error Return Codes + * + * Get slice_num + */ +MIX_RESULT mix_videoconfigparamsenc_h263_get_slice_num (MixVideoConfigParamsEncH263 * obj, + guint * slice_num); + +#endif /* __MIX_VIDEOCONFIGPARAMSENC_H263_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.c b/mix_video/src/mixvideoconfigparamsenc_h264.c index 3bfa52e..69b6b0c 100644 --- a/mix_video/src/mixvideoconfigparamsenc_h264.c +++ b/mix_video/src/mixvideoconfigparamsenc_h264.c @@ -7,11 +7,12 @@ No license under any patent, copyright, trade secret or other intellectual prope */ /** -* SECTION:mixvideoconfigparamsenc_h264 -* @short_description: VideoConfig parameters -* -* A data object which stores videoconfig specific parameters. -*/ + * SECTION:mixvideoconfigparamsenc_h264 + * @short_description: MI-X Video H.264 Eecode Configuration Parameter + * + * MI-X video H.264 eecode configuration parameter objects. + */ + #include "mixvideolog.h" #include "mixvideoconfigparamsenc_h264.h" diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.h b/mix_video/src/mixvideoconfigparamsenc_h264.h index c2359dd..b1334aa 100644 --- a/mix_video/src/mixvideoconfigparamsenc_h264.h +++ b/mix_video/src/mixvideoconfigparamsenc_h264.h @@ -71,15 +71,29 @@ struct _MixVideoConfigParamsEncH264 /*< public > */ /* TODO: Add H.264 configuration paramters */ + + /* The basic unit size used by rate control */ guint basic_unit_size; + + /* Number of slices in one frame */ guint slice_num; + + /* enable/disable deblocking */ guint8 disable_deblocking_filter_idc; + /* delimiter_type */ MixDelimiterType delimiter_type; + /* Reserved for future use */ void *reserved1; + + /* Reserved for future use */ void *reserved2; + + /* Reserved for future use */ void *reserved3; + + /* Reserved for future use */ void *reserved4; }; @@ -114,7 +128,7 @@ MixVideoConfigParamsEncH264 *mix_videoconfigparamsenc_h264_new (void); /** * mix_videoconfigparamsenc_h264_ref: * @mix: object to add reference -* @returns: the MixVideoConfigParamsEncH264 instance where reference count has been increased. 
+* @returns: the #MixVideoConfigParamsEncH264 instance where reference count has been increased. * * Add reference count. */ @@ -132,27 +146,93 @@ MixVideoConfigParamsEncH264 /* Class Methods */ /* TODO: Add getters and setters for other properties */ + + +/** + * mix_videoconfigparamsenc_h264_set_bus: + * @obj: #MixVideoConfigParamsEncH264 object + * @basic_unit_size: The basic unit size used by rate control + * @returns: Common Video Error Return Codes + * + * Set The basic unit size used by rate control + */ MIX_RESULT mix_videoconfigparamsenc_h264_set_bus (MixVideoConfigParamsEncH264 * obj, guint basic_unit_size); +/** + * mix_videoconfigparamsenc_h264_get_bus: + * @obj: #MixVideoConfigParamsEncH264 object + * @basic_unit_size: The basic unit size to be returned + * @returns: Common Video Error Return Codes + * + * Get The basic unit size used by rate control + */ MIX_RESULT mix_videoconfigparamsenc_h264_get_bus (MixVideoConfigParamsEncH264 * obj, guint * basic_unit_size); +/** + * mix_videoconfigparamsenc_h264_set_dlk: + * @obj: #MixVideoConfigParamsEncH264 object + * @disable_deblocking_filter_idc: The flag to enable/disable deblocking + * @returns: Common Video Error Return Codes + * + * Set the The flag to enable/disable deblocking + */ MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (MixVideoConfigParamsEncH264 * obj, guint disable_deblocking_filter_idc); +/** + * mix_videoconfigparamsenc_h264_get_dlk: + * @obj: #MixVideoConfigParamsEncH264 object + * @disable_deblocking_filter_idc: deblocking flag to be returned + * @returns: Common Video Error Return Codes + * + * Get the The flag to enable/disable deblocking + */ MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk (MixVideoConfigParamsEncH264 * obj, guint * disable_deblocking_filter_idc); +/** + * mix_videoconfigparamsenc_h264_set_slice_num: + * @obj: #MixVideoConfigParamsEncH264 object + * @slice_num: Number of slices in one frame + * @returns: Common Video Error Return Codes + * + * Set the Number of slices in one frame + */ MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(MixVideoConfigParamsEncH264 * obj, guint slice_num); +/** + * mix_videoconfigparamsenc_h264_get_slice_num: + * @obj: #MixVideoConfigParamsEncH264 object + * @slice_num: Number of slices in one frame to be returned + * @returns: Common Video Error Return Codes + * + * Get the Number of slices in one frame + */ MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num(MixVideoConfigParamsEncH264 * obj, guint * slice_num); +/** + * mix_videoconfigparamsenc_h264_set_delimiter_type: + * @obj: #MixVideoConfigParamsEncH264 object + * @delimiter_type: Delimiter type + * @returns: Common Video Error Return Codes + * + * Set Delimiter type + */ MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParamsEncH264 * obj, MixDelimiterType delimiter_type); +/** + * mix_videoconfigparamsenc_h264_get_delimiter_type: + * @obj: #MixVideoConfigParamsEncH264 object + * @delimiter_type: Delimiter type to be returned + * @returns: Common Video Error Return Codes + * + * Get Delimiter type + */ MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type (MixVideoConfigParamsEncH264 * obj, MixDelimiterType * delimiter_type); diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.c b/mix_video/src/mixvideoconfigparamsenc_mpeg4.c index 54e47a9..6e11d22 100644 --- a/mix_video/src/mixvideoconfigparamsenc_mpeg4.c +++ b/mix_video/src/mixvideoconfigparamsenc_mpeg4.c @@ -1,300 +1,301 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. 
-The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** -* SECTION:mixvideoconfigparamsenc_mpeg4 -* @short_description: VideoConfig parameters -* -* A data object which stores videoconfig specific parameters. -*/ - -#include "mixvideolog.h" -#include "mixvideoconfigparamsenc_mpeg4.h" - -#define MDEBUG - - -static GType _mix_videoconfigparamsenc_mpeg4_type = 0; -static MixVideoConfigParamsEncClass *parent_class = NULL; - -#define _do_init { _mix_videoconfigparamsenc_mpeg4_type = g_define_type_id; } - -gboolean mix_videoconfigparamsenc_mpeg4_copy (MixParams * target, - const MixParams * src); -MixParams *mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj); -gboolean mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, - MixParams * second); -static void mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncMPEG4, /* The name of the new type, in Camel case */ - mix_videoconfigparamsenc_mpeg4, /* The name of the new type in lowercase */ - MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ - _do_init); - -void -_mix_videoconfigparamsenc_mpeg4_initialize (void) -{ - /* the MixParams types need to be class_ref'd once before it can be - * done from multiple threads; - * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ - g_type_class_ref (mix_videoconfigparamsenc_mpeg4_get_type ()); -} - -static void -mix_videoconfigparamsenc_mpeg4_init (MixVideoConfigParamsEncMPEG4 * self) -{ - /* initialize properties here */ - /* TODO: initialize properties */ - - self->fixed_vop_time_increment = 3; - self->profile_and_level_indication = 3; - self->disable_deblocking_filter_idc = 0; - - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void -mix_videoconfigparamsenc_mpeg4_class_init (MixVideoConfigParamsEncMPEG4Class * klass) -{ - MixVideoConfigParamsEncClass *this_parent_class = - MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); - MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); - - /* setup static parent class */ - parent_class = - (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); - - this_root_class->finalize = mix_videoconfigparamsenc_mpeg4_finalize; - this_root_class->copy = - (MixParamsCopyFunction) mix_videoconfigparamsenc_mpeg4_copy; - this_root_class->dup = - (MixParamsDupFunction) mix_videoconfigparamsenc_mpeg4_dup; - this_root_class->equal = - (MixParamsEqualFunction) mix_videoconfigparamsencenc_mpeg4_equal; -} - -MixVideoConfigParamsEncMPEG4 * 
-mix_videoconfigparamsenc_mpeg4_new (void) -{ - MixVideoConfigParamsEncMPEG4 *ret = (MixVideoConfigParamsEncMPEG4 *) - g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4); - - return ret; -} - -void -mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj) -{ - /* MixVideoConfigParamsEncMPEG4 *this_obj = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (obj); */ - MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); - - /* TODO: cleanup resources allocated */ - - /* Chain up parent */ - - if (root_class->finalize) - { - root_class->finalize (obj); - } -} - -MixVideoConfigParamsEncMPEG4 - * mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix) -{ - return (MixVideoConfigParamsEncMPEG4 *) mix_params_ref (MIX_PARAMS (mix)); -} - -/** -* mix_videoconfigparamsenc_mpeg4_dup: -* @obj: a #MixVideoConfigParams object -* @returns: a newly allocated duplicate of the object. -* -* Copy duplicate of the object. -*/ -MixParams * -mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (obj)) - { - MixVideoConfigParamsEncMPEG4 *duplicate = mix_videoconfigparamsenc_mpeg4_new (); - if (mix_videoconfigparamsenc_mpeg4_copy - (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) - { - ret = MIX_PARAMS (duplicate); - } - else - { - mix_videoconfigparamsenc_mpeg4_unref (duplicate); - } - } - return ret; -} - -/** -* mix_videoconfigparamsenc_mpeg4_copy: -* @target: copy to target -* @src: copy from src -* @returns: boolean indicates if copy is successful. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videoconfigparamsenc_mpeg4_copy (MixParams * target, const MixParams * src) -{ - MixVideoConfigParamsEncMPEG4 *this_target, *this_src; - MixParamsClass *root_class; - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (target) - && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (src)) - { - // Cast the base object to this child object - this_target = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (target); - this_src = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (src); - - //add properties - this_target->profile_and_level_indication= this_src->profile_and_level_indication; - this_target->fixed_vop_time_increment= this_src->fixed_vop_time_increment; - this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc; - - // Now chainup base class - root_class = MIX_PARAMS_CLASS (parent_class); - - if (root_class->copy) - { - return root_class->copy (MIX_PARAMS_CAST (target), - MIX_PARAMS_CAST (src)); - } - else - { - return TRUE; - } - } - return FALSE; -} - -/** -* mix_videoconfigparamsenc_mpeg4: -* @first: first object to compare -* @second: seond object to compare -* @returns: boolean indicates if instance are equal. -* -* Copy instance data from @src to @target. 
-*/ -gboolean -mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, MixParams * second) -{ - gboolean ret = FALSE; - MixVideoConfigParamsEncMPEG4 *this_first, *this_second; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (first) - && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (second)) - { - // Cast the base object to this child object - - this_first = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (first); - this_second = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (second); - - if (this_first->profile_and_level_indication!= this_second->profile_and_level_indication) { - goto not_equal; - } - - if (this_first->fixed_vop_time_increment!= this_second->fixed_vop_time_increment) { - goto not_equal; - } - - if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) { - goto not_equal; - } - - - ret = TRUE; - - not_equal: - - if (ret != TRUE) { - return ret; - } - - /* TODO: add comparison for properties */ - { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); - if (klass->equal) - { - ret = klass->equal (first, second); - } - else - { - ret = TRUE; - } - } - } - - return ret; -} - -/* TODO: Add getters and setters for properties if any */ - -#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \ - - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParamsEncMPEG4 * obj, - guchar profile_and_level_indication) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); - obj->profile_and_level_indication = profile_and_level_indication; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (MixVideoConfigParamsEncMPEG4 * obj, - guchar * profile_and_level_indication) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, profile_and_level_indication); - *profile_and_level_indication = obj->profile_and_level_indication; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, - guint fixed_vop_time_increment) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); - obj->fixed_vop_time_increment = fixed_vop_time_increment; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, - guint * fixed_vop_time_increment) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, fixed_vop_time_increment); - *fixed_vop_time_increment = obj->fixed_vop_time_increment; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 * obj, - guint disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); - obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (MixVideoConfigParamsEncMPEG4 * obj, - guint * disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); - *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; -} - +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. 
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixvideoconfigparamsenc_mpeg4 + * @short_description: MI-X Video MPEG 4:2 Eecode Configuration Parameter + * + * MI-X video MPEG 4:2 eecode configuration parameter objects. + */ + + +#include "mixvideolog.h" +#include "mixvideoconfigparamsenc_mpeg4.h" + +#define MDEBUG + + +static GType _mix_videoconfigparamsenc_mpeg4_type = 0; +static MixVideoConfigParamsEncClass *parent_class = NULL; + +#define _do_init { _mix_videoconfigparamsenc_mpeg4_type = g_define_type_id; } + +gboolean mix_videoconfigparamsenc_mpeg4_copy (MixParams * target, + const MixParams * src); +MixParams *mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj); +gboolean mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, + MixParams * second); +static void mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncMPEG4, /* The name of the new type, in Camel case */ + mix_videoconfigparamsenc_mpeg4, /* The name of the new type in lowercase */ + MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ + _do_init); + +void +_mix_videoconfigparamsenc_mpeg4_initialize (void) +{ + /* the MixParams types need to be class_ref'd once before it can be + * done from multiple threads; + * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ + g_type_class_ref (mix_videoconfigparamsenc_mpeg4_get_type ()); +} + +static void +mix_videoconfigparamsenc_mpeg4_init (MixVideoConfigParamsEncMPEG4 * self) +{ + /* initialize properties here */ + /* TODO: initialize properties */ + + self->fixed_vop_time_increment = 3; + self->profile_and_level_indication = 3; + self->disable_deblocking_filter_idc = 0; + + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; +} + +static void +mix_videoconfigparamsenc_mpeg4_class_init (MixVideoConfigParamsEncMPEG4Class * klass) +{ + MixVideoConfigParamsEncClass *this_parent_class = + MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); + MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); + + /* setup static parent class */ + parent_class = + (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); + + this_root_class->finalize = mix_videoconfigparamsenc_mpeg4_finalize; + this_root_class->copy = + (MixParamsCopyFunction) mix_videoconfigparamsenc_mpeg4_copy; + this_root_class->dup = + (MixParamsDupFunction) mix_videoconfigparamsenc_mpeg4_dup; + this_root_class->equal = + (MixParamsEqualFunction) mix_videoconfigparamsencenc_mpeg4_equal; +} + 
+MixVideoConfigParamsEncMPEG4 * +mix_videoconfigparamsenc_mpeg4_new (void) +{ + MixVideoConfigParamsEncMPEG4 *ret = (MixVideoConfigParamsEncMPEG4 *) + g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4); + + return ret; +} + +void +mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj) +{ + /* MixVideoConfigParamsEncMPEG4 *this_obj = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (obj); */ + MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); + + /* TODO: cleanup resources allocated */ + + /* Chain up parent */ + + if (root_class->finalize) + { + root_class->finalize (obj); + } +} + +MixVideoConfigParamsEncMPEG4 + * mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix) +{ + return (MixVideoConfigParamsEncMPEG4 *) mix_params_ref (MIX_PARAMS (mix)); +} + +/** +* mix_videoconfigparamsenc_mpeg4_dup: +* @obj: a #MixVideoConfigParams object +* @returns: a newly allocated duplicate of the object. +* +* Copy duplicate of the object. +*/ +MixParams * +mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj) +{ + MixParams *ret = NULL; + + if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (obj)) + { + MixVideoConfigParamsEncMPEG4 *duplicate = mix_videoconfigparamsenc_mpeg4_new (); + if (mix_videoconfigparamsenc_mpeg4_copy + (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) + { + ret = MIX_PARAMS (duplicate); + } + else + { + mix_videoconfigparamsenc_mpeg4_unref (duplicate); + } + } + return ret; +} + +/** +* mix_videoconfigparamsenc_mpeg4_copy: +* @target: copy to target +* @src: copy from src +* @returns: boolean indicates if copy is successful. +* +* Copy instance data from @src to @target. +*/ +gboolean +mix_videoconfigparamsenc_mpeg4_copy (MixParams * target, const MixParams * src) +{ + MixVideoConfigParamsEncMPEG4 *this_target, *this_src; + MixParamsClass *root_class; + + LOG_V( "Begin\n"); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (target) + && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (src)) + { + // Cast the base object to this child object + this_target = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (target); + this_src = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (src); + + //add properties + this_target->profile_and_level_indication= this_src->profile_and_level_indication; + this_target->fixed_vop_time_increment= this_src->fixed_vop_time_increment; + this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc; + + // Now chainup base class + root_class = MIX_PARAMS_CLASS (parent_class); + + if (root_class->copy) + { + return root_class->copy (MIX_PARAMS_CAST (target), + MIX_PARAMS_CAST (src)); + } + else + { + return TRUE; + } + } + return FALSE; +} + +/** +* mix_videoconfigparamsenc_mpeg4: +* @first: first object to compare +* @second: seond object to compare +* @returns: boolean indicates if instance are equal. +* +* Copy instance data from @src to @target. 
+*/ +gboolean +mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, MixParams * second) +{ + gboolean ret = FALSE; + MixVideoConfigParamsEncMPEG4 *this_first, *this_second; + + if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (first) + && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (second)) + { + // Cast the base object to this child object + + this_first = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (first); + this_second = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (second); + + if (this_first->profile_and_level_indication!= this_second->profile_and_level_indication) { + goto not_equal; + } + + if (this_first->fixed_vop_time_increment!= this_second->fixed_vop_time_increment) { + goto not_equal; + } + + if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) { + goto not_equal; + } + + + ret = TRUE; + + not_equal: + + if (ret != TRUE) { + return ret; + } + + /* TODO: add comparison for properties */ + { + // members within this scope equal. chaining up. + MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); + if (klass->equal) + { + ret = klass->equal (first, second); + } + else + { + ret = TRUE; + } + } + } + + return ret; +} + +/* TODO: Add getters and setters for properties if any */ + +#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \ + + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParamsEncMPEG4 * obj, + guchar profile_and_level_indication) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); + obj->profile_and_level_indication = profile_and_level_indication; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (MixVideoConfigParamsEncMPEG4 * obj, + guchar * profile_and_level_indication) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, profile_and_level_indication); + *profile_and_level_indication = obj->profile_and_level_indication; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, + guint fixed_vop_time_increment) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); + obj->fixed_vop_time_increment = fixed_vop_time_increment; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, + guint * fixed_vop_time_increment) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, fixed_vop_time_increment); + *fixed_vop_time_increment = obj->fixed_vop_time_increment; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 * obj, + guint disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); + obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (MixVideoConfigParamsEncMPEG4 * obj, + guint * disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); + *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h index 
e6322d5..123a6ae 100644 --- a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h +++ b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h @@ -1,152 +1,223 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ -#define __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ - -#include "mixvideoconfigparamsenc.h" -#include "mixvideodef.h" - -/** -* MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4: -* -* Get type of class. -*/ -#define MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4 (mix_videoconfigparamsenc_mpeg4_get_type ()) - -/** -* MIX_VIDEOCONFIGPARAMSENC_MPEG4: -* @obj: object to be type-casted. -*/ -#define MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4)) - -/** -* MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4: -* @obj: an object. -* -* Checks if the given object is an instance of #MixVideoConfigParamsEncMPEG4 -*/ -#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4)) - -/** -* MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS: -* @klass: class to be type-casted. -*/ -#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class)) - -/** -* MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS: -* @klass: a class. -* -* Checks if the given class is #MixVideoConfigParamsEncMPEG4Class -*/ -#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4)) - -/** -* MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS: -* @obj: a #MixParams object. -* -* Get the class instance of the object. 
-*/ -#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class)) - -typedef struct _MixVideoConfigParamsEncMPEG4 MixVideoConfigParamsEncMPEG4; -typedef struct _MixVideoConfigParamsEncMPEG4Class MixVideoConfigParamsEncMPEG4Class; - -/** -* MixVideoConfigParamsEncMPEG4: -* -* MI-X VideoConfig Parameter object -*/ -struct _MixVideoConfigParamsEncMPEG4 -{ - /*< public > */ - MixVideoConfigParamsEnc parent; - - /*< public > */ - - /* TODO: Add MPEG-4 configuration paramters */ - guchar profile_and_level_indication; - guint fixed_vop_time_increment; - guint disable_deblocking_filter_idc; - - void *reserved1; - void *reserved2; - void *reserved3; - void *reserved4; -}; - -/** -* MixVideoConfigParamsEncMPEG4Class: -* -* MI-X VideoConfig object class -*/ -struct _MixVideoConfigParamsEncMPEG4Class -{ - /*< public > */ - MixVideoConfigParamsEncClass parent_class; - - /* class members */ -}; - -/** -* mix_videoconfigparamsenc_mpeg4_get_type: -* @returns: type -* -* Get the type of object. -*/ -GType mix_videoconfigparamsenc_mpeg4_get_type (void); - -/** -* mix_videoconfigparamsenc_mpeg4_new: -* @returns: A newly allocated instance of #MixVideoConfigParamsEncMPEG4 -* -* Use this method to create new instance of #MixVideoConfigParamsEncMPEG4 -*/ -MixVideoConfigParamsEncMPEG4 *mix_videoconfigparamsenc_mpeg4_new (void); -/** -* mix_videoconfigparamsenc_mpeg4_ref: -* @mix: object to add reference -* @returns: the MixVideoConfigParamsEncMPEG4 instance where reference count has been increased. -* -* Add reference count. -*/ -MixVideoConfigParamsEncMPEG4 - * mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix); - -/** -* mix_videoconfigparamsenc_mpeg4_unref: -* @obj: object to unref. -* -* Decrement reference count of the object. -*/ -#define mix_videoconfigparamsenc_mpeg4_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/* Class Methods */ - -/* TODO: Add getters and setters for other properties */ - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 * obj, - guint disable_deblocking_filter_idc); - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (MixVideoConfigParamsEncMPEG4 * obj, - guint * disable_deblocking_filter_idc); - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParamsEncMPEG4 * obj, - guchar profile_and_level_indication); - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (MixVideoConfigParamsEncMPEG4 * obj, - guchar * profile_and_level_indication); - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, - guint fixed_vop_time_increment); - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, - guint * fixed_vop_time_increment); - -#endif /* __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ */ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +#ifndef __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ +#define __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ + +#include "mixvideoconfigparamsenc.h" +#include "mixvideodef.h" + +/** +* MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4: +* +* Get type of class. +*/ +#define MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4 (mix_videoconfigparamsenc_mpeg4_get_type ()) + +/** +* MIX_VIDEOCONFIGPARAMSENC_MPEG4: +* @obj: object to be type-casted. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4: +* @obj: an object. +* +* Checks if the given object is an instance of #MixVideoConfigParamsEncMPEG4 +*/ +#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4)) + +/** +* MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS: +* @klass: class to be type-casted. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class)) + +/** +* MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS: +* @klass: a class. +* +* Checks if the given class is #MixVideoConfigParamsEncMPEG4Class +*/ +#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4)) + +/** +* MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS: +* @obj: a #MixParams object. +* +* Get the class instance of the object. +*/ +#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class)) + +typedef struct _MixVideoConfigParamsEncMPEG4 MixVideoConfigParamsEncMPEG4; +typedef struct _MixVideoConfigParamsEncMPEG4Class MixVideoConfigParamsEncMPEG4Class; + +/** +* MixVideoConfigParamsEncMPEG4: +* +* MI-X VideoConfig Parameter object +*/ +struct _MixVideoConfigParamsEncMPEG4 +{ + /*< public > */ + MixVideoConfigParamsEnc parent; + + /*< public > */ + + /* TODO: Add MPEG-4 configuration paramters */ + + /* Indicate profile and level. + * Default value is 3. + * Can be ignored (refer to encoding + * specification for more info). */ + guchar profile_and_level_indication; + + /* Number of ticks between two successive VOPs + * in display order. Default value is 3. 
+ * Can be ignored (refer to encoding specification
+ * for more info) */
+  guint fixed_vop_time_increment;
+
+  /* enable/disable deblocking */
+  guint disable_deblocking_filter_idc;
+
+  /* Reserved for future use */
+  void *reserved1;
+
+  /* Reserved for future use */
+  void *reserved2;
+
+  /* Reserved for future use */
+  void *reserved3;
+
+  /* Reserved for future use */
+  void *reserved4;
+};
+
+/**
+* MixVideoConfigParamsEncMPEG4Class:
+*
+* MI-X VideoConfig object class
+*/
+struct _MixVideoConfigParamsEncMPEG4Class
+{
+  /*< public > */
+  MixVideoConfigParamsEncClass parent_class;
+
+  /* class members */
+};
+
+/**
+* mix_videoconfigparamsenc_mpeg4_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_videoconfigparamsenc_mpeg4_get_type (void);
+
+/**
+* mix_videoconfigparamsenc_mpeg4_new:
+* @returns: A newly allocated instance of #MixVideoConfigParamsEncMPEG4
+*
+* Use this method to create a new instance of #MixVideoConfigParamsEncMPEG4
+*/
+MixVideoConfigParamsEncMPEG4 *mix_videoconfigparamsenc_mpeg4_new (void);
+/**
+* mix_videoconfigparamsenc_mpeg4_ref:
+* @mix: object to add reference
+* @returns: the #MixVideoConfigParamsEncMPEG4 instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixVideoConfigParamsEncMPEG4
+  * mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix);
+
+/**
+* mix_videoconfigparamsenc_mpeg4_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_videoconfigparamsenc_mpeg4_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/* TODO: Add getters and setters for other properties */
+
+/**
+ * mix_videoconfigparamsenc_mpeg4_set_dlk:
+ * @obj: #MixVideoConfigParamsEncMPEG4 object
+ * @disable_deblocking_filter_idc: The flag to enable/disable deblocking
+ * @returns: Common Video Error Return Codes
+ *
+ * Set the flag to enable/disable deblocking
+ */
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 * obj,
+		guint disable_deblocking_filter_idc);
+
+/**
+ * mix_videoconfigparamsenc_mpeg4_get_dlk:
+ * @obj: #MixVideoConfigParamsEncMPEG4 object
+ * @disable_deblocking_filter_idc: deblocking flag to be returned
+ * @returns: Common Video Error Return Codes
+ *
+ * Get the flag to enable/disable deblocking
+ */
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (MixVideoConfigParamsEncMPEG4 * obj,
+		guint * disable_deblocking_filter_idc);
+
+/**
+ * mix_videoconfigparamsenc_mpeg4_set_profile_level:
+ * @obj: #MixVideoConfigParamsEncMPEG4 object
+ * @profile_and_level_indication: Indicate profile and level. Default value is 3.
+ *                                Can be ignored (refer to encoding specification
+ *                                for more info).
+ * @returns: Common Video Error Return Codes
+ *
+ * Set profile_and_level_indication
+ */
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParamsEncMPEG4 * obj,
+		guchar profile_and_level_indication);
+
+/**
+ * mix_videoconfigparamsenc_mpeg4_get_profile_level:
+ * @obj: #MixVideoConfigParamsEncMPEG4 object
+ * @profile_and_level_indication: profile_and_level_indication to be returned
+ * @returns: Common Video Error Return Codes
+ *
+ * Get profile_and_level_indication
+ */
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (MixVideoConfigParamsEncMPEG4 * obj,
+		guchar * profile_and_level_indication);
+
+/**
+ * mix_videoconfigparamsenc_mpeg4_set_fixed_vti:
+ * @obj: #MixVideoConfigParamsEncMPEG4 object
+ * @fixed_vop_time_increment: Number of ticks between two successive VOPs in display order.
+ *                            Default value is 3. Can be ignored (refer to encoding specification
+ *                            for more info)
+ * @returns: Common Video Error Return Codes
+ *
+ * Set fixed_vop_time_increment
+ */
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj,
+		guint fixed_vop_time_increment);
+
+/**
+ * mix_videoconfigparamsenc_mpeg4_get_fixed_vti:
+ * @obj: #MixVideoConfigParamsEncMPEG4 object
+ * @fixed_vop_time_increment: fixed_vop_time_increment to be returned
+ * @returns: Common Video Error Return Codes
+ *
+ * Get fixed_vop_time_increment
+ */
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj,
+		guint * fixed_vop_time_increment);
+
+#endif /* __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.c b/mix_video/src/mixvideoconfigparamsenc_preview.c
index ea0aaa1..890aaac 100644
--- a/mix_video/src/mixvideoconfigparamsenc_preview.c
+++ b/mix_video/src/mixvideoconfigparamsenc_preview.c
@@ -50,11 +50,6 @@ mix_videoconfigparamsenc_preview_init (MixVideoConfigParamsEncPreview * self)
 {
 	/* initialize properties here */
 	/* TODO: initialize properties */
-
-	self->reserved1 = NULL;
-	self->reserved2 = NULL;
-	self->reserved3 = NULL;
-	self->reserved4 = NULL;
 }
 
 static void
diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.h b/mix_video/src/mixvideoconfigparamsenc_preview.h
index f9d3fbe..8e57952 100644
--- a/mix_video/src/mixvideoconfigparamsenc_preview.h
+++ b/mix_video/src/mixvideoconfigparamsenc_preview.h
@@ -67,11 +67,7 @@ struct _MixVideoConfigParamsEncPreview
 {
 	/*< public > */
 	MixVideoConfigParamsEnc parent;
-
-	void *reserved1;
-	void *reserved2;
-	void *reserved3;
-	void *reserved4;
+
 };
 
 /**
diff --git a/mix_video/src/mixvideodecodeparams.c b/mix_video/src/mixvideodecodeparams.c
index 0bb06e8..0c74eb0 100644
--- a/mix_video/src/mixvideodecodeparams.c
+++ b/mix_video/src/mixvideodecodeparams.c
@@ -8,9 +8,10 @@
 
 /**
  * SECTION:mixvideodecodeparams
- * @short_description: VideoDecode parameters
+ * @short_description: MI-X Video Decode Parameters
  *
- * A data object which stores videodecode specific parameters.
+ * The #MixVideoDecodeParams object will be created by the MMF/App
+ * and provided to MixVideo in the mix_video_decode() function.
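+ *
+ * (Review sketch, added and not from the original patch: a caller would
+ * typically prepare the object with the setters declared in
+ * mixvideodecodeparams.h, e.g.
+ *
+ *   MixVideoDecodeParams *dp = mix_videodecodeparams_new ();
+ *   mix_videodecodeparams_set_timestamp (dp, ts_in_ms);
+ *   mix_videodecodeparams_set_discontinuity (dp, FALSE);
+ *
+ * before each mix_video_decode() call; ts_in_ms is a placeholder, and the
+ * matching unref is assumed to follow the same pattern as the other
+ * params objects.)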
 */
 
 #include "mixvideodecodeparams.h"
diff --git a/mix_video/src/mixvideodecodeparams.h b/mix_video/src/mixvideodecodeparams.h
index 34f1a22..00cfa98 100644
--- a/mix_video/src/mixvideodecodeparams.h
+++ b/mix_video/src/mixvideodecodeparams.h
@@ -70,12 +70,24 @@ struct _MixVideoDecodeParams {
 	/*< public > */
 
 	/* TODO: Add properties */
+
+	/* Presentation timestamp for the video
+	 * frame data, in milliseconds */
 	guint64 timestamp;
+
+	/* Indicates a discontinuity in the stream */
 	gboolean discontinuity;
 
+	/* Reserved for future use */
 	void *reserved1;
+
+	/* Reserved for future use */
 	void *reserved2;
+
+	/* Reserved for future use */
 	void *reserved3;
+
+	/* Reserved for future use */
 	void *reserved4;
 };
 
@@ -109,7 +121,7 @@ MixVideoDecodeParams *mix_videodecodeparams_new(void);
 /**
  * mix_videodecodeparams_ref:
  * @mix: object to add reference
- * @returns: the MixVideoDecodeParams instance where reference count has been increased.
+ * @returns: the #MixVideoDecodeParams instance where reference count has been increased.
  *
  * Add reference count.
  */
@@ -126,13 +138,50 @@ MixVideoDecodeParams *mix_videodecodeparams_ref(MixVideoDecodeParams * mix);
 /* Class Methods */
 
 /* TODO: Add getters and setters for properties */
+
+
+/**
+ * mix_videodecodeparams_set_timestamp:
+ * @obj: #MixVideoDecodeParams object
+ * @timestamp: Presentation timestamp for the video frame data, in milliseconds
+ * @returns: Common Video Error Return Codes
+ *
+ * Set presentation timestamp
+ */
 MIX_RESULT mix_videodecodeparams_set_timestamp(MixVideoDecodeParams * obj,
 		guint64 timestamp);
+
+/**
+ * mix_videodecodeparams_get_timestamp:
+ * @obj: #MixVideoDecodeParams object
+ * @timestamp: Presentation timestamp to be returned, in milliseconds
+ * @returns: Common Video Error Return Codes
+ *
+ * Get presentation timestamp
+ */
 MIX_RESULT mix_videodecodeparams_get_timestamp(MixVideoDecodeParams * obj,
 		guint64 * timestamp);
 
+/**
+ * mix_videodecodeparams_set_discontinuity:
+ * @obj: #MixVideoDecodeParams object
+ * @discontinuity: Flag to indicate a discontinuity in the stream.
+ * @returns: Common Video Error Return Codes
+ *
+ * Set discontinuity flag
+ */
 MIX_RESULT mix_videodecodeparams_set_discontinuity(MixVideoDecodeParams * obj,
 		gboolean discontinuity);
+
+
+/**
+ * mix_videodecodeparams_get_discontinuity:
+ * @obj: #MixVideoDecodeParams object
+ * @discontinuity: Discontinuity flag to be returned
+ * @returns: Common Video Error Return Codes
+ *
+ * Get discontinuity flag
+ */
 MIX_RESULT mix_videodecodeparams_get_discontinuity(MixVideoDecodeParams * obj,
 		gboolean *discontinuity);
diff --git a/mix_video/src/mixvideodef.h b/mix_video/src/mixvideodef.h
index bb80987..d3adcd8 100644
--- a/mix_video/src/mixvideodef.h
+++ b/mix_video/src/mixvideodef.h
@@ -6,6 +6,27 @@
 No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
 */
 
+/**
+ * SECTION:mixvideodef
+ * @title: MI-X Video Data Definitions And Common Error Code
+ * @short_description: MI-X Video data definitions and common error code
+ * @include: mixvideodef.h
+ *
+ * The section includes the definitions of enums and structs as well as the
+ * Common Video Error Return Codes of MI-X video functions:
+ *
+ * #MIX_RESULT_SUCCESS, The operation completed successfully.
+ * MIX_RESULT_NULL_PTR, The pointer passed to the function was null.
+ * MIX_RESULT_NO_MEMORY, Memory needed for the operation could not be allocated.
+ * MIX_RESULT_INVALID_PARAM, An argument passed to the function was invalid.
+ * MIX_RESULT_NOT_INIT, MixVideo object has not been initialized yet.
+ * MIX_RESULT_NOT_CONFIGURED, MixVideo object has not been configured yet.
+ * MIX_RESULT_FAIL, For any other failure.
+ *
+ *
+ */
+
 #ifndef __MIX_VIDEO_DEF_H__
 #define __MIX_VIDEO_DEF_H__
 #include
diff --git a/mix_video/src/mixvideoencodeparams.c b/mix_video/src/mixvideoencodeparams.c
index 809004d..52be78f 100644
--- a/mix_video/src/mixvideoencodeparams.c
+++ b/mix_video/src/mixvideoencodeparams.c
@@ -8,9 +8,14 @@
 
 /**
  * SECTION:mixvideoencodeparams
- * @short_description: VideoDecode parameters
+ * @short_description: MI-X Video Encode Parameters
  *
- * A data object which stores videodecode specific parameters.
+ * The #MixVideoEncodeParams object will be created by
+ * the MMF/App and provided to #MixVideo in the
+ * mix_video_encode() function. Get methods for the
+ * properties will be available for the caller to
+ * retrieve configuration information. Currently this
+ * object is reserved for future use.
  */
 
 #include "mixvideoencodeparams.h"
diff --git a/mix_video/src/mixvideoencodeparams.h b/mix_video/src/mixvideoencodeparams.h
index 8709cb9..54804dd 100644
--- a/mix_video/src/mixvideoencodeparams.h
+++ b/mix_video/src/mixvideoencodeparams.h
@@ -70,12 +70,23 @@ struct _MixVideoEncodeParams {
 	/*< public > */
 
 	/* TODO: Add properties */
+
+	/* < private > */
 	guint64 timestamp;
 	gboolean discontinuity;
 
+	/* < public > */
+
+	/* Reserved for future use */
 	void *reserved1;
+
+	/* Reserved for future use */
 	void *reserved2;
+
+	/* Reserved for future use */
 	void *reserved3;
+
+	/* Reserved for future use */
 	void *reserved4;
 };
 
diff --git a/mix_video/src/mixvideoformat.h b/mix_video/src/mixvideoformat.h
index c2e4769..2553667 100644
--- a/mix_video/src/mixvideoformat.h
+++ b/mix_video/src/mixvideoformat.h
@@ -13,7 +13,7 @@
 #include
 #include "vbp_loader.h"
 #include "mixvideodef.h"
-#include "mixdrmparams.h"
+#include
 #include "mixvideoconfigparamsdec.h"
 #include "mixvideodecodeparams.h"
 #include "mixvideoframe.h"
diff --git a/mix_video/src/mixvideoformat_h264.c b/mix_video/src/mixvideoformat_h264.c
index e43648d..b65936e 100644
--- a/mix_video/src/mixvideoformat_h264.c
+++ b/mix_video/src/mixvideoformat_h264.c
@@ -6,7 +6,9 @@
 No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
*/ #include +#ifndef ANDROID #include +#endif #include "mixvideolog.h" #include "mixvideoformat_h264.h" @@ -92,12 +94,14 @@ void mix_videoformat_h264_finalize(GObject * obj) { //inputbufqueue is deallocated by parent //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces - //Free the DPB surface table - //First remove all the entries (frames will be unrefed) - g_hash_table_remove_all(self->dpb_surface_table); - //Then unref the table - g_hash_table_unref(self->dpb_surface_table); - self->dpb_surface_table = NULL; + if (self->dpb_surface_table) { + //Free the DPB surface table + //First remove all the entries (frames will be unrefed) + g_hash_table_remove_all(self->dpb_surface_table); + //Then unref the table + g_hash_table_unref(self->dpb_surface_table); + self->dpb_surface_table = NULL; + } g_mutex_lock(parent->objectlock); parent->initialized = TRUE; @@ -1114,7 +1118,6 @@ MIX_RESULT mix_videofmt_h264_process_decode_picture(MixVideoFormat *mix, LOG_V( "Getting a new surface for frame_num %d\n", pic_params->frame_num); LOG_V( "frame type is %d\n", frame_type); - // Set displayorder ret = mix_videoframe_set_displayorder(frame, pic_params->CurrPic.TopFieldOrderCnt / 2); if(ret != MIX_RESULT_SUCCESS) diff --git a/mix_video/src/mixvideoformat_vc1.c b/mix_video/src/mixvideoformat_vc1.c index 0702c85..151cff3 100644 --- a/mix_video/src/mixvideoformat_vc1.c +++ b/mix_video/src/mixvideoformat_vc1.c @@ -9,7 +9,9 @@ #include "mixvideolog.h" #include "mixvideoformat_vc1.h" +#ifndef ANDROID #include +#endif #ifdef YUVDUMP //TODO Complete YUVDUMP code and move into base class diff --git a/mix_video/src/mixvideoformatenc.h b/mix_video/src/mixvideoformatenc.h index 0e1c07a..c0da910 100644 --- a/mix_video/src/mixvideoformatenc.h +++ b/mix_video/src/mixvideoformatenc.h @@ -12,7 +12,7 @@ #include #include #include "mixvideodef.h" -#include "mixdrmparams.h" +#include #include "mixvideoconfigparamsenc.h" #include "mixvideoframe.h" #include "mixframemanager.h" diff --git a/mix_video/src/mixvideoformatenc_h263.c b/mix_video/src/mixvideoformatenc_h263.c new file mode 100644 index 0000000..96f8c9a --- /dev/null +++ b/mix_video/src/mixvideoformatenc_h263.c @@ -0,0 +1,1806 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include +#include +#include + +#include "mixvideolog.h" + +#include "mixvideoformatenc_h263.h" +#include "mixvideoconfigparamsenc_h263.h" + +#undef SHOW_SRC + +#ifdef SHOW_SRC +Window win = 0; +#endif /* SHOW_SRC */ + + +/* The parent class. 
The pointer will be saved + * in this class's initialization. The pointer + * can be used for chaining method call if needed. + */ +static MixVideoFormatEncClass *parent_class = NULL; + +static void mix_videoformatenc_h263_finalize(GObject * obj); + +/* + * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC + */ +G_DEFINE_TYPE (MixVideoFormatEnc_H263, mix_videoformatenc_h263, MIX_TYPE_VIDEOFORMATENC); + +static void mix_videoformatenc_h263_init(MixVideoFormatEnc_H263 * self) { + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); + + /* member initialization */ + self->encoded_frames = 0; + self->pic_skipped = FALSE; + self->is_intra = TRUE; + self->cur_frame = NULL; + self->ref_frame = NULL; + self->rec_frame = NULL; + + self->ci_shared_surfaces = NULL; + self->surfaces= NULL; + self->surface_num = 0; + self->coded_buf_index = 0; + + parent->initialized = FALSE; + +} + +static void mix_videoformatenc_h263_class_init( + MixVideoFormatEnc_H263Class * klass) { + + /* root class */ + GObjectClass *gobject_class = (GObjectClass *) klass; + + /* direct parent class */ + MixVideoFormatEncClass *video_formatenc_class = + MIX_VIDEOFORMATENC_CLASS(klass); + + /* parent class for later use */ + parent_class = g_type_class_peek_parent(klass); + + /* setup finializer */ + gobject_class->finalize = mix_videoformatenc_h263_finalize; + + /* setup vmethods with base implementation */ + video_formatenc_class->getcaps = mix_videofmtenc_h263_getcaps; + video_formatenc_class->initialize = mix_videofmtenc_h263_initialize; + video_formatenc_class->encode = mix_videofmtenc_h263_encode; + video_formatenc_class->flush = mix_videofmtenc_h263_flush; + video_formatenc_class->eos = mix_videofmtenc_h263_eos; + video_formatenc_class->deinitialize = mix_videofmtenc_h263_deinitialize; + video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_h263_get_max_encoded_buf_size; +} + +MixVideoFormatEnc_H263 * +mix_videoformatenc_h263_new(void) { + MixVideoFormatEnc_H263 *ret = + g_object_new(MIX_TYPE_VIDEOFORMATENC_H263, NULL); + + return ret; +} + +void mix_videoformatenc_h263_finalize(GObject * obj) { + /* clean up here. 
*/ + + /*MixVideoFormatEnc_H263 *mix = MIX_VIDEOFORMATENC_H263(obj); */ + GObjectClass *root_class = (GObjectClass *) parent_class; + + LOG_V( "\n"); + + /* Chain up parent */ + if (root_class->finalize) { + root_class->finalize(obj); + } +} + +MixVideoFormatEnc_H263 * +mix_videoformatenc_h263_ref(MixVideoFormatEnc_H263 * mix) { + return (MixVideoFormatEnc_H263 *) g_object_ref(G_OBJECT(mix)); +} + +/*H263 vmethods implementation */ +MIX_RESULT mix_videofmtenc_h263_getcaps(MixVideoFormatEnc *mix, GString *msg) { + + LOG_V( "mix_videofmtenc_h263_getcaps\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + + if (parent_class->getcaps) { + return parent_class->getcaps(mix, msg); + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_h263_initialize(MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display ) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + MixVideoConfigParamsEncH263 * config_params_enc_h263; + + VAStatus va_status = VA_STATUS_SUCCESS; + VASurfaceID * surfaces = NULL; + + gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; + gint va_num_profiles, va_num_entrypoints; + + VAProfile *va_profiles = NULL; + VAEntrypoint *va_entrypoints = NULL; + VAConfigAttrib va_attrib[2]; + guint index; + + + /*frame_mgr and input_buf_pool is reservered for future use*/ + + if (mix == NULL || config_params_enc == NULL || va_display == NULL) { + LOG_E( + "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "begin\n"); + + /* Chainup parent method. */ + if (parent_class->initialize) { + ret = parent_class->initialize(mix, config_params_enc, + frame_mgr, input_buf_pool, surface_pool, + va_display); + } + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + + if (!MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263(mix); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_H263 (config_params_enc)) { + config_params_enc_h263 = + MIX_VIDEOCONFIGPARAMSENC_H263 (config_params_enc); + } else { + LOG_V( + "mix_videofmtenc_h263_initialize: no h263 config params found\n"); + return MIX_RESULT_FAIL; + } + + g_mutex_lock(parent->objectlock); + + LOG_V( + "Start to get properities from H263 params\n"); + + /* get properties from H263 params Object, which is special to H263 format*/ + + ret = mix_videoconfigparamsenc_h263_get_slice_num (config_params_enc_h263, + &self->slice_num); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h263_get_slice_num\n"); + goto cleanup; + } + + ret = mix_videoconfigparamsenc_h263_get_dlk (config_params_enc_h263, + &(self->disable_deblocking_filter_idc)); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h263_get_dlk\n"); + goto cleanup; + } + + + LOG_V( + "======H263 Encode Object properities======:\n"); + + LOG_I( "self->slice_num = %d\n", + self->slice_num); + LOG_I( "self->disabled_deblocking_filter_idc = %d\n\n", + self->disable_deblocking_filter_idc); + + LOG_V( + "Get properities from params done\n"); + + parent->va_display = va_display; + + LOG_V( "Get Display\n"); + LOG_I( "Display = 0x%08x\n", + (guint)va_display); + +#if 0 + /* query the vender information, can ignore*/ + va_vendor = vaQueryVendorString 
(va_display); + LOG_I( "Vendor = %s\n", + va_vendor); +#endif + + /*get the max number for profiles/entrypoints/attribs*/ + va_max_num_profiles = vaMaxNumProfiles(va_display); + LOG_I( "va_max_num_profiles = %d\n", + va_max_num_profiles); + + va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); + LOG_I( "va_max_num_entrypoints = %d\n", + va_max_num_entrypoints); + + va_max_num_attribs = vaMaxNumConfigAttributes(va_display); + LOG_I( "va_max_num_attribs = %d\n", + va_max_num_attribs); + + va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); + va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); + + if (va_profiles == NULL || va_entrypoints ==NULL) + { + LOG_E( + "!va_profiles || !va_entrypoints\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + LOG_I( + "va_profiles = 0x%08x\n", (guint)va_profiles); + + LOG_V( "vaQueryConfigProfiles\n"); + + + va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigProfiles\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "vaQueryConfigProfiles Done\n"); + + + + /*check whether profile is supported*/ + for(index= 0; index < va_num_profiles; index++) { + if(parent->va_profile == va_profiles[index]) + break; + } + + if(index == va_num_profiles) + { + LOG_E( "Profile not supported\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "vaQueryConfigEntrypoints\n"); + + + /*Check entry point*/ + va_status = vaQueryConfigEntrypoints(va_display, + parent->va_profile, + va_entrypoints, &va_num_entrypoints); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigEntrypoints\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + for (index = 0; index < va_num_entrypoints; index ++) { + if (va_entrypoints[index] == VAEntrypointEncSlice) { + break; + } + } + + if (index == va_num_entrypoints) { + LOG_E( "Entrypoint not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + va_attrib[0].type = VAConfigAttribRTFormat; + va_attrib[1].type = VAConfigAttribRateControl; + + LOG_V( "vaGetConfigAttributes\n"); + + va_status = vaGetConfigAttributes(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaGetConfigAttributes\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + if ((va_attrib[0].value & parent->va_format) == 0) { + LOG_E( "Matched format not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + if ((va_attrib[1].value & parent->va_rcmode) == 0) { + LOG_E( "RC mode not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; + va_attrib[1].value = parent->va_rcmode; + + LOG_V( "======VA Configuration======\n"); + + LOG_I( "profile = %d\n", + parent->va_profile); + LOG_I( "va_entrypoint = %d\n", + parent->va_entrypoint); + LOG_I( "va_attrib[0].type = %d\n", + va_attrib[0].type); + LOG_I( "va_attrib[1].type = %d\n", + va_attrib[1].type); + LOG_I( "va_attrib[0].value (Format) = %d\n", + va_attrib[0].value); + LOG_I( "va_attrib[1].value (RC mode) = %d\n", + va_attrib[1].value); + + LOG_V( "vaCreateConfig\n"); + + va_status = vaCreateConfig(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2, &(parent->va_config)); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaCreateConfig\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + /*TODO: compute the surface number*/ + 
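/* Review note (added comment; the numbers restate the logic just below):
+    * in shared-buffer mode only 2 private surfaces are created and the
+    * parent's ci_frame_num CI frames are added on top, so surface_num
+    * becomes 2 + ci_frame_num; otherwise 8 private surfaces are used and
+    * ci_frame_num is reset to 0. */
+    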
int numSurfaces; + + if (parent->share_buf_mode) { + numSurfaces = 2; + } + else { + numSurfaces = 8; + parent->ci_frame_num = 0; + } + + self->surface_num = numSurfaces + parent->ci_frame_num; + + surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); + + if (surfaces == NULL) + { + LOG_E( + "Failed allocate surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + LOG_V( "vaCreateSurfaces\n"); + + va_status = vaCreateSurfaces(va_display, parent->picture_width, + parent->picture_height, parent->va_format, + numSurfaces, surfaces); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaCreateSurfaces\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + if (parent->share_buf_mode) { + + LOG_V( + "We are in share buffer mode!\n"); + self->ci_shared_surfaces = + g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); + + if (self->ci_shared_surfaces == NULL) + { + LOG_E( + "Failed allocate shared surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + guint index; + for(index = 0; index < parent->ci_frame_num; index++) { + + LOG_I( "ci_frame_id = %lu\n", + parent->ci_frame_id[index]); + + LOG_V( + "vaCreateSurfaceFromCIFrame\n"); + + va_status = vaCreateSurfaceFromCIFrame(va_display, + (gulong) (parent->ci_frame_id[index]), + &self->ci_shared_surfaces[index]); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateSurfaceFromCIFrame\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + } + + LOG_V( + "vaCreateSurfaceFromCIFrame Done\n"); + + }// if (parent->share_buf_mode) + + self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); + + if (self->surfaces == NULL) + { + LOG_E( + "Failed allocate private surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + if (parent->share_buf_mode) { + /*shared surfaces should be put in pool first, + * because we will get it accoring to CI index*/ + for(index = 0; index < parent->ci_frame_num; index++) + self->surfaces[index] = self->ci_shared_surfaces[index]; + } + + for(index = 0; index < numSurfaces; index++) { + self->surfaces[index + parent->ci_frame_num] = surfaces[index]; + } + + LOG_V( "assign surface Done\n"); + LOG_I( "Created %d libva surfaces\n", + numSurfaces + parent->ci_frame_num); + +#if 0 //current put this in gst + images = g_malloc(sizeof(VAImage)*numSurfaces); + if (images == NULL) + { + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + for (index = 0; index < numSurfaces; index++) { + //Derive an VAImage from an existing surface. 
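+            //(review note, added: vaDeriveImage exposes the surface memory
+            // directly, whereas vaCreateImage + vaGetImage would copy it;
+            // this block is disabled by the surrounding #if 0)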
+ //The image buffer can then be mapped/unmapped for CPU access + va_status = vaDeriveImage(va_display, surfaces[index], + &images[index]); + } +#endif + + LOG_V( "mix_surfacepool_new\n"); + + parent->surfacepool = mix_surfacepool_new(); + if (surface_pool) + *surface_pool = parent->surfacepool; + //which is useful to check before encode + + if (parent->surfacepool == NULL) + { + LOG_E( + "Failed to mix_surfacepool_new\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "mix_surfacepool_initialize\n"); + + ret = mix_surfacepool_initialize(parent->surfacepool, + self->surfaces, parent->ci_frame_num + numSurfaces, va_display); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + + LOG_E("Error init failure\n"); + + ret = MIX_RESULT_ALREADY_INIT; + goto cleanup; + default: + break; + } + + + //Initialize and save the VA context ID + LOG_V( "vaCreateContext\n"); + + va_status = vaCreateContext(va_display, parent->va_config, + parent->picture_width, parent->picture_height, + VA_PROGRESSIVE, self->surfaces, parent->ci_frame_num + numSurfaces, + &(parent->va_context)); + + LOG_I( + "Created libva context width %d, height %d\n", + parent->picture_width, parent->picture_height); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateContext\n"); + LOG_I( "va_status = %d\n", + (guint)va_status); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + guint max_size = 0; + ret = mix_videofmtenc_h263_get_max_encoded_buf_size (parent, &max_size); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videofmtenc_h263_get_max_encoded_buf_size\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &self->coded_buf[0]); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &(self->coded_buf[1])); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + +#ifdef SHOW_SRC + Display * display = XOpenDisplay (NULL); + + LOG_I( "display = 0x%08x\n", + (guint) display); + win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, + parent->picture_width, parent->picture_height, 0, 0, + WhitePixel(display, 0)); + XMapWindow(display, win); + XSelectInput(display, win, KeyPressMask | StructureNotifyMask); + + XSync(display, False); + LOG_I( "va_display = 0x%08x\n", + (guint) va_display); + +#endif /* SHOW_SRC */ + +cleanup: + + if(ret == MIX_RESULT_SUCCESS) { + parent->initialized = TRUE; + } + + /*free profiles and entrypoints*/ + if(va_profiles) + g_free(va_profiles); + if(va_entrypoints) + g_free(va_entrypoints); + if(surfaces) + g_free(surfaces); + + g_mutex_unlock(parent->objectlock); + + LOG_V( "end\n"); + + return ret; +} + +MIX_RESULT mix_videofmtenc_h263_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + + LOG_V( "Begin\n"); + + /*currenly only support one input and output buffer*/ + + if (bufincnt != 1 || iovoutcnt != 1) { + LOG_E( + "buffer 
count not equel to 1\n"); + LOG_E( + "maybe some exception occurs\n"); + } + + if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { + LOG_E( + "!mix || !bufin[0] ||!iovout[0]\n"); + return MIX_RESULT_NULL_PTR; + } + +#if 0 + if (parent_class->encode) { + return parent_class->encode(mix, bufin, bufincnt, iovout, + iovoutcnt, encode_params); + } +#endif + + if (! MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263 (mix); + + LOG_V( "Locking\n"); + g_mutex_lock(parent->objectlock); + + + //TODO: also we could move some encode Preparation work to here + + LOG_V( + "mix_videofmtenc_h263_process_encode\n"); + + ret = mix_videofmtenc_h263_process_encode (self, + bufin[0], iovout[0]); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h263_process_encode\n"); + goto cleanup; + } + +cleanup: + + LOG_V( "UnLocking\n"); + + g_mutex_unlock(parent->objectlock); + + LOG_V( "end\n"); + + return ret; +} + +MIX_RESULT mix_videofmtenc_h263_flush(MixVideoFormatEnc *mix) { + + //MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + + /*not chain to parent flush func*/ +#if 0 + if (parent_class->flush) { + return parent_class->flush(mix, msg); + } +#endif + + if(!MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263(mix); + + g_mutex_lock(mix->objectlock); + + /*unref the current source surface*/ + if (self->cur_frame != NULL) + { + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; + } + + /*unref the reconstructed surface*/ + if (self->rec_frame != NULL) + { + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_frame != NULL) + { + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; + } + + /*reset the properities*/ + self->encoded_frames = 0; + self->pic_skipped = FALSE; + self->is_intra = TRUE; + + g_mutex_unlock(mix->objectlock); + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_h263_eos(MixVideoFormatEnc *mix) { + + LOG_V( "\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if (parent_class->eos) { + return parent_class->eos(mix); + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_h263_deinitialize(MixVideoFormatEnc *mix) { + + MixVideoFormatEnc *parent = NULL; + VAStatus va_status; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if(!MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + if(parent_class->deinitialize) { + ret = parent_class->deinitialize(mix); + } + + if(ret != MIX_RESULT_SUCCESS) + { + return ret; + } + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263(mix); + + LOG_V( "Release frames\n"); + + g_mutex_lock(parent->objectlock); + +#if 0 + /*unref the current source surface*/ + if (self->cur_frame != NULL) + { + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; + } +#endif + + /*unref the reconstructed surface*/ + if (self->rec_frame != NULL) + { + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_frame != NULL) + { + 
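/* added note: dropping this reference releases the encoder's hold on
+        * the reference surface; once the last ref is gone the surface can
+        * be recycled by the surface pool */
+       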
mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; + } + + LOG_V( "Release surfaces\n"); + + if (self->ci_shared_surfaces) + { + g_free (self->ci_shared_surfaces); + self->ci_shared_surfaces = NULL; + } + + if (self->surfaces) + { + g_free (self->surfaces); + self->surfaces = NULL; + } + + LOG_V( "vaDestroyContext\n"); + + va_status = vaDestroyContext (parent->va_display, parent->va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyContext\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "vaDestroyConfig\n"); + + va_status = vaDestroyConfig (parent->va_display, parent->va_config); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyConfig\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + +cleanup: + + parent->initialized = TRUE; + + g_mutex_unlock(parent->objectlock); + + LOG_V( "end\n"); + + return ret; +} + +MIX_RESULT mix_videofmtenc_h263_send_seq_params (MixVideoFormatEnc_H263 *mix) +{ + + VAStatus va_status; + VAEncSequenceParameterBufferH263 h263_seq_param; + VABufferID seq_para_buf_id; + + + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) { + LOG_E("mix = NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + if (!MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set up the sequence params for HW*/ + h263_seq_param.bits_per_second= parent->bitrate; + h263_seq_param.frame_rate = 30; //hard-coded, driver need; + //(unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; + h263_seq_param.initial_qp = parent->initial_qp; + h263_seq_param.min_qp = parent->min_qp; + h263_seq_param.intra_period = parent->intra_period; + + //h263_seq_param.fixed_vop_rate = 30; + + LOG_V( + "===h263 sequence params===\n"); + + LOG_I( "bitrate = %d\n", + h263_seq_param.bits_per_second); + LOG_I( "frame_rate = %d\n", + h263_seq_param.frame_rate); + LOG_I( "initial_qp = %d\n", + h263_seq_param.initial_qp); + LOG_I( "min_qp = %d\n", + h263_seq_param.min_qp); + LOG_I( "intra_period = %d\n\n", + h263_seq_param.intra_period); + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncSequenceParameterBufferType, + sizeof(h263_seq_param), + 1, &h263_seq_param, + &seq_para_buf_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &seq_para_buf_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videofmtenc_h263_send_picture_parameter (MixVideoFormatEnc_H263 *mix) +{ + VAStatus va_status; + VAEncPictureParameterBufferH263 h263_pic_param; + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) { + LOG_E("mix = NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + +#if 0 //not needed currently + MixVideoConfigParamsEncH263 * params_h263 + = MIX_VIDEOCONFIGPARAMSENC_H263 (config_params_enc); +#endif + + if (! 
MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set picture params for HW*/ + h263_pic_param.reference_picture = mix->ref_frame->frame_id; + h263_pic_param.reconstructed_picture = mix->rec_frame->frame_id; + h263_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; + h263_pic_param.picture_width = parent->picture_width; + h263_pic_param.picture_height = parent->picture_height; + h263_pic_param.picture_type = mix->is_intra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; + + + + LOG_V( + "======h263 picture params======\n"); + LOG_I( "reference_picture = 0x%08x\n", + h263_pic_param.reference_picture); + LOG_I( "reconstructed_picture = 0x%08x\n", + h263_pic_param.reconstructed_picture); + LOG_I( "coded_buf = 0x%08x\n", + h263_pic_param.coded_buf); + LOG_I( "coded_buf_index = %d\n", + mix->coded_buf_index); + LOG_I( "picture_width = %d\n", + h263_pic_param.picture_width); + LOG_I( "picture_height = %d\n", + h263_pic_param.picture_height); + LOG_I( "picture_type = %d\n\n", + h263_pic_param.picture_type); + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncPictureParameterBufferType, + sizeof(h263_pic_param), + 1,&h263_pic_param, + &mix->pic_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->pic_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + return MIX_RESULT_SUCCESS; + +} + + +MIX_RESULT mix_videofmtenc_h263_send_slice_parameter (MixVideoFormatEnc_H263 *mix) +{ + VAStatus va_status; + + guint slice_num; + guint slice_height; + guint slice_index; + guint slice_height_in_mb; + + if (mix == NULL) { + LOG_E("mix = NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V("Begin\n\n"); + + + MixVideoFormatEnc *parent = NULL; + + if (! 
MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + //slice_num = mix->slice_num; + slice_num = 1; // one slice per picture; + slice_height = parent->picture_height / slice_num; + + slice_height += 15; + slice_height &= (~15); + + va_status = vaCreateBuffer (parent->va_display, parent->va_context, + VAEncSliceParameterBufferType, + sizeof(VAEncSliceParameterBuffer), + slice_num, NULL, + &mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E("Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + VAEncSliceParameterBuffer *slice_param, *current_slice; + + va_status = vaMapBuffer(parent->va_display, + mix->slice_param_buf, + (void **)&slice_param); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E("Failed to vaMapBuffer\n"); + return MIX_RESULT_FAIL; + } + + current_slice = slice_param; + + for (slice_index = 0; slice_index < slice_num; slice_index++) { + current_slice = slice_param + slice_index; + slice_height_in_mb = + min (slice_height, parent->picture_height + - slice_index * slice_height) / 16; + + // starting MB row number for this slice + current_slice->start_row_number = slice_index * slice_height / 16; + // slice height measured in MB + current_slice->slice_height = slice_height_in_mb; + current_slice->slice_flags.bits.is_intra = mix->is_intra; + current_slice->slice_flags.bits.disable_deblocking_filter_idc + = mix->disable_deblocking_filter_idc; + + LOG_V("======h263 slice params======\n"); + + LOG_I("slice_index = %d\n", + (gint) slice_index); + LOG_I("start_row_number = %d\n", + (gint) current_slice->start_row_number); + LOG_I("slice_height_in_mb = %d\n", + (gint) current_slice->slice_height); + LOG_I("slice.is_intra = %d\n", + (gint) current_slice->slice_flags.bits.is_intra); + LOG_I("disable_deblocking_filter_idc = %d\n\n", + (gint) mix->disable_deblocking_filter_idc); + + } + + va_status = vaUnmapBuffer(parent->va_display, mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E("Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->slice_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E("Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + LOG_V("end\n"); + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, + MixBuffer * bufin, MixIOVec * iovout) +{ + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus va_status = VA_STATUS_SUCCESS; + VADisplay va_display = NULL; + VAContextID va_context; + gulong surface = 0; + guint16 width, height; + + MixVideoFrame * tmp_frame; + guint8 *buf; + + if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { + LOG_E( + "mix == NUL) || bufin == NULL || iovout == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + if (! 
MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + va_display = parent->va_display; + va_context = parent->va_context; + width = parent->picture_width; + height = parent->picture_height; + + + LOG_I( "encoded_frames = %d\n", + mix->encoded_frames); + LOG_I( "is_intra = %d\n", + mix->is_intra); + LOG_I( "ci_frame_id = 0x%08x\n", + (guint) parent->ci_frame_id); + + /* determine the picture type*/ + if ((mix->encoded_frames % parent->intra_period) == 0) { + mix->is_intra = TRUE; + } else { + mix->is_intra = FALSE; + } + + LOG_I( "is_intra_picture = %d\n", + mix->is_intra); + + LOG_V( + "Get Surface from the pool\n"); + + /*current we use one surface for source data, + * one for reference and one for reconstructed*/ + /*TODO, could be refine here*/ + + if (!parent->share_buf_mode) { + LOG_V( + "We are NOT in share buffer mode\n"); + + if (mix->ref_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + if (mix->rec_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + if (parent->need_display) { + mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + LOG_V( "Get Surface Done\n"); + + + VAImage src_image; + guint8 *pvbuf; + guint8 *dst_y; + guint8 *dst_uv; + int i,j; + + LOG_V( + "map source data to surface\n"); + + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videoframe_get_frame_id\n"); + goto cleanup; + } + + + LOG_I( + "surface id = 0x%08x\n", (guint) surface); + + va_status = vaDeriveImage(va_display, surface, &src_image); + //need to destroy + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDeriveImage\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + VAImage *image = &src_image; + + LOG_V( "vaDeriveImage Done\n"); + + + va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed to vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "vaImage information\n"); + LOG_I( + "image->pitches[0] = %d\n", image->pitches[0]); + LOG_I( + "image->pitches[1] = %d\n", image->pitches[1]); + LOG_I( + "image->offsets[0] = %d\n", image->offsets[0]); + LOG_I( + "image->offsets[1] = %d\n", image->offsets[1]); + LOG_I( + "image->num_planes = %d\n", image->num_planes); + LOG_I( + "image->width = %d\n", image->width); + LOG_I( + "image->height = %d\n", image->height); + + LOG_I( + "input buf size = %d\n", bufin->size); + + guint8 *inbuf = bufin->data; + + /*need to convert YUV420 to NV12*/ + dst_y = pvbuf +image->offsets[0]; + + for (i = 0; i < height; i ++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + + dst_uv = pvbuf + image->offsets[1]; + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; + dst_uv [j + 1] = + inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; + } + dst_uv += image->pitches[1]; + } + + va_status = 
vaUnmapBuffer(va_display, image->buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + va_status = vaDestroyImage(va_display, src_image.image_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDestroyImage\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "Map source data to surface done\n"); + + } + + else {//if (!parent->share_buf_mode) + + MixVideoFrame * frame = mix_videoframe_new(); + + if (mix->ref_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->ref_frame, frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used + { + LOG_E( + "get reference surface from pool failed\n"); + goto cleanup; + } + } + + if (mix->rec_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->rec_frame, frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get reconstructed surface from pool failed\n"); + goto cleanup; + } + } + + if (parent->need_display) { + mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + guint ci_idx; + memcpy (&ci_idx, bufin->data, bufin->size); + + LOG_I( + "surface_num = %d\n", mix->surface_num); + LOG_I( + "ci_frame_idx = %d\n", ci_idx); + + if (ci_idx > mix->surface_num - 2) { + LOG_E( + "the CI frame idx exceeds the CI frame number\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->cur_frame, frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get current working surface from pool failed\n"); + goto cleanup; + } + } + + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + + } + + LOG_V( "vaBeginPicture\n"); + LOG_I( "va_context = 0x%08x\n",(guint)va_context); + LOG_I( "surface = 0x%08x\n",(guint)surface); + LOG_I( "va_display = 0x%08x\n",(guint)va_display); + + + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaBeginPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + ret = mix_videofmtenc_h263_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h263_send_encode_command\n"); + goto cleanup; + } + + + if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + } + + if (mix->encoded_frames == 0) { + mix->encoded_frames ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + + mix->last_frame = mix->cur_frame; + + + /* determine the picture type*/ + if ((mix->encoded_frames % parent->intra_period) == 0) { + mix->is_intra = TRUE; + } else { + mix->is_intra = FALSE; + } + + tmp_frame = mix->rec_frame; + mix->rec_frame= 
mix->ref_frame; + mix->ref_frame = tmp_frame; + + + } + + LOG_V( "vaSyncSurface\n"); + + va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaSyncSurface\n"); + //return MIX_RESULT_FAIL; + } + + + LOG_V( + "Start to get encoded data\n"); + + /*get encoded data from the VA buffer*/ + va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + VACodedBufferSegment *coded_seg = NULL; + int num_seg = 0; + guint total_size = 0; + guint size = 0; + + coded_seg = (VACodedBufferSegment *)buf; + num_seg = 1; + + while (1) { + total_size += coded_seg->size; + + if (coded_seg->next == NULL) + break; + + coded_seg ++; + num_seg ++; + } + + +#if 0 + // first 4 bytes is the size of the buffer + memcpy (&(iovout->data_size), (void*)buf, 4); + //size = (guint*) buf; +#endif + + iovout->data_size = total_size; + + if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. + + iovout->data = g_malloc (iovout->data_size); + if (iovout->data == NULL) { + LOG_E( "iovout->data == NULL\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + } + + //memcpy (iovout->data, buf + 16, iovout->data_size); + + coded_seg = (VACodedBufferSegment *)buf; + total_size = 0; + + while (1) { + + memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); + total_size += coded_seg->size; + + if (coded_seg->next == NULL) + break; + + coded_seg ++; + } + + iovout->buffer_size = iovout->data_size; + + LOG_I( + "out size is = %d\n", iovout->data_size); + + va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "get encoded data done\n"); + + if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + return MIX_RESULT_FAIL; + } + } + + if (mix->encoded_frames == 1) { + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaBeginPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + ret = mix_videofmtenc_h263_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); + goto cleanup; + } + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + } + + VASurfaceStatus status; + + /*query the status of current surface*/ + va_status = vaQuerySurfaceStatus(va_display, surface, &status); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaQuerySurfaceStatus\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + mix->pic_skipped = status & VASurfaceSkipped; + + //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame); + + if (parent->need_display) { + ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + goto cleanup; + } + + ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_framemanager_enqueue\n"); + goto cleanup; + } + } + + /*update the reference surface and reconstructed 
surface */ + if (!mix->pic_skipped) { + tmp_frame = mix->rec_frame; + mix->rec_frame= mix->ref_frame; + mix->ref_frame = tmp_frame; + } + + +#if 0 + if (mix->ref_frame != NULL) + mix_videoframe_unref (mix->ref_frame); + mix->ref_frame = mix->rec_frame; + + mix_videoframe_unref (mix->cur_frame); +#endif + + mix->encoded_frames ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + mix->last_frame = mix->cur_frame; + + if (!(parent->need_display)) { + mix_videoframe_unref (mix->cur_frame); + mix->cur_frame = NULL; + } + +cleanup: + + if(ret != MIX_RESULT_SUCCESS) { + if(iovout->data) { + g_free(iovout->data); + iovout->data = NULL; + } + } + + LOG_V( "end\n"); + + return ret; +} + +MIX_RESULT mix_videofmtenc_h263_get_max_encoded_buf_size ( + MixVideoFormatEnc *mix, guint * max_size) +{ + + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) + { + LOG_E( + "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + parent = MIX_VIDEOFORMATENC(mix); + MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263 (mix); + + if (MIX_IS_VIDEOFORMATENC_H263(self)) { + + if (self->coded_buf_size > 0) { + *max_size = self->coded_buf_size; + LOG_V ("Max encoded size already calculated, returning the cached value directly"); + return MIX_RESULT_SUCCESS; + } + + /*based on the rate control mode, calculate the default encoded buffer size*/ + if (self->va_rcmode == VA_RC_NONE) { + self->coded_buf_size = + (parent->picture_width* parent->picture_height * 830) / (16 * 16); + // set to value according to QP + } + else { + self->coded_buf_size = parent->bitrate/ 4; + } + + self->coded_buf_size = + max (self->coded_buf_size , + (parent->picture_width* parent->picture_height * 830) / (16 * 16)); + + /*in case the user supplied a very large bit rate value*/ + self->coded_buf_size = + max(self->coded_buf_size, + (parent->picture_width * parent->picture_height * 1.5 * 8)); + self->coded_buf_size = (self->coded_buf_size + 15) &(~15); + } + else + { + LOG_E( + "not an H263 video encode object\n"); + return MIX_RESULT_INVALID_PARAM; + } + + *max_size = self->coded_buf_size; + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_h263_send_encode_command (MixVideoFormatEnc_H263 *mix) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (MIX_IS_VIDEOFORMATENC_H263(mix)) + { + if (mix->encoded_frames == 0) { + ret = mix_videofmtenc_h263_send_seq_params (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h263_send_seq_params\n"); + return MIX_RESULT_FAIL; + } + } + + ret = mix_videofmtenc_h263_send_picture_parameter (mix); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h263_send_picture_parameter\n"); + return MIX_RESULT_FAIL; + } + + ret = mix_videofmtenc_h263_send_slice_parameter (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h263_send_slice_parameter\n"); + return MIX_RESULT_FAIL; + } + + } + else + { + LOG_E( + "not an H263 video encode object\n"); + return MIX_RESULT_INVALID_PARAM; + } + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixvideoformatenc_h263.h b/mix_video/src/mixvideoformatenc_h263.h new file mode 100644 index 0000000..b4d1f84 --- /dev/null +++ b/mix_video/src/mixvideoformatenc_h263.h @@ -0,0 +1,141 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. 
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEOFORMATENC_H263_H__ +#define __MIX_VIDEOFORMATENC_H263_H__ + +#include "mixvideoformatenc.h" +#include "mixvideoframe_private.h" + +#define MIX_VIDEO_ENC_H263_SURFACE_NUM 20 + +#define min(X,Y) (((X) < (Y)) ? (X) : (Y)) +#define max(X,Y) (((X) > (Y)) ? (X) : (Y)) + +/* + * Type macros. + */ +#define MIX_TYPE_VIDEOFORMATENC_H263 (mix_videoformatenc_h263_get_type ()) +#define MIX_VIDEOFORMATENC_H263(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC_H263, MixVideoFormatEnc_H263)) +#define MIX_IS_VIDEOFORMATENC_H263(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC_H263)) +#define MIX_VIDEOFORMATENC_H263_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC_H263, MixVideoFormatEnc_H263Class)) +#define MIX_IS_VIDEOFORMATENC_H263_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC_H263)) +#define MIX_VIDEOFORMATENC_H263_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC_H263, MixVideoFormatEnc_H263Class)) + +typedef struct _MixVideoFormatEnc_H263 MixVideoFormatEnc_H263; +typedef struct _MixVideoFormatEnc_H263Class MixVideoFormatEnc_H263Class; + +struct _MixVideoFormatEnc_H263 { + /*< public > */ + MixVideoFormatEnc parent; + + + VABufferID coded_buf[2]; + VABufferID last_coded_buf; + VABufferID seq_param_buf; + VABufferID pic_param_buf; + VABufferID slice_param_buf; + VASurfaceID * ci_shared_surfaces; + VASurfaceID * surfaces; + guint surface_num; + + MixVideoFrame *cur_frame; //current input frame to be encoded; + MixVideoFrame *ref_frame; //reference frame + MixVideoFrame *rec_frame; //reconstructed frame; + MixVideoFrame *last_frame; //last frame; + + + guint disable_deblocking_filter_idc; + guint slice_num; + guint va_rcmode; + + guint encoded_frames; + gboolean pic_skipped; + + gboolean is_intra; + + guint coded_buf_size; + guint coded_buf_index; + + + /*< public > */ +}; + +/** + * MixVideoFormatEnc_H263Class: + * + * MI-X Video object class + */ +struct _MixVideoFormatEnc_H263Class { + /*< public > */ + MixVideoFormatEncClass parent_class; + + /* class members */ + + /*< public > */ +}; + +/** + * mix_videoformatenc_h263_get_type: + * @returns: type + * + * Get the type of object. 
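 * The type is presumably registered with G_DEFINE_TYPE, mirroring the
 * companion H.264 encoder later in this patch, so callers normally reach it
 * through the MIX_TYPE_VIDEOFORMATENC_H263 macro rather than calling this
 * function directly.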
+ */ +GType mix_videoformatenc_h263_get_type(void); + +/** + * mix_videoformatenc_h263_new: + * @returns: A newly allocated instance of #MixVideoFormatEnc_H263 + * + * Use this method to create a new instance of #MixVideoFormatEnc_H263 + */ +MixVideoFormatEnc_H263 *mix_videoformatenc_h263_new(void); + +/** + * mix_videoformatenc_h263_ref: + * @mix: object to add reference + * @returns: the MixVideoFormatEnc_H263 instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoFormatEnc_H263 *mix_videoformatenc_h263_ref(MixVideoFormatEnc_H263 * mix); + +/** + * mix_videoformatenc_h263_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. + */ +#define mix_videoformatenc_h263_unref(obj) g_object_unref (G_OBJECT(obj)) + +/* Class Methods */ + +/* H.263 vmethods */ +MIX_RESULT mix_videofmtenc_h263_getcaps(MixVideoFormatEnc *mix, GString *msg); +MIX_RESULT mix_videofmtenc_h263_initialize(MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); +MIX_RESULT mix_videofmtenc_h263_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params); +MIX_RESULT mix_videofmtenc_h263_flush(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_h263_eos(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_h263_deinitialize(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_h263_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint * max_size); + +/* Local Methods */ + +MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, MixBuffer * bufin, + MixIOVec * iovout); +MIX_RESULT mix_videofmtenc_h263_send_encode_command (MixVideoFormatEnc_H263 *mix); + +#endif /* __MIX_VIDEOFORMATENC_H263_H__ */ + diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c index bf25304..e222ace 100644 --- a/mix_video/src/mixvideoformatenc_h264.c +++ b/mix_video/src/mixvideoformatenc_h264.c @@ -14,7 +14,6 @@ #include "mixvideoformatenc_h264.h" #include "mixvideoconfigparamsenc_h264.h" -#define MDEBUG #undef SHOW_SRC #ifdef SHOW_SRC @@ -38,9 +37,7 @@ G_DEFINE_TYPE (MixVideoFormatEnc_H264, mix_videoformatenc_h264, MIX_TYPE_VIDEOFO static void mix_videoformatenc_h264_init(MixVideoFormatEnc_H264 * self) { MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); - /* TODO: public member initialization */ - - /* TODO: private member initialization */ + /* member initialization */ self->encoded_frames = 0; self->pic_skipped = FALSE; self->is_intra = TRUE; @@ -73,7 +70,6 @@ static void mix_videoformatenc_h264_class_init( gobject_class->finalize = mix_videoformatenc_h264_finalize; /* setup vmethods with base implementation */ - /* TODO: decide if we need to override the parent's methods */ video_formatenc_class->getcaps = mix_videofmtenc_h264_getcaps; video_formatenc_class->initialize = mix_videofmtenc_h264_initialize; video_formatenc_class->encode = mix_videofmtenc_h264_encode; @@ -113,12 +109,6 @@ mix_videoformatenc_h264_ref(MixVideoFormatEnc_H264 * mix) { /*H.264 vmethods implementation */ MIX_RESULT mix_videofmtenc_h264_getcaps(MixVideoFormatEnc *mix, GString *msg) { - /* TODO: add codes for H.264 */ - - /* TODO: decide if we need to chainup parent method. 
- * if we do, the following is the code: - */ - LOG_V( "mix_videofmtenc_h264_getcaps\n"); if (mix == NULL) { @@ -145,7 +135,7 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, MixVideoConfigParamsEncH264 * config_params_enc_h264; VAStatus va_status = VA_STATUS_SUCCESS; - VASurfaceID * surfaces; + VASurfaceID * surfaces = NULL; gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; gint va_num_profiles, va_num_entrypoints; @@ -167,10 +157,8 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, LOG_V( "begin\n"); - //TODO additional parameter checking - + /* Chainup parent method. */ -#if 1 if (parent_class->initialize) { ret = parent_class->initialize(mix, config_params_enc, frame_mgr, input_buf_pool, surface_pool, @@ -182,527 +170,495 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, return ret; } -#endif //disable it currently - - if (MIX_IS_VIDEOFORMATENC_H264(mix)) - { - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) { - config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc); - } else { - LOG_V( - "mix_videofmtenc_h264_initialize: no h264 config params found\n"); - return MIX_RESULT_FAIL; - } - - g_mutex_lock(parent->objectlock); - - LOG_V( - "Start to get properities from h.264 params\n"); - - /* get properties from H264 params Object, which is special to H264 format*/ - ret = mix_videoconfigparamsenc_h264_get_bus (config_params_enc_h264, - &self->basic_unit_size); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_bus\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - - ret = mix_videoconfigparamsenc_h264_get_dlk (config_params_enc_h264, - &self->disable_deblocking_filter_idc); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_dlk\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - - ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, - &self->slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_h264_get_delimiter_type (config_params_enc_h264, - &self->delimiter_type); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E ( - "Failed to mix_videoconfigparamsenc_h264_get_delimiter_type\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - LOG_V( - "======H264 Encode Object properities======:\n"); + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; - LOG_I( "self->basic_unit_size = %d\n", - self->basic_unit_size); - LOG_I( "self->disable_deblocking_filter_idc = %d\n", - self->disable_deblocking_filter_idc); - LOG_I( "self->slice_num = %d\n", - self->slice_num); - LOG_I ("self->delimiter_type = %d\n", - self->delimiter_type); - + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) { + config_params_enc_h264 = + MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc); + } else { LOG_V( - "Get properities from params done\n"); - - - //display = XOpenDisplay(NULL); - //va_display = vaGetDisplay (videoencobj->display); - - parent->va_display 
= va_display; - - LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", - (guint)va_display); - - //va_status = vaInitialize(va_display, &va_major_ver, &va_minor_ver); - //g_print ("vaInitialize va_status = %d\n", va_status); - + "mix_videofmtenc_h264_initialize: no h264 config params found\n"); + return MIX_RESULT_FAIL; + } + + g_mutex_lock(parent->objectlock); + + LOG_V( + "Start to get properities from h.264 params\n"); + + /* get properties from H264 params Object, which is special to H264 format*/ + ret = mix_videoconfigparamsenc_h264_get_bus (config_params_enc_h264, + &self->basic_unit_size); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_bus\n"); + goto cleanup; + } + + + ret = mix_videoconfigparamsenc_h264_get_dlk (config_params_enc_h264, + &self->disable_deblocking_filter_idc); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_dlk\n"); + goto cleanup; + } + + + ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, + &self->slice_num); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); + goto cleanup; + } + + ret = mix_videoconfigparamsenc_h264_get_delimiter_type (config_params_enc_h264, + &self->delimiter_type); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E ( + "Failed to mix_videoconfigparamsenc_h264_get_delimiter_type\n"); + goto cleanup; + } + + LOG_V( + "======H264 Encode Object properities======:\n"); + + LOG_I( "self->basic_unit_size = %d\n", + self->basic_unit_size); + LOG_I( "self->disable_deblocking_filter_idc = %d\n", + self->disable_deblocking_filter_idc); + LOG_I( "self->slice_num = %d\n", + self->slice_num); + LOG_I ("self->delimiter_type = %d\n", + self->delimiter_type); + + LOG_V( + "Get properities from params done\n"); + parent->va_display = va_display; + + LOG_V( "Get Display\n"); + LOG_I( "Display = 0x%08x\n", + (guint)va_display); + #if 0 - /* query the vender information, can ignore*/ - va_vendor = vaQueryVendorString (va_display); - LOG_I( "Vendor = %s\n", - va_vendor); + /* query the vender information, can ignore*/ + va_vendor = vaQueryVendorString (va_display); + LOG_I( "Vendor = %s\n", + va_vendor); #endif - /*get the max number for profiles/entrypoints/attribs*/ - va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); - - va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); - - va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - - va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); - va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); - - if (va_profiles == NULL || va_entrypoints ==NULL) - { - LOG_E( - "!va_profiles || !va_entrypoints\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_NO_MEMORY; - } - - LOG_I( - "va_profiles = 0x%08x\n", (guint)va_profiles); - - LOG_V( "vaQueryConfigProfiles\n"); - - - va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigProfiles\n"); - g_free(va_profiles); - g_free (va_entrypoints); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - LOG_V( "vaQueryConfigProfiles Done\n"); - - - - /*check whether profile is supported*/ - for(index= 0; index < va_num_profiles; index++) { - 
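// Linear scan of the profiles the driver reports: the loop breaks at the
// first entry matching the profile requested through the parent object, so
// falling out with index == va_num_profiles means the profile is unsupported.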
if(parent->va_profile == va_profiles[index]) - break; - } - - if(index == va_num_profiles) - { - LOG_E( "Profile not supported\n"); - g_free(va_profiles); - g_free (va_entrypoints); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; //Todo, add error handling here - } - - LOG_V( "vaQueryConfigEntrypoints\n"); - - - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints(va_display, - parent->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); - g_free(va_profiles); - g_free (va_entrypoints); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < va_num_entrypoints; index ++) { - if (va_entrypoints[index] == VAEntrypointEncSlice) { - break; - } - } - - if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); - g_free(va_profiles); - g_free (va_entrypoints); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; //Todo, add error handling here - } - - - /*free profiles and entrypoints*/ - g_free(va_profiles); - g_free (va_entrypoints); - - va_attrib[0].type = VAConfigAttribRTFormat; - va_attrib[1].type = VAConfigAttribRateControl; - - LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaGetConfigAttributes\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - if ((va_attrib[0].value & parent->va_format) == 0) { - LOG_E( "Matched format not found\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; //Todo, add error handling here - } - - - if ((va_attrib[1].value & parent->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; //Todo, add error handling here - } - - va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = parent->va_rcmode; - - LOG_V( "======VA Configuration======\n"); - - LOG_I( "profile = %d\n", - parent->va_profile); - LOG_I( "va_entrypoint = %d\n", - parent->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", - va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", - va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", - va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) = %d\n", - va_attrib[1].value); - - LOG_V( "vaCreateConfig\n"); - - va_status = vaCreateConfig(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2, &(parent->va_config)); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaCreateConfig\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - /*TODO: compute the surface number*/ - int numSurfaces; + /*get the max number for profiles/entrypoints/attribs*/ + va_max_num_profiles = vaMaxNumProfiles(va_display); + LOG_I( "va_max_num_profiles = %d\n", + va_max_num_profiles); + + va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); + LOG_I( "va_max_num_entrypoints = %d\n", + va_max_num_entrypoints); + + va_max_num_attribs = vaMaxNumConfigAttributes(va_display); + LOG_I( "va_max_num_attribs = %d\n", + va_max_num_attribs); + + va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); + va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); + + if (va_profiles == NULL || va_entrypoints ==NULL) + { + LOG_E( + "!va_profiles || !va_entrypoints\n"); + ret = MIX_RESULT_NO_MEMORY; + goto 
cleanup; + } + + LOG_I( + "va_profiles = 0x%08x\n", (guint)va_profiles); + + LOG_V( "vaQueryConfigProfiles\n"); + + + va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigProfiles\n"); - if (parent->share_buf_mode) { - numSurfaces = 2; - } - else { - numSurfaces = 8; - parent->ci_frame_num = 0; + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "vaQueryConfigProfiles Done\n"); + + + + /*check whether profile is supported*/ + for(index= 0; index < va_num_profiles; index++) { + if(parent->va_profile == va_profiles[index]) + break; + } + + if(index == va_num_profiles) + { + LOG_E( "Profile not supported\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "vaQueryConfigEntrypoints\n"); + + + /*Check entry point*/ + va_status = vaQueryConfigEntrypoints(va_display, + parent->va_profile, + va_entrypoints, &va_num_entrypoints); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigEntrypoints\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + for (index = 0; index < va_num_entrypoints; index ++) { + if (va_entrypoints[index] == VAEntrypointEncSlice) { + break; } + } + + if (index == va_num_entrypoints) { + LOG_E( "Entrypoint not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + va_attrib[0].type = VAConfigAttribRTFormat; + va_attrib[1].type = VAConfigAttribRateControl; + + LOG_V( "vaGetConfigAttributes\n"); + + va_status = vaGetConfigAttributes(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaGetConfigAttributes\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + if ((va_attrib[0].value & parent->va_format) == 0) { + LOG_E( "Matched format not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + if ((va_attrib[1].value & parent->va_rcmode) == 0) { + LOG_E( "RC mode not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; + va_attrib[1].value = parent->va_rcmode; + + LOG_V( "======VA Configuration======\n"); + + LOG_I( "profile = %d\n", + parent->va_profile); + LOG_I( "va_entrypoint = %d\n", + parent->va_entrypoint); + LOG_I( "va_attrib[0].type = %d\n", + va_attrib[0].type); + LOG_I( "va_attrib[1].type = %d\n", + va_attrib[1].type); + LOG_I( "va_attrib[0].value (Format) = %d\n", + va_attrib[0].value); + LOG_I( "va_attrib[1].value (RC mode) = %d\n", + va_attrib[1].value); + + LOG_V( "vaCreateConfig\n"); + + va_status = vaCreateConfig(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2, &(parent->va_config)); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaCreateConfig\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + /*TODO: compute the surface number*/ + int numSurfaces; + + if (parent->share_buf_mode) { + numSurfaces = 2; + } + else { + numSurfaces = 8; + parent->ci_frame_num = 0; + } + + self->surface_num = numSurfaces + parent->ci_frame_num; + + surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); + + if (surfaces == NULL) + { + LOG_E( + "Failed allocate surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + LOG_V( "vaCreateSurfaces\n"); + + va_status = vaCreateSurfaces(va_display, parent->picture_width, + parent->picture_height, parent->va_format, + numSurfaces, surfaces); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaCreateSurfaces\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; 
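/*
 * The rewritten hunks above funnel every failure through a single goto-based
 * cleanup path instead of repeating "g_free + g_mutex_unlock + return" at
 * each error site. A minimal sketch of the idiom, using GLib and libva types
 * as in the surrounding code but with hypothetical names (example_init is
 * not a function in this patch):
 *
 *   MIX_RESULT example_init (int n)
 *   {
 *       MIX_RESULT ret = MIX_RESULT_SUCCESS;
 *       VAProfile *profiles = g_malloc (sizeof (VAProfile) * n);
 *       VASurfaceID *surfaces = NULL;
 *
 *       if (profiles == NULL) {
 *           ret = MIX_RESULT_NO_MEMORY;
 *           goto cleanup;              // every failure jumps here
 *       }
 *
 *       surfaces = g_malloc (sizeof (VASurfaceID) * n);
 *       if (surfaces == NULL) {
 *           ret = MIX_RESULT_NO_MEMORY;
 *           goto cleanup;              // profiles is still freed below
 *       }
 *
 *       // ... acquire further resources, goto cleanup on any failure ...
 *
 *   cleanup:
 *       if (profiles)
 *           g_free (profiles);         // released exactly once, on all paths
 *       if (surfaces)
 *           g_free (surfaces);
 *       return ret;
 *   }
 *
 * Each resource is then released in one place, which is what lets this patch
 * delete the per-site unlock/free sequences in the hunks that follow.
 */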
+ } + + if (parent->share_buf_mode) { - self->surface_num = numSurfaces + parent->ci_frame_num; - - surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); + LOG_V( + "We are in share buffer mode!\n"); + self->ci_shared_surfaces = + g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); - if (surfaces == NULL) + if (self->ci_shared_surfaces == NULL) { LOG_E( - "Failed allocate surface\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_NO_MEMORY; + "Failed allocate shared surface\n"); + + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; } - - LOG_V( "vaCreateSurfaces\n"); - va_status = vaCreateSurfaces(va_display, parent->picture_width, - parent->picture_height, parent->va_format, - numSurfaces, surfaces); - //TODO check vret and return fail if needed - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaCreateSurfaces\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - if (parent->share_buf_mode) { + guint index; + for(index = 0; index < parent->ci_frame_num; index++) { - LOG_V( - "We are in share buffer mode!\n"); - self->ci_shared_surfaces = - g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); - - if (self->ci_shared_surfaces == NULL) - { - LOG_E( - "Failed allocate shared surface\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_NO_MEMORY; - } - - guint index; - for(index = 0; index < parent->ci_frame_num; index++) { - - LOG_I( "ci_frame_id = %lu\n", - parent->ci_frame_id[index]); - - LOG_V( - "vaCreateSurfaceFromCIFrame\n"); - - va_status = vaCreateSurfaceFromCIFrame(va_display, - (gulong) (parent->ci_frame_id[index]), - &self->ci_shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateSurfaceFromCIFrame\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - } + LOG_I( "ci_frame_id = %lu\n", + parent->ci_frame_id[index]); LOG_V( - "vaCreateSurfaceFromCIFrame Done\n"); + "vaCreateSurfaceFromCIFrame\n"); - }// if (parent->share_buf_mode) - - self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); - - if (self->surfaces == NULL) - { - LOG_E( - "Failed allocate private surface\n"); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_NO_MEMORY; - } - - if (parent->share_buf_mode) { - /*shared surfaces should be put in pool first, - * because we will get it accoring to CI index*/ - for(index = 0; index < parent->ci_frame_num; index++) - self->surfaces[index] = self->ci_shared_surfaces[index]; + va_status = vaCreateSurfaceFromCIFrame(va_display, + (gulong) (parent->ci_frame_id[index]), + &self->ci_shared_surfaces[index]); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateSurfaceFromCIFrame\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } } - for(index = 0; index < numSurfaces; index++) { - self->surfaces[index + parent->ci_frame_num] = surfaces[index]; - } - - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", - numSurfaces + parent->ci_frame_num); - -#if 0 //current put this in gst - images = g_malloc(sizeof(VAImage)*numSurfaces); - if (images == NULL) - { - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < numSurfaces; index++) { - //Derive an VAImage from an existing surface. 
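// (vaDeriveImage is typically zero-copy: the VAImage aliases the surface
// memory, so no separate allocation or vaPutImage upload is needed)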
- //The image buffer can then be mapped/unmapped for CPU access - va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); - } -#endif - - LOG_V( "mix_surfacepool_new\n"); - - parent->surfacepool = mix_surfacepool_new(); - if (surface_pool) - *surface_pool = parent->surfacepool; - //which is useful to check before encode - - if (parent->surfacepool == NULL) - { - LOG_E( - "Failed to mix_surfacepool_new\n"); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - LOG_V( - "mix_surfacepool_initialize\n"); + "vaCreateSurfaceFromCIFrame Done\n"); - ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces, va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - //TODO cleanup and/or retry - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - default: - break; - } - - - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext(va_display, parent->va_config, - parent->picture_width, parent->picture_height, - 0, self->surfaces, parent->ci_frame_num + numSurfaces, - &(parent->va_context)); - - LOG_I( - "Created libva context width %d, height %d\n", - parent->picture_width, parent->picture_height); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", - (guint)va_status); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - guint max_size = 0; - ret = mix_videofmtenc_h264_get_max_encoded_buf_size (parent, &max_size); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videofmtenc_h264_get_max_encoded_buf_size\n"); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - - } + }// if (parent->share_buf_mode) - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &(self->coded_buf[0])); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &(self->coded_buf[1])); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } + self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); + + if (self->surfaces == NULL) + { + LOG_E( + "Failed allocate private surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + if (parent->share_buf_mode) { + /*shared surfaces should be put in pool first, + because we will get it accoring to CI index*/ + for(index = 0; index < parent->ci_frame_num; index++) + self->surfaces[index] = self->ci_shared_surfaces[index]; + } + + for(index = 0; index < numSurfaces; index++) { + self->surfaces[index + parent->ci_frame_num] = surfaces[index]; + } + + LOG_V( "assign surface Done\n"); + LOG_I( "Created %d libva surfaces\n", + numSurfaces + parent->ci_frame_num); -#ifdef SHOW_SRC - Display * display = XOpenDisplay (NULL); - - LOG_I( "display = 0x%08x\n", - (guint) display); - win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, - 
parent->picture_width, parent->picture_height, 0, 0, - WhitePixel(display, 0)); - XMapWindow(display, win); - XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - - XSync(display, False); - LOG_I( "va_display = 0x%08x\n", - (guint) va_display); +#if 0 //current put this in gst + images = g_malloc(sizeof(VAImage)*numSurfaces); + if (images == NULL) + { + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } -#endif /* SHOW_SRC */ + for (index = 0; index < numSurfaces; index++) { + //Derive an VAImage from an existing surface. + //The image buffer can then be mapped/unmapped for CPU access + va_status = vaDeriveImage(va_display, surfaces[index], + &images[index]); + } +#endif + + LOG_V( "mix_surfacepool_new\n"); + + parent->surfacepool = mix_surfacepool_new(); + if (surface_pool) + *surface_pool = parent->surfacepool; + //which is useful to check before encode - parent->initialized = TRUE; - - g_mutex_unlock(parent->objectlock); - g_free (surfaces); - + if (parent->surfacepool == NULL) + { + LOG_E( + "Failed to mix_surfacepool_new\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } - else + + LOG_V( + "mix_surfacepool_initialize\n"); + + ret = mix_surfacepool_initialize(parent->surfacepool, + self->surfaces, parent->ci_frame_num + numSurfaces, va_display); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + LOG_E( "Error init failure\n"); + ret = MIX_RESULT_ALREADY_INIT; + goto cleanup; + default: + break; + } + + + //Initialize and save the VA context ID + LOG_V( "vaCreateContext\n"); + + va_status = vaCreateContext(va_display, parent->va_config, + parent->picture_width, parent->picture_height, + 0, self->surfaces, parent->ci_frame_num + numSurfaces, + &(parent->va_context)); + + LOG_I( + "Created libva context width %d, height %d\n", + parent->picture_width, parent->picture_height); + + if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "not H264 video encode Object\n"); - return MIX_RESULT_FAIL; + "Failed to vaCreateContext\n"); + LOG_I( "va_status = %d\n", + (guint)va_status); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + guint max_size = 0; + ret = mix_videofmtenc_h264_get_max_encoded_buf_size (parent, &max_size); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videofmtenc_h264_get_max_encoded_buf_size\n"); + goto cleanup; } - + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &(self->coded_buf[0])); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &(self->coded_buf[1])); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + +#ifdef SHOW_SRC + Display * display = XOpenDisplay (NULL); + LOG_I( "display = 0x%08x\n", + (guint) display); + win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, + parent->picture_width, parent->picture_height, 0, 0, + WhitePixel(display, 0)); + XMapWindow(display, win); + XSelectInput(display, win, KeyPressMask | StructureNotifyMask); + + XSync(display, False); + LOG_I( "va_display = 0x%08x\n", + (guint) va_display); + +#endif /* SHOW_SRC */ + +cleanup: + + + if (ret == MIX_RESULT_SUCCESS) { + 
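// Mark the encoder initialized only when every step above succeeded; the
// profile/entrypoint scratch arrays and the local surface array are freed
// just below on success and failure alike, before the object lock is
// released.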
parent->initialized = TRUE; + } + + /*free profiles and entrypoints*/ + if (va_profiles) + g_free(va_profiles); + + if (va_entrypoints) + g_free (va_entrypoints); + + if (surfaces) + g_free (surfaces); + + g_mutex_unlock(parent->objectlock); + LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; + return ret; } MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], @@ -715,7 +671,6 @@ MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin LOG_V( "Begin\n"); /*currenly only support one input and output buffer*/ - //TODO: params i if (bufincnt != 1 || iovoutcnt != 1) { LOG_E( @@ -730,12 +685,6 @@ MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin return MIX_RESULT_NULL_PTR; } - //TODO: encode_params is reserved here for future usage. - - /* TODO: decide if we need to chainup parent method. - * * * if we do, the following is the code: - * */ - #if 0 if (parent_class->encode) { return parent_class->encode(mix, bufin, bufincnt, iovout, @@ -743,46 +692,39 @@ MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin } #endif - if (MIX_IS_VIDEOFORMATENC_H264(mix)) - { - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264 (mix); - - LOG_V( "Locking\n"); - g_mutex_lock(parent->objectlock); - - - //TODO: also we could move some encode Preparation work to here + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264 (mix); + + LOG_V( "Locking\n"); + g_mutex_lock(parent->objectlock); - LOG_V( - "mix_videofmtenc_h264_process_encode\n"); - ret = mix_videofmtenc_h264_process_encode (self, - bufin[0], iovout[0]); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_process_encode\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - - LOG_V( "UnLocking\n"); - - g_mutex_unlock(parent->objectlock); - } - else + //TODO: also we could move some encode Preparation work to here + + LOG_V( + "mix_videofmtenc_h264_process_encode\n"); + + ret = mix_videofmtenc_h264_process_encode (self, + bufin[0], iovout[0]); + if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "not H264 video encode Object\n"); - return MIX_RESULT_FAIL; + "Failed mix_videofmtenc_h264_process_encode\n"); + goto cleanup; } - + +cleanup: + + LOG_V( "UnLocking\n"); + + g_mutex_unlock(parent->objectlock); + LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; + return ret; } MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { @@ -803,6 +745,9 @@ MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { return parent_class->flush(mix, msg); } #endif + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); @@ -844,12 +789,6 @@ MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { } MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix) { - - /* TODO: add codes for H.264 */ - - /* TODO: decide if we need to chainup parent method. 
- * if we do, the following is the code: - */ LOG_V( "\n"); @@ -868,6 +807,8 @@ MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) { MixVideoFormatEnc *parent = NULL; VAStatus va_status; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V( "Begin\n"); @@ -876,6 +817,18 @@ MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) { return MIX_RESULT_NULL_PTR; } + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + if (parent_class->deinitialize) { + ret = parent_class->deinitialize(mix); + } + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + parent = MIX_VIDEOFORMATENC(&(mix->parent)); MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); @@ -927,8 +880,8 @@ MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) { { LOG_E( "Failed vaDestroyContext\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; + ret = MIX_RESULT_FAIL; + goto cleanup; } LOG_V( "vaDestroyConfig\n"); @@ -938,25 +891,18 @@ MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) { { LOG_E( "Failed vaDestroyConfig\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; + ret = MIX_RESULT_FAIL; + goto cleanup; } - parent->initialized = TRUE; +cleanup: + parent->initialized = FALSE; g_mutex_unlock(parent->objectlock); -#if 1 - if (parent_class->deinitialize) { - return parent_class->deinitialize(mix); - } -#endif - - //Most stuff is cleaned up in parent_class->finalize() - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; + return ret; } MIX_RESULT mix_videofmtenc_h264_send_seq_params (MixVideoFormatEnc_H264 *mix) @@ -967,89 +913,81 @@ MIX_RESULT mix_videofmtenc_h264_send_seq_params (MixVideoFormatEnc_H264 *mix) MixVideoFormatEnc *parent = NULL; - if (mix == NULL) + if (mix == NULL) { + LOG_E("mix == NULL\n"); return MIX_RESULT_NULL_PTR; + } LOG_V( "Begin\n\n"); - if (MIX_IS_VIDEOFORMATENC_H264(mix)) + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set up the sequence params for HW*/ + h264_seq_param.level_idc = 30; //TODO, hard code now + h264_seq_param.intra_period = parent->intra_period; + h264_seq_param.picture_width_in_mbs = parent->picture_width / 16; + h264_seq_param.picture_height_in_mbs = parent->picture_height/ 16; + h264_seq_param.bits_per_second = parent->bitrate; + h264_seq_param.frame_rate = + (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; + h264_seq_param.initial_qp = parent->initial_qp; + h264_seq_param.min_qp = parent->min_qp; + h264_seq_param.basic_unit_size = mix->basic_unit_size; //for rate control usage + h264_seq_param.intra_period = parent->intra_period; + //h264_seq_param.vui_flag = 248; + //h264_seq_param.seq_parameter_set_id = 176; + + LOG_V( + "===h264 sequence params===\n"); + + LOG_I( "seq_parameter_set_id = %d\n", + (guint)h264_seq_param.seq_parameter_set_id); + LOG_I( "level_idc = %d\n", + (guint)h264_seq_param.level_idc); + LOG_I( "intra_period = %d\n", + h264_seq_param.intra_period); + LOG_I( "picture_width_in_mbs = %d\n", + h264_seq_param.picture_width_in_mbs); + LOG_I( "picture_height_in_mbs = %d\n", + h264_seq_param.picture_height_in_mbs); + LOG_I( "bitrate = %d\n", + h264_seq_param.bits_per_second); + LOG_I( "frame_rate = %d\n", + h264_seq_param.frame_rate); + LOG_I( "initial_qp = %d\n", + h264_seq_param.initial_qp); + LOG_I( "min_qp = %d\n", + h264_seq_param.min_qp); + LOG_I( "basic_unit_size = %d\n", + h264_seq_param.basic_unit_size); + LOG_I( 
"vui_flag = %d\n\n", + h264_seq_param.vui_flag); + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncSequenceParameterBufferType, + sizeof(h264_seq_param), + 1, &h264_seq_param, + &mix->seq_param_buf); + if (va_status != VA_STATUS_SUCCESS) { - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - /*set up the sequence params for HW*/ - h264_seq_param.level_idc = 30; //TODO, hard code now - h264_seq_param.intra_period = parent->intra_period; - h264_seq_param.picture_width_in_mbs = parent->picture_width / 16; - h264_seq_param.picture_height_in_mbs = parent->picture_height/ 16; - h264_seq_param.bits_per_second = parent->bitrate; - h264_seq_param.frame_rate = - (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; - h264_seq_param.initial_qp = parent->initial_qp; - h264_seq_param.min_qp = parent->min_qp; - h264_seq_param.basic_unit_size = mix->basic_unit_size; //for rate control usage - h264_seq_param.intra_period = parent->intra_period; - //h264_seq_param.vui_flag = 248; - //h264_seq_param.seq_parameter_set_id = 176; - - LOG_V( - "===h264 sequence params===\n"); - - LOG_I( "seq_parameter_set_id = %d\n", - (guint)h264_seq_param.seq_parameter_set_id); - LOG_I( "level_idc = %d\n", - (guint)h264_seq_param.level_idc); - LOG_I( "intra_period = %d\n", - h264_seq_param.intra_period); - LOG_I( "picture_width_in_mbs = %d\n", - h264_seq_param.picture_width_in_mbs); - LOG_I( "picture_height_in_mbs = %d\n", - h264_seq_param.picture_height_in_mbs); - LOG_I( "bitrate = %d\n", - h264_seq_param.bits_per_second); - LOG_I( "frame_rate = %d\n", - h264_seq_param.frame_rate); - LOG_I( "initial_qp = %d\n", - h264_seq_param.initial_qp); - LOG_I( "min_qp = %d\n", - h264_seq_param.min_qp); - LOG_I( "basic_unit_size = %d\n", - h264_seq_param.basic_unit_size); - LOG_I( "vui_flag = %d\n\n", - h264_seq_param.vui_flag); - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncSequenceParameterBufferType, - sizeof(h264_seq_param), - 1, &h264_seq_param, - &mix->seq_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->seq_param_buf, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; } - else + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->seq_param_buf, 1); + if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "not H264 video encode Object\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } return MIX_RESULT_SUCCESS; - - } MIX_RESULT mix_videofmtenc_h264_send_picture_parameter (MixVideoFormatEnc_H264 *mix) @@ -1058,70 +996,66 @@ MIX_RESULT mix_videofmtenc_h264_send_picture_parameter (MixVideoFormatEnc_H264 * VAEncPictureParameterBufferH264 h264_pic_param; MixVideoFormatEnc *parent = NULL; - if (mix == NULL) + if (mix == NULL) { + LOG_E("mix == NULL\n"); return MIX_RESULT_NULL_PTR; + } LOG_V( "Begin\n\n"); - if (MIX_IS_VIDEOFORMATENC_H264(mix)) { + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - /*set picture params for HW*/ - h264_pic_param.reference_picture = mix->ref_frame->frame_id; - h264_pic_param.reconstructed_picture = mix->rec_frame->frame_id; - h264_pic_param.coded_buf = 
mix->coded_buf[mix->coded_buf_index]; - h264_pic_param.picture_width = parent->picture_width; - h264_pic_param.picture_height = parent->picture_height; - h264_pic_param.last_picture = 0; - - - LOG_V( - "======h264 picture params======\n"); - LOG_I( "reference_picture = 0x%08x\n", - h264_pic_param.reference_picture); - LOG_I( "reconstructed_picture = 0x%08x\n", - h264_pic_param.reconstructed_picture); - LOG_I( "coded_buf_index = %d\n", - mix->coded_buf_index); - LOG_I( "coded_buf = 0x%08x\n", - h264_pic_param.coded_buf); - LOG_I( "picture_width = %d\n", - h264_pic_param.picture_width); - LOG_I( "picture_height = %d\n\n", - h264_pic_param.picture_height); + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set picture params for HW*/ + h264_pic_param.reference_picture = mix->ref_frame->frame_id; + h264_pic_param.reconstructed_picture = mix->rec_frame->frame_id; + h264_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; + h264_pic_param.picture_width = parent->picture_width; + h264_pic_param.picture_height = parent->picture_height; + h264_pic_param.last_picture = 0; + + + LOG_V( + "======h264 picture params======\n"); + LOG_I( "reference_picture = 0x%08x\n", + h264_pic_param.reference_picture); + LOG_I( "reconstructed_picture = 0x%08x\n", + h264_pic_param.reconstructed_picture); + LOG_I( "coded_buf_index = %d\n", + mix->coded_buf_index); + LOG_I( "coded_buf = 0x%08x\n", + h264_pic_param.coded_buf); + LOG_I( "picture_width = %d\n", + h264_pic_param.picture_width); + LOG_I( "picture_height = %d\n\n", + h264_pic_param.picture_height); - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncPictureParameterBufferType, - sizeof(h264_pic_param), - 1,&h264_pic_param, - &mix->pic_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->pic_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncPictureParameterBufferType, + sizeof(h264_pic_param), + 1,&h264_pic_param, + &mix->pic_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; } - else + + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->pic_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "not H264 video encode Object\n"); - return MIX_RESULT_FAIL; - } - + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + LOG_V( "end\n"); return MIX_RESULT_SUCCESS; @@ -1137,136 +1071,132 @@ MIX_RESULT mix_videofmtenc_h264_send_slice_parameter (MixVideoFormatEnc_H264 *mi guint slice_index; guint slice_height_in_mb; - if (mix == NULL) + if (mix == NULL) { + LOG_E("mix == NULL\n"); return MIX_RESULT_NULL_PTR; + } LOG_V( "Begin\n\n"); MixVideoFormatEnc *parent = NULL; - if (MIX_IS_VIDEOFORMATENC_H264(mix)) - { - parent = MIX_VIDEOFORMATENC(&(mix->parent)); + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); - slice_num = mix->slice_num; - slice_height = parent->picture_height / slice_num; + slice_num = mix->slice_num; + slice_height = parent->picture_height / slice_num; - slice_height += 15; - slice_height &= (~15); + slice_height += 15; + slice_height &= (~15); #if 1 - va_status = vaCreateBuffer (parent->va_display, parent->va_context, - 
VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), - slice_num, NULL, - &mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - VAEncSliceParameterBuffer *slice_param, *current_slice; + va_status = vaCreateBuffer (parent->va_display, parent->va_context, + VAEncSliceParameterBufferType, + sizeof(VAEncSliceParameterBuffer), + slice_num, NULL, + &mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + VAEncSliceParameterBuffer *slice_param, *current_slice; + + va_status = vaMapBuffer(parent->va_display, + mix->slice_param_buf, + (void **)&slice_param); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaMapBuffer\n"); + return MIX_RESULT_FAIL; + } + + current_slice = slice_param; - va_status = vaMapBuffer(parent->va_display, - mix->slice_param_buf, - (void **)&slice_param); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaMapBuffer\n"); - return MIX_RESULT_FAIL; - } - - current_slice = slice_param; + for (slice_index = 0; slice_index < slice_num; slice_index++) { + current_slice = slice_param + slice_index; + slice_height_in_mb = + min (slice_height, parent->picture_height + - slice_index * slice_height) / 16; - for (slice_index = 0; slice_index < slice_num; slice_index++) { - current_slice = slice_param + slice_index; - slice_height_in_mb = - min (slice_height, parent->picture_height - - slice_index * slice_height) / 16; - - // starting MB row number for this slice - current_slice->start_row_number = slice_index * slice_height / 16; - // slice height measured in MB - current_slice->slice_height = slice_height_in_mb; - current_slice->slice_flags.bits.is_intra = mix->is_intra; - current_slice->slice_flags.bits.disable_deblocking_filter_idc - = mix->disable_deblocking_filter_idc; - - LOG_V( - "======h264 slice params======\n"); - - LOG_I( "slice_index = %d\n", - (gint) slice_index); - LOG_I( "start_row_number = %d\n", - (gint) current_slice->start_row_number); - LOG_I( "slice_height_in_mb = %d\n", - (gint) current_slice->slice_height); - LOG_I( "slice.is_intra = %d\n", - (gint) current_slice->slice_flags.bits.is_intra); - LOG_I( - "disable_deblocking_filter_idc = %d\n\n", - (gint) mix->disable_deblocking_filter_idc); - - } + // starting MB row number for this slice + current_slice->start_row_number = slice_index * slice_height / 16; + // slice height measured in MB + current_slice->slice_height = slice_height_in_mb; + current_slice->slice_flags.bits.is_intra = mix->is_intra; + current_slice->slice_flags.bits.disable_deblocking_filter_idc + = mix->disable_deblocking_filter_idc; - va_status = vaUnmapBuffer(parent->va_display, mix->slice_param_buf); + LOG_V( + "======h264 slice params======\n"); + + LOG_I( "slice_index = %d\n", + (gint) slice_index); + LOG_I( "start_row_number = %d\n", + (gint) current_slice->start_row_number); + LOG_I( "slice_height_in_mb = %d\n", + (gint) current_slice->slice_height); + LOG_I( "slice.is_intra = %d\n", + (gint) current_slice->slice_flags.bits.is_intra); + LOG_I( + "disable_deblocking_filter_idc = %d\n\n", + (gint) mix->disable_deblocking_filter_idc); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } + } + + va_status = vaUnmapBuffer(parent->va_display, mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + return 
MIX_RESULT_FAIL; + } #endif #if 0 - VAEncSliceParameterBuffer slice_param; - slice_index = 0; - slice_height_in_mb = slice_height / 16; - slice_param.start_row_number = 0; - slice_param.slice_height = slice_height / 16; - slice_param.slice_flags.bits.is_intra = mix->is_intra; - slice_param.slice_flags.bits.disable_deblocking_filter_idc - = mix->disable_deblocking_filter_idc; - - va_status = vaCreateBuffer (parent->va_display, parent->va_context, - VAEncSliceParameterBufferType, - sizeof(slice_param), - slice_num, &slice_param, - &mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } -#endif - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->slice_param_buf, 1); - + VAEncSliceParameterBuffer slice_param; + slice_index = 0; + slice_height_in_mb = slice_height / 16; + slice_param.start_row_number = 0; + slice_param.slice_height = slice_height / 16; + slice_param.slice_flags.bits.is_intra = mix->is_intra; + slice_param.slice_flags.bits.disable_deblocking_filter_idc + = mix->disable_deblocking_filter_idc; + + va_status = vaCreateBuffer (parent->va_display, parent->va_context, + VAEncSliceParameterBufferType, + sizeof(slice_param), + slice_num, &slice_param, + &mix->slice_param_buf); + if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaRenderPicture\n"); + "Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; - } + } +#endif - } - else + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->slice_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "not H264 video encode Object\n"); - return MIX_RESULT_FAIL; - } + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + LOG_V( "end\n"); @@ -1295,553 +1225,631 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, LOG_V( "Begin\n"); - if (MIX_IS_VIDEOFORMATENC_H264(mix)) - { - - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - va_display = parent->va_display; - va_context = parent->va_context; - width = parent->picture_width; - height = parent->picture_height; + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + va_display = parent->va_display; + va_context = parent->va_context; + width = parent->picture_width; + height = parent->picture_height; + - LOG_I( "encoded_frames = %d\n", - mix->encoded_frames); - LOG_I( "is_intra = %d\n", - mix->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", - (guint) parent->ci_frame_id); - - /* determine the picture type*/ - if ((mix->encoded_frames % parent->intra_period) == 0) { - mix->is_intra = TRUE; - } else { - mix->is_intra = FALSE; - } + LOG_I( "encoded_frames = %d\n", + mix->encoded_frames); + LOG_I( "is_intra = %d\n", + mix->is_intra); + LOG_I( "ci_frame_id = 0x%08x\n", + (guint) parent->ci_frame_id); - LOG_I( "is_intra_picture = %d\n", - mix->is_intra); - - LOG_V( - "Get Surface from the pool\n"); - - /*current we use one surface for source data, - * one for reference and one for reconstructed*/ - /*TODO, could be refine here*/ - - if (!parent->share_buf_mode) { - LOG_V( - "We are NOT in share buffer mode\n"); - - if (mix->ref_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - return MIX_RESULT_FAIL; - } - } - - if (mix->rec_frame == NULL) - { - ret = 
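/* Editor's note: the picture-type rule above is a plain GOP counter: every
 * intra_period-th frame (starting with frame 0) is coded as an I frame,
 * everything else as P. A minimal sketch (intra_period must be non-zero): */
gboolean is_intra_frame(guint encoded_frames, guint intra_period)
{
    /* frames 0, intra_period, 2*intra_period, ... are intra */
    return (encoded_frames % intra_period) == 0;
}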
mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - return MIX_RESULT_FAIL; - } - } + /* determine the picture type*/ + if ((mix->encoded_frames % parent->intra_period) == 0) { + mix->is_intra = TRUE; + } else { + mix->is_intra = FALSE; + } - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - return MIX_RESULT_FAIL; - } - } - - LOG_V( "Get Surface Done\n"); + LOG_I( "is_intra_picture = %d\n", + mix->is_intra); + + LOG_V( + "Get Surface from the pool\n"); + + /*current we use one surface for source data, + * one for reference and one for reconstructed*/ + /*TODO, could be refine here*/ - - VAImage src_image; - guint8 *pvbuf; - guint8 *dst_y; - guint8 *dst_uv; - int i,j; - - LOG_V( - "map source data to surface\n"); - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) + if (!parent->share_buf_mode) { + LOG_V( + "We are NOT in share buffer mode\n"); + + if (mix->ref_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used { LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); - return MIX_RESULT_FAIL; + "Failed to mix_surfacepool_get\n"); + goto cleanup; } - - - LOG_I( - "surface id = 0x%08x\n", (guint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); - //need to destroy - - if (va_status != VA_STATUS_SUCCESS) + } + + if (mix->rec_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); + if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to vaDeriveImage\n"); - return MIX_RESULT_FAIL; - } - - VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); - - - va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed to vaMapBuffer\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( - "vaImage information\n"); - LOG_I( - "image->pitches[0] = %d\n", image->pitches[0]); - LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); - LOG_I( - "image->offsets[0] = %d\n", image->offsets[0]); - LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); - LOG_I( - "image->num_planes = %d\n", image->num_planes); - LOG_I( - "image->width = %d\n", image->width); - LOG_I( - "image->height = %d\n", image->height); - - LOG_I( - "input buf size = %d\n", bufin->size); - - guint8 *inbuf = bufin->data; - -/* mutually exclusive */ -//#define USE_SRC_FMT_YUV420 -//#define USE_SRC_FMT_NV12 -#define USE_SRC_FMT_NV21 - -#ifdef USE_SRC_FMT_YUV420 - /*need to convert YUV420 to NV12*/ - dst_y = pvbuf +image->offsets[0]; - - for (i = 0; i < height; i ++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = - inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; - } - dst_uv += image->pitches[1]; - } -#else //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 - int offset_uv = width * height; - guint8 *inbuf_uv = inbuf + offset_uv; - int height_uv = height / 2; - int width_uv = width; - - dst_y = pvbuf + image->offsets[0]; - for (i = 0; i < height; i++) { - memcpy (dst_y, inbuf + i * 
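/* Editor's note: on the NV12/NV21 paths in this hunk the Y plane is copied
 * row by row (source tightly packed, destination padded to image->pitches[0]),
 * and NV21 -> NV12 only swaps each U/V byte pair, since the two layouts
 * differ solely in chroma byte order. A minimal sketch, assuming even width: */
static void nv21_to_nv12_chroma(guint8 *dst, gint dst_pitch,
                                const guint8 *src, gint width, gint height)
{
    gint i, j;
    for (i = 0; i < height / 2; i++) {      /* chroma plane is half height */
        for (j = 0; j < width; j += 2) {
            dst[j]     = src[j + 1];        /* U */
            dst[j + 1] = src[j];            /* V */
        }
        dst += dst_pitch;                   /* destination rows may be padded */
        src += width;                       /* source rows are tightly packed */
    }
}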
width, width); - dst_y += image->pitches[0]; + "Failed to mix_surfacepool_get\n"); + goto cleanup; } + } -#ifdef USE_SRC_FMT_NV12 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i++) { - memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); - dst_uv += image->pitches[1]; - } -#else //USE_SRC_FMT_NV21 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i ++) { - for (j = 0; j < width_uv; j += 2) { - dst_uv[j] = inbuf_uv[j+1]; //u - dst_uv[j+1] = inbuf_uv[j]; //v - } - dst_uv += image->pitches[1]; - inbuf_uv += width_uv; - } -#endif -#endif //USE_SRC_FMT_YUV420 - - vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) + if (parent->need_display) { + mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to vaDestroyImage\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( - "Map source data to surface done\n"); - + "Failed to mix_surfacepool_get\n"); + goto cleanup; + } } - else {//if (!parent->share_buf_mode) - - MixVideoFrame * frame = mix_videoframe_new(); - - if (mix->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_frame, frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "get reference surface from pool failed\n"); - return MIX_RESULT_FAIL; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_frame, frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get recontructed surface from pool failed\n"); - return MIX_RESULT_FAIL; - } - } + LOG_V( "Get Surface Done\n"); - //mix_videoframe_unref (mix->cur_frame); - - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - guint ci_idx; - memcpy (&ci_idx, bufin->data, bufin->size); - - LOG_I( - "surface_num = %d\n", mix->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > mix->surface_num - 2) { - LOG_E( - "the CI frame idx is too bigger than CI frame number\n"); - return MIX_RESULT_FAIL; - } - - - ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_frame, frame); + + VAImage src_image; + guint8 *pvbuf; + guint8 *dst_y; + guint8 *dst_uv; + int i,j; + + LOG_V( + "map source data to surface\n"); + + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videoframe_get_frame_id\n"); + goto cleanup; + } + + + LOG_I( + "surface id = 0x%08x\n", (guint) surface); + + va_status = vaDeriveImage(va_display, surface, &src_image); + //need to destroy + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDeriveImage\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + VAImage *image = &src_image; + + LOG_V( "vaDeriveImage Done\n"); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - return MIX_RESULT_FAIL; - } + + va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); + if (va_status != VA_STATUS_SUCCESS) 
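/* Editor's note: the upload path here uses vaDeriveImage() for direct CPU
 * access to the surface rather than vaCreateImage()/vaPutImage(). The
 * canonical sequence is derive -> map -> copy (honouring image offsets and
 * pitches) -> unmap -> destroy. A minimal sketch (error checks elided;
 * copy_planes() is a hypothetical stand-in for the copy loops): */
VAImage img;
guint8 *base;
vaDeriveImage(va_dpy, surface, &img);      /* img aliases the surface memory */
vaMapBuffer(va_dpy, img.buf, (void **)&base);
copy_planes(base + img.offsets[0], img.pitches[0],
            base + img.offsets[1], img.pitches[1]);
vaUnmapBuffer(va_dpy, img.buf);
vaDestroyImage(va_dpy, img.image_id);      /* derived images must be destroyed */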
+ { + LOG_E( "Failed to vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "vaImage information\n"); + LOG_I( + "image->pitches[0] = %d\n", image->pitches[0]); + LOG_I( + "image->pitches[1] = %d\n", image->pitches[1]); + LOG_I( + "image->offsets[0] = %d\n", image->offsets[0]); + LOG_I( + "image->offsets[1] = %d\n", image->offsets[1]); + LOG_I( + "image->num_planes = %d\n", image->num_planes); + LOG_I( + "image->width = %d\n", image->width); + LOG_I( + "image->height = %d\n", image->height); + + LOG_I( + "input buf size = %d\n", bufin->size); + + guint8 *inbuf = bufin->data; + + /*need to convert YUV420 to NV12*/ + dst_y = pvbuf +image->offsets[0]; + + for (i = 0; i < height; i ++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + + dst_uv = pvbuf + image->offsets[1]; + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; + dst_uv [j + 1] = + inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; } - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - + dst_uv += image->pitches[1]; } - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(guint)va_context); - LOG_I( "surface = 0x%08x\n",(guint)surface); - LOG_I( "va_display = 0x%08x\n",(guint)va_display); - - - va_status = vaBeginPicture(va_display, va_context, surface); + va_status = vaUnmapBuffer(va_display, image->buf); if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaBeginPicture\n"); - return MIX_RESULT_FAIL; + LOG_E( + "Failed to vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } - ret = mix_videofmtenc_h264_send_encode_command (mix); - if (ret != MIX_RESULT_SUCCESS) + va_status = vaDestroyImage(va_display, src_image.image_id); + if (va_status != VA_STATUS_SUCCESS) { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - return MIX_RESULT_FAIL; - } + LOG_E( + "Failed to vaDestroyImage\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + LOG_V( + "Map source data to surface done\n"); + + } + + else {//if (!parent->share_buf_mode) + + MixVideoFrame * frame = mix_videoframe_new(); - if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { + if (mix->ref_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->ref_frame, frame); + if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Failed vaEndPicture\n"); - return MIX_RESULT_FAIL; + LOG_E( + "get reference surface from pool failed\n"); + goto cleanup; } } - if (mix->encoded_frames == 0) { - mix->encoded_frames ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; + if (mix->rec_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } - mix->last_frame = mix->cur_frame; + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->rec_frame, frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get recontructed surface from pool failed\n"); + goto cleanup; + } + } + + //mix_videoframe_unref (mix->cur_frame); + + if (parent->need_display) { + 
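/* Editor's note: in share-buffer (CI) mode the input MixBuffer carries no
 * pixels; its payload is the camera-interface frame index, read out below
 * with memcpy and range-checked against the two pool slots reserved for the
 * reference and reconstructed frames. A minimal sketch (the code below copies
 * bufin->size bytes; sizeof(idx) is the safer assumption used here): */
guint idx = 0;
memcpy(&idx, bufin->data, sizeof(idx));  /* alignment-safe read of the index */
if (idx > surface_num - 2) {
    /* out of range: the last two surfaces belong to the ref/rec frames */
}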
mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + guint ci_idx; + memcpy (&ci_idx, bufin->data, bufin->size); + LOG_I( + "surface_num = %d\n", mix->surface_num); + LOG_I( + "ci_frame_idx = %d\n", ci_idx); - /* determine the picture type*/ - if ((mix->encoded_frames % parent->intra_period) == 0) { - mix->is_intra = TRUE; - } else { - mix->is_intra = FALSE; - } + if (ci_idx > mix->surface_num - 2) { + LOG_E( + "the CI frame idx is too bigger than CI frame number\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + + } - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; + ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->cur_frame, frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get current working surface from pool failed\n"); + goto cleanup; + + } } + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - LOG_V( "vaSyncSurface\n"); - - va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaSyncSurface\n"); - - //return MIX_RESULT_FAIL; - } + } + + LOG_V( "vaBeginPicture\n"); + LOG_I( "va_context = 0x%08x\n",(guint)va_context); + LOG_I( "surface = 0x%08x\n",(guint)surface); + LOG_I( "va_display = 0x%08x\n",(guint)va_display); - LOG_V( - "Start to get encoded data\n"); - - /*get encoded data from the VA buffer*/ - va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); + + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaBeginPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + ret = mix_videofmtenc_h264_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); + goto cleanup; + } + + + if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { + + va_status = vaEndPicture (va_display, va_context); if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaMapBuffer\n"); - return MIX_RESULT_FAIL; - } - - // first 4 bytes is the size of the buffer - memcpy (&(iovout->data_size), (void*)buf, 4); - //size = (guint*) buf; + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + } + + if (mix->encoded_frames == 0) { + mix->encoded_frames ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + + mix->last_frame = mix->cur_frame; + + + /* determine the picture type*/ + if ((mix->encoded_frames % parent->intra_period) == 0) { + mix->is_intra = TRUE; + } else { + mix->is_intra = FALSE; + } + + tmp_frame = mix->rec_frame; + mix->rec_frame= mix->ref_frame; + mix->ref_frame = tmp_frame; + + + } + + + LOG_V( "vaSyncSurface\n"); + + va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaSyncSurface\n"); - guint size = iovout->data_size + 100; + //return MIX_RESULT_FAIL; + } - iovout->buffer_size = size; + LOG_V( + "Start to get encoded data\n"); - //We will support two buffer mode, one is application allocates the buffer and passes to encode, - //the other is encode allocate memory + /*get encoded data from the VA buffer*/ + va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); + if (va_status != 
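/* Editor's note: the encoder above pipelines two coded buffers so frame N+1
 * can be submitted while frame N's bitstream is drained. After each submit,
 * last_coded_buf is the buffer the hardware just filled and the index flips
 * between 0 and 1. A minimal sketch of the rotation: */
last_coded_buf  = coded_buf[coded_buf_index];
coded_buf_index = (coded_buf_index + 1) % 2;  /* ping-pong between the pair */
last_frame      = cur_frame;                  /* surface to vaSyncSurface() on */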
VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + VACodedBufferSegment *coded_seg = NULL; + int num_seg = 0; + guint total_size = 0; + guint size = 0; + + coded_seg = (VACodedBufferSegment *)buf; + num_seg = 1; + + while (1) { + total_size += coded_seg->size; - if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. - iovout->data = g_malloc (size); // In case we have lots of 0x000001 start code, and we replace them with 4 bytes length prefixed - if (iovout->data == NULL) { - return MIX_RESULT_NO_MEMORY; - } - } + if (coded_seg->next == NULL) + break; + + coded_seg ++; + num_seg ++; + } - if (mix->delimiter_type == MIX_DELIMITER_ANNEXB) { - memcpy (iovout->data, buf + 16, iovout->data_size); //parload is started from 17th byte - size = iovout->data_size; - } else { +#if 0 + // first 4 bytes is the size of the buffer + memcpy (&(iovout->data_size), (void*)buf, 4); + //size = (guint*) buf; + + guint size = iovout->data_size + 100; +#endif + + iovout->data_size = total_size; + size = total_size + 100; + + iovout->buffer_size = size; + + //We will support two buffer mode, one is application allocates the buffer and passes to encode, + //the other is encode allocate memory + + + if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. + iovout->data = g_malloc (size); // In case we have lots of 0x000001 start code, and we replace them with 4 bytes length prefixed + if (iovout->data == NULL) { + LOG_E( "iovout->data == NULL\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + } + + coded_seg = (VACodedBufferSegment *)buf; + total_size = 0; + + if (mix->delimiter_type == MIX_DELIMITER_ANNEXB) { + + while (1) { + + memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); + total_size += coded_seg->size; + + if (coded_seg->next == NULL) + break; + + coded_seg ++; + } - guint pos = 0; - guint zero_byte_count = 0; - guint prefix_length = 0; - guint8 nal_unit_type = 0; - guint8 * payload = buf + 16; - - while ((payload[pos++] == 0x00)) { - zero_byte_count ++; - if (pos >= iovout->data_size) //to make sure the buffer to be accessed is valid - break; - } + //memcpy (iovout->data, buf + 16, iovout->data_size); //parload is started from 17th byte + //size = iovout->data_size; + + } else { + + guint pos = 0; + guint zero_byte_count = 0; + guint prefix_length = 0; + guint8 nal_unit_type = 0; + //guint8 * payload = buf + 16; + guint8 * payload = coded_seg->buf; + + while ((payload[pos++] == 0x00)) { + zero_byte_count ++; + if (pos >= coded_seg->size) //to make sure the buffer to be accessed is valid + break; + } + + nal_unit_type = (guint8)(payload[pos] & 0x1f); + prefix_length = zero_byte_count + 1; + + LOG_I ("nal_unit_type = %d\n", nal_unit_type); + LOG_I ("zero_byte_count = %d\n", zero_byte_count); + + size = iovout->data_size; + + if ((payload [pos - 1] & 0x01) && mix->slice_num == 1 && nal_unit_type == 1 && num_seg == 1) { + iovout->data[0] = ((size - prefix_length) >> 24) & 0xff; + iovout->data[1] = ((size - prefix_length) >> 16) & 0xff; + iovout->data[2] = ((size - prefix_length) >> 8) & 0xff; + iovout->data[3] = (size - prefix_length) & 0xff; + // use 4 bytes to indicate the NALU length + //memcpy (iovout->data + 4, buf + 16 + prefix_length, size - prefix_length); + memcpy (iovout->data + 4, coded_seg->buf + prefix_length, size - prefix_length); + LOG_V ("We only have one start code, copy directly\n"); + } + else { + + if (num_seg == 1) { + ret = 
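/* Editor's note: mapping a VAEncCodedBufferType buffer yields a chain of
 * VACodedBufferSegment records; the total bitstream size is the sum over the
 * chain. A minimal sketch that follows the ->next links (the loops above
 * advance with coded_seg++ instead, which relies on the driver laying the
 * segments out contiguously): */
VACodedBufferSegment *seg;
guint total = 0;
for (seg = (VACodedBufferSegment *)buf; seg != NULL;
     seg = (VACodedBufferSegment *)seg->next)
    total += seg->size;               /* seg->buf points at this payload */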
mix_videofmtenc_h264_AnnexB_to_length_prefixed (coded_seg->buf, coded_seg->size, iovout->data, &size); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_AnnexB_to_length_prefixed\n"); + goto cleanup; + } - nal_unit_type = (guint8)(payload[pos] & 0x1f); - prefix_length = zero_byte_count + 1; - - LOG_I ("nal_unit_type = %d\n", nal_unit_type); - LOG_I ("zero_byte_count = %d\n", zero_byte_count); - - if ((payload [pos - 1] & 0x01) && mix->slice_num == 1 && nal_unit_type == 1) { - size = iovout->data_size; - iovout->data[0] = ((size - prefix_length) >> 24) & 0xff; - iovout->data[1] = ((size - prefix_length) >> 16) & 0xff; - iovout->data[2] = ((size - prefix_length) >> 8) & 0xff; - iovout->data[3] = (size - prefix_length) & 0xff; - // use 4 bytes to indicate the NALU length - memcpy (iovout->data + 4, buf + 16 + prefix_length, size - prefix_length); - LOG_V ("We only have one start code, copy directly\n"); - } - else { - ret = mix_videofmtenc_h264_AnnexB_to_length_prefixed (buf + 16, iovout->data_size, iovout->data, &size); + } else { + + guint8 * tem_buf = NULL; + tem_buf = g_malloc (size); + if (tem_buf == NULL) { + LOG_E( "tem_buf == NULL\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + while (1) { + + memcpy (tem_buf + total_size, coded_seg->buf, coded_seg->size); + total_size += coded_seg->size; + + if (coded_seg->next == NULL) + break; + + coded_seg ++; + } + + ret = mix_videofmtenc_h264_AnnexB_to_length_prefixed (tem_buf, iovout->data_size, iovout->data, &size); if (ret != MIX_RESULT_SUCCESS) { LOG_E ( "Failed mix_videofmtenc_h264_AnnexB_to_length_prefixed\n"); - return MIX_RESULT_FAIL; + goto cleanup; } + + g_free (tem_buf); } } + } + + LOG_I( + "out size is = %d\n", iovout->data_size); + + va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "get encoded data done\n"); + + if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { - iovout->data_size = size; - LOG_I( - "out size is = %d\n", iovout->data_size); - - va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); + va_status = vaEndPicture (va_display, va_context); if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "get encoded data done\n"); - - if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - return MIX_RESULT_FAIL; - } - } - - if (mix->encoded_frames == 1) { - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - return MIX_RESULT_FAIL; - } - - ret = mix_videofmtenc_h264_send_encode_command (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - return MIX_RESULT_FAIL; - } + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + } - } - - VASurfaceStatus status; + if (mix->encoded_frames == 1) { + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaBeginPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + ret = 
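/* Editor's note: the single-NALU fast path in this hunk rewrites the Annex-B
 * start code as a 4-byte big-endian length prefix (the MP4/AVCC convention).
 * A minimal sketch of the length write: */
static void write_nal_length(guint8 *dst, guint32 nal_size)
{
    dst[0] = (nal_size >> 24) & 0xff;  /* big-endian: most significant first */
    dst[1] = (nal_size >> 16) & 0xff;
    dst[2] = (nal_size >> 8)  & 0xff;
    dst[3] = nal_size & 0xff;
}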
mix_videofmtenc_h264_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); + goto cleanup; + } - /*query the status of current surface*/ - va_status = vaQuerySurfaceStatus(va_display, surface, &status); + va_status = vaEndPicture (va_display, va_context); if (va_status != VA_STATUS_SUCCESS) { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + } + + VASurfaceStatus status; + + /*query the status of current surface*/ + va_status = vaQuerySurfaceStatus(va_display, surface, &status); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaQuerySurfaceStatus\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + mix->pic_skipped = status & VASurfaceSkipped; + + if (parent->need_display) { + ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + goto cleanup; + } + + ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) + { LOG_E( - "Failed vaQuerySurfaceStatus\n"); - return MIX_RESULT_FAIL; - } - mix->pic_skipped = status & VASurfaceSkipped; + "Failed mix_framemanager_enqueue\n"); + goto cleanup; + } + } - if (parent->need_display) { - ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - return ret; - } + + /*update the reference surface and reconstructed surface */ + if (!mix->pic_skipped) { + tmp_frame = mix->rec_frame; + mix->rec_frame= mix->ref_frame; + mix->ref_frame = tmp_frame; + } + +#if 0 + if (mix->ref_frame != NULL) + mix_videoframe_unref (mix->ref_frame); + mix->ref_frame = mix->rec_frame; + + mix_videoframe_unref (mix->cur_frame); +#endif + + mix->encoded_frames ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + mix->last_frame = mix->cur_frame; - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_framemanager_enqueue\n"); - return MIX_RESULT_FAIL; - } - } - - /*update the reference surface and reconstructed surface */ - if (!mix->pic_skipped) { - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - } - -#if 0 - if (mix->ref_frame != NULL) - mix_videoframe_unref (mix->ref_frame); - mix->ref_frame = mix->rec_frame; - + if (!(parent->need_display)) { mix_videoframe_unref (mix->cur_frame); -#endif - - mix->encoded_frames ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - mix->last_frame = mix->cur_frame; + mix->cur_frame = NULL; + } +cleanup: - if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_frame); - mix->cur_frame = NULL; + if (ret != MIX_RESULT_SUCCESS) { + if (iovout->data) { + g_free (iovout->data); + iovout->data = NULL; } } - else - { - LOG_E( - "not H264 video encode Object\n"); - return MIX_RESULT_FAIL; - } LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; + return ret; } MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size ( @@ -1894,7 +1902,7 @@ MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size ( { LOG_E( "not H264 video encode Object\n"); - return MIX_RESULT_FAIL; + return MIX_RESULT_INVALID_PARAM; } *max_size = self->coded_buf_size; @@ -2041,40 +2049,40 @@ MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix LOG_V( "Begin\n"); - if 
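/* Editor's note: the submission order in send_encode_command below is fixed
 * by the VA-API encode model: sequence parameters go out once (first frame
 * only), then every frame sends picture parameters followed by slice
 * parameters, all inside a vaBeginPicture()/vaEndPicture() pair. A minimal
 * sketch (names abbreviated from the mix_videofmtenc_h264_send_* functions): */
if (encoded_frames == 0)
    send_seq_params(mix);      /* sequence-level data, first frame only */
send_picture_parameter(mix);   /* per frame: ref/rec surfaces, coded buffer */
send_slice_parameter(mix);     /* per frame: slice layout and flags */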
(MIX_IS_VIDEOFORMATENC_H264(mix)) - { - if (mix->encoded_frames == 0) { - mix_videofmtenc_h264_send_seq_params (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_seq_params\n"); - return MIX_RESULT_FAIL; - } - } - - ret = mix_videofmtenc_h264_send_picture_parameter (mix); - + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + if (mix->encoded_frames == 0) { + ret = mix_videofmtenc_h264_send_seq_params (mix); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed mix_videofmtenc_h264_send_picture_parameter\n"); + "Failed mix_videofmtenc_h264_send_seq_params\n"); return MIX_RESULT_FAIL; } - - ret = mix_videofmtenc_h264_send_slice_parameter (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_slice_parameter\n"); - return MIX_RESULT_FAIL; - } - } + + ret = mix_videofmtenc_h264_send_picture_parameter (mix); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_picture_parameter\n"); + return MIX_RESULT_FAIL; + } + + ret = mix_videofmtenc_h264_send_slice_parameter (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_slice_parameter\n"); + return MIX_RESULT_FAIL; + } + LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; + + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoformatenc_mpeg4.c b/mix_video/src/mixvideoformatenc_mpeg4.c index 25b3b3e..5e95d64 100644 --- a/mix_video/src/mixvideoformatenc_mpeg4.c +++ b/mix_video/src/mixvideoformatenc_mpeg4.c @@ -1,1803 +1,1823 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ -#include -#include -#include - -#include "mixvideolog.h" - -#include "mixvideoformatenc_mpeg4.h" -#include "mixvideoconfigparamsenc_mpeg4.h" - -#define MDEBUG -#undef SHOW_SRC - -#ifdef SHOW_SRC -Window win = 0; -#endif /* SHOW_SRC */ - - -/* The parent class. The pointer will be saved - * in this class's initialization. The pointer - * can be used for chaining method call if needed. 
- */ -static MixVideoFormatEncClass *parent_class = NULL; - -static void mix_videoformatenc_mpeg4_finalize(GObject * obj); - -/* - * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC - */ -G_DEFINE_TYPE (MixVideoFormatEnc_MPEG4, mix_videoformatenc_mpeg4, MIX_TYPE_VIDEOFORMATENC); - -static void mix_videoformatenc_mpeg4_init(MixVideoFormatEnc_MPEG4 * self) { - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); - - /* TODO: public member initialization */ - - /* TODO: private member initialization */ - self->encoded_frames = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - self->cur_frame = NULL; - self->ref_frame = NULL; - self->rec_frame = NULL; - - self->ci_shared_surfaces = NULL; - self->surfaces= NULL; - self->surface_num = 0; - self->coded_buf_index = 0; - - parent->initialized = FALSE; - -} - -static void mix_videoformatenc_mpeg4_class_init( - MixVideoFormatEnc_MPEG4Class * klass) { - - /* root class */ - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* direct parent class */ - MixVideoFormatEncClass *video_formatenc_class = - MIX_VIDEOFORMATENC_CLASS(klass); - - /* parent class for later use */ - parent_class = g_type_class_peek_parent(klass); - - /* setup finializer */ - gobject_class->finalize = mix_videoformatenc_mpeg4_finalize; - - /* setup vmethods with base implementation */ - /* TODO: decide if we need to override the parent's methods */ - video_formatenc_class->getcaps = mix_videofmtenc_mpeg4_getcaps; - video_formatenc_class->initialize = mix_videofmtenc_mpeg4_initialize; - video_formatenc_class->encode = mix_videofmtenc_mpeg4_encode; - video_formatenc_class->flush = mix_videofmtenc_mpeg4_flush; - video_formatenc_class->eos = mix_videofmtenc_mpeg4_eos; - video_formatenc_class->deinitialize = mix_videofmtenc_mpeg4_deinitialize; - video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_mpeg4_get_max_encoded_buf_size; -} - -MixVideoFormatEnc_MPEG4 * -mix_videoformatenc_mpeg4_new(void) { - MixVideoFormatEnc_MPEG4 *ret = - g_object_new(MIX_TYPE_VIDEOFORMATENC_MPEG4, NULL); - - return ret; -} - -void mix_videoformatenc_mpeg4_finalize(GObject * obj) { - /* clean up here. */ - - /*MixVideoFormatEnc_MPEG4 *mix = MIX_VIDEOFORMATENC_MPEG4(obj); */ - GObjectClass *root_class = (GObjectClass *) parent_class; - - LOG_V( "\n"); - - /* Chain up parent */ - if (root_class->finalize) { - root_class->finalize(obj); - } -} - -MixVideoFormatEnc_MPEG4 * -mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix) { - return (MixVideoFormatEnc_MPEG4 *) g_object_ref(G_OBJECT(mix)); -} - -/*MPEG-4:2 vmethods implementation */ -MIX_RESULT mix_videofmtenc_mpeg4_getcaps(MixVideoFormatEnc *mix, GString *msg) { - - /* TODO: add codes for MPEG-4:2 */ - - /* TODO: decide if we need to chainup parent method. 
- * if we do, the following is the code: - */ - - LOG_V( "mix_videofmtenc_mpeg4_getcaps\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - if (parent_class->getcaps) { - return parent_class->getcaps(mix, msg); - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display ) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - MixVideoConfigParamsEncMPEG4 * config_params_enc_mpeg4; - - VAStatus va_status = VA_STATUS_SUCCESS; - VASurfaceID * surfaces; - - gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - gint va_num_profiles, va_num_entrypoints; - - VAProfile *va_profiles = NULL; - VAEntrypoint *va_entrypoints = NULL; - VAConfigAttrib va_attrib[2]; - guint index; - - - /*frame_mgr and input_buf_pool is reservered for future use*/ - - if (mix == NULL || config_params_enc == NULL || va_display == NULL) { - LOG_E( - "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "begin\n"); - - - //TODO additional parameter checking - - /* Chainup parent method. */ -#if 1 - if (parent_class->initialize) { - ret = parent_class->initialize(mix, config_params_enc, - frame_mgr, input_buf_pool, surface_pool, - va_display); - } - - if (ret != MIX_RESULT_SUCCESS) - { - return ret; - } - -#endif //disable it currently - - if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - { - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc)) { - config_params_enc_mpeg4 = - MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc); - } else { - LOG_V( - "mix_videofmtenc_mpeg4_initialize: no mpeg4 config params found\n"); - return MIX_RESULT_FAIL; - } - - g_mutex_lock(parent->objectlock); - - LOG_V( - "Start to get properities from MPEG-4:2 params\n"); - - /* get properties from MPEG4 params Object, which is special to MPEG4 format*/ - - ret = mix_videoconfigparamsenc_mpeg4_get_profile_level (config_params_enc_mpeg4, - &self->profile_and_level_indication); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_mpeg4_get_profile_level\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_mpeg4_get_fixed_vti (config_params_enc_mpeg4, - &(self->fixed_vop_time_increment)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_mpeg4_get_fixed_vti\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_mpeg4_get_dlk (config_params_enc_mpeg4, - &(self->disable_deblocking_filter_idc)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E( - "Failed to config_params_enc_mpeg4\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - - LOG_V( - "======MPEG4 Encode Object properities======:\n"); - - LOG_I( "self->profile_and_level_indication = %d\n", - self->profile_and_level_indication); - LOG_I( "self->fixed_vop_time_increment = %d\n\n", - self->fixed_vop_time_increment); - - LOG_V( - "Get properities from params done\n"); - - - //display = XOpenDisplay(NULL); - //va_display = vaGetDisplay (videoencobj->display); - - parent->va_display = va_display; - - 
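/* Editor's note: the (removed) initialization code that follows probes driver
 * capabilities before creating a config: enumerate profiles, confirm the
 * requested one, then enumerate entrypoints for that profile and look for
 * VAEntrypointEncSlice. A minimal sketch with fixed-size arrays for brevity
 * (the code itself sizes them via vaMaxNumProfiles()/vaMaxNumEntrypoints()): */
VAProfile    profiles[32];
VAEntrypoint entrypoints[8];
int n_prof = 0, n_ep = 0, i;
vaQueryConfigProfiles(va_dpy, profiles, &n_prof);
for (i = 0; i < n_prof && profiles[i] != wanted_profile; i++)
    ;                              /* i == n_prof -> profile unsupported */
vaQueryConfigEntrypoints(va_dpy, wanted_profile, entrypoints, &n_ep);
for (i = 0; i < n_ep && entrypoints[i] != VAEntrypointEncSlice; i++)
    ;                              /* i == n_ep -> no slice-encode entrypoint */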
LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", - (guint)va_display); - - //va_status = vaInitialize(va_display, &va_major_ver, &va_minor_ver); - //g_print ("vaInitialize va_status = %d\n", va_status); - - -#if 0 - /* query the vender information, can ignore*/ - va_vendor = vaQueryVendorString (va_display); - LOG_I( "Vendor = %s\n", - va_vendor); -#endif - - /*get the max number for profiles/entrypoints/attribs*/ - va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); - - va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); - - va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - - va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); - va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); - - if (va_profiles == NULL || va_entrypoints ==NULL) - { - LOG_E( - "!va_profiles || !va_entrypoints\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_NO_MEMORY; - } - - LOG_I( - "va_profiles = 0x%08x\n", (guint)va_profiles); - - LOG_V( "vaQueryConfigProfiles\n"); - - - va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigProfiles\n"); - g_free(va_profiles); - g_free (va_entrypoints); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - LOG_V( "vaQueryConfigProfiles Done\n"); - - - - /*check whether profile is supported*/ - for(index= 0; index < va_num_profiles; index++) { - if(parent->va_profile == va_profiles[index]) - break; - } - - if(index == va_num_profiles) - { - LOG_E( "Profile not supported\n"); - g_free(va_profiles); - g_free (va_entrypoints); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; //Todo, add error handling here - } - - LOG_V( "vaQueryConfigEntrypoints\n"); - - - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints(va_display, - parent->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); - g_free(va_profiles); - g_free (va_entrypoints); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < va_num_entrypoints; index ++) { - if (va_entrypoints[index] == VAEntrypointEncSlice) { - break; - } - } - - if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); - g_free(va_profiles); - g_free (va_entrypoints); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; //Todo, add error handling here - } - - - /*free profiles and entrypoints*/ - g_free(va_profiles); - g_free (va_entrypoints); - - va_attrib[0].type = VAConfigAttribRTFormat; - va_attrib[1].type = VAConfigAttribRateControl; - - LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaGetConfigAttributes\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - if ((va_attrib[0].value & parent->va_format) == 0) { - LOG_E( "Matched format not found\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; //Todo, add error handling here - } - - - if ((va_attrib[1].value & parent->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); - g_mutex_unlock(parent->objectlock); - return 
MIX_RESULT_FAIL; //Todo, add error handling here - } - - va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = parent->va_rcmode; - - LOG_V( "======VA Configuration======\n"); - - LOG_I( "profile = %d\n", - parent->va_profile); - LOG_I( "va_entrypoint = %d\n", - parent->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", - va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", - va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", - va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) = %d\n", - va_attrib[1].value); - - LOG_V( "vaCreateConfig\n"); - - va_status = vaCreateConfig(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2, &(parent->va_config)); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaCreateConfig\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - /*TODO: compute the surface number*/ - int numSurfaces; - - if (parent->share_buf_mode) { - numSurfaces = 2; - } - else { - numSurfaces = 8; - parent->ci_frame_num = 0; - } - - self->surface_num = numSurfaces + parent->ci_frame_num; - - surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); - - if (surfaces == NULL) - { - LOG_E( - "Failed allocate surface\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_NO_MEMORY; - } - - LOG_V( "vaCreateSurfaces\n"); - - va_status = vaCreateSurfaces(va_display, parent->picture_width, - parent->picture_height, parent->va_format, - numSurfaces, surfaces); - //TODO check vret and return fail if needed - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaCreateSurfaces\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - if (parent->share_buf_mode) { - - LOG_V( - "We are in share buffer mode!\n"); - self->ci_shared_surfaces = - g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); - - if (self->ci_shared_surfaces == NULL) - { - LOG_E( - "Failed allocate shared surface\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_NO_MEMORY; - } - - guint index; - for(index = 0; index < parent->ci_frame_num; index++) { - - LOG_I( "ci_frame_id = %lu\n", - parent->ci_frame_id[index]); - - LOG_V( - "vaCreateSurfaceFromCIFrame\n"); - - va_status = vaCreateSurfaceFromCIFrame(va_display, - (gulong) (parent->ci_frame_id[index]), - &self->ci_shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateSurfaceFromCIFrame\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - } - - LOG_V( - "vaCreateSurfaceFromCIFrame Done\n"); - - }// if (parent->share_buf_mode) - - self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); - - if (self->surfaces == NULL) - { - LOG_E( - "Failed allocate private surface\n"); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_NO_MEMORY; - } - - if (parent->share_buf_mode) { - /*shared surfaces should be put in pool first, - * because we will get it accoring to CI index*/ - for(index = 0; index < parent->ci_frame_num; index++) - self->surfaces[index] = self->ci_shared_surfaces[index]; - } - - for(index = 0; index < numSurfaces; index++) { - self->surfaces[index + parent->ci_frame_num] = surfaces[index]; - } - - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", - numSurfaces + parent->ci_frame_num); - -#if 0 //current put this in gst - images = g_malloc(sizeof(VAImage)*numSurfaces); - if (images == NULL) - { - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index 
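/* Editor's note: in share-buffer mode the pool's surface array is laid out
 * with the CI (camera) surfaces first, so a frame can be found by its CI
 * index directly; the privately allocated surfaces follow. A minimal sketch
 * of the layout built here: */
for (i = 0; i < ci_frame_num; i++)
    all_surfaces[i] = ci_shared_surfaces[i];           /* CI-indexable slots */
for (i = 0; i < num_private; i++)
    all_surfaces[ci_frame_num + i] = private_surfaces[i];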
< numSurfaces; index++) { - //Derive an VAImage from an existing surface. - //The image buffer can then be mapped/unmapped for CPU access - va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); - } -#endif - - LOG_V( "mix_surfacepool_new\n"); - - parent->surfacepool = mix_surfacepool_new(); - if (surface_pool) - *surface_pool = parent->surfacepool; - //which is useful to check before encode - - if (parent->surfacepool == NULL) - { - LOG_E( - "Failed to mix_surfacepool_new\n"); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - LOG_V( - "mix_surfacepool_initialize\n"); - - ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces, va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - //TODO cleanup and/or retry - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - default: - break; - } - - - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext(va_display, parent->va_config, - parent->picture_width, parent->picture_height, - VA_PROGRESSIVE, self->surfaces, parent->ci_frame_num + numSurfaces, - &(parent->va_context)); - - LOG_I( - "Created libva context width %d, height %d\n", - parent->picture_width, parent->picture_height); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", - (guint)va_status); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - guint max_size = 0; - ret = mix_videofmtenc_mpeg4_get_max_encoded_buf_size (parent, &max_size); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videofmtenc_mpeg4_get_max_encoded_buf_size\n"); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - - } - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &self->coded_buf[0]); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &(self->coded_buf[1])); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - -#ifdef SHOW_SRC - Display * display = XOpenDisplay (NULL); - - LOG_I( "display = 0x%08x\n", - (guint) display); - win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, - parent->picture_width, parent->picture_height, 0, 0, - WhitePixel(display, 0)); - XMapWindow(display, win); - XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - - XSync(display, False); - LOG_I( "va_display = 0x%08x\n", - (guint) va_display); - -#endif /* SHOW_SRC */ - - parent->initialized = TRUE; - - g_mutex_unlock(parent->objectlock); - g_free (surfaces); - - } - else - { - LOG_E( - "not MPEG4 video encode Object\n"); - return MIX_RESULT_FAIL; - - } - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - 
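/* Editor's note: two VAEncCodedBufferType buffers are created up front so the
 * encoder can alternate between them from frame to frame (the ping-pong index
 * in process_encode). A minimal sketch (error handling elided): */
VABufferID coded_buf[2];
int k;
for (k = 0; k < 2; k++)
    vaCreateBuffer(va_dpy, va_ctx, VAEncCodedBufferType,
                   coded_buf_size, 1, NULL, &coded_buf[k]);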
MixVideoEncodeParams * encode_params) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - - LOG_V( "Begin\n"); - - /*currenly only support one input and output buffer*/ - //TODO: params i - - if (bufincnt != 1 || iovoutcnt != 1) { - LOG_E( - "buffer count not equel to 1\n"); - LOG_E( - "maybe some exception occurs\n"); - } - - if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { - LOG_E( - "!mix || !bufin[0] ||!iovout[0]\n"); - return MIX_RESULT_NULL_PTR; - } - - //TODO: encode_params is reserved here for future usage. - - /* TODO: decide if we need to chainup parent method. - * * * if we do, the following is the code: - * */ - -#if 0 - if (parent_class->encode) { - return parent_class->encode(mix, bufin, bufincnt, iovout, - iovoutcnt, encode_params); - } -#endif - - if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - { - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix); - - LOG_V( "Locking\n"); - g_mutex_lock(parent->objectlock); - - - //TODO: also we could move some encode Preparation work to here - - LOG_V( - "mix_videofmtenc_mpeg4_process_encode\n"); - - ret = mix_videofmtenc_mpeg4_process_encode (self, - bufin[0], iovout[0]); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_mpeg4_process_encode\n"); - return MIX_RESULT_FAIL; - } - - - LOG_V( "UnLocking\n"); - - g_mutex_unlock(parent->objectlock); - } - else - { - LOG_E( - "not MPEG4 video encode Object\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix) { - - //MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - /*not chain to parent flush func*/ -#if 0 - if (parent_class->flush) { - return parent_class->flush(mix, msg); - } -#endif - - MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix); - - g_mutex_lock(mix->objectlock); - - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } - - /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) - { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (self->ref_frame != NULL) - { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; - } - - /*reset the properities*/ - self->encoded_frames = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - - g_mutex_unlock(mix->objectlock); - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_eos(MixVideoFormatEnc *mix) { - - /* TODO: add codes for MPEG-4:2 */ - - /* TODO: decide if we need to chainup parent method. 
- * if we do, the following is the code: - */ - - LOG_V( "\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (parent_class->eos) { - return parent_class->eos(mix); - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_deinitialize(MixVideoFormatEnc *mix) { - - MixVideoFormatEnc *parent = NULL; - VAStatus va_status; - - LOG_V( "Begin\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix); - - LOG_V( "Release frames\n"); - - g_mutex_lock(parent->objectlock); - -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } -#endif - - /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) - { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (self->ref_frame != NULL) - { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; - } - - LOG_V( "Release surfaces\n"); - - if (self->ci_shared_surfaces) - { - g_free (self->ci_shared_surfaces); - self->ci_shared_surfaces = NULL; - } - - if (self->surfaces) - { - g_free (self->surfaces); - self->surfaces = NULL; - } - - LOG_V( "vaDestroyContext\n"); - - va_status = vaDestroyContext (parent->va_display, parent->va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyContext\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - LOG_V( "vaDestroyConfig\n"); - - va_status = vaDestroyConfig (parent->va_display, parent->va_config); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyConfig\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - parent->initialized = TRUE; - - g_mutex_unlock(parent->objectlock); - -#if 1 - if (parent_class->deinitialize) { - return parent_class->deinitialize(mix); - } -#endif - - //Most stuff is cleaned up in parent_class->finalize() - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_send_seq_params (MixVideoFormatEnc_MPEG4 *mix) -{ - - VAStatus va_status; - VAEncSequenceParameterBufferMPEG4 mpeg4_seq_param; - VABufferID seq_para_buf_id; - - - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) - return MIX_RESULT_NULL_PTR; - - LOG_V( "Begin\n\n"); - - if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - { - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - /*set up the sequence params for HW*/ - mpeg4_seq_param.profile_and_level_indication = mix->profile_and_level_indication; //TODO, hard code now - mpeg4_seq_param.video_object_layer_width= parent->picture_width; - mpeg4_seq_param.video_object_layer_height= parent->picture_height; - mpeg4_seq_param.vop_time_increment_resolution = - (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; - mpeg4_seq_param.fixed_vop_time_increment= mix->fixed_vop_time_increment; - mpeg4_seq_param.bits_per_second= parent->bitrate; - mpeg4_seq_param.frame_rate = - (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; - mpeg4_seq_param.initial_qp = parent->initial_qp; - mpeg4_seq_param.min_qp = parent->min_qp; - mpeg4_seq_param.intra_period = parent->intra_period; - - - //mpeg4_seq_param.fixed_vop_rate = 30; - - - - LOG_V( - "===mpeg4 sequence params===\n"); - - LOG_I( "profile_and_level_indication = %d\n", - 
(guint)mpeg4_seq_param.profile_and_level_indication); - LOG_I( "intra_period = %d\n", - mpeg4_seq_param.intra_period); - LOG_I( "video_object_layer_width = %d\n", - mpeg4_seq_param.video_object_layer_width); - LOG_I( "video_object_layer_height = %d\n", - mpeg4_seq_param.video_object_layer_height); - LOG_I( "vop_time_increment_resolution = %d\n", - mpeg4_seq_param.vop_time_increment_resolution); - LOG_I( "fixed_vop_rate = %d\n", - mpeg4_seq_param.fixed_vop_rate); - LOG_I( "fixed_vop_time_increment = %d\n", - mpeg4_seq_param.fixed_vop_time_increment); - LOG_I( "bitrate = %d\n", - mpeg4_seq_param.bits_per_second); - LOG_I( "frame_rate = %d\n", - mpeg4_seq_param.frame_rate); - LOG_I( "initial_qp = %d\n", - mpeg4_seq_param.initial_qp); - LOG_I( "min_qp = %d\n", - mpeg4_seq_param.min_qp); - LOG_I( "intra_period = %d\n\n", - mpeg4_seq_param.intra_period); - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncSequenceParameterBufferType, - sizeof(mpeg4_seq_param), - 1, &mpeg4_seq_param, - &seq_para_buf_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &seq_para_buf_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); - return MIX_RESULT_FAIL; - } - } - else - { - LOG_E( - "not MPEG4 video encode Object\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; - - -} - -MIX_RESULT mix_videofmtenc_mpeg4_send_picture_parameter (MixVideoFormatEnc_MPEG4 *mix) -{ - VAStatus va_status; - VAEncPictureParameterBufferMPEG4 mpeg4_pic_param; - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) - return MIX_RESULT_NULL_PTR; - - LOG_V( "Begin\n\n"); - -#if 0 //not needed currently - MixVideoConfigParamsEncMPEG4 * params_mpeg4 - = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc); -#endif - - if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) { - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - /*set picture params for HW*/ - mpeg4_pic_param.reference_picture = mix->ref_frame->frame_id; - mpeg4_pic_param.reconstructed_picture = mix->rec_frame->frame_id; - mpeg4_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; - mpeg4_pic_param.picture_width = parent->picture_width; - mpeg4_pic_param.picture_height = parent->picture_height; - mpeg4_pic_param.vop_time_increment= mix->encoded_frames; - mpeg4_pic_param.picture_type = mix->is_intra ? 
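/* Editor's note: the vop_time_increment_resolution and frame_rate values set
 * in this hunk reduce the frame_rate_num/frame_rate_denom pair to the nearest
 * integer; adding half the denominator before dividing rounds instead of
 * truncating (e.g. 30000/1001 -> (30000 + 500) / 1001 = 30). A minimal
 * sketch: */
static guint rounded_fps(guint num, guint denom)
{
    return (num + denom / 2) / denom;  /* round-to-nearest integer division */
}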
VAEncPictureTypeIntra : VAEncPictureTypePredictive; - - - - LOG_V( - "======mpeg4 picture params======\n"); - LOG_I( "reference_picture = 0x%08x\n", - mpeg4_pic_param.reference_picture); - LOG_I( "reconstructed_picture = 0x%08x\n", - mpeg4_pic_param.reconstructed_picture); - LOG_I( "coded_buf = 0x%08x\n", - mpeg4_pic_param.coded_buf); - LOG_I( "coded_buf_index = %d\n", - mix->coded_buf_index); - LOG_I( "picture_width = %d\n", - mpeg4_pic_param.picture_width); - LOG_I( "picture_height = %d\n", - mpeg4_pic_param.picture_height); - LOG_I( "vop_time_increment = %d\n", - mpeg4_pic_param.vop_time_increment); - LOG_I( "picture_type = %d\n\n", - mpeg4_pic_param.picture_type); - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncPictureParameterBufferType, - sizeof(mpeg4_pic_param), - 1,&mpeg4_pic_param, - &mix->pic_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->pic_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); - return MIX_RESULT_FAIL; - } - } - else - { - LOG_E( - "not MPEG4 video encode Object\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; - -} - - -MIX_RESULT mix_videofmtenc_mpeg4_send_slice_parameter (MixVideoFormatEnc_MPEG4 *mix) -{ - VAStatus va_status; - - guint slice_height; - guint slice_index; - guint slice_height_in_mb; - - if (mix == NULL) - return MIX_RESULT_NULL_PTR; - - LOG_V( "Begin\n\n"); - - - MixVideoFormatEnc *parent = NULL; - - if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - { - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - slice_height = parent->picture_height; - - slice_height += 15; - slice_height &= (~15); - - VAEncSliceParameterBuffer slice_param; - slice_index = 0; - slice_height_in_mb = slice_height / 16; - slice_param.start_row_number = 0; - slice_param.slice_height = slice_height / 16; - slice_param.slice_flags.bits.is_intra = mix->is_intra; - slice_param.slice_flags.bits.disable_deblocking_filter_idc - = mix->disable_deblocking_filter_idc; - - LOG_V( - "======mpeg4 slice params======\n"); - - LOG_I( "start_row_number = %d\n", - (gint) slice_param.start_row_number); - LOG_I( "slice_height_in_mb = %d\n", - (gint) slice_param.slice_height); - LOG_I( "slice.is_intra = %d\n", - (gint) slice_param.slice_flags.bits.is_intra); - LOG_I( - "disable_deblocking_filter_idc = %d\n\n", - (gint) mix->disable_deblocking_filter_idc); - - va_status = vaCreateBuffer (parent->va_display, parent->va_context, - VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), - 1, &slice_param, - &mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->slice_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - } - else - { - LOG_E( - "not MPEG4 video encode Object\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, - MixBuffer * bufin, MixIOVec * iovout) -{ - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus va_status = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; - VAContextID va_context; - gulong surface = 0; 
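[Editor's note: this removed version of mix_videofmtenc_mpeg4_process_encode copies the coded data out of the mapped VA buffer at a fixed offset (the size is read from the first 4 bytes, the payload from buf + 16); the rewritten version later in this patch instead walks the VACodedBufferSegment chain that vaMapBuffer returns. A minimal sketch of the conventional traversal follows; copy_coded_data, dpy, coded_buf and out are hypothetical names, not part of this patch. Note also that the rewritten code advances between segments with coded_seg++, which relies on the driver laying the segments out contiguously; following the next pointer, as below, works either way.

#include <string.h>    /* memcpy */
#include <va/va.h>     /* vaMapBuffer, vaUnmapBuffer, VACodedBufferSegment */

/* Sketch: append every coded segment into 'out' and return the total size.
 * Error handling is trimmed; 'out' must be at least as large as the value
 * reported by the encoder's get_max_encoded_buf_size. */
static unsigned int
copy_coded_data(VADisplay dpy, VABufferID coded_buf, unsigned char *out)
{
    VACodedBufferSegment *seg = NULL;
    unsigned int off = 0;

    if (vaMapBuffer(dpy, coded_buf, (void **)&seg) != VA_STATUS_SUCCESS)
        return 0;

    /* follow the driver-provided chain rather than assuming the
     * segments sit in one contiguous array */
    for (; seg != NULL; seg = (VACodedBufferSegment *)seg->next) {
        memcpy(out + off, seg->buf, seg->size);
        off += seg->size;
    }

    vaUnmapBuffer(dpy, coded_buf);
    return off;
}
]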
- guint16 width, height; - - MixVideoFrame * tmp_frame; - guint8 *buf; - - if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { - LOG_E( - "mix == NUL) || bufin == NULL || iovout == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - { - - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - va_display = parent->va_display; - va_context = parent->va_context; - width = parent->picture_width; - height = parent->picture_height; - - - LOG_I( "encoded_frames = %d\n", - mix->encoded_frames); - LOG_I( "is_intra = %d\n", - mix->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", - (guint) parent->ci_frame_id); - - /* determine the picture type*/ - if ((mix->encoded_frames % parent->intra_period) == 0) { - mix->is_intra = TRUE; - } else { - mix->is_intra = FALSE; - } - - LOG_I( "is_intra_picture = %d\n", - mix->is_intra); - - LOG_V( - "Get Surface from the pool\n"); - - /*current we use one surface for source data, - * one for reference and one for reconstructed*/ - /*TODO, could be refine here*/ - - if (!parent->share_buf_mode) { - LOG_V( - "We are NOT in share buffer mode\n"); - - if (mix->ref_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - return MIX_RESULT_FAIL; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - return MIX_RESULT_FAIL; - } - } - - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - return MIX_RESULT_FAIL; - } - } - - LOG_V( "Get Surface Done\n"); - - - VAImage src_image; - guint8 *pvbuf; - guint8 *dst_y; - guint8 *dst_uv; - int i,j; - - LOG_V( - "map source data to surface\n"); - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); - return MIX_RESULT_FAIL; - } - - - LOG_I( - "surface id = 0x%08x\n", (guint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); - //need to destroy - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDeriveImage\n"); - return MIX_RESULT_FAIL; - } - - VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); - - - va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed to vaMapBuffer\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( - "vaImage information\n"); - LOG_I( - "image->pitches[0] = %d\n", image->pitches[0]); - LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); - LOG_I( - "image->offsets[0] = %d\n", image->offsets[0]); - LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); - LOG_I( - "image->num_planes = %d\n", image->num_planes); - LOG_I( - "image->width = %d\n", image->width); - LOG_I( - "image->height = %d\n", image->height); - - LOG_I( - "input buf size = %d\n", bufin->size); - - guint8 *inbuf = bufin->data; - - /*need to convert YUV420 to NV12*/ - dst_y = pvbuf +image->offsets[0]; - - for (i = 0; i < height; i ++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - - for (i = 0; i < 
height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = - inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; - } - dst_uv += image->pitches[1]; - } - - vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDestroyImage\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( - "Map source data to surface done\n"); - - } - - else {//if (!parent->share_buf_mode) - - MixVideoFrame * frame = mix_videoframe_new(); - - if (mix->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_frame, frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "get reference surface from pool failed\n"); - return MIX_RESULT_FAIL; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_frame, frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get recontructed surface from pool failed\n"); - return MIX_RESULT_FAIL; - } - } - - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - guint ci_idx; - memcpy (&ci_idx, bufin->data, bufin->size); - - LOG_I( - "surface_num = %d\n", mix->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > mix->surface_num - 2) { - LOG_E( - "the CI frame idx is too bigger than CI frame number\n"); - return MIX_RESULT_FAIL; - } - - - ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_frame, frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - return MIX_RESULT_FAIL; - } - } - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - - } - - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(guint)va_context); - LOG_I( "surface = 0x%08x\n",(guint)surface); - LOG_I( "va_display = 0x%08x\n",(guint)va_display); - - - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - return MIX_RESULT_FAIL; - } - - ret = mix_videofmtenc_mpeg4_send_encode_command (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - return MIX_RESULT_FAIL; - } - - - if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - return MIX_RESULT_FAIL; - } - } - - if (mix->encoded_frames == 0) { - mix->encoded_frames ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - - mix->last_frame = mix->cur_frame; - - - /* determine the picture type*/ - if ((mix->encoded_frames % parent->intra_period) == 0) { - mix->is_intra = TRUE; - } else { - mix->is_intra = FALSE; - } - - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - - - } - - LOG_V( "vaSyncSurface\n"); - - va_status = vaSyncSurface(va_display, 
mix->last_frame->frame_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaSyncSurface\n"); - //return MIX_RESULT_FAIL; - } - - - LOG_V( - "Start to get encoded data\n"); - - /*get encoded data from the VA buffer*/ - va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaMapBuffer\n"); - return MIX_RESULT_FAIL; - } - - // first 4 bytes is the size of the buffer - memcpy (&(iovout->data_size), (void*)buf, 4); - //size = (guint*) buf; - - if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. - - iovout->data = g_malloc (iovout->data_size); - if (iovout->data == NULL) { - return MIX_RESULT_NO_MEMORY; - } - } - - memcpy (iovout->data, buf + 16, iovout->data_size); - - iovout->buffer_size = iovout->data_size; - - LOG_I( - "out size is = %d\n", iovout->data_size); - - va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "get encoded data done\n"); - - if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - return MIX_RESULT_FAIL; - } - } - - if (mix->encoded_frames == 1) { - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - return MIX_RESULT_FAIL; - } - - ret = mix_videofmtenc_mpeg4_send_encode_command (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - return MIX_RESULT_FAIL; - } - - } - - VASurfaceStatus status; - - /*query the status of current surface*/ - va_status = vaQuerySurfaceStatus(va_display, surface, &status); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaQuerySurfaceStatus\n"); - return MIX_RESULT_FAIL; - } - mix->pic_skipped = status & VASurfaceSkipped; - - //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame); - - if (parent->need_display) { - ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - return ret; - } - - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_framemanager_enqueue\n"); - return MIX_RESULT_FAIL; - } - } - - /*update the reference surface and reconstructed surface */ - if (!mix->pic_skipped) { - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - } - - -#if 0 - if (mix->ref_frame != NULL) - mix_videoframe_unref (mix->ref_frame); - mix->ref_frame = mix->rec_frame; - - mix_videoframe_unref (mix->cur_frame); -#endif - - mix->encoded_frames ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - mix->last_frame = mix->cur_frame; - - if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_frame); - mix->cur_frame = NULL; - } - } - else - { - LOG_E( - "not MPEG4 video encode Object\n"); - return MIX_RESULT_FAIL; - } - - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size ( - MixVideoFormatEnc *mix, 
guint * max_size) -{ - - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) - { - LOG_E( - "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - parent = MIX_VIDEOFORMATENC(mix); - MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix); - - if (MIX_IS_VIDEOFORMATENC_MPEG4(self)) { - - if (self->coded_buf_size > 0) { - *max_size = self->coded_buf_size; - LOG_V ("Already calculate the max encoded size, get the value directly"); - return MIX_RESULT_SUCCESS; - } - - /*base on the rate control mode to calculate the defaule encoded buffer size*/ - if (self->va_rcmode == VA_RC_NONE) { - self->coded_buf_size = - (parent->picture_width* parent->picture_height * 400) / (16 * 16); - // set to value according to QP - } - else { - self->coded_buf_size = parent->bitrate/ 4; - } - - self->coded_buf_size = - max (self->coded_buf_size , - (parent->picture_width* parent->picture_height * 400) / (16 * 16)); - - /*in case got a very large user input bit rate value*/ - self->coded_buf_size = - max(self->coded_buf_size, - (parent->picture_width * parent->picture_height * 1.5 * 8)); - self->coded_buf_size = (self->coded_buf_size + 15) &(~15); - } - else - { - LOG_E( - "not MPEG4 video encode Object\n"); - return MIX_RESULT_FAIL; - } - - *max_size = self->coded_buf_size; - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_send_encode_command (MixVideoFormatEnc_MPEG4 *mix) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - { - if (mix->encoded_frames == 0) { - mix_videofmtenc_mpeg4_send_seq_params (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_seq_params\n"); - return MIX_RESULT_FAIL; - } - } - - ret = mix_videofmtenc_mpeg4_send_picture_parameter (mix); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_picture_parameter\n"); - return MIX_RESULT_FAIL; - } - - ret = mix_videofmtenc_mpeg4_send_slice_parameter (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_slice_parameter\n"); - return MIX_RESULT_FAIL; - } - - } - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+ */ +#include +#include +#include + +#include "mixvideolog.h" + +#include "mixvideoformatenc_mpeg4.h" +#include "mixvideoconfigparamsenc_mpeg4.h" + +#undef SHOW_SRC + +#ifdef SHOW_SRC +Window win = 0; +#endif /* SHOW_SRC */ + + +/* The parent class. The pointer will be saved + * in this class's initialization. The pointer + * can be used for chaining method calls if needed. + */ +static MixVideoFormatEncClass *parent_class = NULL; + +static void mix_videoformatenc_mpeg4_finalize(GObject * obj); + +/* + * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC + */ +G_DEFINE_TYPE (MixVideoFormatEnc_MPEG4, mix_videoformatenc_mpeg4, MIX_TYPE_VIDEOFORMATENC); + +static void mix_videoformatenc_mpeg4_init(MixVideoFormatEnc_MPEG4 * self) { + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); + + /* member initialization */ + self->encoded_frames = 0; + self->pic_skipped = FALSE; + self->is_intra = TRUE; + self->cur_frame = NULL; + self->ref_frame = NULL; + self->rec_frame = NULL; + + self->ci_shared_surfaces = NULL; + self->surfaces= NULL; + self->surface_num = 0; + self->coded_buf_index = 0; + + parent->initialized = FALSE; + +} + +static void mix_videoformatenc_mpeg4_class_init( + MixVideoFormatEnc_MPEG4Class * klass) { + + /* root class */ + GObjectClass *gobject_class = (GObjectClass *) klass; + + /* direct parent class */ + MixVideoFormatEncClass *video_formatenc_class = + MIX_VIDEOFORMATENC_CLASS(klass); + + /* parent class for later use */ + parent_class = g_type_class_peek_parent(klass); + + /* set up the finalizer */ + gobject_class->finalize = mix_videoformatenc_mpeg4_finalize; + + /* set up vmethods with base implementation */ + video_formatenc_class->getcaps = mix_videofmtenc_mpeg4_getcaps; + video_formatenc_class->initialize = mix_videofmtenc_mpeg4_initialize; + video_formatenc_class->encode = mix_videofmtenc_mpeg4_encode; + video_formatenc_class->flush = mix_videofmtenc_mpeg4_flush; + video_formatenc_class->eos = mix_videofmtenc_mpeg4_eos; + video_formatenc_class->deinitialize = mix_videofmtenc_mpeg4_deinitialize; + video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_mpeg4_get_max_encoded_buf_size; +} + +MixVideoFormatEnc_MPEG4 * +mix_videoformatenc_mpeg4_new(void) { + MixVideoFormatEnc_MPEG4 *ret = + g_object_new(MIX_TYPE_VIDEOFORMATENC_MPEG4, NULL); + + return ret; +} + +void mix_videoformatenc_mpeg4_finalize(GObject * obj) { + /* clean up here.
*/ + + /*MixVideoFormatEnc_MPEG4 *mix = MIX_VIDEOFORMATENC_MPEG4(obj); */ + GObjectClass *root_class = (GObjectClass *) parent_class; + + LOG_V( "\n"); + + /* Chain up parent */ + if (root_class->finalize) { + root_class->finalize(obj); + } +} + +MixVideoFormatEnc_MPEG4 * +mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix) { + return (MixVideoFormatEnc_MPEG4 *) g_object_ref(G_OBJECT(mix)); +} + +/*MPEG-4:2 vmethods implementation */ +MIX_RESULT mix_videofmtenc_mpeg4_getcaps(MixVideoFormatEnc *mix, GString *msg) { + + LOG_V( "mix_videofmtenc_mpeg4_getcaps\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + + if (parent_class->getcaps) { + return parent_class->getcaps(mix, msg); + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display ) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + MixVideoConfigParamsEncMPEG4 * config_params_enc_mpeg4; + + VAStatus va_status = VA_STATUS_SUCCESS; + VASurfaceID * surfaces = NULL; + + gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; + gint va_num_profiles, va_num_entrypoints; + + VAProfile *va_profiles = NULL; + VAEntrypoint *va_entrypoints = NULL; + VAConfigAttrib va_attrib[2]; + guint index; + + + /* frame_mgr and input_buf_pool are reserved for future use */ + + if (mix == NULL || config_params_enc == NULL || va_display == NULL) { + LOG_E( + "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "begin\n"); + + + + /* Chain up the parent method.
*/ + + if (parent_class->initialize) { + ret = parent_class->initialize(mix, config_params_enc, + frame_mgr, input_buf_pool, surface_pool, + va_display); + } + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + + + if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc)) { + config_params_enc_mpeg4 = + MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc); + } else { + LOG_V( + "mix_videofmtenc_mpeg4_initialize: no mpeg4 config params found\n"); + return MIX_RESULT_FAIL; + } + + g_mutex_lock(parent->objectlock); + + LOG_V( + "Start to get properties from MPEG-4:2 params\n"); + + /* get properties from the MPEG4 params object, which are specific to the MPEG4 format */ + + ret = mix_videoconfigparamsenc_mpeg4_get_profile_level (config_params_enc_mpeg4, + &self->profile_and_level_indication); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_mpeg4_get_profile_level\n"); + goto cleanup; + } + + ret = mix_videoconfigparamsenc_mpeg4_get_fixed_vti (config_params_enc_mpeg4, + &(self->fixed_vop_time_increment)); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_mpeg4_get_fixed_vti\n"); + goto cleanup; + } + + ret = mix_videoconfigparamsenc_mpeg4_get_dlk (config_params_enc_mpeg4, + &(self->disable_deblocking_filter_idc)); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_mpeg4_get_dlk\n"); + goto cleanup; + } + + + LOG_V( + "======MPEG4 Encode Object properties======:\n"); + + LOG_I( "self->profile_and_level_indication = %d\n", + self->profile_and_level_indication); + LOG_I( "self->fixed_vop_time_increment = %d\n\n", + self->fixed_vop_time_increment); + + LOG_V( + "Get properties from params done\n"); + + + parent->va_display = va_display; + + LOG_V( "Get Display\n"); + LOG_I( "Display = 0x%08x\n", + (guint)va_display); + + +#if 0 + /* query the vendor information, can be ignored */ + va_vendor = vaQueryVendorString (va_display); + LOG_I( "Vendor = %s\n", + va_vendor); +#endif + + /*get the max number for profiles/entrypoints/attribs*/ + va_max_num_profiles = vaMaxNumProfiles(va_display); + LOG_I( "va_max_num_profiles = %d\n", + va_max_num_profiles); + + va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); + LOG_I( "va_max_num_entrypoints = %d\n", + va_max_num_entrypoints); + + va_max_num_attribs = vaMaxNumConfigAttributes(va_display); + LOG_I( "va_max_num_attribs = %d\n", + va_max_num_attribs); + + va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); + va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); + + if (va_profiles == NULL || va_entrypoints ==NULL) + { + LOG_E( + "!va_profiles || !va_entrypoints\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + LOG_I( + "va_profiles = 0x%08x\n", (guint)va_profiles); + + LOG_V( "vaQueryConfigProfiles\n"); + + + va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigProfiles\n"); + + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "vaQueryConfigProfiles Done\n"); + + + + /*check whether profile is supported*/ + for(index= 0; index < va_num_profiles; index++) { + if(parent->va_profile == va_profiles[index]) + break; + } + + if(index == va_num_profiles) + { + LOG_E( "Profile not supported\n"); + + ret = MIX_RESULT_FAIL; + goto cleanup; + }
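[Editor's note: the entrypoint check that follows repeats the linear-scan pattern just used for the profile check: query the full list, scan for the wanted value, and fail if the index runs off the end. A minimal sketch of that pattern as a helper; va_profile_list_contains is hypothetical and not part of this patch.

#include <glib.h>   /* gboolean, gint, TRUE, FALSE */
#include <va/va.h>  /* VAProfile */

/* Sketch: return TRUE if 'wanted' occurs in the first 'n' entries of 'list'. */
static gboolean
va_profile_list_contains(const VAProfile *list, gint n, VAProfile wanted)
{
    gint i;
    for (i = 0; i < n; i++) {
        if (list[i] == wanted)
            return TRUE;
    }
    return FALSE;
}

Used here it would read: if (!va_profile_list_contains(va_profiles, va_num_profiles, parent->va_profile)) { ret = MIX_RESULT_FAIL; goto cleanup; } — and the same shape fits the VAEntrypoint scan below.]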
+ + LOG_V( "vaQueryConfigEntrypoints\n"); + + + /*Check entry point*/ + va_status = vaQueryConfigEntrypoints(va_display, + parent->va_profile, + va_entrypoints, &va_num_entrypoints); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigEntrypoints\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + for (index = 0; index < va_num_entrypoints; index ++) { + if (va_entrypoints[index] == VAEntrypointEncSlice) { + break; + } + } + + if (index == va_num_entrypoints) { + LOG_E( "Entrypoint not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + va_attrib[0].type = VAConfigAttribRTFormat; + va_attrib[1].type = VAConfigAttribRateControl; + + LOG_V( "vaGetConfigAttributes\n"); + + va_status = vaGetConfigAttributes(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaGetConfigAttributes\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + if ((va_attrib[0].value & parent->va_format) == 0) { + LOG_E( "Matched format not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + if ((va_attrib[1].value & parent->va_rcmode) == 0) { + LOG_E( "RC mode not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; + va_attrib[1].value = parent->va_rcmode; + + LOG_V( "======VA Configuration======\n"); + + LOG_I( "profile = %d\n", + parent->va_profile); + LOG_I( "va_entrypoint = %d\n", + parent->va_entrypoint); + LOG_I( "va_attrib[0].type = %d\n", + va_attrib[0].type); + LOG_I( "va_attrib[1].type = %d\n", + va_attrib[1].type); + LOG_I( "va_attrib[0].value (Format) = %d\n", + va_attrib[0].value); + LOG_I( "va_attrib[1].value (RC mode) = %d\n", + va_attrib[1].value); + + LOG_V( "vaCreateConfig\n"); + + va_status = vaCreateConfig(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2, &(parent->va_config)); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaCreateConfig\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + /*TODO: compute the surface number*/ + int numSurfaces; + + if (parent->share_buf_mode) { + numSurfaces = 2; + } + else { + numSurfaces = 8; + parent->ci_frame_num = 0; + } + + self->surface_num = numSurfaces + parent->ci_frame_num; + + surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); + + if (surfaces == NULL) + { + LOG_E( + "Failed allocate surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + LOG_V( "vaCreateSurfaces\n"); + + va_status = vaCreateSurfaces(va_display, parent->picture_width, + parent->picture_height, parent->va_format, + numSurfaces, surfaces); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaCreateSurfaces\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + if (parent->share_buf_mode) { + + LOG_V( + "We are in share buffer mode!\n"); + self->ci_shared_surfaces = + g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); + + if (self->ci_shared_surfaces == NULL) + { + LOG_E( + "Failed allocate shared surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + guint index; + for(index = 0; index < parent->ci_frame_num; index++) { + + LOG_I( "ci_frame_id = %lu\n", + parent->ci_frame_id[index]); + + LOG_V( + "vaCreateSurfaceFromCIFrame\n"); + + va_status = vaCreateSurfaceFromCIFrame(va_display, + (gulong) (parent->ci_frame_id[index]), + &self->ci_shared_surfaces[index]); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateSurfaceFromCIFrame\n"); + ret = MIX_RESULT_FAIL; + goto 
cleanup; + } + } + + LOG_V( + "vaCreateSurfaceFromCIFrame Done\n"); + + }// if (parent->share_buf_mode) + + self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); + + if (self->surfaces == NULL) + { + LOG_E( + "Failed allocate private surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + if (parent->share_buf_mode) { + /*shared surfaces should be put in pool first, + * because we will get it accoring to CI index*/ + for(index = 0; index < parent->ci_frame_num; index++) + self->surfaces[index] = self->ci_shared_surfaces[index]; + } + + for(index = 0; index < numSurfaces; index++) { + self->surfaces[index + parent->ci_frame_num] = surfaces[index]; + } + + LOG_V( "assign surface Done\n"); + LOG_I( "Created %d libva surfaces\n", + numSurfaces + parent->ci_frame_num); + +#if 0 //current put this in gst + images = g_malloc(sizeof(VAImage)*numSurfaces); + if (images == NULL) + { + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + for (index = 0; index < numSurfaces; index++) { + //Derive an VAImage from an existing surface. + //The image buffer can then be mapped/unmapped for CPU access + va_status = vaDeriveImage(va_display, surfaces[index], + &images[index]); + } +#endif + + LOG_V( "mix_surfacepool_new\n"); + + parent->surfacepool = mix_surfacepool_new(); + if (surface_pool) + *surface_pool = parent->surfacepool; + //which is useful to check before encode + + if (parent->surfacepool == NULL) + { + LOG_E( + "Failed to mix_surfacepool_new\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "mix_surfacepool_initialize\n"); + + ret = mix_surfacepool_initialize(parent->surfacepool, + self->surfaces, parent->ci_frame_num + numSurfaces, va_display); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + LOG_E( "Error init failure\n"); + ret = MIX_RESULT_ALREADY_INIT; + goto cleanup; + default: + break; + } + + + //Initialize and save the VA context ID + LOG_V( "vaCreateContext\n"); + + va_status = vaCreateContext(va_display, parent->va_config, + parent->picture_width, parent->picture_height, + VA_PROGRESSIVE, self->surfaces, parent->ci_frame_num + numSurfaces, + &(parent->va_context)); + + LOG_I( + "Created libva context width %d, height %d\n", + parent->picture_width, parent->picture_height); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateContext\n"); + LOG_I( "va_status = %d\n", + (guint)va_status); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + guint max_size = 0; + ret = mix_videofmtenc_mpeg4_get_max_encoded_buf_size (parent, &max_size); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videofmtenc_mpeg4_get_max_encoded_buf_size\n"); + goto cleanup; + + } + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &self->coded_buf[0]); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &(self->coded_buf[1])); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + +#ifdef SHOW_SRC + Display * display = XOpenDisplay (NULL); + + LOG_I( "display = 0x%08x\n", + (guint) display); + win = 
XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, + parent->picture_width, parent->picture_height, 0, 0, + WhitePixel(display, 0)); + XMapWindow(display, win); + XSelectInput(display, win, KeyPressMask | StructureNotifyMask); + + XSync(display, False); + LOG_I( "va_display = 0x%08x\n", + (guint) va_display); + +#endif /* SHOW_SRC */ + +cleanup: + + if (ret == MIX_RESULT_SUCCESS) { + parent->initialized = TRUE; + } + + /*free profiles and entrypoints*/ + if (va_profiles) + g_free(va_profiles); + + if (va_entrypoints) + g_free (va_entrypoints); + + if (surfaces) + g_free (surfaces); + + g_mutex_unlock(parent->objectlock); + + LOG_V( "end\n"); + + return ret; +} + +MIX_RESULT mix_videofmtenc_mpeg4_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + + LOG_V( "Begin\n"); + + /*currenly only support one input and output buffer*/ + + if (bufincnt != 1 || iovoutcnt != 1) { + LOG_E( + "buffer count not equel to 1\n"); + LOG_E( + "maybe some exception occurs\n"); + } + + if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { + LOG_E( + "!mix || !bufin[0] ||!iovout[0]\n"); + return MIX_RESULT_NULL_PTR; + } + +#if 0 + if (parent_class->encode) { + return parent_class->encode(mix, bufin, bufincnt, iovout, + iovoutcnt, encode_params); + } +#endif + + if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix); + + LOG_V( "Locking\n"); + g_mutex_lock(parent->objectlock); + + + //TODO: also we could move some encode Preparation work to here + + LOG_V( + "mix_videofmtenc_mpeg4_process_encode\n"); + + ret = mix_videofmtenc_mpeg4_process_encode (self, + bufin[0], iovout[0]); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_mpeg4_process_encode\n"); + goto cleanup; + } + +cleanup: + + LOG_V( "UnLocking\n"); + + g_mutex_unlock(parent->objectlock); + + LOG_V( "end\n"); + + return ret; +} + +MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix) { + + //MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + + /*not chain to parent flush func*/ +#if 0 + if (parent_class->flush) { + return parent_class->flush(mix, msg); + } +#endif + + if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + return MIX_RESULT_INVALID_PARAM; + + MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix); + + g_mutex_lock(mix->objectlock); + + /*unref the current source surface*/ + if (self->cur_frame != NULL) + { + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; + } + + /*unref the reconstructed surface*/ + if (self->rec_frame != NULL) + { + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_frame != NULL) + { + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; + } + + /*reset the properities*/ + self->encoded_frames = 0; + self->pic_skipped = FALSE; + self->is_intra = TRUE; + + g_mutex_unlock(mix->objectlock); + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_mpeg4_eos(MixVideoFormatEnc *mix) { + + LOG_V( "\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if (parent_class->eos) { + return parent_class->eos(mix); + } + return 
MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_mpeg4_deinitialize(MixVideoFormatEnc *mix) { + + MixVideoFormatEnc *parent = NULL; + VAStatus va_status; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + return MIX_RESULT_INVALID_PARAM; + + + if (parent_class->deinitialize) { + ret = parent_class->deinitialize(mix); + } + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix); + + LOG_V( "Release frames\n"); + + g_mutex_lock(parent->objectlock); + +#if 0 + /*unref the current source surface*/ + if (self->cur_frame != NULL) + { + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; + } +#endif + + /*unref the reconstructed surface*/ + if (self->rec_frame != NULL) + { + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_frame != NULL) + { + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; + } + + LOG_V( "Release surfaces\n"); + + if (self->ci_shared_surfaces) + { + g_free (self->ci_shared_surfaces); + self->ci_shared_surfaces = NULL; + } + + if (self->surfaces) + { + g_free (self->surfaces); + self->surfaces = NULL; + } + + LOG_V( "vaDestroyContext\n"); + + va_status = vaDestroyContext (parent->va_display, parent->va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyContext\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "vaDestroyConfig\n"); + + va_status = vaDestroyConfig (parent->va_display, parent->va_config); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyConfig\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + +cleanup: + + parent->initialized = TRUE; + + g_mutex_unlock(parent->objectlock); + + LOG_V( "end\n"); + +return ret; + +} + +MIX_RESULT mix_videofmtenc_mpeg4_send_seq_params (MixVideoFormatEnc_MPEG4 *mix) +{ + + VAStatus va_status; + VAEncSequenceParameterBufferMPEG4 mpeg4_seq_param; + VABufferID seq_para_buf_id; + + + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set up the sequence params for HW*/ + mpeg4_seq_param.profile_and_level_indication = mix->profile_and_level_indication; //TODO, hard code now + mpeg4_seq_param.video_object_layer_width= parent->picture_width; + mpeg4_seq_param.video_object_layer_height= parent->picture_height; + mpeg4_seq_param.vop_time_increment_resolution = + (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; + mpeg4_seq_param.fixed_vop_time_increment= mix->fixed_vop_time_increment; + mpeg4_seq_param.bits_per_second= parent->bitrate; + mpeg4_seq_param.frame_rate = + (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; + mpeg4_seq_param.initial_qp = parent->initial_qp; + mpeg4_seq_param.min_qp = parent->min_qp; + mpeg4_seq_param.intra_period = parent->intra_period; + + + //mpeg4_seq_param.fixed_vop_rate = 30; + + + + LOG_V( + "===mpeg4 sequence params===\n"); + + LOG_I( "profile_and_level_indication = %d\n", + (guint)mpeg4_seq_param.profile_and_level_indication); + LOG_I( "intra_period = %d\n", + 
mpeg4_seq_param.intra_period); + LOG_I( "video_object_layer_width = %d\n", + mpeg4_seq_param.video_object_layer_width); + LOG_I( "video_object_layer_height = %d\n", + mpeg4_seq_param.video_object_layer_height); + LOG_I( "vop_time_increment_resolution = %d\n", + mpeg4_seq_param.vop_time_increment_resolution); + LOG_I( "fixed_vop_rate = %d\n", + mpeg4_seq_param.fixed_vop_rate); + LOG_I( "fixed_vop_time_increment = %d\n", + mpeg4_seq_param.fixed_vop_time_increment); + LOG_I( "bitrate = %d\n", + mpeg4_seq_param.bits_per_second); + LOG_I( "frame_rate = %d\n", + mpeg4_seq_param.frame_rate); + LOG_I( "initial_qp = %d\n", + mpeg4_seq_param.initial_qp); + LOG_I( "min_qp = %d\n", + mpeg4_seq_param.min_qp); + LOG_I( "intra_period = %d\n\n", + mpeg4_seq_param.intra_period); + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncSequenceParameterBufferType, + sizeof(mpeg4_seq_param), + 1, &mpeg4_seq_param, + &seq_para_buf_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &seq_para_buf_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; + + +} + +MIX_RESULT mix_videofmtenc_mpeg4_send_picture_parameter (MixVideoFormatEnc_MPEG4 *mix) +{ + VAStatus va_status; + VAEncPictureParameterBufferMPEG4 mpeg4_pic_param; + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + +#if 0 //not needed currently + MixVideoConfigParamsEncMPEG4 * params_mpeg4 + = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc); +#endif + + if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + return MIX_RESULT_INVALID_PARAM; + + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set picture params for HW*/ + mpeg4_pic_param.reference_picture = mix->ref_frame->frame_id; + mpeg4_pic_param.reconstructed_picture = mix->rec_frame->frame_id; + mpeg4_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; + mpeg4_pic_param.picture_width = parent->picture_width; + mpeg4_pic_param.picture_height = parent->picture_height; + mpeg4_pic_param.vop_time_increment= mix->encoded_frames; + mpeg4_pic_param.picture_type = mix->is_intra ? 
VAEncPictureTypeIntra : VAEncPictureTypePredictive; + + + + LOG_V( + "======mpeg4 picture params======\n"); + LOG_I( "reference_picture = 0x%08x\n", + mpeg4_pic_param.reference_picture); + LOG_I( "reconstructed_picture = 0x%08x\n", + mpeg4_pic_param.reconstructed_picture); + LOG_I( "coded_buf = 0x%08x\n", + mpeg4_pic_param.coded_buf); + LOG_I( "coded_buf_index = %d\n", + mix->coded_buf_index); + LOG_I( "picture_width = %d\n", + mpeg4_pic_param.picture_width); + LOG_I( "picture_height = %d\n", + mpeg4_pic_param.picture_height); + LOG_I( "vop_time_increment = %d\n", + mpeg4_pic_param.vop_time_increment); + LOG_I( "picture_type = %d\n\n", + mpeg4_pic_param.picture_type); + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncPictureParameterBufferType, + sizeof(mpeg4_pic_param), + 1,&mpeg4_pic_param, + &mix->pic_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->pic_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); + return MIX_RESULT_FAIL; + } + + return MIX_RESULT_SUCCESS; + +} + + +MIX_RESULT mix_videofmtenc_mpeg4_send_slice_parameter (MixVideoFormatEnc_MPEG4 *mix) +{ + VAStatus va_status; + + guint slice_height; + guint slice_index; + guint slice_height_in_mb; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + + MixVideoFormatEnc *parent = NULL; + + if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + slice_height = parent->picture_height; + + slice_height += 15; + slice_height &= (~15); + + VAEncSliceParameterBuffer slice_param; + slice_index = 0; + slice_height_in_mb = slice_height / 16; + slice_param.start_row_number = 0; + slice_param.slice_height = slice_height / 16; + slice_param.slice_flags.bits.is_intra = mix->is_intra; + slice_param.slice_flags.bits.disable_deblocking_filter_idc + = mix->disable_deblocking_filter_idc; + + LOG_V( + "======mpeg4 slice params======\n"); + + LOG_I( "start_row_number = %d\n", + (gint) slice_param.start_row_number); + LOG_I( "slice_height_in_mb = %d\n", + (gint) slice_param.slice_height); + LOG_I( "slice.is_intra = %d\n", + (gint) slice_param.slice_flags.bits.is_intra); + LOG_I( + "disable_deblocking_filter_idc = %d\n\n", + (gint) mix->disable_deblocking_filter_idc); + + va_status = vaCreateBuffer (parent->va_display, parent->va_context, + VAEncSliceParameterBufferType, + sizeof(VAEncSliceParameterBuffer), + 1, &slice_param, + &mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->slice_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, + MixBuffer * bufin, MixIOVec * iovout) +{ + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus va_status = VA_STATUS_SUCCESS; + VADisplay va_display = NULL; + VAContextID va_context; + gulong surface = 0; + guint16 width, height; + + MixVideoFrame * tmp_frame; + guint8 *buf; + + if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { + 
LOG_E( + "mix == NUL) || bufin == NULL || iovout == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + return MIX_RESULT_INVALID_PARAM; + + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + va_display = parent->va_display; + va_context = parent->va_context; + width = parent->picture_width; + height = parent->picture_height; + + + LOG_I( "encoded_frames = %d\n", + mix->encoded_frames); + LOG_I( "is_intra = %d\n", + mix->is_intra); + LOG_I( "ci_frame_id = 0x%08x\n", + (guint) parent->ci_frame_id); + + /* determine the picture type*/ + if ((mix->encoded_frames % parent->intra_period) == 0) { + mix->is_intra = TRUE; + } else { + mix->is_intra = FALSE; + } + + LOG_I( "is_intra_picture = %d\n", + mix->is_intra); + + LOG_V( + "Get Surface from the pool\n"); + + /*current we use one surface for source data, + * one for reference and one for reconstructed*/ + /*TODO, could be refine here*/ + + if (!parent->share_buf_mode) { + LOG_V( + "We are NOT in share buffer mode\n"); + + if (mix->ref_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + if (mix->rec_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + if (parent->need_display) { + mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + LOG_V( "Get Surface Done\n"); + + + VAImage src_image; + guint8 *pvbuf; + guint8 *dst_y; + guint8 *dst_uv; + int i,j; + + LOG_V( + "map source data to surface\n"); + + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videoframe_get_frame_id\n"); + goto cleanup; + } + + + LOG_I( + "surface id = 0x%08x\n", (guint) surface); + + va_status = vaDeriveImage(va_display, surface, &src_image); + //need to destroy + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDeriveImage\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + VAImage *image = &src_image; + + LOG_V( "vaDeriveImage Done\n"); + + + va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed to vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "vaImage information\n"); + LOG_I( + "image->pitches[0] = %d\n", image->pitches[0]); + LOG_I( + "image->pitches[1] = %d\n", image->pitches[1]); + LOG_I( + "image->offsets[0] = %d\n", image->offsets[0]); + LOG_I( + "image->offsets[1] = %d\n", image->offsets[1]); + LOG_I( + "image->num_planes = %d\n", image->num_planes); + LOG_I( + "image->width = %d\n", image->width); + LOG_I( + "image->height = %d\n", image->height); + + LOG_I( + "input buf size = %d\n", bufin->size); + + guint8 *inbuf = bufin->data; + + /*need to convert YUV420 to NV12*/ + dst_y = pvbuf +image->offsets[0]; + + for (i = 0; i < height; i ++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + + dst_uv = pvbuf + image->offsets[1]; + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; + 
dst_uv [j + 1] = + inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; + } + dst_uv += image->pitches[1]; + } + + va_status = vaUnmapBuffer(va_display, image->buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + va_status = vaDestroyImage(va_display, src_image.image_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDestroyImage\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "Map source data to surface done\n"); + + } + + else {//if (!parent->share_buf_mode) + + MixVideoFrame * frame = mix_videoframe_new(); + + if (mix->ref_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->ref_frame, frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used + { + LOG_E( + "get reference surface from pool failed\n"); + goto cleanup; + } + } + + if (mix->rec_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->rec_frame, frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get recontructed surface from pool failed\n"); + goto cleanup; + } + } + + if (parent->need_display) { + mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + guint ci_idx; + memcpy (&ci_idx, bufin->data, bufin->size); + + LOG_I( + "surface_num = %d\n", mix->surface_num); + LOG_I( + "ci_frame_idx = %d\n", ci_idx); + + if (ci_idx > mix->surface_num - 2) { + LOG_E( + "the CI frame idx is too bigger than CI frame number\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->cur_frame, frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get current working surface from pool failed\n"); + goto cleanup; + } + } + + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + + } + + LOG_V( "vaBeginPicture\n"); + LOG_I( "va_context = 0x%08x\n",(guint)va_context); + LOG_I( "surface = 0x%08x\n",(guint)surface); + LOG_I( "va_display = 0x%08x\n",(guint)va_display); + + + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaBeginPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + ret = mix_videofmtenc_mpeg4_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); + goto cleanup; + } + + + if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + } + + if (mix->encoded_frames == 0) { + mix->encoded_frames ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + + mix->last_frame = mix->cur_frame; + + + /* determine the picture type*/ + if ((mix->encoded_frames % 
parent->intra_period) == 0) { + mix->is_intra = TRUE; + } else { + mix->is_intra = FALSE; + } + + tmp_frame = mix->rec_frame; + mix->rec_frame= mix->ref_frame; + mix->ref_frame = tmp_frame; + + + } + + LOG_V( "vaSyncSurface\n"); + + va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaSyncSurface\n"); + //return MIX_RESULT_FAIL; + } + + + LOG_V( + "Start to get encoded data\n"); + + /*get encoded data from the VA buffer*/ + va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + VACodedBufferSegment *coded_seg = NULL; + int num_seg = 0; + guint total_size = 0; + guint size = 0; + + coded_seg = (VACodedBufferSegment *)buf; + num_seg = 1; + + while (1) { + total_size += coded_seg->size; + + if (coded_seg->next == NULL) + break; + + coded_seg ++; + num_seg ++; + } + + +#if 0 + // first 4 bytes is the size of the buffer + memcpy (&(iovout->data_size), (void*)buf, 4); + //size = (guint*) buf; +#endif + + iovout->data_size = total_size; + + if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. + + iovout->data = g_malloc (iovout->data_size); + if (iovout->data == NULL) { + LOG_E( "iovout->data == NULL\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + } + + //memcpy (iovout->data, buf + 16, iovout->data_size); + + coded_seg = (VACodedBufferSegment *)buf; + total_size = 0; + + while (1) { + + memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); + total_size += coded_seg->size; + + if (coded_seg->next == NULL) + break; + + coded_seg ++; + } + + iovout->buffer_size = iovout->data_size; + + LOG_I( + "out size is = %d\n", iovout->data_size); + + va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "get encoded data done\n"); + + if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + } + + if (mix->encoded_frames == 1) { + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaBeginPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + ret = mix_videofmtenc_mpeg4_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); + goto cleanup; + } + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + } + + VASurfaceStatus status; + + /*query the status of current surface*/ + va_status = vaQuerySurfaceStatus(va_display, surface, &status); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaQuerySurfaceStatus\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + mix->pic_skipped = status & VASurfaceSkipped; + + //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame); + + if (parent->need_display) { + ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + goto cleanup; + } + + ret = mix_framemanager_enqueue(parent->framemgr, 
mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_framemanager_enqueue\n"); + goto cleanup; + } + } + + /*update the reference surface and reconstructed surface */ + if (!mix->pic_skipped) { + tmp_frame = mix->rec_frame; + mix->rec_frame= mix->ref_frame; + mix->ref_frame = tmp_frame; + } + +#if 0 + if (mix->ref_frame != NULL) + mix_videoframe_unref (mix->ref_frame); + mix->ref_frame = mix->rec_frame; + + mix_videoframe_unref (mix->cur_frame); +#endif + + mix->encoded_frames ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + mix->last_frame = mix->cur_frame; + + if (!(parent->need_display)) { + mix_videoframe_unref (mix->cur_frame); + mix->cur_frame = NULL; + } + + +cleanup: + + if (ret != MIX_RESULT_SUCCESS) { + if (iovout->data) { + g_free (iovout->data); + iovout->data = NULL; + } + } + + LOG_V( "end\n"); + + return ret; +} + +MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size ( + MixVideoFormatEnc *mix, guint * max_size) +{ + + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) + { + LOG_E( + "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + parent = MIX_VIDEOFORMATENC(mix); + MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix); + + if (MIX_IS_VIDEOFORMATENC_MPEG4(self)) { + + if (self->coded_buf_size > 0) { + *max_size = self->coded_buf_size; + LOG_V ("Already calculate the max encoded size, get the value directly"); + return MIX_RESULT_SUCCESS; + } + + /*base on the rate control mode to calculate the defaule encoded buffer size*/ + if (self->va_rcmode == VA_RC_NONE) { + self->coded_buf_size = + (parent->picture_width* parent->picture_height * 400) / (16 * 16); + // set to value according to QP + } + else { + self->coded_buf_size = parent->bitrate/ 4; + } + + self->coded_buf_size = + max (self->coded_buf_size , + (parent->picture_width* parent->picture_height * 400) / (16 * 16)); + + /*in case got a very large user input bit rate value*/ + self->coded_buf_size = + max(self->coded_buf_size, + (parent->picture_width * parent->picture_height * 1.5 * 8)); + self->coded_buf_size = (self->coded_buf_size + 15) &(~15); + } + else + { + LOG_E( + "not MPEG4 video encode Object\n"); + return MIX_RESULT_INVALID_PARAM; + } + + *max_size = self->coded_buf_size; + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_mpeg4_send_encode_command (MixVideoFormatEnc_MPEG4 *mix) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + { + if (mix->encoded_frames == 0) { + ret = mix_videofmtenc_mpeg4_send_seq_params (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_seq_params\n"); + return MIX_RESULT_FAIL; + } + } + + ret = mix_videofmtenc_mpeg4_send_picture_parameter (mix); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_picture_parameter\n"); + return MIX_RESULT_FAIL; + } + + ret = mix_videofmtenc_mpeg4_send_slice_parameter (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_slice_parameter\n"); + return MIX_RESULT_FAIL; + } + + } + else + { + LOG_E( + "not MPEG4 video encode Object\n"); + return MIX_RESULT_INVALID_PARAM; + } + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixvideoformatenc_mpeg4.h b/mix_video/src/mixvideoformatenc_mpeg4.h index 4a7deb1..fc83d95 100644 --- a/mix_video/src/mixvideoformatenc_mpeg4.h +++ 
b/mix_video/src/mixvideoformatenc_mpeg4.h @@ -1,143 +1,143 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEOFORMATENC_MPEG4_H__ -#define __MIX_VIDEOFORMATENC_MPEG4_H__ - -#include "mixvideoformatenc.h" -#include "mixvideoframe_private.h" - -#define MIX_VIDEO_ENC_MPEG4_SURFACE_NUM 20 - -#define min(X,Y) (((X) < (Y)) ? (X) : (Y)) -#define max(X,Y) (((X) > (Y)) ? (X) : (Y)) - -/* - * Type macros. - */ -#define MIX_TYPE_VIDEOFORMATENC_MPEG4 (mix_videoformatenc_mpeg4_get_type ()) -#define MIX_VIDEOFORMATENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4)) -#define MIX_IS_VIDEOFORMATENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4)) -#define MIX_VIDEOFORMATENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4Class)) -#define MIX_IS_VIDEOFORMATENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC_MPEG4)) -#define MIX_VIDEOFORMATENC_MPEG4_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4Class)) - -typedef struct _MixVideoFormatEnc_MPEG4 MixVideoFormatEnc_MPEG4; -typedef struct _MixVideoFormatEnc_MPEG4Class MixVideoFormatEnc_MPEG4Class; - -struct _MixVideoFormatEnc_MPEG4 { - /*< public > */ - MixVideoFormatEnc parent; - - - VABufferID coded_buf[2]; - VABufferID last_coded_buf; - VABufferID seq_param_buf; - VABufferID pic_param_buf; - VABufferID slice_param_buf; - VASurfaceID * ci_shared_surfaces; - VASurfaceID * surfaces; - guint surface_num; - - MixVideoFrame *cur_frame; //current input frame to be encoded; - MixVideoFrame *ref_frame; //reference frame - MixVideoFrame *rec_frame; //reconstructed frame; - MixVideoFrame *last_frame; //last frame; - - - guchar profile_and_level_indication; - guint fixed_vop_time_increment; - guint disable_deblocking_filter_idc; - - guint va_rcmode; - - guint encoded_frames; - gboolean pic_skipped; - - gboolean is_intra; - - guint coded_buf_size; - guint coded_buf_index; - - - /*< public > */ -}; - -/** - * MixVideoFormatEnc_MPEG4Class: - * - * MI-X Video object class - */ -struct _MixVideoFormatEnc_MPEG4Class { - /*< public > */ - MixVideoFormatEncClass parent_class; - - /* class members */ - - /*< public > */ -}; - -/** - * mix_videoformatenc_mpeg4_get_type: - * @returns: type - * - * Get the type of object. 
- */ -GType mix_videoformatenc_mpeg4_get_type(void); - -/** - * mix_videoformatenc_mpeg4_new: - * @returns: A newly allocated instance of #MixVideoFormatEnc_MPEG4 - * - * Use this method to create new instance of #MixVideoFormatEnc_MPEG4 - */ -MixVideoFormatEnc_MPEG4 *mix_videoformatenc_mpeg4_new(void); - -/** - * mix_videoformatenc_mpeg4_ref: - * @mix: object to add reference - * @returns: the MixVideoFormatEnc_MPEG4 instance where reference count has been increased. - * - * Add reference count. - */ -MixVideoFormatEnc_MPEG4 *mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix); - -/** - * mix_videoformatenc_mpeg4_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -#define mix_videoformatenc_mpeg4_unref(obj) g_object_unref (G_OBJECT(obj)) - -/* Class Methods */ - -/* MPEG-4:2 vmethods */ -MIX_RESULT mix_videofmtenc_mpeg4_getcaps(MixVideoFormatEnc *mix, GString *msg); -MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); -MIX_RESULT mix_videofmtenc_mpeg4_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params); -MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_mpeg4_eos(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_mpeg4_deinitialize(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint * max_size); - -/* Local Methods */ - -MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, MixBuffer * bufin, - MixIOVec * iovout); -MIX_RESULT mix_videofmtenc_mpeg4_send_encode_command (MixVideoFormatEnc_MPEG4 *mix); - -#endif /* __MIX_VIDEOFORMATENC_MPEG4_H__ */ - +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __MIX_VIDEOFORMATENC_MPEG4_H__ +#define __MIX_VIDEOFORMATENC_MPEG4_H__ + +#include "mixvideoformatenc.h" +#include "mixvideoframe_private.h" + +#define MIX_VIDEO_ENC_MPEG4_SURFACE_NUM 20 + +#define min(X,Y) (((X) < (Y)) ? (X) : (Y)) +#define max(X,Y) (((X) > (Y)) ? (X) : (Y)) + +/* + * Type macros. 
+ */ +#define MIX_TYPE_VIDEOFORMATENC_MPEG4 (mix_videoformatenc_mpeg4_get_type ()) +#define MIX_VIDEOFORMATENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4)) +#define MIX_IS_VIDEOFORMATENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4)) +#define MIX_VIDEOFORMATENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4Class)) +#define MIX_IS_VIDEOFORMATENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC_MPEG4)) +#define MIX_VIDEOFORMATENC_MPEG4_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4Class)) + +typedef struct _MixVideoFormatEnc_MPEG4 MixVideoFormatEnc_MPEG4; +typedef struct _MixVideoFormatEnc_MPEG4Class MixVideoFormatEnc_MPEG4Class; + +struct _MixVideoFormatEnc_MPEG4 { + /*< public > */ + MixVideoFormatEnc parent; + + + VABufferID coded_buf[2]; + VABufferID last_coded_buf; + VABufferID seq_param_buf; + VABufferID pic_param_buf; + VABufferID slice_param_buf; + VASurfaceID * ci_shared_surfaces; + VASurfaceID * surfaces; + guint surface_num; + + MixVideoFrame *cur_frame; //current input frame to be encoded; + MixVideoFrame *ref_frame; //reference frame + MixVideoFrame *rec_frame; //reconstructed frame; + MixVideoFrame *last_frame; //last frame; + + + guchar profile_and_level_indication; + guint fixed_vop_time_increment; + guint disable_deblocking_filter_idc; + + guint va_rcmode; + + guint encoded_frames; + gboolean pic_skipped; + + gboolean is_intra; + + guint coded_buf_size; + guint coded_buf_index; + + + /*< public > */ +}; + +/** + * MixVideoFormatEnc_MPEG4Class: + * + * MI-X Video object class + */ +struct _MixVideoFormatEnc_MPEG4Class { + /*< public > */ + MixVideoFormatEncClass parent_class; + + /* class members */ + + /*< public > */ +}; + +/** + * mix_videoformatenc_mpeg4_get_type: + * @returns: type + * + * Get the type of object. + */ +GType mix_videoformatenc_mpeg4_get_type(void); + +/** + * mix_videoformatenc_mpeg4_new: + * @returns: A newly allocated instance of #MixVideoFormatEnc_MPEG4 + * + * Use this method to create new instance of #MixVideoFormatEnc_MPEG4 + */ +MixVideoFormatEnc_MPEG4 *mix_videoformatenc_mpeg4_new(void); + +/** + * mix_videoformatenc_mpeg4_ref: + * @mix: object to add reference + * @returns: the MixVideoFormatEnc_MPEG4 instance where reference count has been increased. + * + * Add reference count. + */ +MixVideoFormatEnc_MPEG4 *mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix); + +/** + * mix_videoformatenc_mpeg4_unref: + * @obj: object to unref. + * + * Decrement reference count of the object. 
+ */ +#define mix_videoformatenc_mpeg4_unref(obj) g_object_unref (G_OBJECT(obj)) + +/* Class Methods */ + +/* MPEG-4:2 vmethods */ +MIX_RESULT mix_videofmtenc_mpeg4_getcaps(MixVideoFormatEnc *mix, GString *msg); +MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); +MIX_RESULT mix_videofmtenc_mpeg4_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params); +MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_mpeg4_eos(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_mpeg4_deinitialize(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint * max_size); + +/* Local Methods */ + +MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, MixBuffer * bufin, + MixIOVec * iovout); +MIX_RESULT mix_videofmtenc_mpeg4_send_encode_command (MixVideoFormatEnc_MPEG4 *mix); + +#endif /* __MIX_VIDEOFORMATENC_MPEG4_H__ */ + diff --git a/mix_video/src/mixvideoformatenc_preview.c b/mix_video/src/mixvideoformatenc_preview.c index 6aeeb9e..34aa89b 100644 --- a/mix_video/src/mixvideoformatenc_preview.c +++ b/mix_video/src/mixvideoformatenc_preview.c @@ -14,7 +14,6 @@ #include "mixvideoformatenc_preview.h" #include "mixvideoconfigparamsenc_preview.h" -#define MDEBUG #undef SHOW_SRC #ifdef SHOW_SRC @@ -38,9 +37,8 @@ G_DEFINE_TYPE (MixVideoFormatEnc_Preview, mix_videoformatenc_preview, MIX_TYPE_V static void mix_videoformatenc_preview_init(MixVideoFormatEnc_Preview * self) { MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); - /* TODO: public member initialization */ - /* TODO: private member initialization */ + /* member variable initialization */ self->encoded_frames = 0; self->pic_skipped = FALSE; self->is_intra = TRUE; @@ -72,7 +70,6 @@ static void mix_videoformatenc_preview_class_init( gobject_class->finalize = mix_videoformatenc_preview_finalize; /* setup vmethods with base implementation */ - /* TODO: decide if we need to override the parent's methods */ video_formatenc_class->getcaps = mix_videofmtenc_preview_getcaps; video_formatenc_class->initialize = mix_videofmtenc_preview_initialize; video_formatenc_class->encode = mix_videofmtenc_preview_encode; @@ -111,12 +108,6 @@ mix_videoformatenc_preview_ref(MixVideoFormatEnc_Preview * mix) { /*Preview vmethods implementation */ MIX_RESULT mix_videofmtenc_preview_getcaps(MixVideoFormatEnc *mix, GString *msg) { - /* TODO: add codes for Preview format */ - - /* TODO: decide if we need to chainup parent method. - * if we do, the following is the code: - */ - LOG_V( "mix_videofmtenc_preview_getcaps\n"); if (mix == NULL) { @@ -143,7 +134,7 @@ MIX_RESULT mix_videofmtenc_preview_initialize(MixVideoFormatEnc *mix, MixVideoConfigParamsEncPreview * config_params_enc_preview; VAStatus va_status = VA_STATUS_SUCCESS; - VASurfaceID * surfaces; + VASurfaceID * surfaces = NULL; gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; gint va_num_profiles, va_num_entrypoints; @@ -164,11 +155,8 @@ MIX_RESULT mix_videofmtenc_preview_initialize(MixVideoFormatEnc *mix, LOG_V( "begin\n"); - - //TODO additional parameter checking - + /* Chainup parent method. 
*/ -#if 1 if (parent_class->initialize) { ret = parent_class->initialize(mix, config_params_enc, frame_mgr, input_buf_pool, surface_pool, @@ -180,440 +168,421 @@ MIX_RESULT mix_videofmtenc_preview_initialize(MixVideoFormatEnc *mix, return ret; } -#endif //disable it currently - if (MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) - { - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc)) { - config_params_enc_preview = - MIX_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc); - } else { - LOG_V( - "mix_videofmtenc_preview_initialize: no preview config params found\n"); - return MIX_RESULT_FAIL; - } - - g_mutex_lock(parent->objectlock); - + if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) + return MIX_RESULT_INVALID_PARAM; + + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc)) { + config_params_enc_preview = + MIX_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc); + } else { LOG_V( - "Get properities from params done\n"); - + "mix_videofmtenc_preview_initialize: no preview config params found\n"); + return MIX_RESULT_FAIL; + } + + g_mutex_lock(parent->objectlock); - //display = XOpenDisplay(NULL); - //va_display = vaGetDisplay (videoencobj->display); + + LOG_V( + "Get properities from params done\n"); - parent->va_display = va_display; - - LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", - (guint)va_display); + parent->va_display = va_display; + + LOG_V( "Get Display\n"); + LOG_I( "Display = 0x%08x\n", + (guint)va_display); - //va_status = vaInitialize(va_display, &va_major_ver, &va_minor_ver); - //g_print ("vaInitialize va_status = %d\n", va_status); + + /*get the max number for profiles/entrypoints/attribs*/ + va_max_num_profiles = vaMaxNumProfiles(va_display); + LOG_I( "va_max_num_profiles = %d\n", + va_max_num_profiles); + + va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); + LOG_I( "va_max_num_entrypoints = %d\n", + va_max_num_entrypoints); + + va_max_num_attribs = vaMaxNumConfigAttributes(va_display); + LOG_I( "va_max_num_attribs = %d\n", + va_max_num_attribs); + + va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); + va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); + + if (va_profiles == NULL || va_entrypoints ==NULL) + { + LOG_E( + "!va_profiles || !va_entrypoints\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + LOG_I( + "va_profiles = 0x%08x\n", (guint)va_profiles); -#if 0 - /* query the vender information, can ignore*/ - va_vendor = vaQueryVendorString (va_display); - LOG_I( "Vendor = %s\n", - va_vendor); -#endif - - /*get the max number for profiles/entrypoints/attribs*/ - va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); - - va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); - - va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - - va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); - va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); - - if (va_profiles == NULL || va_entrypoints ==NULL) - { - LOG_E( - "!va_profiles || !va_entrypoints\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_NO_MEMORY; - } + LOG_V( "vaQueryConfigProfiles\n"); + + + 
va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigProfiles\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "vaQueryConfigProfiles Done\n"); - LOG_I( - "va_profiles = 0x%08x\n", (guint)va_profiles); - - LOG_V( "vaQueryConfigProfiles\n"); - - - va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigProfiles\n"); - g_free(va_profiles); - g_free (va_entrypoints); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - LOG_V( "vaQueryConfigProfiles Done\n"); + + + /*check whether profile is supported*/ + for(index= 0; index < va_num_profiles; index++) { + if(parent->va_profile == va_profiles[index]) + break; + } + + if(index == va_num_profiles) + { + LOG_E( "Profile not supported\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } - - - /*check whether profile is supported*/ - for(index= 0; index < va_num_profiles; index++) { - if(parent->va_profile == va_profiles[index]) - break; - } - - if(index == va_num_profiles) - { - LOG_E( "Profile not supported\n"); - g_free(va_profiles); - g_free (va_entrypoints); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; //Todo, add error handling here - } + LOG_V( "vaQueryConfigEntrypoints\n"); + - LOG_V( "vaQueryConfigEntrypoints\n"); - - - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints(va_display, - parent->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); - g_free(va_profiles); - g_free (va_entrypoints); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < va_num_entrypoints; index ++) { - if (va_entrypoints[index] == VAEntrypointEncSlice) { - break; - } - } - - if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); - g_free(va_profiles); - g_free (va_entrypoints); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; //Todo, add error handling here - } - - - /*free profiles and entrypoints*/ - g_free(va_profiles); - g_free (va_entrypoints); - - va_attrib[0].type = VAConfigAttribRTFormat; - va_attrib[1].type = VAConfigAttribRateControl; - - LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaGetConfigAttributes\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - if ((va_attrib[0].value & parent->va_format) == 0) { - LOG_E( "Matched format not found\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; //Todo, add error handling here - } - - - if ((va_attrib[1].value & parent->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; //Todo, add error handling here - } - - va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = parent->va_rcmode; - - LOG_V( "======VA Configuration======\n"); - - LOG_I( "profile = %d\n", - parent->va_profile); - LOG_I( "va_entrypoint = %d\n", - parent->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", - va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", - va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", - va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) 
= %d\n", - va_attrib[1].value); - - LOG_V( "vaCreateConfig\n"); - - va_status = vaCreateConfig(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2, &(parent->va_config)); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaCreateConfig\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; + /*Check entry point*/ + va_status = vaQueryConfigEntrypoints(va_display, + parent->va_profile, + va_entrypoints, &va_num_entrypoints); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigEntrypoints\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + for (index = 0; index < va_num_entrypoints; index ++) { + if (va_entrypoints[index] == VAEntrypointEncSlice) { + break; } + } + + if (index == va_num_entrypoints) { + LOG_E( "Entrypoint not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + va_attrib[0].type = VAConfigAttribRTFormat; + va_attrib[1].type = VAConfigAttribRateControl; + + LOG_V( "vaGetConfigAttributes\n"); + + va_status = vaGetConfigAttributes(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaGetConfigAttributes\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + if ((va_attrib[0].value & parent->va_format) == 0) { + LOG_E( "Matched format not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + if ((va_attrib[1].value & parent->va_rcmode) == 0) { + LOG_E( "RC mode not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; + va_attrib[1].value = parent->va_rcmode; + + LOG_V( "======VA Configuration======\n"); + + LOG_I( "profile = %d\n", + parent->va_profile); + LOG_I( "va_entrypoint = %d\n", + parent->va_entrypoint); + LOG_I( "va_attrib[0].type = %d\n", + va_attrib[0].type); + LOG_I( "va_attrib[1].type = %d\n", + va_attrib[1].type); + LOG_I( "va_attrib[0].value (Format) = %d\n", + va_attrib[0].value); + LOG_I( "va_attrib[1].value (RC mode) = %d\n", + va_attrib[1].value); + + LOG_V( "vaCreateConfig\n"); + + va_status = vaCreateConfig(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2, &(parent->va_config)); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaCreateConfig\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } - /*TODO: compute the surface number*/ - int numSurfaces; - - if (parent->share_buf_mode) { - numSurfaces = 2; - } - else { - numSurfaces = 8; - parent->ci_frame_num = 0; - } - - self->surface_num = numSurfaces + parent->ci_frame_num; - - surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); - - if (surfaces == NULL) - { - LOG_E( - "Failed allocate surface\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_NO_MEMORY; - } - - LOG_V( "vaCreateSurfaces\n"); + /*compute the surface number*/ + int numSurfaces; + + if (parent->share_buf_mode) { + numSurfaces = 2; + } + else { + numSurfaces = 8; + parent->ci_frame_num = 0; + } + + self->surface_num = numSurfaces + parent->ci_frame_num; + + surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); + + if (surfaces == NULL) + { + LOG_E( + "Failed allocate surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + LOG_V( "vaCreateSurfaces\n"); + + va_status = vaCreateSurfaces(va_display, parent->picture_width, + parent->picture_height, parent->va_format, + numSurfaces, surfaces); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaCreateSurfaces\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + if 
(parent->share_buf_mode) { - va_status = vaCreateSurfaces(va_display, parent->picture_width, - parent->picture_height, parent->va_format, - numSurfaces, surfaces); - //TODO check vret and return fail if needed + LOG_V( + "We are in share buffer mode!\n"); + self->ci_shared_surfaces = + g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); - if (va_status != VA_STATUS_SUCCESS) + if (self->ci_shared_surfaces == NULL) { LOG_E( - "Failed vaCreateSurfaces\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; + "Failed allocate shared surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; } - - if (parent->share_buf_mode) { + + guint index; + for(index = 0; index < parent->ci_frame_num; index++) { - LOG_V( - "We are in share buffer mode!\n"); - self->ci_shared_surfaces = - g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); - - if (self->ci_shared_surfaces == NULL) - { - LOG_E( - "Failed allocate shared surface\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_NO_MEMORY; - } - - guint index; - for(index = 0; index < parent->ci_frame_num; index++) { - - LOG_I( "ci_frame_id = %lu\n", - parent->ci_frame_id[index]); - - LOG_V( - "vaCreateSurfaceFromCIFrame\n"); - - va_status = vaCreateSurfaceFromCIFrame(va_display, - (gulong) (parent->ci_frame_id[index]), - &self->ci_shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateSurfaceFromCIFrame\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - } + LOG_I( "ci_frame_id = %lu\n", + parent->ci_frame_id[index]); LOG_V( - "vaCreateSurfaceFromCIFrame Done\n"); + "vaCreateSurfaceFromCIFrame\n"); - }// if (parent->share_buf_mode) + va_status = vaCreateSurfaceFromCIFrame(va_display, + (gulong) (parent->ci_frame_id[index]), + &self->ci_shared_surfaces[index]); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateSurfaceFromCIFrame\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + + } + } - self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); + LOG_V( + "vaCreateSurfaceFromCIFrame Done\n"); - if (self->surfaces == NULL) - { - LOG_E( - "Failed allocate private surface\n"); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_NO_MEMORY; - } + }// if (parent->share_buf_mode) + + self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); + + if (self->surfaces == NULL) + { + LOG_E( + "Failed allocate private surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; - if (parent->share_buf_mode) { - /*shared surfaces should be put in pool first, - * because we will get it accoring to CI index*/ - for(index = 0; index < parent->ci_frame_num; index++) - self->surfaces[index] = self->ci_shared_surfaces[index]; - } - - for(index = 0; index < numSurfaces; index++) { - self->surfaces[index + parent->ci_frame_num] = surfaces[index]; - } + } - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", - numSurfaces + parent->ci_frame_num); - + if (parent->share_buf_mode) { + /*shared surfaces should be put in pool first, + * because we will get it accoring to CI index*/ + for(index = 0; index < parent->ci_frame_num; index++) + self->surfaces[index] = self->ci_shared_surfaces[index]; + } + + for(index = 0; index < numSurfaces; index++) { + self->surfaces[index + parent->ci_frame_num] = surfaces[index]; + } + + LOG_V( "assign surface Done\n"); + LOG_I( "Created %d libva surfaces\n", + numSurfaces + parent->ci_frame_num); + #if 0 //current put this in gst - images = 
g_malloc(sizeof(VAImage)*numSurfaces); - if (images == NULL) - { - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < numSurfaces; index++) { - //Derive an VAImage from an existing surface. - //The image buffer can then be mapped/unmapped for CPU access - va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); - } + images = g_malloc(sizeof(VAImage)*numSurfaces); + if (images == NULL) + { + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + for (index = 0; index < numSurfaces; index++) { + //Derive an VAImage from an existing surface. + //The image buffer can then be mapped/unmapped for CPU access + va_status = vaDeriveImage(va_display, surfaces[index], + &images[index]); + } #endif - LOG_V( "mix_surfacepool_new\n"); + LOG_V( "mix_surfacepool_new\n"); - parent->surfacepool = mix_surfacepool_new(); - if (surface_pool) - *surface_pool = parent->surfacepool; - //which is useful to check before encode + parent->surfacepool = mix_surfacepool_new(); + if (surface_pool) + *surface_pool = parent->surfacepool; + //which is useful to check before encode - if (parent->surfacepool == NULL) - { - LOG_E( - "Failed to mix_surfacepool_new\n"); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } + if (parent->surfacepool == NULL) + { + LOG_E( + "Failed to mix_surfacepool_new\n"); - LOG_V( - "mix_surfacepool_initialize\n"); - - ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces, va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - //TODO cleanup and/or retry - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - default: - break; - } + ret = MIX_RESULT_FAIL; + goto cleanup; + } - - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext(va_display, parent->va_config, - parent->picture_width, parent->picture_height, - 0, self->surfaces, parent->ci_frame_num + numSurfaces, - &(parent->va_context)); - - LOG_I( - "Created libva context width %d, height %d\n", - parent->picture_width, parent->picture_height); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", - (guint)va_status); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } + LOG_V( + "mix_surfacepool_initialize\n"); + + ret = mix_surfacepool_initialize(parent->surfacepool, + self->surfaces, parent->ci_frame_num + numSurfaces, va_display); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + ret = MIX_RESULT_ALREADY_INIT; + goto cleanup; + + default: + break; + } - self->coded_buf_size = 4; - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &self->coded_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - g_free (surfaces); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - -#ifdef SHOW_SRC - Display * display = XOpenDisplay (NULL); - - LOG_I( "display = 0x%08x\n", - (guint) display); - win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, - parent->picture_width, parent->picture_height, 0, 0, - WhitePixel(display, 0)); - XMapWindow(display, win); - XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - - 
XSync(display, False); - LOG_I( "va_display = 0x%08x\n", - (guint) va_display); - -#endif /* SHOW_SRC */ + + //Initialize and save the VA context ID + LOG_V( "vaCreateContext\n"); + + va_status = vaCreateContext(va_display, parent->va_config, + parent->picture_width, parent->picture_height, + 0, self->surfaces, parent->ci_frame_num + numSurfaces, + &(parent->va_context)); + + LOG_I( + "Created libva context width %d, height %d\n", + parent->picture_width, parent->picture_height); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateContext\n"); + LOG_I( "va_status = %d\n", + (guint)va_status); + + ret = MIX_RESULT_FAIL; + goto cleanup; - parent->initialized = TRUE; - - g_mutex_unlock(parent->objectlock); - g_free (surfaces); - } - else + + self->coded_buf_size = 4; + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &self->coded_buf); + + if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "not Preview video encode Object\n"); - return MIX_RESULT_FAIL; + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + +#ifdef SHOW_SRC + Display * display = XOpenDisplay (NULL); + + LOG_I( "display = 0x%08x\n", + (guint) display); + win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, + parent->picture_width, parent->picture_height, 0, 0, + WhitePixel(display, 0)); + XMapWindow(display, win); + XSelectInput(display, win, KeyPressMask | StructureNotifyMask); + + XSync(display, False); + LOG_I( "va_display = 0x%08x\n", + (guint) va_display); +#endif /* SHOW_SRC */ + + + LOG_V( "end\n"); + +cleanup: + + + if (ret == MIX_RESULT_SUCCESS) { + parent->initialized = TRUE; } - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; + /*free profiles and entrypoints*/ + if (va_profiles) + g_free(va_profiles); + + if (va_entrypoints) + g_free (va_entrypoints); + + if (surfaces) + g_free (surfaces); + + g_mutex_unlock(parent->objectlock); + + return ret; } MIX_RESULT mix_videofmtenc_preview_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], @@ -626,7 +595,6 @@ MIX_RESULT mix_videofmtenc_preview_encode(MixVideoFormatEnc *mix, MixBuffer * bu LOG_V( "Begin\n"); /*currenly only support one input and output buffer*/ - //TODO: params i if (bufincnt != 1 || iovoutcnt != 1) { LOG_E( @@ -641,11 +609,6 @@ MIX_RESULT mix_videofmtenc_preview_encode(MixVideoFormatEnc *mix, MixBuffer * bu return MIX_RESULT_NULL_PTR; } - //TODO: encode_params is reserved here for future usage. - - /* TODO: decide if we need to chainup parent method. 
- * * * if we do, the following is the code: - * */ #if 0 if (parent_class->encode) { @@ -654,45 +617,40 @@ MIX_RESULT mix_videofmtenc_preview_encode(MixVideoFormatEnc *mix, MixBuffer * bu } #endif - if (MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) - { - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW (mix); - - LOG_V( "Locking\n"); - g_mutex_lock(parent->objectlock); - + if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) + return MIX_RESULT_INVALID_PARAM; - //TODO: also we could move some encode Preparation work to here + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW (mix); - LOG_V( - "mix_videofmtenc_preview_process_encode\n"); + LOG_V( "Locking\n"); + g_mutex_lock(parent->objectlock); + + + //TODO: we also could move some encode Preparation work to here - ret = mix_videofmtenc_preview_process_encode (self, - bufin[0], iovout[0]); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_preview_process_encode\n"); - return MIX_RESULT_FAIL; - } - - - LOG_V( "UnLocking\n"); - - g_mutex_unlock(parent->objectlock); - } - else + LOG_V( + "mix_videofmtenc_preview_process_encode\n"); + + ret = mix_videofmtenc_preview_process_encode (self, + bufin[0], iovout[0]); + if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "not Preview video encode Object\n"); - return MIX_RESULT_FAIL; + "Failed mix_videofmtenc_preview_process_encode\n"); + goto cleanup; } + +cleanup: + + LOG_V( "UnLocking\n"); + + g_mutex_unlock(parent->objectlock); + LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; + return ret; } MIX_RESULT mix_videofmtenc_preview_flush(MixVideoFormatEnc *mix) { @@ -713,6 +671,9 @@ MIX_RESULT mix_videofmtenc_preview_flush(MixVideoFormatEnc *mix) { return parent_class->flush(mix, msg); } #endif + + if (!MIX_IS_VIDEOFORMATENC_PREVIEW (mix)) + return MIX_RESULT_INVALID_PARAM; MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); @@ -754,12 +715,6 @@ MIX_RESULT mix_videofmtenc_preview_flush(MixVideoFormatEnc *mix) { } MIX_RESULT mix_videofmtenc_preview_eos(MixVideoFormatEnc *mix) { - - /* TODO: add codes for preview */ - - /* TODO: decide if we need to chainup parent method. 
- * if we do, the following is the code: - */ LOG_V( "\n"); @@ -778,6 +733,7 @@ MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix) { MixVideoFormatEnc *parent = NULL; VAStatus va_status; + MIX_RESULT ret = MIX_RESULT_SUCCESS; LOG_V( "Begin\n"); @@ -786,9 +742,23 @@ MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix) { return MIX_RESULT_NULL_PTR; } + if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) + return MIX_RESULT_INVALID_PARAM; + + + if (parent_class->deinitialize) { + ret = parent_class->deinitialize(mix); + } + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); - + LOG_V( "Release frames\n"); g_mutex_lock(parent->objectlock); @@ -837,8 +807,8 @@ MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix) { { LOG_E( "Failed vaDestroyContext\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; + ret = MIX_RESULT_FAIL; + goto cleanup; } LOG_V( "vaDestroyConfig\n"); @@ -848,25 +818,20 @@ MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix) { { LOG_E( "Failed vaDestroyConfig\n"); - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; + ret = MIX_RESULT_FAIL; + goto cleanup; } - parent->initialized = TRUE; - - g_mutex_unlock(parent->objectlock); -#if 1 - if (parent_class->deinitialize) { - return parent_class->deinitialize(mix); - } -#endif +cleanup: - //Most stuff is cleaned up in parent_class->finalize() + parent->initialized = FALSE; + + g_mutex_unlock(parent->objectlock); LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; + + return ret; } @@ -892,303 +857,330 @@ MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mi LOG_V( "Begin\n"); - if (MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) - { - - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - va_display = parent->va_display; - va_context = parent->va_context; - width = parent->picture_width; - height = parent->picture_height; + if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) + return MIX_RESULT_INVALID_PARAM; + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + va_display = parent->va_display; + va_context = parent->va_context; + width = parent->picture_width; + height = parent->picture_height; + - LOG_I( "encoded_frames = %d\n", - mix->encoded_frames); - LOG_I( "is_intra = %d\n", - mix->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", - (guint) parent->ci_frame_id); - - LOG_V( - "Get Surface from the pool\n"); - - /*current we use one surface for source data, - * one for reference and one for reconstructed*/ - /*TODO, could be refine here*/ + LOG_I( "encoded_frames = %d\n", + mix->encoded_frames); + LOG_I( "is_intra = %d\n", + mix->is_intra); + LOG_I( "ci_frame_id = 0x%08x\n", + (guint) parent->ci_frame_id); - if (!parent->share_buf_mode) { - LOG_V( - "We are NOT in share buffer mode\n"); - - if (mix->ref_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - return MIX_RESULT_FAIL; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - return MIX_RESULT_FAIL; - } - } + LOG_V( + "Get Surface from the pool\n"); - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) + if 
(!parent->share_buf_mode) { + LOG_V( + "We are NOT in share buffer mode\n"); + + if (mix->ref_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used { - ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - return MIX_RESULT_FAIL; - } + LOG_E( + "Failed to mix_surfacepool_get\n"); + goto cleanup; } - - LOG_V( "Get Surface Done\n"); - - - VAImage src_image; - guint8 *pvbuf; - guint8 *dst_y; - guint8 *dst_uv; - int i,j; - - LOG_V( - "map source data to surface\n"); - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + } + + if (mix->rec_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); - return MIX_RESULT_FAIL; + "Failed to mix_surfacepool_get\n"); + goto cleanup; } - - - LOG_I( - "surface id = 0x%08x\n", (guint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); - //need to destroy - - if (va_status != VA_STATUS_SUCCESS) + } + + if (parent->need_display) { + mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to vaDeriveImage\n"); - return MIX_RESULT_FAIL; + "Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + LOG_V( "Get Surface Done\n"); + + + VAImage src_image; + guint8 *pvbuf; + guint8 *dst_y; + guint8 *dst_uv; + int i,j; + + LOG_V( + "map source data to surface\n"); + + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videoframe_get_frame_id\n"); + goto cleanup; + } + + + LOG_I( + "surface id = 0x%08x\n", (guint) surface); + + va_status = vaDeriveImage(va_display, surface, &src_image); + //need to destroy + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDeriveImage\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + VAImage *image = &src_image; + + LOG_V( "vaDeriveImage Done\n"); + + + va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed to vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "vaImage information\n"); + LOG_I( + "image->pitches[0] = %d\n", image->pitches[0]); + LOG_I( + "image->pitches[1] = %d\n", image->pitches[1]); + LOG_I( + "image->offsets[0] = %d\n", image->offsets[0]); + LOG_I( + "image->offsets[1] = %d\n", image->offsets[1]); + LOG_I( + "image->num_planes = %d\n", image->num_planes); + LOG_I( + "image->width = %d\n", image->width); + LOG_I( + "image->height = %d\n", image->height); + + LOG_I( + "input buf size = %d\n", bufin->size); + + guint8 *inbuf = bufin->data; + + /*need to convert YUV420 to NV12*/ + dst_y = pvbuf +image->offsets[0]; + + for (i = 0; i < height; i ++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + + dst_uv = pvbuf + image->offsets[1]; + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; + dst_uv [j + 1] = + inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; } - - VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); + dst_uv += image->pitches[1]; + } + + va_status = vaUnmapBuffer(va_display, image->buf); + if (va_status != 
VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + va_status = vaDestroyImage(va_display, src_image.image_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDestroyImage\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "Map source data to surface done\n"); + + } - - va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed to vaMapBuffer\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( - "vaImage information\n"); - LOG_I( - "image->pitches[0] = %d\n", image->pitches[0]); - LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); - LOG_I( - "image->offsets[0] = %d\n", image->offsets[0]); - LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); - LOG_I( - "image->num_planes = %d\n", image->num_planes); - LOG_I( - "image->width = %d\n", image->width); - LOG_I( - "image->height = %d\n", image->height); - - LOG_I( - "input buf size = %d\n", bufin->size); - - guint8 *inbuf = bufin->data; - - /*need to convert YUV420 to NV12*/ - dst_y = pvbuf +image->offsets[0]; - - for (i = 0; i < height; i ++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = - inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; - } - dst_uv += image->pitches[1]; - } - - vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) + else {//if (!parent->share_buf_mode) + + MixVideoFrame * frame = mix_videoframe_new(); + + if (mix->ref_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } - va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->ref_frame, frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used { LOG_E( - "Failed to vaDestroyImage\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( - "Map source data to surface done\n"); - + "get reference surface from pool failed\n"); + goto cleanup; + } } - else {//if (!parent->share_buf_mode) - - MixVideoFrame * frame = mix_videoframe_new(); - - if (mix->ref_frame == NULL) + if (mix->rec_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_frame, frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "get reference surface from pool failed\n"); - return MIX_RESULT_FAIL; - } - } + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } - if (mix->rec_frame == NULL) + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->rec_frame, frame); + + if (ret != MIX_RESULT_SUCCESS) { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_frame, frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get recontructed surface from pool failed\n"); - 
return MIX_RESULT_FAIL; - } + LOG_E( + "get recontructed surface from pool failed\n"); + goto cleanup; } + } - //mix_videoframe_unref (mix->cur_frame); + //mix_videoframe_unref (mix->cur_frame); - if (parent->need_display) { - mix->cur_frame = NULL; - } + if (parent->need_display) { + mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + guint ci_idx; + memcpy (&ci_idx, bufin->data, bufin->size); - if (mix->cur_frame == NULL) - { - guint ci_idx; - memcpy (&ci_idx, bufin->data, bufin->size); - - LOG_I( - "surface_num = %d\n", mix->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > mix->surface_num - 2) { - LOG_E( - "the CI frame idx is too bigger than CI frame number\n"); - return MIX_RESULT_FAIL; - } - - - ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_frame, frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - return MIX_RESULT_FAIL; - } + LOG_I( + "surface_num = %d\n", mix->surface_num); + LOG_I( + "ci_frame_idx = %d\n", ci_idx); + + if (ci_idx > mix->surface_num - 2) { + LOG_E( + "the CI frame idx is too bigger than CI frame number\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - } - - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(guint)va_context); - LOG_I( "surface = 0x%08x\n",(guint)surface); - LOG_I( "va_display = 0x%08x\n",(guint)va_display); - - iovout->data_size = 4; - iovout->data = g_malloc (iovout->data_size); - if (iovout->data == NULL) { - return MIX_RESULT_NO_MEMORY; - } - - memset (iovout->data, 0, iovout->data_size); - - iovout->buffer_size = iovout->data_size; - - - if (parent->need_display) { + ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); + if (ret != MIX_RESULT_SUCCESS) - ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - return ret; - } + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->cur_frame, frame); - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); if (ret != MIX_RESULT_SUCCESS) - { + { LOG_E( - "Failed mix_framemanager_enqueue\n"); - return MIX_RESULT_FAIL; - } + "get current working surface from pool failed\n"); + goto cleanup; + } } + + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) + + { + LOG_E( + "mix_videoframe_get_frame_id failed\n"); + goto cleanup; + } + + } + + LOG_V( "vaBeginPicture\n"); + LOG_I( "va_context = 0x%08x\n",(guint)va_context); + LOG_I( "surface = 0x%08x\n",(guint)surface); + LOG_I( "va_display = 0x%08x\n",(guint)va_display); + + iovout->data_size = 4; + iovout->data = g_malloc (iovout->data_size); + if (iovout->data == NULL) { + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + memset (iovout->data, 0, iovout->data_size); + iovout->buffer_size = iovout->data_size; + - if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_frame); - mix->cur_frame = NULL; + if (parent->need_display) { + ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + goto cleanup; } - mix->encoded_frames ++; - } - else - { - LOG_E( - "not Preview video encode Object\n"); - return MIX_RESULT_FAIL; + ret = mix_framemanager_enqueue(parent->framemgr, 
mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_framemanager_enqueue\n"); + goto cleanup; + } } + if (!(parent->need_display)) { + mix_videoframe_unref (mix->cur_frame); + mix->cur_frame = NULL; + } + mix->encoded_frames ++; + +cleanup: + + if (ret != MIX_RESULT_SUCCESS) { + if (iovout->data) { + g_free (iovout->data); + iovout->data = NULL; + } + } + LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; + + return ret; } diff --git a/mix_video/src/mixvideoframe.c b/mix_video/src/mixvideoframe.c index c06ff5e..4ea0252 100644 --- a/mix_video/src/mixvideoframe.c +++ b/mix_video/src/mixvideoframe.c @@ -30,7 +30,9 @@ #include +#ifndef ANDROID #include +#endif #include "mixvideolog.h" #include "mixvideoframe.h" #include "mixvideoframe_private.h" @@ -496,3 +498,5 @@ MIX_RESULT mix_videoframe_get_displayorder(MixVideoFrame *obj, guint32 *displayo return MIX_RESULT_SUCCESS; } + + diff --git a/mix_video/src/mixvideoframe.h b/mix_video/src/mixvideoframe.h index 8e5637f..626e2b1 100644 --- a/mix_video/src/mixvideoframe.h +++ b/mix_video/src/mixvideoframe.h @@ -233,10 +233,6 @@ MIX_RESULT mix_videoframe_get_discontinuity(MixVideoFrame * obj, gboolean * disc */ MIX_RESULT mix_videoframe_set_vadisplay(MixVideoFrame * obj, void *va_display); MIX_RESULT mix_videoframe_get_vadisplay(MixVideoFrame * obj, void **va_display); - -MIX_RESULT mix_videoframe_set_frame_structure(MixVideoFrame * obj, guint32 frame_structure); MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, guint32* frame_structure); - - #endif /* __MIX_VIDEOFRAME_H__ */ diff --git a/mix_video/src/mixvideoframe_private.h b/mix_video/src/mixvideoframe_private.h index 72aeaa1..f337417 100644 --- a/mix_video/src/mixvideoframe_private.h +++ b/mix_video/src/mixvideoframe_private.h @@ -33,7 +33,7 @@ struct _MixVideoFramePrivate gboolean sync_flag; guint32 frame_structure; // 0: frame, 1: top field, 2: bottom field void *va_display; - guint32 displayorder; + guint32 displayorder; }; /** @@ -77,18 +77,13 @@ mix_videoframe_set_sync_flag(MixVideoFrame *obj, gboolean sync_flag); MIX_RESULT mix_videoframe_get_sync_flag(MixVideoFrame *obj, gboolean *sync_flag); +MIX_RESULT +mix_videoframe_set_frame_structure(MixVideoFrame * obj, guint32 frame_structure); + MIX_RESULT mix_videoframe_set_displayorder(MixVideoFrame *obj, guint32 displayorder); MIX_RESULT mix_videoframe_get_displayorder(MixVideoFrame *obj, guint32 *displayorder); -/* -MIX_RESULT -mix_videoframe_set_frame_structure(MixVideoFrame * obj, guint32 frame_structure); - -MIX_RESULT -mix_videoframe_get_frame_structure(MixVideoFrame * obj, guint32* frame_structure); -*/ - #endif /* __MIX_VIDEOFRAME_PRIVATE_H__ */ diff --git a/mix_video/src/mixvideoinitparams.c b/mix_video/src/mixvideoinitparams.c index ac58548..603b6cd 100644 --- a/mix_video/src/mixvideoinitparams.c +++ b/mix_video/src/mixvideoinitparams.c @@ -8,9 +8,12 @@ /** * SECTION:mixvideoinitparams - * @short_description: VideoInit parameters + * @short_description: MI-X Video Initialization Parameters * - * A data object which stores videoinit specific parameters. + * The MixVideoInitParams object will be created by the MMF/App + * and provided in the mix_video_initialize() function. + * The get and set methods for the properties will be available for + * the caller to set and get information used at initialization time. 
*/ #include "mixvideoinitparams.h" diff --git a/mix_video/src/mixvideoinitparams.h b/mix_video/src/mixvideoinitparams.h index eb7c118..e234999 100644 --- a/mix_video/src/mixvideoinitparams.h +++ b/mix_video/src/mixvideoinitparams.h @@ -71,10 +71,20 @@ struct _MixVideoInitParams /*< public > */ + /* Pointer to a MixDisplay object + * such as MixDisplayX11 */ MixDisplay *display; + + /* Reserved for future use */ void *reserved1; + + /* Reserved for future use */ void *reserved2; + + /* Reserved for future use */ void *reserved3; + + /* Reserved for future use */ void *reserved4; }; @@ -109,7 +119,7 @@ MixVideoInitParams *mix_videoinitparams_new (void); /** * mix_videoinitparams_ref: * @mix: object to add reference - * @returns: the MixVideoInitParams instance where reference count has been increased. + * @returns: the #MixVideoInitParams instance where reference count has been increased. * * Add reference count. */ @@ -125,13 +135,26 @@ MixVideoInitParams *mix_videoinitparams_ref (MixVideoInitParams * mix); /* Class Methods */ -/* - TO DO: Add documents -*/ +/** + * mix_videoinitparams_set_display: + * @obj: #MixVideoInitParams object + * @display: Pointer to a MixDisplay object such as MixDisplayX11 + * @returns: Common Video Error Return Codes + * + * Set MixDisplay object + */ MIX_RESULT mix_videoinitparams_set_display (MixVideoInitParams * obj, MixDisplay * display); +/** + * mix_videoinitparams_get_display: + * @obj: #MixVideoInitParams object + * @dislay: Pointer to pointer of a MixDisplay object such as MixDisplayX11 + * @returns: Common Video Error Return Codes + * + * Get MixDisplay object + */ MIX_RESULT mix_videoinitparams_get_display (MixVideoInitParams * obj, MixDisplay ** dislay); diff --git a/mix_video/src/mixvideorenderparams.c b/mix_video/src/mixvideorenderparams.c index 12a711c..9c47ddd 100644 --- a/mix_video/src/mixvideorenderparams.c +++ b/mix_video/src/mixvideorenderparams.c @@ -8,10 +8,12 @@ /** * SECTION:mixvideorenderparams - * @short_description: VideoRender parameters + * @short_description: MI-X Video Render Parameters * - * A data object which stores videorender specific parameters. + * The #MixVideoRenderParams object will be created by the MMF/App + * and provided to #MixVideo in the #MixVideo mix_video_render() function. 
*/ + #include /* libVA */ #include diff --git a/mix_video/src/mixvideorenderparams.h b/mix_video/src/mixvideorenderparams.h index f6148e7..e17136d 100644 --- a/mix_video/src/mixvideorenderparams.h +++ b/mix_video/src/mixvideorenderparams.h @@ -70,20 +70,44 @@ struct _MixVideoRenderParams { MixParams parent; /*< public > */ + + /* Pointer to a MixDisplay object + * such as MixDisplayX11 */ MixDisplay *display; + /* MixRect object to define offset, + * height and width of source image */ MixRect src_rect; + + /* MixRect object to define offset, + * height and width of the display + * destination */ MixRect dst_rect; + /* Array of clipping rectangles + * to be applied */ MixRect *clipping_rects; + + /* Number of clipping rectangles + * in clipping_rects */ guint number_of_clipping_rects; + /* Post processing parameters */ guint post_proc; + /* Reserved */ gpointer reserved; + + /* Reserved for future use */ gpointer reserved1; + + /* Reserved for future use */ gpointer reserved2; + + /* Reserved for future use */ gpointer reserved3; + + /* Reserved for future use */ gpointer reserved4; }; @@ -133,23 +157,97 @@ MixVideoRenderParams *mix_videorenderparams_ref(MixVideoRenderParams * mix); /* Class Methods */ +/** + * mix_videorenderparams_set_display: + * @obj: #MixVideoRenderParams object + * @display: #MixDisplay object + * @returns: Common Video Error Return Codes + * + * Set MixDisplay Object + */ MIX_RESULT mix_videorenderparams_set_display(MixVideoRenderParams * obj, MixDisplay * display); + +/** + * mix_videorenderparams_get_display: + * @obj: #MixVideoRenderParams object + * @display: pointer to #MixDisplay object + * @returns: Common Video Error Return Codes + * + * Get MixDisplay Object + */ MIX_RESULT mix_videorenderparams_get_display(MixVideoRenderParams * obj, MixDisplay ** display); +/** + * mix_videorenderparams_set_src_rect: + * @obj: #MixVideoRenderParams object + * @src_rect: MixRect object to define offset, height and width of source image + * @returns: Common Video Error Return Codes + * + * Set source rectangle + */ MIX_RESULT mix_videorenderparams_set_src_rect(MixVideoRenderParams * obj, MixRect src_rect); + +/** + * mix_videorenderparams_get_src_rect: + * @obj: #MixVideoRenderParams object + * @src_rect: Source rectangle to be returned + * @returns: Common Video Error Return Codes + * + * Get source rectangle + */ MIX_RESULT mix_videorenderparams_get_src_rect(MixVideoRenderParams * obj, MixRect * src_rect); +/** + * mix_videorenderparams_set_dest_rect: + * @obj: #MixVideoRenderParams object + * @dst_rect: MixRect object to define offset, height and width of the display destination + * @returns: Common Video Error Return Codes + * + * Set destination rectangle + */ MIX_RESULT mix_videorenderparams_set_dest_rect(MixVideoRenderParams * obj, MixRect dst_rect); + +/** + * mix_videorenderparams_get_dest_rect: + * @obj: #MixVideoRenderParams object + * @dst_rect: Destination rectangle to be returned + * @returns: Common Video Error Return Codes + * + * Get destination rectangle + */ MIX_RESULT mix_videorenderparams_get_dest_rect(MixVideoRenderParams * obj, MixRect * dst_rect); +/** + * mix_videorenderparams_set_clipping_rects: + * @obj: #MixVideoRenderParams object + * @clipping_rects: Array of clipping rectangles to be applied + * @number_of_clipping_rects: Number of clipping rectangles in clipping_rects + * @returns: Common Video Error Return Codes + * + * Set clipping rectangles + */ MIX_RESULT
mix_videorenderparams_set_clipping_rects(MixVideoRenderParams * obj, MixRect* clipping_rects, guint number_of_clipping_rects); + +/** + * mix_videorenderparams_get_clipping_rects: + * @obj: #MixVideoRenderParams object + * @clipping_rects: Array of clipping rectangles returned + * @number_of_clipping_rects: Number of clipping rectangles in clipping_rects returned + * @returns: Common Video Error Return Codes + * + * Get clipping rectangles + * + * + * DO NOT free clipping_rects! + * + */ MIX_RESULT mix_videorenderparams_get_clipping_rects(MixVideoRenderParams * obj, MixRect ** clipping_rects, guint* number_of_clipping_rects); -- cgit v1.2.3 From 4f46870396c4b236643287cf9319d979a31a8b5c Mon Sep 17 00:00:00 2001 From: Tao Tao Date: Tue, 22 Jun 2010 17:42:57 -0700 Subject: Update of MixVBP from main MI-X repo. MixVideo and MixVBP fixes for partial frame and multiple NAL handling to fix several decode issues. Color conversion fix for encode. Change-Id: Ide0888da73ce8c5a5d7f6039643cf4ed5b85cb45 --- .../viddec_fw/fw/codecs/h264/parser/h264parse.c | 15 +- .../fw/codecs/h264/parser/h264parse_bsd.c | 456 ++++---- .../fw/codecs/h264/parser/h264parse_dpb.c | 12 +- .../fw/codecs/h264/parser/h264parse_math.c | 164 +-- .../fw/codecs/h264/parser/h264parse_sei.c | 94 +- .../viddec_fw/fw/codecs/h264/parser/h264parse_sh.c | 1 + .../fw/codecs/h264/parser/viddec_h264_parse.c | 2 + .../fw/codecs/h264/parser/viddec_h264_workload.c | 237 +--- .../fw/codecs/mp4/parser/viddec_fw_mp4_workload.c | 20 +- .../fw/codecs/mp4/parser/viddec_mp4_parse.c | 32 +- .../fw/codecs/mp4/parser/viddec_mp4_parse.h | 13 +- .../fw/codecs/mp4/parser/viddec_mp4_shortheader.c | 269 ++++- .../mp4/parser/viddec_mp4_videoobjectlayer.c | 4 +- .../mp4/parser/viddec_mp4_videoobjectplane.c | 4 +- .../fw/codecs/mp4/parser/viddec_mp4_visualobject.c | 8 +- .../viddec_fw/fw/codecs/vc1/include/vc1common.h | 14 +- .../fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c | 14 + mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h | 4 +- mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c | 24 +- mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h | 276 ++--- .../fw/codecs/vc1/parser/vc1parse_bitplane.c | 73 +- .../viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c | 200 ++-- .../fw/codecs/vc1/parser/vc1parse_bpic_adv.c | 514 ++++----- .../fw/codecs/vc1/parser/vc1parse_common_defs.h | 21 +- .../fw/codecs/vc1/parser/vc1parse_huffman.c | 194 ++-- .../fw/codecs/vc1/parser/vc1parse_ipic_adv.c | 514 ++++----- .../fw/codecs/vc1/parser/vc1parse_mv_com.c | 164 +-- .../fw/codecs/vc1/parser/vc1parse_pic_com_adv.c | 808 +++++++------- .../viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c | 5 +- .../fw/codecs/vc1/parser/vc1parse_ppic_adv.c | 736 ++++++------- .../fw/codecs/vc1/parser/viddec_vc1_parse.c | 34 +- .../fw/codecs/vc1/parser/viddec_vc1_workload.c | 1153 ++++++++++++-------- mix_vbp/viddec_fw/fw/include/viddec_fw_version.h | 2 +- .../viddec_fw/fw/parser/include/viddec_emitter.h | 5 +- .../fw/parser/include/viddec_parser_ops.h | 8 +- .../fw/parser/include/viddec_pm_utils_bstream.h | 11 +- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 67 +- mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 14 +- mix_vbp/viddec_fw/fw/parser/vbp_trace.h | 12 +- mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 8 + mix_vbp/viddec_fw/fw/parser/vbp_utils.h | 4 + mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c | 3 +- mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c | 24 +- .../viddec_fw/fw/parser/viddec_pm_utils_bstream.c | 41 +- mix_vbp/viddec_fw/include/viddec_fw_common_defs.h | 3 + 
mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h | 77 +- mix_vbp/viddec_fw/include/viddec_fw_item_types.h | 152 ++- mix_video/src/mixframemanager.c | 87 +- mix_video/src/mixframemanager.h | 6 +- mix_video/src/mixvideoformat_h264.c | 38 + mix_video/src/mixvideoformatenc_h264.c | 43 +- 51 files changed, 3667 insertions(+), 3017 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c index a96285d..180e7b6 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c @@ -398,8 +398,8 @@ void h264_init_Info(h264_Info* pInfo) pInfo->Is_first_frame_in_stream =1; pInfo->img.frame_count = 0; - pInfo->last_I_frame_idc = 255; - + pInfo->last_I_frame_idc = 255; + return; } @@ -699,7 +699,7 @@ void h264_update_frame_type(h264_Info * pInfo ) { pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_I << FRAME_TYPE_FRAME_OFFSET); } - pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc; + pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc; break; default: @@ -735,6 +735,10 @@ void h264_update_frame_type(h264_Info * pInfo ) { pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); } + if (pInfo->sei_rp_received) + pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc; + else + pInfo->last_I_frame_idc = 255; break; default: break; @@ -768,6 +772,11 @@ void h264_update_frame_type(h264_Info * pInfo ) { pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); } + if (pInfo->sei_rp_received) + pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc + PUT_LIST_INDEX_FIELD_BIT(1); + else + pInfo->last_I_frame_idc = 255; + break; default: break; diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c index c4e00ee..dbbe5c6 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c @@ -1,228 +1,228 @@ -/* /////////////////////////////////////////////////////////////////////// -// -// INTEL CORPORATION PROPRIETARY INFORMATION -// This software is supplied under the terms of a license agreement or -// nondisclosure agreement with Intel Corporation and may not be copied -// or disclosed except in accordance with the terms of that agreement. -// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved. -// -// Description: h264 bistream decoding -// -///////////////////////////////////////////////////////////////////////*/ - - -#include "h264.h" -#include "h264parse.h" -#include "viddec_parser_ops.h" - - - - - -/** - get_codeNum :Get codenum based on sec 9.1 of H264 spec. - @param cxt : Buffer adress & size are part inputs, the cxt is updated - with codeNum & sign on sucess. - Assumption: codeNum is a max of 32 bits - - @retval 1 : Sucessfuly found a code num, cxt is updated with codeNum, sign, and size of code. - @retval 0 : Couldn't find a code in the current buffer. - be freed. 
-*/ - -uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo) -{ - int32_t leadingZeroBits= 0; - uint32_t temp = 0, match = 0, noOfBits = 0, count = 0; - uint32_t codeNum =0; - uint32_t bits_offset =0, byte_offset =0; - uint8_t is_emul =0; - uint8_t is_first_byte = 1; - uint32_t length =0; - uint32_t bits_need_add_in_first_byte =0; - int32_t bits_operation_result=0; - - //remove warning - pInfo = pInfo; - - ////// Step 1: parse through zero bits until we find a bit with value 1. - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - - - while(!match) - { - if ((bits_offset != 0) && ( is_first_byte == 1)) - { - //we handle byte at a time, if we have offset then for first - // byte handle only 8 - offset bits - noOfBits = (uint8_t)(8 - bits_offset); - bits_operation_result = viddec_pm_peek_bits(parent, &temp, noOfBits); - - - temp = (temp << bits_offset); - if(temp!=0) - { - bits_need_add_in_first_byte = bits_offset; - } - is_first_byte =0; - } - else - { - noOfBits = 8;/* always 8 bits as we read a byte at a time */ - bits_operation_result = viddec_pm_peek_bits(parent, &temp, 8); - - } - - if(-1==bits_operation_result) - { - return MAX_INT32_VALUE; - } - - if(temp != 0) - { - // if byte!=0 we have at least one bit with value 1. - count=1; - while(((temp & 0x80) != 0x80) && (count <= noOfBits)) - { - count++; - temp = temp <<1; - } - //At this point we get the bit position of 1 in current byte(count). - - match = 1; - leadingZeroBits += count; - } - else - { - // we don't have a 1 in current byte - leadingZeroBits += noOfBits; - } - - if(!match) - { - //actually move the bitoff by viddec_pm_get_bits - viddec_pm_get_bits(parent, &temp, noOfBits); - } - else - { - //actually move the bitoff by viddec_pm_get_bits - viddec_pm_get_bits(parent, &temp, count); - } - - } - ////// step 2: Now read the next (leadingZeroBits-1) bits to get the encoded value. - - - if(match) - { - - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - /* bit position in current byte */ - //count = (uint8_t)((leadingZeroBits + bits_offset)& 0x7); - count = ((count + bits_need_add_in_first_byte)& 0x7); - - leadingZeroBits --; - length = leadingZeroBits; - codeNum = 0; - noOfBits = 8 - count; - - - while(leadingZeroBits > 0) - { - if(noOfBits < (uint32_t)leadingZeroBits) - { - viddec_pm_get_bits(parent, &temp, noOfBits); - - - codeNum = (codeNum << noOfBits) | temp; - leadingZeroBits -= noOfBits; - } - else - { - viddec_pm_get_bits(parent, &temp, leadingZeroBits); - - codeNum = (codeNum << leadingZeroBits) | temp; - leadingZeroBits = 0; - } - - - noOfBits = 8; - } - // update codeNum = 2 ** (leadingZeroBits) -1 + read_bits(leadingZeroBits). 
- codeNum = codeNum + (1 << length) -1; - - } - - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - if(bits_offset!=0) - { - viddec_pm_peek_bits(parent, &temp, 8-bits_offset); - } - - return codeNum; -} - - -/*---------------------------------------*/ -/*---------------------------------------*/ -int32_t h264_GetVLCElement(void *parent, h264_Info* pInfo, uint8_t bIsSigned) -{ - int32_t sval = 0; - signed char sign; - - sval = h264_get_codeNum(parent , pInfo); - - if(bIsSigned) //get signed integer golomb code else the value is unsigned - { - sign = (sval & 0x1)?1:-1; - sval = (sval +1) >> 1; - sval = sval * sign; - } - - return sval; -} // Ipp32s H264Bitstream::GetVLCElement(bool bIsSigned) - -/// -/// Check whether more RBSP data left in current NAL -/// -uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo) -{ - uint8_t cnt = 0; - - uint8_t is_emul =0; - uint8_t cur_byte = 0; - int32_t shift_bits =0; - uint32_t ctr_bit = 0; - uint32_t bits_offset =0, byte_offset =0; - - //remove warning - pInfo = pInfo; - - if (!viddec_pm_is_nomoredata(parent)) - return 1; - - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - - shift_bits = 7-bits_offset; - - // read one byte - viddec_pm_get_cur_byte(parent, &cur_byte); - - ctr_bit = ((cur_byte)>> (shift_bits--)) & 0x01; - - // a stop bit has to be one - if (ctr_bit==0) - return 1; - - while (shift_bits>=0 && !cnt) - { - cnt |= (((cur_byte)>> (shift_bits--)) & 0x01); // set up control bit - } - - return (cnt); -} - - - -///////////// EOF///////////////////// - +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved. +// +// Description: h264 bitstream decoding +// +///////////////////////////////////////////////////////////////////////*/ + + +#include "h264.h" +#include "h264parse.h" +#include "viddec_parser_ops.h" + + + + + +/** + get_codeNum : Get codeNum based on sec 9.1 of H264 spec. + @param cxt : Buffer address & size are part inputs, the cxt is updated + with codeNum & sign on success. + Assumption: codeNum is a max of 32 bits + + @retval 1 : Successfully found a code num, cxt is updated with codeNum, sign, and size of code. + @retval 0 : Couldn't find a code in the current buffer. + be freed. +*/ + +uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo) +{ + int32_t leadingZeroBits= 0; + uint32_t temp = 0, match = 0, noOfBits = 0, count = 0; + uint32_t codeNum =0; + uint32_t bits_offset =0, byte_offset =0; + uint8_t is_emul =0; + uint8_t is_first_byte = 1; + uint32_t length =0; + uint32_t bits_need_add_in_first_byte =0; + int32_t bits_operation_result=0; + + //remove warning + pInfo = pInfo; + + ////// Step 1: parse through zero bits until we find a bit with value 1.
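+ // Added commentary (not in the original source): this implements the Exp-Golomb + // decoding of section 9.1 of the H.264 spec. For example, for the bit string + // "00101", step 1 leaves leadingZeroBits = 3 (the terminating '1' bit is counted + // here and subtracted again in step 2); step 2 then reads leadingZeroBits - 1 = 2 + // suffix bits ("01" = 1), giving codeNum = 2^2 - 1 + 1 = 4.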
+ viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + + while(!match) + { + if ((bits_offset != 0) && ( is_first_byte == 1)) + { + //we handle byte at a time, if we have offset then for first + // byte handle only 8 - offset bits + noOfBits = (uint8_t)(8 - bits_offset); + bits_operation_result = viddec_pm_peek_bits(parent, &temp, noOfBits); + + + temp = (temp << bits_offset); + if(temp!=0) + { + bits_need_add_in_first_byte = bits_offset; + } + is_first_byte =0; + } + else + { + noOfBits = 8;/* always 8 bits as we read a byte at a time */ + bits_operation_result = viddec_pm_peek_bits(parent, &temp, 8); + + } + + if(-1==bits_operation_result) + { + return MAX_INT32_VALUE; + } + + if(temp != 0) + { + // if byte!=0 we have at least one bit with value 1. + count=1; + while(((temp & 0x80) != 0x80) && (count <= noOfBits)) + { + count++; + temp = temp <<1; + } + //At this point we get the bit position of 1 in current byte(count). + + match = 1; + leadingZeroBits += count; + } + else + { + // we don't have a 1 in current byte + leadingZeroBits += noOfBits; + } + + if(!match) + { + //actually move the bitoff by viddec_pm_get_bits + viddec_pm_get_bits(parent, &temp, noOfBits); + } + else + { + //actually move the bitoff by viddec_pm_get_bits + viddec_pm_get_bits(parent, &temp, count); + } + + } + ////// step 2: Now read the next (leadingZeroBits-1) bits to get the encoded value. + + + if(match) + { + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + /* bit position in current byte */ + //count = (uint8_t)((leadingZeroBits + bits_offset)& 0x7); + count = ((count + bits_need_add_in_first_byte)& 0x7); + + leadingZeroBits --; + length = leadingZeroBits; + codeNum = 0; + noOfBits = 8 - count; + + + while(leadingZeroBits > 0) + { + if(noOfBits < (uint32_t)leadingZeroBits) + { + viddec_pm_get_bits(parent, &temp, noOfBits); + + + codeNum = (codeNum << noOfBits) | temp; + leadingZeroBits -= noOfBits; + } + else + { + viddec_pm_get_bits(parent, &temp, leadingZeroBits); + + codeNum = (codeNum << leadingZeroBits) | temp; + leadingZeroBits = 0; + } + + + noOfBits = 8; + } + // update codeNum = 2 ** (leadingZeroBits) -1 + read_bits(leadingZeroBits). 
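+ // Added note: 'length' saved the decremented leadingZeroBits before the loop + // above consumed it, so (1 << length) - 1 is exactly the 2 ** (leadingZeroBits) - 1 + // term of the formula in the comment above.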
+ codeNum = codeNum + (1 << length) -1; + + } + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + if(bits_offset!=0) + { + viddec_pm_peek_bits(parent, &temp, 8-bits_offset); + } + + return codeNum; +} + + +/*---------------------------------------*/ +/*---------------------------------------*/ +int32_t h264_GetVLCElement(void *parent, h264_Info* pInfo, uint8_t bIsSigned) +{ + int32_t sval = 0; + signed char sign; + + sval = h264_get_codeNum(parent , pInfo); + + if(bIsSigned) //get signed integer golomb code else the value is unsigned + { + sign = (sval & 0x1)?1:-1; + sval = (sval +1) >> 1; + sval = sval * sign; + } + + return sval; +} // Ipp32s H264Bitstream::GetVLCElement(bool bIsSigned) + +/// +/// Check whether more RBSP data left in current NAL +/// +uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo) +{ + uint8_t cnt = 0; + + uint8_t is_emul =0; + uint8_t cur_byte = 0; + int32_t shift_bits =0; + uint32_t ctr_bit = 0; + uint32_t bits_offset =0, byte_offset =0; + + //remove warning + pInfo = pInfo; + + if (!viddec_pm_is_nomoredata(parent)) + return 1; + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + shift_bits = 7-bits_offset; + + // read one byte + viddec_pm_get_cur_byte(parent, &cur_byte); + + ctr_bit = ((cur_byte)>> (shift_bits--)) & 0x01; + + // a stop bit has to be one + if (ctr_bit==0) + return 1; + + while (shift_bits>=0 && !cnt) + { + cnt |= (((cur_byte)>> (shift_bits--)) & 0x01); // set up control bit + } + + return (cnt); +} + + + +///////////// EOF///////////////////// + diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c index d1b693b..5ad9d09 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c @@ -1270,7 +1270,8 @@ void h264_dpb_RP_check_list (h264_Info * pInfo) int32_t idx, rp_found = 0; - if(pInfo->SliceHeader.num_ref_idx_l0_active == 1) + if( ((pInfo->SliceHeader.num_ref_idx_l0_active == 1)&&(pInfo->SliceHeader.structure == FRAME)) || + ((pInfo->SliceHeader.num_ref_idx_l0_active == 2)&&(pInfo->SliceHeader.structure != FRAME)) ) { if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) { @@ -1329,6 +1330,7 @@ void h264_dpb_RP_check_list (h264_Info * pInfo) } } #endif + ///// Set the reference to last I frame if( (pInfo->last_I_frame_idc!=255)&&(pInfo->last_I_frame_idc!=p_list[0])) { @@ -1336,8 +1338,9 @@ void h264_dpb_RP_check_list (h264_Info * pInfo) h264_dpb_unmark_for_reference(p_dpb, p_list[0]); h264_dpb_remove_ref_list(p_dpb, p_list[0]); p_list[0] = pInfo->last_I_frame_idc; + if (pInfo->SliceHeader.structure != FRAME) + p_list[1] = (pInfo->last_I_frame_idc ^ 0x20); } - } } @@ -1398,7 +1401,10 @@ void h264_dpb_reorder_lists(h264_Info * pInfo) } //// Check if need recover reference list with previous recovery point - h264_dpb_RP_check_list(pInfo); + if(!pInfo->img.second_field) + { + h264_dpb_RP_check_list(pInfo); + } return; diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c index b5df6d9..2793dbd 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c @@ -1,82 +1,82 @@ -//#include "math.h" -// Arithmatic functions using add & subtract - -unsigned long mult_u(register unsigned long var1, register unsigned long var2) -{ - - register unsigned long var_out = 0; - - while (var2 > 0) - { - - if (var2 & 
0x01) - { - var_out += var1; - } - var2 >>= 1; - var1 <<= 1; - } - return var_out; - -}// mult_u - -unsigned long ldiv_mod_u(register unsigned long a, register unsigned long b, unsigned long * mod) -{ - register unsigned long div = b; - register unsigned long res = 0; - register unsigned long bit = 0x1; - - if (!div) - { - *mod = 0; - return 0xffffffff ; // Div by 0 - } - - if (a < b) - { - *mod = a; - return 0; // It won't even go once - } - - while(!(div & 0x80000000)) - { - div <<= 1; - bit <<= 1; - } - - while (bit) - { - if (div <= a) - { - res |= bit; - a -= div; - } - div >>= 1; - bit >>= 1; - } - *mod = a; - return res; -}// ldiv_mod_u - - -unsigned ldiv_u(register unsigned a, register unsigned b) -{ - register unsigned div = b << 16; - register unsigned res = 0; - register unsigned bit = 0x10000; - - while (bit) - { - div >>= 1; - bit >>= 1; - if (div < a) - { - res |= bit; - a -= div; - } - } - - return res; -} - - +//#include "math.h" +// Arithmatic functions using add & subtract + +unsigned long mult_u(register unsigned long var1, register unsigned long var2) +{ + + register unsigned long var_out = 0; + + while (var2 > 0) + { + + if (var2 & 0x01) + { + var_out += var1; + } + var2 >>= 1; + var1 <<= 1; + } + return var_out; + +}// mult_u + +unsigned long ldiv_mod_u(register unsigned long a, register unsigned long b, unsigned long * mod) +{ + register unsigned long div = b; + register unsigned long res = 0; + register unsigned long bit = 0x1; + + if (!div) + { + *mod = 0; + return 0xffffffff ; // Div by 0 + } + + if (a < b) + { + *mod = a; + return 0; // It won't even go once + } + + while(!(div & 0x80000000)) + { + div <<= 1; + bit <<= 1; + } + + while (bit) + { + if (div <= a) + { + res |= bit; + a -= div; + } + div >>= 1; + bit >>= 1; + } + *mod = a; + return res; +}// ldiv_mod_u + + +unsigned ldiv_u(register unsigned a, register unsigned b) +{ + register unsigned div = b << 16; + register unsigned res = 0; + register unsigned bit = 0x10000; + + while (bit) + { + div >>= 1; + bit >>= 1; + if (div < a) + { + res |= bit; + a -= div; + } + } + + return res; +} + + diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c index 829eb55..eadd6cc 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c @@ -150,11 +150,7 @@ h264_Status h264_sei_pic_timing(void *parent,h264_Info* pInfo) #ifndef VBP //Push to current if we are in first frame, or we do not detect previous frame end - if( (pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done) ) { - viddec_pm_append_workitem( parent, &wi ); - } else { - viddec_pm_append_workitem_next( parent, &wi ); - } + viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); #endif if(sei_msg_ptr->pic_struct < 3) { @@ -296,17 +292,10 @@ h264_Status h264_sei_pan_scan(void *parent,h264_Info* pInfo) sei_msg_ptr->pan_scan_rect_repetition_period = h264_GetVLCElement(parent, pInfo, false); wi.h264_sei_pan_scan.pan_scan_rect_repetition_period = sei_msg_ptr->pan_scan_rect_repetition_period; } - -#ifndef VBP - if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } -#endif +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream 
||(!pInfo->is_current_workload_done))); +#endif if(!sei_msg_ptr->pan_scan_rect_cancel_flag) { @@ -321,14 +310,10 @@ h264_Status h264_sei_pan_scan(void *parent,h264_Info* pInfo) viddec_fw_h264_pan_scan_set_right(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_right_offset[i]); viddec_fw_h264_pan_scan_set_top(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_top_offset[i]); viddec_fw_h264_pan_scan_set_bottom(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_bottom_offset[i]); - -#ifndef VBP - if(pInfo->Is_first_frame_in_stream) { //cur is first frame - viddec_pm_append_workitem( parent, &wi ); - } else { - viddec_pm_append_workitem_next( parent, &wi ); - } -#endif +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); +#endif } } @@ -402,14 +387,8 @@ h264_Status h264_sei_userdata_reg(void *parent,h264_Info* pInfo, uint32_t payloa { viddec_pm_setup_userdata(&wi); #ifndef VBP - if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); #endif wi.user_data.size =0; } @@ -420,16 +399,9 @@ h264_Status h264_sei_userdata_reg(void *parent,h264_Info* pInfo, uint32_t payloa if(0!=wi.user_data.size) { viddec_pm_setup_userdata(&wi); - -#ifndef VBP - if(pInfo->Is_first_frame_in_stream) //cur is first frame - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); #endif } @@ -474,15 +446,10 @@ h264_Status h264_sei_userdata_unreg(void *parent, h264_Info* pInfo, uint32_t pay if(11 == wi.user_data.size) { viddec_pm_setup_userdata(&wi); - if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } - +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); +#endif wi.user_data.size =0; } } @@ -490,14 +457,10 @@ h264_Status h264_sei_userdata_unreg(void *parent, h264_Info* pInfo, uint32_t pay if(0!=wi.user_data.size) { viddec_pm_setup_userdata(&wi); - if(pInfo->Is_first_frame_in_stream) //cur is first frame - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); +#endif } return H264_STATUS_OK; @@ -549,15 +512,10 @@ h264_Status h264_sei_recovery_point(void *parent, h264_Info* pInfo) viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->exact_match_flag); viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->broken_link_flag); wi.h264_sei_recovery_point.changing_slice_group_idc = sei_msg_ptr->changing_slice_group_idc; - - if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } +#ifndef VBP + //cur is first frame + 
viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); +#endif return H264_STATUS_OK; } diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c index 3134ae0..29340ac 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c @@ -728,6 +728,7 @@ h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent, h264_Info* pInfo,h264_S } } + SliceHeader->sh_dec_refpic.dec_ref_pic_marking_count = i; diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c index 9388d81..a763d00 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c @@ -382,6 +382,8 @@ dump_slice_header(pInfo, &next_SliceHeader); h264_parse_emit_eos(parent, pInfo); h264_init_dpb(&(pInfo->dpb)); + + pInfo->is_current_workload_done=1; /* picture level info which will always be initialized */ //h264_init_Info_under_sps_pps_level(pInfo); diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c index 4fc2f1a..2faa136 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c @@ -317,16 +317,8 @@ static void h264_parse_emit_4X4_scaling_matrix( void *parent, h264_Info *pInfo ) (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+5]))<<8)+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+6]))<<16)+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+7]))<<24); - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } - + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } break; @@ -344,15 +336,8 @@ static void h264_parse_emit_4X4_scaling_matrix( void *parent, h264_Info *pInfo ) (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+5]))<<8)+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+6]))<<16)+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+7]))<<24); - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } break; @@ -363,14 +348,8 @@ static void h264_parse_emit_4X4_scaling_matrix( void *parent, h264_Info *pInfo ) wi.data.data_offset = i + (DEFAULT_QM << 4); wi.data.data_payload[0] = 0; wi.data.data_payload[1] = 0; - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); break; } default: @@ -448,12 +427,8 @@ static void h264_parse_emit_8X8_scaling_matrix( void *parent, h264_Info *pInfo ) (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+7]))<<24); - - if(pInfo->push_to_cur) { //cur is empty, 
fill new frame in cur - viddec_pm_append_workitem( parent, &wi ); - } else { - viddec_pm_append_workitem_next( parent, &wi ); - } + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } break; } @@ -470,14 +445,9 @@ static void h264_parse_emit_8X8_scaling_matrix( void *parent, h264_Info *pInfo ) (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+7]))<<24); - - if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi ); - } else { - viddec_pm_append_workitem_next( parent, &wi ); - } + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } - break; } case (DEFAULT_QM): @@ -485,12 +455,8 @@ static void h264_parse_emit_8X8_scaling_matrix( void *parent, h264_Info *pInfo ) wi.data.data_offset = i + (DEFAULT_QM << 4); wi.data.data_payload[0] = 0; wi.data.data_payload[1] = 0; - if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi ); - } else { - viddec_pm_append_workitem_next( parent, &wi ); - } - + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); break; } default:{ @@ -592,15 +558,9 @@ static void h264_parse_emit_sps(void *parent, h264_Info *pInfo) viddec_fw_h264_sps_set_vui_parameters_present_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.vui_parameters_present_flag); wi.h264_sps.pic_width_in_mbs_minus1 = pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1; wi.h264_sps.pic_height_in_map_units_minus1 = pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } + + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); viddec_fw_reset_workload_item(&wi); if(pInfo->active_SPS.sps_disp.frame_cropping_flag) @@ -610,15 +570,8 @@ static void h264_parse_emit_sps(void *parent, h264_Info *pInfo) viddec_fw_h264_cropping_set_right(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_right_offset); viddec_fw_h264_cropping_set_top(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_top_offset); viddec_fw_h264_cropping_set_bottom(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset); - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } viddec_fw_reset_workload_item(&wi); if(pInfo->active_SPS.sps_disp.vui_parameters_present_flag == 1) @@ -664,14 +617,8 @@ static void h264_parse_emit_sps(void *parent, h264_Info *pInfo) viddec_fw_h264_vui_set_low_delay_hrd_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.low_delay_hrd_flag); } - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } viddec_fw_reset_workload_item(&wi); @@ -682,17 +629,9 @@ static void 
h264_parse_emit_sps(void *parent, h264_Info *pInfo) wi.h264_vui_time_info.num_units_in_tick = pInfo->active_SPS.sps_disp.vui_seq_parameters.num_units_in_tick; wi.h264_vui_time_info.time_scale = pInfo->active_SPS.sps_disp.vui_seq_parameters.time_scale; - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } - } - - + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } pInfo->Is_SPS_updated =0; } @@ -795,14 +734,8 @@ static void h264_parse_emit_ref_list( void *parent, h264_Info *pInfo, uint32_t l if(data_writed&0x1) { - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } data_writed ++; } @@ -896,15 +829,8 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1; wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2; - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent , &wi); - } - else - { - viddec_pm_append_workitem_next( parent , &wi); - } - + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); ///////////////////////////predict weight table item and data if have/////////////////////////// if(pInfo->h264_pwt_enabled) @@ -916,7 +842,7 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) if(pInfo->push_to_cur) //cur is empty, fill new frame in cur { - viddec_pm_append_workitem( parent , &wi); + viddec_pm_append_workitem( parent , &wi, false); wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; wi.es.es_flags = 0; @@ -924,7 +850,7 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) } else { - viddec_pm_append_workitem_next( parent , &wi); + viddec_pm_append_workitem( parent , &wi, true); wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; wi.es.es_flags = 0; @@ -951,13 +877,8 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) wi.data.data_offset = bits_offset; wi.data.data_payload[0]=0; wi.data.data_payload[1]=0; - - if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent , &wi); - } - else { - viddec_pm_append_workitem_next( parent , &wi); - } + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } } @@ -1004,16 +925,8 @@ void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) wi.data.data_payload[0] = pl[0]; wi.data.data_payload[1] = pl[1]; pl += 2; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } return; @@ -1059,15 +972,8 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i]; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } + //cur is 
empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } pInfo->dpb.frame_numbers_need_to_be_displayed =0; @@ -1081,16 +987,8 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i]; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } - + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } pInfo->dpb.frame_numbers_need_to_be_removed =0; @@ -1103,16 +1001,8 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i]; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } - + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } pInfo->dpb.frame_numbers_need_to_be_dropped =0; @@ -1128,19 +1018,11 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) wi.ref_frame.reference_id = fs_id; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } } - /////////////////////updata dpb frames info (poc)///////////////////// nitems = pInfo->dpb.used_size; for(i=0; ipush_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } - + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } } @@ -1226,16 +1099,9 @@ void h264_parse_emit_eos( void *parent, h264_Info *pInfo ) wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - //// //// Now we can flush out all frames in DPB fro display if(MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc) @@ -1260,15 +1126,8 @@ void h264_parse_emit_eos( void *parent, h264_Info *pInfo ) wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i]; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi ); - } - else - { - viddec_pm_append_workitem_next( parent, &wi ); - } + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } pInfo->dpb.frame_numbers_need_to_be_displayed =0; @@ -1285,12 +1144,12 @@ void h264_parse_emit_eos( void *parent, h264_Info *pInfo ) if(pInfo->push_to_cur) //cur is empty, fill new frame in cur { - viddec_pm_append_workitem( parent, &wi ); + viddec_pm_append_workitem( parent, &wi , false); 
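+ // Added note: the new boolean argument replaces the old viddec_pm_append_workitem() / + // viddec_pm_append_workitem_next() split used throughout this patch; false appends + // the item to the current workload, true (in the else branch below) to the next one.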
viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE); } else { - viddec_pm_append_workitem_next( parent, &wi ); + viddec_pm_append_workitem( parent, &wi , true); viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next); } } diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c index 5632728..4a53b8b 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c @@ -2,7 +2,9 @@ #include "viddec_parser_ops.h" #include "viddec_fw_mp4.h" #include "viddec_mp4_parse.h" +#ifdef ANDROID #include "viddec_types.h" +#endif uint32_t viddec_fw_mp4_populate_attr(viddec_workload_t *wl, viddec_mp4_parser_t *parser) { @@ -76,7 +78,7 @@ uint32_t viddec_fw_mp4_insert_vol_workitem(void *parent, viddec_mp4_parser_t *pa wi.vwi_payload[1] = vol_info.vol_size; wi.vwi_payload[2] = vol_info.vol_item; - result = viddec_pm_append_workitem(parent, &wi); + result = viddec_pm_append_workitem(parent, &wi, false); return result; } // viddec_fw_mp4_insert_vol_workitem @@ -125,7 +127,7 @@ uint32_t viddec_fw_mp4_insert_vop_workitem(void *parent, viddec_mp4_parser_t *pa wi.vwi_payload[1] = vop_info.vop_data; wi.vwi_payload[2] = vop_info.bit_offset; - result = viddec_pm_append_workitem(parent, &wi); + result = viddec_pm_append_workitem(parent, &wi, false); return result; } // viddec_fw_mp4_insert_vop_workitem @@ -150,7 +152,7 @@ uint32_t viddec_fw_mp4_insert_vpsh_workitem(void *parent, viddec_mp4_parser_t *p wi.vwi_payload[1] = svh_info.pad1; wi.vwi_payload[2] = svh_info.pad2; - result = viddec_pm_append_workitem(parent, &wi); + result = viddec_pm_append_workitem(parent, &wi, false); return result; } // viddec_fw_mp4_insert_vpsh_workitem @@ -196,7 +198,7 @@ uint32_t viddec_fw_mp4_insert_sprite_workitem(void *parent, viddec_mp4_parser_t wi.vwi_payload[1] = sprite_info.warping_mv_code[1]; wi.vwi_payload[2] = sprite_info.warping_mv_code[2]; - result = viddec_pm_append_workitem(parent, &wi); + result = viddec_pm_append_workitem(parent, &wi, false); } return result; @@ -213,7 +215,7 @@ uint32_t viddec_fw_mp4_insert_bvop_workitem(void *parent, viddec_mp4_parser_t *p wi.vwi_payload[1] = vol->TRD; wi.vwi_payload[2] = vol->TRB; - result = viddec_pm_append_workitem(parent, &wi); + result = viddec_pm_append_workitem(parent, &wi, false); return result; } // viddec_fw_mp4_insert_bvop_workitem @@ -251,7 +253,7 @@ uint32_t viddec_fw_mp4_insert_qmat(void *parent, uint8_t intra_quant_flag, uint3 qmat += 3; - result = viddec_pm_append_workitem(parent, &wi); + result = viddec_pm_append_workitem(parent, &wi, false); } return result; @@ -283,7 +285,7 @@ uint32_t viddec_fw_mp4_insert_past_frame_workitem(void *parent) wi.ref_frame.reference_id = 0; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; - result = viddec_pm_append_workitem(parent, &wi); + result = viddec_pm_append_workitem(parent, &wi, false); return result; } // viddec_fw_mp4_insert_past_frame_workitem @@ -297,7 +299,7 @@ uint32_t viddec_fw_mp4_insert_future_frame_workitem(void *parent) wi.ref_frame.reference_id = 0; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; - result = viddec_pm_append_workitem(parent, &wi); + result = viddec_pm_append_workitem(parent, &wi, false); return result; } // viddec_fw_mp4_insert_future_frame_workitem @@ -313,7 +315,7 @@ uint32_t viddec_fw_mp4_insert_reorder_workitem(void *parent) 
wi.ref_reorder.ref_reorder_00010203 = 0x01010203; wi.ref_reorder.ref_reorder_04050607 = 0x04050607; - result = viddec_pm_append_workitem(parent, &wi); + result = viddec_pm_append_workitem(parent, &wi, false); return result; } // viddec_fw_mp4_insert_reorder_workitem diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c index 17db475..6a4a8ac 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c @@ -6,7 +6,9 @@ #include "viddec_mp4_videoobjectlayer.h" #include "viddec_mp4_videoobjectplane.h" #include "viddec_mp4_visualobject.h" +#ifdef ANDROID #include "viddec_types.h" +#endif extern uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state); @@ -23,7 +25,7 @@ uint32_t viddec_mp4_wkld_done(void *parent, void *ctxt, uint32_t next_sc, uint32 viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt; int result = VIDDEC_PARSE_SUCESS; uint8_t frame_boundary = false; - uint8_t force_frame_complete = false; + uint8_t emit_workload = false; //DEB("entering is_wkld_done: next_sc: 0x%x, sc_seen: %d\n", next_sc, parser->sc_seen); @@ -38,15 +40,35 @@ uint32_t viddec_mp4_wkld_done(void *parent, void *ctxt, uint32_t next_sc, uint32 ((SHORT_THIRD_STARTCODE_BYTE & 0xFC) == (next_sc & 0xFC)) || (MP4_SC_GROUP_OF_VOP == next_sc) ); + // Mark workload is ready to be emitted based on the start codes seen. + if (frame_boundary) + { + uint8_t vol_error_found = false, frame_complete = false; + + // Frame is considered complete and without errors, if a VOL was received since startup and + // if a VOP was received for this workload (or) if short video header is found. + frame_complete = ( ((parser->sc_seen & MP4_SC_SEEN_VOL) && (parser->sc_seen & MP4_SC_SEEN_VOP)) || + (parser->sc_seen & MP4_SC_SEEN_SVH) ); + + // For non SVH case, the video object layer data should be followed by video object plane data + // If not, error occurred and we need to throw the current workload as error. + vol_error_found = ( (parser->prev_sc == MP4_SC_VIDEO_OBJECT_LAYER_MIN) && + !(MP4_SC_VIDEO_OBJECT_PLANE == next_sc) ); + + emit_workload = (frame_complete || vol_error_found); + + //DEB("emit workload: frame_complete: %d, vol_error_found %d\n", frame_complete, vol_error_found); + } + // EOS and discontinuity should force workload completion. - force_frame_complete = ((VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc)); + emit_workload |= ((VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc)); - if(frame_boundary | force_frame_complete) + if(emit_workload) { *codec_specific_errors = 0; - // Frame is considered complete and without errors, if a VOL was received since startup and - // if a VOP was received for this workload. + // If the frame is not complete but we have received force frame complete because of EOS or + // discontinuity, we mark the workload as not decodeable. 
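+ // Added summary (not in the original source): the net effect of the logic above is + // emit_workload = (frame_boundary && (frame_complete || vol_error_found)) + // || EOS || discontinuity; + // and VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE is flagged below whenever a workload is + // emitted without a complete (VOL + VOP, or SVH) frame having been seen.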
if (!((parser->sc_seen & MP4_SC_SEEN_VOL) && (parser->sc_seen & MP4_SC_SEEN_VOP)) && !(parser->sc_seen & MP4_SC_SEEN_SVH)) *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h index 12447a4..e9f8bbf 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h @@ -299,13 +299,18 @@ typedef struct _mp4_VideoObjectPlaneH263 uint16_t num_gobs_in_vop; uint16_t num_macroblocks_in_gob; uint8_t num_rows_in_gob; - #if 0 uint8_t gob_number; - int gob_header_empty; - int gob_frame_id; - int quant_scale; + int gob_header_empty; + int gob_frame_id; + int quant_scale; #endif + uint8_t vop_rounding_type; + //the following are required for PLUSPTYPE + uint8_t ufep; + uint16_t pixel_aspect_ratio_code; + uint16_t picture_width_indication; + uint16_t picture_height_indication; } mp4_VideoObjectPlaneH263; typedef struct diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c index 7e17984..7603cd7 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c @@ -1,5 +1,7 @@ #include "viddec_mp4_shortheader.h" +#ifdef ANDROID #include "viddec_types.h" +#endif typedef struct { @@ -25,37 +27,222 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p unsigned int data; mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263); int32_t getbits = 0; + uint8_t pei = 0; do { - getbits = viddec_pm_get_bits(parent, &data, 27); + //temporal reference + getbits = viddec_pm_get_bits(parent, &data, 8); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->temporal_reference = (data & 0xff); + //marker bit + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if( 1 != (data & 0x1)) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + //zero bit + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if( 0 != (data & 0x1)) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + //split_screen_indicator, document_camera_indicator, full_picture_freeze_release + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + //source format + getbits = viddec_pm_get_bits(parent, &data, 3); BREAK_GETBITS_REQD_MISSING(getbits, ret); - - data = data >> 1; // zero_bit - svh->vop_quant = (data & 0x1F); - data = data >> 9; // vop_quant + four_reserved_zero_bits - svh->picture_coding_type = (data & 0x1); - data = data >> 1; // vop_quant + four_reserved_zero_bits svh->source_format = (data & 0x7); - data = data >> 8; // source_format + full_picture_freeze_release + document_camera_indicator + split_screen_indicator + zero_bit + marker_bit - svh->temporal_reference = data; - - if (svh->source_format == 0 || svh->source_format > 5) + if (svh->source_format == 0 || svh->source_format == 6) { DEB("Error: Bad value for VideoPlaneWithShortHeader.source_format\n"); ret = MP4_STATUS_NOTSUPPORT; break; } - - for (;;) + + if(svh->source_format != 7) { - getbits = viddec_pm_get_bits(parent, &data, 1); // pei - BREAK_GETBITS_FAIL(getbits, ret); - if (!data) + //picture coding type + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->picture_coding_type = 
(data & 0x1); + //reserved zero bits + getbits = viddec_pm_get_bits(parent, &data, 4); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if( 0 != (data & 0xf)) + { + ret = MP4_STATUS_NOTSUPPORT; break; - getbits = viddec_pm_get_bits(parent, &data, 8); // psupp - BREAK_GETBITS_FAIL(getbits, ret); + } + //vop quant + getbits = viddec_pm_get_bits(parent, &data, 5); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->vop_quant = (data & 0x1f); + //zero bit + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if( 0 != (data & 0x1)) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } } + else //extended PTYPE (PLUSPTYPE) + { + //ufep + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->ufep = (data & 0x7); //ufep + if(svh->ufep == 0x0) + { + DEB("Info: don't support to handle the 0x000 case of Update Full Extended PTYPE\n"); + ret = MP4_STATUS_NOTSUPPORT; + break; + } + else if (svh->ufep == 0x1) + { + //source format + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->source_format = (data & 0x7); + if(svh->source_format < 1 || svh->source_format > 6) + { + DEB("Error: bad value of source_format\n"); + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //optional indicators + getbits = viddec_pm_get_bits(parent, &data, 8); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if( 0 != (data & 0xff)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //reserved zero bits + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if( 0 != (data & 0x7)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //marker bit + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if( 1 != (data & 0x1)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //reserved zero bits + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if( 0 != (data & 0x7)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + } + else + { + DEB("Info: don't support to handle the other case of Update Full Extended PTYPE\n"); + ret = MP4_STATUS_NOTSUPPORT; + break; + } + //MPPTYPE + //picture coding type + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->picture_coding_type = (data & 0x7); + if(svh->picture_coding_type > 1) + { + DEB("Info: only support I and P frames\n"); + ret = MP4_STATUS_NOTSUPPORT; + break; + } + //optional RPR mode + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if( 0 != (data & 0x1)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //optional PRU mode + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if( 0 != (data & 0x1)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //vop rounding type + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->vop_rounding_type = (data & 0x1); + //reserved zero bits + getbits = viddec_pm_get_bits(parent, &data, 2); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if( 0 != (data & 0x3)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //marker bit + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if( 1 != (data & 0x1)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //cpm + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if(svh->ufep == 1 && svh->source_format == 
6) + { //CPFMT + getbits = viddec_pm_get_bits(parent, &data, 4); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->pixel_aspect_ratio_code = (data & 0xf); + // + getbits = viddec_pm_get_bits(parent, &data, 9); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->picture_width_indication = (data & 0x1ff); + //marker bit + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if( 1 != (data & 0x1)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + // + getbits = viddec_pm_get_bits(parent, &data, 9); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->picture_height_indication = (data & 0x1ff); + } + viddec_pm_get_bits(parent, &data, 5); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->vop_quant = (data & 0x1f); + } + //PEI + do + { + getbits = viddec_pm_get_bits(parent, &data, 1); // pei + BREAK_GETBITS_FAIL(getbits, ret); + pei = (data & 0x1); + if(0 != pei) + { + getbits = viddec_pm_get_bits(parent, &data, 8); // psupp + BREAK_GETBITS_FAIL(getbits, ret); + } + }while( 1 == pei); // Anything after this needs to be fed to the decoder as PIXEL_ES } while(0); @@ -72,6 +259,7 @@ mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser mp4_VideoObjectPlane_t *vop = &(pInfo->VisualObject.VideoObject.VideoObjectPlane); mp4_VideoObjectPlaneH263 *svh = &(pInfo->VisualObject.VideoObject.VideoObjectPlaneH263); uint8_t index = 0; + uint8_t k = 0; ret = mp4_Parse_VideoObjectPlane_svh(parent, parser); if(ret == MP4_STATUS_OK) @@ -94,7 +282,7 @@ mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser vol->fixed_vop_time_increment = 1001; vol->aspect_ratio_info = MP4_ASPECT_RATIO_12_11; - vop->vop_rounding_type = 0; + vop->vop_rounding_type = svh->vop_rounding_type; vop->vop_fcode_forward = 1; vop->vop_coded = 1; vop->vop_coding_type = svh->picture_coding_type ? 
MP4_VOP_TYPE_P: MP4_VOP_TYPE_I; @@ -104,12 +292,41 @@ mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser vst->transfer_characteristics = 1; vst->matrix_coefficients = 6; - index = svh->source_format - 1; - vol->video_object_layer_width = svh_src_fmt_defaults[index].vop_width; - vol->video_object_layer_height = svh_src_fmt_defaults[index].vop_height; - svh->num_macroblocks_in_gob = svh_src_fmt_defaults[index].num_macroblocks_in_gob; - svh->num_gobs_in_vop = svh_src_fmt_defaults[index].num_gobs_in_vop; - svh->num_rows_in_gob = svh_src_fmt_defaults[index].num_rows_in_gob; + if(svh->source_format >= 1 && svh->source_format <= 5) + { + index = svh->source_format - 1; + vol->video_object_layer_width = svh_src_fmt_defaults[index].vop_width; + vol->video_object_layer_height = svh_src_fmt_defaults[index].vop_height; + svh->num_macroblocks_in_gob = svh_src_fmt_defaults[index].num_macroblocks_in_gob; + svh->num_gobs_in_vop = svh_src_fmt_defaults[index].num_gobs_in_vop; + svh->num_rows_in_gob = svh_src_fmt_defaults[index].num_rows_in_gob; + } + else if(svh->source_format == 6) //custom format + { + vol->video_object_layer_width = (svh->picture_width_indication + 1)*4; + vol->video_object_layer_height = (svh->picture_height_indication)*4; + if(vol->video_object_layer_height < 404) + { + k = 1; + } + else if (vol->video_object_layer_height < 804) + { + k = 2; + } + else + { + k = 4; + } + svh->num_macroblocks_in_gob = (vol->video_object_layer_width/16)*k; + svh->num_gobs_in_vop = (vol->video_object_layer_height)/(16*k); + svh->num_rows_in_gob = k; + } + else + { + DEB("Error: Bad value for VideoPlaneWithShortHeader.source_format\n"); + ret = MP4_STATUS_NOTSUPPORT; + return ret; + } } mp4_set_hdr_bitstream_error(parser, false, ret); @@ -126,7 +343,7 @@ mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser viddec_fw_mp4_vpsh_set_source_format(&wi.mp4_vpsh, svh->source_format); - ret = viddec_pm_append_workitem(parent, &wi); + ret = viddec_pm_append_workitem(parent, &wi, false); if(ret == 1) ret = MP4_STATUS_OK; } diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c index 9145342..e92a26d 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c @@ -1,5 +1,7 @@ #include "viddec_mp4_videoobjectlayer.h" +#ifdef ANDROID #include "viddec_types.h" +#endif const unsigned char mp4_DefaultIntraQuantMatrix[64] = { 8, 17, 18, 19, 21, 23, 25, 27, @@ -583,7 +585,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse viddec_fw_mp4_vol_set_fixed_vop_time_increment(&wi.mp4_vol, vidObjLay->fixed_vop_time_increment); viddec_fw_mp4_vol_set_vop_time_increment_resolution(&wi.mp4_vol, vidObjLay->vop_time_increment_resolution); - ret = viddec_pm_append_workitem(parent, &wi); + ret = viddec_pm_append_workitem(parent, &wi, false); if(ret == 1) ret = MP4_STATUS_OK; diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c index cf761e3..5d1ae0f 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c @@ -1,5 +1,7 @@ #include "viddec_mp4_videoobjectplane.h" +#ifdef ANDROID #include "viddec_types.h" +#endif mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void 
*parent, viddec_mp4_parser_t *parser) { @@ -55,7 +57,7 @@ mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t viddec_fw_mp4_gvop_set_closed_gov(&wi.mp4_gvop, data->closed_gov); viddec_fw_mp4_gvop_set_time_code(&wi.mp4_gvop, time_code); - ret = viddec_pm_append_workitem(parent, &wi); + ret = viddec_pm_append_workitem(parent, &wi, false); if(ret == 1) ret = MP4_STATUS_OK; } diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c index 49e7887..35a352d 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c @@ -1,5 +1,7 @@ #include "viddec_mp4_visualobject.h" +#ifdef ANDROID #include "viddec_types.h" +#endif static inline uint8_t mp4_pvt_isValid_verID(uint8_t id) { @@ -214,7 +216,7 @@ mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser) } } - ret = viddec_pm_append_workitem(parent, &wi); + ret = viddec_pm_append_workitem(parent, &wi, false); if(ret == 1) ret = MP4_STATUS_OK; } @@ -263,7 +265,7 @@ mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser) if (wi.user_data.size >= 11) { viddec_pm_setup_userdata(&wi); - ret = viddec_pm_append_workitem(parent, &wi); + ret = viddec_pm_append_workitem(parent, &wi, false); wi.user_data.size = 0; } } @@ -276,7 +278,7 @@ mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser) wi.user_data.data_payload[i] = 0; } viddec_pm_setup_userdata(&wi); - ret = viddec_pm_append_workitem(parent, &wi); + ret = viddec_pm_append_workitem(parent, &wi, false); wi.user_data.size = 0; } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h b/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h index d57a9bf..66a93df 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h @@ -31,6 +31,15 @@ enum vc1_workload_item_type VIDDEC_WORKLOAD_VC1_BITPLANE0, VIDDEC_WORKLOAD_VC1_BITPLANE1, VIDDEC_WORKLOAD_VC1_BITPLANE2, + VIDDEC_WORKLOAD_VC1_REGS_SEQ_ENTRY, + VIDDEC_WORKLOAD_VC1_REGS_SIZE_AND_AP_RANGEMAP, + VIDDEC_WORKLOAD_VC1_REGS_INT_COM_FW, + VIDDEC_WORKLOAD_VC1_REGS_INT_COM_BW, + VIDDEC_WORKLOAD_VC1_REGS_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO, + VIDDEC_WORKLOAD_VC1_REGS_SLICE_FRAME_TYPE_INFO, + VIDDEC_WORKLOAD_VC1_REGS_SLICE_CONTROL_INFO, + VIDDEC_WORKLOAD_VC1_REGS_SLICE_OTHER_INFO, + VIDDEC_WORKLOAD_VC1_REGS_REF_FRAME_TYPE, VIDDEC_WORKLOAD_VC1_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0, VIDDEC_WORKLOAD_VC1_FUTURE_FRAME, }; @@ -75,7 +84,7 @@ typedef struct { /* 0x08 */ uint32_t coded_size; /* 0x0c */ uint32_t stream_format2; /* 0x10 */ uint32_t entrypoint1; - /* 0x14 */ uint32_t range_map; + /* 0x14 */ uint32_t ap_range_map; /* 0x18 */ uint32_t frame_type; /* 0x1c */ uint32_t recon_control; /* 0x20 */ uint32_t mv_control; @@ -93,7 +102,7 @@ typedef struct { /* 0x58 */ uint32_t intcomp_fwd_bot; /* 0x5c */ uint32_t intcomp_bwd_top; /* 0x60 */ uint32_t intcomp_bwd_bot; - /* 0x64 */ uint32_t _stuffing; + /* 0x14 */ uint32_t smp_range_map; } VC1D_SPR_REGS; /* @@ -105,6 +114,7 @@ enum { VC1_FRAME_CURRENT_DIS, VC1_FRAME_PAST, VC1_FRAME_FUTURE, + VC1_FRAME_ALT }; #endif //_VC1_COMMON_H_ diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c index a2d6721..72b770f 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c +++ 
b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c @@ -10,7 +10,21 @@ void vc1_end_frame (vc1_viddec_parser_t *parser) return; } + int32_t vc1_parse_emit_current_frame( void *parent, vc1_viddec_parser_t *parser ) { return(0); } + + +void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser) +{ +} + +void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser) +{ +} + +void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser) +{ +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h index 8416b24..a2607d7 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h @@ -187,6 +187,7 @@ typedef struct { int anchor[2]; /* one per field */ int rr_en; /* range reduction enable flag at sequence layer */ int rr_frm; /* range reduction flag at picture layer */ + int tff; } ref_frame_t; typedef struct @@ -194,7 +195,8 @@ typedef struct uint32_t sc_seen_since_last_wkld; uint32_t sc_seen; uint32_t is_frame_start; - uint8_t is_reference_picture; + uint32_t is_second_start; + uint32_t is_reference_picture; uint32_t intcomp_last[4]; /* for B frames */ uint32_t intcomp_top[2]; uint32_t intcomp_bot[2]; diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c index a033385..d98b38a 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c @@ -95,7 +95,7 @@ vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo) viddec_fw_vc1_set_rcv_maxbframes(&wi.vc1_sh_struct_a_c, rcv.struct_c.MAXBFRAMES); viddec_fw_vc1_set_rcv_finterpflag(&wi.vc1_sh_struct_a_c, rcv.struct_c.FINTERPFLAG); - result = viddec_pm_append_workitem(ctxt, &wi); + result = viddec_pm_append_workitem(ctxt, &wi, false); } return status; @@ -125,9 +125,10 @@ vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) if(result == 1) { md->PROFILE = sh.seq_flags.PROFILE; -#ifdef VBP md->LEVEL = sh.seq_flags.LEVEL; -#endif + md->CHROMAFORMAT = sh.seq_flags.COLORDIFF_FORMAT; + md->FRMRTQ = sh.seq_flags.FRMRTQ_POSTPROC; + md->BITRTQ = sh.seq_flags.BITRTQ_POSTPROC; } result = viddec_pm_get_bits(ctxt, &sh.max_size, 32); @@ -230,7 +231,7 @@ vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) viddec_fw_vc1_set_psf(&wi_sl.vc1_sl, sh.seq_max_size.PSF); viddec_fw_vc1_set_display_ext(&wi_sl.vc1_sl, sh.seq_max_size.DISPLAY_EXT); - result = viddec_pm_append_workitem(ctxt, &wi_sl); + result = viddec_pm_append_workitem(ctxt, &wi_sl, false); // send DISPLAY EXTENSION metadata if present if (sh.seq_max_size.DISPLAY_EXT) @@ -258,7 +259,7 @@ vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) viddec_fw_vc1_set_disp_color_prim(&wi_de.vc1_sl_de, sh.seq_color_format.COLOR_PRIM); viddec_fw_vc1_set_disp_transfer_char(&wi_de.vc1_sl_de, sh.seq_color_format.TRANSFER_CHAR); - result = viddec_pm_append_workitem(ctxt, &wi_de); + result = viddec_pm_append_workitem(ctxt, &wi_de, false); } } @@ -298,6 +299,8 @@ vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo) result = viddec_pm_skip_bits(ctxt, hrd_bits); md->REFDIST = 0; + md->BROKEN_LINK = ep.ep_flags.BROKEN_LINK; + md->CLOSED_ENTRY = ep.ep_flags.CLOSED_ENTRY; md->PANSCAN_FLAG = ep.ep_flags.PANSCAN_FLAG; md->REFDIST_FLAG = ep.ep_flags.REFDIST_FLAG; md->LOOPFILTER = ep.ep_flags.LOOPFILTER; @@ -370,14 +373,9 @@ vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo) 
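/*
 * Annotation, not part of the patch: every emit site in this series now
 * passes a third argument to viddec_pm_append_workitem(). The parameter's
 * meaning is not visible in this diff; reading it as "append to the next
 * workload rather than the current one" is an assumption. A minimal
 * compatibility wrapper under that assumption:
 */
#include "viddec_parser_ops.h"  /* assumed home of viddec_workload_item_t */

static inline int32_t append_workitem_current(void *parent,
                                              viddec_workload_item_t *wi)
{
    /* false: item belongs to the workload currently being built
     * (assumed semantics; return type mirrors the call sites, which
     * compare the result against 1 on success) */
    return viddec_pm_append_workitem(parent, wi, false);
}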
viddec_fw_vc1_set_ep_range_mapuv_flag(&wi.vc1_ep, ep.RANGE_MAPUV_FLAG); viddec_fw_vc1_set_ep_range_mapuv(&wi.vc1_ep, ep.RANGE_MAPUV); - result = viddec_pm_append_workitem(ctxt, &wi); + result = viddec_pm_append_workitem(ctxt, &wi, false); } -#ifdef VBP - md->BROKEN_LINK = ep.ep_flags.BROKEN_LINK; - md->CLOSED_ENTRY = ep.ep_flags.CLOSED_ENTRY; -#endif - DEB("ep: res: %dx%d\n", ep.ep_size.CODED_WIDTH, ep.ep_size.CODED_HEIGHT); DEB("md: after ep: res: %dx%d\n", md->width, md->height); return status; @@ -534,7 +532,7 @@ vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc) if (wi.user_data.size >= 11) { viddec_pm_setup_userdata(&wi); - viddec_pm_append_workitem(ctxt, &wi); + viddec_pm_append_workitem(ctxt, &wi,false); wi.user_data.size = 0; } if(user_data == 0x80) // flushing byte @@ -549,7 +547,7 @@ vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc) wi.user_data.data_payload[i] = 0; } viddec_pm_setup_userdata(&wi); - viddec_pm_append_workitem(ctxt, &wi); + viddec_pm_append_workitem(ctxt, &wi,false); wi.user_data.size = 0; } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h index d0e2f00..16e0497 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h @@ -1,136 +1,140 @@ -/* /////////////////////////////////////////////////////////////////////// -// -// INTEL CORPORATION PROPRIETARY INFORMATION -// This software is supplied under the terms of a license agreement or -// nondisclosure agreement with Intel Corporation and may not be copied -// or disclosed except in accordance with the terms of that agreement. -// Copyright (c) 2008 Intel Corporation. All Rights Reserved. -// -// Description: Common functions for parsing VC-1 bitstreams. -// -*/ - -#ifndef _VC1PARSE_H_ -#define _VC1PARSE_H_ - -#include "viddec_parser_ops.h" -#include "vc1.h" - -/** @weakgroup vc1parse_defs VC-1 Parse Definitions */ -/** @ingroup vc1parse_defs */ -/*@{*/ - -/* This macro gets the next less-than-nine bits from the bitstream. It is -assumed that numBits is less than ten. */ -#ifdef VC1_VERBOSE -#include -#define AUTO_TRACE OS_INFO("trace: %s\n", __FUNCTION__) -#define DEBUGBITS(arg1, args ...) OS_INFO( arg1, ## args) -#else -#define AUTO_TRACE -#define DEBUGBITS(...) -#endif - -extern void *memset(void *s, int32_t c, uint32_t n); - -/* This macro gets the next numBits from the bitstream. */ -#define VC1_GET_BITS VC1_GET_BITS9 -#define VC1_GET_BITS9(numBits, value) \ -{ uint32_t __tmp__; \ - viddec_pm_get_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \ - value = __tmp__;\ - DEBUGBITS("BIT:%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \ -} - -#define VC1_PEEK_BITS(numBits, value) \ -{ uint32_t __tmp__; \ - viddec_pm_peek_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \ - value = __tmp__;\ - DEBUGBITS("PEEK%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \ -} - -/* This macro asserts if the condition is not true. */ -#ifdef VC1_VERBOSE -#define VC1_ASSERT(condition) \ -{ \ - if (! 
(condition)) \ - OS_INFO("Failed " #condition "!\n"); \ -} -#else -#define VC1_ASSERT(condition) -#endif - -/*@}*/ - -/** @weakgroup vc1parse VC-1 Parse Functions */ -/** @ingroup vc1parse */ -/*@{*/ - -extern const uint8_t VC1_MVMODE_LOW_TBL[]; -extern const uint8_t VC1_MVMODE_HIGH_TBL[]; -extern const int32_t VC1_BITPLANE_IMODE_TBL[]; -extern const int32_t VC1_BITPLANE_K_TBL[]; -extern const int32_t VC1_BFRACTION_TBL[]; -extern const int32_t VC1_REFDIST_TBL[]; - -void vc1_end_frame(vc1_viddec_parser_t *parser); - -/* Top-level functions to parse bitstream layers for rcv format. */ -vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo); - -/* Top-level functions to parse bitstream layers for the various profiles. */ -vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParsePictureLayer(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParseFieldLayer(void* ctxt, vc1_Info *pInfo); - -/* Top-level functions to parse headers for various picture layers for the -simple and main profiles. */ -vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo); - -/* Top-level functions to parse common part of the headers for various picture -layers for the advanced profile. */ -vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParseFieldHeader_Adv (void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo); - -/* Functions to parse remainder part of the headers for various progressive -picture layers for the advanced profile. */ -vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo); - -/* Functions to parse remainder part of the headers for various interlace frame -layers for the advanced profile. */ -vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo); - -/* Functions to parse remainder part of the headers for various interlace frame -layers for the advanced profile. */ -vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo); - -/* Functions to parse syntax element in bitstream. 
*/ -vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_CalculatePQuant(vc1_Info *pInfo); -vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo); -vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, uint32_t width, uint32_t height, vc1_bpp_type_t bptype); -vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable); -vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable, int8_t *pFirst, int16_t *pSecond); - -void vc1_start_new_frame(void *parent, vc1_viddec_parser_t *parser); -int32_t vc1_parse_emit_current_frame(void *parent, vc1_viddec_parser_t *parser); - -/* function to handle user data */ -vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc); - -/*@}*/ - -#endif /* _VC1PARSE_H_. */ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Common functions for parsing VC-1 bitstreams. +// +*/ + +#ifndef _VC1PARSE_H_ +#define _VC1PARSE_H_ + +#include "viddec_parser_ops.h" +#include "vc1.h" + +/** @weakgroup vc1parse_defs VC-1 Parse Definitions */ +/** @ingroup vc1parse_defs */ +/*@{*/ + +/* This macro gets the next less-than-nine bits from the bitstream. It is +assumed that numBits is less than ten. */ +#ifdef VC1_VERBOSE +#include +#define AUTO_TRACE OS_INFO("trace: %s\n", __FUNCTION__) +#define DEBUGBITS(arg1, args ...) OS_INFO( arg1, ## args) +#else +#define AUTO_TRACE +#define DEBUGBITS(...) +#endif + +extern void *memset(void *s, int32_t c, uint32_t n); + +/* This macro gets the next numBits from the bitstream. */ +#define VC1_GET_BITS VC1_GET_BITS9 +#define VC1_GET_BITS9(numBits, value) \ +{ uint32_t __tmp__; \ + viddec_pm_get_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \ + value = __tmp__;\ + DEBUGBITS("BIT:%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \ +} + +#define VC1_PEEK_BITS(numBits, value) \ +{ uint32_t __tmp__; \ + viddec_pm_peek_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \ + value = __tmp__;\ + DEBUGBITS("PEEK%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \ +} + +/* This macro asserts if the condition is not true. */ +#ifdef VC1_VERBOSE +#define VC1_ASSERT(condition) \ +{ \ + if (! (condition)) \ + OS_INFO("Failed " #condition "!\n"); \ +} +#else +#define VC1_ASSERT(condition) +#endif + +/*@}*/ + +/** @weakgroup vc1parse VC-1 Parse Functions */ +/** @ingroup vc1parse */ +/*@{*/ + +extern const uint8_t VC1_MVMODE_LOW_TBL[]; +extern const uint8_t VC1_MVMODE_HIGH_TBL[]; +extern const int32_t VC1_BITPLANE_IMODE_TBL[]; +extern const int32_t VC1_BITPLANE_K_TBL[]; +extern const int32_t VC1_BFRACTION_TBL[]; +extern const int32_t VC1_REFDIST_TBL[]; + +void vc1_end_frame(vc1_viddec_parser_t *parser); + +/* Top-level functions to parse bitstream layers for rcv format. */ +vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo); + +/* Top-level functions to parse bitstream layers for the various profiles. 
*/ +vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParsePictureLayer(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParseFieldLayer(void* ctxt, vc1_Info *pInfo); + +/* Top-level functions to parse headers for various picture layers for the +simple and main profiles. */ +vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo); + +/* Top-level functions to parse common part of the headers for various picture +layers for the advanced profile. */ +vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParseFieldHeader_Adv (void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo); + +/* Functions to parse remainder part of the headers for various progressive +picture layers for the advanced profile. */ +vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo); + +/* Functions to parse remainder part of the headers for various interlace frame +layers for the advanced profile. */ +vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo); + +/* Functions to parse remainder part of the headers for various interlace frame +layers for the advanced profile. */ +vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo); + +/* Functions to parse syntax element in bitstream. */ +vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_CalculatePQuant(vc1_Info *pInfo); +vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo); +vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, uint32_t width, uint32_t height, vc1_bpp_type_t bptype); +vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable); +vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable, int8_t *pFirst, int16_t *pSecond); + +void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser); +void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser); +void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser); + + + + +/* function to handle user data */ +vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc); + +/*@}*/ + +#endif /* _VC1PARSE_H_. 
*/ diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c index 5ee9e18..b7dd271 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c @@ -369,20 +369,19 @@ static void vc1_Norm6ModeDecode(void* ctxt, vc1_Bitplane *pBitplane, ResidualY = height & 1; } -#ifndef VBP for (i = 0; i < ResidualX; i++) { int32_t ColSkip; VC1_GET_BITS(1, ColSkip); - if (1 == ColSkip) + //if (1 == ColSkip) { for(j = 0; j < height; j++) { - int32_t Value = 0; - VC1_GET_BITS(1, Value); - put_bit(Value, i, j, width, height,pBitplane->invert, - pBitplane->databits); + int32_t Value = 0; + if (1 == ColSkip) VC1_GET_BITS(1, Value); + + put_bit(Value, i, j, width, height,pBitplane->invert,pBitplane->databits); } } } @@ -391,51 +390,17 @@ static void vc1_Norm6ModeDecode(void* ctxt, vc1_Bitplane *pBitplane, { int32_t RowSkip; VC1_GET_BITS(1, RowSkip); - if (1 == RowSkip) + //if (1 == RowSkip) { for (i = ResidualX; i < width; i++) { - int32_t Value = 0; - VC1_GET_BITS(1, Value); - put_bit(Value, i, j, width, height,pBitplane->invert, - pBitplane->databits); + int32_t Value = 0; + if (1 == RowSkip) VC1_GET_BITS(1, Value); + + put_bit(Value, i, j, width, height,pBitplane->invert,pBitplane->databits); } } } - #else - int32_t Value = 0; - for (i = 0; i < ResidualX; i++) - { - int32_t ColSkip; - VC1_GET_BITS(1, ColSkip); - Value = 0; - for(j = 0; j < height; j++) - { - if (1 == ColSkip) - { - VC1_GET_BITS(1, Value); - } - put_bit(Value, i, j, width, height,pBitplane->invert, - pBitplane->databits); - } - } - - for (j = 0; j < ResidualY; j++) - { - int32_t RowSkip; - VC1_GET_BITS(1, RowSkip); - Value = 0; - for (i = ResidualX; i < width; i++) - { - if (1 == RowSkip) - { - VC1_GET_BITS(1, Value); - } - put_bit(Value, i, j, width, height,pBitplane->invert, - pBitplane->databits); - } - } - #endif /* restore value */ pBitplane->invert=tmp; @@ -489,8 +454,7 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, VC1_GET_BITS(1, tempValue); bpp->invert = (uint8_t) tempValue; - if ((status = vc1_DecodeHuffmanOne(ctxt, &bpp->imode, - VC1_BITPLANE_IMODE_TBL)) != VC1_STATUS_OK) + if ((status = vc1_DecodeHuffmanOne(ctxt, &bpp->imode,VC1_BITPLANE_IMODE_TBL)) != VC1_STATUS_OK) { return status; } @@ -531,14 +495,12 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, for (j = 0; j < width; j++) { VC1_GET_BITS(1, tempValue); - put_bit( tempValue, j, i, width, height, bpp->invert, - bpp->databits); + put_bit( tempValue, j, i, width, height, bpp->invert,bpp->databits); } } else if (bpp->invert) { //TO TEST for (j = 0; j < width; j++) { - put_bit( 0, j, i, width, height, bpp->invert, - bpp->databits); + put_bit( 0, j, i, width, height, bpp->invert, bpp->databits); } } } @@ -555,14 +517,12 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, for (j = 0; j < height; j++) { VC1_GET_BITS(1, tempValue); - put_bit( tempValue, i, j, width, height, bpp->invert, - bpp->databits); + put_bit( tempValue, i, j, width, height, bpp->invert, bpp->databits); } } else if (bpp->invert) { // fill column with ones for (j = 0; j < height; j++) { - put_bit( 0, i, j, width, height, bpp->invert, - bpp->databits); + put_bit( 0, i, j, width, height, bpp->invert, bpp->databits); } }//end for else } @@ -591,11 +551,12 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, { wi.vwi_type = bpnum; wi.data.data_offset = (char *)pl - (char *)bit_dw; // offset within struct + 
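/*
 * Sketch, not part of the patch: the Norm-6 residual loops restructured
 * above now visit every row of a column (and every column of a row) even
 * when the skip bit is 0, writing an explicit zero instead of leaving the
 * destination untouched, so skipped columns can no longer inherit stale
 * bitplane contents. Simplified stand-alone form; next_bit() is a
 * hypothetical 1-bit reader standing in for VC1_GET_BITS, and the array
 * store stands in for put_bit().
 */
extern int next_bit(void);

static void decode_residual_column(int col, int width, int height,
                                   int col_skip, unsigned char *plane)
{
    int row;
    for (row = 0; row < height; row++)
    {
        int value = 0;
        if (1 == col_skip)
            value = next_bit();   /* coded bit only when COLSKIP == 1 */
        plane[row * width + col] = (unsigned char)value;
    }
}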
wi.data.data_payload[0] = pl[0]; wi.data.data_payload[1] = pl[1]; pl += 2; - viddec_pm_append_workitem( ctxt, &wi ); + viddec_pm_append_workitem( ctxt, &wi, false); } } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c index e73cde3..1b702e3 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c @@ -1,100 +1,100 @@ -/* /////////////////////////////////////////////////////////////////////// -// -// INTEL CORPORATION PROPRIETARY INFORMATION -// This software is supplied under the terms of a license agreement or -// nondisclosure agreement with Intel Corporation and may not be copied -// or disclosed except in accordance with the terms of that agreement. -// Copyright (c) 2008 Intel Corporation. All Rights Reserved. -// -// Description: Parses VC-1 picture layer for progressive B picture in simple -// or main profile bitstream. -// -*/ - -#include "vc1parse.h" -#include "viddec_fw_debug.h" // For DEB - -/*------------------------------------------------------------------------------ - * Parse picture layer. This function parses progressive B picture for main - * profile bitstream. This parser starts after PTYPE was parsed but stops - * before parsing of macroblock layer. - * Table 21 of SMPTE 421M after processing up to PTYPE for B picture. - *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo) -{ - vc1_Status status = VC1_STATUS_OK; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - - if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, - &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != - VC1_STATUS_OK) - { - return status; - } - - VC1_GET_BITS9(5, picLayerHeader->PQINDEX); - if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) - return status; - - if (picLayerHeader->PQINDEX <= 8) - { - VC1_GET_BITS9(1, picLayerHeader->HALFQP); - } - else picLayerHeader->HALFQP=0; - - if (md->QUANTIZER == 1) - { - VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); - } - - if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - VC1_GET_BITS9(1, picLayerHeader->MVMODE); - picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ? - VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV; - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) - { - return VC1_STATUS_OK; - } - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) - { - return status; - } - - VC1_GET_BITS9(2, picLayerHeader->MVTAB); - VC1_GET_BITS9(2, picLayerHeader->CBPTAB); - - if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - if (md->VSTRANSFORM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TTMBF); - if (picLayerHeader->TTMBF) - { - VC1_GET_BITS9(2, picLayerHeader->TTFRM); - } - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - if (picLayerHeader->TRANSACFRM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - picLayerHeader->TRANSACFRM += 2; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); - - /* Skip parsing of macroblock layer. 
*/ - - return status; -} - +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for progressive B picture in simple +// or main profile bitstream. +// +*/ + +#include "vc1parse.h" +#include "viddec_fw_debug.h" // For DEB + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses progressive B picture for main + * profile bitstream. This parser starts after PTYPE was parsed but stops + * before parsing of macroblock layer. + * Table 21 of SMPTE 421M after processing up to PTYPE for B picture. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != + VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else picLayerHeader->HALFQP=0; + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + } + + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ? + VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) + { + return VC1_STATUS_OK; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(2, picLayerHeader->MVTAB); + VC1_GET_BITS9(2, picLayerHeader->CBPTAB); + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. 
*/ + + return status; +} + diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c index 4074309..7a6a8e0 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c @@ -1,257 +1,257 @@ -/* /////////////////////////////////////////////////////////////////////// -// -// INTEL CORPORATION PROPRIETARY INFORMATION -// This software is supplied under the terms of a license agreement or -// nondisclosure agreement with Intel Corporation and may not be copied -// or disclosed except in accordance with the terms of that agreement. -// Copyright (c) 2008 Intel Corporation. All Rights Reserved. -// -// Description: Parses VC-1 picture layer for progressive B picture in advanced -// profile bitstream. -// -*/ - -#include "vc1parse.h" -#include "viddec_fw_debug.h" // For DEB - -/*------------------------------------------------------------------------------ - * Parse picture layer. This function parses progressive B picture for advanced - * profile bitstream. - * Table 22 of SMPTE 421M after processing up to POSTPROC by - * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock - * layer. - *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo) -{ - vc1_Status status = VC1_STATUS_OK; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - - if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - VC1_GET_BITS9(1, picLayerHeader->MVMODE); - picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ? - VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV; - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) - { - return status; - } - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) - { - return status; - } - - VC1_GET_BITS9(2, picLayerHeader->MVTAB); - VC1_GET_BITS9(2, picLayerHeader->CBPTAB); - - if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - if (md->VSTRANSFORM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TTMBF); - if (picLayerHeader->TTMBF == 1) - { - VC1_GET_BITS9(2, picLayerHeader->TTFRM); - } - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - if (picLayerHeader->TRANSACFRM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - picLayerHeader->TRANSACFRM += 2; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); - - /* Skip parsing of macroblock layer. */ - - return status; -} - -/*------------------------------------------------------------------------------ - * Parse picture layer. This function parses interlace B frame for advanced - * profile bitstream. - * Table 84 of SMPTE 421M after processing up to POSTPROC by - * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock - * layer. 
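/*
 * Sketch, not part of the patch: TRANSACFRM and TRANSACFRM2, read
 * repeatedly in the picture headers of this file, decode a two-level
 * code -- 0 -> 0, 10 -> 2, 11 -> 3 (the value 1 is never produced).
 * Stand-alone form with a hypothetical next_bit() reader:
 */
extern int next_bit(void);

static int decode_transacfrm(void)
{
    int v = next_bit();
    if (1 == v)
        v = 2 + next_bit();   /* second bit picks between 2 and 3 */
    return v;
}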
- *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo) -{ - vc1_Status status = VC1_STATUS_OK; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - - if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, - &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != - VC1_STATUS_OK) - { - return status; - } - - if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - VC1_GET_BITS9(1, picLayerHeader->INTCOMP); - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) - { - return status; - } - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) - { - return status; - } - - // EPC picLayerHeader->MVMODE = VC1_MVMODE_1MV; - VC1_GET_BITS9(2, picLayerHeader->MBMODETAB); - VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */ - VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ - VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */ - VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ - - if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - if (md->VSTRANSFORM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TTMBF); - if (picLayerHeader->TTMBF == 1) - { - VC1_GET_BITS9(2, picLayerHeader->TTFRM); - } - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - if (picLayerHeader->TRANSACFRM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - picLayerHeader->TRANSACFRM += 2; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); - - /* Skip parsing of macroblock layer. */ - - return status; -} - -/*------------------------------------------------------------------------------ - * Parse picture layer. This function parses interlace B field for advanced - * profile bitstream. - * Table 89 of SMPTE 421M after processing up to BFRACTION by - * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock - * layer. 
- *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo) -{ - uint8_t bit_count; - const uint8_t *table; - vc1_Status status = VC1_STATUS_OK; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader* picLayerHeader = &pInfo->picLayerHeader; - - VC1_GET_BITS9(5, picLayerHeader->PQINDEX); - - if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) - return status; - - if (picLayerHeader->PQINDEX <= 8) - { - VC1_GET_BITS9(1, picLayerHeader->HALFQP); - } - else - picLayerHeader->HALFQP = 0; - - if (md->QUANTIZER == 1) - { - VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); - picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; - } - - if (md->POSTPROCFLAG == 1) - { - VC1_GET_BITS9(2, picLayerHeader->POSTPROC); - } - - if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - if (picLayerHeader->PQUANT > 12) - table = VC1_MVMODE_LOW_TBL; - else - table = VC1_MVMODE_HIGH_TBL; - - bit_count = 0; - VC1_GET_BITS9(1, picLayerHeader->MVMODE); - while ((picLayerHeader->MVMODE == 0) && (bit_count < 2)) - { - VC1_GET_BITS9(1, picLayerHeader->MVMODE); - bit_count++; - } - if ((bit_count == 2) && (picLayerHeader->MVMODE == 0)) - bit_count++; - picLayerHeader->MVMODE = table[bit_count]; - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, (md->heightMB+1)/2, BPP_FORWARDMB)) != - VC1_STATUS_OK) - { - return status; - } - - VC1_GET_BITS9(3, picLayerHeader->MBMODETAB); - VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. */ - VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ - - if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) - { - VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ - } - - if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - if (md->VSTRANSFORM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TTMBF); - if (picLayerHeader->TTMBF == 1) - { - VC1_GET_BITS9(2, picLayerHeader->TTFRM); - } - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - if (picLayerHeader->TRANSACFRM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - picLayerHeader->TRANSACFRM += 2; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); - - /* Skip parsing of macroblock layer. */ - - return status; -} +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for progressive B picture in advanced +// profile bitstream. +// +*/ + +#include "vc1parse.h" +#include "viddec_fw_debug.h" // For DEB + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses progressive B picture for advanced + * profile bitstream. + * Table 22 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. 
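/*
 * Sketch, not part of the patch: the MVMODE codeword read in the
 * interlaced B-field header above (and re-emitted below) is a short
 * unary code whose length indexes a mode table chosen by quantizer
 * strength (VC1_MVMODE_LOW_TBL when PQUANT > 12, else
 * VC1_MVMODE_HIGH_TBL): 1 -> table[0], 01 -> table[1], 001 -> table[2],
 * 000 -> table[3]. Stand-alone form with a hypothetical next_bit()
 * reader:
 */
extern int next_bit(void);

static uint8_t decode_mvmode(const uint8_t *table)
{
    int idx = 0;
    while (idx < 3 && next_bit() == 0)
        idx++;   /* stop on a 1 bit, or after three 0 bits */
    return table[idx];
}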
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ? + VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) + { + return status; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(2, picLayerHeader->MVTAB); + VC1_GET_BITS9(2, picLayerHeader->CBPTAB); + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace B frame for advanced + * profile bitstream. + * Table 84 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != + VC1_STATUS_OK) + { + return status; + } + + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + VC1_GET_BITS9(1, picLayerHeader->INTCOMP); + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) + { + return status; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + // EPC picLayerHeader->MVMODE = VC1_MVMODE_1MV; + VC1_GET_BITS9(2, picLayerHeader->MBMODETAB); + VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */ + VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ + VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */ + VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. 
*/ + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace B field for advanced + * profile bitstream. + * Table 89 of SMPTE 421M after processing up to BFRACTION by + * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint8_t bit_count; + const uint8_t *table; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader* picLayerHeader = &pInfo->picLayerHeader; + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else + picLayerHeader->HALFQP = 0; + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + if (md->POSTPROCFLAG == 1) + { + VC1_GET_BITS9(2, picLayerHeader->POSTPROC); + } + + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQUANT > 12) + table = VC1_MVMODE_LOW_TBL; + else + table = VC1_MVMODE_HIGH_TBL; + + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + while ((picLayerHeader->MVMODE == 0) && (bit_count < 2)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + bit_count++; + } + if ((bit_count == 2) && (picLayerHeader->MVMODE == 0)) + bit_count++; + picLayerHeader->MVMODE = table[bit_count]; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, (md->heightMB+1)/2, BPP_FORWARDMB)) != + VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(3, picLayerHeader->MBMODETAB); + VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. */ + VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ + + if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) + { + VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ + } + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. 
*/ + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h index 9e621fc..149e364 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h @@ -146,12 +146,15 @@ typedef struct { // From Sequence Layer for Advanced Profile uint8_t PROFILE; /** 2 bit(s). */ -#ifdef VBP - uint8_t LEVEL; -#endif - uint8_t POSTPROCFLAG; /** 1 bit(s). */ + uint8_t LEVEL; /** 3 bit(s). */ + uint8_t CHROMAFORMAT; /** 2 bit(s). */ + uint8_t FRMRTQ; /** 3 bit(s). */ + + uint8_t BITRTQ; /** 5 bit(s). */ + uint8_t POSTPROCFLAG; /** 1 bit(s). */ uint8_t PULLDOWN; /** 1 bit(s). */ uint8_t INTERLACE; /** 1 bit(s). */ + uint8_t TFCNTRFLAG; /** 1 bit(s). */ uint8_t FINTERPFLAG; /** 1 bit(s). */ uint8_t PSF; /** 1 bit(s). */ @@ -162,18 +165,24 @@ typedef struct uint8_t MULTIRES; /** 1 bit(s). */ // From EntryPoint Layer for Advanced Profile + uint8_t BROKEN_LINK; + uint8_t CLOSED_ENTRY; + uint8_t PANSCAN_FLAG; uint8_t REFDIST_FLAG; uint8_t LOOPFILTER; uint8_t FASTUVMC; + uint8_t EXTENDED_MV; uint8_t DQUANT; uint8_t VSTRANSFORM; uint8_t OVERLAP; + uint8_t QUANTIZER; uint8_t EXTENDED_DMV; uint8_t RANGE_MAPY_FLAG; uint8_t RANGE_MAPY; + uint8_t RANGE_MAPUV_FLAG; uint8_t RANGE_MAPUV; @@ -187,7 +196,9 @@ typedef struct uint8_t INTCOMPFIELD; /** ? bit(s)? */ uint8_t LUMSCALE2; /** 6 bit(s). */ uint8_t LUMSHIFT2; /** 6 bit(s). */ + uint8_t bp_raw[VC1_MAX_BITPLANE_CHUNKS]; + uint8_t res_1; // From SequenceLayerHeader, EntryPointHeader or Struct_A uint16_t width; @@ -196,8 +207,6 @@ typedef struct uint16_t heightMB; #ifdef VBP - uint8_t CLOSED_ENTRY; - uint8_t BROKEN_LINK; uint8_t SYNCMARKER; #endif diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c index c2f5985..5dc9b4d 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c @@ -1,97 +1,97 @@ -/* /////////////////////////////////////////////////////////////////////// -// -// INTEL CORPORATION PROPRIETARY INFORMATION -// This software is supplied under the terms of a license agreement or -// nondisclosure agreement with Intel Corporation and may not be copied -// or disclosed except in accordance with the terms of that agreement. -// Copyright (c) 2008 Intel Corporation. All Rights Reserved. -// -// Description: Parses VLC syntax elements within VC-1 bitstream. 
-// -*/ - -#include "vc1parse.h" - -/*----------------------------------------------------------------------------*/ - -vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable) -{ - uint32_t tempValue; - const int32_t *pTable = pDecodeTable; - vc1_Status status = VC1_STATUS_OK; - int32_t i, j, maxBits, loopCount, totalBits, value; - - maxBits = *pTable++; - loopCount = *pTable++; - totalBits = 0; - for (i = 0; i < loopCount; i++) - totalBits += *pTable++; - - if (totalBits != maxBits) - return VC1_STATUS_PARSE_ERROR; - - value = 0; - for (i = 0; i < maxBits; i++) - { - VC1_GET_BITS9(1, tempValue); - value = (value << 1) | tempValue; - loopCount = *pTable++; - if (loopCount == -1) - break; - for (j = 0; j < loopCount; j++) - { - if (value == *pTable++) - { - *pDst = *pTable; - return status; - } - else - pTable++; - } - } - - return status; -} - -/*----------------------------------------------------------------------------*/ - -vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable, - int8_t *pFirst, int16_t *pSecond) -{ - uint32_t tempValue; - const int32_t *pTable = pDecodeTable; - vc1_Status status = VC1_STATUS_OK; - int32_t i, j, maxBits, loopCount, totalBits, value; - - maxBits = *pTable++; - loopCount = *pTable++; - totalBits = 0; - for (i = 0; i < loopCount; i++) - totalBits += *pTable++; - - if (totalBits != maxBits) - return VC1_STATUS_PARSE_ERROR; - - value = 0; - for (i = 0; i < maxBits; i++) - { - VC1_GET_BITS9(1, tempValue); - value = (value << 1) | tempValue; - loopCount = *pTable++; - if (loopCount == -1) - break; - for (j = 0; j < loopCount; j++) - { - if (value == *pTable++) - { - *pFirst = *pTable++; - *pSecond = *pTable; - return status; - } - else - pTable += 2; - } - } - - return status; -} +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VLC syntax elements within VC-1 bitstream. 
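// Sketch, not part of the patch: the table layout that
// vc1_DecodeHuffmanOne() in this file walks. Header: { maxBits, N,
// e1..eN } where the N entries need only sum to maxBits (a consistency
// check); then one group per code length: { count, code,value pairs },
// with a count of -1 terminating the table early. A hypothetical
// three-symbol code (0 -> 100, 10 -> 200, 11 -> 300) would be laid out
// as:

static const int32_t demo_huffman_tbl[] = {
    2,                  // maxBits: the longest code is 2 bits
    2, 1, 1,            // N = 2 length classes; entries sum to maxBits
    1, 0, 100,          // depth 1: one pair; code 0 -> 100
    2, 2, 200, 3, 300   // depth 2: two pairs; 10 -> 200, 11 -> 300
};

// Feeding bits 1,1 accumulates value = 3, which matches at depth 2 and
// returns 300; a leading 0 bit matches at depth 1 and returns 100.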
+// +*/ + +#include "vc1parse.h" + +/*----------------------------------------------------------------------------*/ + +vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable) +{ + uint32_t tempValue; + const int32_t *pTable = pDecodeTable; + vc1_Status status = VC1_STATUS_OK; + int32_t i, j, maxBits, loopCount, totalBits, value; + + maxBits = *pTable++; + loopCount = *pTable++; + totalBits = 0; + for (i = 0; i < loopCount; i++) + totalBits += *pTable++; + + if (totalBits != maxBits) + return VC1_STATUS_PARSE_ERROR; + + value = 0; + for (i = 0; i < maxBits; i++) + { + VC1_GET_BITS9(1, tempValue); + value = (value << 1) | tempValue; + loopCount = *pTable++; + if (loopCount == -1) + break; + for (j = 0; j < loopCount; j++) + { + if (value == *pTable++) + { + *pDst = *pTable; + return status; + } + else + pTable++; + } + } + + return status; +} + +/*----------------------------------------------------------------------------*/ + +vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable, + int8_t *pFirst, int16_t *pSecond) +{ + uint32_t tempValue; + const int32_t *pTable = pDecodeTable; + vc1_Status status = VC1_STATUS_OK; + int32_t i, j, maxBits, loopCount, totalBits, value; + + maxBits = *pTable++; + loopCount = *pTable++; + totalBits = 0; + for (i = 0; i < loopCount; i++) + totalBits += *pTable++; + + if (totalBits != maxBits) + return VC1_STATUS_PARSE_ERROR; + + value = 0; + for (i = 0; i < maxBits; i++) + { + VC1_GET_BITS9(1, tempValue); + value = (value << 1) | tempValue; + loopCount = *pTable++; + if (loopCount == -1) + break; + for (j = 0; j < loopCount; j++) + { + if (value == *pTable++) + { + *pFirst = *pTable++; + *pSecond = *pTable; + return status; + } + else + pTable += 2; + } + } + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c index 03aeb79..e478250 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c @@ -1,257 +1,257 @@ -/* /////////////////////////////////////////////////////////////////////// -// -// INTEL CORPORATION PROPRIETARY INFORMATION -// This software is supplied under the terms of a license agreement or -// nondisclosure agreement with Intel Corporation and may not be copied -// or disclosed except in accordance with the terms of that agreement. -// Copyright (c) 2008 Intel Corporation. All Rights Reserved. -// -// Description: Parses VC-1 picture layer for progressive I or BI picture in -// advanced profile bitstream. -// -*/ - -#include "vc1parse.h" -#include "viddec_fw_debug.h" -/*------------------------------------------------------------------------------ - * Parse picture layer. This function parses progressive I or BI picture for - * advanced profile bitstream. - * Table 18 of SMPTE 421M after processing up to POSTPROC by - * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock - * layer. 
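/*
 * Sketch, not part of the patch: the conditional-overlap (CONDOVER)
 * syntax parsed in the I/BI picture headers below (read only when
 * OVERLAP is set and PQUANT <= 8) decodes as 0 -> NONE, 10 -> ALL,
 * 11 -> SOME, with SOME followed by a per-macroblock OVERFLAGS
 * bitplane. Stand-alone form with a hypothetical next_bit() reader:
 */
extern int next_bit(void);

static int decode_condover(void)
{
    if (0 == next_bit())
        return VC1_CONDOVER_FLAG_NONE;
    return next_bit() ? VC1_CONDOVER_FLAG_SOME  /* OVERFLAGS bitplane follows */
                      : VC1_CONDOVER_FLAG_ALL;
}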
- *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo) -{ - vc1_Status status = VC1_STATUS_OK; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK) - { - return status; - } - - if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8)) - { - VC1_GET_BITS9(1, picLayerHeader->CONDOVER); - if (picLayerHeader->CONDOVER) - { - VC1_GET_BITS9(1, picLayerHeader->CONDOVER); - if (! picLayerHeader->CONDOVER) - picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL; - else - { - picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, - md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK) - { - return status; - } - } - } - else - picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - if (picLayerHeader->TRANSACFRM) - { - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - picLayerHeader->TRANSACFRM += 2; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); - if (picLayerHeader->TRANSACFRM2) - { - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); - picLayerHeader->TRANSACFRM2 += 2; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); - - status = vc1_VOPDQuant(ctxt, pInfo); - - /* Skip parsing of macroblock layer. */ - - return status; -} - -/*------------------------------------------------------------------------------ - * Parse picture layer. This function parses interlace I or BI frame for - * advanced profile bitstream. - * Table 82 of SMPTE 421M after processing up to POSTPROC by - * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock - * layer. - *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo) -{ - vc1_Status status = VC1_STATUS_OK; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_FIELDTX)) != VC1_STATUS_OK) - { - return status; - } - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK) - { - return status; - } - - if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8)) - { - VC1_GET_BITS9(1, picLayerHeader->CONDOVER); - if (picLayerHeader->CONDOVER) - { - VC1_GET_BITS9(1, picLayerHeader->CONDOVER); - if (! picLayerHeader->CONDOVER) - picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL; - else - { - picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, - md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK) - { - return status; - } - } - } - else - picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - if (picLayerHeader->TRANSACFRM) - { - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - picLayerHeader->TRANSACFRM += 2; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); - if (picLayerHeader->TRANSACFRM2) - { - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); - picLayerHeader->TRANSACFRM2 += 2; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); - - status = vc1_VOPDQuant(ctxt, pInfo); - - /* Skip parsing of macroblock layer. 
*/ - - return status; -} - -/*------------------------------------------------------------------------------ - * Parse picture layer. This function parses interlace I or BI field for - * advanced profile bitstream. - * Table 87 of SMPTE 421M after processing up to BFRACTION by - * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock - * layer. - *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo) -{ - uint32_t tempValue; - vc1_Status status = VC1_STATUS_OK; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - - // Reset MVMODE when the second field is an I picture - // to avoid carrying forward the mvmode values from previous field - // especially the intensity compensation value - picLayerHeader->MVMODE = 0; - - VC1_GET_BITS9(5, picLayerHeader->PQINDEX); - if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) { - DEB("Error parsing I field \n"); - return status; - } - - if (picLayerHeader->PQINDEX <= 8) - { - VC1_GET_BITS9(1, picLayerHeader->HALFQP); - } - else - picLayerHeader->HALFQP = 0; - - if (md->QUANTIZER == 1) { - VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); - picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; - } - - if (md->POSTPROCFLAG == 1) - VC1_GET_BITS9(2, tempValue); /* POSTPROC. */ - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, (md->heightMB+1)/2, BPP_ACPRED)) != - VC1_STATUS_OK) - { - DEB("Error parsing I field \n"); - return status; - } - - if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8)) - { - VC1_GET_BITS9(1, picLayerHeader->CONDOVER); - if (picLayerHeader->CONDOVER) - { - VC1_GET_BITS9(1, picLayerHeader->CONDOVER); - if (! picLayerHeader->CONDOVER) - picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL; - else - { - picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, - (md->heightMB+1)/2, BPP_OVERFLAGS)) != - VC1_STATUS_OK) - { - DEB("Error parsing I field \n"); - return status; - } - } - } - else - picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - if (picLayerHeader->TRANSACFRM) - { - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - picLayerHeader->TRANSACFRM += 2; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); - if (picLayerHeader->TRANSACFRM2) - { - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); - picLayerHeader->TRANSACFRM2 += 2; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); - - status = vc1_VOPDQuant(ctxt, pInfo); - if (status != VC1_STATUS_OK) { - DEB("Error parsing I field \n"); - return status; - } - - /* Skip parsing of macroblock layer. */ - - return status; -} +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for progressive I or BI picture in +// advanced profile bitstream. +// +*/ + +#include "vc1parse.h" +#include "viddec_fw_debug.h" +/*------------------------------------------------------------------------------ + * Parse picture layer. 
This function parses progressive I or BI picture for + * advanced profile bitstream. + * Table 18 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK) + { + return status; + } + + if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8)) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (picLayerHeader->CONDOVER) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (! picLayerHeader->CONDOVER) + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL; + else + { + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, + md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK) + { + return status; + } + } + } + else + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + if (picLayerHeader->TRANSACFRM2) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + picLayerHeader->TRANSACFRM2 += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + status = vc1_VOPDQuant(ctxt, pInfo); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace I or BI frame for + * advanced profile bitstream. + * Table 82 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_FIELDTX)) != VC1_STATUS_OK) + { + return status; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK) + { + return status; + } + + if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8)) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (picLayerHeader->CONDOVER) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (! 
picLayerHeader->CONDOVER) + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL; + else + { + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, + md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK) + { + return status; + } + } + } + else + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + if (picLayerHeader->TRANSACFRM2) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + picLayerHeader->TRANSACFRM2 += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + status = vc1_VOPDQuant(ctxt, pInfo); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace I or BI field for + * advanced profile bitstream. + * Table 87 of SMPTE 421M after processing up to BFRACTION by + * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + // Reset MVMODE when the second field is an I picture + // to avoid carrying forward the mvmode values from previous field + // especially the intensity compensation value + picLayerHeader->MVMODE = 0; + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) { + DEB("Error parsing I field \n"); + return status; + } + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else + picLayerHeader->HALFQP = 0; + + if (md->QUANTIZER == 1) { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + if (md->POSTPROCFLAG == 1) + VC1_GET_BITS9(2, tempValue); /* POSTPROC. */ + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, (md->heightMB+1)/2, BPP_ACPRED)) != + VC1_STATUS_OK) + { + DEB("Error parsing I field \n"); + return status; + } + + if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8)) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (picLayerHeader->CONDOVER) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (! 
picLayerHeader->CONDOVER) + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL; + else + { + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, + (md->heightMB+1)/2, BPP_OVERFLAGS)) != + VC1_STATUS_OK) + { + DEB("Error parsing I field \n"); + return status; + } + } + } + else + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + if (picLayerHeader->TRANSACFRM2) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + picLayerHeader->TRANSACFRM2 += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + status = vc1_VOPDQuant(ctxt, pInfo); + if (status != VC1_STATUS_OK) { + DEB("Error parsing I field \n"); + return status; + } + + /* Skip parsing of macroblock layer. */ + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c index 7cbcc34..55373b4 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c @@ -1,82 +1,82 @@ -/* /////////////////////////////////////////////////////////////////////// -// -// INTEL CORPORATION PROPRIETARY INFORMATION -// This software is supplied under the terms of a license agreement or -// nondisclosure agreement with Intel Corporation and may not be copied -// or disclosed except in accordance with the terms of that agreement. -// Copyright (c) 2008 Intel Corporation. All Rights Reserved. -// -// Description: Parses VC-1 syntax elements MVRANGE and DMVRANGE. -// -*/ - -#include "vc1parse.h" - -/*------------------------------------------------------------------------------ - * Parse syntax element MVRANGE, which exists for main and advanced profiles. - *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo) -{ - vc1_Status status = VC1_STATUS_OK; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - - if (md->EXTENDED_MV == 1) - { - VC1_GET_BITS9(1, picLayerHeader->MVRANGE); - if (picLayerHeader->MVRANGE) - { - VC1_GET_BITS9(1, picLayerHeader->MVRANGE); - if (picLayerHeader->MVRANGE) - { - VC1_GET_BITS9(1, picLayerHeader->MVRANGE); - picLayerHeader->MVRANGE += 1; - } - picLayerHeader->MVRANGE += 1; - } - } - else - picLayerHeader->MVRANGE = 0; - - return status; -} - -/*------------------------------------------------------------------------------ - * Parse syntax element DMVRANGE. 
- *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo) -{ - vc1_Status status = VC1_STATUS_OK; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - - if (md->EXTENDED_DMV == 1) - { - VC1_GET_BITS9(1, picLayerHeader->DMVRANGE); - if (picLayerHeader->DMVRANGE == 0) - picLayerHeader->DMVRANGE = VC1_DMVRANGE_NONE; - else - { - VC1_GET_BITS9(1, picLayerHeader->DMVRANGE); - if (picLayerHeader->DMVRANGE == 0) - picLayerHeader->DMVRANGE = VC1_DMVRANGE_HORIZONTAL_RANGE; - else - { - VC1_GET_BITS9(1, picLayerHeader->DMVRANGE); - if (picLayerHeader->DMVRANGE == 0) - picLayerHeader->DMVRANGE = VC1_DMVRANGE_VERTICAL_RANGE; - else - { - picLayerHeader->DMVRANGE = - VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE; - } - } - } - } - - return status; -} +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 syntax elements MVRANGE and DMVRANGE. +// +*/ + +#include "vc1parse.h" + +/*------------------------------------------------------------------------------ + * Parse syntax element MVRANGE, which exists for main and advanced profiles. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if (md->EXTENDED_MV == 1) + { + VC1_GET_BITS9(1, picLayerHeader->MVRANGE); + if (picLayerHeader->MVRANGE) + { + VC1_GET_BITS9(1, picLayerHeader->MVRANGE); + if (picLayerHeader->MVRANGE) + { + VC1_GET_BITS9(1, picLayerHeader->MVRANGE); + picLayerHeader->MVRANGE += 1; + } + picLayerHeader->MVRANGE += 1; + } + } + else + picLayerHeader->MVRANGE = 0; + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse syntax element DMVRANGE. 
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if (md->EXTENDED_DMV == 1) + { + VC1_GET_BITS9(1, picLayerHeader->DMVRANGE); + if (picLayerHeader->DMVRANGE == 0) + picLayerHeader->DMVRANGE = VC1_DMVRANGE_NONE; + else + { + VC1_GET_BITS9(1, picLayerHeader->DMVRANGE); + if (picLayerHeader->DMVRANGE == 0) + picLayerHeader->DMVRANGE = VC1_DMVRANGE_HORIZONTAL_RANGE; + else + { + VC1_GET_BITS9(1, picLayerHeader->DMVRANGE); + if (picLayerHeader->DMVRANGE == 0) + picLayerHeader->DMVRANGE = VC1_DMVRANGE_VERTICAL_RANGE; + else + { + picLayerHeader->DMVRANGE = + VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE; + } + } + } + } + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c index fa9c3c7..2b1c75a 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c @@ -1,403 +1,405 @@ -/* /////////////////////////////////////////////////////////////////////// -// -// INTEL CORPORATION PROPRIETARY INFORMATION -// This software is supplied under the terms of a license agreement or -// nondisclosure agreement with Intel Corporation and may not be copied -// or disclosed except in accordance with the terms of that agreement. -// Copyright (c) 2008 Intel Corporation. All Rights Reserved. -// -// Description: Parses VC-1 picture layer for advanced profile. -// -*/ - -#include "vc1parse.h" -#include "viddec_fw_debug.h" - -/*------------------------------------------------------------------------------ - * Parse picture layer. This function parses the picture header for advanced - * profile down to POSTPROC syntax element. - * Table 18 of SMPTE 421M for progressive I or BI picture. - * Table 20 of SMPTE 421M for progressive P picture. - * Table 22 of SMPTE 421M for progressive B picture. - * Table 23 of SMPTE 421M for skipped picture. - * Table 82 of SMPTE 421M for interlace I or BI frame. - * Table 83 of SMPTE 421M for interlace P frame. - * Table 84 of SMPTE 421M for interlace B frame. 
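 * (The PTYPE element parsed below is a unary-style code:
 * 0 -> P, 10 -> B, 110 -> I, 1110 -> BI, 1111 -> skipped frame.
 * A standalone sketch of that mapping, assuming a hypothetical
 * get_bit() helper in place of VC1_GET_BITS9, follows.) */

/* Hypothetical illustration only; not part of this parser. */
enum ptype_sketch { PTS_P, PTS_B, PTS_I, PTS_BI, PTS_SKIPPED };

static enum ptype_sketch decode_ptype_sketch(int (*get_bit)(void))
{
    if (!get_bit()) return PTS_P;       /* 0    */
    if (!get_bit()) return PTS_B;       /* 10   */
    if (!get_bit()) return PTS_I;       /* 110  */
    if (!get_bit()) return PTS_BI;      /* 1110 */
    return PTS_SKIPPED;                 /* 1111 */
}

/* Doc comment resumes: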
- *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo) -{ - uint32_t i = 0; - uint32_t tempValue; - vc1_Status status = VC1_STATUS_OK; - uint32_t number_of_pan_scan_window; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - - if (md->INTERLACE == 1) - { - VC1_GET_BITS9(1, picLayerHeader->FCM); - if (picLayerHeader->FCM) - { - VC1_GET_BITS9(1, picLayerHeader->FCM); - if (picLayerHeader->FCM) - { - picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE; - return VC1_STATUS_PARSE_ERROR; - } - else - picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE; - } - else - picLayerHeader->FCM = VC1_FCM_PROGRESSIVE; - } - else - picLayerHeader->FCM = VC1_FCM_PROGRESSIVE; - - - VC1_GET_BITS9(1, picLayerHeader->PTYPE); - if (picLayerHeader->PTYPE) - { - VC1_GET_BITS9(1, picLayerHeader->PTYPE); - if (picLayerHeader->PTYPE) - { - VC1_GET_BITS9(1, picLayerHeader->PTYPE); - if (picLayerHeader->PTYPE) - { - VC1_GET_BITS9(1, picLayerHeader->PTYPE); - if (picLayerHeader->PTYPE) - picLayerHeader->PTYPE = VC1_SKIPPED_FRAME; - else - picLayerHeader->PTYPE = VC1_BI_FRAME; - } - else - picLayerHeader->PTYPE = VC1_I_FRAME; - } - else - picLayerHeader->PTYPE = VC1_B_FRAME; - } - else - picLayerHeader->PTYPE = VC1_P_FRAME; - - if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME) - { - if (md->TFCNTRFLAG) - { - VC1_GET_BITS9(8, picLayerHeader->TFCNTR); /* TFCNTR. */ - } - } - - if (md->PULLDOWN) - { - if ((md->INTERLACE == 0) || (md->PSF == 1)) - { - VC1_GET_BITS9(2, picLayerHeader->RPTFRM); - } - else - { - VC1_GET_BITS9(1, picLayerHeader->TFF); - VC1_GET_BITS9(1, picLayerHeader->RFF); - } - } - - if (md->PANSCAN_FLAG == 1) - { - VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT); /* PS_PRESENT. */ - if (picLayerHeader->PS_PRESENT == 1) - { - if ((md->INTERLACE == 1) && - (md->PSF == 0)) - { - if (md->PULLDOWN == 1) - number_of_pan_scan_window = 2 + picLayerHeader->RFF; - else - number_of_pan_scan_window = 2; - } - else - { - if (md->PULLDOWN == 1) - number_of_pan_scan_window = 1 + picLayerHeader->RPTFRM; - else - number_of_pan_scan_window = 1; - } - picLayerHeader->number_of_pan_scan_window = number_of_pan_scan_window; - - for (i = 0; i < number_of_pan_scan_window; i++) - { - VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */ - VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */ - VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */ - VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. */ - } - } - } - - if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME) - { - VC1_GET_BITS9(1, picLayerHeader->RNDCTRL); - md->RNDCTRL = picLayerHeader->RNDCTRL; - - if ((md->INTERLACE == 1) || - (picLayerHeader->FCM != VC1_FCM_PROGRESSIVE)) - { - VC1_GET_BITS9(1, picLayerHeader->UVSAMP); - } - - if ((md->FINTERPFLAG == 1) && - (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE)) - { - VC1_GET_BITS9(1, tempValue); /* INTERPFRM. 
*/ - } - - if ((picLayerHeader->PTYPE == VC1_B_FRAME) && - (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE)) - { - if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, - &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) - != VC1_STATUS_OK) - { - return status; - } - } - - VC1_GET_BITS9(5, picLayerHeader->PQINDEX); - if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) - return status; - - if (picLayerHeader->PQINDEX <= 8) - { - VC1_GET_BITS9(1, picLayerHeader->HALFQP); - } - else - picLayerHeader->HALFQP = 0; - - if (md->QUANTIZER == 1) - { - VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); - picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; - } - - if (md->POSTPROCFLAG == 1) - { - VC1_GET_BITS9(2, picLayerHeader->POSTPROC); - } - } - - return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo); -} - -/*------------------------------------------------------------------------------ - * Parse picture layer. This function parses the picture header for advanced - * profile down to BFRACTION syntax element. - * Table 85 of SMPTE 421M. - *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_ParseFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) -{ - uint32_t i = 0; - vc1_Status status = VC1_STATUS_OK; - uint32_t number_of_pan_scan_window; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - - VC1_GET_BITS9(1, picLayerHeader->FCM); - if (picLayerHeader->FCM) - { - VC1_GET_BITS9(1, picLayerHeader->FCM); - if (picLayerHeader->FCM) - picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE; - else - picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE; - } - else - picLayerHeader->FCM = VC1_FCM_PROGRESSIVE; - if (picLayerHeader->FCM != VC1_FCM_FIELD_INTERLACE) - return VC1_STATUS_PARSE_ERROR; - - VC1_GET_BITS9(3, picLayerHeader->FPTYPE); - if (picLayerHeader->FPTYPE == 0) - { - picLayerHeader->PTypeField1 = VC1_I_FRAME; - picLayerHeader->PTypeField2 = VC1_I_FRAME; - } - else if (picLayerHeader->FPTYPE == 1) - { - picLayerHeader->PTypeField1 = VC1_I_FRAME; - picLayerHeader->PTypeField2 = VC1_P_FRAME; - } - else if (picLayerHeader->FPTYPE == 2) - { - picLayerHeader->PTypeField1 = VC1_P_FRAME; - picLayerHeader->PTypeField2 = VC1_I_FRAME; - } - else if (picLayerHeader->FPTYPE == 3) - { - picLayerHeader->PTypeField1 = VC1_P_FRAME; - picLayerHeader->PTypeField2 = VC1_P_FRAME; - } - else if (picLayerHeader->FPTYPE == 4) - { - picLayerHeader->PTypeField1 = VC1_B_FRAME; - picLayerHeader->PTypeField2 = VC1_B_FRAME; - } - else if (picLayerHeader->FPTYPE == 5) - { - picLayerHeader->PTypeField1 = VC1_B_FRAME; - picLayerHeader->PTypeField2 = VC1_BI_FRAME; - } - else if (picLayerHeader->FPTYPE == 6) - { - picLayerHeader->PTypeField1 = VC1_BI_FRAME; - picLayerHeader->PTypeField2 = VC1_B_FRAME; - } - else if (picLayerHeader->FPTYPE == 7) - { - picLayerHeader->PTypeField1 = VC1_BI_FRAME; - picLayerHeader->PTypeField2 = VC1_BI_FRAME; - } - - if (md->TFCNTRFLAG) - { - VC1_GET_BITS9(8, picLayerHeader->TFCNTR); - } - - if (md->PULLDOWN == 1) - { - if (md->PSF == 1) - { - VC1_GET_BITS9(2, picLayerHeader->RPTFRM); - } - else - { - VC1_GET_BITS9(1, picLayerHeader->TFF); - VC1_GET_BITS9(1, picLayerHeader->RFF); - } - } else - picLayerHeader->TFF = 1; - - if (md->PANSCAN_FLAG == 1) - { - VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT); - if (picLayerHeader->PS_PRESENT) - { - if (md->PULLDOWN) - number_of_pan_scan_window = 2 + picLayerHeader->RFF; - else - number_of_pan_scan_window = 2; - 
picLayerHeader->number_of_pan_scan_window =number_of_pan_scan_window; - - for (i = 0; i < number_of_pan_scan_window; i++) - { - VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */ - VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */ - VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */ - VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. */ - } - } - } - VC1_GET_BITS9(1, md->RNDCTRL); - -#ifdef VBP - picLayerHeader->RNDCTRL = md->RNDCTRL; -#endif - - VC1_GET_BITS9(1, picLayerHeader->UVSAMP); - - if ((md->REFDIST_FLAG == 1) && (picLayerHeader->FPTYPE <= 3)) - { - int32_t tmp; - if ((status = vc1_DecodeHuffmanOne(ctxt, &tmp, - VC1_REFDIST_TBL)) != VC1_STATUS_OK) - { - return status; - } - md->REFDIST = tmp; - } - - if ((picLayerHeader->FPTYPE >= 4) && (picLayerHeader->FPTYPE <= 7)) - { - if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, - &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != - VC1_STATUS_OK) - { - return status; - } - } - - if (picLayerHeader->CurrField == 0) - { - picLayerHeader->PTYPE = picLayerHeader->PTypeField1; - picLayerHeader->BottomField = (uint8_t) (1 - picLayerHeader->TFF); - } - else - { - picLayerHeader->BottomField = (uint8_t) (picLayerHeader->TFF); - picLayerHeader->PTYPE = picLayerHeader->PTypeField2; - } - - return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo); -} - -/*------------------------------------------------------------------------------ - * Parse picture layer. This function calls the appropriate function to further - * parse the picture header for advanced profile down to macroblock layer. - *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) -{ - vc1_Status status = VC1_STATUS_PARSE_ERROR; - - if (pInfo->picLayerHeader.FCM == VC1_FCM_PROGRESSIVE) - { - if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) || - (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME)) - { - status = vc1_ParsePictureHeader_ProgressiveIpicture_Adv(ctxt, pInfo); - } - else if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) - status = vc1_ParsePictureHeader_ProgressivePpicture_Adv(ctxt, pInfo); - else if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) - status = vc1_ParsePictureHeader_ProgressiveBpicture_Adv(ctxt, pInfo); - else if (pInfo->picLayerHeader.PTYPE == VC1_SKIPPED_FRAME) - status = VC1_STATUS_OK; - } - else if (pInfo->picLayerHeader.FCM == VC1_FCM_FRAME_INTERLACE) - { - if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) || - (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME)) - { - status = vc1_ParsePictureHeader_InterlaceIpicture_Adv(ctxt, pInfo); - } - else if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) - status = vc1_ParsePictureHeader_InterlacePpicture_Adv(ctxt, pInfo); - else if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) - status = vc1_ParsePictureHeader_InterlaceBpicture_Adv(ctxt, pInfo); - else if (pInfo->picLayerHeader.PTYPE == VC1_SKIPPED_FRAME) - status = VC1_STATUS_OK; - } - else if (pInfo->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) - { - int ptype; - if( pInfo->picLayerHeader.CurrField == 0) - ptype = pInfo->picLayerHeader.PTypeField1; - else - ptype = pInfo->picLayerHeader.PTypeField2; - - if ((ptype == VC1_I_FRAME) || - (ptype == VC1_BI_FRAME)) - { - status = vc1_ParseFieldHeader_InterlaceIpicture_Adv(ctxt, pInfo); - } - else if (ptype == VC1_P_FRAME) - status = vc1_ParseFieldHeader_InterlacePpicture_Adv(ctxt, pInfo); - else if 
(ptype == VC1_B_FRAME) - status = vc1_ParseFieldHeader_InterlaceBpicture_Adv(ctxt, pInfo); - else if (ptype == VC1_SKIPPED_FRAME) - status = VC1_STATUS_OK; - } - - return status; -} +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for advanced profile. +// +*/ + +#include "vc1parse.h" +#include "viddec_fw_debug.h" + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses the picture header for advanced + * profile down to POSTPROC syntax element. + * Table 18 of SMPTE 421M for progressive I or BI picture. + * Table 20 of SMPTE 421M for progressive P picture. + * Table 22 of SMPTE 421M for progressive B picture. + * Table 23 of SMPTE 421M for skipped picture. + * Table 82 of SMPTE 421M for interlace I or BI frame. + * Table 83 of SMPTE 421M for interlace P frame. + * Table 84 of SMPTE 421M for interlace B frame. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint32_t i = 0; + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + uint32_t number_of_pan_scan_window; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if (md->INTERLACE == 1) + { + VC1_GET_BITS9(1, picLayerHeader->FCM); + if (picLayerHeader->FCM) + { + VC1_GET_BITS9(1, picLayerHeader->FCM); + if (picLayerHeader->FCM) + { + picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE; + return VC1_STATUS_PARSE_ERROR; + } + else + picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE; + } + else + picLayerHeader->FCM = VC1_FCM_PROGRESSIVE; + } + else + picLayerHeader->FCM = VC1_FCM_PROGRESSIVE; + + + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE) + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE) + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE) + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE) + picLayerHeader->PTYPE = VC1_SKIPPED_FRAME; + else + picLayerHeader->PTYPE = VC1_BI_FRAME; + } + else + picLayerHeader->PTYPE = VC1_I_FRAME; + } + else + picLayerHeader->PTYPE = VC1_B_FRAME; + } + else + picLayerHeader->PTYPE = VC1_P_FRAME; + + if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME) + { + if (md->TFCNTRFLAG) + { + VC1_GET_BITS9(8, picLayerHeader->TFCNTR); /* TFCNTR. */ + } + } + + if (md->PULLDOWN) + { + if ((md->INTERLACE == 0) || (md->PSF == 1)) + { + VC1_GET_BITS9(2, picLayerHeader->RPTFRM); + } + else + { + VC1_GET_BITS9(1, picLayerHeader->TFF); + VC1_GET_BITS9(1, picLayerHeader->RFF); + } + } + + if (md->PANSCAN_FLAG == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT); /* PS_PRESENT. 
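The branch below sizes the pan-scan window list: interlaced, non-PSF
content carries two windows per frame plus one more when pulldown
repeats a field (RFF); otherwise one window plus RPTFRM repeats under
pulldown. A pure-function sketch of that count, with every input passed
as a hypothetical explicit parameter, follows. */

/* Hypothetical illustration only; not part of this parser. */
static unsigned pan_scan_window_count_sketch(int interlace, int psf,
                                             int pulldown,
                                             unsigned rff, unsigned rptfrm)
{
    if (interlace && !psf)
        return pulldown ? 2 + rff : 2;     /* two fields per frame */
    return pulldown ? 1 + rptfrm : 1;      /* single frame window  */
}

/* PS_PRESENT, continued: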
*/ + if (picLayerHeader->PS_PRESENT == 1) + { + if ((md->INTERLACE == 1) && + (md->PSF == 0)) + { + if (md->PULLDOWN == 1) + number_of_pan_scan_window = 2 + picLayerHeader->RFF; + else + number_of_pan_scan_window = 2; + } + else + { + if (md->PULLDOWN == 1) + number_of_pan_scan_window = 1 + picLayerHeader->RPTFRM; + else + number_of_pan_scan_window = 1; + } + picLayerHeader->number_of_pan_scan_window = number_of_pan_scan_window; + + for (i = 0; i < number_of_pan_scan_window; i++) + { + VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */ + VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */ + VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */ + VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. */ + } + } + } + + if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME) + { + VC1_GET_BITS9(1, picLayerHeader->RNDCTRL); + md->RNDCTRL = picLayerHeader->RNDCTRL; + + if ((md->INTERLACE == 1) || + (picLayerHeader->FCM != VC1_FCM_PROGRESSIVE)) + { + VC1_GET_BITS9(1, picLayerHeader->UVSAMP); + } + + if ((md->FINTERPFLAG == 1) && + (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE)) + { + VC1_GET_BITS9(1, tempValue); /* INTERPFRM. */ + } + + if ((picLayerHeader->PTYPE == VC1_B_FRAME) && + (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE)) + { + if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) + != VC1_STATUS_OK) + { + return status; + } + } + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else + picLayerHeader->HALFQP = 0; + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + if (md->POSTPROCFLAG == 1) + { + VC1_GET_BITS9(2, picLayerHeader->POSTPROC); + } + } + + return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo); +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses the picture header for advanced + * profile down to BFRACTION syntax element. + * Table 85 of SMPTE 421M. 
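 * (The FPTYPE cascade below expands a 3-bit code into the two field
 * picture types; its eight if/else arms are equivalent to one lookup
 * table. A sketch of that table form, using hypothetical codes
 * 0=I, 1=P, 2=B, 3=BI in place of the VC1_*_FRAME constants, follows.) */

/* Hypothetical illustration only; not part of this parser. */
static const unsigned char fptype_pair_sketch[8][2] = {
    {0, 0}, {0, 1}, {1, 0}, {1, 1},    /* FPTYPE 0..3: I/I, I/P, P/I, P/P     */
    {2, 2}, {2, 3}, {3, 2}, {3, 3},    /* FPTYPE 4..7: B/B, B/BI, BI/B, BI/BI */
};

/* Doc comment resumes: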
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParseFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint32_t i = 0; + vc1_Status status = VC1_STATUS_OK; + uint32_t number_of_pan_scan_window; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + VC1_GET_BITS9(1, picLayerHeader->FCM); + if (picLayerHeader->FCM) + { + VC1_GET_BITS9(1, picLayerHeader->FCM); + if (picLayerHeader->FCM) + picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE; + else + picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE; + } + else + picLayerHeader->FCM = VC1_FCM_PROGRESSIVE; + if (picLayerHeader->FCM != VC1_FCM_FIELD_INTERLACE) + return VC1_STATUS_PARSE_ERROR; + + VC1_GET_BITS9(3, picLayerHeader->FPTYPE); + if (picLayerHeader->FPTYPE == 0) + { + picLayerHeader->PTypeField1 = VC1_I_FRAME; + picLayerHeader->PTypeField2 = VC1_I_FRAME; + } + else if (picLayerHeader->FPTYPE == 1) + { + picLayerHeader->PTypeField1 = VC1_I_FRAME; + picLayerHeader->PTypeField2 = VC1_P_FRAME; + } + else if (picLayerHeader->FPTYPE == 2) + { + picLayerHeader->PTypeField1 = VC1_P_FRAME; + picLayerHeader->PTypeField2 = VC1_I_FRAME; + } + else if (picLayerHeader->FPTYPE == 3) + { + picLayerHeader->PTypeField1 = VC1_P_FRAME; + picLayerHeader->PTypeField2 = VC1_P_FRAME; + } + else if (picLayerHeader->FPTYPE == 4) + { + picLayerHeader->PTypeField1 = VC1_B_FRAME; + picLayerHeader->PTypeField2 = VC1_B_FRAME; + } + else if (picLayerHeader->FPTYPE == 5) + { + picLayerHeader->PTypeField1 = VC1_B_FRAME; + picLayerHeader->PTypeField2 = VC1_BI_FRAME; + } + else if (picLayerHeader->FPTYPE == 6) + { + picLayerHeader->PTypeField1 = VC1_BI_FRAME; + picLayerHeader->PTypeField2 = VC1_B_FRAME; + } + else if (picLayerHeader->FPTYPE == 7) + { + picLayerHeader->PTypeField1 = VC1_BI_FRAME; + picLayerHeader->PTypeField2 = VC1_BI_FRAME; + } + + if (md->TFCNTRFLAG) + { + VC1_GET_BITS9(8, picLayerHeader->TFCNTR); + } + + if (md->PULLDOWN == 1) + { + if (md->PSF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->RPTFRM); + } + else + { + VC1_GET_BITS9(1, picLayerHeader->TFF); + VC1_GET_BITS9(1, picLayerHeader->RFF); + } + } else + picLayerHeader->TFF = 1; + + if (md->PANSCAN_FLAG == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT); + if (picLayerHeader->PS_PRESENT) + { + if (md->PULLDOWN) + number_of_pan_scan_window = 2 + picLayerHeader->RFF; + else + number_of_pan_scan_window = 2; + picLayerHeader->number_of_pan_scan_window =number_of_pan_scan_window; + + for (i = 0; i < number_of_pan_scan_window; i++) + { + VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */ + VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */ + VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */ + VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. 
*/ + } + } + } + VC1_GET_BITS9(1, md->RNDCTRL); + +#ifdef VBP + picLayerHeader->RNDCTRL = md->RNDCTRL; +#endif + + VC1_GET_BITS9(1, picLayerHeader->UVSAMP); + + if ((md->REFDIST_FLAG == 1) && (picLayerHeader->FPTYPE <= 3)) + { + int32_t tmp; + if ((status = vc1_DecodeHuffmanOne(ctxt, &tmp, + VC1_REFDIST_TBL)) != VC1_STATUS_OK) + { + return status; + } + md->REFDIST = tmp; + } else if (md->REFDIST_FLAG == 0) { + md->REFDIST = 0; + } + + if ((picLayerHeader->FPTYPE >= 4) && (picLayerHeader->FPTYPE <= 7)) + { + if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != + VC1_STATUS_OK) + { + return status; + } + } + + if (picLayerHeader->CurrField == 0) + { + picLayerHeader->PTYPE = picLayerHeader->PTypeField1; + picLayerHeader->BottomField = (uint8_t) (1 - picLayerHeader->TFF); + } + else + { + picLayerHeader->BottomField = (uint8_t) (picLayerHeader->TFF); + picLayerHeader->PTYPE = picLayerHeader->PTypeField2; + } + + return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo); +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function calls the appropriate function to further + * parse the picture header for advanced profile down to macroblock layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_PARSE_ERROR; + + if (pInfo->picLayerHeader.FCM == VC1_FCM_PROGRESSIVE) + { + if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) || + (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME)) + { + status = vc1_ParsePictureHeader_ProgressiveIpicture_Adv(ctxt, pInfo); + } + else if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + status = vc1_ParsePictureHeader_ProgressivePpicture_Adv(ctxt, pInfo); + else if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) + status = vc1_ParsePictureHeader_ProgressiveBpicture_Adv(ctxt, pInfo); + else if (pInfo->picLayerHeader.PTYPE == VC1_SKIPPED_FRAME) + status = VC1_STATUS_OK; + } + else if (pInfo->picLayerHeader.FCM == VC1_FCM_FRAME_INTERLACE) + { + if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) || + (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME)) + { + status = vc1_ParsePictureHeader_InterlaceIpicture_Adv(ctxt, pInfo); + } + else if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + status = vc1_ParsePictureHeader_InterlacePpicture_Adv(ctxt, pInfo); + else if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) + status = vc1_ParsePictureHeader_InterlaceBpicture_Adv(ctxt, pInfo); + else if (pInfo->picLayerHeader.PTYPE == VC1_SKIPPED_FRAME) + status = VC1_STATUS_OK; + } + else if (pInfo->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) + { + int ptype; + if( pInfo->picLayerHeader.CurrField == 0) + ptype = pInfo->picLayerHeader.PTypeField1; + else + ptype = pInfo->picLayerHeader.PTypeField2; + + if ((ptype == VC1_I_FRAME) || + (ptype == VC1_BI_FRAME)) + { + status = vc1_ParseFieldHeader_InterlaceIpicture_Adv(ctxt, pInfo); + } + else if (ptype == VC1_P_FRAME) + status = vc1_ParseFieldHeader_InterlacePpicture_Adv(ctxt, pInfo); + else if (ptype == VC1_B_FRAME) + status = vc1_ParseFieldHeader_InterlaceBpicture_Adv(ctxt, pInfo); + else if (ptype == VC1_SKIPPED_FRAME) + status = VC1_STATUS_OK; + } + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c index ba9c756..f57c61d 100644 --- 
a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c @@ -92,11 +92,10 @@ vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInf VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); } -#ifdef VBP else +#ifdef VBP picLayerHeader->MVMODE2 = 0; -#else - else +#else picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; #endif diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c index 144c138..ff81282 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c @@ -1,368 +1,368 @@ -/* /////////////////////////////////////////////////////////////////////// -// -// INTEL CORPORATION PROPRIETARY INFORMATION -// This software is supplied under the terms of a license agreement or -// nondisclosure agreement with Intel Corporation and may not be copied -// or disclosed except in accordance with the terms of that agreement. -// Copyright (c) 2008 Intel Corporation. All Rights Reserved. -// -// Description: Parses VC-1 picture layer for progressive P picture in advanced -// profile bitstream. -// -*/ - -#include "vc1parse.h" -#include "viddec_fw_debug.h" -/*------------------------------------------------------------------------------ - * Parse picture layer. This function parses progressive P picture for advanced - * profile bitstream. - * Table 20 of SMPTE 421M after processing up to POSTPROC by - * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock - * layer. - *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo) -{ - uint8_t bit_count; - const uint8_t *table; - vc1_Status status = VC1_STATUS_OK; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - - /* MVRANGE. 
*/ - if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - if (picLayerHeader->PQUANT > 12) - table = VC1_MVMODE_LOW_TBL; - else - table = VC1_MVMODE_HIGH_TBL; - - bit_count = 0; - VC1_GET_BITS9(1, picLayerHeader->MVMODE); - while ((picLayerHeader->MVMODE == 0) && (bit_count < 3)) - { - VC1_GET_BITS9(1, picLayerHeader->MVMODE); - bit_count++; - } - if (bit_count == 3) - bit_count += picLayerHeader->MVMODE; - picLayerHeader->MVMODE = table[bit_count]; - - if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) - { - bit_count = 0; - VC1_GET_BITS9(1, picLayerHeader->MVMODE2); - while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2)) - { - VC1_GET_BITS9(1, picLayerHeader->MVMODE2); - bit_count++; - } - if (bit_count == 2 && picLayerHeader->MVMODE2 == 0) - bit_count++; - picLayerHeader->MVMODE2 = table[bit_count]; - VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); - VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); - md->LUMSCALE2 = picLayerHeader->LUMSCALE; - md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; - } - else -#ifdef VBP - picLayerHeader->MVMODE2 = 0; -#else - picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; -#endif - - if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) || - ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) && - (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV))) - { - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_MVTYPEMB)) != - VC1_STATUS_OK) - { - return status; - } - } - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) - { - return status; - } - - VC1_GET_BITS9(2, picLayerHeader->MVTAB); - VC1_GET_BITS9(2, picLayerHeader->CBPTAB); - - if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - if (md->VSTRANSFORM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TTMBF); - if (picLayerHeader->TTMBF == 1) - { - VC1_GET_BITS9(2, picLayerHeader->TTFRM); - } - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - if (picLayerHeader->TRANSACFRM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - picLayerHeader->TRANSACFRM += 2; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); - - /* Skip parsing of macroblock layer. */ - - return status; -} - -/*------------------------------------------------------------------------------ - * Parse picture layer. This function parses interlace P frame for advanced - * profile bitstream. - * Table 83 of SMPTE 421M after processing up to POSTPROC by - * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock - * layer. - *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo) -{ - vc1_Status status = VC1_STATUS_OK; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - - /* MVRANGE. */ - if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - /* DMVRANGE. 
*/ - if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - VC1_GET_BITS9(1, picLayerHeader->MV4SWITCH); - - VC1_GET_BITS9(1, picLayerHeader->INTCOMP); - if (picLayerHeader->INTCOMP) - { - VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); - VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); - md->LUMSCALE2 = picLayerHeader->LUMSCALE; - md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; - } - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) - { - return status; - } - - VC1_GET_BITS9(2, picLayerHeader->MBMODETAB); - VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */ - VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ - VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */ - - if (picLayerHeader->MV4SWITCH == 1) - { - VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ - } - - if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - if (md->VSTRANSFORM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TTMBF); - if (picLayerHeader->TTMBF == 1) - { - VC1_GET_BITS9(2, picLayerHeader->TTFRM); - } - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - if (picLayerHeader->TRANSACFRM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - picLayerHeader->TRANSACFRM += 2; - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); - - /* Skip parsing of macroblock layer. */ - - return status; -} - -/*------------------------------------------------------------------------------ - * Parse picture layer. This function parses interlace P field for advanced - * profile bitstream. - * Table 88 of SMPTE 421M after processing up to BFRACTION by - * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock - * layer. - *------------------------------------------------------------------------------ - */ - -vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo) -{ - uint8_t bit_count; - const uint8_t *table; - vc1_Status status = VC1_STATUS_OK; - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - - - VC1_GET_BITS9(5, picLayerHeader->PQINDEX); - if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) - return status; - - if (picLayerHeader->PQINDEX <= 8) - { - VC1_GET_BITS9(1, picLayerHeader->HALFQP); - } - else - picLayerHeader->HALFQP = 0; - - - if (md->QUANTIZER == 1) - { - VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); - picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; - } - - if (md->POSTPROCFLAG == 1) - { - VC1_GET_BITS9(2, picLayerHeader->POSTPROC); - } - - VC1_GET_BITS9(1, picLayerHeader->NUMREF); - - if (picLayerHeader->NUMREF == 0) - { - VC1_GET_BITS9(1, picLayerHeader->REFFIELD); - } - - if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) { - DEB("Error in vc1_MVRangeDecode \n"); - return status; - } - - if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - if (picLayerHeader->PQUANT > 12) - table = VC1_MVMODE_LOW_TBL; - else - table = VC1_MVMODE_HIGH_TBL; - - bit_count = 0; - VC1_GET_BITS9(1, picLayerHeader->MVMODE); - while ((picLayerHeader->MVMODE == 0) && (bit_count < 2)) - { - VC1_GET_BITS9(1, picLayerHeader->MVMODE); - bit_count++; - } - if (bit_count == 2 && picLayerHeader->MVMODE == 0) { - VC1_GET_BITS9(1, picLayerHeader->MVMODE); - - if ( picLayerHeader->MVMODE == 1) - bit_count ++; - - bit_count++; - } - picLayerHeader->MVMODE = table[bit_count]; - - if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) - { - bit_count = 0; - 
VC1_GET_BITS9(1, picLayerHeader->MVMODE2); - while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2)) - { - VC1_GET_BITS9(1, picLayerHeader->MVMODE2); - bit_count++; - } - if (bit_count == 2 && picLayerHeader->MVMODE2 == 0) - bit_count++; - picLayerHeader->MVMODE2 = table[bit_count]; - - VC1_GET_BITS9(1, md->INTCOMPFIELD); - if (md->INTCOMPFIELD == 1) - md->INTCOMPFIELD = VC1_INTCOMP_BOTH_FIELD; - else - { - VC1_GET_BITS9(1, md->INTCOMPFIELD); - if(md->INTCOMPFIELD == 1) - md->INTCOMPFIELD = VC1_INTCOMP_BOTTOM_FIELD; - else - md->INTCOMPFIELD = VC1_INTCOMP_TOP_FIELD; - } - VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); /* LUMSCALE1. */ - VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); /* LUMSHIFT1. */ - if ( md->INTCOMPFIELD == VC1_INTCOMP_BOTTOM_FIELD ) { - md->LUMSCALE2 = picLayerHeader->LUMSCALE; - md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; - } - if (md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD) - { - VC1_GET_BITS9(6, md->LUMSCALE2); - VC1_GET_BITS9(6, md->LUMSHIFT2); - } - } - else -#ifdef VBP - picLayerHeader->MVMODE2 = 0; -#else - picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; -#endif - - VC1_GET_BITS9(3, picLayerHeader->MBMODETAB); - - if (picLayerHeader->NUMREF) - { - VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. */ - } - else - { - VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */ - } - - VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ - -#ifdef VBP - if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) -#else - if (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV) -#endif - { - VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ - } - - if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) - return status; - - if (md->VSTRANSFORM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TTMBF); - if (picLayerHeader->TTMBF == 1) - { - VC1_GET_BITS9(2, picLayerHeader->TTFRM); - } - } - - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - if (picLayerHeader->TRANSACFRM == 1) - { - VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); - picLayerHeader->TRANSACFRM += 2; - } - picLayerHeader->TRANSACFRM2 = 0; - - VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); - - /* Skip parsing of macroblock layer. */ - - return status; -} +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for progressive P picture in advanced +// profile bitstream. +// +*/ + +#include "vc1parse.h" +#include "viddec_fw_debug.h" +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses progressive P picture for advanced + * profile bitstream. + * Table 20 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint8_t bit_count; + const uint8_t *table; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + /* MVRANGE. 
*/ + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQUANT > 12) + table = VC1_MVMODE_LOW_TBL; + else + table = VC1_MVMODE_HIGH_TBL; + + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + while ((picLayerHeader->MVMODE == 0) && (bit_count < 3)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + bit_count++; + } + if (bit_count == 3) + bit_count += picLayerHeader->MVMODE; + picLayerHeader->MVMODE = table[bit_count]; + + if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) + { + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + bit_count++; + } + if (bit_count == 2 && picLayerHeader->MVMODE2 == 0) + bit_count++; + picLayerHeader->MVMODE2 = table[bit_count]; + VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); + VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); + md->LUMSCALE2 = picLayerHeader->LUMSCALE; + md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; + } + else +#ifdef VBP + picLayerHeader->MVMODE2 = 0; +#else + picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; +#endif + + if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) || + ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) && + (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV))) + { + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_MVTYPEMB)) != + VC1_STATUS_OK) + { + return status; + } + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(2, picLayerHeader->MVTAB); + VC1_GET_BITS9(2, picLayerHeader->CBPTAB); + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace P frame for advanced + * profile bitstream. + * Table 83 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + /* MVRANGE. */ + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + /* DMVRANGE. 
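The call below consumes the DMVRANGE prefix code when EXTENDED_DMV is
set: 0 -> no extended differential MV range, 10 -> horizontal only,
110 -> vertical only, 111 -> both, mirroring vc1_DMVRangeDecode() in
vc1parse_mv_com.c. A standalone sketch, assuming a hypothetical
get_bit() helper, follows. */

/* Hypothetical illustration only; not part of this parser. */
enum dmvrange_sketch { DMVS_NONE, DMVS_HORIZ, DMVS_VERT, DMVS_BOTH };

static enum dmvrange_sketch decode_dmvrange_sketch(int (*get_bit)(void))
{
    if (!get_bit()) return DMVS_NONE;   /* 0   */
    if (!get_bit()) return DMVS_HORIZ;  /* 10  */
    if (!get_bit()) return DMVS_VERT;   /* 110 */
    return DMVS_BOTH;                   /* 111 */
}

/* DMVRANGE, continued: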
*/ + if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + VC1_GET_BITS9(1, picLayerHeader->MV4SWITCH); + + VC1_GET_BITS9(1, picLayerHeader->INTCOMP); + if (picLayerHeader->INTCOMP) + { + VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); + VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); + md->LUMSCALE2 = picLayerHeader->LUMSCALE; + md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(2, picLayerHeader->MBMODETAB); + VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */ + VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ + VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */ + + if (picLayerHeader->MV4SWITCH == 1) + { + VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ + } + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace P field for advanced + * profile bitstream. + * Table 88 of SMPTE 421M after processing up to BFRACTION by + * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint8_t bit_count; + const uint8_t *table; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else + picLayerHeader->HALFQP = 0; + + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + if (md->POSTPROCFLAG == 1) + { + VC1_GET_BITS9(2, picLayerHeader->POSTPROC); + } + + VC1_GET_BITS9(1, picLayerHeader->NUMREF); + + if (picLayerHeader->NUMREF == 0) + { + VC1_GET_BITS9(1, picLayerHeader->REFFIELD); + } + + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) { + DEB("Error in vc1_MVRangeDecode \n"); + return status; + } + + if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQUANT > 12) + table = VC1_MVMODE_LOW_TBL; + else + table = VC1_MVMODE_HIGH_TBL; + + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + while ((picLayerHeader->MVMODE == 0) && (bit_count < 2)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + bit_count++; + } + if (bit_count == 2 && picLayerHeader->MVMODE == 0) { + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + + if ( picLayerHeader->MVMODE == 1) + bit_count ++; + + bit_count++; + } + picLayerHeader->MVMODE = table[bit_count]; + + if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) + { + bit_count = 0; + 
VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + bit_count++; + } + if (bit_count == 2 && picLayerHeader->MVMODE2 == 0) + bit_count++; + picLayerHeader->MVMODE2 = table[bit_count]; + + VC1_GET_BITS9(1, md->INTCOMPFIELD); + if (md->INTCOMPFIELD == 1) + md->INTCOMPFIELD = VC1_INTCOMP_BOTH_FIELD; + else + { + VC1_GET_BITS9(1, md->INTCOMPFIELD); + if(md->INTCOMPFIELD == 1) + md->INTCOMPFIELD = VC1_INTCOMP_BOTTOM_FIELD; + else + md->INTCOMPFIELD = VC1_INTCOMP_TOP_FIELD; + } + VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); /* LUMSCALE1. */ + VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); /* LUMSHIFT1. */ + if ( md->INTCOMPFIELD == VC1_INTCOMP_BOTTOM_FIELD ) { + md->LUMSCALE2 = picLayerHeader->LUMSCALE; + md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; + } + if (md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD) + { + VC1_GET_BITS9(6, md->LUMSCALE2); + VC1_GET_BITS9(6, md->LUMSHIFT2); + } + } + else +#ifdef VBP + picLayerHeader->MVMODE2 = 0; +#else + picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; +#endif + + VC1_GET_BITS9(3, picLayerHeader->MBMODETAB); + + if (picLayerHeader->NUMREF) + { + VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. */ + } + else + { + VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */ + } + + VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ + +#ifdef VBP + if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) +#else + if (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV) +#endif + { + VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ + } + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + picLayerHeader->TRANSACFRM2 = 0; + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. 
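The MVMODE/MVMODE2 reads above walk a short VLC whose leading-1
position indexes a PQUANT-dependent table (VC1_MVMODE_LOW_TBL when
PQUANT > 12, VC1_MVMODE_HIGH_TBL otherwise): 1 -> table[0],
01 -> table[1], 001 -> table[2], and a three-zero run plus one final
bit selects table[3] or table[4]. A sketch of the progressive-frame
variant of that loop, with a hypothetical table and get_bit() helper,
follows; the field path above differs only in how it caps the zero run. */

/* Hypothetical illustration only; not part of this parser. */
static int decode_mvmode_sketch(int (*get_bit)(void),
                                const unsigned char table[5])
{
    int bit_count = 0;
    int bit = get_bit();
    while (bit == 0 && bit_count < 3)
    {
        bit = get_bit();
        bit_count++;
    }
    if (bit_count == 3)
        bit_count += bit;   /* 0000 -> table[3], 0001 -> table[4] */
    return table[bit_count];
}

/* Skip parsing of macroblock layer, continued: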
*/ + + return status; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c index 6af6f09..9ddc237 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c @@ -27,6 +27,7 @@ static void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve parser->ref_frame[i].anchor[1] = 1; parser->ref_frame[i].intcomp_top = 0; parser->ref_frame[i].intcomp_bot = 0; + parser->ref_frame[i].tff=0; } parser->intcomp_top[0] = 0; @@ -90,13 +91,14 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) ret = viddec_pm_get_bits(parent, &sc, 32); #endif sc = sc & 0xFF; - parser->is_frame_start = (sc == vc1_SCFrameHeader); + parser->is_frame_start = 0; + parser->is_second_start = 0; DEB("START_CODE = %02x\n", sc); switch( sc ) { case vc1_SCSequenceHeader: { - uint32_t data=0; + uint32_t data; parser->ref_frame[0].anchor[0] = 1; parser->ref_frame[0].anchor[1] = 1; parser->ref_frame[1].anchor[0] = 1; @@ -112,7 +114,7 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) else { status = vc1_ParseRCVSequenceLayer(parent, &parser->info); - } + } parser->sc_seen = VC1_SC_SEQ; parser->sc_seen_since_last_wkld |= VC1_SC_SEQ; #ifdef VBP @@ -131,7 +133,7 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) #ifdef VBP parser->start_code = VC1_SC_EP; #endif - break; + break; } case vc1_SCFrameHeader: @@ -149,17 +151,23 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) // Clear all bits indicating data below frm header parser->sc_seen &= VC1_FRM_MASK; parser->sc_seen_since_last_wkld |= VC1_SC_FRM; - vc1_start_new_frame ( parent, parser ); + //vc1_start_new_frame ( parent, parser ); + + parser->is_frame_start = 1; + vc1_parse_emit_frame_start( parent, parser ); #ifdef VBP parser->start_code = VC1_SC_FRM; #endif - break; + break; } case vc1_SCSlice: { status = vc1_ParseSliceLayer(parent, &parser->info); parser->sc_seen_since_last_wkld |= VC1_SC_SLC; + + vc1_parse_emit_current_slice( parent, parser ); + #ifdef VBP parser->start_code = VC1_SC_SLC; #endif @@ -203,14 +211,16 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) if((parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME) || (parser->info.picLayerHeader.PTypeField2 == VC1_P_FRAME)) { - vc1_swap_intcomp(parser); + //vc1_swap_intcomp(parser); } - parser->sc_seen |= VC1_SC_FLD; parser->sc_seen_since_last_wkld |= VC1_SC_FLD; + + parser->is_second_start = 1; + vc1_parse_emit_second_field_start( parent, parser ); #ifdef VBP - parser->start_code = VC1_SC_FLD; -#endif + parser->start_code = VC1_SC_FLD; +#endif break; } @@ -247,9 +257,7 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) } } - if( vc1_is_frame_start_code( sc ) ) { - vc1_parse_emit_current_frame( parent, parser ); - } + return VIDDEC_PARSE_SUCESS; } // viddec_vc1_parse diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c index b787831..b5bba2b 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c @@ -82,513 +82,645 @@ static void translate_parser_info_to_frame_attributes(void *parent, vc1_viddec_p return; } // translate_parser_info_to_frame_attributes -void vc1_intcomp(vc1_viddec_parser_t *parser, vc1_Info *pInfo, VC1D_SPR_REGS *spr) +/* sends VIDDEC_WORKLOAD_VC1_PAST_FRAME item */ +static inline void 
vc1_send_past_ref_items(void *parent) { - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader; - uint32_t intcomp1 = 1; - uint32_t intcomp2 = 0; + viddec_workload_item_t wi; + wi.vwi_type = VIDDEC_WORKLOAD_VC1_PAST_FRAME; + wi.ref_frame.reference_id = 0; + wi.ref_frame.luma_phys_addr = 0; + wi.ref_frame.chroma_phys_addr = 0; + viddec_pm_append_workitem( parent, &wi, false ); + return; +} - // Get the intensity compensation from the bitstream - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp1, pic->LUMSCALE); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp1, pic->LUMSHIFT); +/* send future frame item */ +static inline void vc1_send_future_ref_items(void *parent) +{ + viddec_workload_item_t wi; + wi.vwi_type = VIDDEC_WORKLOAD_VC1_FUTURE_FRAME; + wi.ref_frame.reference_id = 0; + wi.ref_frame.luma_phys_addr = 0; + wi.ref_frame.chroma_phys_addr = 0; + viddec_pm_append_workitem( parent, &wi, false ); + return; +} - if(md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD) - { - intcomp2 = 1; - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp2, md->LUMSCALE2); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp2, md->LUMSHIFT2); - } +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_reorder_ref_items(void *parent) +{ + viddec_workload_item_t wi; + wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER; + wi.ref_reorder.ref_table_offset = 0; + wi.ref_reorder.ref_reorder_00010203 = 0x01010203; //put reference frame index 1 as reference index 0 + wi.ref_reorder.ref_reorder_04050607 = 0x04050607; // index 4,5,6,7 stay the same + viddec_pm_append_workitem( parent, &wi, false ); + return; +} // send_reorder_ref_items + + +/* sends VIDDEC_WORKLOAD_VC1_PAST_FRAME item */ +static inline void vc1_send_ref_fcm_items(void *parent, uint32_t past_fcm, uint32_t future_fcm) +{ + viddec_workload_item_t wi; + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_REF_FRAME_TYPE; + wi.vwi_payload[0]= 0; + wi.vwi_payload[1]= past_fcm; + wi.vwi_payload[2]= future_fcm; + viddec_pm_append_workitem( parent, &wi, false ); + return; +} - switch(md->INTCOMPFIELD) - { - case VC1_INTCOMP_TOP_FIELD: - if(pic->CurrField == 0) // First field decoded - { - if(pic->TFF) - { - //parser->intcomp_bot[0] = intcomp1 << 13; - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp1); - } - else - { - parser->intcomp_top[0] = intcomp1; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp1; - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_top, intcomp1); - } - } - else // Second field - { - if(pic->TFF) - { - parser->intcomp_top[0] = intcomp1; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp1; - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_top, intcomp1); - } - else - { - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp1); - } - } - break; - case VC1_INTCOMP_BOTTOM_FIELD: - if(pic->CurrField == 0) // First field decoded - { - if(pic->TFF) - { - parser->intcomp_bot[0] = intcomp1; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp1; - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_bot, intcomp1); - } - else - { - parser->intcomp_bot[0] = intcomp1 << 13; - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp1); - } - } - else // Second field - { - if(pic->TFF) - { - 
BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp1); - } - else - { - parser->intcomp_bot[0] = intcomp1; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp1; - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_bot, intcomp1); - } - } - break; - case VC1_INTCOMP_BOTH_FIELD: - if(pic->CurrField == 0) // First field decoded - { - if(pic->TFF) - { - parser->intcomp_bot[0] = intcomp2; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp2; - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_bot, intcomp2); - } - else - { - parser->intcomp_top[0] = intcomp2; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp2; - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_top, intcomp2); - } - } - else // Second field - { - if(pic->TFF) - { - parser->intcomp_top[0] = intcomp1; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp1; - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_top, intcomp1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp2); - } - else - { - parser->intcomp_bot[0] = intcomp1; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp1; - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_bot, intcomp1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp2); - } - } - break; - default: - break; - } // switch on INTCOMPFIELD - return; -} // vc1_intcomp -static void handle_intensity_compensation(vc1_viddec_parser_t *parser, vc1_Info *pInfo, VC1D_SPR_REGS *spr) +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_SEQ_ENTRY_registers(void *parent, vc1_viddec_parser_t *parser) { - vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader; - uint8_t intcomp_present = false; + uint32_t stream_format1 = 0; + uint32_t stream_format2 = 0; + uint32_t entrypoint1 = 0; + viddec_workload_item_t wi; - if((pic->MVMODE == VC1_MVMODE_INTENSCOMP) || (pic->INTCOMP)) - { - intcomp_present = true; - if(pic->FCM == VC1_FCM_FIELD_INTERLACE) - { - vc1_intcomp(parser, pInfo, spr); - } - else - { - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_top, 1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, spr->intcomp_fwd_top, pic->LUMSCALE); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, spr->intcomp_fwd_top, pic->LUMSHIFT); + vc1_metadata_t *md = &(parser->info.metadata); - if(parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm == VC1_FCM_FIELD_INTERLACE) - { - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, 1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, spr->intcomp_fwd_bot, pic->LUMSCALE); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, spr->intcomp_fwd_bot, pic->LUMSHIFT); - } - parser->intcomp_top[0] = spr->intcomp_fwd_top; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = spr->intcomp_fwd_top; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = spr->intcomp_fwd_top; - } - } - // Propagate the previous picture's intensity compensation - if(pic->FCM == VC1_FCM_FIELD_INTERLACE) - { - if( (pic->CurrField) || - ((pic->CurrField == 0) && 
(parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm == VC1_FCM_FIELD_INTERLACE))) - { - spr->intcomp_fwd_top |= parser->intcomp_top[1]; - spr->intcomp_fwd_bot |= parser->intcomp_bot[1]; - } - } - if(pic->FCM == VC1_FCM_FRAME_INTERLACE) - { - if( (pic->CurrField) || - ((pic->CurrField == 0) && (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm == VC1_FCM_FIELD_INTERLACE))) - { - spr->intcomp_fwd_bot |= parser->intcomp_bot[1]; - } - } + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, PROFILE, stream_format1, md->PROFILE); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, LEVEL, stream_format1, md->LEVEL); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, CHROMAFORMAT, stream_format1, md->CHROMAFORMAT); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, FRMRTQ, stream_format1, md->FRMRTQ); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, BITRTQ, stream_format1, md->BITRTQ); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, POSTPRO, stream_format1, md->POSTPROCFLAG); - switch(pic->PTYPE) - { - case VC1_B_FRAME: - spr->intcomp_fwd_top = parser->intcomp_last[0]; - spr->intcomp_fwd_bot = parser->intcomp_last[1]; - spr->intcomp_bwd_top = parser->intcomp_last[2]; - spr->intcomp_bwd_bot = parser->intcomp_last[3]; - break; - case VC1_P_FRAME: - // If first field, store the intcomp values to propagate. - // If second field has valid intcomp values, store them - // to propagate. - if(pic->CurrField == 0) // first field - { - parser->intcomp_last[0] = spr->intcomp_fwd_top; - parser->intcomp_last[1] = spr->intcomp_fwd_bot; - parser->intcomp_last[2] = spr->intcomp_bwd_top; - parser->intcomp_last[3] = spr->intcomp_bwd_bot; - } - else // Second field - { - parser->intcomp_last[0] |= spr->intcomp_fwd_top; - parser->intcomp_last[1] |= spr->intcomp_fwd_bot; - parser->intcomp_last[2] |= spr->intcomp_bwd_top; - parser->intcomp_last[3] |= spr->intcomp_bwd_bot; - } - break; - case VC1_I_FRAME: - case VC1_BI_FRAME: - break; - default: - break; - } - return; -} // handle_intensity_compensation + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, PULLDOWN, stream_format2, md->PULLDOWN); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, INTERLACE, stream_format2, md->INTERLACE); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, TFCNTRFLAG, stream_format2, md->TFCNTRFLAG); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, FINTERPFLAG, stream_format2, md->FINTERPFLAG); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, PSF, stream_format2, md->PSF); -/** - * This function populates the registers for range reduction (main profile) - * This function assumes pInfo->metadata.RANGERED is ON at the sequence layer (J.1.17) - * A frame is marked as range reduced by the RANGEREDFRM flag at the picture layer, - * and the output of the decoded range reduced frame needs to be scaled up (8.1.1.4). 
- * Previous reference frame needs be upscaled or downscaled based on the RR status of - * current and previous frame (8.3.4.11) - */ -static inline void vc1_fill_RR_hw_struct(vc1_viddec_parser_t *parser, vc1_Info *pInfo, VC1D_SPR_REGS *spr) + + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, BROKEN_LINK, entrypoint1, md->BROKEN_LINK); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, CLOSED_ENTRY, entrypoint1, md->CLOSED_ENTRY); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, PANSCAN_FLAG, entrypoint1, md->PANSCAN_FLAG); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, REFDIST_FLAG, entrypoint1, md->REFDIST_FLAG); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, LOOPFILTER, entrypoint1, md->LOOPFILTER); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, FASTUVMC, entrypoint1, md->FASTUVMC); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_MV, entrypoint1, md->EXTENDED_MV); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, DQUANT, entrypoint1, md->DQUANT); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, VS_TRANSFORM, entrypoint1, md->VSTRANSFORM); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, OVERLAP, entrypoint1, md->OVERLAP); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, QUANTIZER, entrypoint1, md->QUANTIZER); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_DMV, entrypoint1, md->EXTENDED_DMV); + + + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SEQ_ENTRY; + + + wi.vwi_payload[0] = stream_format1; + wi.vwi_payload[1] = stream_format2; + wi.vwi_payload[2] = entrypoint1; + + viddec_pm_append_workitem( parent, &wi, false ); + return; +} // send_reorder_ref_items + + +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_SIZE_AND_AP_RANGEMAP_registers(void *parent, vc1_viddec_parser_t *parser) { - vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader; - int is_previous_ref_rr=0; + uint32_t coded_size = 0; + uint32_t ap_range_map = 0; + + viddec_workload_item_t wi; + + vc1_metadata_t *md = &(parser->info.metadata); + + + BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, WIDTH, coded_size, md->width); + BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, HEIGHT, coded_size, md->height); + + + /* if range reduction is indicated at seq. 
layer, populate range reduction registers for the frame*/ + if(VC1_PROFILE_ADVANCED == md->PROFILE) + { + + + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, ap_range_map, md->RANGE_MAPY_FLAG); + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y, ap_range_map, md->RANGE_MAPY); + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, ap_range_map, md->RANGE_MAPUV_FLAG); + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV, ap_range_map, md->RANGE_MAPUV); + + + + + } + else + { + ap_range_map = 0; + } + + + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SIZE_AND_AP_RANGEMAP; + - /* range reduction applies to luma and chroma component - which are the same register bit as RANGE_MAPY_FLAG, RANGE_MAPUV_FLAG */ - BF_WRITE(VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, spr->range_map, pic->RANGEREDFRM); - BF_WRITE(VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, spr->range_map, pic->RANGEREDFRM); - - /* Get the range reduced status of the previous frame */ - switch (pic->PTYPE) - { - case VC1_P_FRAME: - { - is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].rr_frm; - break; - } - case VC1_B_FRAME: - { - is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].rr_frm; - break; - } - default: - { - break; - } - } + wi.vwi_payload[0] = 0; + wi.vwi_payload[1] = coded_size; + wi.vwi_payload[2] = ap_range_map; - /* if current frame is RR and previous frame is not - donwscale the reference pixel ( RANGE_REF_RED_TYPE =1 in register) */ - if(pic->RANGEREDFRM) - { - if(!is_previous_ref_rr) - { - BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, spr->recon_control, 1); - BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, spr->recon_control, 1); - } - } - else - { - /* if current frame is not RR but previous was RR, scale up the reference frame ( RANGE_REF_RED_TYPE = 0) */ - if(is_previous_ref_rr) - { - BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, spr->recon_control, 1); - BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, spr->recon_control, 0); - } - } // end for RR upscale + viddec_pm_append_workitem( parent, &wi, false ); + return; +} // send_reorder_ref_items -} // vc1_fill_RR_hw_struct -/** - * fill workload items that will load registers for HW decoder - */ -static void vc1_fill_hw_struct(vc1_viddec_parser_t *parser, vc1_Info* pInfo, VC1D_SPR_REGS *spr) + +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_SLICE_FRAME_TYPE_INFO_registers(void *parent, vc1_viddec_parser_t *parser) { - vc1_metadata_t *md = &pInfo->metadata; - vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader; - int field = pic->CurrField; - int ptype; + uint32_t alt_frame_type = 0; + uint32_t frame_type = 0; - ptype = pic->PTYPE; + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); + viddec_workload_item_t wi; - LOG_CRIT("ptype = %d, field = %d, topfield = %d, slice = %d", ptype, pic->CurrField, pic->BottomField, pic->SLICE_ADDR); + vc1_metadata_t *md = &(parser->info.metadata); - /* Common to both fields */ - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, PROFILE, spr->stream_format1, md->PROFILE); - BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, WIDTH, spr->coded_size, md->width); - BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, HEIGHT, spr->coded_size, md->height); + BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, FCM, frame_type, pic->FCM); + BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, PTYPE, frame_type, pic->PTYPE); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, INTERLACE, spr->stream_format2, md->INTERLACE); + alt_frame_type = frame_type; - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, LOOPFILTER, spr->entrypoint1, 
md->LOOPFILTER); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, FASTUVMC, spr->entrypoint1, md->FASTUVMC); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_MV, spr->entrypoint1, md->EXTENDED_MV); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, DQUANT, spr->entrypoint1, md->DQUANT); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, VS_TRANSFORM, spr->entrypoint1, md->VSTRANSFORM); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, OVERLAP, spr->entrypoint1, md->OVERLAP); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, QUANTIZER, spr->entrypoint1, md->QUANTIZER); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_DMV, spr->entrypoint1, md->EXTENDED_DMV); + if(VC1_PROFILE_ADVANCED == md->PROFILE) + { + if( (VC1_P_FRAME == pic->PTYPE)||(VC1_B_FRAME == pic->PTYPE) ) + { + BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, pic->PQUANT); + } + } + else + { + if( VC1_SKIPPED_FRAME== pic->PTYPE) + { + BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, 0); + } else { + BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, pic->PQUANT); + } + } - /* if range reduction is indicated at seq. layer, populate range reduction registers for the frame*/ - if(md->RANGERED) - { - vc1_fill_RR_hw_struct(parser, pInfo, spr ); - } - else - { //range mapping - BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, spr->range_map, md->RANGE_MAPY_FLAG); - BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y, spr->range_map, md->RANGE_MAPY); - BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, spr->range_map, md->RANGE_MAPUV_FLAG); - BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV, spr->range_map, md->RANGE_MAPUV); - } - BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, FCM, spr->frame_type, pic->FCM); - BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, PTYPE, spr->frame_type, pic->PTYPE); - - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, RNDCTRL, spr->recon_control, md->RNDCTRL); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UVSAMP, spr->recon_control, pic->UVSAMP); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQUANT, spr->recon_control, pic->PQUANT); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, HALFQP, spr->recon_control, pic->HALFQP); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UNIFORM_QNT, spr->recon_control, pic->UniformQuant); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, POSTPROC, spr->recon_control, pic->POSTPROC); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, CONDOVER, spr->recon_control, pic->CONDOVER); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQINDEX_LE8, spr->recon_control, (pic->PQINDEX <= 8)); - - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVRANGE, spr->mv_control, pic->MVRANGE); - if ( pic->MVMODE == VC1_MVMODE_INTENSCOMP) - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, spr->mv_control, pic->MVMODE2); - else - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, spr->mv_control, pic->MVMODE); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVTAB, spr->mv_control, pic->MVTAB); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, DMVRANGE, spr->mv_control, pic->DMVRANGE); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MV4SWITCH, spr->mv_control, pic->MV4SWITCH); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MBMODETAB, spr->mv_control, pic->MBMODETAB); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, NUMREF, spr->mv_control, - pic->NUMREF || ((pic->PTYPE == VC1_B_FRAME) && ( pic->FCM == VC1_FCM_FIELD_INTERLACE ) )); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, REFFIELD, spr->mv_control, pic->REFFIELD); - - handle_intensity_compensation(parser, pInfo, spr); - - BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_DEN, spr->ref_bfraction, pic->BFRACTION_DEN); - 
BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_NUM, spr->ref_bfraction, pic->BFRACTION_NUM); - BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, REFDIST, spr->ref_bfraction, md->REFDIST); - - // BLOCK CONTROL REGISTER Offset 0x2C - BF_WRITE( VC1_0_SEQPIC_BLOCK_CONTROL, CBPTAB, spr->blk_control, pic->CBPTAB); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTMFB, spr->blk_control, pic->TTMBF); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTFRM, spr->blk_control, pic->TTFRM); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV2BPTAB, spr->blk_control, pic->MV2BPTAB); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV4BPTAB, spr->blk_control, pic->MV4BPTAB); - if((field == 1) && (pic->SLICE_ADDR)) - { - int mby = md->height * 2 + 2; - mby = (mby + 15 ) / 16; - pic->SLICE_ADDR -= (mby/2); - } - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, INITIAL_MV_Y, spr->blk_control, pic->SLICE_ADDR); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID2, spr->blk_control, md->bp_raw[0]); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID1, spr->blk_control, md->bp_raw[1]); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID0, spr->blk_control, md->bp_raw[2]); - - BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM, spr->trans_data, pic->TRANSACFRM); - BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM2, spr->trans_data, pic->TRANSACFRM2); - BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSDCTAB, spr->trans_data, pic->TRANSDCTAB); - - // When DQUANT is 1 or 2, we have the VOPDQUANT structure in the bitstream that - // controls the value calculated for ALTPQUANT - // ALTPQUANT must be in the range of 1 and 31 for it to be valid - // DQUANTFRM is present only when DQUANT is 1 and ALTPQUANT setting should be dependent on DQUANT instead - if(md->DQUANT) - { - if(pic->PQDIFF == 7) - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, spr->vop_dquant, pic->ABSPQ); - else if (pic->DQUANTFRM == 1) - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, spr->vop_dquant, pic->PQUANT + pic->PQDIFF + 1); - } - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQUANTFRM, spr->vop_dquant, pic->DQUANTFRM); - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQPROFILE, spr->vop_dquant, pic->DQPROFILE); - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQES, spr->vop_dquant, pic->DQSBEDGE); - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQBILEVEL, spr->vop_dquant, pic->DQBILEVEL); - - BF_WRITE(VC1_0_SEQPIC_CURR_FRAME_ID,FCM, spr->ref_frm_id[VC1_FRAME_CURRENT_REF], pic->FCM ); - - if ( ptype == VC1_B_FRAME) { - // Forward reference is past reference and is the second temporally closest reference - hence minus_2 - BF_WRITE(VC1_0_SEQPIC_FWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_PAST], parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].fcm ); - // Backward reference is future reference frame and is temporally the closest - hence minus_1 - BF_WRITE(VC1_0_SEQPIC_BWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_FUTURE], parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm ); - } else { - // Only Forward reference is valid and is the temporally closest reference - hence minus_1, backward is set same as forward - BF_WRITE(VC1_0_SEQPIC_FWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_PAST], parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm ); - BF_WRITE(VC1_0_SEQPIC_BWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_FUTURE], parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm ); - } + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_FRAME_TYPE_INFO; - BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, TOP_FIELD, spr->fieldref_ctrl_id, pic->BottomField); - BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, SECOND_FIELD, spr->fieldref_ctrl_id, 
pic->CurrField); - if(parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) - { - BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, spr->fieldref_ctrl_id, 1); - } - else - { - BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, spr->fieldref_ctrl_id, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[pic->CurrField]); - } - if( pic->FCM == VC1_FCM_FIELD_INTERLACE ) { - BF_WRITE(VC1_0_SEQPIC_IMAGE_STRUCTURE, IMG_STRUC, spr->imgstruct, (pic->BottomField) ? 2 : 1); - } + wi.vwi_payload[0] = 0; + wi.vwi_payload[1] = frame_type; + wi.vwi_payload[2] = alt_frame_type; - return; -} // vc1_fill_hw_struct + viddec_pm_append_workitem( parent, &wi, false ); + return; +} // send_reorder_ref_items -int32_t vc1_parse_emit_current_frame(void *parent, vc1_viddec_parser_t *parser) +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_SLICE_CONTROL_INFO_registers(void *parent, vc1_viddec_parser_t *parser) { - viddec_workload_item_t wi; - const uint32_t *pl; - int i; - int nitems; + uint32_t recon_control = 0; + uint32_t mv_control = 0; + uint32_t blk_control = 0; - if( parser->info.picLayerHeader.PTYPE == VC1_SKIPPED_FRAME ) { - translate_parser_info_to_frame_attributes( parent, parser ); - return 0; - } + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); + viddec_workload_item_t wi; - translate_parser_info_to_frame_attributes( parent, parser ); - memset(&parser->spr, 0, sizeof(VC1D_SPR_REGS)); - vc1_fill_hw_struct( parser, &parser->info, &parser->spr ); + int is_previous_ref_rr=0; - /* STUFF BSP Data Memory it into a variety of workload items */ + vc1_metadata_t *md = &(parser->info.metadata); - pl = (const uint32_t *) &parser->spr; - // How many payloads must be generated - nitems = (sizeof(parser->spr) + 7) / 8; /* In QWORDs rounded up */ + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, RNDCTRL, recon_control, md->RNDCTRL); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UVSAMP, recon_control, pic->UVSAMP); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQUANT, recon_control, pic->PQUANT); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, HALFQP, recon_control, pic->HALFQP); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UNIFORM_QNT, recon_control, pic->UniformQuant); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, POSTPROC, recon_control, pic->POSTPROC); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, CONDOVER, recon_control, pic->CONDOVER); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQINDEX_LE8, recon_control, (pic->PQINDEX <= 8)); + /* Get the range reduced status of the previous frame */ + switch (pic->PTYPE) + { + case VC1_P_FRAME: + { + is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].rr_frm; + break; + } + case VC1_B_FRAME: + { + is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].rr_frm; + break; + } + default: + { + break; + } + } - // Dump DMEM to an array of workitems - for( i = 0; (i < nitems) && ( (parser->info.picLayerHeader.SLICE_ADDR == 0) || parser->info.picture_info_has_changed ); i++ ) + if(pic->RANGEREDFRM) { - wi.vwi_type = VIDDEC_WORKLOAD_DECODER_SPECIFIC; - wi.data.data_offset = (unsigned int)pl - (unsigned int)&parser->spr; // offset within struct - wi.data.data_payload[0] = pl[0]; - wi.data.data_payload[1] = pl[1]; - pl += 2; - viddec_pm_append_workitem( parent, &wi ); + if(!is_previous_ref_rr) + { + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, recon_control, 1); + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, recon_control, 1); + } } - + else { - uint32_t bit, byte; - uint8_t is_emul; - viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul); - 
// Send current bit offset and current slice - wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET; - // If slice data starts in the middle of the emulation prevention sequence - - // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data - // to the decoder starting at the first byte of 0s so that the decoder can detect the - // emulation prevention. But the actual data starts are offset 8 in this bit sequence. - wi.vwi_payload[0] = bit + (is_emul*8); - wi.vwi_payload[1] = parser->info.picLayerHeader.SLICE_ADDR; - wi.vwi_payload[2] = 0xdeaddead; - viddec_pm_append_workitem( parent, &wi ); + /* if current frame is not RR but previous was RR, scale up the reference frame ( RANGE_REF_RED_TYPE = 0) */ + if(is_previous_ref_rr) + { + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, recon_control, 1); + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, recon_control, 0); + } + } // end for RR upscale + + + + + + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVRANGE, mv_control, pic->MVRANGE); + if ( pic->MVMODE == VC1_MVMODE_INTENSCOMP) + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, mv_control, pic->MVMODE2); + else + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, mv_control, pic->MVMODE); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVTAB, mv_control, pic->MVTAB); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, DMVRANGE, mv_control, pic->DMVRANGE); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MV4SWITCH, mv_control, pic->MV4SWITCH); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MBMODETAB, mv_control, pic->MBMODETAB); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, NUMREF, mv_control, + pic->NUMREF || ((pic->PTYPE == VC1_B_FRAME) && ( pic->FCM == VC1_FCM_FIELD_INTERLACE ) )); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, REFFIELD, mv_control, pic->REFFIELD); + + + + // BLOCK CONTROL REGISTER Offset 0x2C + BF_WRITE( VC1_0_SEQPIC_BLOCK_CONTROL, CBPTAB, blk_control, pic->CBPTAB); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTMFB, blk_control, pic->TTMBF); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTFRM, blk_control, pic->TTFRM); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV2BPTAB, blk_control, pic->MV2BPTAB); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV4BPTAB, blk_control, pic->MV4BPTAB); + if((pic->CurrField == 1) && (pic->SLICE_ADDR)) + { + int mby = md->height * 2 + 2; + mby = (mby + 15 ) / 16; + pic->SLICE_ADDR -= (mby/2); } + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, INITIAL_MV_Y, blk_control, pic->SLICE_ADDR); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID2, blk_control, md->bp_raw[0]); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID1, blk_control, md->bp_raw[1]); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID0, blk_control, md->bp_raw[2]); - viddec_pm_append_pixeldata( parent ); + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_CONTROL_INFO; - return(0); -} -/* sends VIDDEC_WORKLOAD_VC1_PAST_FRAME item */ -static inline void vc1_send_past_ref_items(void *parent) + wi.vwi_payload[0] = recon_control; + wi.vwi_payload[1] = mv_control; + wi.vwi_payload[2] = blk_control; + + viddec_pm_append_workitem( parent, &wi, false ); + return; +} // send_reorder_ref_items + +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_SLICE_OTHER_INFO_registers(void *parent, vc1_viddec_parser_t *parser) { + uint32_t trans_data = 0; + uint32_t vop_dquant = 0; + uint32_t ref_bfraction = 0; + + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); viddec_workload_item_t wi; - wi.vwi_type = VIDDEC_WORKLOAD_VC1_PAST_FRAME; - 
wi.ref_frame.reference_id = 0; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - viddec_pm_append_workitem( parent, &wi ); + + vc1_metadata_t *md = &(parser->info.metadata); + + BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_DEN, ref_bfraction, pic->BFRACTION_DEN); + BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_NUM, ref_bfraction, pic->BFRACTION_NUM); + BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, REFDIST, ref_bfraction, md->REFDIST); + + if(md->DQUANT) + { + if(pic->PQDIFF == 7) + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, vop_dquant, pic->ABSPQ); + else if (pic->DQUANTFRM == 1) + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, vop_dquant, pic->PQUANT + pic->PQDIFF + 1); + } + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQUANTFRM, vop_dquant, pic->DQUANTFRM); + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQPROFILE, vop_dquant, pic->DQPROFILE); + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQES, vop_dquant, pic->DQSBEDGE); + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQBILEVEL, vop_dquant, pic->DQBILEVEL); + + BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM, trans_data, pic->TRANSACFRM); + BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM2, trans_data, pic->TRANSACFRM2); + BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSDCTAB, trans_data, pic->TRANSDCTAB); + + + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_OTHER_INFO; + + + wi.vwi_payload[0] = trans_data; + wi.vwi_payload[1] = vop_dquant; + wi.vwi_payload[2] = ref_bfraction; + + viddec_pm_append_workitem( parent, &wi, false ); return; -} +} // send_reorder_ref_items -/* send future frame item */ -static inline void vc1_send_future_ref_items(void *parent) + + +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(void *parent, vc1_viddec_parser_t *parser) { + uint32_t imgstruct = 0; + uint32_t fieldref_ctrl_id = 0; + uint32_t smp_rangemap = 0; + + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); viddec_workload_item_t wi; - wi.vwi_type = VIDDEC_WORKLOAD_VC1_FUTURE_FRAME; - wi.ref_frame.reference_id = 0; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - viddec_pm_append_workitem( parent, &wi ); + + vc1_metadata_t *md = &(parser->info.metadata); + + if( pic->FCM == VC1_FCM_FIELD_INTERLACE ) { + BF_WRITE(VC1_0_SEQPIC_IMAGE_STRUCTURE, IMG_STRUC, imgstruct, (pic->BottomField) ? 
2 : 1); + } + + BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, TOP_FIELD, fieldref_ctrl_id, pic->BottomField); + BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, SECOND_FIELD, fieldref_ctrl_id, pic->CurrField); + if(parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) + { + BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, fieldref_ctrl_id, 1); + } + else + { + BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, fieldref_ctrl_id, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[pic->CurrField]); + } + + if(VC1_PROFILE_ADVANCED != md->PROFILE) + { + if(pic->RANGEREDFRM) + { + //BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, smp_rangemap, md->RANGE_MAPY_FLAG); + //BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, smp_rangemap, md->RANGE_MAPUV_FLAG); + smp_rangemap = 0x11; + } + + } + + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO; + + + wi.vwi_payload[0] = imgstruct; + wi.vwi_payload[1] = fieldref_ctrl_id; + wi.vwi_payload[2] = smp_rangemap; + + viddec_pm_append_workitem( parent, &wi, false ); return; -} +} // send_reorder_ref_items + /* send reorder frame item to host * future frame gets push to past */ -static inline void send_reorder_ref_items(void *parent) +static inline void send_INT_COM_registers(void *parent, vc1_viddec_parser_t *parser) { + uint32_t intcomp_fwd_top = 0; + uint32_t intcomp_fwd_bot = 0; + uint32_t intcomp_bwd_top = 0; + uint32_t intcomp_bwd_bot = 0; + uint32_t intcomp_cur = 0; + + uint32_t POS_2nd_INTCOMP = 13; + uint32_t MASK_1st_INTCOMP = 0x1fff; + uint32_t MASK_2nd_INTCOMP = 0x3ffe000; + + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); viddec_workload_item_t wi; - wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER; - wi.ref_reorder.ref_table_offset = 0; - wi.ref_reorder.ref_reorder_00010203 = 0x01010203; //put reference frame index 1 as reference index 0 - wi.ref_reorder.ref_reorder_04050607 = 0x04050607; // index 4,5,6,7 stay the same - viddec_pm_append_workitem( parent, &wi ); + + vc1_metadata_t *md = &(parser->info.metadata); + + + + if(VC1_SKIPPED_FRAME == pic->PTYPE) + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top =0; + return; + } + + if( VC1_FCM_FIELD_INTERLACE != pic->FCM ) + { + + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, intcomp_cur, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp_cur, pic->LUMSCALE); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp_cur, pic->LUMSHIFT); + + if ( !((pic->MVMODE == VC1_MVMODE_INTENSCOMP) || (pic->INTCOMP)) ) + intcomp_cur = 0; + + if( (VC1_BI_FRAME==pic->PTYPE)||(VC1_B_FRAME==pic->PTYPE) ) + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = 0; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = 0; + + intcomp_bwd_top = parser->intcomp_top[0]; + intcomp_bwd_bot = parser->intcomp_bot[0]; + intcomp_fwd_bot = parser->intcomp_bot[1]; + + + if( parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != (-1) ) + { + if (VC1_SKIPPED_FRAME != parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].type) + intcomp_fwd_top = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].intcomp_top; + } + else + { + if (VC1_SKIPPED_FRAME != parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].type) + intcomp_fwd_top = parser->intcomp_top[1]; + } + } + else + { //I,P TYPE + + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; + + if(VC1_FCM_FIELD_INTERLACE == parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm) + { + intcomp_fwd_top = parser->intcomp_top[1]; + intcomp_fwd_top |= intcomp_cur << POS_2nd_INTCOMP; + + intcomp_fwd_bot = 
parser->intcomp_bot[1]; + intcomp_fwd_bot |= intcomp_cur << POS_2nd_INTCOMP; + } + else + { + intcomp_fwd_top = intcomp_cur;// << POS_2nd_INTCOMP; + intcomp_fwd_bot = 0; + } + } + } + else + { + //FIELD INTERLACE + //if(0!=md->INTCOMPFIELD) + //No debugging + + if (md->INTCOMPFIELD == VC1_INTCOMP_BOTTOM_FIELD) + { + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, intcomp_cur, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, intcomp_cur, md->LUMSCALE2); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, intcomp_cur, md->LUMSHIFT2); + } + else + { + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, intcomp_cur, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp_cur, pic->LUMSCALE); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp_cur, pic->LUMSHIFT); + } + + if(md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD) + { + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, intcomp_cur, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, intcomp_cur, md->LUMSCALE2); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, intcomp_cur, md->LUMSHIFT2); + } + + if(pic->MVMODE != VC1_MVMODE_INTENSCOMP) + { + intcomp_cur = 0; + } + + if(pic->CurrField == 0) + { + if(pic->TFF) + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; + } + else + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp_cur; + } + } + else + { + if(pic->TFF) + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp_cur; + } + else + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; + } + } + + if(pic->CurrField == 1) + { //SECOND FIELD + + if(VC1_B_FRAME != pic->PTYPE) + { + if(pic->TFF) + { + intcomp_bwd_top = intcomp_cur & MASK_1st_INTCOMP; + + intcomp_fwd_bot = (parser->intcomp_bot[1] & MASK_2nd_INTCOMP) >> POS_2nd_INTCOMP; //??????? 
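+ /* Each intcomp register packs two 13-bit entries (INT_COMP flag, 6-bit
+ LUMA_SCALE, 6-bit LUMA_SHIFT): MASK_1st_INTCOMP covers bits 0..12 and
+ MASK_2nd_INTCOMP/POS_2nd_INTCOMP select the copy stored at bits 13..25. */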
+ intcomp_fwd_bot |= (intcomp_cur & MASK_2nd_INTCOMP);
+
+ intcomp_fwd_top = parser->intcomp_top[1];
+ }
+ else
+ {
+ intcomp_bwd_bot= (intcomp_cur & MASK_2nd_INTCOMP)>>POS_2nd_INTCOMP;
+
+ intcomp_fwd_top = (parser->intcomp_top[1] & MASK_2nd_INTCOMP) >> POS_2nd_INTCOMP;
+ intcomp_fwd_top |= (intcomp_cur&MASK_1st_INTCOMP) << POS_2nd_INTCOMP;
+
+ intcomp_fwd_bot = parser->intcomp_bot[1];
+ }
+ }
+ else
+ { //B TYPE
+ intcomp_fwd_top = parser->intcomp_top[1];
+ intcomp_fwd_bot = parser->intcomp_bot[1];
+
+ intcomp_bwd_top = parser->intcomp_top[0];
+ intcomp_bwd_bot = parser->intcomp_bot[0];
+ }
+ }
+ else
+ { //FIRST FIELD
+
+ if( (VC1_B_FRAME==pic->PTYPE)||(VC1_BI_FRAME==pic->PTYPE) )
+ {
+ if(VC1_SKIPPED_FRAME!=parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].type)
+ {
+ intcomp_fwd_top = parser->intcomp_top[1];
+ intcomp_fwd_bot = parser->intcomp_bot[1];
+ }
+
+ intcomp_bwd_top = parser->intcomp_top[0];
+ intcomp_bwd_bot = parser->intcomp_bot[0];
+
+ }
+ else
+ { //I,P TYPE
+
+ intcomp_fwd_top = parser->intcomp_top[1] & MASK_1st_INTCOMP;
+ intcomp_fwd_top |= (intcomp_cur&MASK_1st_INTCOMP) << POS_2nd_INTCOMP;
+
+ intcomp_fwd_bot = parser->intcomp_bot[1] & MASK_1st_INTCOMP;
+ intcomp_fwd_bot |= (intcomp_cur & MASK_2nd_INTCOMP);
+ } //pic->PTYPE == I,P TYPE
+ } //pic->CurrField == 0
+ } //VC1_FCM_FIELD_INTERLACE != pic->FCM
+
+ if ( (VC1_B_FRAME != pic->PTYPE) && (VC1_BI_FRAME != pic->PTYPE) )
+ {
+ parser->intcomp_top[1] = intcomp_fwd_top;
+ parser->intcomp_bot[1] = intcomp_fwd_bot;
+
+ parser->intcomp_top[0] = intcomp_bwd_top;
+ parser->intcomp_bot[0] = intcomp_bwd_bot;
+ }
+
+ //OS_INFO("intcomp_fwd_top = %d\n", intcomp_fwd_top);
+ //OS_INFO("intcomp_fwd_bot = %d\n", intcomp_fwd_bot);
+
+
+ wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_INT_COM_FW;
+
+ wi.vwi_payload[0] = 0;
+ wi.vwi_payload[1] = intcomp_fwd_top;
+ wi.vwi_payload[2] = intcomp_fwd_bot;
+
+ viddec_pm_append_workitem( parent, &wi, false );
+
+ wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_INT_COM_BW;
+
+ wi.vwi_payload[0] = 0;
+ wi.vwi_payload[1] = intcomp_bwd_top;
+ wi.vwi_payload[2] = intcomp_bwd_bot;
+
+ viddec_pm_append_workitem( parent, &wi, false );
+
+ return;
+} // send_INT_COM_registers
+
+
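+/* The emit path below replaces the old single DMEM dump: each picture start
+ * now appends one workitem per register group (SEQ_ENTRY,
+ * SIZE_AND_AP_RANGEMAP, SLICE_FRAME_TYPE_INFO, SLICE_CONTROL_INFO,
+ * SLICE_OTHER_INFO, STRUCT_FIELD_AND_SMP_RANGEMAP_INFO, INT_COM), followed by
+ * the VC1_BITOFFSET item and the pixel data. */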
 /** update workload with more workload items for ref and update values to store...
 */
-void vc1_start_new_frame(void *parent, vc1_viddec_parser_t *parser)
+void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser)
 {
 vc1_metadata_t *md = &(parser->info.metadata);
 viddec_workload_t *wl = viddec_pm_get_header(parent);
@@ -630,17 +762,20 @@
 {
 vc1_send_past_ref_items(parent);
 vc1_send_future_ref_items(parent);
+ vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].fcm, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm);
 break;
 }
 case VC1_SKIPPED_FRAME:
 {
 wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME;
 vc1_send_past_ref_items(parent);
+ vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm, vc1_PictureFormatNone);
 break;
 }
 case VC1_P_FRAME:
 {
 vc1_send_past_ref_items( parent);
+ vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm, vc1_PictureFormatNone);
 break;
 }
 default:
@@ -668,15 +803,149 @@
 parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].type = parser->info.picLayerHeader.PTYPE;
 parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_en = md->RANGERED;
 parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_frm = parser->info.picLayerHeader.RANGEREDFRM;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].tff = parser->info.picLayerHeader.TFF;
 LOG_CRIT("anchor[0] = %d, anchor[1] = %d",
 parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[0],
 parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[1] );
 }
+ if( parser->info.picLayerHeader.PTYPE == VC1_SKIPPED_FRAME )
+ {
+ translate_parser_info_to_frame_attributes( parent, parser );
+ return;
+ }
+
+ translate_parser_info_to_frame_attributes( parent, parser );
+
+
+ send_SEQ_ENTRY_registers(parent, parser);
+ send_SIZE_AND_AP_RANGEMAP_registers(parent, parser);
+ send_SLICE_FRAME_TYPE_INFO_registers(parent, parser);
+ send_SLICE_CONTROL_INFO_registers(parent, parser);
+ send_SLICE_OTHER_INFO_registers(parent, parser);
+ send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser);
+ send_INT_COM_registers(parent, parser);
+
+ {
+ viddec_workload_item_t wi;
+ uint32_t bit, byte;
+ uint8_t is_emul = 0;
+
+ viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul);
+
+ // Send current bit offset and current slice
+ wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET;
+
+ // If slice data starts in the middle of the emulation prevention sequence -
+ // Special Case 1----[is_emul = 1]:
+ // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data
+ // to the decoder starting at the first byte of 0s so that the decoder can detect the
+ // emulation prevention. But the actual data starts at offset 8 in this bit sequence.
+
+ // Special Case 2----[is_emul = 2]:
+ // If slice data starts in the middle of the emulation prevention sequence -
+ // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need to read out this byte.
+ //
+
+ wi.vwi_payload[0] = bit + (is_emul*8) ;
+ wi.vwi_payload[1] = 0xdeaddead;
+ wi.vwi_payload[2] = 0xdeaddead;
+ viddec_pm_append_workitem( parent, &wi, false );
+ }
+
+
+ viddec_pm_append_pixeldata( parent );
+
 return;
} // vc1_start_new_frame
+
+void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser)
+{
+
+ send_SLICE_FRAME_TYPE_INFO_registers(parent, parser);
+ send_SLICE_CONTROL_INFO_registers(parent, parser);
+ send_SLICE_OTHER_INFO_registers(parent, parser);
+ send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser);
+ send_INT_COM_registers(parent, parser);
+
+ {
+ viddec_workload_item_t wi;
+ uint32_t bit, byte;
+ uint8_t is_emul = 0;
+
+ viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul);
+
+
+ // Send current bit offset and current slice
+ wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET;
+ // If slice data starts in the middle of the emulation prevention sequence -
+ // Special Case 1----[is_emul = 1]:
+ // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data
+ // to the decoder starting at the first byte of 0s so that the decoder can detect the
+ // emulation prevention. But the actual data starts at offset 8 in this bit sequence.
+
+ // Special Case 2----[is_emul = 2]:
+ // If slice data starts in the middle of the emulation prevention sequence -
+ // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need to read out this byte.
+ //
+
+
+ wi.vwi_payload[0] = bit + (is_emul*8);
+ wi.vwi_payload[1] = 0xdeaddead;
+ wi.vwi_payload[2] = 0xdeaddead;
+ viddec_pm_append_workitem( parent, &wi, false );
+ }
+
+ viddec_pm_append_pixeldata( parent );
+
+ return;
+
+}
+
+
+void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser)
+{
+ send_SLICE_FRAME_TYPE_INFO_registers(parent, parser);
+ send_SLICE_CONTROL_INFO_registers(parent, parser);
+ send_SLICE_OTHER_INFO_registers(parent, parser);
+ //send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser);
+ //send_INT_COM_registers(parent, parser);
+
+ {
+ viddec_workload_item_t wi;
+ uint32_t bit, byte;
+ uint8_t is_emul = 0;
+
+ viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul);
+
+ // Send current bit offset and current slice
+ wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET;
+
+ // If slice data starts in the middle of the emulation prevention sequence -
+ // Special Case 1----[is_emul = 1]:
+ // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data
+ // to the decoder starting at the first byte of 0s so that the decoder can detect the
+ // emulation prevention. But the actual data starts at offset 8 in this bit sequence.
+
+ // Special Case 2----[is_emul = 2]:
+ // If slice data starts in the middle of the emulation prevention sequence -
+ // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need to read out this byte.
+ //
+
+ wi.vwi_payload[0] = bit + (is_emul*8);
+ wi.vwi_payload[1] = 0xdeaddead;
+ wi.vwi_payload[2] = 0xdeaddead;
+ viddec_pm_append_workitem( parent, &wi, false );
+ }
+
+ viddec_pm_append_pixeldata( parent );
+
+ return;
+}
+
+
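+/* In the three emit functions above only vwi_payload[0] (the corrected bit
+ * offset) carries information for the VC1_BITOFFSET item; 0xdeaddead fills
+ * the unused payload slots. */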
 void vc1_end_frame(vc1_viddec_parser_t *parser)
 {
 /* update status of reference frames */
diff --git a/mix_vbp/viddec_fw/fw/include/viddec_fw_version.h b/mix_vbp/viddec_fw/fw/include/viddec_fw_version.h
index 099be69..227dd67 100644
--- a/mix_vbp/viddec_fw/fw/include/viddec_fw_version.h
+++ b/mix_vbp/viddec_fw/fw/include/viddec_fw_version.h
@@ -3,5 +3,5 @@
 #define VIDDEC_FW_MAJOR_NUM 0
 #define VIDDEC_FW_MINOR_NUM 8
-#define VIDDEC_FW_BUILD_NUM 11
+#define VIDDEC_FW_BUILD_NUM 16
 #endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h
index d887501..a8efafb 100644
--- a/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h
@@ -1,7 +1,10 @@
 #ifndef VIDDEC_EMITTER_H
 #define VIDDEC_EMITTER_H
-
+#ifndef ANDROID
+#include <stdint.h>
+#else
 #include "../../include/stdint.h"
+#endif
 #ifndef HOST_ONLY
 #define DDR_MEM_MASK 0x80000000
 #else
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h
index a61e340..0c643fa 100644
--- a/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h
@@ -55,13 +55,9 @@
 */
 int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits);
 
-/* This function appends a work item to current workload.
+/* This function appends a work item to current/next workload.
 */
-int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item);
-
-/* This function appends a work item to next workload.
- */
-int32_t viddec_pm_append_workitem_next(void *parent, viddec_workload_item_t *item);
+int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item, uint32_t next);
 
 /* This function gets current byte and bit positions and information on whether an emulation byte is present after
 current byte.
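Illustrative sketch of the merged call declared above (not from the patch itself): the `next` flag selects the current or the next workload, matching the emitter selection in the viddec_pm_parser_ops.c change further below; the callers introduced by this patch pass `false`.

    viddec_workload_item_t wi;              /* item to emit; `parent` is the usual viddec_pm context */
    wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET;
    viddec_pm_append_workitem(parent, &wi, false); /* append to the current workload */
    viddec_pm_append_workitem(parent, &wi, true);  /* defer to the next workload */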
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h
index 1971a36..8dc2c53 100644
--- a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h
@@ -4,7 +4,6 @@
 #include "viddec_pm_utils_list.h"
 
 #define CUBBY_SIZE 1024
-//#define CUBBY_SIZE 512
 #define SCRATCH_SIZE 20
 #define MIN_DATA 8
 
@@ -32,7 +31,7 @@ typedef struct
 typedef struct
 {
 #ifdef VBP
- /* counter of emulation preventation byte */
+ /* counter of emulation prevention byte */
 uint32_t emulation_byte_counter;
 #endif
 /* After First pass of scan we figure out how many bytes are in the current access unit(N bytes). We store
@@ -64,6 +63,13 @@
 uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt);
 
 uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cxt);
 
+void viddec_pm_utils_skip_if_current_is_emulation(viddec_pm_utils_bstream_cxt_t *cxt);
+
+/*
+ This function gets bit and byte position of where we are in the current AU. We always return the position of the next byte to be
+ read.
+ is_emul set true indicates we are on the second zero byte of an emulation prevention sequence.
+ */
 static inline void viddec_pm_utils_bstream_get_au_offsets(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *bit, uint32_t *byte, uint8_t *is_emul)
 {
 uint32_t phase=cxt->phase;
@@ -74,6 +80,7 @@
 {
 phase = phase - ((cxt->bstrm_buf.buf_bitoff != 0)? 1: 0 );
 }
+ /* Assumption: we will never be parked on 0x3 byte of emulation prevention sequence */
 *is_emul = (cxt->is_emul_reqd) && (phase > 0) &&
 (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index] == 0) &&
 (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x3);
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
index 8695ef7..1b6b3d6 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
@@ -836,7 +836,7 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index)
 int pic_data_index = query_data->num_pictures - 1;
 if (pic_data_index < 0)
 {
- WTRACE("MB address does not start from 0!");
+ ETRACE("MB address does not start from 0!");
 return VBP_DATA;
 }
@@ -1256,6 +1256,7 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext)
 if (cxt->parse_cubby.size < 6)
 {
 /* need at least 6 bytes to start parsing the structure, see spec 15 */
+ ETRACE ("Need at least 6 bytes to start parsing\n" );
 return VBP_DATA;
 }
@@ -1299,6 +1300,8 @@
 if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
 {
 /* need at least 2 bytes to parse sequence_parameter_set_length */
+
+ ETRACE ("Need at least 2 bytes to parse sps." );
 return VBP_DATA;
 }
@@ -1311,6 +1314,7 @@
 if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size)
 {
 /* need at least sequence_parameter_set_length bytes for SPS */
+ ETRACE ("Need at least sequence parameter set length bytes." );
 return VBP_DATA;
 }
@@ -1328,6 +1332,7 @@
 if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size)
 {
 /* need at least one more byte to parse num_of_picture_parameter_sets */
+ ETRACE ("need at least one more byte to parse num_of_picture_parameter_sets."
); return VBP_DATA; } @@ -1342,6 +1347,7 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext) if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size) { /* need at least 2 bytes to parse picture_parameter_set_length */ + ETRACE ("need at least 2 bytes to parse picture_parameter_set_length."); return VBP_DATA; } @@ -1353,6 +1359,7 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext) if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size) { /* need at least picture_parameter_set_length bytes for PPS */ + ETRACE("need at least picture_parameter_set_length bytes for PPS"); return VBP_DATA; } @@ -1420,12 +1427,26 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data; int i; +#ifndef ANDROID for (i = 0; i < MAX_NUM_PICTURES; i++) { query_data->pic_data[i].num_slices = 0; } query_data->num_pictures = 0; - +#else + ITRACE("pcontext->h264_frame_flag = %d\n", pcontext->h264_frame_flag); + + if(pcontext->h264_frame_flag == 0) + { + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].num_slices = 0; + } + query_data->num_pictures = 0; + } + + pcontext->h264_frame_flag = 1; +#endif cubby = &(cxt->parse_cubby); @@ -1439,34 +1460,34 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) #ifndef ANDROID while (size_left >= NAL_length_size) { - NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed); + NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed); - size_parsed += NAL_length_size; + size_parsed += NAL_length_size; #else while (size_left > 0) { - NAL_length = size_left; + NAL_length = size_left; #endif - cxt->list.data[cxt->list.num_items].stpos = size_parsed; - size_parsed += NAL_length; /* skip NAL bytes */ - /* end position is exclusive */ - cxt->list.data[cxt->list.num_items].edpos = size_parsed; - cxt->list.num_items++; - if (cxt->list.num_items >= MAX_IBUFS_PER_SC) - { + cxt->list.data[cxt->list.num_items].stpos = size_parsed; + size_parsed += NAL_length; /* skip NAL bytes */ + /* end position is exclusive */ + cxt->list.data[cxt->list.num_items].edpos = size_parsed; + cxt->list.num_items++; + if (cxt->list.num_items >= MAX_IBUFS_PER_SC) + { ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC); break; - } + } - size_left = cubby->size - size_parsed; - } - - if (size_left != 0) - { - WTRACE("Elementary stream is not aligned (%d).", size_left); - } - return VBP_OK; + size_left = cubby->size - size_parsed; + } + + if (size_left != 0) + { + WTRACE("Elementary stream is not aligned (%d).", size_left); + } + return VBP_OK; } /** @@ -1488,7 +1509,7 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) switch (parser->info.nal_unit_type) { case h264_NAL_UNIT_TYPE_SLICE: - /* ITRACE("slice header is parsed."); */ + ITRACE("slice header is parsed."); error = vbp_add_pic_data_h264(pcontext, i); if (VBP_OK == error) { @@ -1497,7 +1518,7 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) break; case h264_NAL_UNIT_TYPE_IDR: - /* ITRACE("IDR header is parsed."); */ + ITRACE("IDR header is parsed."); error = vbp_add_pic_data_h264(pcontext, i); if (VBP_OK == error) { diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index 87beca4..cabdb29 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -551,13 +551,13 @@ uint32 vbp_get_sc_pos_mp42( *sc_end_pos = size; phase = 3; - if 
(normal_sc) {
- } else {
- /* For short start code since start code is in one nibble just return at this point */
- phase += 1;
- ret = 1;
- break;
- }
+ //if (normal_sc) {
+ //} else {
+ // /* For short start code since start code is in one nibble just return at this point */
+ // phase += 1;
+ // ret = 1;
+ // break;
+ //}
 }
 ptr++;
 size++;
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h
index 9f2a21c..71e76fb 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h
@@ -17,14 +17,24 @@
 
 #ifdef VBP_TRACE /* if VBP_TRACE is defined*/
 
+#ifndef ANDROID
+
 #include <stdio.h>
 #include <stdarg.h>
 
 extern void vbp_trace_util(const char* cat, const char* fun, int line, const char* format, ...);
-
 #define VBP_TRACE_UTIL(cat, format, ...) \
 vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__)
 
+#else
+
+#include <utils/Log.h>
+#define VBP_TRACE_UTIL(cat, format, ...) \
+__android_log_print(ANDROID_LOG_VERBOSE, "mixvbp : "cat, "%s() : %d: "format, \
+__FUNCTION__, __LINE__, ##__VA_ARGS__)
+
+#endif
+
 #define ETRACE(format, ...) VBP_TRACE_UTIL("ERROR: ", format, ##__VA_ARGS__)
 #define WTRACE(format, ...) VBP_TRACE_UTIL("WARNING: ", format, ##__VA_ARGS__)
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
index c350342..e0a9806 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
@@ -144,6 +144,10 @@
 */
 error = pcontext->func_init_parser_entries(pcontext);
 
+#ifdef ANDROID
+ pcontext->h264_frame_flag = 0;
+#endif
+
 cleanup:
 
 if (VBP_OK != error)
@@ -545,6 +549,10 @@ uint32 vbp_utils_query(vbp_context *pcontext, void **data)
 {
 *data = NULL;
 }
+#ifdef ANDROID
+ pcontext->h264_frame_flag = 0;
+#endif
+
 return error;
 }
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
index 67ff3e8..69202f8 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
@@ -76,6 +76,10 @@ struct vbp_context_t
 function_process_parsing_result func_process_parsing_result;
 function_populate_query_data func_populate_query_data;
 
+#ifdef ANDROID
+ uint32 h264_frame_flag;
+#endif
+
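+/* h264_frame_flag: vbp_parse_start_code_h264() resets num_pictures/num_slices
+ * only while this flag is 0 and then sets it, so NAL units delivered over
+ * several parse calls accumulate into one frame until vbp_utils_query()
+ * clears the flag again. */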
*/ - cubby.buf = cubby.buf + + cubby.buf = /*cubby.buf +*/ + cxt->parse_cubby.buf + cxt->list.data[cxt->list.num_items].stpos + PREFIX_SIZE; diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c index 9a7d828..6719ccf 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c @@ -41,24 +41,20 @@ int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits) return ret; } -int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item) +int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item, uint32_t next) { +#ifndef VBP int32_t ret = 1; viddec_pm_cxt_t *cxt; - - cxt = (viddec_pm_cxt_t *)parent; - ret = viddec_emit_append(&(cxt->emitter.cur), item); - return ret; -} - -int32_t viddec_pm_append_workitem_next(void *parent, viddec_workload_item_t *item) -{ - int32_t ret = 1; - viddec_pm_cxt_t *cxt; - + viddec_emitter_wkld *emit; + cxt = (viddec_pm_cxt_t *)parent; - ret = viddec_emit_append(&(cxt->emitter.next), item); + emit = (next) ? &(cxt->emitter.next) : &(cxt->emitter.cur); + ret = viddec_emit_append(emit, item); return ret; +#else + return 1; +#endif } int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, uint8_t *is_emul) @@ -67,6 +63,7 @@ int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, uint8_ viddec_pm_cxt_t *cxt; cxt = (viddec_pm_cxt_t *)parent; + viddec_pm_utils_skip_if_current_is_emulation(&(cxt->getbits)); viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), bit, byte, is_emul); return ret; @@ -82,6 +79,7 @@ static inline int32_t viddec_pm_append_restof_pixel_data(void *parent, uint32_t viddec_workload_item_t wi; cxt = (viddec_pm_cxt_t *)parent; + viddec_pm_utils_skip_if_current_is_emulation(&(cxt->getbits)); viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), &b_off, &start, &emul); if(emul) start--; diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c index 8d3f329..31572a0 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c @@ -74,7 +74,8 @@ static inline void viddec_pm_utils_bstream_scratch_init(viddec_pm_utils_bstream_ } /* This function tells us how much more data is in the current es buffer from current position. Its used to figure out if - we need to go to next es buffer */ + we need to go to next es buffer +*/ static inline uint32_t viddec_pm_utils_bstream_datafromindex(viddec_pm_utils_list_t *list, uint32_t index, uint32_t offset) { uint32_t ret=0; @@ -87,7 +88,8 @@ static inline uint32_t viddec_pm_utils_bstream_datafromindex(viddec_pm_utils_lis /* This function seeks to byte offset position starting from lst_index, if more data is present in current ES buffer pointed by lst_index returns the remaining data in current buffer along with physical address of byte offset. The lst_index parameter - at returns index of ES buffer in list which has byte_offset */ + at returns index of ES buffer in list which has byte_offset +*/ static inline uint32_t viddec_pm_utils_bstream_maxbytes_from_index(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *lst_index, uint32_t byte_offset, @@ -103,13 +105,7 @@ static inline uint32_t viddec_pm_utils_bstream_maxbytes_from_index(viddec_pm_uti last_byte_offst = (list->data[*lst_index].edpos <= (uint32_t)list->total_bytes) ? 
list->data[*lst_index].edpos: (uint32_t)list->total_bytes; if(byte_offset < last_byte_offst) {/* Found a match so return with data remaining */ -#if 1 - int32_t val=0; - val = last_byte_offst - (int32_t)byte_offset; - if(val > 0) bytes_left = (uint32_t)val; -#else bytes_left = viddec_pm_utils_bstream_datafromindex(list, *lst_index, byte_offset); -#endif *physaddr = viddec_pm_utils_bstream_getphys(cxt, byte_offset, *lst_index); break; } @@ -129,7 +125,7 @@ static inline void viddec_pm_utils_bstream_scratch_copyto(viddec_pm_utils_bstrea } } -/* This function is for copying trailing bytes from scratch buffer to bitstream buffer*/ +/* This function is for copying trailing bytes from scratch buffer to bitstream buffer */ static inline void viddec_pm_utils_bstream_scratch_copyfrom(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data) { uint32_t i=0; @@ -221,6 +217,7 @@ static inline void viddec_pm_utils_check_bstream_reload(viddec_pm_utils_bstream_ } #endif } + /* This function moves the stream position by N bits(parameter bits). The bytes parameter tells us how many bytes were read for this N bits(can be different due to emulation bytes). @@ -239,6 +236,27 @@ static inline void viddec_pm_utils_update_skipoffsets(viddec_pm_utils_bstream_bu } } +/* + This function skips emulation byte if necessary. + During Normal flow we skip emulation byte only if we read at least one bit after the the two zero bytes. + However in some cases we might send data to HW without reading the next bit, in which case we are on + emulation byte. To avoid sending invalid data, this function has to be called first to skip. +*/ + +void viddec_pm_utils_skip_if_current_is_emulation(viddec_pm_utils_bstream_cxt_t *cxt) +{ + viddec_pm_utils_bstream_buf_cxt_t *bstream = &(cxt->bstrm_buf); + + if(cxt->is_emul_reqd && + (cxt->phase >= 2) && + (bstream->buf_bitoff == 0) && + (bstream->buf[bstream->buf_index] == 0x3) ) + { + bstream->buf_index += 1; + cxt->phase = 0; + } +} + /* This function gets physical address of the requested au offset(pos). */ @@ -389,10 +407,12 @@ int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uin viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes); ret=1; +#ifdef VBP if (act_bytes > bytes_required) { cxt->emulation_byte_counter = act_bytes - bytes_required; } +#endif } } } @@ -444,7 +464,6 @@ int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uin /* We have to use both the words to get required data */ shift_by = total_bits - 32; data.word[0] = (data.word[0] << shift_by) | ( data.word[1] >> (32 - shift_by)); - //total_bits -= shift_by;/* BUG */ } else { @@ -458,10 +477,12 @@ int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uin viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes); cxt->phase = phase; +#ifdef VBP if (act_bytes > bytes_required) { cxt->emulation_byte_counter += act_bytes - bytes_required; } +#endif } ret =1; diff --git a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h index bc2c239..e034b86 100644 --- a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h +++ b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h @@ -61,6 +61,9 @@ #define VIDDEC_FW_PARSER_IPC_HOST_INT 0x87654321 #define EMITTER_WORKLOAD_ENTRIES 2048 +/* Maximum supported dependent views for H264 MVC. 
Based on spec this can be 1023 */ +#define MVC_MAX_SUPPORTED_VIEWS 1 + /* This enum defines priority level for opening a stream */ enum viddec_stream_priority { diff --git a/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h b/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h index a816dd4..cdc0bff 100644 --- a/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h +++ b/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h @@ -58,6 +58,8 @@ #ifndef VIDDEC_FW_FRAME_ATTR_H #define VIDDEC_FW_FRAME_ATTR_H +#include "viddec_fw_item_types.h" + #define VIDDEC_PANSCAN_MAX_OFFSETS 4 #define VIDDEC_MAX_CPB_CNT 32 @@ -140,6 +142,7 @@ typedef struct viddec_mpeg2_frame_attributes */ unsigned int repeat_first_field; + }viddec_mpeg2_frame_attributes_t; /** @@ -190,38 +193,49 @@ This structure contains the H264 specific frame attributes. */ typedef struct viddec_h264_frame_attributes { - /** - used_for_reference : 1 means this frame is used as ref frame of others. 0 means no any frame ref to this frame - */ - ///// This flag hasn't been enable so far - unsigned int used_for_reference; - - - /** - - Picture Order Count for the current frame/field.- - This value is computed using information from the bitstream.- - Refer to Section 8.2.1, function 8-1 of the ITU-T H.264 Specification.- - */ - // These fileds will be supported in future - int top_field_poc; - int bottom_field_poc; - - /** - Display size, which is cropped from content size. - Currently, the cont_size is cropped, so this paramter is redundant, but in future, cont_size may be changed - */ - viddec_rect_size_t cropped_size; - - /** - top_field_first: 0 means bottom_field_POC is smaller than top_field_POC, else 1 - */ - unsigned int top_field_first; - - /** - field_frame_flag: 0 means all slice of this frame are frame-base encoded, else 1 - */ - unsigned int field_pic_flag; + /** + used_for_reference : 1 means this frame is used as ref frame of others. 0 means no any frame ref to this frame + */ + unsigned int used_for_reference; + /** + Picture Order Count for the current frame/field. + This value is computed using information from the bitstream. + Refer to Section 8.2.1, function 8-1 of the ITU-T H.264 Specification. + These fileds will be supported in future + */ + int top_field_poc; + int bottom_field_poc; + + /** + Display size, which is cropped from content size. + Currently, the cont_size is cropped, so this paramter is redundant, but in future, cont_size may be changed + */ + viddec_rect_size_t cropped_size; + /** + top_field_first: 0 means bottom_field_POC is smaller than top_field_POC, else 1 + */ + unsigned int top_field_first; + + /** + field_frame_flag: 0 means all slice of this frame are frame-base encoded, else 1 + */ + unsigned int field_pic_flag; + + /** + This data type holds view specific information of current frame. + The following information is packed into this data type: + view_id(0-9 bits): Assigned 10 bit value in the encoded stream. + priority_id(10-15bits): Assigned 6 bit priority id. + is_base_view(16th bit): Flag on true indicates current frame belongs to base view, else dependent view. 
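+ For illustration, a view_spcific_info value of 0x00010005 unpacks via the
+ accessor macros below to view_id = 5, priority_id = 0 and is_base_view = 1,
+ i.e. view 5 carried as the base view.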
+ */ +#define viddec_fw_h264_mvc_get_view_id(x) viddec_fw_bitfields_extract( (x)->view_spcific_info, 0, 0x3FF) +#define viddec_fw_h264_mvc_set_view_id(x, val) viddec_fw_bitfields_insert( (x)->view_spcific_info, val, 0, 0x3FF) +#define viddec_fw_h264_mvc_get_priority_id(x) viddec_fw_bitfields_extract( (x)->view_spcific_info, 10, 0x3F) +#define viddec_fw_h264_mvc_set_priority_id(x, val) viddec_fw_bitfields_insert( (x)->view_spcific_info, val, 10, 0x3F) +#define viddec_fw_h264_mvc_get_is_base_view(x) viddec_fw_bitfields_extract( (x)->view_spcific_info, 16, 0x1) +#define viddec_fw_h264_mvc_set_is_base_view(x, val) viddec_fw_bitfields_insert( (x)->view_spcific_info, val, 16, 0x1) + unsigned int view_spcific_info; }viddec_h264_frame_attributes_t; /** @@ -241,7 +255,6 @@ typedef struct viddec_mpeg4_frame_attributes This structure groups all the frame attributes that are exported by the firmware. The frame attributes are split into attributes that are common to all codecs and that are specific to codec type. -As of this release, it is populated only for mpeg2 only. */ typedef struct viddec_frame_attributes { diff --git a/mix_vbp/viddec_fw/include/viddec_fw_item_types.h b/mix_vbp/viddec_fw/include/viddec_fw_item_types.h index 66e5f59..3a69c6d 100644 --- a/mix_vbp/viddec_fw/include/viddec_fw_item_types.h +++ b/mix_vbp/viddec_fw/include/viddec_fw_item_types.h @@ -76,66 +76,107 @@ #define viddec_fw_bitfields_extract(x_32, start, mask) (((x_32) >> (start)) & (mask) ) #define viddec_fw_bitfields_insert(x_32, val_32, start, mask) ((x_32) = (((x_32) & ~( (mask) << (start))) | (((val_32) & (mask)) << (start)))) + /* Workload items type. Each item here represents data that Parser detected ex:slice data which is used either by host or decoder.*/ enum workload_item_type { - VIDDEC_WORKLOAD_INVALID=0x0, /* Unknown type */ - VIDDEC_WORKLOAD_PIXEL_ES=0x100, /* Slice data tag */ - VIDDEC_WORKLOAD_TAG=0x200, /* Frame association tag */ - VIDDEC_WORKLOAD_USERDATA=0x300, /* user data tag */ - VIDDEC_WORKLOAD_DECODER_INFO=0x400, /* decoder specific data tag which decoder module understands*/ - VIDDEC_WORKLOAD_IBUF_DONE=0x500, /* Es buffer completely used tag */ - VIDDEC_WORKLOAD_IBUF_CONTINUED=0x600, /* Es buffer partially used tag */ - VIDDEC_WORKLOAD_TAG_BUFFER_LOOSE_START=0x700, /* ??? */ - VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER=0x800, /* Reorder frames in DPB tag */ - VIDDEC_WORKLOAD_DISPLAY_FRAME=0x900, /* Display order in DPB tag, for H264 NOT required??? 
*/ - - VIDDEC_WORKLOAD_SEQUENCE_INFO=0xa00, /* MPEG2 Seq Hdr, H264 SPS, VC1 SeqLayer */ - VIDDEC_WORKLOAD_DISPLAY_INFO=0xb00, /* MPEG2 Seq Disp Ext, H264 VUI */ - VIDDEC_WORKLOAD_GOP_INFO=0xc00, /* MPEG2 GOP, VC1 Entrypoint */ - VIDDEC_WORKLOAD_SEQ_USER_DATA=0xd00, /* MPEG2, VC1 Sequence Level User data */ - VIDDEC_WORKLOAD_GOP_USER_DATA=0xe00, /* MPEG2, VC1 Gop Level User data */ - VIDDEC_WORKLOAD_FRM_USER_DATA=0xf00, /* MPEG2 Picture User data, VC1 Frame User data */ - VIDDEC_WORKLOAD_FLD_USER_DATA=0x1000, /* MPEG2, VC1 Field User data */ - VIDDEC_WORKLOAD_SLC_USER_DATA=0x1100, /* VC1 Slice User data */ - VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA=0x1200, /* MPEG4 Visual Object User data */ - VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA=0x1300, /* MPEG4 Video Object Layer User data */ - - VIDDEC_WORKLOAD_MPEG2_SEQ_EXT=0x1150, /* MPEG2 Only - Sequence Extension */ - VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C=0x1200, /* VC1 Only */ - - VIDDEC_WORKLOAD_H264_CROPPING=0x1400, /* H264 only */ - VIDDEC_WORKLOAD_H264_PAN_SCAN=0x1500, /* H264 only */ - VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO=0x2100, /* H264 only */ - VIDDEC_WORKLOAD_SEI_PIC_TIMING=0x1600, /* H264 only */ - VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT=0x1700, /* H264 only */ - VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED=0x1800, /* H264 only */ - VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED=0x1900, /* H264 only */ - VIDDEC_WORKLOAD_SEI_RECOVERY_POINT=0x1a00, /* H264 only */ - VIDDEC_WORKLOAD_IBUF_EOS=0x1b00, /* EOS tag on last workload used for current stream */ - VIDDEC_WORKLOAD_IBUF_DISCONTINUITY=0x1c00, /* Discontinuity tag on first workload after discontinuity */ - - VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ=0x1d00, /* MPEG4 Only - Visual Sequence */ - VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ=0x1e00, /* MPEG4 Only - Video Object Layer */ - VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ=0x1f00, /* MPEG4 Only - Group of Video Object Planes */ - VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT=0x2000, /* MPEG4 Only - Video Plane with Short Header */ - - VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0 = 0x10000, /* required reference frames tag, last eight bits tell the id of frame in dpb */ - VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 = 0x20000, /* release frames tag, last eight bits tell the id of frame in dpb */ - VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 = 0x30000, /* Display order in DPB tag, for H264 */ - VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 = 0x40000, /* Release frames but not display, for H264 */ - VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 = 0x50000, /* Release list while EOS, last eight bits tell the id of frame in dpb */ - VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 = 0x60000, /* Diaplay list while EOS, last eight bits tell the id of frame in dpb */ - - VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0 = 0x70000, /* required for H264 as it needs whole DPB for each frame */ - VIDDEC_WORKLOAD_H264_REFR_LIST_0 = 0x80000, /* ref list 0 for H264 */ - VIDDEC_WORKLOAD_H264_REFR_LIST_1 = 0x90000, /* ref list 1 for H264 */ - VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY = 0xa0000, /* eos items begin after this */ - VIDDEC_WORKLOAD_DECODER_SPECIFIC = 0x100000, /* pvt info for decoder tags */ + VIDDEC_WORKLOAD_INVALID =0x0,/* Unknown type */ + VIDDEC_WORKLOAD_PIXEL_ES =0x100,/* Slice data tag */ + VIDDEC_WORKLOAD_TAG =0x200,/* Frame association tag */ + VIDDEC_WORKLOAD_USERDATA =0x300,/* user data tag */ + + VIDDEC_WORKLOAD_IBUF_DONE =0x500,/* Es buffer completely used tag */ + VIDDEC_WORKLOAD_IBUF_CONTINUED =0x600,/* Es buffer partially used tag */ + VIDDEC_WORKLOAD_IBUF_DISCONTINUITY =0x700,/* Discontinuity tag on first workload after discontinuity */ + 
VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER =0x800, /* Reorder frames in DPB tag */ + VIDDEC_WORKLOAD_IBUF_EOS =0x900,/* EOS tag on last workload used for current stream */ + VIDDEC_WORKLOAD_SEQUENCE_INFO =0xa00,/* MPEG2 Seq Hdr, H264 SPS, VC1 SeqLayer */ + VIDDEC_WORKLOAD_DISPLAY_INFO =0xb00,/* MPEG2 Seq Disp Ext, H264 VUI */ + VIDDEC_WORKLOAD_GOP_INFO =0xc00,/* MPEG2 GOP, VC1 Entrypoint */ + VIDDEC_WORKLOAD_SEQ_USER_DATA =0xd00,/* MPEG2, VC1 Sequence Level User data */ + VIDDEC_WORKLOAD_GOP_USER_DATA =0xe00,/* MPEG2, VC1 Gop Level User data */ + VIDDEC_WORKLOAD_FRM_USER_DATA =0xf00,/* MPEG2 Picture User data, VC1 Frame User data */ + + VIDDEC_WORKLOAD_FLD_USER_DATA =0x1000,/* MPEG2, VC1 Field User data */ + VIDDEC_WORKLOAD_SLC_USER_DATA =0x1100,/* VC1 Slice User data */ + VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA =0x1200,/* MPEG4 Visual Object User data */ + VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C =0x1200,/* VC1 Only */ + VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA =0x1300,/* MPEG4 Video Object Layer User data */ + VIDDEC_WORKLOAD_H264_CROPPING =0x1400,/* H264 only */ + VIDDEC_WORKLOAD_H264_PAN_SCAN =0x1500,/* H264 only */ + VIDDEC_WORKLOAD_SEI_PIC_TIMING =0x1600,/* H264 only */ + VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT =0x1700,/* H264 only */ + VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED =0x1800,/* H264 only */ + VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED =0x1900,/* H264 only */ + VIDDEC_WORKLOAD_SEI_RECOVERY_POINT =0x1a00,/* H264 only */ + VIDDEC_WORKLOAD_MPEG2_SEQ_EXT =0x1b00,/* MPEG2 Only - Sequence Extension */ + VIDDEC_WORKLOAD_H264_MVC_SPS_VIEW_IDS =0x1c00,/* H264 only */ + VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ =0x1d00,/* MPEG4 Only - Visual Sequence */ + VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ =0x1e00,/* MPEG4 Only - Video Object Layer */ + VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ =0x1f00,/* MPEG4 Only - Group of Video Object Planes */ + + VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT =0x2000,/* MPEG4 Only - Video Plane with Short Header */ + VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO =0x2100,/* H264 only */ + + VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0 =0x10000,/* required reference frames tag,last eight bits indicate index in dpb */ + VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 =0x20000,/* release frames tag, last eight bits indicate index in dpb*/ + VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 =0x30000,/* Display order in DPB tag, for H264 */ + VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 =0x40000,/* Release frames but not display, for H264 */ + VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 =0x50000,/* Release list while EOS, last eight bits indicate index in dpb */ + VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 =0x60000,/* Display list while EOS, last eight bits indicate index in dpb */ + VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0 =0x70000,/* required for H264 as it needs whole DPB for each frame */ + VIDDEC_WORKLOAD_H264_REFR_LIST_0 =0x80000,/* ref list 0 for H264 */ + VIDDEC_WORKLOAD_H264_REFR_LIST_1 =0x90000,/* ref list 1 for H264 */ + VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY =0xa0000,/* eos items begin after this */ + + VIDDEC_WORKLOAD_DECODER_SPECIFIC =0x100000,/* pvt info for decoder tags */ VIDDEC_WORKLOAD_MAX, }; +struct h264_witem_sps_mvc_id +{ + /* + 0-9: num_views_minus1 + 10-19: start index of views in current item. + 20-23: Number of valid items. 
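+ For illustration, a num_views value of 0x300402 packs num_views_minus1 = 2,
+ a start index of 1 and 3 valid items. Note that the accessors below mask
+ the item count with 0x7, so only bits 20-22 of the stated 20-23 range are
+ consumed in practice.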
+ */ +#define viddec_fw_h264_sps_mvc_id_get_num_views_minus1(x) viddec_fw_bitfields_extract( (x)->num_views, 0, 0x3FF) +#define viddec_fw_h264_sps_mvc_id_set_num_views_minus1(x, val) viddec_fw_bitfields_insert( (x)->num_views, val, 0, 0x3FF) +#define viddec_fw_h264_sps_mvc_id_get_cur_start_index(x) viddec_fw_bitfields_extract( (x)->num_views, 10, 0x3FF) +#define viddec_fw_h264_sps_mvc_id_set_cur_start_index(x, val) viddec_fw_bitfields_insert( (x)->num_views, val, 10, 0x3FF) +#define viddec_fw_h264_sps_mvc_id_get_num_cur_valid_items(x) viddec_fw_bitfields_extract( (x)->num_views, 20, 0x7) +#define viddec_fw_h264_sps_mvc_id_set_num_cur_valid_items(x, val) viddec_fw_bitfields_insert( (x)->num_views, val, 20, 0x7) + unsigned int num_views; + + /* We pack six id's into two integers.Each packed_view(integer) contains three 10 bit ids at 0-9, 10-19, 20-29 + These values can be extracted/set using viddec_fw_h264_sps_mvc_id_get_data_frm_index() + and viddec_fw_h264_sps_mvc_id_set_data_frm_index() functions. + */ +#define viddec_fw_h264_sps_mvc_id_max_packed_ids 6 /* Max number of packed ids in a workload item */ + unsigned int packed_view[2]; +}; + +/* This function extracts a 10 bit view id of index( <6) that was packed into h264_witem_sps_mvc_id structure */ +static inline unsigned int viddec_fw_h264_sps_mvc_id_get_data_frm_index(struct h264_witem_sps_mvc_id *data, unsigned int index) +{ + unsigned int start=0, *word; + + start = ((index > 2) ?(index - 3) : index) *10; + word = &(data->packed_view[(index > 2) ? 1:0]); + return viddec_fw_bitfields_extract(*word, start, 0x3FF); +} + +/* This function packs a 10 bit view id(val) at index( <6) in h264_witem_sps_mvc_id structure */ +static inline void viddec_fw_h264_sps_mvc_id_set_data_frm_index(struct h264_witem_sps_mvc_id *data, unsigned int index, unsigned int val) +{ + unsigned int start=0, *word; + + start = ((index > 2) ?(index - 3) : index) *10; + word = &(data->packed_view[(index > 2) ? 
1:0]); + viddec_fw_bitfields_insert(*word, val, start, 0x3FF); +} + /* 16-byte workload */ typedef struct viddec_workload_item { @@ -500,6 +541,8 @@ typedef struct viddec_workload_item unsigned int pic_height_in_map_units_minus1; } h264_sps; // h264 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO + struct h264_witem_sps_mvc_id h264_sps_mvc_id; + struct { #define viddec_fw_h264_cropping_get_left(x) viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF) @@ -622,6 +665,7 @@ typedef struct viddec_workload_item unsigned int changing_slice_group_idc; /* 2bit value for slice_group idc */ } h264_sei_recovery_point; // h264 item of type VIDDEC_WORKLOAD_SEI_RECOVERY_POINT + struct { @@ -735,4 +779,6 @@ typedef struct viddec_workload_item }; }viddec_workload_item_t; + + #endif /* VIDDEC_ITEM_TYPES_H */ diff --git a/mix_video/src/mixframemanager.c b/mix_video/src/mixframemanager.c index e940262..97618c5 100644 --- a/mix_video/src/mixframemanager.c +++ b/mix_video/src/mixframemanager.c @@ -45,6 +45,10 @@ static void mix_framemanager_init(MixFrameManager * self) { /* for vc1 in asf */ self->p_frame = NULL; self->prev_timestamp = 0; + +#ifdef ANDROID + self->timestamp_storage = NULL; +#endif } static void mix_framemanager_class_init(MixFrameManagerClass * klass) { @@ -120,6 +124,15 @@ MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, if (!fm->frame_array) { goto cleanup; } + + +#ifdef ANDROID + fm->timestamp_storage = g_array_sized_new(FALSE, TRUE, + sizeof(guint64), INITIAL_FRAME_ARRAY_SIZE); + if (!fm->timestamp_storage) { + goto cleanup; + } +#endif } fm->frame_queue = g_queue_new(); @@ -180,6 +193,12 @@ MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm) { g_queue_free(fm->frame_queue); fm->frame_queue = NULL; } +#ifdef ANDROID + if (fm->timestamp_storage) { + g_array_free(fm->timestamp_storage, TRUE); + fm->timestamp_storage = NULL; + } +#endif fm->initialized = FALSE; @@ -291,6 +310,12 @@ MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) { } } +#ifdef ANDROID + if(fm->timestamp_storage) { + g_array_remove_range(fm->timestamp_storage, 0, fm->timestamp_storage->len); + } +#endif + if (fm->frame_queue) { guint len = fm->frame_queue->length; if (len) { @@ -385,6 +410,7 @@ MixVideoFrame *get_expected_frame_from_array(GPtrArray *array, return frame; } +#ifdef ANDROID MixVideoFrame *get_expected_frame_from_array_DO(GPtrArray *array, guint32 expected, guint32 *framedisplayorder) { @@ -453,6 +479,7 @@ MixVideoFrame *get_expected_frame_from_array_DO(GPtrArray *array, return frame; } +#endif /* ANDROID */ void add_frame_into_array(GPtrArray *array, MixVideoFrame *mvf) { @@ -484,6 +511,7 @@ void add_frame_into_array(GPtrArray *array, MixVideoFrame *mvf) { } +#ifdef ANDROID gint frame_sorting_func_DO(gconstpointer a, gconstpointer b) { MixVideoFrame *fa = *((MixVideoFrame **) a); @@ -537,6 +565,7 @@ MIX_RESULT mix_framemanager_displayorder_based_enqueue(MixFrameManager *fm, MIX_RESULT ret = MIX_RESULT_FAIL; guint32 displayorder = 0; + first_frame: ret = mix_videoframe_get_displayorder(mvf, &displayorder); @@ -687,6 +716,8 @@ cleanup: return ret; } +#endif /* ANDROID */ + MIX_RESULT mix_framemanager_timestamp_based_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { @@ -982,11 +1013,35 @@ MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { } else { +#ifdef ANDROID + guint64 timestamp = 0; + mix_videoframe_get_timestamp(mvf, ×tamp); + + /* add timestamp into timestamp storage */ + if(fm->timestamp_storage) { + gint idx = 0; + gboolean found = FALSE; + + 
if(fm->timestamp_storage->len) { + for(idx = 0; idx < fm->timestamp_storage->len; idx ++) { + if(timestamp == g_array_index(fm->timestamp_storage, guint64, idx)) { + found = TRUE; + break; + } + } + } + + if(!found) { + g_array_append_val(fm->timestamp_storage, timestamp); + } + } +#endif + if (fm->timebased_ordering) { #ifndef ANDROID ret = mix_framemanager_timestamp_based_enqueue(fm, mvf); #else - ret = mix_framemanager_displayorder_based_enqueue(fm, mvf); + ret = mix_framemanager_displayorder_based_enqueue(fm, mvf); #endif } else { @@ -999,6 +1054,21 @@ MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { return ret; } +#ifdef ANDROID +gint timestamp_storage_sorting_func(gconstpointer a, gconstpointer b) { + + guint64 ta = *((guint64 *)a); + guint64 tb = *((guint64 *)b); + + if(ta > tb) { + return +1; + } else if(ta == tb) { + return 0; + } + return -1; +} +#endif + MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { MIX_RESULT ret = MIX_RESULT_FAIL; @@ -1020,6 +1090,17 @@ MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { ret = MIX_RESULT_FRAME_NOTAVAIL; *mvf = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue); if (*mvf) { +#ifdef ANDORID + if(fm->timestamp_storage && fm->mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) { + if(fm->timestamp_storage->len) { + guint64 ts; + g_array_sort(fm->timestamp_storage, timestamp_storage_sorting_func); + ts = g_array_index(val_array, guint64, 0) + mix_videoframe_set_timestamp(*mvf, ts); + g_array_remove_index_fast(fm->timestamp_storage, 0); + } + } +#endif ret = MIX_RESULT_SUCCESS; } else if (fm->eos) { ret = MIX_RESULT_EOS; @@ -1102,7 +1183,7 @@ MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) { /* sorting frames in the array by timestamp */ g_ptr_array_sort(fm->frame_array, frame_sorting_func); #else - /* sorting frames is the array by displayorder */ + /* sorting frames is the array by displayorder */ g_ptr_array_sort(fm->frame_array, frame_sorting_func_DO); #endif @@ -1120,9 +1201,7 @@ MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) { } } - fm->eos = TRUE; - g_mutex_unlock(fm->lock); return ret; diff --git a/mix_video/src/mixframemanager.h b/mix_video/src/mixframemanager.h index 43343d5..bde47ee 100644 --- a/mix_video/src/mixframemanager.h +++ b/mix_video/src/mixframemanager.h @@ -58,8 +58,10 @@ struct _MixFrameManager { guint64 prev_timestamp; gboolean timebased_ordering; - - guint32 next_displayorder; +#ifdef ANDROID + guint32 next_displayorder; + GArray *timestamp_storage; +#endif }; /** diff --git a/mix_video/src/mixvideoformat_h264.c b/mix_video/src/mixvideoformat_h264.c index b65936e..18015fc 100644 --- a/mix_video/src/mixvideoformat_h264.c +++ b/mix_video/src/mixvideoformat_h264.c @@ -17,6 +17,15 @@ static int mix_video_h264_counter = 0; #endif /* MIX_LOG_ENABLE */ +#ifdef ANDROID +typedef struct _NalBuffer { + unsigned char *buffer; + unsigned int offset; + unsigned int length; + void *appdata; +} NalBuffer; +#endif + /* The parent class. The pointer will be saved * in this class's initialization. The pointer * can be used for chaining method call if needed. 
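The hunk below reworks the Android decode path to treat bufin[i]->data as an
array of these NalBuffer records, with bufin[i]->size carrying the NAL count
rather than a byte size. A minimal sketch of how a caller could pack one frame
for that loop (hypothetical illustration; frame_data, sps_len and slice_len
are not names from this patch):

    NalBuffer nals[2];
    nals[0].buffer = frame_data;   /* one backing buffer for the whole frame */
    nals[0].offset = 0;            /* first NAL starts at byte 0 */
    nals[0].length = sps_len;
    nals[0].appdata = NULL;
    nals[1].buffer = frame_data;
    nals[1].offset = sps_len;      /* second NAL follows the first */
    nals[1].length = slice_len;
    nals[1].appdata = NULL;
    /* then bufin->data = (guchar *)nals and bufin->size = 2, which is what
       the vbp_parse() loop in this hunk consumes */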
@@ -669,10 +678,39 @@ MIX_RESULT mix_videofmt_h264_decode(MixVideoFormat *mix, MixBuffer * bufin[], LOG_V( "Calling parse for current frame, parse handle %d, buf %x, size %d\n", (int)parent->parser_handle, (guint)bufin[i]->data, bufin[i]->size); +#ifndef ANDROID pret = vbp_parse(parent->parser_handle, bufin[i]->data, bufin[i]->size, FALSE); +#else + /* we got an array of NALs for a frame */ + { + gint nal_index = 0; + NalBuffer *nals = (NalBuffer *)bufin[i]->data; + gint nal_count = bufin[i]->size; + + LOG_V("nal_count = %d\n", nal_count); + for(nal_index = 0; nal_index < nal_count; nal_index ++) { + + LOG_V("nals[%d].offset = 0x%x nals[nal_index].length = %d\n", + nal_index, nals[nal_index].offset, nals[nal_index].length); + + pret = vbp_parse(parent->parser_handle, + nals[nal_index].buffer + nals[nal_index].offset, + nals[nal_index].length, + FALSE); + + LOG_V("nal_index = %d pret = 0x%x\n", nal_index, pret); + + if(pret != VBP_OK && pret != VBP_DONE) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error parsing data : pret = 0x%x\n", pret); + goto cleanup; + } + } + } +#endif LOG_V( "Called parse for current frame\n"); diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c index e222ace..1a89173 100644 --- a/mix_video/src/mixvideoformatenc_h264.c +++ b/mix_video/src/mixvideoformatenc_h264.c @@ -1369,8 +1369,15 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, LOG_I( "input buf size = %d\n", bufin->size); - guint8 *inbuf = bufin->data; - + guint8 *inbuf = bufin->data; + +#ifndef ANDROID +#define USE_SRC_FMT_YUV420 +#else +#define USE_SRC_FMT_NV21 +#endif + +#ifdef USE_SRC_FMT_YUV420 /*need to convert YUV420 to NV12*/ dst_y = pvbuf +image->offsets[0]; @@ -1389,7 +1396,37 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, } dst_uv += image->pitches[1]; } - +#else //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 + int offset_uv = width * height; + guint8 *inbuf_uv = inbuf + offset_uv; + int height_uv = height / 2; + int width_uv = width; + + dst_y = pvbuf + image->offsets[0]; + for (i = 0; i < height; i++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + +#ifdef USE_SRC_FMT_NV12 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i++) { + memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); + dst_uv += image->pitches[1]; + } +#else //USE_SRC_FMT_NV21 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i ++) { + for (j = 0; j < width_uv; j += 2) { + dst_uv[j] = inbuf_uv[j+1]; //u + dst_uv[j+1] = inbuf_uv[j]; //v + } + dst_uv += image->pitches[1]; + inbuf_uv += width_uv; + } +#endif +#endif //USE_SRC_FMT_YUV420 + va_status = vaUnmapBuffer(va_display, image->buf); if (va_status != VA_STATUS_SUCCESS) { -- cgit v1.2.3 From 21ab8997c9499686cb81b3f6e868d44c8b74ed75 Mon Sep 17 00:00:00 2001 From: Tao Tao Date: Tue, 13 Jul 2010 17:04:46 -0700 Subject: Fix bug in seeking Change-Id: I7cc033dabbaff260f90d7074c58dffcb90f3bc11 --- mix_video/src/mixframemanager.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mix_video/src/mixframemanager.c b/mix_video/src/mixframemanager.c index 97618c5..569dabd 100644 --- a/mix_video/src/mixframemanager.c +++ b/mix_video/src/mixframemanager.c @@ -1090,12 +1090,12 @@ MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { ret = MIX_RESULT_FRAME_NOTAVAIL; *mvf = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue); if (*mvf) { -#ifdef ANDORID +#ifdef ANDROID if(fm->timestamp_storage && 
fm->mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) { if(fm->timestamp_storage->len) { guint64 ts; g_array_sort(fm->timestamp_storage, timestamp_storage_sorting_func); - ts = g_array_index(val_array, guint64, 0) + ts = g_array_index(fm->timestamp_storage, guint64, 0); mix_videoframe_set_timestamp(*mvf, ts); g_array_remove_index_fast(fm->timestamp_storage, 0); } -- cgit v1.2.3 From 04e75130d6c74841c629a667b3e2ba8044965213 Mon Sep 17 00:00:00 2001 From: mgross Date: Mon, 19 Jul 2010 09:55:00 -0700 Subject: hack workaround to deal with the missing true/false keywords and the new android buid process and BSP code base layout. Change-Id: I76f13fb348a0d1fd1c02d0bfeee758ddd1372cc4 --- Android.mk | 2 +- .../fw/codecs/h264/parser/h264parse_dpb.c | 10 ++++++++ .../fw/codecs/h264/parser/viddec_h264_parse.c | 9 ++++++++ mix_vbp/viddec_fw/fw/include/stdint.h | 27 ---------------------- mix_vbp/viddec_fw/fw/include/stdint.h.bad.idea | 27 ++++++++++++++++++++++ mix_vbp/viddec_fw/fw/parser/main.c | 9 ++++++++ mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 9 ++++++++ mix_vbp/viddec_fw/fw/parser/vbp_loader.c | 10 ++++++++ mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 9 ++++++++ mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 10 ++++++++ mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c | 10 ++++++++ mix_vbp/viddec_fw/fw/parser/viddec_emit.c | 10 ++++++++ mix_vbp/viddec_fw/fw/parser/viddec_pm.c | 10 ++++++++ mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c | 9 ++++++++ mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c | 10 ++++++++ mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c | 10 ++++++++ .../viddec_fw/fw/parser/viddec_pm_utils_bstream.c | 10 ++++++++ mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c | 10 ++++++++ 18 files changed, 173 insertions(+), 28 deletions(-) delete mode 100644 mix_vbp/viddec_fw/fw/include/stdint.h create mode 100644 mix_vbp/viddec_fw/fw/include/stdint.h.bad.idea diff --git a/Android.mk b/Android.mk index d0deed6..3e0347f 100644 --- a/Android.mk +++ b/Android.mk @@ -3,7 +3,7 @@ include $(CLEAR_VARS) VENDORS_INTEL_MRST_LIBMIX_ROOT := $(LOCAL_PATH) -GLIB_TOP := external/glib +GLIB_TOP := hardware/intel/glib include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_common/src/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_audio/src/Android.mk diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c index 5ad9d09..d7be02f 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c @@ -1,4 +1,14 @@ +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + + /*! 
*********************************************************************** * \file: h264_dpb_ctl.c diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c index a763d00..bc301da 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c @@ -1,3 +1,12 @@ +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + #include "viddec_fw_debug.h" #include "viddec_parser_ops.h" diff --git a/mix_vbp/viddec_fw/fw/include/stdint.h b/mix_vbp/viddec_fw/fw/include/stdint.h deleted file mode 100644 index cf23208..0000000 --- a/mix_vbp/viddec_fw/fw/include/stdint.h +++ /dev/null @@ -1,27 +0,0 @@ -#ifndef __STDINT_H -#define __STDINT_H - -#ifndef ANDROID - -typedef unsigned char uint8_t; -typedef unsigned short uint16_t; -typedef unsigned int uint32_t; -typedef unsigned long long uint64_t; - -//#ifndef _MACHTYPES_H_ -typedef signed char int8_t; -typedef signed short int16_t; -typedef signed int int32_t; -typedef signed long long int64_t; -//#endif - -#endif - -#ifndef NULL -#define NULL (void*)0x0 -#endif - -#define true 1 -#define false 0 - -#endif diff --git a/mix_vbp/viddec_fw/fw/include/stdint.h.bad.idea b/mix_vbp/viddec_fw/fw/include/stdint.h.bad.idea new file mode 100644 index 0000000..cf23208 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/include/stdint.h.bad.idea @@ -0,0 +1,27 @@ +#ifndef __STDINT_H +#define __STDINT_H + +#ifndef ANDROID + +typedef unsigned char uint8_t; +typedef unsigned short uint16_t; +typedef unsigned int uint32_t; +typedef unsigned long long uint64_t; + +//#ifndef _MACHTYPES_H_ +typedef signed char int8_t; +typedef signed short int16_t; +typedef signed int int32_t; +typedef signed long long int64_t; +//#endif + +#endif + +#ifndef NULL +#define NULL (void*)0x0 +#endif + +#define true 1 +#define false 0 + +#endif diff --git a/mix_vbp/viddec_fw/fw/parser/main.c b/mix_vbp/viddec_fw/fw/parser/main.c index 1bb368a..6b5f402 100644 --- a/mix_vbp/viddec_fw/fw/parser/main.c +++ b/mix_vbp/viddec_fw/fw/parser/main.c @@ -1,3 +1,12 @@ +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + #include "fw_pvt.h" #include "viddec_fw_parser_ipclib_config.h" #include "viddec_fw_common_defs.h" diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index 1b6b3d6..90dea6a 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -6,6 +6,15 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
*/ +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + #include #include diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c index 27a2dd0..e7b4cd6 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c @@ -6,6 +6,16 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + + #include #include "vbp_loader.h" diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index cabdb29..47408dc 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -6,6 +6,15 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + #include #include diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c index e0a9806..c5d378d 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c @@ -7,6 +7,16 @@ */ +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + + #include #include diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c index 4af107a..5c05c83 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c @@ -7,6 +7,16 @@ */ +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + + #include #include #include diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_emit.c b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c index f6e6a8a..dc52e5c 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_emit.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c @@ -1,3 +1,13 @@ + +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + #include "viddec_emitter.h" #include "viddec_fw_workload.h" #include "viddec_fw_debug.h" diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c index ffcff11..6e0b66b 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c @@ -1,3 +1,13 @@ + +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + #include "viddec_pm.h" #include "viddec_fw_debug.h" #include "viddec_fw_common_defs.h" diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c index f16fbcd..fb690a3 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c @@ -1,3 +1,12 @@ +#ifdef ANDROID +//#ifndef NULL 
+//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + #include "fw_pvt.h" #include "viddec_fw_parser_ipclib_config.h" #include "viddec_fw_common_defs.h" diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c index 6719ccf..10bb179 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c @@ -1,3 +1,13 @@ + +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + #include "viddec_pm.h" #include "viddec_fw_debug.h" #include "viddec_parser_ops.h" diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c index b0d8842..eada1fa 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c @@ -1,3 +1,13 @@ + +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + #include "viddec_pm.h" #include "viddec_fw_debug.h" #include "viddec_fw_common_defs.h" diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c index 31572a0..731a78e 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c @@ -1,3 +1,13 @@ + +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + #include "viddec_pm_utils_bstream.h" #include "viddec_fw_debug.h" diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c index ccc83b3..164710e 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c @@ -1,3 +1,13 @@ + +#ifdef ANDROID +//#ifndef NULL +//#define NULL (void*)0x0 +//#endif + +#define true 1 +#define false 0 +#endif + #include "viddec_pm_utils_list.h" #include "viddec_fw_debug.h" -- cgit v1.2.3 From f51db1b66597bce6839e462388453532298e2066 Mon Sep 17 00:00:00 2001 From: Yanlong Fang Date: Mon, 19 Jul 2010 18:32:52 +0800 Subject: modified mix h264 encoder codes to support shared buffer mode encoding Change-Id: Icd3b5bed434e80e50a11fa138b6f8b44b3b555da Signed-off-by: Yanlong Fang --- mix_video/src/mixvideoformatenc_h264.c | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c index 1a89173..776d1fc 100644 --- a/mix_video/src/mixvideoformatenc_h264.c +++ b/mix_video/src/mixvideoformatenc_h264.c @@ -1504,8 +1504,9 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, if (mix->cur_frame == NULL) { guint ci_idx; - memcpy (&ci_idx, bufin->data, bufin->size); - + //memcpy (&ci_idx, bufin->data, bufin->size); + memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); + LOG_I( "surface_num = %d\n", mix->surface_num); LOG_I( -- cgit v1.2.3 From 58763ffb9c30a0e7d2a81851690e41b99622a331 Mon Sep 17 00:00:00 2001 From: Yanlong Fang Date: Mon, 19 Jul 2010 18:32:52 +0800 Subject: modified mix h264 encoder codes to support shared buffer mode encoding Change-Id: I9398120697b1e703975482459bbbe0ffdd88f894 Signed-off-by: Yanlong Fang --- mix_video/src/Android.mk | 2 + mix_video/src/mixvideoformatenc_h263.c | 69 ++++++++++++++++++++++++++++++---- mix_video/src/mixvideoformatenc_h263.h | 4 +- mix_video/src/mixvideoformatenc_h264.c | 24 +++++++++--- 
mix_video/src/mixvideoformatenc_h264.h | 1 + 5 files changed, 85 insertions(+), 15 deletions(-) diff --git a/mix_video/src/Android.mk b/mix_video/src/Android.mk index 718fddf..0f060fc 100644 --- a/mix_video/src/Android.mk +++ b/mix_video/src/Android.mk @@ -93,6 +93,7 @@ LOCAL_COPY_HEADERS := \ mixvideoconfigparamsdec_vc1.h \ mixvideoconfigparamsenc.h \ mixvideoconfigparamsenc_h264.h \ + mixvideoconfigparamsenc_h263.h \ mixvideoconfigparamsenc_mpeg4.h \ mixvideoconfigparamsenc_preview.h \ mixvideodecodeparams.h \ @@ -103,6 +104,7 @@ LOCAL_COPY_HEADERS := \ mixvideoformat_vc1.h \ mixvideoformatenc.h \ mixvideoformatenc_h264.h \ + mixvideoformatenc_h263.h \ mixvideoformatenc_mpeg4.h \ mixvideoformatenc_preview.h \ mixvideoformatqueue.h \ diff --git a/mix_video/src/mixvideoformatenc_h263.c b/mix_video/src/mixvideoformatenc_h263.c index 96f8c9a..5f28682 100644 --- a/mix_video/src/mixvideoformatenc_h263.c +++ b/mix_video/src/mixvideoformatenc_h263.c @@ -44,6 +44,7 @@ static void mix_videoformatenc_h263_init(MixVideoFormatEnc_H263 * self) { self->cur_frame = NULL; self->ref_frame = NULL; self->rec_frame = NULL; + self->last_mix_buffer = NULL; self->ci_shared_surfaces = NULL; self->surfaces= NULL; @@ -748,6 +749,11 @@ MIX_RESULT mix_videofmtenc_h263_flush(MixVideoFormatEnc *mix) { mix_videoframe_unref (self->ref_frame); self->ref_frame = NULL; } + + if(self->last_mix_buffer) { + mix_buffer_unref(self->last_mix_buffer); + self->last_mix_buffer = NULL; + } /*reset the properities*/ self->encoded_frames = 0; @@ -1310,7 +1316,13 @@ MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, "input buf size = %d\n", bufin->size); guint8 *inbuf = bufin->data; - + +#ifndef ANDROID +#define USE_SRC_FMT_YUV420 +#else +#define USE_SRC_FMT_NV21 +#endif +#ifdef USE_SRC_FMT_YUV420 /*need to convert YUV420 to NV12*/ dst_y = pvbuf +image->offsets[0]; @@ -1329,7 +1341,38 @@ MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, } dst_uv += image->pitches[1]; } - + +#else //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 + int offset_uv = width * height; + guint8 *inbuf_uv = inbuf + offset_uv; + int height_uv = height / 2; + int width_uv = width; + + dst_y = pvbuf + image->offsets[0]; + for (i = 0; i < height; i++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + +#ifdef USE_SRC_FMT_NV12 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i++) { + memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); + dst_uv += image->pitches[1]; + } +#else //USE_SRC_FMT_NV21 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i ++) { + for (j = 0; j < width_uv; j += 2) { + dst_uv[j] = inbuf_uv[j+1]; //u + dst_uv[j+1] = inbuf_uv[j]; //v + } + dst_uv += image->pitches[1]; + inbuf_uv += width_uv; + } +#endif +#endif //USE_SRC_FMT_YUV420 + va_status = vaUnmapBuffer(va_display, image->buf); if (va_status != VA_STATUS_SUCCESS) { @@ -1405,8 +1448,9 @@ MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, if (mix->cur_frame == NULL) { guint ci_idx; - memcpy (&ci_idx, bufin->data, bufin->size); - +// memcpy (&ci_idx, bufin->data, bufin->size); + memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); + LOG_I( "surface_num = %d\n", mix->surface_num); LOG_I( @@ -1536,7 +1580,7 @@ MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, if (coded_seg->next == NULL) break; - coded_seg ++; + coded_seg = coded_seg->next; num_seg ++; } @@ -1572,7 +1616,7 @@ MIX_RESULT mix_videofmtenc_h263_process_encode 
(MixVideoFormatEnc_H263 *mix, if (coded_seg->next == NULL) break; - coded_seg ++; + coded_seg = coded_seg->next; } iovout->buffer_size = iovout->data_size; @@ -1678,9 +1722,18 @@ MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, mix->coded_buf_index %=2; mix->last_frame = mix->cur_frame; + if(mix->last_mix_buffer) { + LOG_V("calls to mix_buffer_unref \n"); + LOG_V("refcount = %d\n", MIX_PARAMS(mix->last_mix_buffer)->refcount); + mix_buffer_unref(mix->last_mix_buffer); + } + + LOG_V("ref the current bufin\n"); + mix->last_mix_buffer = mix_buffer_ref(bufin); + if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_frame); - mix->cur_frame = NULL; + mix_videoframe_unref (mix->cur_frame); + mix->cur_frame = NULL; } cleanup: diff --git a/mix_video/src/mixvideoformatenc_h263.h b/mix_video/src/mixvideoformatenc_h263.h index b4d1f84..ece5fc8 100644 --- a/mix_video/src/mixvideoformatenc_h263.h +++ b/mix_video/src/mixvideoformatenc_h263.h @@ -47,8 +47,8 @@ struct _MixVideoFormatEnc_H263 { MixVideoFrame *cur_frame; //current input frame to be encoded; MixVideoFrame *ref_frame; //reference frame MixVideoFrame *rec_frame; //reconstructed frame; - MixVideoFrame *last_frame; //last frame; - + MixVideoFrame *last_frame; //last frame; + MixBuffer *last_mix_buffer; guint disable_deblocking_filter_idc; guint slice_num; diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c index 776d1fc..82fad06 100644 --- a/mix_video/src/mixvideoformatenc_h264.c +++ b/mix_video/src/mixvideoformatenc_h264.c @@ -43,7 +43,8 @@ static void mix_videoformatenc_h264_init(MixVideoFormatEnc_H264 * self) { self->is_intra = TRUE; self->cur_frame = NULL; self->ref_frame = NULL; - self->rec_frame = NULL; + self->rec_frame = NULL; + self->last_mix_buffer = NULL; self->ci_shared_surfaces = NULL; self->surfaces= NULL; @@ -775,7 +776,12 @@ MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { mix_videoframe_unref (self->ref_frame); self->ref_frame = NULL; } - + + if(self->last_mix_buffer) { + mix_buffer_unref(self->last_mix_buffer); + self->last_mix_buffer = NULL; + } + /*reset the properities*/ self->encoded_frames = 0; self->pic_skipped = FALSE; @@ -1641,7 +1647,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, if (coded_seg->next == NULL) break; - coded_seg ++; + coded_seg = coded_seg->next; num_seg ++; } @@ -1684,7 +1690,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, if (coded_seg->next == NULL) break; - coded_seg ++; + coded_seg = coded_seg->next; } //memcpy (iovout->data, buf + 16, iovout->data_size); //parload is started from 17th byte @@ -1752,7 +1758,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, if (coded_seg->next == NULL) break; - coded_seg ++; + coded_seg = coded_seg->next; } ret = mix_videofmtenc_h264_AnnexB_to_length_prefixed (tem_buf, iovout->data_size, iovout->data, &size); @@ -1870,6 +1876,14 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, mix->coded_buf_index %=2; mix->last_frame = mix->cur_frame; + if(mix->last_mix_buffer) { + LOG_V("calls to mix_buffer_unref \n"); + LOG_V("refcount = %d\n", MIX_PARAMS(mix->last_mix_buffer)->refcount); + mix_buffer_unref(mix->last_mix_buffer); + } + + LOG_V("ref the current bufin\n"); + mix->last_mix_buffer = mix_buffer_ref(bufin); if (!(parent->need_display)) { mix_videoframe_unref (mix->cur_frame); diff --git a/mix_video/src/mixvideoformatenc_h264.h 
b/mix_video/src/mixvideoformatenc_h264.h index 0ef0e18..6f470af 100644 --- a/mix_video/src/mixvideoformatenc_h264.h +++ b/mix_video/src/mixvideoformatenc_h264.h @@ -47,6 +47,7 @@ struct _MixVideoFormatEnc_H264 { MixVideoFrame *ref_frame; //reference frame MixVideoFrame *rec_frame; //reconstructed frame; MixVideoFrame *last_frame; //last frame; + MixBuffer *last_mix_buffer; guint basic_unit_size; //for rate control guint disable_deblocking_filter_idc; -- cgit v1.2.3 From aa78e4eb4c3827cf30ff101dc38c5c15af9b98a9 Mon Sep 17 00:00:00 2001 From: Tao Tao Date: Thu, 30 Sep 2010 16:46:38 -0700 Subject: Decoder changes for RTP, partial frame handling, frame manager update for frame gap handling, handling slice loss. Change-Id: I0a9fd7d7f6a656e8be0f9bb427da25edde1dbe6c --- mix_vbp/ChangeLog | 6 + mix_vbp/configure.ac | 8 +- mix_vbp/m4/as-mix-version.m4 | 6 +- mix_vbp/mixvbp.spec | 5 +- .../fw/codecs/h264/parser/h264parse_dpb.c | 4 - .../fw/codecs/h264/parser/viddec_h264_parse.c | 4 - mix_vbp/viddec_fw/fw/parser/Makefile.am | 2 - mix_vbp/viddec_fw/fw/parser/main.c | 4 - mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 311 ++- mix_vbp/viddec_fw/fw/parser/vbp_loader.c | 4 - mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 37 +- mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 17 +- mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 17 +- mix_vbp/viddec_fw/fw/parser/vbp_utils.h | 4 - mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c | 4 - mix_vbp/viddec_fw/fw/parser/viddec_emit.c | 4 - mix_vbp/viddec_fw/fw/parser/viddec_pm.c | 4 - mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c | 4 - mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c | 4 - mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c | 4 - .../viddec_fw/fw/parser/viddec_pm_utils_bstream.c | 4 - mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c | 4 - mix_vbp/viddec_fw/include/viddec_fw_common_defs.h | 14 + mix_video/ChangeLog | 21 + mix_video/configure.ac | 8 +- mix_video/m4/as-mix-version.m4 | 6 +- mix_video/mixvideo.spec | 7 +- mix_video/src/mixbuffer.h | 4 + mix_video/src/mixbuffer_private.h | 3 + mix_video/src/mixdisplayx11.h | 4 + mix_video/src/mixframemanager.c | 1346 +++++------- mix_video/src/mixframemanager.h | 64 +- mix_video/src/mixsurfacepool.c | 14 + mix_video/src/mixsurfacepool.h | 1 + mix_video/src/mixvideo.c | 352 +++- mix_video/src/mixvideo.h | 26 + mix_video/src/mixvideo_private.h | 2 + mix_video/src/mixvideocaps.h | 4 + mix_video/src/mixvideoconfigparams.h | 4 + mix_video/src/mixvideoconfigparamsdec.c | 3 + mix_video/src/mixvideoconfigparamsdec.h | 4 + mix_video/src/mixvideoconfigparamsdec_h264.h | 4 + mix_video/src/mixvideoconfigparamsdec_mp42.h | 4 + mix_video/src/mixvideoconfigparamsdec_vc1.h | 4 + mix_video/src/mixvideoconfigparamsenc.c | 40 + mix_video/src/mixvideoconfigparamsenc.h | 57 + mix_video/src/mixvideoconfigparamsenc_h263.h | 4 + mix_video/src/mixvideoconfigparamsenc_h264.c | 23 +- mix_video/src/mixvideoconfigparamsenc_h264.h | 31 + mix_video/src/mixvideoconfigparamsenc_mpeg4.h | 4 + mix_video/src/mixvideoconfigparamsenc_preview.h | 4 + mix_video/src/mixvideodecodeparams.h | 4 + mix_video/src/mixvideodef.h | 38 + mix_video/src/mixvideoencodeparams.h | 4 + mix_video/src/mixvideoformat.c | 43 +- mix_video/src/mixvideoformat.h | 60 +- mix_video/src/mixvideoformat_h264.c | 2185 ++++++++++---------- mix_video/src/mixvideoformat_h264.h | 4 +- mix_video/src/mixvideoformat_mp42.c | 4 +- mix_video/src/mixvideoformat_mp42.h | 4 + mix_video/src/mixvideoformat_vc1.c | 4 +- mix_video/src/mixvideoformat_vc1.h | 4 + mix_video/src/mixvideoformatenc.c | 195 ++ 
mix_video/src/mixvideoformatenc.h | 18 +- mix_video/src/mixvideoformatenc_h263.c | 20 +- mix_video/src/mixvideoformatenc_h263.h | 6 + mix_video/src/mixvideoformatenc_h264.c | 142 +- mix_video/src/mixvideoformatenc_h264.h | 12 +- mix_video/src/mixvideoformatenc_mpeg4.c | 5 +- mix_video/src/mixvideoformatenc_mpeg4.h | 4 + mix_video/src/mixvideoformatenc_preview.c | 1 + mix_video/src/mixvideoformatenc_preview.h | 4 + mix_video/src/mixvideoformatqueue.h | 4 + mix_video/src/mixvideoframe.c | 6 +- mix_video/src/mixvideoframe.h | 4 + mix_video/src/mixvideoframe_private.h | 7 +- mix_video/src/mixvideoinitparams.h | 4 + mix_video/src/mixvideolog.h | 3 + mix_video/src/mixvideorenderparams.h | 4 + mix_video/src/mixvideorenderparams_internal.h | 5 + mix_video/test/configure.ac | 6 +- 81 files changed, 3027 insertions(+), 2300 deletions(-) diff --git a/mix_vbp/ChangeLog b/mix_vbp/ChangeLog index 139597f..98a1ee8 100644 --- a/mix_vbp/ChangeLog +++ b/mix_vbp/ChangeLog @@ -1,2 +1,8 @@ +2010-09-15 Andy Qiu + * Merged changes for Android +2010-09-02 Andy Qiu + * change H.264 parser to support partial frame parsing + * change H.264 parser to support byte stream parsing + * change version number to 0.1.17 diff --git a/mix_vbp/configure.ac b/mix_vbp/configure.ac index 93a9081..7b9edae 100644 --- a/mix_vbp/configure.ac +++ b/mix_vbp/configure.ac @@ -1,8 +1,8 @@ -AC_INIT("", "", [linda.s.cline@intel.com]) +AC_INIT([""],[""],[linda.s.cline@intel.com]) AC_CONFIG_MACRO_DIR(m4) -AS_MIX_VERSION(mixvbp, MIXVBP, 0, 1, 15) +UMG_MIX_VERSION(mixvbp, MIXVBP, 0, 1, 17) dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode AM_MAINTAINER_MODE @@ -12,11 +12,11 @@ AM_INIT_AUTOMAKE($PACKAGE, $VERSION) dnl make aclocal work in maintainer mode AC_SUBST(ACLOCAL_AMFLAGS, "-I m4") -AM_CONFIG_HEADER(config.h) +AC_CONFIG_HEADERS([config.h]) dnl check for tools AC_PROG_CC -AC_PROG_LIBTOOL +LT_INIT MIX_CFLAGS="-Wall -Werror" diff --git a/mix_vbp/m4/as-mix-version.m4 b/mix_vbp/m4/as-mix-version.m4 index f0301b1..82f6c95 100644 --- a/mix_vbp/m4/as-mix-version.m4 +++ b/mix_vbp/m4/as-mix-version.m4 @@ -1,9 +1,9 @@ dnl as-mix-version.m4 -dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE) +dnl UMG_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE) dnl example -dnl AS_MIX_VERSION(mixvideo,MIXVIDEO, 0, 3, 2,) +dnl UMG_MIX_VERSION(mixvideo,MIXVIDEO, 0, 3, 2,) dnl for a 0.3.2 release version dnl this macro @@ -11,7 +11,7 @@ dnl - defines [$PREFIX]_MAJOR, MINOR and REVISION, CURRENT, AGE dnl - defines [$PREFIX], VERSION dnl - AC_SUBST's all defined vars -AC_DEFUN([AS_MIX_VERSION], +AC_DEFUN([UMG_MIX_VERSION], [ PACKAGE=[$1] [$2]_MAJOR=[$3] diff --git a/mix_vbp/mixvbp.spec b/mix_vbp/mixvbp.spec index da15b9d..77445d3 100644 --- a/mix_vbp/mixvbp.spec +++ b/mix_vbp/mixvbp.spec @@ -6,14 +6,15 @@ Summary: MIX Video Bitstream Parser Name: mixvbp -Version: 0.1.15 +Version: 0.1.17 Release: 1 -Source0: %{name}-%{version}.tar.gz +Source0: %{name}-%{version}.tar.bz2 NoSource: 0 License: Proprietary Group: System Environment/Libraries BuildRoot: %{_tmppath}/%{name}-root ExclusiveArch: i586 +BuildRequires: glib2-devel libva-devel %description MIX Video Bitstream Parser is an user library interface for various video format bitstream parsing diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c index d7be02f..d174f12 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c @@ -1,9 +1,5 @@ #ifdef 
ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c index bc301da..c30167b 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c @@ -1,8 +1,4 @@ #ifdef ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif diff --git a/mix_vbp/viddec_fw/fw/parser/Makefile.am b/mix_vbp/viddec_fw/fw/parser/Makefile.am index c94b935..89c995c 100644 --- a/mix_vbp/viddec_fw/fw/parser/Makefile.am +++ b/mix_vbp/viddec_fw/fw/parser/Makefile.am @@ -168,7 +168,6 @@ noinst_HEADERS = ./vbp_h264_parser.h \ ./include/viddec_pm_utils_bstream.h \ ./include/viddec_pm_utils_list.h \ ./include/viddec_vc1_parse.h \ - ../include/stdint.h \ ../include/viddec_debug.h \ ../include/viddec_fw_version.h \ ../../include/viddec_fw_common_defs.h \ @@ -177,7 +176,6 @@ noinst_HEADERS = ./vbp_h264_parser.h \ ../../include/viddec_fw_item_types.h \ ../../include/viddec_fw_parser_host.h \ ../../include/viddec_fw_workload.h \ - ../../fw/include/stdint.h \ ../../fw/include/viddec_debug.h \ ../../fw/include/viddec_fw_version.h \ ../../fw/codecs/h264/include/h264.h \ diff --git a/mix_vbp/viddec_fw/fw/parser/main.c b/mix_vbp/viddec_fw/fw/parser/main.c index 6b5f402..4ba89f7 100644 --- a/mix_vbp/viddec_fw/fw/parser/main.c +++ b/mix_vbp/viddec_fw/fw/parser/main.c @@ -1,8 +1,4 @@ #ifdef ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index 90dea6a..38392e1 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -7,10 +7,6 @@ */ #ifdef ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif @@ -25,8 +21,23 @@ #include "vbp_h264_parser.h" -/* number of bytes used to encode length of NAL payload. Default is 4 bytes. */ -static int NAL_length_size = 4; +typedef enum +{ + H264_BS_LENGTH_PREFIXED, + H264_BS_SC_PREFIXED, + H264_BS_SINGLE_NAL +} H264_BS_PATTERN; + +/* number of bytes used to encode length of NAL payload. If parser does not receive configuration data +and NAL_length_size is equal to zero when bitstream parsing begins, we assume bitstream is in AnnexB +byte stream format. */ +static int NAL_length_size = 0; + +/* indicate if stream is length prefixed */ +static int length_prefix_verified = 0; + +static H264_BS_PATTERN bitstream_pattern = H264_BS_SC_PREFIXED; + /* default scaling list table */ unsigned char Default_4x4_Intra[16] = @@ -77,7 +88,7 @@ unsigned char quant_flat[16] = 16,16,16,16 }; -unsigned char quant8_flat[64] = +unsigned char quant8_flat[64] = { 16,16,16,16,16,16,16,16, 16,16,16,16,16,16,16,16, @@ -234,6 +245,10 @@ uint32 vbp_free_query_data_h264(vbp_context *pcontext) pcontext->query_data = NULL; + NAL_length_size = 0; + length_prefix_verified = 0; + bitstream_pattern = H264_BS_SC_PREFIXED; + return VBP_OK; } @@ -553,9 +568,11 @@ static inline void vbp_set_scaling_list_h264( VAIQMatrixBufferH264* IQ_matrix_buf) { int i; + int lists_to_set = 6 + 2 * (parser->info.active_PPS.transform_8x8_mode_flag ? 
1 : 0); + if (parser->info.active_PPS.pic_scaling_matrix_present_flag) { - for (i = 0; i < 6 + 2 * parser->info.active_PPS.transform_8x8_mode_flag; i++) + for (i = 0; i < lists_to_set; i++) { if (parser->info.active_PPS.pic_scaling_list_present_flag[i]) { @@ -657,7 +674,7 @@ static inline void vbp_set_scaling_list_h264( /* PPS matrix not present, use SPS information */ if (parser->info.active_SPS.seq_scaling_matrix_present_flag) { - for (i = 0; i < 6 + 2 * parser->info.active_PPS.transform_8x8_mode_flag; i++) + for (i = 0; i < lists_to_set; i++) { if (parser->info.active_SPS.seq_scaling_list_present_flag[i]) { @@ -815,7 +832,12 @@ static void vbp_set_codec_data_h264( parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format; codec_data->video_format = - parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag; + parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag; + + /* picture order type and count */ + codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; + codec_data->pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type; + } @@ -835,6 +857,12 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index) /* a new picture is parsed */ query_data->num_pictures++; } + + if (query_data->num_pictures == 0) + { + /* partial frame */ + query_data->num_pictures = 1; + } if (query_data->num_pictures > MAX_NUM_PICTURES) { @@ -845,14 +873,16 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index) int pic_data_index = query_data->num_pictures - 1; if (pic_data_index < 0) { - ETRACE("MB address does not start from 0!"); + WTRACE("MB address does not start from 0!"); return VBP_DATA; } pic_data = &(query_data->pic_data[pic_data_index]); pic_parms = pic_data->pic_parms; + + // relax this condition to support partial frame parsing - if (parser->info.SliceHeader.first_mb_in_slice == 0) + //if (parser->info.SliceHeader.first_mb_in_slice == 0) { /** * picture parameter only needs to be set once, @@ -1237,6 +1267,11 @@ static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index) ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES); return VBP_DATA; } + + /*if (pic_data->num_slices > 1) + { + ITRACE("number of slices per picture is %d.", pic_data->num_slices); + }*/ return VBP_OK; } @@ -1259,13 +1294,27 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext) int i = 0; viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + + /* check if configuration data is start code prefix */ + viddec_sc_parse_cubby_cxt_t cubby = cxt->parse_cubby; + viddec_parser_ops_t *ops = pcontext->parser_ops; + int ret = ops->parse_sc((void *)&cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + if (ret == 1) + { + WTRACE("configuration data is start-code prefixed.\n"); + bitstream_pattern = H264_BS_SC_PREFIXED; + return vbp_parse_start_code_h264(pcontext); + } + + uint8* cur_data = cxt->parse_cubby.buf; if (cxt->parse_cubby.size < 6) { /* need at least 6 bytes to start parsing the structure, see spec 15 */ - ETRACE ("Need at least 6 bytes to start parsing\n" ); return VBP_DATA; } @@ -1309,8 +1358,7 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext) if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size) { /* need at least 2 bytes to parse sequence_parameter_set_length */ - - ETRACE ("Need at least 2 bytes to parse sps." 
); + ETRACE("Not enough data to parse SPS length."); return VBP_DATA; } @@ -1323,7 +1371,7 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext) if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size) { /* need at least sequence_parameter_set_length bytes for SPS */ - ETRACE ("Need at least sequence paramter set length bytes." ); + ETRACE("Not enough data to parse SPS."); return VBP_DATA; } @@ -1341,7 +1389,7 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext) if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size) { /* need at least one more byte to parse num_of_picture_parameter_sets */ - ETRACE ("need at least one more byte to parse num_of_picture_parameter_sets." ); + ETRACE("Not enough data to parse number of PPS."); return VBP_DATA; } @@ -1356,7 +1404,7 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext) if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size) { /* need at least 2 bytes to parse picture_parameter_set_length */ - ETRACE ("need at least 2 bytes to parse picture_parameter_set_length."); + ETRACE("Not enough data to parse PPS length."); return VBP_DATA; } @@ -1368,7 +1416,7 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext) if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size) { /* need at least picture_parameter_set_length bytes for PPS */ - ETRACE("need at least picture_parameter_set_length bytes for PPS"); + ETRACE("Not enough data to parse PPS."); return VBP_DATA; } @@ -1388,7 +1436,8 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext) WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.", cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf)); } - + + bitstream_pattern = H264_BS_LENGTH_PREFIXED; return VBP_OK; } @@ -1426,77 +1475,157 @@ static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p) */ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) { - viddec_pm_cxt_t *cxt = pcontext->parser_cxt; - int32_t size_left = 0; - int32_t size_parsed = 0; - int32_t NAL_length = 0; - viddec_sc_parse_cubby_cxt_t* cubby = NULL; + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; - /* reset query data for the new sample buffer */ - vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data; - int i; + /* reset query data for the new sample buffer */ + vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data; + int i; -#ifndef ANDROID - for (i = 0; i < MAX_NUM_PICTURES; i++) - { - query_data->pic_data[i].num_slices = 0; - } - query_data->num_pictures = 0; -#else - ITRACE("pcontext->h264_frame_flag = %d\n", pcontext->h264_frame_flag); - - if(pcontext->h264_frame_flag == 0) - { - for (i = 0; i < MAX_NUM_PICTURES; i++) - { - query_data->pic_data[i].num_slices = 0; - } - query_data->num_pictures = 0; - } + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].num_slices = 0; + } + query_data->num_pictures = 0; - pcontext->h264_frame_flag = 1; -#endif - - cubby = &(cxt->parse_cubby); + cxt->list.num_items = 0; - cxt->list.num_items = 0; + /* reset start position of first item to 0 in case there is only one item */ + cxt->list.data[0].stpos = 0; - /* start code emulation prevention byte is present in NAL */ - cxt->getbits.is_emul_reqd = 1; + /* start code emulation prevention byte is present in NAL */ + cxt->getbits.is_emul_reqd = 1; - size_left = cubby->size; + if (bitstream_pattern == H264_BS_LENGTH_PREFIXED) + { + viddec_sc_parse_cubby_cxt_t* cubby = NULL; + int32_t size_left = 0; + int32_t size_parsed = 0; + 
int32_t NAL_length = 0; -#ifndef ANDROID - while (size_left >= NAL_length_size) - { - NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed); + cubby = &(cxt->parse_cubby); - size_parsed += NAL_length_size; -#else - while (size_left > 0) - { - NAL_length = size_left; -#endif - - cxt->list.data[cxt->list.num_items].stpos = size_parsed; - size_parsed += NAL_length; /* skip NAL bytes */ - /* end position is exclusive */ - cxt->list.data[cxt->list.num_items].edpos = size_parsed; - cxt->list.num_items++; - if (cxt->list.num_items >= MAX_IBUFS_PER_SC) - { - ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC); - break; - } - - size_left = cubby->size - size_parsed; - } + size_left = cubby->size; - if (size_left != 0) + while (size_left >= NAL_length_size) { - WTRACE("Elementary stream is not aligned (%d).", size_left); + NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed); + + size_parsed += NAL_length_size; + cxt->list.data[cxt->list.num_items].stpos = size_parsed; + size_parsed += NAL_length; /* skip NAL bytes */ + /* end position is exclusive */ + cxt->list.data[cxt->list.num_items].edpos = size_parsed; + cxt->list.num_items++; + if (cxt->list.num_items >= MAX_IBUFS_PER_SC) + { + ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC); + break; + } + + size_left = cubby->size - size_parsed; } - return VBP_OK; + + if (size_left != 0 && length_prefix_verified == 0) + { + WTRACE("Elementary stream is not aligned (%d).", size_left); + + /* attempt to correct length prefix to start-code prefix only once, if it succeeds, we will + * always treat bit stream as start-code prefixed; otherwise, treat bit stream as length prefixed + */ + length_prefix_verified = 1; + viddec_sc_parse_cubby_cxt_t temp_cubby = cxt->parse_cubby; + + viddec_parser_ops_t *ops = pcontext->parser_ops; + int ret = ops->parse_sc((void *)&temp_cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + + /* found start code */ + if (ret == 1) + { + WTRACE("Stream was supposed to be length prefixed, but actually is start-code prefixed."); + NAL_length_size = 0; + bitstream_pattern = H264_BS_SC_PREFIXED; + /* reset parsing data */ + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].num_slices = 0; + } + query_data->num_pictures = 0; + cxt->list.num_items = 0; + } + } + } + + + if (bitstream_pattern == H264_BS_SC_PREFIXED) + { + viddec_sc_parse_cubby_cxt_t cubby; + /* memory copy without updating cxt->parse_cubby */ + cubby = cxt->parse_cubby; + viddec_parser_ops_t *ops = pcontext->parser_ops; + int ret = 0; + + while(1) + { + ret = ops->parse_sc((void *)&cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + if(ret == 1) + { + cubby.phase = 0; + + if (cxt->list.num_items == 0) + { + cxt->list.data[0].stpos = cubby.sc_end_pos; + } + else + { + cxt->list.data[cxt->list.num_items - 1].edpos = + cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos; + + cxt->list.data[cxt->list.num_items].stpos = + cxt->list.data[cxt->list.num_items - 1].edpos; + } + + cubby.buf = cxt->parse_cubby.buf + + cxt->list.data[cxt->list.num_items].stpos; + + cubby.size = cxt->parse_cubby.size - + cxt->list.data[cxt->list.num_items].stpos; + + cxt->list.num_items++; + if (cxt->list.num_items >= MAX_IBUFS_PER_SC) + { + WTRACE("Num items exceeds the limit!"); + /* not fatal, just stop parsing */ + break; + } + } + else + { + if (cxt->list.num_items == 0) + { + cxt->list.num_items = 1; + bitstream_pattern = H264_BS_SINGLE_NAL; + WTRACE("Stream was supposed to be SC 
prefixed, but actually contains a single NAL."); + } + cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size; + break; + } + } + + } + + if (bitstream_pattern == H264_BS_SINGLE_NAL) + { + cxt->list.num_items = 1; + cxt->list.data[0].stpos = 0; + cxt->list.data[0].edpos = cxt->parse_cubby.size; + } + + + return VBP_OK; } /** @@ -1515,10 +1644,11 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) struct h264_viddec_parser* parser = NULL; parser = (struct h264_viddec_parser *)&( pcontext->parser_cxt->codec_data[0]); + vbp_data_h264* query_data = (vbp_data_h264 *)pcontext->query_data; switch (parser->info.nal_unit_type) { case h264_NAL_UNIT_TYPE_SLICE: - ITRACE("slice header is parsed."); + //ITRACE("slice header is parsed."); error = vbp_add_pic_data_h264(pcontext, i); if (VBP_OK == error) { @@ -1527,7 +1657,7 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) break; case h264_NAL_UNIT_TYPE_IDR: - ITRACE("IDR header is parsed."); + //ITRACE("IDR header is parsed."); error = vbp_add_pic_data_h264(pcontext, i); if (VBP_OK == error) { @@ -1536,31 +1666,34 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) break; case h264_NAL_UNIT_TYPE_SEI: - /* ITRACE("SEI header is parsed."); */ + //ITRACE("SEI header is parsed."); break; case h264_NAL_UNIT_TYPE_SPS: - /*ITRACE("SPS header is parsed."); */ + query_data->has_sps = 1; + query_data->has_pps = 0; + ITRACE("SPS header is parsed."); break; case h264_NAL_UNIT_TYPE_PPS: - /* ITRACE("PPS header is parsed."); */ + query_data->has_pps = 1; + ITRACE("PPS header is parsed."); break; case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: - /* ITRACE("ACC unit delimiter is parsed."); */ + //ITRACE("ACC unit delimiter is parsed."); break; case h264_NAL_UNIT_TYPE_EOSeq: - /* ITRACE("EOSeq is parsed."); */ + ITRACE("EOSeq is parsed."); break; case h264_NAL_UNIT_TYPE_EOstream: - /* ITRACE("EOStream is parsed."); */ + ITRACE("EOStream is parsed."); break; default: - WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type); + WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type); break; } return error; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c index e7b4cd6..cfcad5b 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c @@ -7,10 +7,6 @@ */ #ifdef ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index b57821c..00a921c 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -111,7 +111,10 @@ typedef struct _vbp_codec_data_h264 /* video fromat */ uint8 video_signal_type_present_flag; - uint8 video_format; + uint8 video_format; + + uint8 pic_order_cnt_type; + int log2_max_pic_order_cnt_lsb_minus4; } vbp_codec_data_h264; @@ -141,20 +144,26 @@ typedef struct _vbp_slice_data_h264 typedef struct _vbp_data_h264 { - /* rolling counter of buffers sent by vbp_parse */ - uint32 buf_number; + /* rolling counter of buffers sent by vbp_parse */ + uint32 buf_number; - uint32 num_pictures; - - vbp_picture_data_h264* pic_data; - - /** - * do we need to send matrix to VA for each picture? If not, we need - * a flag indicating whether it is updated. 
- */ - VAIQMatrixBufferH264* IQ_matrix_buf; - - vbp_codec_data_h264* codec_data; + uint32 num_pictures; + + /* if SPS has been received */ + uint8 has_sps; + + /* if PPS has been received */ + uint8 has_pps; + + vbp_picture_data_h264* pic_data; + + /** + * do we need to send matrix to VA for each picture? If not, we need + * a flag indicating whether it is updated. + */ + VAIQMatrixBufferH264* IQ_matrix_buf; + + vbp_codec_data_h264* codec_data; } vbp_data_h264; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index 47408dc..85b32e0 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -7,10 +7,6 @@ */ #ifdef ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif @@ -632,7 +628,7 @@ mp4_Status_t vbp_video_packet_header_mp42( getbits = viddec_pm_get_bits(parent, &code, length); BREAK_GETBITS_FAIL(getbits, ret); - length = code; + _macroblock_number = code; } /* quant_scale */ @@ -955,6 +951,17 @@ mp4_Status_t vbp_process_slices_mp42(vbp_context *pcontext, int list_index) break; } + if (bit_offset) + { + /* byte-align parsing position */ + getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset); + if (getbits == -1) + { + ret = MP4_STATUS_PARSE_ERROR; + return ret; + } + } + picture_data->number_slices = slice_index; } } diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c index c5d378d..275f43c 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c @@ -5,18 +5,11 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ - - #ifdef ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif - #include #include @@ -154,10 +147,6 @@ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) */ error = pcontext->func_init_parser_entries(pcontext); -#ifdef ANDROID - pcontext->h264_frame_flag = 0; -#endif - cleanup: if (VBP_OK != error) @@ -515,7 +504,7 @@ uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, uint32 error = VBP_OK; - /* ITRACE("buffer counter: %d",buffer_counter); */ + //ITRACE("buffer counter: %d",buffer_counter); /* set up emitter. 
*/ pcontext->parser_cxt->emitter.cur.data = pcontext->workload1; @@ -559,10 +548,6 @@ uint32 vbp_utils_query(vbp_context *pcontext, void **data) { *data = NULL; } -#ifdef ANDROID - pcontext->h264_frame_flag = 0; -#endif - return error; } diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h index 69202f8..67ff3e8 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h @@ -76,10 +76,6 @@ struct vbp_context_t function_process_parsing_result func_process_parsing_result; function_populate_query_data func_populate_query_data; -#ifdef ANDROID - uint32 h264_frame_flag; -#endif - }; /** diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c index 5c05c83..d43ada6 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c @@ -8,10 +8,6 @@ #ifdef ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_emit.c b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c index dc52e5c..2bae85b 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_emit.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c @@ -1,9 +1,5 @@ #ifdef ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c index 6e0b66b..8671ef3 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c @@ -1,9 +1,5 @@ #ifdef ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c index fb690a3..4458834 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c @@ -1,8 +1,4 @@ #ifdef ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c index 10bb179..06c6cbd 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c @@ -1,9 +1,5 @@ #ifdef ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c index eada1fa..df7d502 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c @@ -1,9 +1,5 @@ #ifdef ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c index 731a78e..a90242a 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c @@ -1,9 +1,5 @@ #ifdef ANDROID -//#ifndef NULL -//#define NULL (void*)0x0 -//#endif - #define true 1 #define false 0 #endif diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c index 164710e..adacf2c 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c @@ -1,9 +1,5 @@ #ifdef ANDROID -//#ifndef NULL -//#define NULL 
(void*)0x0 -//#endif - #define true 1 #define false 0 #endif diff --git a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h index e034b86..acca3ce 100644 --- a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h +++ b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h @@ -200,4 +200,18 @@ enum viddec_fw_mpeg2_error_codes VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT = (1 << 29),/* Parser detected corruption in quantization matrix extension. */ }; +#ifdef VBP + +#ifndef NULL +#define NULL (void*)0x0 +#endif + +#ifndef true +#define true 1 +#define false 0 +#endif + +#endif +/* end of #ifdef VBP */ + #endif diff --git a/mix_video/ChangeLog b/mix_video/ChangeLog index 903e037..2f7a06c 100644 --- a/mix_video/ChangeLog +++ b/mix_video/ChangeLog @@ -1,3 +1,24 @@ +2010-09-15 Tao Tao + + * Merged the changes for Android + +2010-09-02 Andy Qiu + * Refactored h.264 decoder to support partial frame and byte stream handling and delayed decoder configuration. + * Supported MPEG-4 video decoding with "video/x-xvid" mime type. + * Rolled version number to 0.1.19 + +2010-07-29 Andy Qiu + * use high profile for baseline H.264 contents + * Rolled version number to 0.1.18 + +2010-06-30 Tao Tao + + * Added G_BEGIN_DECLS and G_END_DECLS to all header files + +2010-06-09 Weian Chen + + * convert DOS line end format to Unix + 2010-06-04 Tao Tao * Rolled version number to 0.1.17 diff --git a/mix_video/configure.ac b/mix_video/configure.ac index 8768cee..14f9ac2 100644 --- a/mix_video/configure.ac +++ b/mix_video/configure.ac @@ -1,8 +1,8 @@ -AC_INIT("", "", [linda.s.cline@intel.com]) +AC_INIT([""],[""],[linda.s.cline@intel.com]) AC_CONFIG_MACRO_DIR(m4) -AS_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 17) +UMG_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 19) dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode AM_MAINTAINER_MODE @@ -13,11 +13,11 @@ AM_INIT_AUTOMAKE($PACKAGE, $VERSION) dnl make aclocal work in maintainer mode AC_SUBST(ACLOCAL_AMFLAGS, "-I m4") -AM_CONFIG_HEADER(config.h) +AC_CONFIG_HEADERS([config.h]) dnl check for tools AC_PROG_CC -AC_PROG_LIBTOOL +LT_INIT #MIX_CFLAGS="-Wall -Werror" MIX_CFLAGS="-Wall" diff --git a/mix_video/m4/as-mix-version.m4 b/mix_video/m4/as-mix-version.m4 index f0301b1..82f6c95 100644 --- a/mix_video/m4/as-mix-version.m4 +++ b/mix_video/m4/as-mix-version.m4 @@ -1,9 +1,9 @@ dnl as-mix-version.m4 -dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE) +dnl UMG_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE) dnl example -dnl AS_MIX_VERSION(mixvideo,MIXVIDEO, 0, 3, 2,) +dnl UMG_MIX_VERSION(mixvideo,MIXVIDEO, 0, 3, 2,) dnl for a 0.3.2 release version dnl this macro @@ -11,7 +11,7 @@ dnl - defines [$PREFIX]_MAJOR, MINOR and REVISION, CURRENT, AGE dnl - defines [$PREFIX], VERSION dnl - AC_SUBST's all defined vars -AC_DEFUN([AS_MIX_VERSION], +AC_DEFUN([UMG_MIX_VERSION], [ PACKAGE=[$1] [$2]_MAJOR=[$3] diff --git a/mix_video/mixvideo.spec b/mix_video/mixvideo.spec index 50752eb..df31162 100644 --- a/mix_video/mixvideo.spec +++ b/mix_video/mixvideo.spec @@ -6,15 +6,16 @@ Summary: MIX Video Name: mixvideo -Version: 0.1.17 +Version: 0.1.19 Release: 1 -Source0: %{name}-%{version}.tar.gz +Source0: %{name}-%{version}.tar.bz2 NoSource: 0 License: Proprietary Group: System Environment/Libraries BuildRoot: %{_tmppath}/%{name}-root ExclusiveArch: i586 -Requires: glib2 , mixcommon, mixvbp +Requires: glib2 , mixcommon, mixvbp, libva, libX11 +BuildRequires: glib2-devel mixcommon-devel mixvbp-devel libva-devel libX11-devel %description MIX Video 
is an user library interface for various video codecs available on the platform. diff --git a/mix_video/src/mixbuffer.h b/mix_video/src/mixbuffer.h index 6977e92..0688442 100644 --- a/mix_video/src/mixbuffer.h +++ b/mix_video/src/mixbuffer.h @@ -12,6 +12,8 @@ #include #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_BUFFER: * @@ -154,4 +156,6 @@ void mix_buffer_unref(MixBuffer * mix); MIX_RESULT mix_buffer_set_data(MixBuffer * obj, guchar *data, guint size, gulong token, MixBufferCallback callback); +G_END_DECLS + #endif /* __MIX_BUFFER_H__ */ diff --git a/mix_video/src/mixbuffer_private.h b/mix_video/src/mixbuffer_private.h index 87c9c07..43c8347 100644 --- a/mix_video/src/mixbuffer_private.h +++ b/mix_video/src/mixbuffer_private.h @@ -12,6 +12,8 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixbuffer.h" #include "mixbufferpool.h" +G_BEGIN_DECLS + typedef struct _MixBufferPrivate MixBufferPrivate; struct _MixBufferPrivate @@ -35,5 +37,6 @@ struct _MixBufferPrivate MIX_RESULT mix_buffer_set_pool (MixBuffer *obj, MixBufferPool *pool); +G_END_DECLS #endif /* __MIX_BUFFER_PRIVATE_H__ */ diff --git a/mix_video/src/mixdisplayx11.h b/mix_video/src/mixdisplayx11.h index 8b1788d..3b51f5e 100644 --- a/mix_video/src/mixdisplayx11.h +++ b/mix_video/src/mixdisplayx11.h @@ -13,6 +13,8 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideodef.h" #include +G_BEGIN_DECLS + /** * MIX_TYPE_DISPLAYX11: * @@ -173,4 +175,6 @@ MIX_RESULT mix_displayx11_set_drawable (MixDisplayX11 * obj, MIX_RESULT mix_displayx11_get_drawable (MixDisplayX11 * obj, Drawable * drawable); +G_END_DECLS + #endif /* __MIX_DISPLAYX11_H__ */ diff --git a/mix_video/src/mixframemanager.c b/mix_video/src/mixframemanager.c index 569dabd..0a843e6 100644 --- a/mix_video/src/mixframemanager.c +++ b/mix_video/src/mixframemanager.c @@ -12,6 +12,14 @@ #include "mixvideoframe_private.h" #define INITIAL_FRAME_ARRAY_SIZE 16 + +// Assume only one backward reference is used. This will hold up to 2 frames before forcing +// the earliest frame out of queue. 
+#define MIX_MAX_ENQUEUE_SIZE 2 + +// RTP timestamp is 32-bit long and could be rollover in 13 hours (based on 90K Hz clock) +#define TS_ROLLOVER_THRESHOLD (0xFFFFFFFF/2) + #define MIX_SECOND (G_USEC_PER_SEC * G_GINT64_CONSTANT (1000)) static GObjectClass *parent_class = NULL; @@ -32,23 +40,19 @@ static void mix_framemanager_init(MixFrameManager * self) { self->flushing = FALSE; self->eos = FALSE; - self->frame_array = NULL; - self->frame_queue = NULL; + self->frame_list = NULL; self->initialized = FALSE; - self->mode = MIX_FRAMEORDER_MODE_DISPLAYORDER; + self->mode = MIX_DISPLAY_ORDER_UNKNOWN; self->framerate_numerator = 30; self->framerate_denominator = 1; self->is_first_frame = TRUE; - - /* for vc1 in asf */ - self->p_frame = NULL; - self->prev_timestamp = 0; - -#ifdef ANDROID - self->timestamp_storage = NULL; -#endif + self->next_frame_timestamp = 0; + self->last_frame_timestamp = 0; + self->next_frame_picnumber = 0; + self->max_enqueue_size = MIX_MAX_ENQUEUE_SIZE; + self->max_picture_number = (guint32)-1; } static void mix_framemanager_class_init(MixFrameManagerClass * klass) { @@ -92,14 +96,16 @@ MixFrameManager *mix_framemanager_ref(MixFrameManager * fm) { /* MixFrameManager class methods */ MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, - MixFrameOrderMode mode, gint framerate_numerator, - gint framerate_denominator, gboolean timebased_ordering) { + MixDisplayOrderMode mode, gint framerate_numerator, + gint framerate_denominator) { - MIX_RESULT ret = MIX_RESULT_FAIL; + MIX_RESULT ret = MIX_RESULT_SUCCESS; - if (!MIX_IS_FRAMEMANAGER(fm) || (mode != MIX_FRAMEORDER_MODE_DISPLAYORDER - && mode != MIX_FRAMEORDER_MODE_DECODEORDER) || framerate_numerator - <= 0 || framerate_denominator <= 0) { + if (!MIX_IS_FRAMEMANAGER(fm) || + mode <= MIX_DISPLAY_ORDER_UNKNOWN || + mode >= MIX_DISPLAY_ORDER_LAST || + framerate_numerator <= 0 || + framerate_denominator <= 0) { return MIX_RESULT_INVALID_PARAM; } @@ -111,35 +117,15 @@ MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, g_thread_init(NULL); } - ret = MIX_RESULT_NO_MEMORY; if (!fm->lock) { fm->lock = g_mutex_new(); if (!fm->lock) { + ret = MIX_RESULT_NO_MEMORY; goto cleanup; } } - if (mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) { - fm->frame_array = g_ptr_array_sized_new(INITIAL_FRAME_ARRAY_SIZE); - if (!fm->frame_array) { - goto cleanup; - } - - -#ifdef ANDROID - fm->timestamp_storage = g_array_sized_new(FALSE, TRUE, - sizeof(guint64), INITIAL_FRAME_ARRAY_SIZE); - if (!fm->timestamp_storage) { - goto cleanup; - } -#endif - } - - fm->frame_queue = g_queue_new(); - if (!fm->frame_queue) { - goto cleanup; - } - + fm->frame_list = NULL; fm->framerate_numerator = framerate_numerator; fm->framerate_denominator = framerate_denominator; fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND @@ -147,26 +133,20 @@ MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, fm->mode = mode; - fm->timebased_ordering = timebased_ordering; + LOG_V("fm->mode = %d\n", fm->mode); - fm->initialized = TRUE; + fm->is_first_frame = TRUE; + fm->next_frame_timestamp = 0; + fm->last_frame_timestamp = 0; + fm->next_frame_picnumber = 0; - ret = MIX_RESULT_SUCCESS; + fm->initialized = TRUE; - cleanup: +cleanup: - if (ret != MIX_RESULT_SUCCESS) { - if (fm->frame_array) { - g_ptr_array_free(fm->frame_array, TRUE); - fm->frame_array = NULL; - } - if (fm->frame_queue) { - g_queue_free(fm->frame_queue); - fm->frame_queue = NULL; - } - } return ret; } + MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm) { if 
(!MIX_IS_FRAMEMANAGER(fm)) { @@ -185,21 +165,6 @@ MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm) { g_mutex_lock(fm->lock); - if (fm->frame_array) { - g_ptr_array_free(fm->frame_array, TRUE); - fm->frame_array = NULL; - } - if (fm->frame_queue) { - g_queue_free(fm->frame_queue); - fm->frame_queue = NULL; - } -#ifdef ANDROID - if (fm->timestamp_storage) { - g_array_free(fm->timestamp_storage, TRUE); - fm->timestamp_storage = NULL; - } -#endif - fm->initialized = FALSE; g_mutex_unlock(fm->lock); @@ -259,8 +224,8 @@ MIX_RESULT mix_framemanager_get_framerate(MixFrameManager *fm, return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_framemanager_get_frame_order_mode(MixFrameManager *fm, - MixFrameOrderMode *mode) { +MIX_RESULT mix_framemanager_get_display_order_mode(MixFrameManager *fm, + MixDisplayOrderMode *mode) { if (!MIX_IS_FRAMEMANAGER(fm)) { return MIX_RESULT_INVALID_PARAM; @@ -280,798 +245,435 @@ MIX_RESULT mix_framemanager_get_frame_order_mode(MixFrameManager *fm, return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) { - +MIX_RESULT mix_framemanager_set_max_enqueue_size(MixFrameManager *fm, gint size) +{ if (!MIX_IS_FRAMEMANAGER(fm)) { return MIX_RESULT_INVALID_PARAM; } - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; + if (!fm->lock) { + return MIX_RESULT_FAIL; } + if (size <= 0) + { + return MIX_RESULT_FAIL; + } + g_mutex_lock(fm->lock); - /* flush frame_array */ - if (fm->frame_array) { - guint len = fm->frame_array->len; - if (len) { - guint idx = 0; - MixVideoFrame *frame = NULL; - for (idx = 0; idx < len; idx++) { - frame = (MixVideoFrame *) g_ptr_array_index(fm->frame_array, - idx); - if (frame) { - mix_videoframe_unref(frame); - g_ptr_array_index(fm->frame_array, idx) = NULL; - } - } - /* g_ptr_array_remove_range(fm->frame_array, 0, len); */ - } - } - -#ifdef ANDROID - if(fm->timestamp_storage) { - g_array_remove_range(fm->timestamp_storage, 0, fm->timestamp_storage->len); - } -#endif - - if (fm->frame_queue) { - guint len = fm->frame_queue->length; - if (len) { - MixVideoFrame *frame = NULL; - while ((frame = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue))) { - mix_videoframe_unref(frame); - } - } - } - - if(fm->p_frame) { - mix_videoframe_unref(fm->p_frame); - fm->p_frame = NULL; - } - fm->prev_timestamp = 0; - - fm->eos = FALSE; - - fm->is_first_frame = TRUE; + fm->max_enqueue_size = size; + LOG_V("max enqueue size is %d\n", size); g_mutex_unlock(fm->lock); return MIX_RESULT_SUCCESS; } -MixVideoFrame *get_expected_frame_from_array(GPtrArray *array, - guint64 expected, guint64 tolerance, guint64 *frametimestamp) { - - guint idx = 0; - guint len = 0; - guint64 timestamp = 0; - guint64 lowest_timestamp = (guint64)-1; - guint lowest_timestamp_idx = -1; - - MixVideoFrame *frame = NULL; - - if (!array || !expected || !tolerance || !frametimestamp || expected < tolerance) { - - return NULL; +MIX_RESULT mix_framemanager_set_max_picture_number(MixFrameManager *fm, guint32 num) +{ + // NOTE: set maximum picture order number only if pic_order_cnt_type is 0 (see H.264 spec) + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; } - len = array->len; - if (!len) { - return NULL; + if (!fm->lock) { + return MIX_RESULT_FAIL; } - for (idx = 0; idx < len; idx++) { - MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, idx); - if (_frame) { - - if (mix_videoframe_get_timestamp(_frame, ×tamp) - != MIX_RESULT_SUCCESS) { - - /* - * Oops, this shall never happen! - * In case it heppens, release the frame! 
- */ - - mix_videoframe_unref(_frame); + if (num < 16) + { + // Refer to H.264 spec: log2_max_pic_order_cnt_lsb_minus4. Max pic order will never be less than 16. + return MIX_RESULT_INVALID_PARAM; + } + + g_mutex_lock(fm->lock); - /* make an available slot */ - g_ptr_array_index(array, idx) = NULL; + // max_picture_number is exclusive (range from 0 to num - 1). + // Note that this number may not be reliable if encoder does not conform to the spec, because of this, the + // implementation will not automatically roll over fm->next_frame_picnumber when it reaches + // fm->max_picture_number. + fm->max_picture_number = num; + LOG_V("max picture number is %d\n", num); - break; - } - - if (lowest_timestamp > timestamp) - { - lowest_timestamp = timestamp; - lowest_timestamp_idx = idx; - } - } - } - - if (lowest_timestamp == (guint64)-1) - { - return NULL; - } - + g_mutex_unlock(fm->lock); - /* check if this is the expected next frame */ - if (lowest_timestamp <= expected + tolerance) - { - MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, lowest_timestamp_idx); - /* make this slot available */ - g_ptr_array_index(array, lowest_timestamp_idx) = NULL; + return MIX_RESULT_SUCCESS; - *frametimestamp = lowest_timestamp; - frame = _frame; - } - - return frame; } -#ifdef ANDROID -MixVideoFrame *get_expected_frame_from_array_DO(GPtrArray *array, - guint32 expected, guint32 *framedisplayorder) { - guint idx = 0; - guint len = 0; - guint32 displayorder = 0; - guint32 lowest_displayorder = (guint32)-1; - guint lowest_displayorder_idx = -1; +MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) { MixVideoFrame *frame = NULL; + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!array || !expected || !framedisplayorder) { - - return NULL; - } - - len = array->len; - if (!len) { - return NULL; - } - - for (idx = 0; idx < len; idx++) { - MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, idx); - if (_frame) { - - if (mix_videoframe_get_displayorder(_frame, &displayorder) - != MIX_RESULT_SUCCESS) { - - /* - * Oops, this shall never happen! - * In case it heppens, release the frame! 
- */ - - mix_videoframe_unref(_frame); - - /* make an available slot */ - g_ptr_array_index(array, idx) = NULL; - - break; - } - - if (lowest_displayorder > displayorder) - { - lowest_displayorder = displayorder; - lowest_displayorder_idx = idx; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; } - } - if (lowest_displayorder == (guint32)-1) - { - return NULL; - } + g_mutex_lock(fm->lock); + while (fm->frame_list) + { + frame = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0); + fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)frame); + mix_videoframe_unref(frame); + LOG_V("one frame is flushed\n"); + }; - /* check if this is the expected next frame */ - if (lowest_displayorder <= expected) - { - MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, lowest_displayorder_idx); - /* make this slot available */ - g_ptr_array_index(array, lowest_displayorder_idx) = NULL; + fm->eos = FALSE; + fm->is_first_frame = TRUE; + fm->next_frame_timestamp = 0; + fm->last_frame_timestamp = 0; + fm->next_frame_picnumber = 0; - *framedisplayorder = lowest_displayorder; - frame = _frame; - } + g_mutex_unlock(fm->lock); - return frame; + return MIX_RESULT_SUCCESS; } -#endif /* ANDROID */ -void add_frame_into_array(GPtrArray *array, MixVideoFrame *mvf) { - gboolean found_slot = FALSE; - guint len = 0; +MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { - if (!array || !mvf) { - return; - } + MIX_RESULT ret = MIX_RESULT_SUCCESS; - /* do we have slot for this frame? */ - len = array->len; - if (len) { - guint idx = 0; - gpointer frame = NULL; - for (idx = 0; idx < len; idx++) { - frame = g_ptr_array_index(array, idx); - if (!frame) { - found_slot = TRUE; - g_ptr_array_index(array, idx) = (gpointer) mvf; - break; - } - } - } + LOG_V("Begin fm->mode = %d\n", fm->mode); - if (!found_slot) { - g_ptr_array_add(array, (gpointer) mvf); + if (!mvf) { + return MIX_RESULT_INVALID_PARAM; } -} - -#ifdef ANDROID -gint frame_sorting_func_DO(gconstpointer a, gconstpointer b) { - - MixVideoFrame *fa = *((MixVideoFrame **) a); - MixVideoFrame *fb = *((MixVideoFrame **) b); - - guint32 ta, tb; + if (!MIX_IS_FRAMEMANAGER(fm)) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fa && !fb) { - return 0; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } - if (fa && !fb) { - return 1; + gboolean discontinuity = FALSE; + mix_videoframe_get_discontinuity(mvf, &discontinuity); + if (discontinuity) + { + LOG_V("current frame has discontinuity!\n"); + mix_framemanager_flush(fm); } - - if (!fa && fb) { - return -1; +#ifdef MIX_LOG_ENABLE + if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER) + { + guint32 num; + mix_videoframe_get_displayorder(mvf, &num); + LOG_V("pic %d is enqueued.\n", num); } - mix_videoframe_get_displayorder(fa, &ta); - mix_videoframe_get_displayorder(fb, &tb); - - if (ta > tb) { - return 1; + if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP) + { + guint64 ts; + mix_videoframe_get_timestamp(mvf, &ts); + LOG_V("ts %"G_GINT64_FORMAT" is enqueued.\n", ts); } +#endif - if (ta == tb) { - return 0; - } + g_mutex_lock(fm->lock); + fm->frame_list = g_slist_append(fm->frame_list, (gpointer)mvf); + g_mutex_unlock(fm->lock); + + LOG_V("End\n"); - return -1; + return ret; } -MIX_RESULT mix_framemanager_displayorder_based_enqueue(MixFrameManager *fm, - MixVideoFrame *mvf) { - /* - * display order mode. - * - * if this is the first frame, we always push it into - * output queue, if it is not, check if it is the one - * expected, if yes, push it into the output queue. 
- * if not, put it into waiting list. - * - * while the expected frame is pushed into output queue, - * the expected next timestamp is also updated. with this - * updated expected next timestamp, we search for expected - * frame from the waiting list, if found, repeat the process. - * - */ - - MIX_RESULT ret = MIX_RESULT_FAIL; - guint32 displayorder = 0; - - -first_frame: - - ret = mix_videoframe_get_displayorder(mvf, &displayorder); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; +void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) +{ + // this function finds the lowest time stamp in the list and assign it to the dequeued video frame, + // if that timestamp is smaller than the timestamp of dequeued video frame. + int i; + guint64 ts, min_ts; + MixVideoFrame *p, *min_p; + int len = g_slist_length(fm->frame_list); + if (len == 0) + { + // nothing to update + return; } - - if (fm->is_first_frame) { - - /* - * for the first frame, we can always put it into the output queue - */ - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - - /* - * what displayorder of next frame shall be? - */ - fm->next_displayorder = displayorder + 1; - - fm->is_first_frame = FALSE; - - } else { - - /* - * If displayorder is 0, send all the frames in the array to the queue - */ - if(displayorder == 0) { - if (fm->frame_array) { - guint len = fm->frame_array->len; - if (len) { - - /* sorting frames in the array by displayorder */ - g_ptr_array_sort(fm->frame_array, frame_sorting_func_DO); - - guint idx = 0; - MixVideoFrame *frame = NULL; - for (idx = 0; idx < len; idx++) { - frame = (MixVideoFrame *) g_ptr_array_index( - fm->frame_array, idx); - if (frame) { - g_ptr_array_index(fm->frame_array, idx) = NULL; - g_queue_push_tail(fm->frame_queue, (gpointer) frame); - } - } - } - } - - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - - /* - * what displayorder of next frame shall be? - */ - fm->next_displayorder = displayorder + 1; - - } else { - - /* - * is this the next frame expected? - */ - - /* calculate tolerance */ - MixVideoFrame *frame_from_array = NULL; - guint32 displayorder_frame_array = 0; - - /* - * timestamp may be associated with the second field, which - * will not fall between the tolerance range. - */ - - if (displayorder <= fm->next_displayorder) { - - /* - * ok, this is the frame expected, push it into output queue - */ - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - - /* - * update next_frame_timestamp only if it falls within the tolerance range - */ - if (displayorder == fm->next_displayorder) - { - fm->next_displayorder = displayorder + 1; - } - - /* - * since we updated next_displayorder, there might be a frame - * in the frame_array that satisfying this new next_displayorder - */ - - while ((frame_from_array = get_expected_frame_from_array_DO( - fm->frame_array, fm->next_displayorder, - &displayorder_frame_array))) { - - g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array); - - /* - * update next_frame_timestamp only if it falls within the tolerance range - */ - if (displayorder_frame_array >= fm->next_displayorder) - { - fm->next_displayorder = displayorder_frame_array + 1; - } - } - - } else { - - /* - * is discontinuity flag set for this frame ? - */ - gboolean discontinuity = FALSE; - ret = mix_videoframe_get_discontinuity(mvf, &discontinuity); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - /* - * If this is a frame with discontinuity flag set, clear frame_array - * and treat the frame as the first frame. 
- */ - if (discontinuity) { - - guint len = fm->frame_array->len; - if (len) { - guint idx = 0; - MixVideoFrame *frame = NULL; - for (idx = 0; idx < len; idx++) { - frame = (MixVideoFrame *) g_ptr_array_index( - fm->frame_array, idx); - if (frame) { - mix_videoframe_unref(frame); - g_ptr_array_index(fm->frame_array, idx) = NULL; - } - } - } - - fm->is_first_frame = TRUE; - goto first_frame; - } - - /* - * this is not the expected frame, put it into frame_array - */ - add_frame_into_array(fm->frame_array, mvf); - } - } + + // find video frame with the smallest timestamp, take rollover into account when + // comparing timestamp. + for (i = 0; i < len; i++) + { + p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); + mix_videoframe_get_timestamp(p, &ts); + if (i == 0 || + (ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) || + (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) + { + min_ts = ts; + min_p = p; + } } -cleanup: - return ret; -} -#endif /* ANDROID */ - - -MIX_RESULT mix_framemanager_timestamp_based_enqueue(MixFrameManager *fm, - MixVideoFrame *mvf) { - /* - * display order mode. - * - * if this is the first frame, we always push it into - * output queue, if it is not, check if it is the one - * expected, if yes, push it into the output queue. - * if not, put it into waiting list. - * - * while the expected frame is pushed into output queue, - * the expected next timestamp is also updated. with this - * updated expected next timestamp, we search for expected - * frame from the waiting list, if found, repeat the process. - * - */ - - MIX_RESULT ret = MIX_RESULT_FAIL; - guint64 timestamp = 0; - - first_frame: - - ret = mix_videoframe_get_timestamp(mvf, ×tamp); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - if (fm->is_first_frame) { - - /* - * for the first frame, we can always put it into the output queue - */ - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - - /* - * what timestamp of next frame shall be? - */ - fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta; - - fm->is_first_frame = FALSE; - - } else { - - /* - * is this the next frame expected? - */ - - /* calculate tolerance */ - guint64 tolerance = fm->frame_timestamp_delta / 4; - MixVideoFrame *frame_from_array = NULL; - guint64 timestamp_frame_array = 0; - - /* - * timestamp may be associated with the second field, which - * will not fall between the tolerance range. - */ - - if (timestamp <= fm->next_frame_timestamp + tolerance) { - - /* - * ok, this is the frame expected, push it into output queue - */ - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - - /* - * update next_frame_timestamp only if it falls within the tolerance range - */ - if (timestamp >= fm->next_frame_timestamp - tolerance) - { - fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta; - } - - /* - * since we updated next_frame_timestamp, there might be a frame - * in the frame_array that satisfying this new next_frame_timestamp - */ - - while ((frame_from_array = get_expected_frame_from_array( - fm->frame_array, fm->next_frame_timestamp, tolerance, - ×tamp_frame_array))) { - - g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array); - - /* - * update next_frame_timestamp only if it falls within the tolerance range - */ - if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance) - { - fm->next_frame_timestamp = timestamp_frame_array - + fm->frame_timestamp_delta; - } - } - - } else { - - /* - * is discontinuity flag set for this frame ? 
- */ - gboolean discontinuity = FALSE; - ret = mix_videoframe_get_discontinuity(mvf, &discontinuity); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - /* - * If this is a frame with discontinuity flag set, clear frame_array - * and treat the frame as the first frame. - */ - if (discontinuity) { - - guint len = fm->frame_array->len; - if (len) { - guint idx = 0; - MixVideoFrame *frame = NULL; - for (idx = 0; idx < len; idx++) { - frame = (MixVideoFrame *) g_ptr_array_index( - fm->frame_array, idx); - if (frame) { - mix_videoframe_unref(frame); - g_ptr_array_index(fm->frame_array, idx) = NULL; - } - } - } - - fm->is_first_frame = TRUE; - goto first_frame; - } - - /* - * handle variable frame rate: - * display any frame which time stamp is less than current one. - * - */ - guint64 tolerance = fm->frame_timestamp_delta / 4; - MixVideoFrame *frame_from_array = NULL; - guint64 timestamp_frame_array = 0; - - while ((frame_from_array = get_expected_frame_from_array( - fm->frame_array, timestamp, tolerance, - ×tamp_frame_array))) - { - g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array); - - /* - * update next_frame_timestamp only if it falls within the tolerance range - */ - if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance) - { - fm->next_frame_timestamp = timestamp_frame_array - + fm->frame_timestamp_delta; - } - } - /* - * this is not the expected frame, put it into frame_array - */ - - add_frame_into_array(fm->frame_array, mvf); - } - } - cleanup: - - return ret; + mix_videoframe_get_timestamp(mvf, &ts); + if ((ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) || + (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) + { + // frame to be updated has smaller time stamp + } + else + { + // time stamp needs to be monotonically non-decreasing so swap timestamp. + mix_videoframe_set_timestamp(mvf, min_ts); + mix_videoframe_set_timestamp(min_p, ts); + LOG_V("timestamp for current frame is updated from %"G_GINT64_FORMAT" to %"G_GINT64_FORMAT"\n", + ts, min_ts); + } } -MIX_RESULT mix_framemanager_frametype_based_enqueue(MixFrameManager *fm, - MixVideoFrame *mvf) { - MIX_RESULT ret = MIX_RESULT_FAIL; - MixFrameType frame_type; - guint64 timestamp = 0; +MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) +{ + int i, num_i_or_p; + MixVideoFrame *p, *first_i_or_p; + MixFrameType type; + int len = g_slist_length(fm->frame_list); - ret = mix_videoframe_get_frame_type(mvf, &frame_type); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - ret = mix_videoframe_get_timestamp(mvf, ×tamp); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } + num_i_or_p = 0; + first_i_or_p = NULL; + + for (i = 0; i < len; i++) + { + p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); + mix_videoframe_get_frame_type(p, &type); + if (type == TYPE_B) + { + // B frame has higher display priority as only one reference frame is kept in the list + // and it should be backward reference frame for B frame. 
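/*
 * [Editorial sketch -- not part of this patch] The branch below encodes the
 * pictype-based ordering rule: a B frame can be displayed as soon as it is
 * decoded, while an I or P frame is held back until the next I/P frame (or
 * EOS) arrives, because it may serve as a backward reference. A minimal,
 * self-contained illustration of that rule follows; the frame-type enum and
 * the I P B B P input sequence are hypothetical stand-ins, not MixVideo API.
 */
#include <stdio.h>

typedef enum { T_I, T_P, T_B } sketch_frame_type_t;

int main(void)
{
    /* decode order: I P B B P  ->  expected display order: I B B P P */
    sketch_frame_type_t in[] = { T_I, T_P, T_B, T_B, T_P };
    const char *names = "IPB";
    int held = -1;   /* decode index of the I/P frame held as reference */
    int i;

    for (i = 0; i < 5; i++) {
        if (in[i] == T_B) {
            /* B frames are never referenced here, so emit immediately */
            printf("display %c (decode index %d)\n", names[in[i]], i);
        } else {
            /* a new I/P frame releases the previously held reference */
            if (held >= 0)
                printf("display %c (decode index %d)\n", names[in[held]], held);
            held = i;
        }
    }
    if (held >= 0)  /* EOS flushes the last reference frame */
        printf("display %c (decode index %d)\n", names[in[held]], held);
    return 0;
}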
+ fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); + mix_framemanager_update_timestamp(fm, p); + *mvf = p; + LOG_V("B frame is dequeued.\n"); + return MIX_RESULT_SUCCESS; + } + + if (type != TYPE_I && type != TYPE_P) + { + // this should never happen + LOG_E("Frame type is invalid!!!\n"); + fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); + mix_videoframe_unref(p); + return MIX_RESULT_FRAME_NOTAVAIL; + } + num_i_or_p++; + if (first_i_or_p == NULL) + { + first_i_or_p = p; + } + } + // if there is more than one reference frame in the list, the first one is dequeued. + if (num_i_or_p > 1 || fm->eos) + { + if (first_i_or_p == NULL) + { + // this should never happen! + LOG_E("first_i_or_p frame is NULL!\n"); + return MIX_RESULT_FAIL; + } + fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)first_i_or_p); + mix_framemanager_update_timestamp(fm, first_i_or_p); + *mvf = first_i_or_p; #ifdef MIX_LOG_ENABLE - if (frame_type == TYPE_I) { - LOG_I( "TYPE_I %"G_GINT64_FORMAT"\n", timestamp); - } else if (frame_type == TYPE_P) { - LOG_I( "TYPE_P %"G_GINT64_FORMAT"\n", timestamp); - } else if (frame_type == TYPE_B) { - LOG_I( "TYPE_B %"G_GINT64_FORMAT"\n", timestamp); - } else { - LOG_I( "TYPE_UNKNOWN %"G_GINT64_FORMAT"\n", timestamp); - } + mix_videoframe_get_frame_type(first_i_or_p, &type); + if (type == TYPE_I) + { + LOG_V("I frame is dequeued.\n"); + } + else + { + LOG_V("P frame is dequeued.\n"); + } #endif - - if (fm->is_first_frame) { - /* - * The first frame is not a I frame, unexpected! - */ - if (frame_type != TYPE_I) { - goto cleanup; - } - - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - fm->is_first_frame = FALSE; - } else { - - /* - * I P B B P B B ... - */ - if (frame_type == TYPE_I || frame_type == TYPE_P) { - - if (fm->p_frame) { - - ret = mix_videoframe_set_timestamp(fm->p_frame, - fm->prev_timestamp); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - g_queue_push_tail(fm->frame_queue, (gpointer) fm->p_frame); - fm->p_frame = NULL; - } - - /* it is an I frame, push it into the out queue */ - /*if (frame_type == TYPE_I) { - - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - - } else*/ - { - /* it is a P frame, we can not push it to the out queue yet, save it */ - fm->p_frame = mvf; - fm->prev_timestamp = timestamp; - } - - ret = MIX_RESULT_SUCCESS; - - } else { - /* it is a B frame, replace the timestamp with the previous one */ - if (timestamp > fm->prev_timestamp) { - ret = mix_videoframe_set_timestamp(mvf, fm->prev_timestamp); - if (ret != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - /* save the timestamp */ - fm->prev_timestamp = timestamp; - } - g_queue_push_tail(fm->frame_queue, (gpointer) mvf); - ret = MIX_RESULT_SUCCESS; - } - } - - cleanup: - - return ret; + return MIX_RESULT_SUCCESS; + } + + return MIX_RESULT_FRAME_NOTAVAIL; } -MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - - /*fm->mode = MIX_FRAMEORDER_MODE_DECODEORDER;*/ +MIX_RESULT mix_framemanager_timestamp_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) +{ + int i, len; + MixVideoFrame *p, *p_out_of_dated; + guint64 ts, ts_next_pending, ts_out_of_dated; + guint64 tolerance = fm->frame_timestamp_delta/4; + +retry: + // len may be changed during retry! 
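/*
 * [Editorial sketch -- not part of this patch] The wrap-around tests used in
 * mix_framemanager_update_timestamp() above, and the out-of-date check in the
 * dequeue loop below, both compare timestamps that may have rolled over
 * (32-bit RTP-style clock). A possible reading of that predicate as a
 * stand-alone helper, assuming the same half-range threshold;
 * SKETCH_TS_ROLLOVER_THRESHOLD is a local stand-in for the patch's
 * TS_ROLLOVER_THRESHOLD:
 */
#include <stdint.h>

#define SKETCH_TS_ROLLOVER_THRESHOLD (0xFFFFFFFFULL / 2)

/* non-zero if 'a' should be treated as earlier than 'b' despite wrap-around */
static int sketch_ts_is_earlier(uint64_t a, uint64_t b)
{
    return (a < b && b - a < SKETCH_TS_ROLLOVER_THRESHOLD) || /* plain case */
           (a > b && a - b > SKETCH_TS_ROLLOVER_THRESHOLD);   /* b wrapped  */
}

/*
 * Example: sketch_ts_is_earlier(0xFFFFFFF0, 0x10) is non-zero; the distance
 * 0xFFFFFFE0 exceeds the threshold, so 0x10 is interpreted as a timestamp
 * taken just after the 32-bit counter rolled over, i.e. later in time.
 */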
+ len = g_slist_length(fm->frame_list); + ts_next_pending = (guint64)-1; + ts_out_of_dated = 0; + p_out_of_dated = NULL; + + + for (i = 0; i < len; i++) + { + p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); + mix_videoframe_get_timestamp(p, &ts); + if (ts >= fm->last_frame_timestamp && + ts <= fm->next_frame_timestamp + tolerance) + { + fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); + *mvf = p; + mix_videoframe_get_timestamp(p, &(fm->last_frame_timestamp)); + fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; + LOG_V("frame is dequeud, ts = %"G_GINT64_FORMAT".\n", ts); + return MIX_RESULT_SUCCESS; + } + + if (ts > fm->next_frame_timestamp + tolerance && + ts < ts_next_pending) + { + ts_next_pending = ts; + } + if (ts < fm->last_frame_timestamp && + ts >= ts_out_of_dated) + { + // video frame that is most recently out-of-dated. + // this may happen in variable frame rate scenario where two adjacent frames both meet + // the "next frame" criteria, and the one with larger timestamp is dequeued first. + ts_out_of_dated = ts; + p_out_of_dated = p; + } + } - if (!mvf) { - return MIX_RESULT_INVALID_PARAM; - } + if (p_out_of_dated && + fm->last_frame_timestamp - ts_out_of_dated < TS_ROLLOVER_THRESHOLD) + { + fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p_out_of_dated); + mix_videoframe_unref(p_out_of_dated); + LOG_W("video frame is out of dated. ts = %"G_GINT64_FORMAT" compared to last ts = %"G_GINT64_FORMAT".\n", + ts_out_of_dated, fm->last_frame_timestamp); + return MIX_RESULT_FRAME_NOTAVAIL; + } + + if (len <= fm->max_enqueue_size && fm->eos == FALSE) + { + LOG_V("no frame is dequeued, expected ts = %"G_GINT64_FORMAT", next pending ts = %"G_GINT64_FORMAT".(List size = %d)\n", + fm->next_frame_timestamp, ts_next_pending, len); + return MIX_RESULT_FRAME_NOTAVAIL; + } - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } + // timestamp has gap + if (ts_next_pending != -1) + { + LOG_V("timestamp has gap, jumping from %"G_GINT64_FORMAT" to %"G_GINT64_FORMAT".\n", + fm->next_frame_timestamp, ts_next_pending); + + fm->next_frame_timestamp = ts_next_pending; + goto retry; + } - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + // time stamp roll-over + LOG_V("time stamp is rolled over, resetting next frame timestamp from %"G_GINT64_FORMAT" to 0.\n", + fm->next_frame_timestamp); - /* - * This should never happen! 
-	/*
-	 * This should never happen!
-	 */
-	if (fm->mode != MIX_FRAMEORDER_MODE_DISPLAYORDER && fm->mode
-			!= MIX_FRAMEORDER_MODE_DECODEORDER) {
-		return MIX_RESULT_FAIL;
-	}
+    fm->next_frame_timestamp = 0;
+    fm->last_frame_timestamp = 0;
+    goto retry;
-	g_mutex_lock(fm->lock);
+    // should never reach here
+    LOG_E("Error in timestamp-based dequeue implementation!\n");
+    return MIX_RESULT_FAIL;
+}
-	ret = MIX_RESULT_SUCCESS;
-	if (fm->mode == MIX_FRAMEORDER_MODE_DECODEORDER) {
-		/*
-		 * decode order mode, push the frame into output queue
-		 */
-		g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
-
-	} else {
-
-#ifdef ANDROID
-		guint64 timestamp = 0;
-		mix_videoframe_get_timestamp(mvf, &timestamp);
-
-		/* add timestamp into timestamp storage */
-		if(fm->timestamp_storage) {
-			gint idx = 0;
-			gboolean found = FALSE;
-
-			if(fm->timestamp_storage->len) {
-				for(idx = 0; idx < fm->timestamp_storage->len; idx ++) {
-					if(timestamp == g_array_index(fm->timestamp_storage, guint64, idx)) {
-						found = TRUE;
-						break;
-					}
-				}
-			}
-
-			if(!found) {
-				g_array_append_val(fm->timestamp_storage, timestamp);
-			}
-		}
-#endif
+MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf)
+{
+    int i, len;
+    MixVideoFrame* p;
+    guint32 picnum, smallest_picnum;
+    guint32 next_picnum_pending;
-	if (fm->timebased_ordering) {
-#ifndef ANDROID
-		ret = mix_framemanager_timestamp_based_enqueue(fm, mvf);
-#else
-		ret = mix_framemanager_displayorder_based_enqueue(fm, mvf);
-#endif
+    len = g_slist_length(fm->frame_list);
-	} else {
-		ret = mix_framemanager_frametype_based_enqueue(fm, mvf);
-	}
-	}
+retry:
+    next_picnum_pending = (guint32)-1;
+    smallest_picnum = (guint32)-1;
+
+    for (i = 0; i < len; i++)
+    {
+        p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i);
+        mix_videoframe_get_displayorder(p, &picnum);
+        if (picnum == fm->next_frame_picnumber)
+        {
+            fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p);
+            mix_framemanager_update_timestamp(fm, p);
+            *mvf = p;
+            LOG_V("frame is dequeued, poc = %d.\n", fm->next_frame_picnumber);
+            fm->next_frame_picnumber++;
+            //if (fm->next_frame_picnumber == fm->max_picture_number)
+            //    fm->next_frame_picnumber = 0;
+            return MIX_RESULT_SUCCESS;
+        }
+
+        if (picnum > fm->next_frame_picnumber &&
+            picnum < next_picnum_pending)
+        {
+            next_picnum_pending = picnum;
+        }
+
+        if (picnum < fm->next_frame_picnumber &&
+            picnum < smallest_picnum)
+        {
+            smallest_picnum = picnum;
+        }
+    }
-	g_mutex_unlock(fm->lock);
+    if (smallest_picnum != (guint32)-1 && fm->next_frame_picnumber - smallest_picnum < 8)
+    {
+        // the smallest possible value of MaxPicOrderCntLsb is 16. If the distance from "next frame pic number"
+        // to the smallest pic number in the list is less than half of 16, it is safe to assume that the pic number
+        // was reset when a new IDR was encoded (where the pic number of the top or bottom field must be 0, subclause 8.2.1).
+        LOG_V("next frame number is reset from %d to 0, smallest picnumber in list (size = %d) is %d.\n",
+            fm->next_frame_picnumber, len, smallest_picnum);
+        fm->next_frame_picnumber = 0;
+        goto retry;
+    }
+
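+    // Example (hypothetical values): if next_frame_picnumber has advanced to 6
+    // but the list now holds POCs {0, 2, 4}, the distance 6 - 0 < 8 cannot be
+    // produced by normal reordering, so a new IDR is assumed and dequeuing
+    // restarts at POC 0.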
+    if (len <= fm->max_enqueue_size && fm->eos == FALSE)
+    {
+        LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. (List size = %d)\n",
+            fm->next_frame_picnumber, next_picnum_pending, len);
+        return MIX_RESULT_FRAME_NOTAVAIL;
+    }
-	return ret;
-}
+    // the picture number sequence has a gap
+    if (next_picnum_pending != -1)
+    {
+        LOG_V("picture number has a gap, jumping from %d to %d.\n",
+            fm->next_frame_picnumber, next_picnum_pending);
+
+        fm->next_frame_picnumber = next_picnum_pending;
+        goto retry;
+    }
-#ifdef ANDROID
-gint timestamp_storage_sorting_func(gconstpointer a, gconstpointer b) {
+    // picture number roll-over
+    LOG_V("picture number is rolled over, resetting next picnum from %d to 0.\n",
+        fm->next_frame_picnumber);
-	guint64 ta = *((guint64 *)a);
-	guint64 tb = *((guint64 *)b);
+    fm->next_frame_picnumber = 0;
+    goto retry;
-	if(ta > tb) {
-		return +1;
-	} else if(ta == tb) {
-		return 0;
-	}
-	return -1;
+    // should never reach here
+    LOG_E("Error in picnumber-based dequeue implementation!\n");
+    return MIX_RESULT_FAIL;
 }
-#endif

 MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) {

-	MIX_RESULT ret = MIX_RESULT_FAIL;
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+	LOG_V("Begin\n");

 	if (!MIX_IS_FRAMEMANAGER(fm)) {
 		return MIX_RESULT_INVALID_PARAM;
@@ -1087,66 +689,99 @@ MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) {

 	g_mutex_lock(fm->lock);

-	ret = MIX_RESULT_FRAME_NOTAVAIL;
-	*mvf = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue);
-	if (*mvf) {
-#ifdef ANDROID
-		if(fm->timestamp_storage && fm->mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) {
-			if(fm->timestamp_storage->len) {
-				guint64 ts;
-				g_array_sort(fm->timestamp_storage, timestamp_storage_sorting_func);
-				ts = g_array_index(fm->timestamp_storage, guint64, 0);
-				mix_videoframe_set_timestamp(*mvf, ts);
-				g_array_remove_index_fast(fm->timestamp_storage, 0);
-			}
-		}
-#endif
-		ret = MIX_RESULT_SUCCESS;
-	} else if (fm->eos) {
-		ret = MIX_RESULT_EOS;
+	if (fm->frame_list == NULL)
+	{
+		if (fm->eos)
+		{
+			LOG_V("No frame is dequeued (eos)!\n");
+			ret = MIX_RESULT_EOS;
+		}
+		else
+		{
+			LOG_V("No frame is dequeued as queue is empty!\n");
+			ret = MIX_RESULT_FRAME_NOTAVAIL;
+		}
+	}
+	else if (fm->is_first_frame)
+	{
+		// dequeue the first entry in the list. No need to update the timestamp, as
+		// the list should contain only one frame.
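+		// Sketch of the bootstrap (for illustration): the first dequeued frame
+		// seeds last_frame_timestamp / next_frame_picnumber below, giving the
+		// mode-specific dequeue paths a reference point for later comparisons.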
+#ifdef MIX_LOG_ENABLE
+		if (g_slist_length(fm->frame_list) != 1)
+		{
+			LOG_W("length of list is not equal to 1 for the first frame.\n");
+		}
+#endif
+		*mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0);
+		fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf));
+
+		if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP)
+		{
+			mix_videoframe_get_timestamp(*mvf, &(fm->last_frame_timestamp));
+			fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta;
+			LOG_V("The first frame is dequeued, ts = %"G_GINT64_FORMAT"\n", fm->last_frame_timestamp);
+		}
+		else if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER)
+		{
+			mix_videoframe_get_displayorder(*mvf, &(fm->next_frame_picnumber));
+			LOG_V("The first frame is dequeued, POC = %d\n", fm->next_frame_picnumber);
+			fm->next_frame_picnumber++;
+			//if (fm->next_frame_picnumber == fm->max_picture_number)
+			//	fm->next_frame_picnumber = 0;
+		}
+		else
+		{
+#ifdef MIX_LOG_ENABLE
+			MixFrameType type;
+			mix_videoframe_get_frame_type(*mvf, &type);
+			LOG_V("The first frame is dequeued, frame type is %d.\n", type);
+#endif
+		}
+		fm->is_first_frame = FALSE;
+
+		ret = MIX_RESULT_SUCCESS;
+	}
+	else
+	{
+		// not the first frame and list is not empty
+		switch(fm->mode)
+		{
+		case MIX_DISPLAY_ORDER_TIMESTAMP:
+			ret = mix_framemanager_timestamp_based_dequeue(fm, mvf);
+			break;
+
+		case MIX_DISPLAY_ORDER_PICNUMBER:
+			ret = mix_framemanager_picnumber_based_dequeue(fm, mvf);
+			break;
+
+		case MIX_DISPLAY_ORDER_PICTYPE:
+			ret = mix_framemanager_pictype_based_dequeue(fm, mvf);
+			break;
+
+		case MIX_DISPLAY_ORDER_FIFO:
+			*mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0);
+			fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf));
+			ret = MIX_RESULT_SUCCESS;
+			LOG_V("One frame is dequeued.\n");
+			break;
+
+		default:
+			LOG_E("Invalid frame order mode\n");
+			ret = MIX_RESULT_FAIL;
+			break;
+		}
 	}

 	g_mutex_unlock(fm->lock);

-	return ret;
-}
-
-gint frame_sorting_func(gconstpointer a, gconstpointer b) {
-
-	MixVideoFrame *fa = *((MixVideoFrame **) a);
-	MixVideoFrame *fb = *((MixVideoFrame **) b);
-
-	guint64 ta, tb;
-
-	if (!fa && !fb) {
-		return 0;
-	}
-
-	if (fa && !fb) {
-		return 1;
-	}
-
-	if (!fa && fb) {
-		return -1;
-	}
-
-	mix_videoframe_get_timestamp(fa, &ta);
-	mix_videoframe_get_timestamp(fb, &tb);
-
-	if (ta > tb) {
-		return 1;
-	}
-
-	if (ta == tb) {
-		return 0;
-	}
+	LOG_V("End\n");

-	return -1;
+	return ret;
 }

 MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) {

-	MIX_RESULT ret = MIX_RESULT_FAIL;
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;

 	if (!MIX_IS_FRAMEMANAGER(fm)) {
 		return MIX_RESULT_INVALID_PARAM;
@@ -1156,52 +791,9 @@ MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) {
 		return MIX_RESULT_NOT_INIT;
 	}

-	g_mutex_lock(fm->lock);
-
-	if (fm->mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) {
-
-		/* Do we have frames that are not in the output queue?
-		 * MixVideoFormat* must guarantee that when this
-		 * function called, the last frame is already enqueued!
- */ - - /* In case it is frame type based enqueue, p_frame is the - * only frame that is not in the output queue - */ - if (fm->p_frame && fm->frame_queue) { - g_queue_push_tail(fm->frame_queue, (gpointer) fm->p_frame); - fm->p_frame = NULL; - } - - /* In case it is timestamp based enqueue, throw all the frames - * in the array into the output queue by the order of timestamp - */ - if (fm->frame_array) { - guint len = fm->frame_array->len; - if (len) { -#ifndef ANDROID - /* sorting frames in the array by timestamp */ - g_ptr_array_sort(fm->frame_array, frame_sorting_func); -#else - /* sorting frames is the array by displayorder */ - g_ptr_array_sort(fm->frame_array, frame_sorting_func_DO); -#endif - - guint idx = 0; - MixVideoFrame *frame = NULL; - for (idx = 0; idx < len; idx++) { - frame = (MixVideoFrame *) g_ptr_array_index( - fm->frame_array, idx); - if (frame) { - g_ptr_array_index(fm->frame_array, idx) = NULL; - g_queue_push_tail(fm->frame_queue, (gpointer) frame); - } - } - } - } - } - + g_mutex_lock(fm->lock); fm->eos = TRUE; + LOG_V("EOS is received.\n"); g_mutex_unlock(fm->lock); return ret; diff --git a/mix_video/src/mixframemanager.h b/mix_video/src/mixframemanager.h index bde47ee..fae5948 100644 --- a/mix_video/src/mixframemanager.h +++ b/mix_video/src/mixframemanager.h @@ -13,6 +13,7 @@ #include "mixvideodef.h" #include "mixvideoframe.h" +G_BEGIN_DECLS /* * Type macros. */ @@ -26,6 +27,21 @@ typedef struct _MixFrameManager MixFrameManager; typedef struct _MixFrameManagerClass MixFrameManagerClass; +/* +* MIX_FRAMEORDER_MODE_DECODEORDER is here interpreted as +* MIX_DISPLAY_ORDER_FIFO, a special case of display order mode. +*/ +typedef enum +{ + MIX_DISPLAY_ORDER_UNKNOWN, + MIX_DISPLAY_ORDER_FIFO, + MIX_DISPLAY_ORDER_TIMESTAMP, + MIX_DISPLAY_ORDER_PICNUMBER, + MIX_DISPLAY_ORDER_PICTYPE, + MIX_DISPLAY_ORDER_LAST +} MixDisplayOrderMode; + + struct _MixFrameManager { /*< public > */ GObject parent; @@ -38,30 +54,20 @@ struct _MixFrameManager { gboolean eos; GMutex *lock; - GPtrArray *frame_array; - GQueue *frame_queue; + GSList* frame_list; gint framerate_numerator; gint framerate_denominator; guint64 frame_timestamp_delta; - MixFrameOrderMode mode; + MixDisplayOrderMode mode; gboolean is_first_frame; + guint64 last_frame_timestamp; guint64 next_frame_timestamp; - - /* - * For VC-1 in ASF. 
-	 */
-
-	MixVideoFrame *p_frame;
-	guint64 prev_timestamp;
-
-	gboolean timebased_ordering;
-#ifdef ANDROID
-	guint32 next_displayorder;
-	GArray *timestamp_storage;
-#endif
+	guint32 next_frame_picnumber;
+	gint max_enqueue_size;
+	guint32 max_picture_number;
 };

 /**
@@ -117,8 +123,8 @@ MixFrameManager *mix_framemanager_ref(MixFrameManager * mix);
  * Initialize FM
  */
 MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm,
-		MixFrameOrderMode mode, gint framerate_numerator,
-		gint framerate_denominator, gboolean timebased_ordering);
+		MixDisplayOrderMode mode, gint framerate_numerator,
+		gint framerate_denominator);
 /*
  * Deinitialize FM
  */
@@ -138,10 +144,22 @@ MIX_RESULT mix_framemanager_get_framerate(MixFrameManager *fm,

 /*
- * Get Frame Order Mode
+ * Set maximum size of queue
  */
-MIX_RESULT mix_framemanager_get_frame_order_mode(MixFrameManager *fm,
-		MixFrameOrderMode *mode);
+MIX_RESULT mix_framemanager_set_max_enqueue_size(MixFrameManager *fm, gint size);
+
+
+/*
+ * Set maximum picture number
+ */
+MIX_RESULT mix_framemanager_set_max_picture_number(MixFrameManager *fm, guint32 num);
+
+
+/*
+ * Get Display Order Mode
+ */
+MIX_RESULT mix_framemanager_get_display_order_mode(MixFrameManager *fm,
+		MixDisplayOrderMode *mode);

 /*
  * For discontiunity, reset FM
@@ -154,7 +172,7 @@ MIX_RESULT mix_framemanager_flush(MixFrameManager *fm);
 MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf);

 /*
- * Dequeue MixVideoFrame in proper order depends on MixFrameOrderMode value
+ * Dequeue MixVideoFrame in the proper order, depending on the MixDisplayOrderMode value set
  * during initialization.
 */
 MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf);
@@ -164,5 +182,5 @@ MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf);
 */
 MIX_RESULT mix_framemanager_eos(MixFrameManager *fm);

-
+G_END_DECLS
 #endif /* __MIX_FRAMEMANAGER_H__ */
diff --git a/mix_video/src/mixsurfacepool.c b/mix_video/src/mixsurfacepool.c
index 0c778af..9f92ae1 100644
--- a/mix_video/src/mixsurfacepool.c
+++ b/mix_video/src/mixsurfacepool.c
@@ -42,6 +42,7 @@ static void mix_surfacepool_init(MixSurfacePool * self) {
 	self->free_list_max_size = 0;
 	self->free_list_cur_size = 0;
 	self->high_water_mark = 0;
+	self->initialized = FALSE;

 	self->reserved1 = NULL;
 	self->reserved2 = NULL;
@@ -256,6 +257,9 @@ MIX_RESULT mix_surfacepool_initialize(MixSurfacePool * obj,

 	obj->high_water_mark = 0;

+	/* assume it is initialized */
+	obj->initialized = TRUE;
+
 	MIX_UNLOCK(obj->objectlock);

 	return MIX_RESULT_SUCCESS;
@@ -302,6 +306,8 @@ MIX_RESULT mix_surfacepool_initialize(MixSurfacePool * obj,

 	obj->high_water_mark = 0;

+	obj->initialized = TRUE;
+
 	MIX_UNLOCK(obj->objectlock);

 	LOG_V( "End\n");
@@ -536,6 +542,14 @@ MIX_RESULT mix_surfacepool_check_available(MixSurfacePool * obj) {

 	MIX_LOCK(obj->objectlock);

+	if (obj->initialized == FALSE)
+	{
+		LOG_W("surface pool is not initialized, probably configuration data has not been received yet.\n");
+		MIX_UNLOCK(obj->objectlock);
+		return MIX_RESULT_NOT_INIT;
+	}
+
+
 #if 0
 	if (obj->free_list == NULL) {
 #else
diff --git a/mix_video/src/mixsurfacepool.h b/mix_video/src/mixsurfacepool.h
index d475792..0639fbc 100644
--- a/mix_video/src/mixsurfacepool.h
+++ b/mix_video/src/mixsurfacepool.h
@@ -79,6 +79,7 @@ struct _MixSurfacePool
   gulong free_list_max_size;	/* initial size of the free list */
   gulong free_list_cur_size;	/* current size of the free list */
   gulong high_water_mark;	/* most surfaces in use at one time */
+  gboolean initialized;

 //  guint64
timestamp; void *reserved1; diff --git a/mix_video/src/mixvideo.c b/mix_video/src/mixvideo.c index 063f641..718d355 100644 --- a/mix_video/src/mixvideo.c +++ b/mix_video/src/mixvideo.c @@ -177,6 +177,8 @@ MIX_RESULT mix_video_release_mixbuffer_default(MixVideo * mix, MixBuffer * buf); MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, guint *max_size); +MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, + MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); static void mix_video_finalize(GObject * obj); MIX_RESULT mix_video_configure_decode(MixVideo * mix, @@ -228,6 +230,7 @@ static void mix_video_class_init(MixVideoClass * klass) { klass->get_mix_buffer_func = mix_video_get_mixbuffer_default; klass->release_mix_buffer_func = mix_video_release_mixbuffer_default; klass->get_max_coded_buffer_size_func = mix_video_get_max_coded_buffer_size_default; + klass->set_dynamic_enc_config_func = mix_video_set_dynamic_enc_config_default; } MixVideo *mix_video_new(void) { @@ -529,6 +532,7 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, guint bufpoolsize = 0; MixFrameOrderMode frame_order_mode = MIX_FRAMEORDER_MODE_DISPLAYORDER; + MixDisplayOrderMode display_order_mode = MIX_DISPLAY_ORDER_UNKNOWN; LOG_V( "Begin\n"); @@ -626,18 +630,28 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, goto cleanup; } - /* initialize frame manager */ - - if (mix_strcmp(mime_type, "video/x-wmv") == 0 || mix_strcmp(mime_type, - "video/mpeg") == 0 || mix_strcmp(mime_type, "video/x-divx") == 0 - || mix_strcmp(mime_type, "video/x-h263") == 0) { - ret = mix_framemanager_initialize(priv->frame_manager, - frame_order_mode, fps_n, fps_d, FALSE); - } else { - ret = mix_framemanager_initialize(priv->frame_manager, - frame_order_mode, fps_n, fps_d, TRUE); + if (frame_order_mode == MIX_FRAMEORDER_MODE_DECODEORDER) + { + display_order_mode = MIX_DISPLAY_ORDER_FIFO; + } + else if (mix_strcmp(mime_type, "video/x-wmv") == 0 || + mix_strcmp(mime_type, "video/mpeg") == 0 || + mix_strcmp(mime_type, "video/x-divx") == 0 || + mix_strcmp(mime_type, "video/x-h263") == 0 || + mix_strcmp(mime_type, "video/x-xvid") == 0 ) + { + display_order_mode = MIX_DISPLAY_ORDER_PICTYPE; + } + else + { + //display_order_mode = MIX_DISPLAY_ORDER_TIMESTAMP; + display_order_mode = MIX_DISPLAY_ORDER_PICNUMBER; } + /* initialize frame manager */ + ret = mix_framemanager_initialize(priv->frame_manager, + display_order_mode, fps_n, fps_d); + if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed to initialize frame manager\n"); goto cleanup; @@ -688,13 +702,16 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, priv->video_format = MIX_VIDEOFORMAT(video_format); - } else if (mix_strcmp(mime_type, "video/mpeg") == 0 || mix_strcmp(mime_type, - "video/x-divx") == 0 || mix_strcmp(mime_type, "video/x-h263") == 0 ) { + } else if (mix_strcmp(mime_type, "video/mpeg") == 0 || + mix_strcmp(mime_type, "video/x-divx") == 0 || + mix_strcmp(mime_type, "video/x-h263") == 0 || + mix_strcmp(mime_type, "video/x-xvid") == 0) { guint version = 0; /* Is this mpeg4:2 ? 
*/ - if (mix_strcmp(mime_type, "video/mpeg") == 0 || mix_strcmp(mime_type, "video/x-h263") == 0 ) { + if (mix_strcmp(mime_type, "video/mpeg") == 0 || + mix_strcmp(mime_type, "video/x-h263") == 0 ) { /* * we don't support mpeg other than mpeg verion 4 @@ -722,6 +739,7 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, /* config_param shall be MixVideoConfigParamsDecMP42 */ if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) { + LOG_E("MIX_IS_VIDEOCONFIGPARAMSDEC_MP42 failed.\n"); ret = MIX_RESULT_NOT_SUPPORTED; goto cleanup; } @@ -736,6 +754,7 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, /* if it is not divx 4 or 5 */ if (version != 4 && version != 5) { + LOG_E("Invalid divx version.\n"); ret = MIX_RESULT_NOT_SUPPORTED; goto cleanup; } @@ -811,9 +830,6 @@ MIX_RESULT mix_video_configure_encode(MixVideo * mix, MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264; guint bufpoolsize = 0; - MixFrameOrderMode frame_order_mode = MIX_FRAMEORDER_MODE_DECODEORDER; - - LOG_V( "Begin\n"); CHECK_INIT(mix, priv); @@ -890,8 +906,8 @@ MIX_RESULT mix_video_configure_encode(MixVideo * mix, /* initialize frame manager */ /* frame rate can be any value for encoding. */ - ret = mix_framemanager_initialize(priv->frame_manager, frame_order_mode, - 1, 1, FALSE); + ret = mix_framemanager_initialize(priv->frame_manager, MIX_DISPLAY_ORDER_FIFO, + 1, 1); if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed to initialize frame manager\n"); @@ -1052,7 +1068,7 @@ MIX_RESULT mix_video_configure_default(MixVideo * mix, MIX_RESULT mix_video_get_config_default(MixVideo * mix, MixVideoConfigParams ** config_params) { - MIX_RESULT ret = MIX_RESULT_FAIL; + MIX_RESULT ret = MIX_RESULT_SUCCESS; MixVideoPrivate *priv = NULL; CHECK_INIT_CONFIG(mix, priv); @@ -1564,6 +1580,293 @@ MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, guint *m return ret; } + +MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, + MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) +{ + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + LOG_V( "Begin\n"); + + CHECK_INIT_CONFIG(mix, priv); + + if (dynamic_params == NULL) { + LOG_E( + "dynamic_params == NULL\n"); + return MIX_RESULT_FAIL; + } + + MixVideoConfigParamsEnc *priv_config_params_enc = NULL; + if (priv->config_params) { + /* + * FIXME: It would be better to use ref/unref + */ + priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params; + //priv_config_params_enc = mix_videoconfigparamsenc_ref (priv->config_params); + } + else { + LOG_E( + "priv->config_params is invalid\n"); + return MIX_RESULT_FAIL; + } + + g_mutex_lock(priv->objlock); + + switch (params_type) { + case MIX_ENC_PARAMS_BITRATE: + { + ret = mix_videoconfigparamsenc_set_bit_rate (priv_config_params_enc, dynamic_params->bitrate); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_bit_rate\n"); + goto cleanup; + } + } + break; + case MIX_ENC_PARAMS_SLICE_SIZE: + { + /* + */ + MixVideoConfigParamsEncH264 * config_params_enc_h264 = + MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); + + ret = mix_videoconfigparamsenc_h264_set_slice_num (config_params_enc_h264, dynamic_params->slice_num); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_h264_set_slice_num\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_IDR_INTERVAL: + { + MixVideoConfigParamsEncH264 * config_params_enc_h264 = + MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); + + ret = 
mix_videoconfigparamsenc_h264_set_IDR_interval(config_params_enc_h264, dynamic_params->idr_interval); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_h264_set_IDR_interval\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_RC_MODE: + case MIX_ENC_PARAMS_RESOLUTION: + { + /* + * Step 1: Release videofmtenc Object + */ + if (priv->video_format_enc) { + mix_videofmtenc_deinitialize(priv->video_format_enc); + } + + MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref) + + //priv->alloc_surface_cnt = 0; //Surfaces are also released, we need to set alloc_surface_cnt to 0 + + /* + * Please note there maybe issue here for usrptr shared buffer mode + */ + + /* + * Step 2: Change configuration parameters (frame size) + */ + + if (params_type == MIX_ENC_PARAMS_RESOLUTION) { + ret = mix_videoconfigparamsenc_set_picture_res (priv_config_params_enc, dynamic_params->width, dynamic_params->height); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_picture_res\n"); + goto cleanup; + } + } + else if (params_type == MIX_ENC_PARAMS_RC_MODE) { + ret = mix_videoconfigparamsenc_set_rate_control(priv_config_params_enc, dynamic_params->rc_mode); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_rate_control\n"); + goto cleanup; + } + } + + + /* + * Step 3: Renew mixvideofmtenc object + */ + + MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264; + + ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc, + &encode_format); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get target format\n"); + goto cleanup; + } + + if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264 + && MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) { + + MixVideoFormatEnc_H264 *video_format_enc = + mix_videoformatenc_h264_new(); + + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n"); + goto cleanup; + } + + /* work specific to h264 encode */ + + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + + } + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_MPEG4 + && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) { + + MixVideoFormatEnc_MPEG4 *video_format_enc = mix_videoformatenc_mpeg4_new(); + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("mix_video_configure_encode: Failed to create mpeg-4:2 video format\n"); + goto cleanup; + } + + /* work specific to mpeg4 */ + + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + + } + + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263 + && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) { + + MixVideoFormatEnc_H263 *video_format_enc = mix_videoformatenc_h263_new(); + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("mix_video_configure_encode: Failed to create h.263 video format\n"); + goto cleanup; + } + + /* work specific to h.263 */ + + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + + } + + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW + && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) { + + MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new(); + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "mix_video_configure_encode: Failed to create preview video format\n"); + goto cleanup; + } + + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + + } + else { + + /*unsupported format */ + ret = 
MIX_RESULT_NOT_SUPPORTED; + LOG_E("Unknown format, we can't handle it\n"); + goto cleanup; + } + + + /* + * Step 4: Re-initialize and start a new encode session, of course with new resolution value + */ + + /* + * Initialize MixVideoEncFormat + */ + + /* + * If we are using usrptr shared buffer mode, alloc_surfaces/usrptr/alloc_surface_cnt + * will be re-requested by v4l2camsrc, how to differetiate old surface pools and new one + * is a problem. + */ + + /* + * priv->alloc_surface_cnt already been reset to 0 after calling mix_videofmtenc_initialize + * For dynamic frame size change, upstream element need to re-call buffer allocation method + * and priv->alloc_surface_cnt will get a new value. + */ + //priv->alloc_surface_cnt = 5; + ret = mix_videofmtenc_initialize(priv->video_format_enc, + priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool, + priv->va_display/*, priv->alloc_surfaces, priv->usrptr, priv->alloc_surface_cnt*/); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed initialize video format\n"); + goto cleanup; + } + + mix_surfacepool_ref(priv->surface_pool); + + + } + break; + case MIX_ENC_PARAMS_GOP_SIZE: + { + ret = mix_videoconfigparamsenc_set_intra_period (priv_config_params_enc, dynamic_params->intra_period); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_intra_period\n"); + goto cleanup; + } + + } + break; + case MIX_ENC_PARAMS_FRAME_RATE: + { + ret = mix_videoconfigparamsenc_set_frame_rate (priv_config_params_enc, dynamic_params->frame_rate_num, dynamic_params->frame_rate_denom); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_frame_rate\n"); + goto cleanup; + } + } + break; + case MIX_ENC_PARAMS_FORCE_KEY_FRAME: + { + /* + * nothing to be done now. + */ + } + break; + case MIX_ENC_PARAMS_QP: + { + ret = mix_videoconfigparamsenc_set_init_qp (priv_config_params_enc, dynamic_params->QP); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_init_qp\n"); + goto cleanup; + } + } + break; + case MIX_ENC_PARAMS_CIR_FRAME_CNT: + { + ret = mix_videoconfigparamsenc_set_CIR_frame_cnt (priv_config_params_enc, dynamic_params->CIR_frame_cnt); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_CIR_frame_cnt\n"); + goto cleanup; + } + + } + break; + + default: + break; + } + + ret = mix_videofmtenc_set_dynamic_enc_config (priv->video_format_enc, priv_config_params_enc, params_type); + +cleanup: + + g_mutex_unlock(priv->objlock); + + LOG_V( "End ret = 0x%x\n", ret); + + return ret; +} /* * API functions */ @@ -1770,3 +2073,14 @@ MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize) { } return MIX_RESULT_NOTIMPL; } + +MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, + MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) +{ + MixVideoClass *klass = MIX_VIDEO_GET_CLASS(mix); + if (klass->set_dynamic_enc_config_func) { + return klass->set_dynamic_enc_config_func(mix, params_type, dynamic_params); + } + return MIX_RESULT_NOTIMPL; + +} diff --git a/mix_video/src/mixvideo.h b/mix_video/src/mixvideo.h index 1184d05..f8e4828 100644 --- a/mix_video/src/mixvideo.h +++ b/mix_video/src/mixvideo.h @@ -21,6 +21,8 @@ #include "mixvideocaps.h" #include "mixbuffer.h" +G_BEGIN_DECLS + /* * Type macros. 
*/ @@ -83,6 +85,9 @@ typedef MIX_RESULT (*MixVideoReleaseMixBufferFunc)(MixVideo * mix, typedef MIX_RESULT (*MixVideoGetMaxCodedBufferSizeFunc) (MixVideo * mix, guint *max_size); + +typedef MIX_RESULT (*MixVideoSetDynamicEncConfigFunc) (MixVideo * mix, + MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); /** * MixVideo: * @parent: Parent object. @@ -123,6 +128,7 @@ struct _MixVideoClass { MixVideoGetMixBufferFunc get_mix_buffer_func; MixVideoReleaseMixBufferFunc release_mix_buffer_func; MixVideoGetMaxCodedBufferSizeFunc get_max_coded_buffer_size_func; + MixVideoSetDynamicEncConfigFunc set_dynamic_enc_config_func; }; /** @@ -542,4 +548,24 @@ MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf); */ MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize); + +/** + * mix_video_set_dynamic_enc_config: + * @mix: #MixVideo object. + * @params_type: Dynamic encoder configuration type + * @dynamic_params: Point to dynamic control data structure which includes the new value to be changed to + * @returns: Common Video Error Return Codes + * + * + * This function can be used to change the encoder parameters at run-time + * + * + * Usually this function is after the encoding session is started. + * + */ +MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, + MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); + +G_END_DECLS + #endif /* __MIX_VIDEO_H__ */ diff --git a/mix_video/src/mixvideo_private.h b/mix_video/src/mixvideo_private.h index 806d249..ffa403f 100644 --- a/mix_video/src/mixvideo_private.h +++ b/mix_video/src/mixvideo_private.h @@ -9,6 +9,7 @@ #ifndef __MIX_VIDEO_PRIVATE_H__ #define __MIX_VIDEO_PRIVATE_H__ +G_BEGIN_DECLS typedef struct _MixVideoPrivate MixVideoPrivate; @@ -53,5 +54,6 @@ struct _MixVideoPrivate { void mix_video_private_initialize(MixVideoPrivate* priv); void mix_video_private_cleanup(MixVideoPrivate* priv); +G_END_DECLS #endif /* __MIX_VIDEO_PRIVATE_H__ */ diff --git a/mix_video/src/mixvideocaps.h b/mix_video/src/mixvideocaps.h index 6630c19..ff50647 100644 --- a/mix_video/src/mixvideocaps.h +++ b/mix_video/src/mixvideocaps.h @@ -12,6 +12,8 @@ No license under any patent, copyright, trade secret or other intellectual prope #include #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOCAPS: * @@ -134,4 +136,6 @@ MIX_RESULT mix_videocaps_set_video_hw_caps (MixVideoCaps * obj, MIX_RESULT mix_videocaps_get_video_hw_caps (MixVideoCaps * obj, gchar ** video_hw_caps); +G_END_DECLS + #endif /* __MIX_VIDEOCAPS_H__ */ diff --git a/mix_video/src/mixvideoconfigparams.h b/mix_video/src/mixvideoconfigparams.h index acfa595..d3939af 100644 --- a/mix_video/src/mixvideoconfigparams.h +++ b/mix_video/src/mixvideoconfigparams.h @@ -12,6 +12,8 @@ #include #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOCONFIGPARAMS: * @@ -124,4 +126,6 @@ MixVideoConfigParams *mix_videoconfigparams_ref(MixVideoConfigParams * mix); /* TODO: Add getters and setters for other properties */ +G_END_DECLS + #endif /* __MIX_VIDEOCONFIGPARAMS_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsdec.c b/mix_video/src/mixvideoconfigparamsdec.c index 5491f00..7ad334f 100644 --- a/mix_video/src/mixvideoconfigparamsdec.c +++ b/mix_video/src/mixvideoconfigparamsdec.c @@ -156,6 +156,7 @@ gboolean mix_videoconfigparamsdec_copy(MixParams * target, const MixParams * src /* copy properties of primitive type */ + this_target->frame_order_mode = this_src->frame_order_mode; this_target->frame_rate_num = 
this_src->frame_rate_num; this_target->frame_rate_denom = this_src->frame_rate_denom; this_target->picture_width = this_src->picture_width; @@ -332,6 +333,7 @@ MIX_RESULT mix_videoconfigparamsdec_set_frame_order_mode( MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode) { MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); obj->frame_order_mode = frame_order_mode; + LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode); return MIX_RESULT_SUCCESS; } @@ -339,6 +341,7 @@ MIX_RESULT mix_videoconfigparamsdec_get_frame_order_mode( MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode) { MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, frame_order_mode); *frame_order_mode = obj->frame_order_mode; + LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode); return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoconfigparamsdec.h b/mix_video/src/mixvideoconfigparamsdec.h index fba4b78..809eb1e 100644 --- a/mix_video/src/mixvideoconfigparamsdec.h +++ b/mix_video/src/mixvideoconfigparamsdec.h @@ -12,6 +12,8 @@ #include #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOCONFIGPARAMSDEC: * @@ -375,4 +377,6 @@ MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation(MixVideoConfigP /* TODO: Add getters and setters for other properties */ +G_END_DECLS + #endif /* __MIX_VIDEOCONFIGPARAMSDEC_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.h b/mix_video/src/mixvideoconfigparamsdec_h264.h index f1d6e16..8a99313 100644 --- a/mix_video/src/mixvideoconfigparamsdec_h264.h +++ b/mix_video/src/mixvideoconfigparamsdec_h264.h @@ -12,6 +12,8 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoconfigparamsdec.h" #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264: * @@ -135,4 +137,6 @@ MixVideoConfigParamsDecH264 /* TODO: Add getters and setters for other properties */ +G_END_DECLS + #endif /* __MIX_VIDEOCONFIGPARAMSDEC_H264_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.h b/mix_video/src/mixvideoconfigparamsdec_mp42.h index 3ac3b8c..6e3d84c 100644 --- a/mix_video/src/mixvideoconfigparamsdec_mp42.h +++ b/mix_video/src/mixvideoconfigparamsdec_mp42.h @@ -12,6 +12,8 @@ #include "mixvideoconfigparamsdec.h" #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42: * @@ -182,4 +184,6 @@ MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion( MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion( MixVideoConfigParamsDecMP42 *obj, guint *version); +G_END_DECLS + #endif /* __MIX_VIDEOCONFIGPARAMSDEC_MP42_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.h b/mix_video/src/mixvideoconfigparamsdec_vc1.h index 9d0744b..d38f975 100644 --- a/mix_video/src/mixvideoconfigparamsdec_vc1.h +++ b/mix_video/src/mixvideoconfigparamsdec_vc1.h @@ -12,6 +12,8 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoconfigparamsdec.h" #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1: * @@ -142,4 +144,6 @@ MixVideoConfigParamsDecVC1 /* TODO: Add getters and setters for other properties */ +G_END_DECLS + #endif /* __MIX_VIDEOCONFIGPARAMSDECDEC_VC1_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc.c b/mix_video/src/mixvideoconfigparamsenc.c index 28cd288..040b612 100644 --- a/mix_video/src/mixvideoconfigparamsenc.c +++ b/mix_video/src/mixvideoconfigparamsenc.c @@ -60,6 +60,9 @@ static void mix_videoconfigparamsenc_init(MixVideoConfigParamsEnc * self) 
{ self->rate_control = MIX_RATE_CONTROL_NONE; self->raw_format = MIX_RAW_TARGET_FORMAT_YUV420; self->profile = MIX_PROFILE_H264BASELINE; + self->level = 30; + + self->CIR_frame_cnt = 15; /* TODO: initialize other properties */ self->reserved1 = NULL; @@ -180,6 +183,8 @@ gboolean mix_videoconfigparamsenc_copy(MixParams * target, const MixParams * src this_target->rate_control = this_src->rate_control; this_target->raw_format = this_src->raw_format; this_target->profile = this_src->profile; + this_target->level = this_src->level; + this_target->CIR_frame_cnt = this_src->CIR_frame_cnt; /* copy properties of non-primitive */ @@ -317,7 +322,13 @@ gboolean mix_videoconfigparamsenc_equal(MixParams * first, MixParams * second) { if (this_first->profile != this_second->profile) { goto not_equal; } + if (this_first->level != this_second->level) { + goto not_equal; + } + if (this_first->CIR_frame_cnt != this_second->CIR_frame_cnt) { + goto not_equal; + } /* check the equalitiy of the none-primitive type properties */ /* compare mime_type */ @@ -687,3 +698,32 @@ MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj, return MIX_RESULT_SUCCESS; } +MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, + guint8 level) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->level = level; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, + guint8 * level) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, level); + *level = obj->level; + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, + guint CIR_frame_cnt) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->CIR_frame_cnt = CIR_frame_cnt; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, + guint * CIR_frame_cnt) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, CIR_frame_cnt); + *CIR_frame_cnt = obj->CIR_frame_cnt; + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoconfigparamsenc.h b/mix_video/src/mixvideoconfigparamsenc.h index be7ec9f..adb5f25 100644 --- a/mix_video/src/mixvideoconfigparamsenc.h +++ b/mix_video/src/mixvideoconfigparamsenc.h @@ -12,6 +12,8 @@ #include #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOCONFIGPARAMSENC: * @@ -75,6 +77,8 @@ struct _MixVideoConfigParamsEnc { /* Encoding profile */ MixProfile profile; + guint8 level; + /* Raw format to be encoded */ MixRawTargetFormat raw_format; @@ -123,6 +127,8 @@ struct _MixVideoConfigParamsEnc { /* Size of the array ci_frame_id */ guint ci_frame_num; + guint CIR_frame_cnt; + /* < private > */ gulong draw; @@ -565,7 +571,58 @@ MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj, MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj, MixProfile * profile); + +/** + * mix_videoconfigparamsenc_set_level: + * @obj: #MixVideoConfigParamsEnc object + * @level: Encoding level + * @returns: Common Video Error Return Codes + * + * Set Encoding level + */ +MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, + guint8 level); + + +/** + * mix_videoconfigparamsenc_get_level: + * @obj: #MixVideoConfigParamsEnc object + * @level: Encoding level to be returned + * @returns: Common Video Error Return Codes + * + * Get Encoding level + */ + +MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, + guint8 * 
level);
+
+
+/**
+ * mix_videoconfigparamsenc_set_CIR_frame_cnt:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @CIR_frame_cnt: Encoding CIR frame count
+ * @returns: Common Video Error Return Codes
+ *
+ * Set Encoding CIR frame count
+ */
+MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj,
+		guint CIR_frame_cnt);
+
+/**
+ * mix_videoconfigparamsenc_get_CIR_frame_cnt:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @CIR_frame_cnt: Encoding CIR frame count to be returned
+ * @returns: Common Video Error Return Codes
+ *
+ * Get Encoding CIR frame count
+ */
+
+MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj,
+		guint * CIR_frame_cnt);
+
 /* TODO: Add getters and setters for other properties */

+G_END_DECLS
+
 #endif /* __MIX_VIDEOCONFIGPARAMSENC_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsenc_h263.h b/mix_video/src/mixvideoconfigparamsenc_h263.h
index 8355207..097041c 100644
--- a/mix_video/src/mixvideoconfigparamsenc_h263.h
+++ b/mix_video/src/mixvideoconfigparamsenc_h263.h
@@ -12,6 +12,8 @@ No license under any patent, copyright, trade secret or other intellectual prope
 #include "mixvideoconfigparamsenc.h"
 #include "mixvideodef.h"

+G_BEGIN_DECLS
+
 /**
 * MIX_TYPE_VIDEOCONFIGPARAMSENC_H263:
 *
@@ -185,4 +187,6 @@ MIX_RESULT mix_videoconfigparamsenc_h263_set_slice_num (MixVideoConfigParamsEncH
 MIX_RESULT mix_videoconfigparamsenc_h263_get_slice_num (MixVideoConfigParamsEncH263 * obj,
 		guint * slice_num);

+G_END_DECLS
+
 #endif /* __MIX_VIDEOCONFIGPARAMSENC_H263_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.c b/mix_video/src/mixvideoconfigparamsenc_h264.c
index 69b6b0c..620093d 100644
--- a/mix_video/src/mixvideoconfigparamsenc_h264.c
+++ b/mix_video/src/mixvideoconfigparamsenc_h264.c
@@ -56,6 +56,7 @@ mix_videoconfigparamsenc_h264_init (MixVideoConfigParamsEncH264 * self)
   self->disable_deblocking_filter_idc = 0;

   self->delimiter_type = MIX_DELIMITER_LENGTHPREFIX;
+  self->idr_interval = 2;

   self->reserved1 = NULL;
   self->reserved2 = NULL;
@@ -170,6 +171,8 @@ mix_videoconfigparamsenc_h264_copy (MixParams * target, const MixParams * src)
   this_target->slice_num = this_src->slice_num;
   this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc;
   this_target->delimiter_type = this_src->delimiter_type;
+  this_target->idr_interval = this_src->idr_interval;
+

   // Now chainup base class
@@ -225,7 +228,11 @@ mix_videoconfigparamsencenc_h264_equal (MixParams * first, MixParams * second)
   if (this_first->delimiter_type != this_second->delimiter_type) {
     goto not_equal;
   }
-
+
+  if (this_first->idr_interval != this_second->idr_interval) {
+    goto not_equal;
+  }
+

   ret = TRUE;
@@ -321,3 +328,17 @@ MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type (MixVideoConfigParam
   *delimiter_type = obj->delimiter_type;
   return MIX_RESULT_SUCCESS;
 }
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_IDR_interval (MixVideoConfigParamsEncH264 * obj,
+		guint idr_interval) {
+	MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj);
+	obj->idr_interval = idr_interval;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_IDR_interval (MixVideoConfigParamsEncH264 * obj,
+		guint * idr_interval) {
+	MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, idr_interval);
+	*idr_interval = obj->idr_interval;
+	return MIX_RESULT_SUCCESS;
+}
diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.h b/mix_video/src/mixvideoconfigparamsenc_h264.h
index b1334aa..1885846 100644
---
a/mix_video/src/mixvideoconfigparamsenc_h264.h +++ b/mix_video/src/mixvideoconfigparamsenc_h264.h @@ -12,6 +12,8 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoconfigparamsenc.h" #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOCONFIGPARAMSENC_H264: * @@ -83,6 +85,8 @@ struct _MixVideoConfigParamsEncH264 /* delimiter_type */ MixDelimiterType delimiter_type; + + guint idr_interval; /* Reserved for future use */ void *reserved1; @@ -236,5 +240,32 @@ MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParam MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type (MixVideoConfigParamsEncH264 * obj, MixDelimiterType * delimiter_type); + +/** + * mix_videoconfigparamsenc_h264_set_IDR_interval: + * @obj: #MixVideoConfigParamsEncH264 object + * @idr_interval: IDR interval + * @returns: Common Video Error Return Codes + * + * Set IDR interval + */ +MIX_RESULT mix_videoconfigparamsenc_h264_set_IDR_interval (MixVideoConfigParamsEncH264 * obj, + guint idr_interval); + + +/** + * mix_videoconfigparamsenc_h264_get_IDR_interval: + * @obj: #MixVideoConfigParamsEncH264 object + * @idr_interval: IDR interval to be returned + * @returns: Common Video Error Return Codes + * + * Get IDR interval + */ +MIX_RESULT mix_videoconfigparamsenc_h264_get_IDR_interval (MixVideoConfigParamsEncH264 * obj, + guint * idr_interval); + + +G_END_DECLS + #endif /* __MIX_VIDEOCONFIGPARAMSENC_H264_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h index 123a6ae..7ff32bc 100644 --- a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h +++ b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h @@ -12,6 +12,8 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoconfigparamsenc.h" #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4: * @@ -220,4 +222,6 @@ MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEnc MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, guint * fixed_vop_time_increment); +G_END_DECLS + #endif /* __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.h b/mix_video/src/mixvideoconfigparamsenc_preview.h index 8e57952..ddfe075 100644 --- a/mix_video/src/mixvideoconfigparamsenc_preview.h +++ b/mix_video/src/mixvideoconfigparamsenc_preview.h @@ -12,6 +12,8 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoconfigparamsenc.h" #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW: * @@ -116,5 +118,7 @@ MixVideoConfigParamsEncPreview */ #define mix_videoconfigparamsenc_preview_unref(obj) mix_params_unref(MIX_PARAMS(obj)) +G_END_DECLS + #endif /* __MIX_VIDEOCONFIGPARAMSENC_PREVIEW_H__ */ diff --git a/mix_video/src/mixvideodecodeparams.h b/mix_video/src/mixvideodecodeparams.h index 00cfa98..dfd614b 100644 --- a/mix_video/src/mixvideodecodeparams.h +++ b/mix_video/src/mixvideodecodeparams.h @@ -12,6 +12,8 @@ #include #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEODECODEPARAMS: * @@ -185,4 +187,6 @@ MIX_RESULT mix_videodecodeparams_set_discontinuity(MixVideoDecodeParams * obj, MIX_RESULT mix_videodecodeparams_get_discontinuity(MixVideoDecodeParams * obj, gboolean *discontinuity); +G_END_DECLS + #endif /* __MIX_VIDEODECODEPARAMS_H__ */ diff --git a/mix_video/src/mixvideodef.h 
b/mix_video/src/mixvideodef.h index d3adcd8..9f8651a 100644 --- a/mix_video/src/mixvideodef.h +++ b/mix_video/src/mixvideodef.h @@ -29,8 +29,11 @@ #ifndef __MIX_VIDEO_DEF_H__ #define __MIX_VIDEO_DEF_H__ + #include +G_BEGIN_DECLS + /* * MI-X video error code */ @@ -132,4 +135,39 @@ typedef enum } MixDelimiterType; +typedef enum { + MIX_ENC_PARAMS_START_UNUSED = 0x01000000, + MIX_ENC_PARAMS_BITRATE, + MIX_ENC_PARAMS_SLICE_SIZE, + MIX_ENC_PARAMS_RESOLUTION, + MIX_ENC_PARAMS_GOP_SIZE, + MIX_ENC_PARAMS_FRAME_RATE, + MIX_ENC_PARAMS_FORCE_KEY_FRAME, + MIX_ENC_PARAMS_IDR_INTERVAL, + MIX_ENC_PARAMS_RC_MODE, + MIX_ENC_PARAMS_MAX_ENCODED_SLICE_SIZE, + MIX_ENC_PARAMS_QP, + MIX_ENC_PARAMS_CIR_FRAME_CNT, + MIX_ENC_PARAMS_LAST +} MixEncParamsType; + +typedef struct _MixEncDynamicParams { + guint bitrate; + guint slice_num; + guint width; + guint height; + guint frame_rate_num; + guint frame_rate_denom; + guint intra_period; + guint idr_interval; + guint QP; + guint CIR_frame_cnt; + guint max_slice_size; + gboolean force_idr; + MixRateControl rc_mode; + +} MixEncDynamicParams; + +G_END_DECLS + #endif /* __MIX_VIDEO_DEF_H__ */ diff --git a/mix_video/src/mixvideoencodeparams.h b/mix_video/src/mixvideoencodeparams.h index 54804dd..ac8e6c2 100644 --- a/mix_video/src/mixvideoencodeparams.h +++ b/mix_video/src/mixvideoencodeparams.h @@ -12,6 +12,8 @@ #include #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOENCODEPARAMS: * @@ -147,5 +149,7 @@ MIX_RESULT mix_videoencodeparams_set_discontinuity(MixVideoEncodeParams * obj, MIX_RESULT mix_videoencodeparams_get_discontinuity(MixVideoEncodeParams * obj, gboolean *discontinuity); +G_END_DECLS + #endif /* __MIX_VIDEOENCODEPARAMS_H__ */ diff --git a/mix_video/src/mixvideoformat.c b/mix_video/src/mixvideoformat.c index f446651..c78423f 100644 --- a/mix_video/src/mixvideoformat.c +++ b/mix_video/src/mixvideoformat.c @@ -41,23 +41,27 @@ static void mix_videoformat_init(MixVideoFormat * self) { /* These are all public because MixVideoFormat objects are completely internal to MixVideo, no need for private members */ - self->initialized = FALSE; - self->framemgr = NULL; - self->surfacepool = NULL; - self->inputbufpool = NULL; - self->inputbufqueue = NULL; - self->va_display = NULL; - self->va_context = VA_INVALID_ID; - self->va_config = VA_INVALID_ID; - self->va_surfaces = NULL; - self->va_num_surfaces = 0; - self->mime_type = NULL; - self->frame_rate_num = 0; - self->frame_rate_denom = 0; - self->picture_width = 0; - self->picture_height = 0; - self->parse_in_progress = FALSE; - self->current_timestamp = 0; + self->initialized = FALSE; + self->va_initialized = FALSE; + self->framemgr = NULL; + self->surfacepool = NULL; + self->inputbufpool = NULL; + self->inputbufqueue = NULL; + self->va_display = NULL; + self->va_context = VA_INVALID_ID; + self->va_config = VA_INVALID_ID; + self->va_surfaces = NULL; + self->va_num_surfaces = 0; + self->mime_type = NULL; + self->frame_rate_num = 0; + self->frame_rate_denom = 0; + self->picture_width = 0; + self->picture_height = 0; + self->parse_in_progress = FALSE; + self->current_timestamp = (guint64)-1; + self->end_picture_pending = FALSE; + self->video_frame = NULL; + self->extra_surfaces = 0; } static void mix_videoformat_class_init(MixVideoFormatClass * klass) { @@ -143,6 +147,11 @@ void mix_videoformat_finalize(GObject * obj) { } } + if (mix->video_frame) + { + mix_videoframe_unref(mix->video_frame); + mix->video_frame = NULL; + } //Deinit input buffer queue diff --git a/mix_video/src/mixvideoformat.h 
b/mix_video/src/mixvideoformat.h index 2553667..2499934 100644 --- a/mix_video/src/mixvideoformat.h +++ b/mix_video/src/mixvideoformat.h @@ -23,6 +23,8 @@ #include "mixbufferpool.h" #include "mixvideoformatqueue.h" +G_BEGIN_DECLS + // Redefine the Handle defined in vbp_loader.h #define VBPhandle Handle @@ -56,32 +58,36 @@ typedef MIX_RESULT (*MixVideoFmtEndOfStreamFunc)(MixVideoFormat *mix); typedef MIX_RESULT (*MixVideoFmtDeinitializeFunc)(MixVideoFormat *mix); struct _MixVideoFormat { - /*< public > */ - GObject parent; - - /*< public > */ - - /*< private > */ - GMutex *objectlock; - gboolean initialized; - MixFrameManager *framemgr; - MixSurfacePool *surfacepool; - VADisplay va_display; - VAContextID va_context; - VAConfigID va_config; - VASurfaceID *va_surfaces; - guint va_num_surfaces; - VBPhandle parser_handle; - GString *mime_type; - guint frame_rate_num; - guint frame_rate_denom; - guint picture_width; - guint picture_height; - gboolean parse_in_progress; - gboolean discontinuity_frame_in_progress; - guint64 current_timestamp; - MixBufferPool *inputbufpool; - GQueue *inputbufqueue; + /*< public > */ + GObject parent; + + /*< public > */ + + /*< private > */ + GMutex *objectlock; + gboolean initialized; + MixFrameManager *framemgr; + MixSurfacePool *surfacepool; + VADisplay va_display; + VAContextID va_context; + VAConfigID va_config; + VASurfaceID *va_surfaces; + guint va_num_surfaces; + VBPhandle parser_handle; + GString *mime_type; + guint frame_rate_num; + guint frame_rate_denom; + guint picture_width; + guint picture_height; + gboolean parse_in_progress; + gboolean discontinuity_frame_in_progress; + guint64 current_timestamp; + MixBufferPool *inputbufpool; + GQueue *inputbufqueue; + gboolean va_initialized; + gboolean end_picture_pending; + MixVideoFrame* video_frame; + guint extra_surfaces; }; /** @@ -157,4 +163,6 @@ MIX_RESULT mix_videofmt_eos(MixVideoFormat *mix); MIX_RESULT mix_videofmt_deinitialize(MixVideoFormat *mix); +G_END_DECLS + #endif /* __MIX_VIDEOFORMAT_H__ */ diff --git a/mix_video/src/mixvideoformat_h264.c b/mix_video/src/mixvideoformat_h264.c index 18015fc..6464177 100644 --- a/mix_video/src/mixvideoformat_h264.c +++ b/mix_video/src/mixvideoformat_h264.c @@ -6,6 +6,7 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ #include +#include #ifndef ANDROID #include #endif @@ -17,14 +18,7 @@ static int mix_video_h264_counter = 0; #endif /* MIX_LOG_ENABLE */ -#ifdef ANDROID -typedef struct _NalBuffer { - unsigned char *buffer; - unsigned int offset; - unsigned int length; - void *appdata; -} NalBuffer; -#endif +#define DECODER_ROBUSTNESS /* The parent class. The pointer will be saved * in this class's initialization. 
The pointer
@@ -115,10 +109,9 @@ void mix_videoformat_h264_finalize(GObject * obj) {
     g_mutex_lock(parent->objectlock);
     parent->initialized = TRUE;
     parent->parse_in_progress = FALSE;
-    parent->current_timestamp = 0;
     //Close the parser
-    pret = vbp_close(parent->parser_handle);
+    pret = vbp_close(parent->parser_handle);
     parent->parser_handle = NULL;
     if (pret != VBP_OK)
     {
@@ -141,7 +134,7 @@ mix_videoformat_h264_ref(MixVideoFormat_H264 * mix) {
 /* H.264 vmethods implementation */
 MIX_RESULT mix_videofmt_h264_getcaps(MixVideoFormat *mix, GString *msg) {
-MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
     if (mix == NULL || msg == NULL)
     {
@@ -163,665 +156,1119 @@ MIX_RESULT ret = MIX_RESULT_SUCCESS;
     return ret;
 }
-MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix,
-        MixVideoConfigParamsDec * config_params,
-        MixFrameManager * frame_mgr,
-        MixBufferPool * input_buf_pool,
-        MixSurfacePool ** surface_pool,
-        VADisplay va_display ) {
+MIX_RESULT mix_videofmt_h264_initialize_va(
+    MixVideoFormat *mix,
+    vbp_data_h264 *data)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    VAStatus vret = VA_STATUS_SUCCESS;
+    VAConfigAttrib attrib;
+
+    MixVideoFormat *parent = MIX_VIDEOFORMAT(mix);
+    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+
+    if (parent->va_initialized)
+    {
+        LOG_W("va already initialized.\n");
+        return MIX_RESULT_SUCCESS;
+    }
+
+
+    LOG_V( "Begin\n");
+
+    //We are requesting RT attributes
+    attrib.type = VAConfigAttribRTFormat;
+    attrib.value = VA_RT_FORMAT_YUV420;
+
+    //Initialize and save the VA config ID
+    //We use high profile for all kinds of H.264 profiles (baseline, main and high)
+    vret = vaCreateConfig(
+        parent->va_display,
+        VAProfileH264High,
+        VAEntrypointVLD,
+        &attrib,
+        1,
+        &(parent->va_config));
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E("vaCreateConfig failed\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Codec data says num_ref_frames is %d\n", data->codec_data->num_ref_frames);
+
+
+    // handle both frame and field coding for interlaced content
+    int num_ref_pictures = data->codec_data->num_ref_frames;
+
+
+    //Adding 1 to work around VBLANK issue, and another 1 to compensate for the cached frame that
+    // will not start decoding until a new frame is received.
+    parent->va_num_surfaces = 1 + 1 + parent->extra_surfaces + (((num_ref_pictures + 3) <
+        MIX_VIDEO_H264_SURFACE_NUM) ?
+        (num_ref_pictures + 3)
+        : MIX_VIDEO_H264_SURFACE_NUM);
+
+    parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*parent->va_num_surfaces);
+    if (parent->va_surfaces == NULL)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "parent->va_surfaces == NULL.\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Codec data says picture size is %d x %d\n",
+        (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16,
+        (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16);
+    LOG_V( "getcaps says picture size is %d x %d\n", parent->picture_width, parent->picture_height);
+
+    vret = vaCreateSurfaces(
+        parent->va_display,
+        (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16,
+        (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16,
+        VA_RT_FORMAT_YUV420,
+        parent->va_num_surfaces,
+        parent->va_surfaces);
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Error allocating surfaces\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Created %d libva surfaces\n", parent->va_num_surfaces);
+
+    //Initialize the surface pool
+    ret = mix_surfacepool_initialize(
+        parent->surfacepool,
+        parent->va_surfaces,
+        parent->va_num_surfaces,
+        parent->va_display);
+
+    switch (ret)
+    {
+    case MIX_RESULT_SUCCESS:
+        break;
+    case MIX_RESULT_ALREADY_INIT:  //This case is for future use when we can be initialized multiple times.  It is to detect when we have not been reset before re-initializing.
+    default:
+        ret = MIX_RESULT_ALREADY_INIT;
+        LOG_E( "Error init surface pool\n");
+        goto cleanup;
+        break;
+    }
+
+    if (data->codec_data->pic_order_cnt_type == 0)
+    {
+        int max = (int)pow(2, data->codec_data->log2_max_pic_order_cnt_lsb_minus4 + 4);
+        mix_framemanager_set_max_picture_number(parent->framemgr, max);
+    }
+
+    //Initialize and save the VA context ID
+    //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2
+    vret = vaCreateContext(
+        parent->va_display,
+        parent->va_config,
+        parent->picture_width,
+        parent->picture_height,
+        0,  // no flag set
+        parent->va_surfaces,
+        parent->va_num_surfaces,
+        &(parent->va_context));
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Error initializing video driver\n");
+        goto cleanup;
+    }
+
+    parent->va_initialized = TRUE;
-    uint32 pret = 0;
-    MIX_RESULT ret = MIX_RESULT_SUCCESS;
-    enum _vbp_parser_type ptype = VBP_H264;
-    vbp_data_h264 *data = NULL;
-    MixVideoFormat *parent = NULL;
-    MixIOVec *header = NULL;
-    gint numprofs = 0, numactualprofs = 0;
-    gint numentrypts = 0, numactualentrypts = 0;
-    VADisplay vadisplay = NULL;
-    VAProfile *profiles = NULL;
-    VAEntrypoint *entrypts = NULL;
-    VAConfigAttrib attrib;
-    VAStatus vret = VA_STATUS_SUCCESS;
-    guint extra_surfaces = 0;
-    VASurfaceID *surfaces = NULL;
-    guint numSurfaces = 0;
-
-    //TODO Partition this method into smaller methods
-
-    if (mix == NULL || config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL)
-    {
-        LOG_E( "NUll pointer passed in\n");
-        return MIX_RESULT_NULL_PTR;
-    }
+
+cleanup:
+    /* nothing to clean up */
-    LOG_V( "Begin\n");
+    return ret;
-    /* Chainup parent method. */
+}
-    if (parent_class->initialize) {
-        ret = parent_class->initialize(mix, config_params,
-            frame_mgr, input_buf_pool, surface_pool,
-            va_display);
-    }
-    if (ret != MIX_RESULT_SUCCESS)
-    {
-        LOG_E( "Error initializing\n");
-        return ret;
-    }
+MIX_RESULT mix_videofmt_h264_update_ref_pic_list(
+    MixVideoFormat *mix,
+    VAPictureParameterBufferH264* picture_params,
+    VASliceParameterBufferH264* slice_params)
+{
+    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+
+    //Do slice parameters
+
+    //First patch up the List0 and List1 surface IDs
+    int j = 0;
+    guint poc = 0;
+    gpointer video_frame = NULL;
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    for (; j <= slice_params->num_ref_idx_l0_active_minus1; j++)
+    {
+        if (!(slice_params->RefPicList0[j].flags & VA_PICTURE_H264_INVALID))
+        {
+            poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList0[j]));
+            video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc);
+            if (video_frame == NULL)
+            {
+                LOG_E("unable to find surface of picture %d (current picture %d).",
+                    poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic)));
+                ret = MIX_RESULT_DROPFRAME;  //return non-fatal error
+                goto cleanup;
+            }
+            else
+            {
+                slice_params->RefPicList0[j].picture_id =
+                    ((MixVideoFrame *)video_frame)->frame_id;
+            }
+        }
+
+    }
+
+    if ((slice_params->slice_type == 1) || (slice_params->slice_type == 6))
+    {
+        for (j = 0; j <= slice_params->num_ref_idx_l1_active_minus1; j++)
+        {
+            if (!(slice_params->RefPicList1[j].flags & VA_PICTURE_H264_INVALID))
+            {
+                poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList1[j]));
+                video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc);
+                if (video_frame == NULL)
+                {
+                    LOG_E("unable to find surface of picture %d (current picture %d).",
+                        poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic)));
+                    ret = MIX_RESULT_DROPFRAME;  //return non-fatal error
+                    goto cleanup;
+                }
+                else
+                {
+                    slice_params->RefPicList1[j].picture_id =
+                        ((MixVideoFrame *)video_frame)->frame_id;
+                }
+            }
+        }
+    }
-    if (!MIX_IS_VIDEOFORMAT_H264(mix))
-        return MIX_RESULT_INVALID_PARAM;
+
+cleanup:
+    // do nothing
-    parent = MIX_VIDEOFORMAT(mix);
-    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+    return ret;
+}
-    LOG_V( "Locking\n");
-    //From now on, we exit this function through cleanup:
-    g_mutex_lock(parent->objectlock);
-    LOG_V( "Before vbp_open\n");
-    //Load the bitstream parser
-    pret = vbp_open(ptype, &(parent->parser_handle));
+MIX_RESULT mix_videofmt_h264_decode_a_slice(
+    MixVideoFormat *mix,
+    vbp_data_h264 *data,
+    int picture_index,
+    int slice_index)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    VAStatus vret = VA_STATUS_SUCCESS;
+    VADisplay vadisplay = NULL;
+    VAContextID vacontext;
+    guint buffer_id_cnt = 0;
+
+    // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
+    VABufferID buffer_ids[4];
-    LOG_V( "After vbp_open\n");
-    if (!(pret == VBP_OK))
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Error opening parser\n");
-        goto cleanup;
-    }
-    LOG_V( "Opened parser\n");
-    ret = mix_videoconfigparamsdec_get_header(config_params,
-        &header);
+    LOG_V( "Begin\n");
-    if ((ret != MIX_RESULT_SUCCESS) || (header == NULL))
+    //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+
+    vbp_picture_data_h264* pic_data = &(data->pic_data[picture_index]);
+    vbp_slice_data_h264* slice_data = &(pic_data->slc_data[slice_index]);
+    VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms;
+    VASliceParameterBufferH264* slice_params = &(slice_data->slc_parms);
+    vadisplay = mix->va_display;
+    vacontext = mix->va_context;
+
+#ifdef DECODER_ROBUSTNESS
+    if ((slice_params->first_mb_in_slice == 0) || (!mix->end_picture_pending))
+#else
+    if (slice_params->first_mb_in_slice == 0)
+#endif
+    {
+        // this is the first slice of the picture
+        if (mix->end_picture_pending)
         {
+            // interlace content, decoding the first field
+            vret = vaEndPicture(vadisplay, vacontext);
+            if (vret != VA_STATUS_SUCCESS)
+            {
+                ret = MIX_RESULT_FAIL;
+                LOG_E("vaEndPicture failed.\n");
+                goto cleanup;
+            }
+
+            // for interlace content, top field may be valid only after the second field is parsed
+            mix_videoframe_set_displayorder(mix->video_frame, pic_params->CurrPic.TopFieldOrderCnt);
         }
-    ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params,
-        &extra_surfaces);
+        gulong surface = 0;
+        LOG_V("mix->video_frame = 0x%x\n", mix->video_frame);
+
+        //Get our surface ID from the frame object
+        ret = mix_videoframe_get_frame_id(mix->video_frame, &surface);
         if (ret != MIX_RESULT_SUCCESS)
         {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Cannot get extra surface allocation setting\n");
-        goto cleanup;
+            LOG_E( "Error getting surface ID from frame object\n");
+            goto cleanup;
         }
-    LOG_V( "Calling parse on header data, handle %d\n", (int)parent->parser_handle);
+#ifdef DECODER_ROBUSTNESS
+        LOG_V( "Updating DPB for libva\n");
-    pret = vbp_parse(parent->parser_handle, header->data,
-        header->data_size, TRUE);
+        //Now handle the reference frames and surface IDs for DPB and current frame
+        mix_videofmt_h264_handle_ref_frames(mix, pic_params, mix->video_frame);
-    if (!((pret == VBP_OK) || (pret == VBP_DONE)))
+#ifdef HACK_DPB
+        //We have to provide a hacked DPB rather than complete DPB for libva as workaround
+        ret = mix_videofmt_h264_hack_dpb(mix, pic_data);
+        if (ret != MIX_RESULT_SUCCESS)
         {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Error parsing header data\n");
+            LOG_E( "Error reference frame not found\n");
+            //Need to remove the frame we inserted in _handle_ref_frames above, since we are not going to decode it
+            mix_videofmt_h264_cleanup_ref_frame(mix, pic_params, mix->frame);
             goto cleanup;
         }
+#endif
-    LOG_V( "Parsed header\n");
+        LOG_V( "Calling vaBeginPicture\n");
-    //Get the header data and save
-    pret = vbp_query(parent->parser_handle, (void *)&data);
+        //Now we can begin the picture
+        vret = vaBeginPicture(vadisplay, vacontext, surface);
+        if (vret != VA_STATUS_SUCCESS)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E( "Video driver returned error from vaBeginPicture\n");
+            goto cleanup;
+        }
-    if ((pret != VBP_OK) || (data == NULL))
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Error reading parsed header data\n");
-        goto cleanup;
-    }
+        // vaBeginPicture needs a matching vaEndPicture
+        mix->end_picture_pending = TRUE;
-    LOG_V( "Queried parser for header data\n");
+#else
+        LOG_V( "Calling vaBeginPicture\n");
-    //Time for libva initialization
+        //Now we can begin the picture
+        vret = vaBeginPicture(vadisplay, vacontext, surface);
+        if (vret != VA_STATUS_SUCCESS)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E( "Video driver returned error from vaBeginPicture\n");
+            goto cleanup;
+        }
-    vadisplay = parent->va_display;
+        // vaBeginPicture needs a matching vaEndPicture
+        mix->end_picture_pending = TRUE;
-    numprofs = vaMaxNumProfiles(vadisplay);
-    profiles = g_malloc(numprofs*sizeof(VAProfile));
+        LOG_V( "Updating DPB for libva\n");
-    if (!profiles)
-    {
-        ret = MIX_RESULT_NO_MEMORY;
-        LOG_E( "Error allocating memory\n");
-        goto cleanup;
-    }
+        //Now handle the reference frames and surface IDs for DPB and current frame
+        mix_videofmt_h264_handle_ref_frames(mix, pic_params, mix->video_frame);
-    vret = vaQueryConfigProfiles(vadisplay, profiles,
-        &numactualprofs);
-    if (!(vret == VA_STATUS_SUCCESS))
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Error initializing video driver\n");
+#ifdef HACK_DPB
+        //We have to provide a hacked DPB rather than complete DPB for libva as workaround
+        ret = mix_videofmt_h264_hack_dpb(mix, pic_data);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E( "Error reference frame not found\n");
             goto cleanup;
-    }
+        }
+#endif
-    //check the desired profile support
-    gint vaprof = 0;
+#endif
-    //TODO Need to cover more cases
-    switch (data->codec_data->profile_idc)
-    {
-#if 1
-//TODO Reinstate this once constraint_set1 flag has been added to codec_data
-    case 66: //Baseline profile
+        //Libva buffer set up
-        LOG_V( "mix_videofmt_h264_initialize: Baseline profile\n");
-        if (data->codec_data->constraint_set1_flag == 0)
-        {
-            for (; vaprof < numactualprofs; vaprof++)
-            {
-                if (profiles[vaprof] == VAProfileH264Baseline)
-                    break;
-            }
-        } else
-        {
-            for (; vaprof < numactualprofs; vaprof++)
-            {
-                if (profiles[vaprof] == VAProfileH264High)
-                    break;
-            }
-        }
-        if ((vaprof >= numprofs) || ((profiles[vaprof] != VAProfileH264Baseline) && (profiles[vaprof] != VAProfileH264High)))
-        //Did not get the profile we wanted
-        {
-            ret = MIX_RESULT_FAIL;
-            LOG_E( "Profile not supported by driver\n");
-            goto cleanup;
-        }
-        break;
-#endif
-#if 0
-//Code left in place in case bug is fixed in libva
-    case 77: //Main profile (need to set to High for libva bug)
-        LOG_V( "mix_videofmt_h264_initialize: Main profile\n");
-
-        for (; vaprof < numactualprofs; vaprof++)
-        {
-            if (profiles[vaprof] == VAProfileH264Main)
-                break;
-        }
-        if (vaprof >= numprofs || profiles[vaprof] != VAProfileH264Main)
-        //Did not get the profile we wanted
-        {
-            ret = MIX_RESULT_FAIL;
-            LOG_E( "Profile not supported by driver\n");
-            goto cleanup;
-        }
-        break;
+        LOG_V( "Creating libva picture parameter buffer\n");
+        LOG_V( "picture parameter buffer shows num_ref_frames is %d\n", pic_params->num_ref_frames);
+
+        //First the picture parameter buffer
+        vret = vaCreateBuffer(
+            vadisplay,
+            vacontext,
+            VAPictureParameterBufferType,
+            sizeof(VAPictureParameterBufferH264),
+            1,
+            pic_params,
+            &buffer_ids[buffer_id_cnt]);
+
+        if (vret != VA_STATUS_SUCCESS)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E( "Video driver returned error from vaCreateBuffer\n");
+            goto cleanup;
+        }
+
+        buffer_id_cnt++;
+
+        LOG_V( "Creating libva IQMatrix buffer\n");
+
+
+        //Then the IQ matrix buffer
+        vret = vaCreateBuffer(
+            vadisplay,
+            vacontext,
+            VAIQMatrixBufferType,
+            sizeof(VAIQMatrixBufferH264),
+            1,
+            data->IQ_matrix_buf,
+            &buffer_ids[buffer_id_cnt]);
+
+        if (vret != VA_STATUS_SUCCESS)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E( "Video driver returned error from vaCreateBuffer\n");
+            goto cleanup;
+        }
+        buffer_id_cnt++;
+    }
+
+#ifndef DECODER_ROBUSTNESS
+    if (!mix->end_picture_pending)
+    {
+        LOG_E("first slice is lost??????????\n");
+        ret = MIX_RESULT_DROPFRAME;
+        goto cleanup;
+    }
 #endif
-    case 100: //High profile
-    default:  //Set to High as default
+    //Now for slices
-        LOG_V( "High profile\n");
+    ret = mix_videofmt_h264_update_ref_pic_list(mix, pic_params, slice_params);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E("mix_videofmt_h264_update_ref_pic_list failed.\n");
+        goto cleanup;
+    }
-        for (; vaprof < numactualprofs; vaprof++)
-        {
-            if (profiles[vaprof] == VAProfileH264High)
-                break;
-        }
-        if (vaprof >= numprofs || profiles[vaprof] != VAProfileH264High)
-        //Did not get the profile we wanted
-        {
-            ret = MIX_RESULT_FAIL;
-            LOG_E( "Profile not supported by driver\n");
-            goto cleanup;
-        }
-        break;
+    LOG_V( "Creating libva slice parameter buffer\n");
+    vret = vaCreateBuffer(
+        vadisplay,
+        vacontext,
+        VASliceParameterBufferType,
+        sizeof(VASliceParameterBufferH264),
+        1,
+        slice_params,
+        &buffer_ids[buffer_id_cnt]);
-    }
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaCreateBuffer\n");
+        goto cleanup;
+    }
-    numentrypts = vaMaxNumEntrypoints(vadisplay);
-    entrypts = g_malloc(numentrypts*sizeof(VAEntrypoint));
+    buffer_id_cnt++;
-    if (!entrypts)
-    {
-        ret = MIX_RESULT_NO_MEMORY;
-        LOG_E( "Error allocating memory\n");
-        goto cleanup;
-    }
-    vret = vaQueryConfigEntrypoints(vadisplay, profiles[vaprof],
-        entrypts, &numactualentrypts);
-    if (!(vret == VA_STATUS_SUCCESS))
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Error initializing driver\n");
-        goto cleanup;
-    }
+    //Do slice data
-    gint vaentrypt = 0;
-    for (; vaentrypt < numactualentrypts; vaentrypt++)
-    {
-        if (entrypts[vaentrypt] == VAEntrypointVLD)
-            break;
-    }
-    if (vaentrypt >= numentrypts || entrypts[vaentrypt] != VAEntrypointVLD)
-    //Did not get the entrypt we wanted
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Entry point not supported by driver\n");
-        goto cleanup;
-    }
+    //slice data buffer pointer
+    //Note that this is the original data buffer ptr;
+    // offset to the actual slice data is provided in
+    // slice_data_offset in VASliceParameterBufferH264
+
+    LOG_V( "Creating libva slice data buffer, using slice address %x, with offset %d and size %u\n",
+        (guint)slice_data->buffer_addr, slice_params->slice_data_offset, slice_data->slice_size);
+
+    vret = vaCreateBuffer(
+        vadisplay,
+        vacontext,
+        VASliceDataBufferType,
+        slice_data->slice_size, //size
+        1,        //num_elements
+        slice_data->buffer_addr + slice_data->slice_offset,
+        &buffer_ids[buffer_id_cnt]);
-    //We are requesting RT attributes
-    attrib.type = VAConfigAttribRTFormat;
+    buffer_id_cnt++;
-    vret = vaGetConfigAttributes(vadisplay, profiles[vaprof],
-        entrypts[vaentrypt], &attrib, 1);
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaCreateBuffer\n");
+        goto cleanup;
+    }
-    //TODO Handle other values returned for RT format
-    // and check with requested format provided in config params
-    //Right now only YUV 4:2:0 is supported by libva
-    // and this is our default
-    if (((attrib.value & VA_RT_FORMAT_YUV420) == 0) ||
-        vret != VA_STATUS_SUCCESS)
+
+    LOG_V( "Calling vaRenderPicture\n");
+
+    //Render the picture
+    vret = vaRenderPicture(
+        vadisplay,
+        vacontext,
+        buffer_ids,
+        buffer_id_cnt);
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaRenderPicture\n");
+        goto cleanup;
+    }
+
+
+cleanup:
+    LOG_V( "End\n");
+
+    return ret;
+
+}
+
+
+MIX_RESULT mix_videofmt_h264_decode_end(
+    MixVideoFormat *mix,
+    gboolean drop_picture)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    VAStatus vret = VA_STATUS_SUCCESS;
+    MixVideoFormat* parent = MIX_VIDEOFORMAT(mix);
+    //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+
+    LOG_V("Begin\n");
+
+    if (!parent->end_picture_pending)
+    {
+        if (parent->video_frame)
         {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Error initializing driver\n");
-        goto cleanup;
+            ret = MIX_RESULT_FAIL;
+            LOG_E("Unexpected: video_frame is not unreferenced.\n");
         }
+        goto cleanup;
+    }
+
+    if (parent->video_frame == NULL)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E("Unexpected: video_frame has been unreferenced.\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Calling vaEndPicture\n");
+    vret = vaEndPicture(parent->va_display, parent->va_context);
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaEndPicture\n");
+        goto cleanup;
+    }
-    //Initialize and save the VA config ID
-    vret = vaCreateConfig(vadisplay, profiles[vaprof],
-        entrypts[vaentrypt], &attrib, 1, &(parent->va_config));
+#if 0   /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */
-    if (!(vret == VA_STATUS_SUCCESS))
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Error initializing driver\n");
-        goto cleanup;
-    }
+    LOG_V( "Calling vaSyncSurface\n");
-    LOG_V( "Created libva config with profile %d\n", vaprof);
+    //Decode the picture
+    vret = vaSyncSurface(parent->va_display, parent->video_frame->frame_id);
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaSyncSurface\n");
+        goto cleanup;
+    }
+#endif
-    //Initialize the surface pool
+    if (drop_picture)
+    {
+        // we are asked to drop this decoded picture
+        mix_videoframe_unref(parent->video_frame);
+        parent->video_frame = NULL;
+        goto cleanup;
+    }
+
+    LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n",
+        parent->current_timestamp);
+
+    //Enqueue the decoded frame using frame manager
+    ret = mix_framemanager_enqueue(parent->framemgr, parent->video_frame);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Error enqueuing frame object\n");
+        goto cleanup;
+    }
+    else
+    {
+        // video frame is passed to frame manager
+        parent->video_frame = NULL;
+        LOG_V("video_frame is assigned to be NULL !\n");
+    }
-    LOG_V( "Codec data says num_ref_frames is %d\n", data->codec_data->num_ref_frames);
+cleanup:
+    if (parent->video_frame)
+    {
+        /* this always indicates an error */
+        mix_videoframe_unref(parent->video_frame);
+        parent->video_frame = NULL;
+    }
+    parent->end_picture_pending = FALSE;
+    LOG_V("End\n");
+    return ret;
+}
-    // handle both frame and field coding for interlaced content
-    int num_ref_pictures = data->codec_data->num_ref_frames;
-    if (!data->codec_data->frame_mbs_only_flag &&
-        !data->codec_data->mb_adaptive_frame_field_flag)
-    {
-
-        // field coding, two fields share the same surface.
-        //num_ref_pictures *= 2;
-    }
-    //Adding 1 to work around VBLANK issue
-    parent->va_num_surfaces = 1 + extra_surfaces + (((num_ref_pictures + 3) <
-        MIX_VIDEO_H264_SURFACE_NUM) ?
-        (num_ref_pictures + 3)
-        : MIX_VIDEO_H264_SURFACE_NUM);
-
-    numSurfaces = parent->va_num_surfaces;
-
-    parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces);
+MIX_RESULT mix_videofmt_h264_decode_continue(
+    MixVideoFormat *mix,
+    vbp_data_h264 *data)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    int i, j;
+    vbp_picture_data_h264* pic_data = NULL;
-    surfaces = parent->va_surfaces;
+    LOG_V("Begin\n");
-    if (surfaces == NULL)
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Cannot allocate temporary data\n");
-        goto cleanup;
-    }
+    for (i = 0; i < data->num_pictures; i++)
+    {
+        pic_data = &(data->pic_data[i]);
+        if (pic_data->pic_parms == NULL)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E("pic_data->pic_parms is NULL.\n");
+            goto cleanup;
+        }
-    LOG_V( "Codec data says picture size is %d x %d\n", (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16);
-    LOG_V( "getcaps says picture size is %d x %d\n", parent->picture_width, parent->picture_height);
+        if (pic_data->slc_data == NULL)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E("pic_data->slc_data is NULL.\n");
+            goto cleanup;
+        }
-    vret = vaCreateSurfaces(vadisplay, (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16,
-        (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16, entrypts[vaentrypt],
-        numSurfaces, surfaces);
+        if (pic_data->num_slices == 0)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E("pic_data->num_slices == 0.\n");
+            goto cleanup;
+        }
-    if (!(vret == VA_STATUS_SUCCESS))
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Error allocating surfaces\n");
-        goto cleanup;
-    }
+        LOG_V( "num_slices is %d\n", pic_data->num_slices);
+        for (j = 0; j < pic_data->num_slices; j++)
+        {
+            LOG_V( "Decoding slice %d\n", j);
+            ret = mix_videofmt_h264_decode_a_slice(mix, data, i, j);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "mix_videofmt_h264_decode_a_slice failed, error = %#X.", ret);
+                goto cleanup;
+            }
+        }
+    }
-    parent->surfacepool = mix_surfacepool_new();
-    *surface_pool = parent->surfacepool;
+cleanup:
+    // nothing to cleanup;
-    if (parent->surfacepool == NULL)
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Error initializing surface pool\n");
-        goto cleanup;
-    }
-
+    LOG_V("End\n");
+    return ret;
+}
-    ret = mix_surfacepool_initialize(parent->surfacepool,
-        surfaces, numSurfaces, vadisplay);
-    switch (ret)
-    {
-    case MIX_RESULT_SUCCESS:
-        break;
-    case MIX_RESULT_ALREADY_INIT:  //This case is for future use when we can be initialized multiple times.  It is to detect when we have not been reset before re-initializing.
-    default:
-        ret = MIX_RESULT_ALREADY_INIT;
-        LOG_E( "Error init failure\n");
-        goto cleanup;
-        break;
-    }
+MIX_RESULT mix_videofmt_h264_set_frame_type(
+    MixVideoFormat *mix,
+    vbp_data_h264 *data)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    //Set the picture type (I, B or P frame)
+    //For H.264 we use the first encountered slice type for this (check - may need to change later to search all slices for B type)
+    MixFrameType frame_type = TYPE_INVALID;
+
+    switch (data->pic_data[0].slc_data[0].slc_parms.slice_type)
+    {
+    case 0:
+    case 3:
+    case 5:
+    case 8:
+        frame_type = TYPE_P;
+        break;
+    case 1:
+    case 6:
+        frame_type = TYPE_B;
+        break;
+    case 2:
+    case 4:
+    case 7:
+    case 9:
+        frame_type = TYPE_I;
+        break;
+    default:
+        break;
+    }
+
+    //Do not have to check for B frames after a seek
+    //Note: Demux should seek to IDR (instantaneous decoding refresh) frame, otherwise
+    // DPB will not be correct and frames may come in with invalid references
+    // This will be detected when DPB is checked for valid mapped surfaces and
+    // error returned from there.
-    LOG_V( "Created %d libva surfaces\n", numSurfaces);
+    LOG_V( "frame type is %d\n", frame_type);
-    //Initialize and save the VA context ID
-    //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2
-    vret = vaCreateContext(vadisplay, parent->va_config,
-        parent->picture_width, parent->picture_height,
-        0, surfaces, numSurfaces,
-        &(parent->va_context));
-    if (!(vret == VA_STATUS_SUCCESS))
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Error initializing video driver\n");
-        goto cleanup;
-    }
+    //Set the frame type for the frame object (used in reordering by frame manager)
+    ret = mix_videoframe_set_frame_type(mix->video_frame, frame_type);
-    LOG_V( "Created libva context width %d, height %d\n", parent->picture_width, parent->picture_height);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Error setting frame type on frame\n");
+    }
-    //Create our table of Decoded Picture Buffer "in use" surfaces
-    self->dpb_surface_table = g_hash_table_new_full(NULL, NULL, mix_videofmt_h264_destroy_DPB_key, mix_videofmt_h264_destroy_DPB_value);
+    return ret;
+}
-    if (self->dpb_surface_table == NULL)
-    {
-        ret = MIX_RESULT_NO_MEMORY;
-        LOG_E( "Error allocating dbp surface table\n");
-        goto cleanup;  //leave this goto here in case other code is added between here and cleanup label
-    }
-    cleanup:
-    if (ret != MIX_RESULT_SUCCESS) {
-        pret = vbp_close(parent->parser_handle);
-        parent->parser_handle = NULL;
-        parent->initialized = FALSE;
+MIX_RESULT mix_videofmt_h264_set_frame_structure(
+    MixVideoFormat *mix,
+    vbp_data_h264 *data)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
-    } else {
-        parent->initialized = TRUE;
-    }
+    if (data->pic_data[0].pic_parms->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD)
+    {
+        mix_videoframe_set_frame_structure(mix->video_frame, VA_BOTTOM_FIELD | VA_TOP_FIELD);
+    }
+    else
+    {
+        mix_videoframe_set_frame_structure(mix->video_frame, VA_FRAME_PICTURE);
+    }
+
+    return ret;
+}
-    if (header != NULL)
-    {
-        if (header->data != NULL)
-            g_free(header->data);
-        g_free(header);
-        header = NULL;
-    }
-    g_free(profiles);
-    g_free(entrypts);
+MIX_RESULT mix_videofmt_h264_decode_begin(
+    MixVideoFormat *mix,
+    vbp_data_h264 *data)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
-    LOG_V( "Unlocking\n");
-    g_mutex_unlock(parent->objectlock);
+    //Get a frame from the surface pool
+    LOG_V("Begin\n");
+    ret = mix_surfacepool_get(mix->surfacepool, &(mix->video_frame));
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Error getting frame from surfacepool\n");
+        return ret;
+    }
+
+    /* the following calls will always succeed */
+
+    // set frame type
+    ret = mix_videofmt_h264_set_frame_type(mix, data);
+
+    // set frame structure
+    ret = mix_videofmt_h264_set_frame_structure(mix, data);
+
+    //Set the discontinuity flag
+    mix_videoframe_set_discontinuity(mix->video_frame, mix->discontinuity_frame_in_progress);
+
+    //Set the timestamp
+    mix_videoframe_set_timestamp(mix->video_frame, mix->current_timestamp);
+
+    // Set displayorder
+    ret = mix_videoframe_set_displayorder(mix->video_frame,
+        data->pic_data[0].pic_parms->CurrPic.TopFieldOrderCnt);
+    if(ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E("Error setting displayorder\n");
+        return ret;
+    }
+
+    ret = mix_videofmt_h264_decode_continue(mix, data);
+
+    LOG_V("End\n");
+    return ret;

 }
-MIX_RESULT mix_videofmt_h264_decode(MixVideoFormat *mix, MixBuffer * bufin[],
-        gint bufincnt, MixVideoDecodeParams * decode_params) {
-    uint32 pret = 0;
-    int i = 0;
-    MixVideoFormat *parent = NULL;
-    MIX_RESULT ret = MIX_RESULT_SUCCESS;
-    guint64 ts = 0;
-    vbp_data_h264 *data = NULL;
-    gboolean discontinuity = FALSE;
-    MixInputBufferEntry *bufentry = NULL;
+MIX_RESULT mix_videofmt_h264_decode_a_buffer(
+    MixVideoFormat *mix,
+    MixBuffer * bufin,
+    guint64 ts,
+    gboolean discontinuity)
+{
+    uint32 pret = 0;
+    MixVideoFormat *parent = NULL;
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    vbp_data_h264 *data = NULL;
+
+    LOG_V( "Begin\n");
+
+    parent = MIX_VIDEOFORMAT(mix);
+
+    LOG_V( "Calling parse for current frame, parse handle %d\n", (int)parent->parser_handle);
+    pret = vbp_parse(parent->parser_handle,
+        bufin->data,
+        bufin->size,
+        FALSE);
+
+    LOG_V( "Called parse for current frame\n");
+    if ((pret != VBP_DONE) &&(pret != VBP_OK))
+    {
+        ret = MIX_RESULT_DROPFRAME;
+        LOG_E( "vbp_parse failed.\n");
+        goto cleanup;
+    }
+
+    //query for data
+    pret = vbp_query(parent->parser_handle, (void *) &data);
+
+    if ((pret != VBP_OK) || (data == NULL))
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "vbp_query failed.\n");
+        goto cleanup;
+    }
+    LOG_V( "Called query for current frame\n");
+
+
+    if (data->has_sps == 0 || data->has_pps == 0)
+    {
+        ret = MIX_RESULT_SUCCESS;
+        LOG_V("SPS or PPS is not available.\n");
+        goto cleanup;
+    }
+
+    if (parent->va_initialized == FALSE)
+    {
+        LOG_V("try initializing VA...\n");
+        ret = mix_videofmt_h264_initialize_va(parent, data);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_V("mix_videofmt_h264_initialize_va failed.\n");
+            goto cleanup;
+        }
+    }
+
+    // first pic_data always exists, check if any slice is parsed
+    if (data->pic_data[0].num_slices == 0)
+    {
+        ret = MIX_RESULT_SUCCESS;
+        LOG_V("slice is not available.\n");
+        goto cleanup;
+    }
+
+    guint64 last_ts = parent->current_timestamp;
+    parent->current_timestamp = ts;
+    parent->discontinuity_frame_in_progress = discontinuity;
+
+    LOG_V("ts = %lli last_ts = %lli\n", ts, last_ts);
+
+    if (last_ts != ts)
+    {
+        // finish decoding the last frame
+        ret = mix_videofmt_h264_decode_end(parent, FALSE);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_V("mix_videofmt_h264_decode_end failed.\n");
+            goto cleanup;
+        }
-    LOG_V( "Begin\n");
+        // start decoding a new frame
+        ret = mix_videofmt_h264_decode_begin(parent, data);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_V("mix_videofmt_h264_decode_begin failed.\n");
+            goto cleanup;
+        }
+    }
+    else
+    {
+        // partial frame
+        LOG_V("partial frame handling...\n");
+        ret = mix_videofmt_h264_decode_continue(parent, data);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_V("mix_videofmt_h264_decode_continue failed.\n");
+            goto cleanup;
+        }
+    }
-    if (mix == NULL || bufin == NULL || decode_params == NULL )
-    {
-        LOG_E( "NUll pointer passed in\n");
-        return MIX_RESULT_NULL_PTR;
-    }
+    cleanup:
-    /* Chainup parent method.
-        We are not chaining up to parent method for now.
-     */
+    LOG_V( "End\n");
-#if 0
-    if (parent_class->decode) {
-        return parent_class->decode(mix, bufin, bufincnt,
-            decode_params);
-    }
-#endif
+    return ret;
+}
-    if (!MIX_IS_VIDEOFORMAT_H264(mix))
-        return MIX_RESULT_INVALID_PARAM;
-    parent = MIX_VIDEOFORMAT(mix);
+MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix,
+        MixVideoConfigParamsDec * config_params,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay va_display ) {
+    uint32 pret = 0;
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    enum _vbp_parser_type ptype = VBP_H264;
+    vbp_data_h264 *data = NULL;
+    MixVideoFormat *parent = NULL;
+    MixIOVec *header = NULL;
+    guint pic_width_in_codec_data = 0;
+    guint pic_height_in_codec_data = 0;
+
+    if (mix == NULL || config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL)
+    {
+        LOG_E( "Null pointer passed in\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "Begin\n");
+
+    /* Chainup parent method. */
+
+    if (parent_class->initialize) {
+        ret = parent_class->initialize(mix, config_params,
+            frame_mgr, input_buf_pool, surface_pool,
+            va_display);
+    }
+
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Error initializing\n");
+        return ret;
+    }
+
+    if (!MIX_IS_VIDEOFORMAT_H264(mix))
+        return MIX_RESULT_INVALID_PARAM;
+
+    parent = MIX_VIDEOFORMAT(mix);
+    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+
+    LOG_V( "Locking\n");
+    //From now on, we exit this function through cleanup:
+    g_mutex_lock(parent->objectlock);
+
+    parent->surfacepool = mix_surfacepool_new();
+    *surface_pool = parent->surfacepool;
+
+    if (parent->surfacepool == NULL)
+    {
+        ret = MIX_RESULT_NO_MEMORY;
+        LOG_E( "parent->surfacepool == NULL.\n");
+        goto cleanup;
+    }
+
+    //Create our table of Decoded Picture Buffer "in use" surfaces
+    self->dpb_surface_table = g_hash_table_new_full(
+        NULL,
+        NULL,
+        mix_videofmt_h264_destroy_DPB_key,
+        mix_videofmt_h264_destroy_DPB_value);
+
+    if (self->dpb_surface_table == NULL)
+    {
+        ret = MIX_RESULT_NO_MEMORY;
+        LOG_E( "Error allocating dpb surface table\n");
+        goto cleanup;  //leave this goto here in case other code is added between here and cleanup label
+    }
+
+    ret = mix_videoconfigparamsdec_get_extra_surface_allocation(
+        config_params,
+        &parent->extra_surfaces);
+
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Cannot get extra surface allocation setting\n");
+        goto cleanup;
+    }

+    LOG_V( "Before vbp_open\n");
+    //Load the bitstream parser
+    pret = vbp_open(ptype, &(parent->parser_handle));
+    LOG_V( "After vbp_open\n");

+    if (!(pret == VBP_OK))
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Error opening parser\n");
+        goto cleanup;
+    }
+    LOG_V( "Opened parser\n");
+
+
+    ret = mix_videoconfigparamsdec_get_header(config_params, &header);
+
+    if ((ret != MIX_RESULT_SUCCESS) || (header == NULL))
+    {
+        // Delay initializing VA if codec configuration data is not ready, but don't return an error.
+        ret = MIX_RESULT_SUCCESS;
+        LOG_W( "Codec data is not available in the configuration parameter.\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Calling parse on header data, handle %d\n", (int)parent->parser_handle);
+
+    pret = vbp_parse(
+        parent->parser_handle,
+        header->data,
+        header->data_size,
+        TRUE);
+
+    if (!((pret == VBP_OK) || (pret == VBP_DONE)))
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Error parsing header data\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Parsed header\n");
+
+    //Get the header data and save
+    pret = vbp_query(parent->parser_handle, (void *)&data);
+
+    if ((pret != VBP_OK) || (data == NULL))
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Error reading parsed header data\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Queried parser for header data\n");
+
+    // Update the pic size according to the parsed codec_data
+    pic_width_in_codec_data = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16;
+    pic_height_in_codec_data = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16;
+    mix_videoconfigparamsdec_set_picture_res (config_params, pic_width_in_codec_data, pic_height_in_codec_data);
+    parent->picture_width = pic_width_in_codec_data;
+    parent->picture_height = pic_height_in_codec_data;
+
+    ret = mix_videofmt_h264_initialize_va(mix, data);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Error initializing va. \n");
+        goto cleanup;
+    }
+
+
+    cleanup:
+    if (ret != MIX_RESULT_SUCCESS) {
+        pret = vbp_close(parent->parser_handle);
+        parent->parser_handle = NULL;
+        parent->initialized = FALSE;

+    } else {
+        parent->initialized = TRUE;
+    }

+    if (header != NULL)
+    {
+        if (header->data != NULL)
+            g_free(header->data);
+        g_free(header);
+        header = NULL;
+    }


+    LOG_V( "Unlocking\n");
+    g_mutex_unlock(parent->objectlock);


+    return ret;
+}

+MIX_RESULT mix_videofmt_h264_decode(
+    MixVideoFormat *mix,
+    MixBuffer * bufin[],
+    gint bufincnt,
+    MixVideoDecodeParams * decode_params) {

+    int i = 0;
+    MixVideoFormat *parent = NULL;
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    guint64 ts = 0;
+    gboolean discontinuity = FALSE;

+    LOG_V( "Begin\n");

+    if (mix == NULL || bufin == NULL || decode_params == NULL || bufincnt == 0)
+    {
+        LOG_E( "Null pointer passed in\n");
+        return MIX_RESULT_NULL_PTR;
+    }

+    /* Chainup parent method.
+        We are not chaining up to parent method for now.
+     */

#if 0
+    if (parent_class->decode) {
+        return parent_class->decode(mix, bufin, bufincnt, decode_params);
+    }
#endif

+    if (!MIX_IS_VIDEOFORMAT_H264(mix))
+        return MIX_RESULT_INVALID_PARAM;

+    parent = MIX_VIDEOFORMAT(mix);

-    ret = mix_videodecodeparams_get_timestamp(decode_params,
-        &ts);
-    if (ret != MIX_RESULT_SUCCESS)
-    {
-        return MIX_RESULT_FAIL;
-    }
+    ret = mix_videodecodeparams_get_timestamp(decode_params, &ts);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        // never happen
+        return MIX_RESULT_FAIL;
+    }
-    ret = mix_videodecodeparams_get_discontinuity(decode_params,
-        &discontinuity);
-    if (ret != MIX_RESULT_SUCCESS)
-    {
-        return MIX_RESULT_FAIL;
-    }
+    ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        // never happen
+        return MIX_RESULT_FAIL;
+    }

     //From now on, we exit this function through cleanup:

-    LOG_V( "Locking\n");
-    g_mutex_lock(parent->objectlock);
+    LOG_V( "Locking\n");
+    g_mutex_lock(parent->objectlock);

-    LOG_V( "parse in progress is %d\n", parent->parse_in_progress);
-    //If this is a new frame and we haven't retrieved parser
-    // workload data from previous frame yet, do so
-    if ((ts != parent->current_timestamp) &&
-        (parent->parse_in_progress))
-    {
-        //query for data
-        pret = vbp_query(parent->parser_handle,
-            (void *) &data);
-
-        if ((pret != VBP_OK) || (data == NULL))
-        {
-            ret = MIX_RESULT_FAIL;
-            LOG_E( "Error initializing parser\n");
-            goto cleanup;
-        }
-
-        LOG_V( "Queried for last frame data\n");
-
-        //process and decode data
-        ret = mix_videofmt_h264_process_decode(mix,
-            data, parent->current_timestamp,
-            parent->discontinuity_frame_in_progress);
-
-        if (ret != MIX_RESULT_SUCCESS)
-        {
-            //We log this but need to process the new frame data, so do not return
-            LOG_E( "Process_decode failed.\n");
-        }
-
-        LOG_V( "Called process and decode for last frame\n");
-
-        parent->parse_in_progress = FALSE;
-
-    }
-
-    parent->current_timestamp = ts;
-    parent->discontinuity_frame_in_progress = discontinuity;
-
-    LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_h264_counter++, ts);
-
-    for (i = 0; i < bufincnt; i++)
-    {
-
-        LOG_V( "Calling parse for current frame, parse handle %d, buf %x, size %d\n", (int)parent->parser_handle, (guint)bufin[i]->data, bufin[i]->size);
-
-#ifndef ANDROID
-        pret = vbp_parse(parent->parser_handle,
-            bufin[i]->data,
-            bufin[i]->size,
-            FALSE);
-#else
-        /* we got an array of NALs for a frame */
-        {
-            gint nal_index = 0;
-            NalBuffer *nals = (NalBuffer *)bufin[i]->data;
-            gint nal_count = bufin[i]->size;
-
-            LOG_V("nal_count = %d\n", nal_count);
-            for(nal_index = 0; nal_index < nal_count; nal_index ++) {
-
-                LOG_V("nals[%d].offset = 0x%x nals[nal_index].length = %d\n",
-                    nal_index, nals[nal_index].offset, nals[nal_index].length);
-
-                pret = vbp_parse(parent->parser_handle,
-                    nals[nal_index].buffer + nals[nal_index].offset,
-                    nals[nal_index].length,
-                    FALSE);
-
-                LOG_V("nal_index = %d pret = 0x%x\n", nal_index, pret);
-
-                if(pret != VBP_OK && pret != VBP_DONE) {
-                    ret = MIX_RESULT_FAIL;
-                    LOG_E( "Error parsing data : pret = 0x%x\n", pret);
-                    goto cleanup;
-                }
-            }
-        }
-#endif
-
-        LOG_V( "Called parse for current frame\n");
-
-        if ((pret == VBP_DONE) || (pret == VBP_OK))
-        {
-            //query for data
-            pret = vbp_query(parent->parser_handle,
-                (void *) &data);
-
-            if ((pret != VBP_OK) || (data == NULL))
-            {
-                ret = MIX_RESULT_FAIL;
-                LOG_E( "Error getting parser data\n");
-                goto cleanup;
-            }
-
-            LOG_V( "Called query for current frame\n");
-
-            //Increase the ref count of this input buffer
-            mix_buffer_ref(bufin[i]);
-
-            //Create a new MixInputBufferEntry
-            //TODO make this from a pool to optimize
-            bufentry = g_malloc(sizeof(
-                MixInputBufferEntry));
-            if (bufentry == NULL)
-            {
-                ret = MIX_RESULT_NO_MEMORY;
-                LOG_E( "Error allocating bufentry\n");
-                goto cleanup;
-            }
-
-            bufentry->buf = bufin[i];
-            LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"G_GINT64_FORMAT"\n", (guint)bufentry, (guint)bufentry->buf, ts);
-            bufentry->timestamp = ts;
-
-            LOG_V( "Enqueue this input buffer for current frame\n");
-            LOG_V( "bufentry->timestamp %"G_GINT64_FORMAT"\n", bufentry->timestamp);
-
-            //Enqueue this input buffer
-            g_queue_push_tail(parent->inputbufqueue,
-                (gpointer)bufentry);
-
-            //process and decode data
-            ret = mix_videofmt_h264_process_decode(mix,
-                data, ts, discontinuity);
-
-            if (ret != MIX_RESULT_SUCCESS)
-            {
-                //We log this but continue since we need to complete our processing of input buffers
-                LOG_E( "Process_decode failed.\n");
-            }
-
-            LOG_V( "Called process and decode for current frame\n");
-
-            parent->parse_in_progress = FALSE;
-        }
-        else if (pret != VBP_OK)
-        {
-            //We log this but continue since we need to complete our processing of input buffers
-            LOG_E( "Parsing failed.\n");
-            ret = MIX_RESULT_FAIL;
-        }
-        else
-        {
-
-            LOG_V( "Enqueuing buffer and going on to next (if any) for this frame\n");
-
-            //Increase the ref count of this input buffer
-            mix_buffer_ref(bufin[i]);
-
-            //Create a new MixInputBufferEntry
-            //TODO make this from a pool to optimize
-            bufentry = g_malloc(sizeof
-                (MixInputBufferEntry));
-            if (bufentry == NULL)
-            {
-                ret = MIX_RESULT_NO_MEMORY;
-                LOG_E( "Error allocating bufentry\n");
-                goto cleanup;
-            }
-            bufentry->buf = bufin[i];
-            LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"G_GINT64_FORMAT"\n", (guint)bufentry, (guint)bufentry->buf, ts);
-            bufentry->timestamp = ts;
-
-            LOG_V( "Enqueue this input buffer for current frame\n");
-            LOG_V( "bufentry->timestamp %"G_GINT64_FORMAT"\n", bufentry->timestamp);
-
-            //Enqueue this input buffer
-            g_queue_push_tail(parent->inputbufqueue,
-                (gpointer)bufentry);
-            LOG_V( "Setting parse_in_progress to TRUE\n");
-            parent->parse_in_progress = TRUE;
-        }
-
-    }
-
-    cleanup:
-
-    LOG_V( "Unlocking\n");
-    g_mutex_unlock(parent->objectlock);
-
-    LOG_V( "End\n");
-
-    return ret;
+    LOG_V( "Locking\n");
+    g_mutex_lock(parent->objectlock);

+    LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_h264_counter++, ts);
+
+    for (i = 0; i < bufincnt; i++)
+    {
+        LOG_V( "Decoding a buf %x, size %d\n", (guint)bufin[i]->data, bufin[i]->size);
+
+        // decode a buffer at a time
+        ret = mix_videofmt_h264_decode_a_buffer(
+            mix,
+            bufin[i],
+            ts,
+            discontinuity);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E("mix_videofmt_h264_decode_a_buffer failed.\n");
+            goto cleanup;
+        }
+    }

+cleanup:

+    LOG_V( "Unlocking\n");
+    g_mutex_unlock(parent->objectlock);

+    LOG_V( "End\n");
+
+    return ret;
 }

 MIX_RESULT mix_videofmt_h264_flush(MixVideoFormat *mix) {

-MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;

     LOG_V( "Begin\n");

@@ -831,9 +1278,7 @@ MIX_RESULT ret = MIX_RESULT_SUCCESS;
         return MIX_RESULT_NULL_PTR;
     }

-    uint32 pret = 0;
-    MixInputBufferEntry *bufentry = NULL;
-
+    uint32 pret = 0;

     /* Chainup parent method.
         We are not chaining up to parent method for now.
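The hunks above replace the old queue-and-reparse decode path with a per-buffer flow: parse, query, bring VA up lazily once SPS/PPS have been seen, then use the buffer timestamp to detect frame boundaries. A minimal sketch of that control flow is shown below; the struct and function names are hypothetical stand-ins for illustration, not the real MixVideoFormat/MIX_RESULT API in this patch.

    /* Sketch only: decode_ctx, parse_buffer, etc. stand in for the
     * MixVideoFormat object and the vbp_/mix_ calls in the hunks above. */
    #include <stdint.h>
    #include <stddef.h>

    struct decode_ctx;                  /* stands in for MixVideoFormat  */
    struct parsed_data;                 /* stands in for vbp_data_h264   */

    int parse_buffer(struct decode_ctx *c, const uint8_t *p, size_t n);
    struct parsed_data *query_parsed(struct decode_ctx *c);
    int have_sps_pps(const struct parsed_data *d);
    int va_is_ready(const struct decode_ctx *c);
    int va_bring_up(struct decode_ctx *c, struct parsed_data *d);
    int decode_end(struct decode_ctx *c, int drop);     /* vaEndPicture + enqueue */
    int decode_begin(struct decode_ctx *c, struct parsed_data *d);  /* new surface */
    int decode_continue(struct decode_ctx *c, struct parsed_data *d); /* more slices */
    uint64_t current_ts(struct decode_ctx *c);
    void set_current_ts(struct decode_ctx *c, uint64_t ts);

    int decode_a_buffer(struct decode_ctx *c, const uint8_t *p, size_t n, uint64_t ts)
    {
        if (parse_buffer(c, p, n) != 0)
            return -1;                   /* parse error: drop, as the patch does */

        struct parsed_data *d = query_parsed(c);
        if (d == NULL)
            return -1;

        if (!have_sps_pps(d))
            return 0;                    /* no codec config yet: consume quietly */

        if (!va_is_ready(c) && va_bring_up(c, d) != 0)
            return -1;                   /* lazy VA init once SPS/PPS arrive */

        uint64_t last_ts = current_ts(c);
        set_current_ts(c, ts);

        if (last_ts != ts) {             /* new timestamp => new frame */
            if (decode_end(c, 0) != 0)   /* finish and enqueue the pending frame */
                return -1;
            return decode_begin(c, d);   /* grab a surface, decode first slices */
        }
        return decode_continue(c, d);    /* same timestamp => rest of same frame */
    }

The notable design choice is that frame boundaries are inferred from the buffer timestamp rather than from an explicit end-of-frame marker, which is why the matching vaEndPicture is deferred until the next frame arrives, or until flush/EOS invokes decode_end, as the flush and eos hunks below show.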
@@ -847,23 +1292,15 @@ MIX_RESULT ret = MIX_RESULT_SUCCESS;

     MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);

-    g_mutex_lock(mix->objectlock);
-
-    //Clear the contents of inputbufqueue
-    while (!g_queue_is_empty(mix->inputbufqueue))
-    {
-        bufentry = (MixInputBufferEntry *) g_queue_pop_head(
-            mix->inputbufqueue);
-        if (bufentry == NULL) continue;
-
-        mix_buffer_unref(bufentry->buf);
-        g_free(bufentry);
-    }
+    g_mutex_lock(mix->objectlock);

+    // drop any decode-pending picture, and ignore return value
+    mix_videofmt_h264_decode_end(mix, TRUE);
+
     //Clear parse_in_progress flag and current timestamp
-    mix->parse_in_progress = FALSE;
+    mix->parse_in_progress = FALSE;
     mix->discontinuity_frame_in_progress = FALSE;
-    mix->current_timestamp = 0;
+    mix->current_timestamp = (guint64)-1;

     //Clear the DPB surface table
     g_hash_table_remove_all(self->dpb_surface_table);
@@ -873,7 +1310,7 @@ MIX_RESULT ret = MIX_RESULT_SUCCESS;
     if (pret != VBP_OK)
         ret = MIX_RESULT_FAIL;

-    g_mutex_unlock(mix->objectlock);
+    g_mutex_unlock(mix->objectlock);

     LOG_V( "End\n");

@@ -883,10 +1320,8 @@ MIX_RESULT ret = MIX_RESULT_SUCCESS;
 MIX_RESULT mix_videofmt_h264_eos(MixVideoFormat *mix) {

     MIX_RESULT ret = MIX_RESULT_SUCCESS;
-    vbp_data_h264 *data = NULL;
-    uint32 pret = 0;

-    LOG_V( "Begin\n");
+    LOG_V( "Begin\n");

     if (mix == NULL)
     {
@@ -904,38 +1339,12 @@ MIX_RESULT mix_videofmt_h264_eos(MixVideoFormat *mix) {
     }
 #endif

-    g_mutex_lock(mix->objectlock);
-
-    //if a frame is in progress, process the frame
-    if (mix->parse_in_progress)
-    {
-        //query for data
-        pret = vbp_query(mix->parser_handle,
-            (void *) &data);
-
-        if ((pret != VBP_OK) || (data == NULL))
-        {
-            ret = MIX_RESULT_FAIL;
-            LOG_E( "Error getting last parse data\n");
-            goto cleanup;
-        }
-
-        //process and decode data
-        ret = mix_videofmt_h264_process_decode(mix,
-            data, mix->current_timestamp,
-            mix->discontinuity_frame_in_progress);
-        mix->parse_in_progress = FALSE;
-        if (ret != MIX_RESULT_SUCCESS)
-        {
-            LOG_E( "Error processing last frame\n");
-            goto cleanup;
-        }
-
-    }
-
-cleanup:
+    g_mutex_lock(mix->objectlock);

-    g_mutex_unlock(mix->objectlock);
+    // finished decoding the pending frame
+    mix_videofmt_h264_decode_end(mix, FALSE);
+
+    g_mutex_unlock(mix->objectlock);

     //Call Frame Manager with _eos()
     ret = mix_framemanager_eos(mix->framemgr);
@@ -966,13 +1375,15 @@ MIX_RESULT mix_videofmt_h264_deinitialize(MixVideoFormat *mix) {
         return parent_class->deinitialize(mix);
     }

-    //Most stuff is cleaned up in parent_class->finalize() and in _finalize
+    //Most stuff is cleaned up in parent_class->finalize() and in _finalize

-    LOG_V( "End\n");
+    LOG_V( "End\n");

     return MIX_RESULT_SUCCESS;
 }

-#define HACK_DPB
+
+
+#define HACK_DPB
 #ifdef HACK_DPB
 static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix,
     vbp_picture_data_h264* pic_data
@@ -984,7 +1395,6 @@ static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix,
     VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms;
     VAPictureH264 *pRefList = NULL;
     int i = 0, j = 0, k = 0, list = 0;
-    MIX_RESULT ret = MIX_RESULT_FAIL;

     MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);

@@ -1038,7 +1448,7 @@ static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix,
                 pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id =
                     ((MixVideoFrame *)video_frame)->frame_id;

-                LOG_V( "Inserting frame id %d into DPB\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id);
+                LOG_V( "Inserting frame id %d into DPB\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id);

                 pic_params->ReferenceFrames[pic_params->num_ref_frames].flags =
                     pRefList[j].flags;
@@ -1051,7 +1461,7 @@ static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix,
             }
         }

-        pRefList = pic_data->slc_data[i].slc_parms.RefPicList1;
+        pRefList = pic_data->slc_data[i].slc_parms.RefPicList1;
     }
 }
@@ -1059,456 +1469,7 @@ static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix,
 }
 #endif

-
-MIX_RESULT mix_videofmt_h264_process_decode_picture(MixVideoFormat *mix,
-    vbp_data_h264 *data,
-    guint64 timestamp,
-    gboolean discontinuity,
-    int pic_index,
-    MixVideoFrame *frame)
-{
-
-    MIX_RESULT ret = MIX_RESULT_SUCCESS;
-    VAStatus vret = VA_STATUS_SUCCESS;
-    VADisplay vadisplay = NULL;
-    VAContextID vacontext;
-    guint buffer_id_cnt = 0;
-    VABufferID *buffer_ids = NULL;
-
-    //TODO Partition this method into smaller methods
-
-    LOG_V( "Begin\n");
-
-    if ((mix == NULL) || (data == NULL) || (data->pic_data == NULL) || (frame == NULL))
-    {
-        LOG_E( "Null pointer passed in\n");
-        return MIX_RESULT_NULL_PTR;
-    }
-
-    vbp_picture_data_h264* pic_data = &(data->pic_data[pic_index]);
-
-
-    //After this point, all exits from this function are through cleanup:
-
-    if (!MIX_IS_VIDEOFORMAT_H264(mix))
-        return MIX_RESULT_INVALID_PARAM;
-
-    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
-
-    VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms;
-
-    if (pic_params == NULL)
-    {
-        ret = MIX_RESULT_NULL_PTR;
-        LOG_E( "Error reading parser data\n");
-        goto cleanup;
-    }
-
-    //TODO
-    //Check for frame gaps and repeat frames if necessary
-
-    LOG_V( "num_slices is %d, allocating %d buffer_ids\n", pic_data->num_slices, (pic_data->num_slices * 2) + 2);
-
-    buffer_ids = g_malloc(sizeof(VABufferID) *
-        ((pic_data->num_slices * 2) + 2));
-
-    if (buffer_ids == NULL)
-    {
-        LOG_E( "Cannot allocate buffer IDs\n");
-        ret = MIX_RESULT_NO_MEMORY;
-        goto cleanup;
-    }
-
-    //Set up reference frames for the picture parameter buffer
-
-    //Set the picture type (I, B or P frame)
-    //For H.264 we use the first encountered slice type for this (check - may need to change later to search all slices for B type)
-    MixFrameType frame_type = TYPE_INVALID;
-
-    switch (pic_data->slc_data->slc_parms.slice_type)
-    {
-    case 0:
-    case 3:
-    case 5:
-    case 8:
-        frame_type = TYPE_P;
-        break;
-    case 1:
-    case 6:
-        frame_type = TYPE_B;
-        break;
-    case 2:
-    case 4:
-    case 7:
-    case 9:
-        frame_type = TYPE_I;
-        break;
-    default:
-        break;
-    }
-
-    //Do not have to check for B frames after a seek
-    //Note: Demux should seek to IDR (instantaneous decoding refresh) frame, otherwise
-    // DPB will not be correct and frames may come in with invalid references
-    // This will be detected when DPB is checked for valid mapped surfaces and
-    // error returned from there.
-
-    LOG_V( "Getting a new surface for frame_num %d\n", pic_params->frame_num);
-    LOG_V( "frame type is %d\n", frame_type);
-
-    // Set displayorder
-    ret = mix_videoframe_set_displayorder(frame, pic_params->CurrPic.TopFieldOrderCnt / 2);
-    if(ret != MIX_RESULT_SUCCESS)
-    {
-        LOG_E("Error setting displayorder\n");
-        goto cleanup;
-    }
-
-    //Set the frame type for the frame object (used in reordering by frame manager)
-    ret = mix_videoframe_set_frame_type(frame, frame_type);
-
-    if (ret != MIX_RESULT_SUCCESS)
-    {
-        LOG_E( "Error setting frame type on frame\n");
-        goto cleanup;
-    }
-
-    LOG_V( "Updating DPB for libva\n");
-
-    //Now handle the reference frames and surface IDs for DPB and current frame
-    mix_videofmt_h264_handle_ref_frames(mix, pic_params, frame);
-
-#ifdef HACK_DPB
-    //We have to provide a hacked DPB rather than complete DPB for libva as workaround
-    ret = mix_videofmt_h264_hack_dpb(mix, pic_data);
-    if (ret != MIX_RESULT_SUCCESS)
-    {
-        LOG_E( "Error reference frame not found\n");
-        goto cleanup;
-    }
-#endif
-
-    //Libva buffer set up
-
-    vadisplay = mix->va_display;
-    vacontext = mix->va_context;
-
-    LOG_V( "Creating libva picture parameter buffer\n");
-    LOG_V( "picture parameter buffer shows num_ref_frames is %d\n", pic_params->num_ref_frames);
-
-    //First the picture parameter buffer
-    vret = vaCreateBuffer(vadisplay, vacontext,
-        VAPictureParameterBufferType,
-        sizeof(VAPictureParameterBufferH264),
-        1,
-        pic_params,
-        &buffer_ids[buffer_id_cnt]);
-    buffer_id_cnt++;
-
-    if (vret != VA_STATUS_SUCCESS)
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Video driver returned error from vaCreateBuffer\n");
-        goto cleanup;
-    }
-
-    LOG_V( "Creating libva IQMatrix buffer\n");
-
-
-    //Then the IQ matrix buffer
-    vret = vaCreateBuffer(vadisplay, vacontext,
-        VAIQMatrixBufferType,
-        sizeof(VAIQMatrixBufferH264),
-        1,
-        data->IQ_matrix_buf,
-        &buffer_ids[buffer_id_cnt]);
-    buffer_id_cnt++;
-
-    if (vret != VA_STATUS_SUCCESS)
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Video driver returned error from vaCreateBuffer\n");
-        goto cleanup;
-    }
-
-
-    //Now for slices
-    int i = 0;
-    gpointer video_frame;
-    for (;i < pic_data->num_slices; i++)
-    {
-
-        LOG_V( "Creating libva slice parameter buffer, for slice %d\n", i);
-
-        //Do slice parameters
-
-        //First patch up the List0 and List1 surface IDs
-        int j = 0;
-        guint poc = 0;
-        for (; j <= pic_data->slc_data[i].slc_parms.num_ref_idx_l0_active_minus1; j++)
-        {
-            if (!(pic_data->slc_data[i].slc_parms.RefPicList0[j].flags & VA_PICTURE_H264_INVALID))
-            {
-                poc = mix_videofmt_h264_get_poc(&(pic_data->slc_data[i].slc_parms.RefPicList0[j]));
-                video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc);
-                if (video_frame == NULL)
-                {
-                    LOG_E( "unable to find surface of picture %d (current picture %d).", poc, mix_videofmt_h264_get_poc(&pic_params->CurrPic));
-                    ret = MIX_RESULT_DROPFRAME;  //return non-fatal error
-                    goto cleanup;
-                }
-                else
-                {
-                    pic_data->slc_data[i].slc_parms.RefPicList0[j].picture_id =
-                        ((MixVideoFrame *)video_frame)->frame_id;
-                }
-            }
-
-        }
-
-        if ((pic_data->slc_data->slc_parms.slice_type == 1) || (pic_data->slc_data->slc_parms.slice_type == 6))
-        {
-            for (j = 0; j <= pic_data->slc_data[i].slc_parms.num_ref_idx_l1_active_minus1; j++)
-            {
-                if (!(pic_data->slc_data[i].slc_parms.RefPicList1[j].flags & VA_PICTURE_H264_INVALID))
-                {
-                    poc = mix_videofmt_h264_get_poc(&(pic_data->slc_data[i].slc_parms.RefPicList1[j]));
-                    video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc);
-                    if (video_frame == NULL)
-                    {
-                        LOG_E( "unable to find surface of picture %d (current picture %d).", poc, mix_videofmt_h264_get_poc(&pic_params->CurrPic));
-                        ret = MIX_RESULT_DROPFRAME;  //return non-fatal error
-                        goto cleanup;
-                    }
-                    else
-                    {
-                        pic_data->slc_data[i].slc_parms.RefPicList1[j].picture_id =
-                            ((MixVideoFrame *)video_frame)->frame_id;
-                    }
-                }
-            }
-        }
-
-
-        //Then do the libva setup
-
-        vret = vaCreateBuffer(vadisplay, vacontext,
-            VASliceParameterBufferType,
-            sizeof(VASliceParameterBufferH264),
-            1,
-            &(pic_data->slc_data[i].slc_parms),
-            &buffer_ids[buffer_id_cnt]);
-
-        if (vret != VA_STATUS_SUCCESS)
-        {
-            ret = MIX_RESULT_FAIL;
-            LOG_E( "Video driver returned error from vaCreateBuffer\n");
-            goto cleanup;
-        }
-
-        buffer_id_cnt++;
-
-
-        LOG_V( "Creating libva slice data buffer for slice %d, using slice address %x, with offset %d and size %u\n", i, (guint)pic_data->slc_data[i].buffer_addr, pic_data->slc_data[i].slc_parms.slice_data_offset, pic_data->slc_data[i].slice_size);
-
-
-        //Do slice data
-
-        vret = vaCreateBuffer(vadisplay, vacontext,
-            VASliceDataBufferType,
-            //size
-            pic_data->slc_data[i].slice_size,
-            //num_elements
-            1,
-            //slice data buffer pointer
-            //Note that this is the original data buffer ptr;
-            // offset to the actual slice data is provided in
-            // slice_data_offset in VASliceParameterBufferH264
-            pic_data->slc_data[i].buffer_addr + pic_data->slc_data[i].slice_offset,
-            &buffer_ids[buffer_id_cnt]);
-
-        buffer_id_cnt++;
-
-        if (vret != VA_STATUS_SUCCESS)
-        {
-            ret = MIX_RESULT_FAIL;
-            LOG_E( "Video driver returned error from vaCreateBuffer\n");
-            goto cleanup;
-        }
-
-    }
-
-    gulong surface = 0;
-
-    //Get our surface ID from the frame object
-    ret = mix_videoframe_get_frame_id(frame, &surface);
-
-    if (ret != MIX_RESULT_SUCCESS)
-    {
-        LOG_E( "Error getting surface ID from frame object\n");
-        goto cleanup;
-    }
-
-    LOG_V( "Calling vaBeginPicture\n");
-
-    //Now we can begin the picture
-    vret = vaBeginPicture(vadisplay, vacontext, surface);
-
-    if (vret != VA_STATUS_SUCCESS)
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Video driver returned error from vaBeginPicture\n");
-        goto cleanup;
-    }
-
-    LOG_V( "Calling vaRenderPicture\n");
-
-    //Render the picture
-    vret = vaRenderPicture(vadisplay, vacontext,
-        buffer_ids,
-        buffer_id_cnt);
-
-
-    if (vret != VA_STATUS_SUCCESS)
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Video driver returned error from vaRenderPicture\n");
-        goto cleanup;
-    }
-
-    LOG_V( "Calling vaEndPicture\n");
-
-    //End picture
-    vret = vaEndPicture(vadisplay, vacontext);
-
-    if (vret != VA_STATUS_SUCCESS)
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Video driver returned error from vaEndPicture\n");
-        goto cleanup;
-    }
-
-#if 0  /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */
-
-    LOG_V( "Calling vaSyncSurface\n");
-
-    //Decode the picture
-    vret = vaSyncSurface(vadisplay, surface);
-
-    if (vret != VA_STATUS_SUCCESS)
-    {
-        ret = MIX_RESULT_FAIL;
-        LOG_E( "Video driver returned error from vaSyncSurface\n");
-        goto cleanup;
-    }
-#endif
-
-    if (pic_index == 0)
-    {
-        //Set the discontinuity flag
-        mix_videoframe_set_discontinuity(frame, discontinuity);
-
-        //Set the timestamp
-        mix_videoframe_set_timestamp(frame, timestamp);
-
-        guint32 frame_structure = VA_FRAME_PICTURE;
-        if (pic_params->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD)
-        {
-            frame_structure = VA_TOP_FIELD;
-        }
-        else if (pic_params->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD)
-        {
-            frame_structure = VA_BOTTOM_FIELD;
-        }
-        mix_videoframe_set_frame_structure(frame, frame_structure);
-    }
-    else
-    {
-        // frame must be field-coded, no need to set
-        // discontinuity falg and time stamp again
-        mix_videoframe_set_frame_structure(frame, VA_BOTTOM_FIELD | VA_TOP_FIELD);
-    }
-
-    //TODO need to save off frame when handling is added for repeat frames?
-
-//TODO Complete YUVDUMP code and move into base class
-#ifdef YUVDUMP
-    if (mix_video_h264_counter < 10)
-        ret = GetImageFromSurface (mix, frame);
-//    g_usleep(5000000);
-#endif /* YUVDUMP */
-
-    LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", timestamp);
-
-
-    cleanup:
-
-    if (NULL != buffer_ids)
-        g_free(buffer_ids);
-
-
-    LOG_V( "End\n");
-
-    return ret;
-
-}
-
-
-MIX_RESULT mix_videofmt_h264_process_decode(MixVideoFormat *mix,
-    vbp_data_h264 *data,
-    guint64 timestamp,
-    gboolean discontinuity)
-{
-    MIX_RESULT ret = MIX_RESULT_SUCCESS;
-    int i = 0;
-
-    if ((mix == NULL) || (data == NULL))
-    {
-        LOG_E( "Null pointer passed in\n");
-        return MIX_RESULT_NULL_PTR;
-    }
-
-    //Get a frame from the surface pool
-    MixVideoFrame *frame = NULL;
-
-    ret = mix_surfacepool_get(mix->surfacepool, &frame);
-
-    if (ret != MIX_RESULT_SUCCESS)
-    {
-        LOG_E( "Error getting frame from surfacepool\n");
-        return MIX_RESULT_FAIL;
-    }
-
-
-    for (i = 0; i < data->num_pictures; i++)
-    {
-        ret = mix_videofmt_h264_process_decode_picture(mix, data, timestamp, discontinuity, i, frame);
-        if (ret != MIX_RESULT_SUCCESS)
-        {
-            LOG_E( "Failed to process decode picture %d, error = %#X.", data->buf_number, ret);
-            break;
-        }
-    }
-
-    if (ret == MIX_RESULT_SUCCESS)
-    {
-        //Enqueue the decoded frame using frame manager
-        ret = mix_framemanager_enqueue(mix->framemgr, frame);
-        if (ret != MIX_RESULT_SUCCESS)
-        {
-            LOG_E( "Error enqueuing frame object\n");
-            mix_videoframe_unref(frame);
-        }
-
-    }
-    else
-    {
-        mix_videoframe_unref(frame);
-    }
-
-    mix_videofmt_h264_release_input_buffers(mix, timestamp);
-
-    return ret;
-}

 MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix,
     VAPictureParameterBufferH264* pic_params,
@@ -1519,7 +1480,7 @@ MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix,

     LOG_V( "Begin\n");

-    if (mix == NULL || current_frame == NULL || pic_params == NULL)
+    if (mix == NULL || current_frame == NULL || pic_params == NULL)
     {
         LOG_E( "Null pointer passed in\n");
         return MIX_RESULT_NULL_PTR;
@@ -1545,7 +1506,7 @@ MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix,
     LOG_V( "Flags show VA_PICTURE_H264_LONG_TERM_REFERENCE\n");
 #endif

-    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);

     //First we need to check the parser DBP against our DPB table

@@ -1556,7 +1517,7 @@ MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix,
 #endif
     g_hash_table_foreach_remove(self->dpb_surface_table, mix_videofmt_h264_check_in_DPB, pic_params);

-    LOG_V( "%d entries removed from DPB surface table at this frame\n", num_removed);
+    LOG_V( "%d entries removed from DPB surface table at this frame\n", num_removed);

     MixVideoFrame *mvf = NULL;

@@ -1569,17 +1530,17 @@ MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix,
         {
             poc = mix_videofmt_h264_get_poc(&(pic_params->ReferenceFrames[i]));

-            LOG_V( "Looking up poc %d in dpb table\n", poc);
+            LOG_V( "Looking up poc %d in dpb table\n", poc);
             found = g_hash_table_lookup_extended(self->dpb_surface_table, (gpointer)poc, NULL, (gpointer)&mvf);

             if (found)
             {
                 pic_params->ReferenceFrames[i].picture_id = mvf->frame_id;
-                LOG_V( "Looked up poc %d in dpb table found frame ID %d\n", poc, (gint)mvf->frame_id);
+                LOG_V( "Looked up poc %d in dpb table found frame ID %d\n", poc, (gint)mvf->frame_id);
             } else {
-                LOG_V( "Looking up poc %d in dpb table did not find value\n", poc);
+                LOG_V( "Looking up poc %d in dpb table did not find value\n", poc);
             }
-            LOG_V( "For poc %d, set surface id for DPB index %d to %d\n", poc, i, (gint)pic_params->ReferenceFrames[i].picture_id);
+            LOG_V( "For poc %d, set surface id for DPB index %d to %d\n", poc, i, (gint)pic_params->ReferenceFrames[i].picture_id);
         }

     }
@@ -1589,7 +1550,8 @@ MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix,
     pic_params->CurrPic.picture_id = current_frame->frame_id;

     //Check to see if current frame is a reference frame
-    if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE))
+    if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
+        (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE))
     {
         //Get current frame's POC
         poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic));
@@ -1602,6 +1564,44 @@ MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix,
         g_hash_table_insert(self->dpb_surface_table, (gpointer)poc, current_frame);
     }

+    LOG_V( "End\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmt_h264_cleanup_ref_frame(MixVideoFormat *mix,
+    VAPictureParameterBufferH264* pic_params,
+    MixVideoFrame * current_frame
+    ) {
+
+    guint poc = 0;
+
+    LOG_V( "Begin\n");
+
+    if (mix == NULL || current_frame == NULL || pic_params == NULL)
+    {
+        LOG_E( "Null pointer passed in\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+
+    LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d.  Surface ID is %d\n", pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, pic_params->CurrPic.BottomFieldOrderCnt, (gint) current_frame->frame_id);
+
+    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+
+    //Check to see if current frame is a reference frame
+    if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE))
+    {
+        //Get current frame's POC
+        poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic));
+
+        //Decrement the reference count for this frame
+//        mix_videoframe_unref(current_frame);
+
+        LOG_V( "Removing poc %d, surfaceID %d\n", poc, (gint)current_frame->frame_id);
+        //Remove this frame from the DPB surface table
+        g_hash_table_remove(self->dpb_surface_table, (gpointer)poc);
+    }

     LOG_V( "End\n");

@@ -1611,14 +1611,12 @@ MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix,

 guint mix_videofmt_h264_get_poc(VAPictureH264 *pic)
 {
-
-    if (pic == NULL)
-        return 0;
+    if (pic == NULL)
+        return 0;

     if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD)
         return pic->BottomFieldOrderCnt;
-
     if (pic->flags & VA_PICTURE_H264_TOP_FIELD)
         return pic->TopFieldOrderCnt;

@@ -1631,8 +1629,8 @@ gboolean mix_videofmt_h264_check_in_DPB(gpointer key, gpointer value, gpointer u
 {
     gboolean ret = TRUE;

-    if ((value == NULL) || (user_data == NULL))  //Note that 0 is valid value for key
-        return FALSE;
+    if ((value == NULL) || (user_data == NULL))  //Note that 0 is valid value for key
+        return FALSE;

     VAPictureH264* vaPic = NULL;
     int i = 0;
@@ -1653,9 +1651,10 @@ gboolean mix_videofmt_h264_check_in_DPB(gpointer key, gpointer value, gpointer u
     return ret;
 }

+
 void mix_videofmt_h264_destroy_DPB_key(gpointer data)
 {
-//TODO remove this method and don't register it with the hash table foreach call; it is no longer needed
+    //TODO remove this method and don't register it with the hash table foreach call; it is no longer needed
it with the hash table foreach call; it is no longer needed LOG_V( "Begin, poc of %d\n", (guint)data); LOG_V( "End\n"); @@ -1665,10 +1664,11 @@ void mix_videofmt_h264_destroy_DPB_key(gpointer data) void mix_videofmt_h264_destroy_DPB_value(gpointer data) { LOG_V( "Begin\n"); - if (data == NULL) - return ; - mix_videoframe_unref((MixVideoFrame *)data); - + if (data != NULL) + { + mix_videoframe_unref((MixVideoFrame *)data); + } + LOG_V( "End\n"); return; } @@ -1677,47 +1677,8 @@ MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix, guint64 timestamp ) { - MixInputBufferEntry *bufentry = NULL; - gboolean done = FALSE; - - LOG_V( "Begin\n"); - - if (mix == NULL) - return MIX_RESULT_NULL_PTR; - - //Dequeue and release all input buffers for this frame - - LOG_V( "Releasing all the MixBuffers for this frame\n"); - - //While the head of the queue has timestamp == current ts - //dequeue the entry, unref the MixBuffer, and free the struct - done = FALSE; - while (!done) - { - bufentry = (MixInputBufferEntry *) g_queue_peek_head( - mix->inputbufqueue); - if (bufentry == NULL) break; - LOG_V( "head of queue buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)bufentry->buf, timestamp, bufentry->timestamp); - - if (bufentry->timestamp != timestamp) - { - LOG_V( "buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)bufentry->buf, timestamp, bufentry->timestamp); - done = TRUE; - break; - } - - bufentry = (MixInputBufferEntry *) g_queue_pop_head( - mix->inputbufqueue); - LOG_V( "Unref this MixBuffers %x\n", (guint)bufentry->buf); - mix_buffer_unref(bufentry->buf); - g_free(bufentry); - } - - - LOG_V( "End\n"); - + // no longer used, need to be removed. return MIX_RESULT_SUCCESS; } - diff --git a/mix_video/src/mixvideoformat_h264.h b/mix_video/src/mixvideoformat_h264.h index a04048c..b6d062e 100644 --- a/mix_video/src/mixvideoformat_h264.h +++ b/mix_video/src/mixvideoformat_h264.h @@ -12,6 +12,8 @@ #include "mixvideoformat.h" #include "mixvideoframe_private.h" +G_BEGIN_DECLS + #define MIX_VIDEO_H264_SURFACE_NUM 20 /* @@ -123,7 +125,7 @@ void mix_videofmt_h264_destroy_DPB_key(gpointer data); void mix_videofmt_h264_destroy_DPB_value(gpointer data); guint mix_videofmt_h264_get_poc(VAPictureH264 *pic); - +G_END_DECLS #endif /* __MIX_VIDEOFORMAT_H264_H__ */ diff --git a/mix_video/src/mixvideoformat_mp42.c b/mix_video/src/mixvideoformat_mp42.c index c6c7b30..b11ab88 100644 --- a/mix_video/src/mixvideoformat_mp42.c +++ b/mix_video/src/mixvideoformat_mp42.c @@ -142,7 +142,7 @@ void mix_videoformat_mp42_finalize(GObject * obj) { parent->initialized = TRUE; parent->parse_in_progress = FALSE; parent->discontinuity_frame_in_progress = FALSE; - parent->current_timestamp = 0; + parent->current_timestamp = (guint64)-1; /* Close the parser */ vbp_ret = vbp_close(parent->parser_handle); @@ -1149,7 +1149,7 @@ MIX_RESULT mix_videofmt_mp42_flush(MixVideoFormat *mix) { */ mix->parse_in_progress = FALSE; mix->discontinuity_frame_in_progress = FALSE; - mix->current_timestamp = 0; + mix->current_timestamp = (guint64)-1; { gint idx = 0; diff --git a/mix_video/src/mixvideoformat_mp42.h b/mix_video/src/mixvideoformat_mp42.h index 67ee210..49a1299 100644 --- a/mix_video/src/mixvideoformat_mp42.h +++ b/mix_video/src/mixvideoformat_mp42.h @@ -12,6 +12,8 @@ #include "mixvideoformat.h" #include "mixvideoframe_private.h" +G_BEGIN_DECLS + //Note: this is only a max limit. 
Real number of surfaces allocated is calculated in mix_videoformat_mp42_initialize() #define MIX_VIDEO_MP42_SURFACE_NUM 8 @@ -114,4 +116,6 @@ MIX_RESULT mix_videofmt_mp42_process_decode(MixVideoFormat *mix, MIX_RESULT mix_videofmt_mp42_release_input_buffers(MixVideoFormat *mix, guint64 timestamp); +G_END_DECLS + #endif /* __MIX_VIDEOFORMAT_MP42_H__ */ diff --git a/mix_video/src/mixvideoformat_vc1.c b/mix_video/src/mixvideoformat_vc1.c index 151cff3..bf4d1f4 100644 --- a/mix_video/src/mixvideoformat_vc1.c +++ b/mix_video/src/mixvideoformat_vc1.c @@ -120,7 +120,7 @@ void mix_videoformat_vc1_finalize(GObject * obj) { parent->initialized = TRUE; parent->parse_in_progress = FALSE; parent->discontinuity_frame_in_progress = FALSE; - parent->current_timestamp = 0; + parent->current_timestamp = (guint64)-1; //Close the parser pret = vbp_close(parent->parser_handle); @@ -1526,7 +1526,7 @@ MIX_RESULT mix_videofmt_vc1_flush(MixVideoFormat *mix) //Clear parse_in_progress flag and current timestamp mix->parse_in_progress = FALSE; mix->discontinuity_frame_in_progress = FALSE; - mix->current_timestamp = 0; + mix->current_timestamp = (guint64)-1; int i = 0; for (; i < 2; i++) diff --git a/mix_video/src/mixvideoformat_vc1.h b/mix_video/src/mixvideoformat_vc1.h index 9af8a8d..366428e 100644 --- a/mix_video/src/mixvideoformat_vc1.h +++ b/mix_video/src/mixvideoformat_vc1.h @@ -12,6 +12,8 @@ #include "mixvideoformat.h" #include "mixvideoframe_private.h" +G_BEGIN_DECLS + //Note: this is only a max limit. Actual number of surfaces allocated is calculated in mix_videoformat_vc1_initialize() #define MIX_VIDEO_VC1_SURFACE_NUM 8 @@ -120,4 +122,6 @@ MIX_RESULT mix_videofmt_vc1_process_decode(MixVideoFormat *mix, MIX_RESULT mix_videofmt_vc1_release_input_buffers(MixVideoFormat *mix, guint64 timestamp); +G_END_DECLS + #endif /* __MIX_VIDEOFORMAT_VC1_H__ */ diff --git a/mix_video/src/mixvideoformatenc.c b/mix_video/src/mixvideoformatenc.c index e7d1e8e..f39f77f 100644 --- a/mix_video/src/mixvideoformatenc.c +++ b/mix_video/src/mixvideoformatenc.c @@ -30,6 +30,9 @@ static MIX_RESULT mix_videofmtenc_eos_default(MixVideoFormatEnc *mix); static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix); static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default( MixVideoFormatEnc *mix, guint *max_size); +MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params_enc, + MixEncParamsType params_type); static GObjectClass *parent_class = NULL; @@ -71,6 +74,10 @@ static void mix_videoformatenc_init(MixVideoFormatEnc * self) { self->va_format = VA_RT_FORMAT_YUV420; self->va_entrypoint = VAEntrypointEncSlice; self->va_profile = VAProfileH264Baseline; + self->level = 30; + self->CIR_frame_cnt = 15; //default value + self->force_key_frame = FALSE; + self->new_header_required = FALSE; //add more properties here } @@ -91,6 +98,7 @@ static void mix_videoformatenc_class_init(MixVideoFormatEncClass * klass) { klass->eos = mix_videofmtenc_eos_default; klass->deinitialize = mix_videofmtenc_deinitialize_default; klass->getmaxencodedbufsize = mix_videofmtenc_get_max_coded_buffer_size_default; + klass->set_dynamic_config = mix_videofmtenc_set_dynamic_enc_config_default; } MixVideoFormatEnc * @@ -350,7 +358,31 @@ static MIX_RESULT mix_videofmtenc_initialize_default(MixVideoFormatEnc *mix, g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; } + + ret = mix_videoconfigparamsenc_get_level (config_params_enc, + &(mix->level)); + if (ret != 
MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_level\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + ret = mix_videoconfigparamsenc_get_CIR_frame_cnt(config_params_enc, + &(mix->CIR_frame_cnt)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + LOG_V( "======Video Encode Parent Object properities======:\n"); @@ -421,6 +453,157 @@ static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default( return MIX_RESULT_SUCCESS; } +MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params_enc, + MixEncParamsType params_type) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (mix == NULL ||config_params_enc == NULL) { + LOG_E( + "!mix || config_params_enc == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + + MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); + + + g_mutex_lock(mix->objectlock); + + mix->new_header_required = FALSE; + + switch (params_type) { + case MIX_ENC_PARAMS_BITRATE: + { + ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc, &(mix->bitrate)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + LOG_E( + "Failed to mix_videoconfigparamsenc_get_bit_rate\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + mix->new_header_required = TRUE; + } + break; + case MIX_ENC_PARAMS_SLICE_SIZE: + { + /* + * This type of dynamic control will be handled in H.264 override method + */ + } + break; + + case MIX_ENC_PARAMS_RC_MODE: + { + ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc, &(mix->va_rcmode)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_rate_control\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + mix->new_header_required = TRUE; + } + break; + + case MIX_ENC_PARAMS_RESOLUTION: + { + + ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc, &(mix->picture_width), &(mix->picture_height)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_picture_res\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + mix->new_header_required = TRUE; + } + break; + case MIX_ENC_PARAMS_GOP_SIZE: + { + + ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc, &(mix->intra_period)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_intra_period\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + mix->new_header_required = TRUE; + + } + break; + case MIX_ENC_PARAMS_FRAME_RATE: + { + ret = mix_videoconfigparamsenc_get_frame_rate (config_params_enc, &(mix->frame_rate_num), &(mix->frame_rate_denom)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_frame_rate\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + mix->new_header_required = TRUE; + } + break; + case MIX_ENC_PARAMS_FORCE_KEY_FRAME: + { + mix->new_header_required = TRUE; + + } + break; + case MIX_ENC_PARAMS_QP: + { + ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, &(mix->initial_qp)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_init_qp\n"); + g_mutex_unlock(mix->objectlock); + return 
MIX_RESULT_FAIL; + } + + mix->new_header_required = TRUE; + } + break; + case MIX_ENC_PARAMS_CIR_FRAME_CNT: + { + ret = mix_videoconfigparamsenc_get_CIR_frame_cnt (config_params_enc, &(mix->CIR_frame_cnt)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + } + break; + + default: + break; + } + + g_mutex_unlock(mix->objectlock); + + return MIX_RESULT_SUCCESS; +} + /* mixvideoformatenc class methods implementation */ MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg) { @@ -500,3 +683,15 @@ MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, gui return MIX_RESULT_FAIL; } + +MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params_enc, + MixEncParamsType params_type) { + + MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); + if (klass->set_dynamic_config) { + return klass->set_dynamic_config(mix, config_params_enc, params_type); + } + + return MIX_RESULT_FAIL; +} diff --git a/mix_video/src/mixvideoformatenc.h b/mix_video/src/mixvideoformatenc.h index c0da910..b66cc6a 100644 --- a/mix_video/src/mixvideoformatenc.h +++ b/mix_video/src/mixvideoformatenc.h @@ -22,6 +22,8 @@ #include "mixvideoformatqueue.h" #include "mixvideoencodeparams.h" +G_BEGIN_DECLS + /* * Type macros. */ @@ -52,6 +54,9 @@ typedef MIX_RESULT (*MixVideoFmtEncFlushFunc)(MixVideoFormatEnc *mix); typedef MIX_RESULT (*MixVideoFmtEncEndOfStreamFunc)(MixVideoFormatEnc *mix); typedef MIX_RESULT (*MixVideoFmtEncDeinitializeFunc)(MixVideoFormatEnc *mix); typedef MIX_RESULT (*MixVideoFmtEncGetMaxEncodedBufSizeFunc) (MixVideoFormatEnc *mix, guint *max_size); +typedef MIX_RESULT (*MixVideoFmtEncSetDynamicEncConfigFunc) (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params, + MixEncParamsType params_type); struct _MixVideoFormatEnc { /*< public > */ @@ -82,6 +87,10 @@ struct _MixVideoFormatEnc { gboolean share_buf_mode; gulong * ci_frame_id; guint ci_frame_num; + + gboolean force_key_frame; + gboolean new_header_required; + guint CIR_frame_cnt; gulong drawable; gboolean need_display; @@ -90,6 +99,7 @@ struct _MixVideoFormatEnc { VAEntrypoint va_entrypoint; guint va_format; guint va_rcmode; + guint8 level; MixBufferPool *inputbufpool; @@ -115,6 +125,7 @@ struct _MixVideoFormatEncClass { MixVideoFmtEncEndOfStreamFunc eos; MixVideoFmtEncDeinitializeFunc deinitialize; MixVideoFmtEncGetMaxEncodedBufSizeFunc getmaxencodedbufsize; + MixVideoFmtEncSetDynamicEncConfigFunc set_dynamic_config; }; /** @@ -172,7 +183,12 @@ MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix); MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, guint *max_size); +MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, + guint *max_size); +MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params, + MixEncParamsType params_type); +G_END_DECLS #endif /* __MIX_VIDEOFORMATENC_H__ */ diff --git a/mix_video/src/mixvideoformatenc_h263.c b/mix_video/src/mixvideoformatenc_h263.c index 5f28682..809332e 100644 --- a/mix_video/src/mixvideoformatenc_h263.c +++ b/mix_video/src/mixvideoformatenc_h263.c @@ -13,6 +13,7 @@ #include "mixvideoformatenc_h263.h" #include "mixvideoconfigparamsenc_h263.h" +#include #undef SHOW_SRC @@ -43,9 +44,10 
@@ static void mix_videoformatenc_h263_init(MixVideoFormatEnc_H263 * self) { self->is_intra = TRUE; self->cur_frame = NULL; self->ref_frame = NULL; - self->rec_frame = NULL; + self->rec_frame = NULL; +#ifdef ANDROID self->last_mix_buffer = NULL; - +#endif self->ci_shared_surfaces = NULL; self->surfaces= NULL; self->surface_num = 0; @@ -749,12 +751,12 @@ MIX_RESULT mix_videofmtenc_h263_flush(MixVideoFormatEnc *mix) { mix_videoframe_unref (self->ref_frame); self->ref_frame = NULL; } - +#ifdef ANDROID if(self->last_mix_buffer) { mix_buffer_unref(self->last_mix_buffer); self->last_mix_buffer = NULL; } - +#endif /*reset the properities*/ self->encoded_frames = 0; self->pic_skipped = FALSE; @@ -1322,6 +1324,7 @@ MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, #else #define USE_SRC_FMT_NV21 #endif + #ifdef USE_SRC_FMT_YUV420 /*need to convert YUV420 to NV12*/ dst_y = pvbuf +image->offsets[0]; @@ -1448,8 +1451,11 @@ MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, if (mix->cur_frame == NULL) { guint ci_idx; -// memcpy (&ci_idx, bufin->data, bufin->size); +#ifndef ANDROID + memcpy (&ci_idx, bufin->data, bufin->size); +#else memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); +#endif LOG_I( "surface_num = %d\n", mix->surface_num); @@ -1721,7 +1727,8 @@ MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, mix->coded_buf_index ++; mix->coded_buf_index %=2; mix->last_frame = mix->cur_frame; - + +#ifdef ANDROID if(mix->last_mix_buffer) { LOG_V("calls to mix_buffer_unref \n"); LOG_V("refcount = %d\n", MIX_PARAMS(mix->last_mix_buffer)->refcount); @@ -1730,6 +1737,7 @@ MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, LOG_V("ref the current bufin\n"); mix->last_mix_buffer = mix_buffer_ref(bufin); +#endif if (!(parent->need_display)) { mix_videoframe_unref (mix->cur_frame); diff --git a/mix_video/src/mixvideoformatenc_h263.h b/mix_video/src/mixvideoformatenc_h263.h index ece5fc8..f13db38 100644 --- a/mix_video/src/mixvideoformatenc_h263.h +++ b/mix_video/src/mixvideoformatenc_h263.h @@ -12,6 +12,8 @@ #include "mixvideoformatenc.h" #include "mixvideoframe_private.h" +G_BEGIN_DECLS + #define MIX_VIDEO_ENC_H263_SURFACE_NUM 20 #define min(X,Y) (((X) < (Y)) ? 
(X) : (Y)) @@ -48,7 +50,9 @@ struct _MixVideoFormatEnc_H263 { MixVideoFrame *ref_frame; //reference frame MixVideoFrame *rec_frame; //reconstructed frame; MixVideoFrame *last_frame; //last frame; +#ifdef ANDROID MixBuffer *last_mix_buffer; +#endif guint disable_deblocking_filter_idc; guint slice_num; @@ -137,5 +141,7 @@ MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, Mix MixIOVec * iovout); MIX_RESULT mix_videofmtenc_h263_send_encode_command (MixVideoFormatEnc_H263 *mix); +G_END_DECLS + #endif /* __MIX_VIDEOFORMATENC_H263_H__ */ diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c index 82fad06..336b4d8 100644 --- a/mix_video/src/mixvideoformatenc_h264.c +++ b/mix_video/src/mixvideoformatenc_h264.c @@ -13,6 +13,7 @@ #include "mixvideoformatenc_h264.h" #include "mixvideoconfigparamsenc_h264.h" +#include #undef SHOW_SRC @@ -44,7 +45,9 @@ static void mix_videoformatenc_h264_init(MixVideoFormatEnc_H264 * self) { self->cur_frame = NULL; self->ref_frame = NULL; self->rec_frame = NULL; +#ifdef ANDROID self->last_mix_buffer = NULL; +#endif self->ci_shared_surfaces = NULL; self->surfaces= NULL; @@ -78,6 +81,7 @@ static void mix_videoformatenc_h264_class_init( video_formatenc_class->eos = mix_videofmtenc_h264_eos; video_formatenc_class->deinitialize = mix_videofmtenc_h264_deinitialize; video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_h264_get_max_encoded_buf_size; + video_formatenc_class->set_dynamic_config = mix_videofmtenc_h264_set_dynamic_enc_config; } MixVideoFormatEnc_H264 * @@ -230,7 +234,16 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, "Failed to mix_videoconfigparamsenc_h264_get_delimiter_type\n"); goto cleanup; } + + ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264, + &self->idr_interval); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E ( + "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); + goto cleanup; + } + LOG_V( "======H264 Encode Object properities======:\n"); @@ -241,7 +254,9 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, LOG_I( "self->slice_num = %d\n", self->slice_num); LOG_I ("self->delimiter_type = %d\n", - self->delimiter_type); + self->delimiter_type); + LOG_I ("self->idr_interval = %d\n", + self->idr_interval); LOG_V( "Get properities from params done\n"); @@ -251,6 +266,7 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, LOG_V( "Get Display\n"); LOG_I( "Display = 0x%08x\n", (guint)va_display); + #if 0 /* query the vender information, can ignore*/ @@ -776,12 +792,12 @@ MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { mix_videoframe_unref (self->ref_frame); self->ref_frame = NULL; } - +#ifdef ANDROID if(self->last_mix_buffer) { mix_buffer_unref(self->last_mix_buffer); self->last_mix_buffer = NULL; } - +#endif /*reset the properities*/ self->encoded_frames = 0; self->pic_skipped = FALSE; @@ -934,6 +950,7 @@ MIX_RESULT mix_videofmtenc_h264_send_seq_params (MixVideoFormatEnc_H264 *mix) /*set up the sequence params for HW*/ h264_seq_param.level_idc = 30; //TODO, hard code now h264_seq_param.intra_period = parent->intra_period; + h264_seq_param.intra_idr_period = mix->idr_interval; h264_seq_param.picture_width_in_mbs = parent->picture_width / 16; h264_seq_param.picture_height_in_mbs = parent->picture_height/ 16; h264_seq_param.bits_per_second = parent->bitrate; @@ -954,7 +971,9 @@ MIX_RESULT mix_videofmtenc_h264_send_seq_params (MixVideoFormatEnc_H264 *mix) LOG_I( "level_idc = %d\n", 
(guint)h264_seq_param.level_idc); LOG_I( "intra_period = %d\n", - h264_seq_param.intra_period); + h264_seq_param.intra_period); + LOG_I( "idr_interval = %d\n", + h264_seq_param.intra_idr_period); LOG_I( "picture_width_in_mbs = %d\n", h264_seq_param.picture_width_in_mbs); LOG_I( "picture_height_in_mbs = %d\n", @@ -1510,8 +1529,11 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, if (mix->cur_frame == NULL) { guint ci_idx; - //memcpy (&ci_idx, bufin->data, bufin->size); +#ifndef ANDROID + memcpy (&ci_idx, bufin->data, bufin->size); +#else memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); +#endif LOG_I( "surface_num = %d\n", mix->surface_num); @@ -1551,6 +1573,10 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); } + + /** + * Start encoding process + **/ LOG_V( "vaBeginPicture\n"); LOG_I( "va_context = 0x%08x\n",(guint)va_context); @@ -1585,6 +1611,8 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, goto cleanup; } } + + LOG_V( "vaEndPicture\n"); if (mix->encoded_frames == 0) { mix->encoded_frames ++; @@ -1651,6 +1679,8 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, num_seg ++; } + LOG_I ("segment number = %d\n", num_seg); + #if 0 // first 4 bytes is the size of the buffer memcpy (&(iovout->data_size), (void*)buf, 4); @@ -1876,6 +1906,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, mix->coded_buf_index %=2; mix->last_frame = mix->cur_frame; +#ifdef ANDROID if(mix->last_mix_buffer) { LOG_V("calls to mix_buffer_unref \n"); LOG_V("refcount = %d\n", MIX_PARAMS(mix->last_mix_buffer)->refcount); @@ -1884,6 +1915,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, LOG_V("ref the current bufin\n"); mix->last_mix_buffer = mix_buffer_ref(bufin); +#endif if (!(parent->need_display)) { mix_videoframe_unref (mix->cur_frame); @@ -2101,10 +2133,12 @@ MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix LOG_V( "Begin\n"); + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) return MIX_RESULT_INVALID_PARAM; - if (mix->encoded_frames == 0) { + if (mix->encoded_frames == 0 || parent->new_header_required) { ret = mix_videofmtenc_h264_send_seq_params (mix); if (ret != MIX_RESULT_SUCCESS) { @@ -2112,6 +2146,8 @@ MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix "Failed mix_videofmtenc_h264_send_seq_params\n"); return MIX_RESULT_FAIL; } + + parent->new_header_required = FALSE; //Set to require new header filed to FALSE } ret = mix_videofmtenc_h264_send_picture_parameter (mix); @@ -2138,3 +2174,97 @@ MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix return MIX_RESULT_SUCCESS; } +MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params_enc, + MixEncParamsType params_type) { + + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + MixVideoConfigParamsEncH264 * config_params_enc_h264; + + LOG_V( "Begin\n"); + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) { + config_params_enc_h264 = + MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc); + } else 
{ + LOG_V( + "mix_videofmtenc_h264_initialize: no h264 config params found\n"); + return MIX_RESULT_FAIL; + } + + /* + * For case params_type == MIX_ENC_PARAMS_SLICE_SIZE + * we don't need to chain up to parent method, as we will handle + * dynamic slice height change inside this method, and other dynamic + * controls will be handled in parent method. + */ + if (params_type == MIX_ENC_PARAMS_SLICE_SIZE) { + + g_mutex_lock(parent->objectlock); + + ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, + &self->slice_num); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); + + g_mutex_unlock(parent->objectlock); + + return ret; + } + + g_mutex_unlock(parent->objectlock); + + } else if (params_type == MIX_ENC_PARAMS_IDR_INTERVAL) { + + g_mutex_lock(parent->objectlock); + + ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264, + &self->idr_interval); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); + + g_mutex_unlock(parent->objectlock); + + return ret; + } + + parent->new_header_required = TRUE; + + g_mutex_unlock(parent->objectlock); + + } else{ + + /* Chainup parent method. */ + if (parent_class->set_dynamic_config) { + ret = parent_class->set_dynamic_config(mix, config_params_enc, + params_type); + } + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_V( + "chainup parent method (set_dynamic_config) failed \n"); + return ret; + } + } + + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; + +} + diff --git a/mix_video/src/mixvideoformatenc_h264.h b/mix_video/src/mixvideoformatenc_h264.h index 6f470af..a8f813a 100644 --- a/mix_video/src/mixvideoformatenc_h264.h +++ b/mix_video/src/mixvideoformatenc_h264.h @@ -12,6 +12,8 @@ #include "mixvideoformatenc.h" #include "mixvideoframe_private.h" +G_BEGIN_DECLS + #define MIX_VIDEO_ENC_H264_SURFACE_NUM 20 #define min(X,Y) (((X) < (Y)) ? 
(X) : (Y)) @@ -46,12 +48,15 @@ struct _MixVideoFormatEnc_H264 { MixVideoFrame *cur_frame; //current input frame to be encoded; MixVideoFrame *ref_frame; //reference frame MixVideoFrame *rec_frame; //reconstructed frame; - MixVideoFrame *last_frame; //last frame; + MixVideoFrame *last_frame; //last frame; +#ifdef ANDROID MixBuffer *last_mix_buffer; +#endif guint basic_unit_size; //for rate control guint disable_deblocking_filter_idc; MixDelimiterType delimiter_type; + guint idr_interval; guint slice_num; guint va_rcmode; @@ -129,6 +134,9 @@ MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix); MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix); MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix); +MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params_enc, + MixEncParamsType params_type); /* Local Methods */ @@ -140,4 +148,6 @@ MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed ( MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix); +G_END_DECLS + #endif /* __MIX_VIDEOFORMATENC_H264_H__ */ diff --git a/mix_video/src/mixvideoformatenc_mpeg4.c b/mix_video/src/mixvideoformatenc_mpeg4.c index 5e95d64..18c0b16 100644 --- a/mix_video/src/mixvideoformatenc_mpeg4.c +++ b/mix_video/src/mixvideoformatenc_mpeg4.c @@ -13,6 +13,7 @@ #include "mixvideoformatenc_mpeg4.h" #include "mixvideoconfigparamsenc_mpeg4.h" +#include #undef SHOW_SRC @@ -1552,7 +1553,7 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, if (coded_seg->next == NULL) break; - coded_seg ++; + coded_seg = coded_seg->next; num_seg ++; } @@ -1588,7 +1589,7 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, if (coded_seg->next == NULL) break; - coded_seg ++; + coded_seg = coded_seg->next; } iovout->buffer_size = iovout->data_size; diff --git a/mix_video/src/mixvideoformatenc_mpeg4.h b/mix_video/src/mixvideoformatenc_mpeg4.h index fc83d95..7e8e29b 100644 --- a/mix_video/src/mixvideoformatenc_mpeg4.h +++ b/mix_video/src/mixvideoformatenc_mpeg4.h @@ -12,6 +12,8 @@ #include "mixvideoformatenc.h" #include "mixvideoframe_private.h" +G_BEGIN_DECLS + #define MIX_VIDEO_ENC_MPEG4_SURFACE_NUM 20 #define min(X,Y) (((X) < (Y)) ? (X) : (Y)) @@ -139,5 +141,7 @@ MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, M MixIOVec * iovout); MIX_RESULT mix_videofmtenc_mpeg4_send_encode_command (MixVideoFormatEnc_MPEG4 *mix); +G_END_DECLS + #endif /* __MIX_VIDEOFORMATENC_MPEG4_H__ */ diff --git a/mix_video/src/mixvideoformatenc_preview.c b/mix_video/src/mixvideoformatenc_preview.c index 34aa89b..649ad49 100644 --- a/mix_video/src/mixvideoformatenc_preview.c +++ b/mix_video/src/mixvideoformatenc_preview.c @@ -13,6 +13,7 @@ #include "mixvideoformatenc_preview.h" #include "mixvideoconfigparamsenc_preview.h" +#include #undef SHOW_SRC diff --git a/mix_video/src/mixvideoformatenc_preview.h b/mix_video/src/mixvideoformatenc_preview.h index 45ae101..09bc149 100644 --- a/mix_video/src/mixvideoformatenc_preview.h +++ b/mix_video/src/mixvideoformatenc_preview.h @@ -12,6 +12,8 @@ #include "mixvideoformatenc.h" #include "mixvideoframe_private.h" +G_BEGIN_DECLS + #define MIX_VIDEO_ENC_PREVIEW_SURFACE_NUM 20 #define min(X,Y) (((X) < (Y)) ? 
(X) : (Y)) @@ -130,4 +132,6 @@ MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix); MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mix, MixBuffer * bufin, MixIOVec * iovout); +G_END_DECLS + #endif /* __MIX_VIDEOFORMATENC_PREVIEW_H__ */ diff --git a/mix_video/src/mixvideoformatqueue.h b/mix_video/src/mixvideoformatqueue.h index 5594aba..f21edfb 100644 --- a/mix_video/src/mixvideoformatqueue.h +++ b/mix_video/src/mixvideoformatqueue.h @@ -11,6 +11,8 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixbuffer.h" +G_BEGIN_DECLS + typedef struct _MixInputBufferEntry MixInputBufferEntry; struct _MixInputBufferEntry @@ -21,4 +23,6 @@ struct _MixInputBufferEntry }; +G_END_DECLS + #endif /* __MIX_VIDEOFORMATQUEUE_H__ */ diff --git a/mix_video/src/mixvideoframe.c b/mix_video/src/mixvideoframe.c index 4ea0252..2891cf0 100644 --- a/mix_video/src/mixvideoframe.c +++ b/mix_video/src/mixvideoframe.c @@ -492,10 +492,10 @@ MIX_RESULT mix_videoframe_set_displayorder(MixVideoFrame *obj, guint32 displayor MIX_RESULT mix_videoframe_get_displayorder(MixVideoFrame *obj, guint32 *displayorder) { MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, displayorder); - MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); + MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); - *displayorder = priv -> displayorder; - return MIX_RESULT_SUCCESS; + *displayorder = priv -> displayorder; + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoframe.h b/mix_video/src/mixvideoframe.h index 626e2b1..c3089a9 100644 --- a/mix_video/src/mixvideoframe.h +++ b/mix_video/src/mixvideoframe.h @@ -12,6 +12,8 @@ #include #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOFRAME: * @@ -235,4 +237,6 @@ MIX_RESULT mix_videoframe_set_vadisplay(MixVideoFrame * obj, void *va_display); MIX_RESULT mix_videoframe_get_vadisplay(MixVideoFrame * obj, void **va_display); MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, guint32* frame_structure); +G_END_DECLS + #endif /* __MIX_VIDEOFRAME_H__ */ diff --git a/mix_video/src/mixvideoframe_private.h b/mix_video/src/mixvideoframe_private.h index f337417..bb8bd1e 100644 --- a/mix_video/src/mixvideoframe_private.h +++ b/mix_video/src/mixvideoframe_private.h @@ -12,6 +12,8 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoframe.h" #include "mixsurfacepool.h" +G_BEGIN_DECLS + typedef enum _MixFrameType { TYPE_I, @@ -33,7 +35,7 @@ struct _MixVideoFramePrivate gboolean sync_flag; guint32 frame_structure; // 0: frame, 1: top field, 2: bottom field void *va_display; - guint32 displayorder; + guint32 displayorder; }; /** @@ -86,4 +88,7 @@ mix_videoframe_set_displayorder(MixVideoFrame *obj, guint32 displayorder); MIX_RESULT mix_videoframe_get_displayorder(MixVideoFrame *obj, guint32 *displayorder); + +G_END_DECLS + #endif /* __MIX_VIDEOFRAME_PRIVATE_H__ */ diff --git a/mix_video/src/mixvideoinitparams.h b/mix_video/src/mixvideoinitparams.h index e234999..bd83224 100644 --- a/mix_video/src/mixvideoinitparams.h +++ b/mix_video/src/mixvideoinitparams.h @@ -13,6 +13,8 @@ #include "mixdisplay.h" #include "mixvideodef.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOINITPARAMS: * @@ -158,4 +160,6 @@ MIX_RESULT mix_videoinitparams_set_display (MixVideoInitParams * obj, MIX_RESULT mix_videoinitparams_get_display (MixVideoInitParams * obj, MixDisplay ** dislay); +G_END_DECLS + #endif /* __MIX_VIDEOINITPARAMS_H__ */ diff --git a/mix_video/src/mixvideolog.h 
b/mix_video/src/mixvideolog.h index 89a8827..7bb9ace 100644 --- a/mix_video/src/mixvideolog.h +++ b/mix_video/src/mixvideolog.h @@ -10,6 +10,7 @@ #define __MIX_VIDEO_LOG_H__ #include +G_BEGIN_DECLS #ifdef MIX_LOG_ENABLE #define LOG_V(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__) #define LOG_I(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_INFO, format, ##__VA_ARGS__) @@ -22,4 +23,6 @@ #define LOG_E(format, ...) #endif +G_END_DECLS + #endif /* __MIX_VIDEO_LOG_H__ */ diff --git a/mix_video/src/mixvideorenderparams.h b/mix_video/src/mixvideorenderparams.h index e17136d..b377136 100644 --- a/mix_video/src/mixvideorenderparams.h +++ b/mix_video/src/mixvideorenderparams.h @@ -14,6 +14,8 @@ #include "mixdisplay.h" #include "mixvideoframe.h" +G_BEGIN_DECLS + /** * MIX_TYPE_VIDEORENDERPARAMS: * @@ -253,4 +255,6 @@ MIX_RESULT mix_videorenderparams_get_clipping_rects(MixVideoRenderParams * obj, /* TODO: Add getters and setters for other properties */ +G_END_DECLS + #endif /* __MIX_VIDEORENDERPARAMS_H__ */ diff --git a/mix_video/src/mixvideorenderparams_internal.h b/mix_video/src/mixvideorenderparams_internal.h index 8619173..c7d1fe4 100644 --- a/mix_video/src/mixvideorenderparams_internal.h +++ b/mix_video/src/mixvideorenderparams_internal.h @@ -9,6 +9,9 @@ #ifndef __MIX_VIDEORENDERPARAMS_PRIVATE_H__ #define __MIX_VIDEORENDERPARAMS_PRIVATE_H__ +G_BEGIN_DECLS + + typedef struct _MixVideoRenderParamsPrivate MixVideoRenderParamsPrivate; struct _MixVideoRenderParamsPrivate { @@ -33,4 +36,6 @@ MIX_RESULT mix_videorenderparams_get_cliprects_internal( VARectangle ** va_cliprects, guint* number_of_cliprects); +G_END_DECLS + #endif /* __MIX_VIDEORENDERPARAMS_PRIVATE_H__ */ diff --git a/mix_video/test/configure.ac b/mix_video/test/configure.ac index 4e3a279..82235ad 100644 --- a/mix_video/test/configure.ac +++ b/mix_video/test/configure.ac @@ -1,5 +1,5 @@ -AC_INIT([testmixvideo], [0.1], [tao.q.tao@intel.com]) +AC_INIT([testmixvideo],[0.1],[tao.q.tao@intel.com]) dnl AC_CONFIG_MACRO_DIR([m4]) @@ -35,8 +35,8 @@ if test "x$HAVE_MIXVIDEO" = "xno"; then AC_MSG_ERROR(You need mixvideo development packages installed !) fi -AC_ARG_ENABLE(optimization, AC_HELP_STRING([ --disable-optimization], [Do not optimize the library for speed. Might be required for debugging.])) -AC_ARG_ENABLE(debuginfo, AC_HELP_STRING([ --enable-debuginfo ], [add -g to the compiler flags (to create debug information)])) +AC_ARG_ENABLE(optimization, AS_HELP_STRING([ --disable-optimization],[Do not optimize the library for speed. Might be required for debugging.])) +AC_ARG_ENABLE(debuginfo, AS_HELP_STRING([ --enable-debuginfo ],[add -g to the compiler flags (to create debug information)])) if test "$enable_optimization" = "no" ; then DEBUG=true -- cgit v1.2.3 From 74b62593fe676652ee23fa7a834e69fcc24535f7 Mon Sep 17 00:00:00 2001 From: Tao Tao Date: Mon, 18 Oct 2010 17:47:47 -0700 Subject: Multiple video conferencing use case fixes. Fixed IDR interval always falling to default value. Fixes for HSD 3470714 (NAL delimiter API support), HSD 3470713 (Decoder freeze with packet loss), green screen startup issues, video corruption issues, frame manager fix. 
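The IDR-interval fix referenced above flows through the set_dynamic_config vtable entry added earlier in this series. A minimal caller-side sketch (illustrative only: venc stands for an already-opened encoder instance, and the _new constructor, the _set_IDR_interval setter and the MIX_VIDEOCONFIGPARAMSENC cast are assumed from the library's usual GObject get/set conventions, pairing with the _get_IDR_interval getter shown above):

    MixVideoConfigParamsEncH264 *cfg = mix_videoconfigparamsenc_h264_new();
    /* Hypothetical setter paired with the _get_IDR_interval getter above. */
    mix_videoconfigparamsenc_h264_set_IDR_interval(cfg, 30);
    /* Dispatches to the H.264 override, which re-reads idr_interval and sets
     * new_header_required so the next encode emits a fresh sequence header. */
    mix_videofmtenc_set_dynamic_enc_config(MIX_VIDEOFORMATENC(venc),
            MIX_VIDEOCONFIGPARAMSENC(cfg), MIX_ENC_PARAMS_IDR_INTERVAL);
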
Change-Id: Id180dce3e2cd827ed3d1f0d79eb7c4651c97a73c --- mix_vbp/ChangeLog | 6 +- mix_vbp/configure.ac | 2 +- mix_vbp/mixvbp.spec | 27 +- .../fw/codecs/h264/parser/h264parse_dpb.c | 7 - .../fw/codecs/h264/parser/viddec_h264_parse.c | 5 - .../fw/codecs/mp4/parser/viddec_fw_mp4_workload.c | 3 - .../fw/codecs/mp4/parser/viddec_mp4_parse.c | 3 - .../fw/codecs/mp4/parser/viddec_mp4_shortheader.c | 3 - .../mp4/parser/viddec_mp4_videoobjectlayer.c | 3 - .../mp4/parser/viddec_mp4_videoobjectplane.c | 3 - .../fw/codecs/mp4/parser/viddec_mp4_visualobject.c | 3 - .../viddec_fw/fw/codecs/mp4/parser/viddec_types.h | 7 - mix_vbp/viddec_fw/fw/parser/main.c | 5 - mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 6 - mix_vbp/viddec_fw/fw/parser/vbp_loader.c | 6 - mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 5 - mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 5 - mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 4 - mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c | 7 - mix_vbp/viddec_fw/fw/parser/viddec_emit.c | 4 - mix_vbp/viddec_fw/fw/parser/viddec_pm.c | 5 - mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c | 4 - mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c | 5 - mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c | 5 - .../viddec_fw/fw/parser/viddec_pm_utils_bstream.c | 5 - mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c | 5 - mix_vbp/viddec_fw/include/viddec_fw_common_defs.h | 4 + mix_video/ChangeLog | 5 + mix_video/configure.ac | 2 +- mix_video/mixvideo.spec | 17 +- mix_video/src/mixframemanager.c | 155 ++++++----- mix_video/src/mixvideoformat_h264.c | 294 ++++++++++++--------- mix_video/src/mixvideoformat_h264.h | 12 +- mix_video/src/mixvideoformatenc_h264.c | 2 + 34 files changed, 302 insertions(+), 332 deletions(-) delete mode 100644 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_types.h diff --git a/mix_vbp/ChangeLog b/mix_vbp/ChangeLog index 98a1ee8..5efdae9 100644 --- a/mix_vbp/ChangeLog +++ b/mix_vbp/ChangeLog @@ -1,4 +1,8 @@ -2010-09-15 Andy Qiu +2010-09-29 Andy Qiu + * Enhanced H.264 bitstream parsing + * Changed version number to 0.1.18 + +2010-09-15 Tao Tao * Merged changes for Android 2010-09-02 Andy Qiu diff --git a/mix_vbp/configure.ac b/mix_vbp/configure.ac index 7b9edae..933f6ab 100644 --- a/mix_vbp/configure.ac +++ b/mix_vbp/configure.ac @@ -2,7 +2,7 @@ AC_INIT([""],[""],[linda.s.cline@intel.com]) AC_CONFIG_MACRO_DIR(m4) -UMG_MIX_VERSION(mixvbp, MIXVBP, 0, 1, 17) +UMG_MIX_VERSION(mixvbp, MIXVBP, 0, 1, 18) dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode AM_MAINTAINER_MODE diff --git a/mix_vbp/mixvbp.spec b/mix_vbp/mixvbp.spec index 77445d3..d3ccd71 100644 --- a/mix_vbp/mixvbp.spec +++ b/mix_vbp/mixvbp.spec @@ -6,7 +6,7 @@ Summary: MIX Video Bitstream Parser Name: mixvbp -Version: 0.1.17 +Version: 0.1.18 Release: 1 Source0: %{name}-%{version}.tar.bz2 NoSource: 0 @@ -29,25 +29,40 @@ The %{name}-devel package contains the header files and static libraries for bui %prep %setup -q + %build ./autogen.sh ./configure --prefix=%{_prefix} make + %install make DESTDIR=$RPM_BUILD_ROOT install rm -f $RPM_BUILD_ROOT/%{_prefix}/lib/libmixvbp_mpeg2* +%post -p /sbin/ldconfig +%postun -p /sbin/ldconfig + %clean rm -rf $RPM_BUILD_ROOT %files %defattr(-,root,root) -%{_prefix}/lib/libmixvbp.so* -%{_prefix}/lib/libmixvbp_vc1.so* -%{_prefix}/lib/libmixvbp_h264.so* -%{_prefix}/lib/libmixvbp_mpeg4.so* +%{_prefix}/lib/libmixvbp.so.* +%{_prefix}/lib/libmixvbp_vc1.so.* +%{_prefix}/lib/libmixvbp_h264.so.* +%{_prefix}/lib/libmixvbp_mpeg4.so.* %files devel %defattr(-,root,root) -%{_prefix}/include 
+%{_prefix}/include/mixvbp %{_prefix}/lib/*.la %{_prefix}/lib/pkgconfig/mixvbp.pc +%{_prefix}/lib/libmixvbp.so +%{_prefix}/lib/libmixvbp_vc1.so +%{_prefix}/lib/libmixvbp_h264.so +%{_prefix}/lib/libmixvbp_mpeg4.so + + +%changelog +* Mon Sep 13 2010 John Q Public 0.0 +- Dummy changelog to satisfy rpmlint. + diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c index d174f12..3a111c4 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c @@ -1,10 +1,3 @@ - -#ifdef ANDROID -#define true 1 -#define false 0 -#endif - - /*! *********************************************************************** * \file: h264_dpb_ctl.c diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c index c30167b..a763d00 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c @@ -1,8 +1,3 @@ -#ifdef ANDROID -#define true 1 -#define false 0 -#endif - #include "viddec_fw_debug.h" #include "viddec_parser_ops.h" diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c index 4a53b8b..6b44c7a 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c @@ -2,9 +2,6 @@ #include "viddec_parser_ops.h" #include "viddec_fw_mp4.h" #include "viddec_mp4_parse.h" -#ifdef ANDROID -#include "viddec_types.h" -#endif uint32_t viddec_fw_mp4_populate_attr(viddec_workload_t *wl, viddec_mp4_parser_t *parser) { diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c index 6a4a8ac..d2722a2 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c @@ -6,9 +6,6 @@ #include "viddec_mp4_videoobjectlayer.h" #include "viddec_mp4_videoobjectplane.h" #include "viddec_mp4_visualobject.h" -#ifdef ANDROID -#include "viddec_types.h" -#endif extern uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state); diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c index 7603cd7..33cb1d7 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c @@ -1,7 +1,4 @@ #include "viddec_mp4_shortheader.h" -#ifdef ANDROID -#include "viddec_types.h" -#endif typedef struct { diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c index e92a26d..31ac9d1 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c @@ -1,7 +1,4 @@ #include "viddec_mp4_videoobjectlayer.h" -#ifdef ANDROID -#include "viddec_types.h" -#endif const unsigned char mp4_DefaultIntraQuantMatrix[64] = { 8, 17, 18, 19, 21, 23, 25, 27, diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c index 5d1ae0f..3fee166 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c +++ 
b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c @@ -1,7 +1,4 @@ #include "viddec_mp4_videoobjectplane.h" -#ifdef ANDROID -#include "viddec_types.h" -#endif mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t *parser) { diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c index 35a352d..acfb8fa 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c @@ -1,7 +1,4 @@ #include "viddec_mp4_visualobject.h" -#ifdef ANDROID -#include "viddec_types.h" -#endif static inline uint8_t mp4_pvt_isValid_verID(uint8_t id) { diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_types.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_types.h deleted file mode 100644 index 98d4ce8..0000000 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_types.h +++ /dev/null @@ -1,7 +0,0 @@ -#ifndef __VIDDEC_TYPES_H__ -#define __VIDDEC_TYPES_H__ - -#define true 1 -#define false 0 - -#endif //__VIDDEC_TYPES_H__ diff --git a/mix_vbp/viddec_fw/fw/parser/main.c b/mix_vbp/viddec_fw/fw/parser/main.c index 4ba89f7..1bb368a 100644 --- a/mix_vbp/viddec_fw/fw/parser/main.c +++ b/mix_vbp/viddec_fw/fw/parser/main.c @@ -1,8 +1,3 @@ -#ifdef ANDROID -#define true 1 -#define false 0 -#endif - #include "fw_pvt.h" #include "viddec_fw_parser_ipclib_config.h" #include "viddec_fw_common_defs.h" diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index 38392e1..e62c411 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -6,12 +6,6 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#ifdef ANDROID -#define true 1 -#define false 0 -#endif - - #include #include diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c index cfcad5b..27a2dd0 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c @@ -6,12 +6,6 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
*/ -#ifdef ANDROID -#define true 1 -#define false 0 -#endif - - #include #include "vbp_loader.h" diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index 00a921c..e266ea6 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -29,11 +29,6 @@ typedef unsigned short uint16; #ifndef uint32 typedef unsigned int uint32; #endif -#ifndef ANDROID -#ifndef bool -typedef int bool; -#endif -#endif typedef void *Handle; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index 85b32e0..7a65dbe 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -6,11 +6,6 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#ifdef ANDROID -#define true 1 -#define false 0 -#endif - #include #include diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c index 275f43c..a26a9f1 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c @@ -5,10 +5,6 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#ifdef ANDROID -#define true 1 -#define false 0 -#endif #include #include diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c index d43ada6..af16e8d 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c @@ -6,13 +6,6 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
*/ - -#ifdef ANDROID -#define true 1 -#define false 0 -#endif - - #include #include #include diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_emit.c b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c index 2bae85b..12ddfe9 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_emit.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c @@ -1,8 +1,4 @@ -#ifdef ANDROID -#define true 1 -#define false 0 -#endif #include "viddec_emitter.h" #include "viddec_fw_workload.h" diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c index 8671ef3..42cc3e9 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c @@ -1,9 +1,4 @@ -#ifdef ANDROID -#define true 1 -#define false 0 -#endif - #include "viddec_pm.h" #include "viddec_fw_debug.h" #include "viddec_fw_common_defs.h" diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c index 4458834..9d5c132 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c @@ -1,7 +1,3 @@ -#ifdef ANDROID -#define true 1 -#define false 0 -#endif #include "fw_pvt.h" #include "viddec_fw_parser_ipclib_config.h" diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c index 06c6cbd..9d488fe 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c @@ -1,9 +1,4 @@ -#ifdef ANDROID -#define true 1 -#define false 0 -#endif - #include "viddec_pm.h" #include "viddec_fw_debug.h" #include "viddec_parser_ops.h" diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c index df7d502..baa8330 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c @@ -1,9 +1,4 @@ -#ifdef ANDROID -#define true 1 -#define false 0 -#endif - #include "viddec_pm.h" #include "viddec_fw_debug.h" #include "viddec_fw_common_defs.h" diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c index a90242a..cecaac3 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c @@ -1,9 +1,4 @@ -#ifdef ANDROID -#define true 1 -#define false 0 -#endif - #include "viddec_pm_utils_bstream.h" #include "viddec_fw_debug.h" diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c index adacf2c..d6f6adf 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c @@ -1,9 +1,4 @@ -#ifdef ANDROID -#define true 1 -#define false 0 -#endif - #include "viddec_pm_utils_list.h" #include "viddec_fw_debug.h" diff --git a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h index acca3ce..5cbbab1 100644 --- a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h +++ b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h @@ -211,6 +211,10 @@ enum viddec_fw_mpeg2_error_codes #define false 0 #endif +#ifndef bool +typedef int bool; +#endif + #endif /* end of #ifdef VBP */ diff --git a/mix_video/ChangeLog b/mix_video/ChangeLog index 2f7a06c..654fed0 100644 --- a/mix_video/ChangeLog +++ b/mix_video/ChangeLog @@ -1,3 +1,8 @@ +2010-09-29 Andy Qiu + * Supported FLV playback + * Re-factored frame manager + * Changed version number to 0.1.20 + 2010-09-15 Tao Tao * Merged the 
changes for Android diff --git a/mix_video/configure.ac b/mix_video/configure.ac index 14f9ac2..ec50fd4 100644 --- a/mix_video/configure.ac +++ b/mix_video/configure.ac @@ -2,7 +2,7 @@ AC_INIT([""],[""],[linda.s.cline@intel.com]) AC_CONFIG_MACRO_DIR(m4) -UMG_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 19) +UMG_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 20) dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode AM_MAINTAINER_MODE diff --git a/mix_video/mixvideo.spec b/mix_video/mixvideo.spec index df31162..f8f1947 100644 --- a/mix_video/mixvideo.spec +++ b/mix_video/mixvideo.spec @@ -6,7 +6,7 @@ Summary: MIX Video Name: mixvideo -Version: 0.1.19 +Version: 0.1.20 Release: 1 Source0: %{name}-%{version}.tar.bz2 NoSource: 0 @@ -42,27 +42,40 @@ The %{name}-int-devel package contains the header files and static libraries for %prep %setup -q + %build ./autogen.sh ./configure --prefix=%{_prefix} make + %install make DESTDIR=$RPM_BUILD_ROOT install + %clean rm -rf $RPM_BUILD_ROOT + +%post -p /sbin/ldconfig + +%postun -p /sbin/ldconfig + %files %defattr(-,root,root) -%{_prefix}/lib/libmixvideo.so* +%{_prefix}/lib/libmixvideo.so.* %files devel %defattr(-,root,root) %{_prefix}/include/mix +%{_prefix}/lib/libmixvideo.so %{_prefix}/lib/*.la %{_prefix}/lib/pkgconfig/mixvideo.pc %files int-devel %defattr(-,root,root) +%{_prefix}/lib/libmixvideo.so %{_prefix}/include/mixvideoint %{_prefix}/lib/pkgconfig/mixvideoint.pc +%changelog +* Mon Sep 13 2010 John Q Public 0.0 +- Dummy changelog to satisfy rpmlint. diff --git a/mix_video/src/mixframemanager.c b/mix_video/src/mixframemanager.c index 0a843e6..6be0ace 100644 --- a/mix_video/src/mixframemanager.c +++ b/mix_video/src/mixframemanager.c @@ -13,7 +13,7 @@ #define INITIAL_FRAME_ARRAY_SIZE 16 -// Assume only one backward reference is used. This will hold up to 2 frames before forcing +// Assume only one backward reference is used. This will hold up to 2 frames before forcing // the earliest frame out of queue. #define MIX_MAX_ENQUEUE_SIZE 2 @@ -101,10 +101,10 @@ MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, MIX_RESULT ret = MIX_RESULT_SUCCESS; - if (!MIX_IS_FRAMEMANAGER(fm) || + if (!MIX_IS_FRAMEMANAGER(fm) || mode <= MIX_DISPLAY_ORDER_UNKNOWN || - mode >= MIX_DISPLAY_ORDER_LAST || - framerate_numerator <= 0 || + mode >= MIX_DISPLAY_ORDER_LAST || + framerate_numerator <= 0 || framerate_denominator <= 0) { return MIX_RESULT_INVALID_PARAM; } @@ -259,7 +259,7 @@ MIX_RESULT mix_framemanager_set_max_enqueue_size(MixFrameManager *fm, gint size) { return MIX_RESULT_FAIL; } - + g_mutex_lock(fm->lock); fm->max_enqueue_size = size; @@ -286,12 +286,12 @@ MIX_RESULT mix_framemanager_set_max_picture_number(MixFrameManager *fm, guint32 // Refer to H.264 spec: log2_max_pic_order_cnt_lsb_minus4. Max pic order will never be less than 16. return MIX_RESULT_INVALID_PARAM; } - + g_mutex_lock(fm->lock); // max_picture_number is exclusie (range from 0 to num - 1). // Note that this number may not be reliable if encoder does not conform to the spec, as of this, the - // implementaion will not automatically roll-over fm->next_frame_picnumber when it reaches + // implementaion will not automatically roll-over fm->next_frame_picnumber when it reaches // fm->max_picture_number. 
fm->max_picture_number = num; LOG_V("max picture number is %d\n", num); @@ -322,7 +322,7 @@ MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) { fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)frame); mix_videoframe_unref(frame); LOG_V("one frame is flushed\n"); - }; + }; fm->eos = FALSE; fm->is_first_frame = TRUE; @@ -377,16 +377,16 @@ MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { } #endif - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); fm->frame_list = g_slist_append(fm->frame_list, (gpointer)mvf); g_mutex_unlock(fm->lock); - + LOG_V("End\n"); return ret; } -void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) +void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) { // this function finds the lowest time stamp in the list and assign it to the dequeued video frame, // if that timestamp is smaller than the timestamp of dequeued video frame. @@ -399,7 +399,7 @@ void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) // nothing to update return; } - + // find video frame with the smallest timestamp, take rollover into account when // comparing timestamp. for (i = 0; i < len; i++) @@ -408,19 +408,19 @@ void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) mix_videoframe_get_timestamp(p, &ts); if (i == 0 || (ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) || - (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) + (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) { min_ts = ts; min_p = p; - } + } } mix_videoframe_get_timestamp(mvf, &ts); if ((ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) || - (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) + (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) { // frame to be updated has smaller time stamp - } + } else { // time stamp needs to be monotonically non-decreasing so swap timestamp. @@ -432,7 +432,7 @@ void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) } -MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) +MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { int i, num_i_or_p; MixVideoFrame *p, *first_i_or_p; @@ -441,7 +441,7 @@ MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoF num_i_or_p = 0; first_i_or_p = NULL; - + for (i = 0; i < len; i++) { p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); @@ -455,15 +455,15 @@ MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoF *mvf = p; LOG_V("B frame is dequeued.\n"); return MIX_RESULT_SUCCESS; - } - + } + if (type != TYPE_I && type != TYPE_P) { - // this should never happen + // this should never happen LOG_E("Frame typs is invalid!!!\n"); fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); mix_videoframe_unref(p); - return MIX_RESULT_FRAME_NOTAVAIL; + return MIX_RESULT_FRAME_NOTAVAIL; } num_i_or_p++; if (first_i_or_p == NULL) @@ -472,7 +472,7 @@ MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoF } } - // if there are more than one reference frame in the list, the first one is dequeued. + // if there are more than one reference frame in the list, the first one is dequeued. 
if (num_i_or_p > 1 || fm->eos) { if (first_i_or_p == NULL) @@ -493,40 +493,40 @@ MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoF else { LOG_V("P frame is dequeued.\n"); - } + } #endif - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } - - return MIX_RESULT_FRAME_NOTAVAIL; + + return MIX_RESULT_FRAME_NOTAVAIL; } -MIX_RESULT mix_framemanager_timestamp_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) +MIX_RESULT mix_framemanager_timestamp_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { int i, len; MixVideoFrame *p, *p_out_of_dated; guint64 ts, ts_next_pending, ts_out_of_dated; guint64 tolerance = fm->frame_timestamp_delta/4; -retry: +retry: // len may be changed during retry! len = g_slist_length(fm->frame_list); - ts_next_pending = (guint64)-1; + ts_next_pending = (guint64)-1; ts_out_of_dated = 0; p_out_of_dated = NULL; - - + + for (i = 0; i < len; i++) { p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); mix_videoframe_get_timestamp(p, &ts); - if (ts >= fm->last_frame_timestamp && + if (ts >= fm->last_frame_timestamp && ts <= fm->next_frame_timestamp + tolerance) { fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); *mvf = p; mix_videoframe_get_timestamp(p, &(fm->last_frame_timestamp)); - fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; + fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; LOG_V("frame is dequeud, ts = %"G_GINT64_FORMAT".\n", ts); return MIX_RESULT_SUCCESS; } @@ -536,7 +536,7 @@ retry: { ts_next_pending = ts; } - if (ts < fm->last_frame_timestamp && + if (ts < fm->last_frame_timestamp && ts >= ts_out_of_dated) { // video frame that is most recently out-of-dated. @@ -544,10 +544,10 @@ retry: // the "next frame" criteria, and the one with larger timestamp is dequeued first. 
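For orientation, the dequeue test a few lines up reduces to a window check: a frame is emitted when its timestamp falls in [last_frame_timestamp, next_frame_timestamp + tolerance], with tolerance fixed at a quarter of the nominal frame period. A sketch under exactly those assumptions:

    #include <glib.h>

    static gboolean in_dequeue_window(guint64 ts,
                                      guint64 last_frame_timestamp,
                                      guint64 next_frame_timestamp,
                                      guint64 frame_timestamp_delta)
    {
        guint64 tolerance = frame_timestamp_delta / 4;  /* as chosen above */
        return ts >= last_frame_timestamp &&
               ts <= next_frame_timestamp + tolerance;
    }

Frames that miss the window on the low side are what the surrounding loop records as out-of-date candidates.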
ts_out_of_dated = ts; p_out_of_dated = p; - } + } } - if (p_out_of_dated && + if (p_out_of_dated && fm->last_frame_timestamp - ts_out_of_dated < TS_ROLLOVER_THRESHOLD) { fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p_out_of_dated); @@ -556,10 +556,10 @@ retry: ts_out_of_dated, fm->last_frame_timestamp); return MIX_RESULT_FRAME_NOTAVAIL; } - + if (len <= fm->max_enqueue_size && fm->eos == FALSE) { - LOG_V("no frame is dequeued, expected ts = %"G_GINT64_FORMAT", next pending ts = %"G_GINT64_FORMAT".(List size = %d)\n", + LOG_V("no frame is dequeued, expected ts = %"G_GINT64_FORMAT", next pending ts = %"G_GINT64_FORMAT".(List size = %d)\n", fm->next_frame_timestamp, ts_next_pending, len); return MIX_RESULT_FRAME_NOTAVAIL; } @@ -569,13 +569,13 @@ retry: { LOG_V("timestamp has gap, jumping from %"G_GINT64_FORMAT" to %"G_GINT64_FORMAT".\n", fm->next_frame_timestamp, ts_next_pending); - + fm->next_frame_timestamp = ts_next_pending; goto retry; } // time stamp roll-over - LOG_V("time stamp is rolled over, resetting next frame timestamp from %"G_GINT64_FORMAT" to 0.\n", + LOG_V("time stamp is rolled over, resetting next frame timestamp from %"G_GINT64_FORMAT" to 0.\n", fm->next_frame_timestamp); fm->next_frame_timestamp = 0; @@ -587,19 +587,18 @@ retry: return MIX_RESULT_FAIL; } -MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) +MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { int i, len; MixVideoFrame* p; - guint32 picnum, smallest_picnum; + guint32 picnum; guint32 next_picnum_pending; len = g_slist_length(fm->frame_list); -retry: +retry: next_picnum_pending = (guint32)-1; - smallest_picnum = (guint32)-1; - + for (i = 0; i < len; i++) { p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); @@ -608,11 +607,11 @@ retry: { fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); mix_framemanager_update_timestamp(fm, p); - *mvf = p; + *mvf = p; LOG_V("frame is dequeued, poc = %d.\n", fm->next_frame_picnumber); fm->next_frame_picnumber++; //if (fm->next_frame_picnumber == fm->max_picture_number) - // fm->next_frame_picnumber = 0; + // fm->next_frame_picnumber = 0; return MIX_RESULT_SUCCESS; } @@ -623,42 +622,36 @@ retry: } if (picnum < fm->next_frame_picnumber && - picnum < smallest_picnum) + fm->next_frame_picnumber - picnum < 8) { - smallest_picnum = picnum; + // the smallest value of "max_pic_order_cnt_lsb_minus4" is 16. If the distance of "next frame pic number" + // to the pic number in the list is less than half of 16, it is safe to assume that pic number + // is reset when a new IDR is encoded. (where pic numbfer of top or bottom field must be 0, subclause 8.2.1). + LOG_V("picture number is reset to %d, next pic number is %d, next pending number is %d.\n", + picnum, fm->next_frame_picnumber, next_picnum_pending); + break; } } - if (smallest_picnum != (guint32)-1 && fm->next_frame_picnumber - smallest_picnum < 8) - { - // the smallest value of "max_pic_order_cnt_lsb_minus4" is 16. If the distance of "next frame pic number" - // to the smallest pic number in the list is less than half of 16, it is safely to assume that pic number - // is reset when an new IDR is encoded. (where pic numbfer of top or bottom field must be 0, subclause 8.2.1). 
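Spelling out the heuristic just introduced: the smallest legal POC lsb range is 16 (log2_max_pic_order_cnt_lsb_minus4 = 0), so a picture number fewer than 8 below the expected one is better explained by an IDR having reset the count than by a stale frame. A sketch of the bare predicate, with hypothetical naming:

    #include <glib.h>

    /* TRUE when picnum looks like a post-IDR reset rather than an old frame
     * (subclause 8.2.1: the first field of an IDR picture has POC 0). */
    static gboolean looks_like_idr_reset(guint32 picnum, guint32 expected)
    {
        return picnum < expected && expected - picnum < 8;
    }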
- LOG_V("next frame number is reset from %d to 0, smallest picnumber in list (size = %d) is %d.\n", - fm->next_frame_picnumber, len, smallest_picnum); - fm->next_frame_picnumber = 0; - goto retry; - } - if (len <= fm->max_enqueue_size && fm->eos == FALSE) { - LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. (List size = %d)\n", + LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. (List size = %d)\n", fm->next_frame_picnumber, next_picnum_pending, len); return MIX_RESULT_FRAME_NOTAVAIL; } // picture number has gap - if (next_picnum_pending != -1) + if (next_picnum_pending != (guint32)-1) { LOG_V("picture number has gap, jumping from %d to %d.\n", fm->next_frame_picnumber, next_picnum_pending); - + fm->next_frame_picnumber = next_picnum_pending; goto retry; } // picture number roll-over - LOG_V("picture number is rolled over, resetting next picnum from %d to 0.\n", + LOG_V("picture number is rolled over, resetting next picnum from %d to 0.\n", fm->next_frame_picnumber); fm->next_frame_picnumber = 0; @@ -700,29 +693,29 @@ MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { { LOG_V("No frame is dequeued as queue is empty!\n"); ret = MIX_RESULT_FRAME_NOTAVAIL; - } + } } else if (fm->is_first_frame) { // dequeue the first entry in the list. Not need to update the time stamp as // the list should contain only one frame. -#ifdef MIX_LOG_ENABLE +#ifdef MIX_LOG_ENABLE if (g_slist_length(fm->frame_list) != 1) { - LOG_W("length of list is not equal to 1 for the first frame.\n"); + LOG_W("length of list is not equal to 1 for the first frame.\n"); } -#endif +#endif *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0); fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf)); if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP) - { - mix_videoframe_get_timestamp(*mvf, &(fm->last_frame_timestamp)); + { + mix_videoframe_get_timestamp(*mvf, &(fm->last_frame_timestamp)); fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; LOG_V("The first frame is dequeued, ts = %"G_GINT64_FORMAT"\n", fm->last_frame_timestamp); } else if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER) - { + { mix_videoframe_get_displayorder(*mvf, &(fm->next_frame_picnumber)); LOG_V("The first frame is dequeued, POC = %d\n", fm->next_frame_picnumber); fm->next_frame_picnumber++; @@ -731,15 +724,15 @@ MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { } else { -#ifdef MIX_LOG_ENABLE +#ifdef MIX_LOG_ENABLE MixFrameType type; mix_videoframe_get_frame_type(*mvf, &type); LOG_V("The first frame is dequeud, frame type is %d.\n", type); -#endif +#endif } fm->is_first_frame = FALSE; - - ret = MIX_RESULT_SUCCESS; + + ret = MIX_RESULT_SUCCESS; } else { @@ -758,14 +751,14 @@ MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { ret = mix_framemanager_pictype_based_dequeue(fm, mvf); break; - case MIX_DISPLAY_ORDER_FIFO: + case MIX_DISPLAY_ORDER_FIFO: *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0); fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf)); - ret = MIX_RESULT_SUCCESS; + ret = MIX_RESULT_SUCCESS; LOG_V("One frame is dequeued.\n"); break; - - default: + + default: LOG_E("Invalid frame order mode\n"); ret = MIX_RESULT_FAIL; break; @@ -791,7 +784,7 @@ MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) { return MIX_RESULT_NOT_INIT; } - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); fm->eos = TRUE; LOG_V("EOS is received.\n"); g_mutex_unlock(fm->lock); diff 
--git a/mix_video/src/mixvideoformat_h264.c b/mix_video/src/mixvideoformat_h264.c index 6464177..04d517d 100644 --- a/mix_video/src/mixvideoformat_h264.c +++ b/mix_video/src/mixvideoformat_h264.c @@ -18,8 +18,6 @@ static int mix_video_h264_counter = 0; #endif /* MIX_LOG_ENABLE */ -#define DECODER_ROBUSTNESS - /* The parent class. The pointer will be saved * in this class's initialization. The pointer * can be used for chaining method call if needed. @@ -40,6 +38,9 @@ static void mix_videoformat_h264_init(MixVideoFormat_H264 * self) { /* These are all public because MixVideoFormat objects are completely internal to MixVideo, no need for private members */ self->dpb_surface_table = NULL; +#ifdef DECODER_ROBUSTNESS + self->last_decoded_frame = NULL; +#endif /* NOTE: we don't need to do this here. * This just demostrates how to access @@ -165,7 +166,7 @@ MIX_RESULT mix_videofmt_h264_initialize_va( VAConfigAttrib attrib; MixVideoFormat *parent = MIX_VIDEOFORMAT(mix); - MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); if (parent->va_initialized) { @@ -183,11 +184,11 @@ MIX_RESULT mix_videofmt_h264_initialize_va( //Initialize and save the VA config ID //We use high profile for all kinds of H.264 profiles (baseline, main and high) vret = vaCreateConfig( - parent->va_display, - VAProfileH264High, - VAEntrypointVLD, - &attrib, - 1, + parent->va_display, + VAProfileH264High, + VAEntrypointVLD, + &attrib, + 1, &(parent->va_config)); if (vret != VA_STATUS_SUCCESS) @@ -207,7 +208,7 @@ MIX_RESULT mix_videofmt_h264_initialize_va( //Adding 1 to work around VBLANK issue, and another 1 to compensate cached frame that // will not start decoding until a new frame is received. parent->va_num_surfaces = 1 + 1 + parent->extra_surfaces + (((num_ref_pictures + 3) < - MIX_VIDEO_H264_SURFACE_NUM) ? + MIX_VIDEO_H264_SURFACE_NUM) ? 
(num_ref_pictures + 3) : MIX_VIDEO_H264_SURFACE_NUM); @@ -219,17 +220,17 @@ MIX_RESULT mix_videofmt_h264_initialize_va( goto cleanup; } - LOG_V( "Codec data says picture size is %d x %d\n", - (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, + LOG_V( "Codec data says picture size is %d x %d\n", + (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16); LOG_V( "getcaps says picture size is %d x %d\n", parent->picture_width, parent->picture_height); vret = vaCreateSurfaces( - parent->va_display, - (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, - (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16, + parent->va_display, + (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, + (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16, VA_RT_FORMAT_YUV420, - parent->va_num_surfaces, + parent->va_num_surfaces, parent->va_surfaces); if (vret != VA_STATUS_SUCCESS) @@ -244,8 +245,8 @@ MIX_RESULT mix_videofmt_h264_initialize_va( //Initialize the surface pool ret = mix_surfacepool_initialize( parent->surfacepool, - parent->va_surfaces, - parent->va_num_surfaces, + parent->va_surfaces, + parent->va_num_surfaces, parent->va_display); switch (ret) @@ -265,16 +266,16 @@ MIX_RESULT mix_videofmt_h264_initialize_va( int max = (int)pow(2, data->codec_data->log2_max_pic_order_cnt_lsb_minus4 + 4); mix_framemanager_set_max_picture_number(parent->framemgr, max); } - + //Initialize and save the VA context ID //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 vret = vaCreateContext( - parent->va_display, + parent->va_display, parent->va_config, - parent->picture_width, + parent->picture_width, parent->picture_height, 0, // no flag set - parent->va_surfaces, + parent->va_surfaces, parent->va_num_surfaces, &(parent->va_context)); @@ -288,7 +289,7 @@ MIX_RESULT mix_videofmt_h264_initialize_va( parent->va_initialized = TRUE; cleanup: - /* nothing to clean up */ + /* nothing to clean up */ return ret; @@ -296,20 +297,20 @@ cleanup: MIX_RESULT mix_videofmt_h264_update_ref_pic_list( - MixVideoFormat *mix, + MixVideoFormat *mix, VAPictureParameterBufferH264* picture_params, VASliceParameterBufferH264* slice_params) { - MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); - + MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + //Do slice parameters - + //First patch up the List0 and List1 surface IDs int j = 0; - guint poc = 0; + guint poc = 0; gpointer video_frame = NULL; MIX_RESULT ret = MIX_RESULT_SUCCESS; - + for (; j <= slice_params->num_ref_idx_l0_active_minus1; j++) { if (!(slice_params->RefPicList0[j].flags & VA_PICTURE_H264_INVALID)) @@ -318,20 +319,20 @@ MIX_RESULT mix_videofmt_h264_update_ref_pic_list( video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc); if (video_frame == NULL) { - LOG_E("unable to find surface of picture %d (current picture %d).", + LOG_E("unable to find surface of picture %d (current picture %d).", poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic))); ret = MIX_RESULT_DROPFRAME; //return non-fatal error goto cleanup; } else { - slice_params->RefPicList0[j].picture_id = + slice_params->RefPicList0[j].picture_id = ((MixVideoFrame *)video_frame)->frame_id; } } - + } - + if ((slice_params->slice_type == 1) || (slice_params->slice_type == 6)) { for (j = 0; j <= slice_params->num_ref_idx_l1_active_minus1; j++) @@ -342,14 +343,14 @@ MIX_RESULT mix_videofmt_h264_update_ref_pic_list( 
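Both reference-list loops in mix_videofmt_h264_update_ref_pic_list() turn on the same lookup: the POC computed from a VAPictureH264 entry keys dpb_surface_table, and the stored frame supplies the real surface id. A standalone sketch of that mapping, where ExampleFrame stands in for MixVideoFrame:

    #include <glib.h>

    typedef struct { gulong frame_id; } ExampleFrame;  /* stand-in type */

    /* Returns FALSE when the POC has no mapped surface, which the caller
     * above treats as a droppable, non-fatal error (MIX_RESULT_DROPFRAME). */
    static gboolean lookup_surface_by_poc(GHashTable *dpb_surface_table,
                                          guint poc, gulong *surface_out)
    {
        gpointer f = g_hash_table_lookup(dpb_surface_table,
                                         GUINT_TO_POINTER(poc));
        if (f == NULL)
            return FALSE;
        *surface_out = ((ExampleFrame *)f)->frame_id;
        return TRUE;
    }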
video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc); if (video_frame == NULL) { - LOG_E("unable to find surface of picture %d (current picture %d).", + LOG_E("unable to find surface of picture %d (current picture %d).", poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic))); ret = MIX_RESULT_DROPFRAME; //return non-fatal error goto cleanup; } else - { - slice_params->RefPicList1[j].picture_id = + { + slice_params->RefPicList1[j].picture_id = ((MixVideoFrame *)video_frame)->frame_id; } } @@ -368,7 +369,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_slice( vbp_data_h264 *data, int picture_index, int slice_index) -{ +{ MIX_RESULT ret = MIX_RESULT_SUCCESS; VAStatus vret = VA_STATUS_SUCCESS; VADisplay vadisplay = NULL; @@ -409,7 +410,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_slice( } // for interlace content, top field may be valid only after the second field is parsed - mix_videoframe_set_displayorder(mix->video_frame, pic_params->CurrPic.TopFieldOrderCnt); + mix_videoframe_set_displayorder(mix->video_frame, pic_params->CurrPic.TopFieldOrderCnt); } gulong surface = 0; @@ -417,7 +418,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_slice( LOG_V("mix->video_frame = 0x%x\n", mix->video_frame); //Get our surface ID from the frame object - ret = mix_videoframe_get_frame_id(mix->video_frame, &surface); + ret = mix_videoframe_get_frame_id(mix->video_frame, &surface); if (ret != MIX_RESULT_SUCCESS) { LOG_E( "Error getting surface ID from frame object\n"); @@ -453,7 +454,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_slice( goto cleanup; } - // vaBeginPicture needs a matching vaEndPicture + // vaBeginPicture needs a matching vaEndPicture mix->end_picture_pending = TRUE; #else @@ -468,7 +469,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_slice( goto cleanup; } - // vaBeginPicture needs a matching vaEndPicture + // vaBeginPicture needs a matching vaEndPicture mix->end_picture_pending = TRUE; LOG_V( "Updating DPB for libva\n"); @@ -496,7 +497,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_slice( //First the picture parameter buffer vret = vaCreateBuffer( - vadisplay, + vadisplay, vacontext, VAPictureParameterBufferType, sizeof(VAPictureParameterBufferH264), @@ -532,7 +533,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_slice( LOG_E( "Video driver returned error from vaCreateBuffer\n"); goto cleanup; } - buffer_id_cnt++; + buffer_id_cnt++; } #ifndef DECODER_ROBUSTNESS @@ -556,7 +557,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_slice( LOG_V( "Creating libva slice parameter buffer\n"); vret = vaCreateBuffer( - vadisplay, + vadisplay, vacontext, VASliceParameterBufferType, sizeof(VASliceParameterBufferH264), @@ -581,11 +582,11 @@ MIX_RESULT mix_videofmt_h264_decode_a_slice( // offset to the actual slice data is provided in // slice_data_offset in VASliceParameterBufferH264 - LOG_V( "Creating libva slice data buffer, using slice address %x, with offset %d and size %u\n", + LOG_V( "Creating libva slice data buffer, using slice address %x, with offset %d and size %u\n", (guint)slice_data->buffer_addr, slice_params->slice_data_offset, slice_data->slice_size); vret = vaCreateBuffer( - vadisplay, + vadisplay, vacontext, VASliceDataBufferType, slice_data->slice_size, //size @@ -629,13 +630,17 @@ cleanup: MIX_RESULT mix_videofmt_h264_decode_end( - MixVideoFormat *mix, + MixVideoFormat *mix, gboolean drop_picture) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; + MIX_RESULT ret = MIX_RESULT_SUCCESS; VAStatus vret = VA_STATUS_SUCCESS; MixVideoFormat* parent = MIX_VIDEOFORMAT(mix); - //MixVideoFormat_H264 *self = 
MIX_VIDEOFORMAT_H264(mix); +#ifdef DECODER_ROBUSTNESS + MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); +#else + //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); +#endif LOG_V("Begin\n"); @@ -647,7 +652,7 @@ MIX_RESULT mix_videofmt_h264_decode_end( LOG_E("Unexpected: video_frame is not unreferenced.\n"); } goto cleanup; - } + } if (parent->video_frame == NULL) { @@ -655,8 +660,8 @@ MIX_RESULT mix_videofmt_h264_decode_end( LOG_E("Unexpected: video_frame has been unreferenced.\n"); goto cleanup; } - - LOG_V( "Calling vaEndPicture\n"); + + LOG_V( "Calling vaEndPicture\n"); vret = vaEndPicture(parent->va_display, parent->va_context); if (vret != VA_STATUS_SUCCESS) @@ -689,8 +694,15 @@ MIX_RESULT mix_videofmt_h264_decode_end( goto cleanup; } - LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", - parent->current_timestamp); + LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", + parent->video_frame->timestamp); + +#ifdef DECODER_ROBUSTNESS + if (self->last_decoded_frame) + mix_videoframe_unref(self->last_decoded_frame); + self->last_decoded_frame = parent->video_frame; + mix_videoframe_ref(self->last_decoded_frame); +#endif //Enqueue the decoded frame using frame manager ret = mix_framemanager_enqueue(parent->framemgr, parent->video_frame); @@ -709,7 +721,7 @@ MIX_RESULT mix_videofmt_h264_decode_end( cleanup: if (parent->video_frame) { - /* this always indicates an error */ + /* this always indicates an error */ mix_videoframe_unref(parent->video_frame); parent->video_frame = NULL; } @@ -721,7 +733,7 @@ cleanup: MIX_RESULT mix_videofmt_h264_decode_continue( - MixVideoFormat *mix, + MixVideoFormat *mix, vbp_data_h264 *data) { MIX_RESULT ret = MIX_RESULT_SUCCESS; @@ -745,26 +757,26 @@ MIX_RESULT mix_videofmt_h264_decode_continue( ret = MIX_RESULT_FAIL; LOG_E("pic_data->slc_data is NULL.\n"); goto cleanup; - } + } if (pic_data->num_slices == 0) { ret = MIX_RESULT_FAIL; LOG_E("pic_data->num_slices == 0.\n"); goto cleanup; - } + } - LOG_V( "num_slices is %d\n", pic_data->num_slices); + LOG_V( "num_slices is %d\n", pic_data->num_slices); for (j = 0; j < pic_data->num_slices; j++) { - LOG_V( "Decoding slice %d\n", j); + LOG_V( "Decoding slice %d\n", j); ret = mix_videofmt_h264_decode_a_slice(mix, data, i, j); if (ret != MIX_RESULT_SUCCESS) { LOG_E( "mix_videofmt_h264_decode_a_slice failed, error = %#X.", ret); goto cleanup; - } - } + } + } } cleanup: @@ -776,9 +788,9 @@ cleanup: MIX_RESULT mix_videofmt_h264_set_frame_type( - MixVideoFormat *mix, + MixVideoFormat *mix, vbp_data_h264 *data) -{ +{ MIX_RESULT ret = MIX_RESULT_SUCCESS; //Set the picture type (I, B or P frame) @@ -810,7 +822,7 @@ MIX_RESULT mix_videofmt_h264_set_frame_type( //Do not have to check for B frames after a seek //Note: Demux should seek to IDR (instantaneous decoding refresh) frame, otherwise // DPB will not be correct and frames may come in with invalid references - // This will be detected when DPB is checked for valid mapped surfaces and + // This will be detected when DPB is checked for valid mapped surfaces and // error returned from there. 
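The DECODER_ROBUSTNESS bookkeeping added to mix_videofmt_h264_decode_end() above is a plain ref-counted swap: release whatever was cached, cache the frame just decoded, and take a reference on it. A self-contained sketch with a toy ref-counted type in place of MixVideoFrame:

    #include <stdlib.h>

    typedef struct { int refcount; } Frame;  /* stand-in for MixVideoFrame */

    static Frame *frame_ref(Frame *f)   { f->refcount++; return f; }
    static void   frame_unref(Frame *f) { if (--f->refcount == 0) free(f); }

    /* newest is assumed non-NULL, matching the NULL check the caller
     * performs on video_frame before reaching this point. */
    static void cache_last_decoded(Frame **slot, Frame *newest)
    {
        if (*slot)
            frame_unref(*slot);
        *slot = frame_ref(newest);
    }

The cached frame is what the HACK_DPB path later substitutes when a reference picture is missing from the DPB table.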
LOG_V( "frame type is %d\n", frame_type); @@ -828,14 +840,14 @@ MIX_RESULT mix_videofmt_h264_set_frame_type( MIX_RESULT mix_videofmt_h264_set_frame_structure( - MixVideoFormat *mix, + MixVideoFormat *mix, vbp_data_h264 *data) -{ +{ MIX_RESULT ret = MIX_RESULT_SUCCESS; if (data->pic_data[0].pic_parms->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD) { - mix_videoframe_set_frame_structure(mix->video_frame, VA_BOTTOM_FIELD | VA_TOP_FIELD); + mix_videoframe_set_frame_structure(mix->video_frame, VA_BOTTOM_FIELD | VA_TOP_FIELD); } else { @@ -847,7 +859,7 @@ MIX_RESULT mix_videofmt_h264_set_frame_structure( MIX_RESULT mix_videofmt_h264_decode_begin( - MixVideoFormat *mix, + MixVideoFormat *mix, vbp_data_h264 *data) { MIX_RESULT ret = MIX_RESULT_SUCCESS; @@ -859,7 +871,7 @@ MIX_RESULT mix_videofmt_h264_decode_begin( if (ret != MIX_RESULT_SUCCESS) { LOG_E( "Error getting frame from surfacepool\n"); - return ret; + return ret; } /* the following calls will always succeed */ @@ -874,10 +886,10 @@ MIX_RESULT mix_videofmt_h264_decode_begin( mix_videoframe_set_discontinuity(mix->video_frame, mix->discontinuity_frame_in_progress); //Set the timestamp - mix_videoframe_set_timestamp(mix->video_frame, mix->current_timestamp); + mix_videoframe_set_timestamp(mix->video_frame, mix->current_timestamp); // Set displayorder - ret = mix_videoframe_set_displayorder(mix->video_frame, + ret = mix_videoframe_set_displayorder(mix->video_frame, data->pic_data[0].pic_parms->CurrPic.TopFieldOrderCnt); if(ret != MIX_RESULT_SUCCESS) { @@ -894,10 +906,10 @@ MIX_RESULT mix_videofmt_h264_decode_begin( MIX_RESULT mix_videofmt_h264_decode_a_buffer( - MixVideoFormat *mix, + MixVideoFormat *mix, MixBuffer * bufin, guint64 ts, - gboolean discontinuity) + gboolean discontinuity) { uint32 pret = 0; MixVideoFormat *parent = NULL; @@ -909,8 +921,8 @@ MIX_RESULT mix_videofmt_h264_decode_a_buffer( parent = MIX_VIDEOFORMAT(mix); LOG_V( "Calling parse for current frame, parse handle %d\n", (int)parent->parser_handle); - pret = vbp_parse(parent->parser_handle, - bufin->data, + pret = vbp_parse(parent->parser_handle, + bufin->data, bufin->size, FALSE); @@ -938,7 +950,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_buffer( { ret = MIX_RESULT_SUCCESS; LOG_V("SPS or PPS is not available.\n"); - goto cleanup; + goto cleanup; } if (parent->va_initialized == FALSE) @@ -946,9 +958,9 @@ MIX_RESULT mix_videofmt_h264_decode_a_buffer( LOG_V("try initializing VA...\n"); ret = mix_videofmt_h264_initialize_va(parent, data); if (ret != MIX_RESULT_SUCCESS) - { + { LOG_V("mix_videofmt_h264_initialize_va failed.\n"); - goto cleanup; + goto cleanup; } } @@ -957,10 +969,10 @@ MIX_RESULT mix_videofmt_h264_decode_a_buffer( { ret = MIX_RESULT_SUCCESS; LOG_V("slice is not available.\n"); - goto cleanup; + goto cleanup; } - - guint64 last_ts = parent->current_timestamp; + + guint64 last_ts = parent->current_timestamp; parent->current_timestamp = ts; parent->discontinuity_frame_in_progress = discontinuity; @@ -971,18 +983,18 @@ MIX_RESULT mix_videofmt_h264_decode_a_buffer( // finish decoding the last frame ret = mix_videofmt_h264_decode_end(parent, FALSE); if (ret != MIX_RESULT_SUCCESS) - { + { LOG_V("mix_videofmt_h264_decode_end failed.\n"); - goto cleanup; + goto cleanup; } // start decoding a new frame - ret = mix_videofmt_h264_decode_begin(parent, data); + ret = mix_videofmt_h264_decode_begin(parent, data); if (ret != MIX_RESULT_SUCCESS) - { + { LOG_V("mix_videofmt_h264_decode_begin failed.\n"); - goto cleanup; - } + goto cleanup; + } } else { @@ -990,10 +1002,10 @@ 
MIX_RESULT mix_videofmt_h264_decode_a_buffer( LOG_V("partial frame handling...\n"); ret = mix_videofmt_h264_decode_continue(parent, data); if (ret != MIX_RESULT_SUCCESS) - { + { LOG_V("mix_videofmt_h264_decode_continue failed.\n"); - goto cleanup; - } + goto cleanup; + } } cleanup: @@ -1004,7 +1016,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_buffer( } -MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, +MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, MixVideoConfigParamsDec * config_params, MixFrameManager * frame_mgr, MixBufferPool * input_buf_pool, @@ -1032,7 +1044,7 @@ MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, if (parent_class->initialize) { ret = parent_class->initialize(mix, config_params, - frame_mgr, input_buf_pool, surface_pool, + frame_mgr, input_buf_pool, surface_pool, va_display); } @@ -1064,9 +1076,9 @@ MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, //Create our table of Decoded Picture Buffer "in use" surfaces self->dpb_surface_table = g_hash_table_new_full( - NULL, - NULL, - mix_videofmt_h264_destroy_DPB_key, + NULL, + NULL, + mix_videofmt_h264_destroy_DPB_key, mix_videofmt_h264_destroy_DPB_value); if (self->dpb_surface_table == NULL) @@ -1085,7 +1097,7 @@ MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, ret = MIX_RESULT_FAIL; LOG_E( "Cannot get extra surface allocation setting\n"); goto cleanup; - } + } LOG_V( "Before vbp_open\n"); //Load the bitstream parser @@ -1115,7 +1127,7 @@ MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, pret = vbp_parse( parent->parser_handle, - header->data, + header->data, header->data_size, TRUE); @@ -1144,8 +1156,13 @@ MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, pic_width_in_codec_data = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16; pic_height_in_codec_data = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16; mix_videoconfigparamsdec_set_picture_res (config_params, pic_width_in_codec_data, pic_height_in_codec_data); - parent->picture_width = pic_width_in_codec_data; - parent->picture_height = pic_height_in_codec_data; + + if (parent->picture_width == 0 || parent->picture_height == 0) + { + // Update picture resolution only if it is not set. The derived picture res from mbs may not be accurate. 
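To make the guard concrete: sizes derived from macroblock counts are multiples of 16, so a 1920x1080 stream reports 1920x1088 here, and blindly copying that over a correctly configured resolution would be a regression. The assignments below therefore only fill in dimensions that were never set. A trivial sketch of the rule:

    #include <glib.h>

    /* Use the macroblock-derived size only as a fallback. */
    static void maybe_set_resolution(guint *width, guint *height,
                                     guint mb_w, guint mb_h)
    {
        if (*width == 0 || *height == 0) {
            *width  = mb_w;
            *height = mb_h;
        }
    }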
+ parent->picture_width = pic_width_in_codec_data; + parent->picture_height = pic_height_in_codec_data; + } ret = mix_videofmt_h264_initialize_va(mix, data); if (ret != MIX_RESULT_SUCCESS) @@ -1154,7 +1171,6 @@ MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, goto cleanup; } - cleanup: if (ret != MIX_RESULT_SUCCESS) { pret = vbp_close(parent->parser_handle); @@ -1182,9 +1198,9 @@ MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, } MIX_RESULT mix_videofmt_h264_decode( - MixVideoFormat *mix, + MixVideoFormat *mix, MixBuffer * bufin[], - gint bufincnt, + gint bufincnt, MixVideoDecodeParams * decode_params) { int i = 0; @@ -1239,11 +1255,11 @@ MIX_RESULT mix_videofmt_h264_decode( for (i = 0; i < bufincnt; i++) { - LOG_V( "Decoding a buf %x, size %d\n", (guint)bufin[i]->data, bufin[i]->size); - + LOG_V( "Decoding a buf %x, size %d\n", (guint)bufin[i]->data, bufin[i]->size); + // decode a buffer at a time ret = mix_videofmt_h264_decode_a_buffer( - mix, + mix, bufin[i], ts, discontinuity); @@ -1252,7 +1268,7 @@ MIX_RESULT mix_videofmt_h264_decode( { LOG_E("mix_videofmt_h264_decode_a_buffer failed.\n"); goto cleanup; - } + } } @@ -1296,7 +1312,7 @@ MIX_RESULT mix_videofmt_h264_flush(MixVideoFormat *mix) { // drop any decode-pending picture, and ignore return value mix_videofmt_h264_decode_end(mix, TRUE); - + //Clear parse_in_progress flag and current timestamp mix->parse_in_progress = FALSE; mix->discontinuity_frame_in_progress = FALSE; @@ -1343,7 +1359,7 @@ MIX_RESULT mix_videofmt_h264_eos(MixVideoFormat *mix) { // finished decoding the pending frame mix_videofmt_h264_decode_end(mix, FALSE); - + g_mutex_unlock(mix->objectlock); //Call Frame Manager with _eos() @@ -1385,9 +1401,9 @@ MIX_RESULT mix_videofmt_h264_deinitialize(MixVideoFormat *mix) { #define HACK_DPB #ifdef HACK_DPB -static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, +static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, vbp_picture_data_h264* pic_data - ) + ) { gboolean found = FALSE; @@ -1402,9 +1418,9 @@ static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, for (i = 0; i < 16; i++) { pic_params->ReferenceFrames[i].picture_id = VA_INVALID_SURFACE; - pic_params->ReferenceFrames[i].frame_idx = -1; - pic_params->ReferenceFrames[i].TopFieldOrderCnt = -1; - pic_params->ReferenceFrames[i].BottomFieldOrderCnt = -1; + pic_params->ReferenceFrames[i].frame_idx = -1; + pic_params->ReferenceFrames[i].TopFieldOrderCnt = -1; + pic_params->ReferenceFrames[i].BottomFieldOrderCnt = -1; pic_params->ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID; //assuming we don't need to OR with existing flags } @@ -1417,7 +1433,7 @@ static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, pRefList = pic_data->slc_data[i].slc_parms.RefPicList0; for (list = 0; list < 2; list++) { - for (j = 0; j < 32; j++) + for (j = 0; j < 32; j++) { if (pRefList[j].flags & VA_PICTURE_H264_INVALID) { @@ -1443,20 +1459,41 @@ static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, guint poc = mix_videofmt_h264_get_poc(&(pRefList[j])); gpointer video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc); +#ifdef DECODER_ROBUSTNESS + if (!video_frame) + { + if (!self->last_decoded_frame) + { + //No saved reference frame, can't recover this one + return MIX_RESULT_DROPFRAME; + } + + pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = + ((MixVideoFrame *)self->last_decoded_frame)->frame_id; + LOG_V( "Reference frame not found, substituting 
%d\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); + + } + else + { + pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = + ((MixVideoFrame *)video_frame)->frame_id; + } +#else if (!video_frame) return MIX_RESULT_DROPFRAME; //return non-fatal error - pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = + pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = ((MixVideoFrame *)video_frame)->frame_id; +#endif LOG_V( "Inserting frame id %d into DPB\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); - pic_params->ReferenceFrames[pic_params->num_ref_frames].flags = + pic_params->ReferenceFrames[pic_params->num_ref_frames].flags = pRefList[j].flags; - pic_params->ReferenceFrames[pic_params->num_ref_frames].frame_idx = + pic_params->ReferenceFrames[pic_params->num_ref_frames].frame_idx = pRefList[j].frame_idx; - pic_params->ReferenceFrames[pic_params->num_ref_frames].TopFieldOrderCnt = + pic_params->ReferenceFrames[pic_params->num_ref_frames].TopFieldOrderCnt = pRefList[j].TopFieldOrderCnt; - pic_params->ReferenceFrames[pic_params->num_ref_frames++].BottomFieldOrderCnt = + pic_params->ReferenceFrames[pic_params->num_ref_frames++].BottomFieldOrderCnt = pRefList[j].BottomFieldOrderCnt; } @@ -1469,9 +1506,9 @@ static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, } #endif - -MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix, + +MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix, VAPictureParameterBufferH264* pic_params, MixVideoFrame * current_frame ) { @@ -1550,11 +1587,11 @@ MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix, pic_params->CurrPic.picture_id = current_frame->frame_id; //Check to see if current frame is a reference frame - if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || + if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { //Get current frame's POC - poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); + poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); //Increment the reference count for this frame mix_videoframe_ref(current_frame); @@ -1569,7 +1606,7 @@ MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix, return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videofmt_h264_cleanup_ref_frame(MixVideoFormat *mix, +MIX_RESULT mix_videofmt_h264_cleanup_ref_frame(MixVideoFormat *mix, VAPictureParameterBufferH264* pic_params, MixVideoFrame * current_frame ) { @@ -1593,10 +1630,9 @@ MIX_RESULT mix_videofmt_h264_cleanup_ref_frame(MixVideoFormat *mix, if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { //Get current frame's POC - poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); + poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); - //Decrement the reference count for this frame -// mix_videoframe_unref(current_frame); + //We don't need to decrement the ref count for the video frame here; it's done elsewhere LOG_V( "Removing poc %d, surfaceID %d\n", poc, (gint)current_frame->frame_id); //Remove this frame from the DPB surface table @@ -1639,7 +1675,7 @@ gboolean mix_videofmt_h264_check_in_DPB(gpointer key, gpointer value, gpointer u vaPic = &(((VAPictureParameterBufferH264*)user_data)->ReferenceFrames[i]); if (vaPic->flags & VA_PICTURE_H264_INVALID) continue; - + if ((guint)key == 
vaPic->TopFieldOrderCnt || (guint)key == vaPic->BottomFieldOrderCnt) { @@ -1673,7 +1709,7 @@ void mix_videofmt_h264_destroy_DPB_value(gpointer data) } -MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix, +MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix, guint64 timestamp ) { diff --git a/mix_video/src/mixvideoformat_h264.h b/mix_video/src/mixvideoformat_h264.h index b6d062e..84bfc78 100644 --- a/mix_video/src/mixvideoformat_h264.h +++ b/mix_video/src/mixvideoformat_h264.h @@ -12,6 +12,8 @@ #include "mixvideoformat.h" #include "mixvideoframe_private.h" +#define DECODER_ROBUSTNESS + G_BEGIN_DECLS #define MIX_VIDEO_H264_SURFACE_NUM 20 @@ -37,6 +39,10 @@ struct _MixVideoFormat_H264 { /*< private > */ GHashTable *dpb_surface_table; +#ifdef DECODER_ROBUSTNESS + //Can improve which frame is used for this at a later time + MixVideoFrame *last_decoded_frame; //last surface decoded, to be used as reference frame when reference frames are missing +#endif }; /** @@ -90,7 +96,7 @@ MixVideoFormat_H264 *mix_videoformat_h264_ref(MixVideoFormat_H264 * mix); /* H.264 vmethods */ MIX_RESULT mix_videofmt_h264_getcaps(MixVideoFormat *mix, GString *msg); -MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, +MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, MixVideoConfigParamsDec * config_params, MixFrameManager * frame_mgr, MixBufferPool * input_buf_pool, @@ -110,12 +116,12 @@ MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix, MIX_RESULT mix_videofmt_h264_process_decode(MixVideoFormat *mix, - vbp_data_h264 *data, + vbp_data_h264 *data, guint64 timestamp, gboolean discontinuity); -MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix, +MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix, guint64 timestamp); diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c index 336b4d8..32e1bdb 100644 --- a/mix_video/src/mixvideoformatenc_h264.c +++ b/mix_video/src/mixvideoformatenc_h264.c @@ -1117,6 +1117,8 @@ MIX_RESULT mix_videofmtenc_h264_send_slice_parameter (MixVideoFormatEnc_H264 *mi slice_height += 15; slice_height &= (~15); + slice_num = mix->slice_num = (parent->picture_height + 15) / slice_height; + #if 1 va_status = vaCreateBuffer (parent->va_display, parent->va_context, VAEncSliceParameterBufferType, -- cgit v1.2.3 From a6dc6205da274b2ce2a5d4c218f2e076e5549063 Mon Sep 17 00:00:00 2001 From: Tao Tao Date: Fri, 22 Oct 2010 16:03:58 -0700 Subject: Fixed memory leak in H.264 encoder. 
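In brief: MixVideoFormatEnc_H264 gains a lookup_frame member, initialized to NULL alongside cur_frame, ref_frame and rec_frame, and released in mix_videofmtenc_h264_deinitialize() so the reference it holds is no longer leaked (the site that populates it falls outside this excerpt). The release-and-clear idiom, sketched with a stand-in type:

    #include <stdlib.h>

    typedef struct { int refcount; } Frame;  /* stand-in for MixVideoFrame */

    static void frame_unref(Frame *f) { if (f && --f->refcount == 0) free(f); }

    /* Drop one reference and clear the slot, as deinitialize() now does
     * for rec_frame, ref_frame and lookup_frame. */
    static void drop_frame(Frame **slot)
    {
        if (*slot) {
            frame_unref(*slot);
            *slot = NULL;
        }
    }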
Change-Id: I808dc7b68f1e83679c2cfd2b05caff037e190a5e --- mix_video/src/mixvideoformatenc_h264.c | 2076 ++++++++++++++++---------------- mix_video/src/mixvideoformatenc_h264.h | 3 +- 2 files changed, 1055 insertions(+), 1024 deletions(-) diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c index 32e1bdb..db532e4 100644 --- a/mix_video/src/mixvideoformatenc_h264.c +++ b/mix_video/src/mixvideoformatenc_h264.c @@ -45,6 +45,7 @@ static void mix_videoformatenc_h264_init(MixVideoFormatEnc_H264 * self) { self->cur_frame = NULL; self->ref_frame = NULL; self->rec_frame = NULL; + self->lookup_frame = NULL; #ifdef ANDROID self->last_mix_buffer = NULL; #endif @@ -64,7 +65,7 @@ static void mix_videoformatenc_h264_class_init( GObjectClass *gobject_class = (GObjectClass *) klass; /* direct parent class */ - MixVideoFormatEncClass *video_formatenc_class = + MixVideoFormatEncClass *video_formatenc_class = MIX_VIDEOFORMATENC_CLASS(klass); /* parent class for later use */ @@ -117,10 +118,10 @@ MIX_RESULT mix_videofmtenc_h264_getcaps(MixVideoFormatEnc *mix, GString *msg) { LOG_V( "mix_videofmtenc_h264_getcaps\n"); if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; } - + if (parent_class->getcaps) { return parent_class->getcaps(mix, msg); @@ -128,7 +129,7 @@ MIX_RESULT mix_videofmtenc_h264_getcaps(MixVideoFormatEnc *mix, GString *msg) { return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, +MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, MixVideoConfigParamsEnc * config_params_enc, MixFrameManager * frame_mgr, MixBufferPool * input_buf_pool, @@ -138,624 +139,624 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, MIX_RESULT ret = MIX_RESULT_SUCCESS; MixVideoFormatEnc *parent = NULL; MixVideoConfigParamsEncH264 * config_params_enc_h264; - + VAStatus va_status = VA_STATUS_SUCCESS; VASurfaceID * surfaces = NULL; - + gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; gint va_num_profiles, va_num_entrypoints; VAProfile *va_profiles = NULL; VAEntrypoint *va_entrypoints = NULL; - VAConfigAttrib va_attrib[2]; - guint index; - + VAConfigAttrib va_attrib[2]; + guint index; + /*frame_mgr and input_buf_pool is reservered for future use*/ - + if (mix == NULL || config_params_enc == NULL || va_display == NULL) { - LOG_E( - "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); + LOG_E( + "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); return MIX_RESULT_NULL_PTR; } LOG_V( "begin\n"); - + /* Chainup parent method. 
*/ if (parent_class->initialize) { ret = parent_class->initialize(mix, config_params_enc, - frame_mgr, input_buf_pool, surface_pool, + frame_mgr, input_buf_pool, surface_pool, va_display); } - + if (ret != MIX_RESULT_SUCCESS) { return ret; } - + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) return MIX_RESULT_INVALID_PARAM; - + parent = MIX_VIDEOFORMATENC(&(mix->parent)); MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); - + if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) { - config_params_enc_h264 = + config_params_enc_h264 = MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc); } else { - LOG_V( + LOG_V( "mix_videofmtenc_h264_initialize: no h264 config params found\n"); return MIX_RESULT_FAIL; } - - g_mutex_lock(parent->objectlock); - - LOG_V( + + g_mutex_lock(parent->objectlock); + + LOG_V( "Start to get properities from h.264 params\n"); - + /* get properties from H264 params Object, which is special to H264 format*/ - ret = mix_videoconfigparamsenc_h264_get_bus (config_params_enc_h264, + ret = mix_videoconfigparamsenc_h264_get_bus (config_params_enc_h264, &self->basic_unit_size); - + if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_bus\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_bus\n"); goto cleanup; - } - - + } + + ret = mix_videoconfigparamsenc_h264_get_dlk (config_params_enc_h264, &self->disable_deblocking_filter_idc); - + if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_dlk\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_dlk\n"); goto cleanup; - } - - + } + + ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, &self->slice_num); - + if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); goto cleanup; - } - + } + ret = mix_videoconfigparamsenc_h264_get_delimiter_type (config_params_enc_h264, &self->delimiter_type); - + if (ret != MIX_RESULT_SUCCESS) { - LOG_E ( - "Failed to mix_videoconfigparamsenc_h264_get_delimiter_type\n"); + LOG_E ( + "Failed to mix_videoconfigparamsenc_h264_get_delimiter_type\n"); goto cleanup; - } + } ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264, &self->idr_interval); - + if (ret != MIX_RESULT_SUCCESS) { - LOG_E ( - "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); + LOG_E ( + "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); goto cleanup; - } - - LOG_V( + } + + LOG_V( "======H264 Encode Object properities======:\n"); - - LOG_I( "self->basic_unit_size = %d\n", - self->basic_unit_size); - LOG_I( "self->disable_deblocking_filter_idc = %d\n", - self->disable_deblocking_filter_idc); - LOG_I( "self->slice_num = %d\n", - self->slice_num); - LOG_I ("self->delimiter_type = %d\n", - self->delimiter_type); - LOG_I ("self->idr_interval = %d\n", - self->idr_interval); - - LOG_V( + + LOG_I( "self->basic_unit_size = %d\n", + self->basic_unit_size); + LOG_I( "self->disable_deblocking_filter_idc = %d\n", + self->disable_deblocking_filter_idc); + LOG_I( "self->slice_num = %d\n", + self->slice_num); + LOG_I ("self->delimiter_type = %d\n", + self->delimiter_type); + LOG_I ("self->idr_interval = %d\n", + self->idr_interval); + + LOG_V( "Get properities from params done\n"); - parent->va_display = va_display; - + parent->va_display = va_display; + LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", - (guint)va_display); - - + LOG_I( "Display = 0x%08x\n", + 
(guint)va_display); + + #if 0 /* query the vender information, can ignore*/ va_vendor = vaQueryVendorString (va_display); - LOG_I( "Vendor = %s\n", - va_vendor); -#endif - + LOG_I( "Vendor = %s\n", + va_vendor); +#endif + /*get the max number for profiles/entrypoints/attribs*/ va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); - + LOG_I( "va_max_num_profiles = %d\n", + va_max_num_profiles); + va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); - + LOG_I( "va_max_num_entrypoints = %d\n", + va_max_num_entrypoints); + va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - + LOG_I( "va_max_num_attribs = %d\n", + va_max_num_attribs); + va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); - va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); - - if (va_profiles == NULL || va_entrypoints ==NULL) + va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); + + if (va_profiles == NULL || va_entrypoints ==NULL) { - LOG_E( - "!va_profiles || !va_entrypoints\n"); + LOG_E( + "!va_profiles || !va_entrypoints\n"); ret = MIX_RESULT_NO_MEMORY; - goto cleanup; + goto cleanup; } - - LOG_I( - "va_profiles = 0x%08x\n", (guint)va_profiles); - + + LOG_I( + "va_profiles = 0x%08x\n", (guint)va_profiles); + LOG_V( "vaQueryConfigProfiles\n"); - - + + va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to call vaQueryConfigProfiles\n"); - + LOG_E( + "Failed to call vaQueryConfigProfiles\n"); + ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - + LOG_V( "vaQueryConfigProfiles Done\n"); - - - + + + /*check whether profile is supported*/ for(index= 0; index < va_num_profiles; index++) { if(parent->va_profile == va_profiles[index]) break; } - - if(index == va_num_profiles) + + if(index == va_num_profiles) { - LOG_E( "Profile not supported\n"); + LOG_E( "Profile not supported\n"); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - + LOG_V( "vaQueryConfigEntrypoints\n"); - - + + /*Check entry point*/ - va_status = vaQueryConfigEntrypoints(va_display, - parent->va_profile, + va_status = vaQueryConfigEntrypoints(va_display, + parent->va_profile, va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); + LOG_E( + "Failed to call vaQueryConfigEntrypoints\n"); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - + for (index = 0; index < va_num_entrypoints; index ++) { if (va_entrypoints[index] == VAEntrypointEncSlice) { break; } } - + if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); + LOG_E( "Entrypoint not found\n"); ret = MIX_RESULT_FAIL; - goto cleanup; - } - + goto cleanup; + } + va_attrib[0].type = VAConfigAttribRTFormat; va_attrib[1].type = VAConfigAttribRateControl; - + LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes(va_display, parent->va_profile, + + va_status = vaGetConfigAttributes(va_display, parent->va_profile, parent->va_entrypoint, - &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) + &va_attrib[0], 2); + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to call vaGetConfigAttributes\n"); + LOG_E( + "Failed to call 
vaGetConfigAttributes\n"); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - + if ((va_attrib[0].value & parent->va_format) == 0) { - LOG_E( "Matched format not found\n"); + LOG_E( "Matched format not found\n"); ret = MIX_RESULT_FAIL; - goto cleanup; - } - - + goto cleanup; + } + + if ((va_attrib[1].value & parent->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); + LOG_E( "RC mode not found\n"); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - + va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = parent->va_rcmode; - + va_attrib[1].value = parent->va_rcmode; + LOG_V( "======VA Configuration======\n"); - - LOG_I( "profile = %d\n", - parent->va_profile); - LOG_I( "va_entrypoint = %d\n", - parent->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", - va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", - va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", - va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) = %d\n", - va_attrib[1].value); - + + LOG_I( "profile = %d\n", + parent->va_profile); + LOG_I( "va_entrypoint = %d\n", + parent->va_entrypoint); + LOG_I( "va_attrib[0].type = %d\n", + va_attrib[0].type); + LOG_I( "va_attrib[1].type = %d\n", + va_attrib[1].type); + LOG_I( "va_attrib[0].value (Format) = %d\n", + va_attrib[0].value); + LOG_I( "va_attrib[1].value (RC mode) = %d\n", + va_attrib[1].value); + LOG_V( "vaCreateConfig\n"); - - va_status = vaCreateConfig(va_display, parent->va_profile, - parent->va_entrypoint, + + va_status = vaCreateConfig(va_display, parent->va_profile, + parent->va_entrypoint, &va_attrib[0], 2, &(parent->va_config)); - - if (va_status != VA_STATUS_SUCCESS) + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaCreateConfig\n"); + LOG_E( "Failed vaCreateConfig\n"); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - + /*TODO: compute the surface number*/ int numSurfaces; - + if (parent->share_buf_mode) { numSurfaces = 2; } else { numSurfaces = 8; - parent->ci_frame_num = 0; + parent->ci_frame_num = 0; } - + self->surface_num = numSurfaces + parent->ci_frame_num; - + surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); - + if (surfaces == NULL) { - LOG_E( - "Failed allocate surface\n"); + LOG_E( + "Failed allocate surface\n"); ret = MIX_RESULT_NO_MEMORY; - goto cleanup; + goto cleanup; } - + LOG_V( "vaCreateSurfaces\n"); - - va_status = vaCreateSurfaces(va_display, parent->picture_width, + + va_status = vaCreateSurfaces(va_display, parent->picture_width, parent->picture_height, parent->va_format, numSurfaces, surfaces); - - if (va_status != VA_STATUS_SUCCESS) + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed vaCreateSurfaces\n"); + LOG_E( + "Failed vaCreateSurfaces\n"); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - + if (parent->share_buf_mode) { - - LOG_V( - "We are in share buffer mode!\n"); - self->ci_shared_surfaces = + + LOG_V( + "We are in share buffer mode!\n"); + self->ci_shared_surfaces = g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); - + if (self->ci_shared_surfaces == NULL) { - LOG_E( - "Failed allocate shared surface\n"); - + LOG_E( + "Failed allocate shared surface\n"); + ret = MIX_RESULT_NO_MEMORY; - goto cleanup; + goto cleanup; } - + guint index; for(index = 0; index < parent->ci_frame_num; index++) { - - LOG_I( "ci_frame_id = %lu\n", - parent->ci_frame_id[index]); - - LOG_V( - "vaCreateSurfaceFromCIFrame\n"); - - va_status = vaCreateSurfaceFromCIFrame(va_display, - (gulong) (parent->ci_frame_id[index]), + + LOG_I( 
"ci_frame_id = %lu\n", + parent->ci_frame_id[index]); + + LOG_V( + "vaCreateSurfaceFromCIFrame\n"); + + va_status = vaCreateSurfaceFromCIFrame(va_display, + (gulong) (parent->ci_frame_id[index]), &self->ci_shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaCreateSurfaceFromCIFrame\n"); + LOG_E( + "Failed to vaCreateSurfaceFromCIFrame\n"); ret = MIX_RESULT_FAIL; - goto cleanup; - } + goto cleanup; + } } - - LOG_V( + + LOG_V( "vaCreateSurfaceFromCIFrame Done\n"); - + }// if (parent->share_buf_mode) - + self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); - + if (self->surfaces == NULL) { - LOG_E( - "Failed allocate private surface\n"); + LOG_E( + "Failed allocate private surface\n"); ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - if (parent->share_buf_mode) { - /*shared surfaces should be put in pool first, + goto cleanup; + } + + if (parent->share_buf_mode) { + /*shared surfaces should be put in pool first, because we will get it accoring to CI index*/ for(index = 0; index < parent->ci_frame_num; index++) self->surfaces[index] = self->ci_shared_surfaces[index]; } - + for(index = 0; index < numSurfaces; index++) { - self->surfaces[index + parent->ci_frame_num] = surfaces[index]; + self->surfaces[index + parent->ci_frame_num] = surfaces[index]; } - - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", - numSurfaces + parent->ci_frame_num); - + + LOG_V( "assign surface Done\n"); + LOG_I( "Created %d libva surfaces\n", + numSurfaces + parent->ci_frame_num); + #if 0 //current put this in gst - images = g_malloc(sizeof(VAImage)*numSurfaces); + images = g_malloc(sizeof(VAImage)*numSurfaces); if (images == NULL) { - g_mutex_unlock(parent->objectlock); + g_mutex_unlock(parent->objectlock); return MIX_RESULT_FAIL; - } - - for (index = 0; index < numSurfaces; index++) { - //Derive an VAImage from an existing surface. + } + + for (index = 0; index < numSurfaces; index++) { + //Derive an VAImage from an existing surface. 
//The image buffer can then be mapped/unmapped for CPU access va_status = vaDeriveImage(va_display, surfaces[index], &images[index]); } -#endif - - LOG_V( "mix_surfacepool_new\n"); - +#endif + + LOG_V( "mix_surfacepool_new\n"); + parent->surfacepool = mix_surfacepool_new(); if (surface_pool) - *surface_pool = parent->surfacepool; + *surface_pool = parent->surfacepool; //which is useful to check before encode if (parent->surfacepool == NULL) { - LOG_E( + LOG_E( "Failed to mix_surfacepool_new\n"); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - - LOG_V( - "mix_surfacepool_initialize\n"); - + + LOG_V( + "mix_surfacepool_initialize\n"); + ret = mix_surfacepool_initialize(parent->surfacepool, self->surfaces, parent->ci_frame_num + numSurfaces, va_display); - + switch (ret) { case MIX_RESULT_SUCCESS: break; case MIX_RESULT_ALREADY_INIT: - LOG_E( "Error init failure\n"); + LOG_E( "Error init failure\n"); ret = MIX_RESULT_ALREADY_INIT; - goto cleanup; + goto cleanup; default: break; } - - + + //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - + LOG_V( "vaCreateContext\n"); + va_status = vaCreateContext(va_display, parent->va_config, parent->picture_width, parent->picture_height, 0, self->surfaces, parent->ci_frame_num + numSurfaces, &(parent->va_context)); - - LOG_I( - "Created libva context width %d, height %d\n", + + LOG_I( + "Created libva context width %d, height %d\n", parent->picture_width, parent->picture_height); - - if (va_status != VA_STATUS_SUCCESS) + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", - (guint)va_status); + LOG_E( + "Failed to vaCreateContext\n"); + LOG_I( "va_status = %d\n", + (guint)va_status); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - + guint max_size = 0; ret = mix_videofmtenc_h264_get_max_encoded_buf_size (parent, &max_size); if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videofmtenc_h264_get_max_encoded_buf_size\n"); - goto cleanup; - + LOG_E( + "Failed to mix_videofmtenc_h264_get_max_encoded_buf_size\n"); + goto cleanup; + } - + /*Create coded buffer for output*/ va_status = vaCreateBuffer (va_display, parent->va_context, VAEncCodedBufferType, self->coded_buf_size, // 1, NULL, &(self->coded_buf[0])); - - if (va_status != VA_STATUS_SUCCESS) + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - + /*Create coded buffer for output*/ va_status = vaCreateBuffer (va_display, parent->va_context, VAEncCodedBufferType, self->coded_buf_size, // 1, NULL, &(self->coded_buf[1])); - - if (va_status != VA_STATUS_SUCCESS) + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - + #ifdef SHOW_SRC Display * display = XOpenDisplay (NULL); - LOG_I( "display = 0x%08x\n", - (guint) display); + LOG_I( "display = 0x%08x\n", + (guint) display); win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, parent->picture_width, parent->picture_height, 0, 0, WhitePixel(display, 0)); XMapWindow(display, win); XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - + XSync(display, False); - LOG_I( "va_display = 0x%08x\n", - (guint) va_display); - -#endif /* SHOW_SRC */ - + LOG_I( "va_display = 0x%08x\n", + (guint) 
va_display); + +#endif /* SHOW_SRC */ + cleanup: - - + + if (ret == MIX_RESULT_SUCCESS) { - parent->initialized = TRUE; + parent->initialized = TRUE; } - + /*free profiles and entrypoints*/ - if (va_profiles) + if (va_profiles) g_free(va_profiles); - + if (va_entrypoints) - g_free (va_entrypoints); - - if (surfaces) + g_free (va_entrypoints); + + if (surfaces) g_free (surfaces); - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - + + g_mutex_unlock(parent->objectlock); + + LOG_V( "end\n"); + return ret; } MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, MixVideoEncodeParams * encode_params) { - + MIX_RESULT ret = MIX_RESULT_SUCCESS; MixVideoFormatEnc *parent = NULL; - - LOG_V( "Begin\n"); - + + LOG_V( "Begin\n"); + /*currenly only support one input and output buffer*/ if (bufincnt != 1 || iovoutcnt != 1) { - LOG_E( - "buffer count not equel to 1\n"); - LOG_E( - "maybe some exception occurs\n"); + LOG_E( + "buffer count not equel to 1\n"); + LOG_E( + "maybe some exception occurs\n"); } - + if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { - LOG_E( - "!mix || !bufin[0] ||!iovout[0]\n"); + LOG_E( + "!mix || !bufin[0] ||!iovout[0]\n"); return MIX_RESULT_NULL_PTR; } - + #if 0 if (parent_class->encode) { return parent_class->encode(mix, bufin, bufincnt, iovout, iovoutcnt, encode_params); } #endif - + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - + return MIX_RESULT_INVALID_PARAM; + parent = MIX_VIDEOFORMATENC(&(mix->parent)); MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264 (mix); - - LOG_V( "Locking\n"); + + LOG_V( "Locking\n"); g_mutex_lock(parent->objectlock); - + //TODO: also we could move some encode Preparation work to here - - LOG_V( - "mix_videofmtenc_h264_process_encode\n"); - - ret = mix_videofmtenc_h264_process_encode (self, + + LOG_V( + "mix_videofmtenc_h264_process_encode\n"); + + ret = mix_videofmtenc_h264_process_encode (self, bufin[0], iovout[0]); if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed mix_videofmtenc_h264_process_encode\n"); + LOG_E( + "Failed mix_videofmtenc_h264_process_encode\n"); goto cleanup; } - -cleanup: - - LOG_V( "UnLocking\n"); - + +cleanup: + + LOG_V( "UnLocking\n"); + g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - + + LOG_V( "end\n"); + return ret; } MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { - + //MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); + + LOG_V( "Begin\n"); if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + /*not chain to parent flush func*/ #if 0 if (parent_class->flush) { @@ -765,59 +766,60 @@ MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { if (!MIX_IS_VIDEOFORMATENC_H264(mix)) return MIX_RESULT_INVALID_PARAM; - + MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); - + g_mutex_lock(mix->objectlock); -#if 0 - /*unref the current source surface*/ +#if 0 + /*unref the current source surface*/ if (self->cur_frame != NULL) { mix_videoframe_unref (self->cur_frame); self->cur_frame = NULL; } -#endif - - /*unref the reconstructed surface*/ +#endif + + /*unref the reconstructed surface*/ if (self->rec_frame != NULL) { mix_videoframe_unref (self->rec_frame); self->rec_frame = NULL; } - /*unref the reference surface*/ + /*unref the reference surface*/ if (self->ref_frame != NULL) { mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; + 
self->ref_frame = NULL; } + #ifdef ANDROID if(self->last_mix_buffer) { mix_buffer_unref(self->last_mix_buffer); self->last_mix_buffer = NULL; } -#endif - /*reset the properities*/ +#endif + /*reset the properities*/ self->encoded_frames = 0; self->pic_skipped = FALSE; self->is_intra = TRUE; - + g_mutex_unlock(mix->objectlock); - - LOG_V( "end\n"); - + + LOG_V( "end\n"); + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix) { - - LOG_V( "\n"); + + LOG_V( "\n"); if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } if (parent_class->eos) { return parent_class->eos(mix); @@ -826,21 +828,21 @@ MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix) { } MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) { - + MixVideoFormatEnc *parent = NULL; VAStatus va_status; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - - LOG_V( "Begin\n"); + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + + LOG_V( "Begin\n"); if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; + return MIX_RESULT_INVALID_PARAM; if (parent_class->deinitialize) { ret = parent_class->deinitialize(mix); @@ -849,39 +851,45 @@ MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) { if (ret != MIX_RESULT_SUCCESS) { return ret; - } + } parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); - - LOG_V( "Release frames\n"); + MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); + + LOG_V( "Release frames\n"); g_mutex_lock(parent->objectlock); #if 0 - /*unref the current source surface*/ + /*unref the current source surface*/ if (self->cur_frame != NULL) { mix_videoframe_unref (self->cur_frame); self->cur_frame = NULL; } -#endif - - /*unref the reconstructed surface*/ +#endif + + /*unref the reconstructed surface*/ if (self->rec_frame != NULL) { mix_videoframe_unref (self->rec_frame); self->rec_frame = NULL; } - /*unref the reference surface*/ + /*unref the reference surface*/ if (self->ref_frame != NULL) { mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; - } + self->ref_frame = NULL; + } + + if (self->lookup_frame != NULL) + { + mix_videoframe_unref (self->lookup_frame); + self->lookup_frame = NULL; + } - LOG_V( "Release surfaces\n"); + LOG_V( "Release surfaces\n"); if (self->ci_shared_surfaces) { @@ -891,62 +899,62 @@ MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) { if (self->surfaces) { - g_free (self->surfaces); + g_free (self->surfaces); self->surfaces = NULL; - } + } + + LOG_V( "vaDestroyContext\n"); - LOG_V( "vaDestroyContext\n"); - va_status = vaDestroyContext (parent->va_display, parent->va_context); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed vaDestroyContext\n"); + LOG_E( + "Failed vaDestroyContext\n"); ret = MIX_RESULT_FAIL; goto cleanup; - } + } - LOG_V( "vaDestroyConfig\n"); - - va_status = vaDestroyConfig (parent->va_display, parent->va_config); - if (va_status != VA_STATUS_SUCCESS) + LOG_V( "vaDestroyConfig\n"); + + va_status = vaDestroyConfig (parent->va_display, parent->va_config); + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed vaDestroyConfig\n"); + LOG_E( + "Failed vaDestroyConfig\n"); ret = MIX_RESULT_FAIL; goto cleanup; - } + } cleanup: parent->initialized = 
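/*
 * Editor's sketch, not part of the original patch: the teardown order
 * deinitialize() above enforces.  The encode context is destroyed
 * before the config it was created from, and the initialized flag is
 * cleared on every path, matching the cleanup label in the patch.
 */
#include <va/va.h>

static int teardown_va(VADisplay dpy, VAContextID ctx, VAConfigID cfg,
                       int *initialized)
{
    int ret = 0;

    if (vaDestroyContext(dpy, ctx) != VA_STATUS_SUCCESS)
        ret = -1;                   /* the context depends on the config, */
    else if (vaDestroyConfig(dpy, cfg) != VA_STATUS_SUCCESS)
        ret = -1;                   /* so it must be destroyed first      */

    *initialized = 0;
    return ret;
}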
FALSE; - g_mutex_unlock(parent->objectlock); + g_mutex_unlock(parent->objectlock); + + LOG_V( "end\n"); - LOG_V( "end\n"); - return ret; } MIX_RESULT mix_videofmtenc_h264_send_seq_params (MixVideoFormatEnc_H264 *mix) { - + VAStatus va_status; VAEncSequenceParameterBufferH264 h264_seq_param; - + MixVideoFormatEnc *parent = NULL; - + if (mix == NULL) { - LOG_E("mix == NULL\n"); + LOG_E("mix == NULL\n"); return MIX_RESULT_NULL_PTR; } - - LOG_V( "Begin\n\n"); - + + LOG_V( "Begin\n\n"); + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) return MIX_RESULT_INVALID_PARAM; - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + /*set up the sequence params for HW*/ h264_seq_param.level_idc = 30; //TODO, hard code now h264_seq_param.intra_period = parent->intra_period; @@ -954,64 +962,64 @@ MIX_RESULT mix_videofmtenc_h264_send_seq_params (MixVideoFormatEnc_H264 *mix) h264_seq_param.picture_width_in_mbs = parent->picture_width / 16; h264_seq_param.picture_height_in_mbs = parent->picture_height/ 16; h264_seq_param.bits_per_second = parent->bitrate; - h264_seq_param.frame_rate = + h264_seq_param.frame_rate = (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; h264_seq_param.initial_qp = parent->initial_qp; h264_seq_param.min_qp = parent->min_qp; h264_seq_param.basic_unit_size = mix->basic_unit_size; //for rate control usage h264_seq_param.intra_period = parent->intra_period; //h264_seq_param.vui_flag = 248; - //h264_seq_param.seq_parameter_set_id = 176; - - LOG_V( - "===h264 sequence params===\n"); - - LOG_I( "seq_parameter_set_id = %d\n", - (guint)h264_seq_param.seq_parameter_set_id); - LOG_I( "level_idc = %d\n", - (guint)h264_seq_param.level_idc); - LOG_I( "intra_period = %d\n", - h264_seq_param.intra_period); - LOG_I( "idr_interval = %d\n", - h264_seq_param.intra_idr_period); - LOG_I( "picture_width_in_mbs = %d\n", - h264_seq_param.picture_width_in_mbs); - LOG_I( "picture_height_in_mbs = %d\n", - h264_seq_param.picture_height_in_mbs); - LOG_I( "bitrate = %d\n", - h264_seq_param.bits_per_second); - LOG_I( "frame_rate = %d\n", - h264_seq_param.frame_rate); - LOG_I( "initial_qp = %d\n", - h264_seq_param.initial_qp); - LOG_I( "min_qp = %d\n", - h264_seq_param.min_qp); - LOG_I( "basic_unit_size = %d\n", - h264_seq_param.basic_unit_size); - LOG_I( "vui_flag = %d\n\n", - h264_seq_param.vui_flag); - + //h264_seq_param.seq_parameter_set_id = 176; + + LOG_V( + "===h264 sequence params===\n"); + + LOG_I( "seq_parameter_set_id = %d\n", + (guint)h264_seq_param.seq_parameter_set_id); + LOG_I( "level_idc = %d\n", + (guint)h264_seq_param.level_idc); + LOG_I( "intra_period = %d\n", + h264_seq_param.intra_period); + LOG_I( "idr_interval = %d\n", + h264_seq_param.intra_idr_period); + LOG_I( "picture_width_in_mbs = %d\n", + h264_seq_param.picture_width_in_mbs); + LOG_I( "picture_height_in_mbs = %d\n", + h264_seq_param.picture_height_in_mbs); + LOG_I( "bitrate = %d\n", + h264_seq_param.bits_per_second); + LOG_I( "frame_rate = %d\n", + h264_seq_param.frame_rate); + LOG_I( "initial_qp = %d\n", + h264_seq_param.initial_qp); + LOG_I( "min_qp = %d\n", + h264_seq_param.min_qp); + LOG_I( "basic_unit_size = %d\n", + h264_seq_param.basic_unit_size); + LOG_I( "vui_flag = %d\n\n", + h264_seq_param.vui_flag); + va_status = vaCreateBuffer(parent->va_display, parent->va_context, VAEncSequenceParameterBufferType, sizeof(h264_seq_param), 1, &h264_seq_param, &mix->seq_param_buf); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { - 
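/*
 * Editor's sketch, not part of the original patch: the frame-rate
 * fold-down used when filling VAEncSequenceParameterBufferH264 above.
 * Adding denom/2 before the integer division rounds to the nearest
 * whole fps instead of truncating.
 */
static unsigned int fps_round(unsigned int num, unsigned int denom)
{
    return (num + denom / 2) / denom;
}
/* fps_round(30000, 1001) == 30, where plain 30000/1001 truncates to 29. */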
LOG_E( - "Failed to vaCreateBuffer\n"); + LOG_E( + "Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, + + va_status = vaRenderPicture(parent->va_display, parent->va_context, &mix->seq_param_buf, 1); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaRenderPicture\n"); + LOG_E( + "Failed to vaRenderPicture\n"); return MIX_RESULT_FAIL; - } - + } + return MIX_RESULT_SUCCESS; } @@ -1020,254 +1028,266 @@ MIX_RESULT mix_videofmtenc_h264_send_picture_parameter (MixVideoFormatEnc_H264 * VAStatus va_status; VAEncPictureParameterBufferH264 h264_pic_param; MixVideoFormatEnc *parent = NULL; - + if (mix == NULL) { - LOG_E("mix == NULL\n"); + LOG_E("mix == NULL\n"); return MIX_RESULT_NULL_PTR; } - - LOG_V( "Begin\n\n"); + + LOG_V( "Begin\n\n"); if (!MIX_IS_VIDEOFORMATENC_H264(mix)) return MIX_RESULT_INVALID_PARAM; - + parent = MIX_VIDEOFORMATENC(&(mix->parent)); - + /*set picture params for HW*/ - h264_pic_param.reference_picture = mix->ref_frame->frame_id; + h264_pic_param.reference_picture = mix->ref_frame->frame_id; h264_pic_param.reconstructed_picture = mix->rec_frame->frame_id; h264_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; h264_pic_param.picture_width = parent->picture_width; h264_pic_param.picture_height = parent->picture_height; - h264_pic_param.last_picture = 0; - - - LOG_V( - "======h264 picture params======\n"); - LOG_I( "reference_picture = 0x%08x\n", - h264_pic_param.reference_picture); - LOG_I( "reconstructed_picture = 0x%08x\n", - h264_pic_param.reconstructed_picture); - LOG_I( "coded_buf_index = %d\n", - mix->coded_buf_index); - LOG_I( "coded_buf = 0x%08x\n", + h264_pic_param.last_picture = 0; + + + LOG_V( + "======h264 picture params======\n"); + LOG_I( "reference_picture = 0x%08x\n", + h264_pic_param.reference_picture); + LOG_I( "reconstructed_picture = 0x%08x\n", + h264_pic_param.reconstructed_picture); + LOG_I( "coded_buf_index = %d\n", + mix->coded_buf_index); + LOG_I( "coded_buf = 0x%08x\n", h264_pic_param.coded_buf); - LOG_I( "picture_width = %d\n", - h264_pic_param.picture_width); - LOG_I( "picture_height = %d\n\n", - h264_pic_param.picture_height); - + LOG_I( "picture_width = %d\n", + h264_pic_param.picture_width); + LOG_I( "picture_height = %d\n\n", + h264_pic_param.picture_height); + va_status = vaCreateBuffer(parent->va_display, parent->va_context, VAEncPictureParameterBufferType, sizeof(h264_pic_param), 1,&h264_pic_param, - &mix->pic_param_buf); - - if (va_status != VA_STATUS_SUCCESS) + &mix->pic_param_buf); + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaCreateBuffer\n"); + LOG_E( + "Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; } - - + + va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->pic_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) + &mix->pic_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaRenderPicture\n"); + LOG_E( + "Failed to vaRenderPicture\n"); return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; - + } + + LOG_V( "end\n"); + return MIX_RESULT_SUCCESS; + } MIX_RESULT mix_videofmtenc_h264_send_slice_parameter (MixVideoFormatEnc_H264 *mix) { VAStatus va_status; - + guint slice_num; guint slice_height; guint slice_index; guint slice_height_in_mb; - + if (mix == NULL) { - LOG_E("mix == NULL\n"); + LOG_E("mix == NULL\n"); return MIX_RESULT_NULL_PTR; } - - LOG_V( "Begin\n\n"); - - - MixVideoFormatEnc *parent 
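/*
 * Editor's sketch, not part of the original patch: the two-entry
 * coded-buffer ping-pong behind coded_buf_index above.  Frame N is
 * submitted into one buffer while frame N-1's bitstream is drained
 * from the other, hiding one frame of encode latency.
 */
#include <va/va.h>

struct coded_ring {
    VABufferID coded_buf[2];
    VABufferID last_coded_buf;      /* the buffer to map and drain next */
    int        index;
};

static void coded_ring_advance(struct coded_ring *r)
{
    r->last_coded_buf = r->coded_buf[r->index];
    r->index = (r->index + 1) % 2;  /* the patch's "index++; index %= 2" */
}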
= NULL; - + + LOG_V( "Begin\n\n"); + + + MixVideoFormatEnc *parent = NULL; + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + slice_num = mix->slice_num; - slice_height = parent->picture_height / slice_num; - + slice_height = parent->picture_height / slice_num; + slice_height += 15; slice_height &= (~15); slice_num = mix->slice_num = (parent->picture_height + 15) / slice_height; -#if 1 - va_status = vaCreateBuffer (parent->va_display, parent->va_context, +#if 0 + if (!mix->is_intra){ + slice_num = 9; + + slice_height = parent->picture_height / slice_num; + + slice_height += 15; + slice_height &= (~15); + + } +#endif + +#if 1 + va_status = vaCreateBuffer (parent->va_display, parent->va_context, VAEncSliceParameterBufferType, sizeof(VAEncSliceParameterBuffer), slice_num, NULL, - &mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) + &mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaCreateBuffer\n"); + LOG_E( + "Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; } - + VAEncSliceParameterBuffer *slice_param, *current_slice; va_status = vaMapBuffer(parent->va_display, mix->slice_param_buf, - (void **)&slice_param); - - if (va_status != VA_STATUS_SUCCESS) + (void **)&slice_param); + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaMapBuffer\n"); + LOG_E( + "Failed to vaMapBuffer\n"); return MIX_RESULT_FAIL; - } - + } + current_slice = slice_param; - + for (slice_index = 0; slice_index < slice_num; slice_index++) { current_slice = slice_param + slice_index; - slice_height_in_mb = - min (slice_height, parent->picture_height + slice_height_in_mb = + min (slice_height, parent->picture_height - slice_index * slice_height) / 16; - + // starting MB row number for this slice - current_slice->start_row_number = slice_index * slice_height / 16; + current_slice->start_row_number = slice_index * slice_height / 16; // slice height measured in MB - current_slice->slice_height = slice_height_in_mb; - current_slice->slice_flags.bits.is_intra = mix->is_intra; - current_slice->slice_flags.bits.disable_deblocking_filter_idc + current_slice->slice_height = slice_height_in_mb; + current_slice->slice_flags.bits.is_intra = mix->is_intra; + current_slice->slice_flags.bits.disable_deblocking_filter_idc = mix->disable_deblocking_filter_idc; - - LOG_V( - "======h264 slice params======\n"); - - LOG_I( "slice_index = %d\n", - (gint) slice_index); - LOG_I( "start_row_number = %d\n", - (gint) current_slice->start_row_number); - LOG_I( "slice_height_in_mb = %d\n", - (gint) current_slice->slice_height); - LOG_I( "slice.is_intra = %d\n", - (gint) current_slice->slice_flags.bits.is_intra); - LOG_I( - "disable_deblocking_filter_idc = %d\n\n", - (gint) mix->disable_deblocking_filter_idc); - - } - + + LOG_V( + "======h264 slice params======\n"); + + LOG_I( "slice_index = %d\n", + (gint) slice_index); + LOG_I( "start_row_number = %d\n", + (gint) current_slice->start_row_number); + LOG_I( "slice_height_in_mb = %d\n", + (gint) current_slice->slice_height); + LOG_I( "slice.is_intra = %d\n", + (gint) current_slice->slice_flags.bits.is_intra); + LOG_I( + "disable_deblocking_filter_idc = %d\n\n", + (gint) mix->disable_deblocking_filter_idc); + + } + va_status = vaUnmapBuffer(parent->va_display, mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaUnmapBuffer\n"); + LOG_E( + 
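/*
 * Editor's sketch, not part of the original patch: the per-slice math
 * from the loop above.  The slice height is rounded up to a 16-pixel
 * macroblock multiple; each slice covers slice_height pixel rows
 * except the last, which is clipped to the picture bottom before the
 * pixel count is converted to macroblock rows.
 */
static unsigned int align16(unsigned int v)
{
    return (v + 15) & ~15u;
}

static unsigned int slice_mb_rows(unsigned int pic_height,
                                  unsigned int slice_height, /* align16'd */
                                  unsigned int slice_index)
{
    unsigned int remaining = pic_height - slice_index * slice_height;
    unsigned int rows = remaining < slice_height ? remaining : slice_height;
    return rows / 16;
}
/* 720 rows in 3 slices: align16(720 / 3) = 240, i.e. 15 MB rows each. */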
"Failed to vaUnmapBuffer\n"); return MIX_RESULT_FAIL; - } -#endif + } +#endif #if 0 VAEncSliceParameterBuffer slice_param; slice_index = 0; slice_height_in_mb = slice_height / 16; - slice_param.start_row_number = 0; - slice_param.slice_height = slice_height / 16; - slice_param.slice_flags.bits.is_intra = mix->is_intra; + slice_param.start_row_number = 0; + slice_param.slice_height = slice_height / 16; + slice_param.slice_flags.bits.is_intra = mix->is_intra; slice_param.slice_flags.bits.disable_deblocking_filter_idc = mix->disable_deblocking_filter_idc; - va_status = vaCreateBuffer (parent->va_display, parent->va_context, + va_status = vaCreateBuffer (parent->va_display, parent->va_context, VAEncSliceParameterBufferType, sizeof(slice_param), slice_num, &slice_param, - &mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) + &mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaCreateBuffer\n"); + LOG_E( + "Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; - } + } #endif - + va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->slice_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) + &mix->slice_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaRenderPicture\n"); + LOG_E( + "Failed to vaRenderPicture\n"); return MIX_RESULT_FAIL; - } - - - LOG_V( "end\n"); - + } + + + LOG_V( "end\n"); + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, MixBuffer * bufin, MixIOVec * iovout) { - + MIX_RESULT ret = MIX_RESULT_SUCCESS; VAStatus va_status = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; + VADisplay va_display = NULL; VAContextID va_context; gulong surface = 0; guint16 width, height; - + MixVideoFrame * tmp_frame; guint8 *buf; - + if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { - LOG_E( + LOG_E( "mix == NUL) || bufin == NULL || iovout == NULL\n"); return MIX_RESULT_NULL_PTR; - } + } + + LOG_V( "Begin\n"); - LOG_V( "Begin\n"); - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) return MIX_RESULT_INVALID_PARAM; - + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - + va_display = parent->va_display; va_context = parent->va_context; width = parent->picture_width; - height = parent->picture_height; - - - LOG_I( "encoded_frames = %d\n", - mix->encoded_frames); - LOG_I( "is_intra = %d\n", - mix->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", + height = parent->picture_height; + + + LOG_I( "encoded_frames = %d\n", + mix->encoded_frames); + LOG_I( "is_intra = %d\n", + mix->is_intra); + LOG_I( "ci_frame_id = 0x%08x\n", (guint) parent->ci_frame_id); /* determine the picture type*/ @@ -1275,127 +1295,127 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, mix->is_intra = TRUE; } else { mix->is_intra = FALSE; - } - - LOG_I( "is_intra_picture = %d\n", - mix->is_intra); - - LOG_V( - "Get Surface from the pool\n"); - - /*current we use one surface for source data, + } + + LOG_I( "is_intra_picture = %d\n", + mix->is_intra); + + LOG_V( + "Get Surface from the pool\n"); + + /*current we use one surface for source data, * one for reference and one for reconstructed*/ /*TODO, could be refine here*/ if (!parent->share_buf_mode) { - LOG_V( - "We are NOT in share buffer mode\n"); - + LOG_V( + "We are NOT in share buffer mode\n"); + if (mix->ref_frame == NULL) { ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used { - LOG_E( - "Failed to 
mix_surfacepool_get\n"); + LOG_E( + "Failed to mix_surfacepool_get\n"); goto cleanup; } } - - if (mix->rec_frame == NULL) + + if (mix->rec_frame == NULL) { ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_surfacepool_get\n"); + LOG_E( + "Failed to mix_surfacepool_get\n"); goto cleanup; } } if (parent->need_display) { - mix->cur_frame = NULL; + mix->cur_frame = NULL; } - + if (mix->cur_frame == NULL) { ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_surfacepool_get\n"); + LOG_E( + "Failed to mix_surfacepool_get\n"); goto cleanup; - } + } } - - LOG_V( "Get Surface Done\n"); - + LOG_V( "Get Surface Done\n"); + + VAImage src_image; guint8 *pvbuf; guint8 *dst_y; - guint8 *dst_uv; + guint8 *dst_uv; int i,j; - - LOG_V( - "map source data to surface\n"); - + + LOG_V( + "map source data to surface\n"); + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); + LOG_E( + "Failed to mix_videoframe_get_frame_id\n"); goto cleanup; } - - - LOG_I( + + + LOG_I( "surface id = 0x%08x\n", (guint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); + + va_status = vaDeriveImage(va_display, surface, &src_image); //need to destroy - - if (va_status != VA_STATUS_SUCCESS) + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaDeriveImage\n"); + LOG_E( + "Failed to vaDeriveImage\n"); ret = MIX_RESULT_FAIL; goto cleanup; } - + VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); - + LOG_V( "vaDeriveImage Done\n"); + + va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed to vaMapBuffer\n"); ret = MIX_RESULT_FAIL; goto cleanup; - } - - LOG_V( - "vaImage information\n"); - LOG_I( + } + + LOG_V( + "vaImage information\n"); + LOG_I( "image->pitches[0] = %d\n", image->pitches[0]); - LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); - LOG_I( + LOG_I( + "image->pitches[1] = %d\n", image->pitches[1]); + LOG_I( "image->offsets[0] = %d\n", image->offsets[0]); - LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); - LOG_I( - "image->num_planes = %d\n", image->num_planes); - LOG_I( - "image->width = %d\n", image->width); - LOG_I( - "image->height = %d\n", image->height); - - LOG_I( - "input buf size = %d\n", bufin->size); - + LOG_I( + "image->offsets[1] = %d\n", image->offsets[1]); + LOG_I( + "image->num_planes = %d\n", image->num_planes); + LOG_I( + "image->width = %d\n", image->width); + LOG_I( + "image->height = %d\n", image->height); + + LOG_I( + "input buf size = %d\n", bufin->size); + guint8 *inbuf = bufin->data; #ifndef ANDROID @@ -1407,18 +1427,18 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, #ifdef USE_SRC_FMT_YUV420 /*need to convert YUV420 to NV12*/ dst_y = pvbuf +image->offsets[0]; - + for (i = 0; i < height; i ++) { memcpy (dst_y, inbuf + i * width, width); dst_y += image->pitches[0]; } - + dst_uv = pvbuf + image->offsets[1]; - + for (i = 0; i < height / 2; i ++) { for (j = 0; j < width; j+=2) { dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = + dst_uv [j + 1] = inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; } dst_uv += image->pitches[1]; @@ -1454,70 +1474,80 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, 
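/*
 * Editor's sketch, not part of the original patch: the upload path used
 * above when share_buf_mode is off.  vaDeriveImage() exposes the
 * surface's backing store as a VAImage; the caller maps it, copies the
 * planes in honoring the image's offsets[] and pitches[], then unmaps
 * and destroys the derived image.  fill_planes() is a hypothetical
 * helper standing in for the YUV420/NV12 copy loops.
 */
#include <va/va.h>

static void fill_planes(unsigned char *base, const VAImage *img)
{
    (void)base; (void)img;              /* app-specific plane copy */
}

static int upload_to_surface(VADisplay dpy, VASurfaceID surface)
{
    VAImage img;
    unsigned char *base = NULL;

    if (vaDeriveImage(dpy, surface, &img) != VA_STATUS_SUCCESS)
        return -1;
    if (vaMapBuffer(dpy, img.buf, (void **)&base) != VA_STATUS_SUCCESS) {
        vaDestroyImage(dpy, img.image_id);
        return -1;
    }

    fill_planes(base, &img);

    vaUnmapBuffer(dpy, img.buf);
    vaDestroyImage(dpy, img.image_id);  /* "need to destroy", as noted above */
    return 0;
}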
#endif #endif //USE_SRC_FMT_YUV420 - va_status = vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) + va_status = vaUnmapBuffer(va_display, image->buf); + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; + LOG_E( + "Failed to vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; goto cleanup; - } - + } + va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaDestroyImage\n"); - ret = MIX_RESULT_FAIL; + LOG_E( + "Failed to vaDestroyImage\n"); + ret = MIX_RESULT_FAIL; goto cleanup; - } - - LOG_V( - "Map source data to surface done\n"); - + } + + LOG_V( + "Map source data to surface done\n"); + } - + else {//if (!parent->share_buf_mode) - - MixVideoFrame * frame = mix_videoframe_new(); - + + //MixVideoFrame * frame = mix_videoframe_new(); + if (mix->lookup_frame == NULL) + { + mix->lookup_frame = mix_videoframe_new (); + if (mix->lookup_frame == NULL) + { + LOG_E("mix_videoframe_new() failed!\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + } + if (mix->ref_frame == NULL) { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); + ret = mix_videoframe_set_ci_frame_idx (mix->lookup_frame, mix->surface_num - 1); if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_frame, frame); + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->ref_frame, mix->lookup_frame); if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "get reference surface from pool failed\n"); + LOG_E( + "get reference surface from pool failed\n"); goto cleanup; } } - - if (mix->rec_frame == NULL) + + if (mix->rec_frame == NULL) { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) + ret = mix_videoframe_set_ci_frame_idx (mix->lookup_frame, mix->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); goto cleanup; - } - + } + ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_frame, frame); + (parent->surfacepool, &mix->rec_frame, mix->lookup_frame); if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "get recontructed surface from pool failed\n"); + LOG_E( + "get recontructed surface from pool failed\n"); goto cleanup; } } @@ -1525,9 +1555,9 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, //mix_videoframe_unref (mix->cur_frame); if (parent->need_display) { - mix->cur_frame = NULL; + mix->cur_frame = NULL; } - + if (mix->cur_frame == NULL) { guint ci_idx; @@ -1537,146 +1567,146 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); #endif - LOG_I( - "surface_num = %d\n", mix->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - + LOG_I( + "surface_num = %d\n", mix->surface_num); + LOG_I( + "ci_frame_idx = %d\n", ci_idx); + if (ci_idx > mix->surface_num - 2) { - LOG_E( - "the CI frame idx is too bigger than CI frame number\n"); - ret = MIX_RESULT_FAIL; + LOG_E( + "the CI frame idx is too bigger than CI frame number\n"); + ret = MIX_RESULT_FAIL; goto cleanup; } - - - ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); - 
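/*
 * Editor's sketch, not part of the original patch: the surface-index
 * convention of the shared-buffer (camera imaging) path above.  The
 * pool holds surface_num entries; the last two are reserved for the
 * encoder's reference and reconstructed frames, and the index of the
 * camera frame to encode arrives in-band in the first bytes of the
 * input buffer.
 */
#include <string.h>

struct ci_layout {
    unsigned int ref_idx;       /* reference frame: surface_num - 1     */
    unsigned int rec_idx;       /* reconstructed frame: surface_num - 2 */
    unsigned int cur_idx;       /* camera frame to encode, read in-band */
};

static int ci_pick_surfaces(const unsigned char *bufin_data,
                            unsigned int surface_num,
                            struct ci_layout *out)
{
    out->ref_idx = surface_num - 1;
    out->rec_idx = surface_num - 2;
    memcpy(&out->cur_idx, bufin_data, sizeof(unsigned int));
    /* the same sanity bound the patch applies before trusting the index */
    return (out->cur_idx > surface_num - 2) ? -1 : 0;
}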
if (ret != MIX_RESULT_SUCCESS) + + + ret = mix_videoframe_set_ci_frame_idx (mix->lookup_frame, ci_idx); + if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); goto cleanup; - } + } + - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_frame, frame); + (parent->surfacepool, &mix->cur_frame, mix->lookup_frame); if (ret != MIX_RESULT_SUCCESS) { - LOG_E( + LOG_E( "get current working surface from pool failed\n"); goto cleanup; - } + } } - + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - + } /** * Start encoding process **/ - - LOG_V( "vaBeginPicture\n"); + + LOG_V( "vaBeginPicture\n"); LOG_I( "va_context = 0x%08x\n",(guint)va_context); - LOG_I( "surface = 0x%08x\n",(guint)surface); + LOG_I( "surface = 0x%08x\n",(guint)surface); LOG_I( "va_display = 0x%08x\n",(guint)va_display); va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_FAIL; goto cleanup; - } - + } + ret = mix_videofmtenc_h264_send_encode_command (mix); if (ret != MIX_RESULT_SUCCESS) { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); goto cleanup; - } - - + } + + if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { - + va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; goto cleanup; } } - LOG_V( "vaEndPicture\n"); - + LOG_V( "vaEndPicture\n"); + if (mix->encoded_frames == 0) { mix->encoded_frames ++; mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; + mix->coded_buf_index ++; mix->coded_buf_index %=2; - + mix->last_frame = mix->cur_frame; - - + + /* determine the picture type*/ if ((mix->encoded_frames % parent->intra_period) == 0) { mix->is_intra = TRUE; } else { mix->is_intra = FALSE; - } - + } + tmp_frame = mix->rec_frame; mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - - - } - - - LOG_V( "vaSyncSurface\n"); - + mix->ref_frame = tmp_frame; + + + } + + + LOG_V( "vaSyncSurface\n"); + va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaSyncSurface\n"); + LOG_E( "Failed vaSyncSurface\n"); //return MIX_RESULT_FAIL; - } + } - LOG_V( - "Start to get encoded data\n"); + LOG_V( + "Start to get encoded data\n"); /*get encoded data from the VA buffer*/ va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; + LOG_E( "Failed vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; goto cleanup; - } + } + - VACodedBufferSegment *coded_seg = NULL; int num_seg = 0; guint total_size = 0; - guint size = 0; - + guint size = 0; + coded_seg = (VACodedBufferSegment *)buf; num_seg = 1; - + while (1) { total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - + + if (coded_seg->next == NULL) + break; + coded_seg = coded_seg->next; num_seg ++; } @@ -1685,190 +1715,190 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, 
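/*
 * Editor's sketch, not part of the original patch: totalling a frame's
 * bitstream across the VACodedBufferSegment chain, as done above after
 * mapping last_coded_buf.  The driver may split one frame over several
 * segments, so both the byte total and the segment count feed the copy
 * logic that follows.
 */
#include <va/va.h>

static unsigned int coded_total_size(const VACodedBufferSegment *seg,
                                     int *num_seg)
{
    unsigned int total = 0;
    int n = 0;

    while (seg != NULL) {
        total += seg->size;
        n++;
        seg = (const VACodedBufferSegment *)seg->next;
    }
    *num_seg = n;
    return total;
}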
#if 0 // first 4 bytes is the size of the buffer - memcpy (&(iovout->data_size), (void*)buf, 4); + memcpy (&(iovout->data_size), (void*)buf, 4); //size = (guint*) buf; guint size = iovout->data_size + 100; -#endif +#endif iovout->data_size = total_size; size = total_size + 100; - + iovout->buffer_size = size; - //We will support two buffer mode, one is application allocates the buffer and passes to encode, + //We will support two buffer mode, one is application allocates the buffer and passes to encode, //the other is encode allocate memory - + if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. iovout->data = g_malloc (size); // In case we have lots of 0x000001 start code, and we replace them with 4 bytes length prefixed if (iovout->data == NULL) { - LOG_E( "iovout->data == NULL\n"); + LOG_E( "iovout->data == NULL\n"); ret = MIX_RESULT_NO_MEMORY; - goto cleanup; + goto cleanup; } } - + coded_seg = (VACodedBufferSegment *)buf; total_size = 0; - + if (mix->delimiter_type == MIX_DELIMITER_ANNEXB) { - + while (1) { - + memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - + total_size += coded_seg->size; + + if (coded_seg->next == NULL) + break; + coded_seg = coded_seg->next; - } + } //memcpy (iovout->data, buf + 16, iovout->data_size); //parload is started from 17th byte - //size = iovout->data_size; - + //size = iovout->data_size; + } else { - + guint pos = 0; - guint zero_byte_count = 0; - guint prefix_length = 0; - guint8 nal_unit_type = 0; + guint zero_byte_count = 0; + guint prefix_length = 0; + guint8 nal_unit_type = 0; //guint8 * payload = buf + 16; - guint8 * payload = coded_seg->buf; - - while ((payload[pos++] == 0x00)) { + guint8 * payload = coded_seg->buf; + + while ((payload[pos++] == 0x00)) { zero_byte_count ++; if (pos >= coded_seg->size) //to make sure the buffer to be accessed is valid break; - } + } nal_unit_type = (guint8)(payload[pos] & 0x1f); - prefix_length = zero_byte_count + 1; + prefix_length = zero_byte_count + 1; - LOG_I ("nal_unit_type = %d\n", nal_unit_type); - LOG_I ("zero_byte_count = %d\n", zero_byte_count); + LOG_I ("nal_unit_type = %d\n", nal_unit_type); + LOG_I ("zero_byte_count = %d\n", zero_byte_count); - size = iovout->data_size; + size = iovout->data_size; if ((payload [pos - 1] & 0x01) && mix->slice_num == 1 && nal_unit_type == 1 && num_seg == 1) { iovout->data[0] = ((size - prefix_length) >> 24) & 0xff; iovout->data[1] = ((size - prefix_length) >> 16) & 0xff; iovout->data[2] = ((size - prefix_length) >> 8) & 0xff; - iovout->data[3] = (size - prefix_length) & 0xff; + iovout->data[3] = (size - prefix_length) & 0xff; // use 4 bytes to indicate the NALU length - //memcpy (iovout->data + 4, buf + 16 + prefix_length, size - prefix_length); - memcpy (iovout->data + 4, coded_seg->buf + prefix_length, size - prefix_length); - LOG_V ("We only have one start code, copy directly\n"); - } - else { + //memcpy (iovout->data + 4, buf + 16 + prefix_length, size - prefix_length); + memcpy (iovout->data + 4, coded_seg->buf + prefix_length, size - prefix_length); + LOG_V ("We only have one start code, copy directly\n"); + } + else { - if (num_seg == 1) { + if (num_seg == 1) { ret = mix_videofmtenc_h264_AnnexB_to_length_prefixed (coded_seg->buf, coded_seg->size, iovout->data, &size); if (ret != MIX_RESULT_SUCCESS) { - LOG_E ( - "Failed mix_videofmtenc_h264_AnnexB_to_length_prefixed\n"); + LOG_E ( + "Failed 
mix_videofmtenc_h264_AnnexB_to_length_prefixed\n"); goto cleanup; } - + } else { - + guint8 * tem_buf = NULL; tem_buf = g_malloc (size); if (tem_buf == NULL) { - LOG_E( "tem_buf == NULL\n"); + LOG_E( "tem_buf == NULL\n"); ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - + goto cleanup; + } + while (1) { - + memcpy (tem_buf + total_size, coded_seg->buf, coded_seg->size); - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - + total_size += coded_seg->size; + + if (coded_seg->next == NULL) + break; + coded_seg = coded_seg->next; - } - + } + ret = mix_videofmtenc_h264_AnnexB_to_length_prefixed (tem_buf, iovout->data_size, iovout->data, &size); if (ret != MIX_RESULT_SUCCESS) { - LOG_E ( - "Failed mix_videofmtenc_h264_AnnexB_to_length_prefixed\n"); + LOG_E ( + "Failed mix_videofmtenc_h264_AnnexB_to_length_prefixed\n"); goto cleanup; - } + } g_free (tem_buf); } } } - - LOG_I( - "out size is = %d\n", iovout->data_size); + + LOG_I( + "out size is = %d\n", iovout->data_size); va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_FAIL; goto cleanup; - } + } - LOG_V( "get encoded data done\n"); + LOG_V( "get encoded data done\n"); if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { - + va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; goto cleanup; } - } - + } + if (mix->encoded_frames == 1) { va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaBeginPicture\n"); ret = MIX_RESULT_FAIL; - goto cleanup; - } - + goto cleanup; + } + ret = mix_videofmtenc_h264_send_encode_command (mix); if (ret != MIX_RESULT_SUCCESS) { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); goto cleanup; - } - + } + va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaEndPicture\n"); + LOG_E( "Failed vaEndPicture\n"); ret = MIX_RESULT_FAIL; - goto cleanup; - } - - } - + goto cleanup; + } + + } + VASurfaceStatus status; - + /*query the status of current surface*/ va_status = vaQuerySurfaceStatus(va_display, surface, &status); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed vaQuerySurfaceStatus\n"); + LOG_E( + "Failed vaQuerySurfaceStatus\n"); ret = MIX_RESULT_FAIL; - goto cleanup; - } - mix->pic_skipped = status & VASurfaceSkipped; + goto cleanup; + } + mix->pic_skipped = status & VASurfaceSkipped; if (parent->need_display) { ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); @@ -1876,40 +1906,40 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, LOG_E("Failed to set sync_flag\n"); goto cleanup; } - - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); + + ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_framemanager_enqueue\n"); + { + LOG_E( + "Failed mix_framemanager_enqueue\n"); goto cleanup; - } + } } - + /*update the reference surface and reconstructed surface */ if 
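/*
 * Editor's sketch, not part of the original patch: the skipped-frame
 * probe used just above.  vaQuerySurfaceStatus() is polled after the
 * encode and the VASurfaceSkipped bit reports that the rate controller
 * dropped the picture, in which case the ref/rec swap below is
 * suppressed.
 */
#include <va/va.h>

static int frame_was_skipped(VADisplay dpy, VASurfaceID surface)
{
    VASurfaceStatus status;

    if (vaQuerySurfaceStatus(dpy, surface, &status) != VA_STATUS_SUCCESS)
        return -1;                        /* query itself failed */
    return (status & VASurfaceSkipped) ? 1 : 0;
}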
(!mix->pic_skipped) { tmp_frame = mix->rec_frame; mix->rec_frame= mix->ref_frame; mix->ref_frame = tmp_frame; - } - + } + #if 0 if (mix->ref_frame != NULL) mix_videoframe_unref (mix->ref_frame); mix->ref_frame = mix->rec_frame; - + mix_videoframe_unref (mix->cur_frame); -#endif - +#endif + mix->encoded_frames ++; mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; + mix->coded_buf_index ++; mix->coded_buf_index %=2; mix->last_frame = mix->cur_frame; #ifdef ANDROID - if(mix->last_mix_buffer) { + if(mix->last_mix_buffer) { LOG_V("calls to mix_buffer_unref \n"); LOG_V("refcount = %d\n", MIX_PARAMS(mix->last_mix_buffer)->refcount); mix_buffer_unref(mix->last_mix_buffer); @@ -1932,9 +1962,9 @@ cleanup: iovout->data = NULL; } } - - LOG_V( "end\n"); - + + LOG_V( "end\n"); + return ret; } @@ -1943,110 +1973,110 @@ MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size ( { MixVideoFormatEnc *parent = NULL; - + if (mix == NULL || max_size == NULL) { - LOG_E( + LOG_E( "mix == NULL || max_size == NULL\n"); return MIX_RESULT_NULL_PTR; } parent = MIX_VIDEOFORMATENC(mix); - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); - - LOG_V( "Begin\n"); + MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); + + LOG_V( "Begin\n"); if (MIX_IS_VIDEOFORMATENC_H264(self)) { if (self->coded_buf_size > 0) { *max_size = self->coded_buf_size; LOG_V ("Already calculate the max encoded size, get the value directly"); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } - + /*base on the rate control mode to calculate the defaule encoded buffer size*/ if (self->va_rcmode == VA_RC_NONE) { - self->coded_buf_size = - (parent->picture_width* parent->picture_height * 400) / (16 * 16); + self->coded_buf_size = + (parent->picture_width* parent->picture_height * 400) / (16 * 16); // set to value according to QP } - else { - self->coded_buf_size = parent->bitrate/ 4; + else { + self->coded_buf_size = parent->bitrate/ 4; } - - self->coded_buf_size = - max (self->coded_buf_size , + + self->coded_buf_size = + max (self->coded_buf_size , (parent->picture_width* parent->picture_height * 400) / (16 * 16)); - + /*in case got a very large user input bit rate value*/ - self->coded_buf_size = - min(self->coded_buf_size, + self->coded_buf_size = + min(self->coded_buf_size, (parent->picture_width * parent->picture_height * 1.5 * 8)); self->coded_buf_size = (self->coded_buf_size + 15) &(~15); } else { - LOG_E( - "not H264 video encode Object\n"); - return MIX_RESULT_INVALID_PARAM; + LOG_E( + "not H264 video encode Object\n"); + return MIX_RESULT_INVALID_PARAM; } *max_size = self->coded_buf_size; - - return MIX_RESULT_SUCCESS; + + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed ( guint8 * bufin, guint bufin_len, guint8* bufout, guint * bufout_len) { - + guint pos = 0; guint last_pos = 0; - - guint zero_byte_count = 0; + + guint zero_byte_count = 0; guint nal_size = 0; - guint prefix_length = 0; - guint size_copied = 0; - guint leading_zero_count = 0; - + guint prefix_length = 0; + guint size_copied = 0; + guint leading_zero_count = 0; + if (bufin == NULL || bufout == NULL || bufout_len == NULL) { - + LOG_E( "bufin == NULL || bufout == NULL || bufout_len = NULL\n"); return MIX_RESULT_NULL_PTR; } - + if (bufin_len <= 0 || *bufout_len <= 0) { LOG_E( "bufin_len <= 0 || *bufout_len <= 0\n"); - return MIX_RESULT_FAIL; + return MIX_RESULT_FAIL; } - - LOG_V ("Begin\n"); - - while ((bufin[pos++] == 0x00)) { + + LOG_V ("Begin\n"); + + while ((bufin[pos++] 
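/*
 * Editor's sketch, not part of the original patch: the worst-case
 * coded-buffer sizing computed in get_max_encoded_buf_size() above,
 * with a worked example.  A floor of 400 bytes per 16x16 macroblock
 * and a cap of width * height * 1.5 * 8 bracket the bitrate-derived
 * guess, and the result is rounded up to a 16-byte multiple.
 */
static unsigned int coded_buf_size(unsigned int w, unsigned int h,
                                   unsigned int bitrate, int rc_none)
{
    unsigned int per_mb_floor = (w * h * 400) / (16 * 16);
    unsigned int cap = (unsigned int)(w * h * 1.5 * 8);
    unsigned int size = rc_none ? per_mb_floor : bitrate / 4;

    if (size < per_mb_floor)
        size = per_mb_floor;
    if (size > cap)                /* guards against huge user bitrates */
        size = cap;
    return (size + 15) & ~15u;
}
/* 1280x720 at 4 Mbit/s: bitrate / 4 gives 1,000,000 bytes, but the
 * per-macroblock floor is 1,440,000, so that is the buffer size. */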
== 0x00)) { zero_byte_count ++; if (pos >= bufin_len) //to make sure the buffer to be accessed is valid break; - } - + } + if (bufin[pos - 1] != 0x01 || zero_byte_count < 2) { LOG_E("The stream is not AnnexB format \n"); return MIX_RESULT_FAIL; ; //not AnnexB, we won't process it - } - - zero_byte_count = 0; - last_pos = pos; - + } + + zero_byte_count = 0; + last_pos = pos; + while (pos < bufin_len) { - + while (bufin[pos++] == 0) { zero_byte_count ++; if (pos >= bufin_len) //to make sure the buffer to be accessed is valid - break; + break; } - + if (bufin[pos - 1] == 0x01 && zero_byte_count >= 2) { if (zero_byte_count == 2) { prefix_length = 3; @@ -2055,129 +2085,129 @@ MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed ( prefix_length = 4; leading_zero_count = zero_byte_count - 3; } - - LOG_I("leading_zero_count = %d\n", leading_zero_count); - - nal_size = pos - last_pos - prefix_length - leading_zero_count; + + LOG_I("leading_zero_count = %d\n", leading_zero_count); + + nal_size = pos - last_pos - prefix_length - leading_zero_count; if (nal_size < 0) { - LOG_E ("something wrong in the stream\n"); - return MIX_RESULT_FAIL; //not AnnexB, we won't process it + LOG_E ("something wrong in the stream\n"); + return MIX_RESULT_FAIL; //not AnnexB, we won't process it } - + if (*bufout_len < (size_copied + nal_size + 4)) { - LOG_E ("The length of destination buffer is too small\n"); - return MIX_RESULT_FAIL; + LOG_E ("The length of destination buffer is too small\n"); + return MIX_RESULT_FAIL; } - - LOG_I ("nal_size = %d\n", nal_size); - - /*We use 4 bytes length prefix*/ + + LOG_I ("nal_size = %d\n", nal_size); + + /*We use 4 bytes length prefix*/ bufout [size_copied] = nal_size >> 24 & 0xff; bufout [size_copied + 1] = nal_size >> 16 & 0xff; bufout [size_copied + 2] = nal_size >> 8 & 0xff; - bufout [size_copied + 3] = nal_size & 0xff; - - size_copied += 4; //4 bytes length prefix + bufout [size_copied + 3] = nal_size & 0xff; + + size_copied += 4; //4 bytes length prefix memcpy (bufout + size_copied, bufin + last_pos, nal_size); - size_copied += nal_size; - - LOG_I ("size_copied = %d\n", size_copied); - - zero_byte_count = 0; + size_copied += nal_size; + + LOG_I ("size_copied = %d\n", size_copied); + + zero_byte_count = 0; leading_zero_count = 0; - last_pos = pos; + last_pos = pos; } - + else if (pos == bufin_len) { - - LOG_V ("Last NALU in this frame\n"); - - nal_size = pos - last_pos; - + + LOG_V ("Last NALU in this frame\n"); + + nal_size = pos - last_pos; + if (*bufout_len < (size_copied + nal_size + 4)) { - LOG_E ("The length of destination buffer is too small\n"); - return MIX_RESULT_FAIL; + LOG_E ("The length of destination buffer is too small\n"); + return MIX_RESULT_FAIL; } - - /*We use 4 bytes length prefix*/ + + /*We use 4 bytes length prefix*/ bufout [size_copied] = nal_size >> 24 & 0xff; bufout [size_copied + 1] = nal_size >> 16 & 0xff; bufout [size_copied + 2] = nal_size >> 8 & 0xff; - bufout [size_copied + 3] = nal_size & 0xff; - - size_copied += 4; //4 bytes length prefix + bufout [size_copied + 3] = nal_size & 0xff; + + size_copied += 4; //4 bytes length prefix memcpy (bufout + size_copied, bufin + last_pos, nal_size); - size_copied += nal_size; - - LOG_I ("size_copied = %d\n", size_copied); + size_copied += nal_size; + + LOG_I ("size_copied = %d\n", size_copied); } - + else { zero_byte_count = 0; leading_zero_count = 0; } - + } - + if (size_copied != *bufout_len) { *bufout_len = size_copied; } - - LOG_V ("End\n"); - - return MIX_RESULT_SUCCESS; - + + LOG_V ("End\n"); + + 
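/*
 * Editor's sketch, not part of the original patch: the two building
 * blocks of the Annex B to length-prefixed conversion above.  Exactly
 * two zero bytes before the 0x01 mean a 3-byte start code; three or
 * more mean a 4-byte start code with any extra zeros counted as
 * padding.  Each NAL is then re-framed with a 4-byte big-endian
 * length in place of its start code.
 */
static void classify_start_code(unsigned int zero_byte_count,
                                unsigned int *prefix_length,
                                unsigned int *leading_zeros)
{
    if (zero_byte_count == 2) {
        *prefix_length = 3;          /* 00 00 01 */
        *leading_zeros = 0;
    } else {                         /* zero_byte_count >= 3 */
        *prefix_length = 4;          /* 00 00 00 01 */
        *leading_zeros = zero_byte_count - 3;
    }
}

static void write_nal_length(unsigned char *dst, unsigned int nal_size)
{
    dst[0] = (unsigned char)((nal_size >> 24) & 0xff);
    dst[1] = (unsigned char)((nal_size >> 16) & 0xff);
    dst[2] = (unsigned char)((nal_size >> 8) & 0xff);
    dst[3] = (unsigned char)(nal_size & 0xff);
}
/* A 1024-byte NAL (0x400) gets the prefix 00 00 04 00. */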
return MIX_RESULT_SUCCESS; + } MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix) { MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - + return MIX_RESULT_INVALID_PARAM; + if (mix->encoded_frames == 0 || parent->new_header_required) { ret = mix_videofmtenc_h264_send_seq_params (mix); if (ret != MIX_RESULT_SUCCESS) { - LOG_E( + LOG_E( "Failed mix_videofmtenc_h264_send_seq_params\n"); return MIX_RESULT_FAIL; } parent->new_header_required = FALSE; //Set to require new header filed to FALSE } - - ret = mix_videofmtenc_h264_send_picture_parameter (mix); - + + ret = mix_videofmtenc_h264_send_picture_parameter (mix); + if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed mix_videofmtenc_h264_send_picture_parameter\n"); + LOG_E( + "Failed mix_videofmtenc_h264_send_picture_parameter\n"); return MIX_RESULT_FAIL; } - + ret = mix_videofmtenc_h264_send_slice_parameter (mix); if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_slice_parameter\n"); + { + LOG_E( + "Failed mix_videofmtenc_h264_send_slice_parameter\n"); return MIX_RESULT_FAIL; - } - + } - LOG_V( "End\n"); + + LOG_V( "End\n"); return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, +MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params_enc, MixEncParamsType params_type) { @@ -2185,23 +2215,23 @@ MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, MixVideoFormatEnc *parent = NULL; MixVideoConfigParamsEncH264 * config_params_enc_h264; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; + return MIX_RESULT_INVALID_PARAM; MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); parent = MIX_VIDEOFORMATENC(&(mix->parent)); if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) { - config_params_enc_h264 = + config_params_enc_h264 = MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc); } else { - LOG_V( + LOG_V( "mix_videofmtenc_h264_initialize: no h264 config params found\n"); return MIX_RESULT_FAIL; - } + } /* * For case params_type == MIX_ENC_PARAMS_SLICE_SIZE @@ -2211,42 +2241,42 @@ MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, */ if (params_type == MIX_ENC_PARAMS_SLICE_SIZE) { - g_mutex_lock(parent->objectlock); - + g_mutex_lock(parent->objectlock); + ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, &self->slice_num); if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); - g_mutex_unlock(parent->objectlock); + g_mutex_unlock(parent->objectlock); return ret; - } + } + + g_mutex_unlock(parent->objectlock); - g_mutex_unlock(parent->objectlock); - } else if (params_type == MIX_ENC_PARAMS_IDR_INTERVAL) { - g_mutex_lock(parent->objectlock); - + g_mutex_lock(parent->objectlock); + ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264, &self->idr_interval); if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); - 
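/*
 * Editor's sketch, not part of the original patch: the per-frame
 * submission order send_encode_command() above enforces.  Sequence
 * parameters (the SPS) go out only for the first frame or after a
 * dynamic-config change raised new_header_required; picture and slice
 * parameters go out for every frame.  The function pointers are
 * hypothetical stand-ins for the three send_* helpers.
 */
static int submit_frame_params(int encoded_frames,
                               int *new_header_required,
                               int (*send_seq)(void),
                               int (*send_pic)(void),
                               int (*send_slice)(void))
{
    if (encoded_frames == 0 || *new_header_required) {
        if (send_seq() != 0)
            return -1;
        *new_header_required = 0;   /* header request satisfied */
    }
    if (send_pic() != 0)
        return -1;
    return send_slice();
}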
g_mutex_unlock(parent->objectlock); + g_mutex_unlock(parent->objectlock); return ret; - } + } parent->new_header_required = TRUE; - g_mutex_unlock(parent->objectlock); - + g_mutex_unlock(parent->objectlock); + } else{ /* Chainup parent method. */ @@ -2257,16 +2287,16 @@ MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, if (ret != MIX_RESULT_SUCCESS) { - LOG_V( + LOG_V( "chainup parent method (set_dynamic_config) failed \n"); return ret; - } + } } - LOG_V( "End\n"); + LOG_V( "End\n"); return MIX_RESULT_SUCCESS; - + } diff --git a/mix_video/src/mixvideoformatenc_h264.h b/mix_video/src/mixvideoformatenc_h264.h index a8f813a..2e7b12d 100644 --- a/mix_video/src/mixvideoformatenc_h264.h +++ b/mix_video/src/mixvideoformatenc_h264.h @@ -49,7 +49,8 @@ struct _MixVideoFormatEnc_H264 { MixVideoFrame *ref_frame; //reference frame MixVideoFrame *rec_frame; //reconstructed frame; MixVideoFrame *last_frame; //last frame; -#ifdef ANDROID + MixVideoFrame *lookup_frame; +#ifdef ANDROID MixBuffer *last_mix_buffer; #endif -- cgit v1.2.3 From d9236e3bac84748815a588fe060fd1fcc70e8790 Mon Sep 17 00:00:00 2001 From: Tao Tao Date: Mon, 22 Nov 2010 18:25:39 -0800 Subject: Bug fixes and Video conferencing use case feature support: dynamic bitrate control(bitrate, bitrate percentage, window, minQP and initQP), NAL size, slice number setting for I/P frames, dynamic resolution change handling, AIR and decode error types Change-Id: Id4b7b4a4c58c838b4adbf06116bb12897c53cf0b --- mix_vbp/ChangeLog | 11 +- mix_vbp/configure.ac | 2 +- mix_vbp/mixvbp.spec | 2 +- mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h | 4 + .../fw/codecs/h264/parser/h264parse_sps.c | 6 + mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c | 12 +- .../fw/codecs/vc1/parser/vc1parse_common_defs.h | 7 + mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 134 +- mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 77 +- mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 1267 ++++++------ mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 21 +- mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c | 78 +- mix_video/ChangeLog | 34 + mix_video/configure.ac | 2 +- mix_video/mixvideo.spec | 2 +- mix_video/src/mixframemanager.c | 138 +- mix_video/src/mixsurfacepool.c | 4 +- mix_video/src/mixvideo.c | 245 ++- mix_video/src/mixvideo.h | 112 +- mix_video/src/mixvideoconfigparamsdec.c | 112 ++ mix_video/src/mixvideoconfigparamsdec.h | 116 ++ mix_video/src/mixvideoconfigparamsenc.c | 348 ++-- mix_video/src/mixvideoconfigparamsenc.h | 261 ++- mix_video/src/mixvideoconfigparamsenc_h264.c | 76 +- mix_video/src/mixvideoconfigparamsenc_h264.h | 103 +- mix_video/src/mixvideodecodeparams.c | 15 + mix_video/src/mixvideodecodeparams.h | 30 +- mix_video/src/mixvideodef.h | 52 +- mix_video/src/mixvideoformat.c | 14 + mix_video/src/mixvideoformat.h | 1 + mix_video/src/mixvideoformat_h264.c | 152 +- mix_video/src/mixvideoformat_h264.h | 8 +- mix_video/src/mixvideoformat_mp42.c | 2063 ++++++++++---------- mix_video/src/mixvideoformat_mp42.h | 7 +- mix_video/src/mixvideoformat_vc1.c | 57 +- mix_video/src/mixvideoformatenc.c | 581 ++++-- mix_video/src/mixvideoformatenc.h | 59 +- mix_video/src/mixvideoformatenc_h264.c | 524 ++++- mix_video/src/mixvideoformatenc_h264.h | 26 +- 39 files changed, 4373 insertions(+), 2390 deletions(-) diff --git a/mix_vbp/ChangeLog b/mix_vbp/ChangeLog index 5efdae9..8b6bad2 100644 --- a/mix_vbp/ChangeLog +++ b/mix_vbp/ChangeLog @@ -1,5 +1,14 @@ +2010-11-10 Andy Qiu + * Parse color matrix, video range and aspect ratio + * Changed version number to 0.1.20 + 
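/*
 * Editor's sketch, not part of the original patch: the dynamic
 * reconfiguration pattern set_dynamic_enc_config() above follows.
 * Each parameter update happens under the object lock, and a change
 * that affects the bitstream headers (the IDR interval here) raises
 * new_header_required so the next encode call resends the SPS.
 */
#include <glib.h>

static void update_idr_interval(GMutex *objectlock,
                                unsigned int *idr_interval,
                                unsigned int new_value,
                                int *new_header_required)
{
    g_mutex_lock(objectlock);
    *idr_interval = new_value;
    *new_header_required = 1;       /* force SPS re-emission next frame */
    g_mutex_unlock(objectlock);
}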
+2010-10-25 Andy Qiu + * MPEG-4/H.263 partial frame support + * Changed VC1 slice parsing output to contain pic/slice header + * Changed version number to 0.1.19 + 2010-09-29 Andy Qiu - * Enhanced H.264 bitstream parsing + * Enhanced H.264 bitstream parsing * Changed version number to 0.1.18 2010-09-15 Tao Tao diff --git a/mix_vbp/configure.ac b/mix_vbp/configure.ac index 933f6ab..68e5d09 100644 --- a/mix_vbp/configure.ac +++ b/mix_vbp/configure.ac @@ -2,7 +2,7 @@ AC_INIT([""],[""],[linda.s.cline@intel.com]) AC_CONFIG_MACRO_DIR(m4) -UMG_MIX_VERSION(mixvbp, MIXVBP, 0, 1, 18) +UMG_MIX_VERSION(mixvbp, MIXVBP, 0, 1, 20) dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode AM_MAINTAINER_MODE diff --git a/mix_vbp/mixvbp.spec b/mix_vbp/mixvbp.spec index d3ccd71..1b6a353 100644 --- a/mix_vbp/mixvbp.spec +++ b/mix_vbp/mixvbp.spec @@ -6,7 +6,7 @@ Summary: MIX Video Bitstream Parser Name: mixvbp -Version: 0.1.18 +Version: 0.1.20 Release: 1 Source0: %{name}-%{version}.tar.bz2 NoSource: 0 diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h index 51f0602..4de5245 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h +++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h @@ -502,6 +502,10 @@ typedef struct _vui_seq_parameters_t_used uint8_t aspect_ratio_idc; // u(8) uint8_t video_signal_type_present_flag; // u(1) uint8_t video_format; // u(3) +#ifdef VBP + uint8_t video_full_range_flag; // u(1) + uint8_t matrix_coefficients; // u(8) +#endif uint8_t colour_description_present_flag; // u(1) uint8_t colour_primaries; // u(8) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c index 29ef54d..c377e2d 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c @@ -166,6 +166,9 @@ h264_Status h264_Parse_Vui_Parameters(void *parent, h264_Info* pInfo, seq_param_ viddec_pm_get_bits(parent, &code, 1); pVUI_Seq_Not_Used->video_full_range_flag = (uint8_t)code; +#ifdef VBP + SPS->sps_disp.vui_seq_parameters.video_full_range_flag = (uint8_t)code; +#endif viddec_pm_get_bits(parent, &code, 1); SPS->sps_disp.vui_seq_parameters.colour_description_present_flag = (uint8_t)code; @@ -180,6 +183,9 @@ h264_Status h264_Parse_Vui_Parameters(void *parent, h264_Info* pInfo, seq_param_ viddec_pm_get_bits(parent, &code, 8); pVUI_Seq_Not_Used->matrix_coefficients = (uint8_t)code; +#ifdef VBP + SPS->sps_disp.vui_seq_parameters.matrix_coefficients = (uint8_t)code; +#endif } } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c index d98b38a..4973b1d 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c @@ -150,13 +150,19 @@ vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) if(result == 1) { if (sh.seq_disp_size.ASPECT_RATIO_FLAG == 1) - { + { result = viddec_pm_get_bits(ctxt, &tempValue, 4); sh.ASPECT_RATIO = tempValue; if (sh.ASPECT_RATIO == 15) { result = viddec_pm_get_bits(ctxt, &sh.aspect_size, 16); } +#ifdef VBP + md->ASPECT_RATIO_FLAG = 1; + md->ASPECT_RATIO = sh.ASPECT_RATIO; + md->ASPECT_HORIZ_SIZE = sh.seq_aspect_size.ASPECT_HORIZ_SIZE; + md->ASPECT_VERT_SIZE = sh.seq_aspect_size.ASPECT_VERT_SIZE; +#endif } result = viddec_pm_get_bits(ctxt, &tempValue, 1); @@ -182,6 +188,10 @@ vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) { result = 
viddec_pm_get_bits(ctxt, &sh.color_format, 24); } +#ifdef VBP + md->COLOR_FORMAT_FLAG = sh.COLOR_FORMAT_FLAG; + md->MATRIX_COEF = sh.seq_color_format.MATRIX_COEF; +#endif } // Successful get of display size } // DISPLAY_EXT is 1 diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h index 149e364..ce36849 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h @@ -207,7 +207,14 @@ typedef struct uint16_t heightMB; #ifdef VBP + uint8_t COLOR_FORMAT_FLAG; + uint8_t MATRIX_COEF; uint8_t SYNCMARKER; + uint8_t ASPECT_RATIO_FLAG; + uint8_t ASPECT_RATIO; + uint8_t ASPECT_HORIZ_SIZE; + uint8_t ASPECT_VERT_SIZE; + #endif } vc1_metadata_t; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index e62c411..27436b9 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -102,6 +102,31 @@ unsigned char* UseDefaultList[8] = Default_8x8_Inter }; +static uint8 h264_aspect_ratio_table[][2] = +{ + {0, 0}, + {1, 1}, + {12, 11}, + {10, 11}, + {16, 11}, + {40, 33}, + {24, 11}, + {20, 11}, + {32, 11}, + {80, 33}, + {18, 11}, + {15, 11}, + {64, 33}, + {160, 99}, + {4, 3}, + {3, 2}, + {2, 1}, + // reserved + {0, 0} +}; + + + /** * */ @@ -468,7 +493,7 @@ static inline void vbp_set_reference_frames_h264( /* set short term reference frames */ for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++) { - if (frame_idx >= 16) + if (frame_idx >= 16 || buffer_idx >= 16) { WTRACE("Frame index is out of bound."); break; @@ -508,7 +533,7 @@ static inline void vbp_set_reference_frames_h264( /* set long term reference frames */ for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++) { - if (frame_idx >= 16) + if (frame_idx >= 16 || buffer_idx >= 16) { WTRACE("Frame index is out of bound."); break; @@ -792,41 +817,67 @@ static void vbp_set_codec_data_h264( codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16; - /* frame cropping */ - codec_data->frame_cropping_flag = - parser->info.active_SPS.sps_disp.frame_cropping_flag; - - codec_data->frame_crop_rect_left_offset = - parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset; - - codec_data->frame_crop_rect_right_offset = - parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset; - - codec_data->frame_crop_rect_top_offset = - parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset; - - codec_data->frame_crop_rect_bottom_offset = - parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset; - - /* aspect ratio */ - codec_data->aspect_ratio_info_present_flag = - parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag; - - codec_data->aspect_ratio_idc = - parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc; - codec_data->sar_width = - parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width; - - codec_data->sar_height = - parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height; + /* aspect ratio */ + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag) + { + codec_data->aspect_ratio_idc = + parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc; + + if (codec_data->aspect_ratio_idc < 17) + { + codec_data->sar_width = 
h264_aspect_ratio_table[codec_data->aspect_ratio_idc][0]; + codec_data->sar_height = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][1]; + } + else if (codec_data->aspect_ratio_idc == 255) + { + codec_data->sar_width = + parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width; + + codec_data->sar_height = + parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height; + } + else + { + codec_data->sar_width = 0; + codec_data->sar_height = 0; + } + } + else + { + // unspecified + codec_data->aspect_ratio_idc = 0; + codec_data->sar_width = 0; + codec_data->sar_height = 0; + } - /* video format */ - codec_data->video_format = - parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format; + /* video format */ + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag) + { + codec_data->video_format = + parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format; + } + else + { + // Unspecified video format + codec_data->video_format = 5; + } - codec_data->video_format = - parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag; + codec_data->video_full_range_flag = + parser->info.active_SPS.sps_disp.vui_seq_parameters.video_full_range_flag; + + + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag) + { + codec_data->matrix_coefficients = + parser->info.active_SPS.sps_disp.vui_seq_parameters.matrix_coefficients; + } + else + { + // Unspecified + codec_data->matrix_coefficients = 2; + } + /* picture order type and count */ codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; @@ -1481,6 +1532,16 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) } query_data->num_pictures = 0; + if (query_data->new_sps && !query_data->has_pps) + { + // we are waiting for a new pps, so should net reset new_sps flag + } + else + { + query_data->new_sps = 0; + } + query_data->new_pps = 0; + cxt->list.num_items = 0; /* reset start position of first item to 0 in case there is only one item */ @@ -1664,12 +1725,17 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) break; case h264_NAL_UNIT_TYPE_SPS: + if (query_data->has_sps) + query_data->new_sps = 1; query_data->has_sps = 1; query_data->has_pps = 0; ITRACE("SPS header is parsed."); break; case h264_NAL_UNIT_TYPE_PPS: + if (query_data->has_pps || query_data->new_sps) + query_data->new_pps = 1; + query_data->has_pps = 1; ITRACE("PPS header is parsed."); break; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index e266ea6..60a30b1 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -39,32 +39,57 @@ typedef void *Handle; typedef struct _vbp_codec_data_mp42 { uint8 profile_and_level_indication; + uint32 video_object_layer_width; + uint32 video_object_layer_height; + + // 0 for unspecified, PAL/NTSC/SECAM + uint8 video_format; + + // 0 short range, 1 full range + uint8 video_range; + + // default 2 (unspecified), 1 for BT709. 
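/*
 * Editor's sketch, not part of the original patch: the sample aspect
 * ratio resolution the parser change above implements.  Values below
 * 17 index the fixed table from H.264 Table E-1, 255 (Extended_SAR)
 * carries an explicit pair parsed from the VUI, and anything else is
 * reported as 0/0, i.e. unspecified.
 */
static void resolve_sar(unsigned char idc,
                        unsigned short vui_w, unsigned short vui_h,
                        unsigned short *sar_w, unsigned short *sar_h)
{
    static const unsigned short table[17][2] = {
        {0, 0},   {1, 1},    {12, 11}, {10, 11}, {16, 11}, {40, 33},
        {24, 11}, {20, 11},  {32, 11}, {80, 33}, {18, 11}, {15, 11},
        {64, 33}, {160, 99}, {4, 3},   {3, 2},   {2, 1}
    };

    if (idc < 17) {
        *sar_w = table[idc][0];
        *sar_h = table[idc][1];
    } else if (idc == 255) {        /* Extended_SAR */
        *sar_w = vui_w;
        *sar_h = vui_h;
    } else {
        *sar_w = 0;                 /* reserved -> unspecified */
        *sar_h = 0;
    }
}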
+ uint8 matrix_coefficients; + + uint8 short_video_header; + + // always exist for mpeg-4, + uint8 aspect_ratio_info; + uint8 par_width; + uint8 par_height; + } vbp_codec_data_mp42; typedef struct _vbp_slice_data_mp42 -{ +{ uint8* buffer_addr; uint32 slice_offset; uint32 slice_size; VASliceParameterBufferMPEG4 slice_param; } vbp_slice_data_mp42; -typedef struct _vbp_picture_data_mp42 +typedef struct _vbp_picture_data_mp42 vbp_picture_data_mp42; + +struct _vbp_picture_data_mp42 { uint8 vop_coded; + uint16 vop_time_increment; + /* indicates if current buffer contains parameter for the first slice of the picture */ + uint8 new_picture_flag; VAPictureParameterBufferMPEG4 picture_param; - VAIQMatrixBufferMPEG4 iq_matrix_buffer; - - uint32 number_slices; - vbp_slice_data_mp42 *slice_data; + vbp_slice_data_mp42 slice_data; -} vbp_picture_data_mp42; + vbp_picture_data_mp42* next_picture_data; +}; typedef struct _vbp_data_mp42 { vbp_codec_data_mp42 codec_data; + VAIQMatrixBufferMPEG4 iq_matrix_buffer; + uint32 number_picture_data; uint32 number_pictures; + vbp_picture_data_mp42 *picture_data; } vbp_data_mp42; @@ -91,25 +116,26 @@ typedef struct _vbp_codec_data_h264 int frame_width; int frame_height; - uint8 frame_cropping_flag; - int frame_crop_rect_left_offset; - int frame_crop_rect_right_offset; - int frame_crop_rect_top_offset; - int frame_crop_rect_bottom_offset; - uint8 vui_parameters_present_flag; + /* aspect ratio */ - uint8 aspect_ratio_info_present_flag; uint8 aspect_ratio_idc; uint16 sar_width; uint16 sar_height; /* video fromat */ - uint8 video_signal_type_present_flag; + + // default 5 unspecified uint8 video_format; + uint8 video_full_range_flag; + + // default 2 unspecified + uint8 matrix_coefficients; uint8 pic_order_cnt_type; int log2_max_pic_order_cnt_lsb_minus4; + + int bit_rate; } vbp_codec_data_h264; @@ -150,6 +176,10 @@ typedef struct _vbp_data_h264 /* if PPS has been received */ uint8 has_pps; + uint8 new_sps; + + uint8 new_pps; + vbp_picture_data_h264* pic_data; /** @@ -177,6 +207,9 @@ typedef struct _vbp_codec_data_vc1 uint8 FINTERPFLAG; uint8 PSF; + // default 2: unspecified + uint8 MATRIX_COEF; + /* Entry point layer. */ uint8 BROKEN_LINK; uint8 CLOSED_ENTRY; @@ -210,6 +243,15 @@ typedef struct _vbp_codec_data_vc1 uint8 INTCOMPFIELD; uint8 LUMSCALE2; uint8 LUMSHIFT2; + + // aspect ratio + + // default unspecified + uint8 ASPECT_RATIO; + + uint8 ASPECT_HORIZ_SIZE; + uint8 ASPECT_VERT_SIZE; + } vbp_codec_data_vc1; typedef struct _vbp_slice_data_vc1 @@ -255,15 +297,12 @@ enum _vbp_parser_error VBP_OK, VBP_TYPE, VBP_LOAD, - VBP_UNLOAD, VBP_INIT, VBP_DATA, VBP_DONE, - VBP_GLIB, VBP_MEM, VBP_PARM, - VBP_CXT, - VBP_IMPL + VBP_PARTIAL }; enum _vbp_parser_type diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index 7a65dbe..1acfd9b 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -16,31 +16,55 @@ #include "vbp_mp42_parser.h" #include "../codecs/mp4/parser/viddec_mp4_parse.h" -#define MIX_VBP_COMP "mixvbp" + + +static bool short_video_header = TRUE; + +static uint8 mp4_aspect_ratio_table[][2] = +{ + // forbidden + {0, 0}, + {1, 1}, + {12, 11}, + {10, 11}, + {16, 11}, + {40, 33}, + + // reserved + {0, 0} +}; + /* * Some divX avi files contains 2 frames in one gstbuffer. 
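Because one input buffer can carry more than one coded VOP, the refactored vbp_data_mp42 above chains per-picture state through next_picture_data instead of using a fixed-size array, with number_picture_data counting the nodes. A consumer walks the list roughly like this (a sketch built on the fields declared above; data is assumed to come from a query call, and submit_picture is a hypothetical downstream function):

vbp_picture_data_mp42 *pic;
for (pic = data->picture_data; pic != NULL; pic = pic->next_picture_data)
{
    if (!pic->vop_coded)
        continue;                        /* not-coded VOP: nothing to decode */
    submit_picture(&pic->picture_param,  /* hypothetical downstream call     */
                   &pic->slice_data.slice_param);
}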
*/ -#define MAX_NUM_PICTURES_MP42 8 -uint32 vbp_get_sc_pos_mp42(uint8 *buf, uint32 length, - uint32* sc_phase, uint32 *sc_end_pos, uint8 *is_normal_sc); + +uint32 vbp_get_sc_pos_mp42( + uint8 *buf, + uint32 length, + uint32 *sc_end_pos, + uint8 *is_normal_sc, + uint8* resync_marker); void vbp_on_vop_mp42(vbp_context *pcontext, int list_index); void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index); -void vbp_dump_query_data(vbp_context *pcontext, int list_index); - +void vbp_fill_codec_data(vbp_context *pcontext); +vbp_picture_data_mp42* vbp_get_mp42_picture_data(vbp_data_mp42 * query_data); uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index); uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index); +uint32 vbp_process_video_packet_mp42(vbp_context *pcontext); + +static inline uint32 vbp_sprite_trajectory_mp42( + void *parent, + mp4_VideoObjectLayer_t *vidObjLay, + mp4_VideoObjectPlane_t *vidObjPlane); -/* This is coppied from DHG mp42 parser */ -static inline mp4_Status_t -vbp_sprite_trajectory_mp42(void *parent, mp4_VideoObjectLayer_t *vidObjLay, - mp4_VideoObjectPlane_t *vidObjPlane); -/* This is coppied from DHG mp42 parser */ -static inline int32_t vbp_sprite_dmv_length_mp42(void * parent, - int32_t *dmv_length); +static inline uint32 vbp_sprite_dmv_length_mp42( + void * parent, + int32_t *dmv_length); + /** * @@ -49,7 +73,7 @@ uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext) { if (NULL == pcontext->parser_ops) { - /* absolutely impossible, just sanity check */ + // absolutely impossible, just sanity check return VBP_PARM; } pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_mp4_init"); @@ -96,11 +120,9 @@ uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext) */ uint32 vbp_parse_init_data_mp42(vbp_context *pcontext) { - VTRACE ("begin\n"); - vbp_parse_start_code_mp42(pcontext); - VTRACE ("end\n"); - - return VBP_OK; + uint32 ret = VBP_OK; + ret = vbp_parse_start_code_mp42(pcontext); + return ret; } uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index) @@ -113,78 +135,101 @@ uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index) uint32 current_sc = parser->current_sc; is_svh = parser->cur_sc_prefix ? 
false : true; - VTRACE ("begin\n"); - - VTRACE ("current_sc = 0x%x profile_and_level_indication = 0x%x\n", - parser->current_sc, parser->info.profile_and_level_indication); - if (!is_svh) { - /* remove prefix from current_sc */ + // remove prefix from current_sc current_sc &= 0x0FF; switch (current_sc) { case MP4_SC_VISUAL_OBJECT_SEQUENCE: - VTRACE ("MP4_SC_VISUAL_OBJECT_SEQUENCE\n"); - + VTRACE ("Visual Object Sequence is parsed.\n"); query_data->codec_data.profile_and_level_indication = parser->info.profile_and_level_indication; - + VTRACE ("profile_and_level_indication = 0x%x\n", parser->info.profile_and_level_indication); break; + case MP4_SC_VIDEO_OBJECT_PLANE: - VTRACE ("MP4_SC_VIDEO_OBJECT_PLANE\n"); + VTRACE ("Video Object Plane is parsed.\n"); vbp_on_vop_mp42(pcontext, list_index); break; - default: { - if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (current_sc - <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) { - query_data->codec_data.profile_and_level_indication - = parser->info.profile_and_level_indication; - } else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX) { - if (parser->sc_seen == MP4_SC_SEEN_SVH) { - VTRACE ("parser->sc_seen == MP4_SC_SEEN_SVH\n"); + + default: + if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && + (current_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) + { + VTRACE ("Video Object Layer is parsed\n"); + short_video_header = FALSE; + vbp_fill_codec_data(pcontext); + } + else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX && + current_sc >= MP4_SC_VIDEO_OBJECT_MIN) + { + if (parser->sc_seen == MP4_SC_SEEN_SVH) + { + // this should never happen!!!! + WTRACE ("Short video header is parsed.\n"); vbp_on_vop_svh_mp42(pcontext, list_index); } } - } break; } - - } else { - if (parser->sc_seen == MP4_SC_SEEN_SVH) { - VTRACE ("parser->sc_seen == MP4_SC_SEEN_SVH\n"); + } + else + { + if (parser->sc_seen == MP4_SC_SEEN_SVH) + { + VTRACE ("Short video header is parsed.\n"); vbp_on_vop_svh_mp42(pcontext, list_index); } } - VTRACE ("End\n"); - return VBP_OK; } -/* - * This function fills viddec_pm_cxt_t by start codes - * I may change the codes to make it more efficient later - */ + +/* +* partial frame handling: +* +* h.263: the picture header is lost if the first GOB is discarded; a redundant pic header must be +* conveyed in the packet (RFC 4629) for each following GOB, otherwise the +* picture can't be decoded. +* +* MPEG-4: the VideoObjectPlane header is lost if the first slice is discarded. However, the picture +* is still decodable as long as the header_extension_code is 1 in video_packet_header. +* +* MPEG-4 with short header: video_plane_with_short_header is lost if the first GOB +* is discarded. As this header is not duplicated (RFC 3016), the picture is not decodable. +* +* In sum: +* If the buffer contains the 32-bit start code (0x000001xx), proceed as normal. +* +* If the buffer contains 22 bits of "0000 0000 0000 0000 1000 00", which indicates an h.263 +* picture start code or short_video_start_marker, proceed as normal. +* +* If the buffer contains 22 bits of "0000 0000 0000 0000 1XXX XX" (where XXX XX starts from 000 01), which +* indicates an h.263 Group Start code or a gob_resync_marker of the gob_layer in MPEG-4 with +* short header, report the packet as a partial frame - no more parsing is needed. +* +* If the buffer contains a string of 0s between 16 bits and 22 bits long, followed by a single '1' bit, which indicates a resync marker, +* the buffer will be immediately parsed and num_items is set to 0.
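The four rules above reduce to tests on the buffer's leading bits. A condensed sketch of the dispatch (hypothetical helper, not part of the patch; w holds the first 32 bits of the buffer MSB-first and is assumed non-zero, short_video_header is the file-scope flag declared above, and count_leading_zeros stands in for a clz intrinsic):

if ((w >> 8) == 0x000001)
{
    /* 32-bit start code 0x000001xx: parse normally */
}
else if ((w >> 10) == 0x20)
{
    /* 22 bits "0000 0000 0000 0000 1000 00": h.263 picture start code
     * or short_video_start_marker: parse normally */
}
else if (count_leading_zeros(w) >= 16 && count_leading_zeros(w) <= 22)
{
    if (short_video_header)
    {
        /* h.263 GOB start code / gob_resync_marker: partial frame, stop */
    }
    else
    {
        /* MPEG-4 resync marker: recover via video_packet_header,
         * then report num_items = 0 */
    }
}
else
{
    /* invalid data */
}

Note the ordering: the 22-bit picture start code must be tested before the generic leading-zeros case, since both share the 16-zero prefix.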
+*/ uint32 vbp_parse_start_code_mp42(vbp_context *pcontext) { viddec_pm_cxt_t *cxt = pcontext->parser_cxt; - /*viddec_parser_ops_t *func = pcontext->parser_ops; */ uint8 *buf = NULL; uint32 size = 0; - uint32 sc_phase = 0; uint32 sc_end_pos = -1; - uint32 bytes_parsed = 0; - viddec_mp4_parser_t *pinfo = NULL; - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; - /* reset query data for the new sample buffer */ + + + // reset query data for the new sample buffer + query_data->number_picture_data= 0; query_data->number_pictures = 0; - /* emulation prevention byte is always present */ + // emulation prevention byte is always present cxt->getbits.is_emul_reqd = 1; cxt->list.num_items = 0; @@ -197,25 +242,21 @@ uint32 vbp_parse_start_code_mp42(vbp_context *pcontext) pinfo = (viddec_mp4_parser_t *) &(cxt->codec_data[0]); uint8 is_normal_sc = 0; - + uint8 resync_marker = 0; uint32 found_sc = 0; + uint32 ret = VBP_OK; - VTRACE ("begin cxt->parse_cubby.size= %d\n", size); - - while (1) { - - sc_phase = 0; - - found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size - - bytes_parsed, &sc_phase, &sc_end_pos, &is_normal_sc); - - if (found_sc) { - - VTRACE ("sc_end_pos = %d\n", sc_end_pos); + while (1) + { + found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size- bytes_parsed, + &sc_end_pos, &is_normal_sc, &resync_marker); + if (found_sc) + { cxt->list.data[cxt->list.num_items].stpos = bytes_parsed + sc_end_pos - 3; - if (cxt->list.num_items != 0) { + if (cxt->list.num_items != 0) + { cxt->list.data[cxt->list.num_items - 1].edpos = bytes_parsed + sc_end_pos - 3; } @@ -223,27 +264,53 @@ uint32 vbp_parse_start_code_mp42(vbp_context *pcontext) cxt->list.num_items++; pinfo->cur_sc_prefix = is_normal_sc; - - } else { - - if (cxt->list.num_items != 0) { + } + else + { + if (cxt->list.num_items != 0) + { cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size; break; - } else { - - VTRACE ("I didn't find any sc in cubby buffer! The size of cubby is %d\n", - size); - + } + else + { + WTRACE ("No start-code is found in cubby buffer! 
The size of cubby is %d\n", size); cxt->list.num_items = 1; cxt->list.data[0].stpos = 0; cxt->list.data[0].edpos = cxt->parse_cubby.size; + + if (resync_marker) + { + // either the first slice (GOB) is lost or the parser receives a single slice (GOB) + if (short_video_header) + { + // TODO: revisit if HW supports GOB layer decoding for h.263 + WTRACE("Partial frame: GOB buffer.\n"); + ret = VBP_PARTIAL; + } + else + { + WTRACE("Partial frame: video packet header buffer.\n"); + ret = vbp_process_video_packet_mp42(pcontext); + } + + // set num_items to 0 so the buffer will not be parsed again + cxt->list.num_items = 0; + } + else + { + ETRACE("Invalid data received.\n"); + cxt->list.num_items = 0; + return VBP_DATA; + } + break; } } } - return VBP_OK; + return ret; } uint32 vbp_populate_query_data_mp42(vbp_context *pcontext) @@ -254,33 +321,103 @@ return VBP_OK; } -void vbp_fill_codec_data(vbp_context *pcontext, int list_index) +vbp_picture_data_mp42* vbp_get_mp42_picture_data(vbp_data_mp42 * query_data) { + vbp_picture_data_mp42 *picture_data = query_data->picture_data; + int num_pictures = query_data->number_picture_data; + while (num_pictures > 1) + { + picture_data = picture_data->next_picture_data; + num_pictures--; + } + + return picture_data; +} - /* fill vbp_codec_data_mp42 data */ +void vbp_fill_codec_data(vbp_context *pcontext) +{ viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *)
&(pcontext->parser_cxt->codec_data[0]); - if (!parser->info.VisualObject.VideoObject.short_video_header) { + if (!parser->info.VisualObject.VideoObject.short_video_header) + { vbp_process_slices_mp42(pcontext, list_index); - } else { + } + else + { vbp_process_slices_svh_mp42(pcontext, list_index); } } -void vbp_fill_picture_param(vbp_context *pcontext, int list_index) +void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) { - viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; @@ -288,21 +425,61 @@ void vbp_fill_picture_param(vbp_context *pcontext, int list_index) vbp_picture_data_mp42 *picture_data = NULL; VAPictureParameterBufferMPEG4 *picture_param = NULL; - picture_data = &(query_data->picture_data[query_data->number_pictures]); - + if (new_picture_flag) + { + query_data->number_pictures++; + } + + picture_data = query_data->picture_data; + if (picture_data == NULL || query_data->number_picture_data == 0) + { + // first entry + if (picture_data == NULL) + { + picture_data = (vbp_picture_data_mp42*)g_try_new0(vbp_picture_data_mp42, 1); + query_data->picture_data = picture_data; + } + query_data->number_picture_data = 1; + } + else + { + // find the last active one + int i = query_data->number_picture_data; + while (i > 1) + { + picture_data = picture_data->next_picture_data; + i--; + } + if (picture_data->next_picture_data == NULL) + { + picture_data->next_picture_data = g_try_new0(vbp_picture_data_mp42, 1); + } + + query_data->number_picture_data++; + + picture_data = picture_data->next_picture_data; + } + picture_param = &(picture_data->picture_param); uint8 idx = 0; + picture_data->new_picture_flag = new_picture_flag; + picture_data->vop_coded = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded; + VTRACE ("vop_coded = %d\n", picture_data->vop_coded); - /* - * fill picture_param - */ - /* NOTE: for short video header, the parser saves vop_width and vop_height + picture_data->vop_time_increment = + parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_time_increment; + + // fill picture_param + + + /* + * NOTE: for short video header, the parser saves vop_width and vop_height * to VOL->video_object_layer_width and VOL->video_object_layer_height */ picture_param->vop_width @@ -313,16 +490,15 @@ void vbp_fill_picture_param(vbp_context *pcontext, int list_index) picture_param->forward_reference_picture = VA_INVALID_SURFACE; picture_param->backward_reference_picture = VA_INVALID_SURFACE; - /* - * VAPictureParameterBufferMPEG4::vol_fields - */ + // Fill VAPictureParameterBufferMPEG4::vol_fields + picture_param->vol_fields.bits.short_video_header = parser->info.VisualObject.VideoObject.short_video_header; picture_param->vol_fields.bits.chroma_format = parser->info.VisualObject.VideoObject.VOLControlParameters.chroma_format; /* TODO: find out why testsuite always set this value to be 0 */ - // picture_param->vol_fields.bits.chroma_format = 0; + picture_param->vol_fields.bits.chroma_format = 0; picture_param->vol_fields.bits.interlaced = parser->info.VisualObject.VideoObject.interlaced; @@ -345,8 +521,9 @@ void vbp_fill_picture_param(vbp_context *pcontext, int list_index) picture_param->no_of_sprite_warping_points = parser->info.VisualObject.VideoObject.sprite_info.no_of_sprite_warping_points; - - for (idx = 0; idx < 3; idx++) { + + for (idx = 0; idx < 3; idx++) + { picture_param->sprite_trajectory_du[idx] = 
parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_du[idx]; picture_param->sprite_trajectory_dv[idx] @@ -356,24 +533,28 @@ void vbp_fill_picture_param(vbp_context *pcontext, int list_index) picture_param->quant_precision = parser->info.VisualObject.VideoObject.quant_precision; - /* - * VAPictureParameterBufferMPEG4::vop_fields - */ + // fill VAPictureParameterBufferMPEG4::vop_fields + - if (!parser->info.VisualObject.VideoObject.short_video_header) { + if (!parser->info.VisualObject.VideoObject.short_video_header) + { picture_param->vop_fields.bits.vop_coding_type = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type; - } else { + } + else + { picture_param->vop_fields.bits.vop_coding_type = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.picture_coding_type; } - /* TODO: + /* + * TODO: * fill picture_param->vop_fields.bits.backward_reference_vop_coding_type * This shall be done in mixvideoformat_mp42. See M42 spec 7.6.7 */ - if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B) { + if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B) + { picture_param->vop_fields.bits.backward_reference_vop_coding_type = picture_param->vop_fields.bits.vop_coding_type; } @@ -394,33 +575,19 @@ void vbp_fill_picture_param(vbp_context *pcontext, int list_index) picture_param->vop_time_increment_resolution = parser->info.VisualObject.VideoObject.vop_time_increment_resolution; - /* short header related */ + // short header related picture_param->num_gobs_in_vop = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_gobs_in_vop; picture_param->num_macroblocks_in_gob = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_macroblocks_in_gob; - /* for direct mode prediction */ + // for direct mode prediction picture_param->TRB = parser->info.VisualObject.VideoObject.TRB; picture_param->TRD = parser->info.VisualObject.VideoObject.TRD; - -#if 0 - printf( - "parser->info.VisualObject.VideoObject.reduced_resolution_vop_enable = %d\n", - parser->info.VisualObject.VideoObject.reduced_resolution_vop_enable); - - printf("parser->info.VisualObject.VideoObject.data_partitioned = %d\n", - parser->info.VisualObject.VideoObject.data_partitioned); - - printf( - "####parser->info.VisualObject.VideoObject.resync_marker_disable = %d####\n", - parser->info.VisualObject.VideoObject.resync_marker_disable); -#endif } -void vbp_fill_iq_matrix_buffer(vbp_context *pcontext, int list_index) +void vbp_fill_iq_matrix_buffer(vbp_context *pcontext) { - viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; @@ -428,52 +595,40 @@ void vbp_fill_iq_matrix_buffer(vbp_context *pcontext, int list_index) mp4_VOLQuant_mat_t *quant_mat_info = &(parser->info.VisualObject.VideoObject.quant_mat_info); - vbp_picture_data_mp42 *picture_data = NULL; VAIQMatrixBufferMPEG4 *iq_matrix = NULL; - picture_data = &(query_data->picture_data[query_data->number_pictures]); - iq_matrix = &(picture_data->iq_matrix_buffer); + iq_matrix = &(query_data->iq_matrix_buffer); iq_matrix->load_intra_quant_mat = quant_mat_info->load_intra_quant_mat; iq_matrix->load_non_intra_quant_mat = quant_mat_info->load_nonintra_quant_mat; memcpy(iq_matrix->intra_quant_mat, quant_mat_info->intra_quant_mat, 64); - memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat, - 64); + memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat, 64); } + void 
vbp_on_vop_mp42(vbp_context *pcontext, int list_index) { - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; - - vbp_fill_codec_data(pcontext, list_index); - - vbp_fill_picture_param(pcontext, list_index); - vbp_fill_iq_matrix_buffer(pcontext, list_index); + vbp_fill_codec_data(pcontext); + vbp_fill_picture_param(pcontext, 1); + vbp_fill_iq_matrix_buffer(pcontext); vbp_fill_slice_data(pcontext, list_index); - - query_data->number_pictures++; } void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index) { - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; - - vbp_fill_codec_data(pcontext, list_index); - - vbp_fill_picture_param(pcontext, list_index); - vbp_fill_iq_matrix_buffer(pcontext, list_index); + vbp_fill_codec_data(pcontext); + vbp_fill_picture_param(pcontext, 1); + vbp_fill_iq_matrix_buffer(pcontext); vbp_fill_slice_data(pcontext, list_index); - - query_data->number_pictures++; } uint32 vbp_get_sc_pos_mp42( uint8 *buf, uint32 length, - uint32* sc_phase, uint32 *sc_end_pos, - uint8 *is_normal_sc) + uint8 *is_normal_sc, + uint8 *resync_marker) { uint8 *ptr = buf; uint32 size; @@ -481,15 +636,17 @@ uint32 vbp_get_sc_pos_mp42( size = 0; data_left = length; - phase = *sc_phase; *sc_end_pos = -1; /* parse until there is more data and start code not found */ - while ((data_left > 0) && (phase < 3)) { + while ((data_left > 0) && (phase < 3)) + { /* Check if we are byte aligned & phase=0, if thats the case we can check work at a time instead of byte*/ - if (((((uint32) ptr) & 0x3) == 0) && (phase == 0)) { - while (data_left > 3) { + if (((((uint32) ptr) & 0x3) == 0) && (phase == 0)) + { + while (data_left > 3) + { uint32 data; char mask1 = 0, mask2 = 0; @@ -501,12 +658,16 @@ uint32 vbp_get_sc_pos_mp42( mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1)); /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need two consecutive zero bytes for a start code pattern */ - if (mask1 && mask2) {/* Success so skip 4 bytes and start over */ + if (mask1 && mask2) + { + /* Success so skip 4 bytes and start over */ ptr += 4; size += 4; data_left -= 4; continue; - } else { + } + else + { break; } } @@ -514,18 +675,25 @@ uint32 vbp_get_sc_pos_mp42( /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected two zero bytes in the word so we look one byte at a time*/ - if (data_left > 0) { - if (*ptr == FIRST_STARTCODE_BYTE) {/* Phase can be 3 only if third start code byte is found */ + if (data_left > 0) + { + if (*ptr == FIRST_STARTCODE_BYTE) + { + /* Phase can be 3 only if third start code byte is found */ phase++; ptr++; size++; data_left--; - if (phase > 2) { + if (phase > 2) + { phase = 2; - if ((((uint32) ptr) & 0x3) == 0) { - while (data_left > 3) { - if (*((uint32 *) ptr) != 0) { + if ((((uint32) ptr) & 0x3) == 0) + { + while (data_left > 3) + { + if (*((uint32 *) ptr) != 0) + { break; } ptr += 4; @@ -534,30 +702,31 @@ uint32 vbp_get_sc_pos_mp42( } } } - } else { + } + else + { uint8 normal_sc = 0, short_sc = 0; - if (phase == 2) { + if (phase == 2) + { normal_sc = (*ptr == THIRD_STARTCODE_BYTE); short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC)); - VTRACE ("short_sc = %d\n", short_sc); - *is_normal_sc = normal_sc; + + // at least 16-bit 0, may be GOB start code or + // resync marker. 
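For orientation, the scanner above is a small state machine in which phase records how much of a start-code prefix has been matched. A reading of the code, not new behavior:

/* phase 0 - looking for a first 0x00 byte
 * phase 1 - one 0x00 byte seen
 * phase 2 - two or more 0x00 bytes seen, i.e. at least 16 zero bits
 * phase 3 - third start-code byte matched (0x01, or the h.263-style
 *           short code masked with 0xFC)
 * phase 4 - at least one byte follows the match, so sc_end_pos is
 *           valid and 1 is returned
 * Reaching phase 2 means the data already shares the 16-zero-bit prefix
 * of a GOB start code or resync marker, which is why the resync_marker
 * hint below is raised even when no full start code follows. */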
+ *resync_marker = 1; } - if (!(normal_sc | short_sc)) { + if (!(normal_sc | short_sc)) + { phase = 0; - } else {/* Match for start code so update context with byte position */ + } + else + { + /* Match for start code so update context with byte position */ *sc_end_pos = size; phase = 3; - - //if (normal_sc) { - //} else { - // /* For short start code since start code is in one nibble just return at this point */ - // phase += 1; - // ret = 1; - // break; - //} } ptr++; size++; @@ -565,35 +734,38 @@ uint32 vbp_get_sc_pos_mp42( } } } - if ((data_left > 0) && (phase == 3)) { + if ((data_left > 0) && (phase == 3)) + { (*sc_end_pos)++; phase++; ret = 1; } - *sc_phase = phase; - /* Return SC found only if phase is 4, else always success */ + + // Return 1 only if phase is 4, else always return 0 return ret; } + uint32 vbp_macroblock_number_length_mp42(uint32 numOfMbs) { uint32 length = 0; numOfMbs--; - do { + do + { numOfMbs >>= 1; length++; - } while (numOfMbs); + } + while (numOfMbs); return length; } -mp4_Status_t vbp_video_packet_header_mp42( +uint32 vbp_parse_video_packet_header_mp42( void *parent, viddec_mp4_parser_t *parser_cxt, uint16_t *quant_scale, uint32 *macroblock_number) { - - mp4_Status_t ret = MP4_STATUS_OK; + uint32 ret = VBP_DATA; mp4_Info_t *pInfo = &(parser_cxt->info); mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject); mp4_VideoObjectPlane_t *vidObjPlane = @@ -607,148 +779,165 @@ mp4_Status_t vbp_video_packet_header_mp42( uint32 header_extension_codes = 0; uint8 vop_coding_type = vidObjPlane->vop_coding_type; - do { - if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) { - ret = MP4_STATUS_NOTSUPPORT; - break; - } - - /* get macroblock_number */ - { - uint16_t mbs_x = (vidObjLay->video_object_layer_width + 15) >> 4; - uint16_t mbs_y = (vidObjLay->video_object_layer_height + 15) >> 4; - uint32 length = vbp_macroblock_number_length_mp42(mbs_x - * mbs_y); + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) + { + return VBP_DATA; + } + + do + { + // get macroblock_number + uint16_t mbs_x = (vidObjLay->video_object_layer_width + 15) >> 4; + uint16_t mbs_y = (vidObjLay->video_object_layer_height + 15) >> 4; + uint32 length = vbp_macroblock_number_length_mp42(mbs_x * mbs_y); - getbits = viddec_pm_get_bits(parent, &code, length); - BREAK_GETBITS_FAIL(getbits, ret); + getbits = viddec_pm_get_bits(parent, &code, length); + BREAK_GETBITS_FAIL(getbits, ret); - _macroblock_number = code; - } + _macroblock_number = code; - /* quant_scale */ - if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) { - getbits = viddec_pm_get_bits(parent, &code, - vidObjLay->quant_precision); + // quant_scale + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) + { + getbits = viddec_pm_get_bits(parent, &code, vidObjLay->quant_precision); BREAK_GETBITS_FAIL(getbits, ret); _quant_scale = code; } - /* header_extension_codes */ - if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) { + // header_extension_codes + if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) + { getbits = viddec_pm_get_bits(parent, &code, 1); BREAK_GETBITS_FAIL(getbits, ret); header_extension_codes = code; } - if (header_extension_codes) { - do { + if (header_extension_codes) + { + // modulo time base + do + { getbits = viddec_pm_get_bits(parent, &code, 1); BREAK_GETBITS_FAIL(getbits, ret); } while (code); - /* marker_bit */ + // marker_bit getbits = viddec_pm_get_bits(parent, &code, 1); 
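vbp_macroblock_number_length_mp42 above computes the bit width of the macroblock_number field being read here, i.e. the number of bits needed to code values 0 through numOfMbs - 1, which is ceil(log2(numOfMbs)). Two spot checks with illustrative frame sizes (sketch, not part of the patch):

#include <assert.h>

static void check_mb_number_widths(void)
{
    /* QCIF 176x144: 11 *  9 =   99 MBs -> ceil(log2(99))   =  7 bits */
    assert(vbp_macroblock_number_length_mp42(99) == 7);
    /* PAL  720x576: 45 * 36 = 1620 MBs -> ceil(log2(1620)) = 11 bits */
    assert(vbp_macroblock_number_length_mp42(1620) == 11);
}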
BREAK_GETBITS_FAIL(getbits, ret); - /* vop_time_increment */ + // vop_time_increment + uint32 numbits = 0; + numbits = vidObjLay->vop_time_increment_resolution_bits; + if (numbits == 0) { - uint32 numbits = 0; - numbits = vidObjLay->vop_time_increment_resolution_bits; - if (numbits == 0) { - numbits = 1; - } - getbits = viddec_pm_get_bits(parent, &code, numbits); - BREAK_GETBITS_FAIL(getbits, ret); + // ?? + numbits = 1; } - /* marker_bit */ + getbits = viddec_pm_get_bits(parent, &code, numbits); + BREAK_GETBITS_FAIL(getbits, ret); + vidObjPlane->vop_time_increment = code; + + + // marker_bit getbits = viddec_pm_get_bits(parent, &code, 1); BREAK_GETBITS_FAIL(getbits, ret); - /* vop_coding_type */ + // vop_coding_type getbits = viddec_pm_get_bits(parent, &code, 2); BREAK_GETBITS_FAIL(getbits, ret); vop_coding_type = code & 0x3; + vidObjPlane->vop_coding_type = vop_coding_type; - /* Fixed Klocwork issue: Code is unreachable. - * Comment the following codes because we have - * already checked video_object_layer_shape - */ - /* if (vidObjLay->video_object_layer_shape - != MP4_SHAPE_TYPE_RECTANGULAR) { - ret = MP4_STATUS_NOTSUPPORT; - break; - } - */ - if (vidObjLay->video_object_layer_shape - != MP4_SHAPE_TYPE_BINARYONLY) { - /* intra_dc_vlc_thr */ + + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) + { + // intra_dc_vlc_thr getbits = viddec_pm_get_bits(parent, &code, 3); BREAK_GETBITS_FAIL(getbits, ret); - if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) - && (vop_coding_type == MP4_VOP_TYPE_S) - && (vidObjLay->sprite_info.no_of_sprite_warping_points - > 0)) { + + vidObjPlane->intra_dc_vlc_thr = code; + if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && + (vop_coding_type == MP4_VOP_TYPE_S) && + (vidObjLay->sprite_info.no_of_sprite_warping_points> 0)) + { if (vbp_sprite_trajectory_mp42(parent, vidObjLay, - vidObjPlane) != MP4_STATUS_OK) { + vidObjPlane) != VBP_OK) + { break; } } - if (vidObjLay->reduced_resolution_vop_enable - && (vidObjLay->video_object_layer_shape - == MP4_SHAPE_TYPE_RECTANGULAR) - && ((vop_coding_type == MP4_VOP_TYPE_I) - || (vop_coding_type == MP4_VOP_TYPE_P))) { - /* vop_reduced_resolution */ + if (vidObjLay->reduced_resolution_vop_enable && + (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) && + ((vop_coding_type == MP4_VOP_TYPE_I) || + (vop_coding_type == MP4_VOP_TYPE_P))) + { + // vop_reduced_resolution getbits = viddec_pm_get_bits(parent, &code, 1); BREAK_GETBITS_FAIL(getbits, ret); } - if (vop_coding_type == MP4_VOP_TYPE_I) { - /* vop_fcode_forward */ - getbits = viddec_pm_get_bits(parent, &code, 3); + if (vop_coding_type != MP4_VOP_TYPE_I) + { + // vop_fcode_forward + getbits = viddec_pm_get_bits(parent, &code, 3); BREAK_GETBITS_FAIL(getbits, ret); + vidObjPlane->vop_fcode_forward = code; } - if (vop_coding_type == MP4_VOP_TYPE_B) { - /* vop_fcode_backward */ + if (vop_coding_type == MP4_VOP_TYPE_B) + { + // vop_fcode_backward getbits = viddec_pm_get_bits(parent, &code, 3); BREAK_GETBITS_FAIL(getbits, ret); + vidObjPlane->vop_fcode_backward = code; } } } - if (vidObjLay->newpred_enable) { - /* New pred mode not supported in HW, but, does libva support this? */ - ret = MP4_STATUS_NOTSUPPORT; + if (vidObjLay->newpred_enable) + { + // New pred mode not supported in HW, but, does libva support this? 
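The numbits == 0 guard on vop_time_increment above is not arbitrary: the field is coded with ceil(log2(vop_time_increment_resolution)) bits, which degenerates to 0 when the resolution is 1, yet at least one bit must still be read. Worked values (illustrative):

/* vop_time_increment_resolution = 30000 (NTSC timebase):
 *     ceil(log2(30000)) = 15 -> a 15-bit vop_time_increment field
 * vop_time_increment_resolution = 1 (degenerate but legal):
 *     ceil(log2(1)) = 0 -> clamped to 1 bit, which is what the
 *     guard above appears to handle */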
+ ret = VBP_DATA; break; } *quant_scale = _quant_scale; *macroblock_number = _macroblock_number; - } while (0); + + ret = VBP_OK; + } + while (0); return ret; } uint32 vbp_resync_marker_Length_mp42(viddec_mp4_parser_t *parser_cxt) { - mp4_Info_t *pInfo = &(parser_cxt->info); mp4_VideoObjectPlane_t *vidObjPlane = &(pInfo->VisualObject.VideoObject.VideoObjectPlane); uint32 resync_marker_length = 0; - if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) { + if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) + { resync_marker_length = 17; - } else if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B) { + } + else if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B) + { uint8 fcode_max = vidObjPlane->vop_fcode_forward; - if (fcode_max < vidObjPlane->vop_fcode_backward) { + if (fcode_max < vidObjPlane->vop_fcode_backward) + { fcode_max = vidObjPlane->vop_fcode_backward; - } + } resync_marker_length = 16 + fcode_max; - } else { + + // resync_marker is max(15+fcode,17) zeros followed by a one + if (resync_marker_length < 18) + resync_marker_length = 18; + } + else + { resync_marker_length = 16 + vidObjPlane->vop_fcode_forward; } return resync_marker_length; @@ -756,27 +945,22 @@ uint32 vbp_resync_marker_Length_mp42(viddec_mp4_parser_t *parser_cxt) uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index) { - uint32 ret = MP4_STATUS_OK; + uint32 ret = VBP_OK; vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; viddec_pm_cxt_t *parent = pcontext->parser_cxt; viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]); - VTRACE ("begin\n"); - - vbp_picture_data_mp42 *picture_data = - &(query_data->picture_data[query_data->number_pictures]); - vbp_slice_data_mp42 *slice_data = &(picture_data->slice_data[0]); + vbp_picture_data_mp42 *picture_data = vbp_get_mp42_picture_data(query_data); + vbp_slice_data_mp42 *slice_data = &(picture_data->slice_data); VASliceParameterBufferMPEG4* slice_param = &(slice_data->slice_param); - picture_data->number_slices = 1; - uint8 is_emul = 0; uint32 bit_offset = 0; uint32 byte_offset = 0; - /* The offsets are relative to parent->parse_cubby.buf */ + // The offsets are relative to parent->parse_cubby.buf viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); slice_data->buffer_addr = parent->parse_cubby.buf; @@ -794,24 +978,20 @@ uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index) slice_param->quant_scale = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant; - VTRACE ("end\n"); - return ret; } -mp4_Status_t vbp_process_slices_mp42(vbp_context *pcontext, int list_index) +uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) { - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; viddec_pm_cxt_t *parent = pcontext->parser_cxt; - viddec_mp4_parser_t *parser_cxt = - (viddec_mp4_parser_t *) &(parent->codec_data[0]); + viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]); vbp_picture_data_mp42 *picture_data = NULL; vbp_slice_data_mp42 *slice_data = NULL; VASliceParameterBufferMPEG4* slice_param = NULL; - uint32 ret = MP4_STATUS_OK; + uint32 ret = VBP_OK; uint8 is_emul = 0; uint32 bit_offset = 0; @@ -821,12 +1001,9 @@ mp4_Status_t vbp_process_slices_mp42(vbp_context *pcontext, int list_index) int32_t getbits = 0; uint32 resync_marker_length = 0; - uint32 slice_index = 0; - #ifdef VBP_TRACE uint32 list_size_at_index = parent->list.data[list_index].edpos - parent->list.data[list_index].stpos; -#endif VTRACE 
("list_index = %d list_size_at_index = %d\n", list_index, list_size_at_index); @@ -834,6 +1011,7 @@ mp4_Status_t vbp_process_slices_mp42(vbp_context *pcontext, int list_index) VTRACE ("list_index = %d edpos = %d stpos = %d\n", list_index, parent->list.data[list_index].edpos, parent->list.data[list_index].stpos); +#endif /* The offsets are relative to parent->parse_cubby.buf */ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); @@ -845,8 +1023,9 @@ mp4_Status_t vbp_process_slices_mp42(vbp_context *pcontext, int list_index) } #endif - picture_data = &(query_data->picture_data[query_data->number_pictures]); - slice_data = &(picture_data->slice_data[slice_index]); + + picture_data = vbp_get_mp42_picture_data(query_data); + slice_data = &(picture_data->slice_data); slice_param = &(slice_data->slice_param); slice_data->buffer_addr = parent->parse_cubby.buf; @@ -864,216 +1043,339 @@ mp4_Status_t vbp_process_slices_mp42(vbp_context *pcontext, int list_index) slice_param->quant_scale = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane.vop_quant; - slice_index++; - picture_data->number_slices = slice_index; - - /* - * scan for resync_marker - */ - - if (!parser_cxt->info.VisualObject.VideoObject.resync_marker_disable) { + if (parser_cxt->info.VisualObject.VideoObject.resync_marker_disable) + { + // no resync_marker + return VBP_OK; + } - viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); - if (bit_offset) { - getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset); - if (getbits == -1) { - ret = MP4_STATUS_PARSE_ERROR; - return ret; - } + // scan for resync_marker + viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); + if (bit_offset) + { + // byte-aligned + getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset); + if (getbits == -1) + { + return VBP_DATA; } + } - /* - * get resync_marker_length - */ - resync_marker_length = vbp_resync_marker_Length_mp42(parser_cxt); + // get resync_marker_length + resync_marker_length = vbp_resync_marker_Length_mp42(parser_cxt); - while (1) { + uint16_t quant_scale = 0; + uint32 macroblock_number = 0; - uint16_t quant_scale = 0; - uint32 macroblock_number = 0; + while (1) + { + getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length); - getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length); + // return VBP_OK as resync_marker may not be present + BREAK_GETBITS_FAIL(getbits, ret); + + if (code != 1) + { + getbits = viddec_pm_get_bits(parent, &code, 8); BREAK_GETBITS_FAIL(getbits, ret); + continue; + } - if (code != 1) { - getbits = viddec_pm_get_bits(parent, &code, 8); - BREAK_GETBITS_FAIL(getbits, ret); - continue; - } + // We found resync_marker + viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); - /* - * We found resync_marker - */ + // update slice data as we found resync_marker + slice_data->slice_size -= (parent->list.data[list_index].edpos + - parent->list.data[list_index].stpos - byte_offset); + slice_param->slice_data_size = slice_data->slice_size; - viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); + // skip resync marker + getbits = viddec_pm_get_bits(parent, &code, resync_marker_length); - slice_data->slice_size -= (parent->list.data[list_index].edpos - - parent->list.data[list_index].stpos - byte_offset); - slice_param->slice_data_size = slice_data->slice_size; + // return VBP_DATA, this should never happen! 
+ BREAK_GETBITS_FAIL(getbits, ret); + + // parse video_packet_header + ret = vbp_parse_video_packet_header_mp42(parent, parser_cxt, + &quant_scale, &macroblock_number); + + if (ret != VBP_OK) + { + ETRACE("Failed to parse video packet header.\n"); + return ret; + } + + // new_picture_flag = 0, this is not the first slice of a picture + vbp_fill_picture_param(pcontext, 0); + + picture_data = vbp_get_mp42_picture_data(query_data); + slice_data = &(picture_data->slice_data); + slice_param = &(slice_data->slice_param); + - slice_data = &(picture_data->slice_data[slice_index]); - slice_param = &(slice_data->slice_param); + viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); - /* - * parse video_packet_header - */ - getbits = viddec_pm_get_bits(parent, &code, resync_marker_length); - BREAK_GETBITS_FAIL(getbits, ret); + slice_data->buffer_addr = parent->parse_cubby.buf; - vbp_video_packet_header_mp42(parent, parser_cxt, - &quant_scale, &macroblock_number); + slice_data->slice_offset = byte_offset + + parent->list.data[list_index].stpos; + slice_data->slice_size = parent->list.data[list_index].edpos + - parent->list.data[list_index].stpos - byte_offset; - viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); + slice_param->slice_data_size = slice_data->slice_size; + slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + slice_param->slice_data_offset = 0; + slice_param->macroblock_offset = bit_offset; + slice_param->macroblock_number = macroblock_number; + slice_param->quant_scale = quant_scale; - slice_data->buffer_addr = parent->parse_cubby.buf; + if (bit_offset) + { + // byte-align parsing position + getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset); + if (getbits == -1) + { + ETRACE("Failed to align parser to byte position.\n"); + return VBP_DATA; + } + } - slice_data->slice_offset = byte_offset - + parent->list.data[list_index].stpos; - slice_data->slice_size = parent->list.data[list_index].edpos - - parent->list.data[list_index].stpos - byte_offset; + } - slice_param->slice_data_size = slice_data->slice_size; - slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; - slice_param->slice_data_offset = 0; - slice_param->macroblock_offset = bit_offset; - slice_param->macroblock_number = macroblock_number; - slice_param->quant_scale = quant_scale; + return VBP_OK; +} - slice_index++; +uint32 vbp_process_video_packet_mp42(vbp_context *pcontext) +{ + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + viddec_pm_cxt_t *parent = pcontext->parser_cxt; + viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]); + uint32 code = 0; + int32_t getbits = 0; + + uint32 ret = VBP_DATA; - if (slice_index >= MAX_NUM_SLICES) { - ret = MP4_STATUS_PARSE_ERROR; - break; - } - if (bit_offset) - { - /* byte-align parsing position */ - getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset); - if (getbits == -1) - { - ret = MP4_STATUS_PARSE_ERROR; - return ret; - } - } + // setup bitstream parser + parent->getbits.list = &(parent->list); + + parent->getbits.bstrm_buf.buf = parent->parse_cubby.buf; + parent->getbits.bstrm_buf.buf_index = 0; + parent->getbits.bstrm_buf.buf_st = 0; + parent->getbits.bstrm_buf.buf_end = parent->parse_cubby.size; + parent->getbits.bstrm_buf.buf_bitoff = 0; + + parent->getbits.au_pos = 0; + parent->getbits.list_off = 0; + parent->getbits.phase = 0; + parent->getbits.emulation_byte_counter = 0; + + parent->list.start_offset = 0; + parent->list.end_offset = parent->parse_cubby.size; + parent->list.total_bytes =
parent->parse_cubby.size; + + + // skip leading zero-byte + while (code == 0) + { + getbits = viddec_pm_get_bits(parent, &code, 8); + BREAK_GETBITS_FAIL(getbits, ret); + getbits = viddec_pm_peek_bits(parent, &code, 8); + BREAK_GETBITS_FAIL(getbits, ret); + } + + if (getbits != 0) + { + return VBP_DATA; + } + + // resync-marker is represented as 17-23 bits. (16-22 bits of 0) + // as 16-bit '0' has been skipped, we try to parse buffer bit by bit + // until bit 1 is encountered or up to 7 bits are parsed. + code = 0; + uint8 count = 0; + while (code == 0 && count < 7) + { + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_FAIL(getbits, ret); + count++; + } + + if (code == 0 || getbits != 0) + { + ETRACE("no resync-marker in the buffer.\n"); + return ret; + } + + // resync marker is skipped + uint16_t quant_scale = 0; + uint32 macroblock_number = 0; + + // parse video_packet_header + vbp_parse_video_packet_header_mp42(parent, parser_cxt, + &quant_scale, &macroblock_number); + + // new_picture_flag = 0, this is not the first slice of a picture + vbp_fill_picture_param(pcontext, 0); + + vbp_picture_data_mp42 *picture_data = NULL; + vbp_slice_data_mp42 *slice_data = NULL; + VASliceParameterBufferMPEG4* slice_param = NULL; + + picture_data = vbp_get_mp42_picture_data(query_data); + slice_data = &(picture_data->slice_data); + slice_param = &(slice_data->slice_param); - ret = vbp_process_slices_mp42(pcontext, 0); + // update slice's QP and macroblock number as they are set to 0 by default. + slice_param->macroblock_number = macroblock_number; + slice_param->quant_scale = quant_scale; + + // VOP must be coded! + picture_data->vop_coded = 1; return ret; + } -/* This is copied from DHG MP42 parser */ -static inline int32_t vbp_sprite_dmv_length_mp42( + +static inline uint32 vbp_sprite_dmv_length_mp42( void * parent, int32_t *dmv_length) { uint32 code, skip; int32_t getbits = 0; - mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + uint32 ret = VBP_DATA; *dmv_length = 0; skip = 3; - do { + do + { getbits = viddec_pm_peek_bits(parent, &code, skip); BREAK_GETBITS_FAIL(getbits, ret); - if (code == 7) { + if (code == 7) + { viddec_pm_skip_bits(parent, skip); getbits = viddec_pm_peek_bits(parent, &code, 9); BREAK_GETBITS_FAIL(getbits, ret); skip = 1; - while ((code & 256) != 0) {/* count number of 1 bits */ + while ((code & 256) != 0) + { + // count number of 1 bits code <<= 1; skip++; } *dmv_length = 5 + skip; - } else { + } + else + { skip = (code <= 1) ?
2 : 3; *dmv_length = code - 1; } viddec_pm_skip_bits(parent, skip); - ret = MP4_STATUS_OK; + ret = VBP_OK; - } while (0); + } + while (0); return ret; } -/* This is coppied from DHG MP42 parser */ -static inline mp4_Status_t vbp_sprite_trajectory_mp42( + +static inline uint32 vbp_sprite_trajectory_mp42( void *parent, mp4_VideoObjectLayer_t *vidObjLay, mp4_VideoObjectPlane_t *vidObjPlane) { uint32 code, i; int32_t dmv_length = 0, dmv_code = 0, getbits = 0; - mp4_Status_t ret = MP4_STATUS_OK; - for (i = 0; i - < (uint32) vidObjLay->sprite_info.no_of_sprite_warping_points; i++) { + uint32 ret = VBP_OK; + for (i = 0; i < (uint32) vidObjLay->sprite_info.no_of_sprite_warping_points; i++) + { + ret = VBP_DATA; ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length); - if (ret != MP4_STATUS_OK) { + if (ret != VBP_OK) + { break; } - if (dmv_length <= 0) { + if (dmv_length <= 0) + { dmv_code = 0; - } else { + } + else + { getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length); BREAK_GETBITS_FAIL(getbits, ret); dmv_code = (int32_t) code; - if ((dmv_code & (1 << (dmv_length - 1))) == 0) { + if ((dmv_code & (1 << (dmv_length - 1))) == 0) + { dmv_code -= (1 << dmv_length) - 1; } } getbits = viddec_pm_get_bits(parent, &code, 1); BREAK_GETBITS_FAIL(getbits, ret); - if (code != 1) { - ret = MP4_STATUS_PARSE_ERROR; + if (code != 1) + { + ret = VBP_DATA; break; } vidObjPlane->warping_mv_code_du[i] = dmv_code; - /* TODO: create another inline function to avoid code duplication */ + // TODO: create another inline function to avoid code duplication ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length); - if (ret != MP4_STATUS_OK) { + if (ret != VBP_OK) + { break; } - if (dmv_length <= 0) { + // reset return value in case early break + ret = VBP_DATA; + if (dmv_length <= 0) + { dmv_code = 0; - } else { + } + else + { getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length); BREAK_GETBITS_FAIL(getbits, ret); dmv_code = (int32_t) code; - if ((dmv_code & (1 << (dmv_length - 1))) == 0) { + if ((dmv_code & (1 << (dmv_length - 1))) == 0) + { dmv_code -= (1 << dmv_length) - 1; } } getbits = viddec_pm_get_bits(parent, &code, 1); BREAK_GETBITS_FAIL(getbits, ret); - if (code != 1) { - ret = MP4_STATUS_PARSE_ERROR; + if (code != 1) + { break; } vidObjPlane->warping_mv_code_dv[i] = dmv_code; + // set to VBP_OK + ret = VBP_OK; + } return ret; } + /* * free memory of vbp_data_mp42 structure and its members */ uint32 vbp_free_query_data_mp42(vbp_context *pcontext) { - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; - gint idx = 0; + vbp_picture_data_mp42* current = NULL; + vbp_picture_data_mp42* next = NULL; - if (query_data) { - if (query_data->picture_data) { - for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) { - g_free(query_data->picture_data[idx].slice_data); - } - g_free(query_data->picture_data); - } + if (query_data) + { + current = query_data->picture_data; + while (current != NULL) + { + next = current->next_picture_data; + g_free(current); + current = next; + } g_free(query_data); } @@ -1087,202 +1389,25 @@ uint32 vbp_free_query_data_mp42(vbp_context *pcontext) */ uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext) { - - gint idx = 0; vbp_data_mp42 *query_data; pcontext->query_data = NULL; query_data = g_try_new0(vbp_data_mp42, 1); - if (query_data == NULL) { - goto cleanup; - } - - query_data->picture_data = g_try_new0(vbp_picture_data_mp42, - MAX_NUM_PICTURES_MP42); - if (NULL == query_data->picture_data) { + if (query_data == NULL) + { goto cleanup; } - for (idx = 0; 
idx < MAX_NUM_PICTURES_MP42; idx++) { - query_data->picture_data[idx].number_slices = 0; - query_data->picture_data[idx].slice_data = g_try_new0( - vbp_slice_data_mp42, MAX_NUM_SLICES); - - if (query_data->picture_data[idx].slice_data == NULL) { - goto cleanup; - } - } - pcontext->query_data = (void *) query_data; + query_data->picture_data = NULL; + query_data->number_picture_data = 0; + query_data->number_pictures = 0; + return VBP_OK; - cleanup: - - if (query_data) { - if (query_data->picture_data) { - for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) { - g_free(query_data->picture_data[idx].slice_data); - } - g_free(query_data->picture_data); - } - - g_free(query_data); - } +cleanup: + vbp_free_query_data_mp42(pcontext); + return VBP_MEM; } - -void vbp_dump_query_data(vbp_context *pcontext, int list_index) -{ - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; - - vbp_picture_data_mp42 *picture_data = NULL; - VAPictureParameterBufferMPEG4 *picture_param = NULL; - vbp_slice_data_mp42 *slice_data = NULL; - - uint32 idx = 0, jdx = 0; - - for (idx = 0; idx < query_data->number_pictures; idx++) { - - picture_data = &(query_data->picture_data[idx]); - picture_param = &(picture_data->picture_param); - slice_data = &(picture_data->slice_data[0]); - - g_print("======================= dump_begin ======================\n\n"); - g_print("======================= codec_data ======================\n"); - - /* codec_data */ - g_print("codec_data.profile_and_level_indication = 0x%x\n", - query_data->codec_data.profile_and_level_indication); - - g_print("==================== picture_param =======================\n"); - - /* picture_param */ - g_print("picture_param->vop_width = %d\n", picture_param->vop_width); - g_print("picture_param->vop_height = %d\n", picture_param->vop_height); - - g_print("picture_param->vol_fields.bits.short_video_header = %d\n", - picture_param->vol_fields.bits.short_video_header); - g_print("picture_param->vol_fields.bits.chroma_format = %d\n", - picture_param->vol_fields.bits.chroma_format); - g_print("picture_param->vol_fields.bits.interlaced = %d\n", - picture_param->vol_fields.bits.interlaced); - g_print("picture_param->vol_fields.bits.obmc_disable = %d\n", - picture_param->vol_fields.bits.obmc_disable); - g_print("picture_param->vol_fields.bits.sprite_enable = %d\n", - picture_param->vol_fields.bits.sprite_enable); - g_print( - "picture_param->vol_fields.bits.sprite_warping_accuracy = %d\n", - picture_param->vol_fields.bits.sprite_warping_accuracy); - g_print("picture_param->vol_fields.bits.quant_type = %d\n", - picture_param->vol_fields.bits.quant_type); - g_print("picture_param->vol_fields.bits.quarter_sample = %d\n", - picture_param->vol_fields.bits.quarter_sample); - g_print("picture_param->vol_fields.bits.data_partitioned = %d\n", - picture_param->vol_fields.bits.data_partitioned); - g_print("picture_param->vol_fields.bits.reversible_vlc = %d\n", - picture_param->vol_fields.bits.reversible_vlc); - - g_print("picture_param->no_of_sprite_warping_points = %d\n", - picture_param->no_of_sprite_warping_points); - g_print("picture_param->quant_precision = %d\n", - picture_param->quant_precision); - g_print("picture_param->sprite_trajectory_du = %d, %d, %d\n", - picture_param->sprite_trajectory_du[0], - picture_param->sprite_trajectory_du[1], - picture_param->sprite_trajectory_du[2]); - g_print("picture_param->sprite_trajectory_dv = %d, %d, %d\n", - picture_param->sprite_trajectory_dv[0], - picture_param->sprite_trajectory_dv[1], - 
picture_param->sprite_trajectory_dv[2]); - - g_print("picture_param->vop_fields.bits.vop_coding_type = %d\n", - picture_param->vop_fields.bits.vop_coding_type); - g_print( - "picture_param->vop_fields.bits.backward_reference_vop_coding_type = %d\n", - picture_param->vop_fields.bits.backward_reference_vop_coding_type); - g_print("picture_param->vop_fields.bits.vop_rounding_type = %d\n", - picture_param->vop_fields.bits.vop_rounding_type); - g_print("picture_param->vop_fields.bits.intra_dc_vlc_thr = %d\n", - picture_param->vop_fields.bits.intra_dc_vlc_thr); - g_print("picture_param->vop_fields.bits.top_field_first = %d\n", - picture_param->vop_fields.bits.top_field_first); - g_print( - "picture_param->vop_fields.bits.alternate_vertical_scan_flag = %d\n", - picture_param->vop_fields.bits.alternate_vertical_scan_flag); - - g_print("picture_param->vop_fcode_forward = %d\n", - picture_param->vop_fcode_forward); - g_print("picture_param->vop_fcode_backward = %d\n", - picture_param->vop_fcode_backward); - g_print("picture_param->num_gobs_in_vop = %d\n", - picture_param->num_gobs_in_vop); - g_print("picture_param->num_macroblocks_in_gob = %d\n", - picture_param->num_macroblocks_in_gob); - g_print("picture_param->TRB = %d\n", picture_param->TRB); - g_print("picture_param->TRD = %d\n", picture_param->TRD); - - g_print("==================== slice_data ==========================\n"); - - g_print("slice_data.buffer_addr = 0x%x\n", - (unsigned int) slice_data->buffer_addr); - g_print("slice_data.slice_offset = 0x%x\n", slice_data->slice_offset); - g_print("slice_data.slice_size = 0x%x\n", slice_data->slice_size); - - g_print("slice_data.slice_param.macroblock_number = %d\n", - slice_data->slice_param.macroblock_number); - g_print("slice_data.slice_param.macroblock_offset = 0x%x\n", - slice_data->slice_param.macroblock_offset); - g_print("slice_data.slice_param.quant_scale = %d\n", - slice_data->slice_param.quant_scale); - g_print("slice_data.slice_param.slice_data_flag = %d\n", - slice_data->slice_param.slice_data_flag); - g_print("slice_data.slice_param.slice_data_offset = %d\n", - slice_data->slice_param.slice_data_offset); - g_print("slice_data.slice_param.slice_data_size = %d\n", - slice_data->slice_param.slice_data_size); - - g_print("================= iq_matrix_buffer ======================\n"); - g_print("iq_matrix_buffer.load_intra_quant_mat = %d\n", - picture_data->iq_matrix_buffer.load_intra_quant_mat); - g_print("iq_matrix_buffer.load_non_intra_quant_mat = %d\n", - picture_data->iq_matrix_buffer.load_non_intra_quant_mat); - - g_print("------- iq_matrix_buffer.intra_quant_mat ----------\n"); - for (jdx = 0; jdx < 64; jdx++) { - - g_print("%02x ", - picture_data->iq_matrix_buffer.intra_quant_mat[jdx]); - - if ((jdx + 1) % 8 == 0) { - g_print("\n"); - } - } - - g_print("----- iq_matrix_buffer.non_intra_quant_mat --------\n"); - for (jdx = 0; jdx < 64; jdx++) { - - g_print("%02x ", - picture_data->iq_matrix_buffer.non_intra_quant_mat[jdx]); - - if ((jdx + 1) % 8 == 0) { - g_print("\n"); - } - } - - g_print("-------- slice buffer begin ------------\n"); - - for (jdx = 0; jdx < 64; jdx++) { - g_print("%02x ", *(slice_data->buffer_addr - + slice_data->slice_offset + jdx)); - if ((jdx + 1) % 8 == 0) { - g_print("\n"); - } - } - g_print("-------- slice buffer begin ------------\n"); - - g_print("\n\n============== dump_end ==========================\n\n"); - - } -} - diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c index a26a9f1..b5548ab 100644 --- 
a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c @@ -234,7 +234,7 @@ static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext) { /* mandatory for H.264 */ ETRACE("Failed to allocate memory"); - error = VBP_CXT; + error = VBP_TYPE; goto cleanup; } } @@ -377,23 +377,6 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f } } - /* currently always assume a complete frame is supplied for parsing, so - * there is no need to check if workload is done - */ - - /* - uint32_t codec_errors = 0; - uint32_t state; - - error = ops->is_wkld_done( - (void *)cxt, - (void *)&(cxt->codec_data[0]), - (uint32_t)cxt->sc_prefix_info.next_sc, - &codec_errors); - state = (ret == VIDDEC_PARSE_FRMDONE) ? VBP_DONE : VBP_OK; - return state; - */ - return VBP_OK; } @@ -554,6 +537,6 @@ uint32 vbp_utils_query(vbp_context *pcontext, void **data) */ uint32 vbp_utils_flush(vbp_context *pcontext) { - return VBP_IMPL; + return VBP_OK; } diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c index af16e8d..4739798 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c @@ -34,6 +34,28 @@ static uint32 b_fraction_table[][9] = { }; +static uint8 vc1_aspect_ratio_table[][2] = +{ + {0, 0}, + {1, 1}, + {12, 11}, + {10, 11}, + {16, 11}, + {40, 33}, + {24, 11}, + {20, 11}, + {32, 11}, + {80, 33}, + {18, 11}, + {15, 11}, + {64, 33}, + {160, 99}, + + // reserved + {0, 0} +}; + + /** * set parser entry points @@ -678,6 +700,46 @@ uint32 vbp_populate_query_data_vc1(vbp_context *pcontext) se_data->TFCNTRFLAG = seqLayerHeader->TFCNTRFLAG; se_data->FINTERPFLAG = seqLayerHeader->FINTERPFLAG; se_data->PSF = seqLayerHeader->PSF; + + // color matrix + if (seqLayerHeader->COLOR_FORMAT_FLAG) + { + se_data->MATRIX_COEF = seqLayerHeader->MATRIX_COEF; + } + else + { + //ITU-R BT. 601-5. 
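With the VC-1 default set just below, every parser in this patch now falls back to a defined colour matrix when the bitstream carries no colour description. Collected in one place, as assigned in this patch:

/* colour-matrix defaults when no colour description is coded:
 *     H.264                matrix_coefficients = 2   (unspecified)
 *     MPEG-4 (VOL)         matrix_coefficients = 1   (ITU-R BT.709)
 *     MPEG-4 short header  matrix_coefficients = 6   (SMPTE 170M)
 *     VC-1                 MATRIX_COEF         = 6   (ITU-R BT.601-5) */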
+ se_data->MATRIX_COEF = 6; + } + + // aspect ratio + if (seqLayerHeader->ASPECT_RATIO_FLAG == 1) + { + se_data->ASPECT_RATIO = seqLayerHeader->ASPECT_RATIO; + if (se_data->ASPECT_RATIO < 14) + { + se_data->ASPECT_HORIZ_SIZE = vc1_aspect_ratio_table[se_data->ASPECT_RATIO][0]; + se_data->ASPECT_VERT_SIZE = vc1_aspect_ratio_table[se_data->ASPECT_RATIO][1]; + } + else if (se_data->ASPECT_RATIO == 15) + { + se_data->ASPECT_HORIZ_SIZE = seqLayerHeader->ASPECT_HORIZ_SIZE; + se_data->ASPECT_VERT_SIZE = seqLayerHeader->ASPECT_VERT_SIZE; + } + else // se_data->ASPECT_RATIO == 14 + { + se_data->ASPECT_HORIZ_SIZE = 0; + se_data->ASPECT_VERT_SIZE = 0; + } + } + else + { + // unspecified + se_data->ASPECT_RATIO = 0; + se_data->ASPECT_HORIZ_SIZE = 0; + se_data->ASPECT_VERT_SIZE = 0; + } + se_data->BROKEN_LINK = seqLayerHeader->BROKEN_LINK; se_data->CLOSED_ENTRY = seqLayerHeader->CLOSED_ENTRY; se_data->PANSCAN_FLAG = seqLayerHeader->PANSCAN_FLAG; @@ -739,9 +801,16 @@ static void vbp_pack_picture_params_vc1( pic_parms->inloop_decoded_picture = VA_INVALID_SURFACE; pic_parms->sequence_fields.value = 0; + pic_parms->sequence_fields.bits.pulldown = seqLayerHeader->PULLDOWN; pic_parms->sequence_fields.bits.interlace = seqLayerHeader->INTERLACE; + pic_parms->sequence_fields.bits.tfcntrflag = seqLayerHeader->TFCNTRFLAG; + pic_parms->sequence_fields.bits.finterpflag = seqLayerHeader->FINTERPFLAG; + pic_parms->sequence_fields.bits.psf = seqLayerHeader->PSF; + pic_parms->sequence_fields.bits.multires = seqLayerHeader->MULTIRES; + pic_parms->sequence_fields.bits.overlap = seqLayerHeader->OVERLAP; pic_parms->sequence_fields.bits.syncmarker = seqLayerHeader->SYNCMARKER; - pic_parms->sequence_fields.bits.overlap = seqLayerHeader->OVERLAP; + pic_parms->sequence_fields.bits.rangered = seqLayerHeader->RANGERED; + pic_parms->sequence_fields.bits.max_b_frames = seqLayerHeader->MAXBFRAMES; pic_parms->coded_width = (seqLayerHeader->width + 1) << 1; pic_parms->coded_height = (seqLayerHeader->height + 1) << 1; @@ -750,6 +819,7 @@ static void vbp_pack_picture_params_vc1( pic_parms->entrypoint_fields.bits.closed_entry = seqLayerHeader->CLOSED_ENTRY; pic_parms->entrypoint_fields.bits.broken_link = seqLayerHeader->BROKEN_LINK; pic_parms->entrypoint_fields.bits.loopfilter = seqLayerHeader->LOOPFILTER; + pic_parms->entrypoint_fields.bits.panscan_flag = seqLayerHeader->PANSCAN_FLAG; pic_parms->conditional_overlap_flag = picLayerHeader->CONDOVER; pic_parms->fast_uvmc_flag = seqLayerHeader->FASTUVMC; @@ -939,8 +1009,8 @@ static void vbp_pack_slice_data_vc1( /*uint32 data_offset = byte - cxt->list.data[index].stpos;*/ slc_data->buffer_addr = cxt->parse_cubby.buf + cxt->list.data[index].stpos; - slc_data->slice_size = slice_size - byte; - slc_data->slice_offset = byte; + slc_data->slice_size = slice_size; + slc_data->slice_offset = 0; slc_parms->slice_data_size = slc_data->slice_size; slc_parms->slice_data_offset = 0; @@ -948,7 +1018,7 @@ static void vbp_pack_slice_data_vc1( /* fix this. we need to be able to handle partial slices. */ slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; - slc_parms->macroblock_offset = bit; + slc_parms->macroblock_offset = bit + byte * 8; /* fix this. 
we need o get the slice_vertical_position from the code */ slc_parms->slice_vertical_position = pic_data->num_slices; diff --git a/mix_video/ChangeLog b/mix_video/ChangeLog index 654fed0..6ac8f39 100644 --- a/mix_video/ChangeLog +++ b/mix_video/ChangeLog @@ -1,3 +1,37 @@ +2010-11-18 Andy Qiu + * Changed version number to 0.1.24 + +2010-11-17 Weian Chen + * Support dynamic frame rate change in MI-X + +2010-11-16 Weian Chen + * Change to use one API to set AIR parameters + * Support to set AIP dynamically + +2010-11-15 Weian Chen + * Support to set slice number for I and P frame seperately + +2010-11-12 Tao Tao + * Added new error codes to MI-X video and changed some return values in mixvideoformat_h264.c + +2010-11-12 Andy Qiu + * Changed version number to 0.1.23 + +2010-11-11 Weian Chen + * Add new feature (VCM, MTU, etc) + +2010-11-10 Andy Qiu + * Support color matrix, video range, aspect ratio. + * Support h264 dynamic stream detection + * Support H264 RTSP streaming + * Changed version number to 0.1.22 + +2010-10-25 Andy Qiu + * H.264 error robust improvement + * MPEG-4/H.263 partial frame support + * MPEG-4/H.263 RTSP stream support + * Changed version number to 0.1.21 + 2010-09-29 Andy Qiu * Supported FLV playback * Re-factored frame manager diff --git a/mix_video/configure.ac b/mix_video/configure.ac index ec50fd4..93f2986 100644 --- a/mix_video/configure.ac +++ b/mix_video/configure.ac @@ -2,7 +2,7 @@ AC_INIT([""],[""],[linda.s.cline@intel.com]) AC_CONFIG_MACRO_DIR(m4) -UMG_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 20) +UMG_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 24) dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode AM_MAINTAINER_MODE diff --git a/mix_video/mixvideo.spec b/mix_video/mixvideo.spec index f8f1947..dec7602 100644 --- a/mix_video/mixvideo.spec +++ b/mix_video/mixvideo.spec @@ -6,7 +6,7 @@ Summary: MIX Video Name: mixvideo -Version: 0.1.20 +Version: 0.1.24 Release: 1 Source0: %{name}-%{version}.tar.bz2 NoSource: 0 diff --git a/mix_video/src/mixframemanager.c b/mix_video/src/mixframemanager.c index 6be0ace..b3edd90 100644 --- a/mix_video/src/mixframemanager.c +++ b/mix_video/src/mixframemanager.c @@ -13,7 +13,7 @@ #define INITIAL_FRAME_ARRAY_SIZE 16 -// Assume only one backward reference is used. This will hold up to 2 frames before forcing +// Assume only one backward reference is used. This will hold up to 2 frames before forcing // the earliest frame out of queue. #define MIX_MAX_ENQUEUE_SIZE 2 @@ -101,10 +101,10 @@ MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, MIX_RESULT ret = MIX_RESULT_SUCCESS; - if (!MIX_IS_FRAMEMANAGER(fm) || + if (!MIX_IS_FRAMEMANAGER(fm) || mode <= MIX_DISPLAY_ORDER_UNKNOWN || - mode >= MIX_DISPLAY_ORDER_LAST || - framerate_numerator <= 0 || + mode >= MIX_DISPLAY_ORDER_LAST || + framerate_numerator <= 0 || framerate_denominator <= 0) { return MIX_RESULT_INVALID_PARAM; } @@ -259,7 +259,7 @@ MIX_RESULT mix_framemanager_set_max_enqueue_size(MixFrameManager *fm, gint size) { return MIX_RESULT_FAIL; } - + g_mutex_lock(fm->lock); fm->max_enqueue_size = size; @@ -286,12 +286,12 @@ MIX_RESULT mix_framemanager_set_max_picture_number(MixFrameManager *fm, guint32 // Refer to H.264 spec: log2_max_pic_order_cnt_lsb_minus4. Max pic order will never be less than 16. return MIX_RESULT_INVALID_PARAM; } - + g_mutex_lock(fm->lock); // max_picture_number is exclusie (range from 0 to num - 1). 
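
The initialize and setter hunks above define the frame manager contract: a display-order mode plus a frame rate at initialization, with an optional cap on how many frames may queue before the earliest is forced out. A hedged usage sketch (the call signatures follow this patch; the constructor and error handling are assumed):

/* Hedged sketch: set up a timestamp-ordered frame manager at 30 fps and
 * keep the default backward-reference depth of two frames. */
MixFrameManager *fm = mix_framemanager_new();   /* constructor assumed */
MIX_RESULT ret;

ret = mix_framemanager_initialize(fm, MIX_DISPLAY_ORDER_TIMESTAMP,
                                  30, 1 /* 30/1 fps */);
if (ret == MIX_RESULT_SUCCESS)
    ret = mix_framemanager_set_max_enqueue_size(fm, MIX_MAX_ENQUEUE_SIZE);
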
// Note that this number may not be reliable if encoder does not conform to the spec, as of this, the - // implementaion will not automatically roll-over fm->next_frame_picnumber when it reaches + // implementaion will not automatically roll-over fm->next_frame_picnumber when it reaches // fm->max_picture_number. fm->max_picture_number = num; LOG_V("max picture number is %d\n", num); @@ -322,7 +322,7 @@ MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) { fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)frame); mix_videoframe_unref(frame); LOG_V("one frame is flushed\n"); - }; + }; fm->eos = FALSE; fm->is_first_frame = TRUE; @@ -377,29 +377,29 @@ MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { } #endif - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); fm->frame_list = g_slist_append(fm->frame_list, (gpointer)mvf); g_mutex_unlock(fm->lock); - + LOG_V("End\n"); return ret; } -void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) +void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) { // this function finds the lowest time stamp in the list and assign it to the dequeued video frame, // if that timestamp is smaller than the timestamp of dequeued video frame. int i; - guint64 ts, min_ts; - MixVideoFrame *p, *min_p; + guint64 ts = 0, min_ts = 0; + MixVideoFrame *p = NULL, *min_p = NULL; int len = g_slist_length(fm->frame_list); if (len == 0) { // nothing to update return; } - + // find video frame with the smallest timestamp, take rollover into account when // comparing timestamp. for (i = 0; i < len; i++) @@ -408,19 +408,19 @@ void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) mix_videoframe_get_timestamp(p, &ts); if (i == 0 || (ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) || - (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) + (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) { min_ts = ts; min_p = p; - } + } } mix_videoframe_get_timestamp(mvf, &ts); if ((ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) || - (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) + (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) { // frame to be updated has smaller time stamp - } + } else { // time stamp needs to be monotonically non-decreasing so swap timestamp. 
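
The comparison inside the scan above encodes a rollover-aware ordering: a timestamp counts as earlier either when it is below the current minimum by less than the threshold, or above it by more than the threshold (meaning the minimum has already wrapped). Extracted as a standalone predicate, a sketch of the same test (the helper name is illustrative):

/* Sketch of the rollover-aware "earlier than" test used when scanning
 * for the minimum timestamp; TS_ROLLOVER_THRESHOLD is the existing
 * constant in mixframemanager.c. */
static gboolean ts_is_earlier(guint64 ts, guint64 ref)
{
    return (ts < ref && ref - ts < TS_ROLLOVER_THRESHOLD) ||
           (ts > ref && ts - ref > TS_ROLLOVER_THRESHOLD);
}
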
@@ -432,7 +432,7 @@ void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) } -MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) +MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { int i, num_i_or_p; MixVideoFrame *p, *first_i_or_p; @@ -441,7 +441,7 @@ MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoF num_i_or_p = 0; first_i_or_p = NULL; - + for (i = 0; i < len; i++) { p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); @@ -455,15 +455,15 @@ MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoF *mvf = p; LOG_V("B frame is dequeued.\n"); return MIX_RESULT_SUCCESS; - } - + } + if (type != TYPE_I && type != TYPE_P) { - // this should never happen + // this should never happen LOG_E("Frame typs is invalid!!!\n"); fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); mix_videoframe_unref(p); - return MIX_RESULT_FRAME_NOTAVAIL; + return MIX_RESULT_FRAME_NOTAVAIL; } num_i_or_p++; if (first_i_or_p == NULL) @@ -472,7 +472,7 @@ MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoF } } - // if there are more than one reference frame in the list, the first one is dequeued. + // if there are more than one reference frame in the list, the first one is dequeued. if (num_i_or_p > 1 || fm->eos) { if (first_i_or_p == NULL) @@ -493,40 +493,40 @@ MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoF else { LOG_V("P frame is dequeued.\n"); - } + } #endif - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } - - return MIX_RESULT_FRAME_NOTAVAIL; + + return MIX_RESULT_FRAME_NOTAVAIL; } -MIX_RESULT mix_framemanager_timestamp_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) +MIX_RESULT mix_framemanager_timestamp_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { int i, len; MixVideoFrame *p, *p_out_of_dated; guint64 ts, ts_next_pending, ts_out_of_dated; guint64 tolerance = fm->frame_timestamp_delta/4; -retry: +retry: // len may be changed during retry! len = g_slist_length(fm->frame_list); - ts_next_pending = (guint64)-1; + ts_next_pending = (guint64)-1; ts_out_of_dated = 0; p_out_of_dated = NULL; - - + + for (i = 0; i < len; i++) { p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); mix_videoframe_get_timestamp(p, &ts); - if (ts >= fm->last_frame_timestamp && + if (ts >= fm->last_frame_timestamp && ts <= fm->next_frame_timestamp + tolerance) { fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); *mvf = p; mix_videoframe_get_timestamp(p, &(fm->last_frame_timestamp)); - fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; + fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; LOG_V("frame is dequeud, ts = %"G_GINT64_FORMAT".\n", ts); return MIX_RESULT_SUCCESS; } @@ -536,7 +536,7 @@ retry: { ts_next_pending = ts; } - if (ts < fm->last_frame_timestamp && + if (ts < fm->last_frame_timestamp && ts >= ts_out_of_dated) { // video frame that is most recently out-of-dated. @@ -544,10 +544,10 @@ retry: // the "next frame" criteria, and the one with larger timestamp is dequeued first. 
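
In mix_framemanager_timestamp_based_dequeue() above, a frame qualifies for dequeue when its timestamp falls between the last dequeued timestamp and the expected next timestamp plus a quarter-delta tolerance. As a standalone predicate, a sketch (the name is illustrative):

/* Sketch of the dequeue-window test: delta is the nominal spacing
 * between consecutive frame timestamps, and delta/4 of slack absorbs
 * jitter around the expected next timestamp. */
static gboolean frame_is_due(guint64 ts, guint64 last_ts,
                             guint64 next_ts, guint64 delta)
{
    guint64 tolerance = delta / 4;
    return (ts >= last_ts) && (ts <= next_ts + tolerance);
}
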
ts_out_of_dated = ts; p_out_of_dated = p; - } + } } - if (p_out_of_dated && + if (p_out_of_dated && fm->last_frame_timestamp - ts_out_of_dated < TS_ROLLOVER_THRESHOLD) { fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p_out_of_dated); @@ -556,10 +556,10 @@ retry: ts_out_of_dated, fm->last_frame_timestamp); return MIX_RESULT_FRAME_NOTAVAIL; } - + if (len <= fm->max_enqueue_size && fm->eos == FALSE) { - LOG_V("no frame is dequeued, expected ts = %"G_GINT64_FORMAT", next pending ts = %"G_GINT64_FORMAT".(List size = %d)\n", + LOG_V("no frame is dequeued, expected ts = %"G_GINT64_FORMAT", next pending ts = %"G_GINT64_FORMAT".(List size = %d)\n", fm->next_frame_timestamp, ts_next_pending, len); return MIX_RESULT_FRAME_NOTAVAIL; } @@ -569,13 +569,13 @@ retry: { LOG_V("timestamp has gap, jumping from %"G_GINT64_FORMAT" to %"G_GINT64_FORMAT".\n", fm->next_frame_timestamp, ts_next_pending); - + fm->next_frame_timestamp = ts_next_pending; goto retry; } // time stamp roll-over - LOG_V("time stamp is rolled over, resetting next frame timestamp from %"G_GINT64_FORMAT" to 0.\n", + LOG_V("time stamp is rolled over, resetting next frame timestamp from %"G_GINT64_FORMAT" to 0.\n", fm->next_frame_timestamp); fm->next_frame_timestamp = 0; @@ -587,7 +587,7 @@ retry: return MIX_RESULT_FAIL; } -MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) +MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { int i, len; MixVideoFrame* p; @@ -596,9 +596,9 @@ MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVide len = g_slist_length(fm->frame_list); -retry: +retry: next_picnum_pending = (guint32)-1; - + for (i = 0; i < len; i++) { p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); @@ -607,11 +607,11 @@ retry: { fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); mix_framemanager_update_timestamp(fm, p); - *mvf = p; + *mvf = p; LOG_V("frame is dequeued, poc = %d.\n", fm->next_frame_picnumber); fm->next_frame_picnumber++; //if (fm->next_frame_picnumber == fm->max_picture_number) - // fm->next_frame_picnumber = 0; + // fm->next_frame_picnumber = 0; return MIX_RESULT_SUCCESS; } @@ -624,7 +624,7 @@ retry: if (picnum < fm->next_frame_picnumber && fm->next_frame_picnumber - picnum < 8) { - // the smallest value of "max_pic_order_cnt_lsb_minus4" is 16. If the distance of "next frame pic number" + // the smallest value of "max_pic_order_cnt_lsb_minus4" is 16. If the distance of "next frame pic number" // to the pic number in the list is less than half of 16, it is safe to assume that pic number // is reset when a new IDR is encoded. (where pic numbfer of top or bottom field must be 0, subclause 8.2.1). LOG_V("picture number is reset to %d, next pic number is %d, next pending number is %d.\n", @@ -632,10 +632,10 @@ retry: break; } } - + if (len <= fm->max_enqueue_size && fm->eos == FALSE) { - LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. (List size = %d)\n", + LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. 
(List size = %d)\n", fm->next_frame_picnumber, next_picnum_pending, len); return MIX_RESULT_FRAME_NOTAVAIL; } @@ -645,13 +645,13 @@ retry: { LOG_V("picture number has gap, jumping from %d to %d.\n", fm->next_frame_picnumber, next_picnum_pending); - + fm->next_frame_picnumber = next_picnum_pending; goto retry; } // picture number roll-over - LOG_V("picture number is rolled over, resetting next picnum from %d to 0.\n", + LOG_V("picture number is rolled over, resetting next picnum from %d to 0.\n", fm->next_frame_picnumber); fm->next_frame_picnumber = 0; @@ -693,29 +693,29 @@ MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { { LOG_V("No frame is dequeued as queue is empty!\n"); ret = MIX_RESULT_FRAME_NOTAVAIL; - } + } } else if (fm->is_first_frame) { // dequeue the first entry in the list. Not need to update the time stamp as // the list should contain only one frame. -#ifdef MIX_LOG_ENABLE +#ifdef MIX_LOG_ENABLE if (g_slist_length(fm->frame_list) != 1) { - LOG_W("length of list is not equal to 1 for the first frame.\n"); + LOG_W("length of list is not equal to 1 for the first frame.\n"); } -#endif +#endif *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0); fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf)); if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP) - { - mix_videoframe_get_timestamp(*mvf, &(fm->last_frame_timestamp)); + { + mix_videoframe_get_timestamp(*mvf, &(fm->last_frame_timestamp)); fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; LOG_V("The first frame is dequeued, ts = %"G_GINT64_FORMAT"\n", fm->last_frame_timestamp); } else if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER) - { + { mix_videoframe_get_displayorder(*mvf, &(fm->next_frame_picnumber)); LOG_V("The first frame is dequeued, POC = %d\n", fm->next_frame_picnumber); fm->next_frame_picnumber++; @@ -724,15 +724,15 @@ MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { } else { -#ifdef MIX_LOG_ENABLE +#ifdef MIX_LOG_ENABLE MixFrameType type; mix_videoframe_get_frame_type(*mvf, &type); LOG_V("The first frame is dequeud, frame type is %d.\n", type); -#endif +#endif } fm->is_first_frame = FALSE; - - ret = MIX_RESULT_SUCCESS; + + ret = MIX_RESULT_SUCCESS; } else { @@ -751,14 +751,14 @@ MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { ret = mix_framemanager_pictype_based_dequeue(fm, mvf); break; - case MIX_DISPLAY_ORDER_FIFO: + case MIX_DISPLAY_ORDER_FIFO: *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0); fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf)); - ret = MIX_RESULT_SUCCESS; + ret = MIX_RESULT_SUCCESS; LOG_V("One frame is dequeued.\n"); break; - - default: + + default: LOG_E("Invalid frame order mode\n"); ret = MIX_RESULT_FAIL; break; @@ -784,7 +784,7 @@ MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) { return MIX_RESULT_NOT_INIT; } - g_mutex_lock(fm->lock); + g_mutex_lock(fm->lock); fm->eos = TRUE; LOG_V("EOS is received.\n"); g_mutex_unlock(fm->lock); diff --git a/mix_video/src/mixsurfacepool.c b/mix_video/src/mixsurfacepool.c index 9f92ae1..5ed6007 100644 --- a/mix_video/src/mixsurfacepool.c +++ b/mix_video/src/mixsurfacepool.c @@ -396,7 +396,7 @@ MIX_RESULT mix_surfacepool_get(MixSurfacePool * obj, MixVideoFrame ** frame) { LOG_E( "out of surfaces\n"); - return MIX_RESULT_NO_MEMORY; + return MIX_RESULT_OUTOFSURFACES; } //Remove a frame from the free pool @@ -482,7 +482,7 @@ MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool 
* obj, Mix LOG_E( "out of surfaces\n"); - return MIX_RESULT_NO_MEMORY; + return MIX_RESULT_OUTOFSURFACES; } //Remove a frame from the free pool diff --git a/mix_video/src/mixvideo.c b/mix_video/src/mixvideo.c index 718d355..56bd264 100644 --- a/mix_video/src/mixvideo.c +++ b/mix_video/src/mixvideo.c @@ -10,16 +10,16 @@ * SECTION:mixvideo * @short_description: Object to support a single stream decoding or encoding using hardware accelerated decoder/encoder. * @include: mixvideo.h - * + * * #MixVideo objects are created by the MMF/App and utilized for main MI-X API functionality for video. - * + * * The MixVideo object handles any of the video formats internally. * The App/MMF will pass a MixVideoConfigParamsDecH264/MixVideoConfigParamsDecVC1/ * MixVideoConfigParamsEncH264/etc object to MixVideo in the mix_video_configure() * call. MixVideoInitParams, MixVideoDecodeParams, MixVideoEncodeParams, and * MixVideoRenderParams objects will be passed in the mix_video_initialize(), * mix_video_decode(), mix_video_encode() and mix_video_render() calls respectively. - * + * * The application can take the following steps to decode video: * * Create a mix_video object using mix_video_new() @@ -30,19 +30,19 @@ * At the presentation time, using the timestamp provided with the decoded frame, render the frame to an X11 Window using mix_video_render(). The frame can be retained for redrawing until the next frame is retrieved. * When the frame is no longer needed for redrawing, release the frame using mix_video_release_frame(). * - * + * * For encoding, the application can take the following steps to encode video: * * Create a mix_video object using mix_video_new() * Initialize the object using mix_video_initialize() * Configure the stream using mix_video_configure() * Encode frames using mix_video_encode() - * Use the encoded data buffers as desired; for example, forward to a muxing component for saving to a file. + * Use the encoded data buffers as desired; for example, forward to a muxing component for saving to a file. * Retrieve the uncompressed frames for display using mix_video_get_frame(). * At the presentation time, using the timestamp provided with the decoded frame, render the frame to an X11 Window using mix_video_render(). For encode, the frame should not be retained for redrawing after the initial rendering, due to resource limitations. * Release the frame using mix_video_release_frame(). 
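
With the surface-pool hunks above, pool exhaustion is now reported as MIX_RESULT_OUTOFSURFACES instead of a generic out-of-memory error, so a caller can treat it as recoverable: surfaces come back as displayed frames are released. A hedged sketch of that caller-side handling (the control flow is assumed, not taken from the patch):

/* Hedged sketch: on surface exhaustion, release a frame that is done
 * displaying and resubmit the same MixBuffers, as the decode
 * documentation in this patch suggests. */
ret = mix_video_decode(mix, bufin, bufincnt, decode_params);
if (ret == MIX_RESULT_OUTOFSURFACES)
{
    mix_video_release_frame(mix, done_frame);   /* frees one surface */
    ret = mix_video_decode(mix, bufin, bufincnt, decode_params);
}
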
* - * + * */ #include /* libVA */ @@ -103,7 +103,7 @@ VADisplay vaGetDisplay ( #define mix_strcmp g_strcmp0 #endif -#define USE_OPAQUE_POINTER +#define USE_OPAQUE_POINTER #ifdef USE_OPAQUE_POINTER #define MIX_VIDEO_PRIVATE(mix) (MixVideoPrivate *)(mix->context) @@ -632,20 +632,20 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, if (frame_order_mode == MIX_FRAMEORDER_MODE_DECODEORDER) { - display_order_mode = MIX_DISPLAY_ORDER_FIFO; + display_order_mode = MIX_DISPLAY_ORDER_FIFO; } - else if (mix_strcmp(mime_type, "video/x-wmv") == 0 || + else if (mix_strcmp(mime_type, "video/x-wmv") == 0 || mix_strcmp(mime_type, "video/mpeg") == 0 || - mix_strcmp(mime_type, "video/x-divx") == 0 || + mix_strcmp(mime_type, "video/x-divx") == 0 || mix_strcmp(mime_type, "video/x-h263") == 0 || - mix_strcmp(mime_type, "video/x-xvid") == 0 ) + mix_strcmp(mime_type, "video/x-xvid") == 0 ) { - display_order_mode = MIX_DISPLAY_ORDER_PICTYPE; - } - else + display_order_mode = MIX_DISPLAY_ORDER_PICTYPE; + } + else { - //display_order_mode = MIX_DISPLAY_ORDER_TIMESTAMP; - display_order_mode = MIX_DISPLAY_ORDER_PICNUMBER; + //display_order_mode = MIX_DISPLAY_ORDER_TIMESTAMP; + display_order_mode = MIX_DISPLAY_ORDER_PICNUMBER; } /* initialize frame manager */ @@ -702,15 +702,16 @@ MIX_RESULT mix_video_configure_decode(MixVideo * mix, priv->video_format = MIX_VIDEOFORMAT(video_format); - } else if (mix_strcmp(mime_type, "video/mpeg") == 0 || - mix_strcmp(mime_type, "video/x-divx") == 0 || + } else if (mix_strcmp(mime_type, "video/mpeg") == 0 || + mix_strcmp(mime_type, "video/x-divx") == 0 || mix_strcmp(mime_type, "video/x-h263") == 0 || - mix_strcmp(mime_type, "video/x-xvid") == 0) { + mix_strcmp(mime_type, "video/x-xvid") == 0 || + mix_strcmp(mime_type, "video/x-dx50") == 0) { guint version = 0; /* Is this mpeg4:2 ? 
*/ - if (mix_strcmp(mime_type, "video/mpeg") == 0 || + if (mix_strcmp(mime_type, "video/mpeg") == 0 || mix_strcmp(mime_type, "video/x-h263") == 0 ) { /* @@ -962,7 +963,7 @@ MIX_RESULT mix_video_configure_encode(MixVideo * mix, priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); } - + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263 && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) { @@ -978,7 +979,7 @@ MIX_RESULT mix_video_configure_encode(MixVideo * mix, priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); } - + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) { @@ -1044,7 +1045,7 @@ MIX_RESULT mix_video_configure_default(MixVideo * mix, MixVideoPrivate *priv = NULL; LOG_V( "Begin\n"); - + CHECK_INIT(mix, priv); if(!config_params) { LOG_E("!config_params\n"); @@ -1113,6 +1114,9 @@ MIX_RESULT mix_video_decode_default(MixVideo * mix, MixBuffer * bufin[], return MIX_RESULT_NULL_PTR; } + // reset new sequence flag + decode_params->new_sequence = FALSE; + //First check that we have surfaces available for decode ret = mix_surfacepool_check_available(priv->surface_pool); @@ -1138,7 +1142,7 @@ MIX_RESULT mix_video_get_frame_default(MixVideo * mix, MixVideoFrame ** frame) { MIX_RESULT ret = MIX_RESULT_FAIL; MixVideoPrivate *priv = NULL; - + CHECK_INIT_CONFIG(mix, priv); if (!frame) { @@ -1205,7 +1209,7 @@ MIX_RESULT mix_video_release_frame_default(MixVideo * mix, MIX_RESULT mix_video_render_default(MixVideo * mix, MixVideoRenderParams * render_params, MixVideoFrame *frame) { - + return MIX_RESULT_NOTIMPL; } @@ -1395,6 +1399,15 @@ MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], return MIX_RESULT_NULL_PTR; } + //First check that we have surfaces available for decode + ret = mix_surfacepool_check_available(priv->surface_pool); + + if (ret == MIX_RESULT_POOLEMPTY) { + LOG_I( "Out of surface\n"); + return MIX_RESULT_OUTOFSURFACES; + } + + g_mutex_lock(priv->objlock); ret = mix_videofmtenc_encode(priv->video_format_enc, bufin, bufincnt, @@ -1447,7 +1460,7 @@ MIX_RESULT mix_video_eos_default(MixVideo * mix) { MixVideoPrivate *priv = NULL; LOG_V( "Begin\n"); - + CHECK_INIT_CONFIG(mix, priv); /* ---------------------- begin lock --------------------- */ @@ -1456,11 +1469,11 @@ MIX_RESULT mix_video_eos_default(MixVideo * mix) { if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) { ret = mix_videofmt_eos(priv->video_format); - /* We should not call mix_framemanager_eos() here. - * MixVideoFormat* is responsible to call this function. + /* We should not call mix_framemanager_eos() here. + * MixVideoFormat* is responsible to call this function. * Commnet the function call here! 
- */ - /* frame manager will set EOS flag to be TRUE */ + */ + /* frame manager will set EOS flag to be TRUE */ /* ret = mix_framemanager_eos(priv->frame_manager); */ } else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE && priv->video_format_enc != NULL) { @@ -1581,7 +1594,7 @@ MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, guint *m } -MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, +MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) { MIX_RESULT ret = MIX_RESULT_FAIL; @@ -1608,7 +1621,7 @@ MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, else { LOG_E( "priv->config_params is invalid\n"); - return MIX_RESULT_FAIL; + return MIX_RESULT_FAIL; } g_mutex_lock(priv->objlock); @@ -1620,38 +1633,120 @@ MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed mix_videoconfigparamsenc_set_bit_rate\n"); goto cleanup; - } + } + } + break; + + case MIX_ENC_PARAMS_INIT_QP: + { + ret = mix_videoconfigparamsenc_set_init_qp (priv_config_params_enc, dynamic_params->init_QP); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_init_qp\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_MIN_QP: + { + ret = mix_videoconfigparamsenc_set_min_qp (priv_config_params_enc, dynamic_params->min_QP); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_min_qp\n"); + goto cleanup; + } } break; - case MIX_ENC_PARAMS_SLICE_SIZE: + + case MIX_ENC_PARAMS_WINDOW_SIZE: + { + ret = mix_videoconfigparamsenc_set_window_size (priv_config_params_enc, dynamic_params->window_size); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_window_size\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_TARGET_PERCENTAGE: + { + ret = mix_videoconfigparamsenc_set_target_percentage (priv_config_params_enc, dynamic_params->target_percentage); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_target_percentage\n"); + goto cleanup; + } + } + break; + + + case MIX_ENC_PARAMS_MTU_SLICE_SIZE: + { + ret = mix_videoconfigparamsenc_set_max_slice_size(priv_config_params_enc, dynamic_params->max_slice_size); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_max_slice_size\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_SLICE_NUM: { /* */ - MixVideoConfigParamsEncH264 * config_params_enc_h264 = + MixVideoConfigParamsEncH264 * config_params_enc_h264 = MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); ret = mix_videoconfigparamsenc_h264_set_slice_num (config_params_enc_h264, dynamic_params->slice_num); if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed mix_videoconfigparamsenc_h264_set_slice_num\n"); goto cleanup; - } + } } break; - + + case MIX_ENC_PARAMS_I_SLICE_NUM: + { + /* + */ + MixVideoConfigParamsEncH264 * config_params_enc_h264 = + MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); + + ret = mix_videoconfigparamsenc_h264_set_I_slice_num (config_params_enc_h264, dynamic_params->I_slice_num); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_h264_set_I_slice_num\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_P_SLICE_NUM: + { + /* + */ + MixVideoConfigParamsEncH264 * config_params_enc_h264 = + MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); + + ret = mix_videoconfigparamsenc_h264_set_P_slice_num (config_params_enc_h264, 
dynamic_params->P_slice_num); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_h264_set_P_slice_num\n"); + goto cleanup; + } + } + break; + case MIX_ENC_PARAMS_IDR_INTERVAL: { - MixVideoConfigParamsEncH264 * config_params_enc_h264 = + MixVideoConfigParamsEncH264 * config_params_enc_h264 = MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); ret = mix_videoconfigparamsenc_h264_set_IDR_interval(config_params_enc_h264, dynamic_params->idr_interval); if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed mix_videoconfigparamsenc_h264_set_IDR_interval\n"); goto cleanup; - } + } } break; - case MIX_ENC_PARAMS_RC_MODE: + case MIX_ENC_PARAMS_RC_MODE: case MIX_ENC_PARAMS_RESOLUTION: { /* @@ -1660,8 +1755,8 @@ MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, if (priv->video_format_enc) { mix_videofmtenc_deinitialize(priv->video_format_enc); } - - MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref) + + MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref) //priv->alloc_surface_cnt = 0; //Surfaces are also released, we need to set alloc_surface_cnt to 0 @@ -1671,30 +1766,30 @@ MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, /* * Step 2: Change configuration parameters (frame size) - */ + */ if (params_type == MIX_ENC_PARAMS_RESOLUTION) { ret = mix_videoconfigparamsenc_set_picture_res (priv_config_params_enc, dynamic_params->width, dynamic_params->height); if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed mix_videoconfigparamsenc_set_picture_res\n"); goto cleanup; - } + } } else if (params_type == MIX_ENC_PARAMS_RC_MODE) { ret = mix_videoconfigparamsenc_set_rate_control(priv_config_params_enc, dynamic_params->rc_mode); if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed mix_videoconfigparamsenc_set_rate_control\n"); goto cleanup; - } + } } /* * Step 3: Renew mixvideofmtenc object - */ + */ MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264; - + ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc, &encode_format); if (ret != MIX_RESULT_SUCCESS) { @@ -1734,7 +1829,7 @@ MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); } - + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263 && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) { @@ -1775,10 +1870,10 @@ MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, /* * Step 4: Re-initialize and start a new encode session, of course with new resolution value - */ + */ - /* - * Initialize MixVideoEncFormat + /* + * Initialize MixVideoEncFormat */ /* @@ -1800,11 +1895,11 @@ MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed initialize video format\n"); goto cleanup; - } + } mix_surfacepool_ref(priv->surface_pool); - - + + } break; case MIX_ENC_PARAMS_GOP_SIZE: @@ -1813,7 +1908,7 @@ MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed mix_videoconfigparamsenc_set_intra_period\n"); goto cleanup; - } + } } break; @@ -1823,7 +1918,7 @@ MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed mix_videoconfigparamsenc_set_frame_rate\n"); goto cleanup; - } + } } break; case MIX_ENC_PARAMS_FORCE_KEY_FRAME: @@ -1833,39 +1928,51 @@ MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, */ } break; - case MIX_ENC_PARAMS_QP: + + case MIX_ENC_PARAMS_REFRESH_TYPE: { - ret = 
mix_videoconfigparamsenc_set_init_qp (priv_config_params_enc, dynamic_params->QP); + ret = mix_videoconfigparamsenc_set_refresh_type(priv_config_params_enc, dynamic_params->refresh_type); if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_init_qp\n"); + LOG_E("Failed mix_videoconfigparamsenc_set_refresh_type\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_AIR: + { + ret = mix_videoconfigparamsenc_set_AIR_params(priv_config_params_enc, dynamic_params->air_params); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_AIR_params\n"); goto cleanup; - } + } } break; + case MIX_ENC_PARAMS_CIR_FRAME_CNT: { ret = mix_videoconfigparamsenc_set_CIR_frame_cnt (priv_config_params_enc, dynamic_params->CIR_frame_cnt); if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed mix_videoconfigparamsenc_set_CIR_frame_cnt\n"); goto cleanup; - } - + } + } break; - + default: break; } ret = mix_videofmtenc_set_dynamic_enc_config (priv->video_format_enc, priv_config_params_enc, params_type); -cleanup: +cleanup: g_mutex_unlock(priv->objlock); LOG_V( "End ret = 0x%x\n", ret); - return ret; + return ret; } /* * API functions @@ -1951,7 +2058,7 @@ MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, CHECK_AND_GET_MIX_CLASS(mix, klass); if (klass->decode_func) { - return klass->decode_func(mix, bufin, bufincnt, + return klass->decode_func(mix, bufin, bufincnt, decode_params); } return MIX_RESULT_NOTIMPL; @@ -2074,13 +2181,13 @@ MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize) { return MIX_RESULT_NOTIMPL; } -MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, +MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) { MixVideoClass *klass = MIX_VIDEO_GET_CLASS(mix); if (klass->set_dynamic_enc_config_func) { return klass->set_dynamic_enc_config_func(mix, params_type, dynamic_params); - } - return MIX_RESULT_NOTIMPL; + } + return MIX_RESULT_NOTIMPL; } diff --git a/mix_video/src/mixvideo.h b/mix_video/src/mixvideo.h index f8e4828..678ba65 100644 --- a/mix_video/src/mixvideo.h +++ b/mix_video/src/mixvideo.h @@ -169,10 +169,10 @@ MixVideo *mix_video_ref(MixVideo * mix); /** * mix_video_get_version: * @mix: #MixVideo object. - * @major: Pointer to an unsigned integer indicating the major version number of this MI-X Video library + * @major: Pointer to an unsigned integer indicating the major version number of this MI-X Video library * @minor: Pointer to an unsigned integer indicating the minor version number of this MI-X Video library * @returns: Common Video Error Return Codes - * + * * This function will return the major and minor version numbers of the library. */ MIX_RESULT mix_video_get_version(MixVideo * mix, guint * major, guint * minor); @@ -182,16 +182,16 @@ MIX_RESULT mix_video_get_version(MixVideo * mix, guint * major, guint * minor); /** * mix_video_initialize: * @mix: #MixVideo object. - * @mode: Enum value to indicate encode or decode mode + * @mode: Enum value to indicate encode or decode mode * @init_params: MixVideoInitParams object which includes display type and pointer to display, encode or decode mode - * @drm_init_params: MixDrmParams defined in Moorestown MI-X DRM API. + * @drm_init_params: MixDrmParams defined in Moorestown MI-X DRM API. * This can be null if content is not protected. 
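
The expanded switch above routes several encoder controls through one run-time entry point. A hedged usage sketch for one of the new cases (the enum and field names appear in this patch; session setup and the full layout of MixEncDynamicParams are otherwise assumed):

/* Hedged sketch: adjust the initial QP on a live encode session. Only
 * the field matching params_type needs to be meaningful. */
MixEncDynamicParams dyn = {0};
dyn.init_QP = 26;

ret = mix_video_set_dynamic_enc_config(mix, MIX_ENC_PARAMS_INIT_QP, &dyn);
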
- * @returns: In addition to the Common Video Error Return Codes, + * @returns: In addition to the Common Video Error Return Codes, * the following error codes may be returned. * * MIX_RESULT_ALREADY_INIT, mix_video_initialize() has already been called. * - * + * * This function will return the major and minor version numbers of the library. */ MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode, @@ -201,7 +201,7 @@ MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode, * mix_video_deinitialize: * @mix: #MixVideo object. * @returns: Common Video Error Return Codes - * + * * This function will un-initialize a session with this MI-X instance. During this call, the * LibVA session is closed and all resources including surface buffers, #MixBuffers and * #MixVideoFrame objects are freed. This function is called by the application once @@ -213,18 +213,18 @@ MIX_RESULT mix_video_deinitialize(MixVideo * mix); /** * mix_video_configure: * @mix: #MixVideo object. - * @config_params: Pointer to #MixVideoConfigParams object (either #MixVideoConfigParamsDec or + * @config_params: Pointer to #MixVideoConfigParams object (either #MixVideoConfigParamsDec or * #MixVideoConfigParamsEnc for specific media type) - * @drm_config_params: Pointer to #MixDrmParams defined in Moorestown MI-X DRM API. - * This can be null if content is not protected. - * @returns: In addition to the Common Video Error Return Codes, + * @drm_config_params: Pointer to #MixDrmParams defined in Moorestown MI-X DRM API. + * This can be null if content is not protected. + * @returns: In addition to the Common Video Error Return Codes, * the following error codes may be returned. * * MIX_RESULT_RESOURCES_NOTAVAIL, HW accelerated decoding is not available. * MIX_RESULT_NOTSUPPORTED, A requested parameter is not supported or not available. * - * - * This function can be used to configure a stream for the current session. + * + * This function can be used to configure a stream for the current session. * The caller can use this function to do the following: * * Choose frame ordering mode (display order or decode order) @@ -243,18 +243,18 @@ MIX_RESULT mix_video_configure(MixVideo * mix, /** * mix_video_get_config: * @mix: #MixVideo object. - * @config_params: Pointer to pointer to #MixVideoConfigParams object defined in + * @config_params: Pointer to pointer to #MixVideoConfigParams object defined in * description of mix_video_configure() * @returns: Common Video Error Return Codes - * + * * This function can be used to get the current configuration of a stream for the current session. * A #MixVideoConfigParams object will be returned, which can be used to get each of the * parameter current values. The caller will need to release this object when it is no - * longer needed. - * + * longer needed. + * * This function can only be called once mix_video_configure() has been called. - * - * See description of mix_video_configure() for #MixVideoConfigParams object details. + * + * See description of mix_video_configure() for #MixVideoConfigParams object details. * For mix_video_get_config(), all input parameter fields become OUT parameters. * */ @@ -264,18 +264,18 @@ MIX_RESULT mix_video_get_config(MixVideo * mix, /** * mix_video_decode: * @mix: #MixVideo object. 
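
The configure/get_config pair documented above is symmetric: the same #MixVideoConfigParams type flows both ways, with every input field becoming an OUT parameter on the query side. A hedged round-trip sketch (parameter-object creation, the cast macro, and release are assumed):

/* Hedged sketch: apply a decode configuration, then read back the
 * active one. DRM params are NULL for unprotected content, as the docs
 * above allow. */
ret = mix_video_configure(mix, MIX_VIDEOCONFIGPARAMS(dec_params), NULL);

MixVideoConfigParams *active = NULL;
if (ret == MIX_RESULT_SUCCESS)
    ret = mix_video_get_config(mix, &active);
/* ... inspect 'active', then release it when no longer needed */
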
- * @bufin: Array of pointers to #MixBuffer objects, described in mix_video_get_mixbuffer() * + * @bufin: Array of pointers to #MixBuffer objects, described in mix_video_get_mixbuffer() * * @bufincnt: Number of #MixBuffer objects * @decode_params: #MixVideoDecodeParams object - * @returns: In addition to the Common Video Error Return Codes, + * @returns: In addition to the Common Video Error Return Codes, * the following error codes may be returned. * * - * MIX_RESULT_OUTOFSURFACES, No surfaces available for decoding. Nothing will be done. + * MIX_RESULT_OUTOFSURFACES, No surfaces available for decoding. Nothing will be done. * Caller can try again with the same MixBuffers later when surfaces may have been freed. * * - * + * * * This function is used to initiate HW accelerated decoding of encoded data buffers. This * function is used to decode to a surface buffer, which can then be rendered using @@ -289,7 +289,7 @@ MIX_RESULT mix_video_get_config(MixVideo * mix, * and provided for the #MixVideoFrame object that contains the decoded data for this * frame data. * - * + * * * As only one timestamp is passed in for the buffer, there should be no more than one * video frame included in the encoded data buffer provided in a single call to @@ -297,8 +297,8 @@ MIX_RESULT mix_video_get_config(MixVideo * mix, * mix_video_decode(), the same timestamp should be provided with each call having * data associated with the same frame. * - * - * + * + * * The application should request a #MixBuffer object using mix_video_get_mixbuffer(), * initialize the #MixBuffer with the data pointer to the coded input data, along with the * size of the input data buffer, and optionally can provide a token value and a callback @@ -318,18 +318,18 @@ MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, * mix_video_get_frame: * @mix: #MixVideo object. * @frame: A pointer to a pointer to a #MixVideoFrame object - * @returns: In addition to the Common Video Error Return Codes, + * @returns: In addition to the Common Video Error Return Codes, * the following error codes may be returned. * * * MIX_RESULT_FRAME_NOTAVAIL, No decoded frames are available. * * - * MIX_RESULT_EOS, No more decoded frames are available, + * MIX_RESULT_EOS, No more decoded frames are available, * since end of stream has been encountered. * * - * + * * * This function returns a frame object that represents the next frame ID and includes * timestamp and discontinuity information. If display frame ordering has been @@ -337,7 +337,7 @@ MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, * configured, it is the next frame decoded. In both cases the timestamp reflects the * presentation timestamp. For encode mode the frame order is always display order. * - * + * * * The frame object is a reference counted object that represents the frame. The * application can retain this frame object as long as needed to display the frame and @@ -357,7 +357,7 @@ MIX_RESULT mix_video_get_frame(MixVideo * mix, MixVideoFrame ** frame); * @mix: #MixVideo object. * @frame: A pointer to a #MixVideoFrame object, described in mix_video_get_frame() * @returns: Common Video Error Return Codes - * + * * This function releases a frame object that was acquired from mix_video_get_frame(). */ MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame); @@ -366,14 +366,14 @@ MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame); /** * mix_video_render: * @mix: #MixVideo object. 
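
mix_video_get_frame(), documented above, distinguishes "nothing ready yet" from "stream finished". A hedged retrieval-loop sketch built on those two return codes (rendering elided):

/* Hedged sketch: drain decoded frames until none are pending or the
 * stream ends; every successfully fetched frame must eventually be
 * released so its surface returns to the pool. */
for (;;)
{
    MixVideoFrame *frame = NULL;
    MIX_RESULT r = mix_video_get_frame(mix, &frame);

    if (r == MIX_RESULT_FRAME_NOTAVAIL)
        break;      /* retry after feeding more data to decode */
    if (r != MIX_RESULT_SUCCESS)
        break;      /* MIX_RESULT_EOS or a hard failure */

    /* render at the frame's presentation timestamp ... */
    mix_video_release_frame(mix, frame);
}
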
- * @render_params: #MixVideoRenderParams object defined below, - * which includes the display window and type, - * src and dest image sizes, deinterlace info, clipping rectangles, + * @render_params: #MixVideoRenderParams object defined below, + * which includes the display window and type, + * src and dest image sizes, deinterlace info, clipping rectangles, * some post processing parameters, and so forth. * @frame: Pointer to a #MixVideoFrame object returned from mix_video_get_frame(). * @returns: Common Video Error Return Codes - * - * This function renders a video frame associated with a MixVideoFrame object to the display. + * + * This function renders a video frame associated with a MixVideoFrame object to the display. * The display is either an X11 Pixmap or an X11 Window using the overlay. */ MIX_RESULT mix_video_render(MixVideo * mix, @@ -384,12 +384,12 @@ MIX_RESULT mix_video_render(MixVideo * mix, * mix_video_encode: * @mix: #MixVideo object. * @bufin: Array of pointers to #MixBuffer objects, structure defined in mix_video_decode() - * @bufincnt: Number of #MixBuffer objects + * @bufincnt: Number of #MixBuffer objects * @iovout: Array of #MixIOVec structures, pointing to buffers allocated by the application * @iovoutcnt: Number of items in iovout array - * @encode_params: #MixVideoEncodeParams object + * @encode_params: #MixVideoEncodeParams object * @returns: Common Video Error Return Codes - * + * * * This function is used to initiate HW accelerated encoding of uncompressed video input * buffers. The input buffers may either be uncompressed video in user space buffers, or @@ -397,14 +397,14 @@ MIX_RESULT mix_video_render(MixVideo * mix, * shared buffer mode should be indicated in the #MixVideoConfigParamsEnc object * provided to mix_video_configure(). * - * + * * * Video uncompressed data input buffers are provided in a scatter/gather list of * reference counted MixBuffers. The input #MixBuffers are considered a complete frame * of data, and are used for encoding before the input buffers are released. LibCI frame * indices may also be provided in MixBuffers. * - * + * * * The encoded data will be copied to the output buffers provided in the array of * #MixIOVec structures, also in a scatter/gather list. These output buffers are allocated @@ -416,19 +416,19 @@ MIX_RESULT mix_video_render(MixVideo * mix, * the encoded data size placed in the buffer. For any buffer not used for encoded data, * the data_size will be set to zero. * - * + * * * Alternatively, if the application does not allocate the output buffers, the data pointers * in the #MixIOVec structures (still provided by the application) can be set to NULL, * whereupon #MixVideo will allocate a data buffer for each frame and set the data, - * buffer_size and data_size pointers in the #MixIOVec structures accordingly. + * buffer_size and data_size pointers in the #MixIOVec structures accordingly. * - * - * + * + * * This is not an efficient method to handle these buffers and it is preferred that * the application provide pre-allocated buffers. * - * + * * * The application should request a #MixBuffer object using mix_video_get_mixbuffer(), * initialize the #MixBuffer with the data pointer to the uncompressed input data or a LibCI @@ -461,7 +461,7 @@ MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, * mix_video_flush: * @mix: #MixVideo object. 
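
The encode documentation above permits either application-owned output buffers or library allocation when the data pointers are left NULL, and calls out pre-allocated buffers as the efficient path. A hedged sketch of preparing one application-owned output buffer (MixIOVec field names follow the doc text; the exact form of the iovout argument is assumed):

/* Hedged sketch: size one output buffer from the session maximum;
 * after encoding, data_size reports the bytes actually used. */
guint max_size = 0;
mix_video_get_max_coded_buffer_size(mix, &max_size);

MixIOVec iov;
iov.data = g_malloc(max_size);
iov.buffer_size = max_size;
iov.data_size = 0;

/* iov is then passed as the iovout array (iovoutcnt = 1) in the
 * mix_video_encode() call documented above */
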
* @returns: Common Video Error Return Codes - * + * * This function will flush all encoded and decoded buffers that are currently enqueued or * in the process of decoding. After this call, decoding can commence again, but would * need to start at the beginning of a sequence (for example, with no dependencies on @@ -473,7 +473,7 @@ MIX_RESULT mix_video_flush(MixVideo * mix); * mix_video_eos: * @mix: #MixVideo object. * @returns: Common Video Error Return Codes - * + * * This function will signal end of stream to #MixVideo. This can be used to finalize * decoding of the last frame and other end of stream processing. #MixVideo will complete * the decoding of all buffers received, and will continue to provide the decoded frame @@ -488,7 +488,7 @@ MIX_RESULT mix_video_eos(MixVideo * mix); * @mix: #MixVideo object. * @state: Current state of MI-X session. * @returns: Common Video Error Return Codes - * + * * This function returns the current state of the MI-X session. */ MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state); @@ -498,7 +498,7 @@ MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state); * @mix: #MixVideo object. * @buf: A pointer to a pointer to a #MixBuffer object * @returns: Common Video Error Return Codes - * + * * * This function returns a frame object that represents the next frame ID and includes * timestamp and discontinuity information. If display frame ordering has been @@ -506,7 +506,7 @@ MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state); * configured, it is the next frame decoded. In both cases the timestamp reflects the * presentation timestamp. * - * + * * * The frame object is a reference counted object that represents the frame. The * application can retain this frame object as long as needed to display the frame and @@ -516,7 +516,7 @@ MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state); * mix_video_release_frame(). The application should not modify the reference count or * delete this object directly. * - * + * */ MIX_RESULT mix_video_get_mixbuffer(MixVideo * mix, MixBuffer ** buf); @@ -526,7 +526,7 @@ MIX_RESULT mix_video_get_mixbuffer(MixVideo * mix, MixBuffer ** buf); * @mix: #MixVideo object. * @buf: A pointer to a #MixBuffer object, described in mix_video_get_mixbuffer(). * @returns: Common Video Error Return Codes - * + * * This function releases a frame object that was acquired from mix_video_get_mixbuffer(). */ MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf); @@ -537,7 +537,7 @@ MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf); * @mix: #MixVideo object. * @bufsize: Pointer to guint. * @returns: Common Video Error Return Codes - * + * * * This function can be used to get the maximum size of encoded data buffer needed for * the mix_video_encode() call. @@ -555,7 +555,7 @@ MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize); * @params_type: Dynamic encoder configuration type * @dynamic_params: Point to dynamic control data structure which includes the new value to be changed to * @returns: Common Video Error Return Codes - * + * * * This function can be used to change the encoder parameters at run-time * @@ -563,7 +563,7 @@ MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize); * Usually this function is after the encoding session is started. 
* */ -MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, +MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); G_END_DECLS diff --git a/mix_video/src/mixvideoconfigparamsdec.c b/mix_video/src/mixvideoconfigparamsdec.c index 7ad334f..e9659dd 100644 --- a/mix_video/src/mixvideoconfigparamsdec.c +++ b/mix_video/src/mixvideoconfigparamsdec.c @@ -50,6 +50,13 @@ static void mix_videoconfigparamsdec_init(MixVideoConfigParamsDec * self) { self->mixbuffer_pool_size = 0; self->extra_surface_allocation = 0; + self->video_range = 0; + self->color_matrix = 0; + self->bit_rate = 0; + + self->par_num = 0; + self->par_denom= 0; + /* TODO: initialize other properties */ self->reserved1 = NULL; self->reserved2 = NULL; @@ -165,6 +172,11 @@ gboolean mix_videoconfigparamsdec_copy(MixParams * target, const MixParams * src this_target->rate_control = this_src->rate_control; this_target->mixbuffer_pool_size = this_src->mixbuffer_pool_size; this_target->extra_surface_allocation = this_src->extra_surface_allocation; + this_target->video_range = this_src->video_range; + this_target->color_matrix = this_src->color_matrix; + this_target->bit_rate = this_src->bit_rate; + this_target->par_num = this_src->par_num; + this_target->par_denom = this_src->par_denom; /* copy properties of non-primitive */ @@ -297,6 +309,31 @@ gboolean mix_videoconfigparamsdec_equal(MixParams * first, MixParams * second) { goto not_equal; } + if (this_first->video_range != this_second->video_range) + { + goto not_equal; + } + + if (this_first->color_matrix != this_second->color_matrix) + { + goto not_equal; + } + + if (this_first->bit_rate != this_second->bit_rate) + { + goto not_equal; + } + + if (this_first->par_num != this_second->par_num) + { + goto not_equal; + } + + if (this_first->par_denom != this_second->par_denom) + { + goto not_equal; + } + ret = TRUE; not_equal: @@ -533,5 +570,80 @@ MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation( } +MIX_RESULT mix_videoconfigparamsdec_set_video_range( + MixVideoConfigParamsDec * obj, + guint8 video_range) +{ + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->video_range = video_range; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_video_range( + MixVideoConfigParamsDec * obj, + guint8 *video_range) +{ + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, video_range); + *video_range = obj->video_range; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_color_matrix( + MixVideoConfigParamsDec * obj, + guint8 color_matrix) +{ + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->color_matrix = color_matrix; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_color_matrix( + MixVideoConfigParamsDec * obj, + guint8 *color_matrix) +{ + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, color_matrix); + *color_matrix = obj->color_matrix; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_bit_rate( + MixVideoConfigParamsDec * obj, + guint bit_rate) +{ + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->bit_rate = bit_rate; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_bit_rate( + MixVideoConfigParamsDec * obj, + guint *bit_rate) +{ + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, bit_rate); + *bit_rate = obj->bit_rate; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_pixel_aspect_ratio( + MixVideoConfigParamsDec * obj, + guint 
par_num, + guint par_denom) +{ + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->par_num = par_num; + obj->par_denom = par_denom; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_pixel_aspect_ratio( + MixVideoConfigParamsDec * obj, + guint * par_num, + guint * par_denom) +{ + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, par_num, par_denom); + *par_num = obj->par_num; + *par_denom = obj->par_denom; + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixvideoconfigparamsdec.h b/mix_video/src/mixvideoconfigparamsdec.h index 809eb1e..b4574c1 100644 --- a/mix_video/src/mixvideoconfigparamsdec.h +++ b/mix_video/src/mixvideoconfigparamsdec.h @@ -105,6 +105,26 @@ struct _MixVideoConfigParamsDec { /* Extra surfaces for MixVideoFrame objects to be allocated */ guint extra_surface_allocation; + + /* video range, 0 for short range and 1 for full range, output only */ + guint8 video_range; + + /* + color matrix, output only. Possible values defined in va.h + #define VA_SRC_BT601 0x00000010 + #define VA_SRC_BT709 0x00000020 + #define VA_SRC_SMPTE_240 0x00000040 + */ + guint8 color_matrix; + + /* bit rate in bps, output only */ + guint8 bit_rate; + + /* Pixel aspect ratio numerator value */ + guint par_num; + + /* Pixel aspect ratio denominator value */ + guint par_denom; /* Reserved for future use */ void *reserved1; @@ -375,6 +395,102 @@ MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation(MixVideoConfigP MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation(MixVideoConfigParamsDec * obj, guint *extra_surface_allocation); + +/** + * mix_videoconfigparamsdec_set_video_range: + * @obj: #MixVideoConfigParamsDec object + * @video_range: 1 for full video range, 0 for short video range. + * @returns: Common Video Error Return Codes + * + * Set video range + */ +MIX_RESULT mix_videoconfigparamsdec_set_video_range(MixVideoConfigParamsDec * obj, + guint8 video_range); + +/** + * mix_videoconfigparamsdec_get_video_range: + * @obj: #MixVideoConfigParamsDec object + * @video_range: video range to be returned + * @returns: Common Video Error Return Codes + * + * Get video range + */ +MIX_RESULT mix_videoconfigparamsdec_get_video_range(MixVideoConfigParamsDec * obj, + guint8 *video_range); + + +/** + * mix_videoconfigparamsdec_set_color_matrix: + * @obj: #MixVideoConfigParamsDec object + * @color_matrix: BT601 or BT709, defined in va.h. 0 for any other including unspecified color matrix. + * @returns: Common Video Error Return Codes + * + * Set color matrix + */ +MIX_RESULT mix_videoconfigparamsdec_set_color_matrix(MixVideoConfigParamsDec * obj, + guint8 color_matrix); + +/** + * mix_videoconfigparamsdec_get_color_matrix: + * @obj: #MixVideoConfigParamsDec object + * @color_matrix: color matrix to be returned + * @returns: Common Video Error Return Codes + * + * Get color matrix + */ +MIX_RESULT mix_videoconfigparamsdec_get_color_matrix(MixVideoConfigParamsDec * obj, + guint8 *color_matrix); + + +/** + * mix_videoconfigparamsdec_set_bit_rate: + * @obj: #MixVideoConfigParamsDec object + * @bit_rate: bit rate in bit per second. 
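
The pixel-aspect-ratio accessors added above report the stream's PAR as a numerator/denominator pair, left at 0/0 when the bitstream does not specify one. A hedged sketch of the usual consumer-side computation, deriving a display width from the coded width (variable names assumed):

/* Hedged sketch: scale the coded width by the pixel aspect ratio,
 * guarding the unspecified 0/0 case. */
guint par_num = 0, par_denom = 0;
guint display_width = coded_width;      /* coded_width assumed known */

if (mix_videoconfigparamsdec_get_pixel_aspect_ratio(dec_params,
        &par_num, &par_denom) == MIX_RESULT_SUCCESS &&
    par_num != 0 && par_denom != 0)
{
    display_width = coded_width * par_num / par_denom;
}
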
+ * @returns: Common Video Error Return Codes + * + * Set bit rate + */ +MIX_RESULT mix_videoconfigparamsdec_set_bit_rate(MixVideoConfigParamsDec * obj, + guint bit_rate); + +/** + * mix_videoconfigparamsdec_get_bit_rate: + * @obj: #MixVideoConfigParamsDec object + * @bit_rate: bit rate to be returned + * @returns: Common Video Error Return Codes + * + * Get bit rate + */ +MIX_RESULT mix_videoconfigparamsdec_get_bit_rate(MixVideoConfigParamsDec * obj, + guint *bit_rate); + + + +/** + * mix_videoconfigparamsdec_set_pixel_aspect_ratio: + * @obj: #MixVideoConfigParamsDec object + * @par_num: Pixel aspect ratio numerator value + * @par_denom: Pixel aspect ratio denominator value * + * @returns: Common Video Error Return Codes + * + * Set pixel aspect ratio + */ +MIX_RESULT mix_videoconfigparamsdec_set_pixel_aspect_ratio(MixVideoConfigParamsDec * obj, + guint par_num, guint par_denom); + +/** + * mix_videoconfigparamsdec_get_pixel_aspect_ratio: + * @obj: #MixVideoConfigParamsDec object + * @par_num: Pixel aspect ratio numerator value to be returned + * @par_denom: Pixel aspect ratio denominator value to be returned + * @returns: Common Video Error Return Codes + * + * Get pixel aspect ratio + */ +MIX_RESULT mix_videoconfigparamsdec_get_pixel_aspect_ratio(MixVideoConfigParamsDec * obj, + guint * par_num, guint * par_denom); + + /* TODO: Add getters and setters for other properties */ G_END_DECLS diff --git a/mix_video/src/mixvideoconfigparamsenc.c b/mix_video/src/mixvideoconfigparamsenc.c index 040b612..c35ade2 100644 --- a/mix_video/src/mixvideoconfigparamsenc.c +++ b/mix_video/src/mixvideoconfigparamsenc.c @@ -34,12 +34,16 @@ G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEnc, mix_videoconfigparamsenc, MIX_TYPE_VIDEOCONFIGPARAMS, _do_init); static void mix_videoconfigparamsenc_init(MixVideoConfigParamsEnc * self) { - /* initialize properties here */ + /* initialize properties here */ self->bitrate = 0; self->frame_rate_num = 30; - self->frame_rate_denom = 1; + self->frame_rate_denom = 1; self->initial_qp = 15; self->min_qp = 0; + self->target_percentage = 95; + self->window_size = 500; + + self->max_slice_size = 0; /*Set to 0 means it won't take effect*/ self->picture_width = 0; self->picture_height = 0; @@ -54,15 +58,20 @@ static void mix_videoconfigparamsenc_init(MixVideoConfigParamsEnc * self) { self->ci_frame_id = NULL; self->ci_frame_num = 0; - - self->need_display = TRUE; + + self->need_display = TRUE; self->rate_control = MIX_RATE_CONTROL_NONE; self->raw_format = MIX_RAW_TARGET_FORMAT_YUV420; - self->profile = MIX_PROFILE_H264BASELINE; + self->profile = MIX_PROFILE_H264BASELINE; self->level = 30; self->CIR_frame_cnt = 15; + self->refresh_type = MIX_VIDEO_NONIR; + + self->air_params.air_MBs = 0; + self->air_params.air_threshold = 0; + self->air_params.air_auto = 0; /* TODO: initialize other properties */ self->reserved1 = NULL; @@ -73,10 +82,10 @@ static void mix_videoconfigparamsenc_init(MixVideoConfigParamsEnc * self) { static void mix_videoconfigparamsenc_class_init(MixVideoConfigParamsEncClass * klass) { MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - + /* setup static parent class */ parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); - + mixparams_class->finalize = mix_videoconfigparamsenc_finalize; mixparams_class->copy = (MixParamsCopyFunction) mix_videoconfigparamsenc_copy; mixparams_class->dup = (MixParamsDupFunction) mix_videoconfigparamsenc_dup; @@ -89,7 +98,7 @@ mix_videoconfigparamsenc_new(void) { MixVideoConfigParamsEnc *ret = 
(MixVideoConfigParamsEnc *) g_type_create_instance( MIX_TYPE_VIDEOCONFIGPARAMSENC); - + return ret; } @@ -128,9 +137,9 @@ mix_videoconfigparamsenc_ref(MixVideoConfigParamsEnc * mix) { MixParams * mix_videoconfigparamsenc_dup(const MixParams * obj) { MixParams *ret = NULL; - - LOG_V( "Begin\n"); - + + LOG_V( "Begin\n"); + if (MIX_IS_VIDEOCONFIGPARAMSENC(obj)) { MixVideoConfigParamsEnc *duplicate = mix_videoconfigparamsenc_new(); if (mix_videoconfigparamsenc_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { @@ -156,7 +165,7 @@ gboolean mix_videoconfigparamsenc_copy(MixParams * target, const MixParams * src MixVideoConfigParamsEnc *this_target, *this_src; MIX_RESULT mix_result = MIX_RESULT_FAIL; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); if (MIX_IS_VIDEOCONFIGPARAMSENC(target) && MIX_IS_VIDEOCONFIGPARAMSENC(src)) { @@ -168,24 +177,31 @@ gboolean mix_videoconfigparamsenc_copy(MixParams * target, const MixParams * src this_target->bitrate = this_src->bitrate; this_target->frame_rate_num = this_src->frame_rate_num; - this_target->frame_rate_denom = this_src->frame_rate_denom; + this_target->frame_rate_denom = this_src->frame_rate_denom; this_target->initial_qp = this_src->initial_qp; this_target->min_qp = this_src->min_qp; + this_target->target_percentage = this_src->target_percentage; + this_target->window_size = this_src->window_size; + this_target->max_slice_size = this_src->max_slice_size; this_target->intra_period = this_src->intra_period; - this_target->picture_width = this_src->picture_width; + this_target->picture_width = this_src->picture_width; this_target->picture_height = this_src->picture_height; this_target->mixbuffer_pool_size = this_src->mixbuffer_pool_size; this_target->share_buf_mode = this_src->share_buf_mode; - this_target->encode_format = this_src->encode_format; - this_target->ci_frame_num = this_src->ci_frame_num; - this_target->draw= this_src->draw; + this_target->encode_format = this_src->encode_format; + this_target->ci_frame_num = this_src->ci_frame_num; + this_target->draw= this_src->draw; this_target->need_display = this_src->need_display; - this_target->rate_control = this_src->rate_control; - this_target->raw_format = this_src->raw_format; - this_target->profile = this_src->profile; - this_target->level = this_src->level; - this_target->CIR_frame_cnt = this_src->CIR_frame_cnt; - + this_target->rate_control = this_src->rate_control; + this_target->raw_format = this_src->raw_format; + this_target->profile = this_src->profile; + this_target->level = this_src->level; + this_target->CIR_frame_cnt = this_src->CIR_frame_cnt; + this_target->refresh_type = this_src->refresh_type; + this_target->air_params.air_MBs = this_src->air_params.air_MBs; + this_target->air_params.air_threshold = this_src->air_params.air_threshold; + this_target->air_params.air_auto = this_src->air_params.air_auto; + /* copy properties of non-primitive */ /* copy mime_type */ @@ -193,32 +209,32 @@ gboolean mix_videoconfigparamsenc_copy(MixParams * target, const MixParams * src if (this_src->mime_type) { #ifdef MDEBUG if (this_src->mime_type->str) { - - LOG_I( "this_src->mime_type->str = %s %x\n", - this_src->mime_type->str, (unsigned int)this_src->mime_type->str); + + LOG_I( "this_src->mime_type->str = %s %x\n", + this_src->mime_type->str, (unsigned int)this_src->mime_type->str); } #endif mix_result = mix_videoconfigparamsenc_set_mime_type(this_target, this_src->mime_type->str); } else { - + LOG_I( "this_src->mime_type = NULL\n"); - + mix_result = mix_videoconfigparamsenc_set_mime_type(this_target, 
NULL); } - + if (mix_result != MIX_RESULT_SUCCESS) { - - LOG_E( "Failed to mix_videoconfigparamsenc_set_mime_type\n"); + + LOG_E( "Failed to mix_videoconfigparamsenc_set_mime_type\n"); return FALSE; - } - + } + mix_result = mix_videoconfigparamsenc_set_ci_frame_info (this_target, this_src->ci_frame_id, this_src->ci_frame_num); - + /* TODO: copy other properties if there's any */ - + /* Now chainup base class */ if (parent_class->copy) { return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( @@ -227,7 +243,7 @@ gboolean mix_videoconfigparamsenc_copy(MixParams * target, const MixParams * src return TRUE; } } - + return FALSE; } @@ -273,10 +289,22 @@ gboolean mix_videoconfigparamsenc_equal(MixParams * first, MixParams * second) { if (this_first->min_qp != this_second->min_qp) { goto not_equal; } - + + if (this_first->target_percentage != this_second->target_percentage) { + goto not_equal; + } + + if (this_first->window_size != this_second->window_size) { + goto not_equal; + } + + if (this_first->max_slice_size != this_second->max_slice_size) { + goto not_equal; + } + if (this_first->intra_period != this_second->intra_period) { goto not_equal; - } + } if (this_first->picture_width != this_second->picture_width && this_first->picture_height != this_second->picture_height) { @@ -289,11 +317,11 @@ gboolean mix_videoconfigparamsenc_equal(MixParams * first, MixParams * second) { if (this_first->mixbuffer_pool_size != this_second->mixbuffer_pool_size) { goto not_equal; - } + } if (this_first->share_buf_mode != this_second->share_buf_mode) { goto not_equal; - } + } if (this_first->ci_frame_id != this_second->ci_frame_id) { goto not_equal; @@ -301,34 +329,52 @@ gboolean mix_videoconfigparamsenc_equal(MixParams * first, MixParams * second) { if (this_first->ci_frame_num != this_second->ci_frame_num) { goto not_equal; - } + } if (this_first->draw != this_second->draw) { goto not_equal; - } + } if (this_first->need_display!= this_second->need_display) { goto not_equal; - } - - if (this_first->rate_control != this_second->rate_control) { - goto not_equal; - } - - if (this_first->raw_format != this_second->raw_format) { - goto not_equal; - } - - if (this_first->profile != this_second->profile) { - goto not_equal; - } - if (this_first->level != this_second->level) { - goto not_equal; - } - - if (this_first->CIR_frame_cnt != this_second->CIR_frame_cnt) { - goto not_equal; - } + } + + if (this_first->rate_control != this_second->rate_control) { + goto not_equal; + } + + if (this_first->raw_format != this_second->raw_format) { + goto not_equal; + } + + if (this_first->profile != this_second->profile) { + goto not_equal; + } + + if (this_first->level != this_second->level) { + goto not_equal; + } + + if (this_first->CIR_frame_cnt != this_second->CIR_frame_cnt) { + goto not_equal; + } + + if (this_first->refresh_type != this_second->refresh_type) { + goto not_equal; + } + + if (this_first->air_params.air_MBs != this_second->air_params.air_MBs) { + goto not_equal; + } + + if (this_first->air_params.air_threshold != this_second->air_params.air_threshold) { + goto not_equal; + } + + if (this_first->air_params.air_auto != this_second->air_params.air_auto) { + goto not_equal; + } + /* check the equalitiy of the none-primitive type properties */ /* compare mime_type */ @@ -340,7 +386,7 @@ gboolean mix_videoconfigparamsenc_equal(MixParams * first, MixParams * second) { } } else if (!(!this_first->mime_type && !this_second->mime_type)) { goto not_equal; - } + } ret = TRUE; @@ -385,7 +431,7 @@ MIX_RESULT 
mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj, return MIX_RESULT_NULL_PTR; } - LOG_I( "mime_type = %s %x\n", + LOG_I( "mime_type = %s %x\n", mime_type, (unsigned int)mime_type); if (obj->mime_type) { @@ -396,9 +442,9 @@ MIX_RESULT mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj, } - LOG_I( "mime_type = %s %x\n", + LOG_I( "mime_type = %s %x\n", mime_type, (unsigned int)mime_type); - + obj->mime_type = g_string_new(mime_type); if (!obj->mime_type) { return MIX_RESULT_NO_MEMORY; @@ -479,13 +525,13 @@ MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc * obj, obj->bitrate= bitrate; return MIX_RESULT_SUCCESS; -} +} MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc * obj, guint *bitrate) { MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, bitrate); *bitrate = obj->bitrate; - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj, @@ -493,20 +539,20 @@ MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj, MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); obj->initial_qp = initial_qp; return MIX_RESULT_SUCCESS; -} +} MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc * obj, guint *initial_qp) { MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, initial_qp); *initial_qp = obj->initial_qp; return MIX_RESULT_SUCCESS; - -} + +} MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj, guint min_qp) { MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->min_qp = min_qp; + obj->min_qp = min_qp; return MIX_RESULT_SUCCESS; } @@ -514,15 +560,51 @@ MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj, guint *min_qp) { MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, min_qp); *min_qp = obj->min_qp; - + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoconfigparamsenc_set_target_percentage (MixVideoConfigParamsEnc * obj, + guint target_percentage) { + + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->target_percentage = target_percentage; + return MIX_RESULT_SUCCESS; + } + + +MIX_RESULT mix_videoconfigparamsenc_get_target_percentage(MixVideoConfigParamsEnc * obj, + guint *target_percentage) { + + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, target_percentage); + *target_percentage = obj->target_percentage; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_window_size (MixVideoConfigParamsEnc * obj, + guint window_size) { + + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->window_size = window_size; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_window_size (MixVideoConfigParamsEnc * obj, + guint *window_size) { + + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, window_size); + *window_size = obj->window_size; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc * obj, guint intra_period) { MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->intra_period = intra_period; - + obj->intra_period = intra_period; + return MIX_RESULT_SUCCESS; } @@ -530,7 +612,7 @@ MIX_RESULT mix_videoconfigparamsenc_get_intra_period (MixVideoConfigParamsEnc * guint *intra_period) { MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, intra_period); *intra_period = obj->intra_period; - + return MIX_RESULT_SUCCESS; } @@ -564,15 +646,15 @@ MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode(MixVideoConfigParamsEnc * 
gboolean *share_buf_mod) { MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, share_buf_mod); - *share_buf_mod = obj->share_buf_mode; - return MIX_RESULT_SUCCESS; + *share_buf_mod = obj->share_buf_mode; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj, +MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj, gulong * ci_frame_id, guint ci_frame_num) { MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - - + + if (!ci_frame_id || !ci_frame_num) { obj->ci_frame_id = NULL; obj->ci_frame_num = 0; @@ -584,7 +666,7 @@ MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * guint size = ci_frame_num * sizeof (gulong); obj->ci_frame_num = ci_frame_num; - + obj->ci_frame_id = g_malloc (ci_frame_num * sizeof (gulong)); if (!(obj->ci_frame_id)) { return MIX_RESULT_NO_MEMORY; @@ -600,7 +682,7 @@ MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, ci_frame_id, ci_frame_num); *ci_frame_num = obj->ci_frame_num; - + if (!obj->ci_frame_id) { *ci_frame_id = NULL; return MIX_RESULT_SUCCESS; @@ -608,36 +690,36 @@ MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * if (obj->ci_frame_num) { *ci_frame_id = g_malloc (obj->ci_frame_num * sizeof (gulong)); - + if (!*ci_frame_id) { return MIX_RESULT_NO_MEMORY; - } - + } + memcpy (*ci_frame_id, obj->ci_frame_id, obj->ci_frame_num * sizeof (gulong)); - + } else { *ci_frame_id = NULL; } - - return MIX_RESULT_SUCCESS; + + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj, +MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj, gulong draw) { - + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); obj->draw = draw; return MIX_RESULT_SUCCESS; - + } MIX_RESULT mix_videoconfigparamsenc_get_drawable (MixVideoConfigParamsEnc * obj, gulong *draw) { MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, draw); - *draw = obj->draw; - return MIX_RESULT_SUCCESS; + *draw = obj->draw; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_set_need_display ( @@ -652,8 +734,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * o gboolean *need_display) { MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, need_display); - *need_display = obj->need_display; - return MIX_RESULT_SUCCESS; + *need_display = obj->need_display; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj, @@ -668,62 +750,110 @@ MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * o MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, rate_control); *rate_control = obj->rate_control; return MIX_RESULT_SUCCESS; -} +} MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj, MixRawTargetFormat raw_format) { MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); obj->raw_format = raw_format; - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * obj, MixRawTargetFormat * raw_format) { MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, raw_format); *raw_format = obj->raw_format; - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj, MixProfile profile) { MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); 
obj->profile = profile; - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj, MixProfile * profile) { MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, profile); *profile = obj->profile; - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, +MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, guint8 level) { MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); obj->level = level; - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, +MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, guint8 * level) { MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, level); *level = obj->level; - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, +MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, guint CIR_frame_cnt) { MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); obj->CIR_frame_cnt = CIR_frame_cnt; - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, +MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, guint * CIR_frame_cnt) { MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, CIR_frame_cnt); *CIR_frame_cnt = obj->CIR_frame_cnt; - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoconfigparamsenc_set_max_slice_size (MixVideoConfigParamsEnc * obj, + guint max_slice_size) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->max_slice_size = max_slice_size; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_max_slice_size (MixVideoConfigParamsEnc * obj, + guint * max_slice_size) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, max_slice_size); + *max_slice_size = obj->max_slice_size; + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoconfigparamsenc_set_refresh_type(MixVideoConfigParamsEnc * obj, + MixVideoIntraRefreshType refresh_type) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->refresh_type = refresh_type; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_refresh_type (MixVideoConfigParamsEnc * obj, + MixVideoIntraRefreshType * refresh_type) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, refresh_type); + *refresh_type = obj->refresh_type; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_AIR_params (MixVideoConfigParamsEnc * obj, + MixAIRParams air_params) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->air_params.air_MBs = air_params.air_MBs; + obj->air_params.air_threshold = air_params.air_threshold; + obj->air_params.air_auto = air_params.air_auto; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_AIR_params (MixVideoConfigParamsEnc * obj, + MixAIRParams * air_params) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, air_params); + air_params->air_MBs = obj->air_params.air_MBs; + air_params->air_threshold = obj->air_params.air_threshold; + air_params->air_auto = obj->air_params.air_auto; + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoconfigparamsenc.h b/mix_video/src/mixvideoconfigparamsenc.h index adb5f25..f45cc50 100644 --- 
a/mix_video/src/mixvideoconfigparamsenc.h +++ b/mix_video/src/mixvideoconfigparamsenc.h @@ -83,35 +83,44 @@ struct _MixVideoConfigParamsEnc { MixRawTargetFormat raw_format; /* Rate control mode */ - MixRateControl rate_control; + MixRateControl rate_control; /* Bitrate when rate control is used */ guint bitrate; - + /* Numerator of frame rate */ guint frame_rate_num; - + /* Denominator of frame rate */ guint frame_rate_denom; - + /* The initial QP value */ guint initial_qp; - + /* The minimum QP value */ guint min_qp; - + + /* The bit-rate the rate control is targeting, as a percentage of the maximum bit-rate. + * For example, if target_percentage is 95, then the rate control will target a bit-rate that is + * 95% of the maximum bit-rate. + */ + guint target_percentage; + + /* Window size in milliseconds. For example, if this is set to 500, then the rate control will guarantee the target bit-rate over any 500 ms window. */ + guint window_size; + /* Number of frames between key frames (GOP size) */ guint intra_period; - + /* Width of video frame */ guint16 picture_width; - + /* Height of the video frame */ - guint16 picture_height; + guint16 picture_height; /* Mime type, reserved */ GString * mime_type; - + /* Encode target format */ MixEncodeTargetFormat encode_format; @@ -119,35 +128,45 @@ struct _MixVideoConfigParamsEnc { guint mixbuffer_pool_size; /* Are buffers shared between capture and encoding drivers */ - gboolean share_buf_mode; + gboolean share_buf_mode; /* Array of frame IDs created by capture library */ gulong * ci_frame_id; - + /* Size of the array ci_frame_id */ guint ci_frame_num; - + guint CIR_frame_cnt; - + + /* The maximum slice size to be set to the video driver (in bits). + * The encoder hardware will try to make sure a single slice does not exceed this size; + * if it cannot, mix_video_encode() will report a specific error + */ + guint max_slice_size; + + MixVideoIntraRefreshType refresh_type; + + MixAIRParams air_params; + /* < private > */ gulong draw; - + /*< public > */ - - /* Indicates whether MixVideoFrames suitable for displaying + + /* Indicates whether MixVideoFrames suitable for displaying * need to be enqueued for retrieval using mix_video_get_frame() */ gboolean need_display; - + /* Reserved for future use */ void *reserved1; - - /* Reserved for future use */ + + /* Reserved for future use */ void *reserved2; - - /* Reserved for future use */ + + /* Reserved for future use */ void *reserved3; - - /* Reserved for future use */ + + /* Reserved for future use */ void *reserved4; }; @@ -215,10 +234,10 @@ MIX_RESULT mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj, * @returns: Common Video Error Return Codes * * Get mime type - * + * * * Caller is responsible to g_free *mime_type - * + * */ MIX_RESULT mix_videoconfigparamsenc_get_mime_type(MixVideoConfigParamsEnc * obj, gchar ** mime_type); @@ -360,6 +379,51 @@ MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj, MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj, guint *min_qp); + +/** + * mix_videoconfigparamsenc_set_target_percentage: + * @obj: #MixVideoConfigParamsEnc object + * @target_percentage: The target percentage value + * @returns: Common Video Error Return Codes + * + * Set The target percentage value + */ +MIX_RESULT mix_videoconfigparamsenc_set_target_percentage (MixVideoConfigParamsEnc * obj, + guint target_percentage); + +/** + * mix_videoconfigparamsenc_get_target_percentage: + * @obj: #MixVideoConfigParamsEnc object + * @target_percentage: The target percentage value
to be returned + * @returns: Common Video Error Return Codes + * + * Get The target percentage value + */ +MIX_RESULT mix_videoconfigparamsenc_get_target_percentage(MixVideoConfigParamsEnc * obj, + guint *target_percentage); + +/** + * mix_videoconfigparamsenc_set_window_size: + * @obj: #MixVideoConfigParamsEnc object + * @window_size: The window size for rate control + * @returns: Common Video Error Return Codes + * + * Set The window size value + */ +MIX_RESULT mix_videoconfigparamsenc_set_window_size (MixVideoConfigParamsEnc * obj, + guint window_size); + +/** + * mix_videoconfigparamsenc_get_window_size: + * @obj: #MixVideoConfigParamsEnc object + * @window_size: The window size for rate control + * @returns: Common Video Error Return Codes + * + * Get The window size value + */ +MIX_RESULT mix_videoconfigparamsenc_get_window_size (MixVideoConfigParamsEnc * obj, + guint *window_size); + /** * mix_videoconfigparamsenc_set_intra_period: * @obj: #MixVideoConfigParamsEnc object @@ -407,7 +471,7 @@ MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size(MixVideoConfigParamsEnc /** * mix_videoconfigparamsenc_set_share_buf_mode: * @obj: #MixVideoConfigParamsEnc object - * @share_buf_mod: A flag to indicate whether buffers are shared + * @share_buf_mod: A flag to indicate whether buffers are shared * between capture and encoding drivers or not * @returns: Common Video Error Return Codes * @@ -419,7 +483,7 @@ MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode (MixVideoConfigParamsEnc /** * mix_videoconfigparamsenc_get_share_buf_mode: * @obj: #MixVideoConfigParamsEnc object - * @share_buf_mod: the flag to be returned that indicates whether buffers + * @share_buf_mod: the flag to be returned that indicates whether buffers * are shared between capture and encoding drivers or not * @returns: Common Video Error Return Codes * @@ -431,26 +495,26 @@ MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode(MixVideoConfigParamsEnc * /** * mix_videoconfigparamsenc_set_ci_frame_info: * @obj: #MixVideoConfigParamsEnc object - * @ci_frame_id: Array of frame IDs created by capture library * - * @ci_frame_num: Size of the array ci_frame_id + * @ci_frame_id: Array of frame IDs created by capture library * + * @ci_frame_num: Size of the array ci_frame_id * @returns: Common Video Error Return Codes * * Set CI frame information */ -MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj, +MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj, gulong * ci_frame_id, guint ci_frame_num); /** * mix_videoconfigparamsenc_get_ci_frame_info: * @obj: #MixVideoConfigParamsEnc object - * @ci_frame_id: Array of frame IDs created by capture library to be returned + * @ci_frame_id: Array of frame IDs created by capture library to be returned * @ci_frame_num: Size of the array ci_frame_id to be returned * @returns: Common Video Error Return Codes * * Get CI frame information * * Caller is responsible to g_free *ci_frame_id - * + * */ MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * obj, gulong * *ci_frame_id, guint *ci_frame_num); @@ -459,18 +523,18 @@ MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * /** * mix_videoconfigparamsenc_set_drawable: * @obj: #MixVideoConfigParamsEnc object - * @draw: drawable + * @draw: drawable * @returns: Common Video Error Return Codes * * Set drawable */ -MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj, +MIX_RESULT 
mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj, gulong draw); /** * mix_videoconfigparamsenc_get_drawable: * @obj: #MixVideoConfigParamsEnc object - * @draw: drawable to be returned + * @draw: drawable to be returned * @returns: Common Video Error Return Codes * * Get drawable @@ -481,11 +545,11 @@ MIX_RESULT mix_videoconfigparamsenc_get_drawable (MixVideoConfigParamsEnc * obj, /** * mix_videoconfigparamsenc_set_need_display: * @obj: #MixVideoConfigParamsEnc object - * @need_display: Flag to indicates whether MixVideoFrames suitable for displaying + * @need_display: Flag to indicates whether MixVideoFrames suitable for displaying * need to be enqueued for retrieval using mix_video_get_frame() * @returns: Common Video Error Return Codes * - * Set the flag used to indicate whether MixVideoFrames suitable for displaying + * Set the flag used to indicate whether MixVideoFrames suitable for displaying * need to be enqueued for retrieval using mix_video_get_frame() */ MIX_RESULT mix_videoconfigparamsenc_set_need_display ( @@ -495,11 +559,11 @@ MIX_RESULT mix_videoconfigparamsenc_set_need_display ( /** * mix_videoconfigparamsenc_get_need_display: * @obj: #MixVideoConfigParamsEnc object - * @need_display: A flag to be returned to indicates whether MixVideoFrames suitable for displaying + * @need_display: A flag to be returned to indicates whether MixVideoFrames suitable for displaying * need to be enqueued for retrieval using mix_video_get_frame() * @returns: Common Video Error Return Codes * - * Get the flag used to indicate whether MixVideoFrames suitable for displaying + * Get the flag used to indicate whether MixVideoFrames suitable for displaying * need to be enqueued for retrieval using mix_video_get_frame() */ MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * obj, @@ -508,10 +572,10 @@ MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * o /** * mix_videoconfigparamsenc_set_rate_control: * @obj: #MixVideoConfigParamsEnc object - * @rcmode: Rate control mode + * @rcmode: Rate control mode * @returns: Common Video Error Return Codes * - * Set Rate control mode + * Set Rate control mode */ MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj, MixRateControl rcmode); @@ -519,10 +583,10 @@ MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * o /** * mix_videoconfigparamsenc_set_rate_control: * @obj: #MixVideoConfigParamsEnc object - * @rcmode: Rate control mode to be returned + * @rcmode: Rate control mode to be returned * @returns: Common Video Error Return Codes * - * Get Rate control mode + * Get Rate control mode */ MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * obj, MixRateControl * rcmode); @@ -530,10 +594,10 @@ MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * o /** * mix_videoconfigparamsenc_set_raw_format: * @obj: #MixVideoConfigParamsEnc object - * @raw_format: Raw format to be encoded + * @raw_format: Raw format to be encoded * @returns: Common Video Error Return Codes * - * Set Raw format to be encoded + * Set Raw format to be encoded */ MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj, MixRawTargetFormat raw_format); @@ -541,10 +605,10 @@ MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * ob /** * mix_videoconfigparamsenc_get_raw_format: * @obj: #MixVideoConfigParamsEnc object - * @raw_format: Raw format to be returned + * 
@raw_format: Raw format to be returned * @returns: Common Video Error Return Codes * - * Get Raw format + * Get Raw format */ MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * obj, MixRawTargetFormat * raw_format); @@ -552,10 +616,10 @@ MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * ob /** * mix_videoconfigparamsenc_set_profile: * @obj: #MixVideoConfigParamsEnc object - * @profile: Encoding profile + * @profile: Encoding profile * @returns: Common Video Error Return Codes * - * Set Encoding profile + * Set Encoding profile */ MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj, MixProfile profile); @@ -563,10 +627,10 @@ MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj, /** * mix_videoconfigparamsenc_get_profile: * @obj: #MixVideoConfigParamsEnc object - * @profile: Encoding profile to be returned + * @profile: Encoding profile to be returned * @returns: Common Video Error Return Codes * - * Get Encoding profile + * Get Encoding profile */ MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj, MixProfile * profile); @@ -575,51 +639,122 @@ MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj, /** * mix_videoconfigparamsenc_set_level: * @obj: #MixVideoConfigParamsEnc object - * @level: Encoding level + * @level: Encoding level * @returns: Common Video Error Return Codes * - * Set Encoding level + * Set Encoding level */ -MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, +MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, guint8 level); /** * mix_videoconfigparamsenc_get_level: * @obj: #MixVideoConfigParamsEnc object - * @level: Encoding level to be returned + * @level: Encoding level to be returned * @returns: Common Video Error Return Codes * - * Get Encoding level + * Get Encoding level */ -MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, +MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, guint8 * level); /** * mix_videoconfigparamsenc_set_CIR_frame_cnt: * @obj: #MixVideoConfigParamsEnc object - * @CIR_frame_cnt: Encoding CIR frame count + * @CIR_frame_cnt: Encoding CIR frame count * @returns: Common Video Error Return Codes * * Set Encoding CIR frame count */ -MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, +MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, guint CIR_frame_cnt); /** * mix_videoconfigparamsenc_set_CIR_frame_cnt: * @obj: #MixVideoConfigParamsEnc object - * @CIR_frame_cnt: Encoding CIR frame count to be returned + * @CIR_frame_cnt: Encoding CIR frame count to be returned * @returns: Common Video Error Return Codes * - * Get Encoding CIR frame count + * Get Encoding CIR frame count */ -MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, +MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, guint * CIR_frame_cnt); + +/** + * mix_videoconfigparamsenc_set_max_slice_size: + * @obj: #MixVideoConfigParamsEnc object + * @max_slice_size: Maximum encoded slice size + * @returns: Common Video Error Return Codes + * + * Set Maximum encoded slice size + */ +MIX_RESULT mix_videoconfigparamsenc_set_max_slice_size (MixVideoConfigParamsEnc * obj, + guint max_slice_size); + +/** + * mix_videoconfigparamsenc_get_max_slice_size: + * @obj: #MixVideoConfigParamsEnc object + * 
@max_slice_size: Maximum encoded slice size + * @returns: Common Video Error Return Codes + * + * Get Maximum encoded slice size + */ + +MIX_RESULT mix_videoconfigparamsenc_get_max_slice_size (MixVideoConfigParamsEnc * obj, + guint * max_slice_size); + + +/** + * mix_videoconfigparamsenc_set_refresh_type: + * @obj: #MixVideoConfigParamsEnc object + * @refresh_type: The intra refresh type (CIR, AIR etc) + * @returns: Common Video Error Return Codes + * + * Set Intra Refresh Type + */ +MIX_RESULT mix_videoconfigparamsenc_set_refresh_type (MixVideoConfigParamsEnc * obj, + MixVideoIntraRefreshType refresh_type); + +/** + * mix_videoconfigparamsenc_get_refresh_type: + * @obj: #MixVideoConfigParamsEnc object + * @refresh_type: The intra refresh type (CIR, AIR etc) + * @returns: Common Video Error Return Codes + * + * Get Intra Refresh Type + */ + +MIX_RESULT mix_videoconfigparamsenc_get_refresh_type (MixVideoConfigParamsEnc * obj, + MixVideoIntraRefreshType * refresh_type); + +/** + * mix_videoconfigparamsenc_set_AIR_params: + * @obj: #MixVideoConfigParamsEnc object + * @air_params: AIR Parameters, including air_MBs, air_threshold and air_auto + * @returns: Common Video Error Return Codes + * + * Set AIR parameters + */ +MIX_RESULT mix_videoconfigparamsenc_set_AIR_params (MixVideoConfigParamsEnc * obj, + MixAIRParams air_params); + +/** + * mix_videoconfigparamsenc_get_AIR_params: + * @obj: #MixVideoConfigParamsEnc object + * @air_params: AIR Parameters, including air_MBs, air_threshold and air_auto + * @returns: Common Video Error Return Codes + * + * Get AIR parameters + */ + +MIX_RESULT mix_videoconfigparamsenc_get_AIR_params (MixVideoConfigParamsEnc * obj, + MixAIRParams * air_params); + /* TODO: Add getters and setters for other properties */ G_END_DECLS diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.c b/mix_video/src/mixvideoconfigparamsenc_h264.c index 620093d..12a5dd1 100644 --- a/mix_video/src/mixvideoconfigparamsenc_h264.c +++ b/mix_video/src/mixvideoconfigparamsenc_h264.c @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
@@ -53,6 +53,8 @@ mix_videoconfigparamsenc_h264_init (MixVideoConfigParamsEncH264 * self) /* TODO: initialize properties */ self->basic_unit_size = 0; self->slice_num = 1; + self->I_slice_num = 1; + self->P_slice_num = 1; self->disable_deblocking_filter_idc = 0; self->delimiter_type = MIX_DELIMITER_LENGTHPREFIX; @@ -119,7 +121,7 @@ MixVideoConfigParamsEncH264 * mix_videoconfigparamsenc_h264_dup: * @obj: a #MixVideoConfigParams object * @returns: a newly allocated duplicate of the object. -* +* * Copy duplicate of the object. */ MixParams * @@ -148,7 +150,7 @@ mix_videoconfigparamsenc_h264_dup (const MixParams * obj) * @target: copy to target * @src: copy from src * @returns: boolean indicates if copy is successful. -* +* * Copy instance data from @src to @target. */ gboolean @@ -157,7 +159,7 @@ mix_videoconfigparamsenc_h264_copy (MixParams * target, const MixParams * src) MixVideoConfigParamsEncH264 *this_target, *this_src; MixParamsClass *root_class; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (target) && MIX_IS_VIDEOCONFIGPARAMSENC_H264 (src)) @@ -169,11 +171,13 @@ mix_videoconfigparamsenc_h264_copy (MixParams * target, const MixParams * src) //add properties this_target->basic_unit_size = this_src->basic_unit_size; this_target->slice_num = this_src->slice_num; + this_target->I_slice_num = this_src->I_slice_num; + this_target->P_slice_num = this_src->P_slice_num; this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc; this_target->delimiter_type = this_src->delimiter_type; - this_target->idr_interval = this_src->idr_interval; + this_target->idr_interval = this_src->idr_interval; + - // Now chainup base class root_class = MIX_PARAMS_CLASS (parent_class); @@ -196,7 +200,7 @@ mix_videoconfigparamsenc_h264_copy (MixParams * target, const MixParams * src) * @first: first object to compare * @second: seond object to compare * @returns: boolean indicates if instance are equal. -* +* * Copy instance data from @src to @target. 
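[Editor's note -- informative, not part of the patch] The encoder-side knobs added in this series are meant to be used together; the sketch below wires them up with illustrative values only. It assumes the MIX_VIDEOCONFIGPARAMSENC() cast macro that the library's GObject conventions provide, and omits error checking:

	MixVideoConfigParamsEncH264 *h264 = mix_videoconfigparamsenc_h264_new ();
	MixVideoConfigParamsEnc *enc = MIX_VIDEOCONFIGPARAMSENC (h264);
	MixAIRParams air;

	/* VCM-style rate control: aim at 95% of a 2 Mbps cap, enforced over
	 * any 500 ms window, with single slices capped at 9600 bits. */
	mix_videoconfigparamsenc_set_rate_control (enc, MIX_RATE_CONTROL_VCM);
	mix_videoconfigparamsenc_set_bit_rate (enc, 2000000);
	mix_videoconfigparamsenc_set_target_percentage (enc, 95);
	mix_videoconfigparamsenc_set_window_size (enc, 500);
	mix_videoconfigparamsenc_set_max_slice_size (enc, 9600);

	/* Adaptive intra refresh instead of a plain cyclic (CIR) scheme. */
	mix_videoconfigparamsenc_set_refresh_type (enc, MIX_VIDEO_AIR);
	air.air_MBs = 49;        /* illustrative: macroblocks refreshed per frame */
	air.air_threshold = 20;  /* illustrative */
	air.air_auto = 1;
	mix_videoconfigparamsenc_set_AIR_params (enc, air);

	/* H.264-specific: I and P frames may now carry different slice counts.
	 * Note that mix_videoconfigparamsenc_h264_set_slice_num() still sets
	 * both at once, so call these afterwards to differentiate them. */
	mix_videoconfigparamsenc_h264_set_I_slice_num (h264, 2);
	mix_videoconfigparamsenc_h264_set_P_slice_num (h264, 4);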
*/ gboolean @@ -216,23 +220,31 @@ mix_videoconfigparamsencenc_h264_equal (MixParams * first, MixParams * second) if (this_first->basic_unit_size != this_second->basic_unit_size) { goto not_equal; } - + if (this_first->slice_num != this_second->slice_num) { goto not_equal; } + if (this_first->I_slice_num != this_second->I_slice_num) { + goto not_equal; + } + + if (this_first->P_slice_num != this_second->P_slice_num) { + goto not_equal; + } + if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) { goto not_equal; - } + } if (this_first->delimiter_type != this_second->delimiter_type) { goto not_equal; - } + } if (this_first->idr_interval != this_second->idr_interval) { goto not_equal; - } - + } + ret = TRUE; @@ -240,7 +252,7 @@ mix_videoconfigparamsencenc_h264_equal (MixParams * first, MixParams * second) if (ret != TRUE) { return ret; - } + } /* TODO: add comparison for properties */ { @@ -283,7 +295,7 @@ MIX_RESULT mix_videoconfigparamsenc_h264_get_bus (MixVideoConfigParamsEncH264 * MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, basic_unit_size); *basic_unit_size = obj->basic_unit_size; return MIX_RESULT_SUCCESS; -} +} MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (MixVideoConfigParamsEncH264 * obj, @@ -298,13 +310,15 @@ MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk (MixVideoConfigParamsEncH264 * MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; return MIX_RESULT_SUCCESS; -} +} MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(MixVideoConfigParamsEncH264 * obj, guint slice_num) { MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); obj->slice_num = slice_num; + obj->I_slice_num = slice_num; + obj->P_slice_num = slice_num; return MIX_RESULT_SUCCESS; } @@ -313,7 +327,35 @@ MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num(MixVideoConfigParamsEncH2 MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, slice_num); *slice_num = obj->slice_num; return MIX_RESULT_SUCCESS; -} +} + +MIX_RESULT mix_videoconfigparamsenc_h264_set_I_slice_num(MixVideoConfigParamsEncH264 * obj, + guint I_slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->I_slice_num = I_slice_num; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_h264_get_I_slice_num(MixVideoConfigParamsEncH264 * obj, + guint * I_slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, I_slice_num); + *I_slice_num = obj->I_slice_num; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_h264_set_P_slice_num(MixVideoConfigParamsEncH264 * obj, + guint P_slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->P_slice_num = P_slice_num; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_h264_get_P_slice_num(MixVideoConfigParamsEncH264 * obj, + guint * P_slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, P_slice_num); + *P_slice_num = obj->P_slice_num; + return MIX_RESULT_SUCCESS; +} MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParamsEncH264 * obj, MixDelimiterType delimiter_type) { diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.h b/mix_video/src/mixvideoconfigparamsenc_h264.h index 1885846..4eddcb5 100644 --- a/mix_video/src/mixvideoconfigparamsenc_h264.h +++ b/mix_video/src/mixvideoconfigparamsenc_h264.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. 
+Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -16,7 +16,7 @@ G_BEGIN_DECLS /** * MIX_TYPE_VIDEOCONFIGPARAMSENC_H264: -* +* * Get type of class. */ #define MIX_TYPE_VIDEOCONFIGPARAMSENC_H264 (mix_videoconfigparamsenc_h264_get_type ()) @@ -30,7 +30,7 @@ G_BEGIN_DECLS /** * MIX_IS_VIDEOCONFIGPARAMSENC_H264: * @obj: an object. -* +* * Checks if the given object is an instance of #MixVideoConfigParamsEncH264 */ #define MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264)) @@ -44,7 +44,7 @@ G_BEGIN_DECLS /** * MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS: * @klass: a class. -* +* * Checks if the given class is #MixVideoConfigParamsEncH264Class */ #define MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264)) @@ -52,7 +52,7 @@ G_BEGIN_DECLS /** * MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS: * @obj: a #MixParams object. -* +* * Get the class instance of the object. */ #define MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264Class)) @@ -73,37 +73,43 @@ struct _MixVideoConfigParamsEncH264 /*< public > */ /* TODO: Add H.264 configuration paramters */ - - /* The basic unit size used by rate control */ + + /* The basic unit size used by rate control */ guint basic_unit_size; - + /* Number of slices in one frame */ guint slice_num; - + + /* Number of slices in one I frame */ + guint I_slice_num; + + /* Number of slices in one P frame */ + guint P_slice_num; + /* enable/disable deblocking */ - guint8 disable_deblocking_filter_idc; + guint8 disable_deblocking_filter_idc; /* delimiter_type */ MixDelimiterType delimiter_type; guint idr_interval; - - /* Reserved for future use */ + + /* Reserved for future use */ void *reserved1; - - /* Reserved for future use */ + + /* Reserved for future use */ void *reserved2; - - /* Reserved for future use */ + + /* Reserved for future use */ void *reserved3; - - /* Reserved for future use */ + + /* Reserved for future use */ void *reserved4; }; /** * MixVideoConfigParamsEncH264Class: -* +* * MI-X VideoConfig object class */ struct _MixVideoConfigParamsEncH264Class @@ -117,7 +123,7 @@ struct _MixVideoConfigParamsEncH264Class /** * mix_videoconfigparamsenc_h264_get_type: * @returns: type -* +* * Get the type of object. 
*/ GType mix_videoconfigparamsenc_h264_get_type (void); @@ -125,7 +131,7 @@ GType mix_videoconfigparamsenc_h264_get_type (void); /** * mix_videoconfigparamsenc_h264_new: * @returns: A newly allocated instance of #MixVideoConfigParamsEncH264 -* +* * Use this method to create new instance of #MixVideoConfigParamsEncH264 */ MixVideoConfigParamsEncH264 *mix_videoconfigparamsenc_h264_new (void); @@ -133,7 +139,7 @@ MixVideoConfigParamsEncH264 *mix_videoconfigparamsenc_h264_new (void); * mix_videoconfigparamsenc_h264_ref: * @mix: object to add reference * @returns: the #MixVideoConfigParamsEncH264 instance where reference count has been increased. -* +* * Add reference count. */ MixVideoConfigParamsEncH264 @@ -142,7 +148,7 @@ MixVideoConfigParamsEncH264 /** * mix_videoconfigparamsenc_h264_unref: * @obj: object to unref. -* +* * Decrement reference count of the object. */ #define mix_videoconfigparamsenc_h264_unref(obj) mix_params_unref(MIX_PARAMS(obj)) @@ -155,7 +161,7 @@ MixVideoConfigParamsEncH264 /** * mix_videoconfigparamsenc_h264_set_bus: * @obj: #MixVideoConfigParamsEncH264 object - * @basic_unit_size: The basic unit size used by rate control + * @basic_unit_size: The basic unit size used by rate control * @returns: Common Video Error Return Codes * * Set The basic unit size used by rate control @@ -166,7 +172,7 @@ MIX_RESULT mix_videoconfigparamsenc_h264_set_bus (MixVideoConfigParamsEncH264 * /** * mix_videoconfigparamsenc_h264_get_bus: * @obj: #MixVideoConfigParamsEncH264 object - * @basic_unit_size: The basic unit size to be returned + * @basic_unit_size: The basic unit size to be returned * @returns: Common Video Error Return Codes * * Get The basic unit size used by rate control @@ -218,6 +224,51 @@ MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(MixVideoConfigParamsEncH2 MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num(MixVideoConfigParamsEncH264 * obj, guint * slice_num); + +/** + * mix_videoconfigparamsenc_h264_set_I_slice_num: + * @obj: #MixVideoConfigParamsEncH264 object + * @I_slice_num: Number of slices in one I frame + * @returns: Common Video Error Return Codes + * + * Set the Number of slices in one I frame + */ +MIX_RESULT mix_videoconfigparamsenc_h264_set_I_slice_num(MixVideoConfigParamsEncH264 * obj, + guint I_slice_num); + +/** + * mix_videoconfigparamsenc_h264_get_I_slice_num: + * @obj: #MixVideoConfigParamsEncH264 object + * @I_slice_num: Number of slices in one I frame to be returned + * @returns: Common Video Error Return Codes + * + * Get the Number of slices in one I frame + */ +MIX_RESULT mix_videoconfigparamsenc_h264_get_I_slice_num(MixVideoConfigParamsEncH264 * obj, + guint * I_slice_num); + +/** + * mix_videoconfigparamsenc_h264_set_P_slice_num: + * @obj: #MixVideoConfigParamsEncH264 object + * @P_slice_num: Number of slices in one P frame + * @returns: Common Video Error Return Codes + * + * Set the Number of slices in one P frame + */ +MIX_RESULT mix_videoconfigparamsenc_h264_set_P_slice_num(MixVideoConfigParamsEncH264 * obj, + guint P_slice_num); + +/** + * mix_videoconfigparamsenc_h264_get_P_slice_num: + * @obj: #MixVideoConfigParamsEncH264 object + * @P_slice_num: Number of slices in one P frame to be returned + * @returns: Common Video Error Return Codes + * + * Get the Number of slices in one P frame + */ +MIX_RESULT mix_videoconfigparamsenc_h264_get_P_slice_num(MixVideoConfigParamsEncH264 * obj, + guint * P_slice_num); + /** * mix_videoconfigparamsenc_h264_set_delimiter_type: * @obj: #MixVideoConfigParamsEncH264 object diff --git 
a/mix_video/src/mixvideodecodeparams.c b/mix_video/src/mixvideodecodeparams.c index 0c74eb0..1e403ab 100644 --- a/mix_video/src/mixvideodecodeparams.c +++ b/mix_video/src/mixvideodecodeparams.c @@ -36,6 +36,7 @@ static void mix_videodecodeparams_init(MixVideoDecodeParams * self) { self->timestamp = 0; self->discontinuity = FALSE; + self->new_sequence = FALSE; self->reserved1 = NULL; self->reserved2 = NULL; self->reserved3 = NULL; @@ -203,3 +204,17 @@ MIX_RESULT mix_videodecodeparams_get_discontinuity(MixVideoDecodeParams * obj, return MIX_RESULT_SUCCESS; } +MIX_RESULT mix_videodecodeparams_set_new_sequence(MixVideoDecodeParams * obj, + gboolean new_sequence) { + MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->new_sequence = new_sequence; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videodecodeparams_get_new_sequence(MixVideoDecodeParams * obj, + gboolean *new_sequence) { + MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, new_sequence); + *new_sequence = obj->new_sequence; + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideodecodeparams.h b/mix_video/src/mixvideodecodeparams.h index dfd614b..50ec502 100644 --- a/mix_video/src/mixvideodecodeparams.h +++ b/mix_video/src/mixvideodecodeparams.h @@ -80,6 +80,9 @@ struct _MixVideoDecodeParams { /* Indicates a discontinuity in the stream */ gboolean discontinuity; + /* Output only, indicates if the stream contains a new sequence */ + gboolean new_sequence; + /* Reserved for future use */ void *reserved1; @@ -167,7 +170,7 @@ MIX_RESULT mix_videodecodeparams_get_timestamp(MixVideoDecodeParams * obj, /** * mix_videodecodeparams_set_discontinuity: * @obj: #MixVideoDecodeParams object - * @discontinuity: Flag to in Indicates a discontinuity in the stream. + * @discontinuity: Flag to indicate a discontinuity in the stream. * @returns: Common Video Error Return Codes * * Set discontinuity flag @@ -187,6 +190,31 @@ MIX_RESULT mix_videodecodeparams_set_discontinuity(MixVideoDecodeParams * obj, MIX_RESULT mix_videodecodeparams_get_discontinuity(MixVideoDecodeParams * obj, gboolean *discontinuity); + +/** + * mix_videodecodeparams_set_new_sequence: + * @obj: #MixVideoDecodeParams object + * @new_sequence: Flag to indicate if the stream contains a new sequence.
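[Editor's note -- informative, not part of the patch] A sketch of how a client consumes this flag per decode call. mix_video_decode() and the mix_videodecodeparams_new() constructor are assumed from the wider MI-X video API, and `video`, `bufin` and `timestamp` stand in for state set up elsewhere:

	MixVideoDecodeParams *decode_params = mix_videodecodeparams_new ();
	gboolean new_sequence = FALSE;

	mix_videodecodeparams_set_timestamp (decode_params, timestamp);
	mix_videodecodeparams_set_discontinuity (decode_params, FALSE);

	/* The H.264 decode path clears new_sequence on entry and raises it when
	 * the parser reports a new SPS, so it must be re-read after every call. */
	if (mix_video_decode (video, bufin, 1, decode_params) == MIX_RESULT_SUCCESS) {
		mix_videodecodeparams_get_new_sequence (decode_params, &new_sequence);
		if (new_sequence) {
			/* Resolution, pixel aspect ratio or color description may
			 * have changed; re-query the decoder config params before
			 * rendering this and subsequent frames. */
		}
	}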
+ * @returns: Common Video Error Return Codes + * + * Set new_sequence flag + */ +MIX_RESULT mix_videodecodeparams_set_new_sequence(MixVideoDecodeParams * obj, + gboolean new_sequence); + + +/** + * mix_videodecodeparams_get_new_sequence: + * @obj: #MixVideoDecodeParams object + * @new_sequence: new_sequence flag to be returned + * @returns: Common Video Error Return Codes + * + * Get new_sequence flag + */ +MIX_RESULT mix_videodecodeparams_get_new_sequence(MixVideoDecodeParams * obj, + gboolean *new_sequence); + + G_END_DECLS #endif /* __MIX_VIDEODECODEPARAMS_H__ */ diff --git a/mix_video/src/mixvideodef.h b/mix_video/src/mixvideodef.h index 9f8651a..29bb2ac 100644 --- a/mix_video/src/mixvideodef.h +++ b/mix_video/src/mixvideodef.h @@ -11,8 +11,8 @@ * @title: MI-X Video Data Definitons And Common Error Code * @short_description: MI-X Video data definitons and common error code * @include: mixvideodef.h - * - * The section includes the definition of enum and struct as well as + * + * The section includes the definition of enum and struct as well as * * Common Video Error Return Codes of MI-X video functions * @@ -44,6 +44,10 @@ typedef enum { MIX_RESULT_OUTOFSURFACES, MIX_RESULT_DROPFRAME, MIX_RESULT_NOTIMPL, + MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW, + MIX_RESULT_NOT_PERMITTED, + MIX_RESULT_ERROR_PROCESS_STREAM, + MIX_RESULT_MISSING_CONFIG, MIX_RESULT_VIDEO_LAST } MIX_VIDEO_ERROR_CODE; @@ -89,7 +93,7 @@ typedef enum MIX_RAW_TARGET_FORMAT_YUV420 = 1, MIX_RAW_TARGET_FORMAT_YUV422 = 2, MIX_RAW_TARGET_FORMAT_YUV444 = 4, - MIX_RAW_TARGET_FORMAT_PROTECTED = 0x80000000, + MIX_RAW_TARGET_FORMAT_PROTECTED = 0x80000000, MIX_RAW_TARGET_FORMAT_LAST } MixRawTargetFormat; @@ -109,6 +113,7 @@ typedef enum MIX_RATE_CONTROL_NONE = 1, MIX_RATE_CONTROL_CBR = 2, MIX_RATE_CONTROL_VBR = 4, + MIX_RATE_CONTROL_VCM = 8, MIX_RATE_CONTROL_LAST } MixRateControl; @@ -134,38 +139,65 @@ typedef enum MIX_DELIMITER_ANNEXB } MixDelimiterType; +typedef enum { + MIX_VIDEO_NONIR, + MIX_VIDEO_CIR, /*Cyclic intra refresh*/ + MIX_VIDEO_AIR, /*Adaptive intra refresh*/ + MIX_VIDEO_BOTH, + MIX_VIDEO_LAST +} MixVideoIntraRefreshType; + +typedef struct _MixAIRParams +{ + guint air_MBs; + guint air_threshold; + guint air_auto; +} MixAIRParams; typedef enum { MIX_ENC_PARAMS_START_UNUSED = 0x01000000, MIX_ENC_PARAMS_BITRATE, - MIX_ENC_PARAMS_SLICE_SIZE, + MIX_ENC_PARAMS_INIT_QP, + MIX_ENC_PARAMS_MIN_QP, + MIX_ENC_PARAMS_WINDOW_SIZE, + MIX_ENC_PARAMS_TARGET_PERCENTAGE, + MIX_ENC_PARAMS_SLICE_NUM, + MIX_ENC_PARAMS_I_SLICE_NUM, + MIX_ENC_PARAMS_P_SLICE_NUM, MIX_ENC_PARAMS_RESOLUTION, MIX_ENC_PARAMS_GOP_SIZE, MIX_ENC_PARAMS_FRAME_RATE, MIX_ENC_PARAMS_FORCE_KEY_FRAME, MIX_ENC_PARAMS_IDR_INTERVAL, MIX_ENC_PARAMS_RC_MODE, - MIX_ENC_PARAMS_MAX_ENCODED_SLICE_SIZE, - MIX_ENC_PARAMS_QP, + MIX_ENC_PARAMS_MTU_SLICE_SIZE, + MIX_ENC_PARAMS_REFRESH_TYPE, + MIX_ENC_PARAMS_AIR, MIX_ENC_PARAMS_CIR_FRAME_CNT, MIX_ENC_PARAMS_LAST } MixEncParamsType; typedef struct _MixEncDynamicParams { guint bitrate; + guint init_QP; + guint min_QP; + guint window_size; + guint target_percentage; guint slice_num; + guint I_slice_num; + guint P_slice_num; guint width; guint height; guint frame_rate_num; - guint frame_rate_denom; + guint frame_rate_denom; guint intra_period; - guint idr_interval; - guint QP; + guint idr_interval; guint CIR_frame_cnt; guint max_slice_size; gboolean force_idr; MixRateControl rc_mode; - + MixVideoIntraRefreshType refresh_type; + MixAIRParams air_params; } MixEncDynamicParams; G_END_DECLS diff --git a/mix_video/src/mixvideoformat.c 
b/mix_video/src/mixvideoformat.c index c78423f..7f34abd 100644 --- a/mix_video/src/mixvideoformat.c +++ b/mix_video/src/mixvideoformat.c @@ -62,6 +62,7 @@ static void mix_videoformat_init(MixVideoFormat * self) { self->end_picture_pending = FALSE; self->video_frame = NULL; self->extra_surfaces = 0; + self->config_params = NULL; } static void mix_videoformat_class_init(MixVideoFormatClass * klass) { @@ -117,6 +118,12 @@ void mix_videoformat_finalize(GObject * obj) { MIXUNREF(mix->surfacepool, mix_surfacepool_unref); } + if (mix->config_params) + { + mix_videoconfigparams_unref(mix->config_params); + mix->config_params = NULL; + } + //libVA cleanup (vaTerminate is called from MixVideo object) if (mix->va_display) { if (mix->va_context != VA_INVALID_ID) @@ -223,6 +230,13 @@ static MIX_RESULT mix_videofmt_initialize_default(MixVideoFormat *mix, mix->framemgr = frame_mgr; mix_framemanager_ref(mix->framemgr); + if (mix->config_params) + { + mix_videoconfigparams_unref(mix->config_params); + } + mix->config_params = config_params; + mix_videoconfigparams_ref(mix->config_params); + mix->va_display = va_display; if (mix->mime_type) //Clean up any previous mime_type diff --git a/mix_video/src/mixvideoformat.h b/mix_video/src/mixvideoformat.h index 2499934..7a395b0 100644 --- a/mix_video/src/mixvideoformat.h +++ b/mix_video/src/mixvideoformat.h @@ -88,6 +88,7 @@ struct _MixVideoFormat { gboolean end_picture_pending; MixVideoFrame* video_frame; guint extra_surfaces; + MixVideoConfigParamsDec * config_params; }; /** diff --git a/mix_video/src/mixvideoformat_h264.c b/mix_video/src/mixvideoformat_h264.c index 04d517d..1108a0b 100644 --- a/mix_video/src/mixvideoformat_h264.c +++ b/mix_video/src/mixvideoformat_h264.c @@ -157,6 +157,94 @@ MIX_RESULT mix_videofmt_h264_getcaps(MixVideoFormat *mix, GString *msg) { return ret; } +MIX_RESULT mix_video_h264_update_config_params( + MixVideoFormat *mix, + vbp_data_h264 *data) +{ + MixVideoFormat *parent = MIX_VIDEOFORMAT(mix); + + if (parent->picture_width == 0 || + parent->picture_height == 0 || + data->new_sps) + { + parent->picture_width = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16; + parent->picture_height = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16; + + mix_videoconfigparamsdec_set_picture_res( + mix->config_params, + parent->picture_width, + parent->picture_height); + } + + + // video_range has default value of 0. + mix_videoconfigparamsdec_set_video_range( + mix->config_params, + data->codec_data->video_full_range_flag); + + + uint8 color_matrix; + + + + switch (data->codec_data->matrix_coefficients) + { + case 1: + color_matrix = VA_SRC_BT709; + break; + + // ITU-R Recommendation BT.470-6 System B, G (MP4), same as + // SMPTE 170M/BT601 + case 5: + case 6: + color_matrix = VA_SRC_BT601; + break; + + default: + // unknown color matrix, set to 0 so color space flag will not be set. 
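+ // (Editorial note: matrix_coefficients is the H.264 VUI value from
+ // Annex E -- 1 is BT.709, while 5 (BT.470 B/G) and 6 (SMPTE 170M) are
+ // both BT.601-class, which is why they share a case above. Anything
+ // else falls through to here and is deliberately left unmapped.)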
+ color_matrix = 0; + break; + } + mix_videoconfigparamsdec_set_color_matrix(mix->config_params, color_matrix); + + mix_videoconfigparamsdec_set_pixel_aspect_ratio( + mix->config_params, + data->codec_data->sar_width, + data->codec_data->sar_height); + + mix_videoconfigparamsdec_set_bit_rate( + mix->config_params, + data->codec_data->bit_rate); + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_video_h264_handle_new_sequence( + MixVideoFormat *mix, + vbp_data_h264 *data) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V("new sequence is received.\n"); + + // original picture resolution + int width = mix->picture_width; + int height = mix->picture_height; + + mix_video_h264_update_config_params(mix, data); + + if (width != mix->picture_width || height != mix->picture_height) + { + // flush frame manager only if resolution is changed. + ret = mix_framemanager_flush(mix->framemgr); + } + + // TO DO: re-initialize VA + + return ret; +} + + MIX_RESULT mix_videofmt_h264_initialize_va( MixVideoFormat *mix, vbp_data_h264 *data) @@ -166,7 +254,7 @@ MIX_RESULT mix_videofmt_h264_initialize_va( VAConfigAttrib attrib; MixVideoFormat *parent = MIX_VIDEOFORMAT(mix); - MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); if (parent->va_initialized) { @@ -193,7 +281,7 @@ MIX_RESULT mix_videofmt_h264_initialize_va( if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; LOG_E("vaCreateConfig failed\n"); goto cleanup; } @@ -215,7 +303,7 @@ MIX_RESULT mix_videofmt_h264_initialize_va( parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*parent->va_num_surfaces); if (parent->va_surfaces == NULL) { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; LOG_E( "parent->va_surfaces == NULL. 
\n"); goto cleanup; } @@ -227,15 +315,15 @@ MIX_RESULT mix_videofmt_h264_initialize_va( vret = vaCreateSurfaces( parent->va_display, - (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, - (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16, + parent->picture_width, + parent->picture_height, VA_RT_FORMAT_YUV420, parent->va_num_surfaces, parent->va_surfaces); if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; LOG_E( "Error allocating surfaces\n"); goto cleanup; } @@ -281,7 +369,7 @@ MIX_RESULT mix_videofmt_h264_initialize_va( if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; LOG_E( "Error initializing video driver\n"); goto cleanup; } @@ -507,7 +595,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_slice( if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; LOG_E( "Video driver returned error from vaCreateBuffer\n"); goto cleanup; } @@ -529,7 +617,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_slice( if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; LOG_E( "Video driver returned error from vaCreateBuffer\n"); goto cleanup; } @@ -567,7 +655,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_slice( if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; LOG_E( "Video driver returned error from vaCreateBuffer\n"); goto cleanup; } @@ -598,7 +686,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_slice( if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; LOG_E( "Video driver returned error from vaCreateBuffer\n"); goto cleanup; } @@ -909,7 +997,8 @@ MIX_RESULT mix_videofmt_h264_decode_a_buffer( MixVideoFormat *mix, MixBuffer * bufin, guint64 ts, - gboolean discontinuity) + gboolean discontinuity, + MixVideoDecodeParams * decode_params) { uint32 pret = 0; MixVideoFormat *parent = NULL; @@ -929,7 +1018,7 @@ MIX_RESULT mix_videofmt_h264_decode_a_buffer( LOG_V( "Called parse for current frame\n"); if ((pret != VBP_DONE) &&(pret != VBP_OK)) { - ret = MIX_RESULT_DROPFRAME; + ret = MIX_RESULT_ERROR_PROCESS_STREAM; // MIX_RESULT_DROPFRAME; LOG_E( "vbp_parse failed.\n"); goto cleanup; } @@ -948,13 +1037,27 @@ MIX_RESULT mix_videofmt_h264_decode_a_buffer( if (data->has_sps == 0 || data->has_pps == 0) { - ret = MIX_RESULT_SUCCESS; + ret = MIX_RESULT_MISSING_CONFIG; // MIX_RESULT_SUCCESS; LOG_V("SPS or PPS is not available.\n"); goto cleanup; } + if (data->new_sps) + { + decode_params->new_sequence = data->new_sps; + + ret = mix_video_h264_handle_new_sequence(parent, data); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_V("mix_video_h264_handle_new_sequence failed.\n"); + goto cleanup; + } + } + if (parent->va_initialized == FALSE) { + mix_video_h264_update_config_params(parent, data); + LOG_V("try initializing VA...\n"); ret = mix_videofmt_h264_initialize_va(parent, data); if (ret != MIX_RESULT_SUCCESS) @@ -1029,8 +1132,6 @@ MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, vbp_data_h264 *data = NULL; MixVideoFormat *parent = NULL; MixIOVec *header = NULL; - guint pic_width_in_codec_data = 0; - guint pic_height_in_codec_data = 0; if (mix == NULL || config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL) { @@ -1152,17 +1253,7 @@ MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, LOG_V( "Queried parser for header 
data\n"); - // Update the pic size according to the parsed codec_data - pic_width_in_codec_data = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16; - pic_height_in_codec_data = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16; - mix_videoconfigparamsdec_set_picture_res (config_params, pic_width_in_codec_data, pic_height_in_codec_data); - - if (parent->picture_width == 0 || parent->picture_height == 0) - { - // Update picture resolution only if it is not set. The derived picture res from mbs may not be accurate. - parent->picture_width = pic_width_in_codec_data; - parent->picture_height = pic_height_in_codec_data; - } + mix_video_h264_update_config_params(mix, data); ret = mix_videofmt_h264_initialize_va(mix, data); if (ret != MIX_RESULT_SUCCESS) @@ -1246,6 +1337,8 @@ MIX_RESULT mix_videofmt_h264_decode( return MIX_RESULT_FAIL; } + decode_params->new_sequence = FALSE; + //From now on, we exit this function through cleanup: LOG_V( "Locking\n"); @@ -1262,7 +1355,8 @@ MIX_RESULT mix_videofmt_h264_decode( mix, bufin[i], ts, - discontinuity); + discontinuity, + decode_params); if (ret != MIX_RESULT_SUCCESS) { @@ -1470,7 +1564,7 @@ static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = ((MixVideoFrame *)self->last_decoded_frame)->frame_id; - LOG_V( "Reference frame not found, substituting %d\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); + LOG_V( "Reference frame not found, substituting %d\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); } else diff --git a/mix_video/src/mixvideoformat_h264.h b/mix_video/src/mixvideoformat_h264.h index 84bfc78..bcfe8bd 100644 --- a/mix_video/src/mixvideoformat_h264.h +++ b/mix_video/src/mixvideoformat_h264.h @@ -41,7 +41,7 @@ struct _MixVideoFormat_H264 { GHashTable *dpb_surface_table; #ifdef DECODER_ROBUSTNESS //Can improve which frame is used for this at a later time - MixVideoFrame *last_decoded_frame; //last surface decoded, to be used as reference frame when reference frames are missing + MixVideoFrame *last_decoded_frame; //last surface decoded, to be used as reference frame when reference frames are missing #endif }; @@ -96,7 +96,7 @@ MixVideoFormat_H264 *mix_videoformat_h264_ref(MixVideoFormat_H264 * mix); /* H.264 vmethods */ MIX_RESULT mix_videofmt_h264_getcaps(MixVideoFormat *mix, GString *msg); -MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, +MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, MixVideoConfigParamsDec * config_params, MixFrameManager * frame_mgr, MixBufferPool * input_buf_pool, @@ -116,12 +116,12 @@ MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix, MIX_RESULT mix_videofmt_h264_process_decode(MixVideoFormat *mix, - vbp_data_h264 *data, + vbp_data_h264 *data, guint64 timestamp, gboolean discontinuity); -MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix, +MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix, guint64 timestamp); diff --git a/mix_video/src/mixvideoformat_mp42.c b/mix_video/src/mixvideoformat_mp42.c index b11ab88..0ac6667 100644 --- a/mix_video/src/mixvideoformat_mp42.c +++ b/mix_video/src/mixvideoformat_mp42.c @@ -10,6 +10,8 @@ #include "mixvideolog.h" #include "mixvideoformat_mp42.h" +// Value of VOP type defined here follows MP4 spec, and has the same value of corresponding frame type +// defined in enumeration MixFrameType (except sprite (S)) enum { MP4_VOP_TYPE_I = 0, 
MP4_VOP_TYPE_P = 1, @@ -17,24 +19,6 @@ enum { MP4_VOP_TYPE_S = 3, }; -/* - * This is for divx packed stream - */ -typedef struct _PackedStream PackedStream; -struct _PackedStream { - vbp_picture_data_mp42 *picture_data; - MixBuffer *mix_buffer; -}; - -/* - * Clone and destroy vbp_picture_data_mp42 - */ -static vbp_picture_data_mp42 *mix_videoformat_mp42_clone_picture_data( - vbp_picture_data_mp42 *picture_data); -static void mix_videoformat_mp42_free_picture_data( - vbp_picture_data_mp42 *picture_data); -static void mix_videoformat_mp42_flush_packed_stream_queue( - GQueue *packed_stream_queue); /* The parent class. The pointer will be saved * in this class's initialization. The pointer @@ -57,8 +41,9 @@ static void mix_videoformat_mp42_init(MixVideoFormat_MP42 * self) { self->last_frame = NULL; self->last_vop_coding_type = -1; - - self->packed_stream_queue = NULL; + self->last_vop_time_increment = 0; + self->next_nvop_for_PB_frame = FALSE; + self->iq_matrix_buf_sent = FALSE; /* NOTE: we don't need to do this here. * This just demostrates how to access @@ -104,6 +89,7 @@ void mix_videoformat_mp42_finalize(GObject * obj) { MixVideoFormat *parent = NULL; gint32 vbp_ret = VBP_OK; MixVideoFormat_MP42 *self = NULL; + gint idx = 0; LOG_V("Begin\n"); @@ -127,32 +113,32 @@ void mix_videoformat_mp42_finalize(GObject * obj) { g_mutex_lock(parent->objectlock); /* unref reference frames */ - { - gint idx = 0; - for (idx = 0; idx < 2; idx++) { - if (self->reference_frames[idx] != NULL) { - mix_videoframe_unref(self->reference_frames[idx]); - self->reference_frames[idx] = NULL; - } + for (idx = 0; idx < 2; idx++) { + if (self->reference_frames[idx] != NULL) { + mix_videoframe_unref(self->reference_frames[idx]); + self->reference_frames[idx] = NULL; } } - + if (self->last_frame) + { + mix_videoframe_unref(self->last_frame); + self->last_frame = NULL; + } + self->next_nvop_for_PB_frame = FALSE; + self->iq_matrix_buf_sent = FALSE; /* Reset state */ parent->initialized = TRUE; - parent->parse_in_progress = FALSE; + parent->end_picture_pending = FALSE; parent->discontinuity_frame_in_progress = FALSE; parent->current_timestamp = (guint64)-1; /* Close the parser */ - vbp_ret = vbp_close(parent->parser_handle); - parent->parser_handle = NULL; - - if (self->packed_stream_queue) { - mix_videoformat_mp42_flush_packed_stream_queue(self->packed_stream_queue); - g_queue_free(self->packed_stream_queue); - } - self->packed_stream_queue = NULL; + if (parent->parser_handle) + { + vbp_ret = vbp_close(parent->parser_handle); + parent->parser_handle = NULL; + } g_mutex_unlock(parent->objectlock); @@ -183,938 +169,1097 @@ MIX_RESULT mix_videofmt_mp42_getcaps(MixVideoFormat *mix, GString *msg) { return MIX_RESULT_NOTIMPL; } -MIX_RESULT mix_videofmt_mp42_initialize(MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, MixSurfacePool ** surface_pool, - VADisplay va_display) { - uint32 vbp_ret = 0; - MIX_RESULT ret = MIX_RESULT_FAIL; - - vbp_data_mp42 *data = NULL; - MixVideoFormat *parent = NULL; - MixIOVec *header = NULL; - - VAProfile va_profile = VAProfileMPEG4AdvancedSimple; - VAConfigAttrib attrib; - - VAStatus va_ret = VA_STATUS_SUCCESS; - guint number_extra_surfaces = 0; - VASurfaceID *surfaces = NULL; - guint numSurfaces = 0; - - MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); - - if (mix == NULL || config_params == NULL || frame_mgr == NULL) { - return MIX_RESULT_NULL_PTR; - } - - if (!MIX_IS_VIDEOFORMAT_MP42(mix)) { - return 
MIX_RESULT_INVALID_PARAM; - } - - LOG_V("begin\n"); - - if (parent_class->initialize) { - ret = parent_class->initialize(mix, config_params, frame_mgr, - input_buf_pool, surface_pool, va_display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to initialize parent!\n"); - return ret; - } - } - - parent = MIX_VIDEOFORMAT(mix); - - g_mutex_lock(parent->objectlock); - - parent->initialized = FALSE; - - vbp_ret = vbp_open(VBP_MPEG4, &(parent->parser_handle)); - - if (vbp_ret != VBP_OK) { - LOG_E("Failed to call vbp_open()\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - /* - * avidemux doesn't pass codec_data, we need handle this. - */ - - LOG_V("Try to get header data from config_param\n"); - - ret = mix_videoconfigparamsdec_get_header(config_params, &header); - if (ret == MIX_RESULT_SUCCESS && header != NULL) { - - LOG_V("Found header data from config_param\n"); - vbp_ret = vbp_parse(parent->parser_handle, header->data, header->data_size, - TRUE); - - LOG_V("vbp_parse() returns 0x%x\n", vbp_ret); - - g_free(header->data); - g_free(header); - - if (!((vbp_ret == VBP_OK) || (vbp_ret == VBP_DONE))) { - LOG_E("Failed to call vbp_parse() to parse header data!\n"); - goto cleanup; - } - /* Get the header data and save */ - - LOG_V("Call vbp_query()\n"); - vbp_ret = vbp_query(parent->parser_handle, (void *) &data); - LOG_V("vbp_query() returns 0x%x\n", vbp_ret); - - if ((vbp_ret != VBP_OK) || (data == NULL)) { - LOG_E("Failed to call vbp_query() to query header data parsing result\n"); - goto cleanup; - } - - if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) { - va_profile = VAProfileMPEG4AdvancedSimple; - LOG_V("The profile is VAProfileMPEG4AdvancedSimple from header data\n"); - } else { - va_profile = VAProfileMPEG4Simple; - LOG_V("The profile is VAProfileMPEG4Simple from header data\n"); - } - } - - va_display = parent->va_display; - - /* We are requesting RT attributes */ - attrib.type = VAConfigAttribRTFormat; - - va_ret = vaGetConfigAttributes(va_display, va_profile, VAEntrypointVLD, - &attrib, 1); - if (va_ret != VA_STATUS_SUCCESS) { - LOG_E("Failed to call vaGetConfigAttributes()\n"); - goto cleanup; - } - - if ((attrib.value & VA_RT_FORMAT_YUV420) == 0) { - LOG_E("The attrib.value is wrong!\n"); - goto cleanup; - } - - va_ret = vaCreateConfig(va_display, va_profile, VAEntrypointVLD, &attrib, - 1, &(parent->va_config)); - - if (va_ret != VA_STATUS_SUCCESS) { - LOG_E("Failed to call vaCreateConfig()!\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, - &number_extra_surfaces); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to call mix_videoconfigparams_get_extra_surface_allocation()!\n"); - goto cleanup; - } - - parent->va_num_surfaces = number_extra_surfaces + 4; - if (parent->va_num_surfaces > MIX_VIDEO_MP42_SURFACE_NUM) { - parent->va_num_surfaces = MIX_VIDEO_MP42_SURFACE_NUM; - } - - numSurfaces = parent->va_num_surfaces; - - parent->va_surfaces = g_malloc(sizeof(VASurfaceID) * numSurfaces); - if (!parent->va_surfaces) { - LOG_E("Not enough memory to allocate surfaces!\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - surfaces = parent->va_surfaces; - - va_ret = vaCreateSurfaces(va_display, parent->picture_width, - parent->picture_height, VA_RT_FORMAT_YUV420, numSurfaces, - surfaces); - if (va_ret != VA_STATUS_SUCCESS) { - LOG_E("Failed to call vaCreateSurfaces()!\n"); - goto cleanup; - } +MIX_RESULT mix_videofmt_mp42_update_config_params( + MixVideoFormat *mix, + vbp_data_mp42 *data) +{ + 
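+    // Propagate the newly parsed VOL attributes into the cached config params
+    // (picture resolution, video range, color matrix, pixel aspect ratio) so
+    // the application can query the in-stream values.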
MixVideoFormat *parent = MIX_VIDEOFORMAT(mix);
+    //MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);
+
+    if (parent->picture_width == 0 ||
+        parent->picture_height == 0 ||
+        parent->picture_width < data->codec_data.video_object_layer_width ||
+        parent->picture_height < data->codec_data.video_object_layer_height)
+    {
+        parent->picture_width = data->codec_data.video_object_layer_width;
+        parent->picture_height = data->codec_data.video_object_layer_height;
+
+        mix_videoconfigparamsdec_set_picture_res(
+            mix->config_params,
+            parent->picture_width,
+            parent->picture_height);
+    }
+
+    // video_range defaults to 0, i.e. limited range (Y from 16 to 235).
+    mix_videoconfigparamsdec_set_video_range(mix->config_params, data->codec_data.video_range);
+
+    uint8 color_matrix;
+
+    switch (data->codec_data.matrix_coefficients)
+    {
+        case 1:
+            color_matrix = VA_SRC_BT709;
+            break;
+
+        // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
+        // SMPTE 170M/BT601
+        case 5:
+        case 6:
+            color_matrix = VA_SRC_BT601;
+            break;
+
+        default:
+            // unknown color matrix, set to 0 so color space flag will not be set.
+            color_matrix = 0;
+            break;
+    }
+    mix_videoconfigparamsdec_set_color_matrix(mix->config_params, color_matrix);
+
+    mix_videoconfigparamsdec_set_pixel_aspect_ratio(
+        mix->config_params,
+        data->codec_data.par_width,
+        data->codec_data.par_height);
+
+    return MIX_RESULT_SUCCESS;
+}
-	parent->surfacepool = mix_surfacepool_new();
-	if (parent->surfacepool == NULL) {
-		LOG_E("Not enough memory to create surface pool!\n");
-		ret = MIX_RESULT_NO_MEMORY;
-		goto cleanup;
-	}
-	*surface_pool = parent->surfacepool;
+MIX_RESULT mix_videofmt_mp42_initialize_va(
+    MixVideoFormat *mix,
+    vbp_data_mp42 *data)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    VAStatus vret = VA_STATUS_SUCCESS;
+    VAConfigAttrib attrib;
+    VAProfile va_profile;
+    MixVideoFormat *parent = MIX_VIDEOFORMAT(mix);
+    //MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);
+
+    LOG_V( "Begin\n");
+
+    if (parent->va_initialized)
+    {
+        LOG_W("va already initialized.\n");
+        return MIX_RESULT_SUCCESS;
+    }
+
+    //We are requesting RT attributes
+    attrib.type = VAConfigAttribRTFormat;
+    attrib.value = VA_RT_FORMAT_YUV420;
+
+    //Initialize and save the VA config ID
+    if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0)
+    {
+        va_profile = VAProfileMPEG4AdvancedSimple;
+    }
+    else
+    {
+        va_profile = VAProfileMPEG4Simple;
+    }
+
+    vret = vaCreateConfig(
+        parent->va_display,
+        va_profile,
+        VAEntrypointVLD,
+        &attrib,
+        1,
+        &(parent->va_config));
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E("vaCreateConfig failed\n");
+        goto cleanup;
+    }
+
+    // add 1 more surface for packed frame (PB frame), and another one
+    // for partial frame handling
+    parent->va_num_surfaces = parent->extra_surfaces + 4 + 1 + 1;
+    //if (parent->va_num_surfaces > MIX_VIDEO_MP42_SURFACE_NUM)
+    //    parent->va_num_surfaces = MIX_VIDEO_MP42_SURFACE_NUM;
+
+    parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*parent->va_num_surfaces);
+    if (parent->va_surfaces == NULL)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "parent->va_surfaces == NULL. 
\n"); + goto cleanup; + } + + vret = vaCreateSurfaces( + parent->va_display, + parent->picture_width, + parent->picture_height, + VA_RT_FORMAT_YUV420, + parent->va_num_surfaces, + parent->va_surfaces); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error allocating surfaces\n"); + goto cleanup; + } + + LOG_V( "Created %d libva surfaces\n", parent->va_num_surfaces); + + //Initialize the surface pool + ret = mix_surfacepool_initialize( + parent->surfacepool, + parent->va_surfaces, + parent->va_num_surfaces, + parent->va_display); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing. + default: + ret = MIX_RESULT_ALREADY_INIT; + LOG_E( "Error init surface pool\n"); + goto cleanup; + break; + } + + //Initialize and save the VA context ID + //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 + vret = vaCreateContext( + parent->va_display, + parent->va_config, + parent->picture_width, + parent->picture_height, + 0, + parent->va_surfaces, + parent->va_num_surfaces, + &(parent->va_context)); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + goto cleanup; + } + + parent->va_initialized = TRUE; + +cleanup: + /* nothing to clean up */ + + return ret; - ret = mix_surfacepool_initialize(parent->surfacepool, surfaces, - numSurfaces, va_display); +} - /* Initialize and save the VA context ID - * Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 - */ - va_ret = vaCreateContext(va_display, parent->va_config, - parent->picture_width, parent->picture_height, 0, surfaces, - numSurfaces, &(parent->va_context)); +MIX_RESULT mix_videofmt_mp42_decode_a_slice( + MixVideoFormat *mix, + vbp_data_mp42* data, + vbp_picture_data_mp42* pic_data) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + VADisplay vadisplay = NULL; + VAContextID vacontext; + guint buffer_id_cnt = 0; + gint frame_type = -1; + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID buffer_ids[4]; + MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); + VAPictureParameterBufferMPEG4* pic_params = &(pic_data->picture_param); + vbp_slice_data_mp42* slice_data = &(pic_data->slice_data); + VASliceParameterBufferMPEG4* slice_params = &(slice_data->slice_param); + + LOG_V( "Begin\n"); + + vadisplay = mix->va_display; + vacontext = mix->va_context; + + if (!mix->end_picture_pending) + { + LOG_E("picture decoder is not started!\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + // update reference pictures + frame_type = pic_params->vop_fields.bits.vop_coding_type; + + switch (frame_type) { + case MP4_VOP_TYPE_I: + pic_params->forward_reference_picture = VA_INVALID_SURFACE; + pic_params->backward_reference_picture = VA_INVALID_SURFACE; + break; + + case MP4_VOP_TYPE_P: + pic_params-> forward_reference_picture + = self->reference_frames[0]->frame_id; + pic_params-> backward_reference_picture = VA_INVALID_SURFACE; + break; + + case MP4_VOP_TYPE_B: + pic_params->vop_fields.bits.backward_reference_vop_coding_type + = self->last_vop_coding_type; + pic_params->forward_reference_picture + = self->reference_frames[1]->frame_id; + pic_params->backward_reference_picture + = self->reference_frames[0]->frame_id; + break; + + case MP4_VOP_TYPE_S: + pic_params-> forward_reference_picture + = 
self->reference_frames[0]->frame_id; + pic_params-> backward_reference_picture = VA_INVALID_SURFACE; + break; - if (va_ret != VA_STATUS_SUCCESS) { - LOG_E("Failed to call vaCreateContext()!\n"); + default: + LOG_W("default, Will never reach here\n"); ret = MIX_RESULT_FAIL; goto cleanup; - } - - /* - * Packed stream queue - */ + break; - self->packed_stream_queue = g_queue_new(); - if (!self->packed_stream_queue) { - LOG_E("Failed to crate packed stream queue!\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; } + + //Now for slices + + LOG_V( "Creating libva picture parameter buffer\n"); + + //First the picture parameter buffer + vret = vaCreateBuffer( + vadisplay, + vacontext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferMPEG4), + 1, + pic_params, + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto cleanup; + } + + buffer_id_cnt++; + + if (pic_params->vol_fields.bits.quant_type && self->iq_matrix_buf_sent == FALSE) + { + LOG_V( "Creating libva IQMatrix buffer\n"); + // only send IQ matrix for the first slice in the picture + vret = vaCreateBuffer( + vadisplay, + vacontext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferMPEG4), + 1, + &(data->iq_matrix_buffer), + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto cleanup; + } + self->iq_matrix_buf_sent = TRUE; + buffer_id_cnt++; + } + + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferMPEG4), + 1, + slice_params, + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto cleanup; + } + + buffer_id_cnt++; + + + //Do slice data + + //slice data buffer pointer + //Note that this is the original data buffer ptr; + // offset to the actual slice data is provided in + // slice_data_offset in VASliceParameterBufferMP42 + + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceDataBufferType, + slice_data->slice_size, //size + 1, //num_elements + slice_data->buffer_addr + slice_data->slice_offset, + &buffer_ids[buffer_id_cnt]); + + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto cleanup; + } + + + LOG_V( "Calling vaRenderPicture\n"); + + //Render the picture + vret = vaRenderPicture( + vadisplay, + vacontext, + buffer_ids, + buffer_id_cnt); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaRenderPicture\n"); + goto cleanup; + } + + +cleanup: + LOG_V( "End\n"); + + return ret; - self->last_frame = NULL; - self->last_vop_coding_type = -1; - parent->initialized = FALSE; - ret = MIX_RESULT_SUCCESS; - - cleanup: - - g_mutex_unlock(parent->objectlock); - - LOG_V("End\n"); - - return ret; } -MIX_RESULT mix_videofmt_mp42_decode(MixVideoFormat *mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params) { - uint32 vbp_ret = 0; - MixVideoFormat *parent = NULL; - MIX_RESULT ret = MIX_RESULT_FAIL; - guint64 ts = 0; - vbp_data_mp42 *data = NULL; - gboolean discontinuity = FALSE; - MixInputBufferEntry *bufentry = NULL; - gint i = 0; - - LOG_V("Begin\n"); - - if (mix == NULL || bufin == NULL || decode_params == NULL) { - return MIX_RESULT_NULL_PTR; - } - - if 
(!MIX_IS_VIDEOFORMAT_MP42(mix)) { - return MIX_RESULT_INVALID_PARAM; - } - - parent = MIX_VIDEOFORMAT(mix); - - g_mutex_lock(parent->objectlock); - - ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get timestamp\n"); - goto cleanup; - } - - LOG_I("ts after mix_videodecodeparams_get_timestamp() = %"G_GINT64_FORMAT"\n", ts); - - ret - = mix_videodecodeparams_get_discontinuity(decode_params, - &discontinuity); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get discontinuity\n"); - goto cleanup; - } - - /* If this is a new frame and we haven't retrieved parser - * workload data from previous frame yet, do so - */ - if ((ts != parent->current_timestamp) && (parent->parse_in_progress)) { - - LOG_V("timestamp changed and parsing is still in progress\n"); - - /* this is new data and the old data parsing is not complete, continue - * to parse the old data - */ - vbp_ret = vbp_query(parent->parser_handle, (void *) &data); - LOG_V("vbp_query() returns 0x%x\n", vbp_ret); - - if ((vbp_ret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E("vbp_ret != VBP_OK || data == NULL\n"); - goto cleanup; - } - - ret = mix_videofmt_mp42_process_decode(mix, data, - parent->current_timestamp, - parent->discontinuity_frame_in_progress); - - if (ret != MIX_RESULT_SUCCESS) { - /* We log this but need to process - * the new frame data, so do not return - */ - LOG_W("process_decode failed.\n"); - } - - /* we are done parsing for old data */ - parent->parse_in_progress = FALSE; - } - - parent->current_timestamp = ts; - parent->discontinuity_frame_in_progress = discontinuity; - - /* we parse data buffer one by one */ - for (i = 0; i < bufincnt; i++) { - - LOG_V( - "Calling parse for current frame, parse handle %d, buf %x, size %d\n", - (int) parent->parser_handle, (guint) bufin[i]->data, - bufin[i]->size); - - vbp_ret = vbp_parse(parent->parser_handle, bufin[i]->data, - bufin[i]->size, FALSE); - - LOG_V("vbp_parse() returns 0x%x\n", vbp_ret); - - /* The parser failed to parse */ - if (vbp_ret != VBP_DONE && vbp_ret != VBP_OK) { - LOG_E("vbp_parse() ret = %d\n", vbp_ret); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V("vbp_parse() ret = %d\n", vbp_ret); - - if (vbp_ret == VBP_OK || vbp_ret == VBP_DONE) { - - LOG_V("Now, parsing is done (VBP_DONE)!\n"); - - vbp_ret = vbp_query(parent->parser_handle, (void *) &data); - LOG_V("vbp_query() returns 0x%x\n", vbp_ret); - - if ((vbp_ret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - /* Increase the ref count of this input buffer */ - mix_buffer_ref(bufin[i]); - - /* Create a new MixInputBufferEntry - * TODO: make this from a pool later - */ - bufentry = g_malloc(sizeof(MixInputBufferEntry)); - if (bufentry == NULL) { - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } +MIX_RESULT mix_videofmt_mp42_decode_end( + MixVideoFormat *mix, + gboolean drop_picture) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + MixVideoFormat* parent = MIX_VIDEOFORMAT(mix); + //MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); + + if (!parent->end_picture_pending) + { + if (parent->video_frame) + { + ret = MIX_RESULT_FAIL; + LOG_E("Unexpected: video_frame is not unreferenced.\n"); + } + goto cleanup; + } + + if (parent->video_frame == NULL) + { + ret = MIX_RESULT_FAIL; + LOG_E("Unexpected: video_frame has been unreferenced.\n"); + goto cleanup; + + } + vret = vaEndPicture(parent->va_display, parent->va_context); + + if (vret != 
VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_DROPFRAME; + LOG_E( "Video driver returned error from vaEndPicture\n"); + goto cleanup; + } + +#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ + + LOG_V( "Calling vaSyncSurface\n"); + + //Decode the picture + vret = vaSyncSurface(vadisplay, surface); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaSyncSurface\n"); + goto cleanup; + } +#endif - bufentry->buf = bufin[i]; - bufentry->timestamp = ts; + if (drop_picture) + { + // we are asked to drop this decoded picture + mix_videoframe_unref(parent->video_frame); + parent->video_frame = NULL; + goto cleanup; + } + + //Enqueue the decoded frame using frame manager + ret = mix_framemanager_enqueue(parent->framemgr, parent->video_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error enqueuing frame object\n"); + goto cleanup; + } + else + { + // video frame is passed to frame manager + parent->video_frame = NULL; + } + +cleanup: + if (parent->video_frame) + { + /* this always indicates an error */ + mix_videoframe_unref(parent->video_frame); + parent->video_frame = NULL; + } + parent->end_picture_pending = FALSE; + return ret; - LOG_I("bufentry->buf = %x bufentry->timestamp FOR VBP_DONE = %"G_GINT64_FORMAT"\n", bufentry->buf, bufentry->timestamp); +} - /* Enqueue this input buffer */ - g_queue_push_tail(parent->inputbufqueue, (gpointer) bufentry); - /* process and decode data */ - ret - = mix_videofmt_mp42_process_decode(mix, data, ts, - discontinuity); +MIX_RESULT mix_videofmt_mp42_decode_continue( + MixVideoFormat *mix, + vbp_data_mp42 *data) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + int i; + gint frame_type = -1; + vbp_picture_data_mp42* pic_data = NULL; + VAPictureParameterBufferMPEG4* pic_params = NULL; + MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); - if (ret != MIX_RESULT_SUCCESS) { - /* We log this but continue since we need - * to complete our processing - */ - LOG_W("process_decode failed.\n"); - } + /* + Packed Frame Assumption: - LOG_V("Called process and decode for current frame\n"); + 1. In one packed frame, there's only one P or I frame and only one B frame. + 2. In packed frame, there's no skipped frame (vop_coded = 0) + 3. For one packed frame, there will be one N-VOP frame to follow the packed frame (may not immediately). + 4. N-VOP frame is the frame with vop_coded = 0. + 5. The timestamp of N-VOP frame will be used for P or I frame in the packed frame - parent->parse_in_progress = FALSE; - } -#if 0 - /* - * The DHG parser checks for next_sc, if next_sc is a start code, it thinks the current parsing is done: VBP_DONE. - * For our situtation, this not the case. The start code is always begin with the gstbuffer. At the end of frame, - * the start code is never found. 
- */ - - else if (vbp_ret == VBP_OK) { - - LOG_V("Now, parsing is not done (VBP_OK)!\n"); - - LOG_V( - "Enqueuing buffer and going on to next (if any) for this frame\n"); - - /* Increase the ref count of this input buffer */ - mix_buffer_ref(bufin[i]); - - /* Create a new MixInputBufferEntry - * TODO make this from a pool later - */ - bufentry = g_malloc(sizeof(MixInputBufferEntry)); - if (bufentry == NULL) { - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - bufentry->buf = bufin[i]; - bufentry->timestamp = ts; - LOG_I("bufentry->buf = %x bufentry->timestamp FOR VBP_OK = %"G_GINT64_FORMAT"\n", bufentry->buf, bufentry->timestamp); - - /* Enqueue this input buffer */ - g_queue_push_tail(parent->inputbufqueue, (gpointer) bufentry); - parent->parse_in_progress = TRUE; - } -#endif - } + I, P, {P, B}, B, N, P, N, I, ... + I, P, {P, B}, N, P, N, I, ... - cleanup: + The first N is placeholder for P frame in the packed frame + The second N is a skipped frame + */ - g_mutex_unlock(parent->objectlock); + pic_data = data->picture_data; + for (i = 0; i < data->number_picture_data; i++, pic_data = pic_data->next_picture_data) + { + pic_params = &(pic_data->picture_param); + frame_type = pic_params->vop_fields.bits.vop_coding_type; + if (frame_type == MP4_VOP_TYPE_S && pic_params->no_of_sprite_warping_points > 1) + { + // hardware only support up to one warping point (stationary or translation) + LOG_E("sprite with %d warping points is not supported by HW.\n", + pic_params->no_of_sprite_warping_points); + return MIX_RESULT_DROPFRAME; + } + + if (pic_data->vop_coded == 0) + { + // this should never happen + LOG_E("VOP is not coded.\n"); + return MIX_RESULT_DROPFRAME; + } + + if (pic_data->new_picture_flag == 1 || mix->end_picture_pending == FALSE) + { + if (pic_data->new_picture_flag == 0) + { + LOG_W("First slice of picture is lost!\n"); + } + + gulong surface = 0; + if (mix->end_picture_pending) + { + // this indicates the start of a new frame in the packed frame + LOG_V("packed frame is found.\n"); + + // Update timestamp for packed frame as timestamp is for the B frame! 
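+                // Worked example with illustrative numbers: at a
+                // vop_time_increment_resolution of 30 ticks/sec, a last P
+                // increment of 20 and a B increment of 10 give
+                // ts_inc = (20 - 10 + 30) % 30 = 10 ticks, i.e.
+                // 10 * 1e9 / 30 ns added to the P frame's timestamp below.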
+            if (mix->video_frame && pic_params->vop_time_increment_resolution)
+            {
+                guint64 ts, ts_inc;
+                mix_videoframe_get_timestamp(mix->video_frame, &ts);
+                ts_inc = self->last_vop_time_increment - pic_data->vop_time_increment +
+                    pic_params->vop_time_increment_resolution;
+                ts_inc = ts_inc % pic_params->vop_time_increment_resolution;
+                LOG_V("timestamp is incremented by %"G_GUINT64_FORMAT" at %d resolution.\n",
+                    ts_inc, pic_params->vop_time_increment_resolution);
+                // convert to nanoseconds
+                ts_inc = ts_inc * 1e9 / pic_params->vop_time_increment_resolution;
+                LOG_V("timestamp of P frame in packed frame is updated from %"G_GINT64_FORMAT" to %"G_GUINT64_FORMAT".\n",
+                    ts, ts + ts_inc);
+
+                ts += ts_inc;
+                mix_videoframe_set_timestamp(mix->video_frame, ts);
+            }
+
+            mix_videofmt_mp42_decode_end(mix, FALSE);
+            self->next_nvop_for_PB_frame = TRUE;
+        }
+        if (self->next_nvop_for_PB_frame == TRUE && frame_type != MP4_VOP_TYPE_B)
+        {
+            LOG_E("The second frame in the packed frame is not a B frame.\n");
+            self->next_nvop_for_PB_frame = FALSE;
+            return MIX_RESULT_DROPFRAME;
+        }
+
+        //Get a frame from the surface pool
+        ret = mix_surfacepool_get(mix->surfacepool, &(mix->video_frame));
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E( "Error getting frame from surfacepool\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        /* the following calls will always succeed */
+
+        // set frame type
+        if (frame_type == MP4_VOP_TYPE_S)
+        {
+            // sprite is treated as P frame in the display order
+            mix_videoframe_set_frame_type(mix->video_frame, MP4_VOP_TYPE_P);
+        }
+        else
+        {
+            mix_videoframe_set_frame_type(mix->video_frame, frame_type);
+        }
+
+        // set frame structure
+        if (pic_data->picture_param.vol_fields.bits.interlaced)
+        {
+            // only MPEG-4 studio profile can have field coding. All other profiles
+            // use frame coding only, i.e., no field VOP. 
(see vop_structure in MP4 spec) + mix_videoframe_set_frame_structure( + mix->video_frame, + VA_BOTTOM_FIELD | VA_TOP_FIELD); + + LOG_W("Interlaced content, set frame structure to 3 (TOP | BOTTOM field) !\n"); + } + else + { + mix_videoframe_set_frame_structure(mix->video_frame, VA_FRAME_PICTURE); + } + + //Set the discontinuity flag + mix_videoframe_set_discontinuity( + mix->video_frame, + mix->discontinuity_frame_in_progress); + + //Set the timestamp + mix_videoframe_set_timestamp(mix->video_frame, mix->current_timestamp); + + //Get our surface ID from the frame object + ret = mix_videoframe_get_frame_id(mix->video_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error getting surface ID from frame object\n"); + goto cleanup; + } + + /* If I or P frame, update the reference array */ + if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) + { + LOG_V("Updating forward/backward references for libva\n"); + + self->last_vop_coding_type = frame_type; + self->last_vop_time_increment = pic_data->vop_time_increment; + mix_videofmt_mp42_handle_ref_frames(mix, frame_type, mix->video_frame); + if (self->last_frame != NULL) + { + mix_videoframe_unref(self->last_frame); + } + self->last_frame = mix->video_frame; + mix_videoframe_ref(self->last_frame); + } + + //Now we can begin the picture + vret = vaBeginPicture(mix->va_display, mix->va_context, surface); + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaBeginPicture\n"); + goto cleanup; + } + + // vaBeginPicture needs a matching vaEndPicture + mix->end_picture_pending = TRUE; + self->iq_matrix_buf_sent = FALSE; + } + + + ret = mix_videofmt_mp42_decode_a_slice(mix, data, pic_data); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "mix_videofmt_mp42_decode_a_slice failed, error = %#X.", ret); + goto cleanup; + } + } + +cleanup: + // nothing to cleanup; + + return ret; +} - LOG_V("End\n"); +MIX_RESULT mix_videofmt_mp42_decode_begin( + MixVideoFormat *mix, + vbp_data_mp42* data) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + gint frame_type = -1; + VAPictureParameterBufferMPEG4* pic_params = NULL; + MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); + vbp_picture_data_mp42 *pic_data = NULL; + + pic_data = data->picture_data; + pic_params = &(pic_data->picture_param); + frame_type = pic_params->vop_fields.bits.vop_coding_type; + + if (self->next_nvop_for_PB_frame) + { + // if we are waiting for n-vop for packed frame, and the new frame is coded, the coding type + // of this frame must be B. + // for example: {PB} B N P B B P... + if (pic_data->vop_coded == 1 && frame_type != MP4_VOP_TYPE_B) + { + LOG_E("Invalid coding type while waiting for n-vop for packed frame.\n"); + // timestamp of P frame in the queue is not correct + mix_framemanager_flush(mix->framemgr); + self->next_nvop_for_PB_frame = FALSE; + } + } + + if (pic_data->vop_coded == 0) + { + if (self->last_frame == NULL) + { + LOG_E("The forward reference frame is NULL, couldn't reconstruct skipped frame.\n"); + mix_framemanager_flush(mix->framemgr); + self->next_nvop_for_PB_frame = FALSE; + return MIX_RESULT_DROPFRAME; + } + + if (self->next_nvop_for_PB_frame) + { + // P frame is already in queue, just need to update time stamp. 
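+            // (Per packed-frame assumption 5 above, the N-VOP's timestamp is
+            // the display time of the P frame it stands in for, so
+            // re-stamping last_frame is all that is needed.)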
+ mix_videoframe_set_timestamp(self->last_frame, mix->current_timestamp); + self->next_nvop_for_PB_frame = FALSE; + } + else + { + // handle skipped frame + MixVideoFrame *skip_frame = NULL; + gulong frame_id = VA_INVALID_SURFACE; + + skip_frame = mix_videoframe_new(); + ret = mix_videoframe_set_is_skipped(skip_frame, TRUE); + ret = mix_videoframe_get_frame_id(self->last_frame, &frame_id); + ret = mix_videoframe_set_frame_id(skip_frame, frame_id); + ret = mix_videoframe_set_frame_type(skip_frame, MP4_VOP_TYPE_P); + ret = mix_videoframe_set_real_frame(skip_frame, self->last_frame); + // add a reference as skip_frame holds the last_frame. + mix_videoframe_ref(self->last_frame); + ret = mix_videoframe_set_timestamp(skip_frame, mix->current_timestamp); + ret = mix_videoframe_set_discontinuity(skip_frame, FALSE); + + LOG_V("Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n", + (guint)skip_frame, (guint)frame_id, mix->current_timestamp); + + /* Enqueue the skipped frame using frame manager */ + ret = mix_framemanager_enqueue(mix->framemgr, skip_frame); + } + + if (data->number_picture_data > 1) + { + LOG_E("Unexpected to have more picture data following a not-coded VOP.\n"); + //picture data is thrown away. No issue if picture data is for N-VOP. if picture data is for + // coded picture, a frame is lost. + } + return MIX_RESULT_SUCCESS; + } + else + { + /* + * Check for B frames after a seek + * We need to have both reference frames in hand before we can decode a B frame + * If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME + */ + if (frame_type == MP4_VOP_TYPE_B) + { + if (self->reference_frames[0] == NULL || + self->reference_frames[1] == NULL) + { + LOG_W("Insufficient reference frames for B frame\n"); + return MIX_RESULT_DROPFRAME; + } + } + else if (frame_type == MP4_VOP_TYPE_P || frame_type == MP4_VOP_TYPE_S) + { + if (self->reference_frames[0] == NULL) + { + LOG_W("Reference frames for P/S frame is missing\n"); + return MIX_RESULT_DROPFRAME; + } + } + + // all sanity check passes, continue decoding through mix_videofmt_mp42_decode_continue + ret = mix_videofmt_mp42_decode_continue(mix, data); + } + return ret; + } -MIX_RESULT mix_videofmt_mp42_process_decode(MixVideoFormat *mix, - vbp_data_mp42 *data, guint64 timestamp, gboolean discontinuity) { +MIX_RESULT mix_videofmt_mp42_decode_a_buffer( + MixVideoFormat *mix, + MixBuffer * bufin, + guint64 ts, + gboolean discontinuity) +{ + uint32 pret = 0; + MixVideoFormat *parent = NULL; MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus va_ret = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; - VAContextID va_context; - - MixVideoFormat_MP42 *self = NULL; - vbp_picture_data_mp42 *picture_data = NULL; - VAPictureParameterBufferMPEG4 *picture_param = NULL; - VAIQMatrixBufferMPEG4 *iq_matrix_buffer = NULL; - vbp_slice_data_mp42 *slice_data = NULL; - VASliceParameterBufferMPEG4 *slice_param = NULL; + vbp_data_mp42 *data = NULL; - gint frame_type = -1; - guint buffer_id_number = 0; - guint buffer_id_cnt = 0; - VABufferID *buffer_ids = NULL; - MixVideoFrame *frame = NULL; + LOG_V( "Begin\n"); + + parent = MIX_VIDEOFORMAT(mix); + + pret = vbp_parse(parent->parser_handle, + bufin->data, + bufin->size, + FALSE); + + if (pret != VBP_OK) + { + ret = MIX_RESULT_DROPFRAME; + LOG_E( "vbp_parse failed.\n"); + goto cleanup; + } + else + { + LOG_V("vbp_parse succeeded.\n"); + } + + //query for data + pret = vbp_query(parent->parser_handle, (void *) &data); + + if ((pret != VBP_OK) || (data == NULL)) + { + // never 
happen! + ret = MIX_RESULT_FAIL; + LOG_E( "vbp_query failed.\n"); + goto cleanup; + } + else + { + LOG_V("vbp_query succeeded.\n"); + } + + if (parent->va_initialized == FALSE) + { + mix_videofmt_mp42_update_config_params(parent, data); + + LOG_V("try initializing VA...\n"); + ret = mix_videofmt_mp42_initialize_va(parent, data); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_V("mix_videofmt_mp42_initialize_va failed.\n"); + goto cleanup; + } + } + + // check if any slice is parsed, we may just receive configuration data + if (data->number_picture_data == 0) + { + ret = MIX_RESULT_SUCCESS; + LOG_V("slice is not available.\n"); + goto cleanup; + } + + guint64 last_ts = parent->current_timestamp; + parent->current_timestamp = ts; + parent->discontinuity_frame_in_progress = discontinuity; + + if (last_ts != ts) + { + // finish decoding the last frame + ret = mix_videofmt_mp42_decode_end(parent, FALSE); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_V("mix_videofmt_mp42_decode_end failed.\n"); + goto cleanup; + } + + // start decoding a new frame + ret = mix_videofmt_mp42_decode_begin(parent, data); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_V("mix_videofmt_mp42_decode_begin failed.\n"); + goto cleanup; + } + } + else + { + ret = mix_videofmt_mp42_decode_continue(parent, data); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_V("mix_videofmt_mp42_decode_continue failed.\n"); + goto cleanup; + } + } + +cleanup: + + LOG_V( "End\n"); - gint idx = 0, jdx = 0; - gulong surface = 0; + return ret; +} - MixBuffer *mix_buffer = NULL; - gboolean is_from_queued_data = FALSE; - LOG_V("Begin\n"); - if ((mix == NULL) || (data == NULL)) { +MIX_RESULT mix_videofmt_mp42_initialize( + MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display ) +{ + uint32 pret = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + enum _vbp_parser_type ptype = VBP_MPEG4; + vbp_data_mp42 *data = NULL; + MixVideoFormat *parent = NULL; + MixIOVec *header = NULL; + + if (mix == NULL || config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL) + { + LOG_E( "NUll pointer passed in\n"); return MIX_RESULT_NULL_PTR; } - if (!MIX_IS_VIDEOFORMAT_MP42(mix)) { - return MIX_RESULT_INVALID_PARAM; - } + LOG_V( "Begin\n"); - self = MIX_VIDEOFORMAT_MP42(mix); + /* Chainup parent method. */ - LOG_V("data->number_pictures = %d\n", data->number_pictures); + if (parent_class->initialize) { + ret = parent_class->initialize(mix, config_params, + frame_mgr, input_buf_pool, surface_pool, + va_display); + } - if (data->number_pictures == 0) { - LOG_W("data->number_pictures == 0\n"); - mix_videofmt_mp42_release_input_buffers(mix, timestamp); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error initializing\n"); return ret; } - is_from_queued_data = FALSE; - - /* Do we have packed frames? */ - if (data->number_pictures > 1) { - - /* - - Assumption: - - 1. In one packed frame, there's only one P or I frame and the - reference frame will be the first one in the packed frame - 2. In packed frame, there's no skipped frame(vop_coded = 0) - 3. In one packed frame, if there're n B frames, there will be - n N-VOP frames to follow the packed frame. - The timestamp of each N-VOP frame will be used for each B frames - in the packed frame - 4. N-VOP frame is the frame with vop_coded = 0. - - {P, B, B, B }, N, N, N, P, P, P, I, ... 
- - */ - - MixInputBufferEntry *bufentry = NULL; - PackedStream *packed_stream = NULL; - vbp_picture_data_mp42 *cloned_picture_data = NULL; - - LOG_V("This is packed frame\n"); - - /* - * Is the packed_frame_queue empty? If not, how come - * a packed frame can follow another packed frame without - * necessary number of N-VOP between them? - */ - - if (!g_queue_is_empty(self->packed_stream_queue)) { - ret = MIX_RESULT_DROPFRAME; - LOG_E("The previous packed frame is not fully processed yet!\n"); - goto cleanup; - } - - /* Packed frame shall be something like this {P, B, B, B, ... B } */ - for (idx = 0; idx < data->number_pictures; idx++) { - picture_data = &(data->picture_data[idx]); - picture_param = &(picture_data->picture_param); - frame_type = picture_param->vop_fields.bits.vop_coding_type; - - /* Is the first frame in the packed frames a reference frame? */ - if (idx == 0 && frame_type != MP4_VOP_TYPE_I && frame_type - != MP4_VOP_TYPE_P) { - ret = MIX_RESULT_DROPFRAME;; - LOG_E("The first frame in packed frame is not I or B\n"); - goto cleanup; - } - - if (idx != 0 && frame_type != MP4_VOP_TYPE_B) { - ret = MIX_RESULT_DROPFRAME;; - LOG_E("The frame other than the first one in packed frame is not B\n"); - goto cleanup; - } - - if (picture_data->vop_coded == 0) { - ret = MIX_RESULT_DROPFRAME; - LOG_E("In packed frame, there's unexpected skipped frame\n"); - goto cleanup; - } - } - - LOG_V("The packed frame looks valid\n"); - - /* Okay, the packed-frame looks ok. Now, we enqueue all the B frames */ - bufentry - = (MixInputBufferEntry *) g_queue_peek_head(mix->inputbufqueue); - if (bufentry == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E("There's data in in inputbufqueue\n"); - goto cleanup; - } - - LOG_V("Enqueue all B frames in the packed frame\n"); - - mix_buffer = bufentry->buf; - for (idx = 1; idx < data->number_pictures; idx++) { - picture_data = &(data->picture_data[idx]); - cloned_picture_data = mix_videoformat_mp42_clone_picture_data( - picture_data); - if (!cloned_picture_data) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to allocate memory for cloned picture_data\n"); - goto cleanup; - } - - packed_stream = g_malloc(sizeof(PackedStream)); - if (packed_stream == NULL) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to allocate memory for packed_stream\n"); - goto cleanup; - } - - packed_stream->mix_buffer = mix_buffer_ref(mix_buffer); - packed_stream->picture_data = cloned_picture_data; - - g_queue_push_tail(self->packed_stream_queue, - (gpointer) packed_stream); - } - - LOG_V("Prepare to decode the first frame in the packed frame\n"); - - /* we are going to process the firs frame */ - picture_data = &(data->picture_data[0]); - - } else { + if (!MIX_IS_VIDEOFORMAT_MP42(mix)) + return MIX_RESULT_INVALID_PARAM; - LOG_V("This is a single frame\n"); + parent = MIX_VIDEOFORMAT(mix); + //MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); - /* Okay, we only have one frame */ - if (g_queue_is_empty(self->packed_stream_queue)) { - /* If the packed_stream_queue is empty, everything is fine */ - picture_data = &(data->picture_data[0]); + LOG_V( "Locking\n"); + //From now on, we exit this function through cleanup: + g_mutex_lock(parent->objectlock); - LOG_V("There's no packed frame not processed yet\n"); + parent->surfacepool = mix_surfacepool_new(); + *surface_pool = parent->surfacepool; - } else { - /* The packed_stream_queue is not empty, is this frame N-VOP? 
*/ - picture_data = &(data->picture_data[0]); - if (picture_data->vop_coded != 0) { - - LOG_V("The packed frame queue is not empty, we will flush it\n"); - - /* - * Unexpected! We flush the packed_stream_queue and begin to process the - * current frame if it is not a B frame - */ - mix_videoformat_mp42_flush_packed_stream_queue( - self->packed_stream_queue); - - picture_param = &(picture_data->picture_param); - frame_type = picture_param->vop_fields.bits.vop_coding_type; - - if (frame_type == MP4_VOP_TYPE_B) { - ret = MIX_RESULT_DROPFRAME; - LOG_E("The frame right after packed frame is B frame!\n"); - goto cleanup; - } - - } else { - /* This is N-VOP, process B frame from the packed_stream_queue */ - PackedStream *packed_stream = NULL; - - LOG_V("N-VOP found, we ignore it and start to process the B frame from the packed frame queue\n"); - - packed_stream = (PackedStream *) g_queue_pop_head( - self->packed_stream_queue); - picture_data = packed_stream->picture_data; - mix_buffer = packed_stream->mix_buffer; - g_free(packed_stream); - is_from_queued_data = TRUE; - } - } + if (parent->surfacepool == NULL) + { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "parent->surfacepool == NULL.\n"); + goto cleanup; } - picture_param = &(picture_data->picture_param); - iq_matrix_buffer = &(picture_data->iq_matrix_buffer); + ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, + &parent->extra_surfaces); - if (picture_param == NULL) { - ret = MIX_RESULT_NULL_PTR; - LOG_E("picture_param == NULL\n"); - goto cleanup; - } + if (ret != MIX_RESULT_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get extra surface allocation setting\n"); + goto cleanup; + } - /* If the frame type is not I, P or B */ - frame_type = picture_param->vop_fields.bits.vop_coding_type; - if (frame_type != MP4_VOP_TYPE_I && frame_type != MP4_VOP_TYPE_P - && frame_type != MP4_VOP_TYPE_B) { + //Load the bitstream parser + pret = vbp_open(ptype, &(parent->parser_handle)); + + if (!(pret == VBP_OK)) + { ret = MIX_RESULT_FAIL; - LOG_E("frame_type is not I, P or B. frame_type = %d\n", frame_type); + LOG_E( "Error opening parser\n"); goto cleanup; } + LOG_V( "Opened parser\n"); - /* - * This is a skipped frame (vop_coded = 0) - * Please note that this is not a N-VOP (DivX). - */ - if (picture_data->vop_coded == 0) { - MixVideoFrame *skip_frame = NULL; - gulong frame_id = VA_INVALID_SURFACE; + ret = mix_videoconfigparamsdec_get_header(config_params, &header); + + if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) + { + // Delay initializing VA if codec configuration data is not ready, but don't return an error. 
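+        // VA is instead brought up lazily in mix_videofmt_mp42_decode_a_buffer(),
+        // which calls mix_videofmt_mp42_update_config_params() and
+        // mix_videofmt_mp42_initialize_va() once the first parsed buffer
+        // provides the VOL header.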
+ ret = MIX_RESULT_SUCCESS; + LOG_W( "Codec data is not available in the configuration parameter.\n"); + goto cleanup; + } - LOG_V("vop_coded == 0\n"); - if (self->last_frame == NULL) { - LOG_W("Previous frame is NULL\n"); + LOG_V( "Calling parse on header data, handle %d\n", (int)parent->parser_handle); - /* - * We shouldn't get a skipped frame - * before we are able to get a real frame - */ - ret = MIX_RESULT_DROPFRAME; - goto cleanup; - } - - skip_frame = mix_videoframe_new(); - ret = mix_videoframe_set_is_skipped(skip_frame, TRUE); - mix_videoframe_ref(self->last_frame); + pret = vbp_parse(parent->parser_handle, header->data, + header->data_size, TRUE); - ret = mix_videoframe_get_frame_id(self->last_frame, &frame_id); - ret = mix_videoframe_set_frame_id(skip_frame, frame_id); - ret = mix_videoframe_set_frame_type(skip_frame, MP4_VOP_TYPE_P); - ret = mix_videoframe_set_real_frame(skip_frame, self->last_frame); - ret = mix_videoframe_set_timestamp(skip_frame, timestamp); - ret = mix_videoframe_set_discontinuity(skip_frame, FALSE); + if (pret != VBP_OK) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error parsing header data\n"); + goto cleanup; + } - LOG_V("Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n", - (guint)skip_frame, (guint)frame_id, timestamp); + LOG_V( "Parsed header\n"); - /* Release our input buffers */ - ret = mix_videofmt_mp42_release_input_buffers(mix, timestamp); + //Get the header data and save + pret = vbp_query(parent->parser_handle, (void *)&data); - /* Enqueue the skipped frame using frame manager */ - ret = mix_framemanager_enqueue(mix->framemgr, skip_frame); + if ((pret != VBP_OK) || (data == NULL)) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Error reading parsed header data\n"); goto cleanup; } - /* - * Decide the number of buffer to use - */ - - buffer_id_number = picture_data->number_slices * 2 + 2; - LOG_V("number_slices is %d, allocating %d buffer_ids\n", - picture_data->number_slices, buffer_id_number); - - /* - * Check for B frames after a seek - * We need to have both reference frames in hand before we can decode a B frame - * If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME - */ - if (frame_type == MP4_VOP_TYPE_B) { + LOG_V( "Queried parser for header data\n"); + + mix_videofmt_mp42_update_config_params(mix, data); - if (self->reference_frames[1] == NULL) { - LOG_W("Insufficient reference frames for B frame\n"); - ret = MIX_RESULT_DROPFRAME; - goto cleanup; - } - } - - buffer_ids = g_malloc(sizeof(VABufferID) * buffer_id_number); - if (buffer_ids == NULL) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to allocate buffer_ids!\n"); - goto cleanup; - } + ret = mix_videofmt_mp42_initialize_va(mix, data); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error initializing va. 
\n"); + goto cleanup; + } - LOG_V("Getting a new surface\n");LOG_V("frame type is %d\n", frame_type); - /* Get a frame from the surface pool */ - ret = mix_surfacepool_get(mix->surfacepool, &frame); +cleanup: if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get frame from surface pool!\n"); - goto cleanup; - } - - /* - * Set the frame type for the frame object (used in reordering by frame manager) - */ - ret = mix_videoframe_set_frame_type(frame, frame_type); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set frame type!\n"); - goto cleanup; - } - - /* If I or P frame, update the reference array */ - if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) { - LOG_V("Updating forward/backward references for libva\n"); + if (parent->parser_handle) + { + pret = vbp_close(parent->parser_handle); + parent->parser_handle = NULL; + } + parent->initialized = FALSE; - self->last_vop_coding_type = frame_type; - mix_videofmt_mp42_handle_ref_frames(mix, frame_type, frame); + } else { + parent->initialized = TRUE; } - - LOG_V("Setting reference frames in picparams, frame_type = %d\n", - frame_type); - - switch (frame_type) { - case MP4_VOP_TYPE_I: - picture_param->forward_reference_picture = VA_INVALID_SURFACE; - picture_param->backward_reference_picture = VA_INVALID_SURFACE; - LOG_V("I frame, surface ID %u\n", (guint) frame->frame_id); - break; - case MP4_VOP_TYPE_P: - picture_param-> forward_reference_picture - = self->reference_frames[0]->frame_id; - picture_param-> backward_reference_picture = VA_INVALID_SURFACE; - - LOG_V("P frame, surface ID %u, forw ref frame is %u\n", - (guint) frame->frame_id, - (guint) self->reference_frames[0]->frame_id); - break; - case MP4_VOP_TYPE_B: - - picture_param->vop_fields.bits.backward_reference_vop_coding_type - = self->last_vop_coding_type; - - picture_param->forward_reference_picture - = self->reference_frames[1]->frame_id; - picture_param->backward_reference_picture - = self->reference_frames[0]->frame_id; - - LOG_V("B frame, surface ID %u, forw ref %d, back ref %d\n", - (guint) frame->frame_id, - (guint) picture_param->forward_reference_picture, - (guint) picture_param->backward_reference_picture); - break; - case MP4_VOP_TYPE_S: - LOG_W("MP4_VOP_TYPE_S, Will never reach here\n"); - break; - - default: - LOG_W("default, Will never reach here\n"); - break; - + + if (header != NULL) + { + if (header->data != NULL) + g_free(header->data); + g_free(header); + header = NULL; } - /* Libva buffer set up */ - va_display = mix->va_display; - va_context = mix->va_context; - LOG_V("Creating libva picture parameter buffer\n"); + LOG_V( "Unlocking\n"); + g_mutex_unlock(parent->objectlock); - /* First the picture parameter buffer */ - buffer_id_cnt = 0; - va_ret = vaCreateBuffer(va_display, va_context, - VAPictureParameterBufferType, - sizeof(VAPictureParameterBufferMPEG4), 1, picture_param, - &buffer_ids[buffer_id_cnt]); - buffer_id_cnt++; - if (va_ret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed to create va buffer of type VAPictureParameterBufferMPEG4!\n"); - goto cleanup; - } - - LOG_V("Creating libva VAIQMatrixBufferMPEG4 buffer\n"); - - if (picture_param->vol_fields.bits.quant_type) { - va_ret = vaCreateBuffer(va_display, va_context, VAIQMatrixBufferType, - sizeof(VAIQMatrixBufferMPEG4), 1, iq_matrix_buffer, - &buffer_ids[buffer_id_cnt]); + return ret; +} - if (va_ret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed to create va buffer of type VAIQMatrixBufferType!\n"); - goto cleanup; - } - buffer_id_cnt++; 
- } - /* Now for slices */ - for (jdx = 0; jdx < picture_data->number_slices; jdx++) { - - slice_data = &(picture_data->slice_data[jdx]); - slice_param = &(slice_data->slice_param); - - LOG_V( - "Creating libva slice parameter buffer, for slice %d\n", - jdx); - - /* Do slice parameters */ - va_ret = vaCreateBuffer(va_display, va_context, - VASliceParameterBufferType, - sizeof(VASliceParameterBufferMPEG4), 1, slice_param, - &buffer_ids[buffer_id_cnt]); - if (va_ret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed to create va buffer of type VASliceParameterBufferMPEG4!\n"); - goto cleanup; - } - buffer_id_cnt++; - - /* Do slice data */ - va_ret = vaCreateBuffer(va_display, va_context, VASliceDataBufferType, - slice_data->slice_size, 1, slice_data->buffer_addr - + slice_data->slice_offset, &buffer_ids[buffer_id_cnt]); - if (va_ret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed to create va buffer of type VASliceDataBufferType!\n"); - goto cleanup; - } - buffer_id_cnt++; - } +MIX_RESULT mix_videofmt_mp42_decode(MixVideoFormat *mix, MixBuffer * bufin[], + gint bufincnt, MixVideoDecodeParams * decode_params) { - /* Get our surface ID from the frame object */ - ret = mix_videoframe_get_frame_id(frame, &surface); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get frame id: ret = 0x%x\n", ret); - goto cleanup; - } + int i = 0; + MixVideoFormat *parent = NULL; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + guint64 ts = 0; + gboolean discontinuity = FALSE; - LOG_V("Calling vaBeginPicture\n"); + LOG_V( "Begin\n"); - /* Now we can begin the picture */ - va_ret = vaBeginPicture(va_display, va_context, surface); - if (va_ret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed to vaBeginPicture(): va_ret = 0x%x\n", va_ret); - goto cleanup; + if (mix == NULL || bufin == NULL || decode_params == NULL ) + { + LOG_E( "NUll pointer passed in\n"); + return MIX_RESULT_NULL_PTR; } - LOG_V("Calling vaRenderPicture\n"); + /* Chainup parent method. + We are not chaining up to parent method for now. 
+ */ - /* Render the picture */ - va_ret = vaRenderPicture(va_display, va_context, buffer_ids, buffer_id_cnt); - if (va_ret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed to vaRenderPicture(): va_ret = 0x%x\n", va_ret); - goto cleanup; +#if 0 + if (parent_class->decode) { + return parent_class->decode(mix, bufin, bufincnt, decode_params); } +#endif - LOG_V("Calling vaEndPicture\n"); - - /* End picture */ - va_ret = vaEndPicture(va_display, va_context); - if (va_ret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed to vaEndPicture(): va_ret = 0x%x\n", va_ret); - goto cleanup; - } + if (!MIX_IS_VIDEOFORMAT_MP42(mix)) + return MIX_RESULT_INVALID_PARAM; -#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ - LOG_V("Calling vaSyncSurface\n"); + parent = MIX_VIDEOFORMAT(mix); - /* Decode the picture */ - va_ret = vaSyncSurface(va_display, surface); - if (va_ret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed to vaSyncSurface(): va_ret = 0x%x\n", va_ret); - goto cleanup; + ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); + if (ret != MIX_RESULT_SUCCESS) + { + // never happen + return MIX_RESULT_FAIL; } -#endif - - /* Set the discontinuity flag */ - mix_videoframe_set_discontinuity(frame, discontinuity); - - /* Set the timestamp */ - mix_videoframe_set_timestamp(frame, timestamp); - - LOG_V("Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", timestamp); - /* Enqueue the decoded frame using frame manager */ - ret = mix_framemanager_enqueue(mix->framemgr, frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to mix_framemanager_enqueue()!\n"); - goto cleanup; + ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); + if (ret != MIX_RESULT_SUCCESS) + { + // never happen + return MIX_RESULT_FAIL; } - /* For I or P frames, save this frame off for skipped frame handling */ - if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) { - if (self->last_frame != NULL) { - mix_videoframe_unref(self->last_frame); - } - self->last_frame = frame; - mix_videoframe_ref(frame); - } + //From now on, we exit this function through cleanup: - ret = MIX_RESULT_SUCCESS; + LOG_V( "Locking\n"); + g_mutex_lock(parent->objectlock); - cleanup: + LOG_I("ts after mix_videodecodeparams_get_timestamp() = %"G_GINT64_FORMAT"\n", ts); - if (ret != MIX_RESULT_SUCCESS && frame != NULL) { - mix_videoframe_unref(frame); - } + for (i = 0; i < bufincnt; i++) + { + // decode a buffer at a time + ret = mix_videofmt_mp42_decode_a_buffer( + mix, + bufin[i], + ts, + discontinuity); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E("mix_videofmt_mp42_decode_a_buffer failed.\n"); + goto cleanup; + } + } - if (ret != MIX_RESULT_SUCCESS) { - mix_videoformat_mp42_flush_packed_stream_queue( - self->packed_stream_queue); - } - g_free(buffer_ids); - mix_videofmt_mp42_release_input_buffers(mix, timestamp); +cleanup: - if (is_from_queued_data) { - if (mix_buffer) { - mix_buffer_unref(mix_buffer); - } - mix_videoformat_mp42_free_picture_data(picture_data); - } + LOG_V( "Unlocking\n"); + g_mutex_unlock(parent->objectlock); - LOG_V("End\n"); + LOG_V( "End\n"); return ret; } @@ -1123,26 +1268,13 @@ MIX_RESULT mix_videofmt_mp42_flush(MixVideoFormat *mix) { MIX_RESULT ret = MIX_RESULT_SUCCESS; MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); - MixInputBufferEntry *bufentry = NULL; LOG_V("Begin\n"); g_mutex_lock(mix->objectlock); - 
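
The rewritten decode path above takes objectlock once and funnels every exit through a single cleanup label, which is what the "From now on, we exit this function through cleanup:" comment announces. The idiom in skeleton form (a sketch, not library code):

    MIX_RESULT example_method(MixVideoFormat *mix)
    {
        MIX_RESULT ret = MIX_RESULT_SUCCESS;
        g_mutex_lock(mix->objectlock);
        /* ... any failure sets ret and does: goto cleanup; ... */
    cleanup:
        g_mutex_unlock(mix->objectlock);  /* single unlock point on all paths */
        return ret;
    }

This keeps lock/unlock balanced no matter how many error paths the body grows.
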
mix_videoformat_mp42_flush_packed_stream_queue(self->packed_stream_queue); - - /* - * Clear the contents of inputbufqueue - */ - while (!g_queue_is_empty(mix->inputbufqueue)) { - bufentry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue); - if (bufentry == NULL) { - continue; - } - - mix_buffer_unref(bufentry->buf); - g_free(bufentry); - } + // drop any decode-pending picture, and ignore return value + mix_videofmt_mp42_decode_end(mix, TRUE); /* * Clear parse_in_progress flag and current timestamp @@ -1150,16 +1282,21 @@ MIX_RESULT mix_videofmt_mp42_flush(MixVideoFormat *mix) { mix->parse_in_progress = FALSE; mix->discontinuity_frame_in_progress = FALSE; mix->current_timestamp = (guint64)-1; + self->next_nvop_for_PB_frame = FALSE; - { - gint idx = 0; - for (idx = 0; idx < 2; idx++) { - if (self->reference_frames[idx] != NULL) { - mix_videoframe_unref(self->reference_frames[idx]); - self->reference_frames[idx] = NULL; - } + gint idx = 0; + for (idx = 0; idx < 2; idx++) { + if (self->reference_frames[idx] != NULL) { + mix_videoframe_unref(self->reference_frames[idx]); + self->reference_frames[idx] = NULL; } } + if (self->last_frame) + { + mix_videoframe_unref(self->last_frame); + self->last_frame = NULL; + } + /* Call parser flush */ vbp_flush(mix->parser_handle); @@ -1174,8 +1311,6 @@ MIX_RESULT mix_videofmt_mp42_flush(MixVideoFormat *mix) { MIX_RESULT mix_videofmt_mp42_eos(MixVideoFormat *mix) { MIX_RESULT ret = MIX_RESULT_SUCCESS; - vbp_data_mp42 *data = NULL; - uint32 vbp_ret = 0; LOG_V("Begin\n"); @@ -1189,29 +1324,10 @@ MIX_RESULT mix_videofmt_mp42_eos(MixVideoFormat *mix) { g_mutex_lock(mix->objectlock); - /* if a frame is in progress, process the frame */ - if (mix->parse_in_progress) { - /* query for data */ - vbp_ret = vbp_query(mix->parser_handle, (void *) &data); - LOG_V("vbp_query() returns 0x%x\n", vbp_ret); - - if ((vbp_ret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E("vbp_ret != VBP_OK || data == NULL\n"); - goto cleanup; - } - - /* process and decode data */ - ret = mix_videofmt_mp42_process_decode(mix, data, - mix->current_timestamp, mix->discontinuity_frame_in_progress); - mix->parse_in_progress = FALSE; - - } - + mix_videofmt_mp42_decode_end(mix, FALSE); + ret = mix_framemanager_eos(mix->framemgr); - cleanup: - g_mutex_unlock(mix->objectlock); LOG_V("End\n"); @@ -1294,125 +1410,6 @@ MIX_RESULT mix_videofmt_mp42_handle_ref_frames(MixVideoFormat *mix, MIX_RESULT mix_videofmt_mp42_release_input_buffers(MixVideoFormat *mix, guint64 timestamp) { - MixInputBufferEntry *bufentry = NULL; - gboolean done = FALSE; - - LOG_V("Begin\n"); - - if (mix == NULL) { - return MIX_RESULT_NULL_PTR; - } - - /* Dequeue and release all input buffers for this frame */ - LOG_V("Releasing all the MixBuffers for this frame\n"); - - /* - * While the head of the queue has timestamp == current ts - * dequeue the entry, unref the MixBuffer, and free the struct - */ - done = FALSE; - while (!done) { - bufentry - = (MixInputBufferEntry *) g_queue_peek_head(mix->inputbufqueue); - if (bufentry == NULL) { - break; - } - - LOG_V("head of queue buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", - (guint)bufentry->buf, timestamp, bufentry->timestamp); - - if (bufentry->timestamp != timestamp) { - LOG_V("buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", - (guint)bufentry->buf, timestamp, bufentry->timestamp); - - done = TRUE; - break; - } - - bufentry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue); - 
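
The boolean passed to mix_videofmt_mp42_decode_end() distinguishes the two teardown paths rewritten above; side by side, as already shown in the hunks (mix assumed valid):

    /* In mix_videofmt_mp42_flush(): discard whatever is decode-pending. */
    mix_videofmt_mp42_decode_end(mix, TRUE);

    /* In mix_videofmt_mp42_eos(): complete the pending picture, then
       tell the frame manager the stream has ended. */
    mix_videofmt_mp42_decode_end(mix, FALSE);
    ret = mix_framemanager_eos(mix->framemgr);
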
LOG_V("Unref this MixBuffers %x\n", (guint) bufentry->buf); - - mix_buffer_unref(bufentry->buf); - g_free(bufentry); - } - - LOG_V("End\n"); - + // not used, to be removed return MIX_RESULT_SUCCESS; } - -vbp_picture_data_mp42 *mix_videoformat_mp42_clone_picture_data( - vbp_picture_data_mp42 *picture_data) { - - gboolean succ = FALSE; - - if (!picture_data) { - return NULL; - } - - if (picture_data->number_slices == 0) { - return NULL; - } - - vbp_picture_data_mp42 *cloned_picture_data = g_try_new0( - vbp_picture_data_mp42, 1); - if (cloned_picture_data == NULL) { - goto cleanup; - } - - memcpy(cloned_picture_data, picture_data, sizeof(vbp_picture_data_mp42)); - - cloned_picture_data->number_slices = picture_data->number_slices; - cloned_picture_data->slice_data = g_try_new0(vbp_slice_data_mp42, - picture_data->number_slices); - if (cloned_picture_data->slice_data == NULL) { - goto cleanup; - } - - memcpy(cloned_picture_data->slice_data, picture_data->slice_data, - sizeof(vbp_slice_data_mp42) * (picture_data->number_slices)); - - succ = TRUE; - - cleanup: - - if (!succ) { - mix_videoformat_mp42_free_picture_data(cloned_picture_data); - return NULL; - } - - return cloned_picture_data; -} - -void mix_videoformat_mp42_free_picture_data(vbp_picture_data_mp42 *picture_data) { - if (picture_data) { - if (picture_data->slice_data) { - g_free(picture_data->slice_data); - } - g_free(picture_data); - } -} - -void mix_videoformat_mp42_flush_packed_stream_queue(GQueue *packed_stream_queue) { - - PackedStream *packed_stream = NULL; - - if (packed_stream_queue == NULL) { - return; - } - while (!g_queue_is_empty(packed_stream_queue)) { - packed_stream = (PackedStream *) g_queue_pop_head(packed_stream_queue); - if (packed_stream == NULL) { - continue; - } - - if (packed_stream->picture_data) { - mix_videoformat_mp42_free_picture_data(packed_stream->picture_data); - } - - if (packed_stream->mix_buffer) { - mix_buffer_unref(packed_stream->mix_buffer); - } - g_free(packed_stream); - } -} diff --git a/mix_video/src/mixvideoformat_mp42.h b/mix_video/src/mixvideoformat_mp42.h index 49a1299..fc80c95 100644 --- a/mix_video/src/mixvideoformat_mp42.h +++ b/mix_video/src/mixvideoformat_mp42.h @@ -40,8 +40,13 @@ struct _MixVideoFormat_MP42 { MixVideoFrame * reference_frames[2]; MixVideoFrame * last_frame; gint last_vop_coding_type; + guint last_vop_time_increment; - GQueue *packed_stream_queue; + /* indicate if future n-vop is a placeholder of a packed frame */ + gboolean next_nvop_for_PB_frame; + + /* indicate if iq_matrix_buffer is sent to driver */ + gboolean iq_matrix_buf_sent; }; /** diff --git a/mix_video/src/mixvideoformat_vc1.c b/mix_video/src/mixvideoformat_vc1.c index bf4d1f4..9f21a5a 100644 --- a/mix_video/src/mixvideoformat_vc1.c +++ b/mix_video/src/mixvideoformat_vc1.c @@ -178,7 +178,7 @@ MIX_RESULT mix_videofmt_vc1_update_seq_header( guint height = 0; guint i = 0; - guchar* p = header->data; + guchar* p = NULL; MIX_RESULT res = MIX_RESULT_SUCCESS; if (!config_params || !header) @@ -187,6 +187,8 @@ MIX_RESULT mix_videofmt_vc1_update_seq_header( return (MIX_RESULT_NULL_PTR); } + p = header->data; + res = mix_videoconfigparamsdec_get_picture_res( config_params, &width, @@ -240,6 +242,57 @@ MIX_RESULT mix_videofmt_vc1_update_seq_header( } +MIX_RESULT mix_videofmt_vc1_update_config_params( + MixVideoFormat *mix, + vbp_data_vc1 *data) +{ + MixVideoFormat *parent = MIX_VIDEOFORMAT(mix); + + if (parent->picture_width == 0 || parent->picture_height == 0) + { + parent->picture_width = data->se_data->CODED_WIDTH; + 
parent->picture_height = data->se_data->CODED_HEIGHT; + + mix_videoconfigparamsdec_set_picture_res( + mix->config_params, + parent->picture_width, + parent->picture_height); + } + + + // scaling has been performed on the decoded image. + mix_videoconfigparamsdec_set_video_range(mix->config_params, 1); + + uint8 color_matrix; + + switch (data->se_data->MATRIX_COEF) + { + case 1: + color_matrix = VA_SRC_BT709; + break; + + // ITU-R BT.1700, ITU-R BT.601-5, and SMPTE 293M-1996. + case 6: + color_matrix = VA_SRC_BT601; + break; + + default: + // unknown color matrix, set to 0 so color space flag will not be set. + color_matrix = 0; + break; + } + mix_videoconfigparamsdec_set_color_matrix(mix->config_params, color_matrix); + + mix_videoconfigparamsdec_set_pixel_aspect_ratio( + mix->config_params, + data->se_data->ASPECT_HORIZ_SIZE, + data->se_data->ASPECT_VERT_SIZE); + + return MIX_RESULT_SUCCESS; + +} + + MIX_RESULT mix_videofmt_vc1_initialize(MixVideoFormat *mix, MixVideoConfigParamsDec * config_params, @@ -368,6 +421,8 @@ MIX_RESULT mix_videofmt_vc1_initialize(MixVideoFormat *mix, LOG_V( "Queried parser for header data\n"); + mix_videofmt_vc1_update_config_params(parent, data); + //Time for libva initialization vadisplay = parent->va_display; diff --git a/mix_video/src/mixvideoformatenc.c b/mix_video/src/mixvideoformatenc.c index f39f77f..f35fb32 100644 --- a/mix_video/src/mixvideoformatenc.c +++ b/mix_video/src/mixvideoformatenc.c @@ -30,8 +30,8 @@ static MIX_RESULT mix_videofmtenc_eos_default(MixVideoFormatEnc *mix); static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix); static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default( MixVideoFormatEnc *mix, guint *max_size); -MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, +MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params_enc, MixEncParamsType params_type); @@ -57,28 +57,52 @@ static void mix_videoformatenc_init(MixVideoFormatEnc * self) { self->va_config = 0; self->mime_type = NULL; self->frame_rate_num= 0; - self->frame_rate_denom = 1; + self->frame_rate_denom = 1; self->picture_width = 0; self->picture_height = 0; + + /* + * bitrate control + */ self->initial_qp = 0; self->min_qp = 0; - self->intra_period = 0; + self->target_percentage = 95; + self->window_size = 500; self->bitrate = 0; + + self->intra_period = 0; self->share_buf_mode = FALSE; self->ci_frame_id = NULL; self->ci_frame_num = 0; self->drawable = 0x0; - self->need_display = TRUE; + self->need_display = TRUE; self->va_rcmode = VA_RC_NONE; self->va_format = VA_RT_FORMAT_YUV420; self->va_entrypoint = VAEntrypointEncSlice; - self->va_profile = VAProfileH264Baseline; + self->va_profile = VAProfileH264Baseline; self->level = 30; + + self->refresh_type = MIX_VIDEO_NONIR; self->CIR_frame_cnt = 15; //default value + + /* + * Parameters for AIR intra refresh mode + */ + self->air_params.air_MBs = 0; + self->air_params.air_threshold = 0; + self->air_params.air_auto = 0; + + self->max_slice_size = 0; + self->force_key_frame = FALSE; self->new_header_required = FALSE; - + self->render_mss_required = FALSE; + self->render_QP_required = FALSE; + self->render_AIR_required = FALSE; + self->render_framerate_required = FALSE; + self->render_bitrate_required = FALSE; + //add more properties here } @@ -112,13 +136,13 @@ void mix_videoformatenc_finalize(GObject * obj) { /* clean up here. 
*/ if (obj == NULL) { - LOG_E( "obj == NULL\n"); - return; + LOG_E( "obj == NULL\n"); + return; } - - MixVideoFormatEnc *mix = MIX_VIDEOFORMATENC(obj); - - LOG_V( "\n"); + + MixVideoFormatEnc *mix = MIX_VIDEOFORMATENC(obj); + + LOG_V( "\n"); if(mix->objectlock) { g_mutex_free(mix->objectlock); @@ -128,9 +152,9 @@ void mix_videoformatenc_finalize(GObject * obj) { //MiVideo object calls the _deinitialize() for frame manager if (mix->framemgr) { - mix_framemanager_unref(mix->framemgr); + mix_framemanager_unref(mix->framemgr); mix->framemgr = NULL; - } + } if (mix->mime_type) { @@ -139,10 +163,10 @@ void mix_videoformatenc_finalize(GObject * obj) { else g_string_free(mix->mime_type, FALSE); } - + if (mix->ci_frame_id) g_free (mix->ci_frame_id); - + if (mix->surfacepool) { @@ -168,7 +192,7 @@ mix_videoformatenc_ref(MixVideoFormatEnc * mix) { /* Default vmethods implementation */ static MIX_RESULT mix_videofmtenc_getcaps_default(MixVideoFormatEnc *mix, GString *msg) { - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); return MIX_RESULT_SUCCESS; } @@ -178,16 +202,16 @@ static MIX_RESULT mix_videofmtenc_initialize_default(MixVideoFormatEnc *mix, MixBufferPool * input_buf_pool, MixSurfacePool ** surface_pool, VADisplay va_display) { - - LOG_V( "Begin\n"); - + + LOG_V( "Begin\n"); + if (mix == NULL ||config_params_enc == NULL) { - LOG_E( - "!mix || config_params_enc == NULL\n"); + LOG_E( + "!mix || config_params_enc == NULL\n"); return MIX_RESULT_NULL_PTR; } - - + + MIX_RESULT ret = MIX_RESULT_SUCCESS; //TODO check return values of getter fns for config_params @@ -195,233 +219,298 @@ static MIX_RESULT mix_videofmtenc_initialize_default(MixVideoFormatEnc *mix, g_mutex_lock(mix->objectlock); mix->framemgr = frame_mgr; - mix_framemanager_ref(mix->framemgr); + mix_framemanager_ref(mix->framemgr); mix->va_display = va_display; - - LOG_V( + + LOG_V( "Start to get properities from parent params\n"); - + /* get properties from param (parent) Object*/ - ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc, + ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc, &(mix->bitrate)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_bps\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_get_bps\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; } - + ret = mix_videoconfigparamsenc_get_frame_rate (config_params_enc, &(mix->frame_rate_num), &(mix->frame_rate_denom)); - + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_frame_rate\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_get_frame_rate\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } - + } + ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, &(mix->initial_qp)); - + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_init_qp\n"); + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_init_qp\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } - - + } + + ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc, &(mix->min_qp)); - + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_min_qp\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + ret = mix_videoconfigparamsenc_get_target_percentage(config_params_enc, + &(mix->target_percentage)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_target_percentage\n"); + 
g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + ret = mix_videoconfigparamsenc_get_window_size (config_params_enc, + &(mix->window_size)); + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_min_qp\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_get_window_size\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } - + } + ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc, &(mix->intra_period)); - + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_intra_period\n"); + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_intra_period\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } - + } + ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc, &(mix->picture_width), &(mix->picture_height)); - + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_picture_res\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_get_picture_res\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } - + } + ret = mix_videoconfigparamsenc_get_share_buf_mode (config_params_enc, &(mix->share_buf_mode)); - + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_share_buf_mode\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_get_share_buf_mode\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } - - + } + + ret = mix_videoconfigparamsenc_get_ci_frame_info (config_params_enc, &(mix->ci_frame_id), &(mix->ci_frame_num)); - + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_ci_frame_info\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_get_ci_frame_info\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } - - + } + + ret = mix_videoconfigparamsenc_get_drawable (config_params_enc, &(mix->drawable)); - + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_drawable\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_get_drawable\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } + } ret = mix_videoconfigparamsenc_get_need_display (config_params_enc, &(mix->need_display)); - + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_drawable\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_get_drawable\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } + } ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc, &(mix->va_rcmode)); - + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_rc_mode\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_get_rc_mode\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } + } ret = mix_videoconfigparamsenc_get_raw_format (config_params_enc, &(mix->va_format)); - + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_format\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_get_format\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } + } ret = mix_videoconfigparamsenc_get_profile (config_params_enc, (MixProfile *) &(mix->va_profile)); - + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_profile\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_get_profile\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } + } ret = 
mix_videoconfigparamsenc_get_level (config_params_enc, &(mix->level)); - + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_level\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_get_level\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } + } - ret = mix_videoconfigparamsenc_get_CIR_frame_cnt(config_params_enc, + ret = mix_videoconfigparamsenc_get_CIR_frame_cnt(config_params_enc, &(mix->CIR_frame_cnt)); - + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + + ret = mix_videoconfigparamsenc_get_max_slice_size(config_params_enc, + &(mix->max_slice_size)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_max_slice_size\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + + ret = mix_videoconfigparamsenc_get_refresh_type(config_params_enc, + &(mix->refresh_type)); + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); + LOG_E( + "Failed to mix_videoconfigparamsenc_get_refresh_type\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } + } + + ret = mix_videoconfigparamsenc_get_AIR_params(config_params_enc, + &(mix->air_params)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup - - LOG_V( + LOG_E( + "Failed to mix_videoconfigparamsenc_get_AIR_params\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + LOG_V( "======Video Encode Parent Object properities======:\n"); - - LOG_I( "mix->bitrate = %d\n", + + LOG_I( "mix->bitrate = %d\n", mix->bitrate); - LOG_I( "mix->frame_rate = %d\n", - mix->frame_rate_denom / mix->frame_rate_denom); - LOG_I( "mix->initial_qp = %d\n", - mix->initial_qp); - LOG_I( "mix->min_qp = %d\n", - mix->min_qp); - LOG_I( "mix->intra_period = %d\n", - mix->intra_period); - LOG_I( "mix->picture_width = %d\n", - mix->picture_width); - LOG_I( "mix->picture_height = %d\n", - mix->picture_height); - LOG_I( "mix->share_buf_mode = %d\n", - mix->share_buf_mode); - LOG_I( "mix->ci_frame_id = 0x%08x\n", - mix->ci_frame_id); - LOG_I( "mix->ci_frame_num = %d\n", - mix->ci_frame_num); - LOG_I( "mix->drawable = 0x%08x\n", - mix->drawable); - LOG_I( "mix->need_display = %d\n", - mix->need_display); - LOG_I( "mix->va_format = %d\n", - mix->va_format); - LOG_I( "mix->va_profile = %d\n", - mix->va_profile); - LOG_I( "mix->va_rcmode = %d\n\n", - mix->va_rcmode); - + LOG_I( "mix->frame_rate = %d\n", + mix->frame_rate_denom / mix->frame_rate_denom); + LOG_I( "mix->initial_qp = %d\n", + mix->initial_qp); + LOG_I( "mix->min_qp = %d\n", + mix->min_qp); + LOG_I( "mix->intra_period = %d\n", + mix->intra_period); + LOG_I( "mix->picture_width = %d\n", + mix->picture_width); + LOG_I( "mix->picture_height = %d\n", + mix->picture_height); + LOG_I( "mix->share_buf_mode = %d\n", + mix->share_buf_mode); + LOG_I( "mix->ci_frame_id = 0x%08x\n", + mix->ci_frame_id); + LOG_I( "mix->ci_frame_num = %d\n", + mix->ci_frame_num); + LOG_I( "mix->drawable = 0x%08x\n", + mix->drawable); + LOG_I( "mix->need_display = %d\n", + mix->need_display); + LOG_I( "mix->va_format = %d\n", + mix->va_format); + LOG_I( "mix->va_profile = %d\n", + mix->va_profile); + LOG_I( "mix->va_rcmode = %d\n\n", + mix->va_rcmode); + LOG_I( "mix->CIR_frame_cnt = %d\n\n", + mix->CIR_frame_cnt); + LOG_I( "mix->max_slice_size = %d\n\n", + mix->max_slice_size); + 
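
Note that the frame-rate log above divides mix->frame_rate_denom by itself; mix->frame_rate_num / mix->frame_rate_denom appears to be intended. Separately, the getter chain repeats the same get/log/unlock/return block for every field; a hypothetical helper macro (an illustration, not part of the library — it assumes mix->objectlock is held at each call site) condenses the pattern:

    /* Hypothetical helper capturing the repeated get/check/fail block above. */
    #define GET_ENC_PARAM(call)                           \
        do {                                              \
            if ((call) != MIX_RESULT_SUCCESS) {           \
                LOG_E("Failed: " #call "\n");             \
                g_mutex_unlock(mix->objectlock);          \
                return MIX_RESULT_FAIL;                   \
            }                                             \
        } while (0)

    /* Usage, replacing one of the blocks above: */
    GET_ENC_PARAM(mix_videoconfigparamsenc_get_window_size(config_params_enc,
                                                           &(mix->window_size)));
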
g_mutex_unlock(mix->objectlock); - - LOG_V( "end\n"); - + + LOG_V( "end\n"); + return MIX_RESULT_SUCCESS; } @@ -450,24 +539,24 @@ static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default( MixVideoFormatEnc *mix, guint *max_size) { - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, +MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params_enc, MixEncParamsType params_type) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; + MIX_RESULT ret = MIX_RESULT_SUCCESS; if (mix == NULL ||config_params_enc == NULL) { - LOG_E( - "!mix || config_params_enc == NULL\n"); + LOG_E( + "!mix || config_params_enc == NULL\n"); return MIX_RESULT_NULL_PTR; } MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - + g_mutex_lock(mix->objectlock); @@ -480,38 +569,119 @@ MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * m if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup LOG_E( - "Failed to mix_videoconfigparamsenc_get_bit_rate\n"); + "Failed to mix_videoconfigparamsenc_get_bit_rate\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } + } - mix->new_header_required = TRUE; + mix->render_bitrate_required = TRUE; + } + break; + + case MIX_ENC_PARAMS_INIT_QP: + { + ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, &(mix->initial_qp)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_init_qp\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + mix->render_bitrate_required = TRUE; + } + break; + + case MIX_ENC_PARAMS_MIN_QP: + { + ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc, &(mix->min_qp)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_min_qp\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + mix->render_bitrate_required = TRUE; + } + break; + + case MIX_ENC_PARAMS_WINDOW_SIZE: + { + ret = mix_videoconfigparamsenc_get_window_size (config_params_enc, &(mix->window_size)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to MIX_ENC_PARAMS_WINDOW_SIZE\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + mix->render_bitrate_required = TRUE; } break; - case MIX_ENC_PARAMS_SLICE_SIZE: + + case MIX_ENC_PARAMS_TARGET_PERCENTAGE: + { + ret = mix_videoconfigparamsenc_get_target_percentage (config_params_enc, &(mix->target_percentage)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to MIX_ENC_PARAMS_TARGET_PERCENTAGE\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + mix->render_bitrate_required = TRUE; + } + break; + + case MIX_ENC_PARAMS_MTU_SLICE_SIZE: + { + ret = mix_videoconfigparamsenc_get_max_slice_size(config_params_enc, &(mix->max_slice_size)); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_get_max_slice_size\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + mix->render_mss_required = TRUE; + + } + + case MIX_ENC_PARAMS_SLICE_NUM: { /* * This type of dynamic control will be handled in H.264 override method */ } break; - - case MIX_ENC_PARAMS_RC_MODE: + + case MIX_ENC_PARAMS_RC_MODE: { ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc, &(mix->va_rcmode)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup LOG_E( - 
"Failed to mix_videoconfigparamsenc_get_rate_control\n"); + "Failed to mix_videoconfigparamsenc_get_rate_control\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } + } + + /* + * We only can change the RC mode to re-start encoding session + */ - mix->new_header_required = TRUE; } break; - + case MIX_ENC_PARAMS_RESOLUTION: { @@ -520,12 +690,12 @@ MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * m //TODO cleanup LOG_E( - "Failed to mix_videoconfigparamsenc_get_picture_res\n"); + "Failed to mix_videoconfigparamsenc_get_picture_res\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } + } - mix->new_header_required = TRUE; + mix->new_header_required = TRUE; } break; case MIX_ENC_PARAMS_GOP_SIZE: @@ -536,12 +706,12 @@ MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * m //TODO cleanup LOG_E( - "Failed to mix_videoconfigparamsenc_get_intra_period\n"); + "Failed to mix_videoconfigparamsenc_get_intra_period\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } + } - mix->new_header_required = TRUE; + mix->new_header_required = TRUE; } break; @@ -552,35 +722,52 @@ MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * m //TODO cleanup LOG_E( - "Failed to mix_videoconfigparamsenc_get_frame_rate\n"); + "Failed to mix_videoconfigparamsenc_get_frame_rate\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } - - mix->new_header_required = TRUE; + } + + mix->render_framerate_required = TRUE; } break; + case MIX_ENC_PARAMS_FORCE_KEY_FRAME: { - mix->new_header_required = TRUE; - + mix->new_header_required = TRUE; + } break; - case MIX_ENC_PARAMS_QP: + + case MIX_ENC_PARAMS_REFRESH_TYPE: { - ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, &(mix->initial_qp)); + ret = mix_videoconfigparamsenc_get_refresh_type(config_params_enc, &(mix->refresh_type)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_refresh_type\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + } + break; + + case MIX_ENC_PARAMS_AIR: + { + ret = mix_videoconfigparamsenc_get_AIR_params(config_params_enc, &(mix->air_params)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup LOG_E( - "Failed to mix_videoconfigparamsenc_get_init_qp\n"); + "Failed to mix_videoconfigparamsenc_get_AIR_params\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } + } - mix->new_header_required = TRUE; + mix->render_AIR_required = TRUE; } break; + case MIX_ENC_PARAMS_CIR_FRAME_CNT: { ret = mix_videoconfigparamsenc_get_CIR_frame_cnt (config_params_enc, &(mix->CIR_frame_cnt)); @@ -588,29 +775,29 @@ MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * m //TODO cleanup LOG_E( - "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); + "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); g_mutex_unlock(mix->objectlock); return MIX_RESULT_FAIL; - } + } } break; - + default: - break; - } + break; + } - g_mutex_unlock(mix->objectlock); + g_mutex_unlock(mix->objectlock); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } /* mixvideoformatenc class methods implementation */ MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg) { MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - - LOG_V( "Begin\n"); - + + LOG_V( "Begin\n"); + if (klass->getcaps) { return klass->getcaps(mix, msg); } @@ -624,26 +811,26 @@ MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix, 
MixSurfacePool ** surface_pool, VADisplay va_display) { MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - + /*frame_mgr and input_buf_pool is reserved for future use*/ if (klass->initialize) { return klass->initialize(mix, config_params_enc, frame_mgr, input_buf_pool, surface_pool, va_display); } - + return MIX_RESULT_FAIL; - + } MIX_RESULT mix_videofmtenc_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, MixVideoEncodeParams * encode_params) { - + MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); if (klass->encode) { return klass->encode(mix, bufin, bufincnt, iovout, iovoutcnt, encode_params); } - + return MIX_RESULT_FAIL; } @@ -652,7 +839,7 @@ MIX_RESULT mix_videofmtenc_flush(MixVideoFormatEnc *mix) { if (klass->flush) { return klass->flush(mix); } - + return MIX_RESULT_FAIL; } @@ -661,7 +848,7 @@ MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix) { if (klass->eos) { return klass->eos(mix); } - + return MIX_RESULT_FAIL; } @@ -670,28 +857,28 @@ MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix) { if (klass->deinitialize) { return klass->deinitialize(mix); } - + return MIX_RESULT_FAIL; } MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, guint * max_size) { - + MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); if (klass->encode) { return klass->getmaxencodedbufsize(mix, max_size); } - + return MIX_RESULT_FAIL; } -MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, +MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params_enc, MixEncParamsType params_type) { - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); + MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); if (klass->set_dynamic_config) { return klass->set_dynamic_config(mix, config_params_enc, params_type); } - - return MIX_RESULT_FAIL; + + return MIX_RESULT_FAIL; } diff --git a/mix_video/src/mixvideoformatenc.h b/mix_video/src/mixvideoformatenc.h index b66cc6a..3b208b4 100644 --- a/mix_video/src/mixvideoformatenc.h +++ b/mix_video/src/mixvideoformatenc.h @@ -54,8 +54,8 @@ typedef MIX_RESULT (*MixVideoFmtEncFlushFunc)(MixVideoFormatEnc *mix); typedef MIX_RESULT (*MixVideoFmtEncEndOfStreamFunc)(MixVideoFormatEnc *mix); typedef MIX_RESULT (*MixVideoFmtEncDeinitializeFunc)(MixVideoFormatEnc *mix); typedef MIX_RESULT (*MixVideoFmtEncGetMaxEncodedBufSizeFunc) (MixVideoFormatEnc *mix, guint *max_size); -typedef MIX_RESULT (*MixVideoFmtEncSetDynamicEncConfigFunc) (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params, +typedef MIX_RESULT (*MixVideoFmtEncSetDynamicEncConfigFunc) (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params, MixEncParamsType params_type); struct _MixVideoFormatEnc { @@ -73,35 +73,54 @@ struct _MixVideoFormatEnc { VAContextID va_context; VAConfigID va_config; GString *mime_type; - + guint frame_rate_num; guint frame_rate_denom; guint picture_width; guint picture_height; - + + guint intra_period; + + /* + * Following is for bitrate control + */ guint initial_qp; guint min_qp; - guint intra_period; guint bitrate; - - gboolean share_buf_mode; + guint target_percentage; + guint window_size; + + gboolean share_buf_mode; gulong * ci_frame_id; - guint ci_frame_num; + guint ci_frame_num; gboolean force_key_frame; gboolean new_header_required; - guint CIR_frame_cnt; - + + 
MixVideoIntraRefreshType refresh_type; + + guint CIR_frame_cnt; + + MixAIRParams air_params; + + guint max_slice_size; + + gboolean render_mss_required; + gboolean render_QP_required; + gboolean render_AIR_required; + gboolean render_framerate_required; + gboolean render_bitrate_required; + gulong drawable; - gboolean need_display; + gboolean need_display; VAProfile va_profile; VAEntrypoint va_entrypoint; guint va_format; - guint va_rcmode; - guint8 level; - - + guint va_rcmode; + guint8 level; + + MixBufferPool *inputbufpool; GQueue *inputbufqueue; }; @@ -124,7 +143,7 @@ struct _MixVideoFormatEncClass { MixVideoFmtEncFlushFunc flush; MixVideoFmtEncEndOfStreamFunc eos; MixVideoFmtEncDeinitializeFunc deinitialize; - MixVideoFmtEncGetMaxEncodedBufSizeFunc getmaxencodedbufsize; + MixVideoFmtEncGetMaxEncodedBufSizeFunc getmaxencodedbufsize; MixVideoFmtEncSetDynamicEncConfigFunc set_dynamic_config; }; @@ -166,7 +185,7 @@ MixVideoFormatEnc *mix_videoformatenc_ref(MixVideoFormatEnc * mix); /* TODO: change method parameter list */ MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg); -MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix, +MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix, MixVideoConfigParamsEnc * enc_config_params, MixFrameManager * frame_mgr, MixBufferPool * input_buf_pool, @@ -183,11 +202,11 @@ MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix); MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, +MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, guint *max_size); -MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params, +MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params, MixEncParamsType params_type); G_END_DECLS diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c index db532e4..ac45be8 100644 --- a/mix_video/src/mixvideoformatenc_h264.c +++ b/mix_video/src/mixvideoformatenc_h264.c @@ -40,6 +40,7 @@ static void mix_videoformatenc_h264_init(MixVideoFormatEnc_H264 * self) { /* member initialization */ self->encoded_frames = 0; + self->frame_num = 0; self->pic_skipped = FALSE; self->is_intra = TRUE; self->cur_frame = NULL; @@ -227,6 +228,24 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, goto cleanup; } + ret = mix_videoconfigparamsenc_h264_get_I_slice_num (config_params_enc_h264, + &self->I_slice_num); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n"); + goto cleanup; + } + + ret = mix_videoconfigparamsenc_h264_get_P_slice_num (config_params_enc_h264, + &self->P_slice_num); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n"); + goto cleanup; + } + ret = mix_videoconfigparamsenc_h264_get_delimiter_type (config_params_enc_h264, &self->delimiter_type); @@ -254,6 +273,10 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, self->disable_deblocking_filter_idc); LOG_I( "self->slice_num = %d\n", self->slice_num); + LOG_I( "self->I_slice_num = %d\n", + self->I_slice_num); + LOG_I( "self->P_slice_num = %d\n", + self->P_slice_num); LOG_I ("self->delimiter_type = %d\n", self->delimiter_type); LOG_I ("self->idr_interval = %d\n", @@ -423,6 +446,19 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc 
*mix, goto cleanup; } + + if (parent->va_rcmode == VA_RC_VCM) { + + /* + * Following three features are only enabled in VCM mode + */ + parent->render_mss_required = TRUE; + parent->render_AIR_required = TRUE; + parent->render_bitrate_required = TRUE; + self->slice_num = (parent->picture_height + 15) / 16; //if we are in VCM, we will set slice num to max value + } + + /*TODO: compute the surface number*/ int numSurfaces; @@ -802,6 +838,7 @@ MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { #endif /*reset the properities*/ self->encoded_frames = 0; + self->frame_num = 0; self->pic_skipped = FALSE; self->is_intra = TRUE; @@ -1103,6 +1140,8 @@ MIX_RESULT mix_videofmtenc_h264_send_slice_parameter (MixVideoFormatEnc_H264 *mi guint slice_height; guint slice_index; guint slice_height_in_mb; + guint max_slice_num; + guint min_slice_num; if (mix == NULL) { LOG_E("mix == NULL\n"); @@ -1119,25 +1158,32 @@ MIX_RESULT mix_videofmtenc_h264_send_slice_parameter (MixVideoFormatEnc_H264 *mi parent = MIX_VIDEOFORMATENC(&(mix->parent)); - slice_num = mix->slice_num; - slice_height = parent->picture_height / slice_num; + max_slice_num = (parent->picture_height + 15) / 16; + min_slice_num = 1; - slice_height += 15; - slice_height &= (~15); + if (mix->is_intra) { + slice_num = mix->I_slice_num; + } + else { + slice_num = mix->P_slice_num; + } - slice_num = mix->slice_num = (parent->picture_height + 15) / slice_height; + if (slice_num < min_slice_num) { + LOG_W ("Slice Number is too small"); + slice_num = min_slice_num; + } -#if 0 - if (!mix->is_intra){ - slice_num = 9; + if (slice_num > max_slice_num) { + LOG_W ("Slice Number is too big"); + slice_num = max_slice_num; + } - slice_height = parent->picture_height / slice_num; + slice_height = parent->picture_height / slice_num; - slice_height += 15; - slice_height &= (~15); + slice_height += 15; + slice_height &= (~15); - } -#endif + slice_num = mix->slice_num = (parent->picture_height + 15) / slice_height; #if 1 va_status = vaCreateBuffer (parent->va_display, parent->va_context, @@ -1285,13 +1331,20 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, LOG_I( "encoded_frames = %d\n", mix->encoded_frames); + LOG_I( "frame_num = %d\n", + mix->frame_num); LOG_I( "is_intra = %d\n", mix->is_intra); LOG_I( "ci_frame_id = 0x%08x\n", (guint) parent->ci_frame_id); + if (parent->new_header_required) { + mix->frame_num = 0; + } + /* determine the picture type*/ - if ((mix->encoded_frames % parent->intra_period) == 0) { + //if ((mix->encoded_frames % parent->intra_period) == 0) { + if ((mix->frame_num % parent->intra_period) == 0) { mix->is_intra = TRUE; } else { mix->is_intra = FALSE; @@ -1648,6 +1701,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, if (mix->encoded_frames == 0) { mix->encoded_frames ++; + mix->frame_num ++; mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; mix->coded_buf_index ++; mix->coded_buf_index %=2; @@ -1656,7 +1710,8 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, /* determine the picture type*/ - if ((mix->encoded_frames % parent->intra_period) == 0) { + //if ((mix->encoded_frames % parent->intra_period) == 0) { + if ((mix->frame_num % parent->intra_period) == 0) { mix->is_intra = TRUE; } else { mix->is_intra = FALSE; @@ -1697,6 +1752,8 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, int num_seg = 0; guint total_size = 0; guint size = 0; + guint status = 0; + gboolean slice_size_overflow = FALSE; 
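
The slice logic above clamps the requested count (I_slice_num or P_slice_num, depending on picture type) to [1, picture-height-in-macroblocks], then re-derives the effective count after rounding the slice height up to a 16-pixel macroblock multiple. A worked example with an assumed 720-line picture and 7 requested slices:

    guint picture_height = 720, slice_num = 7;        /* assumed inputs            */
    guint slice_height = picture_height / slice_num;  /* 102                       */
    slice_height += 15;
    slice_height &= ~15;                              /* 112 = 7 macroblock rows   */
    slice_num = (picture_height + 15) / slice_height; /* 6 effective slices        */

So a request for 7 slices yields 6: five of 112 lines, with the final slice presumably absorbing the remaining 160 lines — which is why slice_num is written back after the rounding.
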
coded_seg = (VACodedBufferSegment *)buf; num_seg = 1; @@ -1704,6 +1761,13 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, while (1) { total_size += coded_seg->size; + status = coded_seg->status; + + if (!slice_size_overflow) { + + slice_size_overflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK; + } + if (coded_seg->next == NULL) break; @@ -1887,10 +1951,10 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, } - VASurfaceStatus status; + VASurfaceStatus va_surface_status; /*query the status of current surface*/ - va_status = vaQuerySurfaceStatus(va_display, surface, &status); + va_status = vaQuerySurfaceStatus(va_display, surface, &va_surface_status); if (va_status != VA_STATUS_SUCCESS) { LOG_E( @@ -1898,7 +1962,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, ret = MIX_RESULT_FAIL; goto cleanup; } - mix->pic_skipped = status & VASurfaceSkipped; + mix->pic_skipped = va_surface_status & VASurfaceSkipped; if (parent->need_display) { ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); @@ -1933,6 +1997,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, #endif mix->encoded_frames ++; + mix->frame_num ++; mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; mix->coded_buf_index ++; mix->coded_buf_index %=2; @@ -1965,6 +2030,14 @@ cleanup: LOG_V( "end\n"); + /* + * The error level of MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW + * is lower than other errors, so if any other errors happen, we won't + * return slice size overflow + */ + if (ret == MIX_RESULT_SUCCESS && slice_size_overflow) + ret = MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW; + return ret; } @@ -2170,7 +2243,8 @@ MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix if (!MIX_IS_VIDEOFORMATENC_H264(mix)) return MIX_RESULT_INVALID_PARAM; - if (mix->encoded_frames == 0 || parent->new_header_required) { + //if (mix->encoded_frames == 0 || parent->new_header_required) { + if (mix->frame_num == 0 || parent->new_header_required) { ret = mix_videofmtenc_h264_send_seq_params (mix); if (ret != MIX_RESULT_SUCCESS) { @@ -2182,6 +2256,58 @@ MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix parent->new_header_required = FALSE; //Set to require new header filed to FALSE } + if (parent->render_mss_required && parent->max_slice_size != 0) { + ret = mix_videofmtenc_h264_send_max_slice_size(mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_max_slice_size\n"); + return MIX_RESULT_FAIL; + } + + parent->render_mss_required = FALSE; + } + + if (parent->render_bitrate_required) { + ret = mix_videofmtenc_h264_send_dynamic_bitrate(mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_dynamic_bitrate\n"); + return MIX_RESULT_FAIL; + } + + parent->render_bitrate_required = FALSE; + } + + if (parent->render_AIR_required && + (parent->refresh_type == MIX_VIDEO_AIR || parent->refresh_type == MIX_VIDEO_BOTH)) + { + + ret = mix_videofmtenc_h264_send_AIR (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_AIR\n"); + return MIX_RESULT_FAIL; + } + + parent->render_AIR_required = FALSE; + } + + if (parent->render_framerate_required) { + + ret = mix_videofmtenc_h264_send_dynamic_framerate (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_dynamic_framerate\n"); + return MIX_RESULT_FAIL; + } + + parent->render_framerate_required = FALSE; 
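
Each render_*_required flag set above is serviced on the next encode call by the same libva misc-parameter sequence — create with NULL data, map, fill in place, unmap, render — implemented in full for the bitrate, max-slice-size, AIR and frame-rate cases below. A condensed sketch, where dpy, ctx and bitrate are stand-ins for the corresponding parent fields:

    VAEncMiscParameterBuffer *misc;
    VABufferID buf_id;
    vaCreateBuffer(dpy, ctx, VAEncMiscParameterBufferType,
                   sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl),
                   1, NULL, &buf_id);                  /* allocate, payload unset  */
    vaMapBuffer(dpy, buf_id, (void **)&misc);          /* fill the payload in place */
    misc->type = VAEncMiscParameterTypeRateControl;
    ((VAEncMiscParameterRateControl *)misc->data)->bits_per_second = bitrate;
    vaUnmapBuffer(dpy, buf_id);
    vaRenderPicture(dpy, ctx, &buf_id, 1);             /* queue for this picture   */

Note that the AIR and frame-rate variants below size their buffers with sizeof(misc_enc_param_buf) — the size of a pointer — where sizeof(VAEncMiscParameterBuffer) appears intended.
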
+ } + ret = mix_videofmtenc_h264_send_picture_parameter (mix); if (ret != MIX_RESULT_SUCCESS) @@ -2234,18 +2360,20 @@ MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, } /* - * For case params_type == MIX_ENC_PARAMS_SLICE_SIZE + * For case params_type == MIX_ENC_PARAMS_SLICE_NUM * we don't need to chain up to parent method, as we will handle * dynamic slice height change inside this method, and other dynamic * controls will be handled in parent method. */ - if (params_type == MIX_ENC_PARAMS_SLICE_SIZE) { + if (params_type == MIX_ENC_PARAMS_SLICE_NUM) { g_mutex_lock(parent->objectlock); ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, &self->slice_num); + self->I_slice_num = self->P_slice_num = self->slice_num; + if (ret != MIX_RESULT_SUCCESS) { LOG_E( "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); @@ -2257,6 +2385,44 @@ MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, g_mutex_unlock(parent->objectlock); + } + else if (params_type == MIX_ENC_PARAMS_I_SLICE_NUM) { + + g_mutex_lock(parent->objectlock); + + ret = mix_videoconfigparamsenc_h264_get_I_slice_num (config_params_enc_h264, + &self->I_slice_num); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n"); + + g_mutex_unlock(parent->objectlock); + + return ret; + } + + g_mutex_unlock(parent->objectlock); + + } + else if (params_type == MIX_ENC_PARAMS_P_SLICE_NUM) { + + g_mutex_lock(parent->objectlock); + + ret = mix_videoconfigparamsenc_h264_get_P_slice_num (config_params_enc_h264, + &self->P_slice_num); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n"); + + g_mutex_unlock(parent->objectlock); + + return ret; + } + + g_mutex_unlock(parent->objectlock); + } else if (params_type == MIX_ENC_PARAMS_IDR_INTERVAL) { g_mutex_lock(parent->objectlock); @@ -2300,3 +2466,319 @@ MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, } +MIX_RESULT mix_videofmtenc_h264_send_dynamic_bitrate (MixVideoFormatEnc_H264 *mix) +{ + VAStatus va_status; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + MixVideoFormatEnc *parent = NULL; + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { + + LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_dynamic_bitrate\n"); + return VA_STATUS_SUCCESS; + } + + VAEncMiscParameterBuffer * misc_enc_param_buf; + VAEncMiscParameterRateControl * bitrate_control_param; + VABufferID misc_param_buffer_id; + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncMiscParameterBufferType, + sizeof(VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl), + 1, NULL, + &misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + misc_enc_param_buf->type = VAEncMiscParameterTypeRateControl; + bitrate_control_param = (VAEncMiscParameterRateControl *)misc_enc_param_buf->data; + + bitrate_control_param->bits_per_second = parent->bitrate; + bitrate_control_param->initial_qp = 
parent->initial_qp; + bitrate_control_param->min_qp = parent->min_qp; + bitrate_control_param->target_percentage = parent->target_percentage; + bitrate_control_param->window_size = parent->window_size; + + va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &misc_param_buffer_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videofmtenc_h264_send_max_slice_size (MixVideoFormatEnc_H264 *mix) +{ + VAStatus va_status; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + MixVideoFormatEnc *parent = NULL; + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { + + LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_max_slice_size\n"); + return VA_STATUS_SUCCESS; + } + + VAEncMiscParameterBuffer * misc_enc_param_buf; + VAEncMiscParameterMaxSliceSize * max_slice_size_param; + VABufferID misc_param_buffer_id; + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncMiscParameterBufferType, + sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterMaxSliceSize), + 1, NULL, + &misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + + va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + misc_enc_param_buf->type = VAEncMiscParameterTypeMaxSliceSize; + max_slice_size_param = (VAEncMiscParameterMaxSliceSize *)misc_enc_param_buf->data; + + max_slice_size_param->max_slice_size = parent->max_slice_size; + + va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + LOG_I( "max slice size = %d\n", + max_slice_size_param->max_slice_size); + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &misc_param_buffer_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videofmtenc_h264_send_AIR (MixVideoFormatEnc_H264 *mix) +{ + VAStatus va_status; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + MixVideoFormatEnc *parent = NULL; + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + + if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { + + LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_AIR\n"); + return VA_STATUS_SUCCESS; + } + + VAEncMiscParameterBuffer * misc_enc_param_buf; + VAEncMiscParameterAIR * air_param; + VABufferID misc_param_buffer_id; + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncMiscParameterBufferType, + sizeof(misc_enc_param_buf) + sizeof(VAEncMiscParameterAIR), + 1, NULL, + &misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to 
vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + misc_enc_param_buf->type = VAEncMiscParameterTypeAIR; + air_param = (VAEncMiscParameterAIR *)misc_enc_param_buf->data; + + air_param->air_auto = parent->air_params.air_auto; + air_param->air_num_mbs = parent->air_params.air_MBs; + air_param->air_threshold = parent->air_params.air_threshold; + + va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &misc_param_buffer_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + + LOG_I( "air_threshold = %d\n", + air_param->air_threshold); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_h264_send_dynamic_framerate (MixVideoFormatEnc_H264 *mix) +{ + VAStatus va_status; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + + MixVideoFormatEnc *parent = NULL; + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { + + LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_dynamic_framerate\n"); + return VA_STATUS_SUCCESS; + } + + VAEncMiscParameterBuffer * misc_enc_param_buf; + VAEncMiscParameterFrameRate * framerate_param; + VABufferID misc_param_buffer_id; + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncMiscParameterBufferType, + sizeof(misc_enc_param_buf) + sizeof(VAEncMiscParameterFrameRate), + 1, NULL, + &misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + misc_enc_param_buf->type = VAEncMiscParameterTypeFrameRate; + framerate_param = (VAEncMiscParameterFrameRate *)misc_enc_param_buf->data; + framerate_param->framerate = + (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; + + va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &misc_param_buffer_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + + LOG_I( "frame rate = %d\n", + framerate_param->framerate); + + return MIX_RESULT_SUCCESS; + +} + diff --git a/mix_video/src/mixvideoformatenc_h264.h b/mix_video/src/mixvideoformatenc_h264.h index 2e7b12d..6cd9d83 100644 --- a/mix_video/src/mixvideoformatenc_h264.h +++ b/mix_video/src/mixvideoformatenc_h264.h @@ -40,14 +40,14 @@ struct _MixVideoFormatEnc_H264 { VABufferID last_coded_buf; VABufferID seq_param_buf; VABufferID pic_param_buf; - VABufferID slice_param_buf; + VABufferID slice_param_buf; VASurfaceID * ci_shared_surfaces; VASurfaceID * surfaces; - guint 
surface_num; + guint surface_num; - MixVideoFrame *cur_frame; //current input frame to be encoded; + MixVideoFrame *cur_frame; //current input frame to be encoded; MixVideoFrame *ref_frame; //reference frame - MixVideoFrame *rec_frame; //reconstructed frame; + MixVideoFrame *rec_frame; //reconstructed frame; MixVideoFrame *last_frame; //last frame; MixVideoFrame *lookup_frame; #ifdef ANDROID @@ -59,9 +59,12 @@ struct _MixVideoFormatEnc_H264 { MixDelimiterType delimiter_type; guint idr_interval; guint slice_num; - guint va_rcmode; + guint I_slice_num; + guint P_slice_num; + guint va_rcmode; guint encoded_frames; + guint frame_num; gboolean pic_skipped; gboolean is_intra; @@ -123,7 +126,7 @@ MixVideoFormatEnc_H264 *mix_videoformatenc_h264_ref(MixVideoFormatEnc_H264 * mix /* H.264 vmethods */ MIX_RESULT mix_videofmtenc_h264_getcaps(MixVideoFormatEnc *mix, GString *msg); -MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, +MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, MixVideoConfigParamsEnc * config_params_enc, MixFrameManager * frame_mgr, MixBufferPool * input_buf_pool, @@ -135,20 +138,25 @@ MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix); MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix); MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, +MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params_enc, MixEncParamsType params_type); /* Local Methods */ MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint *max_size); -MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, MixBuffer * bufin, +MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, MixBuffer * bufin, MixIOVec * iovout); MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed ( guint8 * bufin, guint bufin_len, guint8* bufout, guint *bufout_len); MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix); +MIX_RESULT mix_videofmtenc_h264_send_dynamic_bitrate (MixVideoFormatEnc_H264 *mix); +MIX_RESULT mix_videofmtenc_h264_send_max_slice_size (MixVideoFormatEnc_H264 *mix); +MIX_RESULT mix_videofmtenc_h264_send_dynamic_framerate (MixVideoFormatEnc_H264 *mix); +MIX_RESULT mix_videofmtenc_h264_send_AIR (MixVideoFormatEnc_H264 *mix); + G_END_DECLS #endif /* __MIX_VIDEOFORMATENC_H264_H__ */ -- cgit v1.2.3 From 04dfaaf74b8ea818ea398c47eaab29c52333c989 Mon Sep 17 00:00:00 2001 From: Yanlong Fang Date: Thu, 2 Dec 2010 16:25:34 +0800 Subject: updated vbp mpeg4 parser to fix short video header start code emulation error Change-Id: I3a95f9eb51573f607fb6ab2c6bc1def5386e003e Signed-off-by: Yanlong Fang --- mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index 1acfd9b..5a4e358 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -45,7 +45,8 @@ uint32 vbp_get_sc_pos_mp42( uint32 length, uint32 *sc_end_pos, uint8 *is_normal_sc, - uint8* resync_marker); + uint8* resync_marker, + const bool svh_search); void vbp_on_vop_mp42(vbp_context *pcontext, int list_index); void 
vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index); @@ -249,7 +250,7 @@ uint32 vbp_parse_start_code_mp42(vbp_context *pcontext) while (1) { found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size- bytes_parsed, - &sc_end_pos, &is_normal_sc, &resync_marker); + &sc_end_pos, &is_normal_sc, &resync_marker, short_video_header); if (found_sc) { @@ -628,7 +629,8 @@ uint32 vbp_get_sc_pos_mp42( uint32 length, uint32 *sc_end_pos, uint8 *is_normal_sc, - uint8 *resync_marker) + uint8 *resync_marker, + const bool svh_search) { uint8 *ptr = buf; uint32 size; @@ -709,8 +711,10 @@ uint32 vbp_get_sc_pos_mp42( if (phase == 2) { normal_sc = (*ptr == THIRD_STARTCODE_BYTE); - short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC)); - + if (svh_search) + { + short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC)); + } *is_normal_sc = normal_sc; // at least 16-bit 0, may be GOB start code or -- cgit v1.2.3 From 40509edb199de8be556dafabe65331f531ab3af7 Mon Sep 17 00:00:00 2001 From: $ Date: Sun, 2 Jan 2011 18:07:21 -0800 Subject: Fix module tags line Change-Id: I057d6e696fa2322c0474b775e7d4d10990cb6d6c Signed-off-by: $ --- mix_audio/src/Android.mk | 1 + mix_common/src/Android.mk | 1 + mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk | 1 + mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk | 1 + mix_vbp/viddec_fw/fw/parser/Android.mk | 1 + mix_video/src/Android.mk | 1 + 6 files changed, 6 insertions(+) diff --git a/mix_audio/src/Android.mk b/mix_audio/src/Android.mk index 08f3566..818ae8e 100644 --- a/mix_audio/src/Android.mk +++ b/mix_audio/src/Android.mk @@ -48,5 +48,6 @@ LOCAL_COPY_HEADERS := \ # pvt.h \ LOCAL_MODULE := libmixaudio +LOCAL_MODULE_TAGS := optional include $(BUILD_SHARED_LIBRARY) diff --git a/mix_common/src/Android.mk b/mix_common/src/Android.mk index 7f2bc52..c29bf6f 100644 --- a/mix_common/src/Android.mk +++ b/mix_common/src/Android.mk @@ -28,5 +28,6 @@ LOCAL_COPY_HEADERS := \ mixdrmparams.h LOCAL_MODULE := libmixcommon +LOCAL_MODULE_TAGS := optional include $(BUILD_SHARED_LIBRARY) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk b/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk index b7c15d6..bacf49b 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk @@ -27,6 +27,7 @@ LOCAL_C_INCLUDES := \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/h264/include LOCAL_MODULE := libmixvbp_h264 +LOCAL_MODULE_TAGS := optional LOCAL_SHARED_LIBRARIES := \ libglib-2.0 \ diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk index 23c0c52..8b2bfe3 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk @@ -24,6 +24,7 @@ LOCAL_C_INCLUDES := \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/include LOCAL_MODULE := libmixvbp_mpeg4 +LOCAL_MODULE_TAGS := optional LOCAL_SHARED_LIBRARIES := \ libglib-2.0 \ diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk index cc9ba5a..e577932 100644 --- a/mix_vbp/viddec_fw/fw/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -42,6 +42,7 @@ LOCAL_COPY_HEADERS := \ vbp_loader.h LOCAL_MODULE := libmixvbp +LOCAL_MODULE_TAGS := optional LOCAL_SHARED_LIBRARIES := \ libdl \ diff --git a/mix_video/src/Android.mk b/mix_video/src/Android.mk index 0f060fc..2c07cd9 100644 --- a/mix_video/src/Android.mk +++ b/mix_video/src/Android.mk @@ -115,5 +115,6 @@ LOCAL_COPY_HEADERS := \ 
mixvideorenderparams_internal.h LOCAL_MODULE := libmixvideo +LOCAL_MODULE_TAGS := optional include $(BUILD_SHARED_LIBRARY) -- cgit v1.2.3 From 71b67cac360ae7ca7699dfbe2465c126ec66deaf Mon Sep 17 00:00:00 2001 From: "Liu, Shuo" Date: Mon, 31 Jan 2011 12:25:05 +0800 Subject: Enable video playback in libmix Change-Id: If7dd717b2d733ce9a0a2a434730c804d3bc7ea00 Signed-off-by: Liu, Shuo --- Android.mk | 2 +- mix_audio/AUTHORS | 1 - mix_audio/COPYING | 26 - mix_audio/ChangeLog | 139 - mix_audio/INSTALL | 4 - mix_audio/Makefile.am | 7 - mix_audio/NEWS | 1 - mix_audio/README | 2 - mix_audio/autogen.sh | 19 - mix_audio/configure.ac | 137 - mix_audio/docs/Makefile.am | 4 - mix_audio/docs/reference/Makefile.am | 4 - mix_audio/docs/reference/MixAudio/Makefile.am | 96 - .../docs/reference/MixAudio/MixAudio-docs.sgml | 39 - .../docs/reference/MixAudio/MixAudio-sections.txt | 187 -- mix_audio/docs/reference/MixAudio/MixAudio.types | 6 - .../html/MixAudio-MixAudioConfigParams.html | 689 ----- .../html/MixAudio-MixAudioConfigParamsAAC.html | 823 ------ .../html/MixAudio-MixAudioConfigParamsMP3.html | 221 -- .../html/MixAudio-MixAudioConfigParamsWMA.html | 391 --- .../MixAudio/html/MixAudio-MixAudioInitParams.html | 139 - .../MixAudio/html/MixAudio-mixaudiotypes.html | 94 - .../docs/reference/MixAudio/html/MixAudio.devhelp | 124 - .../docs/reference/MixAudio/html/MixAudio.devhelp2 | 186 -- .../docs/reference/MixAudio/html/MixAudio.html | 1286 --------- .../reference/MixAudio/html/api-index-full.html | 259 -- mix_audio/docs/reference/MixAudio/html/ch01.html | 56 - mix_audio/docs/reference/MixAudio/html/index.html | 60 - mix_audio/docs/reference/MixAudio/html/index.sgml | 134 - mix_audio/docs/reference/MixAudio/html/style.css | 167 -- .../reference/MixAudio/html/tree-hierarchy.html | 37 - mix_audio/m4/Makefile.am | 1 - mix_audio/m4/as-mix-version.m4 | 35 - mix_audio/mixaudio.spec | 56 - mix_audio/pkgconfig/Makefile.am | 11 - mix_audio/pkgconfig/mixaudio.pc.in | 12 - mix_audio/src/Android.mk | 53 - mix_audio/src/Makefile.am | 61 - mix_audio/src/amhelper.c | 120 - mix_audio/src/amhelper.h | 25 - mix_audio/src/intel_sst_ioctl.h | 337 --- mix_audio/src/mixacp.c | 355 --- mix_audio/src/mixacp.h | 367 --- mix_audio/src/mixacpaac.c | 364 --- mix_audio/src/mixacpaac.h | 413 --- mix_audio/src/mixacpmp3.c | 178 -- mix_audio/src/mixacpmp3.h | 170 -- mix_audio/src/mixacpwma.c | 208 -- mix_audio/src/mixacpwma.h | 235 -- mix_audio/src/mixaip.c | 167 -- mix_audio/src/mixaip.h | 132 - mix_audio/src/mixaudio.c | 2511 ------------------ mix_audio/src/mixaudio.h | 575 ---- mix_audio/src/mixaudiotypes.h | 27 - mix_audio/src/pvt.h | 9 - mix_audio/src/sst_proxy.c | 437 --- mix_audio/src/sst_proxy.h | 17 - mix_audio/tests/Makefile.am | 2 - mix_audio/tests/smoke/Makefile.am | 25 - mix_audio/tests/smoke/mixaudiosmoke.c | 77 - mix_common/src/Android.mk | 21 +- mix_common/src/mixdrmparams.c | 163 -- mix_common/src/mixdrmparams.cpp | 45 + mix_common/src/mixdrmparams.h | 68 +- mix_common/src/mixlog.c | 260 -- mix_common/src/mixlog.cpp | 263 ++ mix_common/src/mixlog.h | 1 + mix_common/src/mixparams.c | 274 -- mix_common/src/mixparams.cpp | 127 + mix_common/src/mixparams.h | 165 +- mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h | 4 + .../viddec_fw/fw/codecs/h264/include/h264parse.h | 23 +- .../fw/codecs/h264/include/h264parse_dpb.h | 4 + mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk | 2 +- .../fw/codecs/h264/parser/h264parse_math.c | 6 +- .../fw/codecs/h264/parser/h264parse_mem.c | 6 +- .../fw/codecs/h264/parser/h264parse_sei.c | 8 +- 
.../fw/codecs/h264/parser/h264parse_sps.c | 4 +- .../fw/codecs/h264/parser/mix_vbp_h264_stubs.c | 26 +- mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk | 2 +- .../fw/codecs/mp4/parser/viddec_fw_mp4_workload.c | 20 +- .../fw/codecs/mp4/parser/viddec_mp4_parse.c | 2 + .../fw/codecs/mp4/parser/viddec_mp4_shortheader.c | 2 +- .../mp4/parser/viddec_mp4_videoobjectlayer.c | 35 +- .../mp4/parser/viddec_mp4_videoobjectplane.c | 6 +- .../fw/codecs/mp4/parser/viddec_mp4_visualobject.c | 10 +- mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h | 2 +- mix_vbp/viddec_fw/fw/parser/Android.mk | 2 +- .../viddec_fw/fw/parser/include/viddec_emitter.h | 4 +- .../fw/parser/include/viddec_parser_ops.h | 25 +- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 3 + mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h | 2 + mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 5 + mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 20 +- mix_vbp/viddec_fw/fw/parser/vbp_trace.h | 2 +- mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 5 +- mix_vbp/viddec_fw/include/viddec_fw_common_defs.h | 6 +- mix_vbp/viddec_fw/include/viddec_fw_item_types.h | 4 +- mix_video/src/Android.mk | 87 +- mix_video/src/mixbuffer.c | 233 -- mix_video/src/mixbuffer.cpp | 144 + mix_video/src/mixbuffer.h | 77 +- mix_video/src/mixbuffer_private.h | 18 +- mix_video/src/mixbufferpool.c | 484 ---- mix_video/src/mixbufferpool.cpp | 377 +++ mix_video/src/mixbufferpool.h | 90 +- mix_video/src/mixdisplay.c | 539 ---- mix_video/src/mixdisplay.cpp | 130 + mix_video/src/mixdisplay.h | 170 +- mix_video/src/mixdisplayandroid.c | 197 -- mix_video/src/mixdisplayandroid.cpp | 158 ++ mix_video/src/mixdisplayandroid.h | 115 +- mix_video/src/mixdisplayx11.c | 210 -- mix_video/src/mixdisplayx11.cpp | 210 ++ mix_video/src/mixframemanager.c | 794 ------ mix_video/src/mixframemanager.cpp | 648 +++++ mix_video/src/mixframemanager.h | 52 +- mix_video/src/mixsurfacepool.c | 668 ----- mix_video/src/mixsurfacepool.cpp | 564 ++++ mix_video/src/mixsurfacepool.h | 84 +- mix_video/src/mixvideo.c | 2193 --------------- mix_video/src/mixvideo.cpp | 2113 +++++++++++++++ mix_video/src/mixvideo.h | 72 +- mix_video/src/mixvideo_private.h | 20 +- mix_video/src/mixvideocaps.c | 267 -- mix_video/src/mixvideocaps.cpp | 162 ++ mix_video/src/mixvideocaps.h | 72 +- mix_video/src/mixvideoconfigparams.c | 176 -- mix_video/src/mixvideoconfigparams.cpp | 86 + mix_video/src/mixvideoconfigparams.h | 75 +- mix_video/src/mixvideoconfigparamsdec.c | 649 ----- mix_video/src/mixvideoconfigparamsdec.cpp | 498 ++++ mix_video/src/mixvideoconfigparamsdec.h | 64 +- mix_video/src/mixvideoconfigparamsdec_h264.c | 213 -- mix_video/src/mixvideoconfigparamsdec_h264.cpp | 66 + mix_video/src/mixvideoconfigparamsdec_h264.h | 65 +- mix_video/src/mixvideoconfigparamsdec_mp42.c | 245 -- mix_video/src/mixvideoconfigparamsdec_mp42.cpp | 112 + mix_video/src/mixvideoconfigparamsdec_mp42.h | 65 +- mix_video/src/mixvideoconfigparamsdec_vc1.c | 189 -- mix_video/src/mixvideoconfigparamsdec_vc1.cpp | 71 + mix_video/src/mixvideoconfigparamsdec_vc1.h | 74 +- mix_video/src/mixvideoconfigparamsenc.c | 859 ------ mix_video/src/mixvideoconfigparamsenc.cpp | 514 ++++ mix_video/src/mixvideoconfigparamsenc.h | 51 +- mix_video/src/mixvideoconfigparamsenc_h263.c | 281 -- mix_video/src/mixvideoconfigparamsenc_h263.cpp | 106 + mix_video/src/mixvideoconfigparamsenc_h263.h | 49 +- mix_video/src/mixvideoconfigparamsenc_h264.c | 386 --- mix_video/src/mixvideoconfigparamsenc_h264.cpp | 172 ++ mix_video/src/mixvideoconfigparamsenc_h264.h | 48 +- 
mix_video/src/mixvideoconfigparamsenc_mpeg4.c | 301 --- mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp | 301 +++ mix_video/src/mixvideoconfigparamsenc_mpeg4.h | 47 +- mix_video/src/mixvideoconfigparamsenc_preview.c | 217 -- mix_video/src/mixvideoconfigparamsenc_preview.cpp | 217 ++ mix_video/src/mixvideodecodeparams.c | 220 -- mix_video/src/mixvideodecodeparams.cpp | 125 + mix_video/src/mixvideodecodeparams.h | 72 +- mix_video/src/mixvideodef.h | 4 +- mix_video/src/mixvideoencodeparams.c | 209 -- mix_video/src/mixvideoencodeparams.cpp | 209 ++ mix_video/src/mixvideoformat.c | 424 --- mix_video/src/mixvideoformat.cpp | 293 ++ mix_video/src/mixvideoformat.h | 171 +- mix_video/src/mixvideoformat_h264.c | 1814 ------------- mix_video/src/mixvideoformat_h264.cpp | 1393 ++++++++++ mix_video/src/mixvideoformat_h264.h | 129 +- mix_video/src/mixvideoformat_mp42.c | 1415 ---------- mix_video/src/mixvideoformat_mp42.cpp | 1064 ++++++++ mix_video/src/mixvideoformat_mp42.h | 111 +- mix_video/src/mixvideoformat_vc1.c | 1813 ------------- mix_video/src/mixvideoformat_vc1.cpp | 1364 ++++++++++ mix_video/src/mixvideoformat_vc1.h | 109 +- mix_video/src/mixvideoformatenc.c | 884 ------- mix_video/src/mixvideoformatenc.cpp | 884 +++++++ mix_video/src/mixvideoformatenc_h263.c | 1867 ------------- mix_video/src/mixvideoformatenc_h263.cpp | 1862 +++++++++++++ mix_video/src/mixvideoformatenc_h264.c | 2784 -------------------- mix_video/src/mixvideoformatenc_h264.cpp | 2775 +++++++++++++++++++ mix_video/src/mixvideoformatenc_mpeg4.c | 1824 ------------- mix_video/src/mixvideoformatenc_mpeg4.cpp | 1813 +++++++++++++ mix_video/src/mixvideoformatenc_preview.c | 1187 --------- mix_video/src/mixvideoformatenc_preview.cpp | 1187 +++++++++ mix_video/src/mixvideoformatqueue.h | 11 +- mix_video/src/mixvideoframe.c | 502 ---- mix_video/src/mixvideoframe.cpp | 362 +++ mix_video/src/mixvideoframe.h | 127 +- mix_video/src/mixvideoframe_private.h | 43 +- mix_video/src/mixvideoinitparams.c | 222 -- mix_video/src/mixvideoinitparams.cpp | 126 + mix_video/src/mixvideoinitparams.h | 116 +- mix_video/src/mixvideolog.h | 3 +- mix_video/src/mixvideorenderparams.c | 422 --- mix_video/src/mixvideorenderparams.cpp | 299 +++ mix_video/src/mixvideorenderparams.h | 116 +- mix_video/src/mixvideorenderparams_internal.h | 23 +- mix_video/src/mixvideothread.cpp | 50 + mix_video/src/mixvideothread.h | 45 + mix_video/src/test.c | 87 - mix_video/src/test.cpp | 87 + mix_video/test/src/test_framemanager.c | 200 -- mix_video/test/src/test_framemanager.cpp | 200 ++ 203 files changed, 22148 insertions(+), 39991 deletions(-) delete mode 100644 mix_audio/AUTHORS delete mode 100644 mix_audio/COPYING delete mode 100644 mix_audio/ChangeLog delete mode 100644 mix_audio/INSTALL delete mode 100644 mix_audio/Makefile.am delete mode 100644 mix_audio/NEWS delete mode 100644 mix_audio/README delete mode 100644 mix_audio/autogen.sh delete mode 100644 mix_audio/configure.ac delete mode 100644 mix_audio/docs/Makefile.am delete mode 100644 mix_audio/docs/reference/Makefile.am delete mode 100644 mix_audio/docs/reference/MixAudio/Makefile.am delete mode 100644 mix_audio/docs/reference/MixAudio/MixAudio-docs.sgml delete mode 100644 mix_audio/docs/reference/MixAudio/MixAudio-sections.txt delete mode 100644 mix_audio/docs/reference/MixAudio/MixAudio.types delete mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParams.html delete mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsAAC.html delete mode 100644 
mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsMP3.html delete mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsWMA.html delete mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioInitParams.html delete mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio-mixaudiotypes.html delete mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp delete mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp2 delete mode 100644 mix_audio/docs/reference/MixAudio/html/MixAudio.html delete mode 100644 mix_audio/docs/reference/MixAudio/html/api-index-full.html delete mode 100644 mix_audio/docs/reference/MixAudio/html/ch01.html delete mode 100644 mix_audio/docs/reference/MixAudio/html/index.html delete mode 100644 mix_audio/docs/reference/MixAudio/html/index.sgml delete mode 100644 mix_audio/docs/reference/MixAudio/html/style.css delete mode 100644 mix_audio/docs/reference/MixAudio/html/tree-hierarchy.html delete mode 100644 mix_audio/m4/Makefile.am delete mode 100644 mix_audio/m4/as-mix-version.m4 delete mode 100644 mix_audio/mixaudio.spec delete mode 100644 mix_audio/pkgconfig/Makefile.am delete mode 100644 mix_audio/pkgconfig/mixaudio.pc.in delete mode 100644 mix_audio/src/Android.mk delete mode 100644 mix_audio/src/Makefile.am delete mode 100644 mix_audio/src/amhelper.c delete mode 100644 mix_audio/src/amhelper.h delete mode 100644 mix_audio/src/intel_sst_ioctl.h delete mode 100644 mix_audio/src/mixacp.c delete mode 100644 mix_audio/src/mixacp.h delete mode 100644 mix_audio/src/mixacpaac.c delete mode 100644 mix_audio/src/mixacpaac.h delete mode 100644 mix_audio/src/mixacpmp3.c delete mode 100644 mix_audio/src/mixacpmp3.h delete mode 100644 mix_audio/src/mixacpwma.c delete mode 100644 mix_audio/src/mixacpwma.h delete mode 100644 mix_audio/src/mixaip.c delete mode 100644 mix_audio/src/mixaip.h delete mode 100644 mix_audio/src/mixaudio.c delete mode 100644 mix_audio/src/mixaudio.h delete mode 100644 mix_audio/src/mixaudiotypes.h delete mode 100644 mix_audio/src/pvt.h delete mode 100644 mix_audio/src/sst_proxy.c delete mode 100644 mix_audio/src/sst_proxy.h delete mode 100644 mix_audio/tests/Makefile.am delete mode 100644 mix_audio/tests/smoke/Makefile.am delete mode 100644 mix_audio/tests/smoke/mixaudiosmoke.c delete mode 100644 mix_common/src/mixdrmparams.c create mode 100644 mix_common/src/mixdrmparams.cpp delete mode 100644 mix_common/src/mixlog.c create mode 100644 mix_common/src/mixlog.cpp delete mode 100644 mix_common/src/mixparams.c create mode 100644 mix_common/src/mixparams.cpp delete mode 100644 mix_video/src/mixbuffer.c create mode 100644 mix_video/src/mixbuffer.cpp delete mode 100644 mix_video/src/mixbufferpool.c create mode 100644 mix_video/src/mixbufferpool.cpp delete mode 100644 mix_video/src/mixdisplay.c create mode 100644 mix_video/src/mixdisplay.cpp delete mode 100644 mix_video/src/mixdisplayandroid.c create mode 100644 mix_video/src/mixdisplayandroid.cpp delete mode 100644 mix_video/src/mixdisplayx11.c create mode 100644 mix_video/src/mixdisplayx11.cpp delete mode 100644 mix_video/src/mixframemanager.c create mode 100644 mix_video/src/mixframemanager.cpp delete mode 100644 mix_video/src/mixsurfacepool.c create mode 100644 mix_video/src/mixsurfacepool.cpp delete mode 100644 mix_video/src/mixvideo.c create mode 100644 mix_video/src/mixvideo.cpp delete mode 100644 mix_video/src/mixvideocaps.c create mode 100644 mix_video/src/mixvideocaps.cpp delete mode 100644 
mix_video/src/mixvideoconfigparams.c create mode 100644 mix_video/src/mixvideoconfigparams.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsdec.c create mode 100644 mix_video/src/mixvideoconfigparamsdec.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsdec_h264.c create mode 100644 mix_video/src/mixvideoconfigparamsdec_h264.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsdec_mp42.c create mode 100644 mix_video/src/mixvideoconfigparamsdec_mp42.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsdec_vc1.c create mode 100644 mix_video/src/mixvideoconfigparamsdec_vc1.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsenc.c create mode 100644 mix_video/src/mixvideoconfigparamsenc.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsenc_h263.c create mode 100644 mix_video/src/mixvideoconfigparamsenc_h263.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsenc_h264.c create mode 100644 mix_video/src/mixvideoconfigparamsenc_h264.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsenc_mpeg4.c create mode 100644 mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsenc_preview.c create mode 100644 mix_video/src/mixvideoconfigparamsenc_preview.cpp delete mode 100644 mix_video/src/mixvideodecodeparams.c create mode 100644 mix_video/src/mixvideodecodeparams.cpp delete mode 100644 mix_video/src/mixvideoencodeparams.c create mode 100644 mix_video/src/mixvideoencodeparams.cpp delete mode 100644 mix_video/src/mixvideoformat.c create mode 100644 mix_video/src/mixvideoformat.cpp delete mode 100644 mix_video/src/mixvideoformat_h264.c create mode 100644 mix_video/src/mixvideoformat_h264.cpp delete mode 100644 mix_video/src/mixvideoformat_mp42.c create mode 100644 mix_video/src/mixvideoformat_mp42.cpp delete mode 100644 mix_video/src/mixvideoformat_vc1.c create mode 100644 mix_video/src/mixvideoformat_vc1.cpp delete mode 100644 mix_video/src/mixvideoformatenc.c create mode 100644 mix_video/src/mixvideoformatenc.cpp delete mode 100644 mix_video/src/mixvideoformatenc_h263.c create mode 100644 mix_video/src/mixvideoformatenc_h263.cpp delete mode 100644 mix_video/src/mixvideoformatenc_h264.c create mode 100644 mix_video/src/mixvideoformatenc_h264.cpp delete mode 100644 mix_video/src/mixvideoformatenc_mpeg4.c create mode 100644 mix_video/src/mixvideoformatenc_mpeg4.cpp delete mode 100644 mix_video/src/mixvideoformatenc_preview.c create mode 100644 mix_video/src/mixvideoformatenc_preview.cpp delete mode 100644 mix_video/src/mixvideoframe.c create mode 100644 mix_video/src/mixvideoframe.cpp delete mode 100644 mix_video/src/mixvideoinitparams.c create mode 100644 mix_video/src/mixvideoinitparams.cpp delete mode 100644 mix_video/src/mixvideorenderparams.c create mode 100644 mix_video/src/mixvideorenderparams.cpp create mode 100644 mix_video/src/mixvideothread.cpp create mode 100644 mix_video/src/mixvideothread.h delete mode 100644 mix_video/src/test.c create mode 100644 mix_video/src/test.cpp delete mode 100644 mix_video/test/src/test_framemanager.c create mode 100644 mix_video/test/src/test_framemanager.cpp diff --git a/Android.mk b/Android.mk index 3e0347f..3b5ef37 100644 --- a/Android.mk +++ b/Android.mk @@ -6,6 +6,6 @@ VENDORS_INTEL_MRST_LIBMIX_ROOT := $(LOCAL_PATH) GLIB_TOP := hardware/intel/glib include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_common/src/Android.mk -include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_audio/src/Android.mk +#include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_audio/src/Android.mk include 
$(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_video/src/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_vbp/Android.mk diff --git a/mix_audio/AUTHORS b/mix_audio/AUTHORS deleted file mode 100644 index d74d027..0000000 --- a/mix_audio/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -echo.choi@intel.com diff --git a/mix_audio/COPYING b/mix_audio/COPYING deleted file mode 100644 index a4f852c..0000000 --- a/mix_audio/COPYING +++ /dev/null @@ -1,26 +0,0 @@ -INTEL SOFTWARE LICENSE AGREEMENT (Alpha, Beta, Prototype Site License) - -IMPORTANT - READ BEFORE COPYING, INSTALLING OR USING. -Do not use or load this software and any associated materials (collectively, the “Software”) until you have carefully read the following terms and conditions. By loading or using the Software, you agree to the terms of this Agreement. If you do not wish to so agree, do not install or use the Software. - - -LICENSE. Intel hereby grants you a limited, nontransferable, non-sublicenseable, nonexclusive, royalty-free, fully-paid license under Intel’s copyrights to use the Software on your organization’s computers solely for your organization’s internal evaluation and testing in connection with Intel products designed for the Software, and you may make a reasonable number of  copies of the Software for internal use only, subject to these conditions: -1. You may not copy, modify, rent, sell, distribute, externally display, externally perform or transfer any part of the Software except as provided in this Agreement, and you agree to prevent unauthorized copying of the Software. -2. You may not reverse engineer, decompile, or disassemble the Software. -3. You may not sublicense the Software. -4. The Software may include portions offered on terms in addition to those set out here, as set out in a license accompanying those portions. -5. You may not subject the Software, in whole or in part, to any license obligations of Open Source Software including without limitation combining or distributing the Software with Open Source Software in a manner that subjects the Software or any portion of the Software provided by Intel hereunder to any license obligations of such Open Source Software. "Open Source Software" means any software that requires as a condition of use, modification and/or distribution of such software that such software or other software incorporated into, derived from or distributed with such software (a) be disclosed or distributed in source code form; or (b) be licensed by the user to third parties for the purpose of making and/or distributing derivative works; or (c) be redistributable at no charge. Open Source Software includes, without limitation, software licensed or distributed under any of the following licenses or distribution models, or licenses or distribution models substantially similar to any of the following: (a) GNU’s General Public License (GPL) or Lesser/Library GPL (LGPL), (b) the Artistic License (e.g., PERL), (c) the Mozilla Public License, (d) the Netscape Public License, (e) the Sun Community Source License (SCSL), (f) the Sun Industry Source License (SISL), (g) the Apache Software license and (h) the Common Public License (CPL). -OWNERSHIP OF SOFTWARE AND COPYRIGHTS. Title to all copies of the Software remains with Intel or its suppliers. The Software is copyrighted and protected by the laws of the United States and other countries, and international treaty provisions. You may not remove any copyright notices from the Software. 
Intel may make changes to the Software, or to items referenced therein, at any time without notice, but is not obligated to support, update, upgrade or provide training for the Software. Except as otherwise expressly provided, Intel grants no express or implied right under Intel patents, copyrights, trademarks, or other intellectual property rights. -EXCLUSION OF WARRANTIES . THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY EXPRESS OR IMPLIED WARRANTY OF ANY KIND INCLUDING WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, OR FITNESS FOR A PARTICULAR PURPOSE. Intel does not warrant or assume responsibility for the accuracy or completeness of any information, text, graphics, links or other items contained within the Software. -LIMITATION OF LIABILITY. IN NO EVENT SHALL INTEL OR ITS SUPPLIERS BE LIABLE FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, LOST PROFITS, BUSINESS INTERRUPTION OR LOST INFORMATION) ARISING OUT OF THE USE OF OR INABILITY TO USE THE SOFTWARE, EVEN IF INTEL HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. SOME JURISDICTIONS PROHIBIT EXCLUSION OR LIMITATION OF LIABILITY FOR IMPLIED WARRANTIES OR CONSEQUENTIAL OR INCIDENTAL DAMAGES, SO THE ABOVE LIMITATION MAY NOT APPLY TO YOU. YOU MAY ALSO HAVE OTHER LEGAL RIGHTS THAT VARY FROM JURISDICTION TO JURISDICTION. THE SOFTWARE LICENSED HEREUNDER IS NOT DESIGNED OR INTENDED FOR USE IN ANY APPLICATION IN WHICH THE FAILURE OF THE SOFTWARE COULD LEAD TO PERSONAL INJURY OR DEATH. YOU SHALL INDEMNIFY AND HOLD INTEL AND THE INTEL PARTIES HARMLESS AGAINST ALL CLAIMS, COSTS, DAMAGES, AND EXPENSES, AND REASONABLE ATTORNEY FEES ARISING OUT OF, DIRECTLY OR INDIRECTLY, THE UNINTENDED USE OF THE SOFTWARE AND ANY CLAIM OF PRODUCT LIABILITY, PERSONAL INJURY OR DEATH ASSOCIATED WITH ANY UNINTENDED USE, EVEN IF SUCH CLAIM ALLEGES THAT AN INTEL PARTY WAS NEGLIGENT REGARDING THE DESIGN OR MANUFACTURE OF THE SOFTWARE. THE LIMITED REMEDIES, WARRANTY DISCLAIMER AND LIMITED LIABILITY ARE FUNDAMENTAL ELEMENTS OF THE BASIS OF THE BARGAIN BETWEEN INTEL AND YOU. INTEL WOULD NOT BE ABLE TO PROVIDE THE SOFTWARE WITHOUT SUCH LIMITATIONS. -TERMINATION OF THIS AGREEMENT. Intel may terminate this Agreement at any time if you violate its terms. Upon termination, you will immediately destroy the Software or return all copies of the Software to Intel (including providing certification of such destruction back to Intel). In the event of termination of this Agreement, all licenses granted to you hereunder shall immediately terminate. -APPLICABLE LAWS. Claims arising under this Agreement shall be governed by the laws of Delaware, excluding its principles of conflict of laws and the United Nations Convention on Contracts for the Sale of Goods. You may not export the Software in violation of applicable export laws and regulations. -GOVERNMENT RESTRICTED RIGHTS. The Software is provided with "RESTRICTED RIGHTS." Use, duplication or disclosure by the government is subject to restrictions as set forth in FAR52.227-14 and DFAR252.227-7013 et seq. or their successors. Use of the Software by the government constitutes acknowledgment of Intel's proprietary rights therein. Contractor or Manufacturer is Intel Corporation, 2200 Mission College Blvd., Santa Clara, CA 95052. -CONFIDENTIALITY. You shall not disclose the terms or existence of this Agreement or use Intel's name in any publications, advertisements, or other announcements without Intel's prior written consent. You do not have any rights to use any Intel trademarks or logos. -ASSIGNMENT. 
You may not delegate, assign or transfer this Agreement, the license(s) granted or any of your rights or duties hereunder, expressly, by implication, by operation of law, by way of merger (regardless of whether you are the surviving entity) or acquisition, or otherwise and any attempt to do so, without Intel’s express prior written consent, shall be null and void. Intel may assign this Agreement, and its rights and obligations hereunder, in its sole discretion. -ENTIRE AGREEMENT. The terms and conditions of this Agreement constitutes the entire agreement between the parties with respect to the subject matter hereof, and merges and supersedes all prior, contemporaneous agreements, understandings, negotiations and discussions. Neither of the parties hereto shall be bound by any conditions, definitions, warranties, understandings or representations with respect to the subject matter hereof other than as expressly provided for herein. Intel is not obligated under any other agreements unless they are in writing and signed by an authorized representative of Intel. -NO AGENCY Nothing contained herein shall be construed as creating any agency, employment relationship, partnership, principal-agent or other form of joint enterprise between the parties. -SEVERABILITY In the event that any provision of this Agreement shall be unenforceable or invalid under any applicable law or be so held by an applicable court decision, such unenforceability or invalidity shall not render this Agreement unenforceable or invalid as a whole, and, in such event, such provision shall be changed and interpreted so as to best accomplish the objectives of such unenforceable or invalid provision within the limits of applicable law or applicable court decisions. -WAIVER The failure of either party to require performance by the other party of any provision hereof shall not affect the full right to require such performance at any time thereafter; nor shall the waiver by either party of a breach of any provision hereof be taken or held to be a waiver of the provision itself. -CONTROLLING LANGUAGE. Translated versions of this Agreement may be provided in local languages for informational purposes only, provided however, that the English language version of this Agreement shall be controlling in all jurisdictions. - diff --git a/mix_audio/ChangeLog b/mix_audio/ChangeLog deleted file mode 100644 index 00fa650..0000000 --- a/mix_audio/ChangeLog +++ /dev/null @@ -1,139 +0,0 @@ -2010-01-31 Echo Choi - - * Reverted to use num_chan as output number of channel for AAC. - -2010-01-29 Echo Choi - - * Fixed Audio Manager setting. - * Updated version to 0.4.1 since API changes since 0.3.5. - -2010-01-25 Echo Choi - - * Updated MixCommon dependency to 0.1.8. - * Updated version to 0.3.6. - -2010-01-24 Echo Choi - - * Sync MixIOVec between capture and decode. - -2010-01-22 Echo Choi - - * Updated MixIOVec definition. - * Updated API sync with 0.79 doc. - -2010-01-20 Echo Choi - - * Updated API doc 0.79 sync up. - -2010-01-18 Echo Choi - - * Updated version to 0.3.5 and submit for build. - * Updated call to Audio Manager to use stream name. - * Removed the check to allow decode to be called during PAUSE. - -2010-01-11 Echo Choi - - * Updated version to 0.3.4 - * Updated MixCommon dependency to v 0.1.6. - * Updated the parameter conversion code for AAC to detect codec value from parameters. - * Fixed and added more enum types for AAC parameters definitions. - * Added methods to replace AAC parameters direct accessing. 
- * Added psPresentFlag for AAC param object. - * Updated gtk-doc documentation. - * Added get_stream_byte_decoded API. - -2010-01-04 Echo Choi - - * Fixed code review issues: declare const for char* - * Fixed code review issues: array size calculation. - -2009-12-23 Echo Choi - - * Added aac core operating frequency param for AAC Param object. Needed to configure HE-AAC decoder. - * Fixed the log message category for DRAIN debug log. - -2009-11-19 Echo Choi - - * Added more utility function to populate param object. - * Added MixAudio API to read output configuration (get params) - -2009-11-18 Echo Choi - - * Added return code that inform caller to interpret errno for error. - * Fixed more error checkings. - -2009-11-17 Echo Choi - - * Added default invalid value for various enumerations. - * Fixed some bugs in type declarations. - * Cleaned up code. Added pointer checks, state checks. - -2009-11-15 Echo Choi - - * Updated version to 0.3.3 and package for build. - * Fixed DRAIN state test condition. - -2009-11-13 Echo Choi - - * Updated MixCommon version dependency as MixAudio is using new definitions from MixCommon. - * Fixed issues reported by klocwork. - -2009-11-11 Echo Choi - - * Fixed a mem leak in the stub code. - -2009-11-01 Echo Choi - - * Increased version number to 0.3.2 and package for build. - -2009-10-28 Echo Choi - - * Renamed MPEG_FORMAT member of AAC params to MPEG_ID. - -2009-10-23 Echo Choi - - * Updated version to 0.3.1 for build. - * Added code to assign op_align to sst structure in deterministic case. - * Added stub code to write input bytes to file during dnr request. - * Fixed MixAudio::decode() method to use correct 64-bit type for decode consumed/produced. - -2009-10-18 Echo Choi - - * Added dbus-glib dependency. - * Updated AAC param object to include additonal fields for HE-AAC support. - -2009-10-16 Echo Choi - - * Moved mixdrmparams.* to MixCommon package. - * Changed mix_audio_decode API to include output parameters for bytes consumed and produceds - * Updated version to 0.3.0 to reflect API change in mix_audio_decode. - -2009-10-08 Echo Choi - - * Package for 0.2.6 build. - -2009-10-02 Echo Choi - - * Updated version number to 0.2.6 - * Defined new range for error code that encapsulate errno when system calls to SST API shall fail. - * Added internal states to track PAUSED_DRAINING, and added code to deal with this state. - -2009-08-17 Echo Choi - - * Updated SST API struct to align with build 0.04.008. - * Added bit-mask based runtime log mechanism. - -2009-08-14 Echo Choi - - * Fixed return value check after DROP call. - * Added method to dump status upon SST call failure. - -2009-08-13 Echo Choi - - * Updated API definitions to sync with v0.5 documentation. 
- -2009-08-10 Echo Choi - - * Fixed stop_drop so it is called even if the state is STOPPED - - diff --git a/mix_audio/INSTALL b/mix_audio/INSTALL deleted file mode 100644 index 50e1648..0000000 --- a/mix_audio/INSTALL +++ /dev/null @@ -1,4 +0,0 @@ -run the following to build and install: -./autogen.sh -./configure -make diff --git a/mix_audio/Makefile.am b/mix_audio/Makefile.am deleted file mode 100644 index 2ed4bcd..0000000 --- a/mix_audio/Makefile.am +++ /dev/null @@ -1,7 +0,0 @@ -SUBDIRS = src tests pkgconfig - -#Uncomment the following line if building documentation using gtkdoc -#SUBDIRS += docs - -EXTRA_DIST = autogen.sh m4 -DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc diff --git a/mix_audio/NEWS b/mix_audio/NEWS deleted file mode 100644 index 82302b4..0000000 --- a/mix_audio/NEWS +++ /dev/null @@ -1 +0,0 @@ -no. diff --git a/mix_audio/README b/mix_audio/README deleted file mode 100644 index b4292a0..0000000 --- a/mix_audio/README +++ /dev/null @@ -1,2 +0,0 @@ -MIX Audio is an user library interface for various hardware audio codecs available on the platform. - diff --git a/mix_audio/autogen.sh b/mix_audio/autogen.sh deleted file mode 100644 index 13a1d76..0000000 --- a/mix_audio/autogen.sh +++ /dev/null @@ -1,19 +0,0 @@ -#INTEL CONFIDENTIAL -#Copyright 2009 Intel Corporation All Rights Reserved. -#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -# - -package=MixAudio - -#Uncomment the follow line if building documentation using gtkdoc -#gtkdocize --flavour no-tmpl || exit 1 -aclocal -I m4/ $ACLOCAL_FLAGS || exit 1 -libtoolize --copy --force || exit 1 -autoheader -v || exit 1 -autoconf -v || exit 1 -automake -a -c -v || exit 1 - -echo "Now type ./configure to configure $package." 
-exit 0 diff --git a/mix_audio/configure.ac b/mix_audio/configure.ac deleted file mode 100644 index 01c84a9..0000000 --- a/mix_audio/configure.ac +++ /dev/null @@ -1,137 +0,0 @@ -AC_INIT("","",[echo.choi@intel.com]) - -AC_CONFIG_MACRO_DIR(m4) - -AS_MIX_VERSION(mixaudio, MIXAUDIO, 0, 4, 1) - -dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode -AM_MAINTAINER_MODE - -AM_INIT_AUTOMAKE($PACKAGE, $VERSION) - -MIXAUDIO_PKG_DEPS="glib-2.0, gobject-2.0, gthread-2.0 mixcommon" -AC_SUBST(MIXAUDIO_PKG_DEPS) - -dnl make aclocal work in maintainer mode -AC_SUBST(ACLOCAL_AMFLAGS, "-I m4") - -AM_CONFIG_HEADER(config.h) - -dnl check for tools -AC_PROG_CC_C_O -AC_PROG_CC -AC_PROG_LIBTOOL - -MIX_CFLAGS="-Wall -Werror -O" - -AC_ARG_ENABLE([lpestub], - [ --enable-lpestub Stub LPE methods], - [case "${enableval}" in - yes) lpestub=true ;; - no) lpestub=false ;; - *) AC_MSG_ERROR([bad value ${enableval} for --enable-lpestub]) ;; - esac],[lpestub=false]) - -AM_CONDITIONAL([LPESTUB], [test x$lpestub = xtrue]) - -AC_ARG_ENABLE([workaround], - [ --enable-workaround Enable workaround for LPE DROP], - [case "${enableval}" in - yes) workaround=true ;; - no) workaround=false ;; - *) AC_MSG_ERROR([bad value ${enableval} for --enable-workaround]) ;; - esac],[workaround=false]) - -AM_CONDITIONAL([WORKAROUND], [test x$workaround = xtrue]) - -AC_ARG_ENABLE([audiomanager], - [ --enable-audiomanager Audio Manager methods(default=enable)], - [case "${enableval}" in - yes) audiomanager=true ;; - no) audiomanager=false ;; - *) AC_MSG_ERROR([bad value ${enableval} for --enable-audiomanager]) ;; - esac],[audiomanager=true]) - -AM_CONDITIONAL([AUDIO_MANAGER], [test x$audiomanager = xtrue]) - -dnl decide on error flags -dnl AS_COMPILER_FLAG(-Wall, MIX_ERROR="$MIX_ERROR -Wall", MIX_ERROR="$MIX_ERROR") -dnl AS_COMPILER_FLAG(-Werror,MIX_ERROR="$MIX_ERROR -Werror",MIX_ERROR="$MIX_ERROR") - -dnl Check for pkgconfig first -AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes, no) - -dnl Give error and exit if we don't have pkgconfig -if test "x$HAVE_PKGCONFIG" = "xno"; then - AC_MSG_ERROR(you need to have pkgconfig installed !) -fi - -dnl GLib -dnl FIXME: need to align with moblin glib version -dnl FIXME: currently using an earlier version so it can be built on dev box. -GLIB_REQ=2.18 - -dnl Check for glib2 without extra fat, useful for the unversioned tool frontends -dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) -PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) -if test "x$HAVE_GLIB" = "xno"; then - AC_MSG_ERROR(You need glib development packages installed !) -fi - -PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no) -if test "x$HAVE_GOBJECT" = "xno"; then - AC_MSG_ERROR(You need glib development packages installed !) -fi - -PKG_CHECK_MODULES(GTHREAD, gthread-2.0 >= $GLIB_REQ,HAVE_GTHREAD=yes,HAVE_GTHREAD=no) -if test "x$HAVE_GTRHEAD" = "xno"; then - AC_MSG_ERROR(You need glib development packages installed !) -fi - -MIXCOMMON_REQUIRED=0.1.8 -PKG_CHECK_MODULES(MIXCOMMON, mixcommon >= $MIXCOMMON_REQUIRED , HAVE_MIXCOMMON=yes, HAVE_MIXCOMMON=no) -if test "x$HAVE_MIXCOMMON" = "xno"; then - AC_MSG_ERROR(You need mixcommon development package $MIXCOMMON_REQUIRED installed !) 
-fi - -if test "x$audiomanager" = "xtrue"; then - PKG_CHECK_MODULES(DBUS_GLIB, dbus-glib-1) -fi - -dnl Check for documentation xrefs -dnl GLIB_PREFIX="`$PKG_CONFIG --variable=prefix glib-2.0`" -dnl AC_SUBST(GLIB_PREFIX) - -AC_SUBST(GLIB_CFLAGS) -AC_SUBST(GLIB_LIBS) -AC_SUBST(GOBJECT_CFLAGS) -AC_SUBST(GOBJECT_LIBS) -AC_SUBST(MIX_CFLAGS) -AC_SUBST(GTHREAD_CFLAGS) -AC_SUBST(GTHREAD_LIBS) -AC_SUBST(MIXCOMMON_CFLAGS) -AC_SUBST(MIXCOMMON_LIBS) - -dnl check for gtk-doc -dnl GTK_DOC_CHECK(1.9) - -AC_CONFIG_FILES( -Makefile -src/Makefile -tests/Makefile -tests/smoke/Makefile -pkgconfig/Makefile -pkgconfig/mixaudio.pc -) - -dnl Additional Makefiles if we are building document with gtkdoc. -dnl Un-comment this section to enable building of documentation. -dnl AC_CONFIG_FILES( -dnl docs/Makefile -dnl docs/reference/Makefile -dnl docs/reference/MixAudio/Makefile -dnl ) - -AC_OUTPUT - - diff --git a/mix_audio/docs/Makefile.am b/mix_audio/docs/Makefile.am deleted file mode 100644 index 621e3f7..0000000 --- a/mix_audio/docs/Makefile.am +++ /dev/null @@ -1,4 +0,0 @@ -SUBDIRS = reference - -DIST_SUBDIRS = reference - diff --git a/mix_audio/docs/reference/Makefile.am b/mix_audio/docs/reference/Makefile.am deleted file mode 100644 index 85bde95..0000000 --- a/mix_audio/docs/reference/Makefile.am +++ /dev/null @@ -1,4 +0,0 @@ -SUBDIRS = MixAudio - -DIST_SUBDIRS = MixAudio - diff --git a/mix_audio/docs/reference/MixAudio/Makefile.am b/mix_audio/docs/reference/MixAudio/Makefile.am deleted file mode 100644 index adf494c..0000000 --- a/mix_audio/docs/reference/MixAudio/Makefile.am +++ /dev/null @@ -1,96 +0,0 @@ -#INTEL CONFIDENTIAL -#Copyright 2009 Intel Corporation All Rights Reserved. -#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -# - - -## Process this file with automake to produce Makefile.in - -# We require automake 1.6 at least. -AUTOMAKE_OPTIONS = 1.6 - -# This is a blank Makefile.am for using gtk-doc. -# Copy this to your project's API docs directory and modify the variables to -# suit your project. See the GTK+ Makefiles in gtk+/docs/reference for examples -# of using the various options. - -# The name of the module, e.g. 'glib'. -DOC_MODULE=MixAudio - -# The top-level SGML file. You can change this if you want to. -DOC_MAIN_SGML_FILE=$(DOC_MODULE)-docs.sgml - -# The directory containing the source code. Relative to $(srcdir). -# gtk-doc will search all .c & .h files beneath here for inline comments -# documenting the functions and macros. -# e.g. DOC_SOURCE_DIR=../../../gtk -DOC_SOURCE_DIR=$(top_srcdir)/src - -# Extra options to pass to gtkdoc-scangobj. 
Not normally needed. -#SCANGOBJ_OPTIONS=--type-init-func="g_init(NULL,NULL)" - -# Extra options to supply to gtkdoc-scan. -# e.g. SCAN_OPTIONS=--deprecated-guards="GTK_DISABLE_DEPRECATED" -SCAN_OPTIONS=--rebuild-sections --rebuild-types -#SCAN_OPTIONS=--rebuild-sections - -# Extra options to supply to gtkdoc-mkdb. -# e.g. MKDB_OPTIONS=--sgml-mode --output-format=xml -MKDB_OPTIONS=--sgml-mode --output-format=xml - -# Extra options to supply to gtkdoc-mktmpl -# e.g. MKTMPL_OPTIONS=--only-section-tmpl -MKTMPL_OPTIONS= - -# Extra options to supply to gtkdoc-fixref. Not normally needed. -# e.g. FIXXREF_OPTIONS=--extra-dir=../gdk-pixbuf/html --extra-dir=../gdk/html -FIXXREF_OPTIONS= - -# Used for dependencies. The docs will be rebuilt if any of these change. -# e.g. HFILE_GLOB=$(top_srcdir)/gtk/*.h -# e.g. CFILE_GLOB=$(top_srcdir)/gtk/*.c -HFILE_GLOB=$(top_srcdir)/src/*.h -CFILE_GLOB=$(top_srcdir)/src/*.c - -# Header files to ignore when scanning. -# e.g. IGNORE_HFILES=gtkdebug.h gtkintl.h -IGNORE_HFILES=*~ intel_sst_ioctl.h pvt.h sst_proxy.h amhelper.h - -# Images to copy into HTML directory. -# e.g. HTML_IMAGES=$(top_srcdir)/gtk/stock-icons/stock_about_24.png -HTML_IMAGES= - -# Extra SGML files that are included by $(DOC_MAIN_SGML_FILE). -# e.g. content_files=running.sgml building.sgml changes-2.0.sgml -content_files= - -# SGML files where gtk-doc abbrevations (#GtkWidget) are expanded -# These files must be listed here *and* in content_files -# e.g. expand_content_files=running.sgml -expand_content_files= - -# CFLAGS and LDFLAGS for compiling gtkdoc-scangobj with your library. -# Only needed if you are using gtkdoc-scangobj to dynamically query widget -# signals and properties. -# e.g. INCLUDES=-I$(top_srcdir) -I$(top_builddir) $(GTK_DEBUG_FLAGS) -# e.g. GTKDOC_LIBS=$(top_builddir)/gtk/$(gtktargetlib) -AM_CFLAGS=$(GLIB_CFLAGS) $(GOBJECT_CFLAGS) $(MIXCOMMON_CFLAGS) -GTKDOC_LIBS=$(GLIB_LIBS) $(GOBJECT_LIBS) $(MIXCOMMON_LIBS) $(top_srcdir)/src/libmixaudio.la - -# This includes the standard gtk-doc make rules, copied by gtkdocize. -include $(top_srcdir)/gtk-doc.make - -# Other files to distribute -# e.g. EXTRA_DIST += version.xml.in -EXTRA_DIST += - -# Files not to distribute -# for --rebuild-types in $(SCAN_OPTIONS), e.g. $(DOC_MODULE).types -# for --rebuild-sections in $(SCAN_OPTIONS) e.g. $(DOC_MODULE)-sections.txt -#DISTCLEANFILES = - -# Comment this out if you want your docs-status tested during 'make check' -#TESTS = $(GTKDOC_CHECK) - diff --git a/mix_audio/docs/reference/MixAudio/MixAudio-docs.sgml b/mix_audio/docs/reference/MixAudio/MixAudio-docs.sgml deleted file mode 100644 index 7627fe9..0000000 --- a/mix_audio/docs/reference/MixAudio/MixAudio-docs.sgml +++ /dev/null @@ -1,39 +0,0 @@ - - - - - MixAudio Reference Manual - - MixAudio version 0.3 - - - - - - Mix Audio API - - - - - - - - - - - - - - Object Hierarchy - - - - - API Index - - - - diff --git a/mix_audio/docs/reference/MixAudio/MixAudio-sections.txt b/mix_audio/docs/reference/MixAudio/MixAudio-sections.txt deleted file mode 100644 index d96a685..0000000 --- a/mix_audio/docs/reference/MixAudio/MixAudio-sections.txt +++ /dev/null @@ -1,187 +0,0 @@ -
-mixacpwma -MixAudioWMAVersion -MixAudioConfigParamsWMA -MixAudioConfigParamsWMA -mix_acp_wma_new -mix_acp_wma_ref -mix_acp_wma_unref -MIX_ACP_WMA_CHANNEL_MASK -MIX_ACP_WMA_FORMAT_TAG -MIX_ACP_WMA_BLOCK_ALIGN -MIX_ACP_WMA_ENCODE_OPT -MIX_ACP_WMA_PCM_BIT_WIDTH -mix_acp_wma_get_version -mix_acp_wma_set_version - -MIX_AUDIOCONFIGPARAMSWMA -MIX_IS_AUDIOCONFIGPARAMSWMA -MIX_TYPE_AUDIOCONFIGPARAMSWMA -mix_acp_wma_get_type -MIX_AUDIOCONFIGPARAMSWMA_CLASS -MIX_IS_AUDIOCONFIGPARAMSWMA_CLASS -MIX_AUDIOCONFIGPARAMSWMA_GET_CLASS -
- -
-mixacp -MixACPOpAlign -MixACPBPSType -MixDecodeMode -MixAudioConfigParams -MixAudioConfigParams -mix_acp_new -mix_acp_ref -mix_acp_unref -MIX_ACP_DECODEMODE -MIX_ACP_NUM_CHANNELS -MIX_ACP_BITRATE -MIX_ACP_SAMPLE_FREQ -mix_acp_get_decodemode -mix_acp_set_decodemode -mix_acp_get_streamname -mix_acp_set_streamname -mix_acp_set_audio_manager -mix_acp_get_audio_manager -mix_acp_is_streamname_valid -mix_acp_get_bps -mix_acp_set_bps -mix_acp_get_op_align -mix_acp_set_op_align - -MIX_AUDIOCONFIGPARAMS -MIX_IS_AUDIOCONFIGPARAMS -MIX_TYPE_AUDIOCONFIGPARAMS -mix_acp_get_type -MIX_AUDIOCONFIGPARAMS_CLASS -MIX_IS_AUDIOCONFIGPARAMS_CLASS -MIX_AUDIOCONFIGPARAMS_GET_CLASS -
- -
-mixacpaac -MixAACBitrateType -MixAACBitstreamFormt -MixAACProfile -MixAACMpegID -MixAudioConfigParamsAAC -MixAudioConfigParamsAAC -mix_acp_aac_new -mix_acp_aac_ref -mix_acp_aac_unref -mix_acp_aac_set_mpeg_id -mix_acp_aac_get_mpeg_id -MIX_ACP_AAC_CRC -mix_acp_aac_set_aot -mix_acp_aac_get_aot -MIX_ACP_AAC_SBR_FLAG -MIX_ACP_AAC_PS_FLAG -MIX_ACP_AAC_PCE_FLAG -MIX_ACP_AAC_SAMPLE_RATE -MIX_ACP_AAC_CHANNELS -mix_acp_aac_get_bit_stream_format -mix_acp_aac_set_bit_stream_format -mix_acp_aac_get_aac_profile -mix_acp_aac_set_aac_profile -mix_acp_aac_get_bit_rate_type -mix_acp_aac_set_bit_rate_type - -MIX_AUDIOCONFIGPARAMSAAC -MIX_IS_AUDIOCONFIGPARAMSAAC -MIX_TYPE_AUDIOCONFIGPARAMSAAC -mix_acp_aac_get_type -MIX_AUDIOCONFIGPARAMSAAC_CLASS -MIX_IS_AUDIOCONFIGPARAMSAAC_CLASS -MIX_AUDIOCONFIGPARAMSAAC_GET_CLASS -
- -
-mixaudio -MixStreamState -MixState -MixCodecMode -MixVolType -MixVolRamp -MixIOVec -MixDeviceState -MixAudio -MixAudio -mix_audio_new -mix_audio_ref -mix_audio_unref -mix_audio_get_version -mix_audio_initialize -mix_audio_configure -mix_audio_decode -mix_audio_capture_encode -mix_audio_start -mix_audio_stop_drop -mix_audio_stop_drain -mix_audio_pause -mix_audio_resume -mix_audio_get_timestamp -mix_audio_set_mute -mix_audio_get_mute -mix_audio_get_max_vol -mix_audio_get_min_vol -mix_audio_get_volume -mix_audio_set_volume -mix_audio_deinitialize -mix_audio_get_stream_state -mix_audio_get_state -mix_audio_am_is_enabled -mix_audio_is_am_available -mix_audio_get_output_configuration -mix_audio_get_stream_byte_decoded - -MIX_AUDIO -MIX_IS_AUDIO -MIX_TYPE_AUDIO -mix_audio_get_type -MIX_AUDIO_CLASS -MIX_IS_AUDIO_CLASS -MIX_AUDIO_GET_CLASS -
- -
-mixaip -MixAudioInitParams -MixAudioInitParams -mix_aip_new -mix_aip_ref -mix_aip_unref - -MIX_AUDIOINITPARAMS -MIX_IS_AUDIOINITPARAMS -MIX_TYPE_AUDIOINITPARAMS -mix_aip_get_type -MIX_AUDIOINITPARAMS_CLASS -MIX_IS_AUDIOINITPARAMS_CLASS -MIX_AUDIOINITPARAMS_GET_CLASS -
- -
-mixacpmp3 -MixAudioConfigParamsMP3 -MixAudioConfigParamsMP3 -mix_acp_mp3_new -mix_acp_mp3_ref -mix_acp_mp3_unref -MIX_ACP_MP3_CRC -MIX_ACP_MP3_MPEG_FORMAT -MIX_ACP_MP3_MPEG_LAYER - -MIX_AUDIOCONFIGPARAMSMP3 -MIX_IS_AUDIOCONFIGPARAMSMP3 -MIX_TYPE_AUDIOCONFIGPARAMSMP3 -mix_acp_mp3_get_type -MIX_AUDIOCONFIGPARAMSMP3_CLASS -MIX_IS_AUDIOCONFIGPARAMSMP3_CLASS -MIX_AUDIOCONFIGPARAMSMP3_GET_CLASS -
- -
-mixaudiotypes -MixAudioManager -
- diff --git a/mix_audio/docs/reference/MixAudio/MixAudio.types b/mix_audio/docs/reference/MixAudio/MixAudio.types deleted file mode 100644 index 0a80168..0000000 --- a/mix_audio/docs/reference/MixAudio/MixAudio.types +++ /dev/null @@ -1,6 +0,0 @@ -mix_acp_wma_get_type -mix_acp_get_type -mix_acp_aac_get_type -mix_audio_get_type -mix_aip_get_type -mix_acp_mp3_get_type diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParams.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParams.html deleted file mode 100644 index 1dd3b14..0000000 --- a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParams.html +++ /dev/null @@ -1,689 +0,0 @@ - - - - -MixAudioConfigParams - - - - - - - - - - - - - - - - - - - - - - -

MixAudioConfigParams

-

MixAudioConfigParams — MixAudio configuration parameters object.

-
- -
-

Description

-

-MixAudio configuration parameters object, used to communicate audio-specific parameters.

-

-This object should not be instantiated directly, as codec-specific parameters are defined in individual derived classes.

-
-
-
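For example, a minimal sketch of the intended pattern (using the codec-specific constructor and cast macro documented elsewhere in this manual; the channel value is a hypothetical illustration):

/* Sketch: create a codec-specific derived object (here MP3) rather than
 * instantiating the MixAudioConfigParams base class directly. */
MixAudioConfigParamsMP3 *mp3_params = mix_acp_mp3_new();
if (mp3_params != NULL)
{
    /* The derived object is usable wherever the base type is expected. */
    MixAudioConfigParams *acp = MIX_AUDIOCONFIGPARAMS(mp3_params);
    MIX_ACP_NUM_CHANNELS(acp) = 2;   /* stereo output */
    mix_acp_unref(acp);              /* drop the reference when done */
}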

Details

-
-

enum MixACPOpAlign

-
typedef enum {
-  MIX_ACP_OUTPUT_ALIGN_UNKNOWN=-1,
-  MIX_ACP_OUTPUT_ALIGN_16=0,
-  MIX_ACP_OUTPUT_ALIGN_MSB,
-  MIX_ACP_OUTPUT_ALIGN_LSB,
-  MIX_ACP_OUTPUT_ALIGN_LAST
-} MixACPOpAlign;
-
-

-Audio Output alignment.
-
-MIX_ACP_OUTPUT_ALIGN_UNKNOWN : Output alignment undefined.
-MIX_ACP_OUTPUT_ALIGN_16
-MIX_ACP_OUTPUT_ALIGN_MSB     : Output word is MSB aligned.
-MIX_ACP_OUTPUT_ALIGN_LSB     : Output word is LSB aligned.
-MIX_ACP_OUTPUT_ALIGN_LAST    : Last entry in list.

enum MixACPBPSType

-
typedef enum {
-  MIX_ACP_BPS_UNKNOWN=0,
-  MIX_ACP_BPS_16=16,
-  MIX_ACP_BPS_24=24,
-} MixACPBPSType;
-
-

-Audio Output Size in bits per sample.
-
-MIX_ACP_BPS_UNKNOWN : Bits per sample undefined.
-MIX_ACP_BPS_16      : Output is 16 bits per sample.
-MIX_ACP_BPS_24      : Output is 24 bits per sample.
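To illustrate, a minimal sketch (assuming acp is a MixAudioConfigParams created earlier, and that the mix_acp_set_bps() and mix_acp_set_op_align() setters listed in this manual take these enum values and return MIX_RESULT) of requesting 16-bit, LSB-aligned output:

/* Sketch: request 16-bit output samples, LSB-aligned. */
MIX_RESULT ret = mix_acp_set_bps(acp, MIX_ACP_BPS_16);
if (ret == MIX_RESULT_SUCCESS)
    ret = mix_acp_set_op_align(acp, MIX_ACP_OUTPUT_ALIGN_LSB);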

enum MixDecodeMode

-
typedef enum {
-  MIX_DECODE_NULL=0,
-  MIX_DECODE_DIRECTRENDER,
-  MIX_DECODE_DECODERETURN,
-  MIX_DECODE_LAST
-} MixDecodeMode;
-
-

-Operation Mode for a MI-X session. See mix_audio_configure().
-
-MIX_DECODE_NULL         : Undefined decode mode.
-MIX_DECODE_DIRECTRENDER : Stream is configured in Direct Render mode.
-MIX_DECODE_DECODERETURN : Stream is configured in Decode Return mode.
-MIX_DECODE_LAST         : Last index in the enumeration.
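For instance, a minimal sketch (assuming acp was created with mix_acp_new() or a derived constructor) of selecting the decode mode before calling mix_audio_configure():

/* Sketch: prefer Direct Render; fall back to Decode Return, in which
 * decoded PCM data is handed back to the caller. */
if (mix_acp_set_decodemode(acp, MIX_DECODE_DIRECTRENDER) != MIX_RESULT_SUCCESS)
{
    mix_acp_set_decodemode(acp, MIX_DECODE_DECODERETURN);
}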

MixAudioConfigParams

-
typedef struct {
-  MixParams parent;
-
-  /* Audio Session Parameters */
-  MixDecodeMode decode_mode;
-  gchar *stream_name;
-  MixAudioManager audio_manager;
-
-  /* Audio Format Parameters */
-  gint num_channels;
-  gint bit_rate;
-  gint sample_freq;
-  MixACPBPSType bits_per_sample;
-  MixACPOpAlign op_align;
-} MixAudioConfigParams;
-
-

-MixAudio configuration parameters object.

-
-- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

MixParams parent;

parent. -

MixDecodeMode decode_mode;

Decode Mode to use for current session. See mix_acp_set_decodemode -

gchar *stream_name;

Stream name. See mix_acp_set_streamname. This object will release the string upon destruction. -

MixAudioManager audio_manager;

Type of Audio Manager. See mix_acp_set_audio_manager. -

gint num_channels;

Number of output channels. See MIX_ACP_NUM_CHANNELS -

gint bit_rate;

Optional. See MIX_ACP_BITRATE -

gint sample_freq;

Output frequency. See MIX_ACP_SAMPLE_FREQ -

MixACPBPSType bits_per_sample;

Number of output bits per sample. See mix_acp_set_bps -

MixACPOpAlign op_align;

Output Byte Alignment. See mix_acp_set_op_align -
-
-
-
-

mix_acp_new ()

-
MixAudioConfigParams * mix_acp_new                      (void);
-

-Use this method to create a new instance of MixAudioConfigParams.

-
-- - - - -

returns :

A newly allocated instance of MixAudioConfigParams -
-
-
-
-

mix_acp_ref ()

-
MixAudioConfigParams * mix_acp_ref                      (MixAudioConfigParams *mix);
-

-Add reference count.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixAudioConfigParams instance where reference count has been increased. -
-
-
-
-

mix_acp_unref()

-
#define mix_acp_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

MIX_ACP_DECODEMODE()

-
#define MIX_ACP_DECODEMODE(obj) (MIX_AUDIOCONFIGPARAMS(obj)->decode_mode)
-
-

-MixAudioConfigParam.decode_mode accessor. -

-

-Configure the decode mode to one of the MixDecodeMode values.

-
-- - - - -

obj :

MixAudioConfigParams object -
-
-
-
-

MIX_ACP_NUM_CHANNELS()

-
#define MIX_ACP_NUM_CHANNELS(obj) (MIX_AUDIOCONFIGPARAMS(obj)->num_channels)
-
-

-MixAudioConfigParam.num_channels accessor. -

-

-Configure the number of output channels. This value needs to be exactly the same as the number of channels supported by the audio output, since down-mixing is not supported. -

-

-This value can be used during MIX_DECODE_DECODERETURN mode for buffer size/duration calculation. -

-

-In Moorestown, the number of channels must be 1 or 2.

-
-- - - - -

obj :

MixAudioConfigParams object -
-
-
-
-

MIX_ACP_BITRATE()

-
#define MIX_ACP_BITRATE(obj) (MIX_AUDIOCONFIGPARAMS(obj)->bit_rate)
-
-

-MixAudioConfigParam.bit_rate accessor. -

-

-Bit rate of the current audio. -

-

-Optional

-
-- - - - -

obj :

MixAudioConfigParams object -
-
-
-
-

MIX_ACP_SAMPLE_FREQ()

-
#define MIX_ACP_SAMPLE_FREQ(obj) (MIX_AUDIOCONFIGPARAMS(obj)->sample_freq)
-
-

-MixAudioConfigParam.sample_freq accessor. -

-

-Output sampling frequency. -

-

-This value can be used during MIX_DECODE_DECODERETURN mode for buffer size/duration calculation.

-
-- - - - -

obj :

MixAudioConfigParams object -
-
-
-
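
A minimal sketch of how the accessors above compose, assuming the MIX_AUDIOCONFIGPARAMS() cast macro used in the accessor definitions; the format values chosen are illustrative only:

/* Sketch: populate the common audio format fields on a derived
 * config object. Values are illustrative; error checks omitted. */
MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new();
MixAudioConfigParams *acp = MIX_AUDIOCONFIGPARAMS(mp3);

MIX_ACP_NUM_CHANNELS(acp) = 2;     /* Moorestown supports 1 or 2 */
MIX_ACP_SAMPLE_FREQ(acp) = 44100;  /* output sampling frequency */
MIX_ACP_BITRATE(acp) = 128000;     /* optional */
mix_acp_set_bps(acp, MIX_ACP_BPS_16);
mix_acp_set_op_align(acp, MIX_ACP_OUTPUT_ALIGN_16);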
-

mix_acp_get_decodemode ()

-
MixDecodeMode       mix_acp_get_decodemode              (MixAudioConfigParams *obj);
-

-Retrieve currently configured MixDecodeMode.

-
-- - - - - - - - - - -

obj :

MixAudioConfigParams -

returns :

MixDecodeMode -
-
-
-
-

mix_acp_set_decodemode ()

-
MIX_RESULT          mix_acp_set_decodemode              (MixAudioConfigParams *obj,
-                                                         MixDecodeMode mode);
-

-Configure session for one of the MixDecodeMode.

-
-- - - - - - - - - - - - - - -

obj :

MixAudioConfigParams -

mode :

MixDecodeMode to set -

returns :

MIX_RESULT -
-
-
-
-

mix_acp_get_streamname ()

-
gchar *             mix_acp_get_streamname              (MixAudioConfigParams *obj);
-

-Returns a copy of the stream name. The caller must free it with g_free().

-
-- - - - - - - - - - -

obj :

MixAudioConfigParams -

returns :

pointer to a copy of the stream name. NULL if name is not available. -
-
-
-
-

mix_acp_set_streamname ()

-
MIX_RESULT          mix_acp_set_streamname              (MixAudioConfigParams *obj,
-                                                         const gchar *streamname);
-

-Set the stream name. The object will make a copy of the input stream name string.

-
-- - - - - - - - - - - - - - -

obj :

MixAudioConfigParams -

streamname :

Stream name to set -

returns :

MIX_RESULT -
-
-
-
-

mix_acp_set_audio_manager ()

-
MIX_RESULT          mix_acp_set_audio_manager           (MixAudioConfigParams *obj,
-                                                         MixAudioManager am);
-

-Set the Audio Manager to one of the MixAudioManager values.

-
-- - - - - - - - - - - - - - -

obj :

MixAudioConfigParams -

am :

MixAudioManager -

returns :

MIX_RESULT -
-
-
-
-

mix_acp_get_audio_manager ()

-
MixAudioManager     mix_acp_get_audio_manager           (MixAudioConfigParams *obj);
-

-Retrieve name of currently configured audio manager.

-
-- - - - - - - - - - -

obj :

MixAudioConfigParams -

returns :

MixAudioManager -
-
-
-
-

mix_acp_is_streamname_valid ()

-
gboolean            mix_acp_is_streamname_valid         (MixAudioConfigParams *obj);
-

-Check if stream name is valid considering the current Decode Mode.

-
-- - - - - - - - - - -

obj :

MixAudioConfigParams -

returns :

boolean indicates if stream name is valid. -
-
-
-
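
Continuing the sketch above, the session-level setters might be used as follows for a direct-render stream; the stream name is an arbitrary example:

/* Sketch: session parameters for a direct-render stream. */
mix_acp_set_decodemode(acp, MIX_DECODE_DIRECTRENDER);
mix_acp_set_streamname(acp, "mediaplayer0");  /* copied internally */
mix_acp_set_audio_manager(acp, MIX_AUDIOMANAGER_INTELAUDIOMANAGER);

if (!mix_acp_is_streamname_valid(acp)) {
    /* a stream name is required in this decode mode; handle error */
}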
-

mix_acp_get_bps ()

-
MixACPBPSType       mix_acp_get_bps                     (MixAudioConfigParams *obj);
-

-Retrieve the currently configured bits-per-sample value.

-
-- - - - - - - - - - -

obj :

MixAudioConfigParams -

returns :

MixACPBPSType -
-
-
-
-

mix_acp_set_bps ()

-
MIX_RESULT          mix_acp_set_bps                     (MixAudioConfigParams *obj,
-                                                         MixACPBPSType type);
-

-Configure bits per sample to one of the supported MixACPBPSType values.

-
-- - - - - - - - - - - - - - -

obj :

MixAudioConfigParams -

mode :

MixACPBPSType to set -

returns :

MIX_RESULT -
-
-
-
-

mix_acp_get_op_align ()

-
MixACPOpAlign       mix_acp_get_op_align                (MixAudioConfigParams *obj);
-

-Get Output Alignment.

-
-- - - - - - - - - - -

obj :

MixAudioConfigParams object -

returns :

MixACPOpAlign -
-
-
-
-

mix_acp_set_op_align ()

-
MIX_RESULT          mix_acp_set_op_align                (MixAudioConfigParams *obj,
-                                                         MixACPOpAlign op_align);
-

-Set Output Alignment to one of the MixACPOpAlign values.

-
-- - - - - - - - - - - - - - -

obj :

MixAudioConfigParams object -

op_align :

One of the supported MixACPOpAlign -

returns :

MIX_RESULT -
-
-
-
-
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsAAC.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsAAC.html
deleted file mode 100644
index 46e4e8e..0000000
--- a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsAAC.html
+++ /dev/null
@@ -1,823 +0,0 @@
-[HTML head for "MixAudioConfigParamsAAC" and navigation markup stripped in extraction]
-
-
- - -
-

MixAudioConfigParamsAAC

-

MixAudioConfigParamsAAC — Audio configuration parameters for the AAC-LC, HE-AAC v1, and HE-AAC v2 audio formats.

-
- -
-

Description

-

-A data object which stores audio specific parameters for the following formats: -

-
    -
  • AAC-LC
  • -
  • HE-AAC v1
  • -
  • HE-AAC v2
  • -
-

-

-

-Additional parameters must be set in the parent object, MixAudioConfigParams.

-
-
-

Details

-
-

enum MixAACBitrateType

-
typedef enum {
-  MIX_AAC_BR_NULL=-1,
-  MIX_AAC_BR_CONSTANT=0,
-  MIX_AAC_BR_VARIABLE,
-  MIX_AAC_BR_LAST
-} MixAACBitrateType;
-
-

-Types of bitrate in AAC.

-
-- - - - - - - - - - - - - - - - - - -

MIX_AAC_BR_NULL

Undefined bit rate type. -

MIX_AAC_BR_CONSTANT

Constant bit rate. -

MIX_AAC_BR_VARIABLE

Variable bit rate. -

MIX_AAC_BR_LAST

last entry. -
-
-
-
-

enum MixAACBitstreamFormt

-
typedef enum {
-  MIX_AAC_BS_NULL=-1,
-  MIX_AAC_BS_ADTS=0,
-  MIX_AAC_BS_ADIF,
-  MIX_AAC_BS_RAW,
-  MIX_AAC_BS_LAST
-} MixAACBitstreamFormt;
-
-

-AAC bitstream format.

-
-- - - - - - - - - - - - - - - - - - - - - - -

MIX_AAC_BS_NULL

Undefined bitstream format. -

MIX_AAC_BS_ADTS

Bitstream is in ADTS format. -

MIX_AAC_BS_ADIF

Bitstream is in ADIF format. -

MIX_AAC_BS_RAW

Bitstream is in raw format. -

MIX_AAC_BS_LAST

Last entry. -
-
-
-
-

enum MixAACProfile

-
typedef enum {
-  MIX_AAC_PROFILE_NULL=-1,
-  MIX_AAC_PROFILE_MAIN=0,
-  MIX_AAC_PROFILE_LC,
-  MIX_AAC_PROFILE_SSR,
-  MIX_AAC_PROFILE_LAST
-} MixAACProfile;
-
-

-AAC profiles definitions.

-
-- - - - - - - - - - - - - - - - - - - - - - -

MIX_AAC_PROFILE_NULL

Undefined profile. -

MIX_AAC_PROFILE_MAIN

Not Supported AAC Main profile. -

MIX_AAC_PROFILE_LC

AAC-LC profile, including support of SBR and PS tool. -

MIX_AAC_PROFILE_SSR

Not Supported SSR profile. -

MIX_AAC_PROFILE_LAST

Last entry. -
-
-
-
-

enum MixAACMpegID

-
typedef enum {
-  MIX_AAC_MPEG_ID_NULL=-1,
-  MIX_AAC_MPEG_2_ID = 0,
-  MIX_AAC_MPEG_4_ID = 1,
-  MIX_AAC_MPEG_LAST
-} MixAACMpegID;
-
-

-AAC MPEG ID.

-
-- - - - - - - - - - - - - - - - - - -

MIX_AAC_MPEG_ID_NULL

Undefined MPEG ID. -

MIX_AAC_MPEG_2_ID

Indicate MPEG 2 Audio. -

MIX_AAC_MPEG_4_ID

Indicate MPEG 4 Audio. -

MIX_AAC_MPEG_LAST

last entry. -
-
-
-
-

MixAudioConfigParamsAAC

-
typedef struct {
-  MixAudioConfigParams parent;
-
-  /* Audio Format Parameters */
-  MixAACMpegID MPEG_id;
-  MixAACBitstreamFormt bit_stream_format;
-  MixAACProfile aac_profile;
-  guint aot;
-  guint aac_sample_rate;      
-  guint aac_channels;  
-  MixAACBitrateType bit_rate_type;
-  gboolean CRC;
-  guint sbrPresentFlag;
-  guint psPresentFlag;
-  gboolean pce_present;
-  gint8 syntc_id[2]; 
-  gint8 syntc_tag[2]; 
-  gint num_syntc_elems;
-} MixAudioConfigParamsAAC;
-
-

-MixAudio Parameter object

-
-- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

MixAudioConfigParams parent;

parent. -

MixAACMpegID MPEG_id;

MPEG ID. See mix_acp_aac_set_mpeg_id -

MixAACBitstreamFormt bit_stream_format;

Bitstream format. See mix_acp_aac_set_bit_stream_format. -

MixAACProfile aac_profile;

AAC profile. See mix_acp_aac_set_aac_profile. -

guint aot;

Audio object type. See mix_acp_aac_set_aot -

guint aac_sample_rate;

See MIX_ACP_AAC_SAMPLE_RATE macro. -

guint aac_channels;

See MIX_ACP_AAC_CHANNELS macro. -

MixAACBitrateType bit_rate_type;

Bitrate type. See mix_acp_aac_set_bit_rate_type -

gboolean CRC;

CRC check 0:disable, 1:enable. -

guint sbrPresentFlag;

See MIX_ACP_AAC_SBR_FLAG macro. -

guint psPresentFlag;

See MIX_ACP_AAC_PS_FLAG macro. -

gboolean pce_present;

Not Used. See MIX_ACP_AAC_PCE_FLAG -

gint8 syntc_id[2];

Not Used. 0 for ID_SCE (Dual Mono), -1 for raw. -

gint8 syntc_tag[2];

Not Used. -1 for raw. 0-16 for rest of the streams. -

gint num_syntc_elems;

Not Used. Number of syntactic elements. -
-
-
-
-

mix_acp_aac_new ()

-
MixAudioConfigParamsAAC * mix_acp_aac_new               (void);
-

-Use this method to create a new instance of MixAudioConfigParamsAAC.

-
-- - - - -

returns :

A newly allocated instance of MixAudioConfigParamsAAC -
-
-
-
-

mix_acp_aac_ref ()

-
MixAudioConfigParamsAAC * mix_acp_aac_ref               (MixAudioConfigParamsAAC *mix);
-

-Add reference count.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixAudioConfigParamsAAC instance where reference count has been increased. -
-
-
-
-

mix_acp_aac_unref()

-
#define mix_acp_aac_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

mix_acp_aac_set_mpeg_id ()

-
MIX_RESULT          mix_acp_aac_set_mpeg_id             (MixAudioConfigParamsAAC *obj,
-                                                         MixAACMpegID mpegid);
-

-Configure decoder to treat audio as MPEG 2 or MPEG 4.

-
-- - - - - - - - - - - - - - -

obj :

MixAudioConfigParamsAAC -

mpegid :

MPEG ID to set. -

return :

MIX_RESULT -
-
-
-
-

mix_acp_aac_get_mpeg_id ()

-
MixAACMpegID        mix_acp_aac_get_mpeg_id             (MixAudioConfigParamsAAC *obj);
-

-Retrieve the currently configured MPEG ID value.

-
-- - - - - - - - - - -

obj :

MixAudioConfigParamsAAC object -

returns :

MPEG ID. -
-
-
-
-

MIX_ACP_AAC_CRC()

-
#define MIX_ACP_AAC_CRC(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->CRC)
-
-

-MixAudioConfigParamAAC.CRC accessor.

-
-- - - - -

obj :

MixAudioConfigParamsAAC object. -
-
-
-
-

mix_acp_aac_set_aot ()

-
MIX_RESULT          mix_acp_aac_set_aot                 (MixAudioConfigParamsAAC *obj,
-                                                         guint aot);
-

-Audio Object Type for the MPEG-4 audio stream. Valid values are: -

-

-2 - for AAC-LC -

-

-5 - for SBR -

-

-This method returns MIX_RESULT_NOT_SUPPORTED for unsupported values.

-
-- - - - - - - - - - -

obj :

MixAudioConfigParamsAAC -

aot :

Audio Object Type. -
-
-
-
-

mix_acp_aac_get_aot ()

-
guint               mix_acp_aac_get_aot                 (MixAudioConfigParamsAAC *obj);
-

-Retrieve the currently configured audio object type.

-
-- - - - - - - - - - - - - - -

obj :

MixAudioConfigParamsAAC -

aot :

Pointer to receive the Audio Object Type. -

return :

Currently configured audio object type. Or 0 if not yet specified. -
-
-
-
-

MIX_ACP_AAC_SBR_FLAG()

-
#define MIX_ACP_AAC_SBR_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->sbrPresentFlag)
-
-

-MixAudioConfigParamAAC.sbrPresentFlag accessor. -

-

-Applicable only when bit_stream_format==MIX_AAC_BS_RAW. Indicates whether SBR data is present. -

-

-0: Absent -

-

-1: Present -

-

--1 (0xffffffff): indicates implicit signalling.

-
-- - - - -

obj :

MixAudioConfigParamsAAC object -
-
-
-
-

MIX_ACP_AAC_PS_FLAG()

-
#define MIX_ACP_AAC_PS_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->psPresentFlag)
-
-

-MixAudioConfigParamAAC.psPresentFlag accessor. -

-

-Applicable only when bit_stream_format==MIX_AAC_BS_RAW. Indicates whether PS data is present. -

-

-0: Absent -

-

-1: Present -

-

--1 (0xffffffff): indicates implicit signalling.

-
-- - - - -

obj :

MixAudioConfigParamsAAC object -
-
-
-
-

MIX_ACP_AAC_PCE_FLAG()

-
#define MIX_ACP_AAC_PCE_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->pce_present)
-
-

-MixAudioConfigParamAAC.pce_present accessor. -

-

-Applicable only when bit_stream_format==MIX_AAC_BS_RAW. Indicates PCE data presence. -

-

-1:present -

-

-0:absent. -

-

-Not Used on Moorestown.

-
-- - - - -

obj :

MixAudioConfigParamsAAC object. -
-
-
-
-

MIX_ACP_AAC_SAMPLE_RATE()

-
#define MIX_ACP_AAC_SAMPLE_RATE(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->aac_sample_rate)
-
-

-MixAudioConfigParamAAC.aac_sample_rate accessor. -

-

-Plain AAC decoder operating sample rate, which could be different from the output sampling rate with HE-AAC v1 and v2.

-
-- - - - -

obj :

MixAudioConfigParamsAAC object. -
-
-
-
-

MIX_ACP_AAC_CHANNELS()

-
#define MIX_ACP_AAC_CHANNELS(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->aac_channels)
-
-

-MixAudioConfigParamAAC.aac_channels accessor. -

-

-Indicates the number of output channels used by AAC decoder before SBR or PS tools are applied.

-
-- - - - -

obj :

MixAudioConfigParamsAAC -
-
-
-
-

mix_acp_aac_get_bit_stream_format ()

-
MixAACBitstreamFormt  mix_acp_aac_get_bit_stream_format (MixAudioConfigParamsAAC *obj);
-

-Return the bitstream format currently configured.

-
-- - - - - - - - - - -

obj :

MixAudioConfigParamsAAC -

returns :

MixAACBitstreamFormt -
-
-
-
-

mix_acp_aac_set_bit_stream_format ()

-
MIX_RESULT          mix_acp_aac_set_bit_stream_format   (MixAudioConfigParamsAAC *obj,
-                                                         MixAACBitstreamFormt bit_stream_format);
-

-Set the type of bitstream format as specified in MixAACBitstreamFormt.

-
-- - - - - - - - - - - - - - -

obj :

MixAudioConfigParamsAAC -

bit_stream_format :

Bit stream format. -

returns :

MIX_RESULT -
-
-
-
-

mix_acp_aac_get_aac_profile ()

-
MixAACProfile       mix_acp_aac_get_aac_profile         (MixAudioConfigParamsAAC *obj);
-

-Retrieve the AAC profile currently configured.

-
-- - - - - - - - - - -

obj :

MixAudioConfigParamsAAC -

returns :

MixAACProfile -
-
-
-
-

mix_acp_aac_set_aac_profile ()

-
MIX_RESULT          mix_acp_aac_set_aac_profile         (MixAudioConfigParamsAAC *obj,
-                                                         MixAACProfile aac_profile);
-

-Configure AAC profile for current session. -

-

-Only MIX_AAC_PROFILE_LC is supported in Moorestown.

-
-- - - - - - - - - - - - - - -

obj :

MixAudioConfigParamsAAC -

aac_profile :

AAC profile to set. -

returns :

MIX_RESULT -
-
-
-
-

mix_acp_aac_get_bit_rate_type ()

-
MixAACBitrateType   mix_acp_aac_get_bit_rate_type       (MixAudioConfigParamsAAC *obj);
-

-Retrieve the bit rate type currently configured.

-
-- - - - - - - - - - -

obj :

MixAudioConfigParamsAAC -

returns :

MixAACBitrateType -
-
-
-
-

mix_acp_aac_set_bit_rate_type ()

-
MIX_RESULT          mix_acp_aac_set_bit_rate_type       (MixAudioConfigParamsAAC *obj,
-                                                         MixAACBitrateType bit_rate_type);
-

-Set the bit rate type used.

-
-- - - - - - - - - - - - - - -

obj :

MixAudioConfigParamsAAC -

bit_rate_type :

Bit rate type to set. -

returns :

MIX_RESULT -
-
-
-
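
Putting the calls in this section together, a plausible setup for an ADTS AAC-LC stream could look like the following sketch; sample rate and channel count are illustrative assumptions:

/* Sketch: configure an ADTS AAC-LC stream for Moorestown. */
MixAudioConfigParamsAAC *aac = mix_acp_aac_new();

mix_acp_aac_set_mpeg_id(aac, MIX_AAC_MPEG_4_ID);
mix_acp_aac_set_bit_stream_format(aac, MIX_AAC_BS_ADTS);
mix_acp_aac_set_aac_profile(aac, MIX_AAC_PROFILE_LC); /* only LC supported */
mix_acp_aac_set_aot(aac, 2);                          /* 2 = AAC-LC, 5 = SBR */
mix_acp_aac_set_bit_rate_type(aac, MIX_AAC_BR_CONSTANT);

MIX_ACP_AAC_SAMPLE_RATE(aac) = 44100;  /* decoder operating rate */
MIX_ACP_AAC_CHANNELS(aac) = 2;         /* channels before SBR/PS */
MIX_ACP_AAC_CRC(aac) = FALSE;

/* ... pass to mix_audio_configure(), then release ... */
mix_acp_aac_unref(aac);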
-
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsMP3.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsMP3.html
deleted file mode 100644
index 8c97357..0000000
--- a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsMP3.html
+++ /dev/null
@@ -1,221 +0,0 @@
-[HTML head for "MixAudioConfigParamsMP3" and navigation markup stripped in extraction]
-
-
- - -
-

MixAudioConfigParamsMP3

-

MixAudioConfigParamsMP3 — Audio configuration parameters for MP3 audio.

-
-
-

Synopsis

-
-
-#include <mixacpmp3.h>
-
-                    MixAudioConfigParamsMP3;
-MixAudioConfigParamsMP3 * mix_acp_mp3_new               (void);
-MixAudioConfigParamsMP3 * mix_acp_mp3_ref               (MixAudioConfigParamsMP3 *mix);
-#define             mix_acp_mp3_unref                   (obj)
-#define             MIX_ACP_MP3_CRC                     (obj)
-#define             MIX_ACP_MP3_MPEG_FORMAT             (obj)
-#define             MIX_ACP_MP3_MPEG_LAYER              (obj)
-
-
-
-

Description

-

-A data object which stores audio specific parameters for MP3 audio. -

-

-Additional parameters must be set in the parent object, MixAudioConfigParams.

-
-
-

Details

-
-

MixAudioConfigParamsMP3

-
typedef struct {
-  MixAudioConfigParams parent;
-
-  /* Audio Format Parameters */
-  gboolean CRC;
-  gint MPEG_format;
-  gint MPEG_layer;
-} MixAudioConfigParamsMP3;
-
-

-MI-X Audio Parameter object for MP3 Audio.

-
-- - - - - - - - - - - - - - - - - - -

MixAudioConfigParams parent;

parent. -

gboolean CRC;

CRC. See MIX_ACP_MP3_CRC -

gint MPEG_format;

Optional. MPEG format of the MPEG audio. See MIX_ACP_MP3_MPEG_FORMAT -

gint MPEG_layer;

Optional. MPEG layer of the MPEG audio. See MIX_ACP_MP3_MPEG_LAYER -
-
-
-
-

mix_acp_mp3_new ()

-
MixAudioConfigParamsMP3 * mix_acp_mp3_new               (void);
-

-Use this method to create a new instance of MixAudioConfigParamsMP3.

-
-- - - - -

returns :

A newly allocated instance of MixAudioConfigParamsMP3 -
-
-
-
-

mix_acp_mp3_ref ()

-
MixAudioConfigParamsMP3 * mix_acp_mp3_ref               (MixAudioConfigParamsMP3 *mix);
-

-Add reference count.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixAudioConfigParamsMP3 instance where reference count has been increased. -
-
-
-
-

mix_acp_mp3_unref()

-
#define mix_acp_mp3_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

MIX_ACP_MP3_CRC()

-
#define MIX_ACP_MP3_CRC(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->CRC)
-
-

-MixAudioConfigParamMP3.CRC accessor. -

-

-Optional

-
-- - - - -

obj :

MixAudioConfigParamsMP3 object. -
-
-
-
-

MIX_ACP_MP3_MPEG_FORMAT()

-
#define MIX_ACP_MP3_MPEG_FORMAT(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->MPEG_format)
-
-

-MixAudioConfigParamMP3.MPEG_format accessor. -

-

-Supported MPEG format should be 1 or 2.

-
-- - - - -

obj :

MixAudioConfigParamsMP3 object. -
-
-
-
-

MIX_ACP_MP3_MPEG_LAYER()

-
#define MIX_ACP_MP3_MPEG_LAYER(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->MPEG_layer)
-
-

-MixAudioConfigParamMP3.MPEG_layer accessor. -

-

-Supported layer should be 1, 2, or 3.

-
-- - - - -

obj :

MixAudioConfigParamsMP3 object. -
-
-
-
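
The MP3 accessors compose the same way; a short sketch with illustrative format and layer values:

/* Sketch: configure an MPEG-1 Layer III stream. */
MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new();

MIX_ACP_MP3_CRC(mp3) = FALSE;      /* optional */
MIX_ACP_MP3_MPEG_FORMAT(mp3) = 1;  /* 1 or 2 */
MIX_ACP_MP3_MPEG_LAYER(mp3) = 3;   /* 1, 2 or 3 */

/* ... pass to mix_audio_configure(), then release ... */
mix_acp_mp3_unref(mp3);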
-
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsWMA.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsWMA.html
deleted file mode 100644
index efd14ca..0000000
--- a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsWMA.html
+++ /dev/null
@@ -1,391 +0,0 @@
-[HTML head for "MixAudioConfigParamsWMA" and navigation markup stripped in extraction]
-
-
- - -
-

MixAudioConfigParamsWMA

-

MixAudioConfigParamsWMA — Audio parameters for WMA audio.

-
- -
-

Description

-

-A data object which stores audio specific parameters for WMA. -

-

-In Moorestown, only WMA2 is supported. -

-

-Additional parameters must be set in the parent object, MixAudioConfigParams.

-
-
-

Details

-
-

enum MixAudioWMAVersion

-
typedef enum {
-  MIX_AUDIO_WMA_VUNKNOWN,
-  MIX_AUDIO_WMA_V9,
-  MIX_AUDIO_WMA_V10,
-  MIX_AUDIO_WMA_V10P,
-  MIX_AUDIO_WMA_LAST
-} MixAudioWMAVersion;
-
-

-WMA version.

-
-- - - - - - - - - - - - - - - - - - - - - - -

MIX_AUDIO_WMA_VUNKNOWN

WMA version undefined. -

MIX_AUDIO_WMA_V9

WMA 9 -

MIX_AUDIO_WMA_V10

Not Supported WMA 10 -

MIX_AUDIO_WMA_V10P

Not Supported WMA 10 Pro -

MIX_AUDIO_WMA_LAST

last entry. -
-
-
-
-

MixAudioConfigParamsWMA

-
typedef struct {
-  MixAudioConfigParams parent;
-
-  /* Audio Format Parameters */
-  guint32 channel_mask;
-  guint16 format_tag;
-  guint16 block_align;
-  guint16 wma_encode_opt;/* Encoder option */
-  guint8 pcm_bit_width;  /* source pcm bit width */
-  MixAudioWMAVersion wma_version;
-} MixAudioConfigParamsWMA;
-
-

-MI-X Audio Parameter object

-
-- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

MixAudioConfigParams parent;

parent. -

guint32 channel_mask;

Channel Mask. See MIX_ACP_WMA_CHANNEL_MASK -

guint16 format_tag;

Format tag. See MIX_ACP_WMA_FORMAT_TAG -

guint16 block_align;

Block alignment. See MIX_ACP_WMA_BLOCK_ALIGN -

guint16 wma_encode_opt;

Encoder option. See MIX_ACP_WMA_ENCODE_OPT -

guint8 pcm_bit_width;

Source pcm bit width. See MIX_ACP_WMA_PCM_BIT_WIDTH -

MixAudioWMAVersion wma_version;

WMA version. See mix_acp_wma_set_version -
-
-
-
-

mix_acp_wma_new ()

-
MixAudioConfigParamsWMA * mix_acp_wma_new               (void);
-

-Use this method to create a new instance of MixAudioConfigParamsWMA.

-
-- - - - -

returns :

A newly allocated instance of MixAudioConfigParamsWMA -
-
-
-
-

mix_acp_wma_ref ()

-
MixAudioConfigParamsWMA * mix_acp_wma_ref               (MixAudioConfigParamsWMA *mix);
-

-Add reference count.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixAudioConfigParamsWMA instance where reference count has been increased. -
-
-
-
-

mix_acp_wma_unref()

-
#define mix_acp_wma_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

MIX_ACP_WMA_CHANNEL_MASK()

-
#define MIX_ACP_WMA_CHANNEL_MASK(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->channel_mask)
-
-

-MixAudioConfigParamWMA.channel_mask accessor. -

-

-Channel mask must be one of the following: -

-

-4: For single (1) channel output. -

-

-3: For stereo (2) channels output. -

-

-Only 1 or 2 output channels are supported.

-
-- - - - -

obj :

MixAudioConfigParamsWMA object -
-
-
-
-

MIX_ACP_WMA_FORMAT_TAG()

-
#define MIX_ACP_WMA_FORMAT_TAG(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->format_tag)
-
-

-MixAudioConfigParamWMA.format_tag accessor. -

-

-In Moorestown, only the value 0x0161, combined with use of MIX_AUDIO_WMA_V9, is supported.

-
-- - - - -

obj :

MixAudioConfigParamsWMA object -
-
-
-
-

MIX_ACP_WMA_BLOCK_ALIGN()

-
#define MIX_ACP_WMA_BLOCK_ALIGN(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->block_align)
-
-

-MixAudioConfigParamWMA.block_align accessor. -

-

-Block alignment indicates packet size. Available from ASF Header.

-
-- - - - -

obj :

MixAudioConfigParamsWMA object -
-
-
-
-

MIX_ACP_WMA_ENCODE_OPT()

-
#define MIX_ACP_WMA_ENCODE_OPT(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->wma_encode_opt)
-
-

-MixAudioConfigParamWMA.wma_encode_opt accessor. -

-

-Encoder option available from ASF header.

-
-- - - - -

obj :

MixAudioConfigParamsWMA object -
-
-
-
-

MIX_ACP_WMA_PCM_BIT_WIDTH()

-
#define MIX_ACP_WMA_PCM_BIT_WIDTH(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->pcm_bit_width)
-
-

-MixAudioConfigParamWMA.pcm_bit_width accessor. -

-

-Source pcm bit width available from ASF Header.

-
-- - - - -

obj :

MixAudioConfigParamsWMA object -
-
-
-
-

mix_acp_wma_get_version ()

-
MixAudioWMAVersion  mix_acp_wma_get_version             (MixAudioConfigParamsWMA *obj);
-

-Get WMA Version.

-
-- - - - - - - - - - -

obj :

MixAudioConfigParamsWMA object -

returns :

MixAudioWMAVersion -
-
-
-
-

mix_acp_wma_set_version ()

-
MIX_RESULT          mix_acp_wma_set_version             (MixAudioConfigParamsWMA *obj,
-                                                         MixAudioWMAVersion ver);
-

-Set WMA Version. -

-

-In Moorestown, only MIX_AUDIO_WMA_V9 is supported

-
-- - - - - - - - - - - - - - -

obj :

MixAudioConfigParamsWMA object -

ver :

MixAudioWMAVersion to set. -

returns :

MIX_RESULT. -
-
-
-
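
For WMA, most of these fields come straight from the ASF header; the numeric values in this sketch are illustrative placeholders only:

/* Sketch: configure a WMA V9 stream from ASF header fields. */
MixAudioConfigParamsWMA *wma = mix_acp_wma_new();

mix_acp_wma_set_version(wma, MIX_AUDIO_WMA_V9); /* only V9 on Moorestown */
MIX_ACP_WMA_FORMAT_TAG(wma) = 0x0161;           /* required with V9 */
MIX_ACP_WMA_CHANNEL_MASK(wma) = 3;              /* 3 = stereo, 4 = mono */
MIX_ACP_WMA_BLOCK_ALIGN(wma) = 4096;            /* from ASF header */
MIX_ACP_WMA_ENCODE_OPT(wma) = 31;               /* from ASF header */
MIX_ACP_WMA_PCM_BIT_WIDTH(wma) = 16;            /* from ASF header */

mix_acp_wma_unref(wma);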
-
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioInitParams.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioInitParams.html
deleted file mode 100644
index 5aa7e45..0000000
--- a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioInitParams.html
+++ /dev/null
@@ -1,139 +0,0 @@
-[HTML head for "MixAudioInitParams" and navigation markup stripped in extraction]
-
-
- - -
-

MixAudioInitParams

-

MixAudioInitParams — Initialization parameters object.

-
-
-

Synopsis

-
-
-#include <mixacp.h>
-
-                    MixAudioInitParams;
-MixAudioInitParams * mix_aip_new                        (void);
-MixAudioInitParams * mix_aip_ref                        (MixAudioInitParams *mix);
-#define             mix_aip_unref                       (obj)
-
-
-
-

Description

-

-A data object which stores initialization specific parameters. -

-

-Not Implemented in Moorestown.

-
-
-

Details

-
-

MixAudioInitParams

-
typedef struct {
-  MixParams parent;
-} MixAudioInitParams;
-
-

-MixAudio initialization parameter object.

-
-- - - - -

MixParams parent;

Parent. -
-
-
-
-

mix_aip_new ()

-
MixAudioInitParams * mix_aip_new                        (void);
-

-Use this method to create a new instance of MixAudioInitParams.

-
-- - - - -

returns :

A newly allocated instance of MixAudioInitParams -
-
-
-
-

mix_aip_ref ()

-
MixAudioInitParams * mix_aip_ref                        (MixAudioInitParams *mix);
-

-Add reference count.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixAudioInitParams instance where reference count has been increased. -
-
-
-
-

mix_aip_unref()

-
#define mix_aip_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-mixaudiotypes.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-mixaudiotypes.html
deleted file mode 100644
index 7166107..0000000
--- a/mix_audio/docs/reference/MixAudio/html/MixAudio-mixaudiotypes.html
+++ /dev/null
@@ -1,94 +0,0 @@
-[HTML head for "Mix Audio Types" and navigation markup stripped in extraction]
-
-
- - -
-

Mix Audio Types

-

Mix Audio Types — Miscellaneous types used by the MixAudio API.

-
-
-

Synopsis

-
-
-#include <mixaudiotypes.h>
-
-enum                MixAudioManager;
-
-
-
-

Description

-

-Miscellaneous types used by the MixAudio API.

-
-
-

Details

-
-

enum MixAudioManager

-
typedef enum {
-  MIX_AUDIOMANAGER_NONE = 0,
-  MIX_AUDIOMANAGER_INTELAUDIOMANAGER,
-  MIX_AUDIOMANAGER_LAST
-} MixAudioManager;
-
-

-Audio Manager enumerations.

-
-- - - - - - - - - - - - - - -

MIX_AUDIOMANAGER_NONE

No Audio Manager. -

MIX_AUDIOMANAGER_INTELAUDIOMANAGER

Intel Audio Manager. -

MIX_AUDIOMANAGER_LAST

Last index. -
-
-
-
-
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp b/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp
deleted file mode 100644
index 9063304..0000000
--- a/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp
+++ /dev/null
@@ -1,124 +0,0 @@
-[devhelp XML index markup stripped in extraction]
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp2 b/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp2
deleted file mode 100644
index f9e0358..0000000
--- a/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp2
+++ /dev/null
@@ -1,186 +0,0 @@
-[devhelp XML index markup stripped in extraction]
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio.html b/mix_audio/docs/reference/MixAudio/html/MixAudio.html
deleted file mode 100644
index 2f53577..0000000
--- a/mix_audio/docs/reference/MixAudio/html/MixAudio.html
+++ /dev/null
@@ -1,1286 +0,0 @@
-[HTML head for "MixAudio" and navigation markup stripped in extraction]
-
-
- - -
-

MixAudio

-

MixAudio — Object to support a single stream playback using hardware accelerated decoder.

-
-
-

Synopsis

-
-
-#include <mixaudio.h>
-
-enum                MixStreamState;
-enum                MixState;
-enum                MixCodecMode;
-enum                MixVolType;
-enum                MixVolRamp;
-                    MixIOVec;
-enum                MixDeviceState;
-                    MixAudio;
-MixAudio *          mix_audio_new                       (void);
-MixAudio *          mix_audio_ref                       (MixAudio *mix);
-#define             mix_audio_unref                     (obj)
-MIX_RESULT          mix_audio_get_version               (guint *major,
-                                                         guint *minor);
-MIX_RESULT          mix_audio_initialize                (MixAudio *mix,
-                                                         MixCodecMode mode,
-                                                         MixAudioInitParams *aip,
-                                                         MixDrmParams *drminitparams);
-MIX_RESULT          mix_audio_configure                 (MixAudio *mix,
-                                                         MixAudioConfigParams *audioconfigparams,
-                                                         MixDrmParams *drmparams);
-MIX_RESULT          mix_audio_decode                    (MixAudio *mix,
-                                                         const MixIOVec *iovin,
-                                                         gint iovincnt,
-                                                         guint64 *insize,
-                                                         MixIOVec *iovout,
-                                                         gint iovoutcnt,
-                                                         guint64 *outsize);
-MIX_RESULT          mix_audio_capture_encode            (MixAudio *mix,
-                                                         MixIOVec *iovout,
-                                                         gint iovoutcnt);
-MIX_RESULT          mix_audio_start                     (MixAudio *mix);
-MIX_RESULT          mix_audio_stop_drop                 (MixAudio *mix);
-MIX_RESULT          mix_audio_stop_drain                (MixAudio *mix);
-MIX_RESULT          mix_audio_pause                     (MixAudio *mix);
-MIX_RESULT          mix_audio_resume                    (MixAudio *mix);
-MIX_RESULT          mix_audio_get_timestamp             (MixAudio *mix,
-                                                         guint64 *msecs);
-MIX_RESULT          mix_audio_set_mute                  (MixAudio *mix,
-                                                         gboolean mute);
-MIX_RESULT          mix_audio_get_mute                  (MixAudio *mix,
-                                                         gboolean *muted);
-MIX_RESULT          mix_audio_get_max_vol               (MixAudio *mix,
-                                                         gint *maxvol);
-MIX_RESULT          mix_audio_get_min_vol               (MixAudio *mix,
-                                                         gint *minvol);
-MIX_RESULT          mix_audio_get_volume                (MixAudio *mix,
-                                                         gint *currvol,
-                                                         MixVolType type);
-MIX_RESULT          mix_audio_set_volume                (MixAudio *mix,
-                                                         gint currvol,
-                                                         MixVolType type,
-                                                         gulong msecs,
-                                                         MixVolRamp ramptype);
-MIX_RESULT          mix_audio_deinitialize              (MixAudio *mix);
-MIX_RESULT          mix_audio_get_stream_state          (MixAudio *mix,
-                                                         MixStreamState *streamState);
-MIX_RESULT          mix_audio_get_state                 (MixAudio *mix,
-                                                         MixState *state);
-gboolean            mix_audio_am_is_enabled             (MixAudio *mix);
-MIX_RESULT          mix_audio_is_am_available           (MixAudio *mix,
-                                                         MixAudioManager am,
-                                                         gboolean *avail);
-MIX_RESULT          mix_audio_get_output_configuration  (MixAudio *mix,
-                                                         MixAudioConfigParams **audioconfigparams);
-MIX_RESULT          mix_audio_get_stream_byte_decoded   (MixAudio *mix,
-                                                         guint64 *byte);
-
-
-
-

Object Hierarchy

-
-  GObject
-   +----MixAudio
-
-
-
-

Description

-

-The MixAudio object provides a thread-safe API for applications and/or multimedia frameworks to take advantage of the Intel Smart Sound Technology(TM) driver for hardware audio decode and render. -

-

-Each MixAudio object represents one streaming session with the Intel Smart Sound driver and provides configuration and control of the decoding and playback options. -

-

-The MixAudio object also supports integration with the Intel Audio Manager service. -

-

-An application can utilize the MixAudio object by calling the following sequence: -

-
    -
  1. -mix_audio_new() to create a MixAudio instance.
  2. -
  3. -mix_audio_initialize() to allocate Intel Smart Sound Technology resource.
  4. -
  5. -mix_audio_configure() to configure stream parameters.
  6. -
  7. -mix_audio_decode() can be called repeatedly for decoding and, optionally, rendering.
  8. -
  9. -mix_audio_start() is called after the 1st mix_audio_decode() method to start rendering.
  10. -
  11. -mix_audio_stop_drain() is called after the last buffer is passed for decoding in with mix_audio_decode().
  12. -
  13. -mix_audio_deinitialize() to free resource once playback is completed.
  14. -
-

-

-

-Since mix_audio_decode() is a blocking call during playback, the following methods are called in a separate thread to control progress: -

- -
-
-
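
The sequence above condenses into the following sketch. Error paths and the decode loop are trimmed, encoded_data/encoded_size are hypothetical values assumed to come from the application's demuxer, and MIX_AUDIOCONFIGPARAMS() is the cast macro used by the config accessor macros:

/* Sketch of the direct-render call sequence. */
MixAudio *mix = mix_audio_new();

if (mix_audio_initialize(mix, MIX_CODING_DECODE, NULL, NULL)
        != MIX_RESULT_SUCCESS) {
    /* device unavailable: fall back to e.g. software decoding */
}

MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new();
/* ... fill in codec and session parameters as sketched earlier ... */
mix_audio_configure(mix, MIX_AUDIOCONFIGPARAMS(mp3), NULL);

MixIOVec iovin = { encoded_data, encoded_size }; /* from the demuxer */
guint64 consumed = 0;

/* Direct-render: no output buffers are passed. */
mix_audio_decode(mix, &iovin, 1, &consumed, NULL, 0, NULL);
mix_audio_start(mix);          /* start rendering after the 1st decode */

/* ... keep feeding buffers with mix_audio_decode() ... */

mix_audio_stop_drain(mix);     /* after the last buffer */
mix_audio_deinitialize(mix);
mix_acp_mp3_unref(mp3);
mix_audio_unref(mix);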

Details

-
-

enum MixStreamState

-
typedef enum {
-  MIX_STREAM_NULL=0,
-  MIX_STREAM_STOPPED,
-  MIX_STREAM_PLAYING,
-  MIX_STREAM_PAUSED,
-  MIX_STREAM_DRAINING,
-  MIX_STREAM_LAST
-} MixStreamState;
-
-

-Stream State during Decode and Render or Encode mode. These states do not apply to Decode and Return mode.

-
-- - - - - - - - - - - - - - - - - - - - - - - - - - -

MIX_STREAM_NULL

Stream is not allocated. -

MIX_STREAM_STOPPED

Stream is at STOP state. This is the only state DNR is allowed. -

MIX_STREAM_PLAYING

Stream is at Playing state. -

MIX_STREAM_PAUSED

Stream is Paused. -

MIX_STREAM_DRAINING

Stream is draining -- the remaining data in the device buffers is playing out. This state is special due to the limitation that no other control operations are allowed in this state. The stream will become MIX_STREAM_STOPPED automatically when draining has completed. -

MIX_STREAM_LAST

Last index in the enumeration. -
-
-
-
-

enum MixState

-
typedef enum {
-  MIX_STATE_NULL=0,
-  MIX_STATE_UNINITIALIZED,
-  MIX_STATE_INITIALIZED,
-  MIX_STATE_CONFIGURED,
-  MIX_STATE_LAST
-} MixState;
-
-

-The various states the device can be in.

-
-- - - - - - - - - - - - - - - - - - - - - - -

MIX_STATE_NULL

MIX_STATE_UNINITIALIZED

MIX is not initialized. -

MIX_STATE_INITIALIZED

MIX is initialized. -

MIX_STATE_CONFIGURED

MIX is configured successfully. -

MIX_STATE_LAST

Last index in the enumeration. -
-
-
-
-

enum MixCodecMode

-
typedef enum {
-  MIX_CODING_INVALID=0,
-  MIX_CODING_ENCODE,
-  MIX_CODING_DECODE,
-  MIX_CODING_LAST
-} MixCodecMode;
-
-

-Mode where device is operating on. See mix_audio_initialize().

-
-- - - - - - - - - - - - - - - - - - -

MIX_CODING_INVALID

Indicates the device is uninitialized for any mode. -

MIX_CODING_ENCODE

Indicates device is opened for encoding. -

MIX_CODING_DECODE

Indicates device is opened for decoding. -

MIX_CODING_LAST

Last index in the enumeration. -
-
-
-
-

enum MixVolType

-
typedef enum {
-  MIX_VOL_PERCENT=0,
-  MIX_VOL_DECIBELS,
-  MIX_VOL_LAST
-} MixVolType;
-
-

-See mix_audio_get_volume() and mix_audio_set_volume().

-
-- - - - - - - - - - - - - - -

MIX_VOL_PERCENT

volume is expressed in percentage. -

MIX_VOL_DECIBELS

volume is expressed in decibel. -

MIX_VOL_LAST

last entry. -
-
-
-
-

enum MixVolRamp

-
typedef enum 
-{
-  MIX_RAMP_LINEAR = 0,
-  MIX_RAMP_EXPONENTIAL,
-  MIX_RAMP_LAST
-} MixVolRamp;
-
-

-See mix_audio_get_volume() and mix_audio_set_volume().

-
-- - - - - - - - - - - - - - -

MIX_RAMP_LINEAR

volume ramp is linear. -

MIX_RAMP_EXPONENTIAL

volume ramp is exponential. -

MIX_RAMP_LAST

last entry. -
-
-
-
-

MixIOVec

-
typedef struct {
-  guchar *data;
-  gint size;
-} MixIOVec;
-
-

-Scatter-gather style structure, to be used by the mix_audio_decode() method for input and output buffers.

-
-- - - - - - - - - - -

guchar *data;

data pointer -

gint size;

size of buffer in data -
-
-
-
-

enum MixDeviceState

-
typedef enum {
-  MIX_AUDIO_DEV_CLOSED=0,
-  MIX_AUDIO_DEV_OPENED,
-  MIX_AUDIO_DEV_ALLOCATED
-} MixDeviceState;
-
-

-Device state.

-
-- - - - - - - - - - - - - - -

MIX_AUDIO_DEV_CLOSED

TBD -

MIX_AUDIO_DEV_OPENED

TBD -

MIX_AUDIO_DEV_ALLOCATED

TBD -
-
-
-
-

MixAudio

-
typedef struct {
-  GObject parent;
-} MixAudio;
-
-

-MI-X Audio object

-
-- - - - -

GObject parent;

Parent object. -
-
-
-
-

mix_audio_new ()

-
MixAudio *          mix_audio_new                       (void);
-

-Use this method to create a new instance of MixAudio.

-
-- - - - -

returns :

A newly allocated instance of MixAudio -
-
-
-
-

mix_audio_ref ()

-
MixAudio *          mix_audio_ref                       (MixAudio *mix);
-

-Add reference count.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixAudio instance where reference count has been increased. -
-
-
-
-

mix_audio_unref()

-
#define mix_audio_unref(obj) g_object_unref (G_OBJECT(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

mix_audio_get_version ()

-
MIX_RESULT          mix_audio_get_version               (guint *major,
-                                                         guint *minor);
-

-Returns the version of the MI-X library.

-
-- - - - -

returns :

MIX_RESULT_SUCCESS -
-
-
-
-

mix_audio_initialize ()

-
MIX_RESULT          mix_audio_initialize                (MixAudio *mix,
-                                                         MixCodecMode mode,
-                                                         MixAudioInitParams *aip,
-                                                         MixDrmParams *drminitparams);
-

-This function will initialize an encode or decode session with this MixAudio instance. During this call, the device will be opened. If the device is not available, an error is returned to the caller so that an alternative (e.g. software decoding) can be configured instead. Use mix_audio_deinitialize() to close the device. -

-

-A previously initialized session must be de-initialized using mix_audio_deinitialize() before it can be initialized again.

-
-- - - - - - - - - - - - - - - - - - - - - - -

mix :

MixAudio object. -

mode :

Requested MixCodecMode. -

aip :

Audio initialization parameters. -

drminitparams :

Optional. DRM initialization param if applicable. -

returns :

MIX_RESULT_SUCCESS on successful initialization. MIX_RESULT_ALREADY_INIT if the session is already initialized. -
-
-
-
-

mix_audio_configure ()

-
MIX_RESULT          mix_audio_configure                 (MixAudio *mix,
-                                                         MixAudioConfigParams *audioconfigparams,
-                                                         MixDrmParams *drmparams);
-

-This function can be used to configure a stream for the current session. The caller can use this function to do the following: -

-

-

-
    -
  • Choose decoding mode (direct-render or decode-return)
  • -
  • Provide DRM parameters (using DRMparams object)
  • -
  • Provide stream parameters (using STRMparams objects)
  • -
  • Provide a stream name for the Intel Smart Sound Technology stream
  • -
-

-

-

-SST stream parameters will be set during this call, and stream resources allocated in SST. -

-

-

-
-

Intel Audio Manager support:

-

If Intel Audio Manager support is enabled, and if mode is specified as MIX_DECODE_DIRECTRENDER, the SST stream will be registered with Intel Audio Manager in the context of this call, using the stream name provided in streamname. The application will receive a notification from Intel Audio Manager that the stream has been created, either during or soon after this call, and should be ready to handle either possibility. A stream ID (associated with the stream name) will be provided by Intel Audio Manager and will be used for subsequent notifications from Intel Audio Manager or calls to Intel Audio Manager for muting, pause and resume. See mix_audio_getstreamid()

-

If a stream is already registered with Intel Audio Manager, the application must pass the same streamname argument to retain the session. Otherwise, the existing stream will be unregistered and a new stream will be registered with the new streamname. -

-
-

-

-

-If mode is specified to MIX_DECODE_DIRECTRENDER but direct-render mode is not available (due to end-user use of an alternative output device), an error indication will be returned to the caller so that an alternate pipeline configuration can be created (e.g. including a Pulse Audio sink, and support for output buffers). In this case, the caller will need to call mix_audio_configure() again with mode specified as MIX_DECODE_DECODERETURN to request decode-return mode. -

-

-This method can be called multiple times if reconfiguration of the stream is needed. However, this method must be called when the stream is in MIX_STREAM_STOPPED state.

-
-- - - - - - - - - - - - - - - - - - -

mix :

MixAudio object. -

audioconfigparams :

a MixAudioConfigParams derived object containing information for the specific stream type. -

drmparams :

Optional. DRM initialization param if applicable. -

returns :

Result indicates successful or not. -
-
-
-
-

mix_audio_decode ()

-
MIX_RESULT          mix_audio_decode                    (MixAudio *mix,
-                                                         const MixIOVec *iovin,
-                                                         gint iovincnt,
-                                                         guint64 *insize,
-                                                         MixIOVec *iovout,
-                                                         gint iovoutcnt,
-                                                         guint64 *outsize);
-

-This function is used to initiate HW accelerated decoding of encoded data buffers. This function may be used in two major modes, direct-render or decode-return. -

-

-With direct-render, input buffers are provided by the caller which hold encoded audio data, and no output buffers are provided. The encoded data is decoded, and the decoded data is sent directly to the output speaker. This allows very low power audio rendering and is the best choice of operation for longer battery life. -

-

-

-
-

Intel Audio Manager Support

-However, if the user has connected a different target output device, such as Bluetooth headphones, this mode cannot be used as the decoded audio must be directed to the Pulse Audio stack where the output to Bluetooth device can be supported, per Intel Audio Manager guidelines. This mode is called decode-return, and requires the caller to provide output buffers for the decoded data. -
-

-

-

-Input buffers in both modes are one or more user space buffers using a scatter/gather style vector interface. -

-

-Output buffers for the decode-return mode are one or more user space buffers in a scatter style vector interface. Buffers will be filled in order and lengths of data filled will be returned. -

-

-This call will block until data has been completely copied or queued to the driver. All user space buffers may be used or released when this call returns. -

-

-Note: If the stream is configured as MIX_DECODE_DIRECTRENDER, then whenever the stream is in the MIX_STREAM_STOPPED state, a call to mix_audio_decode() will not start playback until mix_audio_start() is called. This behavior allows the application to queue up data but delay playback until the appropriate time.

-
-- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

mix :

MixAudio object. -

iovin :

a pointer to an array of MixIOVec structures that contain the input buffers -

iovincnt :

the number of entries in the iovin array -

iovout :

a pointer to an array of MixIOVec structures that represent the output buffers. During input, each size in the MixIOVec array represents the available buffer size pointed to by data. Upon return, each size value will be updated to reflect how much data has been filled. This parameter is ignored if the stream is configured to MIX_DECODE_DIRECTRENDER. See mix_audio_configure() for more detail. -

iovoutcnt :

in/out parameter. On input, it contains the number of entries available in the iovout array; upon return, it is updated to reflect how many entries in the iovout array have been populated with data. This parameter is ignored if the stream is configured to MIX_DECODE_DIRECTRENDER. See mix_audio_configure() for more detail. -

outsize :

Total number of bytes returned for the decode session. This parameter is ignored if stream is configured to MIX_DECODE_DIRECTRENDER. -

returns :

MIX_RESULT -
-
-
-
-
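
In decode-return mode the caller supplies output vectors as well; a minimal sketch, reusing the mix object and the hypothetical demuxer buffers assumed above, with an arbitrary output buffer size:

/* Sketch: decode-return with a caller-provided output buffer. */
guchar outbuf[8192];
MixIOVec iovin  = { encoded_data, encoded_size };
MixIOVec iovout = { outbuf, (gint)sizeof(outbuf) }; /* size in = capacity */
guint64 insize = 0, outsize = 0;

mix_audio_decode(mix, &iovin, 1, &insize, &iovout, 1, &outsize);
/* On return, iovout.size and outsize report the decoded length. */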

mix_audio_capture_encode ()

-
MIX_RESULT          mix_audio_capture_encode            (MixAudio *mix,
-                                                         MixIOVec *iovout,
-                                                         gint iovoutcnt);
-

-To read encoded data from device. -

-

- -NOTE: May need to rename to "read_encoded" or another name, since "encode" seems to mean taking raw audio and converting it to compressed audio. -

-
-- - - - - - - - - - - - - - - - - - -

mix :

MixAudio object. -

iovout :

Capture audio samples. -

iovoutcnt :

Number of entry in the input vector iovout. -

returns :

MIX_RESULT -
-
-
-
-

mix_audio_start ()

-
MIX_RESULT          mix_audio_start                     (MixAudio *mix);
-

-If the stream is configured to MIX_DECODE_DIRECTRENDER, the application uses this call to change the stream out of the MIX_STREAM_STOPPED state. If mix_audio_decode() was called and is blocking in a separate thread prior to this call, this method causes the device to start rendering data. -

-

-In MIX_DECODE_DECODERETURN, this method is a no-op.

-
-- - - - - - - - - - -

mix :

MixAudio object. -

returns :

MIX_RESULT_SUCCESS if the resulting state is either MIX_STREAM_PLAYING or MIX_STREAM_PAUSED. Fail code otherwise. -
-
-
-
-

mix_audio_stop_drop ()

-
MIX_RESULT          mix_audio_stop_drop                 (MixAudio *mix);
-

-If the stream is configured to MIX_DECODE_DIRECTRENDER, the application uses this function to stop the processing and playback of audio. -

-

-All remaining frames to be decoded or rendered will be discarded and playback will stop immediately, unblocking any pending mix_audio_decode(). -

-

-If MIX_STOP_DRAIN is requested, the call will block with the stream state set to MIX_STREAM_DRAINING, and return only after all remaining frames in previously submitted buffers are decoded and rendered. When MIX_STOP_DRAIN returns successfully, the stream will have reached MIX_STREAM_STOPPED. -

-

-After this call, the timestamp retrieved by mix_audio_get_timestamp() is reset to zero. -

-

-Note that this method returns MIX_RESULT_WRONG_STATE if the stream is in MIX_STREAM_DRAINING state.

-
-- - - - - - - - - - -

mix :

MixAudio object. -

returns :

MIX_RESULT_SUCCESS if the resulting state has successfully reached MIX_STREAM_STOPPED. Fail code otherwise. -
-
-
-
-

mix_audio_stop_drain ()

-
MIX_RESULT          mix_audio_stop_drain                (MixAudio *mix);
-

-If the stream is configured to MIX_DECODE_DIRECTRENDER, the application uses this function to stop the processing and playback of audio. -

-

-The call will block with the stream state set to MIX_STREAM_DRAINING, and return only after all remaining frames in previously submitted buffers are decoded and rendered. -

-

-Note that this method blocks until MIX_STREAM_STOPPED is reached if it is called when the stream is already in MIX_STREAM_DRAINING state.

-
-- - - - - - - - - - -

mix :

MixAudio object. -

returns :

MIX_RESULT_SUCCESS if the resulting state has successfully reached MIX_STREAM_STOPPED. Fail code otherwise. -
-
-
-
-

mix_audio_pause ()

-
MIX_RESULT          mix_audio_pause                     (MixAudio *mix);
-

-If the stream is configured to MIX_DECODE_DIRECTRENDER, the application uses this call to change the stream state from MIX_STREAM_PLAYING to MIX_STREAM_PAUSED. Note that this method returns success only when the resulting state reaches MIX_STREAM_PAUSED, meaning it will return a failure code if it is called in a state such as MIX_STREAM_STOPPED, where transitioning to MIX_STREAM_PAUSED is not possible. -

-

-In some situations, where there is a potential race condition with the DRAINING operation, this method may return MIX_RESULT_NEED_RETRY to indicate that the last operation result is inconclusive, requesting that the caller call again.

-
-- - - - - - - - - - -

mix :

MixAudio object. -

returns :

MIX_RESULT_SUCCESS if MIX_STREAM_PAUSED state is reached successfully. MIX_RESULT_WRONG_STATE if operation is not allowed with the current state. -
-
-
-
-

mix_audio_resume ()

-
MIX_RESULT          mix_audio_resume                    (MixAudio *mix);
-

-If the stream is configured to MIX_DECODE_DIRECTRENDER, the application uses this call to change the stream state to MIX_STREAM_PLAYING. Note that this method returns success only when the resulting state reaches MIX_STREAM_PLAYING, meaning it will return a failure code if it is called in a state such as MIX_STREAM_DRAINING, where transitioning to MIX_STREAM_PLAYING is not possible.

-
-- - - - - - - - - - -

mix :

MixAudio object. -

returns :

MIX_RESULT_SUCCESS if MIX_STREAM_PLAYING state is reached successfully. MIX_RESULT_WRONG_STATE if operation is not allowed with the current state. -
-
-
-
-

mix_audio_get_timestamp ()

-
MIX_RESULT          mix_audio_get_timestamp             (MixAudio *mix,
-                                                         guint64 *msecs);
-

-This function can be used to retrieve the current timestamp for audio playback in milliseconds. The timestamp will reflect the amount of audio data rendered since the start of the stream, or since the last stop. Note that the timestamp is always reset to zero when the stream enters the MIX_STREAM_STOPPED state. The timestamp is an unsigned long value, so the value will wrap when the timestamp reaches ULONG_MAX. This function is only valid in direct-render mode.

-
-- - - - - - - - - - - - - - -

mix :

MixAudio object. -

msecs :

play time in milliseconds. -

returns :

MIX_RESULT_SUCCESS if the timestamp is available. MIX_RESULT_WRONG_MODE if operation is not allowed with the current mode. -
-
-
-
-
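
Because mix_audio_decode() blocks during playback, a query like this normally runs on a separate control thread; a minimal sketch, reusing the mix object assumed earlier:

/* Sketch: control-thread progress query, plus pause with retry. */
guint64 msecs = 0;
if (mix_audio_get_timestamp(mix, &msecs) == MIX_RESULT_SUCCESS)
    g_print("rendered %" G_GUINT64_FORMAT " ms\n", msecs);

if (mix_audio_pause(mix) == MIX_RESULT_NEED_RETRY)
    mix_audio_pause(mix);  /* possible race with draining; retry */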

mix_audio_set_mute ()

-
MIX_RESULT          mix_audio_set_mute                  (MixAudio *mix,
-                                                         gboolean mute);
-

-This function is used to mute and unmute audio playback. While muted, playback would continue but silently. This function is only valid when the session is configured to MIX_DECODE_DIRECTRENDER mode. -

-

-Note that playback volume may change due to changes of global settings while the stream is muted.

-
-- - - - - - - - - - - - - - -

mix :

MixAudio object. -

mute :

Turn mute on/off. -

returns :

MIX_RESULT_SUCCESS on success or other fail code. -
-
-
-
-

mix_audio_get_mute ()

-
MIX_RESULT          mix_audio_get_mute                  (MixAudio *mix,
-                                                         gboolean *muted);
-

-Get Mute.

-
-- - - - - - - - - - - - - - -

mix :

MixAudio object. -

muted :

current mute state. -

returns :

MIX_RESULT_SUCCESS on success or other fail code. -
-
-
-
-

mix_audio_get_max_vol ()

-
MIX_RESULT          mix_audio_get_max_vol               (MixAudio *mix,
-                                                         gint *maxvol);
-

-This function can be used if the application will be setting the audio volume using decibels instead of percentage. The maximum volume in decibels supported by the driver will be returned. This value can be used to determine the upper bound of the decibel range in calculating volume levels. This value is a signed integer. This function is only valid if stream is configured to MIX_DECODE_DIRECTRENDER mode.

-
-- - - - - - - - - - - - - - -

mix :

MixAudio object. -

maxvol :

pointer to receive the maximum volume. -

returns :

MIX_RESULT_SUCCESS on success or other fail code. -
-
-
-
-

mix_audio_get_min_vol ()

-
MIX_RESULT          mix_audio_get_min_vol               (MixAudio *mix,
-                                                         gint *minvol);
-

-This function can be used if the application will be setting the audio volume using decibels instead of percentage. The minimum volume in decibels supported by the driver will be returned. This value can be used to determine the lower bound of the decibel range in calculating volume levels. This value is a signed integer. This function is only valid if stream is configured to MIX_DECODE_DIRECTRENDER mode.

-
-- - - - - - - - - - - - - - -

mix :

MixAudio object. -

minvol :

pointer to receive the minimum volume. -

returns :

MIX_RESULT_SUCCESS on success or other fail code. -
-
-
-
-

mix_audio_get_volume ()

-
MIX_RESULT          mix_audio_get_volume                (MixAudio *mix,
-                                                         gint *currvol,
-                                                         MixVolType type);
-

-This function returns the current volume setting in either decibels or percentage. This function is only valid if stream is configured to MIX_DECODE_DIRECTRENDER mode.

-
-- - - - - - - - - - - - - - - - - - -

mix :

MixAudio object. -

currvol :

Current volume. Note that if type equals MIX_VOL_PERCENT, this value will be returned within the range of 0 to 100 inclusive. -

type :

The type represented by currvol. -

returns :

MIX_RESULT_SUCCESS on success or other fail code. -
-
-
-
-
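
A minimal sketch, using the MIX_VOL_PERCENT type mentioned above (assumptions as in the previous examples):

  gint vol = 0;
  if (mix_audio_get_volume(audio, &vol, MIX_VOL_PERCENT) == MIX_RESULT_SUCCESS)
    g_print("Current volume: %d%%\n", vol); /* 0 to 100 inclusive */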

mix_audio_set_volume ()

-
MIX_RESULT          mix_audio_set_volume                (MixAudio *mix,
-                                                         gint currvol,
-                                                         MixVolType type,
-                                                         gulong msecs,
-                                                         MixVolRamp ramptype);
-

-This function sets the current volume in either decibels or percentage. This function is only valid if the stream is configured in MIX_DECODE_DIRECTRENDER mode.

-
-- - - - - - - - - - - - - - - - - - -

mix :

MixAudio object. -

currvol :

Current volume. Note that if type equals MIX_VOL_PERCENT, this value will be truncated to within the range of 0 to 100 inclusive. -

type :

The type represented by currvol. -

msecs :

duration of the volume ramp in milliseconds. -

ramptype :

type of volume ramp to apply. -

returns :

MIX_RESULT_SUCCESS on success or other fail code. -
-
-
-
-
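
A minimal sketch (assumptions as above; passing 0 for both the ramp duration and the ramp type, since the underlying snd_sst_vol structure documents 0 as the default ramp type):

  /* Set volume to 75 percent with no ramp. */
  MIX_RESULT ret = mix_audio_set_volume(audio, 75, MIX_VOL_PERCENT, 0, (MixVolRamp)0);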

mix_audio_deinitialize ()

-
MIX_RESULT          mix_audio_deinitialize              (MixAudio *mix);
-

-This function uninitializes a session with this MI-X instance. During this call, the SST device will be closed, and resources, including mmapped buffers, will be freed. This function should be called by the application once mix_audio_initialize() has been called. -

-

-

-
-

Intel Audio Manager Support

-The SST stream will be unregistered from the Intel Audio Manager if it was registered. -
-

-

-

-Note that this method should not fail under normal conditions. If it does return a failure, the state of this object and the underlying mechanism is compromised, and the application should not attempt to reuse this object.

-
-- - - - - - - - - - -

mix :

MixAudio object. -

returns :

MIX_RESULT_SUCCESS on success or other fail code. -
-
-
-
-
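
A typical teardown sketch (the final mix_audio_unref() call is an assumption of this example, following the usual reference-counted cleanup pattern, rather than a documented requirement):

  if (mix_audio_deinitialize(audio) == MIX_RESULT_SUCCESS)
  {
    mix_audio_unref(audio); /* release the object once the session is closed */
    audio = NULL;
  }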

mix_audio_get_stream_state ()

-
MIX_RESULT          mix_audio_get_stream_state          (MixAudio *mix,
-                                                         MixStreamState *streamState);
-

-Get the state of the current stream.

-
-- - - - - - - - - - - - - - -

mix :

MixAudio object. -

streamState :

pointer to receive stream state. -

returns :

MIX_RESULT indicating success or failure. -
-
-
-
-
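
A minimal sketch, using the MIX_STREAM_STOPPED state mentioned earlier (other MixStreamState values are defined by the API but not shown in this section):

  MixStreamState sstate;
  if (mix_audio_get_stream_state(audio, &sstate) == MIX_RESULT_SUCCESS &&
      sstate == MIX_STREAM_STOPPED)
  {
    /* The playback timestamp has been reset to zero at this point. */
  }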

mix_audio_get_state ()

-
MIX_RESULT          mix_audio_get_state                 (MixAudio *mix,
-                                                         MixState *state);
-

-Get the device state of the audio session.

-
-- - - - - - - - - - - - - - -

mix :

MixAudio object. -

state :

pointer to receive the device state. -

returns :

Current device state. -
-
-
-
-
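
A minimal sketch (assumptions as above; the MixState value is printed numerically here since its enumerators are not listed in this section):

  MixState devstate;
  if (mix_audio_get_state(audio, &devstate) == MIX_RESULT_SUCCESS)
    g_print("Device state: %d\n", (int)devstate);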

mix_audio_am_is_enabled ()

-
gboolean            mix_audio_am_is_enabled             (MixAudio *mix);
-

-This method checks whether the current session is configured to use the Intel Audio Manager. Note that the Intel Audio Manager is considered disabled if the stream has not been explicitly initialized to use the service.

-
-- - - - - - - - - - -

mix :

MixAudio object. -

returns :

boolean indicating whether the Intel Audio Manager is enabled for the current session. -
-
-
-
-
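
A minimal sketch, assumptions as above:

  if (mix_audio_am_is_enabled(audio))
    g_print("Session is configured to use the Intel Audio Manager\n");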

mix_audio_is_am_available ()

-
MIX_RESULT          mix_audio_is_am_available           (MixAudio *mix,
-                                                         MixAudioManager am,
-                                                         gboolean *avail);
-

-Check if the given Audio Manager is available.

-
-- - - - - - - - - - - - - - - - - - -

mix :

TBD -

am :

TBD -

avail :

TBD -

returns :

TBD -
-
-
-
-

mix_audio_get_output_configuration ()

-
MIX_RESULT          mix_audio_get_output_configuration  (MixAudio *mix,
-                                                         MixAudioConfigParams **audioconfigparams);
-

-This method retrieves the current configuration. It can be called after initialization. If a stream has been configured, it returns the corresponding derived object of MixAudioConfigParams.

-
-- - - - - - - - - - - - - - -

mix :

MixAudio object. -

audioconfigparams :

double pointer to hold output configuration. -

returns :

MIX_RESULT_SUCCESS on success or other fail code. -
-
-
-
-
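
A minimal sketch (assuming the caller receives a reference to the configuration object and releases it with mix_acp_unref(); the accessor macros are those defined by MixAudioConfigParams):

  MixAudioConfigParams *cfg = NULL;
  if (mix_audio_get_output_configuration(audio, &cfg) == MIX_RESULT_SUCCESS)
  {
    g_print("Channels: %d, sample rate: %d Hz\n",
            MIX_ACP_NUM_CHANNELS(cfg), MIX_ACP_SAMPLE_FREQ(cfg));
    mix_acp_unref(cfg); /* assumed: caller owns the returned reference */
  }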

mix_audio_get_stream_byte_decoded ()

-
MIX_RESULT          mix_audio_get_stream_byte_decoded   (MixAudio *mix,
-                                                         guint64 *byte);
-

-Retrieve the cumulative number of bytes decoded. -

-

-Not Implemented.

-
-- - - - - - - - - - - - - - -

mix :

MixAudio object. -

byte :

pointer to receive the cumulative stream bytes decoded. -

returns :

MIX_RESULT_SUCCESS if the value is available. MIX_RESULT_WRONG_MODE if the operation is not allowed in the current mode. -
-
-
-
- - - diff --git a/mix_audio/docs/reference/MixAudio/html/api-index-full.html b/mix_audio/docs/reference/MixAudio/html/api-index-full.html deleted file mode 100644 index 99c830e..0000000 --- a/mix_audio/docs/reference/MixAudio/html/api-index-full.html +++ /dev/null @@ -1,259 +0,0 @@ - - - - -API Index - - - - - - - - - - - - - - - - - - -
-

-API Index

-
-
-

M

-
-
MixAACBitrateType, enum MixAACBitrateType -
-
MixAACBitstreamFormt, enum MixAACBitstreamFormt -
-
MixAACMpegID, enum MixAACMpegID -
-
MixAACProfile, enum MixAACProfile -
-
MixACPBPSType, enum MixACPBPSType -
-
MixACPOpAlign, enum MixACPOpAlign -
-
MixAudio, MixAudio -
-
MixAudioConfigParams, MixAudioConfigParams -
-
MixAudioConfigParamsAAC, MixAudioConfigParamsAAC -
-
MixAudioConfigParamsMP3, MixAudioConfigParamsMP3 -
-
MixAudioConfigParamsWMA, MixAudioConfigParamsWMA -
-
MixAudioInitParams, MixAudioInitParams -
-
MixAudioManager, enum MixAudioManager -
-
MixAudioWMAVersion, enum MixAudioWMAVersion -
-
MixCodecMode, enum MixCodecMode -
-
MixDecodeMode, enum MixDecodeMode -
-
MixDeviceState, enum MixDeviceState -
-
MixIOVec, MixIOVec -
-
MixState, enum MixState -
-
MixStreamState, enum MixStreamState -
-
MixVolRamp, enum MixVolRamp -
-
MixVolType, enum MixVolType -
-
MIX_ACP_AAC_CHANNELS, MIX_ACP_AAC_CHANNELS() -
-
MIX_ACP_AAC_CRC, MIX_ACP_AAC_CRC() -
-
mix_acp_aac_get_aac_profile, mix_acp_aac_get_aac_profile () -
-
mix_acp_aac_get_aot, mix_acp_aac_get_aot () -
-
mix_acp_aac_get_bit_rate_type, mix_acp_aac_get_bit_rate_type () -
-
mix_acp_aac_get_bit_stream_format, mix_acp_aac_get_bit_stream_format () -
-
mix_acp_aac_get_mpeg_id, mix_acp_aac_get_mpeg_id () -
-
mix_acp_aac_new, mix_acp_aac_new () -
-
MIX_ACP_AAC_PCE_FLAG, MIX_ACP_AAC_PCE_FLAG() -
-
MIX_ACP_AAC_PS_FLAG, MIX_ACP_AAC_PS_FLAG() -
-
mix_acp_aac_ref, mix_acp_aac_ref () -
-
MIX_ACP_AAC_SAMPLE_RATE, MIX_ACP_AAC_SAMPLE_RATE() -
-
MIX_ACP_AAC_SBR_FLAG, MIX_ACP_AAC_SBR_FLAG() -
-
mix_acp_aac_set_aac_profile, mix_acp_aac_set_aac_profile () -
-
mix_acp_aac_set_aot, mix_acp_aac_set_aot () -
-
mix_acp_aac_set_bit_rate_type, mix_acp_aac_set_bit_rate_type () -
-
mix_acp_aac_set_bit_stream_format, mix_acp_aac_set_bit_stream_format () -
-
mix_acp_aac_set_mpeg_id, mix_acp_aac_set_mpeg_id () -
-
mix_acp_aac_unref, mix_acp_aac_unref() -
-
MIX_ACP_BITRATE, MIX_ACP_BITRATE() -
-
MIX_ACP_DECODEMODE, MIX_ACP_DECODEMODE() -
-
mix_acp_get_audio_manager, mix_acp_get_audio_manager () -
-
mix_acp_get_bps, mix_acp_get_bps () -
-
mix_acp_get_decodemode, mix_acp_get_decodemode () -
-
mix_acp_get_op_align, mix_acp_get_op_align () -
-
mix_acp_get_streamname, mix_acp_get_streamname () -
-
mix_acp_is_streamname_valid, mix_acp_is_streamname_valid () -
-
MIX_ACP_MP3_CRC, MIX_ACP_MP3_CRC() -
-
MIX_ACP_MP3_MPEG_FORMAT, MIX_ACP_MP3_MPEG_FORMAT() -
-
MIX_ACP_MP3_MPEG_LAYER, MIX_ACP_MP3_MPEG_LAYER() -
-
mix_acp_mp3_new, mix_acp_mp3_new () -
-
mix_acp_mp3_ref, mix_acp_mp3_ref () -
-
mix_acp_mp3_unref, mix_acp_mp3_unref() -
-
mix_acp_new, mix_acp_new () -
-
MIX_ACP_NUM_CHANNELS, MIX_ACP_NUM_CHANNELS() -
-
mix_acp_ref, mix_acp_ref () -
-
MIX_ACP_SAMPLE_FREQ, MIX_ACP_SAMPLE_FREQ() -
-
mix_acp_set_audio_manager, mix_acp_set_audio_manager () -
-
mix_acp_set_bps, mix_acp_set_bps () -
-
mix_acp_set_decodemode, mix_acp_set_decodemode () -
-
mix_acp_set_op_align, mix_acp_set_op_align () -
-
mix_acp_set_streamname, mix_acp_set_streamname () -
-
mix_acp_unref, mix_acp_unref() -
-
MIX_ACP_WMA_BLOCK_ALIGN, MIX_ACP_WMA_BLOCK_ALIGN() -
-
MIX_ACP_WMA_CHANNEL_MASK, MIX_ACP_WMA_CHANNEL_MASK() -
-
MIX_ACP_WMA_ENCODE_OPT, MIX_ACP_WMA_ENCODE_OPT() -
-
MIX_ACP_WMA_FORMAT_TAG, MIX_ACP_WMA_FORMAT_TAG() -
-
mix_acp_wma_get_version, mix_acp_wma_get_version () -
-
mix_acp_wma_new, mix_acp_wma_new () -
-
MIX_ACP_WMA_PCM_BIT_WIDTH, MIX_ACP_WMA_PCM_BIT_WIDTH() -
-
mix_acp_wma_ref, mix_acp_wma_ref () -
-
mix_acp_wma_set_version, mix_acp_wma_set_version () -
-
mix_acp_wma_unref, mix_acp_wma_unref() -
-
mix_aip_new, mix_aip_new () -
-
mix_aip_ref, mix_aip_ref () -
-
mix_aip_unref, mix_aip_unref() -
-
mix_audio_am_is_enabled, mix_audio_am_is_enabled () -
-
mix_audio_capture_encode, mix_audio_capture_encode () -
-
mix_audio_configure, mix_audio_configure () -
-
mix_audio_decode, mix_audio_decode () -
-
mix_audio_deinitialize, mix_audio_deinitialize () -
-
mix_audio_get_max_vol, mix_audio_get_max_vol () -
-
mix_audio_get_min_vol, mix_audio_get_min_vol () -
-
mix_audio_get_mute, mix_audio_get_mute () -
-
mix_audio_get_output_configuration, mix_audio_get_output_configuration () -
-
mix_audio_get_state, mix_audio_get_state () -
-
mix_audio_get_stream_byte_decoded, mix_audio_get_stream_byte_decoded () -
-
mix_audio_get_stream_state, mix_audio_get_stream_state () -
-
mix_audio_get_timestamp, mix_audio_get_timestamp () -
-
mix_audio_get_version, mix_audio_get_version () -
-
mix_audio_get_volume, mix_audio_get_volume () -
-
mix_audio_initialize, mix_audio_initialize () -
-
mix_audio_is_am_available, mix_audio_is_am_available () -
-
mix_audio_new, mix_audio_new () -
-
mix_audio_pause, mix_audio_pause () -
-
mix_audio_ref, mix_audio_ref () -
-
mix_audio_resume, mix_audio_resume () -
-
mix_audio_set_mute, mix_audio_set_mute () -
-
mix_audio_set_volume, mix_audio_set_volume () -
-
mix_audio_start, mix_audio_start () -
-
mix_audio_stop_drain, mix_audio_stop_drain () -
-
mix_audio_stop_drop, mix_audio_stop_drop () -
-
mix_audio_unref, mix_audio_unref() -
-
-
-
- - - diff --git a/mix_audio/docs/reference/MixAudio/html/ch01.html b/mix_audio/docs/reference/MixAudio/html/ch01.html deleted file mode 100644 index 2ab25e8..0000000 --- a/mix_audio/docs/reference/MixAudio/html/ch01.html +++ /dev/null @@ -1,56 +0,0 @@ - - - - -Mix Audio API - - - - - - - - - - - - - - - - - - - -
-

-Mix Audio API

-
-
-MixAudioConfigParamsAAC — Audio configuration parameters for AAC-LC, HEAAC v1, and HEAAC v2 audio formats. -
-
-MixAudioConfigParamsMP3 — Audio configuration parameters for MP3 audio. -
-
-MixAudioConfigParamsWMA — Audio parameters for WMA audio. -
-
-MixAudioConfigParams — MixAudio configuration parameters object. -
-
-MixAudioInitParams — Initialization parameters object. -
-
-MixAudio — Object to support playback of a single stream using the hardware accelerated decoder. -
-
-Mix Audio Types — Miscellaneous types used by the MixAudio API. -
-
-
- - - diff --git a/mix_audio/docs/reference/MixAudio/html/index.html b/mix_audio/docs/reference/MixAudio/html/index.html deleted file mode 100644 index ab60f03..0000000 --- a/mix_audio/docs/reference/MixAudio/html/index.html +++ /dev/null @@ -1,60 +0,0 @@ - - - - -MixAudio Reference Manual - - - - - - - - - - -
-
-
-
-

- MixAudio version 0.3 - -

-
-
-
-
-
Mix Audio API
-
-
-MixAudioConfigParamsAAC — Audio configuration parameters for AAC-LC, HEAAC v1, and HEAAC v2 audio formats. -
-
-MixAudioConfigParamsMP3 — Audio configuration parameters for MP3 audio. -
-
-MixAudioConfigParamsWMA — Audio parameters for WMA audio. -
-
-MixAudioConfigParams — MixAudio configuration parameters object. -
-
-MixAudioInitParams — Initialization parameters object. -
-
-MixAudio — Object to support playback of a single stream using the hardware accelerated decoder. -
-
-Mix Audio Types — Miscellaneous types used by the MixAudio API. -
-
-
Object Hierarchy
-
API Index
-
-
- - - diff --git a/mix_audio/docs/reference/MixAudio/html/index.sgml b/mix_audio/docs/reference/MixAudio/html/index.sgml deleted file mode 100644 index 0cc1a2a..0000000 --- a/mix_audio/docs/reference/MixAudio/html/index.sgml +++ /dev/null @@ -1,134 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/mix_audio/docs/reference/MixAudio/html/style.css b/mix_audio/docs/reference/MixAudio/html/style.css deleted file mode 100644 index bb44c28..0000000 --- a/mix_audio/docs/reference/MixAudio/html/style.css +++ /dev/null @@ -1,167 +0,0 @@ -.synopsis, .classsynopsis -{ - background: #eeeeee; - border: solid 1px #aaaaaa; - padding: 0.5em; -} -.programlisting -{ - background: #eeeeff; - border: solid 1px #aaaaff; - padding: 0.5em; -} -.variablelist -{ - padding: 4px; - margin-left: 3em; -} -.variablelist td:first-child -{ - vertical-align: top; -} - -/* this is needed so that the local anchors are displayed below the naviagtion */ -@media screen { - sup a.footnote - { - position: relative; - top: 0em ! important; - } - div.refnamediv a[name], div.refsect1 a[name] - { - position: relative; - top: -4.5em; - } - table.navigation#top - { - background: #ffeeee; - border: solid 1px #ffaaaa; - margin-top: 0; - margin-bottom: 0; - position: fixed; - top: 0; - left: 0; - height: 2em; - z-index: 1; - } - .navigation a - { - color: #770000; - } - .navigation a:visited - { - color: #550000; - } - td.shortcuts - { - color: #770000; - font-size: 80%; - white-space: nowrap; - } - div.refentry, div.chapter, div.reference, div.part, div.book, div.glossary, div.sect1, div.appendix, div.preface - { - position: relative; - top: 3em; - z-index: 0; - } - div.glossary, div.index - { - position: relative; - top: 2em; - z-index: 0; - } - div.refnamediv - { - margin-top: 2em; - } - body - { - padding-bottom: 20em; - } -} -@media print { - table.navigation { - visibility: collapse; - display: none; - } - div.titlepage table.navigation { - visibility: visible; - display: table; - background: #ffeeee; - border: solid 1px #ffaaaa; - margin-top: 0; - margin-bottom: 0; - top: 0; - left: 0; - height: 2em; - } -} - -.navigation .title -{ - font-size: 200%; -} - - -div.gallery-float -{ - float: left; - padding: 10px; -} -div.gallery-float img -{ - border-style: none; -} -div.gallery-spacer -{ - clear: both; -} -a -{ - text-decoration: none; -} -a:hover -{ - text-decoration: underline; - color: #FF0000; -} - -div.table table -{ - border-collapse: collapse; - border-spacing: 0px; - border-style: solid; - border-color: #777777; - border-width: 1px; -} - -div.table table td, div.table table th -{ - border-style: solid; - border-color: #777777; - border-width: 1px; - padding: 3px; - vertical-align: top; -} - -div.table table th -{ - background-color: #eeeeee; -} - -hr -{ - color: #777777; - background: #777777; - border: 0; - height: 1px; - clear: both; -} - -.footer -{ - padding-top: 3.5em; - color: #777777; - text-align: center; - font-size: 80%; -} diff --git a/mix_audio/docs/reference/MixAudio/html/tree-hierarchy.html b/mix_audio/docs/reference/MixAudio/html/tree-hierarchy.html deleted file mode 100644 index e6f8029..0000000 --- a/mix_audio/docs/reference/MixAudio/html/tree-hierarchy.html +++ /dev/null @@ -1,37 +0,0 @@ - - - - -Object Hierarchy - - - - - - - - - - - - - - - - - - - -
-

-Object Hierarchy

-
-    GObject
-        MixAudio
-
-
- - - diff --git a/mix_audio/m4/Makefile.am b/mix_audio/m4/Makefile.am deleted file mode 100644 index 66381d4..0000000 --- a/mix_audio/m4/Makefile.am +++ /dev/null @@ -1 +0,0 @@ -EXTRA_DIST += diff --git a/mix_audio/m4/as-mix-version.m4 b/mix_audio/m4/as-mix-version.m4 deleted file mode 100644 index 8b09d7c..0000000 --- a/mix_audio/m4/as-mix-version.m4 +++ /dev/null @@ -1,35 +0,0 @@ -dnl as-mix-version.m4 - -dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE) - -dnl example -dnl AS_MIX_VERSION(mixaudio,MIXAUDIO, 0, 3, 2,) -dnl for a 0.3.2 release version - -dnl this macro -dnl - defines [$PREFIX]_MAJOR, MINOR and REVISION, CURRENT, AGE -dnl - defines [$PREFIX], VERSION -dnl - AC_SUBST's all defined vars - -AC_DEFUN([AS_MIX_VERSION], -[ - PACKAGE=[$1] - [$2]_MAJOR=[$3] - [$2]_MINOR=[$4] - [$2]_REVISION=[$5] - [$2]_CURRENT=m4_eval([$3] + [$4]) - [$2]_AGE=[$4] - VERSION=[$3].[$4].[$5] - - AC_SUBST([$2]_MAJOR) - AC_SUBST([$2]_MINOR) - AC_SUBST([$2]_REVISION) - AC_SUBST([$2]_CURRENT) - AC_SUBST([$2]_AGE) - - AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Define the package name]) - AC_SUBST(PACKAGE) - AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Define the version]) - AC_SUBST(VERSION) - -]) diff --git a/mix_audio/mixaudio.spec b/mix_audio/mixaudio.spec deleted file mode 100644 index 54e658b..0000000 --- a/mix_audio/mixaudio.spec +++ /dev/null @@ -1,56 +0,0 @@ -# INTEL CONFIDENTIAL -# Copyright 2009 Intel Corporation All Rights Reserved. -# The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. -# -# No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - -Summary: MIX Audio -Name: mixaudio -Version: 0.4.1 -Release: 1 -Source0: %{name}-%{version}.tar.gz -NoSource: 0 -License: Intel Proprietary -Group: System Environment/Libraries -BuildRoot: %{_tmppath}/%{name}-root -ExclusiveArch: i586 i386 -BuildRequires: glib2-devel mixcommon-devel dbus-glib-devel - -%description -MIX Audio is an user library interface for various hardware audio codecs -available on the platform. - -%package devel -Summary: Libraries include files -Group: Development/Libraries -Requires: %{name} = %{version} - -%description devel -The %{name}-devel package contains the header files and static libraries -for building applications which use %{name}. 
- -%prep -%setup -q - -%build -%autogen -%configure --prefix=%{_prefix} -make - -%install -%make_install - -%clean -rm -rf $RPM_BUILD_ROOT - -%files -%defattr(-,root,root) -%{_libdir}/libmixaudio.so.* - -%files devel -%defattr(-,root,root) -%{_libdir}/libmixaudio.so -%{_libdir}/libmixaudio.la -%{_libdir}/pkgconfig/mixaudio.pc -%{_includedir}/*.h -%doc COPYING diff --git a/mix_audio/pkgconfig/Makefile.am b/mix_audio/pkgconfig/Makefile.am deleted file mode 100644 index ceea4fa..0000000 --- a/mix_audio/pkgconfig/Makefile.am +++ /dev/null @@ -1,11 +0,0 @@ -### all of the standard pc files we need to generate -pcfiles = mixaudio.pc - -all-local: $(pcfiles) - -pkgconfigdir = $(libdir)/pkgconfig -pkgconfig_DATA = $(pcfiles) - -EXTRA_DIST = mixaudio.pc.in - -CLEANFILES = $(pcfiles) diff --git a/mix_audio/pkgconfig/mixaudio.pc.in b/mix_audio/pkgconfig/mixaudio.pc.in deleted file mode 100644 index b521b5b..0000000 --- a/mix_audio/pkgconfig/mixaudio.pc.in +++ /dev/null @@ -1,12 +0,0 @@ -prefix=@prefix@ -exec_prefix=@exec_prefix@ -libdir=@libdir@ -includedir=@includedir@ -toolsdir=${exec_prefix}/bin - -Name: MixAudio -Description: Intel MIX Audio -Requires: @MIXAUDIO_PKG_DEPS@ -Version: @VERSION@ -Libs: -L${libdir} -lmixaudio -Cflags: -I${includedir} diff --git a/mix_audio/src/Android.mk b/mix_audio/src/Android.mk deleted file mode 100644 index 818ae8e..0000000 --- a/mix_audio/src/Android.mk +++ /dev/null @@ -1,53 +0,0 @@ -LOCAL_PATH := $(call my-dir) -include $(CLEAR_VARS) - -#MIXAUDIO_LOG_ENABLE := true - -LOCAL_SRC_FILES := \ - mixaip.c \ - mixacp.c \ - mixacpmp3.c \ - mixacpwma.c \ - mixacpaac.c \ - mixaudio.c \ - sst_proxy.c - -LOCAL_C_INCLUDES := \ - $(LOCAL_PATH) \ - $(GLIB_TOP) \ - $(GLIB_TOP)/android \ - $(GLIB_TOP)/glib \ - $(GLIB_TOP)/gobject \ - $(TARGET_OUT_HEADERS)/libmixcommon - -LOCAL_SHARED_LIBRARIES := \ - libglib-2.0 \ - libgobject-2.0 \ - libgthread-2.0 \ - libgmodule-2.0 \ - libmixcommon - -ifeq ($(strip $(MIXAUDIO_LOG_ENABLE)),true) -LOCAL_CFLAGS += -DMIX_LOG_ENABLE -LOCAL_SHARED_LIBRARIES += liblog -endif - -LOCAL_COPY_HEADERS_TO := libmixaudio - -LOCAL_COPY_HEADERS := \ - amhelper.h \ - intel_sst_ioctl.h \ - mixacp.h \ - mixacpaac.h \ - mixacpmp3.h \ - mixacpwma.h \ - mixaip.h \ - mixaudio.h \ - mixaudiotypes.h \ - sst_proxy.h -# pvt.h \ - -LOCAL_MODULE := libmixaudio -LOCAL_MODULE_TAGS := optional - -include $(BUILD_SHARED_LIBRARY) diff --git a/mix_audio/src/Makefile.am b/mix_audio/src/Makefile.am deleted file mode 100644 index 4a4cd36..0000000 --- a/mix_audio/src/Makefile.am +++ /dev/null @@ -1,61 +0,0 @@ -#INTEL CONFIDENTIAL -#Copyright 2009 Intel Corporation All Rights Reserved. -#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. 
Any license under such intellectual property rights must be express and approved by Intel in writing. -# - -lib_LTLIBRARIES = libmixaudio.la -#noinst_LTLIBRARIES = libmixaudio_stub.la - -############################################################################## -# sources used to compile -libmixaudio_la_SOURCES = mixaudio.c \ - sst_proxy.c \ - mixaip.c \ - mixacp.c \ - mixacpmp3.c \ - mixacpwma.c \ - mixacpaac.c - -# flags used to compile this plugin -# add other _CFLAGS and _LIBS as needed -libmixaudio_la_CFLAGS = $(DBUS_GLIB_CFLAGS) $(GLIB_CFLAGS) $(MIX_CFLAGS) $(GOBJECT_CFLAGS) $(GTHREAD_CFLAGS) -DMIXAUDIO_CURRENT=@MIXAUDIO_CURRENT@ -DMIXAUDIO_AGE=@MIXAUDIO_AGE@ -DMIXAUDIO_REVISION=@MIXAUDIO_REVISION@ $(MIXCOMMON_CFLAGS) -DMIX_LOG_ENABLE -libmixaudio_la_LIBADD = $(DBUS_GLIB_LIBS) $(GLIB_LIBS) $(GOBJECT_LIBS) $(GTHREAD_LIBS) $(MIXCOMMON_LIBS) -libmixaudio_la_LDFLAGS = $(DBUS_GLIB_LIBS)$(GLIB_LIBS) $(GOBJECT_LIBS) $(GTHREAD_LIBS) -version-info @MIXAUDIO_CURRENT@:@MIXAUDIO_REVISION@:@MIXAUDIO_AGE@ $(MIXCOMMON_LIBS) - -libmixaudio_la_LIBTOOLFLAGS = --tag=disable-static - -# additional flags to enable backdoor or workaround -if LPESTUB -libmixaudio_la_CFLAGS += -DLPESTUB -endif - -if WORKAROUND -libmixaudio_la_CFLAGS += -DDROP_WORKAROUND -endif - -#libmixaudio_stub_la_SOURCES = $(libmixaudio_la_SOURCES) -#libmixaudio_stub_la_CFLAGS = $(libmixaudio_la_CFLAGS) -DLPESTUB -#libmixaudio_stub_la_LIBADD = $(libmixaudio_la_LIBADD) -#libmixaudio_stub_la_LDFLAGS = $(libmixaudio_la_LDFLAGS) -#libmixaudio_stub_la_LIBTOOLFLAGS = $(libmixaudio_la_LIBTOOLFLAGS) - -# headers we need but don't want installed -noinst_HEADERS = intel_sst_ioctl.h sst_proxy.h amhelper.h - -# TODO: decide whehter a /usr/include/mix is needed for mix headers -include_HEADERS = mixaudio.h \ - mixaudiotypes.h \ - mixaip.h \ - mixacp.h \ - mixacpmp3.h \ - mixacpwma.h \ - mixacpaac.h - -if AUDIO_MANAGER -libmixaudio_la_CFLAGS += -DAUDIO_MANAGER -libmixaudio_la_SOURCES += amhelper.c -#include_HEADERS += amhelper.h -endif - diff --git a/mix_audio/src/amhelper.c b/mix_audio/src/amhelper.c deleted file mode 100644 index 5124a6a..0000000 --- a/mix_audio/src/amhelper.c +++ /dev/null @@ -1,120 +0,0 @@ -#include "amhelper.h" -#include - -static DBusGConnection *connection; - -static DBusGProxy *proxy_lpe = NULL; - -static gboolean am_enable=FALSE; - -/* Connect to am dbus server - * return -1 means failed - * return 0 means succeeded - * */ -gint32 dbus_init() { - GError *error; - const char *name = "org.moblin.audiomanager"; - - const char *path_lpe = "/org/moblin/audiomanager/lpe"; - const char *interface_lpe = "org.moblin.audiomanager.lpe"; - - const gchar* env = g_getenv("MIX_AM"); - if (env && env[0] == '1') { - am_enable = TRUE; - } - else - am_enable = FALSE; - - if (am_enable && (proxy_lpe == NULL)) { - error = NULL; - connection = dbus_g_bus_get(DBUS_BUS_SESSION, &error); - - if (connection == NULL) { - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_WARNING, "Failed to open connection to bus: %s\n", - error->message); - g_error_free(error); - return -1; - } - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_VERBOSE, "Successfully get a dbus connection\n"); - - proxy_lpe = dbus_g_proxy_new_for_name(connection, name, - path_lpe, interface_lpe); - if (proxy_lpe == NULL) { - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_WARNING, "Failed to connect to AM dbus server\n"); - return -1; - } - else { - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_VERBOSE, "Successfully connected to AM dbus\npath: %s\ninterface: %s\n", - path_lpe, interface_lpe); - } - } - return 0; -} - 
-gint32 lpe_stream_register(guint32 lpe_stream_id, char* media_role, char* lpe_stream_name, guint32 stream_type) -{ - GError *error; - gint32 s_output = 0; - error = NULL; - - if (am_enable) { - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "lpe_stream_id: %d\n", lpe_stream_id); - - if (lpe_stream_id == 0) { - return 0; - } - if(!dbus_g_proxy_call (proxy_lpe, "LPEStreamRegister", &error, G_TYPE_UINT, - lpe_stream_id, G_TYPE_STRING, media_role, G_TYPE_STRING, lpe_stream_name, G_TYPE_UINT, stream_type, - G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID)) { - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_WARNING, "LPEStreamRegister failed: %s\n", error->message); - g_error_free(error); - return s_output; - } - - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "LPEStreamRegister returned am stream id %d\n", s_output); - } - - return s_output; -} - -gint32 lpe_stream_unregister(guint32 am_stream_id) -{ - GError *error; - gint32 s_output = 0; - - if (am_enable) { - error = NULL; - if(!dbus_g_proxy_call (proxy_lpe, "LPEStreamUnregister", &error, G_TYPE_UINT, am_stream_id, - G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID)){ - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_WARNING, "LPEStreamUnregister failed: %s\n", error->message); - g_error_free(error); - return s_output; - } - } - return s_output; -} - -gint32 lpe_stream_notify_pause(guint32 am_stream_id) -{ - GError *error; - gint32 s_output=0; - - if (am_enable) { - dbus_g_proxy_call (proxy_lpe, "LPEStreamNotifyPause", &error, G_TYPE_UINT, am_stream_id, G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID); - } - - return s_output; -} - -gint32 lpe_stream_notify_resume(guint32 am_stream_id) -{ - GError *error; - gint32 s_output=0; - - if (am_enable) { - dbus_g_proxy_call (proxy_lpe, "LPEStreamNotifyResume", &error, G_TYPE_UINT, am_stream_id, G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID); - } - - return s_output; -} - diff --git a/mix_audio/src/amhelper.h b/mix_audio/src/amhelper.h deleted file mode 100644 index 8a00681..0000000 --- a/mix_audio/src/amhelper.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
-*/ - -#ifndef __MIX_AM_HELPER_H__ -#define __MIX_AM_HELPER_H__ - -#include -#include - -gint32 dbus_init(); - -gint32 lpe_stream_register(guint32 lpe_stream_id, char* media_role, char* lpe_stream_name, guint32 stream_type); - -gint32 lpe_stream_unregister(guint32 am_stream_id); - -gint32 lpe_stream_notify_pause(guint32 am_stream_id); - -gint32 lpe_stream_notify_resume(guint32 am_stream_id); - -#endif diff --git a/mix_audio/src/intel_sst_ioctl.h b/mix_audio/src/intel_sst_ioctl.h deleted file mode 100644 index 7fecf12..0000000 --- a/mix_audio/src/intel_sst_ioctl.h +++ /dev/null @@ -1,337 +0,0 @@ -#ifndef __INTEL_SST_IOCTL_H__ -#define __INTEL_SST_IOCTL_H__ - -enum sst_codec_types { -/* AUDIO/MUSIC CODEC Type Definitions */ - SST_CODEC_TYPE_UNKNOWN = 0, - SST_CODEC_TYPE_PCM, /* Pass through Audio codec */ - SST_CODEC_TYPE_MP3, - SST_CODEC_TYPE_MP24, - SST_CODEC_TYPE_AAC, - SST_CODEC_TYPE_AACP, - SST_CODEC_TYPE_eAACP, - SST_CODEC_TYPE_WMA9, - SST_CODEC_TYPE_WMA10, - SST_CODEC_TYPE_WMA10P, - SST_CODEC_TYPE_RA, - SST_CODEC_TYPE_DDAC3, - SST_CODEC_TYPE_STEREO_TRUE_HD, - SST_CODEC_TYPE_STEREO_HD_PLUS, - - /* VOICE CODEC Type Definitions */ - SST_CODEC_TYPE_VOICE_PCM = 0x21, /* Pass through voice codec */ - SST_CODEC_SRC = 0x64, - SST_CODEC_MIXER = 0x65, - SST_CODEC_DOWN_MIXER = 0x66, - SST_CODEC_VOLUME_CONTROL = 0x67, - SST_CODEC_OEM1 = 0xC8, - SST_CODEC_OEM2 = 0xC9, -}; - -enum snd_sst_stream_ops { - STREAM_OPS_PLAYBACK = 0, /* Decode */ - STREAM_OPS_CAPTURE, /* Encode */ - STREAM_OPS_PLAYBACK_DRM, /* Play Audio/Voice */ - STREAM_OPS_PLAYBACK_ALERT, /* Play Audio/Voice */ - STREAM_OPS_CAPTURE_VOICE_CALL, /* CSV Voice recording */ -}; - -enum stream_type { - STREAM_TYPE_MUSIC = 1, - STREAM_TYPE_VOICE -}; - -/* Firmware Version info */ -struct snd_sst_fw_version { - __u8 build; /* build number*/ - __u8 minor; /* minor number*/ - __u8 major; /* major number*/ - __u8 type; /* build type*/ -}; - -/* Port info structure */ -struct snd_sst_port_info { - __u16 port_type; - __u16 reserved; -}; - -/* Mixer info structure */ -struct snd_sst_mix_info { - __u16 max_streams; - __u16 reserved; -}; - -/* PCM Parameters */ -struct snd_pcm_params { - __u16 codec; /* codec type */ - __u8 num_chan; /* 1=Mono, 2=Stereo */ - __u8 pcm_wd_sz; /* 16/24 - bit*/ - __u32 brate; /* Bitrate in bits per second */ - __u32 sfreq; /* Sampling rate in Hz */ - __u16 frame_size; - __u16 samples_per_frame; /* Frame size num samples per frame */ - __u32 period_count; /* period elapsed time count, in samples,*/ -}; - -/* MP3 Music Parameters Message */ -struct snd_mp3_params { - __u16 codec; - __u8 num_chan; /* 1=Mono, 2=Stereo */ - __u8 pcm_wd_sz; /* 16/24 - bit*/ - __u32 brate; /* Use the hard coded value. */ - __u32 sfreq; /* Sampling freq eg. 8000, 441000, 48000 */ - __u8 crc_check; /* crc_check - disable (0) or enable (1) */ - __u8 op_align; /* op align 0- 16 bit, 1- MSB, 2 LSB*/ - __u16 reserved; /* Unused */ -}; - -#define AAC_BIT_STREAM_ADTS 0 -#define AAC_BIT_STREAM_ADIF 1 -#define AAC_BIT_STREAM_RAW 2 - -/* AAC Music Parameters Message */ -struct snd_aac_params { - __u16 codec; - __u8 num_chan; /* 1=Mono, 2=Stereo*/ - __u8 pcm_wd_sz; /* 16/24 - bit*/ - __u32 brate; - __u32 sfreq; /* Sampling freq eg. 
8000, 441000, 48000 */ - __u32 aac_srate; /* Plain AAC decoder operating sample rate */ - __u8 mpg_id; /* 0=MPEG-2, 1=MPEG-4 */ - __u8 bs_format; /* input bit stream format adts=0, adif=1, raw=2 */ - __u8 aac_profile; /* 0=Main Profile, 1=LC profile, 3=SSR profile */ - __u8 ext_chl; /* No.of external channels */ - __u8 aot; /* Audio object type. 1=Main , 2=LC , 3=SSR, 4=SBR*/ - __u8 op_align; /* output alignment 0=16 bit , 1=MSB, 2= LSB align */ - __u8 brate_type; /* 0=CBR, 1=VBR */ - __u8 crc_check; /* crc check 0= disable, 1=enable */ - __s8 bit_stream_format[8]; /* input bit stream format adts/adif/raw */ - __u8 jstereo; /* Joint stereo Flag */ - __u8 sbr_present; /* 1 = SBR Present, 0 = SBR absent, for RAW */ - __u8 downsample; /* 1 = Downsampling ON, 0 = Downsampling OFF */ - __u8 num_syntc_elems; /* 1- Mono/stereo, 0 - Dual Mono, 0 - for raw */ - __s8 syntc_id[2]; /* 0 for ID_SCE(Dula Mono), -1 for raw */ - __s8 syntc_tag[2]; /* raw - -1 and 0 -16 for rest of the streams */ - __u8 pce_present; /* Flag. 1- present 0 - not present, for RAW */ - __u8 reserved; - __u16 reserved1; - -}; - -/* WMA Music Parameters Message */ -struct snd_wma_params { - __u16 codec; - __u8 num_chan; /* 1=Mono, 2=Stereo */ - __u8 pcm_wd_sz; /* 16/24 - bit*/ - __u32 brate; /* Use the hard coded value. */ - __u32 sfreq; /* Sampling freq eg. 8000, 441000, 48000 */ - __u32 channel_mask; /* Channel Mask */ - __u16 format_tag; /* Format Tag */ - __u16 block_align; /* packet size */ - __u16 wma_encode_opt;/* Encoder option */ - __u8 op_align; /* op align 0- 16 bit, 1- MSB, 2 LSB*/ - __u8 pcm_src; /* input pcm bit width*/ -}; - -/* Pre processing param structure */ -struct snd_prp_params { - __u32 reserved; /* No pre-processing defined yet */ -}; - -/* Post processing Capability info structure */ -struct snd_sst_postproc_info { - __u32 src_min; /* Supported SRC Min sampling freq */ - __u32 src_max; /* Supported SRC Max sampling freq */ - __u8 src; /* 0=Not supported, 1=Supported */ - __u8 bass_boost; /* 0=Not Supported, 1=Supported */ - __u8 stereo_widening; /* 0=Not Supported, 1=Supported */ - __u8 volume_control; /* 0=Not Supported, 1=Supported */ - __s16 min_vol; /* Minimum value of Volume in dB */ - __s16 max_vol; /* Maximum value of Volume in dB */ - __u8 mute_control; /*0=No Mute, 1=Mute*/ - __u8 reserved1; - __u16 reserved2; -}; - -/* pre processing Capability info structure */ -struct snd_sst_prp_info { - __s16 min_vol; /* Minimum value of Volume in dB */ - __s16 max_vol; /* Maximum value of Volume in dB */ - __u8 volume_control; /* 0=Not Supported, 1=Supported */ - __u8 reserved1; /* for 32 bit alignment */ - __u16 reserved2; /* for 32 bit alignment */ -} __attribute__ ((packed)); - -/* Firmware capabilities info */ -struct snd_sst_fw_info { - struct snd_sst_fw_version fw_version; /* Firmware version */ - __u8 audio_codecs_supported[8]; /* Codecs supported by FW */ - __u32 recommend_min_duration; /* Min duration for Low power Playback*/ - __u8 max_pcm_streams_supported; /*Max number of PCM streams supported */ - __u8 max_enc_streams_supported; /*Max number of Encoded streams */ - __u16 reserved; /* 32 bit alignment*/ - struct snd_sst_postproc_info pop_info; /* Post processing capability*/ - struct snd_sst_prp_info prp_info; /* pre_processing mod cap info */ - struct snd_sst_port_info port_info[2]; /* Port info */ - struct snd_sst_mix_info mix_info; /* Mixer info */ - __u32 min_input_buf; /*minmum i/p buffer for decode*/ -}; - -/* Add the codec parameter structures for new codecs to be supported */ -#define 
CODEC_PARAM_STRUCTURES \ - struct snd_pcm_params pcm_params; \ - struct snd_mp3_params mp3_params; \ - struct snd_aac_params aac_params; \ - struct snd_wma_params wma_params; - -/* Pre and Post Processing param structures */ -#define PPP_PARAM_STRUCTURES \ - struct snd_prp_params prp_params; - -/* Codec params struture */ -union snd_sst_codec_params { - CODEC_PARAM_STRUCTURES; -}; - -/* Pre-processing params struture */ -union snd_sst_ppp_params{ - PPP_PARAM_STRUCTURES; -}; - -struct snd_sst_stream_params { - union snd_sst_codec_params uc; -} __attribute__ ((packed)); - -struct snd_sst_params { - __u32 result; - __u32 stream_id; - __u8 codec; - __u8 ops; - __u8 stream_type; - struct snd_sst_stream_params sparams; -}; - -/*ioctl related stuff here*/ -struct snd_sst_pmic_config { - __u32 sfreq; /* Sampling rate in Hz */ - __u16 num_chan; /* Mono =1 or Stereo =2 */ - __u16 pcm_wd_sz; /* Number of bits per sample */ -} __attribute__ ((packed)); - -struct snd_sst_get_stream_params { - struct snd_sst_params codec_params; - struct snd_sst_pmic_config pcm_params; -}; - -enum snd_sst_target_type { - SND_SST_TARGET_PMIC = 1, - SND_SST_TARGET_OTHER, -}; - -enum snd_sst_port_action { - SND_SST_PORT_PREPARE = 1, - SND_SST_PORT_ACTIVATE, -}; - -/* Target selection per device structure */ -struct snd_sst_slot_info { - __u8 mix_enable; /* Mixer enable or disable */ - __u8 device_type; - __u8 device_instance; /* 0, 1, 2 */ - __u8 target_type; - __u16 slot[2]; - __u8 master; - __u8 action; - __u16 reserved; - struct snd_sst_pmic_config pcm_params; -} __attribute__ ((packed)); - -/* Target device list structure */ -struct snd_sst_target_device { - __u32 device_route; - struct snd_sst_slot_info devices[2]; -} __attribute__ ((packed)); - -struct snd_sst_driver_info { - __u32 version; /* Version of the driver */ - __u32 active_pcm_streams; - __u32 active_enc_streams; - __u32 max_pcm_streams; - __u32 max_enc_streams; - __u32 buf_per_stream; -}; - -struct snd_sst_vol { - __u32 stream_id; - __s32 volume; - __u32 ramp_duration; - __u32 ramp_type; /* Ramp type, default=0 */ -}; - -struct snd_sst_mute { - __u32 stream_id; - __u32 mute; -}; - -enum snd_sst_buff_type { - SST_BUF_USER = 1, - SST_BUF_MMAP, - SST_BUF_RAR, -}; - -struct snd_sst_mmap_buff_entry { - unsigned int offset; - unsigned int size; -}; - -struct snd_sst_mmap_buffs { - unsigned int entries; - enum snd_sst_buff_type type; - struct snd_sst_mmap_buff_entry *buff; -}; - -struct snd_sst_buff_entry { - void *buffer; - unsigned int size; -}; - -struct snd_sst_buffs { - unsigned int entries; - __u8 type; - struct snd_sst_buff_entry *buff_entry; -}; - -struct snd_sst_dbufs { - unsigned long long input_bytes_consumed; - unsigned long long output_bytes_produced; - struct snd_sst_buffs *ibufs; - struct snd_sst_buffs *obufs; -}; - -/*IOCTL defined here*/ -/*SST MMF IOCTLS only*/ -#define SNDRV_SST_STREAM_SET_PARAMS _IOR('L', 0x00, \ - struct snd_sst_stream_params *) -#define SNDRV_SST_STREAM_GET_PARAMS _IOWR('L', 0x01, \ - struct snd_sst_get_stream_params *) -#define SNDRV_SST_STREAM_GET_TSTAMP _IOWR('L', 0x02, __u64 *) -#define SNDRV_SST_STREAM_DECODE _IOWR('L', 0x03, struct snd_sst_dbufs *) -#define SNDRV_SST_STREAM_BYTES_DECODED _IOWR('L', 0x04, __u64 *) -#define SNDRV_SST_STREAM_START _IO('A', 0x42) -#define SNDRV_SST_STREAM_DROP _IO('A', 0x43) -#define SNDRV_SST_STREAM_DRAIN _IO('A', 0x44) -#define SNDRV_SST_STREAM_PAUSE _IOW('A', 0x45, int) -#define SNDRV_SST_STREAM_RESUME _IO('A', 0x47) -#define SNDRV_SST_MMAP_PLAY _IOW('L', 0x05, struct snd_sst_mmap_buffs 
*) -#define SNDRV_SST_MMAP_CAPTURE _IOW('L', 0x06, struct snd_sst_mmap_buffs *) -/*SST common ioctls */ -#define SNDRV_SST_DRIVER_INFO _IOR('L', 0x10, struct snd_sst_driver_info *) -#define SNDRV_SST_SET_VOL _IOW('L', 0x11, struct snd_sst_vol *) -#define SNDRV_SST_GET_VOL _IOW('L', 0x12, struct snd_sst_vol *) -#define SNDRV_SST_MUTE _IOW('L', 0x13, struct snd_sst_mute *) -/*AM Ioctly only*/ -#define SNDRV_SST_FW_INFO _IOR('L', 0x20, struct snd_sst_fw_info *) -#define SNDRV_SST_SET_TARGET_DEVICE _IOW('L', 0x21, \ - struct snd_sst_target_device *) - -#endif /*__INTEL_SST_IOCTL_H__*/ diff --git a/mix_audio/src/mixacp.c b/mix_audio/src/mixacp.c deleted file mode 100644 index 3478bf3..0000000 --- a/mix_audio/src/mixacp.c +++ /dev/null @@ -1,355 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** - * SECTION:mixacp - * @short_description: MixAudio configuration parameters object. - * @include: mixacp.h - * - * #MixAudio configuration parameters object which is used to communicate audio specific parameters. - * - * This object is should not be instantiated as codec specific parameters are definied in individual derive classes. 
- */ - -#include "mixacp.h" -#include - -static GType _mix_acp_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_acp_type = g_define_type_id; } - -gboolean mix_acp_copy(MixParams* target, const MixParams *src); -MixParams* mix_acp_dup(const MixParams *obj); -gboolean mix_acp_equal(MixParams* first, MixParams *second); -static void mix_acp_finalize(MixParams *obj); - -G_DEFINE_TYPE_WITH_CODE(MixAudioConfigParams, mix_acp, MIX_TYPE_PARAMS, _do_init); - -void -_mix_acp_initialize (void) -{ - /* the MixParams types need to be class_ref'd once before it can be - * done from multiple threads; - * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ - g_type_class_ref (mix_acp_get_type ()); -} - -static void mix_acp_init (MixAudioConfigParams *self) -{ - self->decode_mode = MIX_DECODE_NULL; - self->stream_name = NULL; - self->audio_manager=MIX_AUDIOMANAGER_NONE; - self->num_channels = 0; - self->bit_rate = 0; - self->sample_freq = 0; - self->bits_per_sample = MIX_ACP_BPS_16; - self->op_align = MIX_ACP_OUTPUT_ALIGN_16; -} - -static void mix_acp_class_init(MixAudioConfigParamsClass *klass) -{ - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent (klass); - - mixparams_class->finalize = mix_acp_finalize; - mixparams_class->copy = (MixParamsCopyFunction)mix_acp_copy; - mixparams_class->dup = (MixParamsDupFunction)mix_acp_dup; - mixparams_class->equal = (MixParamsEqualFunction)mix_acp_equal; - - klass->print_params = NULL; -} - -MixAudioConfigParams *mix_acp_new(void) -{ - MixAudioConfigParams *ret = (MixAudioConfigParams *)g_type_create_instance (MIX_TYPE_AUDIOCONFIGPARAMS); - - return ret; -} - -void mix_acp_finalize(MixParams *obj) -{ - /* clean up here. */ - MixAudioConfigParams *acp = MIX_AUDIOCONFIGPARAMS(obj); - - if (acp->stream_name) { - g_free(acp->stream_name); - acp->stream_name = NULL; - } - - /* Chain up parent */ - if (parent_class->finalize) - parent_class->finalize(obj); -} - -MixAudioConfigParams *mix_acp_ref(MixAudioConfigParams *mix) -{ - return (MixAudioConfigParams*)mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_acp_dup: - * @obj: a #MixAudioConfigParams object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixParams* mix_acp_dup(const MixParams *obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_AUDIOCONFIGPARAMS(obj)) - { - MixAudioConfigParams *duplicate = mix_acp_new(); - if (mix_acp_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) - { - ret = MIX_PARAMS(duplicate); - } - else - { - mix_acp_unref(duplicate); - } - } - - return ret; -} - -/** - * mix_acp_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. 
- */ -gboolean mix_acp_copy(MixParams* target, const MixParams *src) -{ - if (MIX_IS_AUDIOCONFIGPARAMS(target) && MIX_IS_AUDIOCONFIGPARAMS(src)) - { - MixAudioConfigParams *t = MIX_AUDIOCONFIGPARAMS(target); - MixAudioConfigParams *s = MIX_AUDIOCONFIGPARAMS(src); - - t->decode_mode = s->decode_mode; - t->stream_name = g_strdup(s->stream_name); - t->audio_manager=s->audio_manager; - t->num_channels = s->num_channels; - t->bit_rate = s->bit_rate; - t->sample_freq = s->sample_freq; - t->bits_per_sample = s->bits_per_sample; - t->op_align = s->op_align; - - // Now chainup base class - if (parent_class->copy) - { - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src)); - } - else - return TRUE; - } - return FALSE; -} - -/** - * mix_acp_equal: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. - */ -gboolean mix_acp_equal(MixParams* first, MixParams *second) -{ - gboolean ret = FALSE; - - if (first && second) - { - if (first == second) return TRUE; - } - else - { - // one of them is NULL. - return FALSE; - } - - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - - if (ret && MIX_IS_AUDIOCONFIGPARAMS(first) && MIX_IS_AUDIOCONFIGPARAMS(second)) - { - MixAudioConfigParams *acp1 = MIX_AUDIOCONFIGPARAMS(first); - MixAudioConfigParams *acp2 = MIX_AUDIOCONFIGPARAMS(second); - - ret = (acp1->decode_mode == acp2->decode_mode) && - (acp1->audio_manager == acp2->audio_manager) && - (acp1->num_channels == acp2->num_channels) && - (acp1->bit_rate == acp2->bit_rate) && - (acp1->sample_freq == acp2->sample_freq) && - (acp1->bits_per_sample == acp2->bits_per_sample) && - (acp1->op_align == acp2->op_align) && - (!strcmp(acp1->stream_name, acp2->stream_name)); - } - - return ret; -} - - -gboolean mix_acp_is_streamname_valid(MixAudioConfigParams *obj) -{ - if (MIX_IS_AUDIOCONFIGPARAMS(obj)) - if ((obj->stream_name) && (obj->stream_name[0] != 0)) return TRUE; - - return FALSE; -} - -gchar *mix_acp_get_streamname(MixAudioConfigParams *obj) -{ - gchar *ret = NULL; - if (G_LIKELY(MIX_IS_AUDIOCONFIGPARAMS(obj)) && obj->stream_name) - { - ret = g_strdup(obj->stream_name); - } - return ret; -} - -MIX_RESULT mix_acp_set_streamname(MixAudioConfigParams *obj, const gchar *streamname) -{ - MIX_RESULT ret = MIX_RESULT_FAIL; - - if (!obj) return MIX_RESULT_NULL_PTR; - - if (G_LIKELY(MIX_IS_AUDIOCONFIGPARAMS(obj))) - { - if (obj->stream_name) - { - g_free(obj->stream_name); - obj->stream_name = NULL; - } - - if (streamname) obj->stream_name = g_strdup(streamname); - - ret = MIX_RESULT_SUCCESS; - } - else - { - ret = MIX_RESULT_INVALID_PARAM; - } - - return ret; -} - -MixACPBPSType mix_acp_get_bps(MixAudioConfigParams *obj) -{ - if (G_LIKELY(obj)) - return obj->bits_per_sample; - else - return 0; -} - -MIX_RESULT mix_acp_set_bps(MixAudioConfigParams *obj, MixACPBPSType type) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (!obj) return MIX_RESULT_NULL_PTR; - - if (G_LIKELY(MIX_IS_AUDIOCONFIGPARAMS(obj))) - { - switch (type) - { - case MIX_ACP_BPS_UNKNOWN: - case MIX_ACP_BPS_16: - case MIX_ACP_BPS_24: - obj->bits_per_sample = type; - break; - default: - ret = MIX_RESULT_INVALID_PARAM; - break; - } - } - else - { - ret = MIX_RESULT_INVALID_PARAM; - } - - return ret; -} - - -MixACPOpAlign mix_acp_get_op_align(MixAudioConfigParams *obj) -{ - return 
(obj->op_align); -} - -MIX_RESULT mix_acp_set_op_align(MixAudioConfigParams *obj, MixACPOpAlign op_align) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if ((op_align >= MIX_ACP_OUTPUT_ALIGN_16) && (op_align < MIX_ACP_OUTPUT_ALIGN_LAST)) - obj->op_align = op_align; - else ret=MIX_RESULT_INVALID_PARAM; - - return ret; -} - -void mix_acp_print_params(MixAudioConfigParams *obj) -{ - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "decode_mode: %d\n", obj->decode_mode); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "stream_name: %s\n", obj->stream_name); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "audio_manager: %d\n", obj->audio_manager); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "num_channels: %d\n", obj->num_channels); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "bit_rate: %d\n", obj->bit_rate); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "sample_freq: %d\n", obj->sample_freq); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "bits_per_sample: %d\n", obj->bits_per_sample); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "op_align: %d\n", obj->op_align); - - MixAudioConfigParamsClass *klass = MIX_AUDIOCONFIGPARAMS_GET_CLASS(obj); - if (klass->print_params) - { - klass->print_params(obj); - } -} - - -MIX_RESULT mix_acp_set_audio_manager(MixAudioConfigParams *obj, MixAudioManager am) -{ - if (obj == NULL) return MIX_RESULT_NULL_PTR; - - if (!G_UNLIKELY(MIX_IS_AUDIOCONFIGPARAMS(obj))) - { - return MIX_RESULT_INVALID_PARAM; - } - - if ((am >= MIX_AUDIOMANAGER_LAST) || (am audio_manager = am; - } - - return MIX_RESULT_SUCCESS; - -} - -MixAudioManager mix_acp_get_audio_manager(MixAudioConfigParams *obj) -{ - if (G_LIKELY(MIX_IS_AUDIOCONFIGPARAMS(obj))) - { - return obj->audio_manager; - } - else - return MIX_AUDIOMANAGER_NONE; -} - - diff --git a/mix_audio/src/mixacp.h b/mix_audio/src/mixacp.h deleted file mode 100644 index 0acd309..0000000 --- a/mix_audio/src/mixacp.h +++ /dev/null @@ -1,367 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_AUDIOCONFIGPARAMS_H__ -#define __MIX_AUDIOCONFIGPARAMS_H__ - - -#include "mixparams.h" -#include "mixresult.h" -#include "mixaudiotypes.h" - -/** - * MixACPOpAlign: - * @MIX_ACP_OUTPUT_ALIGN_UNKNOWN: Output alignment undefined. - * @IX_ACP_OUTPUT_ALIGN_16: Output word is 16-bit aligned - * @MIX_ACP_OUTPUT_ALIGN_MSB: Output word is MSB aligned - * @MIX_ACP_OUTPUT_ALIGN_LSB: Output word is LSB aligned - * @MIX_ACP_OUTPUT_ALIGN_LAST: Last entry in list. - * - * Audio Output alignment. 
- * - */ -typedef enum { - MIX_ACP_OUTPUT_ALIGN_UNKNOWN=-1, - MIX_ACP_OUTPUT_ALIGN_16=0, - MIX_ACP_OUTPUT_ALIGN_MSB, - MIX_ACP_OUTPUT_ALIGN_LSB, - MIX_ACP_OUTPUT_ALIGN_LAST -} MixACPOpAlign; - -/** - * MixACPBPSType: - * @MIX_ACP_BPS_UNKNOWN: Bit Per Sample undefined. - * @MIX_ACP_BPS_16: Output bits per sample is 16 bits - * @MIX_ACP_BPS_24: Output bits per sample is 24 bits - * - * Audio Output Size in bits per sample. - * - */ -typedef enum { - MIX_ACP_BPS_UNKNOWN=0, - MIX_ACP_BPS_16=16, - MIX_ACP_BPS_24=24, -} MixACPBPSType; - -/** - * MIX_TYPE_AUDIOCONFIGPARAMS: - * - * Get type of class. - */ -#define MIX_TYPE_AUDIOCONFIGPARAMS (mix_acp_get_type ()) - -/** - * MIX_AUDIOCONFIGPARAMS: - * @obj: object to be type-casted. - * - * Type casting. - */ -#define MIX_AUDIOCONFIGPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOCONFIGPARAMS, MixAudioConfigParams)) - -/** - * MIX_IS_AUDIOCONFIGPARAMS: - * @obj: an object. - * - * Checks if the given object is an instance of #MixAudioConfigParams - */ -#define MIX_IS_AUDIOCONFIGPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOCONFIGPARAMS)) - -/** - * MIX_AUDIOCONFIGPARAMS_CLASS: - * @klass: class to be type-casted. - * - * Type casting. - */ -#define MIX_AUDIOCONFIGPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOCONFIGPARAMS, MixAudioConfigParamsClass)) - -/** - * MIX_IS_AUDIOCONFIGPARAMS_CLASS: - * @klass: a class. - * - * Checks if the given class is #MixAudioConfigParamsClass - */ -#define MIX_IS_AUDIOCONFIGPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOCONFIGPARAMS)) - -/** - * MIX_AUDIOCONFIGPARAMS_GET_CLASS: - * @obj: a #MixParams object. - * - * Get the class instance of the object. - */ -#define MIX_AUDIOCONFIGPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOCONFIGPARAMS, MixAudioConfigParamsClass)) - -typedef struct _MixAudioConfigParams MixAudioConfigParams; -typedef struct _MixAudioConfigParamsClass MixAudioConfigParamsClass; - -/** - * MixDecodeMode: - * @MIX_DECODE_NULL: Undefined decode mode. - * @MIX_DECODE_DIRECTRENDER: Stream is configured in Direct Render mode - * @MIX_DECODE_DECODERETURN: Stream is configured in Decode Return mode - * @MIX_DECODE_LAST: Last index in the enumeration. - * - * Operation Mode for a MI-X session. See mix_audio_configure(). - * - */ -typedef enum { - MIX_DECODE_NULL=0, - MIX_DECODE_DIRECTRENDER, - MIX_DECODE_DECODERETURN, - MIX_DECODE_LAST -} MixDecodeMode; - -/** - * MixAudioConfigParams: - * @parent: parent. - * @decode_mode: Decode Mode to use for current session. See #mix_acp_set_decodemode - * @stream_name: Stream name. See #mix_acp_set_streamname. This object will release the string upon destruction. - * @audio_manager: Type of Audio Manager. See #mix_acp_set_audio_manager. - * @num_channels: Number of output channels. See #MIX_ACP_NUM_CHANNELS - * @bit_rate: Optional. See #MIX_ACP_BITRATE - * @sample_freq: Output frequency. See #MIX_ACP_SAMPLE_FREQ - * @bits_per_sample: Number of output bit per sample. See #mix_acp_set_bps - * @op_align: Output Byte Alignment. See #mix_acp_set_op_align - * - * @MixAudio configuration parameters object. 
- */ -struct _MixAudioConfigParams -{ - /*< public >*/ - MixParams parent; - - /*< public >*/ - /* Audio Session Parameters */ - MixDecodeMode decode_mode; - gchar *stream_name; - MixAudioManager audio_manager; - - /*< public >*/ - /* Audio Format Parameters */ - gint num_channels; - gint bit_rate; - gint sample_freq; - MixACPBPSType bits_per_sample; - MixACPOpAlign op_align; - /*< private >*/ - void* reserved1; - void* reserved2; - void* reserved3; - void* reserved4; -}; - -/** - * MixAudioConfigParamsClass: - * - * MI-X Audio object class - */ -struct _MixAudioConfigParamsClass -{ - /*< public >*/ - MixParamsClass parent_class; - - /*< virtual public >*/ - void (*print_params) (MixAudioConfigParams *obj); - - /* class members */ - -}; - -/** - * mix_acp_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_acp_get_type (void); - -/** - * mix_acp_new: - * @returns: A newly allocated instance of #MixAudioConfigParams - * - * Use this method to create new instance of #MixAudioConfigParams - */ -MixAudioConfigParams *mix_acp_new(void); - -/** - * mix_acp_ref: - * @mix: object to add reference - * @returns: the MixAudioConfigParams instance where reference count has been increased. - * - * Add reference count. - */ -MixAudioConfigParams *mix_acp_ref(MixAudioConfigParams *mix); - -/** - * mix_acp_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -#define mix_acp_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/* Class Methods */ - -/** - * MIX_ACP_DECODEMODE: - * @obj: #MixAudioConfigParams object - * - * MixAudioConfigParam.decode_mode accessor. - * - * Configure the decode mode to one of #MixDecodeMode value. -*/ -#define MIX_ACP_DECODEMODE(obj) (MIX_AUDIOCONFIGPARAMS(obj)->decode_mode) - -/** - * MIX_ACP_NUM_CHANNELS: - * @obj: #MixAudioConfigParams object - * - * MixAudioConfigParam.num_channels accessor. - * - * Configure the number of output channels. This value need to be exact the same as the supported output channel in the audio since down-mixing is not supported. - * - * This value can be used during #MIX_DECODE_DECODERETURN mode for buffer size/duration calculation. - * - * In Moorestown, number of channel must be 1 or 2. -*/ -#define MIX_ACP_NUM_CHANNELS(obj) (MIX_AUDIOCONFIGPARAMS(obj)->num_channels) - -/** - * MIX_ACP_BITRATE: - * @obj: #MixAudioConfigParams object - * - * MixAudioConfigParam.bit_rate accessor. - * - * Bit rate of the current audio. - * - * Optional -*/ -#define MIX_ACP_BITRATE(obj) (MIX_AUDIOCONFIGPARAMS(obj)->bit_rate) - -/** - * MIX_ACP_SAMPLE_FREQ: - * @obj: #MixAudioConfigParams object - * - * MixAudioConfigParam.sample_freq accessor. - * - * Output sampling frequency. - * - * This value can be used during #MIX_DECODE_DECODERETURN mode for buffer size/duration calculation. -*/ -#define MIX_ACP_SAMPLE_FREQ(obj) (MIX_AUDIOCONFIGPARAMS(obj)->sample_freq) - -/** - * mix_acp_get_decodemode: - * @obj: #MixAudioConfigParams - * @returns: #MixDecodeMode - * - * Retrieve currently configured #MixDecodeMode. - */ -MixDecodeMode mix_acp_get_decodemode(MixAudioConfigParams *obj); - -/** - * mix_acp_set_decodemode: - * @obj: #MixAudioConfigParams - * @mode: #MixDecodeMode to set - * @returns: #MIX_RESULT - * - * Configure session for one of the #MixDecodeMode. - */ -MIX_RESULT mix_acp_set_decodemode(MixAudioConfigParams *obj, MixDecodeMode mode); - -/** - * mix_acp_get_streamname: - * @obj: #MixAudioConfigParams - * @returns: pointer to a copy of the stream name. NULL if name is not available. 
- * - * Returns a copy of the stream name. The caller must free it with g_free(). - */ -gchar *mix_acp_get_streamname(MixAudioConfigParams *obj); - -/** - * mix_acp_set_streamname: - * @obj: #MixAudioConfigParams - * @streamname: Stream name to set - * @returns: #MIX_RESULT - * - * Set the stream name. The object will make a copy of the input stream name string. - * - */ -MIX_RESULT mix_acp_set_streamname(MixAudioConfigParams *obj, const gchar *streamname); - -/** - * mix_acp_set_audio_manager: - * @obj: #MixAudioConfigParams - * @am: #MixAudioManager - * @returns: #MIX_RESULT - * - * Set the Audio Manager to one of the #MixAudioManager. - */ -MIX_RESULT mix_acp_set_audio_manager(MixAudioConfigParams *obj, MixAudioManager am); - -/** - * mix_acp_get_audio_manager: - * @obj: #MixAudioConfigParams - * @returns: #MixAudioManager - * - * Retrieve name of currently configured audio manager. - */ -MixAudioManager mix_acp_get_audio_manager(MixAudioConfigParams *obj); - -/** - * mix_acp_is_streamname_valid: - * @obj: #MixAudioConfigParams - * @returns: boolean indicating if the stream name is valid. - * - * Check if the stream name is valid considering the current Decode Mode. - */ -gboolean mix_acp_is_streamname_valid(MixAudioConfigParams *obj); - - -/** - * mix_acp_get_bps: - * @obj: #MixAudioConfigParams - * @returns: #MixACPBPSType - * - * Retrieve the currently configured bits-per-sample value. - */ -MixACPBPSType mix_acp_get_bps(MixAudioConfigParams *obj); - -/** - * mix_acp_set_bps: - * @obj: #MixAudioConfigParams - * @type: #MixACPBPSType to set - * @returns: #MIX_RESULT - * - * Configure bits-per-sample to one of the supported #MixACPBPSType values. - */ -MIX_RESULT mix_acp_set_bps(MixAudioConfigParams *obj, MixACPBPSType type); - -/** - * mix_acp_get_op_align: - * @obj: #MixAudioConfigParams object - * @returns: #MixACPOpAlign - * - * Get Output Alignment. - */ -MixACPOpAlign mix_acp_get_op_align(MixAudioConfigParams *obj); - -/** - * mix_acp_set_op_align: - * @obj: #MixAudioConfigParams object - * @op_align: One of the supported #MixACPOpAlign - * @returns: MIX_RESULT - * - * Set Output Alignment to one of the #MixACPOpAlign values. - */ -MIX_RESULT mix_acp_set_op_align(MixAudioConfigParams *obj, MixACPOpAlign op_align); - -/* void mix_acp_print_params(MixAudioConfigParams *obj); */ - - -#endif /* __MIX_AUDIOCONFIGPARAMS_H__ */ - diff --git a/mix_audio/src/mixacpaac.c b/mix_audio/src/mixacpaac.c deleted file mode 100644 index 4b47c3d..0000000 --- a/mix_audio/src/mixacpaac.c +++ /dev/null @@ -1,364 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise.
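Taken together, the accessors above cover everything a caller must set before handing the object to mix_audio_configure(). A minimal sketch of that flow follows; it is not part of the library, and the stream name and parameter values (48 kHz stereo, 16-bit output) are illustrative rather than required.

    #include "mixacp.h"

    /* Sketch: build a base configuration for Direct Render playback.
       Error handling is collapsed to a running MIX_RESULT check. */
    MIX_RESULT build_base_config(MixAudioConfigParams **out)
    {
        MixAudioConfigParams *acp = mix_acp_new();
        if (!acp) return MIX_RESULT_NULL_PTR;

        MIX_RESULT ret = mix_acp_set_decodemode(acp, MIX_DECODE_DIRECTRENDER);
        if (ret == MIX_RESULT_SUCCESS)
            ret = mix_acp_set_streamname(acp, "example-stream"); /* illustrative name */
        if (ret == MIX_RESULT_SUCCESS)
            ret = mix_acp_set_bps(acp, MIX_ACP_BPS_16);
        if (ret == MIX_RESULT_SUCCESS)
            ret = mix_acp_set_op_align(acp, MIX_ACP_OUTPUT_ALIGN_16);

        /* The remaining fields are plain lvalue macros. */
        MIX_ACP_NUM_CHANNELS(acp) = 2;     /* must match the source; no down-mixing */
        MIX_ACP_SAMPLE_FREQ(acp)  = 48000;

        if (ret == MIX_RESULT_SUCCESS)
            *out = acp;
        else
            mix_acp_unref(acp);
        return ret;
    }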
Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** - * SECTION:mixacpaac - * @short_description: Audio configuration parameters for AAC-LC, HEAAC v1, and HEAAC v2 audio formats. - * @include: mixacpaac.h - * - * A data object which stores audio specific parameters for the following formats: - * - * AAC-LC - * HE-AAC v1 - * HE-AAC v2 - * - * - * Additional parameters must be set in the parent object #MixAudioConfigParams - */ - -#include "mixacpaac.h" -#include -#include - -static GType _mix_acp_aac_type = 0; -static MixAudioConfigParamsClass *parent_class = NULL; - -#define _do_init { _mix_acp_aac_type = g_define_type_id; } - -gboolean mix_acp_aac_copy(MixParams* target, const MixParams *src); -MixParams* mix_acp_aac_dup(const MixParams *obj); -gboolean mix_acp_aac_equal(MixParams* first, MixParams *second); -static void mix_acp_aac_finalize(MixParams *obj); - -void mix_aac_print_params(MixAudioConfigParams *obj); - -G_DEFINE_TYPE_WITH_CODE(MixAudioConfigParamsAAC, mix_acp_aac, MIX_TYPE_AUDIOCONFIGPARAMS, _do_init); - -static void mix_acp_aac_init (MixAudioConfigParamsAAC *self) -{ - self->MPEG_id = MIX_AAC_MPEG_ID_NULL; - self->bit_stream_format= MIX_AAC_BS_NULL; - self->aac_profile=MIX_AAC_PROFILE_NULL; - self->aot=0; - self->bit_rate_type=MIX_AAC_BR_NULL; /* 0=CBR, 1=VBR */ - self->CRC=FALSE; - self->sbrPresentFlag = -1; - self->psPresentFlag = -1; - self->pce_present=FALSE; /* Flag. 1- present 0 - not present, for RAW */ - self->syntc_id[0] = self->syntc_id[1] = 0; /* 0 for ID_SCE(Dual Mono), -1 for raw */ - self->syntc_tag[0] = self->syntc_tag[1] = 0; /* raw - -1 and 0 -16 for rest of the streams */ - self->num_syntc_elems = 0; - self->aac_sample_rate = 0; - self->aac_channels = 0; -} - -static void mix_acp_aac_class_init(MixAudioConfigParamsAACClass *klass) -{ - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixAudioConfigParamsClass *) g_type_class_peek_parent (klass); - - mixparams_class->finalize = mix_acp_aac_finalize; - mixparams_class->copy = (MixParamsCopyFunction)mix_acp_aac_copy; - mixparams_class->dup = (MixParamsDupFunction)mix_acp_aac_dup; - mixparams_class->equal = (MixParamsEqualFunction)mix_acp_aac_equal; - -// MixAudioConfigParamsClass *acp = MIX_AUDIOCONFIGPARAMS_GET_CLASS(klass); - MixAudioConfigParamsClass *acp = (MixAudioConfigParamsClass *)klass; - acp->print_params = mix_aac_print_params; -} - -MixAudioConfigParamsAAC *mix_acp_aac_new(void) -{ - MixAudioConfigParamsAAC *ret = (MixAudioConfigParamsAAC *)g_type_create_instance (MIX_TYPE_AUDIOCONFIGPARAMSAAC); - - return ret; -} - -void mix_acp_aac_finalize(MixParams *obj) -{ - /* clean up here. */ - - /* Chain up parent */ - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->finalize) - klass->finalize(obj); -} - -MixAudioConfigParamsAAC *mix_acp_aac_ref(MixAudioConfigParamsAAC *mix) -{ - return (MixAudioConfigParamsAAC*)mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_acp_aac_dup: - * @obj: a #MixAudioConfigParamsAAC object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object.
- */ -MixParams* mix_acp_aac_dup(const MixParams *obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_AUDIOCONFIGPARAMSAAC(obj)) - { - MixAudioConfigParamsAAC *duplicate = mix_acp_aac_new(); - if (mix_acp_aac_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) - { - ret = MIX_PARAMS(duplicate); - } - else - { - mix_acp_aac_unref(duplicate); - } - } - - return ret; -} - -/** - * mix_acp_aac_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_acp_aac_copy(MixParams* target, const MixParams *src) -{ - if (MIX_IS_AUDIOCONFIGPARAMSAAC(target) && MIX_IS_AUDIOCONFIGPARAMSAAC(src)) - { - MixAudioConfigParamsAAC *t = MIX_AUDIOCONFIGPARAMSAAC(target); - MixAudioConfigParamsAAC *s = MIX_AUDIOCONFIGPARAMSAAC(src); - - t->MPEG_id = s->MPEG_id; - t->bit_stream_format = s->bit_stream_format; - t->aac_profile = s->aac_profile; - t->aot = s->aot; - t->bit_rate_type = s->bit_rate_type; - t->CRC = s->CRC; - - // Now chainup base class - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->copy) - { - return klass->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src)); - } - else - return TRUE; - } - return FALSE; -} - -/** - * mix_acp_aac_equal: - * @first: first object to compare - * @second: second object to compare - * @returns: boolean indicating if the instances are equal. - * - * Compare the two objects for equality. - */ -gboolean mix_acp_aac_equal(MixParams* first, MixParams *second) -{ - gboolean ret = FALSE; - - if (first && second) - { - if (first == second) return TRUE; - } - else - { - return FALSE; - } - - // If either one of the inputs is not the same class then forget it. - if (!MIX_IS_AUDIOCONFIGPARAMSAAC(first) || !MIX_IS_AUDIOCONFIGPARAMSAAC(second)) - return FALSE; - - // members within this scope equal. chaining up.
- MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = klass->equal(first, second); - else - ret = TRUE; - - if (ret) - { - - MixAudioConfigParamsAAC *acp1 = MIX_AUDIOCONFIGPARAMSAAC(first); - MixAudioConfigParamsAAC *acp2 = MIX_AUDIOCONFIGPARAMSAAC(second); - - ret = (acp1->MPEG_id == acp2->MPEG_id) && - (acp1->bit_stream_format == acp2->bit_stream_format) && - (acp1->aac_profile == acp2->aac_profile) && - (acp1->aot == acp2->aot) && - (acp1->bit_rate_type == acp2->bit_rate_type) && - (acp1->CRC == acp2->CRC) && - (acp1->sbrPresentFlag == acp2->sbrPresentFlag) && - (acp1->psPresentFlag == acp2->psPresentFlag) && - (acp1->pce_present == acp2->pce_present) && - (acp1->syntc_id[0] == acp2->syntc_id[0]) && - (acp1->syntc_id[1] == acp2->syntc_id[1]) && - (acp1->syntc_tag[0] == acp2->syntc_tag[0]) && - (acp1->syntc_tag[1] == acp2->syntc_tag[1]); - } - - return ret; -} - -MIX_RESULT mix_acp_aac_set_bit_stream_format(MixAudioConfigParamsAAC *obj, MixAACBitstreamFormt bit_stream_format) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (!obj) return MIX_RESULT_NULL_PTR; - - if (bit_stream_format < MIX_AAC_BS_ADTS || bit_stream_format >= MIX_AAC_BS_LAST) - { - ret = MIX_RESULT_INVALID_PARAM; - } - else - { - obj->bit_stream_format = bit_stream_format; - } - - return ret; -} -MixAACBitstreamFormt mix_acp_aac_get_bit_stream_format(MixAudioConfigParamsAAC *obj) -{ - if (obj) - return obj->bit_stream_format; - else - return MIX_AAC_BS_NULL; -} - -MIX_RESULT mix_acp_aac_set_aac_profile(MixAudioConfigParamsAAC *obj, MixAACProfile aac_profile) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (!obj) return MIX_RESULT_NULL_PTR; - - if (aac_profile < MIX_AAC_PROFILE_MAIN || aac_profile >= MIX_AAC_PROFILE_LAST) - { - ret = MIX_RESULT_INVALID_PARAM; - } - else - { - obj->aac_profile = aac_profile; - } - - return ret; -} -MixAACProfile mix_acp_aac_get_aac_profile(MixAudioConfigParamsAAC *obj) -{ - if (obj) - return obj->aac_profile; - else - return MIX_AAC_PROFILE_NULL; -} - -MIX_RESULT mix_acp_aac_set_bit_rate_type(MixAudioConfigParamsAAC *obj, MixAACBitrateType bit_rate_type) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (!obj) return MIX_RESULT_NULL_PTR; - - if (bit_rate_type != MIX_AAC_BR_CONSTANT && bit_rate_type != MIX_AAC_BR_VARIABLE) - { - ret = MIX_RESULT_INVALID_PARAM; - } - else - { - obj->bit_rate_type = bit_rate_type; - } - - return ret; -} -MixAACBitrateType mix_acp_aac_get_bit_rate_type(MixAudioConfigParamsAAC *obj) -{ - if (obj) - return obj->bit_rate_type; - else - return MIX_AAC_BR_NULL; -} - -void mix_aac_print_params(MixAudioConfigParams *obj) -{ - MixAudioConfigParamsAAC *t = MIX_AUDIOCONFIGPARAMSAAC(obj); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "Mpeg ID: %d\n", t->MPEG_id); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "bit_stream_format: %d\n", t->bit_stream_format); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "aac_profile: %d\n", t->aac_profile); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "aot: %d\n", t->aot); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "bit_rate_type: %d\n", t->bit_rate_type); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "CRC: %d\n", t->CRC); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, " \n"); -} - - -MIX_RESULT mix_acp_aac_set_aot(MixAudioConfigParamsAAC *obj, guint aot) -{ - if (!obj) return MIX_RESULT_NULL_PTR; - - if (MIX_IS_AUDIOCONFIGPARAMSAAC(obj)) - { - if ((aot == 2) || (aot == 5)) - { - obj->aot=aot; - return MIX_RESULT_SUCCESS; - } - else - { - return MIX_RESULT_NOT_SUPPORTED; - } - } - else - { - return
MIX_RESULT_INVALID_PARAM; - } -} - -guint mix_acp_aac_get_aot(MixAudioConfigParamsAAC *obj) -{ - if (MIX_IS_AUDIOCONFIGPARAMSAAC(obj)) - return obj->aot; - else - return 0; -} - - -MIX_RESULT mix_acp_aac_set_mpeg_id(MixAudioConfigParamsAAC *obj, MixAACMpegID mpegid) -{ - if (!obj) return MIX_RESULT_NULL_PTR; - - if (MIX_IS_AUDIOCONFIGPARAMSAAC(obj)) - { - if ((mpegid > MIX_AAC_MPEG_ID_NULL) && (mpegid < MIX_AAC_MPEG_LAST)) - { - obj->MPEG_id=mpegid; - return MIX_RESULT_SUCCESS; - } - else - { - return MIX_RESULT_NOT_SUPPORTED; - } - } - else - { - return MIX_RESULT_INVALID_PARAM; - } -} - -MixAACMpegID mix_acp_aac_get_mpeg_id(MixAudioConfigParamsAAC *obj) -{ - if (MIX_IS_AUDIOCONFIGPARAMSAAC(obj)) - return obj->MPEG_id; - else - return MIX_AAC_MPEG_ID_NULL; -} - diff --git a/mix_audio/src/mixacpaac.h b/mix_audio/src/mixacpaac.h deleted file mode 100644 index 7de2d95..0000000 --- a/mix_audio/src/mixacpaac.h +++ /dev/null @@ -1,413 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_ACP_AAC_H__ -#define __MIX_ACP_AAC_H__ - -#include "mixacp.h" - -/** - * MIX_TYPE_AUDIOCONFIGPARAMSAAC: - * - * Get type of class. - */ -#define MIX_TYPE_AUDIOCONFIGPARAMSAAC (mix_acp_aac_get_type ()) - -/** - * MIX_AUDIOCONFIGPARAMSAAC: - * @obj: object to be type-casted. - * - * Type casting - */ -#define MIX_AUDIOCONFIGPARAMSAAC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOCONFIGPARAMSAAC, MixAudioConfigParamsAAC)) - -/** - * MIX_IS_AUDIOCONFIGPARAMSAAC: - * @obj: an object. - * - * Checks if the given object is an instance of #MixAudioConfigParamsAAC - */ -#define MIX_IS_AUDIOCONFIGPARAMSAAC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOCONFIGPARAMSAAC)) - -/** - * MIX_AUDIOCONFIGPARAMSAAC_CLASS: - * @klass: class to be type-casted. - * - * Type Casting. - */ -#define MIX_AUDIOCONFIGPARAMSAAC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOCONFIGPARAMSAAC, MixAudioConfigParamsAACClass)) - -/** - * MIX_IS_AUDIOCONFIGPARAMSAAC_CLASS: - * @klass: a class. - * - * Checks if the given class is #MixAudioConfigParamsAACClass - */ -#define MIX_IS_AUDIOCONFIGPARAMSAAC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOCONFIGPARAMSAAC)) - -/** - * MIX_AUDIOCONFIGPARAMSAAC_GET_CLASS: - * @obj: a #MixAudioConfigParamsAAC object. - * - * Get the class instance of the object.
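These check macros expand to GLib run-time type tests, which is what makes downcasting from MixParams safe. A small defensive sketch in the same style the implementation itself uses; print_if_aac is a hypothetical caller, not part of the library:

    static void print_if_aac(MixParams *params)
    {
        /* MIX_IS_AUDIOCONFIGPARAMSAAC rejects NULL and any
           MixParams instance of a different subtype. */
        if (!MIX_IS_AUDIOCONFIGPARAMSAAC(params))
            return;

        MixAudioConfigParamsAAC *aac = MIX_AUDIOCONFIGPARAMSAAC(params);
        mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "aot: %u\n", aac->aot);
    }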
- */ -#define MIX_AUDIOCONFIGPARAMSAAC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOCONFIGPARAMSAAC, MixAudioConfigParamsAACClass)) - -typedef struct _MixAudioConfigParamsAAC MixAudioConfigParamsAAC; -typedef struct _MixAudioConfigParamsAACClass MixAudioConfigParamsAACClass; - -/** - * MixAACBitrateType: - * @MIX_AAC_BR_NULL: Undefined bit rate type. - * @MIX_AAC_BR_CONSTANT: Constant bit rate. - * @MIX_AAC_BR_VARIABLE: Variable bit rate. - * @MIX_AAC_BR_LAST: last entry. - * - * Types of bitrate in AAC. - */ -typedef enum { - MIX_AAC_BR_NULL=-1, - MIX_AAC_BR_CONSTANT=0, - MIX_AAC_BR_VARIABLE, - MIX_AAC_BR_LAST -} MixAACBitrateType; - -/** - * MixAACBitstreamFormt: - * @MIX_AAC_BS_NULL: Undefined bitstream format. - * @MIX_AAC_BS_ADTS: Bitstream is in ADTS format. - * @MIX_AAC_BS_ADIF: Bitstream is in ADIF format. - * @MIX_AAC_BS_RAW: Bitstream is in raw format. - * @MIX_AAC_BS_LAST: Last entry. - * - * AAC bitstream format. - */ -typedef enum { - MIX_AAC_BS_NULL=-1, - MIX_AAC_BS_ADTS=0, - MIX_AAC_BS_ADIF, - MIX_AAC_BS_RAW, - MIX_AAC_BS_LAST -} MixAACBitstreamFormt; - -/** - * MixAACProfile: - * @MIX_AAC_PROFILE_NULL: Undefined profile. - * @MIX_AAC_PROFILE_MAIN: AAC Main profile (not supported). - * @MIX_AAC_PROFILE_LC: AAC-LC profile, including support of the SBR and PS tools. - * @MIX_AAC_PROFILE_SSR: SSR profile (not supported). - * @MIX_AAC_PROFILE_LAST: Last entry. - * - * AAC profile definitions. - */ -typedef enum { - MIX_AAC_PROFILE_NULL=-1, - MIX_AAC_PROFILE_MAIN=0, - MIX_AAC_PROFILE_LC, - MIX_AAC_PROFILE_SSR, - MIX_AAC_PROFILE_LAST -} MixAACProfile; - -/* Using enumeration as this MPEG ID definition is specific to SST and different from - any MPEG/ADTS header. -*/ -/** - * MixAACMpegID: - * @MIX_AAC_MPEG_ID_NULL: Undefined MPEG ID. - * @MIX_AAC_MPEG_2_ID: Indicates MPEG 2 Audio. - * @MIX_AAC_MPEG_4_ID: Indicates MPEG 4 Audio. - * @MIX_AAC_MPEG_LAST: last entry. - * - * AAC MPEG ID. -*/ -typedef enum { - MIX_AAC_MPEG_ID_NULL=-1, - MIX_AAC_MPEG_2_ID = 0, - MIX_AAC_MPEG_4_ID = 1, - MIX_AAC_MPEG_LAST -} MixAACMpegID; - -/** - * MixAudioConfigParamsAAC: - * @parent: parent. - * @MPEG_id: MPEG ID. See #mix_acp_aac_set_mpeg_id - * @bit_stream_format: Bitstream format. See #mix_acp_aac_set_bit_stream_format. - * @aac_profile: AAC profile. See #mix_acp_aac_set_aac_profile. - * @aot: Audio object type. See #mix_acp_aac_set_aot - * @aac_sample_rate: See #MIX_ACP_AAC_SAMPLE_RATE macro. - * @aac_channels: See #MIX_ACP_AAC_CHANNELS macro. - * @bit_rate_type: Bitrate type. See #mix_acp_aac_set_bit_rate_type - * @sbrPresentFlag: See #MIX_ACP_AAC_SBR_FLAG macro. - * @psPresentFlag: See #MIX_ACP_AAC_PS_FLAG macro. - * @CRC: CRC check 0:disable, 1:enable. - * @pce_present: Not Used. See #MIX_ACP_AAC_PCE_FLAG - * @syntc_id: Not Used. 0 for ID_SCE (Dual Mono), -1 for raw. - * @syntc_tag: Not Used. -1 for raw. 0-16 for rest of the streams. - * @num_syntc_elems: Not Used. Number of syntactic elements.
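Putting the four enums above together, here is a sketch (not part of the library) that configures the object for a typical ADTS AAC-LC stream using the setters declared later in this header:

    /* Sketch: configure for MPEG-4 AAC-LC carried in ADTS. */
    MIX_RESULT setup_adts_aac_lc(MixAudioConfigParamsAAC *aac)
    {
        MIX_RESULT ret = mix_acp_aac_set_mpeg_id(aac, MIX_AAC_MPEG_4_ID);
        if (ret == MIX_RESULT_SUCCESS)
            ret = mix_acp_aac_set_bit_stream_format(aac, MIX_AAC_BS_ADTS);
        if (ret == MIX_RESULT_SUCCESS)
            ret = mix_acp_aac_set_aac_profile(aac, MIX_AAC_PROFILE_LC); /* only LC supported */
        if (ret == MIX_RESULT_SUCCESS)
            ret = mix_acp_aac_set_aot(aac, 2);  /* 2 = AAC-LC, 5 = SBR */
        if (ret == MIX_RESULT_SUCCESS)
            ret = mix_acp_aac_set_bit_rate_type(aac, MIX_AAC_BR_CONSTANT);
        return ret;
    }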
- * - * MixAudio Parameter object - */ -struct _MixAudioConfigParamsAAC -{ - /*< public >*/ - MixAudioConfigParams parent; - - /*< public >*/ - /* Audio Format Parameters */ - MixAACMpegID MPEG_id; - MixAACBitstreamFormt bit_stream_format; - MixAACProfile aac_profile; - guint aot; - guint aac_sample_rate; - guint aac_channels; - MixAACBitrateType bit_rate_type; - gboolean CRC; - guint sbrPresentFlag; - guint psPresentFlag; - gboolean pce_present; - gint8 syntc_id[2]; - gint8 syntc_tag[2]; - gint num_syntc_elems; - /*< private >*/ - void* reserved1; - void* reserved2; - void* reserved3; - void* reserved4; -}; - -/** - * MixAudioConfigParamsAACClass: - * - * MI-X Audio object class - */ -struct _MixAudioConfigParamsAACClass -{ - /*< public >*/ - MixAudioConfigParamsClass parent_class; - - /* class members */ -}; - -/** - * mix_acp_aac_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_acp_aac_get_type (void); - -/** - * mix_acp_aac_new: - * @returns: A newly allocated instance of #MixAudioConfigParamsAAC - * - * Use this method to create a new instance of #MixAudioConfigParamsAAC - */ -MixAudioConfigParamsAAC *mix_acp_aac_new(void); - -/** - * mix_acp_aac_ref: - * @mix: object to add reference - * @returns: the MixAudioConfigParamsAAC instance where reference count has been increased. - * - * Add reference count. - */ -MixAudioConfigParamsAAC *mix_acp_aac_ref(MixAudioConfigParamsAAC *mix); - -/** - * mix_acp_aac_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -#define mix_acp_aac_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/* Class Methods */ - - -/** - * mix_acp_aac_set_mpeg_id: - * @obj: #MixAudioConfigParamsAAC - * @mpegid: MPEG ID to set. - * @returns: MIX_RESULT - * - * Configure decoder to treat audio as MPEG 2 or MPEG 4. -*/ -MIX_RESULT mix_acp_aac_set_mpeg_id(MixAudioConfigParamsAAC *obj, MixAACMpegID mpegid); - -/** - * mix_acp_aac_get_mpeg_id: - * @obj: #MixAudioConfigParamsAAC object - * @returns: MPEG ID. - * - * Retrieve the currently configured MPEG ID value. -*/ -MixAACMpegID mix_acp_aac_get_mpeg_id(MixAudioConfigParamsAAC *obj); - -/** - * MIX_ACP_AAC_CRC: - * @obj: #MixAudioConfigParamsAAC object. - * - * #MixAudioConfigParamAAC.CRC accessor. -*/ -#define MIX_ACP_AAC_CRC(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->CRC) - -/** - * mix_acp_aac_set_aot: - * @obj: #MixAudioConfigParamsAAC - * @aot: Audio Object Type. - * - * Audio Object Type for the MPEG-4 audio stream. Valid values are: - * - * 2 - for AAC-LC - * - * 5 - for SBR - * - * The method returns MIX_RESULT_NOT_SUPPORTED for unsupported values. - * -*/ -MIX_RESULT mix_acp_aac_set_aot(MixAudioConfigParamsAAC *obj, guint aot); - -/** - * mix_acp_aac_get_aot: - * @obj: #MixAudioConfigParamsAAC - * @returns: Currently configured audio object type, or 0 if not yet specified. - * - * Retrieve the currently configured audio object type. -*/ -guint mix_acp_aac_get_aot(MixAudioConfigParamsAAC *obj); - -/** - * MIX_ACP_AAC_SBR_FLAG: - * @obj: #MixAudioConfigParamsAAC object - * - * MixAudioConfigParamAAC.sbrPresentFlag accessor. - * - * Applicable only when @bit_stream_format==#MIX_AAC_BS_RAW. Indicates whether SBR data is present. - * - * 0: Absent - * - * 1: Present - * - * -1 (0xffffffff): indicates implicit signalling.
- */ -#define MIX_ACP_AAC_SBR_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->sbrPresentFlag) - -/** - * MIX_ACP_AAC_PS_FLAG: - * @obj: #MixAudioConfigParamsAAC object - * - * MixAudioConfigParamAAC.psPresentFlag accessor. - * - * Applicable only when @bit_stream_format==#MIX_AAC_BS_RAW. Indicates whether PS data is present. - * - * 0: Absent - * - * 1: Present - * - * -1 (0xffffffff): indicates implicit signalling. - */ -#define MIX_ACP_AAC_PS_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->psPresentFlag) - -/** - * MIX_ACP_AAC_PCE_FLAG: - * @obj: #MixAudioConfigParamsAAC object. - * - * MixAudioConfigParamAAC.pce_present accessor. - * - * Applicable only when @bit_stream_format==#MIX_AAC_BS_RAW. Indicates PCE data presence. - * - * 1:present - * - * 0:absent. - * - * Not Used on Moorestown. - */ -#define MIX_ACP_AAC_PCE_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->pce_present) - -/** - * MIX_ACP_AAC_SAMPLE_RATE: - * @obj: #MixAudioConfigParamsAAC object. - * - * MixAudioConfigParamAAC.aac_sample_rate accessor. - * - * Plain AAC decoder operating sample rate. Which could be different from the output sampling rate with HE AAC v1 and v2. - */ -#define MIX_ACP_AAC_SAMPLE_RATE(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->aac_sample_rate) - -/** - * MIX_ACP_AAC_CHANNELS: - * @obj: #MixAudioConfigParamsAAC - * - * MixAudioConfigParamAAC.aac_channels accessor. - * - * Indicates the number of output channels used by AAC decoder before SBR or PS tools are applied. - * - */ -#define MIX_ACP_AAC_CHANNELS(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->aac_channels) - -/** - * mix_acp_aac_get_bit_stream_format: - * @obj: #MixAudioConfigParamsAAC - * @returns: #MixAACBitstreamFormt - * - * Return the bitstream format currently configured. - */ -MixAACBitstreamFormt mix_acp_aac_get_bit_stream_format(MixAudioConfigParamsAAC *obj); - -/** - * mix_acp_aac_set_bit_stream_format: - * @obj: #MixAudioConfigParamsAAC - * @bit_stream_format: Bit stream format. - * @returns: MIX_RESULT - * - * Set the type of bitstream format as specified in #MixAACBitstreamFormt. - */ -MIX_RESULT mix_acp_aac_set_bit_stream_format(MixAudioConfigParamsAAC *obj, MixAACBitstreamFormt bit_stream_format); - -/** - * mix_acp_aac_get_aac_profile: - * @obj: #MixAudioConfigParamsAAC - * @returns: #MixAACProfile - * - * Retrieve the AAC profile currently configured. - */ -MixAACProfile mix_acp_aac_get_aac_profile(MixAudioConfigParamsAAC *obj); - -/** - * mix_acp_aac_set_aac_profile: - * @obj: #MixAudioConfigParamsAAC - * @aac_profile: AAC profile to set. - * @returns: MIX_RESULT - * - * Configure AAC profile for current session. - * - * Only #MIX_AAC_PROFILE_LC is supported in Moorestown. - */ -MIX_RESULT mix_acp_aac_set_aac_profile(MixAudioConfigParamsAAC *obj, MixAACProfile aac_profile); - -/** - * mix_acp_aac_get_bit_rate_type: - * @obj: #MixAudioConfigParamsAAC - * @returns: #MixAACBitrateType - * - * Retrieve the bit rate type currently configured. - */ -MixAACBitrateType mix_acp_aac_get_bit_rate_type(MixAudioConfigParamsAAC *obj); - -/** - * mix_acp_aac_set_bit_rate_type: - * @obj: #MixAudioConfigParamsAAC - * @bit_rate_type: Bit rate type to set. - * @returns: MIX_RESULT - * - * Set the bit rate type used. 
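For raw streams the SBR/PS presence flags above must be set explicitly, since there is no ADTS/ADIF header to signal them. A short sketch follows; the doubling of the output rate under SBR is stated as typical HE-AAC behaviour, not something this header guarantees:

    /* Raw HE-AAC v1: SBR explicitly present, PS absent. */
    MIX_ACP_AAC_SBR_FLAG(aac) = 1;
    MIX_ACP_AAC_PS_FLAG(aac)  = 0;

    /* Core decoder rate and channels, before the SBR/PS tools run.
       With SBR active the rendered rate is typically twice this, so
       MIX_ACP_SAMPLE_FREQ on the parent object could be 48000 while
       the core decoder runs at 24000. */
    MIX_ACP_AAC_SAMPLE_RATE(aac) = 24000;
    MIX_ACP_AAC_CHANNELS(aac)    = 2;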
- */ -MIX_RESULT mix_acp_aac_set_bit_rate_type(MixAudioConfigParamsAAC *obj, MixAACBitrateType bit_rate_type); - -#endif /* __MIX_AUDIOCONFIGPARAMSAAC_H__ */ diff --git a/mix_audio/src/mixacpmp3.c b/mix_audio/src/mixacpmp3.c deleted file mode 100644 index 5514a24..0000000 --- a/mix_audio/src/mixacpmp3.c +++ /dev/null @@ -1,178 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** - * SECTION:mixacpmp3 - * @short_description: Audio configuration parameters for MP3 audio. - * @include: mixacpmp3.h - * - * A data object which stores audio specific parameters for MP3 audio. - * - * Additional parameters must be set in the parent object #MixAudioConfigParams - */ - -#include "mixacpmp3.h" - -static GType _mix_acp_mp3_type = 0; -static MixAudioConfigParamsClass *parent_class = NULL; - -#define _do_init { _mix_acp_mp3_type = g_define_type_id; } - -gboolean mix_acp_mp3_copy(MixParams* target, const MixParams *src); -MixParams* mix_acp_mp3_dup(const MixParams *obj); -gboolean mix_acp_mp3_equal(MixParams* first, MixParams *second); -static void mix_acp_mp3_finalize(MixParams *obj); - -G_DEFINE_TYPE_WITH_CODE(MixAudioConfigParamsMP3, mix_acp_mp3, MIX_TYPE_AUDIOCONFIGPARAMS, _do_init); - -static void mix_acp_mp3_init (MixAudioConfigParamsMP3 *self) -{ - self->CRC=FALSE; - self->MPEG_format=0; - self->MPEG_layer=0; -} - -static void mix_acp_mp3_class_init(MixAudioConfigParamsMP3Class *klass) -{ - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixAudioConfigParamsClass *) g_type_class_peek_parent (klass); - - mixparams_class->finalize = mix_acp_mp3_finalize; - mixparams_class->copy = (MixParamsCopyFunction)mix_acp_mp3_copy; - mixparams_class->dup = (MixParamsDupFunction)mix_acp_mp3_dup; - mixparams_class->equal = (MixParamsEqualFunction)mix_acp_mp3_equal; -} - -MixAudioConfigParamsMP3 *mix_acp_mp3_new(void) -{ - MixAudioConfigParamsMP3 *ret = (MixAudioConfigParamsMP3 *)g_type_create_instance (MIX_TYPE_AUDIOCONFIGPARAMSMP3); - - return ret; -} - -void mix_acp_mp3_finalize(MixParams *obj) -{ - /* clean up here. 
*/ - - /* Chain up parent */ - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->finalize) - klass->finalize(obj); -} - -MixAudioConfigParamsMP3 *mix_acp_mp3_ref(MixAudioConfigParamsMP3 *mix) -{ - if (G_UNLIKELY(!mix)) return NULL; - return (MixAudioConfigParamsMP3*)mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_acp_mp3_dup: - * @obj: a #MixAudioConfigParamsMP3 object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixParams* mix_acp_mp3_dup(const MixParams *obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_AUDIOCONFIGPARAMSMP3(obj)) - { - MixAudioConfigParamsMP3 *duplicate = mix_acp_mp3_new(); - if (mix_acp_mp3_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) - { - ret = MIX_PARAMS(duplicate); - } - else - { - mix_acp_mp3_unref(duplicate); - } - } - - return ret; -} - -/** - * mix_acp_mp3_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_acp_mp3_copy(MixParams* target, const MixParams *src) -{ - if (MIX_IS_AUDIOCONFIGPARAMSMP3(target) && MIX_IS_AUDIOCONFIGPARAMSMP3(src)) - { - MixAudioConfigParamsMP3 *t = MIX_AUDIOCONFIGPARAMSMP3(target); - MixAudioConfigParamsMP3 *s = MIX_AUDIOCONFIGPARAMSMP3(src); - - t->CRC = s->CRC; - t->MPEG_format = s->MPEG_format; - t->MPEG_layer = s->MPEG_layer; - - // Now chainup base class - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->copy) - { - return klass->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src)); - } - else - return TRUE; - } - return FALSE; -} - -/** - * mix_acp_mp3_equal: - * @first: first object to compare - * @second: second object to compare - * @returns: boolean indicating if the instances are equal. - * - * Compare the two objects for equality. - */ -gboolean mix_acp_mp3_equal(MixParams* first, MixParams *second) -{ - gboolean ret = FALSE; - - if (first && second) - { - if (first == second) return TRUE; - } - else - { - return FALSE; - } - - if (!MIX_IS_AUDIOCONFIGPARAMSMP3(first) || !MIX_IS_AUDIOCONFIGPARAMSMP3(second)) - return FALSE; - - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = klass->equal(first, second); - else - ret = TRUE; - - if (ret) - { - MixAudioConfigParamsMP3 *acp1 = MIX_AUDIOCONFIGPARAMSMP3(first); - MixAudioConfigParamsMP3 *acp2 = MIX_AUDIOCONFIGPARAMSMP3(second); - - ret = (acp1->CRC == acp2->CRC) && - (acp1->MPEG_format == acp2->MPEG_format) && - (acp1->MPEG_layer == acp2->MPEG_layer); - } - - return ret; -} - - diff --git a/mix_audio/src/mixacpmp3.h b/mix_audio/src/mixacpmp3.h deleted file mode 100644 index e000b4f..0000000 --- a/mix_audio/src/mixacpmp3.h +++ /dev/null @@ -1,170 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions.
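One detail worth noting in mix_acp_mp3_ref() above: unlike the AAC variant, it guards against a NULL argument before chaining to mix_params_ref(). The counting convention itself is the usual one; a hedged sketch (the refcount values in the comments are assumed, not verified against mixparams.c):

    MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new();      /* refcount 1 (assumed) */
    MixAudioConfigParamsMP3 *kept = mix_acp_mp3_ref(mp3);  /* refcount 2 */

    /* ...hand `mp3` to a consumer that takes ownership... */
    mix_acp_mp3_unref(mp3);   /* consumer's reference released */

    /* `kept` remains valid until the final unref. */
    mix_acp_mp3_unref(kept);  /* refcount 0: finalize chain runs */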
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_ACP_MP3_H__ -#define __MIX_ACP_MP3_H__ - - -#include "mixacp.h" - -/** - * MIX_TYPE_AUDIOCONFIGPARAMSMP3: - * - * Get type of class. - */ -#define MIX_TYPE_AUDIOCONFIGPARAMSMP3 (mix_acp_mp3_get_type ()) - -/** - * MIX_AUDIOCONFIGPARAMSMP3: - * @obj: object to be type-casted. - * - * Type casting. - */ -#define MIX_AUDIOCONFIGPARAMSMP3(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOCONFIGPARAMSMP3, MixAudioConfigParamsMP3)) - -/** - * MIX_IS_AUDIOCONFIGPARAMSMP3: - * @obj: an object. - * - * Checks if the given object is an instance of #MixAudioConfigParamsMP3 - */ -#define MIX_IS_AUDIOCONFIGPARAMSMP3(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOCONFIGPARAMSMP3)) - -/** - * MIX_AUDIOCONFIGPARAMSMP3_CLASS: - * @klass: class to be type-casted. - * - * Type casting. - */ -#define MIX_AUDIOCONFIGPARAMSMP3_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOCONFIGPARAMSMP3, MixAudioConfigParamsMP3Class)) - -/** - * MIX_IS_AUDIOCONFIGPARAMSMP3_CLASS: - * @klass: a class. - * - * Checks if the given class is #MixAudioConfigParamsMP3Class - */ -#define MIX_IS_AUDIOCONFIGPARAMSMP3_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOCONFIGPARAMSMP3)) - -/** - * MIX_AUDIOCONFIGPARAMSMP3_GET_CLASS: - * @obj: a #MixAudioConfigParams object. - * - * Get the class instance of the object. - */ -#define MIX_AUDIOCONFIGPARAMSMP3_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOCONFIGPARAMSMP3, MixAudioConfigParamsMP3Class)) - -typedef struct _MixAudioConfigParamsMP3 MixAudioConfigParamsMP3; -typedef struct _MixAudioConfigParamsMP3Class MixAudioConfigParamsMP3Class; - -/** - * MixAudioConfigParamsMP3: - * @parent: parent. - * @CRC: CRC. See #MIX_ACP_MP3_CRC - * @MPEG_format: Optional. MPEG format of the MPEG audio. See #MIX_ACP_MP3_MPEG_FORMAT - * @MPEG_layer: Optional. MPEG layer of the MPEG audio. See #MIX_ACP_MP3_MPEG_LAYER - * - * MI-X Audio Parameter object for MP3 Audio. - */ -struct _MixAudioConfigParamsMP3 -{ - /*< public >*/ - MixAudioConfigParams parent; - - /*< public >*/ - /* Audio Format Parameters */ - gboolean CRC; - gint MPEG_format; - gint MPEG_layer; - - /*< private >*/ - void* reserved1; - void* reserved2; - void* reserved3; - void* reserved4; -}; - -/** - * MixAudioConfigParamsMP3Class: - * - * MI-X Audio object class - */ -struct _MixAudioConfigParamsMP3Class -{ - /*< public >*/ - MixAudioConfigParamsClass parent_class; - - /* class members */ -}; - -/** - * mix_acp_mp3_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_acp_mp3_get_type (void); - -/** - * mix_acp_mp3_new: - * @returns: A newly allocated instance of #MixAudioConfigParamsMP3 - * - * Use this method to create a new instance of #MixAudioConfigParamsMP3 - */ -MixAudioConfigParamsMP3 *mix_acp_mp3_new(void); - -/** - * mix_acp_mp3_ref: - * @mix: object to add reference - * @returns: the MixAudioConfigParamsMP3 instance where reference count has been increased. - * - * Add reference count.
- */ -MixAudioConfigParamsMP3 *mix_acp_mp3_ref(MixAudioConfigParamsMP3 *mix); - -/** - * mix_acp_mp3_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -#define mix_acp_mp3_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/* Class Methods */ - -/** - * MIX_ACP_MP3_CRC: - * @obj: #MixAudioConfigParamsMP3 object. - * - * MixAudioConfigParamMP3.CRC accessor. - * - * Optional -*/ -#define MIX_ACP_MP3_CRC(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->CRC) - -/** - * MIX_ACP_MP3_MPEG_FORMAT: - * @obj: #MixAudioConfigParamsMP3 object. - * - * MixAudioConfigParamMP3.MPEG_format accessor. - * - * Supported MPEG format should be 1 or 2. -*/ -#define MIX_ACP_MP3_MPEG_FORMAT(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->MPEG_format) - -/** - * MIX_ACP_MP3_MPEG_LAYER: - * @obj: #MixAudioConfigParamsMP3 object. - * - * MixAudioConfigParamMP3.MPEG_layer accessor. - * - * Supported layer should be 1, 2, or 3. -*/ -#define MIX_ACP_MP3_MPEG_LAYER(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->MPEG_layer) - -#endif /* __MIX_AUDIOCONFIGPARAMSMP3_H__ */ diff --git a/mix_audio/src/mixacpwma.c b/mix_audio/src/mixacpwma.c deleted file mode 100644 index 9e0db82..0000000 --- a/mix_audio/src/mixacpwma.c +++ /dev/null @@ -1,208 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** - * SECTION:mixacpwma - * @short_description: Audio parameters for WMA audio. - * @include: mixacpwma.h - * - * A data object which stores audio specific parameters for WMA. - * - * In Moorestown, only WMA2 is supported. 
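The MP3 object adds only the three fields accessed through the macros above; everything else (channels, sample rate, decode mode) lives on the parent object. A short sketch with illustrative values for an MPEG-1 Layer III stream:

    MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new();

    MIX_ACP_MP3_CRC(mp3)         = FALSE;  /* optional */
    MIX_ACP_MP3_MPEG_FORMAT(mp3) = 1;      /* MPEG-1 (1 or 2 supported) */
    MIX_ACP_MP3_MPEG_LAYER(mp3)  = 3;      /* Layer III (1, 2, or 3 supported) */

    /* ...pass to mix_audio_configure(), then release... */
    mix_acp_mp3_unref(mp3);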
- * - * Additional parameters must be set in the parent object #MixAudioConfigParams - */ - -#include "mixacpwma.h" - -static GType _mix_acp_wma_type = 0; -static MixAudioConfigParamsClass *parent_class = NULL; - -#define _do_init { _mix_acp_wma_type = g_define_type_id; } - -gboolean mix_acp_wma_copy(MixParams* target, const MixParams *src); -MixParams* mix_acp_wma_dup(const MixParams *obj); -gboolean mix_acp_wma_equal(MixParams* first, MixParams *second); -static void mix_acp_wma_finalize(MixParams *obj); - -G_DEFINE_TYPE_WITH_CODE(MixAudioConfigParamsWMA, mix_acp_wma, MIX_TYPE_AUDIOCONFIGPARAMS, _do_init); - -static void mix_acp_wma_init (MixAudioConfigParamsWMA *self) -{ - self->channel_mask = 0; - self->format_tag = 0; - self->block_align = 0; - self->wma_encode_opt = 0; - self->pcm_bit_width = 0; /* source pcm bit width */ - self->wma_version = MIX_AUDIO_WMA_VUNKNOWN; -} - -static void mix_acp_wma_class_init(MixAudioConfigParamsWMAClass *klass) -{ - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixAudioConfigParamsClass *) g_type_class_peek_parent (klass); - - mixparams_class->finalize = mix_acp_wma_finalize; - mixparams_class->copy = (MixParamsCopyFunction)mix_acp_wma_copy; - mixparams_class->dup = (MixParamsDupFunction)mix_acp_wma_dup; - mixparams_class->equal = (MixParamsEqualFunction)mix_acp_wma_equal; -} - -MixAudioConfigParamsWMA *mix_acp_wma_new(void) -{ - MixAudioConfigParamsWMA *ret = (MixAudioConfigParamsWMA *)g_type_create_instance (MIX_TYPE_AUDIOCONFIGPARAMSWMA); - - return ret; -} - -void mix_acp_wma_finalize(MixParams *obj) -{ - /* clean up here. */ - - /* Chain up parent */ - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->finalize) - klass->finalize(obj); -} - -MixAudioConfigParamsWMA *mix_acp_wma_ref(MixAudioConfigParamsWMA *obj) -{ - return (MixAudioConfigParamsWMA*)mix_params_ref(MIX_PARAMS(obj)); -} - -/** - * mix_acp_wma_dup: - * @obj: a #MixAudioConfigParamsWMA object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixParams* mix_acp_wma_dup(const MixParams *obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_AUDIOCONFIGPARAMSWMA(obj)) - { - MixAudioConfigParamsWMA *duplicate = mix_acp_wma_new(); - if (mix_acp_wma_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) - { - ret = MIX_PARAMS(duplicate); - } - else - { - mix_acp_wma_unref(duplicate); - } - } - - return ret; -} - -/** - * mix_acp_wma_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_acp_wma_copy(MixParams* target, const MixParams *src) -{ - if (MIX_IS_AUDIOCONFIGPARAMSWMA(target) && MIX_IS_AUDIOCONFIGPARAMSWMA(src)) - { - MixAudioConfigParamsWMA *t = MIX_AUDIOCONFIGPARAMSWMA(target); - MixAudioConfigParamsWMA *s = MIX_AUDIOCONFIGPARAMSWMA(src); - - t->channel_mask = s->channel_mask; - t->format_tag = s->format_tag; - t->block_align = s->block_align; - t->wma_encode_opt = s->wma_encode_opt; - t->wma_version = s->wma_version; - t->pcm_bit_width = s->pcm_bit_width; - - // Now chainup base class - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->copy) - { - return klass->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src)); - } - else - return TRUE; - } - return FALSE; -} - -/** - * mix_acp_wma_equal: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. 
- * - * Compare the two objects for equality. - */ -gboolean mix_acp_wma_equal(MixParams* first, MixParams *second) -{ - gboolean ret = FALSE; - - if (first && second) - { - if (first == second) return TRUE; - } - else - { - return FALSE; - } - - if (!MIX_IS_AUDIOCONFIGPARAMSWMA(first) || !MIX_IS_AUDIOCONFIGPARAMSWMA(second)) - return FALSE; - - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = klass->equal(first, second); - else - ret = TRUE; - - if (ret) - { - MixAudioConfigParamsWMA *acp1 = MIX_AUDIOCONFIGPARAMSWMA(first); - MixAudioConfigParamsWMA *acp2 = MIX_AUDIOCONFIGPARAMSWMA(second); - - ret = (acp1->channel_mask == acp2->channel_mask) && - (acp1->format_tag == acp2->format_tag) && - (acp1->block_align == acp2->block_align) && - (acp1->wma_encode_opt == acp2->wma_encode_opt) && - (acp1->pcm_bit_width == acp2->pcm_bit_width) && - (acp1->wma_version == acp2->wma_version); - } - - return ret; -} - -MixAudioWMAVersion mix_acp_wma_get_version(MixAudioConfigParamsWMA *obj) -{ - if (obj) - return (obj->wma_version); - else - return MIX_AUDIO_WMA_VUNKNOWN; -} - -MIX_RESULT mix_acp_wma_set_version(MixAudioConfigParamsWMA *obj, MixAudioWMAVersion ver) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (!obj) return MIX_RESULT_NULL_PTR; - - if ((ver > MIX_AUDIO_WMA_VUNKNOWN) && (ver < MIX_AUDIO_WMA_LAST)) - obj->wma_version = ver; - else - ret=MIX_RESULT_INVALID_PARAM; - - return ret; -} - diff --git a/mix_audio/src/mixacpwma.h b/mix_audio/src/mixacpwma.h deleted file mode 100644 index 8c617fd..0000000 --- a/mix_audio/src/mixacpwma.h +++ /dev/null @@ -1,235 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_ACP_WMA_H__ -#define __MIX_ACP_WMA_H__ - - -#include "mixacp.h" - -/** - * MIX_TYPE_AUDIOCONFIGPARAMSWMA: - * - * Get type of class. - */ -#define MIX_TYPE_AUDIOCONFIGPARAMSWMA (mix_acp_wma_get_type ()) - -/** - * MIX_AUDIOCONFIGPARAMSWMA: - * @obj: object to be type-casted. - * - * Type casting. - */ -#define MIX_AUDIOCONFIGPARAMSWMA(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOCONFIGPARAMSWMA, MixAudioConfigParamsWMA)) - -/** - * MIX_IS_AUDIOCONFIGPARAMSWMA: - * @obj: an object. - * - * Checks if the given object is an instance of #MixAudioConfigParamsWMA - */ -#define MIX_IS_AUDIOCONFIGPARAMSWMA(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOCONFIGPARAMSWMA)) - -/** - * MIX_AUDIOCONFIGPARAMSWMA_CLASS: - * @klass: class to be type-casted. - * - * Type casting.
- */ -#define MIX_AUDIOCONFIGPARAMSWMA_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOCONFIGPARAMSWMA, MixAudioConfigParamsWMAClass)) - -/** - * MIX_IS_AUDIOCONFIGPARAMSWMA_CLASS: - * @klass: a class. - * - * Checks if the given class is #MixAudioConfigParamsWMAClass - */ -#define MIX_IS_AUDIOCONFIGPARAMSWMA_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOCONFIGPARAMSWMA)) - -/** - * MIX_AUDIOCONFIGPARAMSWMA_GET_CLASS: - * @obj: a #MixAudioConfigParamsWMA object. - * - * Get the class instance of the object. - */ -#define MIX_AUDIOCONFIGPARAMSWMA_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOCONFIGPARAMSWMA, MixAudioConfigParamsWMAClass)) - -/** - * MixAudioWMAVersion: - * @MIX_AUDIO_WMA_VUNKNOWN: WMA version undefined. - * @MIX_AUDIO_WMA_V9: WMA 9 - * @MIX_AUDIO_WMA_V10: WMA 10 (not supported) - * @MIX_AUDIO_WMA_V10P: WMA 10 Pro (not supported) - * @MIX_AUDIO_WMA_LAST: last entry. - * - * WMA version. - */ -typedef enum { - MIX_AUDIO_WMA_VUNKNOWN, - MIX_AUDIO_WMA_V9, - MIX_AUDIO_WMA_V10, - MIX_AUDIO_WMA_V10P, - MIX_AUDIO_WMA_LAST -} MixAudioWMAVersion; - -typedef struct _MixAudioConfigParamsWMA MixAudioConfigParamsWMA; -typedef struct _MixAudioConfigParamsWMAClass MixAudioConfigParamsWMAClass; - -/** - * MixAudioConfigParamsWMA: - * @parent: parent. - * @channel_mask: Channel Mask. See #MIX_ACP_WMA_CHANNEL_MASK - * @format_tag: Format tag. See #MIX_ACP_WMA_FORMAT_TAG - * @block_align: Block alignment. See #MIX_ACP_WMA_BLOCK_ALIGN - * @wma_encode_opt: Encoder option. See #MIX_ACP_WMA_ENCODE_OPT - * @pcm_bit_width: Source pcm bit width. See #MIX_ACP_WMA_PCM_BIT_WIDTH - * @wma_version: WMA version. See #mix_acp_wma_set_version - * - * MI-X Audio Parameter object - */ -struct _MixAudioConfigParamsWMA -{ - /*< public >*/ - MixAudioConfigParams parent; - - /*< public >*/ - /* Audio Format Parameters */ - guint32 channel_mask; - guint16 format_tag; - guint16 block_align; - guint16 wma_encode_opt;/* Encoder option */ - guint8 pcm_bit_width; /* source pcm bit width */ - MixAudioWMAVersion wma_version; -}; - -/** - * MixAudioConfigParamsWMAClass: - * - * MI-X Audio object class - */ -struct _MixAudioConfigParamsWMAClass -{ - /*< public >*/ - MixAudioConfigParamsClass parent_class; - - /* class members */ -}; - -/** - * mix_acp_wma_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_acp_wma_get_type (void); - -/** - * mix_acp_wma_new: - * @returns: A newly allocated instance of #MixAudioConfigParamsWMA - * - * Use this method to create a new instance of #MixAudioConfigParamsWMA - */ -MixAudioConfigParamsWMA *mix_acp_wma_new(void); - -/** - * mix_acp_wma_ref: - * @mix: object to add reference - * @returns: the MixAudioConfigParamsWMA instance where reference count has been increased. - * - * Add reference count. - */ -MixAudioConfigParamsWMA *mix_acp_wma_ref(MixAudioConfigParamsWMA *mix); - -/** - * mix_acp_wma_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -#define mix_acp_wma_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/** - * MIX_ACP_WMA_CHANNEL_MASK: - * @obj: #MixAudioConfigParamsWMA object - * - * MixAudioConfigParamWMA.channel_mask accessor. - * - * Channel mask must be one of the following: - * - * 4: For single (1) channel output. - * - * 3: For stereo (2) channels output. - * - * Only 1 or 2 output channels are supported.
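A sketch of a WMA 9 configuration using the accessor macros defined just below; block alignment, encoder option, and PCM bit width normally come straight from the ASF header, so the literal values here are placeholders:

    MixAudioConfigParamsWMA *wma = mix_acp_wma_new();

    if (mix_acp_wma_set_version(wma, MIX_AUDIO_WMA_V9) == MIX_RESULT_SUCCESS)
    {
        MIX_ACP_WMA_CHANNEL_MASK(wma)  = 3;       /* 3 = stereo, 4 = mono */
        MIX_ACP_WMA_FORMAT_TAG(wma)    = 0x0161;  /* only tag supported with V9 */
        MIX_ACP_WMA_BLOCK_ALIGN(wma)   = 4096;    /* placeholder: from ASF header */
        MIX_ACP_WMA_ENCODE_OPT(wma)    = 0;       /* placeholder: from ASF header */
        MIX_ACP_WMA_PCM_BIT_WIDTH(wma) = 16;      /* placeholder: from ASF header */
    }

    mix_acp_wma_unref(wma);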
- * -*/ -#define MIX_ACP_WMA_CHANNEL_MASK(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->channel_mask) - -/** - * MIX_ACP_WMA_FORMAT_TAG: - * @obj: #MixAudioConfigParamsWMA object - * - * MixAudioConfigParamWMA.format_tag accessor. - * - * In Moorestown, only value 0x0161 combined with use of #MIX_AUDIO_WMA_V9 is supported. -*/ -#define MIX_ACP_WMA_FORMAT_TAG(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->format_tag) - -/** - * MIX_ACP_WMA_BLOCK_ALIGN: - * @obj: #MixAudioConfigParamsWMA object - * - * MixAudioConfigParamWMA.block_align accessor. - * - * Block alignment indicates packet size. Available from ASF Header. -*/ -#define MIX_ACP_WMA_BLOCK_ALIGN(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->block_align) - -/** - * MIX_ACP_WMA_ENCODE_OPT: - * @obj: #MixAudioConfigParamsWMA object - * - * MixAudioConfigParamWMA.wma_encode_opt accessor. - * - * Encoder option available from ASF header. -*/ -#define MIX_ACP_WMA_ENCODE_OPT(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->wma_encode_opt) - -/** - * MIX_ACP_WMA_PCM_BIT_WIDTH: - * @obj: #MixAudioConfigParamsWMA object - * - * MixAudioConfigParamWMA.pcm_bit_width accessor. - * - * Source pcm bit width available from ASF Header. -*/ -#define MIX_ACP_WMA_PCM_BIT_WIDTH(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->pcm_bit_width) - -/* Class Methods */ -/** - * mix_acp_wma_get_version: - * @obj: #MixAudioConfigParamsWMA object - * @returns: MixAudioWMAVersion - * - * Get WMA Version. -*/ -MixAudioWMAVersion mix_acp_wma_get_version(MixAudioConfigParamsWMA *obj); - -/** - * mix_acp_wma_set_version: - * @obj: #MixAudioConfigParamsWMA object - * @ver: MixAudioWMAVersion to set. - * @returns: MIX_RESULT. - * - * Set WMA Version. - * - * In Moorestown, only #MIX_AUDIO_WMA_V9 is supported -*/ -MIX_RESULT mix_acp_wma_set_version(MixAudioConfigParamsWMA *obj, MixAudioWMAVersion ver); - -#endif /* __MIX_AUDIOCONFIGPARAMSWMA_H__ */ diff --git a/mix_audio/src/mixaip.c b/mix_audio/src/mixaip.c deleted file mode 100644 index 8ee0811..0000000 --- a/mix_audio/src/mixaip.c +++ /dev/null @@ -1,167 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** - * SECTION:mixaip - * @short_description: Initialization parameters object. - * @include: mixacp.h - * - * A data object which stores initialization specific parameters. - * - * Not Implemented in Moorestown. 
- */ - -#include "mixaip.h" - -//static GType _mix_aip_type = 0; -static MixParamsClass *parent_class = NULL; - -// #define _do_init { _mix_aip_type = g_define_type_id; }; -#define _do_init - -gboolean mix_aip_copy(MixParams* target, const MixParams *src); -MixParams* mix_aip_dup(const MixParams *obj); -gboolean mix_aip_equal(MixParams* first, MixParams *second); -static void mix_aip_finalize(MixParams *obj); - -G_DEFINE_TYPE_WITH_CODE(MixAudioInitParams, mix_aip, MIX_TYPE_PARAMS, _do_init ); - -#if 0 -void _mix_aip_initialize (void) -{ - /* the MixParams types need to be class_ref'd once before it can be - * done from multiple threads; - * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ - g_type_class_ref (mix_aip_get_type ()); -} -#endif - -static void mix_aip_init (MixAudioInitParams *self) -{ - self->reserved1 = self->reserved2 = self->reserved3 = self->reserved4 = NULL; -} - -static void mix_aip_class_init(MixAudioInitParamsClass *klass) -{ - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent (klass); - - mixparams_class->finalize = mix_aip_finalize; - mixparams_class->copy = (MixParamsCopyFunction)mix_aip_copy; - mixparams_class->dup = (MixParamsDupFunction)mix_aip_dup; - mixparams_class->equal = (MixParamsEqualFunction)mix_aip_equal; -} - -MixAudioInitParams *mix_aip_new(void) -{ - MixAudioInitParams *ret = (MixAudioInitParams *)g_type_create_instance (MIX_TYPE_AUDIOINITPARAMS); - - return ret; -} - -void mix_aip_finalize(MixParams *obj) -{ - /* clean up here. */ - - /* Chain up parent */ - if (parent_class->finalize) - parent_class->finalize(obj); -} - -MixAudioInitParams *mix_aip_ref(MixAudioInitParams *mix) -{ - return (MixAudioInitParams*)mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_aip_dup: - * @obj: a #MixAudioInitParams object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixParams* mix_aip_dup(const MixParams *obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_AUDIOINITPARAMS(obj)) - { - MixAudioInitParams *duplicate = mix_aip_new(); - if (mix_aip_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) - { - ret = MIX_PARAMS(duplicate); - } - else - { - mix_aip_unref(duplicate); - } - } - - return ret; -} - -/** - * mix_aip_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_aip_copy(MixParams* target, const MixParams *src) -{ - if (MIX_IS_AUDIOINITPARAMS(target) && MIX_IS_AUDIOINITPARAMS(src)) - { - // TODO perform copy. - // - // Now chainup base class - // Get the root class from the cached parent_class object. This cached parent_class object has not be overwritten by this current class. - // Using the cached parent_class object because this_class would have ->copy pointing to this method! - // Cached parent_class contains the class object before it is overwritten by this derive class. - // MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (parent_class->copy) - { - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src)); - } - else - return TRUE; - } - return FALSE; -} - -/** - * mix_aip_equal: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. 
- */ -gboolean mix_aip_equal(MixParams* first, MixParams *second) -{ - gboolean ret = FALSE; - - if (MIX_IS_AUDIOINITPARAMS(first) && MIX_IS_AUDIOINITPARAMS(second)) - { - // TODO: do deep compare when members are added. - // No members to compare yet, so equality is decided by the chained-up parent class. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - } - - return ret; -} diff --git a/mix_audio/src/mixaip.h b/mix_audio/src/mixaip.h deleted file mode 100644 index 613ed54..0000000 --- a/mix_audio/src/mixaip.h +++ /dev/null @@ -1,132 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_AUDIOINITPARAMS_H__ -#define __MIX_AUDIOINITPARAMS_H__ - - -#include - -/** - * MIX_TYPE_AUDIOINITPARAMS: - * - * Get type of class. - */ -#define MIX_TYPE_AUDIOINITPARAMS (mix_aip_get_type ()) - -/** - * MIX_AUDIOINITPARAMS: - * @obj: object to be type-casted. - * - * Type casting. - */ -#define MIX_AUDIOINITPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOINITPARAMS, MixAudioInitParams)) - -/** - * MIX_IS_AUDIOINITPARAMS: - * @obj: an object. - * - * Checks if the given object is an instance of #MixAudioInitParams - */ -#define MIX_IS_AUDIOINITPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOINITPARAMS)) - -/** - * MIX_AUDIOINITPARAMS_CLASS: - * @klass: class to be type-casted. - * - * Type casting. - */ -#define MIX_AUDIOINITPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOINITPARAMS, MixAudioInitParamsClass)) - -/** - * MIX_IS_AUDIOINITPARAMS_CLASS: - * @klass: a class. - * - * Checks if the given class is #MixAudioInitParamsClass - */ -#define MIX_IS_AUDIOINITPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOINITPARAMS)) - -/** - * MIX_AUDIOINITPARAMS_GET_CLASS: - * @obj: a #MixParams object. - * - * Get the class instance of the object. - */ -#define MIX_AUDIOINITPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOINITPARAMS, MixAudioInitParamsClass)) - -typedef struct _MixAudioInitParams MixAudioInitParams; -typedef struct _MixAudioInitParamsClass MixAudioInitParamsClass; - -/** - * MixAudioInitParams: - * @parent: Parent. - * - * #MixAudio initialization parameter object. - */ -struct _MixAudioInitParams -{ - /*< public >*/ - MixParams parent; - - /*< private >*/ - void* reserved1; - void* reserved2; - void* reserved3; - void* reserved4; -}; - -/** - * MixAudioInitParamsClass: - * @parent_class: Parent class.
- *
- * #MixAudio initialization parameter object class structure.
- */
-struct _MixAudioInitParamsClass
-{
-  /*< public >*/
-  MixParamsClass parent_class;
-
-  /* class members */
-};
-
-/**
- * mix_aip_get_type:
- * @returns: type
- *
- * Get the type of the object.
- */
-GType mix_aip_get_type (void);
-
-/**
- * mix_aip_new:
- * @returns: A newly allocated instance of #MixAudioInitParams
- *
- * Use this method to create a new instance of #MixAudioInitParams.
- */
-MixAudioInitParams *mix_aip_new(void);
-
-/**
- * mix_aip_ref:
- * @mix: object to add a reference to
- * @returns: the #MixAudioInitParams instance whose reference count has been increased.
- *
- * Increment the reference count of the object.
- */
-MixAudioInitParams *mix_aip_ref(MixAudioInitParams *mix);
-
-/**
- * mix_aip_unref:
- * @obj: object to unref.
- *
- * Decrement the reference count of the object.
- */
-#define mix_aip_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-/* Class Methods */
-
-#endif /* __MIX_AUDIOINITPARAMS_H__ */
diff --git a/mix_audio/src/mixaudio.c b/mix_audio/src/mixaudio.c
deleted file mode 100644
index 53d61a5..0000000
--- a/mix_audio/src/mixaudio.c
+++ /dev/null
@@ -1,2511 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
-*/
-
-/**
- * SECTION:mixaudio
- * @short_description: Object that supports single-stream playback using the hardware accelerated decoder.
- * @include: mixaudio.h
- *
- * The #MixAudio object provides a thread-safe API for applications and/or multimedia frameworks to take advantage of the Intel Smart Sound Technology(TM) driver for hardware audio decoding and rendering.
- *
- * Each #MixAudio object represents one streaming session with the Intel Smart Sound driver and provides configuration and control of the decoding and playback options.
- *
- * The #MixAudio object also supports integration with the Intel Audio Manager service.
- *
- * An application can utilize the #MixAudio object by calling the following sequence:
- *
- * mix_audio_new() to create a #MixAudio instance.
- * mix_audio_initialize() to allocate Intel Smart Sound Technology resources.
- * mix_audio_configure() to configure stream parameters.
- * mix_audio_decode() can be called repeatedly for decoding and, optionally, rendering.
- * mix_audio_start() is called after the first mix_audio_decode() call to start rendering.
- * mix_audio_stop_drain() is called after the last buffer is passed for decoding with mix_audio_decode().
- * mix_audio_deinitialize() to free resources once playback is complete.
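- *
- * A minimal sketch of this sequence (illustrative only; error checking is
- * omitted, acp stands for a #MixAudioConfigParams the caller has already
- * populated, and iovin/iovincnt/insize are assumed to describe the caller's
- * input buffers):
- * |[
- * MixAudio *audio = mix_audio_new();
- * MixAudioInitParams *aip = mix_aip_new();
- * mix_audio_initialize(audio, MIX_CODING_DECODE, aip, NULL);
- * mix_audio_configure(audio, acp, NULL);
- * mix_audio_decode(audio, iovin, iovincnt, &insize, NULL, 0); // 1st buffer, direct-render
- * mix_audio_start(audio);
- * // ...repeat mix_audio_decode() for the remaining buffers...
- * mix_audio_stop_drain(audio);
- * mix_audio_deinitialize(audio);
- * ]|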
- *
- *
- * Since mix_audio_decode() is a blocking call during playback, the following methods are called in a separate thread to control progress:
- *
- * mix_audio_start()
- * mix_audio_pause()
- * mix_audio_resume()
- * mix_audio_stop_drop()
- *
- */
-
-/**
- * SECTION:mixaudiotypes
- * @title: Mix Audio Types
- * @short_description: Miscellaneous types used by the #MixAudio API.
- * @include: mixaudiotypes.h
- *
- * Miscellaneous types used by the #MixAudio API.
-*/
-
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-
-#include
-#include
-#include
-#include "mixaudio.h"
-#include "mixacpaac.h"
-
-#ifdef AUDIO_MANAGER
-#include "amhelper.h"
-#endif
-
-#ifndef MIXAUDIO_CURRENT
-#define MIXAUDIO_CURRENT 0
-#endif
-#ifndef MIXAUDIO_AGE
-#define MIXAUDIO_AGE 0
-#endif
-
-/* Include this now, but it will change when the driver updates.
-   We would want to build against a kernel dev package if that
-   is available.
-*/
-#include
-#include "intel_sst_ioctl.h"
-#include "sst_proxy.h"
-
-#ifdef G_LOG_DOMAIN
-#undef G_LOG_DOMAIN
-#define G_LOG_DOMAIN ((gchar*)"mixaudio")
-#endif
-
-/**
- * LPE_DEVICE:
- *
- * LPE device location.
- */
-static const char* LPE_DEVICE="/dev/sst";
-/* #define LPE_DEVICE "/dev/lpe" */
-
-#define _LOCK(obj) g_static_rec_mutex_lock(obj);
-#define _UNLOCK(obj) g_static_rec_mutex_unlock(obj);
-
-#define _UNLOCK_RETURN(obj, res) { _UNLOCK(obj); return res; }
-
-typedef enum {
-  MIX_STREAM_PAUSED_DRAINING = MIX_STREAM_LAST,
-  MIX_STREAM_INTERNAL_LAST
-} MixStreamStateInternal;
-
-
-MIX_RESULT mix_audio_initialize_default(MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams);
-MIX_RESULT mix_audio_configure_default(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams);
-MIX_RESULT mix_audio_decode_default(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt);
-MIX_RESULT mix_audio_capture_encode_default(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt);
-MIX_RESULT mix_audio_start_default(MixAudio *mix);
-MIX_RESULT mix_audio_stop_drop_default(MixAudio *mix);
-MIX_RESULT mix_audio_stop_drain_default(MixAudio *mix);
-MIX_RESULT mix_audio_pause_default(MixAudio *mix);
-MIX_RESULT mix_audio_resume_default(MixAudio *mix);
-MIX_RESULT mix_audio_get_timestamp_default(MixAudio *mix, guint64 *msecs);
-MIX_RESULT mix_audio_set_mute_default(MixAudio *mix, gboolean mute);
-MIX_RESULT mix_audio_get_mute_default(MixAudio *mix, gboolean* muted);
-MIX_RESULT mix_audio_get_max_vol_default(MixAudio *mix, gint *maxvol);
-MIX_RESULT mix_audio_get_min_vol_default(MixAudio *mix, gint *minvol);
-MIX_RESULT mix_audio_get_volume_default(MixAudio *mix, gint *currvol, MixVolType type);
-MIX_RESULT mix_audio_set_volume_default(MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype);
-MIX_RESULT mix_audio_deinitialize_default(MixAudio *mix);
-MIX_RESULT mix_audio_get_stream_state_default(MixAudio *mix, MixStreamState *streamState);
-MIX_RESULT mix_audio_get_state_default(MixAudio *mix, MixState *state);
-MIX_RESULT mix_audio_get_config_default(MixAudio *mix, MixAudioConfigParams **audioconfigparams);
-
-static gboolean g_IAM_available = FALSE;
-MIX_RESULT mix_audio_am_unregister(MixAudio *mix, MixAudioConfigParams *audioconfigparams);
-MIX_RESULT mix_audio_am_register(MixAudio *mix, MixAudioConfigParams *audioconfigparams);
-MIX_RESULT mix_audio_AM_Change(MixAudioConfigParams *oldparams, MixAudioConfigParams *newparams);
-static MIX_RESULT
mix_audio_verify_params(MixAudio *mix, const MixAudioConfigParams *audioconfigparams); - -static void mix_audio_finalize(GObject *obj); -G_DEFINE_TYPE (MixAudio, mix_audio, G_TYPE_OBJECT); - -static gboolean has_FW_INFO = FALSE; -static struct snd_sst_fw_info cur_FW_INFO = {{0}}; - -static MIX_RESULT mix_audio_FW_INFO(MixAudio *mix); -static MIX_RESULT mix_audio_SST_SET_PARAMS(MixAudio *mix, MixAudioConfigParams *params); -static MIX_RESULT mix_audio_SST_writev(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize); -static MIX_RESULT mix_audio_SST_STREAM_DECODE(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt); -static void mix_audio_debug_dump(MixAudio *mix); -static MIX_RESULT mix_audio_is_stream_changed(MixAudio *mix, MixAudioConfigParams *new_params, gboolean *pChanged); - -static guint g_log_handler=0; -static void mix_audio_log(const gchar *log_domain, GLogLevelFlags log_level, const gchar *message, gpointer user_data); - -/** - * mix_acp_print_params: - * @obj: TBD - * - * This method is to print acp param. It is a hidden implementation within MixAudioConfigParams. -*/ -void mix_acp_print_params(MixAudioConfigParams *obj); - -static void mix_audio_init (MixAudio *self) -{ - self->streamID = 0; // TODO: Find out the invalid value for stream ID when integrates with IAM. - self->amStreamID = 0; // TODO: as above - self->streamState = MIX_STREAM_NULL; - self->encoding = NULL; - self->fileDescriptor = -1; - self->state = MIX_STATE_UNINITIALIZED; - self->codecMode = MIX_CODING_INVALID; - self->am_registered = FALSE; - self->stream_muted = FALSE; - - /* private member initialization */ - g_static_rec_mutex_init (&self->streamlock); - g_static_rec_mutex_init (&self->controllock); - - self->audioconfigparams = NULL; - self->deviceState = MIX_AUDIO_DEV_CLOSED; - -#ifdef LPESTUB - g_message("MixAudio running in stub mode!"); - self->ts_last = 0; - self->ts_elapsed = 0; -#endif - - self->bytes_written=0; - -} - -void _mix_aip_initialize (void); - -static void mix_audio_class_init (MixAudioClass *klass) -{ - GObjectClass *gobject_class = (GObjectClass*)klass; - - gobject_class->finalize = mix_audio_finalize; - - // Init thread before any threads/sync object are used. - if (!g_thread_supported ()) g_thread_init (NULL); - - /* Init some global vars */ - g_IAM_available = FALSE; - - // base implementations - klass->initialize = mix_audio_initialize_default; - klass->configure = mix_audio_configure_default; - klass->decode = mix_audio_decode_default; - klass->capture_encode = mix_audio_capture_encode_default; - klass->start = mix_audio_start_default; - klass->stop_drop = mix_audio_stop_drop_default; - klass->stop_drain = mix_audio_stop_drain_default; - klass->pause = mix_audio_pause_default; - klass->resume = mix_audio_resume_default; - klass->get_timestamp = mix_audio_get_timestamp_default; - klass->set_mute = mix_audio_set_mute_default; - klass->get_mute = mix_audio_get_mute_default; - klass->get_max_vol = mix_audio_get_max_vol_default; - klass->get_min_vol = mix_audio_get_min_vol_default; - klass->get_volume = mix_audio_get_volume_default; - klass->set_volume = mix_audio_set_volume_default; - klass->deinitialize = mix_audio_deinitialize_default; - klass->get_stream_state = mix_audio_get_stream_state_default; - klass->get_state = mix_audio_get_state_default; - klass->get_config = mix_audio_get_config_default; - - // Set log handler... 
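-  // For example, launching the application with MIX_AUDIO_DEBUG=0x3f
-  // (or the decimal form, 63) would enable all six log levels decoded
-  // by mix_audio_log() below. (Illustrative value, not a requirement.)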
- if (!g_log_handler)
- {
-   // Get Environment variable
-   // See mix_audio_log for details
-   const gchar* loglevel = g_getenv("MIX_AUDIO_DEBUG");
-   guint64 ll = 0;
-   if (loglevel)
-   {
-     if (g_strstr_len(loglevel,-1, "0x") == loglevel)
-     {
-       // Hex string
-       ll = g_ascii_strtoull(loglevel+2, NULL, 16);
-     }
-     else
-     {
-       // Decimal string
-       ll = g_ascii_strtoull(loglevel, NULL, 10);
-     }
-   }
-   guint32 mask = (guint32)ll;
-   g_log_handler = g_log_set_handler(G_LOG_DOMAIN, 0xffffffff, mix_audio_log, (gpointer)mask);
-/*
-   g_debug("DEBUG Enabled");
-   g_log(G_LOG_DOMAIN, G_LOG_LEVEL_INFO, "%s", "LOG Enabled");
-   g_message("MESSAGE Enabled");
-   g_warning("WARNING Enabled");
-   g_critical("CRITICAL Enabled");
-   g_error("ERROR Enabled");
-*/
- }
-}
-
-static void mix_audio_log(const gchar *log_domain, GLogLevelFlags log_level, const gchar *message, gpointer user_data)
-{
-  // Log the message based on a mask.
-  // The mask can be read from the MIX_AUDIO_DEBUG environment variable; it is a bit
-  // mask selecting which messages to print. The lsb (bit 0) is "ERROR", with
-  // increasing values as described in the GLogLevelFlags structure. Note that the
-  // lsb in GLogLevelFlags is not "ERROR"; here we shift log_level to skip the first
-  // 2 values in GLogLevelFlags, making ERROR align to the lsb.
-  static const gchar* lognames[] = {"error", "critical", "warning", "message", "log", "debug"};
-  guint32 mask = (guint32)user_data & ((G_LOG_LEVEL_MASK & log_level) >> 2);
-  gint index = 0;
-
-  GTimeVal t = {0};
-
-  // convert bit mask back to index.
-  index = ffs(mask) - 1;
-
-  if ((index<0) || (index >= (sizeof(lognames)/sizeof(lognames[0])))) return;
-
-  g_get_current_time(&t);
-  g_printerr("%" G_GUINT64_FORMAT ":%s-%s: %s\n",
-    ((guint64)1000000 * t.tv_sec + (guint64)t.tv_usec),
-    log_domain?log_domain:G_LOG_DOMAIN,
-    lognames[index],
-    message?message:"NULL");
-}
-
-MixAudio *mix_audio_new(void)
-{
-  MixAudio *ret = g_object_new(MIX_TYPE_AUDIO, NULL);
-
-  return ret;
-}
-
-void mix_audio_finalize(GObject *obj)
-{
-  /* clean up here. */
-  MixAudio *mix = MIX_AUDIO(obj);
-
-  if (G_UNLIKELY(!mix)) return;
-
-  /*
-    We are not going to check the thread lock anymore in this method.
-    If a thread is accessing the object, it had better still hold a ref on this
-    object, and in that case this method won't be called.
-
-    The application has to risk an access violation if it calls the methods in
-    a thread without actually holding a reference.
-  */
-
-  g_debug("_finalized(). bytes written=%" G_GUINT64_FORMAT, mix->bytes_written);
-
-  g_static_rec_mutex_free (&mix->streamlock);
-  g_static_rec_mutex_free (&mix->controllock);
-
-  if (mix->audioconfigparams)
-  {
-    mix_acp_unref(mix->audioconfigparams);
-    mix->audioconfigparams = NULL;
-  }
-}
-
-MixAudio *mix_audio_ref(MixAudio *mix)
-{
-  if (G_UNLIKELY(!mix)) return NULL;
-
-  return (MixAudio*)g_object_ref(G_OBJECT(mix));
-}
-
-MIX_RESULT mix_audio_initialize_default(MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams)
-{
-  // API version 0.79. 1/19/2009
-  MIX_RESULT ret = MIX_RESULT_SUCCESS;
-
-  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
-
-  if ((mode <= MIX_CODING_INVALID) || (mode >= MIX_CODING_LAST)) return MIX_RESULT_INVALID_PARAM;
-
-  // _initialize must be called with both thread locks held, so no other operation is allowed.
-
-  // Try to lock the stream lock. If this fails, a pending _decode/_encode/_drain is ongoing.
-  if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE;
-
-  // also lock the control thread lock.
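-  // (The lock order here, streamlock first and then controllock, matches
-  // _configure() and _deinitialize(); keeping the order consistent avoids
-  // deadlock between the stream and control paths.)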
- _LOCK(&mix->controllock); - - if (mix->state != MIX_STATE_UNINITIALIZED) - ret = MIX_RESULT_ALREADY_INIT; - - if (!MIX_SUCCEEDED(ret)) - { - _UNLOCK(&mix->controllock); - _UNLOCK(&mix->streamlock); - return ret; - } - - // Open device. Same flags to open for decode and encode? -#ifdef LPESTUB - { - gchar *filename = NULL; - GError *err = NULL; - const gchar* fn = NULL; - fn = g_getenv("MIX_AUDIO_OUTPUT"); - if (fn) mix->fileDescriptor = open(fn, O_RDWR|O_CREAT, S_IRUSR|S_IWUSR); - if (mix->fileDescriptor == -1) - { - mix->fileDescriptor = g_file_open_tmp ("mixaudio.XXXXXX", &filename, &err); - if (err) - { - g_warning("Oops, cannot open temp file: Error message: %s", err->message); - } - else - { - g_debug("Opening %s instead of %s as output data file.\n", filename, LPE_DEVICE); - } - } - else - { - g_debug("Opening %s as output data file.\n", fn); - } - if (filename) g_free(filename); - } -#else - g_debug("Calling open(%s)\n", LPE_DEVICE); - mix->fileDescriptor = open(LPE_DEVICE, O_RDWR); - g_debug("open returned %d", mix->fileDescriptor); -#endif - - if (mix->fileDescriptor != -1) - { - mix->codecMode = mode; - mix->state = MIX_STATE_INITIALIZED; - ret = MIX_RESULT_SUCCESS; - mix->deviceState = MIX_AUDIO_DEV_OPENED; - } - else - { - ret = MIX_RESULT_LPE_NOTAVAIL; - g_warning("open() failed. Error(0x%08x): %s", errno, strerror(errno)); - } - - _UNLOCK(&mix->controllock); - _UNLOCK(&mix->streamlock); - - return ret; -} - -gboolean mix_audio_am_is_available(void) -{ - // return FALSE for now until IAM is available for integration. - // TODO: Check IAM - return FALSE; -} - -gboolean mix_audio_base_am_is_enabled(MixAudio *mix) -{ - // TODO: Check IAM usage - return FALSE; -} - -/** - * mix_audio_SST_SET_PARAMS: - * @mix: #MixAudio object. - * @params: Audio parameter used to configure SST. - * @returns: #MIX_RESULT indicating configuration result. - * - * This method setup up a SST stream with the given parameters. Note that even though - * this method could succeed and SST stream is setup properly, client may still not be able - * to use the session if other condition are met, such as a successfully set-up IAM, if used. - */ -MIX_RESULT mix_audio_SST_SET_PARAMS(MixAudio *mix, MixAudioConfigParams *params) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - if (mix->state == MIX_STATE_UNINITIALIZED) return MIX_RESULT_NOT_INIT; - - if (!MIX_IS_AUDIOCONFIGPARAMS(params)) return MIX_RESULT_INVALID_PARAM; - - mix_acp_print_params(params); - - struct snd_sst_params sst_params = {0}; - - gboolean converted = mix_sst_params_convert(params, &sst_params); - - if (converted) - { - // Setup the driver structure - // We are assuming the configstream will always be called after open so the codec mode - // should already been setup. - sst_params.stream_id = mix->streamID; - // We are not checking the codecMODE here for out-of-range...assuming we check that - // during init... - if (mix->codecMode == MIX_CODING_ENCODE) - sst_params.ops = STREAM_OPS_CAPTURE; - else sst_params.ops = STREAM_OPS_PLAYBACK; - - // hard-coded to support music only. - sst_params.stream_type = 0x0; // stream_type 0x00 is STREAM_TYPE_MUSIC per SST doc. - - // SET_PARAMS - int retVal = 0; - -#ifdef LPESTUB - // Not calling the ioctl -#else - g_debug("Calling SNDRV_SST_STREAM_SET_PARAMS. 
fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_SET_PARAMS, &sst_params); - g_debug("_SET_PARAMS returned %d", retVal); -#endif - - if (!retVal) - { - // IOCTL success. - switch (sst_params.result) - { - // Please refers to SST API doc for return value definition. - case 5: - g_debug("SET_PARAMS succeeded with Stream Parameter Modified."); - case 0: - // driver says ok, too. - ret = MIX_RESULT_SUCCESS; - mix->deviceState = MIX_AUDIO_DEV_ALLOCATED; - mix->streamState = MIX_STREAM_STOPPED; - mix->streamID = sst_params.stream_id; - // clear old params - if (MIX_IS_AUDIOCONFIGPARAMS(mix->audioconfigparams)) - { - mix_acp_unref(mix->audioconfigparams); - mix->audioconfigparams=NULL; - } - // replace with new one. - mix->audioconfigparams = MIX_AUDIOCONFIGPARAMS(mix_params_dup(MIX_PARAMS(params))); - // Note: do not set mix->state here because this state may rely op other than SET_PARAMS - g_debug("SET_PARAMS succeeded streamID=%d.", mix->streamID); - break; - case 1: - ret = MIX_RESULT_STREAM_NOTAVAIL; - g_debug("SET_PARAMS failed STREAM not available."); - break; - case 2: - ret = MIX_RESULT_CODEC_NOTAVAIL; - g_debug("SET_PARAMS failed CODEC not available."); - break; - case 3: - ret = MIX_RESULT_CODEC_NOTSUPPORTED; - g_debug("SET_PARAMS failed CODEC not supported."); - break; - case 4: - ret = MIX_RESULT_INVALID_PARAM; - g_debug("SET_PARAMS failed Invalid Stream Parameters."); - break; - case 6: - g_debug("SET_PARAMS failed Invalid Stream ID."); - default: - ret = MIX_RESULT_FAIL; - g_critical("SET_PARAMS failed unexpectedly. Result code: %u\n", sst_params.result); - break; - } - } - else - { - // log errors - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("Failed to SET_PARAMS. errno:0x%08x. %s\n", errno, strerror(errno)); - } - } - else - { - ret = MIX_RESULT_INVALID_PARAM; - } - - return ret; -} - -MIX_RESULT mix_audio_get_state_default(MixAudio *mix, MixState *state) -{ - // API version 0.79. 
1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (G_UNLIKELY(mix == NULL)) return MIX_RESULT_NULL_PTR; - if (state == NULL) return MIX_RESULT_NULL_PTR; - - *state = mix->state; - - return ret; -} - -MIX_RESULT mix_audio_decode_default(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt) -{ - MIX_RESULT ret = MIX_RESULT_FAIL; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE; - - if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->streamlock, MIX_RESULT_WRONG_STATE); - - if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DIRECTRENDER) - ret = mix_audio_SST_writev(mix, iovin, iovincnt, insize); - else - ret = mix_audio_SST_STREAM_DECODE(mix, iovin, iovincnt, insize, iovout, iovoutcnt); - - _UNLOCK(&mix->streamlock); - - return ret; -} - -MIX_RESULT mix_audio_deinitialize_default(MixAudio *mix) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE; - -#ifdef AUDIO_MANAGER - if (mix->amStreamID && (lpe_stream_unregister(mix->amStreamID) < 0)) { - g_debug("lpe_stream_unregister failed\n"); - //return MIX_RESULT_FAIL; // TODO: not sure what to do here - } -#endif - - _LOCK(&mix->controllock); - - if (mix->state == MIX_STATE_UNINITIALIZED) - ret = MIX_RESULT_SUCCESS; - else if ((mix->streamState != MIX_STREAM_STOPPED) && (mix->streamState != MIX_STREAM_NULL)) - ret = MIX_RESULT_WRONG_STATE; - else - { - if (mix->fileDescriptor != -1) - { - g_debug("Closing fd=%d\n", mix->fileDescriptor); - close(mix->fileDescriptor); - mix->fileDescriptor = -1; - mix->deviceState = MIX_AUDIO_DEV_CLOSED; - } - mix->state = MIX_STATE_UNINITIALIZED; - mix->stream_muted = FALSE; - } - - mix->bytes_written = 0; - - _UNLOCK(&mix->controllock); - _UNLOCK(&mix->streamlock); - - return ret; -} - - -MIX_RESULT mix_audio_stop_drop_default(MixAudio *mix) -{ - // API version 0.79. 1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - int retVal = 0; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - _LOCK(&mix->controllock); - - // ret should be SUCCESS when this sequence starts... - if (mix->state != MIX_STATE_CONFIGURED) - { - // Not allowing control operation if it is not configured. - ret = MIX_RESULT_NOT_CONFIGURED; - } - else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) - { - // Not allowing control operation if it is configured as DNR. - ret = MIX_RESULT_WRONGMODE; - } - - if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - - // Will call DROP even if we are already stopped. It is needed to unblock any pending readv()/write() call that is made after the last STOP_x and before the next START. -#ifdef LPESTUB - // Not calling ioctl. -#else - g_debug("Calling SNDRV_SST_STREAM_DROP. fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_DROP); - g_debug("_DROP returned %d", retVal); -#endif - - if (retVal != 0) - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_warning("Failed to stop stream. Error(0x%08x): %s", errno, strerror(errno)); - } - else - { - mix->streamState = MIX_STREAM_STOPPED; - ret = MIX_RESULT_SUCCESS; - } - - _UNLOCK(&mix->controllock); - - return ret; -} - -MIX_RESULT mix_audio_stop_drain_default(MixAudio *mix) -{ - // API version 0.79. 
1/22/2009
-  MIX_RESULT ret = MIX_RESULT_SUCCESS;
-  int retVal = 0;
-  gboolean doDrain = FALSE;
-
-  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
-
-  // No need to lock to check vars that won't be changed in this function.
-
-  // If the stream lock cannot be obtained, a _decode/_encode is pending and we will not proceed.
-  if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE;
-
-  _LOCK(&mix->controllock);
-
-  // Check disallowed conditions.
-
-  // ret should be SUCCESS when this sequence starts...
-  if (mix->codecMode != MIX_CODING_DECODE)
-  {
-    // Not allowing control operations unless configured for decode.
-    ret = MIX_RESULT_WRONGMODE;
-  }
-  else if (mix->state != MIX_STATE_CONFIGURED)
-  {
-    // Not allowing control operations if it is not configured.
-    ret = MIX_RESULT_NOT_CONFIGURED;
-  }
-  else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN)
-  {
-    // Not allowing control operations if it is configured as DNR.
-    ret = MIX_RESULT_WRONGMODE;
-  }
-
-  // Now check if we need to exit.
-  if (!MIX_SUCCEEDED(ret))
-  {
-    _UNLOCK(&mix->controllock);
-    _UNLOCK(&mix->streamlock);
-    return ret;
-  }
-
-  if (mix->streamState == MIX_STREAM_STOPPED)
-    // no need to drain; we are already stopped.
-    ret = MIX_RESULT_SUCCESS;
-  else if ((mix->streamState == MIX_STREAM_DRAINING) || mix->streamState == MIX_STREAM_PAUSED_DRAINING)
-    // Not allowed if we are already draining or PAUSED in the draining state.
-    ret = MIX_RESULT_WRONG_STATE;
-  else
-  {
-    doDrain = TRUE;
-    g_debug("MIX stream needs DRAINING");
-    mix->streamState = MIX_STREAM_DRAINING;
-    // State is set to MIX_STREAM_DRAINING; other operations that may change the
-    // streamState have to be careful when handling a stream in this state.
-  }
-
-  // release the control lock. we only hold the stream lock during this blocking DRAIN call.
-  _UNLOCK(&mix->controllock);
-
-
-  if (doDrain)
-  {
-    // Calling the blocking DRAIN holding just the stream lock, without the control lock.
-
-#ifdef LPESTUB
-
-#else
-    g_debug("Calling SNDRV_SST_STREAM_DRAIN fd=%d", mix->fileDescriptor);
-    retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_DRAIN);
-    g_debug("_DRAIN returned %d", retVal);
-#endif
-
-    // obtain the control lock and update state.
-    _LOCK(&mix->controllock);
-
-    if (retVal != 0)
-    {
-      if (mix->streamState != MIX_STREAM_STOPPED)
-      {
-        // DRAIN can fail if DROP is called during DRAIN.
-        // Any state resulting from a failed DRAIN is an error, except STOPPED.
-        ret = MIX_RESULT_SYSTEM_ERRNO;
-        g_debug("Failed to drain stream. Error(0x%08x): %s", errno, strerror(errno));
-      }
-    }
-    else
-    {
-      if ((mix->streamState != MIX_STREAM_DRAINING) &&
-          (mix->streamState != MIX_STREAM_STOPPED))
-      {
-        // State changed while in DRAINING. This should not be allowed and is a bug.
-        g_warning("MIX Internal state error! DRAIN state(%u) changed!",mix->streamState);
-        ret = MIX_RESULT_FAIL;
-      }
-      else
-      {
-        mix->streamState = MIX_STREAM_STOPPED;
-        ret = MIX_RESULT_SUCCESS;
-      }
-    }
-
-    _UNLOCK(&mix->controllock);
-  }
-
-  _UNLOCK(&mix->streamlock);
-
-  return ret;
-}
-
-MIX_RESULT mix_audio_start_default(MixAudio *mix)
-{
-  // API version 0.79. 1/22/2009
-  MIX_RESULT ret = MIX_RESULT_FAIL;
-
-  if (G_UNLIKELY(mix==NULL)) return MIX_RESULT_NULL_PTR;
-
-  _LOCK(&mix->controllock);
-
-  if (mix->state != MIX_STATE_CONFIGURED)
-    _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED);
-
-  // Not allowed if in DNR mode.
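-  // ("DNR" appears to refer to MIX_DECODE_DECODERETURN mode, where decoded
-  // data is returned to the caller instead of being rendered by SST, so
-  // there is no render stream to start.)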
- if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) - _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_WRONGMODE); - - // Note this impl return success even if stream is already started. - switch (mix->streamState) - { - case MIX_STREAM_PLAYING: - case MIX_STREAM_PAUSED: - case MIX_STREAM_PAUSED_DRAINING: - ret = MIX_RESULT_SUCCESS; - break; - case MIX_STREAM_STOPPED: - { - int retVal = 0; -#ifdef LPESTUB - // Not calling ioctl. -#else - g_debug("Calling SNDRV_SST_STREAM_START. fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_START); - g_debug("_START returned %d", retVal); -#endif - - if (retVal != 0) - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("Fail to START. Error:0x%08x. Stream state unchanged.", errno); - mix_audio_debug_dump(mix); - } - else - { - mix->streamState = MIX_STREAM_PLAYING; - ret = MIX_RESULT_SUCCESS; - } - } - break; - case MIX_STREAM_DRAINING: - default: - ret = MIX_RESULT_WRONG_STATE; - break; - } - - _UNLOCK(&mix->controllock); - -#ifdef LPESTUB - if (MIX_SUCCEEDED(ret)) - { - if (mix->ts_last == 0) - { - GTimeVal tval = {0}; - g_get_current_time(&tval); - mix->ts_last = 1000ll * tval.tv_sec + tval.tv_usec / 1000; - } - } -#endif - return ret; -} - -MIX_RESULT mix_audio_get_version(guint* major, guint *minor) -{ - // simulate the way libtool generate version so the number synchronize with the filename. - if (major) - *major = MIXAUDIO_CURRENT-MIXAUDIO_AGE; - - if (minor) - *minor = MIXAUDIO_AGE; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_audio_configure_default(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams) -{ - // API version 0.79. 1/19/2009 - - MIX_RESULT ret = MIX_RESULT_FAIL; - gboolean changed = FALSE; - MixParams *new_params = NULL; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - g_message("_configure() starts."); - - // input param checks - if (!MIX_IS_AUDIOCONFIGPARAMS(audioconfigparams)) return MIX_RESULT_NOT_ACP; - if (MIX_ACP_DECODEMODE(audioconfigparams) >= MIX_DECODE_LAST) return MIX_RESULT_INVALID_DECODE_MODE; - if (!mix_acp_is_streamname_valid(audioconfigparams)) return MIX_RESULT_INVALID_STREAM_NAME; - - // dup a copy. we will need it when configure is successful. - // don't want to find out we can't dup it only after everything is set. - new_params = mix_params_dup(MIX_PARAMS(audioconfigparams)); - if (!MIX_IS_AUDIOCONFIGPARAMS(new_params)) - { - if (MIX_IS_PARAMS(new_params)) mix_params_unref(new_params); - g_error("Failed to duplicate input MixAudioConfigParams!"); - return MIX_RESULT_FAIL; - } - - // If we cannot lock stream thread, data is flowing and we can't configure. - if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE; - - _LOCK(&mix->controllock); - - // Check all unallowed conditions - if (mix->state == MIX_STATE_UNINITIALIZED) - { - ret = MIX_RESULT_NOT_INIT; // Will not allowed if the state is still UNINITIALIZED - goto _configure_done; - } - - if ((mix->streamState != MIX_STREAM_STOPPED) && (mix->streamState != MIX_STREAM_NULL)) - { - ret = MIX_RESULT_WRONG_STATE; - goto _configure_done; - } - - ret = mix_audio_verify_params(mix, audioconfigparams); - if (!MIX_SUCCEEDED(ret)) goto _configure_done; - - // now configure stream. 
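-  // The steps below: unregister from the Audio Manager, check whether the
-  // stream parameters actually changed, call SET_PARAMS only on a change,
-  // then re-register with the Audio Manager.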
- -#ifdef AUDIO_MANAGER - ret = mix_audio_am_unregister(mix, audioconfigparams); -#else - ret = MIX_RESULT_SUCCESS; -#endif - if (!MIX_SUCCEEDED(ret)) goto _configure_failed; - - ret = mix_audio_is_stream_changed(mix, audioconfigparams, &changed); - if (!MIX_SUCCEEDED(ret)) goto _configure_failed; - - if (changed) - { - ret = mix_audio_SST_SET_PARAMS(mix, audioconfigparams); - if (!MIX_SUCCEEDED(ret)) goto _configure_failed; - } - else - { - g_message("No stream change is detected. Not calling SET_PARAMS."); - } - -#ifdef AUDIO_MANAGER - ret = mix_audio_am_register(mix, audioconfigparams); -#else - ret = MIX_RESULT_SUCCESS; -#endif - if (!MIX_SUCCEEDED(ret)) goto _configure_failed; - - ret = MIX_RESULT_SUCCESS; - -_configure_failed: - - if (mix->audioconfigparams) - { - mix_acp_unref(mix->audioconfigparams); - mix->audioconfigparams=NULL; - } - - if (MIX_SUCCEEDED(ret)) - { - mix->state = MIX_STATE_CONFIGURED; - mix->audioconfigparams = MIX_AUDIOCONFIGPARAMS(new_params); - new_params = NULL; - } - else - { - // attempt to configure has failed. Revert state back to INITIALIZED only. - mix->state = MIX_STATE_INITIALIZED; - mix->streamState = MIX_STREAM_STOPPED; - // No need to un-set SST PARAM. not supported really. - } - -_configure_done: - - _UNLOCK(&mix->controllock); - _UNLOCK(&mix->streamlock); - - if (new_params) - { - mix_params_unref(new_params); - } - - g_message("_configure() done returning 0x%08x.", ret); - - return ret; -} - -MIX_RESULT mix_audio_get_timestamp_default(MixAudio *mix, guint64 *msecs) -{ - // API version 0.79. 1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - unsigned long long ts = 0; - int retVal = 0; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - if (!msecs) return MIX_RESULT_NULL_PTR; - - _LOCK(&mix->controllock); - - // ret should be SUCCESS when this sequence starts... - if (mix->codecMode != MIX_CODING_DECODE) - ret = MIX_RESULT_WRONGMODE; - else if (mix->state != MIX_STATE_CONFIGURED) - ret = MIX_RESULT_NOT_CONFIGURED; - else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) - ret = MIX_RESULT_WRONGMODE; - - // Now check if we need to exit. - if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - - -#ifdef LPESTUB - // For stubbing, just get system clock. - if (MIX_ACP_BITRATE(mix->audioconfigparams) > 0) - { - // use bytes_written and bitrate - // to get times in msec. - ts = mix->bytes_written * 8000 / MIX_ACP_BITRATE(mix->audioconfigparams); - } - else if (mix->ts_last) - { - GTimeVal tval = {0}; - g_get_current_time(&tval); - ts = 1000ll * tval.tv_sec + tval.tv_usec / 1000; - ts -= mix->ts_last; - ts += mix->ts_elapsed; - } - else - { - ts = 0; - } -#else - g_debug("Calling _GET_TSTAMP. fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_GET_TSTAMP, &ts); - g_debug("_GET_TSTAMP returned %d. timestamp=%" G_GUINT64_FORMAT, retVal, ts); -#endif - - if (retVal != 0) - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("_GET_TSTAMP failed. 
Error(0x%08x): %s", errno, strerror(errno)); - mix_audio_debug_dump(mix); - } - else - { - *msecs = ts; - } - - _UNLOCK(&mix->controllock); - - return ret; -} - -#ifdef AUDIO_MANAGER -gboolean mix_audio_AM_Change(MixAudioConfigParams *oldparams, MixAudioConfigParams *newparams) -{ - if (strcmp(oldparams->stream_name, newparams->stream_name) == 0) { - return FALSE; - } - - return TRUE; -} - -MIX_RESULT mix_audio_am_unregister(MixAudio *mix, MixAudioConfigParams *audioconfigparams) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - if (mix->am_registered && MIX_IS_AUDIOCONFIGPARAMS(mix->audioconfigparams) && MIX_IS_AUDIOCONFIGPARAMS(audioconfigparams)) - { - // we have 2 params. let's check - if ((MIX_ACP_DECODEMODE(mix->audioconfigparams) != MIX_ACP_DECODEMODE(audioconfigparams)) || - mix_audio_AM_Change(mix->audioconfigparams, audioconfigparams)) //TODO: add checking for SST change - { - // decode mode change. - if (mix->amStreamID > 0) { - if (lpe_stream_unregister(mix->amStreamID) != 0) { - return MIX_RESULT_AM_UNREGISTER_FAIL; - } - mix->amStreamID = 0; - mix->am_registered = FALSE; - } - } - } - - return ret; -} - -MIX_RESULT mix_audio_am_register(MixAudio *mix, MixAudioConfigParams *audioconfigparams) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - gint32 codec_mode = -1; - - if (mix->codecMode == MIX_CODING_DECODE) - codec_mode = 0; - else if (mix->codecMode == MIX_CODING_ENCODE) - codec_mode = 1; - else - return MIX_RESULT_FAIL; // TODO: what to do when fail? - -#ifdef AUDIO_MANAGER - if (audioconfigparams->stream_name == NULL) - return MIX_RESULT_FAIL; - -// if AM is enable, and not_registered, then register - if ((audioconfigparams->audio_manager == MIX_AUDIOMANAGER_INTELAUDIOMANAGER) && !mix->am_registered) { -// if (!mix->am_registered) { // TODO: remove this and uncomment line above - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "audio_manager=MIX_AUDIOMANAGER_INTELAUDIOMANAGER and !am_registered\n"); - gint32 amStreamID = lpe_stream_register(mix->streamID, "music", audioconfigparams->stream_name, codec_mode); - - if (amStreamID == -1){ - mix->amStreamID = 0; - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "AM register failed: general error\n"); - return MIX_RESULT_AM_REGISTER_FAIL; - } - else if (amStreamID == -2) { // -2: Direct render not avail, see AM spec - mix->amStreamID = 0; - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "AM register failed: Direct render not available\n"); - return MIX_RESULT_DIRECT_NOTAVAIL; - } - mix->am_registered = TRUE; - mix->amStreamID = amStreamID; - } -#endif - - return ret; -} -#endif /* AUDIO_MANAGER */ - -MIX_RESULT mix_audio_capture_encode_default(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt) -{ - struct iovec *vec; - gint bytes_read; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - // TODO: set count limit - if (iovoutcnt < 1) { - return MIX_RESULT_INVALID_COUNT; - } - - if (iovout == NULL) - return MIX_RESULT_NULL_PTR; - - vec = (struct iovec *) g_alloca(sizeof(struct iovec) * iovoutcnt); - if (!vec) return MIX_RESULT_NO_MEMORY; - - gint i; - for (i=0; i < iovoutcnt; i++) - { - vec[i].iov_base = iovout[i].data; - vec[i].iov_len = iovout[i].buffer_size; - iovout[i].data_size = 0; - } - - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "begin readv()\n"); - bytes_read = readv(mix->fileDescriptor, vec, iovoutcnt); - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "end readv(), return: %d\n", bytes_read); - if (bytes_read < 1) { - 
mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_ERROR, "return: %d\n", bytes_read); - return MIX_RESULT_FAIL; - } - - i=0; - while (bytes_read > 0) - { - if (bytes_read > iovout[i].buffer_size) { - iovout[i].data_size = iovout[i].buffer_size; - bytes_read = bytes_read - iovout[i].buffer_size; - } - else { - iovout[i].data_size = bytes_read; - bytes_read = 0; - } - i++; - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_audio_get_max_vol_default(MixAudio *mix, gint *maxvol) -{ - // API version 0.79. 1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - if (!maxvol) return MIX_RESULT_NULL_PTR; - - _LOCK(&mix->controllock); - - if (!has_FW_INFO) - { - ret = mix_audio_FW_INFO(mix); - } - - if (MIX_SUCCEEDED(ret)) - { - *maxvol = (gint)cur_FW_INFO.pop_info.max_vol; - } - - _UNLOCK(&mix->controllock); - - return ret; -} - - -MIX_RESULT mix_audio_get_min_vol_default(MixAudio *mix, gint *minvol) -{ - // API version 0.79. 1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - if (!minvol) return MIX_RESULT_NULL_PTR; - - _LOCK(&mix->controllock); - - if (!has_FW_INFO) - { - ret = mix_audio_FW_INFO(mix); - } - - if (MIX_SUCCEEDED(ret)) - { - *minvol = (gint)cur_FW_INFO.pop_info.min_vol; - } - - _UNLOCK(&mix->controllock); - - return ret; -} - -MIX_RESULT mix_audio_get_stream_state_default(MixAudio *mix, MixStreamState *streamState) -{ - // API version 0.79. 1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (G_UNLIKELY(mix==NULL)) return MIX_RESULT_NULL_PTR; - if (streamState == NULL) return MIX_RESULT_NULL_PTR; - - _LOCK(&mix->controllock); - - if (mix->state != MIX_STATE_CONFIGURED) - ret = MIX_RESULT_NOT_CONFIGURED; - else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) - ret = MIX_RESULT_WRONGMODE; - - // Now check if we need to exit. - if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - - // PAUSED_DRAINING is internal state. - if (mix->streamState == MIX_STREAM_PAUSED_DRAINING) - *streamState = MIX_STREAM_PAUSED; - else - *streamState = mix->streamState; - - _UNLOCK(&mix->controllock); - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_audio_get_volume_default(MixAudio *mix, gint *currvol, MixVolType type) -{ - // API version 0.79. 1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - struct snd_sst_vol vol = {0}; - int retVal = 0; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - if (!currvol) return MIX_RESULT_NULL_PTR; - if ((type != MIX_VOL_PERCENT) && (type != MIX_VOL_DECIBELS)) return MIX_RESULT_INVALID_PARAM; - - _LOCK(&mix->controllock); - - // ret should be SUCCESS when this sequence starts... - if (mix->codecMode != MIX_CODING_DECODE) - ret = MIX_RESULT_WRONGMODE; - else if (mix->state != MIX_STATE_CONFIGURED) - ret = MIX_RESULT_NOT_CONFIGURED; - else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) - ret = MIX_RESULT_WRONGMODE; - - // Now check if we need to exit. - if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - - vol.stream_id = mix->streamID; - - -#ifdef LPESTUB - // Not calling. -#else - g_debug("Calling _GET_VOL. fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_GET_VOL, &vol); - g_debug("_GET_VOL returned %d. vol=%d", retVal, vol.volume); -#endif - - if (retVal != 0) - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("_GET_VOL failed. 
Error(0x%08x): %s", errno, strerror(errno)); - mix_audio_debug_dump(mix); - } - else - { - gint maxvol = 0; - ret = mix_audio_get_max_vol(mix, &maxvol); - - if (MIX_SUCCEEDED(ret)) - { - if (type == MIX_VOL_PERCENT) - *currvol = (maxvol!=0)?((vol.volume * 100) / maxvol):0; - else - *currvol = vol.volume; - } - } - - _UNLOCK(&mix->controllock); - - return ret; -} - -MIX_RESULT mix_audio_get_mute_default(MixAudio *mix, gboolean* muted) -{ - // API version 0.79. 1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - if (muted == NULL) return MIX_RESULT_NULL_PTR; - - _LOCK(&mix->controllock); - - // ret should be SUCCESS when this sequence starts... - if (mix->codecMode != MIX_CODING_DECODE) - ret = MIX_RESULT_WRONGMODE; - else if (mix->state != MIX_STATE_CONFIGURED) - ret = MIX_RESULT_NOT_CONFIGURED; - else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) - ret = MIX_RESULT_WRONGMODE; - - // Now check if we need to exit. - if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - - *muted = mix->stream_muted; - - _UNLOCK(&mix->controllock); - - return ret; -} - -MIX_RESULT mix_audio_set_mute_default(MixAudio *mix, gboolean mute) -{ - // API version 0.79. 1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - int retVal = 0; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - struct snd_sst_mute m = { 0 }; - - if (mute) m.mute = 1; - else m.mute = 0; - - _LOCK(&mix->controllock); - - // ret should be SUCCESS when this sequence starts... - if (mix->codecMode != MIX_CODING_DECODE) - ret = MIX_RESULT_WRONGMODE; - else if (mix->state != MIX_STATE_CONFIGURED) - ret = MIX_RESULT_NOT_CONFIGURED; - else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) - ret = MIX_RESULT_WRONGMODE; - - // Now check if we need to exit. - if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - - m.stream_id = mix->streamID; - -#ifdef LPESTUB - // Not calling. -#else - g_debug("Calling _MUTE. fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_MUTE, &m); - g_debug("_MUTE returned %d", retVal); -#endif - - if (retVal != 0) - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("_MUTE failed. Error(0x%08x): %s", errno, strerror(errno)); - mix_audio_debug_dump(mix); - } - else - { - mix->stream_muted = mute; - } - - _UNLOCK(&mix->controllock); - - return ret; -} - -MIX_RESULT mix_audio_pause_default(MixAudio *mix) -{ - // API version 0.79. 1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - int retVal = 0; - - if (G_UNLIKELY(mix==NULL)) return MIX_RESULT_NULL_PTR; - - _LOCK(&mix->controllock); - - // ret should be SUCCESS when this sequence starts... - if (mix->codecMode != MIX_CODING_DECODE) - ret = MIX_RESULT_WRONGMODE; - else if (mix->state != MIX_STATE_CONFIGURED) - ret = MIX_RESULT_NOT_CONFIGURED; - else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) - ret = MIX_RESULT_WRONGMODE; - else if ((mix->streamState != MIX_STREAM_PLAYING) && (mix->streamState != MIX_STREAM_DRAINING)) - ret = MIX_RESULT_WRONG_STATE; - - // Now check if we need to exit. - if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - - // If stream is paused, return success. - if ((mix->streamState == MIX_STREAM_PAUSED) || - (mix->streamState == MIX_STREAM_PAUSED_DRAINING)) - { - g_debug("Stream already paused."); - _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_SUCCESS); - } - -#ifdef LPESTUB - // Not calling -#else - g_debug("Calling SNDRV_SST_STREAM_PAUSE. 
fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_PAUSE); - g_debug("_PAUSE returned %d", retVal); -#endif - - if (retVal != 0) - { - if (mix->streamState == MIX_STREAM_DRAINING) - { - // if stream state has been DRAINING, DRAIN could become successful during the PAUSE call, but not yet have chance to update streamState since we now hold the control lock. - // In this case, the mix_streamState becomes out-of-sync with the actual playback state. PAUSE failed due to stream already STOPPED but mix->streamState remains at "DRAINING" - // On the other hand, we can't let DRAIN hold the lock the entire time. - // We would not know if we fail PAUSE due to DRAINING, or a valid reason. - // DRAINING is not likely problem for resume, as long as the PAUSED state is set when stream is really PAUSED. - ret = MIX_RESULT_NEED_RETRY; - g_warning("PAUSE failed while DRAINING. Draining could be just completed. Retry needed."); - } - else - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("_PAUSE failed. Error:0x%08x", errno); - mix_audio_debug_dump(mix); - } - } - else - { - if (mix->streamState == MIX_STREAM_DRAINING) - mix->streamState = MIX_STREAM_PAUSED_DRAINING; - else - mix->streamState = MIX_STREAM_PAUSED; - -#ifdef AUDIO_MANAGER - if (lpe_stream_notify_pause(mix->amStreamID) != 0) - { - ret = MIX_RESULT_AM_NOTIFY_PAUSE_FAIL; - } -#endif - } - - _UNLOCK(&mix->controllock); - -#ifdef LPESTUB - if (MIX_SUCCEEDED(ret)) - { - GTimeVal tval = {0}; - g_get_current_time(&tval); - guint64 ts = 1000ll * tval.tv_sec + tval.tv_usec / 1000; - mix->ts_elapsed += ts - mix->ts_last; - mix->ts_last = 0; - } -#endif - return ret; -} - -MIX_RESULT mix_audio_resume_default(MixAudio *mix) -{ - // API version 0.79. 1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - int retVal = 0; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - _LOCK(&mix->controllock); - - // ret should be SUCCESS when this sequence starts... - if (mix->codecMode != MIX_CODING_DECODE) - ret = MIX_RESULT_WRONGMODE; - else if (mix->state != MIX_STATE_CONFIGURED) - ret = MIX_RESULT_NOT_CONFIGURED; - else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) - ret = MIX_RESULT_WRONGMODE; - else if ((mix->streamState != MIX_STREAM_PAUSED) && (mix->streamState != MIX_STREAM_PAUSED_DRAINING)) - ret = MIX_RESULT_WRONG_STATE; - - // Now check if we need to exit. - if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - - // If stream is paused, return success. - if ((mix->streamState == MIX_STREAM_PLAYING) || - (mix->streamState == MIX_STREAM_DRAINING)) - { - g_debug("Stream already playing."); - _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_SUCCESS); - } - -#ifdef LPESTUB - // Not calling -#else - g_debug("Calling SNDRV_SST_STREAM_RESUME. fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_RESUME); - g_debug("_STREAM_RESUME returned %d", retVal); -#endif - - if (retVal != 0) - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("_PAUSE failed. 
Error:0x%08x", errno); - mix_audio_debug_dump(mix); - } - else - { - if (mix->streamState == MIX_STREAM_PAUSED_DRAINING) - mix->streamState = MIX_STREAM_DRAINING; - else - mix->streamState = MIX_STREAM_PLAYING; - -#ifdef AUDIO_MANAGER - if (lpe_stream_notify_resume(mix->amStreamID) != 0) { - ret = MIX_RESULT_AM_NOTIFY_RESUME_FAIL; - } -#endif - } - - _UNLOCK(&mix->controllock); - -#ifdef LPESTUB - if (MIX_SUCCEEDED(ret)) - { - GTimeVal tval = {0}; - g_get_current_time(&tval); - guint64 ts = 1000ll * tval.tv_sec + tval.tv_usec / 1000; - mix->ts_last = ts; - } -#endif - - return ret; -} - -MIX_RESULT mix_audio_set_volume_default(MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype) -{ - // API version 0.79. 1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - int retVal = 0; - struct snd_sst_vol vol = { - .ramp_duration = msecs, - .ramp_type = ramptype - }; - - if (G_UNLIKELY(mix == NULL)) return MIX_RESULT_NULL_PTR; - if ((type != MIX_VOL_PERCENT) && (type != MIX_VOL_DECIBELS)) return MIX_RESULT_INVALID_PARAM; - - _LOCK(&mix->controllock); - - // ret should be SUCCESS when this sequence starts... - if (mix->codecMode != MIX_CODING_DECODE) - ret = MIX_RESULT_WRONGMODE; - else if (mix->state != MIX_STATE_CONFIGURED) - ret = MIX_RESULT_NOT_CONFIGURED; - else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) - ret = MIX_RESULT_WRONGMODE; - - // Now check if we need to exit. - if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - - vol.stream_id = mix->streamID; - - if (type == MIX_VOL_DECIBELS) - { - vol.volume = currvol; - } - else - { - gint maxvol = 0; - ret = mix_audio_get_max_vol(mix, &maxvol); - - if (!maxvol) - g_critical("Max Vol is 0!"); - - if (MIX_SUCCEEDED(ret)) - { - vol.volume = currvol * maxvol / 100; - } - } - - -#ifdef LPESTUB - // Not calling -#else - g_debug("calling _SET_VOL vol=%d", vol.volume); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_SET_VOL, &vol); - g_debug("_SET_VOL returned %d", retVal); -#endif - - if (retVal != 0) - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("_SET_VOL failed. Error(0x%08x): %s", errno, strerror(errno)); - mix_audio_debug_dump(mix); - } - - _UNLOCK(&mix->controllock); - - return ret; -} - -MIX_RESULT mix_audio_FW_INFO(MixAudio *mix) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - _LOCK(&mix->controllock); - - // This call always get the fw info. - int retVal = 0; - -#ifdef LPESTUB - // Not calling. -#else - g_debug("calling SNDRV_SST_FW_INFO fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_FW_INFO, &cur_FW_INFO); - g_debug("SNDRV_SST_FW_INFO returned %d", retVal); -#endif - - if (!retVal) - { - has_FW_INFO = TRUE; - } - else - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("_FW_INFO failed. Error:0x%08x", errno); - mix_audio_debug_dump(mix); - } - - _UNLOCK(&mix->controllock); - - return ret; -} - - -static MIX_RESULT mix_audio_SST_writev(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - -/* - definition of "struct iovec" used by writev: - struct iovec { - void *iov_base; - size_t iov_len; - }; -*/ - - if (!mix) return MIX_RESULT_NULL_PTR; - - size_t total_bytes = 0; - // NOTE: we may want to find a way to avoid this copy. 
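-  // (The copy is needed because MixIOVec carries data/buffer_size/data_size
-  // fields while writev(2) expects struct iovec { iov_base, iov_len }.)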
- struct iovec *in = (struct iovec*)g_alloca(sizeof(struct iovec) * iovincnt); - if (!in) return MIX_RESULT_NO_MEMORY; - - int i; - for (i=0;iaudioconfigparams) > 0) - { - wait_time = total_bytes*8*1000*1000/MIX_ACP_BITRATE(mix->audioconfigparams); - // g_debug("To wait %lu usec for writev() to simulate blocking\n", wait_time); - } - GTimer *timer = g_timer_new(); - g_timer_start(timer); - - g_debug("calling writev(fd=%d)", mix->fileDescriptor); - written = writev(mix->fileDescriptor, in, iovincnt); - if (written >= 0) mix->bytes_written += written; - g_debug("writev() returned %d. Total %" G_GUINT64_FORMAT, written, mix->bytes_written); - /* Now since writing to file rarely block, we put timestamp there to block.*/ - g_timer_stop(timer); - gulong elapsed = 0; - g_timer_elapsed(timer, &elapsed); - g_timer_destroy(timer); - // g_debug("writev() returned in %lu usec\n", elapsed); - if ((MIX_ACP_BITRATE(mix->audioconfigparams) > 0) && (wait_time > elapsed)) - { - wait_time -= elapsed; - g_usleep(wait_time); - } -#else - g_debug("calling writev(fd=%d) with %d", mix->fileDescriptor, total_bytes); - written = writev(mix->fileDescriptor, in, iovincnt); - if (written > 0) mix->bytes_written += written; - g_debug("writev() returned %d. Total %" G_GUINT64_FORMAT, written, mix->bytes_written); -#endif - - if (written < 0) - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("writev() failed. Error:0x%08x", errno); - } - else - { - // guranttee written is positive value before sign extending it. - if (insize) *insize = (guint64)written; - if (written != total_bytes) - { - g_warning("writev() wrote only %d out of %d", written, total_bytes); - } - } - - return ret; -} - -static MIX_RESULT mix_audio_SST_STREAM_DECODE(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - int retVal = 0; - - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - if ((iovout == NULL) || (iovoutcnt <= 0)) - { - g_critical("Wrong mode. Please report a bug..."); - return MIX_RESULT_NULL_PTR; - } - - g_message("Input entries=%d. Output entries=%d", iovincnt, iovoutcnt); - - struct snd_sst_buff_entry *ientries = NULL; - struct snd_sst_buff_entry *oentries = NULL; - - ientries = (struct snd_sst_buff_entry*)g_alloca(sizeof(struct snd_sst_buff_entry) * iovincnt); - oentries = (struct snd_sst_buff_entry*)g_alloca(sizeof(struct snd_sst_buff_entry) * iovoutcnt); - - if (!ientries || !oentries) return MIX_RESULT_NO_MEMORY; - - struct snd_sst_dbufs dbufs = {0}; - - struct snd_sst_buffs ibuf = {0}; - struct snd_sst_buffs obuf = {0}; - - ibuf.entries = iovincnt; - ibuf.type = SST_BUF_USER; - ibuf.buff_entry = ientries; - - obuf.entries = iovoutcnt; - obuf.type = SST_BUF_USER; - obuf.buff_entry = oentries; - - dbufs.ibufs = &ibuf; - dbufs.obufs = &obuf; - - int i = 0; - for (i=0;i1) - { - for (i=0;ifileDescriptor); - written = writev(mix->fileDescriptor, in, iovincnt); - if (written >= 0) - { - mix->bytes_written += written; - dbufs.output_bytes_produced = written; - dbufs.input_bytes_consumed = written; - } - g_debug("stub STREAM_DECODE (writev) returned %d. Total %" G_GUINT64_FORMAT, written, mix->bytes_written); -#else - g_debug("calling SNDRV_SST_STREAM_DECODE fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_DECODE, &dbufs); - g_debug("SNDRV_SST_STREAM_DECODE returned %d", retVal); -#endif - - if (retVal) - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("_STREAM_DECODE failed. 
Error:0x%08x", errno); - mix_audio_debug_dump(mix); - } - else - { - if (insize) *insize = dbufs.input_bytes_consumed; - //if (outsize) *outsize = dbufs.output_bytes_produced; - unsigned long long produced = dbufs.output_bytes_produced; - int i; - for (i=0;i iovout[i].buffer_size) - { - iovout[i].data_size = iovout[i].buffer_size; - produced -= iovout[i].data_size; - } - else - { - iovout[i].data_size = produced; - produced = 0; - } - } - else - { - iovout[i].data_size = 0; - } - } - produced = 0; - for (i=0;iinitialize) - return MIX_RESULT_FAIL; // TODO: add more descriptive error - -#ifdef AUDIO_MANAGER - if (dbus_init() < 0) { - mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_ERROR, "Failed to connect to dbus\n"); -// commented out, gracefully exit right now -// return MIX_RESULT_FAIL; // TODO: add more descriptive error - } -#endif - - return klass->initialize(mix, mode, aip, drminitparams); -} - -MIX_RESULT mix_audio_configure(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->configure) - return MIX_RESULT_FAIL; - - return klass->configure(mix, audioconfigparams, drmparams); -} - -MIX_RESULT mix_audio_decode(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->decode) - return MIX_RESULT_FAIL; - - return klass->decode(mix, iovin, iovincnt, insize, iovout, iovoutcnt); -} - -MIX_RESULT mix_audio_capture_encode(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->capture_encode) - return MIX_RESULT_FAIL; - - return klass->capture_encode(mix, iovout, iovoutcnt); -} - -MIX_RESULT mix_audio_start(MixAudio *mix) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->start) - return MIX_RESULT_FAIL; - - return klass->start(mix); -} - -MIX_RESULT mix_audio_stop_drop(MixAudio *mix) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->stop_drop) - return MIX_RESULT_FAIL; - - return klass->stop_drop(mix); -} - -MIX_RESULT mix_audio_stop_drain(MixAudio *mix) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->stop_drain) - return MIX_RESULT_FAIL; - - return klass->stop_drain(mix); -} - -MIX_RESULT mix_audio_pause(MixAudio *mix) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->pause) - return MIX_RESULT_FAIL; - - return klass->pause(mix); -} - -MIX_RESULT mix_audio_resume(MixAudio *mix) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->resume) - return MIX_RESULT_FAIL; - - return klass->resume(mix); -} - -MIX_RESULT mix_audio_get_timestamp(MixAudio *mix, guint64 *msecs) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->get_timestamp) - return MIX_RESULT_FAIL; - - return klass->get_timestamp(mix, msecs); -} - -MIX_RESULT mix_audio_get_mute(MixAudio *mix, gboolean* muted) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - 
MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->get_mute) - return MIX_RESULT_FAIL; - - return klass->get_mute(mix, muted); -} - -MIX_RESULT mix_audio_set_mute(MixAudio *mix, gboolean mute) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->set_mute) - return MIX_RESULT_FAIL; - - return klass->set_mute(mix, mute); -} - -MIX_RESULT mix_audio_get_max_vol(MixAudio *mix, gint *maxvol) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->get_max_vol) - return MIX_RESULT_FAIL; - - return klass->get_max_vol(mix, maxvol); -} - -MIX_RESULT mix_audio_get_min_vol(MixAudio *mix, gint *minvol) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->get_min_vol) - return MIX_RESULT_FAIL; - - return klass->get_min_vol(mix, minvol); -} - -MIX_RESULT mix_audio_get_volume(MixAudio *mix, gint *currvol, MixVolType type) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->get_volume) - return MIX_RESULT_FAIL; - - return klass->get_volume(mix, currvol, type); -} - -MIX_RESULT mix_audio_set_volume(MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->set_volume) - return MIX_RESULT_FAIL; - - return klass->set_volume(mix, currvol, type, msecs, ramptype); -} - -MIX_RESULT mix_audio_deinitialize(MixAudio *mix) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->deinitialize) - return MIX_RESULT_FAIL; - - return klass->deinitialize(mix); -} - -MIX_RESULT mix_audio_get_stream_state(MixAudio *mix, MixStreamState *streamState) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->get_stream_state) - return MIX_RESULT_FAIL; - - return klass->get_stream_state(mix, streamState); -} - -MIX_RESULT mix_audio_get_state(MixAudio *mix, MixState *state) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->get_state) - return MIX_RESULT_FAIL; - - return klass->get_state(mix, state); -} - -MIX_RESULT mix_audio_is_am_available(MixAudioManager am, gboolean *avail) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (avail) - *avail = FALSE; - else - ret = MIX_RESULT_NULL_PTR; - - return ret; -} - -const gchar* dbgstr_UNKNOWN="UNKNOWN"; - -static const gchar* _mix_stream_state_get_name (MixStreamState s) -{ - static const gchar *MixStreamStateNames[] = { - "MIX_STREAM_NULL", - "MIX_STREAM_STOPPED", - "MIX_STREAM_PLAYING", - "MIX_STREAM_PAUSED", - "MIX_STREAM_DRAINING", - "MIX_STREAM_PAUSED_DRAINING", - "MIX_STREAM_INTERNAL_LAST" - }; - - const gchar *ret = dbgstr_UNKNOWN; - - if (s < sizeof(MixStreamStateNames)/sizeof(MixStreamStateNames[0])) - { - ret = MixStreamStateNames[s]; - } - - return ret; -} - -static const gchar* _mix_state_get_name(MixState s) -{ - static const gchar* MixStateNames[] = { - "MIX_STATE_NULL", - "MIX_STATE_UNINITIALIZED", - "MIX_STATE_INITIALIZED", - "MIX_STATE_CONFIGURED", - "MIX_STATE_LAST" - }; - - const gchar *ret = dbgstr_UNKNOWN; - - if (s < sizeof(MixStateNames)/sizeof(MixStateNames[0])) - { - ret = MixStateNames[s]; - } - - return ret; -} - -static 
const gchar* _mix_codec_mode_get_name(MixCodecMode s) -{ - static const gchar* MixCodecModeNames[] = { - "MIX_CODING_INVALID", - "MIX_CODING_ENCODE", - "MIX_CODING_DECODE", - "MIX_CODING_LAST" - }; - - const gchar *ret = dbgstr_UNKNOWN; - - if (s < sizeof(MixCodecModeNames)/sizeof(MixCodecModeNames[0])) - { - ret = MixCodecModeNames[s]; - } - - return ret; -} - -static const gchar* _mix_device_state_get_name(MixDeviceState s) -{ - static const gchar* MixDeviceStateNames[] = { - "MIX_AUDIO_DEV_CLOSED", - "MIX_AUDIO_DEV_OPENED", - "MIX_AUDIO_DEV_ALLOCATED" - }; - - const gchar *ret = dbgstr_UNKNOWN; - - if (s < sizeof(MixDeviceStateNames)/sizeof(MixDeviceStateNames[0])) - { - ret = MixDeviceStateNames[s]; - } - - return ret; -} - -void mix_audio_debug_dump(MixAudio *mix) -{ - const gchar* prefix="MixAudio:"; - - if (!MIX_IS_AUDIO(mix)) - { - g_debug("%s Not a valid MixAudio object.", prefix); - return; - } - - g_debug("%s streamState(%s)", prefix, _mix_stream_state_get_name(mix->streamState)); - g_debug("%s encoding(%s)", prefix, mix->encoding?mix->encoding:dbgstr_UNKNOWN); - g_debug("%s fileDescriptor(%d)", prefix, mix->fileDescriptor); - g_debug("%s state(%s)", prefix, _mix_state_get_name(mix->state)); - g_debug("%s codecMode(%s)", prefix, _mix_codec_mode_get_name(mix->codecMode)); - - // Private members - g_debug("%s streamID(%d)", prefix, mix->streamID); - //GStaticRecMutex streamlock; // lock that must be acquired to invoke stream method. - //GStaticRecMutex controllock; // lock that must be acquired to call control function. - if (MIX_IS_AUDIOCONFIGPARAMS(mix->audioconfigparams)) - { - // TODO: print audioconfigparams - } - else - { - g_debug("%s audioconfigparams(NULL)", prefix); - } - - g_debug("%s deviceState(%s)", prefix, _mix_device_state_get_name(mix->deviceState)); - - g_debug("%s ts_last(%" G_GUINT64_FORMAT ")", prefix, mix->ts_last); - g_debug("%s ts_elapsed(%" G_GUINT64_FORMAT ")", prefix, mix->ts_elapsed); - g_debug("%s bytes_written(%" G_GUINT64_FORMAT ")", prefix, mix->bytes_written); - - return; -} - -MIX_RESULT mix_audio_get_config(MixAudio *mix, MixAudioConfigParams **audioconfigparams) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->get_config) - return MIX_RESULT_FAIL; - - return klass->get_config(mix, audioconfigparams); -} - -MIX_RESULT mix_audio_get_config_default(MixAudio *mix, MixAudioConfigParams **audioconfigparams) -{ - // API version 0.79. 1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - struct snd_sst_get_stream_params stream_params = {{0}}; - MixAudioConfigParams *p = NULL; - int retVal = 0; - - if (G_UNLIKELY(!mix || !audioconfigparams)) return MIX_RESULT_NULL_PTR; - - _LOCK(&mix->controllock); - - // _get_config is called only it is configured. - if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED); - -#ifdef LPESTUB -#else - // Check only if we are initialized. - g_debug("Calling SNDRV_SST_STREAM_GET_PARAMS. fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_GET_PARAMS, &stream_params); - g_debug("_GET_PARAMS returned %d", retVal); -#endif - - if (retVal != 0) - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("Failed to GET_PARAMS. errno:0x%08x. %s\n", errno, strerror(errno)); - } - else - { - p = mix_sst_params_to_acp(&stream_params); - if (p != NULL) - { - if (mix->audioconfigparams != NULL) - { - // Complete the return structure with fields that are not returned from the SST. 
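For callers, the contract being assembled here is that mix_audio_get_config() hands back a freshly allocated configuration object through the double pointer, while the session keeps its own duplicate in mix->audioconfigparams. A usage sketch; ma stands for a hypothetical, already-configured MixAudio instance:

    MixAudioConfigParams *cfg = NULL;
    if (MIX_SUCCEEDED(mix_audio_get_config(ma, &cfg)) && MIX_IS_AUDIOCONFIGPARAMS(cfg))
    {
      g_debug("decode mode: %d", MIX_ACP_DECODEMODE(cfg)); /* inspect returned params   */
      mix_acp_unref(cfg);                                  /* caller owns the reference */
    }
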
- MIX_ACP_DECODEMODE(p) = MIX_ACP_DECODEMODE(mix->audioconfigparams); - - gchar *sn = mix_acp_get_streamname(mix->audioconfigparams); - if (sn != NULL) - { - if (!MIX_SUCCEEDED(mix_acp_set_streamname(MIX_AUDIOCONFIGPARAMS(p), sn))) - { - g_error("Failed to set stream name!"); - } - g_free(sn); - } - - if (!MIX_SUCCEEDED(mix_acp_set_audio_manager(MIX_AUDIOCONFIGPARAMS(p), mix_acp_get_audio_manager(mix->audioconfigparams)))) - { - g_error("Failed to set audio manager!"); - } - - // remove existingi copy of audioconfigparams and copy this return struct. - mix_acp_unref(mix->audioconfigparams); - mix->audioconfigparams = NULL; - } - - mix->audioconfigparams = MIX_AUDIOCONFIGPARAMS(mix_params_dup(MIX_PARAMS(p))); - } - *audioconfigparams = p; - } - - _UNLOCK(&mix->controllock); - - return ret; -} - -MIX_RESULT mix_audio_get_bytes_decoded(MixAudio *mix, guint64 *byte) -{ - if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR; - - MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix); - - if (!klass->get_bytes_decoded) - return MIX_RESULT_FAIL; - - return klass->get_bytes_decoded(mix, byte); -} - -MIX_RESULT mix_audio_get_bytes_decoded_default(MixAudio *mix, guint64 *bytecount) -{ - // API version 0.79. 1/22/2009 - MIX_RESULT ret = MIX_RESULT_SUCCESS; - unsigned long long bytes_decoded = 0; - int retVal = 0; - - if ((G_UNLIKELY(!mix)) || (bytecount == NULL)) return MIX_RESULT_NULL_PTR; - - _LOCK(&mix->controllock); - - // ret should be SUCCESS when this sequence starts... - if (mix->codecMode != MIX_CODING_DECODE) - ret = MIX_RESULT_WRONGMODE; - else if (mix->state != MIX_STATE_CONFIGURED) - ret = MIX_RESULT_NOT_CONFIGURED; - else if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN) - ret = MIX_RESULT_WRONGMODE; - - // Now check if we need to exit. - if (!MIX_SUCCEEDED(ret)) _UNLOCK_RETURN(&mix->controllock, ret); - - -#ifdef LPESTUB -#else - g_debug("Calling _STREAM_BYTES_DECODED. fd=%d", mix->fileDescriptor); - retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_BYTES_DECODED, &bytes_decoded); - g_debug("_STREAM_BYTES_DECODED returned %d. Bytes decoded=%" G_GUINT64_FORMAT, retVal, bytes_decoded); -#endif - - if (retVal != 0) - { - ret = MIX_RESULT_SYSTEM_ERRNO; - g_debug("_STREAM_BYTES_DECODED failed. Error(0x%08x): %s", errno, strerror(errno)); - mix_audio_debug_dump(mix); - } - else - { - *bytecount = bytes_decoded; - } - - _UNLOCK(&mix->controllock); - - return ret; -} - -static MIX_RESULT mix_audio_verify_params(MixAudio *mix, const MixAudioConfigParams *audioconfigparams) -{ - MIX_RESULT ret = MIX_RESULT_FAIL; - - if (G_UNLIKELY(!mix)) - { - g_error("Null pointer passed to internal function!"); - return MIX_RESULT_NULL_PTR; - } - - g_message("_verify_params() starts."); - - switch (mix->codecMode) - { - case MIX_CODING_DECODE: - { - if ((MIX_ACP_DECODEMODE(audioconfigparams) != MIX_DECODE_DIRECTRENDER) && - (MIX_ACP_DECODEMODE(audioconfigparams) != MIX_DECODE_DECODERETURN)) - { - ret = MIX_RESULT_CONF_MISMATCH; - goto verify_params_done; - } - break; - } - case MIX_CODING_ENCODE: - { - if (!MIX_IS_AUDIOCONFIGPARAMSAAC(audioconfigparams)) - { - ret = MIX_RESULT_CODEC_NOTSUPPORTED; - goto verify_params_done; - } - } - default: - break; - } - - ret = MIX_RESULT_SUCCESS; - -verify_params_done: - - g_message("_verify_params() done. 
Returning 0x%08x.", ret); - - return ret; -} - -static MIX_RESULT mix_audio_is_stream_changed(MixAudio *mix, MixAudioConfigParams *new_params, gboolean *pChanged) -{ - MIX_RESULT ret = MIX_RESULT_FAIL; - - gboolean changed = FALSE; - MixParams *old_mask = NULL; - MixParams *new_mask = NULL; - - g_message("is_stream_changed() starts"); - - if ((mix == NULL) || (new_params == NULL) || (pChanged == NULL)) - { - g_error("Null pointer passed to internal function!"); - return MIX_RESULT_NULL_PTR; - } - - changed = (mix->audioconfigparams == NULL); - if (changed) - { - ret = MIX_RESULT_SUCCESS; - goto stream_changed_done; - } - - old_mask = mix_params_dup(MIX_PARAMS(mix->audioconfigparams)); - if (MIX_IS_AUDIOCONFIGPARAMS(old_mask)) - { - MIX_ACP_DECODEMODE(old_mask) = MIX_DECODE_NULL; - if (!MIX_SUCCEEDED(mix_acp_set_streamname(MIX_AUDIOCONFIGPARAMS(old_mask), NULL))) - { - g_error("Failed to set stream name!"); - goto stream_changed_done; - } - if (!MIX_SUCCEEDED(mix_acp_set_audio_manager(MIX_AUDIOCONFIGPARAMS(old_mask), MIX_AUDIOMANAGER_NONE))) - { - g_error("Failed to set audio manager!"); - goto stream_changed_done; - } - } - else - { - g_error("Failed to duplicate param!"); - goto stream_changed_done; - } - - new_mask = mix_params_dup(MIX_PARAMS(new_params)); - if (MIX_IS_AUDIOCONFIGPARAMS(new_mask)) - { - MIX_ACP_DECODEMODE(new_mask) = MIX_DECODE_NULL; - if (!MIX_SUCCEEDED(mix_acp_set_streamname(MIX_AUDIOCONFIGPARAMS(new_mask), NULL))) - { - g_error("Failed to set stream name!"); - goto stream_changed_done; - } - if (!MIX_SUCCEEDED(mix_acp_set_audio_manager(MIX_AUDIOCONFIGPARAMS(new_mask), MIX_AUDIOMANAGER_NONE))) - { - g_error("Failed to set audio manager!"); - goto stream_changed_done; - } - } - else - { - g_error("Failed to duplicate param!"); - goto stream_changed_done; - } - - - changed = !mix_params_equal(old_mask, new_mask); - - ret = MIX_RESULT_SUCCESS; - -stream_changed_done: - - if (old_mask) - { - mix_params_unref(old_mask); - old_mask=NULL; - } - if (new_mask) - { - mix_params_unref(new_mask); - new_mask=NULL; - } - - *pChanged = changed; - - g_message("is_stream_changed() done returning 0x%08x, changed=%d", ret, changed); - - return ret; -} diff --git a/mix_audio/src/mixaudio.h b/mix_audio/src/mixaudio.h deleted file mode 100644 index ca7e353..0000000 --- a/mix_audio/src/mixaudio.h +++ /dev/null @@ -1,575 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
-*/
-
-#ifndef __MIX_AUDIO_H__
-#define __MIX_AUDIO_H__
-
-#include <glib-object.h>
-#include "mixacp.h"
-#include "mixaip.h"
-#include "mixdrmparams.h"
-#include "mixresult.h"
-#include "mixaudiotypes.h"
-
-/*
- * Type macros.
- */
-#define MIX_TYPE_AUDIO (mix_audio_get_type ())
-#define MIX_AUDIO(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIO, MixAudio))
-#define MIX_IS_AUDIO(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIO))
-#define MIX_AUDIO_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIO, MixAudioClass))
-#define MIX_IS_AUDIO_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIO))
-#define MIX_AUDIO_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIO, MixAudioClass))
-
-typedef struct _MixAudio MixAudio;
-typedef struct _MixAudioClass MixAudioClass;
-
-/**
- * MixStreamState:
- * @MIX_STREAM_NULL: Stream is not allocated.
- * @MIX_STREAM_STOPPED: Stream is at STOP state. This is the only state in which DNR is allowed.
- * @MIX_STREAM_PLAYING: Stream is at Playing state.
- * @MIX_STREAM_PAUSED: Stream is Paused.
- * @MIX_STREAM_DRAINING: Stream is draining -- the remaining buffered data in the device is still playing. This state is special due to the limitation that no other control operations are allowed in this state. The stream becomes @MIX_STREAM_STOPPED automatically when draining has completed.
- * @MIX_STREAM_LAST: Last index in the enumeration.
- *
- * Stream state during Decode and Render or Encode mode. These states do not apply to Decode and Return mode.
- */
-typedef enum {
-  MIX_STREAM_NULL=0,
-  MIX_STREAM_STOPPED,
-  MIX_STREAM_PLAYING,
-  MIX_STREAM_PAUSED,
-  MIX_STREAM_DRAINING,
-  MIX_STREAM_LAST
-} MixStreamState;
-
-/**
- * MixState:
- * @MIX_STATE_UNINITIALIZED: MIX is not initialized.
- * @MIX_STATE_INITIALIZED: MIX is initialized.
- * @MIX_STATE_CONFIGURED: MIX is configured successfully.
- * @MIX_STATE_LAST: Last index in the enumeration.
- *
- * The various states the device can be in.
- */
-typedef enum {
-  MIX_STATE_NULL=0,
-  MIX_STATE_UNINITIALIZED,
-  MIX_STATE_INITIALIZED,
-  MIX_STATE_CONFIGURED,
-  MIX_STATE_LAST
-} MixState;
-
-/**
- * MixCodecMode:
- * @MIX_CODING_INVALID: Indicates the device is uninitialized for any mode.
- * @MIX_CODING_ENCODE: Indicates the device is opened for encoding.
- * @MIX_CODING_DECODE: Indicates the device is opened for decoding.
- * @MIX_CODING_LAST: Last index in the enumeration.
- *
- * Mode in which the device is operating. See mix_audio_initialize().
- */
-typedef enum {
-  MIX_CODING_INVALID=0,
-  MIX_CODING_ENCODE,
-  MIX_CODING_DECODE,
-  MIX_CODING_LAST
-} MixCodecMode;
-
-/**
- * MixVolType:
- * @MIX_VOL_PERCENT: volume is expressed in percentage.
- * @MIX_VOL_DECIBELS: volume is expressed in decibels.
- * @MIX_VOL_LAST: last entry.
- *
- * See mix_audio_getvolume() and mix_audio_setvolume().
- */
-typedef enum {
-  MIX_VOL_PERCENT=0,
-  MIX_VOL_DECIBELS,
-  MIX_VOL_LAST
-} MixVolType;
-
-/**
- * MixVolRamp:
- * @MIX_RAMP_LINEAR: volume ramps linearly.
- * @MIX_RAMP_EXPONENTIAL: volume ramps exponentially.
- * @MIX_RAMP_LAST: last entry.
- *
- * See mix_audio_getvolume() and mix_audio_setvolume().
- */
-typedef enum
-{
-  MIX_RAMP_LINEAR = 0,
-  MIX_RAMP_EXPONENTIAL,
-  MIX_RAMP_LAST
-} MixVolRamp;
-
-/**
- * MixIOVec:
- * @data: data pointer
- * @buffer_size: size of the buffer pointed to by @data
- * @data_size: amount of valid data in @data
- *
- * Scatter-gather style structure, used by the mix_audio_decode() method for input and output buffers.
- */ -typedef struct { - guchar *data; - gint32 buffer_size; - gint32 data_size; -} MixIOVec; - -/** - * MixDeviceState: - * @MIX_AUDIO_DEV_CLOSED: TBD - * @MIX_AUDIO_DEV_OPENED: TBD - * @MIX_AUDIO_DEV_ALLOCATED: TBD - * - * Device state. - */ -typedef enum { - MIX_AUDIO_DEV_CLOSED=0, - MIX_AUDIO_DEV_OPENED, - MIX_AUDIO_DEV_ALLOCATED -} MixDeviceState; - -/** - * MixAudioClass: - * @parent_class: Parent class; - * - * MI-X Audio object class - */ -struct _MixAudioClass -{ - /*< public >*/ - GObjectClass parent_class; - - /*< virtual public >*/ - MIX_RESULT (*initialize) (MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams); - MIX_RESULT (*configure) (MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams); - MIX_RESULT (*decode) (MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt); - MIX_RESULT (*capture_encode) (MixAudio *mix, MixIOVec *iovout, gint iovoutcnt); - MIX_RESULT (*start) (MixAudio *mix); - MIX_RESULT (*stop_drop) (MixAudio *mix); - MIX_RESULT (*stop_drain) (MixAudio *mix); - MIX_RESULT (*pause) (MixAudio *mix); - MIX_RESULT (*resume) (MixAudio *mix); - MIX_RESULT (*get_timestamp) (MixAudio *mix, guint64 *msecs); - MIX_RESULT (*set_mute) (MixAudio *mix, gboolean mute); - MIX_RESULT (*get_mute) (MixAudio *mix, gboolean* muted); - MIX_RESULT (*get_max_vol) (MixAudio *mix, gint *maxvol); - MIX_RESULT (*get_min_vol) (MixAudio *mix, gint *minvol); - MIX_RESULT (*get_volume) (MixAudio *mix, gint *currvol, MixVolType type); - MIX_RESULT (*set_volume) (MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype); - MIX_RESULT (*deinitialize) (MixAudio *mix); - MIX_RESULT (*get_stream_state) (MixAudio *mix, MixStreamState *streamState); - MIX_RESULT (*get_state) (MixAudio *mix, MixState *state); - MIX_RESULT (*get_config) (MixAudio *mix, MixAudioConfigParams **audioconfigparams); - MIX_RESULT (*get_bytes_decoded) (MixAudio *mix, guint64 *byte); -}; - -/** - * MixAudio: - * @parent: Parent object. - * @streamState: Current state of the stream - * @decodeMode: Current decode mode of the device. This value is valid only when @codingMode equals #MIX_CODING_DECODE. - * @fileDescriptor: File Descriptor to the opened device. - * @state: State of the current #MixAudio session. - * @codecMode: Current codec mode of the session. - * @useIAM: Is current stream configured to use Intel Audio Manager. - * @encoding: Not Used. - * - * MI-X Audio object - */ -struct _MixAudio -{ - /*< public >*/ - GObject parent; - - /*< public >*/ - - /*< private >*/ - MixStreamState streamState; - gchar *encoding; - MixState state; - MixCodecMode codecMode; - int fileDescriptor; - gint streamID; - guint32 amStreamID; - GStaticRecMutex streamlock; // lock that must be acquired to invoke stream method. - GStaticRecMutex controllock; // lock that must be acquired to call control function. - MixAudioConfigParams *audioconfigparams; - gboolean am_registered; - MixDeviceState deviceState; - gboolean stream_muted; - - guint64 ts_last; - guint64 ts_elapsed; - guint64 bytes_written; -}; - -/** - * mix_audio_get_type: - * @returns: type - * - * Get the type of object. 
- */
-GType mix_audio_get_type (void);
-
-/**
- * mix_audio_new:
- * @returns: A newly allocated instance of #MixAudio
- *
- * Use this method to create a new instance of #MixAudio.
- */
-MixAudio *mix_audio_new(void);
-
-/**
- * mix_audio_ref:
- * @mix: object to add reference
- * @returns: the MixAudio instance whose reference count has been increased.
- *
- * Add reference count.
- */
-MixAudio *mix_audio_ref(MixAudio *mix);
-
-/**
- * mix_audio_unref:
- * @obj: object to unref.
- *
- * Decrement the reference count of the object.
- */
-#define mix_audio_unref(obj) g_object_unref (G_OBJECT(obj))
-
-/* Class Methods */
-
-/**
- * mix_audio_get_version:
- * @returns: #MIX_RESULT_SUCCESS
- *
- * Returns the version of the MI-X library.
- *
- */
-MIX_RESULT mix_audio_get_version(guint* major, guint *minor);
-
-/**
- * mix_audio_initialize:
- * @mix: #MixAudio object.
- * @mode: Requested #MixCodecMode.
- * @aip: Audio initialization parameters.
- * @drminitparams: Optional. DRM initialization param if applicable.
- * @returns: #MIX_RESULT_SUCCESS on successful initialization. #MIX_RESULT_ALREADY_INIT if the session is already initialized.
- *
- * This function will initialize an encode or decode session with this #MixAudio instance. During this call, the device will be opened. If the device is not available, an error is returned to the caller so that an alternative (e.g. software decoding) can be configured instead. Use mix_audio_deinitialize() to close the device.
- *
- * A previously initialized session must be de-initialized using mix_audio_deinitialize() before it can be initialized again.
- */
-MIX_RESULT mix_audio_initialize(MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams);
-
-/**
- * mix_audio_configure:
- * @mix: #MixAudio object.
- * @audioconfigparams: a #MixAudioConfigParams derived object containing information for the specific stream type.
- * @drmparams: Optional. DRM initialization param if applicable.
- * @returns: Result indicates successful or not.
- *
- * This function can be used to configure a stream for the current session. The caller can use this function to do the following:
- *
- * - Choose decoding mode (direct-render or decode-return)
- * - Provide DRM parameters (using DRMparams object)
- * - Provide stream parameters (using STRMparams objects)
- * - Provide a stream name for the Intel Smart Sound Technology stream
- *
- * SST stream parameters will be set during this call, and stream resources allocated in SST.
- *
- * Intel Audio Manager support:
- * If Intel Audio Manager support is enabled, and if @mode is specified to #MIX_DECODE_DIRECTRENDER, the SST stream will be registered with Intel Audio Manager in the context of this call, using the stream name provided in @streamname. The application will receive a notification from Intel Audio Manager that the stream has been created during or soon after this call. The application should be ready to handle either possibility. A stream ID (associated with the stream name) will be provided by Intel Audio Manager, which will be used for subsequent notifications from Intel Audio Manager or calls to Intel Audio Manager for muting, pause and resume. See mix_audio_getstreamid().
- * If a stream is already registered with Intel Audio Manager, the application must pass the same @streamname argument to retain the session. Otherwise, the existing stream will be unregistered and a new stream will be registered with the new @streamname.
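Read together, mix_audio_initialize() and mix_audio_configure() imply the session bring-up order sketched below. This assumes an MP3 decode-return session and that NULL is acceptable for the init and DRM parameters; the stream values are illustrative only:

    MixAudio *ma = mix_audio_new();
    MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new();

    MIX_ACP_NUM_CHANNELS(mp3) = 2;                     /* illustrative values */
    MIX_ACP_SAMPLE_FREQ(mp3) = 44100;
    MIX_ACP_DECODEMODE(mp3) = MIX_DECODE_DECODERETURN;

    mix_audio_initialize(ma, MIX_CODING_DECODE, NULL, NULL);    /* opens the device         */
    mix_audio_configure(ma, MIX_AUDIOCONFIGPARAMS(mp3), NULL);  /* allocates the SST stream */

    /* ... decode ... */

    mix_audio_deinitialize(ma);
    mix_acp_unref(mp3);
    mix_audio_unref(ma);
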
- *
- * If @mode is specified to #MIX_DECODE_DIRECTRENDER but direct-render mode is not available (due to end user use of an alternative output device), an error indication will be returned to the caller so that an alternate pipeline configuration can be created (e.g. including a Pulse Audio sink, and support for output buffers). In this case, the caller will need to call mix_audio_configure() again with @mode specified as #MIX_DECODE_DECODERETURN to request decode-return mode.
- *
- * This method can be called multiple times if reconfiguration of the stream is needed. However, this method must be called when the stream is in #MIX_STREAM_STOPPED state.
- *
- */
-MIX_RESULT mix_audio_configure(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams);
-
-/**
- * mix_audio_decode:
- * @mix: #MixAudio object.
- * @iovin: a pointer to an array of #MixIOVec structures that contains the input buffers
- * @iovincnt: the number of entries in the @iovin array
- * @insize: Optional. Upon return, the total number of input bytes consumed by this call.
- * @iovout: a pointer to an array of #MixIOVec structures that represent the output buffers. During input, each size in the #MixIOVec array represents the available buffer size pointed to by data. Upon return, each size value will be updated to reflect how much data has been filled. This parameter is ignored if the stream is configured to #MIX_DECODE_DIRECTRENDER. See mix_audio_configure() for more detail.
- * @iovoutcnt: in/out parameter which on input contains the number of entries available in the @iovout array. Upon return, this value will be updated to reflect how many entries in the @iovout array have been populated with data. This parameter is ignored if the stream is configured to #MIX_DECODE_DIRECTRENDER. See mix_audio_configure() for more detail.
- * @returns: #MIX_RESULT
- *
- * This function is used to initiate HW accelerated decoding of encoded data buffers. This function may be used in two major modes, direct-render or decode-return.
- *
- * With direct-render, input buffers are provided by the caller which hold encoded audio data, and no output buffers are provided. The encoded data is decoded, and the decoded data is sent directly to the output speaker. This allows very low power audio rendering and is the best choice of operation for longer battery life.
- *
- * Intel Audio Manager Support
- * However, if the user has connected a different target output device, such as Bluetooth headphones, this mode cannot be used as the decoded audio must be directed to the Pulse Audio stack where the output to the Bluetooth device can be supported, per Intel Audio Manager guidelines. This mode is called decode-return, and requires the caller to provide output buffers for the decoded data.
- *
- * Input buffers in both modes are one or more user space buffers using a scatter/gather style vector interface.
- *
- * Output buffers for the decode-return mode are one or more user space buffers in a scatter style vector interface. Buffers will be filled in order and lengths of data filled will be returned.
- *
- * This call will block until data has been completely copied or queued to the driver. All user space buffers may be used or released when this call returns.
- *
- * Note: If the stream is configured as #MIX_DECODE_DIRECTRENDER, and the stream is in #MIX_STREAM_STOPPED state, the call to mix_audio_decode() will not start the playback until mix_audio_start() is called. This behavior allows the application to queue up data but delay playback until the appropriate time.
- *
- */
-MIX_RESULT mix_audio_decode(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt);
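In decode-return mode the caller sizes the output vector and the library fills in data_size (see the buffer_size/data_size fields of #MixIOVec). A minimal sketch of one decode call; ma is a hypothetical configured decode-return session, the buffer sizes are arbitrary, and the input-side use of data_size is an assumption:

    guchar inbuf[4096], outbuf[16384];                           /* arbitrary sizes */
    guint64 consumed = 0;

    MixIOVec iovin  = { inbuf,  sizeof(inbuf),  sizeof(inbuf) }; /* encoded input   */
    MixIOVec iovout = { outbuf, sizeof(outbuf), 0 };             /* decoded output  */

    if (MIX_SUCCEEDED(mix_audio_decode(ma, &iovin, 1, &consumed, &iovout, 1)))
    {
      /* iovout.data_size now holds the number of bytes decoded into outbuf */
      g_debug("consumed %" G_GUINT64_FORMAT " bytes, produced %d bytes",
              consumed, iovout.data_size);
    }
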
-
-/**
- * mix_audio_capture_encode:
- * @mix: #MixAudio object.
- * @iovout: Capture audio samples.
- * @iovoutcnt: Number of entries in the input vector @iovout.
- * @returns: #MIX_RESULT
- *
- * Reads encoded data from the device.
- *
- * NOTE: May need to rename to "read_encoded" or another name, since "encode" seems to mean taking raw audio and converting it to compressed audio.
- *
- */
-MIX_RESULT mix_audio_capture_encode(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt);
-
-/**
- * mix_audio_start:
- * @mix: #MixAudio object.
- * @returns: #MIX_RESULT_SUCCESS if the resulting state is either #MIX_STREAM_PLAYING or #MIX_STREAM_PAUSED. Fail code otherwise.
- *
- * If the stream is configured to #MIX_DECODE_DIRECTRENDER, the application uses this call to move the stream out of the #MIX_STREAM_STOPPED state. If mix_audio_decode() was called and is blocking in a separate thread prior to this call, this method causes the device to start rendering data.
- *
- * In #MIX_DECODE_DECODERETURN, this method is a no-op.
- */
-MIX_RESULT mix_audio_start(MixAudio *mix);
-
-/**
- * mix_audio_stop_drop:
- * @mix: #MixAudio object.
- * @returns: #MIX_RESULT_SUCCESS if the resulting state has successfully reached #MIX_STREAM_STOPPED. Fail code otherwise.
- *
- * If the stream is configured to #MIX_DECODE_DIRECTRENDER, the application uses this function to stop the processing and playback of audio.
- *
- * All remaining frames to be decoded or rendered will be discarded and playback will stop immediately; this unblocks any pending mix_audio_decode().
- *
- * If #MIX_STOP_DRAIN is requested, the call will block with the stream state set to #MIX_STREAM_DRAINING, and return only when all remaining frames in previously submitted buffers have been decoded and rendered. When #MIX_STOP_DRAIN returns successfully, the stream will have reached #MIX_STREAM_STOPPED.
- *
- * After this call, the timestamp retrieved by mix_audio_gettimestamp() is reset to zero.
- *
- * Note that this method returns #MIX_RESULT_WRONG_STATE if the stream is in #MIX_STREAM_DRAINING state.
- *
- */
-MIX_RESULT mix_audio_stop_drop(MixAudio *mix);
-
-/**
- * mix_audio_stop_drain:
- * @mix: #MixAudio object.
- * @returns: #MIX_RESULT_SUCCESS if the resulting state has successfully reached #MIX_STREAM_STOPPED. Fail code otherwise.
- *
- * If the stream is configured to #MIX_DECODE_DIRECTRENDER, the application uses this function to stop the processing and playback of audio.
- *
- * The call will block with the stream state set to #MIX_STREAM_DRAINING, and return only when all remaining frames in previously submitted buffers have been decoded and rendered.
- *
- * Note that this method blocks until #MIX_STREAM_STOPPED is reached if it is called when the stream is already in #MIX_STREAM_DRAINING state.
- *
- */
-MIX_RESULT mix_audio_stop_drain(MixAudio *mix);
-
-/**
- * mix_audio_pause:
- * @mix: #MixAudio object.
- * @returns: #MIX_RESULT_SUCCESS if #MIX_STREAM_PAUSED state is reached successfully. #MIX_RESULT_WRONG_STATE if the operation is not allowed in the current state.
- *
- * If the stream is configured to #MIX_DECODE_DIRECTRENDER, the application uses this call to change the stream state from #MIX_STREAM_PLAYING to #MIX_STREAM_PAUSED. Note that this method returns successfully only when the resulting state reaches #MIX_STREAM_PAUSED.
- * That is, it will return a failure code if it is called in a state such as #MIX_STREAM_STOPPED, from which transitioning to #MIX_STREAM_PAUSED is not possible.
- *
- * In some situations, where there is a potential race condition with the DRAINING operation, this method may return #MIX_RESULT_NEED_RETRY to indicate that the last operation result is inconclusive, requesting the caller to call again.
- */
-MIX_RESULT mix_audio_pause(MixAudio *mix);
-
-/**
- * mix_audio_resume:
- * @mix: #MixAudio object.
- * @returns: #MIX_RESULT_SUCCESS if #MIX_STREAM_PLAYING state is reached successfully. #MIX_RESULT_WRONG_STATE if the operation is not allowed in the current state.
- *
- * If the stream is configured to #MIX_DECODE_DIRECTRENDER, the application uses this call to change the stream state to #MIX_STREAM_PLAYING. Note that this method returns successfully only when the resulting state reaches #MIX_STREAM_PLAYING. That is, it will return a failure code if it is called in a state such as #MIX_STREAM_DRAINING, from which transitioning to #MIX_STREAM_PLAYING is not possible.
- *
- */
-MIX_RESULT mix_audio_resume(MixAudio *mix);
-
-
-/**
- * mix_audio_get_timestamp:
- * @mix: #MixAudio object.
- * @msecs: play time in milliseconds.
- * @returns: #MIX_RESULT_SUCCESS if the timestamp is available. #MIX_RESULT_WRONG_MODE if the operation is not allowed with the current mode.
- *
- * This function can be used to retrieve the current timestamp for audio playback in milliseconds. The timestamp will reflect the amount of audio data rendered since the start of the stream, or since the last stop. Note that the timestamp is always reset to zero when the stream enters #MIX_STREAM_STOPPED state. The timestamp is an unsigned long value, so the value will wrap when the timestamp reaches #ULONG_MAX. This function is only valid in direct-render mode.
- */
-MIX_RESULT mix_audio_get_timestamp(MixAudio *mix, guint64 *msecs);
-
-/**
- * mix_audio_set_mute:
- * @mix: #MixAudio object.
- * @mute: Turn mute on/off.
- * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
- *
- * This function is used to mute and unmute audio playback. While muted, playback continues, but silently. This function is only valid when the session is configured to #MIX_DECODE_DIRECTRENDER mode.
- *
- * Note that the playback volume may change due to changes in global settings while the stream is muted.
- */
-MIX_RESULT mix_audio_set_mute(MixAudio *mix, gboolean mute);
-
-/**
- * mix_audio_get_mute:
- * @mix: #MixAudio object.
- * @muted: current mute state.
- * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
- *
- * Get Mute.
- */
-MIX_RESULT mix_audio_get_mute(MixAudio *mix, gboolean* muted);
-
-/**
- * mix_audio_get_max_vol:
- * @mix: #MixAudio object.
- * @maxvol: pointer to receive the maximum volume.
- * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
- *
- * This function can be used if the application will be setting the audio volume using decibels instead of percentage. The maximum volume in decibels supported by the driver will be returned. This value can be used to determine the upper bound of the decibel range in calculating volume levels. This value is a signed integer. This function is only valid if the stream is configured to #MIX_DECODE_DIRECTRENDER mode.
- *
- */
-MIX_RESULT mix_audio_get_max_vol(MixAudio *mix, gint *maxvol);
-
-/**
- * mix_audio_get_min_vol:
- * @mix: #MixAudio object.
- * @minvol: pointer to receive the minimum volume.
- * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
- *
- * This function can be used if the application will be setting the audio volume using decibels instead of percentage. The minimum volume in decibels supported by the driver will be returned. This value can be used to determine the lower bound of the decibel range in calculating volume levels. This value is a signed integer. This function is only valid if the stream is configured to #MIX_DECODE_DIRECTRENDER mode.
- *
- */
-MIX_RESULT mix_audio_get_min_vol(MixAudio *mix, gint *minvol);
-
-/**
- * mix_audio_get_volume:
- * @mix: #MixAudio object.
- * @currvol: Current volume. Note that if @type equals #MIX_VOL_PERCENT, this value will be returned within the range of 0 to 100 inclusive.
- * @type: The type represented by @currvol.
- * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
- *
- * This function returns the current volume setting in either decibels or percentage. This function is only valid if the stream is configured to #MIX_DECODE_DIRECTRENDER mode.
- *
- */
-MIX_RESULT mix_audio_get_volume(MixAudio *mix, gint *currvol, MixVolType type);
-
-/**
- * mix_audio_set_volume:
- * @mix: #MixAudio object.
- * @currvol: Current volume. Note that if @type equals #MIX_VOL_PERCENT, this value will be truncated to within the range of 0 to 100 inclusive.
- * @type: The type represented by @currvol.
- * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
- *
- * This function sets the current volume setting in either decibels or percentage. This function is only valid if the stream is configured to #MIX_DECODE_DIRECTRENDER mode.
- *
- */
-MIX_RESULT mix_audio_set_volume(MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype);
-
-/**
- * mix_audio_deinitialize:
- * @mix: #MixAudio object.
- * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
- *
- * This function will uninitialize a session with this MI-X instance. During this call, the SST device will be closed and resources, including mmapped buffers, will be freed. This function should be called by the application once mix_audio_initialize() has been called.
- *
- * Intel Audio Manager Support
- * The SST stream will be unregistered with Intel Audio Manager if it was registered.
- *
- * Note that this method should not fail normally. If it does return failure, the state of this object and the underlying mechanism is compromised and the application should not attempt to reuse this object.
- */
-MIX_RESULT mix_audio_deinitialize(MixAudio *mix);
-
-/**
- * mix_audio_get_stream_state:
- * @mix: #MixAudio object.
- * @streamState: pointer to receive stream state.
- * @returns: #MIX_RESULT
- *
- * Get the stream state of the current stream.
- */
-MIX_RESULT mix_audio_get_stream_state(MixAudio *mix, MixStreamState *streamState);
-
-/**
- * mix_audio_get_state:
- * @mix: #MixAudio object.
- * @state: pointer to receive state
- * @returns: #MIX_RESULT
- *
- * Get the device state of the audio session.
- */
-MIX_RESULT mix_audio_get_state(MixAudio *mix, MixState *state);
-
-/**
- * mix_audio_am_is_enabled:
- * @mix: #MixAudio object.
- * @returns: boolean indicating whether Intel Audio Manager is enabled for the current session.
- *
- * This method checks if the current session is configured to use Intel Audio Manager. Note that Intel Audio Manager is considered disabled if the stream has not been initialized to use the service explicitly.
- */
-gboolean mix_audio_am_is_enabled(MixAudio *mix);
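The decibel-oriented calls above (mix_audio_get_max_vol(), mix_audio_get_min_vol(), mix_audio_set_volume()) combine as follows. A sketch, assuming a direct-render session on a hypothetical instance ma; the halfway computation is illustrative only:

    gint minvol = 0, maxvol = 0;

    if (MIX_SUCCEEDED(mix_audio_get_max_vol(ma, &maxvol)) &&
        MIX_SUCCEEDED(mix_audio_get_min_vol(ma, &minvol)))
    {
      /* place the volume halfway into the driver-reported decibel range,
         with no ramp time (0 ms) and a linear ramp type */
      gint halfway = minvol + (maxvol - minvol) / 2;
      mix_audio_set_volume(ma, halfway, MIX_VOL_DECIBELS, 0, MIX_RAMP_LINEAR);
    }
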
-
-// Real implementation for Base class
-//MIX_RESULT mix_audio_get_version(guint* major, guint *minor);
-
-/**
- * mix_audio_is_am_available:
- * @mix: TBD
- * @am: TBD
- * @avail: TBD
- * @returns: TBD
- *
- * Check if AM is available.
- */
-MIX_RESULT mix_audio_is_am_available(MixAudioManager am, gboolean *avail);
-
-/**
- * mix_audio_get_config:
- * @mix: #MixAudio object.
- * @audioconfigparams: double pointer to hold the output configuration.
- * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
- *
- * This method retrieves the current configuration. This can be called after initialization. If a stream has been configured, it returns the corresponding derived object of #MixAudioConfigParams.
- */
-MIX_RESULT mix_audio_get_config(MixAudio *mix, MixAudioConfigParams **audioconfigparams);
-
-/**
- * mix_audio_get_bytes_decoded:
- * @mix: #MixAudio object.
- * @byte: stream bytes decoded.
- * @returns: #MIX_RESULT_SUCCESS if the value is available. #MIX_RESULT_WRONG_MODE if the operation is not allowed with the current mode.
- *
- * Retrieve the cumulative bytes decoded.
- *
- * Not Implemented.
- */
-MIX_RESULT mix_audio_get_bytes_decoded(MixAudio *mix, guint64 *byte);
-
-#endif /* __MIX_AUDIO_H__ */
diff --git a/mix_audio/src/mixaudiotypes.h b/mix_audio/src/mixaudiotypes.h
deleted file mode 100644
index 1b4e085..0000000
--- a/mix_audio/src/mixaudiotypes.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
-*/
-
-#ifndef __MIX_AUDIO_TYPES_H__
-#define __MIX_AUDIO_TYPES_H__
-
-/**
- * MixAudioManager:
- * @MIX_AUDIOMANAGER_NONE: No Audio Manager.
- * @MIX_AUDIOMANAGER_INTELAUDIOMANAGER: Intel Audio Manager.
- * @MIX_AUDIOMANAGER_LAST: Last index.
- *
- * Audio Manager enumerations.
- */ -typedef enum { - MIX_AUDIOMANAGER_NONE = 0, - MIX_AUDIOMANAGER_INTELAUDIOMANAGER, - MIX_AUDIOMANAGER_LAST -} MixAudioManager; - - -#endif diff --git a/mix_audio/src/pvt.h b/mix_audio/src/pvt.h deleted file mode 100644 index f4be9e5..0000000 --- a/mix_audio/src/pvt.h +++ /dev/null @@ -1,9 +0,0 @@ - - -typedef unsigned short u16; -typedef unsigned long u32; -typedef unsigned char u8; -typedef signed char s8; -typedef signed short s16; -#define __user - diff --git a/mix_audio/src/sst_proxy.c b/mix_audio/src/sst_proxy.c deleted file mode 100644 index de7f7a4..0000000 --- a/mix_audio/src/sst_proxy.c +++ /dev/null @@ -1,437 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - - -#include -#include -#include -#include "mixacpmp3.h" -#include "mixacpwma.h" -#include "mixacpaac.h" -#include "intel_sst_ioctl.h" -#include "mixacp.h" -#include "sst_proxy.h" - -#ifdef G_LOG_DOMAIN -#undef G_LOG_DOMAIN -#define G_LOG_DOMAIN ((gchar*)"mixaudio") -#endif - -gboolean mix_sst_params_convert_mp3(MixAudioConfigParamsMP3 *acp, struct snd_sst_params *s); -gboolean mix_sst_params_convert_wma(MixAudioConfigParamsWMA *acp, struct snd_sst_params *s); -gboolean mix_sst_params_convert_aac(MixAudioConfigParamsAAC *acp, struct snd_sst_params *s); -void mix_sst_params_to_mp3(MixAudioConfigParamsMP3 *acp, struct snd_mp3_params *params); -void mix_sst_params_to_wma(MixAudioConfigParamsWMA *acp, struct snd_wma_params *params); -void mix_sst_params_to_aac(MixAudioConfigParamsAAC *acp, struct snd_aac_params *params); -void mix_sst_set_bps(MixAudioConfigParams *acp, guchar pcm_wd_sz); -void mix_sst_set_op_align(MixAudioConfigParams *acp, guchar op_align); - -/* - * Utilities that convert param object to driver struct. - * No Mix Context needed. However, it knows about the driver's param structure. 
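mix_sst_params_convert() below dispatches on the runtime GType of the config object to pick the codec-specific converter. A caller-side sketch, assuming an MP3 params object; struct snd_sst_params comes from intel_sst_ioctl.h, and printing the codec field as unsigned is an assumption about its type:

    struct snd_sst_params sparams = {0};
    MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new();

    /* the type check inside the converter selects the MP3 branch */
    if (mix_sst_params_convert(MIX_AUDIOCONFIGPARAMS(mp3), &sparams))
      g_debug("codec id for driver: %u", (guint)sparams.codec);

    mix_acp_unref(mp3);
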
- */ -gboolean mix_sst_params_convert(MixAudioConfigParams *acp, struct snd_sst_params *s) -{ - gboolean ret = FALSE; - - if (!s) return FALSE; - - if (MIX_IS_AUDIOCONFIGPARAMSMP3(acp)) - ret = mix_sst_params_convert_mp3(MIX_AUDIOCONFIGPARAMSMP3(acp), s); - else if (MIX_IS_AUDIOCONFIGPARAMSWMA(acp)) - ret = mix_sst_params_convert_wma(MIX_AUDIOCONFIGPARAMSWMA(acp), s); - else if (MIX_IS_AUDIOCONFIGPARAMSAAC(acp)) - ret = mix_sst_params_convert_aac(MIX_AUDIOCONFIGPARAMSAAC(acp), s); - - return ret; -} - - -gboolean mix_sst_params_convert_mp3(MixAudioConfigParamsMP3 *acp, struct snd_sst_params *s) -{ - struct snd_mp3_params *p = &s->sparams.uc.mp3_params; - - s->codec = p->codec = SST_CODEC_TYPE_MP3; - p->num_chan = MIX_ACP_NUM_CHANNELS(acp); - p->brate = MIX_ACP_BITRATE(acp); - p->sfreq = MIX_ACP_SAMPLE_FREQ(acp); - p->crc_check = MIX_ACP_MP3_CRC(acp); - p->pcm_wd_sz = mix_acp_get_bps(MIX_AUDIOCONFIGPARAMS(acp)); - if (p->pcm_wd_sz == MIX_ACP_BPS_16) - p->op_align = MIX_ACP_OUTPUT_ALIGN_16; - else - p->op_align = mix_acp_get_op_align(MIX_AUDIOCONFIGPARAMS(acp)); - - return TRUE; -} - -gboolean mix_sst_params_convert_wma(MixAudioConfigParamsWMA *acp, struct snd_sst_params *s) -{ - struct snd_wma_params *p = &s->sparams.uc.wma_params; - - p->num_chan = MIX_ACP_NUM_CHANNELS(acp); - p->brate = MIX_ACP_BITRATE(acp); - p->sfreq = MIX_ACP_SAMPLE_FREQ(acp); - p->wma_encode_opt = MIX_ACP_WMA_ENCODE_OPT(acp); - p->block_align = MIX_ACP_WMA_BLOCK_ALIGN(acp); - p->channel_mask = MIX_ACP_WMA_CHANNEL_MASK(acp); - p->format_tag = MIX_ACP_WMA_FORMAT_TAG(acp); - p->pcm_src = MIX_ACP_WMA_PCM_BIT_WIDTH(acp); - p->pcm_wd_sz = mix_acp_get_bps(MIX_AUDIOCONFIGPARAMS(acp)); - if (p->pcm_wd_sz == MIX_ACP_BPS_16) - p->op_align = MIX_ACP_OUTPUT_ALIGN_16; - else - p->op_align = mix_acp_get_op_align(MIX_AUDIOCONFIGPARAMS(acp)); - - switch (mix_acp_wma_get_version(acp)) - { - case MIX_AUDIO_WMA_V9: - s->codec = p->codec = SST_CODEC_TYPE_WMA9; - break; - case MIX_AUDIO_WMA_V10: - s->codec = p->codec = SST_CODEC_TYPE_WMA10; - break; - case MIX_AUDIO_WMA_V10P: - s->codec = p->codec = SST_CODEC_TYPE_WMA10P; - break; - default: - break; - } - - return TRUE; -} - -#define AAC_DUMP(param) g_message("snd_aac_params.%s=%u", #param, p->param) -#define AAC_DUMP_I(param, idx) g_message("snd_aac_params.%s[%d]=%x", #param, idx, p->param[idx]) - -gboolean mix_sst_params_convert_aac(MixAudioConfigParamsAAC *acp, struct snd_sst_params *s) -{ - struct snd_aac_params *p = &s->sparams.uc.aac_params; - - // I have only AOT, where tools are usually specified at eAOT. - // However, sometimes, AOT could tell us the tool involved. e.g. - // AOT==5 --> SBR - // AOT==29 --> PS - // AOT==2 --> AAC-LC - - // we know SBR present only if it is indicated presence, or AOT says so. - guint aot = mix_acp_aac_get_aot(acp); - p->sbr_present = ((MIX_ACP_AAC_SBR_FLAG(acp) == 1) || - (aot == 5) || - (MIX_ACP_AAC_PS_FLAG(acp) == 1) || - (aot == 29))?1:0; - - // As far as we know, we should: - // set sbr_present flag for SST in case of possible implicit signalling of SBR, and - // we should use HEAACv2 decoder in case of possible implicit signalling of PS. - // Although we should theoretically select HEAACv2 decoder for HEAACv1 and HEAAC, - // it is not advisable since HEAACv2 decoder has more overhead as per SST team. - // So MixAudio is implicitly selecting codec base on AOT, psPresentFlag and sbrPresentFlag. - // Application can override the selection by explicitly setting psPresentFlag and/or sbrPresentFlag. 
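The AAC comment block below amounts to a three-way decision table. Restated as a standalone helper for clarity (the helper itself is hypothetical; the file inlines this exact logic right after the comment):

    /* Hypothetical restatement of the codec-selection rule; not in the original file. */
    static guint aac_select_sst_codec(gboolean ps, gboolean sbr, guint aot)
    {
      if (ps || aot == 29)  return SST_CODEC_TYPE_eAACP; /* HE-AAC v2: PS (implies SBR) */
      if (sbr || aot == 5)  return SST_CODEC_TYPE_AACP;  /* HE-AAC v1: SBR only         */
      return SST_CODEC_TYPE_AAC;                         /* plain AAC-LC                */
    }
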
- if ((MIX_ACP_AAC_PS_FLAG(acp) == 1) || (aot == 29)) - { - // PS present. - s->codec = p->codec = SST_CODEC_TYPE_eAACP; - } - else if (p->sbr_present == 1) - { - s->codec = p->codec = SST_CODEC_TYPE_AACP; - } - else - { - s->codec = p->codec = SST_CODEC_TYPE_AAC; - } - - p->num_chan = MIX_ACP_NUM_CHANNELS(acp); // external channels - p->ext_chl = MIX_ACP_AAC_CHANNELS(acp); // extension channel configuration. - //p->ext_chl = MIX_ACP_AAC_CHANNELS(acp); // core/internal channels - if (p->num_chan <= 0) p->num_chan = p->ext_chl; - p->aac_srate = MIX_ACP_AAC_SAMPLE_RATE(acp); // aac decoder internal frequency - p->sfreq = MIX_ACP_SAMPLE_FREQ(acp); // output/external frequency - - p->brate = MIX_ACP_BITRATE(acp); - p->mpg_id = (guint)mix_acp_aac_get_mpeg_id(acp); - p->bs_format = mix_acp_aac_get_bit_stream_format(acp); - p->aac_profile = mix_acp_aac_get_aac_profile(acp); - // AOT defined by MPEG spec is 5 for SBR but SST definition is 4 for SBR. - if (aot == 5) - p->aot = 4; - else if (aot == 2) - p->aot = aot; - p->crc_check = MIX_ACP_AAC_CRC(acp); - p->brate_type = mix_acp_aac_get_bit_rate_type(acp); - p->pce_present = MIX_ACP_AAC_PCE_FLAG(acp); - p->pcm_wd_sz = mix_acp_get_bps(MIX_AUDIOCONFIGPARAMS(acp)); - - if (p->pcm_wd_sz == MIX_ACP_BPS_16) - p->op_align = MIX_ACP_OUTPUT_ALIGN_16; - else - p->op_align = mix_acp_get_op_align(MIX_AUDIOCONFIGPARAMS(acp)); - - //p->aac_srate = ; // __u32 aac_srate; /* Plain AAC decoder operating sample rate */ - //p->ext_chl = ; // __u8 ext_chl; /* No.of external channels */ - - switch (p->bs_format) - { - case MIX_AAC_BS_ADTS: - g_sprintf((gchar*)p->bit_stream_format, "adts"); - break; - case MIX_AAC_BS_ADIF: - g_sprintf((gchar*)p->bit_stream_format, "adif"); - break; - case MIX_AAC_BS_RAW: - g_sprintf((gchar*)p->bit_stream_format, "raw"); - p->num_syntc_elems = 0; - p->syntc_id[0] = (gint8)-1; /* 0 for ID_SCE(Dula Mono), -1 for raw */ - p->syntc_id[1] = (gint8)-1; - p->syntc_tag[0] = (gint8)-1; /* raw - -1 and 0 -16 for rest of the streams */ - p->syntc_tag[1] = (gint8)-1; - break; - default: - break; - } - - { - AAC_DUMP(codec); - AAC_DUMP(num_chan); /* 1=Mono, 2=Stereo*/ - AAC_DUMP(pcm_wd_sz); /* 16/24 - bit*/ - AAC_DUMP(brate); - AAC_DUMP(sfreq); /* Sampling freq eg. 8000, 441000, 48000 */ - AAC_DUMP(aac_srate); /* Plain AAC decoder operating sample rate */ - AAC_DUMP(mpg_id); /* 0=MPEG-2, 1=MPEG-4 */ - AAC_DUMP(bs_format); /* input bit stream format adts=0, adif=1, raw=2 */ - AAC_DUMP(aac_profile); /* 0=Main Profile, 1=LC profile, 3=SSR profile */ - AAC_DUMP(ext_chl); /* No.of external channels */ - AAC_DUMP(aot); /* Audio object type. 
1=Main , 2=LC , 3=SSR, 4=SBR*/ - AAC_DUMP(op_align); /* output alignment 0=16 bit , 1=MSB, 2= LSB align */ - AAC_DUMP(brate_type); /* 0=CBR, 1=VBR */ - AAC_DUMP(crc_check); /* crc check 0= disable, 1=enable */ - // AAC_DUMP(bit_stream_format[8]); /* input bit stream format adts/adif/raw */ - g_message("snd_aac_params.bit_stream_format=%s", p->bit_stream_format); - AAC_DUMP(jstereo); /* Joint stereo Flag */ - AAC_DUMP(sbr_present); /* 1 = SBR Present, 0 = SBR absent, for RAW */ - AAC_DUMP(downsample); /* 1 = Downsampling ON, 0 = Downsampling OFF */ - AAC_DUMP(num_syntc_elems); /* 1- Mono/stereo, 0 - Dual Mono, 0 - for raw */ - g_message("snd_aac_params.syntc_id[0]=%x", p->syntc_id[0]); - g_message("snd_aac_params.syntc_id[1]=%x", p->syntc_id[1]); - g_message("snd_aac_params.syntc_tag[0]=%x", p->syntc_tag[0]); - g_message("snd_aac_params.syntc_tag[1]=%x", p->syntc_tag[1]); - //AAC_DUMP_I(syntc_id, 0); /* 0 for ID_SCE(Dula Mono), -1 for raw */ - //AAC_DUMP_I(syntc_id, 1); /* 0 for ID_SCE(Dula Mono), -1 for raw */ - //AAC_DUMP_I(syntc_tag, 0); /* raw - -1 and 0 -16 for rest of the streams */ - //AAC_DUMP_I(syntc_tag, 1); /* raw - -1 and 0 -16 for rest of the streams */ - AAC_DUMP(pce_present); /* Flag. 1- present 0 - not present, for RAW */ - AAC_DUMP(reserved); - AAC_DUMP(reserved1); - } - - return TRUE; -} - -MixAudioConfigParams *mix_sst_acp_from_codec(guint codec) -{ - MixAudioConfigParams *ret = NULL; - - // need stream specific ACP - switch (codec) - { - case SST_CODEC_TYPE_MP3: - case SST_CODEC_TYPE_MP24: - ret = (MixAudioConfigParams*)mix_acp_mp3_new(); - break; - case SST_CODEC_TYPE_AAC: - case SST_CODEC_TYPE_AACP: - case SST_CODEC_TYPE_eAACP: - ret = (MixAudioConfigParams*)mix_acp_aac_new(); - break; - case SST_CODEC_TYPE_WMA9: - case SST_CODEC_TYPE_WMA10: - case SST_CODEC_TYPE_WMA10P: - ret = (MixAudioConfigParams*)mix_acp_wma_new(); - break; - } - - return ret; -} - - - -MixAudioConfigParams *mix_sst_params_to_acp(struct snd_sst_get_stream_params *stream_params) -{ - MixAudioConfigParams *ret = NULL; - - gboolean allocated = FALSE; - // Ingoring stream_params.codec_params.result, which seem to return details specific to stream allocation. - switch (stream_params->codec_params.result) - { - // Please refers to SST API doc for return value definition. - case 5: - g_debug("last SET_PARAMS succeeded with Stream Parameter Modified."); - case 0: - allocated = TRUE; - break; - case 1: - // last SET_PARAMS failed STREAM was not available. - case 2: - // last SET_PARAMS failed CODEC was not available. - case 3: - // last SET_PARAMS failed CODEC was not supported. - case 4: - // last SET_PARAMS failed Invalid Stream Parameters. - case 6: - // last SET_PARAMS failed Invalid Stream ID. - default: - // last SET_PARAMS failed unexpectedly. 
- break; - } - - if (allocated) - { - switch (stream_params->codec_params.codec) - { - case SST_CODEC_TYPE_MP3: - case SST_CODEC_TYPE_MP24: - ret = (MixAudioConfigParams*)mix_acp_mp3_new(); - mix_sst_params_to_mp3(MIX_AUDIOCONFIGPARAMSMP3(ret), &stream_params->codec_params.sparams.uc.mp3_params); - break; - case SST_CODEC_TYPE_AAC: - case SST_CODEC_TYPE_AACP: - case SST_CODEC_TYPE_eAACP: - ret = (MixAudioConfigParams*)mix_acp_aac_new(); - mix_sst_params_to_aac(MIX_AUDIOCONFIGPARAMSAAC(ret), &stream_params->codec_params.sparams.uc.aac_params); - break; - case SST_CODEC_TYPE_WMA9: - case SST_CODEC_TYPE_WMA10: - case SST_CODEC_TYPE_WMA10P: - ret = (MixAudioConfigParams*)mix_acp_wma_new(); - mix_sst_params_to_wma(MIX_AUDIOCONFIGPARAMSWMA(ret), &stream_params->codec_params.sparams.uc.wma_params); - break; - } - } - - if (!ret) ret = mix_acp_new(); - - if (ret) - { - // Be sure to update all vars that becomes available since the ACP could set defaults. - MIX_ACP_SAMPLE_FREQ(ret) = stream_params->pcm_params.sfreq; - MIX_ACP_NUM_CHANNELS(ret) = stream_params->pcm_params.num_chan; - mix_sst_set_bps(MIX_AUDIOCONFIGPARAMS(ret), stream_params->pcm_params.pcm_wd_sz); - } - - return ret; -} - - -void mix_sst_params_to_mp3(MixAudioConfigParamsMP3 *acp, struct snd_mp3_params *params) -{ - if(!acp || !params) return; - - MIX_ACP_NUM_CHANNELS(MIX_AUDIOCONFIGPARAMS(acp)) = params->num_chan; - MIX_ACP_BITRATE(MIX_AUDIOCONFIGPARAMS(acp)) = params->brate; - MIX_ACP_SAMPLE_FREQ(MIX_AUDIOCONFIGPARAMS(acp)) = params->sfreq; - MIX_ACP_MP3_CRC(acp) = params->crc_check; - - mix_sst_set_bps(MIX_AUDIOCONFIGPARAMS(acp), params->pcm_wd_sz); - mix_sst_set_op_align(MIX_AUDIOCONFIGPARAMS(acp), params->op_align); -} - -void mix_sst_params_to_wma(MixAudioConfigParamsWMA *acp, struct snd_wma_params *params) -{ - - MIX_ACP_BITRATE(acp) = params->brate; - MIX_ACP_SAMPLE_FREQ(acp) = params->sfreq; - MIX_ACP_WMA_ENCODE_OPT(acp) = params->wma_encode_opt; - MIX_ACP_WMA_BLOCK_ALIGN(acp) = params->block_align; - MIX_ACP_WMA_CHANNEL_MASK(acp) = params->channel_mask; - MIX_ACP_WMA_FORMAT_TAG(acp) = params->format_tag; - MIX_ACP_WMA_PCM_BIT_WIDTH(acp) = params->pcm_src; - - mix_sst_set_bps(MIX_AUDIOCONFIGPARAMS(acp), params->pcm_wd_sz); - mix_sst_set_op_align(MIX_AUDIOCONFIGPARAMS(acp), params->op_align); - - switch (params->codec) - { - case SST_CODEC_TYPE_WMA9: - mix_acp_wma_set_version(acp, MIX_AUDIO_WMA_V9); - break; - case SST_CODEC_TYPE_WMA10: - mix_acp_wma_set_version(acp, MIX_AUDIO_WMA_V10); - break; - case SST_CODEC_TYPE_WMA10P: - mix_acp_wma_set_version(acp, MIX_AUDIO_WMA_V10P); - break; - } -} - - -void mix_sst_params_to_aac(MixAudioConfigParamsAAC *acp, struct snd_aac_params *params) -{ - if (params->codec == SST_CODEC_TYPE_eAACP) - { - MIX_ACP_AAC_PS_FLAG(acp) = TRUE; - } - - MIX_ACP_NUM_CHANNELS(acp) = params->num_chan; - MIX_ACP_BITRATE(acp) = params->brate; - MIX_ACP_SAMPLE_FREQ(acp) = params->sfreq; - mix_acp_aac_set_mpeg_id(acp, params->mpg_id); - mix_acp_aac_set_bit_stream_format(acp, params->bs_format); - mix_acp_aac_set_aac_profile(acp, params->aac_profile); - - // SST API specific 4 for SBR while AOT definition in MPEG 4 spec specific 5. - // converting. 
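The reverse AOT mapping applied below is deliberately narrow: only two values round-trip between the SST field and the MPEG-4 definition, and anything else leaves the AOT unset. As a hypothetical helper mirroring the inline code:

    /* Hypothetical helper; the original code inlines this below.
     * SST encodes SBR as aot==4 where MPEG-4 defines AOT 5; AAC-LC (2) is shared. */
    static gboolean sst_aot_to_mpeg_aot(guint sst_aot, guint *mpeg_aot)
    {
      if (sst_aot == 4) { *mpeg_aot = 5; return TRUE; }  /* SBR    */
      if (sst_aot == 2) { *mpeg_aot = 2; return TRUE; }  /* AAC-LC */
      return FALSE;      /* other values: the original code leaves the AOT unset */
    }
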
- if (params->aot == 4) - mix_acp_aac_set_aot(acp, 5); - else if (params->aot == 2) - mix_acp_aac_set_aot(acp, params->aot); - - MIX_ACP_AAC_CRC(acp) = params->crc_check; - mix_acp_aac_set_bit_rate_type(acp, params->brate_type); - MIX_ACP_AAC_SBR_FLAG(acp) = params->sbr_present; - MIX_ACP_AAC_PCE_FLAG(acp) = params->pce_present; - - mix_sst_set_bps(MIX_AUDIOCONFIGPARAMS(acp), params->pcm_wd_sz); - mix_sst_set_op_align(MIX_AUDIOCONFIGPARAMS(acp), params->op_align); - - acp->num_syntc_elems = params->num_syntc_elems; - acp->syntc_id[0] = params->syntc_id[0]; - acp->syntc_id[1] = params->syntc_id[1]; - acp->syntc_tag[0] = params->syntc_tag[0]; - acp->syntc_tag[1] = params->syntc_tag[1]; -} - -void mix_sst_set_bps(MixAudioConfigParams *acp, guchar pcm_wd_sz) -{ - switch (pcm_wd_sz) - { - case MIX_ACP_BPS_16: - case MIX_ACP_BPS_24: - break; - default: - pcm_wd_sz = MIX_ACP_BPS_UNKNOWN; - break; - } - mix_acp_set_bps(MIX_AUDIOCONFIGPARAMS(acp), pcm_wd_sz); -} - -void mix_sst_set_op_align(MixAudioConfigParams *acp, guchar op_align) -{ - switch (op_align) - { - case MIX_ACP_OUTPUT_ALIGN_16: - case MIX_ACP_OUTPUT_ALIGN_MSB: - case MIX_ACP_OUTPUT_ALIGN_LSB: - break; - default: - op_align = MIX_ACP_OUTPUT_ALIGN_UNKNOWN; - break; - } - mix_acp_set_op_align(MIX_AUDIOCONFIGPARAMS(acp), op_align); -} - diff --git a/mix_audio/src/sst_proxy.h b/mix_audio/src/sst_proxy.h deleted file mode 100644 index 6ad69fe..0000000 --- a/mix_audio/src/sst_proxy.h +++ /dev/null @@ -1,17 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __SST_PROXY_H__ -#define __SST_PROXY_H__ - -// renaming the struct for easier update, and reference, in MixAudio code. - -gboolean mix_sst_params_convert(MixAudioConfigParams *params, struct snd_sst_params *s); -MixAudioConfigParams *mix_sst_params_to_acp(struct snd_sst_get_stream_params *stream_params); - -#endif diff --git a/mix_audio/tests/Makefile.am b/mix_audio/tests/Makefile.am deleted file mode 100644 index 372e488..0000000 --- a/mix_audio/tests/Makefile.am +++ /dev/null @@ -1,2 +0,0 @@ -SUBDIRS = smoke - diff --git a/mix_audio/tests/smoke/Makefile.am b/mix_audio/tests/smoke/Makefile.am deleted file mode 100644 index 0a373ec..0000000 --- a/mix_audio/tests/smoke/Makefile.am +++ /dev/null @@ -1,25 +0,0 @@ -#INTEL CONFIDENTIAL -#Copyright 2009 Intel Corporation All Rights Reserved. 
-#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -# - -noinst_PROGRAMS = mixaudiosmoke - -############################################################################## -# sources used to compile -mixaudiosmoke_SOURCES = mixaudiosmoke.c - -# flags used to compile this plugin -# add other _CFLAGS and _LIBS as needed -mixaudiosmoke_CFLAGS = -I$(top_srcdir)/src $(GLIB_CFLAGS) $(GOBJECT_CFLAGS) $(MIXCOMMON_CFLAGS) -mixaudiosmoke_LDADD = $(GLIB_LIBS) $(GOBJECT_LIBS) $(top_srcdir)/src/libmixaudio.la $(MIXCOMMON_LIBS) -#mixaudiosmoke_LDFLAGS = $(GLIB_LIBS) $(GOBJECT_LIBS) $(MIXCOMMON_LIBS) -mixaudiosmoke_LIBTOOLFLAGS = --tag=disable-static - -# headers we need but don't want installed -noinst_HEADERS = - - - diff --git a/mix_audio/tests/smoke/mixaudiosmoke.c b/mix_audio/tests/smoke/mixaudiosmoke.c deleted file mode 100644 index 8f81108..0000000 --- a/mix_audio/tests/smoke/mixaudiosmoke.c +++ /dev/null @@ -1,77 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#include -#include "mixaudio.h" -#include "mixparams.h" -#include "mixacp.h" -#include "mixacpmp3.h" - -void test_getversion() -{ - g_printf("Calling mixaudio_getversion...\n"); - { - guint major = 0; - guint minor = 0; - MIX_RESULT ret = mix_audio_get_version(&major, &minor); - if (MIX_SUCCEEDED(ret)) - { - g_printf("MixAudio Version %u.%u\n", major, minor); - } - else - g_printf("mixaudio_getversion() failed! 
Ret code : 0x%08x\n", ret); - } -} - -int main (int argc, char **argv) -{ - g_type_init(); - - g_printf("Smoke test for MixAudio and structs\n"); - - test_getversion(); - - g_printf("Creating MixAudio...\n"); - MixAudio *ma = mix_audio_new(); - if (MIX_IS_AUDIO(ma)) - { - g_printf("Successful.\n"); - - } - else - { - g_printf("Failed.\n"); - } - - g_printf("Creating MixAudioConfigParams...\n"); - MixAudioConfigParams *map = mix_acp_new(); - if (MIX_IS_AUDIOCONFIGPARAMS(map)) - { - g_printf("Successful.\n"); - - g_printf("Destroying MixAudioConfigParams...\n"); - mix_acp_unref(map); - g_printf("Successful.\n"); - } - else - { - g_printf("Failed.\n"); - } - g_printf("Creating mp3 config params...\n"); - MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new(); - - mp3->CRC = 0; - - g_printf("Destroying MixAudio...\n"); - mix_audio_unref(ma); - g_printf("Successful.\n"); - - g_printf("Smoke completed.\n"); -} - - diff --git a/mix_common/src/Android.mk b/mix_common/src/Android.mk index c29bf6f..4042f95 100644 --- a/mix_common/src/Android.mk +++ b/mix_common/src/Android.mk @@ -2,22 +2,21 @@ LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES := \ - mixlog.c \ - mixparams.c \ - mixdrmparams.c \ + mixlog.cpp \ + mixparams.cpp \ + mixdrmparams.cpp LOCAL_C_INCLUDES := \ $(LOCAL_PATH) \ - $(GLIB_TOP) \ + $(GLIB_TOP) \ $(GLIB_TOP)/android \ - $(GLIB_TOP)/glib \ - $(GLIB_TOP)/gobject + $(GLIB_TOP)/glib + +LOCAL_CFLAGS := -DANDROID LOCAL_SHARED_LIBRARIES := \ - libglib-2.0 \ - libgobject-2.0 \ - libgthread-2.0 \ - libgmodule-2.0 + libglib-2.0 +# libgmodule-2.0 LOCAL_COPY_HEADERS_TO := libmixcommon @@ -27,7 +26,7 @@ LOCAL_COPY_HEADERS := \ mixparams.h \ mixdrmparams.h -LOCAL_MODULE := libmixcommon LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libmixcommon include $(BUILD_SHARED_LIBRARY) diff --git a/mix_common/src/mixdrmparams.c b/mix_common/src/mixdrmparams.c deleted file mode 100644 index 82e3f39..0000000 --- a/mix_common/src/mixdrmparams.c +++ /dev/null @@ -1,163 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** - * SECTION:mixdrmparams - * @short_description: Drm parameters - * - * A data object which stores drm specific parameters. 
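
The smoke test deleted above exercises nothing but object lifecycle: query the library version, construct each type, verify the instance check, tear down. That shape is worth keeping even after the GObject layer goes away. A compact sketch of the same create/verify/destroy flow, with hypothetical stand-ins for the MixAudio calls (mix_audio_new / MIX_IS_AUDIO / mix_audio_unref):

#include <cstdio>

// Hypothetical stand-in for a refcounted Mix object.
struct Handle { int refcount = 1; };
static Handle* object_new()               { return new Handle(); }
static bool    object_is_valid(Handle* h) { return h != nullptr; }
static void    object_unref(Handle* h)    { if (h && --h->refcount == 0) delete h; }

// Each step prints a verdict so a failure is visible from the console,
// which is all a smoke test promises.
static int smoke_lifecycle(const char* name) {
    std::printf("Creating %s...\n", name);
    Handle* h = object_new();
    if (!object_is_valid(h)) {
        std::printf("Failed.\n");
        return 1;
    }
    std::printf("Successful.\n");
    object_unref(h);
    return 0;
}

int main() {
    return smoke_lifecycle("MixAudio stand-in");
}
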
- */ - -#include "mixdrmparams.h" - -static GType _mix_drmparams_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_drmparams_type = g_define_type_id; } - -gboolean mix_drmparams_copy(MixParams* target, const MixParams *src); -MixParams* mix_drmparams_dup(const MixParams *obj); -gboolean mix_drmparams_equal(MixParams* first, MixParams *second); -static void mix_drmparams_finalize(MixParams *obj); - -G_DEFINE_TYPE_WITH_CODE(MixDrmParams, mix_drmparams, MIX_TYPE_PARAMS, _do_init); - -void -_mix_drmparams_initialize (void) -{ - /* the MixParams types need to be class_ref'd once before it can be - * done from multiple threads; - * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ - g_type_class_ref (mix_drmparams_get_type ()); -} - -static void mix_drmparams_init (MixDrmParams *self) -{ -} - -static void mix_drmparams_class_init(MixDrmParamsClass *klass) -{ - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent (klass); - - mixparams_class->finalize = mix_drmparams_finalize; - mixparams_class->copy = (MixParamsCopyFunction)mix_drmparams_copy; - mixparams_class->dup = (MixParamsDupFunction)mix_drmparams_dup; - mixparams_class->equal = (MixParamsEqualFunction)mix_drmparams_equal; -} - -MixDrmParams *mix_drmparams_new(void) -{ - MixDrmParams *ret = (MixDrmParams *)g_type_create_instance (MIX_TYPE_DRMPARAMS); - - return ret; -} - -void mix_drmparams_finalize(MixParams *obj) -{ - /* clean up here. */ - - /* Chain up parent */ - if (parent_class->finalize) - parent_class->finalize(obj); -} - -MixDrmParams *mix_drmparams_ref(MixDrmParams *mix) -{ - return (MixDrmParams*)mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_drmparams_dup: - * @obj: a #MixDrmParams object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixParams* mix_drmparams_dup(const MixParams *obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_DRMPARAMS(obj)) - { - MixDrmParams *duplicate = mix_drmparams_new(); - if (mix_drmparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) - { - ret = MIX_PARAMS(duplicate); - } - else - { - mix_drmparams_unref(duplicate); - } - } - - return ret;; -} - -/** - * mix_drmparams_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_drmparams_copy(MixParams* target, const MixParams *src) -{ - if (MIX_IS_DRMPARAMS(target) && MIX_IS_DRMPARAMS(src)) - { - // TODO perform copy. - // - // Now chainup base class - // Get the root class from the cached parent_class object. This cached parent_class object has not be overwritten by this current class. - // Using the cached parent_class object because this_class would have ->copy pointing to this method! - // Cached parent_class contains the class object before it is overwritten by this derive class. - // MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (parent_class->copy) - { - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src)); - } - else - return TRUE; - } - return FALSE; -} - -/** - * mix_drmparams_equal: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. 
- */ -gboolean mix_drmparams_equal(MixParams* first, MixParams *second) -{ - gboolean ret = TRUE; - - if (MIX_IS_DRMPARAMS(first) && MIX_IS_DRMPARAMS(second)) - { - // TODO: do deep compare - - if (ret) - { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - } - } - - return ret; -} - - diff --git a/mix_common/src/mixdrmparams.cpp b/mix_common/src/mixdrmparams.cpp new file mode 100644 index 0000000..c75b184 --- /dev/null +++ b/mix_common/src/mixdrmparams.cpp @@ -0,0 +1,45 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixdrmparams + * @short_description: Drm parameters + * + * A data object which stores drm specific parameters. + */ + +#include "mixdrmparams.h" + + +MixDrmParams::MixDrmParams() { +} + +MixDrmParams::~MixDrmParams() { +} + +MixDrmParams *mix_drmparams_new(void) { + return new MixDrmParams(); +} + +MixDrmParams *mix_drmparams_ref(MixDrmParams *mix) { + return (MixDrmParams*)mix_params_ref(MIX_PARAMS(mix)); +} + +MixParams * MixDrmParams::dup () const { + MixParams* dup = new MixDrmParams(); + if (NULL != dup) { + if (FALSE == copy(dup)) { + dup->Unref(); + dup = NULL; + } + } + return dup; +} + + + diff --git a/mix_common/src/mixdrmparams.h b/mix_common/src/mixdrmparams.h index 7ef82fb..7dc7512 100644 --- a/mix_common/src/mixdrmparams.h +++ b/mix_common/src/mixdrmparams.h @@ -12,18 +12,11 @@ #include "mixparams.h" -/** - * MIX_TYPE_DRMPARAMS: - * - * Get type of class. - */ -#define MIX_TYPE_DRMPARAMS (mix_drmparams_get_type ()) - /** * MIX_DRMPARAMS: * @obj: object to be type-casted. */ -#define MIX_DRMPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DRMPARAMS, MixDrmParams)) +#define MIX_DRMPARAMS(obj) (reinterpret_cast(obj)) /** * MIX_IS_DRMPARAMS: @@ -31,67 +24,20 @@ * * Checks if the given object is an instance of #MixParams */ -#define MIX_IS_DRMPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DRMPARAMS)) - -/** - * MIX_DRMPARAMS_CLASS: - * @klass: class to be type-casted. - */ -#define MIX_DRMPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DRMPARAMS, MixDrmParamsClass)) - -/** - * MIX_IS_DRMPARAMS_CLASS: - * @klass: a class. - * - * Checks if the given class is #MixParamsClass - */ -#define MIX_IS_DRMPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DRMPARAMS)) - -/** - * MIX_DRMPARAMS_GET_CLASS: - * @obj: a #MixParams object. 
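
The new mixdrmparams.cpp above is the template for the whole migration: the GType registration, the cached parent_class pointer, and the copy/dup/equal/finalize slots on the class struct all collapse into a class with virtual member functions, and dup() keeps the old allocate, copy, unref-on-failure shape. A reduced sketch of the pattern, with a simplified base standing in for the real MixParams (which follows below in mixparams.h/.cpp):

// Simplified stand-in for MixParams; only what the dup() pattern needs.
class Params {
public:
    virtual ~Params() {}
    virtual bool copy(Params* target) const { return target != nullptr; }
    virtual Params* dup() const {
        Params* p = new Params();
        if (!copy(p)) { p->unref(); return nullptr; }
        return p;
    }
    void unref() { if (--ref_count_ == 0) delete this; }
protected:
    int ref_count_ = 1;
};

// Derived type: exactly the MixDrmParams::dup() shape from the hunk above --
// allocate the derived type, copy into it, drop the half-built object on failure.
class DrmParams : public Params {
public:
    Params* dup() const override {
        DrmParams* d = new DrmParams();
        if (!copy(d)) { d->unref(); return nullptr; }
        return d;
    }
};
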
- * - * Get the class instance of the object. - */ -#define MIX_DRMPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DRMPARAMS, MixDrmParamsClass)) - -typedef struct _MixDrmParams MixDrmParams; -typedef struct _MixDrmParamsClass MixDrmParamsClass; +#define MIX_IS_DRMPARAMS(obj) (NULL != MIX_DRMPARAMS(obj)) /** * MixDrmParams: * * MI-X Drm Parameter object */ -struct _MixDrmParams -{ - /*< public >*/ - MixParams parent; - - /*< public >*/ +class MixDrmParams : public MixParams { +public: + MixDrmParams(); + virtual ~MixDrmParams(); + virtual MixParams * dup () const; }; -/** - * MixDrmParamsClass: - * - * MI-X Drm object class - */ -struct _MixDrmParamsClass -{ - /*< public >*/ - MixParamsClass parent_class; - - /* class members */ -}; - -/** - * mix_drmparams_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_drmparams_get_type (void); - /** * mix_drmparams_new: * @returns: A newly allocated instance of #MixDrmParams diff --git a/mix_common/src/mixlog.c b/mix_common/src/mixlog.c deleted file mode 100644 index 239920e..0000000 --- a/mix_common/src/mixlog.c +++ /dev/null @@ -1,260 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
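
One semantic loss in the header rewrite just above: MIX_IS_DRMPARAMS used to ask the GType system whether the instance really was a MixDrmParams, while the replacement reinterpret_cast plus NULL comparison only catches null pointers, never a wrong type. Since the new base class is polymorphic, a real check could use RTTI; a sketch of that alternative (not what the patch chooses -- it deliberately trades the check for a zero-cost cast):

// dynamic_cast recovers the true dynamic type via RTTI; it returns nullptr
// when obj does not actually point at a Derived.
template <typename Derived, typename Base>
static bool is_instance_of(Base* obj) {
    return dynamic_cast<Derived*>(obj) != nullptr;
}

// Usage, with the names from the surrounding headers:
//   MixParams* p = MIX_PARAMS(mix_drmparams_new());
//   if (is_instance_of<MixDrmParams>(p)) { /* genuinely a MixDrmParams */ }
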
- */ - -#include -#include -#include "mixlog.h" - -#define MIX_DELOG_COMPS "MIX_DELOG_COMPS" -#define MIX_DELOG_FILES "MIX_DELOG_FILES" -#define MIX_DELOG_FUNCS "MIX_DELOG_FUNCS" -#define MIX_LOG_ENABLE "MIX_LOG_ENABLE" -#define MIX_DELOG_DELIMITERS " ,;" - -#define MIX_LOG_LEVEL "MIX_LOG_LEVEL" - -#ifndef ANDROID - -static GStaticMutex g_mutex = G_STATIC_MUTEX_INIT; - -#ifdef MIX_LOG_USE_HT -static GHashTable *g_defile_ht = NULL, *g_defunc_ht = NULL, *g_decom_ht = NULL; -static gint g_mix_log_level = MIX_LOG_LEVEL_VERBOSE; -static gint g_refcount = 0; - -#define mix_log_destroy_ht(ht) if(ht) { g_hash_table_destroy(ht); ht = NULL; } - -void mix_log_get_ht(GHashTable **ht, const gchar *var) { - - const char *delog_list = NULL; - char *item = NULL; - if (!ht || !var) { - return; - } - - delog_list = g_getenv(var); - if (!delog_list) { - return; - } - - if (*ht == NULL) { - *ht = g_hash_table_new(g_str_hash, g_str_equal); - if (*ht == NULL) { - return; - } - } - - item = strtok((char *) delog_list, MIX_DELOG_DELIMITERS); - while (item != NULL) { - g_hash_table_insert(*ht, item, "true"); - item = strtok(NULL, MIX_DELOG_DELIMITERS); - } -} - -void mix_log_initialize_func() { - - const gchar *mix_log_level = NULL; - g_static_mutex_lock(&g_mutex); - - if (g_refcount == 0) { - - mix_log_level = g_getenv(MIX_LOG_LEVEL); - if (mix_log_level) { - g_mix_log_level = atoi(mix_log_level); - } - - mix_log_get_ht(&g_decom_ht, MIX_DELOG_COMPS); - mix_log_get_ht(&g_defile_ht, MIX_DELOG_FILES); - mix_log_get_ht(&g_defunc_ht, MIX_DELOG_FUNCS); - } - - g_refcount++; - - g_static_mutex_unlock(&g_mutex); -} - -void mix_log_finalize_func() { - - g_static_mutex_lock(&g_mutex); - - g_refcount--; - - if (g_refcount == 0) { - mix_log_destroy_ht(g_decom_ht); - mix_log_destroy_ht(g_defile_ht); - mix_log_destroy_ht(g_defunc_ht); - - g_mix_log_level = MIX_LOG_LEVEL_VERBOSE; - } - - if (g_refcount < 0) { - g_refcount = 0; - } - - g_static_mutex_unlock(&g_mutex); -} - -void mix_log_func(const gchar* comp, gint level, const gchar *file, - const gchar *func, gint line, const gchar *format, ...) 
{ - - va_list args; - static gchar* loglevel[4] = {"**ERROR", "*WARNING", "INFO", "VERBOSE"}; - - if (!format) { - return; - } - - g_static_mutex_lock(&g_mutex); - - if (level > g_mix_log_level) { - goto exit; - } - - if (g_decom_ht) { - if (g_hash_table_lookup(g_decom_ht, comp)) { - goto exit; - } - } - - if (g_defile_ht) { - if (g_hash_table_lookup(g_defile_ht, file)) { - goto exit; - } - } - - if (g_defunc_ht) { - if (g_hash_table_lookup(g_defunc_ht, func)) { - goto exit; - } - } - - if(level > MIX_LOG_LEVEL_VERBOSE) { - level = MIX_LOG_LEVEL_VERBOSE; - } - if(level < MIX_LOG_LEVEL_ERROR) { - level = MIX_LOG_LEVEL_ERROR; - } - - g_print("%s : %s : %s : ", loglevel[level - 1], file, func); - - va_start(args, format); - g_vprintf(format, args); - va_end(args); - - exit: g_static_mutex_unlock(&g_mutex); -} - -#else /* MIX_LOG_USE_HT */ - -gboolean mix_shall_delog(const gchar *name, const gchar *var) { - - const char *delog_list = NULL; - char *item = NULL; - gboolean delog = FALSE; - - if (!name || !var) { - return delog; - } - - delog_list = g_getenv(var); - if (!delog_list) { - return delog; - } - - item = strtok((char *) delog_list, MIX_DELOG_DELIMITERS); - while (item != NULL) { - if (strcmp(item, name) == 0) { - delog = TRUE; - break; - } - item = strtok(NULL, MIX_DELOG_DELIMITERS); - } - - return delog; -} - -gboolean mix_log_enabled() { - - const char *value = NULL; - value = g_getenv(MIX_LOG_ENABLE); - if(!value) { - return FALSE; - } - - if(value[0] == '0') { - return FALSE; - } - return TRUE; -} - -void mix_log_func(const gchar* comp, gint level, const gchar *file, - const gchar *func, gint line, const gchar *format, ...) { - - va_list args; - static gchar* loglevel[4] = { "**ERROR", "*WARNING", "INFO", "VERBOSE" }; - - const gchar *env_mix_log_level = NULL; - gint mix_log_level_threhold = MIX_LOG_LEVEL_VERBOSE; - - if(!mix_log_enabled()) { - return; - } - - if (!format) { - return; - } - - g_static_mutex_lock(&g_mutex); - - /* log level */ - env_mix_log_level = g_getenv(MIX_LOG_LEVEL); - if (env_mix_log_level) { - mix_log_level_threhold = atoi(env_mix_log_level); - } - - if (level > mix_log_level_threhold) { - goto exit; - } - - /* component */ - if (mix_shall_delog(comp, MIX_DELOG_COMPS)) { - goto exit; - } - - /* files */ - if (mix_shall_delog(file, MIX_DELOG_FILES)) { - goto exit; - } - - /* functions */ - if (mix_shall_delog(func, MIX_DELOG_FUNCS)) { - goto exit; - } - - if (level > MIX_LOG_LEVEL_VERBOSE) { - level = MIX_LOG_LEVEL_VERBOSE; - } - if (level < MIX_LOG_LEVEL_ERROR) { - level = MIX_LOG_LEVEL_ERROR; - } - - g_print("%s : %s : %s : ", loglevel[level - 1], file, func); - - va_start(args, format); - g_vprintf(format, args); - va_end(args); - -exit: - g_static_mutex_unlock(&g_mutex); -} - - -#endif /* MIX_LOG_USE_HT */ - -#endif /* !ANDROID */ - diff --git a/mix_common/src/mixlog.cpp b/mix_common/src/mixlog.cpp new file mode 100644 index 0000000..bef6a24 --- /dev/null +++ b/mix_common/src/mixlog.cpp @@ -0,0 +1,263 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#include +#include "mixlog.h" + +#ifndef ANDROID +#include +#endif + +#define MIX_DELOG_COMPS "MIX_DELOG_COMPS" +#define MIX_DELOG_FILES "MIX_DELOG_FILES" +#define MIX_DELOG_FUNCS "MIX_DELOG_FUNCS" +#define MIX_LOG_ENABLE "MIX_LOG_ENABLE" +#define MIX_DELOG_DELIMITERS " ,;" + +#define MIX_LOG_LEVEL "MIX_LOG_LEVEL" + +#ifndef ANDROID + +static GStaticMutex g_mutex = G_STATIC_MUTEX_INIT; + +#ifdef MIX_LOG_USE_HT +static GHashTable *g_defile_ht = NULL, *g_defunc_ht = NULL, *g_decom_ht = NULL; +static gint g_mix_log_level = MIX_LOG_LEVEL_VERBOSE; +static gint g_refcount = 0; + +#define mix_log_destroy_ht(ht) if(ht) { g_hash_table_destroy(ht); ht = NULL; } + +void mix_log_get_ht(GHashTable **ht, const gchar *var) { + + const char *delog_list = NULL; + char *item = NULL; + if (!ht || !var) { + return; + } + + delog_list = g_getenv(var); + if (!delog_list) { + return; + } + + if (*ht == NULL) { + *ht = g_hash_table_new(g_str_hash, g_str_equal); + if (*ht == NULL) { + return; + } + } + + item = strtok((char *) delog_list, MIX_DELOG_DELIMITERS); + while (item != NULL) { + g_hash_table_insert(*ht, item, "true"); + item = strtok(NULL, MIX_DELOG_DELIMITERS); + } +} + +void mix_log_initialize_func() { + + const gchar *mix_log_level = NULL; + g_static_mutex_lock(&g_mutex); + + if (g_refcount == 0) { + + mix_log_level = g_getenv(MIX_LOG_LEVEL); + if (mix_log_level) { + g_mix_log_level = atoi(mix_log_level); + } + + mix_log_get_ht(&g_decom_ht, MIX_DELOG_COMPS); + mix_log_get_ht(&g_defile_ht, MIX_DELOG_FILES); + mix_log_get_ht(&g_defunc_ht, MIX_DELOG_FUNCS); + } + + g_refcount++; + + g_static_mutex_unlock(&g_mutex); +} + +void mix_log_finalize_func() { + + g_static_mutex_lock(&g_mutex); + + g_refcount--; + + if (g_refcount == 0) { + mix_log_destroy_ht(g_decom_ht); + mix_log_destroy_ht(g_defile_ht); + mix_log_destroy_ht(g_defunc_ht); + + g_mix_log_level = MIX_LOG_LEVEL_VERBOSE; + } + + if (g_refcount < 0) { + g_refcount = 0; + } + + g_static_mutex_unlock(&g_mutex); +} + +void mix_log_func(const gchar* comp, gint level, const gchar *file, + const gchar *func, gint line, const gchar *format, ...) 
{ + + va_list args; + static gchar* loglevel[4] = {"**ERROR", "*WARNING", "INFO", "VERBOSE"}; + + if (!format) { + return; + } + + g_static_mutex_lock(&g_mutex); + + if (level > g_mix_log_level) { + goto exit; + } + + if (g_decom_ht) { + if (g_hash_table_lookup(g_decom_ht, comp)) { + goto exit; + } + } + + if (g_defile_ht) { + if (g_hash_table_lookup(g_defile_ht, file)) { + goto exit; + } + } + + if (g_defunc_ht) { + if (g_hash_table_lookup(g_defunc_ht, func)) { + goto exit; + } + } + + if(level > MIX_LOG_LEVEL_VERBOSE) { + level = MIX_LOG_LEVEL_VERBOSE; + } + if(level < MIX_LOG_LEVEL_ERROR) { + level = MIX_LOG_LEVEL_ERROR; + } + + g_print("%s : %s : %s : ", loglevel[level - 1], file, func); + + va_start(args, format); + g_vprintf(format, args); + va_end(args); + + exit: g_static_mutex_unlock(&g_mutex); +} + +#else /* MIX_LOG_USE_HT */ + +gboolean mix_shall_delog(const gchar *name, const gchar *var) { + + const char *delog_list = NULL; + char *item = NULL; + gboolean delog = FALSE; + + if (!name || !var) { + return delog; + } + + delog_list = g_getenv(var); + if (!delog_list) { + return delog; + } + + item = strtok((char *) delog_list, MIX_DELOG_DELIMITERS); + while (item != NULL) { + if (strcmp(item, name) == 0) { + delog = TRUE; + break; + } + item = strtok(NULL, MIX_DELOG_DELIMITERS); + } + + return delog; +} + +gboolean mix_log_enabled() { + + const char *value = NULL; + value = g_getenv(MIX_LOG_ENABLE); + if(!value) { + return FALSE; + } + + if(value[0] == '0') { + return FALSE; + } + return TRUE; +} + +void mix_log_func(const gchar* comp, gint level, const gchar *file, + const gchar *func, gint line, const gchar *format, ...) { + + va_list args; + static gchar* loglevel[4] = { "**ERROR", "*WARNING", "INFO", "VERBOSE" }; + + const gchar *env_mix_log_level = NULL; + gint mix_log_level_threhold = MIX_LOG_LEVEL_VERBOSE; + + if(!mix_log_enabled()) { + return; + } + + if (!format) { + return; + } + + g_static_mutex_lock(&g_mutex); + + /* log level */ + env_mix_log_level = g_getenv(MIX_LOG_LEVEL); + if (env_mix_log_level) { + mix_log_level_threhold = atoi(env_mix_log_level); + } + + if (level > mix_log_level_threhold) { + goto exit; + } + + /* component */ + if (mix_shall_delog(comp, MIX_DELOG_COMPS)) { + goto exit; + } + + /* files */ + if (mix_shall_delog(file, MIX_DELOG_FILES)) { + goto exit; + } + + /* functions */ + if (mix_shall_delog(func, MIX_DELOG_FUNCS)) { + goto exit; + } + + if (level > MIX_LOG_LEVEL_VERBOSE) { + level = MIX_LOG_LEVEL_VERBOSE; + } + if (level < MIX_LOG_LEVEL_ERROR) { + level = MIX_LOG_LEVEL_ERROR; + } + + g_print("%s : %s : %s : ", loglevel[level - 1], file, func); + + va_start(args, format); + g_vprintf(format, args); + va_end(args); + +exit: + g_static_mutex_unlock(&g_mutex); +} + + +#endif /* MIX_LOG_USE_HT */ + +#endif /* !ANDROID */ + diff --git a/mix_common/src/mixlog.h b/mix_common/src/mixlog.h index 99ab4e2..dd93046 100644 --- a/mix_common/src/mixlog.h +++ b/mix_common/src/mixlog.h @@ -39,6 +39,7 @@ void mix_log_func(const gchar* comp, gint level, const gchar *file, #include + #undef MIX_LOG_LEVEL_ERROR #undef MIX_LOG_LEVEL_WARNING #undef MIX_LOG_LEVEL_INFO diff --git a/mix_common/src/mixparams.c b/mix_common/src/mixparams.c deleted file mode 100644 index 2f8f8f6..0000000 --- a/mix_common/src/mixparams.c +++ /dev/null @@ -1,274 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. 
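
One hazard is carried verbatim from mixlog.c into mixlog.cpp above: mix_shall_delog() and mix_log_get_ht() feed the string returned by g_getenv() to strtok() after casting away const. strtok() writes NUL terminators into its argument, so this mutates the environment's own buffer, and strtok's hidden static state is not thread-safe either. A sketch of an equivalent tokenizer that works on a private copy instead, using the same " ,;" delimiter set as MIX_DELOG_DELIMITERS:

#include <string>
#include <vector>

// Split a delimiter-separated list without modifying the input buffer,
// unlike strtok() applied to the g_getenv() result.
static std::vector<std::string> split_list(const std::string& list,
                                           const char* delims = " ,;") {
    std::vector<std::string> items;
    std::string::size_type pos = 0;
    while (pos < list.size()) {
        std::string::size_type end = list.find_first_of(delims, pos);
        if (end == std::string::npos) end = list.size();
        if (end > pos) items.push_back(list.substr(pos, end - pos));  // skip empty tokens
        pos = end + 1;
    }
    return items;
}

// The mix_shall_delog(name, var) check then becomes a membership test over
// split_list(g_getenv(var)) with no side effects on the environment.
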
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** - * SECTION:mixparams - * @short_description: Lightweight base class for the MIX media params - * - */ -#ifdef HAVE_CONFIG_H -#include "config.h" -#endif - -#include "mixparams.h" -#include - - -#define DEBUG_REFCOUNT - -static void mix_params_class_init (gpointer g_class, gpointer class_data); -static void mix_params_init (GTypeInstance * instance, gpointer klass); - -static void mix_params_finalize(MixParams * obj); -static gboolean mix_params_copy_default (MixParams *target, const MixParams *src); -static MixParams *mix_params_dup_default(const MixParams *obj); -static gboolean mix_params_equal_default (MixParams *first, MixParams *second); - -GType mix_params_get_type (void) -{ - static GType _mix_params_type = 0; - - if (G_UNLIKELY (_mix_params_type == 0)) { - - GTypeInfo info = { - sizeof (MixParamsClass), - NULL, - NULL, - mix_params_class_init, - NULL, - NULL, - sizeof (MixParams), - 0, - (GInstanceInitFunc) mix_params_init, - NULL - }; - - static const GTypeFundamentalInfo fundamental_info = { - (G_TYPE_FLAG_CLASSED | G_TYPE_FLAG_INSTANTIATABLE | - G_TYPE_FLAG_DERIVABLE | G_TYPE_FLAG_DEEP_DERIVABLE) - }; - - info.value_table = NULL; - - _mix_params_type = g_type_fundamental_next (); - g_type_register_fundamental (_mix_params_type, "MixParams", &info, &fundamental_info, G_TYPE_FLAG_ABSTRACT); - - } - - return _mix_params_type; -} - -static void mix_params_class_init (gpointer g_class, gpointer class_data) -{ - MixParamsClass *klass = MIX_PARAMS_CLASS (g_class); - - klass->dup = mix_params_dup_default; - klass->copy = mix_params_copy_default; - klass->finalize = mix_params_finalize; - klass->equal = mix_params_equal_default; -} - -static void mix_params_init (GTypeInstance * instance, gpointer klass) -{ - MixParams *obj = MIX_PARAMS_CAST (instance); - - obj->refcount = 1; -} - -gboolean mix_params_copy (MixParams *target, const MixParams *src) -{ - /* Use the target object class. Because it knows what it is looking for. */ - MixParamsClass *klass = MIX_PARAMS_GET_CLASS(target); - if (klass->copy) - { - return klass->copy(target, src); - } - else - { - return mix_params_copy_default(target, src); - } -} - -/** - * mix_params_copy_default: - * @target: target - * @src: source - * - * The default copy method of this object. Perhap copy at this level. - * Assign this to the copy vmethod. - */ -static gboolean mix_params_copy_default (MixParams *target, const MixParams *src) -{ - if (MIX_IS_PARAMS(target) && MIX_IS_PARAMS(src)) - { - // TODO perform deep copy. 
- return TRUE; - } - return FALSE; -} - -static void mix_params_finalize (MixParams * obj) -{ - /* do nothing */ -} - -MixParams *mix_params_dup(const MixParams *obj) -{ - MixParamsClass *klass = MIX_PARAMS_GET_CLASS(obj); - - if (klass->dup) - { - return klass->dup(obj); - } - else if (MIX_IS_PARAMS(obj)) - { - return mix_params_dup_default(obj); - } - return NULL; -} - -static MixParams *mix_params_dup_default(const MixParams *obj) -{ - MixParams *ret = mix_params_new(); - if (mix_params_copy(ret, obj)) - { - return ret; - } - - return NULL; -} - -MixParams* mix_params_new (GType type) -{ - MixParams *obj; - - /* we don't support dynamic types because they really aren't useful, - * and could cause refcount problems */ - obj = (MixParams *) g_type_create_instance (type); - - return obj; -} - -MixParams* mix_params_ref (MixParams *obj) -{ - g_return_val_if_fail(MIX_IS_PARAMS (obj), NULL); - - g_atomic_int_inc(&obj->refcount); - - return obj; -} - -static void mix_params_free(MixParams *obj) -{ - MixParamsClass *klass = NULL; - - klass = MIX_PARAMS_GET_CLASS(obj); - klass->finalize(obj); - - /* Should we support recycling the object? */ - /* If so, refcount handling is slightly different. */ - /* i.e. If the refcount is still 0 we can really free the object, else the finalize method recycled the object -- but to where? */ - - if (g_atomic_int_get (&obj->refcount) == 0) { - - g_type_free_instance ((GTypeInstance *) obj); - } -} - -void mix_params_unref (MixParams *obj) -{ - g_return_if_fail (obj != NULL); - g_return_if_fail (obj->refcount > 0); - - if (G_UNLIKELY (g_atomic_int_dec_and_test (&obj->refcount))) { - mix_params_free (obj); - } -} - -/** - * mix_params_replace: - * @olddata: pointer to a pointer to a object to be replaced - * @newdata: pointer to new object - * - * Modifies a pointer to point to a new object. The modification - * is done atomically, and the reference counts are updated correctly. - * Either @newdata and the value pointed to by @olddata may be NULL. - */ -void mix_params_replace (MixParams **olddata, MixParams *newdata) -{ - MixParams *olddata_val; - - g_return_if_fail (olddata != NULL); - - olddata_val = g_atomic_pointer_get ((gpointer *) olddata); - - if (olddata_val == newdata) - return; - - if (newdata) - mix_params_ref (newdata); - - while (!g_atomic_pointer_compare_and_exchange ((gpointer *) olddata, olddata_val, newdata)) - { - olddata_val = g_atomic_pointer_get ((gpointer *) olddata); - } - - if (olddata_val) - mix_params_unref (olddata_val); - -} - -gboolean mix_params_equal (MixParams *first, MixParams *second) -{ - if (MIX_IS_PARAMS(first)) - { - MixParamsClass *klass = MIX_PARAMS_GET_CLASS(first); - - if (klass->equal) - { - return klass->equal(first, second); - } - else - { - return mix_params_equal_default(first, second); - } - } - else - return FALSE; -} - -static gboolean mix_params_equal_default (MixParams *first, MixParams *second) -{ - if (MIX_IS_PARAMS(first) && MIX_IS_PARAMS(second)) - { - gboolean ret = TRUE; - - // Do data comparison here. - - return ret; - } - else - return FALSE; -} - -/** - * mix_value_dup_params: - * @value: a valid #GValue of %MIX_TYPE_PARAMS derived type - * @returns: object contents of @value - * - * Get the contents of a #MIX_TYPE_PARAMS derived #GValue, - * increasing its reference count. 
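
The deleted mix_params_replace() above is the one genuinely lock-free piece of the old object layer: take a reference on the incoming object, compare-and-swap it into the slot until the CAS sticks, then drop the reference of whatever was displaced. The new mixparams.cpp below keeps the GLib g_atomic_pointer_* calls; for comparison, the same swap expressed with std::atomic (stand-in Obj type, not the Mix API):

#include <atomic>

struct Obj {
    std::atomic<int> refcount{1};
    void ref()   { refcount.fetch_add(1, std::memory_order_relaxed); }
    void unref() { if (refcount.fetch_sub(1, std::memory_order_acq_rel) == 1) delete this; }
};

// Atomically repoint *slot at next, fixing up both reference counts.
// Same contract as mix_params_replace(): either side may be null.
static void replace(std::atomic<Obj*>* slot, Obj* next) {
    if (next) next->ref();                 // pin before publishing
    Obj* prev = slot->load();
    while (!slot->compare_exchange_weak(prev, next)) {
        // prev is reloaded by compare_exchange_weak on failure
    }
    if (prev) prev->unref();               // release the displaced reference
}
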
- */ -MixParams* mix_value_dup_params (const GValue * value) -{ - g_return_val_if_fail (MIX_VALUE_HOLDS_PARAMS (value), NULL); - - return mix_params_ref (value->data[0].v_pointer); -} - - diff --git a/mix_common/src/mixparams.cpp b/mix_common/src/mixparams.cpp new file mode 100644 index 0000000..6489339 --- /dev/null +++ b/mix_common/src/mixparams.cpp @@ -0,0 +1,127 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixparams + * @short_description: Lightweight base class for the MIX media params + * + */ +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "mixparams.h" + + +#define DEBUG_REFCOUNT + +MixParams::MixParams() + :ref_count(1) + ,_reserved(NULL) { +} + +MixParams::~MixParams() { + finalize(); +} + +MixParams* MixParams::Ref() { + this->ref_count++; + return this; +} + +void MixParams::Unref() { + this->ref_count--; + if (0 == this->ref_count) { + delete this; + } +} + +MixParams* MixParams::dup() const { + MixParams *ret = new MixParams(); + if (FALSE != copy(ret)) { + return ret; + } + return NULL; +} + +gboolean MixParams::copy(MixParams* target) const { + gboolean ret = FALSE; + if ( NULL != target) { + return TRUE; + } + return ret; +} + +void MixParams::finalize() { +} + +gboolean MixParams::equal(MixParams *obj) const { + gboolean ret = FALSE; + if ( NULL != obj) { + return TRUE; + } + return ret; +} + +MixParams* mix_params_new () { + /* we don't support dynamic types because they really aren't useful,*/ + /* and could cause ref_count problems */ + return new MixParams(); +} + +gboolean mix_params_copy (MixParams *target, const MixParams *src) { + if ( NULL != target && NULL != src) { + return src->copy(target); + } else + return FALSE; +} + +MixParams* mix_params_ref (MixParams *obj) { + if (NULL == obj) + return NULL; + return obj->Ref(); +} + +void mix_params_unref(MixParams *obj) { + if (NULL != obj) + obj->Unref(); +} + +void mix_params_replace (MixParams **olddata, MixParams *newdata) { + if (NULL == olddata) + return; + MixParams *olddata_val = + reinterpret_cast(g_atomic_pointer_get((gpointer *) olddata)); + if (olddata_val == newdata) + return; + if (NULL != newdata) + newdata->Ref(); + while (!g_atomic_pointer_compare_and_exchange ((gpointer *) olddata, + olddata_val, newdata)) { + olddata_val = + reinterpret_cast(g_atomic_pointer_get ((gpointer *) olddata)); + } + if (NULL != olddata_val) + olddata_val->Unref(); +} + +MixParams * mix_params_dup(const MixParams *obj) { + if (NULL != obj) { + return 
obj->dup(); + } else { + return NULL; + } +} + +gboolean mix_params_equal (MixParams *first, MixParams *second) { + if (NULL != first && NULL != second) + return first->equal(second); + else + return FALSE; +} + diff --git a/mix_common/src/mixparams.h b/mix_common/src/mixparams.h index 75d4051..c1a19a5 100644 --- a/mix_common/src/mixparams.h +++ b/mix_common/src/mixparams.h @@ -11,80 +11,16 @@ #include -G_BEGIN_DECLS - -#define MIX_TYPE_PARAMS (mix_params_get_type()) -#define MIX_IS_PARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_PARAMS)) -#define MIX_IS_PARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_PARAMS)) -#define MIX_PARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_PARAMS, MixParamsClass)) -#define MIX_PARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_PARAMS, MixParams)) -#define MIX_PARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_PARAMS, MixParamsClass)) +#define MIX_PARAMS(obj) (reinterpret_cast ((obj))) #define MIX_PARAMS_CAST(obj) ((MixParams*)(obj)) -typedef struct _MixParams MixParams; -typedef struct _MixParamsClass MixParamsClass; - -/** - * MixParamsDupFunction: - * @obj: Params to duplicate - * @returns: reference to cloned instance. - * - * Virtual function prototype for methods to create duplicate of instance. - * - */ -typedef MixParams * (*MixParamsDupFunction) (const MixParams *obj); - -/** - * MixParamsCopyFunction: - * @target: target of the copy - * @src: source of the copy - * @returns: boolean indicates if copy is successful. - * - * Virtual function prototype for methods to create copies of instance. - * - */ -typedef gboolean (*MixParamsCopyFunction) (MixParams* target, const MixParams *src); - -/** - * MixParamsFinalizeFunction: - * @obj: Params to finalize - * - * Virtual function prototype for methods to free ressources used by - * object. - */ -typedef void (*MixParamsFinalizeFunction) (MixParams *obj); - -/** - * MixParamsEqualsFunction: - * @first: first object in the comparison - * @second: second object in the comparison - * - * Virtual function prototype for methods to compare 2 objects and check if they are equal. - */ -typedef gboolean (*MixParamsEqualFunction) (MixParams *first, MixParams *second); - -/** - * MIX_VALUE_HOLDS_PARAMS: - * @value: the #GValue to check - * - * Checks if the given #GValue contains a #MIX_TYPE_PARAM value. - */ -#define MIX_VALUE_HOLDS_PARAMS(value) (G_VALUE_HOLDS(value, MIX_TYPE_PARAMS)) - /** * MIX_PARAMS_REFCOUNT: * @obj: a #MixParams * * Get access to the reference count field of the object. */ -#define MIX_PARAMS_REFCOUNT(obj) ((MIX_PARAMS_CAST(obj))->refcount) -/** - * MIX_PARAMS_REFCOUNT_VALUE: - * @obj: a #MixParams - * - * Get the reference count value of the object - */ -#define MIX_PARAMS_REFCOUNT_VALUE(obj) (g_atomic_int_get (&(MIX_PARAMS_CAST(obj))->refcount)) +#define MIX_PARAMS_REFCOUNT(obj) ((MIX_PARAMS_CAST(obj))->ref_count) /** * MixParams: @@ -93,34 +29,62 @@ typedef gboolean (*MixParamsEqualFunction) (MixParams *first, MixParams *second) * * Base class for a refcounted parameter objects. */ -struct _MixParams { - GTypeInstance instance; - /*< public >*/ - gint refcount; +class MixParams { + +public: + MixParams(); + virtual ~MixParams(); + MixParams* Ref(); + void Unref(); + gint GetRefCount() { return ref_count;} + +public: + /** + * MixParamsDupFunction: + * @obj: Params to duplicate + * @returns: reference to cloned instance. + * + * Virtual function prototype for methods to create duplicate of instance. 
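
A behavioural regression worth flagging in the new MixParams above: Ref() and Unref() use plain ++/-- on a gint, where the deleted C version used g_atomic_int_inc() and g_atomic_int_dec_and_test(). If params objects are ever shared across threads, the migrated count can race and double-free. A sketch of the counter with atomicity restored -- an assumption about intent, not what this patch ships:

#include <atomic>

class RefCounted {
public:
    virtual ~RefCounted() {}
    RefCounted* Ref() {
        ref_count_.fetch_add(1, std::memory_order_relaxed);
        return this;
    }
    void Unref() {
        // acq_rel ordering: the thread that deletes must observe every
        // write made by threads that released their references earlier.
        if (ref_count_.fetch_sub(1, std::memory_order_acq_rel) == 1)
            delete this;
    }
private:
    std::atomic<int> ref_count_{1};
};
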
+ * + */ + virtual MixParams * dup () const; + + /** + * MixParamsCopyFunction: + * @target: target of the copy + * @src: source of the copy + * @returns: boolean indicates if copy is successful. + * + * Virtual function prototype for methods to create copies of instance. + * + */ + virtual gboolean copy(MixParams* target) const; + + /** + * MixParamsFinalizeFunction: + * @obj: Params to finalize + * + * Virtual function prototype for methods to free ressources used by + * object. + */ + virtual void finalize (); + + /** + * MixParamsEqualsFunction: + * @first: first object in the comparison + * @second: second object in the comparison + * + * Virtual function prototype for methods to compare 2 objects and check if they are equal. + */ + virtual gboolean equal (MixParams *obj) const; + +public: + /*< public >*/ + gint ref_count; + + /*< private >*/ + gpointer _reserved; - /*< private >*/ - gpointer _reserved; -}; - -/** - * MixParamsClass: - * @dup: method to duplicate the object. - * @copy: method to copy details in one object to the other. - * @finalize: destructor - * @equal: method to check if the content of two objects are equal. - * - * #MixParams class strcut. - */ -struct _MixParamsClass { - GTypeClass type_class; - - MixParamsDupFunction dup; - MixParamsCopyFunction copy; - MixParamsFinalizeFunction finalize; - MixParamsEqualFunction equal; - - /*< private >*/ - gpointer _mix_reserved; }; /** @@ -129,7 +93,7 @@ struct _MixParamsClass { * * Get type. */ -GType mix_params_get_type(void); +//GType mix_params_get_type(void); /** * mix_params_new: @@ -170,10 +134,12 @@ void mix_params_unref (MixParams *obj); /** * mix_params_replace: - * @olddata: - * @newdata: - * - * Replace a pointer of the object with the new one. + * @olddata: pointer to a pointer to a object to be replaced + * @newdata: pointer to new object + * + * Modifies a pointer to point to a new object. The modification + * is done atomically, and the reference counts are updated correctly. + * Either @newdata and the value pointed to by @olddata may be NULL. */ void mix_params_replace(MixParams **olddata, MixParams *newdata); @@ -195,8 +161,5 @@ MixParams *mix_params_dup(const MixParams *obj); * Note that the parameter comparison compares the values that are hold inside the object, not for checking if the 2 pointers are of the same instance. 
*/ gboolean mix_params_equal(MixParams *first, MixParams *second); - -G_END_DECLS - #endif diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h index 4de5245..4618532 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h +++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h @@ -1029,6 +1029,10 @@ struct h264_viddec_parser h264_Info info; }; +#ifdef __cplusplus +} +#endif + diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h index c255980..88db5de 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h +++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h @@ -12,6 +12,9 @@ // The following part is only for Parser Debug /////////////////////////////////////////////////////////////////// +#ifdef __cplusplus +extern "C" { +#endif enum h264_debug_point_id @@ -113,14 +116,6 @@ extern h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent,h264_Info* pInfo, -//////////////////////////////////////////////////////////////////// -///////////////////////////// utils functions -//////////////////////////////////////////////////////////////////// -extern int32_t h264_is_new_picture_start(h264_Info* pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice); -extern int32_t h264_is_second_field(h264_Info * pInfo); -///// Math functions -uint32_t ldiv_mod_u(uint32_t a, uint32_t b, uint32_t * mod); -uint32_t mult_u(uint32_t var1, uint32_t var2); ///// Mem functions extern void* h264_memset( void* buf, uint32_t c, uint32_t num ); extern void* h264_memcpy( void* dest, void* src, uint32_t num ); @@ -147,6 +142,18 @@ extern void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ); extern void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ); extern void h264_parse_emit_eos( void *parent, h264_Info *pInfo ); +#ifdef __cplusplus +} +#endif + +//////////////////////////////////////////////////////////////////// +///////////////////////////// utils functions +//////////////////////////////////////////////////////////////////// +extern int32_t h264_is_new_picture_start(h264_Info* pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice); +extern int32_t h264_is_second_field(h264_Info * pInfo); +///// Math functions +uint32_t ldiv_mod_u(uint32_t a, uint32_t b, uint32_t * mod); +uint32_t mult_u(uint32_t var1, uint32_t var2); diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h index 2a19b5f..8b4deb4 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h +++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h @@ -101,6 +101,10 @@ extern void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t dire extern frame_store *active_fs; +#ifdef __cplusplus +} +#endif + #endif //_H264_DPB_CTL_H_ diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk b/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk index bacf49b..70f1388 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk @@ -26,8 +26,8 @@ LOCAL_C_INCLUDES := \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/h264/include -LOCAL_MODULE := libmixvbp_h264 LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libmixvbp_h264 LOCAL_SHARED_LIBRARIES := \ libglib-2.0 \ diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c 
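
The h264 header hunks above apply the standard mixed-linkage fix: C declarations get wrapped in extern "C" so that, when the including translation unit is C++, the symbols keep unmangled C names. Note the patch also moves the util and math prototypes outside the guarded region in h264parse.h, which only links if their definitions are now compiled as C++ as well. The idiom itself, reduced to essentials with a hypothetical function name:

/* some_c_header.h -- the shape of the guard added above */
#ifdef __cplusplus
extern "C" {
#endif

int c_parser_entry(const unsigned char* buf, unsigned int len);  /* C linkage */

#ifdef __cplusplus
}   /* extern "C" */
#endif
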
b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c index 2793dbd..307a0da 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c @@ -1,7 +1,9 @@ //#include "math.h" // Arithmatic functions using add & subtract -unsigned long mult_u(register unsigned long var1, register unsigned long var2) +#include "h264parse.h" + +uint32_t mult_u(register uint32_t var1, register uint32_t var2) { register unsigned long var_out = 0; @@ -20,7 +22,7 @@ unsigned long mult_u(register unsigned long var1, register unsigned long var2) }// mult_u -unsigned long ldiv_mod_u(register unsigned long a, register unsigned long b, unsigned long * mod) +uint32_t ldiv_mod_u(uint32_t a, uint32_t b, uint32_t * mod) { register unsigned long div = b; register unsigned long res = 0; diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c index a956607..38297dc 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c @@ -16,7 +16,7 @@ // --------------------------------------------------------------------------- void* h264_memset( void* buf, uint32_t c, uint32_t num ) { - uint32_t* buf32 = buf; + uint32_t* buf32 = (uint32_t*)buf; uint32_t size32 = ( num >> 2 ); uint32_t i; @@ -31,8 +31,8 @@ void* h264_memset( void* buf, uint32_t c, uint32_t num ) void* h264_memcpy( void* dest, void* src, uint32_t num ) { - int32_t* dest32 = dest; - int32_t* src32 = src; + int32_t* dest32 = (int32_t*)dest; + int32_t* src32 = (int32_t*)src; uint32_t size32 = ( num >> 2 ); uint32_t i; diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c index eadd6cc..b330f86 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c @@ -881,7 +881,7 @@ h264_Status h264_sei_stereo_video_info(void *parent,h264_Info* pInfo) /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ -h264_Status h264_sei_reserved_sei_message(void *parent, h264_Info* pInfo, uint32_t payload_size) +uint32_t h264_sei_reserved_sei_message(void *parent, h264_Info* pInfo, uint32_t payload_size) { int32_t k, byte_index, user_data_byte_index; uint32_t i; @@ -949,7 +949,7 @@ h264_Status h264_sei_reserved_sei_message(void *parent, h264_Info* pInfo, uint32 return user_data_byte_index; - return H264_STATUS_OK; +// return H264_STATUS_OK; } ////// TODO @@ -1033,7 +1033,7 @@ h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtyp status = h264_sei_stereo_video_info(parent, pInfo); break; default: - status = h264_sei_reserved_sei_message(parent, pInfo, payloadSize); + status = (h264_Status)h264_sei_reserved_sei_message(parent, pInfo, payloadSize); break; } @@ -1102,7 +1102,7 @@ h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void *parent ///////////////////////////////// // Parse SEI payloads ///////////////////////////////// - status = h264_SEI_payload(parent, pInfo, payload_type, payload_size); + status = h264_SEI_payload(parent, pInfo, (h264_sei_payloadtype)payload_type, payload_size); if(status != H264_STATUS_OK) break; diff --git 
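
h264parse_math.c, retyped above from unsigned long to uint32_t, implements multiply and divide-with-remainder using only add, subtract, and shift -- its header comment ("Arithmatic functions using add & subtract") points at a firmware target without multiply/divide units. A worked sketch of the same two algorithms:

#include <cstdint>

// Shift-and-add multiply, the mult_u() idea: add var1 once for every set
// bit of var2, doubling var1 as we walk var2's bits.
static uint32_t mult(uint32_t var1, uint32_t var2) {
    uint32_t out = 0;
    while (var2 != 0) {
        if (var2 & 1) out += var1;
        var1 <<= 1;
        var2 >>= 1;
    }
    return out;
}

// Restoring long division, the ldiv_mod_u() contract: returns the quotient
// and stores the remainder through *mod. (b == 0 yields quotient 0 and
// remainder a here -- a defensive choice, not necessarily the original's.)
static uint32_t ldiv_mod(uint32_t a, uint32_t b, uint32_t* mod) {
    uint32_t quotient = 0;
    if (b != 0) {
        uint32_t div = b, bit = 1;
        // Align the divisor just below the dividend without overflowing.
        while (div <= a && !(div & 0x80000000u)) { div <<= 1; bit <<= 1; }
        // Subtract it back down, setting one quotient bit per position.
        while (bit != 0) {
            if (a >= div) { a -= div; quotient |= bit; }
            div >>= 1;
            bit >>= 1;
        }
    }
    *mod = a;
    return quotient;
}
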
a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c index c377e2d..0276eaa 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c @@ -219,7 +219,7 @@ h264_Status h264_Parse_Vui_Parameters(void *parent, h264_Info* pInfo, seq_param_ if(SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) { nal_hrd = 1; - ret |= h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used); + ret = h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used); } viddec_pm_get_bits(parent, &code, 1); @@ -228,7 +228,7 @@ h264_Status h264_Parse_Vui_Parameters(void *parent, h264_Info* pInfo, seq_param_ if(SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1) { nal_hrd = 0; - ret |= h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used); + ret = (h264_Status)h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used); } if((SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) || (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c index 87959f3..40ec011 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c @@ -181,7 +181,7 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) ///////////////////////////////////// Slice Data //////////////////////////////// // h264_fill_slice_data(pInfo, &slice_data); - wi.vwi_type = VIDDEC_WORKLOAD_H264_SLICE_REG; + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_H264_SLICE_REG); wi.data.data_offset = slice_data.h264_bsd_slice_start; wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1; @@ -200,7 +200,7 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) ///////////////////////////predict weight table item and data if have/////////////////////////// if(pInfo->h264_pwt_enabled) { - wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET; wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1; wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset; wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset; @@ -209,7 +209,7 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) { // viddec_pm_append_workitem( parent , &wi); - wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; wi.es.es_flags = 0; // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1); } @@ -217,7 +217,7 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) { // viddec_pm_append_workitem_next( parent , &wi); - wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; wi.es.es_flags = 0; // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0); } @@ -241,7 +241,7 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) else { if(0!=bits_offset) { - wi.vwi_type = VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET; wi.data.data_offset = bits_offset; 
wi.data.data_payload[0]=0; wi.data.data_payload[1]=0; @@ -293,7 +293,7 @@ void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) // Dump slice data to an array of workitems, to do pl access non valid mem for( i = 0; i < nitems; i++ ) { - wi.vwi_type = VIDDEC_WORKLOAD_H264_PIC_REG; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PIC_REG; wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct wi.data.data_payload[0] = pl[0]; wi.data.data_payload[1] = pl[1]; @@ -349,7 +349,7 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) for(i=0; idpb.frame_id_need_to_be_displayed[i]; + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 + pInfo->dpb.frame_id_need_to_be_displayed[i]); wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i]; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; @@ -371,7 +371,7 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) for(i=0; idpb.frame_id_need_to_be_removed[i]; + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 + pInfo->dpb.frame_id_need_to_be_removed[i]); wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i]; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; @@ -393,7 +393,7 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) for(i=0; idpb.frame_id_need_to_be_dropped[i]; + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 + pInfo->dpb.frame_id_need_to_be_dropped[i]); wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i]; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; @@ -418,7 +418,7 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0) { - wi.vwi_type = VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id; + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id); wi.ref_frame.reference_id = fs_id; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; @@ -443,7 +443,7 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0) { - wi.vwi_type = VIDDEC_WORKLOAD_H264_DPB_FRAME_POC; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_DPB_FRAME_POC; wi.data.data_offset = fs_id; //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc); @@ -530,7 +530,7 @@ void h264_parse_emit_eos( void *parent, h264_Info *pInfo ) for(i=0; idpb.frame_id_need_to_be_displayed[i]; + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 + pInfo->dpb.frame_id_need_to_be_displayed[i]); wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i]; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; @@ -552,7 +552,7 @@ void h264_parse_emit_eos( void *parent, h264_Info *pInfo ) for(i=0; idpb.frame_id_need_to_be_removed[i]; + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_removed[i]); wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i]; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk index 8b2bfe3..ed28fa3 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk +++ 
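
The long run of (workload_item_type) and (mp4_Status_t) casts above and below is the mechanical price of compiling this C as C++: C allows any integer expression to be assigned to an enum lvalue, while C++ forbids the implicit int-to-enum conversion, so every computed value -- an enum plus an offset, or OR-ed status flags -- needs an explicit cast. A minimal reproduction of the error and the fix:

enum Status { OK = 0, NOT_SUPPORTED = 1 << 0, DATA_ERROR = 1 << 1 };

int main() {
    Status s;
    // s = NOT_SUPPORTED | DATA_ERROR;   // C: fine; C++: error -- operator|
    //                                   // yields int, which won't narrow back.
    s = static_cast<Status>(NOT_SUPPORTED | DATA_ERROR);   // the cast the patch adds
    return s == static_cast<Status>(3) ? 0 : 1;
}

Note that one hunk above (h264parse_sps.c) resolved the same error differently: ret |= h264_Parse_HRD_Parameters(...) became a plain assignment with a cast, which also compiles but silently drops any error bits accumulated in ret before that call.
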
b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk @@ -23,8 +23,8 @@ LOCAL_C_INCLUDES := \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/include -LOCAL_MODULE := libmixvbp_mpeg4 LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libmixvbp_mpeg4 LOCAL_SHARED_LIBRARIES := \ libglib-2.0 \ diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c index 6b44c7a..e9e9012 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c @@ -1,3 +1,5 @@ +#include + #include "viddec_fw_workload.h" #include "viddec_parser_ops.h" #include "viddec_fw_mp4.h" @@ -70,7 +72,7 @@ uint32_t viddec_fw_mp4_insert_vol_workitem(void *parent, viddec_mp4_parser_t *pa viddec_fw_mp4_set_vop_time_increment_resolution(&vol_info, vol->vop_time_increment_resolution); - wi.vwi_type = VIDDEC_WORKLOAD_MP4_VOL_INFO; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_VOL_INFO; wi.vwi_payload[0] = vol_info.vol_flags; wi.vwi_payload[1] = vol_info.vol_size; wi.vwi_payload[2] = vol_info.vol_item; @@ -119,7 +121,7 @@ uint32_t viddec_fw_mp4_insert_vop_workitem(void *parent, viddec_mp4_parser_t *pa // Get vol_item result = viddec_pm_get_au_pos(parent, &vop_info.bit_offset, &byte, &is_emul); - wi.vwi_type = VIDDEC_WORKLOAD_MP4_VOP_INFO; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_VOP_INFO; wi.vwi_payload[0] = vop_info.frame_info; wi.vwi_payload[1] = vop_info.vop_data; wi.vwi_payload[2] = vop_info.bit_offset; @@ -144,7 +146,7 @@ uint32_t viddec_fw_mp4_insert_vpsh_workitem(void *parent, viddec_mp4_parser_t *p viddec_fw_mp4_set_num_gobs_in_vop(&svh_info, svh->num_gobs_in_vop); viddec_fw_mp4_set_num_rows_in_gob(&svh_info, svh->num_rows_in_gob); - wi.vwi_type = VIDDEC_WORKLOAD_MP4_SVH; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_SVH; wi.vwi_payload[0] = svh_info.svh_data; wi.vwi_payload[1] = svh_info.pad1; wi.vwi_payload[2] = svh_info.pad2; @@ -190,7 +192,7 @@ uint32_t viddec_fw_mp4_insert_sprite_workitem(void *parent, viddec_mp4_parser_t warp_index++; } - wi.vwi_type = VIDDEC_WORKLOAD_MP4_SPRT_TRAJ; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_SPRT_TRAJ; wi.vwi_payload[0] = sprite_info.warping_mv_code[0]; wi.vwi_payload[1] = sprite_info.warping_mv_code[1]; wi.vwi_payload[2] = sprite_info.warping_mv_code[2]; @@ -207,7 +209,7 @@ uint32_t viddec_fw_mp4_insert_bvop_workitem(void *parent, viddec_mp4_parser_t *p viddec_workload_item_t wi; mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject); - wi.vwi_type = VIDDEC_WORKLOAD_MP4_BVOP_INFO; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_BVOP_INFO; wi.vwi_payload[0] = vol->Tframe; wi.vwi_payload[1] = vol->TRD; wi.vwi_payload[2] = vol->TRB; @@ -231,9 +233,9 @@ uint32_t viddec_fw_mp4_insert_qmat(void *parent, uint8_t intra_quant_flag, uint3 memset(&wi, 0, sizeof(viddec_workload_item_t)); if(intra_quant_flag) - wi.vwi_type = VIDDEC_WORKLOAD_MP4_IQUANT; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_IQUANT; else - wi.vwi_type = VIDDEC_WORKLOAD_MP4_NIQUANT; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_NIQUANT; if(i == 6) { @@ -278,7 +280,7 @@ uint32_t viddec_fw_mp4_insert_past_frame_workitem(void *parent) uint32_t result = MP4_STATUS_OK; viddec_workload_item_t wi; - wi.vwi_type = VIDDEC_WORKLOAD_MP4_PAST_FRAME; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_PAST_FRAME; 
wi.ref_frame.reference_id = 0; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; @@ -292,7 +294,7 @@ uint32_t viddec_fw_mp4_insert_future_frame_workitem(void *parent) uint32_t result = MP4_STATUS_OK; viddec_workload_item_t wi; - wi.vwi_type = VIDDEC_WORKLOAD_MP4_FUTURE_FRAME; + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_FUTURE_FRAME; wi.ref_frame.reference_id = 0; wi.ref_frame.luma_phys_addr = 0; wi.ref_frame.chroma_phys_addr = 0; diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c index d2722a2..101c852 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c @@ -1,3 +1,5 @@ +#include + #include "viddec_fw_debug.h" #include "viddec_parser_ops.h" #include "viddec_mp4_parse.h" diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c index 33cb1d7..29cd3a9 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c @@ -340,7 +340,7 @@ mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser viddec_fw_mp4_vpsh_set_source_format(&wi.mp4_vpsh, svh->source_format); - ret = viddec_pm_append_workitem(parent, &wi, false); + ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); if(ret == 1) ret = MP4_STATUS_OK; } diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c index 31ac9d1..1e4c0c1 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c @@ -1,3 +1,4 @@ +#include #include "viddec_mp4_videoobjectlayer.h" const unsigned char mp4_DefaultIntraQuantMatrix[64] = { @@ -191,7 +192,7 @@ mp4_Parse_VOL_sprite(void *parent, viddec_mp4_parser_t *parser) { /* This is not a supported type by HW */ DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -204,21 +205,21 @@ mp4_Parse_VOL_sprite(void *parent, viddec_mp4_parser_t *parser) { DEB("Error: mp4_Parse_VideoObject:bad no_of_sprite_warping_points %d\n", cxt->no_of_sprite_warping_points); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } if((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && (cxt->sprite_brightness_change)) { DEB("Error: mp4_Parse_VideoObject:sprite_brightness_change should be 0 for GMC sprites\n"); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } if (vidObjLay->sprite_enable != MP4_SPRITE_GMC) { DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } } @@ -306,7 +307,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ { /* not supported shape*/ DEB("Error: mp4_Parse_VideoObject: sadct_disable, not supp\n"); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | 
MP4_STATUS_REQD_DATA_ERROR); break; } @@ -317,7 +318,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ { /* 8 bit is only supported mode*/ DEB("Error: mp4_Parse_VideoObject: not_8_bit, not supp\n"); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } else @@ -330,7 +331,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ { /* Should not get here as shape is checked earlier */ DEB("Error: mp4_Parse_VideoObject: GRAYSCALE, not supp\n"); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -359,7 +360,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ if(!vidObjLay->complexity_estimation_disable) {/* complexity estimation not supported */ DEB("Error: mp4_Parse_VideoObject: vidObjLay->complexity_estimation_disable, not supp\n"); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -382,7 +383,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ if(vidObjLay->newpred_enable) { DEB("Error: NEWPRED mode is not supported\n"); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } getbits = viddec_pm_get_bits(parent, &(code), 1); @@ -396,7 +397,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ if(vidObjLay->scalability) { DEB("Error: VOL scalability is not supported\n"); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -438,7 +439,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse {/* This is not a supported type by HW */ DEB("ERROR: mp4_Parse_VideoObject:video_object_type_indication = %.2X\n", vidObjLay->video_object_type_indication); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } else @@ -459,7 +460,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse { DEB("Error: mp4_Parse_VideoObject:is_identifier = %d, expected[1,5]\n", vidObjLay->video_object_layer_verid); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } /* Video object layer ID supercedes visual object ID */ @@ -497,7 +498,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse { DEB("Error: mp4_Parse_VideoObject: shape not rectangluar(%d):%d\n", MP4_SHAPE_TYPE_RECTANGULAR, vidObjLay->video_object_layer_shape); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -505,7 +506,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE)) {/* Grayscale not supported */ DEB("Error: MP4_SHAPE_TYPE_GRAYSCALE not supported\n"); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -518,7 +519,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, 
viddec_mp4_parser_t *parse if(vidObjLay->vop_time_increment_resolution == 0) { DEB("Error: 0 value for vop_time_increment_resolution\n"); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } /* calculate number bits in vop_time_increment_resolution */ @@ -543,7 +544,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse else { DEB("Error: MP4_SHAPE_TYPE_BINARYONLY not supported\n"); - ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR; + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } } @@ -582,7 +583,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse viddec_fw_mp4_vol_set_fixed_vop_time_increment(&wi.mp4_vol, vidObjLay->fixed_vop_time_increment); viddec_fw_mp4_vol_set_vop_time_increment_resolution(&wi.mp4_vol, vidObjLay->vop_time_increment_resolution); - ret = viddec_pm_append_workitem(parent, &wi, false); + ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); if(ret == 1) ret = MP4_STATUS_OK; diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c index 3fee166..7410337 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c @@ -54,7 +54,7 @@ mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t viddec_fw_mp4_gvop_set_closed_gov(&wi.mp4_gvop, data->closed_gov); viddec_fw_mp4_gvop_set_time_code(&wi.mp4_gvop, time_code); - ret = viddec_pm_append_workitem(parent, &wi, false); + ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); if(ret == 1) ret = MP4_STATUS_OK; } @@ -147,7 +147,7 @@ mp4_Sprite_Trajectory(void *parent, mp4_VideoObjectLayer_t *vidObjLay, mp4_Video mp4_Status_t ret = MP4_STATUS_OK; for(i=0; i < (uint32_t)vidObjLay->sprite_info.no_of_sprite_warping_points; i++ ) { - ret = mp4_Sprite_dmv_length(parent, &dmv_length); + ret = (mp4_Status_t)mp4_Sprite_dmv_length(parent, &dmv_length); if(ret != MP4_STATUS_OK) { break; @@ -175,7 +175,7 @@ mp4_Sprite_Trajectory(void *parent, mp4_VideoObjectLayer_t *vidObjLay, mp4_Video } vidObjPlane->warping_mv_code_du[i] = dmv_code; /* TODO: create another inline function to avoid code duplication */ - ret = mp4_Sprite_dmv_length(parent, &dmv_length); + ret = (mp4_Status_t)mp4_Sprite_dmv_length(parent, &dmv_length); if(ret != MP4_STATUS_OK) { break; diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c index acfb8fa..e74444c 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c @@ -213,8 +213,9 @@ mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser) } } - ret = viddec_pm_append_workitem(parent, &wi, false); - if(ret == 1) + int ret_val; + ret_val = viddec_pm_append_workitem(parent, &wi, false); + if(ret_val == 1) ret = MP4_STATUS_OK; } @@ -262,7 +263,8 @@ mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser) if (wi.user_data.size >= 11) { viddec_pm_setup_userdata(&wi); - ret = viddec_pm_append_workitem(parent, &wi, false); + + ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); wi.user_data.size = 0; } } @@ -275,7 +277,7 @@ mp4_Status_t mp4_Parse_UserData(void 
*parent, viddec_mp4_parser_t *parser) wi.user_data.data_payload[i] = 0; } viddec_pm_setup_userdata(&wi); - ret = viddec_pm_append_workitem(parent, &wi, false); + ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); wi.user_data.size = 0; } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h index a2607d7..eb26bf7 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h @@ -26,7 +26,7 @@ WARNING, INFO, DEBUG, - } log_level; + } ; #define vc1_log_level DEBUG diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk index e577932..9125cb7 100644 --- a/mix_vbp/viddec_fw/fw/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -41,8 +41,8 @@ LOCAL_COPY_HEADERS_TO := libmixvbp LOCAL_COPY_HEADERS := \ vbp_loader.h -LOCAL_MODULE := libmixvbp LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libmixvbp LOCAL_SHARED_LIBRARIES := \ libdl \ diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h index a8efafb..7acb407 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h @@ -67,7 +67,7 @@ static inline void viddec_emit_time(viddec_emitter *cxt, uint32_t time) static inline void viddec_emit_set_codec(viddec_emitter *emit, uint32_t codec_type) { - emit->cur.data->codec = codec_type; + emit->cur.data->codec = (viddec_stream_format)(codec_type); } static inline void viddec_emit_set_codec_errors(viddec_emitter *emit, uint32_t codec_error) @@ -91,7 +91,7 @@ static inline void viddec_emit_set_inband_tag(viddec_emitter *emit, uint32_t typ viddec_emitter_wkld *cur_wkld; viddec_workload_item_t item; cur_wkld = (using_next == false)? 
&(emit->cur):&(emit->next); - item.vwi_type = type; + item.vwi_type = (workload_item_type)(type); item.vwi_payload[0] = item.vwi_payload[1] = item.vwi_payload[2] = 0; viddec_emit_append(cur_wkld, &item); } diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h index 0c643fa..68cddc7 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h @@ -17,16 +17,25 @@ typedef struct uint32_t persist_size; }viddec_parser_memory_sizes_t; +typedef void (*fn_init)(void *ctxt, uint32_t *persist, uint32_t preserve); +typedef uint32_t (*fn_parse_sc) (void *ctxt, void *pcxt, void *sc_state); +typedef uint32_t (*fn_parse_syntax) (void *parent, void *ctxt); +typedef void (*fn_get_cxt_size) (viddec_parser_memory_sizes_t *size); +typedef uint32_t (*fn_is_wkld_done)(void *parent, void *ctxt, uint32_t next_sc, uint32_t *codec_specific_errors); +typedef uint32_t (*fn_is_frame_start)(void *ctxt); +typedef uint32_t (*fn_gen_contrib_tags)(void *parent, uint32_t ignore_partial); +typedef uint32_t (*fn_gen_assoc_tags)(void *parent); + typedef struct { - void (*init)(void *ctxt, uint32_t *persist, uint32_t preserve); - uint32_t (*parse_sc) (void *ctxt, void *pcxt, void *sc_state); - uint32_t (*parse_syntax) (void *parent, void *ctxt); - void (*get_cxt_size) (viddec_parser_memory_sizes_t *size); - uint32_t (*is_wkld_done)(void *parent, void *ctxt, uint32_t next_sc, uint32_t *codec_specific_errors); - uint32_t (*is_frame_start)(void *ctxt); - uint32_t (*gen_contrib_tags)(void *parent, uint32_t ignore_partial); - uint32_t (*gen_assoc_tags)(void *parent); + fn_init init; + fn_parse_sc parse_sc; + fn_parse_syntax parse_syntax; + fn_get_cxt_size get_cxt_size; + fn_is_wkld_done is_wkld_done; + fn_is_frame_start is_frame_start; + fn_gen_contrib_tags gen_contrib_tags; + fn_gen_assoc_tags gen_assoc_tags; }viddec_parser_ops_t; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index 27436b9..98cc0d0 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -1796,3 +1796,6 @@ uint32 vbp_populate_query_data_h264(vbp_context *pcontext) } return VBP_OK; } + + + diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h index 6ed4499..3f86e59 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h @@ -45,4 +45,6 @@ uint32 vbp_process_parsing_result_h264(vbp_context *pcontext, int list_index); */ uint32 vbp_populate_query_data_h264(vbp_context *pcontext); + + #endif /*VBP_H264_PARSER_H*/ diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index 60a30b1..93a98a2 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -11,6 +11,9 @@ #include + + + #ifndef TRUE #define TRUE 1 #endif @@ -360,4 +363,6 @@ uint32 vbp_query(Handle hcontext, void **data); */ uint32 vbp_flush(Handle hcontent); + + #endif /* VBP_LOADER_H */ diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index 5a4e358..9765f28 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -45,8 +45,7 @@ uint32 vbp_get_sc_pos_mp42( uint32 length, uint32 *sc_end_pos, uint8 *is_normal_sc, - uint8* resync_marker, - const bool 
svh_search); + uint8* resync_marker); void vbp_on_vop_mp42(vbp_context *pcontext, int list_index); void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index); @@ -98,7 +97,7 @@ uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext) return VBP_LOAD; } - pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_mp4_get_context_size"); + pcontext->parser_ops->get_cxt_size =dlsym(pcontext->fd_parser, "viddec_mp4_get_context_size"); if (pcontext->parser_ops->get_cxt_size == NULL) { ETRACE ("Failed to set entry point." ); @@ -250,7 +249,11 @@ uint32 vbp_parse_start_code_mp42(vbp_context *pcontext) while (1) { found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size- bytes_parsed, - &sc_end_pos, &is_normal_sc, &resync_marker, short_video_header); + &sc_end_pos, &is_normal_sc, &resync_marker); + + VTRACE("buf=%x, bytes_parsed=%d, unparsed=%d", (uint32)buf, bytes_parsed, size- bytes_parsed); + VTRACE("found_sc=%d, cxt->list.num_items=%d, resync_marker=%d, ", + found_sc, cxt->list.num_items, resync_marker); if (found_sc) { @@ -629,8 +632,7 @@ uint32 vbp_get_sc_pos_mp42( uint32 length, uint32 *sc_end_pos, uint8 *is_normal_sc, - uint8 *resync_marker, - const bool svh_search) + uint8 *resync_marker) { uint8 *ptr = buf; uint32 size; @@ -711,10 +713,8 @@ uint32 vbp_get_sc_pos_mp42( if (phase == 2) { normal_sc = (*ptr == THIRD_STARTCODE_BYTE); - if (svh_search) - { - short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC)); - } + short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC)); + *is_normal_sc = normal_sc; // at least 16-bit 0, may be GOB start code or diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h index 71e76fb..ada7c26 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h @@ -12,7 +12,7 @@ -//#define VBP_TRACE +#define VBP_TRACE #ifdef VBP_TRACE /* if VBP_TRACE is defined*/ diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c index b5548ab..4b8b800 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c @@ -18,7 +18,6 @@ #include "vbp_mp42_parser.h" - /* buffer counter */ uint32 buffer_counter = 0; @@ -424,8 +423,8 @@ uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext) /* invoke the entry point to initialize the parser. 
*/ pcontext->parser_ops->init( - (void *)pcontext->parser_cxt->codec_data, - (void *)pcontext->persist_mem, + (uint32_t *)pcontext->parser_cxt->codec_data, + (uint32_t *)pcontext->persist_mem, FALSE); viddec_emit_init(&(pcontext->parser_cxt->emitter)); diff --git a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h index 5cbbab1..16b4898 100644 --- a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h +++ b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h @@ -152,7 +152,7 @@ enum viddec_fw_decoder_int_status }; /** Hardware Accelerated stream formats */ -enum viddec_stream_format +typedef enum viddec_stream_format { MFD_STREAM_FORMAT_MPEG=1, MFD_STREAM_FORMAT_H264, @@ -161,7 +161,7 @@ enum viddec_stream_format MFD_STREAM_FORMAT_MAX, /* must be last */ MFD_STREAM_FORMAT_INVALID -}; +}viddec_stream_format; /* Workload specific error codes */ enum viddec_fw_workload_error_codes @@ -211,9 +211,11 @@ enum viddec_fw_mpeg2_error_codes #define false 0 #endif +#ifndef __cplusplus #ifndef bool typedef int bool; #endif +#endif #endif /* end of #ifdef VBP */ diff --git a/mix_vbp/viddec_fw/include/viddec_fw_item_types.h b/mix_vbp/viddec_fw/include/viddec_fw_item_types.h index 3a69c6d..936c2e6 100644 --- a/mix_vbp/viddec_fw/include/viddec_fw_item_types.h +++ b/mix_vbp/viddec_fw/include/viddec_fw_item_types.h @@ -79,7 +79,7 @@ /* Workload items type. Each item here represents data that Parser detected ex:slice data which is used either by host or decoder.*/ -enum workload_item_type +typedef enum workload_item_type { VIDDEC_WORKLOAD_INVALID =0x0,/* Unknown type */ VIDDEC_WORKLOAD_PIXEL_ES =0x100,/* Slice data tag */ @@ -132,7 +132,7 @@ enum workload_item_type VIDDEC_WORKLOAD_DECODER_SPECIFIC =0x100000,/* pvt info for decoder tags */ VIDDEC_WORKLOAD_MAX, -}; +}workload_item_type; struct h264_witem_sps_mvc_id { diff --git a/mix_video/src/Android.mk b/mix_video/src/Android.mk index 2c07cd9..f003dc1 100644 --- a/mix_video/src/Android.mk +++ b/mix_video/src/Android.mk @@ -1,41 +1,34 @@ LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) -#MIXVIDEO_LOG_ENABLE := true +MIXVIDEO_LOG_ENABLE := true LOCAL_SRC_FILES := \ - mixbuffer.c \ - mixbufferpool.c \ - mixdisplay.c \ - mixdisplayandroid.c \ - mixframemanager.c \ - mixsurfacepool.c \ - mixvideo.c \ - mixvideocaps.c \ - mixvideoconfigparams.c \ - mixvideoconfigparamsdec.c \ - mixvideoconfigparamsdec_h264.c \ - mixvideoconfigparamsdec_mp42.c \ - mixvideoconfigparamsdec_vc1.c \ - mixvideoconfigparamsenc.c \ - mixvideoconfigparamsenc_h264.c \ - mixvideoconfigparamsenc_h263.c \ - mixvideoconfigparamsenc_mpeg4.c \ - mixvideoconfigparamsenc_preview.c \ - mixvideodecodeparams.c \ - mixvideoencodeparams.c \ - mixvideoformat.c \ - mixvideoformat_h264.c \ - mixvideoformat_mp42.c \ - mixvideoformat_vc1.c \ - mixvideoformatenc.c \ - mixvideoformatenc_h264.c \ - mixvideoformatenc_h263.c \ - mixvideoformatenc_mpeg4.c \ - mixvideoformatenc_preview.c \ - mixvideoframe.c \ - mixvideoinitparams.c \ - mixvideorenderparams.c + mixvideothread.cpp \ + mixbuffer.cpp \ + mixbufferpool.cpp \ + mixdisplay.cpp \ + mixdisplayandroid.cpp \ + mixframemanager.cpp \ + mixsurfacepool.cpp \ + mixvideo.cpp \ + mixvideocaps.cpp \ + mixvideoconfigparams.cpp \ + mixvideoconfigparamsdec.cpp \ + mixvideoconfigparamsdec_h264.cpp \ + mixvideoconfigparamsdec_mp42.cpp \ + mixvideoconfigparamsdec_vc1.cpp \ + mixvideodecodeparams.cpp \ + mixvideoformat.cpp \ + mixvideoformat_h264.cpp \ + mixvideoformat_mp42.cpp \ + mixvideoformat_vc1.cpp \ + mixvideoframe.cpp 
\ + mixvideoinitparams.cpp \ + mixvideorenderparams.cpp \ + mixvideoconfigparamsenc.cpp \ + mixvideoconfigparamsenc_h264.cpp \ + mixvideoconfigparamsenc_h263.cpp LOCAL_CFLAGS := \ -DMIXVIDEO_AGE=1 \ @@ -49,32 +42,34 @@ LOCAL_C_INCLUDES := \ $(GLIB_TOP) \ $(GLIB_TOP)/android \ $(GLIB_TOP)/glib \ - $(GLIB_TOP)/gobject \ $(TARGET_OUT_HEADERS)/libmixcommon \ $(TARGET_OUT_HEADERS)/libmixvbp \ $(TARGET_OUT_HEADERS)/libva +LOCAL_LDLIBS += -lpthread + LOCAL_SHARED_LIBRARIES := \ - libcutils \ - libglib-2.0 \ - libgobject-2.0 \ - libgthread-2.0 \ - libgmodule-2.0 \ + libcutils \ + libglib-2.0 \ libmixcommon \ - libmixvbp \ - libva \ - libva-android \ + libmixvbp \ + libva \ + libva-android \ libva-tpi -LOCAL_CFLAGS += -DANDROID + +LOCAL_CFLAGS += -DANDROID \ + -DMIXVIDEO_ENCODE_ENABLE=0 + ifeq ($(strip $(MIXVIDEO_LOG_ENABLE)),true) -LOCAL_CFLAGS += -DMIX_LOG_ENABLE +LOCAL_CFLAGS += LOCAL_SHARED_LIBRARIES += liblog endif LOCAL_COPY_HEADERS_TO := libmixvideo LOCAL_COPY_HEADERS := \ + mixvideothread.h \ mixbuffer.h \ mixbuffer_private.h \ mixbufferpool.h \ @@ -93,7 +88,7 @@ LOCAL_COPY_HEADERS := \ mixvideoconfigparamsdec_vc1.h \ mixvideoconfigparamsenc.h \ mixvideoconfigparamsenc_h264.h \ - mixvideoconfigparamsenc_h263.h \ + mixvideoconfigparamsenc_h263.h \ mixvideoconfigparamsenc_mpeg4.h \ mixvideoconfigparamsenc_preview.h \ mixvideodecodeparams.h \ @@ -114,7 +109,7 @@ LOCAL_COPY_HEADERS := \ mixvideorenderparams.h \ mixvideorenderparams_internal.h -LOCAL_MODULE := libmixvideo LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libmixvideo include $(BUILD_SHARED_LIBRARY) diff --git a/mix_video/src/mixbuffer.c b/mix_video/src/mixbuffer.c deleted file mode 100644 index 3a19a5b..0000000 --- a/mix_video/src/mixbuffer.c +++ /dev/null @@ -1,233 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixbuffer - * @short_description: MI-X Video Buffer Parameters - * - * - * #MixBuffer objects are used to wrap input data buffers in a reference counted object as - * described in the buffer model section. Data buffers themselves are allocated by the - * App/MMF. #MixBuffer objects are allocated by #MixVideo in a pool and retrieved by the - * application using mix_video_get_mixbuffer(). The application will wrap a data buffer - * in a #Mixbuffer object and pass it into mix_video_decode() or to mix_video_encode(). - * - * - * The #MixBuffer objects will be released by #MixVideo when they are no longer needed - * for the decode or encoder operation. 
The App/MMF will also release the #MixBuffer - * object after use. When the #MixBuffer is completely released, the callback to the - * function registered in the #MixBuffer will be called (allowing the App/MMF to release - * data buffers as necessary). - * - */ - -#include "mixvideolog.h" -#include "mixbuffer.h" -#include "mixbuffer_private.h" - -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } - -static GType _mix_buffer_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_buffer_type = g_define_type_id; } - -gboolean mix_buffer_copy(MixParams * target, const MixParams * src); -MixParams *mix_buffer_dup(const MixParams * obj); -gboolean mix_buffer_equal(MixParams * first, MixParams * second); -static void mix_buffer_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixBuffer, mix_buffer, MIX_TYPE_PARAMS, - _do_init); - -static void mix_buffer_init(MixBuffer * self) { - /* initialize properties here */ - - MixBufferPrivate *priv = MIX_BUFFER_GET_PRIVATE(self); - self->reserved = priv; - - priv->pool = NULL; - - self->data = NULL; - self->size = 0; - self->token = 0; - self->callback = NULL; -} - -static void mix_buffer_class_init(MixBufferClass * klass) { - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); - - mixparams_class->finalize = mix_buffer_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_buffer_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_buffer_dup; - mixparams_class->equal = (MixParamsEqualFunction) mix_buffer_equal; - - /* Register and allocate the space the private structure for this object */ - g_type_class_add_private(mixparams_class, sizeof(MixBufferPrivate)); -} - -MixBuffer * -mix_buffer_new(void) { - MixBuffer *ret = (MixBuffer *) g_type_create_instance(MIX_TYPE_BUFFER); - return ret; -} - -void mix_buffer_finalize(MixParams * obj) { - /* clean up here. */ - - /* MixBuffer *self = MIX_BUFFER(obj); */ - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} - -MixBuffer * -mix_buffer_ref(MixBuffer * mix) { - return (MixBuffer *) mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_buffer_dup: - * @obj: a #MixBuffer object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixParams * -mix_buffer_dup(const MixParams * obj) { - MixParams *ret = NULL; - - if (MIX_IS_BUFFER(obj)) { - MixBuffer *duplicate = mix_buffer_new(); - if (mix_buffer_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { - ret = MIX_PARAMS(duplicate); - } else { - mix_buffer_unref(duplicate); - } - } - return ret; -} - -/** - * mix_buffer_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. 
- */ -gboolean mix_buffer_copy(MixParams * target, const MixParams * src) { - MixBuffer *this_target, *this_src; - - if (MIX_IS_BUFFER(target) && MIX_IS_BUFFER(src)) { - // Cast the base object to this child object - this_target = MIX_BUFFER(target); - this_src = MIX_BUFFER(src); - - // Duplicate string - this_target->data = this_src->data; - this_target->size = this_src->size; - this_target->token = this_src->token; - this_target->callback = this_src->callback; - - // Now chainup base class - if (parent_class->copy) { - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - return TRUE; - } - } - return FALSE; -} - -/** - * mix_buffer_: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. - */ -gboolean mix_buffer_equal(MixParams * first, MixParams * second) { - gboolean ret = FALSE; - MixBuffer *this_first, *this_second; - - if (MIX_IS_BUFFER(first) && MIX_IS_BUFFER(second)) { - // Deep compare - // Cast the base object to this child object - - this_first = MIX_BUFFER(first); - this_second = MIX_BUFFER(second); - - if (this_first->data == this_second->data && this_first->size - == this_second->size && this_first->token == this_second->token - && this_first->callback == this_second->callback) { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = klass->equal(first, second); - else - ret = TRUE; - } - } - - return ret; -} - -#define MIX_BUFFER_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_BUFFER(obj)) return MIX_RESULT_FAIL; \ - - -MIX_RESULT mix_buffer_set_data(MixBuffer * obj, guchar *data, guint size, - gulong token, MixBufferCallback callback) { - MIX_BUFFER_SETTER_CHECK_INPUT (obj); - - obj->data = data; - obj->size = size; - obj->token = token; - obj->callback = callback; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_buffer_set_pool(MixBuffer *obj, MixBufferPool *pool) { - - MIX_BUFFER_SETTER_CHECK_INPUT (obj); - MixBufferPrivate *priv = (MixBufferPrivate *) obj->reserved; - priv->pool = pool; - - return MIX_RESULT_SUCCESS; -} - -void mix_buffer_unref(MixBuffer * obj) { - - // Unref through base class - mix_params_unref(MIX_PARAMS(obj)); - - LOG_I( "refcount = %d\n", MIX_PARAMS( - obj)->refcount); - - // Check if we have reduced to 1, in which case we add ourselves to free pool - if (MIX_PARAMS(obj)->refcount == 1) { - MixBufferPrivate *priv = (MixBufferPrivate *) obj->reserved; - g_return_if_fail(priv->pool != NULL); - - if (obj->callback) { - obj->callback(obj->token, obj->data); - } - mix_bufferpool_put(priv->pool, obj); - } -} - diff --git a/mix_video/src/mixbuffer.cpp b/mix_video/src/mixbuffer.cpp new file mode 100644 index 0000000..b1c0ca7 --- /dev/null +++ b/mix_video/src/mixbuffer.cpp @@ -0,0 +1,144 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixbuffer + * @short_description: MI-X Video Buffer Parameters + * + * + * #MixBuffer objects are used to wrap input data buffers in a reference counted object as + * described in the buffer model section. Data buffers themselves are allocated by the + * App/MMF. #MixBuffer objects are allocated by #MixVideo in a pool and retrieved by the + * application using mix_video_get_mixbuffer(). The application will wrap a data buffer + * in a #Mixbuffer object and pass it into mix_video_decode() or to mix_video_encode(). + * + * + * The #MixBuffer objects will be released by #MixVideo when they are no longer needed + * for the decode or encoder operation. The App/MMF will also release the #MixBuffer + * object after use. When the #MixBuffer is completely released, the callback to the + * function registered in the #MixBuffer will be called (allowing the App/MMF to release + * data buffers as necessary). + * + */ + +#include + +#include "mixvideolog.h" +#include "mixbufferpool.h" +#include "mixbuffer.h" +#include "mixbuffer_private.h" + +#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } + +MixBuffer::MixBuffer() + :data(NULL) + ,size(0) + ,token(0) + ,callback(NULL) + ,pool(NULL) { +} + +MixBuffer::~MixBuffer(){ +} + +/** + * mix_buffer_dup: + * @obj: a #MixBuffer object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. + */ +MixParams * MixBuffer::dup() const { + MixParams *ret = new MixBuffer(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; +} + +/** + * @target: copy to target + * @src: copy from src + * @returns: boolean indicates if copy is successful. + * + * Copy instance data from @src to @target. 
+ */ +gboolean MixBuffer::copy(MixParams * target) const { + gboolean ret = FALSE; + MixBuffer * this_target = MIX_BUFFER(target); + if (NULL != this_target) { + this_target->data = data; + this_target->size = size; + this_target->token = token; + this_target->callback = callback; + ret = MixParams::copy(target); + } + return ret; +} + +gboolean MixBuffer::equal(MixParams * obj) const { + gboolean ret = FALSE; + MixBuffer * this_obj = MIX_BUFFER(obj); + if (NULL != this_obj) { + if (this_obj->data == data && + this_obj->size == size && + this_obj->token == token && + this_obj->callback == callback) { + ret = MixParams::equal(this_obj); + } + } + return ret; +} + +MixBuffer * mix_buffer_new(void) { + return new MixBuffer(); +} + +MixBuffer * mix_buffer_ref(MixBuffer * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} + + + +MIX_RESULT mix_buffer_set_data( + MixBuffer * obj, guchar *data, guint size, + gulong token, MixBufferCallback callback) { + obj->data = data; + obj->size = size; + obj->token = token; + obj->callback = callback; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_buffer_set_pool(MixBuffer *obj, MixBufferPool *pool) { + obj->pool = pool; + return MIX_RESULT_SUCCESS; +} + +void mix_buffer_unref(MixBuffer * obj) { + + if (NULL != obj) { + gint newRefcount = obj->GetRefCount() - 1; + LOG_I( "after unref, refcount = %d\n", newRefcount); + // Unref through base class + obj->Unref(); + if (1 == newRefcount) { + g_return_if_fail(obj->pool != NULL); + if (obj->callback) { + obj->callback(obj->token, obj->data); + } + mix_bufferpool_put(obj->pool, obj); + } + } +} + diff --git a/mix_video/src/mixbuffer.h b/mix_video/src/mixbuffer.h index 0688442..aeef6a1 100644 --- a/mix_video/src/mixbuffer.h +++ b/mix_video/src/mixbuffer.h @@ -12,20 +12,11 @@ #include #include "mixvideodef.h" -G_BEGIN_DECLS - -/** - * MIX_TYPE_BUFFER: - * - * Get type of class. - */ -#define MIX_TYPE_BUFFER (mix_buffer_get_type ()) - /** * MIX_BUFFER: * @obj: object to be type-casted. */ -#define MIX_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_BUFFER, MixBuffer)) +#define MIX_BUFFER(obj) (reinterpret_cast(obj)) /** * MIX_IS_BUFFER: @@ -33,46 +24,25 @@ G_BEGIN_DECLS * * Checks if the given object is an instance of #MixParams */ -#define MIX_IS_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_BUFFER)) - -/** - * MIX_BUFFER_CLASS: - * @klass: class to be type-casted. - */ -#define MIX_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_BUFFER, MixBufferClass)) - -/** - * MIX_IS_BUFFER_CLASS: - * @klass: a class. - * - * Checks if the given class is #MixParamsClass - */ -#define MIX_IS_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_BUFFER)) - -/** - * MIX_BUFFER_GET_CLASS: - * @obj: a #MixParams object. - * - * Get the class instance of the object. 
- */ -#define MIX_BUFFER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_BUFFER, MixBufferClass)) +#define MIX_IS_BUFFER(obj) (NULL != MIX_BUFFER(obj)) typedef void (*MixBufferCallback)(gulong token, guchar *data); -typedef struct _MixBuffer MixBuffer; -typedef struct _MixBufferClass MixBufferClass; +class MixBufferPool; /** * MixBuffer: * * MI-X Buffer Parameter object */ -struct _MixBuffer { - /*< public > */ - MixParams parent; - - /*< public > */ - +class MixBuffer : public MixParams { +public: + MixBuffer(); + virtual ~MixBuffer(); + virtual gboolean copy(MixParams* target) const; + virtual MixParams* dup() const; + virtual gboolean equal(MixParams* obj) const; +public: /* Pointer to coded data buffer */ guchar *data; @@ -92,30 +62,9 @@ struct _MixBuffer { MixBufferCallback callback; /* < private > */ - /* reserved */ - gpointer reserved; -}; - -/** - * MixBufferClass: - * - * MI-X VideoConfig object class - */ -struct _MixBufferClass { - /*< public > */ - MixParamsClass parent_class; - - /* class members */ + MixBufferPool *pool; }; -/** - * mix_buffer_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_buffer_get_type(void); - /** * mix_buffer_new: * @returns: A newly allocated instance of #MixBuffer @@ -156,6 +105,4 @@ void mix_buffer_unref(MixBuffer * mix); MIX_RESULT mix_buffer_set_data(MixBuffer * obj, guchar *data, guint size, gulong token, MixBufferCallback callback); -G_END_DECLS - #endif /* __MIX_BUFFER_H__ */ diff --git a/mix_video/src/mixbuffer_private.h b/mix_video/src/mixbuffer_private.h index 43c8347..81b7f9d 100644 --- a/mix_video/src/mixbuffer_private.h +++ b/mix_video/src/mixbuffer_private.h @@ -12,8 +12,11 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixbuffer.h" #include "mixbufferpool.h" -G_BEGIN_DECLS +class MixBuffer; +class MixBufferPool; + +#if 0 typedef struct _MixBufferPrivate MixBufferPrivate; struct _MixBufferPrivate @@ -22,21 +25,10 @@ struct _MixBufferPrivate MixBufferPool *pool; }; - -/** -* MIX_BUFFER_PRIVATE: -* -* Get private structure of this class. -* @obj: class object for which to get private data. -*/ -#define MIX_BUFFER_GET_PRIVATE(obj) \ - (G_TYPE_INSTANCE_GET_PRIVATE ((obj), MIX_TYPE_BUFFER, MixBufferPrivate)) - +#endif /* Private functions */ MIX_RESULT mix_buffer_set_pool (MixBuffer *obj, MixBufferPool *pool); -G_END_DECLS - #endif /* __MIX_BUFFER_PRIVATE_H__ */ diff --git a/mix_video/src/mixbufferpool.c b/mix_video/src/mixbufferpool.c deleted file mode 100644 index 9d9ad56..0000000 --- a/mix_video/src/mixbufferpool.c +++ /dev/null @@ -1,484 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
- - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixbufferpool - * @short_description: MI-X Input Buffer Pool - * - * A data object which stores and manipulates a pool of compressed video buffers. - */ - -#include "mixvideolog.h" -#include "mixbufferpool.h" -#include "mixbuffer_private.h" - -#define MIX_LOCK(lock) g_mutex_lock(lock); -#define MIX_UNLOCK(lock) g_mutex_unlock(lock); - -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } - -static GType _mix_bufferpool_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_bufferpool_type = g_define_type_id; } - -gboolean mix_bufferpool_copy(MixParams * target, const MixParams * src); -MixParams *mix_bufferpool_dup(const MixParams * obj); -gboolean mix_bufferpool_equal(MixParams * first, MixParams * second); -static void mix_bufferpool_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixBufferPool, mix_bufferpool, MIX_TYPE_PARAMS, - _do_init); - -static void mix_bufferpool_init(MixBufferPool * self) { - /* initialize properties here */ - self->free_list = NULL; - self->in_use_list = NULL; - self->free_list_max_size = 0; - self->high_water_mark = 0; - - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; - - // TODO: relocate this mutex allocation -we can't communicate failure in ctor. - // Note that g_thread_init() has already been called by mix_video_init() - self->objectlock = g_mutex_new(); - -} - -static void mix_bufferpool_class_init(MixBufferPoolClass * klass) { - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); - - mixparams_class->finalize = mix_bufferpool_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_bufferpool_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_bufferpool_dup; - mixparams_class->equal = (MixParamsEqualFunction) mix_bufferpool_equal; -} - -MixBufferPool * -mix_bufferpool_new(void) { - MixBufferPool *ret = (MixBufferPool *) g_type_create_instance( - MIX_TYPE_BUFFERPOOL); - return ret; -} - -void mix_bufferpool_finalize(MixParams * obj) { - /* clean up here. */ - - MixBufferPool *self = MIX_BUFFERPOOL(obj); - - if (self->objectlock) { - g_mutex_free(self->objectlock); - self->objectlock = NULL; - } - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} - -MixBufferPool * -mix_bufferpool_ref(MixBufferPool * mix) { - return (MixBufferPool *) mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_bufferpool_dup: - * @obj: a #MixBufferPool object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. 
- */ -MixParams * -mix_bufferpool_dup(const MixParams * obj) { - MixParams *ret = NULL; - - if (MIX_IS_BUFFERPOOL(obj)) { - - MIX_LOCK(MIX_BUFFERPOOL(obj)->objectlock); - - MixBufferPool *duplicate = mix_bufferpool_new(); - if (mix_bufferpool_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { - ret = MIX_PARAMS(duplicate); - } else { - mix_bufferpool_unref(duplicate); - } - - MIX_UNLOCK(MIX_BUFFERPOOL(obj)->objectlock); - - } - return ret; -} - -/** - * mix_bufferpool_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_bufferpool_copy(MixParams * target, const MixParams * src) { - MixBufferPool *this_target, *this_src; - - if (MIX_IS_BUFFERPOOL(target) && MIX_IS_BUFFERPOOL(src)) { - - MIX_LOCK(MIX_BUFFERPOOL(src)->objectlock); - MIX_LOCK(MIX_BUFFERPOOL(target)->objectlock); - - // Cast the base object to this child object - this_target = MIX_BUFFERPOOL(target); - this_src = MIX_BUFFERPOOL(src); - - // Free the existing properties - - // Duplicate string - this_target->free_list = this_src->free_list; - this_target->in_use_list = this_src->in_use_list; - this_target->free_list_max_size = this_src->free_list_max_size; - this_target->high_water_mark = this_src->high_water_mark; - - MIX_UNLOCK(MIX_BUFFERPOOL(src)->objectlock); - MIX_UNLOCK(MIX_BUFFERPOOL(target)->objectlock); - - // Now chainup base class - if (parent_class->copy) { - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - return TRUE; - } - } - return FALSE; -} - -/** - * mix_bufferpool_equal: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. - */ -gboolean mix_bufferpool_equal(MixParams * first, MixParams * second) { - gboolean ret = FALSE; - MixBufferPool *this_first, *this_second; - - if (MIX_IS_BUFFERPOOL(first) && MIX_IS_BUFFERPOOL(second)) { - // Deep compare - // Cast the base object to this child object - - MIX_LOCK(MIX_BUFFERPOOL(first)->objectlock); - MIX_LOCK(MIX_BUFFERPOOL(second)->objectlock); - - this_first = MIX_BUFFERPOOL(first); - this_second = MIX_BUFFERPOOL(second); - - /* TODO: add comparison for other properties */ - if (this_first->free_list == this_second->free_list - && this_first->in_use_list == this_second->in_use_list - && this_first->free_list_max_size - == this_second->free_list_max_size - && this_first->high_water_mark == this_second->high_water_mark) { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = klass->equal(first, second); - else - ret = TRUE; - } - - MIX_LOCK(MIX_BUFFERPOOL(first)->objectlock); - MIX_LOCK(MIX_BUFFERPOOL(second)->objectlock); - - } - - return ret; -} - -/* Class Methods */ - -/** - * mix_bufferpool_initialize: - * @returns: MIX_RESULT_SUCCESS if successful in creating the buffer pool - * - * Use this method to create a new buffer pool, consisting of a GSList of - * buffer objects that represents a pool of buffers. - */ -MIX_RESULT mix_bufferpool_initialize(MixBufferPool * obj, guint num_buffers) { - - LOG_V( "Begin\n"); - - if (obj == NULL) - return MIX_RESULT_NULL_PTR; - - MIX_LOCK(obj->objectlock); - - if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) { - //buffer pool is in use; return error; need proper cleanup - //TODO need cleanup here? 
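/*
 * The shape of the migration in these hunks, reduced to a sketch with
 * simplified types (not the real MixParams API): the deleted GObject code
 * registers copy, dup, equal and finalize through class-init function
 * pointers and G_DEFINE_TYPE_WITH_CODE, while the new .cpp files express the
 * same contract as virtual methods on a common base class, so the per-type
 * boilerplate (type registration, private-struct lookup, explicit chain-up)
 * disappears.
 */
class Params {                                  // plays the role of MixParams
public:
    virtual ~Params() {}
    virtual Params *dup() const = 0;
    virtual bool copy(Params *target) const = 0;
    virtual bool equal(Params *obj) const = 0;
};

class Buffer : public Params {                  // plays the role of MixBuffer
public:
    unsigned char *data = nullptr;
    unsigned int size = 0;

    Params *dup() const override {              // mirrors MixBuffer::dup above
        Params *ret = new Buffer();
        if (!copy(ret)) { delete ret; ret = nullptr; }
        return ret;
    }
    bool copy(Params *target) const override {  // shallow member copy, as in the original
        Buffer *t = dynamic_cast<Buffer *>(target);
        if (t == nullptr) return false;
        t->data = data;
        t->size = size;
        return true;
    }
    bool equal(Params *obj) const override {
        Buffer *o = dynamic_cast<Buffer *>(obj);
        return o != nullptr && o->data == data && o->size == size;
    }
};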
- - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_ALREADY_INIT; - } - - if (num_buffers == 0) { - obj->free_list = NULL; - - obj->in_use_list = NULL; - - obj->free_list_max_size = num_buffers; - - obj->high_water_mark = 0; - - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_SUCCESS; - } - - // Initialize the free pool with MixBuffer objects - - gint i = 0; - MixBuffer *buffer = NULL; - - for (; i < num_buffers; i++) { - - buffer = mix_buffer_new(); - - if (buffer == NULL) { - //TODO need to log an error here and do cleanup - - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_NO_MEMORY; - } - - // Set the pool reference in the private data of the MixBuffer object - mix_buffer_set_pool(buffer, obj); - - //Add each MixBuffer object to the pool list - obj->free_list = g_slist_append(obj->free_list, buffer); - - } - - obj->in_use_list = NULL; - - obj->free_list_max_size = num_buffers; - - obj->high_water_mark = 0; - - MIX_UNLOCK(obj->objectlock); - - LOG_V( "End\n"); - -return MIX_RESULT_SUCCESS; -} - -/** - * mix_bufferpool_put: - * @returns: SUCCESS or FAILURE - * - * Use this method to return a buffer to the free pool - */ -MIX_RESULT mix_bufferpool_put(MixBufferPool * obj, MixBuffer * buffer) { - - if (obj == NULL || buffer == NULL) - return MIX_RESULT_NULL_PTR; - - MIX_LOCK(obj->objectlock); - - if (obj->in_use_list == NULL) { - //in use list cannot be empty if a buffer is in use - //TODO need better error code for this - - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_FAIL; - } - - GSList *element = g_slist_find(obj->in_use_list, buffer); - if (element == NULL) { - //Integrity error; buffer not found in in use list - //TODO need better error code and handling for this - - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_FAIL; - } else { - //Remove this element from the in_use_list - obj->in_use_list = g_slist_remove_link(obj->in_use_list, element); - - //Concat the element to the free_list - obj->free_list = g_slist_concat(obj->free_list, element); - } - - //Note that we do nothing with the ref count for this. We want it to - //stay at 1, which is what triggered it to be added back to the free list. 
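/*
 * The put path above relies on a GSList idiom worth calling out: the list
 * node is never freed and re-created, it is detached from one list and
 * spliced onto the other, so returning a buffer costs no allocation. A
 * self-contained sketch of that idiom, with the pool state reduced to two
 * bare lists (hypothetical names):
 */
#include <glib.h>

typedef struct {
    GSList *free_list;    /* idle buffers              */
    GSList *in_use_list;  /* buffers handed to the app */
} pool_t;

gboolean pool_put(pool_t *pool, gpointer buffer) {
    GSList *element = g_slist_find(pool->in_use_list, buffer);
    if (element == NULL)
        return FALSE;  /* integrity error, mirrors the MIX_RESULT_FAIL path above */
    /* detach the node but keep it allocated ... */
    pool->in_use_list = g_slist_remove_link(pool->in_use_list, element);
    /* ... and splice the very same node onto the free list */
    pool->free_list = g_slist_concat(pool->free_list, element);
    return TRUE;
}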
- - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_SUCCESS; -} - -/** - * mix_bufferpool_get: - * @returns: SUCCESS or FAILURE - * - * Use this method to get a buffer from the free pool - */ -MIX_RESULT mix_bufferpool_get(MixBufferPool * obj, MixBuffer ** buffer) { - - if (obj == NULL || buffer == NULL) - return MIX_RESULT_NULL_PTR; - - MIX_LOCK(obj->objectlock); - - if (obj->free_list == NULL) { - //We are out of buffers - //TODO need to log this as well - - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_POOLEMPTY; - } - - //Remove a buffer from the free pool - - //We just remove the one at the head, since it's convenient - GSList *element = obj->free_list; - obj->free_list = g_slist_remove_link(obj->free_list, element); - if (element == NULL) { - //Unexpected behavior - //TODO need better error code and handling for this - - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_FAIL; - } else { - //Concat the element to the in_use_list - obj->in_use_list = g_slist_concat(obj->in_use_list, element); - - //TODO replace with proper logging - - LOG_I( "buffer refcount%d\n", - MIX_PARAMS(element->data)->refcount); - - //Set the out buffer pointer - *buffer = (MixBuffer *) element->data; - - //Check the high water mark for buffer use - guint size = g_slist_length(obj->in_use_list); - if (size > obj->high_water_mark) - obj->high_water_mark = size; - //TODO Log this high water mark - } - - //Increment the reference count for the buffer - mix_buffer_ref(*buffer); - - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_SUCCESS; -} - -/** - * mix_bufferpool_deinitialize: - * @returns: SUCCESS or FAILURE - * - * Use this method to teardown a buffer pool - */ -MIX_RESULT mix_bufferpool_deinitialize(MixBufferPool * obj) { - if (obj == NULL) - return MIX_RESULT_NULL_PTR; - - MIX_LOCK(obj->objectlock); - - if ((obj->in_use_list != NULL) || (g_slist_length(obj->free_list) - != obj->free_list_max_size)) { - //TODO better error code - //We have outstanding buffer objects in use and they need to be - //freed before we can deinitialize. 
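/*
 * The refcount convention these get/put paths assume, sketched with
 * hypothetical names: the pool permanently holds one reference to every
 * buffer, mix_bufferpool_get() takes a second one for the caller, and
 * mix_buffer_unref() (earlier in this patch) treats a count falling back to
 * 1 as the recycling trigger: fire the app's release callback, then return
 * the buffer to the free list.
 */
struct pooled_buffer {
    int refcount;                                /* 1 while idle in the pool */
    void (*release_cb)(struct pooled_buffer *);  /* app-supplied release hook */
};

void buffer_unref(struct pooled_buffer *b) {
    b->refcount -= 1;
    if (b->refcount == 1) {      /* only the pool's own reference is left */
        if (b->release_cb)
            b->release_cb(b);    /* let the app reclaim its data buffer   */
        /* pool_put(pool, b);       splice back, as sketched above        */
    }
}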
- - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_FAIL; - } - - //Now remove buffer objects from the list - - MixBuffer *buffer = NULL; - - while (obj->free_list != NULL) { - //Get the buffer object from the head of the list - buffer = obj->free_list->data; - //buffer = g_slist_nth_data(obj->free_list, 0); - - //Release it - mix_buffer_unref(buffer); - - //Delete the head node of the list and store the new head - obj->free_list = g_slist_delete_link(obj->free_list, obj->free_list); - - //Repeat until empty - } - - obj->free_list_max_size = 0; - - //May want to log this information for tuning - obj->high_water_mark = 0; - - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_SUCCESS; -} - -#define MIX_BUFFERPOOL_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_BUFFERPOOL(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_BUFFERPOOL_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_BUFFERPOOL(obj)) return MIX_RESULT_FAIL; \ - - -MIX_RESULT -mix_bufferpool_dumpbuffer(MixBuffer *buffer) -{ - LOG_I( "\tBuffer %x, ptr %x, refcount %d\n", (guint)buffer, - (guint)buffer->data, MIX_PARAMS(buffer)->refcount); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT -mix_bufferpool_dumpprint (MixBufferPool * obj) -{ - //TODO replace this with proper logging later - - LOG_I( "BUFFER POOL DUMP:\n"); - LOG_I( "Free list size is %d\n", g_slist_length(obj->free_list)); - LOG_I( "In use list size is %d\n", g_slist_length(obj->in_use_list)); - LOG_I( "High water mark is %lu\n", obj->high_water_mark); - - //Walk the free list and report the contents - LOG_I( "Free list contents:\n"); - g_slist_foreach(obj->free_list, (GFunc) mix_bufferpool_dumpbuffer, NULL); - - //Walk the in_use list and report the contents - LOG_I( "In Use list contents:\n"); - g_slist_foreach(obj->in_use_list, (GFunc) mix_bufferpool_dumpbuffer, NULL); - - return MIX_RESULT_SUCCESS; -} - diff --git a/mix_video/src/mixbufferpool.cpp b/mix_video/src/mixbufferpool.cpp new file mode 100644 index 0000000..044fddc --- /dev/null +++ b/mix_video/src/mixbufferpool.cpp @@ -0,0 +1,377 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixbufferpool + * @short_description: MI-X Input Buffer Pool + * + * A data object which stores and manipulates a pool of compressed video buffers. 
+ */ + +#include "mixvideolog.h" +#include "mixbufferpool.h" +#include "mixbuffer_private.h" + +#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } + +MixBufferPool::MixBufferPool() + :free_list(NULL) + ,in_use_list(NULL) + ,free_list_max_size(0) + ,high_water_mark(0) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) + ,mLock() { +} + +MixBufferPool::~MixBufferPool(){ +} + +MixBufferPool * mix_bufferpool_new(void) { + return new MixBufferPool(); +} + +MixBufferPool * mix_bufferpool_ref(MixBufferPool * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} + +MixParams * MixBufferPool::dup() const { + MixBufferPool * ret = new MixBufferPool(); + MixBufferPool * this_obj = const_cast(this); + if (NULL != ret) { + this_obj->Lock(); + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + this_obj->Unlock(); + } + return ret; +} + + +/** + * mix_bufferpool_copy: + * @target: copy to target + * @src: copy from src + * @returns: boolean indicates if copy is successful. + * + * Copy instance data from @src to @target. + */ + +gboolean MixBufferPool::copy(MixParams * target) const { + gboolean ret = FALSE; + MixBufferPool * this_target = MIX_BUFFERPOOL(target); + MixBufferPool * this_obj = const_cast(this); + if (NULL != this_target) { + this_obj->Lock(); + this_target->Lock(); + this_target->free_list = free_list; + this_target->in_use_list = in_use_list; + this_target->free_list_max_size = free_list_max_size; + this_target->high_water_mark = high_water_mark; + ret = MixParams::copy(target); + this_target->Unlock(); + this_obj->Unlock(); + } + return ret; +} + +gboolean MixBufferPool::equal(MixParams * obj) const { + gboolean ret = FALSE; + MixBufferPool * this_obj = MIX_BUFFERPOOL(obj); + MixBufferPool * unconst_this = const_cast(this); + if (NULL != this_obj) { + unconst_this->Lock(); + this_obj->Lock(); + if (free_list == this_obj->free_list && + in_use_list == this_obj->in_use_list && + free_list_max_size == this_obj->free_list_max_size && + high_water_mark == this_obj->high_water_mark) { + ret = MixParams::equal(this_obj); + } + this_obj->Unlock(); + unconst_this->Unlock(); + } + return ret; +} + +/** + * mix_bufferpool_initialize: + * @returns: MIX_RESULT_SUCCESS if successful in creating the buffer pool + * + * Use this method to create a new buffer pool, consisting of a GSList of + * buffer objects that represents a pool of buffers. + */ +MIX_RESULT mix_bufferpool_initialize( + MixBufferPool * obj, guint num_buffers) { + LOG_V( "Begin\n"); + + if (obj == NULL) + return MIX_RESULT_NULL_PTR; + + obj->Lock(); + + if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) { + //buffer pool is in use; return error; need proper cleanup + //TODO need cleanup here? 
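/*
 * One detail of the C++ rewrite above: the GMutex objectlock and the
 * MIX_LOCK/MIX_UNLOCK macros become Lock()/Unlock() calls on a lock member,
 * and because dup() and copy() are const yet must take that lock, the code
 * reaches for const_cast<MixBufferPool*>(this). Declaring the lock mutable
 * is the usual alternative; a sketch with a standard mutex (the real class
 * wraps its own lock type):
 */
#include <mutex>

class PoolSketch {
public:
    bool equal(const PoolSketch &other) const {
        std::lock_guard<std::mutex> guard(mLock);  // legal: mLock is mutable
        return high_water_mark == other.high_water_mark;
    }
    unsigned int high_water_mark = 0;
private:
    mutable std::mutex mLock;                      // lockable from const methods
};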
+ + obj->Unlock(); + + return MIX_RESULT_ALREADY_INIT; + } + + if (num_buffers == 0) { + obj->free_list = NULL; + + obj->in_use_list = NULL; + + obj->free_list_max_size = num_buffers; + + obj->high_water_mark = 0; + + obj->Unlock(); + + return MIX_RESULT_SUCCESS; + } + + // Initialize the free pool with MixBuffer objects + + guint i = 0; + MixBuffer *buffer = NULL; + + for (; i < num_buffers; i++) { + + buffer = mix_buffer_new(); + + if (buffer == NULL) { + //TODO need to log an error here and do cleanup + + obj->Unlock(); + + return MIX_RESULT_NO_MEMORY; + } + + // Set the pool reference in the private data of the MixBuffer object + mix_buffer_set_pool(buffer, obj); + + //Add each MixBuffer object to the pool list + obj->free_list = g_slist_append(obj->free_list, buffer); + + } + + obj->in_use_list = NULL; + + obj->free_list_max_size = num_buffers; + + obj->high_water_mark = 0; + + obj->Unlock(); + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} + +/** + * mix_bufferpool_put: + * @returns: SUCCESS or FAILURE + * + * Use this method to return a buffer to the free pool + */ +MIX_RESULT mix_bufferpool_put(MixBufferPool * obj, MixBuffer * buffer) { + + if (obj == NULL || buffer == NULL) + return MIX_RESULT_NULL_PTR; + + obj->Lock(); + + if (obj->in_use_list == NULL) { + //in use list cannot be empty if a buffer is in use + //TODO need better error code for this + + obj->Unlock(); + + return MIX_RESULT_FAIL; + } + + GSList *element = g_slist_find(obj->in_use_list, buffer); + if (element == NULL) { + //Integrity error; buffer not found in in use list + //TODO need better error code and handling for this + + obj->Unlock(); + + return MIX_RESULT_FAIL; + } else { + //Remove this element from the in_use_list + obj->in_use_list = g_slist_remove_link(obj->in_use_list, element); + + //Concat the element to the free_list + obj->free_list = g_slist_concat(obj->free_list, element); + } + + //Note that we do nothing with the ref count for this. We want it to + //stay at 1, which is what triggered it to be added back to the free list. 
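+    //(Assumed flow, based on mix_buffer_set_pool() in initialize: when the
+    //last external ref is released, mix_buffer_unref() drops the count back
+    //to 1 and uses the stored pool pointer to route the buffer through this
+    //function rather than freeing it.)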
+ + obj->Unlock(); + + return MIX_RESULT_SUCCESS; +} + +/** + * mix_bufferpool_get: + * @returns: SUCCESS or FAILURE + * + * Use this method to get a buffer from the free pool + */ +MIX_RESULT mix_bufferpool_get(MixBufferPool * obj, MixBuffer ** buffer) { + + if (obj == NULL || buffer == NULL) + return MIX_RESULT_NULL_PTR; + + obj->Lock(); + + if (obj->free_list == NULL) { + //We are out of buffers + //TODO need to log this as well + + obj->Unlock(); + + return MIX_RESULT_POOLEMPTY; + } + + //Remove a buffer from the free pool + + //We just remove the one at the head, since it's convenient + GSList *element = obj->free_list; + obj->free_list = g_slist_remove_link(obj->free_list, element); + if (element == NULL) { + //Unexpected behavior + //TODO need better error code and handling for this + + obj->Unlock(); + + return MIX_RESULT_FAIL; + } else { + //Concat the element to the in_use_list + obj->in_use_list = g_slist_concat(obj->in_use_list, element); + + //TODO replace with proper logging + + LOG_I( "buffer refcount%d\n", + MIX_PARAMS(element->data)->ref_count); + + //Set the out buffer pointer + *buffer = (MixBuffer *) element->data; + + //Check the high water mark for buffer use + guint size = g_slist_length(obj->in_use_list); + if (size > obj->high_water_mark) + obj->high_water_mark = size; + //TODO Log this high water mark + } + + //Increment the reference count for the buffer + mix_buffer_ref(*buffer); + + obj->Unlock(); + + return MIX_RESULT_SUCCESS; +} + +/** + * mix_bufferpool_deinitialize: + * @returns: SUCCESS or FAILURE + * + * Use this method to teardown a buffer pool + */ +MIX_RESULT mix_bufferpool_deinitialize(MixBufferPool * obj) { + if (obj == NULL) + return MIX_RESULT_NULL_PTR; + + obj->Lock(); + + if ((obj->in_use_list != NULL) || (g_slist_length(obj->free_list) + != obj->free_list_max_size)) { + //TODO better error code + //We have outstanding buffer objects in use and they need to be + //freed before we can deinitialize. 
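+        //(This length check suffices because every pooled buffer lives on
+        //exactly one of the two lists: a free_list shorter than
+        //free_list_max_size means buffers are still checked out via
+        //mix_bufferpool_get().)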
+
+        obj->Unlock();
+
+        return MIX_RESULT_FAIL;
+    }
+
+    //Now remove buffer objects from the list
+
+    MixBuffer *buffer = NULL;
+
+    while (obj->free_list != NULL) {
+        //Get the buffer object from the head of the list
+        buffer = reinterpret_cast<MixBuffer *>(obj->free_list->data);
+        //buffer = g_slist_nth_data(obj->free_list, 0);
+
+        //Release it
+        mix_buffer_unref(buffer);
+
+        //Delete the head node of the list and store the new head
+        obj->free_list = g_slist_delete_link(obj->free_list, obj->free_list);
+
+        //Repeat until empty
+    }
+
+    obj->free_list_max_size = 0;
+
+    //May want to log this information for tuning
+    obj->high_water_mark = 0;
+
+    obj->Unlock();
+
+    return MIX_RESULT_SUCCESS;
+}
+
+#define MIX_BUFFERPOOL_SETTER_CHECK_INPUT(obj) \
+    if(!obj) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_BUFFERPOOL(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_BUFFERPOOL_GETTER_CHECK_INPUT(obj, prop) \
+    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_BUFFERPOOL(obj)) return MIX_RESULT_FAIL; \
+
+
+MIX_RESULT
+mix_bufferpool_dumpbuffer(MixBuffer *buffer) {
+    LOG_I( "\tBuffer %x, ptr %x, refcount %d\n", (guint)buffer,
+        (guint)buffer->data, MIX_PARAMS(buffer)->ref_count);
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT
+mix_bufferpool_dumpprint (MixBufferPool * obj) {
+    //TODO replace this with proper logging later
+    LOG_I( "BUFFER POOL DUMP:\n");
+    LOG_I( "Free list size is %d\n", g_slist_length(obj->free_list));
+    LOG_I( "In use list size is %d\n", g_slist_length(obj->in_use_list));
+    LOG_I( "High water mark is %lu\n", obj->high_water_mark);
+
+    //Walk the free list and report the contents
+    LOG_I( "Free list contents:\n");
+    g_slist_foreach(obj->free_list, (GFunc) mix_bufferpool_dumpbuffer, NULL);
+
+    //Walk the in_use list and report the contents
+    LOG_I( "In Use list contents:\n");
+    g_slist_foreach(obj->in_use_list, (GFunc) mix_bufferpool_dumpbuffer, NULL);
+
+    return MIX_RESULT_SUCCESS;
+}
+
diff --git a/mix_video/src/mixbufferpool.h b/mix_video/src/mixbufferpool.h
index bf32d0d..1275bbc 100644
--- a/mix_video/src/mixbufferpool.h
+++ b/mix_video/src/mixbufferpool.h
@@ -12,23 +12,16 @@ No license under any patent, copyright, trade secret or other intellectual prope
 #include
 #include "mixvideodef.h"
 #include "mixbuffer.h"
-
+#include "mixvideothread.h"
 #include
 
-G_BEGIN_DECLS
-
-/**
-* MIX_TYPE_BUFFERPOOL:
-*
-* Get type of class.
-*/
-#define MIX_TYPE_BUFFERPOOL (mix_bufferpool_get_type ())
+class MixBuffer;
 
 /**
 * MIX_BUFFERPOOL:
 * @obj: object to be type-casted.
 */
-#define MIX_BUFFERPOOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_BUFFERPOOL, MixBufferPool))
+#define MIX_BUFFERPOOL(obj) (reinterpret_cast<MixBufferPool*>(obj))
 
 /**
 * MIX_IS_BUFFERPOOL:
@@ -36,43 +29,25 @@ G_BEGIN_DECLS
 *
 * Checks if the given object is an instance of #MixBufferPool
 */
-#define MIX_IS_BUFFERPOOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_BUFFERPOOL))
-
-/**
-* MIX_BUFFERPOOL_CLASS:
-* @klass: class to be type-casted.
-*/
-#define MIX_BUFFERPOOL_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_BUFFERPOOL, MixBufferPoolClass))
-
-/**
-* MIX_IS_BUFFERPOOL_CLASS:
-* @klass: a class.
-*
-* Checks if the given class is #MixBufferPoolClass
-*/
-#define MIX_IS_BUFFERPOOL_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_BUFFERPOOL))
-
-/**
-* MIX_BUFFERPOOL_GET_CLASS:
-* @obj: a #MixBufferPool object.
-*
-* Get the class instance of the object.
-*/ -#define MIX_BUFFERPOOL_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_BUFFERPOOL, MixBufferPoolClass)) - -typedef struct _MixBufferPool MixBufferPool; -typedef struct _MixBufferPoolClass MixBufferPoolClass; +#define MIX_IS_BUFFERPOOL(obj) (NULL != MIX_BUFFERPOOL(obj)) /** * MixBufferPool: * * MI-X Video Buffer Pool object */ -struct _MixBufferPool +class MixBufferPool : public MixParams { - /*< public > */ - MixParams parent; - +public: + MixBufferPool(); + virtual ~MixBufferPool(); + virtual gboolean copy(MixParams* target) const; + virtual MixParams* dup() const; + virtual gboolean equal(MixParams* obj) const; + + void Lock() {mLock.lock();} + void Unlock() {mLock.unlock();} +public: /*< public > */ GSList *free_list; /* list of free buffers */ GSList *in_use_list; /* list of buffers in use */ @@ -85,30 +60,10 @@ struct _MixBufferPool void *reserved4; /*< private > */ - GMutex *objectlock; - + MixVideoMutex mLock; }; -/** -* MixBufferPoolClass: -* -* MI-X Video Buffer Pool object class -*/ -struct _MixBufferPoolClass -{ - /*< public > */ - MixParamsClass parent_class; - - /* class members */ -}; -/** -* mix_bufferpool_get_type: -* @returns: type -* -* Get the type of object. -*/ -GType mix_bufferpool_get_type (void); /** * mix_bufferpool_new: @@ -135,16 +90,9 @@ MixBufferPool *mix_bufferpool_ref (MixBufferPool * mix); #define mix_bufferpool_unref(obj) mix_params_unref(MIX_PARAMS(obj)) /* Class Methods */ - -MIX_RESULT mix_bufferpool_initialize (MixBufferPool * obj, - guint num_buffers); -MIX_RESULT mix_bufferpool_put (MixBufferPool * obj, - MixBuffer * buffer); - -MIX_RESULT mix_bufferpool_get (MixBufferPool * obj, - MixBuffer ** buffer); +MIX_RESULT mix_bufferpool_initialize (MixBufferPool * obj, guint num_buffers); +MIX_RESULT mix_bufferpool_put (MixBufferPool * obj, MixBuffer * buffer); +MIX_RESULT mix_bufferpool_get (MixBufferPool * obj, MixBuffer ** buffer); MIX_RESULT mix_bufferpool_deinitialize (MixBufferPool * obj); -G_END_DECLS - #endif /* __MIX_BUFFERPOOL_H__ */ diff --git a/mix_video/src/mixdisplay.c b/mix_video/src/mixdisplay.c deleted file mode 100644 index cd12846..0000000 --- a/mix_video/src/mixdisplay.c +++ /dev/null @@ -1,539 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
-*/ - -/** -* SECTION:mixdisplay -* @short_description: Lightweight Base Object for MI-X Video Display -* -*/ -#ifdef HAVE_CONFIG_H -#include "config.h" -#endif - -#include "mixdisplay.h" -#include - -#define DEBUG_REFCOUNT - -static void mix_display_class_init (gpointer g_class, gpointer class_data); -static void mix_display_init (GTypeInstance * instance, gpointer klass); - -static void mix_value_display_init (GValue * value); -static void mix_value_display_free (GValue * value); -static void mix_value_display_copy (const GValue * src_value, - GValue * dest_value); -static gpointer mix_value_display_peek_pointer (const GValue * value); -static gchar *mix_value_display_collect (GValue * value, - guint n_collect_values, - GTypeCValue * collect_values, - guint collect_flags); -static gchar *mix_value_display_lcopy (const GValue * value, - guint n_collect_values, - GTypeCValue * collect_values, - guint collect_flags); - -static void mix_display_finalize (MixDisplay * obj); -static gboolean mix_display_copy_default (MixDisplay * target, - const MixDisplay * src); -static MixDisplay *mix_display_dup_default (const MixDisplay * obj); -static gboolean mix_display_equal_default (MixDisplay * first, - MixDisplay * second); - -GType -mix_display_get_type (void) -{ - static GType _mix_display_type = 0; - - if (G_UNLIKELY (_mix_display_type == 0)) - { - - GTypeValueTable value_table = { - mix_value_display_init, - mix_value_display_free, - mix_value_display_copy, - mix_value_display_peek_pointer, - "p", - mix_value_display_collect, - "p", - mix_value_display_lcopy - }; - - GTypeInfo info = { - sizeof (MixDisplayClass), - NULL, - NULL, - mix_display_class_init, - NULL, - NULL, - sizeof (MixDisplay), - 0, - (GInstanceInitFunc) mix_display_init, - NULL - }; - - static const GTypeFundamentalInfo fundamental_info = { - (G_TYPE_FLAG_CLASSED | G_TYPE_FLAG_INSTANTIATABLE | - G_TYPE_FLAG_DERIVABLE | G_TYPE_FLAG_DEEP_DERIVABLE) - }; - - info.value_table = &value_table; - - _mix_display_type = g_type_fundamental_next (); - g_type_register_fundamental (_mix_display_type, "MixDisplay", - &info, &fundamental_info, - G_TYPE_FLAG_ABSTRACT); - - } - - return _mix_display_type; -} - -static void -mix_display_class_init (gpointer g_class, gpointer class_data) -{ - MixDisplayClass *klass = MIX_DISPLAY_CLASS (g_class); - - klass->dup = mix_display_dup_default; - klass->copy = mix_display_copy_default; - klass->finalize = mix_display_finalize; - klass->equal = mix_display_equal_default; -} - -static void -mix_display_init (GTypeInstance * instance, gpointer klass) -{ - MixDisplay *obj = MIX_DISPLAY_CAST (instance); - - obj->refcount = 1; -} - -gboolean -mix_display_copy (MixDisplay * target, const MixDisplay * src) -{ - /* Use the target object class. Because it knows what it is looking for. */ - MixDisplayClass *klass = MIX_DISPLAY_GET_CLASS (target); - if (klass->copy) - { - return klass->copy (target, src); - } - else - { - return mix_display_copy_default (target, src); - } -} - -/** -* mix_display_copy_default: -* @target: target -* @src: source -* -* The default copy method of this object. Perhap copy at this level. -* Assign this to the copy vmethod. -*/ -static gboolean -mix_display_copy_default (MixDisplay * target, const MixDisplay * src) -{ - if (MIX_IS_DISPLAY (target) && MIX_IS_DISPLAY (src)) - { - // TODO perform deep copy. 
- return TRUE; - } - return FALSE; -} - -static void -mix_display_finalize (MixDisplay * obj) -{ - /* do nothing */ -} - -MixDisplay * -mix_display_dup (const MixDisplay * obj) -{ - MixDisplayClass *klass = MIX_DISPLAY_GET_CLASS (obj); - - if (klass->dup) - { - return klass->dup (obj); - } - else if (MIX_IS_DISPLAY (obj)) - { - return mix_display_dup_default (obj); - } - return NULL; -} - -static MixDisplay * -mix_display_dup_default (const MixDisplay * obj) -{ - MixDisplay *ret = mix_display_new (); - if (mix_display_copy (ret, obj)) - { - return ret; - } - - return NULL; -} - -MixDisplay * -mix_display_new (GType type) -{ - MixDisplay *obj; - - /* we don't support dynamic types because they really aren't useful, - * and could cause refcount problems */ - obj = (MixDisplay *) g_type_create_instance (type); - - return obj; -} - -MixDisplay * -mix_display_ref (MixDisplay * obj) -{ - g_return_val_if_fail (MIX_IS_DISPLAY (obj), NULL); - - g_atomic_int_inc (&obj->refcount); - - return obj; -} - -static void -mix_display_free (MixDisplay * obj) -{ - MixDisplayClass *klass = NULL; - - klass = MIX_DISPLAY_GET_CLASS (obj); - klass->finalize (obj); - - /* Should we support recycling the object? */ - /* If so, refcount handling is slightly different. */ - /* i.e. If the refcount is still 0 we can really free the object, else the finalize method recycled the object -- but to where? */ - - if (g_atomic_int_get (&obj->refcount) == 0) - { - - g_type_free_instance ((GTypeInstance *) obj); - } -} - -void -mix_display_unref (MixDisplay * obj) -{ - g_return_if_fail (obj != NULL); - g_return_if_fail (obj->refcount > 0); - - if (G_UNLIKELY (g_atomic_int_dec_and_test (&obj->refcount))) - { - mix_display_free (obj); - } -} - -static void -mix_value_display_init (GValue * value) -{ - value->data[0].v_pointer = NULL; -} - -static void -mix_value_display_free (GValue * value) -{ - if (value->data[0].v_pointer) - { - mix_display_unref (MIX_DISPLAY_CAST (value->data[0].v_pointer)); - } -} - -static void -mix_value_display_copy (const GValue * src_value, GValue * dest_value) -{ - if (src_value->data[0].v_pointer) - { - dest_value->data[0].v_pointer = - mix_display_ref (MIX_DISPLAY_CAST (src_value->data[0].v_pointer)); - } - else - { - dest_value->data[0].v_pointer = NULL; - } -} - -static gpointer -mix_value_display_peek_pointer (const GValue * value) -{ - return value->data[0].v_pointer; -} - -static gchar * -mix_value_display_collect (GValue * value, guint n_collect_values, - GTypeCValue * collect_values, guint collect_flags) -{ - mix_value_set_display (value, collect_values[0].v_pointer); - - return NULL; -} - -static gchar * -mix_value_display_lcopy (const GValue * value, - guint n_collect_values, - GTypeCValue * collect_values, guint collect_flags) -{ - gpointer *obj_p = collect_values[0].v_pointer; - - if (!obj_p) - { - return g_strdup_printf ("value location for '%s' passed as NULL", - G_VALUE_TYPE_NAME (value)); - } - - if (!value->data[0].v_pointer) - *obj_p = NULL; - else if (collect_flags & G_VALUE_NOCOPY_CONTENTS) - *obj_p = value->data[0].v_pointer; - else - *obj_p = mix_display_ref (value->data[0].v_pointer); - - return NULL; -} - -/** -* mix_value_set_display: -* @value: a valid #GValue of %MIX_TYPE_DISPLAY derived type -* @obj: object value to set -* -* Set the contents of a %MIX_TYPE_DISPLAY derived #GValue to -* @obj. -* The caller retains ownership of the reference. 
-*/ -void -mix_value_set_display (GValue * value, MixDisplay * obj) -{ - gpointer *pointer_p; - - g_return_if_fail (MIX_VALUE_HOLDS_DISPLAY (value)); - g_return_if_fail (obj == NULL || MIX_IS_DISPLAY (obj)); - - pointer_p = &value->data[0].v_pointer; - mix_display_replace ((MixDisplay **) pointer_p, obj); -} - -/** -* mix_value_take_display: -* @value: a valid #GValue of #MIX_TYPE_DISPLAY derived type -* @obj: object value to take -* -* Set the contents of a #MIX_TYPE_DISPLAY derived #GValue to -* @obj. -* Takes over the ownership of the caller's reference to @obj; -* the caller doesn't have to unref it any more. -*/ -void -mix_value_take_display (GValue * value, MixDisplay * obj) -{ - gpointer *pointer_p; - - g_return_if_fail (MIX_VALUE_HOLDS_DISPLAY (value)); - g_return_if_fail (obj == NULL || MIX_IS_DISPLAY (obj)); - - pointer_p = &value->data[0].v_pointer; - mix_display_replace ((MixDisplay **) pointer_p, obj); - if (obj) - mix_display_unref (obj); -} - -/** -* mix_value_get_display: -* @value: a valid #GValue of #MIX_TYPE_DISPLAY derived type -* @returns:object contents of @value -* -* refcount of the MixDisplay is not increased. -*/ -MixDisplay * -mix_value_get_display (const GValue * value) -{ - g_return_val_if_fail (MIX_VALUE_HOLDS_DISPLAY (value), NULL); - - return value->data[0].v_pointer; -} - -/** -* mix_value_dup_display: -* @value: a valid #GValue of %MIX_TYPE_DISPLAY derived type -* @returns: object contents of @value -* -* refcount of MixDisplay is increased. -*/ -MixDisplay * -mix_value_dup_display (const GValue * value) -{ - g_return_val_if_fail (MIX_VALUE_HOLDS_DISPLAY (value), NULL); - - return mix_display_ref (value->data[0].v_pointer); -} - - -static void -param_display_init (GParamSpec * pspec) -{ - /* GParamSpecDisplay *ospec = G_PARAM_SPEC_DISPLAY (pspec); */ -} - -static void -param_display_set_default (GParamSpec * pspec, GValue * value) -{ - value->data[0].v_pointer = NULL; -} - -static gboolean -param_display_validate (GParamSpec * pspec, GValue * value) -{ - gboolean validated = FALSE; - MixParamSpecDisplay *ospec = MIX_PARAM_SPEC_DISPLAY (pspec); - MixDisplay *obj = value->data[0].v_pointer; - - if (obj && !g_value_type_compatible (G_OBJECT_TYPE (obj), G_PARAM_SPEC_VALUE_TYPE (ospec))) - { - mix_display_unref (obj); - value->data[0].v_pointer = NULL; - validated = TRUE; - } - - return validated; -} - -static gint -param_display_values_cmp (GParamSpec * pspec, - const GValue * value1, const GValue * value2) -{ - guint8 *p1 = value1->data[0].v_pointer; - guint8 *p2 = value2->data[0].v_pointer; - - - return p1 < p2 ? 
-1 : p1 > p2; -} - -GType -mix_param_spec_display_get_type (void) -{ - static GType type; - - if (G_UNLIKELY (type) == 0) - { - static const GParamSpecTypeInfo pspec_info = { - sizeof (MixParamSpecDisplay), /* instance_size */ - 16, /* n_preallocs */ - param_display_init, /* instance_init */ - G_TYPE_OBJECT, /* value_type */ - NULL, /* finalize */ - param_display_set_default, /* value_set_default */ - param_display_validate, /* value_validate */ - param_display_values_cmp, /* values_cmp */ - }; - /* FIXME 0.11: Should really be MixParamSpecDisplay */ - type = g_param_type_register_static ("GParamSpecDisplay", &pspec_info); - } - - return type; -} - -/** -* mix_param_spec_display: -* @name: the canonical name of the property -* @nick: the nickname of the property -* @blurb: a short description of the property -* @object_type: the #MixDisplayType for the property -* @flags: a combination of #GParamFlags -* @returns: a newly allocated #GParamSpec instance -* -* Creates a new #GParamSpec instance that hold #MixDisplay references. -* -*/ -GParamSpec * -mix_param_spec_display (const char *name, const char *nick, - const char *blurb, GType object_type, - GParamFlags flags) -{ - MixParamSpecDisplay *ospec; - - g_return_val_if_fail (g_type_is_a (object_type, MIX_TYPE_DISPLAY), NULL); - - ospec = g_param_spec_internal (MIX_TYPE_PARAM_DISPLAY, - name, nick, blurb, flags); - G_PARAM_SPEC (ospec)->value_type = object_type; - - return G_PARAM_SPEC (ospec); -} - -/** -* mix_display_replace: -* @olddata: pointer to a pointer to a object to be replaced -* @newdata: pointer to new object -* -* Modifies a pointer to point to a new object. The modification -* is done atomically, and the reference counts are updated correctly. -* Either @newdata and the value pointed to by @olddata may be NULL. -*/ -void -mix_display_replace (MixDisplay ** olddata, MixDisplay * newdata) -{ - MixDisplay *olddata_val; - - g_return_if_fail (olddata != NULL); - - olddata_val = g_atomic_pointer_get ((gpointer *) olddata); - - if (olddata_val == newdata) - return; - - if (newdata) - mix_display_ref (newdata); - - while (!g_atomic_pointer_compare_and_exchange - ((gpointer *) olddata, olddata_val, newdata)) - { - olddata_val = g_atomic_pointer_get ((gpointer *) olddata); - } - - if (olddata_val) - mix_display_unref (olddata_val); - -} - -gboolean -mix_display_equal (MixDisplay * first, MixDisplay * second) -{ - if (MIX_IS_DISPLAY (first)) - { - MixDisplayClass *klass = MIX_DISPLAY_GET_CLASS (first); - - if (klass->equal) - { - return klass->equal (first, second); - } - else - { - return mix_display_equal_default (first, second); - } - } - else - return FALSE; -} - -static gboolean -mix_display_equal_default (MixDisplay * first, MixDisplay * second) -{ - if (MIX_IS_DISPLAY (first) && MIX_IS_DISPLAY (second)) - { - gboolean ret = TRUE; - - // Do data comparison here. - - return ret; - } - else - return FALSE; -} diff --git a/mix_video/src/mixdisplay.cpp b/mix_video/src/mixdisplay.cpp new file mode 100644 index 0000000..c8df250 --- /dev/null +++ b/mix_video/src/mixdisplay.cpp @@ -0,0 +1,130 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. 
The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** +* SECTION:mixdisplay +* @short_description: Lightweight Base Object for MI-X Video Display +* +*/ +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "mixdisplay.h" + +#define DEBUG_REFCOUNT + +MixDisplay::MixDisplay() + :refcount(1) { +} +MixDisplay::~MixDisplay() { + Finalize(); +} + +MixDisplay* MixDisplay::Dup() const { + MixDisplay* dup = new MixDisplay(); + if (NULL != dup ) { + if(FALSE == Copy(dup)) { + dup->Unref(); + dup = NULL; + } + } + return dup; +} + +gboolean MixDisplay::Copy(MixDisplay* target) const { + if(NULL != target) + return TRUE; + else + return FALSE; +} + +void MixDisplay::Finalize() { +} + +gboolean MixDisplay::Equal(const MixDisplay* obj) const { + if (NULL != obj) + return TRUE; + else + return FALSE; +} + +MixDisplay * MixDisplay::Ref() { + ++refcount; + return this; +} +void MixDisplay::Unref () { + if (0 == (--refcount)) { + delete this; + } +} + +gboolean mix_display_copy (MixDisplay * target, const MixDisplay * src) { + if (target == src) + return TRUE; + if (NULL == target || NULL == src) + return FALSE; + return src->Copy(target); +} + + +MixDisplay * mix_display_dup (const MixDisplay * obj) { + if (NULL == obj) + return NULL; + return obj->Dup(); +} + + + +MixDisplay * mix_display_new (void) { + return new MixDisplay(); +} + +MixDisplay * mix_display_ref (MixDisplay * obj) { + if (NULL != obj) + obj->Ref(); + return obj; +} + +void mix_display_unref (MixDisplay * obj) { + if (NULL != obj) + obj->Unref(); +} + + +/** +* mix_display_replace: +* @olddata: pointer to a pointer to a object to be replaced +* @newdata: pointer to new object +* +* Modifies a pointer to point to a new object. The modification +* is done atomically, and the reference counts are updated correctly. +* Either @newdata and the value pointed to by @olddata may be NULL. 
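+* (Note: unlike the GType implementation this replaces, the swap below is a
+* plain pointer assignment with no atomic operations; callers are assumed to
+* provide their own serialization around mix_display_replace.)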
+*/ +void mix_display_replace (MixDisplay ** olddata, MixDisplay * newdata) { + if (NULL == olddata) + return; + if(*olddata == newdata) + return; + MixDisplay *olddata_val = *olddata; + if (NULL != newdata) + newdata->Ref(); + *olddata = newdata; + if (NULL != olddata_val) + olddata_val->Unref(); +} + +gboolean mix_display_equal (MixDisplay * first, MixDisplay * second) { + if (first == second) + return TRUE; + if (NULL == first || NULL == second) + return FALSE; + return first->Equal(second); +} + + diff --git a/mix_video/src/mixdisplay.h b/mix_video/src/mixdisplay.h index 04c8637..9416fb8 100644 --- a/mix_video/src/mixdisplay.h +++ b/mix_video/src/mixdisplay.h @@ -11,80 +11,7 @@ No license under any patent, copyright, trade secret or other intellectual prope #include -G_BEGIN_DECLS -#define MIX_TYPE_DISPLAY (mix_display_get_type()) -#define MIX_IS_DISPLAY(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DISPLAY)) -#define MIX_IS_DISPLAY_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DISPLAY)) -#define MIX_DISPLAY_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DISPLAY, MixDisplayClass)) -#define MIX_DISPLAY(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DISPLAY, MixDisplay)) -#define MIX_DISPLAY_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DISPLAY, MixDisplayClass)) -#define MIX_DISPLAY_CAST(obj) ((MixDisplay*)(obj)) -typedef struct _MixDisplay MixDisplay; -typedef struct _MixDisplayClass MixDisplayClass; - -/** -* MixDisplayDupFunction: -* @obj: Display to duplicate -* @returns: reference to cloned instance. -* -* Virtual function prototype for methods to create duplicate of instance. -* -*/ -typedef MixDisplay *(*MixDisplayDupFunction) (const MixDisplay * obj); - -/** -* MixDisplayCopyFunction: -* @target: target of the copy -* @src: source of the copy -* @returns: boolean indicates if copy is successful. -* -* Virtual function prototype for methods to create copies of instance. -* -*/ -typedef gboolean (*MixDisplayCopyFunction) (MixDisplay * target, - const MixDisplay * src); - -/** -* MixDisplayFinalizeFunction: -* @obj: Display to finalize -* -* Virtual function prototype for methods to free ressources used by -* object. -*/ -typedef void (*MixDisplayFinalizeFunction) (MixDisplay * obj); - -/** -* MixDisplayEqualsFunction: -* @first: first object in the comparison -* @second: second object in the comparison -* -* Virtual function prototype for methods to compare 2 objects and check if they are equal. -*/ -typedef gboolean (*MixDisplayEqualFunction) (MixDisplay * first, - MixDisplay * second); - -/** -* MIX_VALUE_HOLDS_DISPLAY: -* @value: the #GValue to check -* -* Checks if the given #GValue contains a #MIX_TYPE_PARAM value. -*/ -#define MIX_VALUE_HOLDS_DISPLAY(value) (G_VALUE_HOLDS(value, MIX_TYPE_DISPLAY)) - -/** -* MIX_DISPLAY_REFCOUNT: -* @obj: a #MixDisplay -* -* Get access to the reference count field of the object. -*/ -#define MIX_DISPLAY_REFCOUNT(obj) ((MIX_DISPLAY_CAST(obj))->refcount) -/** -* MIX_DISPLAY_REFCOUNT_VALUE: -* @obj: a #MixDisplay -* -* Get the reference count value of the object -*/ -#define MIX_DISPLAY_REFCOUNT_VALUE(obj) (g_atomic_int_get (&(MIX_DISPLAY_CAST(obj))->refcount)) +#define MIX_DISPLAY(obj) (reinterpret_cast(obj)) /** * MixDisplay: @@ -93,45 +20,29 @@ typedef gboolean (*MixDisplayEqualFunction) (MixDisplay * first, * * Base class for a refcounted parameter objects. 
*/ -struct _MixDisplay -{ - GTypeInstance instance; - /*< public > */ - gint refcount; - - /*< private > */ - gpointer _reserved; +class MixDisplay { +public: + virtual ~MixDisplay(); + + virtual MixDisplay* Dup() const; + virtual gboolean Copy(MixDisplay* target) const; + virtual void Finalize(); + virtual gboolean Equal(const MixDisplay* obj) const; + + MixDisplay * Ref(); + void Unref (); + + friend MixDisplay *mix_display_new (void); + +protected: + MixDisplay(); +public: + /*< public > */ + gint refcount; + /*< private > */ + gpointer _reserved; }; -/** -* MixDisplayClass: -* @dup: method to duplicate the object. -* @copy: method to copy details in one object to the other. -* @finalize: destructor -* @equal: method to check if the content of two objects are equal. -* -* #MixDisplay class strcut. -*/ -struct _MixDisplayClass -{ - GTypeClass type_class; - - MixDisplayDupFunction dup; - MixDisplayCopyFunction copy; - MixDisplayFinalizeFunction finalize; - MixDisplayEqualFunction equal; - - /*< private > */ - gpointer _mix_reserved; -}; - -/** -* mix_display_get_type: -* @returns: type of this object. -* -* Get type. -*/ -GType mix_display_get_type (void); /** * mix_display_new: @@ -139,7 +50,9 @@ GType mix_display_get_type (void); * * Create new instance of the object. */ -MixDisplay *mix_display_new (); +MixDisplay *mix_display_new (void); + + /** * mix_display_copy: @@ -196,38 +109,5 @@ MixDisplay *mix_display_dup (const MixDisplay * obj); */ gboolean mix_display_equal (MixDisplay * first, MixDisplay * second); -/* GParamSpec */ - -#define MIX_TYPE_PARAM_DISPLAY (mix_param_spec_display_get_type()) -#define MIX_IS_PARAM_SPEC_DISPLAY(pspec) (G_TYPE_CHECK_INSTANCE_TYPE ((pspec), MIX_TYPE_PARAM_DISPLAY)) -#define MIX_PARAM_SPEC_DISPLAY(pspec) (G_TYPE_CHECK_INSTANCE_CAST ((pspec), MIX_TYPE_PARAM_DISPLAY, MixParamSpecDisplay)) - -typedef struct _MixParamSpecDisplay MixParamSpecDisplay; - -/** -* MixParamSpecDisplay: -* @parent: #GParamSpec portion -* -* A #GParamSpec derived structure that contains the meta data -* for #MixDisplay properties. -*/ -struct _MixParamSpecDisplay -{ - GParamSpec parent; -}; - -GType mix_param_spec_display_get_type (void); - -GParamSpec *mix_param_spec_display (const char *name, const char *nick, - const char *blurb, GType object_type, - GParamFlags flags); - -/* GValue methods */ - -void mix_value_set_display (GValue * value, MixDisplay * obj); -void mix_value_take_display (GValue * value, MixDisplay * obj); -MixDisplay *mix_value_get_display (const GValue * value); -MixDisplay *mix_value_dup_display (const GValue * value); -G_END_DECLS #endif diff --git a/mix_video/src/mixdisplayandroid.c b/mix_video/src/mixdisplayandroid.c deleted file mode 100644 index 08bec6d..0000000 --- a/mix_video/src/mixdisplayandroid.c +++ /dev/null @@ -1,197 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixdisplayandroid - * @short_description: MI-X Video Android Display - * - * A data object which stores Android specific parameters. - * - * - * Data Structures Used in MixDisplayAndroid Fields: - * - */ - -#ifdef ANDROID - -#include "mixdisplayandroid.h" - -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } - -static GType _mix_displayandroid_type = 0; -static MixDisplayClass *parent_class = NULL; - -#define _do_init { _mix_displayandroid_type = g_define_type_id; } - -gboolean mix_displayandroid_copy(MixDisplay * target, const MixDisplay * src); -MixDisplay *mix_displayandroid_dup(const MixDisplay * obj); -gboolean mix_displayandroid_equal(MixDisplay * first, MixDisplay * second); -static void mix_displayandroid_finalize(MixDisplay * obj); - -G_DEFINE_TYPE_WITH_CODE (MixDisplayAndroid, mix_displayandroid, - MIX_TYPE_DISPLAY, _do_init); - -static void mix_displayandroid_init(MixDisplayAndroid * self) { - - /* Initialize member varibles */ - self->display = NULL; -// self->drawable = 0; -} - -static void mix_displayandroid_class_init(MixDisplayAndroidClass * klass) { - MixDisplayClass *mixdisplay_class = MIX_DISPLAY_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixDisplayClass *) g_type_class_peek_parent(klass); - - mixdisplay_class->finalize = mix_displayandroid_finalize; - mixdisplay_class->copy = (MixDisplayCopyFunction) mix_displayandroid_copy; - mixdisplay_class->dup = (MixDisplayDupFunction) mix_displayandroid_dup; - mixdisplay_class->equal = (MixDisplayEqualFunction) mix_displayandroid_equal; -} - -MixDisplayAndroid * -mix_displayandroid_new(void) { - MixDisplayAndroid *ret = (MixDisplayAndroid *) g_type_create_instance( - MIX_TYPE_DISPLAYANDROID); - - return ret; -} - -void mix_displayandroid_finalize(MixDisplay * obj) { - /* clean up here. */ - /* MixDisplayAndroid *self = MIX_DISPLAYANDROID (obj); */ - - /* NOTE: we don't need to do anything - * with display and drawable */ - - /* Chain up parent */ - if (parent_class->finalize) - parent_class->finalize(obj); -} - -MixDisplayAndroid * -mix_displayandroid_ref(MixDisplayAndroid * mix) { - return (MixDisplayAndroid *) mix_display_ref(MIX_DISPLAY(mix)); -} - -/** - * mix_mixdisplayandroid_dup: - * @obj: a #MixDisplayAndroid object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixDisplay * -mix_displayandroid_dup(const MixDisplay * obj) { - MixDisplay *ret = NULL; - - if (MIX_IS_DISPLAYANDROID(obj)) { - MixDisplayAndroid *duplicate = mix_displayandroid_new(); - if (mix_displayandroid_copy(MIX_DISPLAY(duplicate), MIX_DISPLAY(obj))) { - ret = MIX_DISPLAY(duplicate); - } else { - mix_displayandroid_unref(duplicate); - } - } - return ret; -} - -/** - * mix_mixdisplayandroid_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. 
- */ -gboolean mix_displayandroid_copy(MixDisplay * target, const MixDisplay * src) { - MixDisplayAndroid *this_target, *this_src; - - if (MIX_IS_DISPLAYANDROID(target) && MIX_IS_DISPLAYANDROID(src)) { - // Cast the base object to this child object - this_target = MIX_DISPLAYANDROID(target); - this_src = MIX_DISPLAYANDROID(src); - - // Copy properties from source to target. - - this_target->display = this_src->display; -// this_target->drawable = this_src->drawable; - - // Now chainup base class - if (parent_class->copy) { - return parent_class->copy(MIX_DISPLAY_CAST(target), - MIX_DISPLAY_CAST(src)); - } else { - return TRUE; - } - } - return FALSE; -} - -/** - * mix_mixdisplayandroid_equal: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. - */ -gboolean mix_displayandroid_equal(MixDisplay * first, MixDisplay * second) { - gboolean ret = FALSE; - - MixDisplayAndroid *this_first, *this_second; - - this_first = MIX_DISPLAYANDROID(first); - this_second = MIX_DISPLAYANDROID(second); - - if (MIX_IS_DISPLAYANDROID(first) && MIX_IS_DISPLAYANDROID(second)) { - // Compare member variables - - // TODO: if in the copy method we just copy the pointer of display, the comparison - // below is enough. But we need to decide how to copy! - - if (this_first->display == this_second->display /*&& this_first->drawable - == this_second->drawable*/) { - // members within this scope equal. chaining up. - MixDisplayClass *klass = MIX_DISPLAY_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - } - } - return ret; -} - -#define MIX_DISPLAYANDROID_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_DISPLAYANDROID(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_DISPLAYANDROID_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || prop == NULL) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_DISPLAYANDROID(obj)) return MIX_RESULT_FAIL; \ - -MIX_RESULT mix_displayandroid_set_display(MixDisplayAndroid * obj, void * display) { - MIX_DISPLAYANDROID_SETTER_CHECK_INPUT (obj); - - // TODO: needs to decide to clone or just copy pointer - obj->display = display; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_displayandroid_get_display(MixDisplayAndroid * obj, void ** display) { - MIX_DISPLAYANDROID_GETTER_CHECK_INPUT (obj, display); - - // TODO: needs to decide to clone or just copy pointer - *display = obj->display; - - return MIX_RESULT_SUCCESS; -} - -#endif /* ANDROID */ diff --git a/mix_video/src/mixdisplayandroid.cpp b/mix_video/src/mixdisplayandroid.cpp new file mode 100644 index 0000000..3f9ba19 --- /dev/null +++ b/mix_video/src/mixdisplayandroid.cpp @@ -0,0 +1,158 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixdisplayandroid
+ * @short_description: MI-X Video Android Display
+ *
+ * A data object which stores Android specific parameters.
+ *
+ *
+ * Data Structures Used in MixDisplayAndroid Fields:
+ *
+ */
+
+#ifdef ANDROID
+
+#include "mixdisplayandroid.h"
+
+#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; }
+
+MixDisplayAndroid::MixDisplayAndroid()
+        :display(NULL) {
+}
+
+MixDisplayAndroid::~MixDisplayAndroid() {
+    Finalize();
+}
+
+MixDisplay* MixDisplayAndroid::Dup() const {
+    MixDisplayAndroid* dup = new MixDisplayAndroid();
+    if (NULL != dup ) {
+        if(FALSE == Copy(dup)) {
+            dup->Unref();
+            dup = NULL;
+        }
+    }
+    return dup;
+
+}
+
+gboolean MixDisplayAndroid::Copy(MixDisplay* target) const {
+    gboolean ret = FALSE;
+    MixDisplayAndroid* this_target = reinterpret_cast<MixDisplayAndroid*>(target);
+    if (NULL != this_target) {
+        this_target->display = this->display;
+        ret = MixDisplay::Copy(target);
+    }
+    return ret;
+}
+
+void MixDisplayAndroid::Finalize() {
+    MixDisplay::Finalize();
+}
+
+gboolean MixDisplayAndroid::Equal(const MixDisplay* obj) const {
+    gboolean ret = FALSE;
+    const MixDisplayAndroid* this_obj = reinterpret_cast<const MixDisplayAndroid*>(obj);
+    if (NULL != this_obj) {
+        if(this_obj->display == this->display)
+            ret = MixDisplay::Equal(obj);
+    }
+    return ret;
+}
+
+
+MixDisplayAndroid * mix_displayandroid_new(void) {
+    return new MixDisplayAndroid();
+}
+
+
+
+MixDisplayAndroid * mix_displayandroid_ref(MixDisplayAndroid * mix) {
+    if (NULL != mix)
+        mix->Ref();
+    return mix;
+}
+
+/**
+ * mix_mixdisplayandroid_dup:
+ * @obj: a #MixDisplayAndroid object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixDisplay * mix_displayandroid_dup(const MixDisplay * obj) {
+    MixDisplay *ret = NULL;
+    if (NULL != obj) {
+        ret = obj->Dup();
+    }
+    return ret;
+}
+
+/**
+ * mix_mixdisplayandroid_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_displayandroid_copy(MixDisplay * target, const MixDisplay * src) {
+    if (target == src)
+        return TRUE;
+    if (NULL == target || NULL == src)
+        return FALSE;
+    const MixDisplayAndroid *this_src =
+        reinterpret_cast<const MixDisplayAndroid *>(src);
+    MixDisplayAndroid *this_target =
+        reinterpret_cast<MixDisplayAndroid *>(target);
+    return this_src->Copy(this_target);
+}
+
+/**
+ * mix_mixdisplayandroid_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instances are equal.
+ *
+ * Compare the instance data in @first and @second for equality.
+ */
+gboolean mix_displayandroid_equal(MixDisplay * first, MixDisplay * second) {
+    if (first == second)
+        return TRUE;
+    if (NULL == first || NULL == second)
+        return FALSE;
+    MixDisplayAndroid *this_first =
+        reinterpret_cast<MixDisplayAndroid *>(first);
+    MixDisplayAndroid *this_second =
+        reinterpret_cast<MixDisplayAndroid *>(second);
+    return this_first->Equal(this_second);
+}
+
+#define MIX_DISPLAYANDROID_SETTER_CHECK_INPUT(obj) \
+    if(!obj) return MIX_RESULT_NULL_PTR;
+
+#define MIX_DISPLAYANDROID_GETTER_CHECK_INPUT(obj, prop) \
+    if(!obj || prop == NULL) return MIX_RESULT_NULL_PTR;
+
+MIX_RESULT mix_displayandroid_set_display(MixDisplayAndroid * obj, void * display) {
+    MIX_DISPLAYANDROID_SETTER_CHECK_INPUT (obj);
+    // TODO: needs to decide to clone or just copy pointer
+    obj->display = display;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_displayandroid_get_display(MixDisplayAndroid * obj, void ** display) {
+    MIX_DISPLAYANDROID_GETTER_CHECK_INPUT (obj, display);
+    // TODO: needs to decide to clone or just copy pointer
+    *display = obj->display;
+    return MIX_RESULT_SUCCESS;
+}
+
+#endif /* ANDROID */
diff --git a/mix_video/src/mixdisplayandroid.h b/mix_video/src/mixdisplayandroid.h
index 95fe951..5637d25 100644
--- a/mix_video/src/mixdisplayandroid.h
+++ b/mix_video/src/mixdisplayandroid.h
@@ -17,24 +17,17 @@ No license under any patent, copyright, trade secret or other intellectual prope
 //using namespace android;
 //#endif
 
-//#ifdef __cplusplus
-//extern "C" {
-//#endif
+#ifdef __cplusplus
+extern "C" {
+#endif
 
 #ifdef ANDROID
 
-/**
-* MIX_TYPE_DISPLAYANDROID:
-*
-* Get type of class.
-*/
-#define MIX_TYPE_DISPLAYANDROID (mix_displayandroid_get_type ())
-
 /**
 * MIX_DISPLAYANDROID:
 * @obj: object to be type-casted.
 */
-#define MIX_DISPLAYANDROID(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DISPLAYANDROID, MixDisplayAndroid))
+#define MIX_DISPLAYANDROID(obj) (reinterpret_cast<MixDisplayAndroid*>(obj))
 
 /**
 * MIX_IS_DISPLAYANDROID:
@@ -42,75 +35,41 @@ No license under any patent, copyright, trade secret or other intellectual prope
 *
 * Checks if the given object is an instance of #MixDisplay
 */
-#define MIX_IS_DISPLAYANDROID(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DISPLAYANDROID))
-
-/**
-* MIX_DISPLAYANDROID_CLASS:
-* @klass: class to be type-casted.
-*/
-#define MIX_DISPLAYANDROID_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DISPLAYANDROID, MixDisplayAndroidClass))
-
-/**
-* MIX_IS_DISPLAYANDROID_CLASS:
-* @klass: a class.
-*
-* Checks if the given class is #MixDisplayClass
-*/
-#define MIX_IS_DISPLAYANDROID_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DISPLAYANDROID))
-
-/**
-* MIX_DISPLAYANDROID_GET_CLASS:
-* @obj: a #MixDisplay object.
-*
-* Get the class instance of the object.
-*/
-#define MIX_DISPLAYANDROID_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DISPLAYANDROID, MixDisplayAndroidClass))
+#define MIX_IS_DISPLAYANDROID(obj) (NULL != MIX_DISPLAYANDROID(obj))
 
-typedef struct _MixDisplayAndroid MixDisplayAndroid;
-typedef struct _MixDisplayAndroidClass MixDisplayAndroidClass;
 
 /**
 * MixDisplayAndroid:
 *
 * MI-X VideoInit Parameter object
 */
-struct _MixDisplayAndroid
-{
-  /*< public > */
-  MixDisplay parent;
-
-  /*< public > */
-
-  /* Pointer to a Android specific display */
-  void *display;
-
-  /* An Android drawable that is a smart pointer
-   * of ISurface. This field is not used in
-   * mix_video_initialize().
- */ - // sp drawable; -}; +class MixDisplayAndroid : public MixDisplay { -/** -* MixDisplayAndroidClass: -* -* MI-X VideoInit object class -*/ -struct _MixDisplayAndroidClass -{ - /*< public > */ - MixDisplayClass parent_class; +public: + ~MixDisplayAndroid(); + virtual MixDisplay* Dup() const; + virtual gboolean Copy(MixDisplay* target) const; + virtual void Finalize(); + virtual gboolean Equal(const MixDisplay* obj) const; - /* class members */ -}; -/** -* mix_displayandroid_get_type: -* @returns: type -* -* Get the type of object. -*/ -GType mix_displayandroid_get_type (void); + + friend MixDisplayAndroid *mix_displayandroid_new (void); + +protected: + MixDisplayAndroid(); +public: + /*< public > */ + + /* Pointer to a Android specific display */ + void *display; + + /* An Android drawable that is a smart pointer + * of ISurface. This field is not used in + * mix_video_initialize(). + */ + // sp drawable; +}; /** * mix_displayandroid_new: @@ -119,6 +78,8 @@ GType mix_displayandroid_get_type (void); * Use this method to create new instance of #MixDisplayAndroid */ MixDisplayAndroid *mix_displayandroid_new (void); + + /** * mix_displayandroid_ref: * @mix: object to add reference @@ -147,8 +108,8 @@ MixDisplayAndroid *mix_displayandroid_ref (MixDisplayAndroid * mix); * * Set Display */ -MIX_RESULT mix_displayandroid_set_display (MixDisplayAndroid * obj, - void * display); +MIX_RESULT mix_displayandroid_set_display ( + MixDisplayAndroid * obj, void * display); /** * mix_displayandroid_get_display: @@ -158,15 +119,15 @@ MIX_RESULT mix_displayandroid_set_display (MixDisplayAndroid * obj, * * Get Display */ -MIX_RESULT mix_displayandroid_get_display (MixDisplayAndroid * obj, - void ** dislay); +MIX_RESULT mix_displayandroid_get_display ( + MixDisplayAndroid * obj, void ** dislay); #endif /* ANDROID */ -//#ifdef __cplusplus -//} -//#endif +#ifdef __cplusplus +} +#endif #endif /* __MIX_DISPLAYANDROID_H__ */ diff --git a/mix_video/src/mixdisplayx11.c b/mix_video/src/mixdisplayx11.c deleted file mode 100644 index 43839ab..0000000 --- a/mix_video/src/mixdisplayx11.c +++ /dev/null @@ -1,210 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixdisplayx11 - * @short_description: MI-X Video X11 Display - * - * A data object which stores X11 specific parameters. - * - * - * Data Structures Used in MixDisplayX11 Fields: - * See X11/Xlib.h for Display and Drawable definitions. 
- * - */ - -#include "mixdisplayx11.h" - -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } - -static GType _mix_displayx11_type = 0; -static MixDisplayClass *parent_class = NULL; - -#define _do_init { _mix_displayx11_type = g_define_type_id; } - -gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src); -MixDisplay *mix_displayx11_dup(const MixDisplay * obj); -gboolean mix_displayx11_equal(MixDisplay * first, MixDisplay * second); -static void mix_displayx11_finalize(MixDisplay * obj); - -G_DEFINE_TYPE_WITH_CODE (MixDisplayX11, mix_displayx11, - MIX_TYPE_DISPLAY, _do_init); - -static void mix_displayx11_init(MixDisplayX11 * self) { - - /* Initialize member varibles */ - self->display = NULL; - self->drawable = 0; -} - -static void mix_displayx11_class_init(MixDisplayX11Class * klass) { - MixDisplayClass *mixdisplay_class = MIX_DISPLAY_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixDisplayClass *) g_type_class_peek_parent(klass); - - mixdisplay_class->finalize = mix_displayx11_finalize; - mixdisplay_class->copy = (MixDisplayCopyFunction) mix_displayx11_copy; - mixdisplay_class->dup = (MixDisplayDupFunction) mix_displayx11_dup; - mixdisplay_class->equal = (MixDisplayEqualFunction) mix_displayx11_equal; -} - -MixDisplayX11 * -mix_displayx11_new(void) { - MixDisplayX11 *ret = (MixDisplayX11 *) g_type_create_instance( - MIX_TYPE_DISPLAYX11); - - return ret; -} - -void mix_displayx11_finalize(MixDisplay * obj) { - /* clean up here. */ - /* MixDisplayX11 *self = MIX_DISPLAYX11 (obj); */ - - /* NOTE: we don't need to do anything - * with display and drawable */ - - /* Chain up parent */ - if (parent_class->finalize) - parent_class->finalize(obj); -} - -MixDisplayX11 * -mix_displayx11_ref(MixDisplayX11 * mix) { - return (MixDisplayX11 *) mix_display_ref(MIX_DISPLAY(mix)); -} - -/** - * mix_mixdisplayx11_dup: - * @obj: a #MixDisplayX11 object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixDisplay * -mix_displayx11_dup(const MixDisplay * obj) { - MixDisplay *ret = NULL; - - if (MIX_IS_DISPLAYX11(obj)) { - MixDisplayX11 *duplicate = mix_displayx11_new(); - if (mix_displayx11_copy(MIX_DISPLAY(duplicate), MIX_DISPLAY(obj))) { - ret = MIX_DISPLAY(duplicate); - } else { - mix_displayx11_unref(duplicate); - } - } - return ret; -} - -/** - * mix_mixdisplayx11_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src) { - MixDisplayX11 *this_target, *this_src; - - if (MIX_IS_DISPLAYX11(target) && MIX_IS_DISPLAYX11(src)) { - // Cast the base object to this child object - this_target = MIX_DISPLAYX11(target); - this_src = MIX_DISPLAYX11(src); - - // Copy properties from source to target. - - this_target->display = this_src->display; - this_target->drawable = this_src->drawable; - - // Now chainup base class - if (parent_class->copy) { - return parent_class->copy(MIX_DISPLAY_CAST(target), - MIX_DISPLAY_CAST(src)); - } else { - return TRUE; - } - } - return FALSE; -} - -/** - * mix_mixdisplayx11_equal: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. 
- */ -gboolean mix_displayx11_equal(MixDisplay * first, MixDisplay * second) { - gboolean ret = FALSE; - - MixDisplayX11 *this_first, *this_second; - - this_first = MIX_DISPLAYX11(first); - this_second = MIX_DISPLAYX11(second); - - if (MIX_IS_DISPLAYX11(first) && MIX_IS_DISPLAYX11(second)) { - // Compare member variables - - // TODO: if in the copy method we just copy the pointer of display, the comparison - // below is enough. But we need to decide how to copy! - - if (this_first->display == this_second->display && this_first->drawable - == this_second->drawable) { - // members within this scope equal. chaining up. - MixDisplayClass *klass = MIX_DISPLAY_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - } - } - return ret; -} - -#define MIX_DISPLAYX11_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_DISPLAYX11(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_DISPLAYX11_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_DISPLAYX11(obj)) return MIX_RESULT_FAIL; \ - -MIX_RESULT mix_displayx11_set_display(MixDisplayX11 * obj, Display * display) { - MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj); - - // TODO: needs to decide to clone or just copy pointer - obj->display = display; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_displayx11_get_display(MixDisplayX11 * obj, Display ** display) { - MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, display); - - // TODO: needs to decide to clone or just copy pointer - *display = obj->display; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_displayx11_set_drawable(MixDisplayX11 * obj, Drawable drawable) { - MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj); - - // TODO: needs to decide to clone or just copy pointer - obj->drawable = drawable; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_displayx11_get_drawable(MixDisplayX11 * obj, Drawable * drawable) { - MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, drawable); - - // TODO: needs to decide to clone or just copy pointer - *drawable = obj->drawable; - return MIX_RESULT_SUCCESS; -} diff --git a/mix_video/src/mixdisplayx11.cpp b/mix_video/src/mixdisplayx11.cpp new file mode 100644 index 0000000..43839ab --- /dev/null +++ b/mix_video/src/mixdisplayx11.cpp @@ -0,0 +1,210 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixdisplayx11 + * @short_description: MI-X Video X11 Display + * + * A data object which stores X11 specific parameters. 
+ * + * + * Data Structures Used in MixDisplayX11 Fields: + * See X11/Xlib.h for Display and Drawable definitions. + * + */ + +#include "mixdisplayx11.h" + +#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } + +static GType _mix_displayx11_type = 0; +static MixDisplayClass *parent_class = NULL; + +#define _do_init { _mix_displayx11_type = g_define_type_id; } + +gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src); +MixDisplay *mix_displayx11_dup(const MixDisplay * obj); +gboolean mix_displayx11_equal(MixDisplay * first, MixDisplay * second); +static void mix_displayx11_finalize(MixDisplay * obj); + +G_DEFINE_TYPE_WITH_CODE (MixDisplayX11, mix_displayx11, + MIX_TYPE_DISPLAY, _do_init); + +static void mix_displayx11_init(MixDisplayX11 * self) { + + /* Initialize member varibles */ + self->display = NULL; + self->drawable = 0; +} + +static void mix_displayx11_class_init(MixDisplayX11Class * klass) { + MixDisplayClass *mixdisplay_class = MIX_DISPLAY_CLASS(klass); + + /* setup static parent class */ + parent_class = (MixDisplayClass *) g_type_class_peek_parent(klass); + + mixdisplay_class->finalize = mix_displayx11_finalize; + mixdisplay_class->copy = (MixDisplayCopyFunction) mix_displayx11_copy; + mixdisplay_class->dup = (MixDisplayDupFunction) mix_displayx11_dup; + mixdisplay_class->equal = (MixDisplayEqualFunction) mix_displayx11_equal; +} + +MixDisplayX11 * +mix_displayx11_new(void) { + MixDisplayX11 *ret = (MixDisplayX11 *) g_type_create_instance( + MIX_TYPE_DISPLAYX11); + + return ret; +} + +void mix_displayx11_finalize(MixDisplay * obj) { + /* clean up here. */ + /* MixDisplayX11 *self = MIX_DISPLAYX11 (obj); */ + + /* NOTE: we don't need to do anything + * with display and drawable */ + + /* Chain up parent */ + if (parent_class->finalize) + parent_class->finalize(obj); +} + +MixDisplayX11 * +mix_displayx11_ref(MixDisplayX11 * mix) { + return (MixDisplayX11 *) mix_display_ref(MIX_DISPLAY(mix)); +} + +/** + * mix_mixdisplayx11_dup: + * @obj: a #MixDisplayX11 object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. + */ +MixDisplay * +mix_displayx11_dup(const MixDisplay * obj) { + MixDisplay *ret = NULL; + + if (MIX_IS_DISPLAYX11(obj)) { + MixDisplayX11 *duplicate = mix_displayx11_new(); + if (mix_displayx11_copy(MIX_DISPLAY(duplicate), MIX_DISPLAY(obj))) { + ret = MIX_DISPLAY(duplicate); + } else { + mix_displayx11_unref(duplicate); + } + } + return ret; +} + +/** + * mix_mixdisplayx11_copy: + * @target: copy to target + * @src: copy from src + * @returns: boolean indicates if copy is successful. + * + * Copy instance data from @src to @target. + */ +gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src) { + MixDisplayX11 *this_target, *this_src; + + if (MIX_IS_DISPLAYX11(target) && MIX_IS_DISPLAYX11(src)) { + // Cast the base object to this child object + this_target = MIX_DISPLAYX11(target); + this_src = MIX_DISPLAYX11(src); + + // Copy properties from source to target. + + this_target->display = this_src->display; + this_target->drawable = this_src->drawable; + + // Now chainup base class + if (parent_class->copy) { + return parent_class->copy(MIX_DISPLAY_CAST(target), + MIX_DISPLAY_CAST(src)); + } else { + return TRUE; + } + } + return FALSE; +} + +/** + * mix_mixdisplayx11_equal: + * @first: first object to compare + * @second: seond object to compare + * @returns: boolean indicates if instance are equal. + * + * Copy instance data from @src to @target. 
+ */ +gboolean mix_displayx11_equal(MixDisplay * first, MixDisplay * second) { + gboolean ret = FALSE; + + MixDisplayX11 *this_first, *this_second; + + this_first = MIX_DISPLAYX11(first); + this_second = MIX_DISPLAYX11(second); + + if (MIX_IS_DISPLAYX11(first) && MIX_IS_DISPLAYX11(second)) { + // Compare member variables + + // TODO: if in the copy method we just copy the pointer of display, the comparison + // below is enough. But we need to decide how to copy! + + if (this_first->display == this_second->display && this_first->drawable + == this_second->drawable) { + // members within this scope equal. chaining up. + MixDisplayClass *klass = MIX_DISPLAY_CLASS(parent_class); + if (klass->equal) + ret = parent_class->equal(first, second); + else + ret = TRUE; + } + } + return ret; +} + +#define MIX_DISPLAYX11_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_DISPLAYX11(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_DISPLAYX11_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_DISPLAYX11(obj)) return MIX_RESULT_FAIL; \ + +MIX_RESULT mix_displayx11_set_display(MixDisplayX11 * obj, Display * display) { + MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj); + + // TODO: needs to decide to clone or just copy pointer + obj->display = display; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_displayx11_get_display(MixDisplayX11 * obj, Display ** display) { + MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, display); + + // TODO: needs to decide to clone or just copy pointer + *display = obj->display; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_displayx11_set_drawable(MixDisplayX11 * obj, Drawable drawable) { + MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj); + + // TODO: needs to decide to clone or just copy pointer + obj->drawable = drawable; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_displayx11_get_drawable(MixDisplayX11 * obj, Drawable * drawable) { + MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, drawable); + + // TODO: needs to decide to clone or just copy pointer + *drawable = obj->drawable; + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixframemanager.c b/mix_video/src/mixframemanager.c deleted file mode 100644 index b3edd90..0000000 --- a/mix_video/src/mixframemanager.c +++ /dev/null @@ -1,794 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
- */ -#include - -#include "mixvideolog.h" -#include "mixframemanager.h" -#include "mixvideoframe_private.h" - -#define INITIAL_FRAME_ARRAY_SIZE 16 - -// Assume only one backward reference is used. This will hold up to 2 frames before forcing -// the earliest frame out of queue. -#define MIX_MAX_ENQUEUE_SIZE 2 - -// RTP timestamp is 32-bit long and could be rollover in 13 hours (based on 90K Hz clock) -#define TS_ROLLOVER_THRESHOLD (0xFFFFFFFF/2) - -#define MIX_SECOND (G_USEC_PER_SEC * G_GINT64_CONSTANT (1000)) - -static GObjectClass *parent_class = NULL; - -static void mix_framemanager_finalize(GObject * obj); -G_DEFINE_TYPE( MixFrameManager, mix_framemanager, G_TYPE_OBJECT); - -static void mix_framemanager_init(MixFrameManager * self) { - /* TODO: public member initialization */ - - /* TODO: private member initialization */ - - if (!g_thread_supported()) { - g_thread_init(NULL); - } - - self->lock = g_mutex_new(); - - self->flushing = FALSE; - self->eos = FALSE; - self->frame_list = NULL; - self->initialized = FALSE; - - self->mode = MIX_DISPLAY_ORDER_UNKNOWN; - self->framerate_numerator = 30; - self->framerate_denominator = 1; - - self->is_first_frame = TRUE; - self->next_frame_timestamp = 0; - self->last_frame_timestamp = 0; - self->next_frame_picnumber = 0; - self->max_enqueue_size = MIX_MAX_ENQUEUE_SIZE; - self->max_picture_number = (guint32)-1; -} - -static void mix_framemanager_class_init(MixFrameManagerClass * klass) { - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* parent class for later use */ - parent_class = g_type_class_peek_parent(klass); - - gobject_class->finalize = mix_framemanager_finalize; -} - -MixFrameManager *mix_framemanager_new(void) { - MixFrameManager *ret = g_object_new(MIX_TYPE_FRAMEMANAGER, NULL); - - return ret; -} - -void mix_framemanager_finalize(GObject * obj) { - /* clean up here. 
*/ - - MixFrameManager *fm = MIX_FRAMEMANAGER(obj); - - /* cleanup here */ - mix_framemanager_deinitialize(fm); - - if (fm->lock) { - g_mutex_free(fm->lock); - fm->lock = NULL; - } - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} - -MixFrameManager *mix_framemanager_ref(MixFrameManager * fm) { - return (MixFrameManager *) g_object_ref(G_OBJECT(fm)); -} - -/* MixFrameManager class methods */ - -MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, - MixDisplayOrderMode mode, gint framerate_numerator, - gint framerate_denominator) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (!MIX_IS_FRAMEMANAGER(fm) || - mode <= MIX_DISPLAY_ORDER_UNKNOWN || - mode >= MIX_DISPLAY_ORDER_LAST || - framerate_numerator <= 0 || - framerate_denominator <= 0) { - return MIX_RESULT_INVALID_PARAM; - } - - if (fm->initialized) { - return MIX_RESULT_ALREADY_INIT; - } - - if (!g_thread_supported()) { - g_thread_init(NULL); - } - - if (!fm->lock) { - fm->lock = g_mutex_new(); - if (!fm->lock) { - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - fm->frame_list = NULL; - fm->framerate_numerator = framerate_numerator; - fm->framerate_denominator = framerate_denominator; - fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND - / fm->framerate_numerator; - - fm->mode = mode; - - LOG_V("fm->mode = %d\n", fm->mode); - - fm->is_first_frame = TRUE; - fm->next_frame_timestamp = 0; - fm->last_frame_timestamp = 0; - fm->next_frame_picnumber = 0; - - fm->initialized = TRUE; - -cleanup: - - return ret; -} - -MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm) { - - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } - - if (!fm->lock) { - return MIX_RESULT_FAIL; - } - - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } - - mix_framemanager_flush(fm); - - g_mutex_lock(fm->lock); - - fm->initialized = FALSE; - - g_mutex_unlock(fm->lock); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_framemanager_set_framerate(MixFrameManager *fm, - gint framerate_numerator, gint framerate_denominator) { - - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } - - if (!fm->lock) { - return MIX_RESULT_FAIL; - } - - if (framerate_numerator <= 0 || framerate_denominator <= 0) { - return MIX_RESULT_INVALID_PARAM; - } - - g_mutex_lock(fm->lock); - - fm->framerate_numerator = framerate_numerator; - fm->framerate_denominator = framerate_denominator; - fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND - / fm->framerate_numerator; - - g_mutex_unlock(fm->lock); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_framemanager_get_framerate(MixFrameManager *fm, - gint *framerate_numerator, gint *framerate_denominator) { - - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } - - if (!fm->lock) { - return MIX_RESULT_FAIL; - } - - if (!framerate_numerator || !framerate_denominator) { - return MIX_RESULT_INVALID_PARAM; - } - - g_mutex_lock(fm->lock); - - *framerate_numerator = fm->framerate_numerator; - *framerate_denominator = fm->framerate_denominator; - - g_mutex_unlock(fm->lock); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_framemanager_get_display_order_mode(MixFrameManager *fm, - MixDisplayOrderMode *mode) { - - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } - - if (!fm->lock) { - return MIX_RESULT_FAIL; - } - - if (!mode) { - return MIX_RESULT_INVALID_PARAM; - } - - /* no need to use lock */ - *mode = fm->mode; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT 
mix_framemanager_set_max_enqueue_size(MixFrameManager *fm, gint size) -{ - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } - - if (!fm->lock) { - return MIX_RESULT_FAIL; - } - - if (size <= 0) - { - return MIX_RESULT_FAIL; - } - - g_mutex_lock(fm->lock); - - fm->max_enqueue_size = size; - LOG_V("max enqueue size is %d\n", size); - - g_mutex_unlock(fm->lock); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_framemanager_set_max_picture_number(MixFrameManager *fm, guint32 num) -{ - // NOTE: set maximum picture order number only if pic_order_cnt_type is 0 (see H.264 spec) - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } - - if (!fm->lock) { - return MIX_RESULT_FAIL; - } - - if (num < 16) - { - // Refer to H.264 spec: log2_max_pic_order_cnt_lsb_minus4. Max pic order will never be less than 16. - return MIX_RESULT_INVALID_PARAM; - } - - g_mutex_lock(fm->lock); - - // max_picture_number is exclusie (range from 0 to num - 1). - // Note that this number may not be reliable if encoder does not conform to the spec, as of this, the - // implementaion will not automatically roll-over fm->next_frame_picnumber when it reaches - // fm->max_picture_number. - fm->max_picture_number = num; - LOG_V("max picture number is %d\n", num); - - g_mutex_unlock(fm->lock); - - return MIX_RESULT_SUCCESS; - -} - - -MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) { - - MixVideoFrame *frame = NULL; - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } - - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } - - g_mutex_lock(fm->lock); - - while (fm->frame_list) - { - frame = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0); - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)frame); - mix_videoframe_unref(frame); - LOG_V("one frame is flushed\n"); - }; - - fm->eos = FALSE; - fm->is_first_frame = TRUE; - fm->next_frame_timestamp = 0; - fm->last_frame_timestamp = 0; - fm->next_frame_picnumber = 0; - - g_mutex_unlock(fm->lock); - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V("Begin fm->mode = %d\n", fm->mode); - - if (!mvf) { - return MIX_RESULT_INVALID_PARAM; - } - - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } - - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } - - gboolean discontinuity = FALSE; - mix_videoframe_get_discontinuity(mvf, &discontinuity); - if (discontinuity) - { - LOG_V("current frame has discontinuity!\n"); - mix_framemanager_flush(fm); - } -#ifdef MIX_LOG_ENABLE - if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER) - { - guint32 num; - mix_videoframe_get_displayorder(mvf, &num); - LOG_V("pic %d is enqueued.\n", num); - } - - if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP) - { - guint64 ts; - mix_videoframe_get_timestamp(mvf, &ts); - LOG_V("ts %"G_GINT64_FORMAT" is enqueued.\n", ts); - } -#endif - - g_mutex_lock(fm->lock); - fm->frame_list = g_slist_append(fm->frame_list, (gpointer)mvf); - g_mutex_unlock(fm->lock); - - LOG_V("End\n"); - - return ret; -} - -void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) -{ - // this function finds the lowest time stamp in the list and assign it to the dequeued video frame, - // if that timestamp is smaller than the timestamp of dequeued video frame. 
- int i; - guint64 ts = 0, min_ts = 0; - MixVideoFrame *p = NULL, *min_p = NULL; - int len = g_slist_length(fm->frame_list); - if (len == 0) - { - // nothing to update - return; - } - - // find video frame with the smallest timestamp, take rollover into account when - // comparing timestamp. - for (i = 0; i < len; i++) - { - p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); - mix_videoframe_get_timestamp(p, &ts); - if (i == 0 || - (ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) || - (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) - { - min_ts = ts; - min_p = p; - } - } - - mix_videoframe_get_timestamp(mvf, &ts); - if ((ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) || - (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) - { - // frame to be updated has smaller time stamp - } - else - { - // time stamp needs to be monotonically non-decreasing so swap timestamp. - mix_videoframe_set_timestamp(mvf, min_ts); - mix_videoframe_set_timestamp(min_p, ts); - LOG_V("timestamp for current frame is updated from %"G_GINT64_FORMAT" to %"G_GINT64_FORMAT"\n", - ts, min_ts); - } -} - - -MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) -{ - int i, num_i_or_p; - MixVideoFrame *p, *first_i_or_p; - MixFrameType type; - int len = g_slist_length(fm->frame_list); - - num_i_or_p = 0; - first_i_or_p = NULL; - - for (i = 0; i < len; i++) - { - p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); - mix_videoframe_get_frame_type(p, &type); - if (type == TYPE_B) - { - // B frame has higher display priority as only one reference frame is kept in the list - // and it should be backward reference frame for B frame. - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); - mix_framemanager_update_timestamp(fm, p); - *mvf = p; - LOG_V("B frame is dequeued.\n"); - return MIX_RESULT_SUCCESS; - } - - if (type != TYPE_I && type != TYPE_P) - { - // this should never happen - LOG_E("Frame typs is invalid!!!\n"); - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); - mix_videoframe_unref(p); - return MIX_RESULT_FRAME_NOTAVAIL; - } - num_i_or_p++; - if (first_i_or_p == NULL) - { - first_i_or_p = p; - } - } - - // if there are more than one reference frame in the list, the first one is dequeued. - if (num_i_or_p > 1 || fm->eos) - { - if (first_i_or_p == NULL) - { - // this should never happen! - LOG_E("first_i_or_p frame is NULL!\n"); - return MIX_RESULT_FAIL; - } - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)first_i_or_p); - mix_framemanager_update_timestamp(fm, first_i_or_p); - *mvf = first_i_or_p; -#ifdef MIX_LOG_ENABLE - mix_videoframe_get_frame_type(first_i_or_p, &type); - if (type == TYPE_I) - { - LOG_V("I frame is dequeued.\n"); - } - else - { - LOG_V("P frame is dequeued.\n"); - } -#endif - return MIX_RESULT_SUCCESS; - } - - return MIX_RESULT_FRAME_NOTAVAIL; -} - -MIX_RESULT mix_framemanager_timestamp_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) -{ - int i, len; - MixVideoFrame *p, *p_out_of_dated; - guint64 ts, ts_next_pending, ts_out_of_dated; - guint64 tolerance = fm->frame_timestamp_delta/4; - -retry: - // len may be changed during retry! 
- len = g_slist_length(fm->frame_list); - ts_next_pending = (guint64)-1; - ts_out_of_dated = 0; - p_out_of_dated = NULL; - - - for (i = 0; i < len; i++) - { - p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); - mix_videoframe_get_timestamp(p, &ts); - if (ts >= fm->last_frame_timestamp && - ts <= fm->next_frame_timestamp + tolerance) - { - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); - *mvf = p; - mix_videoframe_get_timestamp(p, &(fm->last_frame_timestamp)); - fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; - LOG_V("frame is dequeud, ts = %"G_GINT64_FORMAT".\n", ts); - return MIX_RESULT_SUCCESS; - } - - if (ts > fm->next_frame_timestamp + tolerance && - ts < ts_next_pending) - { - ts_next_pending = ts; - } - if (ts < fm->last_frame_timestamp && - ts >= ts_out_of_dated) - { - // video frame that is most recently out-of-dated. - // this may happen in variable frame rate scenario where two adjacent frames both meet - // the "next frame" criteria, and the one with larger timestamp is dequeued first. - ts_out_of_dated = ts; - p_out_of_dated = p; - } - } - - if (p_out_of_dated && - fm->last_frame_timestamp - ts_out_of_dated < TS_ROLLOVER_THRESHOLD) - { - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p_out_of_dated); - mix_videoframe_unref(p_out_of_dated); - LOG_W("video frame is out of dated. ts = %"G_GINT64_FORMAT" compared to last ts = %"G_GINT64_FORMAT".\n", - ts_out_of_dated, fm->last_frame_timestamp); - return MIX_RESULT_FRAME_NOTAVAIL; - } - - if (len <= fm->max_enqueue_size && fm->eos == FALSE) - { - LOG_V("no frame is dequeued, expected ts = %"G_GINT64_FORMAT", next pending ts = %"G_GINT64_FORMAT".(List size = %d)\n", - fm->next_frame_timestamp, ts_next_pending, len); - return MIX_RESULT_FRAME_NOTAVAIL; - } - - // timestamp has gap - if (ts_next_pending != -1) - { - LOG_V("timestamp has gap, jumping from %"G_GINT64_FORMAT" to %"G_GINT64_FORMAT".\n", - fm->next_frame_timestamp, ts_next_pending); - - fm->next_frame_timestamp = ts_next_pending; - goto retry; - } - - // time stamp roll-over - LOG_V("time stamp is rolled over, resetting next frame timestamp from %"G_GINT64_FORMAT" to 0.\n", - fm->next_frame_timestamp); - - fm->next_frame_timestamp = 0; - fm->last_frame_timestamp = 0; - goto retry; - - // should never run to here - LOG_E("Error in timestamp-based dequeue implementation!\n"); - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) -{ - int i, len; - MixVideoFrame* p; - guint32 picnum; - guint32 next_picnum_pending; - - len = g_slist_length(fm->frame_list); - -retry: - next_picnum_pending = (guint32)-1; - - for (i = 0; i < len; i++) - { - p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); - mix_videoframe_get_displayorder(p, &picnum); - if (picnum == fm->next_frame_picnumber) - { - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); - mix_framemanager_update_timestamp(fm, p); - *mvf = p; - LOG_V("frame is dequeued, poc = %d.\n", fm->next_frame_picnumber); - fm->next_frame_picnumber++; - //if (fm->next_frame_picnumber == fm->max_picture_number) - // fm->next_frame_picnumber = 0; - return MIX_RESULT_SUCCESS; - } - - if (picnum > fm->next_frame_picnumber && - picnum < next_picnum_pending) - { - next_picnum_pending = picnum; - } - - if (picnum < fm->next_frame_picnumber && - fm->next_frame_picnumber - picnum < 8) - { - // the smallest value of "max_pic_order_cnt_lsb_minus4" is 16. 
If the distance of "next frame pic number" - // to the pic number in the list is less than half of 16, it is safe to assume that pic number - // is reset when a new IDR is encoded. (where pic numbfer of top or bottom field must be 0, subclause 8.2.1). - LOG_V("picture number is reset to %d, next pic number is %d, next pending number is %d.\n", - picnum, fm->next_frame_picnumber, next_picnum_pending); - break; - } - } - - if (len <= fm->max_enqueue_size && fm->eos == FALSE) - { - LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. (List size = %d)\n", - fm->next_frame_picnumber, next_picnum_pending, len); - return MIX_RESULT_FRAME_NOTAVAIL; - } - - // picture number has gap - if (next_picnum_pending != (guint32)-1) - { - LOG_V("picture number has gap, jumping from %d to %d.\n", - fm->next_frame_picnumber, next_picnum_pending); - - fm->next_frame_picnumber = next_picnum_pending; - goto retry; - } - - // picture number roll-over - LOG_V("picture number is rolled over, resetting next picnum from %d to 0.\n", - fm->next_frame_picnumber); - - fm->next_frame_picnumber = 0; - goto retry; - - // should never run to here - LOG_E("Error in picnumber-based dequeue implementation!\n"); - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V("Begin\n"); - - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } - - if (!mvf) { - return MIX_RESULT_INVALID_PARAM; - } - - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } - - g_mutex_lock(fm->lock); - - if (fm->frame_list == NULL) - { - if (fm->eos) - { - LOG_V("No frame is dequeued (eos)!\n"); - ret = MIX_RESULT_EOS; - } - else - { - LOG_V("No frame is dequeued as queue is empty!\n"); - ret = MIX_RESULT_FRAME_NOTAVAIL; - } - } - else if (fm->is_first_frame) - { - // dequeue the first entry in the list. Not need to update the time stamp as - // the list should contain only one frame. 
-#ifdef MIX_LOG_ENABLE - if (g_slist_length(fm->frame_list) != 1) - { - LOG_W("length of list is not equal to 1 for the first frame.\n"); - } -#endif - *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0); - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf)); - - if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP) - { - mix_videoframe_get_timestamp(*mvf, &(fm->last_frame_timestamp)); - fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; - LOG_V("The first frame is dequeued, ts = %"G_GINT64_FORMAT"\n", fm->last_frame_timestamp); - } - else if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER) - { - mix_videoframe_get_displayorder(*mvf, &(fm->next_frame_picnumber)); - LOG_V("The first frame is dequeued, POC = %d\n", fm->next_frame_picnumber); - fm->next_frame_picnumber++; - //if (fm->next_frame_picnumber == fm->max_picture_number) - // fm->next_frame_picnumber = 0; - } - else - { -#ifdef MIX_LOG_ENABLE - MixFrameType type; - mix_videoframe_get_frame_type(*mvf, &type); - LOG_V("The first frame is dequeud, frame type is %d.\n", type); -#endif - } - fm->is_first_frame = FALSE; - - ret = MIX_RESULT_SUCCESS; - } - else - { - // not the first frame and list is not empty - switch(fm->mode) - { - case MIX_DISPLAY_ORDER_TIMESTAMP: - ret = mix_framemanager_timestamp_based_dequeue(fm, mvf); - break; - - case MIX_DISPLAY_ORDER_PICNUMBER: - ret = mix_framemanager_picnumber_based_dequeue(fm, mvf); - break; - - case MIX_DISPLAY_ORDER_PICTYPE: - ret = mix_framemanager_pictype_based_dequeue(fm, mvf); - break; - - case MIX_DISPLAY_ORDER_FIFO: - *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0); - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf)); - ret = MIX_RESULT_SUCCESS; - LOG_V("One frame is dequeued.\n"); - break; - - default: - LOG_E("Invalid frame order mode\n"); - ret = MIX_RESULT_FAIL; - break; - } - } - - g_mutex_unlock(fm->lock); - - LOG_V("End\n"); - - return ret; -} - -MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (!MIX_IS_FRAMEMANAGER(fm)) { - return MIX_RESULT_INVALID_PARAM; - } - - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } - - g_mutex_lock(fm->lock); - fm->eos = TRUE; - LOG_V("EOS is received.\n"); - g_mutex_unlock(fm->lock); - - return ret; -} - diff --git a/mix_video/src/mixframemanager.cpp b/mix_video/src/mixframemanager.cpp new file mode 100644 index 0000000..0ec8075 --- /dev/null +++ b/mix_video/src/mixframemanager.cpp @@ -0,0 +1,648 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. 
Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include + +#include "mixvideolog.h" +#include "mixframemanager.h" +#include "mixvideoframe_private.h" + +#define INITIAL_FRAME_ARRAY_SIZE 16 + +// Assume only one backward reference is used. This will hold up to 2 frames before forcing +// the earliest frame out of queue. +#define MIX_MAX_ENQUEUE_SIZE 2 + +// RTP timestamp is 32-bit long and could be rollover in 13 hours (based on 90K Hz clock) +#define TS_ROLLOVER_THRESHOLD (0xFFFFFFFF/2) + +#define MIX_SECOND (G_USEC_PER_SEC * G_GINT64_CONSTANT (1000)) + +MixFrameManager::~MixFrameManager() { + /* cleanup here */ + mix_framemanager_deinitialize(this); +} + +MixFrameManager::MixFrameManager() + :initialized(FALSE) + ,flushing(FALSE) + ,eos(FALSE) + ,mLock() + ,frame_list(NULL) + ,framerate_numerator(30) + ,framerate_denominator(1) + ,frame_timestamp_delta(0) + ,mode(MIX_DISPLAY_ORDER_UNKNOWN) + ,is_first_frame(TRUE) + ,last_frame_timestamp(0) + ,next_frame_timestamp(0) + ,next_frame_picnumber(0) + ,max_enqueue_size(MIX_MAX_ENQUEUE_SIZE) + ,max_picture_number((guint32)-1) + ,ref_count(1) { +} + +MixFrameManager *mix_framemanager_new(void) { + return new MixFrameManager(); +} + + + +MixFrameManager *mix_framemanager_ref(MixFrameManager * fm) { + if (NULL != fm) + fm->ref_count++; + return fm; +} + +/* MixFrameManager class methods */ + +MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, + MixDisplayOrderMode mode, gint framerate_numerator, + gint framerate_denominator) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + if (fm->initialized) { + return MIX_RESULT_ALREADY_INIT; + } + + fm->frame_list = NULL; + fm->framerate_numerator = framerate_numerator; + fm->framerate_denominator = framerate_denominator; + fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND + / fm->framerate_numerator; + + fm->mode = mode; + + LOG_V("fm->mode = %d\n", fm->mode); + + fm->is_first_frame = TRUE; + fm->next_frame_timestamp = 0; + fm->last_frame_timestamp = 0; + fm->next_frame_picnumber = 0; + + fm->initialized = TRUE; + +cleanup: + + return ret; +} + +MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm) { + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } + + mix_framemanager_flush(fm); + + fm->mLock.lock(); + + fm->initialized = FALSE; + + fm->mLock.unlock(); + + return MIX_RESULT_SUCCESS; +} + +MixFrameManager* mix_framemanager_unref(MixFrameManager *fm){ + if (NULL != fm) { + fm->ref_count--; + if (0 == fm->ref_count) { + delete fm; + return NULL; + } + } + return fm; +} + +MIX_RESULT mix_framemanager_set_framerate(MixFrameManager *fm, + gint framerate_numerator, gint framerate_denominator) { + + if (framerate_numerator <= 0 || framerate_denominator <= 0) { + return MIX_RESULT_INVALID_PARAM; + } + fm->mLock.lock(); + fm->framerate_numerator = framerate_numerator; + fm->framerate_denominator = framerate_denominator; + fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND + / fm->framerate_numerator; + fm->mLock.unlock(); + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_framemanager_get_framerate(MixFrameManager *fm, + gint *framerate_numerator, gint *framerate_denominator) { + if (!framerate_numerator || !framerate_denominator) { + return MIX_RESULT_INVALID_PARAM; + } + fm->mLock.lock(); + *framerate_numerator = fm->framerate_numerator; + *framerate_denominator = fm->framerate_denominator; + fm->mLock.unlock(); + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_framemanager_get_display_order_mode( + 
MixFrameManager *fm, MixDisplayOrderMode *mode) {
+    if (!mode) {
+        return MIX_RESULT_INVALID_PARAM;
+    }
+    /* no need to use lock */
+    *mode = fm->mode;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_framemanager_set_max_enqueue_size(
+    MixFrameManager *fm, gint size) {
+    if (size <= 0) {
+        return MIX_RESULT_FAIL;
+    }
+    fm->mLock.lock();
+    fm->max_enqueue_size = size;
+    LOG_V("max enqueue size is %d\n", size);
+    fm->mLock.unlock();
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_framemanager_set_max_picture_number(
+    MixFrameManager *fm, guint32 num) {
+    // NOTE: set maximum picture order number only if pic_order_cnt_type is 0 (see H.264 spec)
+    if (num < 16) {
+        // Refer to H.264 spec: log2_max_pic_order_cnt_lsb_minus4. Max pic order will never be less than 16.
+        return MIX_RESULT_INVALID_PARAM;
+    }
+    fm->mLock.lock();
+    // max_picture_number is exclusive (range from 0 to num - 1).
+    // Note that this number may not be reliable if the encoder does not conform to the spec;
+    // the implementation will not automatically roll over fm->next_frame_picnumber when it reaches
+    // fm->max_picture_number.
+    fm->max_picture_number = num;
+    LOG_V("max picture number is %d\n", num);
+
+    fm->mLock.unlock();
+    return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) {
+    MixVideoFrame *frame = NULL;
+    if (!fm->initialized) {
+        return MIX_RESULT_NOT_INIT;
+    }
+
+    fm->mLock.lock();
+    while (fm->frame_list) {
+        frame = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0);
+        fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)frame);
+        mix_videoframe_unref(frame);
+        LOG_V("one frame is flushed\n");
+    }
+
+    fm->eos = FALSE;
+    fm->is_first_frame = TRUE;
+    fm->next_frame_timestamp = 0;
+    fm->last_frame_timestamp = 0;
+    fm->next_frame_picnumber = 0;
+
+    fm->mLock.unlock();
+    return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) {
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    LOG_V("Begin fm->mode = %d\n", fm->mode);
+
+    if (!mvf) {
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+    if (!fm->initialized) {
+        return MIX_RESULT_NOT_INIT;
+    }
+
+    gboolean discontinuity = FALSE;
+    mix_videoframe_get_discontinuity(mvf, &discontinuity);
+    if (discontinuity)
+    {
+        LOG_V("current frame has discontinuity!\n");
+        mix_framemanager_flush(fm);
+    }
+#ifdef MIX_LOG_ENABLE
+    if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER) {
+        guint32 num;
+        mix_videoframe_get_displayorder(mvf, &num);
+        LOG_V("pic %d is enqueued.\n", num);
+    }
+
+    if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP) {
+        guint64 ts;
+        mix_videoframe_get_timestamp(mvf, &ts);
+        LOG_V("ts %"G_GINT64_FORMAT" is enqueued.\n", ts);
+    }
+#endif
+
+    fm->mLock.lock();
+    fm->frame_list = g_slist_append(fm->frame_list, (gpointer)mvf);
+    fm->mLock.unlock();
+    LOG_V("End\n");
+    return ret;
+}
+
+void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf)
+{
+    // This function finds the lowest timestamp in the list and assigns it to the dequeued
+    // video frame, if that timestamp is smaller than the timestamp of the dequeued video frame.
+    int i;
+    guint64 ts = 0, min_ts = 0;
+    MixVideoFrame *p = NULL, *min_p = NULL;
+    int len = g_slist_length(fm->frame_list);
+    if (len == 0)
+    {
+        // nothing to update
+        return;
+    }
+
+    // find the video frame with the smallest timestamp, taking rollover into account when
+    // comparing timestamps.
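+    // Worked example of the rollover-aware comparison below (illustrative
+    // 32-bit RTP-style values, not taken from any stream): with
+    // TS_ROLLOVER_THRESHOLD = 0x7FFFFFFF, a candidate ts = 0xFFFFFFF0
+    // compared against min_ts = 0x00000100 satisfies
+    // (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD), so it is treated
+    // as the older, pre-wraparound timestamp and becomes the new minimum.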
+    for (i = 0; i < len; i++)
+    {
+        p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i);
+        mix_videoframe_get_timestamp(p, &ts);
+        if (i == 0 ||
+            (ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) ||
+            (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD))
+        {
+            min_ts = ts;
+            min_p = p;
+        }
+    }
+
+    mix_videoframe_get_timestamp(mvf, &ts);
+    if ((ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) ||
+        (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD))
+    {
+        // the frame to be updated already has the smaller timestamp
+    }
+    else
+    {
+        // timestamps need to be monotonically non-decreasing, so swap timestamps.
+        mix_videoframe_set_timestamp(mvf, min_ts);
+        mix_videoframe_set_timestamp(min_p, ts);
+        LOG_V("timestamp for current frame is updated from %"G_GINT64_FORMAT" to %"G_GINT64_FORMAT"\n",
+            ts, min_ts);
+    }
+}
+
+
+MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf)
+{
+    int i, num_i_or_p;
+    MixVideoFrame *p, *first_i_or_p;
+    MixFrameType type;
+    int len = g_slist_length(fm->frame_list);
+
+    num_i_or_p = 0;
+    first_i_or_p = NULL;
+
+    for (i = 0; i < len; i++)
+    {
+        p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i);
+        mix_videoframe_get_frame_type(p, &type);
+        if (type == TYPE_B)
+        {
+            // A B frame has higher display priority, as only one reference frame is kept in the list
+            // and it should be the backward reference frame of the B frame.
+            fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p);
+            mix_framemanager_update_timestamp(fm, p);
+            *mvf = p;
+            LOG_V("B frame is dequeued.\n");
+            return MIX_RESULT_SUCCESS;
+        }
+
+        if (type != TYPE_I && type != TYPE_P)
+        {
+            // this should never happen
+            LOG_E("Frame type is invalid!\n");
+            fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p);
+            mix_videoframe_unref(p);
+            return MIX_RESULT_FRAME_NOTAVAIL;
+        }
+        num_i_or_p++;
+        if (first_i_or_p == NULL)
+        {
+            first_i_or_p = p;
+        }
+    }
+
+    // if there is more than one reference frame in the list, the first one is dequeued.
+    if (num_i_or_p > 1 || fm->eos)
+    {
+        if (first_i_or_p == NULL)
+        {
+            // this should never happen!
+            LOG_E("first_i_or_p frame is NULL!\n");
+            return MIX_RESULT_FAIL;
+        }
+        fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)first_i_or_p);
+        mix_framemanager_update_timestamp(fm, first_i_or_p);
+        *mvf = first_i_or_p;
+#ifdef MIX_LOG_ENABLE
+        mix_videoframe_get_frame_type(first_i_or_p, &type);
+        if (type == TYPE_I)
+        {
+            LOG_V("I frame is dequeued.\n");
+        }
+        else
+        {
+            LOG_V("P frame is dequeued.\n");
+        }
+#endif
+        return MIX_RESULT_SUCCESS;
+    }
+
+    return MIX_RESULT_FRAME_NOTAVAIL;
+}
+
+MIX_RESULT mix_framemanager_timestamp_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf)
+{
+    int i, len;
+    MixVideoFrame *p, *p_out_of_dated;
+    guint64 ts, ts_next_pending, ts_out_of_dated;
+    guint64 tolerance = fm->frame_timestamp_delta/4;
+
+retry:
+    // len may be changed during retry!
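+    // Selection window implemented below (sketch): a frame is dequeued when
+    // fm->last_frame_timestamp <= ts <= fm->next_frame_timestamp + tolerance,
+    // with tolerance set to a quarter of the nominal frame interval. For
+    // example, at 30 fps frame_timestamp_delta is ~33.3 ms, so a frame may
+    // run up to ~8.3 ms past the expected slot and still match.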
+ len = g_slist_length(fm->frame_list); + ts_next_pending = (guint64)-1; + ts_out_of_dated = 0; + p_out_of_dated = NULL; + + + for (i = 0; i < len; i++) + { + p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); + mix_videoframe_get_timestamp(p, &ts); + if (ts >= fm->last_frame_timestamp && + ts <= fm->next_frame_timestamp + tolerance) + { + fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); + *mvf = p; + mix_videoframe_get_timestamp(p, &(fm->last_frame_timestamp)); + fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; + LOG_V("frame is dequeud, ts = %"G_GINT64_FORMAT".\n", ts); + return MIX_RESULT_SUCCESS; + } + + if (ts > fm->next_frame_timestamp + tolerance && + ts < ts_next_pending) + { + ts_next_pending = ts; + } + if (ts < fm->last_frame_timestamp && + ts >= ts_out_of_dated) + { + // video frame that is most recently out-of-dated. + // this may happen in variable frame rate scenario where two adjacent frames both meet + // the "next frame" criteria, and the one with larger timestamp is dequeued first. + ts_out_of_dated = ts; + p_out_of_dated = p; + } + } + + if (p_out_of_dated && + fm->last_frame_timestamp - ts_out_of_dated < TS_ROLLOVER_THRESHOLD) + { + fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p_out_of_dated); + mix_videoframe_unref(p_out_of_dated); + LOG_W("video frame is out of dated. ts = %"G_GINT64_FORMAT" compared to last ts = %"G_GINT64_FORMAT".\n", + ts_out_of_dated, fm->last_frame_timestamp); + return MIX_RESULT_FRAME_NOTAVAIL; + } + + if (len <= fm->max_enqueue_size && fm->eos == FALSE) + { + LOG_V("no frame is dequeued, expected ts = %"G_GINT64_FORMAT", next pending ts = %"G_GINT64_FORMAT".(List size = %d)\n", + fm->next_frame_timestamp, ts_next_pending, len); + return MIX_RESULT_FRAME_NOTAVAIL; + } + + // timestamp has gap + if (ts_next_pending != -1) + { + LOG_V("timestamp has gap, jumping from %"G_GINT64_FORMAT" to %"G_GINT64_FORMAT".\n", + fm->next_frame_timestamp, ts_next_pending); + + fm->next_frame_timestamp = ts_next_pending; + goto retry; + } + + // time stamp roll-over + LOG_V("time stamp is rolled over, resetting next frame timestamp from %"G_GINT64_FORMAT" to 0.\n", + fm->next_frame_timestamp); + + fm->next_frame_timestamp = 0; + fm->last_frame_timestamp = 0; + goto retry; + + // should never run to here + LOG_E("Error in timestamp-based dequeue implementation!\n"); + return MIX_RESULT_FAIL; +} + +MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) +{ + int i, len; + MixVideoFrame* p; + guint32 picnum; + guint32 next_picnum_pending; + + len = g_slist_length(fm->frame_list); + +retry: + next_picnum_pending = (guint32)-1; + + for (i = 0; i < len; i++) + { + p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); + mix_videoframe_get_displayorder(p, &picnum); + if (picnum == fm->next_frame_picnumber) + { + fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); + mix_framemanager_update_timestamp(fm, p); + *mvf = p; + LOG_V("frame is dequeued, poc = %d.\n", fm->next_frame_picnumber); + fm->next_frame_picnumber++; + //if (fm->next_frame_picnumber == fm->max_picture_number) + // fm->next_frame_picnumber = 0; + return MIX_RESULT_SUCCESS; + } + + if (picnum > fm->next_frame_picnumber && + picnum < next_picnum_pending) + { + next_picnum_pending = picnum; + } + + if (picnum < fm->next_frame_picnumber && + fm->next_frame_picnumber - picnum < 8) + { + // the smallest value of "max_pic_order_cnt_lsb_minus4" is 16. 
If the distance of the "next frame pic number"
+            // to the pic number in the list is less than half of 16, it is safe to assume that the pic number
+            // was reset when a new IDR was encoded (the pic number of a top or bottom field in an IDR must be 0, subclause 8.2.1).
+            LOG_V("picture number is reset to %d, next pic number is %d, next pending number is %d.\n",
+                picnum, fm->next_frame_picnumber, next_picnum_pending);
+            break;
+        }
+    }
+
+    if (len <= fm->max_enqueue_size && fm->eos == FALSE)
+    {
+        LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. (List size = %d)\n",
+            fm->next_frame_picnumber, next_picnum_pending, len);
+        return MIX_RESULT_FRAME_NOTAVAIL;
+    }
+
+    // picture number has a gap
+    if (next_picnum_pending != (guint32)-1)
+    {
+        LOG_V("picture number has gap, jumping from %d to %d.\n",
+            fm->next_frame_picnumber, next_picnum_pending);
+
+        fm->next_frame_picnumber = next_picnum_pending;
+        goto retry;
+    }
+
+    // picture number roll-over
+    LOG_V("picture number is rolled over, resetting next picnum from %d to 0.\n",
+        fm->next_frame_picnumber);
+
+    fm->next_frame_picnumber = 0;
+    goto retry;
+
+    // should never be reached
+    LOG_E("Error in picnumber-based dequeue implementation!\n");
+    return MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) {
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    LOG_V("Begin\n");
+
+    if (!mvf) {
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+    if (!fm->initialized) {
+        return MIX_RESULT_NOT_INIT;
+    }
+
+    fm->mLock.lock();
+
+    if (fm->frame_list == NULL)
+    {
+        if (fm->eos)
+        {
+            LOG_V("No frame is dequeued (eos)!\n");
+            ret = MIX_RESULT_EOS;
+        }
+        else
+        {
+            LOG_V("No frame is dequeued as queue is empty!\n");
+            ret = MIX_RESULT_FRAME_NOTAVAIL;
+        }
+    }
+    else if (fm->is_first_frame)
+    {
+        // dequeue the first entry in the list. No need to update the timestamp as
+        // the list should contain only one frame.
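+        // The first dequeue establishes the ordering baseline: the branches
+        // below seed last/next_frame_timestamp (timestamp mode) or
+        // next_frame_picnumber (picnumber mode) from this frame, so later
+        // dequeues have a reference point to order against.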
+#ifdef MIX_LOG_ENABLE + if (g_slist_length(fm->frame_list) != 1) + { + LOG_W("length of list is not equal to 1 for the first frame.\n"); + } +#endif + *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0); + fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf)); + + if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP) + { + mix_videoframe_get_timestamp(*mvf, &(fm->last_frame_timestamp)); + fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; + LOG_V("The first frame is dequeued, ts = %"G_GINT64_FORMAT"\n", fm->last_frame_timestamp); + } + else if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER) + { + mix_videoframe_get_displayorder(*mvf, &(fm->next_frame_picnumber)); + LOG_V("The first frame is dequeued, POC = %d\n", fm->next_frame_picnumber); + fm->next_frame_picnumber++; + //if (fm->next_frame_picnumber == fm->max_picture_number) + // fm->next_frame_picnumber = 0; + } + else + { +#ifdef MIX_LOG_ENABLE + MixFrameType type; + mix_videoframe_get_frame_type(*mvf, &type); + LOG_V("The first frame is dequeud, frame type is %d.\n", type); +#endif + } + fm->is_first_frame = FALSE; + + ret = MIX_RESULT_SUCCESS; + } + else + { + // not the first frame and list is not empty + switch(fm->mode) + { + case MIX_DISPLAY_ORDER_TIMESTAMP: + ret = mix_framemanager_timestamp_based_dequeue(fm, mvf); + break; + + case MIX_DISPLAY_ORDER_PICNUMBER: + ret = mix_framemanager_picnumber_based_dequeue(fm, mvf); + break; + + case MIX_DISPLAY_ORDER_PICTYPE: + ret = mix_framemanager_pictype_based_dequeue(fm, mvf); + break; + + case MIX_DISPLAY_ORDER_FIFO: + *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0); + fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf)); + ret = MIX_RESULT_SUCCESS; + LOG_V("One frame is dequeued.\n"); + break; + + default: + LOG_E("Invalid frame order mode\n"); + ret = MIX_RESULT_FAIL; + break; + } + } + + fm->mLock.unlock(); + LOG_V("End\n"); + return ret; +} + +MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } + fm->mLock.lock(); + fm->eos = TRUE; + LOG_V("EOS is received.\n"); + fm->mLock.unlock(); + return ret; +} + diff --git a/mix_video/src/mixframemanager.h b/mix_video/src/mixframemanager.h index fae5948..4be322a 100644 --- a/mix_video/src/mixframemanager.h +++ b/mix_video/src/mixframemanager.h @@ -12,20 +12,7 @@ #include #include "mixvideodef.h" #include "mixvideoframe.h" - -G_BEGIN_DECLS -/* - * Type macros. 
- */ -#define MIX_TYPE_FRAMEMANAGER (mix_framemanager_get_type ()) -#define MIX_FRAMEMANAGER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_FRAMEMANAGER, MixFrameManager)) -#define MIX_IS_FRAMEMANAGER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_FRAMEMANAGER)) -#define MIX_FRAMEMANAGER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_FRAMEMANAGER, MixFrameManagerClass)) -#define MIX_IS_FRAMEMANAGER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_FRAMEMANAGER)) -#define MIX_FRAMEMANAGER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_FRAMEMANAGER, MixFrameManagerClass)) - -typedef struct _MixFrameManager MixFrameManager; -typedef struct _MixFrameManagerClass MixFrameManagerClass; +#include "mixvideothread.h" /* * MIX_FRAMEORDER_MODE_DECODEORDER is here interpreted as @@ -42,18 +29,14 @@ typedef enum } MixDisplayOrderMode; -struct _MixFrameManager { - /*< public > */ - GObject parent; - - /*< public > */ - +class MixFrameManager { /*< private > */ +public: gboolean initialized; gboolean flushing; gboolean eos; - GMutex *lock; + MixVideoMutex mLock; GSList* frame_list; gint framerate_numerator; @@ -68,29 +51,13 @@ struct _MixFrameManager { guint32 next_frame_picnumber; gint max_enqueue_size; guint32 max_picture_number; -}; -/** - * MixFrameManagerClass: - * - * MI-X Video object class - */ -struct _MixFrameManagerClass { - /*< public > */ - GObjectClass parent_class; - -/* class members */ - -/*< public > */ + guint32 ref_count; +public: + MixFrameManager(); + ~MixFrameManager(); }; -/** - * mix_framemanager_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_framemanager_get_type(void); /** * mix_framemanager_new: @@ -115,7 +82,7 @@ MixFrameManager *mix_framemanager_ref(MixFrameManager * mix); * * Decrement reference count of the object. */ -#define mix_framemanager_unref(obj) g_object_unref (G_OBJECT(obj)) +MixFrameManager* mix_framemanager_unref(MixFrameManager* fm); /* Class Methods */ @@ -182,5 +149,4 @@ MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf); */ MIX_RESULT mix_framemanager_eos(MixFrameManager *fm); -G_END_DECLS #endif /* __MIX_FRAMEMANAGER_H__ */ diff --git a/mix_video/src/mixsurfacepool.c b/mix_video/src/mixsurfacepool.c deleted file mode 100644 index 5ed6007..0000000 --- a/mix_video/src/mixsurfacepool.c +++ /dev/null @@ -1,668 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
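For orientation, a minimal end-to-end sketch of the reworked C++ MixFrameManager API defined in the header diff above (hypothetical caller code, not part of this patch; decoded and n are assumed inputs):

    #include "mixframemanager.h"

    void example_display_ordering(MixVideoFrame *decoded[], int n) {
        MixFrameManager *fm = mix_framemanager_new();
        if (fm == NULL)
            return;
        // 30000/1001 sets the nominal ~29.97 fps frame interval.
        mix_framemanager_initialize(fm, MIX_DISPLAY_ORDER_TIMESTAMP, 30000, 1001);

        for (int i = 0; i < n; i++)
            mix_framemanager_enqueue(fm, decoded[i]); // manager takes the reference

        mix_framemanager_eos(fm);

        MixVideoFrame *out = NULL;
        while (mix_framemanager_dequeue(fm, &out) == MIX_RESULT_SUCCESS) {
            // ... render out in display order ...
            mix_videoframe_unref(out);
        }

        mix_framemanager_deinitialize(fm);
        mix_framemanager_unref(fm); // ref_count drops to 0 and the object is deleted
    }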
- */ - -/** - * SECTION:mixsurfacepool - * @short_description: MI-X Video Surface Pool - * - * A data object which stores and manipulates a pool of video surfaces. - */ - -#include "mixvideolog.h" -#include "mixsurfacepool.h" -#include "mixvideoframe_private.h" - -#define MIX_LOCK(lock) g_mutex_lock(lock); -#define MIX_UNLOCK(lock) g_mutex_unlock(lock); - -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } - -static GType _mix_surfacepool_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_surfacepool_type = g_define_type_id; } - -gboolean mix_surfacepool_copy(MixParams * target, const MixParams * src); -MixParams *mix_surfacepool_dup(const MixParams * obj); -gboolean mix_surfacepool_equal(MixParams * first, MixParams * second); -static void mix_surfacepool_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixSurfacePool, mix_surfacepool, MIX_TYPE_PARAMS, - _do_init); - -static void mix_surfacepool_init(MixSurfacePool * self) { - /* initialize properties here */ - self->free_list = NULL; - self->in_use_list = NULL; - self->free_list_max_size = 0; - self->free_list_cur_size = 0; - self->high_water_mark = 0; - self->initialized = FALSE; - - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; - - // TODO: relocate this mutex allocation -we can't communicate failure in ctor. - // Note that g_thread_init() has already been called by mix_video_init() - self->objectlock = g_mutex_new(); - -} - -static void mix_surfacepool_class_init(MixSurfacePoolClass * klass) { - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); - - mixparams_class->finalize = mix_surfacepool_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_surfacepool_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_surfacepool_dup; - mixparams_class->equal = (MixParamsEqualFunction) mix_surfacepool_equal; -} - -MixSurfacePool * -mix_surfacepool_new(void) { - MixSurfacePool *ret = (MixSurfacePool *) g_type_create_instance( - MIX_TYPE_SURFACEPOOL); - return ret; -} - -void mix_surfacepool_finalize(MixParams * obj) { - /* clean up here. */ - - MixSurfacePool *self = MIX_SURFACEPOOL(obj); - - if (self->objectlock) { - g_mutex_free(self->objectlock); - self->objectlock = NULL; - } - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} - -MixSurfacePool * -mix_surfacepool_ref(MixSurfacePool * mix) { - return (MixSurfacePool *) mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_surfacepool_dup: - * @obj: a #MixSurfacePool object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixParams * -mix_surfacepool_dup(const MixParams * obj) { - MixParams *ret = NULL; - - if (MIX_IS_SURFACEPOOL(obj)) { - - MIX_LOCK(MIX_SURFACEPOOL(obj)->objectlock); - - MixSurfacePool *duplicate = mix_surfacepool_new(); - if (mix_surfacepool_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { - ret = MIX_PARAMS(duplicate); - } else { - mix_surfacepool_unref(duplicate); - } - - MIX_UNLOCK(MIX_SURFACEPOOL(obj)->objectlock); - - } - return ret; -} - -/** - * mix_surfacepool_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. 
- */ -gboolean mix_surfacepool_copy(MixParams * target, const MixParams * src) { - MixSurfacePool *this_target, *this_src; - - if (MIX_IS_SURFACEPOOL(target) && MIX_IS_SURFACEPOOL(src)) { - - MIX_LOCK(MIX_SURFACEPOOL(src)->objectlock); - MIX_LOCK(MIX_SURFACEPOOL(target)->objectlock); - - // Cast the base object to this child object - this_target = MIX_SURFACEPOOL(target); - this_src = MIX_SURFACEPOOL(src); - - // Free the existing properties - - // Duplicate string - this_target->free_list = this_src->free_list; - this_target->in_use_list = this_src->in_use_list; - this_target->free_list_max_size = this_src->free_list_max_size; - this_target->free_list_cur_size = this_src->free_list_cur_size; - this_target->high_water_mark = this_src->high_water_mark; - - MIX_UNLOCK(MIX_SURFACEPOOL(src)->objectlock); - MIX_UNLOCK(MIX_SURFACEPOOL(target)->objectlock); - - // Now chainup base class - if (parent_class->copy) { - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - return TRUE; - } - } - return FALSE; -} - -/** - * mix_surfacepool_equal: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. - */ -gboolean mix_surfacepool_equal(MixParams * first, MixParams * second) { - gboolean ret = FALSE; - MixSurfacePool *this_first, *this_second; - - if (MIX_IS_SURFACEPOOL(first) && MIX_IS_SURFACEPOOL(second)) { - // Deep compare - // Cast the base object to this child object - - MIX_LOCK(MIX_SURFACEPOOL(first)->objectlock); - MIX_LOCK(MIX_SURFACEPOOL(second)->objectlock); - - this_first = MIX_SURFACEPOOL(first); - this_second = MIX_SURFACEPOOL(second); - - /* TODO: add comparison for other properties */ - if (this_first->free_list == this_second->free_list - && this_first->in_use_list == this_second->in_use_list - && this_first->free_list_max_size - == this_second->free_list_max_size - && this_first->free_list_cur_size - == this_second->free_list_cur_size - && this_first->high_water_mark == this_second->high_water_mark) { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = klass->equal(first, second); - else - ret = TRUE; - } - - MIX_LOCK(MIX_SURFACEPOOL(first)->objectlock); - MIX_LOCK(MIX_SURFACEPOOL(second)->objectlock); - - } - - return ret; -} - -/* Class Methods */ - -/** - * mix_surfacepool_initialize: - * @returns: MIX_RESULT_SUCCESS if successful in creating the surface pool - * - * Use this method to create a new surface pool, consisting of a GSList of - * frame objects that represents a pool of surfaces. - */ -MIX_RESULT mix_surfacepool_initialize(MixSurfacePool * obj, - VASurfaceID *surfaces, guint num_surfaces, VADisplay va_display) { - - LOG_V( "Begin\n"); - - if (obj == NULL || surfaces == NULL) { - - LOG_E( - "Error NULL ptrs, obj %x, surfaces %x\n", (guint) obj, - (guint) surfaces); - - return MIX_RESULT_NULL_PTR; - } - - MIX_LOCK(obj->objectlock); - - if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) { - //surface pool is in use; return error; need proper cleanup - //TODO need cleanup here? 
- - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_ALREADY_INIT; - } - - if (num_surfaces == 0) { - obj->free_list = NULL; - - obj->in_use_list = NULL; - - obj->free_list_max_size = num_surfaces; - - obj->free_list_cur_size = num_surfaces; - - obj->high_water_mark = 0; - - /* assume it is initialized */ - obj->initialized = TRUE; - - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_SUCCESS; - } - - // Initialize the free pool with frame objects - - gint i = 0; - MixVideoFrame *frame = NULL; - - for (; i < num_surfaces; i++) { - - //Create a frame object for each surface ID - frame = mix_videoframe_new(); - - if (frame == NULL) { - //TODO need to log an error here and do cleanup - - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_NO_MEMORY; - } - - // Set the frame ID to the surface ID - mix_videoframe_set_frame_id(frame, surfaces[i]); - // Set the ci frame index to the surface ID - mix_videoframe_set_ci_frame_idx (frame, i); - // Leave timestamp for each frame object as zero - // Set the pool reference in the private data of the frame object - mix_videoframe_set_pool(frame, obj); - - mix_videoframe_set_vadisplay(frame, va_display); - - //Add each frame object to the pool list - obj->free_list = g_slist_append(obj->free_list, frame); - - } - - obj->in_use_list = NULL; - - obj->free_list_max_size = num_surfaces; - - obj->free_list_cur_size = num_surfaces; - - obj->high_water_mark = 0; - - obj->initialized = TRUE; - - MIX_UNLOCK(obj->objectlock); - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - -/** - * mix_surfacepool_put: - * @returns: SUCCESS or FAILURE - * - * Use this method to return a surface to the free pool - */ -MIX_RESULT mix_surfacepool_put(MixSurfacePool * obj, MixVideoFrame * frame) { - - LOG_V( "Begin\n"); - if (obj == NULL || frame == NULL) - return MIX_RESULT_NULL_PTR; - - LOG_V( "Frame id: %d\n", frame->frame_id); - MIX_LOCK(obj->objectlock); - - if (obj->in_use_list == NULL) { - //in use list cannot be empty if a frame is in use - //TODO need better error code for this - - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_FAIL; - } - - GSList *element = g_slist_find(obj->in_use_list, frame); - if (element == NULL) { - //Integrity error; frame not found in in use list - //TODO need better error code and handling for this - - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_FAIL; - } else { - //Remove this element from the in_use_list - obj->in_use_list = g_slist_remove_link(obj->in_use_list, element); - - //Concat the element to the free_list and reset the timestamp of the frame - //Note that the surface ID stays valid - mix_videoframe_set_timestamp(frame, 0); - obj->free_list = g_slist_concat(obj->free_list, element); - - //increment the free list count - obj->free_list_cur_size++; - } - - //Note that we do nothing with the ref count for this. We want it to - //stay at 1, which is what triggered it to be added back to the free list. 
- - MIX_UNLOCK(obj->objectlock); - - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; -} - -/** - * mix_surfacepool_get: - * @returns: SUCCESS or FAILURE - * - * Use this method to get a surface from the free pool - */ -MIX_RESULT mix_surfacepool_get(MixSurfacePool * obj, MixVideoFrame ** frame) { - - LOG_V( "Begin\n"); - - if (obj == NULL || frame == NULL) - return MIX_RESULT_NULL_PTR; - - MIX_LOCK(obj->objectlock); - -#if 0 - if (obj->free_list == NULL) { -#else - if (obj->free_list_cur_size <= 1) { //Keep one surface free at all times for VBLANK bug -#endif - //We are out of surfaces - //TODO need to log this as well - - MIX_UNLOCK(obj->objectlock); - - LOG_E( "out of surfaces\n"); - - return MIX_RESULT_OUTOFSURFACES; - } - - //Remove a frame from the free pool - - //We just remove the one at the head, since it's convenient - GSList *element = obj->free_list; - obj->free_list = g_slist_remove_link(obj->free_list, element); - if (element == NULL) { - //Unexpected behavior - //TODO need better error code and handling for this - - MIX_UNLOCK(obj->objectlock); - - LOG_E( "Element is null\n"); - - return MIX_RESULT_FAIL; - } else { - //Concat the element to the in_use_list - obj->in_use_list = g_slist_concat(obj->in_use_list, element); - - //TODO replace with proper logging - - LOG_I( "frame refcount%d\n", - MIX_PARAMS(element->data)->refcount); - - //Set the out frame pointer - *frame = (MixVideoFrame *) element->data; - - LOG_V( "Frame id: %d\n", (*frame)->frame_id); - - //decrement the free list count - obj->free_list_cur_size--; - - //Check the high water mark for surface use - guint size = g_slist_length(obj->in_use_list); - if (size > obj->high_water_mark) - obj->high_water_mark = size; - //TODO Log this high water mark - } - - //Increment the reference count for the frame - mix_videoframe_ref(*frame); - - MIX_UNLOCK(obj->objectlock); - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - - -gint mixframe_compare_index (MixVideoFrame * a, MixVideoFrame * b) -{ - if (a == NULL || b == NULL) - return -1; - if (a->ci_frame_idx == b->ci_frame_idx) - return 0; - else - return -1; -} - -/** - * mix_surfacepool_get: - * @returns: SUCCESS or FAILURE - * - * Use this method to get a surface from the free pool according to the CI frame idx - */ - -MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool * obj, MixVideoFrame ** frame, MixVideoFrame *in_frame) { - - LOG_V( "Begin\n"); - - if (obj == NULL || frame == NULL) - return MIX_RESULT_NULL_PTR; - - MIX_LOCK(obj->objectlock); - - if (obj->free_list == NULL) { - //We are out of surfaces - //TODO need to log this as well - - MIX_UNLOCK(obj->objectlock); - - LOG_E( "out of surfaces\n"); - - return MIX_RESULT_OUTOFSURFACES; - } - - //Remove a frame from the free pool - - //We just remove the one at the head, since it's convenient - GSList *element = g_slist_find_custom (obj->free_list, in_frame, (GCompareFunc) mixframe_compare_index); - obj->free_list = g_slist_remove_link(obj->free_list, element); - if (element == NULL) { - //Unexpected behavior - //TODO need better error code and handling for this - - MIX_UNLOCK(obj->objectlock); - - LOG_E( "Element is null\n"); - - return MIX_RESULT_FAIL; - } else { - //Concat the element to the in_use_list - obj->in_use_list = g_slist_concat(obj->in_use_list, element); - - //TODO replace with proper logging - - LOG_I( "frame refcount%d\n", - MIX_PARAMS(element->data)->refcount); - - //Set the out frame pointer - *frame = (MixVideoFrame *) element->data; - - //Check the high water mark for surface 
use - guint size = g_slist_length(obj->in_use_list); - if (size > obj->high_water_mark) - obj->high_water_mark = size; - //TODO Log this high water mark - } - - //Increment the reference count for the frame - mix_videoframe_ref(*frame); - - MIX_UNLOCK(obj->objectlock); - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} -/** - * mix_surfacepool_check_available: - * @returns: SUCCESS or FAILURE - * - * Use this method to check availability of getting a surface from the free pool - */ -MIX_RESULT mix_surfacepool_check_available(MixSurfacePool * obj) { - - LOG_V( "Begin\n"); - - if (obj == NULL) - return MIX_RESULT_NULL_PTR; - - MIX_LOCK(obj->objectlock); - - if (obj->initialized == FALSE) - { - LOG_W("surface pool is not initialized, probably configuration data has not been received yet.\n"); - MIX_UNLOCK(obj->objectlock); - return MIX_RESULT_NOT_INIT; - } - - -#if 0 - if (obj->free_list == NULL) { -#else - if (obj->free_list_cur_size <= 1) { //Keep one surface free at all times for VBLANK bug -#endif - //We are out of surfaces - - MIX_UNLOCK(obj->objectlock); - - LOG_W( - "Returning MIX_RESULT_POOLEMPTY because out of surfaces\n"); - - return MIX_RESULT_POOLEMPTY; - } else { - //Pool is not empty - - MIX_UNLOCK(obj->objectlock); - - LOG_I( - "Returning MIX_RESULT_SUCCESS because surfaces are available\n"); - - return MIX_RESULT_SUCCESS; - } - -} - -/** - * mix_surfacepool_deinitialize: - * @returns: SUCCESS or FAILURE - * - * Use this method to teardown a surface pool - */ -MIX_RESULT mix_surfacepool_deinitialize(MixSurfacePool * obj) { - if (obj == NULL) - return MIX_RESULT_NULL_PTR; - - MIX_LOCK(obj->objectlock); - - if ((obj->in_use_list != NULL) || (g_slist_length(obj->free_list) - != obj->free_list_max_size)) { - //TODO better error code - //We have outstanding frame objects in use and they need to be - //freed before we can deinitialize. 
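Note for callers on the precondition above: mix_surfacepool_deinitialize() refuses to proceed while in_use_list is non-empty or the free list is short of free_list_max_size, so every frame must be released before teardown. A minimal, hypothetical caller-side sketch, assuming only the pool API in this file (the function and variable names here are illustrative, not part of the library):

    /* Hypothetical teardown-order sketch -- not part of this patch. */
    void pool_shutdown(MixSurfacePool *pool, MixVideoFrame *held_frame) {
        if (held_frame)
            mix_videoframe_unref(held_frame); /* final unref returns it to the free list */
        if (mix_surfacepool_deinitialize(pool) != MIX_RESULT_SUCCESS) {
            /* frames are still outstanding; release them before retrying */
        }
        mix_surfacepool_unref(pool);
    }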
- - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_FAIL; - } - - //Now remove frame objects from the list - - MixVideoFrame *frame = NULL; - - while (obj->free_list != NULL) { - //Get the frame object from the head of the list - frame = obj->free_list->data; - //frame = g_slist_nth_data(obj->free_list, 0); - - //Release it - mix_videoframe_unref(frame); - - //Delete the head node of the list and store the new head - obj->free_list = g_slist_delete_link(obj->free_list, obj->free_list); - - //Repeat until empty - } - - obj->free_list_max_size = 0; - obj->free_list_cur_size = 0; - - //May want to log this information for tuning - obj->high_water_mark = 0; - - MIX_UNLOCK(obj->objectlock); - - return MIX_RESULT_SUCCESS; -} - -#define MIX_SURFACEPOOL_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_SURFACEPOOL(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_SURFACEPOOL_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_SURFACEPOOL(obj)) return MIX_RESULT_FAIL; \ - - -MIX_RESULT -mix_surfacepool_dumpframe(MixVideoFrame *frame) -{ - LOG_I( "\tFrame %x, id %lu, refcount %d, ts %lu\n", (guint)frame, - frame->frame_id, MIX_PARAMS(frame)->refcount, (gulong) frame->timestamp); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT -mix_surfacepool_dumpprint (MixSurfacePool * obj) -{ - //TODO replace this with proper logging later - - LOG_I( "SURFACE POOL DUMP:\n"); - LOG_I( "Free list size is %d\n", obj->free_list_cur_size); - LOG_I( "In use list size is %d\n", g_slist_length(obj->in_use_list)); - LOG_I( "High water mark is %lu\n", obj->high_water_mark); - - //Walk the free list and report the contents - LOG_I( "Free list contents:\n"); - g_slist_foreach(obj->free_list, (GFunc) mix_surfacepool_dumpframe, NULL); - - //Walk the in_use list and report the contents - LOG_I( "In Use list contents:\n"); - g_slist_foreach(obj->in_use_list, (GFunc) mix_surfacepool_dumpframe, NULL); - - return MIX_RESULT_SUCCESS; -} diff --git a/mix_video/src/mixsurfacepool.cpp b/mix_video/src/mixsurfacepool.cpp new file mode 100644 index 0000000..746a998 --- /dev/null +++ b/mix_video/src/mixsurfacepool.cpp @@ -0,0 +1,564 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixsurfacepool + * @short_description: MI-X Video Surface Pool + * + * A data object which stores and manipulates a pool of video surfaces. 
+ */ + +#include "mixvideolog.h" +#include "mixsurfacepool.h" +#include "mixvideoframe_private.h" + +#define MIX_LOCK(lock) g_mutex_lock(lock); +#define MIX_UNLOCK(lock) g_mutex_unlock(lock); + + +#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } + +MixSurfacePool::MixSurfacePool() + /* initialize properties here */ + :free_list(NULL) + ,in_use_list(NULL) + ,free_list_max_size(0) + ,free_list_cur_size(0) + ,high_water_mark(0) + ,initialized(FALSE) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) + ,mLock() { +} + +MixSurfacePool::~MixSurfacePool() { +} + +MixParams* MixSurfacePool::dup() const { + MixParams *ret = NULL; + mLock.lock(); + ret = new MixSurfacePool(); + if(NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + mLock.unlock(); + return ret; +} + +gboolean MixSurfacePool::copy(MixParams* target) const { + if(NULL == target) return FALSE; + MixSurfacePool* this_target = MIX_SURFACEPOOL(target); + + mLock.lock(); + this_target->mLock.lock(); + // Free the existing properties + // Duplicate string + this_target->free_list = free_list; + this_target->in_use_list = in_use_list; + this_target->free_list_max_size = free_list_max_size; + this_target->free_list_cur_size = free_list_cur_size; + this_target->high_water_mark = high_water_mark; + + this_target->mLock.unlock(); + mLock.unlock(); + + MixParams::copy(target); + return TRUE; +} + +gboolean MixSurfacePool::equal(MixParams *first) const { + if(NULL == first) return FALSE; + gboolean ret = FALSE; + MixSurfacePool *this_first = MIX_SURFACEPOOL(first); + mLock.lock(); + this_first->mLock.lock(); + if (this_first->free_list == free_list + && this_first->in_use_list == in_use_list + && this_first->free_list_max_size + == free_list_max_size + && this_first->free_list_cur_size + == free_list_cur_size + && this_first->high_water_mark == high_water_mark) { + ret = MixParams::equal(first); + } + this_first->mLock.unlock(); + mLock.unlock(); + return ret; +} + +MixSurfacePool * +mix_surfacepool_new(void) { + return new MixSurfacePool(); +} + +MixSurfacePool * +mix_surfacepool_ref(MixSurfacePool * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} + +/* Class Methods */ + +/** + * mix_surfacepool_initialize: + * @returns: MIX_RESULT_SUCCESS if successful in creating the surface pool + * + * Use this method to create a new surface pool, consisting of a GSList of + * frame objects that represents a pool of surfaces. + */ +MIX_RESULT mix_surfacepool_initialize(MixSurfacePool * obj, + VASurfaceID *surfaces, guint num_surfaces, VADisplay va_display) { + + LOG_V( "Begin\n"); + + if (obj == NULL || surfaces == NULL) { + + LOG_E( + "Error NULL ptrs, obj %x, surfaces %x\n", (guint) obj, + (guint) surfaces); + + return MIX_RESULT_NULL_PTR; + } + + obj->mLock.lock(); + + if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) { + //surface pool is in use; return error; need proper cleanup + //TODO need cleanup here? 
+ + obj->mLock.unlock(); + + return MIX_RESULT_ALREADY_INIT; + } + + if (num_surfaces == 0) { + obj->free_list = NULL; + + obj->in_use_list = NULL; + + obj->free_list_max_size = num_surfaces; + + obj->free_list_cur_size = num_surfaces; + + obj->high_water_mark = 0; + + /* assume it is initialized */ + obj->initialized = TRUE; + + obj->mLock.unlock(); + + return MIX_RESULT_SUCCESS; + } + + // Initialize the free pool with frame objects + + guint i = 0; + MixVideoFrame *frame = NULL; + + for (; i < num_surfaces; i++) { + + //Create a frame object for each surface ID + frame = mix_videoframe_new(); + + if (frame == NULL) { + //TODO need to log an error here and do cleanup + + obj->mLock.unlock(); + + return MIX_RESULT_NO_MEMORY; + } + + // Set the frame ID to the surface ID + mix_videoframe_set_frame_id(frame, surfaces[i]); + // Set the ci frame index to the surface ID + mix_videoframe_set_ci_frame_idx (frame, i); + // Leave timestamp for each frame object as zero + // Set the pool reference in the private data of the frame object + mix_videoframe_set_pool(frame, obj); + + mix_videoframe_set_vadisplay(frame, va_display); + + //Add each frame object to the pool list + obj->free_list = g_slist_append(obj->free_list, frame); + + } + + obj->in_use_list = NULL; + + obj->free_list_max_size = num_surfaces; + + obj->free_list_cur_size = num_surfaces; + + obj->high_water_mark = 0; + + obj->initialized = TRUE; + + obj->mLock.unlock(); + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} + +/** + * mix_surfacepool_put: + * @returns: SUCCESS or FAILURE + * + * Use this method to return a surface to the free pool + */ +MIX_RESULT mix_surfacepool_put(MixSurfacePool * obj, MixVideoFrame * frame) { + + LOG_V( "Begin\n"); + if (obj == NULL || frame == NULL) + return MIX_RESULT_NULL_PTR; + + LOG_V( "Frame id: %d\n", frame->frame_id); + obj->mLock.lock(); + + if (obj->in_use_list == NULL) { + //in use list cannot be empty if a frame is in use + //TODO need better error code for this + + obj->mLock.unlock(); + + return MIX_RESULT_FAIL; + } + + GSList *element = g_slist_find(obj->in_use_list, frame); + if (element == NULL) { + //Integrity error; frame not found in in use list + //TODO need better error code and handling for this + + obj->mLock.unlock(); + + return MIX_RESULT_FAIL; + } else { + //Remove this element from the in_use_list + obj->in_use_list = g_slist_remove_link(obj->in_use_list, element); + + //Concat the element to the free_list and reset the timestamp of the frame + //Note that the surface ID stays valid + mix_videoframe_set_timestamp(frame, 0); + obj->free_list = g_slist_concat(obj->free_list, element); + + //increment the free list count + obj->free_list_cur_size++; + } + + //Note that we do nothing with the ref count for this. We want it to + //stay at 1, which is what triggered it to be added back to the free list. 
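A note on the refcount protocol just described: under this scheme, mix_surfacepool_get() hands the caller a frame at refcount 2, and it is the caller's final mix_videoframe_unref() (dropping the count back to 1) that routes the frame through mix_surfacepool_put() and back onto the free list. A minimal, hypothetical caller-side sketch, assuming the API in this file:

    /* Hypothetical usage sketch -- not part of this patch. */
    MIX_RESULT use_one_surface(MixSurfacePool *pool) {
        MixVideoFrame *frame = NULL;
        MIX_RESULT ret = mix_surfacepool_get(pool, &frame); /* refs the frame: 1 -> 2 */
        if (ret != MIX_RESULT_SUCCESS)
            return ret; /* e.g. MIX_RESULT_OUTOFSURFACES when the pool is drained */
        /* ... decode into / render from frame->frame_id here ... */
        mix_videoframe_unref(frame); /* 2 -> 1: the frame returns to the free pool */
        return MIX_RESULT_SUCCESS;
    }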
+ + obj->mLock.unlock(); + + LOG_V( "End\n"); + return MIX_RESULT_SUCCESS; +} + +/** + * mix_surfacepool_get: + * @returns: SUCCESS or FAILURE + * + * Use this method to get a surface from the free pool + */ +MIX_RESULT mix_surfacepool_get(MixSurfacePool * obj, MixVideoFrame ** frame) { + + LOG_V( "Begin\n"); + + if (obj == NULL || frame == NULL) + return MIX_RESULT_NULL_PTR; + + obj->mLock.lock(); + +#if 0 + if (obj->free_list == NULL) { +#else + if (obj->free_list_cur_size <= 1) { //Keep one surface free at all times for VBLANK bug +#endif + //We are out of surfaces + //TODO need to log this as well + + obj->mLock.unlock(); + + LOG_E( "out of surfaces\n"); + + return MIX_RESULT_OUTOFSURFACES; + } + + //Remove a frame from the free pool + + //We just remove the one at the head, since it's convenient + GSList *element = obj->free_list; + obj->free_list = g_slist_remove_link(obj->free_list, element); + if (element == NULL) { + //Unexpected behavior + //TODO need better error code and handling for this + + obj->mLock.unlock(); + + LOG_E( "Element is null\n"); + + return MIX_RESULT_FAIL; + } else { + //Concat the element to the in_use_list + obj->in_use_list = g_slist_concat(obj->in_use_list, element); + + //TODO replace with proper logging + + LOG_I( "frame refcount%d\n", + MIX_PARAMS(element->data)->ref_count); + + //Set the out frame pointer + *frame = (MixVideoFrame *) element->data; + + LOG_V( "Frame id: %d\n", (*frame)->frame_id); + + //decrement the free list count + obj->free_list_cur_size--; + + //Check the high water mark for surface use + guint size = g_slist_length(obj->in_use_list); + if (size > obj->high_water_mark) + obj->high_water_mark = size; + //TODO Log this high water mark + } + + //Increment the reference count for the frame + mix_videoframe_ref(*frame); + + obj->mLock.unlock(); + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} + + +gint mixframe_compare_index (MixVideoFrame * a, MixVideoFrame * b) +{ + if (a == NULL || b == NULL) + return -1; + if (a->ci_frame_idx == b->ci_frame_idx) + return 0; + else + return -1; +} + +/** + * mix_surfacepool_get: + * @returns: SUCCESS or FAILURE + * + * Use this method to get a surface from the free pool according to the CI frame idx + */ + +MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool * obj, MixVideoFrame ** frame, MixVideoFrame *in_frame) { + + LOG_V( "Begin\n"); + + if (obj == NULL || frame == NULL) + return MIX_RESULT_NULL_PTR; + + obj->mLock.lock(); + + if (obj->free_list == NULL) { + //We are out of surfaces + //TODO need to log this as well + + obj->mLock.unlock(); + + LOG_E( "out of surfaces\n"); + + return MIX_RESULT_OUTOFSURFACES; + } + + //Remove a frame from the free pool + + //We just remove the one at the head, since it's convenient + GSList *element = g_slist_find_custom (obj->free_list, in_frame, (GCompareFunc) mixframe_compare_index); + obj->free_list = g_slist_remove_link(obj->free_list, element); + if (element == NULL) { + //Unexpected behavior + //TODO need better error code and handling for this + + obj->mLock.unlock(); + + LOG_E( "Element is null\n"); + + return MIX_RESULT_FAIL; + } else { + //Concat the element to the in_use_list + obj->in_use_list = g_slist_concat(obj->in_use_list, element); + + //TODO replace with proper logging + + LOG_I( "frame refcount%d\n", + MIX_PARAMS(element->data)->ref_count); + + //Set the out frame pointer + *frame = (MixVideoFrame *) element->data; + + //Check the high water mark for surface use + guint size = g_slist_length(obj->in_use_list); + if 
(size > obj->high_water_mark) + obj->high_water_mark = size; + //TODO Log this high water mark + } + + //Increment the reference count for the frame + mix_videoframe_ref(*frame); + + obj->mLock.unlock(); + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} +/** + * mix_surfacepool_check_available: + * @returns: SUCCESS or FAILURE + * + * Use this method to check availability of getting a surface from the free pool + */ +MIX_RESULT mix_surfacepool_check_available(MixSurfacePool * obj) { + + LOG_V( "Begin\n"); + + if (obj == NULL) + return MIX_RESULT_NULL_PTR; + + obj->mLock.lock(); + + if (obj->initialized == FALSE) + { + LOG_W("surface pool is not initialized, probably configuration data has not been received yet.\n"); + obj->mLock.unlock(); + return MIX_RESULT_NOT_INIT; + } + + +#if 0 + if (obj->free_list == NULL) { +#else + if (obj->free_list_cur_size <= 1) { //Keep one surface free at all times for VBLANK bug +#endif + //We are out of surfaces + + obj->mLock.unlock(); + + LOG_W( + "Returning MIX_RESULT_POOLEMPTY because out of surfaces\n"); + + return MIX_RESULT_POOLEMPTY; + } else { + //Pool is not empty + + obj->mLock.unlock(); + + LOG_I( + "Returning MIX_RESULT_SUCCESS because surfaces are available\n"); + + return MIX_RESULT_SUCCESS; + } + +} + +/** + * mix_surfacepool_deinitialize: + * @returns: SUCCESS or FAILURE + * + * Use this method to teardown a surface pool + */ +MIX_RESULT mix_surfacepool_deinitialize(MixSurfacePool * obj) { + if (obj == NULL) + return MIX_RESULT_NULL_PTR; + + obj->mLock.lock(); + + if ((obj->in_use_list != NULL) || (g_slist_length(obj->free_list) + != obj->free_list_max_size)) { + //TODO better error code + //We have outstanding frame objects in use and they need to be + //freed before we can deinitialize. + + obj->mLock.unlock(); + + return MIX_RESULT_FAIL; + } + + //Now remove frame objects from the list + + MixVideoFrame *frame = NULL; + + while (obj->free_list != NULL) { + //Get the frame object from the head of the list + frame = reinterpret_cast<MixVideoFrame *>(obj->free_list->data); + //frame = g_slist_nth_data(obj->free_list, 0); + + //Release it + mix_videoframe_unref(frame); + + //Delete the head node of the list and store the new head + obj->free_list = g_slist_delete_link(obj->free_list, obj->free_list); + + //Repeat until empty + } + + obj->free_list_max_size = 0; + obj->free_list_cur_size = 0; + + //May want to log this information for tuning + obj->high_water_mark = 0; + + obj->mLock.unlock(); + + return MIX_RESULT_SUCCESS; +} + +#define MIX_SURFACEPOOL_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_SURFACEPOOL(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_SURFACEPOOL_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_SURFACEPOOL(obj)) return MIX_RESULT_FAIL; \ + + +MIX_RESULT +mix_surfacepool_dumpframe(MixVideoFrame *frame) +{ + LOG_I( "\tFrame %x, id %lu, refcount %d, ts %lu\n", (guint)frame, + frame->frame_id, MIX_PARAMS(frame)->ref_count, (gulong) frame->timestamp); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT +mix_surfacepool_dumpprint (MixSurfacePool * obj) +{ + //TODO replace this with proper logging later + + LOG_I( "SURFACE POOL DUMP:\n"); + LOG_I( "Free list size is %d\n", obj->free_list_cur_size); + LOG_I( "In use list size is %d\n", g_slist_length(obj->in_use_list)); + LOG_I( "High water mark is %lu\n", obj->high_water_mark); + + //Walk the free list and report the contents + LOG_I( "Free list contents:\n"); + g_slist_foreach(obj->free_list, (GFunc)
mix_surfacepool_dumpframe, NULL); + + //Walk the in_use list and report the contents + LOG_I( "In Use list contents:\n"); + g_slist_foreach(obj->in_use_list, (GFunc) mix_surfacepool_dumpframe, NULL); + + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixsurfacepool.h b/mix_video/src/mixsurfacepool.h index 0639fbc..3ad099d 100644 --- a/mix_video/src/mixsurfacepool.h +++ b/mix_video/src/mixsurfacepool.h @@ -12,67 +12,23 @@ No license under any patent, copyright, trade secret or other intellectual prope #include #include "mixvideodef.h" #include "mixvideoframe.h" - +#include "mixvideothread.h" #include -G_BEGIN_DECLS - -/** -* MIX_TYPE_SURFACEPOOL: -* -* Get type of class. -*/ -#define MIX_TYPE_SURFACEPOOL (mix_surfacepool_get_type ()) - /** * MIX_SURFACEPOOL: * @obj: object to be type-casted. */ -#define MIX_SURFACEPOOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_SURFACEPOOL, MixSurfacePool)) - -/** -* MIX_IS_SURFACEPOOL: -* @obj: an object. -* -* Checks if the given object is an instance of #MixSurfacePool -*/ -#define MIX_IS_SURFACEPOOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_SURFACEPOOL)) - -/** -* MIX_SURFACEPOOL_CLASS: -* @klass: class to be type-casted. -*/ -#define MIX_SURFACEPOOL_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_SURFACEPOOL, MixSurfacePoolClass)) - -/** -* MIX_IS_SURFACEPOOL_CLASS: -* @klass: a class. -* -* Checks if the given class is #MixSurfacePoolClass -*/ -#define MIX_IS_SURFACEPOOL_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_SURFACEPOOL)) - -/** -* MIX_SURFACEPOOL_GET_CLASS: -* @obj: a #MixSurfacePool object. -* -* Get the class instance of the object. -*/ -#define MIX_SURFACEPOOL_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_SURFACEPOOL, MixSurfacePoolClass)) - -typedef struct _MixSurfacePool MixSurfacePool; -typedef struct _MixSurfacePoolClass MixSurfacePoolClass; +#define MIX_SURFACEPOOL(obj) (reinterpret_cast(obj)) /** * MixSurfacePool: * * MI-X Video Surface Pool object */ -struct _MixSurfacePool +class MixSurfacePool : public MixParams { - /*< public > */ - MixParams parent; - +public: /*< public > */ GSList *free_list; /* list of free surfaces */ GSList *in_use_list; /* list of surfaces in use */ @@ -88,31 +44,15 @@ struct _MixSurfacePool void *reserved4; /*< private > */ - GMutex *objectlock; - + mutable MixVideoMutex mLock; +public: + MixSurfacePool(); + virtual ~MixSurfacePool(); + virtual gboolean copy(MixParams *target) const; + virtual gboolean equal(MixParams* obj) const; + virtual MixParams* dup() const; }; -/** -* MixSurfacePoolClass: -* -* MI-X Video Surface Pool object class -*/ -struct _MixSurfacePoolClass -{ - /*< public > */ - MixParamsClass parent_class; - - /* class members */ -}; - -/** -* mix_surfacepool_get_type: -* @returns: type -* -* Get the type of object. -*/ -GType mix_surfacepool_get_type (void); - /** * mix_surfacepool_new: * @returns: A newly allocated instance of #MixSurfacePool @@ -154,6 +94,4 @@ MIX_RESULT mix_surfacepool_check_available (MixSurfacePool * obj); MIX_RESULT mix_surfacepool_deinitialize (MixSurfacePool * obj); -G_END_DECLS - #endif /* __MIX_SURFACEPOOL_H__ */ diff --git a/mix_video/src/mixvideo.c b/mix_video/src/mixvideo.c deleted file mode 100644 index 56bd264..0000000 --- a/mix_video/src/mixvideo.c +++ /dev/null @@ -1,2193 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. 
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideo - * @short_description: Object to support a single stream decoding or encoding using hardware accelerated decoder/encoder. - * @include: mixvideo.h - * - * #MixVideo objects are created by the MMF/App and utilized for main MI-X API functionality for video. - * - * The MixVideo object handles any of the video formats internally. - * The App/MMF will pass a MixVideoConfigParamsDecH264/MixVideoConfigParamsDecVC1/ - * MixVideoConfigParamsEncH264/etc object to MixVideo in the mix_video_configure() - * call. MixVideoInitParams, MixVideoDecodeParams, MixVideoEncodeParams, and - * MixVideoRenderParams objects will be passed in the mix_video_initialize(), - * mix_video_decode(), mix_video_encode() and mix_video_render() calls respectively. - * - * The application can take the following steps to decode video: - * - * Create a mix_video object using mix_video_new() - * Initialize the object using mix_video_initialize() - * Configure the stream using mix_video_configure() - * Decode frames using mix_video_decode() - * Retrieve the decoded frames using mix_video_get_frame(). The decoded frames can be retrieved in decode order or display order. - * At the presentation time, using the timestamp provided with the decoded frame, render the frame to an X11 Window using mix_video_render(). The frame can be retained for redrawing until the next frame is retrieved. - * When the frame is no longer needed for redrawing, release the frame using mix_video_release_frame(). - * - * - * For encoding, the application can take the following steps to encode video: - * - * Create a mix_video object using mix_video_new() - * Initialize the object using mix_video_initialize() - * Configure the stream using mix_video_configure() - * Encode frames using mix_video_encode() - * Use the encoded data buffers as desired; for example, forward to a muxing component for saving to a file. - * Retrieve the uncompressed frames for display using mix_video_get_frame(). - * At the presentation time, using the timestamp provided with the decoded frame, render the frame to an X11 Window using mix_video_render(). For encode, the frame should not be retained for redrawing after the initial rendering, due to resource limitations. - * Release the frame using mix_video_release_frame(). 
- * - * - */ - -#include <va/va.h> /* libVA */ - -#ifndef ANDROID -#include <X11/Xlib.h> -#include <va/va_x11.h> -#else -#define Display unsigned int -//#include "mix_vagetdisplay.h" - -VADisplay vaGetDisplay ( - void *android_dpy -); - - -#endif - -#include "mixvideolog.h" - -#ifndef ANDROID -#include "mixdisplayx11.h" -#else -#include "mixdisplayandroid.h" -#endif -#include "mixvideoframe.h" - -#include "mixframemanager.h" -#include "mixvideorenderparams.h" -#include "mixvideorenderparams_internal.h" - -#include "mixvideoformat.h" -#include "mixvideoformat_vc1.h" -#include "mixvideoformat_h264.h" -#include "mixvideoformat_mp42.h" - -#include "mixvideoconfigparamsdec_vc1.h" -#include "mixvideoconfigparamsdec_h264.h" -#include "mixvideoconfigparamsdec_mp42.h" - -#include "mixvideoformatenc.h" -#include "mixvideoformatenc_h264.h" -#include "mixvideoformatenc_mpeg4.h" -#include "mixvideoformatenc_preview.h" -#include "mixvideoformatenc_h263.h" - -#include "mixvideoconfigparamsenc_h264.h" -#include "mixvideoconfigparamsenc_mpeg4.h" -#include "mixvideoconfigparamsenc_preview.h" -#include "mixvideoconfigparamsenc_h263.h" - -#include "mixvideo.h" -#include "mixvideo_private.h" - -#ifdef ANDROID -#define mix_strcmp strcmp -#else -#define mix_strcmp g_strcmp0 -#endif - -#define USE_OPAQUE_POINTER - -#ifdef USE_OPAQUE_POINTER -#define MIX_VIDEO_PRIVATE(mix) (MixVideoPrivate *)(mix->context) -#else -#define MIX_VIDEO_PRIVATE(mix) MIX_VIDEO_GET_PRIVATE(mix) -#endif - -#define CHECK_INIT(mix, priv) \ - if (!mix) { \ - return MIX_RESULT_NULL_PTR; \ - } \ - if (!MIX_IS_VIDEO(mix)) { \ - LOG_E( "Not MixVideo\n"); \ - return MIX_RESULT_INVALID_PARAM; \ - } \ - priv = MIX_VIDEO_PRIVATE(mix); \ - if (!priv->initialized) { \ - LOG_E( "Not initialized\n"); \ - return MIX_RESULT_NOT_INIT; \ - } - -#define CHECK_INIT_CONFIG(mix, priv) \ - CHECK_INIT(mix, priv); \ - if (!priv->configured) { \ - LOG_E( "Not configured\n"); \ - return MIX_RESULT_NOT_CONFIGURED; \ - } - -/* - * default implementation of virtual methods - */ - -MIX_RESULT mix_video_get_version_default(MixVideo * mix, guint * major, - guint * minor); - -MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode, - MixVideoInitParams * init_params, MixDrmParams * drm_init_params); - -MIX_RESULT mix_video_deinitialize_default(MixVideo * mix); - -MIX_RESULT mix_video_configure_default(MixVideo * mix, - MixVideoConfigParams * config_params, MixDrmParams * drm_config_params); - -MIX_RESULT mix_video_get_config_default(MixVideo * mix, - MixVideoConfigParams ** config_params); - -MIX_RESULT mix_video_decode_default(MixVideo * mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params); - -MIX_RESULT mix_video_get_frame_default(MixVideo * mix, MixVideoFrame ** frame); - -MIX_RESULT mix_video_release_frame_default(MixVideo * mix, - MixVideoFrame * frame); - -MIX_RESULT mix_video_render_default(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame); - -MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params); - -MIX_RESULT mix_video_flush_default(MixVideo * mix); - -MIX_RESULT mix_video_eos_default(MixVideo * mix); - -MIX_RESULT mix_video_get_state_default(MixVideo * mix, MixState * state); - -MIX_RESULT mix_video_get_mixbuffer_default(MixVideo * mix, MixBuffer ** buf); - -MIX_RESULT mix_video_release_mixbuffer_default(MixVideo * mix, MixBuffer * buf); - -MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix,
guint *max_size); - -MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, - MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); - -static void mix_video_finalize(GObject * obj); -MIX_RESULT mix_video_configure_decode(MixVideo * mix, - MixVideoConfigParamsDec * config_params_dec, - MixDrmParams * drm_config_params); - -MIX_RESULT mix_video_configure_encode(MixVideo * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixDrmParams * drm_config_params); - -G_DEFINE_TYPE( MixVideo, mix_video, G_TYPE_OBJECT); - -static void mix_video_init(MixVideo * self) { - - MixVideoPrivate *priv = MIX_VIDEO_GET_PRIVATE(self); - -#ifdef USE_OPAQUE_POINTER - self->context = priv; -#else - self->context = NULL; -#endif - - /* private structure initialization */ - - mix_video_private_initialize(priv); -} - -static void mix_video_class_init(MixVideoClass * klass) { - GObjectClass *gobject_class = (GObjectClass *) klass; - - gobject_class->finalize = mix_video_finalize; - - /* Register and allocate the space the private structure for this object */ - g_type_class_add_private(gobject_class, sizeof(MixVideoPrivate)); - - klass->get_version_func = mix_video_get_version_default; - klass->initialize_func = mix_video_initialize_default; - klass->deinitialize_func = mix_video_deinitialize_default; - klass->configure_func = mix_video_configure_default; - klass->get_config_func = mix_video_get_config_default; - klass->decode_func = mix_video_decode_default; - klass->get_frame_func = mix_video_get_frame_default; - klass->release_frame_func = mix_video_release_frame_default; - klass->render_func = mix_video_render_default; - klass->encode_func = mix_video_encode_default; - klass->flush_func = mix_video_flush_default; - klass->eos_func = mix_video_eos_default; - klass->get_state_func = mix_video_get_state_default; - klass->get_mix_buffer_func = mix_video_get_mixbuffer_default; - klass->release_mix_buffer_func = mix_video_release_mixbuffer_default; - klass->get_max_coded_buffer_size_func = mix_video_get_max_coded_buffer_size_default; - klass->set_dynamic_enc_config_func = mix_video_set_dynamic_enc_config_default; -} - -MixVideo *mix_video_new(void) { - - MixVideo *ret = (MixVideo *)g_object_new(MIX_TYPE_VIDEO, NULL); - - return ret; -} - -void mix_video_finalize(GObject * obj) { - - /* clean up here. 
*/ - - MixVideo *mix = MIX_VIDEO(obj); - mix_video_deinitialize(mix); -} - -MixVideo * -mix_video_ref(MixVideo * mix) { - return (MixVideo *) g_object_ref(G_OBJECT(mix)); -} - -/* private methods */ - -#define MIXUNREF(obj, unref) if(obj) { unref(obj); obj = NULL; } - -void mix_video_private_initialize(MixVideoPrivate* priv) { - priv->objlock = NULL; - priv->initialized = FALSE; - priv->configured = FALSE; - - /* libVA */ - priv->va_display = NULL; - priv->va_major_version = -1; - priv->va_minor_version = -1; - - /* mix objects */ - priv->frame_manager = NULL; - priv->video_format = NULL; - priv->video_format_enc = NULL; //for encoding - priv->surface_pool = NULL; - priv->buffer_pool = NULL; - - priv->codec_mode = MIX_CODEC_MODE_DECODE; - priv->init_params = NULL; - priv->drm_params = NULL; - priv->config_params = NULL; -} - -void mix_video_private_cleanup(MixVideoPrivate* priv) { - - VAStatus va_status; - - if (!priv) { - return; - } - - if (priv->video_format_enc) { - mix_videofmtenc_deinitialize(priv->video_format_enc); - } - - MIXUNREF(priv->frame_manager, mix_framemanager_unref) - MIXUNREF(priv->video_format, mix_videoformat_unref) - MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref) - //for encoding - MIXUNREF(priv->buffer_pool, mix_bufferpool_unref) - MIXUNREF(priv->surface_pool, mix_surfacepool_unref) -/* MIXUNREF(priv->init_params, mix_videoinitparams_unref) */ - MIXUNREF(priv->drm_params, mix_drmparams_unref) - MIXUNREF(priv->config_params, mix_videoconfigparams_unref) - - /* terminate libVA */ - if (priv->va_display) { - va_status = vaTerminate(priv->va_display); - LOG_V( "vaTerminate\n"); - if (va_status != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaTerminate\n"); - } else { - priv->va_display = NULL; - } - } - - MIXUNREF(priv->init_params, mix_videoinitparams_unref) - - priv->va_major_version = -1; - priv->va_minor_version = -1; - - if (priv->objlock) { - g_mutex_free(priv->objlock); - priv->objlock = NULL; - } - - priv->codec_mode = MIX_CODEC_MODE_DECODE; - priv->initialized = FALSE; - priv->configured = FALSE; -} - -/* The following methods are defined in MI-X API */ - -MIX_RESULT mix_video_get_version_default(MixVideo * mix, guint * major, - guint * minor) { - if (!mix || !major || !minor) { - return MIX_RESULT_NULL_PTR; - } - - if (!MIX_IS_VIDEO(mix)) { - return MIX_RESULT_INVALID_PARAM; - } - - *major = MIXVIDEO_CURRENT - MIXVIDEO_AGE; - *minor = MIXVIDEO_AGE; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode, - MixVideoInitParams * init_params, MixDrmParams * drm_init_params) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - MixDisplay *mix_display = NULL; - - LOG_V( "Begin\n"); - - if (!mix || !init_params) { - LOG_E( "!mix || !init_params\n"); - return MIX_RESULT_NULL_PTR; - } - - if (mode >= MIX_CODEC_MODE_LAST) { - LOG_E("mode >= MIX_CODEC_MODE_LAST\n"); - return MIX_RESULT_INVALID_PARAM; - } - -#if 0 //we have encoding support - /* TODO: We need to support encoding in the future */ - if (mode == MIX_CODEC_MODE_ENCODE) { - LOG_E("mode == MIX_CODEC_MODE_ENCODE\n"); - return MIX_RESULT_NOTIMPL; - } -#endif - - if (!MIX_IS_VIDEO(mix)) { - LOG_E( "!MIX_IS_VIDEO(mix)\n"); - return MIX_RESULT_INVALID_PARAM; - } - - if (!MIX_IS_VIDEOINITPARAMS(init_params)) { - LOG_E("!MIX_IS_VIDEOINITPARAMS(init_params)\n"); - return MIX_RESULT_INVALID_PARAM; - } - - priv = MIX_VIDEO_PRIVATE(mix); - - if (priv->initialized) { - LOG_W( "priv->initialized\n"); - return MIX_RESULT_ALREADY_INIT; - } - - /*
- * Init thread before any threads/sync object are used. - * TODO: If thread is not supported, what we do? - */ - - if (!g_thread_supported()) { - LOG_W("!g_thread_supported()\n"); - g_thread_init(NULL); - } - - /* create object lock */ - priv->objlock = g_mutex_new(); - if (!priv->objlock) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "!priv->objlock\n"); - goto cleanup; - } - - /* clone mode */ - priv->codec_mode = mode; - - /* ref init_params */ - priv->init_params = (MixVideoInitParams *) mix_params_ref(MIX_PARAMS( - init_params)); - if (!priv->init_params) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "!priv->init_params\n"); - goto cleanup; - } - - /* NOTE: we don't do anything with drm_init_params */ - - /* libVA initialization */ - - { - VAStatus va_status; - Display *display = NULL; - ret = mix_videoinitparams_get_display(priv->init_params, &mix_display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get display 1\n"); - goto cleanup; - } -#ifndef ANDROID - if (MIX_IS_DISPLAYX11(mix_display)) { - MixDisplayX11 *mix_displayx11 = MIX_DISPLAYX11(mix_display); - ret = mix_displayx11_get_display(mix_displayx11, &display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get display 2\n"); - goto cleanup; - - } - } else { - /* TODO: add support to other MixDisplay type. For now, just return error!*/ - LOG_E("It is not display x11\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } -#else - if (MIX_IS_DISPLAYANDROID(mix_display)) { - MixDisplayAndroid *mix_displayandroid = MIX_DISPLAYANDROID(mix_display); - ret = mix_displayandroid_get_display(mix_displayandroid, &display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get display 2\n"); - goto cleanup; - - } - } else { - /* TODO: add support to other MixDisplay type. For now, just return error!*/ - LOG_E("It is not display android\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } -#endif - /* Now, we can initialize libVA */ - - LOG_V("Try to get vaDisplay : display = %x\n", display); - priv->va_display = vaGetDisplay(display); - - /* Oops! Fail to get VADisplay */ - if (!priv->va_display) { - ret = MIX_RESULT_FAIL; - LOG_E("Fail to get VADisplay\n"); - goto cleanup; - } - - /* Initialize libVA */ - va_status = vaInitialize(priv->va_display, &priv->va_major_version, - &priv->va_minor_version); - - /* Oops! 
Fail to initialize libVA */ - if (va_status != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Fail to initialize libVA\n"); - goto cleanup; - } - - /* TODO: check the version numbers of libVA */ - - priv->initialized = TRUE; - ret = MIX_RESULT_SUCCESS; - } - - cleanup: - - if (ret != MIX_RESULT_SUCCESS) { - mix_video_private_cleanup(priv); - } - - MIXUNREF(mix_display, mix_display_unref); - - LOG_V( "End\n"); - - return ret; -} - - -MIX_RESULT mix_video_deinitialize_default(MixVideo * mix) { - - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT(mix, priv); - - mix_video_private_cleanup(priv); - - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_video_configure_decode(MixVideo * mix, - MixVideoConfigParamsDec * config_params_dec, MixDrmParams * drm_config_params) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - MixVideoConfigParamsDec *priv_config_params_dec = NULL; - - gchar *mime_type = NULL; - guint fps_n, fps_d; - guint bufpoolsize = 0; - - MixFrameOrderMode frame_order_mode = MIX_FRAMEORDER_MODE_DISPLAYORDER; - MixDisplayOrderMode display_order_mode = MIX_DISPLAY_ORDER_UNKNOWN; - - LOG_V( "Begin\n"); - - CHECK_INIT(mix, priv); - - if (!config_params_dec) { - LOG_E( "!config_params_dec\n"); - return MIX_RESULT_NULL_PTR; - } - - if (!MIX_IS_VIDEOCONFIGPARAMSDEC(config_params_dec)) { - LOG_E("Not a MixVideoConfigParamsDec\n"); - return MIX_RESULT_INVALID_PARAM; - } - - /* ---------------------- begin lock --------------------- */ - g_mutex_lock(priv->objlock); - - /* - * MixVideo has already been configured, it should not be - * re-configured. - * - * TODO: Allow MixVideo re-configuration - */ - if (priv->configured) { - ret = MIX_RESULT_SUCCESS; - LOG_W( "Already configured\n"); - goto cleanup; - } - - /* Make a copy of config_params */ - priv->config_params = (MixVideoConfigParams *) mix_params_dup(MIX_PARAMS( - config_params_dec)); - if (!priv->config_params) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Fail to duplicate config_params\n"); - goto cleanup; - } - - priv_config_params_dec = (MixVideoConfigParamsDec *)priv->config_params; - - /* Get fps, frame order mode and mime type from config_params */ - ret = mix_videoconfigparamsdec_get_mime_type(priv_config_params_dec, &mime_type); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get mime type\n"); - goto cleanup; - } - - LOG_I( "mime : %s\n", mime_type); - -#ifdef MIX_LOG_ENABLE - if (mix_strcmp(mime_type, "video/x-wmv") == 0) { - - LOG_I( "mime : video/x-wmv\n"); - if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { - LOG_I( "VC1 config_param\n"); - } else { - LOG_E("Not VC1 config_param\n"); - } - } -#endif - - ret = mix_videoconfigparamsdec_get_frame_order_mode(priv_config_params_dec, - &frame_order_mode); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get frame order mode\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsdec_get_frame_rate(priv_config_params_dec, &fps_n, - &fps_d); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get frame rate\n"); - goto cleanup; - } - - if (!fps_n) { - ret = MIX_RESULT_FAIL; - LOG_E( "fps_n is 0\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsdec_get_buffer_pool_size(priv_config_params_dec, - &bufpoolsize); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get buffer pool size\n"); - goto cleanup; - } - - /* create frame manager */ - priv->frame_manager = mix_framemanager_new(); - if (!priv->frame_manager) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create frame manager\n"); - goto
cleanup; - } - - if (frame_order_mode == MIX_FRAMEORDER_MODE_DECODEORDER) - { - display_order_mode = MIX_DISPLAY_ORDER_FIFO; - } - else if (mix_strcmp(mime_type, "video/x-wmv") == 0 || - mix_strcmp(mime_type, "video/mpeg") == 0 || - mix_strcmp(mime_type, "video/x-divx") == 0 || - mix_strcmp(mime_type, "video/x-h263") == 0 || - mix_strcmp(mime_type, "video/x-xvid") == 0 ) - { - display_order_mode = MIX_DISPLAY_ORDER_PICTYPE; - } - else - { - //display_order_mode = MIX_DISPLAY_ORDER_TIMESTAMP; - display_order_mode = MIX_DISPLAY_ORDER_PICNUMBER; - } - - /* initialize frame manager */ - ret = mix_framemanager_initialize(priv->frame_manager, - display_order_mode, fps_n, fps_d); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to initialize frame manager\n"); - goto cleanup; - } - - /* create buffer pool */ - priv->buffer_pool = mix_bufferpool_new(); - if (!priv->buffer_pool) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create buffer pool\n"); - goto cleanup; - } - - ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to initialize buffer pool\n"); - goto cleanup; - } - - /* Finally, we can create MixVideoFormat */ - /* What type of MixVideoFormat we need create? */ - - if (mix_strcmp(mime_type, "video/x-wmv") == 0 - && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { - - MixVideoFormat_VC1 *video_format = mix_videoformat_vc1_new(); - if (!video_format) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create VC-1 video format\n"); - goto cleanup; - } - - /* TODO: work specific to VC-1 */ - - priv->video_format = MIX_VIDEOFORMAT(video_format); - - } else if (mix_strcmp(mime_type, "video/x-h264") == 0 - && MIX_IS_VIDEOCONFIGPARAMSDEC_H264(priv_config_params_dec)) { - - MixVideoFormat_H264 *video_format = mix_videoformat_h264_new(); - if (!video_format) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create H.264 video format\n"); - goto cleanup; - } - - /* TODO: work specific to H.264 */ - - priv->video_format = MIX_VIDEOFORMAT(video_format); - - } else if (mix_strcmp(mime_type, "video/mpeg") == 0 || - mix_strcmp(mime_type, "video/x-divx") == 0 || - mix_strcmp(mime_type, "video/x-h263") == 0 || - mix_strcmp(mime_type, "video/x-xvid") == 0 || - mix_strcmp(mime_type, "video/x-dx50") == 0) { - - guint version = 0; - - /* Is this mpeg4:2 ? */ - if (mix_strcmp(mime_type, "video/mpeg") == 0 || - mix_strcmp(mime_type, "video/x-h263") == 0 ) { - - /* - * we don't support mpeg other than mpeg version 4 - */ - if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) { - ret = MIX_RESULT_NOT_SUPPORTED; - goto cleanup; - } - - /* what is the mpeg version ? */ - ret = mix_videoconfigparamsdec_mp42_get_mpegversion( - MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get mpeg version\n"); - goto cleanup; - } - - /* if it is not MPEG4 */ - if (version != 4) { - ret = MIX_RESULT_NOT_SUPPORTED; - goto cleanup; - } - - } else { - - /* config_param shall be MixVideoConfigParamsDecMP42 */ - if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) { - LOG_E("MIX_IS_VIDEOCONFIGPARAMSDEC_MP42 failed.\n"); - ret = MIX_RESULT_NOT_SUPPORTED; - goto cleanup; - } - - /* what is the divx version ?
*/ - ret = mix_videoconfigparamsdec_mp42_get_divxversion( - MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get divx version\n"); - goto cleanup; - } - - /* if it is not divx 4 or 5 */ - if (version != 4 && version != 5) { - LOG_E("Invalid divx version.\n"); - ret = MIX_RESULT_NOT_SUPPORTED; - goto cleanup; - } - } - - MixVideoFormat_MP42 *video_format = mix_videoformat_mp42_new(); - if (!video_format) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create MPEG-4:2 video format\n"); - goto cleanup; - } - - /* TODO: work specific to MPEG-4:2 */ - priv->video_format = MIX_VIDEOFORMAT(video_format); - - } else { - - /* Oops! A format we don't know */ - - ret = MIX_RESULT_FAIL; - LOG_E("Unknown format, we can't handle it\n"); - goto cleanup; - } - - /* initialize MixVideoFormat */ - ret = mix_videofmt_initialize(priv->video_format, priv_config_params_dec, - priv->frame_manager, priv->buffer_pool, &priv->surface_pool, - priv->va_display); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed initialize video format\n"); - goto cleanup; - } - - mix_surfacepool_ref(priv->surface_pool); - - /* decide MixVideoFormat from mime_type*/ - - priv->configured = TRUE; - ret = MIX_RESULT_SUCCESS; - - cleanup: - - if (ret != MIX_RESULT_SUCCESS) { - MIXUNREF(priv->config_params, mix_videoconfigparams_unref); - MIXUNREF(priv->frame_manager, mix_framemanager_unref); - MIXUNREF(priv->buffer_pool, mix_bufferpool_unref); - MIXUNREF(priv->video_format, mix_videoformat_unref); - } - - if (mime_type) { - g_free(mime_type); - } - - g_mutex_unlock(priv->objlock); - /* ---------------------- end lock --------------------- */ - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_video_configure_encode(MixVideo * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixDrmParams * drm_config_params) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - MixVideoConfigParamsEnc *priv_config_params_enc = NULL; - - - gchar *mime_type = NULL; - MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264; - guint bufpoolsize = 0; - - LOG_V( "Begin\n"); - - CHECK_INIT(mix, priv); - - if (!config_params_enc) { - LOG_E("!config_params_enc\n"); - return MIX_RESULT_NULL_PTR; - } - if (!MIX_IS_VIDEOCONFIGPARAMSENC(config_params_enc)) { - LOG_E("Not a MixVideoConfigParams\n"); - return MIX_RESULT_INVALID_PARAM; - } - - /* ---------------------- begin lock --------------------- */ - g_mutex_lock(priv->objlock); - - /* - * MixVideo has already been configured, it should be - * re-configured. 
- * - * TODO: Allow MixVideo re-configuration - */ - if (priv->configured) { - ret = MIX_RESULT_SUCCESS; - LOG_E( "Already configured\n"); - goto cleanup; - } - - /* Make a copy of config_params */ - priv->config_params = (MixVideoConfigParams *) mix_params_dup( - MIX_PARAMS(config_params_enc)); - if (!priv->config_params) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Fail to duplicate config_params\n"); - goto cleanup; - } - - priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params; - - /* Get fps, frame order mode and mime type from config_params */ - ret = mix_videoconfigparamsenc_get_mime_type(priv_config_params_enc, - &mime_type); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get mime type\n"); - goto cleanup; - } - - LOG_I( "mime : %s\n", mime_type); - - ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc, - &encode_format); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get target format\n"); - goto cleanup; - } - - LOG_I( "encode_format : %d\n", - encode_format); - - ret = mix_videoconfigparamsenc_get_buffer_pool_size( - priv_config_params_enc, &bufpoolsize); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get buffer pool size\n"); - goto cleanup; - } - - /* create frame manager */ - priv->frame_manager = mix_framemanager_new(); - if (!priv->frame_manager) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create frame manager\n"); - goto cleanup; - } - - /* initialize frame manager */ - /* frame rate can be any value for encoding. */ - ret = mix_framemanager_initialize(priv->frame_manager, MIX_DISPLAY_ORDER_FIFO, - 1, 1); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to initialize frame manager\n"); - goto cleanup; - } - - /* create buffer pool */ - priv->buffer_pool = mix_bufferpool_new(); - if (!priv->buffer_pool) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create buffer pool\n"); - goto cleanup; - } - - ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to initialize buffer pool\n"); - goto cleanup; - } - - /* Finally, we can create MixVideoFormatEnc */ - /* What type of MixVideoFormatEnc we need create? 
*/ - - if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264 - && MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) { - - MixVideoFormatEnc_H264 *video_format_enc = - mix_videoformatenc_h264_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n"); - goto cleanup; - } - - /* work specific to h264 encode */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_MPEG4 - && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) { - - MixVideoFormatEnc_MPEG4 *video_format_enc = mix_videoformatenc_mpeg4_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create mpeg-4:2 video format\n"); - goto cleanup; - } - - /* work specific to mpeg4 */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263 - && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) { - - MixVideoFormatEnc_H263 *video_format_enc = mix_videoformatenc_h263_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create h.263 video format\n"); - goto cleanup; - } - - /* work specific to h.263 */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW - && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) { - - MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "mix_video_configure_encode: Failed to create preview video format\n"); - goto cleanup; - } - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - else { - - /*unsupported format */ - ret = MIX_RESULT_NOT_SUPPORTED; - LOG_E("Unknown format, we can't handle it\n"); - goto cleanup; - } - - /* initialize MixVideoEncFormat */ - ret = mix_videofmtenc_initialize(priv->video_format_enc, - priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool, - priv->va_display); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed initialize video format\n"); - goto cleanup; - } - - mix_surfacepool_ref(priv->surface_pool); - - priv->configured = TRUE; - ret = MIX_RESULT_SUCCESS; - - cleanup: - - if (ret != MIX_RESULT_SUCCESS) { - MIXUNREF(priv->frame_manager, mix_framemanager_unref); - MIXUNREF(priv->config_params, mix_videoconfigparams_unref); - MIXUNREF(priv->buffer_pool, mix_bufferpool_unref); - MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref); - } - - if (mime_type) { - g_free(mime_type); - } - - g_mutex_unlock(priv->objlock); - /* ---------------------- end lock --------------------- */ - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_video_configure_default(MixVideo * mix, - MixVideoConfigParams * config_params, - MixDrmParams * drm_config_params) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT(mix, priv); - if(!config_params) { - LOG_E("!config_params\n"); - return MIX_RESULT_NULL_PTR; - } - - /*Decoder mode or Encoder mode*/ - if (priv->codec_mode == MIX_CODEC_MODE_DECODE && MIX_IS_VIDEOCONFIGPARAMSDEC(config_params)) { - ret = mix_video_configure_decode(mix, (MixVideoConfigParamsDec*)config_params, NULL); - } else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE && MIX_IS_VIDEOCONFIGPARAMSENC(config_params)) { - ret = 
mix_video_configure_encode(mix, (MixVideoConfigParamsEnc*)config_params, NULL); - } else { - LOG_E("Codec mode not supported\n"); - } - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT mix_video_get_config_default(MixVideo * mix, - MixVideoConfigParams ** config_params) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoPrivate *priv = NULL; - - CHECK_INIT_CONFIG(mix, priv); - - if (!config_params) { - LOG_E( "!config_params\n"); - return MIX_RESULT_NULL_PTR; - } - - /* ---------------------- begin lock --------------------- */ - g_mutex_lock(priv->objlock); - - *config_params = MIX_VIDEOCONFIGPARAMS(mix_params_dup(MIX_PARAMS(priv->config_params))); - if(!*config_params) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to duplicate MixVideoConfigParams\n"); - goto cleanup; - } - - cleanup: - - /* ---------------------- end lock --------------------- */ - g_mutex_unlock(priv->objlock); - - LOG_V( "End\n"); - - return ret; - -} - -MIX_RESULT mix_video_decode_default(MixVideo * mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - if(!bufin || !bufincnt || !decode_params) { - LOG_E( "!bufin || !bufincnt || !decode_params\n"); - return MIX_RESULT_NULL_PTR; - } - - // reset new sequence flag - decode_params->new_sequence = FALSE; - - //First check that we have surfaces available for decode - ret = mix_surfacepool_check_available(priv->surface_pool); - - if (ret == MIX_RESULT_POOLEMPTY) { - LOG_I( "Out of surface\n"); - return MIX_RESULT_OUTOFSURFACES; - } - - g_mutex_lock(priv->objlock); - - ret = mix_videofmt_decode(priv->video_format, bufin, bufincnt, decode_params); - - g_mutex_unlock(priv->objlock); - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_video_get_frame_default(MixVideo * mix, MixVideoFrame ** frame) { - - LOG_V( "Begin\n"); - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - CHECK_INIT_CONFIG(mix, priv); - - if (!frame) { - LOG_E( "!frame\n"); - return MIX_RESULT_NULL_PTR; - } - - /* ---------------------- begin lock --------------------- */ - g_mutex_lock(priv->objlock); - - LOG_V("Calling frame manager dequeue\n"); - - ret = mix_framemanager_dequeue(priv->frame_manager, frame); - - /* ---------------------- end lock --------------------- */ - g_mutex_unlock(priv->objlock); - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_video_release_frame_default(MixVideo * mix, - MixVideoFrame * frame) { - - LOG_V( "Begin\n"); - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - CHECK_INIT_CONFIG(mix, priv); - - if (!frame) { - LOG_E( "!frame\n"); - return MIX_RESULT_NULL_PTR; - } - - /* - * We don't need lock here. MixVideoFrame has lock to - * protect itself. 
- */ -#if 0 - /* ---------------------- begin lock --------------------- */ - g_mutex_lock(priv->objlock); -#endif - - LOG_I("Releasing reference frame %x\n", (guint) frame); - mix_videoframe_unref(frame); - - ret = MIX_RESULT_SUCCESS; - -#if 0 - /* ---------------------- end lock --------------------- */ - g_mutex_unlock(priv->objlock); -#endif - - LOG_V( "End\n"); - - return ret; - -} - -#ifdef ANDROID - -MIX_RESULT mix_video_render_default(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame) { - - return MIX_RESULT_NOTIMPL; -} - -#else -MIX_RESULT mix_video_render_default(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame) { - - LOG_V( "Begin\n"); - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - MixDisplay *mix_display = NULL; - MixDisplayX11 *mix_display_x11 = NULL; - - Display *display = NULL; - - Drawable drawable = 0; - MixRect src_rect, dst_rect; - - VARectangle *va_cliprects = NULL; - guint number_of_cliprects = 0; - - /* VASurfaceID va_surface_id; */ - gulong va_surface_id; - VAStatus va_status; - - gboolean sync_flag = FALSE; - - CHECK_INIT_CONFIG(mix, priv); - - if (!render_params || !frame) { - LOG_E( "!render_params || !frame\n"); - return MIX_RESULT_NULL_PTR; - } - - /* Is this render param valid? */ - if (!MIX_IS_VIDEORENDERPARAMS(render_params)) { - LOG_E("Not MixVideoRenderParams\n"); - return MIX_RESULT_INVALID_PARAM; - } - - /* - * We don't need lock here. priv->va_display may be the only variable - * that seems to need protection. But priv->va_display is initialized - * when the mixvideo object is initialized, and it keeps - * the same value throughout the life of mixvideo. - */ -#if 0 - /* ---------------------- begin lock --------------------- */ - g_mutex_lock(priv->objlock); -#endif - - /* get MixDisplay prop from render param */ - ret = mix_videorenderparams_get_display(render_params, &mix_display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get mix_display\n"); - goto cleanup; - } - - /* Is this MixDisplayX11 ?
*/ - /* TODO: we shall also support MixDisplay other than MixDisplayX11 */ - if (!MIX_IS_DISPLAYX11(mix_display)) { - ret = MIX_RESULT_INVALID_PARAM; - LOG_E( "Not MixDisplayX11\n"); - goto cleanup; - } - - /* cast MixDisplay to MixDisplayX11 */ - mix_display_x11 = MIX_DISPLAYX11(mix_display); - - /* Get Drawable */ - ret = mix_displayx11_get_drawable(mix_display_x11, &drawable); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Failed to get drawable\n"); - goto cleanup; - } - - /* Get Display */ - ret = mix_displayx11_get_display(mix_display_x11, &display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Failed to get display\n"); - goto cleanup; - } - - /* get src_rect */ - ret = mix_videorenderparams_get_src_rect(render_params, &src_rect); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get SOURCE src_rect\n"); - goto cleanup; - } - - /* get dst_rect */ - ret = mix_videorenderparams_get_dest_rect(render_params, &dst_rect); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Failed to get dst_rect\n"); - goto cleanup; - } - - /* get va_cliprects */ - ret = mix_videorenderparams_get_cliprects_internal(render_params, - &va_cliprects, &number_of_cliprects); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get va_cliprects\n"); - goto cleanup; - } - - /* get surface id from frame */ - ret = mix_videoframe_get_frame_id(frame, &va_surface_id); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get va_surface_id\n"); - goto cleanup; - } - guint64 timestamp = 0; - mix_videoframe_get_timestamp(frame, &timestamp); - LOG_V( "Displaying surface ID %d, timestamp %"G_GINT64_FORMAT"\n", (int)va_surface_id, timestamp); - - guint32 frame_structure = 0; - mix_videoframe_get_frame_structure(frame, &frame_structure); - - ret = mix_videoframe_get_sync_flag(frame, &sync_flag); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get sync_flag\n"); - goto cleanup; - } - - if (!sync_flag) { - ret = mix_videoframe_set_sync_flag(frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - goto cleanup; - } - - va_status = vaSyncSurface(priv->va_display, va_surface_id); - if (va_status != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed vaSyncSurface() : va_status = 0x%x\n", va_status); - goto cleanup; - } - } - - - /* TODO: the last param of vaPutSurface is de-interlacing flags, - what should its value be? */ - va_status = vaPutSurface(priv->va_display, (VASurfaceID) va_surface_id, - drawable, src_rect.x, src_rect.y, src_rect.width, src_rect.height, - dst_rect.x, dst_rect.y, dst_rect.width, dst_rect.height, - va_cliprects, number_of_cliprects, frame_structure); - - if (va_status != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed vaPutSurface() : va_status = 0x%x\n", va_status); - goto cleanup; - } - - ret = MIX_RESULT_SUCCESS; - - cleanup: - - MIXUNREF(mix_display, mix_display_unref) - /* MIXUNREF(render_params, mix_videorenderparams_unref)*/ - -#if 0 - /* ---------------------- end lock --------------------- */ - g_mutex_unlock(priv->objlock); -#endif - - LOG_V( "End\n"); - - return ret; - -} -#endif /* ANDROID */ - -MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - if(!bufin || !bufincnt) { //we won't check encode_params here, it's just a placeholder - LOG_E( "!bufin || !bufincnt\n"); - return MIX_RESULT_NULL_PTR; - } - -
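For context, the public mix_video_encode() entry point (mentioned in the SECTION documentation above) dispatches to this default implementation. A hypothetical caller-side sketch, assuming mix has already been initialized and configured for encoding; the bufin setup, iovout sizing, and retry policy here are illustrative assumptions, not library requirements:

    /* Hypothetical encode-call sketch -- not part of this patch. */
    MixIOVec *iovout[1] = { NULL };
    MIX_RESULT ret = mix_video_encode(mix, bufin, 1, iovout, 1, encode_params);
    if (ret == MIX_RESULT_OUTOFSURFACES) {
        /* every surface is in use; release frames and retry */
    }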
//First check that we have surfaces available for decode - ret = mix_surfacepool_check_available(priv->surface_pool); - - if (ret == MIX_RESULT_POOLEMPTY) { - LOG_I( "Out of surface\n"); - return MIX_RESULT_OUTOFSURFACES; - } - - - g_mutex_lock(priv->objlock); - - ret = mix_videofmtenc_encode(priv->video_format_enc, bufin, bufincnt, - iovout, iovoutcnt, encode_params); - - g_mutex_unlock(priv->objlock); - - LOG_V( "End\n"); - return ret; -} - -MIX_RESULT mix_video_flush_default(MixVideo * mix) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - - /* ---------------------- begin lock --------------------- */ - g_mutex_lock(priv->objlock); - - if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) { - ret = mix_videofmt_flush(priv->video_format); - - ret = mix_framemanager_flush(priv->frame_manager); - } else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE - && priv->video_format_enc != NULL) { - /*No framemanager for encoder now*/ - ret = mix_videofmtenc_flush(priv->video_format_enc); - } else { - g_mutex_unlock(priv->objlock); - LOG_E("Invalid video_format/video_format_enc Pointer\n"); - return MIX_RESULT_NULL_PTR; - } - - /* ---------------------- end lock --------------------- */ - g_mutex_unlock(priv->objlock); - - LOG_V( "End\n"); - - return ret; - -} - -MIX_RESULT mix_video_eos_default(MixVideo * mix) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - - /* ---------------------- begin lock --------------------- */ - g_mutex_lock(priv->objlock); - - if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) { - ret = mix_videofmt_eos(priv->video_format); - - /* We should not call mix_framemanager_eos() here. - * MixVideoFormat* is responsible to call this function. - * Commnet the function call here! 
- */ - /* frame manager will set EOS flag to be TRUE */ - /* ret = mix_framemanager_eos(priv->frame_manager); */ - } else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE - && priv->video_format_enc != NULL) { - /*No framemanager now*/ - ret = mix_videofmtenc_eos(priv->video_format_enc); - } else { - g_mutex_unlock(priv->objlock); - LOG_E("Invalid video_format/video_format_enc Pointer\n"); - return MIX_RESULT_NULL_PTR; - } - - /* ---------------------- end lock --------------------- */ - g_mutex_unlock(priv->objlock); - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_video_get_state_default(MixVideo * mix, MixState * state) { - - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - - if (!state) { - LOG_E( "!state\n"); - return MIX_RESULT_NULL_PTR; - } - - *state = MIX_STATE_CONFIGURED; - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_video_get_mixbuffer_default(MixVideo * mix, MixBuffer ** buf) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - - if (!buf) { - LOG_E( "!buf\n"); - return MIX_RESULT_INVALID_PARAM; - } - - /* ---------------------- begin lock --------------------- */ - g_mutex_lock(priv->objlock); - - ret = mix_bufferpool_get(priv->buffer_pool, buf); - - /* ---------------------- end lock --------------------- */ - g_mutex_unlock(priv->objlock); - - LOG_V( "End ret = 0x%x\n", ret); - - return ret; - -} - -MIX_RESULT mix_video_release_mixbuffer_default(MixVideo * mix, MixBuffer * buf) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - - if (!buf) { - LOG_E( "!buf\n"); - return MIX_RESULT_INVALID_PARAM; - } - - /* ---------------------- begin lock --------------------- */ - g_mutex_lock(priv->objlock); - - mix_buffer_unref(buf); - - /* ---------------------- end lock --------------------- */ - g_mutex_unlock(priv->objlock); - - LOG_V( "End\n"); - return ret; - -} - -MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, guint *max_size) -{ - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - if (!mix || !max_size) /* TODO: add other parameter NULL checking */ - { - LOG_E( "!mix || !bufsize\n"); - return MIX_RESULT_NULL_PTR; - } - - CHECK_INIT_CONFIG(mix, priv); - - g_mutex_lock(priv->objlock); - - ret = mix_videofmtenc_get_max_coded_buffer_size(priv->video_format_enc, max_size); - - g_mutex_unlock(priv->objlock); - - LOG_V( "End\n"); - return ret; -} - - -MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, - MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) -{ - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - - if (dynamic_params == NULL) { - LOG_E( - "dynamic_params == NULL\n"); - return MIX_RESULT_FAIL; - } - - MixVideoConfigParamsEnc *priv_config_params_enc = NULL; - if (priv->config_params) { - /* - * FIXME: It would be better to use ref/unref - */ - priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params; - //priv_config_params_enc = mix_videoconfigparamsenc_ref (priv->config_params); - } - else { - LOG_E( - "priv->config_params is invalid\n"); - return MIX_RESULT_FAIL; - } - - g_mutex_lock(priv->objlock); - - switch (params_type) { - case MIX_ENC_PARAMS_BITRATE: - { - ret = mix_videoconfigparamsenc_set_bit_rate (priv_config_params_enc, dynamic_params->bitrate); - if 
(ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_bit_rate\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_INIT_QP: - { - ret = mix_videoconfigparamsenc_set_init_qp (priv_config_params_enc, dynamic_params->init_QP); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_init_qp\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_MIN_QP: - { - ret = mix_videoconfigparamsenc_set_min_qp (priv_config_params_enc, dynamic_params->min_QP); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_min_qp\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_WINDOW_SIZE: - { - ret = mix_videoconfigparamsenc_set_window_size (priv_config_params_enc, dynamic_params->window_size); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_window_size\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_TARGET_PERCENTAGE: - { - ret = mix_videoconfigparamsenc_set_target_percentage (priv_config_params_enc, dynamic_params->target_percentage); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_target_percentage\n"); - goto cleanup; - } - } - break; - - - case MIX_ENC_PARAMS_MTU_SLICE_SIZE: - { - ret = mix_videoconfigparamsenc_set_max_slice_size(priv_config_params_enc, dynamic_params->max_slice_size); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_max_slice_size\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_SLICE_NUM: - { - /* - */ - MixVideoConfigParamsEncH264 * config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); - - ret = mix_videoconfigparamsenc_h264_set_slice_num (config_params_enc_h264, dynamic_params->slice_num); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_h264_set_slice_num\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_I_SLICE_NUM: - { - /* - */ - MixVideoConfigParamsEncH264 * config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); - - ret = mix_videoconfigparamsenc_h264_set_I_slice_num (config_params_enc_h264, dynamic_params->I_slice_num); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_h264_set_I_slice_num\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_P_SLICE_NUM: - { - /* - */ - MixVideoConfigParamsEncH264 * config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); - - ret = mix_videoconfigparamsenc_h264_set_P_slice_num (config_params_enc_h264, dynamic_params->P_slice_num); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_h264_set_P_slice_num\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_IDR_INTERVAL: - { - MixVideoConfigParamsEncH264 * config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); - - ret = mix_videoconfigparamsenc_h264_set_IDR_interval(config_params_enc_h264, dynamic_params->idr_interval); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_h264_set_IDR_interval\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_RC_MODE: - case MIX_ENC_PARAMS_RESOLUTION: - { - /* - * Step 1: Release videofmtenc Object - */ - if (priv->video_format_enc) { - mix_videofmtenc_deinitialize(priv->video_format_enc); - } - - MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref) - - //priv->alloc_surface_cnt = 0; //Surfaces are also released, we need to set alloc_surface_cnt to 0 - - /* - * Please note there maybe issue here for usrptr 
shared buffer mode - */ - - /* - * Step 2: Change configuration parameters (frame size) - */ - - if (params_type == MIX_ENC_PARAMS_RESOLUTION) { - ret = mix_videoconfigparamsenc_set_picture_res (priv_config_params_enc, dynamic_params->width, dynamic_params->height); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_picture_res\n"); - goto cleanup; - } - } - else if (params_type == MIX_ENC_PARAMS_RC_MODE) { - ret = mix_videoconfigparamsenc_set_rate_control(priv_config_params_enc, dynamic_params->rc_mode); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_rate_control\n"); - goto cleanup; - } - } - - - /* - * Step 3: Renew mixvideofmtenc object - */ - - MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264; - - ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc, - &encode_format); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get target format\n"); - goto cleanup; - } - - if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264 - && MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) { - - MixVideoFormatEnc_H264 *video_format_enc = - mix_videoformatenc_h264_new(); - - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n"); - goto cleanup; - } - - /* work specific to h264 encode */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_MPEG4 - && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) { - - MixVideoFormatEnc_MPEG4 *video_format_enc = mix_videoformatenc_mpeg4_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create mpeg-4:2 video format\n"); - goto cleanup; - } - - /* work specific to mpeg4 */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263 - && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) { - - MixVideoFormatEnc_H263 *video_format_enc = mix_videoformatenc_h263_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create h.263 video format\n"); - goto cleanup; - } - - /* work specific to h.263 */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW - && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) { - - MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "mix_video_configure_encode: Failed to create preview video format\n"); - goto cleanup; - } - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - else { - - /*unsupported format */ - ret = MIX_RESULT_NOT_SUPPORTED; - LOG_E("Unknown format, we can't handle it\n"); - goto cleanup; - } - - - /* - * Step 4: Re-initialize and start a new encode session, of course with new resolution value - */ - - /* - * Initialize MixVideoEncFormat - */ - - /* - * If we are using usrptr shared buffer mode, alloc_surfaces/usrptr/alloc_surface_cnt - * will be re-requested by v4l2camsrc, how to differetiate old surface pools and new one - * is a problem. 
- */ - - /* - * priv->alloc_surface_cnt already been reset to 0 after calling mix_videofmtenc_initialize - * For dynamic frame size change, upstream element need to re-call buffer allocation method - * and priv->alloc_surface_cnt will get a new value. - */ - //priv->alloc_surface_cnt = 5; - ret = mix_videofmtenc_initialize(priv->video_format_enc, - priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool, - priv->va_display/*, priv->alloc_surfaces, priv->usrptr, priv->alloc_surface_cnt*/); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed initialize video format\n"); - goto cleanup; - } - - mix_surfacepool_ref(priv->surface_pool); - - - } - break; - case MIX_ENC_PARAMS_GOP_SIZE: - { - ret = mix_videoconfigparamsenc_set_intra_period (priv_config_params_enc, dynamic_params->intra_period); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_intra_period\n"); - goto cleanup; - } - - } - break; - case MIX_ENC_PARAMS_FRAME_RATE: - { - ret = mix_videoconfigparamsenc_set_frame_rate (priv_config_params_enc, dynamic_params->frame_rate_num, dynamic_params->frame_rate_denom); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_frame_rate\n"); - goto cleanup; - } - } - break; - case MIX_ENC_PARAMS_FORCE_KEY_FRAME: - { - /* - * nothing to be done now. - */ - } - break; - - case MIX_ENC_PARAMS_REFRESH_TYPE: - { - ret = mix_videoconfigparamsenc_set_refresh_type(priv_config_params_enc, dynamic_params->refresh_type); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_refresh_type\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_AIR: - { - ret = mix_videoconfigparamsenc_set_AIR_params(priv_config_params_enc, dynamic_params->air_params); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_AIR_params\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_CIR_FRAME_CNT: - { - ret = mix_videoconfigparamsenc_set_CIR_frame_cnt (priv_config_params_enc, dynamic_params->CIR_frame_cnt); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_CIR_frame_cnt\n"); - goto cleanup; - } - - } - break; - - default: - break; - } - - ret = mix_videofmtenc_set_dynamic_enc_config (priv->video_format_enc, priv_config_params_enc, params_type); - -cleanup: - - g_mutex_unlock(priv->objlock); - - LOG_V( "End ret = 0x%x\n", ret); - - return ret; -} -/* - * API functions - */ - -#define CHECK_AND_GET_MIX_CLASS(mix, klass) \ - if (!mix) { \ - return MIX_RESULT_NULL_PTR; \ - } \ - if (!MIX_IS_VIDEO(mix)) { \ - LOG_E( "Not MixVideo\n"); \ - return MIX_RESULT_INVALID_PARAM; \ - } \ - klass = MIX_VIDEO_GET_CLASS(mix); - - -MIX_RESULT mix_video_get_version(MixVideo * mix, guint * major, guint * minor) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->get_version_func) { - return klass->get_version_func(mix, major, minor); - } - return MIX_RESULT_NOTIMPL; - -} - -MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode, - MixVideoInitParams * init_params, MixDrmParams * drm_init_params) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->initialize_func) { - return klass->initialize_func(mix, mode, init_params, drm_init_params); - } - return MIX_RESULT_NOTIMPL; - -} - -MIX_RESULT mix_video_deinitialize(MixVideo * mix) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->deinitialize_func) { - return klass->deinitialize_func(mix); - } - return 
MIX_RESULT_NOTIMPL; -} - -MIX_RESULT mix_video_configure(MixVideo * mix, - MixVideoConfigParams * config_params, - MixDrmParams * drm_config_params) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->configure_func) { - return klass->configure_func(mix, config_params, drm_config_params); - } - return MIX_RESULT_NOTIMPL; -} - -MIX_RESULT mix_video_get_config(MixVideo * mix, - MixVideoConfigParams ** config_params_dec) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->get_config_func) { - return klass->get_config_func(mix, config_params_dec); - } - return MIX_RESULT_NOTIMPL; - -} - -MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, - MixVideoDecodeParams * decode_params) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->decode_func) { - return klass->decode_func(mix, bufin, bufincnt, - decode_params); - } - return MIX_RESULT_NOTIMPL; - -} - -MIX_RESULT mix_video_get_frame(MixVideo * mix, MixVideoFrame ** frame) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->get_frame_func) { - return klass->get_frame_func(mix, frame); - } - return MIX_RESULT_NOTIMPL; - -} - -MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->release_frame_func) { - return klass->release_frame_func(mix, frame); - } - return MIX_RESULT_NOTIMPL; -} - -MIX_RESULT mix_video_render(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->render_func) { - return klass->render_func(mix, render_params, frame); - } - return MIX_RESULT_NOTIMPL; - -} - -MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, - MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->encode_func) { - return klass->encode_func(mix, bufin, bufincnt, iovout, iovoutcnt, - encode_params); - } - return MIX_RESULT_NOTIMPL; - -} - -MIX_RESULT mix_video_flush(MixVideo * mix) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->flush_func) { - return klass->flush_func(mix); - } - return MIX_RESULT_NOTIMPL; -} - -MIX_RESULT mix_video_eos(MixVideo * mix) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->eos_func) { - return klass->eos_func(mix); - } - return MIX_RESULT_NOTIMPL; -} - -MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->get_state_func) { - return klass->get_state_func(mix, state); - } - return MIX_RESULT_NOTIMPL; -} - -MIX_RESULT mix_video_get_mixbuffer(MixVideo * mix, MixBuffer ** buf) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->get_mix_buffer_func) { - return klass->get_mix_buffer_func(mix, buf); - } - return MIX_RESULT_NOTIMPL; -} - -MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf) { - - MixVideoClass *klass = NULL; - CHECK_AND_GET_MIX_CLASS(mix, klass); - - if (klass->release_mix_buffer_func) { - return klass->release_mix_buffer_func(mix, buf); - } - return MIX_RESULT_NOTIMPL; -} - -MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize) { - - 
MixVideoClass *klass = MIX_VIDEO_GET_CLASS(mix); - - if (klass->get_max_coded_buffer_size_func) { - return klass->get_max_coded_buffer_size_func(mix, bufsize); - } - return MIX_RESULT_NOTIMPL; -} - -MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, - MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) -{ - MixVideoClass *klass = MIX_VIDEO_GET_CLASS(mix); - if (klass->set_dynamic_enc_config_func) { - return klass->set_dynamic_enc_config_func(mix, params_type, dynamic_params); - } - return MIX_RESULT_NOTIMPL; - -} diff --git a/mix_video/src/mixvideo.cpp b/mix_video/src/mixvideo.cpp new file mode 100644 index 0000000..795d930 --- /dev/null +++ b/mix_video/src/mixvideo.cpp @@ -0,0 +1,2113 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideo + * @short_description: Object to support a single stream decoding or encoding using hardware accelerated decoder/encoder. + * @include: mixvideo.h + * + * #MixVideo objects are created by the MMF/App and utilized for main MI-X API functionality for video. + * + * The MixVideo object handles any of the video formats internally. + * The App/MMF will pass a MixVideoConfigParamsDecH264/MixVideoConfigParamsDecVC1/ + * MixVideoConfigParamsEncH264/etc object to MixVideo in the mix_video_configure() + * call. MixVideoInitParams, MixVideoDecodeParams, MixVideoEncodeParams, and + * MixVideoRenderParams objects will be passed in the mix_video_initialize(), + * mix_video_decode(), mix_video_encode() and mix_video_render() calls respectively. + * + * The application can take the following steps to decode video: + * + * Create a mix_video object using mix_video_new() + * Initialize the object using mix_video_initialize() + * Configure the stream using mix_video_configure() + * Decode frames using mix_video_decode() + * Retrieve the decoded frames using mix_video_get_frame(). The decoded frames can be retrieved in decode order or display order. + * At the presentation time, using the timestamp provided with the decoded frame, render the frame to an X11 Window using mix_video_render(). The frame can be retained for redrawing until the next frame is retrieved. + * When the frame is no longer needed for redrawing, release the frame using mix_video_release_frame(). 
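+ * + * A minimal sketch of that decode loop (illustrative only: error handling is omitted, and init_params, configparams, bufin/bufincnt, decode_params, render_params and have_more_input stand for objects and state the application already manages): + * + * MixVideo *video = mix_video_new(); + * mix_video_initialize(video, MIX_CODEC_MODE_DECODE, init_params, NULL); + * mix_video_configure(video, configparams, NULL); + * while (have_more_input) { + * mix_video_decode(video, bufin, bufincnt, decode_params); + * MixVideoFrame *frame = NULL; + * if (mix_video_get_frame(video, &frame) == MIX_RESULT_SUCCESS) { + * mix_video_render(video, render_params, frame); + * mix_video_release_frame(video, frame); + * } + * }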
+ * + * + * For encoding, the application can take the following steps to encode video: + * + * Create a mix_video object using mix_video_new() + * Initialize the object using mix_video_initialize() + * Configure the stream using mix_video_configure() + * Encode frames using mix_video_encode() + * Use the encoded data buffers as desired; for example, forward to a muxing component for saving to a file. + * Retrieve the uncompressed frames for display using mix_video_get_frame(). + * At the presentation time, using the timestamp provided with the decoded frame, render the frame to an X11 Window using mix_video_render(). For encode, the frame should not be retained for redrawing after the initial rendering, due to resource limitations. + * Release the frame using mix_video_release_frame(). + * + * + */ + +#include +#include /* libVA */ + +#ifndef ANDROID +#include +#include +#else +#define Display unsigned int +//#include "mix_vagetdisplay.h" + +#ifdef __cplusplus +extern "C" { +#endif + +VADisplay vaGetDisplay ( + void *android_dpy +); + +#ifdef __cplusplus +} +#endif + + +#endif + +#include "mixvideolog.h" + +#ifndef ANDROID +#include "mixdisplayx11.h" +#else +#include "mixdisplayandroid.h" +#endif +#include "mixvideoframe.h" + +#include "mixframemanager.h" +#include "mixvideorenderparams.h" +#include "mixvideorenderparams_internal.h" + +#include "mixvideoformat.h" +#include "mixvideoformat_vc1.h" +#include "mixvideoformat_h264.h" +#include "mixvideoformat_mp42.h" + +#include "mixvideoconfigparamsdec_vc1.h" +#include "mixvideoconfigparamsdec_h264.h" +#include "mixvideoconfigparamsdec_mp42.h" + +#if MIXVIDEO_ENCODE_ENABLE +#include "mixvideoformatenc.h" +#include "mixvideoformatenc_h264.h" +#include "mixvideoformatenc_mpeg4.h" +#include "mixvideoformatenc_preview.h" +#include "mixvideoformatenc_h263.h" + +#include "mixvideoconfigparamsenc_h264.h" +#include "mixvideoconfigparamsenc_mpeg4.h" +#include "mixvideoconfigparamsenc_preview.h" +#include "mixvideoconfigparamsenc_h263.h" +#endif + +#include "mixvideo.h" +#include "mixvideo_private.h" + +#ifdef ANDROID +#define mix_strcmp strcmp +#else +#define mix_strcmp g_strcmp0 +#endif + +#define USE_OPAQUE_POINTER + +#ifdef USE_OPAQUE_POINTER +#define MIX_VIDEO_PRIVATE(mix) (MixVideoPrivate *)(mix->context) +#else +#define MIX_VIDEO_PRIVATE(mix) MIX_VIDEO_GET_PRIVATE(mix) +#endif + +#define CHECK_INIT(mix, priv) \ + if (!mix) { \ + return MIX_RESULT_NULL_PTR; \ + } \ + priv = MIX_VIDEO_PRIVATE(mix); \ + if (!priv->initialized) { \ + LOG_E( "Not initialized\n"); \ + return MIX_RESULT_NOT_INIT; \ + } + +#define CHECK_INIT_CONFIG(mix, priv) \ + CHECK_INIT(mix, priv); \ + if (!priv->configured) { \ + LOG_E( "Not configured\n"); \ + return MIX_RESULT_NOT_CONFIGURED; \ + } + +/* + * default implementation of virtual methods + */ + +MIX_RESULT mix_video_get_version_default(MixVideo * mix, guint * major, + guint * minor); + +MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode, + MixVideoInitParams * init_params, MixDrmParams * drm_init_params); + +MIX_RESULT mix_video_deinitialize_default(MixVideo * mix); + +MIX_RESULT mix_video_configure_default(MixVideo * mix, + MixVideoConfigParams * config_params, MixDrmParams * drm_config_params); + +MIX_RESULT mix_video_get_config_default(MixVideo * mix, + MixVideoConfigParams ** config_params); + +MIX_RESULT mix_video_decode_default(MixVideo * mix, MixBuffer * bufin[], + gint bufincnt, MixVideoDecodeParams * decode_params); + +MIX_RESULT mix_video_get_frame_default(MixVideo * mix, MixVideoFrame 
** frame); + +MIX_RESULT mix_video_release_frame_default(MixVideo * mix, + MixVideoFrame * frame); + +MIX_RESULT mix_video_render_default(MixVideo * mix, + MixVideoRenderParams * render_params, MixVideoFrame *frame); +#if MIXVIDEO_ENCODE_ENABLE +MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params); +#endif +MIX_RESULT mix_video_flush_default(MixVideo * mix); + +MIX_RESULT mix_video_eos_default(MixVideo * mix); + +MIX_RESULT mix_video_get_state_default(MixVideo * mix, MixState * state); + +MIX_RESULT mix_video_get_mixbuffer_default(MixVideo * mix, MixBuffer ** buf); + +MIX_RESULT mix_video_release_mixbuffer_default(MixVideo * mix, MixBuffer * buf); +#if MIXVIDEO_ENCODE_ENABLE +MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, guint *max_size); +#endif +MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, + MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); + +static void mix_video_finalize(MixVideo * obj); +MIX_RESULT mix_video_configure_decode(MixVideo * mix, + MixVideoConfigParamsDec * config_params_dec, + MixDrmParams * drm_config_params); + +#if MIXVIDEO_ENCODE_ENABLE +MIX_RESULT mix_video_configure_encode(MixVideo * mix, + MixVideoConfigParamsEnc * config_params_enc, + MixDrmParams * drm_config_params); +#endif +static void mix_video_init(MixVideo * self); + +MixVideo::MixVideo() { + //context = malloc(sizeof(MixVideoPrivate)); + context = &mPriv; + get_version_func = mix_video_get_version_default; + initialize_func = mix_video_initialize_default; + deinitialize_func = mix_video_deinitialize_default; + configure_func = mix_video_configure_default; + get_config_func = mix_video_get_config_default; + decode_func = mix_video_decode_default; + get_frame_func = mix_video_get_frame_default; + release_frame_func = mix_video_release_frame_default; + render_func = mix_video_render_default; +#if MIXVIDEO_ENCODE_ENABLE + encode_func = mix_video_encode_default; +#endif + flush_func = mix_video_flush_default; + eos_func = mix_video_eos_default; + get_state_func = mix_video_get_state_default; + get_mix_buffer_func = mix_video_get_mixbuffer_default; + release_mix_buffer_func = mix_video_release_mixbuffer_default; +#if MIXVIDEO_ENCODE_ENABLE + get_max_coded_buffer_size_func = mix_video_get_max_coded_buffer_size_default; + set_dynamic_enc_config_func = mix_video_set_dynamic_enc_config_default; +#endif + mix_video_init(this); + + ref_count = 1; + +} + +MixVideo::~MixVideo(){ + mix_video_finalize(this); +} + +static void mix_video_init(MixVideo * self) { + + MixVideoPrivate *priv = MIX_VIDEO_GET_PRIVATE(self); + +#ifdef USE_OPAQUE_POINTER + self->context = priv; +#else + self->context = NULL; +#endif + + /* private structure initialization */ + mix_video_private_initialize(priv); +} + +MixVideo *mix_video_new(void) { + + MixVideo *ret = new MixVideo; + + return ret; +} + +void mix_video_finalize(MixVideo * mix) { + + /* clean up here. 
*/ + + mix_video_deinitialize(mix); +} + +MixVideo * +mix_video_ref(MixVideo * mix) { + if (NULL != mix) + mix->ref_count ++; + return mix; +} + +MixVideo * +mix_video_unref(MixVideo * mix) { + if(NULL != mix) { + mix->ref_count --; + if (mix->ref_count == 0) { + delete mix; + return NULL; + } + } + return mix; +} + +/* private methods */ +#define MIXUNREF(obj, unref) if(obj) { unref(obj); obj = NULL; } + +void mix_video_private_initialize(MixVideoPrivate* priv) { + priv->initialized = FALSE; + priv->configured = FALSE; + + /* libVA */ + priv->va_display = NULL; + priv->va_major_version = -1; + priv->va_minor_version = -1; + + /* mix objects */ + priv->frame_manager = NULL; + priv->video_format = NULL; +#if MIXVIDEO_ENCODE_ENABLE + priv->video_format_enc = NULL; //for encoding +#endif + priv->surface_pool = NULL; + priv->buffer_pool = NULL; + + priv->codec_mode = MIX_CODEC_MODE_DECODE; + priv->init_params = NULL; + priv->drm_params = NULL; + priv->config_params = NULL; +} + +void mix_video_private_cleanup(MixVideoPrivate* priv) { + + VAStatus va_status; + + if (!priv) { + return; + } +#if MIXVIDEO_ENCODE_ENABLE + if (priv->video_format_enc) { + mix_videofmtenc_deinitialize(priv->video_format_enc); + } +#endif + MIXUNREF(priv->frame_manager, mix_framemanager_unref) + MIXUNREF(priv->video_format, mix_videoformat_unref) +#if MIXVIDEO_ENCODE_ENABLE + MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref) +#endif + //for encoding + MIXUNREF(priv->buffer_pool, mix_bufferpool_unref) + MIXUNREF(priv->surface_pool, mix_surfacepool_unref) +/* MIXUNREF(priv->init_params, mix_videoinitparams_unref) */ + MIXUNREF(priv->drm_params, mix_drmparams_unref) + MIXUNREF(priv->config_params, mix_videoconfigparams_unref) + + /* terminate libVA */ + if (priv->va_display) { + va_status = vaTerminate(priv->va_display); + LOG_V( "vaTerminate\n"); + if (va_status != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaTerminate\n"); + } else { + priv->va_display = NULL; + } + } + + MIXUNREF(priv->init_params, mix_videoinitparams_unref) + + priv->va_major_version = -1; + priv->va_minor_version = -1; + priv->codec_mode = MIX_CODEC_MODE_DECODE; + priv->initialized = FALSE; + priv->configured = FALSE; +} + +/* The following methods are defined in MI-X API */ + +MIX_RESULT mix_video_get_version_default(MixVideo * mix, guint * major, + guint * minor) { + if (!mix || !major || !minor) { + return MIX_RESULT_NULL_PTR; + } + + *major = MIXVIDEO_CURRENT - MIXVIDEO_AGE; + *minor = MIXVIDEO_AGE; + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode, + MixVideoInitParams * init_params, MixDrmParams * drm_init_params) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + MixDisplay *mix_display = NULL; + + LOG_V( "Begin\n"); + + if (!mix || !init_params) { + LOG_E( "!mix || !init_params\n"); + return MIX_RESULT_NULL_PTR; + } + + if (mode >= MIX_CODEC_MODE_LAST) { + LOG_E("mode >= MIX_CODEC_MODE_LAST\n"); + return MIX_RESULT_INVALID_PARAM; + } + +#if 0 //we have encoding support + /* TODO: We need to support encoding in the future */ + if (mode == MIX_CODEC_MODE_ENCODE) { + LOG_E("mode == MIX_CODEC_MODE_ENCODE\n"); + return MIX_RESULT_NOTIMPL; + } +#endif + + if (!MIX_IS_VIDEOINITPARAMS(init_params)) { + LOG_E("!MIX_IS_VIDEOINITPARAMS(init_params)\n"); + return MIX_RESULT_INVALID_PARAM; + } + + priv = MIX_VIDEO_PRIVATE(mix); + + if (priv->initialized) { + LOG_W( "priv->initialized\n"); + return MIX_RESULT_ALREADY_INIT; + } + + /* store codec mode */ + priv->codec_mode =
mode; + + /* ref init_params */ + priv->init_params = (MixVideoInitParams *) mix_params_ref(MIX_PARAMS( + init_params)); + if (!priv->init_params) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "!priv->init_params\n"); + goto cleanup; + } + + /* NOTE: we don't do anything with drm_init_params */ + + /* libVA initialization */ + + { + VAStatus va_status; + Display *display = NULL; + ret = mix_videoinitparams_get_display(priv->init_params, &mix_display); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get display 1\n"); + goto cleanup; + } +#ifndef ANDROID + if (MIX_IS_DISPLAYX11(mix_display)) { + MixDisplayX11 *mix_displayx11 = MIX_DISPLAYX11(mix_display); + ret = mix_displayx11_get_display(mix_displayx11, &display); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get display 2\n"); + goto cleanup; + + } + } else { + /* TODO: add support for other MixDisplay types. For now, just return an error!*/ + LOG_E("It is not display x11\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } +#else + if (MIX_IS_DISPLAYANDROID(mix_display)) { + MixDisplayAndroid *mix_displayandroid = MIX_DISPLAYANDROID(mix_display); + ret = mix_displayandroid_get_display(mix_displayandroid, (void**)&display); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get display 2\n"); + goto cleanup; + + } + } else { + /* TODO: add support for other MixDisplay types. For now, just return an error!*/ + LOG_E("It is not display android\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } +#endif + /* Now, we can initialize libVA */ + + LOG_V("Try to get vaDisplay : display = %x\n", display); + priv->va_display = vaGetDisplay(display); + + /* Oops! Fail to get VADisplay */ + if (!priv->va_display) { + ret = MIX_RESULT_FAIL; + LOG_E("Fail to get VADisplay\n"); + goto cleanup; + } + + /* Initialize libVA */ + va_status = vaInitialize(priv->va_display, &priv->va_major_version, + &priv->va_minor_version); + + /* Oops! Fail to initialize libVA */ + if (va_status != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Fail to initialize libVA\n"); + goto cleanup; + } + + /* TODO: check the version numbers of libVA */ + + priv->initialized = TRUE; + ret = MIX_RESULT_SUCCESS; + } + + cleanup: + + if (ret != MIX_RESULT_SUCCESS) { + mix_video_private_cleanup(priv); + } + + MIXUNREF(mix_display, mix_display_unref); + + LOG_V( "End\n"); + + return ret; +} + + +MIX_RESULT mix_video_deinitialize_default(MixVideo * mix) { + + MixVideoPrivate *priv = NULL; + + LOG_V( "Begin\n"); + + CHECK_INIT(mix, priv); + + mix_video_private_cleanup(priv); + + LOG_V( "End\n"); + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_video_configure_decode(MixVideo * mix, + MixVideoConfigParamsDec * config_params_dec, MixDrmParams * drm_config_params) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + MixVideoConfigParamsDec *priv_config_params_dec = NULL; + + gchar *mime_type = NULL; + guint fps_n, fps_d; + guint bufpoolsize = 0; + + MixFrameOrderMode frame_order_mode = MIX_FRAMEORDER_MODE_DISPLAYORDER; + MixDisplayOrderMode display_order_mode = MIX_DISPLAY_ORDER_UNKNOWN; + + LOG_V( "Begin\n"); + + CHECK_INIT(mix, priv); + + if (!config_params_dec) { + LOG_E( "!config_params_dec\n"); + return MIX_RESULT_NULL_PTR; + } + + if (!MIX_IS_VIDEOCONFIGPARAMSDEC(config_params_dec)) { + LOG_E("Not a MixVideoConfigParamsDec\n"); + return MIX_RESULT_INVALID_PARAM; + } + + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); + + /* + * MixVideo has already been configured; re-configuring it + * is not supported yet.
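 + * Until re-configuration is supported, a caller that needs different + * stream parameters must tear the object down with mix_video_deinitialize(), + * then initialize and configure it again.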
+ * + * TODO: Allow MixVideo re-configuration + */ + if (priv->configured) { + ret = MIX_RESULT_SUCCESS; + LOG_W( "Already configured\n"); + goto cleanup; + } + + /* Make a copy of config_params */ + priv->config_params = (MixVideoConfigParams *) mix_params_dup(MIX_PARAMS( + config_params_dec)); + if (!priv->config_params) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Fail to duplicate config_params\n"); + goto cleanup; + } + + priv_config_params_dec = (MixVideoConfigParamsDec *)priv->config_params; + + /* Get fps, frame order mode and mime type from config_params */ + ret = mix_videoconfigparamsdec_get_mime_type(priv_config_params_dec, &mime_type); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get mime type\n"); + goto cleanup; + } + + LOG_I( "mime : %s\n", mime_type); + +#ifdef MIX_LOG_ENABLE + if (mix_strcmp(mime_type, "video/x-wmv") == 0) { + + LOG_I( "mime : video/x-wmv\n"); + if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { + LOG_I( "VC1 config_param\n"); + } else { + LOG_E("Not VC1 config_param\n"); + } + } +#endif + + ret = mix_videoconfigparamsdec_get_frame_order_mode(priv_config_params_dec, + &frame_order_mode); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get frame order mode\n"); + goto cleanup; + } + + ret = mix_videoconfigparamsdec_get_frame_rate(priv_config_params_dec, &fps_n, + &fps_d); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get frame rate\n"); + goto cleanup; + } + + if (!fps_n) { + ret = MIX_RESULT_FAIL; + LOG_E( "fps_n is 0\n"); + goto cleanup; + } + + ret = mix_videoconfigparamsdec_get_buffer_pool_size(priv_config_params_dec, + &bufpoolsize); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get buffer pool size\n"); + goto cleanup; + } + + /* create frame manager */ + priv->frame_manager = mix_framemanager_new(); + if (!priv->frame_manager) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create frame manager\n"); + goto cleanup; + } + + if (frame_order_mode == MIX_FRAMEORDER_MODE_DECODEORDER) + { + display_order_mode = MIX_DISPLAY_ORDER_FIFO; + } + else if (mix_strcmp(mime_type, "video/x-wmv") == 0 || + mix_strcmp(mime_type, "video/mpeg") == 0 || + mix_strcmp(mime_type, "video/x-divx") == 0 || + mix_strcmp(mime_type, "video/x-h263") == 0 || + mix_strcmp(mime_type, "video/x-xvid") == 0 ) + { + display_order_mode = MIX_DISPLAY_ORDER_PICTYPE; + } + else + { + //display_order_mode = MIX_DISPLAY_ORDER_TIMESTAMP; + display_order_mode = MIX_DISPLAY_ORDER_PICNUMBER; + } + + /* initialize frame manager */ + ret = mix_framemanager_initialize(priv->frame_manager, + display_order_mode, fps_n, fps_d); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to initialize frame manager\n"); + goto cleanup; + } + + /* create buffer pool */ + priv->buffer_pool = mix_bufferpool_new(); + if (!priv->buffer_pool) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create buffer pool\n"); + goto cleanup; + } + + ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to initialize buffer pool\n"); + goto cleanup; + } + + /* Finally, we can create MixVideoFormat */ + /* What type of MixVideoFormat do we need to create?
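+ The mapping implemented below is: video/x-wmv -> MixVideoFormat_VC1, + video/x-h264 -> MixVideoFormat_H264, and the MPEG-4 family + (video/mpeg, video/x-divx, video/x-h263, video/x-xvid, video/x-dx50) + -> MixVideoFormat_MP42.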
*/ + + if (mix_strcmp(mime_type, "video/x-wmv") == 0 + && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { + + MixVideoFormat_VC1 *video_format = mix_videoformat_vc1_new(); + if (!video_format) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create VC-1 video format\n"); + goto cleanup; + } + + /* TODO: work specific to VC-1 */ + + priv->video_format = MIX_VIDEOFORMAT(video_format); + + } else if (mix_strcmp(mime_type, "video/x-h264") == 0 + && MIX_IS_VIDEOCONFIGPARAMSDEC_H264(priv_config_params_dec)) { + + MixVideoFormat_H264 *video_format = mix_videoformat_h264_new(); + if (!video_format) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create H.264 video format\n"); + goto cleanup; + } + + /* TODO: work specific to H.264 */ + + priv->video_format = MIX_VIDEOFORMAT(video_format); + + } else if (mix_strcmp(mime_type, "video/mpeg") == 0 || + mix_strcmp(mime_type, "video/x-divx") == 0 || + mix_strcmp(mime_type, "video/x-h263") == 0 || + mix_strcmp(mime_type, "video/x-xvid") == 0 || + mix_strcmp(mime_type, "video/x-dx50") == 0) { + + guint version = 0; + + /* Is this mpeg4:2 ? */ + if (mix_strcmp(mime_type, "video/mpeg") == 0 || + mix_strcmp(mime_type, "video/x-h263") == 0 ) { + + /* + * we don't support mpeg versions other than mpeg version 4 + */ + if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) { + ret = MIX_RESULT_NOT_SUPPORTED; + goto cleanup; + } + + /* what is the mpeg version ? */ + ret = mix_videoconfigparamsdec_mp42_get_mpegversion( + MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get mpeg version\n"); + goto cleanup; + } + + /* if it is not MPEG4 */ + if (version != 4) { + ret = MIX_RESULT_NOT_SUPPORTED; + goto cleanup; + } + + } else { + + /* config_param shall be MixVideoConfigParamsDecMP42 */ + if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) { + LOG_E("MIX_IS_VIDEOCONFIGPARAMSDEC_MP42 failed.\n"); + ret = MIX_RESULT_NOT_SUPPORTED; + goto cleanup; + } + + /* what is the divx version ? */ + ret = mix_videoconfigparamsdec_mp42_get_divxversion( + MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get divx version\n"); + goto cleanup; + } + + /* if it is not divx 4 or 5 */ + if (version != 4 && version != 5) { + LOG_E("Invalid divx version.\n"); + ret = MIX_RESULT_NOT_SUPPORTED; + goto cleanup; + } + } + + MixVideoFormat_MP42 *video_format = mix_videoformat_mp42_new(); + if (!video_format) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create MPEG-4:2 video format\n"); + goto cleanup; + } + + /* TODO: work specific to MPEG-4:2 */ + priv->video_format = MIX_VIDEOFORMAT(video_format); + + } else { + + /* Oops!
A format we don't know */ + + ret = MIX_RESULT_FAIL; + LOG_E("Unknown format, we can't handle it\n"); + goto cleanup; + } + + /* initialize MixVideoFormat */ + ret = mix_videofmt_initialize(priv->video_format, priv_config_params_dec, + priv->frame_manager, priv->buffer_pool, &priv->surface_pool, + priv->va_display); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed initialize video format\n"); + goto cleanup; + } + + mix_surfacepool_ref(priv->surface_pool); + + /* decide MixVideoFormat from mime_type*/ + + priv->configured = TRUE; + ret = MIX_RESULT_SUCCESS; + + cleanup: + + if (ret != MIX_RESULT_SUCCESS) { + MIXUNREF(priv->config_params, mix_videoconfigparams_unref); + MIXUNREF(priv->frame_manager, mix_framemanager_unref); + MIXUNREF(priv->buffer_pool, mix_bufferpool_unref); + MIXUNREF(priv->video_format, mix_videoformat_unref); + } + + if (mime_type) { + g_free(mime_type); + } + + priv->objlock.unlock(); + /* ---------------------- end lock --------------------- */ + + LOG_V( "End\n"); + + return ret; +} +#if MIXVIDEO_ENCODE_ENABLE +MIX_RESULT mix_video_configure_encode(MixVideo * mix, + MixVideoConfigParamsEnc * config_params_enc, + MixDrmParams * drm_config_params) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + MixVideoConfigParamsEnc *priv_config_params_enc = NULL; + + + gchar *mime_type = NULL; + MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264; + guint bufpoolsize = 0; + + LOG_V( "Begin\n"); + + CHECK_INIT(mix, priv); + + if (!config_params_enc) { + LOG_E("!config_params_enc\n"); + return MIX_RESULT_NULL_PTR; + } + if (!MIX_IS_VIDEOCONFIGPARAMSENC(config_params_enc)) { + LOG_E("Not a MixVideoConfigParamsEnc\n"); + return MIX_RESULT_INVALID_PARAM; + } + + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); + + /* + * MixVideo has already been configured; re-configuring it + * is not supported yet. + * + * TODO: Allow MixVideo re-configuration + */ + if (priv->configured) { + ret = MIX_RESULT_SUCCESS; + LOG_W( "Already configured\n"); + goto cleanup; + } + + /* Make a copy of config_params */ + priv->config_params = (MixVideoConfigParams *) mix_params_dup( + MIX_PARAMS(config_params_enc)); + if (!priv->config_params) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Fail to duplicate config_params\n"); + goto cleanup; + } + + priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params; + + /* Get encode format, mime type and buffer pool size from config_params */ + ret = mix_videoconfigparamsenc_get_mime_type(priv_config_params_enc, + &mime_type); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get mime type\n"); + goto cleanup; + } + + LOG_I( "mime : %s\n", mime_type); + + ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc, + &encode_format); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get target format\n"); + goto cleanup; + } + + LOG_I( "encode_format : %d\n", + encode_format); + + ret = mix_videoconfigparamsenc_get_buffer_pool_size( + priv_config_params_enc, &bufpoolsize); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get buffer pool size\n"); + goto cleanup; + } + + /* create frame manager */ + priv->frame_manager = mix_framemanager_new(); + if (!priv->frame_manager) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create frame manager\n"); + goto cleanup; + } + + /* initialize frame manager */ + /* frame rate can be any value for encoding.
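+ (the frame manager is used purely as a FIFO in encode mode, so a + nominal 1/1 frame rate is passed below)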
*/ + ret = mix_framemanager_initialize(priv->frame_manager, MIX_DISPLAY_ORDER_FIFO, + 1, 1); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to initialize frame manager\n"); + goto cleanup; + } + + /* create buffer pool */ + priv->buffer_pool = mix_bufferpool_new(); + if (!priv->buffer_pool) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create buffer pool\n"); + goto cleanup; + } + + ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to initialize buffer pool\n"); + goto cleanup; + } + + /* Finally, we can create MixVideoFormatEnc */ + /* What type of MixVideoFormatEnc we need create? */ + + if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264 + && MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) { + + MixVideoFormatEnc_H264 *video_format_enc = + mix_videoformatenc_h264_new(); + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n"); + goto cleanup; + } + + /* work specific to h264 encode */ + + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + + } + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_MPEG4 + && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) { + + MixVideoFormatEnc_MPEG4 *video_format_enc = mix_videoformatenc_mpeg4_new(); + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("mix_video_configure_encode: Failed to create mpeg-4:2 video format\n"); + goto cleanup; + } + + /* work specific to mpeg4 */ + + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + + } + + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263 + && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) { + + MixVideoFormatEnc_H263 *video_format_enc = mix_videoformatenc_h263_new(); + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("mix_video_configure_encode: Failed to create h.263 video format\n"); + goto cleanup; + } + + /* work specific to h.263 */ + + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + + } + + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW + && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) { + + MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new(); + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "mix_video_configure_encode: Failed to create preview video format\n"); + goto cleanup; + } + + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + + } + else { + + /*unsupported format */ + ret = MIX_RESULT_NOT_SUPPORTED; + LOG_E("Unknown format, we can't handle it\n"); + goto cleanup; + } + + /* initialize MixVideoEncFormat */ + ret = mix_videofmtenc_initialize(priv->video_format_enc, + priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool, + priv->va_display); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed initialize video format\n"); + goto cleanup; + } + + mix_surfacepool_ref(priv->surface_pool); + + priv->configured = TRUE; + ret = MIX_RESULT_SUCCESS; + + cleanup: + + if (ret != MIX_RESULT_SUCCESS) { + MIXUNREF(priv->frame_manager, mix_framemanager_unref); + MIXUNREF(priv->config_params, mix_videoconfigparams_unref); + MIXUNREF(priv->buffer_pool, mix_bufferpool_unref); + MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref); + } + + if (mime_type) { + g_free(mime_type); + } + + priv->objlock.unlock(); + /* ---------------------- end lock --------------------- */ + + LOG_V( "End\n"); + + return ret; +} +#endif +MIX_RESULT 
mix_video_configure_default(MixVideo * mix, + MixVideoConfigParams * config_params, + MixDrmParams * drm_config_params) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + LOG_V( "Begin\n"); + + CHECK_INIT(mix, priv); + if(!config_params) { + LOG_E("!config_params\n"); + return MIX_RESULT_NULL_PTR; + } + + /*Decoder mode or Encoder mode*/ + if (priv->codec_mode == MIX_CODEC_MODE_DECODE && MIX_IS_VIDEOCONFIGPARAMSDEC(config_params)) { + ret = mix_video_configure_decode(mix, (MixVideoConfigParamsDec*)config_params, NULL); + } +#if MIXVIDEO_ENCODE_ENABLE + else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE && MIX_IS_VIDEOCONFIGPARAMSENC(config_params)) { + ret = mix_video_configure_encode(mix, (MixVideoConfigParamsEnc*)config_params, NULL); + } +#endif + else { + LOG_E("Codec mode not supported\n"); + } + + LOG_V( "end\n"); + + return ret; +} + +MIX_RESULT mix_video_get_config_default(MixVideo * mix, + MixVideoConfigParams ** config_params) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoPrivate *priv = NULL; + + CHECK_INIT_CONFIG(mix, priv); + + if (!config_params) { + LOG_E( "!config_params\n"); + return MIX_RESULT_NULL_PTR; + } + + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); + + *config_params = MIX_VIDEOCONFIGPARAMS(mix_params_dup(MIX_PARAMS(priv->config_params))); + if(!*config_params) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to duplicate MixVideoConfigParams\n"); + goto cleanup; + } + + cleanup: + + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); + + LOG_V( "End\n"); + + return ret; + +} + +MIX_RESULT mix_video_decode_default(MixVideo * mix, MixBuffer * bufin[], + gint bufincnt, MixVideoDecodeParams * decode_params) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + LOG_V( "Begin\n"); + + CHECK_INIT_CONFIG(mix, priv); + if(!bufin || !bufincnt || !decode_params) { + LOG_E( "!bufin || !bufincnt || !decode_params\n"); + return MIX_RESULT_NULL_PTR; + } + + // reset new sequence flag + decode_params->new_sequence = FALSE; + + //First check that we have surfaces available for decode + ret = mix_surfacepool_check_available(priv->surface_pool); + + if (ret == MIX_RESULT_POOLEMPTY) { + LOG_I( "Out of surface\n"); + return MIX_RESULT_OUTOFSURFACES; + } + + priv->objlock.lock(); + + ret = mix_videofmt_decode(priv->video_format, bufin, bufincnt, decode_params); + + priv->objlock.unlock(); + + LOG_V( "End\n"); + + return ret; +} + +MIX_RESULT mix_video_get_frame_default(MixVideo * mix, MixVideoFrame ** frame) { + + LOG_V( "Begin\n"); + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + CHECK_INIT_CONFIG(mix, priv); + + if (!frame) { + LOG_E( "!frame\n"); + return MIX_RESULT_NULL_PTR; + } + + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); + + LOG_V("Calling frame manager dequeue\n"); + + ret = mix_framemanager_dequeue(priv->frame_manager, frame); + + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); + + LOG_V( "End\n"); + + return ret; +} + +MIX_RESULT mix_video_release_frame_default(MixVideo * mix, + MixVideoFrame * frame) { + + LOG_V( "Begin\n"); + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + CHECK_INIT_CONFIG(mix, priv); + + if (!frame) { + LOG_E( "!frame\n"); + return MIX_RESULT_NULL_PTR; + } + + /* + * We don't need lock here. MixVideoFrame has lock to + * protect itself. 
+ */ +#if 0 + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); +#endif + + LOG_I("Releasing reference frame %x\n", (guint) frame); + mix_videoframe_unref(frame); + + ret = MIX_RESULT_SUCCESS; + +#if 0 + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); +#endif + + LOG_V( "End\n"); + + return ret; + +} + +#ifdef ANDROID + +MIX_RESULT mix_video_render_default(MixVideo * mix, + MixVideoRenderParams * render_params, MixVideoFrame *frame) { + + return MIX_RESULT_NOTIMPL; +} + +#else +MIX_RESULT mix_video_render_default(MixVideo * mix, + MixVideoRenderParams * render_params, MixVideoFrame *frame) { + + LOG_V( "Begin\n"); + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + MixDisplay *mix_display = NULL; + MixDisplayX11 *mix_display_x11 = NULL; + + Display *display = NULL; + + Drawable drawable = 0; + MixRect src_rect, dst_rect; + + VARectangle *va_cliprects = NULL; + guint number_of_cliprects = 0; + + /* VASurfaceID va_surface_id; */ + gulong va_surface_id; + VAStatus va_status; + + gboolean sync_flag = FALSE; + + CHECK_INIT_CONFIG(mix, priv); + + if (!render_params || !frame) { + LOG_E( "!render_params || !frame\n"); + return MIX_RESULT_NULL_PTR; + } + + /* Is this render param valid? */ + if (!MIX_IS_VIDEORENDERPARAMS(render_params)) { + LOG_E("Not MixVideoRenderParams\n"); + return MIX_RESULT_INVALID_PARAM; + } + + /* + * We don't need a lock here. priv->va_display may be the only variable + * that seems to need protection. But priv->va_display is initialized + * when the mixvideo object is initialized, and it keeps + * the same value throughout the life of mixvideo. + */ +#if 0 + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); +#endif + + /* get MixDisplay prop from render param */ + ret = mix_videorenderparams_get_display(render_params, &mix_display); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get mix_display\n"); + goto cleanup; + } + + /* Is this MixDisplayX11 ?
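+ (On Android builds the stub above returns MIX_RESULT_NOTIMPL instead; + only X11 displays are handled in this path.)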
*/ + /* TODO: we shall also support MixDisplay other than MixDisplayX11 */ + if (!MIX_IS_DISPLAYX11(mix_display)) { + ret = MIX_RESULT_INVALID_PARAM; + LOG_E( "Not MixDisplayX11\n"); + goto cleanup; + } + + /* cast MixDisplay to MixDisplayX11 */ + mix_display_x11 = MIX_DISPLAYX11(mix_display); + + /* Get Drawable */ + ret = mix_displayx11_get_drawable(mix_display_x11, &drawable); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Failed to get drawable\n"); + goto cleanup; + } + + /* Get Display */ + ret = mix_displayx11_get_display(mix_display_x11, &display); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Failed to get display\n"); + goto cleanup; + } + + /* get src_rect */ + ret = mix_videorenderparams_get_src_rect(render_params, &src_rect); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get SOURCE src_rect\n"); + goto cleanup; + } + + /* get dst_rect */ + ret = mix_videorenderparams_get_dest_rect(render_params, &dst_rect); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Failed to get dst_rect\n"); + goto cleanup; + } + + /* get va_cliprects */ + ret = mix_videorenderparams_get_cliprects_internal(render_params, + &va_cliprects, &number_of_cliprects); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get va_cliprects\n"); + goto cleanup; + } + + /* get surface id from frame */ + ret = mix_videoframe_get_frame_id(frame, &va_surface_id); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get va_surface_id\n"); + goto cleanup; + } + guint64 timestamp = 0; + mix_videoframe_get_timestamp(frame, &timestamp); + LOG_V( "Displaying surface ID %d, timestamp %"G_GINT64_FORMAT"\n", (int)va_surface_id, timestamp); + + guint32 frame_structure = 0; + mix_videoframe_get_frame_structure(frame, &frame_structure); + + ret = mix_videoframe_get_sync_flag(frame, &sync_flag); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get sync_flag\n"); + goto cleanup; + } + + if (!sync_flag) { + ret = mix_videoframe_set_sync_flag(frame, TRUE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + goto cleanup; + } + + va_status = vaSyncSurface(priv->va_display, va_surface_id); + if (va_status != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Failed vaSyncSurface() : va_status = 0x%x\n", va_status); + goto cleanup; + } + } + + + /* TODO: the last param of vaPutSurface is de-interlacing flags; + what shall its value be? */ + va_status = vaPutSurface(priv->va_display, (VASurfaceID) va_surface_id, + drawable, src_rect.x, src_rect.y, src_rect.width, src_rect.height, + dst_rect.x, dst_rect.y, dst_rect.width, dst_rect.height, + va_cliprects, number_of_cliprects, frame_structure); + + if (va_status != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Failed vaPutSurface() : va_status = 0x%x\n", va_status); + goto cleanup; + } + + ret = MIX_RESULT_SUCCESS; + + cleanup: + + MIXUNREF(mix_display, mix_display_unref) + /* MIXUNREF(render_params, mix_videorenderparams_unref)*/ + +#if 0 + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); +#endif + + LOG_V( "End\n"); + + return ret; + +} +#endif /* ANDROID */ + +#if MIXVIDEO_ENCODE_ENABLE +MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + LOG_V( "Begin\n"); + + CHECK_INIT_CONFIG(mix, priv); + if(!bufin || !bufincnt) { //we won't check encode_params here, it's just a placeholder + LOG_E( "!bufin || !bufincnt\n"); + return
MIX_RESULT_NULL_PTR; + } + + //First check that we have surfaces available for decode + ret = mix_surfacepool_check_available(priv->surface_pool); + + if (ret == MIX_RESULT_POOLEMPTY) { + LOG_I( "Out of surface\n"); + return MIX_RESULT_OUTOFSURFACES; + } + + + priv->objlock.lock(); + + ret = mix_videofmtenc_encode(priv->video_format_enc, bufin, bufincnt, + iovout, iovoutcnt, encode_params); + + priv->objlock.unlock(); + + LOG_V( "End\n"); + return ret; +} +#endif +MIX_RESULT mix_video_flush_default(MixVideo * mix) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + LOG_V( "Begin\n"); + + CHECK_INIT_CONFIG(mix, priv); + + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); + + if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) { + ret = mix_videofmt_flush(priv->video_format); + + ret = mix_framemanager_flush(priv->frame_manager); + } +#if MIXVIDEO_ENCODE_ENABLE + else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE + && priv->video_format_enc != NULL) { + /*No framemanager for encoder now*/ + ret = mix_videofmtenc_flush(priv->video_format_enc); + } +#endif + else { + priv->objlock.unlock(); + LOG_E("Invalid video_format/video_format_enc Pointer\n"); + return MIX_RESULT_NULL_PTR; + } + + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); + + LOG_V( "End\n"); + + return ret; + +} + +MIX_RESULT mix_video_eos_default(MixVideo * mix) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + LOG_V( "Begin\n"); + + CHECK_INIT_CONFIG(mix, priv); + + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); + + if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) { + ret = mix_videofmt_eos(priv->video_format); + + /* We should not call mix_framemanager_eos() here. + * MixVideoFormat* is responsible to call this function. + * Commnet the function call here! 
+ */ + /* frame manager will set EOS flag to be TRUE */ + /* ret = mix_framemanager_eos(priv->frame_manager); */ + } +#if MIXVIDEO_ENCODE_ENABLE + else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE + && priv->video_format_enc != NULL) { + /*No framemanager now*/ + ret = mix_videofmtenc_eos(priv->video_format_enc); + } +#endif + else { + priv->objlock.unlock(); + LOG_E("Invalid video_format/video_format_enc Pointer\n"); + return MIX_RESULT_NULL_PTR; + } + + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); + + LOG_V( "End\n"); + + return ret; +} + +MIX_RESULT mix_video_get_state_default(MixVideo * mix, MixState * state) { + + MixVideoPrivate *priv = NULL; + + LOG_V( "Begin\n"); + + CHECK_INIT_CONFIG(mix, priv); + + if (!state) { + LOG_E( "!state\n"); + return MIX_RESULT_NULL_PTR; + } + + *state = MIX_STATE_CONFIGURED; + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_video_get_mixbuffer_default(MixVideo * mix, MixBuffer ** buf) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + LOG_V( "Begin\n"); + + CHECK_INIT_CONFIG(mix, priv); + + if (!buf) { + LOG_E( "!buf\n"); + return MIX_RESULT_INVALID_PARAM; + } + + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); + + ret = mix_bufferpool_get(priv->buffer_pool, buf); + + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); + + LOG_V( "End ret = 0x%x\n", ret); + + return ret; + +} + +MIX_RESULT mix_video_release_mixbuffer_default(MixVideo * mix, MixBuffer * buf) { + + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + LOG_V( "Begin\n"); + + CHECK_INIT_CONFIG(mix, priv); + + if (!buf) { + LOG_E( "!buf\n"); + return MIX_RESULT_INVALID_PARAM; + } + + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); + + mix_buffer_unref(buf); + + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); + + LOG_V( "End\n"); + return ret; + +} + +#if MIXVIDEO_ENCODE_ENABLE +MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, guint *max_size) +{ + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + LOG_V( "Begin\n"); + + if (!mix || !max_size) /* TODO: add other parameter NULL checking */ + { + LOG_E( "!mix || !bufsize\n"); + return MIX_RESULT_NULL_PTR; + } + + CHECK_INIT_CONFIG(mix, priv); + + priv->objlock.lock(); + + ret = mix_videofmtenc_get_max_coded_buffer_size(priv->video_format_enc, max_size); + + priv->objlock.unlock(); + + LOG_V( "End\n"); + return ret; +} + + +MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, + MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) +{ + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + + LOG_V( "Begin\n"); + + CHECK_INIT_CONFIG(mix, priv); + + if (dynamic_params == NULL) { + LOG_E( + "dynamic_params == NULL\n"); + return MIX_RESULT_FAIL; + } + + MixVideoConfigParamsEnc *priv_config_params_enc = NULL; + if (priv->config_params) { + /* + * FIXME: It would be better to use ref/unref + */ + priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params; + //priv_config_params_enc = mix_videoconfigparamsenc_ref (priv->config_params); + } + else { + LOG_E( + "priv->config_params is invalid\n"); + return MIX_RESULT_FAIL; + } + + priv->objlock.lock(); + + switch (params_type) { + case MIX_ENC_PARAMS_BITRATE: + { + ret = mix_videoconfigparamsenc_set_bit_rate (priv_config_params_enc, dynamic_params->bitrate); 
+ if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_bit_rate\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_INIT_QP: + { + ret = mix_videoconfigparamsenc_set_init_qp (priv_config_params_enc, dynamic_params->init_QP); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_init_qp\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_MIN_QP: + { + ret = mix_videoconfigparamsenc_set_min_qp (priv_config_params_enc, dynamic_params->min_QP); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_min_qp\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_WINDOW_SIZE: + { + ret = mix_videoconfigparamsenc_set_window_size (priv_config_params_enc, dynamic_params->window_size); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_window_size\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_TARGET_PERCENTAGE: + { + ret = mix_videoconfigparamsenc_set_target_percentage (priv_config_params_enc, dynamic_params->target_percentage); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_target_percentage\n"); + goto cleanup; + } + } + break; + + + case MIX_ENC_PARAMS_MTU_SLICE_SIZE: + { + ret = mix_videoconfigparamsenc_set_max_slice_size(priv_config_params_enc, dynamic_params->max_slice_size); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_max_slice_size\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_SLICE_NUM: + { + /* + */ + MixVideoConfigParamsEncH264 * config_params_enc_h264 = + MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); + + ret = mix_videoconfigparamsenc_h264_set_slice_num (config_params_enc_h264, dynamic_params->slice_num); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_h264_set_slice_num\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_I_SLICE_NUM: + { + /* + */ + MixVideoConfigParamsEncH264 * config_params_enc_h264 = + MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); + + ret = mix_videoconfigparamsenc_h264_set_I_slice_num (config_params_enc_h264, dynamic_params->I_slice_num); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_h264_set_I_slice_num\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_P_SLICE_NUM: + { + /* + */ + MixVideoConfigParamsEncH264 * config_params_enc_h264 = + MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); + + ret = mix_videoconfigparamsenc_h264_set_P_slice_num (config_params_enc_h264, dynamic_params->P_slice_num); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_h264_set_P_slice_num\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_IDR_INTERVAL: + { + MixVideoConfigParamsEncH264 * config_params_enc_h264 = + MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); + + ret = mix_videoconfigparamsenc_h264_set_IDR_interval(config_params_enc_h264, dynamic_params->idr_interval); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_h264_set_IDR_interval\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_RC_MODE: + case MIX_ENC_PARAMS_RESOLUTION: + { + /* + * Step 1: Release videofmtenc Object + */ + if (priv->video_format_enc) { + mix_videofmtenc_deinitialize(priv->video_format_enc); + } + + MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref) + + //priv->alloc_surface_cnt = 0; //Surfaces are also released, we need to set alloc_surface_cnt to 0 + + /* + * Please note there maybe issue here for 
usrptr shared buffer mode + */ + + /* + * Step 2: Change configuration parameters (frame size) + */ + + if (params_type == MIX_ENC_PARAMS_RESOLUTION) { + ret = mix_videoconfigparamsenc_set_picture_res (priv_config_params_enc, dynamic_params->width, dynamic_params->height); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_picture_res\n"); + goto cleanup; + } + } + else if (params_type == MIX_ENC_PARAMS_RC_MODE) { + ret = mix_videoconfigparamsenc_set_rate_control(priv_config_params_enc, dynamic_params->rc_mode); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_rate_control\n"); + goto cleanup; + } + } + + + /* + * Step 3: Renew mixvideofmtenc object + */ + + MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264; + + ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc, + &encode_format); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get target format\n"); + goto cleanup; + } + + if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264 + && MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) { + + MixVideoFormatEnc_H264 *video_format_enc = + mix_videoformatenc_h264_new(); + + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n"); + goto cleanup; + } + + /* work specific to h264 encode */ + + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + + } + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_MPEG4 + && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) { + + MixVideoFormatEnc_MPEG4 *video_format_enc = mix_videoformatenc_mpeg4_new(); + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("mix_video_configure_encode: Failed to create mpeg-4:2 video format\n"); + goto cleanup; + } + + /* work specific to mpeg4 */ + + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + + } + + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263 + && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) { + + MixVideoFormatEnc_H263 *video_format_enc = mix_videoformatenc_h263_new(); + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("mix_video_configure_encode: Failed to create h.263 video format\n"); + goto cleanup; + } + + /* work specific to h.263 */ + + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + + } + + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW + && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) { + + MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new(); + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "mix_video_configure_encode: Failed to create preview video format\n"); + goto cleanup; + } + + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + + } + else { + + /*unsupported format */ + ret = MIX_RESULT_NOT_SUPPORTED; + LOG_E("Unknown format, we can't handle it\n"); + goto cleanup; + } + + + /* + * Step 4: Re-initialize and start a new encode session, of course with new resolution value + */ + + /* + * Initialize MixVideoEncFormat + */ + + /* + * If we are using usrptr shared buffer mode, alloc_surfaces/usrptr/alloc_surface_cnt + * will be re-requested by v4l2camsrc, how to differetiate old surface pools and new one + * is a problem. 
+ */ + + /* + * priv->alloc_surface_cnt already been reset to 0 after calling mix_videofmtenc_initialize + * For dynamic frame size change, upstream element need to re-call buffer allocation method + * and priv->alloc_surface_cnt will get a new value. + */ + //priv->alloc_surface_cnt = 5; + ret = mix_videofmtenc_initialize(priv->video_format_enc, + priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool, + priv->va_display/*, priv->alloc_surfaces, priv->usrptr, priv->alloc_surface_cnt*/); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed initialize video format\n"); + goto cleanup; + } + + mix_surfacepool_ref(priv->surface_pool); + + + } + break; + case MIX_ENC_PARAMS_GOP_SIZE: + { + ret = mix_videoconfigparamsenc_set_intra_period (priv_config_params_enc, dynamic_params->intra_period); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_intra_period\n"); + goto cleanup; + } + + } + break; + case MIX_ENC_PARAMS_FRAME_RATE: + { + ret = mix_videoconfigparamsenc_set_frame_rate (priv_config_params_enc, dynamic_params->frame_rate_num, dynamic_params->frame_rate_denom); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_frame_rate\n"); + goto cleanup; + } + } + break; + case MIX_ENC_PARAMS_FORCE_KEY_FRAME: + { + /* + * nothing to be done now. + */ + } + break; + + case MIX_ENC_PARAMS_REFRESH_TYPE: + { + ret = mix_videoconfigparamsenc_set_refresh_type(priv_config_params_enc, dynamic_params->refresh_type); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_refresh_type\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_AIR: + { + ret = mix_videoconfigparamsenc_set_AIR_params(priv_config_params_enc, dynamic_params->air_params); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_AIR_params\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_CIR_FRAME_CNT: + { + ret = mix_videoconfigparamsenc_set_CIR_frame_cnt (priv_config_params_enc, dynamic_params->CIR_frame_cnt); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_CIR_frame_cnt\n"); + goto cleanup; + } + + } + break; + + default: + break; + } + + ret = mix_videofmtenc_set_dynamic_enc_config (priv->video_format_enc, priv_config_params_enc, params_type); + +cleanup: + + priv->objlock.unlock(); + + LOG_V( "End ret = 0x%x\n", ret); + + return ret; +} +#endif +/* + * API functions + */ + +#define CHECK_AND_GET_MIX_CLASS(mix, klass) \ + if (!mix) { \ + return MIX_RESULT_NULL_PTR; \ + } \ + if (!MIX_IS_VIDEO(mix)) { \ + LOG_E( "Not MixVideo\n"); \ + return MIX_RESULT_INVALID_PARAM; \ + } + + +MIX_RESULT mix_video_get_version(MixVideo * mix, guint * major, guint * minor) { + + return mix->get_version_func(mix, major, minor); + +} + +MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode, + MixVideoInitParams * init_params, MixDrmParams * drm_init_params) { + + return mix->initialize_func(mix, mode, init_params, drm_init_params); +} + +MIX_RESULT mix_video_deinitialize(MixVideo * mix) { + + return mix->deinitialize_func(mix); +} + +MIX_RESULT mix_video_configure(MixVideo * mix, + MixVideoConfigParams * config_params, + MixDrmParams * drm_config_params) { + + return mix->configure_func(mix, config_params, drm_config_params); +} + +MIX_RESULT mix_video_get_config(MixVideo * mix, + MixVideoConfigParams ** config_params_dec) { + + return mix->get_config_func(mix, config_params_dec); + +} + +MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, + 
MixVideoDecodeParams * decode_params) { + + return mix->decode_func(mix, bufin, bufincnt, + decode_params); + +} + +MIX_RESULT mix_video_get_frame(MixVideo * mix, MixVideoFrame ** frame) { + + return mix->get_frame_func(mix, frame); + +} + +MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame) { + + return mix->release_frame_func(mix, frame); +} + +MIX_RESULT mix_video_render(MixVideo * mix, + MixVideoRenderParams * render_params, MixVideoFrame *frame) { + + return mix->render_func(mix, render_params, frame); +} + +#if MIXVIDEO_ENCODE_ENABLE +MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, + MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params) { + + return mix->encode_func(mix, bufin, bufincnt, iovout, iovoutcnt, + encode_params); +} +#else +MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, + MixIOVec * iovout[], gint iovoutcnt, + MixParams * encode_params) { + return MIX_RESULT_NOT_SUPPORTED; +} +#endif + +MIX_RESULT mix_video_flush(MixVideo * mix) { + + return mix->flush_func(mix); +} + +MIX_RESULT mix_video_eos(MixVideo * mix) { + + return mix->eos_func(mix); +} + +MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state) { + return mix->get_state_func(mix, state); +} + +MIX_RESULT mix_video_get_mixbuffer(MixVideo * mix, MixBuffer ** buf) { + + return mix->get_mix_buffer_func(mix, buf); +} + +MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf) { + + return mix->release_mix_buffer_func(mix, buf); +} + +MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize) { +#if MIXVIDEO_ENCODE_ENABLE + return mix->get_max_coded_buffer_size_func(mix, bufsize); +#else + return MIX_RESULT_NOT_SUPPORTED; +#endif +} + +MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, + MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) { +#if MIXVIDEO_ENCODE_ENABLE + return mix->set_dynamic_enc_config_func(mix, params_type, dynamic_params); +#else + return MIX_RESULT_NOT_SUPPORTED; +#endif +} \ No newline at end of file diff --git a/mix_video/src/mixvideo.h b/mix_video/src/mixvideo.h index 678ba65..93a3038 100644 --- a/mix_video/src/mixvideo.h +++ b/mix_video/src/mixvideo.h @@ -14,28 +14,18 @@ #include #include "mixvideoinitparams.h" #include "mixvideoconfigparamsdec.h" -#include "mixvideoconfigparamsenc.h" #include "mixvideodecodeparams.h" +#if MIXVIDEO_ENCODE_ENABLE +#include "mixvideoconfigparamsenc.h" #include "mixvideoencodeparams.h" +#endif #include "mixvideorenderparams.h" #include "mixvideocaps.h" #include "mixbuffer.h" +#include "mixvideo_private.h" -G_BEGIN_DECLS - -/* - * Type macros. 
- */ -#define MIX_TYPE_VIDEO (mix_video_get_type ()) -#define MIX_VIDEO(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEO, MixVideo)) -#define MIX_IS_VIDEO(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEO)) -#define MIX_VIDEO_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEO, MixVideoClass)) -#define MIX_IS_VIDEO_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEO)) -#define MIX_VIDEO_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEO, MixVideoClass)) - -typedef struct _MixVideo MixVideo; -typedef struct _MixVideoClass MixVideoClass; +class MixVideo; /* * Virtual methods typedef */ @@ -67,9 +57,11 @@ typedef MIX_RESULT (*MixVideoReleaseFrameFunc)(MixVideo * mix, typedef MIX_RESULT (*MixVideoRenderFunc)(MixVideo * mix, MixVideoRenderParams * render_params, MixVideoFrame *frame); +#if MIXVIDEO_ENCODE_ENABLE typedef MIX_RESULT (*MixVideoEncodeFunc)(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, MixVideoEncodeParams * encode_params); +#endif typedef MIX_RESULT (*MixVideoFlushFunc)(MixVideo * mix); @@ -85,32 +77,28 @@ typedef MIX_RESULT (*MixVideoReleaseMixBufferFunc)(MixVideo * mix, typedef MIX_RESULT (*MixVideoGetMaxCodedBufferSizeFunc) (MixVideo * mix, guint *max_size); - +#if MIXVIDEO_ENCODE_ENABLE typedef MIX_RESULT (*MixVideoSetDynamicEncConfigFunc) (MixVideo * mix, MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); +#endif /** * MixVideo: * @parent: Parent object. * * MI-X Video object */ -struct _MixVideo { - /*< public > */ - GObject parent; - +class MixVideo { +public: + MixVideo(); + ~MixVideo(); + +public: /*< private > */ gpointer context; -}; - -/** - * MixVideoClass: - * - * MI-X Video object class - */ -struct _MixVideoClass { - /*< public > */ - GObjectClass parent_class; + guint ref_count; + MixVideoPrivate mPriv; +public: /*< virtual public >*/ MixVideoGetVersionFunc get_version_func; MixVideoInitializeFunc initialize_func; @@ -121,24 +109,20 @@ struct _MixVideoClass { MixVideoGetFrameFunc get_frame_func; MixVideoReleaseFrameFunc release_frame_func; MixVideoRenderFunc render_func; +#if MIXVIDEO_ENCODE_ENABLE MixVideoEncodeFunc encode_func; +#endif MixVideoFlushFunc flush_func; MixVideoEOSFunc eos_func; MixVideoGetStateFunc get_state_func; MixVideoGetMixBufferFunc get_mix_buffer_func; MixVideoReleaseMixBufferFunc release_mix_buffer_func; +#if MIXVIDEO_ENCODE_ENABLE MixVideoGetMaxCodedBufferSizeFunc get_max_coded_buffer_size_func; MixVideoSetDynamicEncConfigFunc set_dynamic_enc_config_func; +#endif }; -/** - * mix_video_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_video_get_type(void); - /** * mix_video_new: * @returns: A newly allocated instance of #MixVideo @@ -162,7 +146,8 @@ MixVideo *mix_video_ref(MixVideo * mix); * * Decrement reference count of the object. */ -#define mix_video_unref(obj) g_object_unref (G_OBJECT(obj)) +MixVideo * +mix_video_unref(MixVideo * mix) ; /* Class Methods */ @@ -453,10 +438,15 @@ MIX_RESULT mix_video_render(MixVideo * mix, * * */ +#if MIXVIDEO_ENCODE_ENABLE MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, MixVideoEncodeParams * encode_params); - +#else +MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, + MixIOVec * iovout[], gint iovoutcnt, + MixParams * encode_params); +#endif /** * mix_video_flush: * @mix: #MixVideo object. 
@@ -563,9 +553,9 @@ MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize); * Usually this function is after the encoding session is started. * */ + MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); -G_END_DECLS #endif /* __MIX_VIDEO_H__ */ diff --git a/mix_video/src/mixvideo_private.h b/mix_video/src/mixvideo_private.h index ffa403f..eecd3ff 100644 --- a/mix_video/src/mixvideo_private.h +++ b/mix_video/src/mixvideo_private.h @@ -9,14 +9,16 @@ #ifndef __MIX_VIDEO_PRIVATE_H__ #define __MIX_VIDEO_PRIVATE_H__ -G_BEGIN_DECLS +#include "mixvideothread.h" +class MixFrameManager; +class MixVideoFormat; typedef struct _MixVideoPrivate MixVideoPrivate; struct _MixVideoPrivate { /*< private > */ - GMutex *objlock; + MixVideoMutex objlock; gboolean initialized; gboolean configured; @@ -34,26 +36,18 @@ struct _MixVideoPrivate { MixFrameManager *frame_manager; MixVideoFormat *video_format; +#if MIXVIDEO_ENCODE_ENABLE MixVideoFormatEnc *video_format_enc; +#endif MixSurfacePool *surface_pool; MixBufferPool *buffer_pool; }; -/** - * MIX_VIDEO_PRIVATE: - * - * Get private structure of this class. - * @obj: class object for which to get private data. - */ -#define MIX_VIDEO_GET_PRIVATE(obj) \ - (G_TYPE_INSTANCE_GET_PRIVATE ((obj), MIX_TYPE_VIDEO, MixVideoPrivate)) - /* Private functions */ void mix_video_private_initialize(MixVideoPrivate* priv); void mix_video_private_cleanup(MixVideoPrivate* priv); -G_END_DECLS - +#define MIX_VIDEO_GET_PRIVATE(mix) (MixVideoPrivate*)(mix->context) #endif /* __MIX_VIDEO_PRIVATE_H__ */ diff --git a/mix_video/src/mixvideocaps.c b/mix_video/src/mixvideocaps.c deleted file mode 100644 index 7e378a9..0000000 --- a/mix_video/src/mixvideocaps.c +++ /dev/null @@ -1,267 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** -* SECTION:mixvideocaps -* @short_description: VideoConfig parameters -* -* A data object which stores videoconfig specific parameters. 
-*/ - -#include "mixvideocaps.h" - -#ifdef ANDROID -#define mix_strcmp strcmp -#else -#define mix_strcmp g_strcmp0 -#endif - - -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } - -static GType _mix_videocaps_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_videocaps_type = g_define_type_id; } - -gboolean mix_videocaps_copy (MixParams * target, const MixParams * src); -MixParams *mix_videocaps_dup (const MixParams * obj); -gboolean mix_videocaps_equal (MixParams * first, MixParams * second); -static void mix_videocaps_finalize (MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoCaps, mix_videocaps, MIX_TYPE_PARAMS, - _do_init); - -static void -mix_videocaps_init (MixVideoCaps * self) -{ - /* initialize properties here */ - self->mix_caps = NULL; - self->video_hw_caps = NULL; - - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; - -} - -static void -mix_videocaps_class_init (MixVideoCapsClass * klass) -{ - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS (klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent (klass); - - mixparams_class->finalize = mix_videocaps_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_videocaps_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_videocaps_dup; - mixparams_class->equal = (MixParamsEqualFunction) mix_videocaps_equal; -} - -MixVideoCaps * -mix_videocaps_new (void) -{ - MixVideoCaps *ret = - (MixVideoCaps *) g_type_create_instance (MIX_TYPE_VIDEOCAPS); - return ret; -} - -void -mix_videocaps_finalize (MixParams * obj) -{ - /* clean up here. */ - - MixVideoCaps *self = MIX_VIDEOCAPS (obj); - SAFE_FREE (self->mix_caps); - SAFE_FREE (self->video_hw_caps); - - /* Chain up parent */ - if (parent_class->finalize) - { - parent_class->finalize (obj); - } -} - -MixVideoCaps * -mix_videocaps_ref (MixVideoCaps * mix) -{ - return (MixVideoCaps *) mix_params_ref (MIX_PARAMS (mix)); -} - -/** -* mix_videocaps_dup: -* @obj: a #MixVideoCaps object -* @returns: a newly allocated duplicate of the object. -* -* Copy duplicate of the object. -*/ -MixParams * -mix_videocaps_dup (const MixParams * obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_VIDEOCAPS (obj)) - { - MixVideoCaps *duplicate = mix_videocaps_new (); - if (mix_videocaps_copy (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) - { - ret = MIX_PARAMS (duplicate); - } - else - { - mix_videocaps_unref (duplicate); - } - } - return ret; -} - -/** -* mix_videocaps_copy: -* @target: copy to target -* @src: copy from src -* @returns: boolean indicates if copy is successful. -* -* Copy instance data from @src to @target. 
-*/ -gboolean -mix_videocaps_copy (MixParams * target, const MixParams * src) -{ - MixVideoCaps *this_target, *this_src; - - if (MIX_IS_VIDEOCAPS (target) && MIX_IS_VIDEOCAPS (src)) - { - // Cast the base object to this child object - this_target = MIX_VIDEOCAPS (target); - this_src = MIX_VIDEOCAPS (src); - - // Free the existing properties - SAFE_FREE (this_target->mix_caps); - SAFE_FREE (this_target->video_hw_caps); - - // Duplicate string - this_target->mix_caps = g_strdup (this_src->mix_caps); - this_target->video_hw_caps = g_strdup (this_src->video_hw_caps); - - // Now chainup base class - if (parent_class->copy) - { - return parent_class->copy (MIX_PARAMS_CAST (target), - MIX_PARAMS_CAST (src)); - } - else - { - return TRUE; - } - } - return FALSE; -} - -/** -* mix_videocaps_: -* @first: first object to compare -* @second: seond object to compare -* @returns: boolean indicates if instance are equal. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videocaps_equal (MixParams * first, MixParams * second) -{ - gboolean ret = FALSE; - MixVideoCaps *this_first, *this_second; - - if (MIX_IS_VIDEOCAPS (first) && MIX_IS_VIDEOCAPS (second)) - { - // Deep compare - // Cast the base object to this child object - - this_first = MIX_VIDEOCAPS (first); - this_second = MIX_VIDEOCAPS (second); - - /* TODO: add comparison for other properties */ - if (mix_strcmp (this_first->mix_caps, this_second->mix_caps) == 0 - && mix_strcmp (this_first->video_hw_caps, - this_second->video_hw_caps) == 0) - { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); - if (klass->equal) - ret = klass->equal (first, second); - else - ret = TRUE; - } - } - - return ret; -} - -#define MIX_VIDEOCAPS_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCAPS(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOCAPS_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCAPS(obj)) return MIX_RESULT_FAIL; \ - - -/* TODO: Add getters and setters for other properties. The following is just an exmaple, not implemented yet. 
*/ -MIX_RESULT -mix_videocaps_set_mix_caps (MixVideoCaps * obj, gchar * mix_caps) -{ - MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj); - - SAFE_FREE (obj->mix_caps); - obj->mix_caps = g_strdup (mix_caps); - if (mix_caps != NULL && obj->mix_caps == NULL) - { - return MIX_RESULT_NO_MEMORY; - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT -mix_videocaps_get_mix_caps (MixVideoCaps * obj, gchar ** mix_caps) -{ - MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, mix_caps); - *mix_caps = g_strdup (obj->mix_caps); - if (*mix_caps == NULL && obj->mix_caps) - { - return MIX_RESULT_NO_MEMORY; - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT -mix_videocaps_set_video_hw_caps (MixVideoCaps * obj, gchar * video_hw_caps) -{ - MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj); - SAFE_FREE (obj->video_hw_caps); - obj->video_hw_caps = g_strdup (video_hw_caps); - if (video_hw_caps != NULL && obj->video_hw_caps == NULL) - { - return MIX_RESULT_NO_MEMORY; - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT -mix_videocaps_get_video_hw_caps (MixVideoCaps * obj, gchar ** video_hw_caps) -{ - MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, video_hw_caps); - - *video_hw_caps = g_strdup (obj->video_hw_caps); - if (*video_hw_caps == NULL && obj->video_hw_caps) - { - return MIX_RESULT_NO_MEMORY; - } - return MIX_RESULT_SUCCESS; -} diff --git a/mix_video/src/mixvideocaps.cpp b/mix_video/src/mixvideocaps.cpp new file mode 100644 index 0000000..ce96873 --- /dev/null +++ b/mix_video/src/mixvideocaps.cpp @@ -0,0 +1,162 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** +* SECTION:mixvideocaps +* @short_description: VideoConfig parameters +* +* A data object which stores videoconfig specific parameters. 
+*/ + +#include +#include "mixvideocaps.h" + +#ifdef ANDROID +#define mix_strcmp strcmp +#else +#define mix_strcmp g_strcmp0 +#endif + +#define MIX_VIDEOCAPS_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCAPS(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOCAPS_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCAPS(obj)) return MIX_RESULT_FAIL; \ + + +#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } + +MixVideoCaps::MixVideoCaps() + :mix_caps(NULL) + ,video_hw_caps(NULL) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { +} + +MixVideoCaps::~MixVideoCaps() { + SAFE_FREE (this->mix_caps); + SAFE_FREE (this->video_hw_caps); +} + +/** +* mix_videocaps_dup: +* @obj: a #MixVideoCaps object +* @returns: a newly allocated duplicate of the object. +* +* Copy duplicate of the object. +*/ +MixParams* MixVideoCaps::dup() const { + MixParams *ret = new MixVideoCaps(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; +} + +/** +* mix_videocaps_copy: +* @target: copy to target +* @src: copy from src +* @returns: boolean indicates if copy is successful. +* +* Copy instance data from @src to @target. +*/ +gboolean MixVideoCaps::copy (MixParams * target) const { + gboolean ret = FALSE; + MixVideoCaps * this_target = MIX_VIDEOCAPS(target); + if (NULL != this_target) { + // Free the existing properties + SAFE_FREE (this_target->mix_caps); + SAFE_FREE (this_target->video_hw_caps); + // Duplicate string + this_target->mix_caps = g_strdup (this->mix_caps); + this_target->video_hw_caps = g_strdup (this->video_hw_caps); + + // chain up base class + ret = MixParams::copy(target); + } + return ret; +} + + +gboolean MixVideoCaps::equal(MixParams* obj) const { + gboolean ret = FALSE; + MixVideoCaps * this_obj = MIX_VIDEOCAPS(obj); + if (NULL != this_obj) { + if ((mix_strcmp (this->mix_caps, this_obj->mix_caps) == 0) && + (mix_strcmp (this->video_hw_caps, this_obj->video_hw_caps) == 0)) { + ret = MixParams::equal(this_obj); + } + } + return ret; +} + + + +MixVideoCaps * +mix_videocaps_new (void) { + return new MixVideoCaps(); +} + +MixVideoCaps * +mix_videocaps_ref (MixVideoCaps * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} + + +/* TODO: Add getters and setters for other properties. The following is just an exmaple, not implemented yet. 
*/ +MIX_RESULT mix_videocaps_set_mix_caps ( + MixVideoCaps * obj, gchar * mix_caps) { + MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj); + SAFE_FREE (obj->mix_caps); + obj->mix_caps = g_strdup (mix_caps); + if (NULL == obj->mix_caps && NULL != mix_caps) { + return MIX_RESULT_NO_MEMORY; + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videocaps_get_mix_caps ( + MixVideoCaps * obj, gchar ** mix_caps) { + MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, mix_caps); + *mix_caps = g_strdup (obj->mix_caps); + if (NULL == *mix_caps && NULL != obj->mix_caps) { + return MIX_RESULT_NO_MEMORY; + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videocaps_set_video_hw_caps ( + MixVideoCaps * obj, gchar * video_hw_caps) { + MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj); + SAFE_FREE (obj->video_hw_caps); + obj->video_hw_caps = g_strdup (video_hw_caps); + if (NULL != video_hw_caps && NULL == obj->video_hw_caps) { + return MIX_RESULT_NO_MEMORY; + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videocaps_get_video_hw_caps ( + MixVideoCaps * obj, gchar ** video_hw_caps) { + MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, video_hw_caps); + *video_hw_caps = g_strdup (obj->video_hw_caps); + if (NULL == *video_hw_caps && NULL != obj->video_hw_caps) { + return MIX_RESULT_NO_MEMORY; + } + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixvideocaps.h b/mix_video/src/mixvideocaps.h index ff50647..dbf52b9 100644 --- a/mix_video/src/mixvideocaps.h +++ b/mix_video/src/mixvideocaps.h @@ -12,20 +12,11 @@ No license under any patent, copyright, trade secret or other intellectual prope #include #include "mixvideodef.h" -G_BEGIN_DECLS - -/** -* MIX_TYPE_VIDEOCAPS: -* -* Get type of class. -*/ -#define MIX_TYPE_VIDEOCAPS (mix_videocaps_get_type ()) - /** * MIX_VIDEOCAPS: * @obj: object to be type-casted. */ -#define MIX_VIDEOCAPS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCAPS, MixVideoCaps)) +#define MIX_VIDEOCAPS(obj) (reinterpret_cast(obj)) /** * MIX_IS_VIDEOCAPS: @@ -33,42 +24,27 @@ G_BEGIN_DECLS * * Checks if the given object is an instance of #MixParams */ -#define MIX_IS_VIDEOCAPS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCAPS)) - -/** -* MIX_VIDEOCAPS_CLASS: -* @klass: class to be type-casted. -*/ -#define MIX_VIDEOCAPS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCAPS, MixVideoCapsClass)) - -/** -* MIX_IS_VIDEOCAPS_CLASS: -* @klass: a class. -* -* Checks if the given class is #MixParamsClass -*/ -#define MIX_IS_VIDEOCAPS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCAPS)) - -/** -* MIX_VIDEOCAPS_GET_CLASS: -* @obj: a #MixParams object. -* -* Get the class instance of the object. -*/ -#define MIX_VIDEOCAPS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCAPS, MixVideoCapsClass)) +#define MIX_IS_VIDEOCAPS(obj) ((NULL != MIX_VIDEOCAPS(obj)) ? 
TRUE : FALSE) -typedef struct _MixVideoCaps MixVideoCaps; -typedef struct _MixVideoCapsClass MixVideoCapsClass; /** * MixVideoCaps: * * MI-X VideoConfig Parameter object */ -struct _MixVideoCaps +class MixVideoCaps : public MixParams { +public: + MixVideoCaps(); + virtual ~MixVideoCaps(); + + virtual gboolean copy(MixParams* target) const; + virtual MixParams *dup() const; + virtual gboolean equal(MixParams* obj) const; + +public: /*< public > */ - MixParams parent; + //MixParams parent; /*< public > */ gchar *mix_caps; @@ -80,27 +56,6 @@ struct _MixVideoCaps void *reserved4; }; -/** -* MixVideoCapsClass: -* -* MI-X VideoConfig object class -*/ -struct _MixVideoCapsClass -{ - /*< public > */ - MixParamsClass parent_class; - - /* class members */ -}; - -/** -* mix_videocaps_get_type: -* @returns: type -* -* Get the type of object. -*/ -GType mix_videocaps_get_type (void); - /** * mix_videocaps_new: * @returns: A newly allocated instance of #MixVideoCaps @@ -136,6 +91,5 @@ MIX_RESULT mix_videocaps_set_video_hw_caps (MixVideoCaps * obj, MIX_RESULT mix_videocaps_get_video_hw_caps (MixVideoCaps * obj, gchar ** video_hw_caps); -G_END_DECLS #endif /* __MIX_VIDEOCAPS_H__ */ diff --git a/mix_video/src/mixvideoconfigparams.c b/mix_video/src/mixvideoconfigparams.c deleted file mode 100644 index 6ec641b..0000000 --- a/mix_video/src/mixvideoconfigparams.c +++ /dev/null @@ -1,176 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideoconfigparams - * @short_description: MI-X Video Configuration Parameter Base Object - * @include: mixvideoconfigparams.h - * - * - * A base object of MI-X video configuration parameter objects. - * - * - * The derived MixVideoConfigParams object is created by the MMF/App - * and provided in the MixVideo mix_video_configure() function. The get and set - * methods for the properties will be available for the caller to set and get information at - * configuration time. It will also be created by MixVideo and returned from the - * mix_video_get_config() function, whereupon the MMF/App can get the get methods to - * obtain current configuration information. - * - * - * There are decode mode objects (for example, MixVideoConfigParamsDec) and encode - * mode objects (for example, MixVideoConfigParamsEnc). Each of these types is refined - * further with media specific objects. The application should create the correct type of - * object to match the media format of the stream to be handled, e.g. 
if the media - * format of the stream to be decoded is H.264, the application would create a - * MixVideoConfigParamsDecH264 object for the mix_video_configure() call. - * - */ - -#include -#include "mixvideolog.h" -#include "mixvideoconfigparams.h" - -static GType _mix_videoconfigparams_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_videoconfigparams_type = g_define_type_id; } - -gboolean mix_videoconfigparams_copy(MixParams * target, const MixParams * src); -MixParams *mix_videoconfigparams_dup(const MixParams * obj); -gboolean mix_videoconfigparams_equal(MixParams * first, MixParams * second); -static void mix_videoconfigparams_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParams, mix_videoconfigparams, - MIX_TYPE_PARAMS, _do_init); - -static void mix_videoconfigparams_init(MixVideoConfigParams * self) { - - /* initialize properties here */ - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void mix_videoconfigparams_class_init(MixVideoConfigParamsClass * klass) { - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); - - mixparams_class->finalize = mix_videoconfigparams_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_videoconfigparams_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_videoconfigparams_dup; - mixparams_class->equal - = (MixParamsEqualFunction) mix_videoconfigparams_equal; -} - -MixVideoConfigParams * -mix_videoconfigparams_new(void) { - MixVideoConfigParams *ret = - (MixVideoConfigParams *) g_type_create_instance( - MIX_TYPE_VIDEOCONFIGPARAMS); - - return ret; -} - -void mix_videoconfigparams_finalize(MixParams * obj) { - - /* clean up here. */ - /* MixVideoConfigParams *self = MIX_VIDEOCONFIGPARAMS(obj); */ - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} - -MixVideoConfigParams * -mix_videoconfigparams_ref(MixVideoConfigParams * mix) { - return (MixVideoConfigParams *) mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_videoconfigparams_dup: - * @obj: a #MixVideoConfigParams object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixParams * -mix_videoconfigparams_dup(const MixParams * obj) { - MixParams *ret = NULL; - - if (MIX_IS_VIDEOCONFIGPARAMS(obj)) { - MixVideoConfigParams *duplicate = mix_videoconfigparams_new(); - if (mix_videoconfigparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { - ret = MIX_PARAMS(duplicate); - } else { - mix_videoconfigparams_unref(duplicate); - } - } - - return ret; -} - -/** - * mix_videoconfigparams_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. 
- */ -gboolean mix_videoconfigparams_copy(MixParams * target, const MixParams * src) { - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOCONFIGPARAMS(target) && MIX_IS_VIDEOCONFIGPARAMS(src)) { - - /* TODO: copy other properties if there's any */ - - /* Now chainup base class */ - if (parent_class->copy) { - LOG_V( "parent_class->copy != NULL\n"); - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - LOG_V( "parent_class->copy == NULL\n"); - return TRUE; - } - } - - LOG_V( "End\n"); - return FALSE; -} - -/** - * mix_videoconfigparams_: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videoconfigparams_equal(MixParams * first, MixParams * second) { - - gboolean ret = FALSE; - - if (MIX_IS_VIDEOCONFIGPARAMS(first) && MIX_IS_VIDEOCONFIGPARAMS(second)) { - - /* chaining up. */ - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - } - - return ret; -} diff --git a/mix_video/src/mixvideoconfigparams.cpp b/mix_video/src/mixvideoconfigparams.cpp new file mode 100644 index 0000000..7668b4e --- /dev/null +++ b/mix_video/src/mixvideoconfigparams.cpp @@ -0,0 +1,86 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideoconfigparams + * @short_description: MI-X Video Configuration Parameter Base Object + * @include: mixvideoconfigparams.h + * + * + * A base object of MI-X video configuration parameter objects. + * + * + * The derived MixVideoConfigParams object is created by the MMF/App + * and provided in the MixVideo mix_video_configure() function. The get and set + * methods for the properties will be available for the caller to set and get information at + * configuration time. It will also be created by MixVideo and returned from the + * mix_video_get_config() function, whereupon the MMF/App can get the get methods to + * obtain current configuration information. + * + * + * There are decode mode objects (for example, MixVideoConfigParamsDec) and encode + * mode objects (for example, MixVideoConfigParamsEnc). Each of these types is refined + * further with media specific objects. The application should create the correct type of + * object to match the media format of the stream to be handled, e.g. 
if the media + * format of the stream to be decoded is H.264, the application would create a + * MixVideoConfigParamsDecH264 object for the mix_video_configure() call. + * + */ + +#include +#include "mixvideolog.h" +#include "mixvideoconfigparams.h" + +MixVideoConfigParams::MixVideoConfigParams() + :reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { +} +MixVideoConfigParams::~MixVideoConfigParams() { +} +gboolean MixVideoConfigParams::copy(MixParams *target) const { + gboolean ret = FALSE; + MixVideoConfigParams * this_target = MIX_VIDEOCONFIGPARAMS(target); + if (NULL != this_target) + ret = MixParams::copy(target); + return ret; +} + +gboolean MixVideoConfigParams::equal(MixParams* obj) const { + gboolean ret = FALSE; + MixVideoConfigParams * this_obj = MIX_VIDEOCONFIGPARAMS(obj); + if (NULL != this_obj) + ret = MixParams::equal(this_obj); + return ret; +} + +MixParams* MixVideoConfigParams::dup() const { + MixParams *ret = new MixVideoConfigParams(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; +} + + +MixVideoConfigParams * +mix_videoconfigparams_new(void) { + return new MixVideoConfigParams(); +} +MixVideoConfigParams * +mix_videoconfigparams_ref(MixVideoConfigParams * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} + + diff --git a/mix_video/src/mixvideoconfigparams.h b/mix_video/src/mixvideoconfigparams.h index d3939af..5e4d9ad 100644 --- a/mix_video/src/mixvideoconfigparams.h +++ b/mix_video/src/mixvideoconfigparams.h @@ -12,90 +12,36 @@ #include #include "mixvideodef.h" -G_BEGIN_DECLS - -/** - * MIX_TYPE_VIDEOCONFIGPARAMS: - * - * Get type of class. - */ -#define MIX_TYPE_VIDEOCONFIGPARAMS (mix_videoconfigparams_get_type ()) - /** * MIX_VIDEOCONFIGPARAMS: * @obj: object to be type-casted. */ -#define MIX_VIDEOCONFIGPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMS, MixVideoConfigParams)) - -/** - * MIX_IS_VIDEOCONFIGPARAMS: - * @obj: an object. - * - * Checks if the given object is an instance of #MixParams - */ -#define MIX_IS_VIDEOCONFIGPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMS)) - -/** - * MIX_VIDEOCONFIGPARAMS_CLASS: - * @klass: class to be type-casted. - */ -#define MIX_VIDEOCONFIGPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMS, MixVideoConfigParamsClass)) +#define MIX_VIDEOCONFIGPARAMS(obj) (reinterpret_cast(obj)) -/** - * MIX_IS_VIDEOCONFIGPARAMS_CLASS: - * @klass: a class. - * - * Checks if the given class is #MixParamsClass - */ -#define MIX_IS_VIDEOCONFIGPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMS)) - -/** - * MIX_VIDEOCONFIGPARAMS_GET_CLASS: - * @obj: a #MixParams object. - * - * Get the class instance of the object. 
- */ -#define MIX_VIDEOCONFIGPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMS, MixVideoConfigParamsClass)) - -typedef struct _MixVideoConfigParams MixVideoConfigParams; -typedef struct _MixVideoConfigParamsClass MixVideoConfigParamsClass; /** * MixVideoConfigParams: * * MI-X VideoConfig Parameter object */ -struct _MixVideoConfigParams { +class MixVideoConfigParams : public MixParams { +public: + MixVideoConfigParams(); + virtual ~MixVideoConfigParams(); + virtual gboolean copy(MixParams *target) const; + virtual gboolean equal(MixParams* obj) const; + virtual MixParams* dup() const; /*< public > */ - MixParams parent; + //MixParams parent; /*< private > */ - +protected: void *reserved1; void *reserved2; void *reserved3; void *reserved4; }; -/** - * MixVideoConfigParamsClass: - * - * MI-X VideoConfig object class - */ -struct _MixVideoConfigParamsClass { - /*< public > */ - MixParamsClass parent_class; - -/* class members */ -}; - -/** - * mix_videoconfigparams_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_videoconfigparams_get_type(void); /** * mix_videoconfigparams_new: @@ -126,6 +72,5 @@ MixVideoConfigParams *mix_videoconfigparams_ref(MixVideoConfigParams * mix); /* TODO: Add getters and setters for other properties */ -G_END_DECLS #endif /* __MIX_VIDEOCONFIGPARAMS_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsdec.c b/mix_video/src/mixvideoconfigparamsdec.c deleted file mode 100644 index e9659dd..0000000 --- a/mix_video/src/mixvideoconfigparamsdec.c +++ /dev/null @@ -1,649 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideoconfigparamsdec - * @short_description: MI-X Video Decode Configuration Parameter Base Object - * - * A base object of MI-X video decode configuration parameter objects. 
- */ - -#include -#include "mixvideolog.h" -#include "mixvideoconfigparamsdec.h" - -static GType _mix_videoconfigparamsdec_type = 0; -static MixVideoConfigParamsClass *parent_class = NULL; - -#define _do_init { _mix_videoconfigparamsdec_type = g_define_type_id; } - -gboolean mix_videoconfigparamsdec_copy(MixParams * target, const MixParams * src); -MixParams *mix_videoconfigparamsdec_dup(const MixParams * obj); -gboolean mix_videoconfigparamsdec_equal(MixParams * first, MixParams * second); -static void mix_videoconfigparamsdec_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsDec, mix_videoconfigparamsdec, - MIX_TYPE_VIDEOCONFIGPARAMS, _do_init); - -static void mix_videoconfigparamsdec_init(MixVideoConfigParamsDec * self) { - - /* initialize properties here */ - - self->frame_order_mode = MIX_FRAMEORDER_MODE_DISPLAYORDER; - memset(&self->header, 0, sizeof(self->header)); - - self->mime_type = NULL; - - self->frame_rate_num = 0; - self->frame_rate_denom = 0; - - self->picture_width = 0; - self->picture_height = 0; - - self->raw_format = 0; - self->rate_control = 0; - self->mixbuffer_pool_size = 0; - self->extra_surface_allocation = 0; - - self->video_range = 0; - self->color_matrix = 0; - self->bit_rate = 0; - - self->par_num = 0; - self->par_denom= 0; - - /* TODO: initialize other properties */ - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void mix_videoconfigparamsdec_class_init(MixVideoConfigParamsDecClass * klass) { - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixVideoConfigParamsClass *) g_type_class_peek_parent(klass); - - mixparams_class->finalize = mix_videoconfigparamsdec_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_videoconfigparamsdec_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_videoconfigparamsdec_dup; - mixparams_class->equal - = (MixParamsEqualFunction) mix_videoconfigparamsdec_equal; -} - -MixVideoConfigParamsDec * -mix_videoconfigparamsdec_new(void) { - MixVideoConfigParamsDec *ret = - (MixVideoConfigParamsDec *) g_type_create_instance( - MIX_TYPE_VIDEOCONFIGPARAMSDEC); - - return ret; -} - -void mix_videoconfigparamsdec_finalize(MixParams * obj) { - - /* clean up here. */ - MixVideoConfigParamsDec *self = MIX_VIDEOCONFIGPARAMSDEC(obj); - MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class); - - - /* free header */ - if (self->header.data) { - g_free(self->header.data); - memset(&self->header, 0, sizeof(self->header)); - } - - /* free mime_type */ - if (self->mime_type->str) - g_string_free(self->mime_type, TRUE); - else - g_string_free(self->mime_type, FALSE); - - /* Chain up parent */ - if (root_class->finalize) { - root_class->finalize(obj); - } -} - -MixVideoConfigParamsDec * -mix_videoconfigparamsdec_ref(MixVideoConfigParamsDec * mix) { - return (MixVideoConfigParamsDec *) mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_videoconfigparamsdec_dup: - * @obj: a #MixVideoConfigParamsDec object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. 
- */ -MixParams * -mix_videoconfigparamsdec_dup(const MixParams * obj) { - MixParams *ret = NULL; - - if (MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) { - MixVideoConfigParamsDec *duplicate = mix_videoconfigparamsdec_new(); - if (mix_videoconfigparamsdec_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { - ret = MIX_PARAMS(duplicate); - } else { - mix_videoconfigparamsdec_unref(duplicate); - } - } - - return ret; -} - -/** - * mix_videoconfigparamsdec_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videoconfigparamsdec_copy(MixParams * target, const MixParams * src) { - - MixVideoConfigParamsDec *this_target, *this_src; - MIX_RESULT mix_result = MIX_RESULT_FAIL; - MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class); - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOCONFIGPARAMSDEC(target) && MIX_IS_VIDEOCONFIGPARAMSDEC(src)) { - - /* Cast the base object to this child object */ - this_target = MIX_VIDEOCONFIGPARAMSDEC(target); - this_src = MIX_VIDEOCONFIGPARAMSDEC(src); - - /* copy properties of primitive type */ - - this_target->frame_order_mode = this_src->frame_order_mode; - this_target->frame_rate_num = this_src->frame_rate_num; - this_target->frame_rate_denom = this_src->frame_rate_denom; - this_target->picture_width = this_src->picture_width; - this_target->picture_height = this_src->picture_height; - this_target->raw_format = this_src->raw_format; - this_target->rate_control = this_src->rate_control; - this_target->mixbuffer_pool_size = this_src->mixbuffer_pool_size; - this_target->extra_surface_allocation = this_src->extra_surface_allocation; - this_target->video_range = this_src->video_range; - this_target->color_matrix = this_src->color_matrix; - this_target->bit_rate = this_src->bit_rate; - this_target->par_num = this_src->par_num; - this_target->par_denom = this_src->par_denom; - - /* copy properties of non-primitive */ - - /* copy header */ - mix_result = mix_videoconfigparamsdec_set_header(this_target, - &this_src->header); - - if (mix_result != MIX_RESULT_SUCCESS) { - - LOG_E( "set_header failed: mix_result = 0x%x\n", mix_result); - return FALSE; - } - - /* copy mime_type */ - if (this_src->mime_type) { - - mix_result = mix_videoconfigparamsdec_set_mime_type(this_target, - this_src->mime_type->str); - } else { - mix_result = mix_videoconfigparamsdec_set_mime_type(this_target, NULL); - } - - if (mix_result != MIX_RESULT_SUCCESS) { - LOG_E( "set_mime_type failed: mix_result = 0x%x\n", mix_result); - return FALSE; - } - - /* TODO: copy other properties if there's any */ - - /* Now chainup base class */ - if (root_class->copy) { - LOG_V( "root_class->copy != NULL\n"); - return root_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - LOG_E( "root_class->copy == NULL\n"); - return TRUE; - } - } - - LOG_V( "End\n"); - - return FALSE; -} - -/** - * mix_videoconfigparamsdec_: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. 
- */ -gboolean mix_videoconfigparamsdec_equal(MixParams * first, MixParams * second) { - - gboolean ret = FALSE; - - MixVideoConfigParamsDec *this_first, *this_second; - MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class); - - - if (MIX_IS_VIDEOCONFIGPARAMSDEC(first) && MIX_IS_VIDEOCONFIGPARAMSDEC(second)) { - - // Deep compare - // Cast the base object to this child object - this_first = MIX_VIDEOCONFIGPARAMSDEC(first); - this_second = MIX_VIDEOCONFIGPARAMSDEC(second); - - /* check the equality of the primitive type properties */ - if (this_first->frame_order_mode != this_second->frame_order_mode) { - goto not_equal; - } - - if (this_first->frame_rate_num != this_second->frame_rate_num - && this_first->frame_rate_denom - != this_second->frame_rate_denom) { - goto not_equal; - } - - if (this_first->picture_width != this_second->picture_width - && this_first->picture_height != this_second->picture_height) { - goto not_equal; - } - - if (this_first->raw_format != this_second->raw_format) { - goto not_equal; - } - - if (this_first->rate_control != this_second->rate_control) { - goto not_equal; - } - - if (this_first->mixbuffer_pool_size != this_second->mixbuffer_pool_size) { - goto not_equal; - } - - if (this_first->extra_surface_allocation != this_second->extra_surface_allocation) { - goto not_equal; - } - - /* check the equality of the non-primitive type properties */ - - /* MixIOVec header */ - - if (this_first->header.data_size != this_second->header.data_size) { - goto not_equal; - } - - if (this_first->header.buffer_size != this_second->header.buffer_size) { - goto not_equal; - } - - if (this_first->header.data && this_second->header.data) { - if (memcmp(this_first->header.data, this_second->header.data, - this_first->header.data_size) != 0) { - goto not_equal; - } - } else if (!(!this_first->header.data && !this_second->header.data)) { - goto not_equal; - } - - /* compare mime_type */ - - if (this_first->mime_type && this_second->mime_type) { - if (g_string_equal(this_first->mime_type, this_second->mime_type) - != TRUE) { - goto not_equal; - } - } else if (!(!this_first->mime_type && !this_second->mime_type)) { - goto not_equal; - } - - if (this_first->video_range != this_second->video_range) - { - goto not_equal; - } - - if (this_first->color_matrix != this_second->color_matrix) - { - goto not_equal; - } - - if (this_first->bit_rate != this_second->bit_rate) - { - goto not_equal; - } - - if (this_first->par_num != this_second->par_num) - { - goto not_equal; - } - - if (this_first->par_denom != this_second->par_denom) - { - goto not_equal; - } - - ret = TRUE; - - not_equal: - - if (ret != TRUE) { - return ret; - } - - /* chaining up. */ - if (root_class->equal) - ret = root_class->equal(first, second); - else - ret = TRUE; - } - - return ret; -} - -#define MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR(obj, prop, prop2) \ - if(!obj || !prop || !prop2 ) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \ - -/* TODO: Add getters and setters for other properties.
The following is incomplete */ - -MIX_RESULT mix_videoconfigparamsdec_set_frame_order_mode( - MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->frame_order_mode = frame_order_mode; - LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_frame_order_mode( - MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, frame_order_mode); - *frame_order_mode = obj->frame_order_mode; - LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_header(MixVideoConfigParamsDec * obj, - MixIOVec * header) { - - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - - if (!header) { - return MIX_RESULT_NULL_PTR; - } - - if (header->data && header->buffer_size) { - obj->header.data = g_memdup(header->data, header->buffer_size); - if (!obj->header.data) { - return MIX_RESULT_NO_MEMORY; - } - obj->header.buffer_size = header->buffer_size; - obj->header.data_size = header->data_size; - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_header(MixVideoConfigParamsDec * obj, - MixIOVec ** header) { - - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, header); - - if (obj->header.data && obj->header.buffer_size) { - - *header = g_malloc(sizeof(MixIOVec)); - - if (*header == NULL) { - return MIX_RESULT_NO_MEMORY; - } - - (*header)->data = g_memdup(obj->header.data, obj->header.buffer_size); - (*header)->buffer_size = obj->header.buffer_size; - (*header)->data_size = obj->header.data_size; - - } else { - *header = NULL; - } - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT mix_videoconfigparamsdec_set_mime_type(MixVideoConfigParamsDec * obj, - const gchar * mime_type) { - - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - - if (!mime_type) { - return MIX_RESULT_NULL_PTR; - } - - if (obj->mime_type) { - if (obj->mime_type->str) - g_string_free(obj->mime_type, TRUE); - else - g_string_free(obj->mime_type, FALSE); - } - - obj->mime_type = g_string_new(mime_type); - if (!obj->mime_type) { - return MIX_RESULT_NO_MEMORY; - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_mime_type(MixVideoConfigParamsDec * obj, - gchar ** mime_type) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, mime_type); - - if (!obj->mime_type) { - *mime_type = NULL; - return MIX_RESULT_SUCCESS; - } - *mime_type = g_strdup(obj->mime_type->str); - if (!*mime_type) { - return MIX_RESULT_NO_MEMORY; - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_frame_rate(MixVideoConfigParamsDec * obj, - guint frame_rate_num, guint frame_rate_denom) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->frame_rate_num = frame_rate_num; - obj->frame_rate_denom = frame_rate_denom; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_frame_rate(MixVideoConfigParamsDec * obj, - guint * frame_rate_num, guint * frame_rate_denom) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, frame_rate_num, frame_rate_denom); - *frame_rate_num = obj->frame_rate_num; - *frame_rate_denom = obj->frame_rate_denom; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_picture_res(MixVideoConfigParamsDec * obj, - guint picture_width, guint picture_height) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->picture_width = 
picture_width; - obj->picture_height = picture_height; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_picture_res(MixVideoConfigParamsDec * obj, - guint * picture_width, guint * picture_height) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, picture_width, picture_height); - *picture_width = obj->picture_width; - *picture_height = obj->picture_height; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_raw_format(MixVideoConfigParamsDec * obj, - guint raw_format) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - - /* TODO: check if the value of raw_format is valid */ - obj->raw_format = raw_format; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_raw_format(MixVideoConfigParamsDec * obj, - guint *raw_format) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, raw_format); - *raw_format = obj->raw_format; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_rate_control(MixVideoConfigParamsDec * obj, - guint rate_control) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - - /* TODO: check if the value of rate_control is valid */ - obj->rate_control = rate_control; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_rate_control(MixVideoConfigParamsDec * obj, - guint *rate_control) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, rate_control); - *rate_control = obj->rate_control; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_buffer_pool_size( - MixVideoConfigParamsDec * obj, guint bufpoolsize) { - - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - - obj->mixbuffer_pool_size = bufpoolsize; - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT mix_videoconfigparamsdec_get_buffer_pool_size( - MixVideoConfigParamsDec * obj, guint *bufpoolsize) { - - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, bufpoolsize); - *bufpoolsize = obj->mixbuffer_pool_size; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation( - MixVideoConfigParamsDec * obj, - guint extra_surface_allocation) { - - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - - obj->extra_surface_allocation = extra_surface_allocation; - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation( - MixVideoConfigParamsDec * obj, - guint *extra_surface_allocation) { - - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, extra_surface_allocation); - *extra_surface_allocation = obj->extra_surface_allocation; - return MIX_RESULT_SUCCESS; - -} - - -MIX_RESULT mix_videoconfigparamsdec_set_video_range( - MixVideoConfigParamsDec * obj, - guint8 video_range) -{ - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->video_range = video_range; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_video_range( - MixVideoConfigParamsDec * obj, - guint8 *video_range) -{ - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, video_range); - *video_range = obj->video_range; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_color_matrix( - MixVideoConfigParamsDec * obj, - guint8 color_matrix) -{ - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->color_matrix = color_matrix; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_color_matrix( - MixVideoConfigParamsDec * obj, - guint8 *color_matrix) -{ - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, color_matrix); - *color_matrix = obj->color_matrix; - return 
MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_bit_rate( - MixVideoConfigParamsDec * obj, - guint bit_rate) -{ - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->bit_rate = bit_rate; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_bit_rate( - MixVideoConfigParamsDec * obj, - guint *bit_rate) -{ - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, bit_rate); - *bit_rate = obj->bit_rate; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_pixel_aspect_ratio( - MixVideoConfigParamsDec * obj, - guint par_num, - guint par_denom) -{ - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->par_num = par_num; - obj->par_denom = par_denom; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_pixel_aspect_ratio( - MixVideoConfigParamsDec * obj, - guint * par_num, - guint * par_denom) -{ - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, par_num, par_denom); - *par_num = obj->par_num; - *par_denom = obj->par_denom; - return MIX_RESULT_SUCCESS; -} - - diff --git a/mix_video/src/mixvideoconfigparamsdec.cpp b/mix_video/src/mixvideoconfigparamsdec.cpp new file mode 100644 index 0000000..b34e9b3 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsdec.cpp @@ -0,0 +1,498 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideoconfigparamsdec + * @short_description: MI-X Video Decode Configuration Parameter Base Object + * + * A base object of MI-X video decode configuration parameter objects. 
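+ * + * A minimal usage sketch (hypothetical values; every call shown is defined later in this file or in its header): + * |[ + * MixVideoConfigParamsDec *cfg = mix_videoconfigparamsdec_new(); + * mix_videoconfigparamsdec_set_mime_type(cfg, "video/x-h264"); + * mix_videoconfigparamsdec_set_picture_res(cfg, 1280, 720); + * mix_videoconfigparamsdec_set_frame_rate(cfg, 30, 1); + * mix_videoconfigparamsdec_unref(cfg); + * ]|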
+ */ + +#include <string.h> +#include "mixvideolog.h" +#include "mixvideoconfigparamsdec.h" + +#define MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR(obj, prop, prop2) \ + if(!obj || !prop || !prop2 ) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \ + + +MixVideoConfigParamsDec::MixVideoConfigParamsDec() + :frame_order_mode(MIX_FRAMEORDER_MODE_DISPLAYORDER) + ,mime_type(NULL) + ,frame_rate_num(0) + ,frame_rate_denom(0) + ,picture_width(0) + ,picture_height(0) + ,raw_format(0) + ,rate_control(0) + ,mixbuffer_pool_size(0) + ,extra_surface_allocation(0) + ,video_range(0) + ,color_matrix(0) + ,bit_rate(0) + ,par_num(0) + ,par_denom(0) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) + +{ + memset(&this->header, 0, sizeof(header)); +} + +MixVideoConfigParamsDec::~MixVideoConfigParamsDec() { + /* free header */ + if (NULL != this->header.data) { + g_free(this->header.data); + memset(&this->header, 0, sizeof(this->header)); + } + + /* free mime_type; it stays NULL unless a mime type was set */ + if (NULL != this->mime_type) { + g_string_free(this->mime_type, TRUE); + } +} + +gboolean MixVideoConfigParamsDec::copy(MixParams *target) const { + MIX_RESULT mix_result = MIX_RESULT_FAIL; + MixVideoConfigParamsDec *this_target = MIX_VIDEOCONFIGPARAMSDEC(target); + LOG_V( "Begin\n"); + + if (NULL != this_target) { + /* copy properties of primitive type */ + this_target->frame_order_mode = this->frame_order_mode; + this_target->frame_rate_num = this->frame_rate_num; + this_target->frame_rate_denom = this->frame_rate_denom; + this_target->picture_width = this->picture_width; + this_target->picture_height = this->picture_height; + this_target->raw_format = this->raw_format; + this_target->rate_control = this->rate_control; + this_target->mixbuffer_pool_size = this->mixbuffer_pool_size; + this_target->extra_surface_allocation = this->extra_surface_allocation; + this_target->video_range = this->video_range; + this_target->color_matrix = this->color_matrix; + this_target->bit_rate = this->bit_rate; + this_target->par_num = this->par_num; + this_target->par_denom = this->par_denom; + + /* copy properties of non-primitive */ + + /* copy header */ + mix_result = mix_videoconfigparamsdec_set_header(this_target, + const_cast<MixIOVec *>(&this->header)); + + if (MIX_RESULT_SUCCESS != mix_result) { + LOG_E( "set_header failed: mix_result = 0x%x\n", mix_result); + return FALSE; + } + + /* copy mime_type */ + if (NULL != this->mime_type) { + mix_result = mix_videoconfigparamsdec_set_mime_type(this_target, + this->mime_type->str); + } else { + mix_result = mix_videoconfigparamsdec_set_mime_type(this_target, NULL); + } + + if (MIX_RESULT_SUCCESS != mix_result) { + LOG_E( "set_mime_type failed: mix_result = 0x%x\n", mix_result); + return FALSE; + } + + /* TODO: copy other properties if there's any */ + + /* Now chainup base class */ + return MixVideoConfigParams::copy(target); + } + + LOG_V( "End\n"); + + return FALSE; +} + +gboolean MixVideoConfigParamsDec::equal(MixParams* obj) const { + gboolean ret = FALSE; + MixVideoConfigParamsDec *this_obj = MIX_VIDEOCONFIGPARAMSDEC(obj); + + if (NULL != this_obj) { + // Deep compare +
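+ // Field-by-field comparison: primitives first, then the header buffer + // (the size fields plus a memcmp of the payload) and the mime_type + // GString, and finally a chain-up to MixVideoConfigParams::equal().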
+ /* check the equality of the primitive type properties */ + if (this->frame_order_mode != this_obj->frame_order_mode) { + goto not_equal; + } + + /* a mismatch in either the numerator or the denominator means different frame rates */ + if ((this->frame_rate_num != this_obj->frame_rate_num) || + (this->frame_rate_denom != this_obj->frame_rate_denom)) { + goto not_equal; + } + + if ((this->picture_width != this_obj->picture_width) || + (this->picture_height != this_obj->picture_height)) { + goto not_equal; + } + + if (this->raw_format != this_obj->raw_format) { + goto not_equal; + } + + if (this->rate_control != this_obj->rate_control) { + goto not_equal; + } + + if (this->mixbuffer_pool_size != this_obj->mixbuffer_pool_size) { + goto not_equal; + } + + if (this->extra_surface_allocation != this_obj->extra_surface_allocation) { + goto not_equal; + } + + /* check the equality of the non-primitive type properties */ + + /* MixIOVec header */ + + if (this->header.data_size != this_obj->header.data_size) { + goto not_equal; + } + + if (this->header.buffer_size != this_obj->header.buffer_size) { + goto not_equal; + } + + if (this->header.data && this_obj->header.data) { + if (memcmp(this->header.data, this_obj->header.data, + this_obj->header.data_size) != 0) { + goto not_equal; + } + } else if (!(!this->header.data && !this_obj->header.data)) { + goto not_equal; + } + + /* compare mime_type */ + if (this->mime_type && this_obj->mime_type) { + if (g_string_equal(this->mime_type, this_obj->mime_type) + != TRUE) { + goto not_equal; + } + } else if (!(!this->mime_type && !this_obj->mime_type)) { + goto not_equal; + } + + if (this->video_range != this_obj->video_range) { + goto not_equal; + } + + if (this->color_matrix != this_obj->color_matrix) { + goto not_equal; + } + + if (this->bit_rate != this_obj->bit_rate) { + goto not_equal; + } + + if (this->par_num != this_obj->par_num) { + goto not_equal; + } + + if (this->par_denom != this_obj->par_denom) { + goto not_equal; + } + ret = TRUE; + +not_equal: + + if (TRUE != ret) { + return ret; + } + + /* chaining up. */ + ret = MixVideoConfigParams::equal(obj); + } + + return ret; +} + +MixParams* MixVideoConfigParamsDec::dup() const { + MixParams *ret = NULL; + MixVideoConfigParamsDec *duplicate = new MixVideoConfigParamsDec(); + if (FALSE != copy(duplicate)) { + ret = duplicate; + } else { + mix_videoconfigparamsdec_unref(duplicate); + } + return ret; +} + +MixVideoConfigParamsDec * +mix_videoconfigparamsdec_new(void) { + return new MixVideoConfigParamsDec(); +} + +MixVideoConfigParamsDec * +mix_videoconfigparamsdec_ref(MixVideoConfigParamsDec * mix) { + return (MixVideoConfigParamsDec *) mix_params_ref(MIX_PARAMS(mix)); +} + + +/* TODO: Add getters and setters for other properties.
The following is incomplete */ + +MIX_RESULT mix_videoconfigparamsdec_set_frame_order_mode( + MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->frame_order_mode = frame_order_mode; + LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode); + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_frame_order_mode( + MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, frame_order_mode); + *frame_order_mode = obj->frame_order_mode; + LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode); + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_header( + MixVideoConfigParamsDec * obj, MixIOVec * header) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + + if (!header) { + return MIX_RESULT_NULL_PTR; + } + + if (header->data && header->buffer_size) { + obj->header.data = (guchar*)g_memdup(header->data, header->buffer_size); + if (!obj->header.data) { + return MIX_RESULT_NO_MEMORY; + } + obj->header.buffer_size = header->buffer_size; + obj->header.data_size = header->data_size; + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_header( + MixVideoConfigParamsDec * obj, MixIOVec ** header) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, header); + + if (obj->header.data && obj->header.buffer_size) { + *header = (MixIOVec*)g_malloc(sizeof(MixIOVec)); + if (*header == NULL) { + return MIX_RESULT_NO_MEMORY; + } + (*header)->data = (guchar*)g_memdup(obj->header.data, obj->header.buffer_size); + (*header)->buffer_size = obj->header.buffer_size; + (*header)->data_size = obj->header.data_size; + } else { + *header = NULL; + } + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videoconfigparamsdec_set_mime_type( + MixVideoConfigParamsDec * obj, const gchar * mime_type) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + if (!mime_type) { + return MIX_RESULT_NULL_PTR; + } + if (obj->mime_type) { + if (obj->mime_type->str) + g_string_free(obj->mime_type, TRUE); + else + g_string_free(obj->mime_type, FALSE); + } + obj->mime_type = g_string_new(mime_type); + if (!obj->mime_type) { + return MIX_RESULT_NO_MEMORY; + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_mime_type( + MixVideoConfigParamsDec * obj, gchar ** mime_type) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, mime_type); + if (!obj->mime_type) { + *mime_type = NULL; + return MIX_RESULT_SUCCESS; + } + *mime_type = g_strdup(obj->mime_type->str); + if (!*mime_type) { + return MIX_RESULT_NO_MEMORY; + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_frame_rate( + MixVideoConfigParamsDec * obj, guint frame_rate_num, + guint frame_rate_denom) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->frame_rate_num = frame_rate_num; + obj->frame_rate_denom = frame_rate_denom; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_frame_rate( + MixVideoConfigParamsDec * obj, guint * frame_rate_num, + guint * frame_rate_denom) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, frame_rate_num, frame_rate_denom); + *frame_rate_num = obj->frame_rate_num; + *frame_rate_denom = obj->frame_rate_denom; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_picture_res( + MixVideoConfigParamsDec * obj, guint picture_width, + guint picture_height) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + 
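/* store the output picture dimensions; the matching getter returns them unchanged */ +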
obj->picture_width = picture_width; + obj->picture_height = picture_height; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_picture_res( + MixVideoConfigParamsDec * obj, guint * picture_width, + guint * picture_height) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, picture_width, picture_height); + *picture_width = obj->picture_width; + *picture_height = obj->picture_height; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_raw_format( + MixVideoConfigParamsDec * obj, guint raw_format) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + + /* TODO: check if the value of raw_format is valid */ + obj->raw_format = raw_format; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_raw_format( + MixVideoConfigParamsDec * obj, guint *raw_format) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, raw_format); + *raw_format = obj->raw_format; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_rate_control( + MixVideoConfigParamsDec * obj, guint rate_control) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + + /* TODO: check if the value of rate_control is valid */ + obj->rate_control = rate_control; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_rate_control( + MixVideoConfigParamsDec * obj, guint *rate_control) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, rate_control); + *rate_control = obj->rate_control; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_buffer_pool_size( + MixVideoConfigParamsDec * obj, guint bufpoolsize) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->mixbuffer_pool_size = bufpoolsize; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_buffer_pool_size( + MixVideoConfigParamsDec * obj, guint *bufpoolsize) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, bufpoolsize); + *bufpoolsize = obj->mixbuffer_pool_size; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation( + MixVideoConfigParamsDec * obj, guint extra_surface_allocation) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->extra_surface_allocation = extra_surface_allocation; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation( + MixVideoConfigParamsDec * obj, guint *extra_surface_allocation) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, extra_surface_allocation); + *extra_surface_allocation = obj->extra_surface_allocation; + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoconfigparamsdec_set_video_range( + MixVideoConfigParamsDec * obj, guint8 video_range) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->video_range = video_range; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_video_range( + MixVideoConfigParamsDec * obj, guint8 *video_range) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, video_range); + *video_range = obj->video_range; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_color_matrix( + MixVideoConfigParamsDec * obj, guint8 color_matrix) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->color_matrix = color_matrix; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_color_matrix( + MixVideoConfigParamsDec * obj, guint8 *color_matrix) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, color_matrix); + *color_matrix = obj->color_matrix; + return 
MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_bit_rate( + MixVideoConfigParamsDec * obj, guint bit_rate) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->bit_rate = bit_rate; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_bit_rate( + MixVideoConfigParamsDec * obj, guint *bit_rate) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, bit_rate); + *bit_rate = obj->bit_rate; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_pixel_aspect_ratio( + MixVideoConfigParamsDec * obj, guint par_num, guint par_denom) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->par_num = par_num; + obj->par_denom = par_denom; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_pixel_aspect_ratio( + MixVideoConfigParamsDec * obj, guint * par_num, guint * par_denom) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, par_num, par_denom); + *par_num = obj->par_num; + *par_denom = obj->par_denom; + return MIX_RESULT_SUCCESS; +} + + diff --git a/mix_video/src/mixvideoconfigparamsdec.h b/mix_video/src/mixvideoconfigparamsdec.h index b4574c1..1e8657f 100644 --- a/mix_video/src/mixvideoconfigparamsdec.h +++ b/mix_video/src/mixvideoconfigparamsdec.h @@ -12,20 +12,12 @@ #include <mixvideoconfigparams.h> #include "mixvideodef.h" -G_BEGIN_DECLS - -/** - * MIX_TYPE_VIDEOCONFIGPARAMSDEC: - * - * Get type of class. - */ -#define MIX_TYPE_VIDEOCONFIGPARAMSDEC (mix_videoconfigparamsdec_get_type ()) /** * MIX_VIDEOCONFIGPARAMSDEC: * @obj: object to be type-casted. */ -#define MIX_VIDEOCONFIGPARAMSDEC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC, MixVideoConfigParamsDec)) +#define MIX_VIDEOCONFIGPARAMSDEC(obj) (reinterpret_cast<MixVideoConfigParamsDec*>(obj)) /** * MIX_IS_VIDEOCONFIGPARAMSDEC: @@ -33,41 +25,23 @@ G_BEGIN_DECLS * * Checks if the given object is an instance of #MixVideoConfigParamsDec */ -#define MIX_IS_VIDEOCONFIGPARAMSDEC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC)) - -/** - * MIX_VIDEOCONFIGPARAMSDEC_CLASS: - * @klass: class to be type-casted. - */ -#define MIX_VIDEOCONFIGPARAMSDEC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC, MixVideoConfigParamsDecClass)) - -/** - * MIX_IS_VIDEOCONFIGPARAMSDEC_CLASS: - * @klass: a class. - * - * Checks if the given class is #MixParamsClass - */ -#define MIX_IS_VIDEOCONFIGPARAMSDEC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC)) - -/** - * MIX_VIDEOCONFIGPARAMSDEC_GET_CLASS: - * @obj: a #MixParams object. - * - * Get the class instance of the object. - */ -#define MIX_VIDEOCONFIGPARAMSDEC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC, MixVideoConfigParamsDecClass)) - -typedef struct _MixVideoConfigParamsDec MixVideoConfigParamsDec; -typedef struct _MixVideoConfigParamsDecClass MixVideoConfigParamsDecClass; +#define MIX_IS_VIDEOCONFIGPARAMSDEC(obj) ((NULL != MIX_VIDEOCONFIGPARAMSDEC(obj)) ?
TRUE : FALSE) /** * MixVideoConfigParamsDec: * * MI-X VideoConfig Parameter object */ -struct _MixVideoConfigParamsDec { +class MixVideoConfigParamsDec : public MixVideoConfigParams { +public: + MixVideoConfigParamsDec(); + ~MixVideoConfigParamsDec(); + virtual gboolean copy(MixParams *target) const; + virtual gboolean equal(MixParams* obj) const; + virtual MixParams* dup() const; +public: /*< public > */ - MixVideoConfigParams parent; + //MixVideoConfigParams parent; /*< public > */ @@ -139,25 +113,13 @@ struct _MixVideoConfigParamsDec { void *reserved4; }; -/** - * MixVideoConfigParamsDecClass: - * - * MI-X VideoConfig object class - */ -struct _MixVideoConfigParamsDecClass { - /*< public > */ - MixVideoConfigParamsClass parent_class; - - /* class members */ -}; - /** * mix_videoconfigparamsdec_get_type: * @returns: type * * Get the type of object. */ -GType mix_videoconfigparamsdec_get_type(void); +//GType mix_videoconfigparamsdec_get_type(void); /** * mix_videoconfigparamsdec_new: @@ -493,6 +455,4 @@ MIX_RESULT mix_videoconfigparamsdec_get_pixel_aspect_ratio(MixVideoConfigParamsD /* TODO: Add getters and setters for other properties */ -G_END_DECLS - #endif /* __MIX_VIDEOCONFIGPARAMSDEC_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.c b/mix_video/src/mixvideoconfigparamsdec_h264.c deleted file mode 100644 index 8047171..0000000 --- a/mix_video/src/mixvideoconfigparamsdec_h264.c +++ /dev/null @@ -1,213 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** - * SECTION:mixvideoconfigparamsdec_h264 - * @short_description: MI-X Video H.264 Decode Configuration Parameter - * - * MI-X video H.264 decode configuration parameter objects. 
- */ - -#include "mixvideoconfigparamsdec_h264.h" - -static GType _mix_videoconfigparamsdec_h264_type = 0; -static MixVideoConfigParamsDecClass *parent_class = NULL; - -#define _do_init { _mix_videoconfigparamsdec_h264_type = g_define_type_id; } - -gboolean mix_videoconfigparamsdec_h264_copy (MixParams * target, - const MixParams * src); -MixParams *mix_videoconfigparamsdec_h264_dup (const MixParams * obj); -gboolean mix_videoconfigparamsdec_h264_equal (MixParams * first, - MixParams * second); -static void mix_videoconfigparamsdec_h264_finalize (MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsDecH264, /* The name of the new type, in Camel case */ - mix_videoconfigparamsdec_h264, /* The name of the new type in lowercase */ - MIX_TYPE_VIDEOCONFIGPARAMSDEC, /* The GType of the parent type */ - _do_init); - -void -_mix_videoconfigparamsdec_h264_initialize (void) -{ - /* the MixParams types need to be class_ref'd once before it can be - * done from multiple threads; - * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ - g_type_class_ref (mix_videoconfigparamsdec_h264_get_type ()); -} - -static void -mix_videoconfigparamsdec_h264_init (MixVideoConfigParamsDecH264 * self) -{ - /* initialize properties here */ - /* TODO: initialize properties */ - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void -mix_videoconfigparamsdec_h264_class_init (MixVideoConfigParamsDecH264Class * klass) -{ - MixVideoConfigParamsDecClass *this_parent_class = - MIX_VIDEOCONFIGPARAMSDEC_CLASS (klass); - MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); - - /* setup static parent class */ - parent_class = - (MixVideoConfigParamsDecClass *) g_type_class_peek_parent (klass); - - this_root_class->finalize = mix_videoconfigparamsdec_h264_finalize; - this_root_class->copy = - (MixParamsCopyFunction) mix_videoconfigparamsdec_h264_copy; - this_root_class->dup = - (MixParamsDupFunction) mix_videoconfigparamsdec_h264_dup; - this_root_class->equal = - (MixParamsEqualFunction) mix_videoconfigparamsdec_h264_equal; -} - -MixVideoConfigParamsDecH264 * -mix_videoconfigparamsdec_h264_new (void) -{ - MixVideoConfigParamsDecH264 *ret = (MixVideoConfigParamsDecH264 *) - g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264); - - return ret; -} - -void -mix_videoconfigparamsdec_h264_finalize (MixParams * obj) -{ - /* MixVideoConfigParamsDecH264 *this_obj = MIX_VIDEOCONFIGPARAMSDEC_H264 (obj); */ - MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); - - /* TODO: cleanup resources allocated */ - - /* Chain up parent */ - - if (root_class->finalize) - { - root_class->finalize (obj); - } -} - -MixVideoConfigParamsDecH264 - * mix_videoconfigparamsdec_h264_ref (MixVideoConfigParamsDecH264 * mix) -{ - return (MixVideoConfigParamsDecH264 *) mix_params_ref (MIX_PARAMS (mix)); -} - -/** -* mix_videoconfigparamsdec_h264_dup: -* @obj: a #MixVideoConfigParamsDec object -* @returns: a newly allocated duplicate of the object. -* -* Copy duplicate of the object. 
-*/ -MixParams * -mix_videoconfigparamsdec_h264_dup (const MixParams * obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (obj)) - { - MixVideoConfigParamsDecH264 *duplicate = mix_videoconfigparamsdec_h264_new (); - if (mix_videoconfigparamsdec_h264_copy - (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) - { - ret = MIX_PARAMS (duplicate); - } - else - { - mix_videoconfigparamsdec_h264_unref (duplicate); - } - } - return ret; -} - -/** -* mix_videoconfigparamsdec_h264_copy: -* @target: copy to target -* @src: copy from src -* @returns: boolean indicating whether the copy succeeded. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videoconfigparamsdec_h264_copy (MixParams * target, const MixParams * src) -{ - MixVideoConfigParamsDecH264 *this_target, *this_src; - MixParamsClass *root_class; - - if (MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (target) - && MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (src)) - { - // Cast the base object to this child object - this_target = MIX_VIDEOCONFIGPARAMSDEC_H264 (target); - this_src = MIX_VIDEOCONFIGPARAMSDEC_H264 (src); - - // TODO: copy properties - - // Now chainup base class - root_class = MIX_PARAMS_CLASS (parent_class); - - if (root_class->copy) - { - return root_class->copy (MIX_PARAMS_CAST (target), - MIX_PARAMS_CAST (src)); - } - else - { - return TRUE; - } - } - return FALSE; -} - -/** -* mix_videoconfigparamsdec_h264_equal: -* @first: first object to compare -* @second: second object to compare -* @returns: boolean indicating whether the two instances are equal. -* -* Compare the instance data of @first and @second. -*/ -gboolean -mix_videoconfigparamsdec_h264_equal (MixParams * first, MixParams * second) -{ - gboolean ret = FALSE; - MixVideoConfigParamsDecH264 *this_first, *this_second; - - if (MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (first) - && MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (second)) - { - // Cast the base object to this child object - - this_first = MIX_VIDEOCONFIGPARAMSDEC_H264 (first); - this_second = MIX_VIDEOCONFIGPARAMSDEC_H264 (second); - - /* TODO: add comparison for properties */ - { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); - if (klass->equal) - { - ret = klass->equal (first, second); - } - else - { - ret = TRUE; - } - } - } - - return ret; -} - -/* TODO: Add getters and setters for properties if any */ diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.cpp b/mix_video/src/mixvideoconfigparamsdec_h264.cpp new file mode 100644 index 0000000..bd96340 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsdec_h264.cpp @@ -0,0 +1,66 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+ +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixvideoconfigparamsdec_h264 + * @short_description: MI-X Video H.264 Decode Configuration Parameter + * + * MI-X video H.264 decode configuration parameter objects. + */ + +#include "mixvideoconfigparamsdec_h264.h" + +MixVideoConfigParamsDecH264::MixVideoConfigParamsDecH264() + :reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { +} + +MixVideoConfigParamsDecH264::~MixVideoConfigParamsDecH264() { +} + +gboolean MixVideoConfigParamsDecH264::copy(MixParams *target) const { + gboolean ret = FALSE; + MixVideoConfigParamsDecH264 * this_target = MIX_VIDEOCONFIGPARAMSDEC_H264(target); + if (NULL != this_target) + ret = MixVideoConfigParamsDec::copy(target); + return ret; +} + +gboolean MixVideoConfigParamsDecH264::equal(MixParams* obj) const { + gboolean ret = FALSE; + MixVideoConfigParamsDecH264 * this_obj = MIX_VIDEOCONFIGPARAMSDEC_H264(obj); + if (NULL != this_obj) + ret = MixVideoConfigParamsDec::equal(this_obj); + return ret; +} + +MixParams* MixVideoConfigParamsDecH264::dup() const { + MixParams *ret = new MixVideoConfigParamsDecH264(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; +} + +MixVideoConfigParamsDecH264 * +mix_videoconfigparamsdec_h264_new (void) { + return new MixVideoConfigParamsDecH264(); +} + +MixVideoConfigParamsDecH264* +mix_videoconfigparamsdec_h264_ref (MixVideoConfigParamsDecH264 * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} + +/* TODO: Add getters and setters for properties if any */ diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.h b/mix_video/src/mixvideoconfigparamsdec_h264.h index 8a99313..f445fa8 100644 --- a/mix_video/src/mixvideoconfigparamsdec_h264.h +++ b/mix_video/src/mixvideoconfigparamsdec_h264.h @@ -12,20 +12,11 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoconfigparamsdec.h" #include "mixvideodef.h" -G_BEGIN_DECLS - -/** -* MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264: -* -* Get type of class. -*/ -#define MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264 (mix_videoconfigparamsdec_h264_get_type ()) - /** * MIX_VIDEOCONFIGPARAMSDEC_H264: * @obj: object to be type-casted. */ -#define MIX_VIDEOCONFIGPARAMSDEC_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264, MixVideoConfigParamsDecH264)) +#define MIX_VIDEOCONFIGPARAMSDEC_H264(obj) (reinterpret_cast<MixVideoConfigParamsDecH264*>(obj)) /** * MIX_IS_VIDEOCONFIGPARAMSDEC_H264: @@ -33,43 +24,23 @@ G_BEGIN_DECLS * * Checks if the given object is an instance of #MixVideoConfigParamsDecH264 */ -#define MIX_IS_VIDEOCONFIGPARAMSDEC_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264)) - -/** -* MIX_VIDEOCONFIGPARAMSDEC_H264_CLASS: -* @klass: class to be type-casted. -*/ -#define MIX_VIDEOCONFIGPARAMSDEC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264, MixVideoConfigParamsDecH264Class)) - -/** -* MIX_IS_VIDEOCONFIGPARAMSDEC_H264_CLASS: -* @klass: a class.
-* -* Checks if the given class is #MixVideoConfigParamsDecH264Class -*/ -#define MIX_IS_VIDEOCONFIGPARAMSDEC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264)) - -/** -* MIX_VIDEOCONFIGPARAMSDEC_H264_GET_CLASS: -* @obj: a #MixParams object. -* -* Get the class instance of the object. -*/ -#define MIX_VIDEOCONFIGPARAMSDEC_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264, MixVideoConfigParamsDecH264Class)) +#define MIX_IS_VIDEOCONFIGPARAMSDEC_H264(obj) ((NULL != MIX_VIDEOCONFIGPARAMSDEC_H264(obj)) ? TRUE : FALSE) -typedef struct _MixVideoConfigParamsDecH264 MixVideoConfigParamsDecH264; -typedef struct _MixVideoConfigParamsDecH264Class MixVideoConfigParamsDecH264Class; /** * MixVideoConfigParamsDecH264: * * MI-X VideoConfig Parameter object */ -struct _MixVideoConfigParamsDecH264 +class MixVideoConfigParamsDecH264 : public MixVideoConfigParamsDec { - /*< public > */ - MixVideoConfigParamsDec parent; - +public: + MixVideoConfigParamsDecH264(); + ~MixVideoConfigParamsDecH264(); + virtual gboolean copy(MixParams *target) const; + virtual gboolean equal(MixParams* obj) const; + virtual MixParams* dup() const; +public: /*< public > */ /* TODO: Add H.264 configuration paramters */ @@ -87,18 +58,7 @@ struct _MixVideoConfigParamsDecH264 void *reserved4; }; -/** -* MixVideoConfigParamsDecH264Class: -* -* MI-X VideoConfig object class -*/ -struct _MixVideoConfigParamsDecH264Class -{ - /*< public > */ - MixVideoConfigParamsDecClass parent_class; - /* class members */ -}; /** * mix_videoconfigparamsdec_h264_get_type: @@ -106,7 +66,7 @@ struct _MixVideoConfigParamsDecH264Class * * Get the type of object. */ -GType mix_videoconfigparamsdec_h264_get_type (void); +//GType mix_videoconfigparamsdec_h264_get_type (void); /** * mix_videoconfigparamsdec_h264_new: @@ -136,7 +96,4 @@ MixVideoConfigParamsDecH264 /* Class Methods */ /* TODO: Add getters and setters for other properties */ - -G_END_DECLS - #endif /* __MIX_VIDEOCONFIGPARAMSDEC_H264_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.c b/mix_video/src/mixvideoconfigparamsdec_mp42.c deleted file mode 100644 index fe7efc6..0000000 --- a/mix_video/src/mixvideoconfigparamsdec_mp42.c +++ /dev/null @@ -1,245 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
- */ - -/** - * SECTION:mixvideoconfigparamsdec_mp42 - * @short_description: MI-X Video MPEG 4:2 Decode Configuration Parameter - * - * MI-X video MPEG 4:2 decode configuration parameter objects. - */ - - -#include "mixvideolog.h" -#include "mixvideoconfigparamsdec_mp42.h" - -static GType _mix_videoconfigparamsdec_mp42_type = 0; -static MixVideoConfigParamsDecClass *parent_class = NULL; - -#define _do_init { _mix_videoconfigparamsdec_mp42_type = g_define_type_id; } - -gboolean mix_videoconfigparamsdec_mp42_copy(MixParams * target, - const MixParams * src); -MixParams *mix_videoconfigparamsdec_mp42_dup(const MixParams * obj); -gboolean - mix_videoconfigparamsdec_mp42_equal(MixParams * first, MixParams * second); -static void mix_videoconfigparamsdec_mp42_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsDecMP42, /* The name of the new type, in Camel case */ - mix_videoconfigparamsdec_mp42, /* The name of the new type in lowercase */ - MIX_TYPE_VIDEOCONFIGPARAMSDEC, /* The GType of the parent type */ - _do_init); - -void _mix_videoconfigparamsdec_mp42_initialize(void) { - /* the MixParams types need to be class_ref'd once before it can be - * done from multiple threads; - * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ - g_type_class_ref(mix_videoconfigparamsdec_mp42_get_type()); -} - -static void mix_videoconfigparamsdec_mp42_init(MixVideoConfigParamsDecMP42 * self) { - /* initialize properties here */ - /* TODO: initialize properties */ - - self->mpegversion = 0; - self->divxversion = 0; - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; - -} - -static void mix_videoconfigparamsdec_mp42_class_init( - MixVideoConfigParamsDecMP42Class * klass) { - MixVideoConfigParamsDecClass *this_parent_class = MIX_VIDEOCONFIGPARAMSDEC_CLASS( - klass); - MixParamsClass *this_root_class = MIX_PARAMS_CLASS(this_parent_class); - - /* setup static parent class */ - parent_class - = (MixVideoConfigParamsDecClass *) g_type_class_peek_parent(klass); - - this_root_class->finalize = mix_videoconfigparamsdec_mp42_finalize; - this_root_class->copy - = (MixParamsCopyFunction) mix_videoconfigparamsdec_mp42_copy; - this_root_class->dup - = (MixParamsDupFunction) mix_videoconfigparamsdec_mp42_dup; - this_root_class->equal - = (MixParamsEqualFunction) mix_videoconfigparamsdec_mp42_equal; -} - -MixVideoConfigParamsDecMP42 * -mix_videoconfigparamsdec_mp42_new(void) { - MixVideoConfigParamsDecMP42 *ret = - (MixVideoConfigParamsDecMP42 *) g_type_create_instance( - MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42); - - return ret; -} - -void mix_videoconfigparamsdec_mp42_finalize(MixParams * obj) { - /* MixVideoConfigParamsDecMP42 *this_obj = MIX_VIDEOCONFIGPARAMSDEC_MP42 (obj); */ - MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class); - - /* TODO: cleanup resources allocated */ - - /* Chain up parent */ - - if (root_class->finalize) { - root_class->finalize(obj); - } -} - -MixVideoConfigParamsDecMP42 * -mix_videoconfigparamsdec_mp42_ref(MixVideoConfigParamsDecMP42 * mix) { - return (MixVideoConfigParamsDecMP42 *) mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_videoconfigparamsdec_mp42_dup: - * @obj: a #MixVideoConfigParamsDec object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. 
- */ -MixParams * -mix_videoconfigparamsdec_mp42_dup(const MixParams * obj) { - MixParams *ret = NULL; - - LOG_V( "Begin\n"); - if (MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) { - MixVideoConfigParamsDecMP42 *duplicate = mix_videoconfigparamsdec_mp42_new(); - LOG_V( "duplicate = 0x%x\n", duplicate); - if (mix_videoconfigparamsdec_mp42_copy(MIX_PARAMS(duplicate), MIX_PARAMS( - obj))) { - ret = MIX_PARAMS(duplicate); - } else { - mix_videoconfigparamsdec_mp42_unref(duplicate); - } - } - LOG_V( "End\n"); - return ret; -} - -/** - * mix_videoconfigparamsdec_mp42_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicating whether the copy succeeded. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videoconfigparamsdec_mp42_copy(MixParams * target, - const MixParams * src) { - MixVideoConfigParamsDecMP42 *this_target, *this_src; - MixParamsClass *root_class; - - LOG_V( "Begin\n"); - if (MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(target) && MIX_IS_VIDEOCONFIGPARAMSDEC_MP42( - src)) { - // Cast the base object to this child object - this_target = MIX_VIDEOCONFIGPARAMSDEC_MP42(target); - this_src = MIX_VIDEOCONFIGPARAMSDEC_MP42(src); - - // TODO: copy properties - this_target->mpegversion = this_src->mpegversion; - this_target->divxversion = this_src->divxversion; - - // Now chainup base class - root_class = MIX_PARAMS_CLASS(parent_class); - - if (root_class->copy) { - LOG_V( "root_class->copy != NULL\n"); - return root_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - LOG_V( "root_class->copy == NULL\n\n"); - return TRUE; - } - } - LOG_V( "End\n"); - return FALSE; -} - -/** - * mix_videoconfigparamsdec_mp42_equal: - * @first: first object to compare - * @second: second object to compare - * @returns: boolean indicating whether the two instances are equal. - * - * Compare the instance data of @first and @second. - */ -gboolean mix_videoconfigparamsdec_mp42_equal(MixParams * first, MixParams * second) { - gboolean ret = FALSE; - MixVideoConfigParamsDecMP42 *this_first, *this_second; - - if (MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(first) && MIX_IS_VIDEOCONFIGPARAMSDEC_MP42( - second)) { - // Deep compare - // Cast the base object to this child object - - this_first = MIX_VIDEOCONFIGPARAMSDEC_MP42(first); - this_second = MIX_VIDEOCONFIGPARAMSDEC_MP42(second); - - /* TODO: add comparison for properties */ - { - // members within this scope equal. chaining up.
- MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) { - ret = klass->equal(first, second); - } else { - ret = TRUE; - } - } - } - - return ret; -} - -/* TODO: Add getters and setters for properties if any */ - -#define MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) return MIX_RESULT_FAIL; \ - - -MIX_RESULT mix_videoconfigparamsdec_mp42_set_mpegversion( - MixVideoConfigParamsDecMP42 *obj, guint version) { - MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT (obj); - obj->mpegversion = version; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_mp42_get_mpegversion( - MixVideoConfigParamsDecMP42 *obj, guint *version) { - MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT (obj, version); - *version = obj->mpegversion; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion( - MixVideoConfigParamsDecMP42 *obj, guint version) { - - MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT (obj); - obj->divxversion = version; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion( - MixVideoConfigParamsDecMP42 *obj, guint *version) { - - MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT (obj, version); - *version = obj->divxversion; - return MIX_RESULT_SUCCESS; - -} - diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.cpp b/mix_video/src/mixvideoconfigparamsdec_mp42.cpp new file mode 100644 index 0000000..c610411 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsdec_mp42.cpp @@ -0,0 +1,112 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideoconfigparamsdec_mp42 + * @short_description: MI-X Video MPEG 4:2 Decode Configuration Parameter + * + * MI-X video MPEG 4:2 decode configuration parameter objects. 
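+ * + * A short usage sketch (hypothetical version values; the setters are defined below, and the unref helper comes from the C API): + * |[ + * MixVideoConfigParamsDecMP42 *cfg = mix_videoconfigparamsdec_mp42_new(); + * mix_videoconfigparamsdec_mp42_set_mpegversion(cfg, 4); + * mix_videoconfigparamsdec_mp42_set_divxversion(cfg, 5); + * mix_videoconfigparamsdec_mp42_unref(cfg); + * ]|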
+ */ + + +#include "mixvideolog.h" +#include "mixvideoconfigparamsdec_mp42.h" + +#define MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) return MIX_RESULT_FAIL; \ + +MixVideoConfigParamsDecMP42::MixVideoConfigParamsDecMP42() + :mpegversion(0) + ,divxversion(0) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { +} + +MixVideoConfigParamsDecMP42::~MixVideoConfigParamsDecMP42() { +} + +gboolean MixVideoConfigParamsDecMP42::copy(MixParams *target) const { + gboolean ret = FALSE; + MixVideoConfigParamsDecMP42 * this_target = MIX_VIDEOCONFIGPARAMSDEC_MP42(target); + if (NULL != this_target) { + this_target->mpegversion = this->mpegversion; + this_target->divxversion = this->divxversion; + ret = MixVideoConfigParamsDec::copy(target); + } + return ret; +} + +gboolean MixVideoConfigParamsDecMP42::equal(MixParams* obj) const { + gboolean ret = FALSE; + MixVideoConfigParamsDecMP42 * this_obj = MIX_VIDEOCONFIGPARAMSDEC_MP42(obj); + if (NULL != this_obj) + ret = MixVideoConfigParamsDec::equal(this_obj); + return ret; +} + +MixParams* MixVideoConfigParamsDecMP42::dup() const { + MixParams *ret = new MixVideoConfigParamsDecMP42(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; +} + + +MixVideoConfigParamsDecMP42 * +mix_videoconfigparamsdec_mp42_new(void) { + return new MixVideoConfigParamsDecMP42(); +} + +MixVideoConfigParamsDecMP42 * +mix_videoconfigparamsdec_mp42_ref(MixVideoConfigParamsDecMP42 * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} + +/* TODO: Add getters and setters for properties if any */ +MIX_RESULT mix_videoconfigparamsdec_mp42_set_mpegversion( + MixVideoConfigParamsDecMP42 *obj, guint version) { + MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT (obj); + obj->mpegversion = version; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_mp42_get_mpegversion( + MixVideoConfigParamsDecMP42 *obj, guint *version) { + MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT (obj, version); + *version = obj->mpegversion; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion( + MixVideoConfigParamsDecMP42 *obj, guint version) { + MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT (obj); + obj->divxversion = version; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion( + MixVideoConfigParamsDecMP42 *obj, guint *version) { + MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT (obj, version); + *version = obj->divxversion; + return MIX_RESULT_SUCCESS; + +} + diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.h b/mix_video/src/mixvideoconfigparamsdec_mp42.h index 6e3d84c..b4b73b7 100644 --- a/mix_video/src/mixvideoconfigparamsdec_mp42.h +++ b/mix_video/src/mixvideoconfigparamsdec_mp42.h @@ -12,20 +12,11 @@ #include "mixvideoconfigparamsdec.h" #include "mixvideodef.h" -G_BEGIN_DECLS - -/** - * MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42: - * - * Get type of class. - */ -#define MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42 (mix_videoconfigparamsdec_mp42_get_type ()) - /** * MIX_VIDEOCONFIGPARAMSDEC_MP42: * @obj: object to be type-casted. 
 */
-#define MIX_VIDEOCONFIGPARAMSDEC_MP42(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42, MixVideoConfigParamsDecMP42))
+#define MIX_VIDEOCONFIGPARAMSDEC_MP42(obj) (reinterpret_cast<MixVideoConfigParamsDecMP42*>(obj))

/**
 * MIX_IS_VIDEOCONFIGPARAMSDEC_MP42:
@@ -33,42 +24,22 @@ G_BEGIN_DECLS
 *
 * Checks if the given object is an instance of #MixVideoConfigParamsDecMP42
 */
-#define MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42))
-
-/**
- * MIX_VIDEOCONFIGPARAMSDEC_MP42_CLASS:
- * @klass: class to be type-casted.
- */
-#define MIX_VIDEOCONFIGPARAMSDEC_MP42_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42, MixVideoConfigParamsDecMP42Class))
-
-/**
- * MIX_IS_VIDEOCONFIGPARAMSDEC_MP42_CLASS:
- * @klass: a class.
- *
- * Checks if the given class is #MixVideoConfigParamsDecMP42Class
- */
-#define MIX_IS_VIDEOCONFIGPARAMSDEC_MP42_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42))
-
-/**
- * MIX_VIDEOCONFIGPARAMSDEC_MP42_GET_CLASS:
- * @obj: a #MixParams object.
- *
- * Get the class instance of the object.
- */
-#define MIX_VIDEOCONFIGPARAMSDEC_MP42_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42, MixVideoConfigParamsDecMP42Class))
+#define MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj) ((NULL != MIX_VIDEOCONFIGPARAMSDEC_MP42(obj)) ? TRUE : FALSE)

-typedef struct _MixVideoConfigParamsDecMP42 MixVideoConfigParamsDecMP42;
-typedef struct _MixVideoConfigParamsDecMP42Class MixVideoConfigParamsDecMP42Class;

/**
 * MixVideoConfigParamsDecMP42:
 *
 * MI-X VideoConfig Parameter object
 */
-struct _MixVideoConfigParamsDecMP42 {
-    /*< public > */
-    MixVideoConfigParamsDec parent;
-
+class MixVideoConfigParamsDecMP42 : public MixVideoConfigParamsDec {
+public:
+    MixVideoConfigParamsDecMP42();
+    ~MixVideoConfigParamsDecMP42();
+    virtual gboolean copy(MixParams *target) const;
+    virtual gboolean equal(MixParams* obj) const;
+    virtual MixParams* dup() const;
+public:
    /*< public > */

    /* MPEG version */
@@ -90,25 +61,13 @@ struct _MixVideoConfigParamsDecMP42 {
    void *reserved4;
};

-/**
- * MixVideoConfigParamsDecMP42Class:
- *
- * MI-X VideoConfig object class
- */
-struct _MixVideoConfigParamsDecMP42Class {
-    /*< public > */
-    MixVideoConfigParamsDecClass parent_class;
-
-/* class members */
-};
-
/**
 * mix_videoconfigparamsdec_mp42_get_type:
 * @returns: type
 *
 * Get the type of object.
 */
-GType mix_videoconfigparamsdec_mp42_get_type(void);
+//GType mix_videoconfigparamsdec_mp42_get_type(void);

/**
 * mix_videoconfigparamsdec_mp42_new:
 * @returns: A newly allocated instance of #MixVideoConfigParamsDecMP42
@@ -184,6 +143,6 @@ MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion(
MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion(
    MixVideoConfigParamsDecMP42 *obj, guint *version);

-G_END_DECLS
+
#endif /* __MIX_VIDEOCONFIGPARAMSDEC_MP42_H__ */

diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.c b/mix_video/src/mixvideoconfigparamsdec_vc1.c
deleted file mode 100644
index 635487c..0000000
--- a/mix_video/src/mixvideoconfigparamsdec_vc1.c
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors.
The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideoconfigparamsdec_vc1 - * @short_description: MI-X Video VC-1 Decode Configuration Parameter - * - * MI-X video VC-1 decode configuration parameter objects. - */ - - -#include "mixvideoconfigparamsdec_vc1.h" - -static GType _mix_videoconfigparamsdec_vc1_type = 0; -static MixVideoConfigParamsDecClass *parent_class = NULL; - -#define _do_init { _mix_videoconfigparamsdec_vc1_type = g_define_type_id; } - -gboolean mix_videoconfigparamsdec_vc1_copy(MixParams * target, - const MixParams * src); -MixParams *mix_videoconfigparamsdec_vc1_dup(const MixParams * obj); -gboolean mix_videoconfigparamsdec_vc1_equal(MixParams * first, MixParams * second); -static void mix_videoconfigparamsdec_vc1_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsDecVC1, /* The name of the new type, in Camel case */ - mix_videoconfigparamsdec_vc1, /* The name of the new type in lowercase */ - MIX_TYPE_VIDEOCONFIGPARAMSDEC, /* The GType of the parent type */ - _do_init); - -void _mix_videoconfigparamsdec_vc1_initialize(void) { - /* the MixParams types need to be class_ref'd once before it can be - * done from multiple threads; - * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ - g_type_class_ref(mix_videoconfigparamsdec_vc1_get_type()); -} - -static void mix_videoconfigparamsdec_vc1_init(MixVideoConfigParamsDecVC1 * self) { - /* initialize properties here */ - /* TODO: initialize properties */ - - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void mix_videoconfigparamsdec_vc1_class_init( - MixVideoConfigParamsDecVC1Class * klass) { - MixVideoConfigParamsDecClass *this_parent_class = MIX_VIDEOCONFIGPARAMSDEC_CLASS( - klass); - MixParamsClass *this_root_class = MIX_PARAMS_CLASS(this_parent_class); - - /* setup static parent class */ - parent_class - = (MixVideoConfigParamsDecClass *) g_type_class_peek_parent(klass); - - this_root_class->finalize = mix_videoconfigparamsdec_vc1_finalize; - this_root_class->copy - = (MixParamsCopyFunction) mix_videoconfigparamsdec_vc1_copy; - this_root_class->dup = (MixParamsDupFunction) mix_videoconfigparamsdec_vc1_dup; - this_root_class->equal - = (MixParamsEqualFunction) mix_videoconfigparamsdec_vc1_equal; -} - -MixVideoConfigParamsDecVC1 * -mix_videoconfigparamsdec_vc1_new(void) { - MixVideoConfigParamsDecVC1 *ret = - (MixVideoConfigParamsDecVC1 *) g_type_create_instance( - MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1); - - return ret; -} - -void mix_videoconfigparamsdec_vc1_finalize(MixParams * obj) { - /* MixVideoConfigParamsDecVC1 *this_obj = MIX_VIDEOCONFIGPARAMSDEC_VC1 (obj); */ - MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class); - - /* TODO: cleanup resources allocated */ - - /* Chain up parent */ - - if (root_class->finalize) { - root_class->finalize(obj); - } -} - -MixVideoConfigParamsDecVC1 * 
-mix_videoconfigparamsdec_vc1_ref(MixVideoConfigParamsDecVC1 * mix) { - return (MixVideoConfigParamsDecVC1 *) mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_videoconfigparamsdec_vc1_dup: - * @obj: a #MixVideoConfigParamsDec object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixParams * -mix_videoconfigparamsdec_vc1_dup(const MixParams * obj) { - MixParams *ret = NULL; - - if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(obj)) { - MixVideoConfigParamsDecVC1 *duplicate = mix_videoconfigparamsdec_vc1_new(); - if (mix_videoconfigparamsdec_vc1_copy(MIX_PARAMS(duplicate), MIX_PARAMS( - obj))) { - ret = MIX_PARAMS(duplicate); - } else { - mix_videoconfigparamsdec_vc1_unref(duplicate); - } - } - return ret; -} - -/** - * mix_videoconfigparamsdec_vc1_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videoconfigparamsdec_vc1_copy(MixParams * target, - const MixParams * src) { - MixVideoConfigParamsDecVC1 *this_target, *this_src; - MixParamsClass *root_class; - - if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(target) && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1( - src)) { - // Cast the base object to this child object - this_target = MIX_VIDEOCONFIGPARAMSDEC_VC1(target); - this_src = MIX_VIDEOCONFIGPARAMSDEC_VC1(src); - - // TODO: copy properties */ - - // Now chainup base class - root_class = MIX_PARAMS_CLASS(parent_class); - - if (root_class->copy) { - return root_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - return TRUE; - } - } - return FALSE; -} - -/** - * mix_videoconfigparamsdec_vc1: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videoconfigparamsdec_vc1_equal(MixParams * first, MixParams * second) { - gboolean ret = FALSE; - MixVideoConfigParamsDecVC1 *this_first, *this_second; - - if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(first) && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1( - second)) { - // Deep compare - // Cast the base object to this child object - - this_first = MIX_VIDEOCONFIGPARAMSDEC_VC1(first); - this_second = MIX_VIDEOCONFIGPARAMSDEC_VC1(second); - - /* TODO: add comparison for properties */ - { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) { - ret = klass->equal(first, second); - } else { - ret = TRUE; - } - } - } - - return ret; -} - -/* TODO: Add getters and setters for properties if any */ diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.cpp b/mix_video/src/mixvideoconfigparamsdec_vc1.cpp new file mode 100644 index 0000000..baf1acc --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsdec_vc1.cpp @@ -0,0 +1,71 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideoconfigparamsdec_vc1
+ * @short_description: MI-X Video VC-1 Decode Configuration Parameter
+ *
+ * MI-X video VC-1 decode configuration parameter objects.
+ */
+
+
+#include "mixvideoconfigparamsdec_vc1.h"
+
+MixVideoConfigParamsDecVC1::MixVideoConfigParamsDecVC1()
+    :reserved1(NULL)
+    ,reserved2(NULL)
+    ,reserved3(NULL)
+    ,reserved4(NULL) {
+}
+
+MixVideoConfigParamsDecVC1::~MixVideoConfigParamsDecVC1() {
+}
+
+gboolean MixVideoConfigParamsDecVC1::copy(MixParams *target) const {
+    gboolean ret = FALSE;
+    MixVideoConfigParamsDecVC1 * this_target = MIX_VIDEOCONFIGPARAMSDEC_VC1(target);
+    if (NULL != this_target) {
+        ret = MixVideoConfigParamsDec::copy(target);
+    }
+    return ret;
+}
+
+gboolean MixVideoConfigParamsDecVC1::equal(MixParams* obj) const {
+    gboolean ret = FALSE;
+    MixVideoConfigParamsDecVC1 * this_obj = MIX_VIDEOCONFIGPARAMSDEC_VC1(obj);
+    if (NULL != this_obj)
+        ret = MixVideoConfigParamsDec::equal(this_obj);
+    return ret;
+}
+
+MixParams* MixVideoConfigParamsDecVC1::dup() const {
+    MixParams *ret = new MixVideoConfigParamsDecVC1();
+    if (NULL != ret) {
+        if (FALSE == copy(ret)) {
+            ret->Unref();
+            ret = NULL;
+        }
+    }
+    return ret;
+}
+
+MixVideoConfigParamsDecVC1 *
+mix_videoconfigparamsdec_vc1_new(void) {
+    return new MixVideoConfigParamsDecVC1();
+}
+
+MixVideoConfigParamsDecVC1 *
+mix_videoconfigparamsdec_vc1_ref(
+    MixVideoConfigParamsDecVC1 * mix) {
+    if (NULL != mix)
+        mix->Ref();
+    return mix;
+}
+
+
+/* TODO: Add getters and setters for properties if any */

diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.h b/mix_video/src/mixvideoconfigparamsdec_vc1.h
index d38f975..559ab82 100644
--- a/mix_video/src/mixvideoconfigparamsdec_vc1.h
+++ b/mix_video/src/mixvideoconfigparamsdec_vc1.h
@@ -12,20 +12,11 @@ No license under any patent, copyright, trade secret or other intellectual prope
 #include "mixvideoconfigparamsdec.h"
 #include "mixvideodef.h"
-G_BEGIN_DECLS
-
-/**
-* MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1:
-*
-* Get type of class.
-*/
-#define MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1 (mix_videoconfigparamsdec_vc1_get_type ())
-
 /**
 * MIX_VIDEOCONFIGPARAMSDEC_VC1:
 * @obj: object to be type-casted.
 */
-#define MIX_VIDEOCONFIGPARAMSDEC_VC1(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1, MixVideoConfigParamsDecVC1))
+#define MIX_VIDEOCONFIGPARAMSDEC_VC1(obj) (reinterpret_cast<MixVideoConfigParamsDecVC1*>(obj))

 /**
 * MIX_IS_VIDEOCONFIGPARAMSDEC_VC1:
@@ -33,43 +24,22 @@ G_BEGIN_DECLS
 *
 * Checks if the given object is an instance of #MixVideoConfigParamsDecVC1
 */
-#define MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1))
-
-/**
-* MIX_VIDEOCONFIGPARAMSDEC_VC1_CLASS:
-* @klass: class to be type-casted.
-*/
-#define MIX_VIDEOCONFIGPARAMSDEC_VC1_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1, MixVideoConfigParamsDecVC1Class))
-
-/**
-* MIX_IS_VIDEOCONFIGPARAMSDEC_VC1_CLASS:
-* @klass: a class.
-* -* Checks if the given class is #MixVideoConfigParamsDecVC1Class -*/ -#define MIX_IS_VIDEOCONFIGPARAMSDEC_VC1_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1)) - -/** -* MIX_VIDEOCONFIGPARAMSDEC_VC1_GET_CLASS: -* @obj: a #MixParams object. -* -* Get the class instance of the object. -*/ -#define MIX_VIDEOCONFIGPARAMSDEC_VC1_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1, MixVideoConfigParamsDecVC1Class)) - -typedef struct _MixVideoConfigParamsDecVC1 MixVideoConfigParamsDecVC1; -typedef struct _MixVideoConfigParamsDecVC1Class MixVideoConfigParamsDecVC1Class; +#define MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(obj) ((NULL != MIX_VIDEOCONFIGPARAMSDEC_VC1(obj)) ? TRUE : FALSE) /** * MixVideoConfigParamsDecVC1: * * MI-X VideoConfig Parameter object */ -struct _MixVideoConfigParamsDecVC1 +class MixVideoConfigParamsDecVC1 : public MixVideoConfigParamsDec { - /*< public > */ - MixVideoConfigParamsDec parent; - +public: + MixVideoConfigParamsDecVC1(); + ~MixVideoConfigParamsDecVC1(); + virtual gboolean copy(MixParams *target) const; + virtual gboolean equal(MixParams* obj) const; + virtual MixParams* dup() const; +public: /*< public > */ /* TODO: Add VC1 configuration paramters */ @@ -94,27 +64,6 @@ struct _MixVideoConfigParamsDecVC1 void *reserved4; }; -/** -* MixVideoConfigParamsDecVC1Class: -* -* MI-X VideoConfig object class -*/ -struct _MixVideoConfigParamsDecVC1Class -{ - /*< public > */ - MixVideoConfigParamsDecClass parent_class; - - /* class members */ -}; - -/** -* mix_videoconfigparamsdec_vc1_get_type: -* @returns: type -* -* Get the type of object. -*/ -GType mix_videoconfigparamsdec_vc1_get_type (void); - /** * mix_videoconfigparamsdec_vc1_new: * @returns: A newly allocated instance of #MixVideoConfigParamsDecVC1 @@ -143,7 +92,4 @@ MixVideoConfigParamsDecVC1 /* Class Methods */ /* TODO: Add getters and setters for other properties */ - -G_END_DECLS - #endif /* __MIX_VIDEOCONFIGPARAMSDECDEC_VC1_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc.c b/mix_video/src/mixvideoconfigparamsenc.c deleted file mode 100644 index c35ade2..0000000 --- a/mix_video/src/mixvideoconfigparamsenc.c +++ /dev/null @@ -1,859 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideoconfigparamsenc - * @short_description: MI-X Video Encode Configuration Parameter Base Object - * - * A base object of MI-X video encode configuration parameter objects. 
- */ - - -#include -#include "mixvideolog.h" -#include "mixvideoconfigparamsenc.h" - -static GType _mix_videoconfigparamsenc_type = 0; -static MixParamsClass *parent_class = NULL; - -#define MDEBUG - -#define _do_init { _mix_videoconfigparamsenc_type = g_define_type_id; } - -gboolean mix_videoconfigparamsenc_copy(MixParams * target, const MixParams * src); -MixParams *mix_videoconfigparamsenc_dup(const MixParams * obj); -gboolean mix_videoconfigparamsenc_equal(MixParams * first, MixParams * second); -static void mix_videoconfigparamsenc_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEnc, mix_videoconfigparamsenc, - MIX_TYPE_VIDEOCONFIGPARAMS, _do_init); - -static void mix_videoconfigparamsenc_init(MixVideoConfigParamsEnc * self) { - /* initialize properties here */ - self->bitrate = 0; - self->frame_rate_num = 30; - self->frame_rate_denom = 1; - self->initial_qp = 15; - self->min_qp = 0; - self->target_percentage = 95; - self->window_size = 500; - - self->max_slice_size = 0; /*Set to 0 means it won't take effect*/ - - self->picture_width = 0; - self->picture_height = 0; - - self->mime_type = NULL; - self->encode_format = 0; - self->intra_period = 30; - - self->mixbuffer_pool_size = 0; - - self->share_buf_mode = FALSE; - - self->ci_frame_id = NULL; - self->ci_frame_num = 0; - - self->need_display = TRUE; - - self->rate_control = MIX_RATE_CONTROL_NONE; - self->raw_format = MIX_RAW_TARGET_FORMAT_YUV420; - self->profile = MIX_PROFILE_H264BASELINE; - self->level = 30; - - self->CIR_frame_cnt = 15; - self->refresh_type = MIX_VIDEO_NONIR; - - self->air_params.air_MBs = 0; - self->air_params.air_threshold = 0; - self->air_params.air_auto = 0; - - /* TODO: initialize other properties */ - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void mix_videoconfigparamsenc_class_init(MixVideoConfigParamsEncClass * klass) { - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); - - mixparams_class->finalize = mix_videoconfigparamsenc_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_videoconfigparamsenc_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_videoconfigparamsenc_dup; - mixparams_class->equal - = (MixParamsEqualFunction) mix_videoconfigparamsenc_equal; -} - -MixVideoConfigParamsEnc * -mix_videoconfigparamsenc_new(void) { - MixVideoConfigParamsEnc *ret = - (MixVideoConfigParamsEnc *) g_type_create_instance( - MIX_TYPE_VIDEOCONFIGPARAMSENC); - - return ret; -} - -void mix_videoconfigparamsenc_finalize(MixParams * obj) { - - /* clean up here. */ - MixVideoConfigParamsEnc *self = MIX_VIDEOCONFIGPARAMSENC(obj); - - /* free mime_type */ - if (self->mime_type->str) - g_string_free(self->mime_type, TRUE); - else - g_string_free(self->mime_type, FALSE); - - if (self->ci_frame_id) - g_free (self->ci_frame_id); - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} - -MixVideoConfigParamsEnc * -mix_videoconfigparamsenc_ref(MixVideoConfigParamsEnc * mix) { - return (MixVideoConfigParamsEnc *) mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_videoconfigparamsenc_dup: - * @obj: a #MixVideoConfigParamsEnc object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. 
- */ -MixParams * -mix_videoconfigparamsenc_dup(const MixParams * obj) { - MixParams *ret = NULL; - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOCONFIGPARAMSENC(obj)) { - MixVideoConfigParamsEnc *duplicate = mix_videoconfigparamsenc_new(); - if (mix_videoconfigparamsenc_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { - - ret = MIX_PARAMS(duplicate); - } else { - mix_videoconfigparamsenc_unref(duplicate); - } - } - return ret; -} - -/** - * mix_videoconfigparamsenc_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videoconfigparamsenc_copy(MixParams * target, const MixParams * src) { - - MixVideoConfigParamsEnc *this_target, *this_src; - MIX_RESULT mix_result = MIX_RESULT_FAIL; - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOCONFIGPARAMSENC(target) && MIX_IS_VIDEOCONFIGPARAMSENC(src)) { - - /* Cast the base object to this child object */ - this_target = MIX_VIDEOCONFIGPARAMSENC(target); - this_src = MIX_VIDEOCONFIGPARAMSENC(src); - - /* copy properties of primitive type */ - - this_target->bitrate = this_src->bitrate; - this_target->frame_rate_num = this_src->frame_rate_num; - this_target->frame_rate_denom = this_src->frame_rate_denom; - this_target->initial_qp = this_src->initial_qp; - this_target->min_qp = this_src->min_qp; - this_target->target_percentage = this_src->target_percentage; - this_target->window_size = this_src->window_size; - this_target->max_slice_size = this_src->max_slice_size; - this_target->intra_period = this_src->intra_period; - this_target->picture_width = this_src->picture_width; - this_target->picture_height = this_src->picture_height; - this_target->mixbuffer_pool_size = this_src->mixbuffer_pool_size; - this_target->share_buf_mode = this_src->share_buf_mode; - this_target->encode_format = this_src->encode_format; - this_target->ci_frame_num = this_src->ci_frame_num; - this_target->draw= this_src->draw; - this_target->need_display = this_src->need_display; - this_target->rate_control = this_src->rate_control; - this_target->raw_format = this_src->raw_format; - this_target->profile = this_src->profile; - this_target->level = this_src->level; - this_target->CIR_frame_cnt = this_src->CIR_frame_cnt; - this_target->refresh_type = this_src->refresh_type; - this_target->air_params.air_MBs = this_src->air_params.air_MBs; - this_target->air_params.air_threshold = this_src->air_params.air_threshold; - this_target->air_params.air_auto = this_src->air_params.air_auto; - - /* copy properties of non-primitive */ - - /* copy mime_type */ - - if (this_src->mime_type) { -#ifdef MDEBUG - if (this_src->mime_type->str) { - - LOG_I( "this_src->mime_type->str = %s %x\n", - this_src->mime_type->str, (unsigned int)this_src->mime_type->str); - } -#endif - - mix_result = mix_videoconfigparamsenc_set_mime_type(this_target, - this_src->mime_type->str); - } else { - - LOG_I( "this_src->mime_type = NULL\n"); - - mix_result = mix_videoconfigparamsenc_set_mime_type(this_target, NULL); - } - - if (mix_result != MIX_RESULT_SUCCESS) { - - LOG_E( "Failed to mix_videoconfigparamsenc_set_mime_type\n"); - return FALSE; - } - - mix_result = mix_videoconfigparamsenc_set_ci_frame_info (this_target, this_src->ci_frame_id, - this_src->ci_frame_num); - - /* TODO: copy other properties if there's any */ - - /* Now chainup base class */ - if (parent_class->copy) { - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - return TRUE; - } - } - - return 
FALSE; -} - - -/** - * mix_videoconfigparamsenc_: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videoconfigparamsenc_equal(MixParams * first, MixParams * second) { - - gboolean ret = FALSE; - - MixVideoConfigParamsEnc *this_first, *this_second; - - if (MIX_IS_VIDEOCONFIGPARAMSENC(first) && MIX_IS_VIDEOCONFIGPARAMSENC(second)) { - - // Deep compare - // Cast the base object to this child object - this_first = MIX_VIDEOCONFIGPARAMSENC(first); - this_second = MIX_VIDEOCONFIGPARAMSENC(second); - - /* check the equalitiy of the primitive type properties */ - if (this_first->bitrate != this_second->bitrate) { - goto not_equal; - } - - if (this_first->frame_rate_num != this_second->frame_rate_num) { - goto not_equal; - } - - if (this_first->frame_rate_denom != this_second->frame_rate_denom) { - goto not_equal; - } - - if (this_first->initial_qp != this_second->initial_qp) { - goto not_equal; - } - - if (this_first->min_qp != this_second->min_qp) { - goto not_equal; - } - - if (this_first->target_percentage != this_second->target_percentage) { - goto not_equal; - } - - if (this_first->window_size != this_second->window_size) { - goto not_equal; - } - - if (this_first->max_slice_size != this_second->max_slice_size) { - goto not_equal; - } - - if (this_first->intra_period != this_second->intra_period) { - goto not_equal; - } - - if (this_first->picture_width != this_second->picture_width - && this_first->picture_height != this_second->picture_height) { - goto not_equal; - } - - if (this_first->encode_format != this_second->encode_format) { - goto not_equal; - } - - if (this_first->mixbuffer_pool_size != this_second->mixbuffer_pool_size) { - goto not_equal; - } - - if (this_first->share_buf_mode != this_second->share_buf_mode) { - goto not_equal; - } - - if (this_first->ci_frame_id != this_second->ci_frame_id) { - goto not_equal; - } - - if (this_first->ci_frame_num != this_second->ci_frame_num) { - goto not_equal; - } - - if (this_first->draw != this_second->draw) { - goto not_equal; - } - - if (this_first->need_display!= this_second->need_display) { - goto not_equal; - } - - if (this_first->rate_control != this_second->rate_control) { - goto not_equal; - } - - if (this_first->raw_format != this_second->raw_format) { - goto not_equal; - } - - if (this_first->profile != this_second->profile) { - goto not_equal; - } - - if (this_first->level != this_second->level) { - goto not_equal; - } - - if (this_first->CIR_frame_cnt != this_second->CIR_frame_cnt) { - goto not_equal; - } - - if (this_first->refresh_type != this_second->refresh_type) { - goto not_equal; - } - - if (this_first->air_params.air_MBs != this_second->air_params.air_MBs) { - goto not_equal; - } - - if (this_first->air_params.air_threshold != this_second->air_params.air_threshold) { - goto not_equal; - } - - if (this_first->air_params.air_auto != this_second->air_params.air_auto) { - goto not_equal; - } - - /* check the equalitiy of the none-primitive type properties */ - - /* compare mime_type */ - - if (this_first->mime_type && this_second->mime_type) { - if (g_string_equal(this_first->mime_type, this_second->mime_type) - != TRUE) { - goto not_equal; - } - } else if (!(!this_first->mime_type && !this_second->mime_type)) { - goto not_equal; - } - - ret = TRUE; - - not_equal: - - if (ret != TRUE) { - return ret; - } - - /* chaining up. 
*/ - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - } - - return ret; -} - -#define MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR(obj, prop, prop2) \ - if(!obj || !prop || !prop2 ) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \ - -/* TODO: Add getters and setters for other properties. The following is incomplete */ - - -MIX_RESULT mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj, - const gchar * mime_type) { - - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - - if (!mime_type) { - return MIX_RESULT_NULL_PTR; - } - - LOG_I( "mime_type = %s %x\n", - mime_type, (unsigned int)mime_type); - - if (obj->mime_type) { - if (obj->mime_type->str) - g_string_free(obj->mime_type, TRUE); - else - g_string_free(obj->mime_type, FALSE); - } - - - LOG_I( "mime_type = %s %x\n", - mime_type, (unsigned int)mime_type); - - obj->mime_type = g_string_new(mime_type); - if (!obj->mime_type) { - return MIX_RESULT_NO_MEMORY; - } - - - LOG_I( "mime_type = %s obj->mime_type->str = %s\n", - mime_type, obj->mime_type->str); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_mime_type(MixVideoConfigParamsEnc * obj, - gchar ** mime_type) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, mime_type); - - if (!obj->mime_type) { - *mime_type = NULL; - return MIX_RESULT_SUCCESS; - } - *mime_type = g_strdup(obj->mime_type->str); - if (!*mime_type) { - return MIX_RESULT_NO_MEMORY; - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_frame_rate(MixVideoConfigParamsEnc * obj, - guint frame_rate_num, guint frame_rate_denom) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->frame_rate_num = frame_rate_num; - obj->frame_rate_denom = frame_rate_denom; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_frame_rate(MixVideoConfigParamsEnc * obj, - guint * frame_rate_num, guint * frame_rate_denom) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, frame_rate_num, frame_rate_denom); - *frame_rate_num = obj->frame_rate_num; - *frame_rate_denom = obj->frame_rate_denom; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_picture_res(MixVideoConfigParamsEnc * obj, - guint picture_width, guint picture_height) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->picture_width = picture_width; - obj->picture_height = picture_height; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_picture_res(MixVideoConfigParamsEnc * obj, - guint * picture_width, guint * picture_height) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, picture_width, picture_height); - *picture_width = obj->picture_width; - *picture_height = obj->picture_height; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_encode_format(MixVideoConfigParamsEnc * obj, - MixEncodeTargetFormat encode_format) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->encode_format = encode_format; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_encode_format 
(MixVideoConfigParamsEnc * obj, - MixEncodeTargetFormat* encode_format) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, encode_format); - *encode_format = obj->encode_format; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc * obj, - guint bitrate) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->bitrate= bitrate; - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc * obj, - guint *bitrate) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, bitrate); - *bitrate = obj->bitrate; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj, - guint initial_qp) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->initial_qp = initial_qp; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc * obj, - guint *initial_qp) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, initial_qp); - *initial_qp = obj->initial_qp; - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj, - guint min_qp) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->min_qp = min_qp; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj, - guint *min_qp) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, min_qp); - *min_qp = obj->min_qp; - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videoconfigparamsenc_set_target_percentage (MixVideoConfigParamsEnc * obj, - guint target_percentage) { - - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->target_percentage = target_percentage; - return MIX_RESULT_SUCCESS; - } - - -MIX_RESULT mix_videoconfigparamsenc_get_target_percentage(MixVideoConfigParamsEnc * obj, - guint *target_percentage) { - - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, target_percentage); - *target_percentage = obj->target_percentage; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_window_size (MixVideoConfigParamsEnc * obj, - guint window_size) { - - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->window_size = window_size; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_window_size (MixVideoConfigParamsEnc * obj, - guint *window_size) { - - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, window_size); - *window_size = obj->window_size; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc * obj, - guint intra_period) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->intra_period = intra_period; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_intra_period (MixVideoConfigParamsEnc * obj, - guint *intra_period) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, intra_period); - *intra_period = obj->intra_period; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size( - MixVideoConfigParamsEnc * obj, guint bufpoolsize) { - - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - - obj->mixbuffer_pool_size = bufpoolsize; - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size( - MixVideoConfigParamsEnc * obj, guint *bufpoolsize) { - - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, bufpoolsize); - *bufpoolsize = obj->mixbuffer_pool_size; - return MIX_RESULT_SUCCESS; -} - 
-MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode ( - MixVideoConfigParamsEnc * obj, gboolean share_buf_mod) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - - obj->share_buf_mode = share_buf_mod; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode(MixVideoConfigParamsEnc * obj, - gboolean *share_buf_mod) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, share_buf_mod); - - *share_buf_mod = obj->share_buf_mode; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj, - gulong * ci_frame_id, guint ci_frame_num) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - - - if (!ci_frame_id || !ci_frame_num) { - obj->ci_frame_id = NULL; - obj->ci_frame_num = 0; - return MIX_RESULT_SUCCESS; - } - - if (obj->ci_frame_id) - g_free (obj->ci_frame_id); - - guint size = ci_frame_num * sizeof (gulong); - obj->ci_frame_num = ci_frame_num; - - obj->ci_frame_id = g_malloc (ci_frame_num * sizeof (gulong)); - if (!(obj->ci_frame_id)) { - return MIX_RESULT_NO_MEMORY; - } - - memcpy (obj->ci_frame_id, ci_frame_id, size); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * obj, - gulong * *ci_frame_id, guint *ci_frame_num) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, ci_frame_id, ci_frame_num); - - *ci_frame_num = obj->ci_frame_num; - - if (!obj->ci_frame_id) { - *ci_frame_id = NULL; - return MIX_RESULT_SUCCESS; - } - - if (obj->ci_frame_num) { - *ci_frame_id = g_malloc (obj->ci_frame_num * sizeof (gulong)); - - if (!*ci_frame_id) { - return MIX_RESULT_NO_MEMORY; - } - - memcpy (*ci_frame_id, obj->ci_frame_id, obj->ci_frame_num * sizeof (gulong)); - - } else { - *ci_frame_id = NULL; - } - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj, - gulong draw) { - - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->draw = draw; - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT mix_videoconfigparamsenc_get_drawable (MixVideoConfigParamsEnc * obj, - gulong *draw) { - - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, draw); - *draw = obj->draw; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_need_display ( - MixVideoConfigParamsEnc * obj, gboolean need_display) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - - obj->need_display = need_display; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * obj, - gboolean *need_display) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, need_display); - - *need_display = obj->need_display; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj, - MixRateControl rate_control) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->rate_control = rate_control; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * obj, - MixRateControl * rate_control) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, rate_control); - *rate_control = obj->rate_control; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj, - MixRawTargetFormat raw_format) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->raw_format = raw_format; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_raw_format 
(MixVideoConfigParamsEnc * obj, - MixRawTargetFormat * raw_format) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, raw_format); - *raw_format = obj->raw_format; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj, - MixProfile profile) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->profile = profile; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj, - MixProfile * profile) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, profile); - *profile = obj->profile; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, - guint8 level) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->level = level; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, - guint8 * level) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, level); - *level = obj->level; - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, - guint CIR_frame_cnt) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->CIR_frame_cnt = CIR_frame_cnt; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, - guint * CIR_frame_cnt) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, CIR_frame_cnt); - *CIR_frame_cnt = obj->CIR_frame_cnt; - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videoconfigparamsenc_set_max_slice_size (MixVideoConfigParamsEnc * obj, - guint max_slice_size) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->max_slice_size = max_slice_size; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_max_slice_size (MixVideoConfigParamsEnc * obj, - guint * max_slice_size) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, max_slice_size); - *max_slice_size = obj->max_slice_size; - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videoconfigparamsenc_set_refresh_type(MixVideoConfigParamsEnc * obj, - MixVideoIntraRefreshType refresh_type) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->refresh_type = refresh_type; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_refresh_type (MixVideoConfigParamsEnc * obj, - MixVideoIntraRefreshType * refresh_type) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, refresh_type); - *refresh_type = obj->refresh_type; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_set_AIR_params (MixVideoConfigParamsEnc * obj, - MixAIRParams air_params) { - MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - obj->air_params.air_MBs = air_params.air_MBs; - obj->air_params.air_threshold = air_params.air_threshold; - obj->air_params.air_auto = air_params.air_auto; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_get_AIR_params (MixVideoConfigParamsEnc * obj, - MixAIRParams * air_params) { - MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, air_params); - air_params->air_MBs = obj->air_params.air_MBs; - air_params->air_threshold = obj->air_params.air_threshold; - air_params->air_auto = obj->air_params.air_auto; - return MIX_RESULT_SUCCESS; -} - diff --git a/mix_video/src/mixvideoconfigparamsenc.cpp b/mix_video/src/mixvideoconfigparamsenc.cpp new file mode 100644 index 0000000..efba46f --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsenc.cpp @@ -0,0 +1,514 @@ +/* + 
INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideoconfigparamsenc
+ * @short_description: MI-X Video Encode Configuration Parameter Base Object
+ *
+ * A base object of MI-X video encode configuration parameter objects.
+ */
+
+
+#include <string.h>
+#include "mixvideolog.h"
+#include "mixvideoconfigparamsenc.h"
+
+
+#define MDEBUG
+
+MixVideoConfigParamsEnc *
+mix_videoconfigparamsenc_new(void) {
+    return new MixVideoConfigParamsEnc();
+}
+
+
+
+MixVideoConfigParamsEnc *
+mix_videoconfigparamsenc_ref(MixVideoConfigParamsEnc * mix) {
+    return (MixVideoConfigParamsEnc *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_videoconfigparamsenc_dup:
+ * @obj: a #MixVideoConfigParamsEnc object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams *
+mix_videoconfigparamsenc_dup(const MixParams * obj) {
+    return NULL;
+}
+
+/**
+ * mix_videoconfigparamsenc_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_videoconfigparamsenc_copy(MixParams * target, const MixParams * src) {
+
+    return FALSE;
+}
+
+
+/**
+ * mix_videoconfigparamsenc_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if the instances are equal.
+ *
+ * Compares @first and @second for equality.
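+ *
+ * Illustrative call (cfg_a and cfg_b are hypothetical
+ * #MixVideoConfigParamsEnc pointers; MIX_PARAMS() casting as used
+ * elsewhere in this file):
+ * |[
+ * gboolean same = mix_videoconfigparamsenc_equal(MIX_PARAMS(cfg_a),
+ *                                                MIX_PARAMS(cfg_b));
+ * ]|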
+ */
+gboolean mix_videoconfigparamsenc_equal(MixParams * first, MixParams * second) {
+
+    gboolean ret = FALSE;
+
+    MixVideoConfigParamsEnc *this_first, *this_second;
+
+    if (NULL != first && NULL != second) {
+
+        // Deep compare
+        // Cast the base object to this child object
+        this_first = MIX_VIDEOCONFIGPARAMSENC(first);
+        this_second = MIX_VIDEOCONFIGPARAMSENC(second);
+
+        /* check the equality of the primitive type properties */
+        if (this_first->bitrate != this_second->bitrate) {
+            goto not_equal;
+        }
+
+        if (this_first->frame_rate_num != this_second->frame_rate_num) {
+            goto not_equal;
+        }
+
+        if (this_first->frame_rate_denom != this_second->frame_rate_denom) {
+            goto not_equal;
+        }
+
+        if (this_first->initial_qp != this_second->initial_qp) {
+            goto not_equal;
+        }
+
+        if (this_first->min_qp != this_second->min_qp) {
+            goto not_equal;
+        }
+
+        if (this_first->target_percentage != this_second->target_percentage) {
+            goto not_equal;
+        }
+
+        if (this_first->window_size != this_second->window_size) {
+            goto not_equal;
+        }
+
+        if (this_first->max_slice_size != this_second->max_slice_size) {
+            goto not_equal;
+        }
+
+        if (this_first->intra_period != this_second->intra_period) {
+            goto not_equal;
+        }
+
+        /* a mismatch in either dimension makes the configurations unequal */
+        if (this_first->picture_width != this_second->picture_width
+            || this_first->picture_height != this_second->picture_height) {
+            goto not_equal;
+        }
+
+        if (this_first->encode_format != this_second->encode_format) {
+            goto not_equal;
+        }
+
+        if (this_first->mixbuffer_pool_size != this_second->mixbuffer_pool_size) {
+            goto not_equal;
+        }
+
+        if (this_first->share_buf_mode != this_second->share_buf_mode) {
+            goto not_equal;
+        }
+
+        if (this_first->ci_frame_id != this_second->ci_frame_id) {
+            goto not_equal;
+        }
+
+        if (this_first->ci_frame_num != this_second->ci_frame_num) {
+            goto not_equal;
+        }
+
+        if (this_first->draw != this_second->draw) {
+            goto not_equal;
+        }
+
+        if (this_first->need_display != this_second->need_display) {
+            goto not_equal;
+        }
+
+        if (this_first->rate_control != this_second->rate_control) {
+            goto not_equal;
+        }
+
+        if (this_first->raw_format != this_second->raw_format) {
+            goto not_equal;
+        }
+
+        if (this_first->profile != this_second->profile) {
+            goto not_equal;
+        }
+
+        if (this_first->level != this_second->level) {
+            goto not_equal;
+        }
+
+        if (this_first->CIR_frame_cnt != this_second->CIR_frame_cnt) {
+            goto not_equal;
+        }
+
+        if (this_first->refresh_type != this_second->refresh_type) {
+            goto not_equal;
+        }
+
+        if (this_first->air_params.air_MBs != this_second->air_params.air_MBs) {
+            goto not_equal;
+        }
+
+        if (this_first->air_params.air_threshold != this_second->air_params.air_threshold) {
+            goto not_equal;
+        }
+
+        if (this_first->air_params.air_auto != this_second->air_params.air_auto) {
+            goto not_equal;
+        }
+
+        /* check the equality of the non-primitive type properties */
+
+        /* compare mime_type */
+
+        if (this_first->mime_type && this_second->mime_type) {
+            if (g_string_equal(this_first->mime_type, this_second->mime_type)
+                != TRUE) {
+                goto not_equal;
+            }
+        } else if (!(!this_first->mime_type && !this_second->mime_type)) {
+            goto not_equal;
+        }
+
+        ret = TRUE;
+
+    not_equal:
+
+        if (ret != TRUE) {
+            return ret;
+        }
+
+        /* chaining up. */
+        return TRUE;
+    }
+
+    return ret;
+}
+
+
+/* TODO: Add getters and setters for other properties.
The following is incomplete */ + + +MIX_RESULT mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj, + const gchar * mime_type) { + + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_mime_type(MixVideoConfigParamsEnc * obj, + gchar ** mime_type) { + + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_frame_rate(MixVideoConfigParamsEnc * obj, + guint frame_rate_num, guint frame_rate_denom) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_frame_rate(MixVideoConfigParamsEnc * obj, + guint * frame_rate_num, guint * frame_rate_denom) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_picture_res(MixVideoConfigParamsEnc * obj, + guint picture_width, guint picture_height) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_picture_res(MixVideoConfigParamsEnc * obj, + guint * picture_width, guint * picture_height) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_encode_format(MixVideoConfigParamsEnc * obj, + MixEncodeTargetFormat encode_format) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_encode_format (MixVideoConfigParamsEnc * obj, + MixEncodeTargetFormat* encode_format) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc * obj, + guint bitrate) { + + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc * obj, + guint *bitrate) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj, + guint initial_qp) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc * obj, + guint *initial_qp) { + + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj, + guint min_qp) { + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj, + guint *min_qp) { + return MIX_RESULT_NOT_SUPPORTED; +} + + +MIX_RESULT mix_videoconfigparamsenc_set_target_percentage (MixVideoConfigParamsEnc * obj, + guint target_percentage) { + + return MIX_RESULT_SUCCESS; + } + + +MIX_RESULT mix_videoconfigparamsenc_get_target_percentage(MixVideoConfigParamsEnc * obj, + guint *target_percentage) { + + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_window_size (MixVideoConfigParamsEnc * obj, + guint window_size) { + + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_window_size (MixVideoConfigParamsEnc * obj, + guint *window_size) { + + + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc * obj, + guint intra_period) { + + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_intra_period (MixVideoConfigParamsEnc * obj, + guint *intra_period) { + + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size( + MixVideoConfigParamsEnc * obj, guint bufpoolsize) { + + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size( + MixVideoConfigParamsEnc * obj, guint *bufpoolsize) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode (MixVideoConfigParamsEnc * obj, + gboolean share_buf_mod) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT 
mix_videoconfigparamsenc_get_share_buf_mode(MixVideoConfigParamsEnc * obj, + gboolean *share_buf_mod) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj, + gulong * ci_frame_id, guint ci_frame_num) { + + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * obj, + gulong * *ci_frame_id, guint *ci_frame_num) { + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj, + gulong draw) { + + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videoconfigparamsenc_get_drawable (MixVideoConfigParamsEnc * obj, + gulong *draw) { + + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_need_display ( + MixVideoConfigParamsEnc * obj, gboolean need_display) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * obj, + gboolean *need_display) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj, + MixRateControl rate_control) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * obj, + MixRateControl * rate_control) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj, + MixRawTargetFormat raw_format) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * obj, + MixRawTargetFormat * raw_format) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj, + MixProfile profile) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj, + MixProfile * profile) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, + guint8 level) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, + guint8 * level) { + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, + guint CIR_frame_cnt) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, + guint * CIR_frame_cnt) { + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoconfigparamsenc_set_max_slice_size (MixVideoConfigParamsEnc * obj, + guint max_slice_size) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_max_slice_size (MixVideoConfigParamsEnc * obj, + guint * max_slice_size) { + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videoconfigparamsenc_set_refresh_type(MixVideoConfigParamsEnc * obj, + MixVideoIntraRefreshType refresh_type) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_refresh_type (MixVideoConfigParamsEnc * obj, + MixVideoIntraRefreshType * refresh_type) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_set_AIR_params (MixVideoConfigParamsEnc * obj, + MixAIRParams air_params) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_get_AIR_params (MixVideoConfigParamsEnc * obj, + MixAIRParams * air_params) { + + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoconfigparamsenc.h 
b/mix_video/src/mixvideoconfigparamsenc.h
index f45cc50..ceac4a6 100644
--- a/mix_video/src/mixvideoconfigparamsenc.h
+++ b/mix_video/src/mixvideoconfigparamsenc.h
@@ -12,20 +12,12 @@
 #include
 #include "mixvideodef.h"
-G_BEGIN_DECLS
-
-/**
- * MIX_TYPE_VIDEOCONFIGPARAMSENC:
- *
- * Get type of class.
- */
-#define MIX_TYPE_VIDEOCONFIGPARAMSENC (mix_videoconfigparamsenc_get_type ())
-
 /**
 * MIX_VIDEOCONFIGPARAMSENC:
 * @obj: object to be type-casted.
 */
-#define MIX_VIDEOCONFIGPARAMSENC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEnc))
+//#define MIX_VIDEOCONFIGPARAMSENC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEnc))
+#define MIX_VIDEOCONFIGPARAMSENC(obj) (reinterpret_cast<MixVideoConfigParamsEnc*>(obj))

 /**
 * MIX_IS_VIDEOCONFIGPARAMSENC:
@@ -33,13 +25,13 @@ G_BEGIN_DECLS
 *
 * Checks if the given object is an instance of #MixParams
 */
-#define MIX_IS_VIDEOCONFIGPARAMSENC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC))
+//#define MIX_IS_VIDEOCONFIGPARAMSENC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC))

 /**
 * MIX_VIDEOCONFIGPARAMSENC_CLASS:
 * @klass: class to be type-casted.
 */
-#define MIX_VIDEOCONFIGPARAMSENC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEncClass))
+//#define MIX_VIDEOCONFIGPARAMSENC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEncClass))

 /**
 * MIX_IS_VIDEOCONFIGPARAMSENC_CLASS:
@@ -47,7 +39,7 @@ G_BEGIN_DECLS
 *
 * Checks if the given class is #MixParamsClass
 */
-#define MIX_IS_VIDEOCONFIGPARAMSENC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC))
+//#define MIX_IS_VIDEOCONFIGPARAMSENC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC))

 /**
 * MIX_VIDEOCONFIGPARAMSENC_GET_CLASS:
@@ -55,25 +47,21 @@ G_BEGIN_DECLS
 *
 * Get the class instance of the object.
 */
-#define MIX_VIDEOCONFIGPARAMSENC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEncClass))
+//#define MIX_VIDEOCONFIGPARAMSENC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEncClass))
+

-typedef struct _MixVideoConfigParamsEnc MixVideoConfigParamsEnc;
-typedef struct _MixVideoConfigParamsEncClass MixVideoConfigParamsEncClass;

/**
 * MixVideoConfigParamsEnc:
 *
 * MI-X VideoConfig Parameter object
 */
-struct _MixVideoConfigParamsEnc {
-    /*< public > */
-    MixVideoConfigParams parent;
-
+class MixVideoConfigParamsEnc : public MixVideoConfigParams {
    /*< public > */

    //MixIOVec header;
    /* the type of the following members will be changed after MIX API doc is ready */
-
+public:

    /* Encoding profile */
    MixProfile profile;
@@ -170,26 +158,6 @@ struct _MixVideoConfigParamsEnc {
    void *reserved4;
};

-/**
- * MixVideoConfigParamsEncClass:
- *
- * MI-X VideoConfig object class
- */
-struct _MixVideoConfigParamsEncClass {
-    /*< public > */
-    MixVideoConfigParamsClass parent_class;
-
-    /* class members */
-};
-
-/**
- * mix_videoconfigparamsenc_get_type:
- * @returns: type
- *
- * Get the type of object.
- */ -GType mix_videoconfigparamsenc_get_type(void); - /** * mix_videoconfigparamsenc_new: * @returns: A newly allocated instance of #MixVideoConfigParamsEnc @@ -757,7 +725,6 @@ MIX_RESULT mix_videoconfigparamsenc_get_AIR_params (MixVideoConfigParamsEnc * ob /* TODO: Add getters and setters for other properties */ -G_END_DECLS #endif /* __MIX_VIDEOCONFIGPARAMSENC_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc_h263.c b/mix_video/src/mixvideoconfigparamsenc_h263.c deleted file mode 100644 index d0fb4d8..0000000 --- a/mix_video/src/mixvideoconfigparamsenc_h263.c +++ /dev/null @@ -1,281 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** - * SECTION:mixvideoconfigparamsenc_h263 - * @short_description: MI-X Video H.263 Eecode Configuration Parameter - * - * MI-X video H.263 eecode configuration parameter objects. 
- */ - - -#include "mixvideolog.h" -#include "mixvideoconfigparamsenc_h263.h" - -#define MDEBUG - - -static GType _mix_videoconfigparamsenc_h263_type = 0; -static MixVideoConfigParamsEncClass *parent_class = NULL; - -#define _do_init { _mix_videoconfigparamsenc_h263_type = g_define_type_id; } - -gboolean mix_videoconfigparamsenc_h263_copy (MixParams * target, - const MixParams * src); -MixParams *mix_videoconfigparamsenc_h263_dup (const MixParams * obj); -gboolean mix_videoconfigparamsencenc_h263_equal (MixParams * first, - MixParams * second); -static void mix_videoconfigparamsenc_h263_finalize (MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncH263, /* The name of the new type, in Camel case */ - mix_videoconfigparamsenc_h263, /* The name of the new type in lowercase */ - MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ - _do_init); - -void -_mix_videoconfigparamsenc_h263_initialize (void) -{ - /* the MixParams types need to be class_ref'd once before it can be - * done from multiple threads; - * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ - g_type_class_ref (mix_videoconfigparamsenc_h263_get_type ()); -} - -static void -mix_videoconfigparamsenc_h263_init (MixVideoConfigParamsEncH263 * self) -{ - /* initialize properties here */ - /* TODO: initialize properties */ - - self->slice_num = 1; - self->disable_deblocking_filter_idc = 0; - - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void -mix_videoconfigparamsenc_h263_class_init (MixVideoConfigParamsEncH263Class * klass) -{ - MixVideoConfigParamsEncClass *this_parent_class = - MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); - MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); - - /* setup static parent class */ - parent_class = - (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); - - this_root_class->finalize = mix_videoconfigparamsenc_h263_finalize; - this_root_class->copy = - (MixParamsCopyFunction) mix_videoconfigparamsenc_h263_copy; - this_root_class->dup = - (MixParamsDupFunction) mix_videoconfigparamsenc_h263_dup; - this_root_class->equal = - (MixParamsEqualFunction) mix_videoconfigparamsencenc_h263_equal; -} - -MixVideoConfigParamsEncH263 * -mix_videoconfigparamsenc_h263_new (void) -{ - MixVideoConfigParamsEncH263 *ret = (MixVideoConfigParamsEncH263 *) - g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_H263); - - return ret; -} - -void -mix_videoconfigparamsenc_h263_finalize (MixParams * obj) -{ - /* MixVideoConfigParamsEncH263 *this_obj = MIX_VIDEOCONFIGPARAMSENC_H263 (obj); */ - MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); - - /* TODO: cleanup resources allocated */ - - /* Chain up parent */ - - if (root_class->finalize) - { - root_class->finalize (obj); - } -} - -MixVideoConfigParamsEncH263 - * mix_videoconfigparamsenc_h263_ref (MixVideoConfigParamsEncH263 * mix) -{ - return (MixVideoConfigParamsEncH263 *) mix_params_ref (MIX_PARAMS (mix)); -} - -/** -* mix_videoconfigparamsenc_h263_dup: -* @obj: a #MixVideoConfigParams object -* @returns: a newly allocated duplicate of the object. -* -* Copy duplicate of the object. 
-*/ -MixParams * -mix_videoconfigparamsenc_h263_dup (const MixParams * obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H263 (obj)) - { - MixVideoConfigParamsEncH263 *duplicate = mix_videoconfigparamsenc_h263_new (); - if (mix_videoconfigparamsenc_h263_copy - (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) - { - ret = MIX_PARAMS (duplicate); - } - else - { - mix_videoconfigparamsenc_h263_unref (duplicate); - } - } - return ret; -} - -/** -* mix_videoconfigparamsenc_h263_copy: -* @target: copy to target -* @src: copy from src -* @returns: boolean indicates if copy is successful. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videoconfigparamsenc_h263_copy (MixParams * target, const MixParams * src) -{ - MixVideoConfigParamsEncH263 *this_target, *this_src; - MixParamsClass *root_class; - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H263 (target) - && MIX_IS_VIDEOCONFIGPARAMSENC_H263 (src)) - { - // Cast the base object to this child object - this_target = MIX_VIDEOCONFIGPARAMSENC_H263 (target); - this_src = MIX_VIDEOCONFIGPARAMSENC_H263 (src); - - //add properties - this_target->slice_num = this_src->slice_num; - this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc; - - // Now chainup base class - root_class = MIX_PARAMS_CLASS (parent_class); - - if (root_class->copy) - { - return root_class->copy (MIX_PARAMS_CAST (target), - MIX_PARAMS_CAST (src)); - } - else - { - return TRUE; - } - } - return FALSE; -} - -/** -* mix_videoconfigparamsenc_h263: -* @first: first object to compare -* @second: seond object to compare -* @returns: boolean indicates if instance are equal. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videoconfigparamsencenc_h263_equal (MixParams * first, MixParams * second) -{ - gboolean ret = FALSE; - MixVideoConfigParamsEncH263 *this_first, *this_second; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H263 (first) - && MIX_IS_VIDEOCONFIGPARAMSENC_H263 (second)) - { - // Cast the base object to this child object - - this_first = MIX_VIDEOCONFIGPARAMSENC_H263 (first); - this_second = MIX_VIDEOCONFIGPARAMSENC_H263 (second); - - if (this_first->slice_num != this_second->slice_num) { - goto not_equal; - } - - if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) { - goto not_equal; - } - - - ret = TRUE; - - not_equal: - - if (ret != TRUE) { - return ret; - } - - /* TODO: add comparison for properties */ - { - // members within this scope equal. chaining up. 
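/* The chain-up below is the generic MixParams equality pattern: once the
 * subclass members match, the parent class's equal() runs so the inherited
 * MixVideoConfigParamsEnc fields are compared as well.  A condensed,
 * illustrative sketch (TYPE_CHECK and fields_differ are placeholders for
 * the explicit checks spelled out above):
 *
 *   gboolean equal (MixParams *first, MixParams *second)
 *   {
 *     if (!TYPE_CHECK (first) || !TYPE_CHECK (second)) return FALSE;
 *     if (fields_differ (first, second)) return FALSE;
 *     MixParamsClass *k = MIX_PARAMS_CLASS (parent_class);
 *     return k->equal ? k->equal (first, second) : TRUE;
 *   }
 */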
- MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); - if (klass->equal) - { - ret = klass->equal (first, second); - } - else - { - ret = TRUE; - } - } - } - - return ret; -} - -/* TODO: Add getters and setters for properties if any */ - -#define MIX_VIDEOCONFIGPARAMSENC_H263_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOCONFIGPARAMSENC_H263_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj)) return MIX_RESULT_FAIL; \ - - -MIX_RESULT mix_videoconfigparamsenc_h263_set_slice_num (MixVideoConfigParamsEncH263 * obj, - guint slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H263_SETTER_CHECK_INPUT (obj); - obj->slice_num = slice_num; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h263_get_slice_num (MixVideoConfigParamsEncH263 * obj, - guint * slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H263_GETTER_CHECK_INPUT (obj, slice_num); - *slice_num = obj->slice_num; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h263_set_dlk (MixVideoConfigParamsEncH263 * obj, - guint disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_H263_SETTER_CHECK_INPUT (obj); - obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h263_get_dlk (MixVideoConfigParamsEncH263 * obj, - guint * disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_H263_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); - *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; -} - diff --git a/mix_video/src/mixvideoconfigparamsenc_h263.cpp b/mix_video/src/mixvideoconfigparamsenc_h263.cpp new file mode 100644 index 0000000..35add90 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsenc_h263.cpp @@ -0,0 +1,106 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixvideoconfigparamsenc_h263 + * @short_description: MI-X Video H.263 Eecode Configuration Parameter + * + * MI-X video H.263 eecode configuration parameter objects. 
+ */
+
+
+#include "mixvideolog.h"
+#include "mixvideoconfigparamsenc_h263.h"
+
+#define MDEBUG
+
+
+MixVideoConfigParamsEncH263 *
+mix_videoconfigparamsenc_h263_new (void)
+{
+ return new MixVideoConfigParamsEncH263();
+}
+
+
+
+MixVideoConfigParamsEncH263
+ * mix_videoconfigparamsenc_h263_ref (MixVideoConfigParamsEncH263 * mix)
+{
+ return (MixVideoConfigParamsEncH263 *) mix_params_ref (MIX_PARAMS (mix));
+}
+
+/**
+* mix_videoconfigparamsenc_h263_dup:
+* @obj: a #MixVideoConfigParams object
+* @returns: a newly allocated duplicate of the object.
+*
+* Creates a duplicate of the object.
+*/
+MixParams *
+mix_videoconfigparamsenc_h263_dup (const MixParams * obj)
+{
+
+ return NULL;
+}
+
+/**
+* mix_videoconfigparamsenc_h263_copy:
+* @target: copy to target
+* @src: copy from src
+* @returns: boolean indicating whether the copy succeeded.
+*
+* Copy instance data from @src to @target.
+*/
+gboolean
+mix_videoconfigparamsenc_h263_copy (MixParams * target, const MixParams * src)
+{
+
+ return FALSE;
+}
+
+/**
+* mix_videoconfigparamsencenc_h263_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicating whether the instances are equal.
+*
+* Checks if @first and @second are equal.
+*/
+gboolean
+mix_videoconfigparamsencenc_h263_equal (MixParams * first, MixParams * second)
+{
+
+ return FALSE;
+}
+
+
+
+MIX_RESULT mix_videoconfigparamsenc_h263_set_slice_num (MixVideoConfigParamsEncH263 * obj,
+ guint slice_num) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h263_get_slice_num (MixVideoConfigParamsEncH263 * obj,
+ guint * slice_num) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h263_set_dlk (MixVideoConfigParamsEncH263 * obj,
+ guint disable_deblocking_filter_idc) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h263_get_dlk (MixVideoConfigParamsEncH263 * obj,
+ guint * disable_deblocking_filter_idc) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
diff --git a/mix_video/src/mixvideoconfigparamsenc_h263.h b/mix_video/src/mixvideoconfigparamsenc_h263.h
index 097041c..4dfd680 100644
--- a/mix_video/src/mixvideoconfigparamsenc_h263.h
+++ b/mix_video/src/mixvideoconfigparamsenc_h263.h
@@ -12,20 +12,12 @@ No license under any patent, copyright, trade secret or other intellectual prope
 #include "mixvideoconfigparamsenc.h"
 #include "mixvideodef.h"
-G_BEGIN_DECLS
-
-/**
-* MIX_TYPE_VIDEOCONFIGPARAMSENC_H263:
-*
-* Get type of class.
-*/
-#define MIX_TYPE_VIDEOCONFIGPARAMSENC_H263 (mix_videoconfigparamsenc_h263_get_type ())
-
 /**
 * MIX_VIDEOCONFIGPARAMSENC_H263:
 * @obj: object to be type-casted.
 */
-#define MIX_VIDEOCONFIGPARAMSENC_H263(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263, MixVideoConfigParamsEncH263))
+//#define MIX_VIDEOCONFIGPARAMSENC_H263(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263, MixVideoConfigParamsEncH263))
+#define MIX_VIDEOCONFIGPARAMSENC_H263(obj) (reinterpret_cast<MixVideoConfigParamsEncH263*>(obj))
 /**
 * MIX_IS_VIDEOCONFIGPARAMSENC_H263:
@@ -33,13 +25,13 @@ G_BEGIN_DECLS
 *
 * Checks if the given object is an instance of #MixVideoConfigParamsEncH263
 */
-#define MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263))
+//#define MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263))
 /**
 * MIX_VIDEOCONFIGPARAMSENC_H263_CLASS:
 * @klass: class to be type-casted.
*/ -#define MIX_VIDEOCONFIGPARAMSENC_H263_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263, MixVideoConfigParamsEncH263Class)) +//#define MIX_VIDEOCONFIGPARAMSENC_H263_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263, MixVideoConfigParamsEncH263Class)) /** * MIX_IS_VIDEOCONFIGPARAMSENC_H263_CLASS: @@ -47,7 +39,7 @@ G_BEGIN_DECLS * * Checks if the given class is #MixVideoConfigParamsEncH263Class */ -#define MIX_IS_VIDEOCONFIGPARAMSENC_H263_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263)) +//#define MIX_IS_VIDEOCONFIGPARAMSENC_H263_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263)) /** * MIX_VIDEOCONFIGPARAMSENC_H263_GET_CLASS: @@ -55,20 +47,19 @@ G_BEGIN_DECLS * * Get the class instance of the object. */ -#define MIX_VIDEOCONFIGPARAMSENC_H263_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263, MixVideoConfigParamsEncH263Class)) +//#define MIX_VIDEOCONFIGPARAMSENC_H263_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263, MixVideoConfigParamsEncH263Class)) + + -typedef struct _MixVideoConfigParamsEncH263 MixVideoConfigParamsEncH263; -typedef struct _MixVideoConfigParamsEncH263Class MixVideoConfigParamsEncH263Class; /** * MixVideoConfigParamsEncH263: * * MI-X VideoConfig Parameter object */ -struct _MixVideoConfigParamsEncH263 +class MixVideoConfigParamsEncH263 : public MixVideoConfigParamsEnc { - /*< public > */ - MixVideoConfigParamsEnc parent; + public: /*< public > */ @@ -93,26 +84,7 @@ struct _MixVideoConfigParamsEncH263 void *reserved4; }; -/** -* MixVideoConfigParamsEncH263Class: -* -* MI-X VideoConfig object class -*/ -struct _MixVideoConfigParamsEncH263Class -{ - /*< public > */ - MixVideoConfigParamsEncClass parent_class; - - /* class members */ -}; -/** -* mix_videoconfigparamsenc_h263_get_type: -* @returns: type -* -* Get the type of object. -*/ -GType mix_videoconfigparamsenc_h263_get_type (void); /** * mix_videoconfigparamsenc_h263_new: @@ -187,6 +159,5 @@ MIX_RESULT mix_videoconfigparamsenc_h263_set_slice_num (MixVideoConfigParamsEncH MIX_RESULT mix_videoconfigparamsenc_h263_get_slice_num (MixVideoConfigParamsEncH263 * obj, guint * slice_num); -G_END_DECLS #endif /* __MIX_VIDEOCONFIGPARAMSENC_H263_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.c b/mix_video/src/mixvideoconfigparamsenc_h264.c deleted file mode 100644 index 12a5dd1..0000000 --- a/mix_video/src/mixvideoconfigparamsenc_h264.c +++ /dev/null @@ -1,386 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
- -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** - * SECTION:mixvideoconfigparamsenc_h264 - * @short_description: MI-X Video H.264 Eecode Configuration Parameter - * - * MI-X video H.264 eecode configuration parameter objects. - */ - - -#include "mixvideolog.h" -#include "mixvideoconfigparamsenc_h264.h" - -#define MDEBUG - - -static GType _mix_videoconfigparamsenc_h264_type = 0; -static MixVideoConfigParamsEncClass *parent_class = NULL; - -#define _do_init { _mix_videoconfigparamsenc_h264_type = g_define_type_id; } - -gboolean mix_videoconfigparamsenc_h264_copy (MixParams * target, - const MixParams * src); -MixParams *mix_videoconfigparamsenc_h264_dup (const MixParams * obj); -gboolean mix_videoconfigparamsencenc_h264_equal (MixParams * first, - MixParams * second); -static void mix_videoconfigparamsenc_h264_finalize (MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncH264, /* The name of the new type, in Camel case */ - mix_videoconfigparamsenc_h264, /* The name of the new type in lowercase */ - MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ - _do_init); - -void -_mix_videoconfigparamsenc_h264_initialize (void) -{ - /* the MixParams types need to be class_ref'd once before it can be - * done from multiple threads; - * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ - g_type_class_ref (mix_videoconfigparamsenc_h264_get_type ()); -} - -static void -mix_videoconfigparamsenc_h264_init (MixVideoConfigParamsEncH264 * self) -{ - /* initialize properties here */ - /* TODO: initialize properties */ - self->basic_unit_size = 0; - self->slice_num = 1; - self->I_slice_num = 1; - self->P_slice_num = 1; - self->disable_deblocking_filter_idc = 0; - - self->delimiter_type = MIX_DELIMITER_LENGTHPREFIX; - self->idr_interval = 2; - - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void -mix_videoconfigparamsenc_h264_class_init (MixVideoConfigParamsEncH264Class * klass) -{ - MixVideoConfigParamsEncClass *this_parent_class = - MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); - MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); - - /* setup static parent class */ - parent_class = - (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); - - this_root_class->finalize = mix_videoconfigparamsenc_h264_finalize; - this_root_class->copy = - (MixParamsCopyFunction) mix_videoconfigparamsenc_h264_copy; - this_root_class->dup = - (MixParamsDupFunction) mix_videoconfigparamsenc_h264_dup; - this_root_class->equal = - (MixParamsEqualFunction) mix_videoconfigparamsencenc_h264_equal; -} - -MixVideoConfigParamsEncH264 * -mix_videoconfigparamsenc_h264_new (void) -{ - MixVideoConfigParamsEncH264 *ret = (MixVideoConfigParamsEncH264 *) - g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_H264); - - return ret; -} - -void -mix_videoconfigparamsenc_h264_finalize (MixParams * obj) -{ - /* MixVideoConfigParamsEncH264 *this_obj = MIX_VIDEOCONFIGPARAMSENC_H264 (obj); */ - MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); - - /* TODO: cleanup resources allocated */ - - /* Chain up parent */ - - if (root_class->finalize) - { - root_class->finalize (obj); - } -} - 
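/* In the GObject code above, teardown is split between finalize() and an
 * explicit chain-up to root_class->finalize.  In the C++ rewrite this duty
 * moves to a virtual destructor, where base-class destruction runs
 * automatically.  A minimal sketch under that assumption (the destructor
 * body is hypothetical; this subclass currently owns no heap resources):
 *
 *   class MixVideoConfigParamsEncH264 : public MixVideoConfigParamsEnc {
 *   public:
 *     virtual ~MixVideoConfigParamsEncH264() {
 *       // release subclass-owned resources here; ~MixVideoConfigParamsEnc()
 *       // then runs automatically, replacing the explicit chain-up.
 *     }
 *   };
 */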
-MixVideoConfigParamsEncH264 - * mix_videoconfigparamsenc_h264_ref (MixVideoConfigParamsEncH264 * mix) -{ - return (MixVideoConfigParamsEncH264 *) mix_params_ref (MIX_PARAMS (mix)); -} - -/** -* mix_videoconfigparamsenc_h264_dup: -* @obj: a #MixVideoConfigParams object -* @returns: a newly allocated duplicate of the object. -* -* Copy duplicate of the object. -*/ -MixParams * -mix_videoconfigparamsenc_h264_dup (const MixParams * obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (obj)) - { - MixVideoConfigParamsEncH264 *duplicate = mix_videoconfigparamsenc_h264_new (); - if (mix_videoconfigparamsenc_h264_copy - (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) - { - ret = MIX_PARAMS (duplicate); - } - else - { - mix_videoconfigparamsenc_h264_unref (duplicate); - } - } - return ret; -} - -/** -* mix_videoconfigparamsenc_h264_copy: -* @target: copy to target -* @src: copy from src -* @returns: boolean indicates if copy is successful. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videoconfigparamsenc_h264_copy (MixParams * target, const MixParams * src) -{ - MixVideoConfigParamsEncH264 *this_target, *this_src; - MixParamsClass *root_class; - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (target) - && MIX_IS_VIDEOCONFIGPARAMSENC_H264 (src)) - { - // Cast the base object to this child object - this_target = MIX_VIDEOCONFIGPARAMSENC_H264 (target); - this_src = MIX_VIDEOCONFIGPARAMSENC_H264 (src); - - //add properties - this_target->basic_unit_size = this_src->basic_unit_size; - this_target->slice_num = this_src->slice_num; - this_target->I_slice_num = this_src->I_slice_num; - this_target->P_slice_num = this_src->P_slice_num; - this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc; - this_target->delimiter_type = this_src->delimiter_type; - this_target->idr_interval = this_src->idr_interval; - - - - // Now chainup base class - root_class = MIX_PARAMS_CLASS (parent_class); - - if (root_class->copy) - { - return root_class->copy (MIX_PARAMS_CAST (target), - MIX_PARAMS_CAST (src)); - } - else - { - return TRUE; - } - } - return FALSE; -} - -/** -* mix_videoconfigparamsenc_h264: -* @first: first object to compare -* @second: seond object to compare -* @returns: boolean indicates if instance are equal. -* -* Copy instance data from @src to @target. 
-*/ -gboolean -mix_videoconfigparamsencenc_h264_equal (MixParams * first, MixParams * second) -{ - gboolean ret = FALSE; - MixVideoConfigParamsEncH264 *this_first, *this_second; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (first) - && MIX_IS_VIDEOCONFIGPARAMSENC_H264 (second)) - { - // Cast the base object to this child object - - this_first = MIX_VIDEOCONFIGPARAMSENC_H264 (first); - this_second = MIX_VIDEOCONFIGPARAMSENC_H264 (second); - - if (this_first->basic_unit_size != this_second->basic_unit_size) { - goto not_equal; - } - - if (this_first->slice_num != this_second->slice_num) { - goto not_equal; - } - - if (this_first->I_slice_num != this_second->I_slice_num) { - goto not_equal; - } - - if (this_first->P_slice_num != this_second->P_slice_num) { - goto not_equal; - } - - if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) { - goto not_equal; - } - - if (this_first->delimiter_type != this_second->delimiter_type) { - goto not_equal; - } - - if (this_first->idr_interval != this_second->idr_interval) { - goto not_equal; - } - - - ret = TRUE; - - not_equal: - - if (ret != TRUE) { - return ret; - } - - /* TODO: add comparison for properties */ - { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); - if (klass->equal) - { - ret = klass->equal (first, second); - } - else - { - ret = TRUE; - } - } - } - - return ret; -} - -/* TODO: Add getters and setters for properties if any */ - -#define MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj)) return MIX_RESULT_FAIL; \ - - -MIX_RESULT mix_videoconfigparamsenc_h264_set_bus (MixVideoConfigParamsEncH264 * obj, - guint basic_unit_size) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->basic_unit_size = basic_unit_size; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_bus (MixVideoConfigParamsEncH264 * obj, - guint * basic_unit_size) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, basic_unit_size); - *basic_unit_size = obj->basic_unit_size; - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (MixVideoConfigParamsEncH264 * obj, - guint disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk (MixVideoConfigParamsEncH264 * obj, - guint * disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); - *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(MixVideoConfigParamsEncH264 * obj, - guint slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->slice_num = slice_num; - obj->I_slice_num = slice_num; - obj->P_slice_num = slice_num; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num(MixVideoConfigParamsEncH264 * obj, - guint * slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, slice_num); - *slice_num = obj->slice_num; - return MIX_RESULT_SUCCESS; -} - 
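/* Usage sketch for the guarded accessors above (values illustrative): each
 * call validates its arguments through the *_CHECK_INPUT macros before
 * touching the object, so callers only need to test the returned MIX_RESULT.
 * Note that set_slice_num fans out to slice_num, I_slice_num and
 * P_slice_num, which the I/P-specific setters below can then override
 * individually:
 *
 *   MixVideoConfigParamsEncH264 *params = mix_videoconfigparamsenc_h264_new ();
 *   if (mix_videoconfigparamsenc_h264_set_slice_num (params, 4)
 *       != MIX_RESULT_SUCCESS) {
 *     // NULL object or failed type check
 *   }
 */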
-MIX_RESULT mix_videoconfigparamsenc_h264_set_I_slice_num(MixVideoConfigParamsEncH264 * obj, - guint I_slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->I_slice_num = I_slice_num; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_I_slice_num(MixVideoConfigParamsEncH264 * obj, - guint * I_slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, I_slice_num); - *I_slice_num = obj->I_slice_num; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_set_P_slice_num(MixVideoConfigParamsEncH264 * obj, - guint P_slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->P_slice_num = P_slice_num; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_P_slice_num(MixVideoConfigParamsEncH264 * obj, - guint * P_slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, P_slice_num); - *P_slice_num = obj->P_slice_num; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParamsEncH264 * obj, - MixDelimiterType delimiter_type) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->delimiter_type = delimiter_type; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type (MixVideoConfigParamsEncH264 * obj, - MixDelimiterType * delimiter_type) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, delimiter_type); - *delimiter_type = obj->delimiter_type; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_set_IDR_interval (MixVideoConfigParamsEncH264 * obj, - guint idr_interval) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->idr_interval = idr_interval; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_IDR_interval (MixVideoConfigParamsEncH264 * obj, - guint * idr_interval) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, idr_interval); - *idr_interval = obj->idr_interval; - return MIX_RESULT_SUCCESS; -} diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.cpp b/mix_video/src/mixvideoconfigparamsenc_h264.cpp new file mode 100644 index 0000000..3219718 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsenc_h264.cpp @@ -0,0 +1,172 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+*/
+
+/**
+ * SECTION:mixvideoconfigparamsenc_h264
+ * @short_description: MI-X Video H.264 Encode Configuration Parameter
+ *
+ * MI-X video H.264 encode configuration parameter objects.
+ */
+
+
+#include "mixvideolog.h"
+#include "mixvideoconfigparamsenc_h264.h"
+
+#define MDEBUG
+
+
+
+
+
+MixVideoConfigParamsEncH264 *
+mix_videoconfigparamsenc_h264_new (void)
+{
+
+
+ return new MixVideoConfigParamsEncH264();
+}
+
+
+MixVideoConfigParamsEncH264
+ * mix_videoconfigparamsenc_h264_ref (MixVideoConfigParamsEncH264 * mix)
+{
+ return (MixVideoConfigParamsEncH264 *) mix_params_ref (MIX_PARAMS (mix));
+}
+
+/**
+* mix_videoconfigparamsenc_h264_dup:
+* @obj: a #MixVideoConfigParams object
+* @returns: a newly allocated duplicate of the object.
+*
+* Creates a duplicate of the object.
+*/
+MixParams *
+mix_videoconfigparamsenc_h264_dup (const MixParams * obj)
+{
+
+ return NULL;
+}
+
+/**
+* mix_videoconfigparamsenc_h264_copy:
+* @target: copy to target
+* @src: copy from src
+* @returns: boolean indicating whether the copy succeeded.
+*
+* Copy instance data from @src to @target.
+*/
+gboolean
+mix_videoconfigparamsenc_h264_copy (MixParams * target, const MixParams * src)
+{
+
+ return FALSE;
+}
+
+/**
+* mix_videoconfigparamsencenc_h264_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicating whether the instances are equal.
+*
+* Checks if @first and @second are equal.
+*/
+gboolean
+mix_videoconfigparamsencenc_h264_equal (MixParams * first, MixParams * second)
+{
+ gboolean ret = FALSE;
+
+ return ret;
+}
+
+
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_bus (MixVideoConfigParamsEncH264 * obj,
+ guint basic_unit_size) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_bus (MixVideoConfigParamsEncH264 * obj,
+ guint * basic_unit_size) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (MixVideoConfigParamsEncH264 * obj,
+ guint disable_deblocking_filter_idc) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk (MixVideoConfigParamsEncH264 * obj,
+ guint * disable_deblocking_filter_idc) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(MixVideoConfigParamsEncH264 * obj,
+ guint slice_num) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num(MixVideoConfigParamsEncH264 * obj,
+ guint * slice_num) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_I_slice_num(MixVideoConfigParamsEncH264 * obj,
+ guint I_slice_num) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_I_slice_num(MixVideoConfigParamsEncH264 * obj,
+ guint * I_slice_num) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_P_slice_num(MixVideoConfigParamsEncH264 * obj,
+ guint P_slice_num) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_P_slice_num(MixVideoConfigParamsEncH264 * obj,
+ guint * P_slice_num) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParamsEncH264 * obj,
+ MixDelimiterType delimiter_type) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type (MixVideoConfigParamsEncH264 * obj,
+ MixDelimiterType * delimiter_type) {
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_IDR_interval (MixVideoConfigParamsEncH264 * obj,
+ guint
idr_interval) { + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_h264_get_IDR_interval (MixVideoConfigParamsEncH264 * obj, + guint * idr_interval) { + + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.h b/mix_video/src/mixvideoconfigparamsenc_h264.h index 4eddcb5..fe8fd16 100644 --- a/mix_video/src/mixvideoconfigparamsenc_h264.h +++ b/mix_video/src/mixvideoconfigparamsenc_h264.h @@ -12,20 +12,14 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoconfigparamsenc.h" #include "mixvideodef.h" -G_BEGIN_DECLS -/** -* MIX_TYPE_VIDEOCONFIGPARAMSENC_H264: -* -* Get type of class. -*/ -#define MIX_TYPE_VIDEOCONFIGPARAMSENC_H264 (mix_videoconfigparamsenc_h264_get_type ()) /** * MIX_VIDEOCONFIGPARAMSENC_H264: * @obj: object to be type-casted. */ -#define MIX_VIDEOCONFIGPARAMSENC_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264)) +//#define MIX_VIDEOCONFIGPARAMSENC_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264)) +#define MIX_VIDEOCONFIGPARAMSENC_H264(obj) (reinterpret_cast(obj)) /** * MIX_IS_VIDEOCONFIGPARAMSENC_H264: @@ -33,13 +27,13 @@ G_BEGIN_DECLS * * Checks if the given object is an instance of #MixVideoConfigParamsEncH264 */ -#define MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264)) +//#define MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264)) /** * MIX_VIDEOCONFIGPARAMSENC_H264_CLASS: * @klass: class to be type-casted. */ -#define MIX_VIDEOCONFIGPARAMSENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264Class)) +//#define MIX_VIDEOCONFIGPARAMSENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264Class)) /** * MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS: @@ -47,7 +41,7 @@ G_BEGIN_DECLS * * Checks if the given class is #MixVideoConfigParamsEncH264Class */ -#define MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264)) +//#define MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264)) /** * MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS: @@ -55,20 +49,19 @@ G_BEGIN_DECLS * * Get the class instance of the object. 
*/ -#define MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264Class)) +//#define MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264Class)) + -typedef struct _MixVideoConfigParamsEncH264 MixVideoConfigParamsEncH264; -typedef struct _MixVideoConfigParamsEncH264Class MixVideoConfigParamsEncH264Class; +//typedef struct _MixVideoConfigParamsEncH264Class MixVideoConfigParamsEncH264Class; /** * MixVideoConfigParamsEncH264: * * MI-X VideoConfig Parameter object */ -struct _MixVideoConfigParamsEncH264 +class MixVideoConfigParamsEncH264 : public MixVideoConfigParamsEnc { - /*< public > */ - MixVideoConfigParamsEnc parent; +public: /*< public > */ @@ -107,26 +100,7 @@ struct _MixVideoConfigParamsEncH264 void *reserved4; }; -/** -* MixVideoConfigParamsEncH264Class: -* -* MI-X VideoConfig object class -*/ -struct _MixVideoConfigParamsEncH264Class -{ - /*< public > */ - MixVideoConfigParamsEncClass parent_class; - - /* class members */ -}; -/** -* mix_videoconfigparamsenc_h264_get_type: -* @returns: type -* -* Get the type of object. -*/ -GType mix_videoconfigparamsenc_h264_get_type (void); /** * mix_videoconfigparamsenc_h264_new: @@ -316,7 +290,5 @@ MIX_RESULT mix_videoconfigparamsenc_h264_get_IDR_interval (MixVideoConfigParamsE guint * idr_interval); -G_END_DECLS - #endif /* __MIX_VIDEOCONFIGPARAMSENC_H264_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.c b/mix_video/src/mixvideoconfigparamsenc_mpeg4.c deleted file mode 100644 index 6e11d22..0000000 --- a/mix_video/src/mixvideoconfigparamsenc_mpeg4.c +++ /dev/null @@ -1,301 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** - * SECTION:mixvideoconfigparamsenc_mpeg4 - * @short_description: MI-X Video MPEG 4:2 Eecode Configuration Parameter - * - * MI-X video MPEG 4:2 eecode configuration parameter objects. 
- */ - - -#include "mixvideolog.h" -#include "mixvideoconfigparamsenc_mpeg4.h" - -#define MDEBUG - - -static GType _mix_videoconfigparamsenc_mpeg4_type = 0; -static MixVideoConfigParamsEncClass *parent_class = NULL; - -#define _do_init { _mix_videoconfigparamsenc_mpeg4_type = g_define_type_id; } - -gboolean mix_videoconfigparamsenc_mpeg4_copy (MixParams * target, - const MixParams * src); -MixParams *mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj); -gboolean mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, - MixParams * second); -static void mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncMPEG4, /* The name of the new type, in Camel case */ - mix_videoconfigparamsenc_mpeg4, /* The name of the new type in lowercase */ - MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ - _do_init); - -void -_mix_videoconfigparamsenc_mpeg4_initialize (void) -{ - /* the MixParams types need to be class_ref'd once before it can be - * done from multiple threads; - * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ - g_type_class_ref (mix_videoconfigparamsenc_mpeg4_get_type ()); -} - -static void -mix_videoconfigparamsenc_mpeg4_init (MixVideoConfigParamsEncMPEG4 * self) -{ - /* initialize properties here */ - /* TODO: initialize properties */ - - self->fixed_vop_time_increment = 3; - self->profile_and_level_indication = 3; - self->disable_deblocking_filter_idc = 0; - - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void -mix_videoconfigparamsenc_mpeg4_class_init (MixVideoConfigParamsEncMPEG4Class * klass) -{ - MixVideoConfigParamsEncClass *this_parent_class = - MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); - MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); - - /* setup static parent class */ - parent_class = - (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); - - this_root_class->finalize = mix_videoconfigparamsenc_mpeg4_finalize; - this_root_class->copy = - (MixParamsCopyFunction) mix_videoconfigparamsenc_mpeg4_copy; - this_root_class->dup = - (MixParamsDupFunction) mix_videoconfigparamsenc_mpeg4_dup; - this_root_class->equal = - (MixParamsEqualFunction) mix_videoconfigparamsencenc_mpeg4_equal; -} - -MixVideoConfigParamsEncMPEG4 * -mix_videoconfigparamsenc_mpeg4_new (void) -{ - MixVideoConfigParamsEncMPEG4 *ret = (MixVideoConfigParamsEncMPEG4 *) - g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4); - - return ret; -} - -void -mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj) -{ - /* MixVideoConfigParamsEncMPEG4 *this_obj = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (obj); */ - MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); - - /* TODO: cleanup resources allocated */ - - /* Chain up parent */ - - if (root_class->finalize) - { - root_class->finalize (obj); - } -} - -MixVideoConfigParamsEncMPEG4 - * mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix) -{ - return (MixVideoConfigParamsEncMPEG4 *) mix_params_ref (MIX_PARAMS (mix)); -} - -/** -* mix_videoconfigparamsenc_mpeg4_dup: -* @obj: a #MixVideoConfigParams object -* @returns: a newly allocated duplicate of the object. -* -* Copy duplicate of the object. 
-*/ -MixParams * -mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (obj)) - { - MixVideoConfigParamsEncMPEG4 *duplicate = mix_videoconfigparamsenc_mpeg4_new (); - if (mix_videoconfigparamsenc_mpeg4_copy - (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) - { - ret = MIX_PARAMS (duplicate); - } - else - { - mix_videoconfigparamsenc_mpeg4_unref (duplicate); - } - } - return ret; -} - -/** -* mix_videoconfigparamsenc_mpeg4_copy: -* @target: copy to target -* @src: copy from src -* @returns: boolean indicates if copy is successful. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videoconfigparamsenc_mpeg4_copy (MixParams * target, const MixParams * src) -{ - MixVideoConfigParamsEncMPEG4 *this_target, *this_src; - MixParamsClass *root_class; - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (target) - && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (src)) - { - // Cast the base object to this child object - this_target = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (target); - this_src = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (src); - - //add properties - this_target->profile_and_level_indication= this_src->profile_and_level_indication; - this_target->fixed_vop_time_increment= this_src->fixed_vop_time_increment; - this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc; - - // Now chainup base class - root_class = MIX_PARAMS_CLASS (parent_class); - - if (root_class->copy) - { - return root_class->copy (MIX_PARAMS_CAST (target), - MIX_PARAMS_CAST (src)); - } - else - { - return TRUE; - } - } - return FALSE; -} - -/** -* mix_videoconfigparamsenc_mpeg4: -* @first: first object to compare -* @second: seond object to compare -* @returns: boolean indicates if instance are equal. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, MixParams * second) -{ - gboolean ret = FALSE; - MixVideoConfigParamsEncMPEG4 *this_first, *this_second; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (first) - && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (second)) - { - // Cast the base object to this child object - - this_first = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (first); - this_second = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (second); - - if (this_first->profile_and_level_indication!= this_second->profile_and_level_indication) { - goto not_equal; - } - - if (this_first->fixed_vop_time_increment!= this_second->fixed_vop_time_increment) { - goto not_equal; - } - - if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) { - goto not_equal; - } - - - ret = TRUE; - - not_equal: - - if (ret != TRUE) { - return ret; - } - - /* TODO: add comparison for properties */ - { - // members within this scope equal. chaining up. 
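/* The goto-based flow above is the early-exit idiom used throughout these
 * equality checks: any mismatching member jumps over "ret = TRUE;" to the
 * not_equal label, and only a fully matching pair reaches the chain-up
 * that follows.  Structured equivalent (illustrative; a and b stand for
 * the members compared above):
 *
 *   ret = (first->a == second->a) && (first->b == second->b);
 *   if (ret && klass->equal)
 *     ret = klass->equal (first, second);
 */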
- MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); - if (klass->equal) - { - ret = klass->equal (first, second); - } - else - { - ret = TRUE; - } - } - } - - return ret; -} - -/* TODO: Add getters and setters for properties if any */ - -#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \ - - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParamsEncMPEG4 * obj, - guchar profile_and_level_indication) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); - obj->profile_and_level_indication = profile_and_level_indication; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (MixVideoConfigParamsEncMPEG4 * obj, - guchar * profile_and_level_indication) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, profile_and_level_indication); - *profile_and_level_indication = obj->profile_and_level_indication; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, - guint fixed_vop_time_increment) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); - obj->fixed_vop_time_increment = fixed_vop_time_increment; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, - guint * fixed_vop_time_increment) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, fixed_vop_time_increment); - *fixed_vop_time_increment = obj->fixed_vop_time_increment; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 * obj, - guint disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); - obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (MixVideoConfigParamsEncMPEG4 * obj, - guint * disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); - *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; -} - diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp b/mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp new file mode 100644 index 0000000..6e11d22 --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp @@ -0,0 +1,301 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
+ +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** + * SECTION:mixvideoconfigparamsenc_mpeg4 + * @short_description: MI-X Video MPEG 4:2 Eecode Configuration Parameter + * + * MI-X video MPEG 4:2 eecode configuration parameter objects. + */ + + +#include "mixvideolog.h" +#include "mixvideoconfigparamsenc_mpeg4.h" + +#define MDEBUG + + +static GType _mix_videoconfigparamsenc_mpeg4_type = 0; +static MixVideoConfigParamsEncClass *parent_class = NULL; + +#define _do_init { _mix_videoconfigparamsenc_mpeg4_type = g_define_type_id; } + +gboolean mix_videoconfigparamsenc_mpeg4_copy (MixParams * target, + const MixParams * src); +MixParams *mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj); +gboolean mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, + MixParams * second); +static void mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncMPEG4, /* The name of the new type, in Camel case */ + mix_videoconfigparamsenc_mpeg4, /* The name of the new type in lowercase */ + MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ + _do_init); + +void +_mix_videoconfigparamsenc_mpeg4_initialize (void) +{ + /* the MixParams types need to be class_ref'd once before it can be + * done from multiple threads; + * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ + g_type_class_ref (mix_videoconfigparamsenc_mpeg4_get_type ()); +} + +static void +mix_videoconfigparamsenc_mpeg4_init (MixVideoConfigParamsEncMPEG4 * self) +{ + /* initialize properties here */ + /* TODO: initialize properties */ + + self->fixed_vop_time_increment = 3; + self->profile_and_level_indication = 3; + self->disable_deblocking_filter_idc = 0; + + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; +} + +static void +mix_videoconfigparamsenc_mpeg4_class_init (MixVideoConfigParamsEncMPEG4Class * klass) +{ + MixVideoConfigParamsEncClass *this_parent_class = + MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); + MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); + + /* setup static parent class */ + parent_class = + (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); + + this_root_class->finalize = mix_videoconfigparamsenc_mpeg4_finalize; + this_root_class->copy = + (MixParamsCopyFunction) mix_videoconfigparamsenc_mpeg4_copy; + this_root_class->dup = + (MixParamsDupFunction) mix_videoconfigparamsenc_mpeg4_dup; + this_root_class->equal = + (MixParamsEqualFunction) mix_videoconfigparamsencenc_mpeg4_equal; +} + +MixVideoConfigParamsEncMPEG4 * +mix_videoconfigparamsenc_mpeg4_new (void) +{ + MixVideoConfigParamsEncMPEG4 *ret = (MixVideoConfigParamsEncMPEG4 *) + g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4); + + return ret; +} + +void +mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj) +{ + /* MixVideoConfigParamsEncMPEG4 *this_obj = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (obj); */ + MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); + + /* TODO: cleanup resources allocated */ + + /* Chain up parent */ + + if (root_class->finalize) + { + root_class->finalize (obj); + } +} + +MixVideoConfigParamsEncMPEG4 + * mix_videoconfigparamsenc_mpeg4_ref 
(MixVideoConfigParamsEncMPEG4 * mix) +{ + return (MixVideoConfigParamsEncMPEG4 *) mix_params_ref (MIX_PARAMS (mix)); +} + +/** +* mix_videoconfigparamsenc_mpeg4_dup: +* @obj: a #MixVideoConfigParams object +* @returns: a newly allocated duplicate of the object. +* +* Copy duplicate of the object. +*/ +MixParams * +mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj) +{ + MixParams *ret = NULL; + + if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (obj)) + { + MixVideoConfigParamsEncMPEG4 *duplicate = mix_videoconfigparamsenc_mpeg4_new (); + if (mix_videoconfigparamsenc_mpeg4_copy + (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) + { + ret = MIX_PARAMS (duplicate); + } + else + { + mix_videoconfigparamsenc_mpeg4_unref (duplicate); + } + } + return ret; +} + +/** +* mix_videoconfigparamsenc_mpeg4_copy: +* @target: copy to target +* @src: copy from src +* @returns: boolean indicates if copy is successful. +* +* Copy instance data from @src to @target. +*/ +gboolean +mix_videoconfigparamsenc_mpeg4_copy (MixParams * target, const MixParams * src) +{ + MixVideoConfigParamsEncMPEG4 *this_target, *this_src; + MixParamsClass *root_class; + + LOG_V( "Begin\n"); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (target) + && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (src)) + { + // Cast the base object to this child object + this_target = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (target); + this_src = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (src); + + //add properties + this_target->profile_and_level_indication= this_src->profile_and_level_indication; + this_target->fixed_vop_time_increment= this_src->fixed_vop_time_increment; + this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc; + + // Now chainup base class + root_class = MIX_PARAMS_CLASS (parent_class); + + if (root_class->copy) + { + return root_class->copy (MIX_PARAMS_CAST (target), + MIX_PARAMS_CAST (src)); + } + else + { + return TRUE; + } + } + return FALSE; +} + +/** +* mix_videoconfigparamsenc_mpeg4: +* @first: first object to compare +* @second: seond object to compare +* @returns: boolean indicates if instance are equal. +* +* Copy instance data from @src to @target. +*/ +gboolean +mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, MixParams * second) +{ + gboolean ret = FALSE; + MixVideoConfigParamsEncMPEG4 *this_first, *this_second; + + if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (first) + && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (second)) + { + // Cast the base object to this child object + + this_first = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (first); + this_second = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (second); + + if (this_first->profile_and_level_indication!= this_second->profile_and_level_indication) { + goto not_equal; + } + + if (this_first->fixed_vop_time_increment!= this_second->fixed_vop_time_increment) { + goto not_equal; + } + + if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) { + goto not_equal; + } + + + ret = TRUE; + + not_equal: + + if (ret != TRUE) { + return ret; + } + + /* TODO: add comparison for properties */ + { + // members within this scope equal. chaining up. 
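/* Unlike the h263 and h264 .cpp files above, whose copy/dup/equal bodies
 * were stubbed out, this mpeg4 .cpp re-adds the GObject implementation
 * verbatim (the new file carries the same blob, 6e11d22, that the deleted
 * .c had), so the chain-up below still dispatches through the
 * MixParamsClass vtable rather than C++ virtual functions.
 */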
+ MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); + if (klass->equal) + { + ret = klass->equal (first, second); + } + else + { + ret = TRUE; + } + } + } + + return ret; +} + +/* TODO: Add getters and setters for properties if any */ + +#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \ + + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParamsEncMPEG4 * obj, + guchar profile_and_level_indication) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); + obj->profile_and_level_indication = profile_and_level_indication; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (MixVideoConfigParamsEncMPEG4 * obj, + guchar * profile_and_level_indication) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, profile_and_level_indication); + *profile_and_level_indication = obj->profile_and_level_indication; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, + guint fixed_vop_time_increment) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); + obj->fixed_vop_time_increment = fixed_vop_time_increment; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, + guint * fixed_vop_time_increment) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, fixed_vop_time_increment); + *fixed_vop_time_increment = obj->fixed_vop_time_increment; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 * obj, + guint disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); + obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (MixVideoConfigParamsEncMPEG4 * obj, + guint * disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); + *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h index 7ff32bc..733b34b 100644 --- a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h +++ b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h @@ -12,20 +12,21 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoconfigparamsenc.h" #include "mixvideodef.h" -G_BEGIN_DECLS + /** * MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4: * * Get type of class. */ -#define MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4 (mix_videoconfigparamsenc_mpeg4_get_type ()) +//#define MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4 (mix_videoconfigparamsenc_mpeg4_get_type ()) /** * MIX_VIDEOCONFIGPARAMSENC_MPEG4: * @obj: object to be type-casted. 
*/
-#define MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4))
+//#define MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4))
+#define MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj) (reinterpret_cast<MixVideoConfigParamsEncMPEG4*>(obj))

/**
 * MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4:
@@ -33,13 +34,13 @@ G_BEGIN_DECLS
 *
 * Checks if the given object is an instance of #MixVideoConfigParamsEncMPEG4
 */
-#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4))
+//#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4))

/**
 * MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS:
 * @klass: class to be type-casted.
 */
-#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class))
+//#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class))

/**
 * MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS:
@@ -47,7 +48,7 @@ G_BEGIN_DECLS
 *
 * Checks if the given class is #MixVideoConfigParamsEncMPEG4Class
 */
-#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4))
+//#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4))

/**
 * MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS:
@@ -55,21 +56,19 @@ G_BEGIN_DECLS
 *
 * Get the class instance of the object.
 */
-#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class))
+//#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class))
+

-typedef struct _MixVideoConfigParamsEncMPEG4 MixVideoConfigParamsEncMPEG4;
-typedef struct _MixVideoConfigParamsEncMPEG4Class MixVideoConfigParamsEncMPEG4Class;
+//typedef struct _MixVideoConfigParamsEncMPEG4Class MixVideoConfigParamsEncMPEG4Class;

/**
 * MixVideoConfigParamsEncMPEG4:
 *
 * MI-X VideoConfig Parameter object
 */
-struct _MixVideoConfigParamsEncMPEG4
+class MixVideoConfigParamsEncMPEG4 : public MixVideoConfigParamsEnc
{
-  /*< public > */
-  MixVideoConfigParamsEnc parent;
-
+public:
  /*< public > */

  /* TODO: Add MPEG-4 configuration parameters */
@@ -102,26 +101,6 @@ struct _MixVideoConfigParamsEncMPEG4
  void *reserved4;
};

-/**
-* MixVideoConfigParamsEncMPEG4Class:
-*
-* MI-X VideoConfig object class
-*/
-struct _MixVideoConfigParamsEncMPEG4Class
-{
-  /*< public > */
-  MixVideoConfigParamsEncClass parent_class;
-
-  /* class members */
-};
-
-/**
-* mix_videoconfigparamsenc_mpeg4_get_type:
-* @returns: type
-*
-* Get the type of object.
-*/ -GType mix_videoconfigparamsenc_mpeg4_get_type (void); /** * mix_videoconfigparamsenc_mpeg4_new: @@ -222,6 +201,6 @@ MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEnc MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, guint * fixed_vop_time_increment); -G_END_DECLS + #endif /* __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.c b/mix_video/src/mixvideoconfigparamsenc_preview.c deleted file mode 100644 index 890aaac..0000000 --- a/mix_video/src/mixvideoconfigparamsenc_preview.c +++ /dev/null @@ -1,217 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** -* SECTION:mixvideoconfigparamsenc_preview -* @short_description: VideoConfig parameters -* -* A data object which stores videoconfig specific parameters. 
-*/ - -#include "mixvideolog.h" -#include "mixvideoconfigparamsenc_preview.h" - -#define MDEBUG - - -static GType _mix_videoconfigparamsenc_preview_type = 0; -static MixVideoConfigParamsEncClass *parent_class = NULL; - -#define _do_init { _mix_videoconfigparamsenc_preview_type = g_define_type_id; } - -gboolean mix_videoconfigparamsenc_preview_copy (MixParams * target, - const MixParams * src); -MixParams *mix_videoconfigparamsenc_preview_dup (const MixParams * obj); -gboolean mix_videoconfigparamsencenc_preview_equal (MixParams * first, - MixParams * second); -static void mix_videoconfigparamsenc_preview_finalize (MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncPreview, /* The name of the new type, in Camel case */ - mix_videoconfigparamsenc_preview, /* The name of the new type in lowercase */ - MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ - _do_init); - -void -_mix_videoconfigparamsenc_preview_initialize (void) -{ - /* the MixParams types need to be class_ref'd once before it can be - * done from multiple threads; - * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ - g_type_class_ref (mix_videoconfigparamsenc_preview_get_type ()); -} - -static void -mix_videoconfigparamsenc_preview_init (MixVideoConfigParamsEncPreview * self) -{ - /* initialize properties here */ - /* TODO: initialize properties */ -} - -static void -mix_videoconfigparamsenc_preview_class_init (MixVideoConfigParamsEncPreviewClass * klass) -{ - MixVideoConfigParamsEncClass *this_parent_class = - MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); - MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); - - /* setup static parent class */ - parent_class = - (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); - - this_root_class->finalize = mix_videoconfigparamsenc_preview_finalize; - this_root_class->copy = - (MixParamsCopyFunction) mix_videoconfigparamsenc_preview_copy; - this_root_class->dup = - (MixParamsDupFunction) mix_videoconfigparamsenc_preview_dup; - this_root_class->equal = - (MixParamsEqualFunction) mix_videoconfigparamsencenc_preview_equal; -} - -MixVideoConfigParamsEncPreview * -mix_videoconfigparamsenc_preview_new (void) -{ - MixVideoConfigParamsEncPreview *ret = (MixVideoConfigParamsEncPreview *) - g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW); - - return ret; -} - -void -mix_videoconfigparamsenc_preview_finalize (MixParams * obj) -{ - /* MixVideoConfigParamsEncPreview *this_obj = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (obj); */ - MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); - - /* TODO: cleanup resources allocated */ - - /* Chain up parent */ - - if (root_class->finalize) - { - root_class->finalize (obj); - } -} - -MixVideoConfigParamsEncPreview - * mix_videoconfigparamsenc_preview_ref (MixVideoConfigParamsEncPreview * mix) -{ - return (MixVideoConfigParamsEncPreview *) mix_params_ref (MIX_PARAMS (mix)); -} - -/** -* mix_videoconfigparamsenc_preview_dup: -* @obj: a #MixVideoConfigParams object -* @returns: a newly allocated duplicate of the object. -* -* Copy duplicate of the object. 
-*/ -MixParams * -mix_videoconfigparamsenc_preview_dup (const MixParams * obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (obj)) - { - MixVideoConfigParamsEncPreview *duplicate = mix_videoconfigparamsenc_preview_new (); - if (mix_videoconfigparamsenc_preview_copy - (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) - { - ret = MIX_PARAMS (duplicate); - } - else - { - mix_videoconfigparamsenc_preview_unref (duplicate); - } - } - return ret; -} - -/** -* mix_videoconfigparamsenc_preview_copy: -* @target: copy to target -* @src: copy from src -* @returns: boolean indicates if copy is successful. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videoconfigparamsenc_preview_copy (MixParams * target, const MixParams * src) -{ - MixVideoConfigParamsEncPreview *this_target, *this_src; - MixParamsClass *root_class; - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (target) - && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (src)) - { - // Cast the base object to this child object - this_target = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (target); - this_src = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (src); - - // Now chainup base class - root_class = MIX_PARAMS_CLASS (parent_class); - - if (root_class->copy) - { - return root_class->copy (MIX_PARAMS_CAST (target), - MIX_PARAMS_CAST (src)); - } - else - { - return TRUE; - } - } - return FALSE; -} - -/** -* mix_videoconfigparamsenc_preview: -* @first: first object to compare -* @second: seond object to compare -* @returns: boolean indicates if instance are equal. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videoconfigparamsencenc_preview_equal (MixParams * first, MixParams * second) -{ - gboolean ret = FALSE; - MixVideoConfigParamsEncPreview *this_first, *this_second; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (first) - && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (second)) - { - // Cast the base object to this child object - - this_first = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (first); - this_second = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (second); - - - ret = TRUE; - - - /* TODO: add comparison for properties */ - { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); - if (klass->equal) - { - ret = klass->equal (first, second); - } - else - { - ret = TRUE; - } - } - } - - return ret; -} - -/* TODO: Add getters and setters for properties if any */ diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.cpp b/mix_video/src/mixvideoconfigparamsenc_preview.cpp new file mode 100644 index 0000000..890aaac --- /dev/null +++ b/mix_video/src/mixvideoconfigparamsenc_preview.cpp @@ -0,0 +1,217 @@ +/* +INTEL CONFIDENTIAL +Copyright 2009 Intel Corporation All Rights Reserved. +The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
+ +No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. +*/ + +/** +* SECTION:mixvideoconfigparamsenc_preview +* @short_description: VideoConfig parameters +* +* A data object which stores videoconfig specific parameters. +*/ + +#include "mixvideolog.h" +#include "mixvideoconfigparamsenc_preview.h" + +#define MDEBUG + + +static GType _mix_videoconfigparamsenc_preview_type = 0; +static MixVideoConfigParamsEncClass *parent_class = NULL; + +#define _do_init { _mix_videoconfigparamsenc_preview_type = g_define_type_id; } + +gboolean mix_videoconfigparamsenc_preview_copy (MixParams * target, + const MixParams * src); +MixParams *mix_videoconfigparamsenc_preview_dup (const MixParams * obj); +gboolean mix_videoconfigparamsencenc_preview_equal (MixParams * first, + MixParams * second); +static void mix_videoconfigparamsenc_preview_finalize (MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncPreview, /* The name of the new type, in Camel case */ + mix_videoconfigparamsenc_preview, /* The name of the new type in lowercase */ + MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ + _do_init); + +void +_mix_videoconfigparamsenc_preview_initialize (void) +{ + /* the MixParams types need to be class_ref'd once before it can be + * done from multiple threads; + * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ + g_type_class_ref (mix_videoconfigparamsenc_preview_get_type ()); +} + +static void +mix_videoconfigparamsenc_preview_init (MixVideoConfigParamsEncPreview * self) +{ + /* initialize properties here */ + /* TODO: initialize properties */ +} + +static void +mix_videoconfigparamsenc_preview_class_init (MixVideoConfigParamsEncPreviewClass * klass) +{ + MixVideoConfigParamsEncClass *this_parent_class = + MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); + MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); + + /* setup static parent class */ + parent_class = + (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); + + this_root_class->finalize = mix_videoconfigparamsenc_preview_finalize; + this_root_class->copy = + (MixParamsCopyFunction) mix_videoconfigparamsenc_preview_copy; + this_root_class->dup = + (MixParamsDupFunction) mix_videoconfigparamsenc_preview_dup; + this_root_class->equal = + (MixParamsEqualFunction) mix_videoconfigparamsencenc_preview_equal; +} + +MixVideoConfigParamsEncPreview * +mix_videoconfigparamsenc_preview_new (void) +{ + MixVideoConfigParamsEncPreview *ret = (MixVideoConfigParamsEncPreview *) + g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW); + + return ret; +} + +void +mix_videoconfigparamsenc_preview_finalize (MixParams * obj) +{ + /* MixVideoConfigParamsEncPreview *this_obj = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (obj); */ + MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); + + /* TODO: cleanup resources allocated */ + + /* Chain up parent */ + + if (root_class->finalize) + { + root_class->finalize (obj); + } +} + +MixVideoConfigParamsEncPreview + * mix_videoconfigparamsenc_preview_ref (MixVideoConfigParamsEncPreview * mix) +{ + return (MixVideoConfigParamsEncPreview *) mix_params_ref (MIX_PARAMS (mix)); +} + +/** +* mix_videoconfigparamsenc_preview_dup: +* @obj: a #MixVideoConfigParams object +* @returns: a 
newly allocated duplicate of the object. +* +* Copy duplicate of the object. +*/ +MixParams * +mix_videoconfigparamsenc_preview_dup (const MixParams * obj) +{ + MixParams *ret = NULL; + + if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (obj)) + { + MixVideoConfigParamsEncPreview *duplicate = mix_videoconfigparamsenc_preview_new (); + if (mix_videoconfigparamsenc_preview_copy + (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) + { + ret = MIX_PARAMS (duplicate); + } + else + { + mix_videoconfigparamsenc_preview_unref (duplicate); + } + } + return ret; +} + +/** +* mix_videoconfigparamsenc_preview_copy: +* @target: copy to target +* @src: copy from src +* @returns: boolean indicates if copy is successful. +* +* Copy instance data from @src to @target. +*/ +gboolean +mix_videoconfigparamsenc_preview_copy (MixParams * target, const MixParams * src) +{ + MixVideoConfigParamsEncPreview *this_target, *this_src; + MixParamsClass *root_class; + + LOG_V( "Begin\n"); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (target) + && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (src)) + { + // Cast the base object to this child object + this_target = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (target); + this_src = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (src); + + // Now chainup base class + root_class = MIX_PARAMS_CLASS (parent_class); + + if (root_class->copy) + { + return root_class->copy (MIX_PARAMS_CAST (target), + MIX_PARAMS_CAST (src)); + } + else + { + return TRUE; + } + } + return FALSE; +} + +/** +* mix_videoconfigparamsenc_preview: +* @first: first object to compare +* @second: seond object to compare +* @returns: boolean indicates if instance are equal. +* +* Copy instance data from @src to @target. +*/ +gboolean +mix_videoconfigparamsencenc_preview_equal (MixParams * first, MixParams * second) +{ + gboolean ret = FALSE; + MixVideoConfigParamsEncPreview *this_first, *this_second; + + if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (first) + && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (second)) + { + // Cast the base object to this child object + + this_first = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (first); + this_second = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (second); + + + ret = TRUE; + + + /* TODO: add comparison for properties */ + { + // members within this scope equal. chaining up. + MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); + if (klass->equal) + { + ret = klass->equal (first, second); + } + else + { + ret = TRUE; + } + } + } + + return ret; +} + +/* TODO: Add getters and setters for properties if any */ diff --git a/mix_video/src/mixvideodecodeparams.c b/mix_video/src/mixvideodecodeparams.c deleted file mode 100644 index 1e403ab..0000000 --- a/mix_video/src/mixvideodecodeparams.c +++ /dev/null @@ -1,220 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
- - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideodecodeparams - * @short_description: MI-X Video Decode Paramters - * - * The #MixVideoDecodeParams object will be created by the MMF/App - * and provided to MixVideo in the MixVideo mix_video_decode() function. - */ - -#include "mixvideodecodeparams.h" - -static GType _mix_videodecodeparams_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_videodecodeparams_type = g_define_type_id; } - -gboolean mix_videodecodeparams_copy(MixParams * target, const MixParams * src); -MixParams *mix_videodecodeparams_dup(const MixParams * obj); -gboolean mix_videodecodeparams_equal(MixParams * first, MixParams * second); -static void mix_videodecodeparams_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoDecodeParams, mix_videodecodeparams, - MIX_TYPE_PARAMS, _do_init); - -static void mix_videodecodeparams_init(MixVideoDecodeParams * self) { - /* initialize properties here */ - - /* TODO: initialize properties */ - - self->timestamp = 0; - self->discontinuity = FALSE; - self->new_sequence = FALSE; - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void mix_videodecodeparams_class_init(MixVideoDecodeParamsClass * klass) { - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); - - mixparams_class->finalize = mix_videodecodeparams_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_videodecodeparams_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_videodecodeparams_dup; - mixparams_class->equal - = (MixParamsEqualFunction) mix_videodecodeparams_equal; -} - -MixVideoDecodeParams * -mix_videodecodeparams_new(void) { - MixVideoDecodeParams *ret = - (MixVideoDecodeParams *) g_type_create_instance( - MIX_TYPE_VIDEODECODEPARAMS); - - return ret; -} - -void mix_videodecodeparams_finalize(MixParams * obj) { - /* clean up here. */ - /* TODO: cleanup resources allocated */ - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} - -MixVideoDecodeParams * -mix_videodecodeparams_ref(MixVideoDecodeParams * mix) { - return (MixVideoDecodeParams *) mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_videodecodeparams_dup: - * @obj: a #MixVideoDecodeParams object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixParams * -mix_videodecodeparams_dup(const MixParams * obj) { - MixParams *ret = NULL; - - if (MIX_IS_VIDEODECODEPARAMS(obj)) { - MixVideoDecodeParams *duplicate = mix_videodecodeparams_new(); - if (mix_videodecodeparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { - ret = MIX_PARAMS(duplicate); - } else { - mix_videodecodeparams_unref(duplicate); - } - } - return ret; -} - -/** - * mix_videodecodeparams_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. 
- */ -gboolean mix_videodecodeparams_copy(MixParams * target, const MixParams * src) { - MixVideoDecodeParams *this_target, *this_src; - - if (MIX_IS_VIDEODECODEPARAMS(target) && MIX_IS_VIDEODECODEPARAMS(src)) { - // Cast the base object to this child object - this_target = MIX_VIDEODECODEPARAMS(target); - this_src = MIX_VIDEODECODEPARAMS(src); - - // TODO: copy properties */ - - // Now chainup base class - if (parent_class->copy) { - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - return TRUE; - } - } - return FALSE; -} - -/** - * mix_videodecodeparams_: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videodecodeparams_equal(MixParams * first, MixParams * second) { - gboolean ret = FALSE; - MixVideoDecodeParams *this_first, *this_second; - - if (MIX_IS_VIDEODECODEPARAMS(first) && MIX_IS_VIDEODECODEPARAMS(second)) { - // Deep compare - // Cast the base object to this child object - - this_first = MIX_VIDEODECODEPARAMS(first); - this_second = MIX_VIDEODECODEPARAMS(second); - - /* TODO: add comparison for properties */ - /* if ( first properties == sencod properties) */ - { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - } - } - - return ret; -} - -#define MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEODECODEPARAMS(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEODECODEPARAMS(obj)) return MIX_RESULT_FAIL; \ - - -/* TODO: Add getters and setters for properties. 
*/ - -MIX_RESULT mix_videodecodeparams_set_timestamp(MixVideoDecodeParams * obj, - guint64 timestamp) { - MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->timestamp = timestamp; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videodecodeparams_get_timestamp(MixVideoDecodeParams * obj, - guint64 * timestamp) { - MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, timestamp); - *timestamp = obj->timestamp; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videodecodeparams_set_discontinuity(MixVideoDecodeParams * obj, - gboolean discontinuity) { - MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->discontinuity = discontinuity; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videodecodeparams_get_discontinuity(MixVideoDecodeParams * obj, - gboolean *discontinuity) { - MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity); - *discontinuity = obj->discontinuity; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videodecodeparams_set_new_sequence(MixVideoDecodeParams * obj, - gboolean new_sequence) { - MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->new_sequence = new_sequence; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videodecodeparams_get_new_sequence(MixVideoDecodeParams * obj, - gboolean *new_sequence) { - MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, new_sequence); - *new_sequence = obj->new_sequence; - return MIX_RESULT_SUCCESS; -} - diff --git a/mix_video/src/mixvideodecodeparams.cpp b/mix_video/src/mixvideodecodeparams.cpp new file mode 100644 index 0000000..7ac2179 --- /dev/null +++ b/mix_video/src/mixvideodecodeparams.cpp @@ -0,0 +1,125 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideodecodeparams + * @short_description: MI-X Video Decode Paramters + * + * The #MixVideoDecodeParams object will be created by the MMF/App + * and provided to MixVideo in the MixVideo mix_video_decode() function. 
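+ *
+ * A typical caller prepares the object along these lines before invoking
+ * mix_video_decode() (an illustrative sketch only; the surrounding
+ * MixVideo setup and error handling are omitted, and the timestamp value
+ * is a placeholder):
+ * |[
+ * MixVideoDecodeParams *decode_params = mix_videodecodeparams_new();
+ * mix_videodecodeparams_set_timestamp(decode_params, 0);
+ * mix_videodecodeparams_set_discontinuity(decode_params, FALSE);
+ * mix_videodecodeparams_set_new_sequence(decode_params, FALSE);
+ * // ... pass decode_params to mix_video_decode(), then release it ...
+ * decode_params->Unref();
+ * ]|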
+ */ + +#include "mixvideodecodeparams.h" + +#define MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEODECODEPARAMS(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEODECODEPARAMS(obj)) return MIX_RESULT_FAIL; \ + + +MixVideoDecodeParams::MixVideoDecodeParams() + :timestamp(0) + ,discontinuity(FALSE) + ,new_sequence(FALSE) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { +} + +MixVideoDecodeParams::~MixVideoDecodeParams() { +} + +gboolean MixVideoDecodeParams::copy(MixParams *target) const { + gboolean ret = FALSE; + MixVideoDecodeParams * this_target = MIX_VIDEODECODEPARAMS(target); + if (NULL != this_target) { + // chain up base class + ret = MixParams::copy(target); + } + return ret; +} + +gboolean MixVideoDecodeParams::equal(MixParams* obj) const { + gboolean ret = FALSE; + MixVideoDecodeParams * this_obj = MIX_VIDEODECODEPARAMS(obj); + if (NULL != this_obj) + ret = MixParams::equal(this_obj); + return ret; +} + +MixParams* MixVideoDecodeParams::dup() const { + MixParams *ret = new MixVideoDecodeParams(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; +} + +MixVideoDecodeParams * mix_videodecodeparams_new(void) { + return new MixVideoDecodeParams(); +} + +MixVideoDecodeParams * +mix_videodecodeparams_ref(MixVideoDecodeParams * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} + + +/* TODO: Add getters and setters for properties. */ + +MIX_RESULT mix_videodecodeparams_set_timestamp( + MixVideoDecodeParams * obj, guint64 timestamp) { + MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->timestamp = timestamp; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videodecodeparams_get_timestamp( + MixVideoDecodeParams * obj, guint64 * timestamp) { + MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, timestamp); + *timestamp = obj->timestamp; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videodecodeparams_set_discontinuity( + MixVideoDecodeParams * obj, gboolean discontinuity) { + MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->discontinuity = discontinuity; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videodecodeparams_get_discontinuity( + MixVideoDecodeParams * obj, gboolean *discontinuity) { + MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity); + *discontinuity = obj->discontinuity; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videodecodeparams_set_new_sequence( + MixVideoDecodeParams * obj, gboolean new_sequence) { + MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->new_sequence = new_sequence; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videodecodeparams_get_new_sequence( + MixVideoDecodeParams * obj, gboolean *new_sequence) { + MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, new_sequence); + *new_sequence = obj->new_sequence; + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideodecodeparams.h b/mix_video/src/mixvideodecodeparams.h index 50ec502..3d4d72f 100644 --- a/mix_video/src/mixvideodecodeparams.h +++ b/mix_video/src/mixvideodecodeparams.h @@ -12,20 +12,11 @@ #include #include "mixvideodef.h" -G_BEGIN_DECLS - -/** - * MIX_TYPE_VIDEODECODEPARAMS: - * - * Get type of class. - */ -#define MIX_TYPE_VIDEODECODEPARAMS (mix_videodecodeparams_get_type ()) - /** * MIX_VIDEODECODEPARAMS: * @obj: object to be type-casted. 
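 *
 * This macro is now a plain unchecked cast, and MIX_IS_VIDEODECODEPARAMS
 * below reduces to a NULL test, so the type check is the caller's
 * responsibility. An illustrative guard (example added for clarity; not
 * part of the original patch):
 * |[
 * if (MIX_IS_VIDEODECODEPARAMS(obj)) {
 *   MixVideoDecodeParams *params = MIX_VIDEODECODEPARAMS(obj);
 *   // obj must genuinely be a MixVideoDecodeParams; the cast cannot verify it
 * }
 * ]|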
*/
-#define MIX_VIDEODECODEPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEODECODEPARAMS, MixVideoDecodeParams))
+#define MIX_VIDEODECODEPARAMS(obj) (reinterpret_cast<MixVideoDecodeParams*>(obj))

/**
 * MIX_IS_VIDEODECODEPARAMS:
@@ -33,44 +24,23 @@ G_BEGIN_DECLS
 *
 * Checks if the given object is an instance of #MixVideoDecodeParams
 */
-#define MIX_IS_VIDEODECODEPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEODECODEPARAMS))
-
-/**
- * MIX_VIDEODECODEPARAMS_CLASS:
- * @klass: class to be type-casted.
- */
-#define MIX_VIDEODECODEPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEODECODEPARAMS, MixVideoDecodeParamsClass))
-
-/**
- * MIX_IS_VIDEODECODEPARAMS_CLASS:
- * @klass: a class.
- *
- * Checks if the given class is #MixParamsClass
- */
-#define MIX_IS_VIDEODECODEPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEODECODEPARAMS))
-
-/**
- * MIX_VIDEODECODEPARAMS_GET_CLASS:
- * @obj: a #MixParams object.
- *
- * Get the class instance of the object.
- */
-#define MIX_VIDEODECODEPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEODECODEPARAMS, MixVideoDecodeParamsClass))
+#define MIX_IS_VIDEODECODEPARAMS(obj) ((NULL != MIX_VIDEODECODEPARAMS(obj)) ? TRUE : FALSE)

-typedef struct _MixVideoDecodeParams MixVideoDecodeParams;
-typedef struct _MixVideoDecodeParamsClass MixVideoDecodeParamsClass;

/**
 * MixVideoDecodeParams:
 *
 * MI-X VideoDecode Parameter object
 */
-struct _MixVideoDecodeParams {
-  /*< public > */
-  MixParams parent;
-
+class MixVideoDecodeParams : public MixParams {
+public:
+  MixVideoDecodeParams();
+  ~MixVideoDecodeParams();
+  virtual gboolean copy(MixParams *target) const;
+  virtual gboolean equal(MixParams* obj) const;
+  virtual MixParams* dup() const;
+public:
  /*< public > */
-  /* TODO: Add properties */

  /* Presentation timestamp for the video
@@ -96,26 +66,6 @@ struct _MixVideoDecodeParams {
  void *reserved4;
};

-/**
- * MixVideoDecodeParamsClass:
- *
- * MI-X VideoDecode object class
- */
-struct _MixVideoDecodeParamsClass {
-  /*< public > */
-  MixParamsClass parent_class;
-
-  /* class members */
-};
-
-/**
- * mix_videodecodeparams_get_type:
- * @returns: type
- *
- * Get the type of object.
- */
-GType mix_videodecodeparams_get_type(void);
-
/**
 * mix_videodecodeparams_new:
 * @returns: A newly allocated instance of #MixVideoDecodeParams
@@ -213,8 +163,6 @@ MIX_RESULT mix_videodecodeparams_set_new_sequence(MixVideoDecodeParams * obj,
 */
MIX_RESULT mix_videodecodeparams_get_new_sequence(MixVideoDecodeParams * obj,
    gboolean *new_sequence);
-
-G_END_DECLS

#endif /* __MIX_VIDEODECODEPARAMS_H__ */

diff --git a/mix_video/src/mixvideodef.h b/mix_video/src/mixvideodef.h
index 29bb2ac..bac6e8c 100644
--- a/mix_video/src/mixvideodef.h
+++ b/mix_video/src/mixvideodef.h
@@ -32,7 +32,7 @@
 #include

-G_BEGIN_DECLS
+

/*
 * MI-X video error code
@@ -200,6 +200,6 @@ typedef struct _MixEncDynamicParams {
  MixAIRParams air_params;
} MixEncDynamicParams;

-G_END_DECLS
+

#endif /* __MIX_VIDEO_DEF_H__ */

diff --git a/mix_video/src/mixvideoencodeparams.c b/mix_video/src/mixvideoencodeparams.c
deleted file mode 100644
index 52be78f..0000000
--- a/mix_video/src/mixvideoencodeparams.c
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors.
The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideoencodeparams - * @short_description: MI-X Video Encode Parameters - * - * The #MixVideoEncodeParams object will be created by - * the MMF/App and provided to #MixVideo in the #MixVideo - * mix_video_encode() function. Get methods for the - * properties will be available for the caller to - * retrieve configuration information. Currently this - * object is reserved for future use. - */ - -#include "mixvideoencodeparams.h" - -static GType _mix_videoencodeparams_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_videoencodeparams_type = g_define_type_id; } - -gboolean mix_videoencodeparams_copy(MixParams * target, const MixParams * src); -MixParams *mix_videoencodeparams_dup(const MixParams * obj); -gboolean mix_videoencodeparams_equal(MixParams * first, MixParams * second); -static void mix_videoencodeparams_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoEncodeParams, mix_videoencodeparams, - MIX_TYPE_PARAMS, _do_init); - -static void mix_videoencodeparams_init(MixVideoEncodeParams * self) { - /* initialize properties here */ - - /* TODO: initialize properties */ - - self->timestamp = 0; - self->discontinuity = FALSE; - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void mix_videoencodeparams_class_init(MixVideoEncodeParamsClass * klass) { - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); - - mixparams_class->finalize = mix_videoencodeparams_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_videoencodeparams_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_videoencodeparams_dup; - mixparams_class->equal - = (MixParamsEqualFunction) mix_videoencodeparams_equal; -} - -MixVideoEncodeParams * -mix_videoencodeparams_new(void) { - MixVideoEncodeParams *ret = - (MixVideoEncodeParams *) g_type_create_instance( - MIX_TYPE_VIDEOENCODEPARAMS); - - return ret; -} - -void mix_videoencodeparams_finalize(MixParams * obj) { - /* clean up here. */ - /* TODO: cleanup resources allocated */ - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} - -MixVideoEncodeParams * -mix_videoencodeparams_ref(MixVideoEncodeParams * mix) { - return (MixVideoEncodeParams *) mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_videoencodeparams_dup: - * @obj: a #MixVideoEncodeParams object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. 
- */ -MixParams * -mix_videoencodeparams_dup(const MixParams * obj) { - MixParams *ret = NULL; - - if (MIX_IS_VIDEOENCODEPARAMS(obj)) { - MixVideoEncodeParams *duplicate = mix_videoencodeparams_new(); - if (mix_videoencodeparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { - ret = MIX_PARAMS(duplicate); - } else { - mix_videoencodeparams_unref(duplicate); - } - } - return ret; -} - -/** - * mix_videoencodeparams_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videoencodeparams_copy(MixParams * target, const MixParams * src) { - MixVideoEncodeParams *this_target, *this_src; - - if (MIX_IS_VIDEOENCODEPARAMS(target) && MIX_IS_VIDEOENCODEPARAMS(src)) { - // Cast the base object to this child object - this_target = MIX_VIDEOENCODEPARAMS(target); - this_src = MIX_VIDEOENCODEPARAMS(src); - - // TODO: copy properties */ - - // Now chainup base class - if (parent_class->copy) { - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - return TRUE; - } - } - return FALSE; -} - -/** - * mix_videoencodeparams_: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videoencodeparams_equal(MixParams * first, MixParams * second) { - gboolean ret = FALSE; - MixVideoEncodeParams *this_first, *this_second; - - if (MIX_IS_VIDEOENCODEPARAMS(first) && MIX_IS_VIDEOENCODEPARAMS(second)) { - // Deep compare - // Cast the base object to this child object - - this_first = MIX_VIDEOENCODEPARAMS(first); - this_second = MIX_VIDEOENCODEPARAMS(second); - - /* TODO: add comparison for properties */ - /* if ( first properties == sencod properties) */ - { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - } - } - - return ret; -} - -#define MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \ - - -/* TODO: Add getters and setters for properties. 
*/ - -MIX_RESULT mix_videoencodeparams_set_timestamp(MixVideoEncodeParams * obj, - guint64 timestamp) { - MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->timestamp = timestamp; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoencodeparams_get_timestamp(MixVideoEncodeParams * obj, - guint64 * timestamp) { - MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, timestamp); - *timestamp = obj->timestamp; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoencodeparams_set_discontinuity(MixVideoEncodeParams * obj, - gboolean discontinuity) { - MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->discontinuity = discontinuity; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoencodeparams_get_discontinuity(MixVideoEncodeParams * obj, - gboolean *discontinuity) { - MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity); - *discontinuity = obj->discontinuity; - return MIX_RESULT_SUCCESS; -} - diff --git a/mix_video/src/mixvideoencodeparams.cpp b/mix_video/src/mixvideoencodeparams.cpp new file mode 100644 index 0000000..52be78f --- /dev/null +++ b/mix_video/src/mixvideoencodeparams.cpp @@ -0,0 +1,209 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideoencodeparams + * @short_description: MI-X Video Encode Parameters + * + * The #MixVideoEncodeParams object will be created by + * the MMF/App and provided to #MixVideo in the #MixVideo + * mix_video_encode() function. Get methods for the + * properties will be available for the caller to + * retrieve configuration information. Currently this + * object is reserved for future use. 
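+ *
+ * Since the object is currently reserved, a caller typically only
+ * round-trips the generic fields; a minimal sketch (error handling
+ * omitted, values illustrative):
+ * |[
+ * MixVideoEncodeParams *encode_params = mix_videoencodeparams_new();
+ * guint64 ts = 0;
+ * mix_videoencodeparams_set_timestamp(encode_params, 42);
+ * mix_videoencodeparams_get_timestamp(encode_params, &ts); // ts is now 42
+ * mix_videoencodeparams_unref(encode_params);
+ * ]|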
+ */ + +#include "mixvideoencodeparams.h" + +static GType _mix_videoencodeparams_type = 0; +static MixParamsClass *parent_class = NULL; + +#define _do_init { _mix_videoencodeparams_type = g_define_type_id; } + +gboolean mix_videoencodeparams_copy(MixParams * target, const MixParams * src); +MixParams *mix_videoencodeparams_dup(const MixParams * obj); +gboolean mix_videoencodeparams_equal(MixParams * first, MixParams * second); +static void mix_videoencodeparams_finalize(MixParams * obj); + +G_DEFINE_TYPE_WITH_CODE (MixVideoEncodeParams, mix_videoencodeparams, + MIX_TYPE_PARAMS, _do_init); + +static void mix_videoencodeparams_init(MixVideoEncodeParams * self) { + /* initialize properties here */ + + /* TODO: initialize properties */ + + self->timestamp = 0; + self->discontinuity = FALSE; + self->reserved1 = NULL; + self->reserved2 = NULL; + self->reserved3 = NULL; + self->reserved4 = NULL; +} + +static void mix_videoencodeparams_class_init(MixVideoEncodeParamsClass * klass) { + MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); + + /* setup static parent class */ + parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); + + mixparams_class->finalize = mix_videoencodeparams_finalize; + mixparams_class->copy = (MixParamsCopyFunction) mix_videoencodeparams_copy; + mixparams_class->dup = (MixParamsDupFunction) mix_videoencodeparams_dup; + mixparams_class->equal + = (MixParamsEqualFunction) mix_videoencodeparams_equal; +} + +MixVideoEncodeParams * +mix_videoencodeparams_new(void) { + MixVideoEncodeParams *ret = + (MixVideoEncodeParams *) g_type_create_instance( + MIX_TYPE_VIDEOENCODEPARAMS); + + return ret; +} + +void mix_videoencodeparams_finalize(MixParams * obj) { + /* clean up here. */ + /* TODO: cleanup resources allocated */ + + /* Chain up parent */ + if (parent_class->finalize) { + parent_class->finalize(obj); + } +} + +MixVideoEncodeParams * +mix_videoencodeparams_ref(MixVideoEncodeParams * mix) { + return (MixVideoEncodeParams *) mix_params_ref(MIX_PARAMS(mix)); +} + +/** + * mix_videoencodeparams_dup: + * @obj: a #MixVideoEncodeParams object + * @returns: a newly allocated duplicate of the object. + * + * Copy duplicate of the object. + */ +MixParams * +mix_videoencodeparams_dup(const MixParams * obj) { + MixParams *ret = NULL; + + if (MIX_IS_VIDEOENCODEPARAMS(obj)) { + MixVideoEncodeParams *duplicate = mix_videoencodeparams_new(); + if (mix_videoencodeparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { + ret = MIX_PARAMS(duplicate); + } else { + mix_videoencodeparams_unref(duplicate); + } + } + return ret; +} + +/** + * mix_videoencodeparams_copy: + * @target: copy to target + * @src: copy from src + * @returns: boolean indicates if copy is successful. + * + * Copy instance data from @src to @target. + */ +gboolean mix_videoencodeparams_copy(MixParams * target, const MixParams * src) { + MixVideoEncodeParams *this_target, *this_src; + + if (MIX_IS_VIDEOENCODEPARAMS(target) && MIX_IS_VIDEOENCODEPARAMS(src)) { + // Cast the base object to this child object + this_target = MIX_VIDEOENCODEPARAMS(target); + this_src = MIX_VIDEOENCODEPARAMS(src); + + // TODO: copy properties */ + + // Now chainup base class + if (parent_class->copy) { + return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( + src)); + } else { + return TRUE; + } + } + return FALSE; +} + +/** + * mix_videoencodeparams_: + * @first: first object to compare + * @second: seond object to compare + * @returns: boolean indicates if instance are equal. 
+ * + * Copy instance data from @src to @target. + */ +gboolean mix_videoencodeparams_equal(MixParams * first, MixParams * second) { + gboolean ret = FALSE; + MixVideoEncodeParams *this_first, *this_second; + + if (MIX_IS_VIDEOENCODEPARAMS(first) && MIX_IS_VIDEOENCODEPARAMS(second)) { + // Deep compare + // Cast the base object to this child object + + this_first = MIX_VIDEOENCODEPARAMS(first); + this_second = MIX_VIDEOENCODEPARAMS(second); + + /* TODO: add comparison for properties */ + /* if ( first properties == sencod properties) */ + { + // members within this scope equal. chaining up. + MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); + if (klass->equal) + ret = parent_class->equal(first, second); + else + ret = TRUE; + } + } + + return ret; +} + +#define MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \ + + +/* TODO: Add getters and setters for properties. */ + +MIX_RESULT mix_videoencodeparams_set_timestamp(MixVideoEncodeParams * obj, + guint64 timestamp) { + MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->timestamp = timestamp; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoencodeparams_get_timestamp(MixVideoEncodeParams * obj, + guint64 * timestamp) { + MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, timestamp); + *timestamp = obj->timestamp; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoencodeparams_set_discontinuity(MixVideoEncodeParams * obj, + gboolean discontinuity) { + MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->discontinuity = discontinuity; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoencodeparams_get_discontinuity(MixVideoEncodeParams * obj, + gboolean *discontinuity) { + MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity); + *discontinuity = obj->discontinuity; + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoformat.c b/mix_video/src/mixvideoformat.c deleted file mode 100644 index 7f34abd..0000000 --- a/mix_video/src/mixvideoformat.c +++ /dev/null @@ -1,424 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
- */ -#include -#include "mixvideolog.h" - -#include "mixvideoformat.h" - -#define MIXUNREF(obj, unref) if(obj) { unref(obj); obj = NULL; } - - -/* Default vmethods implementation */ -static MIX_RESULT mix_videofmt_getcaps_default(MixVideoFormat *mix, - GString *msg); -static MIX_RESULT mix_videofmt_initialize_default(MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay vadisplay); -static MIX_RESULT - mix_videofmt_decode_default(MixVideoFormat *mix, - MixBuffer * bufin[], gint bufincnt, - MixVideoDecodeParams * decode_params); -static MIX_RESULT mix_videofmt_flush_default(MixVideoFormat *mix); -static MIX_RESULT mix_videofmt_eos_default(MixVideoFormat *mix); -static MIX_RESULT mix_videofmt_deinitialize_default(MixVideoFormat *mix); - -static GObjectClass *parent_class = NULL; - -static void mix_videoformat_finalize(GObject * obj); -G_DEFINE_TYPE (MixVideoFormat, mix_videoformat, G_TYPE_OBJECT); - -static void mix_videoformat_init(MixVideoFormat * self) { - - /* public member initialization */ - /* These are all public because MixVideoFormat objects are completely internal to MixVideo, - no need for private members */ - - self->initialized = FALSE; - self->va_initialized = FALSE; - self->framemgr = NULL; - self->surfacepool = NULL; - self->inputbufpool = NULL; - self->inputbufqueue = NULL; - self->va_display = NULL; - self->va_context = VA_INVALID_ID; - self->va_config = VA_INVALID_ID; - self->va_surfaces = NULL; - self->va_num_surfaces = 0; - self->mime_type = NULL; - self->frame_rate_num = 0; - self->frame_rate_denom = 0; - self->picture_width = 0; - self->picture_height = 0; - self->parse_in_progress = FALSE; - self->current_timestamp = (guint64)-1; - self->end_picture_pending = FALSE; - self->video_frame = NULL; - self->extra_surfaces = 0; - self->config_params = NULL; -} - -static void mix_videoformat_class_init(MixVideoFormatClass * klass) { - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* parent class for later use */ - parent_class = g_type_class_peek_parent(klass); - - gobject_class->finalize = mix_videoformat_finalize; - - /* setup vmethods with base implementation */ - klass->getcaps = mix_videofmt_getcaps_default; - klass->initialize = mix_videofmt_initialize_default; - klass->decode = mix_videofmt_decode_default; - klass->flush = mix_videofmt_flush_default; - klass->eos = mix_videofmt_eos_default; - klass->deinitialize = mix_videofmt_deinitialize_default; -} - -MixVideoFormat * -mix_videoformat_new(void) { - MixVideoFormat *ret = g_object_new(MIX_TYPE_VIDEOFORMAT, NULL); - - return ret; -} - -void mix_videoformat_finalize(GObject * obj) { - /* clean up here. 
*/ - VAStatus va_status; - - MixVideoFormat *mix = MIX_VIDEOFORMAT(obj); - MixInputBufferEntry *buf_entry = NULL; - - if(mix->objectlock) { - g_mutex_free(mix->objectlock); - mix->objectlock = NULL; - } - - if (mix->mime_type) - { - if (mix->mime_type->str) - g_string_free(mix->mime_type, TRUE); - else - g_string_free(mix->mime_type, FALSE); - } - - //MiVideo object calls the _deinitialize() for frame manager - MIXUNREF(mix->framemgr, mix_framemanager_unref); - - if (mix->surfacepool) - { - mix_surfacepool_deinitialize(mix->surfacepool); - MIXUNREF(mix->surfacepool, mix_surfacepool_unref); - } - - if (mix->config_params) - { - mix_videoconfigparams_unref(mix->config_params); - mix->config_params = NULL; - } - - //libVA cleanup (vaTerminate is called from MixVideo object) - if (mix->va_display) { - if (mix->va_context != VA_INVALID_ID) - { - va_status = vaDestroyContext(mix->va_display, mix->va_context); - if (va_status != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaDestroyContext\n"); - } - mix->va_context = VA_INVALID_ID; - } - if (mix->va_config != VA_INVALID_ID) - { - va_status = vaDestroyConfig(mix->va_display, mix->va_config); - if (va_status != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaDestroyConfig\n"); - } - mix->va_config = VA_INVALID_ID; - } - if (mix->va_surfaces) - { - va_status = vaDestroySurfaces(mix->va_display, mix->va_surfaces, mix->va_num_surfaces); - if (va_status != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaDestroySurfaces\n"); - } - g_free(mix->va_surfaces); - mix->va_surfaces = NULL; - mix->va_num_surfaces = 0; - } - } - - if (mix->video_frame) - { - mix_videoframe_unref(mix->video_frame); - mix->video_frame = NULL; - } - - //Deinit input buffer queue - - while (!g_queue_is_empty(mix->inputbufqueue)) - { - buf_entry = g_queue_pop_head(mix->inputbufqueue); - mix_buffer_unref(buf_entry->buf); - g_free(buf_entry); - } - - g_queue_free(mix->inputbufqueue); - - //MixBuffer pool is deallocated in MixVideo object - mix->inputbufpool = NULL; - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} - -MixVideoFormat * -mix_videoformat_ref(MixVideoFormat * mix) { - return (MixVideoFormat *) g_object_ref(G_OBJECT(mix)); -} - -/* Default vmethods implementation */ -static MIX_RESULT mix_videofmt_getcaps_default(MixVideoFormat *mix, - GString *msg) { - g_print("mix_videofmt_getcaps_default\n"); - return MIX_RESULT_SUCCESS; -} - -static MIX_RESULT mix_videofmt_initialize_default(MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - - LOG_V( "Begin\n"); - - MIX_RESULT res = MIX_RESULT_SUCCESS; - MixInputBufferEntry *buf_entry = NULL; - - if (!mix || !config_params || !frame_mgr || !input_buf_pool || !surface_pool || !va_display) - { - LOG_E( "NUll pointer passed in\n"); - return (MIX_RESULT_NULL_PTR); - } - - // Create object lock - // Note that g_thread_init() has already been called by mix_video_init() - if (mix->objectlock) //If already exists, then deallocate old one (we are being re-initialized) - { - g_mutex_free(mix->objectlock); - mix->objectlock = NULL; - } - mix->objectlock = g_mutex_new(); - if (!mix->objectlock) { - LOG_E( "!mix->objectlock\n"); - return (MIX_RESULT_NO_MEMORY); - } - - g_mutex_lock(mix->objectlock); - - //Clean up any previous framemgr - MIXUNREF(mix->framemgr, mix_framemanager_unref); - mix->framemgr = frame_mgr; - mix_framemanager_ref(mix->framemgr); - - if (mix->config_params) - { - 
mix_videoconfigparams_unref(mix->config_params); - } - mix->config_params = config_params; - mix_videoconfigparams_ref(mix->config_params); - - mix->va_display = va_display; - - if (mix->mime_type) //Clean up any previous mime_type - { - if (mix->mime_type->str) - g_string_free(mix->mime_type, TRUE); - else - g_string_free(mix->mime_type, FALSE); - } - gchar *mime_tmp = NULL; - res = mix_videoconfigparamsdec_get_mime_type(config_params, &mime_tmp); - if (mime_tmp) - { - mix->mime_type = g_string_new(mime_tmp); - g_free(mime_tmp); - if (!mix->mime_type) //new failed - { - res = MIX_RESULT_NO_MEMORY; - LOG_E( "Could not duplicate mime_type\n"); - goto cleanup; - } - } //else there is no mime_type; leave as NULL - - res = mix_videoconfigparamsdec_get_frame_rate(config_params, &(mix->frame_rate_num), &(mix->frame_rate_denom)); - if (res != MIX_RESULT_SUCCESS) - { - LOG_E( "Error getting frame_rate\n"); - goto cleanup; - } - res = mix_videoconfigparamsdec_get_picture_res(config_params, &(mix->picture_width), &(mix->picture_height)); - if (res != MIX_RESULT_SUCCESS) - { - LOG_E( "Error getting picture_res\n"); - goto cleanup; - } - - if (mix->inputbufqueue) - { - //Deinit previous input buffer queue - - while (!g_queue_is_empty(mix->inputbufqueue)) - { - buf_entry = g_queue_pop_head(mix->inputbufqueue); - mix_buffer_unref(buf_entry->buf); - g_free(buf_entry); - } - - g_queue_free(mix->inputbufqueue); - } - - //MixBuffer pool is cleaned up in MixVideo object - mix->inputbufpool = NULL; - - mix->inputbufpool = input_buf_pool; - mix->inputbufqueue = g_queue_new(); - if (!mix->inputbufqueue) //New failed - { - res = MIX_RESULT_NO_MEMORY; - LOG_E( "Could not duplicate mime_type\n"); - goto cleanup; - } - - // surface pool, VA context/config and parser handle are initialized by - // derived classes - - - cleanup: - if (res != MIX_RESULT_SUCCESS) { - - MIXUNREF(mix->framemgr, mix_framemanager_unref); - if (mix->mime_type) - { - if (mix->mime_type->str) - g_string_free(mix->mime_type, TRUE); - else - g_string_free(mix->mime_type, FALSE); - mix->mime_type = NULL; - } - - if (mix->objectlock) - g_mutex_unlock(mix->objectlock); - g_mutex_free(mix->objectlock); - mix->objectlock = NULL; - mix->frame_rate_num = 0; - mix->frame_rate_denom = 1; - mix->picture_width = 0; - mix->picture_height = 0; - - } else { - //Normal unlock - if (mix->objectlock) - g_mutex_unlock(mix->objectlock); - } - - LOG_V( "End\n"); - - return res; -} - -static MIX_RESULT mix_videofmt_decode_default(MixVideoFormat *mix, - MixBuffer * bufin[], gint bufincnt, - MixVideoDecodeParams * decode_params) { - return MIX_RESULT_SUCCESS; -} - -static MIX_RESULT mix_videofmt_flush_default(MixVideoFormat *mix) { - return MIX_RESULT_SUCCESS; -} - -static MIX_RESULT mix_videofmt_eos_default(MixVideoFormat *mix) { - return MIX_RESULT_SUCCESS; -} - -static MIX_RESULT mix_videofmt_deinitialize_default(MixVideoFormat *mix) { - - //All teardown is being done in _finalize() - - return MIX_RESULT_SUCCESS; -} - -/* mixvideoformat class methods implementation */ - -MIX_RESULT mix_videofmt_getcaps(MixVideoFormat *mix, GString *msg) { - MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix); - g_print("mix_videofmt_getcaps\n"); - if (klass->getcaps) { - return klass->getcaps(mix, msg); - } - return MIX_RESULT_NOTIMPL; -} - -MIX_RESULT mix_videofmt_initialize(MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - 
MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix); - - if (klass->initialize) { - return klass->initialize(mix, config_params, frame_mgr, - input_buf_pool, surface_pool, va_display); - } - - return MIX_RESULT_FAIL; - -} - -MIX_RESULT mix_videofmt_decode(MixVideoFormat *mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params) { - - MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix); - if (klass->decode) { - return klass->decode(mix, bufin, bufincnt, decode_params); - } - - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmt_flush(MixVideoFormat *mix) { - MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix); - if (klass->flush) { - return klass->flush(mix); - } - - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmt_eos(MixVideoFormat *mix) { - MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix); - if (klass->eos) { - return klass->eos(mix); - } - - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmt_deinitialize(MixVideoFormat *mix) { - MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix); - if (klass->deinitialize) { - return klass->deinitialize(mix); - } - - return MIX_RESULT_FAIL; -} diff --git a/mix_video/src/mixvideoformat.cpp b/mix_video/src/mixvideoformat.cpp new file mode 100644 index 0000000..9c7ff42 --- /dev/null +++ b/mix_video/src/mixvideoformat.cpp @@ -0,0 +1,293 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include +#include "mixvideolog.h" + +#include "mixvideoformat.h" + +#define MIXUNREF(obj, unref) if(obj) { unref(obj); obj = NULL; } + +MixVideoFormat::MixVideoFormat() + :mLock() + ,initialized(FALSE) + ,va_initialized(FALSE) + ,framemgr(NULL) + ,surfacepool(NULL) + ,inputbufpool(NULL) + ,inputbufqueue(NULL) + ,va_display(NULL) + ,va_context(VA_INVALID_ID) + ,va_config(VA_INVALID_ID) + ,va_surfaces(NULL) + ,va_num_surfaces(0) + ,mime_type(NULL) + ,frame_rate_num(0) + ,frame_rate_denom(0) + ,picture_width(0) + ,picture_height(0) + ,parse_in_progress(FALSE) + ,current_timestamp((guint64)-1) + ,end_picture_pending(FALSE) + ,video_frame(NULL) + ,extra_surfaces(0) + ,config_params(NULL) + ,ref_count(1) { +} + +MixVideoFormat::~MixVideoFormat(){ + /* clean up here. 
*/ + VAStatus va_status; + MixInputBufferEntry *buf_entry = NULL; + + if (this->mime_type) { + if (this->mime_type->str) + g_string_free(this->mime_type, TRUE); + else + g_string_free(this->mime_type, FALSE); + } + + //MixVideo object calls the _deinitialize() for frame manager + MIXUNREF(this->framemgr, mix_framemanager_unref); + + if (this->surfacepool) { + mix_surfacepool_deinitialize(this->surfacepool); + MIXUNREF(this->surfacepool, mix_surfacepool_unref); + } + + if (this->config_params) { + mix_videoconfigparams_unref(this->config_params); + this->config_params = NULL; + } + + //libVA cleanup (vaTerminate is called from MixVideo object) + if (this->va_display) { + if (this->va_context != VA_INVALID_ID) { + va_status = vaDestroyContext(this->va_display, this->va_context); + if (va_status != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaDestroyContext\n"); + } + this->va_context = VA_INVALID_ID; + } + if (this->va_config != VA_INVALID_ID) { + va_status = vaDestroyConfig(this->va_display, this->va_config); + if (va_status != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaDestroyConfig\n"); + } + this->va_config = VA_INVALID_ID; + } + if (this->va_surfaces) { + va_status = vaDestroySurfaces(this->va_display, this->va_surfaces, this->va_num_surfaces); + if (va_status != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaDestroySurfaces\n"); + } + g_free(this->va_surfaces); + this->va_surfaces = NULL; + this->va_num_surfaces = 0; + } + } + + if (this->video_frame) { + mix_videoframe_unref(this->video_frame); + this->video_frame = NULL; + } + + //Deinit input buffer queue + while (!g_queue_is_empty(this->inputbufqueue)) { + buf_entry = reinterpret_cast<MixInputBufferEntry*>(g_queue_pop_head(this->inputbufqueue)); + mix_buffer_unref(buf_entry->buf); + g_free(buf_entry); + } + + g_queue_free(this->inputbufqueue); + + //MixBuffer pool is deallocated in MixVideo object + this->inputbufpool = NULL; +} + +MIX_RESULT MixVideoFormat::GetCaps(GString *msg) { + g_print("mix_videofmt_getcaps_default\n"); + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT MixVideoFormat::Initialize( + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display) { + + LOG_V( "Begin\n"); + MIX_RESULT res = MIX_RESULT_SUCCESS; + MixInputBufferEntry *buf_entry = NULL; + + if (!config_params || !frame_mgr || !input_buf_pool || !surface_pool || !va_display) { + LOG_E( "NULL pointer passed in\n"); + return (MIX_RESULT_NULL_PTR); + } + + Lock(); + + //Clean up any previous framemgr + MIXUNREF(this->framemgr, mix_framemanager_unref); + this->framemgr = frame_mgr; + mix_framemanager_ref(this->framemgr); + if (this->config_params) { + mix_videoconfigparams_unref(this->config_params); + } + this->config_params = config_params; + mix_videoconfigparams_ref(reinterpret_cast<MixVideoConfigParams*>(this->config_params)); + + this->va_display = va_display; + + //Clean up any previous mime_type + if (this->mime_type) { + if (this->mime_type->str) + g_string_free(this->mime_type, TRUE); + else + g_string_free(this->mime_type, FALSE); + } + gchar *mime_tmp = NULL; + res = mix_videoconfigparamsdec_get_mime_type(config_params, &mime_tmp); + if (NULL != mime_tmp) { + this->mime_type = g_string_new(mime_tmp); + g_free(mime_tmp); + if (NULL == this->mime_type) {//new failed + res = MIX_RESULT_NO_MEMORY; + LOG_E( "Could not duplicate mime_type\n"); + goto cleanup; + } + }//else there is no mime_type; leave as NULL + + res = mix_videoconfigparamsdec_get_frame_rate(config_params, &(this->frame_rate_num), 
&(this->frame_rate_denom)); + if (res != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting frame_rate\n"); + goto cleanup; + } + res = mix_videoconfigparamsdec_get_picture_res(config_params, &(this->picture_width), &(this->picture_height)); + if (res != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting picture_res\n"); + goto cleanup; + } + + if (this->inputbufqueue) { + //Deinit previous input buffer queue + while (!g_queue_is_empty(this->inputbufqueue)) { + buf_entry = reinterpret_cast<MixInputBufferEntry*>(g_queue_pop_head(this->inputbufqueue)); + mix_buffer_unref(buf_entry->buf); + g_free(buf_entry); + } + g_queue_free(this->inputbufqueue); + } + + //MixBuffer pool is cleaned up in MixVideo object + this->inputbufpool = NULL; + + this->inputbufpool = input_buf_pool; + this->inputbufqueue = g_queue_new(); + if (NULL == this->inputbufqueue) {//New failed + res = MIX_RESULT_NO_MEMORY; + LOG_E( "Could not create input buffer queue\n"); + goto cleanup; + } + + // surface pool, VA context/config and parser handle are initialized by + // derived classes + + +cleanup: + if (res != MIX_RESULT_SUCCESS) { + MIXUNREF(this->framemgr, mix_framemanager_unref); + if (this->mime_type) { + if (this->mime_type->str) + g_string_free(this->mime_type, TRUE); + else + g_string_free(this->mime_type, FALSE); + this->mime_type = NULL; + } + Unlock(); + this->frame_rate_num = 0; + this->frame_rate_denom = 1; + this->picture_width = 0; + this->picture_height = 0; + } else {//Normal unlock + Unlock(); + } + + LOG_V( "End\n"); + + return res; + +} + +MIX_RESULT MixVideoFormat::Decode( + MixBuffer * bufin[], gint bufincnt, + MixVideoDecodeParams * decode_params) { + return MIX_RESULT_SUCCESS; +} +MIX_RESULT MixVideoFormat::Flush() { + return MIX_RESULT_SUCCESS; +} +MIX_RESULT MixVideoFormat::EndOfStream() { + return MIX_RESULT_SUCCESS; +} +MIX_RESULT MixVideoFormat::Deinitialize() { + return MIX_RESULT_SUCCESS; +} + + +MixVideoFormat* mix_videoformat_unref(MixVideoFormat* mix){ + if (NULL != mix) + return mix->Unref(); + else + return NULL; +} + +MixVideoFormat * mix_videoformat_new(void) { + return new MixVideoFormat(); +} + +MixVideoFormat * mix_videoformat_ref(MixVideoFormat * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} + + +/* mixvideoformat class methods implementation */ +MIX_RESULT mix_videofmt_getcaps(MixVideoFormat *mix, GString *msg) { + return mix->GetCaps(msg); +} + +MIX_RESULT mix_videofmt_initialize( + MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display) { + return mix->Initialize(config_params, frame_mgr, + input_buf_pool, surface_pool, va_display); +} + +MIX_RESULT mix_videofmt_decode( + MixVideoFormat *mix, MixBuffer * bufin[], + gint bufincnt, MixVideoDecodeParams * decode_params) { + return mix->Decode(bufin, bufincnt, decode_params); +} + +MIX_RESULT mix_videofmt_flush(MixVideoFormat *mix) { + return mix->Flush(); +} + +MIX_RESULT mix_videofmt_eos(MixVideoFormat *mix) { + return mix->EndOfStream(); +} + +MIX_RESULT mix_videofmt_deinitialize(MixVideoFormat *mix) { + return mix->Deinitialize(); +} diff --git a/mix_video/src/mixvideoformat.h b/mix_video/src/mixvideoformat.h index 7a395b0..26f91f6 100644 --- a/mix_video/src/mixvideoformat.h +++ b/mix_video/src/mixvideoformat.h @@ -11,7 +11,11 @@ #include #include + +extern "C" { #include "vbp_loader.h" +}; + + #include "mixvideodef.h" #include #include "mixvideoconfigparamsdec.h" @@ -22,25 +26,14 @@ #include "mixbuffer.h" #include 
"mixbufferpool.h" #include "mixvideoformatqueue.h" - -G_BEGIN_DECLS +#include "mixvideothread.h" // Redefine the Handle defined in vbp_loader.h -#define VBPhandle Handle - -/* - * Type macros. - */ -#define MIX_TYPE_VIDEOFORMAT (mix_videoformat_get_type ()) -#define MIX_VIDEOFORMAT(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMAT, MixVideoFormat)) -#define MIX_IS_VIDEOFORMAT(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMAT)) -#define MIX_VIDEOFORMAT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMAT, MixVideoFormatClass)) -#define MIX_IS_VIDEOFORMAT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMAT)) -#define MIX_VIDEOFORMAT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMAT, MixVideoFormatClass)) +#define VBPhandle Handle -typedef struct _MixVideoFormat MixVideoFormat; -typedef struct _MixVideoFormatClass MixVideoFormatClass; +class MixVideoFormat; +#define MIX_VIDEOFORMAT(obj) (dynamic_cast(obj)) /* vmethods typedef */ typedef MIX_RESULT (*MixVideoFmtGetCapsFunc)(MixVideoFormat *mix, GString *msg); @@ -57,67 +50,79 @@ typedef MIX_RESULT (*MixVideoFmtFlushFunc)(MixVideoFormat *mix); typedef MIX_RESULT (*MixVideoFmtEndOfStreamFunc)(MixVideoFormat *mix); typedef MIX_RESULT (*MixVideoFmtDeinitializeFunc)(MixVideoFormat *mix); -struct _MixVideoFormat { - /*< public > */ - GObject parent; - - /*< public > */ - - /*< private > */ - GMutex *objectlock; - gboolean initialized; - MixFrameManager *framemgr; - MixSurfacePool *surfacepool; - VADisplay va_display; - VAContextID va_context; - VAConfigID va_config; - VASurfaceID *va_surfaces; - guint va_num_surfaces; - VBPhandle parser_handle; - GString *mime_type; - guint frame_rate_num; - guint frame_rate_denom; - guint picture_width; - guint picture_height; - gboolean parse_in_progress; - gboolean discontinuity_frame_in_progress; - guint64 current_timestamp; - MixBufferPool *inputbufpool; - GQueue *inputbufqueue; - gboolean va_initialized; - gboolean end_picture_pending; - MixVideoFrame* video_frame; - guint extra_surfaces; - MixVideoConfigParamsDec * config_params; -}; - -/** - * MixVideoFormatClass: - * - * MI-X Video object class - */ -struct _MixVideoFormatClass { +class MixVideoFormat { /*< public > */ - GObjectClass parent_class; +public: + MixVideoFormat(); + virtual ~MixVideoFormat(); - /* class members */ - - /*< public > */ - MixVideoFmtGetCapsFunc getcaps; - MixVideoFmtInitializeFunc initialize; - MixVideoFmtDecodeFunc decode; - MixVideoFmtFlushFunc flush; - MixVideoFmtEndOfStreamFunc eos; - MixVideoFmtDeinitializeFunc deinitialize; + + virtual MIX_RESULT GetCaps(GString *msg); + virtual MIX_RESULT Initialize( + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); + virtual MIX_RESULT Decode( + MixBuffer * bufin[], gint bufincnt, + MixVideoDecodeParams * decode_params); + virtual MIX_RESULT Flush(); + virtual MIX_RESULT EndOfStream(); + virtual MIX_RESULT Deinitialize(); + + + void Lock() { + mLock.lock(); + } + + void Unlock() { + mLock.unlock(); + } + + MixVideoFormat* Ref() { + ++ref_count; + return this; + } + MixVideoFormat* Unref() { + if (0 == (--ref_count)) { + delete this; + return NULL; + } else { + return this; + } + } + +public: + /*< private > */ + MixVideoMutex mLock; + gboolean initialized; + MixFrameManager *framemgr; + MixSurfacePool *surfacepool; + VADisplay va_display; + VAContextID va_context; + VAConfigID va_config; + 
VASurfaceID *va_surfaces; + guint va_num_surfaces; + VBPhandle parser_handle; + GString *mime_type; + guint frame_rate_num; + guint frame_rate_denom; + guint picture_width; + guint picture_height; + gboolean parse_in_progress; + gboolean discontinuity_frame_in_progress; + guint64 current_timestamp; + MixBufferPool *inputbufpool; + GQueue *inputbufqueue; + gboolean va_initialized; + gboolean end_picture_pending; + MixVideoFrame* video_frame; + guint extra_surfaces; + MixVideoConfigParamsDec * config_params; + guint ref_count ; }; -/** - * mix_videoformat_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_videoformat_get_type(void); /** * mix_videoformat_new: @@ -142,21 +147,23 @@ MixVideoFormat *mix_videoformat_ref(MixVideoFormat * mix); * * Decrement reference count of the object. */ -#define mix_videoformat_unref(obj) g_object_unref (G_OBJECT(obj)) +MixVideoFormat* mix_videoformat_unref(MixVideoFormat* mix); /* Class Methods */ MIX_RESULT mix_videofmt_getcaps(MixVideoFormat *mix, GString *msg); -MIX_RESULT mix_videofmt_initialize(MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); +MIX_RESULT mix_videofmt_initialize( + MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); -MIX_RESULT mix_videofmt_decode(MixVideoFormat *mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params); +MIX_RESULT mix_videofmt_decode( + MixVideoFormat *mix, MixBuffer * bufin[], + gint bufincnt, MixVideoDecodeParams * decode_params); MIX_RESULT mix_videofmt_flush(MixVideoFormat *mix); @@ -164,6 +171,4 @@ MIX_RESULT mix_videofmt_eos(MixVideoFormat *mix); MIX_RESULT mix_videofmt_deinitialize(MixVideoFormat *mix); -G_END_DECLS - #endif /* __MIX_VIDEOFORMAT_H__ */ diff --git a/mix_video/src/mixvideoformat_h264.c b/mix_video/src/mixvideoformat_h264.c deleted file mode 100644 index 1108a0b..0000000 --- a/mix_video/src/mixvideoformat_h264.c +++ /dev/null @@ -1,1814 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ -#include -#include -#ifndef ANDROID -#include -#endif - -#include "mixvideolog.h" -#include "mixvideoformat_h264.h" - -#ifdef MIX_LOG_ENABLE -static int mix_video_h264_counter = 0; -#endif /* MIX_LOG_ENABLE */ - -/* The parent class. 
The pointer will be saved - * in this class's initialization. The pointer - * can be used for chaining method calls if needed. - */ -static MixVideoFormatClass *parent_class = NULL; - -static void mix_videoformat_h264_finalize(GObject * obj); - -/* - * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMAT - */ -G_DEFINE_TYPE (MixVideoFormat_H264, mix_videoformat_h264, MIX_TYPE_VIDEOFORMAT); - -static void mix_videoformat_h264_init(MixVideoFormat_H264 * self) { - MixVideoFormat *parent = MIX_VIDEOFORMAT(self); - - /* public member initialization */ - /* These are all public because MixVideoFormat objects are completely internal to MixVideo, - no need for private members */ - self->dpb_surface_table = NULL; -#ifdef DECODER_ROBUSTNESS - self->last_decoded_frame = NULL; -#endif - - /* NOTE: we don't need to do this here. - * This just demonstrates how to access - * member variables belonging to the parent - */ - parent->initialized = FALSE; -} - -static void mix_videoformat_h264_class_init( - MixVideoFormat_H264Class * klass) { - - /* root class */ - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* direct parent class */ - MixVideoFormatClass *video_format_class = - MIX_VIDEOFORMAT_CLASS(klass); - - /* parent class for later use */ - parent_class = g_type_class_peek_parent(klass); - - /* set up finalizer */ - gobject_class->finalize = mix_videoformat_h264_finalize; - - /* setup vmethods with base implementation */ - /* This is where we can override base class methods if needed */ - video_format_class->getcaps = mix_videofmt_h264_getcaps; - video_format_class->initialize = mix_videofmt_h264_initialize; - video_format_class->decode = mix_videofmt_h264_decode; - video_format_class->flush = mix_videofmt_h264_flush; - video_format_class->eos = mix_videofmt_h264_eos; - video_format_class->deinitialize = mix_videofmt_h264_deinitialize; -} - -MixVideoFormat_H264 * -mix_videoformat_h264_new(void) { - MixVideoFormat_H264 *ret = - g_object_new(MIX_TYPE_VIDEOFORMAT_H264, NULL); - - return ret; -} - -void mix_videoformat_h264_finalize(GObject * obj) { - gint32 pret = VBP_OK; - - /* clean up here. 
*/ - - MixVideoFormat *parent = NULL; - MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(obj); - GObjectClass *root_class = (GObjectClass *) parent_class; - - parent = MIX_VIDEOFORMAT(self); - - //surfacepool is deallocated by parent - //inputbufqueue is deallocated by parent - //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces - - if (self->dpb_surface_table) { - //Free the DPB surface table - //First remove all the entries (frames will be unrefed) - g_hash_table_remove_all(self->dpb_surface_table); - //Then unref the table - g_hash_table_unref(self->dpb_surface_table); - self->dpb_surface_table = NULL; - } - - g_mutex_lock(parent->objectlock); - parent->initialized = TRUE; - parent->parse_in_progress = FALSE; - - //Close the parser - pret = vbp_close(parent->parser_handle); - parent->parser_handle = NULL; - if (pret != VBP_OK) - { - LOG_E( "Error closing parser\n"); - } - - g_mutex_unlock(parent->objectlock); - - /* Chain up parent */ - if (root_class->finalize) { - root_class->finalize(obj); - } -} - -MixVideoFormat_H264 * -mix_videoformat_h264_ref(MixVideoFormat_H264 * mix) { - return (MixVideoFormat_H264 *) g_object_ref(G_OBJECT(mix)); -} - -/* H.264 vmethods implementation */ -MIX_RESULT mix_videofmt_h264_getcaps(MixVideoFormat *mix, GString *msg) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (mix == NULL || msg == NULL) - { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - /* Chainup parent method. - */ - - if (parent_class->getcaps) { - ret = parent_class->getcaps(mix, msg); - } - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_video_h264_update_config_params( - MixVideoFormat *mix, - vbp_data_h264 *data) -{ - MixVideoFormat *parent = MIX_VIDEOFORMAT(mix); - - if (parent->picture_width == 0 || - parent->picture_height == 0 || - data->new_sps) - { - parent->picture_width = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16; - parent->picture_height = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16; - - mix_videoconfigparamsdec_set_picture_res( - mix->config_params, - parent->picture_width, - parent->picture_height); - } - - - // video_range has default value of 0. - mix_videoconfigparamsdec_set_video_range( - mix->config_params, - data->codec_data->video_full_range_flag); - - - uint8 color_matrix; - - - - switch (data->codec_data->matrix_coefficients) - { - case 1: - color_matrix = VA_SRC_BT709; - break; - - // ITU-R Recommendation BT.470-6 System B, G (MP4), same as - // SMPTE 170M/BT601 - case 5: - case 6: - color_matrix = VA_SRC_BT601; - break; - - default: - // unknown color matrix, set to 0 so color space flag will not be set. - color_matrix = 0; - break; - } - mix_videoconfigparamsdec_set_color_matrix(mix->config_params, color_matrix); - - mix_videoconfigparamsdec_set_pixel_aspect_ratio( - mix->config_params, - data->codec_data->sar_width, - data->codec_data->sar_height); - - mix_videoconfigparamsdec_set_bit_rate( - mix->config_params, - data->codec_data->bit_rate); - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_video_h264_handle_new_sequence( - MixVideoFormat *mix, - vbp_data_h264 *data) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V("new sequence is received.\n"); - - // original picture resolution - int width = mix->picture_width; - int height = mix->picture_height; - - mix_video_h264_update_config_params(mix, data); - - if (width != mix->picture_width || height != mix->picture_height) - { - // flush frame manager only if resolution is changed. 
- ret = mix_framemanager_flush(mix->framemgr); - } - - // TO DO: re-initialize VA - - return ret; -} - - -MIX_RESULT mix_videofmt_h264_initialize_va( - MixVideoFormat *mix, - vbp_data_h264 *data) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - VAConfigAttrib attrib; - - MixVideoFormat *parent = MIX_VIDEOFORMAT(mix); - //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); - - if (parent->va_initialized) - { - LOG_W("va already initialized.\n"); - return MIX_RESULT_SUCCESS; - } - - - LOG_V( "Begin\n"); - - //We are requesting RT attributes - attrib.type = VAConfigAttribRTFormat; - attrib.value = VA_RT_FORMAT_YUV420; - - //Initialize and save the VA config ID - //We use high profile for all kinds of H.264 profiles (baseline, main and high) - vret = vaCreateConfig( - parent->va_display, - VAProfileH264High, - VAEntrypointVLD, - &attrib, - 1, - &(parent->va_config)); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E("vaCreateConfig failed\n"); - goto cleanup; - } - - LOG_V( "Codec data says num_ref_frames is %d\n", data->codec_data->num_ref_frames); - - - // handle both frame and field coding for interlaced content - int num_ref_pictures = data->codec_data->num_ref_frames; - - - //Adding 1 to work around VBLANK issue, and another 1 to compensate cached frame that - // will not start decoding until a new frame is received. - parent->va_num_surfaces = 1 + 1 + parent->extra_surfaces + (((num_ref_pictures + 3) < - MIX_VIDEO_H264_SURFACE_NUM) ? - (num_ref_pictures + 3) - : MIX_VIDEO_H264_SURFACE_NUM); - - parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*parent->va_num_surfaces); - if (parent->va_surfaces == NULL) - { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "parent->va_surfaces == NULL. \n"); - goto cleanup; - } - - LOG_V( "Codec data says picture size is %d x %d\n", - (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, - (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16); - LOG_V( "getcaps says picture size is %d x %d\n", parent->picture_width, parent->picture_height); - - vret = vaCreateSurfaces( - parent->va_display, - parent->picture_width, - parent->picture_height, - VA_RT_FORMAT_YUV420, - parent->va_num_surfaces, - parent->va_surfaces); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Error allocating surfaces\n"); - goto cleanup; - } - - LOG_V( "Created %d libva surfaces\n", parent->va_num_surfaces); - - //Initialize the surface pool - ret = mix_surfacepool_initialize( - parent->surfacepool, - parent->va_surfaces, - parent->va_num_surfaces, - parent->va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing. 
- default: - ret = MIX_RESULT_ALREADY_INIT; - LOG_E( "Error init surface pool\n"); - goto cleanup; - break; - } - - if (data->codec_data->pic_order_cnt_type == 0) - { - int max = (int)pow(2, data->codec_data->log2_max_pic_order_cnt_lsb_minus4 + 4); - mix_framemanager_set_max_picture_number(parent->framemgr, max); - } - - //Initialize and save the VA context ID - //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 - vret = vaCreateContext( - parent->va_display, - parent->va_config, - parent->picture_width, - parent->picture_height, - 0, // no flag set - parent->va_surfaces, - parent->va_num_surfaces, - &(parent->va_context)); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Error initializing video driver\n"); - goto cleanup; - } - - parent->va_initialized = TRUE; - -cleanup: - /* nothing to clean up */ - - return ret; - -} - - -MIX_RESULT mix_videofmt_h264_update_ref_pic_list( - MixVideoFormat *mix, - VAPictureParameterBufferH264* picture_params, - VASliceParameterBufferH264* slice_params) -{ - MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); - - //Do slice parameters - - //First patch up the List0 and List1 surface IDs - int j = 0; - guint poc = 0; - gpointer video_frame = NULL; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - for (; j <= slice_params->num_ref_idx_l0_active_minus1; j++) - { - if (!(slice_params->RefPicList0[j].flags & VA_PICTURE_H264_INVALID)) - { - poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList0[j])); - video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc); - if (video_frame == NULL) - { - LOG_E("unable to find surface of picture %d (current picture %d).", - poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic))); - ret = MIX_RESULT_DROPFRAME; //return non-fatal error - goto cleanup; - } - else - { - slice_params->RefPicList0[j].picture_id = - ((MixVideoFrame *)video_frame)->frame_id; - } - } - - } - - if ((slice_params->slice_type == 1) || (slice_params->slice_type == 6)) - { - for (j = 0; j <= slice_params->num_ref_idx_l1_active_minus1; j++) - { - if (!(slice_params->RefPicList1[j].flags & VA_PICTURE_H264_INVALID)) - { - poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList1[j])); - video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc); - if (video_frame == NULL) - { - LOG_E("unable to find surface of picture %d (current picture %d).", - poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic))); - ret = MIX_RESULT_DROPFRAME; //return non-fatal error - goto cleanup; - } - else - { - slice_params->RefPicList1[j].picture_id = - ((MixVideoFrame *)video_frame)->frame_id; - } - } - } - } - -cleanup: - // do nothing - - return ret; -} - - -MIX_RESULT mix_videofmt_h264_decode_a_slice( - MixVideoFormat *mix, - vbp_data_h264 *data, - int picture_index, - int slice_index) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - VADisplay vadisplay = NULL; - VAContextID vacontext; - guint buffer_id_cnt = 0; - - // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data - VABufferID buffer_ids[4]; - - - LOG_V( "Begin\n"); - - //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); - - vbp_picture_data_h264* pic_data = &(data->pic_data[picture_index]); - vbp_slice_data_h264* slice_data = &(pic_data->slc_data[slice_index]); - VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms; - VASliceParameterBufferH264* slice_params = &(slice_data->slc_parms); - vadisplay = mix->va_display; - vacontext = 
mix->va_context; - -#ifdef DECODER_ROBUSTNESS - if ((slice_params->first_mb_in_slice == 0) || (!mix->end_picture_pending)) -#else - if (slice_params->first_mb_in_slice == 0) -#endif - { - // this is the first slice of the picture - if (mix->end_picture_pending) - { - // interlace content, decoding the first field - vret = vaEndPicture(vadisplay, vacontext); - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E("vaEndPicture failed.\n"); - goto cleanup; - } - - // for interlace content, top field may be valid only after the second field is parsed - mix_videoframe_set_displayorder(mix->video_frame, pic_params->CurrPic.TopFieldOrderCnt); - } - - gulong surface = 0; - - LOG_V("mix->video_frame = 0x%x\n", mix->video_frame); - - //Get our surface ID from the frame object - ret = mix_videoframe_get_frame_id(mix->video_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error getting surface ID from frame object\n"); - goto cleanup; - } - -#ifdef DECODER_ROBUSTNESS - LOG_V( "Updating DPB for libva\n"); - - //Now handle the reference frames and surface IDs for DPB and current frame - mix_videofmt_h264_handle_ref_frames(mix, pic_params, mix->video_frame); - -#ifdef HACK_DPB - //We have to provide a hacked DPB rather than complete DPB for libva as workaround - ret = mix_videofmt_h264_hack_dpb(mix, pic_data); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error reference frame not found\n"); - //Need to remove the frame we inserted in _handle_ref_frames above, since we are not going to decode it - mix_videofmt_h264_cleanup_ref_frame(mix, pic_params, mix->frame); - goto cleanup; - } -#endif - - LOG_V( "Calling vaBeginPicture\n"); - - //Now we can begin the picture - vret = vaBeginPicture(vadisplay, vacontext, surface); - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaBeginPicture\n"); - goto cleanup; - } - - // vaBeginPicture needs a matching vaEndPicture - mix->end_picture_pending = TRUE; - -#else - LOG_V( "Calling vaBeginPicture\n"); - - //Now we can begin the picture - vret = vaBeginPicture(vadisplay, vacontext, surface); - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaBeginPicture\n"); - goto cleanup; - } - - // vaBeginPicture needs a matching vaEndPicture - mix->end_picture_pending = TRUE; - - LOG_V( "Updating DPB for libva\n"); - - //Now handle the reference frames and surface IDs for DPB and current frame - mix_videofmt_h264_handle_ref_frames(mix, pic_params, mix->video_frame); - -#ifdef HACK_DPB - //We have to provide a hacked DPB rather than complete DPB for libva as workaround - ret = mix_videofmt_h264_hack_dpb(mix, pic_data); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error reference frame not found\n"); - goto cleanup; - } -#endif - -#endif - - //Libva buffer set up - - - LOG_V( "Creating libva picture parameter buffer\n"); - LOG_V( "picture parameter buffer shows num_ref_frames is %d\n", pic_params->num_ref_frames); - - //First the picture parameter buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAPictureParameterBufferType, - sizeof(VAPictureParameterBufferH264), - 1, - pic_params, - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto cleanup; - } - - buffer_id_cnt++; - - LOG_V( "Creating libva IQMatrix buffer\n"); - - - //Then the IQ matrix buffer - vret = vaCreateBuffer( - vadisplay, - 
vacontext, - VAIQMatrixBufferType, - sizeof(VAIQMatrixBufferH264), - 1, - data->IQ_matrix_buf, - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto cleanup; - } - buffer_id_cnt++; - } - -#ifndef DECODER_ROBUSTNESS - if (!mix->end_picture_pending) - { - LOG_E("first slice is lost??????????\n"); - ret = MIX_RESULT_DROPFRAME; - goto cleanup; - } -#endif - - //Now for slices - - ret = mix_videofmt_h264_update_ref_pic_list(mix, pic_params, slice_params); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E("mix_videofmt_h264_update_ref_pic_list failed.\n"); - goto cleanup; - } - - LOG_V( "Creating libva slice parameter buffer\n"); - - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceParameterBufferType, - sizeof(VASliceParameterBufferH264), - 1, - slice_params, - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto cleanup; - } - - buffer_id_cnt++; - - - //Do slice data - - //slice data buffer pointer - //Note that this is the original data buffer ptr; - // offset to the actual slice data is provided in - // slice_data_offset in VASliceParameterBufferH264 - - LOG_V( "Creating libva slice data buffer, using slice address %x, with offset %d and size %u\n", - (guint)slice_data->buffer_addr, slice_params->slice_data_offset, slice_data->slice_size); - - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceDataBufferType, - slice_data->slice_size, //size - 1, //num_elements - slice_data->buffer_addr + slice_data->slice_offset, - &buffer_ids[buffer_id_cnt]); - - buffer_id_cnt++; - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto cleanup; - } - - - LOG_V( "Calling vaRenderPicture\n"); - - //Render the picture - vret = vaRenderPicture( - vadisplay, - vacontext, - buffer_ids, - buffer_id_cnt); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaRenderPicture\n"); - goto cleanup; - } - - -cleanup: - LOG_V( "End\n"); - - return ret; - -} - - -MIX_RESULT mix_videofmt_h264_decode_end( - MixVideoFormat *mix, - gboolean drop_picture) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - MixVideoFormat* parent = MIX_VIDEOFORMAT(mix); -#ifdef DECODER_ROBUSTNESS - MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); -#else - //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); -#endif - - LOG_V("Begin\n"); - - if (!parent->end_picture_pending) - { - if (parent->video_frame) - { - ret = MIX_RESULT_FAIL; - LOG_E("Unexpected: video_frame is not unreferenced.\n"); - } - goto cleanup; - } - - if (parent->video_frame == NULL) - { - ret = MIX_RESULT_FAIL; - LOG_E("Unexpected: video_frame has been unreferenced.\n"); - goto cleanup; - } - - LOG_V( "Calling vaEndPicture\n"); - vret = vaEndPicture(parent->va_display, parent->va_context); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaEndPicture\n"); - goto cleanup; - } - -#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ - - LOG_V( "Calling vaSyncSurface\n"); - - //Decode the picture - vret = vaSyncSurface(parent->va_display, parent->video_frame->frame_id); - - if (vret != VA_STATUS_SUCCESS) - { - ret = 
MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaSyncSurface\n"); - goto cleanup; - } -#endif - - if (drop_picture) - { - // we are asked to drop this decoded picture - mix_videoframe_unref(parent->video_frame); - parent->video_frame = NULL; - goto cleanup; - } - - LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", - parent->video_frame->timestamp); - -#ifdef DECODER_ROBUSTNESS - if (self->last_decoded_frame) - mix_videoframe_unref(self->last_decoded_frame); - self->last_decoded_frame = parent->video_frame; - mix_videoframe_ref(self->last_decoded_frame); -#endif - - //Enqueue the decoded frame using frame manager - ret = mix_framemanager_enqueue(parent->framemgr, parent->video_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error enqueuing frame object\n"); - goto cleanup; - } - else - { - // video frame is passed to frame manager - parent->video_frame = NULL; - LOG_V("video_frame is assigned to be NULL !\n"); - } - -cleanup: - if (parent->video_frame) - { - /* this always indicates an error */ - mix_videoframe_unref(parent->video_frame); - parent->video_frame = NULL; - } - parent->end_picture_pending = FALSE; - LOG_V("End\n"); - return ret; - -} - - -MIX_RESULT mix_videofmt_h264_decode_continue( - MixVideoFormat *mix, - vbp_data_h264 *data) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - int i, j; - vbp_picture_data_h264* pic_data = NULL; - - LOG_V("Begin\n"); - - for (i = 0; i < data->num_pictures; i++) - { - pic_data = &(data->pic_data[i]); - if (pic_data->pic_parms == NULL) - { - ret = MIX_RESULT_FAIL; - LOG_E("pic_data->pic_parms is NULL.\n"); - goto cleanup; - } - - if (pic_data->slc_data == NULL) - { - ret = MIX_RESULT_FAIL; - LOG_E("pic_data->slc_data is NULL.\n"); - goto cleanup; - } - - if (pic_data->num_slices == 0) - { - ret = MIX_RESULT_FAIL; - LOG_E("pic_data->num_slices == 0.\n"); - goto cleanup; - } - - LOG_V( "num_slices is %d\n", pic_data->num_slices); - for (j = 0; j < pic_data->num_slices; j++) - { - LOG_V( "Decoding slice %d\n", j); - ret = mix_videofmt_h264_decode_a_slice(mix, data, i, j); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "mix_videofmt_h264_decode_a_slice failed, error = %#X.", ret); - goto cleanup; - } - } - } - -cleanup: - // nothing to cleanup; - - LOG_V("End\n"); - return ret; -} - - -MIX_RESULT mix_videofmt_h264_set_frame_type( - MixVideoFormat *mix, - vbp_data_h264 *data) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - //Set the picture type (I, B or P frame) - //For H.264 we use the first encountered slice type for this (check - may need to change later to search all slices for B type) - MixFrameType frame_type = TYPE_INVALID; - - switch (data->pic_data[0].slc_data[0].slc_parms.slice_type) - { - case 0: - case 3: - case 5: - case 8: - frame_type = TYPE_P; - break; - case 1: - case 6: - frame_type = TYPE_B; - break; - case 2: - case 4: - case 7: - case 9: - frame_type = TYPE_I; - break; - default: - break; - } - - //Do not have to check for B frames after a seek - //Note: Demux should seek to IDR (instantaneous decoding refresh) frame, otherwise - // DPB will not be correct and frames may come in with invalid references - // This will be detected when DPB is checked for valid mapped surfaces and - // error returned from there. 
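/* [Editor's note -- illustration only, not part of the original patch.]
 * The switch above follows the H.264 slice_type numbering, in which values
 * 5..9 repeat the meanings of 0..4 (P, B, I, SP, SI); SP slices are binned
 * with P and SI slices with I. A compact equivalent is sketched below under
 * that assumption. The helper name is hypothetical; MixFrameType and the
 * TYPE_* constants are the ones already used in the switch above.
 */
static MixFrameType slice_type_to_frame_type(guint slice_type)
{
    switch (slice_type % 5) /* fold the duplicate range 5..9 onto 0..4 */
    {
        case 0: /* P slice */
        case 3: /* SP slice, binned with P */
            return TYPE_P;
        case 1: /* B slice */
            return TYPE_B;
        case 2: /* I slice */
        case 4: /* SI slice, binned with I */
            return TYPE_I;
        default:
            return TYPE_INVALID; /* unreachable: x % 5 is always 0..4 */
    }
}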
- - LOG_V( "frame type is %d\n", frame_type); - - //Set the frame type for the frame object (used in reordering by frame manager) - ret = mix_videoframe_set_frame_type(mix->video_frame, frame_type); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error setting frame type on frame\n"); - } - - return ret; -} - - -MIX_RESULT mix_videofmt_h264_set_frame_structure( - MixVideoFormat *mix, - vbp_data_h264 *data) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (data->pic_data[0].pic_parms->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD) - { - mix_videoframe_set_frame_structure(mix->video_frame, VA_BOTTOM_FIELD | VA_TOP_FIELD); - } - else - { - mix_videoframe_set_frame_structure(mix->video_frame, VA_FRAME_PICTURE); - } - - return ret; -} - - -MIX_RESULT mix_videofmt_h264_decode_begin( - MixVideoFormat *mix, - vbp_data_h264 *data) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - //Get a frame from the surface pool - LOG_V("Begin\n"); - ret = mix_surfacepool_get(mix->surfacepool, &(mix->video_frame)); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error getting frame from surfacepool\n"); - return ret; - } - - /* the following calls will always succeed */ - - // set frame type - ret = mix_videofmt_h264_set_frame_type(mix, data); - - // set frame structure - ret = mix_videofmt_h264_set_frame_structure(mix, data); - - //Set the discontinuity flag - mix_videoframe_set_discontinuity(mix->video_frame, mix->discontinuity_frame_in_progress); - - //Set the timestamp - mix_videoframe_set_timestamp(mix->video_frame, mix->current_timestamp); - - // Set displayorder - ret = mix_videoframe_set_displayorder(mix->video_frame, - data->pic_data[0].pic_parms->CurrPic.TopFieldOrderCnt); - if(ret != MIX_RESULT_SUCCESS) - { - LOG_E("Error setting displayorder\n"); - return ret; - } - - ret = mix_videofmt_h264_decode_continue(mix, data); - - LOG_V("End\n"); - return ret; - -} - - -MIX_RESULT mix_videofmt_h264_decode_a_buffer( - MixVideoFormat *mix, - MixBuffer * bufin, - guint64 ts, - gboolean discontinuity, - MixVideoDecodeParams * decode_params) -{ - uint32 pret = 0; - MixVideoFormat *parent = NULL; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - vbp_data_h264 *data = NULL; - - LOG_V( "Begin\n"); - - parent = MIX_VIDEOFORMAT(mix); - - LOG_V( "Calling parse for current frame, parse handle %d\n", (int)parent->parser_handle); - pret = vbp_parse(parent->parser_handle, - bufin->data, - bufin->size, - FALSE); - - LOG_V( "Called parse for current frame\n"); - if ((pret != VBP_DONE) &&(pret != VBP_OK)) - { - ret = MIX_RESULT_ERROR_PROCESS_STREAM; // MIX_RESULT_DROPFRAME; - LOG_E( "vbp_parse failed.\n"); - goto cleanup; - } - - //query for data - pret = vbp_query(parent->parser_handle, (void *) &data); - - if ((pret != VBP_OK) || (data == NULL)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "vbp_query failed.\n"); - goto cleanup; - } - LOG_V( "Called query for current frame\n"); - - - if (data->has_sps == 0 || data->has_pps == 0) - { - ret = MIX_RESULT_MISSING_CONFIG; // MIX_RESULT_SUCCESS; - LOG_V("SPS or PPS is not available.\n"); - goto cleanup; - } - - if (data->new_sps) - { - decode_params->new_sequence = data->new_sps; - - ret = mix_video_h264_handle_new_sequence(parent, data); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_V("mix_video_h264_handle_new_sequence failed.\n"); - goto cleanup; - } - } - - if (parent->va_initialized == FALSE) - { - mix_video_h264_update_config_params(parent, data); - - LOG_V("try initializing VA...\n"); - ret = mix_videofmt_h264_initialize_va(parent, data); - if (ret != MIX_RESULT_SUCCESS) - { - 
LOG_V("mix_videofmt_h264_initialize_va failed.\n"); - goto cleanup; - } - } - - // first pic_data always exists, check if any slice is parsed - if (data->pic_data[0].num_slices == 0) - { - ret = MIX_RESULT_SUCCESS; - LOG_V("slice is not available.\n"); - goto cleanup; - } - - guint64 last_ts = parent->current_timestamp; - parent->current_timestamp = ts; - parent->discontinuity_frame_in_progress = discontinuity; - - LOG_V("ts = %lli last_ts = %lli\n", ts, last_ts); - - if (last_ts != ts) - { - // finish decoding the last frame - ret = mix_videofmt_h264_decode_end(parent, FALSE); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_V("mix_videofmt_h264_decode_end failed.\n"); - goto cleanup; - } - - // start decoding a new frame - ret = mix_videofmt_h264_decode_begin(parent, data); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_V("mix_videofmt_h264_decode_begin failed.\n"); - goto cleanup; - } - } - else - { - // parital frame - LOG_V("partial frame handling...\n"); - ret = mix_videofmt_h264_decode_continue(parent, data); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_V("mix_videofmt_h264_decode_continue failed.\n"); - goto cleanup; - } - } - - cleanup: - - LOG_V( "End\n"); - - return ret; -} - - -MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display ) { - - uint32 pret = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - enum _vbp_parser_type ptype = VBP_H264; - vbp_data_h264 *data = NULL; - MixVideoFormat *parent = NULL; - MixIOVec *header = NULL; - - if (mix == NULL || config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL) - { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - /* Chainup parent method. 
*/ - - if (parent_class->initialize) { - ret = parent_class->initialize(mix, config_params, - frame_mgr, input_buf_pool, surface_pool, - va_display); - } - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error initializing\n"); - return ret; - } - - if (!MIX_IS_VIDEOFORMAT_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMAT(mix); - MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); - - LOG_V( "Locking\n"); - //From now on, we exit this function through cleanup: - g_mutex_lock(parent->objectlock); - - parent->surfacepool = mix_surfacepool_new(); - *surface_pool = parent->surfacepool; - - if (parent->surfacepool == NULL) - { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "parent->surfacepool == NULL.\n"); - goto cleanup; - } - - //Create our table of Decoded Picture Buffer "in use" surfaces - self->dpb_surface_table = g_hash_table_new_full( - NULL, - NULL, - mix_videofmt_h264_destroy_DPB_key, - mix_videofmt_h264_destroy_DPB_value); - - if (self->dpb_surface_table == NULL) - { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating dbp surface table\n"); - goto cleanup; //leave this goto here in case other code is added between here and cleanup label - } - - ret = mix_videoconfigparamsdec_get_extra_surface_allocation( - config_params, - &parent->extra_surfaces); - - if (ret != MIX_RESULT_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot get extra surface allocation setting\n"); - goto cleanup; - } - - LOG_V( "Before vbp_open\n"); - //Load the bitstream parser - pret = vbp_open(ptype, &(parent->parser_handle)); - LOG_V( "After vbp_open\n"); - - if (!(pret == VBP_OK)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error opening parser\n"); - goto cleanup; - } - LOG_V( "Opened parser\n"); - - - ret = mix_videoconfigparamsdec_get_header(config_params, &header); - - if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) - { - // Delay initializing VA if codec configuration data is not ready, but don't return an error. - ret = MIX_RESULT_SUCCESS; - LOG_W( "Codec data is not available in the configuration parameter.\n"); - goto cleanup; - } - - LOG_V( "Calling parse on header data, handle %d\n", (int)parent->parser_handle); - - pret = vbp_parse( - parent->parser_handle, - header->data, - header->data_size, - TRUE); - - if (!((pret == VBP_OK) || (pret == VBP_DONE))) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error parsing header data\n"); - goto cleanup; - } - - LOG_V( "Parsed header\n"); - - //Get the header data and save - pret = vbp_query(parent->parser_handle, (void *)&data); - - if ((pret != VBP_OK) || (data == NULL)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error reading parsed header data\n"); - goto cleanup; - } - - LOG_V( "Queried parser for header data\n"); - - mix_video_h264_update_config_params(mix, data); - - ret = mix_videofmt_h264_initialize_va(mix, data); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error initializing va. 
\n"); - goto cleanup; - } - - cleanup: - if (ret != MIX_RESULT_SUCCESS) { - pret = vbp_close(parent->parser_handle); - parent->parser_handle = NULL; - parent->initialized = FALSE; - - } else { - parent->initialized = TRUE; - } - - if (header != NULL) - { - if (header->data != NULL) - g_free(header->data); - g_free(header); - header = NULL; - } - - - LOG_V( "Unlocking\n"); - g_mutex_unlock(parent->objectlock); - - - return ret; -} - -MIX_RESULT mix_videofmt_h264_decode( - MixVideoFormat *mix, - MixBuffer * bufin[], - gint bufincnt, - MixVideoDecodeParams * decode_params) { - - int i = 0; - MixVideoFormat *parent = NULL; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - guint64 ts = 0; - gboolean discontinuity = FALSE; - - LOG_V( "Begin\n"); - - if (mix == NULL || bufin == NULL || decode_params == NULL || bufincnt == 0) - { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - /* Chainup parent method. - We are not chaining up to parent method for now. - */ - -#if 0 - if (parent_class->decode) { - return parent_class->decode(mix, bufin, bufincnt, decode_params); - } -#endif - - if (!MIX_IS_VIDEOFORMAT_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMAT(mix); - - ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); - if (ret != MIX_RESULT_SUCCESS) - { - // never happen - return MIX_RESULT_FAIL; - } - - ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); - if (ret != MIX_RESULT_SUCCESS) - { - // never happen - return MIX_RESULT_FAIL; - } - - decode_params->new_sequence = FALSE; - - //From now on, we exit this function through cleanup: - - LOG_V( "Locking\n"); - g_mutex_lock(parent->objectlock); - - LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_h264_counter++, ts); - - for (i = 0; i < bufincnt; i++) - { - LOG_V( "Decoding a buf %x, size %d\n", (guint)bufin[i]->data, bufin[i]->size); - - // decode a buffer at a time - ret = mix_videofmt_h264_decode_a_buffer( - mix, - bufin[i], - ts, - discontinuity, - decode_params); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E("mix_videofmt_h264_decode_a_buffer failed.\n"); - goto cleanup; - } - } - - -cleanup: - - LOG_V( "Unlocking\n"); - g_mutex_unlock(parent->objectlock); - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_videofmt_h264_flush(MixVideoFormat *mix) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (mix == NULL) - { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - uint32 pret = 0; - - /* Chainup parent method. - We are not chaining up to parent method for now. 
- */ - -#if 0 - if (parent_class->flush) { - return parent_class->flush(mix, msg); - } -#endif - - MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); - - g_mutex_lock(mix->objectlock); - - // drop any decode-pending picture, and ignore return value - mix_videofmt_h264_decode_end(mix, TRUE); - - //Clear parse_in_progress flag and current timestamp - mix->parse_in_progress = FALSE; - mix->discontinuity_frame_in_progress = FALSE; - mix->current_timestamp = (guint64)-1; - - //Clear the DPB surface table - g_hash_table_remove_all(self->dpb_surface_table); - - //Call parser flush - pret = vbp_flush(mix->parser_handle); - if (pret != VBP_OK) - ret = MIX_RESULT_FAIL; - - g_mutex_unlock(mix->objectlock); - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_videofmt_h264_eos(MixVideoFormat *mix) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (mix == NULL) - { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - /* Chainup parent method. - We are not chaining up to parent method for now. - */ - -#if 0 - if (parent_class->eos) { - return parent_class->eos(mix, msg); - } -#endif - - g_mutex_lock(mix->objectlock); - - // finished decoding the pending frame - mix_videofmt_h264_decode_end(mix, FALSE); - - g_mutex_unlock(mix->objectlock); - - //Call Frame Manager with _eos() - ret = mix_framemanager_eos(mix->framemgr); - - LOG_V( "End\n"); - - return ret; - - -} - -MIX_RESULT mix_videofmt_h264_deinitialize(MixVideoFormat *mix) { - -//Note this method is not called; may remove in future - - LOG_V( "Begin\n"); - - if (mix == NULL) - { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - /* Chainup parent method. - */ - - if (parent_class->deinitialize) { - return parent_class->deinitialize(mix); - } - - //Most stuff is cleaned up in parent_class->finalize() and in _finalize - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - - -#define HACK_DPB -#ifdef HACK_DPB -static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, - vbp_picture_data_h264* pic_data - ) -{ - - gboolean found = FALSE; - guint tflags = 0; - VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms; - VAPictureH264 *pRefList = NULL; - int i = 0, j = 0, k = 0, list = 0; - - MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); - - //Set the surface ID for everything in the parser DPB to INVALID - for (i = 0; i < 16; i++) - { - pic_params->ReferenceFrames[i].picture_id = VA_INVALID_SURFACE; - pic_params->ReferenceFrames[i].frame_idx = -1; - pic_params->ReferenceFrames[i].TopFieldOrderCnt = -1; - pic_params->ReferenceFrames[i].BottomFieldOrderCnt = -1; - pic_params->ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID; //assuming we don't need to OR with existing flags - } - - pic_params->num_ref_frames = 0; - - for (i = 0; i < pic_data->num_slices; i++) - { - - //Copy from the List0 and List1 surface IDs - pRefList = pic_data->slc_data[i].slc_parms.RefPicList0; - for (list = 0; list < 2; list++) - { - for (j = 0; j < 32; j++) - { - if (pRefList[j].flags & VA_PICTURE_H264_INVALID) - { - break; //no more valid reference frames in this list - } - found = FALSE; - for (k = 0; k < pic_params->num_ref_frames; k++) - { - if (pic_params->ReferenceFrames[k].TopFieldOrderCnt == pRefList[j].TopFieldOrderCnt) - { - ///check for complementary field - tflags = pic_params->ReferenceFrames[k].flags | pRefList[j].flags; - //If both TOP and BOTTOM are set, we'll clear those flags - if ((tflags & VA_PICTURE_H264_TOP_FIELD) && - (tflags & 
VA_PICTURE_H264_BOTTOM_FIELD)) - pic_params->ReferenceFrames[k].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; - found = TRUE; //already in the DPB; will not add this one - break; - } - } - if (!found) - { - guint poc = mix_videofmt_h264_get_poc(&(pRefList[j])); - gpointer video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc); - -#ifdef DECODER_ROBUSTNESS - if (!video_frame) - { - if (!self->last_decoded_frame) - { - //No saved reference frame, can't recover this one - return MIX_RESULT_DROPFRAME; - } - - pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = - ((MixVideoFrame *)self->last_decoded_frame)->frame_id; - LOG_V( "Reference frame not found, substituting %d\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); - - } - else - { - pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = - ((MixVideoFrame *)video_frame)->frame_id; - } -#else - if (!video_frame) return MIX_RESULT_DROPFRAME; //return non-fatal error - - pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = - ((MixVideoFrame *)video_frame)->frame_id; -#endif - - LOG_V( "Inserting frame id %d into DPB\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); - - pic_params->ReferenceFrames[pic_params->num_ref_frames].flags = - pRefList[j].flags; - pic_params->ReferenceFrames[pic_params->num_ref_frames].frame_idx = - pRefList[j].frame_idx; - pic_params->ReferenceFrames[pic_params->num_ref_frames].TopFieldOrderCnt = - pRefList[j].TopFieldOrderCnt; - pic_params->ReferenceFrames[pic_params->num_ref_frames++].BottomFieldOrderCnt = - pRefList[j].BottomFieldOrderCnt; - } - - } - pRefList = pic_data->slc_data[i].slc_parms.RefPicList1; - } - - } - return MIX_RESULT_SUCCESS; -} -#endif - - - -MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix, - VAPictureParameterBufferH264* pic_params, - MixVideoFrame * current_frame - ) { - - guint poc = 0; - - LOG_V( "Begin\n"); - - if (mix == NULL || current_frame == NULL || pic_params == NULL) - { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - - LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d. 
Surface ID is %d\n", pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, pic_params->CurrPic.BottomFieldOrderCnt, (gint) current_frame->frame_id); - -#ifdef MIX_LOG_ENABLE - if (pic_params->CurrPic.flags & VA_PICTURE_H264_INVALID) - LOG_V( "Flags show VA_PICTURE_H264_INVALID\n"); - - if (pic_params->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD) - LOG_V( "Flags show VA_PICTURE_H264_TOP_FIELD\n"); - - if (pic_params->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD) - LOG_V( "Flags show VA_PICTURE_H264_BOTTOM_FIELD\n"); - - if (pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) - LOG_V( "Flags show VA_PICTURE_H264_SHORT_TERM_REFERENCE\n"); - - if (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE) - LOG_V( "Flags show VA_PICTURE_H264_LONG_TERM_REFERENCE\n"); -#endif - - MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); - - - //First we need to check the parser DBP against our DPB table - //So for each item in our DBP table, we look to see if it is in the parser DPB - //If it is not, it gets unrefed and removed -#ifdef MIX_LOG_ENABLE - guint num_removed = -#endif - g_hash_table_foreach_remove(self->dpb_surface_table, mix_videofmt_h264_check_in_DPB, pic_params); - - LOG_V( "%d entries removed from DPB surface table at this frame\n", num_removed); - - - MixVideoFrame *mvf = NULL; - gboolean found = FALSE; - //Set the surface ID for everything in the parser DPB - int i = 0; - for (; i < 16; i++) - { - if (!(pic_params->ReferenceFrames[i].flags & VA_PICTURE_H264_INVALID)) - { - - poc = mix_videofmt_h264_get_poc(&(pic_params->ReferenceFrames[i])); - LOG_V( "Looking up poc %d in dpb table\n", poc); - found = g_hash_table_lookup_extended(self->dpb_surface_table, (gpointer)poc, NULL, (gpointer)&mvf); - - if (found) - { - pic_params->ReferenceFrames[i].picture_id = mvf->frame_id; - LOG_V( "Looked up poc %d in dpb table found frame ID %d\n", poc, (gint)mvf->frame_id); - } else { - LOG_V( "Looking up poc %d in dpb table did not find value\n", poc); - } - LOG_V( "For poc %d, set surface id for DPB index %d to %d\n", poc, i, (gint)pic_params->ReferenceFrames[i].picture_id); - } - - } - - - //Set picture_id for current picture - pic_params->CurrPic.picture_id = current_frame->frame_id; - - //Check to see if current frame is a reference frame - if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || - (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) - { - //Get current frame's POC - poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); - - //Increment the reference count for this frame - mix_videoframe_ref(current_frame); - - LOG_V( "Inserting poc %d, surfaceID %d\n", poc, (gint)current_frame->frame_id); - //Add this frame to the DPB surface table - g_hash_table_insert(self->dpb_surface_table, (gpointer)poc, current_frame); - } - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmt_h264_cleanup_ref_frame(MixVideoFormat *mix, - VAPictureParameterBufferH264* pic_params, - MixVideoFrame * current_frame - ) { - - guint poc = 0; - - LOG_V( "Begin\n"); - - if (mix == NULL || current_frame == NULL || pic_params == NULL) - { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - - LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d. 
Surface ID is %d\n", pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, pic_params->CurrPic.BottomFieldOrderCnt, (gint) current_frame->frame_id); - - MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); - - //Check to see if current frame is a reference frame - if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) - { - //Get current frame's POC - poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); - - //We don't need to decrement the ref count for the video frame here; it's done elsewhere - - LOG_V( "Removing poc %d, surfaceID %d\n", poc, (gint)current_frame->frame_id); - //Remove this frame from the DPB surface table - g_hash_table_remove(self->dpb_surface_table, (gpointer)poc); - } - - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - -guint mix_videofmt_h264_get_poc(VAPictureH264 *pic) -{ - if (pic == NULL) - return 0; - - if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) - return pic->BottomFieldOrderCnt; - - if (pic->flags & VA_PICTURE_H264_TOP_FIELD) - return pic->TopFieldOrderCnt; - - return pic->TopFieldOrderCnt; - -} - - -gboolean mix_videofmt_h264_check_in_DPB(gpointer key, gpointer value, gpointer user_data) -{ - gboolean ret = TRUE; - - if ((value == NULL) || (user_data == NULL)) //Note that 0 is valid value for key - return FALSE; - - VAPictureH264* vaPic = NULL; - int i = 0; - for (; i < 16; i++) - { - vaPic = &(((VAPictureParameterBufferH264*)user_data)->ReferenceFrames[i]); - if (vaPic->flags & VA_PICTURE_H264_INVALID) - continue; - - if ((guint)key == vaPic->TopFieldOrderCnt || - (guint)key == vaPic->BottomFieldOrderCnt) - { - ret = FALSE; - break; - } - } - - return ret; -} - - -void mix_videofmt_h264_destroy_DPB_key(gpointer data) -{ - //TODO remove this method and don't register it with the hash table foreach call; it is no longer needed - LOG_V( "Begin, poc of %d\n", (guint)data); - LOG_V( "End\n"); - - return; -} - -void mix_videofmt_h264_destroy_DPB_value(gpointer data) -{ - LOG_V( "Begin\n"); - if (data != NULL) - { - mix_videoframe_unref((MixVideoFrame *)data); - } - LOG_V( "End\n"); - return; -} - - -MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix, - guint64 timestamp - ) { - - // no longer used, need to be removed. - return MIX_RESULT_SUCCESS; -} - - diff --git a/mix_video/src/mixvideoformat_h264.cpp b/mix_video/src/mixvideoformat_h264.cpp new file mode 100644 index 0000000..27e1447 --- /dev/null +++ b/mix_video/src/mixvideoformat_h264.cpp @@ -0,0 +1,1393 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. 
Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include +#include +#ifndef ANDROID +#include +#endif + +#include "mixvideolog.h" +#include "mixvideoformat_h264.h" + +#ifdef MIX_LOG_ENABLE +static int mix_video_h264_counter = 0; +#endif /* MIX_LOG_ENABLE */ + +// Local Help Funcs + + +/* The parent class. The pointer will be saved + * in this class's initialization. The pointer + * can be used for chaining method call if needed. + */ +MixVideoFormat_H264::MixVideoFormat_H264() + :dpb_surface_table(NULL) +#ifdef DECODER_ROBUSTNESS + ,last_decoded_frame(NULL) +#endif +{} + +MixVideoFormat_H264::~MixVideoFormat_H264() { + gint32 pret = VBP_OK; + /* clean up here. */ + //surfacepool is deallocated by parent + //inputbufqueue is deallocated by parent + //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces + + if (this->dpb_surface_table) { + //Free the DPB surface table + //First remove all the entries (frames will be unrefed) + g_hash_table_remove_all(this->dpb_surface_table); + //Then unref the table + g_hash_table_unref(this->dpb_surface_table); + this->dpb_surface_table = NULL; + } + + Lock(); + this->initialized = TRUE; + this->parse_in_progress = FALSE; + + //Close the parser + pret = vbp_close(this->parser_handle); + this->parser_handle = NULL; + if (pret != VBP_OK) { + LOG_E( "Error closing parser\n"); + } + Unlock(); +} + +MIX_RESULT MixVideoFormat_H264::Initialize( + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display) { + + uint32 pret = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + enum _vbp_parser_type ptype = VBP_H264; + vbp_data_h264 *data = NULL; + MixIOVec *header = NULL; + + if (config_params == NULL || frame_mgr == NULL || + input_buf_pool == NULL || va_display == NULL) { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + // chain up parent method + MixVideoFormat::Initialize(config_params, frame_mgr, input_buf_pool, + surface_pool, va_display); + + /* Chainup parent method.
*/ + LOG_V( "Locking\n"); + //From now on, we exit this function through cleanup: + Lock(); + + this->surfacepool = mix_surfacepool_new(); + *surface_pool = this->surfacepool; + + if (NULL == this->surfacepool) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "parent->surfacepool == NULL.\n"); + goto CLEAN_UP; + } + + //Create our table of Decoded Picture Buffer "in use" surfaces + this->dpb_surface_table = g_hash_table_new_full(NULL, NULL, + mix_videofmt_h264_destroy_DPB_key, mix_videofmt_h264_destroy_DPB_value); + + if (NULL == this->dpb_surface_table) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating dpb surface table\n"); + goto CLEAN_UP; + } + + ret = mix_videoconfigparamsdec_get_extra_surface_allocation( + config_params, &(this->extra_surfaces)); + + if (ret != MIX_RESULT_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get extra surface allocation setting\n"); + goto CLEAN_UP; + } + + LOG_V( "Before vbp_open\n"); + //Load the bitstream parser + pret = vbp_open(ptype, &(this->parser_handle)); + LOG_V( "After vbp_open\n"); + + if (VBP_OK != pret) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error opening parser\n"); + goto CLEAN_UP; + } + LOG_V( "Opened parser\n"); + + ret = mix_videoconfigparamsdec_get_header(config_params, &header); + + if ((MIX_RESULT_SUCCESS != ret) || (NULL == header)) { + // Delay initializing VA if codec configuration data is not ready, but don't return an error. + ret = MIX_RESULT_SUCCESS; + LOG_W( "Codec data is not available in the configuration parameter.\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling parse on header data, handle %d\n", (int)(this->parser_handle)); + + pret = vbp_parse(this->parser_handle, header->data, header->data_size, TRUE); + + if ((VBP_OK != pret) && (VBP_DONE != pret)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error parsing header data\n"); + goto CLEAN_UP; + } + + LOG_V( "Parsed header\n"); + + //Get the header data and save + pret = vbp_query(this->parser_handle, (void **)&data); + + if ((VBP_OK != pret) || (NULL == data)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error reading parsed header data\n"); + goto CLEAN_UP; + } + + LOG_V( "Queried parser for header data\n"); + + _update_config_params(data); + + ret = _initialize_va(data); + if (MIX_RESULT_SUCCESS != ret) { + LOG_E( "Error initializing va. \n"); + goto CLEAN_UP; + } + +CLEAN_UP: + if (MIX_RESULT_SUCCESS != ret) { + pret = vbp_close(this->parser_handle); + this->parser_handle = NULL; + this->initialized = FALSE; + } else { + this->initialized = TRUE; + } + if (NULL != header) { + if (NULL != header->data) + g_free(header->data); + g_free(header); + header = NULL; + } + LOG_V( "Unlocking\n"); + Unlock(); + return ret; + +} + +MIX_RESULT MixVideoFormat_H264::Decode( + MixBuffer * bufin[], gint bufincnt, + MixVideoDecodeParams * decode_params) { + + int i = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + guint64 ts = 0; + gboolean discontinuity = FALSE; + + LOG_V( "Begin\n"); + + if (NULL == bufin || NULL == decode_params || 0 == bufincnt) { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + /* Chainup parent method. + We are not chaining up to parent method for now.
+ */ +#if 0 + MixVideoFormat::Decode(bufin, bufincnt, decode_params); +#endif + + ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); + if (ret != MIX_RESULT_SUCCESS) { + // never happen + return MIX_RESULT_FAIL; + } + + ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); + if (ret != MIX_RESULT_SUCCESS) { + // never happen + return MIX_RESULT_FAIL; + } + + decode_params->new_sequence = FALSE; + + //From now on, we exit this function through cleanup: + LOG_V( "Locking\n"); + Lock(); + + LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_h264_counter++, ts); + + for (i = 0; i < bufincnt; i++) { + LOG_V( "Decoding a buf %x, size %d\n", (guint)bufin[i]->data, bufin[i]->size); + // decode a buffer at a time + ret = _decode_a_buffer(bufin[i], ts, discontinuity, decode_params); + if (MIX_RESULT_SUCCESS != ret) { + LOG_E("mix_videofmt_h264_decode_a_buffer failed.\n"); + goto CLEAN_UP; + } + } + +CLEAN_UP: + LOG_V( "Unlocking\n"); + Unlock(); + LOG_V( "End\n"); + return ret; +} + +MIX_RESULT MixVideoFormat_H264::Flush() { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V( "Begin\n"); + uint32 pret = 0; + /* Chainup parent method. + We are not chaining up to parent method for now. + */ +#if 0 + MixVideoFormat::Flush(); +#endif + Lock(); + // drop any decode-pending picture, and ignore return value + _decode_end(TRUE); + + //Clear parse_in_progress flag and current timestamp + this->parse_in_progress = FALSE; + this->discontinuity_frame_in_progress = FALSE; + this->current_timestamp = (guint64)-1; + + //Clear the DPB surface table + g_hash_table_remove_all(this->dpb_surface_table); + + //Call parser flush + pret = vbp_flush(this->parser_handle); + if (VBP_OK != pret) + ret = MIX_RESULT_FAIL; + + Unlock(); + LOG_V( "End\n"); + return ret; + +} + +MIX_RESULT MixVideoFormat_H264::EndOfStream() { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V( "Begin\n"); + /* Chainup parent method. + We are not chaining up to parent method for now. + */ +#if 0 + if (parent_class->eos) { + return parent_class->eos(mix, msg); + } +#endif + Lock(); + // finished decoding the pending frame + _decode_end(FALSE); + Unlock(); + //Call Frame Manager with _eos() + ret = mix_framemanager_eos(this->framemgr); + LOG_V( "End\n"); + return ret; +} + + +MixVideoFormat_H264 * mix_videoformat_h264_new(void) { + return new MixVideoFormat_H264(); +} + + +MixVideoFormat_H264 *mix_videoformat_h264_ref(MixVideoFormat_H264 * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} + +MixVideoFormat_H264 *mix_videoformat_h264_unref(MixVideoFormat_H264 *mix) { + if (NULL != mix) + return MIX_VIDEOFORMAT_H264(mix->Unref()); + else + return mix; +} + +MIX_RESULT MixVideoFormat_H264::_update_config_params(vbp_data_h264 *data) { + if (0 == this->picture_width || 0 == this->picture_height || data->new_sps) { + this->picture_width = + (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16; + this->picture_height = + (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16; + + mix_videoconfigparamsdec_set_picture_res( + this->config_params, this->picture_width, this->picture_height); + } + + // video_range has default value of 0. 
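For reference, the picture-size arithmetic used by _update_config_params above comes straight from the H.264 SPS: the coded frame size is signalled in 16x16 macroblocks, stored minus one, so the pixel dimensions are (count + 1) * 16. A minimal standalone sketch of that calculation (SpsDims is a stand-in for the two pic_parms fields, not the real vbp type):

#include <cstdio>

// Stand-in for the two SPS-derived fields read by _update_config_params.
struct SpsDims {
    unsigned picture_width_in_mbs_minus1;
    unsigned picture_height_in_mbs_minus1;
};

int main() {
    SpsDims d = {44, 29};  // a 45 x 30 macroblock frame
    unsigned width  = (d.picture_width_in_mbs_minus1 + 1) * 16;   // 720
    unsigned height = (d.picture_height_in_mbs_minus1 + 1) * 16;  // 480
    std::printf("coded picture size: %ux%u\n", width, height);
    return 0;
}

Frame sizes that are not multiples of 16 (1920x1080, for instance) are coded with extra rows plus SPS cropping offsets; both the sketch and the code above ignore cropping, which is why the computed size is the coded size rather than the display size.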
+ mix_videoconfigparamsdec_set_video_range(this->config_params, + data->codec_data->video_full_range_flag); + + uint8 color_matrix; + switch (data->codec_data->matrix_coefficients) { + case 1: + color_matrix = VA_SRC_BT709; + break; + // ITU-R Recommendation BT.470-6 System B, G (MP4), same as + // SMPTE 170M/BT601 + case 5: + case 6: + color_matrix = VA_SRC_BT601; + break; + default: + // unknown color matrix, set to 0 so color space flag will not be set. + color_matrix = 0; + break; + } + mix_videoconfigparamsdec_set_color_matrix(this->config_params, color_matrix); + mix_videoconfigparamsdec_set_pixel_aspect_ratio( + this->config_params, + data->codec_data->sar_width, + data->codec_data->sar_height); + mix_videoconfigparamsdec_set_bit_rate( + this->config_params, data->codec_data->bit_rate); + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT MixVideoFormat_H264::_initialize_va(vbp_data_h264 *data) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + VAConfigAttrib attrib; + if (this->va_initialized) { + LOG_W("va already initialized.\n"); + return MIX_RESULT_SUCCESS; + } + LOG_V( "Begin\n"); + //We are requesting RT attributes + attrib.type = VAConfigAttribRTFormat; + attrib.value = VA_RT_FORMAT_YUV420; + + //Initialize and save the VA config ID + //We use high profile for all kinds of H.264 profiles (baseline, main and high) + vret = vaCreateConfig(this->va_display, VAProfileH264High, + VAEntrypointVLD, &attrib, 1, &(this->va_config)); + + if (VA_STATUS_SUCCESS != vret) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E("vaCreateConfig failed\n"); + return ret; + } + + LOG_V( "Codec data says num_ref_frames is %d\n", data->codec_data->num_ref_frames); + + // handle both frame and field coding for interlaced content + int num_ref_pictures = data->codec_data->num_ref_frames; + + //Adding 1 to work around VBLANK issue, and another 1 to compensate for the cached frame that + // will not start decoding until a new frame is received. + this->va_num_surfaces = 1 + 1 + this->extra_surfaces + + (((num_ref_pictures + 3) < MIX_VIDEO_H264_SURFACE_NUM) ? (num_ref_pictures + 3) : MIX_VIDEO_H264_SURFACE_NUM); + + this->va_surfaces = + reinterpret_cast<VASurfaceID*>(g_malloc(sizeof(VASurfaceID)*this->va_num_surfaces)); + if (NULL == this->va_surfaces){ + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "parent->va_surfaces == NULL. \n"); + return ret; + } + + LOG_V( "Codec data says picture size is %d x %d\n", + (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, + (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16); + LOG_V( "getcaps says picture size is %d x %d\n", this->picture_width, this->picture_height); + + vret = vaCreateSurfaces( + this->va_display, + this->picture_width, + this->picture_height, + VA_RT_FORMAT_YUV420, + this->va_num_surfaces, + this->va_surfaces); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "Error allocating surfaces\n"); + return ret; + } + + LOG_V( "Created %d libva surfaces\n", this->va_num_surfaces); + + //Initialize the surface pool + ret = mix_surfacepool_initialize( + this->surfacepool, + this->va_surfaces, + this->va_num_surfaces, + this->va_display); + + switch (ret) { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing.
+ default: + ret = MIX_RESULT_ALREADY_INIT; + LOG_E( "Error init surface pool\n"); + return ret; + break; + } + + if (data->codec_data->pic_order_cnt_type == 0) { + int max = (int)pow(2, data->codec_data->log2_max_pic_order_cnt_lsb_minus4 + 4); + mix_framemanager_set_max_picture_number(this->framemgr, max); + } + + //Initialize and save the VA context ID + //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 + vret = vaCreateContext( + this->va_display, + this->va_config, + this->picture_width, + this->picture_height, + 0, // no flag set + this->va_surfaces, + this->va_num_surfaces, + &(this->va_context)); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + return ret; + } + + this->va_initialized = TRUE; + + return ret; +} + + +MIX_RESULT MixVideoFormat_H264::_handle_new_sequence(vbp_data_h264 *data) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V("new sequence is received.\n"); + + // original picture resolution + uint32 width = this->picture_width; + uint32 height = this->picture_height; + + _update_config_params(data); + + if (width != this->picture_width || height != this->picture_height) { + // flush frame manager only if resolution is changed. + ret = mix_framemanager_flush(this->framemgr); + } + // TO DO: re-initialize VA + return ret; +} + + +MIX_RESULT MixVideoFormat_H264::_update_ref_pic_list( + VAPictureParameterBufferH264* picture_params, + VASliceParameterBufferH264* slice_params) { + //Do slice parameters + //First patch up the List0 and List1 surface IDs + uint32 j = 0; + guint poc = 0; + gpointer video_frame = NULL; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + for (; j <= slice_params->num_ref_idx_l0_active_minus1; j++) { + if (!(slice_params->RefPicList0[j].flags & VA_PICTURE_H264_INVALID)) { + poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList0[j])); + video_frame = g_hash_table_lookup(this->dpb_surface_table, (gpointer)poc); + if (video_frame == NULL) { + LOG_E("unable to find surface of picture %d (current picture %d).", + poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic))); + ret = MIX_RESULT_DROPFRAME; //return non-fatal error + return ret; + } else { + slice_params->RefPicList0[j].picture_id = + ((MixVideoFrame *)video_frame)->frame_id; + } + } + } + + if ((slice_params->slice_type == 1) || (slice_params->slice_type == 6)) { + for (j = 0; j <= slice_params->num_ref_idx_l1_active_minus1; j++) { + if (!(slice_params->RefPicList1[j].flags & VA_PICTURE_H264_INVALID)) { + poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList1[j])); + video_frame = g_hash_table_lookup(this->dpb_surface_table, (gpointer)poc); + if (video_frame == NULL) { + LOG_E("unable to find surface of picture %d (current picture %d).", + poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic))); + ret = MIX_RESULT_DROPFRAME; //return non-fatal error + return ret; + } else { + slice_params->RefPicList1[j].picture_id = + ((MixVideoFrame *)video_frame)->frame_id; + } + } + } + } + return ret; +} + +MIX_RESULT MixVideoFormat_H264::_decode_a_slice( + vbp_data_h264 *data, int picture_index, int slice_index) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + VADisplay vadisplay = NULL; + VAContextID vacontext; + guint buffer_id_cnt = 0; + + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID buffer_ids[4]; + + LOG_V( "Begin\n"); + //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + vbp_picture_data_h264* 
pic_data = &(data->pic_data[picture_index]); + vbp_slice_data_h264* slice_data = &(pic_data->slc_data[slice_index]); + VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms; + VASliceParameterBufferH264* slice_params = &(slice_data->slc_parms); + vadisplay = this->va_display; + vacontext = this->va_context; + +#ifdef DECODER_ROBUSTNESS + if ((slice_params->first_mb_in_slice == 0) || (!this->end_picture_pending)) +#else + if (slice_params->first_mb_in_slice == 0) +#endif + { + // this is the first slice of the picture + if (this->end_picture_pending) { + // interlaced content, decoding the first field + vret = vaEndPicture(vadisplay, vacontext); + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("vaEndPicture failed.\n"); + LOG_V( "End\n"); + return ret; + } + // for interlaced content, top field may be valid only after the second field is parsed + mix_videoframe_set_displayorder(this->video_frame, pic_params->CurrPic.TopFieldOrderCnt); + } + + gulong surface = 0; + LOG_V("mix->video_frame = 0x%x\n", (unsigned)this->video_frame); + + //Get our surface ID from the frame object + ret = mix_videoframe_get_frame_id(this->video_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting surface ID from frame object\n"); + LOG_V( "End\n"); + return ret; } + +#ifdef DECODER_ROBUSTNESS + LOG_V( "Updating DPB for libva\n"); + //Now handle the reference frames and surface IDs for DPB and current frame + _handle_ref_frames(pic_params, this->video_frame); +#ifdef HACK_DPB + //We have to provide a hacked DPB rather than complete DPB for libva as workaround + ret = mix_videofmt_h264_hack_dpb(this, pic_data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error reference frame not found\n"); + //Need to remove the frame we inserted in _handle_ref_frames above, since we are not going to decode it + _cleanup_ref_frame(pic_params, this->video_frame); + LOG_V( "End\n"); + return ret; + } +#endif + LOG_V( "Calling vaBeginPicture\n"); + //Now we can begin the picture + vret = vaBeginPicture(vadisplay, vacontext, surface); + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaBeginPicture\n"); + LOG_V( "End\n"); + return ret; + } + // vaBeginPicture needs a matching vaEndPicture + this->end_picture_pending = TRUE; +#else + LOG_V( "Calling vaBeginPicture\n"); + + //Now we can begin the picture + vret = vaBeginPicture(vadisplay, vacontext, surface); + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaBeginPicture\n"); + LOG_V( "End\n"); + return ret; + } + // vaBeginPicture needs a matching vaEndPicture + this->end_picture_pending = TRUE; + LOG_V( "Updating DPB for libva\n"); + //Now handle the reference frames and surface IDs for DPB and current frame + _handle_ref_frames(pic_params, this->video_frame); + +#ifdef HACK_DPB + //We have to provide a hacked DPB rather than complete DPB for libva as workaround + ret = mix_videofmt_h264_hack_dpb(this, pic_data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error reference frame not found\n"); + LOG_V( "End\n"); + return ret; + } +#endif + +#endif + //Libva buffer set up + LOG_V( "Creating libva picture parameter buffer\n"); + LOG_V( "picture parameter buffer shows num_ref_frames is %d\n", pic_params->num_ref_frames); + //First the picture parameter buffer + vret = vaCreateBuffer( + vadisplay, + vacontext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferH264), + 1, + pic_params, + &buffer_ids[buffer_id_cnt]); + + if
(vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + LOG_V( "End\n"); + return ret; + } + + buffer_id_cnt++; + LOG_V( "Creating libva IQMatrix buffer\n"); + + //Then the IQ matrix buffer + vret = vaCreateBuffer( + vadisplay, + vacontext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferH264), + 1, + data->IQ_matrix_buf, + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + LOG_V( "End\n"); + return ret; + } + buffer_id_cnt++; + } + +#ifndef DECODER_ROBUSTNESS + if (!this->end_picture_pending) { + LOG_E("first slice is lost\n"); + ret = MIX_RESULT_DROPFRAME; + LOG_V( "End\n"); + return ret; + + } +#endif + + //Now for slices + + ret = _update_ref_pic_list(pic_params, slice_params); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videofmt_h264_update_ref_pic_list failed.\n"); + LOG_V( "End\n"); + return ret; + } + + LOG_V( "Creating libva slice parameter buffer\n"); + + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264), + 1, + slice_params, + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + LOG_V( "End\n"); + return ret; + } + + buffer_id_cnt++; + + //Do slice data + + //slice data buffer pointer + //Note that this is the original data buffer ptr; + // offset to the actual slice data is provided in + // slice_data_offset in VASliceParameterBufferH264 + + LOG_V( "Creating libva slice data buffer, using slice address %x, with offset %d and size %u\n", + (guint)slice_data->buffer_addr, slice_params->slice_data_offset, slice_data->slice_size); + + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceDataBufferType, + slice_data->slice_size, //size + 1, //num_elements + slice_data->buffer_addr + slice_data->slice_offset, + &buffer_ids[buffer_id_cnt]); + + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + LOG_V( "End\n"); + return ret; + + } + + + LOG_V( "Calling vaRenderPicture\n"); + + //Render the picture + vret = vaRenderPicture( + vadisplay, + vacontext, + buffer_ids, + buffer_id_cnt); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaRenderPicture\n"); + LOG_V( "End\n"); + return ret; + } + + LOG_V( "End\n"); + return ret; + +} + +MIX_RESULT MixVideoFormat_H264::_decode_end(gboolean drop_picture) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + +#ifdef DECODER_ROBUSTNESS + //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); +#else + //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); +#endif + + LOG_V("Begin\n"); + if (!this->end_picture_pending) { + if (this->video_frame) { + ret = MIX_RESULT_FAIL; + LOG_E("Unexpected: video_frame is not unreferenced.\n"); + } + goto CLEAN_UP; + } + + if (this->video_frame == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E("Unexpected: video_frame has been unreferenced.\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling vaEndPicture\n"); + vret = vaEndPicture(this->va_display, this->va_context); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaEndPicture\n"); + goto
CLEAN_UP; + } + +#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ + + LOG_V( "Calling vaSyncSurface\n"); + + //Decode the picture + vret = vaSyncSurface(parent->va_display, parent->video_frame->frame_id); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaSyncSurface\n"); + CLEAN_UP; + } +#endif + + if (drop_picture) { + // we are asked to drop this decoded picture + mix_videoframe_unref(this->video_frame); + this->video_frame = NULL; + goto CLEAN_UP; + } + + LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", + this->video_frame->timestamp); + +#ifdef DECODER_ROBUSTNESS + if (this->last_decoded_frame) + mix_videoframe_unref(this->last_decoded_frame); + this->last_decoded_frame = this->video_frame; + mix_videoframe_ref(this->last_decoded_frame); +#endif + + //Enqueue the decoded frame using frame manager + ret = mix_framemanager_enqueue(this->framemgr, this->video_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error enqueuing frame object\n"); + goto CLEAN_UP; + } else { + // video frame is passed to frame manager + this->video_frame = NULL; + LOG_V("video_frame is assigned to be NULL !\n"); + } + +CLEAN_UP: + if (this->video_frame) { + /* this always indicates an error */ + mix_videoframe_unref(this->video_frame); + this->video_frame = NULL; + } + this->end_picture_pending = FALSE; + LOG_V("End\n"); + return ret; +} + +MIX_RESULT MixVideoFormat_H264::_decode_continue(vbp_data_h264 *data) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + uint32 i, j; + vbp_picture_data_h264* pic_data = NULL; + LOG_V("Begin\n"); + for (i = 0; i < data->num_pictures; i++) { + pic_data = &(data->pic_data[i]); + if (pic_data->pic_parms == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E("pic_data->pic_parms is NULL.\n"); + LOG_V("End\n"); + return ret; + } + + if (pic_data->slc_data == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E("pic_data->slc_data is NULL.\n"); + LOG_V("End\n"); + return ret; + } + + if (pic_data->num_slices == 0) { + ret = MIX_RESULT_FAIL; + LOG_E("pic_data->num_slices == 0.\n"); + LOG_V("End\n"); + return ret; + } + + LOG_V( "num_slices is %d\n", pic_data->num_slices); + for (j = 0; j < pic_data->num_slices; j++) { + LOG_V( "Decoding slice %d\n", j); + ret = _decode_a_slice(data, i, j); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "mix_videofmt_h264_decode_a_slice failed, error = %#X.", ret); + LOG_V("End\n"); + return ret; + } + } + } + + LOG_V("End\n"); + return ret; +} + + + +MIX_RESULT MixVideoFormat_H264::_set_frame_type(vbp_data_h264 *data) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + //Set the picture type (I, B or P frame) + //For H.264 we use the first encountered slice type for this (check - may need to change later to search all slices for B type) + MixFrameType frame_type = TYPE_INVALID; + switch (data->pic_data[0].slc_data[0].slc_parms.slice_type) { + case 0: + case 3: + case 5: + case 8: + frame_type = TYPE_P; + break; + case 1: + case 6: + frame_type = TYPE_B; + break; + case 2: + case 4: + case 7: + case 9: + frame_type = TYPE_I; + break; + default: + break; + } + + //Do not have to check for B frames after a seek + //Note: Demux should seek to IDR (instantaneous decoding refresh) frame, otherwise + // DPB will not be correct and frames may come in with invalid references + // This will be detected when DPB is checked for valid mapped surfaces and + // error returned from there. 
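The slice_type switch in _set_frame_type above follows the modulo-5 rule of the H.264 specification: slice_type ranges over 0..9, values 5..9 additionally assert that every slice in the picture has the same type, and slice_type % 5 yields P=0, B=1, I=2, SP=3, SI=4. A compact standalone sketch of the same classification (the enum values are illustrative, not MixFrameType's actual ones; like the switch above, SP folds into P and SI into I):

#include <cstdio>

enum FrameType { TYPE_I, TYPE_P, TYPE_B, TYPE_INVALID };

// Equivalent to the first-slice switch in _set_frame_type.
static FrameType classify(unsigned slice_type) {
    if (slice_type > 9)
        return TYPE_INVALID;        // out of range per the spec
    switch (slice_type % 5) {
    case 0: case 3: return TYPE_P;  // P and SP slices
    case 1:         return TYPE_B;  // B slices
    case 2: case 4: return TYPE_I;  // I and SI slices
    }
    return TYPE_INVALID;            // not reachable
}

int main() {
    const char *names[] = { "I", "P", "B", "INVALID" };
    for (unsigned st = 0; st <= 9; ++st)
        std::printf("slice_type %u -> %s\n", st, names[classify(st)]);
    return 0;
}

As the comment above notes, only the first slice of the picture is inspected, so the classification is only as good as the first slice's type; that is also why the demux is expected to seek to an IDR frame.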
+ + LOG_V( "frame type is %d\n", frame_type); + + //Set the frame type for the frame object (used in reordering by frame manager) + ret = mix_videoframe_set_frame_type(this->video_frame, frame_type); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error setting frame type on frame\n"); + } + return ret; +} + + +MIX_RESULT MixVideoFormat_H264::_set_frame_structure( + vbp_data_h264 *data) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + if (data->pic_data[0].pic_parms->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD) { + mix_videoframe_set_frame_structure(this->video_frame, VA_BOTTOM_FIELD | VA_TOP_FIELD); + } else { + mix_videoframe_set_frame_structure(this->video_frame, VA_FRAME_PICTURE); + } + return ret; +} + + +MIX_RESULT MixVideoFormat_H264::_decode_begin(vbp_data_h264 *data) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + //Get a frame from the surface pool + LOG_V("Begin\n"); + ret = mix_surfacepool_get(this->surfacepool, &(this->video_frame)); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting frame from surfacepool\n"); + return ret; + } + + /* the following calls will always succeed */ + // set frame type + ret = _set_frame_type(data); + // set frame structure + ret = _set_frame_structure(data); + //Set the discontinuity flag + mix_videoframe_set_discontinuity(this->video_frame, this->discontinuity_frame_in_progress); + //Set the timestamp + mix_videoframe_set_timestamp(this->video_frame, this->current_timestamp); + // Set displayorder + ret = mix_videoframe_set_displayorder(this->video_frame, + data->pic_data[0].pic_parms->CurrPic.TopFieldOrderCnt); + if(ret != MIX_RESULT_SUCCESS) { + LOG_E("Error setting displayorder\n"); + return ret; + } + ret = _decode_continue(data); + LOG_V("End\n"); + return ret; +} + +MIX_RESULT MixVideoFormat_H264::_decode_a_buffer( + MixBuffer * bufin, + guint64 ts, + gboolean discontinuity, + MixVideoDecodeParams * decode_params) { + uint32 pret = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + vbp_data_h264 *data = NULL; + + LOG_V( "Begin\n"); + LOG_V( "Calling parse for current frame, parse handle %d\n", (int)this->parser_handle); + pret = vbp_parse(this->parser_handle, + bufin->data, + bufin->size, + FALSE); + + LOG_V( "Called parse for current frame\n"); + if ((pret != VBP_DONE) &&(pret != VBP_OK)) { + ret = MIX_RESULT_ERROR_PROCESS_STREAM; // MIX_RESULT_DROPFRAME; + LOG_E( "vbp_parse failed.\n"); + LOG_V("End\n"); + return ret; + } + + //query for data + pret = vbp_query(this->parser_handle, (void**)&data); + + if ((pret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "vbp_query failed.\n"); + LOG_V("End\n"); + return ret; + + } + LOG_V( "Called query for current frame\n"); + + if (data->has_sps == 0 || data->has_pps == 0) { + ret = MIX_RESULT_MISSING_CONFIG; // MIX_RESULT_SUCCESS; + LOG_V("SPS or PPS is not available.\n"); + LOG_V("End\n"); + return ret; + + } + + if (data->new_sps) { + decode_params->new_sequence = data->new_sps; + + ret = _handle_new_sequence(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_video_h264_handle_new_sequence failed.\n"); + LOG_V("End\n"); + return ret; + + } + } + + if (this->va_initialized == FALSE) { + _update_config_params(data); + + LOG_V("try initializing VA...\n"); + ret = _initialize_va(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_videofmt_h264_initialize_va failed.\n"); + LOG_V("End\n"); + return ret; + } + } + + // first pic_data always exists, check if any slice is parsed + if (data->pic_data[0].num_slices == 0) { + ret = MIX_RESULT_SUCCESS; + LOG_V("slice is not available.\n"); + 
LOG_V("End\n"); + return ret; + } + + guint64 last_ts = this->current_timestamp; + this->current_timestamp = ts; + this->discontinuity_frame_in_progress = discontinuity; + + LOG_V("ts = %lli last_ts = %lli\n", ts, last_ts); + + if (last_ts != ts) { + // finish decoding the last frame + ret = _decode_end(FALSE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_videofmt_h264_decode_end failed.\n"); + LOG_V("End\n"); + return ret; + } + + // start decoding a new frame + ret = _decode_begin(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_videofmt_h264_decode_begin failed.\n"); + LOG_V("End\n"); + return ret; + } + } else { + // parital frame + LOG_V("partial frame handling...\n"); + ret = _decode_continue(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_videofmt_h264_decode_continue failed.\n"); + LOG_V("End\n"); + return ret; + } + } + + LOG_V("End\n"); + return ret; +} + + + +#define HACK_DPB +#ifdef HACK_DPB +static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, + vbp_picture_data_h264* pic_data + ) +{ + + gboolean found = FALSE; + guint tflags = 0; + VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms; + VAPictureH264 *pRefList = NULL; + uint32 i = 0, j = 0, k = 0, list = 0; + + MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + + //Set the surface ID for everything in the parser DPB to INVALID + for (i = 0; i < 16; i++) + { + pic_params->ReferenceFrames[i].picture_id = VA_INVALID_SURFACE; + pic_params->ReferenceFrames[i].frame_idx = -1; + pic_params->ReferenceFrames[i].TopFieldOrderCnt = -1; + pic_params->ReferenceFrames[i].BottomFieldOrderCnt = -1; + pic_params->ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID; //assuming we don't need to OR with existing flags + } + + pic_params->num_ref_frames = 0; + + for (i = 0; i < pic_data->num_slices; i++) + { + + //Copy from the List0 and List1 surface IDs + pRefList = pic_data->slc_data[i].slc_parms.RefPicList0; + for (list = 0; list < 2; list++) + { + for (j = 0; j < 32; j++) + { + if (pRefList[j].flags & VA_PICTURE_H264_INVALID) + { + break; //no more valid reference frames in this list + } + found = FALSE; + for (k = 0; k < pic_params->num_ref_frames; k++) + { + if (pic_params->ReferenceFrames[k].TopFieldOrderCnt == pRefList[j].TopFieldOrderCnt) + { + ///check for complementary field + tflags = pic_params->ReferenceFrames[k].flags | pRefList[j].flags; + //If both TOP and BOTTOM are set, we'll clear those flags + if ((tflags & VA_PICTURE_H264_TOP_FIELD) && + (tflags & VA_PICTURE_H264_TOP_FIELD)) + pic_params->ReferenceFrames[k].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; + found = TRUE; //already in the DPB; will not add this one + break; + } + } + if (!found) + { + guint poc = mix_videofmt_h264_get_poc(&(pRefList[j])); + gpointer video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc); + +#ifdef DECODER_ROBUSTNESS + if (!video_frame) + { + if (!self->last_decoded_frame) + { + //No saved reference frame, can't recover this one + return MIX_RESULT_DROPFRAME; + } + + pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = + ((MixVideoFrame *)self->last_decoded_frame)->frame_id; + LOG_V( "Reference frame not found, substituting %d\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); + + } + else + { + pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = + ((MixVideoFrame *)video_frame)->frame_id; + } +#else + if (!video_frame) return MIX_RESULT_DROPFRAME; //return non-fatal error + + 
pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = + ((MixVideoFrame *)video_frame)->frame_id; +#endif + + LOG_V( "Inserting frame id %d into DPB\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); + + pic_params->ReferenceFrames[pic_params->num_ref_frames].flags = + pRefList[j].flags; + pic_params->ReferenceFrames[pic_params->num_ref_frames].frame_idx = + pRefList[j].frame_idx; + pic_params->ReferenceFrames[pic_params->num_ref_frames].TopFieldOrderCnt = + pRefList[j].TopFieldOrderCnt; + pic_params->ReferenceFrames[pic_params->num_ref_frames++].BottomFieldOrderCnt = + pRefList[j].BottomFieldOrderCnt; + } + + } + pRefList = pic_data->slc_data[i].slc_parms.RefPicList1; + } + + } + return MIX_RESULT_SUCCESS; +} +#endif + +MIX_RESULT MixVideoFormat_H264::_handle_ref_frames( + VAPictureParameterBufferH264* pic_params, + MixVideoFrame * current_frame) { + + guint poc = 0; + LOG_V( "Begin\n"); + + if (current_frame == NULL || pic_params == NULL) { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d. Surface ID is %d\n", + pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, + pic_params->CurrPic.BottomFieldOrderCnt, (gint) current_frame->frame_id); + +#ifdef MIX_LOG_ENABLE + if (pic_params->CurrPic.flags & VA_PICTURE_H264_INVALID) + LOG_V( "Flags show VA_PICTURE_H264_INVALID\n"); + + if (pic_params->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD) + LOG_V( "Flags show VA_PICTURE_H264_TOP_FIELD\n"); + + if (pic_params->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD) + LOG_V( "Flags show VA_PICTURE_H264_BOTTOM_FIELD\n"); + + if (pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) + LOG_V( "Flags show VA_PICTURE_H264_SHORT_TERM_REFERENCE\n"); + + if (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE) + LOG_V( "Flags show VA_PICTURE_H264_LONG_TERM_REFERENCE\n"); +#endif + + //First we need to check the parser DPB against our DPB table + //So for each item in our DPB table, we look to see if it is in the parser DPB + //If it is not, it gets unrefed and removed +#ifdef MIX_LOG_ENABLE + guint num_removed = +#endif + g_hash_table_foreach_remove(this->dpb_surface_table, mix_videofmt_h264_check_in_DPB, pic_params); + + LOG_V( "%d entries removed from DPB surface table at this frame\n", num_removed); + + + MixVideoFrame *mvf = NULL; + gboolean found = FALSE; + //Set the surface ID for everything in the parser DPB + int i = 0; + for (; i < 16; i++) { + if (!(pic_params->ReferenceFrames[i].flags & VA_PICTURE_H264_INVALID)) { + poc = mix_videofmt_h264_get_poc(&(pic_params->ReferenceFrames[i])); + LOG_V( "Looking up poc %d in dpb table\n", poc); + found = g_hash_table_lookup_extended(this->dpb_surface_table, + (gpointer)poc, NULL, (gpointer*)&mvf); + if (found) { + pic_params->ReferenceFrames[i].picture_id = mvf->frame_id; + LOG_V( "Looked up poc %d in dpb table found frame ID %d\n", poc, (gint)mvf->frame_id); + } else { + LOG_V( "Looking up poc %d in dpb table did not find value\n", poc); + } + LOG_V( "For poc %d, set surface id for DPB index %d to %d\n", + poc, i, (gint)pic_params->ReferenceFrames[i].picture_id); + } + } + //Set picture_id for current picture + pic_params->CurrPic.picture_id = current_frame->frame_id; + + //Check to see if current frame is a reference frame + if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || + (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { + //Get current frame's POC +
poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); + //Increment the reference count for this frame + mix_videoframe_ref(current_frame); + LOG_V( "Inserting poc %d, surfaceID %d\n", poc, (gint)current_frame->frame_id); + //Add this frame to the DPB surface table + g_hash_table_insert(this->dpb_surface_table, (gpointer)poc, current_frame); + } + + LOG_V( "End\n"); + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT MixVideoFormat_H264::_cleanup_ref_frame( + VAPictureParameterBufferH264* pic_params, MixVideoFrame * current_frame) { + + guint poc = 0; + LOG_V( "Begin\n"); + + if (current_frame == NULL || pic_params == NULL) { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d. Surface ID is %d\n", + pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, + pic_params->CurrPic.BottomFieldOrderCnt, (gint) current_frame->frame_id); + + //Check to see if current frame is a reference frame + if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || + (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { + //Get current frame's POC + poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); + //We don't need to decrement the ref count for the video frame here; it's done elsewhere + LOG_V( "Removing poc %d, surfaceID %d\n", poc, (gint)current_frame->frame_id); + //Remove this frame from the DPB surface table + g_hash_table_remove(this->dpb_surface_table, (gpointer)poc); + } + LOG_V( "End\n"); + return MIX_RESULT_SUCCESS; +} + +guint mix_videofmt_h264_get_poc(VAPictureH264 *pic) { + if (pic == NULL) + return 0; + + if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) + return pic->BottomFieldOrderCnt; + + if (pic->flags & VA_PICTURE_H264_TOP_FIELD) + return pic->TopFieldOrderCnt; + + return pic->TopFieldOrderCnt; + +} + + +gboolean mix_videofmt_h264_check_in_DPB( + gpointer key, gpointer value, gpointer user_data) { + gboolean ret = TRUE; + if ((value == NULL) || (user_data == NULL)) //Note that 0 is valid value for key + return FALSE; + + VAPictureH264* vaPic = NULL; + int i = 0; + for (; i < 16; i++) + { + vaPic = &(((VAPictureParameterBufferH264*)user_data)->ReferenceFrames[i]); + if (vaPic->flags & VA_PICTURE_H264_INVALID) + continue; + + if ((guint)key == vaPic->TopFieldOrderCnt || + (guint)key == vaPic->BottomFieldOrderCnt) + { + ret = FALSE; + break; + } + } + return ret; +} + + +void mix_videofmt_h264_destroy_DPB_key(gpointer data) +{ + //TODO remove this method and don't register it with the hash table foreach call; it is no longer needed + LOG_V( "Begin, poc of %d\n", (guint)data); + LOG_V( "End\n"); + return; +} + +void mix_videofmt_h264_destroy_DPB_value(gpointer data) +{ + LOG_V( "Begin\n"); + if (data != NULL) { + mix_videoframe_unref((MixVideoFrame *)data); + } + LOG_V( "End\n"); + return; +} + diff --git a/mix_video/src/mixvideoformat_h264.h b/mix_video/src/mixvideoformat_h264.h index bcfe8bd..e2ea007 100644 --- a/mix_video/src/mixvideoformat_h264.h +++ b/mix_video/src/mixvideoformat_h264.h @@ -14,58 +14,64 @@ #define DECODER_ROBUSTNESS -G_BEGIN_DECLS - -#define MIX_VIDEO_H264_SURFACE_NUM 20 - -/* - * Type macros. 
- */ -#define MIX_TYPE_VIDEOFORMAT_H264 (mix_videoformat_h264_get_type ()) -#define MIX_VIDEOFORMAT_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMAT_H264, MixVideoFormat_H264)) -#define MIX_IS_VIDEOFORMAT_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMAT_H264)) -#define MIX_VIDEOFORMAT_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMAT_H264, MixVideoFormat_H264Class)) -#define MIX_IS_VIDEOFORMAT_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMAT_H264)) -#define MIX_VIDEOFORMAT_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMAT_H264, MixVideoFormat_H264Class)) - -typedef struct _MixVideoFormat_H264 MixVideoFormat_H264; -typedef struct _MixVideoFormat_H264Class MixVideoFormat_H264Class; - -struct _MixVideoFormat_H264 { - /*< public > */ - MixVideoFormat parent; +#define MIX_VIDEO_H264_SURFACE_NUM 20 + + +#define MIX_VIDEOFORMAT_H264(obj) (reinterpret_cast<MixVideoFormat_H264*>(obj)) +#define MIX_IS_VIDEOFORMAT_H264(obj) (NULL != MIX_VIDEOFORMAT_H264(obj)) + + + +class MixVideoFormat_H264 : public MixVideoFormat { +public: + MixVideoFormat_H264(); + virtual ~MixVideoFormat_H264(); + + virtual MIX_RESULT Initialize( + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); + virtual MIX_RESULT Decode( + MixBuffer * bufin[], gint bufincnt, + MixVideoDecodeParams * decode_params); + virtual MIX_RESULT Flush(); + virtual MIX_RESULT EndOfStream(); + +private: + // Local Help Func + MIX_RESULT _update_config_params(vbp_data_h264 *data); + MIX_RESULT _initialize_va(vbp_data_h264 *data); + MIX_RESULT _decode_a_buffer(MixBuffer * bufin, guint64 ts, + gboolean discontinuity, MixVideoDecodeParams * decode_params); + MIX_RESULT _decode_end(gboolean drop_picture); + MIX_RESULT _handle_new_sequence(vbp_data_h264 *data); + MIX_RESULT _decode_begin(vbp_data_h264 *data); + MIX_RESULT _decode_continue(vbp_data_h264 *data); + MIX_RESULT _set_frame_type(vbp_data_h264 *data); + MIX_RESULT _set_frame_structure(vbp_data_h264 *data); + MIX_RESULT _update_ref_pic_list(VAPictureParameterBufferH264* picture_params, + VASliceParameterBufferH264* slice_params); + MIX_RESULT _decode_a_slice(vbp_data_h264 *data, + int picture_index, int slice_index); + MIX_RESULT _cleanup_ref_frame( + VAPictureParameterBufferH264* pic_params, MixVideoFrame * current_frame); + MIX_RESULT _handle_ref_frames( + VAPictureParameterBufferH264* pic_params, + MixVideoFrame * current_frame); + +public: /*< public > */ - /*< private > */ GHashTable *dpb_surface_table; #ifdef DECODER_ROBUSTNESS //Can improve which frame is used for this at a later time - MixVideoFrame *last_decoded_frame; //last surface decoded, to be used as reference frame when reference frames are missing + MixVideoFrame *last_decoded_frame; //last surface decoded, to be used as reference frame when reference frames are missing #endif }; -/** - * MixVideoFormat_H264Class: - * - * MI-X Video object class - */ -struct _MixVideoFormat_H264Class { - /*< public > */ - MixVideoFormatClass parent_class; - - /* class members */ - - /*< public > */ -}; - -/** - * mix_videoformat_h264_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_videoformat_h264_get_type(void); /** * mix_videoformat_h264_new: @@ -90,39 +96,7 @@ MixVideoFormat_H264 *mix_videoformat_h264_ref(MixVideoFormat_H264 * mix); * * Decrement reference count of the object.
*/ -#define mix_videoformat_h264_unref(obj) g_object_unref (G_OBJECT(obj)) - -/* Class Methods */ - -/* H.264 vmethods */ -MIX_RESULT mix_videofmt_h264_getcaps(MixVideoFormat *mix, GString *msg); -MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); -MIX_RESULT mix_videofmt_h264_decode(MixVideoFormat *mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params); -MIX_RESULT mix_videofmt_h264_flush(MixVideoFormat *mix); -MIX_RESULT mix_videofmt_h264_eos(MixVideoFormat *mix); -MIX_RESULT mix_videofmt_h264_deinitialize(MixVideoFormat *mix); - -/* Local Methods */ - -MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix, - VAPictureParameterBufferH264* pic_params, - MixVideoFrame * current_frame); - - -MIX_RESULT mix_videofmt_h264_process_decode(MixVideoFormat *mix, - vbp_data_h264 *data, - guint64 timestamp, - gboolean discontinuity); - - -MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix, - guint64 timestamp); +MixVideoFormat_H264* mix_videoformat_h264_unref(MixVideoFormat_H264 *mix); /* Helper functions to manage the DPB table */ @@ -131,7 +105,4 @@ void mix_videofmt_h264_destroy_DPB_key(gpointer data); void mix_videofmt_h264_destroy_DPB_value(gpointer data); guint mix_videofmt_h264_get_poc(VAPictureH264 *pic); -G_END_DECLS - - #endif /* __MIX_VIDEOFORMAT_H264_H__ */ diff --git a/mix_video/src/mixvideoformat_mp42.c b/mix_video/src/mixvideoformat_mp42.c deleted file mode 100644 index 0ac6667..0000000 --- a/mix_video/src/mixvideoformat_mp42.c +++ /dev/null @@ -1,1415 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ -#include -#include -#include "mixvideolog.h" -#include "mixvideoformat_mp42.h" - -// Value of VOP type defined here follows MP4 spec, and has the same value of corresponding frame type -// defined in enumeration MixFrameType (except sprite (S)) -enum { - MP4_VOP_TYPE_I = 0, - MP4_VOP_TYPE_P = 1, - MP4_VOP_TYPE_B = 2, - MP4_VOP_TYPE_S = 3, -}; - - -/* The parent class. The pointer will be saved - * in this class's initialization. The pointer - * can be used for chaining method call if needed. 
- */ -static MixVideoFormatClass *parent_class = NULL; - -static void mix_videoformat_mp42_finalize(GObject * obj); - -/* - * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMAT - */ -G_DEFINE_TYPE( MixVideoFormat_MP42, mix_videoformat_mp42, MIX_TYPE_VIDEOFORMAT); - -static void mix_videoformat_mp42_init(MixVideoFormat_MP42 * self) { - MixVideoFormat *parent = MIX_VIDEOFORMAT(self); - - self->reference_frames[0] = NULL; - self->reference_frames[1] = NULL; - - self->last_frame = NULL; - self->last_vop_coding_type = -1; - self->last_vop_time_increment = 0; - self->next_nvop_for_PB_frame = FALSE; - self->iq_matrix_buf_sent = FALSE; - - /* NOTE: we don't need to do this here. - * This just demostrates how to access - * member varibles beloned to parent - */ - parent->initialized = FALSE; -} - -static void mix_videoformat_mp42_class_init(MixVideoFormat_MP42Class * klass) { - - /* root class */ - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* direct parent class */ - MixVideoFormatClass *video_format_class = MIX_VIDEOFORMAT_CLASS(klass); - - /* parent class for later use */ - parent_class = g_type_class_peek_parent(klass); - - /* setup finializer */ - gobject_class->finalize = mix_videoformat_mp42_finalize; - - /* setup vmethods with base implementation */ - video_format_class->getcaps = mix_videofmt_mp42_getcaps; - video_format_class->initialize = mix_videofmt_mp42_initialize; - video_format_class->decode = mix_videofmt_mp42_decode; - video_format_class->flush = mix_videofmt_mp42_flush; - video_format_class->eos = mix_videofmt_mp42_eos; - video_format_class->deinitialize = mix_videofmt_mp42_deinitialize; -} - -MixVideoFormat_MP42 *mix_videoformat_mp42_new(void) { - MixVideoFormat_MP42 *ret = g_object_new(MIX_TYPE_VIDEOFORMAT_MP42, NULL); - - return ret; -} - -void mix_videoformat_mp42_finalize(GObject * obj) { - /* clean up here. 
*/ - - /* MixVideoFormat_MP42 *mix = MIX_VIDEOFORMAT_MP42(obj); */ - GObjectClass *root_class = (GObjectClass *) parent_class; - MixVideoFormat *parent = NULL; - gint32 vbp_ret = VBP_OK; - MixVideoFormat_MP42 *self = NULL; - gint idx = 0; - - LOG_V("Begin\n"); - - if (obj == NULL) { - LOG_E("obj is NULL\n"); - return; - } - - if (!MIX_IS_VIDEOFORMAT_MP42(obj)) { - LOG_E("obj is not mixvideoformat_mp42\n"); - return; - } - - self = MIX_VIDEOFORMAT_MP42(obj); - parent = MIX_VIDEOFORMAT(self); - - //surfacepool is deallocated by parent - //inputbufqueue is deallocated by parent - //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces - - g_mutex_lock(parent->objectlock); - - /* unref reference frames */ - for (idx = 0; idx < 2; idx++) { - if (self->reference_frames[idx] != NULL) { - mix_videoframe_unref(self->reference_frames[idx]); - self->reference_frames[idx] = NULL; - } - } - if (self->last_frame) - { - mix_videoframe_unref(self->last_frame); - self->last_frame = NULL; - } - self->next_nvop_for_PB_frame = FALSE; - self->iq_matrix_buf_sent = FALSE; - - /* Reset state */ - parent->initialized = TRUE; - parent->end_picture_pending = FALSE; - parent->discontinuity_frame_in_progress = FALSE; - parent->current_timestamp = (guint64)-1; - - /* Close the parser */ - if (parent->parser_handle) - { - vbp_ret = vbp_close(parent->parser_handle); - parent->parser_handle = NULL; - } - - g_mutex_unlock(parent->objectlock); - - /* Chain up parent */ - if (root_class->finalize) { - root_class->finalize(obj); - } - - LOG_V("End\n"); -} - -MixVideoFormat_MP42 * -mix_videoformat_mp42_ref(MixVideoFormat_MP42 * mix) { - return (MixVideoFormat_MP42 *) g_object_ref(G_OBJECT(mix)); -} - -/* MP42 vmethods implementation */ -MIX_RESULT mix_videofmt_mp42_getcaps(MixVideoFormat *mix, GString *msg) { - -//This method is reserved for future use - - LOG_V("Begin\n"); - if (parent_class->getcaps) { - return parent_class->getcaps(mix, msg); - } - - LOG_V("End\n"); - return MIX_RESULT_NOTIMPL; -} - - -MIX_RESULT mix_videofmt_mp42_update_config_params( - MixVideoFormat *mix, - vbp_data_mp42 *data) -{ - MixVideoFormat *parent = MIX_VIDEOFORMAT(mix); - //MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); - - if (parent->picture_width == 0 || - parent->picture_height == 0 || - parent->picture_width < data->codec_data.video_object_layer_width || - parent->picture_height < data->codec_data.video_object_layer_height) - { - parent->picture_width = data->codec_data.video_object_layer_width; - parent->picture_height = data->codec_data.video_object_layer_height; - - mix_videoconfigparamsdec_set_picture_res( - mix->config_params, - parent->picture_width, - parent->picture_height); - } - - - // video_range has default value of 0. Y ranges from 16 to 235. - mix_videoconfigparamsdec_set_video_range(mix->config_params, data->codec_data.video_range); - - uint8 color_matrix; - - switch (data->codec_data.matrix_coefficients) - { - case 1: - color_matrix = VA_SRC_BT709; - break; - - // ITU-R Recommendation BT.470-6 System B, G (MP4), same as - // SMPTE 170M/BT601 - case 5: - case 6: - color_matrix = VA_SRC_BT601; - break; - - default: - // unknown color matrix, set to 0 so color space flag will not be set. 
- color_matrix = 0; - break; - } - mix_videoconfigparamsdec_set_color_matrix(mix->config_params, color_matrix); - - mix_videoconfigparamsdec_set_pixel_aspect_ratio( - mix->config_params, - data->codec_data.par_width, - data->codec_data.par_height); - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videofmt_mp42_initialize_va( - MixVideoFormat *mix, - vbp_data_mp42 *data) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - VAConfigAttrib attrib; - VAProfile va_profile; - MixVideoFormat *parent = MIX_VIDEOFORMAT(mix); - //MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); - - LOG_V( "Begin\n"); - - if (parent->va_initialized) - { - LOG_W("va already initialized.\n"); - return MIX_RESULT_SUCCESS; - } - - //We are requesting RT attributes - attrib.type = VAConfigAttribRTFormat; - attrib.value = VA_RT_FORMAT_YUV420; - - //Initialize and save the VA config ID - if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) - { - va_profile = VAProfileMPEG4AdvancedSimple; - } - else - { - va_profile = VAProfileMPEG4Simple; - } - - vret = vaCreateConfig( - parent->va_display, - va_profile, - VAEntrypointVLD, - &attrib, - 1, - &(parent->va_config)); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E("vaCreateConfig failed\n"); - goto cleanup; - } - - // add 1 more surface for packed frame (PB frame), and another one - // for partial frame handling - parent->va_num_surfaces = parent->extra_surfaces + 4 + 1 + 1; - //if (parent->va_num_surfaces > MIX_VIDEO_MP42_SURFACE_NUM) - // parent->va_num_surfaces = MIX_VIDEO_MP42_SURFACE_NUM; - - parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*parent->va_num_surfaces); - if (parent->va_surfaces == NULL) - { - ret = MIX_RESULT_FAIL; - LOG_E( "parent->va_surfaces == NULL. \n"); - goto cleanup; - } - - vret = vaCreateSurfaces( - parent->va_display, - parent->picture_width, - parent->picture_height, - VA_RT_FORMAT_YUV420, - parent->va_num_surfaces, - parent->va_surfaces); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error allocating surfaces\n"); - goto cleanup; - } - - LOG_V( "Created %d libva surfaces\n", parent->va_num_surfaces); - - //Initialize the surface pool - ret = mix_surfacepool_initialize( - parent->surfacepool, - parent->va_surfaces, - parent->va_num_surfaces, - parent->va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing. 
- default: - ret = MIX_RESULT_ALREADY_INIT; - LOG_E( "Error init surface pool\n"); - goto cleanup; - break; - } - - //Initialize and save the VA context ID - //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 - vret = vaCreateContext( - parent->va_display, - parent->va_config, - parent->picture_width, - parent->picture_height, - 0, - parent->va_surfaces, - parent->va_num_surfaces, - &(parent->va_context)); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing video driver\n"); - goto cleanup; - } - - parent->va_initialized = TRUE; - -cleanup: - /* nothing to clean up */ - - return ret; - -} - -MIX_RESULT mix_videofmt_mp42_decode_a_slice( - MixVideoFormat *mix, - vbp_data_mp42* data, - vbp_picture_data_mp42* pic_data) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - VADisplay vadisplay = NULL; - VAContextID vacontext; - guint buffer_id_cnt = 0; - gint frame_type = -1; - // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data - VABufferID buffer_ids[4]; - MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); - VAPictureParameterBufferMPEG4* pic_params = &(pic_data->picture_param); - vbp_slice_data_mp42* slice_data = &(pic_data->slice_data); - VASliceParameterBufferMPEG4* slice_params = &(slice_data->slice_param); - - LOG_V( "Begin\n"); - - vadisplay = mix->va_display; - vacontext = mix->va_context; - - if (!mix->end_picture_pending) - { - LOG_E("picture decoder is not started!\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - // update reference pictures - frame_type = pic_params->vop_fields.bits.vop_coding_type; - - switch (frame_type) { - case MP4_VOP_TYPE_I: - pic_params->forward_reference_picture = VA_INVALID_SURFACE; - pic_params->backward_reference_picture = VA_INVALID_SURFACE; - break; - - case MP4_VOP_TYPE_P: - pic_params-> forward_reference_picture - = self->reference_frames[0]->frame_id; - pic_params-> backward_reference_picture = VA_INVALID_SURFACE; - break; - - case MP4_VOP_TYPE_B: - pic_params->vop_fields.bits.backward_reference_vop_coding_type - = self->last_vop_coding_type; - pic_params->forward_reference_picture - = self->reference_frames[1]->frame_id; - pic_params->backward_reference_picture - = self->reference_frames[0]->frame_id; - break; - - case MP4_VOP_TYPE_S: - pic_params-> forward_reference_picture - = self->reference_frames[0]->frame_id; - pic_params-> backward_reference_picture = VA_INVALID_SURFACE; - break; - - default: - LOG_W("default, Will never reach here\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - break; - - } - - //Now for slices - - LOG_V( "Creating libva picture parameter buffer\n"); - - //First the picture parameter buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAPictureParameterBufferType, - sizeof(VAPictureParameterBufferMPEG4), - 1, - pic_params, - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto cleanup; - } - - buffer_id_cnt++; - - if (pic_params->vol_fields.bits.quant_type && self->iq_matrix_buf_sent == FALSE) - { - LOG_V( "Creating libva IQMatrix buffer\n"); - // only send IQ matrix for the first slice in the picture - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAIQMatrixBufferType, - sizeof(VAIQMatrixBufferMPEG4), - 1, - &(data->iq_matrix_buffer), - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from 
vaCreateBuffer\n"); - goto cleanup; - } - self->iq_matrix_buf_sent = TRUE; - buffer_id_cnt++; - } - - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceParameterBufferType, - sizeof(VASliceParameterBufferMPEG4), - 1, - slice_params, - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto cleanup; - } - - buffer_id_cnt++; - - - //Do slice data - - //slice data buffer pointer - //Note that this is the original data buffer ptr; - // offset to the actual slice data is provided in - // slice_data_offset in VASliceParameterBufferMP42 - - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceDataBufferType, - slice_data->slice_size, //size - 1, //num_elements - slice_data->buffer_addr + slice_data->slice_offset, - &buffer_ids[buffer_id_cnt]); - - buffer_id_cnt++; - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto cleanup; - } - - - LOG_V( "Calling vaRenderPicture\n"); - - //Render the picture - vret = vaRenderPicture( - vadisplay, - vacontext, - buffer_ids, - buffer_id_cnt); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaRenderPicture\n"); - goto cleanup; - } - - -cleanup: - LOG_V( "End\n"); - - return ret; - -} - - -MIX_RESULT mix_videofmt_mp42_decode_end( - MixVideoFormat *mix, - gboolean drop_picture) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - MixVideoFormat* parent = MIX_VIDEOFORMAT(mix); - //MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); - - if (!parent->end_picture_pending) - { - if (parent->video_frame) - { - ret = MIX_RESULT_FAIL; - LOG_E("Unexpected: video_frame is not unreferenced.\n"); - } - goto cleanup; - } - - if (parent->video_frame == NULL) - { - ret = MIX_RESULT_FAIL; - LOG_E("Unexpected: video_frame has been unreferenced.\n"); - goto cleanup; - - } - vret = vaEndPicture(parent->va_display, parent->va_context); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_DROPFRAME; - LOG_E( "Video driver returned error from vaEndPicture\n"); - goto cleanup; - } - -#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ - - LOG_V( "Calling vaSyncSurface\n"); - - //Decode the picture - vret = vaSyncSurface(vadisplay, surface); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaSyncSurface\n"); - goto cleanup; - } -#endif - - if (drop_picture) - { - // we are asked to drop this decoded picture - mix_videoframe_unref(parent->video_frame); - parent->video_frame = NULL; - goto cleanup; - } - - //Enqueue the decoded frame using frame manager - ret = mix_framemanager_enqueue(parent->framemgr, parent->video_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error enqueuing frame object\n"); - goto cleanup; - } - else - { - // video frame is passed to frame manager - parent->video_frame = NULL; - } - -cleanup: - if (parent->video_frame) - { - /* this always indicates an error */ - mix_videoframe_unref(parent->video_frame); - parent->video_frame = NULL; - } - parent->end_picture_pending = FALSE; - return ret; - -} - - -MIX_RESULT mix_videofmt_mp42_decode_continue( - MixVideoFormat *mix, - vbp_data_mp42 *data) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - int i; - gint frame_type = -1; - vbp_picture_data_mp42* pic_data = NULL; - VAPictureParameterBufferMPEG4* 
pic_params = NULL; - MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); - - /* - Packed Frame Assumption: - - 1. In one packed frame, there's only one P or I frame and only one B frame. - 2. In packed frame, there's no skipped frame (vop_coded = 0) - 3. For one packed frame, there will be one N-VOP frame to follow the packed frame (may not immediately). - 4. N-VOP frame is the frame with vop_coded = 0. - 5. The timestamp of N-VOP frame will be used for P or I frame in the packed frame - - - I, P, {P, B}, B, N, P, N, I, ... - I, P, {P, B}, N, P, N, I, ... - - The first N is placeholder for P frame in the packed frame - The second N is a skipped frame - */ - - pic_data = data->picture_data; - for (i = 0; i < data->number_picture_data; i++, pic_data = pic_data->next_picture_data) - { - pic_params = &(pic_data->picture_param); - frame_type = pic_params->vop_fields.bits.vop_coding_type; - if (frame_type == MP4_VOP_TYPE_S && pic_params->no_of_sprite_warping_points > 1) - { - // hardware only support up to one warping point (stationary or translation) - LOG_E("sprite with %d warping points is not supported by HW.\n", - pic_params->no_of_sprite_warping_points); - return MIX_RESULT_DROPFRAME; - } - - if (pic_data->vop_coded == 0) - { - // this should never happen - LOG_E("VOP is not coded.\n"); - return MIX_RESULT_DROPFRAME; - } - - if (pic_data->new_picture_flag == 1 || mix->end_picture_pending == FALSE) - { - if (pic_data->new_picture_flag == 0) - { - LOG_W("First slice of picture is lost!\n"); - } - - gulong surface = 0; - if (mix->end_picture_pending) - { - // this indicates the start of a new frame in the packed frame - LOG_V("packed frame is found.\n"); - - // Update timestamp for packed frame as timestamp is for the B frame! - if (mix->video_frame && pic_params->vop_time_increment_resolution) - { - guint64 ts, ts_inc; - mix_videoframe_get_timestamp(mix->video_frame, &ts); - ts_inc= self->last_vop_time_increment - pic_data->vop_time_increment + - pic_params->vop_time_increment_resolution; - ts_inc = ts_inc % pic_params->vop_time_increment_resolution; - LOG_V("timestamp is incremented by %d at %d resolution.\n", - ts_inc, pic_params->vop_time_increment_resolution); - // convert to nano-second - ts_inc = ts_inc * 1e9 / pic_params->vop_time_increment_resolution; - LOG_V("timestamp of P frame in packed frame is updated from %"G_GINT64_FORMAT" to %"G_GUINT64_FORMAT".\n", - ts, ts + ts_inc); - - ts += ts_inc; - mix_videoframe_set_timestamp(mix->video_frame, ts); - } - - mix_videofmt_mp42_decode_end(mix, FALSE); - self->next_nvop_for_PB_frame = TRUE; - } - if (self->next_nvop_for_PB_frame == TRUE && frame_type != MP4_VOP_TYPE_B) - { - LOG_E("The second frame in the packed frame is not B frame.\n"); - self->next_nvop_for_PB_frame = FALSE; - return MIX_RESULT_DROPFRAME; - } - - //Get a frame from the surface pool - ret = mix_surfacepool_get(mix->surfacepool, &(mix->video_frame)); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error getting frame from surfacepool\n"); - return MIX_RESULT_FAIL; - } - - /* the following calls will always succeed */ - - // set frame type - if (frame_type == MP4_VOP_TYPE_S) - { - // sprite is treated as P frame in the display order - mix_videoframe_set_frame_type(mix->video_frame, MP4_VOP_TYPE_P); - } - else - { - mix_videoframe_set_frame_type(mix->video_frame, frame_type); - } - - - // set frame structure - if (pic_data->picture_param.vol_fields.bits.interlaced) - { - // only MPEG-4 studio profile can have field coding. 
All other profiles - // use frame coding only, i.e, no field VOP. (see vop_structure in MP4 spec) - mix_videoframe_set_frame_structure( - mix->video_frame, - VA_BOTTOM_FIELD | VA_TOP_FIELD); - - LOG_W("Interlaced content, set frame structure to 3 (TOP | BOTTOM field) !\n"); - } - else - { - mix_videoframe_set_frame_structure(mix->video_frame, VA_FRAME_PICTURE); - } - - //Set the discontinuity flag - mix_videoframe_set_discontinuity( - mix->video_frame, - mix->discontinuity_frame_in_progress); - - //Set the timestamp - mix_videoframe_set_timestamp(mix->video_frame, mix->current_timestamp); - - //Get our surface ID from the frame object - ret = mix_videoframe_get_frame_id(mix->video_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error getting surface ID from frame object\n"); - goto cleanup; - } - - /* If I or P frame, update the reference array */ - if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) - { - LOG_V("Updating forward/backward references for libva\n"); - - self->last_vop_coding_type = frame_type; - self->last_vop_time_increment = pic_data->vop_time_increment; - mix_videofmt_mp42_handle_ref_frames(mix, frame_type, mix->video_frame); - if (self->last_frame != NULL) - { - mix_videoframe_unref(self->last_frame); - } - self->last_frame = mix->video_frame; - mix_videoframe_ref(self->last_frame); - } - - //Now we can begin the picture - vret = vaBeginPicture(mix->va_display, mix->va_context, surface); - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaBeginPicture\n"); - goto cleanup; - } - - // vaBeginPicture needs a matching vaEndPicture - mix->end_picture_pending = TRUE; - self->iq_matrix_buf_sent = FALSE; - } - - - ret = mix_videofmt_mp42_decode_a_slice(mix, data, pic_data); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "mix_videofmt_mp42_decode_a_slice failed, error = %#X.", ret); - goto cleanup; - } - } - -cleanup: - // nothing to cleanup; - - return ret; -} - - -MIX_RESULT mix_videofmt_mp42_decode_begin( - MixVideoFormat *mix, - vbp_data_mp42* data) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - gint frame_type = -1; - VAPictureParameterBufferMPEG4* pic_params = NULL; - MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); - vbp_picture_data_mp42 *pic_data = NULL; - - pic_data = data->picture_data; - pic_params = &(pic_data->picture_param); - frame_type = pic_params->vop_fields.bits.vop_coding_type; - - if (self->next_nvop_for_PB_frame) - { - // if we are waiting for n-vop for packed frame, and the new frame is coded, the coding type - // of this frame must be B. - // for example: {PB} B N P B B P... - if (pic_data->vop_coded == 1 && frame_type != MP4_VOP_TYPE_B) - { - LOG_E("Invalid coding type while waiting for n-vop for packed frame.\n"); - // timestamp of P frame in the queue is not correct - mix_framemanager_flush(mix->framemgr); - self->next_nvop_for_PB_frame = FALSE; - } - } - - if (pic_data->vop_coded == 0) - { - if (self->last_frame == NULL) - { - LOG_E("The forward reference frame is NULL, couldn't reconstruct skipped frame.\n"); - mix_framemanager_flush(mix->framemgr); - self->next_nvop_for_PB_frame = FALSE; - return MIX_RESULT_DROPFRAME; - } - - if (self->next_nvop_for_PB_frame) - { - // P frame is already in queue, just need to update time stamp. 
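The timestamp arithmetic in the packed-frame branch above is a wrap-safe modular delta scaled to nanoseconds. A standalone sketch of the same computation (illustrative; integer scaling is assumed here, where the code above multiplies by 1e9 in floating point; packed_frame_ts_delta_ns is a hypothetical helper):

// (last - cur) mod resolution, computed without going negative,
// then converted from vop_time_increment ticks to nanoseconds.
static guint64 packed_frame_ts_delta_ns(guint last_increment,
                                        guint cur_increment,
                                        guint resolution) {
    guint64 delta = (last_increment + resolution - cur_increment) % resolution;
    return delta * G_GUINT64_CONSTANT(1000000000) / resolution;
}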
- mix_videoframe_set_timestamp(self->last_frame, mix->current_timestamp); - self->next_nvop_for_PB_frame = FALSE; - } - else - { - // handle skipped frame - MixVideoFrame *skip_frame = NULL; - gulong frame_id = VA_INVALID_SURFACE; - - skip_frame = mix_videoframe_new(); - ret = mix_videoframe_set_is_skipped(skip_frame, TRUE); - ret = mix_videoframe_get_frame_id(self->last_frame, &frame_id); - ret = mix_videoframe_set_frame_id(skip_frame, frame_id); - ret = mix_videoframe_set_frame_type(skip_frame, MP4_VOP_TYPE_P); - ret = mix_videoframe_set_real_frame(skip_frame, self->last_frame); - // add a reference as skip_frame holds the last_frame. - mix_videoframe_ref(self->last_frame); - ret = mix_videoframe_set_timestamp(skip_frame, mix->current_timestamp); - ret = mix_videoframe_set_discontinuity(skip_frame, FALSE); - - LOG_V("Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n", - (guint)skip_frame, (guint)frame_id, mix->current_timestamp); - - /* Enqueue the skipped frame using frame manager */ - ret = mix_framemanager_enqueue(mix->framemgr, skip_frame); - } - - if (data->number_picture_data > 1) - { - LOG_E("Unexpected to have more picture data following a not-coded VOP.\n"); - //picture data is thrown away. No issue if picture data is for N-VOP. if picture data is for - // coded picture, a frame is lost. - } - return MIX_RESULT_SUCCESS; - } - else - { - /* - * Check for B frames after a seek - * We need to have both reference frames in hand before we can decode a B frame - * If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME - */ - if (frame_type == MP4_VOP_TYPE_B) - { - if (self->reference_frames[0] == NULL || - self->reference_frames[1] == NULL) - { - LOG_W("Insufficient reference frames for B frame\n"); - return MIX_RESULT_DROPFRAME; - } - } - else if (frame_type == MP4_VOP_TYPE_P || frame_type == MP4_VOP_TYPE_S) - { - if (self->reference_frames[0] == NULL) - { - LOG_W("Reference frames for P/S frame is missing\n"); - return MIX_RESULT_DROPFRAME; - } - } - - // all sanity check passes, continue decoding through mix_videofmt_mp42_decode_continue - ret = mix_videofmt_mp42_decode_continue(mix, data); - } - - return ret; - -} - - -MIX_RESULT mix_videofmt_mp42_decode_a_buffer( - MixVideoFormat *mix, - MixBuffer * bufin, - guint64 ts, - gboolean discontinuity) -{ - uint32 pret = 0; - MixVideoFormat *parent = NULL; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - vbp_data_mp42 *data = NULL; - - LOG_V( "Begin\n"); - - parent = MIX_VIDEOFORMAT(mix); - - pret = vbp_parse(parent->parser_handle, - bufin->data, - bufin->size, - FALSE); - - if (pret != VBP_OK) - { - ret = MIX_RESULT_DROPFRAME; - LOG_E( "vbp_parse failed.\n"); - goto cleanup; - } - else - { - LOG_V("vbp_parse succeeded.\n"); - } - - //query for data - pret = vbp_query(parent->parser_handle, (void *) &data); - - if ((pret != VBP_OK) || (data == NULL)) - { - // never happen! 
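Stepping back to the reference checks in decode_begin above: B pictures need both reference slots, P/S pictures need the forward reference, and I pictures need none. A minimal sketch of that precondition (references_ready is a hypothetical helper, not part of the patch):

// Illustrative only: the post-seek decodability rule used above.
static gboolean references_ready(gint frame_type,
                                 MixVideoFrame *ref0,   // reference_frames[0]
                                 MixVideoFrame *ref1) { // reference_frames[1]
    switch (frame_type) {
    case MP4_VOP_TYPE_B:
        return (ref0 != NULL) && (ref1 != NULL);
    case MP4_VOP_TYPE_P:
    case MP4_VOP_TYPE_S:
        return ref0 != NULL;
    default:
        return TRUE;            // I pictures need no references
    }
}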
- ret = MIX_RESULT_FAIL; - LOG_E( "vbp_query failed.\n"); - goto cleanup; - } - else - { - LOG_V("vbp_query succeeded.\n"); - } - - if (parent->va_initialized == FALSE) - { - mix_videofmt_mp42_update_config_params(parent, data); - - LOG_V("try initializing VA...\n"); - ret = mix_videofmt_mp42_initialize_va(parent, data); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_V("mix_videofmt_mp42_initialize_va failed.\n"); - goto cleanup; - } - } - - // check if any slice is parsed, we may just receive configuration data - if (data->number_picture_data == 0) - { - ret = MIX_RESULT_SUCCESS; - LOG_V("slice is not available.\n"); - goto cleanup; - } - - guint64 last_ts = parent->current_timestamp; - parent->current_timestamp = ts; - parent->discontinuity_frame_in_progress = discontinuity; - - if (last_ts != ts) - { - // finish decoding the last frame - ret = mix_videofmt_mp42_decode_end(parent, FALSE); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_V("mix_videofmt_mp42_decode_end failed.\n"); - goto cleanup; - } - - // start decoding a new frame - ret = mix_videofmt_mp42_decode_begin(parent, data); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_V("mix_videofmt_mp42_decode_begin failed.\n"); - goto cleanup; - } - } - else - { - ret = mix_videofmt_mp42_decode_continue(parent, data); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_V("mix_videofmt_mp42_decode_continue failed.\n"); - goto cleanup; - } - } - -cleanup: - - LOG_V( "End\n"); - - return ret; -} - - - -MIX_RESULT mix_videofmt_mp42_initialize( - MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display ) -{ - uint32 pret = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - enum _vbp_parser_type ptype = VBP_MPEG4; - vbp_data_mp42 *data = NULL; - MixVideoFormat *parent = NULL; - MixIOVec *header = NULL; - - if (mix == NULL || config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL) - { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - /* Chainup parent method. 
*/ - - if (parent_class->initialize) { - ret = parent_class->initialize(mix, config_params, - frame_mgr, input_buf_pool, surface_pool, - va_display); - } - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error initializing\n"); - return ret; - } - - if (!MIX_IS_VIDEOFORMAT_MP42(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMAT(mix); - //MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); - - LOG_V( "Locking\n"); - //From now on, we exit this function through cleanup: - g_mutex_lock(parent->objectlock); - - parent->surfacepool = mix_surfacepool_new(); - *surface_pool = parent->surfacepool; - - if (parent->surfacepool == NULL) - { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "parent->surfacepool == NULL.\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, - &parent->extra_surfaces); - - if (ret != MIX_RESULT_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot get extra surface allocation setting\n"); - goto cleanup; - } - - //Load the bitstream parser - pret = vbp_open(ptype, &(parent->parser_handle)); - - if (!(pret == VBP_OK)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error opening parser\n"); - goto cleanup; - } - LOG_V( "Opened parser\n"); - - - ret = mix_videoconfigparamsdec_get_header(config_params, &header); - - if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) - { - // Delay initializing VA if codec configuration data is not ready, but don't return an error. - ret = MIX_RESULT_SUCCESS; - LOG_W( "Codec data is not available in the configuration parameter.\n"); - goto cleanup; - } - - LOG_V( "Calling parse on header data, handle %d\n", (int)parent->parser_handle); - - pret = vbp_parse(parent->parser_handle, header->data, - header->data_size, TRUE); - - if (pret != VBP_OK) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error parsing header data\n"); - goto cleanup; - } - - LOG_V( "Parsed header\n"); - - //Get the header data and save - pret = vbp_query(parent->parser_handle, (void *)&data); - - if ((pret != VBP_OK) || (data == NULL)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error reading parsed header data\n"); - goto cleanup; - } - - LOG_V( "Queried parser for header data\n"); - - mix_videofmt_mp42_update_config_params(mix, data); - - ret = mix_videofmt_mp42_initialize_va(mix, data); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error initializing va. \n"); - goto cleanup; - } - - -cleanup: - if (ret != MIX_RESULT_SUCCESS) { - if (parent->parser_handle) - { - pret = vbp_close(parent->parser_handle); - parent->parser_handle = NULL; - } - parent->initialized = FALSE; - - } else { - parent->initialized = TRUE; - } - - if (header != NULL) - { - if (header->data != NULL) - g_free(header->data); - g_free(header); - header = NULL; - } - - - LOG_V( "Unlocking\n"); - g_mutex_unlock(parent->objectlock); - - - return ret; -} - - -MIX_RESULT mix_videofmt_mp42_decode(MixVideoFormat *mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params) { - - int i = 0; - MixVideoFormat *parent = NULL; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - guint64 ts = 0; - gboolean discontinuity = FALSE; - - LOG_V( "Begin\n"); - - if (mix == NULL || bufin == NULL || decode_params == NULL ) - { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - /* Chainup parent method. - We are not chaining up to parent method for now. 
- */ - -#if 0 - if (parent_class->decode) { - return parent_class->decode(mix, bufin, bufincnt, decode_params); - } -#endif - - if (!MIX_IS_VIDEOFORMAT_MP42(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMAT(mix); - - ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); - if (ret != MIX_RESULT_SUCCESS) - { - // never happen - return MIX_RESULT_FAIL; - } - - ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); - if (ret != MIX_RESULT_SUCCESS) - { - // never happen - return MIX_RESULT_FAIL; - } - - //From now on, we exit this function through cleanup: - - LOG_V( "Locking\n"); - g_mutex_lock(parent->objectlock); - - LOG_I("ts after mix_videodecodeparams_get_timestamp() = %"G_GINT64_FORMAT"\n", ts); - - for (i = 0; i < bufincnt; i++) - { - // decode a buffer at a time - ret = mix_videofmt_mp42_decode_a_buffer( - mix, - bufin[i], - ts, - discontinuity); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E("mix_videofmt_mp42_decode_a_buffer failed.\n"); - goto cleanup; - } - } - - -cleanup: - - LOG_V( "Unlocking\n"); - g_mutex_unlock(parent->objectlock); - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_videofmt_mp42_flush(MixVideoFormat *mix) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); - - LOG_V("Begin\n"); - - g_mutex_lock(mix->objectlock); - - // drop any decode-pending picture, and ignore return value - mix_videofmt_mp42_decode_end(mix, TRUE); - - /* - * Clear parse_in_progress flag and current timestamp - */ - mix->parse_in_progress = FALSE; - mix->discontinuity_frame_in_progress = FALSE; - mix->current_timestamp = (guint64)-1; - self->next_nvop_for_PB_frame = FALSE; - - gint idx = 0; - for (idx = 0; idx < 2; idx++) { - if (self->reference_frames[idx] != NULL) { - mix_videoframe_unref(self->reference_frames[idx]); - self->reference_frames[idx] = NULL; - } - } - if (self->last_frame) - { - mix_videoframe_unref(self->last_frame); - self->last_frame = NULL; - } - - - /* Call parser flush */ - vbp_flush(mix->parser_handle); - - g_mutex_unlock(mix->objectlock); - - LOG_V("End\n"); - - return ret; -} - -MIX_RESULT mix_videofmt_mp42_eos(MixVideoFormat *mix) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V("Begin\n"); - - if (mix == NULL) { - return MIX_RESULT_NULL_PTR; - } - - if (!MIX_IS_VIDEOFORMAT_MP42(mix)) { - return MIX_RESULT_INVALID_PARAM; - } - - g_mutex_lock(mix->objectlock); - - mix_videofmt_mp42_decode_end(mix, FALSE); - - ret = mix_framemanager_eos(mix->framemgr); - - g_mutex_unlock(mix->objectlock); - - LOG_V("End\n"); - - return ret; -} - -MIX_RESULT mix_videofmt_mp42_deinitialize(MixVideoFormat *mix) { - - /* - * We do the all the cleanup in _finalize - */ - - MIX_RESULT ret = MIX_RESULT_FAIL; - - LOG_V("Begin\n"); - - if (mix == NULL) { - LOG_V("mix is NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (!MIX_IS_VIDEOFORMAT_MP42(mix)) { - LOG_V("mix is not mixvideoformat_mp42\n"); - return MIX_RESULT_INVALID_PARAM; - } - - if (parent_class->deinitialize) { - ret = parent_class->deinitialize(mix); - } - - LOG_V("End\n"); - return ret; -} - -MIX_RESULT mix_videofmt_mp42_handle_ref_frames(MixVideoFormat *mix, - enum _picture_type frame_type, MixVideoFrame * current_frame) { - - MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix); - - LOG_V("Begin\n"); - - if (mix == NULL || current_frame == NULL) { - return MIX_RESULT_NULL_PTR; - } - - switch (frame_type) { - case MP4_VOP_TYPE_I: - case MP4_VOP_TYPE_P: - LOG_V("Refing reference frame %x\n", (guint) current_frame); - - 
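The two-slot reference array updated below behaves like a tiny ring buffer. A minimal sketch of the update rule for I/P pictures (illustrative; update_reference_ring is a hypothetical helper):

// reference_frames[0]/[1] hold the two most recent I/P pictures.
// A new I/P picture fills the first empty slot; once both are
// occupied, the oldest reference is released and the pair shifts.
static void update_reference_ring(MixVideoFrame *refs[2], MixVideoFrame *frame) {
    mix_videoframe_ref(frame);          // decoder keeps its own reference
    if (refs[0] == NULL) {
        refs[0] = frame;                // first I/P picture of the stream
    } else if (refs[1] == NULL) {
        refs[1] = frame;                // second I/P picture
    } else {
        mix_videoframe_unref(refs[0]);  // drop the oldest reference
        refs[0] = refs[1];
        refs[1] = frame;
    }
}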
mix_videoframe_ref(current_frame); - - /* should only happen on first frame */ - if (self->reference_frames[0] == NULL) { - self->reference_frames[0] = current_frame; - /* should only happen on second frame */ - } else if (self->reference_frames[1] == NULL) { - self->reference_frames[1] = current_frame; - } else { - LOG_V("Releasing reference frame %x\n", - (guint) self->reference_frames[0]); - mix_videoframe_unref(self->reference_frames[0]); - self->reference_frames[0] = self->reference_frames[1]; - self->reference_frames[1] = current_frame; - } - break; - case MP4_VOP_TYPE_B: - case MP4_VOP_TYPE_S: - default: - break; - - } - - LOG_V("End\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmt_mp42_release_input_buffers(MixVideoFormat *mix, - guint64 timestamp) { - - // not used, to be removed - return MIX_RESULT_SUCCESS; -} diff --git a/mix_video/src/mixvideoformat_mp42.cpp b/mix_video/src/mixvideoformat_mp42.cpp new file mode 100644 index 0000000..29d460e --- /dev/null +++ b/mix_video/src/mixvideoformat_mp42.cpp @@ -0,0 +1,1064 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include <glib.h> +#include <string.h> +#include "mixvideolog.h" +#include "mixvideoformat_mp42.h" + +// Value of VOP type defined here follows the MP4 spec, and has the same value as the corresponding frame type +// defined in enumeration MixFrameType (except sprite (S)) +enum { + MP4_VOP_TYPE_I = 0, + MP4_VOP_TYPE_P = 1, + MP4_VOP_TYPE_B = 2, + MP4_VOP_TYPE_S = 3, +}; + +MixVideoFormat_MP42::MixVideoFormat_MP42() + :last_frame(NULL) + ,last_vop_coding_type(-1) + ,last_vop_time_increment(0) + ,next_nvop_for_PB_frame(FALSE) + ,iq_matrix_buf_sent(FALSE) { + this->reference_frames[0] = NULL; + this->reference_frames[1] = NULL; +} + +MixVideoFormat_MP42::~MixVideoFormat_MP42() { + /* clean up here.
*/ + gint32 vbp_ret = VBP_OK; + LOG_V("Begin\n"); + //surfacepool is deallocated by parent + //inputbufqueue is deallocated by parent + //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces + + Lock(); + + /* unref reference frames */ + for (uint32 idx = 0; idx < 2; idx++) { + if (this->reference_frames[idx] != NULL) { + mix_videoframe_unref(this->reference_frames[idx]); + this->reference_frames[idx] = NULL; + } + } + if (this->last_frame) { + mix_videoframe_unref(this->last_frame); + this->last_frame = NULL; + } + this->next_nvop_for_PB_frame = FALSE; + this->iq_matrix_buf_sent = FALSE; + + /* Reset state */ + this->initialized = TRUE; + this->end_picture_pending = FALSE; + this->discontinuity_frame_in_progress = FALSE; + this->current_timestamp = (guint64)-1; + + /* Close the parser */ + if (this->parser_handle) { + vbp_ret = vbp_close(this->parser_handle); + this->parser_handle = NULL; + } + + Unlock(); + LOG_V("End\n"); +} + + +MixVideoFormat_MP42 *mix_videoformat_mp42_new(void) { + return new MixVideoFormat_MP42; +} + +MixVideoFormat_MP42 * mix_videoformat_mp42_ref(MixVideoFormat_MP42 * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} + +MixVideoFormat_MP42 *mix_videoformat_mp42_unref(MixVideoFormat_MP42 * mix){ + if (NULL != mix) + return MIX_VIDEOFORMAT_MP42(mix->Unref()); + else + return mix; +} + +MIX_RESULT MixVideoFormat_MP42::_update_config_params( + vbp_data_mp42 *data) { + if (this->picture_width == 0 || + this->picture_height == 0 || + this->picture_width < data->codec_data.video_object_layer_width || + this->picture_height < data->codec_data.video_object_layer_height) { + this->picture_width = data->codec_data.video_object_layer_width; + this->picture_height = data->codec_data.video_object_layer_height; + mix_videoconfigparamsdec_set_picture_res( + this->config_params, this->picture_width, this->picture_height); + } + // video_range has default value of 0. Y ranges from 16 to 235. + mix_videoconfigparamsdec_set_video_range(this->config_params, data->codec_data.video_range); + uint8 color_matrix; + switch (data->codec_data.matrix_coefficients) { + case 1: + color_matrix = VA_SRC_BT709; + break; + // ITU-R Recommendation BT.470-6 System B, G (MP4), same as + // SMPTE 170M/BT601 + case 5: + case 6: + color_matrix = VA_SRC_BT601; + break; + default: + // unknown color matrix, set to 0 so color space flag will not be set. 
+ color_matrix = 0; + break; + } + mix_videoconfigparamsdec_set_color_matrix(this->config_params, color_matrix); + + mix_videoconfigparamsdec_set_pixel_aspect_ratio( + this->config_params, data->codec_data.par_width, data->codec_data.par_height); + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT MixVideoFormat_MP42::_initialize_va(vbp_data_mp42 *data) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + VAConfigAttrib attrib; + VAProfile va_profile; + LOG_V( "Begin\n"); + if (this->va_initialized) { + LOG_W("va already initialized.\n"); + return MIX_RESULT_SUCCESS; + } + + //We are requesting RT attributes + attrib.type = VAConfigAttribRTFormat; + attrib.value = VA_RT_FORMAT_YUV420; + + //Initialize and save the VA config ID + if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) { + va_profile = VAProfileMPEG4AdvancedSimple; + } else { + va_profile = VAProfileMPEG4Simple; + } + vret = vaCreateConfig( + this->va_display, + va_profile, + VAEntrypointVLD, + &attrib, + 1, + &(this->va_config)); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("vaCreateConfig failed\n"); + goto CLEAN_UP; + } + + // add 1 more surface for packed frame (PB frame), and another one + // for partial frame handling + this->va_num_surfaces = this->extra_surfaces + 4 + 1 + 1; + //if (parent->va_num_surfaces > MIX_VIDEO_MP42_SURFACE_NUM) + // parent->va_num_surfaces = MIX_VIDEO_MP42_SURFACE_NUM; + + this->va_surfaces = reinterpret_cast(g_malloc(sizeof(VASurfaceID)*this->va_num_surfaces)); + if (this->va_surfaces == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E( "parent->va_surfaces == NULL. \n"); + goto CLEAN_UP; + } + + vret = vaCreateSurfaces( + this->va_display, + this->picture_width, + this->picture_height, + VA_RT_FORMAT_YUV420, + this->va_num_surfaces, + this->va_surfaces); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error allocating surfaces\n"); + goto CLEAN_UP; + } + + LOG_V( "Created %d libva surfaces\n", this->va_num_surfaces); + + //Initialize the surface pool + ret = mix_surfacepool_initialize( + this->surfacepool, + this->va_surfaces, + this->va_num_surfaces, + this->va_display); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing. 
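The profile test earlier in _initialize_va keys off profile_and_level_indication. A minimal sketch of that selection (illustrative; per the MPEG-4 spec, the values 0xF0-0xF7 matched by the 0xF8 mask are Advanced Simple Profile levels; mp42_select_profile is a hypothetical helper):

#include <va/va.h>

// Illustrative only: pick the libva profile the same way the code above does.
static VAProfile mp42_select_profile(guint8 profile_and_level_indication) {
    if ((profile_and_level_indication & 0xF8) == 0xF0)
        return VAProfileMPEG4AdvancedSimple;   // ASP levels 0xF0-0xF7
    return VAProfileMPEG4Simple;               // everything else: Simple Profile
}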
+ default: + ret = MIX_RESULT_ALREADY_INIT; + LOG_E( "Error init surface pool\n"); + goto CLEAN_UP; + break; + } + + //Initialize and save the VA context ID + //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 + vret = vaCreateContext( + this->va_display, + this->va_config, + this->picture_width, + this->picture_height, + 0, + this->va_surfaces, + this->va_num_surfaces, + &(this->va_context)); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + goto CLEAN_UP; + } + this->va_initialized = TRUE; + +CLEAN_UP: + return ret; +} + +MIX_RESULT MixVideoFormat_MP42::_decode_a_slice( + vbp_data_mp42* data, vbp_picture_data_mp42* pic_data) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + VADisplay vadisplay = NULL; + VAContextID vacontext; + guint buffer_id_cnt = 0; + gint frame_type = -1; + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID buffer_ids[4]; + VAPictureParameterBufferMPEG4* pic_params = &(pic_data->picture_param); + vbp_slice_data_mp42* slice_data = &(pic_data->slice_data); + VASliceParameterBufferMPEG4* slice_params = &(slice_data->slice_param); + + LOG_V( "Begin\n"); + + vadisplay = this->va_display; + vacontext = this->va_context; + + if (!this->end_picture_pending) { + LOG_E("picture decoder is not started!\n"); + ret = MIX_RESULT_FAIL; + goto CLEAN_UP; + } + + // update reference pictures + frame_type = pic_params->vop_fields.bits.vop_coding_type; + + switch (frame_type) { + case MP4_VOP_TYPE_I: + pic_params->forward_reference_picture = VA_INVALID_SURFACE; + pic_params->backward_reference_picture = VA_INVALID_SURFACE; + break; + + case MP4_VOP_TYPE_P: + pic_params-> forward_reference_picture + = this->reference_frames[0]->frame_id; + pic_params-> backward_reference_picture = VA_INVALID_SURFACE; + break; + + case MP4_VOP_TYPE_B: + pic_params->vop_fields.bits.backward_reference_vop_coding_type + = this->last_vop_coding_type; + pic_params->forward_reference_picture + = this->reference_frames[1]->frame_id; + pic_params->backward_reference_picture + = this->reference_frames[0]->frame_id; + break; + + case MP4_VOP_TYPE_S: + pic_params-> forward_reference_picture + = this->reference_frames[0]->frame_id; + pic_params-> backward_reference_picture = VA_INVALID_SURFACE; + break; + + default: + LOG_W("default, Will never reach here\n"); + ret = MIX_RESULT_FAIL; + goto CLEAN_UP; + break; + } + + //Now for slices + + LOG_V( "Creating libva picture parameter buffer\n"); + + //First the picture parameter buffer + vret = vaCreateBuffer( + vadisplay, + vacontext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferMPEG4), + 1, + pic_params, + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + + buffer_id_cnt++; + + if (pic_params->vol_fields.bits.quant_type && + this->iq_matrix_buf_sent == FALSE) { + LOG_V( "Creating libva IQMatrix buffer\n"); + // only send IQ matrix for the first slice in the picture + vret = vaCreateBuffer( + vadisplay, + vacontext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferMPEG4), + 1, + &(data->iq_matrix_buffer), + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + this->iq_matrix_buf_sent = TRUE; + buffer_id_cnt++; + } + + vret = vaCreateBuffer( + 
vadisplay, + vacontext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferMPEG4), + 1, + slice_params, + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + + buffer_id_cnt++; + + + //Do slice data + //slice data buffer pointer + //Note that this is the original data buffer ptr; + // offset to the actual slice data is provided in + // slice_data_offset in VASliceParameterBufferMP42 + + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceDataBufferType, + slice_data->slice_size, //size + 1, //num_elements + slice_data->buffer_addr + slice_data->slice_offset, + &buffer_ids[buffer_id_cnt]); + + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling vaRenderPicture\n"); + + //Render the picture + vret = vaRenderPicture( + vadisplay, + vacontext, + buffer_ids, + buffer_id_cnt); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaRenderPicture\n"); + goto CLEAN_UP; + } + +CLEAN_UP: + LOG_V( "End\n"); + return ret; +} + +MIX_RESULT MixVideoFormat_MP42::_decode_end(gboolean drop_picture) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + + if (!this->end_picture_pending) { + if (this->video_frame) { + ret = MIX_RESULT_FAIL; + LOG_E("Unexpected: video_frame is not unreferenced.\n"); + } + goto CLEAN_UP; + } + + if (this->video_frame == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E("Unexpected: video_frame has been unreferenced.\n"); + goto CLEAN_UP; + } + + vret = vaEndPicture(this->va_display, this->va_context); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_DROPFRAME; + LOG_E( "Video driver returned error from vaEndPicture\n"); + goto CLEAN_UP; + } + +#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ + + LOG_V( "Calling vaSyncSurface\n"); + + //Decode the picture + vret = vaSyncSurface(vadisplay, surface); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaSyncSurface\n"); + CLEAN_UP; + } +#endif + + if (drop_picture) { + // we are asked to drop this decoded picture + mix_videoframe_unref(this->video_frame); + this->video_frame = NULL; + goto CLEAN_UP; + } + + //Enqueue the decoded frame using frame manager + ret = mix_framemanager_enqueue(this->framemgr, this->video_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error enqueuing frame object\n"); + goto CLEAN_UP; + } else { + // video frame is passed to frame manager + this->video_frame = NULL; + } + +CLEAN_UP: + if (this->video_frame) { + /* this always indicates an error */ + mix_videoframe_unref(this->video_frame); + this->video_frame = NULL; + } + this->end_picture_pending = FALSE; + return ret; +} + +MIX_RESULT MixVideoFormat_MP42::_decode_continue(vbp_data_mp42 *data) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + uint32 i; + gint frame_type = -1; + vbp_picture_data_mp42* pic_data = NULL; + VAPictureParameterBufferMPEG4* pic_params = NULL; + /* + Packed Frame Assumption: + + 1. In one packed frame, there's only one P or I frame and only one B frame. + 2. In packed frame, there's no skipped frame (vop_coded = 0) + 3. For one packed frame, there will be one N-VOP frame to follow the packed frame (may not immediately). + 4. 
N-VOP frame is the frame with vop_coded = 0. + 5. The timestamp of N-VOP frame will be used for P or I frame in the packed frame + + + I, P, {P, B}, B, N, P, N, I, ... + I, P, {P, B}, N, P, N, I, ... + + The first N is a placeholder for the P frame in the packed frame + The second N is a skipped frame + */ + + pic_data = data->picture_data; + for (i = 0; i < data->number_picture_data; i++, pic_data = pic_data->next_picture_data) { + pic_params = &(pic_data->picture_param); + frame_type = pic_params->vop_fields.bits.vop_coding_type; + if (frame_type == MP4_VOP_TYPE_S && + pic_params->no_of_sprite_warping_points > 1) { + // hardware only supports up to one warping point (stationary or translation) + LOG_E("sprite with %d warping points is not supported by HW.\n", + pic_params->no_of_sprite_warping_points); + return MIX_RESULT_DROPFRAME; + } + + if (pic_data->vop_coded == 0) { + // this should never happen + LOG_E("VOP is not coded.\n"); + return MIX_RESULT_DROPFRAME; + } + + if (pic_data->new_picture_flag == 1 || + this->end_picture_pending == FALSE) { + if (pic_data->new_picture_flag == 0) { + LOG_W("First slice of picture is lost!\n"); + } + + gulong surface = 0; + if (this->end_picture_pending) + { + // this indicates the start of a new frame in the packed frame + LOG_V("packed frame is found.\n"); + + // Update timestamp for packed frame as timestamp is for the B frame! + if (this->video_frame && pic_params->vop_time_increment_resolution) { + guint64 ts, ts_inc; + mix_videoframe_get_timestamp(this->video_frame, &ts); + ts_inc = this->last_vop_time_increment - pic_data->vop_time_increment + + pic_params->vop_time_increment_resolution; + ts_inc = ts_inc % pic_params->vop_time_increment_resolution; + LOG_V("timestamp is incremented by %d at %d resolution.\n", + ts_inc, pic_params->vop_time_increment_resolution); + // convert to nano-second + ts_inc = ts_inc * 1e9 / pic_params->vop_time_increment_resolution; + LOG_V("timestamp of P frame in packed frame is updated from %"G_GINT64_FORMAT" to %"G_GUINT64_FORMAT".\n", + ts, ts + ts_inc); + ts += ts_inc; + mix_videoframe_set_timestamp(this->video_frame, ts); + } + + _decode_end(FALSE); + this->next_nvop_for_PB_frame = TRUE; + } + + if (this->next_nvop_for_PB_frame == TRUE && + frame_type != MP4_VOP_TYPE_B) { + LOG_E("The second frame in the packed frame is not B frame.\n"); + this->next_nvop_for_PB_frame = FALSE; + return MIX_RESULT_DROPFRAME; + } + + //Get a frame from the surface pool + ret = mix_surfacepool_get(this->surfacepool, &(this->video_frame)); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting frame from surfacepool\n"); + return MIX_RESULT_FAIL; + } + + /* the following calls will always succeed */ + + // set frame type + if (frame_type == MP4_VOP_TYPE_S) { + // sprite is treated as P frame in the display order + mix_videoframe_set_frame_type(this->video_frame, (MixFrameType)MP4_VOP_TYPE_P); + } else { + mix_videoframe_set_frame_type(this->video_frame, (MixFrameType)frame_type); + } + + // set frame structure + if (pic_data->picture_param.vol_fields.bits.interlaced) { + // only MPEG-4 studio profile can have field coding. All other profiles + // use frame coding only, i.e., no field VOP.
(see vop_structure in MP4 spec) + mix_videoframe_set_frame_structure( + this->video_frame, + VA_BOTTOM_FIELD | VA_TOP_FIELD); + LOG_W("Interlaced content, set frame structure to 3 (TOP | BOTTOM field) !\n"); + } else { + mix_videoframe_set_frame_structure(this->video_frame, VA_FRAME_PICTURE); + } + + //Set the discontinuity flag + mix_videoframe_set_discontinuity( + this->video_frame, + this->discontinuity_frame_in_progress); + + //Set the timestamp + mix_videoframe_set_timestamp(this->video_frame, this->current_timestamp); + + //Get our surface ID from the frame object + ret = mix_videoframe_get_frame_id(this->video_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting surface ID from frame object\n"); + goto CLEAN_UP; + } + + /* If I or P frame, update the reference array */ + if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) { + LOG_V("Updating forward/backward references for libva\n"); + this->last_vop_coding_type = frame_type; + this->last_vop_time_increment = pic_data->vop_time_increment; + _handle_ref_frames((_picture_type)frame_type, this->video_frame); + if (this->last_frame != NULL) { + mix_videoframe_unref(this->last_frame); + } + this->last_frame = this->video_frame; + mix_videoframe_ref(this->last_frame); + } + + //Now we can begin the picture + vret = vaBeginPicture(this->va_display, this->va_context, surface); + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaBeginPicture\n"); + goto CLEAN_UP; + } + + // vaBeginPicture needs a matching vaEndPicture + this->end_picture_pending = TRUE; + this->iq_matrix_buf_sent = FALSE; + } + + + ret = _decode_a_slice(data, pic_data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "_decode_a_slice failed, error = %#X.", ret); + goto CLEAN_UP; + } + } + +CLEAN_UP: + return ret; +} + + + +MIX_RESULT MixVideoFormat_MP42::_decode_begin(vbp_data_mp42* data) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + gint frame_type = -1; + VAPictureParameterBufferMPEG4* pic_params = NULL; + vbp_picture_data_mp42 *pic_data = NULL; + pic_data = data->picture_data; + pic_params = &(pic_data->picture_param); + frame_type = pic_params->vop_fields.bits.vop_coding_type; + + if (this->next_nvop_for_PB_frame) { + // if we are waiting for n-vop for packed frame, and the new frame is coded, the coding type + // of this frame must be B. + // for example: {PB} B N P B B P... + if (pic_data->vop_coded == 1 && frame_type != MP4_VOP_TYPE_B) { + LOG_E("Invalid coding type while waiting for n-vop for packed frame.\n"); + // timestamp of P frame in the queue is not correct + mix_framemanager_flush(this->framemgr); + this->next_nvop_for_PB_frame = FALSE; + } + } + + if (pic_data->vop_coded == 0) { + if (this->last_frame == NULL) { + LOG_E("The forward reference frame is NULL, couldn't reconstruct skipped frame.\n"); + mix_framemanager_flush(this->framemgr); + this->next_nvop_for_PB_frame = FALSE; + return MIX_RESULT_DROPFRAME; + } + + if (this->next_nvop_for_PB_frame) { + // P frame is already in queue, just need to update time stamp. 
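The n-VOP bookkeeping used above can be summarized as a one-flag state machine: after the second picture of a packed {P,B} frame is decoded, the next not-coded VOP is consumed as a placeholder whose timestamp belongs to the already-queued P frame, rather than being emitted as a skipped frame. A minimal sketch (illustrative; consume_nvop is a hypothetical helper):

// Returns TRUE if this not-coded VOP is only the packed-frame placeholder.
static gboolean consume_nvop(gboolean *next_nvop_for_pb) {
    if (*next_nvop_for_pb) {
        *next_nvop_for_pb = FALSE;   // retime the queued P frame, emit nothing
        return TRUE;
    }
    return FALSE;                    // genuine skipped frame: duplicate last_frame
}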
+ mix_videoframe_set_timestamp(this->last_frame, this->current_timestamp); + this->next_nvop_for_PB_frame = FALSE; + } else { + // handle skipped frame + MixVideoFrame *skip_frame = NULL; + gulong frame_id = VA_INVALID_SURFACE; + + skip_frame = mix_videoframe_new(); + ret = mix_videoframe_set_is_skipped(skip_frame, TRUE); + ret = mix_videoframe_get_frame_id(this->last_frame, &frame_id); + ret = mix_videoframe_set_frame_id(skip_frame, frame_id); + ret = mix_videoframe_set_frame_type(skip_frame, (MixFrameType)MP4_VOP_TYPE_P); + ret = mix_videoframe_set_real_frame(skip_frame, this->last_frame); + // add a reference as skip_frame holds the last_frame. + mix_videoframe_ref(this->last_frame); + ret = mix_videoframe_set_timestamp(skip_frame, this->current_timestamp); + ret = mix_videoframe_set_discontinuity(skip_frame, FALSE); + + LOG_V("Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n", + (guint)skip_frame, (guint)frame_id, this->current_timestamp); + + /* Enqueue the skipped frame using frame manager */ + ret = mix_framemanager_enqueue(this->framemgr, skip_frame); + } + + if (data->number_picture_data > 1) { + LOG_E("Unexpected additional picture data following a not-coded VOP.\n"); + //picture data is thrown away. No issue if picture data is for N-VOP. If picture data is for a + // coded picture, a frame is lost. + } + return MIX_RESULT_SUCCESS; + } else { + /* + * Check for B frames after a seek + * We need to have both reference frames in hand before we can decode a B frame + * If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME + */ + if (frame_type == MP4_VOP_TYPE_B) { + if (this->reference_frames[0] == NULL || + this->reference_frames[1] == NULL) { + LOG_W("Insufficient reference frames for B frame\n"); + return MIX_RESULT_DROPFRAME; + } + } else if (frame_type == MP4_VOP_TYPE_P || frame_type == MP4_VOP_TYPE_S) { + if (this->reference_frames[0] == NULL) { + LOG_W("Reference frame for P/S frame is missing\n"); + return MIX_RESULT_DROPFRAME; + } + } + // all sanity checks pass, continue decoding through _decode_continue + ret = _decode_continue(data); + } + return ret; +} + +MIX_RESULT MixVideoFormat_MP42::_decode_a_buffer( + MixBuffer * bufin, guint64 ts, gboolean discontinuity) { + uint32 pret = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + vbp_data_mp42 *data = NULL; + guint64 last_ts = 0; + + LOG_V( "Begin\n"); + pret = vbp_parse(this->parser_handle, + bufin->data, + bufin->size, + FALSE); + + if (pret != VBP_OK) { + ret = MIX_RESULT_DROPFRAME; + LOG_E( "vbp_parse failed.\n"); + goto CLEAN_UP; + } + else { + LOG_V("vbp_parse succeeded.\n"); + } + //query for data + pret = vbp_query(this->parser_handle, (void **) &data);
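vbp_parse followed by vbp_query is the fixed two-step pattern this decoder uses for every input buffer: feed the bytes, then fetch the parser's view of them before deciding how to decode. A minimal sketch of that pattern (assumes the vbp_* API exactly as used in this file; parse_one_buffer is a hypothetical helper, error handling trimmed to the same outcomes as above):

// Illustrative only: one parse/query round trip for a single buffer.
static MIX_RESULT parse_one_buffer(void *parser_handle,
                                   guint8 *buf, guint32 size,
                                   vbp_data_mp42 **data_out) {
    uint32 pret = vbp_parse(parser_handle, buf, size, FALSE); // FALSE: not config data
    if (pret != VBP_OK)
        return MIX_RESULT_DROPFRAME;          // caller drops this buffer
    pret = vbp_query(parser_handle, (void **)data_out);
    if (pret != VBP_OK || *data_out == NULL)
        return MIX_RESULT_FAIL;               // parser state is unusable
    return MIX_RESULT_SUCCESS;
}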
+ + if ((pret != VBP_OK) || (data == NULL)) { + // should never happen! + ret = MIX_RESULT_FAIL; + LOG_E( "vbp_query failed.\n"); + goto CLEAN_UP; + } else { + LOG_V("vbp_query succeeded.\n"); + } + + if (this->va_initialized == FALSE) { + _update_config_params(data); + LOG_V("try initializing VA...\n"); + ret = _initialize_va(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("_initialize_va failed.\n"); + goto CLEAN_UP; + } + } + + // check if any slice is parsed, we may just receive configuration data + if (data->number_picture_data == 0) { + ret = MIX_RESULT_SUCCESS; + LOG_V("slice is not available.\n"); + goto CLEAN_UP; + } + + last_ts = this->current_timestamp; + this->current_timestamp = ts; + this->discontinuity_frame_in_progress = discontinuity; + + if (last_ts != ts) { + // finish decoding the last frame + ret = _decode_end(FALSE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("_decode_end failed.\n"); + goto CLEAN_UP; + } + + // start decoding a new frame + ret = _decode_begin(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("_decode_begin failed.\n"); + goto CLEAN_UP; + } + } else { + ret = _decode_continue(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("_decode_continue failed.\n"); + goto CLEAN_UP; + } + } + +CLEAN_UP: + LOG_V( "End\n"); + return ret; +} + + +MIX_RESULT MixVideoFormat_MP42::Initialize( + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display ) { + + uint32 pret = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + enum _vbp_parser_type ptype = VBP_MPEG4; + vbp_data_mp42 *data = NULL; + MixIOVec *header = NULL; + + if (config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL) { + LOG_E( "NULL pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + // chain up parent method + ret = MixVideoFormat::Initialize(config_params, frame_mgr, input_buf_pool, + surface_pool, va_display); + + if (ret != MIX_RESULT_SUCCESS){ + LOG_E( "Error initializing\n"); + return ret; + } + + LOG_V( "Locking\n"); + //From now on, we exit this function through cleanup: + Lock(); + + this->surfacepool = mix_surfacepool_new(); + *surface_pool = this->surfacepool; + + if (this->surfacepool == NULL) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "this->surfacepool == NULL.\n"); + goto CLEAN_UP; + } + + ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, + &this->extra_surfaces); + + if (ret != MIX_RESULT_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get extra surface allocation setting\n"); + goto CLEAN_UP; + } + + //Load the bitstream parser + pret = vbp_open(ptype, &(this->parser_handle)); + + if (pret != VBP_OK) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error opening parser\n"); + goto CLEAN_UP; + } + LOG_V( "Opened parser\n");
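Initialization is deliberately lazy in this decoder: as the branch below notes, when no codec configuration data is supplied here, VA bring-up is deferred until the first parsed buffer in _decode_a_buffer. A sketch of that ordering (EnsureVaReady is a hypothetical helper, shown as if it were a private method of this class):

// Hypothetical helper (not in the patch): bring up VA exactly once,
// only after the parser has produced usable codec data.
MIX_RESULT MixVideoFormat_MP42::EnsureVaReady(vbp_data_mp42 *data) {
    if (this->va_initialized)
        return MIX_RESULT_SUCCESS;   // already brought up earlier
    _update_config_params(data);     // width/height/aspect must be set first
    return _initialize_va(data);     // then vaCreateConfig/Surfaces/Context
}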
+ + + ret = mix_videoconfigparamsdec_get_header(config_params, &header); + if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) { + // Delay initializing VA if codec configuration data is not ready, but don't return an error. + ret = MIX_RESULT_SUCCESS; + LOG_W( "Codec data is not available in the configuration parameter.\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling parse on header data, handle %d\n", (int)this->parser_handle); + + pret = vbp_parse(this->parser_handle, header->data, + header->data_size, TRUE); + + if (pret != VBP_OK) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error parsing header data\n"); + goto CLEAN_UP; + } + + LOG_V( "Parsed header\n"); + + //Get the header data and save + pret = vbp_query(this->parser_handle, (void **)&data); + + if ((pret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error reading parsed header data\n"); + goto CLEAN_UP; + } + + LOG_V( "Queried parser for header data\n"); + + _update_config_params(data); + + ret = _initialize_va(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error initializing VA.\n"); + goto CLEAN_UP; + } + +CLEAN_UP: + if (ret != MIX_RESULT_SUCCESS) { + if (this->parser_handle) { + pret = vbp_close(this->parser_handle); + this->parser_handle = NULL; + } + this->initialized = FALSE; + } else { + this->initialized = TRUE; + } + if (header != NULL) { + if (header->data != NULL) + g_free(header->data); + g_free(header); + header = NULL; + } + LOG_V( "Unlocking\n"); + Unlock(); + return ret; +} + +MIX_RESULT MixVideoFormat_MP42::Decode( + MixBuffer * bufin[], gint bufincnt, MixVideoDecodeParams * decode_params) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + guint64 ts = 0; + gboolean discontinuity = FALSE; + + LOG_V( "Begin\n"); + + if (bufin == NULL || decode_params == NULL ) { + LOG_E( "NULL pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + /* Chainup parent method. + We are not chaining up to parent method for now. + */ +#if 0 + if (parent_class->decode) { + return parent_class->decode(mix, bufin, bufincnt, decode_params); + } +#endif + ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); + if (ret != MIX_RESULT_SUCCESS) { + // should never happen + return MIX_RESULT_FAIL; + } + + ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); + if (ret != MIX_RESULT_SUCCESS) { + // should never happen + return MIX_RESULT_FAIL; + } + + //From now on, we exit this function through cleanup: + LOG_V( "Locking\n"); + Lock(); + + LOG_I("ts after mix_videodecodeparams_get_timestamp() = %"G_GINT64_FORMAT"\n", ts); + + for (int i = 0; i < bufincnt; i++) { + LOG_V("decoding buffer %d of %d\n", i, bufincnt); + // decode a buffer at a time + ret = _decode_a_buffer(bufin[i], ts, discontinuity); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("_decode_a_buffer failed.\n"); + break; + } + } + + LOG_V( "Unlocking\n"); + Unlock(); + LOG_V( "End\n"); + return ret; +} + + +MIX_RESULT MixVideoFormat_MP42::Flush() { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V("Begin\n"); + + Lock(); + // drop any decode-pending picture, and ignore return value + _decode_end(TRUE); + + /* + * Clear parse_in_progress flag and current timestamp + */ + this->parse_in_progress = FALSE; + this->discontinuity_frame_in_progress = FALSE; + this->current_timestamp = (guint64)-1; + this->next_nvop_for_PB_frame = FALSE; + + for (gint idx = 0; idx < 2; idx++) { + if (this->reference_frames[idx] != NULL) { + mix_videoframe_unref(this->reference_frames[idx]); + this->reference_frames[idx] = NULL; + } + } + if (this->last_frame) { + mix_videoframe_unref(this->last_frame); + this->last_frame = NULL; + } + + /* Call parser flush */ + vbp_flush(this->parser_handle); + Unlock(); + LOG_V("End\n"); + return ret; +} + +MIX_RESULT
MixVideoFormat_MP42::EndOfStream() { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V("Begin\n"); + Lock(); + _decode_end(FALSE); + ret = mix_framemanager_eos(this->framemgr); + Unlock(); + LOG_V("End\n"); + return ret; +} + + +MIX_RESULT MixVideoFormat_MP42::_handle_ref_frames( + enum _picture_type frame_type, MixVideoFrame * current_frame) { + LOG_V("Begin\n"); + if (current_frame == NULL) { + return MIX_RESULT_NULL_PTR; + } + switch (frame_type) { + case MP4_VOP_TYPE_I: + case MP4_VOP_TYPE_P: + LOG_V("Refing reference frame %x\n", (guint) current_frame); + mix_videoframe_ref(current_frame); + + /* should only happen on first frame */ + if (this->reference_frames[0] == NULL) { + this->reference_frames[0] = current_frame; + /* should only happen on second frame */ + } else if (this->reference_frames[1] == NULL) { + this->reference_frames[1] = current_frame; + } else { + LOG_V("Releasing reference frame %x\n", + (guint) this->reference_frames[0]); + mix_videoframe_unref(this->reference_frames[0]); + this->reference_frames[0] = this->reference_frames[1]; + this->reference_frames[1] = current_frame; + } + break; + case MP4_VOP_TYPE_B: + case MP4_VOP_TYPE_S: + default: + break; + + } + + LOG_V("End\n"); + + return MIX_RESULT_SUCCESS; +} + diff --git a/mix_video/src/mixvideoformat_mp42.h b/mix_video/src/mixvideoformat_mp42.h index fc80c95..31267c2 100644 --- a/mix_video/src/mixvideoformat_mp42.h +++ b/mix_video/src/mixvideoformat_mp42.h @@ -12,64 +12,60 @@ #include "mixvideoformat.h" #include "mixvideoframe_private.h" -G_BEGIN_DECLS - //Note: this is only a max limit. Real number of surfaces allocated is calculated in mix_videoformat_mp42_initialize() #define MIX_VIDEO_MP42_SURFACE_NUM 8 /* * Type macros. */ -#define MIX_TYPE_VIDEOFORMAT_MP42 (mix_videoformat_mp42_get_type ()) -#define MIX_VIDEOFORMAT_MP42(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMAT_MP42, MixVideoFormat_MP42)) -#define MIX_IS_VIDEOFORMAT_MP42(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMAT_MP42)) -#define MIX_VIDEOFORMAT_MP42_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMAT_MP42, MixVideoFormat_MP42Class)) -#define MIX_IS_VIDEOFORMAT_MP42_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMAT_MP42)) -#define MIX_VIDEOFORMAT_MP42_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMAT_MP42, MixVideoFormat_MP42Class)) - -typedef struct _MixVideoFormat_MP42 MixVideoFormat_MP42; -typedef struct _MixVideoFormat_MP42Class MixVideoFormat_MP42Class; - -struct _MixVideoFormat_MP42 { - /*< public > */ - MixVideoFormat parent; - +#define MIX_VIDEOFORMAT_MP42(obj) (reinterpret_cast(obj)) +#define MIX_IS_VIDEOFORMAT_MP42(obj) (NULL != MIX_VIDEOFORMAT_MP42(obj)) + +class MixVideoFormat_MP42 : public MixVideoFormat { +public: + MixVideoFormat_MP42(); + virtual ~MixVideoFormat_MP42(); + + virtual MIX_RESULT Initialize( + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); + virtual MIX_RESULT Decode( + MixBuffer * bufin[], gint bufincnt, + MixVideoDecodeParams * decode_params); + virtual MIX_RESULT Flush(); + virtual MIX_RESULT EndOfStream(); + +private: + MIX_RESULT _handle_ref_frames( + enum _picture_type frame_type, MixVideoFrame * current_frame); + MIX_RESULT _release_input_buffers(guint64 timestamp); + MIX_RESULT _update_config_params(vbp_data_mp42 *data); + MIX_RESULT _initialize_va(vbp_data_mp42 *data); + MIX_RESULT _decode_a_slice( + 
vbp_data_mp42* data, vbp_picture_data_mp42* pic_data); + MIX_RESULT _decode_end(gboolean drop_picture); + MIX_RESULT _decode_continue(vbp_data_mp42 *data); + MIX_RESULT _decode_begin(vbp_data_mp42* data); + MIX_RESULT _decode_a_buffer( + MixBuffer * bufin, guint64 ts, gboolean discontinuity); + +public: /*< public > */ - - /*< private > */ MixVideoFrame * reference_frames[2]; MixVideoFrame * last_frame; gint last_vop_coding_type; guint last_vop_time_increment; - /* indicate if future n-vop is a placeholder of a packed frame */ - gboolean next_nvop_for_PB_frame; - - /* indicate if iq_matrix_buffer is sent to driver */ - gboolean iq_matrix_buf_sent; -}; - -/** - * MixVideoFormat_MP42Class: - * - * MI-X Video object class - */ -struct _MixVideoFormat_MP42Class { - /*< public > */ - MixVideoFormatClass parent_class; + /* indicate if future n-vop is a placeholder of a packed frame */ + gboolean next_nvop_for_PB_frame; -/* class members */ - -/*< public > */ + /* indicate if iq_matrix_buffer is sent to driver */ + gboolean iq_matrix_buf_sent; }; -/** - * mix_videoformat_mp42_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_videoformat_mp42_get_type(void); /** * mix_videoformat_mp42_new: @@ -94,33 +90,6 @@ MixVideoFormat_MP42 *mix_videoformat_mp42_ref(MixVideoFormat_MP42 * mix); * * Decrement reference count of the object. */ -#define mix_videoformat_mp42_unref(obj) g_object_unref (G_OBJECT(obj)) - -/* Class Methods */ - -/* MP42 vmethods */ -MIX_RESULT mix_videofmt_mp42_getcaps(MixVideoFormat *mix, GString *msg); -MIX_RESULT mix_videofmt_mp42_initialize(MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, MixSurfacePool ** surface_pool, - VADisplay va_display); -MIX_RESULT mix_videofmt_mp42_decode(MixVideoFormat *mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params); -MIX_RESULT mix_videofmt_mp42_flush(MixVideoFormat *mix); -MIX_RESULT mix_videofmt_mp42_eos(MixVideoFormat *mix); -MIX_RESULT mix_videofmt_mp42_deinitialize(MixVideoFormat *mix); - -/* Local Methods */ - -MIX_RESULT mix_videofmt_mp42_handle_ref_frames(MixVideoFormat *mix, - enum _picture_type frame_type, MixVideoFrame * current_frame); - -MIX_RESULT mix_videofmt_mp42_process_decode(MixVideoFormat *mix, - vbp_data_mp42 *data, guint64 timestamp, gboolean discontinuity); - -MIX_RESULT mix_videofmt_mp42_release_input_buffers(MixVideoFormat *mix, - guint64 timestamp); - -G_END_DECLS +MixVideoFormat_MP42 *mix_videoformat_mp42_unref(MixVideoFormat_MP42 * mix); #endif /* __MIX_VIDEOFORMAT_MP42_H__ */ diff --git a/mix_video/src/mixvideoformat_vc1.c b/mix_video/src/mixvideoformat_vc1.c deleted file mode 100644 index 9f21a5a..0000000 --- a/mix_video/src/mixvideoformat_vc1.c +++ /dev/null @@ -1,1813 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ -#include -#include "mixvideolog.h" - -#include "mixvideoformat_vc1.h" -#ifndef ANDROID -#include -#endif - -#ifdef YUVDUMP -//TODO Complete YUVDUMP code and move into base class -#include -#endif /* YUVDUMP */ - -#include - - -#ifdef MIX_LOG_ENABLE -static int mix_video_vc1_counter = 0; -#endif - -/* The parent class. The pointer will be saved - * in this class's initialization. The pointer - * can be used for chaining method call if needed. - */ -static MixVideoFormatClass *parent_class = NULL; - -static void mix_videoformat_vc1_finalize(GObject * obj); - -/* - * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMAT - */ -G_DEFINE_TYPE (MixVideoFormat_VC1, mix_videoformat_vc1, MIX_TYPE_VIDEOFORMAT); - -static void mix_videoformat_vc1_init(MixVideoFormat_VC1 * self) { - MixVideoFormat *parent = MIX_VIDEOFORMAT(self); - - /* public member initialization */ - /* These are all public because MixVideoFormat objects are completely internal to MixVideo, - no need for private members */ - self->reference_frames[0] = NULL; - self->reference_frames[1] = NULL; - - /* NOTE: we don't need to do this here. - * This just demostrates how to access - * member varibles beloned to parent - */ - parent->initialized = FALSE; -} - -static void mix_videoformat_vc1_class_init( - MixVideoFormat_VC1Class * klass) { - - /* root class */ - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* direct parent class */ - MixVideoFormatClass *video_format_class = - MIX_VIDEOFORMAT_CLASS(klass); - - /* parent class for later use */ - parent_class = g_type_class_peek_parent(klass); - - /* setup finializer */ - gobject_class->finalize = mix_videoformat_vc1_finalize; - - /* setup vmethods with base implementation */ - /* This is where we can override base class methods if needed */ - video_format_class->getcaps = mix_videofmt_vc1_getcaps; - video_format_class->initialize = mix_videofmt_vc1_initialize; - video_format_class->decode = mix_videofmt_vc1_decode; - video_format_class->flush = mix_videofmt_vc1_flush; - video_format_class->eos = mix_videofmt_vc1_eos; - video_format_class->deinitialize = mix_videofmt_vc1_deinitialize; -} - -MixVideoFormat_VC1 * -mix_videoformat_vc1_new(void) { - MixVideoFormat_VC1 *ret = - g_object_new(MIX_TYPE_VIDEOFORMAT_VC1, NULL); - - return ret; -} - -void mix_videoformat_vc1_finalize(GObject * obj) { - gint32 pret = VBP_OK; - - /* clean up here. 
*/ - - MixVideoFormat *parent = NULL; - MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(obj); - GObjectClass *root_class = (GObjectClass *) parent_class; - - parent = MIX_VIDEOFORMAT(self); - - g_mutex_lock(parent->objectlock); - - //surfacepool is deallocated by parent - //inputbufqueue is deallocated by parent - //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces - - //Unref our reference frames - int i = 0; - for (; i < 2; i++) - { - if (self->reference_frames[i] != NULL) - { - mix_videoframe_unref(self->reference_frames[i]); - self->reference_frames[i] = NULL; - } - } - - //Reset state - parent->initialized = TRUE; - parent->parse_in_progress = FALSE; - parent->discontinuity_frame_in_progress = FALSE; - parent->current_timestamp = (guint64)-1; - - //Close the parser - pret = vbp_close(parent->parser_handle); - parent->parser_handle = NULL; - if (pret != VBP_OK) - { - LOG_E( "Error closing parser\n"); - } - - g_mutex_unlock(parent->objectlock); - - /* Chain up parent */ - if (root_class->finalize) { - root_class->finalize(obj); - } -} - -MixVideoFormat_VC1 * -mix_videoformat_vc1_ref(MixVideoFormat_VC1 * mix) { - return (MixVideoFormat_VC1 *) g_object_ref(G_OBJECT(mix)); -} - -/* VC1 vmethods implementation */ -MIX_RESULT mix_videofmt_vc1_getcaps(MixVideoFormat *mix, GString *msg) { - - MIX_RESULT ret = MIX_RESULT_NOTIMPL; - -//This method is reserved for future use - - if (mix == NULL || msg == NULL) - { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - /* Chainup parent method. - */ - - if (parent_class->getcaps) { - ret = parent_class->getcaps(mix, msg); - } - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_videofmt_vc1_update_seq_header( - MixVideoConfigParamsDec* config_params, - MixIOVec *header) -{ - guint width = 0; - guint height = 0; - - guint i = 0; - guchar* p = NULL; - MIX_RESULT res = MIX_RESULT_SUCCESS; - - if (!config_params || !header) - { - LOG_E( "NUll pointer passed in\n"); - return (MIX_RESULT_NULL_PTR); - } - - p = header->data; - - res = mix_videoconfigparamsdec_get_picture_res( - config_params, - &width, - &height); - - if (MIX_RESULT_SUCCESS != res) - { - return res; - } - - /* Check for start codes. If one exist, then this is VC-1 and not WMV. */ - while (i < header->data_size - 2) - { - if ((p[i] == 0) && - (p[i + 1] == 0) && - (p[i + 2] == 1)) - { - return MIX_RESULT_SUCCESS; - } - i++; - } - - p = g_malloc0(header->data_size + 9); - - if (!p) - { - LOG_E( "Cannot allocate memory\n"); - return MIX_RESULT_NO_MEMORY; - } - - /* If we get here we have 4+ bytes of codec data that must be formatted */ - /* to pass through as an RCV sequence header. */ - p[0] = 0; - p[1] = 0; - p[2] = 1; - p[3] = 0x0f; /* Start code. 
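(Illustrative aside: the wrapper being assembled here is easier to read as a byte map, and the same framing can be written as a standalone helper. Everything below is a sketch in this file's conventions, not part of the change; build_rcv_header is a hypothetical name, and the caller would own the returned buffer.)

// RCV-style sequence header framing for WMV codec data:
//   00 00 01 0F | width_hi width_lo | height_hi height_lo | codec data | 80
static guchar *build_rcv_header(const guchar *codec_data, guint size,
                                guint width, guint height, guint *out_size) {
    guchar *q = (guchar *)g_malloc0(size + 9);
    if (q == NULL) return NULL;
    q[0] = 0x00; q[1] = 0x00; q[2] = 0x01; q[3] = 0x0f;    // start code
    q[4] = (width >> 8) & 0xff;  q[5] = width & 0xff;      // big-endian width
    q[6] = (height >> 8) & 0xff; q[7] = height & 0xff;     // big-endian height
    memcpy(q + 8, codec_data, size);                       // original payload
    q[size + 8] = 0x80;                                    // trailing marker byte
    *out_size = size + 9;
    return q;
}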
*/ - - p[4] = (width >> 8) & 0x0ff; - p[5] = width & 0x0ff; - p[6] = (height >> 8) & 0x0ff; - p[7] = height & 0x0ff; - - memcpy(p + 8, header->data, header->data_size); - *(p + header->data_size + 8) = 0x80; - - g_free(header->data); - header->data = p; - header->data_size = header->data_size + 9; - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videofmt_vc1_update_config_params( - MixVideoFormat *mix, - vbp_data_vc1 *data) -{ - MixVideoFormat *parent = MIX_VIDEOFORMAT(mix); - - if (parent->picture_width == 0 || parent->picture_height == 0) - { - parent->picture_width = data->se_data->CODED_WIDTH; - parent->picture_height = data->se_data->CODED_HEIGHT; - - mix_videoconfigparamsdec_set_picture_res( - mix->config_params, - parent->picture_width, - parent->picture_height); - } - - - // scaling has been performed on the decoded image. - mix_videoconfigparamsdec_set_video_range(mix->config_params, 1); - - uint8 color_matrix; - - switch (data->se_data->MATRIX_COEF) - { - case 1: - color_matrix = VA_SRC_BT709; - break; - - // ITU-R BT.1700, ITU-R BT.601-5, and SMPTE 293M-1996. - case 6: - color_matrix = VA_SRC_BT601; - break; - - default: - // unknown color matrix, set to 0 so color space flag will not be set. - color_matrix = 0; - break; - } - mix_videoconfigparamsdec_set_color_matrix(mix->config_params, color_matrix); - - mix_videoconfigparamsdec_set_pixel_aspect_ratio( - mix->config_params, - data->se_data->ASPECT_HORIZ_SIZE, - data->se_data->ASPECT_VERT_SIZE); - - return MIX_RESULT_SUCCESS; - -} - - - -MIX_RESULT mix_videofmt_vc1_initialize(MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - - uint32 pret = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - enum _vbp_parser_type ptype = VBP_VC1; - vbp_data_vc1 *data = NULL; - MixVideoFormat *parent = NULL; - MixVideoFormat_VC1 *self = NULL; - MixIOVec *header = NULL; - gint numprofs = 0, numactualprofs = 0; - gint numentrypts = 0, numactualentrypts = 0; - VADisplay vadisplay = NULL; - VAProfile *profiles = NULL; - VAEntrypoint *entrypts = NULL; - VAConfigAttrib attrib; - VAStatus vret = VA_STATUS_SUCCESS; - guint extra_surfaces = 0; - VASurfaceID *surfaces = NULL; - guint numSurfaces = 0; - - //TODO Partition this method into smaller methods - - if (mix == NULL || config_params == NULL || frame_mgr == NULL || !input_buf_pool || !surface_pool || !va_display) - { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - /* Chainup parent method. 
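(Aside, for orientation: the vbp_* calls used throughout this function follow a fixed open/parse/query/close lifecycle. A condensed sketch with error handling elided; the handle type is simplified to void* here, so treat the exact signatures as an assumption and defer to the vbp loader headers:)

void *parser = NULL;
vbp_open(VBP_VC1, &parser);                 // create a VC-1 parser instance
vbp_parse(parser, hdr, hdr_size, TRUE);     // TRUE: this is codec/config data
vbp_data_vc1 *meta = NULL;
vbp_query(parser, (void **)&meta);          // fetch parsed sequence metadata
// per frame: vbp_parse(parser, buf, size, FALSE); then vbp_query() again
vbp_close(parser);                          // done: release the parser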
- */ - - if (parent_class->initialize) { - ret = parent_class->initialize(mix, config_params, - frame_mgr, input_buf_pool, surface_pool, - va_display); - } - - if (ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - if (!MIX_IS_VIDEOFORMAT_VC1(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMAT(mix); - self = MIX_VIDEOFORMAT_VC1(mix); - - LOG_V( "Locking\n"); - //From now on, we exit this function through cleanup: - g_mutex_lock(parent->objectlock); - - //Load the bitstream parser - pret = vbp_open(ptype, &(parent->parser_handle)); - - if (!(pret == VBP_OK)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error opening parser\n"); - goto cleanup; - } - - LOG_V( "Opened parser\n"); - - ret = mix_videoconfigparamsdec_get_header(config_params, - &header); - - if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot get header data\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, - &extra_surfaces); - - if (ret != MIX_RESULT_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot get extra surface allocation setting\n"); - goto cleanup; - } - - LOG_V( "Calling parse on header data, handle %d\n", (int)parent->parser_handle); - - ret = mix_videofmt_vc1_update_seq_header( - config_params, - header); - if (ret != MIX_RESULT_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error updating sequence header\n"); - goto cleanup; - } - - pret = vbp_parse(parent->parser_handle, header->data, - header->data_size, TRUE); - - if (!((pret == VBP_OK) || (pret == VBP_DONE))) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error parsing header data, size %d\n", header->data_size); - goto cleanup; - } - - - LOG_V( "Parsed header\n"); - //Get the header data and save - pret = vbp_query(parent->parser_handle, (void *)&data); - - if ((pret != VBP_OK) || (data == NULL)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error reading parsed header data\n"); - goto cleanup; - } - - LOG_V( "Queried parser for header data\n"); - - mix_videofmt_vc1_update_config_params(parent, data); - - //Time for libva initialization - - vadisplay = parent->va_display; - - numprofs = vaMaxNumProfiles(vadisplay); - profiles = g_malloc(numprofs*sizeof(VAProfile)); - - if (!profiles) - { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating memory\n"); - goto cleanup; - } - - vret = vaQueryConfigProfiles(vadisplay, profiles, - &numactualprofs); - if (!(vret == VA_STATUS_SUCCESS)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing video driver\n"); - goto cleanup; - } - - //check the desired profile support - gint vaprof = 0; - - VAProfile profile; - switch (data->se_data->PROFILE) - { - case 0: - profile = VAProfileVC1Simple; - break; - - case 1: - profile = VAProfileVC1Main; - break; - - default: - profile = VAProfileVC1Advanced; - break; - } - - for (; vaprof < numactualprofs; vaprof++) - { - if (profiles[vaprof] == profile) - break; - } - if (vaprof >= numprofs || profiles[vaprof] != profile) - //Did not get the profile we wanted - { - ret = MIX_RESULT_FAIL; - LOG_E( "Profile not supported by driver\n"); - goto cleanup; - } - - numentrypts = vaMaxNumEntrypoints(vadisplay); - entrypts = g_malloc(numentrypts*sizeof(VAEntrypoint)); - - if (!entrypts) - { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating memory\n"); - goto cleanup; - } - - vret = vaQueryConfigEntrypoints(vadisplay, profiles[vaprof], - entrypts, &numactualentrypts); - if (!(vret == VA_STATUS_SUCCESS)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing driver\n"); - 
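// --- Illustrative sketch, not part of the patch: the profile check performed
// above, factored into a standalone helper (hypothetical name; the libva calls
// are real entry points). Returns FALSE when the driver cannot decode the
// profile signalled in the VC-1 sequence layer. ---
static gboolean vc1_profile_supported(VADisplay dpy, VAProfile want) {
    gboolean found = FALSE;
    gint n = 0;
    gint max = vaMaxNumProfiles(dpy);
    VAProfile *list = (VAProfile *)g_malloc(max * sizeof(VAProfile));
    if (list == NULL) return FALSE;
    if (vaQueryConfigProfiles(dpy, list, &n) == VA_STATUS_SUCCESS) {
        for (gint i = 0; i < n; i++) {
            if (list[i] == want) { found = TRUE; break; }
        }
    }
    g_free(list);
    return found;
}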
goto cleanup; - } - - gint vaentrypt = 0; - for (; vaentrypt < numactualentrypts; vaentrypt++) - { - if (entrypts[vaentrypt] == VAEntrypointVLD) - break; - } - if (vaentrypt >= numentrypts || entrypts[vaentrypt] != VAEntrypointVLD) - //Did not get the entrypt we wanted - { - ret = MIX_RESULT_FAIL; - LOG_E( "Entry point not supported by driver\n"); - goto cleanup; - } - - //We are requesting RT attributes - attrib.type = VAConfigAttribRTFormat; - - vret = vaGetConfigAttributes(vadisplay, profiles[vaprof], - entrypts[vaentrypt], &attrib, 1); - - //TODO Handle other values returned for RT format - // and check with requested format provided in config params - //Right now only YUV 4:2:0 is supported by libva - // and this is our default - if (((attrib.value & VA_RT_FORMAT_YUV420) == 0) || - vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing driver\n"); - goto cleanup; - } - - //Initialize and save the VA config ID - vret = vaCreateConfig(vadisplay, profiles[vaprof], - entrypts[vaentrypt], &attrib, 1, &(parent->va_config)); - - if (!(vret == VA_STATUS_SUCCESS)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing driver\n"); - goto cleanup; - } - - LOG_V( "Created libva config with profile %d\n", vaprof); - - //Check for loop filtering - if (data->se_data->LOOPFILTER == 1) - self->loopFilter = TRUE; - else - self->loopFilter = FALSE; - - LOG_V( "loop filter is %d, TFCNTRFLAG is %d\n", data->se_data->LOOPFILTER, data->se_data->TFCNTRFLAG); - - //Initialize the surface pool - - - if ((data->se_data->MAXBFRAMES > 0) || (data->se_data->PROFILE == 3) || (data->se_data->PROFILE == 1)) - //If Advanced profile, have to assume B frames may be present, since MAXBFRAMES is not valid for this prof - self->haveBframes = TRUE; - else - self->haveBframes = FALSE; - - //Calculate VC1 numSurfaces based on max number of B frames or - // MIX_VIDEO_VC1_SURFACE_NUM, whichever is less - - //Adding 1 to work around VBLANK issue - parent->va_num_surfaces = 1 + extra_surfaces + ((3 + (self->haveBframes ? 1 : 0) < - MIX_VIDEO_VC1_SURFACE_NUM) ? - (3 + (self->haveBframes ? 
1 : 0)) - : MIX_VIDEO_VC1_SURFACE_NUM); - - numSurfaces = parent->va_num_surfaces; - - parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); - - surfaces = parent->va_surfaces; - - if (surfaces == NULL) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot allocate temporary data\n"); - goto cleanup; - } - - vret = vaCreateSurfaces(vadisplay, parent->picture_width, - parent->picture_height, entrypts[vaentrypt], - numSurfaces, surfaces); - if (!(vret == VA_STATUS_SUCCESS)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error allocating surfaces\n"); - goto cleanup; - } - - parent->surfacepool = mix_surfacepool_new(); - *surface_pool = parent->surfacepool; - - if (parent->surfacepool == NULL) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing surface pool\n"); - goto cleanup; - } - - - ret = mix_surfacepool_initialize(parent->surfacepool, - surfaces, numSurfaces, vadisplay); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - default: - ret = MIX_RESULT_ALREADY_INIT; - LOG_E( "Error init failure\n"); - goto cleanup; - break; - } - - LOG_V( "Created %d libva surfaces, MAXBFRAMES is %d\n", numSurfaces, data->se_data->MAXBFRAMES); - - //Initialize and save the VA context ID - //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 - vret = vaCreateContext(vadisplay, parent->va_config, - parent->picture_width, parent->picture_height, - 0, surfaces, numSurfaces, - &(parent->va_context)); - if (!(vret == VA_STATUS_SUCCESS)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing video driver\n"); - goto cleanup; - } - - LOG_V( "Created libva context width %d, height %d\n", parent->picture_width, parent->picture_height); - - LOG_V( "mix_video vinfo: Content type %s, %s\n", (header->data_size > 8) ? "VC-1" : "WMV", (data->se_data->INTERLACE) ? "interlaced" : "progressive"); - LOG_V( "mix_video vinfo: Content width %d, height %d\n", parent->picture_width, parent->picture_height); - LOG_V( "mix_video vinfo: MAXBFRAMES %d (note that for Advanced profile, MAXBFRAMES can be zero and there still can be B frames in the content)\n", data->se_data->MAXBFRAMES); - LOG_V( "mix_video vinfo: PROFILE %d, LEVEL %d\n", data->se_data->PROFILE, data->se_data->LEVEL); - - - cleanup: - if (ret != MIX_RESULT_SUCCESS) { - pret = vbp_close(parent->parser_handle); - parent->parser_handle = NULL; - parent->initialized = FALSE; - - } else { - parent->initialized = TRUE; - } - - if (header != NULL) - { - if (header->data != NULL) - g_free(header->data); - g_free(header); - header = NULL; - } - - g_free(profiles); - g_free(entrypts); - - self->lastFrame = NULL; - - - LOG_V( "Unlocking\n"); - g_mutex_unlock(parent->objectlock); - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_videofmt_vc1_decode(MixVideoFormat *mix, - MixBuffer * bufin[], gint bufincnt, - MixVideoDecodeParams * decode_params) { - - uint32 pret = 0; - int i = 0; - MixVideoFormat *parent = NULL; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - guint64 ts = 0; - vbp_data_vc1 *data = NULL; - gboolean discontinuity = FALSE; - MixInputBufferEntry *bufentry = NULL; - - if (mix == NULL || bufin == NULL || decode_params == NULL ) - { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - //TODO remove iovout and iovoutcnt; they are not used (need to remove from MixVideo/MI-X API too) - - LOG_V( "Begin\n"); - - /* Chainup parent method. - We are not chaining up to parent method for now. 
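(Aside, illustrative: the surface budget computed in initialize above reduces to 1 + extra_surfaces + min(3 + haveBframes, MIX_VIDEO_VC1_SURFACE_NUM). Worked example: with 4 extra surfaces requested and B frames possible, that is 1 + 4 + min(4, 8) = 9 surfaces. The same arithmetic as a standalone helper, hypothetical name:)

static guint vc1_surface_count(guint extra_surfaces, gboolean have_bframes) {
    guint base = 3 + (have_bframes ? 1 : 0);   // refs + current + optional B
    if (base > MIX_VIDEO_VC1_SURFACE_NUM)      // clamp to the pool maximum
        base = MIX_VIDEO_VC1_SURFACE_NUM;
    return 1 + extra_surfaces + base;          // +1 works around the VBLANK issue
}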
- */ - -#if 0 - if (parent_class->decode) { - return parent_class->decode(mix, bufin, bufincnt, - decode_params); - } -#endif - - if (!MIX_IS_VIDEOFORMAT_VC1(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMAT(mix); - - - ret = mix_videodecodeparams_get_timestamp(decode_params, - &ts); - if (ret != MIX_RESULT_SUCCESS) - { - return MIX_RESULT_FAIL; - } - - ret = mix_videodecodeparams_get_discontinuity(decode_params, - &discontinuity); - if (ret != MIX_RESULT_SUCCESS) - { - return MIX_RESULT_FAIL; - } - - //From now on, we exit this function through cleanup: - - LOG_V( "Locking\n"); - g_mutex_lock(parent->objectlock); - - //If this is a new frame and we haven't retrieved parser - // workload data from previous frame yet, do so - if ((ts != parent->current_timestamp) && - (parent->parse_in_progress)) - { - - //query for data - pret = vbp_query(parent->parser_handle, - (void *) &data); - - if ((pret != VBP_OK) || (data == NULL)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing parser\n"); - goto cleanup; - } - - LOG_V( "Queried for last frame data\n"); - - //process and decode data - ret = mix_videofmt_vc1_process_decode(mix, - data, parent->current_timestamp, - parent->discontinuity_frame_in_progress); - - if (ret != MIX_RESULT_SUCCESS) - { - //We log this but need to process the new frame data, so do not return - LOG_E( "process_decode failed.\n"); - } - - LOG_V( "Called process and decode for last frame\n"); - - parent->parse_in_progress = FALSE; - - } - - parent->current_timestamp = ts; - parent->discontinuity_frame_in_progress = discontinuity; - - LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_vc1_counter++, ts); - - for (i = 0; i < bufincnt; i++) - { - - LOG_V( "Calling parse for current frame, parse handle %d, buf %x, size %d\n", (int)parent->parser_handle, (guint)bufin[i]->data, bufin[i]->size); - - pret = vbp_parse(parent->parser_handle, - bufin[i]->data, - bufin[i]->size, - FALSE); - - LOG_V( "Called parse for current frame\n"); - - if (pret == VBP_DONE) - { - //query for data - pret = vbp_query(parent->parser_handle, - (void *) &data); - - if ((pret != VBP_OK) || (data == NULL)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error getting parser data\n"); - goto cleanup; - } - - LOG_V( "Called query for current frame\n"); - - //Increase the ref count of this input buffer - mix_buffer_ref(bufin[i]); - - //Create a new MixInputBufferEntry - //TODO make this from a pool to optimize - bufentry = g_malloc(sizeof( - MixInputBufferEntry)); - if (bufentry == NULL) - { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating bufentry\n"); - goto cleanup; - } - - bufentry->buf = bufin[i]; - LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"G_GINT64_FORMAT"\n", (guint)bufentry, (guint)bufentry->buf, ts); - bufentry->timestamp = ts; - - LOG_V( "Enqueue this input buffer for current frame\n"); - LOG_V( "bufentry->timestamp %"G_GINT64_FORMAT"\n", bufentry->timestamp); - - //Enqueue this input buffer - g_queue_push_tail(parent->inputbufqueue, - (gpointer)bufentry); - - //process and decode data - ret = mix_videofmt_vc1_process_decode(mix, - data, ts, discontinuity); - - if (ret != MIX_RESULT_SUCCESS) - { - //We log this but continue since we need to complete our processing of input buffers - LOG_E( "Process_decode failed.\n"); - } - - LOG_V( "Called process and decode for current frame\n"); - - parent->parse_in_progress = FALSE; - } - else if (pret != VBP_OK) - { - //We log this but continue since we need to complete our processing of input 
buffers - LOG_E( "Parsing failed.\n"); - ret = MIX_RESULT_FAIL; - } - else - { - - LOG_V( "Enqueuing buffer and going on to next (if any) for this frame\n"); - - //Increase the ref count of this input buffer - mix_buffer_ref(bufin[i]); - - //Create a new MixInputBufferEntry - //TODO make this from a pool to optimize - bufentry = g_malloc(sizeof - (MixInputBufferEntry)); - if (bufentry == NULL) - { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating bufentry\n"); - goto cleanup; - } - bufentry->buf = bufin[i]; - bufentry->timestamp = ts; - - //Enqueue this input buffer - g_queue_push_tail(parent->inputbufqueue, - (gpointer)bufentry); - parent->parse_in_progress = TRUE; - } - - } - - - cleanup: - - LOG_V( "Unlocking\n"); - g_mutex_unlock(parent->objectlock); - - - LOG_V( "End\n"); - - return ret; -} - -#ifdef YUVDUMP -//TODO Complete this YUVDUMP code and move into base class - -MIX_RESULT GetImageFromSurface (MixVideoFormat *mix, MixVideoFrame * frame) - -{ - - VAStatus vaStatus = VA_STATUS_SUCCESS; - VAImageFormat va_image_format; - VAImage va_image; - - unsigned char* pBuffer; - unsigned int ui32SrcWidth = mix->picture_width; - unsigned int ui32SrcHeight = mix->picture_height; - unsigned int ui32Stride; - unsigned int ui32ChromaOffset; - FILE *fp = NULL; - int r = 0; - - int i; - - g_print ("GetImageFromSurface \n"); - - if ((mix == NULL) || (frame == NULL)) - { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - fp = fopen("yuvdump.yuv", "a+"); - - static int have_va_image = 0; - - if (!have_va_image) - { - va_image_format.fourcc = VA_FOURCC_NV12; -// va_image_format.fourcc = VA_FOURCC_YV12; - - vaStatus = vaCreateImage(mix->va_display, &va_image_format, ui32SrcWidth, ui32SrcHeight, &va_image); - have_va_image = 1; - } - - vaStatus = vaGetImage( mix->va_display, frame->frame_id, 0, 0, ui32SrcWidth, ui32SrcHeight, va_image.image_id ); - vaStatus = vaMapBuffer( mix->va_display, va_image.buf, (void **) &pBuffer); - ui32ChromaOffset = va_image.offsets[1]; - ui32Stride = va_image.pitches[0]; - - if (VA_STATUS_SUCCESS != vaStatus) - { - g_print ("VideoProcessBlt: Unable to copy surface\n\r"); - return vaStatus; - } - - { - g_print ("before copy memory....\n"); - g_print ("width = %d, height = %d\n", ui32SrcWidth, ui32SrcHeight); - g_print ("data_size = %d\n", va_image.data_size); - g_print ("num_planes = %d\n", va_image.num_planes); - g_print ("va_image.pitches[0] = %d\n", va_image.pitches[0]); - g_print ("va_image.pitches[1] = %d\n", va_image.pitches[1]); - g_print ("va_image.pitches[2] = %d\n", va_image.pitches[2]); - g_print ("va_image.offsets[0] = %d\n", va_image.offsets[0]); - g_print ("va_image.offsets[1] = %d\n", va_image.offsets[1]); - g_print ("va_image.offsets[2] = %d\n", va_image.offsets[2]); -// r = fwrite (pBuffer, 1, va_image.offsets[1], fp); - - r = fwrite (pBuffer, va_image.offsets[1], 1, fp); - - for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2) - r = fwrite (pBuffer + va_image.offsets[1] + i / 2, 1, 1, fp); - - for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2) - r = fwrite (pBuffer + va_image.offsets[1] + i / 2 + 1, 1, 1, fp); - - g_print ("ui32ChromaOffset = %d, ui32Stride = %d\n", ui32ChromaOffset, ui32Stride); - - } - - vaStatus = vaUnmapBuffer( mix->va_display, va_image.buf); - - return vaStatus; - -} -#endif /* YUVDUMP */ - - -MIX_RESULT mix_videofmt_vc1_decode_a_picture( - MixVideoFormat* mix, - vbp_data_vc1 *data, - int pic_index, - MixVideoFrame *frame) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = 
VA_STATUS_SUCCESS; - VADisplay vadisplay = NULL; - VAContextID vacontext; - guint buffer_id_cnt = 0; - VABufferID *buffer_ids = NULL; - MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(mix); - - vbp_picture_data_vc1* pic_data = &(data->pic_data[pic_index]); - VAPictureParameterBufferVC1 *pic_params = pic_data->pic_parms; - - if (pic_params == NULL) - { - ret = MIX_RESULT_NULL_PTR; - LOG_E( "Error reading parser data\n"); - goto cleanup; - } - - LOG_V( "num_slices is %d, allocating %d buffer_ids\n", pic_data->num_slices, (pic_data->num_slices * 2) + 2); - - //Set up reference frames for the picture parameter buffer - - //Set the picture type (I, B or P frame) - enum _picture_type frame_type = pic_params->picture_fields.bits.picture_type; - - - //Check for B frames after a seek - //We need to have both reference frames in hand before we can decode a B frame - //If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME - //Note: demuxer should do the right thing and only seek to I frame, so we should - // not get P frame first, but may get B frames after the first I frame - if (frame_type == VC1_PTYPE_B) - { - if (self->reference_frames[1] == NULL) - { - LOG_E( "Insufficient reference frames for B frame\n"); - ret = MIX_RESULT_DROPFRAME; - goto cleanup; - } - } - - buffer_ids = g_malloc(sizeof(VABufferID) * ((pic_data->num_slices * 2) + 2)); - if (buffer_ids == NULL) - { - LOG_E( "Cannot allocate buffer IDs\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_V( "Getting a new surface\n"); - LOG_V( "frame type is %d\n", frame_type); - - gulong surface = 0; - - //Get our surface ID from the frame object - ret = mix_videoframe_get_frame_id(frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error getting surface ID from frame object\n"); - goto cleanup; - } - - //Get a frame from the surface pool - - if (0 == pic_index) - { - //Set the frame type for the frame object (used in reordering by frame manager) - switch (frame_type) - { - case VC1_PTYPE_I: // I frame type - case VC1_PTYPE_P: // P frame type - case VC1_PTYPE_B: // B frame type - ret = mix_videoframe_set_frame_type(frame, frame_type); - break; - case VC1_PTYPE_BI: // BI frame type - ret = mix_videoframe_set_frame_type(frame, TYPE_B); - break; - //Not indicated here case VC1_PTYPE_SKIPPED: - default: - break; - } - } - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error setting frame type on frame\n"); - goto cleanup; - } - - LOG_V( "Setting reference frames in picparams, frame_type = %d\n", frame_type); - - //TODO Check if we need to add more handling of B or P frames when reference frames are not set up (such as after flush/seek) - - switch (frame_type) - { - case VC1_PTYPE_I: // I frame type - /* forward and backward reference pictures are not used but just set to current - surface to be in consistence with test suite - */ - pic_params->forward_reference_picture = surface; - pic_params->backward_reference_picture = surface; - LOG_V( "I frame, surface ID %u\n", (guint)frame->frame_id); - LOG_V( "mix_video vinfo: Frame type is I\n"); - break; - case VC1_PTYPE_P: // P frame type - - // check REFDIST in the picture parameter buffer - if (0 != pic_params->reference_fields.bits.reference_distance_flag && - 0 != pic_params->reference_fields.bits.reference_distance) - { - /* The previous decoded frame (distance is up to 16 but not 0) is used - for reference, as we don't allocate that many surfaces so the reference picture - could have been overwritten and hence not avaiable for reference. 
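(Aside, illustrative: the switch below wires the picture-parameter reference fields per picture type. Condensed into a hypothetical helper; fwd/bwd stand for the surfaces of the two most recent I/P frames, cur for the target surface:)

static void set_vc1_refs(VAPictureParameterBufferVC1 *pp, enum _picture_type t,
                         VASurfaceID cur, VASurfaceID fwd, VASurfaceID bwd) {
    switch (t) {
    case VC1_PTYPE_I:                          // intra: self-reference only
        pp->forward_reference_picture = cur;
        pp->backward_reference_picture = cur;
        break;
    case VC1_PTYPE_P:                          // predicted from the last I/P
        pp->forward_reference_picture = fwd;
        pp->backward_reference_picture = VA_INVALID_SURFACE;
        break;
    case VC1_PTYPE_B:                          // needs both I/P neighbours
        pp->forward_reference_picture = fwd;
        pp->backward_reference_picture = bwd;
        break;
    default:                                   // BI: intra-coded, no prediction
        pp->forward_reference_picture = VA_INVALID_SURFACE;
        pp->backward_reference_picture = VA_INVALID_SURFACE;
        break;
    }
}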
- */ - LOG_E( "reference distance is not 0!"); - ret = MIX_RESULT_DROPFRAME; - goto cleanup; - } - if (1 == pic_index) - { - // handle interlace field coding case - if (1 == pic_params->reference_fields.bits.num_reference_pictures || - 1 == pic_params->reference_fields.bits.reference_field_pic_indicator) - { - /* two reference fields or the second closest I/P field is used for - prediction. Set forward reference picture to INVALID so it will be - updated to a valid previous reconstructed reference frame later. - */ - pic_params->forward_reference_picture = VA_INVALID_SURFACE; - } - else - { - /* the closest I/P is used for reference so it must be the - complementary field in the same surface. - */ - pic_params->forward_reference_picture = surface; - } - } - if (VA_INVALID_SURFACE == pic_params->forward_reference_picture) - { - if (self->reference_frames[1]) - { - pic_params->forward_reference_picture = self->reference_frames[1]->frame_id; - } - else if (self->reference_frames[0]) - { - pic_params->forward_reference_picture = self->reference_frames[0]->frame_id; - } - else - { - ret = MIX_RESULT_DROPFRAME; - LOG_E( "Error could not find reference frames for P frame\n"); - goto cleanup; - } - } - pic_params->backward_reference_picture = VA_INVALID_SURFACE; - -#ifdef MIX_LOG_ENABLE /* this is to fix a crash when MIX_LOG_ENABLE is set */ - if(self->reference_frames[0] && frame) { - LOG_V( "P frame, surface ID %u, forw ref frame is %u\n", - (guint)frame->frame_id, (guint)self->reference_frames[0]->frame_id); - } -#endif - LOG_V( "mix_video vinfo: Frame type is P\n"); - break; - - case VC1_PTYPE_B: // B frame type - LOG_V( "B frame, forw ref %d, back ref %d\n", (guint)self->reference_frames[0]->frame_id, (guint)self->reference_frames[1]->frame_id); - - if (!self->haveBframes) //We don't expect B frames and have not allocated a surface - // for the extra ref frame so this is an error - { - ret = MIX_RESULT_DROPFRAME; - LOG_E( "Unexpected B frame, cannot process\n"); - goto cleanup; - } - - pic_params->forward_reference_picture = self->reference_frames[0]->frame_id; - pic_params->backward_reference_picture = self->reference_frames[1]->frame_id; - - LOG_V( "B frame, surface ID %u, forw ref %d, back ref %d\n", (guint)frame->frame_id, (guint)self->reference_frames[0]->frame_id, (guint)self->reference_frames[1]->frame_id); - LOG_V( "mix_video vinfo: Frame type is B\n"); - break; - - case VC1_PTYPE_BI: - pic_params->forward_reference_picture = VA_INVALID_SURFACE; - pic_params->backward_reference_picture = VA_INVALID_SURFACE; - LOG_V( "BI frame\n"); - LOG_V( "mix_video vinfo: Frame type is BI\n"); - break; - - case VC1_PTYPE_SKIPPED: - //Will never happen here - break; - - default: - LOG_V( "Hit default\n"); - break; - - } - - //Loop filter handling - if (self->loopFilter) - { - LOG_V( "Setting in loop decoded picture to current frame\n"); - LOG_V( "Double checking picparams inloop filter is %d\n", pic_params->entrypoint_fields.bits.loopfilter); - pic_params->inloop_decoded_picture = frame->frame_id; - } - else - { - LOG_V( "Setting in loop decoded picture to invalid\n"); - pic_params->inloop_decoded_picture = VA_INVALID_SURFACE; - } - - //Libva buffer set up - - vadisplay = mix->va_display; - vacontext = mix->va_context; - - LOG_V( "Creating libva picture parameter buffer\n"); - - //First the picture parameter buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAPictureParameterBufferType, - sizeof(VAPictureParameterBufferVC1), - 1, - pic_params, - &buffer_ids[buffer_id_cnt]); - - 
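// --- Illustrative sketch, not part of the patch: once all buffers are
// created, submission to libva follows a strict begin/render/end order.
// The helper below is hypothetical; the three calls are real libva APIs
// and mirror what this function does further down. ---
static VAStatus submit_vc1_picture(VADisplay dpy, VAContextID ctx,
                                   VASurfaceID target, VABufferID *ids, int count) {
    VAStatus s = vaBeginPicture(dpy, ctx, target);  // bind the render target
    if (s != VA_STATUS_SUCCESS) return s;
    s = vaRenderPicture(dpy, ctx, ids, count);      // pic params, bitplane, slices
    if (s != VA_STATUS_SUCCESS) return s;
    return vaEndPicture(dpy, ctx);                  // submit; vaSyncSurface is deferred to render
}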
buffer_id_cnt++; - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto cleanup; - } - - LOG_V( "Creating libva bitplane buffer\n"); - - if (pic_params->bitplane_present.value) - { - //Then the bitplane buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VABitPlaneBufferType, - pic_data->size_bitplanes, - 1, - pic_data->packed_bitplanes, - &buffer_ids[buffer_id_cnt]); - - buffer_id_cnt++; - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto cleanup; - } - } - - //Now for slices - int i = 0; - for (; i < pic_data->num_slices; i++) - { - LOG_V( "Creating libva slice parameter buffer, for slice %d\n", i); - - //Do slice parameters - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceParameterBufferType, - sizeof(VASliceParameterBufferVC1), - 1, - &(pic_data->slc_data[i].slc_parms), - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto cleanup; - } - - buffer_id_cnt++; - - LOG_V( "Creating libva slice data buffer for slice %d, using slice address %x, with offset %d and size %u\n", i, (guint)pic_data->slc_data[i].buffer_addr, pic_data->slc_data[i].slc_parms.slice_data_offset, pic_data->slc_data[i].slice_size); - - - //Do slice data - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceDataBufferType, - //size - pic_data->slc_data[i].slice_size, - //num_elements - 1, - //slice data buffer pointer - //Note that this is the original data buffer ptr; - // offset to the actual slice data is provided in - // slice_data_offset in VASliceParameterBufferVC1 - pic_data->slc_data[i].buffer_addr + pic_data->slc_data[i].slice_offset, - &buffer_ids[buffer_id_cnt]); - - buffer_id_cnt++; - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto cleanup; - } - } - - - LOG_V( "Calling vaBeginPicture\n"); - - //Now we can begin the picture - vret = vaBeginPicture(vadisplay, vacontext, surface); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaBeginPicture\n"); - goto cleanup; - } - - LOG_V( "Calling vaRenderPicture\n"); - - //Render the picture - vret = vaRenderPicture( - vadisplay, - vacontext, - buffer_ids, - buffer_id_cnt); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaRenderPicture\n"); - goto cleanup; - } - - LOG_V( "Calling vaEndPicture\n"); - - //End picture - vret = vaEndPicture(vadisplay, vacontext); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaEndPicture\n"); - goto cleanup; - } - -#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ - LOG_V( "Calling vaSyncSurface\n"); - - //Decode the picture - vret = vaSyncSurface(vadisplay, surface); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaSyncSurface\n"); - goto cleanup; - } -#endif - -cleanup: - if (NULL != buffer_ids) - g_free(buffer_ids); - - return ret; -} - - -MIX_RESULT mix_videofmt_vc1_process_decode( - MixVideoFormat *mix, - vbp_data_vc1 *data, - guint64 timestamp, - gboolean discontinuity) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - gboolean unrefVideoFrame = FALSE; - MixVideoFrame *frame = NULL; - - //TODO 
Partition this method into smaller methods - - LOG_V( "Begin\n"); - - if ((mix == NULL) || (data == NULL)) - { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - if (0 == data->num_pictures || NULL == data->pic_data) - { - return MIX_RESULT_INVALID_PARAM; - } - - if (!MIX_IS_VIDEOFORMAT_VC1(mix)) - { - return MIX_RESULT_INVALID_PARAM; - } - - //After this point, all exits from this function are through cleanup: - MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(mix); - - //Check for skipped frame - //For skipped frames, we will reuse the last P or I frame surface and treat as P frame - if (data->pic_data[0].picture_is_skipped == VC1_PTYPE_SKIPPED) - { - - LOG_V( "mix_video vinfo: Frame type is SKIPPED\n"); - if (self->lastFrame == NULL) - { - //we shouldn't get a skipped frame before we are able to get a real frame - LOG_E( "Error for skipped frame, prev frame is NULL\n"); - ret = MIX_RESULT_DROPFRAME; - goto cleanup; - } - - //We don't worry about this memory allocation because SKIPPED is not a common case - //Doing the allocation on the fly is a more efficient choice than trying to manage yet another pool - MixVideoFrame *skip_frame = mix_videoframe_new(); - if (skip_frame == NULL) - { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating new video frame object for skipped frame\n"); - goto cleanup; - } - - mix_videoframe_set_is_skipped(skip_frame, TRUE); -// mix_videoframe_ref(skip_frame); - mix_videoframe_ref(self->lastFrame); - gulong frameid = VA_INVALID_SURFACE; - mix_videoframe_get_frame_id(self->lastFrame, &frameid); - mix_videoframe_set_frame_id(skip_frame, frameid); - mix_videoframe_set_frame_type(skip_frame, VC1_PTYPE_P); - mix_videoframe_set_real_frame(skip_frame, self->lastFrame); - mix_videoframe_set_timestamp(skip_frame, timestamp); - mix_videoframe_set_discontinuity(skip_frame, FALSE); - LOG_V( "Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n", (guint)skip_frame, (guint)frameid, timestamp); - - //Process reference frames - LOG_V( "Updating skipped frame forward/backward references for libva\n"); - mix_videofmt_vc1_handle_ref_frames(mix, - VC1_PTYPE_P, - skip_frame); - - //Enqueue the skipped frame using frame manager - ret = mix_framemanager_enqueue(mix->framemgr, skip_frame); - - goto cleanup; - - } - - ret = mix_surfacepool_get(mix->surfacepool, &frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error getting frame from surfacepool\n"); - goto cleanup; - - } - unrefVideoFrame = TRUE; - - // TO DO: handle multiple frames parsed from a sample buffer - int index; - int num_pictures = (data->num_pictures > 1) ? 
2 : 1; - - for (index = 0; index < num_pictures; index++) - { - ret = mix_videofmt_vc1_decode_a_picture(mix, data, index, frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Failed to decode a picture.\n"); - goto cleanup; - } - } - - //Set the discontinuity flag - mix_videoframe_set_discontinuity(frame, discontinuity); - - //Set the timestamp - mix_videoframe_set_timestamp(frame, timestamp); - - // setup frame structure - if (data->num_pictures > 1) - { - if (data->pic_data[0].pic_parms->picture_fields.bits.is_first_field) - mix_videoframe_set_frame_structure(frame, VA_TOP_FIELD); - else - mix_videoframe_set_frame_structure(frame, VA_BOTTOM_FIELD); - } - else - { - mix_videoframe_set_frame_structure(frame, VA_FRAME_PICTURE); - } - - enum _picture_type frame_type = data->pic_data[0].pic_parms->picture_fields.bits.picture_type; - - //For I or P frames - //Save this frame off for skipped frame handling - if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P)) - { - if (self->lastFrame != NULL) - { - mix_videoframe_unref(self->lastFrame); - } - self->lastFrame = frame; - mix_videoframe_ref(frame); - } - - //Update the references frames for the current frame - if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P)) //If I or P frame, update the reference array - { - LOG_V( "Updating forward/backward references for libva\n"); - mix_videofmt_vc1_handle_ref_frames(mix, - frame_type, - frame); - } - -//TODO Complete YUVDUMP code and move into base class -#ifdef YUVDUMP - if (mix_video_vc1_counter < 10) - ret = GetImageFromSurface (mix, frame); -// g_usleep(5000000); -#endif /* YUVDUMP */ - - LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", timestamp); - - //Enqueue the decoded frame using frame manager - ret = mix_framemanager_enqueue(mix->framemgr, frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error enqueuing frame object\n"); - goto cleanup; - } - unrefVideoFrame = FALSE; - - -cleanup: - - mix_videofmt_vc1_release_input_buffers(mix, timestamp); - if (unrefVideoFrame) - mix_videoframe_unref(frame); - - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_videofmt_vc1_flush(MixVideoFormat *mix) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (mix == NULL) - { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - uint32 pret = 0; - MixInputBufferEntry *bufentry = NULL; - - /* Chainup parent method. - We are not chaining up to parent method for now. 
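(Aside: note how the body below resets current_timestamp to (guint64)-1. That sentinel, the maximum 64-bit value, means the first Decode() call after a flush sees ts != current_timestamp for any realistic timestamp, so it starts a fresh frame instead of trying to continue the flushed one.)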
- */ - -#if 0 - if (parent_class->flush) - { - return parent_class->flush(mix, msg); - } -#endif - - MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(mix); - - g_mutex_lock(mix->objectlock); - - //Clear the contents of inputbufqueue - while (!g_queue_is_empty(mix->inputbufqueue)) - { - bufentry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue); - if (bufentry == NULL) - continue; - - mix_buffer_unref(bufentry->buf); - g_free(bufentry); - } - - //Clear parse_in_progress flag and current timestamp - mix->parse_in_progress = FALSE; - mix->discontinuity_frame_in_progress = FALSE; - mix->current_timestamp = (guint64)-1; - - int i = 0; - for (; i < 2; i++) - { - if (self->reference_frames[i] != NULL) - { - mix_videoframe_unref(self->reference_frames[i]); - self->reference_frames[i] = NULL; - } - } - - //Call parser flush - pret = vbp_flush(mix->parser_handle); - if (pret != VBP_OK) - ret = MIX_RESULT_FAIL; - - g_mutex_unlock(mix->objectlock); - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_videofmt_vc1_eos(MixVideoFormat *mix) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - vbp_data_vc1 *data = NULL; - uint32 pret = 0; - - if (mix == NULL) - { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - - /* Chainup parent method. - We are not chaining up to parent method for now. - */ - -#if 0 - if (parent_class->eos) - { - return parent_class->eos(mix, msg); - } -#endif - - g_mutex_lock(mix->objectlock); - - //if a frame is in progress, process the frame - if (mix->parse_in_progress) - { - //query for data - pret = vbp_query(mix->parser_handle, (void *) &data); - - if ((pret != VBP_OK) || (data == NULL)) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Error getting last parse data\n"); - goto cleanup; - } - - //process and decode data - ret = mix_videofmt_vc1_process_decode(mix, - data, mix->current_timestamp, - mix->discontinuity_frame_in_progress); - mix->parse_in_progress = FALSE; - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error processing last frame\n"); - goto cleanup; - } - } - -cleanup: - - g_mutex_unlock(mix->objectlock); - - //Call Frame Manager with _eos() - ret = mix_framemanager_eos(mix->framemgr); - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_videofmt_vc1_deinitialize(MixVideoFormat *mix) -{ - //Note this method is not called; may remove in future - if (mix == NULL) - { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - /* Chainup parent method. 
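(Aside: this override stays deliberately thin. Per the comments elsewhere in this file, the parent class owns the libva teardown - vaDestroyConfig, vaDestroyContext, vaDestroySurfaces - and the codec-specific state is released in finalize, so deinitialize only needs to chain up.)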
- */ - - if (parent_class->deinitialize) - { - return parent_class->deinitialize(mix); - } - - //Most stuff is cleaned up in parent_class->finalize() and in _finalize - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmt_vc1_handle_ref_frames( - MixVideoFormat *mix, - enum _picture_type frame_type, - MixVideoFrame * current_frame) -{ - - LOG_V( "Begin\n"); - - if (mix == NULL || current_frame == NULL) - { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(mix); - - - switch (frame_type) - { - case VC1_PTYPE_I: // I frame type - case VC1_PTYPE_P: // P frame type - LOG_V( "Refing reference frame %x\n", (guint) current_frame); - mix_videoframe_ref(current_frame); - - //If we have B frames, we need to keep forward and backward reference frames - if (self->haveBframes) - { - if (self->reference_frames[0] == NULL) //should only happen on first frame - { - self->reference_frames[0] = current_frame; -// self->reference_frames[1] = NULL; - } - else if (self->reference_frames[1] == NULL) //should only happen on second frame - { - self->reference_frames[1] = current_frame; - } - else - { - LOG_V( "Releasing reference frame %x\n", (guint) self->reference_frames[0]); - mix_videoframe_unref(self->reference_frames[0]); - self->reference_frames[0] = self->reference_frames[1]; - self->reference_frames[1] = current_frame; - } - } - else //No B frames in this content, only need to keep the forward reference frame - { - LOG_V( "Releasing reference frame %x\n", (guint) self->reference_frames[0]); - if (self->reference_frames[0] != NULL) - mix_videoframe_unref(self->reference_frames[0]); - self->reference_frames[0] = current_frame; - - } - break; - case VC1_PTYPE_B: // B or BI frame type (should not happen) - case VC1_PTYPE_BI: - default: - LOG_E( "Wrong frame type for handling reference frames\n"); - return MIX_RESULT_FAIL; - break; - - } - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmt_vc1_release_input_buffers( - MixVideoFormat *mix, - guint64 timestamp) -{ - MixInputBufferEntry *bufentry = NULL; - gboolean done = FALSE; - - LOG_V( "Begin\n"); - - if (mix == NULL) - return MIX_RESULT_NULL_PTR; - - //Dequeue and release all input buffers for this frame - - LOG_V( "Releasing all the MixBuffers for this frame\n"); - - //While the head of the queue has timestamp == current ts - //dequeue the entry, unref the MixBuffer, and free the struct - done = FALSE; - while (!done) - { - bufentry = (MixInputBufferEntry *) g_queue_peek_head(mix->inputbufqueue); - if (bufentry == NULL) - break; - - LOG_V( "head of queue buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)bufentry->buf, timestamp, bufentry->timestamp); - - if (bufentry->timestamp != timestamp) - { - LOG_V( "buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)bufentry->buf, timestamp, bufentry->timestamp); - done = TRUE; - break; - } - - bufentry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue); - - LOG_V( "Unref this MixBuffers %x\n", (guint)bufentry->buf); - mix_buffer_unref(bufentry->buf); - g_free(bufentry); - } - - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - - diff --git a/mix_video/src/mixvideoformat_vc1.cpp b/mix_video/src/mixvideoformat_vc1.cpp new file mode 100644 index 0000000..2199ff6 --- /dev/null +++ b/mix_video/src/mixvideoformat_vc1.cpp @@ -0,0 +1,1364 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights 
Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include +#include "mixvideolog.h" + +#include "mixvideoformat_vc1.h" +#ifndef ANDROID +#include +#endif + +#ifdef YUVDUMP +//TODO Complete YUVDUMP code and move into base class +#include +#endif /* YUVDUMP */ + +#include + + +#ifdef MIX_LOG_ENABLE +static int mix_video_vc1_counter = 0; +#endif + + +MixVideoFormat_VC1::MixVideoFormat_VC1() { + this->reference_frames[0] = NULL; + this->reference_frames[1] = NULL; +} + +MixVideoFormat_VC1::~MixVideoFormat_VC1() { + gint32 pret = VBP_OK; + /* clean up here. */ + Lock(); + //surfacepool is deallocated by parent + //inputbufqueue is deallocated by parent + //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces + //Unref our reference frames; + for (int i = 0; i < 2; i++) { + if (this->reference_frames[i] != NULL) + { + mix_videoframe_unref(this->reference_frames[i]); + this->reference_frames[i] = NULL; + } + } + + //Reset state + this->initialized = TRUE; + this->parse_in_progress = FALSE; + this->discontinuity_frame_in_progress = FALSE; + this->current_timestamp = (guint64)-1; + + //Close the parser + pret = vbp_close(this->parser_handle); + this->parser_handle = NULL; + if (pret != VBP_OK) { + LOG_E( "Error closing parser\n"); + } + + Unlock(); +} + + +MixVideoFormat_VC1 * mix_videoformat_vc1_new(void) { + return new MixVideoFormat_VC1(); +} + +MixVideoFormat_VC1 * mix_videoformat_vc1_ref(MixVideoFormat_VC1 * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} +MixVideoFormat_VC1 *mix_videoformat_vc1_unref(MixVideoFormat_VC1 * mix) { + if (NULL != mix) + return MIX_VIDEOFORMAT_VC1(mix->Unref()); + else + return mix; +} + +MIX_RESULT MixVideoFormat_VC1::_update_seq_header( + MixVideoConfigParamsDec* config_params, + MixIOVec *header) { + guint width = 0; + guint height = 0; + + gint i = 0; + guchar* p = NULL; + MIX_RESULT res = MIX_RESULT_SUCCESS; + + if (!config_params || !header) { + LOG_E( "Null pointer passed in\n"); + return (MIX_RESULT_NULL_PTR); + } + + p = header->data; + + res = mix_videoconfigparamsdec_get_picture_res( + config_params, &width, &height); + + if (MIX_RESULT_SUCCESS != res) { + return res; + } + + /* Check for start codes. If one exists, then this is VC-1 and not WMV.
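(Aside, illustrative: the scan below in standalone form. Advanced-profile VC-1 codec data is an elementary stream that already carries 00 00 01 start codes and can be parsed as-is; Simple/Main-profile WMV codec data has none and gets wrapped in the RCV header built further down. The i + 2 < n form of the bound also avoids the underflow that an unsigned i < n - 2 comparison would risk on very short buffers. Hypothetical helper name:)

static gboolean has_vc1_start_code(const guchar *d, guint n) {
    for (guint i = 0; i + 2 < n; i++) {
        if (d[i] == 0 && d[i + 1] == 0 && d[i + 2] == 1)
            return TRUE;                       // elementary stream: VC-1
    }
    return FALSE;                              // no start code: WMV/RCV path
}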
*/ + while (i < header->data_size - 2) { + if ((p[i] == 0) && (p[i + 1] == 0) && (p[i + 2] == 1)) { + return MIX_RESULT_SUCCESS; + } + i++; + } + + p = reinterpret_cast<guchar*>(g_malloc0(header->data_size + 9)); + + if (!p) { + LOG_E( "Cannot allocate memory\n"); + return MIX_RESULT_NO_MEMORY; + } + + /* If we get here we have 4+ bytes of codec data that must be formatted */ + /* to pass through as an RCV sequence header. */ + p[0] = 0; + p[1] = 0; + p[2] = 1; + p[3] = 0x0f; /* Start code. */ + + p[4] = (width >> 8) & 0x0ff; + p[5] = width & 0x0ff; + p[6] = (height >> 8) & 0x0ff; + p[7] = height & 0x0ff; + + memcpy(p + 8, header->data, header->data_size); + *(p + header->data_size + 8) = 0x80; + + g_free(header->data); + header->data = p; + header->data_size = header->data_size + 9; + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT MixVideoFormat_VC1::_update_config_params(vbp_data_vc1 *data) { + if (this->picture_width == 0 || + this->picture_height == 0) { + this->picture_width = data->se_data->CODED_WIDTH; + this->picture_height = data->se_data->CODED_HEIGHT; + mix_videoconfigparamsdec_set_picture_res( + this->config_params, + this->picture_width, + this->picture_height); + } + + // scaling has been performed on the decoded image. + mix_videoconfigparamsdec_set_video_range(this->config_params, 1); + uint8 color_matrix; + switch (data->se_data->MATRIX_COEF) { + case 1: + color_matrix = VA_SRC_BT709; + break; + // ITU-R BT.1700, ITU-R BT.601-5, and SMPTE 293M-1996. + case 6: + color_matrix = VA_SRC_BT601; + break; + default: + // unknown color matrix, set to 0 so color space flag will not be set. + color_matrix = 0; + break; + } + mix_videoconfigparamsdec_set_color_matrix(this->config_params, color_matrix); + mix_videoconfigparamsdec_set_pixel_aspect_ratio( + this->config_params, + data->se_data->ASPECT_HORIZ_SIZE, + data->se_data->ASPECT_VERT_SIZE); + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT MixVideoFormat_VC1::Initialize( + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display) { + + uint32 pret = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + enum _vbp_parser_type ptype = VBP_VC1; + vbp_data_vc1 *data = NULL; + MixIOVec *header = NULL; + gint numprofs = 0, numactualprofs = 0; + gint numentrypts = 0, numactualentrypts = 0; + VADisplay vadisplay = NULL; + VAProfile *profiles = NULL; + VAEntrypoint *entrypts = NULL; + VAConfigAttrib attrib; + VAStatus vret = VA_STATUS_SUCCESS; + guint extra_surfaces = 0; + VASurfaceID *surfaces = NULL; + guint numSurfaces = 0; + gint vaentrypt = 0; + gint vaprof = 0; + + //TODO Partition this method into smaller methods + if (config_params == NULL || frame_mgr == NULL || + !input_buf_pool || !surface_pool || !va_display) { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + LOG_V( "Begin\n"); + + // chain up parent method; keep its result for the check below + ret = MixVideoFormat::Initialize(config_params, frame_mgr, input_buf_pool, + surface_pool, va_display); + + if (ret != MIX_RESULT_SUCCESS) { + return ret; + } + LOG_V( "Locking\n"); + //From now on, we exit this function through CLEAN_UP: + Lock(); + + //Load the bitstream parser + pret = vbp_open(ptype, &(this->parser_handle)); + + if (!(pret == VBP_OK)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error opening parser\n"); + goto CLEAN_UP; + } + + LOG_V( "Opened parser\n"); + + ret = mix_videoconfigparamsdec_get_header(config_params, + &header); + + if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) { + ret = MIX_RESULT_FAIL;
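// --- Illustrative aside, not part of the patch: _update_config_params above
// maps the sequence layer's MATRIX_COEF onto libva colour-space hints using
// the standard colour-description code points (1 = BT.709 HD, 6 = BT.601 SD).
// Standalone form of that mapping (hypothetical helper name): ---
static guint vc1_color_matrix(int matrix_coef) {
    switch (matrix_coef) {
    case 1:  return VA_SRC_BT709;   // HDTV colourimetry
    case 6:  return VA_SRC_BT601;   // SDTV colourimetry (SMPTE 170M / BT.601)
    default: return 0;              // unknown: leave the colour-space flag unset
    }
}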
+ LOG_E( "Cannot get header data\n"); + goto CLEAN_UP; + } + + ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, + &extra_surfaces); + + if (ret != MIX_RESULT_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get extra surface allocation setting\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling parse on header data, handle %d\n", (int)this->parser_handle); + + ret = _update_seq_header(config_params, header); + if (ret != MIX_RESULT_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error updating sequence header\n"); + goto CLEAN_UP; + } + + pret = vbp_parse(this->parser_handle, header->data, + header->data_size, TRUE); + + if (!((pret == VBP_OK) || (pret == VBP_DONE))) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error parsing header data, size %d\n", header->data_size); + goto CLEAN_UP; + } + + + LOG_V( "Parsed header\n"); + //Get the header data and save + pret = vbp_query(this->parser_handle, (void **)&data); + + if ((pret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error reading parsed header data\n"); + goto CLEAN_UP; + } + LOG_V( "Queried parser for header data\n"); + + _update_config_params(data); + + //Time for libva initialization + vadisplay = this->va_display; + numprofs = vaMaxNumProfiles(vadisplay); + profiles = reinterpret_cast(g_malloc(numprofs*sizeof(VAProfile))); + + if (!profiles) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating memory\n"); + goto CLEAN_UP; + } + + vret = vaQueryConfigProfiles(vadisplay, profiles, + &numactualprofs); + if (!(vret == VA_STATUS_SUCCESS)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + goto CLEAN_UP; + } + + //check the desired profile support + + VAProfile profile; + switch (data->se_data->PROFILE) { + case 0: + profile = VAProfileVC1Simple; + break; + case 1: + profile = VAProfileVC1Main; + break; + default: + profile = VAProfileVC1Advanced; + break; + } + + for (; vaprof < numactualprofs; vaprof++) { + if (profiles[vaprof] == profile) + break; + } + if (vaprof >= numprofs || profiles[vaprof] != profile) { + ret = MIX_RESULT_FAIL; + LOG_E( "Profile not supported by driver\n"); + goto CLEAN_UP; + } + + numentrypts = vaMaxNumEntrypoints(vadisplay); + entrypts = reinterpret_cast(g_malloc(numentrypts*sizeof(VAEntrypoint))); + + if (!entrypts) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating memory\n"); + goto CLEAN_UP; + } + + vret = vaQueryConfigEntrypoints(vadisplay, profiles[vaprof], + entrypts, &numactualentrypts); + + if (!(vret == VA_STATUS_SUCCESS)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing driver\n"); + goto CLEAN_UP; + } + + for (; vaentrypt < numactualentrypts; vaentrypt++) { + if (entrypts[vaentrypt] == VAEntrypointVLD) + break; + } + if (vaentrypt >= numentrypts || entrypts[vaentrypt] != VAEntrypointVLD) { + ret = MIX_RESULT_FAIL; + LOG_E( "Entry point not supported by driver\n"); + goto CLEAN_UP; + } + + //We are requesting RT attributes + attrib.type = VAConfigAttribRTFormat; + + vret = vaGetConfigAttributes(vadisplay, profiles[vaprof], + entrypts[vaentrypt], &attrib, 1); + + //TODO Handle other values returned for RT format + // and check with requested format provided in config params + //Right now only YUV 4:2:0 is supported by libva + // and this is our default + if (((attrib.value & VA_RT_FORMAT_YUV420) == 0) || vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing driver\n"); + goto CLEAN_UP; + } + + //Initialize and save the VA config ID + vret = vaCreateConfig(vadisplay, profiles[vaprof], + 
entrypts[vaentrypt], &attrib, 1, &(this->va_config)); + + if (!(vret == VA_STATUS_SUCCESS)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing driver\n"); + goto CLEAN_UP; + } + + LOG_V( "Created libva config with profile %d\n", vaprof); + + //Check for loop filtering + if (data->se_data->LOOPFILTER == 1) + this->loopFilter = TRUE; + else + this->loopFilter = FALSE; + + LOG_V( "loop filter is %d, TFCNTRFLAG is %d\n", + data->se_data->LOOPFILTER, data->se_data->TFCNTRFLAG); + + //Initialize the surface pool + if ((data->se_data->MAXBFRAMES > 0) || + (data->se_data->PROFILE == 3) || + (data->se_data->PROFILE == 1)) + //If Advanced profile, have to assume B frames may be present, since MAXBFRAMES is not valid for this prof + this->haveBframes = TRUE; + else + this->haveBframes = FALSE; + + //Calculate VC1 numSurfaces based on max number of B frames or + // MIX_VIDEO_VC1_SURFACE_NUM, whichever is less + + //Adding 1 to work around VBLANK issue + this->va_num_surfaces = 1 + extra_surfaces + + ((3 + (this->haveBframes ? 1 : 0) < MIX_VIDEO_VC1_SURFACE_NUM) ? + (3 + (this->haveBframes ? 1 : 0)) : MIX_VIDEO_VC1_SURFACE_NUM); + numSurfaces = this->va_num_surfaces; + this->va_surfaces = reinterpret_cast(g_malloc(sizeof(VASurfaceID)*numSurfaces)); + surfaces = this->va_surfaces; + + if (surfaces == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot allocate temporary data\n"); + goto CLEAN_UP; + } + + vret = vaCreateSurfaces( + vadisplay, this->picture_width, + this->picture_height, entrypts[vaentrypt], + numSurfaces, surfaces); + if (!(vret == VA_STATUS_SUCCESS)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error allocating surfaces\n"); + goto CLEAN_UP; + } + + this->surfacepool = mix_surfacepool_new(); + *surface_pool = this->surfacepool; + if (this->surfacepool == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing surface pool\n"); + goto CLEAN_UP; + } + + + ret = mix_surfacepool_initialize(this->surfacepool, + surfaces, numSurfaces, vadisplay); + + switch (ret) { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + default: + ret = MIX_RESULT_ALREADY_INIT; + LOG_E( "Error init failure\n"); + goto CLEAN_UP; + break; + } + + LOG_V( "Created %d libva surfaces, MAXBFRAMES is %d\n", + numSurfaces, data->se_data->MAXBFRAMES); + + //Initialize and save the VA context ID + //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 + vret = vaCreateContext(vadisplay, this->va_config, + this->picture_width, this->picture_height, + 0, surfaces, numSurfaces, + &(this->va_context)); + if (!(vret == VA_STATUS_SUCCESS)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + goto CLEAN_UP; + } + + LOG_V( "Created libva context width %d, height %d\n", + this->picture_width, this->picture_height); + LOG_V( "mix_video vinfo: Content type %s, %s\n", + (header->data_size > 8) ? "VC-1" : "WMV", (data->se_data->INTERLACE) ? 
"interlaced" : "progressive"); + LOG_V( "mix_video vinfo: Content width %d, height %d\n", + this->picture_width, this->picture_height); + LOG_V( "mix_video vinfo: MAXBFRAMES %d (note that for Advanced profile, MAXBFRAMES can be zero and there still can be B frames in the content)\n", + data->se_data->MAXBFRAMES); + LOG_V( "mix_video vinfo: PROFILE %d, LEVEL %d\n", + data->se_data->PROFILE, data->se_data->LEVEL); + +CLEAN_UP: + if (ret != MIX_RESULT_SUCCESS) { + pret = vbp_close(this->parser_handle); + this->parser_handle = NULL; + this->initialized = FALSE; + } else { + this->initialized = TRUE; + } + if (header != NULL) { + if (header->data != NULL) + g_free(header->data); + g_free(header); + header = NULL; + } + g_free(profiles); + g_free(entrypts); + this->lastFrame = NULL; + LOG_V( "Unlocking\n"); + Unlock(); + LOG_V( "End\n"); + return ret; +} + +MIX_RESULT MixVideoFormat_VC1::Decode( + MixBuffer * bufin[], gint bufincnt, + MixVideoDecodeParams * decode_params) { + + uint32 pret = 0; + int i = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + guint64 ts = 0; + vbp_data_vc1 *data = NULL; + gboolean discontinuity = FALSE; + MixInputBufferEntry *bufentry = NULL; + if (bufin == NULL || decode_params == NULL) { + LOG_E( "NUll pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + //TODO remove iovout and iovoutcnt; they are not used (need to remove from MixVideo/MI-X API too) + LOG_V( "Begin\n"); + /* Chainup parent method. + We are not chaining up to parent method for now. + */ +#if 0 + if (parent_class->decode) { + return parent_class->decode(mix, bufin, bufincnt, + decode_params); + } +#endif + + ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); + if (ret != MIX_RESULT_SUCCESS) { + return MIX_RESULT_FAIL; + } + + ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); + if (ret != MIX_RESULT_SUCCESS) { + return MIX_RESULT_FAIL; + } + + //From now on, we exit this function through cleanup: + LOG_V( "Locking\n"); + Lock(); + + //If this is a new frame and we haven't retrieved parser + // workload data from previous frame yet, do so + if ((ts != this->current_timestamp) && + (this->parse_in_progress)) { + //query for data + pret = vbp_query(this->parser_handle, (void **) &data); + if ((pret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing parser\n"); + goto CLEAN_UP; + } + LOG_V( "Queried for last frame data\n"); + //process and decode data + ret = _process_decode(data, this->current_timestamp, this->discontinuity_frame_in_progress); + + if (ret != MIX_RESULT_SUCCESS) { + //We log this but need to process the new frame data, so do not return + LOG_E( "process_decode failed.\n"); + } + LOG_V( "Called process and decode for last frame\n"); + this->parse_in_progress = FALSE; + } + + this->current_timestamp = ts; + this->discontinuity_frame_in_progress = discontinuity; + LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_vc1_counter++, ts); + + for (i = 0; i < bufincnt; i++) { + LOG_V( "Calling parse for current frame, parse handle %d, buf %x, size %d\n", + (int)this->parser_handle, (guint)bufin[i]->data, bufin[i]->size); + pret = vbp_parse(this->parser_handle, bufin[i]->data, bufin[i]->size, FALSE); + LOG_V( "Called parse for current frame\n"); + if (pret == VBP_DONE) { + //query for data + pret = vbp_query(this->parser_handle, (void **) &data); + if ((pret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error getting parser data\n"); + goto CLEAN_UP; + } + LOG_V( "Called query 
for current frame\n"); + //Increase the ref count of this input buffer + mix_buffer_ref(bufin[i]); + //Create a new MixInputBufferEntry + //TODO make this from a pool to optimize + bufentry = reinterpret_cast(g_malloc(sizeof( + MixInputBufferEntry))); + if (bufentry == NULL) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating bufentry\n"); + goto CLEAN_UP; + } + + bufentry->buf = bufin[i]; + LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"G_GINT64_FORMAT"\n", + (guint)bufentry, (guint)bufentry->buf, ts); + bufentry->timestamp = ts; + + LOG_V( "Enqueue this input buffer for current frame\n"); + LOG_V( "bufentry->timestamp %"G_GINT64_FORMAT"\n", bufentry->timestamp); + + //Enqueue this input buffer + g_queue_push_tail(this->inputbufqueue, + (gpointer)bufentry); + + //process and decode data + ret = _process_decode(data, ts, discontinuity); + + if (ret != MIX_RESULT_SUCCESS) { + //We log this but continue since we need to complete our processing of input buffers + LOG_E( "Process_decode failed.\n"); + } + + LOG_V( "Called process and decode for current frame\n"); + this->parse_in_progress = FALSE; + } else if (pret != VBP_OK) { + //We log this but continue since we need to complete our processing of input buffers + LOG_E( "Parsing failed.\n"); + ret = MIX_RESULT_FAIL; + } else { + LOG_V( "Enqueuing buffer and going on to next (if any) for this frame\n"); + //Increase the ref count of this input buffer + mix_buffer_ref(bufin[i]); + //Create a new MixInputBufferEntry + //TODO make this from a pool to optimize + bufentry = reinterpret_cast(g_malloc(sizeof(MixInputBufferEntry))); + if (bufentry == NULL) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating bufentry\n"); + goto CLEAN_UP; + } + bufentry->buf = bufin[i]; + bufentry->timestamp = ts; + + //Enqueue this input buffer + g_queue_push_tail(this->inputbufqueue, + (gpointer)bufentry); + this->parse_in_progress = TRUE; + } + } +CLEAN_UP: + LOG_V( "Unlocking\n"); + Unlock(); + LOG_V( "End\n"); + return ret; +} + + +#ifdef YUVDUMP +//TODO Complete this YUVDUMP code and move into base class +MIX_RESULT MixVideoFormat_VC1::_get_Img_from_surface (MixVideoFrame * frame) { + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAImageFormat va_image_format; + VAImage va_image; + unsigned char* pBuffer; + unsigned int ui32SrcWidth = this->picture_width; + unsigned int ui32SrcHeight = this->picture_height; + unsigned int ui32Stride; + unsigned int ui32ChromaOffset; + FILE *fp = NULL; + int r = 0; + int i; + g_print ("_get_Img_from_surface \n"); + + if (NULL == frame) { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + fp = fopen("yuvdump.yuv", "a+"); + + static int have_va_image = 0; + + if (!have_va_image) { + va_image_format.fourcc = VA_FOURCC_NV12; + //va_image_format.fourcc = VA_FOURCC_YV12; + vaStatus = vaCreateImage( + this->va_display, &va_image_format, + ui32SrcWidth, ui32SrcHeight, &va_image); + have_va_image = 1; + } + + vaStatus = vaGetImage( + this->va_display, frame->frame_id, 0, 0, + ui32SrcWidth, ui32SrcHeight, va_image.image_id ); + vaStatus = vaMapBuffer(this->va_display, va_image.buf, (void **) &pBuffer); + ui32ChromaOffset = va_image.offsets[1]; + ui32Stride = va_image.pitches[0]; + + if (VA_STATUS_SUCCESS != vaStatus) { + g_print ("VideoProcessBlt: Unable to copy surface\n\r"); + return vaStatus; + } + + { + g_print ("before copy memory....\n"); + g_print ("width = %d, height = %d\n", ui32SrcWidth, ui32SrcHeight); + g_print ("data_size = %d\n", va_image.data_size); + g_print ("num_planes = %d\n", 
va_image.num_planes); + g_print ("va_image.pitches[0] = %d\n", va_image.pitches[0]); + g_print ("va_image.pitches[1] = %d\n", va_image.pitches[1]); + g_print ("va_image.pitches[2] = %d\n", va_image.pitches[2]); + g_print ("va_image.offsets[0] = %d\n", va_image.offsets[0]); + g_print ("va_image.offsets[1] = %d\n", va_image.offsets[1]); + g_print ("va_image.offsets[2] = %d\n", va_image.offsets[2]); + // r = fwrite (pBuffer, 1, va_image.offsets[1], fp); + + r = fwrite (pBuffer, va_image.offsets[1], 1, fp); + + for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2) + r = fwrite (pBuffer + va_image.offsets[1] + i / 2, 1, 1, fp); + + for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2) + r = fwrite (pBuffer + va_image.offsets[1] + i / 2 + 1, 1, 1, fp); + + g_print ("ui32ChromaOffset = %d, ui32Stride = %d\n", ui32ChromaOffset, ui32Stride); + + } + + vaStatus = vaUnmapBuffer(this->va_display, va_image.buf); + return vaStatus; +} +#endif /* YUVDUMP */ + +MIX_RESULT MixVideoFormat_VC1::_decode_a_picture( + vbp_data_vc1 *data, int pic_index, MixVideoFrame *frame) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + VADisplay vadisplay = NULL; + VAContextID vacontext; + guint buffer_id_cnt = 0; + VABufferID *buffer_ids = NULL; + vbp_picture_data_vc1* pic_data = &(data->pic_data[pic_index]); + VAPictureParameterBufferVC1 *pic_params = pic_data->pic_parms; + enum _picture_type frame_type = VC1_PTYPE_I; + gulong surface = 0; + + if (pic_params == NULL) { + ret = MIX_RESULT_NULL_PTR; + LOG_E( "Error reading parser data\n"); + goto CLEAN_UP; + } + + LOG_V( "num_slices is %d, allocating %d buffer_ids\n", pic_data->num_slices, (pic_data->num_slices * 2) + 2); + + //Set up reference frames for the picture parameter buffer + //Set the picture type (I, B or P frame) + frame_type = (_picture_type)pic_params->picture_fields.bits.picture_type; + + //Check for B frames after a seek + //We need to have both reference frames in hand before we can decode a B frame + //If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME + //Note: demuxer should do the right thing and only seek to I frame, so we should + // not get P frame first, but may get B frames after the first I frame + if (frame_type == VC1_PTYPE_B) { + if (this->reference_frames[1] == NULL) { + LOG_E( "Insufficient reference frames for B frame\n"); + ret = MIX_RESULT_DROPFRAME; + goto CLEAN_UP; + } + } + + buffer_ids = reinterpret_cast(g_malloc(sizeof(VABufferID) * ((pic_data->num_slices * 2) + 2))); + if (buffer_ids == NULL) { + LOG_E( "Cannot allocate buffer IDs\n"); + ret = MIX_RESULT_NO_MEMORY; + goto CLEAN_UP; + } + + LOG_V( "Getting a new surface\n"); + LOG_V( "frame type is %d\n", frame_type); + + //Get our surface ID from the frame object + ret = mix_videoframe_get_frame_id(frame, &surface); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting surface ID from frame object\n"); + goto CLEAN_UP; + } + + //Get a frame from the surface pool + if (0 == pic_index) { + //Set the frame type for the frame object (used in reordering by frame manager) + switch (frame_type) { + case VC1_PTYPE_I: // I frame type + case VC1_PTYPE_P: // P frame type + case VC1_PTYPE_B: // B frame type + ret = mix_videoframe_set_frame_type(frame, (MixFrameType)frame_type); + break; + case VC1_PTYPE_BI: // BI frame type + ret = mix_videoframe_set_frame_type(frame, TYPE_B); + break; + //Not indicated here case VC1_PTYPE_SKIPPED: + default: + break; + } + } + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error setting frame type 
on frame\n"); + goto CLEAN_UP; + } + + LOG_V( "Setting reference frames in picparams, frame_type = %d\n", frame_type); + //TODO Check if we need to add more handling of B or P frames when reference frames are not set up (such as after flush/seek) + + switch (frame_type) { + case VC1_PTYPE_I: // I frame type + /* forward and backward reference pictures are not used but just set to current + surface to be in consistence with test suite + */ + pic_params->forward_reference_picture = surface; + pic_params->backward_reference_picture = surface; + LOG_V( "I frame, surface ID %u\n", (guint)frame->frame_id); + LOG_V( "mix_video vinfo: Frame type is I\n"); + break; + case VC1_PTYPE_P: // P frame type + // check REFDIST in the picture parameter buffer + if (0 != pic_params->reference_fields.bits.reference_distance_flag && + 0 != pic_params->reference_fields.bits.reference_distance) { + /* The previous decoded frame (distance is up to 16 but not 0) is used + for reference, as we don't allocate that many surfaces so the reference picture + could have been overwritten and hence not avaiable for reference. + */ + LOG_E( "reference distance is not 0!"); + ret = MIX_RESULT_DROPFRAME; + goto CLEAN_UP; + } + if (1 == pic_index) { + // handle interlace field coding case + if (1 == pic_params->reference_fields.bits.num_reference_pictures || + 1 == pic_params->reference_fields.bits.reference_field_pic_indicator) { + /* two reference fields or the second closest I/P field is used for + prediction. Set forward reference picture to INVALID so it will be + updated to a valid previous reconstructed reference frame later. + */ + pic_params->forward_reference_picture = VA_INVALID_SURFACE; + } else { + /* the closest I/P is used for reference so it must be the + complementary field in the same surface. 
+ */ + pic_params->forward_reference_picture = surface; + } + } + if (VA_INVALID_SURFACE == pic_params->forward_reference_picture) { + if (this->reference_frames[1]) { + pic_params->forward_reference_picture = this->reference_frames[1]->frame_id; + } else if (this->reference_frames[0]) { + pic_params->forward_reference_picture = this->reference_frames[0]->frame_id; + } else { + ret = MIX_RESULT_DROPFRAME; + LOG_E( "Error could not find reference frames for P frame\n"); + goto CLEAN_UP; + } + } + pic_params->backward_reference_picture = VA_INVALID_SURFACE; + +#ifdef MIX_LOG_ENABLE /* this is to fix a crash when MIX_LOG_ENABLE is set */ + if(this->reference_frames[0] && frame) { + LOG_V( "P frame, surface ID %u, forw ref frame is %u\n", + (guint)frame->frame_id, (guint)this->reference_frames[0]->frame_id); + } +#endif + LOG_V( "mix_video vinfo: Frame type is P\n"); + break; + + case VC1_PTYPE_B: // B frame type + LOG_V( "B frame, forw ref %d, back ref %d\n", + (guint)this->reference_frames[0]->frame_id, + (guint)this->reference_frames[1]->frame_id); + + if (!this->haveBframes) {//We don't expect B frames and have not allocated a surface + // for the extra ref frame so this is an error + ret = MIX_RESULT_DROPFRAME; + LOG_E( "Unexpected B frame, cannot process\n"); + goto CLEAN_UP; + } + + pic_params->forward_reference_picture = this->reference_frames[0]->frame_id; + pic_params->backward_reference_picture = this->reference_frames[1]->frame_id; + + LOG_V( "B frame, surface ID %u, forw ref %d, back ref %d\n", + (guint)frame->frame_id, (guint)this->reference_frames[0]->frame_id, + (guint)this->reference_frames[1]->frame_id); + LOG_V( "mix_video vinfo: Frame type is B\n"); + break; + case VC1_PTYPE_BI: + pic_params->forward_reference_picture = VA_INVALID_SURFACE; + pic_params->backward_reference_picture = VA_INVALID_SURFACE; + LOG_V( "BI frame\n"); + LOG_V( "mix_video vinfo: Frame type is BI\n"); + break; + case VC1_PTYPE_SKIPPED: + //Will never happen here + break; + default: + LOG_V( "Hit default\n"); + break; + } + + //Loop filter handling + if (this->loopFilter) { + LOG_V( "Setting in loop decoded picture to current frame\n"); + LOG_V( "Double checking picparams inloop filter is %d\n", + pic_params->entrypoint_fields.bits.loopfilter); + pic_params->inloop_decoded_picture = frame->frame_id; + } else { + LOG_V( "Setting in loop decoded picture to invalid\n"); + pic_params->inloop_decoded_picture = VA_INVALID_SURFACE; + } + //Libva buffer set up + vadisplay = this->va_display; + vacontext = this->va_context; + + LOG_V( "Creating libva picture parameter buffer\n"); + + //First the picture parameter buffer + vret = vaCreateBuffer( + vadisplay, + vacontext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferVC1), + 1, + pic_params, + &buffer_ids[buffer_id_cnt]); + + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + LOG_V( "Creating libva bitplane buffer\n"); + + if (pic_params->bitplane_present.value) { + //Then the bitplane buffer + vret = vaCreateBuffer( + vadisplay, + vacontext, + VABitPlaneBufferType, + pic_data->size_bitplanes, + 1, + pic_data->packed_bitplanes, + &buffer_ids[buffer_id_cnt]); + buffer_id_cnt++; + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + } + + //Now for slices + for (uint32 i = 0; i < pic_data->num_slices; i++) { + LOG_V( "Creating libva slice 
parameter buffer, for slice %d\n", i); + + //Do slice parameters + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferVC1), + 1, + &(pic_data->slc_data[i].slc_parms), + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + + buffer_id_cnt++; + + LOG_V( "Creating libva slice data buffer for slice %d, using slice address %x, with offset %d and size %u\n", i, (guint)pic_data->slc_data[i].buffer_addr, pic_data->slc_data[i].slc_parms.slice_data_offset, pic_data->slc_data[i].slice_size); + + + //Do slice data + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceDataBufferType, + //size + pic_data->slc_data[i].slice_size, + //num_elements + 1, + //slice data buffer pointer + //Note that this is the original data buffer ptr; + // offset to the actual slice data is provided in + // slice_data_offset in VASliceParameterBufferVC1 + pic_data->slc_data[i].buffer_addr + pic_data->slc_data[i].slice_offset, + &buffer_ids[buffer_id_cnt]); + + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + } + + + LOG_V( "Calling vaBeginPicture\n"); + + //Now we can begin the picture + vret = vaBeginPicture(vadisplay, vacontext, surface); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaBeginPicture\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling vaRenderPicture\n"); + + //Render the picture + vret = vaRenderPicture( + vadisplay, + vacontext, + buffer_ids, + buffer_id_cnt); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaRenderPicture\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling vaEndPicture\n"); + + //End picture + vret = vaEndPicture(vadisplay, vacontext); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaEndPicture\n"); + goto CLEAN_UP; + } + +#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ + LOG_V( "Calling vaSyncSurface\n"); + + //Decode the picture + vret = vaSyncSurface(vadisplay, surface); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaSyncSurface\n"); + goto CLEAN_UP; + } +#endif + +CLEAN_UP: + if (NULL != buffer_ids) + g_free(buffer_ids); + return ret; +} + + +MIX_RESULT MixVideoFormat_VC1::Flush() { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V( "Begin\n"); + uint32 pret = 0; + MixInputBufferEntry *bufentry = NULL; + /* Chainup parent method. + We are not chaining up to parent method for now. 
+ */ +#if 0 + if (parent_class->flush) + { + return parent_class->flush(mix, msg); + } +#endif + Lock(); + + //Clear the contents of inputbufqueue + while (!g_queue_is_empty(this->inputbufqueue)) { + bufentry = (MixInputBufferEntry *) g_queue_pop_head(this->inputbufqueue); + if (bufentry == NULL) + continue; + mix_buffer_unref(bufentry->buf); + g_free(bufentry); + } + + //Clear parse_in_progress flag and current timestamp + this->parse_in_progress = FALSE; + this->discontinuity_frame_in_progress = FALSE; + this->current_timestamp = (guint64)-1; + + int i = 0; + for (; i < 2; i++) { + if (this->reference_frames[i] != NULL) { + mix_videoframe_unref(this->reference_frames[i]); + this->reference_frames[i] = NULL; + } + } + + //Call parser flush + pret = vbp_flush(this->parser_handle); + if (pret != VBP_OK) + ret = MIX_RESULT_FAIL; + + Unlock(); + LOG_V( "End\n"); + return ret; +} + +MIX_RESULT MixVideoFormat_VC1::EndOfStream() { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + vbp_data_vc1 *data = NULL; + uint32 pret = 0; + LOG_V( "Begin\n"); + /* Chainup parent method. + We are not chaining up to parent method for now. + */ +#if 0 + if (parent_class->eos) + { + return parent_class->eos(mix, msg); + } +#endif + Lock(); + //if a frame is in progress, process the frame + if (this->parse_in_progress) { + //query for data + pret = vbp_query(this->parser_handle, (void **) &data); + if ((pret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error getting last parse data\n"); + goto CLEAN_UP; + } + + //process and decode data + ret = _process_decode(data, this->current_timestamp, this->discontinuity_frame_in_progress); + this->parse_in_progress = FALSE; + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error processing last frame\n"); + goto CLEAN_UP; + } + } +CLEAN_UP: + Unlock(); + ret = mix_framemanager_eos(this->framemgr); + LOG_V( "End\n"); + return ret; +} + + +MIX_RESULT MixVideoFormat_VC1::_handle_ref_frames( + enum _picture_type frame_type, MixVideoFrame * current_frame) { + LOG_V( "Begin\n"); + if (NULL == current_frame) { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + switch (frame_type) { + case VC1_PTYPE_I: // I frame type + case VC1_PTYPE_P: // P frame type + LOG_V( "Refing reference frame %x\n", (guint) current_frame); + mix_videoframe_ref(current_frame); + + //If we have B frames, we need to keep forward and backward reference frames + if (this->haveBframes) { + if (this->reference_frames[0] == NULL) { //should only happen on first frame + this->reference_frames[0] = current_frame; + //this->reference_frames[1] = NULL; + } else if (this->reference_frames[1] == NULL) {//should only happen on second frame + this->reference_frames[1] = current_frame; + } else { + LOG_V( "Releasing reference frame %x\n", (guint) this->reference_frames[0]); + mix_videoframe_unref(this->reference_frames[0]); + this->reference_frames[0] = this->reference_frames[1]; + this->reference_frames[1] = current_frame; + } + }else {//No B frames in this content, only need to keep the forward reference frame + LOG_V( "Releasing reference frame %x\n", (guint) this->reference_frames[0]); + if (this->reference_frames[0] != NULL) + mix_videoframe_unref(this->reference_frames[0]); + this->reference_frames[0] = current_frame; + } + break; + case VC1_PTYPE_B: // B or BI frame type (should not happen) + case VC1_PTYPE_BI: + default: + LOG_E( "Wrong frame type for handling reference frames\n"); + return MIX_RESULT_FAIL; + break; + + } + LOG_V( "End\n"); + return MIX_RESULT_SUCCESS; +} + 
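_handle_ref_frames() above keeps at most two referenced surfaces: when B frames are possible, slot 0 holds the forward (older) reference and slot 1 the backward (newer) one, and each new I or P frame slides that two-frame window; without B frames a single slot suffices. A condensed sketch of just that bookkeeping, with the mix_videoframe_ref/unref calls elided (Frame and RefWindow are illustrative stand-ins, not types from this patch):

    // Two-slot reference window, mirroring _handle_ref_frames() above.
    struct Frame;                            // decoded-frame handle stand-in

    struct RefWindow {
        Frame *refs[2] = {nullptr, nullptr}; // [0] = forward, [1] = backward
        bool have_b_frames = false;

        // Called for each decoded I or P frame; B/BI frames never become refs.
        void push(Frame *f) {
            if (!have_b_frames) {            // only the latest I/P is needed
                refs[0] = f;
                return;
            }
            if (refs[0] == nullptr) {        // first I/P after init or flush
                refs[0] = f;
            } else if (refs[1] == nullptr) { // second I/P
                refs[1] = f;
            } else {                         // slide the window forward
                refs[0] = refs[1];
                refs[1] = f;
            }
        }
    };

In the real method each incoming frame is ref'd and the evicted slot-0 frame is unref'd before being dropped, which is what returns its surface to the pool.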
+MIX_RESULT MixVideoFormat_VC1::_process_decode( + vbp_data_vc1 *data, guint64 timestamp, gboolean discontinuity) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + gboolean unrefVideoFrame = FALSE; + MixVideoFrame *frame = NULL; + int num_pictures = 0; + enum _picture_type frame_type = VC1_PTYPE_I; + + //TODO Partition this method into smaller methods + LOG_V( "Begin\n"); + if (NULL == data) { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + if (0 == data->num_pictures || NULL == data->pic_data) { + return MIX_RESULT_INVALID_PARAM; + } + + //Check for skipped frame + //For skipped frames, we will reuse the last P or I frame surface and treat as P frame + if (data->pic_data[0].picture_is_skipped == VC1_PTYPE_SKIPPED) { + LOG_V( "mix_video vinfo: Frame type is SKIPPED\n"); + if (this->lastFrame == NULL) { + //we shouldn't get a skipped frame before we are able to get a real frame + LOG_E( "Error for skipped frame, prev frame is NULL\n"); + ret = MIX_RESULT_DROPFRAME; + goto CLEAN_UP; + } + + //We don't worry about this memory allocation because SKIPPED is not a common case + //Doing the allocation on the fly is a more efficient choice than trying to manage yet another pool + MixVideoFrame *skip_frame = mix_videoframe_new(); + if (skip_frame == NULL) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating new video frame object for skipped frame\n"); + goto CLEAN_UP; + } + + mix_videoframe_set_is_skipped(skip_frame, TRUE); + //mix_videoframe_ref(skip_frame); + mix_videoframe_ref(this->lastFrame); + gulong frameid = VA_INVALID_SURFACE; + mix_videoframe_get_frame_id(this->lastFrame, &frameid); + mix_videoframe_set_frame_id(skip_frame, frameid); + mix_videoframe_set_frame_type(skip_frame, (MixFrameType)VC1_PTYPE_P); + mix_videoframe_set_real_frame(skip_frame, this->lastFrame); + mix_videoframe_set_timestamp(skip_frame, timestamp); + mix_videoframe_set_discontinuity(skip_frame, FALSE); + LOG_V( "Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n", + (guint)skip_frame, (guint)frameid, timestamp); + //Process reference frames + LOG_V( "Updating skipped frame forward/backward references for libva\n"); + _handle_ref_frames(VC1_PTYPE_P, skip_frame); + //Enqueue the skipped frame using frame manager + ret = mix_framemanager_enqueue(this->framemgr, skip_frame); + goto CLEAN_UP; + } + + ret = mix_surfacepool_get(this->surfacepool, &frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting frame from surfacepool\n"); + goto CLEAN_UP; + } + unrefVideoFrame = TRUE; + + // TO DO: handle multiple frames parsed from a sample buffer + num_pictures = (data->num_pictures > 1) ? 
2 : 1; + for (int index = 0; index < num_pictures; index++) { + ret = _decode_a_picture(data, index, frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Failed to decode a picture.\n"); + goto CLEAN_UP; + } + } + + //Set the discontinuity flag + mix_videoframe_set_discontinuity(frame, discontinuity); + + //Set the timestamp + mix_videoframe_set_timestamp(frame, timestamp); + + // setup frame structure + if (data->num_pictures > 1) { + if (data->pic_data[0].pic_parms->picture_fields.bits.is_first_field) + mix_videoframe_set_frame_structure(frame, VA_TOP_FIELD); + else + mix_videoframe_set_frame_structure(frame, VA_BOTTOM_FIELD); + } else { + mix_videoframe_set_frame_structure(frame, VA_FRAME_PICTURE); + } + + frame_type = (_picture_type)data->pic_data[0].pic_parms->picture_fields.bits.picture_type; + + //For I or P frames + //Save this frame off for skipped frame handling + if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P)) { + if (this->lastFrame != NULL) { + mix_videoframe_unref(this->lastFrame); + } + this->lastFrame = frame; + mix_videoframe_ref(frame); + } + + //Update the references frames for the current frame + if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P)) {//If I or P frame, update the reference array + LOG_V( "Updating forward/backward references for libva\n"); + _handle_ref_frames(frame_type, frame); + } + +//TODO Complete YUVDUMP code and move into base class +#ifdef YUVDUMP + if (mix_video_vc1_counter < 10) + ret = _get_Img_from_surface(frame); + //g_usleep(5000000); +#endif /* YUVDUMP */ + + LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", timestamp); + + //Enqueue the decoded frame using frame manager + ret = mix_framemanager_enqueue(this->framemgr, frame); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error enqueuing frame object\n"); + goto CLEAN_UP; + } + unrefVideoFrame = FALSE; + +CLEAN_UP: + _release_input_buffers(timestamp); + if (unrefVideoFrame) + mix_videoframe_unref(frame); + LOG_V( "End\n"); + return ret; +} + +MIX_RESULT MixVideoFormat_VC1::_release_input_buffers(guint64 timestamp) { + MixInputBufferEntry *bufentry = NULL; + gboolean done = FALSE; + LOG_V( "Begin\n"); + + //Dequeue and release all input buffers for this frame + LOG_V( "Releasing all the MixBuffers for this frame\n"); + //While the head of the queue has timestamp == current ts + //dequeue the entry, unref the MixBuffer, and free the struct + done = FALSE; + while (!done) { + bufentry = (MixInputBufferEntry *) g_queue_peek_head(this->inputbufqueue); + if (bufentry == NULL) + break; + LOG_V( "head of queue buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", + (guint)bufentry->buf, timestamp, bufentry->timestamp); + if (bufentry->timestamp != timestamp) { + LOG_V( "buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", + (guint)bufentry->buf, timestamp, bufentry->timestamp); + done = TRUE; + break; + } + bufentry = (MixInputBufferEntry *) g_queue_pop_head(this->inputbufqueue); + LOG_V( "Unref this MixBuffers %x\n", (guint)bufentry->buf); + mix_buffer_unref(bufentry->buf); + g_free(bufentry); + } + LOG_V( "End\n"); + return MIX_RESULT_SUCCESS; +} + + diff --git a/mix_video/src/mixvideoformat_vc1.h b/mix_video/src/mixvideoformat_vc1.h index 366428e..2171e00 100644 --- a/mix_video/src/mixvideoformat_vc1.h +++ b/mix_video/src/mixvideoformat_vc1.h @@ -12,7 +12,6 @@ #include "mixvideoformat.h" #include "mixvideoframe_private.h" -G_BEGIN_DECLS //Note: this is only a max limit. 
Actual number of surfaces allocated is calculated in mix_videoformat_vc1_initialize()
 #define MIX_VIDEO_VC1_SURFACE_NUM 8
 
@@ -20,20 +19,43 @@ G_BEGIN_DECLS
 /*
  * Type macros.
  */
-#define MIX_TYPE_VIDEOFORMAT_VC1 (mix_videoformat_vc1_get_type ())
-#define MIX_VIDEOFORMAT_VC1(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMAT_VC1, MixVideoFormat_VC1))
-#define MIX_IS_VIDEOFORMAT_VC1(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMAT_VC1))
-#define MIX_VIDEOFORMAT_VC1_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMAT_VC1, MixVideoFormat_VC1Class))
-#define MIX_IS_VIDEOFORMAT_VC1_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMAT_VC1))
-#define MIX_VIDEOFORMAT_VC1_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMAT_VC1, MixVideoFormat_VC1Class))
-
-typedef struct _MixVideoFormat_VC1 MixVideoFormat_VC1;
-typedef struct _MixVideoFormat_VC1Class MixVideoFormat_VC1Class;
-
-struct _MixVideoFormat_VC1 {
- /*< public > */
- MixVideoFormat parent;
-
+#define MIX_VIDEOFORMAT_VC1(obj) (reinterpret_cast<MixVideoFormat_VC1 *>(obj))
+#define MIX_IS_VIDEOFORMAT_VC1(obj) (NULL != MIX_VIDEOFORMAT_VC1(obj))
+
+class MixVideoFormat_VC1 : public MixVideoFormat {
+public:
+    MixVideoFormat_VC1();
+    virtual ~MixVideoFormat_VC1();
+
+    virtual MIX_RESULT Initialize(
+        MixVideoConfigParamsDec * config_params,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay va_display);
+    virtual MIX_RESULT Decode(
+        MixBuffer * bufin[], gint bufincnt,
+        MixVideoDecodeParams * decode_params);
+    virtual MIX_RESULT Flush();
+    virtual MIX_RESULT EndOfStream();
+
+private:
+    MIX_RESULT _handle_ref_frames(
+        enum _picture_type frame_type, MixVideoFrame * current_frame);
+    MIX_RESULT _process_decode(
+        vbp_data_vc1 *data, guint64 timestamp, gboolean discontinuity);
+    MIX_RESULT _release_input_buffers(guint64 timestamp);
+    MIX_RESULT _update_seq_header(
+        MixVideoConfigParamsDec* config_params, MixIOVec *header);
+    MIX_RESULT _update_config_params(vbp_data_vc1 *data);
+    MIX_RESULT _decode_a_picture(
+        vbp_data_vc1 *data, int pic_index, MixVideoFrame *frame);
+#ifdef YUVDUMP
+    MIX_RESULT _get_Img_from_surface (MixVideoFrame * frame);
+#endif
+
+
+public:
 /*< public > */
 
 /*< private > */
@@ -43,28 +65,6 @@ struct _MixVideoFormat_VC1 {
 MixVideoFrame * lastFrame;
 };
 
-/**
- * MixVideoFormat_VC1Class:
- *
- * MI-X Video object class
- */
-struct _MixVideoFormat_VC1Class {
- /*< public > */
- MixVideoFormatClass parent_class;
-
- /* class members */
-
- /*< public > */
-};
-
-/**
- * mix_videoformat_vc1_get_type:
- * @returns: type
- *
- * Get the type of object.
- */
-GType mix_videoformat_vc1_get_type(void);
-
 /**
  * mix_videoformat_vc1_new:
  * @returns: A newly allocated instance of #MixVideoFormat_VC1
@@ -88,40 +88,7 @@ MixVideoFormat_VC1 *mix_videoformat_vc1_ref(MixVideoFormat_VC1 * mix);
 *
 * Decrement reference count of the object.
*/ -#define mix_videoformat_vc1_unref(obj) g_object_unref (G_OBJECT(obj)) - -/* Class Methods */ - -/* VC1 vmethods */ -MIX_RESULT mix_videofmt_vc1_getcaps(MixVideoFormat *mix, GString *msg); -MIX_RESULT mix_videofmt_vc1_initialize(MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); -MIX_RESULT mix_videofmt_vc1_decode(MixVideoFormat *mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params); -MIX_RESULT mix_videofmt_vc1_flush(MixVideoFormat *mix); -MIX_RESULT mix_videofmt_vc1_eos(MixVideoFormat *mix); -MIX_RESULT mix_videofmt_vc1_deinitialize(MixVideoFormat *mix); - -/* Local Methods */ - -MIX_RESULT mix_videofmt_vc1_handle_ref_frames(MixVideoFormat *mix, - enum _picture_type frame_type, - MixVideoFrame * current_frame); - - -MIX_RESULT mix_videofmt_vc1_process_decode(MixVideoFormat *mix, - vbp_data_vc1 *data, - guint64 timestamp, - gboolean discontinuity); - - -MIX_RESULT mix_videofmt_vc1_release_input_buffers(MixVideoFormat *mix, - guint64 timestamp); +MixVideoFormat_VC1 *mix_videoformat_vc1_unref(MixVideoFormat_VC1 * mix); -G_END_DECLS #endif /* __MIX_VIDEOFORMAT_VC1_H__ */ diff --git a/mix_video/src/mixvideoformatenc.c b/mix_video/src/mixvideoformatenc.c deleted file mode 100644 index f35fb32..0000000 --- a/mix_video/src/mixvideoformatenc.c +++ /dev/null @@ -1,884 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
- */ -#include -#include "mixvideolog.h" -#include "mixvideoformatenc.h" - -//#define MDEBUG - -/* Default vmethods implementation */ -static MIX_RESULT mix_videofmtenc_getcaps_default(MixVideoFormatEnc *mix, - GString *msg); -static MIX_RESULT mix_videofmtenc_initialize_default(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay vadisplay); - -static MIX_RESULT -mix_videofmtenc_encode_default(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params); -static MIX_RESULT mix_videofmtenc_flush_default(MixVideoFormatEnc *mix); -static MIX_RESULT mix_videofmtenc_eos_default(MixVideoFormatEnc *mix); -static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix); -static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default( - MixVideoFormatEnc *mix, guint *max_size); -MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixEncParamsType params_type); - - -static GObjectClass *parent_class = NULL; - -static void mix_videoformatenc_finalize(GObject * obj); -G_DEFINE_TYPE (MixVideoFormatEnc, mix_videoformatenc, G_TYPE_OBJECT); - -static void mix_videoformatenc_init(MixVideoFormatEnc * self) { - /* TODO: public member initialization */ - - /* TODO: private member initialization */ - - self->objectlock = g_mutex_new(); - - self->initialized = FALSE; - self->framemgr = NULL; - self->surfacepool = NULL; - self->inputbufpool = NULL; - self->inputbufqueue = NULL; - self->va_display = NULL; - self->va_context = 0; - self->va_config = 0; - self->mime_type = NULL; - self->frame_rate_num= 0; - self->frame_rate_denom = 1; - self->picture_width = 0; - self->picture_height = 0; - - /* - * bitrate control - */ - self->initial_qp = 0; - self->min_qp = 0; - self->target_percentage = 95; - self->window_size = 500; - self->bitrate = 0; - - self->intra_period = 0; - self->share_buf_mode = FALSE; - self->ci_frame_id = NULL; - self->ci_frame_num = 0; - self->drawable = 0x0; - self->need_display = TRUE; - - self->va_rcmode = VA_RC_NONE; - self->va_format = VA_RT_FORMAT_YUV420; - self->va_entrypoint = VAEntrypointEncSlice; - self->va_profile = VAProfileH264Baseline; - self->level = 30; - - self->refresh_type = MIX_VIDEO_NONIR; - self->CIR_frame_cnt = 15; //default value - - /* - * Parameters for AIR intra refresh mode - */ - self->air_params.air_MBs = 0; - self->air_params.air_threshold = 0; - self->air_params.air_auto = 0; - - self->max_slice_size = 0; - - self->force_key_frame = FALSE; - self->new_header_required = FALSE; - self->render_mss_required = FALSE; - self->render_QP_required = FALSE; - self->render_AIR_required = FALSE; - self->render_framerate_required = FALSE; - self->render_bitrate_required = FALSE; - - //add more properties here -} - -static void mix_videoformatenc_class_init(MixVideoFormatEncClass * klass) { - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* parent class for later use */ - parent_class = g_type_class_peek_parent(klass); - - gobject_class->finalize = mix_videoformatenc_finalize; - - /* setup vmethods with base implementation */ - klass->getcaps = mix_videofmtenc_getcaps_default; - klass->initialize = mix_videofmtenc_initialize_default; - klass->encode = mix_videofmtenc_encode_default; - klass->flush = mix_videofmtenc_flush_default; - klass->eos = 
mix_videofmtenc_eos_default; - klass->deinitialize = mix_videofmtenc_deinitialize_default; - klass->getmaxencodedbufsize = mix_videofmtenc_get_max_coded_buffer_size_default; - klass->set_dynamic_config = mix_videofmtenc_set_dynamic_enc_config_default; -} - -MixVideoFormatEnc * -mix_videoformatenc_new(void) { - MixVideoFormatEnc *ret = g_object_new(MIX_TYPE_VIDEOFORMATENC, NULL); - - return ret; -} - -void mix_videoformatenc_finalize(GObject * obj) { - /* clean up here. */ - - if (obj == NULL) { - LOG_E( "obj == NULL\n"); - return; - } - - MixVideoFormatEnc *mix = MIX_VIDEOFORMATENC(obj); - - LOG_V( "\n"); - - if(mix->objectlock) { - g_mutex_free(mix->objectlock); - mix->objectlock = NULL; - } - - //MiVideo object calls the _deinitialize() for frame manager - if (mix->framemgr) - { - mix_framemanager_unref(mix->framemgr); - mix->framemgr = NULL; - } - - if (mix->mime_type) - { - if (mix->mime_type->str) - g_string_free(mix->mime_type, TRUE); - else - g_string_free(mix->mime_type, FALSE); - } - - if (mix->ci_frame_id) - g_free (mix->ci_frame_id); - - - if (mix->surfacepool) - { - mix_surfacepool_deinitialize(mix->surfacepool); - mix_surfacepool_unref(mix->surfacepool); - mix->surfacepool = NULL; - } - - - /* TODO: cleanup here */ - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} - -MixVideoFormatEnc * -mix_videoformatenc_ref(MixVideoFormatEnc * mix) { - return (MixVideoFormatEnc *) g_object_ref(G_OBJECT(mix)); -} - -/* Default vmethods implementation */ -static MIX_RESULT mix_videofmtenc_getcaps_default(MixVideoFormatEnc *mix, - GString *msg) { - LOG_V( "Begin\n"); - return MIX_RESULT_SUCCESS; -} - -static MIX_RESULT mix_videofmtenc_initialize_default(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - - LOG_V( "Begin\n"); - - if (mix == NULL ||config_params_enc == NULL) { - LOG_E( - "!mix || config_params_enc == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - //TODO check return values of getter fns for config_params - - g_mutex_lock(mix->objectlock); - - mix->framemgr = frame_mgr; - mix_framemanager_ref(mix->framemgr); - - mix->va_display = va_display; - - LOG_V( - "Start to get properities from parent params\n"); - - /* get properties from param (parent) Object*/ - ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc, - &(mix->bitrate)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_bps\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_frame_rate (config_params_enc, - &(mix->frame_rate_num), &(mix->frame_rate_denom)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_frame_rate\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, - &(mix->initial_qp)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_init_qp\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - - ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc, - &(mix->min_qp)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_min_qp\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - 
} - - ret = mix_videoconfigparamsenc_get_target_percentage(config_params_enc, - &(mix->target_percentage)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_target_percentage\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_window_size (config_params_enc, - &(mix->window_size)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_window_size\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc, - &(mix->intra_period)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_intra_period\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc, - &(mix->picture_width), &(mix->picture_height)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_picture_res\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_share_buf_mode (config_params_enc, - &(mix->share_buf_mode)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_share_buf_mode\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - - ret = mix_videoconfigparamsenc_get_ci_frame_info (config_params_enc, - &(mix->ci_frame_id), &(mix->ci_frame_num)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_ci_frame_info\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - - ret = mix_videoconfigparamsenc_get_drawable (config_params_enc, - &(mix->drawable)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_drawable\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_need_display (config_params_enc, - &(mix->need_display)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_drawable\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc, - &(mix->va_rcmode)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_rc_mode\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_raw_format (config_params_enc, - &(mix->va_format)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_format\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_profile (config_params_enc, - (MixProfile *) &(mix->va_profile)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_profile\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_level (config_params_enc, - &(mix->level)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_level\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_CIR_frame_cnt(config_params_enc, - 
&(mix->CIR_frame_cnt)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - - ret = mix_videoconfigparamsenc_get_max_slice_size(config_params_enc, - &(mix->max_slice_size)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_max_slice_size\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - - ret = mix_videoconfigparamsenc_get_refresh_type(config_params_enc, - &(mix->refresh_type)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_refresh_type\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_AIR_params(config_params_enc, - &(mix->air_params)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_AIR_params\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - LOG_V( - "======Video Encode Parent Object properities======:\n"); - - LOG_I( "mix->bitrate = %d\n", - mix->bitrate); - LOG_I( "mix->frame_rate = %d\n", - mix->frame_rate_denom / mix->frame_rate_denom); - LOG_I( "mix->initial_qp = %d\n", - mix->initial_qp); - LOG_I( "mix->min_qp = %d\n", - mix->min_qp); - LOG_I( "mix->intra_period = %d\n", - mix->intra_period); - LOG_I( "mix->picture_width = %d\n", - mix->picture_width); - LOG_I( "mix->picture_height = %d\n", - mix->picture_height); - LOG_I( "mix->share_buf_mode = %d\n", - mix->share_buf_mode); - LOG_I( "mix->ci_frame_id = 0x%08x\n", - mix->ci_frame_id); - LOG_I( "mix->ci_frame_num = %d\n", - mix->ci_frame_num); - LOG_I( "mix->drawable = 0x%08x\n", - mix->drawable); - LOG_I( "mix->need_display = %d\n", - mix->need_display); - LOG_I( "mix->va_format = %d\n", - mix->va_format); - LOG_I( "mix->va_profile = %d\n", - mix->va_profile); - LOG_I( "mix->va_rcmode = %d\n\n", - mix->va_rcmode); - LOG_I( "mix->CIR_frame_cnt = %d\n\n", - mix->CIR_frame_cnt); - LOG_I( "mix->max_slice_size = %d\n\n", - mix->max_slice_size); - - g_mutex_unlock(mix->objectlock); - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -static MIX_RESULT mix_videofmtenc_encode_default (MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { - return MIX_RESULT_SUCCESS; -} - -static MIX_RESULT mix_videofmtenc_flush_default(MixVideoFormatEnc *mix) { - return MIX_RESULT_SUCCESS; -} - -static MIX_RESULT mix_videofmtenc_eos_default(MixVideoFormatEnc *mix) { - return MIX_RESULT_SUCCESS; -} - -static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix) { - - //TODO decide whether to put any of the teardown from _finalize() here - - return MIX_RESULT_SUCCESS; -} - -static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default( - MixVideoFormatEnc *mix, guint *max_size) { - - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixEncParamsType params_type) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (mix == NULL ||config_params_enc == NULL) { - LOG_E( - "!mix || config_params_enc == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - - - g_mutex_lock(mix->objectlock); - - mix->new_header_required = FALSE; - - switch (params_type) 
{ - case MIX_ENC_PARAMS_BITRATE: - { - ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc, &(mix->bitrate)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_bit_rate\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_INIT_QP: - { - ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, &(mix->initial_qp)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_init_qp\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_MIN_QP: - { - ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc, &(mix->min_qp)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_min_qp\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_WINDOW_SIZE: - { - ret = mix_videoconfigparamsenc_get_window_size (config_params_enc, &(mix->window_size)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to MIX_ENC_PARAMS_WINDOW_SIZE\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_TARGET_PERCENTAGE: - { - ret = mix_videoconfigparamsenc_get_target_percentage (config_params_enc, &(mix->target_percentage)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to MIX_ENC_PARAMS_TARGET_PERCENTAGE\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_MTU_SLICE_SIZE: - { - ret = mix_videoconfigparamsenc_get_max_slice_size(config_params_enc, &(mix->max_slice_size)); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_get_max_slice_size\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_mss_required = TRUE; - - } - - case MIX_ENC_PARAMS_SLICE_NUM: - { - /* - * This type of dynamic control will be handled in H.264 override method - */ - } - break; - - case MIX_ENC_PARAMS_RC_MODE: - { - ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc, &(mix->va_rcmode)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_rate_control\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - /* - * We only can change the RC mode to re-start encoding session - */ - - } - break; - - case MIX_ENC_PARAMS_RESOLUTION: - { - - ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc, &(mix->picture_width), &(mix->picture_height)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_picture_res\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->new_header_required = TRUE; - } - break; - case MIX_ENC_PARAMS_GOP_SIZE: - { - - ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc, &(mix->intra_period)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_intra_period\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->new_header_required = TRUE; - - } - break; - case MIX_ENC_PARAMS_FRAME_RATE: - { - ret = mix_videoconfigparamsenc_get_frame_rate 
(config_params_enc, &(mix->frame_rate_num), &(mix->frame_rate_denom)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_frame_rate\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_framerate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_FORCE_KEY_FRAME: - { - mix->new_header_required = TRUE; - - } - break; - - case MIX_ENC_PARAMS_REFRESH_TYPE: - { - ret = mix_videoconfigparamsenc_get_refresh_type(config_params_enc, &(mix->refresh_type)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_refresh_type\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - } - break; - - case MIX_ENC_PARAMS_AIR: - { - ret = mix_videoconfigparamsenc_get_AIR_params(config_params_enc, &(mix->air_params)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_AIR_params\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_AIR_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_CIR_FRAME_CNT: - { - ret = mix_videoconfigparamsenc_get_CIR_frame_cnt (config_params_enc, &(mix->CIR_frame_cnt)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - } - break; - - default: - break; - } - - g_mutex_unlock(mix->objectlock); - - return MIX_RESULT_SUCCESS; -} - -/* mixvideoformatenc class methods implementation */ - -MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg) { - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - - LOG_V( "Begin\n"); - - if (klass->getcaps) { - return klass->getcaps(mix, msg); - } - return MIX_RESULT_NOTIMPL; -} - -MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - - /*frame_mgr and input_buf_pool is reserved for future use*/ - if (klass->initialize) { - return klass->initialize(mix, config_params_enc, frame_mgr, - input_buf_pool, surface_pool, va_display); - } - - return MIX_RESULT_FAIL; - -} - -MIX_RESULT mix_videofmtenc_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { - - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - if (klass->encode) { - return klass->encode(mix, bufin, bufincnt, iovout, iovoutcnt, encode_params); - } - - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmtenc_flush(MixVideoFormatEnc *mix) { - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - if (klass->flush) { - return klass->flush(mix); - } - - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix) { - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - if (klass->eos) { - return klass->eos(mix); - } - - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix) { - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - if (klass->deinitialize) { - return klass->deinitialize(mix); - } - - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, guint * max_size) { - - 
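Every public entry point in this (now deleted) GObject implementation has the same dispatch shape: look up the class struct, test the vmethod slot, and forward, falling back to MIX_RESULT_FAIL (or MIX_RESULT_NOTIMPL for getcaps) when the slot is empty. One wrinkle worth noting: the get_max_coded_buffer_size wrapper below tests klass->encode but then calls klass->getmaxencodedbufsize, an apparent copy-paste slip from the encode wrapper. A condensed sketch of the dispatch idiom itself (hypothetical names; GObject casts, locking, and refcounting omitted):

    struct Enc;                               // instance stand-in

    struct EncClass {                         // per-class vtable
        int (*flush)(Enc *self) = nullptr;    // vmethod slot; may stay unset
    };

    struct Enc {
        EncClass *klass = nullptr;            // filled in at class-init time
    };

    int enc_flush(Enc *self) {
        EncClass *klass = self->klass;        // MIX_VIDEOFORMATENC_GET_CLASS()
        if (klass->flush)                     // guard: only call installed slots
            return klass->flush(self);        // dispatch to subclass override
        return -1;                            // stands in for MIX_RESULT_FAIL
    }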
MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - if (klass->encode) { - return klass->getmaxencodedbufsize(mix, max_size); - } - - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixEncParamsType params_type) { - - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - if (klass->set_dynamic_config) { - return klass->set_dynamic_config(mix, config_params_enc, params_type); - } - - return MIX_RESULT_FAIL; -} diff --git a/mix_video/src/mixvideoformatenc.cpp b/mix_video/src/mixvideoformatenc.cpp new file mode 100644 index 0000000..f76a39d --- /dev/null +++ b/mix_video/src/mixvideoformatenc.cpp @@ -0,0 +1,884 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+ */
+#include <glib.h>
+#include "mixvideolog.h"
+#include "mixvideoformatenc.h"
+
+//#define MDEBUG
+
+/* Default vmethods implementation */
+static MIX_RESULT mix_videofmtenc_getcaps_default(MixVideoFormatEnc *mix,
+        GString *msg);
+static MIX_RESULT mix_videofmtenc_initialize_default(MixVideoFormatEnc *mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay vadisplay);
+static MIX_RESULT
+mix_videofmtenc_encode_default(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params);
+static MIX_RESULT mix_videofmtenc_flush_default(MixVideoFormatEnc *mix);
+static MIX_RESULT mix_videofmtenc_eos_default(MixVideoFormatEnc *mix);
+static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix);
+static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default(
+        MixVideoFormatEnc *mix, guint *max_size);
+MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixEncParamsType params_type);
+
+
+static GObjectClass *parent_class = NULL;
+
+static void mix_videoformatenc_finalize(GObject * obj);
+G_DEFINE_TYPE (MixVideoFormatEnc, mix_videoformatenc, G_TYPE_OBJECT);
+
+static void mix_videoformatenc_init(MixVideoFormatEnc * self) {
+    /* TODO: public member initialization */
+
+    /* TODO: private member initialization */
+
+    self->objectlock = g_mutex_new();
+
+    self->initialized = FALSE;
+    self->framemgr = NULL;
+    self->surfacepool = NULL;
+    self->inputbufpool = NULL;
+    self->inputbufqueue = NULL;
+    self->va_display = NULL;
+    self->va_context = 0;
+    self->va_config = 0;
+    self->mime_type = NULL;
+    self->frame_rate_num = 0;
+    self->frame_rate_denom = 1;
+    self->picture_width = 0;
+    self->picture_height = 0;
+
+    /*
+     * bitrate control
+     */
+    self->initial_qp = 0;
+    self->min_qp = 0;
+    self->target_percentage = 95;
+    self->window_size = 500;
+    self->bitrate = 0;
+
+    self->intra_period = 0;
+    self->share_buf_mode = FALSE;
+    self->ci_frame_id = NULL;
+    self->ci_frame_num = 0;
+    self->drawable = 0x0;
+    self->need_display = TRUE;
+
+    self->va_rcmode = VA_RC_NONE;
+    self->va_format = VA_RT_FORMAT_YUV420;
+    self->va_entrypoint = VAEntrypointEncSlice;
+    self->va_profile = VAProfileH264Baseline;
+    self->level = 30;
+
+    self->refresh_type = MIX_VIDEO_NONIR;
+    self->CIR_frame_cnt = 15; //default value
+
+    /*
+     * Parameters for AIR intra refresh mode
+     */
+    self->air_params.air_MBs = 0;
+    self->air_params.air_threshold = 0;
+    self->air_params.air_auto = 0;
+
+    self->max_slice_size = 0;
+
+    self->force_key_frame = FALSE;
+    self->new_header_required = FALSE;
+    self->render_mss_required = FALSE;
+    self->render_QP_required = FALSE;
+    self->render_AIR_required = FALSE;
+    self->render_framerate_required = FALSE;
+    self->render_bitrate_required = FALSE;
+
+    //add more properties here
+}
+
+static void mix_videoformatenc_class_init(MixVideoFormatEncClass * klass) {
+    GObjectClass *gobject_class = (GObjectClass *) klass;
+
+    /* parent class for later use */
+    parent_class = reinterpret_cast<GObjectClass *>(g_type_class_peek_parent(klass));
+
+    gobject_class->finalize = mix_videoformatenc_finalize;
+
+    /* setup vmethods with base implementation */
+    klass->getcaps = mix_videofmtenc_getcaps_default;
+    klass->initialize = mix_videofmtenc_initialize_default;
+    klass->encode = mix_videofmtenc_encode_default;
+    klass->flush = mix_videofmtenc_flush_default;
+    klass->eos = mix_videofmtenc_eos_default;
+    klass->deinitialize = mix_videofmtenc_deinitialize_default;
+    klass->getmaxencodedbufsize = mix_videofmtenc_get_max_coded_buffer_size_default;
+    klass->set_dynamic_config = mix_videofmtenc_set_dynamic_enc_config_default;
+}
+
+MixVideoFormatEnc *
+mix_videoformatenc_new(void) {
+    MixVideoFormatEnc *ret = reinterpret_cast<MixVideoFormatEnc *>(g_object_new(MIX_TYPE_VIDEOFORMATENC, NULL));
+
+    return ret;
+}
+
+void mix_videoformatenc_finalize(GObject * obj) {
+    /* clean up here. */
+
+    if (obj == NULL) {
+        LOG_E( "obj == NULL\n");
+        return;
+    }
+
+    MixVideoFormatEnc *mix = MIX_VIDEOFORMATENC(obj);
+
+    LOG_V( "\n");
+
+    if(mix->objectlock) {
+        g_mutex_free(mix->objectlock);
+        mix->objectlock = NULL;
+    }
+
+    //MixVideo object calls the _deinitialize() for frame manager
+    if (mix->framemgr)
+    {
+        mix_framemanager_unref(mix->framemgr);
+        mix->framemgr = NULL;
+    }
+
+    if (mix->mime_type)
+    {
+        if (mix->mime_type->str)
+            g_string_free(mix->mime_type, TRUE);
+        else
+            g_string_free(mix->mime_type, FALSE);
+    }
+
+    if (mix->ci_frame_id)
+        g_free (mix->ci_frame_id);
+
+
+    if (mix->surfacepool)
+    {
+        mix_surfacepool_deinitialize(mix->surfacepool);
+        mix_surfacepool_unref(mix->surfacepool);
+        mix->surfacepool = NULL;
+    }
+
+
+    /* TODO: cleanup here */
+
+    /* Chain up parent */
+    if (parent_class->finalize) {
+        parent_class->finalize(obj);
+    }
+}
+
+MixVideoFormatEnc *
+mix_videoformatenc_ref(MixVideoFormatEnc * mix) {
+    return (MixVideoFormatEnc *) g_object_ref(G_OBJECT(mix));
+}
+
+/* Default vmethods implementation */
+static MIX_RESULT mix_videofmtenc_getcaps_default(MixVideoFormatEnc *mix,
+        GString *msg) {
+    LOG_V( "Begin\n");
+    return MIX_RESULT_SUCCESS;
+}
+
+static MIX_RESULT mix_videofmtenc_initialize_default(MixVideoFormatEnc *mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay va_display) {
+
+    LOG_V( "Begin\n");
+
+    if (mix == NULL || config_params_enc == NULL) {
+        LOG_E(
+                "!mix || config_params_enc == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    //TODO check return values of getter fns for config_params
+
+    g_mutex_lock(mix->objectlock);
+
+    mix->framemgr = frame_mgr;
+    mix_framemanager_ref(mix->framemgr);
+
+    mix->va_display = va_display;
+
+    LOG_V(
+            "Start to get properties from parent params\n");
+
+    /* get properties from param (parent) Object*/
+    ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc,
+            &(mix->bitrate));
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_bit_rate\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_frame_rate (config_params_enc,
+            &(mix->frame_rate_num), &(mix->frame_rate_denom));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_frame_rate\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc,
+            &(mix->initial_qp));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_init_qp\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+
+    ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc,
+            &(mix->min_qp));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_min_qp\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_target_percentage(config_params_enc,
+            &(mix->target_percentage));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_target_percentage\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_window_size (config_params_enc,
+            &(mix->window_size));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_window_size\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc,
+            &(mix->intra_period));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_intra_period\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc,
+            &(mix->picture_width), &(mix->picture_height));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_picture_res\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_share_buf_mode (config_params_enc,
+            &(mix->share_buf_mode));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_share_buf_mode\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+
+    ret = mix_videoconfigparamsenc_get_ci_frame_info (config_params_enc,
+            &(mix->ci_frame_id), &(mix->ci_frame_num));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_ci_frame_info\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+
+    ret = mix_videoconfigparamsenc_get_drawable (config_params_enc,
+            &(mix->drawable));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_drawable\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_need_display (config_params_enc,
+            &(mix->need_display));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_need_display\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc,
+            (MixRateControl*)&(mix->va_rcmode));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_rate_control\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_raw_format (config_params_enc,
+            (MixRawTargetFormat*)&(mix->va_format));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_raw_format\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_profile (config_params_enc,
+            (MixProfile *) &(mix->va_profile));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_profile\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_level (config_params_enc,
+            &(mix->level));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_level\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_CIR_frame_cnt(config_params_enc,
+            &(mix->CIR_frame_cnt));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+
+    ret = mix_videoconfigparamsenc_get_max_slice_size(config_params_enc,
+            &(mix->max_slice_size));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_max_slice_size\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+
+    ret = mix_videoconfigparamsenc_get_refresh_type(config_params_enc,
+            &(mix->refresh_type));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_refresh_type\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_AIR_params(config_params_enc,
+            &(mix->air_params));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_AIR_params\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V(
+            "======Video Encode Parent Object properties======:\n");
+
+    LOG_I( "mix->bitrate = %d\n",
+            mix->bitrate);
+    LOG_I( "mix->frame_rate = %d\n",
+            mix->frame_rate_num / mix->frame_rate_denom);
+    LOG_I( "mix->initial_qp = %d\n",
+            mix->initial_qp);
+    LOG_I( "mix->min_qp = %d\n",
+            mix->min_qp);
+    LOG_I( "mix->intra_period = %d\n",
+            mix->intra_period);
+    LOG_I( "mix->picture_width = %d\n",
+            mix->picture_width);
+    LOG_I( "mix->picture_height = %d\n",
+            mix->picture_height);
+    LOG_I( "mix->share_buf_mode = %d\n",
+            mix->share_buf_mode);
+    LOG_I( "mix->ci_frame_id = 0x%08x\n",
+            mix->ci_frame_id);
+    LOG_I( "mix->ci_frame_num = %d\n",
+            mix->ci_frame_num);
+    LOG_I( "mix->drawable = 0x%08x\n",
+            mix->drawable);
+    LOG_I( "mix->need_display = %d\n",
+            mix->need_display);
+    LOG_I( "mix->va_format = %d\n",
+            mix->va_format);
+    LOG_I( "mix->va_profile = %d\n",
+            mix->va_profile);
+    LOG_I( "mix->va_rcmode = %d\n\n",
+            mix->va_rcmode);
+    LOG_I( "mix->CIR_frame_cnt = %d\n\n",
+            mix->CIR_frame_cnt);
+    LOG_I( "mix->max_slice_size = %d\n\n",
+            mix->max_slice_size);
+
+    g_mutex_unlock(mix->objectlock);
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+static MIX_RESULT mix_videofmtenc_encode_default (MixVideoFormatEnc *mix, MixBuffer * bufin[],
+        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params) {
+    return MIX_RESULT_SUCCESS;
+}
+
+static MIX_RESULT mix_videofmtenc_flush_default(MixVideoFormatEnc *mix) {
+    return MIX_RESULT_SUCCESS;
+}
+
+static MIX_RESULT mix_videofmtenc_eos_default(MixVideoFormatEnc *mix) {
+    return MIX_RESULT_SUCCESS;
+}
+
+static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix) {
+
+    //TODO decide whether to put any of the teardown from _finalize() here
+
+    return MIX_RESULT_SUCCESS;
+}
+
+static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default(
+        MixVideoFormatEnc *mix, guint *max_size) {
+
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixEncParamsType params_type) {
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    if (mix == NULL || config_params_enc == NULL) {
+        LOG_E(
+                "!mix || config_params_enc == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+
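+    /*
+     * Each case below follows the same pattern: copy the updated value
+     * out of config_params_enc into this object, then raise the matching
+     * render_*_required (or new_header_required) flag so the codec
+     * subclass picks up the change on its next encode pass. Types with
+     * no handler fall through to the default case and are ignored.
+     */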
+    g_mutex_lock(mix->objectlock);
+
+    mix->new_header_required = FALSE;
+
+    switch (params_type) {
+        case MIX_ENC_PARAMS_BITRATE:
+        {
+            ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc, &(mix->bitrate));
+            if (ret != MIX_RESULT_SUCCESS) {
+                //TODO cleanup
+                LOG_E(
+                        "Failed to mix_videoconfigparamsenc_get_bit_rate\n");
+                g_mutex_unlock(mix->objectlock);
+                return MIX_RESULT_FAIL;
+            }
+
+            mix->render_bitrate_required = TRUE;
+        }
+        break;
+
+        case MIX_ENC_PARAMS_INIT_QP:
+        {
+            ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, &(mix->initial_qp));
+            if (ret != MIX_RESULT_SUCCESS) {
+                //TODO cleanup
+
+                LOG_E(
+                        "Failed to mix_videoconfigparamsenc_get_init_qp\n");
+                g_mutex_unlock(mix->objectlock);
+                return MIX_RESULT_FAIL;
+            }
+
+            mix->render_bitrate_required = TRUE;
+        }
+        break;
+
+        case MIX_ENC_PARAMS_MIN_QP:
+        {
+            ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc, &(mix->min_qp));
+            if (ret != MIX_RESULT_SUCCESS) {
+                //TODO cleanup
+
+                LOG_E(
+                        "Failed to mix_videoconfigparamsenc_get_min_qp\n");
+                g_mutex_unlock(mix->objectlock);
+                return MIX_RESULT_FAIL;
+            }
+
+            mix->render_bitrate_required = TRUE;
+        }
+        break;
+
+        case MIX_ENC_PARAMS_WINDOW_SIZE:
+        {
+            ret = mix_videoconfigparamsenc_get_window_size (config_params_enc, &(mix->window_size));
+            if (ret != MIX_RESULT_SUCCESS) {
+                //TODO cleanup
+
+                LOG_E(
+                        "Failed to mix_videoconfigparamsenc_get_window_size\n");
+                g_mutex_unlock(mix->objectlock);
+                return MIX_RESULT_FAIL;
+            }
+
+            mix->render_bitrate_required = TRUE;
+        }
+        break;
+
+        case MIX_ENC_PARAMS_TARGET_PERCENTAGE:
+        {
+            ret = mix_videoconfigparamsenc_get_target_percentage (config_params_enc, &(mix->target_percentage));
+            if (ret != MIX_RESULT_SUCCESS) {
+                //TODO cleanup
+
+                LOG_E(
+                        "Failed to mix_videoconfigparamsenc_get_target_percentage\n");
+                g_mutex_unlock(mix->objectlock);
+                return MIX_RESULT_FAIL;
+            }
+
+            mix->render_bitrate_required = TRUE;
+        }
+        break;
+
+        case MIX_ENC_PARAMS_MTU_SLICE_SIZE:
+        {
+            ret = mix_videoconfigparamsenc_get_max_slice_size(config_params_enc, &(mix->max_slice_size));
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed mix_videoconfigparamsenc_get_max_slice_size\n");
+                g_mutex_unlock(mix->objectlock);
+                return MIX_RESULT_FAIL;
+            }
+
+            mix->render_mss_required = TRUE;
+
+        }
+        break;
+
+        case MIX_ENC_PARAMS_SLICE_NUM:
+        {
+            /*
+             * This type of dynamic control will be handled in H.264 override method
+             */
+        }
+        break;
+
+        case MIX_ENC_PARAMS_RC_MODE:
+        {
+            ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc, (MixRateControl*)&(mix->va_rcmode));
+            if (ret != MIX_RESULT_SUCCESS) {
+                //TODO cleanup
+
+                LOG_E(
+                        "Failed to mix_videoconfigparamsenc_get_rate_control\n");
+                g_mutex_unlock(mix->objectlock);
+                return MIX_RESULT_FAIL;
+            }
+
+            /*
+             * We can only change the RC mode by re-starting the encoding session
+             */
+
+        }
+        break;
+
+        case MIX_ENC_PARAMS_RESOLUTION:
+        {
+
+            ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc, &(mix->picture_width), &(mix->picture_height));
+            if (ret != MIX_RESULT_SUCCESS) {
+                //TODO cleanup
+
+                LOG_E(
+                        "Failed to mix_videoconfigparamsenc_get_picture_res\n");
+                g_mutex_unlock(mix->objectlock);
+                return MIX_RESULT_FAIL;
+            }
+
+            mix->new_header_required = TRUE;
+        }
+        break;
+        case MIX_ENC_PARAMS_GOP_SIZE:
+        {
+
+            ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc, &(mix->intra_period));
+            if (ret != MIX_RESULT_SUCCESS) {
+                //TODO cleanup
+
+                LOG_E(
+                        "Failed to mix_videoconfigparamsenc_get_intra_period\n");
+                g_mutex_unlock(mix->objectlock);
+                return MIX_RESULT_FAIL;
+            }
+
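+            /* a new GOP size takes effect with the next sequence header, so request one */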
mix->new_header_required = TRUE; + + } + break; + case MIX_ENC_PARAMS_FRAME_RATE: + { + ret = mix_videoconfigparamsenc_get_frame_rate (config_params_enc, &(mix->frame_rate_num), &(mix->frame_rate_denom)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_frame_rate\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + mix->render_framerate_required = TRUE; + } + break; + + case MIX_ENC_PARAMS_FORCE_KEY_FRAME: + { + mix->new_header_required = TRUE; + + } + break; + + case MIX_ENC_PARAMS_REFRESH_TYPE: + { + ret = mix_videoconfigparamsenc_get_refresh_type(config_params_enc, &(mix->refresh_type)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_refresh_type\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + } + break; + + case MIX_ENC_PARAMS_AIR: + { + ret = mix_videoconfigparamsenc_get_AIR_params(config_params_enc, &(mix->air_params)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_AIR_params\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + + mix->render_AIR_required = TRUE; + } + break; + + case MIX_ENC_PARAMS_CIR_FRAME_CNT: + { + ret = mix_videoconfigparamsenc_get_CIR_frame_cnt (config_params_enc, &(mix->CIR_frame_cnt)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E( + "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); + g_mutex_unlock(mix->objectlock); + return MIX_RESULT_FAIL; + } + } + break; + + default: + break; + } + + g_mutex_unlock(mix->objectlock); + + return MIX_RESULT_SUCCESS; +} + +/* mixvideoformatenc class methods implementation */ + +MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg) { + MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); + + LOG_V( "Begin\n"); + + if (klass->getcaps) { + return klass->getcaps(mix, msg); + } + return MIX_RESULT_NOTIMPL; +} + +MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display) { + MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); + + /*frame_mgr and input_buf_pool is reserved for future use*/ + if (klass->initialize) { + return klass->initialize(mix, config_params_enc, frame_mgr, + input_buf_pool, surface_pool, va_display); + } + + return MIX_RESULT_FAIL; + +} + +MIX_RESULT mix_videofmtenc_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params) { + + MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); + if (klass->encode) { + return klass->encode(mix, bufin, bufincnt, iovout, iovoutcnt, encode_params); + } + + return MIX_RESULT_FAIL; +} + +MIX_RESULT mix_videofmtenc_flush(MixVideoFormatEnc *mix) { + MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); + if (klass->flush) { + return klass->flush(mix); + } + + return MIX_RESULT_FAIL; +} + +MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix) { + MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); + if (klass->eos) { + return klass->eos(mix); + } + + return MIX_RESULT_FAIL; +} + +MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix) { + MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); + if (klass->deinitialize) { + return klass->deinitialize(mix); + } 
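+
+    /* no deinitialize vmethod installed by the subclass */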
+    return MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, guint * max_size) {
+
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+    if (klass->getmaxencodedbufsize) {
+        return klass->getmaxencodedbufsize(mix, max_size);
+    }
+
+    return MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixEncParamsType params_type) {
+
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+    if (klass->set_dynamic_config) {
+        return klass->set_dynamic_config(mix, config_params_enc, params_type);
+    }
+
+    return MIX_RESULT_FAIL;
+}
diff --git a/mix_video/src/mixvideoformatenc_h263.c b/mix_video/src/mixvideoformatenc_h263.c
deleted file mode 100644
index 809332e..0000000
--- a/mix_video/src/mixvideoformatenc_h263.c
+++ /dev/null
@@ -1,1867 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-#include
-#include
-#include
-
-#include "mixvideolog.h"
-
-#include "mixvideoformatenc_h263.h"
-#include "mixvideoconfigparamsenc_h263.h"
-#include
-
-#undef SHOW_SRC
-
-#ifdef SHOW_SRC
-Window win = 0;
-#endif /* SHOW_SRC */
-
-
-/* The parent class. The pointer will be saved
- * in this class's initialization. The pointer
- * can be used for chaining method call if needed.
- */ -static MixVideoFormatEncClass *parent_class = NULL; - -static void mix_videoformatenc_h263_finalize(GObject * obj); - -/* - * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC - */ -G_DEFINE_TYPE (MixVideoFormatEnc_H263, mix_videoformatenc_h263, MIX_TYPE_VIDEOFORMATENC); - -static void mix_videoformatenc_h263_init(MixVideoFormatEnc_H263 * self) { - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); - - /* member initialization */ - self->encoded_frames = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - self->cur_frame = NULL; - self->ref_frame = NULL; - self->rec_frame = NULL; -#ifdef ANDROID - self->last_mix_buffer = NULL; -#endif - self->ci_shared_surfaces = NULL; - self->surfaces= NULL; - self->surface_num = 0; - self->coded_buf_index = 0; - - parent->initialized = FALSE; - -} - -static void mix_videoformatenc_h263_class_init( - MixVideoFormatEnc_H263Class * klass) { - - /* root class */ - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* direct parent class */ - MixVideoFormatEncClass *video_formatenc_class = - MIX_VIDEOFORMATENC_CLASS(klass); - - /* parent class for later use */ - parent_class = g_type_class_peek_parent(klass); - - /* setup finializer */ - gobject_class->finalize = mix_videoformatenc_h263_finalize; - - /* setup vmethods with base implementation */ - video_formatenc_class->getcaps = mix_videofmtenc_h263_getcaps; - video_formatenc_class->initialize = mix_videofmtenc_h263_initialize; - video_formatenc_class->encode = mix_videofmtenc_h263_encode; - video_formatenc_class->flush = mix_videofmtenc_h263_flush; - video_formatenc_class->eos = mix_videofmtenc_h263_eos; - video_formatenc_class->deinitialize = mix_videofmtenc_h263_deinitialize; - video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_h263_get_max_encoded_buf_size; -} - -MixVideoFormatEnc_H263 * -mix_videoformatenc_h263_new(void) { - MixVideoFormatEnc_H263 *ret = - g_object_new(MIX_TYPE_VIDEOFORMATENC_H263, NULL); - - return ret; -} - -void mix_videoformatenc_h263_finalize(GObject * obj) { - /* clean up here. 
*/ - - /*MixVideoFormatEnc_H263 *mix = MIX_VIDEOFORMATENC_H263(obj); */ - GObjectClass *root_class = (GObjectClass *) parent_class; - - LOG_V( "\n"); - - /* Chain up parent */ - if (root_class->finalize) { - root_class->finalize(obj); - } -} - -MixVideoFormatEnc_H263 * -mix_videoformatenc_h263_ref(MixVideoFormatEnc_H263 * mix) { - return (MixVideoFormatEnc_H263 *) g_object_ref(G_OBJECT(mix)); -} - -/*H263 vmethods implementation */ -MIX_RESULT mix_videofmtenc_h263_getcaps(MixVideoFormatEnc *mix, GString *msg) { - - LOG_V( "mix_videofmtenc_h263_getcaps\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - if (parent_class->getcaps) { - return parent_class->getcaps(mix, msg); - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_h263_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display ) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - MixVideoConfigParamsEncH263 * config_params_enc_h263; - - VAStatus va_status = VA_STATUS_SUCCESS; - VASurfaceID * surfaces = NULL; - - gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - gint va_num_profiles, va_num_entrypoints; - - VAProfile *va_profiles = NULL; - VAEntrypoint *va_entrypoints = NULL; - VAConfigAttrib va_attrib[2]; - guint index; - - - /*frame_mgr and input_buf_pool is reservered for future use*/ - - if (mix == NULL || config_params_enc == NULL || va_display == NULL) { - LOG_E( - "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "begin\n"); - - /* Chainup parent method. */ - if (parent_class->initialize) { - ret = parent_class->initialize(mix, config_params_enc, - frame_mgr, input_buf_pool, surface_pool, - va_display); - } - - if (ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - if (!MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263(mix); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H263 (config_params_enc)) { - config_params_enc_h263 = - MIX_VIDEOCONFIGPARAMSENC_H263 (config_params_enc); - } else { - LOG_V( - "mix_videofmtenc_h263_initialize: no h263 config params found\n"); - return MIX_RESULT_FAIL; - } - - g_mutex_lock(parent->objectlock); - - LOG_V( - "Start to get properities from H263 params\n"); - - /* get properties from H263 params Object, which is special to H263 format*/ - - ret = mix_videoconfigparamsenc_h263_get_slice_num (config_params_enc_h263, - &self->slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h263_get_slice_num\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsenc_h263_get_dlk (config_params_enc_h263, - &(self->disable_deblocking_filter_idc)); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h263_get_dlk\n"); - goto cleanup; - } - - - LOG_V( - "======H263 Encode Object properities======:\n"); - - LOG_I( "self->slice_num = %d\n", - self->slice_num); - LOG_I( "self->disabled_deblocking_filter_idc = %d\n\n", - self->disable_deblocking_filter_idc); - - LOG_V( - "Get properities from params done\n"); - - parent->va_display = va_display; - - LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", - (guint)va_display); - -#if 0 - /* query the vender information, can ignore*/ - va_vendor = vaQueryVendorString 
(va_display); - LOG_I( "Vendor = %s\n", - va_vendor); -#endif - - /*get the max number for profiles/entrypoints/attribs*/ - va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); - - va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); - - va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - - va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); - va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); - - if (va_profiles == NULL || va_entrypoints ==NULL) - { - LOG_E( - "!va_profiles || !va_entrypoints\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_I( - "va_profiles = 0x%08x\n", (guint)va_profiles); - - LOG_V( "vaQueryConfigProfiles\n"); - - - va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigProfiles\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaQueryConfigProfiles Done\n"); - - - - /*check whether profile is supported*/ - for(index= 0; index < va_num_profiles; index++) { - if(parent->va_profile == va_profiles[index]) - break; - } - - if(index == va_num_profiles) - { - LOG_E( "Profile not supported\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaQueryConfigEntrypoints\n"); - - - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints(va_display, - parent->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - for (index = 0; index < va_num_entrypoints; index ++) { - if (va_entrypoints[index] == VAEntrypointEncSlice) { - break; - } - } - - if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_attrib[0].type = VAConfigAttribRTFormat; - va_attrib[1].type = VAConfigAttribRateControl; - - LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaGetConfigAttributes\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if ((va_attrib[0].value & parent->va_format) == 0) { - LOG_E( "Matched format not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - if ((va_attrib[1].value & parent->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = parent->va_rcmode; - - LOG_V( "======VA Configuration======\n"); - - LOG_I( "profile = %d\n", - parent->va_profile); - LOG_I( "va_entrypoint = %d\n", - parent->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", - va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", - va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", - va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) = %d\n", - va_attrib[1].value); - - LOG_V( "vaCreateConfig\n"); - - va_status = vaCreateConfig(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2, &(parent->va_config)); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaCreateConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - /*TODO: compute the surface number*/ - 
int numSurfaces; - - if (parent->share_buf_mode) { - numSurfaces = 2; - } - else { - numSurfaces = 8; - parent->ci_frame_num = 0; - } - - self->surface_num = numSurfaces + parent->ci_frame_num; - - surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); - - if (surfaces == NULL) - { - LOG_E( - "Failed allocate surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_V( "vaCreateSurfaces\n"); - - va_status = vaCreateSurfaces(va_display, parent->picture_width, - parent->picture_height, parent->va_format, - numSurfaces, surfaces); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaCreateSurfaces\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if (parent->share_buf_mode) { - - LOG_V( - "We are in share buffer mode!\n"); - self->ci_shared_surfaces = - g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); - - if (self->ci_shared_surfaces == NULL) - { - LOG_E( - "Failed allocate shared surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - guint index; - for(index = 0; index < parent->ci_frame_num; index++) { - - LOG_I( "ci_frame_id = %lu\n", - parent->ci_frame_id[index]); - - LOG_V( - "vaCreateSurfaceFromCIFrame\n"); - - va_status = vaCreateSurfaceFromCIFrame(va_display, - (gulong) (parent->ci_frame_id[index]), - &self->ci_shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateSurfaceFromCIFrame\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - } - - LOG_V( - "vaCreateSurfaceFromCIFrame Done\n"); - - }// if (parent->share_buf_mode) - - self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); - - if (self->surfaces == NULL) - { - LOG_E( - "Failed allocate private surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - if (parent->share_buf_mode) { - /*shared surfaces should be put in pool first, - * because we will get it accoring to CI index*/ - for(index = 0; index < parent->ci_frame_num; index++) - self->surfaces[index] = self->ci_shared_surfaces[index]; - } - - for(index = 0; index < numSurfaces; index++) { - self->surfaces[index + parent->ci_frame_num] = surfaces[index]; - } - - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", - numSurfaces + parent->ci_frame_num); - -#if 0 //current put this in gst - images = g_malloc(sizeof(VAImage)*numSurfaces); - if (images == NULL) - { - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < numSurfaces; index++) { - //Derive an VAImage from an existing surface. 
- //The image buffer can then be mapped/unmapped for CPU access - va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); - } -#endif - - LOG_V( "mix_surfacepool_new\n"); - - parent->surfacepool = mix_surfacepool_new(); - if (surface_pool) - *surface_pool = parent->surfacepool; - //which is useful to check before encode - - if (parent->surfacepool == NULL) - { - LOG_E( - "Failed to mix_surfacepool_new\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "mix_surfacepool_initialize\n"); - - ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces, va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - - LOG_E("Error init failure\n"); - - ret = MIX_RESULT_ALREADY_INIT; - goto cleanup; - default: - break; - } - - - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext(va_display, parent->va_config, - parent->picture_width, parent->picture_height, - VA_PROGRESSIVE, self->surfaces, parent->ci_frame_num + numSurfaces, - &(parent->va_context)); - - LOG_I( - "Created libva context width %d, height %d\n", - parent->picture_width, parent->picture_height); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", - (guint)va_status); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - guint max_size = 0; - ret = mix_videofmtenc_h263_get_max_encoded_buf_size (parent, &max_size); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videofmtenc_h263_get_max_encoded_buf_size\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &self->coded_buf[0]); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &(self->coded_buf[1])); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -#ifdef SHOW_SRC - Display * display = XOpenDisplay (NULL); - - LOG_I( "display = 0x%08x\n", - (guint) display); - win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, - parent->picture_width, parent->picture_height, 0, 0, - WhitePixel(display, 0)); - XMapWindow(display, win); - XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - - XSync(display, False); - LOG_I( "va_display = 0x%08x\n", - (guint) va_display); - -#endif /* SHOW_SRC */ - -cleanup: - - if(ret == MIX_RESULT_SUCCESS) { - parent->initialized = TRUE; - } - - /*free profiles and entrypoints*/ - if(va_profiles) - g_free(va_profiles); - if(va_entrypoints) - g_free(va_entrypoints); - if(surfaces) - g_free(surfaces); - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT mix_videofmtenc_h263_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - - LOG_V( "Begin\n"); - - /*currenly only support one input and output buffer*/ - - if (bufincnt != 1 || iovoutcnt != 1) { - LOG_E( - "buffer 
count not equel to 1\n"); - LOG_E( - "maybe some exception occurs\n"); - } - - if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { - LOG_E( - "!mix || !bufin[0] ||!iovout[0]\n"); - return MIX_RESULT_NULL_PTR; - } - -#if 0 - if (parent_class->encode) { - return parent_class->encode(mix, bufin, bufincnt, iovout, - iovoutcnt, encode_params); - } -#endif - - if (! MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263 (mix); - - LOG_V( "Locking\n"); - g_mutex_lock(parent->objectlock); - - - //TODO: also we could move some encode Preparation work to here - - LOG_V( - "mix_videofmtenc_h263_process_encode\n"); - - ret = mix_videofmtenc_h263_process_encode (self, - bufin[0], iovout[0]); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h263_process_encode\n"); - goto cleanup; - } - -cleanup: - - LOG_V( "UnLocking\n"); - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT mix_videofmtenc_h263_flush(MixVideoFormatEnc *mix) { - - //MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - /*not chain to parent flush func*/ -#if 0 - if (parent_class->flush) { - return parent_class->flush(mix, msg); - } -#endif - - if(!MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263(mix); - - g_mutex_lock(mix->objectlock); - - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } - - /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) - { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (self->ref_frame != NULL) - { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; - } -#ifdef ANDROID - if(self->last_mix_buffer) { - mix_buffer_unref(self->last_mix_buffer); - self->last_mix_buffer = NULL; - } -#endif - /*reset the properities*/ - self->encoded_frames = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - - g_mutex_unlock(mix->objectlock); - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_h263_eos(MixVideoFormatEnc *mix) { - - LOG_V( "\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (parent_class->eos) { - return parent_class->eos(mix); - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_h263_deinitialize(MixVideoFormatEnc *mix) { - - MixVideoFormatEnc *parent = NULL; - VAStatus va_status; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if(!MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; - - if(parent_class->deinitialize) { - ret = parent_class->deinitialize(mix); - } - - if(ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263(mix); - - LOG_V( "Release frames\n"); - - g_mutex_lock(parent->objectlock); - -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } -#endif - - /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) - { - mix_videoframe_unref 
(self->rec_frame); - self->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (self->ref_frame != NULL) - { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; - } - - LOG_V( "Release surfaces\n"); - - if (self->ci_shared_surfaces) - { - g_free (self->ci_shared_surfaces); - self->ci_shared_surfaces = NULL; - } - - if (self->surfaces) - { - g_free (self->surfaces); - self->surfaces = NULL; - } - - LOG_V( "vaDestroyContext\n"); - - va_status = vaDestroyContext (parent->va_display, parent->va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyContext\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaDestroyConfig\n"); - - va_status = vaDestroyConfig (parent->va_display, parent->va_config); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -cleanup: - - parent->initialized = TRUE; - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT mix_videofmtenc_h263_send_seq_params (MixVideoFormatEnc_H263 *mix) -{ - - VAStatus va_status; - VAEncSequenceParameterBufferH263 h263_seq_param; - VABufferID seq_para_buf_id; - - - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) { - LOG_E("mix = NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - - if (!MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - /*set up the sequence params for HW*/ - h263_seq_param.bits_per_second= parent->bitrate; - h263_seq_param.frame_rate = 30; //hard-coded, driver need; - //(unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; - h263_seq_param.initial_qp = parent->initial_qp; - h263_seq_param.min_qp = parent->min_qp; - h263_seq_param.intra_period = parent->intra_period; - - //h263_seq_param.fixed_vop_rate = 30; - - LOG_V( - "===h263 sequence params===\n"); - - LOG_I( "bitrate = %d\n", - h263_seq_param.bits_per_second); - LOG_I( "frame_rate = %d\n", - h263_seq_param.frame_rate); - LOG_I( "initial_qp = %d\n", - h263_seq_param.initial_qp); - LOG_I( "min_qp = %d\n", - h263_seq_param.min_qp); - LOG_I( "intra_period = %d\n\n", - h263_seq_param.intra_period); - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncSequenceParameterBufferType, - sizeof(h263_seq_param), - 1, &h263_seq_param, - &seq_para_buf_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &seq_para_buf_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT mix_videofmtenc_h263_send_picture_parameter (MixVideoFormatEnc_H263 *mix) -{ - VAStatus va_status; - VAEncPictureParameterBufferH263 h263_pic_param; - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) { - LOG_E("mix = NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - -#if 0 //not needed currently - MixVideoConfigParamsEncH263 * params_h263 - = MIX_VIDEOCONFIGPARAMSENC_H263 (config_params_enc); -#endif - - if (! 
MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; - - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - /*set picture params for HW*/ - h263_pic_param.reference_picture = mix->ref_frame->frame_id; - h263_pic_param.reconstructed_picture = mix->rec_frame->frame_id; - h263_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; - h263_pic_param.picture_width = parent->picture_width; - h263_pic_param.picture_height = parent->picture_height; - h263_pic_param.picture_type = mix->is_intra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; - - - - LOG_V( - "======h263 picture params======\n"); - LOG_I( "reference_picture = 0x%08x\n", - h263_pic_param.reference_picture); - LOG_I( "reconstructed_picture = 0x%08x\n", - h263_pic_param.reconstructed_picture); - LOG_I( "coded_buf = 0x%08x\n", - h263_pic_param.coded_buf); - LOG_I( "coded_buf_index = %d\n", - mix->coded_buf_index); - LOG_I( "picture_width = %d\n", - h263_pic_param.picture_width); - LOG_I( "picture_height = %d\n", - h263_pic_param.picture_height); - LOG_I( "picture_type = %d\n\n", - h263_pic_param.picture_type); - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncPictureParameterBufferType, - sizeof(h263_pic_param), - 1,&h263_pic_param, - &mix->pic_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->pic_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; - -} - - -MIX_RESULT mix_videofmtenc_h263_send_slice_parameter (MixVideoFormatEnc_H263 *mix) -{ - VAStatus va_status; - - guint slice_num; - guint slice_height; - guint slice_index; - guint slice_height_in_mb; - - if (mix == NULL) { - LOG_E("mix = NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V("Begin\n\n"); - - - MixVideoFormatEnc *parent = NULL; - - if (! 
MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - //slice_num = mix->slice_num; - slice_num = 1; // one slice per picture; - slice_height = parent->picture_height / slice_num; - - slice_height += 15; - slice_height &= (~15); - - va_status = vaCreateBuffer (parent->va_display, parent->va_context, - VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), - slice_num, NULL, - &mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E("Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - VAEncSliceParameterBuffer *slice_param, *current_slice; - - va_status = vaMapBuffer(parent->va_display, - mix->slice_param_buf, - (void **)&slice_param); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E("Failed to vaMapBuffer\n"); - return MIX_RESULT_FAIL; - } - - current_slice = slice_param; - - for (slice_index = 0; slice_index < slice_num; slice_index++) { - current_slice = slice_param + slice_index; - slice_height_in_mb = - min (slice_height, parent->picture_height - - slice_index * slice_height) / 16; - - // starting MB row number for this slice - current_slice->start_row_number = slice_index * slice_height / 16; - // slice height measured in MB - current_slice->slice_height = slice_height_in_mb; - current_slice->slice_flags.bits.is_intra = mix->is_intra; - current_slice->slice_flags.bits.disable_deblocking_filter_idc - = mix->disable_deblocking_filter_idc; - - LOG_V("======h263 slice params======\n"); - - LOG_I("slice_index = %d\n", - (gint) slice_index); - LOG_I("start_row_number = %d\n", - (gint) current_slice->start_row_number); - LOG_I("slice_height_in_mb = %d\n", - (gint) current_slice->slice_height); - LOG_I("slice.is_intra = %d\n", - (gint) current_slice->slice_flags.bits.is_intra); - LOG_I("disable_deblocking_filter_idc = %d\n\n", - (gint) mix->disable_deblocking_filter_idc); - - } - - va_status = vaUnmapBuffer(parent->va_display, mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E("Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->slice_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E("Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - LOG_V("end\n"); - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, - MixBuffer * bufin, MixIOVec * iovout) -{ - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus va_status = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; - VAContextID va_context; - gulong surface = 0; - guint16 width, height; - - MixVideoFrame * tmp_frame; - guint8 *buf; - - if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { - LOG_E( - "mix == NUL) || bufin == NULL || iovout == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - if (! 
MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - va_display = parent->va_display; - va_context = parent->va_context; - width = parent->picture_width; - height = parent->picture_height; - - - LOG_I( "encoded_frames = %d\n", - mix->encoded_frames); - LOG_I( "is_intra = %d\n", - mix->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", - (guint) parent->ci_frame_id); - - /* determine the picture type*/ - if ((mix->encoded_frames % parent->intra_period) == 0) { - mix->is_intra = TRUE; - } else { - mix->is_intra = FALSE; - } - - LOG_I( "is_intra_picture = %d\n", - mix->is_intra); - - LOG_V( - "Get Surface from the pool\n"); - - /*current we use one surface for source data, - * one for reference and one for reconstructed*/ - /*TODO, could be refine here*/ - - if (!parent->share_buf_mode) { - LOG_V( - "We are NOT in share buffer mode\n"); - - if (mix->ref_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - LOG_V( "Get Surface Done\n"); - - - VAImage src_image; - guint8 *pvbuf; - guint8 *dst_y; - guint8 *dst_uv; - int i,j; - - LOG_V( - "map source data to surface\n"); - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); - goto cleanup; - } - - - LOG_I( - "surface id = 0x%08x\n", (guint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); - //need to destroy - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDeriveImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); - - - va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed to vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "vaImage information\n"); - LOG_I( - "image->pitches[0] = %d\n", image->pitches[0]); - LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); - LOG_I( - "image->offsets[0] = %d\n", image->offsets[0]); - LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); - LOG_I( - "image->num_planes = %d\n", image->num_planes); - LOG_I( - "image->width = %d\n", image->width); - LOG_I( - "image->height = %d\n", image->height); - - LOG_I( - "input buf size = %d\n", bufin->size); - - guint8 *inbuf = bufin->data; - -#ifndef ANDROID -#define USE_SRC_FMT_YUV420 -#else -#define USE_SRC_FMT_NV21 -#endif - -#ifdef USE_SRC_FMT_YUV420 - /*need to convert YUV420 to NV12*/ - dst_y = pvbuf +image->offsets[0]; - - for (i = 0; i < height; i ++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = 
- inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; - } - dst_uv += image->pitches[1]; - } - -#else //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 - int offset_uv = width * height; - guint8 *inbuf_uv = inbuf + offset_uv; - int height_uv = height / 2; - int width_uv = width; - - dst_y = pvbuf + image->offsets[0]; - for (i = 0; i < height; i++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - -#ifdef USE_SRC_FMT_NV12 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i++) { - memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); - dst_uv += image->pitches[1]; - } -#else //USE_SRC_FMT_NV21 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i ++) { - for (j = 0; j < width_uv; j += 2) { - dst_uv[j] = inbuf_uv[j+1]; //u - dst_uv[j+1] = inbuf_uv[j]; //v - } - dst_uv += image->pitches[1]; - inbuf_uv += width_uv; - } -#endif -#endif //USE_SRC_FMT_YUV420 - - va_status = vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDestroyImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "Map source data to surface done\n"); - - } - - else {//if (!parent->share_buf_mode) - - MixVideoFrame * frame = mix_videoframe_new(); - - if (mix->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_frame, frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "get reference surface from pool failed\n"); - goto cleanup; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_frame, frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get recontructed surface from pool failed\n"); - goto cleanup; - } - } - - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - guint ci_idx; -#ifndef ANDROID - memcpy (&ci_idx, bufin->data, bufin->size); -#else - memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); -#endif - - LOG_I( - "surface_num = %d\n", mix->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > mix->surface_num - 2) { - LOG_E( - "the CI frame idx is too bigger than CI frame number\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_frame, frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - goto cleanup; - } - } - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - - } - - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(guint)va_context); - LOG_I( "surface = 0x%08x\n",(guint)surface); - LOG_I( "va_display = 0x%08x\n",(guint)va_display); - - - va_status = 
vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - ret = mix_videofmtenc_h263_send_encode_command (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - goto cleanup; - } - - - if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - } - - if (mix->encoded_frames == 0) { - mix->encoded_frames ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - - mix->last_frame = mix->cur_frame; - - - /* determine the picture type*/ - if ((mix->encoded_frames % parent->intra_period) == 0) { - mix->is_intra = TRUE; - } else { - mix->is_intra = FALSE; - } - - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - - - } - - LOG_V( "vaSyncSurface\n"); - - va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaSyncSurface\n"); - //return MIX_RESULT_FAIL; - } - - - LOG_V( - "Start to get encoded data\n"); - - /*get encoded data from the VA buffer*/ - va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - VACodedBufferSegment *coded_seg = NULL; - int num_seg = 0; - guint total_size = 0; - guint size = 0; - - coded_seg = (VACodedBufferSegment *)buf; - num_seg = 1; - - while (1) { - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - - coded_seg = coded_seg->next; - num_seg ++; - } - - -#if 0 - // first 4 bytes is the size of the buffer - memcpy (&(iovout->data_size), (void*)buf, 4); - //size = (guint*) buf; -#endif - - iovout->data_size = total_size; - - if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. 
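/* [Editor's aside -- illustrative sketch, not part of this patch.]
 * vaMapBuffer() on a VAEncCodedBufferType buffer hands back a linked
 * list of VACodedBufferSegment nodes; the code around here walks it
 * once to total the sizes and, after allocating, once more to
 * concatenate the payloads. A self-contained sketch of that two-pass
 * drain; the helper name is hypothetical: */
#include <stdlib.h>
#include <string.h>
#include <va/va.h>

static unsigned char *drain_coded_segments(VACodedBufferSegment *head,
                                           size_t *out_size)
{
    VACodedBufferSegment *s;
    size_t total = 0, off = 0;

    for (s = head; s != NULL; s = (VACodedBufferSegment *) s->next)
        total += s->size;                   /* pass 1: total payload */

    unsigned char *out = malloc(total);
    if (out == NULL)
        return NULL;

    for (s = head; s != NULL; s = (VACodedBufferSegment *) s->next) {
        memcpy(out + off, s->buf, s->size); /* pass 2: concatenate */
        off += s->size;
    }

    *out_size = total;
    return out;
}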
- - iovout->data = g_malloc (iovout->data_size); - if (iovout->data == NULL) { - LOG_E( "iovout->data == NULL\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - //memcpy (iovout->data, buf + 16, iovout->data_size); - - coded_seg = (VACodedBufferSegment *)buf; - total_size = 0; - - while (1) { - - memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - - coded_seg = coded_seg->next; - } - - iovout->buffer_size = iovout->data_size; - - LOG_I( - "out size is = %d\n", iovout->data_size); - - va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "get encoded data done\n"); - - if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - return MIX_RESULT_FAIL; - } - } - - if (mix->encoded_frames == 1) { - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - ret = mix_videofmtenc_h263_send_encode_command (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - goto cleanup; - } - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - } - - VASurfaceStatus status; - - /*query the status of current surface*/ - va_status = vaQuerySurfaceStatus(va_display, surface, &status); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaQuerySurfaceStatus\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - mix->pic_skipped = status & VASurfaceSkipped; - - //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame); - - if (parent->need_display) { - ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - goto cleanup; - } - - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_framemanager_enqueue\n"); - goto cleanup; - } - } - - /*update the reference surface and reconstructed surface */ - if (!mix->pic_skipped) { - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - } - - -#if 0 - if (mix->ref_frame != NULL) - mix_videoframe_unref (mix->ref_frame); - mix->ref_frame = mix->rec_frame; - - mix_videoframe_unref (mix->cur_frame); -#endif - - mix->encoded_frames ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - mix->last_frame = mix->cur_frame; - -#ifdef ANDROID - if(mix->last_mix_buffer) { - LOG_V("calls to mix_buffer_unref \n"); - LOG_V("refcount = %d\n", MIX_PARAMS(mix->last_mix_buffer)->refcount); - mix_buffer_unref(mix->last_mix_buffer); - } - - LOG_V("ref the current bufin\n"); - mix->last_mix_buffer = mix_buffer_ref(bufin); -#endif - - if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_frame); - mix->cur_frame = NULL; - } - -cleanup: - - if(ret != MIX_RESULT_SUCCESS) { - if(iovout->data) { - g_free(iovout->data); - iovout->data = NULL; - } - } - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT 
mix_videofmtenc_h263_get_max_encoded_buf_size ( - MixVideoFormatEnc *mix, guint * max_size) -{ - - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) - { - LOG_E( - "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - parent = MIX_VIDEOFORMATENC(mix); - MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263 (mix); - - if (MIX_IS_VIDEOFORMATENC_H263(self)) { - - if (self->coded_buf_size > 0) { - *max_size = self->coded_buf_size; - LOG_V ("Already calculate the max encoded size, get the value directly"); - return MIX_RESULT_SUCCESS; - } - - /*base on the rate control mode to calculate the defaule encoded buffer size*/ - if (self->va_rcmode == VA_RC_NONE) { - self->coded_buf_size = - (parent->picture_width* parent->picture_height * 830) / (16 * 16); - // set to value according to QP - } - else { - self->coded_buf_size = parent->bitrate/ 4; - } - - self->coded_buf_size = - max (self->coded_buf_size , - (parent->picture_width* parent->picture_height * 830) / (16 * 16)); - - /*in case got a very large user input bit rate value*/ - self->coded_buf_size = - max(self->coded_buf_size, - (parent->picture_width * parent->picture_height * 1.5 * 8)); - self->coded_buf_size = (self->coded_buf_size + 15) &(~15); - } - else - { - LOG_E( - "not H263 video encode Object\n"); - return MIX_RESULT_INVALID_PARAM; - } - - *max_size = self->coded_buf_size; - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_h263_send_encode_command (MixVideoFormatEnc_H263 *mix) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOFORMATENC_H263(mix)) - { - if (mix->encoded_frames == 0) { - ret = mix_videofmtenc_h263_send_seq_params (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h263_send_seq_params\n"); - return MIX_RESULT_FAIL; - } - } - - ret = mix_videofmtenc_h263_send_picture_parameter (mix); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h263_send_picture_parameter\n"); - return MIX_RESULT_FAIL; - } - - ret = mix_videofmtenc_h263_send_slice_parameter (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h263_send_slice_parameter\n"); - return MIX_RESULT_FAIL; - } - - } - else - { - LOG_E( - "not H263 video encode Object\n"); - return MIX_RESULT_INVALID_PARAM; - } - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} diff --git a/mix_video/src/mixvideoformatenc_h263.cpp b/mix_video/src/mixvideoformatenc_h263.cpp new file mode 100644 index 0000000..f7b3626 --- /dev/null +++ b/mix_video/src/mixvideoformatenc_h263.cpp @@ -0,0 +1,1862 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
+ + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include +#include +#include + +#include "mixvideolog.h" + +#include "mixvideoformatenc_h263.h" +#include "mixvideoconfigparamsenc_h263.h" +#include + +#undef SHOW_SRC + +#ifdef SHOW_SRC +Window win = 0; +#endif /* SHOW_SRC */ + + +/* The parent class. The pointer will be saved + * in this class's initialization. The pointer + * can be used for chaining method call if needed. + */ +static MixVideoFormatEncClass *parent_class = NULL; + +static void mix_videoformatenc_h263_finalize(GObject * obj); + +/* + * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC + */ +G_DEFINE_TYPE (MixVideoFormatEnc_H263, mix_videoformatenc_h263, MIX_TYPE_VIDEOFORMATENC); + +static void mix_videoformatenc_h263_init(MixVideoFormatEnc_H263 * self) { + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); + + /* member initialization */ + self->encoded_frames = 0; + self->pic_skipped = FALSE; + self->is_intra = TRUE; + self->cur_frame = NULL; + self->ref_frame = NULL; + self->rec_frame = NULL; +#ifdef ANDROID + self->last_mix_buffer = NULL; +#endif + self->ci_shared_surfaces = NULL; + self->surfaces= NULL; + self->surface_num = 0; + self->coded_buf_index = 0; + + parent->initialized = FALSE; + +} + +static void mix_videoformatenc_h263_class_init( + MixVideoFormatEnc_H263Class * klass) { + + /* root class */ + GObjectClass *gobject_class = (GObjectClass *) klass; + + /* direct parent class */ + MixVideoFormatEncClass *video_formatenc_class = + MIX_VIDEOFORMATENC_CLASS(klass); + + /* parent class for later use */ + parent_class = reinterpret_cast(g_type_class_peek_parent(klass)); + + /* setup finializer */ + gobject_class->finalize = mix_videoformatenc_h263_finalize; + + /* setup vmethods with base implementation */ + video_formatenc_class->getcaps = mix_videofmtenc_h263_getcaps; + video_formatenc_class->initialize = mix_videofmtenc_h263_initialize; + video_formatenc_class->encode = mix_videofmtenc_h263_encode; + video_formatenc_class->flush = mix_videofmtenc_h263_flush; + video_formatenc_class->eos = mix_videofmtenc_h263_eos; + video_formatenc_class->deinitialize = mix_videofmtenc_h263_deinitialize; + video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_h263_get_max_encoded_buf_size; +} + +MixVideoFormatEnc_H263 * +mix_videoformatenc_h263_new(void) { + MixVideoFormatEnc_H263 *ret = reinterpret_cast( + g_object_new(MIX_TYPE_VIDEOFORMATENC_H263, NULL)); + + return ret; +} + +void mix_videoformatenc_h263_finalize(GObject * obj) { + /* clean up here. 
*/ + + /*MixVideoFormatEnc_H263 *mix = MIX_VIDEOFORMATENC_H263(obj); */ + GObjectClass *root_class = (GObjectClass *) parent_class; + + LOG_V( "\n"); + + /* Chain up parent */ + if (root_class->finalize) { + root_class->finalize(obj); + } +} + +MixVideoFormatEnc_H263 * +mix_videoformatenc_h263_ref(MixVideoFormatEnc_H263 * mix) { + return (MixVideoFormatEnc_H263 *) g_object_ref(G_OBJECT(mix)); +} + +/*H263 vmethods implementation */ +MIX_RESULT mix_videofmtenc_h263_getcaps(MixVideoFormatEnc *mix, GString *msg) { + + LOG_V( "mix_videofmtenc_h263_getcaps\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + + if (parent_class->getcaps) { + return parent_class->getcaps(mix, msg); + } + return MIX_RESULT_SUCCESS; +} + +#define CLEAN_UP {\ + if(ret == MIX_RESULT_SUCCESS) {\ + parent->initialized = TRUE;\ + }\ + /*free profiles and entrypoints*/\ + if(va_profiles)\ + g_free(va_profiles);\ + if(va_entrypoints)\ + g_free(va_entrypoints);\ + if(surfaces)\ + g_free(surfaces);\ + g_mutex_unlock(parent->objectlock);\ + LOG_V( "end\n"); \ + return ret;} + +MIX_RESULT mix_videofmtenc_h263_initialize(MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display ) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + MixVideoConfigParamsEncH263 * config_params_enc_h263; + + VAStatus va_status = VA_STATUS_SUCCESS; + VASurfaceID * surfaces = NULL; + + gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; + gint va_num_profiles, va_num_entrypoints; + + VAProfile *va_profiles = NULL; + VAEntrypoint *va_entrypoints = NULL; + VAConfigAttrib va_attrib[2]; + guint index; + + + /*frame_mgr and input_buf_pool is reservered for future use*/ + + if (mix == NULL || config_params_enc == NULL || va_display == NULL) { + LOG_E( + "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "begin\n"); + + /* Chainup parent method. 
*/ + if (parent_class->initialize) { + ret = parent_class->initialize(mix, config_params_enc, + frame_mgr, input_buf_pool, surface_pool, + va_display); + } + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + + if (!MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263(mix); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_H263 (config_params_enc)) { + config_params_enc_h263 = + MIX_VIDEOCONFIGPARAMSENC_H263 (config_params_enc); + } else { + LOG_V( + "mix_videofmtenc_h263_initialize: no h263 config params found\n"); + return MIX_RESULT_FAIL; + } + + g_mutex_lock(parent->objectlock); + + LOG_V( + "Start to get properities from H263 params\n"); + + /* get properties from H263 params Object, which is special to H263 format*/ + + ret = mix_videoconfigparamsenc_h263_get_slice_num (config_params_enc_h263, + &self->slice_num); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h263_get_slice_num\n"); + CLEAN_UP; + } + + ret = mix_videoconfigparamsenc_h263_get_dlk (config_params_enc_h263, + &(self->disable_deblocking_filter_idc)); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h263_get_dlk\n"); + CLEAN_UP; + } + + + LOG_V( + "======H263 Encode Object properities======:\n"); + + LOG_I( "self->slice_num = %d\n", + self->slice_num); + LOG_I( "self->disabled_deblocking_filter_idc = %d\n\n", + self->disable_deblocking_filter_idc); + + LOG_V( + "Get properities from params done\n"); + + parent->va_display = va_display; + + LOG_V( "Get Display\n"); + LOG_I( "Display = 0x%08x\n", + (guint)va_display); + +#if 0 + /* query the vender information, can ignore*/ + va_vendor = vaQueryVendorString (va_display); + LOG_I( "Vendor = %s\n", + va_vendor); +#endif + + /*get the max number for profiles/entrypoints/attribs*/ + va_max_num_profiles = vaMaxNumProfiles(va_display); + LOG_I( "va_max_num_profiles = %d\n", + va_max_num_profiles); + + va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); + LOG_I( "va_max_num_entrypoints = %d\n", + va_max_num_entrypoints); + + va_max_num_attribs = vaMaxNumConfigAttributes(va_display); + LOG_I( "va_max_num_attribs = %d\n", + va_max_num_attribs); + + va_profiles = reinterpret_cast(g_malloc(sizeof(VAProfile)*va_max_num_profiles)); + va_entrypoints = reinterpret_cast(g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints)); + + if (va_profiles == NULL || va_entrypoints ==NULL) + { + LOG_E( + "!va_profiles || !va_entrypoints\n"); + ret = MIX_RESULT_NO_MEMORY; + CLEAN_UP; + } + + LOG_I( + "va_profiles = 0x%08x\n", (guint)va_profiles); + + LOG_V( "vaQueryConfigProfiles\n"); + + + va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigProfiles\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( "vaQueryConfigProfiles Done\n"); + + + + /*check whether profile is supported*/ + for(index= 0; index < va_num_profiles; index++) { + if(parent->va_profile == va_profiles[index]) + break; + } + + if(index == va_num_profiles) + { + LOG_E( "Profile not supported\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( "vaQueryConfigEntrypoints\n"); + + + /*Check entry point*/ + va_status = vaQueryConfigEntrypoints(va_display, + parent->va_profile, + va_entrypoints, &va_num_entrypoints); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigEntrypoints\n"); + ret = 
MIX_RESULT_FAIL; + CLEAN_UP; + } + + for (index = 0; index < va_num_entrypoints; index ++) { + if (va_entrypoints[index] == VAEntrypointEncSlice) { + break; + } + } + + if (index == va_num_entrypoints) { + LOG_E( "Entrypoint not found\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + va_attrib[0].type = VAConfigAttribRTFormat; + va_attrib[1].type = VAConfigAttribRateControl; + + LOG_V( "vaGetConfigAttributes\n"); + + va_status = vaGetConfigAttributes(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaGetConfigAttributes\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + if ((va_attrib[0].value & parent->va_format) == 0) { + LOG_E( "Matched format not found\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + + if ((va_attrib[1].value & parent->va_rcmode) == 0) { + LOG_E( "RC mode not found\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; + va_attrib[1].value = parent->va_rcmode; + + LOG_V( "======VA Configuration======\n"); + + LOG_I( "profile = %d\n", + parent->va_profile); + LOG_I( "va_entrypoint = %d\n", + parent->va_entrypoint); + LOG_I( "va_attrib[0].type = %d\n", + va_attrib[0].type); + LOG_I( "va_attrib[1].type = %d\n", + va_attrib[1].type); + LOG_I( "va_attrib[0].value (Format) = %d\n", + va_attrib[0].value); + LOG_I( "va_attrib[1].value (RC mode) = %d\n", + va_attrib[1].value); + + LOG_V( "vaCreateConfig\n"); + + va_status = vaCreateConfig(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2, &(parent->va_config)); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaCreateConfig\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + /*TODO: compute the surface number*/ + int numSurfaces; + + if (parent->share_buf_mode) { + numSurfaces = 2; + } + else { + numSurfaces = 8; + parent->ci_frame_num = 0; + } + + self->surface_num = numSurfaces + parent->ci_frame_num; + + surfaces = reinterpret_cast(g_malloc(sizeof(VASurfaceID)*numSurfaces)); + + if (surfaces == NULL) + { + LOG_E( + "Failed allocate surface\n"); + ret = MIX_RESULT_NO_MEMORY; + CLEAN_UP; + } + + LOG_V( "vaCreateSurfaces\n"); + + va_status = vaCreateSurfaces(va_display, parent->picture_width, + parent->picture_height, parent->va_format, + numSurfaces, surfaces); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaCreateSurfaces\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + if (parent->share_buf_mode) { + + LOG_V( + "We are in share buffer mode!\n"); + self->ci_shared_surfaces = reinterpret_cast( + g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num)); + + if (self->ci_shared_surfaces == NULL) + { + LOG_E( + "Failed allocate shared surface\n"); + ret = MIX_RESULT_NO_MEMORY; + CLEAN_UP; + } + + guint index; + for(index = 0; index < parent->ci_frame_num; index++) { + + LOG_I( "ci_frame_id = %lu\n", + parent->ci_frame_id[index]); + + LOG_V( + "vaCreateSurfaceFromCIFrame\n"); + + va_status = vaCreateSurfaceFromCIFrame(va_display, + (gulong) (parent->ci_frame_id[index]), + &self->ci_shared_surfaces[index]); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateSurfaceFromCIFrame\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + } + + LOG_V( + "vaCreateSurfaceFromCIFrame Done\n"); + + }// if (parent->share_buf_mode) + + self->surfaces = reinterpret_cast(g_malloc(sizeof(VASurfaceID) * self->surface_num)); + + if (self->surfaces == NULL) + { + LOG_E( + "Failed allocate private surface\n"); + ret = 
MIX_RESULT_NO_MEMORY; + CLEAN_UP; + } + + if (parent->share_buf_mode) { + /*shared surfaces should be put in pool first, + * because we will get it accoring to CI index*/ + for(index = 0; index < parent->ci_frame_num; index++) + self->surfaces[index] = self->ci_shared_surfaces[index]; + } + + for(index = 0; index < numSurfaces; index++) { + self->surfaces[index + parent->ci_frame_num] = surfaces[index]; + } + + LOG_V( "assign surface Done\n"); + LOG_I( "Created %d libva surfaces\n", + numSurfaces + parent->ci_frame_num); + +#if 0 //current put this in gst + images = g_malloc(sizeof(VAImage)*numSurfaces); + if (images == NULL) + { + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + for (index = 0; index < numSurfaces; index++) { + //Derive an VAImage from an existing surface. + //The image buffer can then be mapped/unmapped for CPU access + va_status = vaDeriveImage(va_display, surfaces[index], + &images[index]); + } +#endif + + LOG_V( "mix_surfacepool_new\n"); + + parent->surfacepool = mix_surfacepool_new(); + if (surface_pool) + *surface_pool = parent->surfacepool; + //which is useful to check before encode + + if (parent->surfacepool == NULL) + { + LOG_E( + "Failed to mix_surfacepool_new\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( + "mix_surfacepool_initialize\n"); + + ret = mix_surfacepool_initialize(parent->surfacepool, + self->surfaces, parent->ci_frame_num + numSurfaces, va_display); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + + LOG_E("Error init failure\n"); + + ret = MIX_RESULT_ALREADY_INIT; + CLEAN_UP; + default: + break; + } + + + //Initialize and save the VA context ID + LOG_V( "vaCreateContext\n"); + + va_status = vaCreateContext(va_display, parent->va_config, + parent->picture_width, parent->picture_height, + VA_PROGRESSIVE, self->surfaces, parent->ci_frame_num + numSurfaces, + &(parent->va_context)); + + LOG_I( + "Created libva context width %d, height %d\n", + parent->picture_width, parent->picture_height); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateContext\n"); + LOG_I( "va_status = %d\n", + (guint)va_status); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + guint max_size = 0; + ret = mix_videofmtenc_h263_get_max_encoded_buf_size (parent, &max_size); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videofmtenc_h263_get_max_encoded_buf_size\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &self->coded_buf[0]); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &(self->coded_buf[1])); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + +#ifdef SHOW_SRC + Display * display = XOpenDisplay (NULL); + + LOG_I( "display = 0x%08x\n", + (guint) display); + win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, + parent->picture_width, parent->picture_height, 0, 0, + WhitePixel(display, 0)); + XMapWindow(display, win); + XSelectInput(display, win, KeyPressMask | StructureNotifyMask); + + XSync(display, False); + LOG_I( "va_display = 
0x%08x\n", + (guint) va_display); + +#endif /* SHOW_SRC */ + + CLEAN_UP; +} +#undef CLEAN_UP + +#define CLEAN_UP {\ + LOG_V( "UnLocking\n"); \ + g_mutex_unlock(parent->objectlock);\ + LOG_V( "end\n"); \ + return ret;} + +MIX_RESULT mix_videofmtenc_h263_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + + LOG_V( "Begin\n"); + + /*currenly only support one input and output buffer*/ + + if (bufincnt != 1 || iovoutcnt != 1) { + LOG_E( + "buffer count not equel to 1\n"); + LOG_E( + "maybe some exception occurs\n"); + } + + if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { + LOG_E( + "!mix || !bufin[0] ||!iovout[0]\n"); + return MIX_RESULT_NULL_PTR; + } + +#if 0 + if (parent_class->encode) { + return parent_class->encode(mix, bufin, bufincnt, iovout, + iovoutcnt, encode_params); + } +#endif + + if (! MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263 (mix); + + LOG_V( "Locking\n"); + g_mutex_lock(parent->objectlock); + + + //TODO: also we could move some encode Preparation work to here + + LOG_V( + "mix_videofmtenc_h263_process_encode\n"); + + ret = mix_videofmtenc_h263_process_encode (self, + bufin[0], iovout[0]); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h263_process_encode\n"); + CLEAN_UP; + } + + CLEAN_UP; +} +#undef CLEAN_UP + +MIX_RESULT mix_videofmtenc_h263_flush(MixVideoFormatEnc *mix) { + + //MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + + /*not chain to parent flush func*/ +#if 0 + if (parent_class->flush) { + return parent_class->flush(mix, msg); + } +#endif + + if(!MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263(mix); + + g_mutex_lock(mix->objectlock); + + /*unref the current source surface*/ + if (self->cur_frame != NULL) + { + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; + } + + /*unref the reconstructed surface*/ + if (self->rec_frame != NULL) + { + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_frame != NULL) + { + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; + } +#ifdef ANDROID + if(self->last_mix_buffer) { + mix_buffer_unref(self->last_mix_buffer); + self->last_mix_buffer = NULL; + } +#endif + /*reset the properities*/ + self->encoded_frames = 0; + self->pic_skipped = FALSE; + self->is_intra = TRUE; + + g_mutex_unlock(mix->objectlock); + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_h263_eos(MixVideoFormatEnc *mix) { + + LOG_V( "\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if (parent_class->eos) { + return parent_class->eos(mix); + } + return MIX_RESULT_SUCCESS; +} + +#define CLEAN_UP {\ + parent->initialized = TRUE;\ + g_mutex_unlock(parent->objectlock); \ + LOG_V( "end\n"); \ + return ret;} + +MIX_RESULT mix_videofmtenc_h263_deinitialize(MixVideoFormatEnc *mix) { + + MixVideoFormatEnc *parent = NULL; + VAStatus va_status; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + 
+ if(!MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + if(parent_class->deinitialize) { + ret = parent_class->deinitialize(mix); + } + + if(ret != MIX_RESULT_SUCCESS) + { + return ret; + } + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263(mix); + + LOG_V( "Release frames\n"); + + g_mutex_lock(parent->objectlock); + +#if 0 + /*unref the current source surface*/ + if (self->cur_frame != NULL) + { + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; + } +#endif + + /*unref the reconstructed surface*/ + if (self->rec_frame != NULL) + { + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_frame != NULL) + { + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; + } + + LOG_V( "Release surfaces\n"); + + if (self->ci_shared_surfaces) + { + g_free (self->ci_shared_surfaces); + self->ci_shared_surfaces = NULL; + } + + if (self->surfaces) + { + g_free (self->surfaces); + self->surfaces = NULL; + } + + LOG_V( "vaDestroyContext\n"); + + va_status = vaDestroyContext (parent->va_display, parent->va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyContext\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( "vaDestroyConfig\n"); + + va_status = vaDestroyConfig (parent->va_display, parent->va_config); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyConfig\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + CLEAN_UP; +} +#undef CLEAN_UP + +MIX_RESULT mix_videofmtenc_h263_send_seq_params (MixVideoFormatEnc_H263 *mix) +{ + + VAStatus va_status; + VAEncSequenceParameterBufferH263 h263_seq_param; + VABufferID seq_para_buf_id; + + + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) { + LOG_E("mix = NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + if (!MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set up the sequence params for HW*/ + h263_seq_param.bits_per_second= parent->bitrate; + h263_seq_param.frame_rate = 30; //hard-coded, driver need; + //(unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; + h263_seq_param.initial_qp = parent->initial_qp; + h263_seq_param.min_qp = parent->min_qp; + h263_seq_param.intra_period = parent->intra_period; + + //h263_seq_param.fixed_vop_rate = 30; + + LOG_V( + "===h263 sequence params===\n"); + + LOG_I( "bitrate = %d\n", + h263_seq_param.bits_per_second); + LOG_I( "frame_rate = %d\n", + h263_seq_param.frame_rate); + LOG_I( "initial_qp = %d\n", + h263_seq_param.initial_qp); + LOG_I( "min_qp = %d\n", + h263_seq_param.min_qp); + LOG_I( "intra_period = %d\n\n", + h263_seq_param.intra_period); + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncSequenceParameterBufferType, + sizeof(h263_seq_param), + 1, &h263_seq_param, + &seq_para_buf_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &seq_para_buf_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videofmtenc_h263_send_picture_parameter (MixVideoFormatEnc_H263 *mix) +{ + VAStatus va_status; + 
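/* [Editor's aside -- illustrative sketch, not part of this patch.]
 * send_seq_params() above shows the libva submission pattern used for
 * every parameter type in this file: fill a parameter struct, wrap it
 * in a buffer with vaCreateBuffer(), then queue it with
 * vaRenderPicture(). A minimal sketch; the field values are
 * placeholders, not the encoder's defaults: */
#include <va/va.h>

static VAStatus submit_h263_seq(VADisplay dpy, VAContextID ctx)
{
    VAEncSequenceParameterBufferH263 sp = {0};
    VABufferID buf;
    VAStatus st;

    sp.bits_per_second = 384000;    /* placeholder bitrate */
    sp.frame_rate      = 30;
    sp.initial_qp      = 15;
    sp.min_qp          = 4;
    sp.intra_period    = 30;

    /* one buffer element holding one copy of the struct */
    st = vaCreateBuffer(dpy, ctx, VAEncSequenceParameterBufferType,
                        sizeof(sp), 1, &sp, &buf);
    if (st != VA_STATUS_SUCCESS)
        return st;

    /* queue it against the picture currently being encoded */
    return vaRenderPicture(dpy, ctx, &buf, 1);
}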
VAEncPictureParameterBufferH263 h263_pic_param; + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) { + LOG_E("mix = NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + +#if 0 //not needed currently + MixVideoConfigParamsEncH263 * params_h263 + = MIX_VIDEOCONFIGPARAMSENC_H263 (config_params_enc); +#endif + + if (! MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set picture params for HW*/ + h263_pic_param.reference_picture = mix->ref_frame->frame_id; + h263_pic_param.reconstructed_picture = mix->rec_frame->frame_id; + h263_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; + h263_pic_param.picture_width = parent->picture_width; + h263_pic_param.picture_height = parent->picture_height; + h263_pic_param.picture_type = mix->is_intra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; + + + + LOG_V( + "======h263 picture params======\n"); + LOG_I( "reference_picture = 0x%08x\n", + h263_pic_param.reference_picture); + LOG_I( "reconstructed_picture = 0x%08x\n", + h263_pic_param.reconstructed_picture); + LOG_I( "coded_buf = 0x%08x\n", + h263_pic_param.coded_buf); + LOG_I( "coded_buf_index = %d\n", + mix->coded_buf_index); + LOG_I( "picture_width = %d\n", + h263_pic_param.picture_width); + LOG_I( "picture_height = %d\n", + h263_pic_param.picture_height); + LOG_I( "picture_type = %d\n\n", + h263_pic_param.picture_type); + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncPictureParameterBufferType, + sizeof(h263_pic_param), + 1,&h263_pic_param, + &mix->pic_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->pic_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + return MIX_RESULT_SUCCESS; + +} + + +MIX_RESULT mix_videofmtenc_h263_send_slice_parameter (MixVideoFormatEnc_H263 *mix) +{ + VAStatus va_status; + + guint slice_num; + guint slice_height; + guint slice_index; + guint slice_height_in_mb; + + if (mix == NULL) { + LOG_E("mix = NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V("Begin\n\n"); + + + MixVideoFormatEnc *parent = NULL; + + if (! 
MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + //slice_num = mix->slice_num; + slice_num = 1; // one slice per picture; + slice_height = parent->picture_height / slice_num; + + slice_height += 15; + slice_height &= (~15); + + va_status = vaCreateBuffer (parent->va_display, parent->va_context, + VAEncSliceParameterBufferType, + sizeof(VAEncSliceParameterBuffer), + slice_num, NULL, + &mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E("Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + VAEncSliceParameterBuffer *slice_param, *current_slice; + + va_status = vaMapBuffer(parent->va_display, + mix->slice_param_buf, + (void **)&slice_param); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E("Failed to vaMapBuffer\n"); + return MIX_RESULT_FAIL; + } + + current_slice = slice_param; + + for (slice_index = 0; slice_index < slice_num; slice_index++) { + current_slice = slice_param + slice_index; + slice_height_in_mb = + min (slice_height, parent->picture_height + - slice_index * slice_height) / 16; + + // starting MB row number for this slice + current_slice->start_row_number = slice_index * slice_height / 16; + // slice height measured in MB + current_slice->slice_height = slice_height_in_mb; + current_slice->slice_flags.bits.is_intra = mix->is_intra; + current_slice->slice_flags.bits.disable_deblocking_filter_idc + = mix->disable_deblocking_filter_idc; + + LOG_V("======h263 slice params======\n"); + + LOG_I("slice_index = %d\n", + (gint) slice_index); + LOG_I("start_row_number = %d\n", + (gint) current_slice->start_row_number); + LOG_I("slice_height_in_mb = %d\n", + (gint) current_slice->slice_height); + LOG_I("slice.is_intra = %d\n", + (gint) current_slice->slice_flags.bits.is_intra); + LOG_I("disable_deblocking_filter_idc = %d\n\n", + (gint) mix->disable_deblocking_filter_idc); + + } + + va_status = vaUnmapBuffer(parent->va_display, mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E("Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->slice_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E("Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + LOG_V("end\n"); + + return MIX_RESULT_SUCCESS; +} + + +#define CLEAN_UP {\ + if(ret != MIX_RESULT_SUCCESS) {\ + if(iovout->data) {\ + g_free(iovout->data);\ + iovout->data = NULL;\ + }\ + }\ + LOG_V( "end\n"); \ + return MIX_RESULT_SUCCESS;} + +MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, + MixBuffer * bufin, MixIOVec * iovout) +{ + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus va_status = VA_STATUS_SUCCESS; + VADisplay va_display = NULL; + VAContextID va_context; + gulong surface = 0; + guint16 width, height; + + MixVideoFrame * tmp_frame; + guint8 *buf; + + if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { + LOG_E( + "mix == NUL) || bufin == NULL || iovout == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + if (! 
MIX_IS_VIDEOFORMATENC_H263(mix)) + return MIX_RESULT_INVALID_PARAM; + + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + va_display = parent->va_display; + va_context = parent->va_context; + width = parent->picture_width; + height = parent->picture_height; + + + LOG_I( "encoded_frames = %d\n", + mix->encoded_frames); + LOG_I( "is_intra = %d\n", + mix->is_intra); + LOG_I( "ci_frame_id = 0x%08x\n", + (guint) parent->ci_frame_id); + + /* determine the picture type*/ + if ((mix->encoded_frames % parent->intra_period) == 0) { + mix->is_intra = TRUE; + } else { + mix->is_intra = FALSE; + } + + LOG_I( "is_intra_picture = %d\n", + mix->is_intra); + + LOG_V( + "Get Surface from the pool\n"); + + /*current we use one surface for source data, + * one for reference and one for reconstructed*/ + /*TODO, could be refine here*/ + + if (!parent->share_buf_mode) { + LOG_V( + "We are NOT in share buffer mode\n"); + + if (mix->ref_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + CLEAN_UP; + } + } + + if (mix->rec_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + CLEAN_UP; + } + } + + if (parent->need_display) { + mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + CLEAN_UP; + } + } + + LOG_V( "Get Surface Done\n"); + + + VAImage src_image; + guint8 *pvbuf; + guint8 *dst_y; + guint8 *dst_uv; + int i,j; + + LOG_V( + "map source data to surface\n"); + + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videoframe_get_frame_id\n"); + CLEAN_UP; + } + + + LOG_I( + "surface id = 0x%08x\n", (guint) surface); + + va_status = vaDeriveImage(va_display, surface, &src_image); + //need to destroy + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDeriveImage\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + VAImage *image = &src_image; + + LOG_V( "vaDeriveImage Done\n"); + + + va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed to vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( + "vaImage information\n"); + LOG_I( + "image->pitches[0] = %d\n", image->pitches[0]); + LOG_I( + "image->pitches[1] = %d\n", image->pitches[1]); + LOG_I( + "image->offsets[0] = %d\n", image->offsets[0]); + LOG_I( + "image->offsets[1] = %d\n", image->offsets[1]); + LOG_I( + "image->num_planes = %d\n", image->num_planes); + LOG_I( + "image->width = %d\n", image->width); + LOG_I( + "image->height = %d\n", image->height); + + LOG_I( + "input buf size = %d\n", bufin->size); + + guint8 *inbuf = bufin->data; + +#ifndef ANDROID +#define USE_SRC_FMT_YUV420 +#else +#define USE_SRC_FMT_NV21 +#endif + +#ifdef USE_SRC_FMT_YUV420 + /*need to convert YUV420 to NV12*/ + dst_y = pvbuf +image->offsets[0]; + + for (i = 0; i < height; i ++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + + dst_uv = pvbuf + image->offsets[1]; + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; + dst_uv [j + 1] = + inbuf [width * height 
* 5 / 4 + i * width / 2 + j / 2]; + } + dst_uv += image->pitches[1]; + } + +#else //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 + int offset_uv = width * height; + guint8 *inbuf_uv = inbuf + offset_uv; + int height_uv = height / 2; + int width_uv = width; + + dst_y = pvbuf + image->offsets[0]; + for (i = 0; i < height; i++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + +#ifdef USE_SRC_FMT_NV12 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i++) { + memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); + dst_uv += image->pitches[1]; + } +#else //USE_SRC_FMT_NV21 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i ++) { + for (j = 0; j < width_uv; j += 2) { + dst_uv[j] = inbuf_uv[j+1]; //u + dst_uv[j+1] = inbuf_uv[j]; //v + } + dst_uv += image->pitches[1]; + inbuf_uv += width_uv; + } +#endif +#endif //USE_SRC_FMT_YUV420 + + va_status = vaUnmapBuffer(va_display, image->buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + va_status = vaDestroyImage(va_display, src_image.image_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDestroyImage\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( + "Map source data to surface done\n"); + + } + + else {//if (!parent->share_buf_mode) + + MixVideoFrame * frame = mix_videoframe_new(); + + if (mix->ref_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + CLEAN_UP; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->ref_frame, frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used + { + LOG_E( + "get reference surface from pool failed\n"); + CLEAN_UP; + } + } + + if (mix->rec_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + CLEAN_UP; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->rec_frame, frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get reconstructed surface from pool failed\n"); + CLEAN_UP; + } + } + + if (parent->need_display) { + mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + guint ci_idx; +#ifndef ANDROID + memcpy (&ci_idx, bufin->data, bufin->size); +#else + memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); +#endif + + LOG_I( + "surface_num = %d\n", mix->surface_num); + LOG_I( + "ci_frame_idx = %d\n", ci_idx); + + if (ci_idx > mix->surface_num - 2) { + LOG_E( + "the CI frame idx is larger than the CI frame number\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + + ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + CLEAN_UP; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->cur_frame, frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get current working surface from pool failed\n"); + CLEAN_UP; + } + } + + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + + } + + LOG_V( "vaBeginPicture\n"); + LOG_I( "va_context = 0x%08x\n",(guint)va_context); + LOG_I( "surface = 0x%08x\n",(guint)surface); + LOG_I( "va_display = 0x%08x\n",(guint)va_display); + + + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status !=
VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaBeginPicture\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + ret = mix_videofmtenc_h263_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h263_send_encode_command\n"); + CLEAN_UP; + } + + + if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + } + + if (mix->encoded_frames == 0) { + mix->encoded_frames ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + + mix->last_frame = mix->cur_frame; + + + /* determine the picture type*/ + if ((mix->encoded_frames % parent->intra_period) == 0) { + mix->is_intra = TRUE; + } else { + mix->is_intra = FALSE; + } + + tmp_frame = mix->rec_frame; + mix->rec_frame= mix->ref_frame; + mix->ref_frame = tmp_frame; + + + } + + LOG_V( "vaSyncSurface\n"); + + va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaSyncSurface\n"); + //return MIX_RESULT_FAIL; + } + + + LOG_V( + "Start to get encoded data\n"); + + /*get encoded data from the VA buffer*/ + va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + VACodedBufferSegment *coded_seg = NULL; + int num_seg = 0; + guint total_size = 0; + guint size = 0; + + coded_seg = (VACodedBufferSegment *)buf; + num_seg = 1; + + while (1) { + total_size += coded_seg->size; + + if (coded_seg->next == NULL) + break; + + coded_seg = reinterpret_cast<VACodedBufferSegment *>(coded_seg->next); + num_seg ++; + } + + +#if 0 + // first 4 bytes are the size of the buffer + memcpy (&(iovout->data_size), (void*)buf, 4); + //size = (guint*) buf; +#endif + + iovout->data_size = total_size; + + if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. 
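/* [Editor's aside -- illustrative sketch, not part of this patch.]
 * iovout->data follows a caller-may-preallocate convention: a NULL
 * pointer means _encode owns the allocation (and frees it again on
 * the error path in CLEAN_UP). The same convention in isolation, with
 * hypothetical names; g_try_malloc() is used so the NULL check is
 * meaningful (plain g_malloc() aborts on OOM instead of returning
 * NULL): */
#include <glib.h>

typedef struct { guchar *data; guint data_size; } OutVec;

static gboolean ensure_out_buffer(OutVec *out, guint needed)
{
    out->data_size = needed;
    if (out->data == NULL)                    /* caller gave no buffer */
        out->data = (guchar *) g_try_malloc(needed);
    return out->data != NULL;
}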
+ + iovout->data = (guchar*)g_malloc (iovout->data_size); + if (iovout->data == NULL) { + LOG_E( "iovout->data == NULL\n"); + ret = MIX_RESULT_NO_MEMORY; + CLEAN_UP; + } + } + + //memcpy (iovout->data, buf + 16, iovout->data_size); + + coded_seg = (VACodedBufferSegment *)buf; + total_size = 0; + + while (1) { + + memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); + total_size += coded_seg->size; + + if (coded_seg->next == NULL) + break; + + coded_seg = reinterpret_cast<VACodedBufferSegment *>(coded_seg->next); + } + + iovout->buffer_size = iovout->data_size; + + LOG_I( + "out size = %d\n", iovout->data_size); + + va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( "get encoded data done\n"); + + if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + return MIX_RESULT_FAIL; + } + } + + if (mix->encoded_frames == 1) { + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaBeginPicture\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + ret = mix_videofmtenc_h263_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h263_send_encode_command\n"); + CLEAN_UP; + } + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + } + + VASurfaceStatus status; + + /*query the status of current surface*/ + va_status = vaQuerySurfaceStatus(va_display, surface, &status); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaQuerySurfaceStatus\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + mix->pic_skipped = status & VASurfaceSkipped; + + //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame); + + if (parent->need_display) { + ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + CLEAN_UP; + } + + ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_framemanager_enqueue\n"); + CLEAN_UP; + } + } + + /*update the reference surface and reconstructed surface */ + if (!mix->pic_skipped) { + tmp_frame = mix->rec_frame; + mix->rec_frame= mix->ref_frame; + mix->ref_frame = tmp_frame; + } + + +#if 0 + if (mix->ref_frame != NULL) + mix_videoframe_unref (mix->ref_frame); + mix->ref_frame = mix->rec_frame; + + mix_videoframe_unref (mix->cur_frame); +#endif + + mix->encoded_frames ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + mix->last_frame = mix->cur_frame; + +#ifdef ANDROID + if(mix->last_mix_buffer) { + LOG_V("calls to mix_buffer_unref \n"); + LOG_V("refcount = %d\n", MIX_PARAMS(mix->last_mix_buffer)->refcount); + mix_buffer_unref(mix->last_mix_buffer); + } + + LOG_V("ref the current bufin\n"); + mix->last_mix_buffer = mix_buffer_ref(bufin); +#endif + + if (!(parent->need_display)) { + mix_videoframe_unref (mix->cur_frame); + mix->cur_frame = NULL; + } + CLEAN_UP; +} +#undef CLEAN_UP + +MIX_RESULT mix_videofmtenc_h263_get_max_encoded_buf_size ( + MixVideoFormatEnc *mix, guint * max_size) +{ + + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) + { + LOG_E( + "mix 
== NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + parent = MIX_VIDEOFORMATENC(mix); + MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263 (mix); + + if (MIX_IS_VIDEOFORMATENC_H263(self)) { + + if (self->coded_buf_size > 0) { + *max_size = self->coded_buf_size; + LOG_V ("Already calculate the max encoded size, get the value directly"); + return MIX_RESULT_SUCCESS; + } + + /*base on the rate control mode to calculate the defaule encoded buffer size*/ + if (self->va_rcmode == VA_RC_NONE) { + self->coded_buf_size = + (parent->picture_width* parent->picture_height * 830) / (16 * 16); + // set to value according to QP + } + else { + self->coded_buf_size = parent->bitrate/ 4; + } + + self->coded_buf_size = + max (self->coded_buf_size , + (parent->picture_width* parent->picture_height * 830) / (16 * 16)); + + /*in case got a very large user input bit rate value*/ + self->coded_buf_size = + max(self->coded_buf_size, + (parent->picture_width * parent->picture_height * 1.5 * 8)); + self->coded_buf_size = (self->coded_buf_size + 15) &(~15); + } + else + { + LOG_E( + "not H263 video encode Object\n"); + return MIX_RESULT_INVALID_PARAM; + } + + *max_size = self->coded_buf_size; + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_h263_send_encode_command (MixVideoFormatEnc_H263 *mix) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (MIX_IS_VIDEOFORMATENC_H263(mix)) + { + if (mix->encoded_frames == 0) { + ret = mix_videofmtenc_h263_send_seq_params (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h263_send_seq_params\n"); + return MIX_RESULT_FAIL; + } + } + + ret = mix_videofmtenc_h263_send_picture_parameter (mix); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h263_send_picture_parameter\n"); + return MIX_RESULT_FAIL; + } + + ret = mix_videofmtenc_h263_send_slice_parameter (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h263_send_slice_parameter\n"); + return MIX_RESULT_FAIL; + } + + } + else + { + LOG_E( + "not H263 video encode Object\n"); + return MIX_RESULT_INVALID_PARAM; + } + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c deleted file mode 100644 index ac45be8..0000000 --- a/mix_video/src/mixvideoformatenc_h264.c +++ /dev/null @@ -1,2784 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
- */ -#include -#include -#include - -#include "mixvideolog.h" - -#include "mixvideoformatenc_h264.h" -#include "mixvideoconfigparamsenc_h264.h" -#include - -#undef SHOW_SRC - -#ifdef SHOW_SRC -Window win = 0; -#endif /* SHOW_SRC */ - - -/* The parent class. The pointer will be saved - * in this class's initialization. The pointer - * can be used for chaining method call if needed. - */ -static MixVideoFormatEncClass *parent_class = NULL; - -static void mix_videoformatenc_h264_finalize(GObject * obj); - -/* - * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC - */ -G_DEFINE_TYPE (MixVideoFormatEnc_H264, mix_videoformatenc_h264, MIX_TYPE_VIDEOFORMATENC); - -static void mix_videoformatenc_h264_init(MixVideoFormatEnc_H264 * self) { - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); - - /* member initialization */ - self->encoded_frames = 0; - self->frame_num = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - self->cur_frame = NULL; - self->ref_frame = NULL; - self->rec_frame = NULL; - self->lookup_frame = NULL; -#ifdef ANDROID - self->last_mix_buffer = NULL; -#endif - - self->ci_shared_surfaces = NULL; - self->surfaces= NULL; - self->surface_num = 0; - - self->coded_buf_index = 0; - parent->initialized = FALSE; -} - -static void mix_videoformatenc_h264_class_init( - MixVideoFormatEnc_H264Class * klass) { - - /* root class */ - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* direct parent class */ - MixVideoFormatEncClass *video_formatenc_class = - MIX_VIDEOFORMATENC_CLASS(klass); - - /* parent class for later use */ - parent_class = g_type_class_peek_parent(klass); - - /* setup finializer */ - gobject_class->finalize = mix_videoformatenc_h264_finalize; - - /* setup vmethods with base implementation */ - video_formatenc_class->getcaps = mix_videofmtenc_h264_getcaps; - video_formatenc_class->initialize = mix_videofmtenc_h264_initialize; - video_formatenc_class->encode = mix_videofmtenc_h264_encode; - video_formatenc_class->flush = mix_videofmtenc_h264_flush; - video_formatenc_class->eos = mix_videofmtenc_h264_eos; - video_formatenc_class->deinitialize = mix_videofmtenc_h264_deinitialize; - video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_h264_get_max_encoded_buf_size; - video_formatenc_class->set_dynamic_config = mix_videofmtenc_h264_set_dynamic_enc_config; -} - -MixVideoFormatEnc_H264 * -mix_videoformatenc_h264_new(void) { - MixVideoFormatEnc_H264 *ret = - g_object_new(MIX_TYPE_VIDEOFORMATENC_H264, NULL); - - return ret; -} - -void mix_videoformatenc_h264_finalize(GObject * obj) { - /* clean up here. 
*/ - - /*MixVideoFormatEnc_H264 *mix = MIX_VIDEOFORMATENC_H264(obj); */ - GObjectClass *root_class = (GObjectClass *) parent_class; - - LOG_V( "\n"); - - /* Chain up parent */ - if (root_class->finalize) { - root_class->finalize(obj); - } -} - -MixVideoFormatEnc_H264 * -mix_videoformatenc_h264_ref(MixVideoFormatEnc_H264 * mix) { - return (MixVideoFormatEnc_H264 *) g_object_ref(G_OBJECT(mix)); -} - -/*H.264 vmethods implementation */ -MIX_RESULT mix_videofmtenc_h264_getcaps(MixVideoFormatEnc *mix, GString *msg) { - - LOG_V( "mix_videofmtenc_h264_getcaps\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - if (parent_class->getcaps) { - return parent_class->getcaps(mix, msg); - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display ) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - MixVideoConfigParamsEncH264 * config_params_enc_h264; - - VAStatus va_status = VA_STATUS_SUCCESS; - VASurfaceID * surfaces = NULL; - - gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - gint va_num_profiles, va_num_entrypoints; - - VAProfile *va_profiles = NULL; - VAEntrypoint *va_entrypoints = NULL; - VAConfigAttrib va_attrib[2]; - guint index; - - - /*frame_mgr and input_buf_pool is reservered for future use*/ - - if (mix == NULL || config_params_enc == NULL || va_display == NULL) { - LOG_E( - "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "begin\n"); - - - - /* Chainup parent method. */ - if (parent_class->initialize) { - ret = parent_class->initialize(mix, config_params_enc, - frame_mgr, input_buf_pool, surface_pool, - va_display); - } - - if (ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) { - config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc); - } else { - LOG_V( - "mix_videofmtenc_h264_initialize: no h264 config params found\n"); - return MIX_RESULT_FAIL; - } - - g_mutex_lock(parent->objectlock); - - LOG_V( - "Start to get properities from h.264 params\n"); - - /* get properties from H264 params Object, which is special to H264 format*/ - ret = mix_videoconfigparamsenc_h264_get_bus (config_params_enc_h264, - &self->basic_unit_size); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_bus\n"); - goto cleanup; - } - - - ret = mix_videoconfigparamsenc_h264_get_dlk (config_params_enc_h264, - &self->disable_deblocking_filter_idc); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_dlk\n"); - goto cleanup; - } - - - ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, - &self->slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsenc_h264_get_I_slice_num (config_params_enc_h264, - &self->I_slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n"); - goto cleanup; - } - - ret = 
mix_videoconfigparamsenc_h264_get_P_slice_num (config_params_enc_h264, - &self->P_slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsenc_h264_get_delimiter_type (config_params_enc_h264, - &self->delimiter_type); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E ( - "Failed to mix_videoconfigparamsenc_h264_get_delimiter_type\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264, - &self->idr_interval); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E ( - "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); - goto cleanup; - } - - LOG_V( - "======H264 Encode Object properities======:\n"); - - LOG_I( "self->basic_unit_size = %d\n", - self->basic_unit_size); - LOG_I( "self->disable_deblocking_filter_idc = %d\n", - self->disable_deblocking_filter_idc); - LOG_I( "self->slice_num = %d\n", - self->slice_num); - LOG_I( "self->I_slice_num = %d\n", - self->I_slice_num); - LOG_I( "self->P_slice_num = %d\n", - self->P_slice_num); - LOG_I ("self->delimiter_type = %d\n", - self->delimiter_type); - LOG_I ("self->idr_interval = %d\n", - self->idr_interval); - - LOG_V( - "Get properities from params done\n"); - - parent->va_display = va_display; - - LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", - (guint)va_display); - - -#if 0 - /* query the vender information, can ignore*/ - va_vendor = vaQueryVendorString (va_display); - LOG_I( "Vendor = %s\n", - va_vendor); -#endif - - /*get the max number for profiles/entrypoints/attribs*/ - va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); - - va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); - - va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - - va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); - va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); - - if (va_profiles == NULL || va_entrypoints ==NULL) - { - LOG_E( - "!va_profiles || !va_entrypoints\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_I( - "va_profiles = 0x%08x\n", (guint)va_profiles); - - LOG_V( "vaQueryConfigProfiles\n"); - - - va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigProfiles\n"); - - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaQueryConfigProfiles Done\n"); - - - - /*check whether profile is supported*/ - for(index= 0; index < va_num_profiles; index++) { - if(parent->va_profile == va_profiles[index]) - break; - } - - if(index == va_num_profiles) - { - LOG_E( "Profile not supported\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaQueryConfigEntrypoints\n"); - - - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints(va_display, - parent->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - for (index = 0; index < va_num_entrypoints; index ++) { - if (va_entrypoints[index] == VAEntrypointEncSlice) { - break; - } - } - - if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_attrib[0].type = 
VAConfigAttribRTFormat; - va_attrib[1].type = VAConfigAttribRateControl; - - LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaGetConfigAttributes\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if ((va_attrib[0].value & parent->va_format) == 0) { - LOG_E( "Matched format not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - if ((va_attrib[1].value & parent->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = parent->va_rcmode; - - LOG_V( "======VA Configuration======\n"); - - LOG_I( "profile = %d\n", - parent->va_profile); - LOG_I( "va_entrypoint = %d\n", - parent->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", - va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", - va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", - va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) = %d\n", - va_attrib[1].value); - - LOG_V( "vaCreateConfig\n"); - - va_status = vaCreateConfig(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2, &(parent->va_config)); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaCreateConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - if (parent->va_rcmode == VA_RC_VCM) { - - /* - * Following three features are only enabled in VCM mode - */ - parent->render_mss_required = TRUE; - parent->render_AIR_required = TRUE; - parent->render_bitrate_required = TRUE; - self->slice_num = (parent->picture_height + 15) / 16; //if we are in VCM, we will set slice num to max value - } - - - /*TODO: compute the surface number*/ - int numSurfaces; - - if (parent->share_buf_mode) { - numSurfaces = 2; - } - else { - numSurfaces = 8; - parent->ci_frame_num = 0; - } - - self->surface_num = numSurfaces + parent->ci_frame_num; - - surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); - - if (surfaces == NULL) - { - LOG_E( - "Failed allocate surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_V( "vaCreateSurfaces\n"); - - va_status = vaCreateSurfaces(va_display, parent->picture_width, - parent->picture_height, parent->va_format, - numSurfaces, surfaces); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaCreateSurfaces\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if (parent->share_buf_mode) { - - LOG_V( - "We are in share buffer mode!\n"); - self->ci_shared_surfaces = - g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); - - if (self->ci_shared_surfaces == NULL) - { - LOG_E( - "Failed allocate shared surface\n"); - - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - guint index; - for(index = 0; index < parent->ci_frame_num; index++) { - - LOG_I( "ci_frame_id = %lu\n", - parent->ci_frame_id[index]); - - LOG_V( - "vaCreateSurfaceFromCIFrame\n"); - - va_status = vaCreateSurfaceFromCIFrame(va_display, - (gulong) (parent->ci_frame_id[index]), - &self->ci_shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateSurfaceFromCIFrame\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - } - - LOG_V( - "vaCreateSurfaceFromCIFrame Done\n"); - - }// if (parent->share_buf_mode) - - self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); - - if (self->surfaces == NULL) - { - LOG_E( - "Failed allocate private surface\n"); - 
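The surface table assembled a few lines below follows a fixed layout in share-buffer mode; a hedged sketch of that layout and the direct CI-index lookup it enables (the helper name here is illustrative, not from the patch):

    #include <va/va.h>

    /* Layout built below: CI shared surfaces occupy indices
     * [0, ci_frame_num) so a CI frame index addresses them directly;
     * the locally created surfaces follow at [ci_frame_num, surface_num). */
    static VASurfaceID lookup_by_ci_index(VASurfaceID *table, unsigned ci_idx) {
        return table[ci_idx]; /* valid only because shared surfaces come first */
    }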
ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - if (parent->share_buf_mode) { - /*shared surfaces should be put in pool first, - because we will get it accoring to CI index*/ - for(index = 0; index < parent->ci_frame_num; index++) - self->surfaces[index] = self->ci_shared_surfaces[index]; - } - - for(index = 0; index < numSurfaces; index++) { - self->surfaces[index + parent->ci_frame_num] = surfaces[index]; - } - - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", - numSurfaces + parent->ci_frame_num); - -#if 0 //current put this in gst - images = g_malloc(sizeof(VAImage)*numSurfaces); - if (images == NULL) - { - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < numSurfaces; index++) { - //Derive an VAImage from an existing surface. - //The image buffer can then be mapped/unmapped for CPU access - va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); - } -#endif - - LOG_V( "mix_surfacepool_new\n"); - - parent->surfacepool = mix_surfacepool_new(); - if (surface_pool) - *surface_pool = parent->surfacepool; - //which is useful to check before encode - - if (parent->surfacepool == NULL) - { - LOG_E( - "Failed to mix_surfacepool_new\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "mix_surfacepool_initialize\n"); - - ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces, va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - LOG_E( "Error init failure\n"); - ret = MIX_RESULT_ALREADY_INIT; - goto cleanup; - default: - break; - } - - - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext(va_display, parent->va_config, - parent->picture_width, parent->picture_height, - 0, self->surfaces, parent->ci_frame_num + numSurfaces, - &(parent->va_context)); - - LOG_I( - "Created libva context width %d, height %d\n", - parent->picture_width, parent->picture_height); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", - (guint)va_status); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - guint max_size = 0; - ret = mix_videofmtenc_h264_get_max_encoded_buf_size (parent, &max_size); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videofmtenc_h264_get_max_encoded_buf_size\n"); - goto cleanup; - - } - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &(self->coded_buf[0])); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &(self->coded_buf[1])); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -#ifdef SHOW_SRC - Display * display = XOpenDisplay (NULL); - LOG_I( "display = 0x%08x\n", - (guint) display); - win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, - parent->picture_width, parent->picture_height, 0, 0, - WhitePixel(display, 0)); - XMapWindow(display, win); - XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - - XSync(display, False); - LOG_I( "va_display = 0x%08x\n", - 
(guint) va_display); - -#endif /* SHOW_SRC */ - -cleanup: - - - if (ret == MIX_RESULT_SUCCESS) { - parent->initialized = TRUE; - } - - /*free profiles and entrypoints*/ - if (va_profiles) - g_free(va_profiles); - - if (va_entrypoints) - g_free (va_entrypoints); - - if (surfaces) - g_free (surfaces); - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - - LOG_V( "Begin\n"); - - /*currenly only support one input and output buffer*/ - - if (bufincnt != 1 || iovoutcnt != 1) { - LOG_E( - "buffer count not equel to 1\n"); - LOG_E( - "maybe some exception occurs\n"); - } - - if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { - LOG_E( - "!mix || !bufin[0] ||!iovout[0]\n"); - return MIX_RESULT_NULL_PTR; - } - -#if 0 - if (parent_class->encode) { - return parent_class->encode(mix, bufin, bufincnt, iovout, - iovoutcnt, encode_params); - } -#endif - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264 (mix); - - LOG_V( "Locking\n"); - g_mutex_lock(parent->objectlock); - - - //TODO: also we could move some encode Preparation work to here - - LOG_V( - "mix_videofmtenc_h264_process_encode\n"); - - ret = mix_videofmtenc_h264_process_encode (self, - bufin[0], iovout[0]); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_process_encode\n"); - goto cleanup; - } - -cleanup: - - LOG_V( "UnLocking\n"); - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { - - //MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - /*not chain to parent flush func*/ -#if 0 - if (parent_class->flush) { - return parent_class->flush(mix, msg); - } -#endif - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); - - g_mutex_lock(mix->objectlock); - -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } -#endif - - /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) - { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (self->ref_frame != NULL) - { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; - } - -#ifdef ANDROID - if(self->last_mix_buffer) { - mix_buffer_unref(self->last_mix_buffer); - self->last_mix_buffer = NULL; - } -#endif - /*reset the properities*/ - self->encoded_frames = 0; - self->frame_num = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - - g_mutex_unlock(mix->objectlock); - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix) { - - LOG_V( "\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (parent_class->eos) { - return parent_class->eos(mix); - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) { - - MixVideoFormatEnc *parent = NULL; - 
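The deinitialize path below releases resources in the reverse order of initialize(): encoder frames first, then the surface arrays, then the VA context, and finally the VA config. A minimal sketch of that teardown ordering (error handling elided; assumes a valid display/context/config triple):

    #include <va/va.h>

    /* Reverse-of-init teardown: the context is destroyed before the
     * config it was created from, mirroring vaCreateConfig/vaCreateContext. */
    static void teardown_sketch(VADisplay dpy, VAContextID ctx, VAConfigID cfg) {
        vaDestroyContext(dpy, ctx); /* stop referencing the surfaces first */
        vaDestroyConfig(dpy, cfg);  /* then drop the codec configuration   */
    }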
VAStatus va_status; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - - LOG_V( "Begin\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - if (parent_class->deinitialize) { - ret = parent_class->deinitialize(mix); - } - - if (ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); - - LOG_V( "Release frames\n"); - - g_mutex_lock(parent->objectlock); - -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } -#endif - - /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) - { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (self->ref_frame != NULL) - { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; - } - - if (self->lookup_frame != NULL) - { - mix_videoframe_unref (self->lookup_frame); - self->lookup_frame = NULL; - } - - LOG_V( "Release surfaces\n"); - - if (self->ci_shared_surfaces) - { - g_free (self->ci_shared_surfaces); - self->ci_shared_surfaces = NULL; - } - - if (self->surfaces) - { - g_free (self->surfaces); - self->surfaces = NULL; - } - - LOG_V( "vaDestroyContext\n"); - - va_status = vaDestroyContext (parent->va_display, parent->va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyContext\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaDestroyConfig\n"); - - va_status = vaDestroyConfig (parent->va_display, parent->va_config); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -cleanup: - parent->initialized = FALSE; - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT mix_videofmtenc_h264_send_seq_params (MixVideoFormatEnc_H264 *mix) -{ - - VAStatus va_status; - VAEncSequenceParameterBufferH264 h264_seq_param; - - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - /*set up the sequence params for HW*/ - h264_seq_param.level_idc = 30; //TODO, hard code now - h264_seq_param.intra_period = parent->intra_period; - h264_seq_param.intra_idr_period = mix->idr_interval; - h264_seq_param.picture_width_in_mbs = parent->picture_width / 16; - h264_seq_param.picture_height_in_mbs = parent->picture_height/ 16; - h264_seq_param.bits_per_second = parent->bitrate; - h264_seq_param.frame_rate = - (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; - h264_seq_param.initial_qp = parent->initial_qp; - h264_seq_param.min_qp = parent->min_qp; - h264_seq_param.basic_unit_size = mix->basic_unit_size; //for rate control usage - h264_seq_param.intra_period = parent->intra_period; - //h264_seq_param.vui_flag = 248; - //h264_seq_param.seq_parameter_set_id = 176; - - LOG_V( - "===h264 sequence params===\n"); - - LOG_I( "seq_parameter_set_id = %d\n", - (guint)h264_seq_param.seq_parameter_set_id); - LOG_I( "level_idc = %d\n", - (guint)h264_seq_param.level_idc); - LOG_I( "intra_period = %d\n", - h264_seq_param.intra_period); - LOG_I( "idr_interval = %d\n", - 
h264_seq_param.intra_idr_period); - LOG_I( "picture_width_in_mbs = %d\n", - h264_seq_param.picture_width_in_mbs); - LOG_I( "picture_height_in_mbs = %d\n", - h264_seq_param.picture_height_in_mbs); - LOG_I( "bitrate = %d\n", - h264_seq_param.bits_per_second); - LOG_I( "frame_rate = %d\n", - h264_seq_param.frame_rate); - LOG_I( "initial_qp = %d\n", - h264_seq_param.initial_qp); - LOG_I( "min_qp = %d\n", - h264_seq_param.min_qp); - LOG_I( "basic_unit_size = %d\n", - h264_seq_param.basic_unit_size); - LOG_I( "vui_flag = %d\n\n", - h264_seq_param.vui_flag); - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncSequenceParameterBufferType, - sizeof(h264_seq_param), - 1, &h264_seq_param, - &mix->seq_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->seq_param_buf, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_h264_send_picture_parameter (MixVideoFormatEnc_H264 *mix) -{ - VAStatus va_status; - VAEncPictureParameterBufferH264 h264_pic_param; - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - /*set picture params for HW*/ - h264_pic_param.reference_picture = mix->ref_frame->frame_id; - h264_pic_param.reconstructed_picture = mix->rec_frame->frame_id; - h264_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; - h264_pic_param.picture_width = parent->picture_width; - h264_pic_param.picture_height = parent->picture_height; - h264_pic_param.last_picture = 0; - - - LOG_V( - "======h264 picture params======\n"); - LOG_I( "reference_picture = 0x%08x\n", - h264_pic_param.reference_picture); - LOG_I( "reconstructed_picture = 0x%08x\n", - h264_pic_param.reconstructed_picture); - LOG_I( "coded_buf_index = %d\n", - mix->coded_buf_index); - LOG_I( "coded_buf = 0x%08x\n", - h264_pic_param.coded_buf); - LOG_I( "picture_width = %d\n", - h264_pic_param.picture_width); - LOG_I( "picture_height = %d\n\n", - h264_pic_param.picture_height); - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncPictureParameterBufferType, - sizeof(h264_pic_param), - 1,&h264_pic_param, - &mix->pic_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->pic_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; - -} - - -MIX_RESULT mix_videofmtenc_h264_send_slice_parameter (MixVideoFormatEnc_H264 *mix) -{ - VAStatus va_status; - - guint slice_num; - guint slice_height; - guint slice_index; - guint slice_height_in_mb; - guint max_slice_num; - guint min_slice_num; - - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - - - MixVideoFormatEnc *parent = NULL; - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - max_slice_num = (parent->picture_height + 15) / 16; - min_slice_num = 1; 
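The slice setup that follows clamps the requested slice count to [min_slice_num, max_slice_num] and then re-derives it from a macroblock-aligned slice height, so the count actually programmed can differ from the one requested. A standalone sketch of that rounding with assumed values (a 480-line picture, 7 slices requested):

    #include <stdio.h>

    int main(void) {
        unsigned picture_height = 480; /* assumed input         */
        unsigned slice_num = 7;        /* requested slice count */
        unsigned slice_height = picture_height / slice_num;  /* 68 */
        slice_height = (slice_height + 15) & ~15u;           /* 80: whole MB rows */
        slice_num = (picture_height + 15) / slice_height;    /* 6 slices actually used */
        printf("%u slices of %u lines each\n", slice_num, slice_height);
        return 0;
    }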
- - if (mix->is_intra) { - slice_num = mix->I_slice_num; - } - else { - slice_num = mix->P_slice_num; - } - - if (slice_num < min_slice_num) { - LOG_W ("Slice Number is too small"); - slice_num = min_slice_num; - } - - if (slice_num > max_slice_num) { - LOG_W ("Slice Number is too big"); - slice_num = max_slice_num; - } - - slice_height = parent->picture_height / slice_num; - - slice_height += 15; - slice_height &= (~15); - - slice_num = mix->slice_num = (parent->picture_height + 15) / slice_height; - -#if 1 - va_status = vaCreateBuffer (parent->va_display, parent->va_context, - VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), - slice_num, NULL, - &mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - VAEncSliceParameterBuffer *slice_param, *current_slice; - - va_status = vaMapBuffer(parent->va_display, - mix->slice_param_buf, - (void **)&slice_param); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaMapBuffer\n"); - return MIX_RESULT_FAIL; - } - - current_slice = slice_param; - - - for (slice_index = 0; slice_index < slice_num; slice_index++) { - current_slice = slice_param + slice_index; - slice_height_in_mb = - min (slice_height, parent->picture_height - - slice_index * slice_height) / 16; - - // starting MB row number for this slice - current_slice->start_row_number = slice_index * slice_height / 16; - // slice height measured in MB - current_slice->slice_height = slice_height_in_mb; - current_slice->slice_flags.bits.is_intra = mix->is_intra; - current_slice->slice_flags.bits.disable_deblocking_filter_idc - = mix->disable_deblocking_filter_idc; - - LOG_V( - "======h264 slice params======\n"); - - LOG_I( "slice_index = %d\n", - (gint) slice_index); - LOG_I( "start_row_number = %d\n", - (gint) current_slice->start_row_number); - LOG_I( "slice_height_in_mb = %d\n", - (gint) current_slice->slice_height); - LOG_I( "slice.is_intra = %d\n", - (gint) current_slice->slice_flags.bits.is_intra); - LOG_I( - "disable_deblocking_filter_idc = %d\n\n", - (gint) mix->disable_deblocking_filter_idc); - - } - - va_status = vaUnmapBuffer(parent->va_display, mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } -#endif - -#if 0 - VAEncSliceParameterBuffer slice_param; - slice_index = 0; - slice_height_in_mb = slice_height / 16; - slice_param.start_row_number = 0; - slice_param.slice_height = slice_height / 16; - slice_param.slice_flags.bits.is_intra = mix->is_intra; - slice_param.slice_flags.bits.disable_deblocking_filter_idc - = mix->disable_deblocking_filter_idc; - - va_status = vaCreateBuffer (parent->va_display, parent->va_context, - VAEncSliceParameterBufferType, - sizeof(slice_param), - slice_num, &slice_param, - &mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } -#endif - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->slice_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, - MixBuffer * bufin, MixIOVec * iovout) -{ - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus va_status = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; - VAContextID va_context; - gulong 
surface = 0; - guint16 width, height; - - MixVideoFrame * tmp_frame; - guint8 *buf; - - if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { - LOG_E( - "mix == NUL) || bufin == NULL || iovout == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - va_display = parent->va_display; - va_context = parent->va_context; - width = parent->picture_width; - height = parent->picture_height; - - - LOG_I( "encoded_frames = %d\n", - mix->encoded_frames); - LOG_I( "frame_num = %d\n", - mix->frame_num); - LOG_I( "is_intra = %d\n", - mix->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", - (guint) parent->ci_frame_id); - - if (parent->new_header_required) { - mix->frame_num = 0; - } - - /* determine the picture type*/ - //if ((mix->encoded_frames % parent->intra_period) == 0) { - if ((mix->frame_num % parent->intra_period) == 0) { - mix->is_intra = TRUE; - } else { - mix->is_intra = FALSE; - } - - LOG_I( "is_intra_picture = %d\n", - mix->is_intra); - - LOG_V( - "Get Surface from the pool\n"); - - /*current we use one surface for source data, - * one for reference and one for reconstructed*/ - /*TODO, could be refine here*/ - - if (!parent->share_buf_mode) { - LOG_V( - "We are NOT in share buffer mode\n"); - - if (mix->ref_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - LOG_V( "Get Surface Done\n"); - - - VAImage src_image; - guint8 *pvbuf; - guint8 *dst_y; - guint8 *dst_uv; - int i,j; - - LOG_V( - "map source data to surface\n"); - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); - goto cleanup; - } - - - LOG_I( - "surface id = 0x%08x\n", (guint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); - //need to destroy - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDeriveImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); - - - va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed to vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "vaImage information\n"); - LOG_I( - "image->pitches[0] = %d\n", image->pitches[0]); - LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); - LOG_I( - "image->offsets[0] = %d\n", image->offsets[0]); - LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); - LOG_I( - "image->num_planes = %d\n", image->num_planes); - LOG_I( - "image->width = %d\n", image->width); - LOG_I( - "image->height = %d\n", image->height); - - LOG_I( - "input buf size = %d\n", bufin->size); - - guint8 *inbuf = bufin->data; - -#ifndef ANDROID -#define USE_SRC_FMT_YUV420 
-#else -#define USE_SRC_FMT_NV21 -#endif - -#ifdef USE_SRC_FMT_YUV420 - /*need to convert YUV420 to NV12*/ - dst_y = pvbuf +image->offsets[0]; - - for (i = 0; i < height; i ++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = - inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; - } - dst_uv += image->pitches[1]; - } -#else //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 - int offset_uv = width * height; - guint8 *inbuf_uv = inbuf + offset_uv; - int height_uv = height / 2; - int width_uv = width; - - dst_y = pvbuf + image->offsets[0]; - for (i = 0; i < height; i++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - -#ifdef USE_SRC_FMT_NV12 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i++) { - memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); - dst_uv += image->pitches[1]; - } -#else //USE_SRC_FMT_NV21 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i ++) { - for (j = 0; j < width_uv; j += 2) { - dst_uv[j] = inbuf_uv[j+1]; //u - dst_uv[j+1] = inbuf_uv[j]; //v - } - dst_uv += image->pitches[1]; - inbuf_uv += width_uv; - } -#endif -#endif //USE_SRC_FMT_YUV420 - - va_status = vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDestroyImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "Map source data to surface done\n"); - - } - - else {//if (!parent->share_buf_mode) - - //MixVideoFrame * frame = mix_videoframe_new(); - if (mix->lookup_frame == NULL) - { - mix->lookup_frame = mix_videoframe_new (); - if (mix->lookup_frame == NULL) - { - LOG_E("mix_videoframe_new() failed!\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - if (mix->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (mix->lookup_frame, mix->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_frame, mix->lookup_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get reference surface from pool failed\n"); - goto cleanup; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (mix->lookup_frame, mix->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_frame, mix->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get recontructed surface from pool failed\n"); - goto cleanup; - } - } - - //mix_videoframe_unref (mix->cur_frame); - - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - guint ci_idx; -#ifndef ANDROID - memcpy (&ci_idx, bufin->data, bufin->size); -#else - memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); -#endif - - LOG_I( - "surface_num = %d\n", mix->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > mix->surface_num - 2) { - LOG_E( - "the CI frame idx is too bigger than CI frame number\n"); - ret = 
MIX_RESULT_FAIL; - goto cleanup; - - } - - - ret = mix_videoframe_set_ci_frame_idx (mix->lookup_frame, ci_idx); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_frame, mix->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - goto cleanup; - - } - } - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - - } - - /** - * Start encoding process - **/ - - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(guint)va_context); - LOG_I( "surface = 0x%08x\n",(guint)surface); - LOG_I( "va_display = 0x%08x\n",(guint)va_display); - - - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - ret = mix_videofmtenc_h264_send_encode_command (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - goto cleanup; - } - - - if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - } - - LOG_V( "vaEndPicture\n"); - - if (mix->encoded_frames == 0) { - mix->encoded_frames ++; - mix->frame_num ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - - mix->last_frame = mix->cur_frame; - - - /* determine the picture type*/ - //if ((mix->encoded_frames % parent->intra_period) == 0) { - if ((mix->frame_num % parent->intra_period) == 0) { - mix->is_intra = TRUE; - } else { - mix->is_intra = FALSE; - } - - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - - - } - - - LOG_V( "vaSyncSurface\n"); - - va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaSyncSurface\n"); - - //return MIX_RESULT_FAIL; - } - - LOG_V( - "Start to get encoded data\n"); - - /*get encoded data from the VA buffer*/ - va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - VACodedBufferSegment *coded_seg = NULL; - int num_seg = 0; - guint total_size = 0; - guint size = 0; - guint status = 0; - gboolean slice_size_overflow = FALSE; - - coded_seg = (VACodedBufferSegment *)buf; - num_seg = 1; - - while (1) { - total_size += coded_seg->size; - - status = coded_seg->status; - - if (!slice_size_overflow) { - - slice_size_overflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK; - } - - if (coded_seg->next == NULL) - break; - - coded_seg = coded_seg->next; - num_seg ++; - } - - LOG_I ("segment number = %d\n", num_seg); - -#if 0 - // first 4 bytes is the size of the buffer - memcpy (&(iovout->data_size), (void*)buf, 4); - //size = (guint*) buf; - - guint size = iovout->data_size + 100; -#endif - - iovout->data_size = total_size; - size = total_size + 100; - - iovout->buffer_size = size; - - //We will support two buffer mode, one is application allocates the buffer and passes to encode, - //the other is encode allocate memory - - - if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate 
it. - iovout->data = g_malloc (size); // In case we have lots of 0x000001 start code, and we replace them with 4 bytes length prefixed - if (iovout->data == NULL) { - LOG_E( "iovout->data == NULL\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - coded_seg = (VACodedBufferSegment *)buf; - total_size = 0; - - if (mix->delimiter_type == MIX_DELIMITER_ANNEXB) { - - while (1) { - - memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - - coded_seg = coded_seg->next; - } - - //memcpy (iovout->data, buf + 16, iovout->data_size); //parload is started from 17th byte - //size = iovout->data_size; - - } else { - - guint pos = 0; - guint zero_byte_count = 0; - guint prefix_length = 0; - guint8 nal_unit_type = 0; - //guint8 * payload = buf + 16; - guint8 * payload = coded_seg->buf; - - while ((payload[pos++] == 0x00)) { - zero_byte_count ++; - if (pos >= coded_seg->size) //to make sure the buffer to be accessed is valid - break; - } - - nal_unit_type = (guint8)(payload[pos] & 0x1f); - prefix_length = zero_byte_count + 1; - - LOG_I ("nal_unit_type = %d\n", nal_unit_type); - LOG_I ("zero_byte_count = %d\n", zero_byte_count); - - size = iovout->data_size; - - if ((payload [pos - 1] & 0x01) && mix->slice_num == 1 && nal_unit_type == 1 && num_seg == 1) { - iovout->data[0] = ((size - prefix_length) >> 24) & 0xff; - iovout->data[1] = ((size - prefix_length) >> 16) & 0xff; - iovout->data[2] = ((size - prefix_length) >> 8) & 0xff; - iovout->data[3] = (size - prefix_length) & 0xff; - // use 4 bytes to indicate the NALU length - //memcpy (iovout->data + 4, buf + 16 + prefix_length, size - prefix_length); - memcpy (iovout->data + 4, coded_seg->buf + prefix_length, size - prefix_length); - LOG_V ("We only have one start code, copy directly\n"); - } - else { - - if (num_seg == 1) { - ret = mix_videofmtenc_h264_AnnexB_to_length_prefixed (coded_seg->buf, coded_seg->size, iovout->data, &size); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_AnnexB_to_length_prefixed\n"); - goto cleanup; - } - - } else { - - guint8 * tem_buf = NULL; - tem_buf = g_malloc (size); - if (tem_buf == NULL) { - LOG_E( "tem_buf == NULL\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - while (1) { - - memcpy (tem_buf + total_size, coded_seg->buf, coded_seg->size); - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - - coded_seg = coded_seg->next; - } - - ret = mix_videofmtenc_h264_AnnexB_to_length_prefixed (tem_buf, iovout->data_size, iovout->data, &size); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_AnnexB_to_length_prefixed\n"); - goto cleanup; - } - - g_free (tem_buf); - } - } - } - - LOG_I( - "out size is = %d\n", iovout->data_size); - - va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "get encoded data done\n"); - - if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - } - - if (mix->encoded_frames == 1) { - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - ret = 
mix_videofmtenc_h264_send_encode_command (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - goto cleanup; - } - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - } - - VASurfaceStatus va_surface_status; - - /*query the status of current surface*/ - va_status = vaQuerySurfaceStatus(va_display, surface, &va_surface_status); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaQuerySurfaceStatus\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - mix->pic_skipped = va_surface_status & VASurfaceSkipped; - - if (parent->need_display) { - ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - goto cleanup; - } - - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_framemanager_enqueue\n"); - goto cleanup; - } - } - - - /*update the reference surface and reconstructed surface */ - if (!mix->pic_skipped) { - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - } - -#if 0 - if (mix->ref_frame != NULL) - mix_videoframe_unref (mix->ref_frame); - mix->ref_frame = mix->rec_frame; - - mix_videoframe_unref (mix->cur_frame); -#endif - - mix->encoded_frames ++; - mix->frame_num ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - mix->last_frame = mix->cur_frame; - -#ifdef ANDROID - if(mix->last_mix_buffer) { - LOG_V("calls to mix_buffer_unref \n"); - LOG_V("refcount = %d\n", MIX_PARAMS(mix->last_mix_buffer)->refcount); - mix_buffer_unref(mix->last_mix_buffer); - } - - LOG_V("ref the current bufin\n"); - mix->last_mix_buffer = mix_buffer_ref(bufin); -#endif - - if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_frame); - mix->cur_frame = NULL; - } - -cleanup: - - if (ret != MIX_RESULT_SUCCESS) { - if (iovout->data) { - g_free (iovout->data); - iovout->data = NULL; - } - } - - LOG_V( "end\n"); - - /* - * The error level of MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW - * is lower than other errors, so if any other errors happen, we won't - * return slice size overflow - */ - if (ret == MIX_RESULT_SUCCESS && slice_size_overflow) - ret = MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW; - - return ret; -} - -MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size ( - MixVideoFormatEnc *mix, guint *max_size) -{ - - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL || max_size == NULL) - { - LOG_E( - "mix == NULL || max_size == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - parent = MIX_VIDEOFORMATENC(mix); - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOFORMATENC_H264(self)) { - - if (self->coded_buf_size > 0) { - *max_size = self->coded_buf_size; - LOG_V ("Already calculate the max encoded size, get the value directly"); - return MIX_RESULT_SUCCESS; - } - - /*base on the rate control mode to calculate the defaule encoded buffer size*/ - if (self->va_rcmode == VA_RC_NONE) { - self->coded_buf_size = - (parent->picture_width* parent->picture_height * 400) / (16 * 16); - // set to value according to QP - } - else { - self->coded_buf_size = parent->bitrate/ 4; - } - - self->coded_buf_size = - max (self->coded_buf_size , - (parent->picture_width* parent->picture_height * 400) / (16 * 16)); - - /*in 
case got a very large user input bit rate value*/ - self->coded_buf_size = - min(self->coded_buf_size, - (parent->picture_width * parent->picture_height * 1.5 * 8)); - self->coded_buf_size = (self->coded_buf_size + 15) &(~15); - } - else - { - LOG_E( - "not H264 video encode Object\n"); - return MIX_RESULT_INVALID_PARAM; - } - - *max_size = self->coded_buf_size; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed ( - guint8 * bufin, guint bufin_len, guint8* bufout, guint * bufout_len) -{ - - guint pos = 0; - guint last_pos = 0; - - guint zero_byte_count = 0; - guint nal_size = 0; - guint prefix_length = 0; - guint size_copied = 0; - guint leading_zero_count = 0; - - if (bufin == NULL || bufout == NULL || bufout_len == NULL) { - - LOG_E( - "bufin == NULL || bufout == NULL || bufout_len = NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (bufin_len <= 0 || *bufout_len <= 0) { - LOG_E( - "bufin_len <= 0 || *bufout_len <= 0\n"); - return MIX_RESULT_FAIL; - } - - LOG_V ("Begin\n"); - - while ((bufin[pos++] == 0x00)) { - zero_byte_count ++; - if (pos >= bufin_len) //to make sure the buffer to be accessed is valid - break; - } - - if (bufin[pos - 1] != 0x01 || zero_byte_count < 2) - { - LOG_E("The stream is not AnnexB format \n"); - return MIX_RESULT_FAIL; ; //not AnnexB, we won't process it - } - - zero_byte_count = 0; - last_pos = pos; - - while (pos < bufin_len) { - - while (bufin[pos++] == 0) { - zero_byte_count ++; - if (pos >= bufin_len) //to make sure the buffer to be accessed is valid - break; - } - - if (bufin[pos - 1] == 0x01 && zero_byte_count >= 2) { - if (zero_byte_count == 2) { - prefix_length = 3; - } - else { - prefix_length = 4; - leading_zero_count = zero_byte_count - 3; - } - - LOG_I("leading_zero_count = %d\n", leading_zero_count); - - nal_size = pos - last_pos - prefix_length - leading_zero_count; - if (nal_size < 0) { - LOG_E ("something wrong in the stream\n"); - return MIX_RESULT_FAIL; //not AnnexB, we won't process it - } - - if (*bufout_len < (size_copied + nal_size + 4)) { - LOG_E ("The length of destination buffer is too small\n"); - return MIX_RESULT_FAIL; - } - - LOG_I ("nal_size = %d\n", nal_size); - - /*We use 4 bytes length prefix*/ - bufout [size_copied] = nal_size >> 24 & 0xff; - bufout [size_copied + 1] = nal_size >> 16 & 0xff; - bufout [size_copied + 2] = nal_size >> 8 & 0xff; - bufout [size_copied + 3] = nal_size & 0xff; - - size_copied += 4; //4 bytes length prefix - memcpy (bufout + size_copied, bufin + last_pos, nal_size); - size_copied += nal_size; - - LOG_I ("size_copied = %d\n", size_copied); - - zero_byte_count = 0; - leading_zero_count = 0; - last_pos = pos; - } - - else if (pos == bufin_len) { - - LOG_V ("Last NALU in this frame\n"); - - nal_size = pos - last_pos; - - if (*bufout_len < (size_copied + nal_size + 4)) { - LOG_E ("The length of destination buffer is too small\n"); - return MIX_RESULT_FAIL; - } - - /*We use 4 bytes length prefix*/ - bufout [size_copied] = nal_size >> 24 & 0xff; - bufout [size_copied + 1] = nal_size >> 16 & 0xff; - bufout [size_copied + 2] = nal_size >> 8 & 0xff; - bufout [size_copied + 3] = nal_size & 0xff; - - size_copied += 4; //4 bytes length prefix - memcpy (bufout + size_copied, bufin + last_pos, nal_size); - size_copied += nal_size; - - LOG_I ("size_copied = %d\n", size_copied); - } - - else { - zero_byte_count = 0; - leading_zero_count = 0; - } - - } - - if (size_copied != *bufout_len) { - *bufout_len = size_copied; - } - - LOG_V ("End\n"); - - return MIX_RESULT_SUCCESS; 
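The conversion above replaces each Annex B start code (00 00 01 or 00 00 00 01) with a 4-byte big-endian NAL-unit length, which is why the destination must hold size_copied + nal_size + 4 bytes per NAL. A self-contained sketch of the prefix write (a 1500-byte NAL, 0x5DC, yields the prefix 00 00 05 DC):

    #include <stdint.h>

    /* Write the 4-byte big-endian length prefix used in place of a start code. */
    static void write_nal_length_prefix(uint8_t *out, uint32_t nal_size) {
        out[0] = (uint8_t)(nal_size >> 24);
        out[1] = (uint8_t)(nal_size >> 16);
        out[2] = (uint8_t)(nal_size >> 8);
        out[3] = (uint8_t)(nal_size);
    }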
- -} - -MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - //if (mix->encoded_frames == 0 || parent->new_header_required) { - if (mix->frame_num == 0 || parent->new_header_required) { - ret = mix_videofmtenc_h264_send_seq_params (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_seq_params\n"); - return MIX_RESULT_FAIL; - } - - parent->new_header_required = FALSE; //Set to require new header filed to FALSE - } - - if (parent->render_mss_required && parent->max_slice_size != 0) { - ret = mix_videofmtenc_h264_send_max_slice_size(mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_max_slice_size\n"); - return MIX_RESULT_FAIL; - } - - parent->render_mss_required = FALSE; - } - - if (parent->render_bitrate_required) { - ret = mix_videofmtenc_h264_send_dynamic_bitrate(mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_dynamic_bitrate\n"); - return MIX_RESULT_FAIL; - } - - parent->render_bitrate_required = FALSE; - } - - if (parent->render_AIR_required && - (parent->refresh_type == MIX_VIDEO_AIR || parent->refresh_type == MIX_VIDEO_BOTH)) - { - - ret = mix_videofmtenc_h264_send_AIR (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_AIR\n"); - return MIX_RESULT_FAIL; - } - - parent->render_AIR_required = FALSE; - } - - if (parent->render_framerate_required) { - - ret = mix_videofmtenc_h264_send_dynamic_framerate (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_dynamic_framerate\n"); - return MIX_RESULT_FAIL; - } - - parent->render_framerate_required = FALSE; - } - - ret = mix_videofmtenc_h264_send_picture_parameter (mix); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_picture_parameter\n"); - return MIX_RESULT_FAIL; - } - - ret = mix_videofmtenc_h264_send_slice_parameter (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_slice_parameter\n"); - return MIX_RESULT_FAIL; - } - - - LOG_V( "End\n"); - - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixEncParamsType params_type) { - - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - MixVideoConfigParamsEncH264 * config_params_enc_h264; - - LOG_V( "Begin\n"); - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) { - config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc); - } else { - LOG_V( - "mix_videofmtenc_h264_initialize: no h264 config params found\n"); - return MIX_RESULT_FAIL; - } - - /* - * For case params_type == MIX_ENC_PARAMS_SLICE_NUM - * we don't need to chain up to parent method, as we will handle - * dynamic slice height change inside this method, and other dynamic - * controls will be handled in parent method. 
- */ - if (params_type == MIX_ENC_PARAMS_SLICE_NUM) { - - g_mutex_lock(parent->objectlock); - - ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, - &self->slice_num); - - self->I_slice_num = self->P_slice_num = self->slice_num; - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); - - g_mutex_unlock(parent->objectlock); - - return ret; - } - - g_mutex_unlock(parent->objectlock); - - } - else if (params_type == MIX_ENC_PARAMS_I_SLICE_NUM) { - - g_mutex_lock(parent->objectlock); - - ret = mix_videoconfigparamsenc_h264_get_I_slice_num (config_params_enc_h264, - &self->I_slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n"); - - g_mutex_unlock(parent->objectlock); - - return ret; - } - - g_mutex_unlock(parent->objectlock); - - } - else if (params_type == MIX_ENC_PARAMS_P_SLICE_NUM) { - - g_mutex_lock(parent->objectlock); - - ret = mix_videoconfigparamsenc_h264_get_P_slice_num (config_params_enc_h264, - &self->P_slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n"); - - g_mutex_unlock(parent->objectlock); - - return ret; - } - - g_mutex_unlock(parent->objectlock); - - } else if (params_type == MIX_ENC_PARAMS_IDR_INTERVAL) { - - g_mutex_lock(parent->objectlock); - - ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264, - &self->idr_interval); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); - - g_mutex_unlock(parent->objectlock); - - return ret; - } - - parent->new_header_required = TRUE; - - g_mutex_unlock(parent->objectlock); - - } else{ - - /* Chainup parent method. 
*/ - if (parent_class->set_dynamic_config) { - ret = parent_class->set_dynamic_config(mix, config_params_enc, - params_type); - } - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_V( - "chainup parent method (set_dynamic_config) failed \n"); - return ret; - } - } - - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT mix_videofmtenc_h264_send_dynamic_bitrate (MixVideoFormatEnc_H264 *mix) -{ - VAStatus va_status; - - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - - MixVideoFormatEnc *parent = NULL; - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { - - LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_dynamic_bitrate\n"); - return VA_STATUS_SUCCESS; - } - - VAEncMiscParameterBuffer * misc_enc_param_buf; - VAEncMiscParameterRateControl * bitrate_control_param; - VABufferID misc_param_buffer_id; - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncMiscParameterBufferType, - sizeof(VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl), - 1, NULL, - &misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - misc_enc_param_buf->type = VAEncMiscParameterTypeRateControl; - bitrate_control_param = (VAEncMiscParameterRateControl *)misc_enc_param_buf->data; - - bitrate_control_param->bits_per_second = parent->bitrate; - bitrate_control_param->initial_qp = parent->initial_qp; - bitrate_control_param->min_qp = parent->min_qp; - bitrate_control_param->target_percentage = parent->target_percentage; - bitrate_control_param->window_size = parent->window_size; - - va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &misc_param_buffer_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT mix_videofmtenc_h264_send_max_slice_size (MixVideoFormatEnc_H264 *mix) -{ - VAStatus va_status; - - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - - MixVideoFormatEnc *parent = NULL; - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { - - LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_max_slice_size\n"); - return VA_STATUS_SUCCESS; - } - - VAEncMiscParameterBuffer * misc_enc_param_buf; - VAEncMiscParameterMaxSliceSize * max_slice_size_param; - VABufferID misc_param_buffer_id; - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncMiscParameterBufferType, - sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterMaxSliceSize), - 1, NULL, - &misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - - va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, 
(void **)&misc_enc_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - misc_enc_param_buf->type = VAEncMiscParameterTypeMaxSliceSize; - max_slice_size_param = (VAEncMiscParameterMaxSliceSize *)misc_enc_param_buf->data; - - max_slice_size_param->max_slice_size = parent->max_slice_size; - - va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - LOG_I( "max slice size = %d\n", - max_slice_size_param->max_slice_size); - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &misc_param_buffer_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videofmtenc_h264_send_AIR (MixVideoFormatEnc_H264 *mix) -{ - VAStatus va_status; - - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - - MixVideoFormatEnc *parent = NULL; - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - - if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { - - LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_AIR\n"); - return VA_STATUS_SUCCESS; - } - - VAEncMiscParameterBuffer * misc_enc_param_buf; - VAEncMiscParameterAIR * air_param; - VABufferID misc_param_buffer_id; - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncMiscParameterBufferType, - sizeof(misc_enc_param_buf) + sizeof(VAEncMiscParameterAIR), - 1, NULL, - &misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - misc_enc_param_buf->type = VAEncMiscParameterTypeAIR; - air_param = (VAEncMiscParameterAIR *)misc_enc_param_buf->data; - - air_param->air_auto = parent->air_params.air_auto; - air_param->air_num_mbs = parent->air_params.air_MBs; - air_param->air_threshold = parent->air_params.air_threshold; - - va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &misc_param_buffer_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - - LOG_I( "air_threshold = %d\n", - air_param->air_threshold); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_h264_send_dynamic_framerate (MixVideoFormatEnc_H264 *mix) -{ - VAStatus va_status; - - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - - - MixVideoFormatEnc *parent = NULL; - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { - - LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_dynamic_framerate\n"); - return VA_STATUS_SUCCESS; - } - - VAEncMiscParameterBuffer * misc_enc_param_buf; - VAEncMiscParameterFrameRate * framerate_param; - VABufferID 
misc_param_buffer_id; - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncMiscParameterBufferType, - sizeof(misc_enc_param_buf) + sizeof(VAEncMiscParameterFrameRate), - 1, NULL, - &misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - misc_enc_param_buf->type = VAEncMiscParameterTypeFrameRate; - framerate_param = (VAEncMiscParameterFrameRate *)misc_enc_param_buf->data; - framerate_param->framerate = - (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; - - va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &misc_param_buffer_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - - LOG_I( "frame rate = %d\n", - framerate_param->framerate); - - return MIX_RESULT_SUCCESS; - -} - diff --git a/mix_video/src/mixvideoformatenc_h264.cpp b/mix_video/src/mixvideoformatenc_h264.cpp new file mode 100644 index 0000000..cb354ae --- /dev/null +++ b/mix_video/src/mixvideoformatenc_h264.cpp @@ -0,0 +1,2775 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include +#include +#include + +#include "mixvideolog.h" + +#include "mixvideoformatenc_h264.h" +#include "mixvideoconfigparamsenc_h264.h" +#include + +#undef SHOW_SRC + +#ifdef SHOW_SRC +Window win = 0; +#endif /* SHOW_SRC */ + + +/* The parent class. The pointer will be saved + * in this class's initialization. The pointer + * can be used for chaining method call if needed. 
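+ *
+ * For example, a vmethod with no H.264-specific work can simply
+ * chain up:
+ *
+ *   if (parent_class->eos)
+ *       return parent_class->eos(mix);
+ *
+ * as mix_videofmtenc_h264_eos() below does.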
+ */ +static MixVideoFormatEncClass *parent_class = NULL; + +static void mix_videoformatenc_h264_finalize(GObject * obj); + +/* + * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC + */ +G_DEFINE_TYPE (MixVideoFormatEnc_H264, mix_videoformatenc_h264, MIX_TYPE_VIDEOFORMATENC); + +static void mix_videoformatenc_h264_init(MixVideoFormatEnc_H264 * self) { + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); + + /* member initialization */ + self->encoded_frames = 0; + self->frame_num = 0; + self->pic_skipped = FALSE; + self->is_intra = TRUE; + self->cur_frame = NULL; + self->ref_frame = NULL; + self->rec_frame = NULL; + self->lookup_frame = NULL; +#ifdef ANDROID + self->last_mix_buffer = NULL; +#endif + + self->ci_shared_surfaces = NULL; + self->surfaces= NULL; + self->surface_num = 0; + + self->coded_buf_index = 0; + parent->initialized = FALSE; +} + +static void mix_videoformatenc_h264_class_init( + MixVideoFormatEnc_H264Class * klass) { + + /* root class */ + GObjectClass *gobject_class = (GObjectClass *) klass; + + /* direct parent class */ + MixVideoFormatEncClass *video_formatenc_class = + MIX_VIDEOFORMATENC_CLASS(klass); + + /* parent class for later use */ + parent_class = reinterpret_cast(g_type_class_peek_parent(klass)); + + /* setup finializer */ + gobject_class->finalize = mix_videoformatenc_h264_finalize; + + /* setup vmethods with base implementation */ + video_formatenc_class->getcaps = mix_videofmtenc_h264_getcaps; + video_formatenc_class->initialize = mix_videofmtenc_h264_initialize; + video_formatenc_class->encode = mix_videofmtenc_h264_encode; + video_formatenc_class->flush = mix_videofmtenc_h264_flush; + video_formatenc_class->eos = mix_videofmtenc_h264_eos; + video_formatenc_class->deinitialize = mix_videofmtenc_h264_deinitialize; + video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_h264_get_max_encoded_buf_size; + video_formatenc_class->set_dynamic_config = mix_videofmtenc_h264_set_dynamic_enc_config; +} + +MixVideoFormatEnc_H264 * +mix_videoformatenc_h264_new(void) { + MixVideoFormatEnc_H264 *ret = reinterpret_cast( + g_object_new(MIX_TYPE_VIDEOFORMATENC_H264, NULL)); + + return ret; +} + +void mix_videoformatenc_h264_finalize(GObject * obj) { + /* clean up here. 
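+ * Nothing H.264-specific is freed at this point: the surfaces and the
+ * VA context/config are released in mix_videofmtenc_h264_deinitialize(),
+ * so this finalizer only needs to chain up to the parent class.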
*/ + + /*MixVideoFormatEnc_H264 *mix = MIX_VIDEOFORMATENC_H264(obj); */ + GObjectClass *root_class = (GObjectClass *) parent_class; + + LOG_V( "\n"); + + /* Chain up parent */ + if (root_class->finalize) { + root_class->finalize(obj); + } +} + +MixVideoFormatEnc_H264 * +mix_videoformatenc_h264_ref(MixVideoFormatEnc_H264 * mix) { + return (MixVideoFormatEnc_H264 *) g_object_ref(G_OBJECT(mix)); +} + +/*H.264 vmethods implementation */ +MIX_RESULT mix_videofmtenc_h264_getcaps(MixVideoFormatEnc *mix, GString *msg) { + + LOG_V( "mix_videofmtenc_h264_getcaps\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + + if (parent_class->getcaps) { + return parent_class->getcaps(mix, msg); + } + return MIX_RESULT_SUCCESS; +} + +#define CLEAN_UP {\ + if (ret == MIX_RESULT_SUCCESS) {\ + parent->initialized = TRUE;\ + }\ + if (va_profiles)\ + g_free(va_profiles);\ + if (va_entrypoints)\ + g_free (va_entrypoints);\ + if (surfaces)\ + g_free (surfaces);\ + g_mutex_unlock(parent->objectlock);\ + LOG_V( "end\n");\ + return ret;} + +MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display ) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + MixVideoConfigParamsEncH264 * config_params_enc_h264; + + VAStatus va_status = VA_STATUS_SUCCESS; + VASurfaceID * surfaces = NULL; + + gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; + gint va_num_profiles, va_num_entrypoints; + + VAProfile *va_profiles = NULL; + VAEntrypoint *va_entrypoints = NULL; + VAConfigAttrib va_attrib[2]; + guint index; + + + /*frame_mgr and input_buf_pool is reservered for future use*/ + + if (mix == NULL || config_params_enc == NULL || va_display == NULL) { + LOG_E( + "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "begin\n"); + + + + /* Chainup parent method. 
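+ * The chained-up base method is expected to record the common encode
+ * properties (picture size, bitrate, rate control mode) that the
+ * H.264-specific code below reads back through the parent pointer.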
*/ + if (parent_class->initialize) { + ret = parent_class->initialize(mix, config_params_enc, + frame_mgr, input_buf_pool, surface_pool, + va_display); + } + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) { + config_params_enc_h264 = + MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc); + } else { + LOG_V( + "mix_videofmtenc_h264_initialize: no h264 config params found\n"); + return MIX_RESULT_FAIL; + } + + g_mutex_lock(parent->objectlock); + + LOG_V( + "Start to get properities from h.264 params\n"); + + /* get properties from H264 params Object, which is special to H264 format*/ + ret = mix_videoconfigparamsenc_h264_get_bus (config_params_enc_h264, + &self->basic_unit_size); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_bus\n"); + CLEAN_UP; + } + + + ret = mix_videoconfigparamsenc_h264_get_dlk (config_params_enc_h264, + &self->disable_deblocking_filter_idc); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_dlk\n"); + CLEAN_UP; + } + + + ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, + &self->slice_num); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); + CLEAN_UP; + } + + ret = mix_videoconfigparamsenc_h264_get_I_slice_num (config_params_enc_h264, + &self->I_slice_num); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n"); + CLEAN_UP; + } + + ret = mix_videoconfigparamsenc_h264_get_P_slice_num (config_params_enc_h264, + &self->P_slice_num); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n"); + CLEAN_UP; + } + + ret = mix_videoconfigparamsenc_h264_get_delimiter_type (config_params_enc_h264, + &self->delimiter_type); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E ( + "Failed to mix_videoconfigparamsenc_h264_get_delimiter_type\n"); + CLEAN_UP; + } + + ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264, + &self->idr_interval); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E ( + "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); + CLEAN_UP; + } + + LOG_V( + "======H264 Encode Object properities======:\n"); + + LOG_I( "self->basic_unit_size = %d\n", + self->basic_unit_size); + LOG_I( "self->disable_deblocking_filter_idc = %d\n", + self->disable_deblocking_filter_idc); + LOG_I( "self->slice_num = %d\n", + self->slice_num); + LOG_I( "self->I_slice_num = %d\n", + self->I_slice_num); + LOG_I( "self->P_slice_num = %d\n", + self->P_slice_num); + LOG_I ("self->delimiter_type = %d\n", + self->delimiter_type); + LOG_I ("self->idr_interval = %d\n", + self->idr_interval); + + LOG_V( + "Get properities from params done\n"); + + parent->va_display = va_display; + + LOG_V( "Get Display\n"); + LOG_I( "Display = 0x%08x\n", + (guint)va_display); + + +#if 0 + /* query the vender information, can ignore*/ + va_vendor = vaQueryVendorString (va_display); + LOG_I( "Vendor = %s\n", + va_vendor); +#endif + + /*get the max number for profiles/entrypoints/attribs*/ + va_max_num_profiles = vaMaxNumProfiles(va_display); + LOG_I( "va_max_num_profiles = %d\n", + va_max_num_profiles); + + va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); + 
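+ /* This is the standard libva capability walk; a minimal sketch of the
+  * profile half, assuming only that va_display is a valid VADisplay
+  * (error handling omitted):
+  *
+  *   int max_p = vaMaxNumProfiles(va_display);
+  *   VAProfile *plist = g_malloc(sizeof(VAProfile) * max_p);
+  *   int num_p = 0;
+  *   vaQueryConfigProfiles(va_display, plist, &num_p);
+  *   int i;
+  *   for (i = 0; i < num_p; i++)
+  *       if (plist[i] == parent->va_profile)
+  *           break;   // i < num_p means the profile is supported
+  *
+  * The same walk is repeated below for entrypoints and, via
+  * vaGetConfigAttributes(), for the RT format and rate control mode.
+  */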
LOG_I( "va_max_num_entrypoints = %d\n", + va_max_num_entrypoints); + + va_max_num_attribs = vaMaxNumConfigAttributes(va_display); + LOG_I( "va_max_num_attribs = %d\n", + va_max_num_attribs); + + va_profiles = reinterpret_cast(g_malloc(sizeof(VAProfile)*va_max_num_profiles)); + va_entrypoints = reinterpret_cast(g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints)); + + if (va_profiles == NULL || va_entrypoints ==NULL) + { + LOG_E( + "!va_profiles || !va_entrypoints\n"); + ret = MIX_RESULT_NO_MEMORY; + CLEAN_UP; + } + + LOG_I( + "va_profiles = 0x%08x\n", (guint)va_profiles); + + LOG_V( "vaQueryConfigProfiles\n"); + + + va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigProfiles\n"); + + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( "vaQueryConfigProfiles Done\n"); + + + + /*check whether profile is supported*/ + for(index= 0; index < va_num_profiles; index++) { + if(parent->va_profile == va_profiles[index]) + break; + } + + if(index == va_num_profiles) + { + LOG_E( "Profile not supported\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( "vaQueryConfigEntrypoints\n"); + + + /*Check entry point*/ + va_status = vaQueryConfigEntrypoints(va_display, + parent->va_profile, + va_entrypoints, &va_num_entrypoints); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaQueryConfigEntrypoints\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + for (index = 0; index < va_num_entrypoints; index ++) { + if (va_entrypoints[index] == VAEntrypointEncSlice) { + break; + } + } + + if (index == va_num_entrypoints) { + LOG_E( "Entrypoint not found\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + va_attrib[0].type = VAConfigAttribRTFormat; + va_attrib[1].type = VAConfigAttribRateControl; + + LOG_V( "vaGetConfigAttributes\n"); + + va_status = vaGetConfigAttributes(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaGetConfigAttributes\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + if ((va_attrib[0].value & parent->va_format) == 0) { + LOG_E( "Matched format not found\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + + if ((va_attrib[1].value & parent->va_rcmode) == 0) { + LOG_E( "RC mode not found\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; + va_attrib[1].value = parent->va_rcmode; + + LOG_V( "======VA Configuration======\n"); + + LOG_I( "profile = %d\n", + parent->va_profile); + LOG_I( "va_entrypoint = %d\n", + parent->va_entrypoint); + LOG_I( "va_attrib[0].type = %d\n", + va_attrib[0].type); + LOG_I( "va_attrib[1].type = %d\n", + va_attrib[1].type); + LOG_I( "va_attrib[0].value (Format) = %d\n", + va_attrib[0].value); + LOG_I( "va_attrib[1].value (RC mode) = %d\n", + va_attrib[1].value); + + LOG_V( "vaCreateConfig\n"); + + va_status = vaCreateConfig(va_display, parent->va_profile, + parent->va_entrypoint, + &va_attrib[0], 2, &(parent->va_config)); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaCreateConfig\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + + if (parent->va_rcmode == VA_RC_VCM) { + + /* + * Following three features are only enabled in VCM mode + */ + parent->render_mss_required = TRUE; + parent->render_AIR_required = TRUE; + parent->render_bitrate_required = TRUE; + self->slice_num = (parent->picture_height + 15) / 16; //if we are in VCM, we will set slice num to max value + 
} + + + /*TODO: compute the surface number*/ + int numSurfaces; + + if (parent->share_buf_mode) { + numSurfaces = 2; + } + else { + numSurfaces = 8; + parent->ci_frame_num = 0; + } + + self->surface_num = numSurfaces + parent->ci_frame_num; + + surfaces = reinterpret_cast(g_malloc(sizeof(VASurfaceID)*numSurfaces)); + + if (surfaces == NULL) + { + LOG_E( + "Failed allocate surface\n"); + ret = MIX_RESULT_NO_MEMORY; + CLEAN_UP; + } + + LOG_V( "vaCreateSurfaces\n"); + + va_status = vaCreateSurfaces(va_display, parent->picture_width, + parent->picture_height, parent->va_format, + numSurfaces, surfaces); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaCreateSurfaces\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + if (parent->share_buf_mode) { + + LOG_V( + "We are in share buffer mode!\n"); + self->ci_shared_surfaces = reinterpret_cast( + g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num)); + + if (self->ci_shared_surfaces == NULL) + { + LOG_E( + "Failed allocate shared surface\n"); + + ret = MIX_RESULT_NO_MEMORY; + CLEAN_UP; + } + + guint index; + for(index = 0; index < parent->ci_frame_num; index++) { + + LOG_I( "ci_frame_id = %lu\n", + parent->ci_frame_id[index]); + + LOG_V( + "vaCreateSurfaceFromCIFrame\n"); + + va_status = vaCreateSurfaceFromCIFrame(va_display, + (gulong) (parent->ci_frame_id[index]), + &self->ci_shared_surfaces[index]); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateSurfaceFromCIFrame\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + } + + LOG_V( + "vaCreateSurfaceFromCIFrame Done\n"); + + }// if (parent->share_buf_mode) + + self->surfaces = reinterpret_cast(g_malloc(sizeof(VASurfaceID) * self->surface_num)); + + if (self->surfaces == NULL) + { + LOG_E( + "Failed allocate private surface\n"); + ret = MIX_RESULT_NO_MEMORY; + CLEAN_UP; + } + + if (parent->share_buf_mode) { + /*shared surfaces should be put in pool first, + because we will get it accoring to CI index*/ + for(index = 0; index < parent->ci_frame_num; index++) + self->surfaces[index] = self->ci_shared_surfaces[index]; + } + + for(index = 0; index < numSurfaces; index++) { + self->surfaces[index + parent->ci_frame_num] = surfaces[index]; + } + + LOG_V( "assign surface Done\n"); + LOG_I( "Created %d libva surfaces\n", + numSurfaces + parent->ci_frame_num); + +#if 0 //current put this in gst + images = g_malloc(sizeof(VAImage)*numSurfaces); + if (images == NULL) + { + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + for (index = 0; index < numSurfaces; index++) { + //Derive an VAImage from an existing surface. 
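+ //(zero-copy, unlike vaCreateImage()+vaGetImage(), which copies the surface)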
+ //The image buffer can then be mapped/unmapped for CPU access + va_status = vaDeriveImage(va_display, surfaces[index], + &images[index]); + } +#endif + + LOG_V( "mix_surfacepool_new\n"); + + parent->surfacepool = mix_surfacepool_new(); + if (surface_pool) + *surface_pool = parent->surfacepool; + //which is useful to check before encode + + if (parent->surfacepool == NULL) + { + LOG_E( + "Failed to mix_surfacepool_new\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( + "mix_surfacepool_initialize\n"); + + ret = mix_surfacepool_initialize(parent->surfacepool, + self->surfaces, parent->ci_frame_num + numSurfaces, va_display); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + LOG_E( "Error init failure\n"); + ret = MIX_RESULT_ALREADY_INIT; + CLEAN_UP; + default: + break; + } + + + //Initialize and save the VA context ID + LOG_V( "vaCreateContext\n"); + + va_status = vaCreateContext(va_display, parent->va_config, + parent->picture_width, parent->picture_height, + 0, self->surfaces, parent->ci_frame_num + numSurfaces, + &(parent->va_context)); + + LOG_I( + "Created libva context width %d, height %d\n", + parent->picture_width, parent->picture_height); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateContext\n"); + LOG_I( "va_status = %d\n", + (guint)va_status); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + guint max_size = 0; + ret = mix_videofmtenc_h264_get_max_encoded_buf_size (parent, &max_size); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videofmtenc_h264_get_max_encoded_buf_size\n"); + CLEAN_UP; + + } + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &(self->coded_buf[0])); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &(self->coded_buf[1])); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + +#ifdef SHOW_SRC + Display * display = XOpenDisplay (NULL); + LOG_I( "display = 0x%08x\n", + (guint) display); + win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, + parent->picture_width, parent->picture_height, 0, 0, + WhitePixel(display, 0)); + XMapWindow(display, win); + XSelectInput(display, win, KeyPressMask | StructureNotifyMask); + + XSync(display, False); + LOG_I( "va_display = 0x%08x\n", + (guint) va_display); + +#endif /* SHOW_SRC */ + CLEAN_UP; +} +#undef CLEAN_UP + +#define CLEAN_UP {\ + LOG_V( "UnLocking\n");\ + g_mutex_unlock(parent->objectlock);\ + LOG_V( "end\n");\ + return ret;} + +MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + + LOG_V( "Begin\n"); + + /*currenly only support one input and output buffer*/ + + if (bufincnt != 1 || iovoutcnt != 1) { + LOG_E( + "buffer count not equel to 1\n"); + LOG_E( + "maybe some exception occurs\n"); + } + + if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { + LOG_E( + "!mix || !bufin[0] ||!iovout[0]\n"); + return MIX_RESULT_NULL_PTR; + } + +#if 0 + if 
(parent_class->encode) { + return parent_class->encode(mix, bufin, bufincnt, iovout, + iovoutcnt, encode_params); + } +#endif + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264 (mix); + + LOG_V( "Locking\n"); + g_mutex_lock(parent->objectlock); + + + //TODO: also we could move some encode Preparation work to here + + LOG_V( + "mix_videofmtenc_h264_process_encode\n"); + + ret = mix_videofmtenc_h264_process_encode (self, + bufin[0], iovout[0]); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_process_encode\n"); + CLEAN_UP; + } + + CLEAN_UP; +} +#undef CLEAN_UP + +MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { + + //MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + + /*not chain to parent flush func*/ +#if 0 + if (parent_class->flush) { + return parent_class->flush(mix, msg); + } +#endif + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); + + g_mutex_lock(mix->objectlock); + +#if 0 + /*unref the current source surface*/ + if (self->cur_frame != NULL) + { + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; + } +#endif + + /*unref the reconstructed surface*/ + if (self->rec_frame != NULL) + { + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_frame != NULL) + { + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; + } + +#ifdef ANDROID + if(self->last_mix_buffer) { + mix_buffer_unref(self->last_mix_buffer); + self->last_mix_buffer = NULL; + } +#endif + /*reset the properities*/ + self->encoded_frames = 0; + self->frame_num = 0; + self->pic_skipped = FALSE; + self->is_intra = TRUE; + + g_mutex_unlock(mix->objectlock); + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix) { + + LOG_V( "\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if (parent_class->eos) { + return parent_class->eos(mix); + } + return MIX_RESULT_SUCCESS; +} + +#define CLEAN_UP {\ + parent->initialized = FALSE;\ + g_mutex_unlock(parent->objectlock);\ + LOG_V( "end\n");\ + return ret;} + +MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) { + + MixVideoFormatEnc *parent = NULL; + VAStatus va_status; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + if (parent_class->deinitialize) { + ret = parent_class->deinitialize(mix); + } + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); + + LOG_V( "Release frames\n"); + + g_mutex_lock(parent->objectlock); + +#if 0 + /*unref the current source surface*/ + if (self->cur_frame != NULL) + { + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; + } +#endif + + /*unref the reconstructed surface*/ + if (self->rec_frame != NULL) + { + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_frame != NULL) + { + mix_videoframe_unref 
(self->ref_frame); + self->ref_frame = NULL; + } + + if (self->lookup_frame != NULL) + { + mix_videoframe_unref (self->lookup_frame); + self->lookup_frame = NULL; + } + + LOG_V( "Release surfaces\n"); + + if (self->ci_shared_surfaces) + { + g_free (self->ci_shared_surfaces); + self->ci_shared_surfaces = NULL; + } + + if (self->surfaces) + { + g_free (self->surfaces); + self->surfaces = NULL; + } + + LOG_V( "vaDestroyContext\n"); + + va_status = vaDestroyContext (parent->va_display, parent->va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyContext\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( "vaDestroyConfig\n"); + + va_status = vaDestroyConfig (parent->va_display, parent->va_config); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyConfig\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + CLEAN_UP; +} +#undef CLEAN_UP + +MIX_RESULT mix_videofmtenc_h264_send_seq_params (MixVideoFormatEnc_H264 *mix) +{ + + VAStatus va_status; + VAEncSequenceParameterBufferH264 h264_seq_param; + + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set up the sequence params for HW*/ + h264_seq_param.level_idc = 30; //TODO, hard code now + h264_seq_param.intra_period = parent->intra_period; + h264_seq_param.intra_idr_period = mix->idr_interval; + h264_seq_param.picture_width_in_mbs = parent->picture_width / 16; + h264_seq_param.picture_height_in_mbs = parent->picture_height/ 16; + h264_seq_param.bits_per_second = parent->bitrate; + h264_seq_param.frame_rate = + (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; + h264_seq_param.initial_qp = parent->initial_qp; + h264_seq_param.min_qp = parent->min_qp; + h264_seq_param.basic_unit_size = mix->basic_unit_size; //for rate control usage + h264_seq_param.intra_period = parent->intra_period; + //h264_seq_param.vui_flag = 248; + //h264_seq_param.seq_parameter_set_id = 176; + + LOG_V( + "===h264 sequence params===\n"); + + LOG_I( "seq_parameter_set_id = %d\n", + (guint)h264_seq_param.seq_parameter_set_id); + LOG_I( "level_idc = %d\n", + (guint)h264_seq_param.level_idc); + LOG_I( "intra_period = %d\n", + h264_seq_param.intra_period); + LOG_I( "idr_interval = %d\n", + h264_seq_param.intra_idr_period); + LOG_I( "picture_width_in_mbs = %d\n", + h264_seq_param.picture_width_in_mbs); + LOG_I( "picture_height_in_mbs = %d\n", + h264_seq_param.picture_height_in_mbs); + LOG_I( "bitrate = %d\n", + h264_seq_param.bits_per_second); + LOG_I( "frame_rate = %d\n", + h264_seq_param.frame_rate); + LOG_I( "initial_qp = %d\n", + h264_seq_param.initial_qp); + LOG_I( "min_qp = %d\n", + h264_seq_param.min_qp); + LOG_I( "basic_unit_size = %d\n", + h264_seq_param.basic_unit_size); + LOG_I( "vui_flag = %d\n\n", + h264_seq_param.vui_flag); + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncSequenceParameterBufferType, + sizeof(h264_seq_param), + 1, &h264_seq_param, + &mix->seq_param_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->seq_param_buf, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + return 
MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_h264_send_picture_parameter (MixVideoFormatEnc_H264 *mix) +{ + VAStatus va_status; + VAEncPictureParameterBufferH264 h264_pic_param; + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set picture params for HW*/ + h264_pic_param.reference_picture = mix->ref_frame->frame_id; + h264_pic_param.reconstructed_picture = mix->rec_frame->frame_id; + h264_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; + h264_pic_param.picture_width = parent->picture_width; + h264_pic_param.picture_height = parent->picture_height; + h264_pic_param.last_picture = 0; + + + LOG_V( + "======h264 picture params======\n"); + LOG_I( "reference_picture = 0x%08x\n", + h264_pic_param.reference_picture); + LOG_I( "reconstructed_picture = 0x%08x\n", + h264_pic_param.reconstructed_picture); + LOG_I( "coded_buf_index = %d\n", + mix->coded_buf_index); + LOG_I( "coded_buf = 0x%08x\n", + h264_pic_param.coded_buf); + LOG_I( "picture_width = %d\n", + h264_pic_param.picture_width); + LOG_I( "picture_height = %d\n\n", + h264_pic_param.picture_height); + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncPictureParameterBufferType, + sizeof(h264_pic_param), + 1,&h264_pic_param, + &mix->pic_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->pic_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + return MIX_RESULT_SUCCESS; + +} + + +MIX_RESULT mix_videofmtenc_h264_send_slice_parameter (MixVideoFormatEnc_H264 *mix) +{ + VAStatus va_status; + + guint slice_num; + guint slice_height; + guint slice_index; + guint slice_height_in_mb; + guint max_slice_num; + guint min_slice_num; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + + MixVideoFormatEnc *parent = NULL; + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + max_slice_num = (parent->picture_height + 15) / 16; + min_slice_num = 1; + + if (mix->is_intra) { + slice_num = mix->I_slice_num; + } + else { + slice_num = mix->P_slice_num; + } + + if (slice_num < min_slice_num) { + LOG_W ("Slice Number is too small"); + slice_num = min_slice_num; + } + + if (slice_num > max_slice_num) { + LOG_W ("Slice Number is too big"); + slice_num = max_slice_num; + } + + slice_height = parent->picture_height / slice_num; + + slice_height += 15; + slice_height &= (~15); + + slice_num = mix->slice_num = (parent->picture_height + 15) / slice_height; + +#if 1 + va_status = vaCreateBuffer (parent->va_display, parent->va_context, + VAEncSliceParameterBufferType, + sizeof(VAEncSliceParameterBuffer), + slice_num, NULL, + &mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + VAEncSliceParameterBuffer *slice_param, *current_slice; + + va_status = vaMapBuffer(parent->va_display, + mix->slice_param_buf, + (void **)&slice_param); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaMapBuffer\n"); + return 
MIX_RESULT_FAIL; + } + + current_slice = slice_param; + + + for (slice_index = 0; slice_index < slice_num; slice_index++) { + current_slice = slice_param + slice_index; + slice_height_in_mb = + min (slice_height, parent->picture_height + - slice_index * slice_height) / 16; + + // starting MB row number for this slice + current_slice->start_row_number = slice_index * slice_height / 16; + // slice height measured in MB + current_slice->slice_height = slice_height_in_mb; + current_slice->slice_flags.bits.is_intra = mix->is_intra; + current_slice->slice_flags.bits.disable_deblocking_filter_idc + = mix->disable_deblocking_filter_idc; + + LOG_V( + "======h264 slice params======\n"); + + LOG_I( "slice_index = %d\n", + (gint) slice_index); + LOG_I( "start_row_number = %d\n", + (gint) current_slice->start_row_number); + LOG_I( "slice_height_in_mb = %d\n", + (gint) current_slice->slice_height); + LOG_I( "slice.is_intra = %d\n", + (gint) current_slice->slice_flags.bits.is_intra); + LOG_I( + "disable_deblocking_filter_idc = %d\n\n", + (gint) mix->disable_deblocking_filter_idc); + + } + + va_status = vaUnmapBuffer(parent->va_display, mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } +#endif + +#if 0 + VAEncSliceParameterBuffer slice_param; + slice_index = 0; + slice_height_in_mb = slice_height / 16; + slice_param.start_row_number = 0; + slice_param.slice_height = slice_height / 16; + slice_param.slice_flags.bits.is_intra = mix->is_intra; + slice_param.slice_flags.bits.disable_deblocking_filter_idc + = mix->disable_deblocking_filter_idc; + + va_status = vaCreateBuffer (parent->va_display, parent->va_context, + VAEncSliceParameterBufferType, + sizeof(slice_param), + slice_num, &slice_param, + &mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } +#endif + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->slice_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +#define CLEAN_UP {\ + if (ret != MIX_RESULT_SUCCESS) {\ + if (iovout->data) {\ + g_free (iovout->data);\ + iovout->data = NULL;\ + }\ + }\ + LOG_V( "end\n");\ + /* + * The error level of MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW + * is lower than other errors, so if any other errors happen, we won't + * return slice size overflow + */\ + if (ret == MIX_RESULT_SUCCESS && slice_size_overflow)\ + ret = MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW;\ + return ret;} + +MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, + MixBuffer * bufin, MixIOVec * iovout) +{ + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus va_status = VA_STATUS_SUCCESS; + VADisplay va_display = NULL; + VAContextID va_context; + gulong surface = 0; + guint16 width, height; + + MixVideoFrame * tmp_frame; + guint8 *buf; + gboolean slice_size_overflow = FALSE; + + if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { + LOG_E( + "mix == NUL) || bufin == NULL || iovout == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + va_display = parent->va_display; + va_context = parent->va_context; + width = parent->picture_width; + height = parent->picture_height; + + + 
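+ /* Per-frame flow from here on: pick source/reference/reconstructed
+  * surfaces, upload (or look up) the input, then vaBeginPicture ->
+  * render parameter buffers -> vaEndPicture, and finally map a coded
+  * buffer to copy the compressed bits out. Note the two-frame
+  * pipelining: the buffer actually mapped is last_coded_buf, which
+  * after the first frame is the previously submitted frame's output.
+  */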
LOG_I( "encoded_frames = %d\n", + mix->encoded_frames); + LOG_I( "frame_num = %d\n", + mix->frame_num); + LOG_I( "is_intra = %d\n", + mix->is_intra); + LOG_I( "ci_frame_id = 0x%08x\n", + (guint) parent->ci_frame_id); + + if (parent->new_header_required) { + mix->frame_num = 0; + } + + /* determine the picture type*/ + //if ((mix->encoded_frames % parent->intra_period) == 0) { + if ((mix->frame_num % parent->intra_period) == 0) { + mix->is_intra = TRUE; + } else { + mix->is_intra = FALSE; + } + + LOG_I( "is_intra_picture = %d\n", + mix->is_intra); + + LOG_V( + "Get Surface from the pool\n"); + + /*current we use one surface for source data, + * one for reference and one for reconstructed*/ + /*TODO, could be refine here*/ + + if (!parent->share_buf_mode) { + LOG_V( + "We are NOT in share buffer mode\n"); + + if (mix->ref_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + CLEAN_UP; + } + } + + if (mix->rec_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + CLEAN_UP; + } + } + + if (parent->need_display) { + mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + CLEAN_UP; + } + } + + LOG_V( "Get Surface Done\n"); + + + VAImage src_image; + guint8 *pvbuf; + guint8 *dst_y; + guint8 *dst_uv; + int i,j; + + LOG_V( + "map source data to surface\n"); + + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videoframe_get_frame_id\n"); + CLEAN_UP; + } + + + LOG_I( + "surface id = 0x%08x\n", (guint) surface); + + va_status = vaDeriveImage(va_display, surface, &src_image); + //need to destroy + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDeriveImage\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + VAImage *image = &src_image; + + LOG_V( "vaDeriveImage Done\n"); + + + va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed to vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( + "vaImage information\n"); + LOG_I( + "image->pitches[0] = %d\n", image->pitches[0]); + LOG_I( + "image->pitches[1] = %d\n", image->pitches[1]); + LOG_I( + "image->offsets[0] = %d\n", image->offsets[0]); + LOG_I( + "image->offsets[1] = %d\n", image->offsets[1]); + LOG_I( + "image->num_planes = %d\n", image->num_planes); + LOG_I( + "image->width = %d\n", image->width); + LOG_I( + "image->height = %d\n", image->height); + + LOG_I( + "input buf size = %d\n", bufin->size); + + guint8 *inbuf = bufin->data; + +#ifndef ANDROID +#define USE_SRC_FMT_YUV420 +#else +#define USE_SRC_FMT_NV21 +#endif + +#ifdef USE_SRC_FMT_YUV420 + /*need to convert YUV420 to NV12*/ + dst_y = pvbuf +image->offsets[0]; + + for (i = 0; i < height; i ++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + + dst_uv = pvbuf + image->offsets[1]; + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; + dst_uv [j + 1] = + inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; + } + dst_uv += image->pitches[1]; + } +#else //USE_SRC_FMT_NV12 or 
USE_SRC_FMT_NV21 + int offset_uv = width * height; + guint8 *inbuf_uv = inbuf + offset_uv; + int height_uv = height / 2; + int width_uv = width; + + dst_y = pvbuf + image->offsets[0]; + for (i = 0; i < height; i++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + +#ifdef USE_SRC_FMT_NV12 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i++) { + memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); + dst_uv += image->pitches[1]; + } +#else //USE_SRC_FMT_NV21 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i ++) { + for (j = 0; j < width_uv; j += 2) { + dst_uv[j] = inbuf_uv[j+1]; //u + dst_uv[j+1] = inbuf_uv[j]; //v + } + dst_uv += image->pitches[1]; + inbuf_uv += width_uv; + } +#endif +#endif //USE_SRC_FMT_YUV420 + + va_status = vaUnmapBuffer(va_display, image->buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + va_status = vaDestroyImage(va_display, src_image.image_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDestroyImage\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( + "Map source data to surface done\n"); + + } + + else {//if (!parent->share_buf_mode) + + //MixVideoFrame * frame = mix_videoframe_new(); + if (mix->lookup_frame == NULL) + { + mix->lookup_frame = mix_videoframe_new (); + if (mix->lookup_frame == NULL) + { + LOG_E("mix_videoframe_new() failed!\n"); + ret = MIX_RESULT_NO_MEMORY; + CLEAN_UP; + } + } + + if (mix->ref_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (mix->lookup_frame, mix->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + CLEAN_UP; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->ref_frame, mix->lookup_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get reference surface from pool failed\n"); + CLEAN_UP; + } + } + + if (mix->rec_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (mix->lookup_frame, mix->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + CLEAN_UP; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->rec_frame, mix->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get recontructed surface from pool failed\n"); + CLEAN_UP; + } + } + + //mix_videoframe_unref (mix->cur_frame); + + if (parent->need_display) { + mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + guint ci_idx; +#ifndef ANDROID + memcpy (&ci_idx, bufin->data, bufin->size); +#else + memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); +#endif + + LOG_I( + "surface_num = %d\n", mix->surface_num); + LOG_I( + "ci_frame_idx = %d\n", ci_idx); + + if (ci_idx > mix->surface_num - 2) { + LOG_E( + "the CI frame idx is too bigger than CI frame number\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + + } + + + ret = mix_videoframe_set_ci_frame_idx (mix->lookup_frame, ci_idx); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + CLEAN_UP; + } + + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->cur_frame, mix->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get current working surface from pool failed\n"); + CLEAN_UP; + + } + } + + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + + } + + /** + * Start encoding process + **/ + + LOG_V( "vaBeginPicture\n"); + LOG_I( 
"va_context = 0x%08x\n",(guint)va_context); + LOG_I( "surface = 0x%08x\n",(guint)surface); + LOG_I( "va_display = 0x%08x\n",(guint)va_display); + + + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaBeginPicture\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + ret = mix_videofmtenc_h264_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); + CLEAN_UP; + } + + + if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + } + + LOG_V( "vaEndPicture\n"); + + if (mix->encoded_frames == 0) { + mix->encoded_frames ++; + mix->frame_num ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + + mix->last_frame = mix->cur_frame; + + + /* determine the picture type*/ + //if ((mix->encoded_frames % parent->intra_period) == 0) { + if ((mix->frame_num % parent->intra_period) == 0) { + mix->is_intra = TRUE; + } else { + mix->is_intra = FALSE; + } + + tmp_frame = mix->rec_frame; + mix->rec_frame= mix->ref_frame; + mix->ref_frame = tmp_frame; + + + } + + + LOG_V( "vaSyncSurface\n"); + + va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaSyncSurface\n"); + + //return MIX_RESULT_FAIL; + } + + LOG_V( + "Start to get encoded data\n"); + + /*get encoded data from the VA buffer*/ + va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + + VACodedBufferSegment *coded_seg = NULL; + int num_seg = 0; + guint total_size = 0; + guint size = 0; + guint status = 0; + + coded_seg = (VACodedBufferSegment *)buf; + num_seg = 1; + + while (1) { + total_size += coded_seg->size; + + status = coded_seg->status; + + if (!slice_size_overflow) { + + slice_size_overflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK; + } + + if (coded_seg->next == NULL) + break; + + coded_seg = reinterpret_cast(coded_seg->next); + num_seg ++; + } + + LOG_I ("segment number = %d\n", num_seg); + +#if 0 + // first 4 bytes is the size of the buffer + memcpy (&(iovout->data_size), (void*)buf, 4); + //size = (guint*) buf; + + guint size = iovout->data_size + 100; +#endif + + iovout->data_size = total_size; + size = total_size + 100; + + iovout->buffer_size = size; + + //We will support two buffer mode, one is application allocates the buffer and passes to encode, + //the other is encode allocate memory + + + if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. 
+ iovout->data = reinterpret_cast(g_malloc (size)); // In case we have lots of 0x000001 start code, and we replace them with 4 bytes length prefixed + if (iovout->data == NULL) { + LOG_E( "iovout->data == NULL\n"); + ret = MIX_RESULT_NO_MEMORY; + CLEAN_UP; + } + } + + coded_seg = (VACodedBufferSegment *)buf; + total_size = 0; + + if (mix->delimiter_type == MIX_DELIMITER_ANNEXB) { + + while (1) { + + memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); + total_size += coded_seg->size; + + if (coded_seg->next == NULL) + break; + + coded_seg = reinterpret_cast(coded_seg->next); + } + + //memcpy (iovout->data, buf + 16, iovout->data_size); //parload is started from 17th byte + //size = iovout->data_size; + + } else { + + guint pos = 0; + guint zero_byte_count = 0; + guint prefix_length = 0; + guint8 nal_unit_type = 0; + //guint8 * payload = buf + 16; + guint8 * payload = reinterpret_cast(coded_seg->buf); + + while ((payload[pos++] == 0x00)) { + zero_byte_count ++; + if (pos >= coded_seg->size) //to make sure the buffer to be accessed is valid + break; + } + + nal_unit_type = (guint8)(payload[pos] & 0x1f); + prefix_length = zero_byte_count + 1; + + LOG_I ("nal_unit_type = %d\n", nal_unit_type); + LOG_I ("zero_byte_count = %d\n", zero_byte_count); + + size = iovout->data_size; + + if ((payload [pos - 1] & 0x01) && mix->slice_num == 1 && nal_unit_type == 1 && num_seg == 1) { + iovout->data[0] = ((size - prefix_length) >> 24) & 0xff; + iovout->data[1] = ((size - prefix_length) >> 16) & 0xff; + iovout->data[2] = ((size - prefix_length) >> 8) & 0xff; + iovout->data[3] = (size - prefix_length) & 0xff; + // use 4 bytes to indicate the NALU length + //memcpy (iovout->data + 4, buf + 16 + prefix_length, size - prefix_length); + memcpy (iovout->data + 4, coded_seg->buf + prefix_length, size - prefix_length); + LOG_V ("We only have one start code, copy directly\n"); + } + else { + + if (num_seg == 1) { + ret = mix_videofmtenc_h264_AnnexB_to_length_prefixed (reinterpret_cast(coded_seg->buf), coded_seg->size, iovout->data, &size); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_AnnexB_to_length_prefixed\n"); + CLEAN_UP; + } + + } else { + + guint8 * tem_buf = NULL; + tem_buf = reinterpret_cast(g_malloc (size)); + if (tem_buf == NULL) { + LOG_E( "tem_buf == NULL\n"); + ret = MIX_RESULT_NO_MEMORY; + CLEAN_UP; + } + + while (1) { + + memcpy (tem_buf + total_size, coded_seg->buf, coded_seg->size); + total_size += coded_seg->size; + + if (coded_seg->next == NULL) + break; + + coded_seg = reinterpret_cast(coded_seg->next); + } + + ret = mix_videofmtenc_h264_AnnexB_to_length_prefixed (tem_buf, iovout->data_size, iovout->data, &size); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_AnnexB_to_length_prefixed\n"); + CLEAN_UP; + } + + g_free (tem_buf); + } + } + } + + LOG_I( + "out size is = %d\n", iovout->data_size); + + va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( "get encoded data done\n"); + + if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + } + + if (mix->encoded_frames == 1) { + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed 
vaBeginPicture\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + ret = mix_videofmtenc_h264_send_encode_command (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); + CLEAN_UP; + } + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + } + + VASurfaceStatus va_surface_status; + + /*query the status of current surface*/ + va_status = vaQuerySurfaceStatus(va_display, surface, &va_surface_status); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaQuerySurfaceStatus\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + mix->pic_skipped = va_surface_status & VASurfaceSkipped; + + if (parent->need_display) { + ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + CLEAN_UP; + } + + ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_framemanager_enqueue\n"); + CLEAN_UP; + } + } + + + /*update the reference surface and reconstructed surface */ + if (!mix->pic_skipped) { + tmp_frame = mix->rec_frame; + mix->rec_frame= mix->ref_frame; + mix->ref_frame = tmp_frame; + } + +#if 0 + if (mix->ref_frame != NULL) + mix_videoframe_unref (mix->ref_frame); + mix->ref_frame = mix->rec_frame; + + mix_videoframe_unref (mix->cur_frame); +#endif + + mix->encoded_frames ++; + mix->frame_num ++; + mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; + mix->coded_buf_index ++; + mix->coded_buf_index %=2; + mix->last_frame = mix->cur_frame; + +#ifdef ANDROID + if(mix->last_mix_buffer) { + LOG_V("calls to mix_buffer_unref \n"); + LOG_V("refcount = %d\n", MIX_PARAMS(mix->last_mix_buffer)->refcount); + mix_buffer_unref(mix->last_mix_buffer); + } + + LOG_V("ref the current bufin\n"); + mix->last_mix_buffer = mix_buffer_ref(bufin); +#endif + + if (!(parent->need_display)) { + mix_videoframe_unref (mix->cur_frame); + mix->cur_frame = NULL; + } + + CLEAN_UP; +} +#undef CLEAN_UP + +MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size ( + MixVideoFormatEnc *mix, guint *max_size) +{ + + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL || max_size == NULL) + { + LOG_E( + "mix == NULL || max_size == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + parent = MIX_VIDEOFORMATENC(mix); + MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); + + LOG_V( "Begin\n"); + + if (MIX_IS_VIDEOFORMATENC_H264(self)) { + + if (self->coded_buf_size > 0) { + *max_size = self->coded_buf_size; + LOG_V ("Already calculate the max encoded size, get the value directly"); + return MIX_RESULT_SUCCESS; + } + + /*base on the rate control mode to calculate the defaule encoded buffer size*/ + if (self->va_rcmode == VA_RC_NONE) { + self->coded_buf_size = + (parent->picture_width* parent->picture_height * 400) / (16 * 16); + // set to value according to QP + } + else { + self->coded_buf_size = parent->bitrate/ 4; + } + + self->coded_buf_size = + max (self->coded_buf_size , + (parent->picture_width* parent->picture_height * 400) / (16 * 16)); + + /*in case got a very large user input bit rate value*/ + self->coded_buf_size = + min(self->coded_buf_size, + (parent->picture_width * parent->picture_height * 1.5 * 8)); + self->coded_buf_size = (self->coded_buf_size + 15) &(~15); + } + else + { + LOG_E( + "not H264 video encode Object\n"); + return MIX_RESULT_INVALID_PARAM; + } + + *max_size = self->coded_buf_size; 
+ + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed ( + guint8 * bufin, guint bufin_len, guint8* bufout, guint * bufout_len) +{ + + guint pos = 0; + guint last_pos = 0; + + guint zero_byte_count = 0; + guint nal_size = 0; + guint prefix_length = 0; + guint size_copied = 0; + guint leading_zero_count = 0; + + if (bufin == NULL || bufout == NULL || bufout_len == NULL) { + + LOG_E( + "bufin == NULL || bufout == NULL || bufout_len = NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if (bufin_len <= 0 || *bufout_len <= 0) { + LOG_E( + "bufin_len <= 0 || *bufout_len <= 0\n"); + return MIX_RESULT_FAIL; + } + + LOG_V ("Begin\n"); + + while ((bufin[pos++] == 0x00)) { + zero_byte_count ++; + if (pos >= bufin_len) //to make sure the buffer to be accessed is valid + break; + } + + if (bufin[pos - 1] != 0x01 || zero_byte_count < 2) + { + LOG_E("The stream is not AnnexB format \n"); + return MIX_RESULT_FAIL; ; //not AnnexB, we won't process it + } + + zero_byte_count = 0; + last_pos = pos; + + while (pos < bufin_len) { + + while (bufin[pos++] == 0) { + zero_byte_count ++; + if (pos >= bufin_len) //to make sure the buffer to be accessed is valid + break; + } + + if (bufin[pos - 1] == 0x01 && zero_byte_count >= 2) { + if (zero_byte_count == 2) { + prefix_length = 3; + } + else { + prefix_length = 4; + leading_zero_count = zero_byte_count - 3; + } + + LOG_I("leading_zero_count = %d\n", leading_zero_count); + + nal_size = pos - last_pos - prefix_length - leading_zero_count; + if (nal_size < 0) { + LOG_E ("something wrong in the stream\n"); + return MIX_RESULT_FAIL; //not AnnexB, we won't process it + } + + if (*bufout_len < (size_copied + nal_size + 4)) { + LOG_E ("The length of destination buffer is too small\n"); + return MIX_RESULT_FAIL; + } + + LOG_I ("nal_size = %d\n", nal_size); + + /*We use 4 bytes length prefix*/ + bufout [size_copied] = nal_size >> 24 & 0xff; + bufout [size_copied + 1] = nal_size >> 16 & 0xff; + bufout [size_copied + 2] = nal_size >> 8 & 0xff; + bufout [size_copied + 3] = nal_size & 0xff; + + size_copied += 4; //4 bytes length prefix + memcpy (bufout + size_copied, bufin + last_pos, nal_size); + size_copied += nal_size; + + LOG_I ("size_copied = %d\n", size_copied); + + zero_byte_count = 0; + leading_zero_count = 0; + last_pos = pos; + } + + else if (pos == bufin_len) { + + LOG_V ("Last NALU in this frame\n"); + + nal_size = pos - last_pos; + + if (*bufout_len < (size_copied + nal_size + 4)) { + LOG_E ("The length of destination buffer is too small\n"); + return MIX_RESULT_FAIL; + } + + /*We use 4 bytes length prefix*/ + bufout [size_copied] = nal_size >> 24 & 0xff; + bufout [size_copied + 1] = nal_size >> 16 & 0xff; + bufout [size_copied + 2] = nal_size >> 8 & 0xff; + bufout [size_copied + 3] = nal_size & 0xff; + + size_copied += 4; //4 bytes length prefix + memcpy (bufout + size_copied, bufin + last_pos, nal_size); + size_copied += nal_size; + + LOG_I ("size_copied = %d\n", size_copied); + } + + else { + zero_byte_count = 0; + leading_zero_count = 0; + } + + } + + if (size_copied != *bufout_len) { + *bufout_len = size_copied; + } + + LOG_V ("End\n"); + + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix) +{ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + //if (mix->encoded_frames == 0 || 
parent->new_header_required) { + if (mix->frame_num == 0 || parent->new_header_required) { + ret = mix_videofmtenc_h264_send_seq_params (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_seq_params\n"); + return MIX_RESULT_FAIL; + } + + parent->new_header_required = FALSE; //Set to require new header filed to FALSE + } + + if (parent->render_mss_required && parent->max_slice_size != 0) { + ret = mix_videofmtenc_h264_send_max_slice_size(mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_max_slice_size\n"); + return MIX_RESULT_FAIL; + } + + parent->render_mss_required = FALSE; + } + + if (parent->render_bitrate_required) { + ret = mix_videofmtenc_h264_send_dynamic_bitrate(mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_dynamic_bitrate\n"); + return MIX_RESULT_FAIL; + } + + parent->render_bitrate_required = FALSE; + } + + if (parent->render_AIR_required && + (parent->refresh_type == MIX_VIDEO_AIR || parent->refresh_type == MIX_VIDEO_BOTH)) + { + + ret = mix_videofmtenc_h264_send_AIR (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_AIR\n"); + return MIX_RESULT_FAIL; + } + + parent->render_AIR_required = FALSE; + } + + if (parent->render_framerate_required) { + + ret = mix_videofmtenc_h264_send_dynamic_framerate (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_dynamic_framerate\n"); + return MIX_RESULT_FAIL; + } + + parent->render_framerate_required = FALSE; + } + + ret = mix_videofmtenc_h264_send_picture_parameter (mix); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_picture_parameter\n"); + return MIX_RESULT_FAIL; + } + + ret = mix_videofmtenc_h264_send_slice_parameter (mix); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h264_send_slice_parameter\n"); + return MIX_RESULT_FAIL; + } + + + LOG_V( "End\n"); + + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params_enc, + MixEncParamsType params_type) { + + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + MixVideoConfigParamsEncH264 * config_params_enc_h264; + + LOG_V( "Begin\n"); + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) { + config_params_enc_h264 = + MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc); + } else { + LOG_V( + "mix_videofmtenc_h264_initialize: no h264 config params found\n"); + return MIX_RESULT_FAIL; + } + + /* + * For case params_type == MIX_ENC_PARAMS_SLICE_NUM + * we don't need to chain up to parent method, as we will handle + * dynamic slice height change inside this method, and other dynamic + * controls will be handled in parent method. 
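+ * The I/P slice counts and the IDR interval are likewise handled
+ * locally; the IDR case additionally sets new_header_required, so a
+ * fresh sequence header is emitted with the next frame.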
+ */ + if (params_type == MIX_ENC_PARAMS_SLICE_NUM) { + + g_mutex_lock(parent->objectlock); + + ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, + &self->slice_num); + + self->I_slice_num = self->P_slice_num = self->slice_num; + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); + + g_mutex_unlock(parent->objectlock); + + return ret; + } + + g_mutex_unlock(parent->objectlock); + + } + else if (params_type == MIX_ENC_PARAMS_I_SLICE_NUM) { + + g_mutex_lock(parent->objectlock); + + ret = mix_videoconfigparamsenc_h264_get_I_slice_num (config_params_enc_h264, + &self->I_slice_num); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n"); + + g_mutex_unlock(parent->objectlock); + + return ret; + } + + g_mutex_unlock(parent->objectlock); + + } + else if (params_type == MIX_ENC_PARAMS_P_SLICE_NUM) { + + g_mutex_lock(parent->objectlock); + + ret = mix_videoconfigparamsenc_h264_get_P_slice_num (config_params_enc_h264, + &self->P_slice_num); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n"); + + g_mutex_unlock(parent->objectlock); + + return ret; + } + + g_mutex_unlock(parent->objectlock); + + } else if (params_type == MIX_ENC_PARAMS_IDR_INTERVAL) { + + g_mutex_lock(parent->objectlock); + + ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264, + &self->idr_interval); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); + + g_mutex_unlock(parent->objectlock); + + return ret; + } + + parent->new_header_required = TRUE; + + g_mutex_unlock(parent->objectlock); + + } else{ + + /* Chainup parent method. 
*/ + if (parent_class->set_dynamic_config) { + ret = parent_class->set_dynamic_config(mix, config_params_enc, + params_type); + } + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_V( + "chainup parent method (set_dynamic_config) failed \n"); + return ret; + } + } + + + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videofmtenc_h264_send_dynamic_bitrate (MixVideoFormatEnc_H264 *mix) +{ + VAStatus va_status; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + MixVideoFormatEnc *parent = NULL; + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { + + LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_dynamic_bitrate\n"); + return VA_STATUS_SUCCESS; + } + + VAEncMiscParameterBuffer * misc_enc_param_buf; + VAEncMiscParameterRateControl * bitrate_control_param; + VABufferID misc_param_buffer_id; + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncMiscParameterBufferType, + sizeof(VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl), + 1, NULL, + &misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + misc_enc_param_buf->type = VAEncMiscParameterTypeRateControl; + bitrate_control_param = (VAEncMiscParameterRateControl *)misc_enc_param_buf->data; + + bitrate_control_param->bits_per_second = parent->bitrate; + bitrate_control_param->initial_qp = parent->initial_qp; + bitrate_control_param->min_qp = parent->min_qp; + bitrate_control_param->target_percentage = parent->target_percentage; + bitrate_control_param->window_size = parent->window_size; + + va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &misc_param_buffer_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT mix_videofmtenc_h264_send_max_slice_size (MixVideoFormatEnc_H264 *mix) +{ + VAStatus va_status; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + MixVideoFormatEnc *parent = NULL; + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { + + LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_max_slice_size\n"); + return VA_STATUS_SUCCESS; + } + + VAEncMiscParameterBuffer * misc_enc_param_buf; + VAEncMiscParameterMaxSliceSize * max_slice_size_param; + VABufferID misc_param_buffer_id; + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncMiscParameterBufferType, + sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterMaxSliceSize), + 1, NULL, + &misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + + va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, 
(void **)&misc_enc_param_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + misc_enc_param_buf->type = VAEncMiscParameterTypeMaxSliceSize; + max_slice_size_param = (VAEncMiscParameterMaxSliceSize *)misc_enc_param_buf->data; + + max_slice_size_param->max_slice_size = parent->max_slice_size; + + va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + LOG_I( "max slice size = %d\n", + max_slice_size_param->max_slice_size); + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &misc_param_buffer_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT mix_videofmtenc_h264_send_AIR (MixVideoFormatEnc_H264 *mix) +{ + VAStatus va_status; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + MixVideoFormatEnc *parent = NULL; + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + + if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { + + LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_AIR\n"); + return VA_STATUS_SUCCESS; + } + + VAEncMiscParameterBuffer * misc_enc_param_buf; + VAEncMiscParameterAIR * air_param; + VABufferID misc_param_buffer_id; + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncMiscParameterBufferType, + sizeof(misc_enc_param_buf) + sizeof(VAEncMiscParameterAIR), + 1, NULL, + &misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + misc_enc_param_buf->type = VAEncMiscParameterTypeAIR; + air_param = (VAEncMiscParameterAIR *)misc_enc_param_buf->data; + + air_param->air_auto = parent->air_params.air_auto; + air_param->air_num_mbs = parent->air_params.air_MBs; + air_param->air_threshold = parent->air_params.air_threshold; + + va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &misc_param_buffer_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + + LOG_I( "air_threshold = %d\n", + air_param->air_threshold); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_h264_send_dynamic_framerate (MixVideoFormatEnc_H264 *mix) +{ + VAStatus va_status; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + + MixVideoFormatEnc *parent = NULL; + + if (!MIX_IS_VIDEOFORMATENC_H264(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { + + LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_dynamic_framerate\n"); + return VA_STATUS_SUCCESS; + } + + VAEncMiscParameterBuffer * misc_enc_param_buf; + VAEncMiscParameterFrameRate * framerate_param; + VABufferID 
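send_dynamic_bitrate above, together with the max-slice-size, AIR and frame-rate helpers around it, follows the same five-step libva misc-parameter sequence: create a buffer sized for the VAEncMiscParameterBuffer header plus the specific parameter struct, map it, fill it, unmap it, and render it into the current picture. A condensed sketch, with dpy and ctx standing in for parent->va_display and parent->va_context and error checks elided; note that the allocation is sized with sizeof (VAEncMiscParameterBuffer), the header struct itself rather than a pointer to it:

    VAEncMiscParameterBuffer *hdr;
    VAEncMiscParameterFrameRate *fps;
    VABufferID buf_id;

    vaCreateBuffer (dpy, ctx, VAEncMiscParameterBufferType,
            sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterFrameRate),
            1, NULL, &buf_id);
    vaMapBuffer (dpy, buf_id, (void **) &hdr);   /* map, then fill in place */
    hdr->type = VAEncMiscParameterTypeFrameRate;
    fps = (VAEncMiscParameterFrameRate *) hdr->data;
    fps->framerate = 30;
    vaUnmapBuffer (dpy, buf_id);
    vaRenderPicture (dpy, ctx, &buf_id, 1);      /* queue it for this picture */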
misc_param_buffer_id; + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncMiscParameterBufferType, + sizeof(misc_enc_param_buf) + sizeof(VAEncMiscParameterFrameRate), + 1, NULL, + &misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + misc_enc_param_buf->type = VAEncMiscParameterTypeFrameRate; + framerate_param = (VAEncMiscParameterFrameRate *)misc_enc_param_buf->data; + framerate_param->framerate = + (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; + + va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &misc_param_buffer_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + + LOG_I( "frame rate = %d\n", + framerate_param->framerate); + + return MIX_RESULT_SUCCESS; + +} + diff --git a/mix_video/src/mixvideoformatenc_mpeg4.c b/mix_video/src/mixvideoformatenc_mpeg4.c deleted file mode 100644 index 18c0b16..0000000 --- a/mix_video/src/mixvideoformatenc_mpeg4.c +++ /dev/null @@ -1,1824 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ -#include -#include -#include - -#include "mixvideolog.h" - -#include "mixvideoformatenc_mpeg4.h" -#include "mixvideoconfigparamsenc_mpeg4.h" -#include - -#undef SHOW_SRC - -#ifdef SHOW_SRC -Window win = 0; -#endif /* SHOW_SRC */ - - -/* The parent class. The pointer will be saved - * in this class's initialization. The pointer - * can be used for chaining method call if needed. 
- */ -static MixVideoFormatEncClass *parent_class = NULL; - -static void mix_videoformatenc_mpeg4_finalize(GObject * obj); - -/* - * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC - */ -G_DEFINE_TYPE (MixVideoFormatEnc_MPEG4, mix_videoformatenc_mpeg4, MIX_TYPE_VIDEOFORMATENC); - -static void mix_videoformatenc_mpeg4_init(MixVideoFormatEnc_MPEG4 * self) { - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); - - /*member initialization */ - self->encoded_frames = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - self->cur_frame = NULL; - self->ref_frame = NULL; - self->rec_frame = NULL; - - self->ci_shared_surfaces = NULL; - self->surfaces= NULL; - self->surface_num = 0; - self->coded_buf_index = 0; - - parent->initialized = FALSE; - -} - -static void mix_videoformatenc_mpeg4_class_init( - MixVideoFormatEnc_MPEG4Class * klass) { - - /* root class */ - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* direct parent class */ - MixVideoFormatEncClass *video_formatenc_class = - MIX_VIDEOFORMATENC_CLASS(klass); - - /* parent class for later use */ - parent_class = g_type_class_peek_parent(klass); - - /* setup finializer */ - gobject_class->finalize = mix_videoformatenc_mpeg4_finalize; - - /* setup vmethods with base implementation */ - video_formatenc_class->getcaps = mix_videofmtenc_mpeg4_getcaps; - video_formatenc_class->initialize = mix_videofmtenc_mpeg4_initialize; - video_formatenc_class->encode = mix_videofmtenc_mpeg4_encode; - video_formatenc_class->flush = mix_videofmtenc_mpeg4_flush; - video_formatenc_class->eos = mix_videofmtenc_mpeg4_eos; - video_formatenc_class->deinitialize = mix_videofmtenc_mpeg4_deinitialize; - video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_mpeg4_get_max_encoded_buf_size; -} - -MixVideoFormatEnc_MPEG4 * -mix_videoformatenc_mpeg4_new(void) { - MixVideoFormatEnc_MPEG4 *ret = - g_object_new(MIX_TYPE_VIDEOFORMATENC_MPEG4, NULL); - - return ret; -} - -void mix_videoformatenc_mpeg4_finalize(GObject * obj) { - /* clean up here. 
*/ - - /*MixVideoFormatEnc_MPEG4 *mix = MIX_VIDEOFORMATENC_MPEG4(obj); */ - GObjectClass *root_class = (GObjectClass *) parent_class; - - LOG_V( "\n"); - - /* Chain up parent */ - if (root_class->finalize) { - root_class->finalize(obj); - } -} - -MixVideoFormatEnc_MPEG4 * -mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix) { - return (MixVideoFormatEnc_MPEG4 *) g_object_ref(G_OBJECT(mix)); -} - -/*MPEG-4:2 vmethods implementation */ -MIX_RESULT mix_videofmtenc_mpeg4_getcaps(MixVideoFormatEnc *mix, GString *msg) { - - LOG_V( "mix_videofmtenc_mpeg4_getcaps\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - if (parent_class->getcaps) { - return parent_class->getcaps(mix, msg); - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display ) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - MixVideoConfigParamsEncMPEG4 * config_params_enc_mpeg4; - - VAStatus va_status = VA_STATUS_SUCCESS; - VASurfaceID * surfaces = NULL; - - gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - gint va_num_profiles, va_num_entrypoints; - - VAProfile *va_profiles = NULL; - VAEntrypoint *va_entrypoints = NULL; - VAConfigAttrib va_attrib[2]; - guint index; - - - /*frame_mgr and input_buf_pool is reservered for future use*/ - - if (mix == NULL || config_params_enc == NULL || va_display == NULL) { - LOG_E( - "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "begin\n"); - - - - /* Chainup parent method. 
*/ - - if (parent_class->initialize) { - ret = parent_class->initialize(mix, config_params_enc, - frame_mgr, input_buf_pool, surface_pool, - va_display); - } - - if (ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - - if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc)) { - config_params_enc_mpeg4 = - MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc); - } else { - LOG_V( - "mix_videofmtenc_mpeg4_initialize: no mpeg4 config params found\n"); - return MIX_RESULT_FAIL; - } - - g_mutex_lock(parent->objectlock); - - LOG_V( - "Start to get properities from MPEG-4:2 params\n"); - - /* get properties from MPEG4 params Object, which is special to MPEG4 format*/ - - ret = mix_videoconfigparamsenc_mpeg4_get_profile_level (config_params_enc_mpeg4, - &self->profile_and_level_indication); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_mpeg4_get_profile_level\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsenc_mpeg4_get_fixed_vti (config_params_enc_mpeg4, - &(self->fixed_vop_time_increment)); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_mpeg4_get_fixed_vti\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsenc_mpeg4_get_dlk (config_params_enc_mpeg4, - &(self->disable_deblocking_filter_idc)); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to config_params_enc_mpeg4\n"); - goto cleanup; - } - - - LOG_V( - "======MPEG4 Encode Object properities======:\n"); - - LOG_I( "self->profile_and_level_indication = %d\n", - self->profile_and_level_indication); - LOG_I( "self->fixed_vop_time_increment = %d\n\n", - self->fixed_vop_time_increment); - - LOG_V( - "Get properities from params done\n"); - - - parent->va_display = va_display; - - LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", - (guint)va_display); - - -#if 0 - /* query the vender information, can ignore*/ - va_vendor = vaQueryVendorString (va_display); - LOG_I( "Vendor = %s\n", - va_vendor); -#endif - - /*get the max number for profiles/entrypoints/attribs*/ - va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); - - va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); - - va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - - va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); - va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); - - if (va_profiles == NULL || va_entrypoints ==NULL) - { - LOG_E( - "!va_profiles || !va_entrypoints\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_I( - "va_profiles = 0x%08x\n", (guint)va_profiles); - - LOG_V( "vaQueryConfigProfiles\n"); - - - va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigProfiles\n"); - - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaQueryConfigProfiles Done\n"); - - - - /*check whether profile is supported*/ - for(index= 0; index < va_num_profiles; index++) { - if(parent->va_profile == va_profiles[index]) - break; - } - - if(index == va_num_profiles) - { - LOG_E( "Profile not supported\n"); - - ret = MIX_RESULT_FAIL; - goto cleanup; - } 
- - LOG_V( "vaQueryConfigEntrypoints\n"); - - - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints(va_display, - parent->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - for (index = 0; index < va_num_entrypoints; index ++) { - if (va_entrypoints[index] == VAEntrypointEncSlice) { - break; - } - } - - if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - va_attrib[0].type = VAConfigAttribRTFormat; - va_attrib[1].type = VAConfigAttribRateControl; - - LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaGetConfigAttributes\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if ((va_attrib[0].value & parent->va_format) == 0) { - LOG_E( "Matched format not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - if ((va_attrib[1].value & parent->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = parent->va_rcmode; - - LOG_V( "======VA Configuration======\n"); - - LOG_I( "profile = %d\n", - parent->va_profile); - LOG_I( "va_entrypoint = %d\n", - parent->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", - va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", - va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", - va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) = %d\n", - va_attrib[1].value); - - LOG_V( "vaCreateConfig\n"); - - va_status = vaCreateConfig(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2, &(parent->va_config)); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaCreateConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - /*TODO: compute the surface number*/ - int numSurfaces; - - if (parent->share_buf_mode) { - numSurfaces = 2; - } - else { - numSurfaces = 8; - parent->ci_frame_num = 0; - } - - self->surface_num = numSurfaces + parent->ci_frame_num; - - surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); - - if (surfaces == NULL) - { - LOG_E( - "Failed allocate surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_V( "vaCreateSurfaces\n"); - - va_status = vaCreateSurfaces(va_display, parent->picture_width, - parent->picture_height, parent->va_format, - numSurfaces, surfaces); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaCreateSurfaces\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if (parent->share_buf_mode) { - - LOG_V( - "We are in share buffer mode!\n"); - self->ci_shared_surfaces = - g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); - - if (self->ci_shared_surfaces == NULL) - { - LOG_E( - "Failed allocate shared surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - guint index; - for(index = 0; index < parent->ci_frame_num; index++) { - - LOG_I( "ci_frame_id = %lu\n", - parent->ci_frame_id[index]); - - LOG_V( - "vaCreateSurfaceFromCIFrame\n"); - - va_status = vaCreateSurfaceFromCIFrame(va_display, - (gulong) (parent->ci_frame_id[index]), - &self->ci_shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateSurfaceFromCIFrame\n"); - ret = MIX_RESULT_FAIL; - goto 
cleanup; - } - } - - LOG_V( - "vaCreateSurfaceFromCIFrame Done\n"); - - }// if (parent->share_buf_mode) - - self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); - - if (self->surfaces == NULL) - { - LOG_E( - "Failed allocate private surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - if (parent->share_buf_mode) { - /*shared surfaces should be put in pool first, - * because we will get it accoring to CI index*/ - for(index = 0; index < parent->ci_frame_num; index++) - self->surfaces[index] = self->ci_shared_surfaces[index]; - } - - for(index = 0; index < numSurfaces; index++) { - self->surfaces[index + parent->ci_frame_num] = surfaces[index]; - } - - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", - numSurfaces + parent->ci_frame_num); - -#if 0 //current put this in gst - images = g_malloc(sizeof(VAImage)*numSurfaces); - if (images == NULL) - { - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < numSurfaces; index++) { - //Derive an VAImage from an existing surface. - //The image buffer can then be mapped/unmapped for CPU access - va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); - } -#endif - - LOG_V( "mix_surfacepool_new\n"); - - parent->surfacepool = mix_surfacepool_new(); - if (surface_pool) - *surface_pool = parent->surfacepool; - //which is useful to check before encode - - if (parent->surfacepool == NULL) - { - LOG_E( - "Failed to mix_surfacepool_new\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "mix_surfacepool_initialize\n"); - - ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces, va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - LOG_E( "Error init failure\n"); - ret = MIX_RESULT_ALREADY_INIT; - goto cleanup; - default: - break; - } - - - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext(va_display, parent->va_config, - parent->picture_width, parent->picture_height, - VA_PROGRESSIVE, self->surfaces, parent->ci_frame_num + numSurfaces, - &(parent->va_context)); - - LOG_I( - "Created libva context width %d, height %d\n", - parent->picture_width, parent->picture_height); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", - (guint)va_status); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - guint max_size = 0; - ret = mix_videofmtenc_mpeg4_get_max_encoded_buf_size (parent, &max_size); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videofmtenc_mpeg4_get_max_encoded_buf_size\n"); - goto cleanup; - - } - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &self->coded_buf[0]); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &(self->coded_buf[1])); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -#ifdef SHOW_SRC - Display * display = XOpenDisplay (NULL); - - LOG_I( "display = 0x%08x\n", - (guint) display); - win = 
XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, - parent->picture_width, parent->picture_height, 0, 0, - WhitePixel(display, 0)); - XMapWindow(display, win); - XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - - XSync(display, False); - LOG_I( "va_display = 0x%08x\n", - (guint) va_display); - -#endif /* SHOW_SRC */ - -cleanup: - - if (ret == MIX_RESULT_SUCCESS) { - parent->initialized = TRUE; - } - - /*free profiles and entrypoints*/ - if (va_profiles) - g_free(va_profiles); - - if (va_entrypoints) - g_free (va_entrypoints); - - if (surfaces) - g_free (surfaces); - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT mix_videofmtenc_mpeg4_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - - LOG_V( "Begin\n"); - - /*currenly only support one input and output buffer*/ - - if (bufincnt != 1 || iovoutcnt != 1) { - LOG_E( - "buffer count not equel to 1\n"); - LOG_E( - "maybe some exception occurs\n"); - } - - if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { - LOG_E( - "!mix || !bufin[0] ||!iovout[0]\n"); - return MIX_RESULT_NULL_PTR; - } - -#if 0 - if (parent_class->encode) { - return parent_class->encode(mix, bufin, bufincnt, iovout, - iovoutcnt, encode_params); - } -#endif - - if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix); - - LOG_V( "Locking\n"); - g_mutex_lock(parent->objectlock); - - - //TODO: also we could move some encode Preparation work to here - - LOG_V( - "mix_videofmtenc_mpeg4_process_encode\n"); - - ret = mix_videofmtenc_mpeg4_process_encode (self, - bufin[0], iovout[0]); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_mpeg4_process_encode\n"); - goto cleanup; - } - -cleanup: - - LOG_V( "UnLocking\n"); - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix) { - - //MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - /*not chain to parent flush func*/ -#if 0 - if (parent_class->flush) { - return parent_class->flush(mix, msg); - } -#endif - - if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix); - - g_mutex_lock(mix->objectlock); - - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } - - /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) - { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (self->ref_frame != NULL) - { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; - } - - /*reset the properities*/ - self->encoded_frames = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - - g_mutex_unlock(mix->objectlock); - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_eos(MixVideoFormatEnc *mix) { - - LOG_V( "\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (parent_class->eos) { - return parent_class->eos(mix); - } - return 
MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_deinitialize(MixVideoFormatEnc *mix) { - - MixVideoFormatEnc *parent = NULL; - VAStatus va_status; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - - LOG_V( "Begin\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - return MIX_RESULT_INVALID_PARAM; - - - if (parent_class->deinitialize) { - ret = parent_class->deinitialize(mix); - } - - if (ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix); - - LOG_V( "Release frames\n"); - - g_mutex_lock(parent->objectlock); - -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } -#endif - - /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) - { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (self->ref_frame != NULL) - { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; - } - - LOG_V( "Release surfaces\n"); - - if (self->ci_shared_surfaces) - { - g_free (self->ci_shared_surfaces); - self->ci_shared_surfaces = NULL; - } - - if (self->surfaces) - { - g_free (self->surfaces); - self->surfaces = NULL; - } - - LOG_V( "vaDestroyContext\n"); - - va_status = vaDestroyContext (parent->va_display, parent->va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyContext\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaDestroyConfig\n"); - - va_status = vaDestroyConfig (parent->va_display, parent->va_config); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -cleanup: - - parent->initialized = TRUE; - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - -return ret; - -} - -MIX_RESULT mix_videofmtenc_mpeg4_send_seq_params (MixVideoFormatEnc_MPEG4 *mix) -{ - - VAStatus va_status; - VAEncSequenceParameterBufferMPEG4 mpeg4_seq_param; - VABufferID seq_para_buf_id; - - - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - - if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - /*set up the sequence params for HW*/ - mpeg4_seq_param.profile_and_level_indication = mix->profile_and_level_indication; //TODO, hard code now - mpeg4_seq_param.video_object_layer_width= parent->picture_width; - mpeg4_seq_param.video_object_layer_height= parent->picture_height; - mpeg4_seq_param.vop_time_increment_resolution = - (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; - mpeg4_seq_param.fixed_vop_time_increment= mix->fixed_vop_time_increment; - mpeg4_seq_param.bits_per_second= parent->bitrate; - mpeg4_seq_param.frame_rate = - (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; - mpeg4_seq_param.initial_qp = parent->initial_qp; - mpeg4_seq_param.min_qp = parent->min_qp; - mpeg4_seq_param.intra_period = parent->intra_period; - - - //mpeg4_seq_param.fixed_vop_rate = 30; - - - - LOG_V( - "===mpeg4 sequence params===\n"); - - LOG_I( "profile_and_level_indication = %d\n", - (guint)mpeg4_seq_param.profile_and_level_indication); - LOG_I( "intra_period = %d\n", - 
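vop_time_increment_resolution and frame_rate above are both derived with the rounding-division idiom (num + den/2) / den, which picks the nearest integer rate. Worked through for the common 29.97 fps case (values are illustrative):

    guint num = 30000, den = 1001;        /* 30000/1001 = 29.97 fps */
    guint rate = (num + den / 2) / den;   /* (30000 + 500) / 1001 == 30 */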
mpeg4_seq_param.intra_period); - LOG_I( "video_object_layer_width = %d\n", - mpeg4_seq_param.video_object_layer_width); - LOG_I( "video_object_layer_height = %d\n", - mpeg4_seq_param.video_object_layer_height); - LOG_I( "vop_time_increment_resolution = %d\n", - mpeg4_seq_param.vop_time_increment_resolution); - LOG_I( "fixed_vop_rate = %d\n", - mpeg4_seq_param.fixed_vop_rate); - LOG_I( "fixed_vop_time_increment = %d\n", - mpeg4_seq_param.fixed_vop_time_increment); - LOG_I( "bitrate = %d\n", - mpeg4_seq_param.bits_per_second); - LOG_I( "frame_rate = %d\n", - mpeg4_seq_param.frame_rate); - LOG_I( "initial_qp = %d\n", - mpeg4_seq_param.initial_qp); - LOG_I( "min_qp = %d\n", - mpeg4_seq_param.min_qp); - LOG_I( "intra_period = %d\n\n", - mpeg4_seq_param.intra_period); - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncSequenceParameterBufferType, - sizeof(mpeg4_seq_param), - 1, &mpeg4_seq_param, - &seq_para_buf_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &seq_para_buf_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; - - -} - -MIX_RESULT mix_videofmtenc_mpeg4_send_picture_parameter (MixVideoFormatEnc_MPEG4 *mix) -{ - VAStatus va_status; - VAEncPictureParameterBufferMPEG4 mpeg4_pic_param; - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - -#if 0 //not needed currently - MixVideoConfigParamsEncMPEG4 * params_mpeg4 - = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc); -#endif - - if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - return MIX_RESULT_INVALID_PARAM; - - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - /*set picture params for HW*/ - mpeg4_pic_param.reference_picture = mix->ref_frame->frame_id; - mpeg4_pic_param.reconstructed_picture = mix->rec_frame->frame_id; - mpeg4_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; - mpeg4_pic_param.picture_width = parent->picture_width; - mpeg4_pic_param.picture_height = parent->picture_height; - mpeg4_pic_param.vop_time_increment= mix->encoded_frames; - mpeg4_pic_param.picture_type = mix->is_intra ? 
VAEncPictureTypeIntra : VAEncPictureTypePredictive; - - - - LOG_V( - "======mpeg4 picture params======\n"); - LOG_I( "reference_picture = 0x%08x\n", - mpeg4_pic_param.reference_picture); - LOG_I( "reconstructed_picture = 0x%08x\n", - mpeg4_pic_param.reconstructed_picture); - LOG_I( "coded_buf = 0x%08x\n", - mpeg4_pic_param.coded_buf); - LOG_I( "coded_buf_index = %d\n", - mix->coded_buf_index); - LOG_I( "picture_width = %d\n", - mpeg4_pic_param.picture_width); - LOG_I( "picture_height = %d\n", - mpeg4_pic_param.picture_height); - LOG_I( "vop_time_increment = %d\n", - mpeg4_pic_param.vop_time_increment); - LOG_I( "picture_type = %d\n\n", - mpeg4_pic_param.picture_type); - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncPictureParameterBufferType, - sizeof(mpeg4_pic_param), - 1,&mpeg4_pic_param, - &mix->pic_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->pic_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); - return MIX_RESULT_FAIL; - } - - return MIX_RESULT_SUCCESS; - -} - - -MIX_RESULT mix_videofmtenc_mpeg4_send_slice_parameter (MixVideoFormatEnc_MPEG4 *mix) -{ - VAStatus va_status; - - guint slice_height; - guint slice_index; - guint slice_height_in_mb; - - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - - - MixVideoFormatEnc *parent = NULL; - - if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - slice_height = parent->picture_height; - - slice_height += 15; - slice_height &= (~15); - - VAEncSliceParameterBuffer slice_param; - slice_index = 0; - slice_height_in_mb = slice_height / 16; - slice_param.start_row_number = 0; - slice_param.slice_height = slice_height / 16; - slice_param.slice_flags.bits.is_intra = mix->is_intra; - slice_param.slice_flags.bits.disable_deblocking_filter_idc - = mix->disable_deblocking_filter_idc; - - LOG_V( - "======mpeg4 slice params======\n"); - - LOG_I( "start_row_number = %d\n", - (gint) slice_param.start_row_number); - LOG_I( "slice_height_in_mb = %d\n", - (gint) slice_param.slice_height); - LOG_I( "slice.is_intra = %d\n", - (gint) slice_param.slice_flags.bits.is_intra); - LOG_I( - "disable_deblocking_filter_idc = %d\n\n", - (gint) mix->disable_deblocking_filter_idc); - - va_status = vaCreateBuffer (parent->va_display, parent->va_context, - VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), - 1, &slice_param, - &mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->slice_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, - MixBuffer * bufin, MixIOVec * iovout) -{ - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus va_status = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; - VAContextID va_context; - gulong surface = 0; - guint16 width, height; - - MixVideoFrame * tmp_frame; - guint8 *buf; - - if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { - 
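The slice height above is first rounded up to a multiple of 16 so the single slice spans whole 16x16 macroblock rows. Worked through for a 1080-line picture:

    guint h = 1080;
    guint rounded = (h + 15) & ~15;   /* 1095 & ~15 == 1088, the next multiple of 16 */
    guint mb_rows = rounded / 16;     /* 68 macroblock rows in the one slice */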
LOG_E( - "mix == NUL) || bufin == NULL || iovout == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - va_display = parent->va_display; - va_context = parent->va_context; - width = parent->picture_width; - height = parent->picture_height; - - - LOG_I( "encoded_frames = %d\n", - mix->encoded_frames); - LOG_I( "is_intra = %d\n", - mix->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", - (guint) parent->ci_frame_id); - - /* determine the picture type*/ - if ((mix->encoded_frames % parent->intra_period) == 0) { - mix->is_intra = TRUE; - } else { - mix->is_intra = FALSE; - } - - LOG_I( "is_intra_picture = %d\n", - mix->is_intra); - - LOG_V( - "Get Surface from the pool\n"); - - /*current we use one surface for source data, - * one for reference and one for reconstructed*/ - /*TODO, could be refine here*/ - - if (!parent->share_buf_mode) { - LOG_V( - "We are NOT in share buffer mode\n"); - - if (mix->ref_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - LOG_V( "Get Surface Done\n"); - - - VAImage src_image; - guint8 *pvbuf; - guint8 *dst_y; - guint8 *dst_uv; - int i,j; - - LOG_V( - "map source data to surface\n"); - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); - goto cleanup; - } - - - LOG_I( - "surface id = 0x%08x\n", (guint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); - //need to destroy - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDeriveImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); - - - va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed to vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "vaImage information\n"); - LOG_I( - "image->pitches[0] = %d\n", image->pitches[0]); - LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); - LOG_I( - "image->offsets[0] = %d\n", image->offsets[0]); - LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); - LOG_I( - "image->num_planes = %d\n", image->num_planes); - LOG_I( - "image->width = %d\n", image->width); - LOG_I( - "image->height = %d\n", image->height); - - LOG_I( - "input buf size = %d\n", bufin->size); - - guint8 *inbuf = bufin->data; - - /*need to convert YUV420 to NV12*/ - dst_y = pvbuf +image->offsets[0]; - - for (i = 0; i < height; i ++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - 
dst_uv [j + 1] = - inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; - } - dst_uv += image->pitches[1]; - } - - va_status = vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDestroyImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "Map source data to surface done\n"); - - } - - else {//if (!parent->share_buf_mode) - - MixVideoFrame * frame = mix_videoframe_new(); - - if (mix->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_frame, frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "get reference surface from pool failed\n"); - goto cleanup; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_frame, frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get recontructed surface from pool failed\n"); - goto cleanup; - } - } - - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - guint ci_idx; - memcpy (&ci_idx, bufin->data, bufin->size); - - LOG_I( - "surface_num = %d\n", mix->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > mix->surface_num - 2) { - LOG_E( - "the CI frame idx is too bigger than CI frame number\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_frame, frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - goto cleanup; - } - } - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - - } - - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(guint)va_context); - LOG_I( "surface = 0x%08x\n",(guint)surface); - LOG_I( "va_display = 0x%08x\n",(guint)va_display); - - - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - ret = mix_videofmtenc_mpeg4_send_encode_command (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - goto cleanup; - } - - - if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - } - - if (mix->encoded_frames == 0) { - mix->encoded_frames ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - - mix->last_frame = mix->cur_frame; - - - /* determine the picture type*/ - if ((mix->encoded_frames % 
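The copy loop above repacks planar I420 input (a w*h Y plane followed by w*h/4 U and V planes) into the NV12 layout of the mapped surface: Y rows are copied at the driver's pitch, and U/V samples are interleaved into one UV plane. The chroma step in isolation, as a sketch (w, h, pitch, inbuf and dst are assumed to come from the surrounding code):

    const guint8 *u = inbuf + w * h;           /* I420 U plane */
    const guint8 *v = inbuf + w * h * 5 / 4;   /* I420 V plane */
    int i, j;

    for (i = 0; i < h / 2; i++) {
        for (j = 0; j < w / 2; j++) {
            dst[2 * j]     = u[i * (w / 2) + j];   /* U on even bytes */
            dst[2 * j + 1] = v[i * (w / 2) + j];   /* V on odd bytes  */
        }
        dst += pitch;                              /* advance by pitch, not width */
    }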
parent->intra_period) == 0) { - mix->is_intra = TRUE; - } else { - mix->is_intra = FALSE; - } - - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - - - } - - LOG_V( "vaSyncSurface\n"); - - va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaSyncSurface\n"); - //return MIX_RESULT_FAIL; - } - - - LOG_V( - "Start to get encoded data\n"); - - /*get encoded data from the VA buffer*/ - va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - VACodedBufferSegment *coded_seg = NULL; - int num_seg = 0; - guint total_size = 0; - guint size = 0; - - coded_seg = (VACodedBufferSegment *)buf; - num_seg = 1; - - while (1) { - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - - coded_seg = coded_seg->next; - num_seg ++; - } - - -#if 0 - // first 4 bytes is the size of the buffer - memcpy (&(iovout->data_size), (void*)buf, 4); - //size = (guint*) buf; -#endif - - iovout->data_size = total_size; - - if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. - - iovout->data = g_malloc (iovout->data_size); - if (iovout->data == NULL) { - LOG_E( "iovout->data == NULL\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - //memcpy (iovout->data, buf + 16, iovout->data_size); - - coded_seg = (VACodedBufferSegment *)buf; - total_size = 0; - - while (1) { - - memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - - coded_seg = coded_seg->next; - } - - iovout->buffer_size = iovout->data_size; - - LOG_I( - "out size is = %d\n", iovout->data_size); - - va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "get encoded data done\n"); - - if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - } - - if (mix->encoded_frames == 1) { - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - ret = mix_videofmtenc_mpeg4_send_encode_command (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - goto cleanup; - } - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - } - - VASurfaceStatus status; - - /*query the status of current surface*/ - va_status = vaQuerySurfaceStatus(va_display, surface, &status); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaQuerySurfaceStatus\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - mix->pic_skipped = status & VASurfaceSkipped; - - //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame); - - if (parent->need_display) { - ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - goto cleanup; - } - - ret = 
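A mapped coded buffer yields a linked list of VACodedBufferSegment records rather than one flat blob, which is why the code above sizes the output in a first pass and copies segment by segment in a second. The sizing pass as a sketch (dpy and coded_buf assumed):

    VACodedBufferSegment *seg = NULL;
    guint total = 0;

    vaMapBuffer (dpy, coded_buf, (void **) &seg);
    for (; seg != NULL; seg = (VACodedBufferSegment *) seg->next)
        total += seg->size;   /* accumulate the full bitstream size */
    /* second pass (not shown): memcpy each seg->buf, then vaUnmapBuffer (dpy, coded_buf) */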
mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_framemanager_enqueue\n"); - goto cleanup; - } - } - - /*update the reference surface and reconstructed surface */ - if (!mix->pic_skipped) { - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - } - -#if 0 - if (mix->ref_frame != NULL) - mix_videoframe_unref (mix->ref_frame); - mix->ref_frame = mix->rec_frame; - - mix_videoframe_unref (mix->cur_frame); -#endif - - mix->encoded_frames ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - mix->last_frame = mix->cur_frame; - - if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_frame); - mix->cur_frame = NULL; - } - - -cleanup: - - if (ret != MIX_RESULT_SUCCESS) { - if (iovout->data) { - g_free (iovout->data); - iovout->data = NULL; - } - } - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size ( - MixVideoFormatEnc *mix, guint * max_size) -{ - - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) - { - LOG_E( - "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - parent = MIX_VIDEOFORMATENC(mix); - MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix); - - if (MIX_IS_VIDEOFORMATENC_MPEG4(self)) { - - if (self->coded_buf_size > 0) { - *max_size = self->coded_buf_size; - LOG_V ("Already calculate the max encoded size, get the value directly"); - return MIX_RESULT_SUCCESS; - } - - /*base on the rate control mode to calculate the defaule encoded buffer size*/ - if (self->va_rcmode == VA_RC_NONE) { - self->coded_buf_size = - (parent->picture_width* parent->picture_height * 400) / (16 * 16); - // set to value according to QP - } - else { - self->coded_buf_size = parent->bitrate/ 4; - } - - self->coded_buf_size = - max (self->coded_buf_size , - (parent->picture_width* parent->picture_height * 400) / (16 * 16)); - - /*in case got a very large user input bit rate value*/ - self->coded_buf_size = - max(self->coded_buf_size, - (parent->picture_width * parent->picture_height * 1.5 * 8)); - self->coded_buf_size = (self->coded_buf_size + 15) &(~15); - } - else - { - LOG_E( - "not MPEG4 video encode Object\n"); - return MIX_RESULT_INVALID_PARAM; - } - - *max_size = self->coded_buf_size; - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_mpeg4_send_encode_command (MixVideoFormatEnc_MPEG4 *mix) -{ - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - { - if (mix->encoded_frames == 0) { - ret = mix_videofmtenc_mpeg4_send_seq_params (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_seq_params\n"); - return MIX_RESULT_FAIL; - } - } - - ret = mix_videofmtenc_mpeg4_send_picture_parameter (mix); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_picture_parameter\n"); - return MIX_RESULT_FAIL; - } - - ret = mix_videofmtenc_mpeg4_send_slice_parameter (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_slice_parameter\n"); - return MIX_RESULT_FAIL; - } - - } - else - { - LOG_E( - "not MPEG4 video encode Object\n"); - return MIX_RESULT_INVALID_PARAM; - } - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} diff --git a/mix_video/src/mixvideoformatenc_mpeg4.cpp b/mix_video/src/mixvideoformatenc_mpeg4.cpp new file mode 100644 index 0000000..c74ed8f --- 
/dev/null +++ b/mix_video/src/mixvideoformatenc_mpeg4.cpp @@ -0,0 +1,1813 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include +#include +#include + +#include "mixvideolog.h" + +#include "mixvideoformatenc_mpeg4.h" +#include "mixvideoconfigparamsenc_mpeg4.h" +#include + +#undef SHOW_SRC + +#ifdef SHOW_SRC +Window win = 0; +#endif /* SHOW_SRC */ + + +/* The parent class. The pointer will be saved + * in this class's initialization. The pointer + * can be used for chaining method call if needed. + */ +static MixVideoFormatEncClass *parent_class = NULL; + +static void mix_videoformatenc_mpeg4_finalize(GObject * obj); + +/* + * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC + */ +G_DEFINE_TYPE (MixVideoFormatEnc_MPEG4, mix_videoformatenc_mpeg4, MIX_TYPE_VIDEOFORMATENC); + +static void mix_videoformatenc_mpeg4_init(MixVideoFormatEnc_MPEG4 * self) { + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); + + /*member initialization */ + self->encoded_frames = 0; + self->pic_skipped = FALSE; + self->is_intra = TRUE; + self->cur_frame = NULL; + self->ref_frame = NULL; + self->rec_frame = NULL; + + self->ci_shared_surfaces = NULL; + self->surfaces= NULL; + self->surface_num = 0; + self->coded_buf_index = 0; + + parent->initialized = FALSE; + +} + +static void mix_videoformatenc_mpeg4_class_init( + MixVideoFormatEnc_MPEG4Class * klass) { + + /* root class */ + GObjectClass *gobject_class = (GObjectClass *) klass; + + /* direct parent class */ + MixVideoFormatEncClass *video_formatenc_class = + MIX_VIDEOFORMATENC_CLASS(klass); + + /* parent class for later use */ + parent_class = reinterpret_cast(g_type_class_peek_parent(klass)); + + /* setup finializer */ + gobject_class->finalize = mix_videoformatenc_mpeg4_finalize; + + /* setup vmethods with base implementation */ + video_formatenc_class->getcaps = mix_videofmtenc_mpeg4_getcaps; + video_formatenc_class->initialize = mix_videofmtenc_mpeg4_initialize; + video_formatenc_class->encode = mix_videofmtenc_mpeg4_encode; + video_formatenc_class->flush = mix_videofmtenc_mpeg4_flush; + video_formatenc_class->eos = mix_videofmtenc_mpeg4_eos; + video_formatenc_class->deinitialize = mix_videofmtenc_mpeg4_deinitialize; + video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_mpeg4_get_max_encoded_buf_size; +} + +MixVideoFormatEnc_MPEG4 * +mix_videoformatenc_mpeg4_new(void) { + MixVideoFormatEnc_MPEG4 *ret = reinterpret_cast( + g_object_new(MIX_TYPE_VIDEOFORMATENC_MPEG4, NULL)); + + return 
+
+void mix_videoformatenc_mpeg4_finalize(GObject * obj) {
+    /* clean up here. */
+
+    /*MixVideoFormatEnc_MPEG4 *mix = MIX_VIDEOFORMATENC_MPEG4(obj); */
+    GObjectClass *root_class = (GObjectClass *) parent_class;
+
+    LOG_V( "\n");
+
+    /* Chain up parent */
+    if (root_class->finalize) {
+        root_class->finalize(obj);
+    }
+}
+
+MixVideoFormatEnc_MPEG4 *
+mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix) {
+    return (MixVideoFormatEnc_MPEG4 *) g_object_ref(G_OBJECT(mix));
+}
+
+/* MPEG-4:2 vmethods implementation */
+MIX_RESULT mix_videofmtenc_mpeg4_getcaps(MixVideoFormatEnc *mix, GString *msg) {
+
+    LOG_V( "mix_videofmtenc_mpeg4_getcaps\n");
+
+    if (mix == NULL) {
+        LOG_E( "mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    if (parent_class->getcaps) {
+        return parent_class->getcaps(mix, msg);
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+#define CLEAN_UP {\
+    if (ret == MIX_RESULT_SUCCESS) {\
+        parent->initialized = TRUE; \
+    }\
+    /*free profiles and entrypoints*/\
+    if (va_profiles) \
+        g_free(va_profiles);\
+    if (va_entrypoints)\
+        g_free (va_entrypoints); \
+    if (surfaces) \
+        g_free (surfaces);\
+    g_mutex_unlock(parent->objectlock); \
+    LOG_V( "end\n"); \
+    return ret;}
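/* Reviewer sketch (annotation, not part of the patch): CLEAN_UP packs the
 * function epilogue into a macro so every error path frees the probe
 * arrays, drops the object lock, and returns. It replaces the `goto
 * cleanup` idiom used by the C encoders this patch deletes (see
 * mixvideoformatenc_preview.c further below). The equivalent goto form,
 * with a hypothetical function name, for comparison: */
static MIX_RESULT example_cleanup_shape(MixVideoFormatEnc *parent) {
    MIX_RESULT ret = MIX_RESULT_SUCCESS;
    VAProfile *va_profiles = NULL;
    VAEntrypoint *va_entrypoints = NULL;
    VASurfaceID *surfaces = NULL;
    /* ... body: on failure set ret and `goto cleanup;` ... */
cleanup:
    if (ret == MIX_RESULT_SUCCESS)
        parent->initialized = TRUE;
    g_free(va_profiles);      /* g_free(NULL) is a safe no-op */
    g_free(va_entrypoints);
    g_free(surfaces);
    g_mutex_unlock(parent->objectlock);
    return ret;
}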
+
+MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay va_display ) {
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    MixVideoFormatEnc *parent = NULL;
+    MixVideoConfigParamsEncMPEG4 * config_params_enc_mpeg4;
+
+    VAStatus va_status = VA_STATUS_SUCCESS;
+    VASurfaceID * surfaces = NULL;
+
+    gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs;
+    gint va_num_profiles, va_num_entrypoints;
+
+    VAProfile *va_profiles = NULL;
+    VAEntrypoint *va_entrypoints = NULL;
+    VAConfigAttrib va_attrib[2];
+    guint index;
+
+    /* frame_mgr and input_buf_pool are reserved for future use */
+
+    if (mix == NULL || config_params_enc == NULL || va_display == NULL) {
+        LOG_E( "mix == NULL || config_params_enc == NULL || va_display == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "begin\n");
+
+    /* Chain up parent method. */
+    if (parent_class->initialize) {
+        ret = parent_class->initialize(mix, config_params_enc,
+                frame_mgr, input_buf_pool, surface_pool,
+                va_display);
+    }
+
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        return ret;
+    }
+
+    if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix))
+        return MIX_RESULT_INVALID_PARAM;
+
+    parent = MIX_VIDEOFORMATENC(&(mix->parent));
+    MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix);
+
+    if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc)) {
+        config_params_enc_mpeg4 =
+                MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc);
+    } else {
+        LOG_V( "mix_videofmtenc_mpeg4_initialize: no mpeg4 config params found\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    g_mutex_lock(parent->objectlock);
+
+    LOG_V( "Start to get properties from MPEG-4:2 params\n");
+
+    /* get properties from the MPEG4 params object, which is specific to the MPEG4 format */
+
+    ret = mix_videoconfigparamsenc_mpeg4_get_profile_level (config_params_enc_mpeg4,
+            &self->profile_and_level_indication);
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E( "Failed to mix_videoconfigparamsenc_mpeg4_get_profile_level\n");
+        CLEAN_UP;
+    }
+
+    ret = mix_videoconfigparamsenc_mpeg4_get_fixed_vti (config_params_enc_mpeg4,
+            &(self->fixed_vop_time_increment));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E( "Failed to mix_videoconfigparamsenc_mpeg4_get_fixed_vti\n");
+        CLEAN_UP;
+    }
+
+    ret = mix_videoconfigparamsenc_mpeg4_get_dlk (config_params_enc_mpeg4,
+            &(self->disable_deblocking_filter_idc));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E( "Failed to mix_videoconfigparamsenc_mpeg4_get_dlk\n");
+        CLEAN_UP;
+    }
+
+    LOG_V( "======MPEG4 Encode Object properties======:\n");
+    LOG_I( "self->profile_and_level_indication = %d\n",
+            self->profile_and_level_indication);
+    LOG_I( "self->fixed_vop_time_increment = %d\n\n",
+            self->fixed_vop_time_increment);
+
+    LOG_V( "Get properties from params done\n");
+
+    parent->va_display = va_display;
+
+    LOG_V( "Get Display\n");
+    LOG_I( "Display = 0x%08x\n", (guint)va_display);
+
+#if 0
+    /* query the vendor information, can ignore */
+    va_vendor = vaQueryVendorString (va_display);
+    LOG_I( "Vendor = %s\n", va_vendor);
+#endif
+
+    /*get the max number for profiles/entrypoints/attribs*/
+    va_max_num_profiles = vaMaxNumProfiles(va_display);
+    LOG_I( "va_max_num_profiles = %d\n", va_max_num_profiles);
+
+    va_max_num_entrypoints = vaMaxNumEntrypoints(va_display);
+    LOG_I( "va_max_num_entrypoints = %d\n", va_max_num_entrypoints);
+
+    va_max_num_attribs = vaMaxNumConfigAttributes(va_display);
+    LOG_I( "va_max_num_attribs = %d\n", va_max_num_attribs);
+
+    va_profiles = reinterpret_cast<VAProfile *>(g_malloc(sizeof(VAProfile)*va_max_num_profiles));
+    va_entrypoints = reinterpret_cast<VAEntrypoint *>(g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints));
+
+    if (va_profiles == NULL || va_entrypoints == NULL)
+    {
+        LOG_E( "!va_profiles || !va_entrypoints\n");
+        ret = MIX_RESULT_NO_MEMORY;
+        CLEAN_UP;
+    }
+
+    LOG_I( "va_profiles = 0x%08x\n", (guint)va_profiles);
+
+    LOG_V( "vaQueryConfigProfiles\n");
+
+    va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles);
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed to call vaQueryConfigProfiles\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+
+    LOG_V( "vaQueryConfigProfiles Done\n");
+
+    /*check whether the profile is supported*/
+    for(index = 0; index < va_num_profiles; index++) {
+        if(parent->va_profile == va_profiles[index])
+            break;
+    }
+
+    if(index == va_num_profiles)
+    {
+        LOG_E( "Profile not supported\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
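/* Reviewer sketch (annotation, not part of the patch): the scan above is
 * the stock libva capability probe: vaMaxNumProfiles() sizes the array,
 * vaQueryConfigProfiles() fills it, and a linear search confirms the
 * requested profile. In outline (error handling elided):
 *
 *     int n = vaMaxNumProfiles(dpy);
 *     VAProfile *list = g_new(VAProfile, n);
 *     vaQueryConfigProfiles(dpy, list, &n);
 *     for (i = 0; i < n && list[i] != wanted; i++) ;
 *     supported = (i < n);
 *     g_free(list);
 *
 * The entrypoint check that follows repeats the same pattern with
 * vaQueryConfigEntrypoints() and VAEntrypointEncSlice. */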
+
+    LOG_V( "vaQueryConfigEntrypoints\n");
+
+    /*Check entry point*/
+    va_status = vaQueryConfigEntrypoints(va_display,
+            parent->va_profile,
+            va_entrypoints, &va_num_entrypoints);
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed to call vaQueryConfigEntrypoints\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+
+    for (index = 0; index < va_num_entrypoints; index ++) {
+        if (va_entrypoints[index] == VAEntrypointEncSlice) {
+            break;
+        }
+    }
+
+    if (index == va_num_entrypoints) {
+        LOG_E( "Entrypoint not found\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+
+    va_attrib[0].type = VAConfigAttribRTFormat;
+    va_attrib[1].type = VAConfigAttribRateControl;
+
+    LOG_V( "vaGetConfigAttributes\n");
+
+    va_status = vaGetConfigAttributes(va_display, parent->va_profile,
+            parent->va_entrypoint,
+            &va_attrib[0], 2);
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed to call vaGetConfigAttributes\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+
+    if ((va_attrib[0].value & parent->va_format) == 0) {
+        LOG_E( "Matched format not found\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+
+    if ((va_attrib[1].value & parent->va_rcmode) == 0) {
+        LOG_E( "RC mode not found\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+
+    va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420;
+    va_attrib[1].value = parent->va_rcmode;
+
+    LOG_V( "======VA Configuration======\n");
+    LOG_I( "profile = %d\n", parent->va_profile);
+    LOG_I( "va_entrypoint = %d\n", parent->va_entrypoint);
+    LOG_I( "va_attrib[0].type = %d\n", va_attrib[0].type);
+    LOG_I( "va_attrib[1].type = %d\n", va_attrib[1].type);
+    LOG_I( "va_attrib[0].value (Format) = %d\n", va_attrib[0].value);
+    LOG_I( "va_attrib[1].value (RC mode) = %d\n", va_attrib[1].value);
+
+    LOG_V( "vaCreateConfig\n");
+
+    va_status = vaCreateConfig(va_display, parent->va_profile,
+            parent->va_entrypoint,
+            &va_attrib[0], 2, &(parent->va_config));
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed vaCreateConfig\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+
+    /*TODO: compute the surface number*/
+    int numSurfaces;
+
+    if (parent->share_buf_mode) {
+        numSurfaces = 2;
+    }
+    else {
+        numSurfaces = 8;
+        parent->ci_frame_num = 0;
+    }
+
+    self->surface_num = numSurfaces + parent->ci_frame_num;
+
+    surfaces = reinterpret_cast<VASurfaceID *>(g_malloc(sizeof(VASurfaceID)*numSurfaces));
+
+    if (surfaces == NULL)
+    {
+        LOG_E( "Failed to allocate surfaces\n");
+        ret = MIX_RESULT_NO_MEMORY;
+        CLEAN_UP;
+    }
+
+    LOG_V( "vaCreateSurfaces\n");
+
+    va_status = vaCreateSurfaces(va_display, parent->picture_width,
+            parent->picture_height, parent->va_format,
+            numSurfaces, surfaces);
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed vaCreateSurfaces\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
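    /* Reviewer note (annotation, not part of the patch): surface budgeting
     * above, as a worked example from this function's arithmetic: without
     * share_buf_mode the encoder allocates numSurfaces = 8 scratch surfaces
     * and forces ci_frame_num = 0, so surface_num = 8; with share_buf_mode
     * it allocates only numSurfaces = 2 (reference + reconstructed) and
     * wraps the camera's CI frames, so e.g. ci_frame_num = 4 gives
     * surface_num = 2 + 4 = 6. */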
+
+    if (parent->share_buf_mode) {
+
+        LOG_V( "We are in share buffer mode!\n");
+        self->ci_shared_surfaces = reinterpret_cast<VASurfaceID *>(
+                g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num));
+
+        if (self->ci_shared_surfaces == NULL)
+        {
+            LOG_E( "Failed to allocate shared surfaces\n");
+            ret = MIX_RESULT_NO_MEMORY;
+            CLEAN_UP;
+        }
+
+        guint index;
+        for(index = 0; index < parent->ci_frame_num; index++) {
+
+            LOG_I( "ci_frame_id = %lu\n", parent->ci_frame_id[index]);
+
+            LOG_V( "vaCreateSurfaceFromCIFrame\n");
+
+            va_status = vaCreateSurfaceFromCIFrame(va_display,
+                    (gulong) (parent->ci_frame_id[index]),
+                    &self->ci_shared_surfaces[index]);
+            if (va_status != VA_STATUS_SUCCESS)
+            {
+                LOG_E( "Failed to vaCreateSurfaceFromCIFrame\n");
+                ret = MIX_RESULT_FAIL;
+                CLEAN_UP;
+            }
+        }
+
+        LOG_V( "vaCreateSurfaceFromCIFrame Done\n");
+
+    }// if (parent->share_buf_mode)
+
+    self->surfaces = reinterpret_cast<VASurfaceID *>(g_malloc(sizeof(VASurfaceID) * self->surface_num));
+
+    if (self->surfaces == NULL)
+    {
+        LOG_E( "Failed to allocate private surfaces\n");
+        ret = MIX_RESULT_NO_MEMORY;
+        CLEAN_UP;
+    }
+
+    if (parent->share_buf_mode) {
+        /*shared surfaces should be put in the pool first,
+         * because we will get them according to the CI index*/
+        for(index = 0; index < parent->ci_frame_num; index++)
+            self->surfaces[index] = self->ci_shared_surfaces[index];
+    }
+
+    for(index = 0; index < numSurfaces; index++) {
+        self->surfaces[index + parent->ci_frame_num] = surfaces[index];
+    }
+
+    LOG_V( "assign surface Done\n");
+    LOG_I( "Created %d libva surfaces\n", numSurfaces + parent->ci_frame_num);
+
+#if 0 //currently this is done in gst
+    images = g_malloc(sizeof(VAImage)*numSurfaces);
+    if (images == NULL)
+    {
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    for (index = 0; index < numSurfaces; index++) {
+        //Derive a VAImage from an existing surface.
+        //The image buffer can then be mapped/unmapped for CPU access
+        va_status = vaDeriveImage(va_display, surfaces[index], &images[index]);
+    }
+#endif
+
+    LOG_V( "mix_surfacepool_new\n");
+
+    parent->surfacepool = mix_surfacepool_new();
+    if (surface_pool)
+        *surface_pool = parent->surfacepool;
+    //which is useful to check before encode
+
+    if (parent->surfacepool == NULL)
+    {
+        LOG_E( "Failed to mix_surfacepool_new\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+
+    LOG_V( "mix_surfacepool_initialize\n");
+
+    ret = mix_surfacepool_initialize(parent->surfacepool,
+            self->surfaces, parent->ci_frame_num + numSurfaces, va_display);
+
+    switch (ret)
+    {
+    case MIX_RESULT_SUCCESS:
+        break;
+    case MIX_RESULT_ALREADY_INIT:
+        LOG_E( "Error: surface pool already initialized\n");
+        ret = MIX_RESULT_ALREADY_INIT;
+        CLEAN_UP;
+    default:
+        break;
+    }
+
+    //Initialize and save the VA context ID
+    LOG_V( "vaCreateContext\n");
+
+    va_status = vaCreateContext(va_display, parent->va_config,
+            parent->picture_width, parent->picture_height,
+            VA_PROGRESSIVE, self->surfaces, parent->ci_frame_num + numSurfaces,
+            &(parent->va_context));
+
+    LOG_I( "Created libva context width %d, height %d\n",
+            parent->picture_width, parent->picture_height);
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed to vaCreateContext\n");
+        LOG_I( "va_status = %d\n", (guint)va_status);
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+
+    guint max_size = 0;
+    ret = mix_videofmtenc_mpeg4_get_max_encoded_buf_size (parent, &max_size);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Failed to mix_videofmtenc_mpeg4_get_max_encoded_buf_size\n");
+        CLEAN_UP;
+    }
+
+    /*Create coded buffer for output*/
+    va_status = vaCreateBuffer (va_display, parent->va_context,
+            VAEncCodedBufferType,
+            self->coded_buf_size,  //
+            1, NULL,
+            &self->coded_buf[0]);
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed to vaCreateBuffer: VAEncCodedBufferType\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+
+    /*Create coded buffer for output*/
+    va_status = vaCreateBuffer (va_display, parent->va_context,
+            VAEncCodedBufferType,
+            self->coded_buf_size,  //
+            1, NULL,
+            &(self->coded_buf[1]));
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed to vaCreateBuffer: VAEncCodedBufferType\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
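    /* Reviewer note (annotation, not part of the patch): two coded buffers
     * are created so process_encode() can ping-pong them: while the
     * hardware fills coded_buf[coded_buf_index] for frame N, the CPU
     * drains last_coded_buf from frame N-1. The rotation, exactly as it
     * appears further below in this file:
     *
     *     mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index];
     *     mix->coded_buf_index ++;
     *     mix->coded_buf_index %= 2;
     */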
+
+#ifdef SHOW_SRC
+    Display * display = XOpenDisplay (NULL);
+
+    LOG_I( "display = 0x%08x\n", (guint) display);
+    win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0,
+            parent->picture_width, parent->picture_height, 0, 0,
+            WhitePixel(display, 0));
+    XMapWindow(display, win);
+    XSelectInput(display, win, KeyPressMask | StructureNotifyMask);
+
+    XSync(display, False);
+    LOG_I( "va_display = 0x%08x\n", (guint) va_display);
+
+#endif /* SHOW_SRC */
+    CLEAN_UP;
+}
+#undef CLEAN_UP
+
+#define CLEAN_UP {\
+    LOG_V( "UnLocking\n"); \
+    g_mutex_unlock(parent->objectlock);\
+    LOG_V( "end\n"); \
+    return ret;}
+
+MIX_RESULT mix_videofmtenc_mpeg4_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params) {
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    MixVideoFormatEnc *parent = NULL;
+
+    LOG_V( "Begin\n");
+
+    /*currently we only support one input and one output buffer*/
+
+    if (bufincnt != 1 || iovoutcnt != 1) {
+        LOG_E( "buffer count not equal to 1\n");
+        LOG_E( "maybe some exception occurred\n");
+    }
+
+    if (mix == NULL || bufin[0] == NULL || iovout[0] == NULL) {
+        LOG_E( "!mix || !bufin[0] || !iovout[0]\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+#if 0
+    if (parent_class->encode) {
+        return parent_class->encode(mix, bufin, bufincnt, iovout,
+                iovoutcnt, encode_params);
+    }
+#endif
+
+    if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix))
+        return MIX_RESULT_INVALID_PARAM;
+
+    parent = MIX_VIDEOFORMATENC(&(mix->parent));
+    MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix);
+
+    LOG_V( "Locking\n");
+    g_mutex_lock(parent->objectlock);
+
+    //TODO: we could also move some encode preparation work here
+
+    LOG_V( "mix_videofmtenc_mpeg4_process_encode\n");
+
+    ret = mix_videofmtenc_mpeg4_process_encode (self, bufin[0], iovout[0]);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Failed mix_videofmtenc_mpeg4_process_encode\n");
+        CLEAN_UP;
+    }
+    CLEAN_UP;
+}
+#undef CLEAN_UP
+
+MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix) {
+
+    //MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    LOG_V( "Begin\n");
+
+    if (mix == NULL) {
+        LOG_E( "mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    /*do not chain to the parent flush func*/
+#if 0
+    if (parent_class->flush) {
+        return parent_class->flush(mix, msg);
+    }
+#endif
+
+    if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix))
+        return MIX_RESULT_INVALID_PARAM;
+
+    MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix);
+
+    g_mutex_lock(mix->objectlock);
+
+    /*unref the current source surface*/
+    if (self->cur_frame != NULL)
+    {
+        mix_videoframe_unref (self->cur_frame);
+        self->cur_frame = NULL;
+    }
+
+    /*unref the reconstructed surface*/
+    if (self->rec_frame != NULL)
+    {
+        mix_videoframe_unref (self->rec_frame);
+        self->rec_frame = NULL;
+    }
+
+    /*unref the reference surface*/
+    if (self->ref_frame != NULL)
+    {
+        mix_videoframe_unref (self->ref_frame);
+        self->ref_frame = NULL;
+    }
+
+    /*reset the properties*/
+    self->encoded_frames = 0;
+    self->pic_skipped = FALSE;
+    self->is_intra = TRUE;
+
+    g_mutex_unlock(mix->objectlock);
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_mpeg4_eos(MixVideoFormatEnc *mix) {
+
+    LOG_V( "\n");
+
+    if (mix == NULL) {
+        LOG_E( "mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    if (parent_class->eos) {
+        return parent_class->eos(mix);
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+#define CLEAN_UP {\
+    parent->initialized = FALSE;\
+    g_mutex_unlock(parent->objectlock); \
+    LOG_V( "end\n"); \
+    return ret;}
+
+MIX_RESULT mix_videofmtenc_mpeg4_deinitialize(MixVideoFormatEnc *mix) {
+
+    MixVideoFormatEnc *parent = NULL;
VAStatus va_status; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + return MIX_RESULT_INVALID_PARAM; + + + if (parent_class->deinitialize) { + ret = parent_class->deinitialize(mix); + } + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix); + + LOG_V( "Release frames\n"); + + g_mutex_lock(parent->objectlock); + +#if 0 + /*unref the current source surface*/ + if (self->cur_frame != NULL) + { + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; + } +#endif + + /*unref the reconstructed surface*/ + if (self->rec_frame != NULL) + { + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_frame != NULL) + { + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; + } + + LOG_V( "Release surfaces\n"); + + if (self->ci_shared_surfaces) + { + g_free (self->ci_shared_surfaces); + self->ci_shared_surfaces = NULL; + } + + if (self->surfaces) + { + g_free (self->surfaces); + self->surfaces = NULL; + } + + LOG_V( "vaDestroyContext\n"); + + va_status = vaDestroyContext (parent->va_display, parent->va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyContext\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + + LOG_V( "vaDestroyConfig\n"); + + va_status = vaDestroyConfig (parent->va_display, parent->va_config); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyConfig\n"); + ret = MIX_RESULT_FAIL; + CLEAN_UP; + } + CLEAN_UP; +} +#undef CLEAN_UP + +MIX_RESULT mix_videofmtenc_mpeg4_send_seq_params (MixVideoFormatEnc_MPEG4 *mix) +{ + + VAStatus va_status; + VAEncSequenceParameterBufferMPEG4 mpeg4_seq_param; + VABufferID seq_para_buf_id; + + + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set up the sequence params for HW*/ + mpeg4_seq_param.profile_and_level_indication = mix->profile_and_level_indication; //TODO, hard code now + mpeg4_seq_param.video_object_layer_width= parent->picture_width; + mpeg4_seq_param.video_object_layer_height= parent->picture_height; + mpeg4_seq_param.vop_time_increment_resolution = + (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; + mpeg4_seq_param.fixed_vop_time_increment= mix->fixed_vop_time_increment; + mpeg4_seq_param.bits_per_second= parent->bitrate; + mpeg4_seq_param.frame_rate = + (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; + mpeg4_seq_param.initial_qp = parent->initial_qp; + mpeg4_seq_param.min_qp = parent->min_qp; + mpeg4_seq_param.intra_period = parent->intra_period; + + + //mpeg4_seq_param.fixed_vop_rate = 30; + + + + LOG_V( + "===mpeg4 sequence params===\n"); + + LOG_I( "profile_and_level_indication = %d\n", + (guint)mpeg4_seq_param.profile_and_level_indication); + LOG_I( "intra_period = %d\n", + mpeg4_seq_param.intra_period); + LOG_I( "video_object_layer_width = %d\n", + mpeg4_seq_param.video_object_layer_width); + LOG_I( "video_object_layer_height = %d\n", + mpeg4_seq_param.video_object_layer_height); + LOG_I( "vop_time_increment_resolution = 
%d\n", + mpeg4_seq_param.vop_time_increment_resolution); + LOG_I( "fixed_vop_rate = %d\n", + mpeg4_seq_param.fixed_vop_rate); + LOG_I( "fixed_vop_time_increment = %d\n", + mpeg4_seq_param.fixed_vop_time_increment); + LOG_I( "bitrate = %d\n", + mpeg4_seq_param.bits_per_second); + LOG_I( "frame_rate = %d\n", + mpeg4_seq_param.frame_rate); + LOG_I( "initial_qp = %d\n", + mpeg4_seq_param.initial_qp); + LOG_I( "min_qp = %d\n", + mpeg4_seq_param.min_qp); + LOG_I( "intra_period = %d\n\n", + mpeg4_seq_param.intra_period); + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncSequenceParameterBufferType, + sizeof(mpeg4_seq_param), + 1, &mpeg4_seq_param, + &seq_para_buf_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &seq_para_buf_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; + + +} + +MIX_RESULT mix_videofmtenc_mpeg4_send_picture_parameter (MixVideoFormatEnc_MPEG4 *mix) +{ + VAStatus va_status; + VAEncPictureParameterBufferMPEG4 mpeg4_pic_param; + MixVideoFormatEnc *parent = NULL; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + +#if 0 //not needed currently + MixVideoConfigParamsEncMPEG4 * params_mpeg4 + = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc); +#endif + + if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + return MIX_RESULT_INVALID_PARAM; + + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + /*set picture params for HW*/ + mpeg4_pic_param.reference_picture = mix->ref_frame->frame_id; + mpeg4_pic_param.reconstructed_picture = mix->rec_frame->frame_id; + mpeg4_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; + mpeg4_pic_param.picture_width = parent->picture_width; + mpeg4_pic_param.picture_height = parent->picture_height; + mpeg4_pic_param.vop_time_increment= mix->encoded_frames; + mpeg4_pic_param.picture_type = mix->is_intra ? 
VAEncPictureTypeIntra : VAEncPictureTypePredictive; + + + + LOG_V( + "======mpeg4 picture params======\n"); + LOG_I( "reference_picture = 0x%08x\n", + mpeg4_pic_param.reference_picture); + LOG_I( "reconstructed_picture = 0x%08x\n", + mpeg4_pic_param.reconstructed_picture); + LOG_I( "coded_buf = 0x%08x\n", + mpeg4_pic_param.coded_buf); + LOG_I( "coded_buf_index = %d\n", + mix->coded_buf_index); + LOG_I( "picture_width = %d\n", + mpeg4_pic_param.picture_width); + LOG_I( "picture_height = %d\n", + mpeg4_pic_param.picture_height); + LOG_I( "vop_time_increment = %d\n", + mpeg4_pic_param.vop_time_increment); + LOG_I( "picture_type = %d\n\n", + mpeg4_pic_param.picture_type); + + va_status = vaCreateBuffer(parent->va_display, parent->va_context, + VAEncPictureParameterBufferType, + sizeof(mpeg4_pic_param), + 1,&mpeg4_pic_param, + &mix->pic_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->pic_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); + return MIX_RESULT_FAIL; + } + + return MIX_RESULT_SUCCESS; + +} + + +MIX_RESULT mix_videofmtenc_mpeg4_send_slice_parameter (MixVideoFormatEnc_MPEG4 *mix) +{ + VAStatus va_status; + + guint slice_height; + guint slice_index; + guint slice_height_in_mb; + + if (mix == NULL) { + LOG_E("mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n\n"); + + + MixVideoFormatEnc *parent = NULL; + + if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + slice_height = parent->picture_height; + + slice_height += 15; + slice_height &= (~15); + + VAEncSliceParameterBuffer slice_param; + slice_index = 0; + slice_height_in_mb = slice_height / 16; + slice_param.start_row_number = 0; + slice_param.slice_height = slice_height / 16; + slice_param.slice_flags.bits.is_intra = mix->is_intra; + slice_param.slice_flags.bits.disable_deblocking_filter_idc + = mix->disable_deblocking_filter_idc; + + LOG_V( + "======mpeg4 slice params======\n"); + + LOG_I( "start_row_number = %d\n", + (gint) slice_param.start_row_number); + LOG_I( "slice_height_in_mb = %d\n", + (gint) slice_param.slice_height); + LOG_I( "slice.is_intra = %d\n", + (gint) slice_param.slice_flags.bits.is_intra); + LOG_I( + "disable_deblocking_filter_idc = %d\n\n", + (gint) mix->disable_deblocking_filter_idc); + + va_status = vaCreateBuffer (parent->va_display, parent->va_context, + VAEncSliceParameterBufferType, + sizeof(VAEncSliceParameterBuffer), + 1, &slice_param, + &mix->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(parent->va_display, parent->va_context, + &mix->slice_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +#define CLEAN_UP {\ + if (ret != MIX_RESULT_SUCCESS) {\ + if (iovout->data) {\ + g_free (iovout->data);\ + iovout->data = NULL;\ + }\ + } \ + LOG_V( "end\n"); \ + return ret;} + +MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, + MixBuffer * bufin, MixIOVec * iovout) +{ + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus va_status = VA_STATUS_SUCCESS; + VADisplay va_display = NULL; + 
+    VAContextID va_context;
+    gulong surface = 0;
+    guint16 width, height;
+
+    MixVideoFrame * tmp_frame;
+    guint8 *buf;
+
+    if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) {
+        LOG_E( "mix == NULL || bufin == NULL || iovout == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "Begin\n");
+
+    if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix))
+        return MIX_RESULT_INVALID_PARAM;
+
+    MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+    va_display = parent->va_display;
+    va_context = parent->va_context;
+    width = parent->picture_width;
+    height = parent->picture_height;
+
+    LOG_I( "encoded_frames = %d\n", mix->encoded_frames);
+    LOG_I( "is_intra = %d\n", mix->is_intra);
+    LOG_I( "ci_frame_id = 0x%08x\n", (guint) parent->ci_frame_id);
+
+    /* determine the picture type*/
+    if ((mix->encoded_frames % parent->intra_period) == 0) {
+        mix->is_intra = TRUE;
+    } else {
+        mix->is_intra = FALSE;
+    }
+
+    LOG_I( "is_intra_picture = %d\n", mix->is_intra);
+
+    LOG_V( "Get Surface from the pool\n");
+
+    /*currently we use one surface for source data,
+     * one for reference and one for reconstructed*/
+    /*TODO: this could be refined*/
+
+    if (!parent->share_buf_mode) {
+        LOG_V( "We are NOT in share buffer mode\n");
+
+        if (mix->ref_frame == NULL)
+        {
+            ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame);
+            if (ret != MIX_RESULT_SUCCESS)  //#ifdef SLEEP_SURFACE not used
+            {
+                LOG_E( "Failed to mix_surfacepool_get\n");
+                CLEAN_UP;
+            }
+        }
+
+        if (mix->rec_frame == NULL)
+        {
+            ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "Failed to mix_surfacepool_get\n");
+                CLEAN_UP;
+            }
+        }
+
+        if (parent->need_display) {
+            mix->cur_frame = NULL;
+        }
+
+        if (mix->cur_frame == NULL)
+        {
+            ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "Failed to mix_surfacepool_get\n");
+                CLEAN_UP;
+            }
+        }
+
+        LOG_V( "Get Surface Done\n");
+
+        VAImage src_image;
+        guint8 *pvbuf;
+        guint8 *dst_y;
+        guint8 *dst_uv;
+        int i,j;
+
+        LOG_V( "map source data to surface\n");
+
+        ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E( "Failed to mix_videoframe_get_frame_id\n");
+            CLEAN_UP;
+        }
+
+        LOG_I( "surface id = 0x%08x\n", (guint) surface);
+
+        va_status = vaDeriveImage(va_display, surface, &src_image);
+        //need to destroy
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed to vaDeriveImage\n");
+            ret = MIX_RESULT_FAIL;
+            CLEAN_UP;
+        }
+
+        VAImage *image = &src_image;
+
+        LOG_V( "vaDeriveImage Done\n");
+
+        va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed to vaMapBuffer\n");
+            ret = MIX_RESULT_FAIL;
+            CLEAN_UP;
+        }
+
+        LOG_V( "vaImage information\n");
+        LOG_I( "image->pitches[0] = %d\n", image->pitches[0]);
+        LOG_I( "image->pitches[1] = %d\n", image->pitches[1]);
+        LOG_I( "image->offsets[0] = %d\n", image->offsets[0]);
+        LOG_I( "image->offsets[1] = %d\n", image->offsets[1]);
+        LOG_I( "image->num_planes = %d\n", image->num_planes);
+        LOG_I( "image->width = %d\n", image->width);
+        LOG_I( "image->height = %d\n", image->height);
+
+        LOG_I( "input buf size = %d\n", bufin->size);
+
+        guint8 *inbuf = bufin->data;
+
+        /*need to convert YUV420 to NV12*/
+        dst_y = pvbuf + image->offsets[0];
+
+        for (i = 0; i < height; i ++) {
+            memcpy (dst_y, inbuf + i * width, width);
+            dst_y += image->pitches[0];
+        }
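        /* Reviewer note (annotation, not part of the patch): the loop above
         * copies the luma plane row by row; the loop below interleaves the
         * planar I420 chroma into the NV12 UV plane. For a width-W,
         * height-H frame the source offsets are: Y at inbuf[0], U at
         * inbuf[W*H], V at inbuf[W*H*5/4]; output row i, byte pair j gets
         * U in dst_uv[j] and V in dst_uv[j+1], both read from chroma row i,
         * column j/2 (chroma row stride W/2). dst_y/dst_uv advance by the
         * surface pitch rather than W to respect driver alignment. */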
+
+        dst_uv = pvbuf + image->offsets[1];
+
+        for (i = 0; i < height / 2; i ++) {
+            for (j = 0; j < width; j+=2) {
+                dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2];
+                dst_uv [j + 1] =
+                        inbuf [width * height * 5 / 4 + i * width / 2 + j / 2];
+            }
+            dst_uv += image->pitches[1];
+        }
+
+        va_status = vaUnmapBuffer(va_display, image->buf);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed to vaUnmapBuffer\n");
+            ret = MIX_RESULT_FAIL;
+            CLEAN_UP;
+        }
+
+        va_status = vaDestroyImage(va_display, src_image.image_id);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed to vaDestroyImage\n");
+            ret = MIX_RESULT_FAIL;
+            CLEAN_UP;
+        }
+
+        LOG_V( "Map source data to surface done\n");
+
+    }
+
+    else {//if (!parent->share_buf_mode)
+
+        MixVideoFrame * frame = mix_videoframe_new();
+
+        if (mix->ref_frame == NULL)
+        {
+            ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "mix_videoframe_set_ci_frame_idx failed\n");
+                CLEAN_UP;
+            }
+
+            ret = mix_surfacepool_get_frame_with_ci_frameidx
+                    (parent->surfacepool, &mix->ref_frame, frame);
+            if (ret != MIX_RESULT_SUCCESS)  //#ifdef SLEEP_SURFACE not used
+            {
+                LOG_E( "get reference surface from pool failed\n");
+                CLEAN_UP;
+            }
+        }
+
+        if (mix->rec_frame == NULL)
+        {
+            ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "mix_videoframe_set_ci_frame_idx failed\n");
+                CLEAN_UP;
+            }
+
+            ret = mix_surfacepool_get_frame_with_ci_frameidx
+                    (parent->surfacepool, &mix->rec_frame, frame);
+
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "get reconstructed surface from pool failed\n");
+                CLEAN_UP;
+            }
+        }
+
+        if (parent->need_display) {
+            mix->cur_frame = NULL;
+        }
+
+        if (mix->cur_frame == NULL)
+        {
+            guint ci_idx;
+            memcpy (&ci_idx, bufin->data, bufin->size);
+
+            LOG_I( "surface_num = %d\n", mix->surface_num);
+            LOG_I( "ci_frame_idx = %d\n", ci_idx);
+
+            if (ci_idx > mix->surface_num - 2) {
+                LOG_E( "the CI frame index is bigger than the CI frame count\n");
+                ret = MIX_RESULT_FAIL;
+                CLEAN_UP;
+            }
+
+            ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "mix_videoframe_set_ci_frame_idx failed\n");
+                CLEAN_UP;
+            }
+
+            ret = mix_surfacepool_get_frame_with_ci_frameidx
+                    (parent->surfacepool, &mix->cur_frame, frame);
+
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "get current working surface from pool failed\n");
+                CLEAN_UP;
+            }
+        }
+
+        ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface);
+
+    }
+
+    LOG_V( "vaBeginPicture\n");
+    LOG_I( "va_context = 0x%08x\n",(guint)va_context);
+    LOG_I( "surface = 0x%08x\n",(guint)surface);
+    LOG_I( "va_display = 0x%08x\n",(guint)va_display);
+
+    va_status = vaBeginPicture(va_display, va_context, surface);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed vaBeginPicture\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+
+    ret = mix_videofmtenc_mpeg4_send_encode_command (mix);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E ( "Failed mix_videofmtenc_mpeg4_send_encode_command\n");
+        CLEAN_UP;
+    }
+
+    if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) {
+
+        va_status = vaEndPicture (va_display, va_context);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaEndPicture\n");
+            ret = MIX_RESULT_FAIL;
+            CLEAN_UP;
+        }
+    }
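    /* Reviewer note (annotation, not part of the patch): the control flow
     * from here on is the deliberately pipelined part of the encoder. For
     * the first frame, or whenever rate control is VA_RC_NONE, the
     * vaEndPicture() above submits immediately; for every later frame
     * submission is left pending while the *previous* frame's coded buffer
     * is synced, mapped, and copied out below, and vaEndPicture() only
     * runs after that, overlapping the CPU drain of frame N-1 with the
     * hardware encode of frame N. */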
+
+    if (mix->encoded_frames == 0) {
+        mix->encoded_frames ++;
+        mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index];
+        mix->coded_buf_index ++;
+        mix->coded_buf_index %= 2;
+
+        mix->last_frame = mix->cur_frame;
+
+        /* determine the picture type*/
+        if ((mix->encoded_frames % parent->intra_period) == 0) {
+            mix->is_intra = TRUE;
+        } else {
+            mix->is_intra = FALSE;
+        }
+
+        tmp_frame = mix->rec_frame;
+        mix->rec_frame = mix->ref_frame;
+        mix->ref_frame = tmp_frame;
+    }
+
+    LOG_V( "vaSyncSurface\n");
+
+    va_status = vaSyncSurface(va_display, mix->last_frame->frame_id);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed vaSyncSurface\n");
+        //return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "Start to get encoded data\n");
+
+    /*get encoded data from the VA buffer*/
+    va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed vaMapBuffer\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+
+    VACodedBufferSegment *coded_seg = NULL;
+    int num_seg = 0;
+    guint total_size = 0;
+    guint size = 0;
+
+    coded_seg = (VACodedBufferSegment *)buf;
+    num_seg = 1;
+
+    while (1) {
+        total_size += coded_seg->size;
+
+        if (coded_seg->next == NULL)
+            break;
+
+        coded_seg = reinterpret_cast<VACodedBufferSegment *>(coded_seg->next);
+        num_seg ++;
+    }
+
+#if 0
+    // first 4 bytes is the size of the buffer
+    memcpy (&(iovout->data_size), (void*)buf, 4);
+    //size = (guint*) buf;
+#endif
+
+    iovout->data_size = total_size;
+
+    if (iovout->data == NULL) { //means the app didn't allocate the buffer, so _encode will allocate it.
+
+        iovout->data = (guchar*)g_malloc (iovout->data_size);
+        if (iovout->data == NULL) {
+            LOG_E( "iovout->data == NULL\n");
+            ret = MIX_RESULT_NO_MEMORY;
+            CLEAN_UP;
+        }
+    }
+
+    //memcpy (iovout->data, buf + 16, iovout->data_size);
+
+    coded_seg = (VACodedBufferSegment *)buf;
+    total_size = 0;
+
+    while (1) {
+
+        memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size);
+        total_size += coded_seg->size;
+
+        if (coded_seg->next == NULL)
+            break;
+
+        coded_seg = reinterpret_cast<VACodedBufferSegment *>(coded_seg->next);
+    }
+
+    iovout->buffer_size = iovout->data_size;
+
+    LOG_I( "out size = %d\n", iovout->data_size);
+
+    va_status = vaUnmapBuffer (va_display, mix->last_coded_buf);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed vaUnmapBuffer\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+
+    LOG_V( "get encoded data done\n");
+
+    if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) {
+
+        va_status = vaEndPicture (va_display, va_context);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaEndPicture\n");
+            ret = MIX_RESULT_FAIL;
+            CLEAN_UP;
+        }
+    }
+
+    if (mix->encoded_frames == 1) {
+        va_status = vaBeginPicture(va_display, va_context, surface);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaBeginPicture\n");
+            ret = MIX_RESULT_FAIL;
+            CLEAN_UP;
+        }
+
+        ret = mix_videofmtenc_mpeg4_send_encode_command (mix);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E ( "Failed mix_videofmtenc_mpeg4_send_encode_command\n");
+            CLEAN_UP;
+        }
+
+        va_status = vaEndPicture (va_display, va_context);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaEndPicture\n");
+            ret = MIX_RESULT_FAIL;
+            CLEAN_UP;
+        }
+    }
+
+    VASurfaceStatus status;
+
+    /*query the status of the current surface*/
+    va_status = vaQuerySurfaceStatus(va_display, surface, &status);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed vaQuerySurfaceStatus\n");
+        ret = MIX_RESULT_FAIL;
+        CLEAN_UP;
+    }
+    mix->pic_skipped = status & VASurfaceSkipped;
+
+    //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame);
+
+    if (parent->need_display) {
+        ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE);
+        if (ret != MIX_RESULT_SUCCESS) {
+            LOG_E("Failed to set sync_flag\n");
+            CLEAN_UP;
+        }
+
+        ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame);
+        if (ret != MIX_RESULT_SUCCESS) {
+            LOG_E("Failed mix_framemanager_enqueue\n");
+            CLEAN_UP;
+        }
+    }
+
+    /*update the reference surface and reconstructed surface */
+    if (!mix->pic_skipped) {
+        tmp_frame = mix->rec_frame;
+        mix->rec_frame = mix->ref_frame;
+        mix->ref_frame = tmp_frame;
+    }
+
+#if 0
+    if (mix->ref_frame != NULL)
+        mix_videoframe_unref (mix->ref_frame);
+    mix->ref_frame = mix->rec_frame;
+
+    mix_videoframe_unref (mix->cur_frame);
+#endif
+
+    mix->encoded_frames ++;
+    mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index];
+    mix->coded_buf_index ++;
+    mix->coded_buf_index %= 2;
+    mix->last_frame = mix->cur_frame;
+
+    if (!(parent->need_display)) {
+        mix_videoframe_unref (mix->cur_frame);
+        mix->cur_frame = NULL;
+    }
+
+    CLEAN_UP;
+}
+#undef CLEAN_UP
+
+MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size (
+        MixVideoFormatEnc *mix, guint * max_size)
+{
+
+    MixVideoFormatEnc *parent = NULL;
+
+    if (mix == NULL)
+    {
+        LOG_E( "mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "Begin\n");
+
+    parent = MIX_VIDEOFORMATENC(mix);
+    MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix);
+
+    if (MIX_IS_VIDEOFORMATENC_MPEG4(self)) {
+
+        if (self->coded_buf_size > 0) {
+            *max_size = self->coded_buf_size;
+            LOG_V ("Already calculated the max encoded size, returning the value directly");
+            return MIX_RESULT_SUCCESS;
+        }
+
+        /* based on the rate control mode, calculate the default encoded buffer size */
+        if (parent->va_rcmode == VA_RC_NONE) {
+            self->coded_buf_size =
+                    (parent->picture_width * parent->picture_height * 400) / (16 * 16);
+            // set to value according to QP
+        }
+        else {
+            self->coded_buf_size = parent->bitrate / 4;
+        }
+
+        self->coded_buf_size =
+                MAX (self->coded_buf_size,
+                        (parent->picture_width * parent->picture_height * 400) / (16 * 16));
+
+        /*in case we got a very large user-input bit rate value*/
+        self->coded_buf_size =
+                MAX (self->coded_buf_size,
+                        (parent->picture_width * parent->picture_height * 1.5 * 8));
+        self->coded_buf_size = (self->coded_buf_size + 15) & (~15);
+    }
+    else
+    {
+        LOG_E( "not an MPEG4 video encode object\n");
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+    *max_size = self->coded_buf_size;
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_mpeg4_send_encode_command (MixVideoFormatEnc_MPEG4 *mix)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    LOG_V( "Begin\n");
+
+    if (MIX_IS_VIDEOFORMATENC_MPEG4(mix))
+    {
+        if (mix->encoded_frames == 0) {
+            ret = mix_videofmtenc_mpeg4_send_seq_params (mix);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "Failed mix_videofmtenc_mpeg4_send_seq_params\n");
+                return MIX_RESULT_FAIL;
+            }
+        }
+
+        ret = mix_videofmtenc_mpeg4_send_picture_parameter (mix);
+
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E( "Failed mix_videofmtenc_mpeg4_send_picture_parameter\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        ret = mix_videofmtenc_mpeg4_send_slice_parameter (mix);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E( "Failed mix_videofmtenc_mpeg4_send_slice_parameter\n");
+            return MIX_RESULT_FAIL;
+        }
+
+    }
+    else
+    {
+        LOG_E( "not an MPEG4 video encode object\n");
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+    LOG_V( "End\n");
+
+    return MIX_RESULT_SUCCESS;
+}
diff --git a/mix_video/src/mixvideoformatenc_preview.c b/mix_video/src/mixvideoformatenc_preview.c
deleted file mode 100644
index 649ad49..0000000
--- a/mix_video/src/mixvideoformatenc_preview.c
+++
/dev/null @@ -1,1187 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ -#include -#include -#include - -#include "mixvideolog.h" - -#include "mixvideoformatenc_preview.h" -#include "mixvideoconfigparamsenc_preview.h" -#include - -#undef SHOW_SRC - -#ifdef SHOW_SRC -Window win = 0; -#endif /* SHOW_SRC */ - - -/* The parent class. The pointer will be saved - * in this class's initialization. The pointer - * can be used for chaining method call if needed. - */ -static MixVideoFormatEncClass *parent_class = NULL; - -static void mix_videoformatenc_preview_finalize(GObject * obj); - -/* - * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC - */ -G_DEFINE_TYPE (MixVideoFormatEnc_Preview, mix_videoformatenc_preview, MIX_TYPE_VIDEOFORMATENC); - -static void mix_videoformatenc_preview_init(MixVideoFormatEnc_Preview * self) { - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); - - - /* member variable initialization */ - self->encoded_frames = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - self->cur_frame = NULL; - self->ref_frame = NULL; - self->rec_frame = NULL; - - self->ci_shared_surfaces = NULL; - self->surfaces= NULL; - self->surface_num = 0; - - parent->initialized = FALSE; -} - -static void mix_videoformatenc_preview_class_init( - MixVideoFormatEnc_PreviewClass * klass) { - - /* root class */ - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* direct parent class */ - MixVideoFormatEncClass *video_formatenc_class = - MIX_VIDEOFORMATENC_CLASS(klass); - - /* parent class for later use */ - parent_class = g_type_class_peek_parent(klass); - - /* setup finializer */ - gobject_class->finalize = mix_videoformatenc_preview_finalize; - - /* setup vmethods with base implementation */ - video_formatenc_class->getcaps = mix_videofmtenc_preview_getcaps; - video_formatenc_class->initialize = mix_videofmtenc_preview_initialize; - video_formatenc_class->encode = mix_videofmtenc_preview_encode; - video_formatenc_class->flush = mix_videofmtenc_preview_flush; - video_formatenc_class->eos = mix_videofmtenc_preview_eos; - video_formatenc_class->deinitialize = mix_videofmtenc_preview_deinitialize; -} - -MixVideoFormatEnc_Preview * -mix_videoformatenc_preview_new(void) { - MixVideoFormatEnc_Preview *ret = - g_object_new(MIX_TYPE_VIDEOFORMATENC_PREVIEW, NULL); - - return ret; -} - -void mix_videoformatenc_preview_finalize(GObject * obj) { - /* clean up here. 
*/ - - /*MixVideoFormatEnc_Preview *mix = MIX_VIDEOFORMATENC_PREVIEW(obj); */ - GObjectClass *root_class = (GObjectClass *) parent_class; - - LOG_V( "\n"); - - /* Chain up parent */ - if (root_class->finalize) { - root_class->finalize(obj); - } -} - -MixVideoFormatEnc_Preview * -mix_videoformatenc_preview_ref(MixVideoFormatEnc_Preview * mix) { - return (MixVideoFormatEnc_Preview *) g_object_ref(G_OBJECT(mix)); -} - -/*Preview vmethods implementation */ -MIX_RESULT mix_videofmtenc_preview_getcaps(MixVideoFormatEnc *mix, GString *msg) { - - LOG_V( "mix_videofmtenc_preview_getcaps\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - if (parent_class->getcaps) { - return parent_class->getcaps(mix, msg); - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_preview_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display ) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - MixVideoConfigParamsEncPreview * config_params_enc_preview; - - VAStatus va_status = VA_STATUS_SUCCESS; - VASurfaceID * surfaces = NULL; - - gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - gint va_num_profiles, va_num_entrypoints; - - VAProfile *va_profiles = NULL; - VAEntrypoint *va_entrypoints = NULL; - VAConfigAttrib va_attrib[2]; - guint index; - - - /*frame_mgr and input_buf_pool is reservered for future use*/ - - if (mix == NULL || config_params_enc == NULL || va_display == NULL) { - LOG_E( - "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "begin\n"); - - - /* Chainup parent method. 
*/ - if (parent_class->initialize) { - ret = parent_class->initialize(mix, config_params_enc, - frame_mgr, input_buf_pool, surface_pool, - va_display); - } - - if (ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - - - if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) - return MIX_RESULT_INVALID_PARAM; - - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc)) { - config_params_enc_preview = - MIX_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc); - } else { - LOG_V( - "mix_videofmtenc_preview_initialize: no preview config params found\n"); - return MIX_RESULT_FAIL; - } - - g_mutex_lock(parent->objectlock); - - - LOG_V( - "Get properities from params done\n"); - - parent->va_display = va_display; - - LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", - (guint)va_display); - - - /*get the max number for profiles/entrypoints/attribs*/ - va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); - - va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); - - va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - - va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); - va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); - - if (va_profiles == NULL || va_entrypoints ==NULL) - { - LOG_E( - "!va_profiles || !va_entrypoints\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_I( - "va_profiles = 0x%08x\n", (guint)va_profiles); - - LOG_V( "vaQueryConfigProfiles\n"); - - - va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigProfiles\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaQueryConfigProfiles Done\n"); - - - - /*check whether profile is supported*/ - for(index= 0; index < va_num_profiles; index++) { - if(parent->va_profile == va_profiles[index]) - break; - } - - if(index == va_num_profiles) - { - LOG_E( "Profile not supported\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaQueryConfigEntrypoints\n"); - - - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints(va_display, - parent->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - for (index = 0; index < va_num_entrypoints; index ++) { - if (va_entrypoints[index] == VAEntrypointEncSlice) { - break; - } - } - - if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - va_attrib[0].type = VAConfigAttribRTFormat; - va_attrib[1].type = VAConfigAttribRateControl; - - LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaGetConfigAttributes\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if ((va_attrib[0].value & parent->va_format) == 0) { - LOG_E( "Matched format not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - if ((va_attrib[1].value & parent->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - 
va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = parent->va_rcmode; - - LOG_V( "======VA Configuration======\n"); - - LOG_I( "profile = %d\n", - parent->va_profile); - LOG_I( "va_entrypoint = %d\n", - parent->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", - va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", - va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", - va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) = %d\n", - va_attrib[1].value); - - LOG_V( "vaCreateConfig\n"); - - va_status = vaCreateConfig(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2, &(parent->va_config)); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaCreateConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - /*compute the surface number*/ - int numSurfaces; - - if (parent->share_buf_mode) { - numSurfaces = 2; - } - else { - numSurfaces = 8; - parent->ci_frame_num = 0; - } - - self->surface_num = numSurfaces + parent->ci_frame_num; - - surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces); - - if (surfaces == NULL) - { - LOG_E( - "Failed allocate surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_V( "vaCreateSurfaces\n"); - - va_status = vaCreateSurfaces(va_display, parent->picture_width, - parent->picture_height, parent->va_format, - numSurfaces, surfaces); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaCreateSurfaces\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if (parent->share_buf_mode) { - - LOG_V( - "We are in share buffer mode!\n"); - self->ci_shared_surfaces = - g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num); - - if (self->ci_shared_surfaces == NULL) - { - LOG_E( - "Failed allocate shared surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - guint index; - for(index = 0; index < parent->ci_frame_num; index++) { - - LOG_I( "ci_frame_id = %lu\n", - parent->ci_frame_id[index]); - - LOG_V( - "vaCreateSurfaceFromCIFrame\n"); - - va_status = vaCreateSurfaceFromCIFrame(va_display, - (gulong) (parent->ci_frame_id[index]), - &self->ci_shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateSurfaceFromCIFrame\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - - } - } - - LOG_V( - "vaCreateSurfaceFromCIFrame Done\n"); - - }// if (parent->share_buf_mode) - - self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); - - if (self->surfaces == NULL) - { - LOG_E( - "Failed allocate private surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - - } - - if (parent->share_buf_mode) { - /*shared surfaces should be put in pool first, - * because we will get it accoring to CI index*/ - for(index = 0; index < parent->ci_frame_num; index++) - self->surfaces[index] = self->ci_shared_surfaces[index]; - } - - for(index = 0; index < numSurfaces; index++) { - self->surfaces[index + parent->ci_frame_num] = surfaces[index]; - } - - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", - numSurfaces + parent->ci_frame_num); - -#if 0 //current put this in gst - images = g_malloc(sizeof(VAImage)*numSurfaces); - if (images == NULL) - { - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < numSurfaces; index++) { - //Derive an VAImage from an existing surface. 
- //The image buffer can then be mapped/unmapped for CPU access - va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); - } -#endif - - LOG_V( "mix_surfacepool_new\n"); - - parent->surfacepool = mix_surfacepool_new(); - if (surface_pool) - *surface_pool = parent->surfacepool; - //which is useful to check before encode - - if (parent->surfacepool == NULL) - { - LOG_E( - "Failed to mix_surfacepool_new\n"); - - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "mix_surfacepool_initialize\n"); - - ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces, va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - ret = MIX_RESULT_ALREADY_INIT; - goto cleanup; - - default: - break; - } - - - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext(va_display, parent->va_config, - parent->picture_width, parent->picture_height, - 0, self->surfaces, parent->ci_frame_num + numSurfaces, - &(parent->va_context)); - - LOG_I( - "Created libva context width %d, height %d\n", - parent->picture_width, parent->picture_height); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", - (guint)va_status); - - ret = MIX_RESULT_FAIL; - goto cleanup; - - } - - self->coded_buf_size = 4; - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &self->coded_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -#ifdef SHOW_SRC - Display * display = XOpenDisplay (NULL); - - LOG_I( "display = 0x%08x\n", - (guint) display); - win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, - parent->picture_width, parent->picture_height, 0, 0, - WhitePixel(display, 0)); - XMapWindow(display, win); - XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - - XSync(display, False); - LOG_I( "va_display = 0x%08x\n", - (guint) va_display); - -#endif /* SHOW_SRC */ - - - LOG_V( "end\n"); - -cleanup: - - - if (ret == MIX_RESULT_SUCCESS) { - parent->initialized = TRUE; - } - - /*free profiles and entrypoints*/ - if (va_profiles) - g_free(va_profiles); - - if (va_entrypoints) - g_free (va_entrypoints); - - if (surfaces) - g_free (surfaces); - - g_mutex_unlock(parent->objectlock); - - return ret; -} - -MIX_RESULT mix_videofmtenc_preview_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - - LOG_V( "Begin\n"); - - /*currenly only support one input and output buffer*/ - - if (bufincnt != 1 || iovoutcnt != 1) { - LOG_E( - "buffer count not equel to 1\n"); - LOG_E( - "maybe some exception occurs\n"); - } - - if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { - LOG_E( - "!mix || !bufin[0] ||!iovout[0]\n"); - return MIX_RESULT_NULL_PTR; - } - - -#if 0 - if (parent_class->encode) { - return parent_class->encode(mix, bufin, bufincnt, iovout, - iovoutcnt, encode_params); - } -#endif - - if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW (mix); - - LOG_V( "Locking\n"); - 
g_mutex_lock(parent->objectlock); - - - //TODO: we also could move some encode Preparation work to here - - LOG_V( - "mix_videofmtenc_preview_process_encode\n"); - - ret = mix_videofmtenc_preview_process_encode (self, - bufin[0], iovout[0]); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_preview_process_encode\n"); - goto cleanup; - } - - -cleanup: - - LOG_V( "UnLocking\n"); - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT mix_videofmtenc_preview_flush(MixVideoFormatEnc *mix) { - - //MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - /*not chain to parent flush func*/ -#if 0 - if (parent_class->flush) { - return parent_class->flush(mix, msg); - } -#endif - - if (!MIX_IS_VIDEOFORMATENC_PREVIEW (mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); - - g_mutex_lock(mix->objectlock); - -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } -#endif - - /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) - { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (self->ref_frame != NULL) - { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; - } - - /*reset the properities*/ - self->encoded_frames = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - - g_mutex_unlock(mix->objectlock); - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_preview_eos(MixVideoFormatEnc *mix) { - - LOG_V( "\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (parent_class->eos) { - return parent_class->eos(mix); - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix) { - - MixVideoFormatEnc *parent = NULL; - VAStatus va_status; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) - return MIX_RESULT_INVALID_PARAM; - - - if (parent_class->deinitialize) { - ret = parent_class->deinitialize(mix); - } - - if (ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); - - LOG_V( "Release frames\n"); - - g_mutex_lock(parent->objectlock); - -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } -#endif - - /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) - { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (self->ref_frame != NULL) - { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; - } - - LOG_V( "Release surfaces\n"); - - if (self->ci_shared_surfaces) - { - g_free (self->ci_shared_surfaces); - self->ci_shared_surfaces = NULL; - } - - if (self->surfaces) - { - g_free (self->surfaces); - self->surfaces = NULL; - } - - LOG_V( "vaDestroyContext\n"); - - va_status = vaDestroyContext (parent->va_display, parent->va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyContext\n"); - ret = MIX_RESULT_FAIL; - goto 
cleanup; - } - - LOG_V( "vaDestroyConfig\n"); - - va_status = vaDestroyConfig (parent->va_display, parent->va_config); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - -cleanup: - - parent->initialized = FALSE; - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - - return ret; -} - - -MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mix, - MixBuffer * bufin, MixIOVec * iovout) -{ - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus va_status = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; - VAContextID va_context; - gulong surface = 0; - guint16 width, height; - - //MixVideoFrame * tmp_frame; - //guint8 *buf; - - if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { - LOG_E( - "mix == NUL) || bufin == NULL || iovout == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - va_display = parent->va_display; - va_context = parent->va_context; - width = parent->picture_width; - height = parent->picture_height; - - - LOG_I( "encoded_frames = %d\n", - mix->encoded_frames); - LOG_I( "is_intra = %d\n", - mix->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", - (guint) parent->ci_frame_id); - - LOG_V( - "Get Surface from the pool\n"); - - if (!parent->share_buf_mode) { - LOG_V( - "We are NOT in share buffer mode\n"); - - if (mix->ref_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - LOG_V( "Get Surface Done\n"); - - - VAImage src_image; - guint8 *pvbuf; - guint8 *dst_y; - guint8 *dst_uv; - int i,j; - - LOG_V( - "map source data to surface\n"); - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); - goto cleanup; - } - - - LOG_I( - "surface id = 0x%08x\n", (guint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); - //need to destroy - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDeriveImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); - - - va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed to vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "vaImage information\n"); - LOG_I( - "image->pitches[0] = %d\n", image->pitches[0]); - LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); - LOG_I( - "image->offsets[0] = %d\n", image->offsets[0]); - LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); - LOG_I( - "image->num_planes = %d\n", image->num_planes); - LOG_I( - "image->width = %d\n", image->width); - LOG_I( - "image->height = %d\n", 
image->height); - - LOG_I( - "input buf size = %d\n", bufin->size); - - guint8 *inbuf = bufin->data; - - /*need to convert YUV420 to NV12*/ - dst_y = pvbuf +image->offsets[0]; - - for (i = 0; i < height; i ++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = - inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; - } - dst_uv += image->pitches[1]; - } - - va_status = vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDestroyImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "Map source data to surface done\n"); - - } - - else {//if (!parent->share_buf_mode) - - MixVideoFrame * frame = mix_videoframe_new(); - - if (mix->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_frame, frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "get reference surface from pool failed\n"); - goto cleanup; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_frame, frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get recontructed surface from pool failed\n"); - goto cleanup; - } - } - - //mix_videoframe_unref (mix->cur_frame); - - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - guint ci_idx; - memcpy (&ci_idx, bufin->data, bufin->size); - - LOG_I( - "surface_num = %d\n", mix->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > mix->surface_num - 2) { - LOG_E( - "the CI frame idx is too bigger than CI frame number\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); - if (ret != MIX_RESULT_SUCCESS) - - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_frame, frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - goto cleanup; - } - } - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - - { - LOG_E( - "mix_videoframe_get_frame_id failed\n"); - goto cleanup; - } - - } - - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(guint)va_context); - LOG_I( "surface = 0x%08x\n",(guint)surface); - LOG_I( "va_display = 0x%08x\n",(guint)va_display); - - iovout->data_size = 4; - iovout->data = g_malloc (iovout->data_size); - if (iovout->data == NULL) { - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - memset (iovout->data, 0, iovout->data_size); - - iovout->buffer_size = iovout->data_size; - - - if (parent->need_display) { - ret = 
mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - goto cleanup; - } - - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_framemanager_enqueue\n"); - goto cleanup; - } - } - - - if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_frame); - mix->cur_frame = NULL; - } - mix->encoded_frames ++; - -cleanup: - - if (ret != MIX_RESULT_SUCCESS) { - if (iovout->data) { - g_free (iovout->data); - iovout->data = NULL; - } - } - - LOG_V( "end\n"); - - return ret; -} diff --git a/mix_video/src/mixvideoformatenc_preview.cpp b/mix_video/src/mixvideoformatenc_preview.cpp new file mode 100644 index 0000000..3444437 --- /dev/null +++ b/mix_video/src/mixvideoformatenc_preview.cpp @@ -0,0 +1,1187 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ +#include +#include +#include + +#include "mixvideolog.h" + +#include "mixvideoformatenc_preview.h" +#include "mixvideoconfigparamsenc_preview.h" +#include + +#undef SHOW_SRC + +#ifdef SHOW_SRC +Window win = 0; +#endif /* SHOW_SRC */ + + +/* The parent class. The pointer will be saved + * in this class's initialization. The pointer + * can be used for chaining method call if needed. 
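+ * As a rough illustration of the pattern (the names "FooClass" and
+ * "some_vmethod" are placeholders, not part of this file):
+ *
+ *   static FooClass *parent_class = NULL;      // saved in class_init
+ *   ...
+ *   if (parent_class->some_vmethod)
+ *       parent_class->some_vmethod(obj, args); // chain up to parent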
+ */
+static MixVideoFormatEncClass *parent_class = NULL;
+
+static void mix_videoformatenc_preview_finalize(GObject * obj);
+
+/*
+ * Please note that the parent type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC
+ */
+G_DEFINE_TYPE (MixVideoFormatEnc_Preview, mix_videoformatenc_preview, MIX_TYPE_VIDEOFORMATENC);
+
+static void mix_videoformatenc_preview_init(MixVideoFormatEnc_Preview * self) {
+    MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self);
+
+    /* member variable initialization */
+    self->encoded_frames = 0;
+    self->pic_skipped = FALSE;
+    self->is_intra = TRUE;
+    self->cur_frame = NULL;
+    self->ref_frame = NULL;
+    self->rec_frame = NULL;
+
+    self->ci_shared_surfaces = NULL;
+    self->surfaces = NULL;
+    self->surface_num = 0;
+
+    parent->initialized = FALSE;
+}
+
+static void mix_videoformatenc_preview_class_init(
+        MixVideoFormatEnc_PreviewClass * klass) {
+
+    /* root class */
+    GObjectClass *gobject_class = (GObjectClass *) klass;
+
+    /* direct parent class */
+    MixVideoFormatEncClass *video_formatenc_class =
+        MIX_VIDEOFORMATENC_CLASS(klass);
+
+    /* parent class for later use */
+    parent_class = reinterpret_cast<MixVideoFormatEncClass *>(g_type_class_peek_parent(klass));
+
+    /* set up finalizer */
+    gobject_class->finalize = mix_videoformatenc_preview_finalize;
+
+    /* set up vmethods with base implementation */
+    video_formatenc_class->getcaps = mix_videofmtenc_preview_getcaps;
+    video_formatenc_class->initialize = mix_videofmtenc_preview_initialize;
+    video_formatenc_class->encode = mix_videofmtenc_preview_encode;
+    video_formatenc_class->flush = mix_videofmtenc_preview_flush;
+    video_formatenc_class->eos = mix_videofmtenc_preview_eos;
+    video_formatenc_class->deinitialize = mix_videofmtenc_preview_deinitialize;
+}
+
+MixVideoFormatEnc_Preview *
+mix_videoformatenc_preview_new(void) {
+    MixVideoFormatEnc_Preview *ret = reinterpret_cast<MixVideoFormatEnc_Preview *>(
+        g_object_new(MIX_TYPE_VIDEOFORMATENC_PREVIEW, NULL));
+
+    return ret;
+}
+
+void mix_videoformatenc_preview_finalize(GObject * obj) {
+    /* clean up here.
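+       This subclass owns no heap members of its own (frames and the
+       surface arrays are released in deinitialize()), so finalize only
+       needs to chain up and let the base classes do their cleanup.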
+     */
+
+    /*MixVideoFormatEnc_Preview *mix = MIX_VIDEOFORMATENC_PREVIEW(obj); */
+    GObjectClass *root_class = (GObjectClass *) parent_class;
+
+    LOG_V( "\n");
+
+    /* Chain up to the parent */
+    if (root_class->finalize) {
+        root_class->finalize(obj);
+    }
+}
+
+MixVideoFormatEnc_Preview *
+mix_videoformatenc_preview_ref(MixVideoFormatEnc_Preview * mix) {
+    return (MixVideoFormatEnc_Preview *) g_object_ref(G_OBJECT(mix));
+}
+
+/* Preview vmethods implementation */
+MIX_RESULT mix_videofmtenc_preview_getcaps(MixVideoFormatEnc *mix, GString *msg) {
+
+    LOG_V( "mix_videofmtenc_preview_getcaps\n");
+
+    if (mix == NULL) {
+        LOG_E( "mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    if (parent_class->getcaps) {
+        return parent_class->getcaps(mix, msg);
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_preview_initialize(MixVideoFormatEnc *mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay va_display ) {
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    MixVideoFormatEnc *parent = NULL;
+    MixVideoConfigParamsEncPreview * config_params_enc_preview;
+
+    VAStatus va_status = VA_STATUS_SUCCESS;
+    VASurfaceID * surfaces = NULL;
+
+    gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs;
+    gint va_num_profiles, va_num_entrypoints;
+
+    VAProfile *va_profiles = NULL;
+    VAEntrypoint *va_entrypoints = NULL;
+    VAConfigAttrib va_attrib[2];
+    guint index;
+
+    /*frame_mgr and input_buf_pool are reserved for future use*/
+
+    if (mix == NULL || config_params_enc == NULL || va_display == NULL) {
+        LOG_E(
+            "mix == NULL || config_params_enc == NULL || va_display == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "begin\n");
+
+    /* Chain up to the parent method.
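+       The chain-up must run before anything else here: the base
+       initialize() parses the common MixVideoConfigParamsEnc fields
+       (picture size, va_profile, va_rcmode, share_buf_mode, the CI
+       frame list, ...) into the parent object, and everything below
+       reads those parent fields.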
+     */
+    if (parent_class->initialize) {
+        ret = parent_class->initialize(mix, config_params_enc,
+                frame_mgr, input_buf_pool, surface_pool,
+                va_display);
+    }
+
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        return ret;
+    }
+
+    if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix))
+        return MIX_RESULT_INVALID_PARAM;
+
+    parent = MIX_VIDEOFORMATENC(&(mix->parent));
+    MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix);
+
+    if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc)) {
+        config_params_enc_preview =
+            MIX_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc);
+    } else {
+        LOG_V(
+            "mix_videofmtenc_preview_initialize: no preview config params found\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    g_mutex_lock(parent->objectlock);
+
+    LOG_V(
+        "Get properties from params done\n");
+
+    parent->va_display = va_display;
+
+    LOG_V( "Get Display\n");
+    LOG_I( "Display = 0x%08x\n",
+            (guint)va_display);
+
+    /*get the max number for profiles/entrypoints/attribs*/
+    va_max_num_profiles = vaMaxNumProfiles(va_display);
+    LOG_I( "va_max_num_profiles = %d\n",
+            va_max_num_profiles);
+
+    va_max_num_entrypoints = vaMaxNumEntrypoints(va_display);
+    LOG_I( "va_max_num_entrypoints = %d\n",
+            va_max_num_entrypoints);
+
+    va_max_num_attribs = vaMaxNumConfigAttributes(va_display);
+    LOG_I( "va_max_num_attribs = %d\n",
+            va_max_num_attribs);
+
+    va_profiles = reinterpret_cast<VAProfile *>(g_malloc(sizeof(VAProfile)*va_max_num_profiles));
+    va_entrypoints = reinterpret_cast<VAEntrypoint *>(g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints));
+
+    if (va_profiles == NULL || va_entrypoints == NULL)
+    {
+        LOG_E(
+            "!va_profiles || !va_entrypoints\n");
+        ret = MIX_RESULT_NO_MEMORY;
+        goto cleanup;
+    }
+
+    LOG_I(
+        "va_profiles = 0x%08x\n", (guint)va_profiles);
+
+    LOG_V( "vaQueryConfigProfiles\n");
+
+    va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles);
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+            "Failed to call vaQueryConfigProfiles\n");
+        ret = MIX_RESULT_FAIL;
+        goto cleanup;
+    }
+
+    LOG_V( "vaQueryConfigProfiles Done\n");
+
+    /*check whether the profile is supported*/
+    for (index = 0; index < va_num_profiles; index++) {
+        if (parent->va_profile == va_profiles[index])
+            break;
+    }
+
+    if (index == va_num_profiles)
+    {
+        LOG_E( "Profile not supported\n");
+        ret = MIX_RESULT_FAIL;
+        goto cleanup;
+    }
+
+    LOG_V( "vaQueryConfigEntrypoints\n");
+
+    /*Check entry point*/
+    va_status = vaQueryConfigEntrypoints(va_display,
+            parent->va_profile,
+            va_entrypoints, &va_num_entrypoints);
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+            "Failed to call vaQueryConfigEntrypoints\n");
+        ret = MIX_RESULT_FAIL;
+        goto cleanup;
+    }
+
+    for (index = 0; index < va_num_entrypoints; index ++) {
+        if (va_entrypoints[index] == VAEntrypointEncSlice) {
+            break;
+        }
+    }
+
+    if (index == va_num_entrypoints) {
+        LOG_E( "Entrypoint not found\n");
+        ret = MIX_RESULT_FAIL;
+        goto cleanup;
+    }
+
+    va_attrib[0].type = VAConfigAttribRTFormat;
+    va_attrib[1].type = VAConfigAttribRateControl;
+
+    LOG_V( "vaGetConfigAttributes\n");
+
+    va_status = vaGetConfigAttributes(va_display, parent->va_profile,
+            parent->va_entrypoint,
+            &va_attrib[0], 2);
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+            "Failed to call vaGetConfigAttributes\n");
+        ret = MIX_RESULT_FAIL;
+        goto cleanup;
+    }
+
+    if ((va_attrib[0].value & parent->va_format) == 0) {
+        LOG_E( "Matched format not found\n");
+        ret = MIX_RESULT_FAIL;
+        goto cleanup;
+    }
+
+    if ((va_attrib[1].value & parent->va_rcmode) == 0) {
+        LOG_E( "RC mode not found\n");
+        ret = MIX_RESULT_FAIL;
+        goto cleanup;
+    }
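+
+    /*
+     * The checks above are the usual libva capability handshake:
+     *   vaQueryConfigProfiles()    -- is parent->va_profile supported?
+     *   vaQueryConfigEntrypoints() -- does it expose VAEntrypointEncSlice?
+     *   vaGetConfigAttributes()    -- are the requested RT format and RC
+     *                                 mode bits present in the masks?
+     * Only after all three pass are the attribute values pinned and
+     * handed to vaCreateConfig() below.
+     */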
+
+    va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420;
+    va_attrib[1].value = parent->va_rcmode;
+
+    LOG_V( "======VA Configuration======\n");
+
+    LOG_I( "profile = %d\n",
+            parent->va_profile);
+    LOG_I( "va_entrypoint = %d\n",
+            parent->va_entrypoint);
+    LOG_I( "va_attrib[0].type = %d\n",
+            va_attrib[0].type);
+    LOG_I( "va_attrib[1].type = %d\n",
+            va_attrib[1].type);
+    LOG_I( "va_attrib[0].value (Format) = %d\n",
+            va_attrib[0].value);
+    LOG_I( "va_attrib[1].value (RC mode) = %d\n",
+            va_attrib[1].value);
+
+    LOG_V( "vaCreateConfig\n");
+
+    va_status = vaCreateConfig(va_display, parent->va_profile,
+            parent->va_entrypoint,
+            &va_attrib[0], 2, &(parent->va_config));
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed vaCreateConfig\n");
+        ret = MIX_RESULT_FAIL;
+        goto cleanup;
+    }
+
+    /*compute the surface number*/
+    int numSurfaces;
+
+    if (parent->share_buf_mode) {
+        numSurfaces = 2;
+    }
+    else {
+        numSurfaces = 8;
+        parent->ci_frame_num = 0;
+    }
+
+    self->surface_num = numSurfaces + parent->ci_frame_num;
+
+    surfaces = reinterpret_cast<VASurfaceID *>(g_malloc(sizeof(VASurfaceID)*numSurfaces));
+
+    if (surfaces == NULL)
+    {
+        LOG_E(
+            "Failed to allocate surfaces\n");
+        ret = MIX_RESULT_NO_MEMORY;
+        goto cleanup;
+    }
+
+    LOG_V( "vaCreateSurfaces\n");
+
+    va_status = vaCreateSurfaces(va_display, parent->picture_width,
+            parent->picture_height, parent->va_format,
+            numSurfaces, surfaces);
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+            "Failed vaCreateSurfaces\n");
+        ret = MIX_RESULT_FAIL;
+        goto cleanup;
+    }
+
+    if (parent->share_buf_mode) {
+
+        LOG_V(
+            "We are in share buffer mode!\n");
+        self->ci_shared_surfaces = reinterpret_cast<VASurfaceID *>(
+            g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num));
+
+        if (self->ci_shared_surfaces == NULL)
+        {
+            LOG_E(
+                "Failed to allocate shared surfaces\n");
+            ret = MIX_RESULT_NO_MEMORY;
+            goto cleanup;
+        }
+
+        guint index;
+        for (index = 0; index < parent->ci_frame_num; index++) {
+
+            LOG_I( "ci_frame_id = %lu\n",
+                    parent->ci_frame_id[index]);
+
+            LOG_V(
+                "vaCreateSurfaceFromCIFrame\n");
+
+            va_status = vaCreateSurfaceFromCIFrame(va_display,
+                    (gulong) (parent->ci_frame_id[index]),
+                    &self->ci_shared_surfaces[index]);
+            if (va_status != VA_STATUS_SUCCESS)
+            {
+                LOG_E(
+                    "Failed to vaCreateSurfaceFromCIFrame\n");
+                ret = MIX_RESULT_FAIL;
+                goto cleanup;
+            }
+        }
+
+        LOG_V(
+            "vaCreateSurfaceFromCIFrame Done\n");
+
+    }// if (parent->share_buf_mode)
+
+    self->surfaces = reinterpret_cast<VASurfaceID *>(g_malloc(sizeof(VASurfaceID) * self->surface_num));
+
+    if (self->surfaces == NULL)
+    {
+        LOG_E(
+            "Failed to allocate private surfaces\n");
+        ret = MIX_RESULT_NO_MEMORY;
+        goto cleanup;
+    }
+
+    if (parent->share_buf_mode) {
+        /*shared surfaces should be put in the pool first,
+         * because we will look them up according to CI index*/
+        for (index = 0; index < parent->ci_frame_num; index++)
+            self->surfaces[index] = self->ci_shared_surfaces[index];
+    }
+
+    for (index = 0; index < numSurfaces; index++) {
+        self->surfaces[index + parent->ci_frame_num] = surfaces[index];
+    }
+
+    LOG_V( "assign surface Done\n");
+    LOG_I( "Created %d libva surfaces\n",
+            numSurfaces + parent->ci_frame_num);
+
+#if 0 //currently put this in gst
+    images = g_malloc(sizeof(VAImage)*numSurfaces);
+    if (images == NULL)
+    {
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    for (index = 0; index < numSurfaces; index++) {
+        //Derive a VAImage from an existing surface.
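+        //(This is the same derive/map cycle that process_encode() uses
+        // for real further down: vaDeriveImage() -> vaMapBuffer() ->
+        // CPU copy -> vaUnmapBuffer() -> vaDestroyImage().)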
+ //The image buffer can then be mapped/unmapped for CPU access + va_status = vaDeriveImage(va_display, surfaces[index], + &images[index]); + } +#endif + + LOG_V( "mix_surfacepool_new\n"); + + parent->surfacepool = mix_surfacepool_new(); + if (surface_pool) + *surface_pool = parent->surfacepool; + //which is useful to check before encode + + if (parent->surfacepool == NULL) + { + LOG_E( + "Failed to mix_surfacepool_new\n"); + + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "mix_surfacepool_initialize\n"); + + ret = mix_surfacepool_initialize(parent->surfacepool, + self->surfaces, parent->ci_frame_num + numSurfaces, va_display); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + ret = MIX_RESULT_ALREADY_INIT; + goto cleanup; + + default: + break; + } + + + //Initialize and save the VA context ID + LOG_V( "vaCreateContext\n"); + + va_status = vaCreateContext(va_display, parent->va_config, + parent->picture_width, parent->picture_height, + 0, self->surfaces, parent->ci_frame_num + numSurfaces, + &(parent->va_context)); + + LOG_I( + "Created libva context width %d, height %d\n", + parent->picture_width, parent->picture_height); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateContext\n"); + LOG_I( "va_status = %d\n", + (guint)va_status); + + ret = MIX_RESULT_FAIL; + goto cleanup; + + } + + self->coded_buf_size = 4; + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, parent->va_context, + VAEncCodedBufferType, + self->coded_buf_size, // + 1, NULL, + &self->coded_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + +#ifdef SHOW_SRC + Display * display = XOpenDisplay (NULL); + + LOG_I( "display = 0x%08x\n", + (guint) display); + win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, + parent->picture_width, parent->picture_height, 0, 0, + WhitePixel(display, 0)); + XMapWindow(display, win); + XSelectInput(display, win, KeyPressMask | StructureNotifyMask); + + XSync(display, False); + LOG_I( "va_display = 0x%08x\n", + (guint) va_display); + +#endif /* SHOW_SRC */ + + + LOG_V( "end\n"); + +cleanup: + + + if (ret == MIX_RESULT_SUCCESS) { + parent->initialized = TRUE; + } + + /*free profiles and entrypoints*/ + if (va_profiles) + g_free(va_profiles); + + if (va_entrypoints) + g_free (va_entrypoints); + + if (surfaces) + g_free (surfaces); + + g_mutex_unlock(parent->objectlock); + + return ret; +} + +MIX_RESULT mix_videofmtenc_preview_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], + gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + MixVideoEncodeParams * encode_params) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoFormatEnc *parent = NULL; + + LOG_V( "Begin\n"); + + /*currenly only support one input and output buffer*/ + + if (bufincnt != 1 || iovoutcnt != 1) { + LOG_E( + "buffer count not equel to 1\n"); + LOG_E( + "maybe some exception occurs\n"); + } + + if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { + LOG_E( + "!mix || !bufin[0] ||!iovout[0]\n"); + return MIX_RESULT_NULL_PTR; + } + + +#if 0 + if (parent_class->encode) { + return parent_class->encode(mix, bufin, bufincnt, iovout, + iovoutcnt, encode_params); + } +#endif + + if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) + return MIX_RESULT_INVALID_PARAM; + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW (mix); + + LOG_V( "Locking\n"); + 
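+    /*
+     * parent->objectlock is the same mutex taken by flush() and
+     * deinitialize(), so an encode in flight cannot race a flush or
+     * teardown. Everything from here to the unlock in "cleanup" runs
+     * under the lock; the goto-cleanup pattern keeps the unlock on
+     * every exit path.
+     */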
g_mutex_lock(parent->objectlock); + + + //TODO: we also could move some encode Preparation work to here + + LOG_V( + "mix_videofmtenc_preview_process_encode\n"); + + ret = mix_videofmtenc_preview_process_encode (self, + bufin[0], iovout[0]); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_preview_process_encode\n"); + goto cleanup; + } + + +cleanup: + + LOG_V( "UnLocking\n"); + + g_mutex_unlock(parent->objectlock); + + LOG_V( "end\n"); + + return ret; +} + +MIX_RESULT mix_videofmtenc_preview_flush(MixVideoFormatEnc *mix) { + + //MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + + /*not chain to parent flush func*/ +#if 0 + if (parent_class->flush) { + return parent_class->flush(mix, msg); + } +#endif + + if (!MIX_IS_VIDEOFORMATENC_PREVIEW (mix)) + return MIX_RESULT_INVALID_PARAM; + + MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); + + g_mutex_lock(mix->objectlock); + +#if 0 + /*unref the current source surface*/ + if (self->cur_frame != NULL) + { + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; + } +#endif + + /*unref the reconstructed surface*/ + if (self->rec_frame != NULL) + { + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_frame != NULL) + { + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; + } + + /*reset the properities*/ + self->encoded_frames = 0; + self->pic_skipped = FALSE; + self->is_intra = TRUE; + + g_mutex_unlock(mix->objectlock); + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_preview_eos(MixVideoFormatEnc *mix) { + + LOG_V( "\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if (parent_class->eos) { + return parent_class->eos(mix); + } + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix) { + + MixVideoFormatEnc *parent = NULL; + VAStatus va_status; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + if (mix == NULL) { + LOG_E( "mix == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) + return MIX_RESULT_INVALID_PARAM; + + + if (parent_class->deinitialize) { + ret = parent_class->deinitialize(mix); + } + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + + + parent = MIX_VIDEOFORMATENC(&(mix->parent)); + MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); + + LOG_V( "Release frames\n"); + + g_mutex_lock(parent->objectlock); + +#if 0 + /*unref the current source surface*/ + if (self->cur_frame != NULL) + { + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; + } +#endif + + /*unref the reconstructed surface*/ + if (self->rec_frame != NULL) + { + mix_videoframe_unref (self->rec_frame); + self->rec_frame = NULL; + } + + /*unref the reference surface*/ + if (self->ref_frame != NULL) + { + mix_videoframe_unref (self->ref_frame); + self->ref_frame = NULL; + } + + LOG_V( "Release surfaces\n"); + + if (self->ci_shared_surfaces) + { + g_free (self->ci_shared_surfaces); + self->ci_shared_surfaces = NULL; + } + + if (self->surfaces) + { + g_free (self->surfaces); + self->surfaces = NULL; + } + + LOG_V( "vaDestroyContext\n"); + + va_status = vaDestroyContext (parent->va_display, parent->va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyContext\n"); + ret = MIX_RESULT_FAIL; + goto 
cleanup; + } + + LOG_V( "vaDestroyConfig\n"); + + va_status = vaDestroyConfig (parent->va_display, parent->va_config); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyConfig\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + +cleanup: + + parent->initialized = FALSE; + + g_mutex_unlock(parent->objectlock); + + LOG_V( "end\n"); + + return ret; +} + + +MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mix, + MixBuffer * bufin, MixIOVec * iovout) +{ + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus va_status = VA_STATUS_SUCCESS; + VADisplay va_display = NULL; + VAContextID va_context; + gulong surface = 0; + guint16 width, height; + + //MixVideoFrame * tmp_frame; + //guint8 *buf; + + if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { + LOG_E( + "mix == NUL) || bufin == NULL || iovout == NULL\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) + return MIX_RESULT_INVALID_PARAM; + + MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); + + va_display = parent->va_display; + va_context = parent->va_context; + width = parent->picture_width; + height = parent->picture_height; + + + LOG_I( "encoded_frames = %d\n", + mix->encoded_frames); + LOG_I( "is_intra = %d\n", + mix->is_intra); + LOG_I( "ci_frame_id = 0x%08x\n", + (guint) parent->ci_frame_id); + + LOG_V( + "Get Surface from the pool\n"); + + if (!parent->share_buf_mode) { + LOG_V( + "We are NOT in share buffer mode\n"); + + if (mix->ref_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + if (mix->rec_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + if (parent->need_display) { + mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + LOG_V( "Get Surface Done\n"); + + + VAImage src_image; + guint8 *pvbuf; + guint8 *dst_y; + guint8 *dst_uv; + int i,j; + + LOG_V( + "map source data to surface\n"); + + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videoframe_get_frame_id\n"); + goto cleanup; + } + + + LOG_I( + "surface id = 0x%08x\n", (guint) surface); + + va_status = vaDeriveImage(va_display, surface, &src_image); + //need to destroy + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDeriveImage\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + VAImage *image = &src_image; + + LOG_V( "vaDeriveImage Done\n"); + + + va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed to vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "vaImage information\n"); + LOG_I( + "image->pitches[0] = %d\n", image->pitches[0]); + LOG_I( + "image->pitches[1] = %d\n", image->pitches[1]); + LOG_I( + "image->offsets[0] = %d\n", image->offsets[0]); + LOG_I( + "image->offsets[1] = %d\n", image->offsets[1]); + LOG_I( + "image->num_planes = %d\n", image->num_planes); + LOG_I( + "image->width = %d\n", image->width); + LOG_I( + "image->height = %d\n", 
image->height); + + LOG_I( + "input buf size = %d\n", bufin->size); + + guint8 *inbuf = bufin->data; + + /*need to convert YUV420 to NV12*/ + dst_y = pvbuf +image->offsets[0]; + + for (i = 0; i < height; i ++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + + dst_uv = pvbuf + image->offsets[1]; + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; + dst_uv [j + 1] = + inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; + } + dst_uv += image->pitches[1]; + } + + va_status = vaUnmapBuffer(va_display, image->buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + va_status = vaDestroyImage(va_display, src_image.image_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDestroyImage\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "Map source data to surface done\n"); + + } + + else {//if (!parent->share_buf_mode) + + MixVideoFrame * frame = mix_videoframe_new(); + + if (mix->ref_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->ref_frame, frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used + { + LOG_E( + "get reference surface from pool failed\n"); + goto cleanup; + } + } + + if (mix->rec_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->rec_frame, frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get recontructed surface from pool failed\n"); + goto cleanup; + } + } + + //mix_videoframe_unref (mix->cur_frame); + + if (parent->need_display) { + mix->cur_frame = NULL; + } + + if (mix->cur_frame == NULL) + { + guint ci_idx; + memcpy (&ci_idx, bufin->data, bufin->size); + + LOG_I( + "surface_num = %d\n", mix->surface_num); + LOG_I( + "ci_frame_idx = %d\n", ci_idx); + + if (ci_idx > mix->surface_num - 2) { + LOG_E( + "the CI frame idx is too bigger than CI frame number\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); + if (ret != MIX_RESULT_SUCCESS) + + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (parent->surfacepool, &mix->cur_frame, frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get current working surface from pool failed\n"); + goto cleanup; + } + } + + ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) + + { + LOG_E( + "mix_videoframe_get_frame_id failed\n"); + goto cleanup; + } + + } + + LOG_V( "vaBeginPicture\n"); + LOG_I( "va_context = 0x%08x\n",(guint)va_context); + LOG_I( "surface = 0x%08x\n",(guint)surface); + LOG_I( "va_display = 0x%08x\n",(guint)va_display); + + iovout->data_size = 4; + iovout->data = (guchar*)g_malloc (iovout->data_size); + if (iovout->data == NULL) { + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + memset (iovout->data, 0, iovout->data_size); + + iovout->buffer_size = iovout->data_size; + + + if (parent->need_display) { + ret = 
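+        /*
+         * Two notes on the code above. In the non-share-buffer path the
+         * input is planar I420 (Y plane, then U plane, then V plane)
+         * while the VA surface expects NV12 (Y plane, then interleaved
+         * UV), hence the copy that picks U bytes from
+         * inbuf + width*height and V bytes from inbuf + width*height*5/4
+         * and interleaves them into dst_uv. And the preview encoder
+         * produces no real bitstream: iovout is a zeroed 4-byte
+         * placeholder, the useful output being the frame queued to the
+         * frame manager just below.
+         */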
mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + goto cleanup; + } + + ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_framemanager_enqueue\n"); + goto cleanup; + } + } + + + if (!(parent->need_display)) { + mix_videoframe_unref (mix->cur_frame); + mix->cur_frame = NULL; + } + mix->encoded_frames ++; + +cleanup: + + if (ret != MIX_RESULT_SUCCESS) { + if (iovout->data) { + g_free (iovout->data); + iovout->data = NULL; + } + } + + LOG_V( "end\n"); + + return ret; +} diff --git a/mix_video/src/mixvideoformatqueue.h b/mix_video/src/mixvideoformatqueue.h index f21edfb..97c2b08 100644 --- a/mix_video/src/mixvideoformatqueue.h +++ b/mix_video/src/mixvideoformatqueue.h @@ -11,18 +11,13 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixbuffer.h" -G_BEGIN_DECLS - typedef struct _MixInputBufferEntry MixInputBufferEntry; struct _MixInputBufferEntry { - /*< private > */ - MixBuffer *buf; - guint64 timestamp; - + /*< private > */ + MixBuffer *buf; + guint64 timestamp; }; -G_END_DECLS - #endif /* __MIX_VIDEOFORMATQUEUE_H__ */ diff --git a/mix_video/src/mixvideoframe.c b/mix_video/src/mixvideoframe.c deleted file mode 100644 index 2891cf0..0000000 --- a/mix_video/src/mixvideoframe.c +++ /dev/null @@ -1,502 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideoframe - * @short_description: MI-X Video Frame Object - * - * - * The MixVideoFrame object will be created by - * MixVideo and provided to the MMF/App in the - * MixVideo mix_video_get_frame() function. - * - * - * mix_video_release_frame() must be used - * to release frame object returned from - * mix_video_get_frame(). Caller must not - * use mix_videoframe_ref() or mix_videoframe_unref() - * or adjust the reference count directly in any way. - * This object can be supplied in the mix_video_render() - * function to render the associated video frame. - * The MMF/App can release this object when it no longer - * needs to display/re-display this frame. 
- * - */ - - -#include -#ifndef ANDROID -#include -#endif -#include "mixvideolog.h" -#include "mixvideoframe.h" -#include "mixvideoframe_private.h" - -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } - -static GType _mix_videoframe_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_videoframe_type = g_define_type_id; } - -gboolean mix_videoframe_copy(MixParams * target, const MixParams * src); -MixParams *mix_videoframe_dup(const MixParams * obj); -gboolean mix_videoframe_equal(MixParams * first, MixParams * second); -static void mix_videoframe_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoFrame, mix_videoframe, MIX_TYPE_PARAMS, - _do_init); - -#define VIDEOFRAME_PRIVATE(self) ((MixVideoFramePrivate *)((self)->reserved1)) -static void mix_videoframe_init(MixVideoFrame * self) { - /* initialize properties here */ - self->frame_id = VA_INVALID_SURFACE; - self->timestamp = 0; - self->discontinuity = FALSE; - - MixVideoFramePrivate *priv = MIX_VIDEOFRAME_GET_PRIVATE(self); - self->reserved1 = priv; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; - - /* set pool pointer in private structure to NULL */ - priv -> pool = NULL; - - /* set stuff for skipped frames */ - priv -> is_skipped = FALSE; - priv -> real_frame = NULL; - priv -> sync_flag = FALSE; - priv -> frame_structure = VA_FRAME_PICTURE; - - priv -> va_display = NULL; - - g_static_rec_mutex_init (&priv -> lock); -} - -static void mix_videoframe_class_init(MixVideoFrameClass * klass) { - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); - - mixparams_class->finalize = mix_videoframe_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_videoframe_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_videoframe_dup; - mixparams_class->equal = (MixParamsEqualFunction) mix_videoframe_equal; - - /* Register and allocate the space the private structure for this object */ - g_type_class_add_private(mixparams_class, sizeof(MixVideoFramePrivate)); - -} - -MixVideoFrame * -mix_videoframe_new(void) { - MixVideoFrame *ret = (MixVideoFrame *) g_type_create_instance( - MIX_TYPE_VIDEOFRAME); - return ret; -} - -void mix_videoframe_finalize(MixParams * obj) { - /* clean up here. 
*/ - MixVideoFrame *self = MIX_VIDEOFRAME (obj); - MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(self); - - g_static_rec_mutex_free (&priv->lock); - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} - -MixVideoFrame * -mix_videoframe_ref(MixVideoFrame * obj) { - - MixVideoFrame *ret = NULL; - MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); - g_static_rec_mutex_lock(&priv->lock); - LOG_I("obj %x, new refcount is %d\n", (guint) obj, - MIX_PARAMS(obj)->refcount + 1); - - ret = (MixVideoFrame *) mix_params_ref(MIX_PARAMS(obj)); - g_static_rec_mutex_unlock (&priv->lock); - return ret; -} - -void mix_videoframe_unref(MixVideoFrame * obj) { - - if(obj == NULL) { - LOG_E("obj is NULL\n"); - return; - } - - MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); - g_static_rec_mutex_lock(&priv->lock); - - LOG_I("obj %x, frame id %d, new refcount is %d\n", (guint) obj, - (guint) obj->frame_id, MIX_PARAMS(obj)->refcount - 1); - - // Check if we have reduced to 1, in which case we add ourselves to free pool - // but only do this for real frames, not skipped frames - if (((MIX_PARAMS(obj)->refcount - 1) == 1) && (!(priv -> is_skipped))) { - - LOG_I("Adding obj %x, frame id %d back to pool\n", (guint) obj, - (guint) obj->frame_id); - - MixSurfacePool *pool = NULL; - pool = priv -> pool; - if(pool == NULL) { - LOG_E("pool is NULL\n"); - g_static_rec_mutex_unlock (&priv->lock); - return; - } - - mix_videoframe_reset(obj); - mix_surfacepool_put(pool, obj); - } - - //If this is a skipped frame that is being deleted, release the real frame - if (((MIX_PARAMS(obj)->refcount - 1) == 0) && (priv -> is_skipped)) { - - LOG_I("skipped frame obj %x, releasing real frame %x \n", - (guint) obj, (guint) priv->real_frame); - - mix_videoframe_unref(priv -> real_frame); - } - - // Unref through base class - mix_params_unref(MIX_PARAMS(obj)); - g_static_rec_mutex_unlock (&priv->lock); -} - -/** - * mix_videoframe_dup: - * @obj: a #MixVideoFrame object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixParams * -mix_videoframe_dup(const MixParams * obj) { - MixParams *ret = NULL; - - if (MIX_IS_VIDEOFRAME(obj)) { - MixVideoFrame *duplicate = mix_videoframe_new(); - if (mix_videoframe_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { - ret = MIX_PARAMS(duplicate); - } else { - mix_videoframe_unref(duplicate); - } - } - return ret; -} - -/** - * mix_videoframe_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videoframe_copy(MixParams * target, const MixParams * src) { - MixVideoFrame *this_target, *this_src; - - if (MIX_IS_VIDEOFRAME(target) && MIX_IS_VIDEOFRAME(src)) { - // Cast the base object to this child object - this_target = MIX_VIDEOFRAME(target); - this_src = MIX_VIDEOFRAME(src); - - // Free the existing properties - - // Duplicate string - this_target->frame_id = this_src->frame_id; - this_target->timestamp = this_src->timestamp; - this_target->discontinuity = this_src->discontinuity; - - // Now chainup base class - if (parent_class->copy) { - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - return TRUE; - } - } - return FALSE; -} - -/** - * mix_videoframe_equal: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. 
- */ -gboolean mix_videoframe_equal(MixParams * first, MixParams * second) { - gboolean ret = FALSE; - MixVideoFrame *this_first, *this_second; - - if (MIX_IS_VIDEOFRAME(first) && MIX_IS_VIDEOFRAME(second)) { - // Deep compare - // Cast the base object to this child object - - this_first = MIX_VIDEOFRAME(first); - this_second = MIX_VIDEOFRAME(second); - - /* TODO: add comparison for other properties */ - if (this_first->frame_id == this_second->frame_id - && this_first->timestamp == this_second->timestamp - && this_first->discontinuity == this_second->discontinuity) { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = klass->equal(first, second); - else - ret = TRUE; - } - } - - return ret; -} - -#define MIX_VIDEOFRAME_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOFRAME(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOFRAME(obj)) return MIX_RESULT_FAIL; \ - - -/* TODO: Add getters and setters for other properties. The following is just an exmaple, not implemented yet. */ -MIX_RESULT mix_videoframe_set_frame_id(MixVideoFrame * obj, gulong frame_id) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->frame_id = frame_id; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_frame_id(MixVideoFrame * obj, gulong * frame_id) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_id); - *frame_id = obj->frame_id; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_ci_frame_idx (MixVideoFrame * obj, guint ci_frame_idx) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->ci_frame_idx = ci_frame_idx; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_ci_frame_idx (MixVideoFrame * obj, guint * ci_frame_idx) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, ci_frame_idx); - *ci_frame_idx = obj->ci_frame_idx; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_timestamp(MixVideoFrame * obj, guint64 timestamp) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - - obj->timestamp = timestamp; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_timestamp(MixVideoFrame * obj, - guint64 * timestamp) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, timestamp); - *timestamp = obj->timestamp; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_discontinuity(MixVideoFrame * obj, - gboolean discontinuity) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->discontinuity = discontinuity; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_discontinuity(MixVideoFrame * obj, - gboolean * discontinuity) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, discontinuity); - *discontinuity = obj->discontinuity; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_frame_structure(MixVideoFrame * obj, - guint32 frame_structure) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); - priv->frame_structure = frame_structure; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, - guint32* frame_structure) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_structure); - MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); - *frame_structure = priv->frame_structure; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_pool(MixVideoFrame * obj, MixSurfacePool * pool) { - - /* set pool pointer in private structure 
*/ - VIDEOFRAME_PRIVATE(obj) -> pool = pool; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_frame_type(MixVideoFrame *obj, - MixFrameType frame_type) { - - VIDEOFRAME_PRIVATE(obj) -> frame_type = frame_type; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_frame_type(MixVideoFrame *obj, - MixFrameType *frame_type) { - - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, frame_type); - - *frame_type = VIDEOFRAME_PRIVATE(obj) -> frame_type; - - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT mix_videoframe_set_is_skipped(MixVideoFrame *obj, - gboolean is_skipped) { - - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - VIDEOFRAME_PRIVATE(obj) -> is_skipped = is_skipped; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_is_skipped(MixVideoFrame *obj, - gboolean *is_skipped) { - - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, is_skipped); - - *is_skipped = VIDEOFRAME_PRIVATE(obj) -> is_skipped; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_real_frame(MixVideoFrame *obj, - MixVideoFrame *real) { - - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - VIDEOFRAME_PRIVATE(obj) -> real_frame = real; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_real_frame(MixVideoFrame *obj, - MixVideoFrame **real) { - - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, real); - - *real = VIDEOFRAME_PRIVATE(obj) -> real_frame; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_reset(MixVideoFrame *obj) { - - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); - - obj->timestamp = 0; - obj->discontinuity = FALSE; - - priv -> is_skipped = FALSE; - priv -> real_frame = NULL; - priv -> sync_flag = FALSE; - priv -> frame_structure = VA_FRAME_PICTURE; - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videoframe_set_sync_flag(MixVideoFrame *obj, gboolean sync_flag) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); - - priv -> sync_flag = sync_flag; - if (priv->real_frame && priv->real_frame != obj) { - mix_videoframe_set_sync_flag(priv->real_frame, sync_flag); - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_sync_flag(MixVideoFrame *obj, gboolean *sync_flag) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, sync_flag); - MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); - if (priv->real_frame && priv->real_frame != obj) { - return mix_videoframe_get_sync_flag(priv->real_frame, sync_flag); - } else { - *sync_flag = priv -> sync_flag; - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_vadisplay(MixVideoFrame * obj, void *va_display) { - - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); - - priv -> va_display = va_display; - if (priv->real_frame && priv->real_frame != obj) { - mix_videoframe_set_vadisplay(priv->real_frame, va_display); - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_vadisplay(MixVideoFrame * obj, void **va_display) { - - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, va_display); - MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); - - if (priv->real_frame && priv->real_frame != obj) { - return mix_videoframe_get_vadisplay(priv->real_frame, va_display); - } else { - *va_display = priv -> va_display; - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_displayorder(MixVideoFrame *obj, guint32 displayorder) { - - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); - - priv -> displayorder 
= displayorder; - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videoframe_get_displayorder(MixVideoFrame *obj, guint32 *displayorder) { - - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, displayorder); - MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj); - - *displayorder = priv -> displayorder; - return MIX_RESULT_SUCCESS; -} - - - diff --git a/mix_video/src/mixvideoframe.cpp b/mix_video/src/mixvideoframe.cpp new file mode 100644 index 0000000..82f774b --- /dev/null +++ b/mix_video/src/mixvideoframe.cpp @@ -0,0 +1,362 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideoframe + * @short_description: MI-X Video Frame Object + * + * + * The MixVideoFrame object will be created by + * MixVideo and provided to the MMF/App in the + * MixVideo mix_video_get_frame() function. + * + * + * mix_video_release_frame() must be used + * to release frame object returned from + * mix_video_get_frame(). Caller must not + * use mix_videoframe_ref() or mix_videoframe_unref() + * or adjust the reference count directly in any way. + * This object can be supplied in the mix_video_render() + * function to render the associated video frame. + * The MMF/App can release this object when it no longer + * needs to display/re-display this frame. 
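+ *
+ * A typical consumer-side sequence looks like this (illustrative
+ * sketch only; "video" and the omitted error checks are assumed
+ * context, not code from this library):
+ *
+ *   MixVideoFrame *frame = NULL;
+ *   mix_video_get_frame(video, &frame);      // frame owned by MixVideo
+ *   mix_video_render(video, render_params, frame);
+ *   mix_video_release_frame(video, frame);   // never unref directly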
+ * + */ + + +#include +#ifndef ANDROID +#include +#endif +#include "mixvideolog.h" +//#include "mixvideoframe_private.h" +#include "mixsurfacepool.h" +#include "mixvideoframe.h" + +#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } + +#define MIX_VIDEOFRAME_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOFRAME(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOFRAME(obj)) return MIX_RESULT_FAIL; \ + + +MixVideoFrame::MixVideoFrame() + :frame_id(VA_INVALID_SURFACE) + ,timestamp(0) + ,discontinuity(FALSE) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) + ,pool(NULL) + ,is_skipped(FALSE) + ,real_frame(NULL) + ,sync_flag(FALSE) + ,frame_structure(VA_FRAME_PICTURE) + ,va_display(NULL){ + g_static_rec_mutex_init (&lock); +} + +MixVideoFrame::~MixVideoFrame() { + g_static_rec_mutex_free (&lock); +} + +gboolean MixVideoFrame::copy(MixParams *target) const { + gboolean ret = FALSE; + MixVideoFrame * this_target = MIX_VIDEOFRAME(target); + if (NULL != this_target) { + this_target->frame_id = this->frame_id; + this_target->timestamp = this->timestamp; + this_target->discontinuity = this->discontinuity; + // chain up base class + ret = MixParams::copy(target); + } + return ret; +} + +gboolean MixVideoFrame::equal(MixParams* obj) const { + gboolean ret = FALSE; + MixVideoFrame * this_obj = MIX_VIDEOFRAME(obj); + if (NULL != this_obj) { + /* TODO: add comparison for other properties */ + if (this->frame_id == this_obj->frame_id && + this->timestamp == this_obj->timestamp && + this->discontinuity == this_obj->discontinuity) { + ret = MixParams::equal(this_obj); + } + } + return ret; +} + +MixParams* MixVideoFrame::dup() const { + MixParams *ret = new MixVideoFrame(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; +} + +void MixVideoFrame::Lock() { + g_static_rec_mutex_lock(&lock); +} +void MixVideoFrame::Unlock() { + g_static_rec_mutex_unlock (&lock); +} + +MixVideoFrame * mix_videoframe_new(void) { + return new MixVideoFrame(); +} + + + +MixVideoFrame * mix_videoframe_ref(MixVideoFrame * obj) { + if (NULL != obj) { + obj->Lock(); + LOG_I("obj %x, new refcount is %d\n", (guint) obj, + obj->GetRefCount() + 1); + obj->Ref(); + obj->Unlock(); + } + return obj; +} + +void mix_videoframe_unref(MixVideoFrame * obj) { + if(NULL == obj) { + LOG_E("obj is NULL\n"); + return; + } + + obj->Lock(); + LOG_I("obj %x, frame id %d, new refcount is %d\n", (guint) obj, + (guint) obj->frame_id, obj->GetRefCount() - 1); + + // Check if we have reduced to 1, in which case we add ourselves to free pool + // but only do this for real frames, not skipped frames + if (((obj->GetRefCount() - 1) == 1) && (!(obj->is_skipped))) { + LOG_I("Adding obj %x, frame id %d back to pool\n", (guint) obj, + (guint) obj->frame_id); + MixSurfacePool *pool = obj->pool; + if(pool == NULL) { + LOG_E("pool is NULL\n"); + obj->Unlock(); + return; + } + mix_videoframe_reset(obj); + mix_surfacepool_put(pool, obj); + } + + //If this is a skipped frame that is being deleted, release the real frame + if (((obj->GetRefCount() - 1) == 0) && (obj->is_skipped)) { + LOG_I("skipped frame obj %x, releasing real frame %x \n", + (guint) obj, (guint) obj->real_frame); + mix_videoframe_unref(obj->real_frame); + } + + // Unref through base class + obj->Unref(); + obj->Unlock(); +} + + +/* TODO: Add getters and setters for other properties. 
The following is just an exmaple, not implemented yet. */ +MIX_RESULT mix_videoframe_set_frame_id( + MixVideoFrame * obj, gulong frame_id) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->frame_id = frame_id; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_frame_id( + MixVideoFrame * obj, gulong * frame_id) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_id); + *frame_id = obj->frame_id; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_ci_frame_idx ( + MixVideoFrame * obj, guint ci_frame_idx) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->ci_frame_idx = ci_frame_idx; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_ci_frame_idx ( + MixVideoFrame * obj, guint * ci_frame_idx) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, ci_frame_idx); + *ci_frame_idx = obj->ci_frame_idx; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_timestamp( + MixVideoFrame * obj, guint64 timestamp) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->timestamp = timestamp; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_timestamp( + MixVideoFrame * obj, guint64 * timestamp) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, timestamp); + *timestamp = obj->timestamp; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_discontinuity( + MixVideoFrame * obj, gboolean discontinuity) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->discontinuity = discontinuity; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_discontinuity( + MixVideoFrame * obj, gboolean * discontinuity) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, discontinuity); + *discontinuity = obj->discontinuity; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_frame_structure( + MixVideoFrame * obj, guint32 frame_structure) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->frame_structure = frame_structure; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_frame_structure( + MixVideoFrame * obj, guint32* frame_structure) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_structure); + *frame_structure = obj->frame_structure; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_pool( + MixVideoFrame * obj, MixSurfacePool * pool) { + /* set pool pointer in private structure */ + obj->pool = pool; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_frame_type( + MixVideoFrame *obj, MixFrameType frame_type) { + obj->frame_type = frame_type; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_frame_type( + MixVideoFrame *obj, MixFrameType *frame_type) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, frame_type); + *frame_type = obj->frame_type; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_is_skipped( + MixVideoFrame *obj, gboolean is_skipped) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->is_skipped = is_skipped; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_is_skipped( + MixVideoFrame *obj, gboolean *is_skipped) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, is_skipped); + *is_skipped = obj->is_skipped; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_set_real_frame( + MixVideoFrame *obj, MixVideoFrame *real) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->real_frame = real; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoframe_get_real_frame( + MixVideoFrame *obj, MixVideoFrame **real) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, real); + *real = obj->real_frame; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT 
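+/*
+ * reset() below is invoked from mix_videoframe_unref() right before a
+ * frame is recycled into its MixSurfacePool: the per-use state
+ * (timestamp, discontinuity, skip/sync flags, frame_structure) is
+ * cleared, while identity fields such as frame_id and the pool
+ * pointer are kept so the frame can be handed out again.
+ */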
+MIX_RESULT mix_videoframe_reset(MixVideoFrame *obj) {
+    MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj);
+    obj->timestamp = 0;
+    obj->discontinuity = FALSE;
+    obj->is_skipped = FALSE;
+    obj->real_frame = NULL;
+    obj->sync_flag = FALSE;
+    obj->frame_structure = VA_FRAME_PICTURE;
+    return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_videoframe_set_sync_flag(
+    MixVideoFrame *obj, gboolean sync_flag) {
+    MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj);
+    obj->sync_flag = sync_flag;
+    if (obj->real_frame && obj->real_frame != obj) {
+        mix_videoframe_set_sync_flag(obj->real_frame, sync_flag);
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_get_sync_flag(
+    MixVideoFrame *obj, gboolean *sync_flag) {
+    MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, sync_flag);
+    if (obj->real_frame && obj->real_frame != obj) {
+        return mix_videoframe_get_sync_flag(obj->real_frame, sync_flag);
+    } else {
+        *sync_flag = obj->sync_flag;
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_set_vadisplay(
+    MixVideoFrame * obj, void *va_display) {
+    MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj);
+    obj->va_display = va_display;
+    if (obj->real_frame && obj->real_frame != obj) {
+        mix_videoframe_set_vadisplay(obj->real_frame, va_display);
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_get_vadisplay(
+    MixVideoFrame * obj, void **va_display) {
+    MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, va_display);
+    if (obj->real_frame && obj->real_frame != obj) {
+        return mix_videoframe_get_vadisplay(obj->real_frame, va_display);
+    } else {
+        *va_display = obj->va_display;
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_set_displayorder(
+    MixVideoFrame *obj, guint32 displayorder) {
+    MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj);
+    obj->displayorder = displayorder;
+    return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_videoframe_get_displayorder(
+    MixVideoFrame *obj, guint32 *displayorder) {
+    MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, displayorder);
+    *displayorder = obj->displayorder;
+    return MIX_RESULT_SUCCESS;
+}
+
+
+
diff --git a/mix_video/src/mixvideoframe.h b/mix_video/src/mixvideoframe.h
index c3089a9..9c5c084 100644
--- a/mix_video/src/mixvideoframe.h
+++ b/mix_video/src/mixvideoframe.h
@@ -12,20 +12,13 @@
 #include
 #include "mixvideodef.h"
 
-G_BEGIN_DECLS
-
-/**
- * MIX_TYPE_VIDEOFRAME:
- *
- * Get type of class.
- */
-#define MIX_TYPE_VIDEOFRAME (mix_videoframe_get_type ())
+class MixSurfacePool;
 
 /**
  * MIX_VIDEOFRAME:
  * @obj: object to be type-casted.
  */
-#define MIX_VIDEOFRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFRAME, MixVideoFrame))
+#define MIX_VIDEOFRAME(obj) (reinterpret_cast<MixVideoFrame*>(obj))
 
 /**
  * MIX_IS_VIDEOFRAME:
@@ -33,44 +26,32 @@ G_BEGIN_DECLS
  *
  * Checks if the given object is an instance of #MixVideoFrame
  */
-#define MIX_IS_VIDEOFRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFRAME))
+#define MIX_IS_VIDEOFRAME(obj) ((NULL != MIX_VIDEOFRAME(obj)) ? TRUE : FALSE)
 
-/**
- * MIX_VIDEOFRAME_CLASS:
- * @klass: class to be type-casted.
- */
-#define MIX_VIDEOFRAME_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFRAME, MixVideoFrameClass))
-
-/**
- * MIX_IS_VIDEOFRAME_CLASS:
- * @klass: a class.
- *
- * Checks if the given class is #MixVideoFrameClass
- */
-#define MIX_IS_VIDEOFRAME_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFRAME))
-/**
- * MIX_VIDEOFRAME_GET_CLASS:
- * @obj: a #MixVideoFrame object.
- *
- * Get the class instance of the object.
- */ -#define MIX_VIDEOFRAME_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFRAME, MixVideoFrameClass)) -typedef struct _MixVideoFrame MixVideoFrame; -typedef struct _MixVideoFrameClass MixVideoFrameClass; +typedef enum _MixFrameType { + TYPE_I, + TYPE_P, + TYPE_B, + TYPE_INVALID +} MixFrameType; /** * MixVideoFrame: * * MI-X VideoConfig Parameter object */ -struct _MixVideoFrame { - /*< public > */ - MixParams parent; - - /*< public > */ - +class MixVideoFrame : public MixParams { +public: + MixVideoFrame(); + ~MixVideoFrame(); + virtual gboolean copy(MixParams *target) const; + virtual gboolean equal(MixParams* obj) const; + virtual MixParams* dup() const; + void Lock(); + void Unlock(); +public: /* ID associated with the decoded frame */ gulong frame_id; @@ -101,28 +82,21 @@ struct _MixVideoFrame { /* Reserved for future use */ void *reserved4; -}; -/** - * MixVideoFrameClass: - * - * MI-X VideoConfig object class - */ -struct _MixVideoFrameClass { - /*< public > */ - MixParamsClass parent_class; +public: + // from structure MixVideoFramePrivate + MixSurfacePool *pool; + MixFrameType frame_type; + gboolean is_skipped; + MixVideoFrame *real_frame; + GStaticRecMutex lock; + gboolean sync_flag; + guint32 frame_structure; // 0: frame, 1: top field, 2: bottom field + void *va_display; + guint32 displayorder; - /* class members */ }; -/** - * mix_videoframe_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_videoframe_get_type(void); - /** * mix_videoframe_new: * @returns: A newly allocated instance of #MixVideoFrame @@ -237,6 +211,45 @@ MIX_RESULT mix_videoframe_set_vadisplay(MixVideoFrame * obj, void *va_display); MIX_RESULT mix_videoframe_get_vadisplay(MixVideoFrame * obj, void **va_display); MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, guint32* frame_structure); -G_END_DECLS +// from private structure MixVideoFramePrivate +/* Private functions */ +MIX_RESULT +mix_videoframe_set_pool (MixVideoFrame *obj, MixSurfacePool *pool); + +MIX_RESULT +mix_videoframe_set_frame_type (MixVideoFrame *obj, MixFrameType frame_type); + +MIX_RESULT +mix_videoframe_get_frame_type (MixVideoFrame *obj, MixFrameType *frame_type); + +MIX_RESULT +mix_videoframe_set_is_skipped (MixVideoFrame *obj, gboolean is_skipped); + +MIX_RESULT +mix_videoframe_get_is_skipped (MixVideoFrame *obj, gboolean *is_skipped); + +MIX_RESULT +mix_videoframe_set_real_frame (MixVideoFrame *obj, MixVideoFrame *real); + +MIX_RESULT +mix_videoframe_get_real_frame (MixVideoFrame *obj, MixVideoFrame **real); + +MIX_RESULT +mix_videoframe_reset(MixVideoFrame *obj); + +MIX_RESULT +mix_videoframe_set_sync_flag(MixVideoFrame *obj, gboolean sync_flag); + +MIX_RESULT +mix_videoframe_get_sync_flag(MixVideoFrame *obj, gboolean *sync_flag); + +MIX_RESULT +mix_videoframe_set_frame_structure(MixVideoFrame * obj, guint32 frame_structure); + +MIX_RESULT +mix_videoframe_set_displayorder(MixVideoFrame *obj, guint32 displayorder); + +MIX_RESULT +mix_videoframe_get_displayorder(MixVideoFrame *obj, guint32 *displayorder); #endif /* __MIX_VIDEOFRAME_H__ */ diff --git a/mix_video/src/mixvideoframe_private.h b/mix_video/src/mixvideoframe_private.h index bb8bd1e..54d8a3e 100644 --- a/mix_video/src/mixvideoframe_private.h +++ b/mix_video/src/mixvideoframe_private.h @@ -8,12 +8,10 @@ No license under any patent, copyright, trade secret or other intellectual prope #ifndef __MIX_VIDEOFRAME_PRIVATE_H__ #define __MIX_VIDEOFRAME_PRIVATE_H__ - +#if 0 #include "mixvideoframe.h" #include "mixsurfacepool.h" 
-G_BEGIN_DECLS - typedef enum _MixFrameType { TYPE_I, @@ -22,30 +20,23 @@ typedef enum _MixFrameType TYPE_INVALID } MixFrameType; -typedef struct _MixVideoFramePrivate MixVideoFramePrivate; - -struct _MixVideoFramePrivate +class MixVideoFramePrivate { - /*< private > */ - MixSurfacePool *pool; - MixFrameType frame_type; - gboolean is_skipped; - MixVideoFrame *real_frame; - GStaticRecMutex lock; - gboolean sync_flag; - guint32 frame_structure; // 0: frame, 1: top field, 2: bottom field - void *va_display; - guint32 displayorder; +public: + MixVideoFramePrivate() + :pool(NULL) + ,is_skipped(FALSE) + ,real_frame(NULL) + ,sync_flag(FALSE) + ,frame_structure(VA_FRAME_PICTURE) + ,va_display(NULL) + {} +public: + /*< private > */ + }; -/** -* MIX_VIDEOFRAME_PRIVATE: -* -* Get private structure of this class. -* @obj: class object for which to get private data. -*/ -#define MIX_VIDEOFRAME_GET_PRIVATE(obj) \ - (G_TYPE_INSTANCE_GET_PRIVATE ((obj), MIX_TYPE_VIDEOFRAME, MixVideoFramePrivate)) + /* Private functions */ @@ -87,8 +78,6 @@ mix_videoframe_set_displayorder(MixVideoFrame *obj, guint32 displayorder); MIX_RESULT mix_videoframe_get_displayorder(MixVideoFrame *obj, guint32 *displayorder); - - -G_END_DECLS +#endif #endif /* __MIX_VIDEOFRAME_PRIVATE_H__ */ diff --git a/mix_video/src/mixvideoinitparams.c b/mix_video/src/mixvideoinitparams.c deleted file mode 100644 index 603b6cd..0000000 --- a/mix_video/src/mixvideoinitparams.c +++ /dev/null @@ -1,222 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideoinitparams - * @short_description: MI-X Video Initialization Parameters - * - * The MixVideoInitParams object will be created by the MMF/App - * and provided in the mix_video_initialize() function. - * The get and set methods for the properties will be available for - * the caller to set and get information used at initialization time. 
- */ - -#include "mixvideoinitparams.h" - -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } - -static GType _mix_videoinitparams_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_videoinitparams_type = g_define_type_id; } - -gboolean mix_videoinitparams_copy(MixParams * target, const MixParams * src); -MixParams *mix_videoinitparams_dup(const MixParams * obj); -gboolean mix_videoinitparams_equal(MixParams * first, MixParams * second); -static void mix_videoinitparams_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoInitParams, mix_videoinitparams, - MIX_TYPE_PARAMS, _do_init); - -static void mix_videoinitparams_init(MixVideoInitParams * self) { - - /* Initialize member varibles */ - self->display = NULL; - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void mix_videoinitparams_class_init(MixVideoInitParamsClass * klass) { - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); - - mixparams_class->finalize = mix_videoinitparams_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_videoinitparams_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_videoinitparams_dup; - mixparams_class->equal = (MixParamsEqualFunction) mix_videoinitparams_equal; -} - -MixVideoInitParams * -mix_videoinitparams_new(void) { - MixVideoInitParams *ret = (MixVideoInitParams *) g_type_create_instance( - MIX_TYPE_VIDEOINITPARAMS); - - return ret; -} - -void mix_videoinitparams_finalize(MixParams * obj) { - /* clean up here. */ - - MixVideoInitParams *self = MIX_VIDEOINITPARAMS(obj); - - /* unref display */ - if (self->display) { - mix_display_unref(self->display); - self->display = NULL; - } - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} - -MixVideoInitParams * -mix_videoinitparams_ref(MixVideoInitParams * mix) { - return (MixVideoInitParams *) mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_videoinitparams_dup: - * @obj: a #MixVideoInitParams object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixParams * -mix_videoinitparams_dup(const MixParams * obj) { - MixParams *ret = NULL; - if (MIX_IS_VIDEOINITPARAMS(obj)) { - MixVideoInitParams *duplicate = mix_videoinitparams_new(); - if (mix_videoinitparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { - ret = MIX_PARAMS(duplicate); - } else { - mix_videoinitparams_unref(duplicate); - } - } - return ret; -} - -/** - * mix_videoinitparams_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videoinitparams_copy(MixParams * target, const MixParams * src) { - MixVideoInitParams *this_target, *this_src; - if (MIX_IS_VIDEOINITPARAMS(target) && MIX_IS_VIDEOINITPARAMS(src)) { - /* Cast the base object to this child object */ - this_target = MIX_VIDEOINITPARAMS(target); - this_src = MIX_VIDEOINITPARAMS(src); - /* Copy properties from source to target. 
*/ - - /* duplicate display */ - - this_target->display = mix_display_dup(this_src->display); - - /* Now chainup base class */ - if (parent_class->copy) { - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - return TRUE; - } - } - return FALSE; -} - -/** - * mix_videoinitparams_equal: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videoinitparams_equal(MixParams * first, MixParams * second) { - gboolean ret = FALSE; - MixVideoInitParams *this_first, *this_second; - this_first = MIX_VIDEOINITPARAMS(first); - this_second = MIX_VIDEOINITPARAMS(second); - if (MIX_IS_VIDEOINITPARAMS(first) && MIX_IS_VIDEOINITPARAMS(second)) { - // Compare member variables - if (!this_first->display && !this_second->display) { - ret = TRUE; - } else if (this_first->display && this_second->display) { - - /* compare MixDisplay */ - ret = mix_display_equal(this_first->display, this_second->display); - } - - if (ret == FALSE) { - return FALSE; - } - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - } - return ret; -} - -#define MIX_VIDEOINITPARAMS_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOINITPARAMS(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOINITPARAMS_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOINITPARAMS(obj)) return MIX_RESULT_FAIL; \ - -MIX_RESULT mix_videoinitparams_set_display(MixVideoInitParams * obj, - MixDisplay * display) { - MIX_VIDEOINITPARAMS_SETTER_CHECK_INPUT (obj); - - if(obj->display) { - mix_display_unref(obj->display); - } - obj->display = NULL; - - if(display) { - /* obj->display = mix_display_dup(display); - if(!obj->display) { - return MIX_RESULT_NO_MEMORY; - }*/ - - obj->display = mix_display_ref(display); - } - - return MIX_RESULT_SUCCESS; -} - -/* - Caller is responsible to use g_free to free the memory - */ -MIX_RESULT mix_videoinitparams_get_display(MixVideoInitParams * obj, - MixDisplay ** display) { - MIX_VIDEOINITPARAMS_GETTER_CHECK_INPUT (obj, display); - - *display = NULL; - if(obj->display) { - /* *display = mix_display_dup(obj->display); - if(!*display) { - return MIX_RESULT_NO_MEMORY; - }*/ - *display = mix_display_ref(obj->display); - } - - return MIX_RESULT_SUCCESS; -} diff --git a/mix_video/src/mixvideoinitparams.cpp b/mix_video/src/mixvideoinitparams.cpp new file mode 100644 index 0000000..bcc282c --- /dev/null +++ b/mix_video/src/mixvideoinitparams.cpp @@ -0,0 +1,126 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
+ + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideoinitparams + * @short_description: MI-X Video Initialization Parameters + * + * The MixVideoInitParams object will be created by the MMF/App + * and provided in the mix_video_initialize() function. + * The get and set methods for the properties will be available for + * the caller to set and get information used at initialization time. + */ + +#include "mixvideoinitparams.h" + +#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } + +#define MIX_VIDEOINITPARAMS_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOINITPARAMS(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOINITPARAMS_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOINITPARAMS(obj)) return MIX_RESULT_FAIL; \ + +MixVideoInitParams::MixVideoInitParams() + :display(NULL) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { +} +MixVideoInitParams::~MixVideoInitParams() { + /* unref display */ + if (this->display) { + mix_display_unref(this->display); + this->display = NULL; + } +} + +gboolean MixVideoInitParams::copy(MixParams *target) const { + gboolean ret = FALSE; + MixVideoInitParams * this_target = MIX_VIDEOINITPARAMS(target); + if (NULL != this_target) { + /* duplicate display */ + this_target->display = mix_display_dup(this->display); + // chain up base class + ret = MixParams::copy(target); + } + return ret; +} + +gboolean MixVideoInitParams::equal(MixParams* obj) const { + gboolean ret = FALSE; + MixVideoInitParams * this_obj = MIX_VIDEOINITPARAMS(obj); + if (NULL != this_obj) { + /* TODO: add comparison for other properties */ + if ((NULL == this->display && NULL == this_obj->display) || + mix_display_equal(this->display, this_obj->display)) { + ret = MixParams::equal(this_obj); + } + } + return ret; +} + +MixParams* MixVideoInitParams::dup() const { + MixParams *ret = new MixVideoInitParams(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; +} + +MixVideoInitParams * mix_videoinitparams_new(void) { + return new MixVideoInitParams(); +} + +MixVideoInitParams * +mix_videoinitparams_ref(MixVideoInitParams * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} + +MIX_RESULT mix_videoinitparams_set_display( + MixVideoInitParams * obj, MixDisplay * display) { + MIX_VIDEOINITPARAMS_SETTER_CHECK_INPUT (obj); + if(obj->display) { + mix_display_unref(obj->display); + } + obj->display = NULL; + if(display) { + /* obj->display = mix_display_dup(display); + if(!obj->display) { + return MIX_RESULT_NO_MEMORY; + }*/ + + obj->display = mix_display_ref(display); + } + return MIX_RESULT_SUCCESS; +} + +/* + Caller is responsible to use g_free to free the memory + */ +MIX_RESULT mix_videoinitparams_get_display( + MixVideoInitParams * obj, MixDisplay ** display) { + MIX_VIDEOINITPARAMS_GETTER_CHECK_INPUT (obj, display); + *display = NULL; + if(obj->display) { + /* *display = mix_display_dup(obj->display); + if(!*display) { + return MIX_RESULT_NO_MEMORY; + }*/ + *display = mix_display_ref(obj->display); + } + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixvideoinitparams.h 
b/mix_video/src/mixvideoinitparams.h
index bd83224..8d8dbec 100644
--- a/mix_video/src/mixvideoinitparams.h
+++ b/mix_video/src/mixvideoinitparams.h
@@ -13,20 +13,11 @@
 #include "mixdisplay.h"
 #include "mixvideodef.h"
 
-G_BEGIN_DECLS
-
-/**
- * MIX_TYPE_VIDEOINITPARAMS:
- *
- * Get type of class.
- */
-#define MIX_TYPE_VIDEOINITPARAMS (mix_videoinitparams_get_type ())
-
 /**
  * MIX_VIDEOINITPARAMS:
  * @obj: object to be type-casted.
  */
-#define MIX_VIDEOINITPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOINITPARAMS, MixVideoInitParams))
+#define MIX_VIDEOINITPARAMS(obj) (reinterpret_cast<MixVideoInitParams*>(obj))
 
 /**
  * MIX_IS_VIDEOINITPARAMS:
@@ -34,83 +25,40 @@ G_BEGIN_DECLS
  *
  * Checks if the given object is an instance of #MixParams
  */
-#define MIX_IS_VIDEOINITPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOINITPARAMS))
-
-/**
- * MIX_VIDEOINITPARAMS_CLASS:
- * @klass: class to be type-casted.
- */
-#define MIX_VIDEOINITPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOINITPARAMS, MixVideoInitParamsClass))
-
-/**
- * MIX_IS_VIDEOINITPARAMS_CLASS:
- * @klass: a class.
- *
- * Checks if the given class is #MixParamsClass
- */
-#define MIX_IS_VIDEOINITPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOINITPARAMS))
-
-/**
- * MIX_VIDEOINITPARAMS_GET_CLASS:
- * @obj: a #MixParams object.
- *
- * Get the class instance of the object.
- */
-#define MIX_VIDEOINITPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOINITPARAMS, MixVideoInitParamsClass))
-
-typedef struct _MixVideoInitParams MixVideoInitParams;
-typedef struct _MixVideoInitParamsClass MixVideoInitParamsClass;
+#define MIX_IS_VIDEOINITPARAMS(obj) ((NULL != MIX_VIDEOINITPARAMS(obj)) ? TRUE : FALSE)
 
 /**
  * MixVideoInitParams:
  *
  * MI-X VideoInit Parameter object
  */
-struct _MixVideoInitParams
-{
-  /*< public > */
-  MixParams parent;
-
-  /*< public > */
-
-  /* Pointer to a MixDisplay object
-   * such as MixDisplayX11 */
-  MixDisplay *display;
-
-  /* Reserved for future use */
-  void *reserved1;
-
-  /* Reserved for future use */
-  void *reserved2;
-
-  /* Reserved for future use */
-  void *reserved3;
-
-  /* Reserved for future use */
-  void *reserved4;
-};
-
-/**
- * MixVideoInitParamsClass:
- *
- * MI-X VideoInit object class
- */
-struct _MixVideoInitParamsClass
-{
-  /*< public > */
-  MixParamsClass parent_class;
-
-  /* class members */
+class MixVideoInitParams : public MixParams {
+public:
+  MixVideoInitParams();
+  ~MixVideoInitParams();
+  virtual gboolean copy(MixParams *target) const;
+  virtual gboolean equal(MixParams* obj) const;
+  virtual MixParams* dup() const;
+public:
+  /*< public > */
+
+  /* Pointer to a MixDisplay object
+   * such as MixDisplayX11 */
+  MixDisplay *display;
+
+  /* Reserved for future use */
+  void *reserved1;
+
+  /* Reserved for future use */
+  void *reserved2;
+
+  /* Reserved for future use */
+  void *reserved3;
+
+  /* Reserved for future use */
+  void *reserved4;
 };
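
For illustration, a minimal sketch of the intended call sequence against this class. This is hypothetical caller code, not part of the patch; make_init_params() is an invented helper name, and releasing via the inherited Unref() follows the pattern used by the dup() implementations in this series.

    #include "mixvideoinitparams.h"

    /* Create init params and attach a display; the caller keeps its own display ref. */
    static MIX_RESULT make_init_params(MixDisplay *display, MixVideoInitParams **out) {
        MixVideoInitParams *params = mix_videoinitparams_new();
        if (NULL == params) return MIX_RESULT_NO_MEMORY;
        /* mix_videoinitparams_set_display() takes its own reference on 'display' */
        MIX_RESULT ret = mix_videoinitparams_set_display(params, display);
        if (MIX_RESULT_SUCCESS != ret) {
            params->Unref();    /* releases the half-built object */
            return ret;
        }
        *out = params;
        return MIX_RESULT_SUCCESS;
    }
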
-
-/**
- * mix_videoinitparams_get_type:
- * @returns: type
- *
- * Get the type of object.
- */
-GType mix_videoinitparams_get_type (void);
-
 /**
  * mix_videoinitparams_new:
  * @returns: A newly allocated instance of #MixVideoInitParams
@@ -146,8 +94,8 @@ MixVideoInitParams *mix_videoinitparams_ref (MixVideoInitParams * mix);
  *
  * Set MixDisplay object
  */
-MIX_RESULT mix_videoinitparams_set_display (MixVideoInitParams * obj,
-    MixDisplay * display);
+MIX_RESULT mix_videoinitparams_set_display (
+    MixVideoInitParams * obj, MixDisplay * display);
 
 /**
  * mix_videoinitparams_get_display:
@@ -157,9 +105,9 @@ MIX_RESULT mix_videoinitparams_set_display (MixVideoInitParams * obj,
  *
  * Get MixDisplay object
  */
-MIX_RESULT mix_videoinitparams_get_display (MixVideoInitParams * obj,
-    MixDisplay ** dislay);
+MIX_RESULT mix_videoinitparams_get_display (
+    MixVideoInitParams * obj, MixDisplay ** display);
+
-G_END_DECLS
 
 #endif /* __MIX_VIDEOINITPARAMS_H__ */
diff --git a/mix_video/src/mixvideolog.h b/mix_video/src/mixvideolog.h
index 7bb9ace..6f40943 100644
--- a/mix_video/src/mixvideolog.h
+++ b/mix_video/src/mixvideolog.h
@@ -10,7 +10,7 @@
 #define __MIX_VIDEO_LOG_H__
 #include
-G_BEGIN_DECLS
+
 #ifdef MIX_LOG_ENABLE
 #define LOG_V(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__)
 #define LOG_I(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_INFO, format, ##__VA_ARGS__)
@@ -23,6 +23,5 @@ G_BEGIN_DECLS
 #define LOG_E(format, ...)
 #endif
 
-G_END_DECLS
 
 #endif /* __MIX_VIDEO_LOG_H__ */
diff --git a/mix_video/src/mixvideorenderparams.c b/mix_video/src/mixvideorenderparams.c
deleted file mode 100644
index 9c47ddd..0000000
--- a/mix_video/src/mixvideorenderparams.c
+++ /dev/null
@@ -1,422 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-/**
- * SECTION:mixvideorenderparams
- * @short_description: MI-X Video Render Parameters
- *
- * The #MixVideoRenderParams object will be created by the MMF/App
- * and provided to #MixVideo in the #MixVideo mix_video_render() function.
- */ - -#include /* libVA */ -#include - -#include "mixvideorenderparams.h" -#include "mixvideorenderparams_internal.h" - -#include - -static GType _mix_videorenderparams_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_videorenderparams_type = g_define_type_id; } - -gboolean mix_videorenderparams_copy(MixParams * target, const MixParams * src); -MixParams *mix_videorenderparams_dup(const MixParams * obj); -gboolean mix_videorenderparams_equal(MixParams * first, MixParams * second); -static void mix_videorenderparams_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoRenderParams, mix_videorenderparams, - MIX_TYPE_PARAMS, _do_init); - -static void mix_videorenderparams_init(MixVideoRenderParams * self) { - - MixVideoRenderParamsPrivate *priv = MIX_VIDEORENDERPARAMS_GET_PRIVATE(self); - priv->va_cliprects = NULL; - self->reserved = priv; - - /* initialize properties here */ - self->display = NULL; - memset(&(self->src_rect), 0, sizeof(MixRect)); - memset(&(self->dst_rect), 0, sizeof(MixRect)); - - self->clipping_rects = NULL; - self->number_of_clipping_rects = 0; - - /* TODO: initialize other properties */ - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void mix_videorenderparams_class_init(MixVideoRenderParamsClass * klass) { - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); - - mixparams_class->finalize = mix_videorenderparams_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_videorenderparams_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_videorenderparams_dup; - mixparams_class->equal - = (MixParamsEqualFunction) mix_videorenderparams_equal; - - /* Register and allocate the space the private structure for this object */ - g_type_class_add_private(mixparams_class, sizeof(MixVideoRenderParamsPrivate)); -} - -MixVideoRenderParams * -mix_videorenderparams_new(void) { - MixVideoRenderParams *ret = - (MixVideoRenderParams *) g_type_create_instance( - MIX_TYPE_VIDEORENDERPARAMS); - - return ret; -} - -void mix_videorenderparams_finalize(MixParams * obj) { - /* clean up here. */ - - MixVideoRenderParams *self = MIX_VIDEORENDERPARAMS(obj); - MixVideoRenderParamsPrivate *priv = - (MixVideoRenderParamsPrivate *) self->reserved; - - if (self->clipping_rects) { - g_free(self->clipping_rects); - self->clipping_rects = NULL; - } - - if (priv->va_cliprects) { - g_free(self->clipping_rects); - priv->va_cliprects = NULL; - } - - self->number_of_clipping_rects = 0; - - if (self->display) { - mix_display_unref(self->display); - self->display = NULL; - } - - /* TODO: cleanup other resources allocated */ - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} - -MixVideoRenderParams * -mix_videorenderparams_ref(MixVideoRenderParams * mix) { - return (MixVideoRenderParams *) mix_params_ref(MIX_PARAMS(mix)); -} - -/** - * mix_videorenderparams_dup: - * @obj: a #MixVideoRenderParams object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. 
- */ -MixParams * -mix_videorenderparams_dup(const MixParams * obj) { - MixParams *ret = NULL; - - if (MIX_IS_VIDEORENDERPARAMS(obj)) { - MixVideoRenderParams *duplicate = mix_videorenderparams_new(); - if (mix_videorenderparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { - ret = MIX_PARAMS(duplicate); - } else { - mix_videorenderparams_unref(duplicate); - } - } - return ret; -} - -/** - * mix_videorenderparams_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videorenderparams_copy(MixParams * target, const MixParams * src) { - - MixVideoRenderParams *this_target, *this_src; - MIX_RESULT mix_result = MIX_RESULT_FAIL; - - if (target == src) { - return TRUE; - } - - if (MIX_IS_VIDEORENDERPARAMS(target) && MIX_IS_VIDEORENDERPARAMS(src)) { - - // Cast the base object to this child object - this_target = MIX_VIDEORENDERPARAMS(target); - this_src = MIX_VIDEORENDERPARAMS(src); - - mix_result = mix_videorenderparams_set_display(this_target, - this_src->display); - if (mix_result != MIX_RESULT_SUCCESS) { - return FALSE; - } - - mix_result = mix_videorenderparams_set_clipping_rects(this_target, - this_src->clipping_rects, this_src->number_of_clipping_rects); - - if (mix_result != MIX_RESULT_SUCCESS) { - return FALSE; - } - - this_target->src_rect = this_src->src_rect; - this_target->dst_rect = this_src->dst_rect; - - // Now chainup base class - if (parent_class->copy) { - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - return TRUE; - } - } - return FALSE; -} - -gboolean mix_rect_equal(MixRect rc1, MixRect rc2) { - - if (rc1.x == rc2.x && rc1.y == rc2.y && rc1.width == rc2.width - && rc1.height == rc2.height) { - return TRUE; - } - - return FALSE; -} - -/** - * mix_videorenderparams_: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videorenderparams_equal(MixParams * first, MixParams * second) { - gboolean ret = FALSE; - MixVideoRenderParams *this_first, *this_second; - - if (MIX_IS_VIDEORENDERPARAMS(first) && MIX_IS_VIDEORENDERPARAMS(second)) { - // Deep compare - // Cast the base object to this child object - - this_first = MIX_VIDEORENDERPARAMS(first); - this_second = MIX_VIDEORENDERPARAMS(second); - - if (mix_display_equal(MIX_DISPLAY(this_first->display), MIX_DISPLAY( - this_second->display)) && mix_rect_equal(this_first->src_rect, - this_second->src_rect) && mix_rect_equal(this_first->dst_rect, - this_second->dst_rect) && this_first->number_of_clipping_rects - == this_second->number_of_clipping_rects && memcmp( - (guchar *) this_first->number_of_clipping_rects, - (guchar *) this_second->number_of_clipping_rects, - this_first->number_of_clipping_rects) == 0) { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - } - } - - return ret; -} - -#define MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEORENDERPARAMS(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEORENDERPARAMS(obj)) return MIX_RESULT_FAIL; \ - - -/* TODO: Add getters and setters for other properties. 
The following is just an exmaple, not implemented yet. */ - -MIX_RESULT mix_videorenderparams_set_display(MixVideoRenderParams * obj, - MixDisplay * display) { - - MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj); - - if (obj->display) { - mix_display_unref(obj->display); - obj->display = NULL; - } - - /* dup */ - if (display) { - obj->display = mix_display_ref(display); - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videorenderparams_get_display(MixVideoRenderParams * obj, - MixDisplay ** display) { - - MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, display); - - /* dup? */ - if (obj->display) { - *display = mix_display_ref(obj->display); - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videorenderparams_set_src_rect(MixVideoRenderParams * obj, - MixRect src_rect) { - - MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj); - - obj->src_rect = src_rect; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videorenderparams_get_src_rect(MixVideoRenderParams * obj, - MixRect * src_rect) { - - MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, src_rect); - - *src_rect = obj->src_rect; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videorenderparams_set_dest_rect(MixVideoRenderParams * obj, - MixRect dst_rect) { - - MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj); - - obj->dst_rect = dst_rect; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videorenderparams_get_dest_rect(MixVideoRenderParams * obj, - MixRect * dst_rect) { - - MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, dst_rect); - - *dst_rect = obj->dst_rect; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videorenderparams_set_clipping_rects(MixVideoRenderParams * obj, - MixRect* clipping_rects, guint number_of_clipping_rects) { - - MixVideoRenderParamsPrivate *priv = NULL; - MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj); - - priv = (MixVideoRenderParamsPrivate *) obj->reserved; - - - if (obj->clipping_rects) { - g_free(obj->clipping_rects); - obj->clipping_rects = NULL; - obj->number_of_clipping_rects = 0; - } - - if(priv->va_cliprects) { - g_free(priv->va_cliprects); - priv->va_cliprects = NULL; - } - - - if (clipping_rects && number_of_clipping_rects) { - - gint idx = 0; - - obj->clipping_rects = g_memdup(clipping_rects, number_of_clipping_rects - * sizeof(MixRect)); - if (!obj->clipping_rects) { - return MIX_RESULT_NO_MEMORY; - } - - obj->number_of_clipping_rects = number_of_clipping_rects; - - /* create VARectangle list */ - priv->va_cliprects = g_malloc(number_of_clipping_rects * sizeof(VARectangle)); - if (!priv->va_cliprects) { - return MIX_RESULT_NO_MEMORY; - } - - for (idx = 0; idx < number_of_clipping_rects; idx++) { - priv->va_cliprects[idx].x = clipping_rects[idx].x; - priv->va_cliprects[idx].y = clipping_rects[idx].y; - priv->va_cliprects[idx].width = clipping_rects[idx].width; - priv->va_cliprects[idx].height = clipping_rects[idx].height; - } - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videorenderparams_get_clipping_rects(MixVideoRenderParams * obj, - MixRect ** clipping_rects, guint* number_of_clipping_rects) { - - MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, clipping_rects); - if (!number_of_clipping_rects) { - return MIX_RESULT_NULL_PTR; - } - - *clipping_rects = NULL; - *number_of_clipping_rects = 0; - - if (obj->clipping_rects && obj->number_of_clipping_rects) { - *clipping_rects = g_memdup(obj->clipping_rects, - obj->number_of_clipping_rects * sizeof(MixRect)); - if (!*clipping_rects) { - return MIX_RESULT_NO_MEMORY; - } - - *number_of_clipping_rects = obj->number_of_clipping_rects; - } 
- - return MIX_RESULT_SUCCESS; -} - -/* The mixvideo internal method */ -MIX_RESULT mix_videorenderparams_get_cliprects_internal( - MixVideoRenderParams * obj, VARectangle ** va_cliprects, - guint* number_of_cliprects) { - - MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, va_cliprects); - if (!number_of_cliprects) { - return MIX_RESULT_NULL_PTR; - } - MixVideoRenderParamsPrivate *priv = - (MixVideoRenderParamsPrivate *) obj->reserved; - - *va_cliprects = NULL; - *number_of_cliprects = 0; - - if (priv->va_cliprects && obj->number_of_clipping_rects) { - *va_cliprects = priv->va_cliprects; - *number_of_cliprects = obj->number_of_clipping_rects; - } - - return MIX_RESULT_SUCCESS; - -} - -/* TODO: implement properties' setters and getters */ diff --git a/mix_video/src/mixvideorenderparams.cpp b/mix_video/src/mixvideorenderparams.cpp new file mode 100644 index 0000000..65e5183 --- /dev/null +++ b/mix_video/src/mixvideorenderparams.cpp @@ -0,0 +1,299 @@ +/* + INTEL CONFIDENTIAL + Copyright 2009 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +/** + * SECTION:mixvideorenderparams + * @short_description: MI-X Video Render Parameters + * + * The #MixVideoRenderParams object will be created by the MMF/App + * and provided to #MixVideo in the #MixVideo mix_video_render() function. 
+ */
+
+#include <va/va.h>	/* libVA */
+#include "mixvideorenderparams.h"
+#include "mixvideorenderparams_internal.h"
+
+#include <string.h>
+
+#define MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT(obj) \
+    if(!obj) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEORENDERPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT(obj, prop) \
+    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEORENDERPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+gboolean mix_rect_equal(MixRect rc1, MixRect rc2);
+
+MixVideoRenderParams::MixVideoRenderParams()
+    :display(NULL)
+    ,clipping_rects(NULL)
+    ,number_of_clipping_rects(0)
+    ,reserved(NULL)
+    ,reserved1(NULL)
+    ,reserved2(NULL)
+    ,reserved3(NULL)
+    ,reserved4(NULL)
+    ,mVa_cliprects(NULL) {
+    /* initialize properties here */
+    memset(&src_rect, 0, sizeof(MixRect));
+    memset(&dst_rect, 0, sizeof(MixRect));
+}
+
+MixVideoRenderParams::~MixVideoRenderParams() {
+    if (NULL != clipping_rects) {
+        g_free(clipping_rects);
+        clipping_rects = NULL;
+    }
+    if(NULL != mVa_cliprects) {
+        g_free(mVa_cliprects);
+        mVa_cliprects = NULL;
+    }
+    number_of_clipping_rects = 0;
+    if (NULL != display) {
+        mix_display_unref(display);
+        display = NULL;
+    }
+}
+
+gboolean MixVideoRenderParams::copy(MixParams *target) const {
+    if (NULL == target) return FALSE;
+    MixVideoRenderParams *this_target = MIX_VIDEORENDERPARAMS(target);
+    MIX_RESULT mix_result = MIX_RESULT_FAIL;
+
+    if (this_target == this) {
+        return TRUE;
+    }
+
+    if(NULL != this_target) {
+        mix_result = mix_videorenderparams_set_display(this_target, display);
+        if (MIX_RESULT_SUCCESS != mix_result) {
+            return FALSE;
+        }
+
+        mix_result = mix_videorenderparams_set_clipping_rects(this_target,
+            clipping_rects, number_of_clipping_rects);
+
+        if (MIX_RESULT_SUCCESS != mix_result) {
+            return FALSE;
+        }
+
+        this_target->src_rect = src_rect;
+        this_target->dst_rect = dst_rect;
+
+    }
+    return MixParams::copy(target);
+}
+
+gboolean MixVideoRenderParams::equal(MixParams* obj) const {
+    gboolean ret = FALSE;
+    MixVideoRenderParams *this_obj = MIX_VIDEORENDERPARAMS(obj);
+    if (NULL != this_obj) {
+        // Deep compare; the clip-rect arrays are compared element-wise,
+        // not by casting the (integer) rectangle count to a pointer.
+        if (mix_display_equal(MIX_DISPLAY(display), MIX_DISPLAY(
+            this_obj->display)) && mix_rect_equal(src_rect,
+            this_obj->src_rect) && mix_rect_equal(dst_rect,
+            this_obj->dst_rect) && number_of_clipping_rects
+            == this_obj->number_of_clipping_rects &&
+            ((0 == number_of_clipping_rects) || (memcmp(
+            (guchar *) clipping_rects,
+            (guchar *) this_obj->clipping_rects,
+            number_of_clipping_rects * sizeof(MixRect)) == 0))) {
+            // members within this scope equal. chaining up.
+            ret = MixParams::equal(obj);
+        }
+    }
+    return ret;
+}
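
The virtual copy()/equal()/dup() members replace the MixParamsClass function-pointer table of the GObject version, so clones and comparisons go through ordinary C++ dispatch. A minimal sketch of how a caller exercises the triad (hypothetical usage, not part of the patch; clone_matches() is an invented name):

    #include "mixvideorenderparams.h"

    /* A dup()'ed object should compare equal to its source. */
    static gboolean clone_matches(MixVideoRenderParams *params) {
        gboolean same = FALSE;
        MixParams *clone = params->dup();   /* deep copy via virtual dup() */
        if (NULL != clone) {
            same = params->equal(clone);    /* deep compare via virtual equal() */
            clone->Unref();                 /* drop the clone's only reference */
        }
        return same;
    }
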
+MixParams* MixVideoRenderParams::dup() const {
+    MixParams *ret = NULL;
+    MixVideoRenderParams *duplicate = mix_videorenderparams_new();
+    if (copy(duplicate)) {
+        ret = duplicate;
+    } else {
+        mix_videorenderparams_unref(duplicate);
+    }
+    return ret;
+}
+
+MIX_RESULT MixVideoRenderParams::set_clipping_rects(
+    MixRect* clipping_rects,
+    guint number_of_clipping_rects) {
+
+    if (this->clipping_rects) {
+        g_free(this->clipping_rects);
+        this->clipping_rects = NULL;
+        this->number_of_clipping_rects = 0;
+    }
+
+    if(this->mVa_cliprects) {
+        g_free(this->mVa_cliprects);
+        this->mVa_cliprects = NULL;
+    }
+
+    if ((NULL != clipping_rects) && (0 != number_of_clipping_rects)) {
+        this->clipping_rects = reinterpret_cast<MixRect*>(g_memdup(clipping_rects,
+            number_of_clipping_rects * sizeof(MixRect)));
+        if (NULL == this->clipping_rects) {
+            return MIX_RESULT_NO_MEMORY;
+        }
+        this->number_of_clipping_rects = number_of_clipping_rects;
+
+        /* create VARectangle list */
+        this->mVa_cliprects = reinterpret_cast<VARectangle*>(g_malloc(number_of_clipping_rects * sizeof(VARectangle)));
+        if (NULL == this->mVa_cliprects) {
+            return MIX_RESULT_NO_MEMORY;
+        }
+
+        for (guint idx = 0; idx < number_of_clipping_rects; ++idx) {
+            this->mVa_cliprects[idx].x = clipping_rects[idx].x;
+            this->mVa_cliprects[idx].y = clipping_rects[idx].y;
+            this->mVa_cliprects[idx].width = clipping_rects[idx].width;
+            this->mVa_cliprects[idx].height = clipping_rects[idx].height;
+        }
+    }
+
+    return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT MixVideoRenderParams::get_clipping_rects(MixRect ** clipping_rects,
+    guint* number_of_clipping_rects) {
+    if (NULL == clipping_rects || NULL == number_of_clipping_rects)
+        return MIX_RESULT_NULL_PTR;
+
+    *clipping_rects = NULL;
+    *number_of_clipping_rects = 0;
+
+    if ((NULL != this->clipping_rects) && (0 != this->number_of_clipping_rects)) {
+        *clipping_rects = reinterpret_cast<MixRect*>(g_memdup(this->clipping_rects,
+            this->number_of_clipping_rects * sizeof(MixRect)));
+        if (NULL == *clipping_rects) {
+            return MIX_RESULT_NO_MEMORY;
+        }
+        *number_of_clipping_rects = this->number_of_clipping_rects;
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT MixVideoRenderParams::get_va_cliprects(VARectangle ** va_cliprects,
+    guint* number_of_cliprects) {
+    if (NULL == va_cliprects || NULL == number_of_cliprects)
+        return MIX_RESULT_NULL_PTR;
+
+    *va_cliprects = NULL;
+    *number_of_cliprects = 0;
+
+    if ((NULL != mVa_cliprects) && (0 != number_of_clipping_rects)) {
+        *va_cliprects = mVa_cliprects;
+        *number_of_cliprects = number_of_clipping_rects;
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+MixVideoRenderParams *
+mix_videorenderparams_new(void) {
+    return new MixVideoRenderParams();
+}
+
+MixVideoRenderParams *
+mix_videorenderparams_ref(MixVideoRenderParams * mix) {
+    if (NULL != mix)
+        mix->Ref();
+    return mix;
+}
+
+gboolean mix_rect_equal(MixRect rc1, MixRect rc2) {
+    if (rc1.x == rc2.x && rc1.y == rc2.y && rc1.width == rc2.width
+        && rc1.height == rc2.height) {
+        return TRUE;
+    }
+    return FALSE;
+}
+
+
+/* TODO: Add getters and setters for other properties. The following is just an example, not implemented yet. */
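
set_clipping_rects() above copies the caller's MixRect list and eagerly builds the parallel VARectangle array that mix_videorenderparams_get_cliprects_internal() later hands to libVA, so the caller's array need not outlive the call. A minimal caller-side sketch (hypothetical code, not part of the patch; setup_clip() is an invented name):

    #include "mixvideorenderparams.h"

    /* Clip rendering to one 320x240 region at the origin. */
    static MIX_RESULT setup_clip(MixVideoRenderParams *params) {
        MixRect clip;
        clip.x = 0;
        clip.y = 0;
        clip.width = 320;
        clip.height = 240;
        /* the params object stores its own copies of 'clip' */
        return mix_videorenderparams_set_clipping_rects(params, &clip, 1);
    }
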
+MIX_RESULT mix_videorenderparams_set_display(
+    MixVideoRenderParams * obj, MixDisplay * display) {
+    MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj);
+    if (obj->display) {
+        mix_display_unref(obj->display);
+        obj->display = NULL;
+    }
+    /* dup */
+    if (display) {
+        obj->display = mix_display_ref(display);
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_get_display(
+    MixVideoRenderParams * obj, MixDisplay ** display) {
+    MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, display);
+    /* dup? */
+    if (obj->display) {
+        *display = mix_display_ref(obj->display);
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_set_src_rect(
+    MixVideoRenderParams * obj, MixRect src_rect) {
+    MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj);
+    obj->src_rect = src_rect;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_get_src_rect(
+    MixVideoRenderParams * obj, MixRect * src_rect) {
+    MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, src_rect);
+    *src_rect = obj->src_rect;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_set_dest_rect(
+    MixVideoRenderParams * obj, MixRect dst_rect) {
+    MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj);
+    obj->dst_rect = dst_rect;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_get_dest_rect(
+    MixVideoRenderParams * obj, MixRect * dst_rect) {
+    MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, dst_rect);
+    *dst_rect = obj->dst_rect;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_set_clipping_rects(
+    MixVideoRenderParams * obj, MixRect* clipping_rects,
+    guint number_of_clipping_rects) {
+    if (NULL == obj)
+        return MIX_RESULT_NULL_PTR;
+    return obj->set_clipping_rects(clipping_rects, number_of_clipping_rects);
+}
+
+MIX_RESULT mix_videorenderparams_get_clipping_rects(
+    MixVideoRenderParams * obj, MixRect ** clipping_rects,
+    guint* number_of_clipping_rects) {
+    if (NULL == obj)
+        return MIX_RESULT_NULL_PTR;
+    return obj->get_clipping_rects(clipping_rects, number_of_clipping_rects);
+}
+
+/* The mixvideo internal method */
+MIX_RESULT mix_videorenderparams_get_cliprects_internal(
+    MixVideoRenderParams * obj, VARectangle ** va_cliprects,
+    guint* number_of_cliprects) {
+    if (NULL == obj)
+        return MIX_RESULT_NULL_PTR;
+    return obj->get_va_cliprects(va_cliprects, number_of_cliprects);
+}
+
+/* TODO: implement properties' setters and getters */
diff --git a/mix_video/src/mixvideorenderparams.h b/mix_video/src/mixvideorenderparams.h
index b377136..f841cb6 100644
--- a/mix_video/src/mixvideorenderparams.h
+++ b/mix_video/src/mixvideorenderparams.h
@@ -13,21 +13,13 @@
 #include "mixvideodef.h"
 #include "mixdisplay.h"
 #include "mixvideoframe.h"
-
-G_BEGIN_DECLS
-
-/**
- * MIX_TYPE_VIDEORENDERPARAMS:
- *
- * Get type of class.
- */
-#define MIX_TYPE_VIDEORENDERPARAMS (mix_videorenderparams_get_type ())
+#include <va/va.h>
 
 /**
  * MIX_VIDEORENDERPARAMS:
  * @obj: object to be type-casted.
  */
-#define MIX_VIDEORENDERPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEORENDERPARAMS, MixVideoRenderParams))
+#define MIX_VIDEORENDERPARAMS(obj) (reinterpret_cast<MixVideoRenderParams*>(obj))
 
 /**
  * MIX_IS_VIDEORENDERPARAMS:
@@ -35,44 +27,31 @@ G_BEGIN_DECLS
  *
  * Checks if the given object is an instance of #MixParams
  */
-#define MIX_IS_VIDEORENDERPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEORENDERPARAMS))
-
-/**
- * MIX_VIDEORENDERPARAMS_CLASS:
- * @klass: class to be type-casted.
- */ -#define MIX_VIDEORENDERPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEORENDERPARAMS, MixVideoRenderParamsClass)) - -/** - * MIX_IS_VIDEORENDERPARAMS_CLASS: - * @klass: a class. - * - * Checks if the given class is #MixParamsClass - */ -#define MIX_IS_VIDEORENDERPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEORENDERPARAMS)) - -/** - * MIX_VIDEORENDERPARAMS_GET_CLASS: - * @obj: a #MixParams object. - * - * Get the class instance of the object. - */ -#define MIX_VIDEORENDERPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEORENDERPARAMS, MixVideoRenderParamsClass)) +#define MIX_IS_VIDEORENDERPARAMS(obj) ((NULL != MIX_VIDEORENDERPARAMS(obj)) ? TRUE : FALSE) -typedef struct _MixVideoRenderParams MixVideoRenderParams; -typedef struct _MixVideoRenderParamsClass MixVideoRenderParamsClass; /** * MixVideoRenderParams: * * MI-X VideoRender Parameter object */ -struct _MixVideoRenderParams { - /*< public > */ - MixParams parent; - - /*< public > */ - +class MixVideoRenderParams : public MixParams { +public: + MixVideoRenderParams(); + virtual ~MixVideoRenderParams(); + virtual gboolean copy(MixParams *target) const; + virtual gboolean equal(MixParams*) const; + virtual MixParams* dup() const; + + MIX_RESULT set_clipping_rects(MixRect* clipping_rects, + guint number_of_clipping_rects); + MIX_RESULT get_clipping_rects(MixRect ** clipping_rects, + guint* number_of_clipping_rects); + MIX_RESULT get_va_cliprects(VARectangle ** va_cliprects, + guint* number_of_cliprects); + +public: + /*< public > */ /* Pointer to a MixDisplay object * such as MixDisplayX11 */ MixDisplay *display; @@ -111,27 +90,12 @@ struct _MixVideoRenderParams { /* Reserved for future use */ gpointer reserved4; -}; -/** - * MixVideoRenderParamsClass: - * - * MI-X VideoRender object class - */ -struct _MixVideoRenderParamsClass { - /*< public > */ - MixParamsClass parent_class; +private: + VARectangle *mVa_cliprects; - /* class members */ }; -/** - * mix_videorenderparams_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_videorenderparams_get_type(void); /** * mix_videorenderparams_new: @@ -139,6 +103,7 @@ GType mix_videorenderparams_get_type(void); * * Use this method to create new instance of #MixVideoRenderParams */ + MixVideoRenderParams *mix_videorenderparams_new(void); /** * mix_videorenderparams_ref: @@ -147,6 +112,7 @@ MixVideoRenderParams *mix_videorenderparams_new(void); * * Add reference count. 
*/ + MixVideoRenderParams *mix_videorenderparams_ref(MixVideoRenderParams * mix); /** @@ -167,8 +133,8 @@ MixVideoRenderParams *mix_videorenderparams_ref(MixVideoRenderParams * mix); * * Set MixDisplay Object */ -MIX_RESULT mix_videorenderparams_set_display(MixVideoRenderParams * obj, - MixDisplay * display); +MIX_RESULT mix_videorenderparams_set_display( + MixVideoRenderParams * obj, MixDisplay * display); /** * mix_videorenderparams_get_display: @@ -178,8 +144,8 @@ MIX_RESULT mix_videorenderparams_set_display(MixVideoRenderParams * obj, * * Get MixDisplay Object */ -MIX_RESULT mix_videorenderparams_get_display(MixVideoRenderParams * obj, - MixDisplay ** display); +MIX_RESULT mix_videorenderparams_get_display( + MixVideoRenderParams * obj, MixDisplay ** display); /** * mix_videorenderparams_set_src_rect: @@ -189,8 +155,8 @@ MIX_RESULT mix_videorenderparams_get_display(MixVideoRenderParams * obj, * * Set source rectangle */ -MIX_RESULT mix_videorenderparams_set_src_rect(MixVideoRenderParams * obj, - MixRect src_rect); +MIX_RESULT mix_videorenderparams_set_src_rect( + MixVideoRenderParams * obj, MixRect src_rect); /** * mix_videorenderparams_get_src_rect: @@ -200,8 +166,8 @@ MIX_RESULT mix_videorenderparams_set_src_rect(MixVideoRenderParams * obj, * * Get source rectangle */ -MIX_RESULT mix_videorenderparams_get_src_rect(MixVideoRenderParams * obj, - MixRect * src_rect); +MIX_RESULT mix_videorenderparams_get_src_rect( + MixVideoRenderParams * obj, MixRect * src_rect); /** * mix_videorenderparams_set_dest_rect: @@ -211,8 +177,8 @@ MIX_RESULT mix_videorenderparams_get_src_rect(MixVideoRenderParams * obj, * * Set destination rectangle */ -MIX_RESULT mix_videorenderparams_set_dest_rect(MixVideoRenderParams * obj, - MixRect dst_rect); +MIX_RESULT mix_videorenderparams_set_dest_rect( + MixVideoRenderParams * obj, MixRect dst_rect); /** * mix_videorenderparams_set_dest_rect: @@ -222,8 +188,8 @@ MIX_RESULT mix_videorenderparams_set_dest_rect(MixVideoRenderParams * obj, * * Get destination rectangle */ -MIX_RESULT mix_videorenderparams_get_dest_rect(MixVideoRenderParams * obj, - MixRect * dst_rect); +MIX_RESULT mix_videorenderparams_get_dest_rect( + MixVideoRenderParams * obj, MixRect * dst_rect); /** * mix_videorenderparams_set_clipping_rects: @@ -234,8 +200,8 @@ MIX_RESULT mix_videorenderparams_get_dest_rect(MixVideoRenderParams * obj, * * Set clipping rectangles */ -MIX_RESULT mix_videorenderparams_set_clipping_rects(MixVideoRenderParams * obj, - MixRect* clipping_rects, guint number_of_clipping_rects); +MIX_RESULT mix_videorenderparams_set_clipping_rects( + MixVideoRenderParams * obj, MixRect* clipping_rects, guint number_of_clipping_rects); /** * mix_videorenderparams_get_clipping_rects: @@ -250,11 +216,11 @@ MIX_RESULT mix_videorenderparams_set_clipping_rects(MixVideoRenderParams * obj, * DO NOT free clipping_rects! *
 */
-MIX_RESULT mix_videorenderparams_get_clipping_rects(MixVideoRenderParams * obj,
-    MixRect ** clipping_rects, guint* number_of_clipping_rects);
+MIX_RESULT mix_videorenderparams_get_clipping_rects(
+    MixVideoRenderParams * obj, MixRect ** clipping_rects, guint* number_of_clipping_rects);
 
 /* TODO: Add getters and setters for other properties */
 
-G_END_DECLS
+
 
 #endif /* __MIX_VIDEORENDERPARAMS_H__ */
diff --git a/mix_video/src/mixvideorenderparams_internal.h b/mix_video/src/mixvideorenderparams_internal.h
index c7d1fe4..14215a0 100644
--- a/mix_video/src/mixvideorenderparams_internal.h
+++ b/mix_video/src/mixvideorenderparams_internal.h
@@ -9,33 +9,14 @@
 #ifndef __MIX_VIDEORENDERPARAMS_PRIVATE_H__
 #define __MIX_VIDEORENDERPARAMS_PRIVATE_H__
 
-G_BEGIN_DECLS
-
-
-typedef struct _MixVideoRenderParamsPrivate MixVideoRenderParamsPrivate;
-
-struct _MixVideoRenderParamsPrivate {
-  /*< private > */
-
-  VARectangle *va_cliprects;
-};
-
-/**
- * MIX_VIDEO_PRIVATE:
- *
- * Get private structure of this class.
- * @obj: class object for which to get private data.
- */
-#define MIX_VIDEORENDERPARAMS_GET_PRIVATE(obj) \
-  (G_TYPE_INSTANCE_GET_PRIVATE ((obj), MIX_TYPE_VIDEORENDERPARAMS, MixVideoRenderParamsPrivate))
+#include <va/va.h>
 
 /* Internal function */
-
 MIX_RESULT mix_videorenderparams_get_cliprects_internal(
     MixVideoRenderParams * obj, VARectangle ** va_cliprects,
     guint* number_of_cliprects);
 
-G_END_DECLS
+
 
 #endif /* __MIX_VIDEORENDERPARAMS_PRIVATE_H__ */
diff --git a/mix_video/src/mixvideothread.cpp b/mix_video/src/mixvideothread.cpp
new file mode 100644
index 0000000..b9a92e0
--- /dev/null
+++ b/mix_video/src/mixvideothread.cpp
@@ -0,0 +1,50 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideothread
+ * @short_description: MI-X Video mutex helper
+ *
+ * MixVideoMutex is a simple pthread-based mutex wrapper
+ * used internally by MixVideo for locking.
+ */
+
+#include "mixvideothread.h"
+
+MixVideoMutex::MixVideoMutex() {
+    pthread_mutex_init(&mMutex, NULL);
+}
+MixVideoMutex::MixVideoMutex(const char* name) {
+    pthread_mutex_init(&mMutex, NULL);
+}
+MixVideoMutex::MixVideoMutex(int type, const char* name) {
+    if (type == SHARED) {
+        pthread_mutexattr_t attr;
+        pthread_mutexattr_init(&attr);
+        pthread_mutexattr_setpshared(&attr, PTHREAD_PROCESS_SHARED);
+        pthread_mutex_init(&mMutex, &attr);
+        pthread_mutexattr_destroy(&attr);
+    } else {
+        pthread_mutex_init(&mMutex, NULL);
+    }
+}
+MixVideoMutex::~MixVideoMutex() {
+    pthread_mutex_destroy(&mMutex);
+}
+int MixVideoMutex::lock() {
+    return -pthread_mutex_lock(&mMutex);
+}
+void MixVideoMutex::unlock() {
+    pthread_mutex_unlock(&mMutex);
+}
+int MixVideoMutex::tryLock() {
+    return -pthread_mutex_trylock(&mMutex);
+}
+
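
MixVideoMutex hides the pthread details behind a small lock()/unlock()/tryLock() interface, with an optional process-shared mode selected by passing SHARED to the two-argument constructor. A minimal usage sketch (hypothetical code, not part of the patch; update_shared_state() is an invented name):

    #include "mixvideothread.h"

    static void update_shared_state(MixVideoMutex &mutex) {
        if (mutex.tryLock() != 0) {   /* non-blocking attempt; 0 means acquired */
            mutex.lock();             /* otherwise block until available */
        }
        /* ... touch the state guarded by 'mutex' ... */
        mutex.unlock();
    }
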
+ */ + +#ifndef __MIX_VIDEO_THREAD_H__ +#define __MIX_VIDEO_THREAD_H__ + +#include + + +class MixVideoMutex { +public: + enum { + PRIVATE = 0, + SHARED = 1 + }; + + MixVideoMutex(); + MixVideoMutex(const char* name); + MixVideoMutex(int type, const char* name = NULL); + ~MixVideoMutex(); + + // lock or unlock the mutex + int lock(); + void unlock(); + // lock if possible; returns 0 on success, error otherwise + int tryLock(); +private: + // A mutex cannot be copied + MixVideoMutex(const MixVideoMutex&); + MixVideoMutex& operator = (const MixVideoMutex&); + +private: + pthread_mutex_t mMutex; +}; + + + + +#endif /* __MIX_VIDEO_THREAD_H__ */ + diff --git a/mix_video/src/test.c b/mix_video/src/test.c deleted file mode 100644 index 8f9aee5..0000000 --- a/mix_video/src/test.c +++ /dev/null @@ -1,87 +0,0 @@ -#include -#include -#include -#include "mixvideo.h" -#include "mixdisplayx11.h" - -int -main (int argc, char **argv) -{ - MIX_RESULT ret; - - g_type_init (); - -/* test MixDisplay */ - { - - MixDisplayX11 *x11_clone = NULL; - MixDisplayX11 *x11 = mix_displayx11_new (); - - MixDisplay *base = MIX_DISPLAY (x11); - - gboolean flag = MIX_IS_DISPLAYX11 (base); - - Drawable drawable = 1024; - - mix_displayx11_set_drawable (x11, drawable); - -/* clone x11 */ - - x11_clone = (MixDisplayX11 *) mix_display_dup (MIX_DISPLAY (x11)); - - base = MIX_DISPLAY (x11_clone); - - flag = MIX_IS_DISPLAYX11 (base); - - mix_displayx11_get_drawable (x11_clone, &drawable); - -/* TODO: add more test cases */ - -/* release */ - mix_display_unref (MIX_DISPLAY (x11)); - mix_display_unref (MIX_DISPLAY (x11_clone)); - g_print ("MixDisplayX11 test is done!\n"); - } - -/* test MixVideoInitParams */ - { - MixVideoInitParams *init_params = mix_videoinitparams_new (); - - MixDisplayX11 *x11 = mix_displayx11_new (); - mix_displayx11_set_drawable (x11, 1024); - - mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11)); - -/* release */ - mix_params_unref (MIX_PARAMS (init_params)); - mix_display_unref (MIX_DISPLAY (x11)); - - g_print ("MixVideoInitParams test is done!\n"); - } - -/* test MixVideo */ - - { - MixVideo *video = mix_video_new (); - MixVideoInitParams *init_params = mix_videoinitparams_new (); - MixDisplayX11 *x11 = mix_displayx11_new (); - MixDrmParams *drm = mix_drmparams_new (); - MixCodecMode mode = MIX_CODEC_MODE_DECODE; - - mix_displayx11_set_drawable (x11, 1024); - mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11)); - - mix_video_initialize (video, mode, init_params, drm); - -/* TODO: add more test cases */ - -/* unref the objects. 
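A quick usage sketch for the MixVideoMutex wrapper added above (hypothetical caller code, not part of the patch; it assumes the header's elided include target is <pthread.h>, which the pthread_mutex_t member requires):

    #include "mixvideothread.h"

    // Hypothetical shared counter guarded by the new wrapper.
    static MixVideoMutex g_lock("frame-lock");
    static int g_frames = 0;

    void add_frame() {
        g_lock.lock();            // returns 0 on success, negated errno on failure
        ++g_frames;               // critical section
        g_lock.unlock();
    }

    bool try_read_frames(int* out) {
        if (g_lock.tryLock() != 0)
            return false;         // mutex already held; caller may retry
        *out = g_frames;
        g_lock.unlock();
        return true;
    }

Note that lock() and tryLock() negate pthread's positive error numbers, so callers test against 0 rather than against a boolean.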
*/ - - mix_params_unref (MIX_PARAMS (init_params)); - mix_params_unref (MIX_PARAMS (drm)); - mix_display_unref (MIX_DISPLAY (x11)); - g_object_unref (G_OBJECT (video)); - - g_print ("MixVideo test is done!\n"); - } -} diff --git a/mix_video/src/test.cpp b/mix_video/src/test.cpp new file mode 100644 index 0000000..8f9aee5 --- /dev/null +++ b/mix_video/src/test.cpp @@ -0,0 +1,87 @@ +#include +#include +#include +#include "mixvideo.h" +#include "mixdisplayx11.h" + +int +main (int argc, char **argv) +{ + MIX_RESULT ret; + + g_type_init (); + +/* test MixDisplay */ + { + + MixDisplayX11 *x11_clone = NULL; + MixDisplayX11 *x11 = mix_displayx11_new (); + + MixDisplay *base = MIX_DISPLAY (x11); + + gboolean flag = MIX_IS_DISPLAYX11 (base); + + Drawable drawable = 1024; + + mix_displayx11_set_drawable (x11, drawable); + +/* clone x11 */ + + x11_clone = (MixDisplayX11 *) mix_display_dup (MIX_DISPLAY (x11)); + + base = MIX_DISPLAY (x11_clone); + + flag = MIX_IS_DISPLAYX11 (base); + + mix_displayx11_get_drawable (x11_clone, &drawable); + +/* TODO: add more test cases */ + +/* release */ + mix_display_unref (MIX_DISPLAY (x11)); + mix_display_unref (MIX_DISPLAY (x11_clone)); + g_print ("MixDisplayX11 test is done!\n"); + } + +/* test MixVideoInitParams */ + { + MixVideoInitParams *init_params = mix_videoinitparams_new (); + + MixDisplayX11 *x11 = mix_displayx11_new (); + mix_displayx11_set_drawable (x11, 1024); + + mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11)); + +/* release */ + mix_params_unref (MIX_PARAMS (init_params)); + mix_display_unref (MIX_DISPLAY (x11)); + + g_print ("MixVideoInitParams test is done!\n"); + } + +/* test MixVideo */ + + { + MixVideo *video = mix_video_new (); + MixVideoInitParams *init_params = mix_videoinitparams_new (); + MixDisplayX11 *x11 = mix_displayx11_new (); + MixDrmParams *drm = mix_drmparams_new (); + MixCodecMode mode = MIX_CODEC_MODE_DECODE; + + mix_displayx11_set_drawable (x11, 1024); + mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11)); + + mix_video_initialize (video, mode, init_params, drm); + +/* TODO: add more test cases */ + +/* unref the objects. 
*/ + + mix_params_unref (MIX_PARAMS (init_params)); + mix_params_unref (MIX_PARAMS (drm)); + mix_display_unref (MIX_DISPLAY (x11)); + g_object_unref (G_OBJECT (video)); + + g_print ("MixVideo test is done!\n"); + } +} diff --git a/mix_video/test/src/test_framemanager.c b/mix_video/test/src/test_framemanager.c deleted file mode 100644 index f4b8be9..0000000 --- a/mix_video/test/src/test_framemanager.c +++ /dev/null @@ -1,200 +0,0 @@ -#include "../../src/mixframemanager.h" - -gboolean stop_thread = FALSE; -GCond* data_cond = NULL; -GMutex* data_mutex = NULL; - - -void *deque_function(void *data) { - - MixFrameManager *fm = (MixFrameManager *) data; - MIX_RESULT mixresult; - MixVideoFrame *mvf = NULL; - guint64 pts; - while(!stop_thread) { - - g_mutex_lock (data_mutex); - - mixresult = mix_framemanager_dequeue(fm, &mvf); - if(mixresult == MIX_RESULT_SUCCESS) { - mixresult = mix_videoframe_get_timestamp(mvf, &pts); - g_print("dequeued timestamp = %"G_GINT64_FORMAT"\n", pts); - /* mix_videoframe_unref(mvf); */ - } else if(mixresult == MIX_RESULT_FRAME_NOTAVAIL) { - g_print("mixresult == MIX_RESULT_FRAME_NOTAVAIL\n"); - g_cond_wait (data_cond, data_mutex); - } - - g_mutex_unlock (data_mutex); - - } -} - -void shuffle(GPtrArray *list) { - guint idx, jdx; - guint len = list->len; - for (idx = 0; idx < len - 1; idx++) { - jdx = rand() % len; - if (idx != jdx) { - gpointer tmp = g_ptr_array_index(list, jdx); - g_ptr_array_index(list, jdx) = g_ptr_array_index(list, idx); - g_ptr_array_index(list, idx) = tmp; - } - } -} - -int main() { - MIX_RESULT mixresult; - - gint fps_n = 24000; - gint fps_d = 1001; - -/* - gint fps_n = 2500000; - gint fps_d = 104297; -*/ - GPtrArray *fa = NULL; - MixFrameManager *fm = NULL; - MixVideoFrame *mvf = NULL; - MixVideoFrame *mvf_1st = NULL; - - gint idx = 0; - guint64 pts = 0; - - GThread *deque_thread = NULL; - GError *deque_thread_error = NULL; - - /* first ting first */ - g_type_init(); - - /* create frame manager */ - fm = mix_framemanager_new(); - if (!fm) { - goto cleanup; - } - - /* initialize frame manager */ - mixresult = mix_framemanager_initialize(fm, - MIX_FRAMEORDER_MODE_DISPLAYORDER, fps_n, fps_d); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - /* create frame_array */ - fa = g_ptr_array_sized_new(64); - if (!fa) { - goto cleanup; - } - - for (idx = 0; idx < 16; idx++) { - /* generate MixVideoFrame */ - mvf = mix_videoframe_new(); - if (!mvf) { - goto cleanup; - } - - pts = idx * G_USEC_PER_SEC * G_GINT64_CONSTANT(1000) * fps_d / fps_n; - mixresult = mix_videoframe_set_timestamp(mvf, pts); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - g_print("original timestamp = %"G_GINT64_FORMAT"\n", pts); - - if (idx == 0) { - mvf_1st = mvf; - } else { - g_ptr_array_add(fa, (gpointer) mvf); - } - } - - /* shuffle the array */ - shuffle( fa); - - data_mutex = g_mutex_new (); - if(!data_mutex) { - goto cleanup; - } - - data_cond = g_cond_new(); - if(!data_cond) { - goto cleanup; - } - - - /* create another thread to dequeue */ - deque_thread = g_thread_create((GThreadFunc) deque_function, (void *) fm, - TRUE, &deque_thread_error); - if (!deque_thread) { - goto cleanup; - } - - /* enqueue */ - mixresult = mix_framemanager_enqueue(fm, mvf_1st); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - mixresult = mix_videoframe_get_timestamp(mvf_1st, &pts); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } - g_print("shuffled timestamp = %"G_GINT64_FORMAT"\n", pts); - - for (idx = 0; idx < fa->len; idx++) { - - 
g_mutex_lock (data_mutex); - - /* wait for 100ms to enqueue another frame */ - g_usleep(G_USEC_PER_SEC / 10 ); - - mvf = (MixVideoFrame *) g_ptr_array_index(fa, idx); - mixresult = mix_framemanager_enqueue(fm, mvf); - - /* wake up deque thread */ - g_cond_signal (data_cond); - - - g_mutex_unlock (data_mutex); - - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - mixresult = mix_videoframe_get_timestamp(mvf, &pts); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - g_print("shuffled timestamp = %"G_GINT64_FORMAT"\n", pts); - } - - getchar(); - - stop_thread = TRUE; - - /* wake up deque thread */ - g_cond_signal (data_cond); - - g_thread_join(deque_thread); - -cleanup: - - if(data_mutex) { - g_mutex_free(data_mutex); - } - - if(data_cond) { - g_cond_free(data_cond); - } - - if (fm) { - mix_framemanager_unref(fm); - } - - if (fa) { - g_ptr_array_free(fa, TRUE); - } - - return 0; -} diff --git a/mix_video/test/src/test_framemanager.cpp b/mix_video/test/src/test_framemanager.cpp new file mode 100644 index 0000000..f4b8be9 --- /dev/null +++ b/mix_video/test/src/test_framemanager.cpp @@ -0,0 +1,200 @@ +#include "../../src/mixframemanager.h" + +gboolean stop_thread = FALSE; +GCond* data_cond = NULL; +GMutex* data_mutex = NULL; + + +void *deque_function(void *data) { + + MixFrameManager *fm = (MixFrameManager *) data; + MIX_RESULT mixresult; + MixVideoFrame *mvf = NULL; + guint64 pts; + while(!stop_thread) { + + g_mutex_lock (data_mutex); + + mixresult = mix_framemanager_dequeue(fm, &mvf); + if(mixresult == MIX_RESULT_SUCCESS) { + mixresult = mix_videoframe_get_timestamp(mvf, &pts); + g_print("dequeued timestamp = %"G_GINT64_FORMAT"\n", pts); + /* mix_videoframe_unref(mvf); */ + } else if(mixresult == MIX_RESULT_FRAME_NOTAVAIL) { + g_print("mixresult == MIX_RESULT_FRAME_NOTAVAIL\n"); + g_cond_wait (data_cond, data_mutex); + } + + g_mutex_unlock (data_mutex); + + } +} + +void shuffle(GPtrArray *list) { + guint idx, jdx; + guint len = list->len; + for (idx = 0; idx < len - 1; idx++) { + jdx = rand() % len; + if (idx != jdx) { + gpointer tmp = g_ptr_array_index(list, jdx); + g_ptr_array_index(list, jdx) = g_ptr_array_index(list, idx); + g_ptr_array_index(list, idx) = tmp; + } + } +} + +int main() { + MIX_RESULT mixresult; + + gint fps_n = 24000; + gint fps_d = 1001; + +/* + gint fps_n = 2500000; + gint fps_d = 104297; +*/ + GPtrArray *fa = NULL; + MixFrameManager *fm = NULL; + MixVideoFrame *mvf = NULL; + MixVideoFrame *mvf_1st = NULL; + + gint idx = 0; + guint64 pts = 0; + + GThread *deque_thread = NULL; + GError *deque_thread_error = NULL; + + /* first ting first */ + g_type_init(); + + /* create frame manager */ + fm = mix_framemanager_new(); + if (!fm) { + goto cleanup; + } + + /* initialize frame manager */ + mixresult = mix_framemanager_initialize(fm, + MIX_FRAMEORDER_MODE_DISPLAYORDER, fps_n, fps_d); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + /* create frame_array */ + fa = g_ptr_array_sized_new(64); + if (!fa) { + goto cleanup; + } + + for (idx = 0; idx < 16; idx++) { + /* generate MixVideoFrame */ + mvf = mix_videoframe_new(); + if (!mvf) { + goto cleanup; + } + + pts = idx * G_USEC_PER_SEC * G_GINT64_CONSTANT(1000) * fps_d / fps_n; + mixresult = mix_videoframe_set_timestamp(mvf, pts); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + g_print("original timestamp = %"G_GINT64_FORMAT"\n", pts); + + if (idx == 0) { + mvf_1st = mvf; + } else { + g_ptr_array_add(fa, (gpointer) mvf); + } + } + + /* shuffle the array */ + shuffle( fa); 
+ + data_mutex = g_mutex_new (); + if(!data_mutex) { + goto cleanup; + } + + data_cond = g_cond_new(); + if(!data_cond) { + goto cleanup; + } + + + /* create another thread to dequeue */ + deque_thread = g_thread_create((GThreadFunc) deque_function, (void *) fm, + TRUE, &deque_thread_error); + if (!deque_thread) { + goto cleanup; + } + + /* enqueue */ + mixresult = mix_framemanager_enqueue(fm, mvf_1st); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + mixresult = mix_videoframe_get_timestamp(mvf_1st, &pts); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } + g_print("shuffled timestamp = %"G_GINT64_FORMAT"\n", pts); + + for (idx = 0; idx < fa->len; idx++) { + + g_mutex_lock (data_mutex); + + /* wait for 100ms to enqueue another frame */ + g_usleep(G_USEC_PER_SEC / 10 ); + + mvf = (MixVideoFrame *) g_ptr_array_index(fa, idx); + mixresult = mix_framemanager_enqueue(fm, mvf); + + /* wake up deque thread */ + g_cond_signal (data_cond); + + + g_mutex_unlock (data_mutex); + + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + mixresult = mix_videoframe_get_timestamp(mvf, &pts); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } + + g_print("shuffled timestamp = %"G_GINT64_FORMAT"\n", pts); + } + + getchar(); + + stop_thread = TRUE; + + /* wake up deque thread */ + g_cond_signal (data_cond); + + g_thread_join(deque_thread); + +cleanup: + + if(data_mutex) { + g_mutex_free(data_mutex); + } + + if(data_cond) { + g_cond_free(data_cond); + } + + if (fm) { + mix_framemanager_unref(fm); + } + + if (fa) { + g_ptr_array_free(fa, TRUE); + } + + return 0; +} -- cgit v1.2.3 From cb612c9509e1c509dd0d3aefd07f960006d6b649 Mon Sep 17 00:00:00 2001 From: "Liu, Shuo" Date: Wed, 16 Mar 2011 19:58:13 +0800 Subject: glib free and cpp based libmix Change-Id: Id4bbc51c59145babb8088ad0973a3b628b4b11cd Signed-off-by: Liu, Shuo --- Android.mk | 2 - mix_common/autogen.sh | 8 - mix_common/src/Android.mk | 21 +- mix_common/src/mixdrmparams.cpp | 26 +- mix_common/src/mixdrmparams.h | 18 +- mix_common/src/mixlog.cpp | 347 +- mix_common/src/mixlog.h | 7 +- mix_common/src/mixparams.cpp | 131 +- mix_common/src/mixparams.h | 136 +- mix_common/src/mixresult.h | 120 +- mix_vbp/autogen.sh | 19 - mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h | 1594 ++--- .../viddec_fw/fw/codecs/h264/include/h264parse.h | 144 +- .../fw/codecs/h264/include/h264parse_dpb.h | 102 +- .../fw/codecs/h264/include/h264parse_sei.h | 314 +- mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk | 4 - .../viddec_fw/fw/codecs/h264/parser/h264parse.c | 970 +-- .../fw/codecs/h264/parser/h264parse_bsd.c | 308 +- .../fw/codecs/h264/parser/h264parse_dpb.c | 6617 ++++++++++---------- .../fw/codecs/h264/parser/h264parse_math.c | 130 +- .../fw/codecs/h264/parser/h264parse_mem.c | 194 +- .../fw/codecs/h264/parser/h264parse_pps.c | 228 +- .../fw/codecs/h264/parser/h264parse_sei.c | 1880 +++--- .../viddec_fw/fw/codecs/h264/parser/h264parse_sh.c | 1270 ++-- .../fw/codecs/h264/parser/h264parse_sps.c | 922 +-- .../fw/codecs/h264/parser/mix_vbp_h264_stubs.c | 990 +-- .../fw/codecs/h264/parser/viddec_h264_parse.c | 938 +-- .../fw/codecs/h264/parser/viddec_h264_workload.c | 1968 +++--- mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h | 2 +- .../viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h | 12 +- .../fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c | 8 +- .../fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c | 57 +- .../fw/codecs/mp2/parser/viddec_mpeg2_metadata.c | 344 +- .../fw/codecs/mp2/parser/viddec_mpeg2_parse.c | 164 +- 
.../fw/codecs/mp2/parser/viddec_mpeg2_workload.c | 64 +- .../fw/codecs/mp4/include/viddec_fw_mp4.h | 196 +- mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk | 4 - .../fw/codecs/mp4/parser/viddec_fw_mp4_workload.c | 102 +- .../mp4/parser/viddec_mp4_decodevideoobjectplane.c | 34 +- .../fw/codecs/mp4/parser/viddec_mp4_parse.c | 208 +- .../fw/codecs/mp4/parser/viddec_mp4_parse.h | 30 +- .../fw/codecs/mp4/parser/viddec_mp4_shortheader.c | 68 +- .../mp4/parser/viddec_mp4_videoobjectlayer.c | 158 +- .../mp4/parser/viddec_mp4_videoobjectplane.c | 92 +- .../fw/codecs/mp4/parser/viddec_mp4_visualobject.c | 152 +- .../fw/codecs/mp4/parser/viddec_parse_sc_mp4.c | 48 +- .../viddec_fw/fw/codecs/vc1/include/vc1common.h | 138 +- .../fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c | 6 +- mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h | 246 +- mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c | 106 +- .../fw/codecs/vc1/parser/vc1parse_bitplane.c | 308 +- .../viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c | 14 +- .../fw/codecs/vc1/parser/vc1parse_bpic_adv.c | 28 +- .../fw/codecs/vc1/parser/vc1parse_common_defs.h | 1066 ++-- .../fw/codecs/vc1/parser/vc1parse_common_tables.c | 136 +- .../viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c | 4 +- .../fw/codecs/vc1/parser/vc1parse_ipic_adv.c | 48 +- .../fw/codecs/vc1/parser/vc1parse_pic_com.c | 6 +- .../fw/codecs/vc1/parser/vc1parse_pic_com_adv.c | 34 +- .../viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c | 14 +- .../fw/codecs/vc1/parser/vc1parse_ppic_adv.c | 38 +- .../fw/codecs/vc1/parser/vc1parse_vopdq.c | 30 +- .../fw/codecs/vc1/parser/viddec_vc1_parse.c | 374 +- .../fw/codecs/vc1/parser/viddec_vc1_workload.c | 1294 ++-- mix_vbp/viddec_fw/fw/include/viddec_debug.h | 22 +- mix_vbp/viddec_fw/fw/parser/Android.mk | 6 +- mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c | 82 +- mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h | 30 +- .../viddec_fw/fw/parser/include/ipc_fw_custom.h | 56 +- .../viddec_fw/fw/parser/include/viddec_emitter.h | 4 +- .../viddec_fw/fw/parser/include/viddec_fw_debug.h | 10 +- .../fw/parser/include/viddec_fw_parser_fw_ipc.h | 70 +- .../fw/parser/include/viddec_parser_ops.h | 12 +- mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h | 12 +- .../viddec_fw/fw/parser/include/viddec_pm_parse.h | 4 +- .../fw/parser/include/viddec_pm_utils_bstream.h | 22 +- .../fw/parser/include/viddec_pm_utils_list.h | 4 +- mix_vbp/viddec_fw/fw/parser/main.c | 224 +- mix_vbp/viddec_fw/fw/parser/utils.c | 70 +- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 2744 ++++---- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h | 6 +- mix_vbp/viddec_fw/fw/parser/vbp_loader.c | 220 +- mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 332 +- mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 2006 +++--- mix_vbp/viddec_fw/fw/parser/vbp_trace.c | 18 +- mix_vbp/viddec_fw/fw/parser/vbp_trace.h | 2 +- mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 749 +-- mix_vbp/viddec_fw/fw/parser/vbp_utils.h | 66 +- mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c | 1764 +++--- mix_vbp/viddec_fw/fw/parser/viddec_emit.c | 12 +- mix_vbp/viddec_fw/fw/parser/viddec_intr.c | 14 +- mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c | 54 +- .../fw/parser/viddec_parse_sc_fast_loop.c | 310 +- mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c | 2 +- mix_vbp/viddec_fw/fw/parser/viddec_pm.c | 322 +- mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c | 42 +- mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c | 20 +- mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c | 8 +- mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c | 59 +- 
.../viddec_fw/fw/parser/viddec_pm_utils_bstream.c | 145 +- mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c | 54 +- mix_vbp/viddec_fw/include/viddec_fw_common_defs.h | 94 +- mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h | 338 +- mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h | 92 +- mix_vbp/viddec_fw/include/viddec_fw_item_types.h | 466 +- mix_vbp/viddec_fw/include/viddec_fw_parser_host.h | 344 +- mix_vbp/viddec_fw/include/viddec_fw_workload.h | 78 +- mix_video/autogen.sh | 19 - .../docs/reference/MixVideo/html/MixBuffer.html | 26 +- .../docs/reference/MixVideo/html/MixDisplay.html | 20 +- .../MixVideo/html/MixVideo-mixvideodef.html | 14 +- .../docs/reference/MixVideo/html/MixVideo.html | 26 +- .../MixVideo/html/MixVideoConfigParamsDec.html | 94 +- .../MixVideo/html/MixVideoConfigParamsDecMP42.html | 20 +- .../MixVideo/html/MixVideoConfigParamsDecVC1.html | 4 +- .../MixVideo/html/MixVideoConfigParamsEnc.html | 150 +- .../MixVideo/html/MixVideoConfigParamsEncH264.html | 30 +- .../html/MixVideoConfigParamsEncMPEG4.html | 30 +- .../MixVideo/html/MixVideoDecodeParams.html | 20 +- .../reference/MixVideo/html/MixVideoFrame.html | 40 +- .../MixVideo/html/MixVideoRenderParams.html | 22 +- mix_video/src/Android.mk | 22 +- mix_video/src/mixbuffer.cpp | 132 +- mix_video/src/mixbuffer.h | 68 +- mix_video/src/mixbuffer_private.h | 8 +- mix_video/src/mixbufferpool.cpp | 416 +- mix_video/src/mixbufferpool.h | 56 +- mix_video/src/mixdisplay.cpp | 118 +- mix_video/src/mixdisplay.h | 56 +- mix_video/src/mixdisplayandroid.cpp | 134 +- mix_video/src/mixdisplayandroid.h | 186 +- mix_video/src/mixdisplayx11.cpp | 206 +- mix_video/src/mixdisplayx11.h | 78 +- mix_video/src/mixframemanager.cpp | 631 +- mix_video/src/mixframemanager.h | 76 +- mix_video/src/mixsurfacepool.cpp | 640 +- mix_video/src/mixsurfacepool.h | 66 +- mix_video/src/mixvideo.cpp | 3014 ++++----- mix_video/src/mixvideo.h | 153 +- mix_video/src/mixvideo_private.h | 36 +- mix_video/src/mixvideocaps.cpp | 166 +- mix_video/src/mixvideocaps.h | 48 +- mix_video/src/mixvideoconfigparams.cpp | 62 +- mix_video/src/mixvideoconfigparams.h | 18 +- mix_video/src/mixvideoconfigparamsdec.cpp | 729 +-- mix_video/src/mixvideoconfigparamsdec.h | 194 +- mix_video/src/mixvideoconfigparamsdec_h264.cpp | 60 +- mix_video/src/mixvideoconfigparamsdec_h264.h | 50 +- mix_video/src/mixvideoconfigparamsdec_mp42.cpp | 104 +- mix_video/src/mixvideoconfigparamsdec_mp42.h | 52 +- mix_video/src/mixvideoconfigparamsdec_vc1.cpp | 62 +- mix_video/src/mixvideoconfigparamsdec_vc1.h | 62 +- mix_video/src/mixvideoconfigparamsenc.cpp | 887 ++- mix_video/src/mixvideoconfigparamsenc.h | 403 +- mix_video/src/mixvideoconfigparamsenc_h263.cpp | 151 +- mix_video/src/mixvideoconfigparamsenc_h263.h | 115 +- mix_video/src/mixvideoconfigparamsenc_h264.cpp | 282 +- mix_video/src/mixvideoconfigparamsenc_h264.h | 176 +- mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp | 322 +- mix_video/src/mixvideoconfigparamsenc_mpeg4.h | 151 +- mix_video/src/mixvideoconfigparamsenc_preview.cpp | 210 +- mix_video/src/mixvideoconfigparamsenc_preview.h | 83 +- mix_video/src/mixvideodecodeparams.cpp | 120 +- mix_video/src/mixvideodecodeparams.h | 76 +- mix_video/src/mixvideodef.h | 194 +- mix_video/src/mixvideoencodeparams.cpp | 219 +- mix_video/src/mixvideoencodeparams.h | 120 +- mix_video/src/mixvideoformat.cpp | 454 +- mix_video/src/mixvideoformat.h | 174 +- mix_video/src/mixvideoformat_h264.cpp | 2323 +++---- mix_video/src/mixvideoformat_h264.h | 88 +- mix_video/src/mixvideoformat_mp42.cpp | 1928 +++--- 
mix_video/src/mixvideoformat_mp42.h | 70 +- mix_video/src/mixvideoformat_vc1.cpp | 2510 ++++---- mix_video/src/mixvideoformat_vc1.h | 62 +- mix_video/src/mixvideoformatenc.cpp | 1232 ++-- mix_video/src/mixvideoformatenc.h | 211 +- mix_video/src/mixvideoformatenc_h263.cpp | 3539 ++++++----- mix_video/src/mixvideoformatenc_h263.h | 165 +- mix_video/src/mixvideoformatenc_h264.cpp | 3028 ++++----- mix_video/src/mixvideoformatenc_h264.h | 189 +- mix_video/src/mixvideoformatenc_mpeg4.cpp | 2765 ++++---- mix_video/src/mixvideoformatenc_mpeg4.h | 168 +- mix_video/src/mixvideoformatenc_preview.cpp | 1691 +++-- mix_video/src/mixvideoformatenc_preview.h | 123 +- mix_video/src/mixvideoformatqueue.h | 10 +- mix_video/src/mixvideoframe.cpp | 452 +- mix_video/src/mixvideoframe.h | 170 +- mix_video/src/mixvideoframe_private.h | 44 +- mix_video/src/mixvideoinitparams.cpp | 150 +- mix_video/src/mixvideoinitparams.h | 58 +- mix_video/src/mixvideorenderparams.cpp | 414 +- mix_video/src/mixvideorenderparams.h | 160 +- mix_video/src/mixvideorenderparams_internal.h | 6 +- mix_video/src/mixvideothread.cpp | 36 +- mix_video/src/mixvideothread.h | 38 +- mix_video/src/test.cpp | 102 +- mix_video/test/autogen.sh | 1 - mix_video/test/src/test_framemanager.cpp | 294 +- 199 files changed, 35387 insertions(+), 35025 deletions(-) delete mode 100644 mix_common/autogen.sh delete mode 100644 mix_vbp/autogen.sh delete mode 100644 mix_video/autogen.sh delete mode 100644 mix_video/test/autogen.sh diff --git a/Android.mk b/Android.mk index 3b5ef37..135c4c4 100644 --- a/Android.mk +++ b/Android.mk @@ -3,8 +3,6 @@ include $(CLEAR_VARS) VENDORS_INTEL_MRST_LIBMIX_ROOT := $(LOCAL_PATH) -GLIB_TOP := hardware/intel/glib - include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_common/src/Android.mk #include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_audio/src/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_video/src/Android.mk diff --git a/mix_common/autogen.sh b/mix_common/autogen.sh deleted file mode 100644 index e123d49..0000000 --- a/mix_common/autogen.sh +++ /dev/null @@ -1,8 +0,0 @@ -package=MixCommon - -aclocal -I m4/ $ACLOCAL_FLAGS || exit 1 -libtoolize --copy --force || exit 1 -autoheader -v || exit 1 -autoconf -v || exit 1 -automake -a -c -v || exit 1 -#autoreconf -v --install diff --git a/mix_common/src/Android.mk b/mix_common/src/Android.mk index 4042f95..3dffe11 100644 --- a/mix_common/src/Android.mk +++ b/mix_common/src/Android.mk @@ -4,23 +4,28 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES := \ mixlog.cpp \ mixparams.cpp \ - mixdrmparams.cpp + mixdrmparams.cpp \ + j_slist.cpp \ + j_queue.cpp \ + j_hashtable.cpp + LOCAL_C_INCLUDES := \ - $(LOCAL_PATH) \ - $(GLIB_TOP) \ - $(GLIB_TOP)/android \ - $(GLIB_TOP)/glib + $(LOCAL_PATH) + +LOCAL_SHARED_LIBRARIES := \ + libcutils LOCAL_CFLAGS := -DANDROID -LOCAL_SHARED_LIBRARIES := \ - libglib-2.0 -# libgmodule-2.0 LOCAL_COPY_HEADERS_TO := libmixcommon LOCAL_COPY_HEADERS := \ + mixtypes.h \ + j_slist.h \ + j_queue.h \ + j_hashtable.h \ mixlog.h \ mixresult.h \ mixparams.h \ diff --git a/mix_common/src/mixdrmparams.cpp b/mix_common/src/mixdrmparams.cpp index c75b184..ae0ec80 100644 --- a/mix_common/src/mixdrmparams.cpp +++ b/mix_common/src/mixdrmparams.cpp @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. + Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. 
Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -23,22 +23,22 @@ MixDrmParams::~MixDrmParams() { } MixDrmParams *mix_drmparams_new(void) { - return new MixDrmParams(); + return new MixDrmParams(); } -MixDrmParams *mix_drmparams_ref(MixDrmParams *mix) { - return (MixDrmParams*)mix_params_ref(MIX_PARAMS(mix)); +MixDrmParams *mix_drmparams_ref(MixDrmParams *mix) { + return (MixDrmParams*)mix_params_ref(MIX_PARAMS(mix)); } MixParams * MixDrmParams::dup () const { - MixParams* dup = new MixDrmParams(); - if (NULL != dup) { - if (FALSE == copy(dup)) { - dup->Unref(); - dup = NULL; - } - } - return dup; + MixParams* dup = new MixDrmParams(); + if (NULL != dup) { + if (FALSE == copy(dup)) { + dup->Unref(); + dup = NULL; + } + } + return dup; } diff --git a/mix_common/src/mixdrmparams.h b/mix_common/src/mixdrmparams.h index 7dc7512..9bbb2d0 100644 --- a/mix_common/src/mixdrmparams.h +++ b/mix_common/src/mixdrmparams.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. + Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -21,7 +21,7 @@ /** * MIX_IS_DRMPARAMS: * @obj: an object. 
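The dup() above is the library's clone idiom: allocate a blank instance, run the virtual copy(), and unref the half-built object if the copy fails. A hypothetical caller, using only the entry points introduced in this commit:

    MixDrmParams *params = mix_drmparams_new();
    MixParams *clone = mix_params_dup(MIX_PARAMS(params));  // NULL if allocation or copy() failed
    if (clone != NULL) {
        /* ... use the clone independently of the original ... */
        mix_params_unref(clone);
    }
    mix_drmparams_unref(params);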
- * + * * Checks if the given object is an instance of #MixParams */ #define MIX_IS_DRMPARAMS(obj) (NULL != MIX_DRMPARAMS(obj)) @@ -33,15 +33,15 @@ */ class MixDrmParams : public MixParams { public: - MixDrmParams(); - virtual ~MixDrmParams(); - virtual MixParams * dup () const; + MixDrmParams(); + virtual ~MixDrmParams(); + virtual MixParams * dup () const; }; /** * mix_drmparams_new: * @returns: A newly allocated instance of #MixDrmParams - * + * * Use this method to create new instance of #MixDrmParams */ MixDrmParams *mix_drmparams_new(void); @@ -50,7 +50,7 @@ MixDrmParams *mix_drmparams_new(void); * mix_drmparams_ref: * @mix: object to add reference * @returns: the MixDrmParams instance where reference count has been increased. - * + * * Add reference count. */ MixDrmParams *mix_drmparams_ref(MixDrmParams *mix); @@ -58,7 +58,7 @@ MixDrmParams *mix_drmparams_ref(MixDrmParams *mix); /** * mix_drmparams_unref: * @obj: object to unref. - * + * * Decrement reference count of the object. */ #define mix_drmparams_unref(obj) mix_params_unref(MIX_PARAMS(obj)) diff --git a/mix_common/src/mixlog.cpp b/mix_common/src/mixlog.cpp index bef6a24..cad5c5a 100644 --- a/mix_common/src/mixlog.cpp +++ b/mix_common/src/mixlog.cpp @@ -10,7 +10,9 @@ #include "mixlog.h" #ifndef ANDROID -#include +#ifdef MIX_LOG_USE_HT +#include "j_hashtable.h" +#endif #endif #define MIX_DELOG_COMPS "MIX_DELOG_COMPS" @@ -26,234 +28,239 @@ static GStaticMutex g_mutex = G_STATIC_MUTEX_INIT; #ifdef MIX_LOG_USE_HT -static GHashTable *g_defile_ht = NULL, *g_defunc_ht = NULL, *g_decom_ht = NULL; -static gint g_mix_log_level = MIX_LOG_LEVEL_VERBOSE; -static gint g_refcount = 0; - -#define mix_log_destroy_ht(ht) if(ht) { g_hash_table_destroy(ht); ht = NULL; } - -void mix_log_get_ht(GHashTable **ht, const gchar *var) { - - const char *delog_list = NULL; - char *item = NULL; - if (!ht || !var) { - return; - } - - delog_list = g_getenv(var); - if (!delog_list) { - return; - } - - if (*ht == NULL) { - *ht = g_hash_table_new(g_str_hash, g_str_equal); - if (*ht == NULL) { - return; - } - } - - item = strtok((char *) delog_list, MIX_DELOG_DELIMITERS); - while (item != NULL) { - g_hash_table_insert(*ht, item, "true"); - item = strtok(NULL, MIX_DELOG_DELIMITERS); - } +static JHashTable *g_defile_ht = NULL, *g_defunc_ht = NULL, *g_decom_ht = NULL; +static int g_mix_log_level = MIX_LOG_LEVEL_VERBOSE; +static int g_refcount = 0; + +#define mix_log_destroy_ht(ht) if(ht) { \ + if (ht == NULL || ht->ref_count <= 0) return; \ + j_hash_table_remove_all (ht); \ + j_hash_table_unref (ht); \ + ht = NULL; } + +void mix_log_get_ht(JHashTable **ht, const char *var) { + + const char *delog_list = NULL; + char *item = NULL; + if (!ht || !var) { + return; + } + + delog_list = g_getenv(var); + if (!delog_list) { + return; + } + + if (*ht == NULL) { + *ht = j_hash_table_new_full(g_str_hash, g_str_equal, NULL, NULL); + if (*ht == NULL) { + return; + } + } + + item = strtok((char *) delog_list, MIX_DELOG_DELIMITERS); + while (item != NULL) { + j_hash_table_insert(*ht, item, "true"); + item = strtok(NULL, MIX_DELOG_DELIMITERS); + } } void mix_log_initialize_func() { - const gchar *mix_log_level = NULL; - g_static_mutex_lock(&g_mutex); + const char *mix_log_level = NULL; + g_static_mutex_lock(&g_mutex); - if (g_refcount == 0) { + if (g_refcount == 0) { - mix_log_level = g_getenv(MIX_LOG_LEVEL); - if (mix_log_level) { - g_mix_log_level = atoi(mix_log_level); - } + mix_log_level = g_getenv(MIX_LOG_LEVEL); + if (mix_log_level) { + g_mix_log_level = atoi(mix_log_level); 
+ } - mix_log_get_ht(&g_decom_ht, MIX_DELOG_COMPS); - mix_log_get_ht(&g_defile_ht, MIX_DELOG_FILES); - mix_log_get_ht(&g_defunc_ht, MIX_DELOG_FUNCS); - } + mix_log_get_ht(&g_decom_ht, MIX_DELOG_COMPS); + mix_log_get_ht(&g_defile_ht, MIX_DELOG_FILES); + mix_log_get_ht(&g_defunc_ht, MIX_DELOG_FUNCS); + } - g_refcount++; + g_refcount++; - g_static_mutex_unlock(&g_mutex); + g_static_mutex_unlock(&g_mutex); } void mix_log_finalize_func() { - g_static_mutex_lock(&g_mutex); + g_static_mutex_lock(&g_mutex); - g_refcount--; + g_refcount--; - if (g_refcount == 0) { - mix_log_destroy_ht(g_decom_ht); - mix_log_destroy_ht(g_defile_ht); - mix_log_destroy_ht(g_defunc_ht); + if (g_refcount == 0) { + mix_log_destroy_ht(g_decom_ht); + mix_log_destroy_ht(g_defile_ht); + mix_log_destroy_ht(g_defunc_ht); - g_mix_log_level = MIX_LOG_LEVEL_VERBOSE; - } + g_mix_log_level = MIX_LOG_LEVEL_VERBOSE; + } - if (g_refcount < 0) { - g_refcount = 0; - } + if (g_refcount < 0) { + g_refcount = 0; + } - g_static_mutex_unlock(&g_mutex); + g_static_mutex_unlock(&g_mutex); } -void mix_log_func(const gchar* comp, gint level, const gchar *file, - const gchar *func, gint line, const gchar *format, ...) { +void mix_log_func(const char* comp, int level, const char *file, + const char *func, int line, const char *format, ...) { - va_list args; - static gchar* loglevel[4] = {"**ERROR", "*WARNING", "INFO", "VERBOSE"}; + va_list args; + static char* loglevel[4] = {"**ERROR", "*WARNING", "INFO", "VERBOSE"}; - if (!format) { - return; - } + if (!format) { + return; + } - g_static_mutex_lock(&g_mutex); + g_static_mutex_lock(&g_mutex); - if (level > g_mix_log_level) { - goto exit; - } + if (level > g_mix_log_level) { + goto exit; + } - if (g_decom_ht) { - if (g_hash_table_lookup(g_decom_ht, comp)) { - goto exit; - } - } + if (g_decom_ht) { + if (j_hash_table_lookup(g_decom_ht, comp)) { + goto exit; + } + } - if (g_defile_ht) { - if (g_hash_table_lookup(g_defile_ht, file)) { - goto exit; - } - } + if (g_defile_ht) { + if (j_hash_table_lookup(g_defile_ht, file)) { + goto exit; + } + } - if (g_defunc_ht) { - if (g_hash_table_lookup(g_defunc_ht, func)) { - goto exit; - } - } + if (g_defunc_ht) { + if (j_hash_table_lookup(g_defunc_ht, func)) { + goto exit; + } + } - if(level > MIX_LOG_LEVEL_VERBOSE) { - level = MIX_LOG_LEVEL_VERBOSE; - } - if(level < MIX_LOG_LEVEL_ERROR) { - level = MIX_LOG_LEVEL_ERROR; - } + if (level > MIX_LOG_LEVEL_VERBOSE) { + level = MIX_LOG_LEVEL_VERBOSE; + } + if (level < MIX_LOG_LEVEL_ERROR) { + level = MIX_LOG_LEVEL_ERROR; + } - g_print("%s : %s : %s : ", loglevel[level - 1], file, func); + g_print("%s : %s : %s : ", loglevel[level - 1], file, func); - va_start(args, format); - g_vprintf(format, args); - va_end(args); + va_start(args, format); + g_vprintf(format, args); + va_end(args); - exit: g_static_mutex_unlock(&g_mutex); +exit: + g_static_mutex_unlock(&g_mutex); } #else /* MIX_LOG_USE_HT */ -gboolean mix_shall_delog(const gchar *name, const gchar *var) { +bool mix_shall_delog(const char *name, const char *var) { - const char *delog_list = NULL; - char *item = NULL; - gboolean delog = FALSE; + const char *delog_list = NULL; + char *item = NULL; + bool delog = FALSE; - if (!name || !var) { - return delog; - } + if (!name || !var) { + return delog; + } - delog_list = g_getenv(var); - if (!delog_list) { - return delog; - } + delog_list = g_getenv(var); + if (!delog_list) { + return delog; + } - item = strtok((char *) delog_list, MIX_DELOG_DELIMITERS); - while (item != NULL) { - if (strcmp(item, name) == 0) { - delog = 
TRUE; - break; - } - item = strtok(NULL, MIX_DELOG_DELIMITERS); - } + item = strtok((char *) delog_list, MIX_DELOG_DELIMITERS); + while (item != NULL) { + if (strcmp(item, name) == 0) { + delog = TRUE; + break; + } + item = strtok(NULL, MIX_DELOG_DELIMITERS); + } - return delog; + return delog; } -gboolean mix_log_enabled() { - - const char *value = NULL; - value = g_getenv(MIX_LOG_ENABLE); - if(!value) { - return FALSE; - } - - if(value[0] == '0') { - return FALSE; - } - return TRUE; +bool mix_log_enabled() { + + const char *value = NULL; + value = g_getenv(MIX_LOG_ENABLE); + if (!value) { + return FALSE; + } + + if (value[0] == '0') { + return FALSE; + } + return TRUE; } -void mix_log_func(const gchar* comp, gint level, const gchar *file, - const gchar *func, gint line, const gchar *format, ...) { +void mix_log_func(const char* comp, int level, const char *file, + const char *func, int line, const char *format, ...) { - va_list args; - static gchar* loglevel[4] = { "**ERROR", "*WARNING", "INFO", "VERBOSE" }; + va_list args; + static char* loglevel[4] = { "**ERROR", "*WARNING", "INFO", "VERBOSE" }; - const gchar *env_mix_log_level = NULL; - gint mix_log_level_threhold = MIX_LOG_LEVEL_VERBOSE; + const char *env_mix_log_level = NULL; + int mix_log_level_threhold = MIX_LOG_LEVEL_VERBOSE; - if(!mix_log_enabled()) { - return; - } + if (!mix_log_enabled()) { + return; + } - if (!format) { - return; - } + if (!format) { + return; + } - g_static_mutex_lock(&g_mutex); + g_static_mutex_lock(&g_mutex); - /* log level */ - env_mix_log_level = g_getenv(MIX_LOG_LEVEL); - if (env_mix_log_level) { - mix_log_level_threhold = atoi(env_mix_log_level); - } + /* log level */ + env_mix_log_level = g_getenv(MIX_LOG_LEVEL); + if (env_mix_log_level) { + mix_log_level_threhold = atoi(env_mix_log_level); + } - if (level > mix_log_level_threhold) { - goto exit; - } + if (level > mix_log_level_threhold) { + goto exit; + } - /* component */ - if (mix_shall_delog(comp, MIX_DELOG_COMPS)) { - goto exit; - } + /* component */ + if (mix_shall_delog(comp, MIX_DELOG_COMPS)) { + goto exit; + } - /* files */ - if (mix_shall_delog(file, MIX_DELOG_FILES)) { - goto exit; - } + /* files */ + if (mix_shall_delog(file, MIX_DELOG_FILES)) { + goto exit; + } - /* functions */ - if (mix_shall_delog(func, MIX_DELOG_FUNCS)) { - goto exit; - } + /* functions */ + if (mix_shall_delog(func, MIX_DELOG_FUNCS)) { + goto exit; + } - if (level > MIX_LOG_LEVEL_VERBOSE) { - level = MIX_LOG_LEVEL_VERBOSE; - } - if (level < MIX_LOG_LEVEL_ERROR) { - level = MIX_LOG_LEVEL_ERROR; - } + if (level > MIX_LOG_LEVEL_VERBOSE) { + level = MIX_LOG_LEVEL_VERBOSE; + } + if (level < MIX_LOG_LEVEL_ERROR) { + level = MIX_LOG_LEVEL_ERROR; + } - g_print("%s : %s : %s : ", loglevel[level - 1], file, func); + g_print("%s : %s : %s : ", loglevel[level - 1], file, func); - va_start(args, format); - g_vprintf(format, args); - va_end(args); + va_start(args, format); + g_vprintf(format, args); + va_end(args); exit: - g_static_mutex_unlock(&g_mutex); + g_static_mutex_unlock(&g_mutex); } diff --git a/mix_common/src/mixlog.h b/mix_common/src/mixlog.h index dd93046..bf298dc 100644 --- a/mix_common/src/mixlog.h +++ b/mix_common/src/mixlog.h @@ -6,14 +6,15 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. 
Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#include +#include "mixtypes.h" #ifndef __MIX_LOG_H__ #define __MIX_LOG_H__ /* Warning: don't call these functions */ -void mix_log_func(const gchar* comp, gint level, const gchar *file, - const gchar *func, gint line, const gchar *format, ...); +void mix_log_func( + const char* comp, int level, const char *file, + const char *func, int line, const char *format, ...); /* Components */ #define MIX_VIDEO_COMP "mixvideo" diff --git a/mix_common/src/mixparams.cpp b/mix_common/src/mixparams.cpp index 6489339..3fff640 100644 --- a/mix_common/src/mixparams.cpp +++ b/mix_common/src/mixparams.cpp @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. + Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
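The rewritten logger is configured entirely through environment variables read with g_getenv(). A hedged sketch of driving it from a test program; the numeric levels follow the loglevel[] table above (1 = ERROR through 4 = VERBOSE), and real code would go through the MIX_LOG wrapper macros (not shown in this hunk), since the header warns against calling mix_log_func() directly:

    #include <stdlib.h>
    #include "mixlog.h"

    int main() {
        setenv("MIX_LOG_ENABLE", "1", 1);          // logging stays off without this
                                                   // (default, non-MIX_LOG_USE_HT build)
        setenv("MIX_LOG_LEVEL", "3", 1);           // print up to INFO
        setenv("MIX_DELOG_COMPS", "mixvideo", 1);  // de-log one component

        // Illustration only: this WARNING-level message passes the level check
        // but is suppressed because its component is de-logged above.
        mix_log_func(MIX_VIDEO_COMP, 2, __FILE__, __FUNCTION__, __LINE__,
                     "pool size = %d\n", 8);
        return 0;
    }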
@@ -16,112 +16,111 @@ #endif
 
 #include "mixparams.h"
+#include <cutils/atomic.h>
 
 #define DEBUG_REFCOUNT
 
 MixParams::MixParams()
-	:ref_count(1)
-	,_reserved(NULL) {
+    :ref_count(1)
+    ,_reserved(NULL) {
 }
 
 MixParams::~MixParams() {
-	finalize();
+    finalize();
 }
 
 MixParams* MixParams::Ref() {
-	this->ref_count++;
-	return this;
+    this->ref_count++;
+    return this;
 }
 
 void MixParams::Unref() {
-	this->ref_count--;
-	if (0 == this->ref_count) {
-		delete this;
-	}
+    this->ref_count--;
+    if (0 == this->ref_count) {
+        delete this;
+    }
 }
 
 MixParams* MixParams::dup() const {
-	MixParams *ret = new MixParams();
-	if (FALSE != copy(ret)) {
-		return ret;
-	}
-	return NULL;
+    MixParams *ret = new MixParams();
+    if (FALSE != copy(ret)) {
+        return ret;
+    }
+    return NULL;
 }
 
-gboolean MixParams::copy(MixParams* target) const {
-	gboolean ret = FALSE;
-	if ( NULL != target) {
-		return TRUE;
-	}
-	return ret;
+bool MixParams::copy(MixParams* target) const {
+    bool ret = FALSE;
+    if ( NULL != target) {
+        return TRUE;
+    }
+    return ret;
 }
 
 void MixParams::finalize() {
 }
 
-gboolean MixParams::equal(MixParams *obj) const {
-	gboolean ret = FALSE;
-	if ( NULL != obj) {
-		return TRUE;
-	}
-	return ret;
+bool MixParams::equal(MixParams *obj) const {
+    bool ret = FALSE;
+    if ( NULL != obj) {
+        return TRUE;
+    }
+    return ret;
 }
 
 MixParams* mix_params_new () {
-	/* we don't support dynamic types because they really aren't useful,*/
-	/* and could cause ref_count problems */
-	return new MixParams();
+    /* we don't support dynamic types because they really aren't useful,*/
+    /* and could cause ref_count problems */
+    return new MixParams();
 }
 
-gboolean mix_params_copy (MixParams *target, const MixParams *src) {
-	if ( NULL != target && NULL != src) {
-		return src->copy(target);
-	} else
-		return FALSE;
+bool mix_params_copy (MixParams *target, const MixParams *src) {
+    if ( NULL != target && NULL != src) {
+        return src->copy(target);
+    } else
+        return FALSE;
 }
 
 MixParams* mix_params_ref (MixParams *obj) {
-	if (NULL == obj)
-		return NULL;
-	return obj->Ref();
+    if (NULL == obj)
+        return NULL;
+    return obj->Ref();
 }
 
 void mix_params_unref(MixParams *obj) {
-	if (NULL != obj)
-		obj->Unref();
+    if (NULL != obj)
+        obj->Unref();
 }
 
 void mix_params_replace (MixParams **olddata, MixParams *newdata) {
-	if (NULL == olddata)
-		return;
-	MixParams *olddata_val =
-		reinterpret_cast<MixParams*>(g_atomic_pointer_get((gpointer *) olddata));
-	if (olddata_val == newdata)
-		return;
-	if (NULL != newdata)
-		newdata->Ref();
-	while (!g_atomic_pointer_compare_and_exchange ((gpointer *) olddata,
-			olddata_val, newdata)) {
-		olddata_val =
-			reinterpret_cast<MixParams*>(g_atomic_pointer_get ((gpointer *) olddata));
-	}
-	if (NULL != olddata_val)
-		olddata_val->Unref();
+    if (NULL == olddata)
+        return;
+    MixParams *olddata_val = *olddata;
+    if (olddata_val == newdata)
+        return;
+    if (NULL != newdata)
+        newdata->Ref();
+    /* android_atomic_cmpxchg() returns 0 on success, so loop while it fails;
+     * the swap target is the slot itself (olddata), not its current value. */
+    while (android_atomic_cmpxchg(
+               (int32_t)olddata_val, (int32_t)newdata, (volatile int32_t *) olddata)) {
+        olddata_val = *olddata;
+    }
+    if (NULL != olddata_val)
+        olddata_val->Unref();
 }
 
 MixParams * mix_params_dup(const MixParams *obj) {
-	if (NULL != obj) {
-		return obj->dup();
-	} else {
-		return NULL;
-	}
+    if (NULL != obj) {
+        return obj->dup();
+    } else {
+        return NULL;
+    }
 }
 
-gboolean mix_params_equal (MixParams *first, MixParams *second) {
-	if (NULL != first && NULL != second)
-		return first->equal(second);
-	else
-		return FALSE;
+bool mix_params_equal (MixParams *first, MixParams *second) {
+    if (NULL != first && NULL != second)
+        return first->equal(second);
+    else
+        return
FALSE; } diff --git a/mix_common/src/mixparams.h b/mix_common/src/mixparams.h index c1a19a5..f3395f4 100644 --- a/mix_common/src/mixparams.h +++ b/mix_common/src/mixparams.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. + Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -9,7 +9,7 @@ #ifndef __MIX_PARAMS_H__ #define __MIX_PARAMS_H__ -#include +#include "mixtypes.h" #define MIX_PARAMS(obj) (reinterpret_cast ((obj))) #define MIX_PARAMS_CAST(obj) ((MixParams*)(obj)) @@ -32,73 +32,69 @@ class MixParams { public: - MixParams(); - virtual ~MixParams(); - MixParams* Ref(); - void Unref(); - gint GetRefCount() { return ref_count;} - + MixParams(); + virtual ~MixParams(); + MixParams* Ref(); + void Unref(); + int GetRefCount() { + return ref_count; + } + public: - /** - * MixParamsDupFunction: - * @obj: Params to duplicate - * @returns: reference to cloned instance. - * - * Virtual function prototype for methods to create duplicate of instance. - * - */ - virtual MixParams * dup () const; - - /** - * MixParamsCopyFunction: - * @target: target of the copy - * @src: source of the copy - * @returns: boolean indicates if copy is successful. - * - * Virtual function prototype for methods to create copies of instance. - * - */ - virtual gboolean copy(MixParams* target) const; - - /** - * MixParamsFinalizeFunction: - * @obj: Params to finalize - * - * Virtual function prototype for methods to free ressources used by - * object. - */ - virtual void finalize (); - - /** - * MixParamsEqualsFunction: - * @first: first object in the comparison - * @second: second object in the comparison - * - * Virtual function prototype for methods to compare 2 objects and check if they are equal. - */ - virtual gboolean equal (MixParams *obj) const; + /** + * MixParamsDupFunction: + * @obj: Params to duplicate + * @returns: reference to cloned instance. + * + * Virtual function prototype for methods to create duplicate of instance. + * + */ + virtual MixParams * dup () const; + + /** + * MixParamsCopyFunction: + * @target: target of the copy + * @src: source of the copy + * @returns: intean indicates if copy is successful. + * + * Virtual function prototype for methods to create copies of instance. + * + */ + virtual bool copy(MixParams* target) const; + + /** + * MixParamsFinalizeFunction: + * @obj: Params to finalize + * + * Virtual function prototype for methods to free ressources used by + * object. 
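To make the reference-counting contract above concrete, a short lifecycle sketch (hypothetical caller; the counts follow directly from the Ref()/Unref() implementation in this commit):

    MixParams *p = mix_params_new();     // ref_count == 1
    MixParams *q = mix_params_ref(p);    // ref_count == 2, q aliases p

    MixParams *slot = NULL;
    mix_params_replace(&slot, p);        // slot takes a reference: ref_count == 3

    mix_params_unref(q);                 // ref_count == 2
    mix_params_replace(&slot, NULL);     // slot releases p: ref_count == 1
    mix_params_unref(p);                 // ref_count == 0, object deleted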
+ */ + virtual void finalize (); + + /** + * MixParamsEqualsFunction: + * @first: first object in the comparison + * @second: second object in the comparison + * + * Virtual function prototype for methods to compare 2 objects and check if they are equal. + */ + virtual bool equal (MixParams *obj) const; public: - /*< public >*/ - gint ref_count; + /*< public >*/ + int ref_count; - /*< private >*/ - gpointer _reserved; + /*< private >*/ + void* _reserved; }; -/** - * mix_params_get_type: - * @returns: type of this object. - * - * Get type. - */ -//GType mix_params_get_type(void); + /** * mix_params_new: * @returns: return a newly allocated object. - * + * * Create new instance of the object. */ MixParams* mix_params_new(); @@ -107,27 +103,27 @@ MixParams* mix_params_new(); * mix_params_copy: * @target: copy to target * @src: copy from source - * @returns: boolean indicating if copy is successful. - * + * @returns: intean indicating if copy is successful. + * * Copy data from one instance to the other. This method internally invoked the #MixParams::copy method such that derived object will be copied correctly. */ -gboolean mix_params_copy(MixParams *target, const MixParams *src); +bool mix_params_copy(MixParams *target, const MixParams *src); -/** +/** * mix_params_ref: * @obj: a #MixParams object. * @returns: the object with reference count incremented. - * + * * Increment reference count. */ MixParams* mix_params_ref(MixParams *obj); -/** +/** * mix_params_unref: * @obj: a #MixParams object. - * + * * Decrement reference count. */ void mix_params_unref (MixParams *obj); @@ -147,7 +143,7 @@ void mix_params_replace(MixParams **olddata, MixParams *newdata); * mix_params_dup: * @obj: #MixParams object to duplicate. * @returns: A newly allocated duplicate of the object, or NULL if failed. - * + * * Duplicate the given #MixParams and allocate a new instance. This method is chained up properly and derive object will be dupped properly. */ MixParams *mix_params_dup(const MixParams *obj); @@ -156,10 +152,10 @@ MixParams *mix_params_dup(const MixParams *obj); * mix_params_equal: * @first: first object to compare * @second: second object to compare - * @returns: boolean indicates if the 2 object contains same data. - * + * @returns: intean indicates if the 2 object contains same data. + * * Note that the parameter comparison compares the values that are hold inside the object, not for checking if the 2 pointers are of the same instance. */ -gboolean mix_params_equal(MixParams *first, MixParams *second); +bool mix_params_equal(MixParams *first, MixParams *second); #endif diff --git a/mix_common/src/mixresult.h b/mix_common/src/mixresult.h index 0559bc2..e8325f7 100644 --- a/mix_common/src/mixresult.h +++ b/mix_common/src/mixresult.h @@ -1,18 +1,18 @@ /************************************************************************************* * INTEL CONFIDENTIAL - * Copyright 2008-2009 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. 
No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel’s prior + * Copyright 2008-2009 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel’s prior * express written permission. * - * No license under any patent, copyright, trade secret or other intellectual + * No license under any patent, copyright, trade secret or other intellectual * property right is granted to or conferred upon you by disclosure or delivery * of the Materials, either expressly, by implication, inducement, estoppel or * otherwise. Any license under such intellectual property rights must be express @@ -22,69 +22,69 @@ #ifndef MIX_RESULT_H #define MIX_RESULT_H -#include +#include "mixtypes.h" -typedef gint32 MIX_RESULT; +typedef int32 MIX_RESULT; #define MIX_SUCCEEDED(result_code) ((((MIX_RESULT)(result_code)) & 0x80000000) == 0) typedef enum { - /** General success */ - MIX_RESULT_SUCCESS = (MIX_RESULT) 0x00000000, - MIX_RESULT_SUCCESS_CHG = (MIX_RESULT)0x00000001, + /** General success */ + MIX_RESULT_SUCCESS = (MIX_RESULT) 0x00000000, + MIX_RESULT_SUCCESS_CHG = (MIX_RESULT)0x00000001, - /** Module specific success starting number */ + /** Module specific success starting number */ - /** Starting success number for Audio */ - MIX_RESULT_SUCCESS_AUDIO_START = (MIX_RESULT) 0x00010000, - /** Starting success number for Video */ - MIX_RESULT_SUCCESS_VIDEO_START = (MIX_RESULT) 0x00020000, - /** Starting success number for DRM */ - MIX_RESULT_SUCCESS_DRM_START = (MIX_RESULT) 0x00030000 + /** Starting success number for Audio */ + MIX_RESULT_SUCCESS_AUDIO_START = (MIX_RESULT) 0x00010000, + /** Starting success number for Video */ + MIX_RESULT_SUCCESS_VIDEO_START = (MIX_RESULT) 0x00020000, + /** Starting success number for DRM */ + MIX_RESULT_SUCCESS_DRM_START = (MIX_RESULT) 0x00030000 } MIX_SUCCESS_COMMON; typedef enum { - /** General failure */ - MIX_RESULT_FAIL = (MIX_RESULT) 0x80000000, - MIX_RESULT_NULL_PTR = (MIX_RESULT) 0x80000001, - MIX_RESULT_LPE_NOTAVAIL = (MIX_RESULT) 0X80000002, - MIX_RESULT_DIRECT_NOTAVAIL = (MIX_RESULT) 0x80000003, - MIX_RESULT_NOT_SUPPORTED = (MIX_RESULT) 0x80000004, - MIX_RESULT_CONF_MISMATCH = (MIX_RESULT) 0x80000005, - MIX_RESULT_RESUME_NEEDED = (MIX_RESULT) 0x80000007, - MIX_RESULT_WRONGMODE = (MIX_RESULT) 0x80000008, - MIX_RESULT_RESOURCES_NOTAVAIL = (MIX_RESULT)0x80000009, - MIX_RESULT_INVALID_PARAM = (MIX_RESULT)0x8000000a, - MIX_RESULT_ALREADY_INIT = (MIX_RESULT)0x8000000b, - MIX_RESULT_WRONG_STATE = (MIX_RESULT)0x8000000c, - MIX_RESULT_NOT_INIT = (MIX_RESULT)0x8000000d, - MIX_RESULT_NOT_CONFIGURED = (MIX_RESULT)0x8000000e, - MIX_RESULT_STREAM_NOTAVAIL = (MIX_RESULT)0x8000000f, - MIX_RESULT_CODEC_NOTAVAIL = (MIX_RESULT)0x80000010, - MIX_RESULT_CODEC_NOTSUPPORTED = (MIX_RESULT)0x80000011, - MIX_RESULT_INVALID_COUNT = (MIX_RESULT)0x80000012, 
- MIX_RESULT_NOT_ACP = (MIX_RESULT)0x80000013, - MIX_RESULT_INVALID_DECODE_MODE = (MIX_RESULT)0x80000014, - MIX_RESULT_INVALID_STREAM_NAME = (MIX_RESULT)0x80000015, - MIX_RESULT_NO_MEMORY = (MIX_RESULT)0x80000016, - MIX_RESULT_NEED_RETRY = (MIX_RESULT)0x80000017, - MIX_RESULT_SYSTEM_ERRNO = (MIX_RESULT)0x80000018, - MIX_RESULT_AM_REGISTER_FAIL = (MIX_RESULT)0x80000019, - MIX_RESULT_AM_UNREGISTER_FAIL = (MIX_RESULT)0x80000020, - MIX_RESULT_AM_NOTIFY_PAUSE_FAIL = (MIX_RESULT)0x80000021, - MIX_RESULT_AM_NOTIFY_RESUME_FAIL = (MIX_RESULT)0x80000022, + /** General failure */ + MIX_RESULT_FAIL = (MIX_RESULT) 0x80000000, + MIX_RESULT_NULL_PTR = (MIX_RESULT) 0x80000001, + MIX_RESULT_LPE_NOTAVAIL = (MIX_RESULT) 0X80000002, + MIX_RESULT_DIRECT_NOTAVAIL = (MIX_RESULT) 0x80000003, + MIX_RESULT_NOT_SUPPORTED = (MIX_RESULT) 0x80000004, + MIX_RESULT_CONF_MISMATCH = (MIX_RESULT) 0x80000005, + MIX_RESULT_RESUME_NEEDED = (MIX_RESULT) 0x80000007, + MIX_RESULT_WRONGMODE = (MIX_RESULT) 0x80000008, + MIX_RESULT_RESOURCES_NOTAVAIL = (MIX_RESULT)0x80000009, + MIX_RESULT_INVALID_PARAM = (MIX_RESULT)0x8000000a, + MIX_RESULT_ALREADY_INIT = (MIX_RESULT)0x8000000b, + MIX_RESULT_WRONG_STATE = (MIX_RESULT)0x8000000c, + MIX_RESULT_NOT_INIT = (MIX_RESULT)0x8000000d, + MIX_RESULT_NOT_CONFIGURED = (MIX_RESULT)0x8000000e, + MIX_RESULT_STREAM_NOTAVAIL = (MIX_RESULT)0x8000000f, + MIX_RESULT_CODEC_NOTAVAIL = (MIX_RESULT)0x80000010, + MIX_RESULT_CODEC_NOTSUPPORTED = (MIX_RESULT)0x80000011, + MIX_RESULT_INVALID_COUNT = (MIX_RESULT)0x80000012, + MIX_RESULT_NOT_ACP = (MIX_RESULT)0x80000013, + MIX_RESULT_INVALID_DECODE_MODE = (MIX_RESULT)0x80000014, + MIX_RESULT_INVALID_STREAM_NAME = (MIX_RESULT)0x80000015, + MIX_RESULT_NO_MEMORY = (MIX_RESULT)0x80000016, + MIX_RESULT_NEED_RETRY = (MIX_RESULT)0x80000017, + MIX_RESULT_SYSTEM_ERRNO = (MIX_RESULT)0x80000018, + MIX_RESULT_AM_REGISTER_FAIL = (MIX_RESULT)0x80000019, + MIX_RESULT_AM_UNREGISTER_FAIL = (MIX_RESULT)0x80000020, + MIX_RESULT_AM_NOTIFY_PAUSE_FAIL = (MIX_RESULT)0x80000021, + MIX_RESULT_AM_NOTIFY_RESUME_FAIL = (MIX_RESULT)0x80000022, - /** Module specific errors starting number */ + /** Module specific errors starting number */ - /** Starting error number for Audio */ - MIX_RESULT_ERROR_AUDIO_START = (MIX_RESULT) 0x80010000, - /** Starting error number for Video */ - MIX_RESULT_ERROR_VIDEO_START = (MIX_RESULT) 0x80020000, - /** Starting error number for DRM */ - MIX_RESULT_ERROR_DRM_START = (MIX_RESULT) 0x80030000 + /** Starting error number for Audio */ + MIX_RESULT_ERROR_AUDIO_START = (MIX_RESULT) 0x80010000, + /** Starting error number for Video */ + MIX_RESULT_ERROR_VIDEO_START = (MIX_RESULT) 0x80020000, + /** Starting error number for DRM */ + MIX_RESULT_ERROR_DRM_START = (MIX_RESULT) 0x80030000 } MIX_ERROR_COMMON; - /* New success code should be added just above this line */ +/* New success code should be added just above this line */ // MIX_RESULT_IAM_DISABLED, /* 0x80000008 */ // MIX_RESULT_IAM_NOTAVAIL, /* 0x80000009 */ // MIX_RESULT_IAM_REG_FAILED, /* 0x8000000f */ diff --git a/mix_vbp/autogen.sh b/mix_vbp/autogen.sh deleted file mode 100644 index ed2c536..0000000 --- a/mix_vbp/autogen.sh +++ /dev/null @@ -1,19 +0,0 @@ -#INTEL CONFIDENTIAL -#Copyright 2009 Intel Corporation All Rights Reserved. -#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. 
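MIX_SUCCEEDED above reduces to a sign-bit test: every MIX_ERROR_COMMON value sets bit 31 (0x80000000) and every success value leaves it clear, so a single mask classifies any MIX_RESULT. A small illustration (some_mix_call is a hypothetical stand-in for any API returning MIX_RESULT):

    MIX_RESULT r = some_mix_call();
    if (MIX_SUCCEEDED(r)) {
        /* bit 31 clear: MIX_RESULT_SUCCESS, MIX_RESULT_SUCCESS_CHG,
           or one of the module-specific success ranges */
    } else if (r == MIX_RESULT_NO_MEMORY) {
        /* bit 31 set: the specific failure 0x80000016 */
    }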
The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -# - -package=MIXVBP - -#Uncomment the follow line if building documentation using gtkdoc -#gtkdocize --flavour no-tmpl || exit 1 -aclocal -I m4/ $ACLOCAL_FLAGS || exit 1 -libtoolize --copy --force || exit 1 -autoheader -v || exit 1 -autoconf -v || exit 1 -automake -a -c -v || exit 1 - -echo "Now type ./configure to configure $package." -exit 0 diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h index 4618532..1976567 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h +++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h @@ -60,7 +60,7 @@ extern "C" { // Used to check whether the SEI RP is the only way for recovery (cisco contents) // This threshold will decide the interval of recovery even no error detected if no IDR during this time -#define SEI_REC_CHECK_TH 8 +#define SEI_REC_CHECK_TH 8 //SPS #define MAX_NUM_SPS 32 @@ -69,7 +69,7 @@ extern "C" { //PPS #define MAX_PIC_PARAMS 255 #define MAX_NUM_REF_FRAMES 32 -#define MAX_QP 51 +#define MAX_QP 51 #define MAX_NUM_PPS 256 #define PUT_FS_IDC_BITS(w) (w&0x1F) @@ -115,14 +115,14 @@ extern "C" { //// PIP -typedef enum _pip_setting_t -{ - PIP_SCALER_DISABLED, - PIP_SCALE_FACTOR_1_BY_4, - PIP_SCALE_FACTOR_1_BY_2, - PIP_SCALER_INVALID, + typedef enum _pip_setting_t + { + PIP_SCALER_DISABLED, + PIP_SCALE_FACTOR_1_BY_4, + PIP_SCALE_FACTOR_1_BY_2, + PIP_SCALER_INVALID, -} pip_setting_t; + } pip_setting_t; #ifdef VERBOSE @@ -131,81 +131,81 @@ typedef enum _pip_setting_t //#define DEBUGGETBITS(args...) 
#endif -/* status codes */ -typedef enum _h264_Status -{ - H264_STATUS_EOF = 1, // end of file - H264_STATUS_OK = 0, // no error - H264_STATUS_NO_MEM = 2, // out of memory - H264_STATUS_FILE_ERROR = 3, // file error - H264_STATUS_NOTSUPPORT = 4, // not supported mode - H264_STATUS_PARSE_ERROR = 5, // fail in parse MPEG-4 stream - H264_STATUS_ERROR = 6, // unknown/unspecified error - H264_NAL_ERROR, - H264_SPS_INVALID_PROFILE, - H264_SPS_INVALID_LEVEL, - H264_SPS_INVALID_SEQ_PARAM_ID, - H264_SPS_ERROR, - H264_PPS_INVALID_PIC_ID, - H264_PPS_INVALID_SEQ_ID, - H264_PPS_ERROR, - H264_SliceHeader_INVALID_MB, - H264_SliceHeader_ERROR, - H264_FRAME_DONE, - H264_SLICE_DONE, - H264_STATUS_POLL_ONCE_ERROR, - H264_STATUS_DEC_MEMINIT_ERROR, - H264_STATUS_NAL_UNIT_TYPE_ERROR, - H264_STATUS_SEI_ERROR, - H264_STATUS_SEI_DONE, -} h264_Status; - - - -typedef enum _picture_structure_t -{ - TOP_FIELD = 1, - BOTTOM_FIELD = 2, - FRAME = 3, - INVALID = 4 -} picture_structure_t; + /* status codes */ + typedef enum _h264_Status + { + H264_STATUS_EOF = 1, // end of file + H264_STATUS_OK = 0, // no error + H264_STATUS_NO_MEM = 2, // out of memory + H264_STATUS_FILE_ERROR = 3, // file error + H264_STATUS_NOTSUPPORT = 4, // not supported mode + H264_STATUS_PARSE_ERROR = 5, // fail in parse MPEG-4 stream + H264_STATUS_ERROR = 6, // unknown/unspecified error + H264_NAL_ERROR, + H264_SPS_INVALID_PROFILE, + H264_SPS_INVALID_LEVEL, + H264_SPS_INVALID_SEQ_PARAM_ID, + H264_SPS_ERROR, + H264_PPS_INVALID_PIC_ID, + H264_PPS_INVALID_SEQ_ID, + H264_PPS_ERROR, + H264_SliceHeader_INVALID_MB, + H264_SliceHeader_ERROR, + H264_FRAME_DONE, + H264_SLICE_DONE, + H264_STATUS_POLL_ONCE_ERROR, + H264_STATUS_DEC_MEMINIT_ERROR, + H264_STATUS_NAL_UNIT_TYPE_ERROR, + H264_STATUS_SEI_ERROR, + H264_STATUS_SEI_DONE, + } h264_Status; + + + + typedef enum _picture_structure_t + { + TOP_FIELD = 1, + BOTTOM_FIELD = 2, + FRAME = 3, + INVALID = 4 + } picture_structure_t; ///// Chorma format -typedef enum _h264_chroma_format_t -{ - H264_CHROMA_MONOCHROME, - H264_CHROMA_420, - H264_CHROMA_422, - H264_CHROMA_444, -}h264_chroma_format_t; - -/* H264 start code values */ -typedef enum _h264_nal_unit_type -{ - h264_NAL_UNIT_TYPE_unspecified = 0, - h264_NAL_UNIT_TYPE_SLICE, - h264_NAL_UNIT_TYPE_DPA, - h264_NAL_UNIT_TYPE_DPB, - h264_NAL_UNIT_TYPE_DPC, - h264_NAL_UNIT_TYPE_IDR, - h264_NAL_UNIT_TYPE_SEI, - h264_NAL_UNIT_TYPE_SPS, - h264_NAL_UNIT_TYPE_PPS, - h264_NAL_UNIT_TYPE_Acc_unit_delimiter, - h264_NAL_UNIT_TYPE_EOSeq, - h264_NAL_UNIT_TYPE_EOstream, - h264_NAL_UNIT_TYPE_filler_data, - h264_NAL_UNIT_TYPE_SPS_extension, - h264_NAL_UNIT_TYPE_Reserved1 =14, /*14-18*/ - h264_NAL_UNIT_TYPE_Reserved2 =15, /*14-18*/ - h264_NAL_UNIT_TYPE_Reserved3 =16, /*14-18*/ - h264_NAL_UNIT_TYPE_Reserved4 =17, /*14-18*/ - h264_NAL_UNIT_TYPE_Reserved5 =18, /*14-18*/ - h264_NAL_UNIT_TYPE_ACP =19, - h264_NAL_UNIT_TYPE_Reserved6 =20, /*20-23*/ - h264_NAL_UNIT_TYPE_unspecified2 =24, /*24-31*/ -} h264_nal_unit_type; + typedef enum _h264_chroma_format_t + { + H264_CHROMA_MONOCHROME, + H264_CHROMA_420, + H264_CHROMA_422, + H264_CHROMA_444, + } h264_chroma_format_t; + + /* H264 start code values */ + typedef enum _h264_nal_unit_type + { + h264_NAL_UNIT_TYPE_unspecified = 0, + h264_NAL_UNIT_TYPE_SLICE, + h264_NAL_UNIT_TYPE_DPA, + h264_NAL_UNIT_TYPE_DPB, + h264_NAL_UNIT_TYPE_DPC, + h264_NAL_UNIT_TYPE_IDR, + h264_NAL_UNIT_TYPE_SEI, + h264_NAL_UNIT_TYPE_SPS, + h264_NAL_UNIT_TYPE_PPS, + h264_NAL_UNIT_TYPE_Acc_unit_delimiter, + h264_NAL_UNIT_TYPE_EOSeq, + h264_NAL_UNIT_TYPE_EOstream, + 
h264_NAL_UNIT_TYPE_filler_data, + h264_NAL_UNIT_TYPE_SPS_extension, + h264_NAL_UNIT_TYPE_Reserved1 =14, /*14-18*/ + h264_NAL_UNIT_TYPE_Reserved2 =15, /*14-18*/ + h264_NAL_UNIT_TYPE_Reserved3 =16, /*14-18*/ + h264_NAL_UNIT_TYPE_Reserved4 =17, /*14-18*/ + h264_NAL_UNIT_TYPE_Reserved5 =18, /*14-18*/ + h264_NAL_UNIT_TYPE_ACP =19, + h264_NAL_UNIT_TYPE_Reserved6 =20, /*20-23*/ + h264_NAL_UNIT_TYPE_unspecified2 =24, /*24-31*/ + } h264_nal_unit_type; #define h264_NAL_PRIORITY_HIGHEST 3 #define h264_NAL_PRIORITY_HIGH 2 @@ -213,97 +213,97 @@ typedef enum _h264_nal_unit_type #define h264_NAL_PRIORITY_DISPOSABLE 0 -typedef enum _h264_Profile -{ - h264_ProfileBaseline = 66, /** Baseline profile */ - h264_ProfileMain = 77, /** Main profile */ - h264_ProfileExtended = 88, /** Extended profile */ - h264_ProfileHigh = 100 , /** High profile */ - h264_ProfileHigh10 = 110, /** High 10 profile */ - h264_ProfileHigh422 = 122, /** High profile 4:2:2 */ - h264_ProfileHigh444 = 144, /** High profile 4:4:4 */ -} h264_Profile; - - -typedef enum _h264_Level -{ - h264_Level1b = 9, /** Level 1b */ - h264_Level1 = 10, /** Level 1 */ - h264_Level11 = 11, /** Level 1.1 */ - h264_Level12 = 12, /** Level 1.2 */ - h264_Level13 = 13, /** Level 1.3 */ - h264_Level2 = 20, /** Level 2 */ - h264_Level21 = 21, /** Level 2.1 */ - h264_Level22 = 22, /** Level 2.2 */ - h264_Level3 = 30, /** Level 3 */ - h264_Level31 = 31, /** Level 3.1 */ - h264_Level32 = 32, /** Level 3.2 */ - h264_Level4 = 40, /** Level 4 */ - h264_Level41 = 41, /** Level 4.1 */ - h264_Level42 = 42, /** Level 4.2 */ - h264_Level5 = 50, /** Level 5 */ - h264_Level51 = 51, /** Level 5.1 */ - h264_LevelReserved = 255 /** Unknown profile */ -} h264_Level; - - -typedef enum _h264_video_format -{ - h264_Component =0, - h264_PAL, - h264_NTSC, - h264_SECAM, - h264_MAC, - h264_unspecified, - h264_Reserved6, - h264_Reserved7 -}h264_video_format; - - -typedef enum _h264_fcm -{ - h264_ProgressiveFrame = 0, - h264_InterlacedFrame = 1, - h264_InterlacedField = 3, - h264_PictureFormatNone -} h264_fcm; + typedef enum _h264_Profile + { + h264_ProfileBaseline = 66, /** Baseline profile */ + h264_ProfileMain = 77, /** Main profile */ + h264_ProfileExtended = 88, /** Extended profile */ + h264_ProfileHigh = 100 , /** High profile */ + h264_ProfileHigh10 = 110, /** High 10 profile */ + h264_ProfileHigh422 = 122, /** High profile 4:2:2 */ + h264_ProfileHigh444 = 144, /** High profile 4:4:4 */ + } h264_Profile; + + + typedef enum _h264_Level + { + h264_Level1b = 9, /** Level 1b */ + h264_Level1 = 10, /** Level 1 */ + h264_Level11 = 11, /** Level 1.1 */ + h264_Level12 = 12, /** Level 1.2 */ + h264_Level13 = 13, /** Level 1.3 */ + h264_Level2 = 20, /** Level 2 */ + h264_Level21 = 21, /** Level 2.1 */ + h264_Level22 = 22, /** Level 2.2 */ + h264_Level3 = 30, /** Level 3 */ + h264_Level31 = 31, /** Level 3.1 */ + h264_Level32 = 32, /** Level 3.2 */ + h264_Level4 = 40, /** Level 4 */ + h264_Level41 = 41, /** Level 4.1 */ + h264_Level42 = 42, /** Level 4.2 */ + h264_Level5 = 50, /** Level 5 */ + h264_Level51 = 51, /** Level 5.1 */ + h264_LevelReserved = 255 /** Unknown profile */ + } h264_Level; + + + typedef enum _h264_video_format + { + h264_Component =0, + h264_PAL, + h264_NTSC, + h264_SECAM, + h264_MAC, + h264_unspecified, + h264_Reserved6, + h264_Reserved7 + } h264_video_format; + + + typedef enum _h264_fcm + { + h264_ProgressiveFrame = 0, + h264_InterlacedFrame = 1, + h264_InterlacedField = 3, + h264_PictureFormatNone + } h264_fcm; ///// Define the picture types [] -typedef enum 
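
The h264_nal_unit_type values and h264_NAL_PRIORITY_* levels re-indented here map directly onto the one-byte NAL unit header of the H.264 spec: one forbidden_zero bit, two nal_ref_idc bits, and five nal_unit_type bits, which is why the type values run 0 to 31 and the priority values 0 to 3. A self-contained sketch of that unpacking follows; it is independent of the parser's own h264_Parse_NAL_Unit() and assumes the Annex B start code has already been stripped.

#include <stdint.h>

/* Subset of the enum above, enough for the example. */
enum {
    NAL_TYPE_SLICE = 1,
    NAL_TYPE_IDR   = 5,
    NAL_TYPE_SEI   = 6,
    NAL_TYPE_SPS   = 7,
    NAL_TYPE_PPS   = 8
};

/* Unpack the first byte of a NAL unit (the byte after the start code). */
static void nal_header_unpack(uint8_t hdr, uint8_t *nal_ref_idc, uint8_t *nal_unit_type)
{
    /* forbidden_zero_bit is (hdr >> 7) & 1 and must be 0 in a valid stream. */
    *nal_ref_idc   = (uint8_t)((hdr >> 5) & 0x03); /* 0..3, cf. h264_NAL_PRIORITY_* */
    *nal_unit_type = (uint8_t)(hdr & 0x1F);        /* 0..31, cf. h264_nal_unit_type */
}
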
_h264_ptype_t -{ - h264_PtypeP = 0, - h264_PtypeB = 1, - h264_PtypeI = 2, - h264_PtypeSP = 3, - h264_PtypeSI = 4, - h264_Ptype_unspecified, -} h264_ptype_t; + typedef enum _h264_ptype_t + { + h264_PtypeP = 0, + h264_PtypeB = 1, + h264_PtypeI = 2, + h264_PtypeSP = 3, + h264_PtypeSI = 4, + h264_Ptype_unspecified, + } h264_ptype_t; ///// Aspect ratio -typedef enum _h264_aspect_ratio -{ - h264_AR_Unspecified = 0, - h264_AR_1_1 = 1, - h264_AR_12_11 = 2, - h264_AR_10_11 = 3, - h264_AR_16_11 = 4, - h264_AR_40_33 = 5, - h264_AR_24_11 = 6, - h264_AR_20_11 = 7, - h264_AR_32_11 = 8, - h264_AR_80_33 = 9, - h264_AR_18_11 = 10, - h264_AR_15_11 = 11, - h264_AR_64_33 = 12, - h264_AR_160_99 = 13, - h264_AR_4_3 = 14, - h264_AR_3_2 = 15, - h264_AR_2_1 = 16, - h264_AR_RESERVED = 17, - h264_AR_Extended_SAR = 255, -}h264_aspect_ratio; + typedef enum _h264_aspect_ratio + { + h264_AR_Unspecified = 0, + h264_AR_1_1 = 1, + h264_AR_12_11 = 2, + h264_AR_10_11 = 3, + h264_AR_16_11 = 4, + h264_AR_40_33 = 5, + h264_AR_24_11 = 6, + h264_AR_20_11 = 7, + h264_AR_32_11 = 8, + h264_AR_80_33 = 9, + h264_AR_18_11 = 10, + h264_AR_15_11 = 11, + h264_AR_64_33 = 12, + h264_AR_160_99 = 13, + h264_AR_4_3 = 14, + h264_AR_3_2 = 15, + h264_AR_2_1 = 16, + h264_AR_RESERVED = 17, + h264_AR_Extended_SAR = 255, + } h264_aspect_ratio; ////////////////////////////////////////////// @@ -311,723 +311,723 @@ typedef enum _h264_aspect_ratio ////////////////////////////////////////////// // storable_picture -/* Structure details - If all members remain ints - Size = 11 ints, i.e. 44 bytes -*/ + /* Structure details + If all members remain ints + Size = 11 ints, i.e. 44 bytes + */ + + typedef struct + { + int32_t poc; + int32_t pic_num; -typedef struct -{ - int32_t poc; - int32_t pic_num; + int32_t long_term_pic_num; - int32_t long_term_pic_num; - - uint8_t long_term_frame_idx; - uint8_t is_long_term; - uint8_t used_for_reference; - uint8_t pad_flag; // Used to indicate the status + uint8_t long_term_frame_idx; + uint8_t is_long_term; + uint8_t used_for_reference; + uint8_t pad_flag; // Used to indicate the status -} storable_picture, *storable_picture_ptr; + } storable_picture, *storable_picture_ptr; ////////////////////////////////////////////// // frame store -/* Structure details - If all members remain ints - Size = 46 ints, i.e. 
184 bytes -*/ - -typedef struct _frame_store -{ - storable_picture frame; - storable_picture top_field; - storable_picture bottom_field; - - int32_t frame_num; - - int32_t frame_num_wrap; - - - uint8_t fs_idc; - uint8_t pic_type; //bit7 structure: 1 frame , 0 field; - //bit4,5,6 top field (frame) pic type, 00 IDR 01 I 10 P 11 B 100 INVALID - //bit1,2,3 bottom pic type, 00 IDR 01 I 10 P 11 B 100 INVALID - uint8_t long_term_frame_idx; // No two frame stores may have the same long-term frame index - - #define viddec_h264_get_dec_structure(x) h264_bitfields_extract( (x)->fs_flag_1, 0, 0x03) - #define viddec_h264_set_dec_structure(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 0, 0x03) - #define viddec_h264_get_is_used(x) h264_bitfields_extract( (x)->fs_flag_1, 2, 0x03) - #define viddec_h264_set_is_frame_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x03) - #define viddec_h264_set_is_top_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x01) - #define viddec_h264_set_is_bottom_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 3, 0x01) - #define viddec_h264_get_is_skipped(x) h264_bitfields_extract( (x)->fs_flag_1, 4, 0x03) - #define viddec_h264_set_is_frame_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x03) - #define viddec_h264_set_is_top_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x01) - #define viddec_h264_set_is_bottom_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 5, 0x01) - #define viddec_h264_get_is_long_term(x) h264_bitfields_extract( (x)->fs_flag_1, 6, 0x03) - #define viddec_h264_set_is_frame_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x03) - #define viddec_h264_set_is_top_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x01) - #define viddec_h264_set_is_bottom_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 7, 0x01) - uint8_t fs_flag_1; - - - #define viddec_h264_get_is_non_existent(x) h264_bitfields_extract( (x)->fs_flag_2, 0, 0x01) - #define viddec_h264_set_is_non_existent(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 0, 0x01) - #define viddec_h264_get_is_output(x) h264_bitfields_extract( (x)->fs_flag_2, 1, 0x01) - #define viddec_h264_set_is_output(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 1, 0x01) - #define viddec_h264_get_is_dangling(x) h264_bitfields_extract( (x)->fs_flag_2, 2, 0x01) - #define viddec_h264_set_is_dangling(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 2, 0x01) - #define viddec_h264_get_recovery_pt_picture(x) h264_bitfields_extract( (x)->fs_flag_2, 3, 0x01) - #define viddec_h264_set_recovery_pt_picture(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 3, 0x01) - #define viddec_h264_get_broken_link_picture(x) h264_bitfields_extract( (x)->fs_flag_2, 4, 0x01) - #define viddec_h264_set_broken_link_picture(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 4, 0x01) - #define viddec_h264_get_open_gop_entry(x) h264_bitfields_extract( (x)->fs_flag_2, 5, 0x01) - #define viddec_h264_set_open_gop_entry(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 5, 0x01) - #define viddec_h264_get_first_field_intra(x) h264_bitfields_extract( (x)->fs_flag_2, 6, 0x01) - #define viddec_h264_set_first_field_intra(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 6, 0x01) - uint8_t fs_flag_2; - - uint8_t fs_flag_reserve_1; - uint8_t fs_flag_reserve_2; - uint8_t fs_flag_reserve_3; - - // If non-reference, may have skipped pixel decode - //uint8_t non_ref_skipped; -} frame_store, 
*frame_param_ptr; + /* Structure details + If all members remain ints + Size = 46 ints, i.e. 184 bytes + */ + + typedef struct _frame_store + { + storable_picture frame; + storable_picture top_field; + storable_picture bottom_field; + + int32_t frame_num; + + int32_t frame_num_wrap; + + + uint8_t fs_idc; + uint8_t pic_type; //bit7 structure: 1 frame , 0 field; + //bit4,5,6 top field (frame) pic type, 00 IDR 01 I 10 P 11 B 100 INVALID + //bit1,2,3 bottom pic type, 00 IDR 01 I 10 P 11 B 100 INVALID + uint8_t long_term_frame_idx; // No two frame stores may have the same long-term frame index + +#define viddec_h264_get_dec_structure(x) h264_bitfields_extract( (x)->fs_flag_1, 0, 0x03) +#define viddec_h264_set_dec_structure(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 0, 0x03) +#define viddec_h264_get_is_used(x) h264_bitfields_extract( (x)->fs_flag_1, 2, 0x03) +#define viddec_h264_set_is_frame_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x03) +#define viddec_h264_set_is_top_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x01) +#define viddec_h264_set_is_bottom_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 3, 0x01) +#define viddec_h264_get_is_skipped(x) h264_bitfields_extract( (x)->fs_flag_1, 4, 0x03) +#define viddec_h264_set_is_frame_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x03) +#define viddec_h264_set_is_top_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x01) +#define viddec_h264_set_is_bottom_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 5, 0x01) +#define viddec_h264_get_is_long_term(x) h264_bitfields_extract( (x)->fs_flag_1, 6, 0x03) +#define viddec_h264_set_is_frame_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x03) +#define viddec_h264_set_is_top_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x01) +#define viddec_h264_set_is_bottom_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 7, 0x01) + uint8_t fs_flag_1; + + +#define viddec_h264_get_is_non_existent(x) h264_bitfields_extract( (x)->fs_flag_2, 0, 0x01) +#define viddec_h264_set_is_non_existent(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 0, 0x01) +#define viddec_h264_get_is_output(x) h264_bitfields_extract( (x)->fs_flag_2, 1, 0x01) +#define viddec_h264_set_is_output(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 1, 0x01) +#define viddec_h264_get_is_dangling(x) h264_bitfields_extract( (x)->fs_flag_2, 2, 0x01) +#define viddec_h264_set_is_dangling(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 2, 0x01) +#define viddec_h264_get_recovery_pt_picture(x) h264_bitfields_extract( (x)->fs_flag_2, 3, 0x01) +#define viddec_h264_set_recovery_pt_picture(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 3, 0x01) +#define viddec_h264_get_broken_link_picture(x) h264_bitfields_extract( (x)->fs_flag_2, 4, 0x01) +#define viddec_h264_set_broken_link_picture(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 4, 0x01) +#define viddec_h264_get_open_gop_entry(x) h264_bitfields_extract( (x)->fs_flag_2, 5, 0x01) +#define viddec_h264_set_open_gop_entry(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 5, 0x01) +#define viddec_h264_get_first_field_intra(x) h264_bitfields_extract( (x)->fs_flag_2, 6, 0x01) +#define viddec_h264_set_first_field_intra(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 6, 0x01) + uint8_t fs_flag_2; + + uint8_t fs_flag_reserve_1; + uint8_t fs_flag_reserve_2; + uint8_t fs_flag_reserve_3; + + // If non-reference, may 
have skipped pixel decode + //uint8_t non_ref_skipped; + } frame_store, *frame_param_ptr; //! Decoded Picture Buffer -typedef struct _h264_decoded_picture_buffer -{ - /// - int32_t last_output_poc; - int32_t max_long_term_pic_idx; + typedef struct _h264_decoded_picture_buffer + { + /// + int32_t last_output_poc; + int32_t max_long_term_pic_idx; + + //// Resolutions + int32_t PicWidthInMbs; + int32_t FrameHeightInMbs; - //// Resolutions - int32_t PicWidthInMbs; - int32_t FrameHeightInMbs; + frame_store fs[NUM_DPB_FRAME_STORES]; - frame_store fs[NUM_DPB_FRAME_STORES]; + uint8_t fs_ref_idc[16]; + uint8_t fs_ltref_idc[16]; - uint8_t fs_ref_idc[16]; - uint8_t fs_ltref_idc[16]; + uint8_t fs_dpb_idc[NUM_DPB_FRAME_STORES+2]; - uint8_t fs_dpb_idc[NUM_DPB_FRAME_STORES+2]; + uint8_t listX_0[33+3]; // [bit5}:field_flag:0 for top, 1 for bottom, [bit4~0]:fs_idc + uint8_t listX_1[33+3]; - uint8_t listX_0[33+3]; // [bit5}:field_flag:0 for top, 1 for bottom, [bit4~0]:fs_idc - uint8_t listX_1[33+3]; + uint8_t listXsize[2]; // 1 to 32 + uint8_t nInitListSize[2]; - uint8_t listXsize[2]; // 1 to 32 - uint8_t nInitListSize[2]; + //uint32_t size; + uint8_t fs_dec_idc; + uint8_t fs_non_exist_idc; + uint8_t BumpLevel; + uint8_t used_size; - //uint32_t size; - uint8_t fs_dec_idc; - uint8_t fs_non_exist_idc; - uint8_t BumpLevel; - uint8_t used_size; + uint8_t OutputLevel; + uint8_t OutputLevelValid; + uint8_t OutputCtrl; + uint8_t num_ref_frames; - uint8_t OutputLevel; - uint8_t OutputLevelValid; - uint8_t OutputCtrl; - uint8_t num_ref_frames; - - uint8_t ref_frames_in_buffer; - uint8_t ltref_frames_in_buffer; - uint8_t SuspendOutput; - uint8_t WaitSeiRecovery; + uint8_t ref_frames_in_buffer; + uint8_t ltref_frames_in_buffer; + uint8_t SuspendOutput; + uint8_t WaitSeiRecovery; - - uint8_t frame_numbers_need_to_be_allocated; - uint8_t frame_id_need_to_be_allocated; - //// frame list to release from dpb, need be displayed - uint8_t frame_numbers_need_to_be_removed; - uint8_t frame_id_need_to_be_removed[17]; + uint8_t frame_numbers_need_to_be_allocated; + uint8_t frame_id_need_to_be_allocated; - //// frame list to removed from dpb but not display - uint8_t frame_numbers_need_to_be_dropped; - uint8_t frame_id_need_to_be_dropped[17]; + //// frame list to release from dpb, need be displayed + uint8_t frame_numbers_need_to_be_removed; + uint8_t frame_id_need_to_be_removed[17]; - //// frame list to display (in display order) - uint8_t frame_numbers_need_to_be_displayed; - uint8_t frame_id_need_to_be_displayed[17]; + //// frame list to removed from dpb but not display + uint8_t frame_numbers_need_to_be_dropped; + uint8_t frame_id_need_to_be_dropped[17]; + //// frame list to display (in display order) + uint8_t frame_numbers_need_to_be_displayed; + uint8_t frame_id_need_to_be_displayed[17]; -} h264_DecodedPictureBuffer; + + } h264_DecodedPictureBuffer; ////////////////////////////////////////////// // qm_matrix_set -typedef struct _qm_matrix_set -{ - // uint8_t scaling_default_vector; - uint8_t scaling_list[56]; // 0 to 23 for qm 0 to 5 (4x4), 24 to 55 for qm 6 & 7 (8x8) - -} qm_matrix_set, *qm_matrix_set_ptr; - -/* -///////// Currently not enabled in parser fw/////////////////// -typedef struct _h264_SPS_Extension_RBSP { - int32_t seq_parameter_set_id; //UE - int32_t aux_format_idc; //UE - int32_t bit_depth_aux_minus8; //UE - int32_t alpha_incr_flag; - int32_t alpha_opaque_value; - int32_t alpha_transparent_value; - int32_t additional_extension_flag; -// h264_rbsp_trail_set* rbsp_trail_ptr; -}h264_SPS_Extension_RBSP_t; -*/ + 
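
A note on the accessor macros above: every viddec_h264_get_*/set_* macro funnels through h264_bitfields_extract() and h264_bitfields_insert(), whose definitions sit elsewhere in the parser and are not part of this hunk. The sketch below reconstructs plausible semantics from the call sites (extract shifts then masks; insert clears the field and ORs the new value in); treat it as an illustration of the fs_flag_1/fs_flag_2 packing, not the shipped implementation.

#include <stdint.h>
#include <assert.h>

/* Assumed semantics, reconstructed from the call sites above. */
#define bitfields_extract(x, start, mask) \
    (((x) >> (start)) & (mask))
#define bitfields_insert(x, val, start, mask) \
    ((x) = (uint8_t)(((x) & ~((mask) << (start))) | (((val) & (mask)) << (start))))

int main(void)
{
    uint8_t fs_flag_1 = 0;

    /* set_is_frame_used(x, 3): both "used" bits, positions 2..3 */
    bitfields_insert(fs_flag_1, 3, 2, 0x03);
    assert(bitfields_extract(fs_flag_1, 2, 0x03) == 3); /* get_is_used */

    /* set_is_bottom_skipped(x, 1): single bit at position 5 */
    bitfields_insert(fs_flag_1, 1, 5, 0x01);
    assert(bitfields_extract(fs_flag_1, 4, 0x03) == 2); /* get_is_skipped: bottom only */
    return 0;
}
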
typedef struct _qm_matrix_set + { +// uint8_t scaling_default_vector; + uint8_t scaling_list[56]; // 0 to 23 for qm 0 to 5 (4x4), 24 to 55 for qm 6 & 7 (8x8) + + } qm_matrix_set, *qm_matrix_set_ptr; + + /* + ///////// Currently not enabled in parser fw/////////////////// + typedef struct _h264_SPS_Extension_RBSP { + int32_t seq_parameter_set_id; //UE + int32_t aux_format_idc; //UE + int32_t bit_depth_aux_minus8; //UE + int32_t alpha_incr_flag; + int32_t alpha_opaque_value; + int32_t alpha_transparent_value; + int32_t additional_extension_flag; + // h264_rbsp_trail_set* rbsp_trail_ptr; + }h264_SPS_Extension_RBSP_t; + */ + + typedef struct _h264_hrd_param_set { + int32_t bit_rate_value_minus1[MAX_CPB_CNT]; // ue(v), 0 to (2^32)-2 + int32_t cpb_size_value_minus1[MAX_CPB_CNT]; // ue(v), 0 to (2^32)-2 + + uint8_t cbr_flag[MAX_CPB_CNT]; // u(1) * 32 + + } h264_hrd_param_set, *h264_hrd_param_set_ptr; + + typedef struct _vui_seq_parameters_t_used + { + uint32_t num_units_in_tick; // u(32) + uint32_t time_scale; // u(32) + + int32_t num_reorder_frames; // ue(v), 0 to max_dec_frame_buffering + int32_t max_dec_frame_buffering; // ue(v), 0 to MaxDpbSize, specified in subclause A.3 + + uint16_t sar_width; // u(16) + uint16_t sar_height; // u(16) + + uint8_t aspect_ratio_info_present_flag; // u(1) + uint8_t aspect_ratio_idc; // u(8) + uint8_t video_signal_type_present_flag; // u(1) + uint8_t video_format; // u(3) +#ifdef VBP + uint8_t video_full_range_flag; // u(1) + uint8_t matrix_coefficients; // u(8) +#endif -typedef struct _h264_hrd_param_set { - int32_t bit_rate_value_minus1[MAX_CPB_CNT]; // ue(v), 0 to (2^32)-2 - int32_t cpb_size_value_minus1[MAX_CPB_CNT]; // ue(v), 0 to (2^32)-2 + uint8_t colour_description_present_flag; // u(1) + uint8_t colour_primaries; // u(8) + uint8_t transfer_characteristics; // u(8) + uint8_t timing_info_present_flag; // u(1) - uint8_t cbr_flag[MAX_CPB_CNT]; // u(1) * 32 + uint8_t fixed_frame_rate_flag; // u(1) + uint8_t low_delay_hrd_flag; // u(1) + uint8_t bitstream_restriction_flag; // u(1) + uint8_t pic_struct_present_flag; -} h264_hrd_param_set, *h264_hrd_param_set_ptr; + uint8_t nal_hrd_parameters_present_flag; // u(1) + uint8_t nal_hrd_cpb_removal_delay_length_minus1; // u(5) + uint8_t nal_hrd_dpb_output_delay_length_minus1; // u(5) + uint8_t nal_hrd_time_offset_length; // u(5) -typedef struct _vui_seq_parameters_t_used -{ - uint32_t num_units_in_tick; // u(32) - uint32_t time_scale; // u(32) + uint8_t nal_hrd_cpb_cnt_minus1; // ue(v), 0 to 31 + uint8_t nal_hrd_initial_cpb_removal_delay_length_minus1; // u(5) + uint8_t vcl_hrd_parameters_present_flag; // u(1) + uint8_t vcl_hrd_cpb_removal_delay_length_minus1; // u(5) - int32_t num_reorder_frames; // ue(v), 0 to max_dec_frame_buffering - int32_t max_dec_frame_buffering; // ue(v), 0 to MaxDpbSize, specified in subclause A.3 + uint8_t vcl_hrd_dpb_output_delay_length_minus1; // u(5) + uint8_t vcl_hrd_time_offset_length; // u(5) + uint8_t vcl_hrd_cpb_cnt_minus1; // ue(v), 0 to 31 + uint8_t vcl_hrd_initial_cpb_removal_delay_length_minus1; // u(5) - uint16_t sar_width; // u(16) - uint16_t sar_height; // u(16) + /////// Here should be kept as 32-bits aligned for next structures + /// 2 structures for NAL&VCL HRD - uint8_t aspect_ratio_info_present_flag; // u(1) - uint8_t aspect_ratio_idc; // u(8) - uint8_t video_signal_type_present_flag; // u(1) - uint8_t video_format; // u(3) -#ifdef VBP - uint8_t video_full_range_flag; // u(1) - uint8_t matrix_coefficients; // u(8) -#endif - - uint8_t colour_description_present_flag; // 
u(1) - uint8_t colour_primaries; // u(8) - uint8_t transfer_characteristics; // u(8) - uint8_t timing_info_present_flag; // u(1) - - uint8_t fixed_frame_rate_flag; // u(1) - uint8_t low_delay_hrd_flag; // u(1) - uint8_t bitstream_restriction_flag; // u(1) - uint8_t pic_struct_present_flag; - - uint8_t nal_hrd_parameters_present_flag; // u(1) - uint8_t nal_hrd_cpb_removal_delay_length_minus1; // u(5) - uint8_t nal_hrd_dpb_output_delay_length_minus1; // u(5) - uint8_t nal_hrd_time_offset_length; // u(5) - - uint8_t nal_hrd_cpb_cnt_minus1; // ue(v), 0 to 31 - uint8_t nal_hrd_initial_cpb_removal_delay_length_minus1; // u(5) - uint8_t vcl_hrd_parameters_present_flag; // u(1) - uint8_t vcl_hrd_cpb_removal_delay_length_minus1; // u(5) - - uint8_t vcl_hrd_dpb_output_delay_length_minus1; // u(5) - uint8_t vcl_hrd_time_offset_length; // u(5) - uint8_t vcl_hrd_cpb_cnt_minus1; // ue(v), 0 to 31 - uint8_t vcl_hrd_initial_cpb_removal_delay_length_minus1; // u(5) - - /////// Here should be kept as 32-bits aligned for next structures - /// 2 structures for NAL&VCL HRD - - -} vui_seq_parameters_t_used; - - -typedef struct _vui_seq_parameters_t_not_used -{ - int16_t chroma_sample_loc_type_top_field; // ue(v) - int16_t chroma_sample_loc_type_bottom_field; // ue(v) - - uint8_t overscan_info_present_flag; // u(1) - uint8_t overscan_appropriate_flag; // u(1) - - uint8_t video_full_range_flag; // u(1) - uint8_t matrix_coefficients; // u(8) - - uint8_t chroma_location_info_present_flag; // u(1) - uint8_t max_bytes_per_pic_denom; // ue(v), 0 to 16 - uint8_t max_bits_per_mb_denom; // ue(v), 0 to 16 - uint8_t log2_max_mv_length_vertical; // ue(v), 0 to 16, default to 16 - uint8_t log2_max_mv_length_horizontal; // ue(v), 0 to 16, default to 16 - - uint8_t motion_vectors_over_pic_boundaries_flag; // u(1) - - uint8_t nal_hrd_bit_rate_scale; // u(4) - uint8_t nal_hrd_cpb_size_scale; // u(4) - - uint8_t vcl_hrd_bit_rate_scale; // u(4) - uint8_t vcl_hrd_cpb_size_scale; // u(4) - - h264_hrd_param_set nal_hrd_parameters; - h264_hrd_param_set vcl_hrd_parameters; - - -} vui_seq_parameters_t_not_used, *vui_seq_parameters_t_not_used_ptr; + + } vui_seq_parameters_t_used; + + + typedef struct _vui_seq_parameters_t_not_used + { + int16_t chroma_sample_loc_type_top_field; // ue(v) + int16_t chroma_sample_loc_type_bottom_field; // ue(v) + + uint8_t overscan_info_present_flag; // u(1) + uint8_t overscan_appropriate_flag; // u(1) + + uint8_t video_full_range_flag; // u(1) + uint8_t matrix_coefficients; // u(8) + + uint8_t chroma_location_info_present_flag; // u(1) + uint8_t max_bytes_per_pic_denom; // ue(v), 0 to 16 + uint8_t max_bits_per_mb_denom; // ue(v), 0 to 16 + uint8_t log2_max_mv_length_vertical; // ue(v), 0 to 16, default to 16 + uint8_t log2_max_mv_length_horizontal; // ue(v), 0 to 16, default to 16 + + uint8_t motion_vectors_over_pic_boundaries_flag; // u(1) + + uint8_t nal_hrd_bit_rate_scale; // u(4) + uint8_t nal_hrd_cpb_size_scale; // u(4) + + uint8_t vcl_hrd_bit_rate_scale; // u(4) + uint8_t vcl_hrd_cpb_size_scale; // u(4) + + h264_hrd_param_set nal_hrd_parameters; + h264_hrd_param_set vcl_hrd_parameters; + + + } vui_seq_parameters_t_not_used, *vui_seq_parameters_t_not_used_ptr; ////////////////////////////////////////////// // picture parameter set -typedef struct _PPS_PAR -{ - //int32_t DOUBLE_ALIGN valid; // indicates the parameter set is valid - - int32_t pic_init_qp_minus26; // se(v), -26 to +25 - int32_t pic_init_qs_minus26; // se(v), -26 to +25 - int32_t chroma_qp_index_offset; // se(v), -12 to +12 - int32_t 
second_chroma_qp_index_offset; - - uint8_t pic_parameter_set_id; // ue(v), 0 to 255, restricted to 0 to 127 by MPD_CTRL_MAXPPS = 128 - uint8_t seq_parameter_set_id; // ue(v), 0 to 31 - uint8_t entropy_coding_mode_flag; // u(1) - uint8_t pic_order_present_flag; // u(1) - - uint8_t num_slice_groups_minus1; // ue(v), shall be 0 for MP - // Below are not relevant for main profile... - uint8_t slice_group_map_type; // ue(v), 0 to 6 - uint8_t num_ref_idx_l0_active; // ue(v), 0 to 31 - uint8_t num_ref_idx_l1_active; // ue(v), 0 to 31 - - uint8_t weighted_pred_flag; // u(1) - uint8_t weighted_bipred_idc; // u(2) - uint8_t deblocking_filter_control_present_flag; // u(1) - uint8_t constrained_intra_pred_flag; // u(1) - - uint8_t redundant_pic_cnt_present_flag; // u(1) - uint8_t transform_8x8_mode_flag; - uint8_t pic_scaling_matrix_present_flag; - uint8_t pps_status_flag; - - //// Keep here with 32-bits aligned - uint8_t pic_scaling_list_present_flag[MAX_PIC_LIST_NUM]; - - qm_matrix_set pps_qm; - - uint8_t ScalingList4x4[6][16]; - uint8_t ScalingList8x8[2][64]; - uint8_t UseDefaultScalingMatrix4x4Flag[6+2]; - uint8_t UseDefaultScalingMatrix8x8Flag[6+2]; - -} pic_param_set, *pic_param_set_ptr, h264_PicParameterSet_t; - -typedef union _list_reordering_num_t -{ - int32_t abs_diff_pic_num_minus1; - int32_t long_term_pic_num; -} list_reordering_num_t; - -typedef struct _h264_Ref_Pic_List_Reordering ////size = 8*33+ 1 + 33 -{ - list_reordering_num_t list_reordering_num[MAX_NUM_REF_FRAMES+1]; - - uint8_t ref_pic_list_reordering_flag; - uint8_t reordering_of_pic_nums_idc[MAX_NUM_REF_FRAMES+1]; //UE - -}h264_Ref_Pic_List_Reordering_t; - -typedef enum _H264_DANGLING_TYPE -{ - DANGLING_TYPE_LAST_FIELD, - DANGLING_TYPE_DPB_RESET, - DANGLING_TYPE_FIELD, - DANGLING_TYPE_FRAME, - DANGLING_TYPE_GAP_IN_FRAME - -} H264_DANGLING_TYPE; - - -typedef struct _h264_Dec_Ref_Pic_Marking //size = 17*4*2 + 17*3 + 4 + 1 -{ - int32_t difference_of_pic_num_minus1[NUM_MMCO_OPERATIONS]; - int32_t long_term_pic_num[NUM_MMCO_OPERATIONS]; - - /// MMCO - uint8_t memory_management_control_operation[NUM_MMCO_OPERATIONS]; - uint8_t max_long_term_frame_idx_plus1[NUM_MMCO_OPERATIONS]; - uint8_t long_term_frame_idx[NUM_MMCO_OPERATIONS]; - uint8_t long_term_reference_flag; - - uint8_t adaptive_ref_pic_marking_mode_flag; - uint8_t dec_ref_pic_marking_count; - uint8_t no_output_of_prior_pics_flag; - - uint8_t pad; -}h264_Dec_Ref_Pic_Marking_t; - - - -typedef struct old_slice_par -{ - int32_t frame_num; - int32_t pic_order_cnt_lsb; - int32_t delta_pic_order_cnt_bottom; - int32_t delta_pic_order_cnt[2]; - - uint8_t field_pic_flag; - uint8_t bottom_field_flag; - uint8_t nal_ref_idc; - uint8_t structure; - - uint8_t idr_flag; - uint8_t idr_pic_id; - uint8_t pic_parameter_id; - uint8_t status; -} OldSliceParams; + typedef struct _PPS_PAR + { + //int32_t DOUBLE_ALIGN valid; // indicates the parameter set is valid + + int32_t pic_init_qp_minus26; // se(v), -26 to +25 + int32_t pic_init_qs_minus26; // se(v), -26 to +25 + int32_t chroma_qp_index_offset; // se(v), -12 to +12 + int32_t second_chroma_qp_index_offset; + + uint8_t pic_parameter_set_id; // ue(v), 0 to 255, restricted to 0 to 127 by MPD_CTRL_MAXPPS = 128 + uint8_t seq_parameter_set_id; // ue(v), 0 to 31 + uint8_t entropy_coding_mode_flag; // u(1) + uint8_t pic_order_present_flag; // u(1) + + uint8_t num_slice_groups_minus1; // ue(v), shall be 0 for MP + // Below are not relevant for main profile... 
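
The ue(v)/se(v) annotations on these PPS fields (and on the SPS and slice-header fields elsewhere in this file) are H.264 Exp-Golomb codes, which this parser reads through h264_get_codeNum()/h264_GetVLCElement(). For orientation, a minimal self-contained decoder for both mappings follows; the bitreader type here is a stand-in for the viddec bitstream context, and bounds checking is omitted for brevity.

#include <stdint.h>
#include <stddef.h>
#include <assert.h>

typedef struct { const uint8_t *buf; size_t bitpos; } bitreader;

static uint32_t read_bit(bitreader *br)
{
    uint32_t b = (br->buf[br->bitpos >> 3] >> (7 - (br->bitpos & 7))) & 1;
    br->bitpos++;
    return b;
}

/* ue(v): count leading zeros, read that many suffix bits; codeNum = 2^n - 1 + suffix */
static uint32_t read_ue(bitreader *br)
{
    uint32_t zeros = 0, suffix = 0;
    while (read_bit(br) == 0)
        zeros++;
    for (uint32_t i = 0; i < zeros; i++)
        suffix = (suffix << 1) | read_bit(br);
    return (1u << zeros) - 1 + suffix;
}

/* se(v): codeNum k maps to 0, 1, -1, 2, -2, ... */
static int32_t read_se(bitreader *br)
{
    uint32_t k = read_ue(br);
    return (k & 1) ? (int32_t)((k + 1) / 2) : -(int32_t)(k / 2);
}

int main(void)
{
    const uint8_t data[] = { 0xA6 };     /* bits: 1 010 011 0 */
    bitreader br = { data, 0 };
    assert(read_ue(&br) == 0);           /* "1"   -> 0  */
    assert(read_ue(&br) == 1);           /* "010" -> 1  */
    assert(read_se(&br) == -1);          /* "011" -> k=2 -> -1 */
    return 0;
}
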
+ uint8_t slice_group_map_type; // ue(v), 0 to 6 + uint8_t num_ref_idx_l0_active; // ue(v), 0 to 31 + uint8_t num_ref_idx_l1_active; // ue(v), 0 to 31 + + uint8_t weighted_pred_flag; // u(1) + uint8_t weighted_bipred_idc; // u(2) + uint8_t deblocking_filter_control_present_flag; // u(1) + uint8_t constrained_intra_pred_flag; // u(1) + + uint8_t redundant_pic_cnt_present_flag; // u(1) + uint8_t transform_8x8_mode_flag; + uint8_t pic_scaling_matrix_present_flag; + uint8_t pps_status_flag; + + //// Keep here with 32-bits aligned + uint8_t pic_scaling_list_present_flag[MAX_PIC_LIST_NUM]; + + qm_matrix_set pps_qm; + + uint8_t ScalingList4x4[6][16]; + uint8_t ScalingList8x8[2][64]; + uint8_t UseDefaultScalingMatrix4x4Flag[6+2]; + uint8_t UseDefaultScalingMatrix8x8Flag[6+2]; + + } pic_param_set, *pic_param_set_ptr, h264_PicParameterSet_t; + + typedef union _list_reordering_num_t + { + int32_t abs_diff_pic_num_minus1; + int32_t long_term_pic_num; + } list_reordering_num_t; + + typedef struct _h264_Ref_Pic_List_Reordering ////size = 8*33+ 1 + 33 + { + list_reordering_num_t list_reordering_num[MAX_NUM_REF_FRAMES+1]; + + uint8_t ref_pic_list_reordering_flag; + uint8_t reordering_of_pic_nums_idc[MAX_NUM_REF_FRAMES+1]; //UE + + } h264_Ref_Pic_List_Reordering_t; + + typedef enum _H264_DANGLING_TYPE + { + DANGLING_TYPE_LAST_FIELD, + DANGLING_TYPE_DPB_RESET, + DANGLING_TYPE_FIELD, + DANGLING_TYPE_FRAME, + DANGLING_TYPE_GAP_IN_FRAME + + } H264_DANGLING_TYPE; + + + typedef struct _h264_Dec_Ref_Pic_Marking //size = 17*4*2 + 17*3 + 4 + 1 + { + int32_t difference_of_pic_num_minus1[NUM_MMCO_OPERATIONS]; + int32_t long_term_pic_num[NUM_MMCO_OPERATIONS]; + + /// MMCO + uint8_t memory_management_control_operation[NUM_MMCO_OPERATIONS]; + uint8_t max_long_term_frame_idx_plus1[NUM_MMCO_OPERATIONS]; + uint8_t long_term_frame_idx[NUM_MMCO_OPERATIONS]; + uint8_t long_term_reference_flag; + + uint8_t adaptive_ref_pic_marking_mode_flag; + uint8_t dec_ref_pic_marking_count; + uint8_t no_output_of_prior_pics_flag; + + uint8_t pad; + } h264_Dec_Ref_Pic_Marking_t; + + + + typedef struct old_slice_par + { + int32_t frame_num; + int32_t pic_order_cnt_lsb; + int32_t delta_pic_order_cnt_bottom; + int32_t delta_pic_order_cnt[2]; + + uint8_t field_pic_flag; + uint8_t bottom_field_flag; + uint8_t nal_ref_idc; + uint8_t structure; + + uint8_t idr_flag; + uint8_t idr_pic_id; + uint8_t pic_parameter_id; + uint8_t status; + } OldSliceParams; #ifdef VBP -typedef struct _h264__pred_weight_table -{ - uint8_t luma_log2_weight_denom; - uint8_t chroma_log2_weight_denom; - uint8_t luma_weight_l0_flag; - int16_t luma_weight_l0[32]; - int8_t luma_offset_l0[32]; - uint8_t chroma_weight_l0_flag; - int16_t chroma_weight_l0[32][2]; - int8_t chroma_offset_l0[32][2]; - - uint8_t luma_weight_l1_flag; - int16_t luma_weight_l1[32]; - int8_t luma_offset_l1[32]; - uint8_t chroma_weight_l1_flag; - int16_t chroma_weight_l1[32][2]; - int8_t chroma_offset_l1[32][2]; -} h264_pred_weight_table; + typedef struct _h264__pred_weight_table + { + uint8_t luma_log2_weight_denom; + uint8_t chroma_log2_weight_denom; + uint8_t luma_weight_l0_flag; + int16_t luma_weight_l0[32]; + int8_t luma_offset_l0[32]; + uint8_t chroma_weight_l0_flag; + int16_t chroma_weight_l0[32][2]; + int8_t chroma_offset_l0[32][2]; + + uint8_t luma_weight_l1_flag; + int16_t luma_weight_l1[32]; + int8_t luma_offset_l1[32]; + uint8_t chroma_weight_l1_flag; + int16_t chroma_weight_l1[32][2]; + int8_t chroma_offset_l1[32][2]; + } h264_pred_weight_table; #endif -typedef struct _h264_Slice_Header -{ - 
int32_t first_mb_in_slice; //UE - int32_t frame_num; //UV - int32_t pic_order_cnt_lsb; //UV - int32_t delta_pic_order_cnt_bottom; //SE - int32_t delta_pic_order_cnt[2]; //SE - int32_t redundant_pic_cnt; //UE - - uint32_t num_ref_idx_l0_active; //UE - uint32_t num_ref_idx_l1_active; //UE - - int32_t slice_qp_delta; //SE - int32_t slice_qs_delta; //SE - int32_t slice_alpha_c0_offset_div2; //SE - int32_t slice_beta_offset_div2; //SE - int32_t slice_group_change_cycle; //UV + typedef struct _h264_Slice_Header + { + int32_t first_mb_in_slice; //UE + int32_t frame_num; //UV + int32_t pic_order_cnt_lsb; //UV + int32_t delta_pic_order_cnt_bottom; //SE + int32_t delta_pic_order_cnt[2]; //SE + int32_t redundant_pic_cnt; //UE + + uint32_t num_ref_idx_l0_active; //UE + uint32_t num_ref_idx_l1_active; //UE + + int32_t slice_qp_delta; //SE + int32_t slice_qs_delta; //SE + int32_t slice_alpha_c0_offset_div2; //SE + int32_t slice_beta_offset_div2; //SE + int32_t slice_group_change_cycle; //UV #ifdef VBP - h264_pred_weight_table sh_predwttbl; + h264_pred_weight_table sh_predwttbl; #endif - - ///// Flags or IDs - //h264_ptype_t slice_type; //UE - uint8_t slice_type; - uint8_t nal_ref_idc; - uint8_t structure; - uint8_t pic_parameter_id; //UE - - uint8_t field_pic_flag; - uint8_t bottom_field_flag; - uint8_t idr_flag; //UE - uint8_t idr_pic_id; //UE - - uint8_t sh_error; - uint8_t cabac_init_idc; //UE - uint8_t sp_for_switch_flag; - uint8_t disable_deblocking_filter_idc; //UE - - uint8_t direct_spatial_mv_pred_flag; - uint8_t num_ref_idx_active_override_flag; - int16_t current_slice_nr; - - //// For Ref list reordering - h264_Dec_Ref_Pic_Marking_t sh_dec_refpic; - h264_Ref_Pic_List_Reordering_t sh_refpic_l0; - h264_Ref_Pic_List_Reordering_t sh_refpic_l1; - -} h264_Slice_Header_t; + + ///// Flags or IDs + //h264_ptype_t slice_type; //UE + uint8_t slice_type; + uint8_t nal_ref_idc; + uint8_t structure; + uint8_t pic_parameter_id; //UE + + uint8_t field_pic_flag; + uint8_t bottom_field_flag; + uint8_t idr_flag; //UE + uint8_t idr_pic_id; //UE + + uint8_t sh_error; + uint8_t cabac_init_idc; //UE + uint8_t sp_for_switch_flag; + uint8_t disable_deblocking_filter_idc; //UE + + uint8_t direct_spatial_mv_pred_flag; + uint8_t num_ref_idx_active_override_flag; + int16_t current_slice_nr; + + //// For Ref list reordering + h264_Dec_Ref_Pic_Marking_t sh_dec_refpic; + h264_Ref_Pic_List_Reordering_t sh_refpic_l0; + h264_Ref_Pic_List_Reordering_t sh_refpic_l1; + + } h264_Slice_Header_t; #define MAX_USER_DATA_SIZE 1024 -typedef struct _h264_user_data_t -{ - h264_sei_payloadtype user_data_type; - - int32_t user_data_id; - int32_t dsn; - int32_t user_data_size; - int32_t user_data[MAX_USER_DATA_SIZE>>2]; -} h264_user_data_t; + typedef struct _h264_user_data_t + { + h264_sei_payloadtype user_data_type; + + int32_t user_data_id; + int32_t dsn; + int32_t user_data_size; + int32_t user_data[MAX_USER_DATA_SIZE>>2]; + } h264_user_data_t; // SPS DISPLAY parameters: seq_param_set_disp, *seq_param_set_disp_ptr; -typedef struct _SPS_DISP -{ - ///// VUI info - vui_seq_parameters_t_used vui_seq_parameters; //size = + typedef struct _SPS_DISP + { + ///// VUI info + vui_seq_parameters_t_used vui_seq_parameters; //size = - ///// Resolution - int16_t pic_width_in_mbs_minus1; - int16_t pic_height_in_map_units_minus1; + ///// Resolution + int16_t pic_width_in_mbs_minus1; + int16_t pic_height_in_map_units_minus1; - ///// Cropping - int16_t frame_crop_rect_left_offset; - int16_t frame_crop_rect_right_offset; + ///// Cropping + int16_t 
frame_crop_rect_left_offset; + int16_t frame_crop_rect_right_offset; - int16_t frame_crop_rect_top_offset; - int16_t frame_crop_rect_bottom_offset; + int16_t frame_crop_rect_top_offset; + int16_t frame_crop_rect_bottom_offset; - ///// Flags - uint8_t frame_mbs_only_flag; - uint8_t mb_adaptive_frame_field_flag; - uint8_t direct_8x8_inference_flag; - uint8_t frame_cropping_flag; - - uint16_t vui_parameters_present_flag; - uint16_t chroma_format_idc; -} seq_param_set_disp, *seq_param_set_disp_ptr; + ///// Flags + uint8_t frame_mbs_only_flag; + uint8_t mb_adaptive_frame_field_flag; + uint8_t direct_8x8_inference_flag; + uint8_t frame_cropping_flag; + + uint16_t vui_parameters_present_flag; + uint16_t chroma_format_idc; + } seq_param_set_disp, *seq_param_set_disp_ptr; ////SPS: seq_param_set, *seq_param_set_ptr; -typedef struct _SPS_PAR_USED -{ - uint32_t is_updated; - - /////////// Required for display section ////////////////////////// - seq_param_set_disp sps_disp; - - int32_t expectedDeltaPerPOCCycle; - int32_t offset_for_non_ref_pic; // se(v), -2^31 to (2^31)-1, 32-bit integer - int32_t offset_for_top_to_bottom_field; // se(v), -2^31 to (2^31)-1, 32-bit integer - - /////////// IDC - uint8_t profile_idc; // u(8), 0x77 for MP - uint8_t constraint_set_flags; // bit 0 to 3 for set0 to set3 - uint8_t level_idc; // u(8) - uint8_t seq_parameter_set_id; // ue(v), 0 to 31 - - - uint8_t pic_order_cnt_type; // ue(v), 0 to 2 - uint8_t log2_max_frame_num_minus4; // ue(v), 0 to 12 - uint8_t log2_max_pic_order_cnt_lsb_minus4; // ue(v), 0 to 12 - uint8_t num_ref_frames_in_pic_order_cnt_cycle; // ue(v), 0 to 255 - - //int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE]; // se(v), -2^31 to (2^31)-1, 32-bit integer - uint8_t num_ref_frames; // ue(v), 0 to 16, - uint8_t gaps_in_frame_num_value_allowed_flag; // u(1) - // This is my addition, we should calculate this once and leave it with the sps - // as opposed to calculating it each time in h264_hdr_decoding_POC() - - uint8_t delta_pic_order_always_zero_flag; // u(1) - uint8_t residual_colour_transform_flag; - - uint8_t bit_depth_luma_minus8; - uint8_t bit_depth_chroma_minus8; - uint8_t lossless_qpprime_y_zero_flag; - uint8_t seq_scaling_matrix_present_flag; - - uint8_t seq_scaling_list_present_flag[MAX_PIC_LIST_NUM]; //0-7 - - //// Combine the scaling matrix to word ( 24 + 32) - uint8_t ScalingList4x4[6][16]; - uint8_t ScalingList8x8[2][64]; - uint8_t UseDefaultScalingMatrix4x4Flag[6]; - uint8_t UseDefaultScalingMatrix8x8Flag[6]; - -} seq_param_set_used, *seq_param_set_used_ptr; - - -typedef struct _SPS_PAR_ALL -{ - - seq_param_set_used sps_par_used; - vui_seq_parameters_t_not_used sps_vui_par_not_used; - -}seq_param_set_all, *seq_param_set_all_ptr; + typedef struct _SPS_PAR_USED + { + uint32_t is_updated; + + /////////// Required for display section ////////////////////////// + seq_param_set_disp sps_disp; + + int32_t expectedDeltaPerPOCCycle; + int32_t offset_for_non_ref_pic; // se(v), -2^31 to (2^31)-1, 32-bit integer + int32_t offset_for_top_to_bottom_field; // se(v), -2^31 to (2^31)-1, 32-bit integer + + /////////// IDC + uint8_t profile_idc; // u(8), 0x77 for MP + uint8_t constraint_set_flags; // bit 0 to 3 for set0 to set3 + uint8_t level_idc; // u(8) + uint8_t seq_parameter_set_id; // ue(v), 0 to 31 + + + uint8_t pic_order_cnt_type; // ue(v), 0 to 2 + uint8_t log2_max_frame_num_minus4; // ue(v), 0 to 12 + uint8_t log2_max_pic_order_cnt_lsb_minus4; // ue(v), 0 to 12 + uint8_t num_ref_frames_in_pic_order_cnt_cycle; // ue(v), 0 to 
255 + + //int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE]; // se(v), -2^31 to (2^31)-1, 32-bit integer + uint8_t num_ref_frames; // ue(v), 0 to 16, + uint8_t gaps_in_frame_num_value_allowed_flag; // u(1) + // This is my addition, we should calculate this once and leave it with the sps + // as opposed to calculating it each time in h264_hdr_decoding_POC() + + uint8_t delta_pic_order_always_zero_flag; // u(1) + uint8_t residual_colour_transform_flag; + + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + uint8_t lossless_qpprime_y_zero_flag; + uint8_t seq_scaling_matrix_present_flag; + + uint8_t seq_scaling_list_present_flag[MAX_PIC_LIST_NUM]; //0-7 + + //// Combine the scaling matrix to word ( 24 + 32) + uint8_t ScalingList4x4[6][16]; + uint8_t ScalingList8x8[2][64]; + uint8_t UseDefaultScalingMatrix4x4Flag[6]; + uint8_t UseDefaultScalingMatrix8x8Flag[6]; + + } seq_param_set_used, *seq_param_set_used_ptr; + + + typedef struct _SPS_PAR_ALL + { + + seq_param_set_used sps_par_used; + vui_seq_parameters_t_not_used sps_vui_par_not_used; + + } seq_param_set_all, *seq_param_set_all_ptr; ///// Image control parameter//////////// -typedef struct _h264_img_par -{ - int32_t frame_num; // decoding num of current frame - int32_t frame_count; // count of decoded frames - int32_t current_slice_num; - int32_t gaps_in_frame_num; - - // POC decoding - int32_t num_ref_frames_in_pic_order_cnt_cycle; - int32_t delta_pic_order_always_zero_flag; - int32_t offset_for_non_ref_pic; - int32_t offset_for_top_to_bottom_field; - - int32_t pic_order_cnt_lsb; - int32_t pic_order_cnt_msb; - int32_t delta_pic_order_cnt_bottom; - int32_t delta_pic_order_cnt[2]; - - int32_t PicOrderCntMsb; - int32_t CurrPicOrderCntMsb; - int32_t PrevPicOrderCntLsb; - - int32_t FrameNumOffset; - - int32_t PreviousFrameNum; - int32_t PreviousFrameNumOffset; - - int32_t toppoc; - int32_t bottompoc; - int32_t framepoc; - int32_t ThisPOC; - - //int32_t sei_freeze_this_image; - - ///////////////////// Resolutions - int32_t PicWidthInMbs; - int32_t FrameHeightInMbs; - - ///////////////////// MMCO - uint8_t last_has_mmco_5; - uint8_t curr_has_mmco_5; - - /////////////////// Flags - uint8_t g_new_frame; - uint8_t g_new_pic; - - uint8_t structure; - uint8_t second_field; // Set to one if this is the second field of a set of paired fields... 
- uint8_t field_pic_flag; - uint8_t last_pic_bottom_field; - - uint8_t bottom_field_flag; - uint8_t MbaffFrameFlag; - uint8_t no_output_of_prior_pics_flag; - uint8_t long_term_reference_flag; - - uint8_t skip_this_pic; - uint8_t pic_order_cnt_type; - // Recovery - uint8_t recovery_point_found; - uint8_t used_for_reference; -} h264_img_par; - - -typedef struct _h264_slice_reg_data -{ - uint32_t h264_bsd_slice_p1; // 0x150 - //uint32_t h264_mpr_list0[8]; // from 0x380 to 0x3BC - uint32_t h264_bsd_slice_p2; // 0x154 - uint32_t h264_bsd_slice_start; // 0x158 - -} h264_slice_data; - - -typedef struct _h264_pic_data -{ - uint32_t h264_dpb_init; // 0x40 - //info For current pic - uint32_t h264_cur_bsd_img_init; // 0x140 - uint32_t h264_cur_mpr_tf_poc; // 0x300 - uint32_t h264_cur_mpr_bf_poc; // 0x304 - - //info For framess in DPB - //uint32_t h264_dpb_bsd_img_init[16]; //0x140 - //uint32_t h264_dpb_mpr_tf_poc[16]; // 0x300 - //uint32_t h264_dpb_mpr_bf_poc[16]; // 0x304 -} h264_pic_data; - -enum h264_workload_item_type -{ - VIDDEC_WORKLOAD_H264_SLICE_REG = VIDDEC_WORKLOAD_DECODER_SPECIFIC, - VIDDEC_WORKLOAD_H264_PIC_REG, - VIDDEC_WORKLOAD_H264_DPB_FRAME_POC, - VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET, - VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET, - VIDDEC_WORKLOAD_H264_PWT_ES_BYTES, - VIDDEC_WORKLOAD_H264_SCALING_MATRIX, - VIDDEC_WORKLOAD_H264_DEBUG -}; + typedef struct _h264_img_par + { + int32_t frame_num; // decoding num of current frame + int32_t frame_count; // count of decoded frames + int32_t current_slice_num; + int32_t gaps_in_frame_num; + + // POC decoding + int32_t num_ref_frames_in_pic_order_cnt_cycle; + int32_t delta_pic_order_always_zero_flag; + int32_t offset_for_non_ref_pic; + int32_t offset_for_top_to_bottom_field; + + int32_t pic_order_cnt_lsb; + int32_t pic_order_cnt_msb; + int32_t delta_pic_order_cnt_bottom; + int32_t delta_pic_order_cnt[2]; + + int32_t PicOrderCntMsb; + int32_t CurrPicOrderCntMsb; + int32_t PrevPicOrderCntLsb; + + int32_t FrameNumOffset; + + int32_t PreviousFrameNum; + int32_t PreviousFrameNumOffset; + + int32_t toppoc; + int32_t bottompoc; + int32_t framepoc; + int32_t ThisPOC; + + //int32_t sei_freeze_this_image; + + ///////////////////// Resolutions + int32_t PicWidthInMbs; + int32_t FrameHeightInMbs; + + ///////////////////// MMCO + uint8_t last_has_mmco_5; + uint8_t curr_has_mmco_5; + + /////////////////// Flags + uint8_t g_new_frame; + uint8_t g_new_pic; + + uint8_t structure; + uint8_t second_field; // Set to one if this is the second field of a set of paired fields... 
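
The PicOrderCntMsb/PrevPicOrderCntLsb/pic_order_cnt_lsb bookkeeping in h264_img_par exists to implement the pic_order_cnt_type == 0 arithmetic of H.264 subclause 8.2.1.1, which h264_hdr_decoding_poc() carries out. The wrap-around rule is compact enough to state in full; the function below is the derivation from the spec, not a quote from this parser.

#include <stdint.h>

/* H.264 8.2.1.1: derive PicOrderCntMsb for pic_order_cnt_type == 0.
 * max_lsb is MaxPicOrderCntLsb = 2^(log2_max_pic_order_cnt_lsb_minus4 + 4). */
static int32_t derive_poc_msb(int32_t prev_msb, int32_t prev_lsb,
                              int32_t lsb, int32_t max_lsb)
{
    if ((lsb < prev_lsb) && ((prev_lsb - lsb) >= (max_lsb / 2)))
        return prev_msb + max_lsb;      /* lsb wrapped upward   */
    else if ((lsb > prev_lsb) && ((lsb - prev_lsb) > (max_lsb / 2)))
        return prev_msb - max_lsb;      /* lsb wrapped downward */
    else
        return prev_msb;
}
/* TopFieldOrderCnt = PicOrderCntMsb + pic_order_cnt_lsb, i.e. the toppoc field. */
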
+ uint8_t field_pic_flag; + uint8_t last_pic_bottom_field; + + uint8_t bottom_field_flag; + uint8_t MbaffFrameFlag; + uint8_t no_output_of_prior_pics_flag; + uint8_t long_term_reference_flag; + + uint8_t skip_this_pic; + uint8_t pic_order_cnt_type; + // Recovery + uint8_t recovery_point_found; + uint8_t used_for_reference; + } h264_img_par; + + + typedef struct _h264_slice_reg_data + { + uint32_t h264_bsd_slice_p1; // 0x150 + //uint32_t h264_mpr_list0[8]; // from 0x380 to 0x3BC + uint32_t h264_bsd_slice_p2; // 0x154 + uint32_t h264_bsd_slice_start; // 0x158 + + } h264_slice_data; + + + typedef struct _h264_pic_data + { + uint32_t h264_dpb_init; // 0x40 + //info For current pic + uint32_t h264_cur_bsd_img_init; // 0x140 + uint32_t h264_cur_mpr_tf_poc; // 0x300 + uint32_t h264_cur_mpr_bf_poc; // 0x304 + + //info For framess in DPB + //uint32_t h264_dpb_bsd_img_init[16]; //0x140 + //uint32_t h264_dpb_mpr_tf_poc[16]; // 0x300 + //uint32_t h264_dpb_mpr_bf_poc[16]; // 0x304 + } h264_pic_data; + + enum h264_workload_item_type + { + VIDDEC_WORKLOAD_H264_SLICE_REG = VIDDEC_WORKLOAD_DECODER_SPECIFIC, + VIDDEC_WORKLOAD_H264_PIC_REG, + VIDDEC_WORKLOAD_H264_DPB_FRAME_POC, + VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET, + VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET, + VIDDEC_WORKLOAD_H264_PWT_ES_BYTES, + VIDDEC_WORKLOAD_H264_SCALING_MATRIX, + VIDDEC_WORKLOAD_H264_DEBUG + }; //////////////////////////////////////////// -/* Full Info set*/ + /* Full Info set*/ //////////////////////////////////////////// -typedef struct _h264_Info -{ + typedef struct _h264_Info + { + + h264_DecodedPictureBuffer dpb; + + //// Structures + //// need to gurantee active_SPS and active_PPS start from 4-bytes alignment address + seq_param_set_used active_SPS; + pic_param_set active_PPS; - h264_DecodedPictureBuffer dpb; - //// Structures - //// need to gurantee active_SPS and active_PPS start from 4-bytes alignment address - seq_param_set_used active_SPS; - pic_param_set active_PPS; + h264_Slice_Header_t SliceHeader; + OldSliceParams old_slice; + sei_info sei_information; - - h264_Slice_Header_t SliceHeader; - OldSliceParams old_slice; - sei_info sei_information; + h264_img_par img; - h264_img_par img; + uint32_t SPS_PADDR_GL; + uint32_t PPS_PADDR_GL; + uint32_t OFFSET_REF_FRAME_PADDR_GL; + uint32_t TMP_OFFSET_REFFRM_PADDR_GL; - uint32_t SPS_PADDR_GL; - uint32_t PPS_PADDR_GL; - uint32_t OFFSET_REF_FRAME_PADDR_GL; - uint32_t TMP_OFFSET_REFFRM_PADDR_GL; + uint32_t h264_list_replacement; - uint32_t h264_list_replacement; + uint32_t h264_pwt_start_byte_offset; + uint32_t h264_pwt_start_bit_offset; + uint32_t h264_pwt_end_byte_offset; + uint32_t h264_pwt_end_bit_offset; + uint32_t h264_pwt_enabled; - uint32_t h264_pwt_start_byte_offset; - uint32_t h264_pwt_start_bit_offset; - uint32_t h264_pwt_end_byte_offset; - uint32_t h264_pwt_end_bit_offset; - uint32_t h264_pwt_enabled; + uint32_t sps_valid; - uint32_t sps_valid; + uint8_t slice_ref_list0[32]; + uint8_t slice_ref_list1[32]; - uint8_t slice_ref_list0[32]; - uint8_t slice_ref_list1[32]; + uint8_t qm_present_list; + //h264_NAL_Unit_t + uint8_t nal_unit_type; + uint8_t old_nal_unit_type; + uint8_t got_start; - uint8_t qm_present_list; - //h264_NAL_Unit_t - uint8_t nal_unit_type; - uint8_t old_nal_unit_type; - uint8_t got_start; + //workload + uint8_t push_to_cur; + uint8_t Is_first_frame_in_stream; + uint8_t Is_SPS_updated; + uint8_t number_of_first_au_info_nal_before_first_slice; - //workload - uint8_t push_to_cur; - uint8_t Is_first_frame_in_stream; - uint8_t Is_SPS_updated; - uint8_t 
number_of_first_au_info_nal_before_first_slice; + uint8_t is_frame_boundary_detected_by_non_slice_nal; + uint8_t is_frame_boundary_detected_by_slice_nal; + uint8_t is_current_workload_done; + uint8_t primary_pic_type_plus_one; //AUD---[0,7] - uint8_t is_frame_boundary_detected_by_non_slice_nal; - uint8_t is_frame_boundary_detected_by_slice_nal; - uint8_t is_current_workload_done; - uint8_t primary_pic_type_plus_one; //AUD---[0,7] + //Error handling + uint8_t sei_rp_received; + uint8_t last_I_frame_idc; + uint8_t sei_b_state_ready; + uint8_t gop_err_flag; - //Error handling - uint8_t sei_rp_received; - uint8_t last_I_frame_idc; - uint8_t sei_b_state_ready; - uint8_t gop_err_flag; - - uint32_t wl_err_curr; - uint32_t wl_err_next; + uint32_t wl_err_curr; + uint32_t wl_err_next; -} h264_Info; + } h264_Info; -struct h264_viddec_parser -{ - uint32_t sps_pps_ddr_paddr; - h264_Info info; -}; + struct h264_viddec_parser + { + uint32_t sps_pps_ddr_paddr; + h264_Info info; + }; #ifdef __cplusplus } diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h index 88db5de..2e7b817 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h +++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h @@ -17,43 +17,43 @@ extern "C" { #endif -enum h264_debug_point_id -{ - WARNING_H264_GENERAL = 0xff000000, - WARNING_H264_DPB, - WARNING_H264_REFLIST, - WARNING_H264_SPS, - WARNING_H264_PPS, - WARNING_H264_SEI, - WARNING_H264_VCL, - - ERROR_H264_GENERAL = 0xffff0000, - ERROR_H264_DPB, - ERROR_H264_REFLIST, - ERROR_H264_SPS, - ERROR_H264_PPS, - ERROR_H264_SEI, - ERROR_H264_VCL -}; - -static inline void MFD_PARSER_DEBUG(int debug_point_id) -{ + enum h264_debug_point_id + { + WARNING_H264_GENERAL = 0xff000000, + WARNING_H264_DPB, + WARNING_H264_REFLIST, + WARNING_H264_SPS, + WARNING_H264_PPS, + WARNING_H264_SEI, + WARNING_H264_VCL, + + ERROR_H264_GENERAL = 0xffff0000, + ERROR_H264_DPB, + ERROR_H264_REFLIST, + ERROR_H264_SPS, + ERROR_H264_PPS, + ERROR_H264_SEI, + ERROR_H264_VCL + }; + + static inline void MFD_PARSER_DEBUG(int debug_point_id) + { #ifdef H264_MFD_DEBUG - int p1,p2,p3,p4,p5,p6; + int p1,p2,p3,p4,p5,p6; - p1 = 0x0BAD; - p2 = 0xC0DE; - p3 = debug_point_id; - p4=p5=p6 = 0; - - DEBUG_WRITE(p1,p2,p3,p4,p5,p6); -#endif + p1 = 0x0BAD; + p2 = 0xC0DE; + p3 = debug_point_id; + p4=p5=p6 = 0; - debug_point_id = debug_point_id; + DEBUG_WRITE(p1,p2,p3,p4,p5,p6); +#endif - return; -} + debug_point_id = debug_point_id; + + return; + } @@ -61,27 +61,27 @@ static inline void MFD_PARSER_DEBUG(int debug_point_id) //////////////////////////////////////////////////////////////////// ///////////////////////////// Init functions //////////////////////////////////////////////////////////////////// -extern void h264_init_old_slice(h264_Info* pInfo); -extern void h264_init_img(h264_Info* pInfo); -extern void h264_init_Info(h264_Info* pInfo); -extern void h264_init_Info_under_sps_pps_level(h264_Info* pInfo); -extern void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem); + extern void h264_init_old_slice(h264_Info* pInfo); + extern void h264_init_img(h264_Info* pInfo); + extern void h264_init_Info(h264_Info* pInfo); + extern void h264_init_Info_under_sps_pps_level(h264_Info* pInfo); + extern void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem); -extern void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader); -extern void h264_sei_stream_initialise (h264_Info* pInfo); -extern void 
h264_update_img_info(h264_Info * pInfo ); -extern void h264_update_frame_type(h264_Info * pInfo ); + extern void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader); + extern void h264_sei_stream_initialise (h264_Info* pInfo); + extern void h264_update_img_info(h264_Info * pInfo ); + extern void h264_update_frame_type(h264_Info * pInfo ); -extern int32_t h264_check_previous_frame_end(h264_Info * pInfo); + extern int32_t h264_check_previous_frame_end(h264_Info * pInfo); //////////////////////////////////////////////////////////////////// ///////////////////////////// bsd functions //////////////////////////////////////////////////////////////////// -extern uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo); + extern uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo); ////// VLE and bit operation -extern uint32_t h264_get_codeNum(void *parent,h264_Info* pInfo); -extern int32_t h264_GetVLCElement(void *parent,h264_Info* pInfo, uint8_t bIsSigned); + extern uint32_t h264_get_codeNum(void *parent,h264_Info* pInfo); + extern int32_t h264_GetVLCElement(void *parent,h264_Info* pInfo, uint8_t bIsSigned); @@ -90,58 +90,58 @@ extern int32_t h264_GetVLCElement(void *parent,h264_Info* pInfo, uint8_t bIsSign //////////////////////////////////////////////////////////////////// //NAL -extern h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc); + extern h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc); ////// Slice header -extern h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); -extern h264_Status h264_Parse_Slice_Header_1(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); -extern h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); -extern h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Slice_Header_1(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); ////// SPS -extern h264_Status h264_Parse_SeqParameterSet(void *parent, h264_Info * pInfo,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame); + extern h264_Status h264_Parse_SeqParameterSet(void *parent, h264_Info * pInfo,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame); //extern h264_Status h264_Parse_SeqParameterSet_Extension(void *parent, h264_Info * pInfo); -extern h264_Status h264_Parse_PicParameterSet(void *parent, h264_Info * pInfo,h264_PicParameterSet_t* PictureParameterSet); + extern h264_Status h264_Parse_PicParameterSet(void *parent, h264_Info * pInfo,h264_PicParameterSet_t* PictureParameterSet); ////// SEI functions -h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void *parent,h264_Info* pInfo); -h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtype payloadType, int32_t payloadSize); + h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void 
*parent,h264_Info* pInfo); + h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtype payloadType, int32_t payloadSize); ////// -extern h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t *UseDefaultScalingMatrix, h264_Info* pInfo); -extern h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); -extern h264_Status h264_Parse_Pred_Weight_Table(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); -extern h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t *UseDefaultScalingMatrix, h264_Info* pInfo); + extern h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Pred_Weight_Table(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); ///// Mem functions -extern void* h264_memset( void* buf, uint32_t c, uint32_t num ); -extern void* h264_memcpy( void* dest, void* src, uint32_t num ); + extern void* h264_memset( void* buf, uint32_t c, uint32_t num ); + extern void* h264_memcpy( void* dest, void* src, uint32_t num ); -extern void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId); -extern void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId); + extern void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId); + extern void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId); -extern void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId); -extern void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId); + extern void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId); + extern void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId); -extern void h264_Parse_Copy_Offset_Ref_Frames_To_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId); -extern void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId); -extern uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId); -extern void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId); + extern void h264_Parse_Copy_Offset_Ref_Frames_To_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId); + extern void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId); + extern uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId); + extern void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId); //////////////////////////////////////////////////////////////////// ///////////////////////////// workload functions //////////////////////////////////////////////////////////////////// -extern void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ); + extern void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ); -extern void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ); + extern void 
h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ); -extern void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ); -extern void h264_parse_emit_eos( void *parent, h264_Info *pInfo ); + extern void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ); + extern void h264_parse_emit_eos( void *parent, h264_Info *pInfo ); #ifdef __cplusplus } #endif diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h index 8b4deb4..002818b 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h +++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h @@ -26,79 +26,79 @@ extern "C" { //////////////////////////////////////////////////////////////////// ///// Reference list -extern void h264_dpb_update_ref_lists(h264_Info * pInfo); -extern void h264_dpb_reorder_lists(h264_Info * pInfo); + extern void h264_dpb_update_ref_lists(h264_Info * pInfo); + extern void h264_dpb_reorder_lists(h264_Info * pInfo); -extern void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb,int32_t NonExisting); + extern void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb,int32_t NonExisting); -///// POC -extern void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num); -extern void h264_hdr_post_poc(h264_Info* pInfo,int32_t NonExisting, int32_t frame_num, int32_t use_old); +///// POC + extern void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num); + extern void h264_hdr_post_poc(h264_Info* pInfo,int32_t NonExisting, int32_t frame_num, int32_t use_old); ///// DPB buffer mangement -extern void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb); + extern void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb); -extern void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc); -extern void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc); -extern void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx); -extern void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx, int32_t fs_idc, int32_t polarity); -extern void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, int32_t picNumX); -extern void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc); + extern void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc); + extern void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc); + extern void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx); + extern void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx, int32_t fs_idc, int32_t polarity); + extern void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, int32_t picNumX); + extern void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc); -extern void h264_dpb_update_queue_dangling_field(h264_Info * pInfo); -extern void h264_dpb_is_used_for_reference(int32_t * flag); + extern void h264_dpb_update_queue_dangling_field(h264_Info * pInfo); + extern void h264_dpb_is_used_for_reference(int32_t * flag); -extern void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * 
p_dpb,int32_t index); -extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames); + extern void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb,int32_t index); + extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames); -extern void h264_dpb_idr_memory_management (h264_Info * pInfo, - seq_param_set_used_ptr active_sps, - int32_t no_output_of_prior_pics_flag); + extern void h264_dpb_idr_memory_management (h264_Info * pInfo, + seq_param_set_used_ptr active_sps, + int32_t no_output_of_prior_pics_flag); -extern void h264_dpb_init_frame_store(h264_Info * pInfo); -extern void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHeightInMbs, - int32_t SizeChange, int32_t no_output_of_prior_pics_flag); + extern void h264_dpb_init_frame_store(h264_Info * pInfo); + extern void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHeightInMbs, + int32_t SizeChange, int32_t no_output_of_prior_pics_flag); -extern void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo); + extern void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo); -extern int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting); + extern int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting); -extern void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, int32_t *pos); -extern void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t * flag); + extern void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, int32_t *pos); + extern void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t * flag); -extern void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb, - int32_t NonExisting, - int32_t num_ref_frames); -extern int32_t h264_dpb_queue_update(h264_Info * pInfo, - int32_t push, - int32_t direct, - int32_t frame_request, - int32_t num_ref_frames); + extern void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb, + int32_t NonExisting, + int32_t num_ref_frames); + extern int32_t h264_dpb_queue_update(h264_Info * pInfo, + int32_t push, + int32_t direct, + int32_t frame_request, + int32_t num_ref_frames); -extern void h264_dpb_split_field (h264_Info * pInfo); -extern void h264_dpb_combine_field(int32_t use_old); + extern void h264_dpb_split_field (h264_Info * pInfo); + extern void h264_dpb_combine_field(int32_t use_old); -extern void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo, - int32_t used_for_reference, - int32_t add2dpb, - int32_t NonExisting, - int32_t use_old); + extern void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo, + int32_t used_for_reference, + int32_t add2dpb, + int32_t NonExisting, + int32_t use_old); -extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo, - int32_t NonExisting, - int32_t use_old); + extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo, + int32_t NonExisting, + int32_t use_old); -extern void h264_dpb_adaptive_memory_management (h264_Info * pInfo); + extern void h264_dpb_adaptive_memory_management (h264_Info * pInfo); -extern int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo, - int32_t direct, int32_t request, int32_t num_ref_frames); + extern int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo, + int32_t direct, int32_t request, int32_t num_ref_frames); -extern void 
h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx); -extern void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int32_t * existing); + extern void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx); + extern void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int32_t * existing); //////////////////////////////////////////////////////////// Globals -extern frame_store *active_fs; + extern frame_store *active_fs; #ifdef __cplusplus diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h index e5903cd..f0a591d 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h +++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h @@ -31,53 +31,53 @@ #define SEI_SCAN_FORMAT_INTERLACED 0x1 #define SEI_SCAN_FORMAT_PROGRESSIVE 0x3 #define SEI_SCAN_FORMAT_VALID(r) (r&0x1) -#define SEI_SCAN_FORMAT(r) ((r&0x2)>>1) +#define SEI_SCAN_FORMAT(r) ((r&0x2)>>1) -typedef enum +typedef enum { - SEI_BUF_PERIOD = 0, - SEI_PIC_TIMING, - SEI_PAN_SCAN, - SEI_FILLER_PAYLOAD, - SEI_REG_USERDATA, - SEI_UNREG_USERDATA, - SEI_RECOVERY_POINT, - SEI_DEC_REF_PIC_MARKING_REP, - SEI_SPARE_PIC, - SEI_SCENE_INFO, - SEI_SUB_SEQ_INFO, - SEI_SUB_SEQ_LAYER, - SEI_SUB_SEQ, - SEI_FULL_FRAME_FREEZE, - SEI_FULL_FRAME_FREEZE_RELEASE, - SEI_FULL_FRAME_SNAPSHOT, - SEI_PROGRESSIVE_SEGMENT_START, - SEI_PROGRESSIVE_SEGMENT_END, - SEI_MOTION_CONSTRAINED_SLICE_GRP_SET, - SEI_FILM_GRAIN_CHARACTERISTICS, - SEI_DEBLK_FILTER_DISPLAY_PREFERENCE, - SEI_STEREO_VIDEO_INFO, - SEI_RESERVED, -}h264_sei_payloadtype; + SEI_BUF_PERIOD = 0, + SEI_PIC_TIMING, + SEI_PAN_SCAN, + SEI_FILLER_PAYLOAD, + SEI_REG_USERDATA, + SEI_UNREG_USERDATA, + SEI_RECOVERY_POINT, + SEI_DEC_REF_PIC_MARKING_REP, + SEI_SPARE_PIC, + SEI_SCENE_INFO, + SEI_SUB_SEQ_INFO, + SEI_SUB_SEQ_LAYER, + SEI_SUB_SEQ, + SEI_FULL_FRAME_FREEZE, + SEI_FULL_FRAME_FREEZE_RELEASE, + SEI_FULL_FRAME_SNAPSHOT, + SEI_PROGRESSIVE_SEGMENT_START, + SEI_PROGRESSIVE_SEGMENT_END, + SEI_MOTION_CONSTRAINED_SLICE_GRP_SET, + SEI_FILM_GRAIN_CHARACTERISTICS, + SEI_DEBLK_FILTER_DISPLAY_PREFERENCE, + SEI_STEREO_VIDEO_INFO, + SEI_RESERVED, +} h264_sei_payloadtype; typedef struct _h264_SEI_buffering_period { - int32_t seq_param_set_id; - int32_t initial_cpb_removal_delay_nal; - int32_t initial_cpb_removal_delay_offset_nal; - int32_t initial_cpb_removal_delay_vcl; - int32_t initial_cpb_removal_delay_offset_vcl; + int32_t seq_param_set_id; + int32_t initial_cpb_removal_delay_nal; + int32_t initial_cpb_removal_delay_offset_nal; + int32_t initial_cpb_removal_delay_vcl; + int32_t initial_cpb_removal_delay_offset_vcl; -}h264_SEI_buffering_period_t; +} h264_SEI_buffering_period_t; typedef struct _h264_SEI_pic_timing { - int32_t cpb_removal_delay; - int32_t dpb_output_delay; - int32_t pic_struct; -}h264_SEI_pic_timing_t; + int32_t cpb_removal_delay; + int32_t dpb_output_delay; + int32_t pic_struct; +} h264_SEI_pic_timing_t; #if 0 int32_t clock_timestamp_flag[MAX_NUM_CLOCK_TS]; @@ -96,191 +96,191 @@ int32_t minutes_flag[MAX_NUM_CLOCK_TS]; int32_t hours_flag[MAX_NUM_CLOCK_TS]; int32_t time_offset[MAX_NUM_CLOCK_TS]; -#endif +#endif typedef struct _h264_SEI_pan_scan_rectangle { - int32_t pan_scan_rect_id; - int32_t pan_scan_rect_cancel_flag; - int32_t pan_scan_cnt_minus1; - int32_t pan_scan_rect_left_offset[MAX_PAN_SCAN_CNT]; - int32_t pan_scan_rect_right_offset[MAX_PAN_SCAN_CNT]; - int32_t pan_scan_rect_top_offset[MAX_PAN_SCAN_CNT]; - int32_t 
pan_scan_rect_bottom_offset[MAX_PAN_SCAN_CNT]; - int32_t pan_scan_rect_repetition_period; -}h264_SEI_pan_scan_rectangle_t; + int32_t pan_scan_rect_id; + int32_t pan_scan_rect_cancel_flag; + int32_t pan_scan_cnt_minus1; + int32_t pan_scan_rect_left_offset[MAX_PAN_SCAN_CNT]; + int32_t pan_scan_rect_right_offset[MAX_PAN_SCAN_CNT]; + int32_t pan_scan_rect_top_offset[MAX_PAN_SCAN_CNT]; + int32_t pan_scan_rect_bottom_offset[MAX_PAN_SCAN_CNT]; + int32_t pan_scan_rect_repetition_period; +} h264_SEI_pan_scan_rectangle_t; typedef struct _h264_SEI_filler_payload { - int32_t ff_byte; -}h264_SEI_filler_payload_t; + int32_t ff_byte; +} h264_SEI_filler_payload_t; typedef struct _h264_SEI_userdata_registered { - int32_t itu_t_t35_country_code; - int32_t itu_t_t35_country_code_extension_byte; - int32_t itu_t_t35_payload_byte; -}h264_SEI_userdata_registered_t; + int32_t itu_t_t35_country_code; + int32_t itu_t_t35_country_code_extension_byte; + int32_t itu_t_t35_payload_byte; +} h264_SEI_userdata_registered_t; typedef struct _h264_SEI_userdata_unregistered { - int32_t uuid_iso_iec_11578[4]; - int32_t user_data_payload_byte; -}h264_SEI_userdata_unregistered_t; + int32_t uuid_iso_iec_11578[4]; + int32_t user_data_payload_byte; +} h264_SEI_userdata_unregistered_t; typedef struct _h264_SEI_recovery_point { - int32_t recovery_frame_cnt; - int32_t exact_match_flag; - int32_t broken_link_flag; - int32_t changing_slice_group_idc; -}h264_SEI_recovery_point_t; + int32_t recovery_frame_cnt; + int32_t exact_match_flag; + int32_t broken_link_flag; + int32_t changing_slice_group_idc; +} h264_SEI_recovery_point_t; typedef struct _h264_SEI_decoded_ref_pic_marking_repetition { - int32_t original_idr_flag; - int32_t original_frame_num; - int32_t orignal_field_pic_flag; - int32_t original_bottom_field_pic_flag; - int32_t no_output_of_prior_pics_flag; - int32_t long_term_reference_flag; - int32_t adaptive_ref_pic_marking_mode_flag; - int32_t memory_management_control_operation; //UE - int32_t difference_of_pics_num_minus1; //UE - int32_t long_term_pic_num; //UE - int32_t long_term_frame_idx; //UE - int32_t max_long_term_frame_idx_plus1; //UE -}h264_SEI_decoded_ref_pic_marking_repetition_t; + int32_t original_idr_flag; + int32_t original_frame_num; + int32_t orignal_field_pic_flag; + int32_t original_bottom_field_pic_flag; + int32_t no_output_of_prior_pics_flag; + int32_t long_term_reference_flag; + int32_t adaptive_ref_pic_marking_mode_flag; + int32_t memory_management_control_operation; //UE + int32_t difference_of_pics_num_minus1; //UE + int32_t long_term_pic_num; //UE + int32_t long_term_frame_idx; //UE + int32_t max_long_term_frame_idx_plus1; //UE +} h264_SEI_decoded_ref_pic_marking_repetition_t; typedef struct _h264_SEI_spare_picture { - int32_t target_frame_num; - int32_t spare_field_flag; - int32_t target_bottom_field_flag; - int32_t num_spare_pics_minus1; - int32_t delta_spare_frame_num[MAX_NUM_SPARE_PICS]; - int32_t spare_bottom_field_flag[MAX_NUM_SPARE_PICS]; - int32_t spare_area_idc[MAX_NUM_SPARE_PICS]; // not complete -}h264_SEI_spare_picture_t; + int32_t target_frame_num; + int32_t spare_field_flag; + int32_t target_bottom_field_flag; + int32_t num_spare_pics_minus1; + int32_t delta_spare_frame_num[MAX_NUM_SPARE_PICS]; + int32_t spare_bottom_field_flag[MAX_NUM_SPARE_PICS]; + int32_t spare_area_idc[MAX_NUM_SPARE_PICS]; // not complete +} h264_SEI_spare_picture_t; typedef struct _h264_SEI_scene_info { - int32_t scene_info_present_flag; - int32_t scene_id; - int32_t scene_transitioning_type; - int32_t second_scene_id; 
-}h264_SEI_scene_info_t; + int32_t scene_info_present_flag; + int32_t scene_id; + int32_t scene_transitioning_type; + int32_t second_scene_id; +} h264_SEI_scene_info_t; typedef struct _h264_SEI_sub_sequence_info { - int32_t sub_seq_layer_num; - int32_t sub_seq_id; - int32_t first_ref_pic_flag; - int32_t leading_non_ref_pic_flag; - int32_t last_pic_flag; - int32_t sub_seq_frame_num_flag; - int32_t sub_seq_frame_num; -}h264_SEI_sub_sequence_info_t; + int32_t sub_seq_layer_num; + int32_t sub_seq_id; + int32_t first_ref_pic_flag; + int32_t leading_non_ref_pic_flag; + int32_t last_pic_flag; + int32_t sub_seq_frame_num_flag; + int32_t sub_seq_frame_num; +} h264_SEI_sub_sequence_info_t; typedef struct _h264_SEI_sub_sequence_layer { - int32_t num_sub_seq_layers_minus1; - int32_t accurate_statistics_flag[MAX_SUB_SEQ_LAYERS]; - int32_t average_bit_rate[MAX_SUB_SEQ_LAYERS]; - int32_t average_frame_rate[MAX_SUB_SEQ_LAYERS]; -}h264_SEI_sub_sequence_layer_t; + int32_t num_sub_seq_layers_minus1; + int32_t accurate_statistics_flag[MAX_SUB_SEQ_LAYERS]; + int32_t average_bit_rate[MAX_SUB_SEQ_LAYERS]; + int32_t average_frame_rate[MAX_SUB_SEQ_LAYERS]; +} h264_SEI_sub_sequence_layer_t; typedef struct _h264_SEI_sub_sequence { - int32_t sub_seq_layer_num; - int32_t sub_seq_id; - int32_t duration_flag; - int32_t sub_seq_duration; - int32_t average_rate_flag; - int32_t average_statistics_flag; - int32_t average_bit_rate; - int32_t average_frame_rate; - int32_t num_referenced_subseqs; - int32_t ref_sub_seq_layer_num; - int32_t ref_sub_seq_id; - int32_t ref_sub_seq_direction; -}h264_SEI_sub_sequence_t; + int32_t sub_seq_layer_num; + int32_t sub_seq_id; + int32_t duration_flag; + int32_t sub_seq_duration; + int32_t average_rate_flag; + int32_t average_statistics_flag; + int32_t average_bit_rate; + int32_t average_frame_rate; + int32_t num_referenced_subseqs; + int32_t ref_sub_seq_layer_num; + int32_t ref_sub_seq_id; + int32_t ref_sub_seq_direction; +} h264_SEI_sub_sequence_t; typedef struct _h264_SEI_full_frame_freeze { - int32_t full_frame_freeze_repetition_period; -}h264_SEI_full_frame_freeze_t; + int32_t full_frame_freeze_repetition_period; +} h264_SEI_full_frame_freeze_t; typedef struct _h264_SEI_full_frame_snapshot { - int32_t snapshot_id; -}h264_SEI_full_frame_snapshot_t; + int32_t snapshot_id; +} h264_SEI_full_frame_snapshot_t; typedef struct _h264_SEI_progressive_segment_start { - int32_t progressive_refinement_id; - int32_t num_refinement_steps_minus1; -}h264_SEI_progressive_segment_start_t; + int32_t progressive_refinement_id; + int32_t num_refinement_steps_minus1; +} h264_SEI_progressive_segment_start_t; typedef struct _h264_SEI_progressive_segment_end { - int32_t progressive_refinement_id; -}h264_SEI_progressive_segment_end_t; + int32_t progressive_refinement_id; +} h264_SEI_progressive_segment_end_t; typedef struct _h264_SEI_motion_constrained_slice_group { - int32_t num_slice_groups_in_set_minus1; - int32_t slice_group_id[MAX_SLICE_GRPS]; - int32_t exact_sample_value_match_flag; - int32_t pan_scan_rect_flag; - int32_t pan_scan_rect_id; -}h264_SEI_motion_constrained_slice_group_t; + int32_t num_slice_groups_in_set_minus1; + int32_t slice_group_id[MAX_SLICE_GRPS]; + int32_t exact_sample_value_match_flag; + int32_t pan_scan_rect_flag; + int32_t pan_scan_rect_id; +} h264_SEI_motion_constrained_slice_group_t; typedef struct _h264_SEI_deblocking_filter_display_pref { - int32_t devlocking_display_preference_cancel_flag; - int32_t display_prior_to_deblocking_preferred_flag; - int32_t 
dec_frame_buffering_constraint_flag; - int32_t deblocking_display_preference_repetition_period; -}h264_SEI_deblocking_filter_display_pref_t; + int32_t devlocking_display_preference_cancel_flag; + int32_t display_prior_to_deblocking_preferred_flag; + int32_t dec_frame_buffering_constraint_flag; + int32_t deblocking_display_preference_repetition_period; +} h264_SEI_deblocking_filter_display_pref_t; typedef struct _h264_SEI_stereo_video_info { - int32_t field_views_flag; - int32_t top_field_is_left_view_flag; - int32_t curent_frame_is_left_view_flag; - int32_t next_frame_is_second_view_flag; - int32_t left_view_self_contained_flag; - int32_t right_view_self_contained_flag; -}h264_SEI_stereo_video_info_t; + int32_t field_views_flag; + int32_t top_field_is_left_view_flag; + int32_t curent_frame_is_left_view_flag; + int32_t next_frame_is_second_view_flag; + int32_t left_view_self_contained_flag; + int32_t right_view_self_contained_flag; +} h264_SEI_stereo_video_info_t; typedef struct _h264_SEI_reserved { - int32_t reserved_sei_message_payload_byte; -}h264_SEI_reserved_t; + int32_t reserved_sei_message_payload_byte; +} h264_SEI_reserved_t; //////////////////////////// // SEI Info -///////////////////////////// +///////////////////////////// typedef struct sei_info { - int32_t recovery_point; - int32_t recovery_frame_num; - - int32_t capture_POC; - int32_t freeze_POC; - int32_t release_POC; // The POC which when reached will allow display update to re-commence - int32_t disp_frozen; // Indicates display is currently frozen - int32_t freeze_rep_period; - int32_t recovery_frame_cnt; - int32_t capture_fn; - int32_t recovery_fn; - int32_t broken_link; - int32_t scan_format; - int32_t broken_link_pic; -}sei_info, *sei_info_ptr; + int32_t recovery_point; + int32_t recovery_frame_num; + + int32_t capture_POC; + int32_t freeze_POC; + int32_t release_POC; // The POC which when reached will allow display update to re-commence + int32_t disp_frozen; // Indicates display is currently frozen + int32_t freeze_rep_period; + int32_t recovery_frame_cnt; + int32_t capture_fn; + int32_t recovery_fn; + int32_t broken_link; + int32_t scan_format; + int32_t broken_link_pic; +} sei_info, *sei_info_ptr; /*typedef struct _h264_SEI { diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk b/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk index 70f1388..d3e4910 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk @@ -18,9 +18,6 @@ LOCAL_SRC_FILES := \ LOCAL_CFLAGS := -DVBP -DHOST_ONLY LOCAL_C_INCLUDES := \ - $(GLIB_TOP) \ - $(GLIB_TOP)/android \ - $(GLIB_TOP)/glib \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/include \ @@ -30,7 +27,6 @@ LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libmixvbp_h264 LOCAL_SHARED_LIBRARIES := \ - libglib-2.0 \ libmixvbp include $(BUILD_SHARED_LIBRARY) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c index 180e7b6..e7dd6a7 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c @@ -24,43 +24,43 @@ h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t *UseDefaultScalingMatrix, h264_Info* pInfo) { - int32_t j, scanj; - int32_t delta_scale, lastScale, nextScale; + int32_t j, scanj; + int32_t delta_scale, lastScale, 
nextScale; #if 0 - const uint8_t ZZ_SCAN[16] = - { 0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15 - }; - - const uint8_t ZZ_SCAN8[64] = - { 0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5, - 12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28, - 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51, - 58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63 - }; + const uint8_t ZZ_SCAN[16] = + { 0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15 + }; + + const uint8_t ZZ_SCAN8[64] = + { 0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5, + 12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28, + 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51, + 58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63 + }; #endif - lastScale = 8; - nextScale = 8; - scanj = 0; + lastScale = 8; + nextScale = 8; + scanj = 0; - for(j=0; jSliceHeader; - - /////////////////////////////////////////////////// - // Reload SPS/PPS while - // 1) Start of Frame (in case of context switch) - // 2) PPS id changed - /////////////////////////////////////////////////// - if((SliceHeader->first_mb_in_slice == 0) || (SliceHeader->pic_parameter_id != pInfo->active_PPS.pic_parameter_set_id)) - { + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + + /////////////////////////////////////////////////// + // Reload SPS/PPS while + // 1) Start of Frame (in case of context switch) + // 2) PPS id changed + /////////////////////////////////////////////////// + if ((SliceHeader->first_mb_in_slice == 0) || (SliceHeader->pic_parameter_id != pInfo->active_PPS.pic_parameter_set_id)) + { #ifndef WIN32 - h264_Parse_Copy_Pps_From_DDR(pInfo, &pInfo->active_PPS, SliceHeader->pic_parameter_id); - - if(pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS) - { - return H264_PPS_INVALID_PIC_ID; /// Invalid PPS detected - } - - if(pInfo->active_PPS.seq_parameter_set_id != pInfo->active_SPS.seq_parameter_set_id) - { - pInfo->Is_SPS_updated =1; - h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id); - h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id); - } - else - { - if(h264_Parse_Check_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id)) - { - pInfo->Is_SPS_updated =1; - h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id); - h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id); - } - } - -#else - pInfo->active_PPS = PPS_GL[SliceHeader->pic_parameter_id]; - pInfo->active_SPS = SPS_GL[pInfo->active_PPS.seq_parameter_set_id]; + h264_Parse_Copy_Pps_From_DDR(pInfo, &pInfo->active_PPS, SliceHeader->pic_parameter_id); + + if (pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS) + { + return H264_PPS_INVALID_PIC_ID; /// Invalid PPS detected + } + + if (pInfo->active_PPS.seq_parameter_set_id != pInfo->active_SPS.seq_parameter_set_id) + { + pInfo->Is_SPS_updated =1; + h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id); + h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id); + } + else + { + if (h264_Parse_Check_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id)) + { + pInfo->Is_SPS_updated =1; + h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id); + h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id); + } + } + +#else + pInfo->active_PPS = 
PPS_GL[SliceHeader->pic_parameter_id]; + pInfo->active_SPS = SPS_GL[pInfo->active_PPS.seq_parameter_set_id]; #endif - - if(pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS) - { - return H264_PPS_INVALID_PIC_ID; //// Invalid SPS detected - } - } - else { - if((pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS) || (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS)) - { - return H264_PPS_INVALID_PIC_ID; /// Invalid PPS detected - } - } - - - pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); - //pInfo->img.PicHeightInMapUnits = (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1); - pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag? \ - (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1): \ - ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1)<<1); - - - return H264_STATUS_OK; + + if (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS) + { + return H264_PPS_INVALID_PIC_ID; //// Invalid SPS detected + } + } + else { + if ((pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS) || (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS)) + { + return H264_PPS_INVALID_PIC_ID; /// Invalid PPS detected + } + } + + + pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); + //pInfo->img.PicHeightInMapUnits = (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1); + pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag? \ + (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1): \ + ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1)<<1); + + + return H264_STATUS_OK; }; //// End of h264_active_par_set /* ------------------------------------------------------------------------------------------ */ @@ -135,76 +135,76 @@ h264_Status h264_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader /* ------------------------------------------------------------------------------------------ */ ////////////////////////////////////////////////// -// Parse slice header info +// Parse slice header info ////////////////////////////////////////////////// h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) { - h264_Status retStatus = H264_STATUS_ERROR; - - //////////////////////////////////////////////////// - //// Parse slice header info - //// Part1: not depend on the active PPS/SPS - //// Part2/3: depend on the active parset - ////////////////////////////////////////////////// - - //retStatus = h264_Parse_Slice_Header_1(pInfo); - - SliceHeader->sh_error = 0; - - if(h264_Parse_Slice_Header_1(parent, pInfo, SliceHeader) == H264_STATUS_OK) - { - ////////////////////////////////////////// - //// Active parameter set for this slice - ////////////////////////////////////////// - retStatus = h264_active_par_set(pInfo, SliceHeader); - } - - if(retStatus == H264_STATUS_OK) { - switch(pInfo->active_SPS.profile_idc) - { - case h264_ProfileBaseline: - case h264_ProfileMain: - case h264_ProfileExtended: - pInfo->active_PPS.transform_8x8_mode_flag=0; - pInfo->active_PPS.pic_scaling_matrix_present_flag =0; - pInfo->active_PPS.second_chroma_qp_index_offset = pInfo->active_PPS.chroma_qp_index_offset; - - default: - break; - } - - if( h264_Parse_Slice_Header_2(parent, pInfo, SliceHeader) != H264_STATUS_OK) - { - SliceHeader->sh_error |= 2; - } - else if( h264_Parse_Slice_Header_3(parent, pInfo, SliceHeader) != H264_STATUS_OK) - { - SliceHeader->sh_error |= 
4; - } - - } else { - SliceHeader->sh_error |= 1; - } - - - //if(SliceHeader->sh_error) { - //pInfo->wl_err_flag |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - //} - - - - ////////////////////////////////// - //// Parse slice data (MB loop) - ////////////////////////////////// - //retStatus = h264_Parse_Slice_Data(pInfo); - { - //uint32_t data = 0; - //if( viddec_pm_peek_bits(parent, &data, 32) == -1) - //retStatus = H264_STATUS_ERROR; - } - //h264_Parse_rbsp_trailing_bits(pInfo); - - return retStatus; + h264_Status retStatus = H264_STATUS_ERROR; + + //////////////////////////////////////////////////// + //// Parse slice header info + //// Part1: not depend on the active PPS/SPS + //// Part2/3: depend on the active parset + ////////////////////////////////////////////////// + + //retStatus = h264_Parse_Slice_Header_1(pInfo); + + SliceHeader->sh_error = 0; + + if (h264_Parse_Slice_Header_1(parent, pInfo, SliceHeader) == H264_STATUS_OK) + { + ////////////////////////////////////////// + //// Active parameter set for this slice + ////////////////////////////////////////// + retStatus = h264_active_par_set(pInfo, SliceHeader); + } + + if (retStatus == H264_STATUS_OK) { + switch (pInfo->active_SPS.profile_idc) + { + case h264_ProfileBaseline: + case h264_ProfileMain: + case h264_ProfileExtended: + pInfo->active_PPS.transform_8x8_mode_flag=0; + pInfo->active_PPS.pic_scaling_matrix_present_flag =0; + pInfo->active_PPS.second_chroma_qp_index_offset = pInfo->active_PPS.chroma_qp_index_offset; + + default: + break; + } + + if ( h264_Parse_Slice_Header_2(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + SliceHeader->sh_error |= 2; + } + else if ( h264_Parse_Slice_Header_3(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + SliceHeader->sh_error |= 4; + } + + } else { + SliceHeader->sh_error |= 1; + } + + + //if(SliceHeader->sh_error) { + //pInfo->wl_err_flag |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + //} + + + + ////////////////////////////////// + //// Parse slice data (MB loop) + ////////////////////////////////// + //retStatus = h264_Parse_Slice_Data(pInfo); + { + //uint32_t data = 0; + //if( viddec_pm_peek_bits(parent, &data, 32) == -1) + //retStatus = H264_STATUS_ERROR; + } + //h264_Parse_rbsp_trailing_bits(pInfo); + + return retStatus; } @@ -215,34 +215,34 @@ h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_ h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc) { - h264_Status ret = H264_STATUS_ERROR; + h264_Status ret = H264_STATUS_ERROR; - //h264_NAL_Unit_t* NAL = &pInfo->NAL; - uint32_t code; + //h264_NAL_Unit_t* NAL = &pInfo->NAL; + uint32_t code; #if 0 - viddec_pm_get_bits(void * parent,uint32_t * data,uint32_t num_bits)(parent, &code, 24); - viddec_pm_get_bits(parent, &code, 1); //forbidden_zero_bit + viddec_pm_get_bits(void * parent,uint32_t * data,uint32_t num_bits)(parent, &code, 24); + viddec_pm_get_bits(parent, &code, 1); //forbidden_zero_bit - viddec_pm_get_bits(parent, &code, 2); - SliceHeader->nal_ref_idc = (uint8_t)code; + viddec_pm_get_bits(parent, &code, 2); + SliceHeader->nal_ref_idc = (uint8_t)code; - viddec_pm_get_bits(parent, &code, 5); - pInfo->nal_unit_type = (uint8_t)code; + viddec_pm_get_bits(parent, &code, 5); + pInfo->nal_unit_type = (uint8_t)code; #else -#ifdef VBP - if( viddec_pm_get_bits(parent, &code, 8) != -1) +#ifdef VBP + if ( viddec_pm_get_bits(parent, &code, 8) != -1) #else - //// 24bit SC, 1 bit: forbidden_zero_bit, 2 bitrs: nal_ref_idc, 5 bits: nal_unit_type - if( viddec_pm_get_bits(parent, &code, 
32) != -1) -#endif - { - *nal_ref_idc = (uint8_t)((code>>5)&0x3); - pInfo->nal_unit_type = (uint8_t)((code>>0)&0x1f); - ret = H264_STATUS_OK; - } + //// 24bit SC, 1 bit: forbidden_zero_bit, 2 bitrs: nal_ref_idc, 5 bits: nal_unit_type + if ( viddec_pm_get_bits(parent, &code, 32) != -1) +#endif + { + *nal_ref_idc = (uint8_t)((code>>5)&0x3); + pInfo->nal_unit_type = (uint8_t)((code>>0)&0x1f); + ret = H264_STATUS_OK; + } #endif - return ret; + return ret; } @@ -267,61 +267,61 @@ h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref void h264_init_old_slice(h264_Info* pInfo) { - pInfo->SliceHeader.field_pic_flag = 0; + pInfo->SliceHeader.field_pic_flag = 0; - pInfo->SliceHeader.pic_parameter_id = 0xFF; + pInfo->SliceHeader.pic_parameter_id = 0xFF; - pInfo->SliceHeader.frame_num = INT_MAX; + pInfo->SliceHeader.frame_num = INT_MAX; - pInfo->SliceHeader.nal_ref_idc = 0xFF; + pInfo->SliceHeader.nal_ref_idc = 0xFF; - pInfo->SliceHeader.idr_flag = 0; + pInfo->SliceHeader.idr_flag = 0; - pInfo->SliceHeader.pic_order_cnt_lsb = UINT_MAX; - pInfo->SliceHeader.delta_pic_order_cnt_bottom = INT_MAX; + pInfo->SliceHeader.pic_order_cnt_lsb = UINT_MAX; + pInfo->SliceHeader.delta_pic_order_cnt_bottom = INT_MAX; - pInfo->SliceHeader.delta_pic_order_cnt[0] = INT_MAX; - pInfo->SliceHeader.delta_pic_order_cnt[1] = INT_MAX; + pInfo->SliceHeader.delta_pic_order_cnt[0] = INT_MAX; + pInfo->SliceHeader.delta_pic_order_cnt[1] = INT_MAX; - return; + return; } void h264_init_img(h264_Info* pInfo) { - h264_memset(&(pInfo->img), 0x0, sizeof(h264_img_par) ); + h264_memset(&(pInfo->img), 0x0, sizeof(h264_img_par) ); + - - return; + return; } void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem) { int32_t i; - + h264_Info * pInfo = &(parser->info); - + parser->sps_pps_ddr_paddr = (uint32_t)persist_mem; - - pInfo->SPS_PADDR_GL = parser->sps_pps_ddr_paddr; - pInfo->PPS_PADDR_GL = pInfo->SPS_PADDR_GL + MAX_NUM_SPS * sizeof(seq_param_set_all); + + pInfo->SPS_PADDR_GL = parser->sps_pps_ddr_paddr; + pInfo->PPS_PADDR_GL = pInfo->SPS_PADDR_GL + MAX_NUM_SPS * sizeof(seq_param_set_all); pInfo->OFFSET_REF_FRAME_PADDR_GL = pInfo->PPS_PADDR_GL + MAX_NUM_PPS * sizeof(pic_param_set); - pInfo->TMP_OFFSET_REFFRM_PADDR_GL = pInfo->OFFSET_REF_FRAME_PADDR_GL + - MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; + pInfo->TMP_OFFSET_REFFRM_PADDR_GL = pInfo->OFFSET_REF_FRAME_PADDR_GL + + MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; h264_memset( &(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used) ); h264_memset( &(pInfo->active_PPS), 0x0, sizeof(pic_param_set) ); /* Global for SPS & PPS */ - for(i=0;iactive_SPS.seq_parameter_set_id = 0xff; + pInfo->active_SPS.seq_parameter_set_id = 0xff; h264_Parse_Copy_Sps_To_DDR (pInfo, &(pInfo->active_SPS), i); } - for(i=0;iactive_PPS.seq_parameter_set_id = 0xff; + pInfo->active_PPS.seq_parameter_set_id = 0xff; h264_Parse_Copy_Pps_To_DDR (pInfo, &(pInfo->active_PPS), i); } @@ -344,14 +344,14 @@ void h264_init_Info_under_sps_pps_level(h264_Info* pInfo) h264_memset( &(pInfo->img), 0x0, sizeof(h264_img_par) ); pInfo->h264_list_replacement = 0; - + pInfo->h264_pwt_start_byte_offset = 0; pInfo->h264_pwt_start_bit_offset = 0; pInfo->h264_pwt_end_byte_offset = 0; pInfo->h264_pwt_end_bit_offset = 0; pInfo->h264_pwt_enabled = 0; - for(i=0;i<32;i++) + for (i=0; i<32; i++) { pInfo->slice_ref_list0[i] = 0; pInfo->slice_ref_list1[i] = 0; @@ -370,20 +370,20 @@ void h264_init_Info_under_sps_pps_level(h264_Info* pInfo) 
pInfo->is_frame_boundary_detected_by_non_slice_nal = 0; pInfo->is_frame_boundary_detected_by_slice_nal = 0; pInfo->is_current_workload_done = 0; - + pInfo->sei_rp_received = 0; pInfo->last_I_frame_idc = 255; pInfo->wl_err_curr = 0; pInfo->wl_err_next = 0; pInfo->primary_pic_type_plus_one = 0; - pInfo->sei_b_state_ready = 0; - + pInfo->sei_b_state_ready = 0; + /* Init old slice structure */ - h264_init_old_slice(pInfo); + h264_init_old_slice(pInfo); /* init_dpb */ - h264_init_dpb(&(pInfo->dpb)); + h264_init_dpb(&(pInfo->dpb)); /* init_sei */ h264_sei_stream_initialise(pInfo); @@ -391,73 +391,73 @@ void h264_init_Info_under_sps_pps_level(h264_Info* pInfo) } void h264_init_Info(h264_Info* pInfo) -{ - h264_memset(pInfo, 0x0, sizeof(h264_Info)); +{ + h264_memset(pInfo, 0x0, sizeof(h264_Info)); pInfo->old_nal_unit_type = 0xff; pInfo->Is_first_frame_in_stream =1; - pInfo->img.frame_count = 0; - pInfo->last_I_frame_idc = 255; - + pInfo->img.frame_count = 0; + pInfo->last_I_frame_idc = 255; + return; } - - /* ------------------------------------------------------------------------------------------ */ + +/* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ ///////////////////////////////////////////////////// // -// Judge whether it is the first VCL of a new picture +// Judge whether it is the first VCL of a new picture // ///////////////////////////////////////////////////// - int32_t h264_is_second_field(h264_Info * pInfo) - { +int32_t h264_is_second_field(h264_Info * pInfo) +{ h264_Slice_Header_t cur_slice = pInfo->SliceHeader; OldSliceParams old_slice = pInfo->old_slice; - + int result = 0; - + //pInfo->img.second_field = 0; - + /// is it second field? 
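    /*
     * h264_is_second_field() decides whether the incoming field slice
     * completes the frame store currently being decoded. Reading the nested
     * conditions below: the slice counts as a second field only when
     * (a) a frame store is open (fs_dec_idc != MPD_DPB_FS_NULL_IDC) and holds
     *     exactly one field (is_used is neither 0 nor 3),
     * (b) frame_num matches the previous slice, or the slice is IDR,
     * (c) the two fields have opposite parity (one TOP_FIELD, one
     *     BOTTOM_FIELD), and
     * (d) both fields agree on reference status (nal_ref_idc both zero or
     *     both nonzero).
     */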
- + //OS_INFO( "xxx is_used = %d\n", pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].is_used); - - if (cur_slice.structure != FRAME) + + if (cur_slice.structure != FRAME) { - if( ( MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)&&(3 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) ) - &&(0 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) )) - { - if ((cur_slice.frame_num == old_slice.frame_num)||(cur_slice.idr_flag)) - { - - if(old_slice.structure != cur_slice.structure) - { - - if (((cur_slice.structure == TOP_FIELD &&old_slice.structure == BOTTOM_FIELD) || // Condition 1: - (old_slice.structure == TOP_FIELD && cur_slice.structure == BOTTOM_FIELD)) && \ - ((old_slice.nal_ref_idc ==0 && cur_slice.nal_ref_idc == 0) || // Condition 2: - (old_slice.nal_ref_idc !=0 &&cur_slice.nal_ref_idc != 0))) + if ( ( MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)&&(3 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) ) + &&(0 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) )) + { + if ((cur_slice.frame_num == old_slice.frame_num)||(cur_slice.idr_flag)) + { + + if (old_slice.structure != cur_slice.structure) { - //pInfo->img.second_field = 1; - result = 1; - } - } - } - - - } - - + + if (((cur_slice.structure == TOP_FIELD &&old_slice.structure == BOTTOM_FIELD) || // Condition 1: + (old_slice.structure == TOP_FIELD && cur_slice.structure == BOTTOM_FIELD)) && \ + ((old_slice.nal_ref_idc ==0 && cur_slice.nal_ref_idc == 0) || // Condition 2: + (old_slice.nal_ref_idc !=0 &&cur_slice.nal_ref_idc != 0))) + { + //pInfo->img.second_field = 1; + result = 1; + } + } + } + + + } + + } - - - + + + return result; - - } //// End of h264_is_second_field + +} //// End of h264_is_second_field @@ -467,86 +467,86 @@ void h264_init_Info(h264_Info* pInfo) int32_t h264_is_new_picture_start(h264_Info * pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice) { - int result = 0; + int result = 0; + + if (pInfo->number_of_first_au_info_nal_before_first_slice) + { + pInfo->number_of_first_au_info_nal_before_first_slice = 0; + return 1; + } + + + + result |= (old_slice.pic_parameter_id != cur_slice.pic_parameter_id); + result |= (old_slice.frame_num != cur_slice.frame_num); + result |= (old_slice.field_pic_flag != cur_slice.field_pic_flag); + if (cur_slice.field_pic_flag && old_slice.field_pic_flag) + { + result |= (old_slice.bottom_field_flag != cur_slice.bottom_field_flag); + } + + result |= (old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \ + ((old_slice.nal_ref_idc == 0) || (cur_slice.nal_ref_idc == 0)); + result |= ( old_slice.idr_flag != cur_slice.idr_flag); + + if (cur_slice.idr_flag && old_slice.idr_flag) + { + result |= (old_slice.idr_pic_id != cur_slice.idr_pic_id); + } - if(pInfo->number_of_first_au_info_nal_before_first_slice) + if (pInfo->active_SPS.pic_order_cnt_type == 0) { - pInfo->number_of_first_au_info_nal_before_first_slice = 0; - return 1; + result |= (old_slice.pic_order_cnt_lsb != cur_slice.pic_order_cnt_lsb); + result |= (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom); } - - - result |= (old_slice.pic_parameter_id != cur_slice.pic_parameter_id); - result |= (old_slice.frame_num != cur_slice.frame_num); - result |= (old_slice.field_pic_flag != cur_slice.field_pic_flag); - if(cur_slice.field_pic_flag && old_slice.field_pic_flag) - { - result |= (old_slice.bottom_field_flag != cur_slice.bottom_field_flag); - } - - result |= (old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \ - ((old_slice.nal_ref_idc == 0) 
|| (cur_slice.nal_ref_idc == 0)); - result |= ( old_slice.idr_flag != cur_slice.idr_flag); - - if (cur_slice.idr_flag && old_slice.idr_flag) - { - result |= (old_slice.idr_pic_id != cur_slice.idr_pic_id); - } - - if (pInfo->active_SPS.pic_order_cnt_type == 0) - { - result |= (old_slice.pic_order_cnt_lsb != cur_slice.pic_order_cnt_lsb); - result |= (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom); - } - - if (pInfo->active_SPS.pic_order_cnt_type == 1) - { - result |= (old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]); - result |= (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1]); - } - - return result; + if (pInfo->active_SPS.pic_order_cnt_type == 1) + { + result |= (old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]); + result |= (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1]); + } + + return result; } int32_t h264_check_previous_frame_end(h264_Info * pInfo) { - int result = 0; - - if( (h264_NAL_UNIT_TYPE_SLICE==pInfo->old_nal_unit_type)||(h264_NAL_UNIT_TYPE_IDR==pInfo->old_nal_unit_type) ) - { - - switch ( pInfo->nal_unit_type ) - { - case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: - case h264_NAL_UNIT_TYPE_SPS: - case h264_NAL_UNIT_TYPE_PPS: - case h264_NAL_UNIT_TYPE_SEI: - case h264_NAL_UNIT_TYPE_EOSeq: - case h264_NAL_UNIT_TYPE_EOstream: - case h264_NAL_UNIT_TYPE_Reserved1: - case h264_NAL_UNIT_TYPE_Reserved2: - case h264_NAL_UNIT_TYPE_Reserved3: - case h264_NAL_UNIT_TYPE_Reserved4: - case h264_NAL_UNIT_TYPE_Reserved5: - { - pInfo->img.current_slice_num = 0; - - if((pInfo->img.structure == FRAME) || (pInfo->img.second_field)) { - pInfo->is_frame_boundary_detected_by_non_slice_nal =1; - pInfo->is_current_workload_done=1; - result=1; - } - break; - } - default: - break; - } - - } - - return result; + int result = 0; + + if ( (h264_NAL_UNIT_TYPE_SLICE==pInfo->old_nal_unit_type)||(h264_NAL_UNIT_TYPE_IDR==pInfo->old_nal_unit_type) ) + { + + switch ( pInfo->nal_unit_type ) + { + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + case h264_NAL_UNIT_TYPE_SPS: + case h264_NAL_UNIT_TYPE_PPS: + case h264_NAL_UNIT_TYPE_SEI: + case h264_NAL_UNIT_TYPE_EOSeq: + case h264_NAL_UNIT_TYPE_EOstream: + case h264_NAL_UNIT_TYPE_Reserved1: + case h264_NAL_UNIT_TYPE_Reserved2: + case h264_NAL_UNIT_TYPE_Reserved3: + case h264_NAL_UNIT_TYPE_Reserved4: + case h264_NAL_UNIT_TYPE_Reserved5: + { + pInfo->img.current_slice_num = 0; + + if ((pInfo->img.structure == FRAME) || (pInfo->img.second_field)) { + pInfo->is_frame_boundary_detected_by_non_slice_nal =1; + pInfo->is_current_workload_done=1; + result=1; + } + break; + } + default: + break; + } + + } + + return result; } @@ -562,43 +562,43 @@ int32_t h264_check_previous_frame_end(h264_Info * pInfo) ////////////////////////////////////////////////////////////// void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader) { - pInfo->old_slice.pic_parameter_id = pInfo->SliceHeader.pic_parameter_id; + pInfo->old_slice.pic_parameter_id = pInfo->SliceHeader.pic_parameter_id; - pInfo->old_slice.frame_num = pInfo->SliceHeader.frame_num; + pInfo->old_slice.frame_num = pInfo->SliceHeader.frame_num; - pInfo->old_slice.field_pic_flag = pInfo->SliceHeader.field_pic_flag; + pInfo->old_slice.field_pic_flag = pInfo->SliceHeader.field_pic_flag; - if(pInfo->SliceHeader.field_pic_flag) - { - pInfo->old_slice.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag; - } + if (pInfo->SliceHeader.field_pic_flag) + { + pInfo->old_slice.bottom_field_flag = 
pInfo->SliceHeader.bottom_field_flag; + } - pInfo->old_slice.nal_ref_idc = pInfo->SliceHeader.nal_ref_idc; + pInfo->old_slice.nal_ref_idc = pInfo->SliceHeader.nal_ref_idc; - pInfo->old_slice.structure = pInfo->SliceHeader.structure; + pInfo->old_slice.structure = pInfo->SliceHeader.structure; - pInfo->old_slice.idr_flag = pInfo->SliceHeader.idr_flag; - if (pInfo->SliceHeader.idr_flag) - { - pInfo->old_slice.idr_pic_id = pInfo->SliceHeader.idr_pic_id; - } + pInfo->old_slice.idr_flag = pInfo->SliceHeader.idr_flag; + if (pInfo->SliceHeader.idr_flag) + { + pInfo->old_slice.idr_pic_id = pInfo->SliceHeader.idr_pic_id; + } - if (pInfo->active_SPS.pic_order_cnt_type == 0) - { - pInfo->old_slice.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb; - pInfo->old_slice.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom; - } + if (pInfo->active_SPS.pic_order_cnt_type == 0) + { + pInfo->old_slice.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb; + pInfo->old_slice.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom; + } - if (pInfo->active_SPS.pic_order_cnt_type == 1) - { - pInfo->old_slice.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0]; - pInfo->old_slice.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1]; - } + if (pInfo->active_SPS.pic_order_cnt_type == 1) + { + pInfo->old_slice.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0]; + pInfo->old_slice.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1]; + } - ////////////////////////////// Next to current - memcpy(&pInfo->SliceHeader, &next_SliceHeader, sizeof(h264_Slice_Header_t)); + ////////////////////////////// Next to current + memcpy(&pInfo->SliceHeader, &next_SliceHeader, sizeof(h264_Slice_Header_t)); - return; + return; } /* ------------------------------------------------------------------------------------------ */ @@ -610,59 +610,59 @@ void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeade ////////////////////////////////////////////////////////////////////////////// void h264_update_img_info(h264_Info * pInfo ) { - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - pInfo->img.frame_num = pInfo->SliceHeader.frame_num; - pInfo->img.structure = pInfo->SliceHeader.structure; + pInfo->img.frame_num = pInfo->SliceHeader.frame_num; + pInfo->img.structure = pInfo->SliceHeader.structure; - pInfo->img.field_pic_flag = pInfo->SliceHeader.field_pic_flag; - pInfo->img.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag; - - pInfo->img.MbaffFrameFlag = pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag)); - pInfo->img.pic_order_cnt_type = pInfo->active_SPS.pic_order_cnt_type; + pInfo->img.field_pic_flag = pInfo->SliceHeader.field_pic_flag; + pInfo->img.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag; - if(pInfo->img.pic_order_cnt_type == 1) { - pInfo->img.num_ref_frames_in_pic_order_cnt_cycle = pInfo->active_SPS.num_ref_frames_in_pic_order_cnt_cycle; - pInfo->img.delta_pic_order_always_zero_flag = pInfo->active_SPS.delta_pic_order_always_zero_flag; - pInfo->img.offset_for_non_ref_pic = pInfo->active_SPS.offset_for_non_ref_pic; - pInfo->img.offset_for_top_to_bottom_field = pInfo->active_SPS.offset_for_top_to_bottom_field; - } + pInfo->img.MbaffFrameFlag = pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag)); + pInfo->img.pic_order_cnt_type = 
pInfo->active_SPS.pic_order_cnt_type; - pInfo->img.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb; - //pInfo->img.pic_order_cnt_msb = pInfo->SliceHeader.pic_order_cnt_msb; - pInfo->img.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom; - pInfo->img.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0]; - pInfo->img.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1]; + if (pInfo->img.pic_order_cnt_type == 1) { + pInfo->img.num_ref_frames_in_pic_order_cnt_cycle = pInfo->active_SPS.num_ref_frames_in_pic_order_cnt_cycle; + pInfo->img.delta_pic_order_always_zero_flag = pInfo->active_SPS.delta_pic_order_always_zero_flag; + pInfo->img.offset_for_non_ref_pic = pInfo->active_SPS.offset_for_non_ref_pic; + pInfo->img.offset_for_top_to_bottom_field = pInfo->active_SPS.offset_for_top_to_bottom_field; + } + pInfo->img.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb; + //pInfo->img.pic_order_cnt_msb = pInfo->SliceHeader.pic_order_cnt_msb; + pInfo->img.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom; + pInfo->img.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0]; + pInfo->img.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1]; - pInfo->img.PreviousFrameNum = pInfo->old_slice.frame_num; - pInfo->img.no_output_of_prior_pics_flag = pInfo->SliceHeader.sh_dec_refpic.no_output_of_prior_pics_flag; + pInfo->img.PreviousFrameNum = pInfo->old_slice.frame_num; - ////////////////////////////////////////////////// Check SEI recovery point - if (pInfo->sei_information.recovery_point) { - int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); - pInfo->sei_information.recovery_frame_num = (pInfo->img.frame_num + pInfo->sei_information.recovery_frame_cnt) % MaxFrameNum; - } + pInfo->img.no_output_of_prior_pics_flag = pInfo->SliceHeader.sh_dec_refpic.no_output_of_prior_pics_flag; - if (pInfo->SliceHeader.idr_flag) - pInfo->sei_information.recovery_frame_num = pInfo->img.frame_num; + ////////////////////////////////////////////////// Check SEI recovery point + if (pInfo->sei_information.recovery_point) { + int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); + pInfo->sei_information.recovery_frame_num = (pInfo->img.frame_num + pInfo->sei_information.recovery_frame_cnt) % MaxFrameNum; + } - - - /////////////////////////////////////////////////Resolution Change - pInfo->img.curr_has_mmco_5 = 0; + if (pInfo->SliceHeader.idr_flag) + pInfo->sei_information.recovery_frame_num = pInfo->img.frame_num; - if ( (pInfo->img.PicWidthInMbs != p_dpb->PicWidthInMbs)|| - (pInfo->img.FrameHeightInMbs != p_dpb->FrameHeightInMbs) ) - { - int32_t no_output_old_pics = (pInfo->SliceHeader.idr_flag)? pInfo->img.no_output_of_prior_pics_flag : 0; - // If resolution changed, reset the soft DPB here - h264_dpb_reset_dpb(pInfo, pInfo->img.PicWidthInMbs, pInfo->img.FrameHeightInMbs, 1, no_output_old_pics); - } - return; + /////////////////////////////////////////////////Resolution Change + pInfo->img.curr_has_mmco_5 = 0; + + if ( (pInfo->img.PicWidthInMbs != p_dpb->PicWidthInMbs)|| + (pInfo->img.FrameHeightInMbs != p_dpb->FrameHeightInMbs) ) + { + int32_t no_output_old_pics = (pInfo->SliceHeader.idr_flag)? 
pInfo->img.no_output_of_prior_pics_flag : 0; + + // If resolution changed, reset the soft DPB here + h264_dpb_reset_dpb(pInfo, pInfo->img.PicWidthInMbs, pInfo->img.FrameHeightInMbs, 1, no_output_old_pics); + } + + return; } ///// End of init new frame @@ -670,124 +670,124 @@ void h264_update_img_info(h264_Info * pInfo ) void h264_update_frame_type(h264_Info * pInfo ) { -//update frame type - if(pInfo->img.structure == FRAME) - { - if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) - { - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_IDR << FRAME_TYPE_FRAME_OFFSET); - //pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = 0xff; - //pInfo->dpb.fs[0].pic_type = pInfo->dpb.fs_dec_idc; - - } - else - { - #if 1 - switch(pInfo->SliceHeader.slice_type) - { +//update frame type + if (pInfo->img.structure == FRAME) + { + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_IDR << FRAME_TYPE_FRAME_OFFSET); + //pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = 0xff; + //pInfo->dpb.fs[0].pic_type = pInfo->dpb.fs_dec_idc; + + } + else + { +#if 1 + switch (pInfo->SliceHeader.slice_type) + { case h264_PtypeB: - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_B << FRAME_TYPE_FRAME_OFFSET); - break; + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_B << FRAME_TYPE_FRAME_OFFSET); + break; case h264_PtypeSP: case h264_PtypeP: - if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET != FRAME_TYPE_B) - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_P << FRAME_TYPE_FRAME_OFFSET); - break; + if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET != FRAME_TYPE_B) + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_P << FRAME_TYPE_FRAME_OFFSET); + break; case h264_PtypeI: case h264_PtypeSI: - if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET == FRAME_TYPE_INVALID) - { - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_I << FRAME_TYPE_FRAME_OFFSET); - } - pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc; - - break; + if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET == FRAME_TYPE_INVALID) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_I << FRAME_TYPE_FRAME_OFFSET); + } + pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc; + + break; default: - break; - - } - #endif - - } - - } - else if(pInfo->img.structure == TOP_FIELD) - { - if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) - { - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));; - } - else - { - switch(pInfo->SliceHeader.slice_type) - { + break; + + } +#endif + + } + + } + else if (pInfo->img.structure == TOP_FIELD) + { + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));; + } + else + 
{ + switch (pInfo->SliceHeader.slice_type) + { case h264_PtypeB: - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); - break; + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); + break; case h264_PtypeSP: case h264_PtypeP: - if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET != FRAME_TYPE_B) - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); - break; + if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET != FRAME_TYPE_B) + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); + break; case h264_PtypeI: case h264_PtypeSI: - if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET == FRAME_TYPE_INVALID) - { - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); - } - if (pInfo->sei_rp_received) - pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc; - else - pInfo->last_I_frame_idc = 255; - break; + if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET == FRAME_TYPE_INVALID) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); + } + if (pInfo->sei_rp_received) + pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc; + else + pInfo->last_I_frame_idc = 255; + break; default: - break; + break; - } - - } - - - }else if(pInfo->img.structure == BOTTOM_FIELD) - { - if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) - { - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));; - } - else - { - switch(pInfo->SliceHeader.slice_type) - { + } + + } + + + } else if (pInfo->img.structure == BOTTOM_FIELD) + { + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));; + } + else + { + switch (pInfo->SliceHeader.slice_type) + { case h264_PtypeB: - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); - break; + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); + break; case h264_PtypeSP: case h264_PtypeP: - if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET != FRAME_TYPE_B) - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); - break; + if ( 
((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET != FRAME_TYPE_B) + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); + break; case h264_PtypeI: case h264_PtypeSI: - if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET == FRAME_TYPE_INVALID) - { - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); + if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET == FRAME_TYPE_INVALID) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); + } + if (pInfo->sei_rp_received) + pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc + PUT_LIST_INDEX_FIELD_BIT(1); + else + pInfo->last_I_frame_idc = 255; + + break; + default: + break; + } - if (pInfo->sei_rp_received) - pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc + PUT_LIST_INDEX_FIELD_BIT(1); - else - pInfo->last_I_frame_idc = 255; - break; - default: - break; + } + + } + return; - } - - } - - } - return; - } diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c index dbbe5c6..40c7559 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c @@ -24,7 +24,7 @@ @param cxt : Buffer adress & size are part inputs, the cxt is updated with codeNum & sign on sucess. Assumption: codeNum is a max of 32 bits - + @retval 1 : Sucessfuly found a code num, cxt is updated with codeNum, sign, and size of code. @retval 0 : Couldn't find a code in the current buffer. be freed. @@ -32,131 +32,131 @@ uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo) { - int32_t leadingZeroBits= 0; - uint32_t temp = 0, match = 0, noOfBits = 0, count = 0; - uint32_t codeNum =0; - uint32_t bits_offset =0, byte_offset =0; - uint8_t is_emul =0; - uint8_t is_first_byte = 1; - uint32_t length =0; - uint32_t bits_need_add_in_first_byte =0; - int32_t bits_operation_result=0; - - //remove warning - pInfo = pInfo; - - ////// Step 1: parse through zero bits until we find a bit with value 1. - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - - - while(!match) - { - if ((bits_offset != 0) && ( is_first_byte == 1)) - { - //we handle byte at a time, if we have offset then for first - // byte handle only 8 - offset bits - noOfBits = (uint8_t)(8 - bits_offset); - bits_operation_result = viddec_pm_peek_bits(parent, &temp, noOfBits); - - - temp = (temp << bits_offset); - if(temp!=0) - { - bits_need_add_in_first_byte = bits_offset; - } - is_first_byte =0; - } - else - { - noOfBits = 8;/* always 8 bits as we read a byte at a time */ - bits_operation_result = viddec_pm_peek_bits(parent, &temp, 8); - - } - - if(-1==bits_operation_result) - { - return MAX_INT32_VALUE; - } - - if(temp != 0) - { - // if byte!=0 we have at least one bit with value 1. - count=1; - while(((temp & 0x80) != 0x80) && (count <= noOfBits)) - { - count++; - temp = temp <<1; - } - //At this point we get the bit position of 1 in current byte(count). 
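h264_update_frame_type() above packs the picture type of each parity into the single pic_type byte of a frame store: a 3-bit type code (masked with 0x7) at FRAME_TYPE_TOP_OFFSET and FRAME_TYPE_BOTTOM_OFFSET (or FRAME_TYPE_FRAME_OFFSET for frame pictures), plus a coded-as-frame flag at FRAME_TYPE_STRUCTRUE_OFFSET. The sketch below illustrates that read-modify-write pattern; the offset and type-code values are assumptions for illustration only, since the defining header (h264.h) is not part of this hunk.

#include <stdint.h>

/* Assumed placements; the parser's real constants come from h264.h. */
#define FRAME_TYPE_FRAME_OFFSET     0
#define FRAME_TYPE_TOP_OFFSET       0
#define FRAME_TYPE_BOTTOM_OFFSET    3
#define FRAME_TYPE_STRUCTRUE_OFFSET 6   /* spelling kept from the source */

/* Type codes are likewise assumed orderings for this sketch. */
enum { FRAME_TYPE_INVALID, FRAME_TYPE_IDR, FRAME_TYPE_I, FRAME_TYPE_P, FRAME_TYPE_B };

/* Set the top-field type while preserving the bottom-field bits:
 * the same read-modify-write the TOP_FIELD branch performs. */
static uint8_t set_top_type(uint8_t pic_type, uint8_t t)
{
    return (uint8_t)((t << FRAME_TYPE_TOP_OFFSET) |
                     (pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)));
}

/* Extract a 3-bit type field from pic_type. */
static uint8_t get_type(uint8_t pic_type, int offset)
{
    return (uint8_t)((pic_type >> offset) & 0x7);
}

The P/SP cases in the hunk use the same 3-bit extraction as a guard, so a slot already marked FRAME_TYPE_B is never downgraded: a picture containing any B slice stays typed as B.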
- - match = 1; - leadingZeroBits += count; - } - else - { - // we don't have a 1 in current byte - leadingZeroBits += noOfBits; - } - - if(!match) - { - //actually move the bitoff by viddec_pm_get_bits - viddec_pm_get_bits(parent, &temp, noOfBits); - } - else - { - //actually move the bitoff by viddec_pm_get_bits - viddec_pm_get_bits(parent, &temp, count); - } - - } - ////// step 2: Now read the next (leadingZeroBits-1) bits to get the encoded value. - - - if(match) - { - - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - /* bit position in current byte */ - //count = (uint8_t)((leadingZeroBits + bits_offset)& 0x7); - count = ((count + bits_need_add_in_first_byte)& 0x7); - - leadingZeroBits --; - length = leadingZeroBits; - codeNum = 0; - noOfBits = 8 - count; - - - while(leadingZeroBits > 0) - { - if(noOfBits < (uint32_t)leadingZeroBits) - { - viddec_pm_get_bits(parent, &temp, noOfBits); - - - codeNum = (codeNum << noOfBits) | temp; - leadingZeroBits -= noOfBits; - } - else - { - viddec_pm_get_bits(parent, &temp, leadingZeroBits); - - codeNum = (codeNum << leadingZeroBits) | temp; - leadingZeroBits = 0; - } - - - noOfBits = 8; - } - // update codeNum = 2 ** (leadingZeroBits) -1 + read_bits(leadingZeroBits). - codeNum = codeNum + (1 << length) -1; + int32_t leadingZeroBits= 0; + uint32_t temp = 0, match = 0, noOfBits = 0, count = 0; + uint32_t codeNum =0; + uint32_t bits_offset =0, byte_offset =0; + uint8_t is_emul =0; + uint8_t is_first_byte = 1; + uint32_t length =0; + uint32_t bits_need_add_in_first_byte =0; + int32_t bits_operation_result=0; + + //remove warning + pInfo = pInfo; + + ////// Step 1: parse through zero bits until we find a bit with value 1. + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + + while (!match) + { + if ((bits_offset != 0) && ( is_first_byte == 1)) + { + //we handle byte at a time, if we have offset then for first + // byte handle only 8 - offset bits + noOfBits = (uint8_t)(8 - bits_offset); + bits_operation_result = viddec_pm_peek_bits(parent, &temp, noOfBits); + + + temp = (temp << bits_offset); + if (temp!=0) + { + bits_need_add_in_first_byte = bits_offset; + } + is_first_byte =0; + } + else + { + noOfBits = 8;/* always 8 bits as we read a byte at a time */ + bits_operation_result = viddec_pm_peek_bits(parent, &temp, 8); + + } + + if (-1==bits_operation_result) + { + return MAX_INT32_VALUE; + } + + if (temp != 0) + { + // if byte!=0 we have at least one bit with value 1. + count=1; + while (((temp & 0x80) != 0x80) && (count <= noOfBits)) + { + count++; + temp = temp <<1; + } + //At this point we get the bit position of 1 in current byte(count). + + match = 1; + leadingZeroBits += count; + } + else + { + // we don't have a 1 in current byte + leadingZeroBits += noOfBits; + } + + if (!match) + { + //actually move the bitoff by viddec_pm_get_bits + viddec_pm_get_bits(parent, &temp, noOfBits); + } + else + { + //actually move the bitoff by viddec_pm_get_bits + viddec_pm_get_bits(parent, &temp, count); + } + + } + ////// step 2: Now read the next (leadingZeroBits-1) bits to get the encoded value. 
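The two steps of h264_get_codeNum() are a standard ue(v) Exp-Golomb decode: count leadingZeroBits up to the first 1 bit, then read leadingZeroBits more bits and form codeNum = 2^leadingZeroBits - 1 + suffix; h264_GetVLCElement() then optionally applies the se(v) sign mapping. Below is a minimal self-contained sketch of the same decode, written against a trivial in-memory bit reader rather than the viddec_pm_* accessors, returning UINT32_MAX on underflow where the parser returns MAX_INT32_VALUE, and assuming emulation-prevention bytes were already stripped.

#include <stdint.h>

typedef struct { const uint8_t *buf; uint32_t bitpos; uint32_t nbits; } bitreader;

/* MSB-first single-bit read; returns -1 when the buffer is exhausted. */
static int read_bit(bitreader *br)
{
    if (br->bitpos >= br->nbits) return -1;
    int b = (br->buf[br->bitpos >> 3] >> (7 - (br->bitpos & 7))) & 1;
    br->bitpos++;
    return b;
}

/* ue(v): leadingZeroBits zeros, a 1, then leadingZeroBits suffix bits. */
static uint32_t read_ue(bitreader *br)
{
    int lz = 0, b;
    while ((b = read_bit(br)) == 0) lz++;
    if (b < 0) return UINT32_MAX;               /* out of data */
    uint32_t suffix = 0;
    for (int i = 0; i < lz; i++) {
        if ((b = read_bit(br)) < 0) return UINT32_MAX;
        suffix = (suffix << 1) | (uint32_t)b;
    }
    return (1u << lz) - 1 + suffix;             /* 2^lz - 1 + suffix */
}

/* se(v): odd codeNum -> positive, even -> negative, magnitude (codeNum+1)/2,
 * the same mapping h264_GetVLCElement() applies when bIsSigned is set. */
static int32_t read_se(bitreader *br)
{
    uint32_t k = read_ue(br);
    return (k & 1) ? (int32_t)((k + 1) >> 1) : -(int32_t)((k + 1) >> 1);
}

For example, the bit pattern 010 decodes to codeNum 1, and under se(v) codeNums 1 and 2 map to +1 and -1 respectively.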
+ + + if (match) + { + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + /* bit position in current byte */ + //count = (uint8_t)((leadingZeroBits + bits_offset)& 0x7); + count = ((count + bits_need_add_in_first_byte)& 0x7); + + leadingZeroBits --; + length = leadingZeroBits; + codeNum = 0; + noOfBits = 8 - count; + + + while (leadingZeroBits > 0) + { + if (noOfBits < (uint32_t)leadingZeroBits) + { + viddec_pm_get_bits(parent, &temp, noOfBits); + + + codeNum = (codeNum << noOfBits) | temp; + leadingZeroBits -= noOfBits; + } + else + { + viddec_pm_get_bits(parent, &temp, leadingZeroBits); + + codeNum = (codeNum << leadingZeroBits) | temp; + leadingZeroBits = 0; + } + + + noOfBits = 8; + } + // update codeNum = 2 ** (leadingZeroBits) -1 + read_bits(leadingZeroBits). + codeNum = codeNum + (1 << length) -1; } viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - if(bits_offset!=0) + if (bits_offset!=0) { - viddec_pm_peek_bits(parent, &temp, 8-bits_offset); + viddec_pm_peek_bits(parent, &temp, 8-bits_offset); } return codeNum; @@ -167,19 +167,19 @@ uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo) /*---------------------------------------*/ int32_t h264_GetVLCElement(void *parent, h264_Info* pInfo, uint8_t bIsSigned) { - int32_t sval = 0; - signed char sign; + int32_t sval = 0; + signed char sign; - sval = h264_get_codeNum(parent , pInfo); + sval = h264_get_codeNum(parent , pInfo); - if(bIsSigned) //get signed integer golomb code else the value is unsigned - { - sign = (sval & 0x1)?1:-1; - sval = (sval +1) >> 1; - sval = sval * sign; - } + if (bIsSigned) //get signed integer golomb code else the value is unsigned + { + sign = (sval & 0x1)?1:-1; + sval = (sval +1) >> 1; + sval = sval * sign; + } - return sval; + return sval; } // Ipp32s H264Bitstream::GetVLCElement(bool bIsSigned) /// @@ -187,39 +187,39 @@ int32_t h264_GetVLCElement(void *parent, h264_Info* pInfo, uint8_t bIsSigned) /// uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo) { - uint8_t cnt = 0; + uint8_t cnt = 0; - uint8_t is_emul =0; - uint8_t cur_byte = 0; - int32_t shift_bits =0; - uint32_t ctr_bit = 0; - uint32_t bits_offset =0, byte_offset =0; + uint8_t is_emul =0; + uint8_t cur_byte = 0; + int32_t shift_bits =0; + uint32_t ctr_bit = 0; + uint32_t bits_offset =0, byte_offset =0; - //remove warning - pInfo = pInfo; + //remove warning + pInfo = pInfo; - if (!viddec_pm_is_nomoredata(parent)) - return 1; + if (!viddec_pm_is_nomoredata(parent)) + return 1; - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - shift_bits = 7-bits_offset; + shift_bits = 7-bits_offset; - // read one byte - viddec_pm_get_cur_byte(parent, &cur_byte); + // read one byte + viddec_pm_get_cur_byte(parent, &cur_byte); - ctr_bit = ((cur_byte)>> (shift_bits--)) & 0x01; + ctr_bit = ((cur_byte)>> (shift_bits--)) & 0x01; - // a stop bit has to be one - if (ctr_bit==0) - return 1; + // a stop bit has to be one + if (ctr_bit==0) + return 1; - while (shift_bits>=0 && !cnt) - { - cnt |= (((cur_byte)>> (shift_bits--)) & 0x01); // set up control bit - } + while (shift_bits>=0 && !cnt) + { + cnt |= (((cur_byte)>> (shift_bits--)) & 0x01); // set up control bit + } - return (cnt); + return (cnt); } diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c index 3a111c4..25ca059 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c +++ 
b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c @@ -41,22 +41,22 @@ frame_store *active_fs; void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb) { - int32_t i; + int32_t i; - //// Init DPB to zero - //h264_memset(p_dpb, 0x0, sizeof(h264_DecodedPictureBuffer) ); + //// Init DPB to zero + //h264_memset(p_dpb, 0x0, sizeof(h264_DecodedPictureBuffer) ); - for(i=0;ifs[i].fs_idc = MPD_DPB_FS_NULL_IDC; - p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC; - } - p_dpb->used_size = 0; - p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC; - p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC; + for (i=0; ifs[i].fs_idc = MPD_DPB_FS_NULL_IDC; + p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC; + } + p_dpb->used_size = 0; + p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC; + p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC; - return; + return; } @@ -72,8 +72,8 @@ void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb) ////////////////////////////////////////////////////////////////////////////// void h264_dpb_add_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc) { - p_dpb->fs_ref_idc[p_dpb->ref_frames_in_buffer] = ref_idc; - p_dpb->ref_frames_in_buffer++; + p_dpb->fs_ref_idc[p_dpb->ref_frames_in_buffer] = ref_idc; + p_dpb->ref_frames_in_buffer++; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -85,8 +85,8 @@ void h264_dpb_add_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc) ////////////////////////////////////////////////////////////////////////////// void h264_dpb_add_ltref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc) { - p_dpb->fs_ltref_idc[p_dpb->ltref_frames_in_buffer] = ref_idc; - p_dpb->ltref_frames_in_buffer++; + p_dpb->fs_ltref_idc[p_dpb->ltref_frames_in_buffer] = ref_idc; + p_dpb->ltref_frames_in_buffer++; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -96,54 +96,54 @@ void h264_dpb_add_ltref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc) // // Decide whether the current picture needs to be added to the reference lists // active_fs should be set-up prior to calling this function -// -// Check if we need to search the lists here +// +// Check if we need to search the lists here // or can we go straight to adding to ref lists.. 
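The DPB bookkeeping in this file rests on small fixed arrays of frame-store indices plus element counts: h264_dpb_add_ref_list()/h264_dpb_add_ltref_list() append, and the corresponding remove functions further down do a linear search, shift the tail up one slot, and clear the freed entry with MPD_DPB_FS_NULL_IDC. A compact sketch of that pair follows; the capacity and the null-idc value here are stand-ins, since the parser sizes these from the DPB definition, and the bounds check in ref_add() is added for safety (the original relies on the DPB size limit).

#include <stdint.h>

#define REF_CAP  16
#define NULL_IDC 0xFF   /* stand-in for MPD_DPB_FS_NULL_IDC */

typedef struct {
    uint8_t fs_ref_idc[REF_CAP];
    uint8_t ref_frames_in_buffer;
} ref_list;

/* Append, as h264_dpb_add_ref_list() does. No duplicate check: the caller,
 * h264_dpb_insert_ref_lists(), searches the list first for field pictures. */
static void ref_add(ref_list *rl, uint8_t idc)
{
    if (rl->ref_frames_in_buffer < REF_CAP)
        rl->fs_ref_idc[rl->ref_frames_in_buffer++] = idc;
}

/* Linear search, shift the remainder up one, clear the last slot,
 * mirroring h264_dpb_remove_ref_list(). */
static void ref_remove(ref_list *rl, uint8_t idc)
{
    uint8_t i = 0;
    while (i < rl->ref_frames_in_buffer && rl->fs_ref_idc[i] != idc) i++;
    if (i == rl->ref_frames_in_buffer) return;   /* not found */
    for (; i + 1 < rl->ref_frames_in_buffer; i++)
        rl->fs_ref_idc[i] = rl->fs_ref_idc[i + 1];
    rl->fs_ref_idc[i] = NULL_IDC;
    rl->ref_frames_in_buffer--;
}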
////////////////////////////////////////////////////////////////////////////// void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb, int32_t NonExisting) { - if(NonExisting) - h264_dpb_set_active_fs(p_dpb,p_dpb->fs_non_exist_idc); - else - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - - //if(active_fs->is_reference) - if(active_fs->frame.used_for_reference) - { - if(viddec_h264_get_is_long_term(active_fs)) - { - if(viddec_h264_get_dec_structure(active_fs) == FRAME) - h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc); - else - { - uint32_t found_in_list = 0, i = 0; - for (i = 0; (i < p_dpb->ltref_frames_in_buffer) && (found_in_list == 0); i++) { - if(p_dpb->fs_ltref_idc[i] == active_fs->fs_idc) found_in_list = 1; - } - - if(found_in_list == 0) h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc); - } - } - else - { - if(viddec_h264_get_dec_structure(active_fs) == FRAME) { - h264_dpb_add_ref_list(p_dpb, active_fs->fs_idc); - } else - { - uint32_t found_in_list = 0, i = 0; - - for (i = 0; (i < p_dpb->ref_frames_in_buffer) && (found_in_list == 0); i++) - { - if(p_dpb->fs_ref_idc[i] == active_fs->fs_idc) found_in_list = 1; - } - - if(found_in_list == 0) h264_dpb_add_ref_list(p_dpb, active_fs->fs_idc); - } - } - } - - return; + if (NonExisting) + h264_dpb_set_active_fs(p_dpb,p_dpb->fs_non_exist_idc); + else + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + //if(active_fs->is_reference) + if (active_fs->frame.used_for_reference) + { + if (viddec_h264_get_is_long_term(active_fs)) + { + if (viddec_h264_get_dec_structure(active_fs) == FRAME) + h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc); + else + { + uint32_t found_in_list = 0, i = 0; + for (i = 0; (i < p_dpb->ltref_frames_in_buffer) && (found_in_list == 0); i++) { + if (p_dpb->fs_ltref_idc[i] == active_fs->fs_idc) found_in_list = 1; + } + + if (found_in_list == 0) h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc); + } + } + else + { + if (viddec_h264_get_dec_structure(active_fs) == FRAME) { + h264_dpb_add_ref_list(p_dpb, active_fs->fs_idc); + } else + { + uint32_t found_in_list = 0, i = 0; + + for (i = 0; (i < p_dpb->ref_frames_in_buffer) && (found_in_list == 0); i++) + { + if (p_dpb->fs_ref_idc[i] == active_fs->fs_idc) found_in_list = 1; + } + + if (found_in_list == 0) h264_dpb_add_ref_list(p_dpb, active_fs->fs_idc); + } + } + } + + return; } @@ -156,8 +156,8 @@ void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb, int32_t NonExi void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb, int32_t index) { - active_fs = &p_dpb->fs[index]; -} + active_fs = &p_dpb->fs[index]; +} /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -167,26 +167,26 @@ void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb, int32_t index) void h264_list_sort(uint8_t *list, int32_t *sort_indices, int32_t size, int32_t desc) { - int32_t j, k, temp, idc; - - // Dodgy looking for embedded code here... 
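h264_list_sort() here is an O(n^2) exchange sort that reorders a list of frame-store ids in lockstep with their sort keys (pic_num, long_term_pic_num, or POC); desc selects descending order, which the P-slice short-term list needs (descending PicNum), while long-term lists sort ascending. The original folds the comparison into bitwise masks, (desc & (a<b)) | (~desc & (a>b)), which only behaves as intended when desc is exactly 0 or 1; the sketch below is the equivalent explicit form.

#include <stdint.h>

/* Sort key[] and carry list[] along with it; desc != 0 sorts descending. */
static void list_sort_sketch(uint8_t *list, int32_t *key, int32_t size, int32_t desc)
{
    for (int32_t j = 0; j < size - 1; j++) {
        for (int32_t k = j + 1; k < size; k++) {
            int32_t out_of_order = desc ? (key[j] < key[k]) : (key[j] > key[k]);
            if (out_of_order) {
                int32_t tk = key[k];  key[k]  = key[j];  key[j]  = tk;
                uint8_t ti = list[k]; list[k] = list[j]; list[j] = ti;
            }
        }
    }
}

The quadratic cost is fine here: the lists are bounded by the DPB's 16 frame stores, so a branch-light exchange sort beats anything fancier on this firmware target.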
- if(size > 1) - { - for (j = 0; j < size-1; j = j + 1) { - for (k = j + 1; k < size; k = k + 1) { - if ((desc & (sort_indices[j] < sort_indices[k]))| - (~desc & (sort_indices[j] > sort_indices[k])) ) - { - temp = sort_indices[k]; - sort_indices[k] = sort_indices[j]; - sort_indices[j] = temp; - idc = list[k]; - list[k] = list[j]; - list[j] = idc; - } - } - } - } + int32_t j, k, temp, idc; + + // Dodgy looking for embedded code here... + if (size > 1) + { + for (j = 0; j < size-1; j = j + 1) { + for (k = j + 1; k < size; k = k + 1) { + if ((desc & (sort_indices[j] < sort_indices[k]))| + (~desc & (sort_indices[j] > sort_indices[k])) ) + { + temp = sort_indices[k]; + sort_indices[k] = sort_indices[j]; + sort_indices[j] = temp; + idc = list[k]; + list[k] = list[j]; + list[j] = idc; + } + } + } + } } /* ------------------------------------------------------------------------------------------ */ @@ -200,11 +200,11 @@ void h264_list_sort(uint8_t *list, int32_t *sort_indices, int32_t size, int32_t int32_t h264_dpb_pic_is_bottom_field_ref(int32_t long_term) { - int32_t temp; - if(long_term) temp = ((active_fs->bottom_field.used_for_reference) && (active_fs->bottom_field.is_long_term)) ? 1 : 0; - else temp = ((active_fs->bottom_field.used_for_reference) && !(active_fs->bottom_field.is_long_term)) ? 1 : 0; - - return temp; + int32_t temp; + if (long_term) temp = ((active_fs->bottom_field.used_for_reference) && (active_fs->bottom_field.is_long_term)) ? 1 : 0; + else temp = ((active_fs->bottom_field.used_for_reference) && !(active_fs->bottom_field.is_long_term)) ? 1 : 0; + + return temp; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -217,13 +217,13 @@ int32_t h264_dpb_pic_is_bottom_field_ref(int32_t long_term) int32_t h264_dpb_pic_is_top_field_ref(int32_t long_term) { - int32_t temp; - if(long_term) - temp = ((active_fs->top_field.used_for_reference) && (active_fs->top_field.is_long_term)) ? 1 : 0; - else - temp = ((active_fs->top_field.used_for_reference) && !(active_fs->top_field.is_long_term)) ? 1 : 0; - - return temp; + int32_t temp; + if (long_term) + temp = ((active_fs->top_field.used_for_reference) && (active_fs->top_field.is_long_term)) ? 1 : 0; + else + temp = ((active_fs->top_field.used_for_reference) && !(active_fs->top_field.is_long_term)) ? 1 : 0; + + return temp; } @@ -238,97 +238,97 @@ int32_t h264_dpb_pic_is_top_field_ref(int32_t long_term) int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, uint8_t *pic_list, uint8_t *frame_list, int32_t currPicStructure, int32_t list_size, int32_t long_term) { - int32_t top_idx, bot_idx, got_pic, list_idx; - int32_t lterm; - - list_idx = 0; - lterm = (long_term)? 
1:0; - - if(list_size){ - - - top_idx = 0; - bot_idx = 0; - - if (currPicStructure == TOP_FIELD) { - while ((top_idx < list_size)||(bot_idx < list_size)) - { - /////////////////////////////////////////// ref Top Field - got_pic = 0; - while ((top_idx < list_size) & ~got_pic) - { - h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]); - if ((viddec_h264_get_is_used(active_fs))&0x1) - { - if(h264_dpb_pic_is_top_field_ref(long_term)) - { - pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field - list_idx++; - got_pic = 1; - } - } - top_idx++; - } - - /////////////////////////////////////////// ref Bottom Field - got_pic = 0; - while ((bot_idx < list_size) & ~got_pic) - { - h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]); - if ((viddec_h264_get_is_used(active_fs))&0x2) - { - if(h264_dpb_pic_is_bottom_field_ref(long_term)) - { - pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field - list_idx++; - got_pic = 1; - } - } - bot_idx++; - } - } - } - - /////////////////////////////////////////////// current Bottom Field - if (currPicStructure == BOTTOM_FIELD) { - while ((top_idx < list_size)||(bot_idx < list_size)) - { - /////////////////////////////////////////// ref Top Field - got_pic = 0; - while ((bot_idx < list_size) && (!(got_pic))) - { - h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]); - if ((viddec_h264_get_is_used(active_fs))&0x2) { - if(h264_dpb_pic_is_bottom_field_ref(long_term)) { - // short term ref pic - pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field - list_idx++; - got_pic = 1; - } - } - bot_idx++; - } - - /////////////////////////////////////////// ref Bottom Field - got_pic = 0; - while ((top_idx < list_size) && (!(got_pic))) - { - h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]); - if ((viddec_h264_get_is_used(active_fs))&0x1) { - if(h264_dpb_pic_is_top_field_ref(long_term)){ - // short term ref pic - pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field - list_idx++; - got_pic = 1; - } - } - top_idx++; - } - } - } - } - - return list_idx; + int32_t top_idx, bot_idx, got_pic, list_idx; + int32_t lterm; + + list_idx = 0; + lterm = (long_term)? 
1:0; + + if (list_size) { + + + top_idx = 0; + bot_idx = 0; + + if (currPicStructure == TOP_FIELD) { + while ((top_idx < list_size)||(bot_idx < list_size)) + { + /////////////////////////////////////////// ref Top Field + got_pic = 0; + while ((top_idx < list_size) & ~got_pic) + { + h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]); + if ((viddec_h264_get_is_used(active_fs))&0x1) + { + if (h264_dpb_pic_is_top_field_ref(long_term)) + { + pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field + list_idx++; + got_pic = 1; + } + } + top_idx++; + } + + /////////////////////////////////////////// ref Bottom Field + got_pic = 0; + while ((bot_idx < list_size) & ~got_pic) + { + h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]); + if ((viddec_h264_get_is_used(active_fs))&0x2) + { + if (h264_dpb_pic_is_bottom_field_ref(long_term)) + { + pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field + list_idx++; + got_pic = 1; + } + } + bot_idx++; + } + } + } + + /////////////////////////////////////////////// current Bottom Field + if (currPicStructure == BOTTOM_FIELD) { + while ((top_idx < list_size)||(bot_idx < list_size)) + { + /////////////////////////////////////////// ref Top Field + got_pic = 0; + while ((bot_idx < list_size) && (!(got_pic))) + { + h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]); + if ((viddec_h264_get_is_used(active_fs))&0x2) { + if (h264_dpb_pic_is_bottom_field_ref(long_term)) { + // short term ref pic + pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field + list_idx++; + got_pic = 1; + } + } + bot_idx++; + } + + /////////////////////////////////////////// ref Bottom Field + got_pic = 0; + while ((top_idx < list_size) && (!(got_pic))) + { + h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]); + if ((viddec_h264_get_is_used(active_fs))&0x1) { + if (h264_dpb_pic_is_top_field_ref(long_term)) { + // short term ref pic + pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field + list_idx++; + got_pic = 1; + } + } + top_idx++; + } + } + } + } + + return list_idx; } /* ------------------------------------------------------------------------------------------ */ @@ -342,30 +342,30 @@ int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, void h264_dpb_remove_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc) { - uint8_t idx = 0; - int32_t Found = 0; - - while ((idx < p_dpb->ref_frames_in_buffer) && (!(Found))) - { - if (p_dpb->fs_ref_idc[idx] == ref_idc) - Found = 1; - else - idx++; - } - - if (Found) - { - // Move the remainder of the list up one - while(idx < p_dpb->ref_frames_in_buffer - 1) { - p_dpb->fs_ref_idc[idx] = p_dpb->fs_ref_idc[idx + 1]; - idx ++; - } - - p_dpb->fs_ref_idc[idx] = MPD_DPB_FS_NULL_IDC; // Clear the last one - p_dpb->ref_frames_in_buffer--; - } - - return; + uint8_t idx = 0; + int32_t Found = 0; + + while ((idx < p_dpb->ref_frames_in_buffer) && (!(Found))) + { + if (p_dpb->fs_ref_idc[idx] == ref_idc) + Found = 1; + else + idx++; + } + + if (Found) + { + // Move the remainder of the list up one + while (idx < p_dpb->ref_frames_in_buffer - 1) { + p_dpb->fs_ref_idc[idx] = p_dpb->fs_ref_idc[idx + 1]; + idx ++; + } + + p_dpb->fs_ref_idc[idx] = MPD_DPB_FS_NULL_IDC; // Clear the last one + p_dpb->ref_frames_in_buffer--; + } + + return; } /* 
------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -378,29 +378,29 @@ void h264_dpb_remove_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc void h264_dpb_remove_ltref_list(h264_DecodedPictureBuffer * p_dpb,int32_t ref_idc) { - uint8_t idx = 0; - int32_t Found = 0; - - while ((idx < p_dpb->ltref_frames_in_buffer) && (!(Found))) - { - if (p_dpb->fs_ltref_idc[idx] == ref_idc) Found = 1; - else idx++; - } - - if (Found) - { - // Move the remainder of the list up one - while(idx <(uint8_t)(p_dpb->ltref_frames_in_buffer - 1)) - { - p_dpb->fs_ltref_idc[idx] = p_dpb->fs_ltref_idc[idx + 1]; - idx ++; - } - p_dpb->fs_ltref_idc[idx] = MPD_DPB_FS_NULL_IDC; // Clear the last one - - p_dpb->ltref_frames_in_buffer--; - } - - return; + uint8_t idx = 0; + int32_t Found = 0; + + while ((idx < p_dpb->ltref_frames_in_buffer) && (!(Found))) + { + if (p_dpb->fs_ltref_idc[idx] == ref_idc) Found = 1; + else idx++; + } + + if (Found) + { + // Move the remainder of the list up one + while (idx <(uint8_t)(p_dpb->ltref_frames_in_buffer - 1)) + { + p_dpb->fs_ltref_idc[idx] = p_dpb->fs_ltref_idc[idx + 1]; + idx ++; + } + p_dpb->fs_ltref_idc[idx] = MPD_DPB_FS_NULL_IDC; // Clear the last one + + p_dpb->ltref_frames_in_buffer--; + } + + return; } @@ -415,474 +415,474 @@ void h264_dpb_remove_ltref_list(h264_DecodedPictureBuffer * p_dpb,int32_t ref_id ////////////////////////////////////////////////////////////////////////////// void h264_dpb_update_ref_lists(h264_Info * pInfo) { - h264_DecodedPictureBuffer * p_dpb = &pInfo->dpb; + h264_DecodedPictureBuffer * p_dpb = &pInfo->dpb; - int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); + int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); - uint8_t list0idx, list0idx_1, listltidx; - uint8_t idx; + uint8_t list0idx, list0idx_1, listltidx; + uint8_t idx; - uint8_t add_top, add_bottom, diff; - uint8_t list_idc; - uint8_t check_non_existing, skip_picture; + uint8_t add_top, add_bottom, diff; + uint8_t list_idc; + uint8_t check_non_existing, skip_picture; - uint8_t gen_pic_fs_list0[16]; - uint8_t gen_pic_fs_list1[16]; - uint8_t gen_pic_fs_listlt[16]; - uint8_t gen_pic_pic_list[32]; // check out these sizes... + uint8_t gen_pic_fs_list0[16]; + uint8_t gen_pic_fs_list1[16]; + uint8_t gen_pic_fs_listlt[16]; + uint8_t gen_pic_pic_list[32]; // check out these sizes... 
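Before any list is built, h264_dpb_update_ref_lists() derives FrameNumWrap and PicNum per H.264 clause 8.2.4.1: a stored frame_num greater than the current one must have wrapped around MaxFrameNum (1 << (log2_max_frame_num_minus4 + 4)), and for field decoding same-parity fields get 2*FrameNumWrap + 1 while opposite-parity fields get 2*FrameNumWrap, which is exactly the add_top/add_bottom logic in this hunk. A sketch of just that derivation:

#include <stdint.h>

/* FrameNumWrap: a stored frame_num larger than the current one wrapped,
 * so pull it back by MaxFrameNum (H.264 8.2.4.1). */
static int32_t frame_num_wrap(int32_t frame_num, int32_t cur_frame_num,
                              int32_t max_frame_num)
{
    return (frame_num > cur_frame_num) ? frame_num - max_frame_num : frame_num;
}

/* Field PicNum: same-parity fields rank one closer than opposite-parity
 * ones, giving 2*wrap + 1 versus 2*wrap. */
static int32_t field_pic_num(int32_t wrap, int same_parity_as_current)
{
    return (wrap << 1) + (same_parity_as_current ? 1 : 0);
}

PicNum then drives the descending sort of the P-slice short-term list, so the most recently decoded references land first.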
- uint8_t sort_fs_idc[16]; - int32_t list_sort_number[16]; + uint8_t sort_fs_idc[16]; + int32_t list_sort_number[16]; #ifdef DUMP_HEADER_INFO - static int cc1 = 0; - //OS_INFO("-------------cc1= %d\n",cc1); /////// DEBUG info - if(cc1 == 255) - idx = 0; + static int cc1 = 0; + //OS_INFO("-------------cc1= %d\n",cc1); /////// DEBUG info + if (cc1 == 255) + idx = 0; #endif - list0idx = list0idx_1 = listltidx = 0; - - if (pInfo->SliceHeader.structure == FRAME) - { - ////////////////////////////////////////////////// short term handling - for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - - if((viddec_h264_get_is_used(active_fs) == 3)&&(active_fs->frame.used_for_reference == 3)) - { - if (active_fs->frame_num > pInfo->img.frame_num) - active_fs->frame_num_wrap = active_fs->frame_num - MaxFrameNum; - else - active_fs->frame_num_wrap = active_fs->frame_num; - - active_fs->frame.pic_num = active_fs->frame_num_wrap; - - // Use this opportunity to sort list for a p-frame - if(pInfo->SliceHeader.slice_type == h264_PtypeP) - { - sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; - list_sort_number[list0idx] = active_fs->frame.pic_num; - list0idx++; - } - } - } - - if(pInfo->SliceHeader.slice_type == h264_PtypeP) - { - h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); - for (idx = 0; idx < list0idx; idx++) - p_dpb->listX_0[idx] = (sort_fs_idc[idx]); // frame - - p_dpb->listXsize[0] = list0idx; - } - - ////////////////////////////////////////////////// long term handling - for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); - if ((viddec_h264_get_is_used(active_fs) == 3) && (viddec_h264_get_is_long_term(active_fs) == 3) && (active_fs->frame.used_for_reference == 3)) - { - active_fs->frame.long_term_pic_num = active_fs->frame.long_term_frame_idx; - - if(pInfo->SliceHeader.slice_type == h264_PtypeP) - { - sort_fs_idc[list0idx-p_dpb->listXsize[0]] = p_dpb->fs_ltref_idc[idx]; - list_sort_number[list0idx-p_dpb->listXsize[0]] = active_fs->frame.long_term_pic_num; - list0idx++; - } - } - } - - if(pInfo->SliceHeader.slice_type == h264_PtypeP) - { - h264_list_sort(sort_fs_idc, list_sort_number, list0idx-p_dpb->listXsize[0], 0); - for (idx = p_dpb->listXsize[0]; idx < list0idx; idx++) { - p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; - } - p_dpb->listXsize[0] = list0idx; - } - } - else /// Field base - { - if (pInfo->SliceHeader.structure == TOP_FIELD) - { - add_top = 1; - add_bottom = 0; - } - else - { - add_top = 0; - add_bottom = 1; - } - - ////////////////////////////////////////////P0: Short term handling - for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - if (active_fs->frame.used_for_reference) - { - if(active_fs->frame_num > pInfo->SliceHeader.frame_num) { - active_fs->frame_num_wrap = active_fs->frame_num - MaxFrameNum; - } else { - active_fs->frame_num_wrap = active_fs->frame_num; - } - - if ((active_fs->frame.used_for_reference)&0x1) { - active_fs->top_field.pic_num = (active_fs->frame_num_wrap << 1) + add_top; - } - - if ((active_fs->frame.used_for_reference)&0x2) { - active_fs->bottom_field.pic_num = (active_fs->frame_num_wrap << 1) + add_bottom; - } - - if(pInfo->SliceHeader.slice_type == h264_PtypeP) { - sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; - list_sort_number[list0idx] = active_fs->frame_num_wrap; - list0idx++; - } - } - } - - 
if(pInfo->SliceHeader.slice_type == h264_PtypeP) - { - h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); - for (idx = 0; idx < list0idx; idx++) { - gen_pic_fs_list0[idx] = sort_fs_idc[idx]; - } - - p_dpb->listXsize[0] = 0; - p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0); - - for (idx = 0; idx < p_dpb->listXsize[0]; idx++) - { - p_dpb->listX_0[idx] = gen_pic_pic_list[idx]; - } - } - - ////////////////////////////////////////////P0: long term handling - for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); - - if (viddec_h264_get_is_long_term(active_fs)&0x1) { - active_fs->top_field.long_term_pic_num = (active_fs->top_field.long_term_frame_idx << 1) + add_top; - } - - if (viddec_h264_get_is_long_term(active_fs)&0x2) { - active_fs->bottom_field.long_term_pic_num = (active_fs->bottom_field.long_term_frame_idx << 1) + add_bottom; - } - - if(pInfo->SliceHeader.slice_type == h264_PtypeP) - { - sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx]; - list_sort_number[listltidx] = active_fs->long_term_frame_idx; - listltidx++; - } - } - - if(pInfo->SliceHeader.slice_type == h264_PtypeP) - { - h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0); - for (idx = 0; idx < listltidx; idx++) { - gen_pic_fs_listlt[idx] = sort_fs_idc[idx]; - } - list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1); - - for (idx = 0; idx < list0idx_1; idx++) { - p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx]; - } - p_dpb->listXsize[0] += list0idx_1; - } - } - - - if (pInfo->SliceHeader.slice_type == h264_PtypeI) - { - p_dpb->listXsize[0] = 0; - p_dpb->listXsize[1] = 0; - return; - } - - if(pInfo->SliceHeader.slice_type == h264_PtypeP) - { - //// Forward done above - p_dpb->listXsize[1] = 0; - } - - - // B-Slice - // Do not include non-existing frames for B-pictures when cnt_type is zero - - if(pInfo->SliceHeader.slice_type == h264_PtypeB) - { - list0idx = list0idx_1 = listltidx = 0; - skip_picture = 0; - - if(pInfo->active_SPS.pic_order_cnt_type == 0) - check_non_existing = 1; - else - check_non_existing = 0; - - if (pInfo->SliceHeader.structure == FRAME) - { - for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - if (viddec_h264_get_is_used(active_fs) == 3) - { - if(check_non_existing) - { - if(viddec_h264_get_is_non_existent(active_fs)) skip_picture = 1; - else skip_picture = 0; - } - - if(skip_picture == 0) - { - if ((active_fs->frame.used_for_reference==3) && (!(active_fs->frame.is_long_term))) - { - if (pInfo->img.framepoc >= active_fs->frame.poc) - { - sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; - list_sort_number[list0idx] = active_fs->frame.poc; - list0idx++; - } - } - } - } - } - - h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); - for (idx = 0; idx < list0idx; idx++) { - p_dpb->listX_0[idx] = sort_fs_idc[idx]; - } - - list0idx_1 = list0idx; - - /////////////////////////////////////////B0: Short term handling - for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - - if (viddec_h264_get_is_used(active_fs) == 3) - { - if(check_non_existing) - { - if(viddec_h264_get_is_non_existent(active_fs)) skip_picture = 1; - else skip_picture = 0; - } - - if(skip_picture == 0) - { - if ((active_fs->frame.used_for_reference) && 
(!(active_fs->frame.is_long_term))) - { - if (pInfo->img.framepoc < active_fs->frame.poc) - { - sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx]; - list_sort_number[list0idx-list0idx_1] = active_fs->frame.poc; - list0idx++; - } - } - } - } - } - - h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0); - for (idx = list0idx_1; idx < list0idx; idx++) { - p_dpb->listX_0[idx] = sort_fs_idc[idx-list0idx_1]; - } - - for (idx = 0; idx < list0idx_1; idx++) { - p_dpb->listX_1[list0idx-list0idx_1+idx] = p_dpb->listX_0[idx]; - } - - for (idx = list0idx_1; idx < list0idx; idx++) { - p_dpb->listX_1[idx-list0idx_1] = p_dpb->listX_0[idx]; - } - - p_dpb->listXsize[0] = list0idx; - p_dpb->listXsize[1] = list0idx; - - /////////////////////////////////////////B0: long term handling - list0idx = 0; - - // Can non-existent pics be set as long term?? - for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); - - if ((viddec_h264_get_is_used(active_fs) == 3) && (viddec_h264_get_is_long_term(active_fs) == 3)) - { - // if we have two fields, both must be long-term - sort_fs_idc[list0idx] = p_dpb->fs_ltref_idc[idx]; - list_sort_number[list0idx] = active_fs->frame.long_term_pic_num; - list0idx++; - } - } - - h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 0); - for (idx = p_dpb->listXsize[0]; idx < (p_dpb->listXsize[0]+list0idx); idx = idx + 1) - { - p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; - p_dpb->listX_1[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; - } - - p_dpb->listXsize[0] += list0idx; - p_dpb->listXsize[1] += list0idx; - } - else // Field - { - for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - - if (viddec_h264_get_is_used(active_fs)) { - if(check_non_existing) { - if(viddec_h264_get_is_non_existent(active_fs)) - skip_picture = 1; - else - skip_picture = 0; - } - - if(skip_picture == 0) { - if (pInfo->img.ThisPOC >= active_fs->frame.poc) { - sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; - list_sort_number[list0idx] = active_fs->frame.poc; - list0idx++; - } - } - } - } - - h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); - for (idx = 0; idx < list0idx; idx = idx + 1) { - gen_pic_fs_list0[idx] = sort_fs_idc[idx]; - } - - list0idx_1 = list0idx; - - ///////////////////////////////////////////// B1: Short term handling - for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - if (viddec_h264_get_is_used(active_fs)) - { - if(check_non_existing) { - if(viddec_h264_get_is_non_existent(active_fs)) - skip_picture = 1; - else - skip_picture = 0; - } - - if(skip_picture == 0) { - if (pInfo->img.ThisPOC < active_fs->frame.poc) { - sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx]; - list_sort_number[list0idx-list0idx_1] = active_fs->frame.poc; - list0idx++; - } - } - } - } - - ///// Generate frame list from sorted fs - ///// - h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0); - for (idx = list0idx_1; idx < list0idx; idx++) - gen_pic_fs_list0[idx] = sort_fs_idc[idx-list0idx_1]; - - for (idx = 0; idx < list0idx_1; idx++) - gen_pic_fs_list1[list0idx-list0idx_1+idx] = gen_pic_fs_list0[idx]; - - for (idx = list0idx_1; idx < list0idx; idx++) - gen_pic_fs_list1[idx-list0idx_1] = gen_pic_fs_list0[idx]; - - ///// Generate List_X0 - ///// - p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, 
gen_pic_fs_list0, pInfo->img.structure, list0idx, 0); - - for (idx = 0; idx < p_dpb->listXsize[0]; idx++) - p_dpb->listX_0[idx] = gen_pic_pic_list[idx]; - - //// Generate List X1 - //// - p_dpb->listXsize[1] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list1, pInfo->img.structure, list0idx, 0); - - for (idx = 0; idx < p_dpb->listXsize[1]; idx++) - p_dpb->listX_1[idx] = gen_pic_pic_list[idx]; - - ///////////////////////////////////////////// B1: long term handling - for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); - sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx]; - list_sort_number[listltidx] = active_fs->long_term_frame_idx; - listltidx++; - } - - h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0); - for (idx = 0; idx < listltidx; idx++) - gen_pic_fs_listlt[idx] = sort_fs_idc[idx]; - - list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1); - - for (idx = 0; idx < list0idx_1; idx++) - { - p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx]; - p_dpb->listX_1[p_dpb->listXsize[1]+idx] = gen_pic_pic_list[idx]; - } - - p_dpb->listXsize[0] += list0idx_1; - p_dpb->listXsize[1] += list0idx_1; - } - } - - // Setup initial list sizes at this point - p_dpb->nInitListSize[0] = p_dpb->listXsize[0]; - p_dpb->nInitListSize[1] = p_dpb->listXsize[1]; - if(pInfo->SliceHeader.slice_type != h264_PtypeI) - { - if ((p_dpb->listXsize[0]==p_dpb->listXsize[1]) && (p_dpb->listXsize[0] > 1)) - { - // check if lists are identical, if yes swap first two elements of listX[1] - diff = 0; - for (idx = 0; idx < p_dpb->listXsize[0]; idx = idx + 1) - { - if (p_dpb->listX_0[idx] != p_dpb->listX_1[idx]) diff = 1; - } - - - if (!(diff)) - { - list_idc = p_dpb->listX_1[0]; - p_dpb->listX_1[0] = p_dpb->listX_1[1]; - p_dpb->listX_1[1] = list_idc; - } - } - - // set max size - if (p_dpb->listXsize[0] > pInfo->SliceHeader.num_ref_idx_l0_active) - { - p_dpb->listXsize[0] = pInfo->SliceHeader.num_ref_idx_l0_active; - } - - - if (p_dpb->listXsize[1] > pInfo->SliceHeader.num_ref_idx_l1_active) - { - p_dpb->listXsize[1] = pInfo->SliceHeader.num_ref_idx_l1_active; - } - - - - } - - - - /// DPB reorder list - h264_dpb_reorder_lists(pInfo); - - return; + list0idx = list0idx_1 = listltidx = 0; + + if (pInfo->SliceHeader.structure == FRAME) + { + ////////////////////////////////////////////////// short term handling + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if ((viddec_h264_get_is_used(active_fs) == 3)&&(active_fs->frame.used_for_reference == 3)) + { + if (active_fs->frame_num > pInfo->img.frame_num) + active_fs->frame_num_wrap = active_fs->frame_num - MaxFrameNum; + else + active_fs->frame_num_wrap = active_fs->frame_num; + + active_fs->frame.pic_num = active_fs->frame_num_wrap; + + // Use this opportunity to sort list for a p-frame + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx] = active_fs->frame.pic_num; + list0idx++; + } + } + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); + for (idx = 0; idx < list0idx; idx++) + p_dpb->listX_0[idx] = (sort_fs_idc[idx]); // frame + + p_dpb->listXsize[0] = list0idx; + } + + ////////////////////////////////////////////////// long term handling + for (idx = 0; idx < 
p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + if ((viddec_h264_get_is_used(active_fs) == 3) && (viddec_h264_get_is_long_term(active_fs) == 3) && (active_fs->frame.used_for_reference == 3)) + { + active_fs->frame.long_term_pic_num = active_fs->frame.long_term_frame_idx; + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + sort_fs_idc[list0idx-p_dpb->listXsize[0]] = p_dpb->fs_ltref_idc[idx]; + list_sort_number[list0idx-p_dpb->listXsize[0]] = active_fs->frame.long_term_pic_num; + list0idx++; + } + } + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + h264_list_sort(sort_fs_idc, list_sort_number, list0idx-p_dpb->listXsize[0], 0); + for (idx = p_dpb->listXsize[0]; idx < list0idx; idx++) { + p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; + } + p_dpb->listXsize[0] = list0idx; + } + } + else /// Field base + { + if (pInfo->SliceHeader.structure == TOP_FIELD) + { + add_top = 1; + add_bottom = 0; + } + else + { + add_top = 0; + add_bottom = 1; + } + + ////////////////////////////////////////////P0: Short term handling + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + if (active_fs->frame.used_for_reference) + { + if (active_fs->frame_num > pInfo->SliceHeader.frame_num) { + active_fs->frame_num_wrap = active_fs->frame_num - MaxFrameNum; + } else { + active_fs->frame_num_wrap = active_fs->frame_num; + } + + if ((active_fs->frame.used_for_reference)&0x1) { + active_fs->top_field.pic_num = (active_fs->frame_num_wrap << 1) + add_top; + } + + if ((active_fs->frame.used_for_reference)&0x2) { + active_fs->bottom_field.pic_num = (active_fs->frame_num_wrap << 1) + add_bottom; + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) { + sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx] = active_fs->frame_num_wrap; + list0idx++; + } + } + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); + for (idx = 0; idx < list0idx; idx++) { + gen_pic_fs_list0[idx] = sort_fs_idc[idx]; + } + + p_dpb->listXsize[0] = 0; + p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0); + + for (idx = 0; idx < p_dpb->listXsize[0]; idx++) + { + p_dpb->listX_0[idx] = gen_pic_pic_list[idx]; + } + } + + ////////////////////////////////////////////P0: long term handling + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + + if (viddec_h264_get_is_long_term(active_fs)&0x1) { + active_fs->top_field.long_term_pic_num = (active_fs->top_field.long_term_frame_idx << 1) + add_top; + } + + if (viddec_h264_get_is_long_term(active_fs)&0x2) { + active_fs->bottom_field.long_term_pic_num = (active_fs->bottom_field.long_term_frame_idx << 1) + add_bottom; + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx]; + list_sort_number[listltidx] = active_fs->long_term_frame_idx; + listltidx++; + } + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0); + for (idx = 0; idx < listltidx; idx++) { + gen_pic_fs_listlt[idx] = sort_fs_idc[idx]; + } + list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1); + + for (idx = 0; idx < list0idx_1; idx++) { + 
p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx]; + } + p_dpb->listXsize[0] += list0idx_1; + } + } + + + if (pInfo->SliceHeader.slice_type == h264_PtypeI) + { + p_dpb->listXsize[0] = 0; + p_dpb->listXsize[1] = 0; + return; + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + //// Forward done above + p_dpb->listXsize[1] = 0; + } + + + // B-Slice + // Do not include non-existing frames for B-pictures when cnt_type is zero + + if (pInfo->SliceHeader.slice_type == h264_PtypeB) + { + list0idx = list0idx_1 = listltidx = 0; + skip_picture = 0; + + if (pInfo->active_SPS.pic_order_cnt_type == 0) + check_non_existing = 1; + else + check_non_existing = 0; + + if (pInfo->SliceHeader.structure == FRAME) + { + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + if (viddec_h264_get_is_used(active_fs) == 3) + { + if (check_non_existing) + { + if (viddec_h264_get_is_non_existent(active_fs)) skip_picture = 1; + else skip_picture = 0; + } + + if (skip_picture == 0) + { + if ((active_fs->frame.used_for_reference==3) && (!(active_fs->frame.is_long_term))) + { + if (pInfo->img.framepoc >= active_fs->frame.poc) + { + sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx] = active_fs->frame.poc; + list0idx++; + } + } + } + } + } + + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); + for (idx = 0; idx < list0idx; idx++) { + p_dpb->listX_0[idx] = sort_fs_idc[idx]; + } + + list0idx_1 = list0idx; + + /////////////////////////////////////////B0: Short term handling + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if (viddec_h264_get_is_used(active_fs) == 3) + { + if (check_non_existing) + { + if (viddec_h264_get_is_non_existent(active_fs)) skip_picture = 1; + else skip_picture = 0; + } + + if (skip_picture == 0) + { + if ((active_fs->frame.used_for_reference) && (!(active_fs->frame.is_long_term))) + { + if (pInfo->img.framepoc < active_fs->frame.poc) + { + sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx-list0idx_1] = active_fs->frame.poc; + list0idx++; + } + } + } + } + } + + h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0); + for (idx = list0idx_1; idx < list0idx; idx++) { + p_dpb->listX_0[idx] = sort_fs_idc[idx-list0idx_1]; + } + + for (idx = 0; idx < list0idx_1; idx++) { + p_dpb->listX_1[list0idx-list0idx_1+idx] = p_dpb->listX_0[idx]; + } + + for (idx = list0idx_1; idx < list0idx; idx++) { + p_dpb->listX_1[idx-list0idx_1] = p_dpb->listX_0[idx]; + } + + p_dpb->listXsize[0] = list0idx; + p_dpb->listXsize[1] = list0idx; + + /////////////////////////////////////////B0: long term handling + list0idx = 0; + + // Can non-existent pics be set as long term?? 
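For B slices the short-term part of list0 is built from two POC groups: pictures with POC less than or equal to the current POC sorted descending (nearest past first), then pictures with greater POC sorted ascending (nearest future first); list1 is the same two groups with their order swapped, and long-term references are appended to both lists afterwards, tagged with (1<<6) in the stored index. Below is a sketch of that ordering for the frame case (field pictures go through h264_dpb_gen_pic_list_from_frame_list() instead); the 16-entry bound matches the DPB's frame-store count.

#include <stdint.h>

/* Paired exchange sort, as in h264_list_sort(). */
static void sort_pair(uint8_t *idc, int32_t *key, int n, int desc)
{
    for (int j = 0; j < n - 1; j++)
        for (int k = j + 1; k < n; k++)
            if (desc ? (key[j] < key[k]) : (key[j] > key[k])) {
                int32_t tk = key[k]; key[k] = key[j]; key[j] = tk;
                uint8_t ti = idc[k]; idc[k] = idc[j]; idc[j] = ti;
            }
}

/* Build the short-term parts of list0/list1 for a B frame:
 *   group A: POC <= cur_poc, POC descending (nearest past first)
 *   group B: POC >  cur_poc, POC ascending  (nearest future first)
 * list0 = A then B; list1 = B then A. Returns the list length. */
static int build_b_lists(const uint8_t *idc, const int32_t *poc, int n,
                         int32_t cur_poc, uint8_t *list0, uint8_t *list1)
{
    uint8_t a_idc[16], b_idc[16];
    int32_t a_key[16], b_key[16];
    int na = 0, nb = 0;

    for (int i = 0; i < n && i < 16; i++) {
        if (poc[i] <= cur_poc) { a_idc[na] = idc[i]; a_key[na] = poc[i]; na++; }
        else                   { b_idc[nb] = idc[i]; b_key[nb] = poc[i]; nb++; }
    }
    sort_pair(a_idc, a_key, na, 1);   /* past half, descending */
    sort_pair(b_idc, b_key, nb, 0);   /* future half, ascending */

    for (int i = 0; i < na; i++) { list0[i] = a_idc[i];      list1[nb + i] = a_idc[i]; }
    for (int i = 0; i < nb; i++) { list0[na + i] = b_idc[i]; list1[i] = b_idc[i]; }
    return na + nb;
}

The tail of this hunk then truncates both lists to num_ref_idx_l0_active/num_ref_idx_l1_active and, if the two lists came out identical, swaps the first two entries of list1, as the spec requires.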
+ for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + + if ((viddec_h264_get_is_used(active_fs) == 3) && (viddec_h264_get_is_long_term(active_fs) == 3)) + { + // if we have two fields, both must be long-term + sort_fs_idc[list0idx] = p_dpb->fs_ltref_idc[idx]; + list_sort_number[list0idx] = active_fs->frame.long_term_pic_num; + list0idx++; + } + } + + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 0); + for (idx = p_dpb->listXsize[0]; idx < (p_dpb->listXsize[0]+list0idx); idx = idx + 1) + { + p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; + p_dpb->listX_1[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; + } + + p_dpb->listXsize[0] += list0idx; + p_dpb->listXsize[1] += list0idx; + } + else // Field + { + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if (viddec_h264_get_is_used(active_fs)) { + if (check_non_existing) { + if (viddec_h264_get_is_non_existent(active_fs)) + skip_picture = 1; + else + skip_picture = 0; + } + + if (skip_picture == 0) { + if (pInfo->img.ThisPOC >= active_fs->frame.poc) { + sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx] = active_fs->frame.poc; + list0idx++; + } + } + } + } + + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); + for (idx = 0; idx < list0idx; idx = idx + 1) { + gen_pic_fs_list0[idx] = sort_fs_idc[idx]; + } + + list0idx_1 = list0idx; + + ///////////////////////////////////////////// B1: Short term handling + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + if (viddec_h264_get_is_used(active_fs)) + { + if (check_non_existing) { + if (viddec_h264_get_is_non_existent(active_fs)) + skip_picture = 1; + else + skip_picture = 0; + } + + if (skip_picture == 0) { + if (pInfo->img.ThisPOC < active_fs->frame.poc) { + sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx-list0idx_1] = active_fs->frame.poc; + list0idx++; + } + } + } + } + + ///// Generate frame list from sorted fs + ///// + h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0); + for (idx = list0idx_1; idx < list0idx; idx++) + gen_pic_fs_list0[idx] = sort_fs_idc[idx-list0idx_1]; + + for (idx = 0; idx < list0idx_1; idx++) + gen_pic_fs_list1[list0idx-list0idx_1+idx] = gen_pic_fs_list0[idx]; + + for (idx = list0idx_1; idx < list0idx; idx++) + gen_pic_fs_list1[idx-list0idx_1] = gen_pic_fs_list0[idx]; + + ///// Generate List_X0 + ///// + p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0); + + for (idx = 0; idx < p_dpb->listXsize[0]; idx++) + p_dpb->listX_0[idx] = gen_pic_pic_list[idx]; + + //// Generate List X1 + //// + p_dpb->listXsize[1] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list1, pInfo->img.structure, list0idx, 0); + + for (idx = 0; idx < p_dpb->listXsize[1]; idx++) + p_dpb->listX_1[idx] = gen_pic_pic_list[idx]; + + ///////////////////////////////////////////// B1: long term handling + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx]; + list_sort_number[listltidx] = active_fs->long_term_frame_idx; + listltidx++; + } + + h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0); + for (idx = 0; idx < listltidx; idx++) + 
gen_pic_fs_listlt[idx] = sort_fs_idc[idx]; + + list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1); + + for (idx = 0; idx < list0idx_1; idx++) + { + p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx]; + p_dpb->listX_1[p_dpb->listXsize[1]+idx] = gen_pic_pic_list[idx]; + } + + p_dpb->listXsize[0] += list0idx_1; + p_dpb->listXsize[1] += list0idx_1; + } + } + + // Setup initial list sizes at this point + p_dpb->nInitListSize[0] = p_dpb->listXsize[0]; + p_dpb->nInitListSize[1] = p_dpb->listXsize[1]; + if (pInfo->SliceHeader.slice_type != h264_PtypeI) + { + if ((p_dpb->listXsize[0]==p_dpb->listXsize[1]) && (p_dpb->listXsize[0] > 1)) + { + // check if lists are identical, if yes swap first two elements of listX[1] + diff = 0; + for (idx = 0; idx < p_dpb->listXsize[0]; idx = idx + 1) + { + if (p_dpb->listX_0[idx] != p_dpb->listX_1[idx]) diff = 1; + } + + + if (!(diff)) + { + list_idc = p_dpb->listX_1[0]; + p_dpb->listX_1[0] = p_dpb->listX_1[1]; + p_dpb->listX_1[1] = list_idc; + } + } + + // set max size + if (p_dpb->listXsize[0] > pInfo->SliceHeader.num_ref_idx_l0_active) + { + p_dpb->listXsize[0] = pInfo->SliceHeader.num_ref_idx_l0_active; + } + + + if (p_dpb->listXsize[1] > pInfo->SliceHeader.num_ref_idx_l1_active) + { + p_dpb->listXsize[1] = pInfo->SliceHeader.num_ref_idx_l1_active; + } + + + + } + + + + /// DPB reorder list + h264_dpb_reorder_lists(pInfo); + + return; } //// End of init_dpb_list @@ -898,40 +898,40 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) // static frame_param_ptr h264_dpb_get_short_term_pic(h264_Info * pInfo,int32_t pic_num, int32_t *bottom_field_bit) { - register uint32_t idx; - register frame_param_ptr temp_fs; - - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - - *bottom_field_bit = 0; - for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) - { - temp_fs = &p_dpb->fs[p_dpb->fs_ref_idc[idx]]; - if (pInfo->SliceHeader.structure == FRAME) - { - if(temp_fs->frame.used_for_reference == 3) - if (!(temp_fs->frame.is_long_term)) - if (temp_fs->frame.pic_num == pic_num) return temp_fs; - } - else // current picture is a field - { - if (temp_fs->frame.used_for_reference&0x1) - if (!(temp_fs->top_field.is_long_term)) - if (temp_fs->top_field.pic_num == pic_num) - { - return temp_fs; - } - - if (temp_fs->frame.used_for_reference&0x2) - if (!(temp_fs->bottom_field.is_long_term)) - if (temp_fs->bottom_field.pic_num == pic_num) - { - *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1); - return temp_fs; - } - } - } - return NULL; + register uint32_t idx; + register frame_param_ptr temp_fs; + + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + *bottom_field_bit = 0; + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + temp_fs = &p_dpb->fs[p_dpb->fs_ref_idc[idx]]; + if (pInfo->SliceHeader.structure == FRAME) + { + if (temp_fs->frame.used_for_reference == 3) + if (!(temp_fs->frame.is_long_term)) + if (temp_fs->frame.pic_num == pic_num) return temp_fs; + } + else // current picture is a field + { + if (temp_fs->frame.used_for_reference&0x1) + if (!(temp_fs->top_field.is_long_term)) + if (temp_fs->top_field.pic_num == pic_num) + { + return temp_fs; + } + + if (temp_fs->frame.used_for_reference&0x2) + if (!(temp_fs->bottom_field.is_long_term)) + if (temp_fs->bottom_field.pic_num == pic_num) + { + *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1); + return temp_fs; + } + } + } + return NULL; } /* 
------------------------------------------------------------------------------------------ */ @@ -945,38 +945,38 @@ static frame_param_ptr h264_dpb_get_short_term_pic(h264_Info * pInfo,int32_t pic static frame_param_ptr h264_dpb_get_long_term_pic(h264_Info * pInfo,int32_t long_term_pic_num, int32_t *bottom_field_bit) { - register uint32_t idx; - register frame_param_ptr temp_fs; - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - - *bottom_field_bit = 0; - for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) - { - temp_fs = &p_dpb->fs[p_dpb->fs_ltref_idc[idx]]; - if (pInfo->SliceHeader.structure == FRAME) - { - if (temp_fs->frame.used_for_reference == 3) - if (temp_fs->frame.is_long_term) - if (temp_fs->frame.long_term_pic_num == long_term_pic_num) - return temp_fs; - } - else - { - if (temp_fs->frame.used_for_reference&0x1) - if (temp_fs->top_field.is_long_term) - if (temp_fs->top_field.long_term_pic_num == long_term_pic_num) - return temp_fs; - - if (temp_fs->frame.used_for_reference&0x2) - if (temp_fs->bottom_field.is_long_term) - if (temp_fs->bottom_field.long_term_pic_num == long_term_pic_num) - { - *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1); - return temp_fs; - } - } - } - return NULL; + register uint32_t idx; + register frame_param_ptr temp_fs; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + *bottom_field_bit = 0; + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + temp_fs = &p_dpb->fs[p_dpb->fs_ltref_idc[idx]]; + if (pInfo->SliceHeader.structure == FRAME) + { + if (temp_fs->frame.used_for_reference == 3) + if (temp_fs->frame.is_long_term) + if (temp_fs->frame.long_term_pic_num == long_term_pic_num) + return temp_fs; + } + else + { + if (temp_fs->frame.used_for_reference&0x1) + if (temp_fs->top_field.is_long_term) + if (temp_fs->top_field.long_term_pic_num == long_term_pic_num) + return temp_fs; + + if (temp_fs->frame.used_for_reference&0x2) + if (temp_fs->bottom_field.is_long_term) + if (temp_fs->bottom_field.long_term_pic_num == long_term_pic_num) + { + *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1); + return temp_fs; + } + } + } + return NULL; } /* ------------------------------------------------------------------------------------------ */ @@ -988,368 +988,368 @@ static frame_param_ptr h264_dpb_get_long_term_pic(h264_Info * pInfo,int32_t long // Used to sort a list based on a corresponding sort indices // -struct list_value_t +struct list_value_t { - int32_t value; - struct list_value_t *next; + int32_t value; + struct list_value_t *next; }; struct linked_list_t { - struct list_value_t *begin; - struct list_value_t *end; - struct list_value_t *entry; - struct list_value_t *prev_entry; - struct list_value_t list[32]; + struct list_value_t *begin; + struct list_value_t *end; + struct list_value_t *entry; + struct list_value_t *prev_entry; + struct list_value_t list[32]; }; static void linked_list_initialize (struct linked_list_t *lp, uint8_t *vp, int32_t size) { - struct list_value_t *lvp; - - lvp = lp->list; - lp->begin = lvp; - lp->entry = lvp; - lp->end = lvp + (size-1); - lp->prev_entry = NULL; - - while (lvp <= lp->end) - { - lvp->value = *(vp++); - lvp->next = lvp + 1; - lvp++; - } - lp->end->next = NULL; - return; + struct list_value_t *lvp; + + lvp = lp->list; + lp->begin = lvp; + lp->entry = lvp; + lp->end = lvp + (size-1); + lp->prev_entry = NULL; + + while (lvp <= lp->end) + { + lvp->value = *(vp++); + lvp->next = lvp + 1; + lvp++; + } + lp->end->next = NULL; + return; } /* 
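------------------------------------------------------------------------------------------ */

/* Usage illustration (hypothetical demo, arbitrary values): the structure
   above is a fixed pool of 32 nodes threaded through ->next, so initialising
   it and draining it without any reorder calls is an identity copy. */
#if 0   /* illustration only; linked_list_output() is defined further below */
static void linked_list_identity_demo(void)
{
    uint8_t  in[4] = { 9, 4, 7, 1 };
    int32_t  out[4];
    struct linked_list_t ll;

    linked_list_initialize(&ll, in, 4);   /* begin/entry -> 9, end -> 1 */
    linked_list_output(&ll, out);         /* out == { 9, 4, 7, 1 } */
}
#endif

/*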
------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ static void linked_list_reorder (struct linked_list_t *lp, int32_t list_value) { - register struct list_value_t *lvp = lp->entry; - register struct list_value_t *lvp_prev; - - if (lvp == NULL) { - lp->end->value = list_value; // replace the end entry - } else if ((lp->begin==lp->end)||(lvp==lp->end)) // replace the begin/end entry and set the entry to NULL - { - lp->entry->value = list_value; - lp->prev_entry = lp->entry; - lp->entry = NULL; - } - else if (lvp->value==list_value) // the entry point matches - { - lp->prev_entry = lvp; - lp->entry = lvp->next; - } - else if (lvp->next == lp->end) // the entry is just before the end - { - // replace the end and swap the end and entry points - // lvp - // prev_entry => entry => old_end - // old_end & new_prev_entry => new_end & entry - lp->end->value = list_value; - - if (lp->prev_entry) - lp->prev_entry->next = lp->end; - else - lp->begin = lp->end; - - lp->prev_entry = lp->end; - lp->end->next = lvp; - lp->end = lvp; - lvp->next = NULL; - } - else - { - lvp_prev = NULL; - while (lvp->next) // do not check the end but we'll be in the loop at least once - { - if (lvp->value == list_value) break; - lvp_prev = lvp; - lvp = lvp->next; - } - lvp->value = list_value; // force end matches - - // remove lvp from the list - lvp_prev->next = lvp->next; - if (lvp==lp->end) lp->end = lvp_prev; - - // insert lvp in front of lp->entry - if (lp->entry==lp->begin) - { - lvp->next = lp->begin; - lp->begin = lvp; - } - else - { - lvp->next = lp->entry; - lp->prev_entry->next = lvp; - } - lp->prev_entry = lvp; - } - return; + register struct list_value_t *lvp = lp->entry; + register struct list_value_t *lvp_prev; + + if (lvp == NULL) { + lp->end->value = list_value; // replace the end entry + } else if ((lp->begin==lp->end)||(lvp==lp->end)) // replace the begin/end entry and set the entry to NULL + { + lp->entry->value = list_value; + lp->prev_entry = lp->entry; + lp->entry = NULL; + } + else if (lvp->value==list_value) // the entry point matches + { + lp->prev_entry = lvp; + lp->entry = lvp->next; + } + else if (lvp->next == lp->end) // the entry is just before the end + { + // replace the end and swap the end and entry points + // lvp + // prev_entry => entry => old_end + // old_end & new_prev_entry => new_end & entry + lp->end->value = list_value; + + if (lp->prev_entry) + lp->prev_entry->next = lp->end; + else + lp->begin = lp->end; + + lp->prev_entry = lp->end; + lp->end->next = lvp; + lp->end = lvp; + lvp->next = NULL; + } + else + { + lvp_prev = NULL; + while (lvp->next) // do not check the end but we'll be in the loop at least once + { + if (lvp->value == list_value) break; + lvp_prev = lvp; + lvp = lvp->next; + } + lvp->value = list_value; // force end matches + + // remove lvp from the list + lvp_prev->next = lvp->next; + if (lvp==lp->end) lp->end = lvp_prev; + + // insert lvp in front of lp->entry + if (lp->entry==lp->begin) + { + lvp->next = lp->begin; + lp->begin = lvp; + } + else + { + lvp->next = lp->entry; + lp->prev_entry->next = lvp; + } + lp->prev_entry = lvp; + } + return; } /* ------------------------------------------------------------------------------------------ */ /*
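------------------------------------------------------------------------------------------ */

/* Traced example (hypothetical demo, arbitrary values) of the function above:
   each reorder call moves the requested value to the front of the
   not-yet-reordered part of the list, or overwrites the end entry when no
   node matches.  The intermediate states follow from the code above. */
#if 0   /* illustration only */
static void linked_list_reorder_demo(void)
{
    uint8_t  in[4] = { 10, 11, 12, 13 };
    int32_t  out[4];
    struct linked_list_t ll;

    linked_list_initialize(&ll, in, 4);   /* 10 11 12 13 */
    linked_list_reorder(&ll, 12);         /* 12 10 11 13 */
    linked_list_reorder(&ll, 13);         /* 12 13 10 11 */
    linked_list_output(&ll, out);         /* out == { 12, 13, 10, 11 } */
}
#endif

/*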
------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ static void linked_list_output (struct linked_list_t *lp, int32_t *vp) { - register int32_t *ip1; - register struct list_value_t *lvp; - - lvp = lp->begin; - ip1 = vp; - while (lvp) - { - *(ip1++) = lvp->value; - lvp = lvp->next; - } - return; + register int32_t *ip1; + register struct list_value_t *lvp; + + lvp = lp->begin; + ip1 = vp; + while (lvp) + { + *(ip1++) = lvp->value; + lvp = lvp->next; + } + return; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ int32_t h264_dpb_reorder_ref_pic_list(h264_Info * pInfo,int32_t list_num, int32_t num_ref_idx_active) { - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - uint8_t *remapping_of_pic_nums_idc; - list_reordering_num_t *list_reordering_num; - int32_t bottom_field_bit; - - int32_t maxPicNum, currPicNum, picNumLXNoWrap, picNumLXPred, pic_num; - int32_t refIdxLX; - int32_t i; - - int32_t PicList[32] = {0}; - struct linked_list_t ll; - struct linked_list_t *lp = &ll; // should consider using the scratch space - - // declaring these below as registers gave me 23 cy/MB for the worst frames in Allegro_Combined_CABAC_07_HD, YHu - register frame_param_ptr temp_fs; - register int32_t temp; - register uint8_t *ip1; - - maxPicNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); - - - if (list_num == 0) // i.e. list 0 - { - ip1 = p_dpb->listX_0; - remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l0.reordering_of_pic_nums_idc; - list_reordering_num = pInfo->SliceHeader.sh_refpic_l0.list_reordering_num; - } - else - { - ip1 = p_dpb->listX_1; - remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l1.reordering_of_pic_nums_idc; - list_reordering_num = pInfo->SliceHeader.sh_refpic_l1.list_reordering_num; - } - - - linked_list_initialize (lp, ip1, num_ref_idx_active); - - currPicNum = pInfo->SliceHeader.frame_num; - if (pInfo->SliceHeader.structure != FRAME) - { - - /* The reason it is + 1 I think, is because the list is based on polarity - expand later...
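   (The + 1 follows from H.264 subclause 8.2.4.1: when the current picture is
   a field, every reference frame contributes two field picture numbers, one
   per parity, so MaxPicNum doubles and the current field takes
   CurrPicNum = 2 * frame_num + 1, the larger of its frame's two values.)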
- */ - maxPicNum <<= 1; - currPicNum <<= 1; - currPicNum++; - } - - picNumLXPred = currPicNum; - refIdxLX = 0; - - for (i = 0; remapping_of_pic_nums_idc[i] != 3; i++) - { - if(i > MAX_NUM_REF_FRAMES) - { - break; - } - - if (remapping_of_pic_nums_idc[i] < 2) // - short-term re-ordering - { - temp = (list_reordering_num[i].abs_diff_pic_num_minus1 + 1); - if (remapping_of_pic_nums_idc[i] == 0) - { - temp = picNumLXPred - temp; - if (temp < 0 ) picNumLXNoWrap = temp + maxPicNum; - else picNumLXNoWrap = temp; - } - else // (remapping_of_pic_nums_idc[i] == 1) - { - temp += picNumLXPred; - if (temp >= maxPicNum) picNumLXNoWrap = temp - maxPicNum; - else picNumLXNoWrap = temp; - } - - // Updates for next iteration of the loop - picNumLXPred = picNumLXNoWrap; - - if (picNumLXNoWrap > currPicNum ) pic_num = picNumLXNoWrap - maxPicNum; - else pic_num = picNumLXNoWrap; - - temp_fs = h264_dpb_get_short_term_pic(pInfo, pic_num, &bottom_field_bit); - if (temp_fs) - { - temp = bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc); - linked_list_reorder (lp, temp); - } - } - else //(remapping_of_pic_nums_idc[i] == 2) long-term re-ordering - { - pic_num = list_reordering_num[i].long_term_pic_num; - - temp_fs = h264_dpb_get_long_term_pic(pInfo, pic_num, &bottom_field_bit); - if (temp_fs) - { - temp = PUT_LIST_LONG_TERM_BITS(1) + bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc); - linked_list_reorder (lp, temp); - } - } - } - - linked_list_output (lp, PicList); - - if(0 == list_num ) - { - for(i=0; i<num_ref_idx_active; i++) - { - pInfo->slice_ref_list0[i]=(uint8_t)PicList[i]; - } - } - else - { - for(i=0; i<num_ref_idx_active; i++) - { - pInfo->slice_ref_list1[i]=(uint8_t)PicList[i]; - } - } - - - // Instead of updating the now reordered list here, just write it down... - // This way, we can continue to hold the initialised list in p_dpb->listX_0 - // and therefore not need to update it every slice - - //h264_dpb_write_list(list_num, PicList, num_ref_idx_active); - - return num_ref_idx_active; -} - -/* ------------------------------------------------------------------------------------------ */ -/* ------------------------------------------------------------------------------------------ */ -/* ------------------------------------------------------------------------------------------ */ - + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + uint8_t *remapping_of_pic_nums_idc; + list_reordering_num_t *list_reordering_num; + int32_t bottom_field_bit; + + int32_t maxPicNum, currPicNum, picNumLXNoWrap, picNumLXPred, pic_num; + int32_t refIdxLX; + int32_t i; + + int32_t PicList[32] = {0}; + struct linked_list_t ll; + struct linked_list_t *lp = &ll; // should consider using the scratch space + + // declaring these below as registers gave me 23 cy/MB for the worst frames in Allegro_Combined_CABAC_07_HD, YHu + register frame_param_ptr temp_fs; + register int32_t temp; + register uint8_t *ip1; + + maxPicNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); + + + if (list_num == 0) // i.e. list 0 + { + ip1 = p_dpb->listX_0; + remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l0.reordering_of_pic_nums_idc; + list_reordering_num = pInfo->SliceHeader.sh_refpic_l0.list_reordering_num; + } + else + { + ip1 = p_dpb->listX_1; + remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l1.reordering_of_pic_nums_idc; + list_reordering_num = pInfo->SliceHeader.sh_refpic_l1.list_reordering_num; + } + + + linked_list_initialize (lp, ip1, num_ref_idx_active); + + currPicNum = pInfo->SliceHeader.frame_num; + if (pInfo->SliceHeader.structure != FRAME) + { + + /* The reason it is + 1 I think, is because the list is based on polarity
+ expand later... + */ + maxPicNum <<= 1; + currPicNum <<= 1; + currPicNum++; + } + + picNumLXPred = currPicNum; + refIdxLX = 0; + + for (i = 0; remapping_of_pic_nums_idc[i] != 3; i++) + { + if (i > MAX_NUM_REF_FRAMES) + { + break; + } + + if (remapping_of_pic_nums_idc[i] < 2) // - short-term re-ordering + { + temp = (list_reordering_num[i].abs_diff_pic_num_minus1 + 1); + if (remapping_of_pic_nums_idc[i] == 0) + { + temp = picNumLXPred - temp; + if (temp < 0 ) picNumLXNoWrap = temp + maxPicNum; + else picNumLXNoWrap = temp; + } + else // (remapping_of_pic_nums_idc[i] == 1) + { + temp += picNumLXPred; + if (temp >= maxPicNum) picNumLXNoWrap = temp - maxPicNum; + else picNumLXNoWrap = temp; + } + + // Updates for next iteration of the loop + picNumLXPred = picNumLXNoWrap; + + if (picNumLXNoWrap > currPicNum ) pic_num = picNumLXNoWrap - maxPicNum; + else pic_num = picNumLXNoWrap; + + temp_fs = h264_dpb_get_short_term_pic(pInfo, pic_num, &bottom_field_bit); + if (temp_fs) + { + temp = bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc); + linked_list_reorder (lp, temp); + } + } + else //(remapping_of_pic_nums_idc[i] == 2) long-term re-ordering + { + pic_num = list_reordering_num[i].long_term_pic_num; + + temp_fs = h264_dpb_get_long_term_pic(pInfo, pic_num, &bottom_field_bit); + if (temp_fs) + { + temp = PUT_LIST_LONG_TERM_BITS(1) + bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc); + linked_list_reorder (lp, temp); + } + } + } + + linked_list_output (lp, PicList); + + if (0 == list_num ) + { + for (i=0; i<num_ref_idx_active; i++) + { + pInfo->slice_ref_list0[i]=(uint8_t)PicList[i]; + } + } + else + { + for (i=0; i<num_ref_idx_active; i++) + { + pInfo->slice_ref_list1[i]=(uint8_t)PicList[i]; + } + } + + + // Instead of updating the now reordered list here, just write it down... + // This way, we can continue to hold the initialised list in p_dpb->listX_0 + // and therefore not need to update it every slice + + //h264_dpb_write_list(list_num, PicList, num_ref_idx_active); + + return num_ref_idx_active; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + void h264_dpb_RP_check_list (h264_Info * pInfo) { - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - uint8_t *p_list = pInfo->slice_ref_list0; - - // - // If decoding starts from an RP without an exact entry point, all B frames belonging to the previous GOP should be thrown away!
- // - - if((pInfo->SliceHeader.slice_type == h264_PtypeB)&&(pInfo->sei_b_state_ready ==0) && pInfo->sei_rp_received) { - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); - } - - - // - // Repair the Ref list if it is damaged (RP recovery only) - // - if((pInfo->SliceHeader.slice_type == h264_PtypeP) && pInfo->sei_rp_received) - { - - int32_t idx, rp_found = 0; - - if( ((pInfo->SliceHeader.num_ref_idx_l0_active == 1)&&(pInfo->SliceHeader.structure == FRAME)) || - ((pInfo->SliceHeader.num_ref_idx_l0_active == 2)&&(pInfo->SliceHeader.structure != FRAME)) ) - { - if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) - { - p_list = pInfo->slice_ref_list0; - } - else - { - p_list = pInfo->dpb.listX_0; - //pInfo->sei_rp_received = 0; - //return; - } - - - for(idx = 0; idx < p_dpb->used_size; idx++) { - if(p_dpb->fs_dpb_idc[idx] == pInfo->last_I_frame_idc) { - rp_found = 1; - break; - } - } - if(rp_found) { -#if 0 - int32_t poc; - - ///// Clear long-term ref list - for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) - { - h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ltref_idc[0]); - h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]); - } - - ///// Clear short-term ref list - //while(p_dpb->used_size>1) - for(idx = 0; idx < p_dpb->used_size; idx++) - { - int32_t idx_pos; - //// find smallest non-output POC - h264_dpb_get_smallest_poc(p_dpb, &poc, &idx_pos); - - //// Remove all frames in previous GOP - if ((idx_pos != MPD_DPB_FS_NULL_IDC) && (p_dpb->fs_dpb_idc[idx_pos] != pInfo->last_I_frame_idc)) - { - // Remove from ref-list - h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx_pos]); - h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx_pos]); - - // Output from DPB - //h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - //if((active_fs->is_output == 0) && (active_fs->is_non_existent == 0)) - { - //int32_t existing; - //h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[idx], 0, &existing); - //p_dpb->last_output_poc = poc; - } - //h264_dpb_remove_frame_from_dpb(p_dpb, idx); // Remove dpb.fs_dpb_idc[pos] - - } - } + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + uint8_t *p_list = pInfo->slice_ref_list0; + + // + // If decoding starts from an RP without an exact entry point, all B frames belonging to the previous GOP should be thrown away!
+ // + + if ((pInfo->SliceHeader.slice_type == h264_PtypeB)&&(pInfo->sei_b_state_ready ==0) && pInfo->sei_rp_received) { + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + } + + + // + // Repair the Ref list if it is damaged (RP recovery only) + // + if ((pInfo->SliceHeader.slice_type == h264_PtypeP) && pInfo->sei_rp_received) + { + + int32_t idx, rp_found = 0; + + if ( ((pInfo->SliceHeader.num_ref_idx_l0_active == 1)&&(pInfo->SliceHeader.structure == FRAME)) || + ((pInfo->SliceHeader.num_ref_idx_l0_active == 2)&&(pInfo->SliceHeader.structure != FRAME)) ) + { + if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) + { + p_list = pInfo->slice_ref_list0; + } + else + { + p_list = pInfo->dpb.listX_0; + //pInfo->sei_rp_received = 0; + //return; + } + + + for (idx = 0; idx < p_dpb->used_size; idx++) { + if (p_dpb->fs_dpb_idc[idx] == pInfo->last_I_frame_idc) { + rp_found = 1; + break; + } + } + if (rp_found) { +#if 0 + int32_t poc; + + ///// Clear long-term ref list + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ltref_idc[0]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]); + } + + ///// Clear short-term ref list + //while(p_dpb->used_size>1) + for (idx = 0; idx < p_dpb->used_size; idx++) + { + int32_t idx_pos; + //// find smallest non-output POC + h264_dpb_get_smallest_poc(p_dpb, &poc, &idx_pos); + + //// Remove all frames in previous GOP + if ((idx_pos != MPD_DPB_FS_NULL_IDC) && (p_dpb->fs_dpb_idc[idx_pos] != pInfo->last_I_frame_idc)) + { + // Remove from ref-list + h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx_pos]); + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx_pos]); + + // Output from DPB + //h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + //if((active_fs->is_output == 0) && (active_fs->is_non_existent == 0)) + { + //int32_t existing; + //h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[idx], 0, &existing); + //p_dpb->last_output_poc = poc; + } + //h264_dpb_remove_frame_from_dpb(p_dpb, idx); // Remove dpb.fs_dpb_idc[pos] + + } + } #endif - ///// Set the reference to last I frame - if( (pInfo->last_I_frame_idc!=255)&&(pInfo->last_I_frame_idc!=p_list[0])) - { - /// Repair the reference list now - h264_dpb_unmark_for_reference(p_dpb, p_list[0]); - h264_dpb_remove_ref_list(p_dpb, p_list[0]); - p_list[0] = pInfo->last_I_frame_idc; - if (pInfo->SliceHeader.structure != FRAME) - p_list[1] = (pInfo->last_I_frame_idc ^ 0x20); - } - } - - pInfo->sei_rp_received = 0; - pInfo->sei_b_state_ready = 1; - - } - - - return; + ///// Set the reference to last I frame + if ( (pInfo->last_I_frame_idc!=255)&&(pInfo->last_I_frame_idc!=p_list[0])) + { + /// Repair the reference list now + h264_dpb_unmark_for_reference(p_dpb, p_list[0]); + h264_dpb_remove_ref_list(p_dpb, p_list[0]); + p_list[0] = pInfo->last_I_frame_idc; + if (pInfo->SliceHeader.structure != FRAME) + p_list[1] = (pInfo->last_I_frame_idc ^ 0x20); + } + } + } + + pInfo->sei_rp_received = 0; + pInfo->sei_b_state_ready = 1; + + } + + + return; } @@ -1361,53 +1361,53 @@ void h264_dpb_RP_check_list (h264_Info * pInfo) // // Used to sort a list based on corresponding sort indices // - + void h264_dpb_reorder_lists(h264_Info * pInfo) { - int32_t currSliceType = pInfo->SliceHeader.slice_type; - - if (currSliceType == h264_PtypeP ) - { - /////////////////////////////////////////////// Reordering reference list for P slice - /// Forward reordering - if
(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) - h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active); - else - { - - } - pInfo->dpb.listXsize[0]=pInfo->SliceHeader.num_ref_idx_l0_active; - } else if (currSliceType == h264_PtypeB) - { - /////////////////////////////////////////////// Reordering reference list for B slice - /// Forward reordering - if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) - h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active); - else - { - - } - pInfo->dpb.listXsize[0]=pInfo->SliceHeader.num_ref_idx_l0_active; - - /// Backward reordering - if (pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag) - h264_dpb_reorder_ref_pic_list(pInfo, 1, pInfo->SliceHeader.num_ref_idx_l1_active); - else - { - - } - pInfo->dpb.listXsize[1]=pInfo->SliceHeader.num_ref_idx_l1_active; - } - - //// Check if need recover reference list with previous recovery point - if(!pInfo->img.second_field) - { - h264_dpb_RP_check_list(pInfo); - } - - - return; -} + int32_t currSliceType = pInfo->SliceHeader.slice_type; + + if (currSliceType == h264_PtypeP ) + { + /////////////////////////////////////////////// Reordering reference list for P slice + /// Forward reordering + if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) + h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active); + else + { + + } + pInfo->dpb.listXsize[0]=pInfo->SliceHeader.num_ref_idx_l0_active; + } else if (currSliceType == h264_PtypeB) + { + /////////////////////////////////////////////// Reordering reference list for B slice + /// Forward reordering + if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) + h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active); + else + { + + } + pInfo->dpb.listXsize[0]=pInfo->SliceHeader.num_ref_idx_l0_active; + + /// Backward reordering + if (pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag) + h264_dpb_reorder_ref_pic_list(pInfo, 1, pInfo->SliceHeader.num_ref_idx_l1_active); + else + { + + } + pInfo->dpb.listXsize[1]=pInfo->SliceHeader.num_ref_idx_l1_active; + } + + //// Check if need recover reference list with previous recovery point + if (!pInfo->img.second_field) + { + h264_dpb_RP_check_list(pInfo); + } + + + return; +} ////////////////////////////////////////// DPB management ////////////////////// @@ -1418,21 +1418,21 @@ void h264_dpb_reorder_lists(h264_Info * pInfo) // static int32_t avc_dpb_get_non_output_frame_number(h264_Info * pInfo) { - int32_t idx; - int32_t number=0; - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - - for (idx = 0; idx < p_dpb->used_size; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - - if (viddec_h264_get_is_output(active_fs) == 0) - { - (number)++; - } - } - - return number; + int32_t idx; + int32_t number=0; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + for (idx = 0; idx < p_dpb->used_size; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + + if (viddec_h264_get_is_output(active_fs) == 0) + { + (number)++; + } + } + + return number; } @@ -1443,236 +1443,239 @@ static int32_t avc_dpb_get_non_output_frame_number(h264_Info * pInfo) void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExisting, int32_t use_old) { - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - - int32_t used_for_reference; - int32_t is_direct_output; - int32_t second_field_stored = 0; - int32_t poc; - int32_t pos; - int32_t flag; - 
int32_t first_field_non_ref = 0; - int32_t idr_flag; - - if(NonExisting) { - if(p_dpb->fs_non_exist_idc == MPD_DPB_FS_NULL_IDC) - return; - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); - } else { - if(p_dpb->fs_dec_idc == MPD_DPB_FS_NULL_IDC) - return; - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - } - - if(NonExisting == 0) - { - //active_fs->sps_disp_index = (next_sps_disp_entry == 0)? 7 : next_sps_disp_entry - 1; - pInfo->img.last_has_mmco_5 = 0; - pInfo->img.last_pic_bottom_field = pInfo->img.bottom_field_flag; - - //used_for_reference = (use_old) ? !(old_pInfo->img.old_disposable_flag) : !(pInfo->img.disposable_flag); - used_for_reference = (use_old) ? !(pInfo->old_slice.nal_ref_idc==0) : !(pInfo->SliceHeader.nal_ref_idc==0); - - switch (viddec_h264_get_dec_structure(active_fs)) - { - case(TOP_FIELD) : { - active_fs->top_field.used_for_reference = used_for_reference; - viddec_h264_set_is_top_used(active_fs, 1); - //active_fs->crc_field_coded = 1; - }break; - case(BOTTOM_FIELD): { - active_fs->bottom_field.used_for_reference = used_for_reference << 1; - viddec_h264_set_is_bottom_used(active_fs, 1); - //active_fs->crc_field_coded = 1; - }break; - default: { - active_fs->frame.used_for_reference = used_for_reference?3:0; - viddec_h264_set_is_frame_used(active_fs, 3); - //if(pInfo->img.MbaffFrameFlag) active_fs->crc_field_coded = 1; - - }break; - } - - //freeze_assert = use_old ? old_pInfo->img.sei_freeze_this_image : pInfo->img.sei_freeze_this_image; - //if (freeze_assert) sei_information.disp_frozen = 1; - - idr_flag = use_old ? pInfo->old_slice.idr_flag : pInfo->SliceHeader.idr_flag; - if (idr_flag) { - h264_dpb_idr_memory_management (pInfo, &pInfo->active_SPS, pInfo->img.no_output_of_prior_pics_flag); - } else { - // adaptive memory management - if (used_for_reference & pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) { - h264_dpb_adaptive_memory_management(pInfo); - } - } - // Reset the active frame store - could have changed in mem management ftns - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - - if ((viddec_h264_get_dec_structure(active_fs) == TOP_FIELD)||(viddec_h264_get_dec_structure(active_fs) == BOTTOM_FIELD)) - { - // check for frame store with same pic_number -- always true in my case, YH - // when we allocate frame store for the second field, we make sure the frame store for the second - // field is the one that contains the first field of the frame- see h264_dpb_init_frame_store() - // This is different from JM model. - // In this way we don't need to move image data around and can reduce memory bandwidth. 
- // simply check if the check if the other field has been decoded or not - - if (viddec_h264_get_is_used(active_fs) != 0) - { - if(pInfo->img.second_field) - { - h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 0, NonExisting, use_old); - second_field_stored = 1; - } - } - } - } - else - { // Set up locals for non-existing frames - used_for_reference = 1; - - active_fs->frame.used_for_reference = used_for_reference?3:0; - viddec_h264_set_is_frame_used(active_fs, 3); - viddec_h264_set_dec_structure(active_fs, FRAME); - pInfo->img.structure = FRAME; - } - - is_direct_output = 0; - if (NonExisting == 0) - { - if(p_dpb->used_size >= p_dpb->BumpLevel) - { - // non-reference frames may be output directly - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - - if ((used_for_reference == 0) && (viddec_h264_get_is_used(active_fs) == 3)) - { - h264_dpb_get_smallest_poc (p_dpb, &poc, &pos); - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - if ((pos == MPD_DPB_FS_NULL_IDC) || (pInfo->img.ThisPOC < poc)) - { - is_direct_output = 1; - } - } - } - } - - if (NonExisting) { - h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames); - } else if(pInfo->SliceHeader.idr_flag == 0) { - if(used_for_reference){ - if(pInfo->img.second_field == 0) { - if (pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag == 0) { - h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames); - } - } - } - } - - h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); - - //if (is_direct_output == 0) - { - if ((pInfo->img.second_field == 0) || (NonExisting)) - { - h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 1, NonExisting, use_old); - } - - // In an errored stream we saw a condition where - // p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer > p_dpb->BumpLevel, - // which in itself is an error, but this means first_field_non_ref will - // not get set and causes problems for h264_dpb_queue_update() - if((pInfo->img.structure != FRAME) && (pInfo->img.second_field == 0)) { - if(used_for_reference == 0) - if(p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer == p_dpb->BumpLevel) - first_field_non_ref = 1; - } - - } - - if(NonExisting) - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); - else - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - - if(NonExisting == 0) - { - if((pInfo->img.second_field == 1) || (pInfo->img.structure == FRAME)) - { - //h264_send_new_decoded_frame(); - if((p_dpb->OutputCtrl) && (is_direct_output == 0)) - h264_dpb_output_one_frame_from_dpb(pInfo, 0, 0,pInfo->active_SPS.num_ref_frames); - - // Pictures inserted by this point - check if we have reached the specified output - // level (if one has been specified) so we can begin on next call - - /* - Fixed HSD 212625---------------should compare OutputLevel with non-output frame number in dpb, not the used number in dpb - if((p_dpb->OutputLevelValid)&&(p_dpb->OutputCtrl == 0)) - { - if(p_dpb->used_size == p_dpb->OutputLevel) - p_dpb->OutputCtrl = 1; - } - */ - - if(p_dpb->OutputLevelValid) - { - int32_t non_output_frame_number=0; - non_output_frame_number = avc_dpb_get_non_output_frame_number(pInfo); - - if(non_output_frame_number == p_dpb->OutputLevel) - p_dpb->OutputCtrl = 1; - else - p_dpb->OutputCtrl = 0; - } - else { - p_dpb->OutputCtrl = 0; - } - } - } - - while(p_dpb->used_size > (p_dpb->BumpLevel + first_field_non_ref)) - //while(p_dpb->used_size > p_dpb->BumpLevel) - { - h264_dpb_queue_update(pInfo, 1, 0, 
0,pInfo->active_SPS.num_ref_frames); // flush a frame - //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); - } - - // - // Do not output "direct output" pictures until the sempahore has been set that the pic is - // decoded!! - // - if(is_direct_output) { - h264_dpb_queue_update(pInfo, 1, 1, 0,pInfo->active_SPS.num_ref_frames); - //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); - } - - // - // Add reference pictures into Reference list - // - if(used_for_reference) { - h264_dpb_insert_ref_lists(&pInfo->dpb, NonExisting); - } - - h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); - - - return; -} ////////////// End of DPB store pic + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + int32_t used_for_reference; + int32_t is_direct_output; + int32_t second_field_stored = 0; + int32_t poc; + int32_t pos; + int32_t flag; + int32_t first_field_non_ref = 0; + int32_t idr_flag; + + if (NonExisting) { + if (p_dpb->fs_non_exist_idc == MPD_DPB_FS_NULL_IDC) + return; + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); + } else { + if (p_dpb->fs_dec_idc == MPD_DPB_FS_NULL_IDC) + return; + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + } + if (NonExisting == 0) + { + //active_fs->sps_disp_index = (next_sps_disp_entry == 0)? 7 : next_sps_disp_entry - 1; + pInfo->img.last_has_mmco_5 = 0; + pInfo->img.last_pic_bottom_field = pInfo->img.bottom_field_flag; -/* ------------------------------------------------------------------------------------------ */ -/* ------------------------------------------------------------------------------------------ */ -/* ------------------------------------------------------------------------------------------ */ -////////////////////////////////////////////////////////////////////////////// -// h264_dpb_insert_picture_in_dpb () -// -// Insert the decoded picture into the DPB. A free DPB position is necessary -// for frames, . -// This ftn tends to fill out the framestore's top level parameters from the -// storable picture's parameters within it. It is called from h264_dpb_store_picture_in_dpb() -// -// This function finishes by updating the reference lists - this means it must be called after + //used_for_reference = (use_old) ? !(old_pInfo->img.old_disposable_flag) : !(pInfo->img.disposable_flag); + used_for_reference = (use_old) ? !(pInfo->old_slice.nal_ref_idc==0) : !(pInfo->SliceHeader.nal_ref_idc==0); + + switch (viddec_h264_get_dec_structure(active_fs)) + { + case(TOP_FIELD) : { + active_fs->top_field.used_for_reference = used_for_reference; + viddec_h264_set_is_top_used(active_fs, 1); + //active_fs->crc_field_coded = 1; + } + break; + case(BOTTOM_FIELD): { + active_fs->bottom_field.used_for_reference = used_for_reference << 1; + viddec_h264_set_is_bottom_used(active_fs, 1); + //active_fs->crc_field_coded = 1; + } + break; + default: { + active_fs->frame.used_for_reference = used_for_reference?3:0; + viddec_h264_set_is_frame_used(active_fs, 3); + //if(pInfo->img.MbaffFrameFlag) active_fs->crc_field_coded = 1; + + } + break; + } + + //freeze_assert = use_old ? old_pInfo->img.sei_freeze_this_image : pInfo->img.sei_freeze_this_image; + //if (freeze_assert) sei_information.disp_frozen = 1; + + idr_flag = use_old ? 
pInfo->old_slice.idr_flag : pInfo->SliceHeader.idr_flag; + if (idr_flag) { + h264_dpb_idr_memory_management (pInfo, &pInfo->active_SPS, pInfo->img.no_output_of_prior_pics_flag); + } else { + // adaptive memory management + if (used_for_reference & pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) { + h264_dpb_adaptive_memory_management(pInfo); + } + } + // Reset the active frame store - could have changed in mem management ftns + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + if ((viddec_h264_get_dec_structure(active_fs) == TOP_FIELD)||(viddec_h264_get_dec_structure(active_fs) == BOTTOM_FIELD)) + { + // check for frame store with same pic_number -- always true in my case, YH + // when we allocate frame store for the second field, we make sure the frame store for the second + // field is the one that contains the first field of the frame- see h264_dpb_init_frame_store() + // This is different from JM model. + // In this way we don't need to move image data around and can reduce memory bandwidth. + // simply check if the check if the other field has been decoded or not + + if (viddec_h264_get_is_used(active_fs) != 0) + { + if (pInfo->img.second_field) + { + h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 0, NonExisting, use_old); + second_field_stored = 1; + } + } + } + } + else + { // Set up locals for non-existing frames + used_for_reference = 1; + + active_fs->frame.used_for_reference = used_for_reference?3:0; + viddec_h264_set_is_frame_used(active_fs, 3); + viddec_h264_set_dec_structure(active_fs, FRAME); + pInfo->img.structure = FRAME; + } + + is_direct_output = 0; + if (NonExisting == 0) + { + if (p_dpb->used_size >= p_dpb->BumpLevel) + { + // non-reference frames may be output directly + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + if ((used_for_reference == 0) && (viddec_h264_get_is_used(active_fs) == 3)) + { + h264_dpb_get_smallest_poc (p_dpb, &poc, &pos); + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + if ((pos == MPD_DPB_FS_NULL_IDC) || (pInfo->img.ThisPOC < poc)) + { + is_direct_output = 1; + } + } + } + } + + if (NonExisting) { + h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames); + } else if (pInfo->SliceHeader.idr_flag == 0) { + if (used_for_reference) { + if (pInfo->img.second_field == 0) { + if (pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag == 0) { + h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames); + } + } + } + } + + h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + + //if (is_direct_output == 0) + { + if ((pInfo->img.second_field == 0) || (NonExisting)) + { + h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 1, NonExisting, use_old); + } + + // In an errored stream we saw a condition where + // p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer > p_dpb->BumpLevel, + // which in itself is an error, but this means first_field_non_ref will + // not get set and causes problems for h264_dpb_queue_update() + if ((pInfo->img.structure != FRAME) && (pInfo->img.second_field == 0)) { + if (used_for_reference == 0) + if (p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer == p_dpb->BumpLevel) + first_field_non_ref = 1; + } + + } + + if (NonExisting) + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); + else + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + if (NonExisting == 0) + { + if ((pInfo->img.second_field == 1) || (pInfo->img.structure == FRAME)) + { + 
//h264_send_new_decoded_frame(); + if ((p_dpb->OutputCtrl) && (is_direct_output == 0)) + h264_dpb_output_one_frame_from_dpb(pInfo, 0, 0,pInfo->active_SPS.num_ref_frames); + + // Pictures inserted by this point - check if we have reached the specified output + // level (if one has been specified) so we can begin on next call + + /* + Fixed HSD 212625---------------should compare OutputLevel with non-output frame number in dpb, not the used number in dpb + if((p_dpb->OutputLevelValid)&&(p_dpb->OutputCtrl == 0)) + { + if(p_dpb->used_size == p_dpb->OutputLevel) + p_dpb->OutputCtrl = 1; + } + */ + + if (p_dpb->OutputLevelValid) + { + int32_t non_output_frame_number=0; + non_output_frame_number = avc_dpb_get_non_output_frame_number(pInfo); + + if (non_output_frame_number == p_dpb->OutputLevel) + p_dpb->OutputCtrl = 1; + else + p_dpb->OutputCtrl = 0; + } + else { + p_dpb->OutputCtrl = 0; + } + } + } + + while (p_dpb->used_size > (p_dpb->BumpLevel + first_field_non_ref)) + //while(p_dpb->used_size > p_dpb->BumpLevel) + { + h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame + //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + } + + // + // Do not output "direct output" pictures until the sempahore has been set that the pic is + // decoded!! + // + if (is_direct_output) { + h264_dpb_queue_update(pInfo, 1, 1, 0,pInfo->active_SPS.num_ref_frames); + //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + } + + // + // Add reference pictures into Reference list + // + if (used_for_reference) { + h264_dpb_insert_ref_lists(&pInfo->dpb, NonExisting); + } + + h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + + + return; +} ////////////// End of DPB store pic + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_insert_picture_in_dpb () +// +// Insert the decoded picture into the DPB. A free DPB position is necessary +// for frames, . +// This ftn tends to fill out the framestore's top level parameters from the +// storable picture's parameters within it. It is called from h264_dpb_store_picture_in_dpb() +// +// This function finishes by updating the reference lists - this means it must be called after // h264_dpb_sliding_window_memory_management() // // In the case of a frame it will call h264_dpb_split_field() @@ -1681,93 +1684,96 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference, int32_t add2dpb, int32_t NonExisting, int32_t use_old) { - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - - if(NonExisting == 0) { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - active_fs->frame_num = (use_old) ? 
pInfo->old_slice.frame_num : pInfo->SliceHeader.frame_num; - } - else { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); - active_fs->frame_num = active_fs->frame.pic_num; - } - - if (add2dpb) { - p_dpb->fs_dpb_idc[p_dpb->used_size] = active_fs->fs_idc; - p_dpb->used_size++; - } - - - switch (viddec_h264_get_dec_structure(active_fs)) - { - case FRAME :{ - viddec_h264_set_is_frame_used(active_fs, 3); - active_fs->frame.used_for_reference = used_for_reference?3:0; - if (used_for_reference) - { - active_fs->frame.used_for_reference = 3; - if (active_fs->frame.is_long_term) - viddec_h264_set_is_frame_long_term(active_fs, 3); - } - // Split frame to 2 fields for prediction - h264_dpb_split_field(pInfo); - - }break; - case TOP_FIELD :{ - viddec_h264_set_is_top_used(active_fs, 1); - - active_fs->top_field.used_for_reference = used_for_reference; - if (used_for_reference) - { - active_fs->frame.used_for_reference |= 0x1; - if (active_fs->top_field.is_long_term) - { - viddec_h264_set_is_top_long_term(active_fs, 1); - active_fs->long_term_frame_idx = active_fs->top_field.long_term_frame_idx; - } - } - if (viddec_h264_get_is_used(active_fs) == 3) { - h264_dpb_combine_field(use_old); // generate frame view - } - else - { - active_fs->frame.poc = active_fs->top_field.poc; - } - - }break; - case BOTTOM_FIELD :{ - viddec_h264_set_is_bottom_used(active_fs, 1); - - active_fs->bottom_field.used_for_reference = (used_for_reference<<1); - if (used_for_reference) - { - active_fs->frame.used_for_reference |= 0x2; - if (active_fs->bottom_field.is_long_term) - { - viddec_h264_set_is_bottom_long_term(active_fs, 1); - active_fs->long_term_frame_idx = active_fs->bottom_field.long_term_frame_idx; - } - } - if (viddec_h264_get_is_used(active_fs) == 3) { - h264_dpb_combine_field(use_old); // generate frame view - } - else - { - active_fs->frame.poc = active_fs->bottom_field.poc; - } - - }break; - } -/* - if ( gRestartMode.LastRestartType == RESTART_SEI ) - { - if ( active_fs->open_gop_entry ) dpb.WaitSeiRecovery = 1; - } - - gRestartMode.LastRestartType = 0xFFFF; -*/ - - return; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + if (NonExisting == 0) { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + active_fs->frame_num = (use_old) ? 
pInfo->old_slice.frame_num : pInfo->SliceHeader.frame_num; + } + else { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); + active_fs->frame_num = active_fs->frame.pic_num; + } + + if (add2dpb) { + p_dpb->fs_dpb_idc[p_dpb->used_size] = active_fs->fs_idc; + p_dpb->used_size++; + } + + + switch (viddec_h264_get_dec_structure(active_fs)) + { + case FRAME : { + viddec_h264_set_is_frame_used(active_fs, 3); + active_fs->frame.used_for_reference = used_for_reference?3:0; + if (used_for_reference) + { + active_fs->frame.used_for_reference = 3; + if (active_fs->frame.is_long_term) + viddec_h264_set_is_frame_long_term(active_fs, 3); + } + // Split frame to 2 fields for prediction + h264_dpb_split_field(pInfo); + + } + break; + case TOP_FIELD : { + viddec_h264_set_is_top_used(active_fs, 1); + + active_fs->top_field.used_for_reference = used_for_reference; + if (used_for_reference) + { + active_fs->frame.used_for_reference |= 0x1; + if (active_fs->top_field.is_long_term) + { + viddec_h264_set_is_top_long_term(active_fs, 1); + active_fs->long_term_frame_idx = active_fs->top_field.long_term_frame_idx; + } + } + if (viddec_h264_get_is_used(active_fs) == 3) { + h264_dpb_combine_field(use_old); // generate frame view + } + else + { + active_fs->frame.poc = active_fs->top_field.poc; + } + + } + break; + case BOTTOM_FIELD : { + viddec_h264_set_is_bottom_used(active_fs, 1); + + active_fs->bottom_field.used_for_reference = (used_for_reference<<1); + if (used_for_reference) + { + active_fs->frame.used_for_reference |= 0x2; + if (active_fs->bottom_field.is_long_term) + { + viddec_h264_set_is_bottom_long_term(active_fs, 1); + active_fs->long_term_frame_idx = active_fs->bottom_field.long_term_frame_idx; + } + } + if (viddec_h264_get_is_used(active_fs) == 3) { + h264_dpb_combine_field(use_old); // generate frame view + } + else + { + active_fs->frame.poc = active_fs->bottom_field.poc; + } + + } + break; + } + /* + if ( gRestartMode.LastRestartType == RESTART_SEI ) + { + if ( active_fs->open_gop_entry ) dpb.WaitSeiRecovery = 1; + } + + gRestartMode.LastRestartType = 0xFFFF; + */ + + return; } ////// End of insert picture in DPB /* ------------------------------------------------------------------------------------------ */ @@ -1781,80 +1787,80 @@ void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t difference_of_pic_nums_minus1) { - int32_t picNumX; - int32_t currPicNum; - uint32_t idx; - int32_t unmark_done; - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - - if (pInfo->img.structure == FRAME) - currPicNum = pInfo->img.frame_num; - else - currPicNum = (pInfo->img.frame_num << 1) + 1; - - picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1); - - unmark_done = 0; - - for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(unmark_done)); idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - - if (pInfo->img.structure == FRAME) - { - /* If all pic numbers in the list are different (and they should be) - we should terminate the for loop the moment we match pic numbers, - no need to continue to check - hence set unmark_done - */ - - if ((active_fs->frame.used_for_reference == 3) && (viddec_h264_get_is_long_term(active_fs) == 0) && - (active_fs->frame.pic_num == picNumX)) - { - h264_dpb_unmark_for_reference(p_dpb, active_fs->fs_idc); - h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); - unmark_done = 1; - } - } - else - { - /* - If we wish to unmark a short-term picture by picture number 
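/* Worked example (values assumed for illustration) of the computation below:
   picNumX = CurrPicNum - (difference_of_pic_nums_minus1 + 1).
   For a field slice with frame_num == 7, CurrPicNum == 2*7 + 1 == 15, so
   difference_of_pic_nums_minus1 == 4 unmarks the reference field whose
   PicNum is 10; for a frame slice CurrPicNum == frame_num == 7 and the same
   syntax element would target pic_num 2. */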
when the current picture - is a field, we have to unmark the corresponding field as unused for reference, - and also if it was part of a frame or complementary reference field pair, the - frame is to be marked as unused. However the opposite field may still be used as a - reference for future fields - - How will this affect the reference list update ftn coming after?? - - */ - if ((active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(active_fs)&0x01))&& - (active_fs->top_field.pic_num == picNumX) ) - { - active_fs->top_field.used_for_reference = 0; - active_fs->frame.used_for_reference &= 2; - - unmark_done = 1; - - //Check if other field is used for short-term reference, if not remove from list... - if(active_fs->bottom_field.used_for_reference == 0) - h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); - } - if ((active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(active_fs)&0x2)) && - (active_fs->bottom_field.pic_num == picNumX) ) - { - active_fs->bottom_field.used_for_reference = 0; - active_fs->frame.used_for_reference &= 1; - - unmark_done = 1; - - //Check if other field is used for reference, if not remove from list... - if(active_fs->top_field.used_for_reference == 0) - h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); - } - } - } - - return; + int32_t picNumX; + int32_t currPicNum; + uint32_t idx; + int32_t unmark_done; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + if (pInfo->img.structure == FRAME) + currPicNum = pInfo->img.frame_num; + else + currPicNum = (pInfo->img.frame_num << 1) + 1; + + picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1); + + unmark_done = 0; + + for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(unmark_done)); idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if (pInfo->img.structure == FRAME) + { + /* If all pic numbers in the list are different (and they should be) + we should terminate the for loop the moment we match pic numbers, + no need to continue to check - hence set unmark_done + */ + + if ((active_fs->frame.used_for_reference == 3) && (viddec_h264_get_is_long_term(active_fs) == 0) && + (active_fs->frame.pic_num == picNumX)) + { + h264_dpb_unmark_for_reference(p_dpb, active_fs->fs_idc); + h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); + unmark_done = 1; + } + } + else + { + /* + If we wish to unmark a short-term picture by picture number when the current picture + is a field, we have to unmark the corresponding field as unused for reference, + and also if it was part of a frame or complementary reference field pair, the + frame is to be marked as unused. However the opposite field may still be used as a + reference for future fields + + How will this affect the reference list update ftn coming after?? + + */ + if ((active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(active_fs)&0x01))&& + (active_fs->top_field.pic_num == picNumX) ) + { + active_fs->top_field.used_for_reference = 0; + active_fs->frame.used_for_reference &= 2; + + unmark_done = 1; + + //Check if other field is used for short-term reference, if not remove from list... 
+ if (active_fs->bottom_field.used_for_reference == 0) + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); + } + if ((active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(active_fs)&0x2)) && + (active_fs->bottom_field.pic_num == picNumX) ) + { + active_fs->bottom_field.used_for_reference = 0; + active_fs->frame.used_for_reference &= 1; + + unmark_done = 1; + + //Check if other field is used for reference, if not remove from list... + if (active_fs->top_field.used_for_reference == 0) + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); + } + } + } + + return; } /* ------------------------------------------------------------------------------------------ */ @@ -1868,72 +1874,72 @@ void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t diff // In a frame situation the long_term_pic_num will refer to another frame. // Thus we can call h264_dpb_unmark_for_long_term_reference() and then remove the picture // from the list -// +// // If the current picture is a field, long_term_pic_num will refer to another field // It is also the case that each individual field should have a unique picture number -// 8.2.5.4.2 suggests that when curr pic is a field, an mmco == 2 operation +// 8.2.5.4.2 suggests that when curr pic is a field, an mmco == 2 operation // should be accompanied by a second op to unmark the other field as being unused /////////////////////////////////////////////////////////////////////////////////// void h264_dpb_mm_unmark_long_term_for_reference (h264_Info * pInfo, int32_t long_term_pic_num) { - uint32_t idx; - int32_t unmark_done; - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - - unmark_done = 0; - for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (!(unmark_done)); idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); - - if (pInfo->img.structure == FRAME) - { - if ((active_fs->frame.used_for_reference==3) && (viddec_h264_get_is_long_term(active_fs)==3) && - (active_fs->frame.long_term_pic_num == long_term_pic_num)) - { - h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]); - h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); - unmark_done = 1; - } - } - else - { - /// Check top field - if ((active_fs->frame.used_for_reference&0x1) && (viddec_h264_get_is_long_term(active_fs)&0x1) && - (active_fs->top_field.long_term_pic_num == long_term_pic_num) ) - { - active_fs->top_field.used_for_reference = 0; - active_fs->top_field.is_long_term = 0; - active_fs->frame.used_for_reference &= 2; - viddec_h264_set_is_frame_long_term(active_fs, 2); - - unmark_done = 1; - - //Check if other field is used for long term reference, if not remove from list... - if ((active_fs->bottom_field.used_for_reference == 0) || (active_fs->bottom_field.is_long_term == 0)) - h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); - } - - /// Check Bottom field - if ((active_fs->frame.used_for_reference&0x2) && (viddec_h264_get_is_long_term(active_fs)&0x2) && - (active_fs->bottom_field.long_term_pic_num == long_term_pic_num) ) - { - active_fs->bottom_field.used_for_reference = 0; - active_fs->bottom_field.is_long_term = 0; - active_fs->frame.used_for_reference &= 1; - viddec_h264_set_is_frame_long_term(active_fs, 1); - - unmark_done = 1; - //Check if other field is used for long term reference, if not remove from list... 
- if ((active_fs->top_field.used_for_reference == 0) || (active_fs->top_field.is_long_term == 0)) - { - h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); - } - } - } // field structure - } //for(idx) - - return; + uint32_t idx; + int32_t unmark_done; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + unmark_done = 0; + for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (!(unmark_done)); idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + + if (pInfo->img.structure == FRAME) + { + if ((active_fs->frame.used_for_reference==3) && (viddec_h264_get_is_long_term(active_fs)==3) && + (active_fs->frame.long_term_pic_num == long_term_pic_num)) + { + h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); + unmark_done = 1; + } + } + else + { + /// Check top field + if ((active_fs->frame.used_for_reference&0x1) && (viddec_h264_get_is_long_term(active_fs)&0x1) && + (active_fs->top_field.long_term_pic_num == long_term_pic_num) ) + { + active_fs->top_field.used_for_reference = 0; + active_fs->top_field.is_long_term = 0; + active_fs->frame.used_for_reference &= 2; + viddec_h264_set_is_frame_long_term(active_fs, 2); + + unmark_done = 1; + + //Check if other field is used for long term reference, if not remove from list... + if ((active_fs->bottom_field.used_for_reference == 0) || (active_fs->bottom_field.is_long_term == 0)) + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); + } + + /// Check Bottom field + if ((active_fs->frame.used_for_reference&0x2) && (viddec_h264_get_is_long_term(active_fs)&0x2) && + (active_fs->bottom_field.long_term_pic_num == long_term_pic_num) ) + { + active_fs->bottom_field.used_for_reference = 0; + active_fs->bottom_field.is_long_term = 0; + active_fs->frame.used_for_reference &= 1; + viddec_h264_set_is_frame_long_term(active_fs, 1); + + unmark_done = 1; + //Check if other field is used for long term reference, if not remove from list... 
+ if ((active_fs->top_field.used_for_reference == 0) || (active_fs->top_field.is_long_term == 0)) + { + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); + } + } + } // field structure + } //for(idx) + + return; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -1947,31 +1953,31 @@ void h264_dpb_mm_unmark_long_term_for_reference (h264_Info * pInfo, int32_t long int32_t h264_dpb_get_pic_struct_by_pic_num(h264_DecodedPictureBuffer *p_dpb, int32_t picNumX) { - uint32_t idx; - int32_t pic_struct = INVALID; - int32_t found = 0; - - for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(found)); idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - - if ((active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(active_fs)&0x01))&& - (active_fs->top_field.pic_num == picNumX) ) - { - found = 1; - pic_struct = TOP_FIELD; - - } - if ((active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(active_fs)&0x2)) && - (active_fs->bottom_field.pic_num == picNumX) ) - { - found = 1; - pic_struct = BOTTOM_FIELD; - - } - } - - return pic_struct; + uint32_t idx; + int32_t pic_struct = INVALID; + int32_t found = 0; + + for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(found)); idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if ((active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(active_fs)&0x01))&& + (active_fs->top_field.pic_num == picNumX) ) + { + found = 1; + pic_struct = TOP_FIELD; + + } + if ((active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(active_fs)&0x2)) && + (active_fs->bottom_field.pic_num == picNumX) ) + { + found = 1; + pic_struct = BOTTOM_FIELD; + + } + } + + return pic_struct; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -1985,32 +1991,32 @@ int32_t h264_dpb_get_pic_struct_by_pic_num(h264_DecodedPictureBuffer *p_dpb, int void h264_dpb_mm_assign_long_term_frame_idx(h264_Info * pInfo, int32_t difference_of_pic_nums_minus1, int32_t long_term_frame_idx) { - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - int32_t picNumX; - int32_t currPicNum; - int32_t polarity = 0; - - if (pInfo->img.structure == FRAME) { - currPicNum = pInfo->img.frame_num; - } else { - currPicNum = (pInfo->img.frame_num << 1) + 1; - } + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + int32_t picNumX; + int32_t currPicNum; + int32_t polarity = 0; + + if (pInfo->img.structure == FRAME) { + currPicNum = pInfo->img.frame_num; + } else { + currPicNum = (pInfo->img.frame_num << 1) + 1; + } - picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1); + picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1); - // remove frames / fields with same long_term_frame_idx - if (pInfo->img.structure == FRAME) { - h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx); - } else { - polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX); + // remove frames / fields with same long_term_frame_idx + if (pInfo->img.structure == FRAME) { + h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx); + } else { + polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX); - if(polarity != INVALID) - 
h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, active_fs->fs_idc, polarity); - } + if (polarity != INVALID) + h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, active_fs->fs_idc, polarity); + } - h264_dpb_mark_pic_long_term(pInfo, long_term_frame_idx, picNumX); + h264_dpb_mark_pic_long_term(pInfo, long_term_frame_idx, picNumX); - return; + return; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -2023,31 +2029,31 @@ void h264_dpb_mm_assign_long_term_frame_idx(h264_Info * pInfo, int32_t differenc void h264_dpb_mm_update_max_long_term_frame_idx(h264_DecodedPictureBuffer *p_dpb,int32_t max_long_term_frame_idx_plus1) { - //h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - int32_t idx; - int32_t temp; - int32_t removed_count; - int32_t idx2 = 0; - - p_dpb->max_long_term_pic_idx = max_long_term_frame_idx_plus1 - 1; - - temp = p_dpb->ltref_frames_in_buffer; - removed_count = 0; - - // check for invalid frames - for (idx = 0; idx < temp; idx++) - { - idx2 = idx - removed_count; - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx2]); - - if (active_fs->long_term_frame_idx > p_dpb->max_long_term_pic_idx) - { - removed_count++; - h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx2]); - h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx2]); - } - } - return; + //h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + int32_t idx; + int32_t temp; + int32_t removed_count; + int32_t idx2 = 0; + + p_dpb->max_long_term_pic_idx = max_long_term_frame_idx_plus1 - 1; + + temp = p_dpb->ltref_frames_in_buffer; + removed_count = 0; + + // check for invalid frames + for (idx = 0; idx < temp; idx++) + { + idx2 = idx - removed_count; + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx2]); + + if (active_fs->long_term_frame_idx > p_dpb->max_long_term_pic_idx) + { + removed_count++; + h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx2]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx2]); + } + } + return; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -2060,15 +2066,15 @@ void h264_dpb_mm_update_max_long_term_frame_idx(h264_DecodedPictureBuffer *p_dpb void h264_dpb_mm_unmark_all_short_term_for_reference (h264_DecodedPictureBuffer *p_dpb) { - int32_t idx; - int32_t temp = p_dpb->ref_frames_in_buffer; - - for (idx = 0; idx < temp; idx++) - { - h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]); - h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]); - } - return; + int32_t idx; + int32_t temp = p_dpb->ref_frames_in_buffer; + + for (idx = 0; idx < temp; idx++) + { + h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]); + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]); + } + return; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -2082,44 +2088,44 @@ void h264_dpb_mm_unmark_all_short_term_for_reference (h264_DecodedPictureBuffer void h264_dpb_mm_mark_current_picture_long_term(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx) { - int32_t picNumX; - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - - if 
(viddec_h264_get_dec_structure(active_fs) == FRAME) - { - h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx); - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - active_fs->frame.is_long_term = 1; - active_fs->frame.long_term_frame_idx = long_term_frame_idx; - active_fs->frame.long_term_pic_num = long_term_frame_idx; - } - else - { - if(viddec_h264_get_dec_structure(active_fs) == TOP_FIELD) - { - picNumX = (active_fs->top_field.pic_num << 1) + 1; - active_fs->top_field.is_long_term = 1; - active_fs->top_field.long_term_frame_idx = long_term_frame_idx; - - // Assign long-term pic num - active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + 1; - } - else - { - picNumX = (active_fs->bottom_field.pic_num << 1) + 1; - active_fs->bottom_field.is_long_term = 1; - active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; - - // Assign long-term pic num - active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + 1; - - } - h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, p_dpb->fs_dec_idc, viddec_h264_get_dec_structure(active_fs)); - } - // Add to long term list - //h264_dpb_add_ltref_list(p_dpb->fs_dec_idc); - - return; + int32_t picNumX; + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + if (viddec_h264_get_dec_structure(active_fs) == FRAME) + { + h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx); + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + active_fs->frame.is_long_term = 1; + active_fs->frame.long_term_frame_idx = long_term_frame_idx; + active_fs->frame.long_term_pic_num = long_term_frame_idx; + } + else + { + if (viddec_h264_get_dec_structure(active_fs) == TOP_FIELD) + { + picNumX = (active_fs->top_field.pic_num << 1) + 1; + active_fs->top_field.is_long_term = 1; + active_fs->top_field.long_term_frame_idx = long_term_frame_idx; + + // Assign long-term pic num + active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + 1; + } + else + { + picNumX = (active_fs->bottom_field.pic_num << 1) + 1; + active_fs->bottom_field.is_long_term = 1; + active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; + + // Assign long-term pic num + active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + 1; + + } + h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, p_dpb->fs_dec_idc, viddec_h264_get_dec_structure(active_fs)); + } + // Add to long term list + //h264_dpb_add_ltref_list(p_dpb->fs_dec_idc); + + return; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -2133,18 +2139,18 @@ void h264_dpb_mm_mark_current_picture_long_term(h264_DecodedPictureBuffer *p_dpb void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx) { - uint32_t idx; - for(idx =0; idx < p_dpb->ltref_frames_in_buffer; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); - - if (active_fs->long_term_frame_idx == long_term_frame_idx) - { - h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]); - h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); - } - } - return; + uint32_t idx; + for (idx =0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + + if (active_fs->long_term_frame_idx == 
long_term_frame_idx)
+ {
+ h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ }
+ }
+ return;
}
/* ------------------------------------------------------------------------------------------ */
@@ -2153,43 +2159,43 @@ void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPict
//////////////////////////////////////////////////////////////////////////////
// h264_dpb_unmark_long_term_field_for_reference_by_frame_idx ()
//
-// Mark a long-term reference field unused for reference. However, if it is the
-// complementary field (opposite polarity) of the picture stored in fs_idc,
+// Mark a long-term reference field unused for reference. However, if it is the
+// complementary field (opposite polarity) of the picture stored in fs_idc,
// we do not unmark it
//////////////////////////////////////////////////////////////////////////////
void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx, int32_t fs_idc, int32_t polarity)
{
- uint32_t idx;
- int32_t found = 0;
- int32_t is_complement = 0;
-
- for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (found == 0); idx++)
- {
- h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
- if (active_fs->long_term_frame_idx == long_term_frame_idx)
- {
- if(active_fs->fs_idc == fs_idc)
- {
- // Again these seem like redundant checks but for safety until JM is updated
- if (polarity == TOP_FIELD)
- is_complement = (active_fs->bottom_field.is_long_term)? 1:0;
- else if(polarity == BOTTOM_FIELD)
- is_complement = (active_fs->top_field.is_long_term) ? 1:0;
- }
- found = 1;
- }
- }
-
- if(found) {
- if(is_complement == 0)
- {
- h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
- h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
- }
- }
-
- return;
+ uint32_t idx;
+ int32_t found = 0;
+ int32_t is_complement = 0;
+
+ for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (found == 0); idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ if (active_fs->long_term_frame_idx == long_term_frame_idx)
+ {
+ if (active_fs->fs_idc == fs_idc)
+ {
+ // Again these seem like redundant checks but for safety until JM is updated
+ if (polarity == TOP_FIELD)
+ is_complement = (active_fs->bottom_field.is_long_term)? 1:0;
+ else if (polarity == BOTTOM_FIELD)
+ is_complement = (active_fs->top_field.is_long_term) ? 1:0;
+ }
+ found = 1;
+ }
+ }
+
+ if (found) {
+ if (is_complement == 0)
+ {
+ h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
+ h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
+ }
+ }
+
+ return;
}
@@ -2202,7 +2208,7 @@ void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPict
// This is used on a picture already in the dpb - i.e. not for the current picture
// dpb_split / dpb_combine field will perform functionality in that case
//
-// Marks a picture as used for long-term reference. Adds it to the long-term
+// Marks a picture as used for long-term reference. Adds it to the long-term
// reference list.
Also removes it from the short term reference list if required // // Note: if the current picture is a frame, the picture to be marked will be a @@ -2212,102 +2218,102 @@ void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPict // assigned to be equal to long_term_frame_idx // // If the current picture is a field, the picture to be marked will be a -// short-term reference field. We use the pic_nums assigned to the field parts of +// short-term reference field. We use the pic_nums assigned to the field parts of // the structure to identify the appropriate field. We assign the long_term_frame_idx -// of the field equal to long_term_frame_idx. +// of the field equal to long_term_frame_idx. // -// We also check to see if this marking has resulted in both fields of the frame -// becoming long_term. If it has, we update the frame part of the structure by +// We also check to see if this marking has resulted in both fields of the frame +// becoming long_term. If it has, we update the frame part of the structure by // setting its long_term_frame_idx ////////////////////////////////////////////////////////////////////////////// void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, int32_t picNumX) { - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - uint32_t idx; - int32_t mark_done; - int32_t polarity = 0; - - mark_done = 0; - - if (pInfo->img.structure == FRAME) - { - for (idx = 0; (idx < p_dpb->ref_frames_in_buffer) && (!(mark_done)); idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - - if (active_fs->frame.used_for_reference == 3) - { - if ((!(active_fs->frame.is_long_term))&&(active_fs->frame.pic_num == picNumX)) - { - active_fs->long_term_frame_idx = long_term_frame_idx; - active_fs->frame.long_term_frame_idx = long_term_frame_idx; - active_fs->top_field.long_term_frame_idx = long_term_frame_idx; - active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; - - active_fs->frame.is_long_term = 1; - active_fs->top_field.is_long_term = 1; - active_fs->bottom_field.is_long_term = 1; - - viddec_h264_set_is_frame_long_term(active_fs, 3); - mark_done = 1; - - // Assign long-term pic num - active_fs->frame.long_term_pic_num = long_term_frame_idx; - active_fs->top_field.long_term_pic_num = long_term_frame_idx; - active_fs->bottom_field.long_term_pic_num = long_term_frame_idx; - // Add to long term list - h264_dpb_add_ltref_list(p_dpb, p_dpb->fs_ref_idc[idx]); - // Remove from short-term list - h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); - } - } - } - } - else - { - polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX); - active_fs->long_term_frame_idx = long_term_frame_idx; /////BUG - - if(polarity == TOP_FIELD) - { - active_fs->top_field.long_term_frame_idx = long_term_frame_idx; - active_fs->top_field.is_long_term = 1; - viddec_h264_set_is_top_long_term(active_fs, 1); - - // Assign long-term pic num - active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == TOP_FIELD) ? 1 : 0); - - } - else if (polarity == BOTTOM_FIELD) - { - active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; - active_fs->bottom_field.is_long_term = 1; - viddec_h264_set_is_bottom_long_term(active_fs, 1); - - // Assign long-term pic num - active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == BOTTOM_FIELD) ? 
1 : 0); - } - - if (viddec_h264_get_is_long_term(active_fs) == 3) - { - active_fs->frame.is_long_term = 1; - active_fs->frame.long_term_frame_idx = long_term_frame_idx; - h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); - } - else - { - // We need to add this idc to the long term ref list... - h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc); - - // If the opposite field is not a short term reference, remove it from the - // short term list. Since we know top field is a reference but both are not long term - // we can simply check that both fields are not references... - if(active_fs->frame.used_for_reference != 3) - h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); - } - } - return; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + uint32_t idx; + int32_t mark_done; + int32_t polarity = 0; + + mark_done = 0; + + if (pInfo->img.structure == FRAME) + { + for (idx = 0; (idx < p_dpb->ref_frames_in_buffer) && (!(mark_done)); idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if (active_fs->frame.used_for_reference == 3) + { + if ((!(active_fs->frame.is_long_term))&&(active_fs->frame.pic_num == picNumX)) + { + active_fs->long_term_frame_idx = long_term_frame_idx; + active_fs->frame.long_term_frame_idx = long_term_frame_idx; + active_fs->top_field.long_term_frame_idx = long_term_frame_idx; + active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; + + active_fs->frame.is_long_term = 1; + active_fs->top_field.is_long_term = 1; + active_fs->bottom_field.is_long_term = 1; + + viddec_h264_set_is_frame_long_term(active_fs, 3); + mark_done = 1; + + // Assign long-term pic num + active_fs->frame.long_term_pic_num = long_term_frame_idx; + active_fs->top_field.long_term_pic_num = long_term_frame_idx; + active_fs->bottom_field.long_term_pic_num = long_term_frame_idx; + // Add to long term list + h264_dpb_add_ltref_list(p_dpb, p_dpb->fs_ref_idc[idx]); + // Remove from short-term list + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); + } + } + } + } + else + { + polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX); + active_fs->long_term_frame_idx = long_term_frame_idx; /////BUG + + if (polarity == TOP_FIELD) + { + active_fs->top_field.long_term_frame_idx = long_term_frame_idx; + active_fs->top_field.is_long_term = 1; + viddec_h264_set_is_top_long_term(active_fs, 1); + + // Assign long-term pic num + active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == TOP_FIELD) ? 1 : 0); + + } + else if (polarity == BOTTOM_FIELD) + { + active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; + active_fs->bottom_field.is_long_term = 1; + viddec_h264_set_is_bottom_long_term(active_fs, 1); + + // Assign long-term pic num + active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == BOTTOM_FIELD) ? 1 : 0); + } + + if (viddec_h264_get_is_long_term(active_fs) == 3) + { + active_fs->frame.is_long_term = 1; + active_fs->frame.long_term_frame_idx = long_term_frame_idx; + h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); + } + else + { + // We need to add this idc to the long term ref list... + h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc); + + // If the opposite field is not a short term reference, remove it from the + // short term list. Since we know top field is a reference but both are not long term + // we can simply check that both fields are not references... 
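/*
 * A small sketch (not part of the patch) of the LongTermPicNum arithmetic
 * used just above: a field whose parity matches the current picture gets
 * 2 * long_term_frame_idx + 1, the opposite-parity field gets 2 * idx.
 * The helper name is hypothetical.
 */
static int long_term_pic_num_for_field(int long_term_frame_idx, int same_parity)
{
    return (long_term_frame_idx << 1) + (same_parity ? 1 : 0);
}
/* e.g. long_term_frame_idx = 4 gives 9 for the same-parity field, 8 otherwise */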
+ if (active_fs->frame.used_for_reference != 3)
+ h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc);
+ }
+ }
+ return;
}
///// End of mark pic long term
@@ -2322,83 +2328,89 @@ void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx,
void h264_dpb_adaptive_memory_management (h264_Info * pInfo)
{
- h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
- int32_t idx;
-
- idx = 0;
-
- while (idx < pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count)
- {
- switch(pInfo->SliceHeader.sh_dec_refpic.memory_management_control_operation[idx])
- {
- case 1:{ //Mark a short-term reference picture as "unused for reference"
- h264_dpb_mm_unmark_short_term_for_reference(pInfo,
- pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx]);
- } break;
- case 2:{ //Mark a long-term reference picture as "unused for reference"
- h264_dpb_mm_unmark_long_term_for_reference(pInfo,
- pInfo->SliceHeader.sh_dec_refpic.long_term_pic_num[idx]);
- }break;
- case 3:{ //Mark a short-term reference picture as "used for long-term reference" and assign a long-term frame index to it
- h264_dpb_mm_assign_long_term_frame_idx(pInfo,
- pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx],
- pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
- }break;
- case 4:{ //Specify the maximum long-term frame index and
- //mark all long-term reference pictures having long-term frame indices greater than
- //the maximum value as "unused for reference"
- h264_dpb_mm_update_max_long_term_frame_idx (&pInfo->dpb,
- pInfo->SliceHeader.sh_dec_refpic.max_long_term_frame_idx_plus1[idx]);
- }break;
- case 5:{ //Mark all reference pictures as "unused for reference" and set the MaxLongTermFrameIdx variable to
- // "no long-term frame indices"
- h264_dpb_mm_unmark_all_short_term_for_reference(&pInfo->dpb);
- h264_dpb_mm_update_max_long_term_frame_idx(&pInfo->dpb, 0);
- pInfo->img.last_has_mmco_5 = 1;
- }break;
- case 6:{ //Mark the current picture as "used for long-term reference" and assign a long-term frame index to it
- h264_dpb_mm_mark_current_picture_long_term(&pInfo->dpb,
- pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
- }break;
- }
- idx++;
- }
-
-
- if (pInfo->img.last_has_mmco_5)
- {
- pInfo->img.frame_num = 0;
- pInfo->SliceHeader.frame_num=0;
- h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
-
- if (viddec_h264_get_dec_structure(active_fs) == FRAME)
- {
- pInfo->img.bottompoc -= active_fs->frame.poc;
- pInfo->img.toppoc -= active_fs->frame.poc;
-
-
- active_fs->frame.poc = 0;
- active_fs->frame.pic_num = 0;
- active_fs->frame_num = 0;
- }
-
- else if (viddec_h264_get_dec_structure(active_fs) == TOP_FIELD)
- {
- active_fs->top_field.poc = active_fs->top_field.pic_num = 0;
- pInfo->img.toppoc = active_fs->top_field.poc;
- }
- else if (viddec_h264_get_dec_structure(active_fs) == BOTTOM_FIELD)
- {
- active_fs->bottom_field.poc = active_fs->bottom_field.pic_num = 0;
- pInfo->img.bottompoc = 0;
- }
-
- h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field,pInfo->active_SPS.num_ref_frames);
- }
- // Reset the marking count operations for the current picture...
- pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count = 0;
-
- return;
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ int32_t idx;
+
+ idx = 0;
+
+ while (idx < pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count)
+ {
+ switch (pInfo->SliceHeader.sh_dec_refpic.memory_management_control_operation[idx])
+ {
+ case 1: { //Mark a short-term reference picture as "unused for reference"
+ h264_dpb_mm_unmark_short_term_for_reference(pInfo,
+ pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx]);
+ }
+ break;
+ case 2: { //Mark a long-term reference picture as "unused for reference"
+ h264_dpb_mm_unmark_long_term_for_reference(pInfo,
+ pInfo->SliceHeader.sh_dec_refpic.long_term_pic_num[idx]);
+ }
+ break;
+ case 3: { //Mark a short-term reference picture as "used for long-term reference" and assign a long-term frame index to it
+ h264_dpb_mm_assign_long_term_frame_idx(pInfo,
+ pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx],
+ pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
+ }
+ break;
+ case 4: { //Specify the maximum long-term frame index and
+ //mark all long-term reference pictures having long-term frame indices greater than
+ //the maximum value as "unused for reference"
+ h264_dpb_mm_update_max_long_term_frame_idx (&pInfo->dpb,
+ pInfo->SliceHeader.sh_dec_refpic.max_long_term_frame_idx_plus1[idx]);
+ }
+ break;
+ case 5: { //Mark all reference pictures as "unused for reference" and set the MaxLongTermFrameIdx variable to
+ // "no long-term frame indices"
+ h264_dpb_mm_unmark_all_short_term_for_reference(&pInfo->dpb);
+ h264_dpb_mm_update_max_long_term_frame_idx(&pInfo->dpb, 0);
+ pInfo->img.last_has_mmco_5 = 1;
+ }
+ break;
+ case 6: { //Mark the current picture as "used for long-term reference" and assign a long-term frame index to it
+ h264_dpb_mm_mark_current_picture_long_term(&pInfo->dpb,
+ pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
+ }
+ break;
+ }
+ idx++;
+ }
+
+
+ if (pInfo->img.last_has_mmco_5)
+ {
+ pInfo->img.frame_num = 0;
+ pInfo->SliceHeader.frame_num=0;
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+ if (viddec_h264_get_dec_structure(active_fs) == FRAME)
+ {
+ pInfo->img.bottompoc -= active_fs->frame.poc;
+ pInfo->img.toppoc -= active_fs->frame.poc;
+
+
+ active_fs->frame.poc = 0;
+ active_fs->frame.pic_num = 0;
+ active_fs->frame_num = 0;
+ }
+
+ else if (viddec_h264_get_dec_structure(active_fs) == TOP_FIELD)
+ {
+ active_fs->top_field.poc = active_fs->top_field.pic_num = 0;
+ pInfo->img.toppoc = active_fs->top_field.poc;
+ }
+ else if (viddec_h264_get_dec_structure(active_fs) == BOTTOM_FIELD)
+ {
+ active_fs->bottom_field.poc = active_fs->bottom_field.pic_num = 0;
+ pInfo->img.bottompoc = 0;
+ }
+
+ h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field,pInfo->active_SPS.num_ref_frames);
+ }
+ // Reset the marking count operations for the current picture...
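/*
 * A summary sketch (not part of the patch) of the dispatch above: the six
 * memory_management_control_operation codes, as named in the H.264 spec.
 * The enum is illustrative only; the parser works on the raw integers.
 */
enum mmco_op {
    MMCO_UNMARK_SHORT_TERM      = 1, /* short-term picture -> unused for reference */
    MMCO_UNMARK_LONG_TERM       = 2, /* long-term picture -> unused for reference */
    MMCO_ASSIGN_LT_FRAME_IDX    = 3, /* short-term picture -> long-term, with index */
    MMCO_SET_MAX_LT_FRAME_IDX   = 4, /* drop long-term refs above the new maximum */
    MMCO_RESET_ALL              = 5, /* unmark everything, reset MaxLongTermFrameIdx */
    MMCO_MARK_CURRENT_LONG_TERM = 6  /* current picture becomes a long-term ref */
};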
+ pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count = 0; + + return; } ////// End of adaptive memory management /* ------------------------------------------------------------------------------------------ */ @@ -2408,132 +2420,132 @@ void h264_dpb_adaptive_memory_management (h264_Info * pInfo) // h264_dpb_gaps_in_frame_num_mem_management () // // Produces a set of frame_nums pertaining to "non-existing" pictures -// Calls h264_dpb_store_picture_in_dpb -////////////////////////////////////////////////////////////////////////////// +// Calls h264_dpb_store_picture_in_dpb +////////////////////////////////////////////////////////////////////////////// void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) { - int32_t temp_frame_num = 0; - int32_t idx, prev_idc; - int32_t prev_frame_num_plus1_wrap; - uint32_t temp; - int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); - seq_param_set_used_ptr active_sps = &pInfo->active_SPS; - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - - pInfo->img.gaps_in_frame_num = 0; - - // pInfo->img.last_has_mmco_5 set thru store_picture_in_dpb - if (pInfo->img.last_has_mmco_5) - { - // If the previous picture was an unpaired field, mark it as a dangler - if(p_dpb->used_size) - { - idx = p_dpb->used_size-1; - prev_idc = p_dpb->fs_dpb_idc[idx]; - if (prev_idc != MPD_DPB_FS_NULL_IDC) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - active_fs->frame_num =0; - } - } - pInfo->img.PreviousFrameNumOffset = 0; - //CONFORMANCE_ISSUE - pInfo->img.PreviousFrameNum = 0; - - } - - // Check for gaps in frame_num - if(pInfo->SliceHeader.idr_flag) { - pInfo->img.PreviousFrameNum = pInfo->img.frame_num; - } - // Have we re-started following a recovery point message? -/* - else if(got_sei_recovery || aud_got_restart){ - pInfo->img.PreviousFrameNum = pInfo->img.frame_num; - //got_sei_recovery = 0; - //aud_got_restart = 0; - } -*/ - else if(pInfo->img.frame_num != pInfo->img.PreviousFrameNum) - { - if (MaxFrameNum) - ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp); - - prev_frame_num_plus1_wrap = temp; - if(pInfo->img.frame_num != prev_frame_num_plus1_wrap) - { - pInfo->img.gaps_in_frame_num = (pInfo->img.frame_num < pInfo->img.PreviousFrameNum)? 
((MaxFrameNum + pInfo->img.frame_num -1) - pInfo->img.PreviousFrameNum): (pInfo->img.frame_num - pInfo->img.PreviousFrameNum - 1); - // We should test for an error here - should infer an unintentional loss of pictures - } - } - - - //if(active_sps->gaps_in_frame_num_value_allowed_flag == 0) { - if(pInfo->img.gaps_in_frame_num && (active_sps->gaps_in_frame_num_value_allowed_flag == 0)) { - // infer an unintentional loss of pictures - // only invoke following process for a conforming bitstream - // when gaps_in_frame_num_value_allowed_flag is equal to 1 - pInfo->img.gaps_in_frame_num = 0; - - //mfd_printf("ERROR STREAM??\n"); - ////// Error handling here---- - } - - /////// Removed following OLO source (Sodaville H.D) - //else if (pInfo->img.gaps_in_frame_num > active_sps->num_ref_frames) { - // // No need to produce any more non-existent frames than the amount required to flush the dpb - // pInfo->img.gaps_in_frame_num = active_sps->num_ref_frames; - //mfd_printf("gaps in frame: %d\n", gaps_in_frame_num); - //} - - // If the previous picture was an unpaired field, mark it as a dangler - if(p_dpb->used_size) - { - idx = p_dpb->used_size-1; - prev_idc = p_dpb->fs_dpb_idc[idx]; - if (prev_idc != MPD_DPB_FS_NULL_IDC) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - if(viddec_h264_get_is_used(active_fs) != 3) { - h264_dpb_mark_dangling_field(p_dpb, active_fs->fs_idc); //, DANGLING_TYPE_GAP_IN_FRAME - } - } - } - - while(temp_frame_num < pInfo->img.gaps_in_frame_num) - { - h264_dpb_assign_frame_store(pInfo, 1); - - // Set up initial markings - not sure if all are needed - viddec_h264_set_dec_structure(active_fs, FRAME); - - if(MaxFrameNum) - ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp); - - active_fs->frame.pic_num = temp; - active_fs->long_term_frame_idx = 0; - active_fs->frame.long_term_pic_num = 0; - viddec_h264_set_is_frame_long_term(active_fs, 0); - - // Note the call below will overwrite some aspects of the img structure with info relating to the - // non-existent picture - // However, since this is called before h264_hdr_decoding_poc() for the current existing picture - // it should be o.k. - if(pInfo->img.pic_order_cnt_type) - h264_hdr_decoding_poc(pInfo, 1, temp); - - pInfo->img.structure = FRAME; - active_fs->frame.poc = pInfo->img.framepoc; - - // call store_picture_in_dpb - - h264_dpb_store_previous_picture_in_dpb(pInfo, 1, 0); - - h264_hdr_post_poc(pInfo, 1, temp, 0); - - temp_frame_num++; - } + int32_t temp_frame_num = 0; + int32_t idx, prev_idc; + int32_t prev_frame_num_plus1_wrap; + uint32_t temp; + int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); + seq_param_set_used_ptr active_sps = &pInfo->active_SPS; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + pInfo->img.gaps_in_frame_num = 0; + + // pInfo->img.last_has_mmco_5 set thru store_picture_in_dpb + if (pInfo->img.last_has_mmco_5) + { + // If the previous picture was an unpaired field, mark it as a dangler + if (p_dpb->used_size) + { + idx = p_dpb->used_size-1; + prev_idc = p_dpb->fs_dpb_idc[idx]; + if (prev_idc != MPD_DPB_FS_NULL_IDC) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + active_fs->frame_num =0; + } + } + pInfo->img.PreviousFrameNumOffset = 0; + //CONFORMANCE_ISSUE + pInfo->img.PreviousFrameNum = 0; + + } + + // Check for gaps in frame_num + if (pInfo->SliceHeader.idr_flag) { + pInfo->img.PreviousFrameNum = pInfo->img.frame_num; + } + // Have we re-started following a recovery point message? 
+ /* + else if(got_sei_recovery || aud_got_restart){ + pInfo->img.PreviousFrameNum = pInfo->img.frame_num; + //got_sei_recovery = 0; + //aud_got_restart = 0; + } + */ + else if (pInfo->img.frame_num != pInfo->img.PreviousFrameNum) + { + if (MaxFrameNum) + ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp); + + prev_frame_num_plus1_wrap = temp; + if (pInfo->img.frame_num != prev_frame_num_plus1_wrap) + { + pInfo->img.gaps_in_frame_num = (pInfo->img.frame_num < pInfo->img.PreviousFrameNum)? ((MaxFrameNum + pInfo->img.frame_num -1) - pInfo->img.PreviousFrameNum): (pInfo->img.frame_num - pInfo->img.PreviousFrameNum - 1); + // We should test for an error here - should infer an unintentional loss of pictures + } + } + + + //if(active_sps->gaps_in_frame_num_value_allowed_flag == 0) { + if (pInfo->img.gaps_in_frame_num && (active_sps->gaps_in_frame_num_value_allowed_flag == 0)) { + // infer an unintentional loss of pictures + // only invoke following process for a conforming bitstream + // when gaps_in_frame_num_value_allowed_flag is equal to 1 + pInfo->img.gaps_in_frame_num = 0; + + //mfd_printf("ERROR STREAM??\n"); + ////// Error handling here---- + } + + /////// Removed following OLO source (Sodaville H.D) + //else if (pInfo->img.gaps_in_frame_num > active_sps->num_ref_frames) { + // // No need to produce any more non-existent frames than the amount required to flush the dpb + // pInfo->img.gaps_in_frame_num = active_sps->num_ref_frames; + //mfd_printf("gaps in frame: %d\n", gaps_in_frame_num); + //} + + // If the previous picture was an unpaired field, mark it as a dangler + if (p_dpb->used_size) + { + idx = p_dpb->used_size-1; + prev_idc = p_dpb->fs_dpb_idc[idx]; + if (prev_idc != MPD_DPB_FS_NULL_IDC) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + if (viddec_h264_get_is_used(active_fs) != 3) { + h264_dpb_mark_dangling_field(p_dpb, active_fs->fs_idc); //, DANGLING_TYPE_GAP_IN_FRAME + } + } + } + + while (temp_frame_num < pInfo->img.gaps_in_frame_num) + { + h264_dpb_assign_frame_store(pInfo, 1); + + // Set up initial markings - not sure if all are needed + viddec_h264_set_dec_structure(active_fs, FRAME); + + if (MaxFrameNum) + ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp); + + active_fs->frame.pic_num = temp; + active_fs->long_term_frame_idx = 0; + active_fs->frame.long_term_pic_num = 0; + viddec_h264_set_is_frame_long_term(active_fs, 0); + + // Note the call below will overwrite some aspects of the img structure with info relating to the + // non-existent picture + // However, since this is called before h264_hdr_decoding_poc() for the current existing picture + // it should be o.k. 
+ if (pInfo->img.pic_order_cnt_type) + h264_hdr_decoding_poc(pInfo, 1, temp); + + pInfo->img.structure = FRAME; + active_fs->frame.poc = pInfo->img.framepoc; + + // call store_picture_in_dpb + + h264_dpb_store_previous_picture_in_dpb(pInfo, 1, 0); + + h264_hdr_post_poc(pInfo, 1, temp, 0); + + temp_frame_num++; + } } /* ------------------------------------------------------------------------------------------ */ @@ -2548,14 +2560,14 @@ void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc) { - h264_dpb_set_active_fs(p_dpb, fs_idc); + h264_dpb_set_active_fs(p_dpb, fs_idc); - if (viddec_h264_get_is_used(active_fs)&0x1) active_fs->top_field.used_for_reference = 0; - if (viddec_h264_get_is_used(active_fs)&0x2) active_fs->bottom_field.used_for_reference = 0; - if (viddec_h264_get_is_used(active_fs) == 3) active_fs->frame.used_for_reference = 0; + if (viddec_h264_get_is_used(active_fs)&0x1) active_fs->top_field.used_for_reference = 0; + if (viddec_h264_get_is_used(active_fs)&0x2) active_fs->bottom_field.used_for_reference = 0; + if (viddec_h264_get_is_used(active_fs) == 3) active_fs->frame.used_for_reference = 0; - active_fs->frame.used_for_reference = 0; - return; + active_fs->frame.used_for_reference = 0; + return; } @@ -2571,29 +2583,29 @@ void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_ void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc) { - h264_dpb_set_active_fs(p_dpb, fs_idc); - - if (viddec_h264_get_is_used(active_fs)&0x1) - { - active_fs->top_field.used_for_reference = 0; - active_fs->top_field.is_long_term = 0; - } - - if (viddec_h264_get_is_used(active_fs)&0x2) - { - active_fs->bottom_field.used_for_reference = 0; - active_fs->bottom_field.is_long_term = 0; - } - if (viddec_h264_get_is_used(active_fs) == 3) - { - active_fs->frame.used_for_reference = 0; - active_fs->frame.is_long_term = 0; - } - - active_fs->frame.used_for_reference = 0; - viddec_h264_set_is_frame_long_term(active_fs, 0); - - return; + h264_dpb_set_active_fs(p_dpb, fs_idc); + + if (viddec_h264_get_is_used(active_fs)&0x1) + { + active_fs->top_field.used_for_reference = 0; + active_fs->top_field.is_long_term = 0; + } + + if (viddec_h264_get_is_used(active_fs)&0x2) + { + active_fs->bottom_field.used_for_reference = 0; + active_fs->bottom_field.is_long_term = 0; + } + if (viddec_h264_get_is_used(active_fs) == 3) + { + active_fs->frame.used_for_reference = 0; + active_fs->frame.is_long_term = 0; + } + + active_fs->frame.used_for_reference = 0; + viddec_h264_set_is_frame_long_term(active_fs, 0); + + return; } @@ -2611,39 +2623,39 @@ void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, i void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc) { - h264_dpb_set_active_fs(p_dpb, fs_idc); - - //PRINTF(MFD_NONE, " fs_idc = %d DANGLING_TYPE = %d \n", fs_idc, reason); - /* - Make the check that it has not already been marked - This covers the situation of a dangling field followed by a - frame which is direct output (i.e. never entered into the dpb). 
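/*
 * A minimal sketch (not part of the patch) of the test behind
 * h264_dpb_mark_dangling_field(): a frame store holds an unpaired
 * ("dangling") field when only one of its two is_used bits is set.
 * The helper name is hypothetical.
 */
static int is_unpaired_field(int is_used_mask) /* bit 0 = top, bit 1 = bottom */
{
    return is_used_mask == 0x1 || is_used_mask == 0x2; /* 3 means a full pair */
}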
- In this case we could attempt to mark the prev unpaired field - as a dangler twice which would upset the HW dpb_disp_q count - */ - - if(viddec_h264_get_is_dangling(active_fs) == 0) - { - switch(viddec_h264_get_dec_structure(active_fs)) - { - case TOP_FIELD: - viddec_h264_set_is_dangling(active_fs, 1); - //PRINTF(MFD_NONE, "FN:%d fs_idc=%d FRAME_FLAG_DANGLING_TOP_FIELD\n ", (h264_frame_number+1), active_fs->fs_idc); - break; - case BOTTOM_FIELD: - //PRINTF(MFD_NONE, " FN:%d fs_idc=%d FRAME_FLAG_DANGLING_BOTTOM_FIELD \n ", (h264_frame_number+1), active_fs->fs_idc); - viddec_h264_set_is_dangling(active_fs, 1); - break; - default: - //PRINTF(MFD_NONE, "FN:%d fs_idc=%d DANGLING: FATAL_ERROR\n ", (h264_frame_number+1), active_fs->fs_idc); - break; - } - - //h264_send_new_decoded_frame(); - } - return; + h264_dpb_set_active_fs(p_dpb, fs_idc); + + //PRINTF(MFD_NONE, " fs_idc = %d DANGLING_TYPE = %d \n", fs_idc, reason); + /* + Make the check that it has not already been marked + This covers the situation of a dangling field followed by a + frame which is direct output (i.e. never entered into the dpb). + In this case we could attempt to mark the prev unpaired field + as a dangler twice which would upset the HW dpb_disp_q count + */ + + if (viddec_h264_get_is_dangling(active_fs) == 0) + { + switch (viddec_h264_get_dec_structure(active_fs)) + { + case TOP_FIELD: + viddec_h264_set_is_dangling(active_fs, 1); + //PRINTF(MFD_NONE, "FN:%d fs_idc=%d FRAME_FLAG_DANGLING_TOP_FIELD\n ", (h264_frame_number+1), active_fs->fs_idc); + break; + case BOTTOM_FIELD: + //PRINTF(MFD_NONE, " FN:%d fs_idc=%d FRAME_FLAG_DANGLING_BOTTOM_FIELD \n ", (h264_frame_number+1), active_fs->fs_idc); + viddec_h264_set_is_dangling(active_fs, 1); + break; + default: + //PRINTF(MFD_NONE, "FN:%d fs_idc=%d DANGLING: FATAL_ERROR\n ", (h264_frame_number+1), active_fs->fs_idc); + break; + } + + //h264_send_new_decoded_frame(); + } + return; } - + /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -2657,19 +2669,19 @@ void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_i void h264_dpb_is_used_for_reference(int32_t * flag) { - /* Check out below for embedded */ - *flag = 0; - if (active_fs->frame.used_for_reference) - *flag = 1; - else if (viddec_h264_get_is_used(active_fs) ==3) // frame - *flag = active_fs->frame.used_for_reference; - else - { - if (viddec_h264_get_is_used(active_fs)&0x1) // top field - *flag = active_fs->top_field.used_for_reference; - if (viddec_h264_get_is_used(active_fs)&0x2) // bottom field - *flag = *flag || active_fs->bottom_field.used_for_reference; - } + /* Check out below for embedded */ + *flag = 0; + if (active_fs->frame.used_for_reference) + *flag = 1; + else if (viddec_h264_get_is_used(active_fs) ==3) // frame + *flag = active_fs->frame.used_for_reference; + else + { + if (viddec_h264_get_is_used(active_fs)&0x1) // top field + *flag = active_fs->top_field.used_for_reference; + if (viddec_h264_get_is_used(active_fs)&0x2) // bottom field + *flag = *flag || active_fs->bottom_field.used_for_reference; + } } /* ------------------------------------------------------------------------------------------ */ @@ -2683,226 +2695,231 @@ void h264_dpb_is_used_for_reference(int32_t * flag) void h264_dpb_idr_memory_management (h264_Info * 
pInfo,seq_param_set_used_ptr active_sps, int32_t no_output_of_prior_pics_flag) { - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - uint32_t idx; - uint32_t i; - int32_t DPB_size; - int32_t FrameSizeInBytes, FrameSizeInMbs; - uint32_t data; - int32_t num_ref_frames = active_sps->num_ref_frames; - int32_t level_idc = active_sps->level_idc; - uint32_t temp_bump_level=0; - - - /// H.D----- - /// There are 2 kinds of dpb flush defined, one is with display, the other is without display - /// The function name dpb_flush actually is just the first, and the 2nd one is for error case or no_prior_output - /// We will rewrite the code below to make it clean and clear - /// - if (no_output_of_prior_pics_flag) - { - - // free all stored pictures - for (idx = 0; idx < p_dpb->used_size; idx = idx + 1) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - - //mfd_printf(" directly freeing fs_idc = %d DSN = 0x%x \n",active_fs->fs_idc, active_fs->first_dsn); - viddec_h264_set_is_frame_used(active_fs, 0); - //if( (active_fs->frame_sent == 0x01) && (active_fs->is_output == 0x0)) - { - //DECODED_FRAME sent but not DISPLAY_FRAME - h264_dpb_unmark_for_reference(p_dpb, active_fs->fs_idc); - h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); - //h264_send_new_display_frame(0x01); //send ignore_frame signal to Host - - /// Add into drop-out list for all frms in dpb without display - if(!(viddec_h264_get_is_non_existent(active_fs))) { - if( viddec_h264_get_is_output(&(p_dpb->fs[p_dpb->fs_dpb_idc[idx]])) ) { //// This frame has been displayed but not released - p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs_dpb_idc[idx]; - p_dpb->frame_numbers_need_to_be_removed ++; - } else { //// This frame will be removed without display - p_dpb->frame_id_need_to_be_dropped[p_dpb->frame_numbers_need_to_be_dropped] = p_dpb->fs_dpb_idc[idx]; - p_dpb->frame_numbers_need_to_be_dropped ++; - } - } - } - - } - - ////////////////////////////////////////// Reset Reference list - for (i = 0; i < p_dpb->ref_frames_in_buffer; i++) - p_dpb->fs_ref_idc[i] = MPD_DPB_FS_NULL_IDC; - - for (i = 0; i < p_dpb->ltref_frames_in_buffer; i++) - p_dpb->fs_ltref_idc[i] = MPD_DPB_FS_NULL_IDC; - - ////////////////////////////////////////// Reset DPB and dpb list - for (i = 0; i < p_dpb->used_size; i++) { - p_dpb->fs[p_dpb->fs_dpb_idc[i]].fs_idc = MPD_DPB_FS_NULL_IDC; - p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC; - } - - p_dpb->used_size = 0; - p_dpb->ref_frames_in_buffer = 0; - p_dpb->ltref_frames_in_buffer = 0; - - p_dpb->last_output_poc = 0x80000000; - } - else { - h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field, num_ref_frames); - } - - if (p_dpb->fs_dec_idc != MPD_DPB_FS_NULL_IDC) // added condition for use of DPB initialization - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - if (pInfo->img.long_term_reference_flag) - { - p_dpb->max_long_term_pic_idx = 0; - switch (viddec_h264_get_dec_structure(active_fs)) - { - case FRAME : active_fs->frame.is_long_term = 1; - case TOP_FIELD : active_fs->top_field.is_long_term = 1; - case BOTTOM_FIELD : active_fs->bottom_field.is_long_term = 1; - } - active_fs->long_term_frame_idx = 0; - } - else - { - p_dpb->max_long_term_pic_idx = MPD_DPB_FS_NULL_IDC; - viddec_h264_set_is_frame_long_term(active_fs, 0); - } - } - - p_dpb->OutputLevel = 0; - p_dpb->OutputLevelValid = 0; - p_dpb->OutputCtrl = 0; - - - // Set up bumping level - do this every time a parameters set is activated... 
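/*
 * A simplified model (not part of the patch; types reduced) of the
 * removed/dropped split in the IDR path above: frames the host has already
 * displayed are queued for release, frames never displayed are dropped
 * silently. The array size 17 is an assumption made for this sketch.
 */
struct idr_flush_lists {
    int removed[17]; unsigned n_removed; /* displayed: host must free them */
    int dropped[17]; unsigned n_dropped; /* never displayed: discard */
};

static void route_frame_on_idr(struct idr_flush_lists *l, int fs_idc, int is_output)
{
    if (is_output)
        l->removed[l->n_removed++] = fs_idc; /* host saw it: release the buffer */
    else
        l->dropped[l->n_dropped++] = fs_idc; /* host never saw it: drop silently */
}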
- if(active_sps->sps_disp.vui_parameters_present_flag) - { - if(active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag) - { - //p_dpb->OutputLevel = active_sps->sps_disp.vui_seq_parameters.num_reorder_frames; - //p_dpb->OutputLevelValid = 1; - } - } - - // Set up bumping level - do this every time a parameters set is activated... - switch(level_idc) - { - case h264_Level1b: - case h264_Level1: - { - if ((active_sps->profile_idc < 100) && ((active_sps->constraint_set_flags & 0x1) == 0)) { - DPB_size = 338; - } - else { - DPB_size = 149; - } - - break; - } - case h264_Level11: - { - DPB_size = 338; - break; - } - case h264_Level12: - case h264_Level13: - case h264_Level2: - { - DPB_size = 891; - break; - } - case h264_Level21: - { - DPB_size = 1782; - break; - } - case h264_Level22: - case h264_Level3: - { - DPB_size = 3038; - break; - } - case h264_Level31: - { - DPB_size = 6750; - break; - } - case h264_Level32: - { - DPB_size = 7680; - break; - } - case h264_Level4: - case h264_Level41: - { - DPB_size = 12288; - break; - } - case h264_Level42: - { - DPB_size = 13056; - break; - } - case h264_Level5: - { - DPB_size = 41400; - break; - } - case h264_Level51: - { - DPB_size = 69120; - break; - } - default : DPB_size = 69120; break; - } - - FrameSizeInMbs = pInfo->img.PicWidthInMbs * pInfo->img.FrameHeightInMbs; - FrameSizeInBytes = (FrameSizeInMbs << 8) + (FrameSizeInMbs << 7); - - if(FrameSizeInBytes) - { - - temp_bump_level = ldiv_mod_u((DPB_size << 10), FrameSizeInBytes, &data); - - if(temp_bump_level > 255) - { - p_dpb->BumpLevel = 255; - } - else - { - p_dpb->BumpLevel = (uint8_t)temp_bump_level; - } - } - - if (p_dpb->BumpLevel == 0) - p_dpb->BumpLevel = active_sps->num_ref_frames + 1; - - if (p_dpb->BumpLevel > 16) - p_dpb->BumpLevel = 16; - - - if(active_sps->sps_disp.vui_parameters_present_flag && active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag) { - - if (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > p_dpb->BumpLevel) { - //MFD_PARSER_DEBUG(ERROR_H264_DPB); - //// err handling here - } - else { - p_dpb->BumpLevel = (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > 1) ? 
- (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering) : 1; - } - } - - - // A new sequence means automatic frame release - //sei_information.disp_frozen = 0; - - return; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + uint32_t idx; + uint32_t i; + int32_t DPB_size; + int32_t FrameSizeInBytes, FrameSizeInMbs; + uint32_t data; + int32_t num_ref_frames = active_sps->num_ref_frames; + int32_t level_idc = active_sps->level_idc; + uint32_t temp_bump_level=0; + + + /// H.D----- + /// There are 2 kinds of dpb flush defined, one is with display, the other is without display + /// The function name dpb_flush actually is just the first, and the 2nd one is for error case or no_prior_output + /// We will rewrite the code below to make it clean and clear + /// + if (no_output_of_prior_pics_flag) + { + + // free all stored pictures + for (idx = 0; idx < p_dpb->used_size; idx = idx + 1) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + + //mfd_printf(" directly freeing fs_idc = %d DSN = 0x%x \n",active_fs->fs_idc, active_fs->first_dsn); + viddec_h264_set_is_frame_used(active_fs, 0); + //if( (active_fs->frame_sent == 0x01) && (active_fs->is_output == 0x0)) + { + //DECODED_FRAME sent but not DISPLAY_FRAME + h264_dpb_unmark_for_reference(p_dpb, active_fs->fs_idc); + h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); + //h264_send_new_display_frame(0x01); //send ignore_frame signal to Host + + /// Add into drop-out list for all frms in dpb without display + if (!(viddec_h264_get_is_non_existent(active_fs))) { + if ( viddec_h264_get_is_output(&(p_dpb->fs[p_dpb->fs_dpb_idc[idx]])) ) { //// This frame has been displayed but not released + p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs_dpb_idc[idx]; + p_dpb->frame_numbers_need_to_be_removed ++; + } else { //// This frame will be removed without display + p_dpb->frame_id_need_to_be_dropped[p_dpb->frame_numbers_need_to_be_dropped] = p_dpb->fs_dpb_idc[idx]; + p_dpb->frame_numbers_need_to_be_dropped ++; + } + } + } + + } + + ////////////////////////////////////////// Reset Reference list + for (i = 0; i < p_dpb->ref_frames_in_buffer; i++) + p_dpb->fs_ref_idc[i] = MPD_DPB_FS_NULL_IDC; + + for (i = 0; i < p_dpb->ltref_frames_in_buffer; i++) + p_dpb->fs_ltref_idc[i] = MPD_DPB_FS_NULL_IDC; + + ////////////////////////////////////////// Reset DPB and dpb list + for (i = 0; i < p_dpb->used_size; i++) { + p_dpb->fs[p_dpb->fs_dpb_idc[i]].fs_idc = MPD_DPB_FS_NULL_IDC; + p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC; + } + + p_dpb->used_size = 0; + p_dpb->ref_frames_in_buffer = 0; + p_dpb->ltref_frames_in_buffer = 0; + + p_dpb->last_output_poc = 0x80000000; + } + else { + h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field, num_ref_frames); + } + + if (p_dpb->fs_dec_idc != MPD_DPB_FS_NULL_IDC) // added condition for use of DPB initialization + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + if (pInfo->img.long_term_reference_flag) + { + p_dpb->max_long_term_pic_idx = 0; + switch (viddec_h264_get_dec_structure(active_fs)) + { + case FRAME : + active_fs->frame.is_long_term = 1; + case TOP_FIELD : + active_fs->top_field.is_long_term = 1; + case BOTTOM_FIELD : + active_fs->bottom_field.is_long_term = 1; + } + active_fs->long_term_frame_idx = 0; + } + else + { + p_dpb->max_long_term_pic_idx = MPD_DPB_FS_NULL_IDC; + viddec_h264_set_is_frame_long_term(active_fs, 0); + } + } + + p_dpb->OutputLevel = 0; + p_dpb->OutputLevelValid = 0; + p_dpb->OutputCtrl = 0; + + + // Set up bumping level - do this every time 
a parameters set is activated... + if (active_sps->sps_disp.vui_parameters_present_flag) + { + if (active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag) + { + //p_dpb->OutputLevel = active_sps->sps_disp.vui_seq_parameters.num_reorder_frames; + //p_dpb->OutputLevelValid = 1; + } + } + + // Set up bumping level - do this every time a parameters set is activated... + switch (level_idc) + { + case h264_Level1b: + case h264_Level1: + { + if ((active_sps->profile_idc < 100) && ((active_sps->constraint_set_flags & 0x1) == 0)) { + DPB_size = 338; + } + else { + DPB_size = 149; + } + + break; + } + case h264_Level11: + { + DPB_size = 338; + break; + } + case h264_Level12: + case h264_Level13: + case h264_Level2: + { + DPB_size = 891; + break; + } + case h264_Level21: + { + DPB_size = 1782; + break; + } + case h264_Level22: + case h264_Level3: + { + DPB_size = 3038; + break; + } + case h264_Level31: + { + DPB_size = 6750; + break; + } + case h264_Level32: + { + DPB_size = 7680; + break; + } + case h264_Level4: + case h264_Level41: + { + DPB_size = 12288; + break; + } + case h264_Level42: + { + DPB_size = 13056; + break; + } + case h264_Level5: + { + DPB_size = 41400; + break; + } + case h264_Level51: + { + DPB_size = 69120; + break; + } + default : + DPB_size = 69120; + break; + } + + FrameSizeInMbs = pInfo->img.PicWidthInMbs * pInfo->img.FrameHeightInMbs; + FrameSizeInBytes = (FrameSizeInMbs << 8) + (FrameSizeInMbs << 7); + + if (FrameSizeInBytes) + { + + temp_bump_level = ldiv_mod_u((DPB_size << 10), FrameSizeInBytes, &data); + + if (temp_bump_level > 255) + { + p_dpb->BumpLevel = 255; + } + else + { + p_dpb->BumpLevel = (uint8_t)temp_bump_level; + } + } + + if (p_dpb->BumpLevel == 0) + p_dpb->BumpLevel = active_sps->num_ref_frames + 1; + + if (p_dpb->BumpLevel > 16) + p_dpb->BumpLevel = 16; + + + if (active_sps->sps_disp.vui_parameters_present_flag && active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag) { + + if (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > p_dpb->BumpLevel) { + //MFD_PARSER_DEBUG(ERROR_H264_DPB); + //// err handling here + } + else { + p_dpb->BumpLevel = (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > 1) ? + (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering) : 1; + } + } + + + // A new sequence means automatic frame release + //sei_information.disp_frozen = 0; + + return; } //// End --- dpb_idr_memory_management /* ------------------------------------------------------------------------------------------ */ @@ -2912,43 +2929,43 @@ void h264_dpb_idr_memory_management (h264_Info * pInfo,seq_param_set_used_ptr ac // h264_dpb_remove_frame_from_dpb () // // remove one frame from DPB -// The parameter index, is the location of the frame to be removed in the +// The parameter index, is the location of the frame to be removed in the // fs_dpb_idc list. 
The used size is decremented by one ////////////////////////////////////////////////////////////////////////////// -void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx) -{ - int32_t fs_idc; - uint32_t i; - - fs_idc = p_dpb->fs_dpb_idc[idx]; - - h264_dpb_set_active_fs(p_dpb, fs_idc); - viddec_h264_set_is_frame_used(active_fs, 0); - - //add to support frame relocation interface to host - if(!(viddec_h264_get_is_non_existent(active_fs))) - { - p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs[fs_idc].fs_idc; - p_dpb->frame_numbers_need_to_be_removed ++; - } - - ///////////////////////////////////////// Reset FS - p_dpb->fs[fs_idc].fs_idc = MPD_DPB_FS_NULL_IDC; - - /////Remove unused frame from dpb-list - i = idx; - while( (i + 1)< p_dpb->used_size) - { - p_dpb->fs_dpb_idc[i] = p_dpb->fs_dpb_idc[i + 1]; - i ++; - } - p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC; - - //////////////////////////// - p_dpb->used_size--; - - return; +void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx) +{ + int32_t fs_idc; + uint32_t i; + + fs_idc = p_dpb->fs_dpb_idc[idx]; + + h264_dpb_set_active_fs(p_dpb, fs_idc); + viddec_h264_set_is_frame_used(active_fs, 0); + + //add to support frame relocation interface to host + if (!(viddec_h264_get_is_non_existent(active_fs))) + { + p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs[fs_idc].fs_idc; + p_dpb->frame_numbers_need_to_be_removed ++; + } + + ///////////////////////////////////////// Reset FS + p_dpb->fs[fs_idc].fs_idc = MPD_DPB_FS_NULL_IDC; + + /////Remove unused frame from dpb-list + i = idx; + while ( (i + 1)< p_dpb->used_size) + { + p_dpb->fs_dpb_idc[i] = p_dpb->fs_dpb_idc[i + 1]; + i ++; + } + p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC; + + //////////////////////////// + p_dpb->used_size--; + + return; } /* ------------------------------------------------------------------------------------------ */ @@ -2960,53 +2977,53 @@ void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t id // // Remove a picture from DPB which is no longer needed. 
// Search for a frame which is not used for reference and has previously been placed -// in the output queue - if find one call h264_dpb_remove_frame_from_dpb() and +// in the output queue - if find one call h264_dpb_remove_frame_from_dpb() and // set flag 1 ////////////////////////////////////////////////////////////////////////////// void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t * flag) { - uint32_t idx; - int32_t first_non_exist_valid, non_exist_idx; - int32_t used_for_reference = 0; - - *flag = 0; - first_non_exist_valid = 0x0; - non_exist_idx = 0x0; - - for (idx = 0; (idx < p_dpb->used_size) && (*flag == 0); idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - h264_dpb_is_used_for_reference(&used_for_reference); - - //if( (used_for_reference == 0x0 ) && active_fs->is_output && active_fs->is_non_existent == 0x0) - //{ - //PRINTF(MFD_NONE, " requesting to send FREE: fs_idc = %d fb_id = %d \n", active_fs->fs_idc, active_fs->fb_id); - //dpb_release_fb(&h264_dpb, active_fs->fb_id, 1); - //} - - if (viddec_h264_get_is_output(active_fs) && (used_for_reference == 0)) - { - h264_dpb_remove_frame_from_dpb(p_dpb, idx); - *flag = 1; - } -/* -/////// Removed following OLO source (Sodaville H.D) - else if ( (first_non_exist_valid == 0x0) && active_fs->is_non_existent ) - { - first_non_exist_valid = 0x01; - non_exist_idx = idx; - } -*/ - } -/* -/////// Removed following OLO source (Sodaville H.D) - if ( *flag == 0x0 && first_non_exist_valid) { - h264_dpb_remove_frame_from_dpb(p_dpb,non_exist_idx); - *flag = 1; - } -*/ - return; + uint32_t idx; + int32_t first_non_exist_valid, non_exist_idx; + int32_t used_for_reference = 0; + + *flag = 0; + first_non_exist_valid = 0x0; + non_exist_idx = 0x0; + + for (idx = 0; (idx < p_dpb->used_size) && (*flag == 0); idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + h264_dpb_is_used_for_reference(&used_for_reference); + + //if( (used_for_reference == 0x0 ) && active_fs->is_output && active_fs->is_non_existent == 0x0) + //{ + //PRINTF(MFD_NONE, " requesting to send FREE: fs_idc = %d fb_id = %d \n", active_fs->fs_idc, active_fs->fb_id); + //dpb_release_fb(&h264_dpb, active_fs->fb_id, 1); + //} + + if (viddec_h264_get_is_output(active_fs) && (used_for_reference == 0)) + { + h264_dpb_remove_frame_from_dpb(p_dpb, idx); + *flag = 1; + } + /* + /////// Removed following OLO source (Sodaville H.D) + else if ( (first_non_exist_valid == 0x0) && active_fs->is_non_existent ) + { + first_non_exist_valid = 0x01; + non_exist_idx = idx; + } + */ + } + /* + /////// Removed following OLO source (Sodaville H.D) + if ( *flag == 0x0 && first_non_exist_valid) { + h264_dpb_remove_frame_from_dpb(p_dpb,non_exist_idx); + *flag = 1; + } + */ + return; } //// End of h264_dpb_remove_unused_frame_from_dpb @@ -3022,47 +3039,47 @@ void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int ////////////////////////////////////////////////////////////////////////////// void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, int32_t *pos) { - int32_t poc_int; - uint32_t idx; - int32_t first_non_output = 1; - - *pos = MPD_DPB_FS_NULL_IDC; - - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[0]); - poc_int = active_fs->frame.poc; - - for (idx = 0; idx < p_dpb->used_size; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - - if (viddec_h264_get_is_output(active_fs) == 0) - { - //PRINTF(MFD_NONE, " active_fs->fs_idc = %d active_fs->is_used = %d, active_fs->is_dangling = 
%d , active_fs->poc = %d \n", active_fs->fs_idc, active_fs->is_used, active_fs->is_dangling, active_fs->poc); - if ((viddec_h264_get_is_used(active_fs) == 3) || (viddec_h264_get_is_dangling(active_fs))) - { - if (first_non_output) - { - *pos = idx; - first_non_output = 0; - poc_int = active_fs->frame.poc; - } - else if (poc_int > active_fs->frame.poc) - { - poc_int = active_fs->frame.poc; - *pos = idx; - } - } - else if (p_dpb->used_size == 1) - { - poc_int = active_fs->frame.poc; - *pos = idx; - } - } - } - - *poc = poc_int; - - return; + int32_t poc_int; + uint32_t idx; + int32_t first_non_output = 1; + + *pos = MPD_DPB_FS_NULL_IDC; + + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[0]); + poc_int = active_fs->frame.poc; + + for (idx = 0; idx < p_dpb->used_size; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + + if (viddec_h264_get_is_output(active_fs) == 0) + { + //PRINTF(MFD_NONE, " active_fs->fs_idc = %d active_fs->is_used = %d, active_fs->is_dangling = %d , active_fs->poc = %d \n", active_fs->fs_idc, active_fs->is_used, active_fs->is_dangling, active_fs->poc); + if ((viddec_h264_get_is_used(active_fs) == 3) || (viddec_h264_get_is_dangling(active_fs))) + { + if (first_non_output) + { + *pos = idx; + first_non_output = 0; + poc_int = active_fs->frame.poc; + } + else if (poc_int > active_fs->frame.poc) + { + poc_int = active_fs->frame.poc; + *pos = idx; + } + } + else if (p_dpb->used_size == 1) + { + poc_int = active_fs->frame.poc; + *pos = idx; + } + } + } + + *poc = poc_int; + + return; } /* ------------------------------------------------------------------------------------------ */ @@ -3076,29 +3093,29 @@ void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, i void h264_dpb_split_field (h264_Info * pInfo) { - - //active_fs->frame.poc = active_fs->frame.poc; - // active_fs->top_field.poc = active_fs->frame.poc; - // This line changed on 11/05/05 KMc - active_fs->top_field.poc = pInfo->img.toppoc; - active_fs->bottom_field.poc = pInfo->img.bottompoc; - active_fs->top_field.used_for_reference = active_fs->frame.used_for_reference & 1; - active_fs->bottom_field.used_for_reference = active_fs->frame.used_for_reference >> 1; + //active_fs->frame.poc = active_fs->frame.poc; + // active_fs->top_field.poc = active_fs->frame.poc; + // This line changed on 11/05/05 KMc + active_fs->top_field.poc = pInfo->img.toppoc; + active_fs->bottom_field.poc = pInfo->img.bottompoc; - active_fs->top_field.is_long_term = active_fs->frame.is_long_term; - active_fs->bottom_field.is_long_term = active_fs->frame.is_long_term; + active_fs->top_field.used_for_reference = active_fs->frame.used_for_reference & 1; + active_fs->bottom_field.used_for_reference = active_fs->frame.used_for_reference >> 1; - active_fs->long_term_frame_idx = active_fs->frame.long_term_frame_idx; - active_fs->top_field.long_term_frame_idx = active_fs->frame.long_term_frame_idx; - active_fs->bottom_field.long_term_frame_idx = active_fs->frame.long_term_frame_idx; + active_fs->top_field.is_long_term = active_fs->frame.is_long_term; + active_fs->bottom_field.is_long_term = active_fs->frame.is_long_term; + active_fs->long_term_frame_idx = active_fs->frame.long_term_frame_idx; + active_fs->top_field.long_term_frame_idx = active_fs->frame.long_term_frame_idx; + active_fs->bottom_field.long_term_frame_idx = active_fs->frame.long_term_frame_idx; - // Assign field mvs attached to MB-Frame buffer to the proper buffer - //! Generate field MVs from Frame MVs - // ... 
- // these will be done in RTL through using proper memory mapping - return; + + // Assign field mvs attached to MB-Frame buffer to the proper buffer + //! Generate field MVs from Frame MVs + // ... + // these will be done in RTL through using proper memory mapping + return; } @@ -3114,23 +3131,23 @@ void h264_dpb_split_field (h264_Info * pInfo) void h264_dpb_combine_field(int32_t use_old) { - //remove warning - use_old = use_old; - - active_fs->frame.poc = (active_fs->top_field.poc < active_fs->bottom_field.poc)? - active_fs->top_field.poc: active_fs->bottom_field.poc; + //remove warning + use_old = use_old; + + active_fs->frame.poc = (active_fs->top_field.poc < active_fs->bottom_field.poc)? + active_fs->top_field.poc: active_fs->bottom_field.poc; - //active_fs->frame.poc = active_fs->poc; + //active_fs->frame.poc = active_fs->poc; - active_fs->frame.used_for_reference = active_fs->top_field.used_for_reference |(active_fs->bottom_field.used_for_reference); + active_fs->frame.used_for_reference = active_fs->top_field.used_for_reference |(active_fs->bottom_field.used_for_reference); - active_fs->frame.is_long_term = active_fs->top_field.is_long_term |(active_fs->bottom_field.is_long_term <<1); + active_fs->frame.is_long_term = active_fs->top_field.is_long_term |(active_fs->bottom_field.is_long_term <<1); - if (active_fs->frame.is_long_term) - active_fs->frame.long_term_frame_idx = active_fs->long_term_frame_idx; + if (active_fs->frame.is_long_term) + active_fs->frame.long_term_frame_idx = active_fs->long_term_frame_idx; - return; + return; } @@ -3143,33 +3160,33 @@ void h264_dpb_combine_field(int32_t use_old) // // Perform Sliding window decoded reference picture marking process // It must be the reference frame, complementary reference field pair -// or non-paired reference field that has the smallest value of -// FrameNumWrap which is marked as unused for reference. Note : We CANNOT -// simply use frame_num!!!! +// or non-paired reference field that has the smallest value of +// FrameNumWrap which is marked as unused for reference. Note : We CANNOT +// simply use frame_num!!!! 
//
-// Although we hold frame_num_wrap in SW, currently, this is not
-// being updated for every picture (the b-picture parameter non-update
-// phenomenon of the reference software)
+// Although we hold frame_num_wrap in SW, currently, this is not
+// being updated for every picture (the b-picture parameter non-update
+// phenomenon of the reference software)
//////////////////////////////////////////////////////////////////////////////
void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb, int32_t NonExisting, int32_t num_ref_frames)
{
- // if this is a reference pic with sliding window, unmark first ref frame
- // should this be (p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer)
- // Rem: adaptive marking can be on a slice by slice basis so we
- // could have pictures marked as long term reference in adaptive marking and then
- // the marking mode changed back to sliding_window_memory_management
- if (p_dpb->ref_frames_in_buffer >= (num_ref_frames - p_dpb->ltref_frames_in_buffer))
- {
- h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
- h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
-
- if(NonExisting == 0)
- {
- h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
- viddec_h264_set_is_frame_long_term(active_fs, 0);
- }
- }
+ // if this is a reference pic with sliding window, unmark first ref frame
+ // should this be (p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer)
+ // Rem: adaptive marking can be on a slice by slice basis so we
+ // could have pictures marked as long term reference in adaptive marking and then
+ // the marking mode changed back to sliding_window_memory_management
+ if (p_dpb->ref_frames_in_buffer >= (num_ref_frames - p_dpb->ltref_frames_in_buffer))
+ {
+ h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+
+ if (NonExisting == 0)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+ viddec_h264_set_is_frame_long_term(active_fs, 0);
+ }
+ }
}

/* ------------------------------------------------------------------------------------------ */
/* ------------------------------------------------------------------------------------------ */
//////////////////////////////////////////////////////////////////////////////
// h264_dpb_store_picture_in_dpb ()
//
-// First we run the marking procedure.
+// First we run the marking procedure.
// Then, before we add the current frame_store to the list of reference stores we run some checks
// These include checking the number of existing reference frames
// in DPB and if necessary, flushing frames. 
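For readers following the sliding-window logic above: once the short-term reference count reaches num_ref_frames minus the long-term count, the oldest short-term entry (index 0 of fs_ref_idc, i.e. the picture with the smallest FrameNumWrap) is unmarked and dropped from the reference list. The standalone C sketch below restates that rule outside the parser; the toy_dpb type, TOY_MAX_REFS constant, and toy_sliding_window helper are illustrative stand-ins, not part of libmix.

#include <stdint.h>
#include <string.h>

#define TOY_MAX_REFS 16

/* Toy model of the short-term reference list kept in fs_ref_idc[]:
   index 0 always holds the oldest short-term reference. */
typedef struct {
    int32_t ref_idc[TOY_MAX_REFS]; /* short-term reference frame-store ids */
    int32_t ref_count;             /* mirrors p_dpb->ref_frames_in_buffer */
    int32_t ltref_count;           /* mirrors p_dpb->ltref_frames_in_buffer */
} toy_dpb;

/* Sliding-window marking: returns the id that was unmarked, or -1 when the
   window is not yet full (the same condition tested in
   h264_dpb_sliding_window_memory_management above). */
static int32_t toy_sliding_window(toy_dpb *dpb, int32_t num_ref_frames)
{
    int32_t oldest;

    if (dpb->ref_count <= 0 ||
        dpb->ref_count < num_ref_frames - dpb->ltref_count)
        return -1; /* still room in the window: nothing to unmark */

    oldest = dpb->ref_idc[0]; /* oldest short-term reference */
    memmove(&dpb->ref_idc[0], &dpb->ref_idc[1],
            (size_t)(dpb->ref_count - 1) * sizeof(int32_t));
    dpb->ref_count--; /* plays the role of h264_dpb_remove_ref_list() */
    return oldest;
}

Calling this once per newly marked reference keeps at most num_ref_frames - ltref_count short-term references, which is the invariant the DPB code maintains.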
@@ -3200,105 +3217,105 @@ void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb, void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int32_t * existing) { - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - - h264_dpb_set_active_fs(p_dpb, fs_idc); - - //h264_dpb_push_output_queue(); - if(pInfo->sei_information.disp_frozen) - { - // check pocs - if(active_fs->top_field.poc >= pInfo->sei_information.freeze_POC) - { - if(active_fs->top_field.poc < pInfo->sei_information.release_POC) - { - viddec_h264_set_is_top_skipped(active_fs, 1); - } - else - { - pInfo->sei_information.disp_frozen = 0; - } - } - - if(active_fs->bottom_field.poc >= pInfo->sei_information.freeze_POC) - { - if(active_fs->bottom_field.poc < pInfo->sei_information.release_POC) - { - viddec_h264_set_is_bottom_skipped(active_fs, 1); - } - else - { - pInfo->sei_information.disp_frozen = 0; - } - } - } - - if ( viddec_h264_get_broken_link_picture(active_fs) ) - pInfo->sei_information.broken_link = 1; - - if( pInfo->sei_information.broken_link) - { - // Check if this was the recovery point picture - going to have recovery point on - // a frame basis - if(viddec_h264_get_recovery_pt_picture(active_fs)) - { - pInfo->sei_information.broken_link = 0; - // Also reset wait on sei recovery point picture - p_dpb->WaitSeiRecovery = 0; - } - else - { - viddec_h264_set_is_frame_skipped(active_fs, 3); - } - } - else - { - // even if this is not a broken - link, we need to follow SEI recovery point rules - // Did we use SEI recovery point for th elast restart? - if ( p_dpb->WaitSeiRecovery ) - { - if ( viddec_h264_get_recovery_pt_picture(active_fs) ) { - p_dpb->WaitSeiRecovery = 0; - } else { - viddec_h264_set_is_frame_skipped(active_fs, 3); - } - } - } - - if ( p_dpb->SuspendOutput ) - { - if ( viddec_h264_get_open_gop_entry(active_fs) ) { - p_dpb->SuspendOutput = 0; - } else{ - viddec_h264_set_is_frame_skipped(active_fs, 3); - } - } - - //h264_send_new_display_frame(0x0); - viddec_h264_set_is_output(active_fs, 1); - - if(viddec_h264_get_is_non_existent(active_fs) == 0) - { - *existing = 1; - p_dpb->frame_id_need_to_be_displayed[p_dpb->frame_numbers_need_to_be_displayed]=active_fs->fs_idc; - p_dpb->frame_numbers_need_to_be_displayed++; - - //if(direct) - //h264_dpb_remove_frame_from_dpb(p_dpb, active_fs->fs_idc); // Remove dpb.fs_dpb_idc[pos] - } - else - { - *existing = 0; - } - - if(direct) { - viddec_h264_set_is_frame_used(active_fs, 0); - active_fs->frame.used_for_reference = 0; - active_fs->top_field.used_for_reference = 0; - active_fs->bottom_field.used_for_reference = 0; - active_fs->fs_idc = MPD_DPB_FS_NULL_IDC; - } - return; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + h264_dpb_set_active_fs(p_dpb, fs_idc); + + //h264_dpb_push_output_queue(); + if (pInfo->sei_information.disp_frozen) + { + // check pocs + if (active_fs->top_field.poc >= pInfo->sei_information.freeze_POC) + { + if (active_fs->top_field.poc < pInfo->sei_information.release_POC) + { + viddec_h264_set_is_top_skipped(active_fs, 1); + } + else + { + pInfo->sei_information.disp_frozen = 0; + } + } + + if (active_fs->bottom_field.poc >= pInfo->sei_information.freeze_POC) + { + if (active_fs->bottom_field.poc < pInfo->sei_information.release_POC) + { + viddec_h264_set_is_bottom_skipped(active_fs, 1); + } + else + { + pInfo->sei_information.disp_frozen = 0; + } + } + } + + if ( viddec_h264_get_broken_link_picture(active_fs) ) + pInfo->sei_information.broken_link = 1; + + if ( pInfo->sei_information.broken_link) + { + 
// Check if this was the recovery point picture - going to have recovery point on
+ // a frame basis
+ if (viddec_h264_get_recovery_pt_picture(active_fs))
+ {
+ pInfo->sei_information.broken_link = 0;
+ // Also reset wait on sei recovery point picture
+ p_dpb->WaitSeiRecovery = 0;
+ }
+ else
+ {
+ viddec_h264_set_is_frame_skipped(active_fs, 3);
+ }
+ }
+ else
+ {
+ // even if this is not a broken link, we need to follow SEI recovery point rules
+ // Did we use SEI recovery point for the last restart?
+ if ( p_dpb->WaitSeiRecovery )
+ {
+ if ( viddec_h264_get_recovery_pt_picture(active_fs) ) {
+ p_dpb->WaitSeiRecovery = 0;
+ } else {
+ viddec_h264_set_is_frame_skipped(active_fs, 3);
+ }
+ }
+ }
+
+ if ( p_dpb->SuspendOutput )
+ {
+ if ( viddec_h264_get_open_gop_entry(active_fs) ) {
+ p_dpb->SuspendOutput = 0;
+ } else {
+ viddec_h264_set_is_frame_skipped(active_fs, 3);
+ }
+ }
+
+ //h264_send_new_display_frame(0x0);
+ viddec_h264_set_is_output(active_fs, 1);
+
+ if (viddec_h264_get_is_non_existent(active_fs) == 0)
+ {
+ *existing = 1;
+ p_dpb->frame_id_need_to_be_displayed[p_dpb->frame_numbers_need_to_be_displayed]=active_fs->fs_idc;
+ p_dpb->frame_numbers_need_to_be_displayed++;
+
+ //if(direct)
+ //h264_dpb_remove_frame_from_dpb(p_dpb, active_fs->fs_idc); // Remove dpb.fs_dpb_idc[pos]
+ }
+ else
+ {
+ *existing = 0;
+ }
+
+ if (direct) {
+ viddec_h264_set_is_frame_used(active_fs, 0);
+ active_fs->frame.used_for_reference = 0;
+ active_fs->top_field.used_for_reference = 0;
+ active_fs->bottom_field.used_for_reference = 0;
+ active_fs->fs_idc = MPD_DPB_FS_NULL_IDC;
+ }
+ return;
}
///////// End of dpb frame output

@@ -3308,7 +3325,7 @@ void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int
//////////////////////////////////////////////////////////////////////////////
// h264_dpb_output_one_frame_from_dpb ()
//
-// Output one frame stored in the DPB. Basically this results in its placement
+// Output one frame stored in the DPB. Basically this results in its placement
// in the fs_output_idc list.
// Placement in the output queue should cause an automatic removal from the dpb
// if the frame store is not being used as a reference
@@ -3316,88 +3333,88 @@ void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int
//////////////////////////////////////////////////////////////////////////////
int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo,int32_t direct, int32_t request, int32_t num_ref_frames)
{
- h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
- int32_t poc;
- int32_t pos;
- int32_t used_for_reference;
-
- int32_t existing = 0;
- int32_t is_refused = 0;
- int32_t is_pushed = 0;
-
- //remove warning
- request = request;
-
- if(direct)
- {
- h264_dpb_frame_output(pInfo, p_dpb->fs_dec_idc, 1, &existing);
- }
- else
- {
- if(p_dpb->used_size != 0)
- {
- // Should this be dpb.not_as_yet_output_num > 0 ??
- // There should maybe be a is_refused == 0 condition instead... 
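// Overview of the bumping loop that follows: each pass selects the frame
// store holding the smallest not-yet-output POC (h264_dpb_get_smallest_poc),
// pushes it to the display queue (h264_dpb_frame_output), and, once it is no
// longer needed for reference, releases its DPB slot
// (h264_dpb_remove_frame_from_dpb). The else branch inside the loop is error
// recovery for streams that over-fill the DPB with reference frames.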
- while ((p_dpb->used_size > 0) && (existing == 0) && (is_refused == 0)) - { - // find smallest non-output POC - h264_dpb_get_smallest_poc(p_dpb, &poc, &pos); - if (pos != MPD_DPB_FS_NULL_IDC) - { - // put it into the output queue - h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing); - - p_dpb->last_output_poc = poc; - if (existing) is_pushed = 1; - // If non-reference, free frame store and move empty store to end of buffer - - h264_dpb_is_used_for_reference(&used_for_reference); - if (!(used_for_reference)) - h264_dpb_remove_frame_from_dpb(p_dpb, pos); // Remove dpb.fs_dpb_idc[pos] - } - else - { - int32_t flag; - uint32_t idx; - - // This is basically an error condition caused by too many reference frames in the DPB. - // It should only happen in errored streams, and can happen if this picture had an MMCO, - // thus disabling h264_dpb_sliding_window_memory_management(), which would normally have - // unmarked the oldest reference frame. - h264_dpb_sliding_window_memory_management(p_dpb, 0,num_ref_frames); - h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); - - if (flag == 0) { - for (idx = 0; idx < p_dpb->used_size; idx++) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - h264_dpb_is_used_for_reference(&used_for_reference); - - if (used_for_reference) { - break; - } - } - - if (idx < p_dpb->used_size) { - // Short term - h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx]); - h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx]); - - // Long term - h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_dpb_idc[idx]); - h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_dpb_idc[idx]); - - // Remove from DPB - h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); - } - } - return 1; - } - } - } - } - - return is_pushed; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + int32_t poc; + int32_t pos; + int32_t used_for_reference; + + int32_t existing = 0; + int32_t is_refused = 0; + int32_t is_pushed = 0; + + //remove warning + request = request; + + if (direct) + { + h264_dpb_frame_output(pInfo, p_dpb->fs_dec_idc, 1, &existing); + } + else + { + if (p_dpb->used_size != 0) + { + // Should this be dpb.not_as_yet_output_num > 0 ?? + // There should maybe be a is_refused == 0 condition instead... + while ((p_dpb->used_size > 0) && (existing == 0) && (is_refused == 0)) + { + // find smallest non-output POC + h264_dpb_get_smallest_poc(p_dpb, &poc, &pos); + if (pos != MPD_DPB_FS_NULL_IDC) + { + // put it into the output queue + h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing); + + p_dpb->last_output_poc = poc; + if (existing) is_pushed = 1; + // If non-reference, free frame store and move empty store to end of buffer + + h264_dpb_is_used_for_reference(&used_for_reference); + if (!(used_for_reference)) + h264_dpb_remove_frame_from_dpb(p_dpb, pos); // Remove dpb.fs_dpb_idc[pos] + } + else + { + int32_t flag; + uint32_t idx; + + // This is basically an error condition caused by too many reference frames in the DPB. + // It should only happen in errored streams, and can happen if this picture had an MMCO, + // thus disabling h264_dpb_sliding_window_memory_management(), which would normally have + // unmarked the oldest reference frame. 
+ h264_dpb_sliding_window_memory_management(p_dpb, 0,num_ref_frames); + h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + + if (flag == 0) { + for (idx = 0; idx < p_dpb->used_size; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + h264_dpb_is_used_for_reference(&used_for_reference); + + if (used_for_reference) { + break; + } + } + + if (idx < p_dpb->used_size) { + // Short term + h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx]); + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx]); + + // Long term + h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_dpb_idc[idx]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_dpb_idc[idx]); + + // Remove from DPB + h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + } + } + return 1; + } + } + } + } + + return is_pushed; } @@ -3408,25 +3425,25 @@ int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo,int32_t direct, int3 ////////////////////////////////////////////////////////////////////////////// // h264_dpb_queue_update // -// This should be called anytime the output queue might be changed +// This should be called anytime the output queue might be changed ////////////////////////////////////////////////////////////////////////////// int32_t h264_dpb_queue_update(h264_Info* pInfo,int32_t push, int32_t direct, int32_t frame_request, int32_t num_ref_frames) { - int32_t frame_output = 0; + int32_t frame_output = 0; - if(push) - { - frame_output = h264_dpb_output_one_frame_from_dpb(pInfo, direct, 0, num_ref_frames); - } - else if(frame_request) - { - frame_output = h264_dpb_output_one_frame_from_dpb(pInfo, 0, 1,num_ref_frames); - } + if (push) + { + frame_output = h264_dpb_output_one_frame_from_dpb(pInfo, direct, 0, num_ref_frames); + } + else if (frame_request) + { + frame_output = h264_dpb_output_one_frame_from_dpb(pInfo, 0, 1,num_ref_frames); + } - return frame_output; + return frame_output; } @@ -3439,47 +3456,47 @@ int32_t h264_dpb_queue_update(h264_Info* pInfo,int32_t push, int32_t direct, int // // Unmarks all reference pictures in the short-term and long term lists and // in doing so resets the lists. 
-// +// // Flushing the dpb, adds all the current frames in the dpb, not already on the output list -// to the output list and removes them from the dpb (they will all be marked as unused for +// to the output list and removes them from the dpb (they will all be marked as unused for // reference first) ////////////////////////////////////////////////////////////////////////////// void h264_dpb_flush_dpb (h264_Info* pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames) { - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - - int32_t idx, flag; - int32_t ref_frames_in_buffer; - - ref_frames_in_buffer = p_dpb->ref_frames_in_buffer; - - for (idx = 0; idx < ref_frames_in_buffer; idx++){ - h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]); - h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]); - } - - ref_frames_in_buffer = p_dpb->ltref_frames_in_buffer; - - for (idx = 0; idx < ref_frames_in_buffer; idx++) - { - h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[0]); - h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]); - } - - // output frames in POC order - if (output_all) { - while (p_dpb->used_size - keep_complement) { - h264_dpb_queue_update(pInfo, 1, 0, 0,num_ref_frames); - } - } - - flag = 1; - while (flag) { - h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); - } - - return; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + int32_t idx, flag; + int32_t ref_frames_in_buffer; + + ref_frames_in_buffer = p_dpb->ref_frames_in_buffer; + + for (idx = 0; idx < ref_frames_in_buffer; idx++) { + h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]); + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]); + } + + ref_frames_in_buffer = p_dpb->ltref_frames_in_buffer; + + for (idx = 0; idx < ref_frames_in_buffer; idx++) + { + h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[0]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]); + } + + // output frames in POC order + if (output_all) { + while (p_dpb->used_size - keep_complement) { + h264_dpb_queue_update(pInfo, 1, 0, 0,num_ref_frames); + } + } + + flag = 1; + while (flag) { + h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + } + + return; } /* ------------------------------------------------------------------------------------------ */ @@ -3488,66 +3505,66 @@ void h264_dpb_flush_dpb (h264_Info* pInfo,int32_t output_all, int32_t keep_compl ////////////////////////////////////////////////////////////////////////////// // h264_dpb_reset_dpb () // -// Used to reset the contents of dpb +// Used to reset the contents of dpb // Must calculate memory (aligned) pointers for each of the possible frame stores // -// Also want to calculate possible max dpb size in terms of frames +// Also want to calculate possible max dpb size in terms of frames // We should have an active SPS when we call this ftn to calc bumping level ////////////////////////////////////////////////////////////////////////////// void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHeightInMbs, int32_t SizeChange, int32_t no_output_of_prior_pics_flag) { - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - int32_t num_ref_frames = pInfo->active_SPS.num_ref_frames; + int32_t num_ref_frames = pInfo->active_SPS.num_ref_frames; - // If half way through a frame then Frame in progress will still be high, - // so mark the previous field as a dangling field. This is also needed to - // keep cs7050_sif_dpb_disp_numb_ptr correct. 
Better to reset instead? - if(p_dpb->used_size) - { - int32_t idx; - idx = p_dpb->used_size-1; - if (p_dpb->fs_dpb_idc[idx] != MPD_DPB_FS_NULL_IDC) - { - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + // If half way through a frame then Frame in progress will still be high, + // so mark the previous field as a dangling field. This is also needed to + // keep cs7050_sif_dpb_disp_numb_ptr correct. Better to reset instead? + if (p_dpb->used_size) + { + int32_t idx; + idx = p_dpb->used_size-1; + if (p_dpb->fs_dpb_idc[idx] != MPD_DPB_FS_NULL_IDC) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - if(viddec_h264_get_is_used(active_fs) != 3) - h264_dpb_mark_dangling_field(p_dpb, active_fs->fs_idc); //, DANGLING_TYPE_DPB_RESET - } - } + if (viddec_h264_get_is_used(active_fs) != 3) + h264_dpb_mark_dangling_field(p_dpb, active_fs->fs_idc); //, DANGLING_TYPE_DPB_RESET + } + } - // initialize software DPB - if(active_fs) { - viddec_h264_set_dec_structure(active_fs, INVALID); - } - h264_dpb_idr_memory_management(pInfo, &pInfo->active_SPS, no_output_of_prior_pics_flag); // implied no_output_of_prior_pics_flag==1 + // initialize software DPB + if (active_fs) { + viddec_h264_set_dec_structure(active_fs, INVALID); + } + h264_dpb_idr_memory_management(pInfo, &pInfo->active_SPS, no_output_of_prior_pics_flag); // implied no_output_of_prior_pics_flag==1 - // May always be a size change which calls this function now... - // could eliminate below branch - if(SizeChange) - { + // May always be a size change which calls this function now... + // could eliminate below branch + if (SizeChange) + { - /*** - Note : 21/03/2005 14:16 - Danger asociated with resetting curr_alloc_mem as it would allow the FW top reallocate - frame stores from 0 -> NUM_FRAME_STORES again - could lead to queue overflow and corruption + /*** + Note : 21/03/2005 14:16 + Danger asociated with resetting curr_alloc_mem as it would allow the FW top reallocate + frame stores from 0 -> NUM_FRAME_STORES again - could lead to queue overflow and corruption - Placed in size change condition in the hope that this will only ensure dpb is empty - and thus this behaviour is valid before continuing again - ***/ + Placed in size change condition in the hope that this will only ensure dpb is empty + and thus this behaviour is valid before continuing again + ***/ - p_dpb->PicWidthInMbs = PicWidthInMbs; - p_dpb->FrameHeightInMbs = FrameHeightInMbs; + p_dpb->PicWidthInMbs = PicWidthInMbs; + p_dpb->FrameHeightInMbs = FrameHeightInMbs; - p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC; - //Flush the current DPB. - h264_dpb_flush_dpb(pInfo, 1,0,num_ref_frames); - } + p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC; + //Flush the current DPB. 
+ h264_dpb_flush_dpb(pInfo, 1,0,num_ref_frames); + } - return; + return; } ///// End of reset DPB /* ------------------------------------------------------------------------------------------ */ @@ -3564,29 +3581,29 @@ void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHe // --------------------------------------------------------------------------- int32_t dpb_setup_free_fb( h264_DecodedPictureBuffer *p_dpb, uint8_t* fb_id, pip_setting_t* pip_setting ) { - uint8_t idx; + uint8_t idx; + + //remove warning + pip_setting = pip_setting; - //remove warning - pip_setting = pip_setting; + for (idx = 0; idx < NUM_DPB_FRAME_STORES; idx++) + { + if (p_dpb->fs[idx].fs_idc == MPD_DPB_FS_NULL_IDC) + { + *fb_id = idx; + break; + } + } - for (idx = 0; idx < NUM_DPB_FRAME_STORES; idx++) - { - if (p_dpb->fs[idx].fs_idc == MPD_DPB_FS_NULL_IDC) - { - *fb_id = idx; - break; - } - } + if (idx == NUM_DPB_FRAME_STORES) + return 1; - if(idx == NUM_DPB_FRAME_STORES) - return 1; + p_dpb->fs[idx].fs_idc = idx; - p_dpb->fs[idx].fs_idc = idx; + return 0; - return 0; - -} +} /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -3595,71 +3612,71 @@ int32_t dpb_setup_free_fb( h264_DecodedPictureBuffer *p_dpb, uint8_t* fb_id, pip // h264_dpb_assign_frame_store () // // may need a non-existing option parameter -// +// int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting) { - uint8_t idc = MPD_DPB_FS_NULL_IDC; - pip_setting_t pip_setting; - h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - - - while( dpb_setup_free_fb(p_dpb, &idc, &pip_setting) != 0 ) { - /// - /// Generally this is triggered a error case, no more frame buffer avaliable for next - /// What we do here is just remove one with min-POC before get more info - /// - - int32_t pos = 0, poc = 0, existing = 1; - - // find smallest non-output POC - h264_dpb_get_smallest_poc(p_dpb, &poc, &pos); - if (pos != MPD_DPB_FS_NULL_IDC) - { - // put it into the output queue - h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing); - p_dpb->last_output_poc = poc; - h264_dpb_remove_frame_from_dpb(p_dpb, pos); // Remove dpb.fs_dpb_idc[pos] - } - } - - - if(NonExisting) { - p_dpb->fs_non_exist_idc = idc; - }else { - p_dpb->fs_dec_idc = idc; - } + uint8_t idc = MPD_DPB_FS_NULL_IDC; + pip_setting_t pip_setting; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + + while ( dpb_setup_free_fb(p_dpb, &idc, &pip_setting) != 0 ) { + /// + /// Generally this is triggered a error case, no more frame buffer avaliable for next + /// What we do here is just remove one with min-POC before get more info + /// + + int32_t pos = 0, poc = 0, existing = 1; + + // find smallest non-output POC + h264_dpb_get_smallest_poc(p_dpb, &poc, &pos); + if (pos != MPD_DPB_FS_NULL_IDC) + { + // put it into the output queue + h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing); + p_dpb->last_output_poc = poc; + h264_dpb_remove_frame_from_dpb(p_dpb, pos); // Remove dpb.fs_dpb_idc[pos] + } + } + + + if (NonExisting) { + p_dpb->fs_non_exist_idc = idc; + } else { + p_dpb->fs_dec_idc = idc; + } //add to support frame relocation interface to host - if(!NonExisting) + if (!NonExisting) { - p_dpb->frame_numbers_need_to_be_allocated = 1; - p_dpb->frame_id_need_to_be_allocated = p_dpb->fs_dec_idc; - } - - - ///////////////////////////////h264_dpb_reset_fs(); - h264_dpb_set_active_fs(p_dpb, idc); - 
active_fs->fs_flag_1 = 0;
- active_fs->fs_flag_2 = 0;
- viddec_h264_set_is_non_existent(active_fs, NonExisting);
- viddec_h264_set_is_output(active_fs, (NonExisting?1:0));
-
- active_fs->pic_type = ((FRAME_TYPE_INVALID<<FRAME_TYPE_TOP_OFFSET)|(FRAME_TYPE_INVALID<<FRAME_TYPE_BOTTOM_OFFSET));
-
- // ->is_used is reset on removal from dpb, no need for it here
- // ->poc would only be changed when we overwrite on insert_Picture_in_dpb()
- // but would be used by get_smallest_poc()
- // ->top.poc would also not be overwritten until a new valid value comes along,
- // but I don't think it is used before then so no need to reset
- //active_fs->is_long_term = 0;
- active_fs->frame.used_for_reference = 0;
- active_fs->frame.poc = 0;
-
- return 1;
+ p_dpb->frame_numbers_need_to_be_allocated = 1;
+ p_dpb->frame_id_need_to_be_allocated = p_dpb->fs_dec_idc;
+ }
+
+
+ ///////////////////////////////h264_dpb_reset_fs();
+ h264_dpb_set_active_fs(p_dpb, idc);
+ active_fs->fs_flag_1 = 0;
+ active_fs->fs_flag_2 = 0;
+ viddec_h264_set_is_non_existent(active_fs, NonExisting);
+ viddec_h264_set_is_output(active_fs, (NonExisting?1:0));
+
+ active_fs->pic_type = ((FRAME_TYPE_INVALID<<FRAME_TYPE_TOP_OFFSET)|(FRAME_TYPE_INVALID<<FRAME_TYPE_BOTTOM_OFFSET));
+
+ // ->is_used is reset on removal from dpb, no need for it here
+ // ->poc would only be changed when we overwrite on insert_Picture_in_dpb()
+ // but would be used by get_smallest_poc()
+ // ->top.poc would also not be overwritten until a new valid value comes along,
+ // but I don't think it is used before then so no need to reset
+ //active_fs->is_long_term = 0;
+ active_fs->frame.used_for_reference = 0;
+ active_fs->frame.poc = 0;
+
+ return 1;
}

@@ -3673,42 +3690,42 @@ int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting)
//
void h264_dpb_update_queue_dangling_field(h264_Info * pInfo)
{
- h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb;
- int32_t prev_pic_unpaired_field = 0;
-
- if(dpb_ptr->used_size > dpb_ptr->BumpLevel)
- {
- if (dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1] != MPD_DPB_FS_NULL_IDC)
- {
- h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]);
- if(viddec_h264_get_is_used(active_fs) != 3)
- {
- prev_pic_unpaired_field = 1;
- }
- }
-
- if (pInfo->img.structure != FRAME)
- {
- // To prove this is the second field,
- // 1) The previous picture is an (as yet) unpaired field
- if(prev_pic_unpaired_field)
- {
- // If we establish the previous pic was an unpaired field and this picture is not
- // its complement, the previous picture was a dangling field
- if(pInfo->img.second_field == 0) {
- while(dpb_ptr->used_size > dpb_ptr->BumpLevel)
- h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
- }
- }
- }
- else if (prev_pic_unpaired_field) {
- while(dpb_ptr->used_size > dpb_ptr->BumpLevel)
- h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
- }
- }
-
-
- return;
+ h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb;
+ int32_t prev_pic_unpaired_field = 0;
+
+ if (dpb_ptr->used_size > dpb_ptr->BumpLevel)
+ {
+ if (dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1] != MPD_DPB_FS_NULL_IDC)
+ {
+ h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]);
+ if (viddec_h264_get_is_used(active_fs) != 3)
+ {
+ prev_pic_unpaired_field = 1;
+ }
+ }
+
+ if (pInfo->img.structure != FRAME)
+ {
+ // To prove this is the second field,
+ // 1) The previous picture is an (as yet) unpaired field
+ if (prev_pic_unpaired_field)
+ {
+ // If we establish the previous pic was an unpaired field and this picture is not
+ // its complement, the previous picture was a dangling field
+ if (pInfo->img.second_field == 
0) { + while (dpb_ptr->used_size > dpb_ptr->BumpLevel) + h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame + } + } + } + else if (prev_pic_unpaired_field) { + while (dpb_ptr->used_size > dpb_ptr->BumpLevel) + h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame + } + } + + + return; } ///// End of init Frame Store @@ -3723,145 +3740,148 @@ void h264_dpb_update_queue_dangling_field(h264_Info * pInfo) void h264_dpb_init_frame_store(h264_Info * pInfo) { - h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb; - - int32_t free_fs_found; - int32_t idx = 0; - int32_t prev_pic_unpaired_field = 0; - int32_t prev_idc = MPD_DPB_FS_NULL_IDC; - int32_t structure = pInfo->img.structure; - - if(dpb_ptr->used_size) - { - idx = dpb_ptr->used_size-1; - prev_idc = dpb_ptr->fs_dpb_idc[idx]; - } - - if (prev_idc != MPD_DPB_FS_NULL_IDC) - { - h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]); - if(viddec_h264_get_is_used(active_fs) != 3) - { - //PRINTF(MFD_NONE, " FN: %d active_fs->is_used = %d \n", (h264_frame_number+1), active_fs->is_used); - prev_pic_unpaired_field = 1; - } - } - - //if ((pInfo->img.curr_has_mmco_5) || (pInfo->img.idr_flag)) curr_fld_not_prev_comp = 1; - - if (structure != FRAME) - { - - // To prove this is the second field, - // 1) The previous picture is an (as yet) unpaired field - if(prev_pic_unpaired_field) - { - // If we establish the previous pic was an unpaired field and this picture is not - // its complement, the previous picture was a dangling field - if(pInfo->img.second_field == 0) - h264_dpb_mark_dangling_field(dpb_ptr, active_fs->fs_idc); //, DANGLING_TYPE_FIELD - } - } - else if (prev_pic_unpaired_field) { - h264_dpb_mark_dangling_field(dpb_ptr, active_fs->fs_idc); //, DANGLING_TYPE_FRAME - } - - free_fs_found = 0; - - // If this is not a second field, we must find a free space for the current picture - if (!(pInfo->img.second_field)) - { - dpb_ptr->fs_dec_idc = MPD_DPB_FS_NULL_IDC; - free_fs_found = h264_dpb_assign_frame_store(pInfo, 0); - //h264_frame_number++; - //PRINTF(MFD_NONE, " FN: %d (inc) fs_idc = %d \n", (h264_frame_number+1), dpb.fs_dec_idc); - } - - h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dec_idc); - - ////////////// TODO: THe following init + h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb; + + int32_t free_fs_found; + int32_t idx = 0; + int32_t prev_pic_unpaired_field = 0; + int32_t prev_idc = MPD_DPB_FS_NULL_IDC; + int32_t structure = pInfo->img.structure; + + if (dpb_ptr->used_size) + { + idx = dpb_ptr->used_size-1; + prev_idc = dpb_ptr->fs_dpb_idc[idx]; + } + + if (prev_idc != MPD_DPB_FS_NULL_IDC) + { + h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]); + if (viddec_h264_get_is_used(active_fs) != 3) + { + //PRINTF(MFD_NONE, " FN: %d active_fs->is_used = %d \n", (h264_frame_number+1), active_fs->is_used); + prev_pic_unpaired_field = 1; + } + } + + //if ((pInfo->img.curr_has_mmco_5) || (pInfo->img.idr_flag)) curr_fld_not_prev_comp = 1; + + if (structure != FRAME) + { + + // To prove this is the second field, + // 1) The previous picture is an (as yet) unpaired field + if (prev_pic_unpaired_field) + { + // If we establish the previous pic was an unpaired field and this picture is not + // its complement, the previous picture was a dangling field + if (pInfo->img.second_field == 0) + h264_dpb_mark_dangling_field(dpb_ptr, active_fs->fs_idc); //, DANGLING_TYPE_FIELD + } + } + else if (prev_pic_unpaired_field) { + 
h264_dpb_mark_dangling_field(dpb_ptr, active_fs->fs_idc); //, DANGLING_TYPE_FRAME + } + + free_fs_found = 0; + + // If this is not a second field, we must find a free space for the current picture + if (!(pInfo->img.second_field)) + { + dpb_ptr->fs_dec_idc = MPD_DPB_FS_NULL_IDC; + free_fs_found = h264_dpb_assign_frame_store(pInfo, 0); + //h264_frame_number++; + //PRINTF(MFD_NONE, " FN: %d (inc) fs_idc = %d \n", (h264_frame_number+1), dpb.fs_dec_idc); + } + + h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dec_idc); + + ////////////// TODO: THe following init #if 1 - if( pInfo->img.second_field) { - //active_fs->second_dsn = pInfo->img.dsn; - //active_fs->prev_dsn = pInfo->img.prev_dsn; - if (active_fs->pic_type == FRAME_TYPE_IDR || - active_fs->pic_type == FRAME_TYPE_I) { - - viddec_h264_set_first_field_intra(active_fs, 1); - } else { - viddec_h264_set_first_field_intra(active_fs, 0); - } - - } - else { - //active_fs->first_dsn = pInfo->img.dsn; - //active_fs->prev_dsn = pInfo->img.prev_dsn; - viddec_h264_set_first_field_intra(active_fs, 0); - } - - if (pInfo->img.structure == FRAME) { - //active_fs->second_dsn = 0x0; - } - - if ( pInfo->sei_information.broken_link_pic ) - { - viddec_h264_set_broken_link_picture(active_fs, 1); - pInfo->sei_information.broken_link_pic = 0; - } - - if ((pInfo->img.frame_num == pInfo->sei_information.recovery_frame_num)&&(pInfo->SliceHeader.nal_ref_idc != 0)) - viddec_h264_set_recovery_pt_picture(active_fs, 1); - - //if ((( gRestartMode.aud ) || ( gRestartMode.sei )) && ( !gRestartMode.idr)) - if(pInfo->img.recovery_point_found == 6) - { - viddec_h264_set_open_gop_entry(active_fs, 1); - pInfo->dpb.SuspendOutput = 1; - } + if ( pInfo->img.second_field) { + //active_fs->second_dsn = pInfo->img.dsn; + //active_fs->prev_dsn = pInfo->img.prev_dsn; + if (active_fs->pic_type == FRAME_TYPE_IDR || + active_fs->pic_type == FRAME_TYPE_I) { + + viddec_h264_set_first_field_intra(active_fs, 1); + } else { + viddec_h264_set_first_field_intra(active_fs, 0); + } + + } + else { + //active_fs->first_dsn = pInfo->img.dsn; + //active_fs->prev_dsn = pInfo->img.prev_dsn; + viddec_h264_set_first_field_intra(active_fs, 0); + } + + if (pInfo->img.structure == FRAME) { + //active_fs->second_dsn = 0x0; + } + + if ( pInfo->sei_information.broken_link_pic ) + { + viddec_h264_set_broken_link_picture(active_fs, 1); + pInfo->sei_information.broken_link_pic = 0; + } + + if ((pInfo->img.frame_num == pInfo->sei_information.recovery_frame_num)&&(pInfo->SliceHeader.nal_ref_idc != 0)) + viddec_h264_set_recovery_pt_picture(active_fs, 1); + + //if ((( gRestartMode.aud ) || ( gRestartMode.sei )) && ( !gRestartMode.idr)) + if (pInfo->img.recovery_point_found == 6) + { + viddec_h264_set_open_gop_entry(active_fs, 1); + pInfo->dpb.SuspendOutput = 1; + } #endif - if ((pInfo->img.second_field) || (free_fs_found)) - { - viddec_h264_set_dec_structure(active_fs, pInfo->img.structure); - viddec_h264_set_is_output(active_fs, 0); - - switch(pInfo->img.structure) - { - case (FRAME) :{ - active_fs->frame.pic_num = pInfo->img.frame_num; - active_fs->frame.long_term_frame_idx = 0; - active_fs->frame.long_term_pic_num = 0; - active_fs->frame.used_for_reference = 0; - active_fs->frame.is_long_term = 0; - //active_fs->frame.structure = pInfo->img.structure; - active_fs->frame.poc = pInfo->img.framepoc; - }break; - case (TOP_FIELD) :{ - active_fs->top_field.pic_num = pInfo->img.frame_num; - active_fs->top_field.long_term_frame_idx = 0; - active_fs->top_field.long_term_pic_num = 0; - active_fs->top_field.used_for_reference = 
0; - active_fs->top_field.is_long_term = 0; - //active_fs->top_field.structure = pInfo->img.structure; - active_fs->top_field.poc = pInfo->img.toppoc; - }break; - case(BOTTOM_FIELD) :{ - active_fs->bottom_field.pic_num = pInfo->img.frame_num; - active_fs->bottom_field.long_term_frame_idx = 0; - active_fs->bottom_field.long_term_pic_num = 0; - active_fs->bottom_field.used_for_reference = 0; - active_fs->bottom_field.is_long_term = 0; - //active_fs->bottom_field.structure = pInfo->img.structure; - active_fs->bottom_field.poc = pInfo->img.bottompoc; - }break; - } - } - else - { - // Need to drop a frame or something here - } - - return; + if ((pInfo->img.second_field) || (free_fs_found)) + { + viddec_h264_set_dec_structure(active_fs, pInfo->img.structure); + viddec_h264_set_is_output(active_fs, 0); + + switch (pInfo->img.structure) + { + case (FRAME) : { + active_fs->frame.pic_num = pInfo->img.frame_num; + active_fs->frame.long_term_frame_idx = 0; + active_fs->frame.long_term_pic_num = 0; + active_fs->frame.used_for_reference = 0; + active_fs->frame.is_long_term = 0; + //active_fs->frame.structure = pInfo->img.structure; + active_fs->frame.poc = pInfo->img.framepoc; + } + break; + case (TOP_FIELD) : { + active_fs->top_field.pic_num = pInfo->img.frame_num; + active_fs->top_field.long_term_frame_idx = 0; + active_fs->top_field.long_term_pic_num = 0; + active_fs->top_field.used_for_reference = 0; + active_fs->top_field.is_long_term = 0; + //active_fs->top_field.structure = pInfo->img.structure; + active_fs->top_field.poc = pInfo->img.toppoc; + } + break; + case(BOTTOM_FIELD) : { + active_fs->bottom_field.pic_num = pInfo->img.frame_num; + active_fs->bottom_field.long_term_frame_idx = 0; + active_fs->bottom_field.long_term_pic_num = 0; + active_fs->bottom_field.used_for_reference = 0; + active_fs->bottom_field.is_long_term = 0; + //active_fs->bottom_field.structure = pInfo->img.structure; + active_fs->bottom_field.poc = pInfo->img.bottompoc; + } + break; + } + } + else + { + // Need to drop a frame or something here + } + + return; } ///// End of init Frame Store @@ -3876,254 +3896,254 @@ void h264_dpb_init_frame_store(h264_Info * pInfo) void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num) { - int32_t MaxPicOrderCntLsb = (1<<(pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4)); - int32_t delta_pic_order_count[2]; - int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); + int32_t MaxPicOrderCntLsb = (1<<(pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4)); + int32_t delta_pic_order_count[2]; + int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); + + int32_t AbsFrameNum =0; + int32_t ExpectedDeltaPerPicOrderCntCycle =0; + int32_t PicOrderCntCycleCnt = 0; + int32_t FrameNumInPicOrderCntCycle =0; + int32_t ExpectedPicOrderCnt =0; + + int32_t actual_frame_num =0; + + + + if (NonExisting) actual_frame_num = frame_num; + else actual_frame_num = pInfo->img.frame_num; + + switch (pInfo->active_SPS.pic_order_cnt_type) + { + case 0: + if (NonExisting != 0) break; + + if (pInfo->SliceHeader.idr_flag) + { + pInfo->img.PicOrderCntMsb = 0; + pInfo->img.PrevPicOrderCntLsb = 0; + } + else if (pInfo->img.last_has_mmco_5) + { + if (pInfo->img.last_pic_bottom_field) + { + pInfo->img.PicOrderCntMsb = 0; + pInfo->img.PrevPicOrderCntLsb = 0; + } + else + { + pInfo->img.PicOrderCntMsb = 0; + pInfo->img.PrevPicOrderCntLsb = pInfo->img.toppoc; + } + } - int32_t AbsFrameNum =0; - int32_t ExpectedDeltaPerPicOrderCntCycle =0; - int32_t 
PicOrderCntCycleCnt = 0; - int32_t FrameNumInPicOrderCntCycle =0; - int32_t ExpectedPicOrderCnt =0; - - int32_t actual_frame_num =0; + // Calculate the MSBs of current picture + if ((pInfo->img.pic_order_cnt_lsb < pInfo->img.PrevPicOrderCntLsb) && + ((pInfo->img.PrevPicOrderCntLsb - pInfo->img.pic_order_cnt_lsb )>=(MaxPicOrderCntLsb>>1)) ) + { + pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb + MaxPicOrderCntLsb; + } else if ((pInfo->img.pic_order_cnt_lsb > pInfo->img.PrevPicOrderCntLsb) && + ((pInfo->img.pic_order_cnt_lsb - pInfo->img.PrevPicOrderCntLsb ) > (MaxPicOrderCntLsb>>1)) ) + { + pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb - MaxPicOrderCntLsb; + } else + { + pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb; + } - + // 2nd - if(NonExisting) actual_frame_num = frame_num; - else actual_frame_num = pInfo->img.frame_num; + if (pInfo->img.field_pic_flag==0) + { + //frame pix + pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb; + pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.delta_pic_order_cnt_bottom; + pInfo->img.ThisPOC = pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? pInfo->img.toppoc : pInfo->img.bottompoc; // POC200301 + } + else if (pInfo->img.bottom_field_flag==0) + { //top field + pInfo->img.ThisPOC= pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb; + } + else + { //bottom field + pInfo->img.ThisPOC= pInfo->img.bottompoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb; + } + pInfo->img.framepoc=pInfo->img.ThisPOC; - switch (pInfo->active_SPS.pic_order_cnt_type) - { - case 0: - if(NonExisting != 0) break; + if ( pInfo->img.frame_num != pInfo->old_slice.frame_num) + pInfo->img.PreviousFrameNum = pInfo->img.frame_num; - if (pInfo->SliceHeader.idr_flag) + if (pInfo->SliceHeader.nal_ref_idc) { - pInfo->img.PicOrderCntMsb = 0; - pInfo->img.PrevPicOrderCntLsb = 0; - } - else if (pInfo->img.last_has_mmco_5) - { - if (pInfo->img.last_pic_bottom_field) - { - pInfo->img.PicOrderCntMsb = 0; - pInfo->img.PrevPicOrderCntLsb = 0; - } - else - { - pInfo->img.PicOrderCntMsb = 0; - pInfo->img.PrevPicOrderCntLsb = pInfo->img.toppoc; - } - } - - // Calculate the MSBs of current picture - if((pInfo->img.pic_order_cnt_lsb < pInfo->img.PrevPicOrderCntLsb) && - ((pInfo->img.PrevPicOrderCntLsb - pInfo->img.pic_order_cnt_lsb )>=(MaxPicOrderCntLsb>>1)) ) - { - pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb + MaxPicOrderCntLsb; - } else if ((pInfo->img.pic_order_cnt_lsb > pInfo->img.PrevPicOrderCntLsb) && - ((pInfo->img.pic_order_cnt_lsb - pInfo->img.PrevPicOrderCntLsb ) > (MaxPicOrderCntLsb>>1)) ) - { - pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb - MaxPicOrderCntLsb; - } else - { - pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb; - } - - // 2nd - - if(pInfo->img.field_pic_flag==0) - { - //frame pix - pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb; - pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.delta_pic_order_cnt_bottom; - pInfo->img.ThisPOC = pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? 
pInfo->img.toppoc : pInfo->img.bottompoc; // POC200301 - } - else if (pInfo->img.bottom_field_flag==0) - { //top field - pInfo->img.ThisPOC= pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb; - } - else - { //bottom field - pInfo->img.ThisPOC= pInfo->img.bottompoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb; - } - pInfo->img.framepoc=pInfo->img.ThisPOC; - - if ( pInfo->img.frame_num != pInfo->old_slice.frame_num) - pInfo->img.PreviousFrameNum = pInfo->img.frame_num; - - if(pInfo->SliceHeader.nal_ref_idc) - { - pInfo->img.PrevPicOrderCntLsb = pInfo->img.pic_order_cnt_lsb; - pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb; - } - - break; + pInfo->img.PrevPicOrderCntLsb = pInfo->img.pic_order_cnt_lsb; + pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb; + } + + break; case 1: { - if(NonExisting) - { - delta_pic_order_count[0] = 0; - delta_pic_order_count[1] = 0; - } - else - { - delta_pic_order_count[0] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 0 : pInfo->img.delta_pic_order_cnt[0]; - delta_pic_order_count[1] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 0 : - ( (!pInfo->active_PPS.pic_order_present_flag) && (!(pInfo->img.field_pic_flag))) ? 0 : - pInfo->img.delta_pic_order_cnt[1]; - } - - // this if branch should not be taken during processing of a gap_in_frame_num pic since - // an IDR picture cannot produce non-existent frames... - if(pInfo->SliceHeader.idr_flag) - { - pInfo->img.FrameNumOffset = 0; - } - else - { - - if (actual_frame_num < pInfo->img.PreviousFrameNum) - { - pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum; - } - else - { - pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset; - } - } - - // pInfo->img.num_ref_frames_in_pic_order_cnt_cycle set from SPS - // so constant between existent and non-existent frames - if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle) - AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num; - else - AbsFrameNum = 0; - - // pInfo->img.disposable_flag should never be true for a non-existent frame since these are always - // references... - if ((pInfo->SliceHeader.nal_ref_idc == 0) && (AbsFrameNum > 0)) AbsFrameNum = AbsFrameNum - 1; - - // 3rd - ExpectedDeltaPerPicOrderCntCycle = pInfo->active_SPS.expectedDeltaPerPOCCycle; - - if (AbsFrameNum) - { - // Rem: pInfo->img.num_ref_frames_in_pic_order_cnt_cycle takes max value of 255 (8 bit) - // Frame NUm may be 2^16 (17 bits) - // I guess we really have to treat AbsFrameNum as a 32 bit number - uint32_t temp = 0; - int32_t i=0; - int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE]; - - if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle) - PicOrderCntCycleCnt = ldiv_mod_u((uint32_t)(AbsFrameNum-1), (uint32_t)pInfo->img.num_ref_frames_in_pic_order_cnt_cycle, &temp); - - ExpectedPicOrderCnt = mult_u((uint32_t)PicOrderCntCycleCnt, (uint32_t)ExpectedDeltaPerPicOrderCntCycle); - - FrameNumInPicOrderCntCycle = temp; - - //ExpectedPicOrderCnt +=pInfo->active_SPS.expectedDeltaPerPOCCycle; -#ifndef USER_MODE - h264_Parse_Copy_Offset_Ref_Frames_From_DDR(pInfo, offset_for_ref_frame, pInfo->active_SPS.seq_parameter_set_id); - for (i = 0; i <= FrameNumInPicOrderCntCycle; i++) - ExpectedPicOrderCnt += offset_for_ref_frame[i]; + if (NonExisting) + { + delta_pic_order_count[0] = 0; + delta_pic_order_count[1] = 0; + } + else + { + delta_pic_order_count[0] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 
0 : pInfo->img.delta_pic_order_cnt[0]; + delta_pic_order_count[1] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 0 : + ( (!pInfo->active_PPS.pic_order_present_flag) && (!(pInfo->img.field_pic_flag))) ? 0 : + pInfo->img.delta_pic_order_cnt[1]; + } + + // this if branch should not be taken during processing of a gap_in_frame_num pic since + // an IDR picture cannot produce non-existent frames... + if (pInfo->SliceHeader.idr_flag) + { + pInfo->img.FrameNumOffset = 0; + } + else + { + + if (actual_frame_num < pInfo->img.PreviousFrameNum) + { + pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum; + } + else + { + pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset; + } + } + + // pInfo->img.num_ref_frames_in_pic_order_cnt_cycle set from SPS + // so constant between existent and non-existent frames + if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle) + AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num; + else + AbsFrameNum = 0; + + // pInfo->img.disposable_flag should never be true for a non-existent frame since these are always + // references... + if ((pInfo->SliceHeader.nal_ref_idc == 0) && (AbsFrameNum > 0)) AbsFrameNum = AbsFrameNum - 1; + + // 3rd + ExpectedDeltaPerPicOrderCntCycle = pInfo->active_SPS.expectedDeltaPerPOCCycle; + + if (AbsFrameNum) + { + // Rem: pInfo->img.num_ref_frames_in_pic_order_cnt_cycle takes max value of 255 (8 bit) + // Frame NUm may be 2^16 (17 bits) + // I guess we really have to treat AbsFrameNum as a 32 bit number + uint32_t temp = 0; + int32_t i=0; + int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE]; + + if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle) + PicOrderCntCycleCnt = ldiv_mod_u((uint32_t)(AbsFrameNum-1), (uint32_t)pInfo->img.num_ref_frames_in_pic_order_cnt_cycle, &temp); + + ExpectedPicOrderCnt = mult_u((uint32_t)PicOrderCntCycleCnt, (uint32_t)ExpectedDeltaPerPicOrderCntCycle); + + FrameNumInPicOrderCntCycle = temp; + + //ExpectedPicOrderCnt +=pInfo->active_SPS.expectedDeltaPerPOCCycle; +#ifndef USER_MODE + h264_Parse_Copy_Offset_Ref_Frames_From_DDR(pInfo, offset_for_ref_frame, pInfo->active_SPS.seq_parameter_set_id); + for (i = 0; i <= FrameNumInPicOrderCntCycle; i++) + ExpectedPicOrderCnt += offset_for_ref_frame[i]; #else - for (i = 0; i <= FrameNumInPicOrderCntCycle; i++) - ExpectedPicOrderCnt += pInfo->active_SPS.offset_for_ref_frame[i]; + for (i = 0; i <= FrameNumInPicOrderCntCycle; i++) + ExpectedPicOrderCnt += pInfo->active_SPS.offset_for_ref_frame[i]; #endif - } - else { - ExpectedPicOrderCnt = 0; - } - - if (pInfo->SliceHeader.nal_ref_idc == 0) - ExpectedPicOrderCnt += pInfo->img.offset_for_non_ref_pic; - - if (!(pInfo->img.field_pic_flag)) - { - pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0]; - pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[1]; - pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? 
pInfo->img.toppoc : pInfo->img.bottompoc; - pInfo->img.ThisPOC = pInfo->img.framepoc; - } - else if (!(pInfo->img.bottom_field_flag)) - { - //top field - pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0]; - pInfo->img.ThisPOC = pInfo->img.toppoc; - pInfo->img.bottompoc = 0; - } - else - { - //bottom field - pInfo->img.toppoc = 0; - pInfo->img.bottompoc = ExpectedPicOrderCnt + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[0]; - pInfo->img.ThisPOC = pInfo->img.bottompoc; - } - - //CONFORMANCE_ISSUE - pInfo->img.framepoc=pInfo->img.ThisPOC; - - //CONFORMANCE_ISSUE - pInfo->img.PreviousFrameNum=pInfo->img.frame_num; - pInfo->img.PreviousFrameNumOffset=pInfo->img.FrameNumOffset; - - } - break; + } + else { + ExpectedPicOrderCnt = 0; + } + + if (pInfo->SliceHeader.nal_ref_idc == 0) + ExpectedPicOrderCnt += pInfo->img.offset_for_non_ref_pic; + + if (!(pInfo->img.field_pic_flag)) + { + pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0]; + pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[1]; + pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? pInfo->img.toppoc : pInfo->img.bottompoc; + pInfo->img.ThisPOC = pInfo->img.framepoc; + } + else if (!(pInfo->img.bottom_field_flag)) + { + //top field + pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0]; + pInfo->img.ThisPOC = pInfo->img.toppoc; + pInfo->img.bottompoc = 0; + } + else + { + //bottom field + pInfo->img.toppoc = 0; + pInfo->img.bottompoc = ExpectedPicOrderCnt + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[0]; + pInfo->img.ThisPOC = pInfo->img.bottompoc; + } + + //CONFORMANCE_ISSUE + pInfo->img.framepoc=pInfo->img.ThisPOC; + + //CONFORMANCE_ISSUE + pInfo->img.PreviousFrameNum=pInfo->img.frame_num; + pInfo->img.PreviousFrameNumOffset=pInfo->img.FrameNumOffset; + + } + break; case 2: { // POC MODE 2 - if (pInfo->SliceHeader.idr_flag) - { - pInfo->img.FrameNumOffset = 0; - pInfo->img.framepoc = 0; - pInfo->img.toppoc = 0; - pInfo->img.bottompoc = 0; - pInfo->img.ThisPOC = 0; - } - else - { - if (pInfo->img.last_has_mmco_5) - { - pInfo->img.PreviousFrameNum = 0; - pInfo->img.PreviousFrameNumOffset = 0; - } - if (actual_frame_num < pInfo->img.PreviousFrameNum) - pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum; - else - pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset; - - AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num; - if (pInfo->SliceHeader.nal_ref_idc == 0) pInfo->img.ThisPOC = (AbsFrameNum<<1) - 1; - else pInfo->img.ThisPOC = (AbsFrameNum<<1); - - if (!(pInfo->img.field_pic_flag)) - { - pInfo->img.toppoc = pInfo->img.ThisPOC; - pInfo->img.bottompoc = pInfo->img.ThisPOC; - pInfo->img.framepoc = pInfo->img.ThisPOC; - } - else if (!(pInfo->img.bottom_field_flag)) - { - pInfo->img.toppoc = pInfo->img.ThisPOC; - pInfo->img.framepoc = pInfo->img.ThisPOC; - } - else - { - pInfo->img.bottompoc = pInfo->img.ThisPOC; - pInfo->img.framepoc = pInfo->img.ThisPOC; - } - } - - //CONFORMANCE_ISSUE - pInfo->img.PreviousFrameNum = pInfo->img.frame_num; - pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset; - } - break; - default: - break; - } - - return; + if (pInfo->SliceHeader.idr_flag) + { + pInfo->img.FrameNumOffset = 0; + pInfo->img.framepoc = 0; + pInfo->img.toppoc = 0; + pInfo->img.bottompoc = 0; + pInfo->img.ThisPOC = 0; + } + else + { + if (pInfo->img.last_has_mmco_5) + { + pInfo->img.PreviousFrameNum = 0; + 
pInfo->img.PreviousFrameNumOffset = 0; + } + if (actual_frame_num < pInfo->img.PreviousFrameNum) + pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum; + else + pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset; + + AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num; + if (pInfo->SliceHeader.nal_ref_idc == 0) pInfo->img.ThisPOC = (AbsFrameNum<<1) - 1; + else pInfo->img.ThisPOC = (AbsFrameNum<<1); + + if (!(pInfo->img.field_pic_flag)) + { + pInfo->img.toppoc = pInfo->img.ThisPOC; + pInfo->img.bottompoc = pInfo->img.ThisPOC; + pInfo->img.framepoc = pInfo->img.ThisPOC; + } + else if (!(pInfo->img.bottom_field_flag)) + { + pInfo->img.toppoc = pInfo->img.ThisPOC; + pInfo->img.framepoc = pInfo->img.ThisPOC; + } + else + { + pInfo->img.bottompoc = pInfo->img.ThisPOC; + pInfo->img.framepoc = pInfo->img.ThisPOC; + } + } + + //CONFORMANCE_ISSUE + pInfo->img.PreviousFrameNum = pInfo->img.frame_num; + pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset; + } + break; + default: + break; + } + + return; } //// End of decoding_POC /* ------------------------------------------------------------------------------------------ */ @@ -4136,41 +4156,42 @@ void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame void h264_hdr_post_poc(h264_Info* pInfo, int32_t NonExisting, int32_t frame_num, int32_t use_old) { - int32_t actual_frame_num = (NonExisting)? frame_num : - (use_old)? pInfo->old_slice.frame_num : - pInfo->img.frame_num; - - int32_t disposable_flag = (use_old)?(pInfo->old_slice.nal_ref_idc == 0) : - (pInfo->SliceHeader.nal_ref_idc == 0); - - switch(pInfo->img.pic_order_cnt_type) - { - case 0: { - pInfo->img.PreviousFrameNum = actual_frame_num; - if ((disposable_flag == 0) && (NonExisting == 0)) - { - pInfo->img.PrevPicOrderCntLsb = (use_old)? pInfo->old_slice.pic_order_cnt_lsb : - pInfo->SliceHeader.pic_order_cnt_lsb; - pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb; - } - } - break; - case 1: { - pInfo->img.PreviousFrameNum = actual_frame_num; - pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset; - } - break; - case 2: { - pInfo->img.PreviousFrameNum = actual_frame_num; - pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset; - - }break; - - default: { - }break; - } - - return; + int32_t actual_frame_num = (NonExisting)? frame_num : + (use_old)? pInfo->old_slice.frame_num : + pInfo->img.frame_num; + + int32_t disposable_flag = (use_old)?(pInfo->old_slice.nal_ref_idc == 0) : + (pInfo->SliceHeader.nal_ref_idc == 0); + + switch (pInfo->img.pic_order_cnt_type) + { + case 0: { + pInfo->img.PreviousFrameNum = actual_frame_num; + if ((disposable_flag == 0) && (NonExisting == 0)) + { + pInfo->img.PrevPicOrderCntLsb = (use_old)? 
pInfo->old_slice.pic_order_cnt_lsb : + pInfo->SliceHeader.pic_order_cnt_lsb; + pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb; + } + } + break; + case 1: { + pInfo->img.PreviousFrameNum = actual_frame_num; + pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset; + } + break; + case 2: { + pInfo->img.PreviousFrameNum = actual_frame_num; + pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset; + + } + break; + + default: { + } break; + } + + return; } ///// End of h264_hdr_post_poc diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c index 307a0da..ec48cc8 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c @@ -6,79 +6,79 @@ uint32_t mult_u(register uint32_t var1, register uint32_t var2) { - register unsigned long var_out = 0; - - while (var2 > 0) - { - - if (var2 & 0x01) - { - var_out += var1; - } - var2 >>= 1; - var1 <<= 1; - } - return var_out; - + register unsigned long var_out = 0; + + while (var2 > 0) + { + + if (var2 & 0x01) + { + var_out += var1; + } + var2 >>= 1; + var1 <<= 1; + } + return var_out; + }// mult_u uint32_t ldiv_mod_u(uint32_t a, uint32_t b, uint32_t * mod) { - register unsigned long div = b; - register unsigned long res = 0; - register unsigned long bit = 0x1; - - if (!div) - { - *mod = 0; - return 0xffffffff ; // Div by 0 - } - - if (a < b) - { - *mod = a; - return 0; // It won't even go once - } - - while(!(div & 0x80000000)) - { - div <<= 1; - bit <<= 1; - } - - while (bit) - { - if (div <= a) - { - res |= bit; - a -= div; - } - div >>= 1; - bit >>= 1; - } - *mod = a; - return res; + register unsigned long div = b; + register unsigned long res = 0; + register unsigned long bit = 0x1; + + if (!div) + { + *mod = 0; + return 0xffffffff ; // Div by 0 + } + + if (a < b) + { + *mod = a; + return 0; // It won't even go once + } + + while (!(div & 0x80000000)) + { + div <<= 1; + bit <<= 1; + } + + while (bit) + { + if (div <= a) + { + res |= bit; + a -= div; + } + div >>= 1; + bit >>= 1; + } + *mod = a; + return res; }// ldiv_mod_u unsigned ldiv_u(register unsigned a, register unsigned b) -{ - register unsigned div = b << 16; - register unsigned res = 0; - register unsigned bit = 0x10000; - - while (bit) - { - div >>= 1; - bit >>= 1; - if (div < a) - { - res |= bit; - a -= div; - } - } - - return res; +{ + register unsigned div = b << 16; + register unsigned res = 0; + register unsigned bit = 0x10000; + + while (bit) + { + div >>= 1; + bit >>= 1; + if (div < a) + { + res |= bit; + a -= div; + } + } + + return res; } diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c index 38297dc..b5a0145 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c @@ -16,32 +16,32 @@ // --------------------------------------------------------------------------- void* h264_memset( void* buf, uint32_t c, uint32_t num ) { - uint32_t* buf32 = (uint32_t*)buf; - uint32_t size32 = ( num >> 2 ); - uint32_t i; + uint32_t* buf32 = (uint32_t*)buf; + uint32_t size32 = ( num >> 2 ); + uint32_t i; - for ( i = 0; i < size32; i++ ) - { - *buf32++ = c; - } + for ( i = 0; i < size32; i++ ) + { + *buf32++ = c; + } - return buf; + return buf; } void* h264_memcpy( void* dest, void* src, uint32_t num ) { - int32_t* dest32 = (int32_t*)dest; - int32_t* src32 = (int32_t*)src; - uint32_t 
size32 = ( num >> 2 ); - uint32_t i; + int32_t* dest32 = (int32_t*)dest; + int32_t* src32 = (int32_t*)src; + uint32_t size32 = ( num >> 2 ); + uint32_t i; - for ( i = 0; i < size32; i++ ) - { - *dest32++ = *src32++; - } + for ( i = 0; i < size32; i++ ) + { + *dest32++ = *src32++; + } - return dest; + return dest; } @@ -50,33 +50,33 @@ void* h264_memcpy( void* dest, void* src, uint32_t num ) //h264_Parse_Copy_Sps_To_DDR () copy local sps to ddr mem void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId) { - uint32_t copy_size = sizeof(pic_param_set); - uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size; + uint32_t copy_size = sizeof(pic_param_set); + uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size; - if(nPPSId < MAX_NUM_PPS) - { - cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 1, 0); - } + if (nPPSId < MAX_NUM_PPS) + { + cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 1, 0); + } - return; + return; } //end of h264_Parse_Copy_Pps_To_DDR -// h264_Parse_Copy_Pps_From_DDR copy a pps with nPPSId from ddr mem to local PPS +// h264_Parse_Copy_Pps_From_DDR copy a pps with nPPSId from ddr mem to local PPS void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId) { - uint32_t copy_size= sizeof(pic_param_set); - uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size; + uint32_t copy_size= sizeof(pic_param_set); + uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size; - if( nPPSId < MAX_NUM_PPS) - { - cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 0, 0); - } - - return; + if ( nPPSId < MAX_NUM_PPS) + { + cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 0, 0); + } + + return; } //end of h264_Parse_Copy_Pps_From_DDR @@ -84,71 +84,71 @@ void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint3 //h264_Parse_Copy_Sps_To_DDR () copy local sps to ddr mem with nSPSId void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId) { - uint32_t copy_size = sizeof(seq_param_set_used); - uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all); - - if(nSPSId < MAX_NUM_SPS) - { - cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 1, 0); - } - - //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id); - - - return; + uint32_t copy_size = sizeof(seq_param_set_used); + uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all); + + if (nSPSId < MAX_NUM_SPS) + { + cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 1, 0); + } + + //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id); + + + return; } //end of h264_Parse_Copy_Sps_To_DDR -// h264_Parse_Copy_Sps_From_DDR copy a sps with nSPSId from ddr mem to local SPS +// h264_Parse_Copy_Sps_From_DDR copy a sps with nSPSId from ddr mem to local SPS void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId) { - uint32_t copy_size= sizeof(seq_param_set_used); - uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all); + uint32_t copy_size= sizeof(seq_param_set_used); + uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all); + + if (nSPSId < MAX_NUM_SPS) + { + cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 0, 0); + } - if(nSPSId < MAX_NUM_SPS) - { - cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 0, 0); - } + return; - return; - } //end of h264_Parse_Copy_Sps_From_DDR //h264_Parse_Copy_Offset_Ref_Frames_To_DDR () copy 
local offset_ref_frames to ddr mem with nSPSId void h264_Parse_Copy_Offset_Ref_Frames_To_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId) { - uint32_t copy_size = sizeof(int32_t)*MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; - uint32_t offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL+nSPSId*copy_size; - - if(nSPSId < MAX_NUM_SPS) - { - //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 1, 0); - h264_memcpy((int32_t *)offset_ref_frames_entry_ptr,pOffset_ref_frames, copy_size); - } - - return; + uint32_t copy_size = sizeof(int32_t)*MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; + uint32_t offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL+nSPSId*copy_size; + + if (nSPSId < MAX_NUM_SPS) + { + //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 1, 0); + h264_memcpy((int32_t *)offset_ref_frames_entry_ptr,pOffset_ref_frames, copy_size); + } + + return; } //end of h264_Parse_Copy_Offset_Ref_Frames_To_DDR -// h264_Parse_Copy_Offset_Ref_Frames_From_DDR copy a offset_ref_frames with nSPSId from ddr mem to local offset_ref_frames +// h264_Parse_Copy_Offset_Ref_Frames_From_DDR copy a offset_ref_frames with nSPSId from ddr mem to local offset_ref_frames void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId) { - uint32_t copy_size= sizeof(int32_t)*MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; - uint32_t offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL+nSPSId*copy_size; + uint32_t copy_size= sizeof(int32_t)*MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; + uint32_t offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL+nSPSId*copy_size; - if(nSPSId < MAX_NUM_SPS) - { - //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 0, 0); - h264_memcpy(pOffset_ref_frames, (int32_t *)offset_ref_frames_entry_ptr, copy_size); - } + if (nSPSId < MAX_NUM_SPS) + { + //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 0, 0); + h264_memcpy(pOffset_ref_frames, (int32_t *)offset_ref_frames_entry_ptr, copy_size); + } + + return; - return; - } //end of h264_Parse_Copy_Offset_Ref_Frames_From_DDR @@ -156,39 +156,39 @@ void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffs //h264_Parse_Check_Sps_Updated_Flag () copy local sps to ddr mem with nSPSId uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId) { - uint32_t is_updated=0; - uint32_t copy_size = sizeof(uint32_t); - uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size; - - - if(nSPSId < MAX_NUM_SPS) - { - cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 1, 0); - } - - //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id); - - - return is_updated; + uint32_t is_updated=0; + uint32_t copy_size = sizeof(uint32_t); + uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size; + + + if (nSPSId < MAX_NUM_SPS) + { + cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 1, 0); + } + + //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id); + + + return is_updated; } //end of h264_Parse_Check_Sps_Updated_Flag -// h264_Parse_Clear_Sps_Updated_Flag copy a sps with nSPSId from ddr mem to local SPS +// h264_Parse_Clear_Sps_Updated_Flag copy a sps with nSPSId from ddr mem to local SPS void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId) { uint32_t is_updated=0; - uint32_t copy_size= sizeof(uint32_t); - uint32_t 
sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size; + uint32_t copy_size= sizeof(uint32_t); + uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size; + + if (nSPSId < MAX_NUM_SPS) + { + cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 0, 0); + } - if(nSPSId < MAX_NUM_SPS) - { - cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 0, 0); - } + return; - return; - } //end of h264_Parse_Clear_Sps_Updated_Flag diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c index a1281c2..1719a04 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c @@ -8,120 +8,120 @@ /*---------------------------------------------*/ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicParameterSet_t* PictureParameterSet) { - h264_Status ret = H264_PPS_ERROR; - - //h264_PicParameterSet_t* PictureParameterSet = &pInfo->PictureParameterSet; - uint32_t code=0, i = 0; - - do { - ///// PPS par1: pic_parameter_set_id & seq_parameter_set_id - code = h264_GetVLCElement(parent, pInfo, false); - if(code > MAX_PIC_PARAMS) { - break; - } - PictureParameterSet->pic_parameter_set_id = (uint8_t)code; - - - code = h264_GetVLCElement(parent, pInfo, false); - if(code > MAX_NUM_SPS-1) { - break; - } - PictureParameterSet->seq_parameter_set_id = (uint8_t)code; - - ///// entropy_coding_mode_flag - viddec_pm_get_bits(parent, &code, 1); - PictureParameterSet->entropy_coding_mode_flag = (uint8_t)code; - ///// pic_order_present_flag - viddec_pm_get_bits(parent, &code, 1); - PictureParameterSet->pic_order_present_flag = (uint8_t)code; - - PictureParameterSet->num_slice_groups_minus1 = h264_GetVLCElement(parent, pInfo, false); - - // - // In main profile, FMO is excluded and num_slice_groups_minus1 should be 0 - // - if(PictureParameterSet->num_slice_groups_minus1 > 0) //MAX_NUM_SLICE_GRPS) - break; - - PictureParameterSet->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false)+1; - PictureParameterSet->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1; - - //// PPS->num_ref_idx_l0_active --- [0,32] - if(((PictureParameterSet->num_ref_idx_l0_active) > MAX_NUM_REF_FRAMES) || ((PictureParameterSet->num_ref_idx_l1_active) > MAX_NUM_REF_FRAMES)) - { - break; - } - - //// weighting prediction - viddec_pm_get_bits(parent, &code, 1); - PictureParameterSet->weighted_pred_flag = (uint8_t)code; - - viddec_pm_get_bits(parent, &code, 2); - PictureParameterSet->weighted_bipred_idc = (uint8_t)code; - - //// QP - PictureParameterSet->pic_init_qp_minus26 = h264_GetVLCElement(parent, pInfo, true); - PictureParameterSet->pic_init_qs_minus26 = h264_GetVLCElement(parent, pInfo, true); - if(((PictureParameterSet->pic_init_qp_minus26+26) > MAX_QP) || ((PictureParameterSet->pic_init_qs_minus26+26) > MAX_QP)) - break; - PictureParameterSet->chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true); - - //// Deblocking ctl parameters - viddec_pm_get_bits(parent, &code, 1); - PictureParameterSet->deblocking_filter_control_present_flag = (uint8_t)code; - - viddec_pm_get_bits(parent, &code, 1); - PictureParameterSet->constrained_intra_pred_flag = (uint8_t)code; - - if( viddec_pm_get_bits(parent, &code, 1) == -1) - break; - PictureParameterSet->redundant_pic_cnt_present_flag = (uint8_t)code; - - //// Check if have more RBSP Data for additional parameters - if(h264_More_RBSP_Data(parent, pInfo)) - { - viddec_pm_get_bits(parent, &code, 
1); - PictureParameterSet->transform_8x8_mode_flag = (uint8_t)code; - - if( viddec_pm_get_bits(parent, &code, 1) == -1) - break; - PictureParameterSet->pic_scaling_matrix_present_flag = (uint8_t)code; - - if(PictureParameterSet->pic_scaling_matrix_present_flag) - { - uint32_t n_ScalingList = 6 + (PictureParameterSet->transform_8x8_mode_flag << 1); - for(i=0; i<n_ScalingList; i++) - { - viddec_pm_get_bits(parent, &code, 1); - PictureParameterSet->pic_scaling_list_present_flag[i] = (uint8_t)code; - - if(PictureParameterSet->pic_scaling_list_present_flag[i]) - { - if(i<6) - h264_Scaling_List(parent, PictureParameterSet->ScalingList4x4[i], 16, &PictureParameterSet->UseDefaultScalingMatrix4x4Flag[i], pInfo); - else - h264_Scaling_List(parent, PictureParameterSet->ScalingList8x8[i-6], 64, &PictureParameterSet->UseDefaultScalingMatrix8x8Flag[i-6], pInfo); - } - } - } - - PictureParameterSet->second_chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true); //fix - //if((PictureParameterSet->second_chroma_qp_index_offset>12) || (PictureParameterSet->second_chroma_qp_index_offset < -12)) - // break; - } - else - { - PictureParameterSet->transform_8x8_mode_flag = 0; - PictureParameterSet->pic_scaling_matrix_present_flag = 0; - PictureParameterSet->second_chroma_qp_index_offset = PictureParameterSet->chroma_qp_index_offset; - } - - ret = H264_STATUS_OK; - }while(0); - - //h264_Parse_rbsp_trailing_bits(pInfo); - return ret; + h264_Status ret = H264_PPS_ERROR; + + //h264_PicParameterSet_t* PictureParameterSet = &pInfo->PictureParameterSet; + uint32_t code=0, i = 0; + + do { + ///// PPS par1: pic_parameter_set_id & seq_parameter_set_id + code = h264_GetVLCElement(parent, pInfo, false); + if (code > MAX_PIC_PARAMS) { + break; + } + PictureParameterSet->pic_parameter_set_id = (uint8_t)code; + + + code = h264_GetVLCElement(parent, pInfo, false); + if (code > MAX_NUM_SPS-1) { + break; + } + PictureParameterSet->seq_parameter_set_id = (uint8_t)code; + + ///// entropy_coding_mode_flag + viddec_pm_get_bits(parent, &code, 1); + PictureParameterSet->entropy_coding_mode_flag = (uint8_t)code; + ///// pic_order_present_flag + viddec_pm_get_bits(parent, &code, 1); + PictureParameterSet->pic_order_present_flag = (uint8_t)code; + + PictureParameterSet->num_slice_groups_minus1 = h264_GetVLCElement(parent, pInfo, false); + + // + // In main profile, FMO is excluded and num_slice_groups_minus1 should be 0 + // + if (PictureParameterSet->num_slice_groups_minus1 > 0) //MAX_NUM_SLICE_GRPS) + break; + + PictureParameterSet->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false)+1; + PictureParameterSet->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1; + + //// PPS->num_ref_idx_l0_active --- [0,32] + if (((PictureParameterSet->num_ref_idx_l0_active) > MAX_NUM_REF_FRAMES) || ((PictureParameterSet->num_ref_idx_l1_active) > MAX_NUM_REF_FRAMES)) + { + break; + } + + //// weighting prediction + viddec_pm_get_bits(parent, &code, 1); + PictureParameterSet->weighted_pred_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 2); + PictureParameterSet->weighted_bipred_idc = (uint8_t)code; + + //// QP + PictureParameterSet->pic_init_qp_minus26 = h264_GetVLCElement(parent, pInfo, true); + PictureParameterSet->pic_init_qs_minus26 = h264_GetVLCElement(parent, pInfo, true); + if (((PictureParameterSet->pic_init_qp_minus26+26) > MAX_QP) || ((PictureParameterSet->pic_init_qs_minus26+26) > MAX_QP)) + break; + PictureParameterSet->chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true); + + //// Deblocking ctl parameters + viddec_pm_get_bits(parent, &code, 1); +
PictureParameterSet->deblocking_filter_control_present_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 1); + PictureParameterSet->constrained_intra_pred_flag = (uint8_t)code; + + if ( viddec_pm_get_bits(parent, &code, 1) == -1) + break; + PictureParameterSet->redundant_pic_cnt_present_flag = (uint8_t)code; + + //// Check if have more RBSP Data for additional parameters + if (h264_More_RBSP_Data(parent, pInfo)) + { + viddec_pm_get_bits(parent, &code, 1); + PictureParameterSet->transform_8x8_mode_flag = (uint8_t)code; + + if ( viddec_pm_get_bits(parent, &code, 1) == -1) + break; + PictureParameterSet->pic_scaling_matrix_present_flag = (uint8_t)code; + + if (PictureParameterSet->pic_scaling_matrix_present_flag) + { + uint32_t n_ScalingList = 6 + (PictureParameterSet->transform_8x8_mode_flag << 1); + for (i=0; i<n_ScalingList; i++) + { + viddec_pm_get_bits(parent, &code, 1); + PictureParameterSet->pic_scaling_list_present_flag[i] = (uint8_t)code; + + if (PictureParameterSet->pic_scaling_list_present_flag[i]) + { + if (i<6) + h264_Scaling_List(parent, PictureParameterSet->ScalingList4x4[i], 16, &PictureParameterSet->UseDefaultScalingMatrix4x4Flag[i], pInfo); + else + h264_Scaling_List(parent, PictureParameterSet->ScalingList8x8[i-6], 64, &PictureParameterSet->UseDefaultScalingMatrix8x8Flag[i-6], pInfo); + } + } + } + + PictureParameterSet->second_chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true); //fix + //if((PictureParameterSet->second_chroma_qp_index_offset>12) || (PictureParameterSet->second_chroma_qp_index_offset < -12)) + // break; + } + else + { + PictureParameterSet->transform_8x8_mode_flag = 0; + PictureParameterSet->pic_scaling_matrix_present_flag = 0; + PictureParameterSet->second_chroma_qp_index_offset = PictureParameterSet->chroma_qp_index_offset; + } + + ret = H264_STATUS_OK; + } while (0); + + //h264_Parse_rbsp_trailing_bits(pInfo); + return ret; } ////////// EOF/////////////// diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c index b330f86..43655dd 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c @@ -14,941 +14,941 @@ ////////////////////////////////////////////////////////////////////////////// // avc_sei_stream_initialise () // -// + void h264_sei_stream_initialise (h264_Info* pInfo) { - pInfo->sei_information.capture_POC = 0; - pInfo->sei_information.disp_frozen = 0; - pInfo->sei_information.release_POC = 0; - pInfo->sei_information.capture_fn = 0; - pInfo->sei_information.recovery_fn = 0xFFFFFFFF; - pInfo->sei_information.scan_format = 0; - pInfo->sei_information.broken_link_pic = 0; - return; + pInfo->sei_information.capture_POC = 0; + pInfo->sei_information.disp_frozen = 0; + pInfo->sei_information.release_POC = 0; + pInfo->sei_information.capture_fn = 0; + pInfo->sei_information.recovery_fn = 0xFFFFFFFF; + pInfo->sei_information.scan_format = 0; + pInfo->sei_information.broken_link_pic = 0; + return; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_buffering_period(void *parent,h264_Info* pInfo) { - h264_Status ret = H264_STATUS_SEI_ERROR; - - h264_SEI_buffering_period_t* sei_msg_ptr; - h264_SEI_buffering_period_t sei_buffering_period; - int32_t SchedSelIdx; - int num_bits = 0; - - sei_msg_ptr =
(h264_SEI_buffering_period_t *)(&sei_buffering_period); - - do{ - if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) - { - num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1; - } - else if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag) - { - num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1; - } - - sei_msg_ptr->seq_param_set_id = h264_GetVLCElement(parent, pInfo, false); - if(sei_msg_ptr->seq_param_set_id >= NUM_SPS) - break; - - //check if this id is same as the id of the current SPS //fix - - if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) - { - if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) - break; - - for(SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; SchedSelIdx++) - { - viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_nal, num_bits); - viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_nal, num_bits); - } - } - - if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1) - { - if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) - break; - - for(SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; SchedSelIdx++) - { - viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_vcl, num_bits); - viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_vcl, num_bits); - } - } - - ret = H264_STATUS_OK; - } while (0); - - return H264_STATUS_OK; + h264_Status ret = H264_STATUS_SEI_ERROR; + + h264_SEI_buffering_period_t* sei_msg_ptr; + h264_SEI_buffering_period_t sei_buffering_period; + int32_t SchedSelIdx; + int num_bits = 0; + + sei_msg_ptr = (h264_SEI_buffering_period_t *)(&sei_buffering_period); + + do { + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) + { + num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1; + } + else if (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag) + { + num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1; + } + + sei_msg_ptr->seq_param_set_id = h264_GetVLCElement(parent, pInfo, false); + if (sei_msg_ptr->seq_param_set_id >= NUM_SPS) + break; + + //check if this id is same as the id of the current SPS //fix + + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) + { + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) + break; + + for (SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; SchedSelIdx++) + { + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_nal, num_bits); + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_nal, num_bits); + } + } + + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1) + { + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) + break; + + for (SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; 
SchedSelIdx++) + { + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_vcl, num_bits); + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_vcl, num_bits); + } + } + + ret = H264_STATUS_OK; + } while (0); + + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_pic_timing(void *parent,h264_Info* pInfo) { - int32_t CpbDpbDelaysPresentFlag = 0; - h264_SEI_pic_timing_t* sei_msg_ptr; - h264_SEI_pic_timing_t sei_pic_timing; - int32_t num_bits_cpb = 0, num_bits_dpb = 0, time_offset_length = 0; - uint32_t code; - uint32_t clock_timestamp_flag = 0; - uint32_t full_timestamp_flag = 0; - uint32_t seconds_flag = 0; - uint32_t minutes_flag = 0; - uint32_t hours_flag = 0; - uint32_t time_offset = 0; - - - - - sei_msg_ptr = (h264_SEI_pic_timing_t *)(&sei_pic_timing); - - if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag) - { - num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 +1; - num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 + 1; - time_offset_length = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_time_offset_length; - } - else if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag) - { - num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 +1; - num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 + 1; - } - - - CpbDpbDelaysPresentFlag = 1; // as per amphion code - if(CpbDpbDelaysPresentFlag) - { - viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->cpb_removal_delay, num_bits_cpb); - viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->dpb_output_delay, num_bits_dpb); - } - - if(pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag) - { - int32_t i = 0, NumClockTS = 0; - - viddec_workload_item_t wi; - - wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0; - viddec_pm_get_bits(parent, &code , 4); - sei_msg_ptr->pic_struct = (uint8_t)code; - - - if((sei_msg_ptr->pic_struct == 0) || (sei_msg_ptr->pic_struct == 7) || (sei_msg_ptr->pic_struct == 8)) { - pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_PROGRESSIVE; - } else { - pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_INTERLACED; - } - - wi.vwi_type = VIDDEC_WORKLOAD_SEI_PIC_TIMING; - wi.h264_sei_pic_timing.pic_struct = sei_msg_ptr->pic_struct; + int32_t CpbDpbDelaysPresentFlag = 0; + h264_SEI_pic_timing_t* sei_msg_ptr; + h264_SEI_pic_timing_t sei_pic_timing; + int32_t num_bits_cpb = 0, num_bits_dpb = 0, time_offset_length = 0; + uint32_t code; + uint32_t clock_timestamp_flag = 0; + uint32_t full_timestamp_flag = 0; + uint32_t seconds_flag = 0; + uint32_t minutes_flag = 0; + uint32_t hours_flag = 0; + uint32_t time_offset = 0; + + + + + sei_msg_ptr = (h264_SEI_pic_timing_t *)(&sei_pic_timing); + + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag) + { + num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 +1; + num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 + 1; + 
time_offset_length = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_time_offset_length; + } + else if (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag) + { + num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 +1; + num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 + 1; + } + + + CpbDpbDelaysPresentFlag = 1; // as per amphion code + if (CpbDpbDelaysPresentFlag) + { + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->cpb_removal_delay, num_bits_cpb); + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->dpb_output_delay, num_bits_dpb); + } + + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag) + { + int32_t i = 0, NumClockTS = 0; + + viddec_workload_item_t wi; + + wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0; + viddec_pm_get_bits(parent, &code , 4); + sei_msg_ptr->pic_struct = (uint8_t)code; + + + if ((sei_msg_ptr->pic_struct == 0) || (sei_msg_ptr->pic_struct == 7) || (sei_msg_ptr->pic_struct == 8)) { + pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_PROGRESSIVE; + } else { + pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_INTERLACED; + } + + wi.vwi_type = VIDDEC_WORKLOAD_SEI_PIC_TIMING; + wi.h264_sei_pic_timing.pic_struct = sei_msg_ptr->pic_struct; #ifndef VBP - //Push to current if we are in first frame, or we do not detect previous frame end - viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); -#endif - - if(sei_msg_ptr->pic_struct < 3) { - NumClockTS = 1; - } else if((sei_msg_ptr->pic_struct < 5) || (sei_msg_ptr->pic_struct == 7)) { - NumClockTS = 2; - } else { - NumClockTS = 3; - } - - for(i = 0; i < NumClockTS; i++) - { - viddec_pm_get_bits(parent, &code , 1); - clock_timestamp_flag = code; - //sei_msg_ptr->clock_timestamp_flag[i] = (uint8_t)code; - - if(clock_timestamp_flag) - { - viddec_pm_get_bits(parent, &code , 2); - //sei_msg_ptr->ct_type[i] = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 1); - //sei_msg_ptr->nuit_field_based_flag[i] = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 5); - //sei_msg_ptr->counting_type[i] = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 1); - //sei_msg_ptr->full_timestamp_flag[i] = (uint8_t)code; - full_timestamp_flag = code; - - viddec_pm_get_bits(parent, &code , 1); - //sei_msg_ptr->discontinuity_flag[i] = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 1); - //sei_msg_ptr->cnt_dropped_flag[i] = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 8); - //sei_msg_ptr->n_frames[i] = (uint8_t)code; - - - if(full_timestamp_flag) - { - viddec_pm_get_bits(parent, &code , 6); - //sei_msg_ptr->seconds_value[i] = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 6); - //sei_msg_ptr->minutes_value[i] = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 5); - //sei_msg_ptr->hours_value[i] = (uint8_t)code; - } - else - { - viddec_pm_get_bits(parent, &code , 1); - //sei_msg_ptr->seconds_flag[i] = (uint8_t)code; - seconds_flag = code; - - if(seconds_flag) - { - viddec_pm_get_bits(parent, &code , 6); - //sei_msg_ptr->seconds_value[i] = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 1); - //sei_msg_ptr->minutes_flag[i] = (uint8_t)code; - minutes_flag = code; - - if(minutes_flag) - { - viddec_pm_get_bits(parent, &code , 6); - //sei_msg_ptr->minutes_value[i] = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 1); - //sei_msg_ptr->hours_flag[i] = (uint8_t)code; - 
hours_flag = code; - - if(hours_flag){ - viddec_pm_get_bits(parent, &code , 6); - //sei_msg_ptr->hours_value[i] = (uint8_t)code; - } - } - } - } - - if(time_offset_length > 0) - { - viddec_pm_get_bits(parent, (uint32_t *)&time_offset, time_offset_length); - } - } - } - } - - - return H264_STATUS_OK; + //Push to current if we are in first frame, or we do not detect previous frame end + viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); +#endif + + if (sei_msg_ptr->pic_struct < 3) { + NumClockTS = 1; + } else if ((sei_msg_ptr->pic_struct < 5) || (sei_msg_ptr->pic_struct == 7)) { + NumClockTS = 2; + } else { + NumClockTS = 3; + } + + for (i = 0; i < NumClockTS; i++) + { + viddec_pm_get_bits(parent, &code , 1); + clock_timestamp_flag = code; + //sei_msg_ptr->clock_timestamp_flag[i] = (uint8_t)code; + + if (clock_timestamp_flag) + { + viddec_pm_get_bits(parent, &code , 2); + //sei_msg_ptr->ct_type[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->nuit_field_based_flag[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 5); + //sei_msg_ptr->counting_type[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->full_timestamp_flag[i] = (uint8_t)code; + full_timestamp_flag = code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->discontinuity_flag[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->cnt_dropped_flag[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 8); + //sei_msg_ptr->n_frames[i] = (uint8_t)code; + + + if (full_timestamp_flag) + { + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->seconds_value[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->minutes_value[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 5); + //sei_msg_ptr->hours_value[i] = (uint8_t)code; + } + else + { + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->seconds_flag[i] = (uint8_t)code; + seconds_flag = code; + + if (seconds_flag) + { + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->seconds_value[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->minutes_flag[i] = (uint8_t)code; + minutes_flag = code; + + if (minutes_flag) + { + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->minutes_value[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->hours_flag[i] = (uint8_t)code; + hours_flag = code; + + if (hours_flag) { + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->hours_value[i] = (uint8_t)code; + } + } + } + } + + if (time_offset_length > 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&time_offset, time_offset_length); + } + } + } + } + + + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_pan_scan(void *parent,h264_Info* pInfo) { - h264_SEI_pan_scan_rectangle_t* sei_msg_ptr; - h264_SEI_pan_scan_rectangle_t sei_pan_scan; - uint32_t code; - - viddec_workload_item_t wi; - - h264_memset( &(sei_pan_scan), 0x0, sizeof(h264_SEI_pan_scan_rectangle_t) ); - - viddec_fw_reset_workload_item(&wi); - wi.vwi_type = VIDDEC_WORKLOAD_H264_PAN_SCAN; - - sei_msg_ptr = (h264_SEI_pan_scan_rectangle_t *)(&sei_pan_scan); - - 
sei_msg_ptr->pan_scan_rect_id = h264_GetVLCElement(parent, pInfo, false); - - wi.h264_sei_pan_scan.pan_scan_rect_id = sei_msg_ptr->pan_scan_rect_id; - - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->pan_scan_rect_cancel_flag = (uint8_t)code; - viddec_fw_h264_sei_pan_scan_set_cancel_flag(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_rect_cancel_flag); - - if(!sei_msg_ptr->pan_scan_rect_cancel_flag) - { - int32_t i; - sei_msg_ptr->pan_scan_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false); - - viddec_fw_h264_sei_pan_scan_set_cnt_minus1(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_cnt_minus1); - if(sei_msg_ptr->pan_scan_cnt_minus1 > MAX_PAN_SCAN_CNT -1) - { - return H264_STATUS_SEI_ERROR; - } - for(i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++) - { - sei_msg_ptr->pan_scan_rect_left_offset[i] = h264_GetVLCElement(parent, pInfo, true); - sei_msg_ptr->pan_scan_rect_right_offset[i] = h264_GetVLCElement(parent, pInfo, true); - sei_msg_ptr->pan_scan_rect_top_offset[i] = h264_GetVLCElement(parent, pInfo, true); - sei_msg_ptr->pan_scan_rect_bottom_offset[i] = h264_GetVLCElement(parent, pInfo, true); - } - sei_msg_ptr->pan_scan_rect_repetition_period = h264_GetVLCElement(parent, pInfo, false); - wi.h264_sei_pan_scan.pan_scan_rect_repetition_period = sei_msg_ptr->pan_scan_rect_repetition_period; - } -#ifndef VBP - //cur is first frame - viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); -#endif - - if(!sei_msg_ptr->pan_scan_rect_cancel_flag) - { - int32_t i; - - viddec_fw_reset_workload_item(&wi); - wi.vwi_type = VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT; - - for(i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++) - { - viddec_fw_h264_pan_scan_set_left(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_left_offset[i]); - viddec_fw_h264_pan_scan_set_right(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_right_offset[i]); - viddec_fw_h264_pan_scan_set_top(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_top_offset[i]); - viddec_fw_h264_pan_scan_set_bottom(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_bottom_offset[i]); -#ifndef VBP - //cur is first frame - viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); -#endif - } - } - - return H264_STATUS_OK; + h264_SEI_pan_scan_rectangle_t* sei_msg_ptr; + h264_SEI_pan_scan_rectangle_t sei_pan_scan; + uint32_t code; + + viddec_workload_item_t wi; + + h264_memset( &(sei_pan_scan), 0x0, sizeof(h264_SEI_pan_scan_rectangle_t) ); + + viddec_fw_reset_workload_item(&wi); + wi.vwi_type = VIDDEC_WORKLOAD_H264_PAN_SCAN; + + sei_msg_ptr = (h264_SEI_pan_scan_rectangle_t *)(&sei_pan_scan); + + sei_msg_ptr->pan_scan_rect_id = h264_GetVLCElement(parent, pInfo, false); + + wi.h264_sei_pan_scan.pan_scan_rect_id = sei_msg_ptr->pan_scan_rect_id; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->pan_scan_rect_cancel_flag = (uint8_t)code; + viddec_fw_h264_sei_pan_scan_set_cancel_flag(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_rect_cancel_flag); + + if (!sei_msg_ptr->pan_scan_rect_cancel_flag) + { + int32_t i; + sei_msg_ptr->pan_scan_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false); + + viddec_fw_h264_sei_pan_scan_set_cnt_minus1(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_cnt_minus1); + if (sei_msg_ptr->pan_scan_cnt_minus1 > MAX_PAN_SCAN_CNT -1) + { + return H264_STATUS_SEI_ERROR; + } + for (i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++) + { + sei_msg_ptr->pan_scan_rect_left_offset[i] = h264_GetVLCElement(parent, pInfo, true); + 
sei_msg_ptr->pan_scan_rect_right_offset[i] = h264_GetVLCElement(parent, pInfo, true); + sei_msg_ptr->pan_scan_rect_top_offset[i] = h264_GetVLCElement(parent, pInfo, true); + sei_msg_ptr->pan_scan_rect_bottom_offset[i] = h264_GetVLCElement(parent, pInfo, true); + } + sei_msg_ptr->pan_scan_rect_repetition_period = h264_GetVLCElement(parent, pInfo, false); + wi.h264_sei_pan_scan.pan_scan_rect_repetition_period = sei_msg_ptr->pan_scan_rect_repetition_period; + } +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); +#endif + + if (!sei_msg_ptr->pan_scan_rect_cancel_flag) + { + int32_t i; + + viddec_fw_reset_workload_item(&wi); + wi.vwi_type = VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT; + + for (i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++) + { + viddec_fw_h264_pan_scan_set_left(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_left_offset[i]); + viddec_fw_h264_pan_scan_set_right(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_right_offset[i]); + viddec_fw_h264_pan_scan_set_top(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_top_offset[i]); + viddec_fw_h264_pan_scan_set_bottom(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_bottom_offset[i]); +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); +#endif + } + } + + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_filler_payload(void *parent,h264_Info* pInfo, uint32_t payload_size) { - - h264_SEI_filler_payload_t* sei_msg_ptr; - h264_SEI_filler_payload_t sei_filler_payload; - uint32_t k; - uint32_t code; - - //remove warning - pInfo = pInfo; - - sei_msg_ptr = (h264_SEI_filler_payload_t *)(&sei_filler_payload); - for(k=0; k < payload_size; k++) - { - viddec_pm_get_bits(parent, &code , 8); - sei_msg_ptr->ff_byte = (uint8_t)code; - } - - return H264_STATUS_OK; + + h264_SEI_filler_payload_t* sei_msg_ptr; + h264_SEI_filler_payload_t sei_filler_payload; + uint32_t k; + uint32_t code; + + //remove warning + pInfo = pInfo; + + sei_msg_ptr = (h264_SEI_filler_payload_t *)(&sei_filler_payload); + for (k=0; k < payload_size; k++) + { + viddec_pm_get_bits(parent, &code , 8); + sei_msg_ptr->ff_byte = (uint8_t)code; + } + + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_userdata_reg(void *parent,h264_Info* pInfo, uint32_t payload_size) { - - h264_SEI_userdata_registered_t* sei_msg_ptr; - h264_SEI_userdata_registered_t sei_userdata_registered; - uint32_t i; - int32_t byte = 0; - uint32_t code = 0; - viddec_workload_item_t wi; - - wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED; + + h264_SEI_userdata_registered_t* sei_msg_ptr; + h264_SEI_userdata_registered_t sei_userdata_registered; + uint32_t i; + int32_t byte = 0; + uint32_t code = 0; + viddec_workload_item_t wi; + + wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED; wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0; - //remove warning - pInfo = pInfo; 
- - sei_msg_ptr = (h264_SEI_userdata_registered_t *)(&sei_userdata_registered); - - viddec_pm_get_bits(parent, &code , 8); - sei_msg_ptr->itu_t_t35_country_code = (uint8_t)code; - - if(sei_msg_ptr->itu_t_t35_country_code != 0xff) { - i = 1; - } else { - viddec_pm_get_bits(parent, &code , 8); - sei_msg_ptr->itu_t_t35_country_code_extension_byte = (uint8_t)code; - i = 2; - } - - - wi.user_data.size =0; - do - { - - viddec_pm_get_bits(parent, (uint32_t *)&byte, 8); - - wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; - wi.user_data.size++; - - if(11 == wi.user_data.size) - { - viddec_pm_setup_userdata(&wi); -#ifndef VBP - //cur is first frame - viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); -#endif - wi.user_data.size =0; - } - - i++; - }while(i < payload_size); - - if(0!=wi.user_data.size) - { - viddec_pm_setup_userdata(&wi); -#ifndef VBP - //cur is first frame - viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); -#endif - } - - return H264_STATUS_OK; + //remove warning + pInfo = pInfo; + + sei_msg_ptr = (h264_SEI_userdata_registered_t *)(&sei_userdata_registered); + + viddec_pm_get_bits(parent, &code , 8); + sei_msg_ptr->itu_t_t35_country_code = (uint8_t)code; + + if (sei_msg_ptr->itu_t_t35_country_code != 0xff) { + i = 1; + } else { + viddec_pm_get_bits(parent, &code , 8); + sei_msg_ptr->itu_t_t35_country_code_extension_byte = (uint8_t)code; + i = 2; + } + + + wi.user_data.size =0; + do + { + + viddec_pm_get_bits(parent, (uint32_t *)&byte, 8); + + wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; + wi.user_data.size++; + + if (11 == wi.user_data.size) + { + viddec_pm_setup_userdata(&wi); +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); +#endif + wi.user_data.size =0; + } + + i++; + } while (i < payload_size); + + if (0!=wi.user_data.size) + { + viddec_pm_setup_userdata(&wi); +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); +#endif + } + + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_userdata_unreg(void *parent, h264_Info* pInfo, uint32_t payload_size) { - - h264_SEI_userdata_unregistered_t* sei_msg_ptr; - h264_SEI_userdata_unregistered_t sei_userdata_unregistered; - uint32_t i; - int32_t byte = 0; - uint32_t code; - viddec_workload_item_t wi; + h264_SEI_userdata_unregistered_t* sei_msg_ptr; + h264_SEI_userdata_unregistered_t sei_userdata_unregistered; + uint32_t i; + int32_t byte = 0; + uint32_t code; + + viddec_workload_item_t wi; wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED; //remove warning - pInfo = pInfo; - - sei_msg_ptr = (h264_SEI_userdata_unregistered_t *)(&sei_userdata_unregistered); - - for (i = 0; i < 4; i++) - { - viddec_pm_get_bits(parent, &code , 32); - sei_msg_ptr->uuid_iso_iec_11578[i] = (uint8_t)code; - } - - wi.user_data.size =0; - for(i = 16; i < payload_size; i++) - { - - viddec_pm_get_bits(parent, (uint32_t *)&byte, 8); - - wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; - wi.user_data.size++; - - if(11 == wi.user_data.size) - { - viddec_pm_setup_userdata(&wi); -#ifndef VBP - //cur 
is first frame - viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); -#endif - wi.user_data.size =0; - } - } - - if(0!=wi.user_data.size) - { - viddec_pm_setup_userdata(&wi); -#ifndef VBP - //cur is first frame - viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); -#endif - } - - return H264_STATUS_OK; + pInfo = pInfo; + + sei_msg_ptr = (h264_SEI_userdata_unregistered_t *)(&sei_userdata_unregistered); + + for (i = 0; i < 4; i++) + { + viddec_pm_get_bits(parent, &code , 32); + sei_msg_ptr->uuid_iso_iec_11578[i] = (uint8_t)code; + } + + wi.user_data.size =0; + for (i = 16; i < payload_size; i++) + { + + viddec_pm_get_bits(parent, (uint32_t *)&byte, 8); + + wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; + wi.user_data.size++; + + if (11 == wi.user_data.size) + { + viddec_pm_setup_userdata(&wi); +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); +#endif + wi.user_data.size =0; + } + } + + if (0!=wi.user_data.size) + { + viddec_pm_setup_userdata(&wi); +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); +#endif + } + + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_recovery_point(void *parent, h264_Info* pInfo) { - - h264_SEI_recovery_point_t* sei_msg_ptr; - h264_SEI_recovery_point_t sei_recovery_point; - uint32_t code; - viddec_workload_item_t wi; - - - sei_msg_ptr = (h264_SEI_recovery_point_t *)(&sei_recovery_point); - - sei_msg_ptr->recovery_frame_cnt = h264_GetVLCElement(parent, pInfo, false); - - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->exact_match_flag = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->broken_link_flag = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 2); - sei_msg_ptr->changing_slice_group_idc = (uint8_t)code; - - pInfo->sei_information.recovery_point = 1; - pInfo->sei_information.recovery_frame_cnt = (int32_t) sei_msg_ptr->recovery_frame_cnt; - pInfo->sei_information.capture_fn = 1; - pInfo->sei_information.broken_link_pic = sei_msg_ptr->broken_link_flag; - - if(pInfo->got_start) { - pInfo->img.recovery_point_found |= 2; - - //// Enable the RP recovery if no IDR ---Cisco - if((pInfo->img.recovery_point_found & 1)==0) - pInfo->sei_rp_received = 1; - } - - // - /// Append workload for SEI - // - viddec_fw_reset_workload_item(&wi); - wi.vwi_type = VIDDEC_WORKLOAD_SEI_RECOVERY_POINT; - wi.h264_sei_recovery_point.recovery_frame_cnt = sei_msg_ptr->recovery_frame_cnt; - viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->exact_match_flag); - viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->broken_link_flag); - wi.h264_sei_recovery_point.changing_slice_group_idc = sei_msg_ptr->changing_slice_group_idc; -#ifndef VBP - //cur is first frame - viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); -#endif - - return H264_STATUS_OK; + + h264_SEI_recovery_point_t* sei_msg_ptr; + h264_SEI_recovery_point_t sei_recovery_point; + uint32_t code; + 
viddec_workload_item_t wi; + + + sei_msg_ptr = (h264_SEI_recovery_point_t *)(&sei_recovery_point); + + sei_msg_ptr->recovery_frame_cnt = h264_GetVLCElement(parent, pInfo, false); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->exact_match_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->broken_link_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 2); + sei_msg_ptr->changing_slice_group_idc = (uint8_t)code; + + pInfo->sei_information.recovery_point = 1; + pInfo->sei_information.recovery_frame_cnt = (int32_t) sei_msg_ptr->recovery_frame_cnt; + pInfo->sei_information.capture_fn = 1; + pInfo->sei_information.broken_link_pic = sei_msg_ptr->broken_link_flag; + + if (pInfo->got_start) { + pInfo->img.recovery_point_found |= 2; + + //// Enable the RP recovery if no IDR ---Cisco + if ((pInfo->img.recovery_point_found & 1)==0) + pInfo->sei_rp_received = 1; + } + + // + /// Append workload for SEI + // + viddec_fw_reset_workload_item(&wi); + wi.vwi_type = VIDDEC_WORKLOAD_SEI_RECOVERY_POINT; + wi.h264_sei_recovery_point.recovery_frame_cnt = sei_msg_ptr->recovery_frame_cnt; + viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->exact_match_flag); + viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->broken_link_flag); + wi.h264_sei_recovery_point.changing_slice_group_idc = sei_msg_ptr->changing_slice_group_idc; +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); +#endif + + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_dec_ref_pic_marking_rep(void *parent,h264_Info* pInfo) { - - h264_SEI_decoded_ref_pic_marking_repetition_t* sei_msg_ptr; - h264_SEI_decoded_ref_pic_marking_repetition_t sei_ref_pic; - uint32_t code; - - sei_msg_ptr = (h264_SEI_decoded_ref_pic_marking_repetition_t *)(&sei_ref_pic); - - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->original_idr_flag = (uint8_t)code; - - sei_msg_ptr->original_frame_num = h264_GetVLCElement(parent, pInfo, false); - - if(!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag)) - { - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->orignal_field_pic_flag = (uint8_t)code; - - if(sei_msg_ptr->orignal_field_pic_flag) - { - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->original_bottom_field_pic_flag = (uint8_t)code; - } - } - h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, &pInfo->SliceHeader); - return H264_STATUS_OK; + + h264_SEI_decoded_ref_pic_marking_repetition_t* sei_msg_ptr; + h264_SEI_decoded_ref_pic_marking_repetition_t sei_ref_pic; + uint32_t code; + + sei_msg_ptr = (h264_SEI_decoded_ref_pic_marking_repetition_t *)(&sei_ref_pic); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->original_idr_flag = (uint8_t)code; + + sei_msg_ptr->original_frame_num = h264_GetVLCElement(parent, pInfo, false); + + if (!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag)) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->orignal_field_pic_flag = (uint8_t)code; + + if (sei_msg_ptr->orignal_field_pic_flag) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->original_bottom_field_pic_flag = (uint8_t)code; + } + } + 
h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, &pInfo->SliceHeader); + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_spare_pic(void *parent,h264_Info* pInfo) { - - //h264_SEI_spare_picture_t* sei_msg_ptr; - //remove warning - pInfo = pInfo; - parent = parent; - - //sei_msg_ptr = (h264_SEI_spare_picture_t *)(&user_data->user_data[0]); + //h264_SEI_spare_picture_t* sei_msg_ptr; + + //remove warning + pInfo = pInfo; + parent = parent; - //OS_INFO("Not supported SEI\n"); - return H264_STATUS_OK; + //sei_msg_ptr = (h264_SEI_spare_picture_t *)(&user_data->user_data[0]); + + //OS_INFO("Not supported SEI\n"); + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_scene_info(void *parent,h264_Info* pInfo) { - - h264_SEI_scene_info_t* sei_msg_ptr; - h264_SEI_scene_info_t sei_scene_info; - uint32_t code; - - sei_msg_ptr = (h264_SEI_scene_info_t*)(&sei_scene_info); - - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->scene_info_present_flag = (uint8_t)code; - - if(sei_msg_ptr->scene_info_present_flag) - { - sei_msg_ptr->scene_id = h264_GetVLCElement(parent, pInfo, false); - sei_msg_ptr->scene_transitioning_type= h264_GetVLCElement(parent, pInfo, false); - if(sei_msg_ptr->scene_transitioning_type > 3) - { - sei_msg_ptr->second_scene_id = h264_GetVLCElement(parent, pInfo, false); - } - } - - return H264_STATUS_OK; + + h264_SEI_scene_info_t* sei_msg_ptr; + h264_SEI_scene_info_t sei_scene_info; + uint32_t code; + + sei_msg_ptr = (h264_SEI_scene_info_t*)(&sei_scene_info); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->scene_info_present_flag = (uint8_t)code; + + if (sei_msg_ptr->scene_info_present_flag) + { + sei_msg_ptr->scene_id = h264_GetVLCElement(parent, pInfo, false); + sei_msg_ptr->scene_transitioning_type= h264_GetVLCElement(parent, pInfo, false); + if (sei_msg_ptr->scene_transitioning_type > 3) + { + sei_msg_ptr->second_scene_id = h264_GetVLCElement(parent, pInfo, false); + } + } + + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_sub_seq_info(void *parent,h264_Info* pInfo) { - - h264_SEI_sub_sequence_info_t* sei_msg_ptr; - h264_SEI_sub_sequence_info_t sei_sub_sequence_info; - uint32_t code; - - sei_msg_ptr = (h264_SEI_sub_sequence_info_t *)(&sei_sub_sequence_info); - - sei_msg_ptr->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo,false); - sei_msg_ptr->sub_seq_id= h264_GetVLCElement(parent, pInfo,false); - - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->first_ref_pic_flag = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->leading_non_ref_pic_flag = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->last_pic_flag = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 1); - 
sei_msg_ptr->sub_seq_frame_num_flag = (uint8_t)code; - - - if(sei_msg_ptr->sub_seq_frame_num_flag) - { - sei_msg_ptr->sub_seq_frame_num = h264_GetVLCElement(parent, pInfo,false); - } - return H264_STATUS_OK; + + h264_SEI_sub_sequence_info_t* sei_msg_ptr; + h264_SEI_sub_sequence_info_t sei_sub_sequence_info; + uint32_t code; + + sei_msg_ptr = (h264_SEI_sub_sequence_info_t *)(&sei_sub_sequence_info); + + sei_msg_ptr->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo,false); + sei_msg_ptr->sub_seq_id= h264_GetVLCElement(parent, pInfo,false); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->first_ref_pic_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->leading_non_ref_pic_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->last_pic_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->sub_seq_frame_num_flag = (uint8_t)code; + + + if (sei_msg_ptr->sub_seq_frame_num_flag) + { + sei_msg_ptr->sub_seq_frame_num = h264_GetVLCElement(parent, pInfo,false); + } + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_sub_seq_layer(void *parent,h264_Info* pInfo) { - - h264_SEI_sub_sequence_layer_t* sei_msg_ptr; - h264_SEI_sub_sequence_layer_t sei_sub_sequence_layer; - int32_t layer; - uint32_t code; - - sei_msg_ptr = (h264_SEI_sub_sequence_layer_t *)(&sei_sub_sequence_layer); - sei_msg_ptr->num_sub_seq_layers_minus1 = h264_GetVLCElement(parent, pInfo,false); - - if(sei_msg_ptr->num_sub_seq_layers_minus1 >= MAX_SUB_SEQ_LAYERS) - { - return H264_STATUS_SEI_ERROR; - } - - for(layer = 0;layer <= sei_msg_ptr->num_sub_seq_layers_minus1; layer++) - { - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->accurate_statistics_flag[layer] = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 16); - sei_msg_ptr->average_bit_rate[layer] = (uint16_t)code; - - viddec_pm_get_bits(parent, &code , 16); - sei_msg_ptr->average_frame_rate[layer] = (uint16_t)code; - - } - - return H264_STATUS_OK; + + h264_SEI_sub_sequence_layer_t* sei_msg_ptr; + h264_SEI_sub_sequence_layer_t sei_sub_sequence_layer; + int32_t layer; + uint32_t code; + + sei_msg_ptr = (h264_SEI_sub_sequence_layer_t *)(&sei_sub_sequence_layer); + sei_msg_ptr->num_sub_seq_layers_minus1 = h264_GetVLCElement(parent, pInfo,false); + + if (sei_msg_ptr->num_sub_seq_layers_minus1 >= MAX_SUB_SEQ_LAYERS) + { + return H264_STATUS_SEI_ERROR; + } + + for (layer = 0; layer <= sei_msg_ptr->num_sub_seq_layers_minus1; layer++) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->accurate_statistics_flag[layer] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 16); + sei_msg_ptr->average_bit_rate[layer] = (uint16_t)code; + + viddec_pm_get_bits(parent, &code , 16); + sei_msg_ptr->average_frame_rate[layer] = (uint16_t)code; + + } + + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_sub_seq(void *parent,h264_Info* pInfo) { - int32_t n; - uint32_t code; - - h264_SEI_sub_sequence_t* sei_msg_ptr; - 
h264_SEI_sub_sequence_t sei_sub_sequence; - - sei_msg_ptr = (h264_SEI_sub_sequence_t *)(&sei_sub_sequence); - - sei_msg_ptr->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo, false); - sei_msg_ptr->sub_seq_id= h264_GetVLCElement(parent, pInfo, false); - - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->duration_flag = (uint8_t)code; - - if(sei_msg_ptr->duration_flag) - { - viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->sub_seq_duration, 32); - } - - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->average_rate_flag = (uint8_t)code; - - if(sei_msg_ptr->average_rate_flag) - { - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->average_statistics_flag = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 16); - sei_msg_ptr->average_bit_rate = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 16); - sei_msg_ptr->average_frame_rate = (uint8_t)code; - - } - sei_msg_ptr->num_referenced_subseqs = h264_GetVLCElement(parent, pInfo, false); - if(sei_msg_ptr->num_referenced_subseqs >= MAX_NUM_REF_SUBSEQS) - { - return H264_STATUS_SEI_ERROR; - } - - for(n = 0; n < sei_msg_ptr->num_referenced_subseqs; n++) - { - sei_msg_ptr->ref_sub_seq_layer_num= h264_GetVLCElement(parent, pInfo, false); - sei_msg_ptr->ref_sub_seq_id= h264_GetVLCElement(parent, pInfo, false); - - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->ref_sub_seq_direction = (uint8_t)code; - } - return H264_STATUS_OK; + int32_t n; + uint32_t code; + + h264_SEI_sub_sequence_t* sei_msg_ptr; + h264_SEI_sub_sequence_t sei_sub_sequence; + + sei_msg_ptr = (h264_SEI_sub_sequence_t *)(&sei_sub_sequence); + + sei_msg_ptr->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo, false); + sei_msg_ptr->sub_seq_id= h264_GetVLCElement(parent, pInfo, false); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->duration_flag = (uint8_t)code; + + if (sei_msg_ptr->duration_flag) + { + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->sub_seq_duration, 32); + } + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->average_rate_flag = (uint8_t)code; + + if (sei_msg_ptr->average_rate_flag) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->average_statistics_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 16); + sei_msg_ptr->average_bit_rate = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 16); + sei_msg_ptr->average_frame_rate = (uint8_t)code; + + } + sei_msg_ptr->num_referenced_subseqs = h264_GetVLCElement(parent, pInfo, false); + if (sei_msg_ptr->num_referenced_subseqs >= MAX_NUM_REF_SUBSEQS) + { + return H264_STATUS_SEI_ERROR; + } + + for (n = 0; n < sei_msg_ptr->num_referenced_subseqs; n++) + { + sei_msg_ptr->ref_sub_seq_layer_num= h264_GetVLCElement(parent, pInfo, false); + sei_msg_ptr->ref_sub_seq_id= h264_GetVLCElement(parent, pInfo, false); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->ref_sub_seq_direction = (uint8_t)code; + } + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_full_frame_freeze(void *parent,h264_Info* pInfo) { - - h264_SEI_full_frame_freeze_t* sei_msg_ptr; - h264_SEI_full_frame_freeze_t sei_full_frame_freeze; - - sei_msg_ptr = (h264_SEI_full_frame_freeze_t *)(&sei_full_frame_freeze); - - sei_msg_ptr->full_frame_freeze_repetition_period= h264_GetVLCElement(parent, 
pInfo, false); - - pInfo->sei_information.capture_POC = 1; - pInfo->sei_information.freeze_rep_period = sei_msg_ptr->full_frame_freeze_repetition_period; - //pInfo->img.sei_freeze_this_image = 1; - - return H264_STATUS_OK; + + h264_SEI_full_frame_freeze_t* sei_msg_ptr; + h264_SEI_full_frame_freeze_t sei_full_frame_freeze; + + sei_msg_ptr = (h264_SEI_full_frame_freeze_t *)(&sei_full_frame_freeze); + + sei_msg_ptr->full_frame_freeze_repetition_period= h264_GetVLCElement(parent, pInfo, false); + + pInfo->sei_information.capture_POC = 1; + pInfo->sei_information.freeze_rep_period = sei_msg_ptr->full_frame_freeze_repetition_period; + //pInfo->img.sei_freeze_this_image = 1; + + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_full_frame_freeze_release(void *parent,h264_Info* pInfo) { - //remove warning - parent = parent; - pInfo = pInfo; + //remove warning + parent = parent; + pInfo = pInfo; - - return H264_STATUS_OK; + + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_full_frame_snapshot(void *parent,h264_Info* pInfo) { - - h264_SEI_full_frame_snapshot_t* sei_msg_ptr; - h264_SEI_full_frame_snapshot_t sei_full_frame_snapshot; - - sei_msg_ptr = (h264_SEI_full_frame_snapshot_t *)(&sei_full_frame_snapshot); - - sei_msg_ptr->snapshot_id = h264_GetVLCElement(parent, pInfo, false); - return H264_STATUS_OK; + + h264_SEI_full_frame_snapshot_t* sei_msg_ptr; + h264_SEI_full_frame_snapshot_t sei_full_frame_snapshot; + + sei_msg_ptr = (h264_SEI_full_frame_snapshot_t *)(&sei_full_frame_snapshot); + + sei_msg_ptr->snapshot_id = h264_GetVLCElement(parent, pInfo, false); + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_progressive_segement_start(void *parent,h264_Info* pInfo) { - - h264_SEI_progressive_segment_start_t* sei_msg_ptr; - h264_SEI_progressive_segment_start_t sei_progressive_segment_start; - - sei_msg_ptr = (h264_SEI_progressive_segment_start_t *)(&sei_progressive_segment_start); - - sei_msg_ptr->progressive_refinement_id= h264_GetVLCElement(parent, pInfo, false); - sei_msg_ptr->num_refinement_steps_minus1= h264_GetVLCElement(parent, pInfo, false); - return H264_STATUS_OK; + + h264_SEI_progressive_segment_start_t* sei_msg_ptr; + h264_SEI_progressive_segment_start_t sei_progressive_segment_start; + + sei_msg_ptr = (h264_SEI_progressive_segment_start_t *)(&sei_progressive_segment_start); + + sei_msg_ptr->progressive_refinement_id= h264_GetVLCElement(parent, pInfo, false); + sei_msg_ptr->num_refinement_steps_minus1= h264_GetVLCElement(parent, pInfo, false); + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* 
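For reference, every h264_GetVLCElement(parent, pInfo, false) call in these parsers reads an unsigned Exp-Golomb code (ue(v)); passing true applies the signed mapping (se(v)). A minimal sketch of that coding, assuming a plain byte buffer and a caller-maintained bit offset instead of the library's viddec_pm bit reader, and ignoring emulation-prevention bytes (get_bit/ue_v/se_v are illustrative, not part of this library):

#include <stdint.h>

static uint32_t get_bit(const uint8_t *buf, uint32_t *pos)
{
    uint32_t bit = (buf[*pos >> 3] >> (7 - (*pos & 7))) & 1;
    (*pos)++;
    return bit;
}

static uint32_t ue_v(const uint8_t *buf, uint32_t *pos)
{
    uint32_t zeros = 0, suffix = 0, k;
    while (get_bit(buf, pos) == 0)        /* leading zeros give the length */
        zeros++;
    for (k = 0; k < zeros; k++)           /* read 'zeros' suffix bits      */
        suffix = (suffix << 1) | get_bit(buf, pos);
    return (1u << zeros) - 1 + suffix;    /* codeNum                       */
}

static int32_t se_v(const uint8_t *buf, uint32_t *pos)
{
    uint32_t k = ue_v(buf, pos);          /* 0,1,2,3,4 -> 0,1,-1,2,-2      */
    return (k & 1) ? (int32_t)((k + 1) >> 1) : -(int32_t)(k >> 1);
}

So the bit string 00101, for example, decodes to codeNum 4 as ue(v), or to -2 as se(v).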
------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_progressive_segment_end(void *parent,h264_Info* pInfo) { - - h264_SEI_progressive_segment_end_t* sei_msg_ptr; - h264_SEI_progressive_segment_end_t sei_progressive_segment_end; - - sei_msg_ptr = (h264_SEI_progressive_segment_end_t *)(&sei_progressive_segment_end); - - sei_msg_ptr->progressive_refinement_id = h264_GetVLCElement(parent, pInfo, false); - return H264_STATUS_OK; + + h264_SEI_progressive_segment_end_t* sei_msg_ptr; + h264_SEI_progressive_segment_end_t sei_progressive_segment_end; + + sei_msg_ptr = (h264_SEI_progressive_segment_end_t *)(&sei_progressive_segment_end); + + sei_msg_ptr->progressive_refinement_id = h264_GetVLCElement(parent, pInfo, false); + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_motion_constrained_slice_grp_set(void *parent, h264_Info* pInfo) { - int32_t i; - uint32_t code; - h264_SEI_motion_constrained_slice_group_t* sei_msg_ptr; - h264_SEI_motion_constrained_slice_group_t sei_motion_constrained_slice_group; - - sei_msg_ptr = (h264_SEI_motion_constrained_slice_group_t *)(&sei_motion_constrained_slice_group); - - sei_msg_ptr->num_slice_groups_in_set_minus1= h264_GetVLCElement(parent, pInfo, false); - if(sei_msg_ptr->num_slice_groups_in_set_minus1 >= MAX_NUM_SLICE_GRPS) - { - return H264_STATUS_SEI_ERROR; - } - - for(i=0; i<= sei_msg_ptr->num_slice_groups_in_set_minus1; i++) - { - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->slice_group_id[i] = (uint8_t)code; - } - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->exact_sample_value_match_flag = (uint8_t)code; - - viddec_pm_get_bits(parent, &code , 1); - sei_msg_ptr->pan_scan_rect_flag = (uint8_t)code; - - - if(sei_msg_ptr->pan_scan_rect_flag) - { - sei_msg_ptr->pan_scan_rect_id= h264_GetVLCElement(parent, pInfo, false); - } - return H264_STATUS_OK; + int32_t i; + uint32_t code; + h264_SEI_motion_constrained_slice_group_t* sei_msg_ptr; + h264_SEI_motion_constrained_slice_group_t sei_motion_constrained_slice_group; + + sei_msg_ptr = (h264_SEI_motion_constrained_slice_group_t *)(&sei_motion_constrained_slice_group); + + sei_msg_ptr->num_slice_groups_in_set_minus1= h264_GetVLCElement(parent, pInfo, false); + if (sei_msg_ptr->num_slice_groups_in_set_minus1 >= MAX_NUM_SLICE_GRPS) + { + return H264_STATUS_SEI_ERROR; + } + + for (i=0; i<= sei_msg_ptr->num_slice_groups_in_set_minus1; i++) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->slice_group_id[i] = (uint8_t)code; + } + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->exact_sample_value_match_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->pan_scan_rect_flag = (uint8_t)code; + + + if (sei_msg_ptr->pan_scan_rect_flag) + { + sei_msg_ptr->pan_scan_rect_id= h264_GetVLCElement(parent, pInfo, false); + } + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ 
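A pattern worth noting in h264_sei_sub_seq_layer(), h264_sei_sub_seq() and h264_sei_motion_constrained_slice_grp_set() above: a ue(v) count is range-checked against a fixed array bound (MAX_SUB_SEQ_LAYERS, MAX_NUM_REF_SUBSEQS, MAX_NUM_SLICE_GRPS) before it drives a loop, returning H264_STATUS_SEI_ERROR on overflow. A condensed sketch of the idiom, with hypothetical names (MAX_ENTRIES, parse_entries) and the ue_v() helper sketched earlier:

#define MAX_ENTRIES 8   /* hypothetical bound, stands in for the MAX_* limits above */

static int parse_entries(const uint8_t *buf, uint32_t *pos,
                         uint16_t entry[MAX_ENTRIES])
{
    uint32_t n_minus1 = ue_v(buf, pos);   /* untrusted count from the stream */
    uint32_t i;
    if (n_minus1 >= MAX_ENTRIES)          /* reject before indexing,         */
        return -1;                        /* like H264_STATUS_SEI_ERROR      */
    for (i = 0; i <= n_minus1; i++)       /* <= because it is a minus1 value */
        entry[i] = (uint16_t)ue_v(buf, pos);
    return 0;
}

Without the early check, a corrupt stream could make the loop write past the fixed-size array.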
*/ h264_Status h264_sei_film_grain_characteristics(void *parent,h264_Info* pInfo) { - //OS_INFO("Not supported SEI\n"); + //OS_INFO("Not supported SEI\n"); - //remove warning - parent = parent; - pInfo = pInfo; + //remove warning + parent = parent; + pInfo = pInfo; - - return H264_STATUS_OK; + + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_deblocking_filter_display_preferences(void *parent,h264_Info* pInfo) { - - //h264_SEI_deblocking_filter_display_pref_t* sei_msg_ptr; - //remove warning - parent = parent; - pInfo = pInfo; - - //sei_msg_ptr = (h264_SEI_deblocking_filter_display_pref_t *)(&user_data->user_data[0]); + //h264_SEI_deblocking_filter_display_pref_t* sei_msg_ptr; + + //remove warning + parent = parent; + pInfo = pInfo; + + //sei_msg_ptr = (h264_SEI_deblocking_filter_display_pref_t *)(&user_data->user_data[0]); - //OS_INFO("Not supported SEI\n"); - return H264_STATUS_OK; + //OS_INFO("Not supported SEI\n"); + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ h264_Status h264_sei_stereo_video_info(void *parent,h264_Info* pInfo) { - - //h264_SEI_stereo_video_info_t* sei_msg_ptr; - - //remove warning - parent = parent; - pInfo = pInfo; - - - //sei_msg_ptr = (h264_SEI_stereo_video_info_t *)(&user_data->user_data[0]); - - //OS_INFO("Not supported SEI\n"); - return H264_STATUS_OK; + + //h264_SEI_stereo_video_info_t* sei_msg_ptr; + + //remove warning + parent = parent; + pInfo = pInfo; + + + //sei_msg_ptr = (h264_SEI_stereo_video_info_t *)(&user_data->user_data[0]); + + //OS_INFO("Not supported SEI\n"); + return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ uint32_t h264_sei_reserved_sei_message(void *parent, h264_Info* pInfo, uint32_t payload_size) { - int32_t k, byte_index, user_data_byte_index; - uint32_t i; - int32_t word, bits; - uint32_t user_data; - //h264_SEI_reserved_t* sei_msg_ptr; - //h264_SEI_reserved_t sei_reserved; - - //remove warning - pInfo = pInfo; - - //sei_msg_ptr = (h264_SEI_reserved_t *)(&sei_reserved); - - byte_index = 0; - word = 0; - user_data_byte_index = 0x0; - - for(i = 0, k = 0; i < payload_size; i++) - { - if(byte_index == 0) word = 0; - viddec_pm_get_bits(parent, (uint32_t *)&bits, 8); - - switch (byte_index) - { - case 1: - word = (bits << 8) | word; - break; - case 2: - word = (bits << 16) | word; - break; - case 3: - word = (bits << 24) | word; - break; - default : - word = bits; - break; - } - - if(byte_index == 3) - { - byte_index = 0; - user_data = word; - k++; - } - else - { - byte_index++; - } - - user_data_byte_index++; - if ( user_data_byte_index == MAX_USER_DATA_SIZE) - { - //user_data->user_data_size = user_data_byte_index; - //sei_msg_ptr = (h264_SEI_reserved_t *)(&user_data->user_data[0]); - byte_index = 0; - word = 0; - 
user_data_byte_index = 0x0; - } - } - - if(byte_index) - user_data = word; - - //user_data->user_data_size = user_data_byte_index; - - return user_data_byte_index; - + int32_t k, byte_index, user_data_byte_index; + uint32_t i; + int32_t word, bits; + uint32_t user_data; + //h264_SEI_reserved_t* sei_msg_ptr; + //h264_SEI_reserved_t sei_reserved; + + //remove warning + pInfo = pInfo; + + //sei_msg_ptr = (h264_SEI_reserved_t *)(&sei_reserved); + + byte_index = 0; + word = 0; + user_data_byte_index = 0x0; + + for (i = 0, k = 0; i < payload_size; i++) + { + if (byte_index == 0) word = 0; + viddec_pm_get_bits(parent, (uint32_t *)&bits, 8); + + switch (byte_index) + { + case 1: + word = (bits << 8) | word; + break; + case 2: + word = (bits << 16) | word; + break; + case 3: + word = (bits << 24) | word; + break; + default : + word = bits; + break; + } + + if (byte_index == 3) + { + byte_index = 0; + user_data = word; + k++; + } + else + { + byte_index++; + } + + user_data_byte_index++; + if ( user_data_byte_index == MAX_USER_DATA_SIZE) + { + //user_data->user_data_size = user_data_byte_index; + //sei_msg_ptr = (h264_SEI_reserved_t *)(&user_data->user_data[0]); + byte_index = 0; + word = 0; + user_data_byte_index = 0x0; + } + } + + if (byte_index) + user_data = word; + + //user_data->user_data_size = user_data_byte_index; + + return user_data_byte_index; + // return H264_STATUS_OK; } @@ -958,97 +958,97 @@ uint32_t h264_sei_reserved_sei_message(void *parent, h264_Info* pInfo, uint32_t /* ------------------------------------------------------------------------------------------ */ h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtype payloadType, int32_t payloadSize) { - //int32_t bit_equal_to_zero; - h264_Status status = H264_STATUS_OK; - - //removing warning - payloadSize = payloadSize; - - switch(payloadType) - { - case SEI_BUF_PERIOD: - status = h264_sei_buffering_period(parent, pInfo); - break; - case SEI_PIC_TIMING: - status = h264_sei_pic_timing(parent, pInfo); - break; - case SEI_PAN_SCAN: - status = h264_sei_pan_scan(parent, pInfo); - break; - case SEI_FILLER_PAYLOAD: - status = h264_sei_filler_payload(parent, pInfo, payloadSize); - break; - case SEI_REG_USERDATA: - status = h264_sei_userdata_reg(parent, pInfo, payloadSize); - break; - case SEI_UNREG_USERDATA: - status = h264_sei_userdata_unreg(parent, pInfo, payloadSize); - break; - case SEI_RECOVERY_POINT: - h264_sei_recovery_point(parent, pInfo); - break; - case SEI_DEC_REF_PIC_MARKING_REP: - status = h264_sei_dec_ref_pic_marking_rep(parent, pInfo); - break; - case SEI_SPARE_PIC: - status = h264_sei_spare_pic(parent, pInfo); - break; - case SEI_SCENE_INFO: - status = h264_sei_scene_info(parent, pInfo); - break; - case SEI_SUB_SEQ_INFO: - status = h264_sei_sub_seq_info(parent, pInfo); - break; - case SEI_SUB_SEQ_LAYER: - status = h264_sei_sub_seq_layer(parent, pInfo); - break; - case SEI_SUB_SEQ: - status = h264_sei_sub_seq(parent, pInfo); - break; - case SEI_FULL_FRAME_FREEZE: - status = h264_sei_full_frame_freeze(parent, pInfo); - break; - case SEI_FULL_FRAME_FREEZE_RELEASE: - h264_sei_full_frame_freeze_release(parent, pInfo); - break; - case SEI_FULL_FRAME_SNAPSHOT: - status = h264_sei_full_frame_snapshot(parent, pInfo); - break; - case SEI_PROGRESSIVE_SEGMENT_START: - status = h264_sei_progressive_segement_start(parent, pInfo); - break; - case SEI_PROGRESSIVE_SEGMENT_END: - status = h264_sei_progressive_segment_end(parent, pInfo); - break; - case SEI_MOTION_CONSTRAINED_SLICE_GRP_SET: - status = 
h264_sei_motion_constrained_slice_grp_set(parent, pInfo); - break; - case SEI_FILM_GRAIN_CHARACTERISTICS: - status = h264_sei_film_grain_characteristics(parent, pInfo); - break; - case SEI_DEBLK_FILTER_DISPLAY_PREFERENCE: - status = h264_sei_deblocking_filter_display_preferences(parent, pInfo); - break; - case SEI_STEREO_VIDEO_INFO: - status = h264_sei_stereo_video_info(parent, pInfo); - break; - default: - status = (h264_Status)h264_sei_reserved_sei_message(parent, pInfo, payloadSize); - break; - } - -/* - viddec_pm_get_bits(parent, (uint32_t *)&tmp, 1); - - if(tmp == 0x1) // if byte is not aligned - { - while(pInfo->bitoff != 0) - { - viddec_pm_get_bits(parent, (uint32_t *)&bit_equal_to_zero, 1); - } - } -*/ - return status; + //int32_t bit_equal_to_zero; + h264_Status status = H264_STATUS_OK; + + //removing warning + payloadSize = payloadSize; + + switch (payloadType) + { + case SEI_BUF_PERIOD: + status = h264_sei_buffering_period(parent, pInfo); + break; + case SEI_PIC_TIMING: + status = h264_sei_pic_timing(parent, pInfo); + break; + case SEI_PAN_SCAN: + status = h264_sei_pan_scan(parent, pInfo); + break; + case SEI_FILLER_PAYLOAD: + status = h264_sei_filler_payload(parent, pInfo, payloadSize); + break; + case SEI_REG_USERDATA: + status = h264_sei_userdata_reg(parent, pInfo, payloadSize); + break; + case SEI_UNREG_USERDATA: + status = h264_sei_userdata_unreg(parent, pInfo, payloadSize); + break; + case SEI_RECOVERY_POINT: + h264_sei_recovery_point(parent, pInfo); + break; + case SEI_DEC_REF_PIC_MARKING_REP: + status = h264_sei_dec_ref_pic_marking_rep(parent, pInfo); + break; + case SEI_SPARE_PIC: + status = h264_sei_spare_pic(parent, pInfo); + break; + case SEI_SCENE_INFO: + status = h264_sei_scene_info(parent, pInfo); + break; + case SEI_SUB_SEQ_INFO: + status = h264_sei_sub_seq_info(parent, pInfo); + break; + case SEI_SUB_SEQ_LAYER: + status = h264_sei_sub_seq_layer(parent, pInfo); + break; + case SEI_SUB_SEQ: + status = h264_sei_sub_seq(parent, pInfo); + break; + case SEI_FULL_FRAME_FREEZE: + status = h264_sei_full_frame_freeze(parent, pInfo); + break; + case SEI_FULL_FRAME_FREEZE_RELEASE: + h264_sei_full_frame_freeze_release(parent, pInfo); + break; + case SEI_FULL_FRAME_SNAPSHOT: + status = h264_sei_full_frame_snapshot(parent, pInfo); + break; + case SEI_PROGRESSIVE_SEGMENT_START: + status = h264_sei_progressive_segement_start(parent, pInfo); + break; + case SEI_PROGRESSIVE_SEGMENT_END: + status = h264_sei_progressive_segment_end(parent, pInfo); + break; + case SEI_MOTION_CONSTRAINED_SLICE_GRP_SET: + status = h264_sei_motion_constrained_slice_grp_set(parent, pInfo); + break; + case SEI_FILM_GRAIN_CHARACTERISTICS: + status = h264_sei_film_grain_characteristics(parent, pInfo); + break; + case SEI_DEBLK_FILTER_DISPLAY_PREFERENCE: + status = h264_sei_deblocking_filter_display_preferences(parent, pInfo); + break; + case SEI_STEREO_VIDEO_INFO: + status = h264_sei_stereo_video_info(parent, pInfo); + break; + default: + status = (h264_Status)h264_sei_reserved_sei_message(parent, pInfo, payloadSize); + break; + } + + /* + viddec_pm_get_bits(parent, (uint32_t *)&tmp, 1); + + if(tmp == 0x1) // if byte is not aligned + { + while(pInfo->bitoff != 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&bit_equal_to_zero, 1); + } + } + */ + return status; } /* ------------------------------------------------------------------------------------------ */ @@ -1056,78 +1056,78 @@ h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtyp /* 
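The hunk below rewrites h264_Parse_Supplemental_Enhancement_Information_Message(), where the while (next_8_bits == 0xFF) loops implement SEI framing: payload type and payload size are each coded as a run of 0xFF bytes plus one terminating byte, so the decoded value is 255 * N + last_byte, and the message loop stops once the next byte is the RBSP trailing pattern 0x80. A sketch of the same decoding over a plain byte buffer (sei_read_ff_coded is illustrative, not a library function):

#include <stdint.h>
#include <stddef.h>

static uint32_t sei_read_ff_coded(const uint8_t *buf, size_t len, size_t *idx)
{
    uint32_t value = 0;
    while (*idx < len && buf[*idx] == 0xFF) {   /* each 0xFF byte adds 255 */
        value += 255;
        (*idx)++;
    }
    if (*idx < len)
        value += buf[(*idx)++];                 /* terminating byte < 0xFF */
    return value;
}

For example, the byte sequence FF FF 2A decodes to 255 + 255 + 42 = 552.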
------------------------------------------------------------------------------------------ */ h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void *parent, h264_Info* pInfo) { - h264_Status status = H264_STATUS_OK; - int32_t payload_type, payload_size; - uint32_t next_8_bits = 0,bits_offset=0,byte_offset = 0; - uint8_t is_emul = 0; - int32_t bits_operation_result = 0; - - do { - //// payload_type - payload_type = 0; - viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); - while (next_8_bits == 0xFF) - { - bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); - if(-1 == bits_operation_result) - { - status = H264_STATUS_SEI_ERROR; - return status; - } - payload_type += 255; - - } - //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); - payload_type += next_8_bits; - - //// payload_size - payload_size = 0; - viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); - while (next_8_bits == 0xFF) - { - payload_size += 255; - bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); - if(-1 == bits_operation_result) - { - status = H264_STATUS_SEI_ERROR; - return status; - } - } - //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); - payload_size += next_8_bits; - - //PRINTF(MFD_NONE, " SEI: payload type = %d, payload size = %d \n", payload_type, payload_size); - - - ///////////////////////////////// - // Parse SEI payloads - ///////////////////////////////// - status = h264_SEI_payload(parent, pInfo, (h264_sei_payloadtype)payload_type, payload_size); - if(status != H264_STATUS_OK) - break; - - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - // OS_INFO("SEI byte_offset 3= %d, bits_offset=%d\n", byte_offset, bits_offset); - - if(bits_offset!=0) - { - viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8-bits_offset); - } - - bits_operation_result = viddec_pm_peek_bits(parent, (uint32_t *)&next_8_bits, 8); - if(-1 == bits_operation_result) - { - status = H264_STATUS_SEI_ERROR; - return status; - } - - // OS_INFO("next_8_bits = %08x\n", next_8_bits); - - }while(next_8_bits != 0x80); - - //} while (h264_More_RBSP_Data(parent, pInfo) && status == H264_STATUS_OK); - - return status; + h264_Status status = H264_STATUS_OK; + int32_t payload_type, payload_size; + uint32_t next_8_bits = 0,bits_offset=0,byte_offset = 0; + uint8_t is_emul = 0; + int32_t bits_operation_result = 0; + + do { + //// payload_type + payload_type = 0; + viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + while (next_8_bits == 0xFF) + { + bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + if (-1 == bits_operation_result) + { + status = H264_STATUS_SEI_ERROR; + return status; + } + payload_type += 255; + + } + //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + payload_type += next_8_bits; + + //// payload_size + payload_size = 0; + viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + while (next_8_bits == 0xFF) + { + payload_size += 255; + bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + if (-1 == bits_operation_result) + { + status = H264_STATUS_SEI_ERROR; + return status; + } + } + //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + payload_size += next_8_bits; + + //PRINTF(MFD_NONE, " SEI: payload type = %d, payload size = %d \n", payload_type, payload_size); + + + ///////////////////////////////// + // Parse SEI payloads + ///////////////////////////////// + status = h264_SEI_payload(parent, pInfo, 
(h264_sei_payloadtype)payload_type, payload_size); + if (status != H264_STATUS_OK) + break; + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + // OS_INFO("SEI byte_offset 3= %d, bits_offset=%d\n", byte_offset, bits_offset); + + if (bits_offset!=0) + { + viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8-bits_offset); + } + + bits_operation_result = viddec_pm_peek_bits(parent, (uint32_t *)&next_8_bits, 8); + if (-1 == bits_operation_result) + { + status = H264_STATUS_SEI_ERROR; + return status; + } + + // OS_INFO("next_8_bits = %08x\n", next_8_bits); + + } while (next_8_bits != 0x80); + + //} while (h264_More_RBSP_Data(parent, pInfo) && status == H264_STATUS_OK); + + return status; } #endif diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c index 29340ac..de34811 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c @@ -13,37 +13,37 @@ extern int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, /*-----------------------------------------------------------------------------------------*/ h264_Status h264_Parse_Slice_Header_1(void *parent,h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) { - h264_Status ret = H264_STATUS_ERROR; - - //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; - int32_t slice_type =0; - uint32_t data =0; - - do { - ///// first_mb_in_slice - SliceHeader->first_mb_in_slice = h264_GetVLCElement(parent, pInfo, false); - - ///// slice_type - slice_type = h264_GetVLCElement(parent, pInfo, false); - SliceHeader->slice_type = (slice_type%5); - - if(SliceHeader->slice_type > h264_PtypeI) { - ret = H264_STATUS_NOTSUPPORT; - break; - } - - - ////// pic_parameter_id - data = h264_GetVLCElement(parent, pInfo, false); - if(data > MAX_PIC_PARAMS) { - ret = H264_PPS_INVALID_PIC_ID; - break; - } - SliceHeader->pic_parameter_id = (uint8_t)data; - ret = H264_STATUS_OK; - }while(0); - - return ret; + h264_Status ret = H264_STATUS_ERROR; + + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + int32_t slice_type =0; + uint32_t data =0; + + do { + ///// first_mb_in_slice + SliceHeader->first_mb_in_slice = h264_GetVLCElement(parent, pInfo, false); + + ///// slice_type + slice_type = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->slice_type = (slice_type%5); + + if (SliceHeader->slice_type > h264_PtypeI) { + ret = H264_STATUS_NOTSUPPORT; + break; + } + + + ////// pic_parameter_id + data = h264_GetVLCElement(parent, pInfo, false); + if (data > MAX_PIC_PARAMS) { + ret = H264_PPS_INVALID_PIC_ID; + break; + } + SliceHeader->pic_parameter_id = (uint8_t)data; + ret = H264_STATUS_OK; + } while (0); + + return ret; } /*-----------------------------------------------------------------------------------------*/ @@ -56,105 +56,105 @@ h264_Status h264_Parse_Slice_Header_1(void *parent,h264_Info* pInfo, h264_Slice_ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) { - h264_Status ret = H264_SliceHeader_ERROR; - - //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; - uint32_t code; - int32_t max_mb_num=0; - - do { - //////////////////////////////////// Slice header part 2////////////////// - - /// Frame_num - viddec_pm_get_bits(parent, &code, pInfo->active_SPS.log2_max_frame_num_minus4+4); - SliceHeader->frame_num = (int32_t)code; - - /// Picture structure - SliceHeader->structure = FRAME; - SliceHeader->field_pic_flag = 0; - 
SliceHeader->bottom_field_flag = 0; - - if(!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag)) - { - /// field_pic_flag - viddec_pm_get_bits(parent, &code, 1); - SliceHeader->field_pic_flag = (uint8_t)code; - - if(SliceHeader->field_pic_flag) - { - viddec_pm_get_bits(parent, &code, 1); - SliceHeader->bottom_field_flag = (uint8_t)code; - - SliceHeader->structure = SliceHeader->bottom_field_flag? BOTTOM_FIELD: TOP_FIELD; - } - } - - ////// Check valid or not of first_mb_in_slice - if(SliceHeader->structure == FRAME) { - max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs; - } else { - max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs/2; - } - - - ///if(pInfo->img.MbaffFrameFlag) - if(pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) { - SliceHeader->first_mb_in_slice <<=1; - } - - if(SliceHeader->first_mb_in_slice >= max_mb_num) - break; - - - if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) - { - SliceHeader->idr_pic_id = h264_GetVLCElement(parent, pInfo, false); - } - - if(pInfo->active_SPS.pic_order_cnt_type == 0) - { - viddec_pm_get_bits(parent, &code , pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4); - SliceHeader->pic_order_cnt_lsb = (uint32_t)code; - - - if((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag)) - { - SliceHeader->delta_pic_order_cnt_bottom = h264_GetVLCElement(parent, pInfo, true); - } - else - { - SliceHeader->delta_pic_order_cnt_bottom = 0; - } - } - - if((pInfo->active_SPS.pic_order_cnt_type == 1) && !(pInfo->active_SPS.delta_pic_order_always_zero_flag)) - { - SliceHeader->delta_pic_order_cnt[0] = h264_GetVLCElement(parent, pInfo, true); - if((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag)) - { - SliceHeader->delta_pic_order_cnt[1] = h264_GetVLCElement(parent, pInfo, true); - } - } - - if(pInfo->active_PPS.redundant_pic_cnt_present_flag) - { - SliceHeader->redundant_pic_cnt = h264_GetVLCElement(parent, pInfo, false); - if(SliceHeader->redundant_pic_cnt > 127) - break; - } else { - SliceHeader->redundant_pic_cnt = 0; - } - - ret = H264_STATUS_OK; - } while (0); - - //////////// FMO is not supported curently, so comment out the following code - //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) ) - //{ - // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile - //} - - return ret; + h264_Status ret = H264_SliceHeader_ERROR; + + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + uint32_t code; + int32_t max_mb_num=0; + + do { + //////////////////////////////////// Slice header part 2////////////////// + + /// Frame_num + viddec_pm_get_bits(parent, &code, pInfo->active_SPS.log2_max_frame_num_minus4+4); + SliceHeader->frame_num = (int32_t)code; + + /// Picture structure + SliceHeader->structure = FRAME; + SliceHeader->field_pic_flag = 0; + SliceHeader->bottom_field_flag = 0; + + if (!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag)) + { + /// field_pic_flag + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->field_pic_flag = (uint8_t)code; + + if (SliceHeader->field_pic_flag) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->bottom_field_flag = (uint8_t)code; + + SliceHeader->structure = SliceHeader->bottom_field_flag? 
BOTTOM_FIELD: TOP_FIELD; + } + } + + ////// Check valid or not of first_mb_in_slice + if (SliceHeader->structure == FRAME) { + max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs; + } else { + max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs/2; + } + + + ///if(pInfo->img.MbaffFrameFlag) + if (pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) { + SliceHeader->first_mb_in_slice <<=1; + } + + if (SliceHeader->first_mb_in_slice >= max_mb_num) + break; + + + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + SliceHeader->idr_pic_id = h264_GetVLCElement(parent, pInfo, false); + } + + if (pInfo->active_SPS.pic_order_cnt_type == 0) + { + viddec_pm_get_bits(parent, &code , pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4); + SliceHeader->pic_order_cnt_lsb = (uint32_t)code; + + + if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt_bottom = h264_GetVLCElement(parent, pInfo, true); + } + else + { + SliceHeader->delta_pic_order_cnt_bottom = 0; + } + } + + if ((pInfo->active_SPS.pic_order_cnt_type == 1) && !(pInfo->active_SPS.delta_pic_order_always_zero_flag)) + { + SliceHeader->delta_pic_order_cnt[0] = h264_GetVLCElement(parent, pInfo, true); + if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt[1] = h264_GetVLCElement(parent, pInfo, true); + } + } + + if (pInfo->active_PPS.redundant_pic_cnt_present_flag) + { + SliceHeader->redundant_pic_cnt = h264_GetVLCElement(parent, pInfo, false); + if (SliceHeader->redundant_pic_cnt > 127) + break; + } else { + SliceHeader->redundant_pic_cnt = 0; + } + + ret = H264_STATUS_OK; + } while (0); + + //////////// FMO is not supported currently, so comment out the following code + //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) ) + //{ + // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile + //} + + return ret; } /*-----------------------------------------------------------------------------------------*/ @@ -164,174 +164,174 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) { - h264_Status ret = H264_SliceHeader_ERROR; - - //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; - int32_t slice_alpha_c0_offset, slice_beta_offset; - uint32_t code; - uint32_t bits_offset =0, byte_offset =0; - uint8_t is_emul =0; - - do { - /// direct_spatial_mv_pred_flag - if(SliceHeader->slice_type == h264_PtypeB) - { - viddec_pm_get_bits(parent, &code , 1); -
1; - if(SliceHeader->slice_type == h264_PtypeB) - { - SliceHeader->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1; - } - } - } - - if(SliceHeader->slice_type != h264_PtypeB) { - SliceHeader->num_ref_idx_l1_active = 0; - } - - if((SliceHeader->num_ref_idx_l0_active > MAX_NUM_REF_FRAMES) || (SliceHeader->num_ref_idx_l1_active > MAX_NUM_REF_FRAMES)) - { - break; - } - - if(h264_Parse_Ref_Pic_List_Reordering(parent, pInfo, SliceHeader) != H264_STATUS_OK) - { - break; - } - - - //// - //// Parse Pred_weight_table but not store it becasue it will be reparsed in HW - //// - if(((pInfo->active_PPS.weighted_pred_flag) && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) - { - - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - - pInfo->h264_pwt_enabled = 1; - pInfo->h264_pwt_start_byte_offset = byte_offset; - pInfo->h264_pwt_start_bit_offset = bits_offset; - - if(h264_Parse_Pred_Weight_Table(parent, pInfo, SliceHeader) != H264_STATUS_OK) - { - break; - } - - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - - if(0 == bits_offset) - { - pInfo->h264_pwt_end_byte_offset = byte_offset-1; - pInfo->h264_pwt_end_bit_offset = 8; - } - else - { - pInfo->h264_pwt_end_byte_offset = byte_offset; - pInfo->h264_pwt_end_bit_offset = bits_offset; - } - - } - - - - //// - //// Parse Ref_pic marking if there - //// - if(SliceHeader->nal_ref_idc != 0) - { - if(h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, SliceHeader) != H264_STATUS_OK) - { - break; - } - } - - if((pInfo->active_PPS.entropy_coding_mode_flag) && (SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) - { - SliceHeader->cabac_init_idc = h264_GetVLCElement(parent, pInfo, false); - } - else - { - SliceHeader->cabac_init_idc = 0; - } - - if(SliceHeader->cabac_init_idc > 2) - { - break; - } - - SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true); - if( (SliceHeader->slice_qp_delta > (25-pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26+pInfo->active_PPS.pic_init_qp_minus26))) - break; - - - if((SliceHeader->slice_type == h264_PtypeSP)|| (SliceHeader->slice_type == h264_PtypeSI) ) - { - if(SliceHeader->slice_type == h264_PtypeSP) - { - viddec_pm_get_bits(parent, &code, 1); - SliceHeader->sp_for_switch_flag = (uint8_t)code; - - } - SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true); - - if( (SliceHeader->slice_qs_delta > (25-pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) - break; - } - - if(pInfo->active_PPS.deblocking_filter_control_present_flag) - { - SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false); - if(SliceHeader->disable_deblocking_filter_idc != 1) - { - SliceHeader->slice_alpha_c0_offset_div2 = h264_GetVLCElement(parent, pInfo, true); - slice_alpha_c0_offset = SliceHeader->slice_alpha_c0_offset_div2 << 1; - if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12) { + h264_Status ret = H264_SliceHeader_ERROR; + + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + int32_t slice_alpha_c0_offset, slice_beta_offset; + uint32_t code; + uint32_t bits_offset =0, byte_offset =0; + uint8_t is_emul =0; + + do { + /// direct_spatial_mv_pred_flag + if (SliceHeader->slice_type == h264_PtypeB) + { + viddec_pm_get_bits(parent, &code , 1); + 
SliceHeader->direct_spatial_mv_pred_flag = (uint8_t)code; + } + else + { + SliceHeader->direct_spatial_mv_pred_flag = 0; + } + + // + // Reset ref_idx and override it if present + // + SliceHeader->num_ref_idx_l0_active = pInfo->active_PPS.num_ref_idx_l0_active; + SliceHeader->num_ref_idx_l1_active = pInfo->active_PPS.num_ref_idx_l1_active; + + if ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP) || (SliceHeader->slice_type == h264_PtypeB)) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->num_ref_idx_active_override_flag = (uint8_t)code; + + if (SliceHeader->num_ref_idx_active_override_flag) + { + SliceHeader->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false) + 1; + if (SliceHeader->slice_type == h264_PtypeB) + { + SliceHeader->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1; + } + } + } + + if (SliceHeader->slice_type != h264_PtypeB) { + SliceHeader->num_ref_idx_l1_active = 0; + } + + if ((SliceHeader->num_ref_idx_l0_active > MAX_NUM_REF_FRAMES) || (SliceHeader->num_ref_idx_l1_active > MAX_NUM_REF_FRAMES)) + { + break; + } + + if (h264_Parse_Ref_Pic_List_Reordering(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + break; + } + + + //// + //// Parse Pred_weight_table but do not store it because it will be reparsed in HW + //// + if (((pInfo->active_PPS.weighted_pred_flag) && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) + { + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + pInfo->h264_pwt_enabled = 1; + pInfo->h264_pwt_start_byte_offset = byte_offset; + pInfo->h264_pwt_start_bit_offset = bits_offset; + + if (h264_Parse_Pred_Weight_Table(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + break; + } + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + if (0 == bits_offset) + { + pInfo->h264_pwt_end_byte_offset = byte_offset-1; + pInfo->h264_pwt_end_bit_offset = 8; + } + else + { + pInfo->h264_pwt_end_byte_offset = byte_offset; + pInfo->h264_pwt_end_bit_offset = bits_offset; + } + + } + + + + //// + //// Parse Ref_pic marking if present + //// + if (SliceHeader->nal_ref_idc != 0) + { + if (h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { break; } + } + + if ((pInfo->active_PPS.entropy_coding_mode_flag) && (SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) + { + SliceHeader->cabac_init_idc = h264_GetVLCElement(parent, pInfo, false); + } + else + { + SliceHeader->cabac_init_idc = 0; + } + + if (SliceHeader->cabac_init_idc > 2) + { + break; + } + + SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true); + if ( 
(SliceHeader->slice_qp_delta > (25-pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26+pInfo->active_PPS.pic_init_qp_minus26))) + break; + + + if ((SliceHeader->slice_type == h264_PtypeSP)|| (SliceHeader->slice_type == h264_PtypeSI) ) + { + if (SliceHeader->slice_type == h264_PtypeSP) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->sp_for_switch_flag = (uint8_t)code; + + } + SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true); + + if ( (SliceHeader->slice_qs_delta > (25-pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) + break; + } + + if (pInfo->active_PPS.deblocking_filter_control_present_flag) + { + SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false); + if (SliceHeader->disable_deblocking_filter_idc != 1) + { + SliceHeader->slice_alpha_c0_offset_div2 = h264_GetVLCElement(parent, pInfo, true); + slice_alpha_c0_offset = SliceHeader->slice_alpha_c0_offset_div2 << 1; + if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12) { + break; + } + + SliceHeader->slice_beta_offset_div2 = h264_GetVLCElement(parent, pInfo, true); + slice_beta_offset = SliceHeader->slice_beta_offset_div2 << 1; + if (slice_beta_offset < -12 || slice_beta_offset > 12) { + break; + } + } + else + { + SliceHeader->slice_alpha_c0_offset_div2 = 0; + SliceHeader->slice_beta_offset_div2 = 0; + } + } + + ret = H264_STATUS_OK; + } while (0); + + //////////// FMO is not supported currently, so comment out the following code + //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) ) + //{ + // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile + //} + + return ret; } @@ -341,7 +341,7 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice // specify the change from the initial reference picture lists to the reference picture lists to be used // for decoding the slice -// reordering_of_pic_nums_idc: +// reordering_of_pic_nums_idc: // 0: abs_diff_pic_num_minus1 is present and corresponds to a difference to subtract from a picture number prediction value // 1: abs_diff_pic_num_minus1 is present and corresponds to a difference to add to a picture number prediction value // 2: long_term_pic_num is present and specifies the long-term picture number for a reference picture @@ -351,176 +351,176 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) { - //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; - int32_t reorder= -1; - uint32_t code; - - - if((SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) - { - viddec_pm_get_bits(parent, &code, 1); - SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag = (uint8_t)code; - - if(SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag) - { - - reorder= -1; - do - { - reorder++; - - if(reorder > MAX_NUM_REF_FRAMES) - { - return H264_SliceHeader_ERROR; - } - - SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); - if((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1)) - { - 
SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); - } - else if (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 2) - { - SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); - } - - }while(SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] != 3); - } - } - - if(SliceHeader->slice_type == h264_PtypeB) - { - viddec_pm_get_bits(parent, &code, 1); - SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag = (uint8_t)code; - - if(SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag) - { - - reorder = -1; - do - { - reorder++; - if(reorder > MAX_NUM_REF_FRAMES) - { - return H264_SliceHeader_ERROR; - } - SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); - if((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1)) - { - SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); - } - else if (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 2) - { - SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); - } - }while(SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] != 3); - } - } - - //currently just two reference frames but in case mroe than two, then should use an array for the above structures that is why reorder - return H264_STATUS_OK; - + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + int32_t reorder= -1; + uint32_t code; + + + if ((SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag = (uint8_t)code; + + if (SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag) + { + + reorder= -1; + do + { + reorder++; + + if (reorder > MAX_NUM_REF_FRAMES) + { + return H264_SliceHeader_ERROR; + } + + SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); + if ((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1)) + { + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); + } + else if (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 2) + { + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); + } + + } while (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] != 3); + } + } + + if (SliceHeader->slice_type == h264_PtypeB) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag = (uint8_t)code; + + if (SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag) + { + + reorder = -1; + do + { + reorder++; + if (reorder > MAX_NUM_REF_FRAMES) + { + return H264_SliceHeader_ERROR; + } + SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); + if ((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1)) + { + SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); + } + else if 
(SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 2) + { + SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); + } + } while (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] != 3); + } + } + + //currently just two reference lists, but in case more than two are used, the above structures should become arrays; that is why reorder is tracked + return H264_STATUS_OK; + } #ifdef VBP h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) { - uint32_t i =0, j=0; - uint32_t flag; - - SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(parent, pInfo, false); - - if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0) - { - SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(parent,pInfo, false); - } - - for(i=0; i< SliceHeader->num_ref_idx_l0_active; i++) - { - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - SliceHeader->sh_predwttbl.luma_weight_l0_flag = flag; - - if(SliceHeader->sh_predwttbl.luma_weight_l0_flag) - { - SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(parent, pInfo, true); - SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(parent, pInfo, true); - } - else + uint32_t i =0, j=0; + uint32_t flag; + + SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(parent, pInfo, false); + + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(parent,pInfo, false); + } + + for (i=0; i< SliceHeader->num_ref_idx_l0_active; i++) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.luma_weight_l0_flag = flag; + + if (SliceHeader->sh_predwttbl.luma_weight_l0_flag) + { + SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(parent, pInfo, true); + } + else { SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0; } - if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0) - { - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - SliceHeader->sh_predwttbl.chroma_weight_l0_flag = flag; - - if(SliceHeader->sh_predwttbl.chroma_weight_l0_flag) - { - for(j=0; j <2; j++) - { - SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); - SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); - } - } - else - { - for(j=0; j <2; j++) - { - SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); - SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; - } - } - } - - } - - if(SliceHeader->slice_type == h264_PtypeB) - { - for(i=0; i< SliceHeader->num_ref_idx_l1_active; i++) - { - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - SliceHeader->sh_predwttbl.luma_weight_l1_flag = flag; - - if(SliceHeader->sh_predwttbl.luma_weight_l1_flag) - { - SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(parent, pInfo, true); - SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(parent, pInfo, true); - } - else - { - SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); - SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; - } - - if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0) - { - viddec_pm_get_bits(parent, 
(uint32_t *)&flag, 1); - SliceHeader->sh_predwttbl.chroma_weight_l1_flag = flag; - - if(SliceHeader->sh_predwttbl.chroma_weight_l1_flag) - { - for(j=0; j <2; j++) - { - SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(parent, pInfo, true); - SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(parent, pInfo, true); - } - } - else - { - for(j=0; j <2; j++) - { - SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); - SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; - } - } - } - - } - } - - return H264_STATUS_OK; + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.chroma_weight_l0_flag = flag; + + if (SliceHeader->sh_predwttbl.chroma_weight_l0_flag) + { + for (j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for (j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; + } + } + } + + } + + if (SliceHeader->slice_type == h264_PtypeB) + { + for (i=0; i< SliceHeader->num_ref_idx_l1_active; i++) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.luma_weight_l1_flag = flag; + + if (SliceHeader->sh_predwttbl.luma_weight_l1_flag) + { + SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(parent, pInfo, true); + } + else + { + SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; + } + + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.chroma_weight_l1_flag = flag; + + if (SliceHeader->sh_predwttbl.chroma_weight_l1_flag) + { + for (j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for (j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; + } + } + } + + } + } + + return H264_STATUS_OK; } ///// End of h264_Parse_Pred_Weight_Table #else @@ -535,125 +535,125 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) { - uint32_t i =0, j=0; - uint32_t flag, val; - //h264_Slice_Header_t* SliceHeader = &pInfo->SPS.SliceHeader; - - //SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "luma_log2_weight_denom"); - val = h264_GetVLCElement(parent, pInfo, false); - - if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0) - { - //SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "chroma_log2_weight_denom"); - val = h264_GetVLCElement(parent,pInfo, false); - } - - for(i=0; i< SliceHeader->num_ref_idx_l0_active; i++) - { - //SliceHeader->sh_predwttbl.luma_weight_l0_flag = h264_GetBits(pInfo, 1, 
"luma_weight_l0_flag"); - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - - //if(SliceHeader->sh_predwttbl.luma_weight_l0_flag) - if(flag) - { - //SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l0"); - val = h264_GetVLCElement(parent, pInfo, true); - //SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l0"); - val = h264_GetVLCElement(parent, pInfo, true); - } - else + uint32_t i =0, j=0; + uint32_t flag, val; + //h264_Slice_Header_t* SliceHeader = &pInfo->SPS.SliceHeader; + + //SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "luma_log2_weight_denom"); + val = h264_GetVLCElement(parent, pInfo, false); + + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + //SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "chroma_log2_weight_denom"); + val = h264_GetVLCElement(parent,pInfo, false); + } + + for (i=0; i< SliceHeader->num_ref_idx_l0_active; i++) + { + //SliceHeader->sh_predwttbl.luma_weight_l0_flag = h264_GetBits(pInfo, 1, "luma_weight_l0_flag"); + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + + //if(SliceHeader->sh_predwttbl.luma_weight_l0_flag) + if (flag) + { + //SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l0"); + val = h264_GetVLCElement(parent, pInfo, true); + //SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l0"); + val = h264_GetVLCElement(parent, pInfo, true); + } + else { //SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); //SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0; } - if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0) - { - //SliceHeader->sh_predwttbl.chroma_weight_l0_flag = h264_GetBits(pInfo, 1, "chroma_weight_l0_flag"); - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - if(flag) - { - for(j=0; j <2; j++) - { - //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l0"); - val = h264_GetVLCElement(parent, pInfo, true); - //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l0"); - val = h264_GetVLCElement(parent, pInfo, true); - } - } - else - { - for(j=0; j <2; j++) - { - //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); - //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; - } - } - } - - } - - if(SliceHeader->slice_type == h264_PtypeB) - { - for(i=0; i< SliceHeader->num_ref_idx_l1_active; i++) - { - //SliceHeader->sh_predwttbl.luma_weight_l1_flag = h264_GetBits(pInfo, 1, "luma_weight_l1_flag"); - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - if(flag) - { - //SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l1"); - val = h264_GetVLCElement(parent, pInfo, true); - //SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l1"); - val = h264_GetVLCElement(parent, pInfo, true); - } - else - { - //SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); - //SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; - } - - if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0) - { - //SliceHeader->sh_predwttbl.chroma_weight_l1_flag = h264_GetBits(pInfo, 1, "chroma_weight_l1_flag"); - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - if(flag) - { - for(j=0; j <2; j++) - { - 
//SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l1"); - val = h264_GetVLCElement(parent, pInfo, true); - //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l1"); - val = h264_GetVLCElement(parent, pInfo, true); - } - } - else - { - for(j=0; j <2; j++) - { - //SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); - //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; - } - } - } - - } - } - - return H264_STATUS_OK; + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + //SliceHeader->sh_predwttbl.chroma_weight_l0_flag = h264_GetBits(pInfo, 1, "chroma_weight_l0_flag"); + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + if (flag) + { + for (j=0; j <2; j++) + { + //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l0"); + val = h264_GetVLCElement(parent, pInfo, true); + //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l0"); + val = h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for (j=0; j <2; j++) + { + //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; + } + } + } + + } + + if (SliceHeader->slice_type == h264_PtypeB) + { + for (i=0; i< SliceHeader->num_ref_idx_l1_active; i++) + { + //SliceHeader->sh_predwttbl.luma_weight_l1_flag = h264_GetBits(pInfo, 1, "luma_weight_l1_flag"); + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + if (flag) + { + //SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l1"); + val = h264_GetVLCElement(parent, pInfo, true); + //SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l1"); + val = h264_GetVLCElement(parent, pInfo, true); + } + else + { + //SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + //SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; + } + + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + //SliceHeader->sh_predwttbl.chroma_weight_l1_flag = h264_GetBits(pInfo, 1, "chroma_weight_l1_flag"); + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + if (flag) + { + for (j=0; j <2; j++) + { + //SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l1"); + val = h264_GetVLCElement(parent, pInfo, true); + //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l1"); + val = h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for (j=0; j <2; j++) + { + //SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; + } + } + } + + } + } + + return H264_STATUS_OK; } ///// End of h264_Parse_Pred_Weight_Table #endif /*--------------------------------------------------------------------------------------------------*/ // The syntax elements specify marking of the reference pictures. 
-// 1)IDR: no_output_of_prior_pics_flag,
-// long_term_reference_flag,
+// 1)IDR: no_output_of_prior_pics_flag,
+// long_term_reference_flag,
 // 2)NonIDR: adaptive_ref_pic_marking_mode_flag,
-// memory_management_control_operation,
-// difference_of_pic_nums_minus1,
+// memory_management_control_operation,
+// difference_of_pic_nums_minus1,
 // long_term_frame_idx,
-// long_term_pic_num, and
-// max_long_term_frame_idx_plus1
+// long_term_pic_num, and
+// max_long_term_frame_idx_plus1
 //
 //The marking of a reference picture can be "unused for reference", "used for short-term reference", or "used for longterm
 // reference", but only one among these three.
@@ -662,78 +662,78 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli
 h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader)
 {
-    //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
-    uint8_t i = 0;
-    uint32_t code = 0;
-
-    if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
-    {
-        viddec_pm_get_bits(parent, &code, 1);
-        SliceHeader->sh_dec_refpic.no_output_of_prior_pics_flag = (uint8_t)code;
-
-        viddec_pm_get_bits(parent, &code, 1);
-        SliceHeader->sh_dec_refpic.long_term_reference_flag = (uint8_t)code;
-        pInfo->img.long_term_reference_flag = (uint8_t)code;
-    }
-    else
-    {
-        viddec_pm_get_bits(parent, &code, 1);
-        SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag = (uint8_t)code;
-
-        ///////////////////////////////////////////////////////////////////////////////////////
-        //adaptive_ref_pic_marking_mode_flag   Reference picture marking mode specified
-        //   0   Sliding window reference picture marking mode: A marking mode
-        //       providing a first-in first-out mechanism for short-term reference pictures.
-        //   1   Adaptive reference picture marking mode: A reference picture
-        //       marking mode providing syntax elements to specify marking of
-        //       reference pictures as "unused for reference" and to assign long-term
-        //       frame indices.
-        ///////////////////////////////////////////////////////////////////////////////////////
-
-        if(SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag)
-        {
-            do
-            {
-                SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = h264_GetVLCElement(parent, pInfo, false);
-                if((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3))
-                {
-                    SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
-                }
-
-                if(SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2)
-                {
-                    SliceHeader->sh_dec_refpic.long_term_pic_num[i] = h264_GetVLCElement(parent, pInfo, false);
-                }
-
-                if((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6))
-                {
-                    SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = h264_GetVLCElement(parent, pInfo, false);
-                }
-
-                if(SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4)
-                {
-                    SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = h264_GetVLCElement(parent, pInfo, false);
-                }
-
-                if(SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5)
-                {
-                    pInfo->img.curr_has_mmco_5 = 1;
-                }
-
-                if(i>NUM_MMCO_OPERATIONS) {
-                    return H264_STATUS_ERROR;
-                }
-
-            }while(SliceHeader->sh_dec_refpic.memory_management_control_operation[i++] != 0);
-        }
-    }
-
-
-
-
-    SliceHeader->sh_dec_refpic.dec_ref_pic_marking_count = i;
-
-    return H264_STATUS_OK;
+    //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+    uint8_t i = 0;
+    uint32_t code = 0;
+
+    if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+    {
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_dec_refpic.no_output_of_prior_pics_flag = (uint8_t)code;
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_dec_refpic.long_term_reference_flag = (uint8_t)code;
+        pInfo->img.long_term_reference_flag = (uint8_t)code;
+    }
+    else
+    {
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag = (uint8_t)code;
+
+        ///////////////////////////////////////////////////////////////////////////////////////
+        //adaptive_ref_pic_marking_mode_flag   Reference picture marking mode specified
+        //   0   Sliding window reference picture marking mode: A marking mode
+        //       providing a first-in first-out mechanism for short-term reference pictures.
+        //   1   Adaptive reference picture marking mode: A reference picture
+        //       marking mode providing syntax elements to specify marking of
+        //       reference pictures as "unused for reference" and to assign long-term
+        //       frame indices.
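
Mode 0 consumes no further slice-header syntax: the decoder itself retires the oldest short-term reference once the DPB holds num_ref_frames references. A rough sketch of that sliding-window rule, assuming a simplified flat DPB layout rather than this parser's h264_DecodedPictureBuffer, is:

/* Sketch: sliding-window marking (mode 0). short_term[] is assumed
 * ordered oldest-first; evicting means marking "unused for reference". */
typedef struct {
    int short_term[16];
    int num_short_term;
    int num_long_term;
    int num_ref_frames;   /* cap taken from the active SPS */
} dpb_sketch_t;

static void sliding_window_mark(dpb_sketch_t *dpb)
{
    if (dpb->num_short_term + dpb->num_long_term >= dpb->num_ref_frames &&
        dpb->num_short_term > 0)
    {
        int i;
        for (i = 1; i < dpb->num_short_term; i++)   /* drop the oldest */
            dpb->short_term[i - 1] = dpb->short_term[i];
        dpb->num_short_term--;
    }
}

Mode 1, parsed in the do/while loop that follows, instead applies explicit memory_management_control_operation commands until operation 0 ends the list.
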
+ /////////////////////////////////////////////////////////////////////////////////////// + + if (SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) + { + do + { + SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = h264_GetVLCElement(parent, pInfo, false); + if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3)) + { + SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + } + + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2) + { + SliceHeader->sh_dec_refpic.long_term_pic_num[i] = h264_GetVLCElement(parent, pInfo, false); + } + + if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6)) + { + SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = h264_GetVLCElement(parent, pInfo, false); + } + + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4) + { + SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = h264_GetVLCElement(parent, pInfo, false); + } + + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5) + { + pInfo->img.curr_has_mmco_5 = 1; + } + + if (i>NUM_MMCO_OPERATIONS) { + return H264_STATUS_ERROR; + } + + } while (SliceHeader->sh_dec_refpic.memory_management_control_operation[i++] != 0); + } + } + + + + + SliceHeader->sh_dec_refpic.dec_ref_pic_marking_count = i; + + return H264_STATUS_OK; } diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c index 0276eaa..8cedd1f 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c @@ -6,513 +6,513 @@ /// SPS extension unit (unit_type = 13) -/// +/// #if 0 h264_Status h264_Parse_SeqParameterSet_Extension(void *parent,h264_Info * pInfo) { - /*h264_SPS_Extension_RBSP_t* SPS_ext = pInfo->p_active_SPS_ext; - - SPS_ext->seq_parameter_set_id = h264_GetVLCElement(pInfo, false); - if(SPS_ext->seq_parameter_set_id > MAX_SEQ_PARAMS-1) - { - return H264_SPS_ERROR; - } - SPS_ext->aux_format_idc = h264_GetVLCElement(pInfo, false); - if(SPS_ext->aux_format_idc > 3) - { - return H264_SPS_ERROR; - } - if(SPS_ext->aux_format_idc != 0) - { - SPS_ext->bit_depth_aux_minus8 = h264_GetVLCElement(pInfo, false); - if(SPS_ext->bit_depth_aux_minus8 + 8 > 12) - { - return H264_SPS_ERROR; - } - - SPS_ext->alpha_incr_flag = h264_GetBits(pInfo, 1, "alpha_incr_flag"); - if(SPS_ext->alpha_incr_flag > 1) - { - return H264_SPS_ERROR; - } - - SPS_ext->alpha_opaque_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_opaque_value"); //+8 to get the bit_depth value - SPS_ext->alpha_transparent_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_transparent_value"); //+8 to get the bit_depth value - } - SPS_ext->additional_extension_flag = h264_GetBits(pInfo, 1, "additional_extension_flag"); -*/ - return H264_STATUS_OK; + /*h264_SPS_Extension_RBSP_t* SPS_ext = pInfo->p_active_SPS_ext; + + SPS_ext->seq_parameter_set_id = h264_GetVLCElement(pInfo, false); + if(SPS_ext->seq_parameter_set_id > MAX_SEQ_PARAMS-1) + { + return H264_SPS_ERROR; + } + SPS_ext->aux_format_idc = h264_GetVLCElement(pInfo, false); + if(SPS_ext->aux_format_idc > 3) + { + return H264_SPS_ERROR; + } + if(SPS_ext->aux_format_idc != 0) + { + SPS_ext->bit_depth_aux_minus8 = 
h264_GetVLCElement(pInfo, false); + if(SPS_ext->bit_depth_aux_minus8 + 8 > 12) + { + return H264_SPS_ERROR; + } + + SPS_ext->alpha_incr_flag = h264_GetBits(pInfo, 1, "alpha_incr_flag"); + if(SPS_ext->alpha_incr_flag > 1) + { + return H264_SPS_ERROR; + } + + SPS_ext->alpha_opaque_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_opaque_value"); //+8 to get the bit_depth value + SPS_ext->alpha_transparent_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_transparent_value"); //+8 to get the bit_depth value + } + SPS_ext->additional_extension_flag = h264_GetBits(pInfo, 1, "additional_extension_flag"); + */ + return H264_STATUS_OK; } #endif h264_Status h264_Parse_HRD_Parameters(void *parent, h264_Info* pInfo, int nal_hrd,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used) { - //seq_param_set_ptr SPS = pInfo->p_active_SPS; - int32_t i = 0; - uint32_t code; - - - if(nal_hrd) - { - SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false); - - if(SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) - { - return H264_SPS_ERROR; - } - - viddec_pm_get_bits(parent, &code, 8); - pVUI_Seq_Not_Used->nal_hrd_bit_rate_scale = (uint8_t)(code>>4); - pVUI_Seq_Not_Used->nal_hrd_cpb_size_scale = (uint8_t)(code & 0xf); - - for(i=0; i<=SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; i++) - { - pVUI_Seq_Not_Used->nal_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); - pVUI_Seq_Not_Used->nal_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); - - viddec_pm_get_bits(parent, &code, 1); - pVUI_Seq_Not_Used->nal_hrd_parameters.cbr_flag[i] = (uint8_t)code; - } - - if( viddec_pm_get_bits(parent, &code, 20) == -1) - return H264_SPS_ERROR; - - SPS->sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f); - SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);; - SPS->sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);; - SPS->sps_disp.vui_seq_parameters.nal_hrd_time_offset_length = (uint8_t)(code&0x1f);; - - } - else - { - SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false); - - if(SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) - { - return H264_SPS_ERROR; - } - - viddec_pm_get_bits(parent, &code, 8); - pVUI_Seq_Not_Used->vcl_hrd_bit_rate_scale = (uint8_t)(code>>4); - pVUI_Seq_Not_Used->vcl_hrd_cpb_size_scale = (uint8_t)(code&0xf); - - for(i=0; i<=SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; i++) - { - pVUI_Seq_Not_Used->vcl_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); - pVUI_Seq_Not_Used->vcl_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); - viddec_pm_get_bits(parent, &code, 1); - pVUI_Seq_Not_Used->vcl_hrd_parameters.cbr_flag[i] = (uint8_t)code; - } - - if( viddec_pm_get_bits(parent, &code, 20) == -1) - return H264_SPS_ERROR; - - SPS->sps_disp.vui_seq_parameters.vcl_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f); - SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);; - SPS->sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);; - SPS->sps_disp.vui_seq_parameters.vcl_hrd_time_offset_length = 
(uint8_t)(code&0x1f);; - } - - return H264_STATUS_OK; + //seq_param_set_ptr SPS = pInfo->p_active_SPS; + int32_t i = 0; + uint32_t code; + + + if (nal_hrd) + { + SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false); + + if (SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) + { + return H264_SPS_ERROR; + } + + viddec_pm_get_bits(parent, &code, 8); + pVUI_Seq_Not_Used->nal_hrd_bit_rate_scale = (uint8_t)(code>>4); + pVUI_Seq_Not_Used->nal_hrd_cpb_size_scale = (uint8_t)(code & 0xf); + + for (i=0; i<=SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; i++) + { + pVUI_Seq_Not_Used->nal_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->nal_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->nal_hrd_parameters.cbr_flag[i] = (uint8_t)code; + } + + if ( viddec_pm_get_bits(parent, &code, 20) == -1) + return H264_SPS_ERROR; + + SPS->sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f); + SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);; + SPS->sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);; + SPS->sps_disp.vui_seq_parameters.nal_hrd_time_offset_length = (uint8_t)(code&0x1f);; + + } + else + { + SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false); + + if (SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) + { + return H264_SPS_ERROR; + } + + viddec_pm_get_bits(parent, &code, 8); + pVUI_Seq_Not_Used->vcl_hrd_bit_rate_scale = (uint8_t)(code>>4); + pVUI_Seq_Not_Used->vcl_hrd_cpb_size_scale = (uint8_t)(code&0xf); + + for (i=0; i<=SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; i++) + { + pVUI_Seq_Not_Used->vcl_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->vcl_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->vcl_hrd_parameters.cbr_flag[i] = (uint8_t)code; + } + + if ( viddec_pm_get_bits(parent, &code, 20) == -1) + return H264_SPS_ERROR; + + SPS->sps_disp.vui_seq_parameters.vcl_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f); + SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);; + SPS->sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);; + SPS->sps_disp.vui_seq_parameters.vcl_hrd_time_offset_length = (uint8_t)(code&0x1f);; + } + + return H264_STATUS_OK; } h264_Status h264_Parse_Vui_Parameters(void *parent, h264_Info* pInfo, seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used) { - h264_Status ret = H264_STATUS_OK; - //seq_param_set_ptr SPS = pInfo->p_active_SPS; - int32_t nal_hrd = 0; - uint32_t code; + h264_Status ret = H264_STATUS_OK; + //seq_param_set_ptr SPS = pInfo->p_active_SPS; + int32_t nal_hrd = 0; + uint32_t code; - do { - viddec_pm_get_bits(parent, &code, 1); - SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag = (uint8_t)code; + do { + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag = (uint8_t)code; - if(SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag) - { - 
viddec_pm_get_bits(parent, &code, 8); - SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc = (uint8_t)code; + if (SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag) + { + viddec_pm_get_bits(parent, &code, 8); + SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc = (uint8_t)code; - if(SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc == h264_AR_Extended_SAR) - { - viddec_pm_get_bits(parent, &code, 16); - SPS->sps_disp.vui_seq_parameters.sar_width = (uint16_t)code; + if (SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc == h264_AR_Extended_SAR) + { + viddec_pm_get_bits(parent, &code, 16); + SPS->sps_disp.vui_seq_parameters.sar_width = (uint16_t)code; - viddec_pm_get_bits(parent, &code, 16); - SPS->sps_disp.vui_seq_parameters.sar_height = (uint16_t)code; + viddec_pm_get_bits(parent, &code, 16); + SPS->sps_disp.vui_seq_parameters.sar_height = (uint16_t)code; - } - } + } + } - viddec_pm_get_bits(parent, &code, 1); - pVUI_Seq_Not_Used->overscan_info_present_flag = (uint8_t)code; + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->overscan_info_present_flag = (uint8_t)code; - if(pVUI_Seq_Not_Used->overscan_info_present_flag) - { - viddec_pm_get_bits(parent, &code, 1); - pVUI_Seq_Not_Used->overscan_appropriate_flag = (uint8_t)code; - } + if (pVUI_Seq_Not_Used->overscan_info_present_flag) + { + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->overscan_appropriate_flag = (uint8_t)code; + } - viddec_pm_get_bits(parent, &code, 1); - SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag = (uint8_t)code; + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag = (uint8_t)code; - if(SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag) - { - viddec_pm_get_bits(parent, &code, 3); - SPS->sps_disp.vui_seq_parameters.video_format = (uint8_t)code; + if (SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag) + { + viddec_pm_get_bits(parent, &code, 3); + SPS->sps_disp.vui_seq_parameters.video_format = (uint8_t)code; - viddec_pm_get_bits(parent, &code, 1); - pVUI_Seq_Not_Used->video_full_range_flag = (uint8_t)code; + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->video_full_range_flag = (uint8_t)code; #ifdef VBP SPS->sps_disp.vui_seq_parameters.video_full_range_flag = (uint8_t)code; #endif - viddec_pm_get_bits(parent, &code, 1); - SPS->sps_disp.vui_seq_parameters.colour_description_present_flag = (uint8_t)code; - - if(SPS->sps_disp.vui_seq_parameters.colour_description_present_flag) - { - viddec_pm_get_bits(parent, &code, 8); - SPS->sps_disp.vui_seq_parameters.colour_primaries = (uint8_t)code; + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.colour_description_present_flag = (uint8_t)code; - viddec_pm_get_bits(parent, &code, 8); - SPS->sps_disp.vui_seq_parameters.transfer_characteristics = (uint8_t)code; + if (SPS->sps_disp.vui_seq_parameters.colour_description_present_flag) + { + viddec_pm_get_bits(parent, &code, 8); + SPS->sps_disp.vui_seq_parameters.colour_primaries = (uint8_t)code; - viddec_pm_get_bits(parent, &code, 8); - pVUI_Seq_Not_Used->matrix_coefficients = (uint8_t)code; -#ifdef VBP + viddec_pm_get_bits(parent, &code, 8); + SPS->sps_disp.vui_seq_parameters.transfer_characteristics = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 8); + pVUI_Seq_Not_Used->matrix_coefficients = (uint8_t)code; +#ifdef VBP SPS->sps_disp.vui_seq_parameters.matrix_coefficients = (uint8_t)code; #endif - } - } - - viddec_pm_get_bits(parent, &code, 1); - 
pVUI_Seq_Not_Used->chroma_location_info_present_flag = (uint8_t)code; - - if(pVUI_Seq_Not_Used->chroma_location_info_present_flag) - { - pVUI_Seq_Not_Used->chroma_sample_loc_type_top_field = h264_GetVLCElement(parent, pInfo, false); - pVUI_Seq_Not_Used->chroma_sample_loc_type_bottom_field = h264_GetVLCElement(parent, pInfo, false); - } - - viddec_pm_get_bits(parent, &code, 1); - SPS->sps_disp.vui_seq_parameters.timing_info_present_flag = (uint8_t)code; - - if(SPS->sps_disp.vui_seq_parameters.timing_info_present_flag == 1) - { - viddec_pm_get_bits(parent, &code, 32); - SPS->sps_disp.vui_seq_parameters.num_units_in_tick = (uint32_t)code; - - viddec_pm_get_bits(parent, &code, 32); - SPS->sps_disp.vui_seq_parameters.time_scale = (uint32_t)code; - - viddec_pm_get_bits(parent, &code, 1); - SPS->sps_disp.vui_seq_parameters.fixed_frame_rate_flag = (uint8_t)code; - } - - viddec_pm_get_bits(parent, &code, 1); - SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag = (uint8_t)code; - - if(SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) - { - nal_hrd = 1; - ret = h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used); - } - - viddec_pm_get_bits(parent, &code, 1); - SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag = (uint8_t)code; - - if(SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1) - { - nal_hrd = 0; - ret = (h264_Status)h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used); - } - - if((SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) || (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)) - { - viddec_pm_get_bits(parent, &code, 1); - SPS->sps_disp.vui_seq_parameters.low_delay_hrd_flag = (uint8_t)code; - } - - viddec_pm_get_bits(parent, &code, 1); - SPS->sps_disp.vui_seq_parameters.pic_struct_present_flag = (uint8_t)code; - - if(viddec_pm_get_bits(parent, &code, 1) == -1) { - ret = H264_STATUS_ERROR; - break; - } - SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag = (uint8_t)code; - - if(SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag) - { - viddec_pm_get_bits(parent, &code, 1); - pVUI_Seq_Not_Used->motion_vectors_over_pic_boundaries_flag = (uint8_t)code; - - pVUI_Seq_Not_Used->max_bytes_per_pic_denom = h264_GetVLCElement(parent, pInfo, false); - pVUI_Seq_Not_Used->max_bits_per_mb_denom = h264_GetVLCElement(parent, pInfo, false); - pVUI_Seq_Not_Used->log2_max_mv_length_horizontal = h264_GetVLCElement(parent, pInfo, false); - pVUI_Seq_Not_Used->log2_max_mv_length_vertical = h264_GetVLCElement(parent, pInfo, false); - SPS->sps_disp.vui_seq_parameters.num_reorder_frames = h264_GetVLCElement(parent, pInfo, false); - SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering = h264_GetVLCElement(parent, pInfo, false); - - if(SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering == MAX_INT32_VALUE) - ret = H264_STATUS_ERROR; - } - }while (0); - - return ret; + } + } + + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->chroma_location_info_present_flag = (uint8_t)code; + + if (pVUI_Seq_Not_Used->chroma_location_info_present_flag) + { + pVUI_Seq_Not_Used->chroma_sample_loc_type_top_field = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->chroma_sample_loc_type_bottom_field = h264_GetVLCElement(parent, pInfo, false); + } + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.timing_info_present_flag = (uint8_t)code; + + if (SPS->sps_disp.vui_seq_parameters.timing_info_present_flag == 1) + { + 
viddec_pm_get_bits(parent, &code, 32); + SPS->sps_disp.vui_seq_parameters.num_units_in_tick = (uint32_t)code; + + viddec_pm_get_bits(parent, &code, 32); + SPS->sps_disp.vui_seq_parameters.time_scale = (uint32_t)code; + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.fixed_frame_rate_flag = (uint8_t)code; + } + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag = (uint8_t)code; + + if (SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) + { + nal_hrd = 1; + ret = h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used); + } + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag = (uint8_t)code; + + if (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1) + { + nal_hrd = 0; + ret = (h264_Status)h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used); + } + + if ((SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) || (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)) + { + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.low_delay_hrd_flag = (uint8_t)code; + } + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.pic_struct_present_flag = (uint8_t)code; + + if (viddec_pm_get_bits(parent, &code, 1) == -1) { + ret = H264_STATUS_ERROR; + break; + } + SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag = (uint8_t)code; + + if (SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag) + { + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->motion_vectors_over_pic_boundaries_flag = (uint8_t)code; + + pVUI_Seq_Not_Used->max_bytes_per_pic_denom = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->max_bits_per_mb_denom = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->log2_max_mv_length_horizontal = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->log2_max_mv_length_vertical = h264_GetVLCElement(parent, pInfo, false); + SPS->sps_disp.vui_seq_parameters.num_reorder_frames = h264_GetVLCElement(parent, pInfo, false); + SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering = h264_GetVLCElement(parent, pInfo, false); + + if (SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering == MAX_INT32_VALUE) + ret = H264_STATUS_ERROR; + } + } while (0); + + return ret; } h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame) { - h264_Status ret = H264_SPS_ERROR; - - int32_t i = 0, tmp = 0; - int32_t PicWidthInMbs, PicHeightInMapUnits, FrameHeightInMbs; - uint32_t code = 0; - uint32_t data = 0; - - //SPS->profile_idc = h264_GetBits(pInfo, 8, "Profile"); - viddec_pm_get_bits(parent, &code, 8); - SPS->profile_idc = (uint8_t)code; - - switch(SPS->profile_idc) - { - case h264_ProfileBaseline: - case h264_ProfileMain: - case h264_ProfileExtended: - case h264_ProfileHigh10: - case h264_ProfileHigh422: - case h264_ProfileHigh444: - case h264_ProfileHigh: - break; - default: - return H264_SPS_INVALID_PROFILE; - break; - } - - //SPS->constraint_set0_flag = h264_GetBits(pInfo, 1, "constraint_set0_flag"); - //SPS->constraint_set1_flag = h264_GetBits(pInfo, 1, "constraint_set1_flag"); //should be 1 - //SPS->constraint_set2_flag = h264_GetBits(pInfo, 1, "constraint_set2_flag"); - //SPS->constraint_set3_flag = h264_GetBits(pInfo, 1, 
"constraint_set3_flag"); - - viddec_pm_get_bits(parent, &code, 4); - SPS->constraint_set_flags = (uint8_t)code; - - //// reserved_zero_4bits - viddec_pm_get_bits(parent, (uint32_t *)&code, 4); - - viddec_pm_get_bits(parent, &code, 8); - SPS->level_idc = (uint8_t)code; - - switch(SPS->level_idc) - { - case h264_Level1b: - case h264_Level1: - case h264_Level11: - case h264_Level12: - case h264_Level13: - case h264_Level2: - case h264_Level21: - case h264_Level22: - case h264_Level3: - case h264_Level31: - case h264_Level32: - case h264_Level4: - case h264_Level41: - case h264_Level42: - case h264_Level5: - case h264_Level51: - break; - default: - return H264_SPS_INVALID_LEVEL; - } - - do { - SPS->seq_parameter_set_id = h264_GetVLCElement(parent, pInfo, false); - - //// seq_parameter_set_id ---[0,31] - if(SPS->seq_parameter_set_id > MAX_NUM_SPS -1) - break; - - if((SPS->profile_idc == h264_ProfileHigh) || (SPS->profile_idc == h264_ProfileHigh10) || - (SPS->profile_idc == h264_ProfileHigh422) || (SPS->profile_idc == h264_ProfileHigh444) ) - { - //// chroma_format_idc ---[0,3], currently we don't support 444, so [0,2] - data = h264_GetVLCElement(parent, pInfo, false); - if( data > H264_CHROMA_422) - break; - SPS->sps_disp.chroma_format_idc = (uint8_t)data; - //if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) {} - - //// bit_depth_luma_minus8 ---[0,4], -----only support 8-bit pixel - data = h264_GetVLCElement(parent, pInfo, false); - if( data) - break; - SPS->bit_depth_luma_minus8 = (uint8_t)data; - - //// bit_depth_chroma_minus8 ---[0,4] - data = h264_GetVLCElement(parent, pInfo, false); - if( data ) - break; - SPS->bit_depth_chroma_minus8 = (uint8_t)data; - - - viddec_pm_get_bits(parent, &code, 1); - SPS->lossless_qpprime_y_zero_flag = (uint8_t)code; - - viddec_pm_get_bits(parent, &code, 1); - SPS->seq_scaling_matrix_present_flag = (uint8_t)code; - - if(SPS->seq_scaling_matrix_present_flag == 1) - { - //int n_ScalingList = (SPS->sps_disp.chroma_format_idc != H264_CHROMA_444) ? 
8 : 12;
-                int n_ScalingList = 8; /// We do not support 444 currently
-
-                for(i=0; i<n_ScalingList; i++)
-                {
-                    viddec_pm_get_bits(parent, &code, 1);
-                    SPS->seq_scaling_list_present_flag[i] = (uint8_t)code;
-
-                    if(SPS->seq_scaling_list_present_flag[i])
-                    {
-                        if(i<6)
-                            h264_Scaling_List(parent, SPS->ScalingList4x4[i], 16, &SPS->UseDefaultScalingMatrix4x4Flag[i], pInfo);
-                        else
-                            h264_Scaling_List(parent, SPS->ScalingList8x8[i-6], 64, &SPS->UseDefaultScalingMatrix8x8Flag[i-6], pInfo);
-                    }
-                }
-            }
-        }
-        else
-        {
-            SPS->sps_disp.chroma_format_idc = 1;
-            SPS->seq_scaling_matrix_present_flag = 0;
-
-            SPS->bit_depth_luma_minus8 = 0;
-            SPS->bit_depth_chroma_minus8 = 0;
-            //h264_SetDefaultScalingLists(pInfo);
-        }
-
-        //// log2_max_frame_num_minus4 ---[0,12]
-        data = (h264_GetVLCElement(parent, pInfo, false));
-        if( data > 12)
-            break;
-        SPS->log2_max_frame_num_minus4 = (uint8_t)data;
-
-        //// pic_order_cnt_type ---- [0,2]
-        data = h264_GetVLCElement(parent, pInfo, false);
-        if( data > 2)
-            break;
-        SPS->pic_order_cnt_type = (uint8_t)data;
-
-
-        SPS->expectedDeltaPerPOCCycle = 0;
-        if(SPS->pic_order_cnt_type == 0) {
-            SPS->log2_max_pic_order_cnt_lsb_minus4 = h264_GetVLCElement(parent, pInfo, false);
-        } else if(SPS->pic_order_cnt_type == 1){
-            viddec_pm_get_bits(parent, &code, 1);
-            SPS->delta_pic_order_always_zero_flag = (uint8_t)code;
-
-            SPS->offset_for_non_ref_pic = h264_GetVLCElement(parent, pInfo, true);
-            SPS->offset_for_top_to_bottom_field = h264_GetVLCElement(parent, pInfo, true);
-
-            //// num_ref_frames_in_pic_order_cnt_cycle ---- [0,255]
-            data = h264_GetVLCElement(parent, pInfo, false);
-            if( data > 255)
-                break;
-            SPS->num_ref_frames_in_pic_order_cnt_cycle = (uint8_t)data;
-
-
-            //Alloc memory for frame offset -- FIXME
-            for(i=0; i< SPS->num_ref_frames_in_pic_order_cnt_cycle; i++)
-            {
-                /////SPS->offset_for_ref_frame[i] could be removed from SPS
+    h264_Status ret = H264_SPS_ERROR;
+
+    int32_t i = 0, tmp = 0;
+    int32_t PicWidthInMbs, PicHeightInMapUnits, FrameHeightInMbs;
+    uint32_t code = 0;
+    uint32_t data = 0;
+
+    //SPS->profile_idc = h264_GetBits(pInfo, 8, "Profile");
+    viddec_pm_get_bits(parent, &code, 8);
+    SPS->profile_idc = (uint8_t)code;
+
+    switch (SPS->profile_idc)
+    {
+    case h264_ProfileBaseline:
+    case h264_ProfileMain:
+    case h264_ProfileExtended:
+    case h264_ProfileHigh10:
+    case h264_ProfileHigh422:
+    case h264_ProfileHigh444:
+    case h264_ProfileHigh:
+        break;
+    default:
+        return H264_SPS_INVALID_PROFILE;
+        break;
+    }
+
+    //SPS->constraint_set0_flag = h264_GetBits(pInfo, 1, "constraint_set0_flag");
+    //SPS->constraint_set1_flag = h264_GetBits(pInfo, 1, "constraint_set1_flag"); //should be 1
+    //SPS->constraint_set2_flag = h264_GetBits(pInfo, 1, "constraint_set2_flag");
+    //SPS->constraint_set3_flag = h264_GetBits(pInfo, 1, "constraint_set3_flag");
+
+    viddec_pm_get_bits(parent, &code, 4);
+    SPS->constraint_set_flags = (uint8_t)code;
+
+    //// reserved_zero_4bits
+    viddec_pm_get_bits(parent, (uint32_t *)&code, 4);
+
+    viddec_pm_get_bits(parent, &code, 8);
+    SPS->level_idc = (uint8_t)code;
+
+    switch (SPS->level_idc)
+    {
+    case h264_Level1b:
+    case h264_Level1:
+    case h264_Level11:
+    case h264_Level12:
+    case h264_Level13:
+    case h264_Level2:
+    case h264_Level21:
+    case h264_Level22:
+    case h264_Level3:
+    case h264_Level31:
+    case h264_Level32:
+    case h264_Level4:
+    case h264_Level41:
+    case h264_Level42:
+    case h264_Level5:
+    case h264_Level51:
+        break;
+    default:
+        return H264_SPS_INVALID_LEVEL;
+    }
+
+    do {
+        SPS->seq_parameter_set_id = h264_GetVLCElement(parent, pInfo, false);
+
+        //// seq_parameter_set_id ---[0,31]
+        if (SPS->seq_parameter_set_id > MAX_NUM_SPS -1)
+            break;
+
+        if ((SPS->profile_idc == h264_ProfileHigh) || (SPS->profile_idc == h264_ProfileHigh10) ||
+            (SPS->profile_idc == h264_ProfileHigh422) || (SPS->profile_idc == h264_ProfileHigh444) )
+        {
+            //// chroma_format_idc ---[0,3], currently we don't support 444, so [0,2]
+            data = h264_GetVLCElement(parent, pInfo, false);
+            if ( data > H264_CHROMA_422)
+                break;
+            SPS->sps_disp.chroma_format_idc = (uint8_t)data;
+            //if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) {}
+
+            //// bit_depth_luma_minus8 ---[0,4], -----only support 8-bit pixel
+            data = h264_GetVLCElement(parent, pInfo, false);
+            if ( data)
+                break;
+            SPS->bit_depth_luma_minus8 = (uint8_t)data;
+
+            //// bit_depth_chroma_minus8 ---[0,4]
+            data = h264_GetVLCElement(parent, pInfo, false);
+            if ( data )
+                break;
+            SPS->bit_depth_chroma_minus8 = (uint8_t)data;
+
+
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->lossless_qpprime_y_zero_flag = (uint8_t)code;
+
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->seq_scaling_matrix_present_flag = (uint8_t)code;
+
+            if (SPS->seq_scaling_matrix_present_flag == 1)
+            {
+                //int n_ScalingList = (SPS->sps_disp.chroma_format_idc != H264_CHROMA_444) ? 8 : 12;
+                int n_ScalingList = 8; /// We do not support 444 currently
+
+                for (i=0; i<n_ScalingList; i++)
+                {
+                    viddec_pm_get_bits(parent, &code, 1);
+                    SPS->seq_scaling_list_present_flag[i] = (uint8_t)code;
+
+                    if (SPS->seq_scaling_list_present_flag[i])
+                    {
+                        if (i<6)
+                            h264_Scaling_List(parent, SPS->ScalingList4x4[i], 16, &SPS->UseDefaultScalingMatrix4x4Flag[i], pInfo);
+                        else
+                            h264_Scaling_List(parent, SPS->ScalingList8x8[i-6], 64, &SPS->UseDefaultScalingMatrix8x8Flag[i-6], pInfo);
+                    }
+                }
+            }
+        }
+        else
+        {
+            SPS->sps_disp.chroma_format_idc = 1;
+            SPS->seq_scaling_matrix_present_flag = 0;
+
+            SPS->bit_depth_luma_minus8 = 0;
+            SPS->bit_depth_chroma_minus8 = 0;
+            //h264_SetDefaultScalingLists(pInfo);
+        }
+
+        //// log2_max_frame_num_minus4 ---[0,12]
+        data = (h264_GetVLCElement(parent, pInfo, false));
+        if ( data > 12)
+            break;
+        SPS->log2_max_frame_num_minus4 = (uint8_t)data;
+
+        //// pic_order_cnt_type ---- [0,2]
+        data = h264_GetVLCElement(parent, pInfo, false);
+        if ( data > 2)
+            break;
+        SPS->pic_order_cnt_type = (uint8_t)data;
+
+
+        SPS->expectedDeltaPerPOCCycle = 0;
+        if (SPS->pic_order_cnt_type == 0) {
+            SPS->log2_max_pic_order_cnt_lsb_minus4 = h264_GetVLCElement(parent, pInfo, false);
+        } else if (SPS->pic_order_cnt_type == 1) {
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->delta_pic_order_always_zero_flag = (uint8_t)code;
+
+            SPS->offset_for_non_ref_pic = h264_GetVLCElement(parent, pInfo, true);
+            SPS->offset_for_top_to_bottom_field = h264_GetVLCElement(parent, pInfo, true);
+
+            //// num_ref_frames_in_pic_order_cnt_cycle ---- [0,255]
+            data = h264_GetVLCElement(parent, pInfo, false);
+            if ( data > 255)
+                break;
+            SPS->num_ref_frames_in_pic_order_cnt_cycle = (uint8_t)data;
+
+
+            //Alloc memory for frame offset -- FIXME
+            for (i=0; i< SPS->num_ref_frames_in_pic_order_cnt_cycle; i++)
+            {
+                /////SPS->offset_for_ref_frame[i] could be removed from SPS
 #ifndef USER_MODE
-                tmp = h264_GetVLCElement(parent, pInfo, true);
-                pOffset_ref_frame[i]=tmp;
-                SPS->expectedDeltaPerPOCCycle += tmp;
+                tmp = h264_GetVLCElement(parent, pInfo, true);
+                pOffset_ref_frame[i]=tmp;
+                SPS->expectedDeltaPerPOCCycle += tmp;
 #else
-                tmp = h264_GetVLCElement(parent, pInfo, true);
-                SPS->offset_for_ref_frame[i]=tmp;
-                SPS->expectedDeltaPerPOCCycle += tmp;
+                tmp = h264_GetVLCElement(parent, pInfo, true);
+                SPS->offset_for_ref_frame[i]=tmp;
+                SPS->expectedDeltaPerPOCCycle += tmp;
 #endif
-            }
-        }
-
-
//// num_ref_frames ---[0,16] - data = h264_GetVLCElement(parent, pInfo, false); - if( data > 16) - break; - SPS->num_ref_frames = (uint8_t)data; - - viddec_pm_get_bits(parent, &code, 1); - SPS->gaps_in_frame_num_value_allowed_flag = (uint8_t)code; - - - SPS->sps_disp.pic_width_in_mbs_minus1 = h264_GetVLCElement(parent, pInfo, false); - SPS->sps_disp.pic_height_in_map_units_minus1 = h264_GetVLCElement(parent, pInfo, false); - viddec_pm_get_bits(parent, &code, 1); - SPS->sps_disp.frame_mbs_only_flag = (uint8_t)code; - - /// err check for size - PicWidthInMbs = (SPS->sps_disp.pic_width_in_mbs_minus1 + 1); - PicHeightInMapUnits = (SPS->sps_disp.pic_height_in_map_units_minus1 + 1); - FrameHeightInMbs = SPS->sps_disp.frame_mbs_only_flag? PicHeightInMapUnits: (PicHeightInMapUnits<<1); - if((PicWidthInMbs < 2) || (PicWidthInMbs > 128) || (FrameHeightInMbs < 2) || (FrameHeightInMbs>128)) - break; - - if(!SPS->sps_disp.frame_mbs_only_flag) - { - viddec_pm_get_bits(parent, &code, 1); - SPS->sps_disp.mb_adaptive_frame_field_flag = (uint8_t)code; - } - - //SPS->frame_height_in_mbs = (2-SPS->sps_disp.frame_mbs_only_flag)*(SPS->sps_disp.pic_height_in_map_units_minus1+1); - //SPS->pic_size_in_map_units = (SPS->sps_disp.pic_width_in_mbs_minus1+1)*SPS->sps_disp.frame_height_in_mbs; - - viddec_pm_get_bits(parent, &code, 1); - SPS->sps_disp.direct_8x8_inference_flag = (uint8_t)code; - - viddec_pm_get_bits(parent, &code, 1); - SPS->sps_disp.frame_cropping_flag = (uint8_t)code; - - if(SPS->sps_disp.frame_cropping_flag) - { - SPS->sps_disp.frame_crop_rect_left_offset = h264_GetVLCElement(parent, pInfo, false); - SPS->sps_disp.frame_crop_rect_right_offset = h264_GetVLCElement(parent, pInfo, false); - SPS->sps_disp.frame_crop_rect_top_offset = h264_GetVLCElement(parent, pInfo, false); - SPS->sps_disp.frame_crop_rect_bottom_offset = h264_GetVLCElement(parent, pInfo, false); - } - - //// when frame_mbs_only_flag is equal to 0, direct_8x8_inference_flag shall be equal to 1 - if (SPS->sps_disp.frame_mbs_only_flag == 0 && SPS->sps_disp.direct_8x8_inference_flag == 0){ - break; - } - - ////// vui_parameters - if(viddec_pm_get_bits(parent, &code, 1) == -1) - break; - SPS->sps_disp.vui_parameters_present_flag = (uint8_t)code; - ret = H264_STATUS_OK; - - if(SPS->sps_disp.vui_parameters_present_flag) - { + } + } + + //// num_ref_frames ---[0,16] + data = h264_GetVLCElement(parent, pInfo, false); + if ( data > 16) + break; + SPS->num_ref_frames = (uint8_t)data; + + viddec_pm_get_bits(parent, &code, 1); + SPS->gaps_in_frame_num_value_allowed_flag = (uint8_t)code; + + + SPS->sps_disp.pic_width_in_mbs_minus1 = h264_GetVLCElement(parent, pInfo, false); + SPS->sps_disp.pic_height_in_map_units_minus1 = h264_GetVLCElement(parent, pInfo, false); + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.frame_mbs_only_flag = (uint8_t)code; + + /// err check for size + PicWidthInMbs = (SPS->sps_disp.pic_width_in_mbs_minus1 + 1); + PicHeightInMapUnits = (SPS->sps_disp.pic_height_in_map_units_minus1 + 1); + FrameHeightInMbs = SPS->sps_disp.frame_mbs_only_flag? 
PicHeightInMapUnits: (PicHeightInMapUnits<<1); + if ((PicWidthInMbs < 2) || (PicWidthInMbs > 128) || (FrameHeightInMbs < 2) || (FrameHeightInMbs>128)) + break; + + if (!SPS->sps_disp.frame_mbs_only_flag) + { + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.mb_adaptive_frame_field_flag = (uint8_t)code; + } + + //SPS->frame_height_in_mbs = (2-SPS->sps_disp.frame_mbs_only_flag)*(SPS->sps_disp.pic_height_in_map_units_minus1+1); + //SPS->pic_size_in_map_units = (SPS->sps_disp.pic_width_in_mbs_minus1+1)*SPS->sps_disp.frame_height_in_mbs; + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.direct_8x8_inference_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.frame_cropping_flag = (uint8_t)code; + + if (SPS->sps_disp.frame_cropping_flag) + { + SPS->sps_disp.frame_crop_rect_left_offset = h264_GetVLCElement(parent, pInfo, false); + SPS->sps_disp.frame_crop_rect_right_offset = h264_GetVLCElement(parent, pInfo, false); + SPS->sps_disp.frame_crop_rect_top_offset = h264_GetVLCElement(parent, pInfo, false); + SPS->sps_disp.frame_crop_rect_bottom_offset = h264_GetVLCElement(parent, pInfo, false); + } + + //// when frame_mbs_only_flag is equal to 0, direct_8x8_inference_flag shall be equal to 1 + if (SPS->sps_disp.frame_mbs_only_flag == 0 && SPS->sps_disp.direct_8x8_inference_flag == 0) { + break; + } + + ////// vui_parameters + if (viddec_pm_get_bits(parent, &code, 1) == -1) + break; + SPS->sps_disp.vui_parameters_present_flag = (uint8_t)code; + ret = H264_STATUS_OK; + + if (SPS->sps_disp.vui_parameters_present_flag) + { #ifndef VBP // Ignore VUI parsing result - ret = -#endif - h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used); - } + ret = +#endif + h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used); + } - }while(0); + } while (0); - //h264_Parse_rbsp_trailing_bits(pInfo); + //h264_Parse_rbsp_trailing_bits(pInfo); - return ret; + return ret; } //#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c index 40ec011..bbf2835 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c @@ -4,124 +4,124 @@ #include "h264parse.h" #include "viddec_fw_item_types.h" #include "h264parse_dpb.h" -#include + extern void* h264_memcpy( void* dest, void* src, uint32_t num ); uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap) { - if (swap != 0) - { - g_warning("swap copying is not implemented."); - } - - if (to_ddr) - { - memcpy((void*)ddr_addr, (void*)local_addr, size); - } - else - { - memcpy((void*)local_addr, (void*)ddr_addr, size); - } - + if (swap != 0) + { + //g_warning("swap copying is not implemented."); + } + + if (to_ddr) + { + memcpy((void*)ddr_addr, (void*)local_addr, size); + } + else + { + memcpy((void*)local_addr, (void*)ddr_addr, size); + } + return (0); } #if 0 void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) { - - if(pInfo->Is_first_frame_in_stream) //new stream, fill new frame in cur - { - pInfo->img.g_new_frame = 0; - pInfo->Is_first_frame_in_stream =0; - pInfo->push_to_cur = 1; + if (pInfo->Is_first_frame_in_stream) //new stream, fill new frame in cur + { + + pInfo->img.g_new_frame = 0; + pInfo->Is_first_frame_in_stream =0; + pInfo->push_to_cur = 1; + + } + else // move to next for new frame + { + pInfo->push_to_cur = 0; + } - } - else // move to next for new frame - { - pInfo->push_to_cur = 
0; - } - - //fill dpb managemnt info + //fill dpb managemnt info - - pInfo->dpb.frame_numbers_need_to_be_displayed =0; - pInfo->dpb.frame_numbers_need_to_be_removed =0; - pInfo->dpb.frame_numbers_need_to_be_allocated =0; + + pInfo->dpb.frame_numbers_need_to_be_displayed =0; + pInfo->dpb.frame_numbers_need_to_be_removed =0; + pInfo->dpb.frame_numbers_need_to_be_allocated =0; } void h264_parse_emit_eos( void *parent, h264_Info *pInfo ) { - //// - //// Now we can flush out all frames in DPB fro display - if(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].is_used != 3) - { - h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc); //, DANGLING_TYPE_GAP_IN_FRAME - } + //// + //// Now we can flush out all frames in DPB fro display + if (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].is_used != 3) + { + h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc); //, DANGLING_TYPE_GAP_IN_FRAME + } + + h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0); + h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames); - h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0); - h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames); + pInfo->dpb.frame_numbers_need_to_be_displayed =0; + pInfo->dpb.frame_numbers_need_to_be_removed =0; - pInfo->dpb.frame_numbers_need_to_be_displayed =0; - pInfo->dpb.frame_numbers_need_to_be_removed =0; - } void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) { - pInfo->qm_present_list=0; + pInfo->qm_present_list=0; } void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) { -#if 1 - uint32_t i, nitems=0; - - - if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) ) - { - if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) - { - nitems = pInfo->SliceHeader.num_ref_idx_l0_active; - - for(i=0; idpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0) +#if 1 + uint32_t i, nitems=0; + + + if ( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) ) + { + if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) + { + nitems = pInfo->SliceHeader.num_ref_idx_l0_active; + + for (i=0; ih264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80; - break; + if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0) + { + pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80; + break; + } } - } - } - else - { - nitems = pInfo->dpb.listXsize[0]; - - for(i=0; idpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0) + } + else + { + nitems = pInfo->dpb.listXsize[0]; + + for (i=0; ih264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80; - break; + if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0) + { + pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80; + break; + } } - } - } - - } - else - { - nitems =0; - } -#endif + } + + } + else + { + nitems =0; + } +#endif } #else @@ -129,381 +129,381 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) { - viddec_workload_item_t wi; - h264_slice_data slice_data; + viddec_workload_item_t wi; + h264_slice_data slice_data; + + uint32_t i=0, nitems=0, data=0; + uint32_t bits_offset =0, byte_offset =0; + uint8_t is_emul =0; - uint32_t i=0, nitems=0, data=0; - uint32_t bits_offset =0, byte_offset =0; - uint8_t is_emul =0; + ////////////////////// Update Reference list ////////////////// + if ( 
(h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) ) + { + if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) + { + nitems = pInfo->SliceHeader.num_ref_idx_l0_active; - ////////////////////// Update Reference list ////////////////// - if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) ) - { - if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) - { - nitems = pInfo->SliceHeader.num_ref_idx_l0_active; - - for(i=0; idpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0) + for (i=0; ih264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80; - break; + if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0) + { + pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80; + break; + } } - } - } - else - { - nitems = pInfo->dpb.listXsize[0]; - - for(i=0; idpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0) + } + else + { + nitems = pInfo->dpb.listXsize[0]; + + for (i=0; ih264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80; - break; + if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0) + { + pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80; + break; + } } - } - } - - } - else - { - nitems =0; - } - /////file ref list 0 - // h264_parse_emit_ref_list(parent, pInfo, 0); - - /////file ref list 1 - //h264_parse_emit_ref_list(parent, pInfo, 1); - - ///////////////////////////////////// Slice Data //////////////////////////////// - // h264_fill_slice_data(pInfo, &slice_data); - - wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_H264_SLICE_REG); - - wi.data.data_offset = slice_data.h264_bsd_slice_start; - wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1; - wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - // viddec_pm_append_workitem( parent , &wi); - } - else - { - // viddec_pm_append_workitem_next( parent , &wi); - } - - - ///////////////////////////predict weight table item and data if have/////////////////////////// - if(pInfo->h264_pwt_enabled) - { - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET; - wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1; - wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset; - wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - // viddec_pm_append_workitem( parent , &wi); + } + + } + else + { + nitems =0; + } + /////file ref list 0 + // h264_parse_emit_ref_list(parent, pInfo, 0); + + /////file ref list 1 + //h264_parse_emit_ref_list(parent, pInfo, 1); + + ///////////////////////////////////// Slice Data //////////////////////////////// + // h264_fill_slice_data(pInfo, &slice_data); + + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_H264_SLICE_REG); + + wi.data.data_offset = slice_data.h264_bsd_slice_start; + wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1; + wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2; - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; - wi.es.es_flags = 0; - // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1); - } - else - { - // viddec_pm_append_workitem_next( parent , &wi); - - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; - wi.es.es_flags = 0; - // viddec_pm_append_misc_tags(parent, 
pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0); - } - } - - - ////////////////////////////////// Update ES Buffer for Slice /////////////////////// - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - - //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset); - - if(pInfo->active_PPS.entropy_coding_mode_flag) - { - if(0!=bits_offset) { - data = data; // fix compilation warning - // don't skip byte-aligned bits as those bits are actually - // part of slice_data - //viddec_pm_get_bits(parent, &data, 8-bits_offset); - } - } - else - { - if(0!=bits_offset) { - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET; - wi.data.data_offset = bits_offset; - wi.data.data_payload[0]=0; - wi.data.data_payload[1]=0; - - if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur - // viddec_pm_append_workitem( parent , &wi); - } - else { - //viddec_pm_append_workitem_next( parent , &wi); - } - } - } - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - //viddec_pm_append_pixeldata( parent ); - } - else - { - //viddec_pm_append_pixeldata_next( parent); - } - - return; + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + // viddec_pm_append_workitem( parent , &wi); + } + else + { + // viddec_pm_append_workitem_next( parent , &wi); + } + + + ///////////////////////////predict weight table item and data if have/////////////////////////// + if (pInfo->h264_pwt_enabled) + { + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET; + wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1; + wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset; + wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset; + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + // viddec_pm_append_workitem( parent , &wi); + + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; + wi.es.es_flags = 0; + // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1); + } + else + { + // viddec_pm_append_workitem_next( parent , &wi); + + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; + wi.es.es_flags = 0; + // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0); + } + } + + + ////////////////////////////////// Update ES Buffer for Slice /////////////////////// + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset); + + if (pInfo->active_PPS.entropy_coding_mode_flag) + { + if (0!=bits_offset) { + data = data; // fix compilation warning + // don't skip byte-aligned bits as those bits are actually + // part of slice_data + //viddec_pm_get_bits(parent, &data, 8-bits_offset); + } + } + else + { + if (0!=bits_offset) { + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET; + wi.data.data_offset = bits_offset; + wi.data.data_payload[0]=0; + wi.data.data_payload[1]=0; + + if (pInfo->push_to_cur) { //cur is empty, fill new frame in cur + // viddec_pm_append_workitem( parent , &wi); + } + else { + //viddec_pm_append_workitem_next( parent , &wi); + } + } + } + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + //viddec_pm_append_pixeldata( parent ); + } + else + { + 
//viddec_pm_append_pixeldata_next( parent); + } + + return; } void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) { - viddec_workload_item_t wi; - - const uint32_t *pl; - uint32_t i=0,nitems=0; - - h264_pic_data pic_data; - - pInfo->qm_present_list=0; - - //h264_parse_emit_4X4_scaling_matrix(parent, pInfo); - // h264_parse_emit_8X8_scaling_matrix(parent, pInfo); - - // h264_fill_pic_data(pInfo, &pic_data); - - // How many payloads must be generated - nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up - - pl = (const uint32_t *) &pic_data; - - // Dump slice data to an array of workitems, to do pl access non valid mem - for( i = 0; i < nitems; i++ ) - { - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PIC_REG; - wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct - wi.data.data_payload[0] = pl[0]; - wi.data.data_payload[1] = pl[1]; - pl += 2; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - - // viddec_pm_append_workitem( parent, &wi ); - } - else - { - //viddec_pm_append_workitem_next( parent, &wi ); - } - } - - return; + viddec_workload_item_t wi; + + const uint32_t *pl; + uint32_t i=0,nitems=0; + + h264_pic_data pic_data; + + pInfo->qm_present_list=0; + + //h264_parse_emit_4X4_scaling_matrix(parent, pInfo); + // h264_parse_emit_8X8_scaling_matrix(parent, pInfo); + + // h264_fill_pic_data(pInfo, &pic_data); + + // How many payloads must be generated + nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up + + pl = (const uint32_t *) &pic_data; + + // Dump slice data to an array of workitems, to do pl access non valid mem + for ( i = 0; i < nitems; i++ ) + { + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PIC_REG; + wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct + wi.data.data_payload[0] = pl[0]; + wi.data.data_payload[1] = pl[1]; + pl += 2; + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + + // viddec_pm_append_workitem( parent, &wi ); + } + else + { + //viddec_pm_append_workitem_next( parent, &wi ); + } + } + + return; } void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) { - viddec_workload_item_t wi; - uint32_t i=0,nitems=0; - - ///////////////////////// Frame attributes////////////////////////// - - //Push data into current workload if first frame or frame_boundary already detected by non slice nal - if( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal)) - { - //viddec_workload_t *wl_cur = viddec_pm_get_header( parent ); - //pInfo->img.g_new_frame = 0; - pInfo->Is_first_frame_in_stream =0; - pInfo->is_frame_boundary_detected_by_non_slice_nal=0; - pInfo->push_to_cur = 1; - //h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo); - } - else // move to cur if frame boundary detected by previous non slice nal, or move to next if not - { - //viddec_workload_t *wl_next = viddec_pm_get_next_header (parent); - - pInfo->push_to_cur = 0; - //h264_translate_parser_info_to_frame_attributes(wl_next, pInfo); - - pInfo->is_current_workload_done=1; - } - - ///////////////////// SPS///////////////////// - // h264_parse_emit_sps(parent, pInfo); - - /////////////////////display frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_displayed; - - for(i=0; idpb.frame_id_need_to_be_displayed[i]); - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - 
- if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - // viddec_pm_append_workitem( parent, &wi ); - } - else - { - // viddec_pm_append_workitem_next( parent, &wi ); - } - } - pInfo->dpb.frame_numbers_need_to_be_displayed =0; - - - /////////////////////release frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_removed; - - for(i=0; idpb.frame_id_need_to_be_removed[i]); - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - //viddec_pm_append_workitem( parent, &wi ); - } - else - { - // viddec_pm_append_workitem_next( parent, &wi ); - } - - } - pInfo->dpb.frame_numbers_need_to_be_removed =0; - - /////////////////////flust frames (do not display)///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_dropped; - - for(i=0; idpb.frame_id_need_to_be_dropped[i]); - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - //viddec_pm_append_workitem( parent, &wi ); - } - else - { - // viddec_pm_append_workitem_next( parent, &wi ); - } - - } - pInfo->dpb.frame_numbers_need_to_be_dropped =0; - - /////////////////////updata DPB frames///////////////////// - nitems = pInfo->dpb.used_size; - for(i=0; idpb.fs_dpb_idc[i]; - - if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0) - { - wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id); - wi.ref_frame.reference_id = fs_id; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - // viddec_pm_append_workitem( parent, &wi ); - } - else - { - //viddec_pm_append_workitem_next( parent, &wi ); - } - } - } - - - /////////////////////updata dpb frames info (poc)///////////////////// - nitems = pInfo->dpb.used_size; - for(i=0; idpb.fs_dpb_idc[i]; - - if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0) - { - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_DPB_FRAME_POC; - wi.data.data_offset = fs_id; - //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc); - - switch(viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id]))) - { - case (FRAME):{ - wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc; - wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc; - break; - }; - - case (TOP_FIELD):{ - wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc; - wi.data.data_payload[1] = 0; - break; - }; - - case (BOTTOM_FIELD):{ - wi.data.data_payload[0] = 0; - wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc; - break; - }; - - default : { - wi.data.data_payload[0] = 0; - wi.data.data_payload[1] = 0; - break; - }; - } - - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - // viddec_pm_append_workitem( parent, &wi ); - } - else - { - //viddec_pm_append_workitem_next( parent, &wi ); - } - - } - } - - /////////////////////Alloc buffer for current Existing frame///////////////////// - if(0!=pInfo->dpb.frame_numbers_need_to_be_allocated) - { - if(pInfo->push_to_cur) - { - // viddec_workload_t *wl_cur = viddec_pm_get_header (parent); - // wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | 
(pInfo->dpb.frame_id_need_to_be_allocated & 0x1f); - } - else - { - // viddec_workload_t *wl_next = viddec_pm_get_next_header (parent); - //wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f); - } - } - pInfo->dpb.frame_numbers_need_to_be_allocated =0; - - return; + viddec_workload_item_t wi; + uint32_t i=0,nitems=0; + + ///////////////////////// Frame attributes////////////////////////// + + //Push data into current workload if first frame or frame_boundary already detected by non slice nal + if ( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal)) + { + //viddec_workload_t *wl_cur = viddec_pm_get_header( parent ); + //pInfo->img.g_new_frame = 0; + pInfo->Is_first_frame_in_stream =0; + pInfo->is_frame_boundary_detected_by_non_slice_nal=0; + pInfo->push_to_cur = 1; + //h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo); + } + else // move to cur if frame boundary detected by previous non slice nal, or move to next if not + { + //viddec_workload_t *wl_next = viddec_pm_get_next_header (parent); + + pInfo->push_to_cur = 0; + //h264_translate_parser_info_to_frame_attributes(wl_next, pInfo); + + pInfo->is_current_workload_done=1; + } + + ///////////////////// SPS///////////////////// + // h264_parse_emit_sps(parent, pInfo); + + /////////////////////display frames///////////////////// + nitems = pInfo->dpb.frame_numbers_need_to_be_displayed; + + for (i=0; i<nitems; i++) + { + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0+pInfo->dpb.frame_id_need_to_be_displayed[i]); + wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i]; + wi.ref_frame.luma_phys_addr = 0; + wi.ref_frame.chroma_phys_addr = 0; + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + // viddec_pm_append_workitem( parent, &wi ); + } + else + { + // viddec_pm_append_workitem_next( parent, &wi ); + } + } + pInfo->dpb.frame_numbers_need_to_be_displayed =0; + + + /////////////////////release frames///////////////////// + nitems = pInfo->dpb.frame_numbers_need_to_be_removed; + + for (i=0; i<nitems; i++) + { + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0+pInfo->dpb.frame_id_need_to_be_removed[i]); + wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i]; + wi.ref_frame.luma_phys_addr = 0; + wi.ref_frame.chroma_phys_addr = 0; + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + //viddec_pm_append_workitem( parent, &wi ); + } + else + { + // viddec_pm_append_workitem_next( parent, &wi ); + } + + } + pInfo->dpb.frame_numbers_need_to_be_removed =0; + + /////////////////////flush frames (do not display)///////////////////// + nitems = pInfo->dpb.frame_numbers_need_to_be_dropped; + + for (i=0; i<nitems; i++) + { + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0+pInfo->dpb.frame_id_need_to_be_dropped[i]); + wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i]; + wi.ref_frame.luma_phys_addr = 0; + wi.ref_frame.chroma_phys_addr = 0; + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + //viddec_pm_append_workitem( parent, &wi ); + } + else + { + // viddec_pm_append_workitem_next( parent, &wi ); + } + + } + pInfo->dpb.frame_numbers_need_to_be_dropped =0; + + /////////////////////update DPB frames///////////////////// + nitems = pInfo->dpb.used_size; + for (i=0; i<nitems; i++) + { + int32_t fs_id; + + fs_id = pInfo->dpb.fs_dpb_idc[i]; + + if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0) + { + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id); + wi.ref_frame.reference_id = fs_id; + wi.ref_frame.luma_phys_addr = 0; + wi.ref_frame.chroma_phys_addr = 0; + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + // viddec_pm_append_workitem( parent, &wi ); + } + else + { + //viddec_pm_append_workitem_next( parent, &wi ); + } + } + }
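A note on the payload layout used by h264_parse_emit_current_pic above and by the DPB/POC items that follow: each viddec_workload_item_t carries exactly two 32-bit payload words, so a C struct is emitted as (sizeof + 7) / 8 qword-sized items. As the in-code comment warns ("to do pl access non valid mem"), walking the struct with a raw uint32_t pointer reads past the end of the object on the final item whenever the size is not a multiple of 8. A minimal standalone sketch of the same chunking that zero-pads the tail instead; the item struct here is a simplified, hypothetical stand-in, not the firmware type:

    #include <stdint.h>
    #include <string.h>

    /* Simplified stand-in for viddec_workload_item_t (hypothetical). */
    typedef struct {
        uint32_t vwi_type;
        uint32_t data_offset;
        uint32_t data_payload[2];
    } pic_reg_item_t;

    /* Emit a struct as qword-sized items, zero-padding the last qword
       so it never reads past the end of the object. */
    static uint32_t emit_pic_regs(const void *pic, uint32_t size,
                                  uint32_t type, pic_reg_item_t *out)
    {
        uint32_t nitems = (size + 7) / 8;   /* in QWORDs, rounded up */
        const uint8_t *src = (const uint8_t *)pic;
        uint32_t i;

        for (i = 0; i < nitems; i++) {
            uint8_t qw[8] = {0};
            uint32_t left = size - i * 8;

            memcpy(qw, src + i * 8, left < 8 ? left : 8);
            out[i].vwi_type    = type;
            out[i].data_offset = i * 8;     /* byte offset within the struct */
            memcpy(&out[i].data_payload[0], &qw[0], 4);
            memcpy(&out[i].data_payload[1], &qw[4], 4);
        }
        return nitems;
    }

The data_offset field records the byte offset within the source struct, matching the "offset within struct" bookkeeping in the loop above.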
+ + + /////////////////////update dpb frames info (poc)///////////////////// + nitems = pInfo->dpb.used_size; + for (i=0; i<nitems; i++) + { + int32_t fs_id; + + fs_id = pInfo->dpb.fs_dpb_idc[i]; + + if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0) + { + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_DPB_FRAME_POC; + wi.data.data_offset = fs_id; + //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc); + + switch (viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id]))) + { + case (FRAME): { + wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc; + wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc; + break; + }; + + case (TOP_FIELD): { + wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc; + wi.data.data_payload[1] = 0; + break; + }; + + case (BOTTOM_FIELD): { + wi.data.data_payload[0] = 0; + wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc; + break; + }; + + default : { + wi.data.data_payload[0] = 0; + wi.data.data_payload[1] = 0; + break; + }; + } + + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + // viddec_pm_append_workitem( parent, &wi ); + } + else + { + //viddec_pm_append_workitem_next( parent, &wi ); + } + + } + } + + /////////////////////Alloc buffer for current Existing frame///////////////////// + if (0!=pInfo->dpb.frame_numbers_need_to_be_allocated) + { + if (pInfo->push_to_cur) + { + // viddec_workload_t *wl_cur = viddec_pm_get_header (parent); + // wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f); + } + else + { + // viddec_workload_t *wl_next = viddec_pm_get_next_header (parent); + //wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f); + } + } + pInfo->dpb.frame_numbers_need_to_be_allocated =0; + + return; }
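The display/release/dropout loops above all repeat the same commented-out pair: viddec_pm_append_workitem() when push_to_cur routes items into the current workload, viddec_pm_append_workitem_next() otherwise. The scaling-matrix emitters later in this patch already use the three-argument form viddec_pm_append_workitem(parent, &wi, !pInfo->push_to_cur), which folds that choice into one call. A hypothetical wrapper in the same spirit; emit_ref_frame_item is a sketch, not part of the source, and it assumes the headers already included by this file:

    /* Hypothetical helper; the three-argument append is the form the
       scaling-matrix emitters below already use. */
    static inline void emit_ref_frame_item(void *parent, h264_Info *pInfo,
                                           workload_item_type type,
                                           uint32_t frame_id)
    {
        viddec_workload_item_t wi;

        wi.vwi_type = type;
        wi.ref_frame.reference_id     = frame_id;
        wi.ref_frame.luma_phys_addr   = 0;  /* left 0 here, as in the loops above */
        wi.ref_frame.chroma_phys_addr = 0;
        /* third argument selects the next workload when cur is already closed */
        viddec_pm_append_workitem(parent, &wi, !pInfo->push_to_cur);
    }

With that helper, each queue-draining loop reduces to one call per frame id, e.g. emit_ref_frame_item(parent, pInfo, (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0+id), id).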
@@ -511,65 +511,65 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) void h264_parse_emit_eos( void *parent, h264_Info *pInfo ) { - uint32_t nitems=0, i=0; - viddec_workload_item_t wi; - - //// - //// Now we can flush out all frames in DPB fro display - if(viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3) - { - h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc); //, DANGLING_TYPE_GAP_IN_FRAME - } - - h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0); - h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames); - - - /////////////////////display frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_displayed; - - for(i=0; i<nitems; i++) - { - wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0+pInfo->dpb.frame_id_need_to_be_displayed[i]); - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - //viddec_pm_append_workitem( parent, &wi ); - } - else - { - //viddec_pm_append_workitem_next( parent, &wi ); - } - } - pInfo->dpb.frame_numbers_need_to_be_displayed =0; - - - /////////////////////release frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_removed; - - for(i=0; i<nitems; i++) - { - wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0+pInfo->dpb.frame_id_need_to_be_removed[i]); - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - // viddec_pm_append_workitem( parent, &wi ); - viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE); - } - else - { - // viddec_pm_append_workitem_next( parent, &wi ); - viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next); - } - } - pInfo->dpb.frame_numbers_need_to_be_removed =0; - - return; + uint32_t nitems=0, i=0; + viddec_workload_item_t wi; + + //// + //// Now we can flush out all frames in DPB for display + if (viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3) + { + h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc); //, DANGLING_TYPE_GAP_IN_FRAME + } + + h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0); + h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames); + + + /////////////////////display frames///////////////////// + nitems = pInfo->dpb.frame_numbers_need_to_be_displayed; + + for (i=0; i<nitems; i++) + { + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0+pInfo->dpb.frame_id_need_to_be_displayed[i]); + wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i]; + wi.ref_frame.luma_phys_addr = 0; + wi.ref_frame.chroma_phys_addr = 0; + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + //viddec_pm_append_workitem( parent, &wi ); + } + else + { + //viddec_pm_append_workitem_next( parent, &wi ); + } + } + pInfo->dpb.frame_numbers_need_to_be_displayed =0; + + + /////////////////////release frames///////////////////// + nitems = pInfo->dpb.frame_numbers_need_to_be_removed; + + for (i=0; i<nitems; i++) + { + wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0+pInfo->dpb.frame_id_need_to_be_removed[i]); + wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i]; + wi.ref_frame.luma_phys_addr = 0; + wi.ref_frame.chroma_phys_addr = 0; + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + // viddec_pm_append_workitem( parent, &wi ); + viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE); + } + else + { + // viddec_pm_append_workitem_next( parent, &wi ); + viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next); + } + } + pInfo->dpb.frame_numbers_need_to_be_removed =0; + + return; } #endif diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c index a763d00..b5b6c87 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c @@ -17,18 +17,18 @@ void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) static void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) #endif { - struct h264_viddec_parser* parser = ctxt; - h264_Info * pInfo = &(parser->info); - - if(!preserve) - { - /* we don't initialize this data if we want to preserve - sequence and gop information */ - h264_init_sps_pps(parser,persist_mem); - } - /* picture level info which will always be initialized */ - h264_init_Info_under_sps_pps_level(pInfo); - return; + struct h264_viddec_parser* parser = ctxt; + h264_Info * pInfo = &(parser->info); + + if (!preserve) + { + /* we don't initialize this data if we want to preserve + sequence and gop information */ + h264_init_sps_pps(parser,persist_mem); + } + /* picture level info which will always be initialized */ + h264_init_Info_under_sps_pps_level(pInfo); + return; }
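viddec_h264_init above splits parser state into two lifetimes: persist_mem holds the sequence/GOP-level headers (SPS/PPS) that survive when preserve is set, while the picture-level state in h264_Info is always re-initialized. A sketch of how a caller might use that across a seek or discontinuity; the reset_for_seek helper and the trimmed ops struct are hypothetical, only the init signature comes from this file:

    #include <stdint.h>

    /* Trimmed stand-in for viddec_parser_ops_t; only init() is shown. */
    typedef struct {
        void (*init)(void *ctxt, uint32_t *persist_mem, uint32_t preserve);
    } parser_ops_t;

    /* Re-arm the parser across a seek: preserve=1 keeps the SPS/PPS held
       in persist_mem so decode can resume without waiting for the headers
       to be retransmitted; per-picture state is still reset. */
    static void reset_for_seek(const parser_ops_t *ops, void *ctxt,
                               uint32_t *persist_mem)
    {
        ops->init(ctxt, persist_mem, 1);
    }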
@@ -41,434 +41,434 @@ uint32_t viddec_h264_parse(void *parent, void *ctxt) static uint32_t viddec_h264_parse(void *parent, void *ctxt) #endif { - struct h264_viddec_parser* parser = ctxt; - - h264_Info * pInfo = &(parser->info); + struct h264_viddec_parser* parser = ctxt; - h264_Status status = H264_STATUS_ERROR; - + h264_Info * pInfo = &(parser->info); - uint8_t nal_ref_idc = 0; + h264_Status status = H264_STATUS_ERROR; - ///// Parse NAL Unit header - pInfo->img.g_new_frame = 0; - pInfo->push_to_cur = 1; - pInfo->is_current_workload_done =0; - pInfo->nal_unit_type = 0; - - h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); - ///// Check frame bounday for non-vcl elimitter - h264_check_previous_frame_end(pInfo); + uint8_t nal_ref_idc = 0; - //OS_INFO("========================nal_type: %d=================\n", pInfo->nal_unit_type); - //DEBUG_WRITE(pInfo->nal_unit_type, pInfo->got_start, pInfo->wl_err_flag, pInfo->is_current_workload_done, 0, 0); + ///// Parse NAL Unit header + pInfo->img.g_new_frame = 0; + pInfo->push_to_cur = 1; + pInfo->is_current_workload_done =0; + pInfo->nal_unit_type = 0; + + h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); + + ///// Check frame boundary for non-VCL delimiter + h264_check_previous_frame_end(pInfo); + + //OS_INFO("========================nal_type: %d=================\n", pInfo->nal_unit_type); + //DEBUG_WRITE(pInfo->nal_unit_type, pInfo->got_start, pInfo->wl_err_flag, pInfo->is_current_workload_done, 0, 0); #if 0 - devh_SVEN_WriteModuleEvent( NULL, - SVEN_MODULE_EVENT_GV_FW_PARSER_DEBUG_P0, - pInfo->got_start,pInfo->nal_unit_type, pInfo->wl_err_curr, pInfo->is_current_workload_done, 0, pInfo->img.frame_num); -#endif - - //////// Parse valid NAL unit - switch ( pInfo->nal_unit_type ) - { - case h264_NAL_UNIT_TYPE_IDR: - if(pInfo->got_start) { - pInfo->img.recovery_point_found |= 1; - } - - pInfo->sei_rp_received = 0; - - case h264_NAL_UNIT_TYPE_SLICE: - //////////////////////////////////////////////////////////////////////////// - // Step 1: Check start point - //////////////////////////////////////////////////////////////////////////// - // - /// Slice parsing must start from the valid start point( SPS, PPS, IDR or recovery point or primary_I) - /// 1) No start point reached, append current ES buffer to workload and release it - /// 2) else, start parsing - // - //if(pInfo->got_start && ((pInfo->sei_information.recovery_point) || (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR))) - //{ - //pInfo->img.recovery_point_found = 1; - //} - { - - h264_Slice_Header_t next_SliceHeader; - - /// Reset next slice header - h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t)); - next_SliceHeader.nal_ref_idc = nal_ref_idc; - - if( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start)) - { - pInfo->img.recovery_point_found |=4; - } - pInfo->primary_pic_type_plus_one = 0; - - - - if(pInfo->img.recovery_point_found == 0) { - pInfo->img.structure = FRAME; - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); - break; - } - - //////////////////////////////////////////////////////////////////////////// - // Step 2: Parsing slice header - //////////////////////////////////////////////////////////////////////////// - /// PWT - pInfo->h264_pwt_start_byte_offset=0; - pInfo->h264_pwt_start_bit_offset=0; - pInfo->h264_pwt_end_byte_offset=0; - pInfo->h264_pwt_end_bit_offset=0; - pInfo->h264_pwt_enabled =0; - /// IDR flag - next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); - - - /// Pass slice header - status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP(parent, pInfo, &next_SliceHeader); - - pInfo->sei_information.recovery_point = 0; - - if(next_SliceHeader.sh_error & 3) { - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - - // Error type definition, refer to viddec_fw_common_defs.h - 
// if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17) - // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18) - // if this is frame based, both 2 bits should be set - pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); - - break; - } - pInfo->img.current_slice_num++; + devh_SVEN_WriteModuleEvent( NULL, + SVEN_MODULE_EVENT_GV_FW_PARSER_DEBUG_P0, + pInfo->got_start,pInfo->nal_unit_type, pInfo->wl_err_curr, pInfo->is_current_workload_done, 0, pInfo->img.frame_num); +#endif + + //////// Parse valid NAL unit + switch ( pInfo->nal_unit_type ) + { + case h264_NAL_UNIT_TYPE_IDR: + if (pInfo->got_start) { + pInfo->img.recovery_point_found |= 1; + } + + pInfo->sei_rp_received = 0; + + case h264_NAL_UNIT_TYPE_SLICE: + //////////////////////////////////////////////////////////////////////////// + // Step 1: Check start point + //////////////////////////////////////////////////////////////////////////// + // + /// Slice parsing must start from the valid start point( SPS, PPS, IDR or recovery point or primary_I) + /// 1) No start point reached, append current ES buffer to workload and release it + /// 2) else, start parsing + // + //if(pInfo->got_start && ((pInfo->sei_information.recovery_point) || (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR))) + //{ + //pInfo->img.recovery_point_found = 1; + //} + { + + h264_Slice_Header_t next_SliceHeader; + + /// Reset next slice header + h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t)); + next_SliceHeader.nal_ref_idc = nal_ref_idc; + + if ( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start)) + { + pInfo->img.recovery_point_found |=4; + } + pInfo->primary_pic_type_plus_one = 0; + + + + if (pInfo->img.recovery_point_found == 0) { + pInfo->img.structure = FRAME; + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + break; + } + + //////////////////////////////////////////////////////////////////////////// + // Step 2: Parsing slice header + //////////////////////////////////////////////////////////////////////////// + /// PWT + pInfo->h264_pwt_start_byte_offset=0; + pInfo->h264_pwt_start_bit_offset=0; + pInfo->h264_pwt_end_byte_offset=0; + pInfo->h264_pwt_end_bit_offset=0; + pInfo->h264_pwt_enabled =0; + /// IDR flag + next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); + + + /// Pass slice header + status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP(parent, pInfo, &next_SliceHeader); + + pInfo->sei_information.recovery_point = 0; + + if (next_SliceHeader.sh_error & 3) { + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + + // Error type definition, refer to viddec_fw_common_defs.h + // if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17) + // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18) + // if this is frame based, both 2 bits should be set + pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + + break; + } + pInfo->img.current_slice_num++; #ifdef DUMP_HEADER_INFO -dump_slice_header(pInfo, &next_SliceHeader); -////h264_print_decoder_values(pInfo); + dump_slice_header(pInfo, &next_SliceHeader); +////h264_print_decoder_values(pInfo); #endif - //////////////////////////////////////////////////////////////////////////// - // Step 3: Processing if new picture coming - // 1) if it's the second field - // 2) if it's a new frame - //////////////////////////////////////////////////////////////////////////// - //AssignQuantParam(pInfo); - 
if(h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader)) - { - // - ///----------------- New Picture.boundary detected-------------------- - // - pInfo->img.g_new_pic++; - - // - // Complete previous picture - h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old - //h264_hdr_post_poc(0, 0, use_old); - - // - // Update slice structures: - h264_update_old_slice(pInfo, next_SliceHeader); //cur->old; next->cur; - - // - // 1) if resolution change: reset dpb - // 2) else: init frame store - h264_update_img_info(pInfo); //img, dpb - - // - ///----------------- New frame.boundary detected-------------------- - // - pInfo->img.second_field = h264_is_second_field(pInfo); - if(pInfo->img.second_field == 0) - { - pInfo->img.g_new_frame = 1; - h264_dpb_update_queue_dangling_field(pInfo); - - // - /// DPB management - /// 1) check the gaps - /// 2) assign fs for non-exist frames - /// 3) fill the gaps - /// 4) store frame into DPB if ... - // - //if(pInfo->SliceHeader.redundant_pic_cnt) - { - h264_dpb_gaps_in_frame_num_mem_management(pInfo); - } + //////////////////////////////////////////////////////////////////////////// + // Step 3: Processing if new picture coming + // 1) if it's the second field + // 2) if it's a new frame + //////////////////////////////////////////////////////////////////////////// + //AssignQuantParam(pInfo); + if (h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader)) + { + // + ///----------------- New Picture.boundary detected-------------------- + // + pInfo->img.g_new_pic++; + + // + // Complete previous picture + h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old + //h264_hdr_post_poc(0, 0, use_old); + + // + // Update slice structures: + h264_update_old_slice(pInfo, next_SliceHeader); //cur->old; next->cur; + + // + // 1) if resolution change: reset dpb + // 2) else: init frame store + h264_update_img_info(pInfo); //img, dpb + + // + ///----------------- New frame.boundary detected-------------------- + // + pInfo->img.second_field = h264_is_second_field(pInfo); + if (pInfo->img.second_field == 0) + { + pInfo->img.g_new_frame = 1; + h264_dpb_update_queue_dangling_field(pInfo); + + // + /// DPB management + /// 1) check the gaps + /// 2) assign fs for non-exist frames + /// 3) fill the gaps + /// 4) store frame into DPB if ... 
+ // + //if(pInfo->SliceHeader.redundant_pic_cnt) + { + h264_dpb_gaps_in_frame_num_mem_management(pInfo); + } #ifdef DUMP_HEADER_INFO - dump_new_picture_attr(pInfo, pInfo->SliceHeader.frame_num); + dump_new_picture_attr(pInfo, pInfo->SliceHeader.frame_num); #endif - } - // - /// Decoding POC - h264_hdr_decoding_poc (pInfo, 0, 0); - - // - /// Init Frame Store for next frame - h264_dpb_init_frame_store (pInfo); - pInfo->img.current_slice_num = 1; - - if(pInfo->SliceHeader.first_mb_in_slice != 0) - { - ////Come here means we have slice lost at the beginning, since no FMO support - pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17); - } - - // - /// Emit out the New Frame - if(pInfo->img.g_new_frame) + } + // + /// Decoding POC + h264_hdr_decoding_poc (pInfo, 0, 0); + + // + /// Init Frame Store for next frame + h264_dpb_init_frame_store (pInfo); + pInfo->img.current_slice_num = 1; + + if (pInfo->SliceHeader.first_mb_in_slice != 0) + { + ////Come here means we have slice lost at the beginning, since no FMO support + pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17); + } + + // + /// Emit out the New Frame + if (pInfo->img.g_new_frame) { - h264_parse_emit_start_new_frame(parent, pInfo); + h264_parse_emit_start_new_frame(parent, pInfo); } - + h264_parse_emit_current_pic(parent, pInfo); - } - else ///////////////////////////////////////////////////// If Not a picture start - { - // - /// Update slice structures: cur->old; next->cur; - h264_update_old_slice(pInfo, next_SliceHeader); - - // - /// 1) if resolution change: reset dpb - /// 2) else: update img info - h264_update_img_info(pInfo); - } - - - ////////////////////////////////////////////////////////////// - // Step 4: DPB reference list init and reordering - ////////////////////////////////////////////////////////////// - - //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field - h264_update_frame_type(pInfo); - - - h264_dpb_update_ref_lists( pInfo); - -#ifdef DUMP_HEADER_INFO - dump_ref_list(pInfo); -#endif - /// Emit out the current "good" slice - h264_parse_emit_current_slice(parent, pInfo); - - } - break; - - ///// * Main profile doesn't support Data Partition, skipped.... *//// - case h264_NAL_UNIT_TYPE_DPA: - case h264_NAL_UNIT_TYPE_DPB: - case h264_NAL_UNIT_TYPE_DPC: - //OS_INFO("***********************DP feature, not supported currently*******************\n"); - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - status = H264_STATUS_NOTSUPPORT; - break; - - //// * Parsing SEI info *//// - case h264_NAL_UNIT_TYPE_SEI: - status = H264_STATUS_OK; - - //OS_INFO("*****************************SEI**************************************\n"); - if(pInfo->sps_valid){ - //h264_user_data_t user_data; /// Replace with tmp buffer while porting to FW - pInfo->number_of_first_au_info_nal_before_first_slice++; - /// parsing the SEI info - status = h264_Parse_Supplemental_Enhancement_Information_Message(parent, pInfo); - } - - //h264_rbsp_trailing_bits(pInfo); - break; - case h264_NAL_UNIT_TYPE_SPS: - { - //OS_INFO("*****************************SPS**************************************\n"); - /// - /// Can not define local SPS since the Current local stack size limitation! 
- /// Could be changed after the limitation gone - /// - uint8_t old_sps_id=0; - vui_seq_parameters_t_not_used vui_seq_not_used; - - old_sps_id = pInfo->active_SPS.seq_parameter_set_id; - h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used)); - - - status = h264_Parse_SeqParameterSet(parent, pInfo, &(pInfo->active_SPS), &vui_seq_not_used, (int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL); - if(status == H264_STATUS_OK) { - h264_Parse_Copy_Sps_To_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_SPS.seq_parameter_set_id); + } + else ///////////////////////////////////////////////////// If Not a picture start + { + // + /// Update slice structures: cur->old; next->cur; + h264_update_old_slice(pInfo, next_SliceHeader); + + // + /// 1) if resolution change: reset dpb + /// 2) else: update img info + h264_update_img_info(pInfo); + } + + + ////////////////////////////////////////////////////////////// + // Step 4: DPB reference list init and reordering + ////////////////////////////////////////////////////////////// + + //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field + h264_update_frame_type(pInfo); + + + h264_dpb_update_ref_lists( pInfo); + +#ifdef DUMP_HEADER_INFO + dump_ref_list(pInfo); +#endif + /// Emit out the current "good" slice + h264_parse_emit_current_slice(parent, pInfo); + + } + break; + + ///// * Main profile doesn't support Data Partition, skipped.... *//// + case h264_NAL_UNIT_TYPE_DPA: + case h264_NAL_UNIT_TYPE_DPB: + case h264_NAL_UNIT_TYPE_DPC: + //OS_INFO("***********************DP feature, not supported currently*******************\n"); + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + status = H264_STATUS_NOTSUPPORT; + break; + + //// * Parsing SEI info *//// + case h264_NAL_UNIT_TYPE_SEI: + status = H264_STATUS_OK; + + //OS_INFO("*****************************SEI**************************************\n"); + if (pInfo->sps_valid) { + //h264_user_data_t user_data; /// Replace with tmp buffer while porting to FW + pInfo->number_of_first_au_info_nal_before_first_slice++; + /// parsing the SEI info + status = h264_Parse_Supplemental_Enhancement_Information_Message(parent, pInfo); + } + + //h264_rbsp_trailing_bits(pInfo); + break; + case h264_NAL_UNIT_TYPE_SPS: + { + //OS_INFO("*****************************SPS**************************************\n"); + /// + /// Can not define local SPS since the Current local stack size limitation! 
+ /// Could be changed after the limitation gone + /// + uint8_t old_sps_id=0; + vui_seq_parameters_t_not_used vui_seq_not_used; + + old_sps_id = pInfo->active_SPS.seq_parameter_set_id; + h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used)); + + + status = h264_Parse_SeqParameterSet(parent, pInfo, &(pInfo->active_SPS), &vui_seq_not_used, (int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL); + if (status == H264_STATUS_OK) { + h264_Parse_Copy_Sps_To_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_SPS.seq_parameter_set_id); pInfo->sps_valid = 1; - - if(1==pInfo->active_SPS.pic_order_cnt_type) { - h264_Parse_Copy_Offset_Ref_Frames_To_DDR(pInfo,(int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL,pInfo->active_SPS.seq_parameter_set_id); - } + + if (1==pInfo->active_SPS.pic_order_cnt_type) { + h264_Parse_Copy_Offset_Ref_Frames_To_DDR(pInfo,(int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL,pInfo->active_SPS.seq_parameter_set_id); + } #ifdef DUMP_HEADER_INFO - dump_sps(&(pInfo->active_SPS)); + dump_sps(&(pInfo->active_SPS)); #endif - - } - ///// Restore the active SPS if new arrival's id changed - if(old_sps_id>=MAX_NUM_SPS) { - h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used)); - pInfo->active_SPS.seq_parameter_set_id = 0xff; - } - else { - if(old_sps_id!=pInfo->active_SPS.seq_parameter_set_id) { - h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); - } - else { - //h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set)); - pInfo->active_SPS.seq_parameter_set_id = 0xff; - } - } - - pInfo->number_of_first_au_info_nal_before_first_slice++; - } - break; - case h264_NAL_UNIT_TYPE_PPS: - { - //OS_INFO("*****************************PPS**************************************\n"); - - uint32_t old_sps_id = pInfo->active_SPS.seq_parameter_set_id; - uint32_t old_pps_id = pInfo->active_PPS.pic_parameter_set_id; - - h264_memset(&pInfo->active_PPS, 0x0, sizeof(pic_param_set)); - pInfo->number_of_first_au_info_nal_before_first_slice++; - - if (h264_Parse_PicParameterSet(parent, pInfo, &pInfo->active_PPS)== H264_STATUS_OK) - { - h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_PPS.seq_parameter_set_id); - if(old_sps_id != pInfo->active_SPS.seq_parameter_set_id) - { - pInfo->Is_SPS_updated = 1; - } - if(pInfo->active_SPS.seq_parameter_set_id != 0xff) { - h264_Parse_Copy_Pps_To_DDR(pInfo, &pInfo->active_PPS, pInfo->active_PPS.pic_parameter_set_id); - pInfo->got_start = 1; - if(pInfo->sei_information.recovery_point) - { - pInfo->img.recovery_point_found |= 2; - - //// Enable the RP recovery if no IDR ---Cisco - if((pInfo->img.recovery_point_found & 1)==0) + + } + ///// Restore the active SPS if new arrival's id changed + if (old_sps_id>=MAX_NUM_SPS) { + h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used)); + pInfo->active_SPS.seq_parameter_set_id = 0xff; + } + else { + if (old_sps_id!=pInfo->active_SPS.seq_parameter_set_id) { + h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); + } + else { + //h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set)); + pInfo->active_SPS.seq_parameter_set_id = 0xff; + } + } + + pInfo->number_of_first_au_info_nal_before_first_slice++; + } + break; + case h264_NAL_UNIT_TYPE_PPS: + { + //OS_INFO("*****************************PPS**************************************\n"); + + uint32_t old_sps_id = pInfo->active_SPS.seq_parameter_set_id; + uint32_t old_pps_id = pInfo->active_PPS.pic_parameter_set_id; + + h264_memset(&pInfo->active_PPS, 0x0, sizeof(pic_param_set)); + 
pInfo->number_of_first_au_info_nal_before_first_slice++; + + if (h264_Parse_PicParameterSet(parent, pInfo, &pInfo->active_PPS)== H264_STATUS_OK) + { + h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_PPS.seq_parameter_set_id); + if (old_sps_id != pInfo->active_SPS.seq_parameter_set_id) + { + pInfo->Is_SPS_updated = 1; + } + if (pInfo->active_SPS.seq_parameter_set_id != 0xff) { + h264_Parse_Copy_Pps_To_DDR(pInfo, &pInfo->active_PPS, pInfo->active_PPS.pic_parameter_set_id); + pInfo->got_start = 1; + if (pInfo->sei_information.recovery_point) + { + pInfo->img.recovery_point_found |= 2; + + //// Enable the RP recovery if no IDR ---Cisco + if ((pInfo->img.recovery_point_found & 1)==0) pInfo->sei_rp_received = 1; - } - } - else - { - h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); - } - #ifdef DUMP_HEADER_INFO - dump_pps(&(pInfo->active_PPS)); - #endif - } else { - if(old_sps_idactive_SPS), old_sps_id); - if(old_pps_idactive_PPS), old_pps_id); - } - - } //// End of PPS parsing - break; - - - case h264_NAL_UNIT_TYPE_EOSeq: - case h264_NAL_UNIT_TYPE_EOstream: - - h264_parse_emit_eos(parent, pInfo); - h264_init_dpb(&(pInfo->dpb)); - - pInfo->is_current_workload_done=1; - - /* picture level info which will always be initialized */ - //h264_init_Info_under_sps_pps_level(pInfo); - - ////reset the pInfo here - //viddec_h264_init(ctxt, (uint32_t *)parser->sps_pps_ddr_paddr, false); - - - status = H264_STATUS_OK; - pInfo->number_of_first_au_info_nal_before_first_slice++; - break; - - case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + } + } + else + { + h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); + } +#ifdef DUMP_HEADER_INFO + dump_pps(&(pInfo->active_PPS)); +#endif + } else { + if (old_sps_idactive_SPS), old_sps_id); + if (old_pps_idactive_PPS), old_pps_id); + } + + } //// End of PPS parsing + break; + + + case h264_NAL_UNIT_TYPE_EOSeq: + case h264_NAL_UNIT_TYPE_EOstream: + + h264_parse_emit_eos(parent, pInfo); + h264_init_dpb(&(pInfo->dpb)); + + pInfo->is_current_workload_done=1; + + /* picture level info which will always be initialized */ + //h264_init_Info_under_sps_pps_level(pInfo); + + ////reset the pInfo here + //viddec_h264_init(ctxt, (uint32_t *)parser->sps_pps_ddr_paddr, false); + + + status = H264_STATUS_OK; + pInfo->number_of_first_au_info_nal_before_first_slice++; + break; + + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: #if 1 - ///// primary_pic_type - { - uint32_t code = 0xff; - int32_t ret = 0; - ret = viddec_pm_get_bits(parent, (uint32_t *)&(code), 3); - - if(ret != -1) { - //if(pInfo->got_start && (code == 0)) - //{ - //pInfo->img.recovery_point_found |= 4; - //} - pInfo->primary_pic_type_plus_one = (uint8_t)(code)+1; - status = H264_STATUS_OK; - } - pInfo->number_of_first_au_info_nal_before_first_slice++; - break; - } + ///// primary_pic_type + { + uint32_t code = 0xff; + int32_t ret = 0; + ret = viddec_pm_get_bits(parent, (uint32_t *)&(code), 3); + + if (ret != -1) { + //if(pInfo->got_start && (code == 0)) + //{ + //pInfo->img.recovery_point_found |= 4; + //} + pInfo->primary_pic_type_plus_one = (uint8_t)(code)+1; + status = H264_STATUS_OK; + } + pInfo->number_of_first_au_info_nal_before_first_slice++; + break; + } #endif - - case h264_NAL_UNIT_TYPE_Reserved1: - case h264_NAL_UNIT_TYPE_Reserved2: - case h264_NAL_UNIT_TYPE_Reserved3: - case h264_NAL_UNIT_TYPE_Reserved4: - case h264_NAL_UNIT_TYPE_Reserved5: - status = H264_STATUS_OK; - pInfo->number_of_first_au_info_nal_before_first_slice++; - break; - - case 
h264_NAL_UNIT_TYPE_filler_data: - status = H264_STATUS_OK; - break; - case h264_NAL_UNIT_TYPE_ACP: - break; - case h264_NAL_UNIT_TYPE_SPS_extension: - case h264_NAL_UNIT_TYPE_unspecified: - case h264_NAL_UNIT_TYPE_unspecified2: - status = H264_STATUS_OK; - //nothing - break; - default: - status = H264_STATUS_OK; - break; - } - - //pInfo->old_nal_unit_type = pInfo->nal_unit_type; - switch ( pInfo->nal_unit_type ) - { - case h264_NAL_UNIT_TYPE_IDR: - case h264_NAL_UNIT_TYPE_SLICE: - case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: - case h264_NAL_UNIT_TYPE_SPS: - case h264_NAL_UNIT_TYPE_PPS: - case h264_NAL_UNIT_TYPE_SEI: - case h264_NAL_UNIT_TYPE_EOSeq: - case h264_NAL_UNIT_TYPE_EOstream: - case h264_NAL_UNIT_TYPE_Reserved1: - case h264_NAL_UNIT_TYPE_Reserved2: - case h264_NAL_UNIT_TYPE_Reserved3: - case h264_NAL_UNIT_TYPE_Reserved4: - case h264_NAL_UNIT_TYPE_Reserved5: - { - pInfo->old_nal_unit_type = pInfo->nal_unit_type; - break; - } - default: - break; - } - - return status; + + case h264_NAL_UNIT_TYPE_Reserved1: + case h264_NAL_UNIT_TYPE_Reserved2: + case h264_NAL_UNIT_TYPE_Reserved3: + case h264_NAL_UNIT_TYPE_Reserved4: + case h264_NAL_UNIT_TYPE_Reserved5: + status = H264_STATUS_OK; + pInfo->number_of_first_au_info_nal_before_first_slice++; + break; + + case h264_NAL_UNIT_TYPE_filler_data: + status = H264_STATUS_OK; + break; + case h264_NAL_UNIT_TYPE_ACP: + break; + case h264_NAL_UNIT_TYPE_SPS_extension: + case h264_NAL_UNIT_TYPE_unspecified: + case h264_NAL_UNIT_TYPE_unspecified2: + status = H264_STATUS_OK; + //nothing + break; + default: + status = H264_STATUS_OK; + break; + } + + //pInfo->old_nal_unit_type = pInfo->nal_unit_type; + switch ( pInfo->nal_unit_type ) + { + case h264_NAL_UNIT_TYPE_IDR: + case h264_NAL_UNIT_TYPE_SLICE: + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + case h264_NAL_UNIT_TYPE_SPS: + case h264_NAL_UNIT_TYPE_PPS: + case h264_NAL_UNIT_TYPE_SEI: + case h264_NAL_UNIT_TYPE_EOSeq: + case h264_NAL_UNIT_TYPE_EOstream: + case h264_NAL_UNIT_TYPE_Reserved1: + case h264_NAL_UNIT_TYPE_Reserved2: + case h264_NAL_UNIT_TYPE_Reserved3: + case h264_NAL_UNIT_TYPE_Reserved4: + case h264_NAL_UNIT_TYPE_Reserved5: + { + pInfo->old_nal_unit_type = pInfo->nal_unit_type; + break; + } + default: + break; + } + + return status; } - + @@ -478,60 +478,60 @@ dump_slice_header(pInfo, &next_SliceHeader); static uint32_t viddec_h264_is_frame_start(void *ctxt) { - struct h264_viddec_parser* parser = ctxt; - uint32_t ret = 0; - - h264_Info * pInfo = &(parser->info); + struct h264_viddec_parser* parser = ctxt; + uint32_t ret = 0; + + h264_Info * pInfo = &(parser->info); - if(pInfo->img.g_new_frame) { - ret = 1; - } + if (pInfo->img.g_new_frame) { + ret = 1; + } - return ret; + return ret; } #ifdef VBP uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int next_sc, - uint32_t *codec_specific_errors) + uint32_t *codec_specific_errors) #else static uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors) #endif { - struct h264_viddec_parser* parser = ctxt; - uint32_t ret = VIDDEC_PARSE_SUCESS; - h264_Info * pInfo = &(parser->info); - uint8_t is_stream_forced_to_complete=false; - - is_stream_forced_to_complete = (VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc); - - if(is_stream_forced_to_complete || (pInfo->is_current_workload_done)) - { - viddec_workload_t *wl; - viddec_frame_attributes_t *attrs; - - wl = viddec_pm_get_header( parent ); - attrs = &wl->attrs; - - if((attrs->cont_size.width < 32) || 
(attrs->cont_size.width > 2048) || (attrs->cont_size.height < 32) || (attrs->cont_size.height>2048)) - { - attrs->cont_size.width = 32; - attrs->cont_size.height = 32; - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); - } - - *codec_specific_errors = pInfo->wl_err_curr; - pInfo->wl_err_curr = pInfo->wl_err_next; - pInfo->wl_err_next = 0; - - if(is_stream_forced_to_complete) - { - h264_parse_emit_eos(parent, pInfo); - } - ret = VIDDEC_PARSE_FRMDONE; - } - - return ret; + struct h264_viddec_parser* parser = ctxt; + uint32_t ret = VIDDEC_PARSE_SUCESS; + h264_Info * pInfo = &(parser->info); + uint8_t is_stream_forced_to_complete=false; + + is_stream_forced_to_complete = (VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc); + + if (is_stream_forced_to_complete || (pInfo->is_current_workload_done)) + { + viddec_workload_t *wl; + viddec_frame_attributes_t *attrs; + + wl = viddec_pm_get_header( parent ); + attrs = &wl->attrs; + + if ((attrs->cont_size.width < 32) || (attrs->cont_size.width > 2048) || (attrs->cont_size.height < 32) || (attrs->cont_size.height>2048)) + { + attrs->cont_size.width = 32; + attrs->cont_size.height = 32; + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + } + + *codec_specific_errors = pInfo->wl_err_curr; + pInfo->wl_err_curr = pInfo->wl_err_next; + pInfo->wl_err_next = 0; + + if (is_stream_forced_to_complete) + { + h264_parse_emit_eos(parent, pInfo); + } + ret = VIDDEC_PARSE_FRMDONE; + } + + return ret; } #ifdef VBP @@ -540,22 +540,22 @@ void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) static void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) #endif { - /* Should return size of my structure */ + /* Should return size of my structure */ size->context_size = sizeof(struct h264_viddec_parser); - size->persist_size = MAX_NUM_SPS * sizeof(seq_param_set_all) - + MAX_NUM_PPS * sizeof(pic_param_set) - + MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE - + sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; + size->persist_size = MAX_NUM_SPS * sizeof(seq_param_set_all) + + MAX_NUM_PPS * sizeof(pic_param_set) + + MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE + + sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; } void viddec_h264_get_ops(viddec_parser_ops_t *ops) { - ops->init = viddec_h264_init; + ops->init = viddec_h264_init; - ops->parse_syntax = viddec_h264_parse; - ops->get_cxt_size = viddec_h264_get_context_size; - ops->is_wkld_done = viddec_h264_wkld_done; - ops->is_frame_start = viddec_h264_is_frame_start; - return; + ops->parse_syntax = viddec_h264_parse; + ops->get_cxt_size = viddec_h264_get_context_size; + ops->is_wkld_done = viddec_h264_wkld_done; + ops->is_frame_start = viddec_h264_is_frame_start; + return; } diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c index 2faa136..54c96db 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c @@ -63,8 +63,8 @@ extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo, - int32_t NonExisting, - int32_t use_old); + int32_t NonExisting, + int32_t use_old); extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames); @@ -73,52 
+73,52 @@ extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t ke void h264_translate_parser_info_to_frame_attributes(viddec_workload_t *wl, h264_Info *pInfo) { - viddec_frame_attributes_t *attrs = &wl->attrs; + viddec_frame_attributes_t *attrs = &wl->attrs; - //// Cont_size - attrs->cont_size.height = pInfo->img.FrameHeightInMbs*16; - attrs->cont_size.width = pInfo->img.PicWidthInMbs*16; + //// Cont_size + attrs->cont_size.height = pInfo->img.FrameHeightInMbs*16; + attrs->cont_size.width = pInfo->img.PicWidthInMbs*16; - //// The following attributes will be updated in slice level - attrs->h264.used_for_reference = 0; - attrs->h264.top_field_first = 0; - attrs->h264.top_field_poc = 0; - attrs->h264.bottom_field_poc = 0; - attrs->h264.field_pic_flag = 0; + //// The following attributes will be updated in slice level + attrs->h264.used_for_reference = 0; + attrs->h264.top_field_first = 0; + attrs->h264.top_field_poc = 0; + attrs->h264.bottom_field_poc = 0; + attrs->h264.field_pic_flag = 0; -#if 1 +#if 1 /// Double check the size late!!!!! - //attrs->h264.cropped_size.width = pInfo->img.PicWidthInMbs*16; - //attrs->h264.cropped_size.height = pInfo->img.PicWidthInMbs*16; - - if( (pInfo->active_SPS.sps_disp.frame_cropping_flag) && - (pInfo->active_SPS.sps_disp.chroma_format_idc < 4)) - { - int32_t CropUnitX, CropUnitY; - int32_t SubWidthC, SubHeightC; - - if(pInfo->active_SPS.sps_disp.chroma_format_idc == 0) - { - CropUnitX = 1; - CropUnitY = 2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag; - } - else - { - SubWidthC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >> 1); - SubHeightC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >>1) - - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) & 0x1); - CropUnitX = SubWidthC; - CropUnitY = SubHeightC * (2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag); - } - - if ((int32_t)attrs->cont_size.height >(pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY)) - { - attrs->cont_size.height -= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY); - //attrs->h264.cropped_size.height-= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY); - } - } + //attrs->h264.cropped_size.width = pInfo->img.PicWidthInMbs*16; + //attrs->h264.cropped_size.height = pInfo->img.PicWidthInMbs*16; + + if ( (pInfo->active_SPS.sps_disp.frame_cropping_flag) && + (pInfo->active_SPS.sps_disp.chroma_format_idc < 4)) + { + int32_t CropUnitX, CropUnitY; + int32_t SubWidthC, SubHeightC; + + if (pInfo->active_SPS.sps_disp.chroma_format_idc == 0) + { + CropUnitX = 1; + CropUnitY = 2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag; + } + else + { + SubWidthC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >> 1); + SubHeightC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >>1) + - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) & 0x1); + CropUnitX = SubWidthC; + CropUnitY = SubHeightC * (2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag); + } + + if ((int32_t)attrs->cont_size.height >(pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY)) + { + attrs->cont_size.height -= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY); + //attrs->h264.cropped_size.height-= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY); + } + } /// Pan-Scan Info #endif @@ -128,515 +128,545 @@ void h264_translate_parser_info_to_frame_attributes(viddec_workload_t *wl, h264_ static void h264_parse_update_frame_attributes(void *parent, h264_Info 
*pInfo) { - viddec_workload_t *wl_cur, *wl_next; - viddec_frame_attributes_t *attrs; - uint8_t frame_type=0; - - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - wl_cur = viddec_pm_get_header( parent ); - attrs = &wl_cur->attrs; - } - else - { - wl_next = viddec_pm_get_next_header (parent); - attrs = &wl_next->attrs; - } - - /////////update frame type - if((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&(0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)) - { - frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_FRAME_OFFSET)) )>> FRAME_TYPE_FRAME_OFFSET; - switch(frame_type) - { - case FRAME_TYPE_IDR: attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; break; - case FRAME_TYPE_I: attrs->frame_type = VIDDEC_FRAME_TYPE_I; break; - case FRAME_TYPE_P: attrs->frame_type = VIDDEC_FRAME_TYPE_P; break; - case FRAME_TYPE_B: attrs->frame_type = VIDDEC_FRAME_TYPE_B; break; - default: attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; break; - } - - attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; - } - else - { - frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_TOP_OFFSET)) )>> FRAME_TYPE_TOP_OFFSET; - switch(frame_type) - { - case FRAME_TYPE_IDR: attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; break; - case FRAME_TYPE_I: attrs->frame_type = VIDDEC_FRAME_TYPE_I; break; - case FRAME_TYPE_P: attrs->frame_type = VIDDEC_FRAME_TYPE_P; break; - case FRAME_TYPE_B: attrs->frame_type = VIDDEC_FRAME_TYPE_B; break; - default: attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; break; - - } - - frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_BOTTOM_OFFSET)) )>> FRAME_TYPE_BOTTOM_OFFSET; - switch(frame_type) - { - case FRAME_TYPE_IDR: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_IDR; break; - case FRAME_TYPE_I: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_I; break; - case FRAME_TYPE_P: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_P; break; - case FRAME_TYPE_B: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_B; break; - default: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; break; - - } - } - - /////////update is_referece flag - attrs->h264.used_for_reference |= (pInfo->SliceHeader.nal_ref_idc == 0)? 
0: 1; - - /////////update POC - attrs->h264.top_field_poc = pInfo->img.toppoc; - attrs->h264.bottom_field_poc = pInfo->img.bottompoc; - - //////// update TFF - if(attrs->h264.top_field_poc <= attrs->h264.bottom_field_poc) { - attrs->h264.top_field_first = 1; - } else { - attrs->h264.top_field_first = 0; - } - - /////// update field_pic_flag - //attrs->h264.field_pic_flag |= (pInfo->SliceHeader.field_pic_flag << pInfo->SliceHeader.bottom_field_flag); - attrs->h264.field_pic_flag |= pInfo->SliceHeader.field_pic_flag; - - return; + viddec_workload_t *wl_cur, *wl_next; + viddec_frame_attributes_t *attrs; + uint8_t frame_type=0; + + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + wl_cur = viddec_pm_get_header( parent ); + attrs = &wl_cur->attrs; + } + else + { + wl_next = viddec_pm_get_next_header (parent); + attrs = &wl_next->attrs; + } + + /////////update frame type + if ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&(0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)) + { + frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_FRAME_OFFSET)) )>> FRAME_TYPE_FRAME_OFFSET; + switch (frame_type) + { + case FRAME_TYPE_IDR: + attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; + break; + case FRAME_TYPE_I: + attrs->frame_type = VIDDEC_FRAME_TYPE_I; + break; + case FRAME_TYPE_P: + attrs->frame_type = VIDDEC_FRAME_TYPE_P; + break; + case FRAME_TYPE_B: + attrs->frame_type = VIDDEC_FRAME_TYPE_B; + break; + default: + attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; + break; + } + + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; + } + else + { + frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_TOP_OFFSET)) )>> FRAME_TYPE_TOP_OFFSET; + switch (frame_type) + { + case FRAME_TYPE_IDR: + attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; + break; + case FRAME_TYPE_I: + attrs->frame_type = VIDDEC_FRAME_TYPE_I; + break; + case FRAME_TYPE_P: + attrs->frame_type = VIDDEC_FRAME_TYPE_P; + break; + case FRAME_TYPE_B: + attrs->frame_type = VIDDEC_FRAME_TYPE_B; + break; + default: + attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; + break; + + } + + frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_BOTTOM_OFFSET)) )>> FRAME_TYPE_BOTTOM_OFFSET; + switch (frame_type) + { + case FRAME_TYPE_IDR: + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_IDR; + break; + case FRAME_TYPE_I: + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_I; + break; + case FRAME_TYPE_P: + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_P; + break; + case FRAME_TYPE_B: + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_B; + break; + default: + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; + break; + + } + } + + /////////update is_referece flag + attrs->h264.used_for_reference |= (pInfo->SliceHeader.nal_ref_idc == 0)? 
0: 1; + + /////////update POC + attrs->h264.top_field_poc = pInfo->img.toppoc; + attrs->h264.bottom_field_poc = pInfo->img.bottompoc; + + //////// update TFF + if (attrs->h264.top_field_poc <= attrs->h264.bottom_field_poc) { + attrs->h264.top_field_first = 1; + } else { + attrs->h264.top_field_first = 0; + } + + /////// update field_pic_flag + //attrs->h264.field_pic_flag |= (pInfo->SliceHeader.field_pic_flag << pInfo->SliceHeader.bottom_field_flag); + attrs->h264.field_pic_flag |= pInfo->SliceHeader.field_pic_flag; + + return; } static void h264_fill_slice_data(h264_Info *pInfo, h264_slice_data * p_slice_data) { - uint32_t data=0; - uint32_t first_mb_in_slice =0; - - - - ////////////fill pic parameters 1 - data = PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT( (pInfo->SliceHeader.nal_ref_idc == 0) ) + - PUT_BSD_PP1_SLICE_TYPE_BITS(pInfo->SliceHeader.slice_type) + - PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(pInfo->active_PPS.weighted_bipred_idc) + - PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(pInfo->active_PPS.weighted_pred_flag) + - PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(pInfo->SliceHeader.num_ref_idx_l0_active) + - PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(pInfo->SliceHeader.num_ref_idx_l1_active); - p_slice_data->h264_bsd_slice_p1 = data; - - - ///////////fill pic parameters 2 - data = PUT_BSD_PP2_CABAC_INIT_IDC_BITS(pInfo->SliceHeader.cabac_init_idc) + - PUT_BSD_PP2_QP_BITS( (pInfo->SliceHeader.slice_qp_delta + pInfo->active_PPS.pic_init_qp_minus26+26) ) + - PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(pInfo->SliceHeader.disable_deblocking_filter_idc) + - PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_alpha_c0_offset_div2) + - PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_beta_offset_div2) + - PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(pInfo->SliceHeader.direct_spatial_mv_pred_flag) + - PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(pInfo->active_PPS.chroma_qp_index_offset) + - PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(pInfo->active_PPS.second_chroma_qp_index_offset); - - p_slice_data->h264_bsd_slice_p2 = data; - - /////////fill slice start - first_mb_in_slice = pInfo->SliceHeader.first_mb_in_slice; - - data = PUT_BSD_SS_START_ADDR_BITS(first_mb_in_slice); - data |= PUT_BSD_SS_SKIP_FS_IDC_BITS( pInfo->h264_list_replacement) | - PUT_BSD_SS_SKIP_TYPE_BIT(0) | - PUT_BSD_SS_SKIP_REWIND_BITS((pInfo->img.MbaffFrameFlag? 
2: 3)); - - p_slice_data->h264_bsd_slice_start = data; - + uint32_t data=0; + uint32_t first_mb_in_slice =0; + + + + ////////////fill pic parameters 1 + data = PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT( (pInfo->SliceHeader.nal_ref_idc == 0) ) + + PUT_BSD_PP1_SLICE_TYPE_BITS(pInfo->SliceHeader.slice_type) + + PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(pInfo->active_PPS.weighted_bipred_idc) + + PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(pInfo->active_PPS.weighted_pred_flag) + + PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(pInfo->SliceHeader.num_ref_idx_l0_active) + + PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(pInfo->SliceHeader.num_ref_idx_l1_active); + p_slice_data->h264_bsd_slice_p1 = data; + + + ///////////fill pic parameters 2 + data = PUT_BSD_PP2_CABAC_INIT_IDC_BITS(pInfo->SliceHeader.cabac_init_idc) + + PUT_BSD_PP2_QP_BITS( (pInfo->SliceHeader.slice_qp_delta + pInfo->active_PPS.pic_init_qp_minus26+26) ) + + PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(pInfo->SliceHeader.disable_deblocking_filter_idc) + + PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_alpha_c0_offset_div2) + + PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_beta_offset_div2) + + PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(pInfo->SliceHeader.direct_spatial_mv_pred_flag) + + PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(pInfo->active_PPS.chroma_qp_index_offset) + + PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(pInfo->active_PPS.second_chroma_qp_index_offset); + + p_slice_data->h264_bsd_slice_p2 = data; + + /////////fill slice start + first_mb_in_slice = pInfo->SliceHeader.first_mb_in_slice; + + data = PUT_BSD_SS_START_ADDR_BITS(first_mb_in_slice); + data |= PUT_BSD_SS_SKIP_FS_IDC_BITS( pInfo->h264_list_replacement) | + PUT_BSD_SS_SKIP_TYPE_BIT(0) | + PUT_BSD_SS_SKIP_REWIND_BITS((pInfo->img.MbaffFrameFlag? 2: 3)); + + p_slice_data->h264_bsd_slice_start = data; + } static void h264_parse_emit_4X4_scaling_matrix( void *parent, h264_Info *pInfo ) { - viddec_workload_item_t wi; - - uint32_t i=0, n_items=0; - uint32_t qm_type=0; - - - for( i = 0; i < 6; i++ ) - { - qm_type = FB_QM; - if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first - { - if (pInfo->active_SPS.seq_scaling_list_present_flag[i]) - { - pInfo->qm_present_list |= ((0x1)<active_SPS.UseDefaultScalingMatrix4x4Flag[i]) { - qm_type = DEFAULT_QM; - } else { - qm_type = SPS_QM; + viddec_workload_item_t wi; + + uint32_t i=0, n_items=0; + uint32_t qm_type=0; + + + for ( i = 0; i < 6; i++ ) + { + qm_type = FB_QM; + if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first + { + if (pInfo->active_SPS.seq_scaling_list_present_flag[i]) + { + pInfo->qm_present_list |= ((0x1)<active_SPS.UseDefaultScalingMatrix4x4Flag[i]) { + qm_type = DEFAULT_QM; + } else { + qm_type = SPS_QM; + } } - } - } - - if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps - { - if (pInfo->active_PPS.pic_scaling_list_present_flag[i]) - { - pInfo->qm_present_list |= ((0x1)<active_PPS.UseDefaultScalingMatrix4x4Flag[i]) { - qm_type = DEFAULT_QM; - } else { - qm_type = PPS_QM; + } + + if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps + { + if (pInfo->active_PPS.pic_scaling_list_present_flag[i]) + { + pInfo->qm_present_list |= ((0x1)<active_PPS.UseDefaultScalingMatrix4x4Flag[i]) { + qm_type = DEFAULT_QM; + } else { + qm_type = PPS_QM; + } } - } - else - { - if ((i != 0) && (i != 3) && (i < 6)) { - pInfo->qm_present_list &= ~((0x1)<qm_present_list &= ~((0x1)<active_SPS.ScalingList4x4[i][n_items*8+0]))+ - (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+1]))<<8)+ - 
(((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+2]))<<16)+ - (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+3]))<<24); - wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+4]))+ - (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+5]))<<8)+ - (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+6]))<<16)+ - (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+7]))<<24); - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8); + wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+0]))+ + (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+1]))<<8)+ + (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+2]))<<16)+ + (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+3]))<<24); + wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+4]))+ + (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+5]))<<8)+ + (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+6]))<<16)+ + (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+7]))<<24); + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } break; - } - case (PPS_QM):{ + } + case (PPS_QM): { - for(n_items =0; n_items<2; n_items++) + for (n_items =0; n_items<2; n_items++) { - wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8); - wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+0]))+ - (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+1]))<<8)+ - (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+2]))<<16)+ - (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+3]))<<24); - wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+4]))+ - (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+5]))<<8)+ - (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+6]))<<16)+ - (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+7]))<<24); - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8); + wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+0]))+ + (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+1]))<<8)+ + (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+2]))<<16)+ + (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+3]))<<24); + wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+4]))+ + (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+5]))<<8)+ + (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+6]))<<16)+ + (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+7]))<<24); + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } - break; - } - case (DEFAULT_QM): - { + break; + } + case (DEFAULT_QM): + { wi.data.data_offset = i + (DEFAULT_QM << 4); wi.data.data_payload[0] = 0; - wi.data.data_payload[1] = 0; + wi.data.data_payload[1] = 0; //cur is empty, fill new frame in cur viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - break; - } - default: - { - break; - } - } - } - + break; + } + default: + { + break; + } + } + } + } static void h264_parse_emit_8X8_scaling_matrix( void *parent, h264_Info *pInfo ) { - viddec_workload_item_t 
wi; - - uint32_t i=0, n_items=0; - uint32_t qm_type=0; - - for( i = 6; i < 8; i++ ) - { - qm_type = FB_QM; - if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first - { - if (pInfo->active_SPS.seq_scaling_list_present_flag[i]) - { - pInfo->qm_present_list |= ((0x1)<active_SPS.UseDefaultScalingMatrix8x8Flag[i-6]) - { - qm_type = DEFAULT_QM; - } - else - { - qm_type = SPS_QM; - } - } - } - - if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps - { - if (pInfo->active_PPS.pic_scaling_list_present_flag[i]) - { - pInfo->qm_present_list |= ((0x1)<active_PPS.UseDefaultScalingMatrix8x8Flag[i-6]) + viddec_workload_item_t wi; + + uint32_t i=0, n_items=0; + uint32_t qm_type=0; + + for ( i = 6; i < 8; i++ ) + { + qm_type = FB_QM; + if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first + { + if (pInfo->active_SPS.seq_scaling_list_present_flag[i]) { - qm_type = DEFAULT_QM; + pInfo->qm_present_list |= ((0x1)<active_SPS.UseDefaultScalingMatrix8x8Flag[i-6]) + { + qm_type = DEFAULT_QM; + } + else + { + qm_type = SPS_QM; + } } - else + } + + if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps + { + if (pInfo->active_PPS.pic_scaling_list_present_flag[i]) { - qm_type = PPS_QM; + pInfo->qm_present_list |= ((0x1)<active_PPS.UseDefaultScalingMatrix8x8Flag[i-6]) + { + qm_type = DEFAULT_QM; + } + else + { + qm_type = PPS_QM; + } } - } - } - wi.vwi_type = VIDDEC_WORKLOAD_H264_SCALING_MATRIX; - - // data_offset 0x aa bb cc dd - // bb is the workload item offset - // cc is the qm_type - // dd is the matrix number - // - switch (qm_type) - { - case (SPS_QM): - { - for(n_items =0; n_items<8; n_items++) + } + wi.vwi_type = VIDDEC_WORKLOAD_H264_SCALING_MATRIX; + + // data_offset 0x aa bb cc dd + // bb is the workload item offset + // cc is the qm_type + // dd is the matrix number + // + switch (qm_type) + { + case (SPS_QM): + { + for (n_items =0; n_items<8; n_items++) { - wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8); - wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+0]))+ - (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+ - (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+ - (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+3]))<<24); - wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+4]))+ - (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+ - (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+ - (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+7]))<<24); - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8); + wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+0]))+ + (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+ + (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+ + (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+3]))<<24); + wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+4]))+ + (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+ + (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+ + (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+7]))<<24); + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } break; - } - 
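The data_offset comment in this hunk documents the layout "0x aa bb cc dd": matrix number in the low nibble, qm_type in the next nibble, and the workitem index (n_items) in byte 1, matching the expression i + (qm_type << 4) + (n_items << 8). A sketch of encoding and decoding that word — the enum values here are assumptions for illustration; the real ones are defined elsewhere in the parser:

#include <stdint.h>
#include <stdio.h>

/* Assumed values for illustration only -- the real enum lives in the parser. */
enum { FB_QM = 0, SPS_QM = 1, PPS_QM = 2, DEFAULT_QM = 3 };

/* Encode the scaling-matrix data_offset exactly as the hunk above does. */
static uint32_t qm_offset(uint32_t matrix, uint32_t qm_type, uint32_t item)
{
    return matrix + (qm_type << 4) + (item << 8);
}

int main(void)
{
    uint32_t off = qm_offset(2, SPS_QM, 1);   /* matrix 2, SPS list, item 1 */
    printf("offset=0x%03x matrix=%u qm_type=%u item=%u\n",
           off, off & 0xF, (off >> 4) & 0xF, (off >> 8) & 0xFF);
    return 0;
}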
case (PPS_QM): - { - for(n_items =0; n_items<8; n_items++) + } + case (PPS_QM): + { + for (n_items =0; n_items<8; n_items++) { - wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8); - wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+0]))+ - (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+ - (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+ - (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+3]))<<24); - wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+4]))+ - (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+ - (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+ - (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+7]))<<24); - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8); + wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+0]))+ + (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+ + (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+ + (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+3]))<<24); + wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+4]))+ + (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+ + (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+ + (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+7]))<<24); + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); } - break; - } - case (DEFAULT_QM): - { + break; + } + case (DEFAULT_QM): + { wi.data.data_offset = i + (DEFAULT_QM << 4); wi.data.data_payload[0] = 0; - wi.data.data_payload[1] = 0; + wi.data.data_payload[1] = 0; //cur is empty, fill new frame in cur viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - break; - } - default:{ - break; - } - } - } - + break; + } + default: { + break; + } + } + } + } static void h264_fill_pic_data(h264_Info *pInfo, h264_pic_data * p_pic_data) { - uint32_t data=0; - uint32_t dec_idc =0; - uint32_t frame_structure =0; - - //fill h264_dpb_init - data = PUT_FRAME_WIDTH_MB_BITS(pInfo->dpb.PicWidthInMbs) + - PUT_FRAME_HEIGHT_MB_BITS(pInfo->dpb.FrameHeightInMbs); - - p_pic_data->h264_dpb_init = data; - - ////////////////////////////////file current pic info - data = 0; - dec_idc = pInfo->dpb.fs_dec_idc; - frame_structure = pInfo->img.structure; - if(frame_structure == FRAME) - frame_structure=0; - //data = PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc); - - //p_pic_data->h264_cur_bsd_img_init= data; - - data = PUT_BSD_IMAGE_STRUCTURE_BITS(frame_structure) + - PUT_BSD_IMAGE_IDR_BIT(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + - PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(pInfo->img.MbaffFrameFlag) + - PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(pInfo->active_PPS.entropy_coding_mode_flag) + - PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(pInfo->active_PPS.constrained_intra_pred_flag) + - PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(pInfo->active_SPS.sps_disp.frame_mbs_only_flag) + - PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(pInfo->active_SPS.sps_disp.direct_8x8_inference_flag) + - PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(pInfo->active_PPS.transform_8x8_mode_flag) + - PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(((pInfo->active_SPS.sps_disp.chroma_format_idc==0)? 
0x1: 0x0)) + - PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(0x0) + - PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT((pInfo->active_PPS.pic_scaling_matrix_present_flag||pInfo->active_SPS.seq_scaling_matrix_present_flag)) + - PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->qm_present_list) + - PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(0x1) + - PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc); - - p_pic_data->h264_cur_bsd_img_init= data; - - //to do: add qm list - //PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->img.q .qm_present_list) + - //printf("structure = %d, tpoc = %d, bpoc = %d\n", pInfo->img.structure, pInfo->img.toppoc, pInfo->img.bottompoc); - - if(pInfo->img.structure == FRAME) - { - // Write down POC - p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc; - p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc; - }else if (pInfo->img.structure == TOP_FIELD) - { - // Write down POC - p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc; - p_pic_data->h264_cur_mpr_bf_poc = 0; - } - else if (pInfo->img.structure == BOTTOM_FIELD) - { - // Write down POC - p_pic_data->h264_cur_mpr_tf_poc = 0; - p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc; - } - else - { - // Write down POC - p_pic_data->h264_cur_mpr_tf_poc = 0; - p_pic_data->h264_cur_mpr_bf_poc = 0; - } - - return; + uint32_t data=0; + uint32_t dec_idc =0; + uint32_t frame_structure =0; + + //fill h264_dpb_init + data = PUT_FRAME_WIDTH_MB_BITS(pInfo->dpb.PicWidthInMbs) + + PUT_FRAME_HEIGHT_MB_BITS(pInfo->dpb.FrameHeightInMbs); + + p_pic_data->h264_dpb_init = data; + + ////////////////////////////////file current pic info + data = 0; + dec_idc = pInfo->dpb.fs_dec_idc; + frame_structure = pInfo->img.structure; + if (frame_structure == FRAME) + frame_structure=0; + //data = PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc); + + //p_pic_data->h264_cur_bsd_img_init= data; + + data = PUT_BSD_IMAGE_STRUCTURE_BITS(frame_structure) + + PUT_BSD_IMAGE_IDR_BIT(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + + PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(pInfo->img.MbaffFrameFlag) + + PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(pInfo->active_PPS.entropy_coding_mode_flag) + + PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(pInfo->active_PPS.constrained_intra_pred_flag) + + PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(pInfo->active_SPS.sps_disp.frame_mbs_only_flag) + + PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(pInfo->active_SPS.sps_disp.direct_8x8_inference_flag) + + PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(pInfo->active_PPS.transform_8x8_mode_flag) + + PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(((pInfo->active_SPS.sps_disp.chroma_format_idc==0)? 
0x1: 0x0)) + + PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(0x0) + + PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT((pInfo->active_PPS.pic_scaling_matrix_present_flag||pInfo->active_SPS.seq_scaling_matrix_present_flag)) + + PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->qm_present_list) + + PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(0x1) + + PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc); + + p_pic_data->h264_cur_bsd_img_init= data; + + //to do: add qm list + //PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->img.q .qm_present_list) + + //printf("structure = %d, tpoc = %d, bpoc = %d\n", pInfo->img.structure, pInfo->img.toppoc, pInfo->img.bottompoc); + + if (pInfo->img.structure == FRAME) + { + // Write down POC + p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc; + p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc; + } else if (pInfo->img.structure == TOP_FIELD) + { + // Write down POC + p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc; + p_pic_data->h264_cur_mpr_bf_poc = 0; + } + else if (pInfo->img.structure == BOTTOM_FIELD) + { + // Write down POC + p_pic_data->h264_cur_mpr_tf_poc = 0; + p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc; + } + else + { + // Write down POC + p_pic_data->h264_cur_mpr_tf_poc = 0; + p_pic_data->h264_cur_mpr_bf_poc = 0; + } + + return; } static void h264_parse_emit_sps(void *parent, h264_Info *pInfo) { - viddec_workload_item_t wi; - - if(pInfo->Is_SPS_updated) - { - viddec_fw_reset_workload_item(&wi); - wi.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO; - - viddec_fw_h264_sps_set_profile_idc(&(wi.h264_sps), pInfo->active_SPS.profile_idc); - viddec_fw_h264_sps_set_level_idc(&(wi.h264_sps), pInfo->active_SPS.level_idc); - viddec_fw_h264_sps_set_chroma_format_idc(&(wi.h264_sps), pInfo->active_SPS.sps_disp.chroma_format_idc); - viddec_fw_h264_sps_set_num_ref_frames(&(wi.h264_sps), pInfo->active_SPS.num_ref_frames); - viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(&(wi.h264_sps), pInfo->active_SPS.gaps_in_frame_num_value_allowed_flag); - viddec_fw_h264_sps_set_frame_mbs_only_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_mbs_only_flag); - viddec_fw_h264_sps_set_frame_cropping_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_cropping_flag); - viddec_fw_h264_sps_set_vui_parameters_present_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.vui_parameters_present_flag); - wi.h264_sps.pic_width_in_mbs_minus1 = pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1; - wi.h264_sps.pic_height_in_map_units_minus1 = pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1; - - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - - viddec_fw_reset_workload_item(&wi); - if(pInfo->active_SPS.sps_disp.frame_cropping_flag) - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_CROPPING; - viddec_fw_h264_cropping_set_left(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_left_offset); - viddec_fw_h264_cropping_set_right(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_right_offset); - viddec_fw_h264_cropping_set_top(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_top_offset); - viddec_fw_h264_cropping_set_bottom(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset); - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - viddec_fw_reset_workload_item(&wi); - if(pInfo->active_SPS.sps_disp.vui_parameters_present_flag == 1) - { - wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO; - 
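h264_fill_pic_data above writes only the POC matching the picture structure: a frame carries both field POCs, a field only its own parity, and anything else zeroes both. The same selection as a compact sketch, with illustrative (not the parser's) structure codes:

#include <stdint.h>

/* Structure codes are illustrative, not the parser's actual constants. */
enum pic_struct { TOP_FIELD = 1, BOTTOM_FIELD = 2, FRAME = 3 };

static void select_poc(enum pic_struct s, int32_t toppoc, int32_t bottompoc,
                       int32_t *tf_poc, int32_t *bf_poc)
{
    /* a field picture carries only the POC of its own parity */
    *tf_poc = (s == FRAME || s == TOP_FIELD)    ? toppoc    : 0;
    *bf_poc = (s == FRAME || s == BOTTOM_FIELD) ? bottompoc : 0;
}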
viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag); - viddec_fw_h264_vui_set_video_signal_type_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag); - viddec_fw_h264_vui_set_pic_struct_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag); - viddec_fw_h264_vui_set_timing_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag); - viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag); - viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag); - - if(pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag == 1) - { - viddec_fw_h264_vui_set_aspect_ratio_idc(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc); - if(h264_AR_Extended_SAR == pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc) - { - viddec_fw_h264_vui_set_sar_width(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_width); - viddec_fw_h264_vui_set_sar_height(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_height); - } - } - - - if(pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag) - { - viddec_fw_h264_vui_set_colour_description_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag); - if(pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag) - { - viddec_fw_h264_vui_set_colour_primaries(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_primaries); - viddec_fw_h264_vui_set_transfer_characteristics(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.transfer_characteristics); - } - viddec_fw_h264_vui_set_video_format(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_format); - } - - if(pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1) - { - viddec_fw_h264_vui_set_fixed_frame_rate_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.fixed_frame_rate_flag); - } - - if( (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) - || (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)) - { - viddec_fw_h264_vui_set_low_delay_hrd_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.low_delay_hrd_flag); - } - - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - - viddec_fw_reset_workload_item(&wi); - - if(pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1) - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO; - - wi.h264_vui_time_info.num_units_in_tick = pInfo->active_SPS.sps_disp.vui_seq_parameters.num_units_in_tick; - wi.h264_vui_time_info.time_scale = pInfo->active_SPS.sps_disp.vui_seq_parameters.time_scale; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - pInfo->Is_SPS_updated =0; - - } - - return; + viddec_workload_item_t wi; + + if (pInfo->Is_SPS_updated) + { + viddec_fw_reset_workload_item(&wi); + wi.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO; + + viddec_fw_h264_sps_set_profile_idc(&(wi.h264_sps), 
pInfo->active_SPS.profile_idc); + viddec_fw_h264_sps_set_level_idc(&(wi.h264_sps), pInfo->active_SPS.level_idc); + viddec_fw_h264_sps_set_chroma_format_idc(&(wi.h264_sps), pInfo->active_SPS.sps_disp.chroma_format_idc); + viddec_fw_h264_sps_set_num_ref_frames(&(wi.h264_sps), pInfo->active_SPS.num_ref_frames); + viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(&(wi.h264_sps), pInfo->active_SPS.gaps_in_frame_num_value_allowed_flag); + viddec_fw_h264_sps_set_frame_mbs_only_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_mbs_only_flag); + viddec_fw_h264_sps_set_frame_cropping_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_cropping_flag); + viddec_fw_h264_sps_set_vui_parameters_present_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.vui_parameters_present_flag); + wi.h264_sps.pic_width_in_mbs_minus1 = pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1; + wi.h264_sps.pic_height_in_map_units_minus1 = pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1; + + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + + viddec_fw_reset_workload_item(&wi); + if (pInfo->active_SPS.sps_disp.frame_cropping_flag) + { + wi.vwi_type = VIDDEC_WORKLOAD_H264_CROPPING; + viddec_fw_h264_cropping_set_left(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_left_offset); + viddec_fw_h264_cropping_set_right(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_right_offset); + viddec_fw_h264_cropping_set_top(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_top_offset); + viddec_fw_h264_cropping_set_bottom(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset); + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } + viddec_fw_reset_workload_item(&wi); + if (pInfo->active_SPS.sps_disp.vui_parameters_present_flag == 1) + { + wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO; + viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag); + viddec_fw_h264_vui_set_video_signal_type_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag); + viddec_fw_h264_vui_set_pic_struct_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag); + viddec_fw_h264_vui_set_timing_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag); + viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag); + viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag); + + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag == 1) + { + viddec_fw_h264_vui_set_aspect_ratio_idc(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc); + if (h264_AR_Extended_SAR == pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc) + { + viddec_fw_h264_vui_set_sar_width(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_width); + viddec_fw_h264_vui_set_sar_height(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_height); + } + } + + + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag) + { + viddec_fw_h264_vui_set_colour_description_present_flag(&(wi.h264_vui), 
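The VUI workitem built here forwards sample-aspect-ratio data: when aspect_ratio_idc is Extended_SAR (255 in the H.264 spec), sar_width:sar_height scales the coded sample grid, so the display aspect is (width*sar_w):(height*sar_h). A worked example with invented numbers:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* invented sample: 720x576 coded size with 16:15 SAR gives 4:3 */
    uint32_t width = 720, height = 576;   /* coded picture size */
    uint32_t sar_w = 16,  sar_h = 15;     /* from sar_width/sar_height */
    printf("display aspect = %u:%u\n", width * sar_w, height * sar_h);
    /* 11520:8640 reduces to 4:3 */
    return 0;
}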
pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag); + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag) + { + viddec_fw_h264_vui_set_colour_primaries(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_primaries); + viddec_fw_h264_vui_set_transfer_characteristics(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.transfer_characteristics); + } + viddec_fw_h264_vui_set_video_format(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_format); + } + + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1) + { + viddec_fw_h264_vui_set_fixed_frame_rate_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.fixed_frame_rate_flag); + } + + if ( (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) + || (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)) + { + viddec_fw_h264_vui_set_low_delay_hrd_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.low_delay_hrd_flag); + } + + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } + + viddec_fw_reset_workload_item(&wi); + + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1) + { + wi.vwi_type = VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO; + + wi.h264_vui_time_info.num_units_in_tick = pInfo->active_SPS.sps_disp.vui_seq_parameters.num_units_in_tick; + wi.h264_vui_time_info.time_scale = pInfo->active_SPS.sps_disp.vui_seq_parameters.time_scale; + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } + pInfo->Is_SPS_updated =0; + + } + + return; } @@ -644,107 +674,107 @@ static void h264_parse_emit_sps(void *parent, h264_Info *pInfo) static void h264_parse_emit_ref_list( void *parent, h264_Info *pInfo, uint32_t list_id) { - uint32_t i=0, nitems=0, byte_index=0, data=0, data_writed=0; - uint8_t *p_list; - viddec_workload_item_t wi; - - if(0 == list_id) - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_0; - - if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) ) - { - nitems = pInfo->SliceHeader.num_ref_idx_l0_active; - if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) - { - p_list = pInfo->slice_ref_list0; - } - else - { + uint32_t i=0, nitems=0, byte_index=0, data=0, data_writed=0; + uint8_t *p_list; + viddec_workload_item_t wi; + + if (0 == list_id) + { + wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_0; + + if ( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) ) + { + nitems = pInfo->SliceHeader.num_ref_idx_l0_active; + if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) + { + p_list = pInfo->slice_ref_list0; + } + else + { + p_list = pInfo->dpb.listX_0; + } + } + else + { + nitems =0; p_list = pInfo->dpb.listX_0; - } - } - else - { - nitems =0; - p_list = pInfo->dpb.listX_0; - } - } - else - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_1; - - if( h264_PtypeB==pInfo->SliceHeader.slice_type) - { - nitems = pInfo->SliceHeader.num_ref_idx_l1_active; - if(pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag) - { - p_list = pInfo->slice_ref_list1; - } - else - { + } + } + else + { + wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_1; + + if ( h264_PtypeB==pInfo->SliceHeader.slice_type) + { + nitems = pInfo->SliceHeader.num_ref_idx_l1_active; + if 
(pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag) + { + p_list = pInfo->slice_ref_list1; + } + else + { + p_list = pInfo->dpb.listX_1; + } + } + else + { + nitems = 0; p_list = pInfo->dpb.listX_1; - } - } - else - { - nitems = 0; - p_list = pInfo->dpb.listX_1; - } - - } - - if(0 == nitems) - { - return; - } - - byte_index =0; - data_writed=0; - - - for (i=0; i < 32; i++) - { - if(byte_index == 0) data = 0; - - if(idpb.fs[ (p_list[i]&0x1f) ]))) - { - data |= (pInfo->h264_list_replacement) << byte_index; - } - else - { - data |= (p_list[i] & 0x7f) << byte_index; - } - } - else - { - data |= (0x80) << byte_index; - } - - - if(byte_index == 24) - { - byte_index = 0; - wi.data.data_offset = data_writed&(~0x1); - wi.data.data_payload[data_writed&0x1]=data; - - data =0; - - if(data_writed&0x1) - { - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - data_writed ++; - } - else - { - byte_index += 8; - } - } - + } + + } + + if (0 == nitems) + { + return; + } + + byte_index =0; + data_writed=0; + + + for (i=0; i < 32; i++) + { + if (byte_index == 0) data = 0; + + if (idpb.fs[ (p_list[i]&0x1f) ]))) + { + data |= (pInfo->h264_list_replacement) << byte_index; + } + else + { + data |= (p_list[i] & 0x7f) << byte_index; + } + } + else + { + data |= (0x80) << byte_index; + } + + + if (byte_index == 24) + { + byte_index = 0; + wi.data.data_offset = data_writed&(~0x1); + wi.data.data_payload[data_writed&0x1]=data; + + data =0; + + if (data_writed&0x1) + { + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } + data_writed ++; + } + else + { + byte_index += 8; + } + } + } @@ -752,337 +782,337 @@ static void h264_parse_emit_ref_list( void *parent, h264_Info *pInfo, uint32_t l void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) { - viddec_workload_item_t wi; - h264_slice_data slice_data; - - uint32_t i=0, nitems=0, data=0; - uint32_t bits_offset =0, byte_offset =0; - uint8_t is_emul =0; - - ////////////////////// Update frame attributes///////////////// - h264_parse_update_frame_attributes(parent,pInfo); - - - if(pInfo->SliceHeader.sh_error) { - // Error type definition, refer to viddec_fw_common_defs.h - // if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17) - // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18) - // if this is frame based, both 2 bits should be set - - if(pInfo->push_to_cur) { - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - pInfo->wl_err_curr |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET); - } else { - pInfo->wl_err_next |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - pInfo->wl_err_next |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET); - } - } - - - ////////////////////// Update Reference list ////////////////// - if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) ) - { - if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) - { - nitems = pInfo->SliceHeader.num_ref_idx_l0_active; - - for(i=0; idpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0) + viddec_workload_item_t wi; + h264_slice_data slice_data; + + uint32_t i=0, nitems=0, data=0; + uint32_t bits_offset =0, byte_offset =0; + uint8_t is_emul =0; + + ////////////////////// Update frame attributes///////////////// + h264_parse_update_frame_attributes(parent,pInfo); + + + if (pInfo->SliceHeader.sh_error) { + // Error type definition, refer to viddec_fw_common_defs.h + // if error in top field, 
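h264_parse_emit_ref_list above packs up to 32 one-byte reference indices four per 32-bit word and two words per workitem, padding unused slots with 0x80. A sketch of the same loop under assumed types; item_t stands in for viddec_workload_item_t, and the substitution of h264_list_replacement for non-existent frames is omitted:

#include <stdint.h>

typedef struct { uint32_t data_offset; uint32_t data_payload[2]; } item_t;

static int pack_ref_list(const uint8_t *list, uint32_t nitems, item_t items[4])
{
    uint32_t i, data = 0, byte_index = 0, words = 0;
    int emitted = 0;
    for (i = 0; i < 32; i++) {
        /* real code also swaps in h264_list_replacement for non-existent frames */
        uint32_t v = (i < nitems) ? (list[i] & 0x7f) : 0x80;
        data |= v << byte_index;
        if (byte_index == 24) {
            items[emitted].data_offset = words & ~0x1u;
            items[emitted].data_payload[words & 0x1] = data;
            if (words & 0x1)
                emitted++;            /* an item holds two packed words */
            words++;
            byte_index = 0;
            data = 0;
        } else {
            byte_index += 8;
        }
    }
    return emitted;                   /* 4 items cover all 32 slots */
}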
VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17) + // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18) + // if this is frame based, both 2 bits should be set + + if (pInfo->push_to_cur) { + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + pInfo->wl_err_curr |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET); + } else { + pInfo->wl_err_next |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + pInfo->wl_err_next |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET); + } + } + + + ////////////////////// Update Reference list ////////////////// + if ( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) ) + { + if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) + { + nitems = pInfo->SliceHeader.num_ref_idx_l0_active; + + for (i=0; ih264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80; - break; + if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0) + { + pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80; + break; + } } - } - } - else - { - nitems = pInfo->dpb.listXsize[0]; - - for(i=0; idpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0) + } + else + { + nitems = pInfo->dpb.listXsize[0]; + + for (i=0; ih264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80; - break; + if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0) + { + pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80; + break; + } } - } - } - - } - else - { - nitems =0; - } - /////file ref list 0 - h264_parse_emit_ref_list(parent, pInfo, 0); - - /////file ref list 1 - h264_parse_emit_ref_list(parent, pInfo, 1); - - ///////////////////////////////////// Slice Data //////////////////////////////// - h264_fill_slice_data(pInfo, &slice_data); - - wi.vwi_type = VIDDEC_WORKLOAD_H264_SLICE_REG; - - wi.data.data_offset = slice_data.h264_bsd_slice_start; - wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1; - wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2; - - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - - ///////////////////////////predict weight table item and data if have/////////////////////////// - if(pInfo->h264_pwt_enabled) - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET; - wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1; - wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset; - wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset; - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent , &wi, false); - - wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; - wi.es.es_flags = 0; - viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1); - } - else - { - viddec_pm_append_workitem( parent , &wi, true); - - wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; - wi.es.es_flags = 0; - viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0); - } - } - - - ////////////////////////////////// Update ES Buffer for Slice /////////////////////// - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - - //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset); - - if(pInfo->active_PPS.entropy_coding_mode_flag) - { - if(0!=bits_offset) { - viddec_pm_get_bits(parent, &data, 8-bits_offset); - } - } - else - { - 
if(0!=bits_offset) { - wi.vwi_type = VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET; - wi.data.data_offset = bits_offset; - wi.data.data_payload[0]=0; - wi.data.data_payload[1]=0; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - } - - if(pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_pixeldata( parent ); - } - else - { - viddec_pm_append_pixeldata_next( parent); - } - - return; + } + + } + else + { + nitems =0; + } + /////file ref list 0 + h264_parse_emit_ref_list(parent, pInfo, 0); + + /////file ref list 1 + h264_parse_emit_ref_list(parent, pInfo, 1); + + ///////////////////////////////////// Slice Data //////////////////////////////// + h264_fill_slice_data(pInfo, &slice_data); + + wi.vwi_type = VIDDEC_WORKLOAD_H264_SLICE_REG; + + wi.data.data_offset = slice_data.h264_bsd_slice_start; + wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1; + wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2; + + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + + ///////////////////////////predict weight table item and data if have/////////////////////////// + if (pInfo->h264_pwt_enabled) + { + wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET; + wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1; + wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset; + wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset; + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + viddec_pm_append_workitem( parent , &wi, false); + + wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; + wi.es.es_flags = 0; + viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1); + } + else + { + viddec_pm_append_workitem( parent , &wi, true); + + wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; + wi.es.es_flags = 0; + viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0); + } + } + + + ////////////////////////////////// Update ES Buffer for Slice /////////////////////// + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset); + + if (pInfo->active_PPS.entropy_coding_mode_flag) + { + if (0!=bits_offset) { + viddec_pm_get_bits(parent, &data, 8-bits_offset); + } + } + else + { + if (0!=bits_offset) { + wi.vwi_type = VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET; + wi.data.data_offset = bits_offset; + wi.data.data_payload[0]=0; + wi.data.data_payload[1]=0; + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } + } + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + viddec_pm_append_pixeldata( parent ); + } + else + { + viddec_pm_append_pixeldata_next( parent); + } + + return; } void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) { - viddec_workload_item_t wi; + viddec_workload_item_t wi; - const uint32_t *pl; - uint32_t i=0,nitems=0; + const uint32_t *pl; + uint32_t i=0,nitems=0; - h264_pic_data pic_data; + h264_pic_data pic_data; - pInfo->qm_present_list=0; - - h264_parse_emit_4X4_scaling_matrix(parent, pInfo); - h264_parse_emit_8X8_scaling_matrix(parent, pInfo); + pInfo->qm_present_list=0; - h264_fill_pic_data(pInfo, &pic_data); + h264_parse_emit_4X4_scaling_matrix(parent, pInfo); + 
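The ES-buffer update above treats the two entropy modes differently: CABAC slice data must start byte-aligned, so a nonzero bit offset is consumed with viddec_pm_get_bits; CAVLC data is left alone and the offset is reported in a SH_BITS_OFFSET item instead. Just the arithmetic, as a sketch:

#include <stdint.h>

/* Illustrative sketch of the alignment decision; the parser-manager
 * calls are left out, only the arithmetic is shown. */
static uint32_t bits_to_skip(uint32_t bits_offset, int entropy_coding_mode_flag)
{
    if (bits_offset == 0 || !entropy_coding_mode_flag)
        return 0;               /* aligned already, or CAVLC: report instead */
    return 8 - bits_offset;     /* CABAC: consume up to the next byte edge */
}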
h264_parse_emit_8X8_scaling_matrix(parent, pInfo); - // How many payloads must be generated - nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up + h264_fill_pic_data(pInfo, &pic_data); - pl = (const uint32_t *) &pic_data; + // How many payloads must be generated + nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up - // Dump slice data to an array of workitems, to do pl access non valid mem - for( i = 0; i < nitems; i++ ) - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_PIC_REG; - wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct - wi.data.data_payload[0] = pl[0]; - wi.data.data_payload[1] = pl[1]; - pl += 2; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } + pl = (const uint32_t *) &pic_data; - return; + // Dump slice data to an array of workitems, to do pl access non valid mem + for ( i = 0; i < nitems; i++ ) + { + wi.vwi_type = VIDDEC_WORKLOAD_H264_PIC_REG; + wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct + wi.data.data_payload[0] = pl[0]; + wi.data.data_payload[1] = pl[1]; + pl += 2; + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } + + return; } void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) { - viddec_workload_item_t wi; - uint32_t i=0,nitems=0; - - ///////////////////////// Frame attributes////////////////////////// - - //Push data into current workload if first frame or frame_boundary already detected by non slice nal - if( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal)) - { - viddec_workload_t *wl_cur = viddec_pm_get_header( parent ); - //pInfo->img.g_new_frame = 0; - pInfo->Is_first_frame_in_stream =0; - pInfo->is_frame_boundary_detected_by_non_slice_nal=0; - pInfo->push_to_cur = 1; - h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo); - } - else // move to cur if frame boundary detected by previous non slice nal, or move to next if not - { - viddec_workload_t *wl_next = viddec_pm_get_next_header (parent); - - pInfo->push_to_cur = 0; - h264_translate_parser_info_to_frame_attributes(wl_next, pInfo); - - pInfo->is_current_workload_done=1; - } - - ///////////////////// SPS///////////////////// - h264_parse_emit_sps(parent, pInfo); - - /////////////////////display frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_displayed; - - for(i=0; idpb.frame_id_need_to_be_displayed[i]; - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - pInfo->dpb.frame_numbers_need_to_be_displayed =0; - - - /////////////////////release frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_removed; - - for(i=0; idpb.frame_id_need_to_be_removed[i]; - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - pInfo->dpb.frame_numbers_need_to_be_removed =0; - - /////////////////////flust frames (do not display)///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_dropped; - - for(i=0; idpb.frame_id_need_to_be_dropped[i]; - wi.ref_frame.reference_id = 
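h264_parse_emit_current_pic streams the whole h264_pic_data block out as PIC_REG items, two 32-bit words at a time, with data_offset recording the byte position inside the struct. A generic sketch under an assumed emit callback; as the original's own "pl access non valid mem" comment warns, the QWORD rounding can read past an unpadded struct:

#include <stdint.h>
#include <stddef.h>

/* emit() is an assumed stand-in for viddec_pm_append_workitem(). */
static void dump_regs(const void *blk, size_t size,
                      void (*emit)(uint32_t off, uint32_t lo, uint32_t hi))
{
    const uint32_t *pl = (const uint32_t *)blk;
    size_t i, nitems = (size + 7) / 8;    /* QWORDs, rounded up */
    for (i = 0; i < nitems; i++) {
        /* byte offset of this pair within the block, as above */
        emit((uint32_t)((const uint8_t *)pl - (const uint8_t *)blk),
             pl[0], pl[1]);
        pl += 2;   /* may read past an unpadded struct, like the original */
    }
}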
pInfo->dpb.frame_id_need_to_be_dropped[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - pInfo->dpb.frame_numbers_need_to_be_dropped =0; - - /////////////////////updata DPB frames///////////////////// - nitems = pInfo->dpb.used_size; - for(i=0; idpb.fs_dpb_idc[i]; - - if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0) - { - wi.vwi_type = VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id; - wi.ref_frame.reference_id = fs_id; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - } - - /////////////////////updata dpb frames info (poc)///////////////////// - nitems = pInfo->dpb.used_size; - for(i=0; idpb.fs_dpb_idc[i]; - - if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0) - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_DPB_FRAME_POC; - wi.data.data_offset = fs_id; - //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc); - - switch(viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id]))) - { - case (FRAME):{ - wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc; - wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc; - break; - }; - - case (TOP_FIELD):{ - wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc; - wi.data.data_payload[1] = 0; - break; - }; - - case (BOTTOM_FIELD):{ - wi.data.data_payload[0] = 0; - wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc; - break; - }; - - default : { - wi.data.data_payload[0] = 0; - wi.data.data_payload[1] = 0; - break; - }; - } - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - } - - /////////////////////Alloc buffer for current Existing frame///////////////////// - if(0!=pInfo->dpb.frame_numbers_need_to_be_allocated) - { - if(pInfo->push_to_cur) - { - viddec_workload_t *wl_cur = viddec_pm_get_header (parent); - wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f); - } - else - { - viddec_workload_t *wl_next = viddec_pm_get_next_header (parent); - wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f); - } - } - pInfo->dpb.frame_numbers_need_to_be_allocated =0; - - return; + viddec_workload_item_t wi; + uint32_t i=0,nitems=0; + + ///////////////////////// Frame attributes////////////////////////// + + //Push data into current workload if first frame or frame_boundary already detected by non slice nal + if ( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal)) + { + viddec_workload_t *wl_cur = viddec_pm_get_header( parent ); + //pInfo->img.g_new_frame = 0; + pInfo->Is_first_frame_in_stream =0; + pInfo->is_frame_boundary_detected_by_non_slice_nal=0; + pInfo->push_to_cur = 1; + h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo); + } + else // move to cur if frame boundary detected by previous non slice nal, or move to next if not + { + viddec_workload_t *wl_next = viddec_pm_get_next_header (parent); + + pInfo->push_to_cur = 0; + h264_translate_parser_info_to_frame_attributes(wl_next, pInfo); + + pInfo->is_current_workload_done=1; + } + + ///////////////////// SPS///////////////////// + h264_parse_emit_sps(parent, pInfo); + + 
/////////////////////display frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_FRAME_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    /////////////////////flush frames (do not display)/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_dropped;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_dropped[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+    }
+    pInfo->dpb.frame_numbers_need_to_be_dropped =0;
+
+    /////////////////////update DPB frames/////////////////////
+    nitems = pInfo->dpb.used_size;
+    for (i=0; i<nitems; i++)
+    {
+        uint32_t fs_id;
+
+        fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id;
+            wi.ref_frame.reference_id = fs_id;
+            wi.ref_frame.luma_phys_addr = 0;
+            wi.ref_frame.chroma_phys_addr = 0;
+            //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+        }
+    }
+
+    /////////////////////update dpb frames info (poc)/////////////////////
+    nitems = pInfo->dpb.used_size;
+    for (i=0; i<nitems; i++)
+    {
+        uint32_t fs_id;
+
+        fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_DPB_FRAME_POC;
+            wi.data.data_offset = fs_id;
+            //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc);
+
+            switch (viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id])))
+            {
+            case (FRAME): {
+                wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                break;
+            };
+
+            case (TOP_FIELD): {
+                wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                wi.data.data_payload[1] = 0;
+                break;
+            };
+
+            case (BOTTOM_FIELD): {
+                wi.data.data_payload[0] = 0;
+                wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                break;
+            };
+
+            default : {
+                wi.data.data_payload[0] = 0;
+                wi.data.data_payload[1] = 0;
+                break;
+            };
+            }
+            //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+        }
+    }
+
+    /////////////////////Alloc buffer for current Existing frame/////////////////////
+    if (0!=pInfo->dpb.frame_numbers_need_to_be_allocated)
+    {
+        if (pInfo->push_to_cur)
+        {
+            viddec_workload_t *wl_cur = viddec_pm_get_header (parent);
+            wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+        else
+        {
+            viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+            wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+    return;
 }
@@ -1090,72 +1120,72 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo )
 
 void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
 {
-  uint32_t nitems=0, i=0;
-  viddec_workload_item_t wi;
-
-
-  wi.vwi_type = VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY;
-  wi.ref_frame.reference_id = 0;
-  wi.ref_frame.luma_phys_addr = 0;
-  wi.ref_frame.chroma_phys_addr = 0;
-
-  //cur is empty, fill new frame in cur
-  viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
-
-  //// Now we can flush out all frames in DPB fro display
-
-  if(MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)
-  {
-     if(viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3)
-     {
-        h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc);  //, DANGLING_TYPE_GAP_IN_FRAME
-     }
-  }
-
-
-  h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
-  h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
-
-
-  /////////////////////display frames/////////////////////
-  nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
-
-  for(i=0; i<nitems; i++)
-  {
-     wi.vwi_type = VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
-     wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
-     wi.ref_frame.luma_phys_addr = 0;
-     wi.ref_frame.chroma_phys_addr = 0;
-     //cur is empty, fill new frame in cur
-     viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
-  }
-  pInfo->dpb.frame_numbers_need_to_be_displayed =0;
-
-
-  /////////////////////release frames/////////////////////
-  nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
-
-  for(i=0; i<nitems; i++)
-  {
-     wi.vwi_type = VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
-     wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
-     wi.ref_frame.luma_phys_addr = 0;
-     wi.ref_frame.chroma_phys_addr = 0;
-
-     if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
-     {
-        viddec_pm_append_workitem( parent, &wi , false);
-        viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
-     }
-     else
-     {
-        viddec_pm_append_workitem( parent, &wi , true);
-        viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next);
-     }
-  }
-  pInfo->dpb.frame_numbers_need_to_be_removed =0;
-
-  return;
+    uint32_t nitems=0, i=0;
+    viddec_workload_item_t wi;
+
+
+    wi.vwi_type = VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY;
+    wi.ref_frame.reference_id = 0;
+    wi.ref_frame.luma_phys_addr = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+
+    //cur is empty, fill new frame in cur
+    viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+
+    //// Now we can flush out all frames in DPB for display
+
+    if (MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)
+    {
+        if (viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3)
+        {
+            h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc);  //, DANGLING_TYPE_GAP_IN_FRAME
+        }
+    }
+
+
+    h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+    h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+    /////////////////////display frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            viddec_pm_append_workitem( parent, &wi , false);
+            viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+        }
+        else
+        {
+            viddec_pm_append_workitem( parent, &wi , true);
+            viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    return;
 }
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h b/mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h
index aa2a712..4600f39 100644
--- a/mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h
@@ -4,7 +4,7 @@
 /**
  * mpeg2.h
  * -------
- * This file contains all the necessary enumerations and structures needed from 
+ * This file contains all the necessary enumerations and structures needed from
  * the MPEG-2 Specification.
  */
 
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h b/mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h
index a6d8c2c..22d6236 100644
--- a/mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h
@@ -4,7 +4,7 @@
 /**
  * viddec_mpeg2.h
  * --------------
- * This header file contains all the necessary state information and function 
+ * This header file contains all the necessary state information and function
  * prototypes for the MPEG2 parser. This header also defines the debug macros
  * used by the MPEG2 parser to emit debug messages in host mode.
 */
@@ -98,7 +98,7 @@ typedef enum {
 } mpeg2_wl_status_codes;
 
 /* MPEG2 Parser Workload types */
-typedef enum 
+typedef enum
 {
     /* MPEG2 Decoder Specific data */
     VIDDEC_WORKLOAD_MPEG2_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
@@ -124,16 +124,16 @@ struct mpeg2_workitems
 {
     /* Core Sequence Info 1 */
     uint32_t csi1;
-    
+
     /* Core Sequence Info 2 */
     uint32_t csi2;
-    
+
     /* Core Picture Info 1 */
     uint32_t cpi1;
-    
+
     /* Core Picture Coding Extension Info 1 */
     uint32_t cpce1;
-    
+
     /* Quantization Matrices */
     /* 0-15: Intra Quantization Matrix */
     /* 16-31: Non-Intra Quantization Matrix */
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c
index 6aa6120..0394ec8 100644
--- a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c
@@ -4,7 +4,7 @@
 void viddec_mpeg2_append_workitem(void *parent, viddec_workload_item_t *wi, uint8_t flag)
 {
-	return;
+    return;
 }
 
 void viddec_mpeg2_emit_workload(void *parent, void *ctxt)
@@ -13,13 +13,13 @@ void viddec_mpeg2_emit_workload(void *parent, void *ctxt)
 }
 
 void viddec_mpeg2_append_pixeldata(void *parent, uint8_t flag)
-{ 
-	return;
+{
+    return;
 }
 
 viddec_workload_t* viddec_mpeg2_get_header (void *parent, uint8_t flag)
 {
-    viddec_workload_t *ret; 
+    viddec_workload_t *ret;
     if (flag)
     {
         ret = viddec_pm_get_next_header(parent);
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c
index e33a6d6..310f986 100644
--- a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c
@@ -1,7 +1,7 @@
 /**
  * viddec_mpeg2_frame_attr.c
  * -------------------------
- * This is a helper file for viddec_mpeg2_workload.c to translate the data 
+ * This is a helper file for viddec_mpeg2_workload.c to translate the data
  * stored in the parser context into frame attributes in the workload.
 */
@@ -11,9 +11,9 @@
 static inline void viddec_mpeg2_print_attr(viddec_frame_attributes_t *attr)
 {
     unsigned int index = 0;
-    
-    MPEG2_FA_DEB("Content_Size=%dx%d\n", attr->cont_size.width, 
-            attr->cont_size.height);
+
+    MPEG2_FA_DEB("Content_Size=%dx%d\n", attr->cont_size.width,
+                 attr->cont_size.height);
     MPEG2_FA_DEB("Repeat=%d\n", attr->mpeg2.repeat_first_field);
     MPEG2_FA_DEB("Frame_Type=%d\n", attr->frame_type);
     MPEG2_FA_DEB("Temporal_Reference=%d\n", attr->mpeg2.temporal_ref);
@@ -21,14 +21,14 @@ static inline void viddec_mpeg2_print_attr(viddec_frame_attributes_t *attr)
     MPEG2_FA_DEB("Progressive_Frame=%d\n", attr->mpeg2.progressive_frame);
     MPEG2_FA_DEB("Picture_Struct=%d\n", attr->mpeg2.picture_struct);
     MPEG2_FA_DEB("Pan_Scan_Offsets=%d\n", attr->mpeg2.number_of_frame_center_offsets);
-    
+
     for (index = 0; index < attr->mpeg2.number_of_frame_center_offsets; index++)
     {
         MPEG2_FA_DEB("\tPan_Scan_Offset_%d= %dx%d\n", index,
-            attr->mpeg2.frame_center_offset[index].horz,
-            attr->mpeg2.frame_center_offset[index].vert);
+                     attr->mpeg2.frame_center_offset[index].horz,
+                     attr->mpeg2.frame_center_offset[index].vert);
     }
-    
+
     return;
 }
 
@@ -54,47 +54,54 @@ void viddec_mpeg2_translate_attr(void *parent, void *ctxt)
 {
     /* Get MPEG2 Parser context */
     struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
-    
+
     /* Get workload */
     viddec_workload_t *wl = viddec_pm_get_header( parent );
-    
+
     /* Get attributes in workload */
     viddec_frame_attributes_t *attrs = &wl->attrs;
 
     /* Get the default values for optional attributes */
     viddec_mpeg2_set_default_values(attrs);
-    
+
     /* Populate attributes from parser context */
     /* Content Size */
-    attrs->cont_size.height = ((parser->info.seq_ext.vertical_size_extension << 12) 
-            | parser->info.seq_hdr.vertical_size_value);
-    attrs->cont_size.width  = ((parser->info.seq_ext.horizontal_size_extension << 12) 
-            | parser->info.seq_hdr.horizontal_size_value);
+    attrs->cont_size.height = ((parser->info.seq_ext.vertical_size_extension << 12)
+                               | parser->info.seq_hdr.vertical_size_value);
+    attrs->cont_size.width  = ((parser->info.seq_ext.horizontal_size_extension << 12)
+                               | parser->info.seq_hdr.horizontal_size_value);
 
     /* Repeat field */
     attrs->mpeg2.repeat_first_field = parser->info.pic_cod_ext.repeat_first_field;
 
     /* Temporal Reference */
     attrs->mpeg2.temporal_ref = parser->info.pic_hdr.temporal_reference;
-    
+
     /* Top field first */
     attrs->mpeg2.top_field_first = parser->info.pic_cod_ext.top_field_first;
-    
+
     /* Progressive frame */
     attrs->mpeg2.progressive_frame = parser->info.pic_cod_ext.progressive_frame;
-    
+
     /* Picture Structure */
     attrs->mpeg2.picture_struct = parser->info.pic_cod_ext.picture_structure;
 
     /* Populate the frame type */
     switch (parser->info.pic_hdr.picture_coding_type)
     {
-    case MPEG2_PC_TYPE_I: attrs->frame_type = VIDDEC_FRAME_TYPE_I; break;
-    case MPEG2_PC_TYPE_P: attrs->frame_type = VIDDEC_FRAME_TYPE_P; break;
-    case MPEG2_PC_TYPE_B: attrs->frame_type = VIDDEC_FRAME_TYPE_B; break;
-    default: attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID;
+    case MPEG2_PC_TYPE_I:
+        attrs->frame_type = VIDDEC_FRAME_TYPE_I;
+        break;
+    case MPEG2_PC_TYPE_P:
+        attrs->frame_type = VIDDEC_FRAME_TYPE_P;
+        break;
+    case MPEG2_PC_TYPE_B:
+        attrs->frame_type = VIDDEC_FRAME_TYPE_B;
+        break;
+    default:
+        attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID;
     }
-    
+
     /* Update PanScan data */
     if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_PIC_DISP_EXT)
    {
@@ -106,9 +113,9 @@ void viddec_mpeg2_translate_attr(void *parent, void *ctxt)
             attrs->mpeg2.frame_center_offset[index].vert =
parser->info.pic_disp_ext.frame_center_vertical_offset[index]; } } - + /* Print frame attributes */ viddec_mpeg2_print_attr(attrs); - + return; } diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c index 56604a4..037d6de 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c @@ -4,7 +4,7 @@ * This file contains all the routines to parse the information from MPEG2 * elementary stream and store it in the parser context. Based on the data * parsed, the state information in the context is updated. - * + * * Headers currently parsed from MPEG2 stream include: * - Sequence Header * - Sequence Extension @@ -14,7 +14,7 @@ * - Picture Coding Extension * - Quantization Matrix Extension * - Picture Display Extension - * + * * The slice data is parsed and appended into workload in viddec_mpeg2_parse.c */ @@ -44,7 +44,7 @@ const uint8_t mpeg2_default_non_intra_quant_matrix[MPEG2_QUANT_MAT_SIZE] = { /* Matrix for converting scan order */ const uint8_t mpeg2_classic_scan[MPEG2_QUANT_MAT_SIZE] = { - 0, 1, 8, 16, 9, 2, 3, 10, + 0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5, 12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28, @@ -54,7 +54,7 @@ const uint8_t mpeg2_classic_scan[MPEG2_QUANT_MAT_SIZE] = { 53, 60, 61, 54, 47, 55, 62, 63 }; const uint8_t mpeg2_alternate_scan[MPEG2_QUANT_MAT_SIZE] = { - 0, 8, 16, 24, 1, 9, 2, 10, + 0, 8, 16, 24, 1, 9, 2, 10, 17, 25, 32, 40, 48, 56, 57, 49, 41, 33, 26, 18, 3, 11, 4, 12, 19, 27, 34, 42, 50, 58, 35, 43, @@ -65,11 +65,11 @@ const uint8_t mpeg2_alternate_scan[MPEG2_QUANT_MAT_SIZE] = { }; /* Look-up tables for macro block address increment VLC */ -const uint8_t mb_addr_inc_tab1[16] = { +const uint8_t mb_addr_inc_tab1[16] = { 0, 0, 7, 6, 5, 5, 4, 4, 3, 3, 3, 3, 2, 2, 2, 2 }; -const uint8_t mb_addr_inc_tab2[8] = { +const uint8_t mb_addr_inc_tab2[8] = { 13, 12, 11, 10, 9, 9, 8, 8 }; const uint8_t mb_addr_inc_tab3[40] = { @@ -85,7 +85,7 @@ const uint8_t mb_addr_inc_tab3[40] = { static inline void mpeg2_copy_matrix(const uint8_t *src, uint8_t *dst) { register uint32_t index = 0; - for(index=0; index < MPEG2_QUANT_MAT_SIZE; index++) + for (index=0; index < MPEG2_QUANT_MAT_SIZE; index++) dst[index] = src[index]; } @@ -93,7 +93,7 @@ static inline void mpeg2_copy_matrix(const uint8_t *src, uint8_t *dst) /* matrix */ static inline int32_t mpeg2_get_quant_matrix(void *parent, uint8_t *matrix, uint32_t alternate_scan) { - int32_t ret = 1; + int32_t ret = 1; uint32_t index = 0, code = 0; const uint8_t *zigzag_scan = (const uint8_t *) mpeg2_classic_scan; @@ -111,7 +111,7 @@ static inline int32_t mpeg2_get_quant_matrix(void *parent, uint8_t *matrix, uint /* further parsing is stopped and the existing values are used.*/ if ((ret != 1) || (code == 0)) { - ret = -1; + ret = -1; break; } matrix[zigzag_scan[index]] = (uint8_t)(code & 0xFF); @@ -125,34 +125,34 @@ static inline int32_t mpeg2_get_quant_matrix(void *parent, uint8_t *matrix, uint void viddec_mpeg2_parse_seq_hdr(void *parent, void *ctxt) { int32_t ret_code = 0; - + /* Get MPEG2 Parser context */ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; - + /* Get Horizontal Frame Size */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.horizontal_size_value, 12); - + /* Get Vertical Frame Size */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.vertical_size_value, 12); - + /* Get Frame Aspect 
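mpeg2_get_quant_matrix in the hunk above reads 64 eight-bit coefficients in scan order and scatters them through a zig-zag table into raster order, aborting — and keeping the previously loaded matrix — on a failed read or a zero value. The store step as a sketch; read8 stands in for viddec_pm_get_bits with a width of 8:

#include <stdint.h>

/* read8() is an assumed callback; it returns nonzero on success. */
static int load_matrix(uint8_t matrix[64], const uint8_t scan[64],
                       int (*read8)(uint8_t *out))
{
    uint8_t code;
    int index;
    for (index = 0; index < 64; index++) {
        if (!read8(&code) || code == 0)
            return -1;                /* bad stream: keep the previous matrix */
        matrix[scan[index]] = code;   /* scan order -> raster order */
    }
    return 0;
}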
Ratio */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.aspect_ratio_information, 4); - + /* Get Frame Rate */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.frame_rate_code, 4); - + /* Get Bit Rate */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.bit_rate_value, 18); - + /* Skip Marker bit */ ret_code |= viddec_pm_skip_bits(parent, 1); - + /* Get VBV Buffer Size Value */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.vbv_buffer_size_value, 10); - + /* Get Constrained Parameters Flag */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.constrained_parameters_flag, 1); - + /* Quantization Matrix Support */ /* Get Intra Quantizer matrix, if available or use default values */ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_intra_quantiser_matrix, 1); @@ -169,7 +169,7 @@ void viddec_mpeg2_parse_seq_hdr(void *parent, void *ctxt) mpeg2_copy_matrix(mpeg2_default_intra_quant_matrix, parser->info.qnt_mat.chroma_intra_quantiser_matrix); } } - + /* Get Non-Intra Qualtizer matrix, if available or use default values */ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_non_intra_quantiser_matrix, 1); if (parser->info.qnt_ext.load_non_intra_quantiser_matrix) @@ -185,7 +185,7 @@ void viddec_mpeg2_parse_seq_hdr(void *parent, void *ctxt) mpeg2_copy_matrix(mpeg2_default_non_intra_quant_matrix, parser->info.qnt_mat.chroma_non_intra_quantiser_matrix); } } - + /* Error handling */ /* The return value from get_bits() function is accumulated. If the return value is not 1, */ /* then there was an error getting the required information from the stream and the status */ @@ -200,7 +200,7 @@ void viddec_mpeg2_parse_seq_hdr(void *parent, void *ctxt) /* So, if in the future, there is an error parsing quantization matrix, the */ /* parser will use the previously parsed custom values. 
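In other words, mpeg2_custom_qmat_parsed behaves as a sticky flag: once
either load bit has been seen, custom matrices remain in force for the
rest of the sequence. Note also that mpeg2_get_quant_matrix() de-zigzags
as it reads - the stream carries the 64 entries in scan order and each
byte lands at its raster position:

    matrix[zigzag_scan[index]] = (uint8_t)(code & 0xFF);

with zigzag_scan pointing at mpeg2_classic_scan, or at
mpeg2_alternate_scan when the alternate_scan argument is set.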
*/ if ((parser->info.qnt_ext.load_intra_quantiser_matrix) - || (parser->info.qnt_ext.load_non_intra_quantiser_matrix)) + || (parser->info.qnt_ext.load_non_intra_quantiser_matrix)) { parser->mpeg2_custom_qmat_parsed = true; } @@ -212,12 +212,12 @@ void viddec_mpeg2_parse_seq_hdr(void *parent, void *ctxt) parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_HDR; MPEG2_DEB("Sequence header corrupted.\n"); } - + parser->mpeg2_stream = false; parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ; parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ; parser->mpeg2_stream_level = MPEG2_LEVEL_SEQ; - + return; } @@ -226,17 +226,17 @@ void viddec_mpeg2_parse_seq_hdr(void *parent, void *ctxt) void viddec_mpeg2_parse_gop_hdr(void *parent, void *ctxt) { int32_t ret_code = 0; - + /* Get MPEG2 Parser context */ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; - + /* Skip first 25 bits */ /* Skip time_code */ ret_code |= viddec_pm_skip_bits(parent, 25); - + /* Get closed gop info */ ret_code |= viddec_pm_get_bits(parent, &parser->info.gop_hdr.closed_gop, 1); - + /* Get broken link info */ ret_code |= viddec_pm_get_bits(parent, &parser->info.gop_hdr.broken_link, 1); @@ -249,8 +249,8 @@ void viddec_mpeg2_parse_gop_hdr(void *parent, void *ctxt) parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_GOP_HDR; MPEG2_DEB("GOP header corrupted.\n"); } - - parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_GOP; + + parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_GOP; parser->mpeg2_stream_level = MPEG2_LEVEL_GOP; return; @@ -274,8 +274,8 @@ void viddec_mpeg2_parse_pic_hdr(void *parent, void *ctxt) /* Error Handling and Concealment */ /* Picture coding type should be one I, P or B */ if ((parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_I) && - (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_P) && - (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_B)) + (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_P) && + (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_B)) { found_error = 1; } @@ -297,7 +297,7 @@ void viddec_mpeg2_parse_pic_hdr(void *parent, void *ctxt) found_error = 1; } } - + /* If there is an error parsing picture coding type, do error concealment and continue. */ if ((ret_code != 1) || (found_error)) { @@ -307,21 +307,21 @@ void viddec_mpeg2_parse_pic_hdr(void *parent, void *ctxt) parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_HDR; MPEG2_DEB("Picture header corrupted.\n"); } - + /* Error concealment for picture coding type - Default to I picture. */ parser->info.pic_hdr.picture_coding_type = MPEG2_PC_TYPE_I; parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_COD_TYPE; MPEG2_DEB("Picture Coding Type corrupted. 
Concealing to I type.\n"); } - + /* Skip next 16 bits */ /* Skip vbv_delay */ ret_code |= viddec_pm_skip_bits(parent, 16); - + /* If Picture Coding type is either P or B then */ /* Get forward vector code */ if ((MPEG2_PC_TYPE_P == parser->info.pic_hdr.picture_coding_type) || - (MPEG2_PC_TYPE_B == parser->info.pic_hdr.picture_coding_type)) + (MPEG2_PC_TYPE_B == parser->info.pic_hdr.picture_coding_type)) { ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.full_pel_forward_vect, 1); ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.forward_f_code, 3); @@ -331,7 +331,7 @@ void viddec_mpeg2_parse_pic_hdr(void *parent, void *ctxt) parser->info.pic_hdr.full_pel_forward_vect = 0; parser->info.pic_hdr.forward_f_code = 0; } - + /* If Picture coding type is B then */ /* Get backward vector code */ if (MPEG2_PC_TYPE_B == parser->info.pic_hdr.picture_coding_type) @@ -344,7 +344,7 @@ void viddec_mpeg2_parse_pic_hdr(void *parent, void *ctxt) parser->info.pic_hdr.full_pel_backward_vect = 0; parser->info.pic_hdr.backward_f_code = 0; } - + if (ret_code == 1) { MPEG2_DEB("Picture header parsed successfully.\n") @@ -355,8 +355,8 @@ void viddec_mpeg2_parse_pic_hdr(void *parent, void *ctxt) parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_HDR; MPEG2_DEB("Picture header corrupted.\n"); } - - parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC; + + parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC; parser->mpeg2_stream_level = MPEG2_LEVEL_PIC; return; @@ -367,19 +367,19 @@ void viddec_mpeg2_parse_pic_hdr(void *parent, void *ctxt) void viddec_mpeg2_parse_ext_seq(void *parent, void *ctxt) { int32_t ret_code = 0; - + /* Get MPEG2 Parser context */ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; - + /* Get Profile and Level info */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.profile_and_level_indication, 8); - + /* Get Progressive Sequence Flag */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.progressive_sequence, 1); - + /* Get Chroma Format */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.chroma_format, 2); - + /* Error Concealment */ /* If there is an error parsing chroma format, do error concealment and continue. */ if ((ret_code != 1) || (parser->info.seq_ext.chroma_format == MPEG2_CF_RESERVED)) @@ -390,34 +390,34 @@ void viddec_mpeg2_parse_ext_seq(void *parent, void *ctxt) parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_EXT; MPEG2_DEB("Sequence extension corrupted.\n") } - + /* Error concealment for chroma format - Default to 4:2:0 */ parser->info.seq_ext.chroma_format = MPEG2_CF_420; parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_CHROMA_FMT; MPEG2_DEB("Chroma Format corrupted. 
Concealing to 4:2:0.\n"); } - + /* Get Content Size Extension Data */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.horizontal_size_extension, 2); ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.vertical_size_extension, 2); - + /* Get Bit Rate Extension */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.bit_rate_extension, 12); - + /* Skip Marker bit */ ret_code |= viddec_pm_skip_bits(parent, 1); - + /* Get VBV Buffer Size Extension Data */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.vbv_buffer_size_extension, 8); - + /* Skip 1 bit */ /* Skip low_delay */ ret_code |= viddec_pm_skip_bits(parent, 1); - + /* Get Frame Rate extension data */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.frame_rate_extension_n, 2); ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.frame_rate_extension_d, 5); - + if (ret_code == 1) { MPEG2_DEB("Sequence extension header parsed successfully.\n") @@ -428,13 +428,13 @@ void viddec_mpeg2_parse_ext_seq(void *parent, void *ctxt) parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_EXT; MPEG2_DEB("Sequence extension corrupted.\n") } - + /* Check if the last parsed start code was that of sequence header. */ /* If true, seq extension followed seq header => MPEG2 Stream */ parser->mpeg2_stream = (parser->mpeg2_last_parsed_sc == MPEG2_SC_SEQ_HDR) ? true:false; parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_EXT; parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_EXT; - + return; } @@ -446,13 +446,13 @@ void viddec_mpeg2_parse_ext_seq_disp(void *parent, void *ctxt) /* Get MPEG2 Parser context */ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; - + /* Get video format */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.video_format, 3); - + /* Check if color description info is present */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.colour_description, 1); - + /* If color description is found, get color primaries info */ /* and transfer characteristics */ if (parser->info.seq_disp_ext.colour_description) @@ -461,12 +461,12 @@ void viddec_mpeg2_parse_ext_seq_disp(void *parent, void *ctxt) ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.transfer_characteristics, 8); ret_code |= viddec_pm_skip_bits(parent, 8); } - + /* Get Display Horizontal Size */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.display_horizontal_size, 14); ret_code |= viddec_pm_skip_bits(parent, 1); ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.display_vertical_size, 14); - + if (ret_code == 1) { MPEG2_DEB("Sequence display extension parsed successfully.\n"); @@ -477,7 +477,7 @@ void viddec_mpeg2_parse_ext_seq_disp(void *parent, void *ctxt) parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_DISP_EXT; MPEG2_DEB("Sequence display extension corrupted.\n") } - + /* Set flag to indicate Sequence Display Extension is present */ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_DISP_EXT; parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_DISP_EXT; @@ -490,22 +490,22 @@ void viddec_mpeg2_parse_ext_seq_disp(void *parent, void *ctxt) void viddec_mpeg2_parse_ext_seq_scal(void *parent, void *ctxt) { int32_t ret_code = 0; - + /* Get MPEG2 Parser context */ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; - + /* Get video format */ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_scal_ext.scalable_mode, 2); - + if (ret_code == 1) { MPEG2_DEB("Sequence scalable extension parsed 
successfully.\n"); } - + /* Set flag to indicate Sequence Display Extension is present */ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_SCAL_EXT; parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_SCAL_EXT; - + return; } @@ -514,22 +514,22 @@ void viddec_mpeg2_parse_ext_seq_scal(void *parent, void *ctxt) void viddec_mpeg2_parse_ext_pic(void *parent, void *ctxt) { int32_t ret_code = 0, found_error = 0; - + /* Get MPEG2 Parser context */ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; - + /* Get Forward/Backward, Horizontal/Vertical codes */ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode00, 4); ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode01, 4); ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode10, 4); ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode11, 4); - + /* Get Intra DC Precision */ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.intra_dc_precision, 2); - + /* Get Picture Structure */ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.picture_structure, 2); - + /* Error Handling and Concealment */ /* Picture structure should be frame, top field or bottom field */ if (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_RESERVED) @@ -544,7 +544,7 @@ void viddec_mpeg2_parse_ext_pic(void *parent, void *ctxt) found_error = 1; } } - + /* If there is an error parsing picture structure, do error concealment and continue. */ if ((ret_code != 1) || (found_error)) { @@ -554,13 +554,13 @@ void viddec_mpeg2_parse_ext_pic(void *parent, void *ctxt) parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_COD_EXT; MPEG2_DEB("Picture coding extension corrupted.\n"); } - + /* Error concealment for picture structure - Default to frame picture. */ parser->info.pic_cod_ext.picture_structure = MPEG2_PIC_STRUCT_FRAME; parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_STRUCT; MPEG2_DEB("Picture Structure corrupted. Concealing to Frame picture.\n"); } - + /* Get flags */ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.top_field_first, 1); ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.frame_pred_frame_dct, 1); @@ -572,12 +572,12 @@ void viddec_mpeg2_parse_ext_pic(void *parent, void *ctxt) ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.chroma_420_type, 1); ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.progressive_frame, 1); ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.composite_display_flag, 1); - + /* Error concealment for frame picture */ - if ((parser->info.pic_cod_ext.top_field_first) - || (parser->info.pic_cod_ext.frame_pred_frame_dct) - || (parser->info.pic_cod_ext.repeat_first_field) - || (parser->info.pic_cod_ext.progressive_frame)) + if ((parser->info.pic_cod_ext.top_field_first) + || (parser->info.pic_cod_ext.frame_pred_frame_dct) + || (parser->info.pic_cod_ext.repeat_first_field) + || (parser->info.pic_cod_ext.progressive_frame)) { if (parser->info.pic_cod_ext.picture_structure != MPEG2_PIC_STRUCT_FRAME) { @@ -586,7 +586,7 @@ void viddec_mpeg2_parse_ext_pic(void *parent, void *ctxt) MPEG2_DEB("Picture Structure corrupted. 
Concealing to Frame picture.\n"); } } - + if (ret_code == 1) { MPEG2_DEB("Picture coding extension parsed successfully.\n"); @@ -597,7 +597,7 @@ void viddec_mpeg2_parse_ext_pic(void *parent, void *ctxt) parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_COD_EXT; MPEG2_DEB("Picture coding extension corrupted.\n"); } - + /* Dangling field detection */ /* If the previous picture is the first field, then the temporal reference number */ /* should match with the second field. Otherwise, one of the fields in the previous */ @@ -637,7 +637,7 @@ void viddec_mpeg2_parse_ext_pic(void *parent, void *ctxt) } } } - + /* Set context variables for interlaced picture handling */ if (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_FRAME) { @@ -654,11 +654,11 @@ void viddec_mpeg2_parse_ext_pic(void *parent, void *ctxt) } /* Set flags */ - parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_COD_EXT; + parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_COD_EXT; parser->mpeg2_prev_temp_ref = parser->info.pic_hdr.temporal_reference; parser->mpeg2_prev_picture_structure = parser->info.pic_cod_ext.picture_structure; - if ((!parser->mpeg2_picture_interlaced) - || ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field))) + if ((!parser->mpeg2_picture_interlaced) + || ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field))) { parser->mpeg2_frame_start = true; } @@ -672,16 +672,16 @@ void viddec_mpeg2_parse_ext_pic_disp(void *parent, void *ctxt) { int32_t ret_code = 0; uint32_t index = 0; - + /* Get MPEG2 Parser context */ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; - + /* Determine number of offsets */ if (parser->info.seq_ext.progressive_sequence) { if (parser->info.pic_cod_ext.repeat_first_field) { - parser->mpeg2_num_pan_scan_offsets = + parser->mpeg2_num_pan_scan_offsets = (parser->info.pic_cod_ext.top_field_first) ? 3 : 2; } else /* Not repeat field */ @@ -691,13 +691,13 @@ void viddec_mpeg2_parse_ext_pic_disp(void *parent, void *ctxt) { /* Check if picture structure is a field */ if ((parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_TOP) || - (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_BOTTOM)) + (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_BOTTOM)) { parser->mpeg2_num_pan_scan_offsets = 1; } else { - parser->mpeg2_num_pan_scan_offsets = + parser->mpeg2_num_pan_scan_offsets = (parser->info.pic_cod_ext.repeat_first_field) ? 
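/* Offset-count summary (matching the branches around this assignment and
 * the picture display extension semantics):
 *
 *   progressive sequence, repeat_first_field=1, top_field_first=1  -> 3
 *   progressive sequence, repeat_first_field=1, top_field_first=0  -> 2
 *   progressive sequence, repeat_first_field=0                     -> 1
 *   interlaced sequence,  field picture (top or bottom)            -> 1
 *   interlaced sequence,  frame picture                            -> this ternary
 */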
3 : 2; } } @@ -710,7 +710,7 @@ void viddec_mpeg2_parse_ext_pic_disp(void *parent, void *ctxt) ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_disp_ext.frame_center_vertical_offset[index], 16); ret_code |= viddec_pm_skip_bits(parent, 1); } - + if (ret_code == 1) { MPEG2_DEB("Picture display extension parsed successfully.\n"); @@ -721,9 +721,9 @@ void viddec_mpeg2_parse_ext_pic_disp(void *parent, void *ctxt) parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_DISP_EXT; MPEG2_DEB("Picture display extension corrupted.\n"); } - + /* Set flag to indicate picture display extension is found */ - parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_DISP_EXT; + parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_DISP_EXT; return; } @@ -732,7 +732,7 @@ void viddec_mpeg2_parse_ext_pic_disp(void *parent, void *ctxt) void viddec_mpeg2_parse_ext_quant(void *parent, void *ctxt) { int32_t ret_code = 0; - + /* Get MPEG2 Parser context */ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; @@ -742,41 +742,41 @@ void viddec_mpeg2_parse_ext_quant(void *parent, void *ctxt) if (parser->info.qnt_ext.load_intra_quantiser_matrix) { ret_code |= mpeg2_get_quant_matrix(parent, - parser->info.qnt_mat.intra_quantiser_matrix, - parser->info.pic_cod_ext.alternate_scan); - mpeg2_copy_matrix(parser->info.qnt_mat.intra_quantiser_matrix, - parser->info.qnt_mat.chroma_intra_quantiser_matrix); + parser->info.qnt_mat.intra_quantiser_matrix, + parser->info.pic_cod_ext.alternate_scan); + mpeg2_copy_matrix(parser->info.qnt_mat.intra_quantiser_matrix, + parser->info.qnt_mat.chroma_intra_quantiser_matrix); } - + /* Get Non-Intra Qualtizer matrix, if available */ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_non_intra_quantiser_matrix, 1); if (parser->info.qnt_ext.load_non_intra_quantiser_matrix) { ret_code |= mpeg2_get_quant_matrix(parent, - parser->info.qnt_mat.non_intra_quantiser_matrix, - parser->info.pic_cod_ext.alternate_scan); + parser->info.qnt_mat.non_intra_quantiser_matrix, + parser->info.pic_cod_ext.alternate_scan); mpeg2_copy_matrix(parser->info.qnt_mat.non_intra_quantiser_matrix, - parser->info.qnt_mat.chroma_non_intra_quantiser_matrix); + parser->info.qnt_mat.chroma_non_intra_quantiser_matrix); } - + /* Get Chroma Intra Quantizer matrix, if available */ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_chroma_intra_quantiser_matrix, 1); if (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) { ret_code |= mpeg2_get_quant_matrix(parent, - parser->info.qnt_mat.chroma_intra_quantiser_matrix, - parser->info.pic_cod_ext.alternate_scan); + parser->info.qnt_mat.chroma_intra_quantiser_matrix, + parser->info.pic_cod_ext.alternate_scan); } - + /* Get Chroma Non-Intra Quantizer matrix, if available */ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix, 1); if (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) { ret_code |= mpeg2_get_quant_matrix(parent, - parser->info.qnt_mat.chroma_non_intra_quantiser_matrix, - parser->info.pic_cod_ext.alternate_scan); + parser->info.qnt_mat.chroma_non_intra_quantiser_matrix, + parser->info.pic_cod_ext.alternate_scan); } - + if (ret_code == 1) { MPEG2_DEB("Quantization matrix extension parsed successfully.\n"); @@ -787,12 +787,12 @@ void viddec_mpeg2_parse_ext_quant(void *parent, void *ctxt) parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_QMAT_EXT; MPEG2_DEB("Quantization matrix extension corrupted.\n"); } - + /* Set quantization matrices updated flag */ if ( 
(parser->info.qnt_ext.load_intra_quantiser_matrix) || - (parser->info.qnt_ext.load_non_intra_quantiser_matrix) || - (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) || - (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) ) + (parser->info.qnt_ext.load_non_intra_quantiser_matrix) || + (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) || + (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) ) { MPEG2_DEB("Custom quantization matrix found.\n"); } @@ -813,36 +813,36 @@ void viddec_mpeg2_parse_ext(void *parent, void *ctxt) switch ( ext_code ) { /* Sequence Extension Info */ - case MPEG2_EXT_SEQ: - viddec_mpeg2_parse_ext_seq(parent, ctxt); - break; + case MPEG2_EXT_SEQ: + viddec_mpeg2_parse_ext_seq(parent, ctxt); + break; /* Sequence Display Extension info */ - case MPEG2_EXT_SEQ_DISP: - viddec_mpeg2_parse_ext_seq_disp(parent, ctxt); - break; + case MPEG2_EXT_SEQ_DISP: + viddec_mpeg2_parse_ext_seq_disp(parent, ctxt); + break; - case MPEG2_EXT_SEQ_SCAL: - viddec_mpeg2_parse_ext_seq_scal(parent, ctxt); - break; + case MPEG2_EXT_SEQ_SCAL: + viddec_mpeg2_parse_ext_seq_scal(parent, ctxt); + break; /* Picture Coding Extension */ - case MPEG2_EXT_PIC_CODING: - viddec_mpeg2_parse_ext_pic(parent, ctxt); - break; + case MPEG2_EXT_PIC_CODING: + viddec_mpeg2_parse_ext_pic(parent, ctxt); + break; /* Picture Display Extension */ - case MPEG2_EXT_PIC_DISP: - viddec_mpeg2_parse_ext_pic_disp(parent, ctxt); - break; + case MPEG2_EXT_PIC_DISP: + viddec_mpeg2_parse_ext_pic_disp(parent, ctxt); + break; /* Quantization Extension*/ - case MPEG2_EXT_QUANT_MAT: - viddec_mpeg2_parse_ext_quant(parent, ctxt); - break; + case MPEG2_EXT_QUANT_MAT: + viddec_mpeg2_parse_ext_quant(parent, ctxt); + break; - default: - break; + default: + break; } /* Switch, on extension type */ return; @@ -860,28 +860,28 @@ void viddec_mpeg2_parse_and_append_user_data(void *parent, void *ctxt) /* Set the user data level (SEQ/GOP/PIC) in the workitem type. */ switch (parser->mpeg2_stream_level) { - case MPEG2_LEVEL_SEQ: - { - wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA; - break; - } - case MPEG2_LEVEL_GOP: - { - wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA; - break; - } - case MPEG2_LEVEL_PIC: - { - wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA; - break; - } - default: - { - wi.vwi_type = VIDDEC_WORKLOAD_INVALID; - break; - } + case MPEG2_LEVEL_SEQ: + { + wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA; + break; + } + case MPEG2_LEVEL_GOP: + { + wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA; + break; } - + case MPEG2_LEVEL_PIC: + { + wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA; + break; + } + default: + { + wi.vwi_type = VIDDEC_WORKLOAD_INVALID; + break; + } + } + /* Read 1 byte of user data and store it in workitem for the current */ /* stream level (SEQ/GOP/PIC). Keep adding data payloads till it reaches */ /* size 11. When it is 11, the maximum user data payload size, append the */ @@ -889,12 +889,12 @@ void viddec_mpeg2_parse_and_append_user_data(void *parent, void *ctxt) /* appended. 
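For example, 30 bytes of user data become three workitems: two full
11-byte payloads plus one 8-byte remainder flushed after the loop. The
chunking step, condensed (names as in the loop below):

    wi.user_data.data_payload[wi.user_data.size++] = user_data;
    if (wi.user_data.size >= 11)   // payload full, flush it
    {
        viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
        wi.user_data.size = 0;     // start the next payload
    }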
*/ wi.user_data.size = 0; memset(&(wi.user_data), 0, sizeof(wi.user_data)); - while(viddec_pm_get_bits(parent, &user_data, MPEG2_BITS_EIGHT) != -1) + while (viddec_pm_get_bits(parent, &user_data, MPEG2_BITS_EIGHT) != -1) { /* Store the valid byte in data payload */ wi.user_data.data_payload[wi.user_data.size] = user_data; wi.user_data.size++; - + /* When size exceeds payload size, append workitem and continue */ if (wi.user_data.size >= 11) { @@ -911,7 +911,7 @@ void viddec_mpeg2_parse_and_append_user_data(void *parent, void *ctxt) viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload); wi.user_data.size = 0; } - + MPEG2_DEB("User data @ Level %d found.\n", parser->mpeg2_stream_level); return; } @@ -944,24 +944,24 @@ static void viddec_mpeg2_get_first_mb_number(void *parent, void *ctxt, uint32_t { uint32_t mb_row = 0, mb_width = 0, prev_mb_addr = 0; uint32_t temp = 0; - + /* Get MPEG2 Parser context */ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; *first_mb = 0; mb_row = ((parser->mpeg2_last_parsed_slice_sc & 0xFF) - 1); mb_width = parser->info.seq_hdr.horizontal_size_value >> 4; prev_mb_addr = (mb_row * mb_width) - 1; - + /* Skip slice start code */ viddec_pm_skip_bits(parent, 32); - + if (parser->info.seq_hdr.vertical_size_value > 2800) { /* Get 3 bits of slice_vertical_position_extension */ viddec_pm_get_bits(parent, &temp, 3); mb_row += (temp << 7); } - + /* Skip proprity_breakpoint if sequence scalable extension is present */ if (parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ_SCAL_EXT) { @@ -971,10 +971,10 @@ static void viddec_mpeg2_get_first_mb_number(void *parent, void *ctxt, uint32_t viddec_pm_skip_bits(parent, 7); } } - + /* Skip quantizer_scale */ viddec_pm_skip_bits(parent, 5); - + /* Skip a few bits with slice information */ temp = 0; viddec_pm_peek_bits(parent, &temp, 1); @@ -992,10 +992,10 @@ static void viddec_mpeg2_get_first_mb_number(void *parent, void *ctxt, uint32_t viddec_pm_peek_bits(parent, &temp, 1); } } - + /* Skip extra_bit_slice flag */ viddec_pm_skip_bits(parent, 1); - + /* Increment prev_mb_addr by 33 for every 11 bits of macroblock_escape string */ temp=0; viddec_pm_peek_bits(parent, &temp, 11); @@ -1006,11 +1006,11 @@ static void viddec_mpeg2_get_first_mb_number(void *parent, void *ctxt, uint32_t temp=0; viddec_pm_peek_bits(parent, &temp, 11); } - + /* Get the mb_addr_increment and add it to prev_mb_addr to get the current mb number. 
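Worked example: for slice start code 0x03 in a 720-pixel-wide sequence,
mb_row = (0x03 & 0xFF) - 1 = 2 and mb_width = 720 >> 4 = 45, giving
prev_mb_addr = 2*45 - 1 = 89; with no escape codes and a decoded
macroblock_address_increment of 1, the first macroblock number is 90.
Each 11-bit macroblock_escape consumed above added 33 to prev_mb_addr
before this point.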
*/ *first_mb = prev_mb_addr + get_mb_addr_increment(&temp); MPEG2_DEB("First MB number in slice is 0x%08X.\n", *first_mb); - + return; } @@ -1021,18 +1021,18 @@ void viddec_mpeg2_parse_and_append_slice_data(void *parent, void *ctxt) uint32_t bit_off=0, start_byte=0, first_mb = 0; uint8_t is_emul=0; viddec_workload_item_t wi; - + /* Get MPEG2 Parser context */ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; - + /* Get current byte position */ viddec_pm_get_au_pos(parent, &bit_off, &start_byte, &is_emul); - + /* Populate wi type */ viddec_mpeg2_get_first_mb_number(parent, ctxt, &first_mb); wi.vwi_type = VIDDEC_WORKLOAD_PIXEL_ES; wi.es.es_flags = (first_mb << 16); - + /* Append data from given byte position as pixel data */ viddec_pm_append_misc_tags(parent, start_byte, (unsigned int) -1, &wi, !parser->mpeg2_use_next_workload); return; diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c index a7b6ef7..83d5340 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c @@ -1,8 +1,8 @@ -/** +/** * viddec_mpeg2_parse.c * -------------------- * This file acts as the main interface between the parser manager and MPEG2 - * parser. All the operations done by the MPEG2 parser are defined here and + * parser. All the operations done by the MPEG2 parser are defined here and * functions pointers for each operation is returned to the parser manager. */ @@ -37,26 +37,26 @@ static void viddec_mpeg2_parser_init parser->mpeg2_prev_temp_ref = 0; parser->mpeg2_num_pan_scan_offsets = 0; - if(preserve) + if (preserve) { - /* Init all picture level header info */ - memset(&parser->info.pic_hdr, 0, sizeof(struct mpeg2_picture_hdr_info)); - memset(&parser->info.pic_cod_ext, 0, sizeof(struct mpeg2_picture_coding_ext_info)); - memset(&parser->info.pic_disp_ext, 0, sizeof(struct mpeg2_picture_disp_ext_info)); + /* Init all picture level header info */ + memset(&parser->info.pic_hdr, 0, sizeof(struct mpeg2_picture_hdr_info)); + memset(&parser->info.pic_cod_ext, 0, sizeof(struct mpeg2_picture_coding_ext_info)); + memset(&parser->info.pic_disp_ext, 0, sizeof(struct mpeg2_picture_disp_ext_info)); } else { - /* Init all header info */ - memset(&parser->info, 0, sizeof(struct mpeg2_info)); + /* Init all header info */ + memset(&parser->info, 0, sizeof(struct mpeg2_info)); - parser->mpeg2_stream = false; - parser->mpeg2_custom_qmat_parsed = false; - parser->mpeg2_valid_seq_hdr_parsed = false; - parser->mpeg2_curr_seq_headers = MPEG2_HEADER_NONE; - } + parser->mpeg2_stream = false; + parser->mpeg2_custom_qmat_parsed = false; + parser->mpeg2_valid_seq_hdr_parsed = false; + parser->mpeg2_curr_seq_headers = MPEG2_HEADER_NONE; + } MPEG2_DEB("MPEG2 Parser: Context Initialized.\n"); - + return; } @@ -125,7 +125,7 @@ static void viddec_mpeg2_get_error_code /* Missing sequence header and irrecoverable flag is set */ if ((!(parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ)) - && (!parser->mpeg2_valid_seq_hdr_parsed)) + && (!parser->mpeg2_valid_seq_hdr_parsed)) { *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_MISSING_SEQ_INFO | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ; @@ -156,7 +156,7 @@ static void viddec_mpeg2_get_error_code { if (wl->is_reference_frame == 0) *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; } - + /* For non-decodable frames, do not set reference info so that the workload */ /* manager does not increment ref count. 
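The error word accumulates with bitwise ORs, so several conditions can
be reported together; a stream with no sequence header, for example,
carries both VIDDEC_FW_WORKLOAD_ERR_MISSING_SEQ_INFO and
VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE. The check just below then
withholds the reference marking for any such frame.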
*/ if (*error_code & VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE) @@ -211,7 +211,7 @@ static uint32_t viddec_mpeg2_is_workload_done viddec_workload_t *wl = viddec_pm_get_header(parent); uint32_t ret = VIDDEC_PARSE_SUCESS; uint32_t frame_boundary = 0; - uint8_t force_frame_complete = 0; + uint8_t force_frame_complete = 0; parent = parent; /* Detect Frame Boundary */ @@ -220,12 +220,12 @@ static uint32_t viddec_mpeg2_is_workload_done { parser->mpeg2_first_slice_flag = false; } - - force_frame_complete = ((VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc)); + + force_frame_complete = ((VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc)); if (force_frame_complete || (frame_boundary && (parser->mpeg2_pic_metadata_complete))) { - if(!force_frame_complete) + if (!force_frame_complete) { parser->mpeg2_wl_status |= MPEG2_WL_COMPLETE; parser->mpeg2_last_parsed_slice_sc = MPEG2_SC_SLICE_MAX; @@ -281,82 +281,82 @@ static mpeg2_status viddec_mpeg2_parse } /* Parse Metadata based on the start code found */ - switch( current_sc ) + switch ( current_sc ) { /* Sequence Start Code */ - case MPEG2_SC_SEQ_HDR: - { - parser->mpeg2_curr_seq_headers = MPEG2_HEADER_NONE; - viddec_mpeg2_parse_seq_hdr(parent, ctxt); - } - break; + case MPEG2_SC_SEQ_HDR: + { + parser->mpeg2_curr_seq_headers = MPEG2_HEADER_NONE; + viddec_mpeg2_parse_seq_hdr(parent, ctxt); + } + break; - /* Picture Start Code */ - case MPEG2_SC_PICTURE: - { - viddec_mpeg2_parse_pic_hdr(parent, ctxt); - } - break; + /* Picture Start Code */ + case MPEG2_SC_PICTURE: + { + viddec_mpeg2_parse_pic_hdr(parent, ctxt); + } + break; - /* Extension Code */ - case MPEG2_SC_EXT: - { - viddec_mpeg2_parse_ext(parent, ctxt); - } - break; + /* Extension Code */ + case MPEG2_SC_EXT: + { + viddec_mpeg2_parse_ext(parent, ctxt); + } + break; - /* Group of Pictures Header */ - case MPEG2_SC_GROUP: - { - viddec_mpeg2_parse_gop_hdr(parent, ctxt); - } - break; + /* Group of Pictures Header */ + case MPEG2_SC_GROUP: + { + viddec_mpeg2_parse_gop_hdr(parent, ctxt); + } + break; - /* Unused Start Code */ - case MPEG2_SC_SEQ_END: - case MPEG2_SC_SEQ_ERR: - break; + /* Unused Start Code */ + case MPEG2_SC_SEQ_END: + case MPEG2_SC_SEQ_ERR: + break; /* User Data */ - case MPEG2_SC_USER_DATA: - { - viddec_mpeg2_parse_and_append_user_data(parent, ctxt); - } - break; + case MPEG2_SC_USER_DATA: + { + viddec_mpeg2_parse_and_append_user_data(parent, ctxt); + } + break; - default: - { - /* Slice Data - Append slice data to the workload */ - if ((current_sc >= MPEG2_SC_SLICE_MIN) && + default: + { + /* Slice Data - Append slice data to the workload */ + if ((current_sc >= MPEG2_SC_SLICE_MIN) && (current_sc <= MPEG2_SC_SLICE_MAX)) + { + if (!parser->mpeg2_first_slice_flag) { - if (!parser->mpeg2_first_slice_flag) - { - /* At this point, all the metadata required by the MPEG2 */ - /* hardware for decoding is extracted and stored. So the */ - /* metadata can be packed into workitems and emitted out.*/ - viddec_mpeg2_emit_workload(parent, ctxt); - - /* If the current picture is progressive or it is the */ - /* second field of interlaced field picture then, set */ - /* the workload done flag. */ - if ((!parser->mpeg2_picture_interlaced) + /* At this point, all the metadata required by the MPEG2 */ + /* hardware for decoding is extracted and stored. 
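(By this point the sequence header and extensions, any GOP header, the
picture header and the picture coding extension have all been parsed
into the context.)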
So the */ + /* metadata can be packed into workitems and emitted out.*/ + viddec_mpeg2_emit_workload(parent, ctxt); + + /* If the current picture is progressive or it is the */ + /* second field of interlaced field picture then, set */ + /* the workload done flag. */ + if ((!parser->mpeg2_picture_interlaced) || ((parser->mpeg2_picture_interlaced) && (!parser->mpeg2_first_field))) - { - parser->mpeg2_pic_metadata_complete = true; - } - else if ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field)) - { - parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE; - } - - parser->mpeg2_first_slice_flag = true; + { + parser->mpeg2_pic_metadata_complete = true; } - parser->mpeg2_last_parsed_slice_sc = current_sc; - viddec_mpeg2_parse_and_append_slice_data(parent, ctxt); - parser->mpeg2_wl_status |= MPEG2_WL_PARTIAL_SLICE; + else if ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field)) + { + parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE; + } + + parser->mpeg2_first_slice_flag = true; } + parser->mpeg2_last_parsed_slice_sc = current_sc; + viddec_mpeg2_parse_and_append_slice_data(parent, ctxt); + parser->mpeg2_wl_status |= MPEG2_WL_PARTIAL_SLICE; } + } } /* Switch */ /* Save last parsed start code */ diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c index 503ded5..42a42a9 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c @@ -1,16 +1,16 @@ /** * viddec_mpeg2_workload.c * ----------------------- - * This file packs the data parsed and stored in the context into workload and + * This file packs the data parsed and stored in the context into workload and * emits it out. The current list of workitems emitter into the workload * include: - * + * * - DMEM - Register Data * - Past and Future picture references * - Quantization matrix data - * + * * Slice data gets appended into the workload in viddec_mpeg2_parse.c - * + * * Also, the frame attributes are updated in the workload. */ @@ -105,7 +105,7 @@ static void viddec_mpeg2_pack_qmat(struct viddec_mpeg2_parser *parser) for (index=0; indexmpeg2_use_next_workload ); wl->is_reference_frame = 0; - + /* Identify the frame type (I, P or B) */ frame_type = parser->info.pic_hdr.picture_coding_type; /* Send reference frame information based on whether the picture is a */ /* frame picture or field picture. 
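The guard just below runs the reorder/display step once per frame; for
an interlaced field pair it runs on the first field only. Reference
workitems are then emitted per picture type, as the switch further down
shows:

    I picture: no references
    P picture: PAST reference
    B picture: PAST and FUTURE references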
*/ if ((!parser->mpeg2_picture_interlaced) - || ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field))) + || ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field))) { /* Check if we need to reorder frame references/send frame for display */ /* in case of I or P type */ @@ -242,22 +242,22 @@ static void viddec_mpeg2_manage_ref(void *parent, void *ctxt) } /* Send reference frame workitems */ - switch(frame_type) + switch (frame_type) { - case MPEG2_PC_TYPE_I: - { - break; - } - case MPEG2_PC_TYPE_P: - { - mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_PAST, parser->mpeg2_use_next_workload); - break; - } - case MPEG2_PC_TYPE_B: - { - mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_PAST, parser->mpeg2_use_next_workload); - mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_FUTURE, parser->mpeg2_use_next_workload); - } + case MPEG2_PC_TYPE_I: + { + break; + } + case MPEG2_PC_TYPE_P: + { + mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_PAST, parser->mpeg2_use_next_workload); + break; + } + case MPEG2_PC_TYPE_B: + { + mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_PAST, parser->mpeg2_use_next_workload); + mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_FUTURE, parser->mpeg2_use_next_workload); + } } /* Set reference information updated flag */ @@ -437,20 +437,20 @@ void viddec_mpeg2_emit_workload(void *parent, void *ctxt) /* Send MPEG2 DMEM workitems */ viddec_mpeg2_append_workitems(parent, - (uint32_t *) &parser->wi, - VIDDEC_WORKLOAD_MPEG2_DMEM, - MPEG2_NUM_DMEM_WL_ITEMS, - parser->mpeg2_use_next_workload); + (uint32_t *) &parser->wi, + VIDDEC_WORKLOAD_MPEG2_DMEM, + MPEG2_NUM_DMEM_WL_ITEMS, + parser->mpeg2_use_next_workload); parser->mpeg2_wl_status |= MPEG2_WL_DMEM_DATA; MPEG2_DEB("Adding %d items as DMEM Data.\n", MPEG2_NUM_DMEM_WL_ITEMS); /* Send MPEG2 Quantization Matrix workitems, if updated */ viddec_mpeg2_pack_qmat(parser); viddec_mpeg2_append_workitems(parent, - (uint32_t *) parser->wi.qmat, - VIDDEC_WORKLOAD_MPEG2_QMAT, - MPEG2_NUM_QMAT_WL_ITEMS, - parser->mpeg2_use_next_workload); + (uint32_t *) parser->wi.qmat, + VIDDEC_WORKLOAD_MPEG2_QMAT, + MPEG2_NUM_QMAT_WL_ITEMS, + parser->mpeg2_use_next_workload); MPEG2_DEB("Adding %d items as QMAT Data.\n", MPEG2_NUM_QMAT_WL_ITEMS); /* Manage reference frames */ diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h b/mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h index 7084161..bb772d4 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h @@ -5,23 +5,23 @@ enum viddec_fw_mp4_ref_frame_id { - VIDDEC_MP4_FRAME_CURRENT = 0, - VIDDEC_MP4_FRAME_PAST = 1, - VIDDEC_MP4_FRAME_FUTURE = 2, - VIDDEC_MP4_FRAME_MAX = 3, + VIDDEC_MP4_FRAME_CURRENT = 0, + VIDDEC_MP4_FRAME_PAST = 1, + VIDDEC_MP4_FRAME_FUTURE = 2, + VIDDEC_MP4_FRAME_MAX = 3, }; enum mp4_workload_item_type { - VIDDEC_WORKLOAD_MP4_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0, - VIDDEC_WORKLOAD_MP4_FUTURE_FRAME, - VIDDEC_WORKLOAD_MP4_VOL_INFO = VIDDEC_WORKLOAD_DECODER_SPECIFIC, - VIDDEC_WORKLOAD_MP4_VOP_INFO, - VIDDEC_WORKLOAD_MP4_BVOP_INFO, - VIDDEC_WORKLOAD_MP4_SPRT_TRAJ, - VIDDEC_WORKLOAD_MP4_IQUANT, - VIDDEC_WORKLOAD_MP4_NIQUANT, - VIDDEC_WORKLOAD_MP4_SVH, + VIDDEC_WORKLOAD_MP4_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0, + VIDDEC_WORKLOAD_MP4_FUTURE_FRAME, + VIDDEC_WORKLOAD_MP4_VOL_INFO = VIDDEC_WORKLOAD_DECODER_SPECIFIC, + VIDDEC_WORKLOAD_MP4_VOP_INFO, + VIDDEC_WORKLOAD_MP4_BVOP_INFO, + VIDDEC_WORKLOAD_MP4_SPRT_TRAJ, + 
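/* The PAST/FUTURE entries above alias the generic reference-frame slots
 * starting at VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0, while VOL_INFO and the
 * items that follow are codec-private, numbered from
 * VIDDEC_WORKLOAD_DECODER_SPECIFIC. */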
VIDDEC_WORKLOAD_MP4_IQUANT, + VIDDEC_WORKLOAD_MP4_NIQUANT, + VIDDEC_WORKLOAD_MP4_SVH, }; enum viddec_fw_mp4_vop_coding_type_t @@ -33,7 +33,7 @@ enum viddec_fw_mp4_vop_coding_type_t }; // This structure contains the information extracted from the Video Object Layer. -// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOL_INFO, using +// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOL_INFO, using // the "vwi_payload" array in viddec_workload_item_t. // TODO: Add default values in the comments for each item typedef struct @@ -47,35 +47,35 @@ typedef struct // 6:6 - resync_marker_disable // 7:7 - data_partitioned // 8:8 - reversible_vlc - #define viddec_fw_mp4_get_reversible_vlc(x) viddec_fw_bitfields_extract((x)->vol_flags, 8, 0x1) - #define viddec_fw_mp4_set_reversible_vlc(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 8, 0x1) - #define viddec_fw_mp4_get_data_partitioned(x) viddec_fw_bitfields_extract((x)->vol_flags, 7, 0x1) - #define viddec_fw_mp4_set_data_partitioned(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 7, 0x1) - #define viddec_fw_mp4_get_resync_marker_disable(x) viddec_fw_bitfields_extract((x)->vol_flags, 6, 0x1) - #define viddec_fw_mp4_set_resync_marker_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 6, 0x1) - #define viddec_fw_mp4_get_quarter_sample(x) viddec_fw_bitfields_extract((x)->vol_flags, 5, 0x1) - #define viddec_fw_mp4_set_quarter_sample(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 5, 0x1) - #define viddec_fw_mp4_get_obmc_disable(x) viddec_fw_bitfields_extract((x)->vol_flags, 4, 0x1) - #define viddec_fw_mp4_set_obmc_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 4, 0x1) - #define viddec_fw_mp4_get_interlaced(x) viddec_fw_bitfields_extract((x)->vol_flags, 3, 0x1) - #define viddec_fw_mp4_set_interlaced(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 3, 0x1) - #define viddec_fw_mp4_get_vol_shape(x) viddec_fw_bitfields_extract((x)->vol_flags, 1, 0x3) - #define viddec_fw_mp4_set_vol_shape(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 1, 0x3) - #define viddec_fw_mp4_get_short_video_header_flag(x) viddec_fw_bitfields_extract((x)->vol_flags, 0, 0x1) - #define viddec_fw_mp4_set_short_video_header_flag(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 0, 0x1) +#define viddec_fw_mp4_get_reversible_vlc(x) viddec_fw_bitfields_extract((x)->vol_flags, 8, 0x1) +#define viddec_fw_mp4_set_reversible_vlc(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 8, 0x1) +#define viddec_fw_mp4_get_data_partitioned(x) viddec_fw_bitfields_extract((x)->vol_flags, 7, 0x1) +#define viddec_fw_mp4_set_data_partitioned(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 7, 0x1) +#define viddec_fw_mp4_get_resync_marker_disable(x) viddec_fw_bitfields_extract((x)->vol_flags, 6, 0x1) +#define viddec_fw_mp4_set_resync_marker_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 6, 0x1) +#define viddec_fw_mp4_get_quarter_sample(x) viddec_fw_bitfields_extract((x)->vol_flags, 5, 0x1) +#define viddec_fw_mp4_set_quarter_sample(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 5, 0x1) +#define viddec_fw_mp4_get_obmc_disable(x) viddec_fw_bitfields_extract((x)->vol_flags, 4, 0x1) +#define viddec_fw_mp4_set_obmc_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 4, 0x1) +#define viddec_fw_mp4_get_interlaced(x) viddec_fw_bitfields_extract((x)->vol_flags, 3, 0x1) +#define viddec_fw_mp4_set_interlaced(x, val) 
viddec_fw_bitfields_insert((x)->vol_flags, val, 3, 0x1) +#define viddec_fw_mp4_get_vol_shape(x) viddec_fw_bitfields_extract((x)->vol_flags, 1, 0x3) +#define viddec_fw_mp4_set_vol_shape(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 1, 0x3) +#define viddec_fw_mp4_get_short_video_header_flag(x) viddec_fw_bitfields_extract((x)->vol_flags, 0, 0x1) +#define viddec_fw_mp4_set_short_video_header_flag(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 0, 0x1) unsigned int vol_flags; - + // Size extracted from the Video Object Layer // 0:12 - width // 13:25 - height // MFD_MPG4VD_MB_PER_ROW can be calculated as (width+15) >> 4 // MFD_MPG4VD_MB_ROWS can be calculated as (height+15) >> 4 - #define viddec_fw_mp4_get_vol_width(x) viddec_fw_bitfields_extract((x)->vol_size, 13, 0x1FFF) - #define viddec_fw_mp4_set_vol_width(x, val) viddec_fw_bitfields_insert((x)->vol_size, val, 13, 0x1FFF) - #define viddec_fw_mp4_get_vol_height(x) viddec_fw_bitfields_extract((x)->vol_size, 0, 0x1FFF) - #define viddec_fw_mp4_set_vol_height(x, val) viddec_fw_bitfields_insert((x)->vol_size, val, 0, 0x1FFF) +#define viddec_fw_mp4_get_vol_width(x) viddec_fw_bitfields_extract((x)->vol_size, 13, 0x1FFF) +#define viddec_fw_mp4_set_vol_width(x, val) viddec_fw_bitfields_insert((x)->vol_size, val, 13, 0x1FFF) +#define viddec_fw_mp4_get_vol_height(x) viddec_fw_bitfields_extract((x)->vol_size, 0, 0x1FFF) +#define viddec_fw_mp4_set_vol_height(x, val) viddec_fw_bitfields_insert((x)->vol_size, val, 0, 0x1FFF) unsigned int vol_size; - + // Sprite, time increments and quantization details from the Video Object Layer // 0:15 - vop_time_increment_resolution // 16:17 - sprite_enable @@ -83,24 +83,24 @@ typedef struct // 24:25 - sprite_warping_accuracy // 26:29 - quant_precision // 30:30 - quant_type - #define viddec_fw_mp4_get_quant_type(x) viddec_fw_bitfields_extract((x)->vol_item, 30, 0x1) - #define viddec_fw_mp4_set_quant_type(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 30, 0x1) - #define viddec_fw_mp4_get_quant_precision(x) viddec_fw_bitfields_extract((x)->vol_item, 26, 0xF) - #define viddec_fw_mp4_set_quant_precision(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 26, 0xF) - #define viddec_fw_mp4_get_sprite_warping_accuracy(x) viddec_fw_bitfields_extract((x)->vol_item, 24, 0x3) - #define viddec_fw_mp4_set_sprite_warping_accuracy(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 24, 0x3) - #define viddec_fw_mp4_get_sprite_warping_points(x) viddec_fw_bitfields_extract((x)->vol_item, 18, 0x3F) - #define viddec_fw_mp4_set_sprite_warping_points(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 18, 0x3F) - #define viddec_fw_mp4_get_sprite_enable(x) viddec_fw_bitfields_extract((x)->vol_item, 16, 0x3) - #define viddec_fw_mp4_set_sprite_enable(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 16, 0x3) - #define viddec_fw_mp4_get_vop_time_increment_resolution(x) viddec_fw_bitfields_extract((x)->vol_item, 0, 0xFFFF) - #define viddec_fw_mp4_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 0, 0xFFFF) +#define viddec_fw_mp4_get_quant_type(x) viddec_fw_bitfields_extract((x)->vol_item, 30, 0x1) +#define viddec_fw_mp4_set_quant_type(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 30, 0x1) +#define viddec_fw_mp4_get_quant_precision(x) viddec_fw_bitfields_extract((x)->vol_item, 26, 0xF) +#define viddec_fw_mp4_set_quant_precision(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 26, 0xF) +#define viddec_fw_mp4_get_sprite_warping_accuracy(x) 
viddec_fw_bitfields_extract((x)->vol_item, 24, 0x3) +#define viddec_fw_mp4_set_sprite_warping_accuracy(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 24, 0x3) +#define viddec_fw_mp4_get_sprite_warping_points(x) viddec_fw_bitfields_extract((x)->vol_item, 18, 0x3F) +#define viddec_fw_mp4_set_sprite_warping_points(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 18, 0x3F) +#define viddec_fw_mp4_get_sprite_enable(x) viddec_fw_bitfields_extract((x)->vol_item, 16, 0x3) +#define viddec_fw_mp4_set_sprite_enable(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 16, 0x3) +#define viddec_fw_mp4_get_vop_time_increment_resolution(x) viddec_fw_bitfields_extract((x)->vol_item, 0, 0xFFFF) +#define viddec_fw_mp4_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 0, 0xFFFF) unsigned int vol_item; } viddec_fw_mp4_vol_info_t; // This structure contains the information extracted from the Video Object Layer. -// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOP_INFO, using +// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOP_INFO, using // the "vwi_payload" array in viddec_workload_item_t. // TODO: Add default values in the comments for each item typedef struct @@ -112,18 +112,18 @@ typedef struct // 11:11 - future_field_frame // 12:16 - past_frame_id // 17:17 - past_field_frame - #define viddec_fw_mp4_get_past_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 17, 0x1) - #define viddec_fw_mp4_set_past_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 17, 0x1) - #define viddec_fw_mp4_get_past_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 12, 0x1F) - #define viddec_fw_mp4_set_past_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 12, 0x1F) - #define viddec_fw_mp4_get_future_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 11, 0x1) - #define viddec_fw_mp4_set_future_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 11, 0x1) - #define viddec_fw_mp4_get_future_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 6, 0x1F) - #define viddec_fw_mp4_set_future_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 6, 0x1F) - #define viddec_fw_mp4_get_current_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 5, 0x1) - #define viddec_fw_mp4_set_current_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 5, 0x1) - #define viddec_fw_mp4_get_current_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 0, 0x1F) - #define viddec_fw_mp4_set_current_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 0, 0x1F) +#define viddec_fw_mp4_get_past_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 17, 0x1) +#define viddec_fw_mp4_set_past_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 17, 0x1) +#define viddec_fw_mp4_get_past_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 12, 0x1F) +#define viddec_fw_mp4_set_past_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 12, 0x1F) +#define viddec_fw_mp4_get_future_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 11, 0x1) +#define viddec_fw_mp4_set_future_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 11, 0x1) +#define viddec_fw_mp4_get_future_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 6, 0x1F) +#define viddec_fw_mp4_set_future_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 6, 0x1F) +#define 
viddec_fw_mp4_get_current_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 5, 0x1) +#define viddec_fw_mp4_set_current_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 5, 0x1) +#define viddec_fw_mp4_get_current_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 0, 0x1F) +#define viddec_fw_mp4_set_current_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 0, 0x1F) unsigned int frame_info; // Video Object Plane Info @@ -136,24 +136,24 @@ typedef struct // 17:19 - vop_fcode_forward // 20:22 - vop_fcode_backward // 23:31 - quant_scale - #define viddec_fw_mp4_get_vop_quant_scale(x) viddec_fw_bitfields_extract((x)->vop_data, 23, 0x1FF) - #define viddec_fw_mp4_set_vop_quant_scale(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 23, 0x1FF) - #define viddec_fw_mp4_get_vop_fcode_backward(x) viddec_fw_bitfields_extract((x)->vop_data, 20, 0x7) - #define viddec_fw_mp4_set_vop_fcode_backward(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 20, 0x7) - #define viddec_fw_mp4_get_vop_fcode_forward(x) viddec_fw_bitfields_extract((x)->vop_data, 17, 0x7) - #define viddec_fw_mp4_set_vop_fcode_forward(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 17, 0x7) - #define viddec_fw_mp4_get_vop_quant(x) viddec_fw_bitfields_extract((x)->vop_data, 8, 0x1FF) - #define viddec_fw_mp4_set_vop_quant(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 8, 0x1FF) - #define viddec_fw_mp4_get_alternate_vertical_scan_flag(x) viddec_fw_bitfields_extract((x)->vop_data, 7, 0x1) - #define viddec_fw_mp4_set_alternate_vertical_scan_flag(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 7, 0x1) - #define viddec_fw_mp4_get_top_field_first(x) viddec_fw_bitfields_extract((x)->vop_data, 6, 0x1) - #define viddec_fw_mp4_set_top_field_first(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 6, 0x1) - #define viddec_fw_mp4_get_intra_dc_vlc_thr(x) viddec_fw_bitfields_extract((x)->vop_data, 3, 0x7) - #define viddec_fw_mp4_set_intra_dc_vlc_thr(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 3, 0x7) - #define viddec_fw_mp4_get_vop_rounding_type(x) viddec_fw_bitfields_extract((x)->vop_data, 2, 0x1) - #define viddec_fw_mp4_set_vop_rounding_type(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 2, 0x1) - #define viddec_fw_mp4_get_vop_coding_type(x) viddec_fw_bitfields_extract((x)->vop_data, 0, 0x3) - #define viddec_fw_mp4_set_vop_coding_type(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 0, 0x3) +#define viddec_fw_mp4_get_vop_quant_scale(x) viddec_fw_bitfields_extract((x)->vop_data, 23, 0x1FF) +#define viddec_fw_mp4_set_vop_quant_scale(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 23, 0x1FF) +#define viddec_fw_mp4_get_vop_fcode_backward(x) viddec_fw_bitfields_extract((x)->vop_data, 20, 0x7) +#define viddec_fw_mp4_set_vop_fcode_backward(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 20, 0x7) +#define viddec_fw_mp4_get_vop_fcode_forward(x) viddec_fw_bitfields_extract((x)->vop_data, 17, 0x7) +#define viddec_fw_mp4_set_vop_fcode_forward(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 17, 0x7) +#define viddec_fw_mp4_get_vop_quant(x) viddec_fw_bitfields_extract((x)->vop_data, 8, 0x1FF) +#define viddec_fw_mp4_set_vop_quant(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 8, 0x1FF) +#define viddec_fw_mp4_get_alternate_vertical_scan_flag(x) viddec_fw_bitfields_extract((x)->vop_data, 7, 0x1) +#define viddec_fw_mp4_set_alternate_vertical_scan_flag(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 7, 0x1) +#define 
viddec_fw_mp4_get_top_field_first(x) viddec_fw_bitfields_extract((x)->vop_data, 6, 0x1) +#define viddec_fw_mp4_set_top_field_first(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 6, 0x1) +#define viddec_fw_mp4_get_intra_dc_vlc_thr(x) viddec_fw_bitfields_extract((x)->vop_data, 3, 0x7) +#define viddec_fw_mp4_set_intra_dc_vlc_thr(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 3, 0x7) +#define viddec_fw_mp4_get_vop_rounding_type(x) viddec_fw_bitfields_extract((x)->vop_data, 2, 0x1) +#define viddec_fw_mp4_set_vop_rounding_type(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 2, 0x1) +#define viddec_fw_mp4_get_vop_coding_type(x) viddec_fw_bitfields_extract((x)->vop_data, 0, 0x3) +#define viddec_fw_mp4_set_vop_coding_type(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 0, 0x3) unsigned int vop_data; // No of bits used in first byte of MB data @@ -162,7 +162,7 @@ typedef struct } viddec_fw_mp4_vop_info_t; // This structure contains the information extracted from the Video Object Layer. -// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_BVOP_INFO, using +// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_BVOP_INFO, using // the "vwi_payload" array in viddec_workload_item_t. // TODO: Add default values in the comments for each item typedef struct @@ -170,17 +170,17 @@ typedef struct // Frame period = T(first B-VOP after VOL) - T(past reference of first B-VOP after VOL) unsigned int Tframe; - // TRD is the difference in temporal reference of the temporally next reference VOP with + // TRD is the difference in temporal reference of the temporally next reference VOP with // temporally previous reference VOP, assuming B-VOPs or skipped VOPs in between. unsigned int TRD; - // TRB is the difference in temporal reference of the B-VOP and the previous reference VOP. + // TRB is the difference in temporal reference of the B-VOP and the previous reference VOP. unsigned int TRB; } viddec_fw_mp4_bvop_info_t; // This structure contains the information extracted from the sprite trajectory. -// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SPRT_TRAJ, +// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SPRT_TRAJ, // using the fields vwi_payload in viddec_workload_item_t. // TODO: Add default values in the comments for each item typedef struct @@ -190,12 +190,12 @@ typedef struct // 14:27 - warping_mv_code_du // 28:31 - warping_point_index - identifies which warping point the warping code refers to. // The default value for index is 0xF which should be treated as invalid. 
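All of the get/set pairs in this header expand to two shift-and-mask
helpers whose definitions live elsewhere in the firmware tree. The
stand-alone sketch below re-implements what those helpers are assumed to
do (bf_extract/bf_insert are illustrative stand-ins, not the firmware's
viddec_fw_bitfields_extract/insert themselves) and packs one sprite
warping point the way the macros below would:

#include <stdio.h>

/* Assumed semantics: extract reads the bits selected by `mask` at bit
   offset `pos`; insert overwrites that field, leaving neighbouring bits
   untouched. */
static unsigned int bf_extract(unsigned int x, unsigned int pos, unsigned int mask)
{
    return (x >> pos) & mask;
}

static unsigned int bf_insert(unsigned int x, unsigned int val, unsigned int pos,
                              unsigned int mask)
{
    return (x & ~(mask << pos)) | ((val & mask) << pos);
}

int main(void)
{
    unsigned int w = 0xFu << 28;          /* default: invalid warping point */

    w = bf_insert(w, 2, 28, 0xF);         /* warping_point_index = 2        */
    w = bf_insert(w, 300, 14, 0x3FFF);    /* warping_mv_code_du             */
    w = bf_insert(w, 0x3FFF, 0, 0x3FFF);  /* warping_mv_code_dv             */

    printf("index=%u du=%u dv=0x%X word=0x%08X\n",
           bf_extract(w, 28, 0xF), bf_extract(w, 14, 0x3FFF),
           bf_extract(w, 0, 0x3FFF), w);
    return 0;
}

The firmware macros presumably update the payload word in place; the
pure-function form here just keeps the sketch compact.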
- #define viddec_fw_mp4_get_warping_point_index(x) viddec_fw_bitfields_extract((x), 28, 0xF) - #define viddec_fw_mp4_set_warping_point_index(x, val) viddec_fw_bitfields_insert((x), val, 28, 0xF) - #define viddec_fw_mp4_get_warping_mv_code_du(x) viddec_fw_bitfields_extract((x), 14, 0x3FFF) - #define viddec_fw_mp4_set_warping_mv_code_du(x, val) viddec_fw_bitfields_insert((x), val, 14, 0x3FFF) - #define viddec_fw_mp4_get_warping_mv_code_dv(x) viddec_fw_bitfields_extract((x), 0, 0x3FFF) - #define viddec_fw_mp4_set_warping_mv_code_dv(x, val) viddec_fw_bitfields_insert((x), val, 0, 0x3FFF) +#define viddec_fw_mp4_get_warping_point_index(x) viddec_fw_bitfields_extract((x), 28, 0xF) +#define viddec_fw_mp4_set_warping_point_index(x, val) viddec_fw_bitfields_insert((x), val, 28, 0xF) +#define viddec_fw_mp4_get_warping_mv_code_du(x) viddec_fw_bitfields_extract((x), 14, 0x3FFF) +#define viddec_fw_mp4_set_warping_mv_code_du(x, val) viddec_fw_bitfields_insert((x), val, 14, 0x3FFF) +#define viddec_fw_mp4_get_warping_mv_code_dv(x) viddec_fw_bitfields_extract((x), 0, 0x3FFF) +#define viddec_fw_mp4_set_warping_mv_code_dv(x, val) viddec_fw_bitfields_insert((x), val, 0, 0x3FFF) unsigned int warping_mv_code[3]; } viddec_fw_mp4_sprite_trajectory_t; @@ -204,7 +204,7 @@ typedef struct // There is no need for a separate structure for these values. // This structure contains the information extracted from the Video Plane with Short Header. -// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SVH, using +// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SVH, using // the "vwi_payload" array in viddec_workload_item_t. // TODO: Add default values in the comments for each item typedef struct @@ -214,14 +214,14 @@ typedef struct // 8:19 - num_macroblocks_in_gob // 20:24 - num_gobs_in_vop // 25:27 - num_rows_in_gob - #define viddec_fw_mp4_get_num_rows_in_gob(x) viddec_fw_bitfields_extract((x)->svh_data, 25, 0x7) - #define viddec_fw_mp4_set_num_rows_in_gob(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 25, 0x7) - #define viddec_fw_mp4_get_num_gobs_in_vop(x) viddec_fw_bitfields_extract((x)->svh_data, 20, 0x1F) - #define viddec_fw_mp4_set_num_gobs_in_vop(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 20, 0x1F) - #define viddec_fw_mp4_get_num_macroblocks_in_gob(x) viddec_fw_bitfields_extract((x)->svh_data, 8, 0xFFF) - #define viddec_fw_mp4_set_num_macroblocks_in_gob(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 8, 0xFFF) - #define viddec_fw_mp4_get_temporal_reference(x) viddec_fw_bitfields_extract((x)->svh_data, 0, 0xFF) - #define viddec_fw_mp4_set_temporal_reference(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 0, 0xFF) +#define viddec_fw_mp4_get_num_rows_in_gob(x) viddec_fw_bitfields_extract((x)->svh_data, 25, 0x7) +#define viddec_fw_mp4_set_num_rows_in_gob(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 25, 0x7) +#define viddec_fw_mp4_get_num_gobs_in_vop(x) viddec_fw_bitfields_extract((x)->svh_data, 20, 0x1F) +#define viddec_fw_mp4_set_num_gobs_in_vop(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 20, 0x1F) +#define viddec_fw_mp4_get_num_macroblocks_in_gob(x) viddec_fw_bitfields_extract((x)->svh_data, 8, 0xFFF) +#define viddec_fw_mp4_set_num_macroblocks_in_gob(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 8, 0xFFF) +#define viddec_fw_mp4_get_temporal_reference(x) viddec_fw_bitfields_extract((x)->svh_data, 0, 0xFF) +#define viddec_fw_mp4_set_temporal_reference(x, val) viddec_fw_bitfields_insert((x)->svh_data, 
val, 0, 0xFF) unsigned int svh_data; unsigned int pad1; diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk index ed28fa3..012c8cd 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/Android.mk @@ -15,9 +15,6 @@ LOCAL_SRC_FILES := \ LOCAL_CFLAGS := -DVBP -DHOST_ONLY LOCAL_C_INCLUDES := \ - $(GLIB_TOP) \ - $(GLIB_TOP)/android \ - $(GLIB_TOP)/glib \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/include \ @@ -27,7 +24,6 @@ LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libmixvbp_mpeg4 LOCAL_SHARED_LIBRARIES := \ - libglib-2.0 \ libmixvbp include $(BUILD_SHARED_LIBRARY) diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c index e9e9012..eaed197 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c @@ -15,24 +15,24 @@ uint32_t viddec_fw_mp4_populate_attr(viddec_workload_t *wl, viddec_mp4_parser_t attr->cont_size.width = vol->video_object_layer_width; attr->cont_size.height = vol->video_object_layer_height; - + // Translate vop_coding_type - switch(vol->VideoObjectPlane.vop_coding_type) + switch (vol->VideoObjectPlane.vop_coding_type) { - case MP4_VOP_TYPE_B: - attr->frame_type = VIDDEC_FRAME_TYPE_B; - break; - case MP4_VOP_TYPE_P: - attr->frame_type = VIDDEC_FRAME_TYPE_P; - break; - case MP4_VOP_TYPE_S: - attr->frame_type = VIDDEC_FRAME_TYPE_S; - break; - case MP4_VOP_TYPE_I: - attr->frame_type = VIDDEC_FRAME_TYPE_I; - break; - default: - break; + case MP4_VOP_TYPE_B: + attr->frame_type = VIDDEC_FRAME_TYPE_B; + break; + case MP4_VOP_TYPE_P: + attr->frame_type = VIDDEC_FRAME_TYPE_P; + break; + case MP4_VOP_TYPE_S: + attr->frame_type = VIDDEC_FRAME_TYPE_S; + break; + case MP4_VOP_TYPE_I: + attr->frame_type = VIDDEC_FRAME_TYPE_I; + break; + default: + break; } // switch on vop_coding_type attr->mpeg4.top_field_first = vol->VideoObjectPlane.top_field_first; @@ -168,18 +168,18 @@ uint32_t viddec_fw_mp4_insert_sprite_workitem(void *parent, viddec_mp4_parser_t uint8_t warp_index = 0; int i, j; - if(!vol->sprite_info.no_of_sprite_warping_points) + if (!vol->sprite_info.no_of_sprite_warping_points) return result; no_of_sprite_workitems = (vol->sprite_info.no_of_sprite_warping_points > 3) ? 
2 : 1; - for(i=0; isprite_info.no_of_sprite_warping_points) + if (warp_index < vol->sprite_info.no_of_sprite_warping_points) { viddec_fw_mp4_set_warping_point_index(sprite_info.warping_mv_code[j], warp_index); viddec_fw_mp4_set_warping_mv_code_du(sprite_info.warping_mv_code[j], vop->warping_mv_code_du[warp_index]); @@ -187,7 +187,7 @@ uint32_t viddec_fw_mp4_insert_sprite_workitem(void *parent, viddec_mp4_parser_t } else { - sprite_info.warping_mv_code[j] = 0xF << 28; + sprite_info.warping_mv_code[j] = 0xF << 28; } warp_index++; } @@ -228,16 +228,16 @@ uint32_t viddec_fw_mp4_insert_qmat(void *parent, uint8_t intra_quant_flag, uint3 // No of items = (64/4 Dwords / 3 entries per workload item) // 64 8b entries => 64 * 8 / 32 DWORDS => 64/4 DWORDS => 16 DWORDS // Each item can store 3 DWORDS, 16 DWORDS => 16/3 items => 6 items - for(i=0; i<6; i++) + for (i=0; i<6; i++) { memset(&wi, 0, sizeof(viddec_workload_item_t)); - if(intra_quant_flag) + if (intra_quant_flag) wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_IQUANT; else wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_NIQUANT; - if(i == 6) + if (i == 6) { wi.vwi_payload[0] = qmat[0]; wi.vwi_payload[1] = 0; @@ -262,12 +262,12 @@ uint32_t viddec_fw_mp4_insert_inversequant_workitem(void *parent, mp4_VOLQuant_m { uint32_t result = MP4_STATUS_OK; - if(qmat->load_intra_quant_mat) + if (qmat->load_intra_quant_mat) { result = viddec_fw_mp4_insert_qmat(parent, true, (uint32_t *) &(qmat->intra_quant_mat)); } - if(qmat->load_nonintra_quant_mat) + if (qmat->load_nonintra_quant_mat) { result = viddec_fw_mp4_insert_qmat(parent, false, (uint32_t *) &(qmat->nonintra_quant_mat)); } @@ -331,39 +331,39 @@ uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt) result = viddec_fw_mp4_insert_sprite_workitem(parent, parser); result = viddec_fw_mp4_insert_inversequant_workitem(parent, &(parser->info.VisualObject.VideoObject.quant_mat_info)); - if(parser->info.VisualObject.VideoObject.short_video_header) - result = viddec_fw_mp4_insert_vpsh_workitem(parent, parser); + if (parser->info.VisualObject.VideoObject.short_video_header) + result = viddec_fw_mp4_insert_vpsh_workitem(parent, parser); - if(!parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded) + if (!parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded) wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME; - + // Send reference re-order tag for all reference frame types if (parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type != MP4_VOP_TYPE_B) { result = viddec_fw_mp4_insert_reorder_workitem(parent); } - + // Handle vop_coding_type based information - switch(parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type) + switch (parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type) { - case MP4_VOP_TYPE_B: - result = viddec_fw_mp4_insert_bvop_workitem(parent, parser); - result = viddec_fw_mp4_insert_past_frame_workitem(parent); - result = viddec_fw_mp4_insert_future_frame_workitem(parent); - break; - case MP4_VOP_TYPE_P: - case MP4_VOP_TYPE_S: - result = viddec_fw_mp4_insert_past_frame_workitem(parent); - // Deliberate fall-thru to type I - case MP4_VOP_TYPE_I: - wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (1 & WORKLOAD_REFERENCE_FRAME_BMASK); - // Swap reference information - parser->ref_frame[VIDDEC_MP4_INDX_2] = parser->ref_frame[VIDDEC_MP4_INDX_1]; - parser->ref_frame[VIDDEC_MP4_INDX_1] = parser->ref_frame[VIDDEC_MP4_INDX_0]; - break; - break; - default: - break; + case MP4_VOP_TYPE_B: + result = 
viddec_fw_mp4_insert_bvop_workitem(parent, parser); + result = viddec_fw_mp4_insert_past_frame_workitem(parent); + result = viddec_fw_mp4_insert_future_frame_workitem(parent); + break; + case MP4_VOP_TYPE_P: + case MP4_VOP_TYPE_S: + result = viddec_fw_mp4_insert_past_frame_workitem(parent); + // Deliberate fall-thru to type I + case MP4_VOP_TYPE_I: + wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (1 & WORKLOAD_REFERENCE_FRAME_BMASK); + // Swap reference information + parser->ref_frame[VIDDEC_MP4_INDX_2] = parser->ref_frame[VIDDEC_MP4_INDX_1]; + parser->ref_frame[VIDDEC_MP4_INDX_1] = parser->ref_frame[VIDDEC_MP4_INDX_0]; + break; + break; + default: + break; } // switch on vop_coding_type result = viddec_pm_append_pixeldata(parent); diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c index 4ae9135..021678e 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c @@ -8,12 +8,12 @@ mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t* pInfo) mp4_VideoObjectLayer_t *vol = &(pInfo->VisualObject.VideoObject); mp4_GroupOfVideoObjectPlane_t *gvop = &(pInfo->VisualObject.VideoObject.GroupOfVideoObjectPlane); mp4_VideoObjectPlane_t *vop = &(pInfo->VisualObject.VideoObject.VideoObjectPlane); - + // set VOP time if (vol->short_video_header) { vop_time = vol->vop_sync_time + - pInfo->VisualObject.VideoObject.VideoObjectPlaneH263.temporal_reference * 1001; + pInfo->VisualObject.VideoObject.VideoObjectPlaneH263.temporal_reference * 1001; // if (vo->currentFrame.time > vop_time) { @@ -42,29 +42,29 @@ mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t* pInfo) } } - if(vop->vop_coded) + if (vop->vop_coded) { switch (vop->vop_coding_type) { - case MP4_VOP_TYPE_S: - if (vol->sprite_enable != MP4_SPRITE_GMC) - break; - // Deliberate fall-through from this case - case MP4_VOP_TYPE_I: - case MP4_VOP_TYPE_P: - // set past and future time for B-VOP - vol->pastFrameTime = vol->futureFrameTime; - vol->futureFrameTime = vop_time; - break; - default: + case MP4_VOP_TYPE_S: + if (vol->sprite_enable != MP4_SPRITE_GMC) break; + // Deliberate fall-through from this case + case MP4_VOP_TYPE_I: + case MP4_VOP_TYPE_P: + // set past and future time for B-VOP + vol->pastFrameTime = vol->futureFrameTime; + vol->futureFrameTime = vop_time; + break; + default: + break; } } if (vop->vop_coded) // || (vop_time != vo->currentFrame.time && vop_time != vo->pastFrame.time && vop_time != vo->futureFrame.time) ) { - if(vop->vop_coding_type == MP4_VOP_TYPE_B) + if (vop->vop_coding_type == MP4_VOP_TYPE_B) { if (!vol->Tframe) vol->Tframe = (int) (vop_time); // - vo->pastFrame.time); @@ -73,7 +73,7 @@ mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t* pInfo) { vol->TRB = (int) (vop_time - vol->pastFrameTime); vol->TRD = (int) (vol->futureFrameTime - vol->pastFrameTime); - + // defense from bad streams when B-VOPs are before Past and/or Future if (vol->TRB <= 0) vol->TRB = 1; @@ -93,6 +93,6 @@ mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t* pInfo) } } - return status; + return status; } // mp4_DecodeVideoObjectPlane diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c index 101c852..72ad8b7 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c @@ -31,19 
+31,19 @@ uint32_t viddec_mp4_wkld_done(void *parent, void *ctxt, uint32_t next_sc, uint32 parent = parent; // VS, VO, VOL, VOP or GVOP start codes indicate frame boundary. - frame_boundary = ( (MP4_SC_VISUAL_OBJECT_SEQUENCE == next_sc) || - (MP4_SC_VISUAL_OBJECT == next_sc) || - ((MP4_SC_VIDEO_OBJECT_LAYER_MIN <= next_sc) && (next_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) || + frame_boundary = ( (MP4_SC_VISUAL_OBJECT_SEQUENCE == next_sc) || + (MP4_SC_VISUAL_OBJECT == next_sc) || + ((MP4_SC_VIDEO_OBJECT_LAYER_MIN <= next_sc) && (next_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) || (next_sc <= MP4_SC_VIDEO_OBJECT_MAX) || - (MP4_SC_VIDEO_OBJECT_PLANE == next_sc) || - ((SHORT_THIRD_STARTCODE_BYTE & 0xFC) == (next_sc & 0xFC)) || + (MP4_SC_VIDEO_OBJECT_PLANE == next_sc) || + ((SHORT_THIRD_STARTCODE_BYTE & 0xFC) == (next_sc & 0xFC)) || (MP4_SC_GROUP_OF_VOP == next_sc) ); // Mark workload is ready to be emitted based on the start codes seen. if (frame_boundary) { uint8_t vol_error_found = false, frame_complete = false; - + // Frame is considered complete and without errors, if a VOL was received since startup and // if a VOP was received for this workload (or) if short video header is found. frame_complete = ( ((parser->sc_seen & MP4_SC_SEEN_VOL) && (parser->sc_seen & MP4_SC_SEEN_VOP)) || @@ -53,7 +53,7 @@ uint32_t viddec_mp4_wkld_done(void *parent, void *ctxt, uint32_t next_sc, uint32 // If not, error occurred and we need to throw the current workload as error. vol_error_found = ( (parser->prev_sc == MP4_SC_VIDEO_OBJECT_LAYER_MIN) && !(MP4_SC_VIDEO_OBJECT_PLANE == next_sc) ); - + emit_workload = (frame_complete || vol_error_found); //DEB("emit workload: frame_complete: %d, vol_error_found %d\n", frame_complete, vol_error_found); @@ -62,7 +62,7 @@ uint32_t viddec_mp4_wkld_done(void *parent, void *ctxt, uint32_t next_sc, uint32 // EOS and discontinuity should force workload completion. emit_workload |= ((VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc)); - if(emit_workload) + if (emit_workload) { *codec_specific_errors = 0; @@ -73,7 +73,7 @@ uint32_t viddec_mp4_wkld_done(void *parent, void *ctxt, uint32_t next_sc, uint32 /* This is the strategy for error detection. - Errors in any field needed by the firmware (parser/decoder) are treated as non-decodable. + Errors in any field needed by the firmware (parser/decoder) are treated as non-decodable. Errors in other fields will be considered decodable. Defaults/alternate strategies will be considered on a case-by-case basis as customer content is seen. 
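As a reading aid for the error-strategy comment above: the table in the next hunk classifies errors by which component can conceal them, and the code then folds the parser's bitstream_error flags into the workload's codec_specific_errors. Here is a minimal C sketch of that mapping, using only flag names that appear in this patch; it is illustrative only, since the shipping logic is written inline in viddec_mp4_wkld_done() as the next hunk shows:

    /* Sketch of the error-classification policy described in the comment
       above; illustrative, not the code this patch touches. */
    static uint32_t mp4_classify_errors(uint16_t bitstream_error)
    {
        uint32_t errors = 0;
        /* Errors in fields the firmware needs: frame treated as not decodable. */
        if (bitstream_error & (MP4_BS_ERROR_HDR_NONDEC | MP4_BS_ERROR_FRM_NONDEC))
            errors |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE | VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM;
        /* Unsupported features: flagged, but parsing continues. */
        if (bitstream_error & (MP4_BS_ERROR_HDR_UNSUP | MP4_BS_ERROR_FRM_UNSUP))
            errors |= VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED;
        /* Parse errors in non-essential fields: still decodable, but flagged. */
        if (bitstream_error & (MP4_BS_ERROR_HDR_PARSE | MP4_BS_ERROR_FRM_PARSE))
            errors |= VIDDEC_FW_WORKLOAD_ERR_BITSTREAM_ERROR;
        return errors;
    }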
@@ -84,23 +84,23 @@ uint32_t viddec_mp4_wkld_done(void *parent, void *ctxt, uint32_t next_sc, uint32
 -----------------------------------------------------------------
 FIRMWARE | BS+ND | BS+ND | UNSUP+ND | UNSUP+ND |
 DRIVER/USER | BS+DFLT | BS | UNSUP | UNSUP |
- NONE | BS | BS | UNSUP | UNSUP |
+ NONE | BS | BS | UNSUP | UNSUP |
 | | | Continue Parsing |
- */
+ */
- if((parser->bitstream_error & MP4_BS_ERROR_HDR_NONDEC) || (parser->bitstream_error & MP4_BS_ERROR_FRM_NONDEC))
+ if ((parser->bitstream_error & MP4_BS_ERROR_HDR_NONDEC) || (parser->bitstream_error & MP4_BS_ERROR_FRM_NONDEC))
 *codec_specific_errors |= (VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE | VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM);
- if((parser->bitstream_error & MP4_BS_ERROR_HDR_UNSUP) || (parser->bitstream_error & MP4_BS_ERROR_FRM_UNSUP))
+ if ((parser->bitstream_error & MP4_BS_ERROR_HDR_UNSUP) || (parser->bitstream_error & MP4_BS_ERROR_FRM_UNSUP))
 *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED;
- if((parser->bitstream_error & MP4_BS_ERROR_HDR_PARSE) || (parser->bitstream_error & MP4_BS_ERROR_FRM_PARSE))
+ if ((parser->bitstream_error & MP4_BS_ERROR_HDR_PARSE) || (parser->bitstream_error & MP4_BS_ERROR_FRM_PARSE))
 *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_BITSTREAM_ERROR;
-
+
 parser->bitstream_error &= MP4_HDR_ERROR_MASK;
 parser->sc_seen &= MP4_SC_SEEN_VOL;
 result = VIDDEC_PARSE_FRMDONE;
 }
- //DEB("exiting is_wkld_done: next_sc: 0x%x, sc_seen: %d, err: %d, fr_bnd:%d, force:%d\n",
+ //DEB("exiting is_wkld_done: next_sc: 0x%x, sc_seen: %d, err: %d, fr_bnd:%d, force:%d\n",
 // next_sc, parser->sc_seen, *codec_specific_errors, frame_boundary, force_frame_complete);
 return result;
@@ -118,21 +118,21 @@ void viddec_mp4_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
 parser->next_sc_prefix = false;
 parser->ignore_scs = false;
- if(preserve)
+ if (preserve)
 {
 // Need to maintain information till VOL
 parser->sc_seen &= MP4_SC_SEEN_VOL;
 parser->bitstream_error &= MP4_HDR_ERROR_MASK;
 // Reset only frame related data
- memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlane), 0, sizeof(mp4_VideoObjectPlane_t));
- memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263), 0, sizeof(mp4_VideoObjectPlaneH263));
+ memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlane), 0, sizeof(mp4_VideoObjectPlane_t));
+ memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263), 0, sizeof(mp4_VideoObjectPlaneH263));
 }
 else
 {
 parser->sc_seen = MP4_SC_SEEN_INVALID;
 parser->bitstream_error = MP4_BS_ERROR_NONE;
- memset(&(parser->info), 0, sizeof(mp4_Info_t));
+ memset(&(parser->info), 0, sizeof(mp4_Info_t));
 }
 return;
@@ -142,7 +142,7 @@ static uint32_t viddec_mp4_decodevop_and_emitwkld(void *parent, void *ctxt)
 {
 int status = MP4_STATUS_OK;
 viddec_mp4_parser_t *cxt = (viddec_mp4_parser_t *)ctxt;
-
+
 status = mp4_DecodeVideoObjectPlane(&(cxt->info));
 #ifndef VBP
@@ -159,16 +159,16 @@ uint32_t viddec_mp4_parse(void *parent, void *ctxt)
 uint8_t is_svh=0;
 int32_t getbits=0;
 int32_t status = 0;
-
+
 cxt = (viddec_mp4_parser_t *)ctxt;
 is_svh = (cxt->cur_sc_prefix) ?
false: true; - if((getbits = viddec_pm_peek_bits(parent, &sc, 32)) == -1) + if ((getbits = viddec_pm_peek_bits(parent, &sc, 32)) == -1) { DEB("Start code not found\n"); return VIDDEC_PARSE_ERROR; } - - if(!is_svh) + + if (!is_svh) { viddec_pm_get_bits(parent, &sc, 32); sc = sc & 0xFF; @@ -176,92 +176,92 @@ uint32_t viddec_mp4_parse(void *parent, void *ctxt) cxt->current_sc |= 0x100; DEB("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc); - switch(sc) + switch (sc) { - case MP4_SC_VISUAL_OBJECT_SEQUENCE: - { - status = mp4_Parse_VisualSequence(parent, cxt); - cxt->prev_sc = MP4_SC_VISUAL_OBJECT_SEQUENCE; - DEB("MP4_VISUAL_OBJECT_SEQUENCE_SC: \n"); - break; - } - case MP4_SC_VISUAL_OBJECT_SEQUENCE_EC: - {/* Not required to do anything */ - break; - } - case MP4_SC_USER_DATA: - { /* Copy userdata to user-visible buffer (EMIT) */ - status = mp4_Parse_UserData(parent, cxt); - DEB("MP4_USER_DATA_SC: \n"); - break; - } - case MP4_SC_GROUP_OF_VOP: - { - status = mp4_Parse_GroupOfVideoObjectPlane(parent, cxt); - cxt->prev_sc = MP4_SC_GROUP_OF_VOP; - DEB("MP4_GROUP_OF_VOP_SC:0x%.8X\n", status); - break; - } - case MP4_SC_VIDEO_SESSION_ERROR: - {/* Not required to do anything?? */ - break; - } - case MP4_SC_VISUAL_OBJECT: - { - status = mp4_Parse_VisualObject(parent, cxt); - cxt->prev_sc = MP4_SC_VISUAL_OBJECT; - DEB("MP4_VISUAL_OBJECT_SC: status=%.8X\n", status); - break; - } - case MP4_SC_VIDEO_OBJECT_PLANE: + case MP4_SC_VISUAL_OBJECT_SEQUENCE: + { + status = mp4_Parse_VisualSequence(parent, cxt); + cxt->prev_sc = MP4_SC_VISUAL_OBJECT_SEQUENCE; + DEB("MP4_VISUAL_OBJECT_SEQUENCE_SC: \n"); + break; + } + case MP4_SC_VISUAL_OBJECT_SEQUENCE_EC: + {/* Not required to do anything */ + break; + } + case MP4_SC_USER_DATA: + { /* Copy userdata to user-visible buffer (EMIT) */ + status = mp4_Parse_UserData(parent, cxt); + DEB("MP4_USER_DATA_SC: \n"); + break; + } + case MP4_SC_GROUP_OF_VOP: + { + status = mp4_Parse_GroupOfVideoObjectPlane(parent, cxt); + cxt->prev_sc = MP4_SC_GROUP_OF_VOP; + DEB("MP4_GROUP_OF_VOP_SC:0x%.8X\n", status); + break; + } + case MP4_SC_VIDEO_SESSION_ERROR: + {/* Not required to do anything?? */ + break; + } + case MP4_SC_VISUAL_OBJECT: + { + status = mp4_Parse_VisualObject(parent, cxt); + cxt->prev_sc = MP4_SC_VISUAL_OBJECT; + DEB("MP4_VISUAL_OBJECT_SC: status=%.8X\n", status); + break; + } + case MP4_SC_VIDEO_OBJECT_PLANE: + { + /* We must decode the VOP Header information, it does not end on a byte boundary, so we need to emit + a starting bit offset after parsing the header. */ + status = mp4_Parse_VideoObjectPlane(parent, cxt); + status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); + // TODO: Fix this for interlaced + cxt->is_frame_start = true; + cxt->sc_seen |= MP4_SC_SEEN_VOP; + + DEB("MP4_VIDEO_OBJECT_PLANE_SC: status=0x%.8X\n", status); + break; + } + case MP4_SC_STUFFING: + { + break; + } + default: + { + if ( (sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX) ) { - /* We must decode the VOP Header information, it does not end on a byte boundary, so we need to emit - a starting bit offset after parsing the header. 
*/ - status = mp4_Parse_VideoObjectPlane(parent, cxt); - status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); - // TODO: Fix this for interlaced - cxt->is_frame_start = true; - cxt->sc_seen |= MP4_SC_SEEN_VOP; - - DEB("MP4_VIDEO_OBJECT_PLANE_SC: status=0x%.8X\n", status); - break; + status = mp4_Parse_VideoObjectLayer(parent, cxt); + cxt->sc_seen = MP4_SC_SEEN_VOL; + cxt->prev_sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN; + DEB("MP4_VIDEO_OBJECT_LAYER_MIN_SC:status=0x%.8X\n", status); + sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN; } - case MP4_SC_STUFFING: - { - break; - } - default: + // sc is unsigned and will be >= 0, so no check needed for sc >= MP4_SC_VIDEO_OBJECT_MIN + else if (sc <= MP4_SC_VIDEO_OBJECT_MAX) { - if( (sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX) ) + // If there is more data, it is short video header, else the next start code is expected to be VideoObjectLayer + getbits = viddec_pm_get_bits(parent, &sc, 22); + if (getbits != -1) { - status = mp4_Parse_VideoObjectLayer(parent, cxt); - cxt->sc_seen = MP4_SC_SEEN_VOL; - cxt->prev_sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN; - DEB("MP4_VIDEO_OBJECT_LAYER_MIN_SC:status=0x%.8X\n", status); - sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN; - } - // sc is unsigned and will be >= 0, so no check needed for sc >= MP4_SC_VIDEO_OBJECT_MIN - else if(sc <= MP4_SC_VIDEO_OBJECT_MAX) - { - // If there is more data, it is short video header, else the next start code is expected to be VideoObjectLayer - getbits = viddec_pm_get_bits(parent, &sc, 22); - if(getbits != -1) - { - cxt->current_sc = sc; - status = mp4_Parse_VideoObject_svh(parent, cxt); - status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); - cxt->sc_seen = MP4_SC_SEEN_SVH; - cxt->is_frame_start = true; - DEB("MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc); - DEB("MP4_VIDEO_OBJECT_MIN_SC:status=0x%.8X\n", status); - } - } - else - { - DEB("UNKWON Cod:0x%08X\n", sc); + cxt->current_sc = sc; + status = mp4_Parse_VideoObject_svh(parent, cxt); + status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); + cxt->sc_seen = MP4_SC_SEEN_SVH; + cxt->is_frame_start = true; + DEB("MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc); + DEB("MP4_VIDEO_OBJECT_MIN_SC:status=0x%.8X\n", status); } } - break; + else + { + DEB("UNKWON Cod:0x%08X\n", sc); + } + } + break; } } else diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h index e9f8bbf..13aa32a 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h @@ -61,9 +61,9 @@ typedef enum MP4_SC_SYTEM_MIN = 0xC6, /* Unsupported */ MP4_SC_SYTEM_MAX = 0xFF, /* Unsupported */ MP4_SC_INVALID = 0x100, /* Invalid */ -}mp4_start_code_values_t; +} mp4_start_code_values_t; -/* MPEG-4 code values +/* MPEG-4 code values ISO/IEC 14496-2:2004 table 6-6 */ enum { @@ -263,7 +263,7 @@ typedef struct uint8_t top_field_first; uint8_t alternate_vertical_scan_flag; uint8_t sprite_transmit_mode; - + int32_t brightness_change_factor; uint16_t vop_quant; uint8_t vop_fcode_forward; @@ -323,7 +323,7 @@ typedef struct uint16_t sprite_warping_accuracy; uint16_t sprite_brightness_change; uint16_t low_latency_sprite_enable; -}mp4_VOLSpriteInfo_t; +} mp4_VOLSpriteInfo_t; typedef struct { @@ -332,7 +332,7 @@ typedef struct uint16_t align_dummy1; uint8_t intra_quant_mat[64]; uint8_t nonintra_quant_mat[64]; -}mp4_VOLQuant_mat_t; +} mp4_VOLQuant_mat_t; /* Video Object Layer Info */ 
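A note on the start-code handling in the viddec_mp4_parse() hunk earlier in this patch, which dispatches against the values in the enum above: codes arrive as a 32-bit word 0x000001XX, the switch keys off the low byte, and bit 8 is OR-ed into current_sc afterwards. A small worked example; the 0xB6 value is the VOP start code from ISO/IEC 14496-2 table 6-6, and reading the 0x100 bit as a "prefix consumed" marker is an inference from how current_sc is used, not something this patch states:

    /* Assumed walk-through of the start-code bookkeeping in viddec_mp4_parse(). */
    uint32_t word = 0x000001B6;        /* wire bytes 00 00 01 B6: a VOP start code */
    uint32_t sc   = word & 0xFF;       /* 0xB6 drives the switch (sc) dispatch     */
    uint32_t current_sc = sc | 0x100;  /* bit 8 appears to mark "prefix consumed";
                                          note MP4_SC_INVALID is 0x100 above       */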
typedef struct @@ -461,15 +461,15 @@ enum typedef enum { - VIDDEC_MP4_INDX_0 = 0, - VIDDEC_MP4_INDX_1 = 1, - VIDDEC_MP4_INDX_2 = 2, - VIDDEC_MP4_INDX_MAX = 3, + VIDDEC_MP4_INDX_0 = 0, + VIDDEC_MP4_INDX_1 = 1, + VIDDEC_MP4_INDX_2 = 2, + VIDDEC_MP4_INDX_MAX = 3, } viddec_fw_mp4_ref_index_t; typedef struct { - uint8_t is_field; + uint8_t is_field; } viddec_mp4_ref_info_t; typedef struct @@ -477,10 +477,10 @@ typedef struct // The relevant bitstream data for current stream mp4_Info_t info; - // The previous start code (without the prefix) + // The previous start code (without the prefix) uint32_t prev_sc; - // The current start code (without the prefix) + // The current start code (without the prefix) // TODO: Revisit for SVH uint32_t current_sc; @@ -504,10 +504,10 @@ typedef struct // Indicates bitstream errors if any uint16_t bitstream_error; - // Reference frame information - viddec_mp4_ref_info_t ref_frame[VIDDEC_MP4_INDX_MAX]; + // Reference frame information + viddec_mp4_ref_info_t ref_frame[VIDDEC_MP4_INDX_MAX]; -}viddec_mp4_parser_t; +} viddec_mp4_parser_t; #define BREAK_GETBITS_FAIL(x, ret) { \ if(x == -1){ \ diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c index 29cd3a9..32d6d93 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c @@ -11,11 +11,11 @@ typedef struct const svh_src_fmt_params_t svh_src_fmt_defaults[5] = { - {128, 96, 8, 6, 1}, - {176, 144, 11, 9, 1}, - {352, 288, 22, 18, 1}, - {704, 576, 88, 18, 2}, - {1408, 1152, 352, 18, 4}, + {128, 96, 8, 6, 1}, + {176, 144, 11, 9, 1}, + {352, 288, 22, 18, 1}, + {704, 576, 88, 18, 2}, + {1408, 1152, 352, 18, 4}, }; mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *parser) @@ -25,7 +25,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263); int32_t getbits = 0; uint8_t pei = 0; - + do { //temporal reference @@ -35,7 +35,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p //marker bit getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if( 1 != (data & 0x1)) + if ( 1 != (data & 0x1)) { ret = MP4_STATUS_NOTSUPPORT; break; @@ -43,7 +43,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p //zero bit getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if( 0 != (data & 0x1)) + if ( 0 != (data & 0x1)) { ret = MP4_STATUS_NOTSUPPORT; break; @@ -61,17 +61,17 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p ret = MP4_STATUS_NOTSUPPORT; break; } - - if(svh->source_format != 7) + + if (svh->source_format != 7) { //picture coding type getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); - svh->picture_coding_type = (data & 0x1); + svh->picture_coding_type = (data & 0x1); //reserved zero bits getbits = viddec_pm_get_bits(parent, &data, 4); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if( 0 != (data & 0xf)) + if ( 0 != (data & 0xf)) { ret = MP4_STATUS_NOTSUPPORT; break; @@ -83,7 +83,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p //zero bit getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if( 0 != (data & 0x1)) + if ( 0 
!= (data & 0x1)) { ret = MP4_STATUS_NOTSUPPORT; break; @@ -95,7 +95,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p getbits = viddec_pm_get_bits(parent, &data, 3); BREAK_GETBITS_REQD_MISSING(getbits, ret); svh->ufep = (data & 0x7); //ufep - if(svh->ufep == 0x0) + if (svh->ufep == 0x0) { DEB("Info: don't support to handle the 0x000 case of Update Full Extended PTYPE\n"); ret = MP4_STATUS_NOTSUPPORT; @@ -107,7 +107,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p getbits = viddec_pm_get_bits(parent, &data, 3); BREAK_GETBITS_REQD_MISSING(getbits, ret); svh->source_format = (data & 0x7); - if(svh->source_format < 1 || svh->source_format > 6) + if (svh->source_format < 1 || svh->source_format > 6) { DEB("Error: bad value of source_format\n"); ret = MP4_STATUS_PARSE_ERROR; @@ -116,7 +116,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p //optional indicators getbits = viddec_pm_get_bits(parent, &data, 8); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if( 0 != (data & 0xff)) + if ( 0 != (data & 0xff)) { ret = MP4_STATUS_PARSE_ERROR; break; @@ -124,7 +124,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p //reserved zero bits getbits = viddec_pm_get_bits(parent, &data, 3); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if( 0 != (data & 0x7)) + if ( 0 != (data & 0x7)) { ret = MP4_STATUS_PARSE_ERROR; break; @@ -132,7 +132,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p //marker bit getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if( 1 != (data & 0x1)) + if ( 1 != (data & 0x1)) { ret = MP4_STATUS_PARSE_ERROR; break; @@ -140,7 +140,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p //reserved zero bits getbits = viddec_pm_get_bits(parent, &data, 3); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if( 0 != (data & 0x7)) + if ( 0 != (data & 0x7)) { ret = MP4_STATUS_PARSE_ERROR; break; @@ -157,7 +157,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p getbits = viddec_pm_get_bits(parent, &data, 3); BREAK_GETBITS_REQD_MISSING(getbits, ret); svh->picture_coding_type = (data & 0x7); - if(svh->picture_coding_type > 1) + if (svh->picture_coding_type > 1) { DEB("Info: only support I and P frames\n"); ret = MP4_STATUS_NOTSUPPORT; @@ -166,7 +166,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p //optional RPR mode getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if( 0 != (data & 0x1)) + if ( 0 != (data & 0x1)) { ret = MP4_STATUS_PARSE_ERROR; break; @@ -174,7 +174,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p //optional PRU mode getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if( 0 != (data & 0x1)) + if ( 0 != (data & 0x1)) { ret = MP4_STATUS_PARSE_ERROR; break; @@ -186,7 +186,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p //reserved zero bits getbits = viddec_pm_get_bits(parent, &data, 2); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if( 0 != (data & 0x3)) + if ( 0 != (data & 0x3)) { ret = MP4_STATUS_PARSE_ERROR; break; @@ -194,7 +194,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p //marker bit getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if( 1 != 
(data & 0x1)) + if ( 1 != (data & 0x1)) { ret = MP4_STATUS_PARSE_ERROR; break; @@ -202,7 +202,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p //cpm getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if(svh->ufep == 1 && svh->source_format == 6) + if (svh->ufep == 1 && svh->source_format == 6) { //CPFMT getbits = viddec_pm_get_bits(parent, &data, 4); BREAK_GETBITS_REQD_MISSING(getbits, ret); @@ -214,7 +214,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p //marker bit getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if( 1 != (data & 0x1)) + if ( 1 != (data & 0x1)) { ret = MP4_STATUS_PARSE_ERROR; break; @@ -234,15 +234,15 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p getbits = viddec_pm_get_bits(parent, &data, 1); // pei BREAK_GETBITS_FAIL(getbits, ret); pei = (data & 0x1); - if(0 != pei) + if (0 != pei) { getbits = viddec_pm_get_bits(parent, &data, 8); // psupp BREAK_GETBITS_FAIL(getbits, ret); } - }while( 1 == pei); + } while ( 1 == pei); // Anything after this needs to be fed to the decoder as PIXEL_ES - } while(0); + } while (0); return ret; } @@ -259,7 +259,7 @@ mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser uint8_t k = 0; ret = mp4_Parse_VideoObjectPlane_svh(parent, parser); - if(ret == MP4_STATUS_OK) + if (ret == MP4_STATUS_OK) { // Populate defaults for the svh vol->short_video_header = 1; @@ -289,7 +289,7 @@ mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser vst->transfer_characteristics = 1; vst->matrix_coefficients = 6; - if(svh->source_format >= 1 && svh->source_format <= 5) + if (svh->source_format >= 1 && svh->source_format <= 5) { index = svh->source_format - 1; vol->video_object_layer_width = svh_src_fmt_defaults[index].vop_width; @@ -298,11 +298,11 @@ mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser svh->num_gobs_in_vop = svh_src_fmt_defaults[index].num_gobs_in_vop; svh->num_rows_in_gob = svh_src_fmt_defaults[index].num_rows_in_gob; } - else if(svh->source_format == 6) //custom format + else if (svh->source_format == 6) //custom format { vol->video_object_layer_width = (svh->picture_width_indication + 1)*4; vol->video_object_layer_height = (svh->picture_height_indication)*4; - if(vol->video_object_layer_height < 404) + if (vol->video_object_layer_height < 404) { k = 1; } @@ -341,7 +341,7 @@ mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser viddec_fw_mp4_vpsh_set_source_format(&wi.mp4_vpsh, svh->source_format); ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); - if(ret == 1) + if (ret == 1) ret = MP4_STATUS_OK; } diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c index 1e4c0c1..5ef0960 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c @@ -2,23 +2,23 @@ #include "viddec_mp4_videoobjectlayer.h" const unsigned char mp4_DefaultIntraQuantMatrix[64] = { - 8, 17, 18, 19, 21, 23, 25, 27, + 8, 17, 18, 19, 21, 23, 25, 27, 17, 18, 19, 21, 23, 25, 27, 28, - 20, 21, 22, 23, 24, 26, 28, 30, + 20, 21, 22, 23, 24, 26, 28, 30, 21, 22, 23, 24, 26, 28, 30, 32, - 22, 23, 24, 26, 28, 30, 32, 35, + 22, 23, 24, 26, 28, 30, 32, 35, 23, 24, 26, 28, 30, 32, 35, 
38, - 25, 26, 28, 30, 32, 35, 38, 41, + 25, 26, 28, 30, 32, 35, 38, 41, 27, 28, 30, 32, 35, 38, 41, 45 }; const unsigned char mp4_DefaultNonIntraQuantMatrix[64] = { - 16, 17, 18, 19, 20, 21, 22, 23, + 16, 17, 18, 19, 20, 21, 22, 23, 17, 18, 19, 20, 21, 22, 23, 24, - 18, 19, 20, 21, 22, 23, 24, 25, + 18, 19, 20, 21, 22, 23, 24, 25, 19, 20, 21, 22, 23, 24, 26, 27, - 20, 21, 22, 23, 25, 26, 27, 28, + 20, 21, 22, 23, 25, 26, 27, 28, 21, 22, 23, 24, 26, 27, 28, 30, - 22, 23, 24, 26, 27, 28, 30, 31, + 22, 23, 24, 26, 27, 28, 30, 31, 23, 24, 25, 27, 28, 30, 31, 33 }; const unsigned char mp4_ClassicalZigzag[64] = { @@ -42,7 +42,7 @@ static inline int mp4_GetMacroBlockNumberSize(int nmb) static inline void mp4_copy_default_table(const uint8_t *src, uint8_t *dst, uint32_t len) { uint32_t i; - for(i=0; i< len; i++) + for (i=0; i< len; i++) dst[i] = src[i]; } @@ -65,7 +65,7 @@ static inline mp4_Status_t mp4_Parse_QuantMatrix(void *parent, uint8_t *pQM) for (; i < 64; i ++) { pQM[mp4_ClassicalZigzag[i]] = last; - } + } return ret;; } @@ -77,20 +77,20 @@ static inline uint8_t mp4_pvt_valid_object_type_indication(uint8_t val) static inline uint8_t mp4_pvt_valid_object_layer_verid(uint8_t val) { uint8_t ret=false; - switch(val) + switch (val) { - case 1: - case 2: - case 4: - case 5: - { - ret = true; - break; - } - default: - { - break; - } + case 1: + case 2: + case 4: + case 5: + { + ret = true; + break; + } + default: + { + break; + } } return ret; } @@ -103,7 +103,7 @@ mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser) int32_t getbits=0; uint32_t code=0; - do + do { getbits = viddec_pm_get_bits(parent, &(code), 4); BREAK_GETBITS_REQD_MISSING(getbits, ret); @@ -118,8 +118,8 @@ mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser) parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; ret = MP4_STATUS_NOTSUPPORT; } - - if(cxt->vbv_parameters) + + if (cxt->vbv_parameters) {/* TODO: Check for validity of marker bits */ getbits = viddec_pm_get_bits(parent, &(code), 32); BREAK_GETBITS_REQD_MISSING(getbits, ret); @@ -127,7 +127,7 @@ mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser) cxt->bit_rate = (code & 0xFFFE) >> 1; // Get rid of 1 marker bit cxt->bit_rate |= ((code & 0xFFFE0000) >> 2); // Get rid of 2 marker bits - if(cxt->bit_rate == 0) + if (cxt->bit_rate == 0) { DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.bit_rate = 0\n"); parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; @@ -135,13 +135,13 @@ mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser) // Do we need to really break here? Why not just set an error and proceed //break; } - + getbits = viddec_pm_get_bits(parent, &(code), 19); BREAK_GETBITS_REQD_MISSING(getbits, ret); - /* 19 bits= firsthalf(15) + M + LatterHalf(3)*/ + /* 19 bits= firsthalf(15) + M + LatterHalf(3)*/ cxt->vbv_buffer_size = code & 0x7; cxt->vbv_buffer_size |= ( (code >> 4) & 0x7FFF); - if(cxt->vbv_buffer_size == 0) + if (cxt->vbv_buffer_size == 0) { DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.vbv_buffer_size = 0\n"); parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; @@ -149,7 +149,7 @@ mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser) // Do we need to really break here? 
Why not just set an error and proceed //break; } - + getbits = viddec_pm_get_bits(parent, &(code), 28); BREAK_GETBITS_REQD_MISSING(getbits, ret); /* 28 bits= firsthalf(11) + M + LatterHalf(15) + M */ @@ -159,7 +159,7 @@ mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser) cxt->vbv_occupancy |= (code & 0x07FF); } ret = MP4_STATUS_OK; - } while(0); + } while (0); return ret; } @@ -167,10 +167,10 @@ mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser) static uint32_t mp4_pvt_count_number_of_bits(uint32_t val) { uint32_t num_bits=0; - do{ + do { val >>= 1; num_bits++; - }while(val); + } while (val); return num_bits; } @@ -184,9 +184,9 @@ mp4_Parse_VOL_sprite(void *parent, viddec_mp4_parser_t *parser) mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; int32_t getbits=0; - do{ + do { if ((sprite_enable == MP4_SPRITE_STATIC) || - (sprite_enable == MP4_SPRITE_GMC)) + (sprite_enable == MP4_SPRITE_GMC)) { if (sprite_enable != MP4_SPRITE_GMC) { @@ -201,7 +201,7 @@ mp4_Parse_VOL_sprite(void *parent, viddec_mp4_parser_t *parser) cxt->sprite_brightness_change = code & 0x1; cxt->sprite_warping_accuracy = (code >> 1) & 0x3; cxt->no_of_sprite_warping_points = code >> 3; - if(cxt->no_of_sprite_warping_points > 1) + if (cxt->no_of_sprite_warping_points > 1) { DEB("Error: mp4_Parse_VideoObject:bad no_of_sprite_warping_points %d\n", cxt->no_of_sprite_warping_points); @@ -209,7 +209,7 @@ mp4_Parse_VOL_sprite(void *parent, viddec_mp4_parser_t *parser) break; } - if((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && (cxt->sprite_brightness_change)) + if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && (cxt->sprite_brightness_change)) { DEB("Error: mp4_Parse_VideoObject:sprite_brightness_change should be 0 for GMC sprites\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); @@ -224,7 +224,7 @@ mp4_Parse_VOL_sprite(void *parent, viddec_mp4_parser_t *parser) } } ret = MP4_STATUS_OK; - }while(0); + } while (0); return ret; } @@ -236,7 +236,7 @@ static mp4_Status_t mp4_Parse_VOL_quant_mat(void *parent, mp4_VideoObjectLayer_t int32_t getbits=0; mp4_VOLQuant_mat_t *quant = &(vidObjLay->quant_mat_info); - do{ + do { getbits = viddec_pm_get_bits(parent, &(code), 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); quant->load_intra_quant_mat = code; @@ -248,7 +248,7 @@ static mp4_Status_t mp4_Parse_VOL_quant_mat(void *parent, mp4_VideoObjectLayer_t { mp4_copy_default_table((const uint8_t *)&mp4_DefaultIntraQuantMatrix[0], (uint8_t *)&(quant->intra_quant_mat[0]), 64); } - + getbits = viddec_pm_get_bits(parent, &(code), 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); quant->load_nonintra_quant_mat = code; @@ -261,7 +261,7 @@ static mp4_Status_t mp4_Parse_VOL_quant_mat(void *parent, mp4_VideoObjectLayer_t mp4_copy_default_table((const uint8_t *)&mp4_DefaultNonIntraQuantMatrix[0], (uint8_t *)&(quant->nonintra_quant_mat[0]), 64); } ret = MP4_STATUS_OK; - }while(0); + } while (0); return ret; } @@ -273,7 +273,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; int32_t getbits=0; - do{ + do { if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) { /* TODO: check for validity of marker bits */ @@ -290,20 +290,20 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ { uint32_t num_bits=1; - if(vidObjLay->video_object_layer_verid != 1) num_bits=2; + if (vidObjLay->video_object_layer_verid != 1) num_bits=2; getbits = viddec_pm_get_bits(parent, &(code), 
num_bits); BREAK_GETBITS_REQD_MISSING(getbits, ret); vidObjLay->sprite_enable = code; } ret = mp4_Parse_VOL_sprite(parent, parser); - if(ret != MP4_STATUS_OK) + if (ret != MP4_STATUS_OK) { break; } if ((vidObjLay->video_object_layer_verid != 1) && - (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)) + (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)) { /* not supported shape*/ DEB("Error: mp4_Parse_VideoObject: sadct_disable, not supp\n"); @@ -314,7 +314,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ getbits = viddec_pm_get_bits(parent, &(code), 1); BREAK_GETBITS_FAIL(getbits, ret); vidObjLay->not_8_bit = (code > 0 ); - if(vidObjLay->not_8_bit) + if (vidObjLay->not_8_bit) { /* 8 bit is only supported mode*/ DEB("Error: mp4_Parse_VideoObject: not_8_bit, not supp\n"); @@ -341,7 +341,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ if (vidObjLay->quant_type) { ret = mp4_Parse_VOL_quant_mat(parent, vidObjLay); - if(ret != MP4_STATUS_OK) + if (ret != MP4_STATUS_OK) { break; } @@ -353,11 +353,11 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ BREAK_GETBITS_REQD_MISSING(getbits, ret); vidObjLay->quarter_sample = code; } - + getbits = viddec_pm_get_bits(parent, &(code), 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); vidObjLay->complexity_estimation_disable = code; - if(!vidObjLay->complexity_estimation_disable) + if (!vidObjLay->complexity_estimation_disable) {/* complexity estimation not supported */ DEB("Error: mp4_Parse_VideoObject: vidObjLay->complexity_estimation_disable, not supp\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); @@ -368,19 +368,19 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ BREAK_GETBITS_REQD_MISSING(getbits, ret); vidObjLay->resync_marker_disable = ((code & 0x2) > 0); vidObjLay->data_partitioned = code & 0x1; - if(vidObjLay->data_partitioned) + if (vidObjLay->data_partitioned) { getbits = viddec_pm_get_bits(parent, &(code), 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); vidObjLay->reversible_vlc = code; } - + if (vidObjLay->video_object_layer_verid != 1) { getbits = viddec_pm_get_bits(parent, &(code), 1); BREAK_GETBITS_FAIL(getbits, ret); vidObjLay->newpred_enable = code; - if(vidObjLay->newpred_enable) + if (vidObjLay->newpred_enable) { DEB("Error: NEWPRED mode is not supported\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); @@ -394,7 +394,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ getbits = viddec_pm_get_bits(parent, &(code), 1); BREAK_GETBITS_FAIL(getbits, ret); vidObjLay->scalability = code; - if(vidObjLay->scalability) + if (vidObjLay->scalability) { DEB("Error: VOL scalability is not supported\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); @@ -403,7 +403,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ // No need to parse further - none of the fields are interesting to parser/decoder/user ret = MP4_STATUS_OK; - }while(0); + } while (0); return ret; } @@ -414,28 +414,28 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse mp4_VisualObject_t *visObj = &(pInfo->VisualObject); mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject); mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; - int32_t getbits=0; + int32_t getbits=0; //DEB("entering mp4_Parse_VideoObjectLayer: bs_err: %d, 
ret: %d\n", parser->bitstream_error, ret); - do{ + do { vidObjLay->VideoObjectPlane.sprite_transmit_mode = MP4_SPRITE_TRANSMIT_MODE_PIECE; vidObjLay->short_video_header = 0; vidObjLay->video_object_layer_id = (parser->current_sc & 0xF); - + getbits = viddec_pm_get_bits(parent, &code, 9); BREAK_GETBITS_REQD_MISSING(getbits, ret); vidObjLay->video_object_type_indication = code & 0xFF; vidObjLay->random_accessible_vol = ((code & 0x100) > 0); - if(!mp4_pvt_valid_object_type_indication(vidObjLay->video_object_type_indication)) + if (!mp4_pvt_valid_object_type_indication(vidObjLay->video_object_type_indication)) { /* Streams with "unknown" type mismatch with ref */ DEB("Warning: video_object_type_indication = %d, forcing to 1\n", vidObjLay->video_object_type_indication); vidObjLay->video_object_type_indication = 1; } - if(vidObjLay->video_object_type_indication == MP4_VIDEO_OBJECT_TYPE_FINE_GRANULARITY_SCALABLE) + if (vidObjLay->video_object_type_indication == MP4_VIDEO_OBJECT_TYPE_FINE_GRANULARITY_SCALABLE) {/* This is not a supported type by HW */ DEB("ERROR: mp4_Parse_VideoObject:video_object_type_indication = %.2X\n", vidObjLay->video_object_type_indication); @@ -449,14 +449,14 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse vidObjLay->is_object_layer_identifier = code; vidObjLay->video_object_layer_verid = (mp4_pvt_valid_object_layer_verid(visObj->visual_object_verid)) ? visObj->visual_object_verid : 1; - + if (vidObjLay->is_object_layer_identifier) { getbits = viddec_pm_get_bits(parent, &(code), 7); BREAK_GETBITS_REQD_MISSING(getbits, ret); vidObjLay->video_object_layer_priority = code & 0x7; vidObjLay->video_object_layer_verid = (code >> 3) & 0xF; - if(!mp4_pvt_valid_object_layer_verid(vidObjLay->video_object_layer_verid)) + if (!mp4_pvt_valid_object_layer_verid(vidObjLay->video_object_layer_verid)) { DEB("Error: mp4_Parse_VideoObject:is_identifier = %d, expected[1,5]\n", vidObjLay->video_object_layer_verid); @@ -470,21 +470,21 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse getbits = viddec_pm_get_bits(parent, &(code), 4); BREAK_GETBITS_REQD_MISSING(getbits, ret); vidObjLay->aspect_ratio_info = code & 0xF; - if(vidObjLay->aspect_ratio_info == MP4_ASPECT_RATIO_EXTPAR) + if (vidObjLay->aspect_ratio_info == MP4_ASPECT_RATIO_EXTPAR) { getbits = viddec_pm_get_bits(parent, &(code), 16); BREAK_GETBITS_REQD_MISSING(getbits, ret); vidObjLay->aspect_ratio_info_par_width = (code >> 8) & 0xFF; vidObjLay->aspect_ratio_info_par_height = code & 0xFF; } - + getbits = viddec_pm_get_bits(parent, &(code), 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); vidObjLay->is_vol_control_parameters = code; - if(vidObjLay->is_vol_control_parameters) + if (vidObjLay->is_vol_control_parameters) { ret = mp4_pvt_VOL_volcontrolparameters(parent, parser); - if(ret != MP4_STATUS_OK) + if (ret != MP4_STATUS_OK) { break; } @@ -503,20 +503,20 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse } if ((vidObjLay->video_object_layer_verid != 1) && - (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE)) + (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE)) {/* Grayscale not supported */ DEB("Error: MP4_SHAPE_TYPE_GRAYSCALE not supported\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } - + getbits = viddec_pm_get_bits(parent, &(code), 19); BREAK_GETBITS_REQD_MISSING(getbits, ret); /* TODO: check validity of marker */ - vidObjLay->vop_time_increment_resolution = 
(code >> 2) & 0xFFFF; + vidObjLay->vop_time_increment_resolution = (code >> 2) & 0xFFFF; vidObjLay->fixed_vop_rate = code & 0x1; - if(vidObjLay->vop_time_increment_resolution == 0) + if (vidObjLay->vop_time_increment_resolution == 0) { DEB("Error: 0 value for vop_time_increment_resolution\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); @@ -524,9 +524,9 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse } /* calculate number bits in vop_time_increment_resolution */ vidObjLay->vop_time_increment_resolution_bits = (uint8_t)mp4_pvt_count_number_of_bits( - (uint32_t)(vidObjLay->vop_time_increment_resolution -1)); + (uint32_t)(vidObjLay->vop_time_increment_resolution -1)); - if(vidObjLay->fixed_vop_rate) + if (vidObjLay->fixed_vop_rate) { getbits = viddec_pm_get_bits(parent, &(code), vidObjLay->vop_time_increment_resolution_bits); BREAK_GETBITS_REQD_MISSING(getbits, ret); @@ -536,7 +536,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) { ret = mp4_Parse_VOL_notbinaryonly(parent, parser); - if(ret != MP4_STATUS_OK) + if (ret != MP4_STATUS_OK) { break; } @@ -551,10 +551,10 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse vidObjLay->VideoObjectPlane.sprite_transmit_mode = MP4_SPRITE_TRANSMIT_MODE_PIECE; ret = MP4_STATUS_OK; - } while(0); + } while (0); mp4_set_hdr_bitstream_error(parser, true, ret); - if(ret != MP4_STATUS_OK) + if (ret != MP4_STATUS_OK) parser->bitstream_error |= MP4_BS_ERROR_HDR_NONDEC; //DEB("before wkld mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret); @@ -562,7 +562,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse { viddec_workload_item_t wi; viddec_workload_t *wl = viddec_pm_get_header(parent); - + wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ; wi.mp4_vol.vol_aspect_ratio = 0; @@ -584,7 +584,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse viddec_fw_mp4_vol_set_vop_time_increment_resolution(&wi.mp4_vol, vidObjLay->vop_time_increment_resolution); ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); - if(ret == 1) + if (ret == 1) ret = MP4_STATUS_OK; memset(&(wl->attrs), 0, sizeof(viddec_frame_attributes_t)); @@ -592,6 +592,6 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse wl->attrs.cont_size.width = vidObjLay->video_object_layer_width; wl->attrs.cont_size.height = vidObjLay->video_object_layer_height; } - - return ret; + + return ret; } diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c index 7410337..ae4cc6d 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c @@ -10,7 +10,7 @@ mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t uint32_t time_code = 0; data = &(pInfo->VisualObject.VideoObject.GroupOfVideoObjectPlane); - + do { getbits = viddec_pm_get_bits(parent, &code, 20); @@ -22,7 +22,7 @@ mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t time_code = code = code >> 2; data->time_code_seconds = code & 0x3F; code = code >> 6; - if((code & 1) == 0) + if ((code & 1) == 0) {/* SGA:Should we ignore marker bit? 
*/ DEB("Error:mp4_Parse_GroupOfVideoObjectPlane: Invalid marker\n"); } @@ -35,15 +35,15 @@ mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t data->time_base = data->time_code_seconds + (60*data->time_code_minutes) + (3600*data->time_code_hours); // Need to convert this into no. of ticks data->time_base *= pInfo->VisualObject.VideoObject.vop_time_increment_resolution; - - } while(0); + + } while (0); mp4_set_hdr_bitstream_error(parser, true, ret); // POPULATE WORKLOAD ITEM { viddec_workload_item_t wi; - + wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ; wi.mp4_gvop.gvop_info = 0; @@ -55,7 +55,7 @@ mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t viddec_fw_mp4_gvop_set_time_code(&wi.mp4_gvop, time_code); ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); - if(ret == 1) + if (ret == 1) ret = MP4_STATUS_OK; } @@ -79,7 +79,7 @@ static inline mp4_Status_t mp4_brightness_change(void *parent, int32_t *b_change { getbits = viddec_pm_skip_bits(parent, 4); getbits = viddec_pm_get_bits(parent, &code, 9); - *b_change = 113 + code; + *b_change = 113 + code; } else if (code >= 12) { @@ -91,7 +91,7 @@ static inline mp4_Status_t mp4_brightness_change(void *parent, int32_t *b_change { getbits = viddec_pm_skip_bits(parent, 2); getbits = viddec_pm_get_bits(parent, &code, 6); - *b_change = (code < 32) ? ((int32_t)code - 48) : ((int32_t)code - 15); + *b_change = (code < 32) ? ((int32_t)code - 48) : ((int32_t)code - 15); } else { @@ -109,18 +109,18 @@ static inline int32_t mp4_Sprite_dmv_length(void * parent, int32_t *dmv_length) mp4_Status_t ret= MP4_STATUS_PARSE_ERROR; *dmv_length=0; skip=3; - do{ + do { getbits = viddec_pm_peek_bits(parent, &code, skip); BREAK_GETBITS_REQD_MISSING(getbits, ret); - - if(code == 7) + + if (code == 7) { viddec_pm_skip_bits(parent, skip); getbits = viddec_pm_peek_bits(parent, &code, 9); BREAK_GETBITS_REQD_MISSING(getbits, ret); - + skip=1; - while((code & 256) != 0) + while ((code & 256) != 0) {/* count number of 1 bits */ code <<=1; skip++; @@ -134,8 +134,8 @@ static inline int32_t mp4_Sprite_dmv_length(void * parent, int32_t *dmv_length) } viddec_pm_skip_bits(parent, skip); ret= MP4_STATUS_OK; - - }while(0); + + } while (0); return ret; } @@ -145,14 +145,14 @@ mp4_Sprite_Trajectory(void *parent, mp4_VideoObjectLayer_t *vidObjLay, mp4_Video uint32_t code, i; int32_t dmv_length=0, dmv_code=0, getbits=0; mp4_Status_t ret = MP4_STATUS_OK; - for(i=0; i < (uint32_t)vidObjLay->sprite_info.no_of_sprite_warping_points; i++ ) + for (i=0; i < (uint32_t)vidObjLay->sprite_info.no_of_sprite_warping_points; i++ ) { ret = (mp4_Status_t)mp4_Sprite_dmv_length(parent, &dmv_length); - if(ret != MP4_STATUS_OK) + if (ret != MP4_STATUS_OK) { break; } - if(dmv_length <= 0) + if (dmv_length <= 0) { dmv_code = 0; } @@ -168,7 +168,7 @@ mp4_Sprite_Trajectory(void *parent, mp4_VideoObjectLayer_t *vidObjLay, mp4_Video } getbits = viddec_pm_get_bits(parent, &code, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if(code != 1) + if (code != 1) { ret = MP4_STATUS_NOTSUPPORT; break; @@ -176,11 +176,11 @@ mp4_Sprite_Trajectory(void *parent, mp4_VideoObjectLayer_t *vidObjLay, mp4_Video vidObjPlane->warping_mv_code_du[i] = dmv_code; /* TODO: create another inline function to avoid code duplication */ ret = (mp4_Status_t)mp4_Sprite_dmv_length(parent, &dmv_length); - if(ret != MP4_STATUS_OK) + if (ret != MP4_STATUS_OK) { break; } - if(dmv_length <= 0) + if (dmv_length <= 0) { dmv_code = 0; } @@ -196,13 +196,13 @@ 
mp4_Sprite_Trajectory(void *parent, mp4_VideoObjectLayer_t *vidObjLay, mp4_Video
         }
         getbits = viddec_pm_get_bits(parent, &code, 1);
         BREAK_GETBITS_REQD_MISSING(getbits, ret);
-        if(code != 1)
+        if (code != 1)
         {
             ret = MP4_STATUS_NOTSUPPORT;
             break;
         }
         vidObjPlane->warping_mv_code_dv[i] = dmv_code;
-
+
     }
     return ret;
 }
@@ -219,7 +219,7 @@ static inline mp4_Status_t mp4_pvt_extract_modulotimebase_from_VideoObjectPlane(
         getbits = viddec_pm_get_bits(parent, &code, 1);
         BREAK_GETBITS_REQD_MISSING(getbits, ret);
         *base += code;
-    }while(code != 0);
+    } while (code != 0);
     return ret;
 }
@@ -227,7 +227,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse
 {
     uint32_t code;
     mp4_Info_t *pInfo = &(parser->info);
-    mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
+    mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
     mp4_VideoObjectPlane_t *vidObjPlane = &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
     int32_t getbits=0;
     mp4_Status_t ret= MP4_STATUS_PARSE_ERROR;
@@ -237,8 +237,8 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse
         getbits = viddec_pm_get_bits(parent, &code, 2);
         BREAK_GETBITS_REQD_MISSING(getbits, ret);
         vidObjPlane->vop_coding_type = code & 0x3;
-        if( mp4_pvt_extract_modulotimebase_from_VideoObjectPlane(parent,
-            &(vidObjPlane->modulo_time_base)) == MP4_STATUS_REQD_DATA_ERROR)
+        if ( mp4_pvt_extract_modulotimebase_from_VideoObjectPlane(parent,
+                &(vidObjPlane->modulo_time_base)) == MP4_STATUS_REQD_DATA_ERROR)
         {
             break;
         }
@@ -248,7 +248,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse
         {
             uint32_t numbits=0;
             numbits = vidObjLay->vop_time_increment_resolution_bits;
-            if(numbits == 0) numbits=1; /*TODO:check if its greater than 16 bits ?? */
+            if (numbits == 0) numbits=1; /*TODO: check if it's greater than 16 bits ?? */
             getbits = viddec_pm_get_bits(parent, &code, numbits);
             BREAK_GETBITS_REQD_MISSING(getbits, ret);
             vidObjPlane->vop_time_increment = code;
@@ -258,13 +258,13 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse
         BREAK_GETBITS_REQD_MISSING(getbits, ret);
         vidObjPlane->vop_coded = code & 0x1;
 
-        if(vidObjPlane->vop_coded == 0)
+        if (vidObjPlane->vop_coded == 0)
         {
             ret = MP4_STATUS_OK;/* Exit point 1 */
             break;
         }
 
-        if(vidObjLay->newpred_enable)
+        if (vidObjLay->newpred_enable)
         {
             /* New pred mode not supported in HW */
             DEB("Error: mp4_Parse_VideoObjectPlane: New pred in vidObjPlane is not supported\n");
@@ -273,9 +273,9 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse
         }
 
         if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) &&
-            ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P) ||
-            ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S) &&
-            (vidObjLay->sprite_enable == MP4_SPRITE_GMC))))
+                ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P) ||
+                 ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S) &&
+                  (vidObjLay->sprite_enable == MP4_SPRITE_GMC))))
         {
             getbits = viddec_pm_get_bits(parent, &code, 1);
             BREAK_GETBITS_REQD_MISSING(getbits, ret);
@@ -283,9 +283,9 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse
         }
 
         if (vidObjLay->reduced_resolution_vop_enable &&
-            (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) &&
-            ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) ||
-            (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P)))
+                (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) &&
+                ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) ||
+                 (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P)))
         {
             getbits = viddec_pm_get_bits(parent, &code, 1);
             BREAK_GETBITS_REQD_MISSING(getbits, ret);
@@ -302,14 +302,14 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse
         {
             /* we support only rectangular shapes so the following logic is not required */
             ret = MP4_STATUS_NOTSUPPORT;
-            break;
+            break;
         }
 
         if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) &&
-            (!vidObjLay->complexity_estimation_disable))
+                (!vidObjLay->complexity_estimation_disable))
         {
             /* Not required according to DE team */
-            //read_vop_complexity_estimation_header();
+            //read_vop_complexity_estimation_header();
             ret = MP4_STATUS_NOTSUPPORT;
             break;
         }
@@ -327,12 +327,12 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse
                 vidObjPlane->alternate_vertical_scan_flag = code & 0x1;
             }
         }
-
+
         if (((vidObjLay->sprite_enable == MP4_SPRITE_STATIC) || (vidObjLay->sprite_enable == MP4_SPRITE_GMC)) &&
-            (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S))
+                (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S))
         {
-            if (vidObjLay->sprite_info.no_of_sprite_warping_points > 0){
-                if (mp4_Sprite_Trajectory(parent, vidObjLay, vidObjPlane) != MP4_STATUS_OK){
+            if (vidObjLay->sprite_info.no_of_sprite_warping_points > 0) {
+                if (mp4_Sprite_Trajectory(parent, vidObjLay, vidObjPlane) != MP4_STATUS_OK) {
                     break;
                 }
             }
@@ -340,7 +340,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse
             if (vidObjLay->sprite_info.sprite_brightness_change)
             {
                 int32_t change=0;
-                if(mp4_brightness_change(parent, &change) == MP4_STATUS_PARSE_ERROR)
+                if (mp4_brightness_change(parent, &change) == MP4_STATUS_PARSE_ERROR)
                 {
                     break;
                 }
@@ -393,7 +393,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse
     if (!vidObjLay->scalability)
     {
         if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) &&
-            (vidObjPlane->vop_coding_type != MP4_VOP_TYPE_I))
+                (vidObjPlane->vop_coding_type != MP4_VOP_TYPE_I))
         {
             ret = MP4_STATUS_NOTSUPPORT;
             break;
@@ -414,7 +414,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse
         }
         /* Since we made it all the way here it's a success condition */
         ret = MP4_STATUS_OK; /* Exit point 3 */
-    }while(0);
+    } while (0);
 
     mp4_set_hdr_bitstream_error(parser, false, ret);
 
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c
index e74444c..a91fd5c 100644
--- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c
@@ -3,20 +3,20 @@
 static inline uint8_t mp4_pvt_isValid_verID(uint8_t id)
 {
     uint8_t ret=true;
-    switch(id)
+    switch (id)
     {
-        case 1:
-        case 2:
-        case 4:
-        case 5:
-        {
-            break;
-        }
-        default:
-        {
-            ret = false;
-            break;
-        }
+    case 1:
+    case 2:
+    case 4:
+    case 5:
+    {
+        break;
+    }
+    default:
+    {
+        ret = false;
+        break;
+    }
     }
     return ret;
 } // mp4_pvt_isValid_verID
@@ -38,7 +38,7 @@ static mp4_Status_t mp4_Parse_video_signal_type(void *parent, mp4_VideoSignalTyp
         getbits = viddec_pm_get_bits(parent, &data, 1);
         BREAK_GETBITS_FAIL(getbits, ret);
         vidSignal->is_video_signal_type = (data > 0);
-        if(vidSignal->is_video_signal_type)
+        if (vidSignal->is_video_signal_type)
         {
             getbits = viddec_pm_get_bits(parent, &data, 5);
             BREAK_GETBITS_FAIL(getbits, ret);
@@ -46,7 +46,7 @@ static mp4_Status_t mp4_Parse_video_signal_type(void *parent, mp4_VideoSignalTyp
             vidSignal->video_range = ((data & 0x2) > 0);
             data = data >> 2;
             vidSignal->video_format = data & 0x7;
-            if(vidSignal->is_colour_description)
+            if (vidSignal->is_colour_description)
             {
                 getbits = viddec_pm_get_bits(parent, &data, 24);
                 BREAK_GETBITS_FAIL(getbits, ret);
@@ -56,35 +56,35 @@ static mp4_Status_t mp4_Parse_video_signal_type(void *parent, mp4_VideoSignalTyp
             }
         }
         ret = MP4_STATUS_OK;
-    }while(0);
+    } while (0);
 
     return ret;
 } // mp4_Parse_video_signal_type
 
 void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status)
 {
-    //DEB("Entering mp4_set_hdr_bitstream_error: bs_err: 0x%x, hdr: %d, parse_status: %d\n",
+    //DEB("Entering mp4_set_hdr_bitstream_error: bs_err: 0x%x, hdr: %d, parse_status: %d\n",
     //    parser->bitstream_error, hdr_flag, parse_status);
 
-    if(hdr_flag)
+    if (hdr_flag)
     {
-        if(parse_status & MP4_STATUS_NOTSUPPORT)
+        if (parse_status & MP4_STATUS_NOTSUPPORT)
             parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
-        if(parse_status & MP4_STATUS_PARSE_ERROR)
+        if (parse_status & MP4_STATUS_PARSE_ERROR)
             parser->bitstream_error |= MP4_BS_ERROR_HDR_PARSE;
-        if(parse_status & MP4_STATUS_REQD_DATA_ERROR)
+        if (parse_status & MP4_STATUS_REQD_DATA_ERROR)
             parser->bitstream_error |= MP4_BS_ERROR_HDR_NONDEC;
         parser->bitstream_error &= MP4_HDR_ERROR_MASK;
     }
     else
     {
-        if(parse_status & MP4_STATUS_NOTSUPPORT)
+        if (parse_status & MP4_STATUS_NOTSUPPORT)
            parser->bitstream_error |= MP4_BS_ERROR_FRM_UNSUP;
-        if(parse_status & MP4_STATUS_PARSE_ERROR)
+        if (parse_status & MP4_STATUS_PARSE_ERROR)
            parser->bitstream_error |= MP4_BS_ERROR_FRM_PARSE;
-        if(parse_status & MP4_STATUS_REQD_DATA_ERROR)
+        if (parse_status & MP4_STATUS_REQD_DATA_ERROR)
            parser->bitstream_error |= MP4_BS_ERROR_FRM_NONDEC;
-    }
+    }
 
     //DEB("Exiting mp4_set_hdr_bitstream_error: bs_err: 0x%x\n", parser->bitstream_error);
 
@@ -98,32 +98,32 @@ mp4_Status_t
mp4_Parse_VisualSequence(void *parent, viddec_mp4_parser_t *parser) mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; getbits = viddec_pm_get_bits(parent, &data, 8); - if(getbits != -1) + if (getbits != -1) { parser->info.profile_and_level_indication = data & 0xFF; // If present, check for validity - switch(parser->info.profile_and_level_indication) + switch (parser->info.profile_and_level_indication) { - case MP4_SIMPLE_PROFILE_LEVEL_0: - case MP4_SIMPLE_PROFILE_LEVEL_1: - case MP4_SIMPLE_PROFILE_LEVEL_2: - case MP4_SIMPLE_PROFILE_LEVEL_3: - case MP4_SIMPLE_PROFILE_LEVEL_4a: - case MP4_SIMPLE_PROFILE_LEVEL_5: - case MP4_SIMPLE_PROFILE_LEVEL_6: - case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_0: - case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_1: - case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_2: - case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3: - case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_4: - case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_5: - case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3B: - parser->bitstream_error = MP4_BS_ERROR_NONE; - ret = MP4_STATUS_OK; - break; - default: - parser->bitstream_error = MP4_BS_ERROR_HDR_UNSUP | MP4_BS_ERROR_HDR_NONDEC; - break; + case MP4_SIMPLE_PROFILE_LEVEL_0: + case MP4_SIMPLE_PROFILE_LEVEL_1: + case MP4_SIMPLE_PROFILE_LEVEL_2: + case MP4_SIMPLE_PROFILE_LEVEL_3: + case MP4_SIMPLE_PROFILE_LEVEL_4a: + case MP4_SIMPLE_PROFILE_LEVEL_5: + case MP4_SIMPLE_PROFILE_LEVEL_6: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_0: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_1: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_2: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_4: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_5: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3B: + parser->bitstream_error = MP4_BS_ERROR_NONE; + ret = MP4_STATUS_OK; + break; + default: + parser->bitstream_error = MP4_BS_ERROR_HDR_UNSUP | MP4_BS_ERROR_HDR_NONDEC; + break; } } else @@ -154,7 +154,7 @@ mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser) viddec_pm_get_bits(parent, &data, 7); visObj->visual_object_priority = data & 0x7; data = data >> 3; - if(mp4_pvt_isValid_verID(data & 0xF)) + if (mp4_pvt_isValid_verID(data & 0xF)) { visObj->visual_object_verid = data & 0xF; } @@ -183,7 +183,7 @@ mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser) // No need to check for user data or visual object layer because they have a different start code // and will not be part of this header - } while(0); + } while (0); mp4_set_hdr_bitstream_error(parser, true, ret); @@ -191,7 +191,7 @@ mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser) { viddec_workload_item_t wi; mp4_VideoSignalType_t *vst = &(visObj->VideoSignalType); - + wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ; wi.mp4_vs_vo.vs_item = 0; @@ -201,12 +201,12 @@ mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser) viddec_fw_mp4_vs_set_profile_and_level_indication(&wi.mp4_vs_vo, pInfo->profile_and_level_indication); viddec_fw_mp4_vo_set_video_signal_type(&wi.mp4_vs_vo, vst->is_video_signal_type); - if(vst->is_video_signal_type) + if (vst->is_video_signal_type) { viddec_fw_mp4_vo_set_video_range(&wi.mp4_vs_vo, vst->video_range); viddec_fw_mp4_vo_set_video_format(&wi.mp4_vs_vo, vst->video_format); viddec_fw_mp4_vo_set_colour_description(&wi.mp4_vs_vo, vst->is_colour_description); - if(vst->is_colour_description) + if (vst->is_colour_description) { viddec_fw_mp4_vo_set_transfer_char(&wi.mp4_vs_vo, vst->transfer_characteristics); 
viddec_fw_mp4_vo_set_color_primaries(&wi.mp4_vs_vo, vst->colour_primaries); @@ -215,10 +215,10 @@ mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser) int ret_val; ret_val = viddec_pm_append_workitem(parent, &wi, false); - if(ret_val == 1) + if (ret_val == 1) ret = MP4_STATUS_OK; } - + return ret; } // mp4_Parse_VisualObject @@ -231,39 +231,39 @@ mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser) DEB("ParseUser-prev_sc: 0x%x\n", parser->prev_sc); /* find the scope based on start code sc */ - switch(parser->prev_sc) { - case MP4_SC_VISUAL_OBJECT_SEQUENCE: - wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA; - break; - case MP4_SC_VISUAL_OBJECT: - wi.vwi_type = VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA; - break; - case MP4_SC_GROUP_OF_VOP: - wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA; - break; - case MP4_SC_VIDEO_OBJECT_LAYER_MIN: - wi.vwi_type = VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA; - break; - default: - wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen - break; + switch (parser->prev_sc) { + case MP4_SC_VISUAL_OBJECT_SEQUENCE: + wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA; + break; + case MP4_SC_VISUAL_OBJECT: + wi.vwi_type = VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA; + break; + case MP4_SC_GROUP_OF_VOP: + wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA; + break; + case MP4_SC_VIDEO_OBJECT_LAYER_MIN: + wi.vwi_type = VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA; + break; + default: + wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen + break; } - + /* Read 1 byte of user data and store it in workitem for the current stream level (VS/VO/VOL/GVOP). Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size, append the workitem. This loop is repeated till all user data is extracted and appended. 
       */
     wi.user_data.size = 0;
-    while(viddec_pm_get_bits(parent, &user_data, 8) != -1)
+    while (viddec_pm_get_bits(parent, &user_data, 8) != -1)
     {
         /* Store the valid byte in data payload */
         wi.user_data.data_payload[wi.user_data.size] = user_data;
         wi.user_data.size++;
-
+
         /* When size exceeds payload size, append workitem and continue */
         if (wi.user_data.size >= 11)
        {
             viddec_pm_setup_userdata(&wi);
-
+
             ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false);
             wi.user_data.size = 0;
         }
@@ -272,7 +272,7 @@ mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser)
     if (wi.user_data.size > 0)
     {
         int i;
-        for(i=wi.user_data.size;i<11;i++)
+        for (i=wi.user_data.size; i<11; i++)
         {
             wi.user_data.data_payload[i] = 0;
         }
@@ -281,7 +281,7 @@ mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser)
         wi.user_data.size = 0;
     }
 
-    if(ret == 1)
+    if (ret == 1)
         ret = MP4_STATUS_OK;
 
     return ret;
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c
index 6a34500..ba296e7 100644
--- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c
@@ -34,19 +34,19 @@ uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state)
     phase = cxt->phase;
     cxt->sc_end_pos = -1;
     p_info = (viddec_mp4_parser_t *)pcxt;
-
+
     /* parse until there is more data and start code not found */
-    while((data_left > 0) &&(phase < 3))
+    while ((data_left > 0) &&(phase < 3))
     {
         /* Check if we are byte aligned & phase=0; if that's the case we can check
            a word at a time instead of a byte */
-        if(((((uint32_t)ptr) & 0x3) == 0) && (phase == 0))
+        if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0))
         {
-            while(data_left > 3)
+            while (data_left > 3)
             {
                 uint32_t data;
                 char mask1 = 0, mask2=0;
-
+
                 data = *((uint32_t *)ptr);
 #ifndef MFDBIGENDIAN
                 data = SWAP_WORD(data);
@@ -55,9 +55,11 @@ uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state)
                 mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
                 /* If the second byte and fourth byte are not zeros, then we cannot have a
                    start code here, as we need two consecutive zero bytes for a start code pattern */
-                if(mask1 && mask2)
+                if (mask1 && mask2)
                 {/* Success so skip 4 bytes and start over */
-                    ptr+=4;size+=4;data_left-=4;
+                    ptr+=4;
+                    size+=4;
+                    data_left-=4;
                     continue;
                 }
                 else
                 {
@@ -66,28 +68,32 @@ uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state)
                 }
             }
         }
-
+
         /* At this point either the data is not on a word boundary, or phase > 0, or we are
            on a word boundary but detected two zero bytes in the word, so we
           look one byte at a time*/
-        if(data_left > 0)
+        if (data_left > 0)
        {
-            if(*ptr == FIRST_STARTCODE_BYTE)
+            if (*ptr == FIRST_STARTCODE_BYTE)
             {/* Phase can be 3 only if third start code byte is found */
                 phase++;
-                ptr++;size++;data_left--;
-                if(phase > 2)
+                ptr++;
+                size++;
+                data_left--;
+                if (phase > 2)
                 {
                     phase = 2;
                     if ( (((uint32_t)ptr) & 0x3) == 0 )
                     {
-                        while( data_left > 3 )
+                        while ( data_left > 3 )
                         {
-                            if(*((uint32_t *)ptr) != 0)
+                            if (*((uint32_t *)ptr) != 0)
                             {
                                 break;
                             }
-                            ptr+=4;size+=4;data_left-=4;
+                            ptr+=4;
+                            size+=4;
+                            data_left-=4;
                         }
                     }
                 }
@@ -95,13 +101,13 @@ uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state)
             else
             {
                 uint8_t normal_sc=0, short_sc=0;
-                if(phase == 2)
+                if (phase == 2)
                 {
                     normal_sc = (*ptr == THIRD_STARTCODE_BYTE);
                     short_sc  = (p_info->ignore_scs == 0) && (SHORT_THIRD_STARTCODE_BYTE == ( *ptr & 0xFC));
                 }
-                if(!(normal_sc | short_sc))
+                if (!(normal_sc | short_sc))
                 {
                     phase = 0;
                 }
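
[Note: the hunks above and below only re-indent viddec_parse_sc_mp4(), but the word-at-a-time scan they touch is worth spelling out. When the read pointer is 32-bit aligned and no prefix byte has matched yet (phase == 0), the parser loads a whole word; if that word's second and fourth bytes are both nonzero, no 00 00 pair, and hence no 00 00 01 start-code prefix, can begin inside it, so all four bytes are skipped at once. The minimal standalone sketch below shows the same idea; find_startcode() and its signature are illustrative, not code from this library.]

#include <stdint.h>
#include <string.h>

/* Return the offset of the first 00 00 01 start-code prefix in buf, or -1 if
 * none is found.  Mirrors the parser's prefilter: if the 2nd and 4th bytes of
 * an aligned 32-bit word are both nonzero, a start code cannot begin anywhere
 * inside that word (a prefix needs two consecutive zero bytes), so the word
 * is skipped whole. */
static int find_startcode(const uint8_t *buf, size_t len)
{
    size_t i = 0;
    while (i + 3 <= len)
    {
        if (((((uintptr_t)(buf + i)) & 0x3) == 0) && (i + 4 <= len))
        {
            uint32_t w;
            memcpy(&w, buf + i, 4);             /* endian-neutral byte view */
            const uint8_t *b = (const uint8_t *)&w;
            if ((b[1] != 0) && (b[3] != 0))     /* no 00 00 pair can start here */
            {
                i += 4;
                continue;
            }
        }
        if ((buf[i] == 0x00) && (buf[i + 1] == 0x00) && (buf[i + 2] == 0x01))
            return (int)i;
        i++;
    }
    return -1;
}

[The SC_BYTE_MASK0/SC_BYTE_MASK1 tests in the real code appear to perform the same two-byte-lane check directly on the loaded word instead of unpacking it.]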
@@ -111,7 +117,7 @@ uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state) phase = 3; p_info->cur_sc_prefix = p_info->next_sc_prefix; p_info->next_sc_prefix = (normal_sc) ? 1: 0; - if(normal_sc) + if (normal_sc) { p_info->ignore_scs=1; } @@ -125,11 +131,13 @@ uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state) break; } } - ptr++;size++;data_left--; + ptr++; + size++; + data_left--; } } } - if((data_left > 0) && (phase == 3)) + if ((data_left > 0) && (phase == 3)) { cxt->sc_end_pos++; state->next_sc = cxt->buf[cxt->sc_end_pos]; diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h b/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h index 66a93df..e4f1b3f 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h @@ -16,7 +16,7 @@ /* If the pixel data is left near an emulation prevention sequence, the decoder will be unaware unless we send some previous bytes */ //#define PADDING_FOR_EMUL 3 -#define PADDING_FOR_EMUL 0 +#define PADDING_FOR_EMUL 0 #define GET_BLSB( name, bitf ) BLSB_MFD_##name##_##bitf #define GET_BMSK( name, bitf ) BMSK_MFD_##name##_##bitf @@ -26,35 +26,35 @@ enum vc1_workload_item_type { - VIDDEC_WORKLOAD_VC1_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC, - VIDDEC_WORKLOAD_VC1_BITOFFSET, - VIDDEC_WORKLOAD_VC1_BITPLANE0, - VIDDEC_WORKLOAD_VC1_BITPLANE1, - VIDDEC_WORKLOAD_VC1_BITPLANE2, - VIDDEC_WORKLOAD_VC1_REGS_SEQ_ENTRY, - VIDDEC_WORKLOAD_VC1_REGS_SIZE_AND_AP_RANGEMAP, - VIDDEC_WORKLOAD_VC1_REGS_INT_COM_FW, - VIDDEC_WORKLOAD_VC1_REGS_INT_COM_BW, - VIDDEC_WORKLOAD_VC1_REGS_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO, - VIDDEC_WORKLOAD_VC1_REGS_SLICE_FRAME_TYPE_INFO, - VIDDEC_WORKLOAD_VC1_REGS_SLICE_CONTROL_INFO, - VIDDEC_WORKLOAD_VC1_REGS_SLICE_OTHER_INFO, - VIDDEC_WORKLOAD_VC1_REGS_REF_FRAME_TYPE, - VIDDEC_WORKLOAD_VC1_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0, - VIDDEC_WORKLOAD_VC1_FUTURE_FRAME, + VIDDEC_WORKLOAD_VC1_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC, + VIDDEC_WORKLOAD_VC1_BITOFFSET, + VIDDEC_WORKLOAD_VC1_BITPLANE0, + VIDDEC_WORKLOAD_VC1_BITPLANE1, + VIDDEC_WORKLOAD_VC1_BITPLANE2, + VIDDEC_WORKLOAD_VC1_REGS_SEQ_ENTRY, + VIDDEC_WORKLOAD_VC1_REGS_SIZE_AND_AP_RANGEMAP, + VIDDEC_WORKLOAD_VC1_REGS_INT_COM_FW, + VIDDEC_WORKLOAD_VC1_REGS_INT_COM_BW, + VIDDEC_WORKLOAD_VC1_REGS_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO, + VIDDEC_WORKLOAD_VC1_REGS_SLICE_FRAME_TYPE_INFO, + VIDDEC_WORKLOAD_VC1_REGS_SLICE_CONTROL_INFO, + VIDDEC_WORKLOAD_VC1_REGS_SLICE_OTHER_INFO, + VIDDEC_WORKLOAD_VC1_REGS_REF_FRAME_TYPE, + VIDDEC_WORKLOAD_VC1_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0, + VIDDEC_WORKLOAD_VC1_FUTURE_FRAME, }; typedef enum { - vc1_ProgressiveFrame = 0, - vc1_InterlacedFrame = 2, - vc1_InterlacedField = 3, - vc1_PictureFormatNone + vc1_ProgressiveFrame = 0, + vc1_InterlacedFrame = 2, + vc1_InterlacedField = 3, + vc1_PictureFormatNone } vc1_fcm; /** This enumeration defines the various frame types as defined in PTYPE syntax -element. -PTYPE interpretation depends on bitstream profile. The value that needs to get +element. +PTYPE interpretation depends on bitstream profile. The value that needs to get programmed in the frame_type register 0x2218 is this generic enum obtained from Canmore code. Changing this enum to match the spec for each profile caused md5 mismatches. 
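
[Note, before the next hunk: one pattern shared by this patch's two user-data parsers (mp4_Parse_UserData above, vc1_ParseAndAppendUserData further down) deserves a compact illustration. Bytes are accumulated into a workitem payload of at most 11 bytes, each full payload is appended to the workload, and a partial final payload is zero-padded before being appended. The sketch below is self-contained and assumes nothing from the library; emit_item() merely stands in for viddec_pm_setup_userdata()/viddec_pm_append_workitem(), and all names are illustrative.]

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define UD_PAYLOAD_SIZE 11  /* max user-data bytes per workload item */

typedef struct {
    uint8_t  data_payload[UD_PAYLOAD_SIZE];
    uint32_t size;          /* number of valid bytes in data_payload */
} user_data_item_t;

/* Stand-in for viddec_pm_setup_userdata() + viddec_pm_append_workitem(). */
static void emit_item(const user_data_item_t *item)
{
    printf("workitem with %u user-data byte(s)\n", (unsigned)item->size);
}

/* Chunk a user-data run into 11-byte payloads, zero-padding the tail,
 * the way both user-data loops in this patch do. */
static void append_user_data(const uint8_t *ud, size_t len)
{
    user_data_item_t wi;
    wi.size = 0;
    for (size_t i = 0; i < len; i++)
    {
        wi.data_payload[wi.size++] = ud[i];
        if (wi.size >= UD_PAYLOAD_SIZE)   /* payload full: flush and restart */
        {
            emit_item(&wi);
            wi.size = 0;
        }
    }
    if (wi.size > 0)                      /* partial tail: pad with zeros */
    {
        memset(wi.data_payload + wi.size, 0, UD_PAYLOAD_SIZE - wi.size);
        emit_item(&wi);
    }
}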
@@ -70,51 +70,73 @@ enum }; enum { - vc1_FrameDone = 1 << 0, - vc1_FieldDone = 1 << 1, - vc1_SliceDone = 1 << 2, - vc1_Field1Done = 1 << 3, - vc1_Field2Done = 1 << 4, - vc1_FrameError = 1 << 8, + vc1_FrameDone = 1 << 0, + vc1_FieldDone = 1 << 1, + vc1_SliceDone = 1 << 2, + vc1_Field1Done = 1 << 3, + vc1_Field2Done = 1 << 4, + vc1_FrameError = 1 << 8, }; typedef struct { - /* 0x00 */ uint32_t general; - /* 0x04 */ uint32_t stream_format1; - /* 0x08 */ uint32_t coded_size; - /* 0x0c */ uint32_t stream_format2; - /* 0x10 */ uint32_t entrypoint1; - /* 0x14 */ uint32_t ap_range_map; - /* 0x18 */ uint32_t frame_type; - /* 0x1c */ uint32_t recon_control; - /* 0x20 */ uint32_t mv_control; - /* 0x24 */ uint32_t intcomp_fwd_top; - /* 0x28 */ uint32_t ref_bfraction; - /* 0x2c */ uint32_t blk_control; - /* 0x30 */ uint32_t trans_data; - /* 0x34 */ uint32_t vop_dquant; + /* 0x00 */ + uint32_t general; + /* 0x04 */ + uint32_t stream_format1; + /* 0x08 */ + uint32_t coded_size; + /* 0x0c */ + uint32_t stream_format2; + /* 0x10 */ + uint32_t entrypoint1; + /* 0x14 */ + uint32_t ap_range_map; + /* 0x18 */ + uint32_t frame_type; + /* 0x1c */ + uint32_t recon_control; + /* 0x20 */ + uint32_t mv_control; + /* 0x24 */ + uint32_t intcomp_fwd_top; + /* 0x28 */ + uint32_t ref_bfraction; + /* 0x2c */ + uint32_t blk_control; + /* 0x30 */ + uint32_t trans_data; + /* 0x34 */ + uint32_t vop_dquant; #define NUM_REF_ID 4 - /* 0x38-0x48 */ uint32_t ref_frm_id[NUM_REF_ID]; - /* 0x48 */ uint32_t fieldref_ctrl_id; - /* 0x4c */ uint32_t auxfrmctrl; - /* 0x50 */ uint32_t imgstruct; - /* 0x54 */ uint32_t alt_frame_type; - /* 0x58 */ uint32_t intcomp_fwd_bot; - /* 0x5c */ uint32_t intcomp_bwd_top; - /* 0x60 */ uint32_t intcomp_bwd_bot; - /* 0x14 */ uint32_t smp_range_map; + /* 0x38-0x48 */ uint32_t ref_frm_id[NUM_REF_ID]; + /* 0x48 */ + uint32_t fieldref_ctrl_id; + /* 0x4c */ + uint32_t auxfrmctrl; + /* 0x50 */ + uint32_t imgstruct; + /* 0x54 */ + uint32_t alt_frame_type; + /* 0x58 */ + uint32_t intcomp_fwd_bot; + /* 0x5c */ + uint32_t intcomp_bwd_top; + /* 0x60 */ + uint32_t intcomp_bwd_bot; + /* 0x14 */ + uint32_t smp_range_map; } VC1D_SPR_REGS; /* -In VC1, past reference is the fwd reference and future reference is the backward reference +In VC1, past reference is the fwd reference and future reference is the backward reference i.e. P frame has only a forward reference and B frame has both a forward and a backward reference. 
*/ enum { - VC1_FRAME_CURRENT_REF = 0, - VC1_FRAME_CURRENT_DIS, - VC1_FRAME_PAST, - VC1_FRAME_FUTURE, - VC1_FRAME_ALT + VC1_FRAME_CURRENT_REF = 0, + VC1_FRAME_CURRENT_DIS, + VC1_FRAME_PAST, + VC1_FRAME_FUTURE, + VC1_FRAME_ALT }; #endif //_VC1_COMMON_H_ diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c index 72b770f..cffa7b8 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c @@ -2,18 +2,18 @@ void vc1_start_new_frame (void *parent, vc1_viddec_parser_t *parser ) { - return; + return; } void vc1_end_frame (vc1_viddec_parser_t *parser) { - return; + return; } int32_t vc1_parse_emit_current_frame( void *parent, vc1_viddec_parser_t *parser ) { - return(0); + return(0); } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h index eb26bf7..21894c9 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h @@ -14,23 +14,23 @@ #define _VC1_H_ #ifdef MFD_FIRMWARE - typedef unsigned int size_t; - #define LOG(...) +typedef unsigned int size_t; +#define LOG(...) #else - #include - #include - #include - enum { - NONE = 0, - CRITICAL, - WARNING, - INFO, - DEBUG, - } ; - - #define vc1_log_level DEBUG - - #define LOG( log_lev, format, args ... ) \ +#include +#include +#include +enum { + NONE = 0, + CRITICAL, + WARNING, + INFO, + DEBUG, +} ; + +#define vc1_log_level DEBUG + +#define LOG( log_lev, format, args ... ) \ if (vc1_log_level >= log_lev) { OS_INFO("%s[%d]:: " format "\n", __FUNCTION__ , __LINE__ , ## args ); } #endif @@ -52,115 +52,115 @@ extern "C" { // and Bitplane_raw_ID2 for acpred/mvtypemb/forwardmb // but when we send bitplane index 0 for directmb/fieldtx and bitplane index 2 for acpred/mvtypemb/forwardmb // md5 mismatches are seen -typedef enum -{ - BPP_FORWARDMB = VIDDEC_WORKLOAD_VC1_BITPLANE0, - BPP_ACPRED = VIDDEC_WORKLOAD_VC1_BITPLANE0, - BPP_MVTYPEMB = VIDDEC_WORKLOAD_VC1_BITPLANE0, - BPP_OVERFLAGS = VIDDEC_WORKLOAD_VC1_BITPLANE1, - BPP_SKIPMB = VIDDEC_WORKLOAD_VC1_BITPLANE1, - BPP_DIRECTMB = VIDDEC_WORKLOAD_VC1_BITPLANE2, - BPP_FIELDTX = VIDDEC_WORKLOAD_VC1_BITPLANE2, -} vc1_bpp_type_t; - -/* status codes */ -typedef enum { - VC1_STATUS_EOF = 1, // end of file - VC1_STATUS_OK = 0, // no error - VC1_STATUS_NO_MEM = 2, // out of memory - VC1_STATUS_FILE_ERROR = 2, // file error - VC1_STATUS_NOTSUPPORT = 2, // not supported mode - VC1_STATUS_PARSE_ERROR = 2, // fail in parse MPEG-4 stream - VC1_STATUS_ERROR = 2 // unknown/unspecified error -} vc1_Status; - -/* VC1 start code values */ -typedef enum { - vc1_Forbidden = 0x80,/*0x80-0xFF*/ - vc1_Reserved1 = 0x09,/*0x00-0x09*/ - vc1_Reserved2 = 0x10, - vc1_Reserved3 = 0x1A, - vc1_Reserved4 = 0x20,/*0x20-0x7F*/ - vc1_SCEndOfSequence = 0x0A, - vc1_SCSlice = 0x0B, - vc1_SCField = 0x0C, - vc1_SCFrameHeader = 0x0D, - vc1_SCEntryPointHeader = 0x0E, - vc1_SCSequenceHeader = 0x0F, - vc1_SCSliceUser = 0x1B, - vc1_SCFieldUser = 0x1C, - vc1_SCFrameUser = 0x1D, - vc1_SCEntryPointUser = 0x1E, - vc1_SCSequenceUser = 0x1F -} vc1_sc; + typedef enum + { + BPP_FORWARDMB = VIDDEC_WORKLOAD_VC1_BITPLANE0, + BPP_ACPRED = VIDDEC_WORKLOAD_VC1_BITPLANE0, + BPP_MVTYPEMB = VIDDEC_WORKLOAD_VC1_BITPLANE0, + BPP_OVERFLAGS = VIDDEC_WORKLOAD_VC1_BITPLANE1, + BPP_SKIPMB = VIDDEC_WORKLOAD_VC1_BITPLANE1, + BPP_DIRECTMB = VIDDEC_WORKLOAD_VC1_BITPLANE2, + BPP_FIELDTX = VIDDEC_WORKLOAD_VC1_BITPLANE2, + } 
vc1_bpp_type_t; + + /* status codes */ + typedef enum { + VC1_STATUS_EOF = 1, // end of file + VC1_STATUS_OK = 0, // no error + VC1_STATUS_NO_MEM = 2, // out of memory + VC1_STATUS_FILE_ERROR = 2, // file error + VC1_STATUS_NOTSUPPORT = 2, // not supported mode + VC1_STATUS_PARSE_ERROR = 2, // fail in parse MPEG-4 stream + VC1_STATUS_ERROR = 2 // unknown/unspecified error + } vc1_Status; + + /* VC1 start code values */ + typedef enum { + vc1_Forbidden = 0x80,/*0x80-0xFF*/ + vc1_Reserved1 = 0x09,/*0x00-0x09*/ + vc1_Reserved2 = 0x10, + vc1_Reserved3 = 0x1A, + vc1_Reserved4 = 0x20,/*0x20-0x7F*/ + vc1_SCEndOfSequence = 0x0A, + vc1_SCSlice = 0x0B, + vc1_SCField = 0x0C, + vc1_SCFrameHeader = 0x0D, + vc1_SCEntryPointHeader = 0x0E, + vc1_SCSequenceHeader = 0x0F, + vc1_SCSliceUser = 0x1B, + vc1_SCFieldUser = 0x1C, + vc1_SCFrameUser = 0x1D, + vc1_SCEntryPointUser = 0x1E, + vc1_SCSequenceUser = 0x1F + } vc1_sc; #if 0 -typedef enum -{ - vc1_ProfileSimple = 0, /** Simple profile */ - vc1_ProfileMain, /** Main profile */ - vc1_ProfileReserved, /** Reserved */ - vc1_ProfileAdvanced /** Advanced profile */ -} vc1_Profile; + typedef enum + { + vc1_ProfileSimple = 0, /** Simple profile */ + vc1_ProfileMain, /** Main profile */ + vc1_ProfileReserved, /** Reserved */ + vc1_ProfileAdvanced /** Advanced profile */ + } vc1_Profile; #endif -typedef enum -{ - vc1_PtypeI = 1, - vc1_PtypeP = 2, - vc1_PtypeB = 4, - vc1_PtypeBI = 5, - vc1_PtypeSkipped = 8|2, -} vc1_ptype; - -typedef enum -{ - vc1_PtypeII = 0, - vc1_PtypeIP = 1, - vc1_PtypePI = 2, - vc1_PtypePP = 3, - vc1_PtypeBB = 4, - vc1_PtypeBBI = 5, - vc1_PtypeBIB = 6, - vc1_PtypeBIBI = 7 -} vc1_fptype; - -typedef enum -{ - vc1_Imode_Raw = 0, //0x0000 - vc1_Imode_Norm2, //0x10 - vc1_Imode_Diff2, //0x001 - vc1_Imode_Norm6, //0x11 - vc1_Imode_Diff6, //0x0001 - vc1_Imode_Rowskip, //0x010 - vc1_Imode_Colskip, //0x011 -} vc1_Imode; - -/* calculation of MAX_BITPLANE_SZ 2048/16x1088/16 pel= 128x68 bit used for bitplane - * as rows are packed in DWORDS - * we have (128)/32 * 68 Dwords needed for bitplane storage - */ + typedef enum + { + vc1_PtypeI = 1, + vc1_PtypeP = 2, + vc1_PtypeB = 4, + vc1_PtypeBI = 5, + vc1_PtypeSkipped = 8|2, + } vc1_ptype; + + typedef enum + { + vc1_PtypeII = 0, + vc1_PtypeIP = 1, + vc1_PtypePI = 2, + vc1_PtypePP = 3, + vc1_PtypeBB = 4, + vc1_PtypeBBI = 5, + vc1_PtypeBIB = 6, + vc1_PtypeBIBI = 7 + } vc1_fptype; + + typedef enum + { + vc1_Imode_Raw = 0, //0x0000 + vc1_Imode_Norm2, //0x10 + vc1_Imode_Diff2, //0x001 + vc1_Imode_Norm6, //0x11 + vc1_Imode_Diff6, //0x0001 + vc1_Imode_Rowskip, //0x010 + vc1_Imode_Colskip, //0x011 + } vc1_Imode; + + /* calculation of MAX_BITPLANE_SZ 2048/16x1088/16 pel= 128x68 bit used for bitplane + * as rows are packed in DWORDS + * we have (128)/32 * 68 Dwords needed for bitplane storage + */ #define MAX_BITPLANE_SZ 272 -/* Full Info */ -typedef struct { - unsigned char* bufptr; /* current frame, point to header or data */ - int bitoff; /* mostly point to next frame header or PSC */ - int picture_info_has_changed; - vc1_metadata_t metadata; - vc1_PictureLayerHeader picLayerHeader; - uint32_t bitplane[MAX_BITPLANE_SZ]; -} vc1_Info; + /* Full Info */ + typedef struct { + unsigned char* bufptr; /* current frame, point to header or data */ + int bitoff; /* mostly point to next frame header or PSC */ + int picture_info_has_changed; + vc1_metadata_t metadata; + vc1_PictureLayerHeader picLayerHeader; + uint32_t bitplane[MAX_BITPLANE_SZ]; + } vc1_Info; #ifdef __cplusplus } #endif enum { - VC1_REF_FRAME_T_MINUS_1 = 0, - 
VC1_REF_FRAME_T_MINUS_2, - VC1_REF_FRAME_T_MINUS_0, - VC1_NUM_REFERENCE_FRAMES, + VC1_REF_FRAME_T_MINUS_1 = 0, + VC1_REF_FRAME_T_MINUS_2, + VC1_REF_FRAME_T_MINUS_0, + VC1_NUM_REFERENCE_FRAMES, }; enum vc1_sc_seen_flags @@ -179,15 +179,15 @@ enum vc1_sc_seen_flags #define VC1_FLD_MASK VC1_SC_SEQ | VC1_SC_EP | VC1_SC_FRM | VC1_SC_FLD typedef struct { - int id; - uint32_t intcomp_top; - uint32_t intcomp_bot; - int fcm; /* frame coding mode */ - int type; - int anchor[2]; /* one per field */ - int rr_en; /* range reduction enable flag at sequence layer */ - int rr_frm; /* range reduction flag at picture layer */ - int tff; + int id; + uint32_t intcomp_top; + uint32_t intcomp_bot; + int fcm; /* frame coding mode */ + int type; + int anchor[2]; /* one per field */ + int rr_en; /* range reduction enable flag at sequence layer */ + int rr_frm; /* range reduction flag at picture layer */ + int tff; } ref_frame_t; typedef struct @@ -196,7 +196,7 @@ typedef struct uint32_t sc_seen; uint32_t is_frame_start; uint32_t is_second_start; - uint32_t is_reference_picture; + uint32_t is_reference_picture; uint32_t intcomp_last[4]; /* for B frames */ uint32_t intcomp_top[2]; uint32_t intcomp_bot[2]; diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c index 4973b1d..fb59613 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c @@ -30,7 +30,7 @@ vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo) uint32_t result; vc1_Status status = VC1_STATUS_OK; vc1_metadata_t *md = &pInfo->metadata; - vc1_RcvSequenceHeader rcv; + vc1_RcvSequenceHeader rcv; memset(&rcv, 0, sizeof(vc1_RcvSequenceHeader)); @@ -52,11 +52,11 @@ vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo) md->QUANTIZER = rcv.struct_c.QUANTIZER; md->FINTERPFLAG = rcv.struct_c.FINTERPFLAG; #ifdef VBP - md->SYNCMARKER = rcv.struct_c.SYNCMARKER; -#endif + md->SYNCMARKER = rcv.struct_c.SYNCMARKER; +#endif if ((md->PROFILE == VC1_PROFILE_SIMPLE) || - (md->MULTIRES && md->PROFILE == VC1_PROFILE_MAIN)) + (md->MULTIRES && md->PROFILE == VC1_PROFILE_MAIN)) { md->DQUANT = 0; } @@ -76,7 +76,7 @@ vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo) // POPULATE WORKLOAD ITEM { viddec_workload_item_t wi; - + wi.vwi_type = VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C; wi.vc1_sh_struct_a_c.size = 0; @@ -122,17 +122,17 @@ vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) // PARSE SEQUENCE HEADER result = viddec_pm_get_bits(ctxt, &sh.flags, 15); - if(result == 1) + if (result == 1) { md->PROFILE = sh.seq_flags.PROFILE; md->LEVEL = sh.seq_flags.LEVEL; md->CHROMAFORMAT = sh.seq_flags.COLORDIFF_FORMAT; md->FRMRTQ = sh.seq_flags.FRMRTQ_POSTPROC; - md->BITRTQ = sh.seq_flags.BITRTQ_POSTPROC; + md->BITRTQ = sh.seq_flags.BITRTQ_POSTPROC; } result = viddec_pm_get_bits(ctxt, &sh.max_size, 32); - if(result == 1) + if (result == 1) { md->POSTPROCFLAG = sh.seq_max_size.POSTPROCFLAG; md->width = sh.seq_max_size.MAX_CODED_WIDTH; @@ -147,10 +147,10 @@ vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) if (sh.seq_max_size.DISPLAY_EXT == 1) { result = viddec_pm_get_bits(ctxt, &sh.disp_size, 29); - if(result == 1) + if (result == 1) { if (sh.seq_disp_size.ASPECT_RATIO_FLAG == 1) - { + { result = viddec_pm_get_bits(ctxt, &tempValue, 4); sh.ASPECT_RATIO = tempValue; if (sh.ASPECT_RATIO == 15) @@ -219,13 +219,13 @@ vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) // POPULATE WORKLOAD ITEM { 
viddec_workload_item_t wi_sl, wi_de; - + wi_sl.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO; wi_sl.vc1_sl.size = 0; wi_sl.vc1_sl.flags = 0; wi_sl.vc1_sl.pad = 0; - + viddec_fw_vc1_set_profile(&wi_sl.vc1_sl, sh.seq_flags.PROFILE); viddec_fw_vc1_set_level(&wi_sl.vc1_sl, sh.seq_flags.LEVEL); viddec_fw_vc1_set_colordiff_format(&wi_sl.vc1_sl, sh.seq_flags.COLORDIFF_FORMAT); @@ -296,13 +296,13 @@ vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo) // PARSE ENTRYPOINT HEADER result = viddec_pm_get_bits(ctxt, &ep.flags, 13); - if(result == 1) + if (result == 1) { // Skip the flags already peeked at (13) and the unneeded hrd_full data // NOTE: HRD_NUM_LEAKY_BUCKETS is initialized to 0 when HRD_PARAM_FLAG is not present int hrd_bits = md->HRD_NUM_LEAKY_BUCKETS * 8; - while(hrd_bits >= 32) - { + while (hrd_bits >= 32) + { result = viddec_pm_skip_bits(ctxt, 32); hrd_bits -= 32; } @@ -322,27 +322,27 @@ vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo) md->QUANTIZER = ep.ep_flags.QUANTIZER; result = viddec_pm_get_bits(ctxt, &temp, 1); - if(result == 1) + if (result == 1) { ep.CODED_SIZE_FLAG = temp; - if(ep.CODED_SIZE_FLAG) + if (ep.CODED_SIZE_FLAG) { result = viddec_pm_get_bits(ctxt, &ep.size, 24); md->width = ep.ep_size.CODED_WIDTH; md->height = ep.ep_size.CODED_HEIGHT; } } - if(ep.ep_flags.EXTENDED_MV) + if (ep.ep_flags.EXTENDED_MV) { result = viddec_pm_get_bits(ctxt, &temp, 1); md->EXTENDED_DMV = ep.EXTENDED_DMV = temp; } result = viddec_pm_get_bits(ctxt, &temp, 1); - if(result == 1) + if (result == 1) { md->RANGE_MAPY_FLAG = ep.RANGE_MAPY_FLAG = temp; - if(ep.RANGE_MAPY_FLAG) + if (ep.RANGE_MAPY_FLAG) { result = viddec_pm_get_bits(ctxt, &temp, 3); md->RANGE_MAPY = ep.RANGE_MAPY = temp; @@ -350,10 +350,10 @@ vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo) } result = viddec_pm_get_bits(ctxt, &temp, 1); - if(result == 1) + if (result == 1) { md->RANGE_MAPUV_FLAG = ep.RANGE_MAPUV_FLAG = temp; - if(ep.RANGE_MAPUV_FLAG) + if (ep.RANGE_MAPUV_FLAG) { result = viddec_pm_get_bits(ctxt, &temp, 3); md->RANGE_MAPUV = ep.RANGE_MAPUV = temp; @@ -364,7 +364,7 @@ vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo) // POPULATE WORKLOAD ITEM { viddec_workload_item_t wi; - + wi.vwi_type = VIDDEC_WORKLOAD_GOP_INFO; wi.vc1_ep.size = 0; @@ -402,15 +402,15 @@ vc1_Status vc1_ParsePictureLayer(void* ctxt, vc1_Info *pInfo) uint32_t temp; int i; - for(i=0; imetadata.bp_raw[i] = true; } - if (pInfo->metadata.PROFILE == VC1_PROFILE_ADVANCED) + if (pInfo->metadata.PROFILE == VC1_PROFILE_ADVANCED) { VC1_PEEK_BITS(2, temp); /* fcm */ - if( (pInfo->metadata.INTERLACE == 1) && (temp == VC1_FCM_FIELD_INTERLACE)) + if ( (pInfo->metadata.INTERLACE == 1) && (temp == VC1_FCM_FIELD_INTERLACE)) { status = vc1_ParseFieldHeader_Adv(ctxt, pInfo); } @@ -474,7 +474,7 @@ vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo) uint32_t last_bitoff = pInfo->bitoff; status = vc1_ParsePictureLayer(ctxt, pInfo); pInfo->picture_info_has_changed = 1; - if( status ) { + if ( status ) { /* FIXME - is this a good way of handling this? Failed, see if it's for fields */ pInfo->bufptr = last_bufptr; pInfo->bitoff = last_bitoff; @@ -492,7 +492,7 @@ vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo) * This function parses the user data information as defined in SMPTE 421M annex F. * It then appends that data to the workload. * Assume the flush byte 0x80 is within the 3 bytes before next start code. 
- * let's put 1 byte per item first + * let's put 1 byte per item first *------------------------------------------------------------------------------ */ vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc) @@ -501,29 +501,29 @@ vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc) uint32_t user_data; viddec_workload_item_t wi; uint32_t ud_id; - + /* find the scope based on start code sc */ - switch(sc) { - case vc1_SCSequenceUser: - wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA; - break; - case vc1_SCEntryPointUser: - wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA; - break; - case vc1_SCFrameUser: - wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA; - break; - case vc1_SCFieldUser: - wi.vwi_type = VIDDEC_WORKLOAD_FLD_USER_DATA; - break; - case vc1_SCSliceUser: - wi.vwi_type = VIDDEC_WORKLOAD_SLC_USER_DATA; - break; - default: - wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen - break; + switch (sc) { + case vc1_SCSequenceUser: + wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA; + break; + case vc1_SCEntryPointUser: + wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA; + break; + case vc1_SCFrameUser: + wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA; + break; + case vc1_SCFieldUser: + wi.vwi_type = VIDDEC_WORKLOAD_FLD_USER_DATA; + break; + case vc1_SCSliceUser: + wi.vwi_type = VIDDEC_WORKLOAD_SLC_USER_DATA; + break; + default: + wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen + break; } - + /* get identifier - 4 bytes*/ // Extract this information but discard it for now VC1_GET_BITS(32, ud_id); @@ -532,12 +532,12 @@ vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc) Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size, append the workitem. This loop is repeated till all user data is extracted and appended. */ wi.user_data.size = 0; - while(viddec_pm_get_bits(ctxt, &user_data, 8) != -1) + while (viddec_pm_get_bits(ctxt, &user_data, 8) != -1) { /* Store the valid byte in data payload */ wi.user_data.data_payload[wi.user_data.size] = user_data; wi.user_data.size++; - + /* When size exceeds payload size, append workitem and continue */ if (wi.user_data.size >= 11) { @@ -545,14 +545,14 @@ vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc) viddec_pm_append_workitem(ctxt, &wi,false); wi.user_data.size = 0; } - if(user_data == 0x80) // flushing byte + if (user_data == 0x80) // flushing byte break; } /* If size is not 0, append remaining user data. 
*/ if (wi.user_data.size > 0) { int i; - for(i=wi.user_data.size;i<11;i++) + for (i=wi.user_data.size; i<11; i++) { wi.user_data.data_payload[i] = 0; } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c index b7dd271..b39f4ad 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c @@ -184,7 +184,7 @@ static void vc1_Norm2ModeDecode(void* ctxt, vc1_Bitplane *pBitplane, col[1] = 2; /* (i+1)%width; */ // Consider special case where width is 1 - if(width == 1) + if (width == 1) { col[0] = 0; /* i%width; */ row[0] = 1; /* i/width; */ @@ -192,7 +192,7 @@ static void vc1_Norm2ModeDecode(void* ctxt, vc1_Bitplane *pBitplane, row[1] = 2; /* (i+1)/width; */ } } - + /* decode every pair of bits in natural scan order */ for (i = (width*height) & 1; i < (width*height/2)*2; i += 2) { @@ -218,7 +218,7 @@ static void vc1_Norm2ModeDecode(void* ctxt, vc1_Bitplane *pBitplane, { put_bit(1, col[0],row[0], width, height, pBitplane->invert, pBitplane->databits); - put_bit(1, col[1],row[1], width, height, pBitplane->invert, + put_bit(1, col[1],row[1], width, height, pBitplane->invert, pBitplane->databits); } else @@ -228,21 +228,21 @@ static void vc1_Norm2ModeDecode(void* ctxt, vc1_Bitplane *pBitplane, { put_bit(1, col[0],row[0], width, height, pBitplane->invert, pBitplane->databits); - put_bit(0, col[1],row[1], width, height, pBitplane->invert, + put_bit(0, col[1],row[1], width, height, pBitplane->invert, pBitplane->databits); } else { put_bit(0, col[0],row[0], width, height, pBitplane->invert, pBitplane->databits); - put_bit(1, col[1],row[1], width, height, pBitplane->invert, + put_bit(1, col[1],row[1], width, height, pBitplane->invert, pBitplane->databits); } } } // Consider special case where width is 1 - if(width == 1) + if (width == 1) { row[0] += 2; row[1] += 2; @@ -252,7 +252,7 @@ static void vc1_Norm2ModeDecode(void* ctxt, vc1_Bitplane *pBitplane, col[0] += 2; /* i%width; */ if ( col[0] >= width ) { - // For odd sizes, col[0] can alternatively start at 0 and 1 + // For odd sizes, col[0] can alternatively start at 0 and 1 col[0] -= width; row[0]++; } @@ -260,7 +260,7 @@ static void vc1_Norm2ModeDecode(void* ctxt, vc1_Bitplane *pBitplane, col[1] += 2; /* (i+1)%width; */ if ( col[1] >= width ) { - // For odd sizes, col[1] can alternatively start at 0 and 1 + // For odd sizes, col[1] can alternatively start at 0 and 1 col[1] -= width; row[1]++; } @@ -376,12 +376,12 @@ static void vc1_Norm6ModeDecode(void* ctxt, vc1_Bitplane *pBitplane, //if (1 == ColSkip) { - for(j = 0; j < height; j++) + for (j = 0; j < height; j++) { - int32_t Value = 0; - if (1 == ColSkip) VC1_GET_BITS(1, Value); - - put_bit(Value, i, j, width, height,pBitplane->invert,pBitplane->databits); + int32_t Value = 0; + if (1 == ColSkip) VC1_GET_BITS(1, Value); + + put_bit(Value, i, j, width, height,pBitplane->invert,pBitplane->databits); } } } @@ -394,10 +394,10 @@ static void vc1_Norm6ModeDecode(void* ctxt, vc1_Bitplane *pBitplane, { for (i = ResidualX; i < width; i++) { - int32_t Value = 0; - if (1 == RowSkip) VC1_GET_BITS(1, Value); - - put_bit(Value, i, j, width, height,pBitplane->invert,pBitplane->databits); + int32_t Value = 0; + if (1 == RowSkip) VC1_GET_BITS(1, Value); + + put_bit(Value, i, j, width, height,pBitplane->invert,pBitplane->databits); } } } @@ -423,7 +423,7 @@ int initBitplane(vc1_Bitplane *pBitplane,uint32_t width, uint32_t height) numDword = ((width + 31)>>5) * 
height; numDword += numDword & 1; /* add 1 in case numDword is odd */ - for (i=0;idatabits[i] = 0; + for (i=0; idatabits[i] = 0; return(numDword); } @@ -432,7 +432,7 @@ int initBitplane(vc1_Bitplane *pBitplane,uint32_t width, uint32_t height) * width: width in MB unit * height: height in MB unit */ -vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, +vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, uint32_t width, uint32_t height, vc1_bpp_type_t bpnum) { uint32_t i, j; @@ -442,7 +442,7 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, vc1_Bitplane bp; vc1_Bitplane *bpp = &bp; - // By default, set imode to raw + // By default, set imode to raw pInfo->metadata.bp_raw[bpnum - VIDDEC_WORKLOAD_VC1_BITPLANE0] = true; // bitplane data would be temporarily stored in the vc1 context @@ -519,16 +519,16 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, VC1_GET_BITS(1, tempValue); put_bit( tempValue, i, j, width, height, bpp->invert, bpp->databits); } - } + } else if (bpp->invert) { // fill column with ones for (j = 0; j < height; j++) { put_bit( 0, i, j, width, height, bpp->invert, bpp->databits); } - }//end for else + }//end for else } } - if(bpp->imode != VC1_BITPLANE_RAW_MODE) + if (bpp->imode != VC1_BITPLANE_RAW_MODE) { uint32_t* pl; int sizeinbytes,nitems,i; @@ -544,10 +544,10 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, // How many payloads must be generated nitems = (sizeinbytes + (sizeof(wi.data.data_payload) - 1)) / - sizeof(wi.data.data_payload); + sizeof(wi.data.data_payload); // Dump DMEM to an array of workitems - for( i = 0; i < nitems; i++ ) + for ( i = 0; i < nitems; i++ ) { wi.vwi_type = bpnum; wi.data.data_offset = (char *)pl - (char *)bit_dw; // offset within struct @@ -562,153 +562,153 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, #ifdef VBP { - viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)ctxt; - vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)(cxt->codec_data); - - if (biplaneSz > 4096) - { - /* bigger than we got, so let's bail with a non meaningful error. */ - return VC1_STATUS_ERROR; - } - - /* At this point bp contains the information we need for the bit-plane */ - /* bpnum is the enumeration that tells us which bitplane this is for. */ - /* pInfo->picLayerHeader.ACPRED is one of the bitplanes I need to fill.*/ - switch (bpnum) - { + viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)ctxt; + vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)(cxt->codec_data); + + if (biplaneSz > 4096) + { + /* bigger than we got, so let's bail with a non meaningful error. */ + return VC1_STATUS_ERROR; + } + + /* At this point bp contains the information we need for the bit-plane */ + /* bpnum is the enumeration that tells us which bitplane this is for. 
*/ + /* pInfo->picLayerHeader.ACPRED is one of the bitplanes I need to fill.*/ + switch (bpnum) + { case VIDDEC_WORKLOAD_VC1_BITPLANE0: - if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) - { - if(bp.imode != VC1_BITPLANE_RAW_MODE) - { - pInfo->picLayerHeader.FORWARDMB.invert = bp.invert; - pInfo->picLayerHeader.FORWARDMB.imode = bp.imode; - for (i = 0; i < biplaneSz; i++) - { - parser->bp_forwardmb[i] = bp.databits[i]; - } - pInfo->picLayerHeader.FORWARDMB.databits = parser->bp_forwardmb; - } - else - { - pInfo->picLayerHeader.raw_FORWARDMB = 1; - } - } - if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) - || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) ) - { - if(bp.imode != VC1_BITPLANE_RAW_MODE) - { - pInfo->picLayerHeader.ACPRED.invert = bp.invert; - pInfo->picLayerHeader.ACPRED.imode = bp.imode; - for (i = 0; i < biplaneSz; i++) - { - parser->bp_acpred[i] = bp.databits[i]; - } - pInfo->picLayerHeader.ACPRED.databits = parser->bp_acpred; - } - else + if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) { - pInfo->picLayerHeader.raw_ACPRED = 1; + if (bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.FORWARDMB.invert = bp.invert; + pInfo->picLayerHeader.FORWARDMB.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_forwardmb[i] = bp.databits[i]; + } + pInfo->picLayerHeader.FORWARDMB.databits = parser->bp_forwardmb; + } + else + { + pInfo->picLayerHeader.raw_FORWARDMB = 1; + } } - } - if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) - { - if(bp.imode != VC1_BITPLANE_RAW_MODE) + if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) ) { - pInfo->picLayerHeader.MVTYPEMB.invert = bp.invert; - pInfo->picLayerHeader.MVTYPEMB.imode = bp.imode; - for (i = 0; i < biplaneSz; i++) - { - parser->bp_mvtypemb[i] = bp.databits[i]; - } - pInfo->picLayerHeader.MVTYPEMB.databits = parser->bp_mvtypemb; + if (bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.ACPRED.invert = bp.invert; + pInfo->picLayerHeader.ACPRED.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_acpred[i] = bp.databits[i]; + } + pInfo->picLayerHeader.ACPRED.databits = parser->bp_acpred; + } + else + { + pInfo->picLayerHeader.raw_ACPRED = 1; + } } - else + if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) { - pInfo->picLayerHeader.raw_MVTYPEMB = 1; + if (bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.MVTYPEMB.invert = bp.invert; + pInfo->picLayerHeader.MVTYPEMB.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_mvtypemb[i] = bp.databits[i]; + } + pInfo->picLayerHeader.MVTYPEMB.databits = parser->bp_mvtypemb; + } + else + { + pInfo->picLayerHeader.raw_MVTYPEMB = 1; + } } - } - break; + break; case VIDDEC_WORKLOAD_VC1_BITPLANE1: - if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) - || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) ) - { - if(bp.imode != VC1_BITPLANE_RAW_MODE) + if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) ) { - pInfo->picLayerHeader.OVERFLAGS.invert = bp.invert; - pInfo->picLayerHeader.OVERFLAGS.imode = bp.imode; - for (i = 0; i < biplaneSz; i++) - { - parser->bp_overflags[i] = bp.databits[i]; - } - pInfo->picLayerHeader.OVERFLAGS.databits = parser->bp_overflags; - } - else - { - pInfo->picLayerHeader.raw_OVERFLAGS = 1; + if (bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.OVERFLAGS.invert = bp.invert; + pInfo->picLayerHeader.OVERFLAGS.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_overflags[i] = 
bp.databits[i]; + } + pInfo->picLayerHeader.OVERFLAGS.databits = parser->bp_overflags; + } + else + { + pInfo->picLayerHeader.raw_OVERFLAGS = 1; + } } - } - if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) - || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) ) - { - if(bp.imode != VC1_BITPLANE_RAW_MODE) + if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) ) { - pInfo->picLayerHeader.SKIPMB.invert = bp.invert; - pInfo->picLayerHeader.SKIPMB.imode = bp.imode; - for (i = 0; i < biplaneSz; i++) - { - parser->bp_skipmb[i] = bp.databits[i]; - } - pInfo->picLayerHeader.SKIPMB.databits = parser->bp_skipmb; - } - else - { - pInfo->picLayerHeader.raw_SKIPMB = 1; + if (bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.SKIPMB.invert = bp.invert; + pInfo->picLayerHeader.SKIPMB.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_skipmb[i] = bp.databits[i]; + } + pInfo->picLayerHeader.SKIPMB.databits = parser->bp_skipmb; + } + else + { + pInfo->picLayerHeader.raw_SKIPMB = 1; + } } - } - break; + break; case VIDDEC_WORKLOAD_VC1_BITPLANE2: - if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) - || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) ) - { - if(bp.imode != VC1_BITPLANE_RAW_MODE) + if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) ) { - pInfo->picLayerHeader.DIRECTMB.invert = bp.invert; - pInfo->picLayerHeader.DIRECTMB.imode = bp.imode; - for (i = 0; i < biplaneSz; i++) - { - parser->bp_directmb[i] = bp.databits[i]; - } - pInfo->picLayerHeader.DIRECTMB.databits = parser->bp_directmb; - } - else - { - pInfo->picLayerHeader.raw_DIRECTMB = 1; - } - } - if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) - || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) ) - { - if(bp.imode != VC1_BITPLANE_RAW_MODE) - { - pInfo->picLayerHeader.FIELDTX.invert = bp.invert; - pInfo->picLayerHeader.FIELDTX.imode = bp.imode; - for (i = 0; i < biplaneSz; i++) - { - parser->bp_fieldtx[i] = bp.databits[i]; - } - pInfo->picLayerHeader.FIELDTX.databits = parser->bp_fieldtx; + if (bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.DIRECTMB.invert = bp.invert; + pInfo->picLayerHeader.DIRECTMB.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_directmb[i] = bp.databits[i]; + } + pInfo->picLayerHeader.DIRECTMB.databits = parser->bp_directmb; + } + else + { + pInfo->picLayerHeader.raw_DIRECTMB = 1; + } } - else + if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) ) { - pInfo->picLayerHeader.raw_FIELDTX = 1; + if (bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.FIELDTX.invert = bp.invert; + pInfo->picLayerHeader.FIELDTX.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_fieldtx[i] = bp.databits[i]; + } + pInfo->picLayerHeader.FIELDTX.databits = parser->bp_fieldtx; + } + else + { + pInfo->picLayerHeader.raw_FIELDTX = 1; + } } - } - break; - } + break; + } } #endif - + return status; } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c index 1b702e3..32fd5cd 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c @@ -29,8 +29,8 @@ vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInf vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, - 
&picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != - VC1_STATUS_OK) + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != + VC1_STATUS_OK) { return status; } @@ -55,16 +55,16 @@ vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInf VC1_GET_BITS9(1, picLayerHeader->MVMODE); picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ? - VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV; + VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV; - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) { return VC1_STATUS_OK; } - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) { return status; } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c index 7a6a8e0..397b5a1 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c @@ -34,16 +34,16 @@ vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info * VC1_GET_BITS9(1, picLayerHeader->MVMODE); picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ? - VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV; + VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV; - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) { return status; } - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) { return status; } @@ -94,7 +94,7 @@ vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pI if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != - VC1_STATUS_OK) + VC1_STATUS_OK) { return status; } @@ -107,14 +107,14 @@ vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pI VC1_GET_BITS9(1, picLayerHeader->INTCOMP); - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) { return status; } - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) { return status; } @@ -214,12 +214,12 @@ vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInf bit_count++; picLayerHeader->MVMODE = table[bit_count]; - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, (md->heightMB+1)/2, BPP_FORWARDMB)) != - VC1_STATUS_OK) + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, (md->heightMB+1)/2, BPP_FORWARDMB)) != + VC1_STATUS_OK) { return status; - } + } VC1_GET_BITS9(3, picLayerHeader->MBMODETAB); VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. 
*/ diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h index ce36849..821df32 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h @@ -19,603 +19,603 @@ extern "C" { #include -/** @weakgroup vc1parse_common_defs VC-1 Common Definitions */ -/** @ingroup vc1parse_common_defs */ -/*@{*/ + /** @weakgroup vc1parse_common_defs VC-1 Common Definitions */ + /** @ingroup vc1parse_common_defs */ + /*@{*/ -/** This defines the maximum number of horizontal macroblocks in a picture. */ + /** This defines the maximum number of horizontal macroblocks in a picture. */ #define VC1_WIDTH_MB_MAX ((2048+15)/16) -/** This defines the maximum number of vertical macroblocks in a picture. */ + /** This defines the maximum number of vertical macroblocks in a picture. */ #define VC1_HEIGHT_MB_MAX ((1088+15)/16) -/** This defines the maximum number of bitplane storage per picture. */ -#define VC1_MAX_BITPLANE_CHUNKS 3 + /** This defines the maximum number of bitplane storage per picture. */ +#define VC1_MAX_BITPLANE_CHUNKS 3 -/** This defines the value for an invalid BFRACTION syntax element. */ + /** This defines the value for an invalid BFRACTION syntax element. */ #define VC1_BFRACTION_INVALID 0 -/** This defines the value for BFRACTION syntax element that defines a BI -picture. */ + /** This defines the value for BFRACTION syntax element that defines a BI + picture. */ #define VC1_BFRACTION_BI 9 -/** This enumeration defines the various supported profiles as defined in -PROFILE syntax element. */ -enum -{ - VC1_PROFILE_SIMPLE, - VC1_PROFILE_MAIN, - VC1_PROFILE_RESERVED, - VC1_PROFILE_ADVANCED -}; - -/** This enumeration defines the frame coding mode as defined in FCM syntax -element. */ -enum -{ - VC1_FCM_PROGRESSIVE, - VC1_FCM_FRAME_INTERLACE = 2, - VC1_FCM_FIELD_INTERLACE = 3 -}; - -/** This enumeration defines the various bitplane types as defined in IMODE -syntax element. */ -enum -{ - VC1_BITPLANE_RAW_MODE, - VC1_BITPLANE_NORM2_MODE, - VC1_BITPLANE_DIFF2_MODE, - VC1_BITPLANE_NORM6_MODE, - VC1_BITPLANE_DIFF6_MODE, - VC1_BITPLANE_ROWSKIP_MODE, - VC1_BITPLANE_COLSKIP_MODE -}; - -/** This enumeration defines the various motion vector modes as defined in -MVMODE or MVMODE2 syntax element. */ -enum -{ - VC1_MVMODE_1MV, -#ifdef VBP - VC1_MVMODE_HPELBI_1MV, - VC1_MVMODE_HPEL_1MV, + /** This enumeration defines the various supported profiles as defined in + PROFILE syntax element. */ + enum + { + VC1_PROFILE_SIMPLE, + VC1_PROFILE_MAIN, + VC1_PROFILE_RESERVED, + VC1_PROFILE_ADVANCED + }; + + /** This enumeration defines the frame coding mode as defined in FCM syntax + element. */ + enum + { + VC1_FCM_PROGRESSIVE, + VC1_FCM_FRAME_INTERLACE = 2, + VC1_FCM_FIELD_INTERLACE = 3 + }; + + /** This enumeration defines the various bitplane types as defined in IMODE + syntax element. */ + enum + { + VC1_BITPLANE_RAW_MODE, + VC1_BITPLANE_NORM2_MODE, + VC1_BITPLANE_DIFF2_MODE, + VC1_BITPLANE_NORM6_MODE, + VC1_BITPLANE_DIFF6_MODE, + VC1_BITPLANE_ROWSKIP_MODE, + VC1_BITPLANE_COLSKIP_MODE + }; + + /** This enumeration defines the various motion vector modes as defined in + MVMODE or MVMODE2 syntax element. 
*/ + enum + { + VC1_MVMODE_1MV, +#ifdef VBP + VC1_MVMODE_HPELBI_1MV, + VC1_MVMODE_HPEL_1MV, #else - VC1_MVMODE_HPEL_1MV, - VC1_MVMODE_HPELBI_1MV, + VC1_MVMODE_HPEL_1MV, + VC1_MVMODE_HPELBI_1MV, #endif - VC1_MVMODE_MIXED_MV, - VC1_MVMODE_INTENSCOMP -}; - -/** This enumeration defines the extended differential motion vector range flag -as defined in DMVRANGE syntax element. */ -enum -{ - VC1_DMVRANGE_NONE, - VC1_DMVRANGE_HORIZONTAL_RANGE, - VC1_DMVRANGE_VERTICAL_RANGE, - VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE -}; - -/** This enumeration defines the intensity compensation field as defined in -INTCOMPFIELD syntax element. */ -enum -{ - VC1_INTCOMP_TOP_FIELD = 1, - VC1_INTCOMP_BOTTOM_FIELD = 2, - VC1_INTCOMP_BOTH_FIELD = 3 -}; - -/** This enumeration defines the differential quantizer profiles as defined in -DQPROFILE syntax element. */ -enum -{ - VC1_DQPROFILE_ALL4EDGES, - VC1_DQPROFILE_DBLEDGES, - VC1_DQPROFILE_SNGLEDGES, - VC1_DQPROFILE_ALLMBLKS -}; - -/** This enumeration defines the conditional overlap flag as defined in CONDOVER -syntax element. */ -enum -{ - VC1_CONDOVER_FLAG_NONE = 0, - VC1_CONDOVER_FLAG_ALL = 2, - VC1_CONDOVER_FLAG_SOME = 3 -}; - -/** This enumeration defines the type of quantizer to be used and is derived -from bitstream syntax. */ -enum -{ - VC1_QUANTIZER_NONUNIFORM, - VC1_QUANTIZER_UNIFORM -}; - -/** This structure represents the various bitplanes within VC-1 bitstream. */ -typedef struct -{ - uint8_t invert; - int32_t imode; - uint32_t *databits; -} vc1_Bitplane; - -/** This structure represents all bitstream metadata needed for register programming. */ -typedef struct -{ - // From Sequence Layer for Advanced Profile - uint8_t PROFILE; /** 2 bit(s). */ - uint8_t LEVEL; /** 3 bit(s). */ - uint8_t CHROMAFORMAT; /** 2 bit(s). */ - uint8_t FRMRTQ; /** 3 bit(s). */ - - uint8_t BITRTQ; /** 5 bit(s). */ - uint8_t POSTPROCFLAG; /** 1 bit(s). */ - uint8_t PULLDOWN; /** 1 bit(s). */ - uint8_t INTERLACE; /** 1 bit(s). */ - - uint8_t TFCNTRFLAG; /** 1 bit(s). */ - uint8_t FINTERPFLAG; /** 1 bit(s). */ - uint8_t PSF; /** 1 bit(s). */ - uint8_t HRD_NUM_LEAKY_BUCKETS; /** 5 bit(s). */ - - // From STRUCT_C - uint8_t MAXBFRAMES; /** 3 bit(s). */ - uint8_t MULTIRES; /** 1 bit(s). */ - - // From EntryPoint Layer for Advanced Profile - uint8_t BROKEN_LINK; - uint8_t CLOSED_ENTRY; - - uint8_t PANSCAN_FLAG; - uint8_t REFDIST_FLAG; - uint8_t LOOPFILTER; - uint8_t FASTUVMC; - - uint8_t EXTENDED_MV; - uint8_t DQUANT; - uint8_t VSTRANSFORM; - uint8_t OVERLAP; - - uint8_t QUANTIZER; - uint8_t EXTENDED_DMV; - uint8_t RANGE_MAPY_FLAG; - uint8_t RANGE_MAPY; - - uint8_t RANGE_MAPUV_FLAG; - uint8_t RANGE_MAPUV; - - // From Picture Header - uint8_t RANGERED; /** 1 bit(s). */ - uint8_t RNDCTRL; /** 1 bit(s), rcv specific. */ - - // REFDIST is present only in field-interlaced mode on I/I, I/P, P/I, P/P frames - // From Canmore, looks like this needs to be propagated to following B frames - uint8_t REFDIST; - uint8_t INTCOMPFIELD; /** ? bit(s)? */ - uint8_t LUMSCALE2; /** 6 bit(s). */ - uint8_t LUMSHIFT2; /** 6 bit(s). */ - - uint8_t bp_raw[VC1_MAX_BITPLANE_CHUNKS]; - uint8_t res_1; - - // From SequenceLayerHeader, EntryPointHeader or Struct_A - uint16_t width; - uint16_t height; - uint16_t widthMB; - uint16_t heightMB; + VC1_MVMODE_MIXED_MV, + VC1_MVMODE_INTENSCOMP + }; + + /** This enumeration defines the extended differential motion vector range flag + as defined in DMVRANGE syntax element. 
*/ + enum + { + VC1_DMVRANGE_NONE, + VC1_DMVRANGE_HORIZONTAL_RANGE, + VC1_DMVRANGE_VERTICAL_RANGE, + VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE + }; + + /** This enumeration defines the intensity compensation field as defined in + INTCOMPFIELD syntax element. */ + enum + { + VC1_INTCOMP_TOP_FIELD = 1, + VC1_INTCOMP_BOTTOM_FIELD = 2, + VC1_INTCOMP_BOTH_FIELD = 3 + }; + + /** This enumeration defines the differential quantizer profiles as defined in + DQPROFILE syntax element. */ + enum + { + VC1_DQPROFILE_ALL4EDGES, + VC1_DQPROFILE_DBLEDGES, + VC1_DQPROFILE_SNGLEDGES, + VC1_DQPROFILE_ALLMBLKS + }; + + /** This enumeration defines the conditional overlap flag as defined in CONDOVER + syntax element. */ + enum + { + VC1_CONDOVER_FLAG_NONE = 0, + VC1_CONDOVER_FLAG_ALL = 2, + VC1_CONDOVER_FLAG_SOME = 3 + }; + + /** This enumeration defines the type of quantizer to be used and is derived + from bitstream syntax. */ + enum + { + VC1_QUANTIZER_NONUNIFORM, + VC1_QUANTIZER_UNIFORM + }; + + /** This structure represents the various bitplanes within VC-1 bitstream. */ + typedef struct + { + uint8_t invert; + int32_t imode; + uint32_t *databits; + } vc1_Bitplane; + + /** This structure represents all bitstream metadata needed for register programming. */ + typedef struct + { + // From Sequence Layer for Advanced Profile + uint8_t PROFILE; /** 2 bit(s). */ + uint8_t LEVEL; /** 3 bit(s). */ + uint8_t CHROMAFORMAT; /** 2 bit(s). */ + uint8_t FRMRTQ; /** 3 bit(s). */ + + uint8_t BITRTQ; /** 5 bit(s). */ + uint8_t POSTPROCFLAG; /** 1 bit(s). */ + uint8_t PULLDOWN; /** 1 bit(s). */ + uint8_t INTERLACE; /** 1 bit(s). */ + + uint8_t TFCNTRFLAG; /** 1 bit(s). */ + uint8_t FINTERPFLAG; /** 1 bit(s). */ + uint8_t PSF; /** 1 bit(s). */ + uint8_t HRD_NUM_LEAKY_BUCKETS; /** 5 bit(s). */ + + // From STRUCT_C + uint8_t MAXBFRAMES; /** 3 bit(s). */ + uint8_t MULTIRES; /** 1 bit(s). */ + + // From EntryPoint Layer for Advanced Profile + uint8_t BROKEN_LINK; + uint8_t CLOSED_ENTRY; + + uint8_t PANSCAN_FLAG; + uint8_t REFDIST_FLAG; + uint8_t LOOPFILTER; + uint8_t FASTUVMC; + + uint8_t EXTENDED_MV; + uint8_t DQUANT; + uint8_t VSTRANSFORM; + uint8_t OVERLAP; + + uint8_t QUANTIZER; + uint8_t EXTENDED_DMV; + uint8_t RANGE_MAPY_FLAG; + uint8_t RANGE_MAPY; + + uint8_t RANGE_MAPUV_FLAG; + uint8_t RANGE_MAPUV; + + // From Picture Header + uint8_t RANGERED; /** 1 bit(s). */ + uint8_t RNDCTRL; /** 1 bit(s), rcv specific. */ + + // REFDIST is present only in field-interlaced mode on I/I, I/P, P/I, P/P frames + // From Canmore, looks like this needs to be propagated to following B frames + uint8_t REFDIST; + uint8_t INTCOMPFIELD; /** ? bit(s)? */ + uint8_t LUMSCALE2; /** 6 bit(s). */ + uint8_t LUMSHIFT2; /** 6 bit(s). */ + + uint8_t bp_raw[VC1_MAX_BITPLANE_CHUNKS]; + uint8_t res_1; + + // From SequenceLayerHeader, EntryPointHeader or Struct_A + uint16_t width; + uint16_t height; + uint16_t widthMB; + uint16_t heightMB; #ifdef VBP - uint8_t COLOR_FORMAT_FLAG; - uint8_t MATRIX_COEF; - uint8_t SYNCMARKER; - uint8_t ASPECT_RATIO_FLAG; - uint8_t ASPECT_RATIO; - uint8_t ASPECT_HORIZ_SIZE; - uint8_t ASPECT_VERT_SIZE; - + uint8_t COLOR_FORMAT_FLAG; + uint8_t MATRIX_COEF; + uint8_t SYNCMARKER; + uint8_t ASPECT_RATIO_FLAG; + uint8_t ASPECT_RATIO; + uint8_t ASPECT_HORIZ_SIZE; + uint8_t ASPECT_VERT_SIZE; + #endif - -} vc1_metadata_t; - -/** This structure represents the sequence header for advanced profile. 
*/ -typedef struct -{ - union - { -#ifndef MFDBIGENDIAN - struct + + } vc1_metadata_t; + + /** This structure represents the sequence header for advanced profile. */ + typedef struct + { + union { - unsigned BITRTQ_POSTPROC:5; - unsigned FRMRTQ_POSTPROC:3; - unsigned COLORDIFF_FORMAT:2; - unsigned LEVEL:3; - unsigned PROFILE:2; - unsigned pad:17; - } seq_flags; +#ifndef MFDBIGENDIAN + struct + { + unsigned BITRTQ_POSTPROC:5; + unsigned FRMRTQ_POSTPROC:3; + unsigned COLORDIFF_FORMAT:2; + unsigned LEVEL:3; + unsigned PROFILE:2; + unsigned pad:17; + } seq_flags; #else - struct - { - unsigned pad:17; - unsigned PROFILE:2; - unsigned LEVEL:3; - unsigned COLORDIFF_FORMAT:2; - unsigned FRMRTQ_POSTPROC:3; - unsigned BITRTQ_POSTPROC:5; - } seq_flags; + struct + { + unsigned pad:17; + unsigned PROFILE:2; + unsigned LEVEL:3; + unsigned COLORDIFF_FORMAT:2; + unsigned FRMRTQ_POSTPROC:3; + unsigned BITRTQ_POSTPROC:5; + } seq_flags; #endif - uint32_t flags; - }; + uint32_t flags; + }; - union - { -#ifndef MFDBIGENDIAN - struct + union { - unsigned DISPLAY_EXT:1; - unsigned PSF:1; - unsigned RESERVED:1; - unsigned FINTERPFLAG:1; - unsigned TFCNTRFLAG:1; - unsigned INTERLACE:1; - unsigned PULLDOWN:1; - unsigned MAX_CODED_HEIGHT:12; - unsigned MAX_CODED_WIDTH:12; - unsigned POSTPROCFLAG:1; - } seq_max_size; +#ifndef MFDBIGENDIAN + struct + { + unsigned DISPLAY_EXT:1; + unsigned PSF:1; + unsigned RESERVED:1; + unsigned FINTERPFLAG:1; + unsigned TFCNTRFLAG:1; + unsigned INTERLACE:1; + unsigned PULLDOWN:1; + unsigned MAX_CODED_HEIGHT:12; + unsigned MAX_CODED_WIDTH:12; + unsigned POSTPROCFLAG:1; + } seq_max_size; #else - struct - { - unsigned POSTPROCFLAG:1; - unsigned MAX_CODED_WIDTH:12; - unsigned MAX_CODED_HEIGHT:12; - unsigned PULLDOWN:1; - unsigned INTERLACE:1; - unsigned TFCNTRFLAG:1; - unsigned FINTERPFLAG:1; - unsigned RESERVED:1; - unsigned PSF:1; - unsigned DISPLAY_EXT:1; - } seq_max_size; + struct + { + unsigned POSTPROCFLAG:1; + unsigned MAX_CODED_WIDTH:12; + unsigned MAX_CODED_HEIGHT:12; + unsigned PULLDOWN:1; + unsigned INTERLACE:1; + unsigned TFCNTRFLAG:1; + unsigned FINTERPFLAG:1; + unsigned RESERVED:1; + unsigned PSF:1; + unsigned DISPLAY_EXT:1; + } seq_max_size; #endif - uint32_t max_size; - }; + uint32_t max_size; + }; - union - { -#ifndef MFDBIGENDIAN - struct + union { - unsigned ASPECT_RATIO_FLAG:1; - unsigned DISP_VERT_SIZE:14; - unsigned DISP_HORIZ_SIZE:14; - unsigned pad:3; - } seq_disp_size; +#ifndef MFDBIGENDIAN + struct + { + unsigned ASPECT_RATIO_FLAG:1; + unsigned DISP_VERT_SIZE:14; + unsigned DISP_HORIZ_SIZE:14; + unsigned pad:3; + } seq_disp_size; #else - struct - { - unsigned pad:3; - unsigned DISP_HORIZ_SIZE:14; - unsigned DISP_VERT_SIZE:14; - unsigned ASPECT_RATIO_FLAG:1; - } seq_disp_size; + struct + { + unsigned pad:3; + unsigned DISP_HORIZ_SIZE:14; + unsigned DISP_VERT_SIZE:14; + unsigned ASPECT_RATIO_FLAG:1; + } seq_disp_size; #endif - uint32_t disp_size; - }; + uint32_t disp_size; + }; - uint8_t ASPECT_RATIO; // 4 bits + uint8_t ASPECT_RATIO; // 4 bits - union - { -#ifndef MFDBIGENDIAN - struct + union { - unsigned ASPECT_VERT_SIZE:8; - unsigned ASPECT_HORIZ_SIZE:8; - unsigned pad:16; - } seq_aspect_size; +#ifndef MFDBIGENDIAN + struct + { + unsigned ASPECT_VERT_SIZE:8; + unsigned ASPECT_HORIZ_SIZE:8; + unsigned pad:16; + } seq_aspect_size; #else - struct - { - unsigned pad:16; - unsigned ASPECT_HORIZ_SIZE:8; - unsigned ASPECT_VERT_SIZE:8; - } seq_aspect_size; + struct + { + unsigned pad:16; + unsigned ASPECT_HORIZ_SIZE:8; + unsigned ASPECT_VERT_SIZE:8; + } 
seq_aspect_size; #endif - uint32_t aspect_size; - }; + uint32_t aspect_size; + }; - uint8_t FRAMERATE_FLAG; // 1b - uint8_t FRAMERATEIND; // 1b + uint8_t FRAMERATE_FLAG; // 1b + uint8_t FRAMERATEIND; // 1b - union - { -#ifndef MFDBIGENDIAN - struct + union { - unsigned FRAMERATEDR:4; - unsigned FRAMERATENR:8; - unsigned pad:20; - } seq_framerate_fraction; +#ifndef MFDBIGENDIAN + struct + { + unsigned FRAMERATEDR:4; + unsigned FRAMERATENR:8; + unsigned pad:20; + } seq_framerate_fraction; #else - struct - { - unsigned pad:20; - unsigned FRAMERATENR:8; - unsigned FRAMERATEDR:4; - } seq_framerate_fraction; + struct + { + unsigned pad:20; + unsigned FRAMERATENR:8; + unsigned FRAMERATEDR:4; + } seq_framerate_fraction; #endif - uint32_t framerate_fraction; - }; + uint32_t framerate_fraction; + }; - uint16_t FRAMERATEEXP; // 16b - uint8_t COLOR_FORMAT_FLAG; // 1b + uint16_t FRAMERATEEXP; // 16b + uint8_t COLOR_FORMAT_FLAG; // 1b - union - { -#ifndef MFDBIGENDIAN - struct + union { - unsigned MATRIX_COEF:8; - unsigned TRANSFER_CHAR:8; - unsigned COLOR_PRIM:8; - unsigned pad:8; - } seq_color_format; +#ifndef MFDBIGENDIAN + struct + { + unsigned MATRIX_COEF:8; + unsigned TRANSFER_CHAR:8; + unsigned COLOR_PRIM:8; + unsigned pad:8; + } seq_color_format; #else - struct - { - unsigned pad:8; - unsigned COLOR_PRIM:8; - unsigned TRANSFER_CHAR:8; - unsigned MATRIX_COEF:8; - } seq_color_format; + struct + { + unsigned pad:8; + unsigned COLOR_PRIM:8; + unsigned TRANSFER_CHAR:8; + unsigned MATRIX_COEF:8; + } seq_color_format; #endif - uint32_t color_format; - }; + uint32_t color_format; + }; - uint8_t HRD_PARAM_FLAG; // 1b - uint8_t HRD_NUM_LEAKY_BUCKETS; // 5b - // No need to parse remaining items - not needed so far -} vc1_SequenceLayerHeader; + uint8_t HRD_PARAM_FLAG; // 1b + uint8_t HRD_NUM_LEAKY_BUCKETS; // 5b + // No need to parse remaining items - not needed so far + } vc1_SequenceLayerHeader; -/** This structure represents metadata for struct c. */ -typedef struct -{ - union - { -#ifndef MFDBIGENDIAN - struct + /** This structure represents metadata for struct c. 
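STRUCT_C and STRUCT_A are the 32-bit header words carried by the RCV container used for simple and main profile (see vc1_ParseRCVSequenceLayer later in this parser): struct_c_rcv packs the profile and coding-tool flags, and struct_a_rcv packs the coded picture dimensions. A small access sketch, where struct_a_word stands in for the value read from the stream:

    vc1_RcvSequenceHeader rcv;
    rcv.struct_a_rcv = struct_a_word;              // 32-bit STRUCT_A word
    uint16_t coded_w = (uint16_t)rcv.struct_a.HORIZ_SIZE;
    uint16_t coded_h = (uint16_t)rcv.struct_a.VERT_SIZE;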
*/ + typedef struct + { + union { - unsigned res6:1; - unsigned FINTERPFLAG:1; - unsigned QUANTIZER:2; - unsigned MAXBFRAMES:3; - unsigned RANGERED:1; - unsigned SYNCMARKER:1; - unsigned OVERLAP:1; - unsigned res5:1; - unsigned VSTRANSFORM:1; - unsigned DQUANT:2; - unsigned EXTENDED_MV:1; - unsigned FASTUVMC:1; - unsigned res4:1; - unsigned MULTIRES:1; - unsigned res3:1; - unsigned LOOPFILTER:1; - unsigned BITRTQ_POSTPROC:5; - unsigned FRMRTQ_POSTPROC:3; - unsigned PROFILE:4; - } struct_c; +#ifndef MFDBIGENDIAN + struct + { + unsigned res6:1; + unsigned FINTERPFLAG:1; + unsigned QUANTIZER:2; + unsigned MAXBFRAMES:3; + unsigned RANGERED:1; + unsigned SYNCMARKER:1; + unsigned OVERLAP:1; + unsigned res5:1; + unsigned VSTRANSFORM:1; + unsigned DQUANT:2; + unsigned EXTENDED_MV:1; + unsigned FASTUVMC:1; + unsigned res4:1; + unsigned MULTIRES:1; + unsigned res3:1; + unsigned LOOPFILTER:1; + unsigned BITRTQ_POSTPROC:5; + unsigned FRMRTQ_POSTPROC:3; + unsigned PROFILE:4; + } struct_c; #else - struct - { - unsigned PROFILE:4; - unsigned FRMRTQ_POSTPROC:3; - unsigned BITRTQ_POSTPROC:5; - unsigned LOOPFILTER:1; - unsigned res3:1; - unsigned MULTIRES:1; - unsigned res4:1; - unsigned FASTUVMC:1; - unsigned EXTENDED_MV:1; - unsigned DQUANT:2; - unsigned VSTRANSFORM:1; - unsigned res5:1; - unsigned OVERLAP:1; - unsigned SYNCMARKER:1; - unsigned RANGERED:1; - unsigned MAXBFRAMES:3; - unsigned QUANTIZER:2; - unsigned FINTERPFLAG:1; - unsigned res6:1; - } struct_c; + struct + { + unsigned PROFILE:4; + unsigned FRMRTQ_POSTPROC:3; + unsigned BITRTQ_POSTPROC:5; + unsigned LOOPFILTER:1; + unsigned res3:1; + unsigned MULTIRES:1; + unsigned res4:1; + unsigned FASTUVMC:1; + unsigned EXTENDED_MV:1; + unsigned DQUANT:2; + unsigned VSTRANSFORM:1; + unsigned res5:1; + unsigned OVERLAP:1; + unsigned SYNCMARKER:1; + unsigned RANGERED:1; + unsigned MAXBFRAMES:3; + unsigned QUANTIZER:2; + unsigned FINTERPFLAG:1; + unsigned res6:1; + } struct_c; #endif - uint32_t struct_c_rcv; - }; + uint32_t struct_c_rcv; + }; - union - { -#ifndef MFDBIGENDIAN - struct + union { - unsigned VERT_SIZE:16; - unsigned HORIZ_SIZE:16; - } struct_a; +#ifndef MFDBIGENDIAN + struct + { + unsigned VERT_SIZE:16; + unsigned HORIZ_SIZE:16; + } struct_a; #else - struct - { - unsigned HORIZ_SIZE:16; - unsigned VERT_SIZE:16; - } struct_a; + struct + { + unsigned HORIZ_SIZE:16; + unsigned VERT_SIZE:16; + } struct_a; #endif - uint32_t struct_a_rcv; - }; + uint32_t struct_a_rcv; + }; -} vc1_RcvSequenceHeader; + } vc1_RcvSequenceHeader; -/** This structure represents metadata for entry point layers. */ -typedef struct -{ - union - { -#ifndef MFDBIGENDIAN - struct + /** This structure represents metadata for entry point layers. 
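Entry-point headers exist only in advanced profile; each one re-states the coding-tool flags (quantizer, overlap, DQUANT, loop filter, the MV extensions) that apply until the next entry point, and supplies a new coded size when CODED_SIZE_FLAG is set. The same union-over-uint32_t packing as in the sequence header is used for the flag and size groups.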
*/ + typedef struct + { + union { - unsigned QUANTIZER:2; - unsigned OVERLAP:1; - unsigned VSTRANSFORM:1; - unsigned DQUANT:2; - unsigned EXTENDED_MV:1; - unsigned FASTUVMC:1; - unsigned LOOPFILTER:1; - unsigned REFDIST_FLAG:1; - unsigned PANSCAN_FLAG:1; - unsigned CLOSED_ENTRY:1; - unsigned BROKEN_LINK:1; - unsigned pad1:19; - } ep_flags; +#ifndef MFDBIGENDIAN + struct + { + unsigned QUANTIZER:2; + unsigned OVERLAP:1; + unsigned VSTRANSFORM:1; + unsigned DQUANT:2; + unsigned EXTENDED_MV:1; + unsigned FASTUVMC:1; + unsigned LOOPFILTER:1; + unsigned REFDIST_FLAG:1; + unsigned PANSCAN_FLAG:1; + unsigned CLOSED_ENTRY:1; + unsigned BROKEN_LINK:1; + unsigned pad1:19; + } ep_flags; #else - struct - { - unsigned pad1:19; - unsigned BROKEN_LINK:1; - unsigned CLOSED_ENTRY:1; - unsigned PANSCAN_FLAG:1; - unsigned REFDIST_FLAG:1; - unsigned LOOPFILTER:1; - unsigned FASTUVMC:1; - unsigned EXTENDED_MV:1; - unsigned DQUANT:2; - unsigned VSTRANSFORM:1; - unsigned OVERLAP:1; - unsigned QUANTIZER:2; - } ep_flags; + struct + { + unsigned pad1:19; + unsigned BROKEN_LINK:1; + unsigned CLOSED_ENTRY:1; + unsigned PANSCAN_FLAG:1; + unsigned REFDIST_FLAG:1; + unsigned LOOPFILTER:1; + unsigned FASTUVMC:1; + unsigned EXTENDED_MV:1; + unsigned DQUANT:2; + unsigned VSTRANSFORM:1; + unsigned OVERLAP:1; + unsigned QUANTIZER:2; + } ep_flags; #endif - uint32_t flags; - }; + uint32_t flags; + }; - // Skipping HRD data because it is not needed for our processing + // Skipping HRD data because it is not needed for our processing - union - { -#ifndef MFDBIGENDIAN - struct + union { - unsigned CODED_HEIGHT:12; - unsigned CODED_WIDTH:12; - unsigned pad2:8; - } ep_size; +#ifndef MFDBIGENDIAN + struct + { + unsigned CODED_HEIGHT:12; + unsigned CODED_WIDTH:12; + unsigned pad2:8; + } ep_size; #else - struct - { - unsigned pad2:8; - unsigned CODED_WIDTH:12; - unsigned CODED_HEIGHT:12; - } ep_size; + struct + { + unsigned pad2:8; + unsigned CODED_WIDTH:12; + unsigned CODED_HEIGHT:12; + } ep_size; #endif - uint32_t size; - }; - - uint8_t CODED_SIZE_FLAG; /** 1 bit(s). */ - uint8_t EXTENDED_DMV; /** 1 bit(s). */ - uint8_t RANGE_MAPY_FLAG; /** 1 bit(s). */ - uint8_t RANGE_MAPY; /** 3 bit(s). */ - uint8_t RANGE_MAPUV_FLAG; /** 1 bit(s). */ - uint8_t RANGE_MAPUV; /** 3 bit(s). */ -} vc1_EntryPointHeader; - -/** This structure represents metadata for slice and picture layers. */ -typedef struct -{ - /* Slice layer. */ - uint16_t SLICE_ADDR; /** 9 bit(s). */ - - /* Picture layer for simple or main profile. */ - uint8_t RANGEREDFRM; /** 1 bit(s). */ - uint8_t PTYPE; /** 4 bit(s)? */ - int8_t BFRACTION_NUM; /** ? bit(s). */ - int16_t BFRACTION_DEN; /** ? bit(s). */ - uint8_t PQINDEX; /** 5 bit(s). */ - uint8_t HALFQP; /** 1 bit(s). */ - uint8_t PQUANTIZER; /** 1 bit(s). */ - uint8_t MVRANGE; /** 3 bit(s)? */ - uint8_t MVMODE; /** 4 bit(s)? */ - uint8_t MVMODE2; /** 3 bit(s)? */ - uint8_t LUMSCALE; /** 6 bit(s). */ - uint8_t LUMSHIFT; /** 6 bit(s). */ - uint8_t MVTAB; /** 2 bit(s). */ - uint8_t CBPTAB; /** 2 bit(s). */ - uint8_t TTMBF; /** 1 bit(s). */ - uint8_t TTFRM; /** 2 bit(s). */ - uint8_t TRANSACFRM; /** 2 bit(s)? */ - uint8_t TRANSACFRM2; /** 2 bit(s)? */ - uint8_t TRANSDCTAB; /** 1 bit(s). */ - - /* Picture layer for advanced profile. */ - uint8_t FCM; /** 2 bit(s)? */ - uint8_t FPTYPE; /** 3 bit(s). */ - uint8_t TFCNTR; /** 8 bit(s) */ - uint8_t RPTFRM; /** 2 bit(s) */ - uint8_t TFF; /** 1 bit(s). */ - uint8_t RFF; /** 1 bit(s) */ - uint8_t RNDCTRL; /** 1 bit(s). */ - uint8_t UVSAMP; /** 1 bit(s). 
*/ - uint8_t POSTPROC; /** 2 bit(s). */ - uint8_t CONDOVER; /** 2 bit(s)? */ - uint8_t DMVRANGE; /** ? bit(s)? */ - uint8_t MV4SWITCH; /** 1 bit(s). */ - uint8_t INTCOMP; /** 1 bit(s). */ - uint8_t MBMODETAB; /** 2 bit(s). */ - uint8_t MV2BPTAB; /** 2 bit(s). */ - uint8_t MV4BPTAB; /** 2 bit(s). */ - uint8_t NUMREF; /** 1 bit(s). */ - uint8_t REFFIELD; /** 1 bit(s). */ - - /* PAN SCAN */ - uint8_t PS_PRESENT; /** 1 bit(s). */ - uint8_t number_of_pan_scan_window; /** 4 max. */ - viddec_vc1_pan_scan_window_t PAN_SCAN_WINDOW[VIDDEC_PANSCAN_MAX_OFFSETS]; - - /* VOPDQUANT. */ - uint8_t PQDIFF; /** 3 bit(s). */ - uint8_t ABSPQ; /** 5 bit(s). */ - uint8_t DQUANTFRM; /** 1 bit(s). */ - uint8_t DQPROFILE; /** 2 bit(s). */ - uint8_t DQSBEDGE; /** 2 bit(s). */ - uint8_t DQBILEVEL; /** 1 bit(s). */ - - /* Others. */ - uint8_t PTypeField1; - uint8_t PTypeField2; - uint32_t PQUANT; - uint8_t CurrField; - uint8_t BottomField; - uint32_t UniformQuant; + uint32_t size; + }; + + uint8_t CODED_SIZE_FLAG; /** 1 bit(s). */ + uint8_t EXTENDED_DMV; /** 1 bit(s). */ + uint8_t RANGE_MAPY_FLAG; /** 1 bit(s). */ + uint8_t RANGE_MAPY; /** 3 bit(s). */ + uint8_t RANGE_MAPUV_FLAG; /** 1 bit(s). */ + uint8_t RANGE_MAPUV; /** 3 bit(s). */ + } vc1_EntryPointHeader; + + /** This structure represents metadata for slice and picture layers. */ + typedef struct + { + /* Slice layer. */ + uint16_t SLICE_ADDR; /** 9 bit(s). */ + + /* Picture layer for simple or main profile. */ + uint8_t RANGEREDFRM; /** 1 bit(s). */ + uint8_t PTYPE; /** 4 bit(s)? */ + int8_t BFRACTION_NUM; /** ? bit(s). */ + int16_t BFRACTION_DEN; /** ? bit(s). */ + uint8_t PQINDEX; /** 5 bit(s). */ + uint8_t HALFQP; /** 1 bit(s). */ + uint8_t PQUANTIZER; /** 1 bit(s). */ + uint8_t MVRANGE; /** 3 bit(s)? */ + uint8_t MVMODE; /** 4 bit(s)? */ + uint8_t MVMODE2; /** 3 bit(s)? */ + uint8_t LUMSCALE; /** 6 bit(s). */ + uint8_t LUMSHIFT; /** 6 bit(s). */ + uint8_t MVTAB; /** 2 bit(s). */ + uint8_t CBPTAB; /** 2 bit(s). */ + uint8_t TTMBF; /** 1 bit(s). */ + uint8_t TTFRM; /** 2 bit(s). */ + uint8_t TRANSACFRM; /** 2 bit(s)? */ + uint8_t TRANSACFRM2; /** 2 bit(s)? */ + uint8_t TRANSDCTAB; /** 1 bit(s). */ + + /* Picture layer for advanced profile. */ + uint8_t FCM; /** 2 bit(s)? */ + uint8_t FPTYPE; /** 3 bit(s). */ + uint8_t TFCNTR; /** 8 bit(s) */ + uint8_t RPTFRM; /** 2 bit(s) */ + uint8_t TFF; /** 1 bit(s). */ + uint8_t RFF; /** 1 bit(s) */ + uint8_t RNDCTRL; /** 1 bit(s). */ + uint8_t UVSAMP; /** 1 bit(s). */ + uint8_t POSTPROC; /** 2 bit(s). */ + uint8_t CONDOVER; /** 2 bit(s)? */ + uint8_t DMVRANGE; /** ? bit(s)? */ + uint8_t MV4SWITCH; /** 1 bit(s). */ + uint8_t INTCOMP; /** 1 bit(s). */ + uint8_t MBMODETAB; /** 2 bit(s). */ + uint8_t MV2BPTAB; /** 2 bit(s). */ + uint8_t MV4BPTAB; /** 2 bit(s). */ + uint8_t NUMREF; /** 1 bit(s). */ + uint8_t REFFIELD; /** 1 bit(s). */ + + /* PAN SCAN */ + uint8_t PS_PRESENT; /** 1 bit(s). */ + uint8_t number_of_pan_scan_window; /** 4 max. */ + viddec_vc1_pan_scan_window_t PAN_SCAN_WINDOW[VIDDEC_PANSCAN_MAX_OFFSETS]; + + /* VOPDQUANT. */ + uint8_t PQDIFF; /** 3 bit(s). */ + uint8_t ABSPQ; /** 5 bit(s). */ + uint8_t DQUANTFRM; /** 1 bit(s). */ + uint8_t DQPROFILE; /** 2 bit(s). */ + uint8_t DQSBEDGE; /** 2 bit(s). */ + uint8_t DQBILEVEL; /** 1 bit(s). */ + + /* Others. 
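For field-interlace coded frames (FCM == VC1_FCM_FIELD_INTERLACE) the two fields of a frame may have different picture types, so the per-field types derived from FPTYPE are kept in PTypeField1 and PTypeField2, and CurrField selects the active one, as in:

    int ptype = (picLayerHeader->CurrField == 0)
              ? picLayerHeader->PTypeField1
              : picLayerHeader->PTypeField2;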
*/ + uint8_t PTypeField1; + uint8_t PTypeField2; + uint32_t PQUANT; + uint8_t CurrField; + uint8_t BottomField; + uint32_t UniformQuant; #ifdef VBP - uint8_t raw_MVTYPEMB; - uint8_t raw_DIRECTMB; - uint8_t raw_SKIPMB; - uint8_t raw_ACPRED; - uint8_t raw_FIELDTX; - uint8_t raw_OVERFLAGS; - uint8_t raw_FORWARDMB; - - vc1_Bitplane MVTYPEMB; - vc1_Bitplane DIRECTMB; - vc1_Bitplane SKIPMB; - vc1_Bitplane ACPRED; - vc1_Bitplane FIELDTX; - vc1_Bitplane OVERFLAGS; - vc1_Bitplane FORWARDMB; - uint32_t ALTPQUANT; - uint8_t DQDBEDGE; + uint8_t raw_MVTYPEMB; + uint8_t raw_DIRECTMB; + uint8_t raw_SKIPMB; + uint8_t raw_ACPRED; + uint8_t raw_FIELDTX; + uint8_t raw_OVERFLAGS; + uint8_t raw_FORWARDMB; + + vc1_Bitplane MVTYPEMB; + vc1_Bitplane DIRECTMB; + vc1_Bitplane SKIPMB; + vc1_Bitplane ACPRED; + vc1_Bitplane FIELDTX; + vc1_Bitplane OVERFLAGS; + vc1_Bitplane FORWARDMB; + uint32_t ALTPQUANT; + uint8_t DQDBEDGE; #endif - -} vc1_PictureLayerHeader; -/*@}*/ + } vc1_PictureLayerHeader; + + /*@}*/ #ifdef __cplusplus } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c index 6fec35a..896e18c 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c @@ -33,48 +33,48 @@ const uint8_t VC1_MVMODE_HIGH_TBL[] = const int32_t VC1_BITPLANE_IMODE_TBL[] = { - 4, /* max bits */ - 1, /* total subtables */ - 4, /* subtable sizes */ + 4, /* max bits */ + 1, /* total subtables */ + 4, /* subtable sizes */ - 0, /* 1-bit codes */ - 2, /* 2-bit codes */ + 0, /* 1-bit codes */ + 2, /* 2-bit codes */ 2, VC1_BITPLANE_NORM2_MODE, 3, VC1_BITPLANE_NORM6_MODE, - 3, /* 3-bit codes */ + 3, /* 3-bit codes */ 1, VC1_BITPLANE_DIFF2_MODE, 2, VC1_BITPLANE_ROWSKIP_MODE, 3, VC1_BITPLANE_COLSKIP_MODE, - 2, /* 4-bit codes */ + 2, /* 4-bit codes */ 0, VC1_BITPLANE_RAW_MODE, 1, VC1_BITPLANE_DIFF6_MODE, --1 + -1 }; /* This VLC table is used for decoding of k in bitplane. */ const int32_t VC1_BITPLANE_K_TBL[] = { - 13, /* max bits */ - 2, /* total subtables */ - 6,7,/* subtable sizes */ + 13, /* max bits */ + 2, /* total subtables */ + 6,7,/* subtable sizes */ - 1, /* 1-bit codes */ + 1, /* 1-bit codes */ 1, 0 , - 0, /* 2-bit codes */ - 0, /* 3-bit codes */ - 6, /* 4-bit codes */ + 0, /* 2-bit codes */ + 0, /* 3-bit codes */ + 6, /* 4-bit codes */ 2, 1, 3, 2, 4, 4, 5, 8, 6, 16, 7, 32, - 0, /* 5-bit codes */ - 1, /* 6-bit codes */ + 0, /* 5-bit codes */ + 1, /* 6-bit codes */ (3 << 1)| 1, 63, - 0, /* 7-bit codes */ - 15, /* 8-bit codes */ + 0, /* 7-bit codes */ + 15, /* 8-bit codes */ 0, 3, 1, 5, 2, 6, 3, 9, 4, 10, 5, 12, 6, 17, 7, 18, 8, 20, 9, 24, 10, 33, 11, 34, 12, 36, 13, 40, 14, 48, - 6, /* 9-bit codes */ + 6, /* 9-bit codes */ (3 << 4)| 7, 31, (3 << 4)| 6, 47, (3 << 4)| 5, 55, @@ -82,7 +82,7 @@ const int32_t VC1_BITPLANE_K_TBL[] = (3 << 4)| 3, 61, (3 << 4)| 2, 62, - 20, /* 10-bit codes */ + 20, /* 10-bit codes */ (1 << 6)| 11, 11, (1 << 6)| 7, 7 , (1 << 6)| 13, 13, @@ -107,9 +107,9 @@ const int32_t VC1_BITPLANE_K_TBL[] = (1 << 6)| 18, 50, (1 << 6)| 20, 52, (1 << 6)| 24, 56, - 0, /* 11-bit codes */ - 0, /* 12-bit codes */ - 15, /* 13-bit codes */ + 0, /* 11-bit codes */ + 0, /* 12-bit codes */ + 15, /* 13-bit codes */ (3 << 8)| 14, 15, (3 << 8)| 13, 23, (3 << 8)| 12, 27, @@ -134,18 +134,18 @@ const int32_t VC1_BITPLANE_K_TBL[] = /* This VLC table is used for decoding of BFRACTION. 
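All of the VLC tables in this file share one layout: word 0 is the maximum code length in bits, word 1 the number of lookup sub-tables, followed by that many sub-table sizes; then, for each code length from 1 up to the maximum, a count of codes of that length followed by the entries, with a -1 terminator. Entries are (codeword, value) pairs for the IMODE, k and REFDIST tables, and (codeword, numerator, denominator) triples here, which is why BFRACTION is decoded via vc1_DecodeHuffmanPair. A reference decode loop can be sketched as follows, where get_bit stands in for a 1-bit bitstream read and the real decoder additionally uses the sub-table sizes to avoid the linear scan:

    const int32_t *p = tbl + 2 + tbl[1];     // skip max bits, sub-table count, sizes
    int32_t code = 0;
    for (int32_t len = 1; len <= tbl[0]; len++) {
        code = (code << 1) | get_bit(ctxt);  // accumulate one more bit
        for (int32_t n = *p++; n > 0; n--, p += 3) {
            if (p[0] == code) {              // triples: code, numerator, denominator
                *num = (int8_t)p[1];
                *den = (int16_t)p[2];
                return VC1_STATUS_OK;
            }
        }
    }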
*/ const int32_t VC1_BFRACTION_TBL[] = { - 7, /* max bits */ - 2, /* total subtables */ - 3,4, /* subtable sizes */ - 0, /* 1-bit codes */ - 0, /* 2-bit codes */ - 7, /* 3-bit codes */ + 7, /* max bits */ + 2, /* total subtables */ + 3,4, /* subtable sizes */ + 0, /* 1-bit codes */ + 0, /* 2-bit codes */ + 7, /* 3-bit codes */ 0x00,1,2, 0x01,1,3, 0x02,2,3, 0x03,1,4, 0x04,3,4, 0x05,1,5, 0x06,2,5, - 0, /* 4-bit codes */ - 0, /* 5-bit codes */ - 0, /* 6-bit codes */ - 16, /* 7-bit codes */ + 0, /* 4-bit codes */ + 0, /* 5-bit codes */ + 0, /* 6-bit codes */ + 16, /* 7-bit codes */ 0x70, 3,5, 0x71, 4,5, 0x72, 1,6, 0x73, 5,6, 0x74, 1,7, 0x75, 2,7, 0x76, 3,7, 0x77, 4,7, 0x78, 5,7, 0x79, 6,7, 0x7A, 1,8, 0x7B, 3,8, @@ -160,39 +160,39 @@ const int32_t VC1_BFRACTION_TBL[] = const int32_t VC1_REFDIST_TBL[] = { 16, /* Max bits. */ - 3, /* Total sub-tables. */ - 5, 6, 5, /* Sub-table sizes. */ - - 0, /* 1-bit codes. */ - 3, /* 2-bit codes. */ - 0, 0, 1, 1, 2, 2, - 1, /* 3-bit codes. */ - 6, 3, - 1, /* 4-bit codes. */ - 14, 4, - 1, /* 5-bit codes. */ - 30, 5, - 1, /* 6-bit codes. */ - 62, 6, - 1, /* 7-bit codes. */ - 126, 7, - 1, /* 8-bit codes. */ - 254, 8, - 1, /* 9-bit codes. */ - 510, 9, - 1, /* 10-bit codes. */ - 1022, 10, - 1, /* 11-bit codes. */ - 2046, 11, - 1, /* 12-bit codes. */ - 4094, 12, - 1, /* 13-bit codes. */ - 8190, 13, - 1, /* 14-bit codes. */ - 16382, 14, - 1, /* 15-bit codes. */ - 32766, 15, - 1, /* 16-bit codes. */ - 65534, 16, + 3, /* Total sub-tables. */ + 5, 6, 5, /* Sub-table sizes. */ + + 0, /* 1-bit codes. */ + 3, /* 2-bit codes. */ + 0, 0, 1, 1, 2, 2, + 1, /* 3-bit codes. */ + 6, 3, + 1, /* 4-bit codes. */ + 14, 4, + 1, /* 5-bit codes. */ + 30, 5, + 1, /* 6-bit codes. */ + 62, 6, + 1, /* 7-bit codes. */ + 126, 7, + 1, /* 8-bit codes. */ + 254, 8, + 1, /* 9-bit codes. */ + 510, 9, + 1, /* 10-bit codes. */ + 1022, 10, + 1, /* 11-bit codes. */ + 2046, 11, + 1, /* 12-bit codes. */ + 4094, 12, + 1, /* 13-bit codes. */ + 8190, 13, + 1, /* 14-bit codes. */ + 16382, 14, + 1, /* 15-bit codes. */ + 32766, 15, + 1, /* 16-bit codes. */ + 65534, 16, -1 /* end of table. 
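The REFDIST code structure above is effectively fixed: values 0 to 2 use the 2-bit codes 00, 01 and 10, and every value N from 3 to 16 is coded as N-1 one bits followed by a zero (6 = 110 for 3, 14 = 1110 for 4, up to 65534 for 16).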
*/ }; diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c index 1a37929..3d8192a 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c @@ -43,8 +43,8 @@ vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture(void* ctxt, vc1_Info *pInf if (picLayerHeader->PTYPE == VC1_BI_FRAME) { if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, - &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) - != VC1_STATUS_OK) + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) + != VC1_STATUS_OK) { return status; } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c index e478250..39d6c79 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c @@ -28,8 +28,8 @@ vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info * vc1_metadata_t *md = &pInfo->metadata; vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK) + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK) { return status; } @@ -46,8 +46,8 @@ vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info * { picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, - md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK) + md->widthMB, + md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK) { return status; } @@ -96,13 +96,13 @@ vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pI vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_FIELDTX)) != VC1_STATUS_OK) + md->widthMB, md->heightMB, BPP_FIELDTX)) != VC1_STATUS_OK) { return status; } if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK) + md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK) { return status; } @@ -119,8 +119,8 @@ vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pI { picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, - md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK) + md->widthMB, + md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK) { return status; } @@ -173,13 +173,13 @@ vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInf // to avoid carrying forward the mvmode values from previous field // especially the intensity compensation value picLayerHeader->MVMODE = 0; - + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) { DEB("Error parsing I field \n"); return status; } - + if (picLayerHeader->PQINDEX <= 8) { VC1_GET_BITS9(1, picLayerHeader->HALFQP); @@ -194,14 +194,14 @@ vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInf if (md->POSTPROCFLAG == 1) VC1_GET_BITS9(2, tempValue); /* POSTPROC. 
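(The bitplane decodes in this field-header path pass (md->heightMB+1)/2 rather than md->heightMB: each field covers half the frame height in macroblocks, rounded up, whereas the frame-picture paths above use the full height.)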
*/ - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, (md->heightMB+1)/2, BPP_ACPRED)) != - VC1_STATUS_OK) + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, (md->heightMB+1)/2, BPP_ACPRED)) != + VC1_STATUS_OK) { DEB("Error parsing I field \n"); return status; - } + } if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8)) { @@ -214,15 +214,15 @@ vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInf else { picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; - - if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, - (md->heightMB+1)/2, BPP_OVERFLAGS)) != - VC1_STATUS_OK) - { - DEB("Error parsing I field \n"); - return status; - } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, + (md->heightMB+1)/2, BPP_OVERFLAGS)) != + VC1_STATUS_OK) + { + DEB("Error parsing I field \n"); + return status; + } } } else diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c index c363456..87f8426 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c @@ -32,14 +32,14 @@ vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo) if (md->PROFILE != VC1_PROFILE_ADVANCED) { - // As per spec, for main/simple profile, if the size of the coded picture is <= 1B, + // As per spec, for main/simple profile, if the size of the coded picture is <= 1B, // it shall be treated as a skipped frame. // In content with skipped frames, the data is "00". // rcv to vc1 conversion process adds an additional byte (0x80) to the picture, hence // the data looks like "00 80" // Hence if data is <= 2B, we will consider it skipped (check for 16+1b, if it fails, the frame is skipped). result = viddec_pm_peek_bits(ctxt, &tempValue, 17); - if(result == -1) + if (result == -1) { picLayerHeader->PTYPE = VC1_SKIPPED_FRAME; return status; @@ -86,7 +86,7 @@ vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo) } if (picLayerHeader->PTYPE == VC1_I_FRAME || - picLayerHeader->PTYPE == VC1_BI_FRAME) + picLayerHeader->PTYPE == VC1_BI_FRAME) { status = vc1_ParsePictureHeader_ProgressiveIpicture(ctxt, pInfo); } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c index 2b1c75a..6b20781 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c @@ -107,7 +107,7 @@ vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo) if (picLayerHeader->PS_PRESENT == 1) { if ((md->INTERLACE == 1) && - (md->PSF == 0)) + (md->PSF == 0)) { if (md->PULLDOWN == 1) number_of_pan_scan_window = 2 + picLayerHeader->RFF; @@ -139,23 +139,23 @@ vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo) md->RNDCTRL = picLayerHeader->RNDCTRL; if ((md->INTERLACE == 1) || - (picLayerHeader->FCM != VC1_FCM_PROGRESSIVE)) + (picLayerHeader->FCM != VC1_FCM_PROGRESSIVE)) { VC1_GET_BITS9(1, picLayerHeader->UVSAMP); } if ((md->FINTERPFLAG == 1) && - (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE)) + (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE)) { VC1_GET_BITS9(1, tempValue); /* INTERPFRM. 
*/ } if ((picLayerHeader->PTYPE == VC1_B_FRAME) && - (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE)) + (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE)) { if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) - != VC1_STATUS_OK) + != VC1_STATUS_OK) { return status; } @@ -260,14 +260,14 @@ vc1_Status vc1_ParseFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) if (md->TFCNTRFLAG) { - VC1_GET_BITS9(8, picLayerHeader->TFCNTR); + VC1_GET_BITS9(8, picLayerHeader->TFCNTR); } if (md->PULLDOWN == 1) { if (md->PSF == 1) { - VC1_GET_BITS9(2, picLayerHeader->RPTFRM); + VC1_GET_BITS9(2, picLayerHeader->RPTFRM); } else { @@ -279,7 +279,7 @@ vc1_Status vc1_ParseFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) if (md->PANSCAN_FLAG == 1) { - VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT); + VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT); if (picLayerHeader->PS_PRESENT) { if (md->PULLDOWN) @@ -300,9 +300,9 @@ vc1_Status vc1_ParseFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) VC1_GET_BITS9(1, md->RNDCTRL); #ifdef VBP - picLayerHeader->RNDCTRL = md->RNDCTRL; + picLayerHeader->RNDCTRL = md->RNDCTRL; #endif - + VC1_GET_BITS9(1, picLayerHeader->UVSAMP); if ((md->REFDIST_FLAG == 1) && (picLayerHeader->FPTYPE <= 3)) @@ -315,14 +315,14 @@ vc1_Status vc1_ParseFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) } md->REFDIST = tmp; } else if (md->REFDIST_FLAG == 0) { - md->REFDIST = 0; - } + md->REFDIST = 0; + } if ((picLayerHeader->FPTYPE >= 4) && (picLayerHeader->FPTYPE <= 7)) { if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != - VC1_STATUS_OK) + VC1_STATUS_OK) { return status; } @@ -355,7 +355,7 @@ vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) if (pInfo->picLayerHeader.FCM == VC1_FCM_PROGRESSIVE) { if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) || - (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME)) + (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME)) { status = vc1_ParsePictureHeader_ProgressiveIpicture_Adv(ctxt, pInfo); } @@ -369,7 +369,7 @@ vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) else if (pInfo->picLayerHeader.FCM == VC1_FCM_FRAME_INTERLACE) { if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) || - (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME)) + (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME)) { status = vc1_ParsePictureHeader_InterlaceIpicture_Adv(ctxt, pInfo); } @@ -383,13 +383,13 @@ vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) else if (pInfo->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) { int ptype; - if( pInfo->picLayerHeader.CurrField == 0) + if ( pInfo->picLayerHeader.CurrField == 0) ptype = pInfo->picLayerHeader.PTypeField1; else ptype = pInfo->picLayerHeader.PTypeField2; if ((ptype == VC1_I_FRAME) || - (ptype == VC1_BI_FRAME)) + (ptype == VC1_BI_FRAME)) { status = vc1_ParseFieldHeader_InterlaceIpicture_Adv(ctxt, pInfo); } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c index f57c61d..b921af0 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c @@ -93,26 +93,26 @@ vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInf VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); } else -#ifdef VBP +#ifdef VBP picLayerHeader->MVMODE2 = 0; -#else +#else picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; #endif if ((picLayerHeader->MVMODE == 
VC1_MVMODE_MIXED_MV) || - ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) && - (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV))) + ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) && + (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV))) { if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_MVTYPEMB)) - != VC1_STATUS_OK) + md->widthMB, md->heightMB, BPP_MVTYPEMB)) + != VC1_STATUS_OK) { return status; } } if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) { return status; } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c index ff81282..1e942fc 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c @@ -68,26 +68,26 @@ vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info * md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; } else -#ifdef VBP - picLayerHeader->MVMODE2 = 0; -#else +#ifdef VBP + picLayerHeader->MVMODE2 = 0; +#else picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; #endif if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) || - ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) && - (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV))) + ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) && + (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV))) { if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_MVTYPEMB)) != - VC1_STATUS_OK) + md->widthMB, md->heightMB, BPP_MVTYPEMB)) != + VC1_STATUS_OK) { return status; } } if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) { return status; } @@ -156,7 +156,7 @@ vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pI } if ((status = vc1_DecodeBitplane(ctxt, pInfo, - md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) { return status; } @@ -213,7 +213,7 @@ vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInf vc1_Status status = VC1_STATUS_OK; vc1_metadata_t *md = &pInfo->metadata; vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; - + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) @@ -239,7 +239,7 @@ vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInf } VC1_GET_BITS9(1, picLayerHeader->NUMREF); - + if (picLayerHeader->NUMREF == 0) { VC1_GET_BITS9(1, picLayerHeader->REFFIELD); @@ -294,7 +294,7 @@ vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInf else { VC1_GET_BITS9(1, md->INTCOMPFIELD); - if(md->INTCOMPFIELD == 1) + if (md->INTCOMPFIELD == 1) md->INTCOMPFIELD = VC1_INTCOMP_BOTTOM_FIELD; else md->INTCOMPFIELD = VC1_INTCOMP_TOP_FIELD; @@ -312,11 +312,11 @@ vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInf } } else -#ifdef VBP - picLayerHeader->MVMODE2 = 0; -#else +#ifdef VBP + picLayerHeader->MVMODE2 = 0; +#else picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; -#endif +#endif VC1_GET_BITS9(3, picLayerHeader->MBMODETAB); @@ -332,10 +332,10 @@ vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInf VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. 
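(Mirroring the assignment above: the VBP build leaves MVMODE2 zeroed and tests MVMODE for mixed-MV, while the non-VBP build copies MVMODE into MVMODE2 and tests MVMODE2; either way the 4MVBPTAB read below only happens in mixed-MV mode.)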
*/ #ifdef VBP - if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) -#else + if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) +#else if (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV) -#endif +#endif { VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c index 559a0dd..95b556c 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c @@ -57,18 +57,18 @@ vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo) } else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_DBLEDGES) { -#ifdef VBP +#ifdef VBP VC1_GET_BITS9(2, picLayerHeader->DQDBEDGE); #else - VC1_GET_BITS9(2, picLayerHeader->DQSBEDGE); /* DQDBEDGE. */ -#endif + VC1_GET_BITS9(2, picLayerHeader->DQSBEDGE); /* DQDBEDGE. */ +#endif } else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_ALLMBLKS) { VC1_GET_BITS9(1, picLayerHeader->DQBILEVEL); } if (! (picLayerHeader->DQPROFILE == VC1_DQPROFILE_ALLMBLKS && - picLayerHeader->DQBILEVEL == 0)) + picLayerHeader->DQBILEVEL == 0)) { VC1_GET_BITS9(3, picLayerHeader->PQDIFF); if (picLayerHeader->PQDIFF == 7) @@ -79,17 +79,17 @@ vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo) } } #ifdef VBP - if ((picLayerHeader->DQUANTFRM == 1 && md->DQUANT == 1) || (md->DQUANT == 2)) - { - if (picLayerHeader->PQDIFF == 7) - { - picLayerHeader->ALTPQUANT = picLayerHeader->ABSPQ; - } - else - { - picLayerHeader->ALTPQUANT = picLayerHeader->PQUANT + picLayerHeader->PQDIFF + 1; - } - } + if ((picLayerHeader->DQUANTFRM == 1 && md->DQUANT == 1) || (md->DQUANT == 2)) + { + if (picLayerHeader->PQDIFF == 7) + { + picLayerHeader->ALTPQUANT = picLayerHeader->ABSPQ; + } + else + { + picLayerHeader->ALTPQUANT = picLayerHeader->PQUANT + picLayerHeader->PQDIFF + 1; + } + } #endif return status; } diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c index 9ddc237..a1b47ff 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c @@ -20,8 +20,8 @@ static void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve persist_mem = persist_mem; - for(i=0; iref_frame[i].id = -1; /* first I frame checks that value */ parser->ref_frame[i].anchor[0] = 1; parser->ref_frame[i].anchor[1] = 1; @@ -38,7 +38,7 @@ static void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader)); - if(preserve) + if (preserve) { parser->sc_seen &= VC1_EP_MASK; parser->sc_seen_since_last_wkld &= VC1_EP_MASK; @@ -48,9 +48,9 @@ static void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve parser->sc_seen = VC1_SC_INVALID; parser->sc_seen_since_last_wkld = VC1_SC_INVALID; memset(&parser->info.metadata, 0, sizeof(parser->info.metadata)); - } + } - return; + return; } // viddec_vc1_init static void vc1_swap_intcomp(vc1_viddec_parser_t *parser) @@ -79,13 +79,13 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) ret = viddec_pm_peek_bits(parent, &sc, 32); if ((sc > 0x0100) && (sc < 0x0200)) /* a Start code will be in this range. */ { - ret = viddec_pm_get_bits(parent, &sc, 32); + ret = viddec_pm_get_bits(parent, &sc, 32); } else { - /* In cases where we get a buffer with no start codes, we assume */ - /* that this is a frame of data. We may have to fix this later. 
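In other words, under VBP the 32-bit word is only consumed with viddec_pm_get_bits when the peeked value falls in the start-code range (0x0100, 0x0200); otherwise the buffer is left untouched and treated as bare frame data by synthesizing vc1_SCFrameHeader.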
*/ - sc = vc1_SCFrameHeader; + /* In cases where we get a buffer with no start codes, we assume */ + /* that this is a frame of data. We may have to fix this later. */ + sc = vc1_SCFrameHeader; } #else ret = viddec_pm_get_bits(parent, &sc, 32); @@ -94,170 +94,170 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) parser->is_frame_start = 0; parser->is_second_start = 0; DEB("START_CODE = %02x\n", sc); - switch( sc ) + switch ( sc ) { - case vc1_SCSequenceHeader: + case vc1_SCSequenceHeader: + { + uint32_t data; + parser->ref_frame[0].anchor[0] = 1; + parser->ref_frame[0].anchor[1] = 1; + parser->ref_frame[1].anchor[0] = 1; + parser->ref_frame[1].anchor[1] = 1; + memset( &parser->info.metadata, 0, sizeof(parser->info.metadata)); + /* look if we have a rcv header for main or simple profile */ + ret = viddec_pm_peek_bits(parent,&data ,2); + + if (data == 3) { - uint32_t data; - parser->ref_frame[0].anchor[0] = 1; - parser->ref_frame[0].anchor[1] = 1; - parser->ref_frame[1].anchor[0] = 1; - parser->ref_frame[1].anchor[1] = 1; - memset( &parser->info.metadata, 0, sizeof(parser->info.metadata)); - /* look if we have a rcv header for main or simple profile */ - ret = viddec_pm_peek_bits(parent,&data ,2); - - if (data == 3) - { - status = vc1_ParseSequenceLayer(parent, &parser->info); - } - else - { - status = vc1_ParseRCVSequenceLayer(parent, &parser->info); - } - parser->sc_seen = VC1_SC_SEQ; - parser->sc_seen_since_last_wkld |= VC1_SC_SEQ; -#ifdef VBP - parser->start_code = VC1_SC_SEQ; -#endif - break; + status = vc1_ParseSequenceLayer(parent, &parser->info); } - - case vc1_SCEntryPointHeader: + else { - status = vc1_ParseEntryPointLayer(parent, &parser->info); - parser->sc_seen |= VC1_SC_EP; - // Clear all bits indicating data below ep header - parser->sc_seen &= VC1_EP_MASK; - parser->sc_seen_since_last_wkld |= VC1_SC_EP; -#ifdef VBP - parser->start_code = VC1_SC_EP; -#endif - break; + status = vc1_ParseRCVSequenceLayer(parent, &parser->info); } + parser->sc_seen = VC1_SC_SEQ; + parser->sc_seen_since_last_wkld |= VC1_SC_SEQ; +#ifdef VBP + parser->start_code = VC1_SC_SEQ; +#endif + break; + } - case vc1_SCFrameHeader: - { - memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader)); - status = vc1_ParsePictureLayer(parent, &parser->info); - if((parser->info.picLayerHeader.PTypeField1 == VC1_I_FRAME) || - (parser->info.picLayerHeader.PTypeField1 == VC1_P_FRAME) || - (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) || - (parser->info.picLayerHeader.PTYPE == VC1_P_FRAME)) - { - vc1_swap_intcomp(parser); - } - parser->sc_seen |= VC1_SC_FRM; - // Clear all bits indicating data below frm header - parser->sc_seen &= VC1_FRM_MASK; - parser->sc_seen_since_last_wkld |= VC1_SC_FRM; - //vc1_start_new_frame ( parent, parser ); - - parser->is_frame_start = 1; - vc1_parse_emit_frame_start( parent, parser ); + case vc1_SCEntryPointHeader: + { + status = vc1_ParseEntryPointLayer(parent, &parser->info); + parser->sc_seen |= VC1_SC_EP; + // Clear all bits indicating data below ep header + parser->sc_seen &= VC1_EP_MASK; + parser->sc_seen_since_last_wkld |= VC1_SC_EP; #ifdef VBP - parser->start_code = VC1_SC_FRM; -#endif - break; - } + parser->start_code = VC1_SC_EP; +#endif + break; + } - case vc1_SCSlice: + case vc1_SCFrameHeader: + { + memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader)); + status = vc1_ParsePictureLayer(parent, &parser->info); + if ((parser->info.picLayerHeader.PTypeField1 == VC1_I_FRAME) || + (parser->info.picLayerHeader.PTypeField1 == VC1_P_FRAME) 
|| + (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) || + (parser->info.picLayerHeader.PTYPE == VC1_P_FRAME)) { - status = vc1_ParseSliceLayer(parent, &parser->info); - parser->sc_seen_since_last_wkld |= VC1_SC_SLC; - - vc1_parse_emit_current_slice( parent, parser ); + vc1_swap_intcomp(parser); + } + parser->sc_seen |= VC1_SC_FRM; + // Clear all bits indicating data below frm header + parser->sc_seen &= VC1_FRM_MASK; + parser->sc_seen_since_last_wkld |= VC1_SC_FRM; + //vc1_start_new_frame ( parent, parser ); + parser->is_frame_start = 1; + vc1_parse_emit_frame_start( parent, parser ); #ifdef VBP - parser->start_code = VC1_SC_SLC; -#endif - break; - } + parser->start_code = VC1_SC_FRM; +#endif + break; + } + + case vc1_SCSlice: + { + status = vc1_ParseSliceLayer(parent, &parser->info); + parser->sc_seen_since_last_wkld |= VC1_SC_SLC; + + vc1_parse_emit_current_slice( parent, parser ); - case vc1_SCField: - { - parser->info.picLayerHeader.SLICE_ADDR = 0; - parser->info.picLayerHeader.CurrField = 1; - parser->info.picLayerHeader.REFFIELD = 0; - parser->info.picLayerHeader.NUMREF = 0; - parser->info.picLayerHeader.MBMODETAB = 0; - parser->info.picLayerHeader.MV4SWITCH = 0; - parser->info.picLayerHeader.DMVRANGE = 0; - parser->info.picLayerHeader.MVTAB = 0; - parser->info.picLayerHeader.MVMODE = 0; - parser->info.picLayerHeader.MVRANGE = 0; #ifdef VBP - parser->info.picLayerHeader.raw_MVTYPEMB = 0; - parser->info.picLayerHeader.raw_DIRECTMB = 0; - parser->info.picLayerHeader.raw_SKIPMB = 0; - parser->info.picLayerHeader.raw_ACPRED = 0; - parser->info.picLayerHeader.raw_FIELDTX = 0; - parser->info.picLayerHeader.raw_OVERFLAGS = 0; - parser->info.picLayerHeader.raw_FORWARDMB = 0; - - memset(&(parser->info.picLayerHeader.MVTYPEMB), 0, sizeof(vc1_Bitplane)); - memset(&(parser->info.picLayerHeader.DIRECTMB), 0, sizeof(vc1_Bitplane)); - memset(&(parser->info.picLayerHeader.SKIPMB), 0, sizeof(vc1_Bitplane)); - memset(&(parser->info.picLayerHeader.ACPRED), 0, sizeof(vc1_Bitplane)); - memset(&(parser->info.picLayerHeader.FIELDTX), 0, sizeof(vc1_Bitplane)); - memset(&(parser->info.picLayerHeader.OVERFLAGS), 0, sizeof(vc1_Bitplane)); - memset(&(parser->info.picLayerHeader.FORWARDMB), 0, sizeof(vc1_Bitplane)); - - parser->info.picLayerHeader.ALTPQUANT = 0; - parser->info.picLayerHeader.DQDBEDGE = 0; - #endif - - status = vc1_ParseFieldLayer(parent, &parser->info); - if((parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME) || - (parser->info.picLayerHeader.PTypeField2 == VC1_P_FRAME)) - { - //vc1_swap_intcomp(parser); - } - parser->sc_seen |= VC1_SC_FLD; - parser->sc_seen_since_last_wkld |= VC1_SC_FLD; - - parser->is_second_start = 1; - vc1_parse_emit_second_field_start( parent, parser ); + parser->start_code = VC1_SC_SLC; +#endif + break; + } + + case vc1_SCField: + { + parser->info.picLayerHeader.SLICE_ADDR = 0; + parser->info.picLayerHeader.CurrField = 1; + parser->info.picLayerHeader.REFFIELD = 0; + parser->info.picLayerHeader.NUMREF = 0; + parser->info.picLayerHeader.MBMODETAB = 0; + parser->info.picLayerHeader.MV4SWITCH = 0; + parser->info.picLayerHeader.DMVRANGE = 0; + parser->info.picLayerHeader.MVTAB = 0; + parser->info.picLayerHeader.MVMODE = 0; + parser->info.picLayerHeader.MVRANGE = 0; #ifdef VBP - parser->start_code = VC1_SC_FLD; -#endif - break; + parser->info.picLayerHeader.raw_MVTYPEMB = 0; + parser->info.picLayerHeader.raw_DIRECTMB = 0; + parser->info.picLayerHeader.raw_SKIPMB = 0; + parser->info.picLayerHeader.raw_ACPRED = 0; + parser->info.picLayerHeader.raw_FIELDTX = 0; + 
parser->info.picLayerHeader.raw_OVERFLAGS = 0; + parser->info.picLayerHeader.raw_FORWARDMB = 0; + + memset(&(parser->info.picLayerHeader.MVTYPEMB), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.DIRECTMB), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.SKIPMB), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.ACPRED), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.FIELDTX), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.OVERFLAGS), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.FORWARDMB), 0, sizeof(vc1_Bitplane)); + + parser->info.picLayerHeader.ALTPQUANT = 0; + parser->info.picLayerHeader.DQDBEDGE = 0; +#endif + + status = vc1_ParseFieldLayer(parent, &parser->info); + if ((parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME) || + (parser->info.picLayerHeader.PTypeField2 == VC1_P_FRAME)) + { + //vc1_swap_intcomp(parser); } + parser->sc_seen |= VC1_SC_FLD; + parser->sc_seen_since_last_wkld |= VC1_SC_FLD; - case vc1_SCSequenceUser: - case vc1_SCEntryPointUser: - case vc1_SCFrameUser: - case vc1_SCSliceUser: - case vc1_SCFieldUser: - {/* Handle user data */ - status = vc1_ParseAndAppendUserData(parent, sc); //parse and add items - parser->sc_seen_since_last_wkld |= VC1_SC_UD; + parser->is_second_start = 1; + vc1_parse_emit_second_field_start( parent, parser ); #ifdef VBP - parser->start_code = VC1_SC_UD; -#endif - break; - } + parser->start_code = VC1_SC_FLD; +#endif + break; + } - case vc1_SCEndOfSequence: - { - parser->sc_seen = VC1_SC_INVALID; - parser->sc_seen_since_last_wkld |= VC1_SC_INVALID; + case vc1_SCSequenceUser: + case vc1_SCEntryPointUser: + case vc1_SCFrameUser: + case vc1_SCSliceUser: + case vc1_SCFieldUser: + {/* Handle user data */ + status = vc1_ParseAndAppendUserData(parent, sc); //parse and add items + parser->sc_seen_since_last_wkld |= VC1_SC_UD; #ifdef VBP - parser->start_code = VC1_SC_INVALID; -#endif - break; - } - default: /* Any other SC that is not handled */ - { - DEB("SC = %02x - unhandled\n", sc ); + parser->start_code = VC1_SC_UD; +#endif + break; + } + + case vc1_SCEndOfSequence: + { + parser->sc_seen = VC1_SC_INVALID; + parser->sc_seen_since_last_wkld |= VC1_SC_INVALID; #ifdef VBP - parser->start_code = VC1_SC_INVALID; -#endif - break; - } + parser->start_code = VC1_SC_INVALID; +#endif + break; + } + default: /* Any other SC that is not handled */ + { + DEB("SC = %02x - unhandled\n", sc ); +#ifdef VBP + parser->start_code = VC1_SC_INVALID; +#endif + break; + } } - - + + return VIDDEC_PARSE_SUCESS; } // viddec_vc1_parse @@ -265,7 +265,7 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) /** If a picture header was seen and the next start code is a sequence header, entrypoint header, end of sequence or another frame header, this api returns frame done. - If a sequence header and a frame header was not seen before this point, all the + If a sequence header and a frame header was not seen before this point, all the information needed for decode is not present and parser errors are reported. 
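Concretely, in the switch below: a frame header arriving after a sequence or entry-point header with no frame header since the last workload does not end the workload, and neither does an entry-point header that directly follows a sequence header; any other sequence header, entry-point header, end-of-sequence, EOS or discontinuity returns VIDDEC_PARSE_FRMDONE, and VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE is raised unless both VC1_SC_SEQ and VC1_SC_FRM are set in sc_seen.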
*/ #ifdef VBP @@ -279,44 +279,44 @@ static uint32_t viddec_vc1_wkld_done(void *parent, void *ctxt, unsigned int next parent = parent; switch (next_sc) { - case vc1_SCFrameHeader: - if(((parser->sc_seen_since_last_wkld & VC1_SC_EP) || - (parser->sc_seen_since_last_wkld & VC1_SC_SEQ)) && - (!(parser->sc_seen_since_last_wkld & VC1_SC_FRM))) - { - break; - } - // Deliberate fall-thru case - case vc1_SCEntryPointHeader: - if((next_sc == vc1_SCEntryPointHeader) && - (parser->sc_seen_since_last_wkld & VC1_SC_SEQ) && - (!(parser->sc_seen_since_last_wkld & VC1_SC_EP))) - { - break; - } - // Deliberate fall-thru case - case vc1_SCSequenceHeader: - case vc1_SCEndOfSequence: - case VIDDEC_PARSE_EOS: - case VIDDEC_PARSE_DISCONTINUITY: - ret = VIDDEC_PARSE_FRMDONE; - // Set errors for progressive - if((parser->sc_seen & VC1_SC_SEQ) && (parser->sc_seen & VC1_SC_FRM)) - *codec_specific_errors = 0; - else - *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - vc1_end_frame(parser); - parser->sc_seen_since_last_wkld = VC1_SC_INVALID; - // TODO: Need to check for interlaced + case vc1_SCFrameHeader: + if (((parser->sc_seen_since_last_wkld & VC1_SC_EP) || + (parser->sc_seen_since_last_wkld & VC1_SC_SEQ)) && + (!(parser->sc_seen_since_last_wkld & VC1_SC_FRM))) + { break; - default: - ret = VIDDEC_PARSE_SUCESS; + } + // Deliberate fall-thru case + case vc1_SCEntryPointHeader: + if ((next_sc == vc1_SCEntryPointHeader) && + (parser->sc_seen_since_last_wkld & VC1_SC_SEQ) && + (!(parser->sc_seen_since_last_wkld & VC1_SC_EP))) + { break; + } + // Deliberate fall-thru case + case vc1_SCSequenceHeader: + case vc1_SCEndOfSequence: + case VIDDEC_PARSE_EOS: + case VIDDEC_PARSE_DISCONTINUITY: + ret = VIDDEC_PARSE_FRMDONE; + // Set errors for progressive + if ((parser->sc_seen & VC1_SC_SEQ) && (parser->sc_seen & VC1_SC_FRM)) + *codec_specific_errors = 0; + else + *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + vc1_end_frame(parser); + parser->sc_seen_since_last_wkld = VC1_SC_INVALID; + // TODO: Need to check for interlaced + break; + default: + ret = VIDDEC_PARSE_SUCESS; + break; } //switch - DEB("sc: 0x%x, sc_seen: 0x%x, sc_since_last_wkld:%d, error:%d, ret: %d\n", - next_sc, parser->sc_seen, parser->sc_seen_since_last_wkld, + DEB("sc: 0x%x, sc_seen: 0x%x, sc_since_last_wkld:%d, error:%d, ret: %d\n", + next_sc, parser->sc_seen, parser->sc_seen_since_last_wkld, *codec_specific_errors, ret); - + return ret; } // viddec_vc1_wkld_done diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c index b5bba2b..cf6fa7f 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c @@ -15,29 +15,29 @@ static inline uint32_t vc1_populate_frame_type(uint32_t vc1_frame_type) { uint32_t viddec_frame_type; - switch(vc1_frame_type) - { - case VC1_I_FRAME: - viddec_frame_type = VIDDEC_FRAME_TYPE_I; - break; - case VC1_P_FRAME: - viddec_frame_type = VIDDEC_FRAME_TYPE_P; - break; - case VC1_B_FRAME: - viddec_frame_type = VIDDEC_FRAME_TYPE_B; - break; - case VC1_BI_FRAME: - viddec_frame_type = VIDDEC_FRAME_TYPE_BI; - break; - case VC1_SKIPPED_FRAME : - viddec_frame_type = VIDDEC_FRAME_TYPE_SKIP; - break; - default: - viddec_frame_type = VIDDEC_FRAME_TYPE_INVALID; - break; - } // switch on vc1 frame type - - return(viddec_frame_type); + switch (vc1_frame_type) + { + case VC1_I_FRAME: + viddec_frame_type = VIDDEC_FRAME_TYPE_I; + break; + case VC1_P_FRAME: + 
viddec_frame_type = VIDDEC_FRAME_TYPE_P; + break; + case VC1_B_FRAME: + viddec_frame_type = VIDDEC_FRAME_TYPE_B; + break; + case VC1_BI_FRAME: + viddec_frame_type = VIDDEC_FRAME_TYPE_BI; + break; + case VC1_SKIPPED_FRAME : + viddec_frame_type = VIDDEC_FRAME_TYPE_SKIP; + break; + default: + viddec_frame_type = VIDDEC_FRAME_TYPE_INVALID; + break; + } // switch on vc1 frame type + + return(viddec_frame_type); } // vc1_populate_frame_type static void translate_parser_info_to_frame_attributes(void *parent, vc1_viddec_parser_t *parser) @@ -50,15 +50,15 @@ static void translate_parser_info_to_frame_attributes(void *parent, vc1_viddec_p /* typical sequence layer and entry_point data */ attrs->cont_size.height = info->metadata.height * 2 + 2; attrs->cont_size.width = info->metadata.width * 2 + 2; - + /* frame type */ /* we can have two fileds with different types for field interlace coding mode */ if (info->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) { - attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField1); - attrs->bottom_field_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField2); + attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField1); + attrs->bottom_field_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField2); } else { - attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTYPE); - attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; //unknown + attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTYPE); + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; //unknown } /* frame counter */ @@ -72,11 +72,11 @@ static void translate_parser_info_to_frame_attributes(void *parent, vc1_viddec_p /* PAN Scan */ attrs->vc1.ps_present = info->picLayerHeader.PS_PRESENT; attrs->vc1.num_of_pan_scan_windows = info->picLayerHeader.number_of_pan_scan_window; - for (i=0;ivc1.num_of_pan_scan_windows;i++) { - attrs->vc1.pan_scan_window[i].hoffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].hoffset; - attrs->vc1.pan_scan_window[i].voffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].voffset; - attrs->vc1.pan_scan_window[i].width = info->picLayerHeader.PAN_SCAN_WINDOW[i].width; - attrs->vc1.pan_scan_window[i].height = info->picLayerHeader.PAN_SCAN_WINDOW[i].height; + for (i=0; ivc1.num_of_pan_scan_windows; i++) { + attrs->vc1.pan_scan_window[i].hoffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].hoffset; + attrs->vc1.pan_scan_window[i].voffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].voffset; + attrs->vc1.pan_scan_window[i].width = info->picLayerHeader.PAN_SCAN_WINDOW[i].width; + attrs->vc1.pan_scan_window[i].height = info->picLayerHeader.PAN_SCAN_WINDOW[i].height; } //end for i return; @@ -85,51 +85,51 @@ static void translate_parser_info_to_frame_attributes(void *parent, vc1_viddec_p /* sends VIDDEC_WORKLOAD_VC1_PAST_FRAME item */ static inline void vc1_send_past_ref_items(void *parent) { - viddec_workload_item_t wi; - wi.vwi_type = VIDDEC_WORKLOAD_VC1_PAST_FRAME; - wi.ref_frame.reference_id = 0; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - viddec_pm_append_workitem( parent, &wi, false ); - return; + viddec_workload_item_t wi; + wi.vwi_type = VIDDEC_WORKLOAD_VC1_PAST_FRAME; + wi.ref_frame.reference_id = 0; + wi.ref_frame.luma_phys_addr = 0; + wi.ref_frame.chroma_phys_addr = 0; + viddec_pm_append_workitem( parent, &wi, false ); + return; } /* send future frame item */ static inline void vc1_send_future_ref_items(void *parent) { - viddec_workload_item_t wi; - wi.vwi_type = 
VIDDEC_WORKLOAD_VC1_FUTURE_FRAME; - wi.ref_frame.reference_id = 0; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - viddec_pm_append_workitem( parent, &wi, false ); - return; + viddec_workload_item_t wi; + wi.vwi_type = VIDDEC_WORKLOAD_VC1_FUTURE_FRAME; + wi.ref_frame.reference_id = 0; + wi.ref_frame.luma_phys_addr = 0; + wi.ref_frame.chroma_phys_addr = 0; + viddec_pm_append_workitem( parent, &wi, false ); + return; } /* send reorder frame item to host * future frame gets push to past */ static inline void send_reorder_ref_items(void *parent) { - viddec_workload_item_t wi; - wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER; - wi.ref_reorder.ref_table_offset = 0; - wi.ref_reorder.ref_reorder_00010203 = 0x01010203; //put reference frame index 1 as reference index 0 - wi.ref_reorder.ref_reorder_04050607 = 0x04050607; // index 4,5,6,7 stay the same - viddec_pm_append_workitem( parent, &wi, false ); - return; + viddec_workload_item_t wi; + wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER; + wi.ref_reorder.ref_table_offset = 0; + wi.ref_reorder.ref_reorder_00010203 = 0x01010203; //put reference frame index 1 as reference index 0 + wi.ref_reorder.ref_reorder_04050607 = 0x04050607; // index 4,5,6,7 stay the same + viddec_pm_append_workitem( parent, &wi, false ); + return; } // send_reorder_ref_items /* sends VIDDEC_WORKLOAD_VC1_PAST_FRAME item */ static inline void vc1_send_ref_fcm_items(void *parent, uint32_t past_fcm, uint32_t future_fcm) { - viddec_workload_item_t wi; - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_REF_FRAME_TYPE; - wi.vwi_payload[0]= 0; - wi.vwi_payload[1]= past_fcm; - wi.vwi_payload[2]= future_fcm; - viddec_pm_append_workitem( parent, &wi, false ); - return; + viddec_workload_item_t wi; + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_REF_FRAME_TYPE; + wi.vwi_payload[0]= 0; + wi.vwi_payload[1]= past_fcm; + wi.vwi_payload[2]= future_fcm; + viddec_pm_append_workitem( parent, &wi, false ); + return; } @@ -138,53 +138,53 @@ static inline void vc1_send_ref_fcm_items(void *parent, uint32_t past_fcm, uint3 * future frame gets push to past */ static inline void send_SEQ_ENTRY_registers(void *parent, vc1_viddec_parser_t *parser) { - uint32_t stream_format1 = 0; - uint32_t stream_format2 = 0; - uint32_t entrypoint1 = 0; - viddec_workload_item_t wi; + uint32_t stream_format1 = 0; + uint32_t stream_format2 = 0; + uint32_t entrypoint1 = 0; + viddec_workload_item_t wi; - vc1_metadata_t *md = &(parser->info.metadata); + vc1_metadata_t *md = &(parser->info.metadata); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, PROFILE, stream_format1, md->PROFILE); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, LEVEL, stream_format1, md->LEVEL); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, CHROMAFORMAT, stream_format1, md->CHROMAFORMAT); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, FRMRTQ, stream_format1, md->FRMRTQ); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, BITRTQ, stream_format1, md->BITRTQ); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, POSTPRO, stream_format1, md->POSTPROCFLAG); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, PROFILE, stream_format1, md->PROFILE); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, LEVEL, stream_format1, md->LEVEL); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, CHROMAFORMAT, stream_format1, md->CHROMAFORMAT); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, FRMRTQ, stream_format1, md->FRMRTQ); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, BITRTQ, stream_format1, md->BITRTQ); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, POSTPRO, stream_format1, md->POSTPROCFLAG); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, PULLDOWN, 
stream_format2, md->PULLDOWN); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, INTERLACE, stream_format2, md->INTERLACE); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, TFCNTRFLAG, stream_format2, md->TFCNTRFLAG); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, FINTERPFLAG, stream_format2, md->FINTERPFLAG); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, PSF, stream_format2, md->PSF); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, PULLDOWN, stream_format2, md->PULLDOWN); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, INTERLACE, stream_format2, md->INTERLACE); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, TFCNTRFLAG, stream_format2, md->TFCNTRFLAG); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, FINTERPFLAG, stream_format2, md->FINTERPFLAG); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, PSF, stream_format2, md->PSF); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, BROKEN_LINK, entrypoint1, md->BROKEN_LINK); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, CLOSED_ENTRY, entrypoint1, md->CLOSED_ENTRY); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, PANSCAN_FLAG, entrypoint1, md->PANSCAN_FLAG); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, REFDIST_FLAG, entrypoint1, md->REFDIST_FLAG); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, LOOPFILTER, entrypoint1, md->LOOPFILTER); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, FASTUVMC, entrypoint1, md->FASTUVMC); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_MV, entrypoint1, md->EXTENDED_MV); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, DQUANT, entrypoint1, md->DQUANT); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, VS_TRANSFORM, entrypoint1, md->VSTRANSFORM); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, OVERLAP, entrypoint1, md->OVERLAP); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, QUANTIZER, entrypoint1, md->QUANTIZER); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_DMV, entrypoint1, md->EXTENDED_DMV); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, BROKEN_LINK, entrypoint1, md->BROKEN_LINK); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, CLOSED_ENTRY, entrypoint1, md->CLOSED_ENTRY); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, PANSCAN_FLAG, entrypoint1, md->PANSCAN_FLAG); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, REFDIST_FLAG, entrypoint1, md->REFDIST_FLAG); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, LOOPFILTER, entrypoint1, md->LOOPFILTER); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, FASTUVMC, entrypoint1, md->FASTUVMC); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_MV, entrypoint1, md->EXTENDED_MV); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, DQUANT, entrypoint1, md->DQUANT); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, VS_TRANSFORM, entrypoint1, md->VSTRANSFORM); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, OVERLAP, entrypoint1, md->OVERLAP); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, QUANTIZER, entrypoint1, md->QUANTIZER); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_DMV, entrypoint1, md->EXTENDED_DMV); - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SEQ_ENTRY; + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SEQ_ENTRY; - wi.vwi_payload[0] = stream_format1; - wi.vwi_payload[1] = stream_format2; - wi.vwi_payload[2] = entrypoint1; + wi.vwi_payload[0] = stream_format1; + wi.vwi_payload[1] = stream_format2; + wi.vwi_payload[2] = entrypoint1; - viddec_pm_append_workitem( parent, &wi, false ); - return; + viddec_pm_append_workitem( parent, &wi, false ); + return; } // send_reorder_ref_items @@ -192,47 +192,47 @@ static inline void send_SEQ_ENTRY_registers(void *parent, vc1_viddec_parser_t *p * future frame gets push to past */ static inline void send_SIZE_AND_AP_RANGEMAP_registers(void *parent, vc1_viddec_parser_t *parser) { - uint32_t coded_size = 0; - uint32_t ap_range_map = 0; + uint32_t coded_size = 0; + uint32_t ap_range_map = 0; - 
viddec_workload_item_t wi; + viddec_workload_item_t wi; - vc1_metadata_t *md = &(parser->info.metadata); + vc1_metadata_t *md = &(parser->info.metadata); - BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, WIDTH, coded_size, md->width); - BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, HEIGHT, coded_size, md->height); + BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, WIDTH, coded_size, md->width); + BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, HEIGHT, coded_size, md->height); - /* if range reduction is indicated at seq. layer, populate range reduction registers for the frame*/ - if(VC1_PROFILE_ADVANCED == md->PROFILE) - { + /* if range reduction is indicated at seq. layer, populate range reduction registers for the frame*/ + if (VC1_PROFILE_ADVANCED == md->PROFILE) + { + + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, ap_range_map, md->RANGE_MAPY_FLAG); + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y, ap_range_map, md->RANGE_MAPY); + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, ap_range_map, md->RANGE_MAPUV_FLAG); + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV, ap_range_map, md->RANGE_MAPUV); - BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, ap_range_map, md->RANGE_MAPY_FLAG); - BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y, ap_range_map, md->RANGE_MAPY); - BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, ap_range_map, md->RANGE_MAPUV_FLAG); - BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV, ap_range_map, md->RANGE_MAPUV); + } + else + { + ap_range_map = 0; + } - } - else - { - ap_range_map = 0; - } + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SIZE_AND_AP_RANGEMAP; - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SIZE_AND_AP_RANGEMAP; - - wi.vwi_payload[0] = 0; - wi.vwi_payload[1] = coded_size; - wi.vwi_payload[2] = ap_range_map; + wi.vwi_payload[0] = 0; + wi.vwi_payload[1] = coded_size; + wi.vwi_payload[2] = ap_range_map; - viddec_pm_append_workitem( parent, &wi, false ); - return; + viddec_pm_append_workitem( parent, &wi, false ); + return; } // send_reorder_ref_items @@ -241,202 +241,202 @@ static inline void send_SIZE_AND_AP_RANGEMAP_registers(void *parent, vc1_viddec_ * future frame gets push to past */ static inline void send_SLICE_FRAME_TYPE_INFO_registers(void *parent, vc1_viddec_parser_t *parser) { - uint32_t alt_frame_type = 0; - uint32_t frame_type = 0; + uint32_t alt_frame_type = 0; + uint32_t frame_type = 0; - vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); - viddec_workload_item_t wi; + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); + viddec_workload_item_t wi; - vc1_metadata_t *md = &(parser->info.metadata); + vc1_metadata_t *md = &(parser->info.metadata); - BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, FCM, frame_type, pic->FCM); - BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, PTYPE, frame_type, pic->PTYPE); + BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, FCM, frame_type, pic->FCM); + BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, PTYPE, frame_type, pic->PTYPE); - alt_frame_type = frame_type; + alt_frame_type = frame_type; - if(VC1_PROFILE_ADVANCED == md->PROFILE) - { - if( (VC1_P_FRAME == pic->PTYPE)||(VC1_B_FRAME == pic->PTYPE) ) - { - BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, pic->PQUANT); - } - } - else - { - if( VC1_SKIPPED_FRAME== pic->PTYPE) - { - BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, 0); - } else { - BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, pic->PQUANT); - } - } + if (VC1_PROFILE_ADVANCED == md->PROFILE) + { + if ( (VC1_P_FRAME == pic->PTYPE)||(VC1_B_FRAME == pic->PTYPE) ) + { + BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, pic->PQUANT); + } + } + 
else + { + if ( VC1_SKIPPED_FRAME== pic->PTYPE) + { + BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, 0); + } else { + BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, pic->PQUANT); + } + } - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_FRAME_TYPE_INFO; + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_FRAME_TYPE_INFO; - wi.vwi_payload[0] = 0; - wi.vwi_payload[1] = frame_type; - wi.vwi_payload[2] = alt_frame_type; + wi.vwi_payload[0] = 0; + wi.vwi_payload[1] = frame_type; + wi.vwi_payload[2] = alt_frame_type; - viddec_pm_append_workitem( parent, &wi, false ); - return; + viddec_pm_append_workitem( parent, &wi, false ); + return; } // send_reorder_ref_items /* send reorder frame item to host * future frame gets push to past */ static inline void send_SLICE_CONTROL_INFO_registers(void *parent, vc1_viddec_parser_t *parser) { - uint32_t recon_control = 0; - uint32_t mv_control = 0; - uint32_t blk_control = 0; - - vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); - viddec_workload_item_t wi; - - int is_previous_ref_rr=0; - - vc1_metadata_t *md = &(parser->info.metadata); - - - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, RNDCTRL, recon_control, md->RNDCTRL); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UVSAMP, recon_control, pic->UVSAMP); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQUANT, recon_control, pic->PQUANT); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, HALFQP, recon_control, pic->HALFQP); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UNIFORM_QNT, recon_control, pic->UniformQuant); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, POSTPROC, recon_control, pic->POSTPROC); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, CONDOVER, recon_control, pic->CONDOVER); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQINDEX_LE8, recon_control, (pic->PQINDEX <= 8)); - - /* Get the range reduced status of the previous frame */ - switch (pic->PTYPE) - { - case VC1_P_FRAME: - { - is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].rr_frm; - break; - } - case VC1_B_FRAME: - { - is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].rr_frm; - break; - } - default: - { - break; - } - } - - if(pic->RANGEREDFRM) - { - - if(!is_previous_ref_rr) - { - BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, recon_control, 1); - BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, recon_control, 1); - } - } - else - { - /* if current frame is not RR but previous was RR, scale up the reference frame ( RANGE_REF_RED_TYPE = 0) */ - if(is_previous_ref_rr) - { - BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, recon_control, 1); - BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, recon_control, 0); - } - } // end for RR upscale - - - - - - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVRANGE, mv_control, pic->MVRANGE); - if ( pic->MVMODE == VC1_MVMODE_INTENSCOMP) - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, mv_control, pic->MVMODE2); - else - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, mv_control, pic->MVMODE); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVTAB, mv_control, pic->MVTAB); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, DMVRANGE, mv_control, pic->DMVRANGE); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MV4SWITCH, mv_control, pic->MV4SWITCH); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MBMODETAB, mv_control, pic->MBMODETAB); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, NUMREF, mv_control, - pic->NUMREF || ((pic->PTYPE == VC1_B_FRAME) && ( pic->FCM == VC1_FCM_FIELD_INTERLACE ) )); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, REFFIELD, mv_control, 
pic->REFFIELD); - - - - // BLOCK CONTROL REGISTER Offset 0x2C - BF_WRITE( VC1_0_SEQPIC_BLOCK_CONTROL, CBPTAB, blk_control, pic->CBPTAB); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTMFB, blk_control, pic->TTMBF); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTFRM, blk_control, pic->TTFRM); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV2BPTAB, blk_control, pic->MV2BPTAB); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV4BPTAB, blk_control, pic->MV4BPTAB); - if((pic->CurrField == 1) && (pic->SLICE_ADDR)) - { - int mby = md->height * 2 + 2; - mby = (mby + 15 ) / 16; - pic->SLICE_ADDR -= (mby/2); - } - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, INITIAL_MV_Y, blk_control, pic->SLICE_ADDR); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID2, blk_control, md->bp_raw[0]); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID1, blk_control, md->bp_raw[1]); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID0, blk_control, md->bp_raw[2]); - - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_CONTROL_INFO; - - - wi.vwi_payload[0] = recon_control; - wi.vwi_payload[1] = mv_control; - wi.vwi_payload[2] = blk_control; - - viddec_pm_append_workitem( parent, &wi, false ); - return; + uint32_t recon_control = 0; + uint32_t mv_control = 0; + uint32_t blk_control = 0; + + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); + viddec_workload_item_t wi; + + int is_previous_ref_rr=0; + + vc1_metadata_t *md = &(parser->info.metadata); + + + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, RNDCTRL, recon_control, md->RNDCTRL); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UVSAMP, recon_control, pic->UVSAMP); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQUANT, recon_control, pic->PQUANT); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, HALFQP, recon_control, pic->HALFQP); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UNIFORM_QNT, recon_control, pic->UniformQuant); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, POSTPROC, recon_control, pic->POSTPROC); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, CONDOVER, recon_control, pic->CONDOVER); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQINDEX_LE8, recon_control, (pic->PQINDEX <= 8)); + + /* Get the range reduced status of the previous frame */ + switch (pic->PTYPE) + { + case VC1_P_FRAME: + { + is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].rr_frm; + break; + } + case VC1_B_FRAME: + { + is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].rr_frm; + break; + } + default: + { + break; + } + } + + if (pic->RANGEREDFRM) + { + + if (!is_previous_ref_rr) + { + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, recon_control, 1); + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, recon_control, 1); + } + } + else + { + /* if current frame is not RR but previous was RR, scale up the reference frame ( RANGE_REF_RED_TYPE = 0) */ + if (is_previous_ref_rr) + { + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, recon_control, 1); + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, recon_control, 0); + } + } // end for RR upscale + + + + + + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVRANGE, mv_control, pic->MVRANGE); + if ( pic->MVMODE == VC1_MVMODE_INTENSCOMP) + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, mv_control, pic->MVMODE2); + else + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, mv_control, pic->MVMODE); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVTAB, mv_control, pic->MVTAB); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, DMVRANGE, mv_control, pic->DMVRANGE); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MV4SWITCH, mv_control, pic->MV4SWITCH); + BF_WRITE( 
VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MBMODETAB, mv_control, pic->MBMODETAB); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, NUMREF, mv_control, + pic->NUMREF || ((pic->PTYPE == VC1_B_FRAME) && ( pic->FCM == VC1_FCM_FIELD_INTERLACE ) )); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, REFFIELD, mv_control, pic->REFFIELD); + + + + // BLOCK CONTROL REGISTER Offset 0x2C + BF_WRITE( VC1_0_SEQPIC_BLOCK_CONTROL, CBPTAB, blk_control, pic->CBPTAB); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTMFB, blk_control, pic->TTMBF); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTFRM, blk_control, pic->TTFRM); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV2BPTAB, blk_control, pic->MV2BPTAB); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV4BPTAB, blk_control, pic->MV4BPTAB); + if ((pic->CurrField == 1) && (pic->SLICE_ADDR)) + { + int mby = md->height * 2 + 2; + mby = (mby + 15 ) / 16; + pic->SLICE_ADDR -= (mby/2); + } + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, INITIAL_MV_Y, blk_control, pic->SLICE_ADDR); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID2, blk_control, md->bp_raw[0]); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID1, blk_control, md->bp_raw[1]); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID0, blk_control, md->bp_raw[2]); + + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_CONTROL_INFO; + + + wi.vwi_payload[0] = recon_control; + wi.vwi_payload[1] = mv_control; + wi.vwi_payload[2] = blk_control; + + viddec_pm_append_workitem( parent, &wi, false ); + return; } // send_reorder_ref_items /* send reorder frame item to host * future frame gets push to past */ static inline void send_SLICE_OTHER_INFO_registers(void *parent, vc1_viddec_parser_t *parser) { - uint32_t trans_data = 0; - uint32_t vop_dquant = 0; - uint32_t ref_bfraction = 0; + uint32_t trans_data = 0; + uint32_t vop_dquant = 0; + uint32_t ref_bfraction = 0; - vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); - viddec_workload_item_t wi; + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); + viddec_workload_item_t wi; - vc1_metadata_t *md = &(parser->info.metadata); + vc1_metadata_t *md = &(parser->info.metadata); - BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_DEN, ref_bfraction, pic->BFRACTION_DEN); - BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_NUM, ref_bfraction, pic->BFRACTION_NUM); - BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, REFDIST, ref_bfraction, md->REFDIST); + BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_DEN, ref_bfraction, pic->BFRACTION_DEN); + BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_NUM, ref_bfraction, pic->BFRACTION_NUM); + BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, REFDIST, ref_bfraction, md->REFDIST); - if(md->DQUANT) - { - if(pic->PQDIFF == 7) - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, vop_dquant, pic->ABSPQ); - else if (pic->DQUANTFRM == 1) - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, vop_dquant, pic->PQUANT + pic->PQDIFF + 1); - } - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQUANTFRM, vop_dquant, pic->DQUANTFRM); - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQPROFILE, vop_dquant, pic->DQPROFILE); - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQES, vop_dquant, pic->DQSBEDGE); - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQBILEVEL, vop_dquant, pic->DQBILEVEL); + if (md->DQUANT) + { + if (pic->PQDIFF == 7) + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, vop_dquant, pic->ABSPQ); + else if (pic->DQUANTFRM == 1) + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, vop_dquant, pic->PQUANT + pic->PQDIFF + 1); + } + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQUANTFRM, vop_dquant, pic->DQUANTFRM); + BF_WRITE( 
VC1_0_SEQPIC_VOP_DEQUANT, DQPROFILE, vop_dquant, pic->DQPROFILE); + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQES, vop_dquant, pic->DQSBEDGE); + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQBILEVEL, vop_dquant, pic->DQBILEVEL); - BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM, trans_data, pic->TRANSACFRM); - BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM2, trans_data, pic->TRANSACFRM2); - BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSDCTAB, trans_data, pic->TRANSDCTAB); + BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM, trans_data, pic->TRANSACFRM); + BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM2, trans_data, pic->TRANSACFRM2); + BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSDCTAB, trans_data, pic->TRANSDCTAB); - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_OTHER_INFO; + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_OTHER_INFO; - wi.vwi_payload[0] = trans_data; - wi.vwi_payload[1] = vop_dquant; - wi.vwi_payload[2] = ref_bfraction; + wi.vwi_payload[0] = trans_data; + wi.vwi_payload[1] = vop_dquant; + wi.vwi_payload[2] = ref_bfraction; - viddec_pm_append_workitem( parent, &wi, false ); - return; + viddec_pm_append_workitem( parent, &wi, false ); + return; } // send_reorder_ref_items @@ -445,50 +445,50 @@ static inline void send_SLICE_OTHER_INFO_registers(void *parent, vc1_viddec_pars * future frame gets push to past */ static inline void send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(void *parent, vc1_viddec_parser_t *parser) { - uint32_t imgstruct = 0; - uint32_t fieldref_ctrl_id = 0; - uint32_t smp_rangemap = 0; - - vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); - viddec_workload_item_t wi; - - vc1_metadata_t *md = &(parser->info.metadata); - - if( pic->FCM == VC1_FCM_FIELD_INTERLACE ) { - BF_WRITE(VC1_0_SEQPIC_IMAGE_STRUCTURE, IMG_STRUC, imgstruct, (pic->BottomField) ? 2 : 1); - } - - BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, TOP_FIELD, fieldref_ctrl_id, pic->BottomField); - BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, SECOND_FIELD, fieldref_ctrl_id, pic->CurrField); - if(parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) - { - BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, fieldref_ctrl_id, 1); - } - else - { - BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, fieldref_ctrl_id, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[pic->CurrField]); - } - - if(VC1_PROFILE_ADVANCED != md->PROFILE) - { - if(pic->RANGEREDFRM) - { - //BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, smp_rangemap, md->RANGE_MAPY_FLAG); - //BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, smp_rangemap, md->RANGE_MAPUV_FLAG); - smp_rangemap = 0x11; - } - - } - - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO; - - - wi.vwi_payload[0] = imgstruct; - wi.vwi_payload[1] = fieldref_ctrl_id; - wi.vwi_payload[2] = smp_rangemap; - - viddec_pm_append_workitem( parent, &wi, false ); - return; + uint32_t imgstruct = 0; + uint32_t fieldref_ctrl_id = 0; + uint32_t smp_rangemap = 0; + + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); + viddec_workload_item_t wi; + + vc1_metadata_t *md = &(parser->info.metadata); + + if ( pic->FCM == VC1_FCM_FIELD_INTERLACE ) { + BF_WRITE(VC1_0_SEQPIC_IMAGE_STRUCTURE, IMG_STRUC, imgstruct, (pic->BottomField) ? 
2 : 1); + } + + BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, TOP_FIELD, fieldref_ctrl_id, pic->BottomField); + BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, SECOND_FIELD, fieldref_ctrl_id, pic->CurrField); + if (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) + { + BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, fieldref_ctrl_id, 1); + } + else + { + BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, fieldref_ctrl_id, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[pic->CurrField]); + } + + if (VC1_PROFILE_ADVANCED != md->PROFILE) + { + if (pic->RANGEREDFRM) + { + //BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, smp_rangemap, md->RANGE_MAPY_FLAG); + //BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, smp_rangemap, md->RANGE_MAPUV_FLAG); + smp_rangemap = 0x11; + } + + } + + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO; + + + wi.vwi_payload[0] = imgstruct; + wi.vwi_payload[1] = fieldref_ctrl_id; + wi.vwi_payload[2] = smp_rangemap; + + viddec_pm_append_workitem( parent, &wi, false ); + return; } // send_reorder_ref_items @@ -496,225 +496,225 @@ static inline void send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(void *paren * future frame gets push to past */ static inline void send_INT_COM_registers(void *parent, vc1_viddec_parser_t *parser) { - uint32_t intcomp_fwd_top = 0; - uint32_t intcomp_fwd_bot = 0; - uint32_t intcomp_bwd_top = 0; - uint32_t intcomp_bwd_bot = 0; - uint32_t intcomp_cur = 0; + uint32_t intcomp_fwd_top = 0; + uint32_t intcomp_fwd_bot = 0; + uint32_t intcomp_bwd_top = 0; + uint32_t intcomp_bwd_bot = 0; + uint32_t intcomp_cur = 0; - uint32_t POS_2nd_INTCOMP = 13; - uint32_t MASK_1st_INTCOMP = 0x1fff; - uint32_t MASK_2nd_INTCOMP = 0x3ffe000; + uint32_t POS_2nd_INTCOMP = 13; + uint32_t MASK_1st_INTCOMP = 0x1fff; + uint32_t MASK_2nd_INTCOMP = 0x3ffe000; - vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); - viddec_workload_item_t wi; + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); + viddec_workload_item_t wi; - vc1_metadata_t *md = &(parser->info.metadata); + vc1_metadata_t *md = &(parser->info.metadata); - if(VC1_SKIPPED_FRAME == pic->PTYPE) - { - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top =0; - return; - } - - if( VC1_FCM_FIELD_INTERLACE != pic->FCM ) - { - - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, intcomp_cur, 1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp_cur, pic->LUMSCALE); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp_cur, pic->LUMSHIFT); + if (VC1_SKIPPED_FRAME == pic->PTYPE) + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top =0; + return; + } - if ( !((pic->MVMODE == VC1_MVMODE_INTENSCOMP) || (pic->INTCOMP)) ) - intcomp_cur = 0; + if ( VC1_FCM_FIELD_INTERLACE != pic->FCM ) + { - if( (VC1_BI_FRAME==pic->PTYPE)||(VC1_B_FRAME==pic->PTYPE) ) - { - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = 0; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = 0; + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, intcomp_cur, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp_cur, pic->LUMSCALE); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp_cur, pic->LUMSHIFT); - intcomp_bwd_top = parser->intcomp_top[0]; - intcomp_bwd_bot = parser->intcomp_bot[0]; - intcomp_fwd_bot = parser->intcomp_bot[1]; + if ( !((pic->MVMODE == VC1_MVMODE_INTENSCOMP) || (pic->INTCOMP)) ) + intcomp_cur = 0; + if ( (VC1_BI_FRAME==pic->PTYPE)||(VC1_B_FRAME==pic->PTYPE) ) + { + 
parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = 0; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = 0; - if( parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != (-1) ) - { - if (VC1_SKIPPED_FRAME != parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].type) - intcomp_fwd_top = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].intcomp_top; - } - else - { - if (VC1_SKIPPED_FRAME != parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].type) - intcomp_fwd_top = parser->intcomp_top[1]; - } - } - else - { //I,P TYPE + intcomp_bwd_top = parser->intcomp_top[0]; + intcomp_bwd_bot = parser->intcomp_bot[0]; + intcomp_fwd_bot = parser->intcomp_bot[1]; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; - if(VC1_FCM_FIELD_INTERLACE == parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm) - { - intcomp_fwd_top = parser->intcomp_top[1]; - intcomp_fwd_top |= intcomp_cur << POS_2nd_INTCOMP; + if ( parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != (-1) ) + { + if (VC1_SKIPPED_FRAME != parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].type) + intcomp_fwd_top = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].intcomp_top; + } + else + { + if (VC1_SKIPPED_FRAME != parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].type) + intcomp_fwd_top = parser->intcomp_top[1]; + } + } + else + { //I,P TYPE - intcomp_fwd_bot = parser->intcomp_bot[1]; - intcomp_fwd_bot |= intcomp_cur << POS_2nd_INTCOMP; - } - else - { - intcomp_fwd_top = intcomp_cur;// << POS_2nd_INTCOMP; - intcomp_fwd_bot = 0; - } - } - } - else - { - //FIELD INTERLACE - //if(0!=md->INTCOMPFIELD) - //No debugging - - if (md->INTCOMPFIELD == VC1_INTCOMP_BOTTOM_FIELD) - { - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, intcomp_cur, 1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, intcomp_cur, md->LUMSCALE2); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, intcomp_cur, md->LUMSHIFT2); - } - else - { - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, intcomp_cur, 1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp_cur, pic->LUMSCALE); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp_cur, pic->LUMSHIFT); - } - - if(md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD) - { - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, intcomp_cur, 1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, intcomp_cur, md->LUMSCALE2); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, intcomp_cur, md->LUMSHIFT2); - } - - if(pic->MVMODE != VC1_MVMODE_INTENSCOMP) - { - intcomp_cur = 0; - } - - if(pic->CurrField == 0) - { - if(pic->TFF) - { - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; - } - else - { - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp_cur; - } - } - else - { - if(pic->TFF) - { - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp_cur; - } - else - { parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; - } - } - if(pic->CurrField == 1) - { //SECOND FIELD + if (VC1_FCM_FIELD_INTERLACE == parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm) + { + intcomp_fwd_top = parser->intcomp_top[1]; + intcomp_fwd_top |= intcomp_cur << POS_2nd_INTCOMP; - if(VC1_B_FRAME != pic->PTYPE) - { - if(pic->TFF) + intcomp_fwd_bot = parser->intcomp_bot[1]; + intcomp_fwd_bot |= intcomp_cur << POS_2nd_INTCOMP; + } + else { - intcomp_bwd_top = intcomp_cur & MASK_1st_INTCOMP; + intcomp_fwd_top = intcomp_cur;// << POS_2nd_INTCOMP; + intcomp_fwd_bot = 0; + } + } + } + else + { + //FIELD INTERLACE + //if(0!=md->INTCOMPFIELD) + //No 
debugging - intcomp_fwd_bot = (parser->intcomp_bot[1] & MASK_2nd_INTCOMP) >> POS_2nd_INTCOMP; //??????? - intcomp_fwd_bot |= (intcomp_cur & MASK_2nd_INTCOMP); + if (md->INTCOMPFIELD == VC1_INTCOMP_BOTTOM_FIELD) + { + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, intcomp_cur, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, intcomp_cur, md->LUMSCALE2); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, intcomp_cur, md->LUMSHIFT2); + } + else + { + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, intcomp_cur, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp_cur, pic->LUMSCALE); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp_cur, pic->LUMSHIFT); + } + + if (md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD) + { + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, intcomp_cur, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, intcomp_cur, md->LUMSCALE2); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, intcomp_cur, md->LUMSHIFT2); + } + + if (pic->MVMODE != VC1_MVMODE_INTENSCOMP) + { + intcomp_cur = 0; + } - intcomp_fwd_top = parser->intcomp_top[1]; + if (pic->CurrField == 0) + { + if (pic->TFF) + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; } else { - intcomp_bwd_bot= (intcomp_cur & MASK_2nd_INTCOMP)>>POS_2nd_INTCOMP; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp_cur; + } + } + else + { + if (pic->TFF) + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp_cur; + } + else + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; + } + } + + if (pic->CurrField == 1) + { //SECOND FIELD + + if (VC1_B_FRAME != pic->PTYPE) + { + if (pic->TFF) + { + intcomp_bwd_top = intcomp_cur & MASK_1st_INTCOMP; + + intcomp_fwd_bot = (parser->intcomp_bot[1] & MASK_2nd_INTCOMP) >> POS_2nd_INTCOMP; //??????? 
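/*
 * A minimal standalone sketch of the bit packing used throughout this
 * block: two 13-bit intensity-compensation words share one 32-bit value,
 * the first field in bits 0..12 and the second field in bits 13..25,
 * which is what MASK_1st_INTCOMP (0x1fff), MASK_2nd_INTCOMP (0x3ffe000)
 * and POS_2nd_INTCOMP (13) above encode. The pack/unpack helpers below
 * are hypothetical names for illustration only; they are not part of
 * this patch.
 */
#include <stdint.h>

static uint32_t intcomp_pack(uint32_t first, uint32_t second)
{
    /* keep 13 bits per field; the second field lands in bits 13..25 */
    return (first & 0x1fffu) | ((second & 0x1fffu) << 13);
}

static uint32_t intcomp_first(uint32_t packed)  { return packed & 0x1fffu; }
static uint32_t intcomp_second(uint32_t packed) { return (packed & 0x3ffe000u) >> 13; }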
+ intcomp_fwd_bot |= (intcomp_cur & MASK_2nd_INTCOMP); - intcomp_fwd_top = parser->intcomp_top[1]; + intcomp_fwd_top = parser->intcomp_top[1]; + } + else + { + intcomp_bwd_bot= (intcomp_cur & MASK_2nd_INTCOMP)>>POS_2nd_INTCOMP; - intcomp_fwd_top = (parser->intcomp_top[1] & MASK_2nd_INTCOMP) >> POS_2nd_INTCOMP; - intcomp_fwd_top |= (intcomp_cur&MASK_1st_INTCOMP) << POS_2nd_INTCOMP; + intcomp_fwd_top = (parser->intcomp_top[1] & MASK_2nd_INTCOMP) >> POS_2nd_INTCOMP; + intcomp_fwd_top |= (intcomp_cur&MASK_1st_INTCOMP) << POS_2nd_INTCOMP; + + intcomp_fwd_bot = parser->intcomp_bot[1]; + } } - intcomp_fwd_bot = parser->intcomp_bot[1]; - } - else - { //B TYPE - intcomp_fwd_top = parser->intcomp_top[1]; - intcomp_fwd_bot = parser->intcomp_bot[1]; + else + { //B TYPE + intcomp_fwd_top = parser->intcomp_top[1]; + intcomp_fwd_bot = parser->intcomp_bot[1]; - intcomp_bwd_top = parser->intcomp_top[0]; - intcomp_bwd_bot = parser->intcomp_bot[0]; - } - } - else - { //FIRST FILED - - if( (VC1_B_FRAME==pic->PTYPE)||(VC1_BI_FRAME==pic->PTYPE) ) - { - if(VC1_SKIPPED_FRAME!=parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].type) - { - intcomp_fwd_top = parser->intcomp_top[1]; - intcomp_fwd_bot = parser->intcomp_bot[1]; + intcomp_bwd_top = parser->intcomp_top[0]; + intcomp_bwd_bot = parser->intcomp_bot[0]; } + } + else + { //FIRST FILED - intcomp_bwd_top = parser->intcomp_top[0]; - intcomp_bwd_bot = parser->intcomp_bot[0]; + if ( (VC1_B_FRAME==pic->PTYPE)||(VC1_BI_FRAME==pic->PTYPE) ) + { + if (VC1_SKIPPED_FRAME!=parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].type) + { + intcomp_fwd_top = parser->intcomp_top[1]; + intcomp_fwd_bot = parser->intcomp_bot[1]; + } - } - else - { //I,P TYPE + intcomp_bwd_top = parser->intcomp_top[0]; + intcomp_bwd_bot = parser->intcomp_bot[0]; - intcomp_fwd_top = parser->intcomp_top[1] & MASK_1st_INTCOMP; - intcomp_fwd_top |= (intcomp_cur&MASK_1st_INTCOMP)<<POS_2nd_INTCOMP; - - intcomp_fwd_bot = parser->intcomp_bot[1] & MASK_1st_INTCOMP; - intcomp_fwd_bot |= (intcomp_cur & MASK_2nd_INTCOMP); - } //pic->PTYPE == I,P TYPE - } //pic->CurrField == 0 - } //VC1_FCM_FIELD_INTERLACE != pic->FCM + intcomp_fwd_top = parser->intcomp_top[1] & MASK_1st_INTCOMP; + intcomp_fwd_top |= (intcomp_cur&MASK_1st_INTCOMP)<<POS_2nd_INTCOMP; - if( (VC1_B_FRAME != pic->PTYPE) && (VC1_BI_FRAME != pic->PTYPE) ) - { - parser->intcomp_top[1] = intcomp_fwd_top; - parser->intcomp_bot[1] = intcomp_fwd_bot; + intcomp_fwd_bot = parser->intcomp_bot[1] & MASK_1st_INTCOMP; + intcomp_fwd_bot |= (intcomp_cur & MASK_2nd_INTCOMP); + } //pic->PTYPE == I,P TYPE + } //pic->CurrField == 0 + } //VC1_FCM_FIELD_INTERLACE != pic->FCM - parser->intcomp_top[0] = intcomp_bwd_top; - parser->intcomp_bot[0] = intcomp_bwd_bot; - } + if ( (VC1_B_FRAME != pic->PTYPE) && (VC1_BI_FRAME != pic->PTYPE) ) + { + parser->intcomp_top[1] = intcomp_fwd_top; + parser->intcomp_bot[1] = intcomp_fwd_bot; + + parser->intcomp_top[0] = intcomp_bwd_top; + parser->intcomp_bot[0] = intcomp_bwd_bot; + } - //OS_INFO("intcomp_fwd_top = %d\n", intcomp_fwd_top); - //OS_INFO("intcomp_fwd_bot = %d\n", intcomp_fwd_bot); + //OS_INFO("intcomp_fwd_top = %d\n", intcomp_fwd_top); + //OS_INFO("intcomp_fwd_bot = %d\n", intcomp_fwd_bot); - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_INT_COM_FW; + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_INT_COM_FW; - wi.vwi_payload[0] = 0; - wi.vwi_payload[1] = intcomp_fwd_top; - wi.vwi_payload[2] = intcomp_fwd_bot; + wi.vwi_payload[0] = 0; + wi.vwi_payload[1] = intcomp_fwd_top; + wi.vwi_payload[2] = intcomp_fwd_bot; - viddec_pm_append_workitem( parent, &wi, false ); + viddec_pm_append_workitem( parent, &wi, false ); - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_INT_COM_BW; + wi.vwi_type =
VIDDEC_WORKLOAD_VC1_REGS_INT_COM_BW; - wi.vwi_payload[0] = 0; - wi.vwi_payload[1] = intcomp_bwd_top; - wi.vwi_payload[2] = intcomp_bwd_bot; + wi.vwi_payload[0] = 0; + wi.vwi_payload[1] = intcomp_bwd_top; + wi.vwi_payload[2] = intcomp_bwd_bot; - viddec_pm_append_workitem( parent, &wi, false ); + viddec_pm_append_workitem( parent, &wi, false ); - return; + return; } // send_reorder_ref_items @@ -722,68 +722,68 @@ static inline void send_INT_COM_registers(void *parent, vc1_viddec_parser_t *par */ void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser) { - vc1_metadata_t *md = &(parser->info.metadata); - viddec_workload_t *wl = viddec_pm_get_header(parent); - int frame_type = parser->info.picLayerHeader.PTYPE; - int frame_id = 1; // new reference frame is assigned index 1 - - /* init */ - memset(&parser->spr, 0, sizeof(parser->spr)); - wl->is_reference_frame = 0; - - /* set flag - extra ouput frame needed for range adjustment (range mapping or range reduction */ - if (parser->info.metadata.RANGE_MAPY_FLAG || - parser->info.metadata.RANGE_MAPUV_FLAG || - parser->info.picLayerHeader.RANGEREDFRM) - { - wl->is_reference_frame |= WORKLOAD_FLAGS_RA_FRAME; - } - - LOG_CRIT("vc1_start_new_frame: frame_type=%d \n",frame_type); - - parser->is_reference_picture = ((VC1_B_FRAME != frame_type) && (VC1_BI_FRAME != frame_type)); - - /* reference / anchor frames processing - * we need to send reorder before reference frames */ - if (parser->is_reference_picture) - { - /* one frame has been sent */ - if (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != -1) - { - /* there is a frame in the reference buffer, move it to the past */ - send_reorder_ref_items(parent); - } - } - - /* send workitems for reference frames */ - switch( frame_type ) - { - case VC1_B_FRAME: - { - vc1_send_past_ref_items(parent); - vc1_send_future_ref_items(parent); - vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].fcm, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm); - break; - } - case VC1_SKIPPED_FRAME: - { - wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME; - vc1_send_past_ref_items(parent); - vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm, vc1_PictureFormatNone); - break; - } - case VC1_P_FRAME: - { - vc1_send_past_ref_items( parent); - vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm, vc1_PictureFormatNone); - break; - } + vc1_metadata_t *md = &(parser->info.metadata); + viddec_workload_t *wl = viddec_pm_get_header(parent); + int frame_type = parser->info.picLayerHeader.PTYPE; + int frame_id = 1; // new reference frame is assigned index 1 + + /* init */ + memset(&parser->spr, 0, sizeof(parser->spr)); + wl->is_reference_frame = 0; + + /* set flag - extra ouput frame needed for range adjustment (range mapping or range reduction */ + if (parser->info.metadata.RANGE_MAPY_FLAG || + parser->info.metadata.RANGE_MAPUV_FLAG || + parser->info.picLayerHeader.RANGEREDFRM) + { + wl->is_reference_frame |= WORKLOAD_FLAGS_RA_FRAME; + } + + LOG_CRIT("vc1_start_new_frame: frame_type=%d \n",frame_type); + + parser->is_reference_picture = ((VC1_B_FRAME != frame_type) && (VC1_BI_FRAME != frame_type)); + + /* reference / anchor frames processing + * we need to send reorder before reference frames */ + if (parser->is_reference_picture) + { + /* one frame has been sent */ + if (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != -1) + { + /* there is a frame in the reference buffer, move it to the past */ + send_reorder_ref_items(parent); + } + } + + /* send 
workitems for reference frames */ + switch ( frame_type ) + { + case VC1_B_FRAME: + { + vc1_send_past_ref_items(parent); + vc1_send_future_ref_items(parent); + vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].fcm, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm); + break; + } + case VC1_SKIPPED_FRAME: + { + wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME; + vc1_send_past_ref_items(parent); + vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm, vc1_PictureFormatNone); + break; + } + case VC1_P_FRAME: + { + vc1_send_past_ref_items( parent); + vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm, vc1_PictureFormatNone); + break; + } default: break; - } + } - /* reference / anchor frames from previous code - * we may need it for frame reduction */ + /* reference / anchor frames from previous code + * we may need it for frame reduction */ if (parser->is_reference_picture) { wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (frame_id & WORKLOAD_REFERENCE_FRAME_BMASK); @@ -791,7 +791,7 @@ void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser) parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].id = frame_id; parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].fcm = parser->info.picLayerHeader.FCM; parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[0] = (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME); - if(parser->info.picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) + if (parser->info.picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) { parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[1] = (parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME); } @@ -806,14 +806,14 @@ void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser) parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].tff = parser->info.picLayerHeader.TFF; LOG_CRIT("anchor[0] = %d, anchor[1] = %d", - parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[0], - parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[1] ); + parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[0], + parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[1] ); } - if( parser->info.picLayerHeader.PTYPE == VC1_SKIPPED_FRAME ) + if ( parser->info.picLayerHeader.PTYPE == VC1_SKIPPED_FRAME ) { - translate_parser_info_to_frame_attributes( parent, parser ); - return; + translate_parser_info_to_frame_attributes( parent, parser ); + return; } translate_parser_info_to_frame_attributes( parent, parser ); @@ -828,30 +828,30 @@ void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser) send_INT_COM_registers(parent, parser); { - viddec_workload_item_t wi; - uint32_t bit, byte; - uint8_t is_emul = 0; - - viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul); - - // Send current bit offset and current slice - wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET; - - // If slice data starts in the middle of the emulation prevention sequence - - // Special Case1----[is_emul = 1]: - // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data - // to the decoder starting at the first byte of 0s so that the decoder can detect the - // emulation prevention. But the actual data starts are offset 8 in this bit sequence. - - // Specail Case 2----[is_emul = 2]: - // If slice data starts in the middle of the emulation prevention sequence - - // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need readout this byte. 
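/*
 * A self-contained sketch (an illustration, not part of this patch) of the
 * offset fix-up the comment above describes: when the slice payload begins
 * inside a 00 00 03 emulation-prevention sequence, the parser reports
 * is_emul and the bit offset handed to the decoder is advanced by one byte
 * per level, which is exactly the bit + (is_emul*8) computed below.
 */
#include <stdint.h>

static uint32_t slice_bit_offset(uint32_t bit, uint8_t is_emul)
{
    /* is_emul = 1: data starts one byte into 00 00 03 xx, so skip 8 bits;
     * is_emul = 2: the 0x03 byte itself must be read out, so skip 16 bits. */
    return bit + ((uint32_t)is_emul * 8u);
}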
- // - - wi.vwi_payload[0] = bit + (is_emul*8) ; - wi.vwi_payload[1] = 0xdeaddead; - wi.vwi_payload[2] = 0xdeaddead; - viddec_pm_append_workitem( parent, &wi, false ); + viddec_workload_item_t wi; + uint32_t bit, byte; + uint8_t is_emul = 0; + + viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul); + + // Send current bit offset and current slice + wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET; + + // If slice data starts in the middle of the emulation prevention sequence - + // Special Case1----[is_emul = 1]: + // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data + // to the decoder starting at the first byte of 0s so that the decoder can detect the + // emulation prevention. But the actual data starts are offset 8 in this bit sequence. + + // Specail Case 2----[is_emul = 2]: + // If slice data starts in the middle of the emulation prevention sequence - + // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need readout this byte. + // + + wi.vwi_payload[0] = bit + (is_emul*8) ; + wi.vwi_payload[1] = 0xdeaddead; + wi.vwi_payload[2] = 0xdeaddead; + viddec_pm_append_workitem( parent, &wi, false ); } @@ -864,92 +864,92 @@ void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser) void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser) { - send_SLICE_FRAME_TYPE_INFO_registers(parent, parser); - send_SLICE_CONTROL_INFO_registers(parent, parser); - send_SLICE_OTHER_INFO_registers(parent, parser); - send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser); - send_INT_COM_registers(parent, parser); + send_SLICE_FRAME_TYPE_INFO_registers(parent, parser); + send_SLICE_CONTROL_INFO_registers(parent, parser); + send_SLICE_OTHER_INFO_registers(parent, parser); + send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser); + send_INT_COM_registers(parent, parser); { - viddec_workload_item_t wi; - uint32_t bit, byte; - uint8_t is_emul = 0; + viddec_workload_item_t wi; + uint32_t bit, byte; + uint8_t is_emul = 0; - viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul); + viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul); - // Send current bit offset and current slice - wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET; - // If slice data starts in the middle of the emulation prevention sequence - - // Special Case1----[is_emul = 1]: - // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data - // to the decoder starting at the first byte of 0s so that the decoder can detect the - // emulation prevention. But the actual data starts are offset 8 in this bit sequence. + // Send current bit offset and current slice + wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET; + // If slice data starts in the middle of the emulation prevention sequence - + // Special Case1----[is_emul = 1]: + // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data + // to the decoder starting at the first byte of 0s so that the decoder can detect the + // emulation prevention. But the actual data starts are offset 8 in this bit sequence. - // Specail Case 2----[is_emul = 2]: - // If slice data starts in the middle of the emulation prevention sequence - - // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need readout this byte. - // + // Specail Case 2----[is_emul = 2]: + // If slice data starts in the middle of the emulation prevention sequence - + // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need readout this byte. 
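/*
 * Every send_*() helper in this file follows the same pattern the block
 * below uses: fill a viddec_workload_item_t with a vwi_type tag plus up to
 * three 32-bit payload words, then queue it with viddec_pm_append_workitem().
 * A minimal sketch, assuming the surrounding viddec headers; the helper
 * name is made up for illustration and is not part of this patch.
 */
static inline void emit_item(void *parent, uint32_t type,
                             uint32_t p0, uint32_t p1, uint32_t p2)
{
    viddec_workload_item_t wi;

    wi.vwi_type       = type;   /* e.g. VIDDEC_WORKLOAD_VC1_BITOFFSET */
    wi.vwi_payload[0] = p0;
    wi.vwi_payload[1] = p1;
    wi.vwi_payload[2] = p2;
    viddec_pm_append_workitem(parent, &wi, false);
}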
+ // - wi.vwi_payload[0] = bit + (is_emul*8); - wi.vwi_payload[1] = 0xdeaddead; - wi.vwi_payload[2] = 0xdeaddead; - viddec_pm_append_workitem( parent, &wi, false ); + wi.vwi_payload[0] = bit + (is_emul*8); + wi.vwi_payload[1] = 0xdeaddead; + wi.vwi_payload[2] = 0xdeaddead; + viddec_pm_append_workitem( parent, &wi, false ); } viddec_pm_append_pixeldata( parent ); - return; + return; } void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser) { - send_SLICE_FRAME_TYPE_INFO_registers(parent, parser); - send_SLICE_CONTROL_INFO_registers(parent, parser); - send_SLICE_OTHER_INFO_registers(parent, parser); - //send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser); - //send_INT_COM_registers(parent, parser); + send_SLICE_FRAME_TYPE_INFO_registers(parent, parser); + send_SLICE_CONTROL_INFO_registers(parent, parser); + send_SLICE_OTHER_INFO_registers(parent, parser); + //send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser); + //send_INT_COM_registers(parent, parser); { - viddec_workload_item_t wi; - uint32_t bit, byte; - uint8_t is_emul = 0; - - viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul); - - // Send current bit offset and current slice - wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET; - - // If slice data starts in the middle of the emulation prevention sequence - - // Special Case1----[is_emul = 1]: - // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data - // to the decoder starting at the first byte of 0s so that the decoder can detect the - // emulation prevention. But the actual data starts are offset 8 in this bit sequence. - - // Specail Case 2----[is_emul = 2]: - // If slice data starts in the middle of the emulation prevention sequence - - // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need readout this byte. - // - - wi.vwi_payload[0] = bit + (is_emul*8); - wi.vwi_payload[1] = 0xdeaddead; - wi.vwi_payload[2] = 0xdeaddead; - viddec_pm_append_workitem( parent, &wi, false ); + viddec_workload_item_t wi; + uint32_t bit, byte; + uint8_t is_emul = 0; + + viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul); + + // Send current bit offset and current slice + wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET; + + // If slice data starts in the middle of the emulation prevention sequence - + // Special Case1----[is_emul = 1]: + // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data + // to the decoder starting at the first byte of 0s so that the decoder can detect the + // emulation prevention. But the actual data starts are offset 8 in this bit sequence. + + // Specail Case 2----[is_emul = 2]: + // If slice data starts in the middle of the emulation prevention sequence - + // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need readout this byte. 
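/*
 * A sketch (types simplified, not from this patch) of the reference
 * bookkeeping vc1_end_frame() performs below: slot T_MINUS_0 holds the
 * frame being parsed, T_MINUS_1 the most recent anchor, T_MINUS_2 the one
 * before it. Each reference picture shifts the slots down, which is why
 * frame start emits a reorder item before new PAST/FUTURE reference items.
 */
struct ref_slot { int id; int fcm; int anchor[2]; };

static void end_frame_shift(struct ref_slot ref[3], int is_reference_picture)
{
    if (is_reference_picture) {
        ref[2] = ref[1];   /* T-1 becomes T-2: the oldest reference drops out */
        ref[1] = ref[0];   /* the current frame becomes the newest reference */
    }
}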
+ // + + wi.vwi_payload[0] = bit + (is_emul*8); + wi.vwi_payload[1] = 0xdeaddead; + wi.vwi_payload[2] = 0xdeaddead; + viddec_pm_append_workitem( parent, &wi, false ); } viddec_pm_append_pixeldata( parent ); - return; + return; } void vc1_end_frame(vc1_viddec_parser_t *parser) { /* update status of reference frames */ - if(parser->is_reference_picture) + if (parser->is_reference_picture) { parser->ref_frame[VC1_REF_FRAME_T_MINUS_2] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1]; parser->ref_frame[VC1_REF_FRAME_T_MINUS_1] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_0]; diff --git a/mix_vbp/viddec_fw/fw/include/viddec_debug.h b/mix_vbp/viddec_fw/fw/include/viddec_debug.h index 23db98f..fcae102 100644 --- a/mix_vbp/viddec_fw/fw/include/viddec_debug.h +++ b/mix_vbp/viddec_fw/fw/include/viddec_debug.h @@ -4,19 +4,19 @@ #ifndef VBP #ifdef HOST_ONLY - #include - #include - #define DEB OS_PRINT - #define FWTRACE OS_PRINT("trace:%s %d\n", __FUNCTION__, __LINE__ ); +#include +#include +#define DEB OS_PRINT +#define FWTRACE OS_PRINT("trace:%s %d\n", __FUNCTION__, __LINE__ ); // #define DEB(format, args...) -// #define FWTRACE - #define DEB_FNAME(format, args...) OS_PRINT("%s: %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ , ## args ) - #define CDEB(a, format, args...) if(a != 0) {DEB(format, ##args);} +// #define FWTRACE +#define DEB_FNAME(format, args...) OS_PRINT("%s: %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ , ## args ) +#define CDEB(a, format, args...) if(a != 0) {DEB(format, ##args);} #else - #define DEB(format, args...) - #define FWTRACE - #define CDEB(a, format, args...) - #define DEB_FNAME(format, args...) +#define DEB(format, args...) +#define FWTRACE +#define CDEB(a, format, args...) +#define DEB_FNAME(format, args...) #endif #else // VBP is defined diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk index 9125cb7..aa5330e 100644 --- a/mix_vbp/viddec_fw/fw/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -23,9 +23,6 @@ LOCAL_SRC_FILES := \ LOCAL_CFLAGS := -DVBP -DHOST_ONLY LOCAL_C_INCLUDES += \ - $(GLIB_TOP) \ - $(GLIB_TOP)/glib \ - $(GLIB_TOP)/android \ $(LOCAL_PATH)/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/include \ $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/include \ @@ -46,8 +43,7 @@ LOCAL_MODULE := libmixvbp LOCAL_SHARED_LIBRARIES := \ libdl \ - libcutils \ - libglib-2.0 + libcutils ifeq ($(strip $(MIXVBP_LOG_ENABLE)),true) LOCAL_CFLAGS += -DVBP_TRACE diff --git a/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c b/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c index 299dbce..c815406 100644 --- a/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c +++ b/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c @@ -75,22 +75,22 @@ static os_devhandle_t g_svenh; //#define SVEN_DEVH_DISABLE_SVEN extern int sven_fw_is_tx_enabled( - struct SVENHandle *svenh ); + struct SVENHandle *svenh ); #ifndef SVEN_DEVH_DISABLE_SVEN static void sven_write_event( - struct SVENHandle *svenh, - struct SVENEvent *ev ) + struct SVENHandle *svenh, + struct SVENEvent *ev ) { - if ( NULL == svenh ) - svenh = &g_svenh.devh_svenh; + if ( NULL == svenh ) + svenh = &g_svenh.devh_svenh; - if ( NULL != svenh->phot ) - sven_fw_write_event(svenh,ev); + if ( NULL != svenh->phot ) + sven_fw_write_event(svenh,ev); } static void sven_fw_initialize_event_top( - struct SVENEvent *ev, + struct SVENEvent *ev, int module, int unit, int event_type, @@ -106,14 +106,14 @@ static void sven_fw_initialize_event_top( uint32_t sven_get_timestamp() { - uint32_t value = 0; + uint32_t 
value = 0; - if ( NULL != g_svenh.devh_svenh.ptime ) - { - value = sven_fw_read_external_register( &g_svenh.devh_svenh, g_svenh.devh_svenh.ptime ); - } + if ( NULL != g_svenh.devh_svenh.ptime ) + { + value = sven_fw_read_external_register( &g_svenh.devh_svenh, g_svenh.devh_svenh.ptime ); + } - return(value); + return(value); } /* ---------------------------------------------------------------------- */ @@ -125,26 +125,26 @@ void devh_SVEN_SetModuleUnit( int sven_unit ) { #ifndef SVEN_DEVH_DISABLE_SVEN - if ( NULL == devh ) - devh = &g_svenh; - devh->devh_sven_module = sven_module; - devh->devh_sven_unit = sven_unit; + if ( NULL == devh ) + devh = &g_svenh; + devh->devh_sven_module = sven_module; + devh->devh_sven_unit = sven_unit; #endif } os_devhandle_t *devhandle_factory( const char *desc ) { - /* pointer to global vsparc local registers */ - g_svenh.devh_regs_ptr = (void *) 0x10000000; /* firmware address to Local (GV) registers */ + /* pointer to global vsparc local registers */ + g_svenh.devh_regs_ptr = (void *) 0x10000000; /* firmware address to Local (GV) registers */ - return( &g_svenh ); + return( &g_svenh ); } int devhandle_connect_name( os_devhandle_t *devh, const char *devname ) { - return(1); + return(1); } /* ---------------------------------------------------------------------- */ @@ -169,10 +169,10 @@ void devh_SVEN_WriteModuleEvent( return; sven_fw_initialize_event_top( &ev, - devh->devh_sven_module, - 1 /* devh->devh_sven_unit */, - SVEN_event_type_module_specific, - module_event_subtype ); + devh->devh_sven_module, + 1 /* devh->devh_sven_unit */, + SVEN_event_type_module_specific, + module_event_subtype ); ev.u.se_uint[0] = payload0; ev.u.se_uint[1] = payload1; @@ -189,36 +189,36 @@ void devh_SVEN_WriteModuleEvent( /* SVEN FW TX: Required custom routines to enable FW TX */ /* ---------------------------------------------------------------------- */ int sven_fw_set_globals( - struct SVEN_FW_Globals *fw_globals ) + struct SVEN_FW_Globals *fw_globals ) { - sven_fw_attach( &g_svenh.devh_svenh, fw_globals ); - devh_SVEN_SetModuleUnit( &g_svenh, SVEN_module_GEN4_GV, 1 ); - return(0); + sven_fw_attach( &g_svenh.devh_svenh, fw_globals ); + devh_SVEN_SetModuleUnit( &g_svenh, SVEN_module_GEN4_GV, 1 ); + return(0); } uint32_t cp_using_dma_phys(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap); unsigned int sven_fw_read_external_register( - struct SVENHandle *svenh, - volatile unsigned int *preg ) + struct SVENHandle *svenh, + volatile unsigned int *preg ) { - unsigned int reg __attribute__ ((aligned(8))); + unsigned int reg __attribute__ ((aligned(8))); - (void)svenh; // argument unused + (void)svenh; // argument unused - cp_using_dma_phys( (uint32_t) preg, (uint32_t) &reg, 4, 0, 0 ); + cp_using_dma_phys( (uint32_t) preg, (uint32_t) &reg, 4, 0, 0 ); - return( reg ); + return( reg ); } void sven_fw_copy_event_to_host_mem( - struct SVENHandle *svenh, - volatile struct SVENEvent *to, - const struct SVENEvent *from ) + struct SVENHandle *svenh, + volatile struct SVENEvent *to, + const struct SVENEvent *from ) { - (void)svenh; // argument unused + (void)svenh; // argument unused - cp_using_dma_phys( (uint32_t) to, (uint32_t) from, sizeof(*to), 1, 0 ); + cp_using_dma_phys( (uint32_t) to, (uint32_t) from, sizeof(*to), 1, 0 ); } /* ---------------------------------------------------------------------- */ /* ---------------------------------------------------------------------- */ diff --git a/mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h
b/mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h index 0928ad3..07a4a98 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h +++ b/mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h @@ -27,38 +27,38 @@ typedef struct { uint8_t data[CONFIG_IPC_SYNC_MESSAGE_BUF_SIZE]; -}mfd_sync_msg_t; +} mfd_sync_msg_t; /* Required Information needed by Parser Kernel for each stream */ typedef struct { - uint32_t ddr_cxt; /* phys addr of swap space where Parser kernel stores pvt information */ - uint32_t cxt_size; /* size of context buffer */ - uint32_t strm_type; /* Current stream information*/ + uint32_t ddr_cxt; /* phys addr of swap space where Parser kernel stores pvt information */ + uint32_t cxt_size; /* size of context buffer */ + uint32_t strm_type; /* Current stream information*/ uint32_t wl_time; /* ticks for processing current workload */ uint32_t es_time; /* ticks for processing current workload */ uint32_t low_watermark; /* On crossing this value we generate low watermark interrupt */ - uint8_t state; /* Current state of stream ... start(1), stop(0).. */ + uint8_t state; /* Current state of stream ... start(1), stop(0).. */ uint8_t priority; /* Priority of current stream Real time or Non real time */ uint8_t buffered_data;/* Do we have data from past buffer */ uint8_t pending_interrupt;/* Whether an Interrupt needs to be generated for this stream */ -}mfd_stream_info; +} mfd_stream_info; /* Global data for Parser kernel */ typedef struct { - int32_t low_id; /* last scheduled low priority stream id */ - int32_t high_id;/* last scheduled high priority stream id */ - uint32_t g_parser_tables; /* should point to global_parser_table in DDR */ -}mfd_pk_data_t; + int32_t low_id; /* last scheduled low priority stream id */ + int32_t high_id;/* last scheduled high priority stream id */ + uint32_t g_parser_tables; /* should point to global_parser_table in DDR */ +} mfd_pk_data_t; typedef struct { ipc_msg_data input; ipc_msg_data wkld1; ipc_msg_data wkld2; - viddec_pm_cxt_t pm; -}mfd_pk_strm_cxt; + viddec_pm_cxt_t pm; +} mfd_pk_strm_cxt; /* This structure defines the layout of local memory */ typedef struct @@ -68,8 +68,8 @@ typedef struct FW_IPC_Handle fwIpc; mfd_stream_info stream_info[FW_SUPPORTED_STREAMS]; mfd_pk_data_t g_pk_data; - mfd_pk_strm_cxt srm_cxt; -}dmem_t; + mfd_pk_strm_cxt srm_cxt; +} dmem_t; /* Pvt Functions which will be used by multiple modules */ @@ -88,7 +88,7 @@ static inline uint32_t reg_read(uint32_t offset) static inline void DEBUG(uint32_t print, uint32_t code, uint32_t val) { - if(print > 0) + if (print > 0) { DUMP_TO_MEM(code); DUMP_TO_MEM(val); diff --git a/mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h b/mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h index adfdabf..a5bc644 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h +++ b/mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h @@ -1,25 +1,25 @@ -/* +/* -This file is provided under a dual BSD/GPLv2 license. When using or +This file is provided under a dual BSD/GPLv2 license. When using or redistributing this file, you may do so under either license. GPL LICENSE SUMMARY Copyright(c) 2005-2008 Intel Corporation. All rights reserved. -This program is free software; you can redistribute it and/or modify +This program is free software; you can redistribute it and/or modify it under the terms of version 2 of the GNU General Public License as published by the Free Software Foundation. 
-This program is distributed in the hope that it will be useful, but -WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +This program is distributed in the hope that it will be useful, but +WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -You should have received a copy of the GNU General Public License -along with this program; if not, write to the Free Software +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. -The full GNU General Public License is included in this distribution +The full GNU General Public License is included in this distribution in the file called LICENSE.GPL. Contact Information: @@ -27,34 +27,34 @@ Intel Corporation 2200 Mission College Blvd. Santa Clara, CA 97052 -BSD LICENSE +BSD LICENSE Copyright(c) 2005-2008 Intel Corporation. All rights reserved. -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: -* Redistributions of source code must retain the above copyright +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. -* Redistributions in binary form must reproduce the above copyright -notice, this list of conditions and the following disclaimer in -the documentation and/or other materials provided with the +* Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in +the documentation and/or other materials provided with the distribution. -* Neither the name of Intel Corporation nor the names of its -contributors may be used to endorse or promote products derived +* Neither the name of Intel Corporation nor the names of its +contributors may be used to endorse or promote products derived from this software without specific prior written permission. -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h index 7acb407..18038f3 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h @@ -20,13 +20,13 @@ typedef struct uint32_t max_items; uint32_t num_items; uint32_t result; -}viddec_emitter_wkld; +} viddec_emitter_wkld; typedef struct { viddec_emitter_wkld cur; viddec_emitter_wkld next; -}viddec_emitter; +} viddec_emitter; /* whats this for? Emitting current tag for ES buffer diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h index cccc437..6b0d76c 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h @@ -19,11 +19,11 @@ #ifdef HOST_ONLY #define DUMP_TO_MEM(x) DEB("0x%.08X ",x); #define WRITE_SVEN(event, p1, p2, p3, p4, p5, p6) DEB("Sven evnt=0x%.8X p1=%d p2=%d p3=%d p4=%d p5=%d p6=%d\n",event, p1, p2, p3, p4, p5, p6) -#define read_ret(x) -#define read_fp(x) -#define read_sp(x) -#define read_wim(x) -#define read_psr(x) +#define read_ret(x) +#define read_fp(x) +#define read_sp(x) +#define read_wim(x) +#define read_psr(x) #else extern uint32_t dump_ptr; /* Macros for Dumping data to DDR */ diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h index a77b645..8b4b1c6 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h @@ -1,25 +1,25 @@ -/* +/* - This file is provided under a dual BSD/GPLv2 license. When using or + This file is provided under a dual BSD/GPLv2 license. When using or redistributing this file, you may do so under either license. GPL LICENSE SUMMARY Copyright(c) 2005-2008 Intel Corporation. All rights reserved. - This program is free software; you can redistribute it and/or modify + This program is free software; you can redistribute it and/or modify it under the terms of version 2 of the GNU General Public License as published by the Free Software Foundation. - This program is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. 
- The full GNU General Public License is included in this distribution + The full GNU General Public License is included in this distribution in the file called LICENSE.GPL. Contact Information: @@ -27,34 +27,34 @@ 2200 Mission College Blvd. Santa Clara, CA 97052 - BSD LICENSE + BSD LICENSE Copyright(c) 2005-2008 Intel Corporation. All rights reserved. - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright + * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the distribution. - * Neither the name of Intel Corporation nor the names of its - contributors may be used to endorse or promote products derived + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived from this software without specific prior written permission. - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ @@ -79,7 +79,7 @@ typedef struct { unsigned int state; unsigned int priority; -}FW_IPC_stream_info; +} FW_IPC_stream_info; /* ---------------------------------------------------------------------- */ /* ---------------------------------------------------------------------- */ @@ -95,7 +95,7 @@ typedef struct /** WARNING: EACH OF THESE STRUCTS MUST BE 8 BYTE ALIGNED */ FW_IPC_ReceiveQue rcv_q[CONFIG_IPC_FW_MAX_RX_QUEUES]; /** WARNING: EACH OF THESE STRUCTS MUST BE 8 BYTE ALIGNED */ - FW_IPC_ReceiveQue wkld_q[CONFIG_IPC_FW_MAX_RX_QUEUES]; + FW_IPC_ReceiveQue wkld_q[CONFIG_IPC_FW_MAX_RX_QUEUES]; /** FIRMWARE_TO_HOST Message Queues (outbound) */ struct _IPC_QueueHeader *snd_q_shared[CONFIG_IPC_HOST_MAX_RX_QUEUES]; @@ -141,7 +141,7 @@ This function writes the message of message_size into queue(host_rx_queue). @retval 0 : if write fails. @retval 1 : if write succeeds. */ -int FwIPC_SendMessage(FW_IPC_Handle *fwipc, unsigned int host_rx_queue, const char *message, unsigned int message_size ); +int FwIPC_SendMessage(FW_IPC_Handle *fwipc, unsigned int host_rx_queue, const char *message, unsigned int message_size ); /** This function reads a message(which is <= max_message_size) from rcv_queue of firmware into input parameter message. @@ -157,7 +157,7 @@ int FwIPC_ReadMessage(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, char *mess This function Initialises shared queue headers and sync command buffer for IPC. @param[in] fwipc : Ipc handle. @param[in] synchronous_command_buffer : update handle with pointer to shared memory - between host and FW. + between host and FW. @retval 0 : if write succeeds. */ int FwIPC_Initialize(FW_IPC_Handle *fwipc, volatile char *synchronous_command_buffer ); @@ -166,7 +166,7 @@ int FwIPC_Initialize(FW_IPC_Handle *fwipc, volatile char *synchronous_command_bu This function Initialises Sendqueue with circular buffer which has actual data. @param[in] fwipc : Ipc handle. @param[in] snd_q : Send queue that needs to be initialized. -@param[in] snd_circbuf : Address of circular buffer. +@param[in] snd_circbuf : Address of circular buffer. */ void FWIPC_SendQueue_Init(FW_IPC_Handle *fwipc, FW_IPC_SendQue *snd_q, void *snd_circbuf ); @@ -174,12 +174,12 @@ void FWIPC_SendQueue_Init(FW_IPC_Handle *fwipc, FW_IPC_SendQue *snd_q, void *snd This function Initialises Recvqueue with circular buffer which has actual data. @param[in] fwipc : Ipc handle. @param[in] rcv_q : Receive queue that needs to be initialized. -@param[in] rcv_circbuf : Address of circular buffer. +@param[in] rcv_circbuf : Address of circular buffer. */ void FwIPC_ReceiveQueue_Init(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, void *rcv_circbuf ); /** -This function reads the nth(index) message(which is <= max_message_size ) from rcv_queue of firmware into input parameter message +This function reads the nth(index) message(which is <= max_message_size ) from rcv_queue of firmware into input parameter message by peeking the queue. @param[in] fwipc : Ipc handle. @param[in] rcv_q : Send queue to read from. 
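/* A minimal host-side sketch of the send/read round trip described by the
   two declarations above. The handle, the queue index (0) and the payload
   are illustrative assumptions; only the call shapes and the 0/1 return
   conventions come from this header and from main.c. */
static int fwipc_roundtrip_sketch(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q)
{
    ipc_msg_data msg;

    /* FwIPC_ReadMessage() returns 0 when no message is pending. */
    if (0 == FwIPC_ReadMessage(fwipc, rcv_q, (char *)&msg, sizeof(msg)))
        return 0;

    /* Forward it to host RX queue 0; 1 on success, 0 if the write fails. */
    return FwIPC_SendMessage(fwipc, 0, (char *)&msg, sizeof(msg));
}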
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h index 68cddc7..608c0e7 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h @@ -9,13 +9,13 @@ typedef enum { VIDDEC_PARSE_EOS = 0x0FFF, /* Dummy start code to force EOS */ VIDDEC_PARSE_DISCONTINUITY, /* Dummy start code to force completion and flush */ -}viddec_parser_inband_messages_t; +} viddec_parser_inband_messages_t; typedef struct { uint32_t context_size; uint32_t persist_size; -}viddec_parser_memory_sizes_t; +} viddec_parser_memory_sizes_t; typedef void (*fn_init)(void *ctxt, uint32_t *persist, uint32_t preserve); typedef uint32_t (*fn_parse_sc) (void *ctxt, void *pcxt, void *sc_state); @@ -36,7 +36,7 @@ typedef struct fn_is_frame_start is_frame_start; fn_gen_contrib_tags gen_contrib_tags; fn_gen_assoc_tags gen_assoc_tags; -}viddec_parser_ops_t; +} viddec_parser_ops_t; typedef enum @@ -44,11 +44,11 @@ typedef enum VIDDEC_PARSE_ERROR = 0xF0, VIDDEC_PARSE_SUCESS = 0xF1, VIDDEC_PARSE_FRMDONE = 0xF2, -}viddec_parser_error_t; +} viddec_parser_error_t; /* * - *Functions used by Parsers + *Functions used by Parsers * */ @@ -56,7 +56,7 @@ typedef enum */ int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits); -/* This function returns requested number of bits(<=32) with out incrementing au byte position +/* This function returns requested number of bits(<=32) with out incrementing au byte position */ int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits); diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h index 6d1d2be..3d20196 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h @@ -27,7 +27,7 @@ typedef enum PM_INBAND_MESSAGES = 0x500, PM_EOS = 0x501, PM_DISCONTINUITY = 0x502, -}pm_parse_state_t; +} pm_parse_state_t; /* This is a temporary structure for first pass sc parsing. index tells us where we are in list of es buffers cur_es points to current es buffer we are parsing. */ @@ -37,7 +37,7 @@ typedef struct uint32_t cur_offset; uint32_t cur_size; viddec_input_buffer_t *cur_es; -}viddec_pm_sc_cur_buf_t; +} viddec_pm_sc_cur_buf_t; typedef struct { @@ -46,7 +46,7 @@ typedef struct uint8_t frame_done; uint8_t first_buf_aligned; uint8_t using_next; -}vidded_pm_pending_tags_t; +} vidded_pm_pending_tags_t; /* This structure holds all necessary data required by parser manager for stream parsing. 
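/* Usage sketch for the two bitstream accessors declared above:
   viddec_pm_peek_bits() returns up to 32 bits without advancing the AU byte
   position, viddec_pm_get_bits() consumes them. Reading an H.264 NAL header
   is used purely as an example; return values are ignored for brevity. */
static void read_nal_header_sketch(void *parent)
{
    uint32_t bits = 0;

    viddec_pm_peek_bits(parent, &bits, 8);  /* look ahead, cursor unchanged */

    viddec_pm_get_bits(parent, &bits, 1);   /* forbidden_zero_bit */
    viddec_pm_get_bits(parent, &bits, 2);   /* nal_ref_idc        */
    viddec_pm_get_bits(parent, &bits, 5);   /* nal_unit_type      */
}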
*/ @@ -73,11 +73,11 @@ typedef struct #else uint32_t codec_data[MAX_CODEC_CXT_SIZE>>2]; #endif -}viddec_pm_cxt_t; +} viddec_pm_cxt_t; -/* +/* * - * Functions used by Parser kernel + * Functions used by Parser kernel * */ diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h index 703d65d..beca8d7 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h @@ -11,14 +11,14 @@ typedef struct uint8_t *buf; /* ptr to data */ int32_t sc_end_pos; /* return value end position of sc */ uint32_t phase; /* phase information(state) for sc */ -}viddec_sc_parse_cubby_cxt_t; +} viddec_sc_parse_cubby_cxt_t; typedef struct { uint16_t next_sc; uint8_t second_scprfx_length; uint8_t first_sc_detect; -}viddec_sc_prefix_state_t; +} viddec_sc_prefix_state_t; uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state); #endif diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h index 8dc2c53..999a067 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h @@ -10,15 +10,15 @@ typedef struct { #ifdef VBP - uint8_t *buf; + uint8_t *buf; #else uint8_t buf[CUBBY_SIZE + 8 + MIN_DATA];/* extra 8 bytes for alignmet, extra 8 bytes for old data */ #endif uint32_t buf_st; /* start pos in buf */ uint32_t buf_end; /* first invalid byte in buf */ uint32_t buf_index; /* current index in buf */ - uint32_t buf_bitoff; /* bit offset in current index position */ -}viddec_pm_utils_bstream_buf_cxt_t; + uint32_t buf_bitoff; /* bit offset in current index position */ +} viddec_pm_utils_bstream_buf_cxt_t; typedef struct { @@ -26,14 +26,14 @@ typedef struct uint32_t st; /* start index of valid byte */ uint32_t size;/* Total number of bytes in current buffer */ uint32_t bitoff; /* bit offset in first valid byte */ -}viddec_pm_utils_bstream_scratch_cxt_t; +} viddec_pm_utils_bstream_scratch_cxt_t; typedef struct { #ifdef VBP - /* counter of emulation prevention byte */ - uint32_t emulation_byte_counter; -#endif + /* counter of emulation prevention byte */ + uint32_t emulation_byte_counter; +#endif /* After First pass of scan we figure out how many bytes are in the current access unit(N bytes). We store the bstream buffer's first valid byte index wrt to accessunit in this variable */ uint32_t au_pos; @@ -49,7 +49,7 @@ typedef struct viddec_pm_utils_bstream_scratch_cxt_t scratch; /* Actual context which has valid data for get bits functionality */ viddec_pm_utils_bstream_buf_cxt_t bstrm_buf; -}viddec_pm_utils_bstream_cxt_t; +} viddec_pm_utils_bstream_cxt_t; void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul); @@ -76,13 +76,13 @@ static inline void viddec_pm_utils_bstream_get_au_offsets(viddec_pm_utils_bstrea *bit = cxt->bstrm_buf.buf_bitoff; *byte = cxt->au_pos + (cxt->bstrm_buf.buf_index - cxt->bstrm_buf.buf_st); - if(cxt->phase > 0) + if (cxt->phase > 0) { phase = phase - ((cxt->bstrm_buf.buf_bitoff != 0)? 
1: 0 ); } /* Assumption: we will never be parked on 0x3 byte of emulation prevention sequence */ *is_emul = (cxt->is_emul_reqd) && (phase > 0) && - (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index] == 0) && - (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x3); + (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index] == 0) && + (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x3); } #endif diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h index 98f2d46..7f406fd 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h @@ -18,7 +18,7 @@ typedef struct { uint32_t stpos; uint32_t edpos; -}viddec_pm_utils_au_bytepos_t; +} viddec_pm_utils_au_bytepos_t; /* this structure is for storing all necessary information for list handling */ typedef struct @@ -30,7 +30,7 @@ typedef struct viddec_input_buffer_t sc_ibuf[MAX_IBUFS_PER_SC]; /* Place to store buffer descriptors */ viddec_pm_utils_au_bytepos_t data[MAX_IBUFS_PER_SC]; /* place to store au byte positions */ int32_t total_bytes; /* total bytes for current access unit including first sc prefix*/ -}viddec_pm_utils_list_t; +} viddec_pm_utils_list_t; /* This function initialises the list to default values */ void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt); diff --git a/mix_vbp/viddec_fw/fw/parser/main.c b/mix_vbp/viddec_fw/fw/parser/main.c index 1bb368a..ad921b4 100644 --- a/mix_vbp/viddec_fw/fw/parser/main.c +++ b/mix_vbp/viddec_fw/fw/parser/main.c @@ -31,7 +31,7 @@ int SMDEXPORT viddec_fw_parser_sven_init(struct SVEN_FW_Globals *sven_fw_global * This function figures out if we crossesd watermark boundary on input data. * before represents the ES Queue data when we started and current represents ES Queue data * when we are ready to swap.Threshold is the amount of data specified by the driver to trigger an - * interrupt. + * interrupt. * We return true if threshold is between before and current. *------------------------------------------------------------------------------ */ @@ -63,7 +63,7 @@ static uint32_t viddec_fw_get_total_input_Q_data(uint32_t indx) * Function: mfd_round_robin * Params: * [in] pri: Priority of the stream - * [in] indx: stream id number of the last stream that was scheduled. + * [in] indx: stream id number of the last stream that was scheduled. * [out] qnum: Stream id of priority(pri) which has data. * This function is responsible for figuring out which stream needs to be scheduled next. 
* It starts after the last scheduled stream and walks through all streams until it finds @@ -80,15 +80,15 @@ uint32_t mfd_round_robin(uint32_t pri, int32_t *qnum, int32_t indx) int32_t i = CONFIG_IPC_FW_MAX_RX_QUEUES; uint32_t ret = 0; /* Go through all queues until we find a valid queue of reqd priority */ - while(i>0) + while (i>0) { indx++; - if(indx >= CONFIG_IPC_FW_MAX_RX_QUEUES) indx = 0; + if (indx >= CONFIG_IPC_FW_MAX_RX_QUEUES) indx = 0; - /* We should look only at queues which match priority and + /* We should look only at queues which match priority and in running state */ - if( (_dmem.stream_info[indx].state == 1) - && (_dmem.stream_info[indx].priority == pri)) + if ( (_dmem.stream_info[indx].state == 1) + && (_dmem.stream_info[indx].priority == pri)) { uint32_t inpt_avail=0, output_avail=0, wklds_avail =0 , pos; FW_IPC_ReceiveQue *rcv_q; @@ -99,7 +99,7 @@ uint32_t mfd_round_robin(uint32_t pri, int32_t *qnum, int32_t indx) pos = 0; /* Need at least current and next to proceed */ wklds_avail = (ipc_mq_read_avail(&fwipc->wkld_q[indx].mq, (int32_t *)&pos) >= (CONFIG_IPC_MESSAGE_MAX_SIZE << 1)); - if(inpt_avail && output_avail && wklds_avail) + if (inpt_avail && output_avail && wklds_avail) {/* Success condition: we have some data on input and enough space on output queue */ *qnum = indx; ret =1; @@ -132,7 +132,7 @@ void output_omar_wires( unsigned int value ) { #ifdef RTL_SIMULATION reg_write(CONFIG_IPC_ROFF_HOST_DOORBELL, value ); -#endif +#endif } /*------------------------------------------------------------------------------ @@ -150,17 +150,17 @@ void viddec_fw_init_swap_memory(unsigned int stream_id, unsigned int swap, unsig cxt = (mfd_pk_strm_cxt *)&(_dmem.srm_cxt); cxt_swap = (mfd_stream_info *)&(_dmem.stream_info[stream_id]); - if(swap) + if (swap) {/* Swap context into local memory */ cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), false, false); } - + { mfd_init_swap_memory(&(cxt->pm), cxt_swap->strm_type, cxt_swap->ddr_cxt+cxt_swap->cxt_size, clean); cxt_swap->wl_time = 0; cxt_swap->es_time = 0; } - if(swap) + if (swap) {/* Swap context into DDR */ cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), true, false); } @@ -176,7 +176,7 @@ static inline void viddec_fw_push_current_frame_to_output(FW_IPC_Handle *fwipc, { ipc_msg_data wkld_to_push; FwIPC_ReadMessage(fwipc, &fwipc->wkld_q[cur], (char *)&(wkld_to_push), sizeof(ipc_msg_data)); - FwIPC_SendMessage(fwipc, cur, (char *)&(wkld_to_push), sizeof(ipc_msg_data)); + FwIPC_SendMessage(fwipc, cur, (char *)&(wkld_to_push), sizeof(ipc_msg_data)); } /*------------------------------------------------------------------------------ @@ -189,7 +189,7 @@ static inline int viddec_fw_get_next_stream_to_schedule(void) { int32_t cur = -1; - if(mfd_round_robin(viddec_stream_priority_REALTIME, &cur, _dmem.g_pk_data.high_id)) + if (mfd_round_robin(viddec_stream_priority_REALTIME, &cur, _dmem.g_pk_data.high_id)) { /* On success store the stream id */ _dmem.g_pk_data.high_id = cur; @@ -197,7 +197,7 @@ static inline int viddec_fw_get_next_stream_to_schedule(void) else { /* Check Low priority Queues, Since we couldn't find a valid realtime stream */ - if(mfd_round_robin(viddec_stream_priority_BACKGROUND, &cur, _dmem.g_pk_data.low_id)) + if (mfd_round_robin(viddec_stream_priority_BACKGROUND, &cur, _dmem.g_pk_data.low_id)) { _dmem.g_pk_data.low_id = cur; } @@ -214,22 +214,22 @@ static inline int viddec_fw_get_next_stream_to_schedule(void) 
*------------------------------------------------------------------------------ */ static inline void viddec_fw_update_pending_interrupt_flag(int32_t cur, mfd_stream_info *cxt_swap, uint8_t pushed_a_workload, - uint32_t es_Q_data_at_start) + uint32_t es_Q_data_at_start) { - if(_dmem.int_status[cur].mask) + if (_dmem.int_status[cur].mask) { - if(!cxt_swap->pending_interrupt) + if (!cxt_swap->pending_interrupt) { uint32_t es_Q_data_now; uint8_t wmark_boundary_reached=false; es_Q_data_now = viddec_fw_get_total_input_Q_data((uint32_t)cur); wmark_boundary_reached = viddec_fw_check_watermark_boundary(es_Q_data_at_start, es_Q_data_now, cxt_swap->low_watermark); _dmem.int_status[cur].status = 0; - if(pushed_a_workload) + if (pushed_a_workload) { _dmem.int_status[cur].status |= VIDDEC_FW_WKLD_DATA_AVAIL; } - if(wmark_boundary_reached) + if (wmark_boundary_reached) { _dmem.int_status[cur].status |= VIDDEC_FW_INPUT_WATERMARK_REACHED; } @@ -247,21 +247,21 @@ static inline void viddec_fw_handle_error_and_inband_messages(int32_t cur, uint3 FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem); viddec_fw_push_current_frame_to_output(fwipc, cur); - switch(pm_ret) + switch (pm_ret) { - case PM_EOS: - case PM_OVERFLOW: - { - viddec_fw_init_swap_memory(cur, false, true); - } - break; - case PM_DISCONTINUITY: - { - viddec_fw_init_swap_memory(cur, false, false); - } + case PM_EOS: + case PM_OVERFLOW: + { + viddec_fw_init_swap_memory(cur, false, true); + } + break; + case PM_DISCONTINUITY: + { + viddec_fw_init_swap_memory(cur, false, false); + } + break; + default: break; - default: - break; } } @@ -290,10 +290,10 @@ void viddec_fw_debug_scheduled_stream_state(int32_t indx, int32_t start) /*------------------------------------------------------------------------------ * Function: viddec_fw_process_async_queues(A.K.A -> Parser Kernel) * This function is responsible for handling the asynchronous queues. - * + * * The first step is to figure out which stream to run. The current algorithm - * will go through all high priority queues for a valid stream, if not found we - * go through lower priority queues. + * will go through all high priority queues for a valid stream, if not found we + * go through lower priority queues. * * If a valid stream is found we swap the required context from DDR to DMEM and do all necessary * things to setup the stream. @@ -308,8 +308,8 @@ static inline int32_t viddec_fw_process_async_queues() int32_t cur = -1; cur = viddec_fw_get_next_stream_to_schedule(); - - if(cur != -1) + + if (cur != -1) { FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem); FW_IPC_ReceiveQue *rcv_q; @@ -321,7 +321,7 @@ static inline int32_t viddec_fw_process_async_queues() mfd_stream_info *cxt_swap; cxt = (mfd_pk_strm_cxt *)&(_dmem.srm_cxt); cxt_swap = (mfd_stream_info *)&(_dmem.stream_info[cur]); - + /* Step 1: Swap rodata to local memory. Not doing this currently as all the rodata fits in local memory. 
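/* The swap-in/swap-out pattern used on both sides of the parse loop (Step 2
   swaps the context in, Step 4/5 swap it back out), condensed into one helper
   for reference. Only the helper itself is new; the cp_using_dma() arguments
   are exactly those used in this file (fourth argument: direction, true =
   local memory to DDR; fifth: no endian swap). */
static void swap_stream_context_sketch(mfd_pk_strm_cxt *cxt,
                                       mfd_stream_info *cxt_swap,
                                       char to_ddr)
{
    cp_using_dma(cxt_swap->ddr_cxt,         /* per-stream swap space in DDR   */
                 (uint32_t) &(cxt->pm),     /* parser manager context in DMEM */
                 sizeof(viddec_pm_cxt_t),
                 to_ddr, false);
}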
*/ {/* Step 2: Swap context into local memory */ cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), false, false); @@ -349,76 +349,76 @@ static inline int32_t viddec_fw_process_async_queues() get_wdog(&es_t1); cxt_swap->es_time += get_total_ticks(es_t0, es_t1); } - switch(pm_ret) + switch (pm_ret) { - case PM_EOS: - case PM_WKLD_DONE: - case PM_OVERFLOW: - case PM_DISCONTINUITY: - {/* Finished a frame worth of data or encountered fatal error*/ - stream_active = false; + case PM_EOS: + case PM_WKLD_DONE: + case PM_OVERFLOW: + case PM_DISCONTINUITY: + {/* Finished a frame worth of data or encountered fatal error*/ + stream_active = false; + } + break; + case PM_NO_DATA: + { + uint32_t next_ret=0; + if ( (NULL != data) && (0 != cxt_swap->es_time) ) + { + /* print performance info for this buffer */ + WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PK_ES_DONE, (int)cur, (int)cxt_swap->es_time, (int)cxt->input.phys, + (int)cxt->input.len, (int)cxt->input.id, (int)cxt->input.flags ); + cxt_swap->es_time = 0; } - break; - case PM_NO_DATA: + + next_ret = FwIPC_ReadMessage(fwipc, rcv_q, (char *)&(cxt->input), sizeof(ipc_msg_data)); + if (next_ret != 0) { - uint32_t next_ret=0; - if ( (NULL != data) && (0 != cxt_swap->es_time) ) - { - /* print performance info for this buffer */ - WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PK_ES_DONE, (int)cur, (int)cxt_swap->es_time, (int)cxt->input.phys, - (int)cxt->input.len, (int)cxt->input.id, (int)cxt->input.flags ); - cxt_swap->es_time = 0; - } - - next_ret = FwIPC_ReadMessage(fwipc, rcv_q, (char *)&(cxt->input), sizeof(ipc_msg_data)); - if(next_ret != 0) - { - data = &(cxt->input); - WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PK_ES_START, (int)cur, (int)cxt_swap->wl_time, - (int)cxt->input.phys, (int)cxt->input.len, (int)cxt->input.id, (int)cxt->input.flags ); - } - else - {/* No data on input queue */ - cxt_swap->buffered_data = 0; - stream_active = false; - } + data = &(cxt->input); + WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PK_ES_START, (int)cur, (int)cxt_swap->wl_time, + (int)cxt->input.phys, (int)cxt->input.len, (int)cxt->input.id, (int)cxt->input.flags ); } - break; - default: - {/* Not done with current buffer */ - data = NULL; + else + {/* No data on input queue */ + cxt_swap->buffered_data = 0; + stream_active = false; } - break; } - }while(stream_active); + break; + default: + {/* Not done with current buffer */ + data = NULL; + } + break; + } + } while (stream_active); get_wdog(&time); cxt_swap->wl_time += get_total_ticks(start_time, time); /* Step 5: If workload done push workload out */ - switch(pm_ret) + switch (pm_ret) { - case PM_EOS: - case PM_WKLD_DONE: - case PM_OVERFLOW: - case PM_DISCONTINUITY: - {/* Push current workload as we are done with the frame */ - cxt_swap->buffered_data = (PM_WKLD_DONE == pm_ret) ? true: false; - viddec_pm_update_time(&(cxt->pm), cxt_swap->wl_time); - - /* xmit performance info for this workload output */ - WRITE_SVEN( SVEN_MODULE_EVENT_GV_FW_PK_WL_DONE, (int)cur, (int)cxt_swap->wl_time, (int)cxt->wkld1.phys, - (int)cxt->wkld1.len, (int)cxt->wkld1.id, (int)cxt->wkld1.flags ); - cxt_swap->wl_time = 0; - - viddec_fw_push_current_frame_to_output(fwipc, cur); - if(pm_ret != PM_WKLD_DONE) - { - viddec_fw_handle_error_and_inband_messages(cur, pm_ret); - } - pushed_a_workload = true; + case PM_EOS: + case PM_WKLD_DONE: + case PM_OVERFLOW: + case PM_DISCONTINUITY: + {/* Push current workload as we are done with the frame */ + cxt_swap->buffered_data = (PM_WKLD_DONE == pm_ret) ? 
true: false; + viddec_pm_update_time(&(cxt->pm), cxt_swap->wl_time); + + /* xmit performance info for this workload output */ + WRITE_SVEN( SVEN_MODULE_EVENT_GV_FW_PK_WL_DONE, (int)cur, (int)cxt_swap->wl_time, (int)cxt->wkld1.phys, + (int)cxt->wkld1.len, (int)cxt->wkld1.id, (int)cxt->wkld1.flags ); + cxt_swap->wl_time = 0; + + viddec_fw_push_current_frame_to_output(fwipc, cur); + if (pm_ret != PM_WKLD_DONE) + { + viddec_fw_handle_error_and_inband_messages(cur, pm_ret); } + pushed_a_workload = true; + } + break; + default: break; - default: - break; } /* Update information on whether we have active interrupt for this stream */ viddec_fw_update_pending_interrupt_flag(cur, cxt_swap, pushed_a_workload, es_Q_data_at_start); @@ -464,7 +464,7 @@ static inline void process_command(uint32_t cmd_id, unsigned char *command) * what are synchronous messages? Anything releated to teardown or opening a stream Ex: open, close, flush etc. * * Only once synchronous message at a time. When a synchronous message its id is usually in cp doorbell. Once - * we are done handling synchronous message through auto api we release doorbell to let the host write next + * we are done handling synchronous message through auto api we release doorbell to let the host write next * message. *------------------------------------------------------------------------------ */ @@ -473,7 +473,7 @@ static inline int32_t viddec_fw_process_sync_queues(unsigned char *msg) { int32_t ret = -1; - if(0 == reg_read(CONFIG_IPC_ROFF_RISC_DOORBELL_STATUS)) + if (0 == reg_read(CONFIG_IPC_ROFF_RISC_DOORBELL_STATUS)) { uint32_t command1=0; command1 = reg_read(CONFIG_IPC_ROFF_RISC_RX_DOORBELL); @@ -494,11 +494,11 @@ static inline uint32_t viddec_fw_check_for_pending_int(void) { uint32_t i=0, ret=false; /* start from 0 to max streams that fw can handle*/ - while(i < FW_SUPPORTED_STREAMS) + while (i < FW_SUPPORTED_STREAMS) { - if(_dmem.stream_info[i].state == 1) + if (_dmem.stream_info[i].state == 1) { - if((_dmem.stream_info[i].pending_interrupt) && _dmem.int_status[i].mask) + if ((_dmem.stream_info[i].pending_interrupt) && _dmem.int_status[i].mask) { ret = true; } @@ -522,7 +522,7 @@ static inline void viddec_fw_clear_processed_int(void) { uint32_t i=0; /* start from 0 to max streams that fw can handle*/ - while(i < FW_SUPPORTED_STREAMS) + while (i < FW_SUPPORTED_STREAMS) { //if(_dmem.stream_info[i].state == 1) _dmem.stream_info[i].pending_interrupt = false; @@ -535,9 +535,9 @@ static inline void viddec_fw_clear_processed_int(void) * Function: viddec_fw_int_host * This function interrupts host if data is available for host or any other status * is valid which the host configures the FW to. - * There is only one interrupt line so this is a shared Int for all streams, Host should + * There is only one interrupt line so this is a shared Int for all streams, Host should * look at status of all streams when it receives a Int. - * The FW will interrupt the host only if host doorbell is free, in other words the host + * The FW will interrupt the host only if host doorbell is free, in other words the host * should always make the doorbell free at the End of its ISR. 
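/* Host-side counterpart implied by the comment above: since the interrupt
   line is shared, the host ISR must scan every stream's status and release
   the doorbell only at the very end, or the firmware will never raise
   another interrupt. host_read_stream_status() and host_clear_doorbell()
   are hypothetical names used for illustration only. */
static void host_parser_isr_sketch(void)
{
    unsigned int i;

    for (i = 0; i < FW_SUPPORTED_STREAMS; i++)
        host_read_stream_status(i);     /* shared INT: check every stream   */

    host_clear_doorbell();              /* free doorbell last, re-arming FW */
}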
*------------------------------------------------------------------------------ */ @@ -545,9 +545,9 @@ static inline void viddec_fw_clear_processed_int(void) static inline int32_t viddec_fw_int_host() { /* We Interrupt the host only if host is ready to receive an interrupt */ - if((reg_read(CONFIG_IPC_ROFF_HOST_DOORBELL_STATUS) & GV_DOORBELL_STATS) == GV_DOORBELL_STATS) + if ((reg_read(CONFIG_IPC_ROFF_HOST_DOORBELL_STATUS) & GV_DOORBELL_STATS) == GV_DOORBELL_STATS) { - if(viddec_fw_check_for_pending_int()) + if (viddec_fw_check_for_pending_int()) { /* If a pending interrupt is found trigger INT */ reg_write(CONFIG_IPC_ROFF_HOST_DOORBELL, VIDDEC_FW_PARSER_IPC_HOST_INT); @@ -576,13 +576,13 @@ volatile unsigned int stack_corrupted __attribute__ ((section (".stckovrflwchk") int main(void) { unsigned char *msg = (uint8_t *)&(_dmem.buf.data[0]); - + /* We wait until host reads sync message */ reg_write(CONFIG_IPC_ROFF_HOST_RX_DOORBELL, GV_FW_IPC_HOST_SYNC); while ( GV_DOORBELL_STATS != reg_read(CONFIG_IPC_ROFF_HOST_DOORBELL_STATUS) ) { /*poll register until done bit is set */ - /* Host re-writes Vsparc DRAM (BSS) in this loop and will hit the DONE bit when complete */ + /* Host re-writes Vsparc DRAM (BSS) in this loop and will hit the DONE bit when complete */ } enable_intr(); /* Initialize State for queues */ @@ -591,16 +591,16 @@ int main(void) _dmem.g_pk_data.high_id = _dmem.g_pk_data.low_id = -1; viddec_pm_init_ops(); stack_corrupted = 0xDEADBEEF; - while(1) + while (1) { viddec_fw_process_sync_queues(msg); viddec_fw_process_async_queues(); viddec_fw_int_host(); #if 0 - if(stack_corrupted != 0xDEADBEEF) + if (stack_corrupted != 0xDEADBEEF) { WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_FATAL_STACK_CORRPON, 0, 0, 0, 0, 0, 0); - while(1); + while (1); } #endif } diff --git a/mix_vbp/viddec_fw/fw/parser/utils.c b/mix_vbp/viddec_fw/fw/parser/utils.c index 5a22e5b..dd65bf5 100644 --- a/mix_vbp/viddec_fw/fw/parser/utils.c +++ b/mix_vbp/viddec_fw/fw/parser/utils.c @@ -20,11 +20,11 @@ void *memcpy(void *dest, const void *src, uint32_t n) ptr8_to = (uint8_t *)dest; trail = ((uint32_t)ptr8_frm) & 0x3; - if((trail == (((uint32_t)ptr8_to) & 0x3)) && (n > 4)) + if ((trail == (((uint32_t)ptr8_to) & 0x3)) && (n > 4)) { /* check to see what's the offset bytes to go to a word alignment */ bytes_left -= trail; - while(align > 0){ + while (align > 0) { *ptr8_to ++ = *ptr8_frm ++; trail--; } @@ -34,14 +34,14 @@ void *memcpy(void *dest, const void *src, uint32_t n) ptr32_to = (uint32_t *)ptr8_to; ptr32_frm = (uint32_t *)ptr8_frm; /* copy word by word */ - while(bytes_left > 0){ + while (bytes_left > 0) { *ptr32_to ++ = *ptr32_frm ++; bytes_left -= 4; } /* If there are any trailing bytes do a byte copy */ ptr8_to = (uint8_t *)ptr32_to; ptr8_frm = (uint8_t *)ptr32_frm; - while(trail > 0){ + while (trail > 0) { *ptr8_to ++ = *ptr8_frm ++; trail--; } @@ -49,7 +49,7 @@ void *memcpy(void *dest, const void *src, uint32_t n) else {/* case when src and dest addr are not on same alignment. 
Just do a byte copy */ - while(bytes_left > 0){ + while (bytes_left > 0) { *ptr8_to ++ = *ptr8_frm ++; bytes_left -= 1; } @@ -72,11 +72,11 @@ void *memset(void *s, int32_t c, uint32_t n) mask = c & 0xFF; mask |= (mask << 8); mask |= (mask << 16); - if(n >= 4) + if (n >= 4) { uint32_t trail=0; trail = 4 - (((uint32_t)ptr8) & 0x3); - if(trail < 4) + if (trail < 4) { ptr32 = (uint32_t *)(((uint32_t)ptr8) & ~0x3); data = (*ptr32 >> (8*trail)) << (8*trail); @@ -86,20 +86,20 @@ void *memset(void *s, int32_t c, uint32_t n) ptr8 += trail; } ptr32 = (uint32_t *)((uint32_t)ptr8); - while(bytes_left >= 4) + while (bytes_left >= 4) { *ptr32 = mask; ptr32++; bytes_left -=4; } - if(bytes_left > 0) + if (bytes_left > 0) { data = (*ptr32 << (8*bytes_left)) >> (8*bytes_left); data |= (mask << (32 - (8*bytes_left))); *ptr32=data; } } - + return s; } @@ -113,7 +113,7 @@ void *memset(void *s, int32_t c, uint32_t n) * [in] to_ddr : Direction of copy, if true copy to ddr else copy to local memory. * [in] swap : Enable or disable byte swap(endian). * [out] return : Actual number of bytes copied, which can be more than what was requested - * since we can only copy words at a time. + * since we can only copy words at a time. * Limitations: DMA can transfer Words only, Local addr & DDR addr should be word aligned. *------------------------------------------------------------------------------ */ @@ -121,7 +121,7 @@ uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, cha { uint32_t val=0, wrote = size; - while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_BUSY) != 0) + while ((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_BUSY) != 0) { /* wait if DMA is busy with a transcation Error condition??*/ } @@ -133,17 +133,17 @@ uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, cha val=(wrote & 0xffff) << 2; reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE); val |= DMA_CTRL_STATUS_START; - /* If size > 64 use 128 byte burst speed */ - if(wrote > 64) + /* If size > 64 use 128 byte burst speed */ + if (wrote > 64) val |= (1<<18); - if(swap) /* Endian swap if needed */ + if (swap) /* Endian swap if needed */ val |= DMA_CTRL_STATUS_SWAP; - if(to_ddr) + if (to_ddr) val = val | DMA_CTRL_STATUS_DIRCN; reg_write(DMA_CONTROL_STATUS, val); - while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_DONE) == 0) + while ((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_DONE) == 0) { - /* wait till DMA is done */ + /* wait till DMA is done */ } reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE); @@ -160,7 +160,7 @@ uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, cha * [in] to_ddr : Direction of copy, if true copy to ddr else copy to local memory. * [in] swap : Enable or disable byte swap(endian). * [out] return : Actual number of bytes copied, which can be more than what was requested - * since we can only copy words at a time. + * since we can only copy words at a time. * Limitations: DMA can transfer Words only, Local addr & DDR addr should be word aligned. 
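/* The "Words only" limitation above means callers must pass word-aligned
   addresses and accept a copy rounded up to a whole number of words. A small
   illustrative wrapper making both constraints explicit; the wrapper itself
   is an addition, not part of this driver. */
static uint32_t dma_copy_checked_sketch(uint32_t ddr_addr, uint32_t local_addr,
                                        uint32_t size, char to_ddr, char swap)
{
    if ((ddr_addr & 0x3) || (local_addr & 0x3))
        return 0;                     /* both addresses must be word aligned */

    /* Return value may exceed 'size': it is rounded up to whole words. */
    return cp_using_dma_phys(ddr_addr, local_addr, size, to_ddr, swap);
}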
*------------------------------------------------------------------------------ */ @@ -168,7 +168,7 @@ uint32_t cp_using_dma_phys(uint32_t ddr_addr, uint32_t local_addr, uint32_t size { uint32_t val=0, wrote = size; - while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_BUSY) != 0) + while ((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_BUSY) != 0) { /* wait if DMA is busy with a transcation Error condition??*/ } @@ -180,17 +180,17 @@ uint32_t cp_using_dma_phys(uint32_t ddr_addr, uint32_t local_addr, uint32_t size val=(wrote & 0xffff) << 2; reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE); val |= DMA_CTRL_STATUS_START; - /* If size > 64 use 128 byte burst speed */ - if(wrote > 64) + /* If size > 64 use 128 byte burst speed */ + if (wrote > 64) val |= (1<<18); - if(swap) /* Endian swap if needed */ + if (swap) /* Endian swap if needed */ val |= DMA_CTRL_STATUS_SWAP; - if(to_ddr) + if (to_ddr) val = val | DMA_CTRL_STATUS_DIRCN; reg_write(DMA_CONTROL_STATUS, val); - while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_DONE) == 0) + while ((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_DONE) == 0) { - /* wait till DMA is done */ + /* wait till DMA is done */ } reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE); @@ -201,17 +201,17 @@ void update_ctrl_reg(uint8_t enable, uint32_t mask) { uint32_t read_val = 0; read_val = reg_read(CONFIG_CP_CONTROL_REG); - if(enable) + if (enable) { read_val = read_val | mask; } else { - read_val = read_val & ~mask; + read_val = read_val & ~mask; } - reg_write(CONFIG_CP_CONTROL_REG, read_val); + reg_write(CONFIG_CP_CONTROL_REG, read_val); return; - + } extern uint32_t sven_get_timestamp(); @@ -225,24 +225,24 @@ uint32_t set_wdog(uint32_t offset) update_ctrl_reg(1, WATCH_DOG_ENABLE); return offset & WATCH_DOG_MASK; #else - return sven_get_timestamp(); -#endif + return sven_get_timestamp(); +#endif } void get_wdog(uint32_t *value) { -#ifdef B0_TIMER_FIX +#ifdef B0_TIMER_FIX *value = reg_read(WATCH_DOG_COUNTER) & WATCH_DOG_MASK; reg_write(INT_REG, ~INT_WDOG_ENABLE); update_ctrl_reg(0, WATCH_DOG_ENABLE); #else *value = sven_get_timestamp(); -#endif +#endif } uint32_t get_total_ticks(uint32_t start, uint32_t end) { - uint32_t value; + uint32_t value; #ifdef B0_TIMER_FIX value = (start-end) + (start*timer); timer=0; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index 98cc0d0..dd3c12d 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -6,7 +6,7 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
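/* The first function in this file resolves every parser entry point with
   dlsym() and fails the load when a symbol is missing. A sketch of the
   generic shape of that pattern; the handle and symbol arguments are
   placeholders, and ETRACE mirrors the error reporting already used here. */
#include <dlfcn.h>

static void *resolve_entry_sketch(void *handle, const char *symbol)
{
    void *fn = dlsym(handle, symbol);  /* NULL when the symbol is absent */
    if (NULL == fn)
    {
        ETRACE("Failed to set entry point.");
    }
    return fn;
}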
*/ -#include +//#include #include #include "h264.h" @@ -35,7 +35,7 @@ static H264_BS_PATTERN bitstream_pattern = H264_BS_SC_PREFIXED; /* default scaling list table */ unsigned char Default_4x4_Intra[16] = -{ +{ 6,13,20,28, 13,20,28,32, 20,28,32,37, @@ -43,7 +43,7 @@ unsigned char Default_4x4_Intra[16] = }; unsigned char Default_4x4_Inter[16] = -{ +{ 10,14,20,24, 14,20,24,27, 20,24,27,30, @@ -51,7 +51,7 @@ unsigned char Default_4x4_Inter[16] = }; unsigned char Default_8x8_Intra[64] = -{ +{ 6,10,13,16,18,23,25,27, 10,11,16,18,23,25,27,29, 13,16,18,23,25,27,29,31, @@ -74,7 +74,7 @@ unsigned char Default_8x8_Inter[64] = 24,25,27,28,30,32,33,35 }; -unsigned char quant_flat[16] = +unsigned char quant_flat[16] = { 16,16,16,16, 16,16,16,16, @@ -83,7 +83,7 @@ unsigned char quant_flat[16] = }; unsigned char quant8_flat[64] = -{ +{ 16,16,16,16,16,16,16,16, 16,16,16,16,16,16,16,16, 16,16,16,16,16,16,16,16, @@ -94,7 +94,7 @@ unsigned char quant8_flat[64] = 16,16,16,16,16,16,16,16 }; -unsigned char* UseDefaultList[8] = +unsigned char* UseDefaultList[8] = { Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Inter, Default_4x4_Inter, Default_4x4_Inter, @@ -102,7 +102,7 @@ unsigned char* UseDefaultList[8] = Default_8x8_Inter }; -static uint8 h264_aspect_ratio_table[][2] = +static uint8 h264_aspect_ratio_table[][2] = { {0, 0}, {1, 1}, @@ -132,43 +132,43 @@ static uint8 h264_aspect_ratio_table[][2] = */ uint32 vbp_init_parser_entries_h264(vbp_context *pcontext) { - if (NULL == pcontext->parser_ops) - { - return VBP_PARM; - } - pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_h264_init"); - if (NULL == pcontext->parser_ops->init) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } - - pcontext->parser_ops->parse_sc = viddec_parse_sc; - - pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264_parse"); - if (NULL == pcontext->parser_ops->parse_syntax) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } - - pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264_get_context_size"); - if (NULL == pcontext->parser_ops->get_cxt_size) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } - - pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_h264_wkld_done"); - if (NULL == pcontext->parser_ops->is_wkld_done) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } - - /* entry point not needed */ - pcontext->parser_ops->is_frame_start = NULL; - return VBP_OK; + if (NULL == pcontext->parser_ops) + { + return VBP_PARM; + } + pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_h264_init"); + if (NULL == pcontext->parser_ops->init) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->parse_sc = viddec_parse_sc; + + pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264_parse"); + if (NULL == pcontext->parser_ops->parse_syntax) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264_get_context_size"); + if (NULL == pcontext->parser_ops->get_cxt_size) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_h264_wkld_done"); + if (NULL == pcontext->parser_ops->is_wkld_done) + { + ETRACE ("Failed to set entry point." 
); + return VBP_LOAD; + } + + /* entry point not needed */ + pcontext->parser_ops->is_frame_start = NULL; + return VBP_OK; } @@ -177,1340 +177,1340 @@ uint32 vbp_init_parser_entries_h264(vbp_context *pcontext) */ uint32 vbp_allocate_query_data_h264(vbp_context *pcontext) { - if (NULL != pcontext->query_data) - { - return VBP_PARM; - } - - pcontext->query_data = NULL; - vbp_data_h264 *query_data = NULL; - - query_data = g_try_new0(vbp_data_h264, 1); - if (NULL == query_data) - { - goto cleanup; - } - - /* assign the pointer */ - pcontext->query_data = (void *)query_data; - - query_data->pic_data = g_try_new0(vbp_picture_data_h264, MAX_NUM_PICTURES); - if (NULL == query_data->pic_data) - { - goto cleanup; - } - - int i; - for (i = 0; i < MAX_NUM_PICTURES; i++) - { - query_data->pic_data[i].pic_parms = g_try_new0(VAPictureParameterBufferH264, 1); - if (NULL == query_data->pic_data[i].pic_parms) - { - goto cleanup; - } - query_data->pic_data[i].num_slices = 0; - query_data->pic_data[i].slc_data = g_try_new0(vbp_slice_data_h264, MAX_NUM_SLICES); - if (NULL == query_data->pic_data[i].slc_data) - { - goto cleanup; - } - } - - - query_data->IQ_matrix_buf = g_try_new0(VAIQMatrixBufferH264, 1); - if (NULL == query_data->IQ_matrix_buf) - { - goto cleanup; - } - - query_data->codec_data = g_try_new0(vbp_codec_data_h264, 1); - if (NULL == query_data->codec_data) - { - goto cleanup; - } - - return VBP_OK; + if (NULL != pcontext->query_data) + { + return VBP_PARM; + } + + pcontext->query_data = NULL; + vbp_data_h264 *query_data = NULL; + + query_data = g_try_new0(vbp_data_h264, 1); + if (NULL == query_data) + { + goto cleanup; + } + + /* assign the pointer */ + pcontext->query_data = (void *)query_data; + + query_data->pic_data = g_try_new0(vbp_picture_data_h264, MAX_NUM_PICTURES); + if (NULL == query_data->pic_data) + { + goto cleanup; + } + + int i; + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].pic_parms = g_try_new0(VAPictureParameterBufferH264, 1); + if (NULL == query_data->pic_data[i].pic_parms) + { + goto cleanup; + } + query_data->pic_data[i].num_slices = 0; + query_data->pic_data[i].slc_data = g_try_new0(vbp_slice_data_h264, MAX_NUM_SLICES); + if (NULL == query_data->pic_data[i].slc_data) + { + goto cleanup; + } + } + + + query_data->IQ_matrix_buf = g_try_new0(VAIQMatrixBufferH264, 1); + if (NULL == query_data->IQ_matrix_buf) + { + goto cleanup; + } + + query_data->codec_data = g_try_new0(vbp_codec_data_h264, 1); + if (NULL == query_data->codec_data) + { + goto cleanup; + } + + return VBP_OK; cleanup: - vbp_free_query_data_h264(pcontext); - - return VBP_MEM; + vbp_free_query_data_h264(pcontext); + + return VBP_MEM; } uint32 vbp_free_query_data_h264(vbp_context *pcontext) { - if (NULL == pcontext->query_data) - { - return VBP_OK; - } - - int i; - vbp_data_h264 *query_data; - query_data = (vbp_data_h264 *)pcontext->query_data; - - if (query_data->pic_data) - { - for (i = 0; i < MAX_NUM_PICTURES; i++) - { - g_free(query_data->pic_data[i].slc_data); - g_free(query_data->pic_data[i].pic_parms); - } - g_free(query_data->pic_data); - } - - g_free(query_data->IQ_matrix_buf); - g_free(query_data->codec_data); - g_free(query_data); - - pcontext->query_data = NULL; + if (NULL == pcontext->query_data) + { + return VBP_OK; + } + + int i; + vbp_data_h264 *query_data; + query_data = (vbp_data_h264 *)pcontext->query_data; + + if (query_data->pic_data) + { + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + free(query_data->pic_data[i].slc_data); + free(query_data->pic_data[i].pic_parms); + } 
+ free(query_data->pic_data); + } + + free(query_data->IQ_matrix_buf); + free(query_data->codec_data); + free(query_data); + + pcontext->query_data = NULL; NAL_length_size = 0; length_prefix_verified = 0; bitstream_pattern = H264_BS_SC_PREFIXED; - return VBP_OK; + return VBP_OK; } static inline uint16_t vbp_utils_ntohs(uint8_t* p) { - uint16_t i = ((*p) << 8) + ((*(p+1))); - return i; + uint16_t i = ((*p) << 8) + ((*(p+1))); + return i; } static inline uint32_t vbp_utils_ntohl(uint8_t* p) { - uint32_t i = ((*p) << 24) + ((*(p+1)) << 16) + ((*(p+2)) << 8) + ((*(p+3))); - return i; + uint32_t i = ((*p) << 24) + ((*(p+1)) << 16) + ((*(p+2)) << 8) + ((*(p+3))); + return i; } static inline void vbp_set_VAPicture_h264( - int curr_picture_structure, - int bottom_field, - frame_store* store, - VAPictureH264* pic) + int curr_picture_structure, + int bottom_field, + frame_store* store, + VAPictureH264* pic) { - if (FRAME == curr_picture_structure) - { - if (FRAME != viddec_h264_get_dec_structure(store)) - { - WTRACE("Reference picture structure is not frame for current frame picture!"); - } - pic->flags = 0; - pic->TopFieldOrderCnt = store->frame.poc; - pic->BottomFieldOrderCnt = store->frame.poc; - } - else - { - if (FRAME == viddec_h264_get_dec_structure(store)) - { - WTRACE("reference picture structure is frame for current field picture!"); - } - if (bottom_field) - { - pic->flags = VA_PICTURE_H264_BOTTOM_FIELD; - pic->TopFieldOrderCnt = store->top_field.poc; - pic->BottomFieldOrderCnt = store->bottom_field.poc; - } - else - { - pic->flags = VA_PICTURE_H264_TOP_FIELD; - pic->TopFieldOrderCnt = store->top_field.poc; - pic->BottomFieldOrderCnt = store->bottom_field.poc; - } - } + if (FRAME == curr_picture_structure) + { + if (FRAME != viddec_h264_get_dec_structure(store)) + { + WTRACE("Reference picture structure is not frame for current frame picture!"); + } + pic->flags = 0; + pic->TopFieldOrderCnt = store->frame.poc; + pic->BottomFieldOrderCnt = store->frame.poc; + } + else + { + if (FRAME == viddec_h264_get_dec_structure(store)) + { + WTRACE("reference picture structure is frame for current field picture!"); + } + if (bottom_field) + { + pic->flags = VA_PICTURE_H264_BOTTOM_FIELD; + pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; + } + else + { + pic->flags = VA_PICTURE_H264_TOP_FIELD; + pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; + } + } } static inline void vbp_set_slice_ref_list_h264( - struct h264_viddec_parser* h264_parser, - VASliceParameterBufferH264 *slc_parms) -{ - int i, j; - int num_ref_idx_active = 0; - h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader); - uint8_t* p_list = NULL; - VAPictureH264* refPicListX = NULL; - frame_store* fs = NULL; - - /* initialize ref picutre list, set picture id and flags to invalid. */ - - for (i = 0; i < 2; i++) - { - refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]); - for (j = 0; j < 32; j++) - { - refPicListX->picture_id = VA_INVALID_SURFACE; - refPicListX->frame_idx = 0; - refPicListX->flags = VA_PICTURE_H264_INVALID; - refPicListX->TopFieldOrderCnt = 0; - refPicListX->BottomFieldOrderCnt = 0; - refPicListX++; - } - } - - for (i = 0; i < 2; i++) - { - refPicListX = (i == 0) ? 
&(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]); - - if ((i == 0) && - ((h264_PtypeB == slice_header->slice_type) || - (h264_PtypeP == slice_header->slice_type))) - { - num_ref_idx_active = slice_header->num_ref_idx_l0_active; - if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag) - { - p_list = h264_parser->info.slice_ref_list0; - } - else - { - p_list = h264_parser->info.dpb.listX_0; - } - } - else if((i == 1) && (h264_PtypeB == slice_header->slice_type)) - { - num_ref_idx_active = slice_header->num_ref_idx_l1_active; - if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag) - { - p_list = h264_parser->info.slice_ref_list1; - } - else - { - p_list = h264_parser->info.dpb.listX_1; - } - } - else - { - num_ref_idx_active = 0; - p_list = NULL; - } - - - for (j = 0; j < num_ref_idx_active; j++) - { - fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]); - - /* bit 5 indicates if reference picture is bottom field */ - vbp_set_VAPicture_h264( - h264_parser->info.img.structure, - (p_list[j] & 0x20) >> 5, - fs, - refPicListX); - - refPicListX->frame_idx = fs->frame_num; - refPicListX->flags |= viddec_h264_get_is_long_term(fs) ? VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE; - refPicListX++; - } - } + struct h264_viddec_parser* h264_parser, + VASliceParameterBufferH264 *slc_parms) +{ + int i, j; + int num_ref_idx_active = 0; + h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader); + uint8_t* p_list = NULL; + VAPictureH264* refPicListX = NULL; + frame_store* fs = NULL; + + /* initialize ref picutre list, set picture id and flags to invalid. */ + + for (i = 0; i < 2; i++) + { + refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]); + for (j = 0; j < 32; j++) + { + refPicListX->picture_id = VA_INVALID_SURFACE; + refPicListX->frame_idx = 0; + refPicListX->flags = VA_PICTURE_H264_INVALID; + refPicListX->TopFieldOrderCnt = 0; + refPicListX->BottomFieldOrderCnt = 0; + refPicListX++; + } + } + + for (i = 0; i < 2; i++) + { + refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]); + + if ((i == 0) && + ((h264_PtypeB == slice_header->slice_type) || + (h264_PtypeP == slice_header->slice_type))) + { + num_ref_idx_active = slice_header->num_ref_idx_l0_active; + if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag) + { + p_list = h264_parser->info.slice_ref_list0; + } + else + { + p_list = h264_parser->info.dpb.listX_0; + } + } + else if ((i == 1) && (h264_PtypeB == slice_header->slice_type)) + { + num_ref_idx_active = slice_header->num_ref_idx_l1_active; + if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag) + { + p_list = h264_parser->info.slice_ref_list1; + } + else + { + p_list = h264_parser->info.dpb.listX_1; + } + } + else + { + num_ref_idx_active = 0; + p_list = NULL; + } + + + for (j = 0; j < num_ref_idx_active; j++) + { + fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]); + + /* bit 5 indicates if reference picture is bottom field */ + vbp_set_VAPicture_h264( + h264_parser->info.img.structure, + (p_list[j] & 0x20) >> 5, + fs, + refPicListX); + + refPicListX->frame_idx = fs->frame_num; + refPicListX->flags |= viddec_h264_get_is_long_term(fs) ? 
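Each entry in the parser's reference list packs a DPB frame-store index in its low five bits and a bottom-field flag in bit 5, which is why the code above masks with 0x1f and shifts bit 0x20 down. A tiny sketch of that decoding (the entry value is illustrative):

    #include <stdint.h>
    #include <stdio.h>

    /* Decode one packed reference-list entry:
     *   frame-store index = entry & 0x1f
     *   bottom-field flag = (entry & 0x20) >> 5
     */
    int main(void)
    {
        uint8_t entry = 0x2a;                    /* 0b101010 */
        unsigned fs_idx = entry & 0x1f;          /* -> 10 */
        unsigned bottom = (entry & 0x20) >> 5;   /* -> 1 (bottom field) */
        printf("fs=%u bottom=%u\n", fs_idx, bottom);
        return 0;
    }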
VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE; + refPicListX++; + } + } } static inline void vbp_set_pre_weight_table_h264( - struct h264_viddec_parser* h264_parser, - VASliceParameterBufferH264 *slc_parms) + struct h264_viddec_parser* h264_parser, + VASliceParameterBufferH264 *slc_parms) { - h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader); - int i, j; - - if ((((h264_PtypeP == slice_header->slice_type) || - (h264_PtypeB == slice_header->slice_type)) && - h264_parser->info.active_PPS.weighted_pred_flag) || - ((h264_PtypeB == slice_header->slice_type) && - (1 == h264_parser->info.active_PPS.weighted_bipred_idc))) - { - slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom; - slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom; - slc_parms->luma_weight_l0_flag = slice_header->sh_predwttbl.luma_weight_l0_flag; - slc_parms->chroma_weight_l0_flag = slice_header->sh_predwttbl.chroma_weight_l0_flag; - slc_parms->luma_weight_l1_flag = slice_header->sh_predwttbl.luma_weight_l1_flag; - slc_parms->chroma_weight_l1_flag = slice_header->sh_predwttbl.chroma_weight_l1_flag; - - for (i = 0; i < 32; i++) - { - slc_parms->luma_weight_l0[i] = slice_header->sh_predwttbl.luma_weight_l0[i]; - slc_parms->luma_offset_l0[i] = slice_header->sh_predwttbl.luma_offset_l0[i]; - slc_parms->luma_weight_l1[i] = slice_header->sh_predwttbl.luma_weight_l1[i]; - slc_parms->luma_offset_l1[i] = slice_header->sh_predwttbl.luma_offset_l1[i]; - - for (j = 0; j < 2; j++) - { - slc_parms->chroma_weight_l0[i][j] = slice_header->sh_predwttbl.chroma_weight_l0[i][j]; - slc_parms->chroma_offset_l0[i][j] = slice_header->sh_predwttbl.chroma_offset_l0[i][j]; - slc_parms->chroma_weight_l1[i][j] = slice_header->sh_predwttbl.chroma_weight_l1[i][j]; - slc_parms->chroma_offset_l1[i][j] = slice_header->sh_predwttbl.chroma_offset_l1[i][j]; - } - } - } - else - { - /* default weight table */ - slc_parms->luma_log2_weight_denom = 5; - slc_parms->chroma_log2_weight_denom = 5; - slc_parms->luma_weight_l0_flag = 0; - slc_parms->luma_weight_l1_flag = 0; - slc_parms->chroma_weight_l0_flag = 0; - slc_parms->chroma_weight_l1_flag = 0; - for (i = 0; i < 32; i++) - { - slc_parms->luma_weight_l0[i] = 0; - slc_parms->luma_offset_l0[i] = 0; - slc_parms->luma_weight_l1[i] = 0; - slc_parms->luma_offset_l1[i] = 0; - - for (j = 0; j < 2; j++) - { - slc_parms->chroma_weight_l0[i][j] = 0; - slc_parms->chroma_offset_l0[i][j] = 0; - slc_parms->chroma_weight_l1[i][j] = 0; - slc_parms->chroma_offset_l1[i][j] = 0; - } - } + h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader); + int i, j; + + if ((((h264_PtypeP == slice_header->slice_type) || + (h264_PtypeB == slice_header->slice_type)) && + h264_parser->info.active_PPS.weighted_pred_flag) || + ((h264_PtypeB == slice_header->slice_type) && + (1 == h264_parser->info.active_PPS.weighted_bipred_idc))) + { + slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom; + slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom; + slc_parms->luma_weight_l0_flag = slice_header->sh_predwttbl.luma_weight_l0_flag; + slc_parms->chroma_weight_l0_flag = slice_header->sh_predwttbl.chroma_weight_l0_flag; + slc_parms->luma_weight_l1_flag = slice_header->sh_predwttbl.luma_weight_l1_flag; + slc_parms->chroma_weight_l1_flag = slice_header->sh_predwttbl.chroma_weight_l1_flag; + + for (i = 0; i < 32; i++) + { + slc_parms->luma_weight_l0[i] = 
slice_header->sh_predwttbl.luma_weight_l0[i]; + slc_parms->luma_offset_l0[i] = slice_header->sh_predwttbl.luma_offset_l0[i]; + slc_parms->luma_weight_l1[i] = slice_header->sh_predwttbl.luma_weight_l1[i]; + slc_parms->luma_offset_l1[i] = slice_header->sh_predwttbl.luma_offset_l1[i]; + + for (j = 0; j < 2; j++) + { + slc_parms->chroma_weight_l0[i][j] = slice_header->sh_predwttbl.chroma_weight_l0[i][j]; + slc_parms->chroma_offset_l0[i][j] = slice_header->sh_predwttbl.chroma_offset_l0[i][j]; + slc_parms->chroma_weight_l1[i][j] = slice_header->sh_predwttbl.chroma_weight_l1[i][j]; + slc_parms->chroma_offset_l1[i][j] = slice_header->sh_predwttbl.chroma_offset_l1[i][j]; + } + } + } + else + { + /* default weight table */ + slc_parms->luma_log2_weight_denom = 5; + slc_parms->chroma_log2_weight_denom = 5; + slc_parms->luma_weight_l0_flag = 0; + slc_parms->luma_weight_l1_flag = 0; + slc_parms->chroma_weight_l0_flag = 0; + slc_parms->chroma_weight_l1_flag = 0; + for (i = 0; i < 32; i++) + { + slc_parms->luma_weight_l0[i] = 0; + slc_parms->luma_offset_l0[i] = 0; + slc_parms->luma_weight_l1[i] = 0; + slc_parms->luma_offset_l1[i] = 0; + + for (j = 0; j < 2; j++) + { + slc_parms->chroma_weight_l0[i][j] = 0; + slc_parms->chroma_offset_l0[i][j] = 0; + slc_parms->chroma_weight_l1[i][j] = 0; + slc_parms->chroma_offset_l1[i][j] = 0; + } + } } } static inline void vbp_set_reference_frames_h264( - struct h264_viddec_parser *parser, - VAPictureParameterBufferH264* pic_parms) -{ - int buffer_idx; - int frame_idx; - frame_store* store = NULL; - h264_DecodedPictureBuffer* dpb = &(parser->info.dpb); - /* initialize reference frames */ - for (frame_idx = 0; frame_idx < 16; frame_idx++) - { - pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE; - pic_parms->ReferenceFrames[frame_idx].frame_idx = 0; - pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID; - pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0; - pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0; - } - pic_parms->num_ref_frames = 0; - - frame_idx = 0; - - /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer); */ - /* set short term reference frames */ - for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++) - { - if (frame_idx >= 16 || buffer_idx >= 16) - { - WTRACE("Frame index is out of bound."); - break; - } - - store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]]; - /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */ - if (viddec_h264_get_is_used(store)) - { - pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num; - pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; - if (FRAME == parser->info.img.structure) - { - pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc; - pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc; - } - else - { - pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; - pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; - if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) - { - /* if both fields are used for reference, just set flag to be frame (0) */ - } - else - { - if (store->top_field.used_for_reference) - pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; - if (store->bottom_field.used_for_reference) - pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; - } - } - } - frame_idx++; - } - - /* set long 
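The branch above selects between the explicit prediction-weight table carried in the slice header and the default table (log2 denominators of 5, all weights and offsets zero). The applicability test in isolation, as a sketch (the enum values and parameter names are placeholders):

    #include <stdbool.h>

    enum { SLICE_P, SLICE_B, SLICE_I };

    /* Explicit weight tables apply to P and B slices when
     * weighted_pred_flag is set, and to B slices when
     * weighted_bipred_idc == 1 (explicit weighted bi-prediction);
     * every other case falls back to the default table. */
    static bool use_explicit_weights(int slice_type,
                                     bool weighted_pred_flag,
                                     int weighted_bipred_idc)
    {
        if ((slice_type == SLICE_P || slice_type == SLICE_B) &&
            weighted_pred_flag)
            return true;
        if (slice_type == SLICE_B && weighted_bipred_idc == 1)
            return true;
        return false;
    }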
term reference frames */ - for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++) - { - if (frame_idx >= 16 || buffer_idx >= 16) - { - WTRACE("Frame index is out of bound."); - break; - } - store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]]; - if (!viddec_h264_get_is_long_term(store)) - { - WTRACE("long term frame is not marked as long term."); - } - /*if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */ - if (viddec_h264_get_is_used(store)) - { - pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE; - if (FRAME == parser->info.img.structure) - { - pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc; - pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc; - } - else - { - pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; - pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; - if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) - { - /* if both fields are used for reference, just set flag to be frame (0)*/ - } - else - { - if (store->top_field.used_for_reference) - pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; - if (store->bottom_field.used_for_reference) - pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; - } - } - } - frame_idx++; - } - - pic_parms->num_ref_frames = frame_idx; - - if (frame_idx > parser->info.active_SPS.num_ref_frames) - { - WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).", - frame_idx, parser->info.active_SPS.num_ref_frames); - } + struct h264_viddec_parser *parser, + VAPictureParameterBufferH264* pic_parms) +{ + int buffer_idx; + int frame_idx; + frame_store* store = NULL; + h264_DecodedPictureBuffer* dpb = &(parser->info.dpb); + /* initialize reference frames */ + for (frame_idx = 0; frame_idx < 16; frame_idx++) + { + pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE; + pic_parms->ReferenceFrames[frame_idx].frame_idx = 0; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID; + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0; + } + pic_parms->num_ref_frames = 0; + + frame_idx = 0; + + /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer); */ + /* set short term reference frames */ + for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++) + { + if (frame_idx >= 16 || buffer_idx >= 16) + { + WTRACE("Frame index is out of bound."); + break; + } + + store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]]; + /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */ + if (viddec_h264_get_is_used(store)) + { + pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; + if (FRAME == parser->info.img.structure) + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc; + } + else + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) + { + /* if both fields are used for reference, just set flag to be frame (0) */ + } + else + { + 
if (store->top_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; + if (store->bottom_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; + } + } + } + frame_idx++; + } + + /* set long term reference frames */ + for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++) + { + if (frame_idx >= 16 || buffer_idx >= 16) + { + WTRACE("Frame index is out of bound."); + break; + } + store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]]; + if (!viddec_h264_get_is_long_term(store)) + { + WTRACE("long term frame is not marked as long term."); + } + /*if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */ + if (viddec_h264_get_is_used(store)) + { + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE; + if (FRAME == parser->info.img.structure) + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc; + } + else + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) + { + /* if both fields are used for reference, just set flag to be frame (0)*/ + } + else + { + if (store->top_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; + if (store->bottom_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; + } + } + } + frame_idx++; + } + + pic_parms->num_ref_frames = frame_idx; + + if (frame_idx > parser->info.active_SPS.num_ref_frames) + { + WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).", + frame_idx, parser->info.active_SPS.num_ref_frames); + } } static inline void vbp_set_scaling_list_h264( - struct h264_viddec_parser *parser, - VAIQMatrixBufferH264* IQ_matrix_buf) + struct h264_viddec_parser *parser, + VAIQMatrixBufferH264* IQ_matrix_buf) { - int i; + int i; int lists_to_set = 6 + 2 * (parser->info.active_PPS.transform_8x8_mode_flag ? 1 : 0); - - if (parser->info.active_PPS.pic_scaling_matrix_present_flag) - { - for (i = 0; i < lists_to_set; i++) - { - if (parser->info.active_PPS.pic_scaling_list_present_flag[i]) - { - if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) || - ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6])) - { - /* use default scaling list */ - if (i < 6) - { - memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); - } - else - { - memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); - } - } - else - { - /* use PPS list */ - if (i < 6) - { - memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16); - } - else - { - memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64); - } - } - } - else /* pic_scaling_list not present */ - { - if (parser->info.active_SPS.seq_scaling_matrix_present_flag) - { - /* SPS matrix present - use fallback rule B */ - switch (i) - { - case 0: - case 3: - memcpy(IQ_matrix_buf->ScalingList4x4[i], - parser->info.active_SPS.seq_scaling_list_present_flag[i] ? 
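For field-coded reference frames above, an entry whose two fields are both referenced is reported as a frame (flags 0); otherwise only the referenced field's parity flag is set. The same logic as a sketch, with stand-in macros instead of the real VA_PICTURE_H264_* values:

    #include <stdint.h>

    #define PIC_TOP_FIELD    0x1  /* stands in for VA_PICTURE_H264_TOP_FIELD    */
    #define PIC_BOTTOM_FIELD 0x2  /* stands in for VA_PICTURE_H264_BOTTOM_FIELD */

    /* When both fields of a stored frame are used for reference the
     * entry stays a frame (flags 0); otherwise mark only the field(s)
     * actually referenced. */
    static uint32_t field_flags(int top_used, int bottom_used)
    {
        uint32_t flags = 0;
        if (!(top_used && bottom_used)) {
            if (top_used)
                flags |= PIC_TOP_FIELD;
            if (bottom_used)
                flags |= PIC_BOTTOM_FIELD;
        }
        return flags;
    }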
parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i], - 16); - break; - - case 6: - case 7: - memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], - parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i], - 64); - break; - - case 1: - case 2: - case 4: - case 5: - memcpy(IQ_matrix_buf->ScalingList4x4[i], - IQ_matrix_buf->ScalingList4x4[i - 1], - 16); - break; - - default: - g_warning("invalid scaling list index."); - break; - } - } - else /* seq_scaling_matrix not present */ - { - /* SPS matrix not present - use fallback rule A */ - switch (i) - { - case 0: - case 3: - memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); - break; - - case 6: - case 7: - memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); - break; - - case 1: - case 2: - case 4: - case 5: - memcpy(IQ_matrix_buf->ScalingList4x4[i], - IQ_matrix_buf->ScalingList4x4[i - 1], - 16); - break; - - default: - WTRACE("invalid scaling list index."); - break; - } - } /* end of seq_scaling_matrix not present */ - } /* end of pic_scaling_list not present */ - } /* for loop for each index from 0 to 7 */ - } /* end of pic_scaling_matrix present */ - else - { - /* PPS matrix not present, use SPS information */ - if (parser->info.active_SPS.seq_scaling_matrix_present_flag) - { - for (i = 0; i < lists_to_set; i++) - { - if (parser->info.active_SPS.seq_scaling_list_present_flag[i]) - { - if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) || - ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6])) - { - /* use default scaling list */ - if (i < 6) - { - memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); - } - else - { - memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); - } - } - else - { - /* use SPS list */ - if (i < 6) - { - memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16); - } - else - { - memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64); - } - } - } - else - { - /* SPS list not present - use fallback rule A */ - switch (i) - { - case 0: - case 3: - memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); - break; - - case 6: - case 7: - memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); - break; - - case 1: - case 2: - case 4: - case 5: - memcpy(IQ_matrix_buf->ScalingList4x4[i], - IQ_matrix_buf->ScalingList4x4[i - 1], - 16); - break; - - default: - WTRACE("invalid scaling list index."); - break; - } - } - } - } - else - { - /* SPS matrix not present - use flat lists */ - for (i = 0; i < 6; i++) - { - memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16); - } - for (i = 0; i < 2; i++) - { - memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64); - } - } - } - - if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) && - (parser->info.active_PPS.pic_scaling_matrix_present_flag || - parser->info.active_SPS.seq_scaling_matrix_present_flag)) - { - for (i = 0; i < 2; i++) - { - memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64); - } - } + + if (parser->info.active_PPS.pic_scaling_matrix_present_flag) + { + for (i = 0; i < lists_to_set; i++) + { + if (parser->info.active_PPS.pic_scaling_list_present_flag[i]) + { + if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) || + ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6])) + { + /* use default scaling list */ + if (i < 6) + { + 
memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + } + } + else + { + /* use PPS list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64); + } + } + } + else /* pic_scaling_list not present */ + { + if (parser->info.active_SPS.seq_scaling_matrix_present_flag) + { + /* SPS matrix present - use fallback rule B */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i], + 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], + parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i], + 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + //g_warning("invalid scaling list index."); + break; + } + } + else /* seq_scaling_matrix not present */ + { + /* SPS matrix not present - use fallback rule A */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + WTRACE("invalid scaling list index."); + break; + } + } /* end of seq_scaling_matrix not present */ + } /* end of pic_scaling_list not present */ + } /* for loop for each index from 0 to 7 */ + } /* end of pic_scaling_matrix present */ + else + { + /* PPS matrix not present, use SPS information */ + if (parser->info.active_SPS.seq_scaling_matrix_present_flag) + { + for (i = 0; i < lists_to_set; i++) + { + if (parser->info.active_SPS.seq_scaling_list_present_flag[i]) + { + if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) || + ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6])) + { + /* use default scaling list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + } + } + else + { + /* use SPS list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64); + } + } + } + else + { + /* SPS list not present - use fallback rule A */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + WTRACE("invalid scaling list index."); + break; + } + } + } + } + else + { + /* SPS matrix not present - use flat lists */ + for (i = 0; i < 6; i++) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16); + } + for (i = 0; i < 2; i++) + { + memcpy(IQ_matrix_buf->ScalingList8x8[i], 
quant8_flat, 64); + } + } + } + + if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) && + (parser->info.active_PPS.pic_scaling_matrix_present_flag || + parser->info.active_SPS.seq_scaling_matrix_present_flag)) + { + for (i = 0; i < 2; i++) + { + memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64); + } + } } static void vbp_set_codec_data_h264( - struct h264_viddec_parser *parser, - vbp_codec_data_h264* codec_data) + struct h264_viddec_parser *parser, + vbp_codec_data_h264* codec_data) { - /* parameter id */ - codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id; - codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id; - - /* profile and level */ - codec_data->profile_idc = parser->info.active_SPS.profile_idc; - codec_data->level_idc = parser->info.active_SPS.level_idc; - - - codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2; - - - /* reference frames */ - codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames; - - if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag && - !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag) - { - /* no longer necessary: two fields share the same interlaced surface */ - /* codec_data->num_ref_frames *= 2; */ - } - - codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; - - /* frame coding */ - codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; - codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; - - /* frame dimension */ - codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1 ) * 16; - - codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * - (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16; - - - /* aspect ratio */ + /* parameter id */ + codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id; + codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id; + + /* profile and level */ + codec_data->profile_idc = parser->info.active_SPS.profile_idc; + codec_data->level_idc = parser->info.active_SPS.level_idc; + + + codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2; + + + /* reference frames */ + codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames; + + if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag && + !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag) + { + /* no longer necessary: two fields share the same interlaced surface */ + /* codec_data->num_ref_frames *= 2; */ + } + + codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; + + /* frame coding */ + codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; + + /* frame dimension */ + codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1 ) * 16; + + codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16; + + + /* aspect ratio */ if (parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag) - { - codec_data->aspect_ratio_idc = - 
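The scaling-list logic above implements the spec's fallback rules: rule B consults the SPS-signalled lists when a PPS list is absent, rule A falls back to the default or previously derived lists, and flat lists apply when no matrix is signalled at all. Rule A in isolation, as a sketch (default4x4/default8x8 stand in for the parser's UseDefaultList tables):

    #include <string.h>
    #include <stdint.h>

    /* Fallback rule A for list index i: the first 4x4 list of each
     * intra/inter group (indices 0 and 3) and the 8x8 lists (6 and 7)
     * take the default lists; the remaining 4x4 lists (1, 2, 4, 5)
     * inherit the previously derived list. */
    static void fallback_rule_a(uint8_t list4x4[6][16],
                                uint8_t list8x8[2][64],
                                int i,
                                const uint8_t default4x4[6][16],
                                const uint8_t default8x8[2][64])
    {
        switch (i) {
        case 0: case 3:
            memcpy(list4x4[i], default4x4[i], 16);
            break;
        case 6: case 7:
            memcpy(list8x8[i - 6], default8x8[i - 6], 64);
            break;
        case 1: case 2: case 4: case 5:
            memcpy(list4x4[i], list4x4[i - 1], 16);
            break;
        }
    }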
parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc; + { + codec_data->aspect_ratio_idc = + parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc; if (codec_data->aspect_ratio_idc < 17) { codec_data->sar_width = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][0]; - codec_data->sar_height = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][1]; + codec_data->sar_height = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][1]; } else if (codec_data->aspect_ratio_idc == 255) { - codec_data->sar_width = - parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width; - - codec_data->sar_height = - parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height; + codec_data->sar_width = + parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width; + + codec_data->sar_height = + parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height; } else { codec_data->sar_width = 0; - codec_data->sar_height = 0; + codec_data->sar_height = 0; } } else { // unspecified - codec_data->aspect_ratio_idc = 0; - codec_data->sar_width = 0; - codec_data->sar_height = 0; + codec_data->aspect_ratio_idc = 0; + codec_data->sar_width = 0; + codec_data->sar_height = 0; } - + /* video format */ - if (parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag) - { + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag) + { codec_data->video_format = - parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format; - } + parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format; + } else { // Unspecified video format codec_data->video_format = 5; } - - codec_data->video_full_range_flag = - parser->info.active_SPS.sps_disp.vui_seq_parameters.video_full_range_flag; - + + codec_data->video_full_range_flag = + parser->info.active_SPS.sps_disp.vui_seq_parameters.video_full_range_flag; + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag) { - codec_data->matrix_coefficients = - parser->info.active_SPS.sps_disp.vui_seq_parameters.matrix_coefficients; + codec_data->matrix_coefficients = + parser->info.active_SPS.sps_disp.vui_seq_parameters.matrix_coefficients; } else { // Unspecified - codec_data->matrix_coefficients = 2; + codec_data->matrix_coefficients = 2; } /* picture order type and count */ codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; codec_data->pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type; - + } static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index) { - viddec_pm_cxt_t *cxt = pcontext->parser_cxt; - - vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; - struct h264_viddec_parser* parser = NULL; - vbp_picture_data_h264* pic_data = NULL; - VAPictureParameterBufferH264* pic_parms = NULL; - - parser = (struct h264_viddec_parser *)cxt->codec_data; - - if (0 == parser->info.SliceHeader.first_mb_in_slice) - { - /* a new picture is parsed */ - query_data->num_pictures++; - } - - if (query_data->num_pictures == 0) - { - /* partial frame */ - query_data->num_pictures = 1; + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + + vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; + struct h264_viddec_parser* parser = NULL; + vbp_picture_data_h264* pic_data = NULL; + VAPictureParameterBufferH264* pic_parms = NULL; + + parser = (struct h264_viddec_parser *)cxt->codec_data; + + if (0 == parser->info.SliceHeader.first_mb_in_slice) + { + /* a new picture is 
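The frame dimensions above come straight from the SPS: width is (pic_width_in_mbs_minus1 + 1) * 16, and height is scaled by (2 - frame_mbs_only_flag) because a map unit covers a macroblock pair when field coding is possible. A worked example (values chosen for a 1920x1088 coded size):

    #include <stdio.h>

    int main(void)
    {
        int pic_width_in_mbs_minus1        = 119; /* 1920/16 - 1 */
        int pic_height_in_map_units_minus1 = 67;  /* 1088/16 - 1 */
        int frame_mbs_only_flag            = 1;   /* progressive */

        int width  = (pic_width_in_mbs_minus1 + 1) * 16;
        int height = (2 - frame_mbs_only_flag) *
                     (pic_height_in_map_units_minus1 + 1) * 16;

        /* prints 1920x1088; display height becomes 1080 after the
         * SPS frame-cropping rectangle is applied */
        printf("%dx%d\n", width, height);
        return 0;
    }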
parsed */ + query_data->num_pictures++; + } + + if (query_data->num_pictures == 0) + { + /* partial frame */ + query_data->num_pictures = 1; + } + + if (query_data->num_pictures > MAX_NUM_PICTURES) + { + ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES); + return VBP_DATA; + } + + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + WTRACE("MB address does not start from 0!"); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + pic_parms = pic_data->pic_parms; + + // relax this condition to support partial frame parsing + + //if (parser->info.SliceHeader.first_mb_in_slice == 0) + { + /** + * picture parameter only needs to be set once, + * even multiple slices may be encoded + */ + + /* VAPictureParameterBufferH264 */ + pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE; + pic_parms->CurrPic.frame_idx = 0; + if (parser->info.img.field_pic_flag == 1) + { + if (parser->info.img.bottom_field_flag) + { + pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD; + } + else + { + /* also OK set to 0 (from test suite) */ + pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD; + } + } + else + { + pic_parms->CurrPic.flags = 0; /* frame picture */ } - - if (query_data->num_pictures > MAX_NUM_PICTURES) - { - ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES); - return VBP_DATA; - } - - int pic_data_index = query_data->num_pictures - 1; - if (pic_data_index < 0) - { - WTRACE("MB address does not start from 0!"); - return VBP_DATA; - } - - pic_data = &(query_data->pic_data[pic_data_index]); - pic_parms = pic_data->pic_parms; - - // relax this condition to support partial frame parsing - - //if (parser->info.SliceHeader.first_mb_in_slice == 0) - { - /** - * picture parameter only needs to be set once, - * even multiple slices may be encoded - */ - - /* VAPictureParameterBufferH264 */ - pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE; - pic_parms->CurrPic.frame_idx = 0; - if (parser->info.img.field_pic_flag == 1) - { - if (parser->info.img.bottom_field_flag) - { - pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD; - } - else - { - /* also OK set to 0 (from test suite) */ - pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD; - } - } - else - { - pic_parms->CurrPic.flags = 0; /* frame picture */ - } - pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc; - pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc; - pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num; - - /* don't care if current frame is used as long term reference */ - if (parser->info.SliceHeader.nal_ref_idc != 0) - { - pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE; - } - - pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1; - - /* frame height in MBS */ - pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * - (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1; - - pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8; - pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8; - - - pic_parms->seq_fields.value = 0; - pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc; - pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag; - pic_parms->seq_fields.bits.frame_mbs_only_flag = 
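CurrPic.flags above encodes the picture structure: a field picture carries the parity of the field being decoded, a frame picture carries 0, and any picture with nal_ref_idc != 0 is additionally marked as a short-term reference. The derivation as a sketch, with stand-in flag macros rather than the real VA_PICTURE_H264_* values:

    #include <stdint.h>

    #define PIC_TOP_FIELD  0x1  /* stands in for VA_PICTURE_H264_TOP_FIELD    */
    #define PIC_BOT_FIELD  0x2  /* stands in for VA_PICTURE_H264_BOTTOM_FIELD */
    #define PIC_SHORT_TERM 0x4  /* stands in for ..._SHORT_TERM_REFERENCE     */

    static uint32_t curr_pic_flags(int field_pic_flag,
                                   int bottom_field_flag,
                                   int nal_ref_idc)
    {
        uint32_t flags = 0;
        if (field_pic_flag)
            flags = bottom_field_flag ? PIC_BOT_FIELD : PIC_TOP_FIELD;
        /* frame picture: flags stays 0 */
        if (nal_ref_idc != 0)
            flags |= PIC_SHORT_TERM;
        return flags;
    }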
parser->info.active_SPS.sps_disp.frame_mbs_only_flag; - pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; - pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag; - - /* new fields in libva 0.31 */ - pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; + pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc; + pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc; + pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num; + + /* don't care if current frame is used as long term reference */ + if (parser->info.SliceHeader.nal_ref_idc != 0) + { + pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE; + } + + pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1; + + /* frame height in MBS */ + pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1; + + pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8; + pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8; + + + pic_parms->seq_fields.value = 0; + pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc; + pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag; + pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; + pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag; + + /* new fields in libva 0.31 */ + pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4; pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type; pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag =parser->info.active_SPS.delta_pic_order_always_zero_flag; - - - /* referened from UMG_Moorstown_TestSuites */ - pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 
1 : 0; - - pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1; - pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type; - pic_parms->slice_group_change_rate_minus1 = 0; - pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26; - pic_parms->pic_init_qs_minus26 = 0; - pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset; - pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset; - - pic_parms->pic_fields.value = 0; - pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag; - pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag; - pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc; - pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag; - - /* new LibVA fields in v0.31*/ - pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag; - pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag; + + + /* referened from UMG_Moorstown_TestSuites */ + pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 1 : 0; + + pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1; + pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type; + pic_parms->slice_group_change_rate_minus1 = 0; + pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26; + pic_parms->pic_init_qs_minus26 = 0; + pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset; + pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset; + + pic_parms->pic_fields.value = 0; + pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag; + pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag; + pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc; + pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag; + + /* new LibVA fields in v0.31*/ + pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag; + pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag; pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag; pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0; - /* all slices in the pciture have the same field_pic_flag */ - pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag; - pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag; - - pic_parms->frame_num = parser->info.SliceHeader.frame_num; - } - - - /* set reference frames, and num_ref_frames */ - vbp_set_reference_frames_h264(parser, pic_parms); - if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR) - { - /* num of reference frame is 0 if current picture is IDR */ - pic_parms->num_ref_frames = 0; - } - else - { - /* actual num_ref_frames is set in vbp_set_reference_frames_h264 */ - } - - return VBP_OK; + /* all slices in 
the pciture have the same field_pic_flag */ + pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag; + pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag; + + pic_parms->frame_num = parser->info.SliceHeader.frame_num; + } + + + /* set reference frames, and num_ref_frames */ + vbp_set_reference_frames_h264(parser, pic_parms); + if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + /* num of reference frame is 0 if current picture is IDR */ + pic_parms->num_ref_frames = 0; + } + else + { + /* actual num_ref_frames is set in vbp_set_reference_frames_h264 */ + } + + return VBP_OK; } #if 0 static inline void vbp_update_reference_frames_h264_methodA(vbp_picture_data_h264* pic_data) { - VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms; - - char is_used[16]; - memset(is_used, 0, sizeof(is_used)); - - int ref_list; - int slice_index; - int i, j; - VAPictureH264* pRefList = NULL; - - for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++) - { - VASliceParameterBufferH264* slice_parms = - &(pic_data->slc_data[slice_index].slc_parms); - - for (ref_list = 0; ref_list < 2; ref_list++) - { - if (0 == ref_list) - pRefList = slice_parms->RefPicList0; - else - pRefList = slice_parms->RefPicList1; - - for (i = 0; i < 32; i++, pRefList++) - { - if (VA_PICTURE_H264_INVALID == pRefList->flags) - break; - - for (j = 0; j < 16; j++) - { - if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt == - pRefList->TopFieldOrderCnt) - { - is_used[j] = 1; - break; - } - } - } - } - } - - int frame_idx = 0; - VAPictureH264* pRefFrame = pic_parms->ReferenceFrames; - for (i = 0; i < 16; i++) - { - if (is_used[i]) - { - memcpy(pRefFrame, - &(pic_parms->ReferenceFrames[i]), - sizeof(VAPictureH264)); - - pRefFrame++; - frame_idx++; - } - } - pic_parms->num_ref_frames = frame_idx; - - for (; frame_idx < 16; frame_idx++) - { - pRefFrame->picture_id = VA_INVALID_SURFACE; - pRefFrame->frame_idx = -1; - pRefFrame->flags = VA_PICTURE_H264_INVALID; - pRefFrame->TopFieldOrderCnt = -1; - pRefFrame->BottomFieldOrderCnt = -1; - pRefFrame++; - } + VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms; + + char is_used[16]; + memset(is_used, 0, sizeof(is_used)); + + int ref_list; + int slice_index; + int i, j; + VAPictureH264* pRefList = NULL; + + for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++) + { + VASliceParameterBufferH264* slice_parms = + &(pic_data->slc_data[slice_index].slc_parms); + + for (ref_list = 0; ref_list < 2; ref_list++) + { + if (0 == ref_list) + pRefList = slice_parms->RefPicList0; + else + pRefList = slice_parms->RefPicList1; + + for (i = 0; i < 32; i++, pRefList++) + { + if (VA_PICTURE_H264_INVALID == pRefList->flags) + break; + + for (j = 0; j < 16; j++) + { + if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt == + pRefList->TopFieldOrderCnt) + { + is_used[j] = 1; + break; + } + } + } + } + } + + int frame_idx = 0; + VAPictureH264* pRefFrame = pic_parms->ReferenceFrames; + for (i = 0; i < 16; i++) + { + if (is_used[i]) + { + memcpy(pRefFrame, + &(pic_parms->ReferenceFrames[i]), + sizeof(VAPictureH264)); + + pRefFrame++; + frame_idx++; + } + } + pic_parms->num_ref_frames = frame_idx; + + for (; frame_idx < 16; frame_idx++) + { + pRefFrame->picture_id = VA_INVALID_SURFACE; + pRefFrame->frame_idx = -1; + pRefFrame->flags = VA_PICTURE_H264_INVALID; + pRefFrame->TopFieldOrderCnt = -1; + pRefFrame->BottomFieldOrderCnt = -1; + pRefFrame++; + } } #endif #if 0 static inline 
void vbp_update_reference_frames_h264_methodB(vbp_picture_data_h264* pic_data) { - VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms; - int i; - VAPictureH264* pRefFrame = pic_parms->ReferenceFrames; - for (i = 0; i < 16; i++) - { - pRefFrame->picture_id = VA_INVALID_SURFACE; - pRefFrame->frame_idx = -1; - pRefFrame->flags = VA_PICTURE_H264_INVALID; - pRefFrame->TopFieldOrderCnt = -1; - pRefFrame->BottomFieldOrderCnt = -1; - pRefFrame++; - } - - pic_parms->num_ref_frames = 0; - - - int ref_list; - int slice_index; - int j; - VAPictureH264* pRefList = NULL; - - for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++) - { - VASliceParameterBufferH264* slice_parms = - &(pic_data->slc_data[slice_index].slc_parms); - - for (ref_list = 0; ref_list < 2; ref_list++) - { - if (0 == ref_list) - pRefList = slice_parms->RefPicList0; - else - pRefList = slice_parms->RefPicList1; - - for (i = 0; i < 32; i++, pRefList++) - { - if (VA_PICTURE_H264_INVALID == pRefList->flags) - break; - - for (j = 0; j < 16; j++) - { - if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt == - pRefList->TopFieldOrderCnt) - { - pic_parms->ReferenceFrames[j].flags |= - pRefList->flags; - - if ((pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_TOP_FIELD) && - (pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_BOTTOM_FIELD)) - { - pic_parms->ReferenceFrames[j].flags = 0; - } - break; - } - } - if (j == 16) - { - memcpy(&(pic_parms->ReferenceFrames[pic_parms->num_ref_frames++]), - pRefList, - sizeof(VAPictureH264)); - } - - } - } - } + VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms; + int i; + VAPictureH264* pRefFrame = pic_parms->ReferenceFrames; + for (i = 0; i < 16; i++) + { + pRefFrame->picture_id = VA_INVALID_SURFACE; + pRefFrame->frame_idx = -1; + pRefFrame->flags = VA_PICTURE_H264_INVALID; + pRefFrame->TopFieldOrderCnt = -1; + pRefFrame->BottomFieldOrderCnt = -1; + pRefFrame++; + } + + pic_parms->num_ref_frames = 0; + + + int ref_list; + int slice_index; + int j; + VAPictureH264* pRefList = NULL; + + for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++) + { + VASliceParameterBufferH264* slice_parms = + &(pic_data->slc_data[slice_index].slc_parms); + + for (ref_list = 0; ref_list < 2; ref_list++) + { + if (0 == ref_list) + pRefList = slice_parms->RefPicList0; + else + pRefList = slice_parms->RefPicList1; + + for (i = 0; i < 32; i++, pRefList++) + { + if (VA_PICTURE_H264_INVALID == pRefList->flags) + break; + + for (j = 0; j < 16; j++) + { + if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt == + pRefList->TopFieldOrderCnt) + { + pic_parms->ReferenceFrames[j].flags |= + pRefList->flags; + + if ((pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_TOP_FIELD) && + (pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_BOTTOM_FIELD)) + { + pic_parms->ReferenceFrames[j].flags = 0; + } + break; + } + } + if (j == 16) + { + memcpy(&(pic_parms->ReferenceFrames[pic_parms->num_ref_frames++]), + pRefList, + sizeof(VAPictureH264)); + } + + } + } + } } #endif static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index) { - viddec_pm_cxt_t *cxt = pcontext->parser_cxt; - uint32 bit, byte; - uint8 is_emul; - - vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; - VASliceParameterBufferH264 *slc_parms = NULL; - vbp_slice_data_h264 *slc_data = NULL; - struct h264_viddec_parser* h264_parser = NULL; - h264_Slice_Header_t* slice_header = NULL; - vbp_picture_data_h264* pic_data = NULL; - - - h264_parser = (struct h264_viddec_parser 
*)cxt->codec_data; - int pic_data_index = query_data->num_pictures - 1; - if (pic_data_index < 0) - { - ETRACE("invalid picture data index."); - return VBP_DATA; - } - - pic_data = &(query_data->pic_data[pic_data_index]); - - slc_data = &(pic_data->slc_data[pic_data->num_slices]); - slc_data->buffer_addr = cxt->parse_cubby.buf; - slc_parms = &(slc_data->slc_parms); - - /* byte: how many bytes have been parsed */ - /* bit: bits parsed within the current parsing position */ - viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul); - - + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint32 bit, byte; + uint8 is_emul; + + vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; + VASliceParameterBufferH264 *slc_parms = NULL; + vbp_slice_data_h264 *slc_data = NULL; + struct h264_viddec_parser* h264_parser = NULL; + h264_Slice_Header_t* slice_header = NULL; + vbp_picture_data_h264* pic_data = NULL; + + + h264_parser = (struct h264_viddec_parser *)cxt->codec_data; + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + ETRACE("invalid picture data index."); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + + slc_data = &(pic_data->slc_data[pic_data->num_slices]); + slc_data->buffer_addr = cxt->parse_cubby.buf; + slc_parms = &(slc_data->slc_parms); + + /* byte: how many bytes have been parsed */ + /* bit: bits parsed within the current parsing position */ + viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul); + + #if 0 - /* add 4 bytes of start code prefix */ - slc_parms->slice_data_size = slc_data->slice_size = - pcontext->parser_cxt->list.data[index].edpos - - pcontext->parser_cxt->list.data[index].stpos + 4; - - slc_data->slice_offset = pcontext->parser_cxt->list.data[index].stpos - 4; - - /* overwrite the "length" bytes to start code (0x00000001) */ - *(slc_data->buffer_addr + slc_data->slice_offset) = 0; - *(slc_data->buffer_addr + slc_data->slice_offset + 1) = 0; - *(slc_data->buffer_addr + slc_data->slice_offset + 2) = 0; - *(slc_data->buffer_addr + slc_data->slice_offset + 3) = 1; - - - /* the offset to the NAL start code for this slice */ - slc_parms->slice_data_offset = 0; - - /* whole slice is in this buffer */ - slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; - - /* bit offset from NAL start code to the beginning of slice data */ - /* slc_parms->slice_data_bit_offset = bit;*/ - slc_parms->slice_data_bit_offset = (byte + 4)* 8 + bit; - + /* add 4 bytes of start code prefix */ + slc_parms->slice_data_size = slc_data->slice_size = + pcontext->parser_cxt->list.data[index].edpos - + pcontext->parser_cxt->list.data[index].stpos + 4; + + slc_data->slice_offset = pcontext->parser_cxt->list.data[index].stpos - 4; + + /* overwrite the "length" bytes to start code (0x00000001) */ + *(slc_data->buffer_addr + slc_data->slice_offset) = 0; + *(slc_data->buffer_addr + slc_data->slice_offset + 1) = 0; + *(slc_data->buffer_addr + slc_data->slice_offset + 2) = 0; + *(slc_data->buffer_addr + slc_data->slice_offset + 3) = 1; + + + /* the offset to the NAL start code for this slice */ + slc_parms->slice_data_offset = 0; + + /* whole slice is in this buffer */ + slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + + /* bit offset from NAL start code to the beginning of slice data */ + /* slc_parms->slice_data_bit_offset = bit;*/ + slc_parms->slice_data_bit_offset = (byte + 4)* 8 + bit; + #else - slc_parms->slice_data_size = slc_data->slice_size = - pcontext->parser_cxt->list.data[index].edpos - - 
pcontext->parser_cxt->list.data[index].stpos; - - /* the offset to the NAL start code for this slice */ - slc_data->slice_offset = cxt->list.data[index].stpos; - slc_parms->slice_data_offset = 0; - - /* whole slice is in this buffer */ - slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; - - /* bit offset from NAL start code to the beginning of slice data */ - slc_parms->slice_data_bit_offset = bit + byte * 8; + slc_parms->slice_data_size = slc_data->slice_size = + pcontext->parser_cxt->list.data[index].edpos - + pcontext->parser_cxt->list.data[index].stpos; + + /* the offset to the NAL start code for this slice */ + slc_data->slice_offset = cxt->list.data[index].stpos; + slc_parms->slice_data_offset = 0; + + /* whole slice is in this buffer */ + slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + + /* bit offset from NAL start code to the beginning of slice data */ + slc_parms->slice_data_bit_offset = bit + byte * 8; #endif - - if (is_emul) - { - WTRACE("next byte is emulation prevention byte."); - /*slc_parms->slice_data_bit_offset += 8; */ - } - - if (cxt->getbits.emulation_byte_counter != 0) - { - slc_parms->slice_data_bit_offset -= cxt->getbits.emulation_byte_counter * 8; - } - - slice_header = &(h264_parser->info.SliceHeader); - slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice; - - if(h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag & - (!(h264_parser->info.SliceHeader.field_pic_flag))) - { - slc_parms->first_mb_in_slice /= 2; - } - - slc_parms->slice_type = slice_header->slice_type; - - slc_parms->direct_spatial_mv_pred_flag = slice_header->direct_spatial_mv_pred_flag; - - slc_parms->num_ref_idx_l0_active_minus1 = 0; - slc_parms->num_ref_idx_l1_active_minus1 = 0; - if (slice_header->slice_type == h264_PtypeI) - { - } - else if (slice_header->slice_type == h264_PtypeP) - { - slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; - } - else if (slice_header->slice_type == h264_PtypeB) - { - slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; - slc_parms->num_ref_idx_l1_active_minus1 = slice_header->num_ref_idx_l1_active - 1; - } - else - { - WTRACE("slice type %d is not supported.", slice_header->slice_type); - } - - slc_parms->cabac_init_idc = slice_header->cabac_init_idc; - slc_parms->slice_qp_delta = slice_header->slice_qp_delta; - slc_parms->disable_deblocking_filter_idc = slice_header->disable_deblocking_filter_idc; - slc_parms->slice_alpha_c0_offset_div2 = slice_header->slice_alpha_c0_offset_div2; - slc_parms->slice_beta_offset_div2 = slice_header->slice_beta_offset_div2; - - - vbp_set_pre_weight_table_h264(h264_parser, slc_parms); - vbp_set_slice_ref_list_h264(h264_parser, slc_parms); - - - pic_data->num_slices++; - - //vbp_update_reference_frames_h264_methodB(pic_data); - if (pic_data->num_slices > MAX_NUM_SLICES) - { - ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES); - return VBP_DATA; - } - - /*if (pic_data->num_slices > 1) - { - ITRACE("number of slices per picture is %d.", pic_data->num_slices); - }*/ - return VBP_OK; + + if (is_emul) + { + WTRACE("next byte is emulation prevention byte."); + /*slc_parms->slice_data_bit_offset += 8; */ + } + + if (cxt->getbits.emulation_byte_counter != 0) + { + slc_parms->slice_data_bit_offset -= cxt->getbits.emulation_byte_counter * 8; + } + + slice_header = &(h264_parser->info.SliceHeader); + slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice; + + if 
(h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag & + (!(h264_parser->info.SliceHeader.field_pic_flag))) + { + slc_parms->first_mb_in_slice /= 2; + } + + slc_parms->slice_type = slice_header->slice_type; + + slc_parms->direct_spatial_mv_pred_flag = slice_header->direct_spatial_mv_pred_flag; + + slc_parms->num_ref_idx_l0_active_minus1 = 0; + slc_parms->num_ref_idx_l1_active_minus1 = 0; + if (slice_header->slice_type == h264_PtypeI) + { + } + else if (slice_header->slice_type == h264_PtypeP) + { + slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; + } + else if (slice_header->slice_type == h264_PtypeB) + { + slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; + slc_parms->num_ref_idx_l1_active_minus1 = slice_header->num_ref_idx_l1_active - 1; + } + else + { + WTRACE("slice type %d is not supported.", slice_header->slice_type); + } + + slc_parms->cabac_init_idc = slice_header->cabac_init_idc; + slc_parms->slice_qp_delta = slice_header->slice_qp_delta; + slc_parms->disable_deblocking_filter_idc = slice_header->disable_deblocking_filter_idc; + slc_parms->slice_alpha_c0_offset_div2 = slice_header->slice_alpha_c0_offset_div2; + slc_parms->slice_beta_offset_div2 = slice_header->slice_beta_offset_div2; + + + vbp_set_pre_weight_table_h264(h264_parser, slc_parms); + vbp_set_slice_ref_list_h264(h264_parser, slc_parms); + + + pic_data->num_slices++; + + //vbp_update_reference_frames_h264_methodB(pic_data); + if (pic_data->num_slices > MAX_NUM_SLICES) + { + ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES); + return VBP_DATA; + } + + /*if (pic_data->num_slices > 1) + { + ITRACE("number of slices per picture is %d.", pic_data->num_slices); + }*/ + return VBP_OK; } /** * parse decoder configuration data */ uint32 vbp_parse_init_data_h264(vbp_context* pcontext) -{ - /* parsing AVCDecoderConfigurationRecord structure (see MPEG-4 part 15 spec) */ - - uint8 configuration_version = 0; - uint8 AVC_profile_indication = 0; - uint8 profile_compatibility = 0; - uint8 AVC_level_indication = 0; - uint8 length_size_minus_one = 0; - uint8 num_of_sequence_parameter_sets = 0; - uint8 num_of_picture_parameter_sets = 0; - uint16 sequence_parameter_set_length = 0; - uint16 picture_parameter_set_length = 0; - - int i = 0; - viddec_pm_cxt_t *cxt = pcontext->parser_cxt; - - /* check if configuration data is start code prefix */ - viddec_sc_parse_cubby_cxt_t cubby = cxt->parse_cubby; - viddec_parser_ops_t *ops = pcontext->parser_ops; - int ret = ops->parse_sc((void *)&cubby, - NULL, /* context, not used */ - &(cxt->sc_prefix_info)); - if (ret == 1) - { - WTRACE("configuration data is start-code prefixed.\n"); - bitstream_pattern = H264_BS_SC_PREFIXED; - return vbp_parse_start_code_h264(pcontext); - } - - - uint8* cur_data = cxt->parse_cubby.buf; - - - if (cxt->parse_cubby.size < 6) - { - /* need at least 6 bytes to start parsing the structure, see spec 15 */ - return VBP_DATA; - } - - configuration_version = *cur_data++; - AVC_profile_indication = *cur_data++; - - /*ITRACE("Profile indication: %d", AVC_profile_indication); */ - - profile_compatibility = *cur_data++; - AVC_level_indication = *cur_data++; - - /* ITRACE("Level indication: %d", AVC_level_indication);*/ - /* 2 bits of length_size_minus_one, 6 bits of reserved (11111) */ - length_size_minus_one = (*cur_data) & 0x3; - - if (length_size_minus_one != 3) - { - WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1); - } - - NAL_length_size = 
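slice_data_bit_offset above converts the parser's (byte, bit) position into a bit count from the NAL start, then subtracts 8 bits for every 0x03 emulation-prevention byte the bitstream reader has already consumed, since those bytes are not slice data. As a sketch:

    #include <stdint.h>

    /* byte/bit come from the parser's access-unit position (as
     * viddec_pm_get_au_pos reports above); each emulation-prevention
     * byte inflates the byte count by 8 bits that must be removed. */
    static uint32_t slice_bit_offset(uint32_t byte, uint32_t bit,
                                     uint32_t emulation_byte_counter)
    {
        uint32_t offset = byte * 8 + bit;
        offset -= emulation_byte_counter * 8;
        return offset;
    }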
length_size_minus_one + 1; - - cur_data++; - - /* 3 bits of reserved (111) and 5 bits of num_of_sequence_parameter_sets */ - num_of_sequence_parameter_sets = (*cur_data) & 0x1f; - if (num_of_sequence_parameter_sets > 1) - { - WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets); - } - if (num_of_sequence_parameter_sets > MAX_NUM_SPS) - { - /* this would never happen as MAX_NUM_SPS = 32 */ - WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS); - } - cur_data++; - - cxt->list.num_items = 0; - for (i = 0; i < num_of_sequence_parameter_sets; i++) - { - if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size) - { - /* need at least 2 bytes to parse sequence_parameter_set_length */ - ETRACE("Not enough data to parse SPS length."); - return VBP_DATA; - } - - /* 16 bits */ - sequence_parameter_set_length = vbp_utils_ntohs(cur_data); - - - cur_data += 2; - - if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size) - { - /* need at least sequence_parameter_set_length bytes for SPS */ - ETRACE("Not enough data to parse SPS."); - return VBP_DATA; - } - - cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf; - - /* end pos is exclusive */ - cxt->list.data[cxt->list.num_items].edpos = - cxt->list.data[cxt->list.num_items].stpos + sequence_parameter_set_length; - - cxt->list.num_items++; - - cur_data += sequence_parameter_set_length; - } - - if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size) - { - /* need at least one more byte to parse num_of_picture_parameter_sets */ - ETRACE("Not enough data to parse number of PPS."); - return VBP_DATA; - } - - num_of_picture_parameter_sets = *cur_data++; - if (num_of_picture_parameter_sets > 1) - { - /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */ - } - - for (i = 0; i < num_of_picture_parameter_sets; i++) - { - if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size) - { - /* need at least 2 bytes to parse picture_parameter_set_length */ - ETRACE("Not enough data to parse PPS length."); - return VBP_DATA; - } - - /* 16 bits */ - picture_parameter_set_length = vbp_utils_ntohs(cur_data); - - cur_data += 2; - - if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size) - { - /* need at least picture_parameter_set_length bytes for PPS */ - ETRACE("Not enough data to parse PPS."); - return VBP_DATA; - } - - cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf; - - /* end pos is exclusive */ - cxt->list.data[cxt->list.num_items].edpos = - cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length; - - cxt->list.num_items++; - - cur_data += picture_parameter_set_length; - } - - if ((cur_data - cxt->parse_cubby.buf) != cxt->parse_cubby.size) - { - WTRACE("Not all initialization data is parsed. 
Size = %d, parsed = %d.", - cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf)); - } +{ + /* parsing AVCDecoderConfigurationRecord structure (see MPEG-4 part 15 spec) */ + + uint8 configuration_version = 0; + uint8 AVC_profile_indication = 0; + uint8 profile_compatibility = 0; + uint8 AVC_level_indication = 0; + uint8 length_size_minus_one = 0; + uint8 num_of_sequence_parameter_sets = 0; + uint8 num_of_picture_parameter_sets = 0; + uint16 sequence_parameter_set_length = 0; + uint16 picture_parameter_set_length = 0; + + int i = 0; + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + + /* check if configuration data is start code prefix */ + viddec_sc_parse_cubby_cxt_t cubby = cxt->parse_cubby; + viddec_parser_ops_t *ops = pcontext->parser_ops; + int ret = ops->parse_sc((void *)&cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + if (ret == 1) + { + WTRACE("configuration data is start-code prefixed.\n"); + bitstream_pattern = H264_BS_SC_PREFIXED; + return vbp_parse_start_code_h264(pcontext); + } + + + uint8* cur_data = cxt->parse_cubby.buf; + + + if (cxt->parse_cubby.size < 6) + { + /* need at least 6 bytes to start parsing the structure, see spec 15 */ + return VBP_DATA; + } + + configuration_version = *cur_data++; + AVC_profile_indication = *cur_data++; + + /*ITRACE("Profile indication: %d", AVC_profile_indication); */ + + profile_compatibility = *cur_data++; + AVC_level_indication = *cur_data++; + + /* ITRACE("Level indication: %d", AVC_level_indication);*/ + /* 2 bits of length_size_minus_one, 6 bits of reserved (11111) */ + length_size_minus_one = (*cur_data) & 0x3; + + if (length_size_minus_one != 3) + { + WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1); + } + + NAL_length_size = length_size_minus_one + 1; + + cur_data++; + + /* 3 bits of reserved (111) and 5 bits of num_of_sequence_parameter_sets */ + num_of_sequence_parameter_sets = (*cur_data) & 0x1f; + if (num_of_sequence_parameter_sets > 1) + { + WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets); + } + if (num_of_sequence_parameter_sets > MAX_NUM_SPS) + { + /* this would never happen as MAX_NUM_SPS = 32 */ + WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS); + } + cur_data++; + + cxt->list.num_items = 0; + for (i = 0; i < num_of_sequence_parameter_sets; i++) + { + if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size) + { + /* need at least 2 bytes to parse sequence_parameter_set_length */ + ETRACE("Not enough data to parse SPS length."); + return VBP_DATA; + } + + /* 16 bits */ + sequence_parameter_set_length = vbp_utils_ntohs(cur_data); + + + cur_data += 2; + + if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size) + { + /* need at least sequence_parameter_set_length bytes for SPS */ + ETRACE("Not enough data to parse SPS."); + return VBP_DATA; + } + + cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf; + + /* end pos is exclusive */ + cxt->list.data[cxt->list.num_items].edpos = + cxt->list.data[cxt->list.num_items].stpos + sequence_parameter_set_length; + + cxt->list.num_items++; + + cur_data += sequence_parameter_set_length; + } + + if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size) + { + /* need at least one more byte to parse num_of_picture_parameter_sets */ + ETRACE("Not enough data to parse number of PPS."); + return VBP_DATA; + } + + num_of_picture_parameter_sets = *cur_data++; + if 
(num_of_picture_parameter_sets > 1) + { + /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */ + } + + for (i = 0; i < num_of_picture_parameter_sets; i++) + { + if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size) + { + /* need at least 2 bytes to parse picture_parameter_set_length */ + ETRACE("Not enough data to parse PPS length."); + return VBP_DATA; + } + + /* 16 bits */ + picture_parameter_set_length = vbp_utils_ntohs(cur_data); + + cur_data += 2; + + if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size) + { + /* need at least picture_parameter_set_length bytes for PPS */ + ETRACE("Not enough data to parse PPS."); + return VBP_DATA; + } + + cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf; + + /* end pos is exclusive */ + cxt->list.data[cxt->list.num_items].edpos = + cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length; + + cxt->list.num_items++; + + cur_data += picture_parameter_set_length; + } + + if ((cur_data - cxt->parse_cubby.buf) != cxt->parse_cubby.size) + { + WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.", + cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf)); + } bitstream_pattern = H264_BS_LENGTH_PREFIXED; - return VBP_OK; + return VBP_OK; } static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p) { - switch (NAL_length_size) - { - case 4: - return vbp_utils_ntohl(p); - - case 3: - { - uint32_t i = ((*p) << 16) + ((*(p+1)) << 8) + ((*(p+2))); - return i; - } - - case 2: - return vbp_utils_ntohs(p); - - case 1: - return *p; - - default: - WTRACE("invalid NAL_length_size: %d.", NAL_length_size); - /* default to 4 bytes for length */ - NAL_length_size = 4; - return vbp_utils_ntohl(p); - } + switch (NAL_length_size) + { + case 4: + return vbp_utils_ntohl(p); + + case 3: + { + uint32_t i = ((*p) << 16) + ((*(p+1)) << 8) + ((*(p+2))); + return i; + } + + case 2: + return vbp_utils_ntohs(p); + + case 1: + return *p; + + default: + WTRACE("invalid NAL_length_size: %d.", NAL_length_size); + /* default to 4 bytes for length */ + NAL_length_size = 4; + return vbp_utils_ntohl(p); + } } /** @@ -1519,7 +1519,7 @@ static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p) * of NAL unit. 
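
The switch in vbp_get_NAL_length_h264() above is just a big-endian read of NAL_length_size bytes, and the length-prefixed branch of vbp_parse_start_code_h264() below walks the sample buffer with it. A minimal standalone sketch of the same walk, assuming nothing from this tree (read_nal_length and walk_length_prefixed are illustrative names, not helpers from these files):

    #include <stdint.h>
    #include <stddef.h>

    /* Big-endian read of nal_length_size (1..4) bytes: the same job
     * vbp_get_NAL_length_h264() does via vbp_utils_ntohs/ntohl. */
    static uint32_t read_nal_length(const uint8_t *p, int nal_length_size)
    {
        uint32_t len = 0;
        int i;
        for (i = 0; i < nal_length_size; i++)
            len = (len << 8) | p[i];
        return len;
    }

    /* Walk a length-prefixed (avcC-style) buffer; each iteration isolates
     * one NAL unit, much like the stpos/edpos entries recorded in cxt->list. */
    static void walk_length_prefixed(const uint8_t *buf, size_t size,
                                     int nal_length_size)
    {
        size_t pos = 0;
        while (size - pos >= (size_t)nal_length_size)
        {
            uint32_t nal_len = read_nal_length(buf + pos, nal_length_size);
            pos += nal_length_size;
            if (nal_len > size - pos)
                break; /* truncated NAL unit; stop, as the parser does on bad sizes */
            /* NAL unit payload: buf[pos] .. buf[pos + nal_len - 1] (end exclusive) */
            pos += nal_len;
        }
    }
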
See spec 15 (Sample format) */ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) -{ +{ viddec_pm_cxt_t *cxt = pcontext->parser_cxt; /* reset query data for the new sample buffer */ @@ -1547,8 +1547,8 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) /* reset start position of first item to 0 in case there is only one item */ cxt->list.data[0].stpos = 0; - /* start code emulation prevention byte is present in NAL */ - cxt->getbits.is_emul_reqd = 1; + /* start code emulation prevention byte is present in NAL */ + cxt->getbits.is_emul_reqd = 1; if (bitstream_pattern == H264_BS_LENGTH_PREFIXED) { @@ -1563,13 +1563,13 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) while (size_left >= NAL_length_size) { - NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed); + NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed); size_parsed += NAL_length_size; cxt->list.data[cxt->list.num_items].stpos = size_parsed; size_parsed += NAL_length; /* skip NAL bytes */ /* end position is exclusive */ - cxt->list.data[cxt->list.num_items].edpos = size_parsed; + cxt->list.data[cxt->list.num_items].edpos = size_parsed; cxt->list.num_items++; if (cxt->list.num_items >= MAX_IBUFS_PER_SC) { @@ -1592,25 +1592,25 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) viddec_parser_ops_t *ops = pcontext->parser_ops; int ret = ops->parse_sc((void *)&temp_cubby, - NULL, /* context, not used */ - &(cxt->sc_prefix_info)); - - /* found start code */ + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + + /* found start code */ if (ret == 1) { WTRACE("Stream was supposed to be length prefixed, but actually is start-code prefixed."); NAL_length_size = 0; bitstream_pattern = H264_BS_SC_PREFIXED; - /* reset parsing data */ + /* reset parsing data */ for (i = 0; i < MAX_NUM_PICTURES; i++) { - query_data->pic_data[i].num_slices = 0; + query_data->pic_data[i].num_slices = 0; } - query_data->num_pictures = 0; - cxt->list.num_items = 0; - } - } - } + query_data->num_pictures = 0; + cxt->list.num_items = 0; + } + } + } if (bitstream_pattern == H264_BS_SC_PREFIXED) @@ -1621,12 +1621,12 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) viddec_parser_ops_t *ops = pcontext->parser_ops; int ret = 0; - while(1) + while (1) { - ret = ops->parse_sc((void *)&cubby, - NULL, /* context, not used */ - &(cxt->sc_prefix_info)); - if(ret == 1) + ret = ops->parse_sc((void *)&cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + if (ret == 1) { cubby.phase = 0; @@ -1637,17 +1637,17 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) else { cxt->list.data[cxt->list.num_items - 1].edpos = - cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos; + cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos; cxt->list.data[cxt->list.num_items].stpos = - cxt->list.data[cxt->list.num_items - 1].edpos; + cxt->list.data[cxt->list.num_items - 1].edpos; } cubby.buf = cxt->parse_cubby.buf + - cxt->list.data[cxt->list.num_items].stpos; + cxt->list.data[cxt->list.num_items].stpos; cubby.size = cxt->parse_cubby.size - - cxt->list.data[cxt->list.num_items].stpos; + cxt->list.data[cxt->list.num_items].stpos; cxt->list.num_items++; if (cxt->list.num_items >= MAX_IBUFS_PER_SC) @@ -1668,7 +1668,7 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size; break; } - } + } } @@ -1686,77 +1686,77 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) /** * * process parsing result after a NAL unit is 
parsed -* +* */ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) -{ - if (i >= MAX_NUM_SLICES) - { - return VBP_PARM; - } - - uint32 error = VBP_OK; - - struct h264_viddec_parser* parser = NULL; - parser = (struct h264_viddec_parser *)&( pcontext->parser_cxt->codec_data[0]); - vbp_data_h264* query_data = (vbp_data_h264 *)pcontext->query_data; - switch (parser->info.nal_unit_type) - { - case h264_NAL_UNIT_TYPE_SLICE: - //ITRACE("slice header is parsed."); - error = vbp_add_pic_data_h264(pcontext, i); - if (VBP_OK == error) - { - error = vbp_add_slice_data_h264(pcontext, i); - } - break; - - case h264_NAL_UNIT_TYPE_IDR: - //ITRACE("IDR header is parsed."); - error = vbp_add_pic_data_h264(pcontext, i); - if (VBP_OK == error) - { - error = vbp_add_slice_data_h264(pcontext, i); - } - break; - - case h264_NAL_UNIT_TYPE_SEI: - //ITRACE("SEI header is parsed."); - break; - - case h264_NAL_UNIT_TYPE_SPS: - if (query_data->has_sps) - query_data->new_sps = 1; - query_data->has_sps = 1; - query_data->has_pps = 0; +{ + if (i >= MAX_NUM_SLICES) + { + return VBP_PARM; + } + + uint32 error = VBP_OK; + + struct h264_viddec_parser* parser = NULL; + parser = (struct h264_viddec_parser *)&( pcontext->parser_cxt->codec_data[0]); + vbp_data_h264* query_data = (vbp_data_h264 *)pcontext->query_data; + switch (parser->info.nal_unit_type) + { + case h264_NAL_UNIT_TYPE_SLICE: + //ITRACE("slice header is parsed."); + error = vbp_add_pic_data_h264(pcontext, i); + if (VBP_OK == error) + { + error = vbp_add_slice_data_h264(pcontext, i); + } + break; + + case h264_NAL_UNIT_TYPE_IDR: + //ITRACE("IDR header is parsed."); + error = vbp_add_pic_data_h264(pcontext, i); + if (VBP_OK == error) + { + error = vbp_add_slice_data_h264(pcontext, i); + } + break; + + case h264_NAL_UNIT_TYPE_SEI: + //ITRACE("SEI header is parsed."); + break; + + case h264_NAL_UNIT_TYPE_SPS: + if (query_data->has_sps) + query_data->new_sps = 1; + query_data->has_sps = 1; + query_data->has_pps = 0; ITRACE("SPS header is parsed."); - break; - - case h264_NAL_UNIT_TYPE_PPS: - if (query_data->has_pps || query_data->new_sps) - query_data->new_pps = 1; - - query_data->has_pps = 1; - ITRACE("PPS header is parsed."); - break; - - case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: - //ITRACE("ACC unit delimiter is parsed."); - break; - - case h264_NAL_UNIT_TYPE_EOSeq: - ITRACE("EOSeq is parsed."); - break; - - case h264_NAL_UNIT_TYPE_EOstream: - ITRACE("EOStream is parsed."); - break; - - default: - WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type); - break; - } - return error; + break; + + case h264_NAL_UNIT_TYPE_PPS: + if (query_data->has_pps || query_data->new_sps) + query_data->new_pps = 1; + + query_data->has_pps = 1; + ITRACE("PPS header is parsed."); + break; + + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + //ITRACE("ACC unit delimiter is parsed."); + break; + + case h264_NAL_UNIT_TYPE_EOSeq: + ITRACE("EOSeq is parsed."); + break; + + case h264_NAL_UNIT_TYPE_EOstream: + ITRACE("EOStream is parsed."); + break; + + default: + WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type); + break; + } + return error; } /* @@ -1766,35 +1766,35 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) */ uint32 vbp_populate_query_data_h264(vbp_context *pcontext) { - vbp_data_h264 *query_data = NULL; - struct h264_viddec_parser *parser = NULL; - - parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data; - query_data = (vbp_data_h264 *)pcontext->query_data; - - vbp_set_codec_data_h264(parser, 
query_data->codec_data);
-
-    /* buffer number */
-    query_data->buf_number = buffer_counter;
-
-    /* VAIQMatrixBufferH264 */
-    vbp_set_scaling_list_h264(parser, query_data->IQ_matrix_buf);
-
-    if (query_data->num_pictures > 0)
-    {
-        /*
-         * picture parameter buffer and slice parameter buffer have been populated
-         */
-    }
-    else
-    {
-        /**
-         * add a dummy picture that contains picture parameters parsed
-         from SPS and PPS.
-         */
-        vbp_add_pic_data_h264(pcontext, 0);
-    }
-    return VBP_OK;
+    vbp_data_h264 *query_data = NULL;
+    struct h264_viddec_parser *parser = NULL;
+
+    parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data;
+    query_data = (vbp_data_h264 *)pcontext->query_data;
+
+    vbp_set_codec_data_h264(parser, query_data->codec_data);
+
+    /* buffer number */
+    query_data->buf_number = buffer_counter;
+
+    /* VAIQMatrixBufferH264 */
+    vbp_set_scaling_list_h264(parser, query_data->IQ_matrix_buf);
+
+    if (query_data->num_pictures > 0)
+    {
+        /*
+         * picture parameter buffer and slice parameter buffer have been populated
+         */
+    }
+    else
+    {
+        /**
+         * add a dummy picture that contains picture parameters parsed
+         from SPS and PPS.
+         */
+        vbp_add_pic_data_h264(pcontext, 0);
+    }
+    return VBP_OK;
 }

diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h
index 3f86e59..673b3bd 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h
@@ -9,13 +9,13 @@
 #ifndef VBP_H264_PARSER_H
 #define VBP_H264_PARSER_H

-/*
+/*
 * setup parser's entry points
 */
uint32 vbp_init_parser_entries_h264(vbp_context *pcontext);

 /*
- * allocate query data
+ * allocate query data
 */
uint32 vbp_allocate_query_data_h264(vbp_context *pcontext);

@@ -31,7 +31,7 @@ uint32 vbp_parse_init_data_h264(vbp_context *pcontext);

 /*
 * parse start code. Only support length prefixed mode. Start
- * code prefixed is not supported.
+ * code prefixed is not supported.
 */
uint32 vbp_parse_start_code_h264(vbp_context *pcontext);

diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c
index 27a2dd0..2dc9a48 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c
@@ -6,7 +6,7 @@
 No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
 */
-#include
+//#include

 #include "vbp_loader.h"
 #include "vbp_utils.h"
@@ -16,30 +16,30 @@
 */
 uint32 vbp_open(uint32 parser_type, Handle *hcontext)
 {
-    vbp_context **ppcontext;
-    uint32 error;
-
-    if (NULL == hcontext)
-    {
-        return VBP_PARM;
-    }
-
-    *hcontext = NULL; /* prepare for failure. */
-
-    ppcontext = (vbp_context **)hcontext;
-
-    /**
-     * TO DO:
-     * check if vbp context has been created.
-     */
-
-    error = vbp_utils_create_context(parser_type, ppcontext);
-    if (VBP_OK != error)
-    {
-        ETRACE("Failed to create context: %d.", error);
-    }
-
-    return error;
+    vbp_context **ppcontext;
+    uint32 error;
+
+    if (NULL == hcontext)
+    {
+        return VBP_PARM;
+    }
+
+    *hcontext = NULL; /* prepare for failure. */
+
+    ppcontext = (vbp_context **)hcontext;
+
+    /**
+     * TO DO:
+     * check if vbp context has been created.
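
vbp_loader.c is the public surface of the library, and a client is expected to drive it in open/parse/query/close order. A minimal usage sketch built only from the entry points declared in vbp_loader.h (parse_one_h264_buffer is a hypothetical wrapper; that the result blob is owned and torn down by the context is an assumption inferred from vbp_close):

    #include "vbp_loader.h"

    static uint32 parse_one_h264_buffer(uint8 *data, uint32 size, uint8 is_config)
    {
        Handle ctx = NULL;
        vbp_data_h264 *result = NULL;

        uint32 err = vbp_open(VBP_H264, &ctx);
        if (VBP_OK != err)
            return err;

        /* init_data_flag is 1 when the buffer carries configuration data
         * (e.g. an AVCDecoderConfigurationRecord), 0 for ordinary samples. */
        err = vbp_parse(ctx, data, size, is_config);
        if (VBP_OK == err)
            err = vbp_query(ctx, (void **)&result);

        if (VBP_OK == err)
        {
            /* result->num_pictures pictures are now ready for consumption;
             * do not free the blob here (assumed parser-owned). */
        }

        vbp_close(ctx);
        return err;
    }
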
+ */ + + error = vbp_utils_create_context(parser_type, ppcontext); + if (VBP_OK != error) + { + ETRACE("Failed to create context: %d.", error); + } + + return error; } /** @@ -47,28 +47,28 @@ uint32 vbp_open(uint32 parser_type, Handle *hcontext) */ uint32 vbp_close(Handle hcontext) { - uint32 error; - - if (NULL == hcontext) - { - return VBP_PARM; - } - - vbp_context *pcontext = (vbp_context *)hcontext; - - if (MAGIC_NUMBER != pcontext->identifier) - { - /* not a valid vbp context. */ - ETRACE("context is not initialized"); - return VBP_INIT; - } - error = vbp_utils_destroy_context(pcontext); - if (VBP_OK != error) - { - ETRACE("Failed to destroy context: %d.", error); - } - - return error; + uint32 error; + + if (NULL == hcontext) + { + return VBP_PARM; + } + + vbp_context *pcontext = (vbp_context *)hcontext; + + if (MAGIC_NUMBER != pcontext->identifier) + { + /* not a valid vbp context. */ + ETRACE("context is not initialized"); + return VBP_INIT; + } + error = vbp_utils_destroy_context(pcontext); + if (VBP_OK != error) + { + ETRACE("Failed to destroy context: %d.", error); + } + + return error; } @@ -77,30 +77,30 @@ uint32 vbp_close(Handle hcontext) */ uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag) { - vbp_context *pcontext; - uint32 error = VBP_OK; - - if ((NULL == hcontext) || (NULL == data) || (0 == size)) - { - ETRACE("Invalid input parameters."); - return VBP_PARM; - } - - pcontext = (vbp_context *)hcontext; - - if (MAGIC_NUMBER != pcontext->identifier) - { - ETRACE("context is not initialized"); - return VBP_INIT; - } - - error = vbp_utils_parse_buffer(pcontext, data, size, init_data_flag); - - if (VBP_OK != error) - { - ETRACE("Failed to parse buffer: %d.", error); - } - return error; + vbp_context *pcontext; + uint32 error = VBP_OK; + + if ((NULL == hcontext) || (NULL == data) || (0 == size)) + { + ETRACE("Invalid input parameters."); + return VBP_PARM; + } + + pcontext = (vbp_context *)hcontext; + + if (MAGIC_NUMBER != pcontext->identifier) + { + ETRACE("context is not initialized"); + return VBP_INIT; + } + + error = vbp_utils_parse_buffer(pcontext, data, size, init_data_flag); + + if (VBP_OK != error) + { + ETRACE("Failed to parse buffer: %d.", error); + } + return error; } /** @@ -108,30 +108,30 @@ uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag */ uint32 vbp_query(Handle hcontext, void **data) { - vbp_context *pcontext; - uint32 error = VBP_OK; - - if ((NULL == hcontext) || (NULL == data)) - { - ETRACE("Invalid input parameters."); - return VBP_PARM; - } - - pcontext = (vbp_context *)hcontext; - - if (MAGIC_NUMBER != pcontext->identifier) - { - ETRACE("context is not initialized"); - return VBP_INIT; - } - - error = vbp_utils_query(pcontext, data); - - if (VBP_OK != error) - { - ETRACE("Failed to query parsing result: %d.", error); - } - return error; + vbp_context *pcontext; + uint32 error = VBP_OK; + + if ((NULL == hcontext) || (NULL == data)) + { + ETRACE("Invalid input parameters."); + return VBP_PARM; + } + + pcontext = (vbp_context *)hcontext; + + if (MAGIC_NUMBER != pcontext->identifier) + { + ETRACE("context is not initialized"); + return VBP_INIT; + } + + error = vbp_utils_query(pcontext, data); + + if (VBP_OK != error) + { + ETRACE("Failed to query parsing result: %d.", error); + } + return error; } /** @@ -139,24 +139,24 @@ uint32 vbp_query(Handle hcontext, void **data) */ uint32 vbp_flush(Handle hcontext) { - vbp_context *pcontext; - uint32 error = VBP_OK; + vbp_context *pcontext; + uint32 error 
= VBP_OK; - if (NULL == hcontext) - { - ETRACE("Invalid input parameters."); - return VBP_PARM; - } + if (NULL == hcontext) + { + ETRACE("Invalid input parameters."); + return VBP_PARM; + } - pcontext = (vbp_context *)hcontext; + pcontext = (vbp_context *)hcontext; - if (MAGIC_NUMBER != pcontext->identifier) - { - ETRACE("context is not initialized"); - return VBP_INIT; - } + if (MAGIC_NUMBER != pcontext->identifier) + { + ETRACE("context is not initialized"); + return VBP_INIT; + } - error = vbp_utils_flush(pcontext); + error = vbp_utils_flush(pcontext); - return error; + return error; } diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index 93a98a2..0ef4fbf 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -39,7 +39,7 @@ typedef void *Handle; * MPEG-4 Part 2 data structure */ -typedef struct _vbp_codec_data_mp42 +typedef struct _vbp_codec_data_mp42 { uint8 profile_and_level_indication; uint32 video_object_layer_width; @@ -50,50 +50,50 @@ typedef struct _vbp_codec_data_mp42 // 0 short range, 1 full range uint8 video_range; - + // default 2 (unspecified), 1 for BT709. uint8 matrix_coefficients; - + uint8 short_video_header; - // always exist for mpeg-4, + // always exist for mpeg-4, uint8 aspect_ratio_info; uint8 par_width; uint8 par_height; } vbp_codec_data_mp42; -typedef struct _vbp_slice_data_mp42 -{ - uint8* buffer_addr; - uint32 slice_offset; - uint32 slice_size; - VASliceParameterBufferMPEG4 slice_param; +typedef struct _vbp_slice_data_mp42 +{ + uint8* buffer_addr; + uint32 slice_offset; + uint32 slice_size; + VASliceParameterBufferMPEG4 slice_param; } vbp_slice_data_mp42; typedef struct _vbp_picture_data_mp42 vbp_picture_data_mp42; struct _vbp_picture_data_mp42 { - uint8 vop_coded; - uint16 vop_time_increment; - /* indicates if current buffer contains parameter for the first slice of the picture */ - uint8 new_picture_flag; - VAPictureParameterBufferMPEG4 picture_param; - vbp_slice_data_mp42 slice_data; - - vbp_picture_data_mp42* next_picture_data; + uint8 vop_coded; + uint16 vop_time_increment; + /* indicates if current buffer contains parameter for the first slice of the picture */ + uint8 new_picture_flag; + VAPictureParameterBufferMPEG4 picture_param; + vbp_slice_data_mp42 slice_data; + + vbp_picture_data_mp42* next_picture_data; }; -typedef struct _vbp_data_mp42 +typedef struct _vbp_data_mp42 { - vbp_codec_data_mp42 codec_data; - VAIQMatrixBufferMPEG4 iq_matrix_buffer; + vbp_codec_data_mp42 codec_data; + VAIQMatrixBufferMPEG4 iq_matrix_buffer; - uint32 number_picture_data; - uint32 number_pictures; + uint32 number_picture_data; + uint32 number_pictures; - vbp_picture_data_mp42 *picture_data; + vbp_picture_data_mp42 *picture_data; } vbp_data_mp42; @@ -103,33 +103,33 @@ typedef struct _vbp_data_mp42 typedef struct _vbp_codec_data_h264 { - uint8 pic_parameter_set_id; - uint8 seq_parameter_set_id; - - uint8 profile_idc; - uint8 level_idc; - uint8 constraint_set1_flag; - - uint8 num_ref_frames; - uint8 gaps_in_frame_num_value_allowed_flag; - - uint8 frame_mbs_only_flag; - uint8 mb_adaptive_frame_field_flag; - - int frame_width; - int frame_height; - - uint8 vui_parameters_present_flag; - - /* aspect ratio */ - uint8 aspect_ratio_idc; - uint16 sar_width; - uint16 sar_height; - - /* video fromat */ - - // default 5 unspecified - uint8 video_format; + uint8 pic_parameter_set_id; + uint8 seq_parameter_set_id; + + uint8 profile_idc; + uint8 level_idc; + uint8 constraint_set1_flag; + + 
uint8 num_ref_frames; + uint8 gaps_in_frame_num_value_allowed_flag; + + uint8 frame_mbs_only_flag; + uint8 mb_adaptive_frame_field_flag; + + int frame_width; + int frame_height; + + uint8 vui_parameters_present_flag; + + /* aspect ratio */ + uint8 aspect_ratio_idc; + uint16 sar_width; + uint16 sar_height; + + /* video fromat */ + + // default 5 unspecified + uint8 video_format; uint8 video_full_range_flag; // default 2 unspecified @@ -139,32 +139,32 @@ typedef struct _vbp_codec_data_h264 int log2_max_pic_order_cnt_lsb_minus4; int bit_rate; - + } vbp_codec_data_h264; typedef struct _vbp_slice_data_h264 { - uint8* buffer_addr; + uint8* buffer_addr; - uint32 slice_offset; /* slice data offset */ + uint32 slice_offset; /* slice data offset */ - uint32 slice_size; /* slice data size */ + uint32 slice_size; /* slice data size */ - VASliceParameterBufferH264 slc_parms; + VASliceParameterBufferH264 slc_parms; } vbp_slice_data_h264; - - - typedef struct _vbp_picture_data_h264 - { - VAPictureParameterBufferH264* pic_parms; - uint32 num_slices; - vbp_slice_data_h264* slc_data; - - } vbp_picture_data_h264; - +typedef struct _vbp_picture_data_h264 +{ + VAPictureParameterBufferH264* pic_parms; + + uint32 num_slices; + + vbp_slice_data_h264* slc_data; + +} vbp_picture_data_h264; + typedef struct _vbp_data_h264 { @@ -173,7 +173,7 @@ typedef struct _vbp_data_h264 uint32 num_pictures; - /* if SPS has been received */ + /* if SPS has been received */ uint8 has_sps; /* if PPS has been received */ @@ -182,10 +182,10 @@ typedef struct _vbp_data_h264 uint8 new_sps; uint8 new_pps; - + vbp_picture_data_h264* pic_data; - /** + /** * do we need to send matrix to VA for each picture? If not, we need * a flag indicating whether it is updated. */ @@ -193,127 +193,127 @@ typedef struct _vbp_data_h264 vbp_codec_data_h264* codec_data; -} vbp_data_h264; +} vbp_data_h264; /* * vc1 data structure */ -typedef struct _vbp_codec_data_vc1 +typedef struct _vbp_codec_data_vc1 { - /* Sequence layer. */ - uint8 PROFILE; - uint8 LEVEL; - uint8 POSTPROCFLAG; - uint8 PULLDOWN; - uint8 INTERLACE; - uint8 TFCNTRFLAG; - uint8 FINTERPFLAG; - uint8 PSF; + /* Sequence layer. */ + uint8 PROFILE; + uint8 LEVEL; + uint8 POSTPROCFLAG; + uint8 PULLDOWN; + uint8 INTERLACE; + uint8 TFCNTRFLAG; + uint8 FINTERPFLAG; + uint8 PSF; // default 2: unspecified uint8 MATRIX_COEF; - - /* Entry point layer. */ - uint8 BROKEN_LINK; - uint8 CLOSED_ENTRY; - uint8 PANSCAN_FLAG; - uint8 REFDIST_FLAG; - uint8 LOOPFILTER; - uint8 FASTUVMC; - uint8 EXTENDED_MV; - uint8 DQUANT; - uint8 VSTRANSFORM; - uint8 OVERLAP; - uint8 QUANTIZER; - uint16 CODED_WIDTH; - uint16 CODED_HEIGHT; - uint8 EXTENDED_DMV; - uint8 RANGE_MAPY_FLAG; - uint8 RANGE_MAPY; - uint8 RANGE_MAPUV_FLAG; - uint8 RANGE_MAPUV; - - /* Others. */ - uint8 RANGERED; - uint8 MAXBFRAMES; - uint8 MULTIRES; - uint8 SYNCMARKER; - uint8 RNDCTRL; - uint8 REFDIST; - uint16 widthMB; - uint16 heightMB; - - uint8 INTCOMPFIELD; - uint8 LUMSCALE2; - uint8 LUMSHIFT2; - - // aspect ratio - - // default unspecified - uint8 ASPECT_RATIO; - - uint8 ASPECT_HORIZ_SIZE; - uint8 ASPECT_VERT_SIZE; - + + /* Entry point layer. */ + uint8 BROKEN_LINK; + uint8 CLOSED_ENTRY; + uint8 PANSCAN_FLAG; + uint8 REFDIST_FLAG; + uint8 LOOPFILTER; + uint8 FASTUVMC; + uint8 EXTENDED_MV; + uint8 DQUANT; + uint8 VSTRANSFORM; + uint8 OVERLAP; + uint8 QUANTIZER; + uint16 CODED_WIDTH; + uint16 CODED_HEIGHT; + uint8 EXTENDED_DMV; + uint8 RANGE_MAPY_FLAG; + uint8 RANGE_MAPY; + uint8 RANGE_MAPUV_FLAG; + uint8 RANGE_MAPUV; + + /* Others. 
*/
+    uint8  RANGERED;
+    uint8  MAXBFRAMES;
+    uint8  MULTIRES;
+    uint8  SYNCMARKER;
+    uint8  RNDCTRL;
+    uint8  REFDIST;
+    uint16 widthMB;
+    uint16 heightMB;
+
+    uint8  INTCOMPFIELD;
+    uint8  LUMSCALE2;
+    uint8  LUMSHIFT2;
+
+    // aspect ratio
+
+    // default unspecified
+    uint8  ASPECT_RATIO;
+
+    uint8  ASPECT_HORIZ_SIZE;
+    uint8  ASPECT_VERT_SIZE;
+
 } vbp_codec_data_vc1;

-typedef struct _vbp_slice_data_vc1
+typedef struct _vbp_slice_data_vc1
 {
-    uint8 *buffer_addr;
-    uint32 slice_offset;
-    uint32 slice_size;
-    VASliceParameterBufferVC1 slc_parms; /* pointer to slice parms */
+    uint8 *buffer_addr;
+    uint32 slice_offset;
+    uint32 slice_size;
+    VASliceParameterBufferVC1 slc_parms; /* pointer to slice parms */
 } vbp_slice_data_vc1;

 typedef struct _vbp_picture_data_vc1
 {
-    uint32 picture_is_skipped; /* VC1_PTYPE_SKIPPED if PTYPE is skipped. */
-    VAPictureParameterBufferVC1 *pic_parms; /* current parsed picture header */
-    uint32 size_bitplanes; /* based on number of MBs */
-    uint8 *packed_bitplanes; /* contains up to three bitplanes packed for libVA */
-    uint32 num_slices; /* number of slices. always at least one */
-    vbp_slice_data_vc1 *slc_data; /* pointer to array of slice data */
+    uint32 picture_is_skipped; /* VC1_PTYPE_SKIPPED if PTYPE is skipped. */
+    VAPictureParameterBufferVC1 *pic_parms; /* current parsed picture header */
+    uint32 size_bitplanes; /* based on number of MBs */
+    uint8 *packed_bitplanes; /* contains up to three bitplanes packed for libVA */
+    uint32 num_slices; /* number of slices. always at least one */
+    vbp_slice_data_vc1 *slc_data; /* pointer to array of slice data */
 } vbp_picture_data_vc1;

-
-typedef struct _vbp_data_vc1
+
+typedef struct _vbp_data_vc1
 {
-    uint32 buf_number; /* rolling counter of buffers sent by vbp_parse */
-    vbp_codec_data_vc1 *se_data; /* parsed SH/EPs */
-
-    uint32 num_pictures;
-
-    vbp_picture_data_vc1* pic_data;
+    uint32 buf_number; /* rolling counter of buffers sent by vbp_parse */
+    vbp_codec_data_vc1 *se_data; /* parsed SH/EPs */
+
+    uint32 num_pictures;
+
+    vbp_picture_data_vc1* pic_data;
 } vbp_data_vc1;

 enum _picture_type
 {
-    VC1_PTYPE_I,
-    VC1_PTYPE_P,
-    VC1_PTYPE_B,
-    VC1_PTYPE_BI,
-    VC1_PTYPE_SKIPPED
+    VC1_PTYPE_I,
+    VC1_PTYPE_P,
+    VC1_PTYPE_B,
+    VC1_PTYPE_BI,
+    VC1_PTYPE_SKIPPED
 };

 enum _vbp_parser_error
 {
-    VBP_OK,
-    VBP_TYPE,
-    VBP_LOAD,
-    VBP_INIT,
-    VBP_DATA,
-    VBP_DONE,
-    VBP_MEM,
-    VBP_PARM,
-    VBP_PARTIAL
+    VBP_OK,
+    VBP_TYPE,
+    VBP_LOAD,
+    VBP_INIT,
+    VBP_DATA,
+    VBP_DONE,
+    VBP_MEM,
+    VBP_PARM,
+    VBP_PARTIAL
 };

 enum _vbp_parser_type
 {
-    VBP_VC1,
-    VBP_MPEG2,
-    VBP_MPEG4,
-    VBP_H264
+    VBP_VC1,
+    VBP_MPEG2,
+    VBP_MPEG4,
+    VBP_H264
 };

 /*
@@ -321,7 +321,7 @@ enum _vbp_parser_type
 * @param parser_type: one of the types defined in #vbp_parser_type
 * @param hcontext: pointer to hold returned VBP context handle.
 * @return VBP_OK on success, anything else on failure.
- *
+ *
 */
uint32 vbp_open(uint32 parser_type, Handle *hcontext);

@@ -329,7 +329,7 @@ uint32 vbp_open(uint32 parser_type, Handle *hcontext);
 * close video bitstream parser.
 * @param hcontext: VBP context handle.
 * @returns VBP_OK on success, anything else on failure.
- *
+ *
 */
uint32 vbp_close(Handle hcontext);

@@ -340,17 +340,17 @@ uint32 vbp_close(Handle hcontext);
 * @param data: bitstream buffer.
 * @param size: size of bitstream buffer.
 * @param init_flag: 1 if buffer contains bitstream configuration data, 0 otherwise.
 * @return VBP_OK on success, anything else on failure.
- *
+ *
 */
uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag);

 /*
 * query parsing result.
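
Together with vbp_picture_data_h264 above, vbp_data_h264 is what vbp_query() hands back for H.264: an array of num_pictures pictures, each holding libVA picture parameters and an array of slices. A traversal sketch (submit_to_va is a hypothetical consumer standing in for whatever creates the libVA buffers; the other names are declared in vbp_loader.h):

    extern void submit_to_va(VAPictureParameterBufferH264 *pp,
                             VASliceParameterBufferH264 *sp,
                             uint8 *slice_bytes, uint32 slice_size);

    static void consume_h264_result(vbp_data_h264 *data)
    {
        uint32 i, j;
        for (i = 0; i < data->num_pictures; i++)
        {
            vbp_picture_data_h264 *pic = &data->pic_data[i];
            /* pic->pic_parms is a filled-in VAPictureParameterBufferH264 */
            for (j = 0; j < pic->num_slices; j++)
            {
                vbp_slice_data_h264 *slice = &pic->slc_data[j];
                /* slice bytes start at buffer_addr + slice_offset and
                 * span slice_size bytes */
                submit_to_va(pic->pic_parms, &slice->slc_parms,
                             slice->buffer_addr + slice->slice_offset,
                             slice->slice_size);
            }
        }
    }
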
* @param hcontext: handle to VBP context. - * @param data: pointer to hold a data blob that contains parsing result. + * @param data: pointer to hold a data blob that contains parsing result. * Structure of data blob is determined by the media type. * @return VBP_OK on success, anything else on failure. - * + * */ uint32 vbp_query(Handle hcontext, void **data); @@ -359,7 +359,7 @@ uint32 vbp_query(Handle hcontext, void **data); * flush any un-parsed bitstream. * @param hcontext: handle to VBP context. * @returns VBP_OK on success, anything else on failure. - * + * */ uint32 vbp_flush(Handle hcontent); diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index 9765f28..a4783ee 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -7,7 +7,7 @@ */ -#include +//#include #include #include @@ -20,7 +20,7 @@ static bool short_video_header = TRUE; -static uint8 mp4_aspect_ratio_table[][2] = +static uint8 mp4_aspect_ratio_table[][2] = { // forbidden {0, 0}, @@ -29,7 +29,7 @@ static uint8 mp4_aspect_ratio_table[][2] = {10, 11}, {16, 11}, {40, 33}, - + // reserved {0, 0} }; @@ -41,10 +41,10 @@ static uint8 mp4_aspect_ratio_table[][2] = uint32 vbp_get_sc_pos_mp42( - uint8 *buf, + uint8 *buf, uint32 length, - uint32 *sc_end_pos, - uint8 *is_normal_sc, + uint32 *sc_end_pos, + uint8 *is_normal_sc, uint8* resync_marker); void vbp_on_vop_mp42(vbp_context *pcontext, int list_index); @@ -56,7 +56,7 @@ uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index); uint32 vbp_process_video_packet_mp42(vbp_context *pcontext); static inline uint32 vbp_sprite_trajectory_mp42( - void *parent, + void *parent, mp4_VideoObjectLayer_t *vidObjLay, mp4_VideoObjectPlane_t *vidObjPlane); @@ -64,54 +64,54 @@ static inline uint32 vbp_sprite_trajectory_mp42( static inline uint32 vbp_sprite_dmv_length_mp42( void * parent, int32_t *dmv_length); - + /** * */ uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext) { - if (NULL == pcontext->parser_ops) - { - // absolutely impossible, just sanity check - return VBP_PARM; - } - pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_mp4_init"); - if (pcontext->parser_ops->init == NULL) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } - - pcontext->parser_ops->parse_sc = dlsym(pcontext->fd_parser, "viddec_parse_sc_mp4"); - if (pcontext->parser_ops->parse_sc == NULL) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } - - pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_mp4_parse"); - if (pcontext->parser_ops->parse_syntax == NULL) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } - - pcontext->parser_ops->get_cxt_size =dlsym(pcontext->fd_parser, "viddec_mp4_get_context_size"); - if (pcontext->parser_ops->get_cxt_size == NULL) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } - - pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_mp4_wkld_done"); - if (pcontext->parser_ops->is_wkld_done == NULL) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } - - return VBP_OK; + if (NULL == pcontext->parser_ops) + { + // absolutely impossible, just sanity check + return VBP_PARM; + } + pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_mp4_init"); + if (pcontext->parser_ops->init == NULL) + { + ETRACE ("Failed to set entry point." 
); + return VBP_LOAD; + } + + pcontext->parser_ops->parse_sc = dlsym(pcontext->fd_parser, "viddec_parse_sc_mp4"); + if (pcontext->parser_ops->parse_sc == NULL) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_mp4_parse"); + if (pcontext->parser_ops->parse_syntax == NULL) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->get_cxt_size =dlsym(pcontext->fd_parser, "viddec_mp4_get_context_size"); + if (pcontext->parser_ops->get_cxt_size == NULL) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_mp4_wkld_done"); + if (pcontext->parser_ops->is_wkld_done == NULL) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + return VBP_OK; } @@ -121,82 +121,82 @@ uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext) uint32 vbp_parse_init_data_mp42(vbp_context *pcontext) { uint32 ret = VBP_OK; - ret = vbp_parse_start_code_mp42(pcontext); - return ret; + ret = vbp_parse_start_code_mp42(pcontext); + return ret; } -uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index) +uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index) { - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; - viddec_mp4_parser_t *parser = - (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); - - uint8 is_svh = 0; - uint32 current_sc = parser->current_sc; - is_svh = parser->cur_sc_prefix ? false : true; - - if (!is_svh) - { - // remove prefix from current_sc - current_sc &= 0x0FF; - switch (current_sc) - { - case MP4_SC_VISUAL_OBJECT_SEQUENCE: - VTRACE ("Visual Object Sequence is parsed.\n"); - query_data->codec_data.profile_and_level_indication - = parser->info.profile_and_level_indication; + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + viddec_mp4_parser_t *parser = + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + + uint8 is_svh = 0; + uint32 current_sc = parser->current_sc; + is_svh = parser->cur_sc_prefix ? false : true; + + if (!is_svh) + { + // remove prefix from current_sc + current_sc &= 0x0FF; + switch (current_sc) + { + case MP4_SC_VISUAL_OBJECT_SEQUENCE: + VTRACE ("Visual Object Sequence is parsed.\n"); + query_data->codec_data.profile_and_level_indication + = parser->info.profile_and_level_indication; VTRACE ("profile_and_level_indication = 0x%x\n", parser->info.profile_and_level_indication); - break; - - case MP4_SC_VIDEO_OBJECT_PLANE: - VTRACE ("Video Object Plane is parsed.\n"); - vbp_on_vop_mp42(pcontext, list_index); - break; - - default: - if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && - (current_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) + break; + + case MP4_SC_VIDEO_OBJECT_PLANE: + VTRACE ("Video Object Plane is parsed.\n"); + vbp_on_vop_mp42(pcontext, list_index); + break; + + default: + if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && + (current_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) { VTRACE ("Video Object Layer is parsed\n"); short_video_header = FALSE; - vbp_fill_codec_data(pcontext); - } - else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX && - current_sc >= MP4_SC_VIDEO_OBJECT_MIN) - { - if (parser->sc_seen == MP4_SC_SEEN_SVH) - { - // this should never happen!!!! 
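
The entry points above are resolved with dlsym() against fd_parser, presumably a dlopen() handle created elsewhere in vbp_utils. The repeated lookup-and-check could be factored into one helper; a sketch (bind_entry is hypothetical, not a function in this tree):

    #include <dlfcn.h>

    /* Hypothetical helper: resolve one entry point or fail with VBP_LOAD. */
    static uint32 bind_entry(void *lib, const char *name, void **fn)
    {
        *fn = dlsym(lib, name);
        if (NULL == *fn)
        {
            ETRACE("Failed to set entry point %s.", name);
            return VBP_LOAD;
        }
        return VBP_OK;
    }

Usage would then collapse each block above to a single call, e.g. bind_entry(pcontext->fd_parser, "viddec_mp4_parse", (void **)&pcontext->parser_ops->parse_syntax).
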
+                    vbp_fill_codec_data(pcontext);
+                }
+                else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX &&
+                         current_sc >= MP4_SC_VIDEO_OBJECT_MIN)
+                {
+                    if (parser->sc_seen == MP4_SC_SEEN_SVH)
+                    {
+                        // this should never happen!!!!
                         WTRACE ("Short video header is parsed.\n");
-                vbp_on_vop_svh_mp42(pcontext, list_index);
-            }
-        }
-        break;
-        }
-    }
-    else
-    {
-        if (parser->sc_seen == MP4_SC_SEEN_SVH)
-        {
-            VTRACE ("Short video header is parsed.\n");
-            vbp_on_vop_svh_mp42(pcontext, list_index);
-        }
-    }
-
-    return VBP_OK;
+                        vbp_on_vop_svh_mp42(pcontext, list_index);
+                    }
+                }
+                break;
+            }
+        }
+    }
+    else
+    {
+        if (parser->sc_seen == MP4_SC_SEEN_SVH)
+        {
+            VTRACE ("Short video header is parsed.\n");
+            vbp_on_vop_svh_mp42(pcontext, list_index);
+        }
+    }
+
+    return VBP_OK;
 }

-/*
+/*
 * partial frame handling:
-*
-* h.263: picture header is lost if the first GOB is discarded, a redundant pic header must be
-* conveyed in the packet (RFC 4629) for each following GOB, otherwise,
+*
+* h.263: picture header is lost if the first GOB is discarded, a redundant pic header must be
+* conveyed in the packet (RFC 4629) for each following GOB, otherwise,
 * picture can't be decoded.
-*
+*
 * MPEG4: VideoObjectPlane header is lost if the first slice is discarded. However, picture
-* is still decodable as long as the header_extension_code is 1 in video_packet_header.
+* is still decodable as long as the header_extension_code is 1 in video_packet_header.
 *
 * MPEG-4 with short header: video_plane_with_short_header is lost if the first GOB
 * is discarded. As this header is not duplicated (RFC 3016), picture is not decodable.
@@ -205,10 +205,10 @@ uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index)
 * If buffer contains the 32-bit start code (0x000001xx), proceed as normal.
 *
 * If buffer contains 22-bits of "0000 0000 0000 0000 1000 00", which indicates h.263
-* picture start code or short_video_start_marker, proceed as normal.
+* picture start code or short_video_start_marker, proceed as normal.
 *
-* If buffer contains 22-bits of "0000 0000 0000 0000 1XXX XX", (when XXX XX starts from 000 01), which
-* indicates h.263 Group Start code or gob_resync_marker of gob_layer in MPEG-4 with
+* If buffer contains 22-bits of "0000 0000 0000 0000 1XXX XX", (when XXX XX starts from 000 01), which
+* indicates h.263 Group Start code or gob_resync_marker of gob_layer in MPEG-4 with
 * short header, we should report packet as a partial frame - no more parsing is needed.
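
The 22-bit patterns listed above can be told apart by inspecting the first three bytes of the buffer. A compact sketch of that classification (classify_h263_marker is an illustrative name; the real detection lives in vbp_get_sc_pos_mp42(), and the variable-length MPEG-4 resync_marker case of 16 to 22 zeros followed by a '1' is left out here):

    #include <stdint.h>

    enum h263_marker { MARKER_PSC, MARKER_GOB, MARKER_OTHER };

    /* Precondition: at least 3 readable bytes at p. */
    static enum h263_marker classify_h263_marker(const uint8_t *p)
    {
        /* assemble the first 22 bits, MSB first */
        uint32_t bits = ((uint32_t)p[0] << 14) |
                        ((uint32_t)p[1] << 6)  |
                        ((uint32_t)p[2] >> 2);
        /* need "0000 0000 0000 0000 1": bits 21..6 zero, bit 5 set */
        if ((bits >> 5) != 1)
            return MARKER_OTHER;
        uint32_t group = bits & 0x1f;       /* trailing XXX XX */
        return (group == 0) ? MARKER_PSC    /* "1000 00": picture start code */
                            : MARKER_GOB;   /* GOB start: report partial frame */
    }
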
* * If buffer contains a string of 0 between 16 bits and 22 bits, followed by 1-bit of '1', which indicates a resync-marker, @@ -216,113 +216,113 @@ uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index) */ uint32 vbp_parse_start_code_mp42(vbp_context *pcontext) { - viddec_pm_cxt_t *cxt = pcontext->parser_cxt; - uint8 *buf = NULL; - uint32 size = 0; - uint32 sc_end_pos = -1; - uint32 bytes_parsed = 0; - viddec_mp4_parser_t *pinfo = NULL; - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; - - - // reset query data for the new sample buffer - query_data->number_picture_data= 0; - query_data->number_pictures = 0; - - // emulation prevention byte is always present - cxt->getbits.is_emul_reqd = 1; - - cxt->list.num_items = 0; - cxt->list.data[0].stpos = 0; - cxt->list.data[0].edpos = cxt->parse_cubby.size; - - buf = cxt->parse_cubby.buf; - size = cxt->parse_cubby.size; - - pinfo = (viddec_mp4_parser_t *) &(cxt->codec_data[0]); - - uint8 is_normal_sc = 0; - uint8 resync_marker = 0; - uint32 found_sc = 0; - uint32 ret = VBP_OK; - - while (1) - { - found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size- bytes_parsed, - &sc_end_pos, &is_normal_sc, &resync_marker); - - VTRACE("buf=%x, bytes_parsed=%d, unparsed=%d", (uint32)buf, bytes_parsed, size- bytes_parsed); - VTRACE("found_sc=%d, cxt->list.num_items=%d, resync_marker=%d, ", - found_sc, cxt->list.num_items, resync_marker); - - if (found_sc) - { - cxt->list.data[cxt->list.num_items].stpos = bytes_parsed - + sc_end_pos - 3; - if (cxt->list.num_items != 0) - { - cxt->list.data[cxt->list.num_items - 1].edpos = bytes_parsed - + sc_end_pos - 3; - } - bytes_parsed += sc_end_pos; - - cxt->list.num_items++; - pinfo->cur_sc_prefix = is_normal_sc; - } - else - { - if (cxt->list.num_items != 0) - { - cxt->list.data[cxt->list.num_items - 1].edpos - = cxt->parse_cubby.size; - break; - } - else - { - WTRACE ("No start-code is found in cubby buffer! 
The size of cubby is %d\n", size); - cxt->list.num_items = 1; - cxt->list.data[0].stpos = 0; - cxt->list.data[0].edpos = cxt->parse_cubby.size; + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint8 *buf = NULL; + uint32 size = 0; + uint32 sc_end_pos = -1; + uint32 bytes_parsed = 0; + viddec_mp4_parser_t *pinfo = NULL; + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + + + // reset query data for the new sample buffer + query_data->number_picture_data= 0; + query_data->number_pictures = 0; + + // emulation prevention byte is always present + cxt->getbits.is_emul_reqd = 1; + + cxt->list.num_items = 0; + cxt->list.data[0].stpos = 0; + cxt->list.data[0].edpos = cxt->parse_cubby.size; + + buf = cxt->parse_cubby.buf; + size = cxt->parse_cubby.size; + + pinfo = (viddec_mp4_parser_t *) &(cxt->codec_data[0]); + + uint8 is_normal_sc = 0; + uint8 resync_marker = 0; + uint32 found_sc = 0; + uint32 ret = VBP_OK; + + while (1) + { + found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size- bytes_parsed, + &sc_end_pos, &is_normal_sc, &resync_marker); + + VTRACE("buf=%x, bytes_parsed=%d, unparsed=%d", (uint32)buf, bytes_parsed, size- bytes_parsed); + VTRACE("found_sc=%d, cxt->list.num_items=%d, resync_marker=%d, ", + found_sc, cxt->list.num_items, resync_marker); + + if (found_sc) + { + cxt->list.data[cxt->list.num_items].stpos = bytes_parsed + + sc_end_pos - 3; + if (cxt->list.num_items != 0) + { + cxt->list.data[cxt->list.num_items - 1].edpos = bytes_parsed + + sc_end_pos - 3; + } + bytes_parsed += sc_end_pos; + + cxt->list.num_items++; + pinfo->cur_sc_prefix = is_normal_sc; + } + else + { + if (cxt->list.num_items != 0) + { + cxt->list.data[cxt->list.num_items - 1].edpos + = cxt->parse_cubby.size; + break; + } + else + { + WTRACE ("No start-code is found in cubby buffer! 
The size of cubby is %d\n", size); + cxt->list.num_items = 1; + cxt->list.data[0].stpos = 0; + cxt->list.data[0].edpos = cxt->parse_cubby.size; if (resync_marker) - { + { // either the first slice (GOB) is lost or parser receives a single slice (GOB) - if (short_video_header) - { - // TODO: revisit if HW supportd GOB layer decoding for h.263 - WTRACE("Partial frame: GOB buffer.\n"); - ret = VBP_PARTIAL; - } - else - { - WTRACE("Partial frame: video packet header buffer.\n"); - ret = vbp_process_video_packet_mp42(pcontext); - } - - // set num_items to 0 so buffer will not be parsed again - cxt->list.num_items = 0; - } - else - { - ETRACE("Invalid data received.\n"); + if (short_video_header) + { + // TODO: revisit if HW supportd GOB layer decoding for h.263 + WTRACE("Partial frame: GOB buffer.\n"); + ret = VBP_PARTIAL; + } + else + { + WTRACE("Partial frame: video packet header buffer.\n"); + ret = vbp_process_video_packet_mp42(pcontext); + } + + // set num_items to 0 so buffer will not be parsed again + cxt->list.num_items = 0; + } + else + { + ETRACE("Invalid data received.\n"); cxt->list.num_items = 0; - return VBP_DATA; - } - - break; - } - } - } - - return ret; + return VBP_DATA; + } + + break; + } + } + } + + return ret; } -uint32 vbp_populate_query_data_mp42(vbp_context *pcontext) +uint32 vbp_populate_query_data_mp42(vbp_context *pcontext) { #if 0 - vbp_dump_query_data(pcontext); + vbp_dump_query_data(pcontext); #endif - return VBP_OK; + return VBP_OK; } vbp_picture_data_mp42* vbp_get_mp42_picture_data(vbp_data_mp42 * query_data) @@ -333,30 +333,30 @@ vbp_picture_data_mp42* vbp_get_mp42_picture_data(vbp_data_mp42 * query_data) { picture_data = picture_data->next_picture_data; num_pictures--; - } + } return picture_data; } -void vbp_fill_codec_data(vbp_context *pcontext) +void vbp_fill_codec_data(vbp_context *pcontext) { - viddec_mp4_parser_t *parser = - (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; - vbp_codec_data_mp42* codec_data = &(query_data->codec_data); - - codec_data->profile_and_level_indication - = parser->info.profile_and_level_indication; + viddec_mp4_parser_t *parser = + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + vbp_codec_data_mp42* codec_data = &(query_data->codec_data); - codec_data->video_object_layer_width = - parser->info.VisualObject.VideoObject.video_object_layer_width; + codec_data->profile_and_level_indication + = parser->info.profile_and_level_indication; - codec_data->video_object_layer_height = - parser->info.VisualObject.VideoObject.video_object_layer_height; + codec_data->video_object_layer_width = + parser->info.VisualObject.VideoObject.video_object_layer_width; + + codec_data->video_object_layer_height = + parser->info.VisualObject.VideoObject.video_object_layer_height; if (parser->info.VisualObject.VideoSignalType.is_video_signal_type) { - codec_data->video_format = + codec_data->video_format = parser->info.VisualObject.VideoSignalType.video_format; } else @@ -364,19 +364,19 @@ void vbp_fill_codec_data(vbp_context *pcontext) // Unspecified video format codec_data->video_format = 5; } - - codec_data->video_range = - parser->info.VisualObject.VideoSignalType.video_range; + + codec_data->video_range = + parser->info.VisualObject.VideoSignalType.video_range; if (parser->info.VisualObject.VideoSignalType.is_colour_description) { - codec_data->matrix_coefficients = + 
codec_data->matrix_coefficients = parser->info.VisualObject.VideoSignalType.matrix_coefficients; } else if (short_video_header) { // SMPTE 170M - codec_data->matrix_coefficients = 6; + codec_data->matrix_coefficients = 6; } else { @@ -384,7 +384,7 @@ void vbp_fill_codec_data(vbp_context *pcontext) codec_data->matrix_coefficients = 1; } - codec_data->short_video_header = short_video_header; + codec_data->short_video_header = short_video_header; // aspect ratio codec_data->aspect_ratio_info = parser->info.VisualObject.VideoObject.aspect_ratio_info; @@ -405,35 +405,35 @@ void vbp_fill_codec_data(vbp_context *pcontext) } } -void vbp_fill_slice_data(vbp_context *pcontext, int list_index) +void vbp_fill_slice_data(vbp_context *pcontext, int list_index) { - viddec_mp4_parser_t *parser = - (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); - - if (!parser->info.VisualObject.VideoObject.short_video_header) - { - vbp_process_slices_mp42(pcontext, list_index); - } - else - { - vbp_process_slices_svh_mp42(pcontext, list_index); - } + viddec_mp4_parser_t *parser = + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + + if (!parser->info.VisualObject.VideoObject.short_video_header) + { + vbp_process_slices_mp42(pcontext, list_index); + } + else + { + vbp_process_slices_svh_mp42(pcontext, list_index); + } } -void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) +void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) { - viddec_mp4_parser_t *parser = - (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + viddec_mp4_parser_t *parser = + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; - vbp_picture_data_mp42 *picture_data = NULL; - VAPictureParameterBufferMPEG4 *picture_param = NULL; + vbp_picture_data_mp42 *picture_data = NULL; + VAPictureParameterBufferMPEG4 *picture_param = NULL; if (new_picture_flag) { query_data->number_pictures++; } - + picture_data = query_data->picture_data; if (picture_data == NULL || query_data->number_picture_data == 0) { @@ -442,18 +442,18 @@ void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) { picture_data = (vbp_picture_data_mp42*)g_try_new0(vbp_picture_data_mp42, 1); query_data->picture_data = picture_data; - } + } query_data->number_picture_data = 1; } else - { - // find the last active one + { + // find the last active one int i = query_data->number_picture_data; while (i > 1) - { + { picture_data = picture_data->next_picture_data; i--; - } + } if (picture_data->next_picture_data == NULL) { picture_data->next_picture_data = g_try_new0(vbp_picture_data_mp42, 1); @@ -462,737 +462,737 @@ void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) query_data->number_picture_data++; picture_data = picture_data->next_picture_data; - } - - picture_param = &(picture_data->picture_param); + } + + picture_param = &(picture_data->picture_param); - uint8 idx = 0; + uint8 idx = 0; picture_data->new_picture_flag = new_picture_flag; - - picture_data->vop_coded - = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded; - VTRACE ("vop_coded = %d\n", picture_data->vop_coded); + picture_data->vop_coded + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded; + VTRACE ("vop_coded = %d\n", picture_data->vop_coded); - picture_data->vop_time_increment = - 
parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_time_increment; + + picture_data->vop_time_increment = + parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_time_increment; // fill picture_param - /* - * NOTE: for short video header, the parser saves vop_width and vop_height - * to VOL->video_object_layer_width and VOL->video_object_layer_height - */ - picture_param->vop_width - = parser->info.VisualObject.VideoObject.video_object_layer_width; - picture_param->vop_height - = parser->info.VisualObject.VideoObject.video_object_layer_height; - - picture_param->forward_reference_picture = VA_INVALID_SURFACE; - picture_param->backward_reference_picture = VA_INVALID_SURFACE; - - // Fill VAPictureParameterBufferMPEG4::vol_fields - - picture_param->vol_fields.bits.short_video_header - = parser->info.VisualObject.VideoObject.short_video_header; - picture_param->vol_fields.bits.chroma_format - = parser->info.VisualObject.VideoObject.VOLControlParameters.chroma_format; - - /* TODO: find out why testsuite always set this value to be 0 */ - picture_param->vol_fields.bits.chroma_format = 0; - - picture_param->vol_fields.bits.interlaced - = parser->info.VisualObject.VideoObject.interlaced; - picture_param->vol_fields.bits.obmc_disable - = parser->info.VisualObject.VideoObject.obmc_disable; - picture_param->vol_fields.bits.sprite_enable - = parser->info.VisualObject.VideoObject.sprite_enable; - picture_param->vol_fields.bits.sprite_warping_accuracy - = parser->info.VisualObject.VideoObject.sprite_info.sprite_warping_accuracy; - picture_param->vol_fields.bits.quant_type - = parser->info.VisualObject.VideoObject.quant_type; - picture_param->vol_fields.bits.quarter_sample - = parser->info.VisualObject.VideoObject.quarter_sample; - picture_param->vol_fields.bits.data_partitioned - = parser->info.VisualObject.VideoObject.data_partitioned; - picture_param->vol_fields.bits.reversible_vlc - = parser->info.VisualObject.VideoObject.reversible_vlc; - picture_param->vol_fields.bits.resync_marker_disable - = parser->info.VisualObject.VideoObject.resync_marker_disable; - - picture_param->no_of_sprite_warping_points - = parser->info.VisualObject.VideoObject.sprite_info.no_of_sprite_warping_points; - - for (idx = 0; idx < 3; idx++) - { - picture_param->sprite_trajectory_du[idx] - = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_du[idx]; - picture_param->sprite_trajectory_dv[idx] - = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_dv[idx]; - } - - picture_param->quant_precision - = parser->info.VisualObject.VideoObject.quant_precision; + /* + * NOTE: for short video header, the parser saves vop_width and vop_height + * to VOL->video_object_layer_width and VOL->video_object_layer_height + */ + picture_param->vop_width + = parser->info.VisualObject.VideoObject.video_object_layer_width; + picture_param->vop_height + = parser->info.VisualObject.VideoObject.video_object_layer_height; + + picture_param->forward_reference_picture = VA_INVALID_SURFACE; + picture_param->backward_reference_picture = VA_INVALID_SURFACE; + + // Fill VAPictureParameterBufferMPEG4::vol_fields + + picture_param->vol_fields.bits.short_video_header + = parser->info.VisualObject.VideoObject.short_video_header; + picture_param->vol_fields.bits.chroma_format + = parser->info.VisualObject.VideoObject.VOLControlParameters.chroma_format; + + /* TODO: find out why testsuite always set this value to be 0 */ + picture_param->vol_fields.bits.chroma_format = 0; + + 
picture_param->vol_fields.bits.interlaced + = parser->info.VisualObject.VideoObject.interlaced; + picture_param->vol_fields.bits.obmc_disable + = parser->info.VisualObject.VideoObject.obmc_disable; + picture_param->vol_fields.bits.sprite_enable + = parser->info.VisualObject.VideoObject.sprite_enable; + picture_param->vol_fields.bits.sprite_warping_accuracy + = parser->info.VisualObject.VideoObject.sprite_info.sprite_warping_accuracy; + picture_param->vol_fields.bits.quant_type + = parser->info.VisualObject.VideoObject.quant_type; + picture_param->vol_fields.bits.quarter_sample + = parser->info.VisualObject.VideoObject.quarter_sample; + picture_param->vol_fields.bits.data_partitioned + = parser->info.VisualObject.VideoObject.data_partitioned; + picture_param->vol_fields.bits.reversible_vlc + = parser->info.VisualObject.VideoObject.reversible_vlc; + picture_param->vol_fields.bits.resync_marker_disable + = parser->info.VisualObject.VideoObject.resync_marker_disable; + + picture_param->no_of_sprite_warping_points + = parser->info.VisualObject.VideoObject.sprite_info.no_of_sprite_warping_points; + + for (idx = 0; idx < 3; idx++) + { + picture_param->sprite_trajectory_du[idx] + = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_du[idx]; + picture_param->sprite_trajectory_dv[idx] + = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_dv[idx]; + } + + picture_param->quant_precision + = parser->info.VisualObject.VideoObject.quant_precision; // fill VAPictureParameterBufferMPEG4::vop_fields - if (!parser->info.VisualObject.VideoObject.short_video_header) - { - picture_param->vop_fields.bits.vop_coding_type - = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type; - } - else - { - picture_param->vop_fields.bits.vop_coding_type - = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.picture_coding_type; - } - - /* - * TODO: - * fill picture_param->vop_fields.bits.backward_reference_vop_coding_type - * This shall be done in mixvideoformat_mp42. 
See M42 spec 7.6.7 - */ - - if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B) - { - picture_param->vop_fields.bits.backward_reference_vop_coding_type - = picture_param->vop_fields.bits.vop_coding_type; - } - - picture_param->vop_fields.bits.vop_rounding_type - = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_rounding_type; - picture_param->vop_fields.bits.intra_dc_vlc_thr - = parser->info.VisualObject.VideoObject.VideoObjectPlane.intra_dc_vlc_thr; - picture_param->vop_fields.bits.top_field_first - = parser->info.VisualObject.VideoObject.VideoObjectPlane.top_field_first; - picture_param->vop_fields.bits.alternate_vertical_scan_flag - = parser->info.VisualObject.VideoObject.VideoObjectPlane.alternate_vertical_scan_flag; - - picture_param->vop_fcode_forward - = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_forward; - picture_param->vop_fcode_backward - = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_backward; - picture_param->vop_time_increment_resolution - = parser->info.VisualObject.VideoObject.vop_time_increment_resolution; - - // short header related - picture_param->num_gobs_in_vop - = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_gobs_in_vop; - picture_param->num_macroblocks_in_gob - = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_macroblocks_in_gob; - - // for direct mode prediction - picture_param->TRB = parser->info.VisualObject.VideoObject.TRB; - picture_param->TRD = parser->info.VisualObject.VideoObject.TRD; + if (!parser->info.VisualObject.VideoObject.short_video_header) + { + picture_param->vop_fields.bits.vop_coding_type + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type; + } + else + { + picture_param->vop_fields.bits.vop_coding_type + = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.picture_coding_type; + } + + /* + * TODO: + * fill picture_param->vop_fields.bits.backward_reference_vop_coding_type + * This shall be done in mixvideoformat_mp42. 
See M42 spec 7.6.7 + */ + + if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B) + { + picture_param->vop_fields.bits.backward_reference_vop_coding_type + = picture_param->vop_fields.bits.vop_coding_type; + } + + picture_param->vop_fields.bits.vop_rounding_type + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_rounding_type; + picture_param->vop_fields.bits.intra_dc_vlc_thr + = parser->info.VisualObject.VideoObject.VideoObjectPlane.intra_dc_vlc_thr; + picture_param->vop_fields.bits.top_field_first + = parser->info.VisualObject.VideoObject.VideoObjectPlane.top_field_first; + picture_param->vop_fields.bits.alternate_vertical_scan_flag + = parser->info.VisualObject.VideoObject.VideoObjectPlane.alternate_vertical_scan_flag; + + picture_param->vop_fcode_forward + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_forward; + picture_param->vop_fcode_backward + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_backward; + picture_param->vop_time_increment_resolution + = parser->info.VisualObject.VideoObject.vop_time_increment_resolution; + + // short header related + picture_param->num_gobs_in_vop + = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_gobs_in_vop; + picture_param->num_macroblocks_in_gob + = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_macroblocks_in_gob; + + // for direct mode prediction + picture_param->TRB = parser->info.VisualObject.VideoObject.TRB; + picture_param->TRD = parser->info.VisualObject.VideoObject.TRD; } -void vbp_fill_iq_matrix_buffer(vbp_context *pcontext) +void vbp_fill_iq_matrix_buffer(vbp_context *pcontext) { - viddec_mp4_parser_t *parser = - (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + viddec_mp4_parser_t *parser = + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; - mp4_VOLQuant_mat_t *quant_mat_info = - &(parser->info.VisualObject.VideoObject.quant_mat_info); + mp4_VOLQuant_mat_t *quant_mat_info = + &(parser->info.VisualObject.VideoObject.quant_mat_info); - VAIQMatrixBufferMPEG4 *iq_matrix = NULL; + VAIQMatrixBufferMPEG4 *iq_matrix = NULL; - iq_matrix = &(query_data->iq_matrix_buffer); + iq_matrix = &(query_data->iq_matrix_buffer); - iq_matrix->load_intra_quant_mat = quant_mat_info->load_intra_quant_mat; - iq_matrix->load_non_intra_quant_mat - = quant_mat_info->load_nonintra_quant_mat; - memcpy(iq_matrix->intra_quant_mat, quant_mat_info->intra_quant_mat, 64); - memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat, 64); + iq_matrix->load_intra_quant_mat = quant_mat_info->load_intra_quant_mat; + iq_matrix->load_non_intra_quant_mat + = quant_mat_info->load_nonintra_quant_mat; + memcpy(iq_matrix->intra_quant_mat, quant_mat_info->intra_quant_mat, 64); + memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat, 64); } -void vbp_on_vop_mp42(vbp_context *pcontext, int list_index) +void vbp_on_vop_mp42(vbp_context *pcontext, int list_index) { - vbp_fill_codec_data(pcontext); - vbp_fill_picture_param(pcontext, 1); - vbp_fill_iq_matrix_buffer(pcontext); - vbp_fill_slice_data(pcontext, list_index); + vbp_fill_codec_data(pcontext); + vbp_fill_picture_param(pcontext, 1); + vbp_fill_iq_matrix_buffer(pcontext); + vbp_fill_slice_data(pcontext, list_index); } -void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index) +void vbp_on_vop_svh_mp42(vbp_context *pcontext, 
int list_index) { - vbp_fill_codec_data(pcontext); - vbp_fill_picture_param(pcontext, 1); - vbp_fill_iq_matrix_buffer(pcontext); - vbp_fill_slice_data(pcontext, list_index); + vbp_fill_codec_data(pcontext); + vbp_fill_picture_param(pcontext, 1); + vbp_fill_iq_matrix_buffer(pcontext); + vbp_fill_slice_data(pcontext, list_index); } uint32 vbp_get_sc_pos_mp42( - uint8 *buf, - uint32 length, - uint32 *sc_end_pos, - uint8 *is_normal_sc, - uint8 *resync_marker) + uint8 *buf, + uint32 length, + uint32 *sc_end_pos, + uint8 *is_normal_sc, + uint8 *resync_marker) { - uint8 *ptr = buf; - uint32 size; - uint32 data_left = 0, phase = 0, ret = 0; - size = 0; - - data_left = length; - *sc_end_pos = -1; - - /* parse until there is more data and start code not found */ - while ((data_left > 0) && (phase < 3)) - { - /* Check if we are byte aligned & phase=0, if thats the case we can check - work at a time instead of byte*/ - if (((((uint32) ptr) & 0x3) == 0) && (phase == 0)) - { - while (data_left > 3) - { - uint32 data; - char mask1 = 0, mask2 = 0; - - data = *((uint32 *) ptr); + uint8 *ptr = buf; + uint32 size; + uint32 data_left = 0, phase = 0, ret = 0; + size = 0; + + data_left = length; + *sc_end_pos = -1; + + /* parse until there is more data and start code not found */ + while ((data_left > 0) && (phase < 3)) + { + /* Check if we are byte aligned & phase=0, if thats the case we can check + work at a time instead of byte*/ + if (((((uint32) ptr) & 0x3) == 0) && (phase == 0)) + { + while (data_left > 3) + { + uint32 data; + char mask1 = 0, mask2 = 0; + + data = *((uint32 *) ptr); #ifndef MFDBIGENDIAN - data = SWAP_WORD(data); + data = SWAP_WORD(data); #endif - mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0)); - mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1)); - /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need - two consecutive zero bytes for a start code pattern */ - if (mask1 && mask2) - { - /* Success so skip 4 bytes and start over */ - ptr += 4; - size += 4; - data_left -= 4; - continue; - } - else - { - break; - } - } - } - - /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected - two zero bytes in the word so we look one byte at a time*/ - if (data_left > 0) - { - if (*ptr == FIRST_STARTCODE_BYTE) - { - /* Phase can be 3 only if third start code byte is found */ - phase++; - ptr++; - size++; - data_left--; - if (phase > 2) - { - phase = 2; - - if ((((uint32) ptr) & 0x3) == 0) - { - while (data_left > 3) - { - if (*((uint32 *) ptr) != 0) - { - break; - } - ptr += 4; - size += 4; - data_left -= 4; - } - } - } - } - else - { - uint8 normal_sc = 0, short_sc = 0; - if (phase == 2) - { - normal_sc = (*ptr == THIRD_STARTCODE_BYTE); - short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC)); - - *is_normal_sc = normal_sc; - - // at least 16-bit 0, may be GOB start code or - // resync marker. 
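/*
 * A minimal sketch of the aligned fast path used in this scanner: a
 * 00 00 01 start-code prefix needs two consecutive zero bytes, so if both
 * the 2nd and 4th bytes of an aligned 32-bit word are non-zero, no zero
 * pair can begin anywhere in it and four bytes can be skipped at once.
 * Assuming FIRST_STARTCODE_BYTE is 0x00 and the SC_BYTE_MASK macros select
 * those two bytes (the parser normalizes byte order with SWAP_WORD first),
 * the test reduces to:
 */
#include <stdint.h>
#include <string.h>
static int word_may_hold_sc_prefix(const uint8_t *p /* 4-byte aligned */)
{
    uint32_t w;
    memcpy(&w, p, 4);                    /* aliasing-safe 32-bit load */
    /* on a little-endian host, memory bytes 1 and 3 sit under these masks */
    int b1_nonzero = (w & 0x0000ff00u) != 0;
    int b3_nonzero = (w & 0xff000000u) != 0;
    return !(b1_nonzero && b3_nonzero);  /* 0 means: safe to skip 4 bytes */
}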
- *resync_marker = 1; - } - - if (!(normal_sc | short_sc)) - { - phase = 0; - } - else - { - /* Match for start code so update context with byte position */ - *sc_end_pos = size; - phase = 3; - } - ptr++; - size++; - data_left--; - } - } - } - if ((data_left > 0) && (phase == 3)) - { - (*sc_end_pos)++; - phase++; - ret = 1; - } - - // Return 1 only if phase is 4, else always return 0 - return ret; + mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0)); + mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1)); + /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need + two consecutive zero bytes for a start code pattern */ + if (mask1 && mask2) + { + /* Success so skip 4 bytes and start over */ + ptr += 4; + size += 4; + data_left -= 4; + continue; + } + else + { + break; + } + } + } + + /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected + two zero bytes in the word so we look one byte at a time*/ + if (data_left > 0) + { + if (*ptr == FIRST_STARTCODE_BYTE) + { + /* Phase can be 3 only if third start code byte is found */ + phase++; + ptr++; + size++; + data_left--; + if (phase > 2) + { + phase = 2; + + if ((((uint32) ptr) & 0x3) == 0) + { + while (data_left > 3) + { + if (*((uint32 *) ptr) != 0) + { + break; + } + ptr += 4; + size += 4; + data_left -= 4; + } + } + } + } + else + { + uint8 normal_sc = 0, short_sc = 0; + if (phase == 2) + { + normal_sc = (*ptr == THIRD_STARTCODE_BYTE); + short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC)); + + *is_normal_sc = normal_sc; + + // at least 16-bit 0, may be GOB start code or + // resync marker. + *resync_marker = 1; + } + + if (!(normal_sc | short_sc)) + { + phase = 0; + } + else + { + /* Match for start code so update context with byte position */ + *sc_end_pos = size; + phase = 3; + } + ptr++; + size++; + data_left--; + } + } + } + if ((data_left > 0) && (phase == 3)) + { + (*sc_end_pos)++; + phase++; + ret = 1; + } + + // Return 1 only if phase is 4, else always return 0 + return ret; } uint32 vbp_macroblock_number_length_mp42(uint32 numOfMbs) { - uint32 length = 0; - numOfMbs--; - do - { - numOfMbs >>= 1; - length++; - } - while (numOfMbs); - return length; + uint32 length = 0; + numOfMbs--; + do + { + numOfMbs >>= 1; + length++; + } + while (numOfMbs); + return length; } -uint32 vbp_parse_video_packet_header_mp42( - void *parent, - viddec_mp4_parser_t *parser_cxt, - uint16_t *quant_scale, - uint32 *macroblock_number) +uint32 vbp_parse_video_packet_header_mp42( + void *parent, + viddec_mp4_parser_t *parser_cxt, + uint16_t *quant_scale, + uint32 *macroblock_number) { - uint32 ret = VBP_DATA; - mp4_Info_t *pInfo = &(parser_cxt->info); - mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject); - mp4_VideoObjectPlane_t *vidObjPlane = - &(pInfo->VisualObject.VideoObject.VideoObjectPlane); - - uint32 code = 0; - int32_t getbits = 0; - - uint16_t _quant_scale = 0; - uint32 _macroblock_number = 0; - uint32 header_extension_codes = 0; - uint8 vop_coding_type = vidObjPlane->vop_coding_type; - - if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) - { - return VBP_DATA; - } - - do - { - // get macroblock_number - uint16_t mbs_x = (vidObjLay->video_object_layer_width + 15) >> 4; - uint16_t mbs_y = (vidObjLay->video_object_layer_height + 15) >> 4; - uint32 length = vbp_macroblock_number_length_mp42(mbs_x * mbs_y); - - getbits = viddec_pm_get_bits(parent, &code, length); - BREAK_GETBITS_FAIL(getbits, ret); - - 
_macroblock_number = code; - - // quant_scale - if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) - { - getbits = viddec_pm_get_bits(parent, &code, vidObjLay->quant_precision); - BREAK_GETBITS_FAIL(getbits, ret); - _quant_scale = code; - } - - // header_extension_codes - if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) - { - getbits = viddec_pm_get_bits(parent, &code, 1); - BREAK_GETBITS_FAIL(getbits, ret); - header_extension_codes = code; - } - - if (header_extension_codes) - { - // modulo time base - do - { - getbits = viddec_pm_get_bits(parent, &code, 1); - BREAK_GETBITS_FAIL(getbits, ret); - } while (code); - - // marker_bit - getbits = viddec_pm_get_bits(parent, &code, 1); - BREAK_GETBITS_FAIL(getbits, ret); - - // vop_time_increment - uint32 numbits = 0; - numbits = vidObjLay->vop_time_increment_resolution_bits; - if (numbits == 0) - { - // ?? - numbits = 1; - } - getbits = viddec_pm_get_bits(parent, &code, numbits); - BREAK_GETBITS_FAIL(getbits, ret); - vidObjPlane->vop_time_increment = code; - - - // marker_bit - getbits = viddec_pm_get_bits(parent, &code, 1); - BREAK_GETBITS_FAIL(getbits, ret); - - // vop_coding_type - getbits = viddec_pm_get_bits(parent, &code, 2); - BREAK_GETBITS_FAIL(getbits, ret); - - vop_coding_type = code & 0x3; - vidObjPlane->vop_coding_type = vop_coding_type; - - - if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) - { - // intra_dc_vlc_thr - getbits = viddec_pm_get_bits(parent, &code, 3); - BREAK_GETBITS_FAIL(getbits, ret); - - vidObjPlane->intra_dc_vlc_thr = code; - if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && - (vop_coding_type == MP4_VOP_TYPE_S) && - (vidObjLay->sprite_info.no_of_sprite_warping_points> 0)) + uint32 ret = VBP_DATA; + mp4_Info_t *pInfo = &(parser_cxt->info); + mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject); + mp4_VideoObjectPlane_t *vidObjPlane = + &(pInfo->VisualObject.VideoObject.VideoObjectPlane); + + uint32 code = 0; + int32_t getbits = 0; + + uint16_t _quant_scale = 0; + uint32 _macroblock_number = 0; + uint32 header_extension_codes = 0; + uint8 vop_coding_type = vidObjPlane->vop_coding_type; + + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) + { + return VBP_DATA; + } + + do + { + // get macroblock_number + uint16_t mbs_x = (vidObjLay->video_object_layer_width + 15) >> 4; + uint16_t mbs_y = (vidObjLay->video_object_layer_height + 15) >> 4; + uint32 length = vbp_macroblock_number_length_mp42(mbs_x * mbs_y); + + getbits = viddec_pm_get_bits(parent, &code, length); + BREAK_GETBITS_FAIL(getbits, ret); + + _macroblock_number = code; + + // quant_scale + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) + { + getbits = viddec_pm_get_bits(parent, &code, vidObjLay->quant_precision); + BREAK_GETBITS_FAIL(getbits, ret); + _quant_scale = code; + } + + // header_extension_codes + if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) + { + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_FAIL(getbits, ret); + header_extension_codes = code; + } + + if (header_extension_codes) + { + // modulo time base + do + { + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_FAIL(getbits, ret); + } while (code); + + // marker_bit + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_FAIL(getbits, ret); + + // vop_time_increment + uint32 numbits = 0; + numbits = vidObjLay->vop_time_increment_resolution_bits; + if (numbits == 0) + { + // ?? 
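/*
 * Note: both variable-width reads in this header are derived, not fixed.
 * The macroblock_number field uses vbp_macroblock_number_length_mp42(),
 * which is effectively ceil(log2(total_mbs)) -- a CIF VOP of 22x18 = 396
 * macroblocks needs 9 bits -- and vop_time_increment is read with
 * vop_time_increment_resolution_bits, computed the same way from the VOL's
 * vop_time_increment_resolution. The "numbits = 1" fallback below only
 * guards a resolution of 0 or 1, where the derived width would be 0.
 * A freestanding sketch of the shared computation:
 *
 *   static unsigned int bits_for_range(unsigned int count)
 *   {
 *       // bits needed to code values 0 .. count-1; mirrors the
 *       // numOfMbs-- then shift-and-count loop above
 *       unsigned int bits = 0;
 *       count--;
 *       do {
 *           count >>= 1;
 *           bits++;
 *       } while (count);
 *       return bits;  // bits_for_range(396) == 9, bits_for_range(30) == 5
 *   }
 */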
+ numbits = 1; + } + getbits = viddec_pm_get_bits(parent, &code, numbits); + BREAK_GETBITS_FAIL(getbits, ret); + vidObjPlane->vop_time_increment = code; + + + // marker_bit + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_FAIL(getbits, ret); + + // vop_coding_type + getbits = viddec_pm_get_bits(parent, &code, 2); + BREAK_GETBITS_FAIL(getbits, ret); + + vop_coding_type = code & 0x3; + vidObjPlane->vop_coding_type = vop_coding_type; + + + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) + { + // intra_dc_vlc_thr + getbits = viddec_pm_get_bits(parent, &code, 3); + BREAK_GETBITS_FAIL(getbits, ret); + + vidObjPlane->intra_dc_vlc_thr = code; + if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && + (vop_coding_type == MP4_VOP_TYPE_S) && + (vidObjLay->sprite_info.no_of_sprite_warping_points> 0)) + { + if (vbp_sprite_trajectory_mp42(parent, vidObjLay, + vidObjPlane) != VBP_OK) + { + break; + } + } + + if (vidObjLay->reduced_resolution_vop_enable && + (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) && + ((vop_coding_type == MP4_VOP_TYPE_I) || + (vop_coding_type == MP4_VOP_TYPE_P))) { - if (vbp_sprite_trajectory_mp42(parent, vidObjLay, - vidObjPlane) != VBP_OK) - { - break; - } - } - - if (vidObjLay->reduced_resolution_vop_enable && - (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) && - ((vop_coding_type == MP4_VOP_TYPE_I) || - (vop_coding_type == MP4_VOP_TYPE_P))) + // vop_reduced_resolution + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_FAIL(getbits, ret); + } + + if (vop_coding_type != MP4_VOP_TYPE_I) + { + // vop_fcode_forward + getbits = viddec_pm_get_bits(parent, &code, 3); + BREAK_GETBITS_FAIL(getbits, ret); + vidObjPlane->vop_fcode_forward = code; + } + + if (vop_coding_type == MP4_VOP_TYPE_B) { - // vop_reduced_resolution - getbits = viddec_pm_get_bits(parent, &code, 1); - BREAK_GETBITS_FAIL(getbits, ret); - } - - if (vop_coding_type != MP4_VOP_TYPE_I) - { - // vop_fcode_forward - getbits = viddec_pm_get_bits(parent, &code, 3); - BREAK_GETBITS_FAIL(getbits, ret); - vidObjPlane->vop_fcode_forward = code; - } - - if (vop_coding_type == MP4_VOP_TYPE_B) - { - // vop_fcode_backward - getbits = viddec_pm_get_bits(parent, &code, 3); - BREAK_GETBITS_FAIL(getbits, ret); - vidObjPlane->vop_fcode_backward = code; - } - } - } - - if (vidObjLay->newpred_enable) - { - // New pred mode not supported in HW, but, does libva support this? - ret = VBP_DATA; - break; - } - - *quant_scale = _quant_scale; - *macroblock_number = _macroblock_number; - - ret = VBP_OK; - } - while (0); - return ret; + // vop_fcode_backward + getbits = viddec_pm_get_bits(parent, &code, 3); + BREAK_GETBITS_FAIL(getbits, ret); + vidObjPlane->vop_fcode_backward = code; + } + } + } + + if (vidObjLay->newpred_enable) + { + // New pred mode not supported in HW, but, does libva support this? 
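/*
 * Note on the error-handling idiom: the body of this function runs inside
 * do { ... } while (0) so that BREAK_GETBITS_FAIL can bail out of the
 * whole parse on a short read. Its exact definition lives in the vbp
 * headers; going by the "return VBP_DATA" comments at its call sites, a
 * plausible expansion is:
 *
 *   #define BREAK_GETBITS_FAIL(getbits, ret) \
 *       if ((getbits) == -1)                 \
 *       {                                    \
 *           ret = VBP_DATA;                  \
 *           break;                           \
 *       }
 *
 * which turns every viddec_pm_get_bits/viddec_pm_peek_bits call into a
 * one-line guard instead of an if/goto ladder.
 */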
+ ret = VBP_DATA; + break; + } + + *quant_scale = _quant_scale; + *macroblock_number = _macroblock_number; + + ret = VBP_OK; + } + while (0); + return ret; } uint32 vbp_resync_marker_Length_mp42(viddec_mp4_parser_t *parser_cxt) { - mp4_Info_t *pInfo = &(parser_cxt->info); - mp4_VideoObjectPlane_t *vidObjPlane = - &(pInfo->VisualObject.VideoObject.VideoObjectPlane); - - uint32 resync_marker_length = 0; - if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) - { - resync_marker_length = 17; - } - else if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B) - { - uint8 fcode_max = vidObjPlane->vop_fcode_forward; - if (fcode_max < vidObjPlane->vop_fcode_backward) - { - fcode_max = vidObjPlane->vop_fcode_backward; - } - resync_marker_length = 16 + fcode_max; - - // resync_marker is max(15+fcode,17) zeros followed by a one - if (resync_marker_length < 18) - resync_marker_length = 18; - } - else - { - resync_marker_length = 16 + vidObjPlane->vop_fcode_forward; - } - return resync_marker_length; + mp4_Info_t *pInfo = &(parser_cxt->info); + mp4_VideoObjectPlane_t *vidObjPlane = + &(pInfo->VisualObject.VideoObject.VideoObjectPlane); + + uint32 resync_marker_length = 0; + if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) + { + resync_marker_length = 17; + } + else if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B) + { + uint8 fcode_max = vidObjPlane->vop_fcode_forward; + if (fcode_max < vidObjPlane->vop_fcode_backward) + { + fcode_max = vidObjPlane->vop_fcode_backward; + } + resync_marker_length = 16 + fcode_max; + + // resync_marker is max(15+fcode,17) zeros followed by a one + if (resync_marker_length < 18) + resync_marker_length = 18; + } + else + { + resync_marker_length = 16 + vidObjPlane->vop_fcode_forward; + } + return resync_marker_length; } uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index) { - uint32 ret = VBP_OK; + uint32 ret = VBP_OK; - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; - viddec_pm_cxt_t *parent = pcontext->parser_cxt; - viddec_mp4_parser_t *parser_cxt = - (viddec_mp4_parser_t *) &(parent->codec_data[0]); + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + viddec_pm_cxt_t *parent = pcontext->parser_cxt; + viddec_mp4_parser_t *parser_cxt = + (viddec_mp4_parser_t *) &(parent->codec_data[0]); - vbp_picture_data_mp42 *picture_data = vbp_get_mp42_picture_data(query_data); - vbp_slice_data_mp42 *slice_data = &(picture_data->slice_data); - VASliceParameterBufferMPEG4* slice_param = &(slice_data->slice_param); + vbp_picture_data_mp42 *picture_data = vbp_get_mp42_picture_data(query_data); + vbp_slice_data_mp42 *slice_data = &(picture_data->slice_data); + VASliceParameterBufferMPEG4* slice_param = &(slice_data->slice_param); - uint8 is_emul = 0; - uint32 bit_offset = 0; - uint32 byte_offset = 0; + uint8 is_emul = 0; + uint32 bit_offset = 0; + uint32 byte_offset = 0; - // The offsets are relative to parent->parse_cubby.buf - viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); + // The offsets are relative to parent->parse_cubby.buf + viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); - slice_data->buffer_addr = parent->parse_cubby.buf; + slice_data->buffer_addr = parent->parse_cubby.buf; - slice_data->slice_offset = byte_offset - + parent->list.data[list_index].stpos; - slice_data->slice_size = parent->list.data[list_index].edpos - - parent->list.data[list_index].stpos - byte_offset; + slice_data->slice_offset = byte_offset + + parent->list.data[list_index].stpos; + slice_data->slice_size = 
parent->list.data[list_index].edpos + - parent->list.data[list_index].stpos - byte_offset; - slice_param->slice_data_size = slice_data->slice_size; - slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; - slice_param->slice_data_offset = 0; - slice_param->macroblock_offset = bit_offset; - slice_param->macroblock_number = 0; - slice_param->quant_scale - = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant; + slice_param->slice_data_size = slice_data->slice_size; + slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + slice_param->slice_data_offset = 0; + slice_param->macroblock_offset = bit_offset; + slice_param->macroblock_number = 0; + slice_param->quant_scale + = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant; - return ret; + return ret; } -uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) +uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) { - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; - viddec_pm_cxt_t *parent = pcontext->parser_cxt; - viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]); + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + viddec_pm_cxt_t *parent = pcontext->parser_cxt; + viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]); - vbp_picture_data_mp42 *picture_data = NULL; - vbp_slice_data_mp42 *slice_data = NULL; - VASliceParameterBufferMPEG4* slice_param = NULL; + vbp_picture_data_mp42 *picture_data = NULL; + vbp_slice_data_mp42 *slice_data = NULL; + VASliceParameterBufferMPEG4* slice_param = NULL; - uint32 ret = VBP_OK; + uint32 ret = VBP_OK; - uint8 is_emul = 0; - uint32 bit_offset = 0; - uint32 byte_offset = 0; + uint8 is_emul = 0; + uint32 bit_offset = 0; + uint32 byte_offset = 0; - uint32 code = 0; - int32_t getbits = 0; - uint32 resync_marker_length = 0; + uint32 code = 0; + int32_t getbits = 0; + uint32 resync_marker_length = 0; #ifdef VBP_TRACE - uint32 list_size_at_index = parent->list.data[list_index].edpos - - parent->list.data[list_index].stpos; + uint32 list_size_at_index = parent->list.data[list_index].edpos + - parent->list.data[list_index].stpos; - VTRACE ("list_index = %d list_size_at_index = %d\n", list_index, - list_size_at_index); + VTRACE ("list_index = %d list_size_at_index = %d\n", list_index, + list_size_at_index); - VTRACE ("list_index = %d edpos = %d stpos = %d\n", list_index, - parent->list.data[list_index].edpos, - parent->list.data[list_index].stpos); + VTRACE ("list_index = %d edpos = %d stpos = %d\n", list_index, + parent->list.data[list_index].edpos, + parent->list.data[list_index].stpos); #endif - /* The offsets are relative to parent->parse_cubby.buf */ - viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); + /* The offsets are relative to parent->parse_cubby.buf */ + viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); #if 0 - if (is_emul) { - g_print("*** emul != 0\n"); - /*byte_offset += 1;*/ - } + if (is_emul) { + g_print("*** emul != 0\n"); + /*byte_offset += 1;*/ + } #endif - picture_data = vbp_get_mp42_picture_data(query_data); - slice_data = &(picture_data->slice_data); - slice_param = &(slice_data->slice_param); + picture_data = vbp_get_mp42_picture_data(query_data); + slice_data = &(picture_data->slice_data); + slice_param = &(slice_data->slice_param); - slice_data->buffer_addr = parent->parse_cubby.buf; + slice_data->buffer_addr = parent->parse_cubby.buf; - slice_data->slice_offset = byte_offset - + 
parent->list.data[list_index].stpos; - slice_data->slice_size = parent->list.data[list_index].edpos - - parent->list.data[list_index].stpos - byte_offset; + slice_data->slice_offset = byte_offset + + parent->list.data[list_index].stpos; + slice_data->slice_size = parent->list.data[list_index].edpos + - parent->list.data[list_index].stpos - byte_offset; - slice_param->slice_data_size = slice_data->slice_size; - slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; - slice_param->slice_data_offset = 0; - slice_param->macroblock_offset = bit_offset; - slice_param->macroblock_number = 0; - slice_param->quant_scale - = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane.vop_quant; + slice_param->slice_data_size = slice_data->slice_size; + slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + slice_param->slice_data_offset = 0; + slice_param->macroblock_offset = bit_offset; + slice_param->macroblock_number = 0; + slice_param->quant_scale + = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane.vop_quant; - if (parser_cxt->info.VisualObject.VideoObject.resync_marker_disable) - { - // no resync_marker + if (parser_cxt->info.VisualObject.VideoObject.resync_marker_disable) + { + // no resync_marker return VBP_OK; } - // scan for resync_marker - viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); - if (bit_offset) - { - // byte-aligned - getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset); - if (getbits == -1) - { - return VBP_DATA; - } - } + // scan for resync_marker + viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); + if (bit_offset) + { + // byte-aligned + getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset); + if (getbits == -1) + { + return VBP_DATA; + } + } // get resync_marker_length - resync_marker_length = vbp_resync_marker_Length_mp42(parser_cxt); + resync_marker_length = vbp_resync_marker_Length_mp42(parser_cxt); uint16_t quant_scale = 0; uint32 macroblock_number = 0; - while (1) - { - getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length); + while (1) + { + getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length); - // return VBP_OK as resync_marker may not be present - BREAK_GETBITS_FAIL(getbits, ret); + // return VBP_OK as resync_marker may not be present + BREAK_GETBITS_FAIL(getbits, ret); - if (code != 1) - { - getbits = viddec_pm_get_bits(parent, &code, 8); - BREAK_GETBITS_FAIL(getbits, ret); - continue; - } + if (code != 1) + { + getbits = viddec_pm_get_bits(parent, &code, 8); + BREAK_GETBITS_FAIL(getbits, ret); + continue; + } // We found resync_marker - viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); + viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); // update slice data as we found resync_marker - slice_data->slice_size -= (parent->list.data[list_index].edpos - - parent->list.data[list_index].stpos - byte_offset); - slice_param->slice_data_size = slice_data->slice_size; + slice_data->slice_size -= (parent->list.data[list_index].edpos + - parent->list.data[list_index].stpos - byte_offset); + slice_param->slice_data_size = slice_data->slice_size; // skip resync marker - getbits = viddec_pm_get_bits(parent, &code, resync_marker_length); + getbits = viddec_pm_get_bits(parent, &code, resync_marker_length); + + // return VBP_DATA, this should never happen! + BREAK_GETBITS_FAIL(getbits, ret); - // return VBP_DATA, this should never happen! 
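/*
 * A freestanding sketch of the scan loop above: the resync marker is
 * resync_marker_length-1 zero bits followed by a one, so a peek of exactly
 * 1 at a byte-aligned position means a marker starts there; anything else
 * consumes one byte and retries. Callback names here are hypothetical; the
 * real loop uses viddec_pm_peek_bits and viddec_pm_get_bits on the parser
 * context.
 */
typedef int (*peek_bits_fn)(void *cxt, unsigned int *out, unsigned int nbits);
typedef int (*skip_bits_fn)(void *cxt, unsigned int nbits);

static int scan_for_resync_marker(void *cxt, unsigned int marker_len,
                                  peek_bits_fn peek, skip_bits_fn skip)
{
    unsigned int code;
    while (peek(cxt, &code, marker_len) != -1)
    {
        if (code == 1)
            return 1;      /* marker begins at the current position */
        skip(cxt, 8);      /* advance one byte, stay byte-aligned */
    }
    return 0;              /* buffer exhausted without a marker */
}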
- BREAK_GETBITS_FAIL(getbits, ret); - - // parse video_packet_header - ret = vbp_parse_video_packet_header_mp42(parent, parser_cxt, - &quant_scale, ¯oblock_number); + // parse video_packet_header + ret = vbp_parse_video_packet_header_mp42(parent, parser_cxt, + &quant_scale, ¯oblock_number); if (ret != VBP_OK) { ETRACE("Failed to parse video packet header.\n"); return ret; } - + // new_picture_flag = 0, this is not the first slice of a picture vbp_fill_picture_param(pcontext, 0); - + picture_data = vbp_get_mp42_picture_data(query_data); slice_data = &(picture_data->slice_data); slice_param = &(slice_data->slice_param); - - - viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); - - slice_data->buffer_addr = parent->parse_cubby.buf; - - slice_data->slice_offset = byte_offset - + parent->list.data[list_index].stpos; - slice_data->slice_size = parent->list.data[list_index].edpos - - parent->list.data[list_index].stpos - byte_offset; - - slice_param->slice_data_size = slice_data->slice_size; - slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; - slice_param->slice_data_offset = 0; - slice_param->macroblock_offset = bit_offset; - slice_param->macroblock_number = macroblock_number; - slice_param->quant_scale = quant_scale; - - if (bit_offset) - { - // byte-align parsing position - getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset); - if (getbits == -1) - { + + + viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); + + slice_data->buffer_addr = parent->parse_cubby.buf; + + slice_data->slice_offset = byte_offset + + parent->list.data[list_index].stpos; + slice_data->slice_size = parent->list.data[list_index].edpos + - parent->list.data[list_index].stpos - byte_offset; + + slice_param->slice_data_size = slice_data->slice_size; + slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + slice_param->slice_data_offset = 0; + slice_param->macroblock_offset = bit_offset; + slice_param->macroblock_number = macroblock_number; + slice_param->quant_scale = quant_scale; + + if (bit_offset) + { + // byte-align parsing position + getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset); + if (getbits == -1) + { ETRACE("Failed to align parser to byte position.\n"); - return VBP_DATA; - } - } + return VBP_DATA; + } + } - } + } - return VBP_OK; + return VBP_OK; } uint32 vbp_process_video_packet_mp42(vbp_context *pcontext) { - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; - viddec_pm_cxt_t *parent = pcontext->parser_cxt; - viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]); + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + viddec_pm_cxt_t *parent = pcontext->parser_cxt; + viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]); uint32 code = 0; int32_t getbits = 0; - - uint32 ret = VBP_DATA; + + uint32 ret = VBP_DATA; - // setup bitstream parser - parent->getbits.list = &(parent->list); - - parent->getbits.bstrm_buf.buf = parent->parse_cubby.buf; + // setup bitstream parser + parent->getbits.list = &(parent->list); + + parent->getbits.bstrm_buf.buf = parent->parse_cubby.buf; parent->getbits.bstrm_buf.buf_index = 0; parent->getbits.bstrm_buf.buf_st = 0; - parent->getbits.bstrm_buf.buf_end = parent->parse_cubby.size; + parent->getbits.bstrm_buf.buf_end = parent->parse_cubby.size; parent->getbits.bstrm_buf.buf_bitoff = 0; - - parent->getbits.au_pos = 0; + + parent->getbits.au_pos = 0; parent->getbits.list_off = 0; parent->getbits.phase = 0; 
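/*
 * Note: vbp_resync_marker_Length_mp42() above encodes the MPEG-4 resync
 * marker sizing. The marker is N zero bits followed by a one, where:
 *   I-VOP: N = 16, so 17 bits in total;
 *   P-VOP: N = 15 + vop_fcode_forward, so 16 + fcode bits (fcode 1 -> 17);
 *   B-VOP: same with max(fcode_fwd, fcode_bwd), clamped so the total is
 *          never under 18 ("max(15+fcode,17) zeros followed by a one").
 * That is also why the probe loop below first strips 16 guaranteed zero
 * bits and then reads at most 7 more bits looking for the terminating one:
 * markers span 17..23 bits in total.
 */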
parent->getbits.emulation_byte_counter = 0; - + parent->list.start_offset = 0; parent->list.end_offset = parent->parse_cubby.size; parent->list.total_bytes = parent->parse_cubby.size; - - + + // skip leading zero-byte while (code == 0) { getbits = viddec_pm_get_bits(parent, &code, 8); - BREAK_GETBITS_FAIL(getbits, ret); - getbits = viddec_pm_peek_bits(parent, &code, 8); - BREAK_GETBITS_FAIL(getbits, ret); + BREAK_GETBITS_FAIL(getbits, ret); + getbits = viddec_pm_peek_bits(parent, &code, 8); + BREAK_GETBITS_FAIL(getbits, ret); } if (getbits != 0) - { + { return VBP_DATA; } - + // resync-marker is represented as 17-23 bits. (16-22 bits of 0) // as 16-bit '0' has been skipped, we try to parse buffer bit by bit // until bit 1 is encounted or up to 7 bits are parsed. @@ -1201,36 +1201,36 @@ uint32 vbp_process_video_packet_mp42(vbp_context *pcontext) while (code == 0 && count < 7) { getbits = viddec_pm_get_bits(parent, &code, 1); - BREAK_GETBITS_FAIL(getbits, ret); - count++; + BREAK_GETBITS_FAIL(getbits, ret); + count++; } - + if (code == 0 || getbits != 0) { ETRACE("no resync-marker in the buffer.\n"); return ret; } - // resync marker is skipped - uint16_t quant_scale = 0; - uint32 macroblock_number = 0; + // resync marker is skipped + uint16_t quant_scale = 0; + uint32 macroblock_number = 0; - // parse video_packet_header - vbp_parse_video_packet_header_mp42(parent, parser_cxt, - &quant_scale, ¯oblock_number); + // parse video_packet_header + vbp_parse_video_packet_header_mp42(parent, parser_cxt, + &quant_scale, ¯oblock_number); // new_picture_flag = 0, this is not the first slice of a picture vbp_fill_picture_param(pcontext, 0); - - vbp_picture_data_mp42 *picture_data = NULL; - vbp_slice_data_mp42 *slice_data = NULL; - VASliceParameterBufferMPEG4* slice_param = NULL; - - picture_data = vbp_get_mp42_picture_data(query_data); - slice_data = &(picture_data->slice_data); - slice_param = &(slice_data->slice_param); - ret = vbp_process_slices_mp42(pcontext, 0); + vbp_picture_data_mp42 *picture_data = NULL; + vbp_slice_data_mp42 *slice_data = NULL; + VASliceParameterBufferMPEG4* slice_param = NULL; + + picture_data = vbp_get_mp42_picture_data(query_data); + slice_data = &(picture_data->slice_data); + slice_param = &(slice_data->slice_param); + + ret = vbp_process_slices_mp42(pcontext, 0); // update slice's QP and macro_block number as it is set to 0 by default. slice_param->macroblock_number = macroblock_number; @@ -1238,180 +1238,180 @@ uint32 vbp_process_video_packet_mp42(vbp_context *pcontext) // VOP must be coded! picture_data->vop_coded = 1; - return ret; + return ret; } static inline uint32 vbp_sprite_dmv_length_mp42( - void * parent, - int32_t *dmv_length) + void * parent, + int32_t *dmv_length) { - uint32 code, skip; - int32_t getbits = 0; - uint32 ret = VBP_DATA; - *dmv_length = 0; - skip = 3; - do - { - getbits = viddec_pm_peek_bits(parent, &code, skip); - BREAK_GETBITS_FAIL(getbits, ret); - - if (code == 7) - { - viddec_pm_skip_bits(parent, skip); - getbits = viddec_pm_peek_bits(parent, &code, 9); - BREAK_GETBITS_FAIL(getbits, ret); - - skip = 1; - while ((code & 256) != 0) - { - // count number of 1 bits - code <<= 1; - skip++; - } - *dmv_length = 5 + skip; - } - else - { - skip = (code <= 1) ? 
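/* Note: this is the sprite-trajectory dmv_length VLC from the MPEG-4
 * spec, paraphrased: a 3-bit peek of 7 (binary 111) selects the long
 * form, where the run of further 1-bits extends the length to 5 + run;
 * any other prefix is a short 2- or 3-bit code whose value maps to a
 * small length via code - 1. The skip width chosen here (2 bits for
 * codes 0..1, 3 otherwise) keeps the reader aligned with the code
 * actually consumed; consult the spec table for the exact short codes. */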
2 : 3; - *dmv_length = code - 1; - } - viddec_pm_skip_bits(parent, skip); - ret = VBP_OK; - - } - while (0); - return ret; + uint32 code, skip; + int32_t getbits = 0; + uint32 ret = VBP_DATA; + *dmv_length = 0; + skip = 3; + do + { + getbits = viddec_pm_peek_bits(parent, &code, skip); + BREAK_GETBITS_FAIL(getbits, ret); + + if (code == 7) + { + viddec_pm_skip_bits(parent, skip); + getbits = viddec_pm_peek_bits(parent, &code, 9); + BREAK_GETBITS_FAIL(getbits, ret); + + skip = 1; + while ((code & 256) != 0) + { + // count number of 1 bits + code <<= 1; + skip++; + } + *dmv_length = 5 + skip; + } + else + { + skip = (code <= 1) ? 2 : 3; + *dmv_length = code - 1; + } + viddec_pm_skip_bits(parent, skip); + ret = VBP_OK; + + } + while (0); + return ret; } static inline uint32 vbp_sprite_trajectory_mp42( - void *parent, - mp4_VideoObjectLayer_t *vidObjLay, - mp4_VideoObjectPlane_t *vidObjPlane) + void *parent, + mp4_VideoObjectLayer_t *vidObjLay, + mp4_VideoObjectPlane_t *vidObjPlane) { - uint32 code, i; - int32_t dmv_length = 0, dmv_code = 0, getbits = 0; - uint32 ret = VBP_OK; - for (i = 0; i < (uint32) vidObjLay->sprite_info.no_of_sprite_warping_points; i++) - { - ret = VBP_DATA; - ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length); - if (ret != VBP_OK) - { - break; - } - if (dmv_length <= 0) - { - dmv_code = 0; - } - else - { - getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length); - BREAK_GETBITS_FAIL(getbits, ret); - dmv_code = (int32_t) code; - if ((dmv_code & (1 << (dmv_length - 1))) == 0) - { - dmv_code -= (1 << dmv_length) - 1; - } - } - getbits = viddec_pm_get_bits(parent, &code, 1); - BREAK_GETBITS_FAIL(getbits, ret); - if (code != 1) - { - ret = VBP_DATA; - break; - } - vidObjPlane->warping_mv_code_du[i] = dmv_code; - // TODO: create another inline function to avoid code duplication - ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length); - if (ret != VBP_OK) - { - break; - } - // reset return value in case early break + uint32 code, i; + int32_t dmv_length = 0, dmv_code = 0, getbits = 0; + uint32 ret = VBP_OK; + for (i = 0; i < (uint32) vidObjLay->sprite_info.no_of_sprite_warping_points; i++) + { ret = VBP_DATA; - if (dmv_length <= 0) - { - dmv_code = 0; - } - else - { - getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length); - BREAK_GETBITS_FAIL(getbits, ret); - dmv_code = (int32_t) code; - if ((dmv_code & (1 << (dmv_length - 1))) == 0) - { - dmv_code -= (1 << dmv_length) - 1; - } - } - getbits = viddec_pm_get_bits(parent, &code, 1); - BREAK_GETBITS_FAIL(getbits, ret); - if (code != 1) - { - break; - } - vidObjPlane->warping_mv_code_dv[i] = dmv_code; - - // set to VBP_OK - ret = VBP_OK; - - } - return ret; + ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length); + if (ret != VBP_OK) + { + break; + } + if (dmv_length <= 0) + { + dmv_code = 0; + } + else + { + getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length); + BREAK_GETBITS_FAIL(getbits, ret); + dmv_code = (int32_t) code; + if ((dmv_code & (1 << (dmv_length - 1))) == 0) + { + dmv_code -= (1 << dmv_length) - 1; + } + } + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_FAIL(getbits, ret); + if (code != 1) + { + ret = VBP_DATA; + break; + } + vidObjPlane->warping_mv_code_du[i] = dmv_code; + // TODO: create another inline function to avoid code duplication + ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length); + if (ret != VBP_OK) + { + break; + } + // reset return value in case early break + ret = VBP_DATA; + if (dmv_length <= 0) + { + dmv_code = 0; + } + else + { + getbits = 
viddec_pm_get_bits(parent, &code, (uint32) dmv_length); + BREAK_GETBITS_FAIL(getbits, ret); + dmv_code = (int32_t) code; + if ((dmv_code & (1 << (dmv_length - 1))) == 0) + { + dmv_code -= (1 << dmv_length) - 1; + } + } + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_FAIL(getbits, ret); + if (code != 1) + { + break; + } + vidObjPlane->warping_mv_code_dv[i] = dmv_code; + + // set to VBP_OK + ret = VBP_OK; + + } + return ret; } /* * free memory of vbp_data_mp42 structure and its members */ -uint32 vbp_free_query_data_mp42(vbp_context *pcontext) +uint32 vbp_free_query_data_mp42(vbp_context *pcontext) { - vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; vbp_picture_data_mp42* current = NULL; vbp_picture_data_mp42* next = NULL; - if (query_data) - { - current = query_data->picture_data; - while (current != NULL) - { - next = current->next_picture_data; - g_free(current); - current = next; - } - - g_free(query_data); - } - - pcontext->query_data = NULL; - return VBP_OK; + if (query_data) + { + current = query_data->picture_data; + while (current != NULL) + { + next = current->next_picture_data; + free(current); + current = next; + } + + free(query_data); + } + + pcontext->query_data = NULL; + return VBP_OK; } /* * Allocate memory for vbp_data_mp42 structure and all its members. */ -uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext) +uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext) { - vbp_data_mp42 *query_data; - pcontext->query_data = NULL; + vbp_data_mp42 *query_data; + pcontext->query_data = NULL; - query_data = g_try_new0(vbp_data_mp42, 1); - if (query_data == NULL) - { - goto cleanup; - } + query_data = g_try_new0(vbp_data_mp42, 1); + if (query_data == NULL) + { + goto cleanup; + } - pcontext->query_data = (void *) query_data; - query_data->picture_data = NULL; + pcontext->query_data = (void *) query_data; + query_data->picture_data = NULL; query_data->number_picture_data = 0; query_data->number_pictures = 0; - return VBP_OK; + return VBP_OK; cleanup: vbp_free_query_data_mp42(pcontext); - - return VBP_MEM; + + return VBP_MEM; } diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.c b/mix_vbp/viddec_fw/fw/parser/vbp_trace.c index d87bfd8..f75b9e7 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_trace.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.c @@ -13,15 +13,15 @@ void vbp_trace_util(const char* cat, const char* fun, int line, const char* format, ...) { - if (NULL == cat || NULL == fun || NULL == format) - return; - - printf("%s %s(#%d): ", cat, fun, line); - va_list args; - va_start(args, format); - vprintf(format, args); - va_end(args); - printf("\n"); + if (NULL == cat || NULL == fun || NULL == format) + return; + + printf("%s %s(#%d): ", cat, fun, line); + va_list args; + va_start(args, format); + vprintf(format, args); + va_end(args); + printf("\n"); } #endif diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h index ada7c26..253a85c 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h @@ -26,7 +26,7 @@ extern void vbp_trace_util(const char* cat, const char* fun, int line, const cha #define VBP_TRACE_UTIL(cat, format, ...) \ vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) -#else +#else #include #define VBP_TRACE_UTIL(cat, format, ...) 
\ diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c index 4b8b800..a527607 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c @@ -6,7 +6,7 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#include +//#include #include #include "vc1.h" @@ -18,7 +18,14 @@ #include "vbp_mp42_parser.h" -/* buffer counter */ +void* g_try_malloc0(uint32 size) { + void* pMem = malloc(size); + if (pMem) + memset(pMem, 0, size); + return pMem; +} + +/* buffer counter */ uint32 buffer_counter = 0; @@ -29,26 +36,26 @@ uint32 buffer_counter = 0; */ static uint32 vbp_utils_uninitialize_context(vbp_context *pcontext) { - uint32 error = VBP_OK; - - if (NULL == pcontext) - { - return error; - } - - /* not need to reset parser entry points. */ - - g_free(pcontext->parser_ops); - pcontext->parser_ops = NULL; - - - if (pcontext->fd_parser) - { - dlclose(pcontext->fd_parser); - pcontext->fd_parser = NULL; - } - - return error; + uint32 error = VBP_OK; + + if (NULL == pcontext) + { + return error; + } + + /* not need to reset parser entry points. */ + + free(pcontext->parser_ops); + pcontext->parser_ops = NULL; + + + if (pcontext->fd_parser) + { + dlclose(pcontext->fd_parser); + pcontext->fd_parser = NULL; + } + + return error; } /** @@ -58,61 +65,61 @@ static uint32 vbp_utils_uninitialize_context(vbp_context *pcontext) */ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) { - uint32 error = VBP_OK; - char *parser_name; - - switch (pcontext->parser_type) - { - case VBP_VC1: + uint32 error = VBP_OK; + char *parser_name; + + switch (pcontext->parser_type) + { + case VBP_VC1: #ifndef ANDROID - parser_name = "libmixvbp_vc1.so.0"; + parser_name = "libmixvbp_vc1.so.0"; #else - parser_name = "libmixvbp_vc1.so"; + parser_name = "libmixvbp_vc1.so"; #endif - break; + break; - /* MPEG-2 parser is not supported. */ + /* MPEG-2 parser is not supported. */ - /* case VBP_MPEG2: - parser_name = "libmixvbp_mpeg2.so.0"; - break;*/ + /* case VBP_MPEG2: + parser_name = "libmixvbp_mpeg2.so.0"; + break;*/ - case VBP_MPEG4: + case VBP_MPEG4: #ifndef ANDROID - parser_name = "libmixvbp_mpeg4.so.0"; + parser_name = "libmixvbp_mpeg4.so.0"; #else - parser_name = "libmixvbp_mpeg4.so"; + parser_name = "libmixvbp_mpeg4.so"; #endif - break; + break; - case VBP_H264: + case VBP_H264: #ifndef ANDROID - parser_name = "libmixvbp_h264.so.0"; + parser_name = "libmixvbp_h264.so.0"; #else - parser_name = "libmixvbp_h264.so"; + parser_name = "libmixvbp_h264.so"; #endif - break; - - default: - g_warning ("Warning! Unsupported parser type!"); - return VBP_TYPE; - } - - pcontext->fd_parser = dlopen(parser_name, RTLD_LAZY); - if (NULL == pcontext->fd_parser) - { - ETRACE("Failed to load parser %s.", parser_name); - error = VBP_LOAD; - goto cleanup; - } - - pcontext->parser_ops = g_try_new(viddec_parser_ops_t, 1); - if (NULL == pcontext->parser_ops) - { - ETRACE("Failed to allocate memory"); - error = VBP_MEM; - goto cleanup; - } + break; + + default: + //g_warning ("Warning! 
Unsupported parser type!"); + return VBP_TYPE; + } + + pcontext->fd_parser = dlopen(parser_name, RTLD_LAZY); + if (NULL == pcontext->fd_parser) + { + ETRACE("Failed to load parser %s.", parser_name); + error = VBP_LOAD; + goto cleanup; + } + + pcontext->parser_ops = g_try_new(viddec_parser_ops_t, 1); + if (NULL == pcontext->parser_ops) + { + ETRACE("Failed to allocate memory"); + error = VBP_MEM; + goto cleanup; + } #define SET_FUNC_POINTER(X, Y)\ case X:\ @@ -125,32 +132,32 @@ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) pcontext->func_populate_query_data = vbp_populate_query_data_##Y;\ break; - switch (pcontext->parser_type) - { - SET_FUNC_POINTER(VBP_VC1, vc1); - SET_FUNC_POINTER(VBP_MPEG4, mp42); - SET_FUNC_POINTER(VBP_H264, h264); - } - - /* set entry points for parser operations: - init - parse_sc - parse_syntax - get_cxt_size - is_wkld_done - is_frame_start - */ - error = pcontext->func_init_parser_entries(pcontext); + switch (pcontext->parser_type) + { + SET_FUNC_POINTER(VBP_VC1, vc1); + SET_FUNC_POINTER(VBP_MPEG4, mp42); + SET_FUNC_POINTER(VBP_H264, h264); + } + + /* set entry points for parser operations: + init + parse_sc + parse_syntax + get_cxt_size + is_wkld_done + is_frame_start + */ + error = pcontext->func_init_parser_entries(pcontext); cleanup: - if (VBP_OK != error) - { - /* no need to log error. the loader would have done so already. */ - vbp_utils_uninitialize_context(pcontext); - } + if (VBP_OK != error) + { + /* no need to log error. the loader would have done so already. */ + vbp_utils_uninitialize_context(pcontext); + } - return error; + return error; } /** @@ -159,30 +166,30 @@ cleanup: * */ static uint32 vbp_utils_free_parser_memory(vbp_context *pcontext) -{ - if (NULL == pcontext) - { - return VBP_OK; - } - - if (pcontext->func_free_query_data) - { - pcontext->func_free_query_data(pcontext); - } - - g_free(pcontext->workload2); - pcontext->workload2 = NULL; - - g_free(pcontext->workload1); - pcontext->workload1 = NULL; - - g_free(pcontext->persist_mem); - pcontext->persist_mem = NULL; - - g_free(pcontext->parser_cxt); - pcontext->parser_cxt = NULL; - - return VBP_OK; +{ + if (NULL == pcontext) + { + return VBP_OK; + } + + if (pcontext->func_free_query_data) + { + pcontext->func_free_query_data(pcontext); + } + + free(pcontext->workload2); + pcontext->workload2 = NULL; + + free(pcontext->workload1); + pcontext->workload1 = NULL; + + free(pcontext->persist_mem); + pcontext->persist_mem = NULL; + + free(pcontext->parser_cxt); + pcontext->parser_cxt = NULL; + + return VBP_OK; } @@ -193,80 +200,80 @@ static uint32 vbp_utils_free_parser_memory(vbp_context *pcontext) */ static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext) { - /* pcontext is guaranteed to be valid input. */ - uint32 error = VBP_OK; - viddec_parser_memory_sizes_t sizes; - - pcontext->parser_cxt = g_try_new(viddec_pm_cxt_t, 1); - if (NULL == pcontext->parser_cxt) - { - ETRACE("Failed to allocate memory"); - error = VBP_MEM; - goto cleanup; - } - - /* invoke parser entry to get context size */ - /* no return value, should always succeed. */ - pcontext->parser_ops->get_cxt_size(&sizes); - - /* allocate persistent memory for parser */ - if (sizes.persist_size) - { - pcontext->persist_mem = g_try_malloc(sizes.persist_size); - if (NULL == pcontext->persist_mem) - { - ETRACE("Failed to allocate memory"); - error = VBP_MEM; - goto cleanup; - } - } - else - { - /* OK for VC-1, MPEG2 and MPEG4. 
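   These parsers keep their state inside the per-stream parser context,
   so a persist_size of zero is legitimate for them. The H.264 parser is
   the exception: it carries sequence/picture parameter sets and
   reference-list bookkeeping across frames in persistent memory, which
   is why a zero persist_size falls through to the error path below.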
*/ - if ((VBP_VC1 == pcontext->parser_type) || - (VBP_MPEG2 == pcontext->parser_type) || - (VBP_MPEG4 == pcontext->parser_type)) - { - pcontext->persist_mem = NULL; - } - else - { - /* mandatory for H.264 */ - ETRACE("Failed to allocate memory"); - error = VBP_TYPE; - goto cleanup; - } - } - - /* allocate a new workload with 1000 items. */ - pcontext->workload1 = g_try_malloc(sizeof(viddec_workload_t) + - (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t))); - if (NULL == pcontext->workload1) - { - ETRACE("Failed to allocate memory"); - error = VBP_MEM; - goto cleanup; - } - - /* allocate a second workload with 1000 items. */ - pcontext->workload2 = g_try_malloc(sizeof(viddec_workload_t) + - (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t))); - if (NULL == pcontext->workload2) - { - ETRACE("Failed to allocate memory"); - error = VBP_MEM; - goto cleanup; - } - - /* allocate format-specific query data */ - error = pcontext->func_allocate_query_data(pcontext); - -cleanup: - if (error != VBP_OK) - { - vbp_utils_free_parser_memory(pcontext); - } - return error; + /* pcontext is guaranteed to be valid input. */ + uint32 error = VBP_OK; + viddec_parser_memory_sizes_t sizes; + + pcontext->parser_cxt = g_try_new(viddec_pm_cxt_t, 1); + if (NULL == pcontext->parser_cxt) + { + ETRACE("Failed to allocate memory"); + error = VBP_MEM; + goto cleanup; + } + + /* invoke parser entry to get context size */ + /* no return value, should always succeed. */ + pcontext->parser_ops->get_cxt_size(&sizes); + + /* allocate persistent memory for parser */ + if (sizes.persist_size) + { + pcontext->persist_mem = g_try_malloc(sizes.persist_size); + if (NULL == pcontext->persist_mem) + { + ETRACE("Failed to allocate memory"); + error = VBP_MEM; + goto cleanup; + } + } + else + { + /* OK for VC-1, MPEG2 and MPEG4. */ + if ((VBP_VC1 == pcontext->parser_type) || + (VBP_MPEG2 == pcontext->parser_type) || + (VBP_MPEG4 == pcontext->parser_type)) + { + pcontext->persist_mem = NULL; + } + else + { + /* mandatory for H.264 */ + ETRACE("Failed to allocate memory"); + error = VBP_TYPE; + goto cleanup; + } + } + + /* allocate a new workload with 1000 items. */ + pcontext->workload1 = g_try_malloc(sizeof(viddec_workload_t) + + (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t))); + if (NULL == pcontext->workload1) + { + ETRACE("Failed to allocate memory"); + error = VBP_MEM; + goto cleanup; + } + + /* allocate a second workload with 1000 items. */ + pcontext->workload2 = g_try_malloc(sizeof(viddec_workload_t) + + (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t))); + if (NULL == pcontext->workload2) + { + ETRACE("Failed to allocate memory"); + error = VBP_MEM; + goto cleanup; + } + + /* allocate format-specific query data */ + error = pcontext->func_allocate_query_data(pcontext); + +cleanup: + if (error != VBP_OK) + { + vbp_utils_free_parser_memory(pcontext); + } + return error; } @@ -278,105 +285,105 @@ cleanup: */ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_flag) { - viddec_pm_cxt_t *cxt = pcontext->parser_cxt; - viddec_parser_ops_t *ops = pcontext->parser_ops; - uint32 error = VBP_OK; - int i; - - /* reset list number. 
func_parse_init_data or func_parse_start_code will - * set it equal to number of sequence headers, picture headers or slices headers - * found in the sample buffer - */ - cxt->list.num_items = 0; - - /** - * READ THIS NOTE: cxt->getbits.is_emul_reqd must be set to 1 - * for H.264 and MPEG-4, VC1 advanced profile and set to 0 - * for VC1 simple or main profile when parsing the frame - * buffer. When parsing the sequence header, it must be set to 1 - * always. - * - * PARSER IMPLEMENTOR: set this flag in the parser. - */ - - /* - if ((codec_type == VBP_H264) || (codec_type == VBP_MPEG4)) - { - cxt->getbits.is_emul_reqd = 1; - } - */ - - - /* populate the list.*/ - if (init_data_flag) - { - error = pcontext->func_parse_init_data(pcontext); - } - else - { - error = pcontext->func_parse_start_code(pcontext); - } - - if (VBP_OK != error) - { - ETRACE("Failed to parse the start code!"); - return error; - } - - /* set up bitstream buffer */ - cxt->getbits.list = &(cxt->list); - - /* setup buffer pointer */ - cxt->getbits.bstrm_buf.buf = cxt->parse_cubby.buf; - - /* - * TO DO: - * check if cxt->getbits.is_emul_reqd is set properly - */ - - for (i = 0; i < cxt->list.num_items; i++) - { - /* setup bitstream parser */ - cxt->getbits.bstrm_buf.buf_index = cxt->list.data[i].stpos; - cxt->getbits.bstrm_buf.buf_st = cxt->list.data[i].stpos; - cxt->getbits.bstrm_buf.buf_end = cxt->list.data[i].edpos; - - /* It is possible to end up with buf_offset not equal zero. */ - cxt->getbits.bstrm_buf.buf_bitoff = 0; - - cxt->getbits.au_pos = 0; - cxt->getbits.list_off = 0; - cxt->getbits.phase = 0; - cxt->getbits.emulation_byte_counter = 0; - - cxt->list.start_offset = cxt->list.data[i].stpos; - cxt->list.end_offset = cxt->list.data[i].edpos; - cxt->list.total_bytes = cxt->list.data[i].edpos - cxt->list.data[i].stpos; - - /* invoke parse entry point to parse the buffer */ - error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0])); - - /* can't return error for now. Neet further investigation */ - - /*if (0 != error) - { - ETRACE("failed to parse the syntax: %d!", error); - return error; - }*/ - - /* - * process parsing result - */ - error = pcontext->func_process_parsing_result(pcontext, i); - - if (0 != error) - { - ETRACE("Failed to process parsing result."); - return error; - } - } - - return VBP_OK; + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + viddec_parser_ops_t *ops = pcontext->parser_ops; + uint32 error = VBP_OK; + int i; + + /* reset list number. func_parse_init_data or func_parse_start_code will + * set it equal to number of sequence headers, picture headers or slices headers + * found in the sample buffer + */ + cxt->list.num_items = 0; + + /** + * READ THIS NOTE: cxt->getbits.is_emul_reqd must be set to 1 + * for H.264 and MPEG-4, VC1 advanced profile and set to 0 + * for VC1 simple or main profile when parsing the frame + * buffer. When parsing the sequence header, it must be set to 1 + * always. + * + * PARSER IMPLEMENTOR: set this flag in the parser. 
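     * Illustration of what "emulation" means here: the H.264-style escape
     * inserts 0x03 after every 00 00 pair in the payload so payload bytes
     * can never mimic a start code, and the bitstream reader strips those
     * bytes (counting them in getbits.emulation_byte_counter) as it reads.
     * A minimal stripper, as a sketch only (the real work happens inside
     * the viddec_pm_get_bits implementation, not in a separate pass):
     *
     *   size_t strip_emulation(const uint8_t *in, size_t n, uint8_t *out)
     *   {
     *       size_t i, o = 0, zeros = 0;
     *       for (i = 0; i < n; i++) {
     *           if (zeros >= 2 && in[i] == 0x03) { zeros = 0; continue; }
     *           zeros = (in[i] == 0x00) ? zeros + 1 : 0;
     *           out[o++] = in[i];
     *       }
     *       return o;   // bytes written after dropping escapes
     *   }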
+     */
+
+    /*
+    if ((codec_type == VBP_H264) || (codec_type == VBP_MPEG4))
+    {
+        cxt->getbits.is_emul_reqd = 1;
+    }
+    */
+
+
+    /* populate the list.*/
+    if (init_data_flag)
+    {
+        error = pcontext->func_parse_init_data(pcontext);
+    }
+    else
+    {
+        error = pcontext->func_parse_start_code(pcontext);
+    }
+
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to parse the start code!");
+        return error;
+    }
+
+    /* set up bitstream buffer */
+    cxt->getbits.list = &(cxt->list);
+
+    /* setup buffer pointer */
+    cxt->getbits.bstrm_buf.buf = cxt->parse_cubby.buf;
+
+    /*
+     * TO DO:
+     * check if cxt->getbits.is_emul_reqd is set properly
+     */
+
+    for (i = 0; i < cxt->list.num_items; i++)
+    {
+        /* setup bitstream parser */
+        cxt->getbits.bstrm_buf.buf_index = cxt->list.data[i].stpos;
+        cxt->getbits.bstrm_buf.buf_st = cxt->list.data[i].stpos;
+        cxt->getbits.bstrm_buf.buf_end = cxt->list.data[i].edpos;
+
+        /* It is possible to end up with buf_offset not equal zero. */
+        cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+        cxt->getbits.au_pos = 0;
+        cxt->getbits.list_off = 0;
+        cxt->getbits.phase = 0;
+        cxt->getbits.emulation_byte_counter = 0;
+
+        cxt->list.start_offset = cxt->list.data[i].stpos;
+        cxt->list.end_offset = cxt->list.data[i].edpos;
+        cxt->list.total_bytes = cxt->list.data[i].edpos - cxt->list.data[i].stpos;
+
+        /* invoke parse entry point to parse the buffer */
+        error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
+
+        /* can't return error for now. Need further investigation */
+
+        /*if (0 != error)
+        {
+            ETRACE("failed to parse the syntax: %d!", error);
+            return error;
+        }*/
+
+        /*
+         * process parsing result
+         */
+        error = pcontext->func_process_parsing_result(pcontext, i);
+
+        if (0 != error)
+        {
+            ETRACE("Failed to process parsing result.");
+            return error;
+        }
+    }
+
+    return VBP_OK;
 }


@@ -387,71 +394,71 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f
 */
uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext)
{
-    uint32 error = VBP_OK;
-    vbp_context *pcontext = NULL;
+    uint32 error = VBP_OK;
+    vbp_context *pcontext = NULL;

-    /* prevention from the failure */
-    *ppcontext = NULL;
+    /* guard against failure */
+    *ppcontext = NULL;

-    pcontext = g_try_new0(vbp_context, 1);
-    if (NULL == pcontext)
-    {
-        error = VBP_MEM;
-        goto cleanup;
-    }
+    pcontext = g_try_new0(vbp_context, 1);
+    if (NULL == pcontext)
+    {
+        error = VBP_MEM;
+        goto cleanup;
+    }

-    pcontext->parser_type = parser_type;
+    pcontext->parser_type = parser_type;

-    /* load parser, initialize parser operators and entry points */
-    error = vbp_utils_initialize_context(pcontext);
+    /* load parser, initialize parser operators and entry points */
+    error = vbp_utils_initialize_context(pcontext);
     if (VBP_OK != error)
     {
-        goto cleanup;
-    }
+        goto cleanup;
+    }

-    /* allocate parser context, persistent memory, query data and workload */
-    error = vbp_utils_allocate_parser_memory(pcontext);
-    if (VBP_OK != error)
-    {
-        goto cleanup;
-    }
+    /* allocate parser context, persistent memory, query data and workload */
+    error = vbp_utils_allocate_parser_memory(pcontext);
+    if (VBP_OK != error)
+    {
+        goto cleanup;
+    }

-    viddec_pm_utils_list_init(&(pcontext->parser_cxt->list));
-    viddec_pm_utils_bstream_init(&(pcontext->parser_cxt->getbits), NULL, 0);
-    pcontext->parser_cxt->cur_buf.list_index = -1;
-    pcontext->parser_cxt->parse_cubby.phase = 0;
+    viddec_pm_utils_list_init(&(pcontext->parser_cxt->list));
+    viddec_pm_utils_bstream_init(&(pcontext->parser_cxt->getbits), NULL, 0);
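The READ THIS NOTE comment above concerns start-code emulation prevention: in H.264, MPEG-4 and VC-1 advanced profile, encoders insert a 0x03 byte after any two consecutive zero bytes so that payload data can never imitate a start-code prefix, and a reader with is_emul_reqd set must drop those bytes (counted here in emulation_byte_counter). A minimal standalone sketch of that stripping follows; strip_emulation_bytes is a hypothetical helper for illustration only, since the library's bitstream reader performs this on the fly:

    #include <stddef.h>
    #include <stdint.h>

    /* Copy src to dst, dropping the 0x03 from every 0x00 0x00 0x03 triple.
       Returns the number of bytes written to dst. */
    static size_t strip_emulation_bytes(const uint8_t *src, size_t n, uint8_t *dst)
    {
        size_t i, out = 0;
        int zeros = 0;

        for (i = 0; i < n; i++)
        {
            if (zeros >= 2 && src[i] == 0x03)
            {
                zeros = 0;  /* emulation prevention byte: skip it */
                continue;
            }
            zeros = (src[i] == 0x00) ? zeros + 1 : 0;
            dst[out++] = src[i];
        }
        return out;
    }

VC-1 simple/main profile frame data carries no such bytes, which is why vbp_parse_start_code_vc1 later in this patch sets is_emul_reqd to 0 for that path.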
+    pcontext->parser_cxt->cur_buf.list_index = -1;
+    pcontext->parser_cxt->parse_cubby.phase = 0;

-    /* invoke the entry point to initialize the parser. */
-    pcontext->parser_ops->init(
-        (uint32_t *)pcontext->parser_cxt->codec_data,
-        (uint32_t *)pcontext->persist_mem,
-        FALSE);
+    /* invoke the entry point to initialize the parser. */
+    pcontext->parser_ops->init(
+        (uint32_t *)pcontext->parser_cxt->codec_data,
+        (uint32_t *)pcontext->persist_mem,
+        FALSE);

-    viddec_emit_init(&(pcontext->parser_cxt->emitter));
+    viddec_emit_init(&(pcontext->parser_cxt->emitter));

-    /* overwrite init with our number of items. */
-    pcontext->parser_cxt->emitter.cur.max_items = MAX_WORKLOAD_ITEMS;
-    pcontext->parser_cxt->emitter.next.max_items = MAX_WORKLOAD_ITEMS;
+    /* overwrite init with our number of items. */
+    pcontext->parser_cxt->emitter.cur.max_items = MAX_WORKLOAD_ITEMS;
+    pcontext->parser_cxt->emitter.next.max_items = MAX_WORKLOAD_ITEMS;

-    /* set up to find the first start code. */
-    pcontext->parser_cxt->sc_prefix_info.first_sc_detect = 1;
+    /* set up to find the first start code. */
+    pcontext->parser_cxt->sc_prefix_info.first_sc_detect = 1;

-    /* indicates initialized OK. */
-    pcontext->identifier = MAGIC_NUMBER;
-    *ppcontext = pcontext;
-    error = VBP_OK;
+    /* indicates initialized OK. */
+    pcontext->identifier = MAGIC_NUMBER;
+    *ppcontext = pcontext;
+    error = VBP_OK;

cleanup:

-    if (VBP_OK != error)
-    {
-        vbp_utils_free_parser_memory(pcontext);
-        vbp_utils_uninitialize_context(pcontext);
-        g_free(pcontext);
-        pcontext = NULL;
-    }
+    if (VBP_OK != error)
+    {
+        vbp_utils_free_parser_memory(pcontext);
+        vbp_utils_uninitialize_context(pcontext);
+        free(pcontext);
+        pcontext = NULL;
+    }

-    return error;
+    return error;
 }

/**
@@ -461,13 +468,13 @@ cleanup:
 */
uint32 vbp_utils_destroy_context(vbp_context *pcontext)
{
-    /* entry point, not need to validate input parameters. */
-    vbp_utils_free_parser_memory(pcontext);
-    vbp_utils_uninitialize_context(pcontext);
-    g_free(pcontext);
-    pcontext = NULL;
-
-    return VBP_OK;
+    /* entry point, no need to validate input parameters. */
+    vbp_utils_free_parser_memory(pcontext);
+    vbp_utils_uninitialize_context(pcontext);
+    free(pcontext);
+    pcontext = NULL;
+
+    return VBP_OK;
}


@@ -477,34 +484,34 @@ uint32 vbp_utils_destroy_context(vbp_context *pcontext)
 *
 */
uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, uint8 init_data_flag)
-{
-    /* entry point, not need to validate input parameters. */
+{
+    /* entry point, no need to validate input parameters. */

-    uint32 error = VBP_OK;
+    uint32 error = VBP_OK;

     //ITRACE("buffer counter: %d",buffer_counter);

-    /* set up emitter. */
-    pcontext->parser_cxt->emitter.cur.data = pcontext->workload1;
-    pcontext->parser_cxt->emitter.next.data = pcontext->workload2;
+    /* set up emitter. */
+    pcontext->parser_cxt->emitter.cur.data = pcontext->workload1;
+    pcontext->parser_cxt->emitter.next.data = pcontext->workload2;

-    /* reset bit offset */
-    pcontext->parser_cxt->getbits.bstrm_buf.buf_bitoff = 0;
+    /* reset bit offset */
+    pcontext->parser_cxt->getbits.bstrm_buf.buf_bitoff = 0;

-    /* set up cubby. */
-    pcontext->parser_cxt->parse_cubby.buf = data;
-    pcontext->parser_cxt->parse_cubby.size = size;
-    pcontext->parser_cxt->parse_cubby.phase = 0;
+    /* set up cubby. */
+    pcontext->parser_cxt->parse_cubby.buf = data;
+    pcontext->parser_cxt->parse_cubby.size = size;
+    pcontext->parser_cxt->parse_cubby.phase = 0;

-    error = vbp_utils_parse_es_buffer(pcontext, init_data_flag);

-    /* rolling count of buffers. */
-    if (0 == init_data_flag)
-    {
-        buffer_counter++;
-    }
-    return error;
+    error = vbp_utils_parse_es_buffer(pcontext, init_data_flag);
+
+    /* rolling count of buffers. */
+    if (0 == init_data_flag)
+    {
+        buffer_counter++;
+    }
+    return error;
}

/**
@@ -514,19 +521,19 @@ uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size,
 */
uint32 vbp_utils_query(vbp_context *pcontext, void **data)
{
-    /* entry point, not need to validate input parameters. */
-    uint32 error = VBP_OK;
-
-    error = pcontext->func_populate_query_data(pcontext);
-    if (VBP_OK == error)
-    {
-        *data = pcontext->query_data;
-    }
-    else
-    {
-        *data = NULL;
-    }
-    return error;
+    /* entry point, no need to validate input parameters. */
+    uint32 error = VBP_OK;
+
+    error = pcontext->func_populate_query_data(pcontext);
+    if (VBP_OK == error)
+    {
+        *data = pcontext->query_data;
+    }
+    else
+    {
+        *data = NULL;
+    }
+    return error;
}

/**
@@ -536,6 +543,6 @@ uint32 vbp_utils_query(vbp_context *pcontext, void **data)
 */
uint32 vbp_utils_flush(vbp_context *pcontext)
{
-    return VBP_OK;
+    return VBP_OK;
}

diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
index 67ff3e8..08bb76f 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
@@ -13,6 +13,7 @@
 #include "viddec_pm_parse.h"
 #include "viddec_pm.h"
 #include "vbp_trace.h"
+#include <stdlib.h>

 #define MAGIC_NUMBER 0x0DEADBEEF
 #define MAX_WORKLOAD_ITEMS 1000
@@ -21,8 +22,19 @@
 #define MAX_NUM_SLICES 256

 /* maximum two pictures per sample buffer */
-#define MAX_NUM_PICTURES 2
+#define MAX_NUM_PICTURES 2

+#define g_free free
+#define g_try_malloc malloc
+
+#define g_try_new(struct_type, n_structs) \
+    ((struct_type *) g_try_malloc (sizeof (struct_type) * n_structs))
+#define g_try_new0(struct_type, n_structs) \
+    ((struct_type *) g_try_malloc0 (sizeof (struct_type) * n_structs))
+
+
+
+void* g_try_malloc0(uint32 size);

 extern uint32 viddec_parse_sc(void *in, void *pcxt, void *sc_state);

@@ -43,38 +55,38 @@ typedef uint32 (*function_populate_query_data)(vbp_context* cxt);

struct vbp_context_t
{
-    /* magic number */
-    uint32 identifier;
+    /* magic number */
+    uint32 identifier;

-    /* parser type, eg, MPEG-2, MPEG-4, H.264, VC1 */
-    uint32 parser_type;
+    /* parser type, e.g., MPEG-2, MPEG-4, H.264, VC1 */
+    uint32 parser_type;

-    /* handle to parser (shared object) */
-    void *fd_parser;
+    /* handle to parser (shared object) */
+    void *fd_parser;

-    /* parser (shared object) entry points */
-    viddec_parser_ops_t *parser_ops;
+    /* parser (shared object) entry points */
+    viddec_parser_ops_t *parser_ops;

-    /* parser context */
-    viddec_pm_cxt_t *parser_cxt;
+    /* parser context */
+    viddec_pm_cxt_t *parser_cxt;

-    /* work load */
-    viddec_workload_t *workload1, *workload2;
+    /* workload */
+    viddec_workload_t *workload1, *workload2;

-    /* persistent memory for parser */
-    uint32 *persist_mem;
+    /* persistent memory for parser */
+    uint32 *persist_mem;

-    /* format specific query data */
-    void *query_data;
+    /* format specific query data */
+    void *query_data;

-
-    function_init_parser_entries func_init_parser_entries;
-    function_allocate_query_data func_allocate_query_data;
-    function_free_query_data func_free_query_data;
-    function_parse_init_data func_parse_init_data;
-
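The vbp_utils.h hunk above swaps the glib allocators for C-library equivalents but only declares g_try_malloc0; its definition is not part of this hunk. A minimal definition consistent with those macros might look like the following sketch (standalone illustration only; the typedef stands in for the project's own uint32, and the real definition may live in a file not shown here):

    #include <stdlib.h>
    #include <string.h>

    typedef unsigned int uint32;  /* stand-in for the project's typedef */

    /* Zero-filled allocation returning NULL on failure, mirroring the
       glib g_try_malloc0 that the g_try_new0 macro expands to. */
    void* g_try_malloc0(uint32 size)
    {
        void *ptr = malloc(size);
        if (ptr != NULL)
        {
            memset(ptr, 0, size);
        }
        return ptr;
    }

calloc(1, size) would serve equally well here.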
function_parse_start_code func_parse_start_code; - function_process_parsing_result func_process_parsing_result; - function_populate_query_data func_populate_query_data; + + function_init_parser_entries func_init_parser_entries; + function_allocate_query_data func_allocate_query_data; + function_free_query_data func_free_query_data; + function_parse_init_data func_parse_init_data; + function_parse_start_code func_parse_start_code; + function_process_parsing_result func_process_parsing_result; + function_populate_query_data func_populate_query_data; }; @@ -88,7 +100,7 @@ uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext); */ uint32 vbp_utils_destroy_context(vbp_context *pcontext); -/* +/* * parse bitstream */ uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, uint8 init_data_flag); @@ -98,7 +110,7 @@ uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, u */ uint32 vbp_utils_query(vbp_context *pcontext, void **data); -/* +/* * flush un-parsed bitstream */ uint32 vbp_utils_flush(vbp_context *pcontext); diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c index 4739798..21c431a 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c @@ -6,7 +6,7 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#include +//#include #include #include @@ -22,19 +22,19 @@ #define PREFIX_SIZE 3 static uint32 b_fraction_table[][9] = { - /* num 0 1 2 3 4 5 6 7 8 den */ - /* 0 */ { 0, 0, 0, 0, 0, 0, 0, 0, 0 }, - /* 1 */ { 0, 0, 0, 1, 3, 5, 9, 11, 17 }, - /* 2 */ { 0, 0, 0, 2, 0, 6, 0, 12, 0 }, - /* 3 */ { 0, 0, 0, 0, 4, 7, 0, 13, 18 }, - /* 4 */ { 0, 0, 0, 0, 0, 8, 0, 14, 0 }, - /* 5 */ { 0, 0, 0, 0, 0, 0, 10, 15, 19 }, - /* 6 */ { 0, 0, 0, 0, 0, 0, 0, 16, 0 }, - /* 7 */ { 0, 0, 0, 0, 0, 0, 0, 0, 20 } + /* num 0 1 2 3 4 5 6 7 8 den */ + /* 0 */ { 0, 0, 0, 0, 0, 0, 0, 0, 0 }, + /* 1 */ { 0, 0, 0, 1, 3, 5, 9, 11, 17 }, + /* 2 */ { 0, 0, 0, 2, 0, 6, 0, 12, 0 }, + /* 3 */ { 0, 0, 0, 0, 4, 7, 0, 13, 18 }, + /* 4 */ { 0, 0, 0, 0, 0, 8, 0, 14, 0 }, + /* 5 */ { 0, 0, 0, 0, 0, 0, 10, 15, 19 }, + /* 6 */ { 0, 0, 0, 0, 0, 0, 0, 16, 0 }, + /* 7 */ { 0, 0, 0, 0, 0, 0, 0, 0, 20 } }; -static uint8 vc1_aspect_ratio_table[][2] = +static uint8 vc1_aspect_ratio_table[][2] = { {0, 0}, {1, 1}, @@ -50,7 +50,7 @@ static uint8 vc1_aspect_ratio_table[][2] = {15, 11}, {64, 33}, {160, 99}, - + // reserved {0, 0} }; @@ -62,50 +62,50 @@ static uint8 vc1_aspect_ratio_table[][2] = */ uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext) { - if (NULL == pcontext->parser_ops) - { - /* impossible, just sanity check */ - return VBP_PARM; - } - - pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_vc1_init"); - if (NULL == pcontext->parser_ops->init) - { - ETRACE ("Failed to set entry point."); - return VBP_LOAD; - } - - pcontext->parser_ops->parse_sc = viddec_parse_sc; - - pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_vc1_parse"); - if (NULL == pcontext->parser_ops->parse_syntax) - { - ETRACE ("Failed to set entry point."); - return VBP_LOAD; - } - - pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, 
"viddec_vc1_get_context_size"); - if (NULL == pcontext->parser_ops->get_cxt_size) - { - ETRACE ("Failed to set entry point."); - return VBP_LOAD; - } - - pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_vc1_wkld_done"); - if (NULL == pcontext->parser_ops->is_wkld_done) - { - ETRACE ("Failed to set entry point."); - return VBP_LOAD; - } - - pcontext->parser_ops->is_frame_start = dlsym(pcontext->fd_parser, "viddec_vc1_is_start_frame"); - if (NULL == pcontext->parser_ops->is_frame_start) - { - ETRACE ("Failed to set entry point."); - return VBP_LOAD; - } - - return VBP_OK; + if (NULL == pcontext->parser_ops) + { + /* impossible, just sanity check */ + return VBP_PARM; + } + + pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_vc1_init"); + if (NULL == pcontext->parser_ops->init) + { + ETRACE ("Failed to set entry point."); + return VBP_LOAD; + } + + pcontext->parser_ops->parse_sc = viddec_parse_sc; + + pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_vc1_parse"); + if (NULL == pcontext->parser_ops->parse_syntax) + { + ETRACE ("Failed to set entry point."); + return VBP_LOAD; + } + + pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_vc1_get_context_size"); + if (NULL == pcontext->parser_ops->get_cxt_size) + { + ETRACE ("Failed to set entry point."); + return VBP_LOAD; + } + + pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_vc1_wkld_done"); + if (NULL == pcontext->parser_ops->is_wkld_done) + { + ETRACE ("Failed to set entry point."); + return VBP_LOAD; + } + + pcontext->parser_ops->is_frame_start = dlsym(pcontext->fd_parser, "viddec_vc1_is_start_frame"); + if (NULL == pcontext->parser_ops->is_frame_start) + { + ETRACE ("Failed to set entry point."); + return VBP_LOAD; + } + + return VBP_OK; } /** @@ -113,63 +113,63 @@ uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext) */ uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext) { - if (NULL != pcontext->query_data) - { - /* impossible, just sanity check */ - return VBP_PARM; - } - - pcontext->query_data = NULL; - - vbp_data_vc1 *query_data = NULL; - query_data = g_try_new0(vbp_data_vc1, 1); - if (NULL == query_data) - { - return VBP_MEM; - } - - /* assign the pointer */ - pcontext->query_data = (void *)query_data; - - query_data->se_data = g_try_new0(vbp_codec_data_vc1, 1); - if (NULL == query_data->se_data) - { - goto cleanup; - } - query_data->pic_data = g_try_new0(vbp_picture_data_vc1, MAX_NUM_PICTURES); - if (NULL == query_data->pic_data) - { - goto cleanup; - } - - int i; - for (i = 0; i < MAX_NUM_PICTURES; i++) - { - query_data->pic_data[i].pic_parms = g_try_new0(VAPictureParameterBufferVC1, 1); - if (NULL == query_data->pic_data[i].pic_parms) - { - goto cleanup; - } - - query_data->pic_data[i].packed_bitplanes = g_try_malloc0(MAX_BITPLANE_SIZE); - if (NULL == query_data->pic_data[i].packed_bitplanes) - { - goto cleanup; - } - - query_data->pic_data[i].slc_data = g_try_malloc0(MAX_NUM_SLICES * sizeof(vbp_slice_data_vc1)); - if (NULL == query_data->pic_data[i].slc_data) - { - goto cleanup; - } - } - - return VBP_OK; + if (NULL != pcontext->query_data) + { + /* impossible, just sanity check */ + return VBP_PARM; + } + + pcontext->query_data = NULL; + + vbp_data_vc1 *query_data = NULL; + query_data = g_try_new0(vbp_data_vc1, 1); + if (NULL == query_data) + { + return VBP_MEM; + } + + /* assign the pointer */ + pcontext->query_data = (void *)query_data; + + query_data->se_data = g_try_new0(vbp_codec_data_vc1, 1); + if (NULL == 
query_data->se_data) + { + goto cleanup; + } + query_data->pic_data = g_try_new0(vbp_picture_data_vc1, MAX_NUM_PICTURES); + if (NULL == query_data->pic_data) + { + goto cleanup; + } + + int i; + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].pic_parms = g_try_new0(VAPictureParameterBufferVC1, 1); + if (NULL == query_data->pic_data[i].pic_parms) + { + goto cleanup; + } + + query_data->pic_data[i].packed_bitplanes = g_try_malloc0(MAX_BITPLANE_SIZE); + if (NULL == query_data->pic_data[i].packed_bitplanes) + { + goto cleanup; + } + + query_data->pic_data[i].slc_data = g_try_malloc0(MAX_NUM_SLICES * sizeof(vbp_slice_data_vc1)); + if (NULL == query_data->pic_data[i].slc_data) + { + goto cleanup; + } + } + + return VBP_OK; cleanup: - vbp_free_query_data_vc1(pcontext); + vbp_free_query_data_vc1(pcontext); - return VBP_MEM; + return VBP_MEM; } @@ -178,35 +178,35 @@ cleanup: */ uint32 vbp_free_query_data_vc1(vbp_context *pcontext) { - vbp_data_vc1 *query_data = NULL; - - if (NULL == pcontext->query_data) - { - return VBP_OK; - } - - query_data = (vbp_data_vc1 *)pcontext->query_data; - - if (query_data->pic_data) - { - int i = 0; - for (i = 0; i < MAX_NUM_PICTURES; i++) - { - g_free(query_data->pic_data[i].slc_data); - g_free(query_data->pic_data[i].packed_bitplanes); - g_free(query_data->pic_data[i].pic_parms); - } - } - - g_free(query_data->pic_data); - - g_free(query_data->se_data); - - g_free(query_data); - - pcontext->query_data = NULL; - - return VBP_OK; + vbp_data_vc1 *query_data = NULL; + + if (NULL == pcontext->query_data) + { + return VBP_OK; + } + + query_data = (vbp_data_vc1 *)pcontext->query_data; + + if (query_data->pic_data) + { + int i = 0; + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + free(query_data->pic_data[i].slc_data); + free(query_data->pic_data[i].packed_bitplanes); + free(query_data->pic_data[i].pic_parms); + } + } + + free(query_data->pic_data); + + free(query_data->se_data); + + free(query_data); + + pcontext->query_data = NULL; + + return VBP_OK; } @@ -218,151 +218,151 @@ uint32 vbp_free_query_data_vc1(vbp_context *pcontext) * In this case, I will bypass the stripping of the SC code and assume a frame. */ static uint32 vbp_parse_start_code_helper_vc1( - viddec_pm_cxt_t *cxt, - viddec_parser_ops_t *ops, - int init_data_flag) + viddec_pm_cxt_t *cxt, + viddec_parser_ops_t *ops, + int init_data_flag) { - uint32_t ret = VBP_OK; - viddec_sc_parse_cubby_cxt_t cubby; - - /* make copy of cubby */ - /* this doesn't copy the buffer, merely the structure that holds the buffer */ - /* pointer. Below, where we call parse_sc() the code starts the search for */ - /* SCs at the beginning of the buffer pointed to by the cubby, so in our */ - /* cubby copy we increment the pointer as we move through the buffer. If */ - /* you think of each start code followed either by another start code or the */ - /* end of the buffer, then parse_sc() is returning information relative to */ - /* current segment. */ - - cubby = cxt->parse_cubby; - - cxt->list.num_items = 0; - cxt->list.data[0].stpos = 0; - cxt->getbits.is_emul_reqd = 1; - - /* codec initialization data is always start code prefixed. 
(may not start at position 0) - * sample buffer for AP has three start code patterns here: - * pattern 0: no start code at all, the whole buffer is a single segment item - * pattern 1: start codes for all segment items - * pattern 2: no start code for the first segment item, start codes for the rest segment items - */ - - gboolean is_pattern_two = FALSE; - - unsigned char start_code = 0; - - while(1) - { - /* parse the created buffer for sc */ - ret = ops->parse_sc((void *)&cubby, (void *)&(cxt->codec_data[0]), &(cxt->sc_prefix_info)); - if(ret == 1) - { - cubby.phase = 0; - start_code = *(unsigned char*)(cubby.buf + cubby.sc_end_pos); -#if 1 - if (0 == init_data_flag && - PREFIX_SIZE != cubby.sc_end_pos && - 0 == cxt->list.num_items) - { - /* buffer does not have start code at the beginning */ - vc1_viddec_parser_t *parser = NULL; - vc1_metadata_t *seqLayerHeader = NULL; - - parser = (vc1_viddec_parser_t *)cxt->codec_data; - seqLayerHeader = &(parser->info.metadata); - if (1 == seqLayerHeader->INTERLACE) - { - /* this is a hack for interlaced field coding */ - /* handle field interlace coding. One sample contains two fields, where: - * the first field does not have start code prefix, - * the second field has start code prefix. - */ - cxt->list.num_items = 1; - cxt->list.data[0].stpos = 0; - is_pattern_two = TRUE; - } - } -#endif - if (cxt->list.num_items == 0) /* found first SC. */ - { - /* sc_end_pos gets us to the SC type. We need to back up to the first zero */ - cxt->list.data[0].stpos = cubby.sc_end_pos - PREFIX_SIZE; - } - else - { - /* First we set the end position of the last segment. */ - /* Since the SC parser searches from SC type to SC type and the */ - /* sc_end_pos is relative to this segment only, we merely add */ - /* sc_end_pos to the start to find the end. */ - cxt->list.data[cxt->list.num_items - 1].edpos = - cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos; - - /* Then we set the start position of the current segment. */ - /* So I need to subtract 1 ??? */ - cxt->list.data[cxt->list.num_items].stpos = - cxt->list.data[cxt->list.num_items - 1].edpos; - - if (is_pattern_two) - { - cxt->list.data[cxt->list.num_items].stpos -= PREFIX_SIZE; - /* restore to normal pattern */ - is_pattern_two = FALSE; - } - } - /* We need to set up the cubby buffer for the next time through parse_sc(). */ - /* But even though we want the list to contain a segment as described */ - /* above, we want the cubby buffer to start just past the prefix, or it will */ - /* find the same SC again. So I bump the cubby buffer past the prefix. */ - cubby.buf = /*cubby.buf +*/ - cxt->parse_cubby.buf + - cxt->list.data[cxt->list.num_items].stpos + - PREFIX_SIZE; - - cubby.size = cxt->parse_cubby.size - - cxt->list.data[cxt->list.num_items].stpos - - PREFIX_SIZE; - - if (start_code >= 0x0A && start_code <= 0x0F) - { - /* only put known start code to the list - * 0x0A: end of sequence - * 0x0B: slice header - * 0x0C: frame header - * 0x0D: field header - * 0x0E: entry point header - * 0x0F: sequence header - */ - cxt->list.num_items++; - } - else - { - ITRACE("skipping unknown start code :%d", start_code); - } - - if (cxt->list.num_items >= MAX_IBUFS_PER_SC) - { - WTRACE("Num items exceeds the limit!"); - /* not fatal, just stop parsing */ - break; - } - } - else - { - /* we get here, if we reach the end of the buffer while looking or a SC. */ - /* If we never found a SC, then num_items will never get incremented. 
*/ - if (cxt->list.num_items == 0) - { - /* If we don't find a SC we probably still have a frame of data. */ - /* So let's bump the num_items or else later we will not parse the */ - /* frame. */ - cxt->list.num_items = 1; - } - /* now we can set the end position of the last segment. */ - cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size; - break; - } - } - return VBP_OK; + uint32_t ret = VBP_OK; + viddec_sc_parse_cubby_cxt_t cubby; + + /* make copy of cubby */ + /* this doesn't copy the buffer, merely the structure that holds the buffer */ + /* pointer. Below, where we call parse_sc() the code starts the search for */ + /* SCs at the beginning of the buffer pointed to by the cubby, so in our */ + /* cubby copy we increment the pointer as we move through the buffer. If */ + /* you think of each start code followed either by another start code or the */ + /* end of the buffer, then parse_sc() is returning information relative to */ + /* current segment. */ + + cubby = cxt->parse_cubby; + + cxt->list.num_items = 0; + cxt->list.data[0].stpos = 0; + cxt->getbits.is_emul_reqd = 1; + + /* codec initialization data is always start code prefixed. (may not start at position 0) + * sample buffer for AP has three start code patterns here: + * pattern 0: no start code at all, the whole buffer is a single segment item + * pattern 1: start codes for all segment items + * pattern 2: no start code for the first segment item, start codes for the rest segment items + */ + + bool is_pattern_two = FALSE; + + unsigned char start_code = 0; + + while (1) + { + /* parse the created buffer for sc */ + ret = ops->parse_sc((void *)&cubby, (void *)&(cxt->codec_data[0]), &(cxt->sc_prefix_info)); + if (ret == 1) + { + cubby.phase = 0; + start_code = *(unsigned char*)(cubby.buf + cubby.sc_end_pos); +#if 1 + if (0 == init_data_flag && + PREFIX_SIZE != cubby.sc_end_pos && + 0 == cxt->list.num_items) + { + /* buffer does not have start code at the beginning */ + vc1_viddec_parser_t *parser = NULL; + vc1_metadata_t *seqLayerHeader = NULL; + + parser = (vc1_viddec_parser_t *)cxt->codec_data; + seqLayerHeader = &(parser->info.metadata); + if (1 == seqLayerHeader->INTERLACE) + { + /* this is a hack for interlaced field coding */ + /* handle field interlace coding. One sample contains two fields, where: + * the first field does not have start code prefix, + * the second field has start code prefix. + */ + cxt->list.num_items = 1; + cxt->list.data[0].stpos = 0; + is_pattern_two = TRUE; + } + } +#endif + if (cxt->list.num_items == 0) /* found first SC. */ + { + /* sc_end_pos gets us to the SC type. We need to back up to the first zero */ + cxt->list.data[0].stpos = cubby.sc_end_pos - PREFIX_SIZE; + } + else + { + /* First we set the end position of the last segment. */ + /* Since the SC parser searches from SC type to SC type and the */ + /* sc_end_pos is relative to this segment only, we merely add */ + /* sc_end_pos to the start to find the end. */ + cxt->list.data[cxt->list.num_items - 1].edpos = + cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos; + + /* Then we set the start position of the current segment. */ + /* So I need to subtract 1 ??? */ + cxt->list.data[cxt->list.num_items].stpos = + cxt->list.data[cxt->list.num_items - 1].edpos; + + if (is_pattern_two) + { + cxt->list.data[cxt->list.num_items].stpos -= PREFIX_SIZE; + /* restore to normal pattern */ + is_pattern_two = FALSE; + } + } + /* We need to set up the cubby buffer for the next time through parse_sc(). 
*/ + /* But even though we want the list to contain a segment as described */ + /* above, we want the cubby buffer to start just past the prefix, or it will */ + /* find the same SC again. So I bump the cubby buffer past the prefix. */ + cubby.buf = /*cubby.buf +*/ + cxt->parse_cubby.buf + + cxt->list.data[cxt->list.num_items].stpos + + PREFIX_SIZE; + + cubby.size = cxt->parse_cubby.size - + cxt->list.data[cxt->list.num_items].stpos - + PREFIX_SIZE; + + if (start_code >= 0x0A && start_code <= 0x0F) + { + /* only put known start code to the list + * 0x0A: end of sequence + * 0x0B: slice header + * 0x0C: frame header + * 0x0D: field header + * 0x0E: entry point header + * 0x0F: sequence header + */ + cxt->list.num_items++; + } + else + { + ITRACE("skipping unknown start code :%d", start_code); + } + + if (cxt->list.num_items >= MAX_IBUFS_PER_SC) + { + WTRACE("Num items exceeds the limit!"); + /* not fatal, just stop parsing */ + break; + } + } + else + { + /* we get here, if we reach the end of the buffer while looking or a SC. */ + /* If we never found a SC, then num_items will never get incremented. */ + if (cxt->list.num_items == 0) + { + /* If we don't find a SC we probably still have a frame of data. */ + /* So let's bump the num_items or else later we will not parse the */ + /* frame. */ + cxt->list.num_items = 1; + } + /* now we can set the end position of the last segment. */ + cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size; + break; + } + } + return VBP_OK; } /* @@ -374,15 +374,15 @@ static uint32 vbp_parse_start_code_helper_vc1( * structure 1 and structure 3 */ uint32 vbp_parse_init_data_vc1(vbp_context *pcontext) -{ - /** - * init data (aka decoder configuration data) must - * be start-code prefixed - */ - - viddec_pm_cxt_t *cxt = pcontext->parser_cxt; - viddec_parser_ops_t *ops = pcontext->parser_ops; - return vbp_parse_start_code_helper_vc1(cxt, ops, 1); +{ + /** + * init data (aka decoder configuration data) must + * be start-code prefixed + */ + + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + viddec_parser_ops_t *ops = pcontext->parser_ops; + return vbp_parse_start_code_helper_vc1(cxt, ops, 1); } @@ -393,45 +393,45 @@ uint32 vbp_parse_init_data_vc1(vbp_context *pcontext) */ uint32_t vbp_parse_start_code_vc1(vbp_context *pcontext) { - viddec_pm_cxt_t *cxt = pcontext->parser_cxt; - viddec_parser_ops_t *ops = pcontext->parser_ops; - - vc1_viddec_parser_t *parser = NULL; - vc1_metadata_t *seqLayerHeader = NULL; - - vbp_data_vc1 *query_data = (vbp_data_vc1 *) pcontext->query_data; - - /* Reset query data for the new sample buffer */ - int i = 0; - for (i = 0; i < MAX_NUM_PICTURES; i++) - { - query_data->num_pictures = 0; - query_data->pic_data[i].num_slices = 0; - query_data->pic_data[i].picture_is_skipped = 0; - } - - parser = (vc1_viddec_parser_t *)cxt->codec_data; - seqLayerHeader = &(parser->info.metadata); - - - /* WMV codec data will have a start code, but the WMV picture data won't. */ - if (VC1_PROFILE_ADVANCED == seqLayerHeader->PROFILE) - { - return vbp_parse_start_code_helper_vc1(cxt, ops, 0); - } - else - { - /* WMV: vc1 simple or main profile. No start code present. - */ - - /* must set is_emul_reqd to 0! 
*/ - cxt->getbits.is_emul_reqd = 0; - cxt->list.num_items = 1; - cxt->list.data[0].stpos = 0; - cxt->list.data[0].edpos = cxt->parse_cubby.size; - } - - return VBP_OK; + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + viddec_parser_ops_t *ops = pcontext->parser_ops; + + vc1_viddec_parser_t *parser = NULL; + vc1_metadata_t *seqLayerHeader = NULL; + + vbp_data_vc1 *query_data = (vbp_data_vc1 *) pcontext->query_data; + + /* Reset query data for the new sample buffer */ + int i = 0; + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->num_pictures = 0; + query_data->pic_data[i].num_slices = 0; + query_data->pic_data[i].picture_is_skipped = 0; + } + + parser = (vc1_viddec_parser_t *)cxt->codec_data; + seqLayerHeader = &(parser->info.metadata); + + + /* WMV codec data will have a start code, but the WMV picture data won't. */ + if (VC1_PROFILE_ADVANCED == seqLayerHeader->PROFILE) + { + return vbp_parse_start_code_helper_vc1(cxt, ops, 0); + } + else + { + /* WMV: vc1 simple or main profile. No start code present. + */ + + /* must set is_emul_reqd to 0! */ + cxt->getbits.is_emul_reqd = 0; + cxt->list.num_items = 1; + cxt->list.data[0].stpos = 0; + cxt->list.data[0].edpos = cxt->parse_cubby.size; + } + + return VBP_OK; } @@ -440,22 +440,22 @@ uint32_t vbp_parse_start_code_vc1(vbp_context *pcontext) */ static inline uint8 vbp_get_bit_vc1(uint32 *data, uint32 *current_word, uint32 *current_bit) { - uint8 value; - - value = (data[*current_word] >> *current_bit) & 1; - - /* Fix up bit/byte offsets. endianess?? */ - if (*current_bit < 31) - { - ++(*current_bit); - } - else - { - ++(*current_word); - *current_bit = 0; - } - - return value; + uint8 value; + + value = (data[*current_word] >> *current_bit) & 1; + + /* Fix up bit/byte offsets. endianess?? */ + if (*current_bit < 31) + { + ++(*current_bit); + } + else + { + ++(*current_word); + *current_bit = 0; + } + + return value; } @@ -463,48 +463,48 @@ static inline uint8 vbp_get_bit_vc1(uint32 *data, uint32 *current_word, uint32 * * */ static uint32 vbp_pack_bitplane_vc1( - uint32 *from_plane, - uint8 *to_plane, - uint32 width, - uint32 height, - uint32 nibble_shift) + uint32 *from_plane, + uint8 *to_plane, + uint32 width, + uint32 height, + uint32 nibble_shift) { - uint32 error = VBP_OK; - uint32 current_word = 0; - uint32 current_bit = 0; /* must agree with number in vbp_get_bit_vc1 */ - uint32 i, j, n; - uint8 value; - uint32 stride = 0; - - stride = 32 * ((width + 31) / 32); - - for (i = 0, n = 0; i < height; i++) - { - for (j = 0; j < stride; j++) - { - if (j < width) - { - value = vbp_get_bit_vc1( - from_plane, - ¤t_word, - ¤t_bit); - - to_plane[n / 2] |= value << (nibble_shift + ((n % 2) ? 0 : 4)); - n++; - } - else - { - break; - } - } - if (stride > width) - { - current_word++; - current_bit = 0; - } - } - - return error; + uint32 error = VBP_OK; + uint32 current_word = 0; + uint32 current_bit = 0; /* must agree with number in vbp_get_bit_vc1 */ + uint32 i, j, n; + uint8 value; + uint32 stride = 0; + + stride = 32 * ((width + 31) / 32); + + for (i = 0, n = 0; i < height; i++) + { + for (j = 0; j < stride; j++) + { + if (j < width) + { + value = vbp_get_bit_vc1( + from_plane, + ¤t_word, + ¤t_bit); + + to_plane[n / 2] |= value << (nibble_shift + ((n % 2) ? 
0 : 4)); + n++; + } + else + { + break; + } + } + if (stride > width) + { + current_word++; + current_bit = 0; + } + } + + return error; } @@ -513,166 +513,166 @@ static uint32 vbp_pack_bitplane_vc1( */ static inline uint32 vbp_map_bfraction(uint32 numerator, uint32 denominator) { - uint32 b_fraction = 0; + uint32 b_fraction = 0; - if ((numerator < 8) && (denominator < 9)) - { - b_fraction = b_fraction_table[numerator][denominator]; - } + if ((numerator < 8) && (denominator < 9)) + { + b_fraction = b_fraction_table[numerator][denominator]; + } - return b_fraction; + return b_fraction; } /** * */ static uint32 vbp_pack_bitplanes_vc1( - vbp_context *pcontext, - int index, - vbp_picture_data_vc1* pic_data) + vbp_context *pcontext, + int index, + vbp_picture_data_vc1* pic_data) { - uint32 error = VBP_OK; - if (0 == pic_data->pic_parms->bitplane_present.value) - { - /* return if bitplane is not present */ - pic_data->size_bitplanes = 0; - memset(pic_data->packed_bitplanes, 0, MAX_BITPLANE_SIZE); - return error; - } - - vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data; - vc1_metadata_t *seqLayerHeader = &(parser->info.metadata); - vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader); - - - /* set bit plane size */ - pic_data->size_bitplanes = ((seqLayerHeader->widthMB * seqLayerHeader->heightMB) + 1) / 2; - - - memset(pic_data->packed_bitplanes, 0, pic_data->size_bitplanes); - - /* see libva library va.h for nibble bit */ - switch (picLayerHeader->PTYPE) - { - case VC1_I_FRAME: - case VC1_BI_FRAME: - if (picLayerHeader->OVERFLAGS.imode) - { - vbp_pack_bitplane_vc1( - picLayerHeader->OVERFLAGS.databits, - pic_data->packed_bitplanes, - seqLayerHeader->widthMB, - seqLayerHeader->heightMB, - 2); - } - if (picLayerHeader->ACPRED.imode) - { - vbp_pack_bitplane_vc1( - picLayerHeader->ACPRED.databits, - pic_data->packed_bitplanes, - seqLayerHeader->widthMB, - seqLayerHeader->heightMB, - 1); - } - if (picLayerHeader->FIELDTX.imode) - { - vbp_pack_bitplane_vc1( - picLayerHeader->FIELDTX.databits, - pic_data->packed_bitplanes, - seqLayerHeader->widthMB, - seqLayerHeader->heightMB, - 0); - } - /* sanity check */ - if (picLayerHeader->MVTYPEMB.imode || - picLayerHeader->DIRECTMB.imode || - picLayerHeader->SKIPMB.imode || - picLayerHeader->FORWARDMB.imode) - { - ETRACE("Unexpected bit-plane type."); - error = VBP_TYPE; - } - break; - - case VC1_P_FRAME: - if (picLayerHeader->MVTYPEMB.imode) - { - vbp_pack_bitplane_vc1( - picLayerHeader->MVTYPEMB.databits, - pic_data->packed_bitplanes, - seqLayerHeader->widthMB, - seqLayerHeader->heightMB, - 2); - } - if (picLayerHeader->SKIPMB.imode) - { - vbp_pack_bitplane_vc1( - picLayerHeader->SKIPMB.databits, - pic_data->packed_bitplanes, - seqLayerHeader->widthMB, - seqLayerHeader->heightMB, - 1); - } - if (picLayerHeader->DIRECTMB.imode) - { - vbp_pack_bitplane_vc1( - picLayerHeader->DIRECTMB.databits, - pic_data->packed_bitplanes, - seqLayerHeader->widthMB, - seqLayerHeader->heightMB, - 0); - } - /* sanity check */ - if (picLayerHeader->FIELDTX.imode || - picLayerHeader->FORWARDMB.imode || - picLayerHeader->ACPRED.imode || - picLayerHeader->OVERFLAGS.imode ) - { - ETRACE("Unexpected bit-plane type."); - error = VBP_TYPE; - } - break; - - case VC1_B_FRAME: - if (picLayerHeader->FORWARDMB.imode) - { - vbp_pack_bitplane_vc1( - picLayerHeader->FORWARDMB.databits, - pic_data->packed_bitplanes, - seqLayerHeader->widthMB, - seqLayerHeader->heightMB, - 2); - } - if (picLayerHeader->SKIPMB.imode) - { - 
vbp_pack_bitplane_vc1( - picLayerHeader->SKIPMB.databits, - pic_data->packed_bitplanes, - seqLayerHeader->widthMB, - seqLayerHeader->heightMB, - 1); - } - if (picLayerHeader->DIRECTMB.imode) - { - vbp_pack_bitplane_vc1( - picLayerHeader->DIRECTMB.databits, - pic_data->packed_bitplanes, - seqLayerHeader->widthMB, - seqLayerHeader->heightMB, - 0); - } - /* sanity check */ - if (picLayerHeader->MVTYPEMB.imode || - picLayerHeader->FIELDTX.imode || - picLayerHeader->ACPRED.imode || - picLayerHeader->OVERFLAGS.imode) - { - ETRACE("Unexpected bit-plane type."); - error = VBP_TYPE; - } - break; - } - return error; + uint32 error = VBP_OK; + if (0 == pic_data->pic_parms->bitplane_present.value) + { + /* return if bitplane is not present */ + pic_data->size_bitplanes = 0; + memset(pic_data->packed_bitplanes, 0, MAX_BITPLANE_SIZE); + return error; + } + + vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data; + vc1_metadata_t *seqLayerHeader = &(parser->info.metadata); + vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader); + + + /* set bit plane size */ + pic_data->size_bitplanes = ((seqLayerHeader->widthMB * seqLayerHeader->heightMB) + 1) / 2; + + + memset(pic_data->packed_bitplanes, 0, pic_data->size_bitplanes); + + /* see libva library va.h for nibble bit */ + switch (picLayerHeader->PTYPE) + { + case VC1_I_FRAME: + case VC1_BI_FRAME: + if (picLayerHeader->OVERFLAGS.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->OVERFLAGS.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 2); + } + if (picLayerHeader->ACPRED.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->ACPRED.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 1); + } + if (picLayerHeader->FIELDTX.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->FIELDTX.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 0); + } + /* sanity check */ + if (picLayerHeader->MVTYPEMB.imode || + picLayerHeader->DIRECTMB.imode || + picLayerHeader->SKIPMB.imode || + picLayerHeader->FORWARDMB.imode) + { + ETRACE("Unexpected bit-plane type."); + error = VBP_TYPE; + } + break; + + case VC1_P_FRAME: + if (picLayerHeader->MVTYPEMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->MVTYPEMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 2); + } + if (picLayerHeader->SKIPMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->SKIPMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 1); + } + if (picLayerHeader->DIRECTMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->DIRECTMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 0); + } + /* sanity check */ + if (picLayerHeader->FIELDTX.imode || + picLayerHeader->FORWARDMB.imode || + picLayerHeader->ACPRED.imode || + picLayerHeader->OVERFLAGS.imode ) + { + ETRACE("Unexpected bit-plane type."); + error = VBP_TYPE; + } + break; + + case VC1_B_FRAME: + if (picLayerHeader->FORWARDMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->FORWARDMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 2); + } + if (picLayerHeader->SKIPMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->SKIPMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 1); + } + if (picLayerHeader->DIRECTMB.imode) + 
{ + vbp_pack_bitplane_vc1( + picLayerHeader->DIRECTMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 0); + } + /* sanity check */ + if (picLayerHeader->MVTYPEMB.imode || + picLayerHeader->FIELDTX.imode || + picLayerHeader->ACPRED.imode || + picLayerHeader->OVERFLAGS.imode) + { + ETRACE("Unexpected bit-plane type."); + error = VBP_TYPE; + } + break; + } + return error; } @@ -683,29 +683,29 @@ static uint32 vbp_pack_bitplanes_vc1( */ uint32 vbp_populate_query_data_vc1(vbp_context *pcontext) { - uint32 error = VBP_OK; + uint32 error = VBP_OK; - vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data; - vc1_metadata_t *seqLayerHeader = &(parser->info.metadata); + vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data; + vc1_metadata_t *seqLayerHeader = &(parser->info.metadata); - vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data; + vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data; - /* first we get the SH/EP data. Can we cut down on this? */ - vbp_codec_data_vc1 *se_data = query_data->se_data; - se_data->PROFILE = seqLayerHeader->PROFILE; - se_data->LEVEL = seqLayerHeader->LEVEL; - se_data->POSTPROCFLAG = seqLayerHeader->POSTPROCFLAG; - se_data->PULLDOWN = seqLayerHeader->PULLDOWN; - se_data->INTERLACE = seqLayerHeader->INTERLACE; - se_data->TFCNTRFLAG = seqLayerHeader->TFCNTRFLAG; - se_data->FINTERPFLAG = seqLayerHeader->FINTERPFLAG; - se_data->PSF = seqLayerHeader->PSF; + /* first we get the SH/EP data. Can we cut down on this? */ + vbp_codec_data_vc1 *se_data = query_data->se_data; + se_data->PROFILE = seqLayerHeader->PROFILE; + se_data->LEVEL = seqLayerHeader->LEVEL; + se_data->POSTPROCFLAG = seqLayerHeader->POSTPROCFLAG; + se_data->PULLDOWN = seqLayerHeader->PULLDOWN; + se_data->INTERLACE = seqLayerHeader->INTERLACE; + se_data->TFCNTRFLAG = seqLayerHeader->TFCNTRFLAG; + se_data->FINTERPFLAG = seqLayerHeader->FINTERPFLAG; + se_data->PSF = seqLayerHeader->PSF; // color matrix if (seqLayerHeader->COLOR_FORMAT_FLAG) { - se_data->MATRIX_COEF = seqLayerHeader->MATRIX_COEF; - } + se_data->MATRIX_COEF = seqLayerHeader->MATRIX_COEF; + } else { //ITU-R BT. 601-5. 
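The nibble packing done by vbp_pack_bitplane_vc1 above stores two macroblocks per byte of packed_bitplanes: the (n % 2) term puts even-numbered macroblocks in the high nibble and odd ones in the low nibble, while nibble_shift (2, 1 or 0 at the call sites) selects which of up to three coded bitplanes a bit occupies inside the nibble. A toy standalone self-check of that layout, using the I/BI-picture slot assignment from vbp_pack_bitplanes_vc1 (OVERFLAGS in slot 2, ACPRED in slot 1, FIELDTX in slot 0):

    #include <assert.h>
    #include <stdint.h>

    int main(void)
    {
        uint8_t packed = 0;

        /* MB0 (even, high nibble): OVERFLAGS=1, ACPRED=0, FIELDTX=1 */
        packed |= (uint8_t)(1u << (2 + 4));
        packed |= (uint8_t)(1u << (0 + 4));

        /* MB1 (odd, low nibble): only ACPRED=1 */
        packed |= (uint8_t)(1u << 1);

        assert(packed == 0x52);  /* high nibble 0101, low nibble 0010 */
        return 0;
    }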
@@ -719,7 +719,7 @@ uint32 vbp_populate_query_data_vc1(vbp_context *pcontext) if (se_data->ASPECT_RATIO < 14) { se_data->ASPECT_HORIZ_SIZE = vc1_aspect_ratio_table[se_data->ASPECT_RATIO][0]; - se_data->ASPECT_VERT_SIZE = vc1_aspect_ratio_table[se_data->ASPECT_RATIO][1]; + se_data->ASPECT_VERT_SIZE = vc1_aspect_ratio_table[se_data->ASPECT_RATIO][1]; } else if (se_data->ASPECT_RATIO == 15) { @@ -727,7 +727,7 @@ uint32 vbp_populate_query_data_vc1(vbp_context *pcontext) se_data->ASPECT_VERT_SIZE = seqLayerHeader->ASPECT_VERT_SIZE; } else // se_data->ASPECT_RATIO == 14 - { + { se_data->ASPECT_HORIZ_SIZE = 0; se_data->ASPECT_VERT_SIZE = 0; } @@ -739,291 +739,291 @@ uint32 vbp_populate_query_data_vc1(vbp_context *pcontext) se_data->ASPECT_HORIZ_SIZE = 0; se_data->ASPECT_VERT_SIZE = 0; } - - se_data->BROKEN_LINK = seqLayerHeader->BROKEN_LINK; - se_data->CLOSED_ENTRY = seqLayerHeader->CLOSED_ENTRY; - se_data->PANSCAN_FLAG = seqLayerHeader->PANSCAN_FLAG; - se_data->REFDIST_FLAG = seqLayerHeader->REFDIST_FLAG; - se_data->LOOPFILTER = seqLayerHeader->LOOPFILTER; - se_data->FASTUVMC = seqLayerHeader->FASTUVMC; - se_data->EXTENDED_MV = seqLayerHeader->EXTENDED_MV; - se_data->DQUANT = seqLayerHeader->DQUANT; - se_data->VSTRANSFORM = seqLayerHeader->VSTRANSFORM; - se_data->OVERLAP = seqLayerHeader->OVERLAP; - se_data->QUANTIZER = seqLayerHeader->QUANTIZER; - se_data->CODED_WIDTH = (seqLayerHeader->width + 1) << 1; - se_data->CODED_HEIGHT = (seqLayerHeader->height + 1) << 1; - se_data->EXTENDED_DMV = seqLayerHeader->EXTENDED_DMV; - se_data->RANGE_MAPY_FLAG = seqLayerHeader->RANGE_MAPY_FLAG; - se_data->RANGE_MAPY = seqLayerHeader->RANGE_MAPY; - se_data->RANGE_MAPUV_FLAG = seqLayerHeader->RANGE_MAPUV_FLAG; - se_data->RANGE_MAPUV = seqLayerHeader->RANGE_MAPUV; - se_data->RANGERED = seqLayerHeader->RANGERED; - se_data->MAXBFRAMES = seqLayerHeader->MAXBFRAMES; - se_data->MULTIRES = seqLayerHeader->MULTIRES; - se_data->SYNCMARKER = seqLayerHeader->SYNCMARKER; - se_data->RNDCTRL = seqLayerHeader->RNDCTRL; - se_data->REFDIST = seqLayerHeader->REFDIST; - se_data->widthMB = seqLayerHeader->widthMB; - se_data->heightMB = seqLayerHeader->heightMB; - se_data->INTCOMPFIELD = seqLayerHeader->INTCOMPFIELD; - se_data->LUMSCALE2 = seqLayerHeader->LUMSCALE2; - se_data->LUMSHIFT2 = seqLayerHeader->LUMSHIFT2; - - /* update buffer number */ - query_data->buf_number = buffer_counter; - - if (query_data->num_pictures > 2) - { - WTRACE("sampe buffer contains %d pictures", query_data->num_pictures); - } - return error; + + se_data->BROKEN_LINK = seqLayerHeader->BROKEN_LINK; + se_data->CLOSED_ENTRY = seqLayerHeader->CLOSED_ENTRY; + se_data->PANSCAN_FLAG = seqLayerHeader->PANSCAN_FLAG; + se_data->REFDIST_FLAG = seqLayerHeader->REFDIST_FLAG; + se_data->LOOPFILTER = seqLayerHeader->LOOPFILTER; + se_data->FASTUVMC = seqLayerHeader->FASTUVMC; + se_data->EXTENDED_MV = seqLayerHeader->EXTENDED_MV; + se_data->DQUANT = seqLayerHeader->DQUANT; + se_data->VSTRANSFORM = seqLayerHeader->VSTRANSFORM; + se_data->OVERLAP = seqLayerHeader->OVERLAP; + se_data->QUANTIZER = seqLayerHeader->QUANTIZER; + se_data->CODED_WIDTH = (seqLayerHeader->width + 1) << 1; + se_data->CODED_HEIGHT = (seqLayerHeader->height + 1) << 1; + se_data->EXTENDED_DMV = seqLayerHeader->EXTENDED_DMV; + se_data->RANGE_MAPY_FLAG = seqLayerHeader->RANGE_MAPY_FLAG; + se_data->RANGE_MAPY = seqLayerHeader->RANGE_MAPY; + se_data->RANGE_MAPUV_FLAG = seqLayerHeader->RANGE_MAPUV_FLAG; + se_data->RANGE_MAPUV = seqLayerHeader->RANGE_MAPUV; + se_data->RANGERED = seqLayerHeader->RANGERED; + 
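The CODED_WIDTH/CODED_HEIGHT reconstruction above follows the VC-1 convention that the sequence header stores half the coded dimension minus one (MAX_CODED_WIDTH/MAX_CODED_HEIGHT), so the coded size is (field + 1) * 2. A quick standalone check with assumed header values:

    #include <assert.h>
    #include <stdint.h>

    int main(void)
    {
        /* A 720x480 stream carries width=359, height=239 in its header. */
        uint32_t width = 359, height = 239;

        assert(((width + 1) << 1) == 720);
        assert(((height + 1) << 1) == 480);
        return 0;
    }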
+    se_data->MAXBFRAMES = seqLayerHeader->MAXBFRAMES;
+    se_data->MULTIRES = seqLayerHeader->MULTIRES;
+    se_data->SYNCMARKER = seqLayerHeader->SYNCMARKER;
+    se_data->RNDCTRL = seqLayerHeader->RNDCTRL;
+    se_data->REFDIST = seqLayerHeader->REFDIST;
+    se_data->widthMB = seqLayerHeader->widthMB;
+    se_data->heightMB = seqLayerHeader->heightMB;
+    se_data->INTCOMPFIELD = seqLayerHeader->INTCOMPFIELD;
+    se_data->LUMSCALE2 = seqLayerHeader->LUMSCALE2;
+    se_data->LUMSHIFT2 = seqLayerHeader->LUMSHIFT2;
+
+    /* update buffer number */
+    query_data->buf_number = buffer_counter;
+
+    if (query_data->num_pictures > 2)
+    {
+        WTRACE("sample buffer contains %d pictures", query_data->num_pictures);
+    }
+    return error;
}

static void vbp_pack_picture_params_vc1(
-    vbp_context *pcontext,
-    int index,
-    vbp_picture_data_vc1* pic_data)
+    vbp_context *pcontext,
+    int index,
+    vbp_picture_data_vc1* pic_data)
{
-    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
-    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
-    vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
-    vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader);
-
-
-    VAPictureParameterBufferVC1 *pic_parms = pic_data->pic_parms;
-
-    /* Then we get the picture header data. Picture type need translation. */
-    pic_parms->forward_reference_picture = VA_INVALID_SURFACE;
-    pic_parms->backward_reference_picture = VA_INVALID_SURFACE;
-    pic_parms->inloop_decoded_picture = VA_INVALID_SURFACE;
-
-    pic_parms->sequence_fields.value = 0;
-    pic_parms->sequence_fields.bits.pulldown = seqLayerHeader->PULLDOWN;
-    pic_parms->sequence_fields.bits.interlace = seqLayerHeader->INTERLACE;
-    pic_parms->sequence_fields.bits.tfcntrflag = seqLayerHeader->TFCNTRFLAG;
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
+    vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
+    vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader);
+
+
+    VAPictureParameterBufferVC1 *pic_parms = pic_data->pic_parms;
+
+    /* Then we get the picture header data. Picture type needs translation.
*/ + pic_parms->forward_reference_picture = VA_INVALID_SURFACE; + pic_parms->backward_reference_picture = VA_INVALID_SURFACE; + pic_parms->inloop_decoded_picture = VA_INVALID_SURFACE; + + pic_parms->sequence_fields.value = 0; + pic_parms->sequence_fields.bits.pulldown = seqLayerHeader->PULLDOWN; + pic_parms->sequence_fields.bits.interlace = seqLayerHeader->INTERLACE; + pic_parms->sequence_fields.bits.tfcntrflag = seqLayerHeader->TFCNTRFLAG; pic_parms->sequence_fields.bits.finterpflag = seqLayerHeader->FINTERPFLAG; pic_parms->sequence_fields.bits.psf = seqLayerHeader->PSF; pic_parms->sequence_fields.bits.multires = seqLayerHeader->MULTIRES; pic_parms->sequence_fields.bits.overlap = seqLayerHeader->OVERLAP; - pic_parms->sequence_fields.bits.syncmarker = seqLayerHeader->SYNCMARKER; + pic_parms->sequence_fields.bits.syncmarker = seqLayerHeader->SYNCMARKER; pic_parms->sequence_fields.bits.rangered = seqLayerHeader->RANGERED; pic_parms->sequence_fields.bits.max_b_frames = seqLayerHeader->MAXBFRAMES; - pic_parms->coded_width = (seqLayerHeader->width + 1) << 1; - pic_parms->coded_height = (seqLayerHeader->height + 1) << 1; - - pic_parms->entrypoint_fields.value = 0; - pic_parms->entrypoint_fields.bits.closed_entry = seqLayerHeader->CLOSED_ENTRY; - pic_parms->entrypoint_fields.bits.broken_link = seqLayerHeader->BROKEN_LINK; - pic_parms->entrypoint_fields.bits.loopfilter = seqLayerHeader->LOOPFILTER; - pic_parms->entrypoint_fields.bits.panscan_flag = seqLayerHeader->PANSCAN_FLAG; - - pic_parms->conditional_overlap_flag = picLayerHeader->CONDOVER; - pic_parms->fast_uvmc_flag = seqLayerHeader->FASTUVMC; - - pic_parms->range_mapping_fields.value = 0; - pic_parms->range_mapping_fields.bits.luma_flag = seqLayerHeader->RANGE_MAPY_FLAG; - pic_parms->range_mapping_fields.bits.luma = seqLayerHeader->RANGE_MAPY; - pic_parms->range_mapping_fields.bits.chroma_flag = seqLayerHeader->RANGE_MAPUV_FLAG; - pic_parms->range_mapping_fields.bits.chroma = seqLayerHeader->RANGE_MAPUV; - - pic_parms->b_picture_fraction = - vbp_map_bfraction(picLayerHeader->BFRACTION_NUM, picLayerHeader->BFRACTION_DEN); - - pic_parms->cbp_table = picLayerHeader->CBPTAB; - pic_parms->mb_mode_table = picLayerHeader->MBMODETAB; - pic_parms->range_reduction_frame = picLayerHeader->RANGEREDFRM; - pic_parms->rounding_control = picLayerHeader->RNDCTRL; - pic_parms->post_processing = picLayerHeader->POSTPROC; - /* fix this. Add RESPIC to parser. 
*/ - pic_parms->picture_resolution_index = 0; - pic_parms->luma_scale = picLayerHeader->LUMSCALE; - pic_parms->luma_shift = picLayerHeader->LUMSHIFT; - - pic_parms->picture_fields.value = 0; - switch (picLayerHeader->PTYPE) - { - case VC1_I_FRAME: - pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_I; - break; - - case VC1_P_FRAME: - pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_P; - break; - - case VC1_B_FRAME: - pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_B; - break; - - case VC1_BI_FRAME: - pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_BI; - break; - - case VC1_SKIPPED_FRAME: - pic_data->picture_is_skipped = VC1_PTYPE_SKIPPED; - break; - - default: - /* to do: handle this case */ - break; - } - pic_parms->picture_fields.bits.frame_coding_mode = picLayerHeader->FCM; - if (0 == seqLayerHeader->PROFILE || 1 == seqLayerHeader->PROFILE) - { - /* simple or main profile, top field flag is not present, default to 1.*/ - pic_parms->picture_fields.bits.top_field_first = 1; - } - else - { - pic_parms->picture_fields.bits.top_field_first = picLayerHeader->TFF; - } - - pic_parms->picture_fields.bits.is_first_field = !(picLayerHeader->CurrField); - /* This seems to be set based on the MVMODE and MVMODE2 syntax. */ - /* This is a hack. Probably will need refining. */ - if ((VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE) || - (VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE2)) - { - pic_parms->picture_fields.bits.intensity_compensation = 1; - } - else - { - pic_parms->picture_fields.bits.intensity_compensation = picLayerHeader->INTCOMP; - } - - /* Lets store the raw-mode BP bits. */ - pic_parms->raw_coding.value = 0; - pic_parms->raw_coding.flags.mv_type_mb = picLayerHeader->raw_MVTYPEMB; - pic_parms->raw_coding.flags.direct_mb = picLayerHeader->raw_DIRECTMB; - pic_parms->raw_coding.flags.skip_mb = picLayerHeader->raw_SKIPMB; - pic_parms->raw_coding.flags.field_tx = picLayerHeader->raw_FIELDTX; - pic_parms->raw_coding.flags.forward_mb = picLayerHeader->raw_FORWARDMB; - pic_parms->raw_coding.flags.ac_pred = picLayerHeader->raw_ACPRED; - pic_parms->raw_coding.flags.overflags = picLayerHeader->raw_OVERFLAGS; - - /* imode 1/0 indicates bitmap presence in Pic Hdr. */ - pic_parms->bitplane_present.value = 0; - - pic_parms->bitplane_present.flags.bp_mv_type_mb = - pic_parms->raw_coding.flags.mv_type_mb ? 1 : - (picLayerHeader->MVTYPEMB.imode ? 1: 0); - - pic_parms->bitplane_present.flags.bp_direct_mb = - pic_parms->raw_coding.flags.direct_mb ? 1 : - (picLayerHeader->DIRECTMB.imode ? 1: 0); - - pic_parms->bitplane_present.flags.bp_skip_mb = - pic_parms->raw_coding.flags.skip_mb ? 1 : - (picLayerHeader->SKIPMB.imode ? 1: 0); - - pic_parms->bitplane_present.flags.bp_field_tx = - pic_parms->raw_coding.flags.field_tx ? 1 : - (picLayerHeader->FIELDTX.imode ? 1: 0); - - pic_parms->bitplane_present.flags.bp_forward_mb = - pic_parms->raw_coding.flags.forward_mb ? 1 : - (picLayerHeader->FORWARDMB.imode ? 1: 0); - - pic_parms->bitplane_present.flags.bp_ac_pred = - pic_parms->raw_coding.flags.ac_pred ? 1 : - (picLayerHeader->ACPRED.imode ? 1: 0); - - pic_parms->bitplane_present.flags.bp_overflags = - pic_parms->raw_coding.flags.overflags ? 1 : - (picLayerHeader->OVERFLAGS.imode ? 
1: 0); - - pic_parms->reference_fields.value = 0; - pic_parms->reference_fields.bits.reference_distance_flag = - seqLayerHeader->REFDIST_FLAG; - - pic_parms->reference_fields.bits.reference_distance = - seqLayerHeader->REFDIST; - - pic_parms->reference_fields.bits.num_reference_pictures = - picLayerHeader->NUMREF; - - pic_parms->reference_fields.bits.reference_field_pic_indicator = - picLayerHeader->REFFIELD; - - pic_parms->mv_fields.value = 0; - pic_parms->mv_fields.bits.mv_mode = picLayerHeader->MVMODE; - pic_parms->mv_fields.bits.mv_mode2 = picLayerHeader->MVMODE2; - - pic_parms->mv_fields.bits.mv_table = picLayerHeader->MVTAB; - pic_parms->mv_fields.bits.two_mv_block_pattern_table = picLayerHeader->MV2BPTAB; - pic_parms->mv_fields.bits.four_mv_switch = picLayerHeader->MV4SWITCH; - pic_parms->mv_fields.bits.four_mv_block_pattern_table = picLayerHeader->MV4BPTAB; - pic_parms->mv_fields.bits.extended_mv_flag = seqLayerHeader->EXTENDED_MV; - pic_parms->mv_fields.bits.extended_mv_range = picLayerHeader->MVRANGE; - pic_parms->mv_fields.bits.extended_dmv_flag = seqLayerHeader->EXTENDED_DMV; - pic_parms->mv_fields.bits.extended_dmv_range = picLayerHeader->DMVRANGE; - - pic_parms->pic_quantizer_fields.value = 0; - pic_parms->pic_quantizer_fields.bits.dquant = seqLayerHeader->DQUANT; - pic_parms->pic_quantizer_fields.bits.quantizer = seqLayerHeader->QUANTIZER; - pic_parms->pic_quantizer_fields.bits.half_qp = picLayerHeader->HALFQP; - pic_parms->pic_quantizer_fields.bits.pic_quantizer_scale = picLayerHeader->PQUANT; - pic_parms->pic_quantizer_fields.bits.pic_quantizer_type = picLayerHeader->UniformQuant; - pic_parms->pic_quantizer_fields.bits.dq_frame = picLayerHeader->DQUANTFRM; - pic_parms->pic_quantizer_fields.bits.dq_profile = picLayerHeader->DQPROFILE; - pic_parms->pic_quantizer_fields.bits.dq_sb_edge = picLayerHeader->DQSBEDGE; - pic_parms->pic_quantizer_fields.bits.dq_db_edge = picLayerHeader->DQDBEDGE; - pic_parms->pic_quantizer_fields.bits.dq_binary_level = picLayerHeader->DQBILEVEL; - pic_parms->pic_quantizer_fields.bits.alt_pic_quantizer = picLayerHeader->ALTPQUANT; - - pic_parms->transform_fields.value = 0; - pic_parms->transform_fields.bits.variable_sized_transform_flag = - seqLayerHeader->VSTRANSFORM; - - pic_parms->transform_fields.bits.mb_level_transform_type_flag = picLayerHeader->TTMBF; - pic_parms->transform_fields.bits.frame_level_transform_type = picLayerHeader->TTFRM; - - pic_parms->transform_fields.bits.transform_ac_codingset_idx1 = - (picLayerHeader->TRANSACFRM > 0) ? picLayerHeader->TRANSACFRM - 1 : 0; - - pic_parms->transform_fields.bits.transform_ac_codingset_idx2 = - (picLayerHeader->TRANSACFRM2 > 0) ? 
picLayerHeader->TRANSACFRM2 - 1 : 0; - - pic_parms->transform_fields.bits.intra_transform_dc_table = picLayerHeader->TRANSDCTAB; + pic_parms->coded_width = (seqLayerHeader->width + 1) << 1; + pic_parms->coded_height = (seqLayerHeader->height + 1) << 1; + + pic_parms->entrypoint_fields.value = 0; + pic_parms->entrypoint_fields.bits.closed_entry = seqLayerHeader->CLOSED_ENTRY; + pic_parms->entrypoint_fields.bits.broken_link = seqLayerHeader->BROKEN_LINK; + pic_parms->entrypoint_fields.bits.loopfilter = seqLayerHeader->LOOPFILTER; + pic_parms->entrypoint_fields.bits.panscan_flag = seqLayerHeader->PANSCAN_FLAG; + + pic_parms->conditional_overlap_flag = picLayerHeader->CONDOVER; + pic_parms->fast_uvmc_flag = seqLayerHeader->FASTUVMC; + + pic_parms->range_mapping_fields.value = 0; + pic_parms->range_mapping_fields.bits.luma_flag = seqLayerHeader->RANGE_MAPY_FLAG; + pic_parms->range_mapping_fields.bits.luma = seqLayerHeader->RANGE_MAPY; + pic_parms->range_mapping_fields.bits.chroma_flag = seqLayerHeader->RANGE_MAPUV_FLAG; + pic_parms->range_mapping_fields.bits.chroma = seqLayerHeader->RANGE_MAPUV; + + pic_parms->b_picture_fraction = + vbp_map_bfraction(picLayerHeader->BFRACTION_NUM, picLayerHeader->BFRACTION_DEN); + + pic_parms->cbp_table = picLayerHeader->CBPTAB; + pic_parms->mb_mode_table = picLayerHeader->MBMODETAB; + pic_parms->range_reduction_frame = picLayerHeader->RANGEREDFRM; + pic_parms->rounding_control = picLayerHeader->RNDCTRL; + pic_parms->post_processing = picLayerHeader->POSTPROC; + /* fix this. Add RESPIC to parser. */ + pic_parms->picture_resolution_index = 0; + pic_parms->luma_scale = picLayerHeader->LUMSCALE; + pic_parms->luma_shift = picLayerHeader->LUMSHIFT; + + pic_parms->picture_fields.value = 0; + switch (picLayerHeader->PTYPE) + { + case VC1_I_FRAME: + pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_I; + break; + + case VC1_P_FRAME: + pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_P; + break; + + case VC1_B_FRAME: + pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_B; + break; + + case VC1_BI_FRAME: + pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_BI; + break; + + case VC1_SKIPPED_FRAME: + pic_data->picture_is_skipped = VC1_PTYPE_SKIPPED; + break; + + default: + /* to do: handle this case */ + break; + } + pic_parms->picture_fields.bits.frame_coding_mode = picLayerHeader->FCM; + if (0 == seqLayerHeader->PROFILE || 1 == seqLayerHeader->PROFILE) + { + /* simple or main profile, top field flag is not present, default to 1.*/ + pic_parms->picture_fields.bits.top_field_first = 1; + } + else + { + pic_parms->picture_fields.bits.top_field_first = picLayerHeader->TFF; + } + + pic_parms->picture_fields.bits.is_first_field = !(picLayerHeader->CurrField); + /* This seems to be set based on the MVMODE and MVMODE2 syntax. */ + /* This is a hack. Probably will need refining. */ + if ((VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE) || + (VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE2)) + { + pic_parms->picture_fields.bits.intensity_compensation = 1; + } + else + { + pic_parms->picture_fields.bits.intensity_compensation = picLayerHeader->INTCOMP; + } + + /* Lets store the raw-mode BP bits. 
*/ + pic_parms->raw_coding.value = 0; + pic_parms->raw_coding.flags.mv_type_mb = picLayerHeader->raw_MVTYPEMB; + pic_parms->raw_coding.flags.direct_mb = picLayerHeader->raw_DIRECTMB; + pic_parms->raw_coding.flags.skip_mb = picLayerHeader->raw_SKIPMB; + pic_parms->raw_coding.flags.field_tx = picLayerHeader->raw_FIELDTX; + pic_parms->raw_coding.flags.forward_mb = picLayerHeader->raw_FORWARDMB; + pic_parms->raw_coding.flags.ac_pred = picLayerHeader->raw_ACPRED; + pic_parms->raw_coding.flags.overflags = picLayerHeader->raw_OVERFLAGS; + + /* imode 1/0 indicates bitmap presence in Pic Hdr. */ + pic_parms->bitplane_present.value = 0; + + pic_parms->bitplane_present.flags.bp_mv_type_mb = + pic_parms->raw_coding.flags.mv_type_mb ? 1 : + (picLayerHeader->MVTYPEMB.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_direct_mb = + pic_parms->raw_coding.flags.direct_mb ? 1 : + (picLayerHeader->DIRECTMB.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_skip_mb = + pic_parms->raw_coding.flags.skip_mb ? 1 : + (picLayerHeader->SKIPMB.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_field_tx = + pic_parms->raw_coding.flags.field_tx ? 1 : + (picLayerHeader->FIELDTX.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_forward_mb = + pic_parms->raw_coding.flags.forward_mb ? 1 : + (picLayerHeader->FORWARDMB.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_ac_pred = + pic_parms->raw_coding.flags.ac_pred ? 1 : + (picLayerHeader->ACPRED.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_overflags = + pic_parms->raw_coding.flags.overflags ? 1 : + (picLayerHeader->OVERFLAGS.imode ? 1: 0); + + pic_parms->reference_fields.value = 0; + pic_parms->reference_fields.bits.reference_distance_flag = + seqLayerHeader->REFDIST_FLAG; + + pic_parms->reference_fields.bits.reference_distance = + seqLayerHeader->REFDIST; + + pic_parms->reference_fields.bits.num_reference_pictures = + picLayerHeader->NUMREF; + + pic_parms->reference_fields.bits.reference_field_pic_indicator = + picLayerHeader->REFFIELD; + + pic_parms->mv_fields.value = 0; + pic_parms->mv_fields.bits.mv_mode = picLayerHeader->MVMODE; + pic_parms->mv_fields.bits.mv_mode2 = picLayerHeader->MVMODE2; + + pic_parms->mv_fields.bits.mv_table = picLayerHeader->MVTAB; + pic_parms->mv_fields.bits.two_mv_block_pattern_table = picLayerHeader->MV2BPTAB; + pic_parms->mv_fields.bits.four_mv_switch = picLayerHeader->MV4SWITCH; + pic_parms->mv_fields.bits.four_mv_block_pattern_table = picLayerHeader->MV4BPTAB; + pic_parms->mv_fields.bits.extended_mv_flag = seqLayerHeader->EXTENDED_MV; + pic_parms->mv_fields.bits.extended_mv_range = picLayerHeader->MVRANGE; + pic_parms->mv_fields.bits.extended_dmv_flag = seqLayerHeader->EXTENDED_DMV; + pic_parms->mv_fields.bits.extended_dmv_range = picLayerHeader->DMVRANGE; + + pic_parms->pic_quantizer_fields.value = 0; + pic_parms->pic_quantizer_fields.bits.dquant = seqLayerHeader->DQUANT; + pic_parms->pic_quantizer_fields.bits.quantizer = seqLayerHeader->QUANTIZER; + pic_parms->pic_quantizer_fields.bits.half_qp = picLayerHeader->HALFQP; + pic_parms->pic_quantizer_fields.bits.pic_quantizer_scale = picLayerHeader->PQUANT; + pic_parms->pic_quantizer_fields.bits.pic_quantizer_type = picLayerHeader->UniformQuant; + pic_parms->pic_quantizer_fields.bits.dq_frame = picLayerHeader->DQUANTFRM; + pic_parms->pic_quantizer_fields.bits.dq_profile = picLayerHeader->DQPROFILE; + pic_parms->pic_quantizer_fields.bits.dq_sb_edge = picLayerHeader->DQSBEDGE; + pic_parms->pic_quantizer_fields.bits.dq_db_edge = picLayerHeader->DQDBEDGE; + 
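The raw_coding/bitplane_present pairing packed just above follows a single rule: a VC-1 bitplane is flagged as present either because the picture layer codes it raw (the raw_* flag is set) or because its coded form carries a non-zero invert mode (imode). A minimal C sketch of that rule, with illustrative names rather than the exact viddec/libva types:

    /* A bitplane must be surfaced to the decoder when it is raw-coded
     * or when a non-zero imode signals an explicitly coded bitplane.
     * raw_flag/imode are illustrative stand-ins for the header fields. */
    static inline unsigned int vc1_bitplane_present(unsigned int raw_flag,
                                                    unsigned int imode)
    {
        return raw_flag ? 1u : (imode ? 1u : 0u);
    }
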
pic_parms->pic_quantizer_fields.bits.dq_binary_level = picLayerHeader->DQBILEVEL; + pic_parms->pic_quantizer_fields.bits.alt_pic_quantizer = picLayerHeader->ALTPQUANT; + + pic_parms->transform_fields.value = 0; + pic_parms->transform_fields.bits.variable_sized_transform_flag = + seqLayerHeader->VSTRANSFORM; + + pic_parms->transform_fields.bits.mb_level_transform_type_flag = picLayerHeader->TTMBF; + pic_parms->transform_fields.bits.frame_level_transform_type = picLayerHeader->TTFRM; + + pic_parms->transform_fields.bits.transform_ac_codingset_idx1 = + (picLayerHeader->TRANSACFRM > 0) ? picLayerHeader->TRANSACFRM - 1 : 0; + + pic_parms->transform_fields.bits.transform_ac_codingset_idx2 = + (picLayerHeader->TRANSACFRM2 > 0) ? picLayerHeader->TRANSACFRM2 - 1 : 0; + + pic_parms->transform_fields.bits.intra_transform_dc_table = picLayerHeader->TRANSDCTAB; } static void vbp_pack_slice_data_vc1( - vbp_context *pcontext, - int index, - vbp_picture_data_vc1* pic_data) + vbp_context *pcontext, + int index, + vbp_picture_data_vc1* pic_data) { - viddec_pm_cxt_t *cxt = pcontext->parser_cxt; - uint32 slice_size = cxt->list.data[index].edpos - cxt->list.data[index].stpos; - uint32 bit; - uint32 byte; - uint8 is_emul; - viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul); - - vbp_slice_data_vc1 *slc_data = &(pic_data->slc_data[pic_data->num_slices]); - VASliceParameterBufferVC1 *slc_parms = &(slc_data->slc_parms); + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint32 slice_size = cxt->list.data[index].edpos - cxt->list.data[index].stpos; + uint32 bit; + uint32 byte; + uint8 is_emul; + viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul); + + vbp_slice_data_vc1 *slc_data = &(pic_data->slc_data[pic_data->num_slices]); + VASliceParameterBufferVC1 *slc_parms = &(slc_data->slc_parms); - /*uint32 data_offset = byte - cxt->list.data[index].stpos;*/ + /*uint32 data_offset = byte - cxt->list.data[index].stpos;*/ - slc_data->buffer_addr = cxt->parse_cubby.buf + cxt->list.data[index].stpos; - slc_data->slice_size = slice_size; - slc_data->slice_offset = 0; + slc_data->buffer_addr = cxt->parse_cubby.buf + cxt->list.data[index].stpos; + slc_data->slice_size = slice_size; + slc_data->slice_offset = 0; - slc_parms->slice_data_size = slc_data->slice_size; - slc_parms->slice_data_offset = 0; + slc_parms->slice_data_size = slc_data->slice_size; + slc_parms->slice_data_offset = 0; - /* fix this. we need to be able to handle partial slices. */ - slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + /* fix this. we need to be able to handle partial slices. */ + slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; - slc_parms->macroblock_offset = bit + byte * 8; + slc_parms->macroblock_offset = bit + byte * 8; - /* fix this. we need o get the slice_vertical_position from the code */ - slc_parms->slice_vertical_position = pic_data->num_slices; + /* fix this. 
we need to get the slice_vertical_position from the code */
+ slc_parms->slice_vertical_position = pic_data->num_slices;
- pic_data->num_slices++;
+ pic_data->num_slices++;
}

/**
@@ -1031,69 +1031,69 @@ static void vbp_pack_slice_data_vc1(
*/
uint32_t vbp_process_parsing_result_vc1(vbp_context *pcontext, int index)
{
- viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
- uint32 error = VBP_OK;
-
- vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
- if (parser->start_code != VC1_SC_FRM && parser->start_code != VC1_SC_FLD &&
- parser->start_code != VC1_SC_SLC)
- {
- /* only handle frame data, field data and slice data here
- */
- return VBP_OK;
- }
- vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data;
-
- if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
- {
- query_data->num_pictures++;
- }
-
- if (query_data->num_pictures > MAX_NUM_PICTURES)
- {
- ETRACE("Num of pictures per sample buffer exceeds the limit (%d).", MAX_NUM_PICTURES);
- return VBP_DATA;
- }
-
- if (query_data->num_pictures == 0)
- {
- ETRACE("Unexpected num of pictures.");
- return VBP_DATA;
- }
-
- /* start packing data */
- int picture_index = query_data->num_pictures - 1;
- vbp_picture_data_vc1* pic_data = &(query_data->pic_data[picture_index]);
-
- if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
- {
- /* setup picture parameter first*/
- vbp_pack_picture_params_vc1(pcontext, index, pic_data);
-
- /* setup bitplane after setting up picture parameter (so that bitplane_present is updated) */
- error = vbp_pack_bitplanes_vc1(pcontext, index, pic_data);
- if (VBP_OK != error)
- {
- ETRACE("Failed to pack bitplane.");
- return error;
- }
-
- }
-
- /* Always pack slice parameter. The first macroblock in the picture CANNOT
- * be preceeded by a slice header, so we will have first slice parsed always.
- *
- */
-
- if (pic_data->num_slices >= MAX_NUM_SLICES)
- {
- ETRACE("Num of slices exceeds the limit (%d).", MAX_NUM_SLICES);
- return VBP_DATA;
- }
-
- /* set up slice parameter */
- vbp_pack_slice_data_vc1(pcontext, index, pic_data);
-
-
- return VBP_OK;
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ uint32 error = VBP_OK;
+
+ vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
+ if (parser->start_code != VC1_SC_FRM && parser->start_code != VC1_SC_FLD &&
+ parser->start_code != VC1_SC_SLC)
+ {
+ /* only handle frame data, field data and slice data here
+ */
+ return VBP_OK;
+ }
+ vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+ if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
+ {
+ query_data->num_pictures++;
+ }
+
+ if (query_data->num_pictures > MAX_NUM_PICTURES)
+ {
+ ETRACE("Num of pictures per sample buffer exceeds the limit (%d).", MAX_NUM_PICTURES);
+ return VBP_DATA;
+ }
+
+ if (query_data->num_pictures == 0)
+ {
+ ETRACE("Unexpected num of pictures.");
+ return VBP_DATA;
+ }
+
+ /* start packing data */
+ int picture_index = query_data->num_pictures - 1;
+ vbp_picture_data_vc1* pic_data = &(query_data->pic_data[picture_index]);
+
+ if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
+ {
+ /* setup picture parameter first */
+ vbp_pack_picture_params_vc1(pcontext, index, pic_data);
+
+ /* setup bitplane after setting up picture parameter (so that bitplane_present is updated) */
+ error = vbp_pack_bitplanes_vc1(pcontext, index, pic_data);
+ if (VBP_OK != error)
+ {
+ ETRACE("Failed to pack bitplane.");
+ return error;
+ }
+
+ }
+
+ /* Always pack slice parameter. The first macroblock in the picture CANNOT
+ * be preceded by a slice header, so we will always have the first slice parsed.
+ *
+ */
+
+ if (pic_data->num_slices >= MAX_NUM_SLICES)
+ {
+ ETRACE("Num of slices exceeds the limit (%d).", MAX_NUM_SLICES);
+ return VBP_DATA;
+ }
+
+ /* set up slice parameter */
+ vbp_pack_slice_data_vc1(pcontext, index, pic_data);
+
+
+ return VBP_OK;
}

diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_emit.c b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c
index 12ddfe9..eda2bf7 100644
--- a/mix_vbp/viddec_fw/fw/parser/viddec_emit.c
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c
@@ -6,21 +6,21 @@
int32_t viddec_emit_flush_current_wkld(viddec_emitter *emit)
{
- if(emit->cur.data != NULL)
+ if (emit->cur.data != NULL)
{
emit->cur.data->num_items = emit->cur.num_items;
}
- if(emit->next.data != NULL)
+ if (emit->next.data != NULL)
{
emit->next.data->num_items = emit->next.num_items;
}
emit->cur.num_items = emit->next.num_items;
emit->next.num_items = 0;
- if(emit->cur.data != NULL)
+ if (emit->cur.data != NULL)
{
emit->cur.data->result = emit->cur.result;
}
- if(emit->next.data != NULL)
+ if (emit->next.data != NULL)
{
emit->next.data->result = emit->next.result;
}
@@ -32,7 +32,7 @@ int32_t viddec_emit_flush_current_wkld(viddec_emitter *emit)
int32_t viddec_emit_append(viddec_emitter_wkld *cxt, viddec_workload_item_t *item)
{
int32_t ret =0;
- if((cxt->num_items < cxt->max_items) && (cxt->data != NULL))
+ if ((cxt->num_items < cxt->max_items) && (cxt->data != NULL))
{
cxt->data->item[cxt->num_items] = *item;
cxt->num_items++;
@@ -54,7 +54,7 @@ int32_t viddec_emit_contr_tag(viddec_emitter *emit, viddec_input_buffer_t *ibuf,
cur_wkld = (using_next == 0)? &(emit->cur):&(emit->next);
- if(!incomplete)
+ if (!incomplete)
item.vwi_type = VIDDEC_WORKLOAD_IBUF_DONE;
else
item.vwi_type = VIDDEC_WORKLOAD_IBUF_CONTINUED;
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_intr.c b/mix_vbp/viddec_fw/fw/parser/viddec_intr.c
index fa6c1f2..c794e0f 100644
--- a/mix_vbp/viddec_fw/fw/parser/viddec_intr.c
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_intr.c
@@ -22,15 +22,15 @@ void mfd_trap_handler()
uint32_t reg=0, temp=0;
temp = reg_read(INT_STATUS);
//DEBUG_WRITE(0xff, temp, timer, 0, 0, 0);
- if(temp & INT_WDOG_ENABLE)
+ if (temp & INT_WDOG_ENABLE)
{
timer++;
set_wdog(VIDDEC_WATCHDOG_COUNTER_MAX);
reg = reg_read(INT_STATUS);
}
- if(temp & 0x4)
+ if (temp & 0x4)
{
-
+
temp = temp & (~0x4);
reg_write(INT_REG, temp);
//val = reg_read(DMA_CONTROL_STATUS);
@@ -38,14 +38,14 @@ void mfd_trap_handler()
//reg_write(DMA_CONTROL_STATUS, val);
//reg = reg_read(INT_STATUS);
}
- if(temp & 0x2)
+ if (temp & 0x2)
{
-
+
temp = temp & (~0x2);
reg_write(INT_REG, temp);
}
-
- if(temp & 0x1)
+
+ if (temp & 0x1)
{
temp = temp & (~0x1);
reg_write(INT_REG, temp);
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c
index 85b6b8e..6f5aae0 100644
--- a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c
@@ -31,19 +31,19 @@ uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state)
phase = cxt->phase;
cxt->sc_end_pos = -1;
pcxt=pcxt;
-
+
/* parse while there is more data and the start code has not been found */
- while((data_left > 0) &&(phase < 3))
+ while ((data_left > 0) &&(phase < 3))
{
/* Check if we are byte aligned & phase=0, if that's the case we can check a word at a time instead of a byte */
- if(((((uint32_t)ptr) & 0x3) == 0) && (phase == 0))
+ if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0))
{
- while(data_left > 3)
+
while (data_left > 3) { uint32_t data; char mask1 = 0, mask2=0; - + data = *((uint32_t *)ptr); #ifndef MFDBIGENDIAN data = SWAP_WORD(data); @@ -52,9 +52,11 @@ uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state) mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1)); /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need two consecutive zero bytes for a start code pattern */ - if(mask1 && mask2) + if (mask1 && mask2) {/* Success so skip 4 bytes and start over */ - ptr+=4;size+=4;data_left-=4; + ptr+=4; + size+=4; + data_left-=4; continue; } else @@ -63,35 +65,39 @@ uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state) } } } - + /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected two zero bytes in the word so we look one byte at a time*/ - if(data_left > 0) + if (data_left > 0) { - if(*ptr == FIRST_STARTCODE_BYTE) + if (*ptr == FIRST_STARTCODE_BYTE) {/* Phase can be 3 only if third start code byte is found */ phase++; - ptr++;size++;data_left--; - if(phase > 2) + ptr++; + size++; + data_left--; + if (phase > 2) { phase = 2; if ( (((uint32_t)ptr) & 0x3) == 0 ) { - while( data_left > 3 ) - { - if(*((uint32_t *)ptr) != 0) - { - break; - } - ptr+=4;size+=4;data_left-=4; - } + while ( data_left > 3 ) + { + if (*((uint32_t *)ptr) != 0) + { + break; + } + ptr+=4; + size+=4; + data_left-=4; + } } } } else { - if((*ptr == THIRD_STARTCODE_BYTE) && (phase == 2)) + if ((*ptr == THIRD_STARTCODE_BYTE) && (phase == 2)) {/* Match for start code so update context with byte position */ phase = 3; cxt->sc_end_pos = size; @@ -100,11 +106,13 @@ uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state) { phase = 0; } - ptr++;size++;data_left--; + ptr++; + size++; + data_left--; } } } - if((data_left > 0) && (phase == 3)) + if ((data_left > 0) && (phase == 3)) { viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state; cxt->sc_end_pos++; diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c index 6f00d27..004063c 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c @@ -16,175 +16,175 @@ #ifdef USE_2p25_CLOCK_PER_BYTE_LOOP static int parser_find_next_startcode( - const unsigned char *buf, - int i, - int len, - unsigned int *pphase ) + const unsigned char *buf, + int i, + int len, + unsigned int *pphase ) { - int sc_pos = -1; - int in_slow_loop; - register unsigned int scphase; - - scphase = *pphase; - - in_slow_loop = 1; - if ( (0 == (0x3 & i)) && /* dword aligned */ - (0 == scphase) && /* no "potential" SC detected */ - ((len - i) >= 4) ) /* more than four bytes left */ - { - in_slow_loop = 0; /* go to fast loop */ - } - - while( i < len ) - { - if ( in_slow_loop ) - { -/* ------- slow SC Detect Loop, used when 0 detected in stream --------*/ + int sc_pos = -1; + int in_slow_loop; + register unsigned int scphase; + + scphase = *pphase; + + in_slow_loop = 1; + if ( (0 == (0x3 & i)) && /* dword aligned */ + (0 == scphase) && /* no "potential" SC detected */ + ((len - i) >= 4) ) /* more than four bytes left */ + { + in_slow_loop = 0; /* go to fast loop */ + } + + while ( i < len ) + { + if ( in_slow_loop ) + { + /* ------- slow SC Detect Loop, used when 0 detected in stream --------*/ sc_detect_slow_loop: - while ( i < len ) - { - unsigned char ch; - - ch = buf[i]; - - /* searching for a zero, ignore phase for now */ - if ( 
FIRST_STARTCODE_BYTE == ch ) - { - /* if we've already got two zeros, hold at phase == 2 */ - if ( scphase < 2 ) - { - scphase++; - } - else if ( scphase > 2 ) - { - /* RARE Valid Condition, SC == 00 00 01 00 */ - /* if we've already got two zeros hold at phase == 2 - * we also enter here of we're at phase 3 - * meaning we've got 00 00 01 00 which is a valid SC - */ - /* 00 00 01 00 */ - sc_pos = i; - *pphase = scphase; - return(sc_pos); - } - else /* implies scphase == 2, holding receiving 0's */ - { - } - } - else if ( THIRD_STARTCODE_BYTE == ch ) - { - if ( 2 == scphase ) - { - /* next byte is the SC */ - scphase++; - } - else if ( scphase < 2 ) - { - scphase = 0; /* start over */ - } - else if ( scphase > 2 ) - { - /* RARE Valid Condition, SC == 00 00 01 01 */ - sc_pos = i; - *pphase = scphase; - return(sc_pos); - } - } - else if ( 3 == scphase ) + while ( i < len ) { - /* Valid Condition, SC == 00 00 01 xx */ - sc_pos = i; - *pphase = scphase; - return(sc_pos); + unsigned char ch; + + ch = buf[i]; + + /* searching for a zero, ignore phase for now */ + if ( FIRST_STARTCODE_BYTE == ch ) + { + /* if we've already got two zeros, hold at phase == 2 */ + if ( scphase < 2 ) + { + scphase++; + } + else if ( scphase > 2 ) + { + /* RARE Valid Condition, SC == 00 00 01 00 */ + /* if we've already got two zeros hold at phase == 2 + * we also enter here of we're at phase 3 + * meaning we've got 00 00 01 00 which is a valid SC + */ + /* 00 00 01 00 */ + sc_pos = i; + *pphase = scphase; + return(sc_pos); + } + else /* implies scphase == 2, holding receiving 0's */ + { + } + } + else if ( THIRD_STARTCODE_BYTE == ch ) + { + if ( 2 == scphase ) + { + /* next byte is the SC */ + scphase++; + } + else if ( scphase < 2 ) + { + scphase = 0; /* start over */ + } + else if ( scphase > 2 ) + { + /* RARE Valid Condition, SC == 00 00 01 01 */ + sc_pos = i; + *pphase = scphase; + return(sc_pos); + } + } + else if ( 3 == scphase ) + { + /* Valid Condition, SC == 00 00 01 xx */ + sc_pos = i; + *pphase = scphase; + return(sc_pos); + } + else + { + scphase = 0; + + if ( (3 == (0x3 & i)) && /* dword aligned? */ + ((len - i) > 4) ) /* more than four bytes left */ + { + i++; + in_slow_loop = 0; /* go to fast loop */ + + /* WARNING: Performance GoTo */ + goto sc_detect_fast_loop; + } + } + + i++; } - else - { - scphase = 0; - - if ( (3 == (0x3 & i)) && /* dword aligned? 
*/ - ((len - i) > 4) ) /* more than four bytes left */ - { - i++; - in_slow_loop = 0; /* go to fast loop */ - - /* WARNING: Performance GoTo */ - goto sc_detect_fast_loop; - } - } - - i++; - } - } - else /* we're in the fast loop */ - { -/* ------- FAST SC Detect Loop, used to skip at high bandwidth --------*/ + } + else /* we're in the fast loop */ + { + /* ------- FAST SC Detect Loop, used to skip at high bandwidth --------*/ sc_detect_fast_loop: - /* FAST start-code scanning loop (Krebs Algorithm) */ - while ( i <= (len - 4) ) - { - register unsigned int dw; + /* FAST start-code scanning loop (Krebs Algorithm) */ + while ( i <= (len - 4) ) + { + register unsigned int dw; - dw = *((unsigned int *)&buf[i]); + dw = *((unsigned int *)&buf[i]); #ifndef MFDBIGENDIAN - dw = SWAP_WORD(dw); + dw = SWAP_WORD(dw); #endif - if ( 0 != (dw & SC_BYTE_MASK0) ) - { - if ( 0 != (dw & SC_BYTE_MASK1) ) - { - /* most common code path */ - i += 4; - continue; - } + if ( 0 != (dw & SC_BYTE_MASK0) ) + { + if ( 0 != (dw & SC_BYTE_MASK1) ) + { + /* most common code path */ + i += 4; + continue; + } + } + + break; } + /* potential SC detected or at end of loop */ + in_slow_loop = 1; + + /* WARNING: performance goto */ + goto sc_detect_slow_loop; + } + } - break; - } - /* potential SC detected or at end of loop */ - in_slow_loop = 1; - - /* WARNING: performance goto */ - goto sc_detect_slow_loop; - } - } - - *pphase = scphase; - return(sc_pos); + *pphase = scphase; + return(sc_pos); } unsigned int viddec_parse_sc(void *in, void *pcxt) { - viddec_sc_parse_cubby_cxt_t *cxt; - int boff; - int retval=0; - - cxt = (viddec_sc_parse_cubby_cxt_t *)in; - - /* get to four-byte alignment */ - boff = (int)cxt->buf & 0x3; - - cxt->sc_end_pos = parser_find_next_startcode( - (const unsigned char *)cxt->buf - boff, - boff, - cxt->size + boff, - &cxt->phase ); - - if ( (int)cxt->sc_end_pos >= 0 ) - { - cxt->sc_end_pos -= boff; - - /* have not fully finished the buffer */ - if ( cxt->sc_end_pos < cxt->size ) - cxt->phase++; - - retval = 1; - } - else - { - /* No startcode found */ - } - - return(retval); + viddec_sc_parse_cubby_cxt_t *cxt; + int boff; + int retval=0; + + cxt = (viddec_sc_parse_cubby_cxt_t *)in; + + /* get to four-byte alignment */ + boff = (int)cxt->buf & 0x3; + + cxt->sc_end_pos = parser_find_next_startcode( + (const unsigned char *)cxt->buf - boff, + boff, + cxt->size + boff, + &cxt->phase ); + + if ( (int)cxt->sc_end_pos >= 0 ) + { + cxt->sc_end_pos -= boff; + + /* have not fully finished the buffer */ + if ( cxt->sc_end_pos < cxt->size ) + cxt->phase++; + + retval = 1; + } + else + { + /* No startcode found */ + } + + return(retval); } #endif diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c index 5aa2e9c..ff07e17 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c @@ -2,5 +2,5 @@ uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap) { - return (0); + return (0); } diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c index 42cc3e9..f52eb36 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c @@ -72,10 +72,10 @@ void viddec_pm_init_ops() parser_ops[MFD_STREAM_FORMAT_MPEG].gen_assoc_tags = viddec_mpeg2_add_association_tags; viddec_h264_get_ops(&parser_ops[MFD_STREAM_FORMAT_H264]); - parser_ops[MFD_STREAM_FORMAT_H264].parse_sc = viddec_parse_sc; + 
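viddec_parse_sc, re-registered above as the H.264 start-code scanner, and the fast loop in viddec_parse_sc_fast_loop.c both lean on the same word-at-a-time pre-filter: a 00 00 01 prefix needs two consecutive zero bytes, so if the second and fourth bytes of an aligned 32-bit word are both non-zero, no prefix can begin inside that word and all four bytes can be skipped at once. A sketch of that test; the mask constants here are illustrative (the real code expresses it with SC_BYTE_MASK0/SC_BYTE_MASK1 after normalizing byte order with SWAP_WORD):

    #include <stdint.h>

    /* Non-zero when a 00 00 01 start-code prefix could begin somewhere in
     * this big-endian word, i.e. when byte 1 or byte 3 (counting from the
     * most significant byte) is zero. Callers skip 4 bytes when it is 0. */
    static int sc_prefix_possible(uint32_t dw)
    {
        return ((dw & 0x00FF0000u) == 0) || ((dw & 0x000000FFu) == 0);
    }
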
parser_ops[MFD_STREAM_FORMAT_H264].parse_sc = viddec_parse_sc; parser_ops[MFD_STREAM_FORMAT_H264].gen_contrib_tags = viddec_pm_lateframe_generate_contribution_tags; parser_ops[MFD_STREAM_FORMAT_H264].gen_assoc_tags = viddec_h264_add_association_tags; - + viddec_mp4_get_ops(&parser_ops[MFD_STREAM_FORMAT_MPEG42]); parser_ops[MFD_STREAM_FORMAT_MPEG42].gen_contrib_tags = viddec_pm_generic_generate_contribution_tags; parser_ops[MFD_STREAM_FORMAT_MPEG42].gen_assoc_tags = viddec_generic_add_association_tags; @@ -88,7 +88,7 @@ void viddec_pm_init_ops() uint32_t viddec_pm_get_parser_sizes(uint32_t codec_type, viddec_parser_memory_sizes_t *size) { parser_ops[codec_type].get_cxt_size(size); - if(size->context_size > MAX_CODEC_CXT_SIZE) + if (size->context_size > MAX_CODEC_CXT_SIZE) { DEB("ERROR: size(%d) of context for codec=%d is greater than max=%d\n",size->context_size,codec_type,MAX_CODEC_CXT_SIZE); } @@ -104,10 +104,10 @@ void viddec_pm_init_context(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t { int i; - for(i=0; ipending_tags.pending_tags[i] = INVALID_ENTRY; - } + } cxt->frame_start_found = false; cxt->found_fm_st_in_current_au = false; cxt->late_frame_detect = (MFD_STREAM_FORMAT_H264 == codec_type) ? true:false; @@ -117,7 +117,7 @@ void viddec_pm_init_context(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t cxt->cur_buf.list_index = -1; cxt->parse_cubby.phase=0; parser_ops[codec_type].init((void *)&(cxt->codec_data[0]), persist_mem, !clean); - if(clean) + if (clean) { cxt->pending_inband_tags = 0; } @@ -126,7 +126,7 @@ void viddec_pm_init_context(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t /* TODO: Enable this once codecs support this function */ //parser_ops[codec_type].flush_preserve((void *)&(cxt->codec_data[0]), persist_mem); } - + } void viddec_pm_update_time(viddec_pm_cxt_t *cxt, uint32_t time) @@ -140,31 +140,31 @@ static inline uint32_t viddec_pm_add_es_buf_to_list(viddec_pm_cxt_t *cxt, viddec uint32_t val , ret = PM_OVERFLOW; val = viddec_pm_utils_list_addbuf(&(cxt->list), es_buf); - if(val == 1) ret = PM_SUCCESS; + if (val == 1) ret = PM_SUCCESS; return ret; } static inline uint32_t viddec_pm_check_inband_messages(viddec_pm_sc_cur_buf_t *cur_buf, uint32_t *type) { uint32_t ret=false; - if(cur_buf->cur_es->flags != 0) + if (cur_buf->cur_es->flags != 0) { /* update offset to point to next position for loading data */ cur_buf->cur_offset +=(cur_buf->cur_size); cur_buf->cur_size = 0; - switch(cur_buf->cur_es->flags) + switch (cur_buf->cur_es->flags) { - case VIDDEC_STREAM_EOS: - { - *type = PM_EOS; - } + case VIDDEC_STREAM_EOS: + { + *type = PM_EOS; + } + break; + case VIDDEC_STREAM_DISCONTINUITY: + { + *type = PM_DISCONTINUITY; + } + default: break; - case VIDDEC_STREAM_DISCONTINUITY: - { - *type = PM_DISCONTINUITY; - } - default: - break; } ret =true; } @@ -182,16 +182,16 @@ uint32_t viddec_pm_create_ibuf(viddec_pm_cxt_t *cxt) viddec_pm_utils_list_t *list = &(cxt->list); /* Step1: check if list is Empty, If yes return No data */ - if(list->num_items > 0) + if (list->num_items > 0) { /* Step 2: Check to see If current index into list is empty & we have data in list, if so increment index and initialise it*/ - if(cur_buf->list_index == -1) + if (cur_buf->list_index == -1) { - if(viddec_pm_utils_list_getbyte_position(list, - list->first_scprfx_length+1, - (uint32_t *)&(cur_buf->list_index), - &(cur_buf->cur_offset)) != 1) + if (viddec_pm_utils_list_getbyte_position(list, + list->first_scprfx_length+1, + (uint32_t *)&(cur_buf->list_index), + &(cur_buf->cur_offset)) != 1) 
{/* This return's offset and index from where we have to start for sc detect */ cur_buf->cur_size = 0; cur_buf->cur_es = &(list->sc_ibuf[cur_buf->list_index]); @@ -203,16 +203,16 @@ uint32_t viddec_pm_create_ibuf(viddec_pm_cxt_t *cxt) } /* Step3: If we are done with current buffer then try to go to next item in list */ - if((cur_buf->cur_offset + cur_buf->cur_size) >= cur_buf->cur_es->len) + if ((cur_buf->cur_offset + cur_buf->cur_size) >= cur_buf->cur_es->len) { /* Need to handle In band messages before going to next buffer */ //if(viddec_pm_check_inband_messages(cur_buf)) - if(viddec_pm_check_inband_messages(cur_buf, &ret)) + if (viddec_pm_check_inband_messages(cur_buf, &ret)) { return ret; } /* If no items in list after the current buffer return no data */ - if((uint32_t)(cur_buf->list_index + 1) >= list->num_items) + if ((uint32_t)(cur_buf->list_index + 1) >= list->num_items) { return PM_NO_DATA; } @@ -231,12 +231,12 @@ uint32_t viddec_pm_create_ibuf(viddec_pm_cxt_t *cxt) #ifndef VBP /* Load maximum of array size */ - if(data_left >= SC_DETECT_BUF_SIZE) + if (data_left >= SC_DETECT_BUF_SIZE) { data_left = SC_DETECT_BUF_SIZE; } /* can be zero if we have zero sized buffers in our list.EX:NEW segment */ - if(data_left > 0) + if (data_left > 0) {/* do a copy using Linear Dma */ uint32_t size , ddr_addr = 0, ddr_mask=0; /* get ddr adress of current offset in ES buffer */ @@ -263,13 +263,13 @@ uint32_t viddec_pm_create_ibuf(viddec_pm_cxt_t *cxt) { /* If we completely consumed this buffer or this is a zero sized buffer we want to check inband messages */ //if(viddec_pm_check_inband_messages(cur_buf)) - if(viddec_pm_check_inband_messages(cur_buf, &ret)) + if (viddec_pm_check_inband_messages(cur_buf, &ret)) { return ret; } } #else - ret = PM_SUCCESS; + ret = PM_SUCCESS; #endif } } @@ -285,56 +285,56 @@ static inline uint32_t viddec_pm_parse_for_sccode(viddec_pm_cxt_t *cxt, viddec_p uint32_t ret = PM_NO_DATA; uint32_t sc_boundary_found = 0; - while(!sc_boundary_found) + while (!sc_boundary_found) { /* Create an buffer from list to parse */ ret = viddec_pm_create_ibuf(cxt); - switch(ret) + switch (ret) { - case PM_NO_DATA: - {/* No data in esbuffer list for parsing sc */ - sc_boundary_found = 1; + case PM_NO_DATA: + {/* No data in esbuffer list for parsing sc */ + sc_boundary_found = 1; + } + break; + case PM_EOS: + case PM_DISCONTINUITY: + { + sc_boundary_found = 1; + cxt->list.end_offset = cxt->cur_buf.cur_offset+1; + cxt->parse_cubby.phase = 0; + /* we didn't find a start code so second start code length would be 0 */ + cxt->sc_prefix_info.second_scprfx_length = 0; + //cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_EOS; + if (ret == PM_EOS) + { + cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_EOS; } - break; - case PM_EOS: - case PM_DISCONTINUITY: + if (ret == PM_DISCONTINUITY) { - sc_boundary_found = 1; - cxt->list.end_offset = cxt->cur_buf.cur_offset+1; + cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_DISCONTINUITY; + } + } + break; + case PM_SUCCESS: + default: + { + /* parse the created buffer for sc */ + ret = func->parse_sc((void *)&(cxt->parse_cubby), (void *)&(cxt->codec_data[0]), &(cxt->sc_prefix_info)); + if (ret == 1) + { + cxt->list.end_offset = cxt->parse_cubby.sc_end_pos + cxt->cur_buf.cur_offset; cxt->parse_cubby.phase = 0; - /* we didn't find a start code so second start code length would be 0 */ - cxt->sc_prefix_info.second_scprfx_length = 0; - //cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_EOS; - if(ret == PM_EOS) - { - cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_EOS; - } - if(ret == 
PM_DISCONTINUITY) - { - cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_DISCONTINUITY; - } + cxt->list.total_bytes+=cxt->parse_cubby.sc_end_pos; + ret = PM_SC_FOUND; + sc_boundary_found = 1; + break; } - break; - case PM_SUCCESS: - default: + else { - /* parse the created buffer for sc */ - ret = func->parse_sc((void *)&(cxt->parse_cubby), (void *)&(cxt->codec_data[0]), &(cxt->sc_prefix_info)); - if(ret == 1) - { - cxt->list.end_offset = cxt->parse_cubby.sc_end_pos + cxt->cur_buf.cur_offset; - cxt->parse_cubby.phase = 0; - cxt->list.total_bytes+=cxt->parse_cubby.sc_end_pos; - ret = PM_SC_FOUND; - sc_boundary_found = 1; - break; - } - else - { - cxt->list.total_bytes+=cxt->cur_buf.cur_size; - } + cxt->list.total_bytes+=cxt->cur_buf.cur_size; } - break; + } + break; } } @@ -371,7 +371,7 @@ uint32_t viddec_pm_finalize_list(viddec_pm_cxt_t *cxt) void viddec_pm_handle_buffer_overflow(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf) { uint32_t indx=0; - while(indx< (uint32_t)cxt->list.num_items) + while (indx< (uint32_t)cxt->list.num_items) {/* Dump tags for all entries in list to prevent buffer leak */ viddec_emit_contr_tag(&(cxt->emitter), &(cxt->list.sc_ibuf[indx]), false, true); viddec_emit_assoc_tag(&(cxt->emitter), cxt->list.sc_ibuf[indx].id, true); @@ -395,15 +395,15 @@ void viddec_pm_handle_buffer_overflow(viddec_pm_cxt_t *cxt, uint32_t codec_type, static inline void viddec_pm_handle_post_inband_messages(viddec_pm_cxt_t *cxt, uint32_t m_type) { - if((m_type & ~(0xFF))== PM_INBAND_MESSAGES) + if ((m_type & ~(0xFF))== PM_INBAND_MESSAGES) { /* If EOS decide set error on next workload too */ viddec_emit_set_workload_error(&(cxt->emitter), cxt->next_workload_error_eos, true); - if(m_type == PM_EOS) + if (m_type == PM_EOS) { viddec_emit_set_inband_tag(&(cxt->emitter), VIDDEC_WORKLOAD_IBUF_EOS, true); } - if(m_type == PM_DISCONTINUITY) + if (m_type == PM_DISCONTINUITY) { cxt->pending_inband_tags = PM_DISCONTINUITY; } @@ -413,10 +413,10 @@ static inline void viddec_pm_handle_post_inband_messages(viddec_pm_cxt_t *cxt, u static inline uint32_t viddec_pm_handle_new_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf) { uint32_t state = PM_SUCCESS; - if(es_buf != NULL) + if (es_buf != NULL) { state = viddec_pm_add_es_buf_to_list(cxt, es_buf); - if(state == PM_OVERFLOW) + if (state == PM_OVERFLOW) { viddec_pm_handle_buffer_overflow(cxt, codec_type, es_buf); } @@ -426,7 +426,7 @@ static inline uint32_t viddec_pm_handle_new_es_buffer(viddec_pm_cxt_t *cxt, uint static inline void viddec_pm_handle_pre_inband_messages(viddec_pm_cxt_t *cxt) { - if(cxt->pending_inband_tags == PM_DISCONTINUITY) + if (cxt->pending_inband_tags == PM_DISCONTINUITY) { viddec_emit_set_inband_tag(&(cxt->emitter), VIDDEC_WORKLOAD_IBUF_DISCONTINUITY, false); cxt->pending_inband_tags = 0; @@ -447,108 +447,108 @@ uint32_t viddec_pm_parse_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, vi /* Step1: Append Es buffer to list */ viddec_pm_handle_pre_inband_messages(cxt); state = viddec_pm_handle_new_es_buffer(cxt, codec_type, es_buf); - if(state == PM_SUCCESS) + if (state == PM_SUCCESS) { uint32_t scdetect_ret; output_omar_wires( 0x3 ); /* Step2: Phase1 of parsing, parse until a sc is found */ scdetect_ret = viddec_pm_parse_for_sccode(cxt,&parser_ops[codec_type]); - switch(scdetect_ret) + switch (scdetect_ret) { - case PM_NO_DATA: + case PM_NO_DATA: + { + /* Step3: If we consumed all the data indicate we need more buffers */ + state = PM_NO_DATA; + break; + } + case PM_EOS: + case 
PM_DISCONTINUITY:
+ case PM_SC_FOUND:
+ {
+ uint32_t codec_errors=0;
+ /* Create necessary state information to make the ES buffers look like linear data */
+ viddec_pm_utils_list_updatebytepos(&(cxt->list), cxt->sc_prefix_info.second_scprfx_length);
+ if (cxt->sc_prefix_info.first_sc_detect != 1)
{
- /* Step3: If we consumed all the data indicate we need more buffers */
- state = PM_NO_DATA;
- break;
+ /* Step4: If we saw two start codes, init state and call codec to parse */
+ uint32_t codec_ret;
+ /* Initialise the state to provide get bits for codecs */
+ viddec_pm_utils_bstream_init(&(cxt->getbits), &(cxt->list), EMUL_REQD(codec_type));
+ output_omar_wires( 0x1 );
+ /* call the codec to do syntax parsing */
+ parser_ops[codec_type].parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
+ /* Check and see if frame start was detected. If we did, update frame start in current au */
+ if (parser_ops[codec_type].is_frame_start((void *)&(cxt->codec_data[0])) == true)
+ {
+ cxt->frame_start_found += 1;
+ cxt->found_fm_st_in_current_au = true;
+ }
+ /* Query to see if we reached end of current frame */
+ codec_ret = parser_ops[codec_type].is_wkld_done((void *)cxt,
+ (void *)&(cxt->codec_data[0]),
+ (uint32_t)(cxt->sc_prefix_info.next_sc),
+ &codec_errors);
+
+ state = (codec_ret == VIDDEC_PARSE_FRMDONE) ? PM_WKLD_DONE : PM_SUCCESS;
+ /* generate contribution and association tags */
+ cxt->pending_tags.frame_done = (codec_ret == VIDDEC_PARSE_FRMDONE);
+ parser_ops[codec_type].gen_assoc_tags(cxt);
+ parser_ops[codec_type].gen_contrib_tags(cxt, (state != PM_WKLD_DONE));
}
- case PM_EOS:
- case PM_DISCONTINUITY:
- case PM_SC_FOUND:
+ else
{
- uint32_t codec_errors=0;
- /* Create necessary state information to make the ES buffers look like linear data */
- viddec_pm_utils_list_updatebytepos(&(cxt->list), cxt->sc_prefix_info.second_scprfx_length);
- if(cxt->sc_prefix_info.first_sc_detect != 1)
+ /* Step4: If this is the first start code in this stream, clean up and return */
+ if (cxt->list.total_bytes != 0)
{
- /* Step4: If we saw two start codes init state and call codec to parse */
- uint32_t codec_ret;
- /* Initialise the state to provide get bits for codecs */
- viddec_pm_utils_bstream_init(&(cxt->getbits), &(cxt->list), EMUL_REQD(codec_type));
- output_omar_wires( 0x1 );
- /* call the codec to do synatax parsing */
- parser_ops[codec_type].parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
- /* Check and see if frame start was detected. If we did update frame start in current au */
- if(parser_ops[codec_type].is_frame_start((void *)&(cxt->codec_data[0])) == true)
- {
- cxt->frame_start_found += 1;
- cxt->found_fm_st_in_current_au = true;
- }
- /* Query to see if we reached end of current frame */
- codec_ret = parser_ops[codec_type].is_wkld_done((void *)cxt,
- (void *)&(cxt->codec_data[0]),
- (uint32_t)(cxt->sc_prefix_info.next_sc),
- &codec_errors);
-
- state = (codec_ret == VIDDEC_PARSE_FRMDONE) ?
PM_WKLD_DONE : PM_SUCCESS; - /* generate contribution and association tags */ - cxt->pending_tags.frame_done = (codec_ret == VIDDEC_PARSE_FRMDONE); - parser_ops[codec_type].gen_assoc_tags(cxt); - parser_ops[codec_type].gen_contrib_tags(cxt, (state != PM_WKLD_DONE)); + viddec_pm_generic_generate_contribution_tags(cxt, true); + viddec_generic_add_association_tags(cxt); } else { - /* Step4: If this is the first start code in this stream, clean up and return */ - if(cxt->list.total_bytes != 0) + if (cxt->list.num_items >= 1) { - viddec_pm_generic_generate_contribution_tags(cxt, true); - viddec_generic_add_association_tags(cxt); - } - else - { - if(cxt->list.num_items >= 1) - { - uint32_t indx=0; - while((indx< (uint32_t)cxt->list.num_items) && (cxt->list.sc_ibuf[indx].len == 0)) - {/* Dump all zero sized buffers until we see a buffer with valid data */ - viddec_emit_contr_tag(&(cxt->emitter), &(cxt->list.sc_ibuf[indx]), false, false); - viddec_emit_assoc_tag(&(cxt->emitter), cxt->list.sc_ibuf[indx].id, false); - indx++; - } + uint32_t indx=0; + while ((indx< (uint32_t)cxt->list.num_items) && (cxt->list.sc_ibuf[indx].len == 0)) + {/* Dump all zero sized buffers until we see a buffer with valid data */ + viddec_emit_contr_tag(&(cxt->emitter), &(cxt->list.sc_ibuf[indx]), false, false); + viddec_emit_assoc_tag(&(cxt->emitter), cxt->list.sc_ibuf[indx].id, false); + indx++; } } - if((scdetect_ret & ~(0xFF))!= PM_INBAND_MESSAGES) - { - state = PM_SUCCESS;//state = PM_FIRST_SC_FOUND; - cxt->sc_prefix_info.first_sc_detect = 0; - } - else - { - state = PM_WKLD_DONE; - } } + if ((scdetect_ret & ~(0xFF))!= PM_INBAND_MESSAGES) + { + state = PM_SUCCESS;//state = PM_FIRST_SC_FOUND; + cxt->sc_prefix_info.first_sc_detect = 0; + } + else + { + state = PM_WKLD_DONE; + } + } - viddec_pm_handle_post_inband_messages(cxt, scdetect_ret); + viddec_pm_handle_post_inband_messages(cxt, scdetect_ret); - /* Step 5: If current frame is done, finalise the workload state with necessary information */ - if(state == PM_WKLD_DONE) - { - DEB("\nFRAME ... DONE\n"); - /* we decrement frame start. This can be 0 in cases like sending junk data with EOS */ - cxt->frame_start_found -= (cxt->frame_start_found)? 1: 0; - if((scdetect_ret & ~(0xFF))== PM_INBAND_MESSAGES) - {/* If EOS dump pending tags and set state */ - viddec_pm_generate_missed_association_tags(cxt, false); - state = scdetect_ret; - } - /* Write back stored state of workloads to memory to prepare for psuhing to output queue */ - viddec_pm_finalize_workload(cxt, codec_type, codec_errors); + /* Step 5: If current frame is done, finalise the workload state with necessary information */ + if (state == PM_WKLD_DONE) + { + DEB("\nFRAME ... DONE\n"); + /* we decrement frame start. This can be 0 in cases like sending junk data with EOS */ + cxt->frame_start_found -= (cxt->frame_start_found)? 
1: 0;
+ if ((scdetect_ret & ~(0xFF))== PM_INBAND_MESSAGES)
+ {/* If EOS dump pending tags and set state */
+ viddec_pm_generate_missed_association_tags(cxt, false);
+ state = scdetect_ret;
}
+ /* Write back stored state of workloads to memory to prepare for pushing to output queue */
+ viddec_pm_finalize_workload(cxt, codec_type, codec_errors);
}
+ /* Step 6: Reset the list to prepare for next iteration */
+ viddec_pm_finalize_list(cxt);
+ break;
+ }
+ default:
+ break;
}
}//if(state == PM_SUCCESS)
return state;
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c
index 9d5c132..21f4527 100644
--- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c
@@ -16,7 +16,7 @@ static void viddec_fw_parser_peekmessages(viddec_pm_cxt_t *pm, ipc_msg_data *wkl
of message but won't actually pull it out of queue*/
*ret_cur = FwIPC_PeekReadMessage(fwipc, &(fwipc->wkld_q[stream_id]), (char *)wkld_cur, sizeof(ipc_msg_data), 0);
- *ret_next = FwIPC_PeekReadMessage(fwipc, &(fwipc->wkld_q[stream_id]), (char *)wkld_next, sizeof(ipc_msg_data), 1);
+ *ret_next = FwIPC_PeekReadMessage(fwipc, &(fwipc->wkld_q[stream_id]), (char *)wkld_next, sizeof(ipc_msg_data), 1);
/* NOTE: I am passing length of current workload as size for next, since next workload might not exist. This is safe since in flush we always append to current workload */
viddec_emit_update(&(pm->emitter), wkld_cur->phys, wkld_next->phys, wkld_cur->len, wkld_cur->len);
}
@@ -48,10 +48,10 @@ int viddec_fw_parser_flush(unsigned int stream_id, unsigned int flush_type)
workloads_in_input_q = ipc_mq_read_avail(&fwipc->wkld_q[stream_id].mq, (int32_t *)&pos);
pos = 0;
/* Check to see if output queue has space for next message */
- if(ipc_mq_write_avail(&fwipc->snd_q[stream_id].mq,&pos) >= workloads_in_input_q)
+ if (ipc_mq_write_avail(&fwipc->snd_q[stream_id].mq,&pos) >= workloads_in_input_q)
{
/* Check how many free workloads are available. Need at least 1 */
- if(workloads_in_input_q >= CONFIG_IPC_MESSAGE_MAX_SIZE)
+ if (workloads_in_input_q >= CONFIG_IPC_MESSAGE_MAX_SIZE)
{
ipc_msg_data wkld_cur, wkld_next, cur_es;
int32_t ret_cur=0,ret_next=0;
@@ -61,7 +61,7 @@ int viddec_fw_parser_flush(unsigned int stream_id, unsigned int flush_type)
}
viddec_fw_parser_peekmessages(pm, &wkld_cur, &wkld_next, &ret_cur, &ret_next, stream_id);
- if(workloads_in_input_q >= (CONFIG_IPC_MESSAGE_MAX_SIZE << 1))
+ if (workloads_in_input_q >= (CONFIG_IPC_MESSAGE_MAX_SIZE << 1))
{/* If we have more than 2 workloads, most likely current workload has partial data.
To avoid overflow, let's push current and use next, which is most likely empty. If there's only one workload, it was next for the previous frame, so most likely it's empty, in which case we don't do this logic*/
@@ -74,7 +74,7 @@ int viddec_fw_parser_flush(unsigned int stream_id, unsigned int flush_type)
since we are guaranteed successful writes for all es buffers */
viddec_pm_generate_tags_for_unused_buffers_to_flush(pm);
/* Check the number of ES buffers and append them to current wkld */
- while(FwIPC_ReadMessage(fwipc, &(fwipc->rcv_q[stream_id]), (char *)&cur_es, sizeof(ipc_msg_data)) != 0)
+ while (FwIPC_ReadMessage(fwipc, &(fwipc->rcv_q[stream_id]), (char *)&cur_es, sizeof(ipc_msg_data)) != 0)
{
/* NOTE(Assumption): Again we have to define workload size to be big enough to make sure we can fit all the es buffers into current workload */
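The do/while that follows is the heart of the flush path: peek at the head of the workload queue and, as long as something is there, hand it back to the host as an error workload until the queue runs dry. A compressed sketch of the pattern, where struct wkld, peek_head() and push_error() are hypothetical stand-ins for ipc_msg_data, FwIPC_PeekReadMessage() and viddec_fw_parser_push_error_workload():

    /* Drain every queued workload, recycling each one as an error
     * workload so the host gets all of its buffers back. */
    static void drain_queue_as_errors(struct wkld_queue *q)
    {
        struct wkld w;
        for (;;)
        {
            if (peek_head(q, &w) == 0)   /* 0 => queue is empty */
                break;
            push_error(q, &w);           /* emit it flagged as an error */
        }
    }
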
&(cxt->emitter.next) : &(cxt->emitter.cur); ret = viddec_emit_append(emit, item); return ret; #else - return 1; + return 1; #endif } @@ -68,7 +70,7 @@ int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, uint8_ viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), bit, byte, is_emul); return ret; - + } static inline int32_t viddec_pm_append_restof_pixel_data(void *parent, uint32_t cur_wkld) @@ -78,11 +80,11 @@ static inline int32_t viddec_pm_append_restof_pixel_data(void *parent, uint32_t uint32_t start=0, b_off=0; uint8_t emul=0; viddec_workload_item_t wi; - + cxt = (viddec_pm_cxt_t *)parent; viddec_pm_utils_skip_if_current_is_emulation(&(cxt->getbits)); viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), &b_off, &start, &emul); - if(emul) start--; + if (emul) start--; wi.vwi_type = VIDDEC_WORKLOAD_PIXEL_ES; wi.es.es_flags = 0; @@ -123,7 +125,7 @@ int32_t viddec_pm_is_nomoredata(void *parent) int32_t ret=0; viddec_pm_cxt_t *cxt; - cxt = (viddec_pm_cxt_t *)parent; + cxt = (viddec_pm_cxt_t *)parent; ret = viddec_pm_utils_bstream_nomorerbspdata(&(cxt->getbits)); return ret; } diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c index 0a6f09b..adb366b 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c @@ -2,20 +2,20 @@ void viddec_vc1_get_ops(viddec_parser_ops_t *ops) { - return; + return; } void viddec_mpeg2_get_ops(viddec_parser_ops_t *ops) { - return; + return; } void viddec_mp4_get_ops(viddec_parser_ops_t *ops) { - return; + return; } void viddec_h264_get_ops(viddec_parser_ops_t *ops) { - return; + return; } diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c index baa8330..d7fa5dd 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c @@ -20,7 +20,7 @@ or has a sc prefix its associated to next decodable frame(based on first slice or header depending on codec). We use three state variables to determine where the frame starts and ends. frame_start_found: Indicates we saw the beggining of frame in current list of ES buffers(which represent current acces unit). - This is decremented on workload done since it normally means we detected frame end. + This is decremented on workload done since it normally means we detected frame end. found_fm_st_in_current_au:Indicates we saw the first slice in current access unit. Its mainly used to decide whether the first buffer belongs to current frame or next frame. Its reset after its use. Frame Done: Indicates we detected end of frame pointed by current workload. 
@@ -42,24 +42,24 @@ uint32_t viddec_pm_generic_generate_contribution_tags(void *parent, uint32_t ignore_partial) { uint32_t ret = PM_SUCCESS; - viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent; + viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent; viddec_pm_utils_list_t *list = &(cxt->list); - if(list->num_items != 0) + if (list->num_items != 0) { - if(!cxt->late_frame_detect) + if (!cxt->late_frame_detect) { uint32_t num_items = 0; - while((num_items < list->num_items) && (list->data[num_items].edpos <= (uint32_t)list->total_bytes)) + while ((num_items < list->num_items) && (list->data[num_items].edpos <= (uint32_t)list->total_bytes)) {/* Walkthrough Consumed buffers and dump the tags */ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), false, false); num_items++; } /* Dump incomplete tags if required */ - if(!ignore_partial) + if (!ignore_partial) {/* check to see if last item is not consumed and dump continued flag */ - if((num_items < list->num_items) - && (list->data[num_items].edpos >= (uint32_t)list->total_bytes)) + if ((num_items < list->num_items) + && (list->data[num_items].edpos >= (uint32_t)list->total_bytes)) { viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), true, false); } @@ -85,19 +85,19 @@ uint32_t viddec_pm_generic_generate_contribution_tags(void *parent, uint32_t ign uint32_t viddec_pm_lateframe_generate_contribution_tags(void *parent, uint32_t ignore_partial) { uint32_t ret = PM_SUCCESS; - viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent; + viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent; viddec_pm_utils_list_t *list = &(cxt->list); - if(list->num_items != 0) + if (list->num_items != 0) { uint32_t num_items = 0; /* If start offset is not 0 then it was partially used in last access unit. !ignore_partial means frame done*/ - if((list->start_offset!= 0) && !ignore_partial) + if ((list->start_offset!= 0) && !ignore_partial) {/* Emit continue in current if necessary. 
*/ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), true, false); } - - while((num_items < list->num_items) && (list->data[num_items].edpos <= (uint32_t)list->total_bytes)) + + while ((num_items < list->num_items) && (list->data[num_items].edpos <= (uint32_t)list->total_bytes)) { /* Walkthrough Consumed buffers and dump the tags to current or Next*/ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), false, !ignore_partial); num_items++; @@ -113,7 +113,7 @@ uint32_t viddec_pm_generate_missed_association_tags(viddec_pm_cxt_t *cxt, uint32 { uint32_t i=0, ret = PM_SUCCESS; - while((i < MAX_IBUFS_PER_SC) && (cxt->pending_tags.pending_tags[i] != INVALID_ENTRY)) + while ((i < MAX_IBUFS_PER_SC) && (cxt->pending_tags.pending_tags[i] != INVALID_ENTRY)) { viddec_emit_assoc_tag(&(cxt->emitter), cxt->pending_tags.pending_tags[i], using_next); cxt->pending_tags.pending_tags[i] = INVALID_ENTRY; @@ -131,7 +131,7 @@ void viddec_pm_add_tags_to_pendinglist(viddec_pm_cxt_t *cxt, uint32_t ignore_fir vidded_pm_pending_tags_t *pend = &(cxt->pending_tags); uint32_t index=0, t_index=0; - if(!ignore_first && (list->start_offset == 0)) + if (!ignore_first && (list->start_offset == 0)) {/* If start offset is 0 we are saying that first buffer in list starts with start code */ pend->first_buf_aligned = true; } @@ -141,12 +141,13 @@ void viddec_pm_add_tags_to_pendinglist(viddec_pm_cxt_t *cxt, uint32_t ignore_fir pend->first_buf_aligned = false; } - while( (index < list->num_items) && (list->data[index].edpos <= (uint32_t)list->total_bytes)) + while ( (index < list->num_items) && (list->data[index].edpos <= (uint32_t)list->total_bytes)) {/* walk through consumed buffers and buffer id's in pending list */ pend->pending_tags[t_index] = list->sc_ibuf[index].id; - index++;t_index++; + index++; + t_index++; } - if( (index < list->num_items) && (list->data[index].stpos < (uint32_t)list->total_bytes)) + if ( (index < list->num_items) && (list->data[index].stpos < (uint32_t)list->total_bytes)) {/* If last item is partially consumed still add it to pending tags since tag association is based on start of ES buffer */ pend->pending_tags[t_index] = list->sc_ibuf[index].id; } @@ -175,7 +176,7 @@ static inline void viddec_pm_emit_pending_tag_item(viddec_emitter *emit, vidded_ uint32_t viddec_mpeg2_add_association_tags(void *parent) { uint32_t ret = PM_SUCCESS; - viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent; + viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent; vidded_pm_pending_tags_t *pend = &(cxt->pending_tags); uint32_t first_slice = false, index = 0; /* check to see if we found a frame start in current access unit */ @@ -183,13 +184,13 @@ uint32_t viddec_mpeg2_add_association_tags(void *parent) cxt->found_fm_st_in_current_au = false; /* If we found frame start and first item in pending tags is start with start code then it needs to go to current frame. 
*/ - if(first_slice && pend->first_buf_aligned && (pend->pending_tags[index] != INVALID_ENTRY)) + if (first_slice && pend->first_buf_aligned && (pend->pending_tags[index] != INVALID_ENTRY)) { viddec_pm_emit_pending_tag_item(&(cxt->emitter), pend, index, false); index++; } /* rest of list goes to current if frame start is not found else next frame */ - while((index < MAX_IBUFS_PER_SC) && (pend->pending_tags[index] != INVALID_ENTRY)) + while ((index < MAX_IBUFS_PER_SC) && (pend->pending_tags[index] != INVALID_ENTRY)) { viddec_pm_emit_pending_tag_item(&(cxt->emitter), pend, index, cxt->frame_start_found); index++; @@ -211,7 +212,7 @@ uint32_t viddec_mpeg2_add_association_tags(void *parent) uint32_t viddec_h264_add_association_tags(void *parent) { uint32_t ret = PM_SUCCESS; - viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent; + viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent; viddec_pm_utils_list_t *list = &(cxt->list); vidded_pm_pending_tags_t *pend = &(cxt->pending_tags); uint32_t first_slice = false, index = 0; @@ -221,7 +222,7 @@ uint32_t viddec_h264_add_association_tags(void *parent) first_slice = cxt->frame_start_found && cxt->found_fm_st_in_current_au; cxt->found_fm_st_in_current_au = false; /* If we saw frame start and first buffer is aligned to start code throw it into next */ - if(first_slice && (list->start_offset == 0)) + if (first_slice && (list->start_offset == 0)) { viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, cxt->frame_start_found && cxt->pending_tags.frame_done); index++; @@ -254,19 +255,19 @@ uint32_t viddec_generic_add_association_tags(void *parent) /* We check to see if this access unit is not the first one with frame start. This evaluates to true in that case */ not_first_slice = cxt->frame_start_found && !cxt->found_fm_st_in_current_au; cxt->found_fm_st_in_current_au = false; - if(list->start_offset == 0) + if (list->start_offset == 0) {/* If start offset is 0, we have start code at beggining of buffer. 
/* Skip first item always, for start_offset=0 it's already been handled above*/ index++; - while( (index < list->num_items) && (list->data[index].edpos <= (uint32_t)list->total_bytes)) + while ( (index < list->num_items) && (list->data[index].edpos <= (uint32_t)list->total_bytes)) {/* Walk through consumed buffers and dump the tags to current or next*/ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, cxt->frame_start_found); index++; } - if( (index < list->num_items) && (list->data[index].stpos < (uint32_t)list->total_bytes)) + if ( (index < list->num_items) && (list->data[index].stpos < (uint32_t)list->total_bytes)) {/* Dump last item if it was partially consumed */ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, cxt->frame_start_found); } @@ -284,16 +285,16 @@ void viddec_pm_generate_tags_for_unused_buffers_to_flush(viddec_pm_cxt_t *cxt) list = &(cxt->list); /* Generate association tags from temporary pending array */ viddec_pm_generate_missed_association_tags(cxt, false); - if(list->num_items > 0) + if (list->num_items > 0) { /* Throw contribution flag for first item as done */ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[index]), false, false); - if(cxt->list.start_offset == 0) + if (cxt->list.start_offset == 0) {/* Throw association for first item if it was not done already */ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, false); } index++; - while(index < list->num_items) + while (index < list->num_items) {/* Walk through list and throw contribution and association flags */ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[index]), false, false); viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, false); diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c index cecaac3..693741f 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c @@ -7,7 +7,7 @@ typedef union { uint8_t byte[8]; uint32_t word[2]; -}viddec_pm_utils_getbits_t; +} viddec_pm_utils_getbits_t; void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt); uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index); @@ -35,18 +35,18 @@ uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cx data_remaining should be 2 for 00000001, as we don't count sc prefix its current byte and extra 00 as we check for 000001. NOTE: This is used for H264 only. */ - switch(data_remaining) + switch (data_remaining) { - case 2: - /* If next byte is 0 and its the last byte in access unit */ - ret = (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x0); - break; - case 1: - /* if the current byte is last byte */ - ret = true; - break; - default: - break; + case 2: + /* If next byte is 0 and it's the last byte in the access unit */ + ret = (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x0); + break; + case 1: + /* if the current byte is the last byte */ + ret = true; + break; + default: + break; } return ret; }
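/*
 * A standalone restatement of the check above: with data_remaining
 * counted from the current byte, a single remaining byte, or a lone
 * trailing zero byte, means no more RBSP payload follows in this
 * access unit (H264 only). buf/index stand in for the cubby state.
 */
#include <stdbool.h>
#include <stdint.h>

static bool no_more_rbsp_data(const uint8_t *buf, uint32_t index,
                              uint32_t data_remaining)
{
    switch (data_remaining)
    {
    case 2:
        /* Exactly one byte follows; a 0x00 there ends the RBSP */
        return buf[index + 1] == 0x00;
    case 1:
        /* The current byte is the last byte of the access unit */
        return true;
    default:
        return false;
    }
}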
@@ -61,7 +61,7 @@ uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt) /* Check to see if the last byte Access unit offset is the last byte for current access unit. End represents the first invalid byte, so (end - st) will give number of bytes.*/ last_byte_offset_plus_one = cxt->au_pos + (cxt->bstrm_buf.buf_end - cxt->bstrm_buf.buf_st); - if((int32_t)last_byte_offset_plus_one >= cxt->list->total_bytes) + if ((int32_t)last_byte_offset_plus_one >= cxt->list->total_bytes) { ret = true; } @@ -83,7 +83,7 @@ static inline uint32_t viddec_pm_utils_bstream_datafromindex(viddec_pm_utils_lis int32_t val=0; val = (list->data[index].edpos <= (uint32_t)list->total_bytes) ? list->data[index].edpos: (uint32_t)list->total_bytes; val = val - (int32_t)offset; - if(val > 0) ret = (uint32_t)val; + if (val > 0) ret = (uint32_t)val; return val; } @@ -92,19 +92,19 @@ static inline uint32_t viddec_pm_utils_bstream_datafromindex(viddec_pm_utils_lis at returns index of ES buffer in list which has byte_offset */ static inline uint32_t viddec_pm_utils_bstream_maxbytes_from_index(viddec_pm_utils_bstream_cxt_t *cxt, - uint32_t *lst_index, - uint32_t byte_offset, - uint32_t *physaddr) + uint32_t *lst_index, + uint32_t byte_offset, + uint32_t *physaddr) { viddec_pm_utils_list_t *list; uint32_t last_byte_offst=0, bytes_left=0;/* default return value is 0 bytes */ list = cxt->list; - while(*lst_index < list->num_items) + while (*lst_index < list->num_items) { /* Check to see if we reached the buffer with last valid byte of current access unit, List can have data beyond current access unit */ last_byte_offst = (list->data[*lst_index].edpos <= (uint32_t)list->total_bytes) ? list->data[*lst_index].edpos: (uint32_t)list->total_bytes; - if(byte_offset < last_byte_offst) + if (byte_offset < last_byte_offst) {/* Found a match so return with data remaining */ bytes_left = viddec_pm_utils_bstream_datafromindex(list, *lst_index, byte_offset); *physaddr = viddec_pm_utils_bstream_getphys(cxt, byte_offset, *lst_index); @@ -119,10 +119,11 @@ static inline void viddec_pm_utils_bstream_scratch_copyto(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data, uint32_t num_bytes) { uint32_t i=0; - for(i=0; i<num_bytes;i++) + for (i=0; i<num_bytes; i++) { cxt->buf_scratch[i] = *data; - data++;cxt->size++; + data++; + cxt->size++; } } @@ -130,7 +131,7 @@ static inline void viddec_pm_utils_bstream_scratch_copyfrom(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data) { uint32_t i=0; - for(i=0; i<cxt->size;i++) + for (i=0; i<cxt->size; i++) { *data = cxt->buf_scratch[i]; data++; @@ -139,22 +140,22 @@ /* This function populates requested number of bytes into data parameter, skips emulation prevention bytes if needed */ static inline int32_t viddec_pm_utils_getbytes(viddec_pm_utils_bstream_buf_cxt_t *bstream, - viddec_pm_utils_getbits_t *data,/* gets populated with read bytes*/ - uint32_t *act_bytes, /* actual number of bytes read can be more due to emulation prev bytes*/ - uint32_t *phase, /* Phase for emulation */ - uint32_t num_bytes,/* requested number of bytes*/ - uint32_t emul_reqd, /* On true we look for emulation prevention */ - uint8_t is_offset_zero /* Are we on aligned byte position for first byte*/ - ) + viddec_pm_utils_getbits_t *data,/* gets populated with read bytes*/ + uint32_t *act_bytes, /* actual number of bytes read can be more due to emulation prev bytes*/ + uint32_t *phase, /* Phase for emulation */ + uint32_t num_bytes,/* requested number of bytes*/ + uint32_t emul_reqd, /* On true we look for emulation prevention */ + uint8_t is_offset_zero /* Are we on aligned byte position for first byte*/ + ) { int32_t ret = 1; uint8_t cur_byte = 0, valid_bytes_read = 0; *act_bytes = 0; - while(valid_bytes_read < num_bytes) + while (valid_bytes_read < num_bytes) { cur_byte = bstream->buf[bstream->buf_index + *act_bytes]; - if((cur_byte == 0x3) &&(*phase == 2)) + if ((cur_byte == 0x3) && (*phase == 2)) {/* skip emulation byte. we update the phase only if emulation prevention is enabled */ *phase = 0; } @@ -165,9 +166,9 @@ We only update phase for first byte if bit offset is 0. If it's not 0 then it was already accounted for in the past. From second byte onwards we always look to update phase. */ - if((*act_bytes != 0) || (is_offset_zero)) + if ((*act_bytes != 0) || (is_offset_zero)) { - if(cur_byte == 0) + if (cur_byte == 0) { /* Update phase only if emulation prevention is required */ *phase +=( ((*phase < 2) && emul_reqd ) ? 1: 0 ); @@ -181,9 +182,9 @@ } *act_bytes +=1; } - /* Check to see if we reached end during above operation. We might be out of range buts it safe since our array - has at least MIN_DATA extra bytes and the maximum out of bounds we will go is 5 bytes */ - if((bstream->buf_index + *act_bytes -1) >= bstream->buf_end) + /* Check to see if we reached end during above operation. We might be out of range but it's safe since our array + has at least MIN_DATA extra bytes and the maximum out of bounds we will go is 5 bytes */ + if ((bstream->buf_index + *act_bytes -1) >= bstream->buf_end) { ret = -1; }
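/*
 * The phase counter in viddec_pm_utils_getbytes() implements H264
 * emulation prevention: inside the payload, the encoder writes
 * 0x00 0x00 0x03 so that start codes cannot appear, and the 0x03
 * stuffing byte must be dropped before parsing. A self-contained
 * version of that state machine, assuming all data is in one buffer:
 */
#include <stdint.h>

/* Copies n payload bytes from src to dst, dropping 0x03 stuffing bytes.
   Returns the number of source bytes consumed (may exceed n). */
static uint32_t strip_emulation_prevention(const uint8_t *src, uint8_t *dst, uint32_t n)
{
    uint32_t in = 0, out = 0, zeros = 0;  /* zeros plays the role of "phase" */
    while (out < n)
    {
        uint8_t b = src[in++];
        if ((b == 0x03) && (zeros == 2))
        {
            zeros = 0;       /* stuffing byte: skip it and reset the run */
            continue;
        }
        if (b == 0x00)
            zeros = (zeros < 2) ? zeros + 1 : zeros;  /* cap at 2, as above */
        else
            zeros = 0;
        dst[out++] = b;
    }
    return in;
}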
@@ -196,18 +197,18 @@ static inline void viddec_pm_utils_check_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *data_left) { -#ifdef VBP - *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf)); -#else +#ifdef VBP + *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf)); +#else uint8_t isReload=0; *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf)); /* If we have minimum data we should continue, else try to read more data */ - if(*data_left buf_bitoff = 0; bstream->buf_index +=bytes; @@ -243,15 +244,15 @@ However in some cases we might send data to HW without reading the next bit, in which case we are on emulation byte. To avoid sending invalid data, this function has to be called first to skip. */ - + void viddec_pm_utils_skip_if_current_is_emulation(viddec_pm_utils_bstream_cxt_t *cxt) { viddec_pm_utils_bstream_buf_cxt_t *bstream = &(cxt->bstrm_buf); - if(cxt->is_emul_reqd && - (cxt->phase >= 2) && - (bstream->buf_bitoff == 0) && - (bstream->buf[bstream->buf_index] == 0x3) ) + if (cxt->is_emul_reqd && + (cxt->phase >= 2) && + (bstream->buf_bitoff == 0) && + (bstream->buf[bstream->buf_index] == 0x3) ) { bstream->buf_index += 1; cxt->phase = 0; @@ -268,10 +269,10 @@ uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uin viddec_pm_utils_list_t *list; list = cxt->list; - while(lst_index < list->num_items) + while (lst_index < list->num_items) { last_byte_offst = (list->data[lst_index].edpos <= (uint32_t)list->total_bytes) ?
list->data[lst_index].edpos: (uint32_t)list->total_bytes; - if(pos < last_byte_offst) + if (pos < last_byte_offst) { #ifndef MFDBIGENDIAN ret = (uint32_t)list->sc_ibuf[lst_index].buf; @@ -279,7 +280,7 @@ uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uin ret = list->sc_ibuf[lst_index].phys; #endif ret +=(pos - list->data[lst_index].stpos); - if(lst_index == 0) ret+=list->start_offset; + if (lst_index == 0) ret+=list->start_offset; break; } lst_index++; @@ -304,7 +305,7 @@ void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt) int32_t cur_bytes=0; viddec_pm_utils_bstream_scratch_init(&(cxt->scratch)); cur_bytes = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf)); - if(cur_bytes > 0) + if (cur_bytes > 0) { viddec_pm_utils_bstream_scratch_copyto(&(cxt->scratch), &(bstream->buf[bstream->buf_index]), cur_bytes); cxt->scratch.bitoff = bstream->buf_bitoff; @@ -320,11 +321,11 @@ void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt) /* byte pos points to the position from where we want to read data.*/ byte_pos = cxt->au_pos + cxt->scratch.size; data_left = viddec_pm_utils_bstream_maxbytes_from_index(cxt, &(cxt->list_off), byte_pos, &ddr_addr); - if(data_left > CUBBY_SIZE) + if (data_left > CUBBY_SIZE) { data_left = CUBBY_SIZE; } - if(data_left != 0) + if (data_left != 0) { ddr_mask = ddr_addr & 0x3; ddr_addr = ddr_addr & ~0x3; @@ -350,8 +351,8 @@ void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt) void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul) { #ifdef VBP - cxt->emulation_byte_counter = 0; -#endif + cxt->emulation_byte_counter = 0; +#endif cxt->au_pos = 0; cxt->list = list; @@ -373,7 +374,7 @@ int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t * bstream = &(cxt->bstrm_buf); viddec_pm_utils_check_bstream_reload(cxt, &data_left); - if(data_left != 0) + if (data_left != 0) { *byte = bstream->buf[bstream->buf_index]; ret = 1; @@ -392,28 +393,28 @@ int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uin bstream = &(cxt->bstrm_buf); viddec_pm_utils_check_bstream_reload(cxt, &data_left); - if((num_bits <= 32) && (num_bits > 0) && (data_left != 0)) + if ((num_bits <= 32) && (num_bits > 0) && (data_left != 0)) { uint8_t bytes_required=0; bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3; - if(bytes_required <= data_left) + if (bytes_required <= data_left) { viddec_pm_utils_getbits_t data; uint32_t act_bytes =0; - if(viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &(cxt->phase), bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1) + if (viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &(cxt->phase), bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1) { uint32_t total_bits=0; total_bits=num_bits+bstream->buf_bitoff; viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes); ret=1; - -#ifdef VBP + +#ifdef VBP if (act_bytes > bytes_required) { - cxt->emulation_byte_counter = act_bytes - bytes_required; + cxt->emulation_byte_counter = act_bytes - bytes_required; } -#endif +#endif } } } @@ -430,7 +431,7 @@ int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uin /* STEP 1: Make sure that we have at least minimum data before we calculate bits */ viddec_pm_utils_check_bstream_reload(cxt, &data_left); - if((num_bits <= 32) && (num_bits > 0) && (data_left != 0)) + if ((num_bits <= 32) && (num_bits > 0) && (data_left != 0)) 
{ uint32_t bytes_required=0; viddec_pm_utils_bstream_buf_cxt_t *bstream; @@ -439,13 +440,13 @@ int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uin bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3; /* Step 2: Make sure we have bytes for requested bits */ - if(bytes_required <= data_left) + if (bytes_required <= data_left) { uint32_t act_bytes, phase; viddec_pm_utils_getbits_t data; phase = cxt->phase; /* Step 3: Due to emulation prevention bytes sometimes the bytes_required > actual_required bytes */ - if(viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &phase, bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1) + if (viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &phase, bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1) { uint32_t total_bits=0; uint32_t shift_by=0; @@ -460,7 +461,7 @@ int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uin data.word[1] = SWAP_WORD(data.word[1]); #endif total_bits = num_bits+bstream->buf_bitoff; - if(total_bits > 32) + if (total_bits > 32) { /* We have to use both the words to get required data */ shift_by = total_bits - 32; @@ -472,18 +473,18 @@ int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uin data.word[0] = data.word[0] >> shift_by; } *out = data.word[0]; - if(skip) + if (skip) { /* update au byte position if needed */ viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes); cxt->phase = phase; - -#ifdef VBP + +#ifdef VBP if (act_bytes > bytes_required) { - cxt->emulation_byte_counter += act_bytes - bytes_required; + cxt->emulation_byte_counter += act_bytes - bytes_required; } -#endif +#endif } ret =1; diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c index d6f6adf..1561449 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c @@ -20,7 +20,7 @@ void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt) uint32_t viddec_pm_utils_list_addbuf(viddec_pm_utils_list_t *list, viddec_input_buffer_t *es_buf) { uint32_t ret = 0; - if((list->num_items + 1) <= MAX_IBUFS_PER_SC) + if ((list->num_items + 1) <= MAX_IBUFS_PER_SC) { list->num_items +=1; list->sc_ibuf[list->num_items - 1] = *es_buf; @@ -37,10 +37,10 @@ uint32_t viddec_pm_utils_list_addbuf(viddec_pm_utils_list_t *list, viddec_input_ uint32_t viddec_pm_utils_list_getbyte_position(viddec_pm_utils_list_t *list, uint32_t byte, uint32_t *list_index, uint32_t *offset) { uint32_t index = 0, accumulated_size=0; - + /* First buffer in list is always special case, since start offset is tied to it */ accumulated_size = list->sc_ibuf[index].len - list->start_offset; - if( accumulated_size >= byte) + if ( accumulated_size >= byte) { /* we found a match in first buffer itself */ *offset = list->start_offset + byte - 1; @@ -49,9 +49,9 @@ uint32_t viddec_pm_utils_list_getbyte_position(viddec_pm_utils_list_t *list, uin } index++; /* walk through the list until we find the byte */ - while(index < list->num_items) - { - if((accumulated_size + list->sc_ibuf[index].len) >= byte) + while (index < list->num_items) + { + if ((accumulated_size + list->sc_ibuf[index].len) >= byte) { *offset = byte - accumulated_size - 1; *list_index = index;
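/*
 * A compact model of viddec_pm_utils_list_getbyte_position() above: an
 * access unit is scattered across several input buffers, and a 1-based
 * byte count into the access unit must be mapped to (buffer index,
 * offset inside that buffer). The first buffer is special because
 * start_offset skips bytes that precede the access unit. buf_t is an
 * illustrative stand-in.
 */
#include <stdint.h>

typedef struct { uint32_t len; } buf_t;

/* Returns 0 on success, -1 if byte lies beyond the end of the list. */
static int find_byte(const buf_t *bufs, uint32_t num, uint32_t start_offset,
                     uint32_t byte, uint32_t *buf_index, uint32_t *offset)
{
    uint32_t index, accumulated = bufs[0].len - start_offset;
    if (accumulated >= byte)
    {
        *offset = start_offset + byte - 1;  /* 1-based count, 0-based offset */
        *buf_index = 0;
        return 0;
    }
    for (index = 1; index < num; index++)
    {
        if (accumulated + bufs[index].len >= byte)
        {
            *offset = byte - accumulated - 1;
            *buf_index = index;
            return 0;
        }
        accumulated += bufs[index].len;
    }
    return -1;
}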
@@ -73,26 +73,26 @@ void viddec_pm_utils_list_updatebytepos(viddec_pm_utils_list_t *list, uint8_t sc { uint32_t items=0; uint32_t start=0, end=0; - - if(list->num_items != 0) + + if (list->num_items != 0) { end = list->sc_ibuf[0].len - list->start_offset; - if((int32_t)end >= list->total_bytes) end = list->total_bytes; + if ((int32_t)end >= list->total_bytes) end = list->total_bytes; list->data[items].stpos = start; list->data[items].edpos = end; items++; - while((int32_t)end < list->total_bytes) + while ((int32_t)end < list->total_bytes) { start = end; end += list->sc_ibuf[items].len; - if((int32_t)end >= list->total_bytes) end = list->total_bytes; + if ((int32_t)end >= list->total_bytes) end = list->total_bytes; list->data[items].stpos = start; list->data[items].edpos = end; items++; } - while(items < list->num_items) + while (items < list->num_items) { - if(sc_prefix_length != 0) + if (sc_prefix_length != 0) { start = end = list->total_bytes+1; } @@ -102,7 +102,7 @@ void viddec_pm_utils_list_updatebytepos(viddec_pm_utils_list_t *list, uint8_t sc } list->data[items].stpos = start; list->data[items].edpos = end; - items++; + items++; } /* Normal access unit sequence is SC+data+SC. We read SC+data+SC bytes so far, but the current access unit should be SC+data; the second SC belongs to the next access unit. @@ -117,7 +117,7 @@ static inline void viddec_pm_utils_list_emit_slice_tags_append(viddec_emitter_wk Most of the time len > 0. However we can have a condition on EOS where the last buffer can be zero sized in which case we want to make sure that we emit END of SLICE information. */ - if((wi->es.es_phys_len != 0) || (wi->es.es_flags&VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE)) + if ((wi->es.es_phys_len != 0) || (wi->es.es_flags&VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE)) { viddec_emit_append(cur_wkld, wi); } @@ -128,7 +128,7 @@ */ void viddec_pm_utils_list_emit_slice_tags(viddec_pm_utils_list_t *list, uint32_t start, uint32_t end, viddec_emitter *emitter, uint32_t is_cur_wkld, viddec_workload_item_t *wi) { - if((list->num_items != 0) && ((int32_t)start < (list->total_bytes)) && ((int32_t)end <= (list->total_bytes))) + if ((list->num_items != 0) && ((int32_t)start < (list->total_bytes)) && ((int32_t)end <= (list->total_bytes))) { uint32_t flags=0, items=0; viddec_emitter_wkld *cur_wkld; @@ -136,14 +136,14 @@ void viddec_pm_utils_list_emit_slice_tags(viddec_pm_utils_list_t *list, uint32_t flags = wi->es.es_flags; cur_wkld = (is_cur_wkld != 0) ? &(emitter->cur):&(emitter->next); /* Seek until we find an ES buffer entry which has the start position */ - while(start >= list->data[items].edpos) items++; - - if(end < list->data[items].edpos) + while (start >= list->data[items].edpos) items++; + + if (end < list->data[items].edpos) { /* One ES buffer has both start and end in it. So dump a single entry */ wi->es.es_phys_len = end - start + 1; wi->es.es_phys_addr = list->sc_ibuf[items].phys + start - list->data[items].stpos; /* Account for start_offset if it's the first buffer in List */ - if(items == 0) wi->es.es_phys_addr += list->start_offset; + if (items == 0) wi->es.es_phys_addr += list->start_offset; wi->es.es_flags = flags | VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE | VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE; viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi); @@ -153,12 +153,12 @@ void viddec_pm_utils_list_emit_slice_tags(viddec_pm_utils_list_t *list, uint32_t /* We know that there are at least two buffers for the requested data.
Dump the first item */ wi->es.es_phys_len = list->data[items].edpos - start; wi->es.es_phys_addr = list->sc_ibuf[items].phys + start - list->data[items].stpos; - if(items == 0) wi->es.es_phys_addr += list->start_offset; + if (items == 0) wi->es.es_phys_addr += list->start_offset; wi->es.es_flags = flags | VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE; viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi); items++; /* Dump everything in between if any until the last buffer */ - while(end >= list->data[items].edpos) + while (end >= list->data[items].edpos) { wi->es.es_phys_len = list->data[items].edpos - list->data[items].stpos; wi->es.es_phys_addr = list->sc_ibuf[items].phys; @@ -183,22 +183,22 @@ void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint { list->end_offset = -1; - if(list->num_items != 0) + if (list->num_items != 0) { - if(length != 0) + if (length != 0) { uint32_t items = list->num_items-1, byte_pos; uint32_t index=0; viddec_input_buffer_t *es_buf; byte_pos = list->total_bytes; - while((list->data[items].edpos > byte_pos) && (list->data[items].stpos > byte_pos)) + while ((list->data[items].edpos > byte_pos) && (list->data[items].stpos > byte_pos)) { items--; } - if(items != 0) + if (items != 0) { list->start_offset = byte_pos - list->data[items].stpos; - while(items < list->num_items) + while (items < list->num_items) { es_buf = &(list->sc_ibuf[items]); list->sc_ibuf[index] = *es_buf; diff --git a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h index 16b4898..2cc32b7 100644 --- a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h +++ b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h @@ -1,57 +1,57 @@ -/* - This file is provided under a dual BSD/GPLv2 license. When using or +/* + This file is provided under a dual BSD/GPLv2 license. When using or redistributing this file, you may do so under either license. - + GPL LICENSE SUMMARY - + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. - - This program is free software; you can redistribute it and/or modify + + This program is free software; you can redistribute it and/or modify it under the terms of version 2 of the GNU General Public License as published by the Free Software Foundation. - This program is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. - The full GNU General Public License is included in this distribution + The full GNU General Public License is included in this distribution in the file called LICENSE.GPL. Contact Information: - BSD LICENSE + BSD LICENSE Copyright(c) 2007-2009 Intel Corporation. All rights reserved. All rights reserved. 
- Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright + * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the distribution. - * Neither the name of Intel Corporation nor the names of its - contributors may be used to endorse or promote products derived + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived from this software without specific prior written permission. - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ @@ -96,7 +96,7 @@ typedef struct viddec_input_buffer #ifdef HOST_ONLY unsigned char *buf; /* virt pointer to buffer. 
This is a don't care for FW */ #endif -}ipc_msg_data; +} ipc_msg_data; typedef ipc_msg_data viddec_input_buffer_t; typedef ipc_msg_data viddec_ipc_msg_data; @@ -111,34 +111,34 @@ typedef enum { VIDDEC_FW_PORT_FULL, /* The operation failed since queue is full */ VIDDEC_FW_PORT_EMPTY, /* The operation failed since queue is empty */ VIDDEC_FW_NEED_FREE_WKLD, /* The operation failed since a free wkld is not available */ -}viddec_fw_return_types_t; +} viddec_fw_return_types_t; /* Defines for Interrupt mask and status */ typedef enum { VIDDEC_FW_WKLD_DATA_AVAIL=1, /* A processed workload is available */ VIDDEC_FW_INPUT_WATERMARK_REACHED=2, /* The input path is below the set watermark for current stream */ -}viddec_fw_parser_int_status_t; +} viddec_fw_parser_int_status_t; /* Defines for attributes on stream, If not set explicitly will be default values */ typedef enum { VIDDEC_FW_INPUT_Q_WATERMARK, /* Define for setting Input queue watermarks */ VIDDEC_FW_STREAM_PRIORITY, /* Define for setting stream priority */ -}viddec_fw_stream_attributes_t; +} viddec_fw_stream_attributes_t; typedef struct { unsigned int input_q_space; /* Num of messages that can be written to input queue */ unsigned int output_q_data; /* Num of messages in output queue */ unsigned int workload_q_status; /* Number of free wklds available to parser */ -}viddec_fw_q_status_t; +} viddec_fw_q_status_t; typedef struct { unsigned int to_fw_q_space; /* Num of messages that can be written to input queue */ unsigned int from_fw_q_data; /* Num of messages in output queue */ -}viddec_fw_decoder_q_status_t; +} viddec_fw_decoder_q_status_t; enum viddec_fw_decoder_int_status { @@ -154,14 +154,14 @@ enum viddec_fw_decoder_int_status /** Hardware Accelerated stream formats */ typedef enum viddec_stream_format { - MFD_STREAM_FORMAT_MPEG=1, - MFD_STREAM_FORMAT_H264, - MFD_STREAM_FORMAT_VC1, - MFD_STREAM_FORMAT_MPEG42, + MFD_STREAM_FORMAT_MPEG=1, + MFD_STREAM_FORMAT_H264, + MFD_STREAM_FORMAT_VC1, + MFD_STREAM_FORMAT_MPEG42, - MFD_STREAM_FORMAT_MAX, /* must be last */ - MFD_STREAM_FORMAT_INVALID -}viddec_stream_format; + MFD_STREAM_FORMAT_MAX, /* must be last */ + MFD_STREAM_FORMAT_INVALID +} viddec_stream_format; /* Workload specific error codes */ enum viddec_fw_workload_error_codes @@ -174,7 +174,7 @@ enum viddec_fw_workload_error_codes VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM = (1 << 4),/* This is a partial or empty frame from Parser/Decoder */ VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED = (1 << 5),/* Parser detected unsupported feature in the stream */ /* First 8 bits reserved for Non Decodable errors */ - VIDDEC_FW_WORKLOAD_ERR_CONCEALED = (1 << 9),/* The decoder concealed some errors in this frame */ + VIDDEC_FW_WORKLOAD_ERR_CONCEALED = (1 << 9),/* The decoder concealed some errors in this frame */ VIDDEC_FW_WORKLOAD_ERR_MISSING_REFERENCE = (1 << 10),/* Decoder/parser detected at least one of the required reference frames is missing */ VIDDEC_FW_WORKLOAD_ERR_IN_REFERENCE = (1 << 11),/* Decoder/parser detected at least one of the reference frames has errors in it */ VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD = (1 << 12),/* Parser detected at least one of the fields is missing */ @@ -185,7 +185,7 @@ enum viddec_fw_workload_error_codes VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17),/* Decoder/Parser detected errors in "top field" or "frame"*/ VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18),/* Decoder/Parser detected errors in "bottom field" or "frame" */ VIDDEC_FW_WORKLOAD_ERR_BITSTREAM_ERROR = (1 << 19),/* Parser detected errors */ - + }; enum
viddec_fw_mpeg2_error_codes @@ -217,7 +217,7 @@ typedef int bool; #endif #endif -#endif +#endif /* end of #ifdef VBP */ #endif diff --git a/mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h b/mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h index 3a07af0..d902520 100644 --- a/mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h +++ b/mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h @@ -1,240 +1,240 @@ -/* - This file is provided under a dual BSD/GPLv2 license. When using or +/* + This file is provided under a dual BSD/GPLv2 license. When using or redistributing this file, you may do so under either license. - + GPL LICENSE SUMMARY - + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. - - This program is free software; you can redistribute it and/or modify + + This program is free software; you can redistribute it and/or modify it under the terms of version 2 of the GNU General Public License as published by the Free Software Foundation. - This program is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. - The full GNU General Public License is included in this distribution + The full GNU General Public License is included in this distribution in the file called LICENSE.GPL. Contact Information: - BSD LICENSE + BSD LICENSE Copyright(c) 2007-2009 Intel Corporation. All rights reserved. All rights reserved. - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright + * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the distribution. - * Neither the name of Intel Corporation nor the names of its - contributors may be used to endorse or promote products derived + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived from this software without specific prior written permission. - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ - + #ifndef VIDDEC_FW_DECODER_HOST_H #define VIDDEC_FW_DECODER_HOST_H - + #ifdef __cplusplus extern "C" { #endif #include "viddec_fw_common_defs.h" - -/** @weakgroup viddec Fw Decoder interface Functions */ -/** @ingroup viddec_fw_decoder */ -/*@{*/ - -/** - This function returns the size required for loading fw. - @retval size : Required size. -*/ + + /** @weakgroup viddec Fw Decoder interface Functions */ + /** @ingroup viddec_fw_decoder */ + /*@{*/ + + /** + This function returns the size required for loading fw. + @retval size : Required size. + */ uint32_t viddec_fw_decoder_query_fwsize(void); - -/** - This function loads Decoder Firmware and initialises necessary state information. - @param[in] phys : Physical address on where firmware should be loaded. - @param[in] len : Length of data allocated at phys. - @retval VIDDEC_FW_SUCCESS : Successfully loaded firmware. - @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. - @retval VIDDEC_FW_NORESOURCES : Failed to allocate resources for Loading firmware. - @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. -*/ + + /** + This function loads Decoder Firmware and initialises necessary state information. + @param[in] phys : Physical address on where firmware should be loaded. + @param[in] len : Length of data allocated at phys. + @retval VIDDEC_FW_SUCCESS : Successfully loaded firmware. + @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. + @retval VIDDEC_FW_NORESOURCES : Failed to allocate resources for Loading firmware. + @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. + */ uint32_t viddec_fw_decoder_loadfw(uint32_t phys, uint32_t len); -/** - This function returns required size for global memory for all supported decoders. This is a synchronous message to FW. - @param[out] size : returns the size required. - @retval VIDDEC_FW_SUCCESS : Successfuly got required information from FW. - @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. -*/ + /** + This function returns required size for global memory for all supported decoders. This is a synchronous message to FW. + @param[out] size : returns the size required. + @retval VIDDEC_FW_SUCCESS : Successfuly got required information from FW. + @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. 
+ */ uint32_t viddec_fw_decoder_query_fwsize_scratchmem(uint32_t *size); -/** - This function sets global memory for the firmware to use.This is a synchronous message to FW. - @param[in] phys : Physical address on where global memory starts. - @param[in] len : Length of data allocated at phys. - @retval VIDDEC_FW_SUCCESS : Successfully setup global memory. - @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. -*/ + /** + This function sets global memory for the firmware to use. This is a synchronous message to FW. + @param[in] phys : Physical address on where global memory starts. + @param[in] len : Length of data allocated at phys. + @retval VIDDEC_FW_SUCCESS : Successfully set up global memory. + @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. + */ uint32_t viddec_fw_decoder_set_fw_scratchmem(uint32_t phys, uint32_t len); -/** - This function returns the size required opening a stream. This a synchronous message to FW. - @param[in] codec_type : Type of codec that we want information about. - @param[out] size : Size of memory required for opening a stream. - @retval VIDDEC_FW_SUCCESS : Successfuly talked to FW and got required size. - @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. -*/ + /** + This function returns the size required for opening a stream. This is a synchronous message to FW. + @param[in] codec_type : Type of codec that we want information about. + @param[out] size : Size of memory required for opening a stream. + @retval VIDDEC_FW_SUCCESS : Successfully talked to FW and got required size. + @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. + */ uint32_t viddec_fw_decoder_query_streamsize(uint32_t codec_type, uint32_t *size); -/** - This function opens requested codec.This a synchronous message to FW. - @param[in] codec_type : Type of codec that we want to open. - @param[in] phys : Physical address of allocated memory for this codec. - @param[in] prority : Priority of stream. 1 for realtime and 0 for background. - @param[out] strm_handle : Handle of the opened stream. - @retval VIDDEC_FW_SUCCESS : Successfully Opened the stream. - @retval VIDDEC_FW_FAILURE : Failed to Open a stream. -*/ + /** + This function opens the requested codec. This is a synchronous message to FW. + @param[in] codec_type : Type of codec that we want to open. + @param[in] phys : Physical address of allocated memory for this codec. + @param[in] priority : Priority of stream. 1 for realtime and 0 for background. + @param[out] strm_handle : Handle of the opened stream. + @retval VIDDEC_FW_SUCCESS : Successfully opened the stream. + @retval VIDDEC_FW_FAILURE : Failed to open a stream. + */ uint32_t viddec_fw_decoder_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority); -/** - This function closes stream.This a synchronous message to FW. - @param[in] strm_handle : Handle of the stream to close. -*/ + /** + This function closes the stream. This is a synchronous message to FW. + @param[in] strm_handle : Handle of the stream to close. + */ void viddec_fw_decoder_closestream(uint32_t strm_handle);
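/*
 * A hedged usage sketch of the call sequence these prototypes document:
 * query sizes, load firmware, open a stream, then exchange workloads
 * (send/recv below). alloc_phys_contig() is a hypothetical platform
 * allocator, not part of this API, and error handling is abbreviated.
 */
#include "viddec_fw_decoder_host.h"

extern uint32_t alloc_phys_contig(uint32_t size);  /* hypothetical */

void example_decoder_bringup(void)
{
    uint32_t fw_size = viddec_fw_decoder_query_fwsize();
    if (viddec_fw_decoder_loadfw(alloc_phys_contig(fw_size), fw_size) != VIDDEC_FW_SUCCESS)
        return;

    uint32_t strm_size = 0, strm_handle = 0;
    viddec_fw_decoder_query_streamsize(MFD_STREAM_FORMAT_H264, &strm_size);
    viddec_fw_decoder_openstream(MFD_STREAM_FORMAT_H264, &strm_handle,
                                 alloc_phys_contig(strm_size), 1 /* realtime */);

    ipc_msg_data wkld;
    /* ... fill wkld with an input workload buffer ... */
    if (viddec_fw_decoder_send(strm_handle, &wkld) == VIDDEC_FW_SUCCESS)
    {
        while (viddec_fw_decoder_recv(strm_handle, &wkld) == VIDDEC_FW_PORT_EMPTY)
            ;  /* polling for brevity; a real host waits on the interrupt */
    }
    viddec_fw_decoder_closestream(strm_handle);
}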
-/** - This function allows to get current status of the decoder workload queues. If the current stream is active we return - number of input messages that can be written to input queue and the number of messages in output queue of the stream. - - Normally this is called when Host receives an interrupt from decoder, In which case before releasing the INT - Host will try its best to keep the FW busy. Normally when a interrupt is received it means at least one workload is - written into output queue of a stream. - @param[in] strm_handle : The handle of stream that we want to get status of queues. - @param[out] status : The status of each queue gets updated in here. - @retval VIDDEC_FW_SUCCESS : Successfully Got the status information. - @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter in this case an inactive stream. -*/ + /** + This function allows the host to get the current status of the decoder workload queues. If the current stream is active we return + the number of input messages that can be written to the input queue and the number of messages in the output queue of the stream. + + Normally this is called when the Host receives an interrupt from the decoder, in which case before releasing the INT + the Host will try its best to keep the FW busy. Normally when an interrupt is received it means at least one workload is + written into the output queue of a stream. + @param[in] strm_handle : The handle of stream that we want to get status of queues. + @param[out] status : The status of each queue gets updated in here. + @retval VIDDEC_FW_SUCCESS : Successfully got the status information. + @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter, in this case an inactive stream. + */ uint32_t viddec_fw_decoder_get_queue_status(uint32_t strm_handle, viddec_fw_decoder_q_status_t *status); -/** - This function flushes the current stream. This is a synchronous message to FW. - Before calling this function the host has to make sure the output queue of the firmware - is empty. After this function is executed the FW will read all entries in input - wkld buffer queue into output queue. After this operation the host has to read all entries - in output queue again to finish the flush operation. - @param[in] flush_type : Type of flush we want to perform.ex:flush and discard. - @param[in] strm_handle : Handle of the stream we want to flush. - @retval VIDDEC_FW_SUCCESS : Successfully flushed the stream. - @retval VIDDEC_FW_FAILURE : Failed to flush a stream. -*/ + /** + This function flushes the current stream. This is a synchronous message to FW. + Before calling this function the host has to make sure the output queue of the firmware + is empty. After this function is executed the FW will read all entries in the input + wkld buffer queue into the output queue. After this operation the host has to read all entries + in the output queue again to finish the flush operation. + @param[in] flush_type : Type of flush we want to perform, e.g. flush and discard. + @param[in] strm_handle : Handle of the stream we want to flush. + @retval VIDDEC_FW_SUCCESS : Successfully flushed the stream. + @retval VIDDEC_FW_FAILURE : Failed to flush a stream. + */ uint32_t viddec_fw_decoder_flushstream(uint32_t strm_handle, uint32_t flush_type); - -/** - This function sends an input workload buffer. The host should provide required frame buffers in this workload before - sending it to fw. - @param[in] strm_handle : The handle of stream that we want to send workload buffer to. - @param[in] cur_wkld : The workload buffer we want to send. - @retval VIDDEC_FW_SUCCESS : Successfully Sent the message. - @retval VIDDEC_FW_PORT_FULL : Port to fw full unsuccesful in sending message. -*/ + + /** + This function sends an input workload buffer. The host should provide the required frame buffers in this workload before + sending it to fw. + @param[in] strm_handle : The handle of stream that we want to send workload buffer to. + @param[in] cur_wkld : The workload buffer we want to send.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message. + @retval VIDDEC_FW_PORT_FULL : Port to fw full, unsuccessful in sending message. + */ uint32_t viddec_fw_decoder_send(uint32_t strm_handle, ipc_msg_data *cur_wkld); -/** - This function gets the decoded workload from fw. - @param[in] strm_handle : The handle of stream that we want to read workload from. - @param[out] cur_wkld : The workload descriptor. - @retval VIDDEC_FW_SUCCESS : Successfully Sent the message. - @retval VIDDEC_FW_PORT_EMPTY : Workload port is empty,unsuccesful in reading wkld. -*/ + /** + This function gets the decoded workload from fw. + @param[in] strm_handle : The handle of stream that we want to read workload from. + @param[out] cur_wkld : The workload descriptor. + @retval VIDDEC_FW_SUCCESS : Successfully received the message. + @retval VIDDEC_FW_PORT_EMPTY : Workload port is empty, unsuccessful in reading wkld. + */ uint32_t viddec_fw_decoder_recv(uint32_t strm_handle, ipc_msg_data *cur_wkld); -/** - This function unloads Decoder Firmware and free's the resources allocated in Load fw. - If this function is called before load fw it will crash with a segmentation fault. -*/ + /** + This function unloads Decoder Firmware and frees the resources allocated in Load fw. + If this function is called before load fw it will crash with a segmentation fault. + */ void viddec_fw_decoder_deinit(void); -/** - This function gets the major and minor revison numbers of the loaded firmware. - @param[out] major : The major revision number. - @param[out] minor : The minor revision number. - @param[out] build : The Internal Build number. -*/ + /** + This function gets the major and minor revision numbers of the loaded firmware. + @param[out] major : The major revision number. + @param[out] minor : The minor revision number. + @param[out] build : The Internal Build number. + */ void viddec_fw_decoder_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build); -/** - This function returns the interrupt status of all streams which need to be processed. A value of zero - means no active streams which generated this interrupt. -*/ + /** + This function returns the interrupt status of all streams which need to be processed. A value of zero + means no active streams generated this interrupt. + */ uint32_t viddec_fw_decoder_active_pending_interrupts(void); -/** - This function clears the interrupts for all active streams represented by status input parameter. - The status should always be a value that was returned by viddec_fw_decoder_active_pending_interrupts(). - @param[in] status : The status value that was returned by viddec_fw_decoder_active_pending_interrupts(). -*/ + /** + This function clears the interrupts for all active streams represented by the status input parameter. + The status should always be a value that was returned by viddec_fw_decoder_active_pending_interrupts(). + @param[in] status : The status value that was returned by viddec_fw_decoder_active_pending_interrupts(). + */ void viddec_fw_decoder_clear_all_pending_interrupts(uint32_t status); -/** - This function enables/disables interrupt for the stream specified.
+ @param[in] strm_handle : The handle of stream that we want to enable or disable interrupts for. + @param[in] enable : Boolean value if ==0 means disable Interrupts else enable. + @retval VIDDEC_FW_SUCCESS : Successfully Sent the message. + @retval VIDDEC_FW_INVALID_PARAM: Invalid stream handle was passed. + */ uint32_t viddec_fw_decoder_set_stream_interrupt_mask(uint32_t stream_handle, uint32_t enable); -/** - This function returns which stream interrupted in the past based on status, which is a snapshot of - interrupt status that was cleared in the past. The host has to call clear with status information - before calling this function again with status value. The Host should do this operation until this function - returns 0, which means all the streams that generated interrupt have been processed. - @param[out]strm_handle : The handle of a stream that generated interrupt. - @param[in] status : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts(). - @retval 1 : A valid stream handle was found. - @retval 0 : No more streams from the status which caused interrupt. -*/ + /** + This function returns which stream interrupted in the past based on status, which is a snapshot of + interrupt status that was cleared in the past. The host has to call clear with status information + before calling this function again with the status value. The Host should do this operation until this function + returns 0, which means all the streams that generated an interrupt have been processed. + @param[out]strm_handle : The handle of a stream that generated interrupt. + @param[in] status : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts(). + @retval 1 : A valid stream handle was found. + @retval 0 : No more streams from the status which caused interrupt. + */ uint32_t viddec_fw_decoder_get_next_stream_from_interrupt_status(uint32_t status, uint32_t *stream_handle); -/** - This function clears the stream_handle from the status snapshot that we got from viddec_fw_decoder_active_pending_interrupts(), - This should be called after host performs all necessary actions for the stream. - @param[in] strm_handle : The handle of a stream that we want to clear to indicate we handled it. - @param[in] status : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts(). - @retval 1 : Operation was sucessful. - @retval 0 : Invalid stream handle was passed. -*/ + /** + This function clears the stream_handle from the status snapshot that we got from viddec_fw_decoder_active_pending_interrupts(). + This should be called after the host performs all necessary actions for the stream. + @param[in] strm_handle : The handle of a stream that we want to clear to indicate we handled it. + @param[in] status : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts(). + @retval 1 : Operation was successful. + @retval 0 : Invalid stream handle was passed. + */ uint32_t viddec_fw_decoder_clear_stream_from_interrupt_status(uint32_t *status, uint32_t stream_handle); -/*@}*/ + /*@}*/ #ifdef __cplusplus } #endif diff --git a/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h b/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h index cdc0bff..4f4b479 100644 --- a/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h +++ b/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h @@ -1,57 +1,57 @@ -/* - This file is provided under a dual BSD/GPLv2 license.
When using or +/* + This file is provided under a dual BSD/GPLv2 license. When using or redistributing this file, you may do so under either license. - + GPL LICENSE SUMMARY - + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. - - This program is free software; you can redistribute it and/or modify + + This program is free software; you can redistribute it and/or modify it under the terms of version 2 of the GNU General Public License as published by the Free Software Foundation. - This program is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. - The full GNU General Public License is included in this distribution + The full GNU General Public License is included in this distribution in the file called LICENSE.GPL. Contact Information: - BSD LICENSE + BSD LICENSE Copyright(c) 2007-2009 Intel Corporation. All rights reserved. All rights reserved. - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright + * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the distribution. - * Neither the name of Intel Corporation nor the names of its - contributors may be used to endorse or promote products derived + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived from this software without specific prior written permission. - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ @@ -67,7 +67,7 @@ This enumeration lists all the frame types defined by the MPEG, VC1 and H264 specifications. Frame types applicable to a single codec are specified in the comments. */ -typedef enum +typedef enum { VIDDEC_FRAME_TYPE_INVALID=0, /** Unknown type - default value */ VIDDEC_FRAME_TYPE_IDR=0x1, /** IDR frame - h264 only */ @@ -88,7 +88,7 @@ typedef struct viddec_rect_size { unsigned int width; unsigned int height; -}viddec_rect_size_t; +} viddec_rect_size_t; /** This structure contains MPEG2 specific pan scan offsets extracted from the stream. @@ -97,7 +97,7 @@ typedef struct viddec_mpeg2_frame_center_offset { int horz; int vert; -}viddec_mpeg2_frame_center_offset_t; +} viddec_mpeg2_frame_center_offset_t; /** This structure contains the MPEG2 specific frame attributes. @@ -143,7 +143,7 @@ typedef struct viddec_mpeg2_frame_attributes unsigned int repeat_first_field; -}viddec_mpeg2_frame_attributes_t; +} viddec_mpeg2_frame_attributes_t; /** This structure contains MPEG2 specific pan scan offsets extracted from the stream. @@ -154,7 +154,7 @@ typedef struct viddec_vc1_pan_scan_window unsigned int voffset; unsigned int width; unsigned int height; -}viddec_vc1_pan_scan_window_t; +} viddec_vc1_pan_scan_window_t; /** This structure contains the VC1 specific frame attributes. @@ -169,7 +169,7 @@ typedef struct viddec_vc1_frame_attributes /** Frame/field repeat information in the bitstream. - Refer to "RPTFRM", "TFF", "BFF" in the picture layer + Refer to "RPTFRM", "TFF", "BFF" in the picture layer of the SMPTE VC1 Specification. */ unsigned int rptfrm; @@ -178,7 +178,7 @@ typedef struct viddec_vc1_frame_attributes /** Pan-scan information in the bitstream. - Refer to "PANSCAN_FLAG" in the entrypoint layer, "PS_PRESENT", "PS_HOFFSET", "PS_VOFFSET", + Refer to "PANSCAN_FLAG" in the entrypoint layer, "PS_PRESENT", "PS_HOFFSET", "PS_VOFFSET", "PS_WIDTH" and "PS_HEIGHT" in the picture layer of the SMPTE VC1 Specification. */ unsigned int panscan_flag; @@ -186,7 +186,7 @@ typedef struct viddec_vc1_frame_attributes unsigned int num_of_pan_scan_windows; viddec_vc1_pan_scan_window_t pan_scan_window[VIDDEC_PANSCAN_MAX_OFFSETS]; -}viddec_vc1_frame_attributes_t; +} viddec_vc1_frame_attributes_t; /** This structure contains the H264 specific frame attributes. 
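/*
 * The H264 attributes below carry the picture order count (POC) of each
 * field, and top_field_first is documented as derived from their
 * relation; a tiny standalone restatement of that rule:
 */
static unsigned int derive_top_field_first(int top_field_poc, int bottom_field_poc)
{
    /* 0 means bottom_field_POC is smaller than top_field_POC, else 1 */
    return (bottom_field_poc < top_field_poc) ? 0u : 1u;
}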
@@ -207,7 +207,7 @@ typedef struct viddec_h264_frame_attributes int bottom_field_poc; /** - Display size, which is cropped from content size. + Display size, which is cropped from content size. Currently, the cont_size is cropped, so this paramter is redundant, but in future, cont_size may be changed */ viddec_rect_size_t cropped_size; @@ -216,7 +216,7 @@ typedef struct viddec_h264_frame_attributes top_field_first: 0 means bottom_field_POC is smaller than top_field_POC, else 1 */ unsigned int top_field_first; - + /** field_frame_flag: 0 means all slice of this frame are frame-base encoded, else 1 */ @@ -236,7 +236,7 @@ typedef struct viddec_h264_frame_attributes #define viddec_fw_h264_mvc_get_is_base_view(x) viddec_fw_bitfields_extract( (x)->view_spcific_info, 16, 0x1) #define viddec_fw_h264_mvc_set_is_base_view(x, val) viddec_fw_bitfields_insert( (x)->view_spcific_info, val, 16, 0x1) unsigned int view_spcific_info; -}viddec_h264_frame_attributes_t; +} viddec_h264_frame_attributes_t; /** This structure contains the MPEG4 specific frame attributes. @@ -249,7 +249,7 @@ typedef struct viddec_mpeg4_frame_attributes */ unsigned int top_field_first; -}viddec_mpeg4_frame_attributes_t; +} viddec_mpeg4_frame_attributes_t; /** This structure groups all the frame attributes that are exported by the firmware. @@ -262,9 +262,9 @@ typedef struct viddec_frame_attributes Content size specified in the stream. For MPEG2, refer to "horizontal_size_value, vertical_size_value" of the sequence header and "horizontal_size_extension, vertical_size_extension" of the sequence extension in ITU-T H.262 Specification. - For H264, refer to "pic_width_in_mbs_minus1" and "pic_height_in_map_units_minus1" of the + For H264, refer to "pic_width_in_mbs_minus1" and "pic_height_in_map_units_minus1" of the sequence parameter set in ITU-T H.264 Specification. - For VC1, refer to "MAX_CODED_WIDTH" and "MAX_CODED_HEIGHT" in the sequence layer, + For VC1, refer to "MAX_CODED_WIDTH" and "MAX_CODED_HEIGHT" in the sequence layer, "CODED_SIZE_FLAG", "CODED_WIDTH" and "CODED_HEIGHT" in the entrypoint layer of the SMPTE VC1 Specification. */ viddec_rect_size_t cont_size; @@ -289,6 +289,6 @@ typedef struct viddec_frame_attributes viddec_mpeg4_frame_attributes_t mpeg4; }; -}viddec_frame_attributes_t; +} viddec_frame_attributes_t; #endif /* VIDDEC_FRAME_ATTR_H */ diff --git a/mix_vbp/viddec_fw/include/viddec_fw_item_types.h b/mix_vbp/viddec_fw/include/viddec_fw_item_types.h index 936c2e6..472dff2 100644 --- a/mix_vbp/viddec_fw/include/viddec_fw_item_types.h +++ b/mix_vbp/viddec_fw/include/viddec_fw_item_types.h @@ -1,57 +1,57 @@ -/* - This file is provided under a dual BSD/GPLv2 license. When using or +/* + This file is provided under a dual BSD/GPLv2 license. When using or redistributing this file, you may do so under either license. - + GPL LICENSE SUMMARY - + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. - - This program is free software; you can redistribute it and/or modify + + This program is free software; you can redistribute it and/or modify it under the terms of version 2 of the GNU General Public License as published by the Free Software Foundation. - This program is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. - The full GNU General Public License is included in this distribution + The full GNU General Public License is included in this distribution in the file called LICENSE.GPL. Contact Information: - BSD LICENSE + BSD LICENSE Copyright(c) 2007-2009 Intel Corporation. All rights reserved. All rights reserved. - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright + * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the distribution. - * Neither the name of Intel Corporation nor the names of its - contributors may be used to endorse or promote products derived + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived from this software without specific prior written permission. - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ @@ -66,7 +66,7 @@ the start and mask values for width and height. width: start = 0 mask=0xFFFF Height:start= 16 mask=0xFFFF - + extract: will extract data from x_32 of unsigned integer type from bit start offset(0 to 31) of length based on mask and returns in unsigned integer type. insert: Will insert N bits from val_32 ,where N is length based on mask, into x_32 from bit offset based on start. val_32 is expected to @@ -111,7 +111,7 @@ typedef enum workload_item_type VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED =0x1900,/* H264 only */ VIDDEC_WORKLOAD_SEI_RECOVERY_POINT =0x1a00,/* H264 only */ VIDDEC_WORKLOAD_MPEG2_SEQ_EXT =0x1b00,/* MPEG2 Only - Sequence Extension */ - VIDDEC_WORKLOAD_H264_MVC_SPS_VIEW_IDS =0x1c00,/* H264 only */ + VIDDEC_WORKLOAD_H264_MVC_SPS_VIEW_IDS =0x1c00,/* H264 only */ VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ =0x1d00,/* MPEG4 Only - Visual Sequence */ VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ =0x1e00,/* MPEG4 Only - Video Object Layer */ VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ =0x1f00,/* MPEG4 Only - Group of Video Object Planes */ @@ -122,9 +122,9 @@ typedef enum workload_item_type VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0 =0x10000,/* required reference frames tag,last eight bits indicate index in dpb */ VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 =0x20000,/* release frames tag, last eight bits indicate index in dpb*/ VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 =0x30000,/* Display order in DPB tag, for H264 */ - VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 =0x40000,/* Release frames but not display, for H264 */ - VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 =0x50000,/* Release list while EOS, last eight bits indicate index in dpb */ - VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 =0x60000,/* Display list while EOS, last eight bits indicate index in dpb */ + VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 =0x40000,/* Release frames but not display, for H264 */ + VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 =0x50000,/* Release list while EOS, last eight bits indicate index in dpb */ + VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 =0x60000,/* Display list while EOS, last eight bits indicate index in dpb */ VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0 =0x70000,/* required for H264 as it needs whole DPB for each frame */ VIDDEC_WORKLOAD_H264_REFR_LIST_0 =0x80000,/* ref list 0 for H264 */ VIDDEC_WORKLOAD_H264_REFR_LIST_1 =0x90000,/* ref list 1 for H264 */ @@ -132,7 +132,7 @@ typedef enum workload_item_type VIDDEC_WORKLOAD_DECODER_SPECIFIC =0x100000,/* pvt info for decoder tags */ VIDDEC_WORKLOAD_MAX, -}workload_item_type; +} workload_item_type; struct h264_witem_sps_mvc_id { @@ -188,24 +188,24 @@ typedef struct viddec_workload_item unsigned int es_phys_addr; unsigned int es_phys_len; unsigned int es_flags; - }es; + } es; struct { unsigned int tag_phys_addr; unsigned int tag_phys_len; unsigned int tag_value; - }tag; + } tag; struct { unsigned int data_offset; unsigned int data_payload[2]; - }data; + } data; struct { signed int reference_id; /* Assigned by parser */ unsigned int luma_phys_addr; /* assigned by host, for DM */ unsigned int chroma_phys_addr; /* 
assigned by host, for DM */ - }ref_frame; + } ref_frame; struct /* when vwi_type == VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER */ { signed int ref_table_offset; /* Index of first "reordered" */ @@ -228,7 +228,7 @@ typedef struct viddec_workload_item VIDDEC_WORKLOAD_SEQ_USER_DATA, VIDDEC_WORKLOAD_GOP_USER_DATA, VIDDEC_WORKLOAD_FRM_USER_DATA, VIDDEC_WORKLOAD_FLD_USER_DATA, VIDDEC_WORKLOAD_SLC_USER_DATA, */ - }user_data; + } user_data; struct { // Sequence Header Item I (From LSB): @@ -236,24 +236,24 @@ typedef struct viddec_workload_item // - vertical_size_value - 12 bits // - aspect_ratio_information - 4 bits // - frame_rate_code - 4 bits - #define viddec_fw_mp2_sh_get_horizontal_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 0, 0xFFF) - #define viddec_fw_mp2_sh_get_vertical_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 12, 0xFFF) - #define viddec_fw_mp2_sh_get_aspect_ratio_information(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 24, 0xF) - #define viddec_fw_mp2_sh_get_frame_rate_code(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 28, 0xF) - #define viddec_fw_mp2_sh_set_horizontal_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 0, 0xFFF) - #define viddec_fw_mp2_sh_set_vertical_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 12, 0xFFF) - #define viddec_fw_mp2_sh_set_aspect_ratio_information(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 24, 0xF) - #define viddec_fw_mp2_sh_set_frame_rate_code(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 28, 0xF) +#define viddec_fw_mp2_sh_get_horizontal_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 0, 0xFFF) +#define viddec_fw_mp2_sh_get_vertical_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 12, 0xFFF) +#define viddec_fw_mp2_sh_get_aspect_ratio_information(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 24, 0xF) +#define viddec_fw_mp2_sh_get_frame_rate_code(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 28, 0xF) +#define viddec_fw_mp2_sh_set_horizontal_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 0, 0xFFF) +#define viddec_fw_mp2_sh_set_vertical_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 12, 0xFFF) +#define viddec_fw_mp2_sh_set_aspect_ratio_information(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 24, 0xF) +#define viddec_fw_mp2_sh_set_frame_rate_code(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 28, 0xF) unsigned int seq_hdr_item_1; // Sequence Header Item II (From LSB): // - bit_rate_value - 18 bits // - vbv_buffer_size_value - 10 bits // - remaining pad bits - #define viddec_fw_mp2_sh_get_bit_rate_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 0, 0x3FFFF) - #define viddec_fw_mp2_sh_get_vbv_buffer_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 18, 0x3FF) - #define viddec_fw_mp2_sh_set_bit_rate_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 0, 0x3FFFF) - #define viddec_fw_mp2_sh_set_vbv_buffer_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 18, 0x3FF) +#define viddec_fw_mp2_sh_get_bit_rate_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 0, 0x3FFFF) +#define viddec_fw_mp2_sh_get_vbv_buffer_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 18, 0x3FF) +#define viddec_fw_mp2_sh_set_bit_rate_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 0, 0x3FFFF) +#define 
viddec_fw_mp2_sh_set_vbv_buffer_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 18, 0x3FF) unsigned int seq_hdr_item_2; unsigned int pad; @@ -268,18 +268,18 @@ typedef struct viddec_workload_item // - vertical_size_extension - 2 bits // - bit_rate_extension - 12 bits // - remaining pad bits - #define viddec_fw_mp2_se_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 0, 0xFF) - #define viddec_fw_mp2_se_get_progressive_sequence(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 8, 0x1) - #define viddec_fw_mp2_se_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 9, 0x3) - #define viddec_fw_mp2_se_get_horizontal_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 11, 0x3) - #define viddec_fw_mp2_se_get_vertical_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 13, 0x3) - #define viddec_fw_mp2_se_get_bit_rate_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 15, 0xFFF) - #define viddec_fw_mp2_se_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 0, 0xFF) - #define viddec_fw_mp2_se_set_progressive_sequence(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 8, 0x1) - #define viddec_fw_mp2_se_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 9, 0x3) - #define viddec_fw_mp2_se_set_horizontal_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 11, 0x3) - #define viddec_fw_mp2_se_set_vertical_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 13, 0x3) - #define viddec_fw_mp2_se_set_bit_rate_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 15, 0xFFF) +#define viddec_fw_mp2_se_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 0, 0xFF) +#define viddec_fw_mp2_se_get_progressive_sequence(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 8, 0x1) +#define viddec_fw_mp2_se_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 9, 0x3) +#define viddec_fw_mp2_se_get_horizontal_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 11, 0x3) +#define viddec_fw_mp2_se_get_vertical_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 13, 0x3) +#define viddec_fw_mp2_se_get_bit_rate_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 15, 0xFFF) +#define viddec_fw_mp2_se_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 0, 0xFF) +#define viddec_fw_mp2_se_set_progressive_sequence(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 8, 0x1) +#define viddec_fw_mp2_se_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 9, 0x3) +#define viddec_fw_mp2_se_set_horizontal_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 11, 0x3) +#define viddec_fw_mp2_se_set_vertical_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 13, 0x3) +#define viddec_fw_mp2_se_set_bit_rate_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 15, 0xFFF) unsigned int seq_ext_item_1; // Sequence Extension Item II (From LSB): @@ -287,12 +287,12 @@ typedef struct viddec_workload_item // - frame_rate_extension_n - 2 bits // - frame_rate_extension_d - 5 bits // - remaining pad bits - #define viddec_fw_mp2_se_get_vbv_buffer_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 0, 0xFF) - #define 
viddec_fw_mp2_se_get_frame_rate_extension_n(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 8, 0x3) - #define viddec_fw_mp2_se_get_frame_rate_extension_d(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 10, 0x1F) - #define viddec_fw_mp2_se_set_vbv_buffer_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 0, 0xFF) - #define viddec_fw_mp2_se_set_frame_rate_extension_n(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 8, 0x3) - #define viddec_fw_mp2_se_set_frame_rate_extension_d(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 10, 0x1F) +#define viddec_fw_mp2_se_get_vbv_buffer_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 0, 0xFF) +#define viddec_fw_mp2_se_get_frame_rate_extension_n(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 8, 0x3) +#define viddec_fw_mp2_se_get_frame_rate_extension_d(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 10, 0x1F) +#define viddec_fw_mp2_se_set_vbv_buffer_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 0, 0xFF) +#define viddec_fw_mp2_se_set_frame_rate_extension_n(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 8, 0x3) +#define viddec_fw_mp2_se_set_frame_rate_extension_d(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 10, 0x1F) unsigned int seq_ext_item_2; unsigned int pad; @@ -304,24 +304,24 @@ typedef struct viddec_workload_item // - display_vertical_size - 14 bits // - video_format - 3 bits // - color_description - 1 bit - #define viddec_fw_mp2_sde_get_display_horizontal_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 0, 0x3FFF) - #define viddec_fw_mp2_sde_get_display_vertical_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 14, 0x3FFF) - #define viddec_fw_mp2_sde_get_video_format(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 28, 0x7) - #define viddec_fw_mp2_sde_get_color_description(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 31, 0x1) - #define viddec_fw_mp2_sde_set_display_horizontal_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 0, 0x3FFF) - #define viddec_fw_mp2_sde_set_display_vertical_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 14, 0x3FFF) - #define viddec_fw_mp2_sde_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 28, 0x7) - #define viddec_fw_mp2_sde_set_color_description(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 31, 0x1) +#define viddec_fw_mp2_sde_get_display_horizontal_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 0, 0x3FFF) +#define viddec_fw_mp2_sde_get_display_vertical_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 14, 0x3FFF) +#define viddec_fw_mp2_sde_get_video_format(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 28, 0x7) +#define viddec_fw_mp2_sde_get_color_description(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 31, 0x1) +#define viddec_fw_mp2_sde_set_display_horizontal_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 0, 0x3FFF) +#define viddec_fw_mp2_sde_set_display_vertical_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 14, 0x3FFF) +#define viddec_fw_mp2_sde_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 28, 0x7) +#define viddec_fw_mp2_sde_set_color_description(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 31, 0x1) unsigned int seq_disp_ext_item_1; // 
Sequence Display Extension II (From LSB): // - color_primaries - 8 bits // - transfer_characteristics - 8 bits // - remaining pad bits - #define viddec_fw_mp2_sde_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 0, 0xFF) - #define viddec_fw_mp2_sde_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 8, 0xFF) - #define viddec_fw_mp2_sde_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 0, 0xFF) - #define viddec_fw_mp2_sde_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 8, 0xFF) +#define viddec_fw_mp2_sde_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 0, 0xFF) +#define viddec_fw_mp2_sde_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 8, 0xFF) +#define viddec_fw_mp2_sde_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 0, 0xFF) +#define viddec_fw_mp2_sde_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 8, 0xFF) unsigned int seq_disp_ext_item_2; unsigned int pad; @@ -332,10 +332,10 @@ typedef struct viddec_workload_item // - closed_gop - 1 bit // - broken_link - 1 bit // - remaining pad bits - #define viddec_fw_mp2_gop_get_closed_gop(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 0, 0x1) - #define viddec_fw_mp2_gop_get_broken_link(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 1, 0x1) - #define viddec_fw_mp2_gop_set_closed_gop(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 0, 0x1) - #define viddec_fw_mp2_gop_set_broken_link(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 1, 0x1) +#define viddec_fw_mp2_gop_get_closed_gop(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 0, 0x1) +#define viddec_fw_mp2_gop_get_broken_link(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 1, 0x1) +#define viddec_fw_mp2_gop_set_closed_gop(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 0, 0x1) +#define viddec_fw_mp2_gop_set_broken_link(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 1, 0x1) unsigned int gop_hdr_item_1; unsigned int pad1; @@ -343,44 +343,44 @@ typedef struct viddec_workload_item } mp2_gop; // mp2 item of type VIDDEC_WORKLOAD_GOP_INFO struct { - #define viddec_fw_vc1_get_profile(x) viddec_fw_bitfields_extract((x)->size, 30, 0x3) - #define viddec_fw_vc1_set_profile(x, val) viddec_fw_bitfields_insert((x)->size, val, 30, 0x3) +#define viddec_fw_vc1_get_profile(x) viddec_fw_bitfields_extract((x)->size, 30, 0x3) +#define viddec_fw_vc1_set_profile(x, val) viddec_fw_bitfields_insert((x)->size, val, 30, 0x3) - #define viddec_fw_vc1_get_level(x) viddec_fw_bitfields_extract((x)->size, 27, 0x7) - #define viddec_fw_vc1_set_level(x, val) viddec_fw_bitfields_insert((x)->size, val, 27, 0x7) +#define viddec_fw_vc1_get_level(x) viddec_fw_bitfields_extract((x)->size, 27, 0x7) +#define viddec_fw_vc1_set_level(x, val) viddec_fw_bitfields_insert((x)->size, val, 27, 0x7) - #define viddec_fw_vc1_get_colordiff_format(x) viddec_fw_bitfields_extract((x)->size, 25, 0x3) - #define viddec_fw_vc1_set_colordiff_format(x, val) viddec_fw_bitfields_insert((x)->size, val, 25, 0x3) +#define viddec_fw_vc1_get_colordiff_format(x) viddec_fw_bitfields_extract((x)->size, 25, 0x3) +#define viddec_fw_vc1_set_colordiff_format(x, val) viddec_fw_bitfields_insert((x)->size, val, 25, 0x3) - #define viddec_fw_vc1_get_pulldown(x) viddec_fw_bitfields_extract((x)->size, 
24, 0x1) - #define viddec_fw_vc1_set_pulldown(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1) +#define viddec_fw_vc1_get_pulldown(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1) +#define viddec_fw_vc1_set_pulldown(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1) - #define viddec_fw_vc1_get_max_coded_width(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF) - #define viddec_fw_vc1_set_max_coded_width(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF) +#define viddec_fw_vc1_get_max_coded_width(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF) +#define viddec_fw_vc1_set_max_coded_width(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF) - #define viddec_fw_vc1_get_max_coded_height(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF) - #define viddec_fw_vc1_set_max_coded_height(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF) +#define viddec_fw_vc1_get_max_coded_height(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF) +#define viddec_fw_vc1_set_max_coded_height(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF) - #define viddec_fw_vc1_get_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1F) - #define viddec_fw_vc1_set_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1F) +#define viddec_fw_vc1_get_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1F) +#define viddec_fw_vc1_set_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1F) - #define viddec_fw_vc1_get_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x7) - #define viddec_fw_vc1_set_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x7) +#define viddec_fw_vc1_get_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x7) +#define viddec_fw_vc1_set_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x7) - #define viddec_fw_vc1_get_interlace(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1) - #define viddec_fw_vc1_set_interlace(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1) +#define viddec_fw_vc1_get_interlace(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1) +#define viddec_fw_vc1_set_interlace(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1) - #define viddec_fw_vc1_get_tfcntrflag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1) - #define viddec_fw_vc1_set_tfcntrflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1) +#define viddec_fw_vc1_get_tfcntrflag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1) +#define viddec_fw_vc1_set_tfcntrflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1) - #define viddec_fw_vc1_get_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 2, 0x1) - #define viddec_fw_vc1_set_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 2, 0x1) +#define viddec_fw_vc1_get_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 2, 0x1) +#define viddec_fw_vc1_set_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 2, 0x1) - #define viddec_fw_vc1_get_psf(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x1) - #define viddec_fw_vc1_set_psf(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x1) +#define viddec_fw_vc1_get_psf(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x1) +#define viddec_fw_vc1_set_psf(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x1) - #define viddec_fw_vc1_get_display_ext(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1) - #define viddec_fw_vc1_set_display_ext(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 
0x1) +#define viddec_fw_vc1_get_display_ext(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1) +#define viddec_fw_vc1_set_display_ext(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1) uint32_t size; // profile:2, level:3, colordiff_format:2, pulldown:1, max_coded_width:12, max_coded_height:12 uint32_t flags; // bitrtq_postproc:5, frmrtq_postproc:3, interlace:1, tfcntrflag:1, finterpflag:1, psf:1, display_ext:1 @@ -390,47 +390,47 @@ typedef struct viddec_workload_item { // This item is populated when display_ext flag is set in the sequence layer // therefore, no need to provide this flag - #define viddec_fw_vc1_get_disp_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 18, 0x3FFF) - #define viddec_fw_vc1_set_disp_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 18, 0x3FFF) +#define viddec_fw_vc1_get_disp_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 18, 0x3FFF) +#define viddec_fw_vc1_set_disp_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 18, 0x3FFF) - #define viddec_fw_vc1_get_disp_vert_size(x) viddec_fw_bitfields_extract((x)->size, 4, 0x3FFF) - #define viddec_fw_vc1_set_disp_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 4, 0x3FFF) +#define viddec_fw_vc1_get_disp_vert_size(x) viddec_fw_bitfields_extract((x)->size, 4, 0x3FFF) +#define viddec_fw_vc1_set_disp_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 4, 0x3FFF) - #define viddec_fw_vc1_get_disp_aspect_ratio_flag(x) viddec_fw_bitfields_extract((x)->size, 3, 0x1) - #define viddec_fw_vc1_set_disp_aspect_ratio_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 3, 0x1) +#define viddec_fw_vc1_get_disp_aspect_ratio_flag(x) viddec_fw_bitfields_extract((x)->size, 3, 0x1) +#define viddec_fw_vc1_set_disp_aspect_ratio_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 3, 0x1) - #define viddec_fw_vc1_get_disp_color_format_flag(x) viddec_fw_bitfields_extract((x)->size, 2, 0x1) - #define viddec_fw_vc1_set_disp_color_format_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 2, 0x1) +#define viddec_fw_vc1_get_disp_color_format_flag(x) viddec_fw_bitfields_extract((x)->size, 2, 0x1) +#define viddec_fw_vc1_set_disp_color_format_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 2, 0x1) - #define viddec_fw_vc1_get_disp_framerate_flag(x) viddec_fw_bitfields_extract((x)->size, 1, 0x1) - #define viddec_fw_vc1_set_disp_framerate_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 1, 0x1) +#define viddec_fw_vc1_get_disp_framerate_flag(x) viddec_fw_bitfields_extract((x)->size, 1, 0x1) +#define viddec_fw_vc1_set_disp_framerate_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 1, 0x1) - #define viddec_fw_vc1_get_disp_framerateind(x) viddec_fw_bitfields_extract((x)->size, 0, 0x1) - #define viddec_fw_vc1_set_disp_framerateind(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0x1) +#define viddec_fw_vc1_get_disp_framerateind(x) viddec_fw_bitfields_extract((x)->size, 0, 0x1) +#define viddec_fw_vc1_set_disp_framerateind(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0x1) - #define viddec_fw_vc1_get_disp_aspect_ratio(x) viddec_fw_bitfields_extract((x)->framerate, 28, 0xF) - #define viddec_fw_vc1_set_disp_aspect_ratio(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 28, 0xF) +#define viddec_fw_vc1_get_disp_aspect_ratio(x) viddec_fw_bitfields_extract((x)->framerate, 28, 0xF) +#define viddec_fw_vc1_set_disp_aspect_ratio(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 28, 0xF) - #define viddec_fw_vc1_get_disp_frameratenr(x) 
viddec_fw_bitfields_extract((x)->framerate, 20, 0xFF) - #define viddec_fw_vc1_set_disp_frameratenr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 20, 0xFF) +#define viddec_fw_vc1_get_disp_frameratenr(x) viddec_fw_bitfields_extract((x)->framerate, 20, 0xFF) +#define viddec_fw_vc1_set_disp_frameratenr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 20, 0xFF) - #define viddec_fw_vc1_get_disp_frameratedr(x) viddec_fw_bitfields_extract((x)->framerate, 16, 0xF) - #define viddec_fw_vc1_set_disp_frameratedr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 16, 0xF) +#define viddec_fw_vc1_get_disp_frameratedr(x) viddec_fw_bitfields_extract((x)->framerate, 16, 0xF) +#define viddec_fw_vc1_set_disp_frameratedr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 16, 0xF) - #define viddec_fw_vc1_get_disp_framerateexp(x) viddec_fw_bitfields_extract((x)->framerate, 0, 0xFFFF) - #define viddec_fw_vc1_set_disp_framerateexp(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 0, 0xFFFF) +#define viddec_fw_vc1_get_disp_framerateexp(x) viddec_fw_bitfields_extract((x)->framerate, 0, 0xFFFF) +#define viddec_fw_vc1_set_disp_framerateexp(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 0, 0xFFFF) - #define viddec_fw_vc1_get_disp_aspect_ratio_horiz_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 24, 0xFF) - #define viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 24, 0xFF) +#define viddec_fw_vc1_get_disp_aspect_ratio_horiz_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 24, 0xFF) +#define viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 24, 0xFF) - #define viddec_fw_vc1_get_disp_aspect_ratio_vert_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 16, 0xFF) - #define viddec_fw_vc1_set_disp_aspect_ratio_vert_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 16, 0xFF) +#define viddec_fw_vc1_get_disp_aspect_ratio_vert_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 16, 0xFF) +#define viddec_fw_vc1_set_disp_aspect_ratio_vert_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 16, 0xFF) - #define viddec_fw_vc1_get_disp_color_prim(x) viddec_fw_bitfields_extract((x)->aspectsize, 8, 0xFF) - #define viddec_fw_vc1_set_disp_color_prim(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 8, 0xFF) +#define viddec_fw_vc1_get_disp_color_prim(x) viddec_fw_bitfields_extract((x)->aspectsize, 8, 0xFF) +#define viddec_fw_vc1_set_disp_color_prim(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 8, 0xFF) - #define viddec_fw_vc1_get_disp_transfer_char(x) viddec_fw_bitfields_extract((x)->aspectsize, 0, 0xFF) - #define viddec_fw_vc1_set_disp_transfer_char(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 0, 0xFF) +#define viddec_fw_vc1_get_disp_transfer_char(x) viddec_fw_bitfields_extract((x)->aspectsize, 0, 0xFF) +#define viddec_fw_vc1_set_disp_transfer_char(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 0, 0xFF) uint32_t size; // disp_horiz_size:14, disp_vert_size:14, aspect_ratio_flag:1, color_format_flag:1, framerate_flag:1, framerateind:1 uint32_t framerate; // aspect_ratio:4, frameratenr:8, frameratedr:4, framerateexp:16 @@ -438,35 +438,35 @@ typedef struct viddec_workload_item } vc1_sl_de; // vc1 item of type VIDDEC_WORKLOAD_DISPLAY_INFO struct { - #define viddec_fw_vc1_get_rcv_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 16, 0xFFFF) - #define viddec_fw_vc1_set_rcv_horiz_size(x, val) 
viddec_fw_bitfields_insert((x)->size, val, 16, 0xFFFF) +#define viddec_fw_vc1_get_rcv_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 16, 0xFFFF) +#define viddec_fw_vc1_set_rcv_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 16, 0xFFFF) - #define viddec_fw_vc1_get_rcv_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFFF) - #define viddec_fw_vc1_set_rcv_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFFF) +#define viddec_fw_vc1_get_rcv_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFFF) +#define viddec_fw_vc1_set_rcv_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFFF) - #define viddec_fw_vc1_get_rcv_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 16, 0x1F) - #define viddec_fw_vc1_set_rcv_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 16, 0x1F) +#define viddec_fw_vc1_get_rcv_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 16, 0x1F) +#define viddec_fw_vc1_set_rcv_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 16, 0x1F) - #define viddec_fw_vc1_get_rcv_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 13, 0x7) - #define viddec_fw_vc1_set_rcv_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 13, 0x7) +#define viddec_fw_vc1_get_rcv_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 13, 0x7) +#define viddec_fw_vc1_set_rcv_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 13, 0x7) - #define viddec_fw_vc1_get_rcv_profile(x) viddec_fw_bitfields_extract((x)->flags, 9, 0xF) - #define viddec_fw_vc1_set_rcv_profile(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0xF) +#define viddec_fw_vc1_get_rcv_profile(x) viddec_fw_bitfields_extract((x)->flags, 9, 0xF) +#define viddec_fw_vc1_set_rcv_profile(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0xF) - #define viddec_fw_vc1_get_rcv_level(x) viddec_fw_bitfields_extract((x)->flags, 6, 0x7) - #define viddec_fw_vc1_set_rcv_level(x, val) viddec_fw_bitfields_insert((x)->flags, val, 6, 0x7) +#define viddec_fw_vc1_get_rcv_level(x) viddec_fw_bitfields_extract((x)->flags, 6, 0x7) +#define viddec_fw_vc1_set_rcv_level(x, val) viddec_fw_bitfields_insert((x)->flags, val, 6, 0x7) - #define viddec_fw_vc1_get_rcv_cbr(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x1) - #define viddec_fw_vc1_set_rcv_cbr(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x1) +#define viddec_fw_vc1_get_rcv_cbr(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x1) +#define viddec_fw_vc1_set_rcv_cbr(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x1) - #define viddec_fw_vc1_get_rcv_rangered(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1) - #define viddec_fw_vc1_set_rcv_rangered(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1) +#define viddec_fw_vc1_get_rcv_rangered(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1) +#define viddec_fw_vc1_set_rcv_rangered(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1) - #define viddec_fw_vc1_get_rcv_maxbframes(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x7) - #define viddec_fw_vc1_set_rcv_maxbframes(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x7) +#define viddec_fw_vc1_get_rcv_maxbframes(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x7) +#define viddec_fw_vc1_set_rcv_maxbframes(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x7) - #define viddec_fw_vc1_get_rcv_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1) - #define viddec_fw_vc1_set_rcv_finterpflag(x, val) 
viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1) +#define viddec_fw_vc1_get_rcv_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1) +#define viddec_fw_vc1_set_rcv_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1) uint32_t size; // horiz_size:16, vert_size:16 uint32_t flags; // bitrtq_postproc:5, frmrtq_postproc:3, profile:4, level:3, cbr:1, rangered:1, maxbframes:3, finterpflag:1 @@ -474,35 +474,35 @@ typedef struct viddec_workload_item } vc1_sh_struct_a_c; // vc1 item of type VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C struct { - #define viddec_fw_vc1_get_ep_size_flag(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1) - #define viddec_fw_vc1_set_ep_size_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1) +#define viddec_fw_vc1_get_ep_size_flag(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1) +#define viddec_fw_vc1_set_ep_size_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1) - #define viddec_fw_vc1_get_ep_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF) - #define viddec_fw_vc1_set_ep_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF) +#define viddec_fw_vc1_get_ep_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF) +#define viddec_fw_vc1_set_ep_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF) - #define viddec_fw_vc1_get_ep_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF) - #define viddec_fw_vc1_set_ep_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF) +#define viddec_fw_vc1_get_ep_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF) +#define viddec_fw_vc1_set_ep_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF) - #define viddec_fw_vc1_get_ep_broken_link(x) viddec_fw_bitfields_extract((x)->flags, 10, 0x1) - #define viddec_fw_vc1_set_ep_broken_link(x, val) viddec_fw_bitfields_insert((x)->flags, val, 10, 0x1) +#define viddec_fw_vc1_get_ep_broken_link(x) viddec_fw_bitfields_extract((x)->flags, 10, 0x1) +#define viddec_fw_vc1_set_ep_broken_link(x, val) viddec_fw_bitfields_insert((x)->flags, val, 10, 0x1) - #define viddec_fw_vc1_get_ep_closed_entry(x) viddec_fw_bitfields_extract((x)->flags, 9, 0x1) - #define viddec_fw_vc1_set_ep_closed_entry(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0x1) +#define viddec_fw_vc1_get_ep_closed_entry(x) viddec_fw_bitfields_extract((x)->flags, 9, 0x1) +#define viddec_fw_vc1_set_ep_closed_entry(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0x1) - #define viddec_fw_vc1_get_ep_panscan_flag(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1) - #define viddec_fw_vc1_set_ep_panscan_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1) +#define viddec_fw_vc1_get_ep_panscan_flag(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1) +#define viddec_fw_vc1_set_ep_panscan_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1) - #define viddec_fw_vc1_get_ep_range_mapy_flag(x) viddec_fw_bitfields_extract((x)->flags, 7, 0x1) - #define viddec_fw_vc1_set_ep_range_mapy_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 7, 0x1) +#define viddec_fw_vc1_get_ep_range_mapy_flag(x) viddec_fw_bitfields_extract((x)->flags, 7, 0x1) +#define viddec_fw_vc1_set_ep_range_mapy_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 7, 0x1) - #define viddec_fw_vc1_get_ep_range_mapy(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x7) - #define viddec_fw_vc1_set_ep_range_mapy(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x7) +#define 
viddec_fw_vc1_get_ep_range_mapy(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x7) +#define viddec_fw_vc1_set_ep_range_mapy(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x7) - #define viddec_fw_vc1_get_ep_range_mapuv_flag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1) - #define viddec_fw_vc1_set_ep_range_mapuv_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1) +#define viddec_fw_vc1_get_ep_range_mapuv_flag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1) +#define viddec_fw_vc1_set_ep_range_mapuv_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1) - #define viddec_fw_vc1_get_ep_range_mapuv(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x7) - #define viddec_fw_vc1_set_ep_range_mapuv(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x7) +#define viddec_fw_vc1_get_ep_range_mapuv(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x7) +#define viddec_fw_vc1_set_ep_range_mapuv(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x7) uint32_t size; // coded_size_flag:1, coded_width:12, coded_height:12 uint32_t flags; // broken_link:1, closed_entry:1, panscan_flag:1, range_mapy_flag:1, range_mapy:3, range_mapuv_flag:1, range_mapuv:3 @@ -557,7 +557,7 @@ typedef struct viddec_workload_item unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */ unsigned int pad; } h264_cropping; // h264 item of type VIDDEC_WORKLOAD_H264_CROPPING - + struct { /* 0 bit for aspect_ratio_info_present_flag @@ -565,7 +565,7 @@ typedef struct viddec_workload_item 2 nd bit for colour_description_present_flag 3 rd bit for timing_info_present_flag 4 th bit for nal_hrd_parameters_present_flag - 5 th bit for vcl_hrd_parameters_present_flag + 5 th bit for vcl_hrd_parameters_present_flag 6 th bit for fixed_frame_rate_flag 7 th bit for pic_struct_present_flag 8 th bit for low_delay_hrd_flag @@ -601,7 +601,7 @@ typedef struct viddec_workload_item #define viddec_fw_h264_vui_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 16, 0xFF) /* lower 8 bits for aspect_ratio, next 8bits for color primaries and next 8 bits for transfer characteristics */ unsigned int aspc_color_transfer; - + #define viddec_fw_h264_vui_get_sar_width(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 16, 0xFFFF) #define viddec_fw_h264_vui_get_sar_height(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 0, 0xFFFF) #define viddec_fw_h264_vui_set_sar_width(x, val) viddec_fw_bitfields_insert( (x)->sar_width_height, val, 16, 0xFFFF) @@ -614,10 +614,10 @@ typedef struct viddec_workload_item #define viddec_fw_h264_vui_set_num_units_in_tick_flag(x, val) viddec_fw_bitfields_insert( (x)->num_units_in_tick, val, 0, 0xFFFFFFFF) #define viddec_fw_h264_vui_get_time_scale_flag(x) viddec_fw_bitfields_extract( (x)->time_scale, 0, 0xFFFFFFFF) #define viddec_fw_h264_vui_set_time_scale_flag(x, val) viddec_fw_bitfields_insert( (x)->time_scale, val, 0, 0xFFFFFFFF) - unsigned int num_units_in_tick; + unsigned int num_units_in_tick; unsigned int time_scale; unsigned int pad1; - } h264_vui_time_info; // VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO + } h264_vui_time_info; // VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO struct { unsigned int pic_struct; /* 4 bit length */ @@ -665,14 +665,14 @@ typedef struct viddec_workload_item unsigned int changing_slice_group_idc; /* 2bit value for slice_group idc */ } h264_sei_recovery_point; // h264 item of type VIDDEC_WORKLOAD_SEI_RECOVERY_POINT - + struct { // Visual Sequence (From LSB): // - profile_and_level_indication - 8 bits - #define 
viddec_fw_mp4_vs_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->vs_item, 0, 0xFF) - #define viddec_fw_mp4_vs_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->vs_item, val, 0, 0xFF) +#define viddec_fw_mp4_vs_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->vs_item, 0, 0xFF) +#define viddec_fw_mp4_vs_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->vs_item, val, 0, 0xFF) unsigned int vs_item; // Visual Object - video_signal_type @@ -680,23 +680,23 @@ typedef struct viddec_workload_item // - video_format - 3b // - video_range - 1b // - colour_description - 1b - #define viddec_fw_mp4_vo_get_colour_description(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 5, 0x1) - #define viddec_fw_mp4_vo_set_colour_description(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 5, 0x1) - #define viddec_fw_mp4_vo_get_video_range(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 4, 0x1) - #define viddec_fw_mp4_vo_set_video_range(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 4, 0x1) - #define viddec_fw_mp4_vo_get_video_format(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 1, 0x7) - #define viddec_fw_mp4_vo_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 1, 0x7) - #define viddec_fw_mp4_vo_get_video_signal_type(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 0, 0x1) - #define viddec_fw_mp4_vo_set_video_signal_type(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 0, 0x1) +#define viddec_fw_mp4_vo_get_colour_description(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 5, 0x1) +#define viddec_fw_mp4_vo_set_colour_description(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 5, 0x1) +#define viddec_fw_mp4_vo_get_video_range(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 4, 0x1) +#define viddec_fw_mp4_vo_set_video_range(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 4, 0x1) +#define viddec_fw_mp4_vo_get_video_format(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 1, 0x7) +#define viddec_fw_mp4_vo_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 1, 0x7) +#define viddec_fw_mp4_vo_get_video_signal_type(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 0, 0x1) +#define viddec_fw_mp4_vo_set_video_signal_type(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 0, 0x1) unsigned int video_signal_type; // Visual Object - video_signal_type // - color_primaries - 8 bits // - transfer_characteristics - 8 bits - #define viddec_fw_mp4_vo_get_transfer_char(x) viddec_fw_bitfields_extract( (x)->color_desc, 8, 0xFF) - #define viddec_fw_mp4_vo_set_transfer_char(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 8, 0xFF) - #define viddec_fw_mp4_vo_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->color_desc, 0, 0xFF) - #define viddec_fw_mp4_vo_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 0, 0xFF) +#define viddec_fw_mp4_vo_get_transfer_char(x) viddec_fw_bitfields_extract( (x)->color_desc, 8, 0xFF) +#define viddec_fw_mp4_vo_set_transfer_char(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 8, 0xFF) +#define viddec_fw_mp4_vo_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->color_desc, 0, 0xFF) +#define viddec_fw_mp4_vo_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 0, 0xFF) unsigned int color_desc; } mp4_vs_vo; // mp4 item 
of type VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ @@ -710,38 +710,38 @@ typedef struct viddec_workload_item // - chroma_format - 2b // - interlaced - 1b // - fixed_vop_rate - 1b - #define viddec_fw_mp4_vol_get_fixed_vop_rate(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 24, 0x1) - #define viddec_fw_mp4_vol_set_fixed_vop_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 24, 0x1) - #define viddec_fw_mp4_vol_get_interlaced(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 23, 0x1) - #define viddec_fw_mp4_vol_set_interlaced(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 23, 0x1) - #define viddec_fw_mp4_vol_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 21, 0x3) - #define viddec_fw_mp4_vol_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 21, 0x3) - #define viddec_fw_mp4_vol_get_control_param(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 20, 0x1) - #define viddec_fw_mp4_vol_set_control_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 20, 0x1) - #define viddec_fw_mp4_vol_get_par_height(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 12, 0xFF) - #define viddec_fw_mp4_vol_set_par_height(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 12, 0xFF) - #define viddec_fw_mp4_vol_get_par_width(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 4, 0xFF) - #define viddec_fw_mp4_vol_set_par_width(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 4, 0xFF) - #define viddec_fw_mp4_vol_get_aspect_ratio_info(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 0, 0xF) - #define viddec_fw_mp4_vol_set_aspect_ratio_info(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 0, 0xF) +#define viddec_fw_mp4_vol_get_fixed_vop_rate(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 24, 0x1) +#define viddec_fw_mp4_vol_set_fixed_vop_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 24, 0x1) +#define viddec_fw_mp4_vol_get_interlaced(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 23, 0x1) +#define viddec_fw_mp4_vol_set_interlaced(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 23, 0x1) +#define viddec_fw_mp4_vol_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 21, 0x3) +#define viddec_fw_mp4_vol_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 21, 0x3) +#define viddec_fw_mp4_vol_get_control_param(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 20, 0x1) +#define viddec_fw_mp4_vol_set_control_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 20, 0x1) +#define viddec_fw_mp4_vol_get_par_height(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 12, 0xFF) +#define viddec_fw_mp4_vol_set_par_height(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 12, 0xFF) +#define viddec_fw_mp4_vol_get_par_width(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 4, 0xFF) +#define viddec_fw_mp4_vol_set_par_width(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 4, 0xFF) +#define viddec_fw_mp4_vol_get_aspect_ratio_info(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 0, 0xF) +#define viddec_fw_mp4_vol_set_aspect_ratio_info(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 0, 0xF) unsigned int vol_aspect_ratio; // Video Object Layer(From LSB): // - vbv_parameters - 1b // - bit_rate - 30b - #define viddec_fw_mp4_vol_get_bit_rate(x) viddec_fw_bitfields_extract( 
(x)->vol_bit_rate, 1, 0x3FFFFFFF) - #define viddec_fw_mp4_vol_set_bit_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 1, 0x3FFFFFFF) - #define viddec_fw_mp4_vol_get_vbv_param(x) viddec_fw_bitfields_extract( (x)->vol_bit_rate, 0, 0x1) - #define viddec_fw_mp4_vol_set_vbv_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 0, 0x1) +#define viddec_fw_mp4_vol_get_bit_rate(x) viddec_fw_bitfields_extract( (x)->vol_bit_rate, 1, 0x3FFFFFFF) +#define viddec_fw_mp4_vol_set_bit_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 1, 0x3FFFFFFF) +#define viddec_fw_mp4_vol_get_vbv_param(x) viddec_fw_bitfields_extract( (x)->vol_bit_rate, 0, 0x1) +#define viddec_fw_mp4_vol_set_vbv_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 0, 0x1) unsigned int vol_bit_rate; // Video Object Layer(From LSB): // - fixed_vop_time_increment - 16b // - vop_time_increment_resolution - 16b - #define viddec_fw_mp4_vol_get_vop_time_increment_resolution(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 16, 0xFFFF) - #define viddec_fw_mp4_vol_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 16, 0xFFFF) - #define viddec_fw_mp4_vol_get_fixed_vop_time_increment(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 1, 0xFFFF) - #define viddec_fw_mp4_vol_set_fixed_vop_time_increment(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 1, 0xFFFF) +#define viddec_fw_mp4_vol_get_vop_time_increment_resolution(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 16, 0xFFFF) +#define viddec_fw_mp4_vol_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 16, 0xFFFF) +#define viddec_fw_mp4_vol_get_fixed_vop_time_increment(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 1, 0xFFFF) +#define viddec_fw_mp4_vol_set_fixed_vop_time_increment(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 1, 0xFFFF) unsigned int vol_frame_rate; } mp4_vol; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ @@ -751,12 +751,12 @@ typedef struct viddec_workload_item // - time_code - 18b // - closed_gov - 1b // - broken_link - 1b - #define viddec_fw_mp4_gvop_get_broken_link(x) viddec_fw_bitfields_extract((x)->gvop_info, 19, 0x1) - #define viddec_fw_mp4_gvop_set_broken_link(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 19, 0x1) - #define viddec_fw_mp4_gvop_get_closed_gov(x) viddec_fw_bitfields_extract((x)->gvop_info, 18, 0x1) - #define viddec_fw_mp4_gvop_set_closed_gov(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 18, 0x1) - #define viddec_fw_mp4_gvop_get_time_code(x) viddec_fw_bitfields_extract((x)->gvop_info, 0, 0x3FFFF) - #define viddec_fw_mp4_gvop_set_time_code(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 0, 0x3FFFF) +#define viddec_fw_mp4_gvop_get_broken_link(x) viddec_fw_bitfields_extract((x)->gvop_info, 19, 0x1) +#define viddec_fw_mp4_gvop_set_broken_link(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 19, 0x1) +#define viddec_fw_mp4_gvop_get_closed_gov(x) viddec_fw_bitfields_extract((x)->gvop_info, 18, 0x1) +#define viddec_fw_mp4_gvop_set_closed_gov(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 18, 0x1) +#define viddec_fw_mp4_gvop_get_time_code(x) viddec_fw_bitfields_extract((x)->gvop_info, 0, 0x3FFFF) +#define viddec_fw_mp4_gvop_set_time_code(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 0, 0x3FFFF) unsigned int gvop_info; unsigned int pad1; @@ -767,8 +767,8 @@ typedef struct viddec_workload_item { // Group of Video Object 
Planes(From LSB): // - source_format - 3b - #define viddec_fw_mp4_vpsh_get_source_format(x) viddec_fw_bitfields_extract((x)->info, 0, 0x7) - #define viddec_fw_mp4_vpsh_set_source_format(x, val) viddec_fw_bitfields_insert((x)->info, val, 0, 0x7) +#define viddec_fw_mp4_vpsh_get_source_format(x) viddec_fw_bitfields_extract((x)->info, 0, 0x7) +#define viddec_fw_mp4_vpsh_set_source_format(x, val) viddec_fw_bitfields_insert((x)->info, val, 0, 0x7) unsigned int info; unsigned int pad1; @@ -777,7 +777,7 @@ typedef struct viddec_workload_item unsigned int vwi_payload[3]; }; -}viddec_workload_item_t; +} viddec_workload_item_t; diff --git a/mix_vbp/viddec_fw/include/viddec_fw_parser_host.h b/mix_vbp/viddec_fw/include/viddec_fw_parser_host.h index 6d26555..550cf0a 100644 --- a/mix_vbp/viddec_fw/include/viddec_fw_parser_host.h +++ b/mix_vbp/viddec_fw/include/viddec_fw_parser_host.h @@ -1,235 +1,235 @@ -/* - This file is provided under a dual BSD/GPLv2 license. When using or +/* + This file is provided under a dual BSD/GPLv2 license. When using or redistributing this file, you may do so under either license. - + GPL LICENSE SUMMARY - + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. - - This program is free software; you can redistribute it and/or modify + + This program is free software; you can redistribute it and/or modify it under the terms of version 2 of the GNU General Public License as published by the Free Software Foundation. - This program is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. - The full GNU General Public License is included in this distribution + The full GNU General Public License is included in this distribution in the file called LICENSE.GPL. Contact Information: - BSD LICENSE + BSD LICENSE Copyright(c) 2007-2009 Intel Corporation. All rights reserved. All rights reserved. - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright + * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the distribution. 
- * Neither the name of Intel Corporation nor the names of its
- contributors may be used to endorse or promote products derived
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
 from this software without specific prior written permission.

- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
-
+
 #ifndef VIDDEC_FW_PARSER_HOST_H
 #define VIDDEC_FW_PARSER_HOST_H
-
+
 #ifdef __cplusplus
 extern "C" {
 #endif
 #include "viddec_fw_common_defs.h"
-
-/** @weakgroup viddec Fw Parser interface Functions */
-/** @ingroup viddec_fw_parser */
-/*@{*/
-
-/**
- This function returns the size required for loading fw.
- @retval size : Required size.
-*/
+
+ /** @weakgroup viddec Fw Parser interface Functions */
+ /** @ingroup viddec_fw_parser */
+ /*@{*/
+
+ /**
+ This function returns the size required for loading fw.
+ @retval size : Required size.
+ */
 uint32_t viddec_fw_parser_query_fwsize(void);
-
-/**
- This function loads Parser Firmware and initialises necessary state information.This a synchronous message to FW.
- @param[in] phys : Physical address on where firmware should be loaded.
- @param[in] len : Length of data allocated at phys.
- @retval VIDDEC_FW_SUCCESS : Successfully loaded firmware.
- @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
- @retval VIDDEC_FW_NORESOURCES : Failed to allocate resources for Loading firmware.
- @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
-*/
+
+ /**
+ This function loads Parser Firmware and initialises necessary state information. This is a synchronous message to FW.
+ @param[in] phys : Physical address where firmware should be loaded.
+ @param[in] len : Length of data allocated at phys.
+ @retval VIDDEC_FW_SUCCESS : Successfully loaded firmware.
+ @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+ @retval VIDDEC_FW_NORESOURCES : Failed to allocate resources for Loading firmware.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+ */
 uint32_t viddec_fw_parser_loadfw(uint32_t phys, uint32_t len);

-/**
- This function returns the size required opening a stream. This a synchronous message to FW.
- @param[in] codec_type : Type of codec that we want information about.
- @param[out] num_wklds : Number of wklds required for initialisation.
- @param[out] size : Size of memory required for opening a stream.
-*/
+ /**
+ This function returns the size required for opening a stream. This is a synchronous message to FW.
+ @param[in] codec_type : Type of codec that we want information about.
+ @param[out] num_wklds : Number of wklds required for initialisation.
+ @param[out] size : Size of memory required for opening a stream.
+ */
 void viddec_fw_parser_query_streamsize(uint32_t codec_type, uint32_t *num_wklds, uint32_t *size);

-/**
- This function opens requested codec.This a synchronous message to FW.
- @param[in] codec_type : Type of codec that we want to open.
- @param[in] phys : Physical address of allocated memory for this codec.
- @param[in] prority : Priority of stream. 1 for realtime and 0 for background.
- @param[out] strm_handle : Handle of the opened stream.
- @retval VIDDEC_FW_SUCCESS : Successfully Opened the stream.
- @retval VIDDEC_FW_FAILURE : Failed to Open a stream.
- @retval VIDDEC_FW_NORESOURCES : Failed to Open a stream as we are out of resources.
-*/
+ /**
+ This function opens the requested codec. This is a synchronous message to FW.
+ @param[in] codec_type : Type of codec that we want to open.
+ @param[in] phys : Physical address of allocated memory for this codec.
+ @param[in] priority : Priority of stream. 1 for realtime and 0 for background.
+ @param[out] strm_handle : Handle of the opened stream.
+ @retval VIDDEC_FW_SUCCESS : Successfully Opened the stream.
+ @retval VIDDEC_FW_FAILURE : Failed to Open a stream.
+ @retval VIDDEC_FW_NORESOURCES : Failed to Open a stream as we are out of resources.
+ */
 uint32_t viddec_fw_parser_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority);

-/**
- This function closes stream.This a synchronous message to FW.
- For the close stream to be effective, host has to do flush with discard first and then close the stream.
- @param[in] strm_handle : Handle of the stream to close.
-*/
+ /**
+ This function closes a stream. This is a synchronous message to FW.
+ For the close to be effective, the host has to do a flush with discard first and then close the stream.
+ @param[in] strm_handle : Handle of the stream to close.
+ */
 void viddec_fw_parser_closestream(uint32_t strm_handle);
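
The closestream rule above (flush with discard first, then close) deserves a concrete illustration; below is a minimal host-side sketch. VIDDEC_STREAM_FLUSH_DISCARD is an assumed name for the discard flush_type, and viddec_fw_parser_flushstream/viddec_fw_parser_recv are documented further down in this header.

    /* Hypothetical teardown per the closestream doc: flush with discard,
     * drain the output queue, then close the stream. */
    static void host_close_stream(uint32_t strm_handle)
    {
        ipc_msg_data wkld;

        /* VIDDEC_STREAM_FLUSH_DISCARD is an assumed constant, not defined here. */
        if (viddec_fw_parser_flushstream(strm_handle, VIDDEC_STREAM_FLUSH_DISCARD) != VIDDEC_FW_SUCCESS)
            return; /* e.g. VIDDEC_FW_NEED_FREE_WKLD: add a free workload and retry */

        /* Read back everything the flush pushed out so the close takes effect. */
        while (viddec_fw_parser_recv(strm_handle, &wkld) == VIDDEC_FW_SUCCESS)
            ; /* discard each returned workload */

        viddec_fw_parser_closestream(strm_handle);
    }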

-/**
- This function flushes the current stream. This is a synchronous message to FW.
- Before calling this function the host has to make sure the output queue of the firmware
- is empty. After this function is executed the FW will read all entries in input
- es buffer queue into a free or partial workload and push it into output queue.
- After this operation the host has to read all entries in output queue again to
- finish the flush operation.
- @param[in] flush_type : Type of flush we want to perform.ex:flush and discard.
- @param[in] strm_handle : Handle of the stream we want to flush.
- @retval VIDDEC_FW_SUCCESS : Successfully flushed the stream.
- @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
- @retval VIDDEC_FW_NEED_FREE_WKLD : Failed to flush sice a free wkld was not available.
-*/
+ /**
+ This function flushes the current stream. This is a synchronous message to FW.
+ Before calling this function the host has to make sure the output queue of the firmware
+ is empty. After this function is executed the FW will read all entries in the input
+ es buffer queue into a free or partial workload and push it into the output queue.
+ After this operation the host has to read all entries in the output queue again to
+ finish the flush operation.
+ @param[in] flush_type : Type of flush we want to perform, e.g. flush and discard.
+ @param[in] strm_handle : Handle of the stream we want to flush.
+ @retval VIDDEC_FW_SUCCESS : Successfully flushed the stream.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+ @retval VIDDEC_FW_NEED_FREE_WKLD : Failed to flush since a free wkld was not available.
+ */
 uint32_t viddec_fw_parser_flushstream(uint32_t strm_handle, uint32_t flush_type);

-/**
- This function sends an input es buffer.
- @param[in] strm_handle : The handle of stream that we want to send es buffer to.
- @param[in] message : The es buffer we want to send.
- @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
- @retval VIDDEC_FW_PORT_FULL : Port to fw full unsuccesful in sending message.
- @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
-*/
+ /**
+ This function sends an input es buffer.
+ @param[in] strm_handle : The handle of the stream that we want to send the es buffer to.
+ @param[in] message : The es buffer we want to send.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+ @retval VIDDEC_FW_PORT_FULL : Port to fw is full; unsuccessful in sending the message.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+ */
 uint32_t viddec_fw_parser_send(uint32_t strm_handle, ipc_msg_data *message);

-/**
- This function gets the next processed workload. The host is required to add free workloads
- to keep the parser busy. The FW will stall when it doesn't have enough workloads(2) to continue.
- @param[in] strm_handle : The handle of stream that we want to read workload from.
- @param[out] message : The workload descriptor.
- @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
- @retval VIDDEC_FW_PORT_EMPTY : Workload port is empty,unsuccesful in reading wkld.
- @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
-*/
+ /**
+ This function gets the next processed workload. The host is required to add free workloads
+ to keep the parser busy. The FW will stall when it doesn't have enough workloads (2) to continue.
+ @param[in] strm_handle : The handle of the stream that we want to read a workload from.
+ @param[out] message : The workload descriptor.
+ @retval VIDDEC_FW_SUCCESS : Successfully received the message.
+ @retval VIDDEC_FW_PORT_EMPTY : Workload port is empty; unsuccessful in reading a wkld.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+ */
 uint32_t viddec_fw_parser_recv(uint32_t strm_handle, ipc_msg_data *message);
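
Taken together, flushstream/send/recv describe a simple host-side pumping pattern; a sketch under the assumption that the ipc_msg_data descriptors for es buffers and free workloads were prepared elsewhere:

    /* Hypothetical steady-state pump: keep the FW stocked with free workloads
     * (it stalls below 2), feed one es buffer, and collect finished workloads. */
    static void host_pump(uint32_t strm_handle, ipc_msg_data *es_buf, ipc_msg_data *free_wkld)
    {
        ipc_msg_data done;

        (void)viddec_fw_parser_addwkld(strm_handle, free_wkld); /* declared just below */

        if (viddec_fw_parser_send(strm_handle, es_buf) == VIDDEC_FW_PORT_FULL) {
            /* input port full: drain the output queue and retry later */
        }

        while (viddec_fw_parser_recv(strm_handle, &done) == VIDDEC_FW_SUCCESS) {
            /* hand each finished workload to the decode stage */
        }
    }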

-/**
- This function adds a free workload to current stream.
- @param[in] strm_handle : The handle of stream that we want to write workload to.
- @param[out] message : The workload descriptor.
- @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
- @retval VIDDEC_FW_PORT_FULL : Workload port is full,unsuccesful in writing wkld.
- @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
-*/
+ /**
+ This function adds a free workload to the current stream.
+ @param[in] strm_handle : The handle of the stream that we want to write the workload to.
+ @param[out] message : The workload descriptor.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+ @retval VIDDEC_FW_PORT_FULL : Workload port is full; unsuccessful in writing the wkld.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+ */
 uint32_t viddec_fw_parser_addwkld(uint32_t strm_handle, ipc_msg_data *message);

-/**
- This function enables or disables Interrupts for a stream. By default the FW will always enable interrupts.
- The driver can disable/enable Interrupts if it needs for this particular stream.
- @param[in] strm_handle : The handle of stream that we want to get mask from
- @param[in] mask : This is read as boolean variable, true to enable, false to disable.
- @retval VIDDEC_FW_SUCCESS : Successfully set mask.
- @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
-*/
+ /**
+ This function enables or disables Interrupts for a stream. By default the FW will always enable interrupts.
+ The driver can disable/enable Interrupts if it needs to for this particular stream.
+ @param[in] strm_handle : The handle of the stream whose mask we want to set.
+ @param[in] mask : This is read as a boolean variable, true to enable, false to disable.
+ @retval VIDDEC_FW_SUCCESS : Successfully set the mask.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+ */
 uint32_t viddec_fw_parser_set_interruptmask(uint32_t strm_handle, uint32_t mask);

-/**
- This function gets the interrupt status for current stream.
- When the host gets Interrupted since its a global interrupt it's expected that host will look at all active streams,
- by calling this function. The status is what the FW thinks the current state of stream is. The status information that
- FW provides is complete information on all possible events that are defined. The host should only access this information
- in its ISR at which state FW doesn't modify this information.
-
- @param[in] strm_handle : The handle of stream that we want to get mask from
- @param[out] status : The status of the stream based on viddec_fw_parser_int_status_t enum.
- @retval VIDDEC_FW_SUCCESS : Successfully in reading status.
- @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
-*/
+ /**
+ This function gets the interrupt status for the current stream.
+ Since it is a global interrupt, when the host gets interrupted it is expected to look at all active streams
+ by calling this function. The status is what the FW thinks the current state of the stream is. The status
+ information the FW provides is complete information on all possible events that are defined. The host should
+ only access this information in its ISR, during which the FW doesn't modify it.
+
+ @param[in] strm_handle : The handle of the stream whose status we want to get.
+ @param[out] status : The status of the stream based on the viddec_fw_parser_int_status_t enum.
+ @retval VIDDEC_FW_SUCCESS : Successfully read the status.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+ */
 uint32_t viddec_fw_parser_getstatus(uint32_t strm_handle, uint32_t *status);

-/**
- This function allows to set stream attributes that are supported.
- @param[in] strm_handle : The handle of stream that we want to set attribute on.
- @param[in] type : The type of attribute we want to set, this should be one of items in viddec_fw_stream_attributes_t.
- @param[in] value : The value of the type that we want to set.
- @retval VIDDEC_FW_SUCCESS : Successfully Set the attribute.
- @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
-*/
+ /**
+ This function allows setting the stream attributes that are supported.
+ @param[in] strm_handle : The handle of the stream that we want to set an attribute on.
+ @param[in] type : The type of attribute we want to set; this should be one of the items in viddec_fw_stream_attributes_t.
+ @param[in] value : The value of the type that we want to set.
+ @retval VIDDEC_FW_SUCCESS : Successfully Set the attribute.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+ */
 uint32_t viddec_fw_parser_set_stream_attributes(uint32_t strm_handle, uint32_t type, uint32_t value);

-/**
- This function allows to get current status of all the parser queues. If the current stream is active we return
- number of inout messages that can be written to input queue, no of messages in output queue and number of
- free available workloads the stream has.
- Normally this is called when Host receives an interrupt from parser, In which case before releasing the INT
- Host will try its best to keep the FW busy. We always get a interrupt if we passed the watermark on input or
- a workload was pushed into output and INT line is free. If host holds onto INT when firmware tries to send an INT
- FW would send the Interrupt after host releases INT. Since we have EDGE triggered interrupts we cannot guarantee
- one interrupt per frame, ex: If three frames are generated and after the first frame FW was able to provide an INT
- to host, but host held on to INT while the FW finished the next two frames, after host releases the INT the FW will
- give only one INT and host should try to empty output queue.
- @param[in] strm_handle : The handle of stream that we want to get status of queues.
- @param[out] status : The status of each queue gets updated in here.
- @retval VIDDEC_FW_SUCCESS : Successfully Got the status information.
- @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter in this case an inactive stream.
-*/
+ /**
+ This function allows getting the current status of all the parser queues. If the current stream is active we return
+ the number of input messages that can be written to the input queue, the number of messages in the output queue and
+ the number of free workloads the stream has available.
+ Normally this is called when the Host receives an interrupt from the parser, in which case, before releasing the INT,
+ the Host will try its best to keep the FW busy. We always get an interrupt if we passed the watermark on input or if
+ a workload was pushed into output and the INT line is free. If the host holds onto the INT when the firmware tries to
+ send one, the FW will send the Interrupt after the host releases it. Since we have EDGE triggered interrupts we cannot
+ guarantee one interrupt per frame. For example: if three frames are generated, and after the first frame the FW was
+ able to deliver an INT to the host, but the host held on to the INT while the FW finished the next two frames, then
+ after the host releases the INT the FW will give only one INT, and the host should try to empty the output queue.
+ @param[in] strm_handle : The handle of the stream whose queue status we want to get.
+ @param[out] status : The status of each queue gets updated in here.
+ @retval VIDDEC_FW_SUCCESS : Successfully Got the status information.
+ @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter, in this case an inactive stream.
+ */
 uint32_t viddec_fw_parser_get_queue_status(uint32_t strm_handle, viddec_fw_q_status_t *status);
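
Since the interrupt is global and edge triggered, the documentation above implies a specific ISR shape; a sketch, where the stream-handle bookkeeping (handles/num_streams) is assumed and viddec_fw_parser_clear_global_interrupt is declared further down in this header:

    /* Hypothetical ISR skeleton: poll every active stream, drain its output
     * queue, and clear the global interrupt last. */
    static void host_parser_isr(const uint32_t *handles, int num_streams)
    {
        int i;
        for (i = 0; i < num_streams; i++) {
            uint32_t status = 0;
            viddec_fw_q_status_t q;

            if (viddec_fw_parser_getstatus(handles[i], &status) != VIDDEC_FW_SUCCESS)
                continue; /* e.g. an inactive stream */

            (void)viddec_fw_parser_get_queue_status(handles[i], &q);
            /* One INT may cover several frames, so empty the whole output
             * queue here with viddec_fw_parser_recv() before moving on. */
        }
        viddec_fw_parser_clear_global_interrupt(); /* last thing before exiting the ISR */
    }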

-/**
- This function unloads Parser Firmware and free's the resources allocated in Load fw.
- If this function is called before load fw it will crash with a segmentation fault.
-*/
+ /**
+ This function unloads Parser Firmware and frees the resources allocated in Load fw.
+ If this function is called before load fw it will crash with a segmentation fault.
+ */
 void viddec_fw_parser_deinit(void);

-/**
- This function gets the major and minor revison numbers of the loaded firmware.
- @param[out] major : The major revision numner.
- @param[out] minor : The minor revision number.
- @param[out] build : The Internal Build number.
-*/
+ /**
+ This function gets the major and minor revision numbers of the loaded firmware.
+ @param[out] major : The major revision number.
+ @param[out] minor : The minor revision number.
+ @param[out] build : The Internal Build number.
+ */
 void viddec_fw_parser_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build);
-
-/**
- This function clears the global interrupt. This is the last thing host calls before exiting ISR.
-*/
+
+ /**
+ This function clears the global interrupt. This is the last thing the host calls before exiting the ISR.
+ */
 void viddec_fw_parser_clear_global_interrupt(void);
-/*@}*/
+ /*@}*/
 #ifdef __cplusplus
 }
 #endif

diff --git a/mix_vbp/viddec_fw/include/viddec_fw_workload.h b/mix_vbp/viddec_fw/include/viddec_fw_workload.h
index 73c5ab3..3b86270 100644
--- a/mix_vbp/viddec_fw/include/viddec_fw_workload.h
+++ b/mix_vbp/viddec_fw/include/viddec_fw_workload.h
@@ -1,57 +1,57 @@
-/*
- This file is provided under a dual BSD/GPLv2 license. When using or
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
 redistributing this file, you may do so under either license.
-
+
 GPL LICENSE SUMMARY
-
+
 Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
-
- This program is free software; you can redistribute it and/or modify
+
+ This program is free software; you can redistribute it and/or modify
 it under the terms of version 2 of the GNU General Public License as
 published by the Free Software Foundation.

- This program is distributed in the hope that it will be useful, but
- WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 General Public License for more details.

- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
 Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.

- The full GNU General Public License is included in this distribution
+ The full GNU General Public License is included in this distribution
 in the file called LICENSE.GPL.

 Contact Information:

- BSD LICENSE
+ BSD LICENSE

 Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
 All rights reserved.

- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
 are met:

- * Redistributions of source code must retain the above copyright
+ * Redistributions of source code must retain the above copyright
 notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in
- the documentation and/or other materials provided with the
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
 distribution.
- * Neither the name of Intel Corporation nor the names of its
- contributors may be used to endorse or promote products derived
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
 from this software without specific prior written permission.

- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

@@ -83,7 +83,7 @@ typedef struct viddec_frame_reference
 unsigned int luma_phys_addr;
 unsigned int chroma_phys_addr;
 int internal_id; /* Used by workload manager only */
-}viddec_frame_reference_t;
+} viddec_frame_reference_t;

 #define WORKLOAD_REFERENCE_FRAME (1 << 16)
 #define WORKLOAD_SKIPPED_FRAME (1 << 17)
@@ -103,24 +103,24 @@ Will be used for VC1 only.

 range_adjusted_out: Frame buffer needed to store range adjusted frames for VC1 only.
 Range adjustment in VC1 requires that the luma/chroma values in the decoded frame be modified
 before the frame can be displayed. In this case, we need a new frame buffer to store the adjusted values.
- The parser will indicate this requirement by setting the WORKLOAD_FLAGS_RA_FRAME bit in the
+ The parser will indicate this requirement by setting the WORKLOAD_FLAGS_RA_FRAME bit in the
 is_reference_frame of the workload. The decoder expects this field to be valid when range adjustment
 is indicated and populates this frame buffer along with frame_out.

 Expectation from user:
 Before feeding the workload to the decoder, do the following:
- If pip is indicated/needed,
+ If pip is indicated/needed,
 provide the pip_out buffer
- If range adjustment is indicated (WORKLOAD_FLAGS_RA_FRAME bit in is_reference_frame is set),
+ If range adjustment is indicated (WORKLOAD_FLAGS_RA_FRAME bit in is_reference_frame is set),
 provide the range_adjusted_out buffer
 Provide the frame_out buffer.

 After the workload is returned from the decoder, do the following:
- If pip is indicated,
+ If pip is indicated,
 display the pip_out buffer
- Else If range adjustment is indicated,
+ Else if range adjustment is indicated,
 display the range_adjusted_out buffer
- Else
+ Else
 display the frame_out buffer.
 */
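
The selection rule in the comment above is mechanical enough to state in code. In this sketch, pip_indicated is a hypothetical flag (the comment does not show how pip is signalled), the buffer pointers are the ones the user attached before decoding, and viddec_workload_t is assumed to be the typedef this struct closes with:

    /* Display-buffer choice per the comment above. WORKLOAD_FLAGS_RA_FRAME is
     * the bit the comment names in is_reference_frame; it is not defined in
     * the hunks shown here. */
    static void *pick_display_buffer(const viddec_workload_t *wl, int pip_indicated,
                                     void *pip_out, void *range_adjusted_out, void *frame_out)
    {
        if (pip_indicated)
            return pip_out;
        if (wl->is_reference_frame & WORKLOAD_FLAGS_RA_FRAME)
            return range_adjusted_out;
        return frame_out;
    }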
 typedef struct viddec_workload
@@ -128,7 +128,7 @@ typedef struct viddec_workload
 enum viddec_stream_format codec;
 signed int is_reference_frame;
 unsigned int result;
- unsigned int time;
+ unsigned int time;
 unsigned int num_items;/* number of viddec_workload_item_t in current workload */
 unsigned int num_error_mb; /* Number of error macroblocks in the current picture. */
 viddec_frame_attributes_t attrs;

diff --git a/mix_video/autogen.sh b/mix_video/autogen.sh
deleted file mode 100644
index 558a695..0000000
--- a/mix_video/autogen.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#INTEL CONFIDENTIAL
-#Copyright 2009 Intel Corporation All Rights Reserved.
-#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
-#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
-#
-
-package=MixVideo
-
-#Uncomment the follow line if building documentation using gtkdoc
-#gtkdocize --flavour no-tmpl || exit 1
-aclocal -I m4/ $ACLOCAL_FLAGS || exit 1
-libtoolize --copy --force || exit 1
-autoheader -v || exit 1
-autoconf -v || exit 1
-automake -a -c -v || exit 1
-
-echo "Now type ./configure to configure $package."
-exit 0

diff --git a/mix_video/docs/reference/MixVideo/html/MixBuffer.html b/mix_video/docs/reference/MixVideo/html/MixBuffer.html
index 1183b43..8f50f9e 100644
--- a/mix_video/docs/reference/MixVideo/html/MixBuffer.html
+++ b/mix_video/docs/reference/MixVideo/html/MixBuffer.html
@@ -43,16 +43,16 @@

Synopsis

-void                (*MixBufferCallback)                (gulong token,
-                                                         guchar *data);
+void                (*MixBufferCallback)                (ulong token,
+                                                         uchar *data);
                     MixBuffer;
 MixBuffer *         mix_buffer_new                      (void);
 MixBuffer *         mix_buffer_ref                      (MixBuffer *mix);
 void                mix_buffer_unref                    (MixBuffer *mix);
 MIX_RESULT          mix_buffer_set_data                 (MixBuffer *obj,
-                                                         guchar *data,
-                                                         guint size,
-                                                         gulong token,
+                                                         uchar *data,
+                                                         uint size,
+                                                         ulong token,
                                                          MixBufferCallback callback);
 
@@ -88,8 +88,8 @@ data buffers as necessary).

Details

MixBufferCallback ()

-
void                (*MixBufferCallback)                (gulong token,
-                                                         guchar *data);
+
void                (*MixBufferCallback)                (ulong token,
+                                                         uchar *data);

@@ -99,10 +99,10 @@ data buffers as necessary).
   /* Pointer to coded data buffer */
-  guchar *data;
+  uchar *data;
 
   /* Size of coded data buffer */
-  guint size;
+  uint size;
 
   /* Token that will be passed to
    * the callback function. Can be
@@ -111,7 +111,7 @@ data buffers as necessary).
    * with this coded data buffer,
    * such as a pointer to a structure
    * belonging to the application. */
-  gulong token;
+  ulong token;
 
   /* callback function pointer */
   MixBufferCallback callback;

@@ -176,9 +176,9 @@ Decrement reference count of the object.

mix_buffer_set_data ()

MIX_RESULT          mix_buffer_set_data                 (MixBuffer *obj,
-                                                         guchar *data,
-                                                         guint size,
-                                                         gulong token,
+                                                         uchar *data,
+                                                         uint size,
+                                                         ulong token,
                                                          MixBufferCallback callback);

Set data buffer, size, token and callback function
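
A short usage sketch of the recycling contract this page describes: the callback hands the coded-data buffer back to the application once MI-X is done with it. MIX_RESULT_SUCCESS is an assumed success code, and the surrounding MixVideo headers are assumed to be on the include path.

    #include <stdlib.h>   /* for free() in this sketch */

    /* Called by MI-X when the coded buffer is no longer needed. */
    static void on_buffer_done(ulong token, uchar *data)
    {
        /* token is whatever was passed to mix_buffer_set_data, e.g. a pool id */
        free(data);
    }

    static MixBuffer *wrap_coded_data(uchar *data, uint size, ulong pool_id)
    {
        MixBuffer *buf = mix_buffer_new();
        if (buf == NULL)
            return NULL;
        if (mix_buffer_set_data(buf, data, size, pool_id, on_buffer_done)
                != MIX_RESULT_SUCCESS) {   /* assumed success code */
            mix_buffer_unref(buf);
            return NULL;
        }
        return buf;
    }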

diff --git a/mix_video/docs/reference/MixVideo/html/MixDisplay.html b/mix_video/docs/reference/MixVideo/html/MixDisplay.html index 04a75c6..cfce197 100644 --- a/mix_video/docs/reference/MixVideo/html/MixDisplay.html +++ b/mix_video/docs/reference/MixVideo/html/MixDisplay.html @@ -46,24 +46,24 @@ #define MIX_DISPLAY_CLASS (klass) #define MIX_DISPLAY_CAST (obj) MixDisplay * (*MixDisplayDupFunction) (const MixDisplay *obj); -gboolean (*MixDisplayCopyFunction) (MixDisplay *target, +bool (*MixDisplayCopyFunction) (MixDisplay *target, const MixDisplay *src); void (*MixDisplayFinalizeFunction) (MixDisplay *obj); -gboolean (*MixDisplayEqualFunction) (MixDisplay *first, +bool (*MixDisplayEqualFunction) (MixDisplay *first, MixDisplay *second); #define MIX_VALUE_HOLDS_DISPLAY (value) #define MIX_DISPLAY_REFCOUNT (obj) #define MIX_DISPLAY_REFCOUNT_VALUE (obj) MixDisplay; MixDisplay * mix_display_new (); -gboolean mix_display_copy (MixDisplay *target, +bool mix_display_copy (MixDisplay *target, const MixDisplay *src); MixDisplay * mix_display_ref (MixDisplay *obj); void mix_display_unref (MixDisplay *obj); void mix_display_replace (MixDisplay **olddata, MixDisplay *newdata); MixDisplay * mix_display_dup (const MixDisplay *obj); -gboolean mix_display_equal (MixDisplay *first, +bool mix_display_equal (MixDisplay *first, MixDisplay *second); #define MIX_TYPE_PARAM_DISPLAY #define MIX_IS_PARAM_SPEC_DISPLAY (pspec) @@ -133,7 +133,7 @@ Virtual function prototype for methods to create duplicate of instance.


MixDisplayCopyFunction ()

-
gboolean            (*MixDisplayCopyFunction)           (MixDisplay *target,
+
bool            (*MixDisplayCopyFunction)           (MixDisplay *target,
                                                          const MixDisplay *src);

Virtual function prototype for methods to create copies of instance.

@@ -177,7 +177,7 @@ object.


MixDisplayEqualFunction ()

-
gboolean            (*MixDisplayEqualFunction)          (MixDisplay *first,
+
bool            (*MixDisplayEqualFunction)          (MixDisplay *first,
                                                          MixDisplay *second);

@@ -232,7 +232,7 @@ Get the reference count value of the object

MixDisplay

typedef struct {
-  gint refcount;
+  int refcount;
 } MixDisplay;
 

@@ -240,7 +240,7 @@ Base class for a refcounted parameter objects.



mix_display_copy ()

-
gboolean            mix_display_copy                    (MixDisplay *target,
+
bool            mix_display_copy                    (MixDisplay *target,
                                                          const MixDisplay *src);

Copy data from one instance to the other. This method internally invokes the "copy" method so that the derived object is copied correctly.

@@ -374,7 +374,7 @@ Duplicate the given

mix_display_equal ()

-
gboolean            mix_display_equal                   (MixDisplay *first,
+
bool            mix_display_equal                   (MixDisplay *first,
                                                          MixDisplay *second);

Note that the parameter comparison compares the values held inside the object, not whether the two pointers refer to the same instance.
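
A two-line illustration of that distinction, using mix_display_dup from the synopsis above; the expected results are inferred from the note rather than stated by this page:

    MixDisplay *a = mix_display_new();
    MixDisplay *b = mix_display_dup(a);        /* distinct instance, same values  */

    bool by_value = mix_display_equal(a, b);   /* expected true: value comparison */
    bool by_ptr   = (a == b);                  /* false: two different instances  */

    mix_display_unref(b);
    mix_display_unref(a);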

diff --git a/mix_video/docs/reference/MixVideo/html/MixVideo-mixvideodef.html b/mix_video/docs/reference/MixVideo/html/MixVideo-mixvideodef.html
index 3250169..6f6ea67 100644
--- a/mix_video/docs/reference/MixVideo/html/MixVideo-mixvideodef.html
+++ b/mix_video/docs/reference/MixVideo/html/MixVideo-mixvideodef.html
@@ -115,9 +115,9 @@ The section includes the definition of enum and struct as well as

MixIOVec

typedef struct {
-	guchar *data;
-	gint buffer_size;
-    gint data_size;
+	uchar *data;
+	int buffer_size;
+    int data_size;
 } MixIOVec;
 
@@ -125,10 +125,10 @@ The section includes the definition of enum and struct as well as

MixRect

typedef struct {
-	gshort x;
-	gshort y;
-	gushort width;
-	gushort height;
+	short x;
+	short y;
+	ushort width;
+	ushort height;
 } MixRect;
 
diff --git a/mix_video/docs/reference/MixVideo/html/MixVideo.html b/mix_video/docs/reference/MixVideo/html/MixVideo.html index f0fb27c..aa16589 100644 --- a/mix_video/docs/reference/MixVideo/html/MixVideo.html +++ b/mix_video/docs/reference/MixVideo/html/MixVideo.html @@ -51,8 +51,8 @@ MixVideo * mix_video_ref (MixVideo *mix); #define mix_video_unref (obj) MIX_RESULT mix_video_get_version (MixVideo *mix, - guint *major, - guint *minor); + uint *major, + uint *minor); MIX_RESULT mix_video_initialize (MixVideo *mix, MixCodecMode mode, MixVideoInitParams *init_params, @@ -65,7 +65,7 @@ MIX_RESULT MixVideoConfigParams **config_params); MIX_RESULT mix_video_decode (MixVideo *mix, MixBuffer *bufin[], - gint bufincnt, + int bufincnt, MixVideoDecodeParams *decode_params); MIX_RESULT mix_video_get_frame (MixVideo *mix, MixVideoFrame **frame); @@ -76,9 +76,9 @@ MIX_RESULT MixVideoFrame *frame); MIX_RESULT mix_video_encode (MixVideo *mix, MixBuffer *bufin[], - gint bufincnt, + int bufincnt, MixIOVec *iovout[], - gint iovoutcnt, + int iovoutcnt, MixVideoEncodeParams *encode_params); MIX_RESULT mix_video_flush (MixVideo *mix); MIX_RESULT mix_video_eos (MixVideo *mix); @@ -89,7 +89,7 @@ MIX_RESULT mix_video_release_mixbuffer (MixVideo *mix, MixBuffer *buf); MIX_RESULT mix_video_get_max_coded_buffer_size (MixVideo *mix, - guint *bufsize); + uint *bufsize);
@@ -224,8 +224,8 @@ Decrement reference count of the object.

mix_video_get_version ()

MIX_RESULT          mix_video_get_version               (MixVideo *mix,
-                                                         guint *major,
-                                                         guint *minor);
+                                                         uint *major,
+                                                         uint *minor);

This function will return the major and minor version numbers of the library.

-  gint refcount;
+  int refcount;

atomic refcount
@@ -422,7 +422,7 @@ For

mix_video_decode ()

MIX_RESULT          mix_video_decode                    (MixVideo *mix,
                                                          MixBuffer *bufin[],
-                                                         gint bufincnt,
+                                                         int bufincnt,
                                                          MixVideoDecodeParams *decode_params);
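
A minimal submission sketch built from the signatures on this page; constructing mix, the MixBuffer array and decode_params is covered in their own sections, and MIX_RESULT_SUCCESS is an assumed success code:

    MixBuffer *bufin[2] = { buf0, buf1 };   /* coded data wrapped in MixBuffers */
    MixVideoFrame *frame = NULL;

    if (mix_video_decode(mix, bufin, 2, decode_params) == MIX_RESULT_SUCCESS) {
        /* Decoded frames are pulled separately: */
        if (mix_video_get_frame(mix, &frame) == MIX_RESULT_SUCCESS) {
            /* display the frame, then release it back to MI-X */
        }
    }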

@@ -630,9 +630,9 @@ The display is either an X11 Pixmap or an X11 Window using the overlay.

mix_video_encode ()

MIX_RESULT          mix_video_encode                    (MixVideo *mix,
                                                          MixBuffer *bufin[],
-                                                         gint bufincnt,
+                                                         int bufincnt,
                                                          MixIOVec *iovout[],
-                                                         gint iovoutcnt,
+                                                         int iovoutcnt,
                                                          MixVideoEncodeParams *encode_params);

@@ -916,7 +916,7 @@ This function releases a frame object that was acquired from

mix_video_get_max_coded_buffer_size ()

MIX_RESULT          mix_video_get_max_coded_buffer_size (MixVideo *mix,
-                                                         guint *bufsize);
+ uint *bufsize);

@@ -938,7 +938,7 @@ This function can only be called once

bufsize :

-
diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDec.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDec.html index 91ca416..592d5c2 100644 --- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDec.html +++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDec.html @@ -59,50 +59,50 @@ MIX_RESULT MixIOVec **header); MIX_RESULT mix_videoconfigparamsdec_set_mime_type (MixVideoConfigParamsDec *obj, - const gchar *mime_type); + const char *mime_type); MIX_RESULT mix_videoconfigparamsdec_get_mime_type (MixVideoConfigParamsDec *obj, - gchar **mime_type); + char **mime_type); MIX_RESULT mix_videoconfigparamsdec_set_frame_rate (MixVideoConfigParamsDec *obj, - guint frame_rate_num, - guint frame_rate_denom); + uint frame_rate_num, + uint frame_rate_denom); MIX_RESULT mix_videoconfigparamsdec_get_frame_rate (MixVideoConfigParamsDec *obj, - guint *frame_rate_num, - guint *frame_rate_denom); + uint *frame_rate_num, + uint *frame_rate_denom); MIX_RESULT mix_videoconfigparamsdec_set_picture_res (MixVideoConfigParamsDec *obj, - guint picture_width, - guint picture_height); + uint picture_width, + uint picture_height); MIX_RESULT mix_videoconfigparamsdec_get_picture_res (MixVideoConfigParamsDec *obj, - guint *picture_width, - guint *picture_height); + uint *picture_width, + uint *picture_height); MIX_RESULT mix_videoconfigparamsdec_set_raw_format (MixVideoConfigParamsDec *obj, - guint raw_format); + uint raw_format); MIX_RESULT mix_videoconfigparamsdec_get_raw_format (MixVideoConfigParamsDec *obj, - guint *raw_format); + uint *raw_format); MIX_RESULT mix_videoconfigparamsdec_set_rate_control (MixVideoConfigParamsDec *obj, - guint rate_control); + uint rate_control); MIX_RESULT mix_videoconfigparamsdec_get_rate_control (MixVideoConfigParamsDec *obj, - guint *rate_control); + uint *rate_control); MIX_RESULT mix_videoconfigparamsdec_set_buffer_pool_size (MixVideoConfigParamsDec *obj, - guint bufpoolsize); + uint bufpoolsize); MIX_RESULT mix_videoconfigparamsdec_get_buffer_pool_size (MixVideoConfigParamsDec *obj, - guint *bufpoolsize); + uint *bufpoolsize); MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation (MixVideoConfigParamsDec *obj, - guint extra_surface_allocation); + uint extra_surface_allocation); MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation (MixVideoConfigParamsDec *obj, - guint *extra_surface_allocation); + uint *extra_surface_allocation);
@@ -140,29 +140,29 @@ A base object of MI-X video decode configuration parameter objects.

  GString * mime_type;
 
  /* Frame rate numerator value */
-  guint frame_rate_num;
+  uint frame_rate_num;
 
  /* Frame rate denominator value */
-  guint frame_rate_denom;
+  uint frame_rate_denom;
 
  /* Picture width */
-  gulong picture_width;
+  ulong picture_width;
 
  /* Picture height */
-  gulong picture_height;
+  ulong picture_height;
 
  /* Render target format */
-  guint raw_format;
+  uint raw_format;
 
  /* Rate control: CBR, VBR, none. Only valid for encoding.
   * This should be set to none for decoding. */
-  guint rate_control;
+  uint rate_control;
 
  /* Size of pool of MixBuffers to allocate */
-  guint mixbuffer_pool_size;
+  uint mixbuffer_pool_size;
 
  /* Extra surfaces for MixVideoFrame objects to be allocated */
-  guint extra_surface_allocation;
+  uint extra_surface_allocation;
 
  /* Reserved for future use */
  void *reserved1;

@@ -329,7 +329,7 @@ Get stream header information.

Note

-Caller is responsible to g_free (*header)->data field and *header
+Caller is responsible to free (*header)->data field and *header
-Pointer to guint.
+Pointer to uint.
@@ -357,7 +357,7 @@ Caller is responsible to g_free (*header)->data field and *header

mix_videoconfigparamsdec_set_mime_type ()

MIX_RESULT          mix_videoconfigparamsdec_set_mime_type
                                                         (MixVideoConfigParamsDec *obj,
-                                                         const gchar *mime_type);
+ const char *mime_type);

Set stream mime type

@@ -386,13 +386,13 @@ Set stream mime type

mix_videoconfigparamsdec_get_mime_type ()

MIX_RESULT          mix_videoconfigparamsdec_get_mime_type
                                                         (MixVideoConfigParamsDec *obj,
-                                                         gchar **mime_type);
+ char **mime_type);

Get mime type

Note

-Caller is responsible to g_free *mime_type
+Caller is responsible to free *mime_type
@@ -404,7 +404,7 @@ Caller is responsible to g_free *mime_type - @@ -420,8 +420,8 @@ Caller is responsible to g_free *mime_type

mix_videoconfigparamsdec_set_frame_rate ()

MIX_RESULT          mix_videoconfigparamsdec_set_frame_rate
                                                         (MixVideoConfigParamsDec *obj,
-                                                         guint frame_rate_num,
-                                                         guint frame_rate_denom);
+                                                         uint frame_rate_num,
+                                                         uint frame_rate_denom);

Set frame rate
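
The decode-side setters on this page combine naturally; a configuration sketch in which mix_videoconfigparamsdec_new() is an assumed constructor (this page documents only the accessors) and the values are illustrative:

    MixVideoConfigParamsDec *cfg = mix_videoconfigparamsdec_new();   /* assumed ctor */

    mix_videoconfigparamsdec_set_mime_type(cfg, "video/x-h264");     /* illustrative */
    mix_videoconfigparamsdec_set_frame_rate(cfg, 30000, 1001);       /* 29.97 fps    */
    mix_videoconfigparamsdec_set_picture_res(cfg, 1280, 720);
    mix_videoconfigparamsdec_set_buffer_pool_size(cfg, 8);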

mime_type :

-Pointer to pointer of type gchar
+Pointer to pointer of type char
@@ -455,8 +455,8 @@ Set frame rate

mix_videoconfigparamsdec_get_frame_rate ()

MIX_RESULT          mix_videoconfigparamsdec_get_frame_rate
                                                         (MixVideoConfigParamsDec *obj,
-                                                         guint *frame_rate_num,
-                                                         guint *frame_rate_denom);
+                                                         uint *frame_rate_num,
+                                                         uint *frame_rate_denom);

Get frame rate

@@ -490,8 +490,8 @@ Get frame rate

mix_videoconfigparamsdec_set_picture_res ()

MIX_RESULT          mix_videoconfigparamsdec_set_picture_res
                                                         (MixVideoConfigParamsDec *obj,
-                                                         guint picture_width,
-                                                         guint picture_height);
+                                                         uint picture_width,
+                                                         uint picture_height);

Set video resolution

@@ -525,8 +525,8 @@ Set video resolution

mix_videoconfigparamsdec_get_picture_res ()

MIX_RESULT          mix_videoconfigparamsdec_get_picture_res
                                                         (MixVideoConfigParamsDec *obj,
-                                                         guint *picture_width,
-                                                         guint *picture_height);
+                                                         uint *picture_width,
+                                                         uint *picture_height);

Get video resolution

@@ -560,7 +560,7 @@ Get video resolution

mix_videoconfigparamsdec_set_raw_format ()

MIX_RESULT          mix_videoconfigparamsdec_set_raw_format
                                                         (MixVideoConfigParamsDec *obj,
-                                                         guint raw_format);
+ uint raw_format);

Set Render target format

@@ -589,7 +589,7 @@ Set Render target format

mix_videoconfigparamsdec_get_raw_format ()

MIX_RESULT          mix_videoconfigparamsdec_get_raw_format
                                                         (MixVideoConfigParamsDec *obj,
-                                                         guint *raw_format);
+ uint *raw_format);

Get Render target format

@@ -618,7 +618,7 @@ Get Render target format

mix_videoconfigparamsdec_set_rate_control ()

MIX_RESULT          mix_videoconfigparamsdec_set_rate_control
                                                         (MixVideoConfigParamsDec *obj,
-                                                         guint rate_control);
+ uint rate_control);

Set rate control

@@ -648,7 +648,7 @@ Set rate control

mix_videoconfigparamsdec_get_rate_control ()

MIX_RESULT          mix_videoconfigparamsdec_get_rate_control
                                                         (MixVideoConfigParamsDec *obj,
-                                                         guint *rate_control);
+ uint *rate_control);

Get rate control

@@ -677,7 +677,7 @@ Get rate control

mix_videoconfigparamsdec_set_buffer_pool_size ()

MIX_RESULT          mix_videoconfigparamsdec_set_buffer_pool_size
                                                         (MixVideoConfigParamsDec *obj,
-                                                         guint bufpoolsize);
+ uint bufpoolsize);

Set buffer pool size

@@ -706,7 +706,7 @@ Set buffer pool size

mix_videoconfigparamsdec_get_buffer_pool_size ()

MIX_RESULT          mix_videoconfigparamsdec_get_buffer_pool_size
                                                         (MixVideoConfigParamsDec *obj,
-                                                         guint *bufpoolsize);
+ uint *bufpoolsize);

Get buffer pool size

@@ -735,7 +735,7 @@ Get buffer pool size

mix_videoconfigparamsdec_set_extra_surface_allocation ()

MIX_RESULT          mix_videoconfigparamsdec_set_extra_surface_allocation
                                                         (MixVideoConfigParamsDec *obj,
-                                                         guint extra_surface_allocation);
+ uint extra_surface_allocation);

Set extra surface allocation

@@ -764,7 +764,7 @@ Set extra surface allocation

mix_videoconfigparamsdec_get_extra_surface_allocation ()

MIX_RESULT          mix_videoconfigparamsdec_get_extra_surface_allocation
                                                         (MixVideoConfigParamsDec *obj,
-                                                         guint *extra_surface_allocation);
+ uint *extra_surface_allocation);

Get extra surface allocation

diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecMP42.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecMP42.html index 9ef4860..07bf4e6 100644 --- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecMP42.html +++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecMP42.html @@ -49,16 +49,16 @@ #define mix_videoconfigparamsdec_mp42_unref (obj) MIX_RESULT mix_videoconfigparamsdec_mp42_set_mpegversion (MixVideoConfigParamsDecMP42 *obj, - guint version); + uint version); MIX_RESULT mix_videoconfigparamsdec_mp42_get_mpegversion (MixVideoConfigParamsDecMP42 *obj, - guint *version); + uint *version); MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion (MixVideoConfigParamsDecMP42 *obj, - guint version); + uint version); MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion (MixVideoConfigParamsDecMP42 *obj, - guint *version); + uint *version);
@@ -84,10 +84,10 @@ MI-X video MPEG 4:2 decode configuration parameter objects.

  /* MPEG version */
-  guint mpegversion;
+  uint mpegversion;
 
  /* DivX version */
-  guint divxversion;
+  uint divxversion;
 
  /* Reserved for future use */
  void *reserved1;

@@ -142,7 +142,7 @@ Decrement reference count of the object.

mix_videoconfigparamsdec_mp42_set_mpegversion ()

MIX_RESULT          mix_videoconfigparamsdec_mp42_set_mpegversion
                                                         (MixVideoConfigParamsDecMP42 *obj,
-                                                         guint version);
+ uint version);

Set MPEG version

@@ -171,7 +171,7 @@ Set MPEG version

mix_videoconfigparamsdec_mp42_get_mpegversion ()

MIX_RESULT          mix_videoconfigparamsdec_mp42_get_mpegversion
                                                         (MixVideoConfigParamsDecMP42 *obj,
-                                                         guint *version);
+ uint *version);

Get MPEG version

@@ -200,7 +200,7 @@ Get MPEG version

mix_videoconfigparamsdec_mp42_set_divxversion ()

MIX_RESULT          mix_videoconfigparamsdec_mp42_set_divxversion
                                                         (MixVideoConfigParamsDecMP42 *obj,
-                                                         guint version);
+ uint version);

Set DivX version

@@ -229,7 +229,7 @@ Get DivX version

mix_videoconfigparamsdec_mp42_get_divxversion ()

MIX_RESULT          mix_videoconfigparamsdec_mp42_get_divxversion
                                                         (MixVideoConfigParamsDecMP42 *obj,
-                                                         guint *version);
+                                                         uint *version);

diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecVC1.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecVC1.html
index 063ae2b..aec7d28 100644
--- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecVC1.html
+++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecVC1.html
@@ -75,10 +75,10 @@ MI-X video VC-1 decode configuration parameter objects.

/* TODO: wmv_version and fourcc type might be changed later */ /* WMV version */ - guint wmv_version; + uint wmv_version; /* FourCC code */ - guint fourcc; + uint fourcc; /* Reserved for future use */ void *reserved1; diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEnc.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEnc.html index f02e153..b3c9de6 100644 --- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEnc.html +++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEnc.html @@ -49,26 +49,26 @@ #define mix_videoconfigparamsenc_unref (obj) MIX_RESULT mix_videoconfigparamsenc_set_mime_type (MixVideoConfigParamsEnc *obj, - const gchar *mime_type); + const char *mime_type); MIX_RESULT mix_videoconfigparamsenc_get_mime_type (MixVideoConfigParamsEnc *obj, - gchar **mime_type); + char **mime_type); MIX_RESULT mix_videoconfigparamsenc_set_frame_rate (MixVideoConfigParamsEnc *obj, - guint frame_rate_num, - guint frame_rate_denom); + uint frame_rate_num, + uint frame_rate_denom); MIX_RESULT mix_videoconfigparamsenc_get_frame_rate (MixVideoConfigParamsEnc *obj, - guint *frame_rate_num, - guint *frame_rate_denom); + uint *frame_rate_num, + uint *frame_rate_denom); MIX_RESULT mix_videoconfigparamsenc_set_picture_res (MixVideoConfigParamsEnc *obj, - guint picture_width, - guint picture_height); + uint picture_width, + uint picture_height); MIX_RESULT mix_videoconfigparamsenc_get_picture_res (MixVideoConfigParamsEnc *obj, - guint *picture_width, - guint *picture_height); + uint *picture_width, + uint *picture_height); MIX_RESULT mix_videoconfigparamsenc_set_encode_format (MixVideoConfigParamsEnc *obj, MixEncodeTargetFormat encode_format); @@ -77,58 +77,58 @@ MIX_RESULT MixEncodeTargetFormat *encode_format); MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc *obj, - guint bps); + uint bps); MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc *obj, - guint *bps); + uint *bps); MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc *obj, - guint initial_qp); + uint initial_qp); MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc *obj, - guint *initial_qp); + uint *initial_qp); MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc *obj, - guint min_qp); + uint min_qp); MIX_RESULT mix_videoconfigparamsenc_get_min_qp (MixVideoConfigParamsEnc *obj, - guint *min_qp); + uint *min_qp); MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc *obj, - guint intra_period); + uint intra_period); MIX_RESULT mix_videoconfigparamsenc_get_intra_period (MixVideoConfigParamsEnc *obj, - guint *intra_period); + uint *intra_period); MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size (MixVideoConfigParamsEnc *obj, - guint bufpoolsize); + uint bufpoolsize); MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size (MixVideoConfigParamsEnc *obj, - guint *bufpoolsize); + uint *bufpoolsize); MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode (MixVideoConfigParamsEnc *obj, - gboolean share_buf_mod); + bool share_buf_mod); MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode (MixVideoConfigParamsEnc *obj, - gboolean *share_buf_mod); + bool *share_buf_mod); MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info (MixVideoConfigParamsEnc *obj, - gulong *ci_frame_id, - guint ci_frame_num); + ulong *ci_frame_id, + uint ci_frame_num); MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc *obj, - gulong * *ci_frame_id, 
- guint *ci_frame_num); + ulong * *ci_frame_id, + uint *ci_frame_num); MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc *obj, - gulong draw); + ulong draw); MIX_RESULT mix_videoconfigparamsenc_get_drawable (MixVideoConfigParamsEnc *obj, - gulong *draw); + ulong *draw); MIX_RESULT mix_videoconfigparamsenc_set_need_display (MixVideoConfigParamsEnc *obj, - gboolean need_display); + bool need_display); MIX_RESULT mix_videoconfigparamsenc_get_need_display (MixVideoConfigParamsEnc *obj, - gboolean *need_display); + bool *need_display); MIX_RESULT mix_videoconfigparamsenc_set_rate_control (MixVideoConfigParamsEnc *obj, MixRateControl rcmode); @@ -185,28 +185,28 @@ A base object of MI-X video encode configuration parameter objects.

  MixRateControl rate_control;
 
  /* Bitrate when rate control is used */
-  guint bitrate;
+  uint bitrate;
 
  /* Numerator of frame rate */
-  guint frame_rate_num;
+  uint frame_rate_num;
 
  /* Denominator of frame rate */
-  guint frame_rate_denom;
+  uint frame_rate_denom;
 
  /* The initial QP value */
-  guint initial_qp;
+  uint initial_qp;
 
  /* The minimum QP value */
-  guint min_qp;
+  uint min_qp;
 
  /* Number of frames between key frames (GOP size) */
-  guint intra_period;
+  uint intra_period;
 
  /* Width of video frame */
-  guint16 picture_width;
+  uint16 picture_width;
 
  /* Height of the video frame */
-  guint16 picture_height;
+  uint16 picture_height;
 
  /* Mime type, reserved */
  GString * mime_type;

@@ -215,21 +215,21 @@ A base object of MI-X video encode configuration parameter objects.

  MixEncodeTargetFormat encode_format;
 
  /* Size of the pool of MixBuffer objects */
-  guint mixbuffer_pool_size;
+  uint mixbuffer_pool_size;
 
  /* Are buffers shared between capture and encoding drivers */
-  gboolean share_buf_mode;
+  bool share_buf_mode;
 
  /* Array of frame IDs created by capture library */
-  gulong * ci_frame_id;
+  ulong * ci_frame_id;
 
  /* Size of the array ci_frame_id */
-  guint ci_frame_num;
+  uint ci_frame_num;
 
  /* Indicates whether MixVideoFrames suitable for displaying
   * need to be enqueued for retrieval using mix_video_get_frame() */
-  gboolean need_display;
+  bool need_display;
 
  /* Reserved for future use */
  void *reserved1;

@@ -305,7 +305,7 @@ Decrement reference count of the object.

mix_videoconfigparamsenc_set_mime_type ()

MIX_RESULT          mix_videoconfigparamsenc_set_mime_type
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         const gchar *mime_type);
+ const char *mime_type);

Set mime type

@@ -334,7 +334,7 @@ Set mime type

mix_videoconfigparamsenc_get_mime_type ()

MIX_RESULT          mix_videoconfigparamsenc_get_mime_type
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         gchar **mime_type);
+ char **mime_type);

Get mime type

@@ -342,7 +342,7 @@ Get mime type

Note

-Caller is responsible to g_free *mime_type
+Caller is responsible to free *mime_type
@@ -370,8 +370,8 @@ Caller is responsible to g_free *mime_type

mix_videoconfigparamsenc_set_frame_rate ()

MIX_RESULT          mix_videoconfigparamsenc_set_frame_rate
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         guint frame_rate_num,
-                                                         guint frame_rate_denom);
+                                                         uint frame_rate_num,
+                                                         uint frame_rate_denom);

Set frame rate

@@ -405,8 +405,8 @@ Set frame rate

mix_videoconfigparamsenc_get_frame_rate ()

MIX_RESULT          mix_videoconfigparamsenc_get_frame_rate
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         guint *frame_rate_num,
-                                                         guint *frame_rate_denom);
+                                                         uint *frame_rate_num,
+                                                         uint *frame_rate_denom);

Get frame rate

@@ -440,8 +440,8 @@ Get frame rate

mix_videoconfigparamsenc_set_picture_res ()

MIX_RESULT          mix_videoconfigparamsenc_set_picture_res
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         guint picture_width,
-                                                         guint picture_height);
+                                                         uint picture_width,
+                                                         uint picture_height);

Set width and height of video frame

@@ -475,8 +475,8 @@ Set width and height of video frame

mix_videoconfigparamsenc_get_picture_res ()

MIX_RESULT          mix_videoconfigparamsenc_get_picture_res
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         guint *picture_width,
-                                                         guint *picture_height);
+                                                         uint *picture_width,
+                                                         uint *picture_height);

Get width and height of video frame

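Taken together, the setters above cover the core stream parameters. A minimal configuration sketch; mix_videoconfigparamsenc_new() is assumed here from the library's usual _new() constructor pattern, and the mime string and values are illustrative:

    MixVideoConfigParamsEnc *enc = mix_videoconfigparamsenc_new();  /* assumed ctor */
    if (enc != NULL) {
        mix_videoconfigparamsenc_set_mime_type(enc, "video/h264");  /* illustrative */
        mix_videoconfigparamsenc_set_frame_rate(enc, 30000, 1001);  /* 29.97 fps as num/denom */
        mix_videoconfigparamsenc_set_picture_res(enc, 1280, 720);
        /* ... hand enc to MixVideo for encode configuration, then: */
        mix_videoconfigparamsenc_unref(enc);
    }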
@@ -568,7 +568,7 @@ Get Encode target format

mix_videoconfigparamsenc_set_bit_rate ()

MIX_RESULT          mix_videoconfigparamsenc_set_bit_rate
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         guint bps);
+ uint bps);

Set bitrate

@@ -597,7 +597,7 @@ Set bitrate

mix_videoconfigparamsenc_get_bit_rate ()

MIX_RESULT          mix_videoconfigparamsenc_get_bit_rate
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         guint *bps);
+ uint *bps);

Get bitrate

@@ -626,7 +626,7 @@ Get bitrate

mix_videoconfigparamsenc_set_init_qp ()

MIX_RESULT          mix_videoconfigparamsenc_set_init_qp
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         guint initial_qp);
+ uint initial_qp);

Set the initial QP value

@@ -655,7 +655,7 @@ Set the initial QP value

mix_videoconfigparamsenc_get_init_qp ()

MIX_RESULT          mix_videoconfigparamsenc_get_init_qp
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         guint *initial_qp);
+ uint *initial_qp);

Get the initial QP value

@@ -683,7 +683,7 @@ Get the initial QP value

mix_videoconfigparamsenc_set_min_qp ()

MIX_RESULT          mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc *obj,
-                                                         guint min_qp);
+ uint min_qp);

Set the minimum QP value

@@ -711,7 +711,7 @@ Set the minimum QP value

mix_videoconfigparamsenc_get_min_qp ()

MIX_RESULT          mix_videoconfigparamsenc_get_min_qp (MixVideoConfigParamsEnc *obj,
-                                                         guint *min_qp);
+ uint *min_qp);

Get the minimum QP value

@@ -740,7 +740,7 @@ Get the minimum QP value

mix_videoconfigparamsenc_set_intra_period ()

MIX_RESULT          mix_videoconfigparamsenc_set_intra_period
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         guint intra_period);
+ uint intra_period);

Set the number of frames between key frames (GOP size)

@@ -769,7 +769,7 @@ Set the number of frames between key frames (GOP size)

mix_videoconfigparamsenc_get_intra_period ()

MIX_RESULT          mix_videoconfigparamsenc_get_intra_period
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         guint *intra_period);
+ uint *intra_period);

Get the number of frames between key frames (GOP size)

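The QP and GOP setters above pair naturally with the bit-rate call. A hedged sketch reusing enc from the earlier example; the values are illustrative, not tuning advice:

    mix_videoconfigparamsenc_set_bit_rate(enc, 4000000);  /* 4 Mbit/s */
    mix_videoconfigparamsenc_set_init_qp(enc, 26);        /* starting QP */
    mix_videoconfigparamsenc_set_min_qp(enc, 10);         /* lower clamp for rate control */
    mix_videoconfigparamsenc_set_intra_period(enc, 30);   /* one key frame every 30 frames */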
@@ -798,7 +798,7 @@ Get the number of frames between key frames (GOP size)

mix_videoconfigparamsenc_set_buffer_pool_size ()

MIX_RESULT          mix_videoconfigparamsenc_set_buffer_pool_size
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         guint bufpoolsize);
+ uint bufpoolsize);

Set the size of the pool of MixBuffer objects

@@ -827,14 +827,14 @@ Set the size of the pool of

mix_videoconfigparamsenc_get_buffer_pool_size ()

MIX_RESULT          mix_videoconfigparamsenc_get_buffer_pool_size
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         guint *bufpoolsize);
+ uint *bufpoolsize);

mix_videoconfigparamsenc_set_share_buf_mode ()

MIX_RESULT          mix_videoconfigparamsenc_set_share_buf_mode
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         gboolean share_buf_mod);
+ bool share_buf_mod);

Set the flag that indicates whether buffers are shared between capture and encoding drivers or not

@@ -864,7 +864,7 @@ Set the flag that indicates whether buffers are shared between capture and encod

mix_videoconfigparamsenc_get_share_buf_mode ()

MIX_RESULT          mix_videoconfigparamsenc_get_share_buf_mode
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         gboolean *share_buf_mod);
+ bool *share_buf_mod);

Get the flag that indicates whether buffers are shared between capture and encoding drivers or not

@@ -894,8 +894,8 @@ Get the flag that indicates whether buffers are shared between capture and encod

mix_videoconfigparamsenc_set_ci_frame_info ()

MIX_RESULT          mix_videoconfigparamsenc_set_ci_frame_info
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         gulong *ci_frame_id,
-                                                         guint ci_frame_num);
+                                                         ulong *ci_frame_id,
+                                                         uint ci_frame_num);

Set CI frame information

@@ -929,14 +929,14 @@ Set CI frame information

mix_videoconfigparamsenc_get_ci_frame_info ()

MIX_RESULT          mix_videoconfigparamsenc_get_ci_frame_info
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         gulong * *ci_frame_id,
-                                                         guint *ci_frame_num);
+                                                         ulong * *ci_frame_id,
+                                                         uint *ci_frame_num);

Get CI frame information

Note

-Caller is responsible to g_free *ci_frame_id
+Caller is responsible for freeing *ci_frame_id
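As with the mime type, the getter hands back an allocation the caller must release. A sketch of the round trip under the same assumptions as the earlier examples:

    ulong *ids = NULL;
    uint count = 0;
    if (mix_videoconfigparamsenc_get_ci_frame_info(enc, &ids, &count) == MIX_RESULT_SUCCESS) {
        for (uint i = 0; i < count; i++) {
            /* ... inspect ids[i] ... */
        }
        free(ids);  /* per the note above, the caller frees the returned array */
    }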
@@ -969,7 +969,7 @@ Caller is responsible to g_free *ci_frame_id

mix_videoconfigparamsenc_set_drawable ()

MIX_RESULT          mix_videoconfigparamsenc_set_drawable
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         gulong draw);
+ ulong draw);

Set drawable

@@ -998,7 +998,7 @@ Set drawable

mix_videoconfigparamsenc_get_drawable ()

MIX_RESULT          mix_videoconfigparamsenc_get_drawable
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         gulong *draw);
+ ulong *draw);

Get drawable

@@ -1027,7 +1027,7 @@ Get drawable

mix_videoconfigparamsenc_set_need_display ()

MIX_RESULT          mix_videoconfigparamsenc_set_need_display
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         gboolean need_display);
+ bool need_display);

Set the flag used to indicate whether MixVideoFrames suitable for displaying need to be enqueued for retrieval using mix_video_get_frame()

@@ -1058,7 +1058,7 @@ need to be enqueued for retrieval using

mix_videoconfigparamsenc_get_need_display ()

MIX_RESULT          mix_videoconfigparamsenc_get_need_display
                                                         (MixVideoConfigParamsEnc *obj,
-                                                         gboolean *need_display);
+ bool *need_display);

Get the flag used to indicate whether MixVideoFrames suitable for displaying need to be enqueued for retrieval using mix_video_get_frame()

diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncH264.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncH264.html index 524f115..fd6d7d7 100644 --- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncH264.html +++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncH264.html @@ -49,22 +49,22 @@ #define mix_videoconfigparamsenc_h264_unref (obj) MIX_RESULT mix_videoconfigparamsenc_h264_set_bus (MixVideoConfigParamsEncH264 *obj, - guint basic_unit_size); + uint basic_unit_size); MIX_RESULT mix_videoconfigparamsenc_h264_get_bus (MixVideoConfigParamsEncH264 *obj, - guint *basic_unit_size); + uint *basic_unit_size); MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (MixVideoConfigParamsEncH264 *obj, - guint disable_deblocking_filter_idc); + uint disable_deblocking_filter_idc); MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk (MixVideoConfigParamsEncH264 *obj, - guint *disable_deblocking_filter_idc); + uint *disable_deblocking_filter_idc); MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num (MixVideoConfigParamsEncH264 *obj, - guint slice_num); + uint slice_num); MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num (MixVideoConfigParamsEncH264 *obj, - guint *slice_num); + uint *slice_num); MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParamsEncH264 *obj, MixDelimiterType delimiter_type); @@ -98,13 +98,13 @@ MI-X video H.264 encode configuration parameter objects.

/* TODO: Add H.264 configuration paramters */ /* The basic unit size used by rate control */ - guint basic_unit_size; + uint basic_unit_size; /* Number of slices in one frame */ - guint slice_num; + uint slice_num; /* enable/disable deblocking */ - guint8 disable_deblocking_filter_idc; + uint8 disable_deblocking_filter_idc; /* delimiter_type */ MixDelimiterType delimiter_type; @@ -162,7 +162,7 @@ Decrement reference count of the object.

mix_videoconfigparamsenc_h264_set_bus ()

MIX_RESULT          mix_videoconfigparamsenc_h264_set_bus
                                                         (MixVideoConfigParamsEncH264 *obj,
-                                                         guint basic_unit_size);
+ uint basic_unit_size);

Set the basic unit size used by rate control

@@ -191,7 +191,7 @@ Set the basic unit size used by rate control

mix_videoconfigparamsenc_h264_get_bus ()

MIX_RESULT          mix_videoconfigparamsenc_h264_get_bus
                                                         (MixVideoConfigParamsEncH264 *obj,
-                                                         guint *basic_unit_size);
+ uint *basic_unit_size);

Get the basic unit size used by rate control

@@ -220,7 +220,7 @@ Get the basic unit size used by rate control

mix_videoconfigparamsenc_h264_set_dlk ()

MIX_RESULT          mix_videoconfigparamsenc_h264_set_dlk
                                                         (MixVideoConfigParamsEncH264 *obj,
-                                                         guint disable_deblocking_filter_idc);
+ uint disable_deblocking_filter_idc);

Set the flag to enable/disable deblocking

@@ -249,7 +249,7 @@ Set the flag to enable/disable deblocking

mix_videoconfigparamsenc_h264_get_dlk ()

MIX_RESULT          mix_videoconfigparamsenc_h264_get_dlk
                                                         (MixVideoConfigParamsEncH264 *obj,
-                                                         guint *disable_deblocking_filter_idc);
+ uint *disable_deblocking_filter_idc);

Get the flag to enable/disable deblocking

@@ -278,7 +278,7 @@ Get the flag to enable/disable deblocking

mix_videoconfigparamsenc_h264_set_slice_num ()

MIX_RESULT          mix_videoconfigparamsenc_h264_set_slice_num
                                                         (MixVideoConfigParamsEncH264 *obj,
-                                                         guint slice_num);
+ uint slice_num);

Set the number of slices in one frame

@@ -307,7 +307,7 @@ Set the number of slices in one frame

mix_videoconfigparamsenc_h264_get_slice_num ()

MIX_RESULT          mix_videoconfigparamsenc_h264_get_slice_num
                                                         (MixVideoConfigParamsEncH264 *obj,
-                                                         guint *slice_num);
+ uint *slice_num);

Get the number of slices in one frame

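A short H.264-specific sketch tying the setters above together; mix_videoconfigparamsenc_h264_new() is assumed from the _new() pattern and the values are illustrative:

    MixVideoConfigParamsEncH264 *h264 = mix_videoconfigparamsenc_h264_new();  /* assumed ctor */
    if (h264 != NULL) {
        mix_videoconfigparamsenc_h264_set_bus(h264, 0);        /* basic unit size for rate control */
        mix_videoconfigparamsenc_h264_set_dlk(h264, 0);        /* 0 leaves the deblocking filter enabled */
        mix_videoconfigparamsenc_h264_set_slice_num(h264, 1);  /* one slice per frame */
        mix_videoconfigparamsenc_h264_unref(h264);
    }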
diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncMPEG4.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncMPEG4.html index 25a310e..706a209 100644 --- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncMPEG4.html +++ b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncMPEG4.html @@ -49,22 +49,22 @@ #define mix_videoconfigparamsenc_mpeg4_unref(obj) MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 *obj, - guint disable_deblocking_filter_idc); + uint disable_deblocking_filter_idc); MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (MixVideoConfigParamsEncMPEG4 *obj, - guint *disable_deblocking_filter_idc); + uint *disable_deblocking_filter_idc); MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParamsEncMPEG4 *obj, - guchar profile_and_level_indication); + uchar profile_and_level_indication); MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (MixVideoConfigParamsEncMPEG4 *obj, - guchar *profile_and_level_indication); + uchar *profile_and_level_indication); MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEncMPEG4 *obj, - guint fixed_vop_time_increment); + uint fixed_vop_time_increment); MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 *obj, - guint *fixed_vop_time_increment); + uint *fixed_vop_time_increment);
@@ -95,16 +95,16 @@ MI-X video MPEG 4:2 encode configuration parameter objects.

* Default value is 3. * Can be ignored (refer to encoding * specification for more info). */ - guchar profile_and_level_indication; + uchar profile_and_level_indication; /* Number of ticks between two successive VOPs * in display order. Default value is 3. * Can be ignored (refer to encoding specification * for more info) */ - guint fixed_vop_time_increment; + uint fixed_vop_time_increment; /* enable/disable deblocking */ - guint disable_deblocking_filter_idc; + uint disable_deblocking_filter_idc; /* Reserved for future use */ void *reserved1; @@ -159,7 +159,7 @@ Decrement reference count of the object.

mix_videoconfigparamsenc_mpeg4_set_dlk ()

MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_dlk
                                                         (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         guint disable_deblocking_filter_idc);
+ uint disable_deblocking_filter_idc);

Set the flag to enable/disable deblocking

@@ -188,7 +188,7 @@ Set the flag to enable/disable deblocking

mix_videoconfigparamsenc_mpeg4_get_dlk ()

MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_dlk
                                                         (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         guint *disable_deblocking_filter_idc);
+ uint *disable_deblocking_filter_idc);

Get the flag to enable/disable deblocking

@@ -217,7 +217,7 @@ Get the flag to enable/disable deblocking

mix_videoconfigparamsenc_mpeg4_set_profile_level ()

MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_profile_level
                                                         (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         guchar profile_and_level_indication);
+ uchar profile_and_level_indication);

Set profile_and_level_indication

@@ -248,7 +248,7 @@ Set profile_and_level_indication

mix_videoconfigparamsenc_mpeg4_get_profile_level ()

MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_profile_level
                                                         (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         guchar *profile_and_level_indication);
+ uchar *profile_and_level_indication);

Get profile_and_level_indication

@@ -279,14 +279,14 @@ Get profile_and_level_indication

mix_videoconfigparamsenc_mpeg4_set_fixed_vti ()

MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_fixed_vti
                                                         (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         guint fixed_vop_time_increment);
+ uint fixed_vop_time_increment);

mix_videoconfigparamsenc_mpeg4_get_fixed_vti ()

MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_fixed_vti
                                                         (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         guint *fixed_vop_time_increment);
+ uint *fixed_vop_time_increment);

Get fixed_vop_time_increment

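The MPEG-4 variant mirrors the H.264 one. A sketch using the defaults quoted in the struct comments above (3 for both profile_and_level_indication and fixed_vop_time_increment); mix_videoconfigparamsenc_mpeg4_new() is assumed:

    MixVideoConfigParamsEncMPEG4 *m4 = mix_videoconfigparamsenc_mpeg4_new();  /* assumed ctor */
    if (m4 != NULL) {
        mix_videoconfigparamsenc_mpeg4_set_profile_level(m4, 3);  /* default per the struct docs */
        mix_videoconfigparamsenc_mpeg4_set_fixed_vti(m4, 3);      /* default per the struct docs */
        mix_videoconfigparamsenc_mpeg4_set_dlk(m4, 0);
        mix_videoconfigparamsenc_mpeg4_unref(m4);
    }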
diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoDecodeParams.html b/mix_video/docs/reference/MixVideo/html/MixVideoDecodeParams.html index d6b8394..b86c19f 100644 --- a/mix_video/docs/reference/MixVideo/html/MixVideoDecodeParams.html +++ b/mix_video/docs/reference/MixVideo/html/MixVideoDecodeParams.html @@ -48,15 +48,15 @@ MixVideoDecodeParams * mix_videodecodeparams_ref (MixVideoDecodeParams *mix); #define mix_videodecodeparams_unref (obj) MIX_RESULT mix_videodecodeparams_set_timestamp (MixVideoDecodeParams *obj, - guint64 timestamp); + uint64 timestamp); MIX_RESULT mix_videodecodeparams_get_timestamp (MixVideoDecodeParams *obj, - guint64 *timestamp); + uint64 *timestamp); MIX_RESULT mix_videodecodeparams_set_discontinuity (MixVideoDecodeParams *obj, - gboolean discontinuity); + bool discontinuity); MIX_RESULT mix_videodecodeparams_get_discontinuity (MixVideoDecodeParams *obj, - gboolean *discontinuity); + bool *discontinuity);
@@ -84,10 +84,10 @@ and provided to MixVideo in the MixVideo

mix_videodecodeparams_set_timestamp ()

MIX_RESULT          mix_videodecodeparams_set_timestamp (MixVideoDecodeParams *obj,
-                                                         guint64 timestamp);
+ uint64 timestamp);

Set Presentation timestamp

@@ -190,7 +190,7 @@ Set Presentation timestamp

mix_videodecodeparams_get_timestamp ()

MIX_RESULT          mix_videodecodeparams_get_timestamp (MixVideoDecodeParams *obj,
-                                                         guint64 *timestamp);
+ uint64 *timestamp);

Get Presentation timestamp

@@ -219,7 +219,7 @@ Get Presentation timestamp

mix_videodecodeparams_set_discontinuity ()

MIX_RESULT          mix_videodecodeparams_set_discontinuity
                                                         (MixVideoDecodeParams *obj,
-                                                         gboolean discontinuity);
+ bool discontinuity);

Set discontinuity flag

@@ -248,7 +248,7 @@ Set discontinuity flag

mix_videodecodeparams_get_discontinuity ()

MIX_RESULT          mix_videodecodeparams_get_discontinuity
                                                         (MixVideoDecodeParams *obj,
-                                                         gboolean *discontinuity);
+ bool *discontinuity);

Get discontinuity flag

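A sketch of filling per-buffer decode parameters; mix_videodecodeparams_new() is assumed from the _new() pattern and the timestamp source is hypothetical:

    uint64 pts = 0;  /* e.g. taken from the demuxer */
    MixVideoDecodeParams *dp = mix_videodecodeparams_new();  /* assumed ctor */
    if (dp != NULL) {
        mix_videodecodeparams_set_timestamp(dp, pts);
        mix_videodecodeparams_set_discontinuity(dp, FALSE);  /* TRUE after a seek */
        /* ... pass dp to the decode call, then: */
        mix_videodecodeparams_unref(dp);
    }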
diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoFrame.html b/mix_video/docs/reference/MixVideo/html/MixVideoFrame.html index 5147c84..767321d 100644 --- a/mix_video/docs/reference/MixVideo/html/MixVideoFrame.html +++ b/mix_video/docs/reference/MixVideo/html/MixVideoFrame.html @@ -48,21 +48,21 @@ MixVideoFrame * mix_videoframe_ref (MixVideoFrame *obj); void mix_videoframe_unref (MixVideoFrame *obj); MIX_RESULT mix_videoframe_set_frame_id (MixVideoFrame *obj, - gulong frame_id); + ulong frame_id); MIX_RESULT mix_videoframe_get_frame_id (MixVideoFrame *obj, - gulong *frame_id); + ulong *frame_id); MIX_RESULT mix_videoframe_set_ci_frame_idx (MixVideoFrame *obj, - guint ci_frame_idx); + uint ci_frame_idx); MIX_RESULT mix_videoframe_get_ci_frame_idx (MixVideoFrame *obj, - guint *ci_frame_idx); + uint *ci_frame_idx); MIX_RESULT mix_videoframe_set_timestamp (MixVideoFrame *obj, - guint64 timestamp); + uint64 timestamp); MIX_RESULT mix_videoframe_get_timestamp (MixVideoFrame *obj, - guint64 *timestamp); + uint64 *timestamp); MIX_RESULT mix_videoframe_set_discontinuity (MixVideoFrame *obj, - gboolean discontinuity); + bool discontinuity); MIX_RESULT mix_videoframe_get_discontinuity (MixVideoFrame *obj, - gboolean *discontinuity); + bool *discontinuity);
@@ -104,23 +104,23 @@ needs to display/re-display this frame. /* ID associated with the decoded frame */ - gulong frame_id; + ulong frame_id; /* ID associated with the CI frame * (used for encode only) */ - guint ci_frame_idx; + uint ci_frame_idx; /* 64 bit timestamp. For decode, * this is preserved from the corresponding * MixVideoDecodeParams field. For encode, * this is created during encoding. */ - guint64 timestamp; + uint64 timestamp; /* Flag indicating whether there * is a discontinuity. For decode, * this is preserved from the corresponding * MixVideoDecodeParams field. */ - gboolean discontinuity; + bool discontinuity; /* Reserved for future use */ void *reserved1; @@ -194,7 +194,7 @@ Decrement reference count of the object.

mix_videoframe_set_frame_id ()

MIX_RESULT          mix_videoframe_set_frame_id         (MixVideoFrame *obj,
-                                                         gulong frame_id);
+ ulong frame_id);

Set Frame ID

@@ -222,7 +222,7 @@ Set Frame ID

mix_videoframe_get_frame_id ()

MIX_RESULT          mix_videoframe_get_frame_id         (MixVideoFrame *obj,
-                                                         gulong *frame_id);
+ ulong *frame_id);

Get Frame ID

@@ -250,7 +250,7 @@ Get Frame ID

mix_videoframe_set_ci_frame_idx ()

MIX_RESULT          mix_videoframe_set_ci_frame_idx     (MixVideoFrame *obj,
-                                                         guint ci_frame_idx);
+ uint ci_frame_idx);

Set CI Frame ID

@@ -278,7 +278,7 @@ Set CI Frame ID

mix_videoframe_get_ci_frame_idx ()

MIX_RESULT          mix_videoframe_get_ci_frame_idx     (MixVideoFrame *obj,
-                                                         guint *ci_frame_idx);
+ uint *ci_frame_idx);

Get CI Frame ID

@@ -306,7 +306,7 @@ Get CI Frame ID

mix_videoframe_set_timestamp ()

MIX_RESULT          mix_videoframe_set_timestamp        (MixVideoFrame *obj,
-                                                         guint64 timestamp);
+ uint64 timestamp);

Set Frame timestamp

@@ -334,7 +334,7 @@ Set Frame timestamp

mix_videoframe_get_timestamp ()

MIX_RESULT          mix_videoframe_get_timestamp        (MixVideoFrame *obj,
-                                                         guint64 *timestamp);
+ uint64 *timestamp);

Get Frame timestamp

@@ -362,7 +362,7 @@ Get Frame timestamp

mix_videoframe_set_discontinuity ()

MIX_RESULT          mix_videoframe_set_discontinuity    (MixVideoFrame *obj,
-                                                         gboolean discontinuity);
+ bool discontinuity);

Set discontinuity flag

@@ -390,7 +390,7 @@ Get discontinuity flag

mix_videoframe_get_discontinuity ()

MIX_RESULT          mix_videoframe_get_discontinuity    (MixVideoFrame *obj,
-                                                         gboolean *discontinuity);
+ bool *discontinuity);

Get discontinuity flag

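On the way back out, the same metadata can be read off a decoded frame. A sketch assuming frame came from mix_video_get_frame(); return codes are ignored for brevity:

    ulong id = 0;
    uint64 ts = 0;
    bool disc = FALSE;
    mix_videoframe_get_frame_id(frame, &id);
    mix_videoframe_get_timestamp(frame, &ts);        /* preserved from the decode params */
    mix_videoframe_get_discontinuity(frame, &disc);
    mix_videoframe_unref(frame);                     /* release the reference when done */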
diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoRenderParams.html b/mix_video/docs/reference/MixVideo/html/MixVideoRenderParams.html index 6d998bf..ff03b01 100644 --- a/mix_video/docs/reference/MixVideo/html/MixVideoRenderParams.html +++ b/mix_video/docs/reference/MixVideo/html/MixVideoRenderParams.html @@ -62,11 +62,11 @@ MIX_RESULT mix_videorenderparams_set_clipping_rects (MixVideoRenderParams *obj, MixRect *clipping_rects, - guint number_of_clipping_rects); + uint number_of_clipping_rects); MIX_RESULT mix_videorenderparams_get_clipping_rects (MixVideoRenderParams *obj, MixRect **clipping_rects, - guint *number_of_clipping_rects); + uint *number_of_clipping_rects);
@@ -109,25 +109,25 @@ and provided to

@@ -338,7 +338,7 @@ Get destination rectangle

MIX_RESULT          mix_videorenderparams_set_clipping_rects
                                                         (MixVideoRenderParams *obj,
                                                          MixRect *clipping_rects,
-                                                         guint number_of_clipping_rects);
+ uint number_of_clipping_rects);

Set clipping rectangles

@@ -373,7 +373,7 @@ Set clipping rectangles

MIX_RESULT          mix_videorenderparams_get_clipping_rects
                                                         (MixVideoRenderParams *obj,
                                                          MixRect **clipping_rects,
-                                                         guint *number_of_clipping_rects);
+ uint *number_of_clipping_rects);

Get clipping rectangles

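A sketch of the clipping-rectangle calls above; render is assumed to be a MixVideoRenderParams and the rectangle contents are left to the caller, since MixRect's fields are defined elsewhere:

    MixRect rects[2];
    /* ... populate both rectangles ... */
    mix_videorenderparams_set_clipping_rects(render, rects, 2);

    MixRect *out = NULL;
    uint n = 0;
    mix_videorenderparams_get_clipping_rects(render, &out, &n);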
diff --git a/mix_video/src/Android.mk b/mix_video/src/Android.mk index f003dc1..39c057d 100644 --- a/mix_video/src/Android.mk +++ b/mix_video/src/Android.mk @@ -11,24 +11,32 @@ LOCAL_SRC_FILES := \ mixdisplayandroid.cpp \ mixframemanager.cpp \ mixsurfacepool.cpp \ - mixvideo.cpp \ mixvideocaps.cpp \ mixvideoconfigparams.cpp \ mixvideoconfigparamsdec.cpp \ mixvideoconfigparamsdec_h264.cpp \ mixvideoconfigparamsdec_mp42.cpp \ mixvideoconfigparamsdec_vc1.cpp \ + mixvideoconfigparamsenc.cpp \ + mixvideoconfigparamsenc_mpeg4.cpp \ + mixvideoconfigparamsenc_h264.cpp \ + mixvideoconfigparamsenc_h263.cpp \ + mixvideoconfigparamsenc_preview.cpp \ mixvideodecodeparams.cpp \ + mixvideoencodeparams.cpp \ mixvideoformat.cpp \ mixvideoformat_h264.cpp \ mixvideoformat_mp42.cpp \ mixvideoformat_vc1.cpp \ + mixvideoformatenc.cpp \ + mixvideoformatenc_h264.cpp \ + mixvideoformatenc_h263.cpp \ + mixvideoformatenc_mpeg4.cpp \ + mixvideoformatenc_preview.cpp \ mixvideoframe.cpp \ mixvideoinitparams.cpp \ mixvideorenderparams.cpp \ - mixvideoconfigparamsenc.cpp \ - mixvideoconfigparamsenc_h264.cpp \ - mixvideoconfigparamsenc_h263.cpp + mixvideo.cpp LOCAL_CFLAGS := \ -DMIXVIDEO_AGE=1 \ @@ -39,9 +47,6 @@ LOCAL_CFLAGS := \ LOCAL_C_INCLUDES := \ $(LOCAL_PATH) \ - $(GLIB_TOP) \ - $(GLIB_TOP)/android \ - $(GLIB_TOP)/glib \ $(TARGET_OUT_HEADERS)/libmixcommon \ $(TARGET_OUT_HEADERS)/libmixvbp \ $(TARGET_OUT_HEADERS)/libva @@ -50,7 +55,6 @@ LOCAL_LDLIBS += -lpthread LOCAL_SHARED_LIBRARIES := \ libcutils \ - libglib-2.0 \ libmixcommon \ libmixvbp \ libva \ @@ -62,7 +66,7 @@ LOCAL_CFLAGS += -DANDROID \ -DMIXVIDEO_ENCODE_ENABLE=0 ifeq ($(strip $(MIXVIDEO_LOG_ENABLE)),true) -LOCAL_CFLAGS += +LOCAL_CFLAGS += -DMIX_LOG_ENABLE LOCAL_SHARED_LIBRARIES += liblog endif diff --git a/mix_video/src/mixbuffer.cpp b/mix_video/src/mixbuffer.cpp index b1c0ca7..90fdf02 100644 --- a/mix_video/src/mixbuffer.cpp +++ b/mix_video/src/mixbuffer.cpp @@ -1,4 +1,4 @@ -/* +/* INTEL CONFIDENTIAL Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. @@ -33,17 +33,17 @@ #include "mixbuffer.h" #include "mixbuffer_private.h" -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } +#define SAFE_FREE(p) if(p) { free(p); p = NULL; } MixBuffer::MixBuffer() - :data(NULL) - ,size(0) - ,token(0) - ,callback(NULL) - ,pool(NULL) { + :data(NULL) + ,size(0) + ,token(0) + ,callback(NULL) + ,pool(NULL) { } -MixBuffer::~MixBuffer(){ +MixBuffer::~MixBuffer() { } /** @@ -54,14 +54,14 @@ MixBuffer::~MixBuffer(){ * Copy duplicate of the object. 
*/ MixParams * MixBuffer::dup() const { - MixParams *ret = new MixBuffer(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; + MixParams *ret = new MixBuffer(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; } /** @@ -71,74 +71,74 @@ MixParams * MixBuffer::dup() const { * * Copy instance data from @src to @target. */ -gboolean MixBuffer::copy(MixParams * target) const { - gboolean ret = FALSE; - MixBuffer * this_target = MIX_BUFFER(target); - if (NULL != this_target) { - this_target->data = data; - this_target->size = size; - this_target->token = token; - this_target->callback = callback; - ret = MixParams::copy(target); - } - return ret; +bool MixBuffer::copy(MixParams * target) const { + bool ret = FALSE; + MixBuffer * this_target = MIX_BUFFER(target); + if (NULL != this_target) { + this_target->data = data; + this_target->size = size; + this_target->token = token; + this_target->callback = callback; + ret = MixParams::copy(target); + } + return ret; } -gboolean MixBuffer::equal(MixParams * obj) const { - gboolean ret = FALSE; - MixBuffer * this_obj = MIX_BUFFER(obj); - if (NULL != this_obj) { - if (this_obj->data == data && - this_obj->size == size && - this_obj->token == token && - this_obj->callback == callback) { - ret = MixParams::equal(this_obj); - } - } - return ret; +bool MixBuffer::equal(MixParams * obj) const { + bool ret = FALSE; + MixBuffer * this_obj = MIX_BUFFER(obj); + if (NULL != this_obj) { + if (this_obj->data == data && + this_obj->size == size && + this_obj->token == token && + this_obj->callback == callback) { + ret = MixParams::equal(this_obj); + } + } + return ret; } MixBuffer * mix_buffer_new(void) { - return new MixBuffer(); + return new MixBuffer(); } MixBuffer * mix_buffer_ref(MixBuffer * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } MIX_RESULT mix_buffer_set_data( - MixBuffer * obj, guchar *data, guint size, - gulong token, MixBufferCallback callback) { - obj->data = data; - obj->size = size; - obj->token = token; - obj->callback = callback; - return MIX_RESULT_SUCCESS; + MixBuffer * obj, uchar *data, uint size, + ulong token, MixBufferCallback callback) { + obj->data = data; + obj->size = size; + obj->token = token; + obj->callback = callback; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_buffer_set_pool(MixBuffer *obj, MixBufferPool *pool) { - obj->pool = pool; - return MIX_RESULT_SUCCESS; + obj->pool = pool; + return MIX_RESULT_SUCCESS; } void mix_buffer_unref(MixBuffer * obj) { - - if (NULL != obj) { - gint newRefcount = obj->GetRefCount() - 1; - LOG_I( "after unref, refcount = %d\n", newRefcount); - // Unref through base class - obj->Unref(); - if (1 == newRefcount) { - g_return_if_fail(obj->pool != NULL); - if (obj->callback) { - obj->callback(obj->token, obj->data); - } - mix_bufferpool_put(obj->pool, obj); - } - } + + if (NULL != obj) { + int newRefcount = obj->GetRefCount() - 1; + LOG_I( "after unref, refcount = %d\n", newRefcount); + // Unref through base class + obj->Unref(); + if (1 == newRefcount) { + return_if_fail(obj->pool != NULL); + if (obj->callback) { + obj->callback(obj->token, obj->data); + } + mix_bufferpool_put(obj->pool, obj); + } + } } diff --git a/mix_video/src/mixbuffer.h b/mix_video/src/mixbuffer.h index aeef6a1..af23aaf 100644 --- a/mix_video/src/mixbuffer.h +++ b/mix_video/src/mixbuffer.h @@ -1,4 +1,4 @@ -/* +/* INTEL CONFIDENTIAL Copyright 2009 Intel Corporation All 
Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. @@ -26,7 +26,7 @@ */ #define MIX_IS_BUFFER(obj) (NULL != MIX_BUFFER(obj)) -typedef void (*MixBufferCallback)(gulong token, guchar *data); +typedef void (*MixBufferCallback)(ulong token, uchar *data); class MixBufferPool; @@ -37,32 +37,32 @@ class MixBufferPool; */ class MixBuffer : public MixParams { public: - MixBuffer(); - virtual ~MixBuffer(); - virtual gboolean copy(MixParams* target) const; - virtual MixParams* dup() const; - virtual gboolean equal(MixParams* obj) const; + MixBuffer(); + virtual ~MixBuffer(); + virtual bool copy(MixParams* target) const; + virtual MixParams* dup() const; + virtual bool equal(MixParams* obj) const; public: - /* Pointer to coded data buffer */ - guchar *data; - - /* Size of coded data buffer */ - guint size; - - /* Token that will be passed to - * the callback function. Can be - * used by the application for - * any information to be associated - * with this coded data buffer, - * such as a pointer to a structure - * belonging to the application. */ - gulong token; - - /* callback function pointer */ - MixBufferCallback callback; - - /* < private > */ - MixBufferPool *pool; + /* Pointer to coded data buffer */ + uchar *data; + + /* Size of coded data buffer */ + uint size; + + /* Token that will be passed to + * the callback function. Can be + * used by the application for + * any information to be associated + * with this coded data buffer, + * such as a pointer to a structure + * belonging to the application. */ + ulong token; + + /* callback function pointer */ + MixBufferCallback callback; + + /* < private > */ + MixBufferPool *pool; }; /** @@ -94,15 +94,15 @@ void mix_buffer_unref(MixBuffer * mix); /** * mix_buffer_set_data: * @obj: #MixBuffer object - * @data: data buffer - * @size: data buffer size - * @token: token - * @callback: callback function pointer + * @data: data buffer + * @size: data buffer size + * @token: token + * @callback: callback function pointer * @returns: Common Video Error Return Codes * - * Set data buffer, size, token and callback function + * Set data buffer, size, token and callback function */ -MIX_RESULT mix_buffer_set_data(MixBuffer * obj, guchar *data, guint size, - gulong token, MixBufferCallback callback); +MIX_RESULT mix_buffer_set_data(MixBuffer * obj, uchar *data, uint size, + ulong token, MixBufferCallback callback); #endif /* __MIX_BUFFER_H__ */ diff --git a/mix_video/src/mixbuffer_private.h b/mix_video/src/mixbuffer_private.h index 81b7f9d..6ade94d 100644 --- a/mix_video/src/mixbuffer_private.h +++ b/mix_video/src/mixbuffer_private.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. 
The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -21,8 +21,8 @@ typedef struct _MixBufferPrivate MixBufferPrivate; struct _MixBufferPrivate { - /*< private > */ - MixBufferPool *pool; + /*< private > */ + MixBufferPool *pool; }; #endif diff --git a/mix_video/src/mixbufferpool.cpp b/mix_video/src/mixbufferpool.cpp index 044fddc..47afa55 100644 --- a/mix_video/src/mixbufferpool.cpp +++ b/mix_video/src/mixbufferpool.cpp @@ -1,4 +1,4 @@ -/* +/* INTEL CONFIDENTIAL Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
@@ -17,45 +17,45 @@ #include "mixbufferpool.h" #include "mixbuffer_private.h" -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } - -MixBufferPool::MixBufferPool() - :free_list(NULL) - ,in_use_list(NULL) - ,free_list_max_size(0) - ,high_water_mark(0) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) - ,mLock() { +#define SAFE_FREE(p) if(p) { free(p); p = NULL; } + +MixBufferPool::MixBufferPool() + :free_list(NULL) + ,in_use_list(NULL) + ,free_list_max_size(0) + ,high_water_mark(0) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) + ,mLock() { } -MixBufferPool::~MixBufferPool(){ +MixBufferPool::~MixBufferPool() { } MixBufferPool * mix_bufferpool_new(void) { - return new MixBufferPool(); + return new MixBufferPool(); } MixBufferPool * mix_bufferpool_ref(MixBufferPool * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } MixParams * MixBufferPool::dup() const { - MixBufferPool * ret = new MixBufferPool(); - MixBufferPool * this_obj = const_cast(this); - if (NULL != ret) { - this_obj->Lock(); - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - this_obj->Unlock(); - } - return ret; + MixBufferPool * ret = new MixBufferPool(); + MixBufferPool * this_obj = const_cast(this); + if (NULL != ret) { + this_obj->Lock(); + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + this_obj->Unlock(); + } + return ret; } @@ -68,41 +68,41 @@ MixParams * MixBufferPool::dup() const { * Copy instance data from @src to @target. */ -gboolean MixBufferPool::copy(MixParams * target) const { - gboolean ret = FALSE; - MixBufferPool * this_target = MIX_BUFFERPOOL(target); - MixBufferPool * this_obj = const_cast(this); - if (NULL != this_target) { - this_obj->Lock(); - this_target->Lock(); - this_target->free_list = free_list; - this_target->in_use_list = in_use_list; - this_target->free_list_max_size = free_list_max_size; - this_target->high_water_mark = high_water_mark; - ret = MixParams::copy(target); - this_target->Unlock(); - this_obj->Unlock(); - } - return ret; +bool MixBufferPool::copy(MixParams * target) const { + bool ret = FALSE; + MixBufferPool * this_target = MIX_BUFFERPOOL(target); + MixBufferPool * this_obj = const_cast(this); + if (NULL != this_target) { + this_obj->Lock(); + this_target->Lock(); + this_target->free_list = free_list; + this_target->in_use_list = in_use_list; + this_target->free_list_max_size = free_list_max_size; + this_target->high_water_mark = high_water_mark; + ret = MixParams::copy(target); + this_target->Unlock(); + this_obj->Unlock(); + } + return ret; } -gboolean MixBufferPool::equal(MixParams * obj) const { - gboolean ret = FALSE; - MixBufferPool * this_obj = MIX_BUFFERPOOL(obj); - MixBufferPool * unconst_this = const_cast(this); - if (NULL != this_obj) { - unconst_this->Lock(); - this_obj->Lock(); - if (free_list == this_obj->free_list && - in_use_list == this_obj->in_use_list && - free_list_max_size == this_obj->free_list_max_size && - high_water_mark == this_obj->high_water_mark) { - ret = MixParams::equal(this_obj); - } - this_obj->Unlock(); - unconst_this->Unlock(); - } - return ret; +bool MixBufferPool::equal(MixParams * obj) const { + bool ret = FALSE; + MixBufferPool * this_obj = MIX_BUFFERPOOL(obj); + MixBufferPool * unconst_this = const_cast(this); + if (NULL != this_obj) { + unconst_this->Lock(); + this_obj->Lock(); + if (free_list == this_obj->free_list && + in_use_list == this_obj->in_use_list && + free_list_max_size == 
this_obj->free_list_max_size && + high_water_mark == this_obj->high_water_mark) { + ret = MixParams::equal(this_obj); + } + this_obj->Unlock(); + unconst_this->Unlock(); + } + return ret; } /** @@ -113,73 +113,73 @@ gboolean MixBufferPool::equal(MixParams * obj) const { * buffer objects that represents a pool of buffers. */ MIX_RESULT mix_bufferpool_initialize( - MixBufferPool * obj, guint num_buffers) { - LOG_V( "Begin\n"); + MixBufferPool * obj, uint num_buffers) { + LOG_V( "Begin\n"); - if (obj == NULL) - return MIX_RESULT_NULL_PTR; + if (obj == NULL) + return MIX_RESULT_NULL_PTR; - obj->Lock(); + obj->Lock(); - if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) { - //buffer pool is in use; return error; need proper cleanup - //TODO need cleanup here? + if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) { + //buffer pool is in use; return error; need proper cleanup + //TODO need cleanup here? - obj->Unlock(); + obj->Unlock(); - return MIX_RESULT_ALREADY_INIT; - } + return MIX_RESULT_ALREADY_INIT; + } - if (num_buffers == 0) { - obj->free_list = NULL; + if (num_buffers == 0) { + obj->free_list = NULL; - obj->in_use_list = NULL; + obj->in_use_list = NULL; - obj->free_list_max_size = num_buffers; + obj->free_list_max_size = num_buffers; - obj->high_water_mark = 0; + obj->high_water_mark = 0; - obj->Unlock(); + obj->Unlock(); - return MIX_RESULT_SUCCESS; - } + return MIX_RESULT_SUCCESS; + } - // Initialize the free pool with MixBuffer objects + // Initialize the free pool with MixBuffer objects - guint i = 0; - MixBuffer *buffer = NULL; + uint i = 0; + MixBuffer *buffer = NULL; - for (; i < num_buffers; i++) { + for (; i < num_buffers; i++) { - buffer = mix_buffer_new(); + buffer = mix_buffer_new(); - if (buffer == NULL) { - //TODO need to log an error here and do cleanup + if (buffer == NULL) { + //TODO need to log an error here and do cleanup - obj->Unlock(); + obj->Unlock(); - return MIX_RESULT_NO_MEMORY; - } + return MIX_RESULT_NO_MEMORY; + } - // Set the pool reference in the private data of the MixBuffer object - mix_buffer_set_pool(buffer, obj); + // Set the pool reference in the private data of the MixBuffer object + mix_buffer_set_pool(buffer, obj); - //Add each MixBuffer object to the pool list - obj->free_list = g_slist_append(obj->free_list, buffer); + //Add each MixBuffer object to the pool list + obj->free_list = j_slist_append(obj->free_list, buffer); - } + } - obj->in_use_list = NULL; + obj->in_use_list = NULL; - obj->free_list_max_size = num_buffers; + obj->free_list_max_size = num_buffers; - obj->high_water_mark = 0; + obj->high_water_mark = 0; - obj->Unlock(); + obj->Unlock(); - LOG_V( "End\n"); + LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } /** @@ -190,42 +190,42 @@ MIX_RESULT mix_bufferpool_initialize( */ MIX_RESULT mix_bufferpool_put(MixBufferPool * obj, MixBuffer * buffer) { - if (obj == NULL || buffer == NULL) - return MIX_RESULT_NULL_PTR; + if (obj == NULL || buffer == NULL) + return MIX_RESULT_NULL_PTR; - obj->Lock(); + obj->Lock(); - if (obj->in_use_list == NULL) { - //in use list cannot be empty if a buffer is in use - //TODO need better error code for this + if (obj->in_use_list == NULL) { + //in use list cannot be empty if a buffer is in use + //TODO need better error code for this - obj->Unlock(); + obj->Unlock(); - return MIX_RESULT_FAIL; - } + return MIX_RESULT_FAIL; + } - GSList *element = g_slist_find(obj->in_use_list, buffer); - if (element == NULL) { - //Integrity error; buffer not found in in use list - 
//TODO need better error code and handling for this + JSList *element = j_slist_find(obj->in_use_list, buffer); + if (element == NULL) { + //Integrity error; buffer not found in in use list + //TODO need better error code and handling for this - obj->Unlock(); + obj->Unlock(); - return MIX_RESULT_FAIL; - } else { - //Remove this element from the in_use_list - obj->in_use_list = g_slist_remove_link(obj->in_use_list, element); + return MIX_RESULT_FAIL; + } else { + //Remove this element from the in_use_list + obj->in_use_list = j_slist_remove_link(obj->in_use_list, element); - //Concat the element to the free_list - obj->free_list = g_slist_concat(obj->free_list, element); - } + //Concat the element to the free_list + obj->free_list = j_slist_concat(obj->free_list, element); + } - //Note that we do nothing with the ref count for this. We want it to - //stay at 1, which is what triggered it to be added back to the free list. + //Note that we do nothing with the ref count for this. We want it to + //stay at 1, which is what triggered it to be added back to the free list. - obj->Unlock(); + obj->Unlock(); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } /** @@ -236,57 +236,57 @@ MIX_RESULT mix_bufferpool_put(MixBufferPool * obj, MixBuffer * buffer) { */ MIX_RESULT mix_bufferpool_get(MixBufferPool * obj, MixBuffer ** buffer) { - if (obj == NULL || buffer == NULL) - return MIX_RESULT_NULL_PTR; + if (obj == NULL || buffer == NULL) + return MIX_RESULT_NULL_PTR; - obj->Lock(); + obj->Lock(); - if (obj->free_list == NULL) { - //We are out of buffers - //TODO need to log this as well + if (obj->free_list == NULL) { + //We are out of buffers + //TODO need to log this as well - obj->Unlock(); + obj->Unlock(); - return MIX_RESULT_POOLEMPTY; - } + return MIX_RESULT_POOLEMPTY; + } - //Remove a buffer from the free pool + //Remove a buffer from the free pool - //We just remove the one at the head, since it's convenient - GSList *element = obj->free_list; - obj->free_list = g_slist_remove_link(obj->free_list, element); - if (element == NULL) { - //Unexpected behavior - //TODO need better error code and handling for this + //We just remove the one at the head, since it's convenient + JSList *element = obj->free_list; + obj->free_list = j_slist_remove_link(obj->free_list, element); + if (element == NULL) { + //Unexpected behavior + //TODO need better error code and handling for this - obj->Unlock(); + obj->Unlock(); - return MIX_RESULT_FAIL; - } else { - //Concat the element to the in_use_list - obj->in_use_list = g_slist_concat(obj->in_use_list, element); + return MIX_RESULT_FAIL; + } else { + //Concat the element to the in_use_list + obj->in_use_list = j_slist_concat(obj->in_use_list, element); - //TODO replace with proper logging + //TODO replace with proper logging - LOG_I( "buffer refcount%d\n", - MIX_PARAMS(element->data)->ref_count); + LOG_I( "buffer refcount%d\n", + MIX_PARAMS(element->data)->ref_count); - //Set the out buffer pointer - *buffer = (MixBuffer *) element->data; + //Set the out buffer pointer + *buffer = (MixBuffer *) element->data; - //Check the high water mark for buffer use - guint size = g_slist_length(obj->in_use_list); - if (size > obj->high_water_mark) - obj->high_water_mark = size; - //TODO Log this high water mark - } + //Check the high water mark for buffer use + uint size = j_slist_length(obj->in_use_list); + if (size > obj->high_water_mark) + obj->high_water_mark = size; + //TODO Log this high water mark + } - //Increment the reference count for the buffer - 
mix_buffer_ref(*buffer); + //Increment the reference count for the buffer + mix_buffer_ref(*buffer); - obj->Unlock(); + obj->Unlock(); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } /** @@ -296,82 +296,82 @@ MIX_RESULT mix_bufferpool_get(MixBufferPool * obj, MixBuffer ** buffer) { * Use this method to teardown a buffer pool */ MIX_RESULT mix_bufferpool_deinitialize(MixBufferPool * obj) { - if (obj == NULL) - return MIX_RESULT_NULL_PTR; + if (obj == NULL) + return MIX_RESULT_NULL_PTR; - obj->Lock(); + obj->Lock(); - if ((obj->in_use_list != NULL) || (g_slist_length(obj->free_list) - != obj->free_list_max_size)) { - //TODO better error code - //We have outstanding buffer objects in use and they need to be - //freed before we can deinitialize. + if ((obj->in_use_list != NULL) || (j_slist_length(obj->free_list) + != obj->free_list_max_size)) { + //TODO better error code + //We have outstanding buffer objects in use and they need to be + //freed before we can deinitialize. - obj->Unlock(); + obj->Unlock(); - return MIX_RESULT_FAIL; - } + return MIX_RESULT_FAIL; + } - //Now remove buffer objects from the list + //Now remove buffer objects from the list - MixBuffer *buffer = NULL; + MixBuffer *buffer = NULL; - while (obj->free_list != NULL) { - //Get the buffer object from the head of the list - buffer = reinterpret_cast(obj->free_list->data); - //buffer = g_slist_nth_data(obj->free_list, 0); + while (obj->free_list != NULL) { + //Get the buffer object from the head of the list + buffer = reinterpret_cast(obj->free_list->data); + //buffer = g_slist_nth_data(obj->free_list, 0); - //Release it - mix_buffer_unref(buffer); + //Release it + mix_buffer_unref(buffer); - //Delete the head node of the list and store the new head - obj->free_list = g_slist_delete_link(obj->free_list, obj->free_list); + //Delete the head node of the list and store the new head + obj->free_list = j_slist_delete_link(obj->free_list, obj->free_list); - //Repeat until empty - } + //Repeat until empty + } - obj->free_list_max_size = 0; + obj->free_list_max_size = 0; - //May want to log this information for tuning - obj->high_water_mark = 0; + //May want to log this information for tuning + obj->high_water_mark = 0; - obj->Unlock(); + obj->Unlock(); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } #define MIX_BUFFERPOOL_SETTER_CHECK_INPUT(obj) \ if(!obj) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_BUFFERPOOL(obj)) return MIX_RESULT_FAIL; \ - + #define MIX_BUFFERPOOL_GETTER_CHECK_INPUT(obj, prop) \ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_BUFFERPOOL(obj)) return MIX_RESULT_FAIL; \ - + MIX_RESULT mix_bufferpool_dumpbuffer(MixBuffer *buffer) { - LOG_I( "\tBuffer %x, ptr %x, refcount %d\n", (guint)buffer, - (guint)buffer->data, MIX_PARAMS(buffer)->ref_count); - return MIX_RESULT_SUCCESS; + LOG_I( "\tBuffer %x, ptr %x, refcount %d\n", (uint)buffer, + (uint)buffer->data, MIX_PARAMS(buffer)->ref_count); + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_bufferpool_dumpprint (MixBufferPool * obj) { - //TODO replace this with proper logging later - LOG_I( "BUFFER POOL DUMP:\n"); - LOG_I( "Free list size is %d\n", g_slist_length(obj->free_list)); - LOG_I( "In use list size is %d\n", g_slist_length(obj->in_use_list)); - LOG_I( "High water mark is %lu\n", obj->high_water_mark); + //TODO replace this with proper logging later + LOG_I( "BUFFER POOL DUMP:\n"); + LOG_I( "Free list size is %d\n", j_slist_length(obj->free_list)); + LOG_I( "In use list size is %d\n", j_slist_length(obj->in_use_list)); + LOG_I( 
"High water mark is %lu\n", obj->high_water_mark); - //Walk the free list and report the contents - LOG_I( "Free list contents:\n"); - g_slist_foreach(obj->free_list, (GFunc) mix_bufferpool_dumpbuffer, NULL); + //Walk the free list and report the contents + LOG_I( "Free list contents:\n"); + j_slist_foreach(obj->free_list, (JFunc) mix_bufferpool_dumpbuffer, NULL); - //Walk the in_use list and report the contents - LOG_I( "In Use list contents:\n"); - g_slist_foreach(obj->in_use_list, (GFunc) mix_bufferpool_dumpbuffer, NULL); + //Walk the in_use list and report the contents + LOG_I( "In Use list contents:\n"); + j_slist_foreach(obj->in_use_list, (JFunc) mix_bufferpool_dumpbuffer, NULL); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixbufferpool.h b/mix_video/src/mixbufferpool.h index 1275bbc..655fba4 100644 --- a/mix_video/src/mixbufferpool.h +++ b/mix_video/src/mixbufferpool.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -14,6 +14,8 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixbuffer.h" #include "mixvideothread.h" #include +#include + class MixBuffer; @@ -26,7 +28,7 @@ class MixBuffer; /** * MIX_IS_BUFFERPOOL: * @obj: an object. 
-* +* * Checks if the given object is an instance of #MixBufferPool */ #define MIX_IS_BUFFERPOOL(obj) (NULL != MIX_BUFFERPOOL(obj)) @@ -41,26 +43,30 @@ class MixBufferPool : public MixParams public: MixBufferPool(); virtual ~MixBufferPool(); - virtual gboolean copy(MixParams* target) const; + virtual bool copy(MixParams* target) const; virtual MixParams* dup() const; - virtual gboolean equal(MixParams* obj) const; - - void Lock() {mLock.lock();} - void Unlock() {mLock.unlock();} + virtual bool equal(MixParams* obj) const; + + void Lock() { + mLock.lock(); + } + void Unlock() { + mLock.unlock(); + } public: - /*< public > */ - GSList *free_list; /* list of free buffers */ - GSList *in_use_list; /* list of buffers in use */ - gulong free_list_max_size; /* initial size of the free list */ - gulong high_water_mark; /* most buffers in use at one time */ - - void *reserved1; - void *reserved2; - void *reserved3; - void *reserved4; - - /*< private > */ - MixVideoMutex mLock; + /*< public > */ + JSList *free_list; /* list of free buffers */ + JSList *in_use_list; /* list of buffers in use */ + ulong free_list_max_size; /* initial size of the free list */ + ulong high_water_mark; /* most buffers in use at one time */ + + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; + + /*< private > */ + MixVideoMutex mLock; }; @@ -68,7 +74,7 @@ public: /** * mix_bufferpool_new: * @returns: A newly allocated instance of #MixBufferPool -* +* * Use this method to create new instance of #MixBufferPool */ MixBufferPool *mix_bufferpool_new (void); @@ -76,7 +82,7 @@ MixBufferPool *mix_bufferpool_new (void); * mix_bufferpool_ref: * @mix: object to add reference * @returns: the MixBufferPool instance where reference count has been increased. -* +* * Add reference count. */ MixBufferPool *mix_bufferpool_ref (MixBufferPool * mix); @@ -84,13 +90,13 @@ MixBufferPool *mix_bufferpool_ref (MixBufferPool * mix); /** * mix_bufferpool_unref: * @obj: object to unref. -* +* * Decrement reference count of the object. */ #define mix_bufferpool_unref(obj) mix_params_unref(MIX_PARAMS(obj)) /* Class Methods */ -MIX_RESULT mix_bufferpool_initialize (MixBufferPool * obj, guint num_buffers); +MIX_RESULT mix_bufferpool_initialize (MixBufferPool * obj, uint num_buffers); MIX_RESULT mix_bufferpool_put (MixBufferPool * obj, MixBuffer * buffer); MIX_RESULT mix_bufferpool_get (MixBufferPool * obj, MixBuffer ** buffer); MIX_RESULT mix_bufferpool_deinitialize (MixBufferPool * obj); diff --git a/mix_video/src/mixdisplay.cpp b/mix_video/src/mixdisplay.cpp index c8df250..5ab7c0b 100644 --- a/mix_video/src/mixdisplay.cpp +++ b/mix_video/src/mixdisplay.cpp @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -19,81 +19,81 @@ No license under any patent, copyright, trade secret or other intellectual prope #define DEBUG_REFCOUNT -MixDisplay::MixDisplay() - :refcount(1) { +MixDisplay::MixDisplay() + :refcount(1) { } MixDisplay::~MixDisplay() { - Finalize(); + Finalize(); } MixDisplay* MixDisplay::Dup() const { - MixDisplay* dup = new MixDisplay(); - if (NULL != dup ) { - if(FALSE == Copy(dup)) { - dup->Unref(); - dup = NULL; - } - } - return dup; + MixDisplay* dup = new MixDisplay(); + if (NULL != dup ) { + if (FALSE == Copy(dup)) { + dup->Unref(); + dup = NULL; + } + } + return dup; } -gboolean MixDisplay::Copy(MixDisplay* target) const { - if(NULL != target) - return TRUE; - else - return FALSE; +bool MixDisplay::Copy(MixDisplay* target) const { + if (NULL != target) + return TRUE; + else + return FALSE; } void MixDisplay::Finalize() { } -gboolean MixDisplay::Equal(const MixDisplay* obj) const { - if (NULL != obj) - return TRUE; - else - return FALSE; +bool MixDisplay::Equal(const MixDisplay* obj) const { + if (NULL != obj) + return TRUE; + else + return FALSE; } MixDisplay * MixDisplay::Ref() { - ++refcount; - return this; + ++refcount; + return this; } void MixDisplay::Unref () { - if (0 == (--refcount)) { - delete this; - } + if (0 == (--refcount)) { + delete this; + } } -gboolean mix_display_copy (MixDisplay * target, const MixDisplay * src) { - if (target == src) - return TRUE; - if (NULL == target || NULL == src) - return FALSE; - return src->Copy(target); +bool mix_display_copy (MixDisplay * target, const MixDisplay * src) { + if (target == src) + return TRUE; + if (NULL == target || NULL == src) + return FALSE; + return src->Copy(target); } MixDisplay * mix_display_dup (const MixDisplay * obj) { - if (NULL == obj) - return NULL; - return obj->Dup(); + if (NULL == obj) + return NULL; + return obj->Dup(); } MixDisplay * mix_display_new (void) { - return new MixDisplay(); + return new MixDisplay(); } MixDisplay * mix_display_ref (MixDisplay * obj) { - if (NULL != obj) - obj->Ref(); - return obj; + if (NULL != obj) + obj->Ref(); + return obj; } void mix_display_unref (MixDisplay * obj) { - if (NULL != obj) - obj->Unref(); + if (NULL != obj) + obj->Unref(); } @@ -107,24 +107,24 @@ void mix_display_unref (MixDisplay * obj) { * Either @newdata and the value pointed to by @olddata may be NULL. 
*/ void mix_display_replace (MixDisplay ** olddata, MixDisplay * newdata) { - if (NULL == olddata) - return; - if(*olddata == newdata) - return; - MixDisplay *olddata_val = *olddata; - if (NULL != newdata) - newdata->Ref(); - *olddata = newdata; - if (NULL != olddata_val) - olddata_val->Unref(); + if (NULL == olddata) + return; + if (*olddata == newdata) + return; + MixDisplay *olddata_val = *olddata; + if (NULL != newdata) + newdata->Ref(); + *olddata = newdata; + if (NULL != olddata_val) + olddata_val->Unref(); } -gboolean mix_display_equal (MixDisplay * first, MixDisplay * second) { - if (first == second) - return TRUE; - if (NULL == first || NULL == second) - return FALSE; - return first->Equal(second); +bool mix_display_equal (MixDisplay * first, MixDisplay * second) { + if (first == second) + return TRUE; + if (NULL == first || NULL == second) + return FALSE; + return first->Equal(second); } diff --git a/mix_video/src/mixdisplay.h b/mix_video/src/mixdisplay.h index 9416fb8..da7f074 100644 --- a/mix_video/src/mixdisplay.h +++ b/mix_video/src/mixdisplay.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -9,7 +9,7 @@ No license under any patent, copyright, trade secret or other intellectual prope #ifndef __MIX_DISPLAY_H__ #define __MIX_DISPLAY_H__ -#include +#include #define MIX_DISPLAY(obj) (reinterpret_cast<MixDisplay*>(obj)) @@ -22,32 +22,32 @@ No license under any patent, copyright, trade secret or other intellectual prope */ class MixDisplay { public: - virtual ~MixDisplay(); + virtual ~MixDisplay(); - virtual MixDisplay* Dup() const; - virtual gboolean Copy(MixDisplay* target) const; - virtual void Finalize(); - virtual gboolean Equal(const MixDisplay* obj) const; + virtual MixDisplay* Dup() const; + virtual bool Copy(MixDisplay* target) const; + virtual void Finalize(); + virtual bool Equal(const MixDisplay* obj) const; - MixDisplay * Ref(); - void Unref (); + MixDisplay * Ref(); + void Unref (); + + friend MixDisplay *mix_display_new (void); - friend MixDisplay *mix_display_new (void); - protected: - MixDisplay(); + MixDisplay(); public: - /*< public > */ - gint refcount; - /*< private > */ - gpointer _reserved; + /*< public > */ + int refcount; + /*< private > */ + void* _reserved; }; /** * mix_display_new: * @returns: return a newly allocated object. -* +* * Create new instance of the object.
*/ MixDisplay *mix_display_new (void); @@ -59,24 +59,24 @@ MixDisplay *mix_display_new (void); * @target: copy to target * @src: copy from source * @returns: boolean indicating if copy is successful. -* +* * Copy data from one instance to the other. This method internally invoked the #MixDisplay::copy method such that derived object will be copied correctly. */ -gboolean mix_display_copy (MixDisplay * target, const MixDisplay * src); +bool mix_display_copy (MixDisplay * target, const MixDisplay * src); -/** +/** * mix_display_ref: * @obj: a #MixDisplay object. * @returns: the object with reference count incremented. -* +* * Increment reference count. */ MixDisplay *mix_display_ref (MixDisplay * obj); -/** +/** * mix_display_unref: * @obj: a #MixDisplay object. -* +* * Decrement reference count. */ void mix_display_unref (MixDisplay * obj); @@ -85,7 +85,7 @@ void mix_display_unref (MixDisplay * obj); * mix_display_replace: * @olddata: old data * @newdata: new data -* +* * Replace a pointer of the object with the new one. */ void mix_display_replace (MixDisplay ** olddata, MixDisplay * newdata); @@ -94,7 +94,7 @@ void mix_display_replace (MixDisplay ** olddata, MixDisplay * newdata); * mix_display_dup: * @obj: #MixDisplay object to duplicate. * @returns: A newly allocated duplicate of the object, or NULL if failed. -* +* * Duplicate the given #MixDisplay and allocate a new instance. This method is chained up properly and derive object will be dupped properly. */ MixDisplay *mix_display_dup (const MixDisplay * obj); @@ -104,10 +104,10 @@ MixDisplay *mix_display_dup (const MixDisplay * obj); * @first: first object to compare * @second: second object to compare * @returns: boolean indicates if the 2 object contains same data. -* +* * Note that the parameter comparison compares the values that are hold inside the object, not for checking if the 2 pointers are of the same instance. */ -gboolean mix_display_equal (MixDisplay * first, MixDisplay * second); +bool mix_display_equal (MixDisplay * first, MixDisplay * second); #endif diff --git a/mix_video/src/mixdisplayandroid.cpp b/mix_video/src/mixdisplayandroid.cpp index 3f9ba19..1d00d24 100644 --- a/mix_video/src/mixdisplayandroid.cpp +++ b/mix_video/src/mixdisplayandroid.cpp @@ -8,10 +8,10 @@ /** * SECTION:mixdisplayandroid - * @short_description: MI-X Video Android Display + * @short_description: MI-X Video Android Display * * A data object which stores Android specific parameters. 
- * + * * Data Structures Used in MixDisplayAndroid Fields: * @@ -21,63 +21,63 @@ #include "mixdisplayandroid.h" -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } +#define SAFE_FREE(p) if(p) { free(p); p = NULL; } MixDisplayAndroid::MixDisplayAndroid() - :display(NULL) { + :display(NULL) { } MixDisplayAndroid::~MixDisplayAndroid() { - Finalize(); + Finalize(); } MixDisplay* MixDisplayAndroid::Dup() const { - MixDisplayAndroid* dup = new MixDisplayAndroid(); - if (NULL != dup ) { - if(FALSE == Copy(dup)) { - dup->Unref(); - dup = NULL; - } - } - return dup; + MixDisplayAndroid* dup = new MixDisplayAndroid(); + if (NULL != dup ) { + if (FALSE == Copy(dup)) { + dup->Unref(); + dup = NULL; + } + } + return dup; } -gboolean MixDisplayAndroid::Copy(MixDisplay* target) const { - gboolean ret = FALSE; - MixDisplayAndroid* this_target = reinterpret_cast<MixDisplayAndroid*>(target); - if (NULL != this_target) { - this_target->display = this->display; - ret = MixDisplay::Copy(target); - } - return ret; +bool MixDisplayAndroid::Copy(MixDisplay* target) const { + bool ret = FALSE; + MixDisplayAndroid* this_target = reinterpret_cast<MixDisplayAndroid*>(target); + if (NULL != this_target) { + this_target->display = this->display; + ret = MixDisplay::Copy(target); + } + return ret; } void MixDisplayAndroid::Finalize() { - MixDisplay::Finalize(); + MixDisplay::Finalize(); } -gboolean MixDisplayAndroid::Equal(const MixDisplay* obj) const { - gboolean ret = FALSE; - const MixDisplayAndroid* this_obj = reinterpret_cast<const MixDisplayAndroid*>(obj); - if (NULL != this_obj) { - if(this_obj->display == this->display) - ret = MixDisplay::Equal(obj); - } - return ret; +bool MixDisplayAndroid::Equal(const MixDisplay* obj) const { + bool ret = FALSE; + const MixDisplayAndroid* this_obj = reinterpret_cast<const MixDisplayAndroid*>(obj); + if (NULL != this_obj) { + if (this_obj->display == this->display) + ret = MixDisplay::Equal(obj); + } + return ret; } MixDisplayAndroid * mix_displayandroid_new(void) { - return new MixDisplayAndroid(); + return new MixDisplayAndroid(); } MixDisplayAndroid * mix_displayandroid_ref(MixDisplayAndroid * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } /** @@ -88,11 +88,11 @@ MixDisplayAndroid * mix_displayandroid_ref(MixDisplayAndroid * mix) { * Copy duplicate of the object. */ MixDisplay * mix_displayandroid_dup(const MixDisplay * obj) { - MixDisplay *ret = NULL; - if (NULL != obj) { - ret = obj->Dup(); - } - return ret; + MixDisplay *ret = NULL; + if (NULL != obj) { + ret = obj->Dup(); + } + return ret; } /** @@ -103,16 +103,16 @@ MixDisplay * mix_displayandroid_dup(const MixDisplay * obj) { * * Copy instance data from @src to @target. */ -gboolean mix_displayandroid_copy(MixDisplay * target, const MixDisplay * src) { - if (target == src) - return TRUE; - if (NULL == target || NULL == src) - return FALSE; - const MixDisplayAndroid *this_src = - reinterpret_cast<const MixDisplayAndroid*>(src); - MixDisplayAndroid *this_target = - reinterpret_cast<MixDisplayAndroid*>(target); - return this_src->Copy(this_target); +bool mix_displayandroid_copy(MixDisplay * target, const MixDisplay * src) { + if (target == src) + return TRUE; + if (NULL == target || NULL == src) + return FALSE; + const MixDisplayAndroid *this_src = + reinterpret_cast<const MixDisplayAndroid*>(src); + MixDisplayAndroid *this_target = + reinterpret_cast<MixDisplayAndroid*>(target); + return this_src->Copy(this_target); } /** @@ -123,16 +123,16 @@ gboolean mix_displayandroid_copy(MixDisplay * target, const MixDisplay * src) { * * Copy instance data from @src to @target.
*/ -gboolean mix_displayandroid_equal(MixDisplay * first, MixDisplay * second) { - if (first == second) - return TRUE; - if (NULL == first || NULL == second) - return FALSE; - MixDisplayAndroid *this_first = - reinterpret_cast<MixDisplayAndroid*>(first); - MixDisplayAndroid *this_second = - reinterpret_cast<MixDisplayAndroid*>(second); - return first->Equal(second); +bool mix_displayandroid_equal(MixDisplay * first, MixDisplay * second) { + if (first == second) + return TRUE; + if (NULL == first || NULL == second) + return FALSE; + MixDisplayAndroid *this_first = + reinterpret_cast<MixDisplayAndroid*>(first); + MixDisplayAndroid *this_second = + reinterpret_cast<MixDisplayAndroid*>(second); + return first->Equal(second); } #define MIX_DISPLAYANDROID_SETTER_CHECK_INPUT(obj) \ @@ -142,17 +142,17 @@ gboolean mix_displayandroid_equal(MixDisplay * first, MixDisplay * second) { if(!obj || prop == NULL) return MIX_RESULT_NULL_PTR; MIX_RESULT mix_displayandroid_set_display(MixDisplayAndroid * obj, void * display) { - MIX_DISPLAYANDROID_SETTER_CHECK_INPUT (obj); - // TODO: needs to decide to clone or just copy pointer - obj->display = display; - return MIX_RESULT_SUCCESS; + MIX_DISPLAYANDROID_SETTER_CHECK_INPUT (obj); + // TODO: needs to decide to clone or just copy pointer + obj->display = display; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_displayandroid_get_display(MixDisplayAndroid * obj, void ** display) { - MIX_DISPLAYANDROID_GETTER_CHECK_INPUT (obj, display); - // TODO: needs to decide to clone or just copy pointer - *display = obj->display; - return MIX_RESULT_SUCCESS; + MIX_DISPLAYANDROID_GETTER_CHECK_INPUT (obj, display); + // TODO: needs to decide to clone or just copy pointer + *display = obj->display; + return MIX_RESULT_SUCCESS; } #endif /* ANDROID */ diff --git a/mix_video/src/mixdisplayandroid.h b/mix_video/src/mixdisplayandroid.h index 5637d25..1b47f42 100644 --- a/mix_video/src/mixdisplayandroid.h +++ b/mix_video/src/mixdisplayandroid.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -23,104 +23,104 @@ extern "C" { #ifdef ANDROID -/** -* MIX_DISPLAYANDROID: -* @obj: object to be type-casted. -*/ + /** + * MIX_DISPLAYANDROID: + * @obj: object to be type-casted. + */ #define MIX_DISPLAYANDROID(obj) (reinterpret_cast<MixDisplayAndroid*>(obj)) -/** -* MIX_IS_DISPLAYANDROID: -* @obj: an object. -* -* Checks if the given object is an instance of #MixDisplay -*/ + /** + * MIX_IS_DISPLAYANDROID: + * @obj: an object.
+ * + * Checks if the given object is an instance of #MixDisplay + */ #define MIX_IS_DISPLAYANDROID(obj) (NULL != MIX_DISPLAYANDROID(obj)) -/** -* MixDisplayAndroid: -* -* MI-X VideoInit Parameter object -*/ -class MixDisplayAndroid : public MixDisplay { - -public: - ~MixDisplayAndroid(); - virtual MixDisplay* Dup() const; - virtual gboolean Copy(MixDisplay* target) const; - virtual void Finalize(); - virtual gboolean Equal(const MixDisplay* obj) const; - - - - friend MixDisplayAndroid *mix_displayandroid_new (void); - -protected: - MixDisplayAndroid(); -public: - /*< public > */ - - /* Pointer to a Android specific display */ - void *display; - - /* An Android drawable that is a smart pointer - * of ISurface. This field is not used in - * mix_video_initialize(). - */ - // sp<ISurface> drawable; -}; - -/** -* mix_displayandroid_new: -* @returns: A newly allocated instance of #MixDisplayAndroid -* -* Use this method to create new instance of #MixDisplayAndroid -*/ -MixDisplayAndroid *mix_displayandroid_new (void); - - -/** -* mix_displayandroid_ref: -* @mix: object to add reference -* @returns: the #MixDisplayAndroid instance where reference count has been increased. -* -* Add reference count. -*/ -MixDisplayAndroid *mix_displayandroid_ref (MixDisplayAndroid * mix); - -/** -* mix_displayandroid_unref: -* @obj: object to unref. -* -* Decrement reference count of the object. -*/ + /** + * MixDisplayAndroid: + * + * MI-X VideoInit Parameter object + */ + class MixDisplayAndroid : public MixDisplay { + + public: + ~MixDisplayAndroid(); + virtual MixDisplay* Dup() const; + virtual bool Copy(MixDisplay* target) const; + virtual void Finalize(); + virtual bool Equal(const MixDisplay* obj) const; + + + + friend MixDisplayAndroid *mix_displayandroid_new (void); + + protected: + MixDisplayAndroid(); + public: + /*< public > */ + + /* Pointer to an Android specific display */ + void *display; + + /* An Android drawable that is a smart pointer + * of ISurface. This field is not used in + * mix_video_initialize(). + */ + // sp<ISurface> drawable; + }; + + /** + * mix_displayandroid_new: + * @returns: A newly allocated instance of #MixDisplayAndroid + * + * Use this method to create new instance of #MixDisplayAndroid + */ + MixDisplayAndroid *mix_displayandroid_new (void); + + + /** + * mix_displayandroid_ref: + * @mix: object to add reference + * @returns: the #MixDisplayAndroid instance where reference count has been increased. + * + * Add reference count. + */ + MixDisplayAndroid *mix_displayandroid_ref (MixDisplayAndroid * mix); + + /** + * mix_displayandroid_unref: + * @obj: object to unref. + * + * Decrement reference count of the object.
+ */ #define mix_displayandroid_unref(obj) mix_display_unref(MIX_DISPLAY(obj)) -/* Class Methods */ - - -/** - * mix_displayandroid_set_display: - * @obj: #MixDisplayAndroid object - * @display: Pointer to Android specific display - * @returns: Common Video Error Return Codes - * - * Set Display - */ -MIX_RESULT mix_displayandroid_set_display ( - MixDisplayAndroid * obj, void * display); - -/** - * mix_displayandroid_get_display: - * @obj: #MixDisplayAndroid object - * @display: Pointer to pointer of Android specific display - * @returns: Common Video Error Return Codes - * - * Get Display - */ -MIX_RESULT mix_displayandroid_get_display ( - MixDisplayAndroid * obj, void ** dislay); + /* Class Methods */ + + + /** + * mix_displayandroid_set_display: + * @obj: #MixDisplayAndroid object + * @display: Pointer to Android specific display + * @returns: Common Video Error Return Codes + * + * Set Display + */ + MIX_RESULT mix_displayandroid_set_display ( + MixDisplayAndroid * obj, void * display); + + /** + * mix_displayandroid_get_display: + * @obj: #MixDisplayAndroid object + * @display: Pointer to pointer of Android specific display + * @returns: Common Video Error Return Codes + * + * Get Display + */ + MIX_RESULT mix_displayandroid_get_display ( + MixDisplayAndroid * obj, void ** display); #endif /* ANDROID */ diff --git a/mix_video/src/mixdisplayx11.cpp b/mix_video/src/mixdisplayx11.cpp index 43839ab..23fb8c0 100644 --- a/mix_video/src/mixdisplayx11.cpp +++ b/mix_video/src/mixdisplayx11.cpp @@ -8,10 +8,10 @@ /** * SECTION:mixdisplayx11 - * @short_description: MI-X Video X11 Display + * @short_description: MI-X Video X11 Display * * A data object which stores X11 specific parameters. - * + * * * Data Structures Used in MixDisplayX11 Fields: * See X11/Xlib.h for Display and Drawable definitions.
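A minimal caller-side sketch of how this X11 display object is meant to be populated (illustrative only: the X connection and window come from the caller, the helper name is hypothetical, and error handling is reduced to return-code checks):

    #include <X11/Xlib.h>
    #include "mixdisplayx11.h"

    /* Hypothetical helper: wraps an existing X Display/Window pair in a
     * MixDisplayX11 for mix_video_initialize()-style consumers. */
    static MIX_RESULT wrap_x11_target(Display *xdpy, Window win, MixDisplayX11 **out) {
        MixDisplayX11 *disp = mix_displayx11_new();
        if (disp == NULL)
            return MIX_RESULT_FAIL;
        /* The setters only store the pointer/ID (see the TODOs about cloning). */
        MIX_RESULT ret = mix_displayx11_set_display(disp, xdpy);
        if (ret == MIX_RESULT_SUCCESS)
            ret = mix_displayx11_set_drawable(disp, (Drawable)win);
        if (ret != MIX_RESULT_SUCCESS) {
            mix_displayx11_unref(disp);
            return ret;
        }
        *out = disp;
        return MIX_RESULT_SUCCESS;
    }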
@@ -20,63 +20,63 @@ #include "mixdisplayx11.h" -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } +#define SAFE_FREE(p) if(p) { free(p); p = NULL; } static GType _mix_displayx11_type = 0; static MixDisplayClass *parent_class = NULL; #define _do_init { _mix_displayx11_type = g_define_type_id; } -gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src); +bool mix_displayx11_copy(MixDisplay * target, const MixDisplay * src); MixDisplay *mix_displayx11_dup(const MixDisplay * obj); -gboolean mix_displayx11_equal(MixDisplay * first, MixDisplay * second); +bool mix_displayx11_equal(MixDisplay * first, MixDisplay * second); static void mix_displayx11_finalize(MixDisplay * obj); G_DEFINE_TYPE_WITH_CODE (MixDisplayX11, mix_displayx11, - MIX_TYPE_DISPLAY, _do_init); + MIX_TYPE_DISPLAY, _do_init); static void mix_displayx11_init(MixDisplayX11 * self) { - /* Initialize member varibles */ - self->display = NULL; - self->drawable = 0; + /* Initialize member variables */ + self->display = NULL; + self->drawable = 0; } static void mix_displayx11_class_init(MixDisplayX11Class * klass) { - MixDisplayClass *mixdisplay_class = MIX_DISPLAY_CLASS(klass); + MixDisplayClass *mixdisplay_class = MIX_DISPLAY_CLASS(klass); - /* setup static parent class */ - parent_class = (MixDisplayClass *) g_type_class_peek_parent(klass); + /* setup static parent class */ + parent_class = (MixDisplayClass *) g_type_class_peek_parent(klass); - mixdisplay_class->finalize = mix_displayx11_finalize; - mixdisplay_class->copy = (MixDisplayCopyFunction) mix_displayx11_copy; - mixdisplay_class->dup = (MixDisplayDupFunction) mix_displayx11_dup; - mixdisplay_class->equal = (MixDisplayEqualFunction) mix_displayx11_equal; + mixdisplay_class->finalize = mix_displayx11_finalize; + mixdisplay_class->copy = (MixDisplayCopyFunction) mix_displayx11_copy; + mixdisplay_class->dup = (MixDisplayDupFunction) mix_displayx11_dup; + mixdisplay_class->equal = (MixDisplayEqualFunction) mix_displayx11_equal; } MixDisplayX11 * mix_displayx11_new(void) { - MixDisplayX11 *ret = (MixDisplayX11 *) g_type_create_instance( - MIX_TYPE_DISPLAYX11); + MixDisplayX11 *ret = (MixDisplayX11 *) g_type_create_instance( + MIX_TYPE_DISPLAYX11); - return ret; + return ret; } void mix_displayx11_finalize(MixDisplay * obj) { - /* clean up here. */ - /* MixDisplayX11 *self = MIX_DISPLAYX11 (obj); */ + /* clean up here.
*/ + /* MixDisplayX11 *self = MIX_DISPLAYX11 (obj); */ - /* NOTE: we don't need to do anything - * with display and drawable */ + /* NOTE: we don't need to do anything + * with display and drawable */ - /* Chain up parent */ - if (parent_class->finalize) - parent_class->finalize(obj); + /* Chain up parent */ + if (parent_class->finalize) + parent_class->finalize(obj); } MixDisplayX11 * mix_displayx11_ref(MixDisplayX11 * mix) { - return (MixDisplayX11 *) mix_display_ref(MIX_DISPLAY(mix)); + return (MixDisplayX11 *) mix_display_ref(MIX_DISPLAY(mix)); } /** @@ -88,17 +88,17 @@ mix_displayx11_ref(MixDisplayX11 * mix) { */ MixDisplay * mix_displayx11_dup(const MixDisplay * obj) { - MixDisplay *ret = NULL; - - if (MIX_IS_DISPLAYX11(obj)) { - MixDisplayX11 *duplicate = mix_displayx11_new(); - if (mix_displayx11_copy(MIX_DISPLAY(duplicate), MIX_DISPLAY(obj))) { - ret = MIX_DISPLAY(duplicate); - } else { - mix_displayx11_unref(duplicate); - } - } - return ret; + MixDisplay *ret = NULL; + + if (MIX_IS_DISPLAYX11(obj)) { + MixDisplayX11 *duplicate = mix_displayx11_new(); + if (mix_displayx11_copy(MIX_DISPLAY(duplicate), MIX_DISPLAY(obj))) { + ret = MIX_DISPLAY(duplicate); + } else { + mix_displayx11_unref(duplicate); + } + } + return ret; } /** @@ -109,28 +109,28 @@ mix_displayx11_dup(const MixDisplay * obj) { * * Copy instance data from @src to @target. */ -gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src) { - MixDisplayX11 *this_target, *this_src; - - if (MIX_IS_DISPLAYX11(target) && MIX_IS_DISPLAYX11(src)) { - // Cast the base object to this child object - this_target = MIX_DISPLAYX11(target); - this_src = MIX_DISPLAYX11(src); - - // Copy properties from source to target. - - this_target->display = this_src->display; - this_target->drawable = this_src->drawable; - - // Now chainup base class - if (parent_class->copy) { - return parent_class->copy(MIX_DISPLAY_CAST(target), - MIX_DISPLAY_CAST(src)); - } else { - return TRUE; - } - } - return FALSE; +bool mix_displayx11_copy(MixDisplay * target, const MixDisplay * src) { + MixDisplayX11 *this_target, *this_src; + + if (MIX_IS_DISPLAYX11(target) && MIX_IS_DISPLAYX11(src)) { + // Cast the base object to this child object + this_target = MIX_DISPLAYX11(target); + this_src = MIX_DISPLAYX11(src); + + // Copy properties from source to target. + + this_target->display = this_src->display; + this_target->drawable = this_src->drawable; + + // Now chainup base class + if (parent_class->copy) { + return parent_class->copy(MIX_DISPLAY_CAST(target), + MIX_DISPLAY_CAST(src)); + } else { + return TRUE; + } + } + return FALSE; } /** @@ -141,70 +141,70 @@ gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src) { * * Copy instance data from @src to @target. */ -gboolean mix_displayx11_equal(MixDisplay * first, MixDisplay * second) { - gboolean ret = FALSE; - - MixDisplayX11 *this_first, *this_second; - - this_first = MIX_DISPLAYX11(first); - this_second = MIX_DISPLAYX11(second); - - if (MIX_IS_DISPLAYX11(first) && MIX_IS_DISPLAYX11(second)) { - // Compare member variables - - // TODO: if in the copy method we just copy the pointer of display, the comparison - // below is enough. But we need to decide how to copy! - - if (this_first->display == this_second->display && this_first->drawable - == this_second->drawable) { - // members within this scope equal. chaining up. 
- MixDisplayClass *klass = MIX_DISPLAY_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - } - } - return ret; +bool mix_displayx11_equal(MixDisplay * first, MixDisplay * second) { + bool ret = FALSE; + + MixDisplayX11 *this_first, *this_second; + + this_first = MIX_DISPLAYX11(first); + this_second = MIX_DISPLAYX11(second); + + if (MIX_IS_DISPLAYX11(first) && MIX_IS_DISPLAYX11(second)) { + // Compare member variables + + // TODO: if in the copy method we just copy the pointer of display, the comparison + // below is enough. But we need to decide how to copy! + + if (this_first->display == this_second->display && this_first->drawable + == this_second->drawable) { + // members within this scope equal. chaining up. + MixDisplayClass *klass = MIX_DISPLAY_CLASS(parent_class); + if (klass->equal) + ret = parent_class->equal(first, second); + else + ret = TRUE; + } + } + return ret; } #define MIX_DISPLAYX11_SETTER_CHECK_INPUT(obj) \ if(!obj) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_DISPLAYX11(obj)) return MIX_RESULT_FAIL; \ - + #define MIX_DISPLAYX11_GETTER_CHECK_INPUT(obj, prop) \ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_DISPLAYX11(obj)) return MIX_RESULT_FAIL; \ - + MIX_RESULT mix_displayx11_set_display(MixDisplayX11 * obj, Display * display) { - MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj); + MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj); - // TODO: needs to decide to clone or just copy pointer - obj->display = display; - return MIX_RESULT_SUCCESS; + // TODO: needs to decide to clone or just copy pointer + obj->display = display; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_displayx11_get_display(MixDisplayX11 * obj, Display ** display) { - MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, display); + MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, display); - // TODO: needs to decide to clone or just copy pointer - *display = obj->display; + // TODO: needs to decide to clone or just copy pointer + *display = obj->display; - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_displayx11_set_drawable(MixDisplayX11 * obj, Drawable drawable) { - MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj); + MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj); - // TODO: needs to decide to clone or just copy pointer - obj->drawable = drawable; - return MIX_RESULT_SUCCESS; + // TODO: needs to decide to clone or just copy pointer + obj->drawable = drawable; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_displayx11_get_drawable(MixDisplayX11 * obj, Drawable * drawable) { - MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, drawable); + MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, drawable); - // TODO: needs to decide to clone or just copy pointer - *drawable = obj->drawable; - return MIX_RESULT_SUCCESS; + // TODO: needs to decide to clone or just copy pointer + *drawable = obj->drawable; + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixdisplayx11.h b/mix_video/src/mixdisplayx11.h index 3b51f5e..a3fe183 100644 --- a/mix_video/src/mixdisplayx11.h +++ b/mix_video/src/mixdisplayx11.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. 
The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -17,7 +17,7 @@ G_BEGIN_DECLS /** * MIX_TYPE_DISPLAYX11: -* +* * Get type of class. */ #define MIX_TYPE_DISPLAYX11 (mix_displayx11_get_type ()) @@ -31,7 +31,7 @@ G_BEGIN_DECLS /** * MIX_IS_DISPLAYX11: * @obj: an object. -* +* * Checks if the given object is an instance of #MixDisplay */ #define MIX_IS_DISPLAYX11(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DISPLAYX11)) @@ -45,7 +45,7 @@ G_BEGIN_DECLS /** * MIX_IS_DISPLAYX11_CLASS: * @klass: a class. -* +* * Checks if the given class is #MixDisplayClass */ #define MIX_IS_DISPLAYX11_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DISPLAYX11)) @@ -53,7 +53,7 @@ G_BEGIN_DECLS /** * MIX_DISPLAYX11_GET_CLASS: * @obj: a #MixDisplay object. -* +* * Get the class instance of the object. */ #define MIX_DISPLAYX11_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DISPLAYX11, MixDisplayX11Class)) @@ -68,38 +68,38 @@ typedef struct _MixDisplayX11Class MixDisplayX11Class; */ struct _MixDisplayX11 { - /*< public > */ - MixDisplay parent; - - /*< public > */ - - /* Pointer to a X Window Display structure */ - Display *display; - - /* An X Window Drawable that is either a Window - * or a Pixmap. This field is not used in - * mix_video_initialize(). - * See X11/Xlib.h for Display and Drawable definitions.*/ - Drawable drawable; + /*< public > */ + MixDisplay parent; + + /*< public > */ + + /* Pointer to a X Window Display structure */ + Display *display; + + /* An X Window Drawable that is either a Window + * or a Pixmap. This field is not used in + * mix_video_initialize(). + * See X11/Xlib.h for Display and Drawable definitions.*/ + Drawable drawable; }; /** * MixDisplayX11Class: -* +* * MI-X VideoInit object class */ struct _MixDisplayX11Class { - /*< public > */ - MixDisplayClass parent_class; + /*< public > */ + MixDisplayClass parent_class; - /* class members */ + /* class members */ }; /** * mix_displayx11_get_type: * @returns: type -* +* * Get the type of object. */ GType mix_displayx11_get_type (void); @@ -107,7 +107,7 @@ GType mix_displayx11_get_type (void); /** * mix_displayx11_new: * @returns: A newly allocated instance of #MixDisplayX11 -* +* * Use this method to create new instance of #MixDisplayX11 */ MixDisplayX11 *mix_displayx11_new (void); @@ -115,7 +115,7 @@ MixDisplayX11 *mix_displayx11_new (void); * mix_displayx11_ref: * @mix: object to add reference * @returns: the #MixDisplayX11 instance where reference count has been increased. -* +* * Add reference count. */ MixDisplayX11 *mix_displayx11_ref (MixDisplayX11 * mix); @@ -123,7 +123,7 @@ MixDisplayX11 *mix_displayx11_ref (MixDisplayX11 * mix); /** * mix_displayx11_unref: * @obj: object to unref. -* +* * Decrement reference count of the object. 
*/ #define mix_displayx11_unref(obj) mix_display_unref(MIX_DISPLAY(obj)) @@ -134,46 +134,46 @@ MixDisplayX11 *mix_displayx11_ref (MixDisplayX11 * mix); /** * mix_displayx11_set_display: * @obj: #MixDisplayX11 object - * @display: Pointer to a X Window Display structure + * @display: Pointer to a X Window Display structure * @returns: Common Video Error Return Codes * - * Set Display + * Set Display */ MIX_RESULT mix_displayx11_set_display (MixDisplayX11 * obj, - Display * display); + Display * display); /** * mix_displayx11_get_display: * @obj: #MixDisplayX11 object - * @display: Pointer to pointer of X Window Display structure + * @display: Pointer to pointer of X Window Display structure * @returns: Common Video Error Return Codes * - * Get Display + * Get Display */ MIX_RESULT mix_displayx11_get_display (MixDisplayX11 * obj, - Display ** dislay); + Display ** display); /** * mix_displayx11_set_drawable: * @obj: #MixDisplayX11 object - * @drawable: An X Window Drawable that is either a Window or a Pixmap. + * @drawable: An X Window Drawable that is either a Window or a Pixmap. * @returns: Common Video Error Return Codes * - * Set drawable + * Set drawable */ MIX_RESULT mix_displayx11_set_drawable (MixDisplayX11 * obj, - Drawable drawable); + Drawable drawable); /** * mix_displayx11_get_drawable: * @obj: #MixDisplayX11 object - * @drawable: An X Window Drawable that is either a Window or a Pixmap to be returned. + * @drawable: An X Window Drawable that is either a Window or a Pixmap to be returned. * @returns: Common Video Error Return Codes * - * Get drawable + * Get drawable */ MIX_RESULT mix_displayx11_get_drawable (MixDisplayX11 * obj, - Drawable * drawable); + Drawable * drawable); G_END_DECLS diff --git a/mix_video/src/mixframemanager.cpp b/mix_video/src/mixframemanager.cpp index 0ec8075..31b84fc 100644 --- a/mix_video/src/mixframemanager.cpp +++ b/mix_video/src/mixframemanager.cpp @@ -5,7 +5,7 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#include + #include "mixvideolog.h" #include "mixframemanager.h" @@ -13,325 +13,324 @@ #define INITIAL_FRAME_ARRAY_SIZE 16 -// Assume only one backward reference is used. This will hold up to 2 frames before forcing +// Assume only one backward reference is used. This will hold up to 2 frames before forcing // the earliest frame out of queue.
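Before the constants below, a worked example of the timing units involved (a standalone sketch; the helper name is illustrative, only the arithmetic mirrors this file):

    #include <stdint.h>

    /* MIX_SECOND below is one second expressed in nanoseconds (1000000 * 1000).
     * For a 29.97 fps stream (framerate 30000/1001) the per-frame gap is
     * 1001 * 1000000000 / 30000 = 33366666 ns, i.e. ~33.4 ms; the
     * timestamp-based dequeue later uses a quarter of this (~8.3 ms) as its
     * matching tolerance. */
    static uint64_t frame_delta_ns(int framerate_numerator, int framerate_denominator) {
        const uint64_t kMixSecondNs = 1000000ULL * 1000ULL;
        return (uint64_t)framerate_denominator * kMixSecondNs
               / (uint64_t)framerate_numerator;
    }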
#define MIX_MAX_ENQUEUE_SIZE 2 // RTP timestamp is 32-bit long and could be rollover in 13 hours (based on 90K Hz clock) #define TS_ROLLOVER_THRESHOLD (0xFFFFFFFF/2) -#define MIX_SECOND (G_USEC_PER_SEC * G_GINT64_CONSTANT (1000)) + +#define MIX_SECOND (1000000 * INT64_CONSTANT (1000)) MixFrameManager::~MixFrameManager() { - /* cleanup here */ - mix_framemanager_deinitialize(this); + /* cleanup here */ + mix_framemanager_deinitialize(this); } MixFrameManager::MixFrameManager() - :initialized(FALSE) - ,flushing(FALSE) - ,eos(FALSE) - ,mLock() - ,frame_list(NULL) - ,framerate_numerator(30) - ,framerate_denominator(1) - ,frame_timestamp_delta(0) - ,mode(MIX_DISPLAY_ORDER_UNKNOWN) - ,is_first_frame(TRUE) - ,last_frame_timestamp(0) - ,next_frame_timestamp(0) - ,next_frame_picnumber(0) - ,max_enqueue_size(MIX_MAX_ENQUEUE_SIZE) - ,max_picture_number((guint32)-1) - ,ref_count(1) { + :initialized(FALSE) + ,flushing(FALSE) + ,eos(FALSE) + ,mLock() + ,frame_list(NULL) + ,framerate_numerator(30) + ,framerate_denominator(1) + ,frame_timestamp_delta(0) + ,mode(MIX_DISPLAY_ORDER_UNKNOWN) + ,is_first_frame(TRUE) + ,last_frame_timestamp(0) + ,next_frame_timestamp(0) + ,next_frame_picnumber(0) + ,max_enqueue_size(MIX_MAX_ENQUEUE_SIZE) + ,max_picture_number((uint32)-1) + ,ref_count(1) { } MixFrameManager *mix_framemanager_new(void) { - return new MixFrameManager(); + return new MixFrameManager(); } MixFrameManager *mix_framemanager_ref(MixFrameManager * fm) { - if (NULL != fm) - fm->ref_count++; - return fm; + if (NULL != fm) + fm->ref_count++; + return fm; } /* MixFrameManager class methods */ -MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, - MixDisplayOrderMode mode, gint framerate_numerator, - gint framerate_denominator) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; +MIX_RESULT mix_framemanager_initialize( + MixFrameManager *fm, MixDisplayOrderMode mode, + int framerate_numerator, int framerate_denominator) { - if (fm->initialized) { - return MIX_RESULT_ALREADY_INIT; - } + MIX_RESULT ret = MIX_RESULT_SUCCESS; - fm->frame_list = NULL; - fm->framerate_numerator = framerate_numerator; - fm->framerate_denominator = framerate_denominator; - fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND - / fm->framerate_numerator; - - fm->mode = mode; + if (fm->initialized) { + return MIX_RESULT_ALREADY_INIT; + } - LOG_V("fm->mode = %d\n", fm->mode); + fm->frame_list = NULL; + fm->framerate_numerator = framerate_numerator; + fm->framerate_denominator = framerate_denominator; + fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND + / fm->framerate_numerator; - fm->is_first_frame = TRUE; - fm->next_frame_timestamp = 0; - fm->last_frame_timestamp = 0; - fm->next_frame_picnumber = 0; + fm->mode = mode; - fm->initialized = TRUE; + LOG_V("fm->mode = %d\n", fm->mode); + fm->is_first_frame = TRUE; + fm->next_frame_timestamp = 0; + fm->last_frame_timestamp = 0; + fm->next_frame_picnumber = 0; + fm->initialized = TRUE; cleanup: - return ret; + return ret; } MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm) { - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } - mix_framemanager_flush(fm); + mix_framemanager_flush(fm); - fm->mLock.lock(); + fm->mLock.lock(); - fm->initialized = FALSE; + fm->initialized = FALSE; - fm->mLock.unlock(); + fm->mLock.unlock(); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } -MixFrameManager* mix_framemanager_unref(MixFrameManager *fm){ - if (NULL != fm) { - fm->ref_count--; - 
if (0 == fm->ref_count) { - delete fm; - return NULL; - } - } - return fm; +MixFrameManager* mix_framemanager_unref(MixFrameManager *fm) { + if (NULL != fm) { + fm->ref_count--; + if (0 == fm->ref_count) { + delete fm; + return NULL; + } + } + return fm; } -MIX_RESULT mix_framemanager_set_framerate(MixFrameManager *fm, - gint framerate_numerator, gint framerate_denominator) { - - if (framerate_numerator <= 0 || framerate_denominator <= 0) { - return MIX_RESULT_INVALID_PARAM; - } - fm->mLock.lock(); - fm->framerate_numerator = framerate_numerator; - fm->framerate_denominator = framerate_denominator; - fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND - / fm->framerate_numerator; - fm->mLock.unlock(); - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_framemanager_set_framerate( + MixFrameManager *fm, int framerate_numerator, int framerate_denominator) { + + if (framerate_numerator <= 0 || framerate_denominator <= 0) { + return MIX_RESULT_INVALID_PARAM; + } + fm->mLock.lock(); + fm->framerate_numerator = framerate_numerator; + fm->framerate_denominator = framerate_denominator; + fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND + / fm->framerate_numerator; + fm->mLock.unlock(); + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_get_framerate(MixFrameManager *fm, - gint *framerate_numerator, gint *framerate_denominator) { - if (!framerate_numerator || !framerate_denominator) { - return MIX_RESULT_INVALID_PARAM; - } - fm->mLock.lock(); - *framerate_numerator = fm->framerate_numerator; - *framerate_denominator = fm->framerate_denominator; - fm->mLock.unlock(); - return MIX_RESULT_SUCCESS; + int *framerate_numerator, int *framerate_denominator) { + if (!framerate_numerator || !framerate_denominator) { + return MIX_RESULT_INVALID_PARAM; + } + fm->mLock.lock(); + *framerate_numerator = fm->framerate_numerator; + *framerate_denominator = fm->framerate_denominator; + fm->mLock.unlock(); + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_get_display_order_mode( - MixFrameManager *fm, MixDisplayOrderMode *mode) { - if (!mode) { - return MIX_RESULT_INVALID_PARAM; - } - /* no need to use lock */ - *mode = fm->mode; - return MIX_RESULT_SUCCESS; + MixFrameManager *fm, MixDisplayOrderMode *mode) { + if (!mode) { + return MIX_RESULT_INVALID_PARAM; + } + /* no need to use lock */ + *mode = fm->mode; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_set_max_enqueue_size( - MixFrameManager *fm, gint size) { - if (size <= 0) { - return MIX_RESULT_FAIL; - } - fm->mLock.lock(); - fm->max_enqueue_size = size; - LOG_V("max enqueue size is %d\n", size); - fm->mLock.unlock(); - return MIX_RESULT_SUCCESS; + MixFrameManager *fm, int size) { + if (size <= 0) { + return MIX_RESULT_FAIL; + } + fm->mLock.lock(); + fm->max_enqueue_size = size; + LOG_V("max enqueue size is %d\n", size); + fm->mLock.unlock(); + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_set_max_picture_number( - MixFrameManager *fm, guint32 num) { - // NOTE: set maximum picture order number only if pic_order_cnt_type is 0 (see H.264 spec) - if (num < 16) { - // Refer to H.264 spec: log2_max_pic_order_cnt_lsb_minus4. Max pic order will never be less than 16. - return MIX_RESULT_INVALID_PARAM; - } - fm->mLock.lock(); - // max_picture_number is exclusie (range from 0 to num - 1). 
- // Note that this number may not be reliable if encoder does not conform to the spec, as of this, the - // implementaion will not automatically roll-over fm->next_frame_picnumber when it reaches - // fm->max_picture_number. - fm->max_picture_number = num; - LOG_V("max picture number is %d\n", num); - - fm->mLock.unlock(); - return MIX_RESULT_SUCCESS; + MixFrameManager *fm, uint32 num) { + // NOTE: set maximum picture order number only if pic_order_cnt_type is 0 (see H.264 spec) + if (num < 16) { + // Refer to H.264 spec: log2_max_pic_order_cnt_lsb_minus4. Max pic order will never be less than 16. + return MIX_RESULT_INVALID_PARAM; + } + fm->mLock.lock(); + // max_picture_number is exclusive (range from 0 to num - 1). + // Note that this number may not be reliable if the encoder does not conform to the spec; in that case the + // implementation will not automatically roll over fm->next_frame_picnumber when it reaches + // fm->max_picture_number. + fm->max_picture_number = num; + LOG_V("max picture number is %d\n", num); + + fm->mLock.unlock(); + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) { - MixVideoFrame *frame = NULL; - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } - - fm->mLock.lock(); - while (fm->frame_list) { - frame = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0); - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)frame); - mix_videoframe_unref(frame); - LOG_V("one frame is flushed\n"); - }; - - fm->eos = FALSE; - fm->is_first_frame = TRUE; - fm->next_frame_timestamp = 0; - fm->last_frame_timestamp = 0; - fm->next_frame_picnumber = 0; - - fm->mLock.unlock(); - return MIX_RESULT_SUCCESS; + MixVideoFrame *frame = NULL; + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } + + fm->mLock.lock(); + while (fm->frame_list) { + frame = (MixVideoFrame*) j_slist_nth_data(fm->frame_list, 0); + fm->frame_list = j_slist_remove(fm->frame_list, frame); + mix_videoframe_unref(frame); + LOG_V("one frame is flushed\n"); + }; + + fm->eos = FALSE; + fm->is_first_frame = TRUE; + fm->next_frame_timestamp = 0; + fm->last_frame_timestamp = 0; + fm->next_frame_picnumber = 0; + + fm->mLock.unlock(); + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V("Begin fm->mode = %d\n", fm->mode); + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V("Begin fm->mode = %d\n", fm->mode); - if (!mvf) { - return MIX_RESULT_INVALID_PARAM; - } + if (!mvf) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } - gboolean discontinuity = FALSE; - mix_videoframe_get_discontinuity(mvf, &discontinuity); - if (discontinuity) - { - LOG_V("current frame has discontinuity!\n"); - mix_framemanager_flush(fm); - } + bool discontinuity = FALSE; + mix_videoframe_get_discontinuity(mvf, &discontinuity); + if (discontinuity) + { + LOG_V("current frame has discontinuity!\n"); + mix_framemanager_flush(fm); + } #ifdef MIX_LOG_ENABLE - if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER) { - guint32 num; - mix_videoframe_get_displayorder(mvf, &num); - LOG_V("pic %d is enqueued.\n", num); - } - - if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP) { - guint64 ts; - mix_videoframe_get_timestamp(mvf, &ts); - LOG_V("ts %"G_GINT64_FORMAT" is enqueued.\n", ts); - } + if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER) { + uint32 num; + mix_videoframe_get_displayorder(mvf, &num); + LOG_V("pic %d is
enqueued.\n", num); + } + + if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP) { + uint64 ts; + mix_videoframe_get_timestamp(mvf, &ts); + LOG_V("ts %"UINT64_FORMAT" is enqueued.\n", ts); + } #endif - fm->mLock.lock(); - fm->frame_list = g_slist_append(fm->frame_list, (gpointer)mvf); - fm->mLock.unlock(); - LOG_V("End\n"); - return ret; + fm->mLock.lock(); + fm->frame_list = j_slist_append(fm->frame_list, (void*)mvf); + fm->mLock.unlock(); + LOG_V("End\n"); + return ret; } -void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) +void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) { // this function finds the lowest time stamp in the list and assign it to the dequeued video frame, // if that timestamp is smaller than the timestamp of dequeued video frame. int i; - guint64 ts = 0, min_ts = 0; + uint64 ts = 0, min_ts = 0; MixVideoFrame *p = NULL, *min_p = NULL; - int len = g_slist_length(fm->frame_list); + int len = j_slist_length(fm->frame_list); if (len == 0) { // nothing to update return; } - + // find video frame with the smallest timestamp, take rollover into account when // comparing timestamp. for (i = 0; i < len; i++) { - p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); + p = (MixVideoFrame*)j_slist_nth_data(fm->frame_list, i); mix_videoframe_get_timestamp(p, &ts); if (i == 0 || - (ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) || - (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) + (ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) || + (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) { min_ts = ts; min_p = p; - } + } } mix_videoframe_get_timestamp(mvf, &ts); if ((ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) || - (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) + (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) { // frame to be updated has smaller time stamp - } + } else { // time stamp needs to be monotonically non-decreasing so swap timestamp. mix_videoframe_set_timestamp(mvf, min_ts); mix_videoframe_set_timestamp(min_p, ts); - LOG_V("timestamp for current frame is updated from %"G_GINT64_FORMAT" to %"G_GINT64_FORMAT"\n", - ts, min_ts); + LOG_V("timestamp for current frame is updated from %"UINT64_FORMAT" to %"UINT64_FORMAT"\n", + ts, min_ts); } } -MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) +MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { int i, num_i_or_p; MixVideoFrame *p, *first_i_or_p; MixFrameType type; - int len = g_slist_length(fm->frame_list); + int len = j_slist_length(fm->frame_list); num_i_or_p = 0; first_i_or_p = NULL; - + for (i = 0; i < len; i++) { - p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); + p = (MixVideoFrame*)j_slist_nth_data(fm->frame_list, i); mix_videoframe_get_frame_type(p, &type); if (type == TYPE_B) { // B frame has higher display priority as only one reference frame is kept in the list // and it should be backward reference frame for B frame. 
- fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); + fm->frame_list = j_slist_remove(fm->frame_list, p); mix_framemanager_update_timestamp(fm, p); *mvf = p; LOG_V("B frame is dequeued.\n"); return MIX_RESULT_SUCCESS; - } - + } + if (type != TYPE_I && type != TYPE_P) { - // this should never happen + // this should never happen LOG_E("Frame typs is invalid!!!\n"); - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); + fm->frame_list = j_slist_remove(fm->frame_list, p); mix_videoframe_unref(p); - return MIX_RESULT_FRAME_NOTAVAIL; + return MIX_RESULT_FRAME_NOTAVAIL; } num_i_or_p++; if (first_i_or_p == NULL) @@ -340,7 +339,7 @@ MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoF } } - // if there are more than one reference frame in the list, the first one is dequeued. + // if there are more than one reference frame in the list, the first one is dequeued. if (num_i_or_p > 1 || fm->eos) { if (first_i_or_p == NULL) @@ -349,7 +348,7 @@ MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoF LOG_E("first_i_or_p frame is NULL!\n"); return MIX_RESULT_FAIL; } - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)first_i_or_p); + fm->frame_list = j_slist_remove(fm->frame_list, first_i_or_p); mix_framemanager_update_timestamp(fm, first_i_or_p); *mvf = first_i_or_p; #ifdef MIX_LOG_ENABLE @@ -361,90 +360,90 @@ else { LOG_V("P frame is dequeued.\n"); - } + } #endif - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } - - return MIX_RESULT_FRAME_NOTAVAIL; + + return MIX_RESULT_FRAME_NOTAVAIL; } -MIX_RESULT mix_framemanager_timestamp_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) +MIX_RESULT mix_framemanager_timestamp_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { int i, len; MixVideoFrame *p, *p_out_of_dated; - guint64 ts, ts_next_pending, ts_out_of_dated; - guint64 tolerance = fm->frame_timestamp_delta/4; + uint64 ts, ts_next_pending, ts_out_of_dated; + uint64 tolerance = fm->frame_timestamp_delta/4; -retry: +retry: // len may be changed during retry! - len = g_slist_length(fm->frame_list); - ts_next_pending = (guint64)-1; + len = j_slist_length(fm->frame_list); + ts_next_pending = (uint64)-1; ts_out_of_dated = 0; p_out_of_dated = NULL; - - + + for (i = 0; i < len; i++) { - p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); + p = (MixVideoFrame*)j_slist_nth_data(fm->frame_list, i); mix_videoframe_get_timestamp(p, &ts); - if (ts >= fm->last_frame_timestamp && - ts <= fm->next_frame_timestamp + tolerance) + if (ts >= fm->last_frame_timestamp && + ts <= fm->next_frame_timestamp + tolerance) { - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); + fm->frame_list = j_slist_remove(fm->frame_list, p); *mvf = p; mix_videoframe_get_timestamp(p, &(fm->last_frame_timestamp)); - fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; - LOG_V("frame is dequeud, ts = %"G_GINT64_FORMAT".\n", ts); + fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; + LOG_V("frame is dequeued, ts = %"UINT64_FORMAT".\n", ts); return MIX_RESULT_SUCCESS; } if (ts > fm->next_frame_timestamp + tolerance && - ts < ts_next_pending) + ts < ts_next_pending) { ts_next_pending = ts; } - if (ts < fm->last_frame_timestamp && - ts >= ts_out_of_dated) + if (ts < fm->last_frame_timestamp && + ts >= ts_out_of_dated) { // video frame that is most recently out-of-dated.
// this may happen in variable frame rate scenario where two adjacent frames both meet // the "next frame" criteria, and the one with larger timestamp is dequeued first. ts_out_of_dated = ts; p_out_of_dated = p; - } + } } - if (p_out_of_dated && - fm->last_frame_timestamp - ts_out_of_dated < TS_ROLLOVER_THRESHOLD) + if (p_out_of_dated && + fm->last_frame_timestamp - ts_out_of_dated < TS_ROLLOVER_THRESHOLD) { - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p_out_of_dated); + fm->frame_list = j_slist_remove(fm->frame_list, p_out_of_dated); mix_videoframe_unref(p_out_of_dated); - LOG_W("video frame is out of dated. ts = %"G_GINT64_FORMAT" compared to last ts = %"G_GINT64_FORMAT".\n", - ts_out_of_dated, fm->last_frame_timestamp); + LOG_W("video frame is out of dated. ts = %"UINT64_FORMAT" compared to last ts = %"UINT64_FORMAT".\n", + ts_out_of_dated, fm->last_frame_timestamp); return MIX_RESULT_FRAME_NOTAVAIL; } - + if (len <= fm->max_enqueue_size && fm->eos == FALSE) { - LOG_V("no frame is dequeued, expected ts = %"G_GINT64_FORMAT", next pending ts = %"G_GINT64_FORMAT".(List size = %d)\n", - fm->next_frame_timestamp, ts_next_pending, len); + LOG_V("no frame is dequeued, expected ts = %"UINT64_FORMAT", next pending ts = %"UINT64_FORMAT".(List size = %d)\n", + fm->next_frame_timestamp, ts_next_pending, len); return MIX_RESULT_FRAME_NOTAVAIL; } // timestamp has gap - if (ts_next_pending != -1) + if (ts_next_pending != (uint64)-1) { - LOG_V("timestamp has gap, jumping from %"G_GINT64_FORMAT" to %"G_GINT64_FORMAT".\n", - fm->next_frame_timestamp, ts_next_pending); - + LOG_V("timestamp has gap, jumping from %"UINT64_FORMAT" to %"UINT64_FORMAT".\n", + fm->next_frame_timestamp, ts_next_pending); + fm->next_frame_timestamp = ts_next_pending; goto retry; } // time stamp roll-over - LOG_V("time stamp is rolled over, resetting next frame timestamp from %"G_GINT64_FORMAT" to 0.\n", - fm->next_frame_timestamp); + LOG_V("time stamp is rolled over, resetting next frame timestamp from %"UINT64_FORMAT" to 0.\n", + fm->next_frame_timestamp); fm->next_frame_timestamp = 0; fm->last_frame_timestamp = 0; @@ -455,72 +454,72 @@ retry: return MIX_RESULT_FAIL; } -MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) +MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { int i, len; MixVideoFrame* p; - guint32 picnum; - guint32 next_picnum_pending; + uint32 picnum; + uint32 next_picnum_pending; - len = g_slist_length(fm->frame_list); + len = j_slist_length(fm->frame_list); + +retry: + next_picnum_pending = (uint32)-1; -retry: - next_picnum_pending = (guint32)-1; - for (i = 0; i < len; i++) { - p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i); + p = (MixVideoFrame*)j_slist_nth_data(fm->frame_list, i); mix_videoframe_get_displayorder(p, &picnum); if (picnum == fm->next_frame_picnumber) { - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p); + fm->frame_list = j_slist_remove(fm->frame_list, p); mix_framemanager_update_timestamp(fm, p); - *mvf = p; + *mvf = p; LOG_V("frame is dequeued, poc = %d.\n", fm->next_frame_picnumber); fm->next_frame_picnumber++; //if (fm->next_frame_picnumber == fm->max_picture_number) - // fm->next_frame_picnumber = 0; + // fm->next_frame_picnumber = 0; return MIX_RESULT_SUCCESS; } if (picnum > fm->next_frame_picnumber && - picnum < next_picnum_pending) + picnum < next_picnum_pending) { next_picnum_pending = picnum; } if (picnum < fm->next_frame_picnumber && - 
fm->next_frame_picnumber - picnum < 8) + fm->next_frame_picnumber - picnum < 8) { - // the smallest value of "max_pic_order_cnt_lsb_minus4" is 16. If the distance of "next frame pic number" + // the smallest value of MaxPicOrderCntLsb ("log2_max_pic_order_cnt_lsb_minus4" = 0) is 16. If the distance of "next frame pic number" // to the pic number in the list is less than half of 16, it is safe to assume that pic number // is reset when a new IDR is encoded. (where pic numbfer of top or bottom field must be 0, subclause 8.2.1). LOG_V("picture number is reset to %d, next pic number is %d, next pending number is %d.\n", - picnum, fm->next_frame_picnumber, next_picnum_pending); + picnum, fm->next_frame_picnumber, next_picnum_pending); break; } } - + if (len <= fm->max_enqueue_size && fm->eos == FALSE) { - LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. (List size = %d)\n", - fm->next_frame_picnumber, next_picnum_pending, len); + LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. (List size = %d)\n", + fm->next_frame_picnumber, next_picnum_pending, len); return MIX_RESULT_FRAME_NOTAVAIL; } // picture number has gap - if (next_picnum_pending != (guint32)-1) + if (next_picnum_pending != (uint32)-1) { LOG_V("picture number has gap, jumping from %d to %d.\n", - fm->next_frame_picnumber, next_picnum_pending); - + fm->next_frame_picnumber, next_picnum_pending); + fm->next_frame_picnumber = next_picnum_pending; goto retry; } // picture number roll-over - LOG_V("picture number is rolled over, resetting next picnum from %d to 0.\n", - fm->next_frame_picnumber); + LOG_V("picture number is rolled over, resetting next picnum from %d to 0.\n", - fm->next_frame_picnumber); + fm->next_frame_picnumber); fm->next_frame_picnumber = 0; goto retry; @@ -532,76 +531,76 @@ retry: MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; + MIX_RESULT ret = MIX_RESULT_SUCCESS; LOG_V("Begin\n"); - if (!mvf) { - return MIX_RESULT_INVALID_PARAM; - } + if (!mvf) { + return MIX_RESULT_INVALID_PARAM; + } - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } - fm->mLock.lock(); + fm->mLock.lock(); - if (fm->frame_list == NULL) - { - if (fm->eos) - { - LOG_V("No frame is dequeued (eos)!\n"); - ret = MIX_RESULT_EOS; + if (fm->frame_list == NULL) + { + if (fm->eos) + { + LOG_V("No frame is dequeued (eos)!\n"); + ret = MIX_RESULT_EOS; } else { LOG_V("No frame is dequeued as queue is empty!\n"); ret = MIX_RESULT_FRAME_NOTAVAIL; - } - } - else if (fm->is_first_frame) - { - // dequeue the first entry in the list. Not need to update the time stamp as - // the list should contain only one frame. -#ifdef MIX_LOG_ENABLE - if (g_slist_length(fm->frame_list) != 1) - { - LOG_W("length of list is not equal to 1 for the first frame.\n"); - } -#endif - *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0); - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf)); + } + } + else if (fm->is_first_frame) + { + // dequeue the first entry in the list. No need to update the time stamp as + // the list should contain only one frame.
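The picture-number reset check above compresses a piece of H.264 reasoning; a standalone sketch of the same test (the helper name is hypothetical, with uint32_t standing in for the library's uint32):

    #include <stdint.h>

    /* Illustrative only: mirrors the IDR-reset heuristic in
     * mix_framemanager_picnumber_based_dequeue(). MaxPicOrderCntLsb is at
     * least 16 (log2_max_pic_order_cnt_lsb_minus4 >= 0), so a backward jump
     * of fewer than 8 is read as a POC reset rather than a rollover. */
    static int poc_was_reset(uint32_t picnum, uint32_t next_expected) {
        return picnum < next_expected && next_expected - picnum < 8;
    }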
+#ifdef MIX_LOG_ENABLE + if (j_slist_length(fm->frame_list) != 1) + { + LOG_W("length of list is not equal to 1 for the first frame.\n"); + } +#endif + *mvf = (MixVideoFrame*) j_slist_nth_data(fm->frame_list, 0); + fm->frame_list = j_slist_remove(fm->frame_list, (*mvf)); if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP) - { - mix_videoframe_get_timestamp(*mvf, &(fm->last_frame_timestamp)); + { + mix_videoframe_get_timestamp(*mvf, &(fm->last_frame_timestamp)); fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta; - LOG_V("The first frame is dequeued, ts = %"G_GINT64_FORMAT"\n", fm->last_frame_timestamp); + LOG_V("The first frame is dequeued, ts = %"UINT64_FORMAT"\n", fm->last_frame_timestamp); } else if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER) - { + { mix_videoframe_get_displayorder(*mvf, &(fm->next_frame_picnumber)); LOG_V("The first frame is dequeued, POC = %d\n", fm->next_frame_picnumber); fm->next_frame_picnumber++; //if (fm->next_frame_picnumber == fm->max_picture_number) - // fm->next_frame_picnumber = 0; + // fm->next_frame_picnumber = 0; } else { -#ifdef MIX_LOG_ENABLE +#ifdef MIX_LOG_ENABLE MixFrameType type; mix_videoframe_get_frame_type(*mvf, &type); LOG_V("The first frame is dequeud, frame type is %d.\n", type); -#endif +#endif } - fm->is_first_frame = FALSE; - - ret = MIX_RESULT_SUCCESS; - } - else - { - // not the first frame and list is not empty - switch(fm->mode) + fm->is_first_frame = FALSE; + + ret = MIX_RESULT_SUCCESS; + } + else + { + // not the first frame and list is not empty + switch (fm->mode) { case MIX_DISPLAY_ORDER_TIMESTAMP: ret = mix_framemanager_timestamp_based_dequeue(fm, mvf); @@ -615,34 +614,34 @@ MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) { ret = mix_framemanager_pictype_based_dequeue(fm, mvf); break; - case MIX_DISPLAY_ORDER_FIFO: - *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0); - fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf)); - ret = MIX_RESULT_SUCCESS; + case MIX_DISPLAY_ORDER_FIFO: + *mvf = (MixVideoFrame*) j_slist_nth_data(fm->frame_list, 0); + fm->frame_list = j_slist_remove(fm->frame_list, (*mvf)); + ret = MIX_RESULT_SUCCESS; LOG_V("One frame is dequeued.\n"); break; - - default: + + default: LOG_E("Invalid frame order mode\n"); ret = MIX_RESULT_FAIL; break; - } - } + } + } - fm->mLock.unlock(); - LOG_V("End\n"); - return ret; + fm->mLock.unlock(); + LOG_V("End\n"); + return ret; } MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } - fm->mLock.lock(); - fm->eos = TRUE; - LOG_V("EOS is received.\n"); - fm->mLock.unlock(); - return ret; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + if (!fm->initialized) { + return MIX_RESULT_NOT_INIT; + } + fm->mLock.lock(); + fm->eos = TRUE; + LOG_V("EOS is received.\n"); + fm->mLock.unlock(); + return ret; } diff --git a/mix_video/src/mixframemanager.h b/mix_video/src/mixframemanager.h index 4be322a..684857d 100644 --- a/mix_video/src/mixframemanager.h +++ b/mix_video/src/mixframemanager.h @@ -9,14 +9,15 @@ #ifndef __MIX_FRAMEMANAGER_H__ #define __MIX_FRAMEMANAGER_H__ -#include +#include #include "mixvideodef.h" #include "mixvideoframe.h" #include "mixvideothread.h" +#include -/* -* MIX_FRAMEORDER_MODE_DECODEORDER is here interpreted as -* MIX_DISPLAY_ORDER_FIFO, a special case of display order mode. 
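
Taken together, the dequeue paths above define a simple consumer contract. A usage sketch (a hypothetical driver loop, assuming an initialized MixFrameManager *fm whose producer side enqueues decoded frames elsewhere):

// Drain whatever is displayable right now; FRAME_NOTAVAIL means "come back
// after more input", EOS means the queue emptied after mix_framemanager_eos().
MixVideoFrame *frame = NULL;
bool draining = true;
while (draining) {
    MIX_RESULT res = mix_framemanager_dequeue(fm, &frame);
    switch (res) {
    case MIX_RESULT_SUCCESS:
        /* consume the frame, then drop the reference handed out here */
        mix_videoframe_unref(frame);
        break;
    case MIX_RESULT_FRAME_NOTAVAIL:  /* display order not resolvable yet */
    case MIX_RESULT_EOS:             /* queue fully drained at end of stream */
    default:                         /* MIX_RESULT_FAIL etc. */
        draining = false;
        break;
    }
}
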
+/* +* MIX_FRAMEORDER_MODE_DECODEORDER is here interpreted as +* MIX_DISPLAY_ORDER_FIFO, a special case of display order mode. */ typedef enum { @@ -30,32 +31,27 @@ typedef enum class MixFrameManager { - /*< private > */ public: - gboolean initialized; - gboolean flushing; - gboolean eos; - - MixVideoMutex mLock; - GSList* frame_list; - - gint framerate_numerator; - gint framerate_denominator; - guint64 frame_timestamp_delta; + MixFrameManager(); + ~MixFrameManager(); - MixDisplayOrderMode mode; - - gboolean is_first_frame; - guint64 last_frame_timestamp; - guint64 next_frame_timestamp; - guint32 next_frame_picnumber; - gint max_enqueue_size; - guint32 max_picture_number; - - guint32 ref_count; public: - MixFrameManager(); - ~MixFrameManager(); + bool initialized; + bool flushing; + bool eos; + MixVideoMutex mLock; + JSList* frame_list; + int framerate_numerator; + int framerate_denominator; + uint64 frame_timestamp_delta; + MixDisplayOrderMode mode; + bool is_first_frame; + uint64 last_frame_timestamp; + uint64 next_frame_timestamp; + uint32 next_frame_picnumber; + int max_enqueue_size; + uint32 max_picture_number; + uint32 ref_count; }; @@ -89,9 +85,9 @@ MixFrameManager* mix_framemanager_unref(MixFrameManager* fm); /* * Initialize FM */ -MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm, - MixDisplayOrderMode mode, gint framerate_numerator, - gint framerate_denominator); +MIX_RESULT mix_framemanager_initialize( + MixFrameManager *fm, MixDisplayOrderMode mode, + int framerate_numerator, int framerate_denominator); /* * Deinitialize FM */ @@ -100,33 +96,35 @@ MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm); /* * Set new framerate */ -MIX_RESULT mix_framemanager_set_framerate(MixFrameManager *fm, - gint framerate_numerator, gint framerate_denominator); +MIX_RESULT mix_framemanager_set_framerate( + MixFrameManager *fm, int framerate_numerator, int framerate_denominator); /* * Get framerate */ -MIX_RESULT mix_framemanager_get_framerate(MixFrameManager *fm, - gint *framerate_numerator, gint *framerate_denominator); +MIX_RESULT mix_framemanager_get_framerate( + MixFrameManager *fm, int *framerate_numerator, int *framerate_denominator); /* * Set miximum size of queue */ -MIX_RESULT mix_framemanager_set_max_enqueue_size(MixFrameManager *fm, gint size); - +MIX_RESULT mix_framemanager_set_max_enqueue_size( + MixFrameManager *fm, int size); + /* * Set miximum picture number */ -MIX_RESULT mix_framemanager_set_max_picture_number(MixFrameManager *fm, guint32 num); +MIX_RESULT mix_framemanager_set_max_picture_number( + MixFrameManager *fm, uint32 num); /* * Get Display Order Mode */ -MIX_RESULT mix_framemanager_get_display_order_mode(MixFrameManager *fm, - MixDisplayOrderMode *mode); +MIX_RESULT mix_framemanager_get_display_order_mode( + MixFrameManager *fm, MixDisplayOrderMode *mode); /* * For discontiunity, reset FM diff --git a/mix_video/src/mixsurfacepool.cpp b/mix_video/src/mixsurfacepool.cpp index 746a998..5c6899c 100644 --- a/mix_video/src/mixsurfacepool.cpp +++ b/mix_video/src/mixsurfacepool.cpp @@ -1,4 +1,4 @@ -/* +/* INTEL CONFIDENTIAL Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. 
The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. @@ -21,91 +21,91 @@ #define MIX_UNLOCK(lock) g_mutex_unlock(lock); -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } +#define SAFE_FREE(p) if(p) { free(p); p = NULL; } MixSurfacePool::MixSurfacePool() - /* initialize properties here */ - :free_list(NULL) - ,in_use_list(NULL) - ,free_list_max_size(0) - ,free_list_cur_size(0) - ,high_water_mark(0) - ,initialized(FALSE) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) - ,mLock() { +/* initialize properties here */ + :free_list(NULL) + ,in_use_list(NULL) + ,free_list_max_size(0) + ,free_list_cur_size(0) + ,high_water_mark(0) + ,initialized(FALSE) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) + ,mLock() { } MixSurfacePool::~MixSurfacePool() { } MixParams* MixSurfacePool::dup() const { - MixParams *ret = NULL; - mLock.lock(); - ret = new MixSurfacePool(); - if(NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - mLock.unlock(); - return ret; + MixParams *ret = NULL; + mLock.lock(); + ret = new MixSurfacePool(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + mLock.unlock(); + return ret; } -gboolean MixSurfacePool::copy(MixParams* target) const { - if(NULL == target) return FALSE; - MixSurfacePool* this_target = MIX_SURFACEPOOL(target); - - mLock.lock(); - this_target->mLock.lock(); - // Free the existing properties - // Duplicate string - this_target->free_list = free_list; - this_target->in_use_list = in_use_list; - this_target->free_list_max_size = free_list_max_size; - this_target->free_list_cur_size = free_list_cur_size; - this_target->high_water_mark = high_water_mark; - - this_target->mLock.unlock(); - mLock.unlock(); - - MixParams::copy(target); - return TRUE; +bool MixSurfacePool::copy(MixParams* target) const { + if (NULL == target) return FALSE; + MixSurfacePool* this_target = MIX_SURFACEPOOL(target); + + mLock.lock(); + this_target->mLock.lock(); + // Free the existing properties + // Duplicate string + this_target->free_list = free_list; + this_target->in_use_list = in_use_list; + this_target->free_list_max_size = free_list_max_size; + this_target->free_list_cur_size = free_list_cur_size; + this_target->high_water_mark = high_water_mark; + + this_target->mLock.unlock(); + mLock.unlock(); + + MixParams::copy(target); + return TRUE; } -gboolean MixSurfacePool::equal(MixParams *first) const { - if(NULL == first) return FALSE; - gboolean ret = FALSE; - MixSurfacePool *this_first = MIX_SURFACEPOOL(first); - mLock.lock(); - this_first->mLock.lock(); - if (this_first->free_list == free_list - && this_first->in_use_list == in_use_list - && this_first->free_list_max_size - == free_list_max_size - && this_first->free_list_cur_size - == free_list_cur_size - && this_first->high_water_mark == high_water_mark) { - ret = MixParams::equal(first); - } - this_first->mLock.unlock(); - mLock.unlock(); - return ret; +bool MixSurfacePool::equal(MixParams *first) const { + if (NULL == first) return FALSE; + bool ret = FALSE; + MixSurfacePool *this_first = MIX_SURFACEPOOL(first); + mLock.lock(); + this_first->mLock.lock(); + if (this_first->free_list == free_list + && this_first->in_use_list == in_use_list + && this_first->free_list_max_size + 
== free_list_max_size + && this_first->free_list_cur_size + == free_list_cur_size + && this_first->high_water_mark == high_water_mark) { + ret = MixParams::equal(first); + } + this_first->mLock.unlock(); + mLock.unlock(); + return ret; } MixSurfacePool * -mix_surfacepool_new(void) { - return new MixSurfacePool(); +mix_surfacepool_new(void) { + return new MixSurfacePool(); } MixSurfacePool * mix_surfacepool_ref(MixSurfacePool * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } /* Class Methods */ @@ -118,97 +118,97 @@ mix_surfacepool_ref(MixSurfacePool * mix) { * frame objects that represents a pool of surfaces. */ MIX_RESULT mix_surfacepool_initialize(MixSurfacePool * obj, - VASurfaceID *surfaces, guint num_surfaces, VADisplay va_display) { + VASurfaceID *surfaces, uint num_surfaces, VADisplay va_display) { - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - if (obj == NULL || surfaces == NULL) { + if (obj == NULL || surfaces == NULL) { - LOG_E( - "Error NULL ptrs, obj %x, surfaces %x\n", (guint) obj, - (guint) surfaces); + LOG_E( + "Error NULL ptrs, obj %x, surfaces %x\n", (uint) obj, + (uint) surfaces); - return MIX_RESULT_NULL_PTR; - } + return MIX_RESULT_NULL_PTR; + } - obj->mLock.lock(); + obj->mLock.lock(); - if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) { - //surface pool is in use; return error; need proper cleanup - //TODO need cleanup here? + if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) { + //surface pool is in use; return error; need proper cleanup + //TODO need cleanup here? - obj->mLock.unlock(); + obj->mLock.unlock(); - return MIX_RESULT_ALREADY_INIT; - } + return MIX_RESULT_ALREADY_INIT; + } - if (num_surfaces == 0) { - obj->free_list = NULL; + if (num_surfaces == 0) { + obj->free_list = NULL; - obj->in_use_list = NULL; + obj->in_use_list = NULL; - obj->free_list_max_size = num_surfaces; + obj->free_list_max_size = num_surfaces; - obj->free_list_cur_size = num_surfaces; + obj->free_list_cur_size = num_surfaces; - obj->high_water_mark = 0; + obj->high_water_mark = 0; - /* assume it is initialized */ - obj->initialized = TRUE; + /* assume it is initialized */ + obj->initialized = TRUE; - obj->mLock.unlock(); + obj->mLock.unlock(); - return MIX_RESULT_SUCCESS; - } + return MIX_RESULT_SUCCESS; + } - // Initialize the free pool with frame objects + // Initialize the free pool with frame objects - guint i = 0; - MixVideoFrame *frame = NULL; + uint i = 0; + MixVideoFrame *frame = NULL; - for (; i < num_surfaces; i++) { + for (; i < num_surfaces; i++) { - //Create a frame object for each surface ID - frame = mix_videoframe_new(); + //Create a frame object for each surface ID + frame = mix_videoframe_new(); - if (frame == NULL) { - //TODO need to log an error here and do cleanup + if (frame == NULL) { + //TODO need to log an error here and do cleanup - obj->mLock.unlock(); + obj->mLock.unlock(); - return MIX_RESULT_NO_MEMORY; - } + return MIX_RESULT_NO_MEMORY; + } - // Set the frame ID to the surface ID - mix_videoframe_set_frame_id(frame, surfaces[i]); - // Set the ci frame index to the surface ID - mix_videoframe_set_ci_frame_idx (frame, i); - // Leave timestamp for each frame object as zero - // Set the pool reference in the private data of the frame object - mix_videoframe_set_pool(frame, obj); + // Set the frame ID to the surface ID + mix_videoframe_set_frame_id(frame, surfaces[i]); + // Set the ci frame index to the surface ID + mix_videoframe_set_ci_frame_idx (frame, i); + // Leave timestamp for each 
frame object as zero + // Set the pool reference in the private data of the frame object + mix_videoframe_set_pool(frame, obj); - mix_videoframe_set_vadisplay(frame, va_display); + mix_videoframe_set_vadisplay(frame, va_display); - //Add each frame object to the pool list - obj->free_list = g_slist_append(obj->free_list, frame); + //Add each frame object to the pool list + obj->free_list = j_slist_append(obj->free_list, frame); - } + } - obj->in_use_list = NULL; + obj->in_use_list = NULL; - obj->free_list_max_size = num_surfaces; + obj->free_list_max_size = num_surfaces; - obj->free_list_cur_size = num_surfaces; + obj->free_list_cur_size = num_surfaces; - obj->high_water_mark = 0; + obj->high_water_mark = 0; - obj->initialized = TRUE; + obj->initialized = TRUE; - obj->mLock.unlock(); + obj->mLock.unlock(); - LOG_V( "End\n"); + LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } /** @@ -218,51 +218,51 @@ MIX_RESULT mix_surfacepool_initialize(MixSurfacePool * obj, * Use this method to return a surface to the free pool */ MIX_RESULT mix_surfacepool_put(MixSurfacePool * obj, MixVideoFrame * frame) { - - LOG_V( "Begin\n"); - if (obj == NULL || frame == NULL) - return MIX_RESULT_NULL_PTR; - LOG_V( "Frame id: %d\n", frame->frame_id); - obj->mLock.lock(); + LOG_V( "Begin\n"); + if (obj == NULL || frame == NULL) + return MIX_RESULT_NULL_PTR; - if (obj->in_use_list == NULL) { - //in use list cannot be empty if a frame is in use - //TODO need better error code for this + LOG_V( "Frame id: %d\n", frame->frame_id); + obj->mLock.lock(); - obj->mLock.unlock(); + if (obj->in_use_list == NULL) { + //in use list cannot be empty if a frame is in use + //TODO need better error code for this - return MIX_RESULT_FAIL; - } + obj->mLock.unlock(); - GSList *element = g_slist_find(obj->in_use_list, frame); - if (element == NULL) { - //Integrity error; frame not found in in use list - //TODO need better error code and handling for this + return MIX_RESULT_FAIL; + } - obj->mLock.unlock(); + JSList *element = j_slist_find(obj->in_use_list, frame); + if (element == NULL) { + //Integrity error; frame not found in in use list + //TODO need better error code and handling for this - return MIX_RESULT_FAIL; - } else { - //Remove this element from the in_use_list - obj->in_use_list = g_slist_remove_link(obj->in_use_list, element); + obj->mLock.unlock(); - //Concat the element to the free_list and reset the timestamp of the frame - //Note that the surface ID stays valid - mix_videoframe_set_timestamp(frame, 0); - obj->free_list = g_slist_concat(obj->free_list, element); - - //increment the free list count - obj->free_list_cur_size++; - } + return MIX_RESULT_FAIL; + } else { + //Remove this element from the in_use_list + obj->in_use_list = j_slist_remove_link(obj->in_use_list, element); - //Note that we do nothing with the ref count for this. We want it to - //stay at 1, which is what triggered it to be added back to the free list. + //Concat the element to the free_list and reset the timestamp of the frame + //Note that the surface ID stays valid + mix_videoframe_set_timestamp(frame, 0); + obj->free_list = j_slist_concat(obj->free_list, element); - obj->mLock.unlock(); + //increment the free list count + obj->free_list_cur_size++; + } - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; + //Note that we do nothing with the ref count for this. We want it to + //stay at 1, which is what triggered it to be added back to the free list. 
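
Note that the put() path above never allocates: the JSList node itself migrates from in_use_list back to free_list. A condensed sketch of that idiom (hypothetical pool/frame variables; assumes the in-tree JSList keeps GSList's remove_link/concat semantics, as the one-for-one g_slist_* to j_slist_* rename suggests):

// Unlink the node (it is not freed) from the in-use list and splice it onto
// the free list, so checking frames in and out does no list allocation.
JSList *element = j_slist_find(pool->in_use_list, frame);
if (element != NULL) {
    pool->in_use_list = j_slist_remove_link(pool->in_use_list, element);
    mix_videoframe_set_timestamp(frame, 0);   /* surface ID stays valid */
    pool->free_list = j_slist_concat(pool->free_list, element);
    pool->free_list_cur_size++;
}
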
+ + obj->mLock.unlock(); + + LOG_V( "End\n"); + return MIX_RESULT_SUCCESS; } /** @@ -273,85 +273,85 @@ MIX_RESULT mix_surfacepool_put(MixSurfacePool * obj, MixVideoFrame * frame) { */ MIX_RESULT mix_surfacepool_get(MixSurfacePool * obj, MixVideoFrame ** frame) { - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - if (obj == NULL || frame == NULL) - return MIX_RESULT_NULL_PTR; + if (obj == NULL || frame == NULL) + return MIX_RESULT_NULL_PTR; - obj->mLock.lock(); + obj->mLock.lock(); #if 0 - if (obj->free_list == NULL) { + if (obj->free_list == NULL) { #else - if (obj->free_list_cur_size <= 1) { //Keep one surface free at all times for VBLANK bug + if (obj->free_list_cur_size <= 1) { //Keep one surface free at all times for VBLANK bug #endif - //We are out of surfaces - //TODO need to log this as well + //We are out of surfaces + //TODO need to log this as well + + obj->mLock.unlock(); - obj->mLock.unlock(); + LOG_E( "out of surfaces\n"); - LOG_E( "out of surfaces\n"); + return MIX_RESULT_OUTOFSURFACES; + } - return MIX_RESULT_OUTOFSURFACES; - } + //Remove a frame from the free pool - //Remove a frame from the free pool + //We just remove the one at the head, since it's convenient + JSList *element = obj->free_list; + obj->free_list = j_slist_remove_link(obj->free_list, element); + if (element == NULL) { + //Unexpected behavior + //TODO need better error code and handling for this - //We just remove the one at the head, since it's convenient - GSList *element = obj->free_list; - obj->free_list = g_slist_remove_link(obj->free_list, element); - if (element == NULL) { - //Unexpected behavior - //TODO need better error code and handling for this + obj->mLock.unlock(); - obj->mLock.unlock(); + LOG_E( "Element is null\n"); - LOG_E( "Element is null\n"); + return MIX_RESULT_FAIL; + } else { + //Concat the element to the in_use_list + obj->in_use_list = j_slist_concat(obj->in_use_list, element); - return MIX_RESULT_FAIL; - } else { - //Concat the element to the in_use_list - obj->in_use_list = g_slist_concat(obj->in_use_list, element); + //TODO replace with proper logging - //TODO replace with proper logging + LOG_I( "frame refcount%d\n", + MIX_PARAMS(element->data)->ref_count); - LOG_I( "frame refcount%d\n", - MIX_PARAMS(element->data)->ref_count); + //Set the out frame pointer + *frame = (MixVideoFrame *) element->data; - //Set the out frame pointer - *frame = (MixVideoFrame *) element->data; + LOG_V( "Frame id: %d\n", (*frame)->frame_id); - LOG_V( "Frame id: %d\n", (*frame)->frame_id); - - //decrement the free list count - obj->free_list_cur_size--; + //decrement the free list count + obj->free_list_cur_size--; - //Check the high water mark for surface use - guint size = g_slist_length(obj->in_use_list); - if (size > obj->high_water_mark) - obj->high_water_mark = size; - //TODO Log this high water mark - } + //Check the high water mark for surface use + uint size = j_slist_length(obj->in_use_list); + if (size > obj->high_water_mark) + obj->high_water_mark = size; + //TODO Log this high water mark + } - //Increment the reference count for the frame - mix_videoframe_ref(*frame); + //Increment the reference count for the frame + mix_videoframe_ref(*frame); - obj->mLock.unlock(); + obj->mLock.unlock(); - LOG_V( "End\n"); + LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } -gint mixframe_compare_index (MixVideoFrame * a, MixVideoFrame * b) +int mixframe_compare_index (MixVideoFrame * a, MixVideoFrame * b) { - if (a == NULL || b == NULL) - return -1; - if (a->ci_frame_idx == 
b->ci_frame_idx) - return 0; - else - return -1; + if (a == NULL || b == NULL) + return -1; + if (a->ci_frame_idx == b->ci_frame_idx) + return 0; + else + return -1; } /** @@ -363,65 +363,65 @@ gint mixframe_compare_index (MixVideoFrame * a, MixVideoFrame * b) MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool * obj, MixVideoFrame ** frame, MixVideoFrame *in_frame) { - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - if (obj == NULL || frame == NULL) - return MIX_RESULT_NULL_PTR; + if (obj == NULL || frame == NULL) + return MIX_RESULT_NULL_PTR; - obj->mLock.lock(); + obj->mLock.lock(); - if (obj->free_list == NULL) { - //We are out of surfaces - //TODO need to log this as well + if (obj->free_list == NULL) { + //We are out of surfaces + //TODO need to log this as well - obj->mLock.unlock(); + obj->mLock.unlock(); - LOG_E( "out of surfaces\n"); + LOG_E( "out of surfaces\n"); - return MIX_RESULT_OUTOFSURFACES; - } + return MIX_RESULT_OUTOFSURFACES; + } - //Remove a frame from the free pool + //Remove a frame from the free pool - //We just remove the one at the head, since it's convenient - GSList *element = g_slist_find_custom (obj->free_list, in_frame, (GCompareFunc) mixframe_compare_index); - obj->free_list = g_slist_remove_link(obj->free_list, element); - if (element == NULL) { - //Unexpected behavior - //TODO need better error code and handling for this + //We just remove the one at the head, since it's convenient + JSList *element = j_slist_find_custom (obj->free_list, in_frame, (JCompareFunc) mixframe_compare_index); + obj->free_list = j_slist_remove_link(obj->free_list, element); + if (element == NULL) { + //Unexpected behavior + //TODO need better error code and handling for this - obj->mLock.unlock(); + obj->mLock.unlock(); - LOG_E( "Element is null\n"); + LOG_E( "Element is null\n"); - return MIX_RESULT_FAIL; - } else { - //Concat the element to the in_use_list - obj->in_use_list = g_slist_concat(obj->in_use_list, element); + return MIX_RESULT_FAIL; + } else { + //Concat the element to the in_use_list + obj->in_use_list = j_slist_concat(obj->in_use_list, element); - //TODO replace with proper logging + //TODO replace with proper logging - LOG_I( "frame refcount%d\n", - MIX_PARAMS(element->data)->ref_count); + LOG_I( "frame refcount%d\n", + MIX_PARAMS(element->data)->ref_count); - //Set the out frame pointer - *frame = (MixVideoFrame *) element->data; + //Set the out frame pointer + *frame = (MixVideoFrame *) element->data; - //Check the high water mark for surface use - guint size = g_slist_length(obj->in_use_list); - if (size > obj->high_water_mark) - obj->high_water_mark = size; - //TODO Log this high water mark - } + //Check the high water mark for surface use + uint size = j_slist_length(obj->in_use_list); + if (size > obj->high_water_mark) + obj->high_water_mark = size; + //TODO Log this high water mark + } - //Increment the reference count for the frame - mix_videoframe_ref(*frame); + //Increment the reference count for the frame + mix_videoframe_ref(*frame); - obj->mLock.unlock(); + obj->mLock.unlock(); - LOG_V( "End\n"); + LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } /** * mix_surfacepool_check_available: @@ -431,44 +431,44 @@ MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool * obj, Mix */ MIX_RESULT mix_surfacepool_check_available(MixSurfacePool * obj) { - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - if (obj == NULL) - return MIX_RESULT_NULL_PTR; + if (obj == NULL) + return MIX_RESULT_NULL_PTR; - 
obj->mLock.lock(); + obj->mLock.lock(); + + if (obj->initialized == FALSE) + { + LOG_W("surface pool is not initialized, probably configuration data has not been received yet.\n"); + obj->mLock.unlock(); + return MIX_RESULT_NOT_INIT; + } - if (obj->initialized == FALSE) - { - LOG_W("surface pool is not initialized, probably configuration data has not been received yet.\n"); - obj->mLock.unlock(); - return MIX_RESULT_NOT_INIT; - } - #if 0 - if (obj->free_list == NULL) { + if (obj->free_list == NULL) { #else - if (obj->free_list_cur_size <= 1) { //Keep one surface free at all times for VBLANK bug + if (obj->free_list_cur_size <= 1) { //Keep one surface free at all times for VBLANK bug #endif - //We are out of surfaces + //We are out of surfaces - obj->mLock.unlock(); + obj->mLock.unlock(); - LOG_W( - "Returning MIX_RESULT_POOLEMPTY because out of surfaces\n"); + LOG_W( + "Returning MIX_RESULT_POOLEMPTY because out of surfaces\n"); - return MIX_RESULT_POOLEMPTY; - } else { - //Pool is not empty + return MIX_RESULT_POOLEMPTY; + } else { + //Pool is not empty - obj->mLock.unlock(); + obj->mLock.unlock(); - LOG_I( - "Returning MIX_RESULT_SUCCESS because surfaces are available\n"); + LOG_I( + "Returning MIX_RESULT_SUCCESS because surfaces are available\n"); - return MIX_RESULT_SUCCESS; - } + return MIX_RESULT_SUCCESS; + } } @@ -479,86 +479,86 @@ MIX_RESULT mix_surfacepool_check_available(MixSurfacePool * obj) { * Use this method to teardown a surface pool */ MIX_RESULT mix_surfacepool_deinitialize(MixSurfacePool * obj) { - if (obj == NULL) - return MIX_RESULT_NULL_PTR; + if (obj == NULL) + return MIX_RESULT_NULL_PTR; - obj->mLock.lock(); + obj->mLock.lock(); - if ((obj->in_use_list != NULL) || (g_slist_length(obj->free_list) - != obj->free_list_max_size)) { - //TODO better error code - //We have outstanding frame objects in use and they need to be - //freed before we can deinitialize. + if ((obj->in_use_list != NULL) || (j_slist_length(obj->free_list) + != obj->free_list_max_size)) { + //TODO better error code + //We have outstanding frame objects in use and they need to be + //freed before we can deinitialize. 
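
A caller-side sketch for the availability check above (hypothetical names; per the put() comment earlier, dropping the reference taken by get() is what eventually recycles the frame back onto the free list):

MixVideoFrame *frame = NULL;
if (mix_surfacepool_check_available(pool) == MIX_RESULT_SUCCESS &&
    mix_surfacepool_get(pool, &frame) == MIX_RESULT_SUCCESS) {
    /* frame_id was set to the underlying surface at pool initialization */
    VASurfaceID surface = (VASurfaceID) frame->frame_id;
    /* ... decode or render into surface ... */
    mix_videoframe_unref(frame);   /* release the reference from get() */
}
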
- obj->mLock.unlock(); + obj->mLock.unlock(); - return MIX_RESULT_FAIL; - } + return MIX_RESULT_FAIL; + } - //Now remove frame objects from the list + //Now remove frame objects from the list - MixVideoFrame *frame = NULL; + MixVideoFrame *frame = NULL; - while (obj->free_list != NULL) { - //Get the frame object from the head of the list - frame = reinterpret_cast(obj->free_list->data); - //frame = g_slist_nth_data(obj->free_list, 0); + while (obj->free_list != NULL) { + //Get the frame object from the head of the list + frame = reinterpret_cast(obj->free_list->data); + //frame = g_slist_nth_data(obj->free_list, 0); - //Release it - mix_videoframe_unref(frame); + //Release it + mix_videoframe_unref(frame); - //Delete the head node of the list and store the new head - obj->free_list = g_slist_delete_link(obj->free_list, obj->free_list); + //Delete the head node of the list and store the new head + obj->free_list = j_slist_delete_link(obj->free_list, obj->free_list); - //Repeat until empty - } + //Repeat until empty + } - obj->free_list_max_size = 0; - obj->free_list_cur_size = 0; + obj->free_list_max_size = 0; + obj->free_list_cur_size = 0; - //May want to log this information for tuning - obj->high_water_mark = 0; + //May want to log this information for tuning + obj->high_water_mark = 0; - obj->mLock.unlock(); + obj->mLock.unlock(); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } #define MIX_SURFACEPOOL_SETTER_CHECK_INPUT(obj) \ if(!obj) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_SURFACEPOOL(obj)) return MIX_RESULT_FAIL; \ - + #define MIX_SURFACEPOOL_GETTER_CHECK_INPUT(obj, prop) \ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_SURFACEPOOL(obj)) return MIX_RESULT_FAIL; \ - + MIX_RESULT mix_surfacepool_dumpframe(MixVideoFrame *frame) { - LOG_I( "\tFrame %x, id %lu, refcount %d, ts %lu\n", (guint)frame, - frame->frame_id, MIX_PARAMS(frame)->ref_count, (gulong) frame->timestamp); + LOG_I( "\tFrame %x, id %lu, refcount %d, ts %lu\n", (uint)frame, + frame->frame_id, MIX_PARAMS(frame)->ref_count, (ulong) frame->timestamp); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_surfacepool_dumpprint (MixSurfacePool * obj) { - //TODO replace this with proper logging later + //TODO replace this with proper logging later - LOG_I( "SURFACE POOL DUMP:\n"); - LOG_I( "Free list size is %d\n", obj->free_list_cur_size); - LOG_I( "In use list size is %d\n", g_slist_length(obj->in_use_list)); - LOG_I( "High water mark is %lu\n", obj->high_water_mark); + LOG_I( "SURFACE POOL DUMP:\n"); + LOG_I( "Free list size is %d\n", obj->free_list_cur_size); + LOG_I( "In use list size is %d\n", j_slist_length(obj->in_use_list)); + LOG_I( "High water mark is %lu\n", obj->high_water_mark); - //Walk the free list and report the contents - LOG_I( "Free list contents:\n"); - g_slist_foreach(obj->free_list, (GFunc) mix_surfacepool_dumpframe, NULL); + //Walk the free list and report the contents + LOG_I( "Free list contents:\n"); + j_slist_foreach(obj->free_list, (JFunc) mix_surfacepool_dumpframe, NULL); - //Walk the in_use list and report the contents - LOG_I( "In Use list contents:\n"); - g_slist_foreach(obj->in_use_list, (GFunc) mix_surfacepool_dumpframe, NULL); + //Walk the in_use list and report the contents + LOG_I( "In Use list contents:\n"); + j_slist_foreach(obj->in_use_list, (JFunc) mix_surfacepool_dumpframe, NULL); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixsurfacepool.h b/mix_video/src/mixsurfacepool.h index 3ad099d..e75e417 
100644 --- a/mix_video/src/mixsurfacepool.h +++ b/mix_video/src/mixsurfacepool.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -14,6 +14,8 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoframe.h" #include "mixvideothread.h" #include +#include + /** * MIX_SURFACEPOOL: @@ -29,34 +31,34 @@ No license under any patent, copyright, trade secret or other intellectual prope class MixSurfacePool : public MixParams { public: - /*< public > */ - GSList *free_list; /* list of free surfaces */ - GSList *in_use_list; /* list of surfaces in use */ - gulong free_list_max_size; /* initial size of the free list */ - gulong free_list_cur_size; /* current size of the free list */ - gulong high_water_mark; /* most surfaces in use at one time */ - gboolean initialized; -// guint64 timestamp; - - void *reserved1; - void *reserved2; - void *reserved3; - void *reserved4; - - /*< private > */ - mutable MixVideoMutex mLock; + /*< public > */ + JSList *free_list; /* list of free surfaces */ + JSList *in_use_list; /* list of surfaces in use */ + ulong free_list_max_size; /* initial size of the free list */ + ulong free_list_cur_size; /* current size of the free list */ + ulong high_water_mark; /* most surfaces in use at one time */ + bool initialized; +// uint64 timestamp; + + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; + + /*< private > */ + mutable MixVideoMutex mLock; public: - MixSurfacePool(); - virtual ~MixSurfacePool(); - virtual gboolean copy(MixParams *target) const; - virtual gboolean equal(MixParams* obj) const; - virtual MixParams* dup() const; + MixSurfacePool(); + virtual ~MixSurfacePool(); + virtual bool copy(MixParams *target) const; + virtual bool equal(MixParams* obj) const; + virtual MixParams* dup() const; }; /** * mix_surfacepool_new: * @returns: A newly allocated instance of #MixSurfacePool -* +* * Use this method to create new instance of #MixSurfacePool */ MixSurfacePool *mix_surfacepool_new (void); @@ -64,7 +66,7 @@ MixSurfacePool *mix_surfacepool_new (void); * mix_surfacepool_ref: * @mix: object to add reference * @returns: the MixSurfacePool instance where reference count has been increased. -* +* * Add reference count. */ MixSurfacePool *mix_surfacepool_ref (MixSurfacePool * mix); @@ -72,23 +74,23 @@ MixSurfacePool *mix_surfacepool_ref (MixSurfacePool * mix); /** * mix_surfacepool_unref: * @obj: object to unref. 
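
A lifecycle sketch for the class above (illustrative only: va_display and the surface array are assumed to have been produced elsewhere, e.g. via vaGetDisplay()/vaInitialize() and vaCreateSurfaces(); error handling trimmed):

#define POOL_SURFACES 8

VADisplay va_display;                 /* assumed initialized via libVA */
VASurfaceID surfaces[POOL_SURFACES];  /* assumed filled by vaCreateSurfaces() */

MixSurfacePool *pool = mix_surfacepool_new();
if (pool != NULL &&
    mix_surfacepool_initialize(pool, surfaces, POOL_SURFACES, va_display)
        == MIX_RESULT_SUCCESS) {
    /* each surface is now wrapped in a MixVideoFrame on free_list */
}
/* ... get()/put() traffic ... */

/* deinitialize() fails unless every frame is back on the free list */
mix_surfacepool_deinitialize(pool);
mix_surfacepool_unref(pool);
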
-* +* * Decrement reference count of the object. */ #define mix_surfacepool_unref(obj) mix_params_unref(MIX_PARAMS(obj)) /* Class Methods */ -MIX_RESULT mix_surfacepool_initialize (MixSurfacePool * obj, - VASurfaceID *surfaces, guint num_surfaces, VADisplay va_display); +MIX_RESULT mix_surfacepool_initialize (MixSurfacePool * obj, + VASurfaceID *surfaces, uint num_surfaces, VADisplay va_display); MIX_RESULT mix_surfacepool_put (MixSurfacePool * obj, - MixVideoFrame * frame); + MixVideoFrame * frame); MIX_RESULT mix_surfacepool_get (MixSurfacePool * obj, - MixVideoFrame ** frame); + MixVideoFrame ** frame); -MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool * obj, - MixVideoFrame ** frame, MixVideoFrame *in_frame); +MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool * obj, + MixVideoFrame ** frame, MixVideoFrame *in_frame); MIX_RESULT mix_surfacepool_check_available (MixSurfacePool * obj); diff --git a/mix_video/src/mixvideo.cpp b/mix_video/src/mixvideo.cpp index 795d930..5053595 100644 --- a/mix_video/src/mixvideo.cpp +++ b/mix_video/src/mixvideo.cpp @@ -47,6 +47,8 @@ #include #include /* libVA */ +#include +#include #ifndef ANDROID #include @@ -59,9 +61,9 @@ extern "C" { #endif -VADisplay vaGetDisplay ( - void *android_dpy -); + VADisplay vaGetDisplay ( + void *android_dpy + ); #ifdef __cplusplus } @@ -92,7 +94,7 @@ VADisplay vaGetDisplay ( #include "mixvideoconfigparamsdec_h264.h" #include "mixvideoconfigparamsdec_mp42.h" -#if MIXVIDEO_ENCODE_ENABLE + #include "mixvideoformatenc.h" #include "mixvideoformatenc_h264.h" #include "mixvideoformatenc_mpeg4.h" @@ -103,7 +105,7 @@ VADisplay vaGetDisplay ( #include "mixvideoconfigparamsenc_mpeg4.h" #include "mixvideoconfigparamsenc_preview.h" #include "mixvideoconfigparamsenc_h263.h" -#endif + #include "mixvideo.h" #include "mixvideo_private.h" @@ -143,35 +145,35 @@ VADisplay vaGetDisplay ( * default implementation of virtual methods */ -MIX_RESULT mix_video_get_version_default(MixVideo * mix, guint * major, - guint * minor); +MIX_RESULT mix_video_get_version_default(MixVideo * mix, uint * major, + uint * minor); MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode, - MixVideoInitParams * init_params, MixDrmParams * drm_init_params); + MixVideoInitParams * init_params, MixDrmParams * drm_init_params); MIX_RESULT mix_video_deinitialize_default(MixVideo * mix); MIX_RESULT mix_video_configure_default(MixVideo * mix, - MixVideoConfigParams * config_params, MixDrmParams * drm_config_params); + MixVideoConfigParams * config_params, MixDrmParams * drm_config_params); MIX_RESULT mix_video_get_config_default(MixVideo * mix, - MixVideoConfigParams ** config_params); + MixVideoConfigParams ** config_params); MIX_RESULT mix_video_decode_default(MixVideo * mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params); + int bufincnt, MixVideoDecodeParams * decode_params); MIX_RESULT mix_video_get_frame_default(MixVideo * mix, MixVideoFrame ** frame); MIX_RESULT mix_video_release_frame_default(MixVideo * mix, - MixVideoFrame * frame); + MixVideoFrame * frame); MIX_RESULT mix_video_render_default(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame); -#if MIXVIDEO_ENCODE_ENABLE + MixVideoRenderParams * render_params, MixVideoFrame *frame); + MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params); -#endif + int bufincnt, MixIOVec * iovout[], int 
iovoutcnt, + MixVideoEncodeParams * encode_params); + MIX_RESULT mix_video_flush_default(MixVideo * mix); MIX_RESULT mix_video_eos_default(MixVideo * mix); @@ -181,771 +183,784 @@ MIX_RESULT mix_video_get_state_default(MixVideo * mix, MixState * state); MIX_RESULT mix_video_get_mixbuffer_default(MixVideo * mix, MixBuffer ** buf); MIX_RESULT mix_video_release_mixbuffer_default(MixVideo * mix, MixBuffer * buf); -#if MIXVIDEO_ENCODE_ENABLE -MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, guint *max_size); -#endif + +MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, uint *max_size); + MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, - MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); + MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); +MIX_RESULT mix_video_get_new_userptr_for_surface_buffer_default (MixVideo * mix, uint width, uint height, uint format, + uint expected_size, uint *outsize, uint * stride, uint8 **usrptr); static void mix_video_finalize(MixVideo * obj); MIX_RESULT mix_video_configure_decode(MixVideo * mix, - MixVideoConfigParamsDec * config_params_dec, - MixDrmParams * drm_config_params); + MixVideoConfigParamsDec * config_params_dec, + MixDrmParams * drm_config_params); + -#if MIXVIDEO_ENCODE_ENABLE MIX_RESULT mix_video_configure_encode(MixVideo * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixDrmParams * drm_config_params); -#endif + MixVideoConfigParamsEnc * config_params_enc, + MixDrmParams * drm_config_params); + static void mix_video_init(MixVideo * self); MixVideo::MixVideo() { - //context = malloc(sizeof(MixVideoPrivate)); - context = &mPriv; - get_version_func = mix_video_get_version_default; - initialize_func = mix_video_initialize_default; - deinitialize_func = mix_video_deinitialize_default; - configure_func = mix_video_configure_default; - get_config_func = mix_video_get_config_default; - decode_func = mix_video_decode_default; - get_frame_func = mix_video_get_frame_default; - release_frame_func = mix_video_release_frame_default; - render_func = mix_video_render_default; -#if MIXVIDEO_ENCODE_ENABLE - encode_func = mix_video_encode_default; -#endif - flush_func = mix_video_flush_default; - eos_func = mix_video_eos_default; - get_state_func = mix_video_get_state_default; - get_mix_buffer_func = mix_video_get_mixbuffer_default; - release_mix_buffer_func = mix_video_release_mixbuffer_default; -#if MIXVIDEO_ENCODE_ENABLE - get_max_coded_buffer_size_func = mix_video_get_max_coded_buffer_size_default; - set_dynamic_enc_config_func = mix_video_set_dynamic_enc_config_default; -#endif - mix_video_init(this); - - ref_count = 1; + //context = malloc(sizeof(MixVideoPrivate)); + context = &mPriv; + get_version_func = mix_video_get_version_default; + initialize_func = mix_video_initialize_default; + deinitialize_func = mix_video_deinitialize_default; + configure_func = mix_video_configure_default; + get_config_func = mix_video_get_config_default; + decode_func = mix_video_decode_default; + get_frame_func = mix_video_get_frame_default; + release_frame_func = mix_video_release_frame_default; + render_func = mix_video_render_default; + + encode_func = mix_video_encode_default; + + flush_func = mix_video_flush_default; + eos_func = mix_video_eos_default; + get_state_func = mix_video_get_state_default; + get_mix_buffer_func = mix_video_get_mixbuffer_default; + release_mix_buffer_func = mix_video_release_mixbuffer_default; + + get_max_coded_buffer_size_func = 
mix_video_get_max_coded_buffer_size_default; + set_dynamic_enc_config_func = mix_video_set_dynamic_enc_config_default; + + get_new_usrptr_for_surface_buffer = mix_video_get_new_userptr_for_surface_buffer_default; + mix_video_init(this); + + ref_count = 1; } -MixVideo::~MixVideo(){ - mix_video_finalize(this); +MixVideo::~MixVideo() { + mix_video_finalize(this); } static void mix_video_init(MixVideo * self) { - MixVideoPrivate *priv = MIX_VIDEO_GET_PRIVATE(self); + MixVideoPrivate *priv = MIX_VIDEO_GET_PRIVATE(self); #ifdef USE_OPAQUE_POINTER - self->context = priv; + self->context = priv; #else - self->context = NULL; + self->context = NULL; #endif - /* private structure initialization */ - mix_video_private_initialize(priv); + /* private structure initialization */ + mix_video_private_initialize(priv); } MixVideo *mix_video_new(void) { - MixVideo *ret = new MixVideo; + MixVideo *ret = new MixVideo; - return ret; + return ret; } void mix_video_finalize(MixVideo * mix) { - /* clean up here. */ + /* clean up here. */ - mix_video_deinitialize(mix); + mix_video_deinitialize(mix); } MixVideo * mix_video_ref(MixVideo * mix) { - if (NULL != mix) - mix->ref_count ++; - return mix; + if (NULL != mix) + mix->ref_count ++; + return mix; } MixVideo * mix_video_unref(MixVideo * mix) { - if(NULL != mix) { - mix->ref_count --; - if (mix->ref_count == 0) { - delete mix; - return NULL; - } - } - return mix; + if (NULL != mix) { + mix->ref_count --; + if (mix->ref_count == 0) { + delete mix; + return NULL; + } + } + return mix; } /* private methods */ #define MIXUNREF(obj, unref) if(obj) { unref(obj); obj = NULL; } void mix_video_private_initialize(MixVideoPrivate* priv) { - priv->initialized = FALSE; - priv->configured = FALSE; - - /* libVA */ - priv->va_display = NULL; - priv->va_major_version = -1; - priv->va_major_version = -1; - - /* mix objects */ - priv->frame_manager = NULL; - priv->video_format = NULL; -#if MIXVIDEO_ENCODE_ENABLE - priv->video_format_enc = NULL; //for encoding -#endif - priv->surface_pool = NULL; - priv->buffer_pool = NULL; - - priv->codec_mode = MIX_CODEC_MODE_DECODE; - priv->init_params = NULL; - priv->drm_params = NULL; - priv->config_params = NULL; + priv->initialized = FALSE; + priv->configured = FALSE; + + /* libVA */ + priv->va_display = NULL; + priv->va_major_version = -1; + priv->va_major_version = -1; + + /* mix objects */ + priv->frame_manager = NULL; + priv->video_format = NULL; + + priv->video_format_enc = NULL; //for encoding + + priv->surface_pool = NULL; + priv->buffer_pool = NULL; + + priv->codec_mode = MIX_CODEC_MODE_DECODE; + priv->init_params = NULL; + priv->drm_params = NULL; + priv->config_params = NULL; + + /* + * usrptr shared buffer mode + */ + priv->requested_surface_info.surface_cnt = 0; + memset (priv->requested_surface_info.surface_allocated, 0 , sizeof (uint) * MAX_ENC_SURFACE_COUNT); + memset (priv->requested_surface_info.usrptr, 0 , sizeof (uint8 *) * MAX_ENC_SURFACE_COUNT); } void mix_video_private_cleanup(MixVideoPrivate* priv) { - VAStatus va_status; + VAStatus va_status; - if (!priv) { - return; - } -#if MIXVIDEO_ENCODE_ENABLE - if (priv->video_format_enc) { - mix_videofmtenc_deinitialize(priv->video_format_enc); - } -#endif - MIXUNREF(priv->frame_manager, mix_framemanager_unref) - MIXUNREF(priv->video_format, mix_videoformat_unref) -#if MIXVIDEO_ENCODE_ENABLE - MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref) -#endif - //for encoding - MIXUNREF(priv->buffer_pool, mix_bufferpool_unref) - MIXUNREF(priv->surface_pool, 
mix_surfacepool_unref) -/* MIXUNREF(priv->init_params, mix_videoinitparams_unref) */ - MIXUNREF(priv->drm_params, mix_drmparams_unref) - MIXUNREF(priv->config_params, mix_videoconfigparams_unref) - - /* terminate libVA */ - if (priv->va_display) { - va_status = vaTerminate(priv->va_display); - LOG_V( "vaTerminate\n"); - if (va_status != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaTerminate\n"); - } else { - priv->va_display = NULL; - } - } + if (!priv) { + return; + } - MIXUNREF(priv->init_params, mix_videoinitparams_unref) + if (priv->video_format_enc) { + mix_videofmtenc_deinitialize(priv->video_format_enc); + } - priv->va_major_version = -1; - priv->va_major_version = -1; - priv->codec_mode = MIX_CODEC_MODE_DECODE; - priv->initialized = FALSE; - priv->configured = FALSE; + MIXUNREF(priv->frame_manager, mix_framemanager_unref) + MIXUNREF(priv->video_format, mix_videoformat_unref) + + MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref) + + //for encoding + MIXUNREF(priv->buffer_pool, mix_bufferpool_unref) + MIXUNREF(priv->surface_pool, mix_surfacepool_unref) + priv->requested_surface_info.surface_cnt = 0; + memset (priv->requested_surface_info.surface_allocated, 0 , sizeof (uint) * MAX_ENC_SURFACE_COUNT); + memset (priv->requested_surface_info.usrptr, 0 , sizeof (uint8 *) * MAX_ENC_SURFACE_COUNT); + /* MIXUNREF(priv->init_params, mix_videoinitparams_unref) */ + MIXUNREF(priv->drm_params, mix_drmparams_unref) + MIXUNREF(priv->config_params, mix_videoconfigparams_unref) + + /* terminate libVA */ + if (priv->va_display) { + va_status = vaTerminate(priv->va_display); + LOG_V( "vaTerminate\n"); + if (va_status != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaTerminate\n"); + } else { + priv->va_display = NULL; + } + } + + MIXUNREF(priv->init_params, mix_videoinitparams_unref) + + priv->va_major_version = -1; + priv->va_major_version = -1; + priv->codec_mode = MIX_CODEC_MODE_DECODE; + priv->initialized = FALSE; + priv->configured = FALSE; } /* The following methods are defined in MI-X API */ -MIX_RESULT mix_video_get_version_default(MixVideo * mix, guint * major, - guint * minor) { - if (!mix || !major || !minor) { - return MIX_RESULT_NULL_PTR; - } +MIX_RESULT mix_video_get_version_default(MixVideo * mix, uint * major, + uint * minor) { + if (!mix || !major || !minor) { + return MIX_RESULT_NULL_PTR; + } - *major = MIXVIDEO_CURRENT - MIXVIDEO_AGE; - *minor = MIXVIDEO_AGE; + *major = MIXVIDEO_CURRENT - MIXVIDEO_AGE; + *minor = MIXVIDEO_AGE; - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode, - MixVideoInitParams * init_params, MixDrmParams * drm_init_params) { + MixVideoInitParams * init_params, MixDrmParams * drm_init_params) { - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - MixDisplay *mix_display = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + MixDisplay *mix_display = NULL; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - if (!mix || !init_params) { - LOG_E( "!mix || !init_params\n"); - return MIX_RESULT_NULL_PTR; - } + if (!mix || !init_params) { + LOG_E( "!mix || !init_params\n"); + return MIX_RESULT_NULL_PTR; + } - if (mode >= MIX_CODEC_MODE_LAST) { - LOG_E("mode >= MIX_CODEC_MODE_LAST\n"); - return MIX_RESULT_INVALID_PARAM; - } + if (mode >= MIX_CODEC_MODE_LAST) { + LOG_E("mode >= MIX_CODEC_MODE_LAST\n"); + return MIX_RESULT_INVALID_PARAM; + } #if 0 //we have encoding support - /* TODO: We need to support encoding in the future */ - if (mode == 
MIX_CODEC_MODE_ENCODE) { - LOG_E("mode == MIX_CODEC_MODE_ENCODE\n"); - return MIX_RESULT_NOTIMPL; - } + /* TODO: We need to support encoding in the future */ + if (mode == MIX_CODEC_MODE_ENCODE) { + LOG_E("mode == MIX_CODEC_MODE_ENCODE\n"); + return MIX_RESULT_NOTIMPL; + } #endif - if (!MIX_IS_VIDEOINITPARAMS(init_params)) { - LOG_E("!MIX_IS_VIDEOINITPARAMS(init_params\n"); - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_VIDEOINITPARAMS(init_params)) { + LOG_E("!MIX_IS_VIDEOINITPARAMS(init_params\n"); + return MIX_RESULT_INVALID_PARAM; + } - priv = MIX_VIDEO_PRIVATE(mix); + priv = MIX_VIDEO_PRIVATE(mix); - if (priv->initialized) { - LOG_W( "priv->initialized\n"); - return MIX_RESULT_ALREADY_INIT; - } + if (priv->initialized) { + LOG_W( "priv->initialized\n"); + return MIX_RESULT_ALREADY_INIT; + } - /* clone mode */ - priv->codec_mode = mode; + /* clone mode */ + priv->codec_mode = mode; - /* ref init_params */ - priv->init_params = (MixVideoInitParams *) mix_params_ref(MIX_PARAMS( - init_params)); - if (!priv->init_params) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "!priv->init_params\n"); - goto cleanup; - } + /* ref init_params */ + priv->init_params = (MixVideoInitParams *) mix_params_ref(MIX_PARAMS( + init_params)); + if (!priv->init_params) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "!priv->init_params\n"); + goto cleanup; + } - /* NOTE: we don't do anything with drm_init_params */ + /* NOTE: we don't do anything with drm_init_params */ - /* libVA initialization */ + /* libVA initialization */ - { - VAStatus va_status; - Display *display = NULL; - ret = mix_videoinitparams_get_display(priv->init_params, &mix_display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get display 1\n"); - goto cleanup; - } + { + VAStatus va_status; + Display *display = NULL; + ret = mix_videoinitparams_get_display(priv->init_params, &mix_display); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get display 1\n"); + goto cleanup; + } #ifndef ANDROID - if (MIX_IS_DISPLAYX11(mix_display)) { - MixDisplayX11 *mix_displayx11 = MIX_DISPLAYX11(mix_display); - ret = mix_displayx11_get_display(mix_displayx11, &display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get display 2\n"); - goto cleanup; - - } - } else { - /* TODO: add support to other MixDisplay type. For now, just return error!*/ - LOG_E("It is not display x11\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } + if (MIX_IS_DISPLAYX11(mix_display)) { + MixDisplayX11 *mix_displayx11 = MIX_DISPLAYX11(mix_display); + ret = mix_displayx11_get_display(mix_displayx11, &display); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get display 2\n"); + goto cleanup; + + } + } else { + /* TODO: add support to other MixDisplay type. For now, just return error!*/ + LOG_E("It is not display x11\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } #else - if (MIX_IS_DISPLAYANDROID(mix_display)) { - MixDisplayAndroid *mix_displayandroid = MIX_DISPLAYANDROID(mix_display); - ret = mix_displayandroid_get_display(mix_displayandroid, (void**)&display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get display 2\n"); - goto cleanup; - - } - } else { - /* TODO: add support to other MixDisplay type. 
For now, just return error!*/ - LOG_E("It is not display android\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } + if (MIX_IS_DISPLAYANDROID(mix_display)) { + MixDisplayAndroid *mix_displayandroid = MIX_DISPLAYANDROID(mix_display); + ret = mix_displayandroid_get_display(mix_displayandroid, (void**)&display); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get display 2\n"); + goto cleanup; + + } + } else { + /* TODO: add support to other MixDisplay type. For now, just return error!*/ + LOG_E("It is not display android\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } #endif - /* Now, we can initialize libVA */ + /* Now, we can initialize libVA */ - LOG_V("Try to get vaDisplay : display = %x\n", display); - priv->va_display = vaGetDisplay(display); + LOG_V("Try to get vaDisplay : display = %x\n", display); + priv->va_display = vaGetDisplay(display); - /* Oops! Fail to get VADisplay */ - if (!priv->va_display) { - ret = MIX_RESULT_FAIL; - LOG_E("Fail to get VADisplay\n"); - goto cleanup; - } + /* Oops! Fail to get VADisplay */ + if (!priv->va_display) { + ret = MIX_RESULT_FAIL; + LOG_E("Fail to get VADisplay\n"); + goto cleanup; + } - /* Initialize libVA */ - va_status = vaInitialize(priv->va_display, &priv->va_major_version, - &priv->va_minor_version); + /* Initialize libVA */ + va_status = vaInitialize(priv->va_display, &priv->va_major_version, + &priv->va_minor_version); - /* Oops! Fail to initialize libVA */ - if (va_status != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Fail to initialize libVA\n"); - goto cleanup; - } + /* Oops! Fail to initialize libVA */ + if (va_status != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Fail to initialize libVA\n"); + goto cleanup; + } - /* TODO: check the version numbers of libVA */ + /* TODO: check the version numbers of libVA */ - priv->initialized = TRUE; - ret = MIX_RESULT_SUCCESS; - } + priv->initialized = TRUE; + ret = MIX_RESULT_SUCCESS; + } - cleanup: +cleanup: - if (ret != MIX_RESULT_SUCCESS) { - mix_video_private_cleanup(priv); - } + if (ret != MIX_RESULT_SUCCESS) { + mix_video_private_cleanup(priv); + } - MIXUNREF(mix_display, mix_display_unref); + MIXUNREF(mix_display, mix_display_unref); - LOG_V( "End\n"); + LOG_V( "End\n"); - return ret; + return ret; } MIX_RESULT mix_video_deinitialize_default(MixVideo * mix) { - MixVideoPrivate *priv = NULL; + MixVideoPrivate *priv = NULL; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - CHECK_INIT(mix, priv); + CHECK_INIT(mix, priv); - mix_video_private_cleanup(priv); + mix_video_private_cleanup(priv); - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; + LOG_V( "End\n"); + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_video_configure_decode(MixVideo * mix, - MixVideoConfigParamsDec * config_params_dec, MixDrmParams * drm_config_params) { + MixVideoConfigParamsDec * config_params_dec, MixDrmParams * drm_config_params) { - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - MixVideoConfigParamsDec *priv_config_params_dec = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + MixVideoConfigParamsDec *priv_config_params_dec = NULL; - gchar *mime_type = NULL; - guint fps_n, fps_d; - guint bufpoolsize = 0; + char *mime_type = NULL; + uint fps_n, fps_d; + uint bufpoolsize = 0; - MixFrameOrderMode frame_order_mode = MIX_FRAMEORDER_MODE_DISPLAYORDER; - MixDisplayOrderMode display_order_mode = MIX_DISPLAY_ORDER_UNKNOWN; + MixFrameOrderMode frame_order_mode = MIX_FRAMEORDER_MODE_DISPLAYORDER; + MixDisplayOrderMode display_order_mode = 
MIX_DISPLAY_ORDER_UNKNOWN; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - CHECK_INIT(mix, priv); + CHECK_INIT(mix, priv); - if (!config_params_dec) { - LOG_E( "!config_params_dec\n"); - return MIX_RESULT_NULL_PTR; - } + if (!config_params_dec) { + LOG_E( "!config_params_dec\n"); + return MIX_RESULT_NULL_PTR; + } - if (!MIX_IS_VIDEOCONFIGPARAMSDEC(config_params_dec)) { - LOG_E("Not a MixVideoConfigParamsDec\n"); - return MIX_RESULT_INVALID_PARAM; - } + if (!MIX_IS_VIDEOCONFIGPARAMSDEC(config_params_dec)) { + LOG_E("Not a MixVideoConfigParamsDec\n"); + return MIX_RESULT_INVALID_PARAM; + } - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); - - /* - * MixVideo has already been configured, it should be - * re-configured. - * - * TODO: Allow MixVideo re-configuration - */ - if (priv->configured) { - ret = MIX_RESULT_SUCCESS; - LOG_W( "Already configured\n"); - goto cleanup; - } + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); + + /* + * MixVideo has already been configured, it should be + * re-configured. + * + * TODO: Allow MixVideo re-configuration + */ + if (priv->configured) { + ret = MIX_RESULT_SUCCESS; + LOG_W( "Already configured\n"); + goto cleanup; + } - /* Make a copy of config_params */ - priv->config_params = (MixVideoConfigParams *) mix_params_dup(MIX_PARAMS( - config_params_dec)); - if (!priv->config_params) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Fail to duplicate config_params\n"); - goto cleanup; - } + /* Make a copy of config_params */ + priv->config_params = (MixVideoConfigParams *) mix_params_dup(MIX_PARAMS( + config_params_dec)); + if (!priv->config_params) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Fail to duplicate config_params\n"); + goto cleanup; + } - priv_config_params_dec = (MixVideoConfigParamsDec *)priv->config_params; + priv_config_params_dec = (MixVideoConfigParamsDec *)priv->config_params; - /* Get fps, frame order mode and mime type from config_params */ - ret = mix_videoconfigparamsdec_get_mime_type(priv_config_params_dec, &mime_type); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get mime type\n"); - goto cleanup; - } + /* Get fps, frame order mode and mime type from config_params */ + ret = mix_videoconfigparamsdec_get_mime_type(priv_config_params_dec, &mime_type); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get mime type\n"); + goto cleanup; + } - LOG_I( "mime : %s\n", mime_type); + LOG_I( "mime : %s\n", mime_type); #ifdef MIX_LOG_ENABLE - if (mix_strcmp(mime_type, "video/x-wmv") == 0) { - - LOG_I( "mime : video/x-wmv\n"); - if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { - LOG_I( "VC1 config_param\n"); - } else { - LOG_E("Not VC1 config_param\n"); - } - } + if (mix_strcmp(mime_type, "video/x-wmv") == 0) { + + LOG_I( "mime : video/x-wmv\n"); + if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { + LOG_I( "VC1 config_param\n"); + } else { + LOG_E("Not VC1 config_param\n"); + } + } #endif - ret = mix_videoconfigparamsdec_get_frame_order_mode(priv_config_params_dec, - &frame_order_mode); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to frame order mode\n"); - goto cleanup; - } + ret = mix_videoconfigparamsdec_get_frame_order_mode(priv_config_params_dec, + &frame_order_mode); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to frame order mode\n"); + goto cleanup; + } - ret = mix_videoconfigparamsdec_get_frame_rate(priv_config_params_dec, &fps_n, - &fps_d); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get frame rate\n"); - goto 
cleanup; - } + ret = mix_videoconfigparamsdec_get_frame_rate(priv_config_params_dec, &fps_n, + &fps_d); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get frame rate\n"); + goto cleanup; + } - if (!fps_n) { - ret = MIX_RESULT_FAIL; - LOG_E( "fps_n is 0\n"); - goto cleanup; - } + if (!fps_n) { + ret = MIX_RESULT_FAIL; + LOG_E( "fps_n is 0\n"); + goto cleanup; + } - ret = mix_videoconfigparamsdec_get_buffer_pool_size(priv_config_params_dec, - &bufpoolsize); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get buffer pool size\n"); - goto cleanup; - } + ret = mix_videoconfigparamsdec_get_buffer_pool_size(priv_config_params_dec, + &bufpoolsize); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get buffer pool size\n"); + goto cleanup; + } - /* create frame manager */ - priv->frame_manager = mix_framemanager_new(); - if (!priv->frame_manager) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create frame manager\n"); - goto cleanup; - } + /* create frame manager */ + priv->frame_manager = mix_framemanager_new(); + if (!priv->frame_manager) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create frame manager\n"); + goto cleanup; + } - if (frame_order_mode == MIX_FRAMEORDER_MODE_DECODEORDER) - { + if (frame_order_mode == MIX_FRAMEORDER_MODE_DECODEORDER) + { display_order_mode = MIX_DISPLAY_ORDER_FIFO; } - else if (mix_strcmp(mime_type, "video/x-wmv") == 0 || - mix_strcmp(mime_type, "video/mpeg") == 0 || - mix_strcmp(mime_type, "video/x-divx") == 0 || - mix_strcmp(mime_type, "video/x-h263") == 0 || - mix_strcmp(mime_type, "video/x-xvid") == 0 ) + else if (mix_strcmp(mime_type, "video/x-wmv") == 0 || + mix_strcmp(mime_type, "video/mpeg") == 0 || + mix_strcmp(mime_type, "video/x-divx") == 0 || + mix_strcmp(mime_type, "video/x-h263") == 0 || + mix_strcmp(mime_type, "video/x-xvid") == 0 ) { display_order_mode = MIX_DISPLAY_ORDER_PICTYPE; - } - else - { + } + else + { //display_order_mode = MIX_DISPLAY_ORDER_TIMESTAMP; display_order_mode = MIX_DISPLAY_ORDER_PICNUMBER; - } + } - /* initialize frame manager */ + /* initialize frame manager */ ret = mix_framemanager_initialize(priv->frame_manager, - display_order_mode, fps_n, fps_d); + display_order_mode, fps_n, fps_d); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to initialize frame manager\n"); - goto cleanup; - } + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to initialize frame manager\n"); + goto cleanup; + } - /* create buffer pool */ - priv->buffer_pool = mix_bufferpool_new(); - if (!priv->buffer_pool) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create buffer pool\n"); - goto cleanup; - } + /* create buffer pool */ + priv->buffer_pool = mix_bufferpool_new(); + if (!priv->buffer_pool) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create buffer pool\n"); + goto cleanup; + } - ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to initialize buffer pool\n"); - goto cleanup; - } + ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to initialize buffer pool\n"); + goto cleanup; + } - /* Finally, we can create MixVideoFormat */ - /* What type of MixVideoFormat we need create? 
*/ - - if (mix_strcmp(mime_type, "video/x-wmv") == 0 - && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { - - MixVideoFormat_VC1 *video_format = mix_videoformat_vc1_new(); - if (!video_format) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create VC-1 video format\n"); - goto cleanup; - } - - /* TODO: work specific to VC-1 */ - - priv->video_format = MIX_VIDEOFORMAT(video_format); - - } else if (mix_strcmp(mime_type, "video/x-h264") == 0 - && MIX_IS_VIDEOCONFIGPARAMSDEC_H264(priv_config_params_dec)) { - - MixVideoFormat_H264 *video_format = mix_videoformat_h264_new(); - if (!video_format) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create H.264 video format\n"); - goto cleanup; - } - - /* TODO: work specific to H.264 */ - - priv->video_format = MIX_VIDEOFORMAT(video_format); - - } else if (mix_strcmp(mime_type, "video/mpeg") == 0 || - mix_strcmp(mime_type, "video/x-divx") == 0 || - mix_strcmp(mime_type, "video/x-h263") == 0 || - mix_strcmp(mime_type, "video/x-xvid") == 0 || - mix_strcmp(mime_type, "video/x-dx50") == 0) { - - guint version = 0; - - /* Is this mpeg4:2 ? */ - if (mix_strcmp(mime_type, "video/mpeg") == 0 || - mix_strcmp(mime_type, "video/x-h263") == 0 ) { - - /* - * we don't support mpeg other than mpeg verion 4 - */ - if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) { - ret = MIX_RESULT_NOT_SUPPORTED; - goto cleanup; - } - - /* what is the mpeg version ? */ - ret = mix_videoconfigparamsdec_mp42_get_mpegversion( - MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get mpeg version\n"); - goto cleanup; - } - - /* if it is not MPEG4 */ - if (version != 4) { - ret = MIX_RESULT_NOT_SUPPORTED; - goto cleanup; - } - - } else { - - /* config_param shall be MixVideoConfigParamsDecMP42 */ - if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) { - LOG_E("MIX_IS_VIDEOCONFIGPARAMSDEC_MP42 failed.\n"); - ret = MIX_RESULT_NOT_SUPPORTED; - goto cleanup; - } - - /* what is the divx version ? */ - ret = mix_videoconfigparamsdec_mp42_get_divxversion( - MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get divx version\n"); - goto cleanup; - } - - /* if it is not divx 4 or 5 */ - if (version != 4 && version != 5) { - LOG_E("Invalid divx version.\n"); - ret = MIX_RESULT_NOT_SUPPORTED; - goto cleanup; - } - } - - MixVideoFormat_MP42 *video_format = mix_videoformat_mp42_new(); - if (!video_format) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create MPEG-4:2 video format\n"); - goto cleanup; - } - - /* TODO: work specific to MPEG-4:2 */ - priv->video_format = MIX_VIDEOFORMAT(video_format); - - } else { - - /* Oops! A format we don't know */ - - ret = MIX_RESULT_FAIL; - LOG_E("Unknown format, we can't handle it\n"); - goto cleanup; - } + /* Finally, we can create MixVideoFormat */ + /* What type of MixVideoFormat we need create? 
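+ * (Editor's summary of the dispatch below, in table form for readability;
+ * the mapping itself is taken from the code in this patch:
+ *
+ *   "video/x-wmv"  + VC1 dec params   -> mix_videoformat_vc1_new()
+ *   "video/x-h264" + H264 dec params  -> mix_videoformat_h264_new()
+ *   "video/mpeg", "video/x-divx", "video/x-h263",
+ *   "video/x-xvid", "video/x-dx50"
+ *                  + MP42 dec params  -> mix_videoformat_mp42_new()
+ *   anything else                     -> MIX_RESULT_FAIL)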
*/ - /* initialize MixVideoFormat */ - ret = mix_videofmt_initialize(priv->video_format, priv_config_params_dec, - priv->frame_manager, priv->buffer_pool, &priv->surface_pool, - priv->va_display); + if (mix_strcmp(mime_type, "video/x-wmv") == 0 + && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed initialize video format\n"); - goto cleanup; - } + MixVideoFormat_VC1 *video_format = mix_videoformat_vc1_new(); + if (!video_format) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create VC-1 video format\n"); + goto cleanup; + } - mix_surfacepool_ref(priv->surface_pool); + /* TODO: work specific to VC-1 */ - /* decide MixVideoFormat from mime_type*/ + priv->video_format = MIX_VIDEOFORMAT(video_format); - priv->configured = TRUE; - ret = MIX_RESULT_SUCCESS; + } else if (mix_strcmp(mime_type, "video/x-h264") == 0 + && MIX_IS_VIDEOCONFIGPARAMSDEC_H264(priv_config_params_dec)) { - cleanup: + MixVideoFormat_H264 *video_format = mix_videoformat_h264_new(); + if (!video_format) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create H.264 video format\n"); + goto cleanup; + } - if (ret != MIX_RESULT_SUCCESS) { - MIXUNREF(priv->config_params, mix_videoconfigparams_unref); - MIXUNREF(priv->frame_manager, mix_framemanager_unref); - MIXUNREF(priv->buffer_pool, mix_bufferpool_unref); - MIXUNREF(priv->video_format, mix_videoformat_unref); - } + /* TODO: work specific to H.264 */ + + priv->video_format = MIX_VIDEOFORMAT(video_format); + + } else if (mix_strcmp(mime_type, "video/mpeg") == 0 || + mix_strcmp(mime_type, "video/x-divx") == 0 || + mix_strcmp(mime_type, "video/x-h263") == 0 || + mix_strcmp(mime_type, "video/x-xvid") == 0 || + mix_strcmp(mime_type, "video/x-dx50") == 0) { + + uint version = 0; + + /* Is this mpeg4:2 ? */ + if (mix_strcmp(mime_type, "video/mpeg") == 0 || + mix_strcmp(mime_type, "video/x-h263") == 0 ) { + + /* + * we don't support mpeg other than mpeg verion 4 + */ + if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) { + ret = MIX_RESULT_NOT_SUPPORTED; + goto cleanup; + } + + /* what is the mpeg version ? */ + ret = mix_videoconfigparamsdec_mp42_get_mpegversion( + MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get mpeg version\n"); + goto cleanup; + } + + /* if it is not MPEG4 */ + if (version != 4) { + ret = MIX_RESULT_NOT_SUPPORTED; + goto cleanup; + } + + } else { + + /* config_param shall be MixVideoConfigParamsDecMP42 */ + if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) { + LOG_E("MIX_IS_VIDEOCONFIGPARAMSDEC_MP42 failed.\n"); + ret = MIX_RESULT_NOT_SUPPORTED; + goto cleanup; + } + + /* what is the divx version ? 
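+ * (Editor's note: a minimal sketch of the version gate that follows,
+ * where params_mp42 stands for
+ * MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec):
+ *
+ *   uint version = 0;
+ *   ret = mix_videoconfigparamsdec_mp42_get_divxversion(params_mp42, &version);
+ *   if (ret != MIX_RESULT_SUCCESS)
+ *       goto cleanup;                    // could not read the version
+ *   if (version != 4 && version != 5) {
+ *       ret = MIX_RESULT_NOT_SUPPORTED;  // only DivX 4 and 5 are accepted
+ *       goto cleanup;
+ *   }
+ *
+ * which matches the error handling used throughout this function.)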
*/ + ret = mix_videoconfigparamsdec_mp42_get_divxversion( + MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get divx version\n"); + goto cleanup; + } + + /* if it is not divx 4 or 5 */ + if (version != 4 && version != 5) { + LOG_E("Invalid divx version.\n"); + ret = MIX_RESULT_NOT_SUPPORTED; + goto cleanup; + } + } - if (mime_type) { - g_free(mime_type); - } + MixVideoFormat_MP42 *video_format = mix_videoformat_mp42_new(); + if (!video_format) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create MPEG-4:2 video format\n"); + goto cleanup; + } + + /* TODO: work specific to MPEG-4:2 */ + priv->video_format = MIX_VIDEOFORMAT(video_format); - priv->objlock.unlock(); - /* ---------------------- end lock --------------------- */ + } else { - LOG_V( "End\n"); + /* Oops! A format we don't know */ - return ret; + ret = MIX_RESULT_FAIL; + LOG_E("Unknown format, we can't handle it\n"); + goto cleanup; + } + + /* initialize MixVideoFormat */ + ret = mix_videofmt_initialize(priv->video_format, priv_config_params_dec, + priv->frame_manager, priv->buffer_pool, &priv->surface_pool, + priv->va_display); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed initialize video format\n"); + goto cleanup; + } + + mix_surfacepool_ref(priv->surface_pool); + + /* decide MixVideoFormat from mime_type*/ + + priv->configured = TRUE; + ret = MIX_RESULT_SUCCESS; + +cleanup: + + if (ret != MIX_RESULT_SUCCESS) { + MIXUNREF(priv->config_params, mix_videoconfigparams_unref); + MIXUNREF(priv->frame_manager, mix_framemanager_unref); + MIXUNREF(priv->buffer_pool, mix_bufferpool_unref); + MIXUNREF(priv->video_format, mix_videoformat_unref); + } + + if (mime_type) { + free(mime_type); + } + + priv->objlock.unlock(); + /* ---------------------- end lock --------------------- */ + + LOG_V( "End\n"); + + return ret; } -#if MIXVIDEO_ENCODE_ENABLE + MIX_RESULT mix_video_configure_encode(MixVideo * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixDrmParams * drm_config_params) { + MixVideoConfigParamsEnc * config_params_enc, + MixDrmParams * drm_config_params) { - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - MixVideoConfigParamsEnc *priv_config_params_enc = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; + MixVideoConfigParamsEnc *priv_config_params_enc = NULL; - gchar *mime_type = NULL; - MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264; - guint bufpoolsize = 0; + char *mime_type = NULL; + MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264; + uint bufpoolsize = 0; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - CHECK_INIT(mix, priv); + CHECK_INIT(mix, priv); - if (!config_params_enc) { - LOG_E("!config_params_enc\n"); - return MIX_RESULT_NULL_PTR; - } - if (!MIX_IS_VIDEOCONFIGPARAMSENC(config_params_enc)) { - LOG_E("Not a MixVideoConfigParams\n"); - return MIX_RESULT_INVALID_PARAM; - } + if (!config_params_enc) { + LOG_E("!config_params_enc\n"); + return MIX_RESULT_NULL_PTR; + } + if (!MIX_IS_VIDEOCONFIGPARAMSENC(config_params_enc)) { + LOG_E("Not a MixVideoConfigParams\n"); + return MIX_RESULT_INVALID_PARAM; + } - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); - - /* - * MixVideo has already been configured, it should be - * re-configured. 
- * - * TODO: Allow MixVideo re-configuration - */ - if (priv->configured) { - ret = MIX_RESULT_SUCCESS; - LOG_E( "Already configured\n"); - goto cleanup; - } + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); + + /* + * MixVideo has already been configured, it should be + * re-configured. + * + * TODO: Allow MixVideo re-configuration + */ + if (priv->configured) { + ret = MIX_RESULT_SUCCESS; + LOG_E( "Already configured\n"); + goto cleanup; + } - /* Make a copy of config_params */ - priv->config_params = (MixVideoConfigParams *) mix_params_dup( - MIX_PARAMS(config_params_enc)); - if (!priv->config_params) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Fail to duplicate config_params\n"); - goto cleanup; - } + /* Make a copy of config_params */ + priv->config_params = (MixVideoConfigParams *) mix_params_dup( + MIX_PARAMS(config_params_enc)); + if (!priv->config_params) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Fail to duplicate config_params\n"); + goto cleanup; + } - priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params; + priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params; - /* Get fps, frame order mode and mime type from config_params */ - ret = mix_videoconfigparamsenc_get_mime_type(priv_config_params_enc, - &mime_type); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get mime type\n"); - goto cleanup; - } + /* Get fps, frame order mode and mime type from config_params */ + ret = mix_videoconfigparamsenc_get_mime_type(priv_config_params_enc, + &mime_type); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get mime type\n"); + goto cleanup; + } - LOG_I( "mime : %s\n", mime_type); + LOG_I( "mime : %s\n", mime_type); - ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc, - &encode_format); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get target format\n"); - goto cleanup; - } + ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc, + &encode_format); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get target format\n"); + goto cleanup; + } - LOG_I( "encode_format : %d\n", - encode_format); + LOG_I( "encode_format : %d\n", + encode_format); - ret = mix_videoconfigparamsenc_get_buffer_pool_size( - priv_config_params_enc, &bufpoolsize); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get buffer pool size\n"); - goto cleanup; - } + ret = mix_videoconfigparamsenc_get_buffer_pool_size( + priv_config_params_enc, &bufpoolsize); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get buffer pool size\n"); + goto cleanup; + } - /* create frame manager */ - priv->frame_manager = mix_framemanager_new(); - if (!priv->frame_manager) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create frame manager\n"); - goto cleanup; - } + /* create frame manager */ + priv->frame_manager = mix_framemanager_new(); + if (!priv->frame_manager) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create frame manager\n"); + goto cleanup; + } - /* initialize frame manager */ - /* frame rate can be any value for encoding. */ - ret = mix_framemanager_initialize(priv->frame_manager, MIX_DISPLAY_ORDER_FIFO, - 1, 1); + /* initialize frame manager */ + /* frame rate can be any value for encoding. 
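+ * (Editor's reading: the encode path does no display reordering, so the
+ * frame manager is put in MIX_DISPLAY_ORDER_FIFO with a placeholder 1/1
+ * rate; compare the decode path above, which picks PICTYPE or PICNUMBER
+ * ordering from the mime type and passes the real fps_n/fps_d.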
*/ + ret = mix_framemanager_initialize(priv->frame_manager, MIX_DISPLAY_ORDER_FIFO, + 1, 1); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to initialize frame manager\n"); - goto cleanup; - } + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to initialize frame manager\n"); + goto cleanup; + } - /* create buffer pool */ - priv->buffer_pool = mix_bufferpool_new(); - if (!priv->buffer_pool) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create buffer pool\n"); - goto cleanup; - } + /* create buffer pool */ + priv->buffer_pool = mix_bufferpool_new(); + if (!priv->buffer_pool) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to create buffer pool\n"); + goto cleanup; + } - ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to initialize buffer pool\n"); - goto cleanup; - } + ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to initialize buffer pool\n"); + goto cleanup; + } - /* Finally, we can create MixVideoFormatEnc */ - /* What type of MixVideoFormatEnc we need create? */ + /* Finally, we can create MixVideoFormatEnc */ + /* What type of MixVideoFormatEnc we need create? */ - if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264 - && MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) { + if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264 + && MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) { - MixVideoFormatEnc_H264 *video_format_enc = - mix_videoformatenc_h264_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n"); - goto cleanup; - } + MixVideoFormatEnc_H264 *video_format_enc = + mix_videoformatenc_h264_new(); + if (!video_format_enc) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n"); + goto cleanup; + } - /* work specific to h264 encode */ + /* work specific to h264 encode */ - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - } + } else if (encode_format == MIX_ENCODE_TARGET_FORMAT_MPEG4 - && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) { + && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) { MixVideoFormatEnc_MPEG4 *video_format_enc = mix_videoformatenc_mpeg4_new(); if (!video_format_enc) { @@ -954,14 +969,14 @@ MIX_RESULT mix_video_configure_encode(MixVideo * mix, goto cleanup; } - /* work specific to mpeg4 */ + /* work specific to mpeg4 */ - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - } + } - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263 - && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) { + else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263 + && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) { MixVideoFormatEnc_H263 *video_format_enc = mix_videoformatenc_h263_new(); if (!video_format_enc) { @@ -970,14 +985,14 @@ MIX_RESULT mix_video_configure_encode(MixVideo * mix, goto cleanup; } - /* work specific to h.263 */ + /* work specific to h.263 */ - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - } + } - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW - && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) { + else if (encode_format == 
MIX_ENCODE_TARGET_FORMAT_PREVIEW + && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) { MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new(); if (!video_format_enc) { @@ -986,1005 +1001,1139 @@ MIX_RESULT mix_video_configure_encode(MixVideo * mix, goto cleanup; } - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); + priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - } - else { + } + else { - /*unsupported format */ - ret = MIX_RESULT_NOT_SUPPORTED; - LOG_E("Unknown format, we can't handle it\n"); - goto cleanup; - } + /*unsupported format */ + ret = MIX_RESULT_NOT_SUPPORTED; + LOG_E("Unknown format, we can't handle it\n"); + goto cleanup; + } - /* initialize MixVideoEncFormat */ - ret = mix_videofmtenc_initialize(priv->video_format_enc, - priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool, - priv->va_display); + /* initialize MixVideoEncFormat */ + ret = mix_videofmtenc_initialize(priv->video_format_enc, + priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool, + &(priv->requested_surface_info), priv->va_display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed initialize video format\n"); - goto cleanup; - } + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed initialize video format\n"); + goto cleanup; + } - mix_surfacepool_ref(priv->surface_pool); + mix_surfacepool_ref(priv->surface_pool); - priv->configured = TRUE; - ret = MIX_RESULT_SUCCESS; + priv->configured = TRUE; + ret = MIX_RESULT_SUCCESS; - cleanup: +cleanup: - if (ret != MIX_RESULT_SUCCESS) { - MIXUNREF(priv->frame_manager, mix_framemanager_unref); - MIXUNREF(priv->config_params, mix_videoconfigparams_unref); - MIXUNREF(priv->buffer_pool, mix_bufferpool_unref); - MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref); - } + if (ret != MIX_RESULT_SUCCESS) { + MIXUNREF(priv->frame_manager, mix_framemanager_unref); + MIXUNREF(priv->config_params, mix_videoconfigparams_unref); + MIXUNREF(priv->buffer_pool, mix_bufferpool_unref); + MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref); + } - if (mime_type) { - g_free(mime_type); - } + if (mime_type) { + free(mime_type); + } - priv->objlock.unlock(); - /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); + /* ---------------------- end lock --------------------- */ - LOG_V( "End\n"); + LOG_V( "End\n"); - return ret; + return ret; } -#endif + MIX_RESULT mix_video_configure_default(MixVideo * mix, - MixVideoConfigParams * config_params, - MixDrmParams * drm_config_params) { + MixVideoConfigParams * config_params, + MixDrmParams * drm_config_params) { - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - CHECK_INIT(mix, priv); - if(!config_params) { - LOG_E("!config_params\n"); - return MIX_RESULT_NULL_PTR; - } + CHECK_INIT(mix, priv); + if (!config_params) { + LOG_E("!config_params\n"); + return MIX_RESULT_NULL_PTR; + } - /*Decoder mode or Encoder mode*/ - if (priv->codec_mode == MIX_CODEC_MODE_DECODE && MIX_IS_VIDEOCONFIGPARAMSDEC(config_params)) { - ret = mix_video_configure_decode(mix, (MixVideoConfigParamsDec*)config_params, NULL); - } -#if MIXVIDEO_ENCODE_ENABLE - else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE && MIX_IS_VIDEOCONFIGPARAMSENC(config_params)) { - ret = mix_video_configure_encode(mix, (MixVideoConfigParamsEnc*)config_params, NULL); - } -#endif - else { - LOG_E("Codec mode not supported\n"); - } + 
/*Decoder mode or Encoder mode*/ + if (priv->codec_mode == MIX_CODEC_MODE_DECODE && MIX_IS_VIDEOCONFIGPARAMSDEC(config_params)) { + ret = mix_video_configure_decode(mix, (MixVideoConfigParamsDec*)config_params, NULL); + } + else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE && MIX_IS_VIDEOCONFIGPARAMSENC(config_params)) { + ret = mix_video_configure_encode(mix, (MixVideoConfigParamsEnc*)config_params, NULL); + } + else { + LOG_E("Codec mode not supported\n"); + } - LOG_V( "end\n"); + LOG_V( "end\n"); - return ret; + return ret; } MIX_RESULT mix_video_get_config_default(MixVideo * mix, - MixVideoConfigParams ** config_params) { + MixVideoConfigParams ** config_params) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoPrivate *priv = NULL; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoPrivate *priv = NULL; - CHECK_INIT_CONFIG(mix, priv); + CHECK_INIT_CONFIG(mix, priv); - if (!config_params) { - LOG_E( "!config_params\n"); - return MIX_RESULT_NULL_PTR; - } + if (!config_params) { + LOG_E( "!config_params\n"); + return MIX_RESULT_NULL_PTR; + } - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); - *config_params = MIX_VIDEOCONFIGPARAMS(mix_params_dup(MIX_PARAMS(priv->config_params))); - if(!*config_params) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to duplicate MixVideoConfigParams\n"); - goto cleanup; - } + *config_params = MIX_VIDEOCONFIGPARAMS(mix_params_dup(MIX_PARAMS(priv->config_params))); + if (!*config_params) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E("Failed to duplicate MixVideoConfigParams\n"); + goto cleanup; + } - cleanup: +cleanup: - /* ---------------------- end lock --------------------- */ - priv->objlock.unlock(); + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); - LOG_V( "End\n"); + LOG_V( "End\n"); - return ret; + return ret; } MIX_RESULT mix_video_decode_default(MixVideo * mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params) { + int bufincnt, MixVideoDecodeParams * decode_params) { - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - CHECK_INIT_CONFIG(mix, priv); - if(!bufin || !bufincnt || !decode_params) { - LOG_E( "!bufin || !bufincnt || !decode_params\n"); - return MIX_RESULT_NULL_PTR; - } + CHECK_INIT_CONFIG(mix, priv); + if (!bufin || !bufincnt || !decode_params) { + LOG_E( "!bufin || !bufincnt || !decode_params\n"); + return MIX_RESULT_NULL_PTR; + } // reset new sequence flag decode_params->new_sequence = FALSE; - //First check that we have surfaces available for decode - ret = mix_surfacepool_check_available(priv->surface_pool); + //First check that we have surfaces available for decode + ret = mix_surfacepool_check_available(priv->surface_pool); - if (ret == MIX_RESULT_POOLEMPTY) { - LOG_I( "Out of surface\n"); - return MIX_RESULT_OUTOFSURFACES; - } + if (ret == MIX_RESULT_POOLEMPTY) { + LOG_I( "Out of surface\n"); + return MIX_RESULT_OUTOFSURFACES; + } - priv->objlock.lock(); + priv->objlock.lock(); - ret = mix_videofmt_decode(priv->video_format, bufin, bufincnt, decode_params); + ret = mix_videofmt_decode(priv->video_format, bufin, bufincnt, decode_params); - priv->objlock.unlock(); + priv->objlock.unlock(); - LOG_V( "End\n"); + LOG_V( "End\n"); - return ret; + return ret; } MIX_RESULT mix_video_get_frame_default(MixVideo * mix, MixVideoFrame ** 
frame) { - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; - CHECK_INIT_CONFIG(mix, priv); + CHECK_INIT_CONFIG(mix, priv); - if (!frame) { - LOG_E( "!frame\n"); - return MIX_RESULT_NULL_PTR; - } + if (!frame) { + LOG_E( "!frame\n"); + return MIX_RESULT_NULL_PTR; + } - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); - LOG_V("Calling frame manager dequeue\n"); + LOG_V("Calling frame manager dequeue\n"); - ret = mix_framemanager_dequeue(priv->frame_manager, frame); + ret = mix_framemanager_dequeue(priv->frame_manager, frame); - /* ---------------------- end lock --------------------- */ - priv->objlock.unlock(); + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); - LOG_V( "End\n"); + LOG_V( "End\n"); - return ret; + return ret; } MIX_RESULT mix_video_release_frame_default(MixVideo * mix, - MixVideoFrame * frame) { + MixVideoFrame * frame) { - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; - CHECK_INIT_CONFIG(mix, priv); + CHECK_INIT_CONFIG(mix, priv); - if (!frame) { - LOG_E( "!frame\n"); - return MIX_RESULT_NULL_PTR; - } + if (!frame) { + LOG_E( "!frame\n"); + return MIX_RESULT_NULL_PTR; + } - /* - * We don't need lock here. MixVideoFrame has lock to - * protect itself. - */ + /* + * We don't need lock here. MixVideoFrame has lock to + * protect itself. + */ #if 0 - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); #endif - LOG_I("Releasing reference frame %x\n", (guint) frame); - mix_videoframe_unref(frame); + LOG_I("Releasing reference frame %x\n", (uint) frame); + mix_videoframe_unref(frame); - ret = MIX_RESULT_SUCCESS; + ret = MIX_RESULT_SUCCESS; #if 0 - /* ---------------------- end lock --------------------- */ - priv->objlock.unlock(); + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); #endif - LOG_V( "End\n"); + LOG_V( "End\n"); - return ret; + return ret; } #ifdef ANDROID MIX_RESULT mix_video_render_default(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame) { + MixVideoRenderParams * render_params, MixVideoFrame *frame) { - return MIX_RESULT_NOTIMPL; + return MIX_RESULT_NOTIMPL; } #else MIX_RESULT mix_video_render_default(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame) { + MixVideoRenderParams * render_params, MixVideoFrame *frame) { - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; - MixDisplay *mix_display = NULL; - MixDisplayX11 *mix_display_x11 = NULL; + MixDisplay *mix_display = NULL; + MixDisplayX11 *mix_display_x11 = NULL; - Display *display = NULL; + Display *display = NULL; - Drawable drawable = 0; - MixRect src_rect, dst_rect; + Drawable drawable = 0; + MixRect src_rect, dst_rect; - VARectangle *va_cliprects = NULL; - guint number_of_cliprects = 0; + VARectangle *va_cliprects = NULL; + uint number_of_cliprects = 0; - /* VASurfaceID va_surface_id; */ - gulong va_surface_id; - VAStatus va_status; + /* VASurfaceID 
va_surface_id; */
+ ulong va_surface_id;
+ VAStatus va_status;
- gboolean sync_flag = FALSE;
+ bool sync_flag = FALSE;
- CHECK_INIT_CONFIG(mix, priv);
+ CHECK_INIT_CONFIG(mix, priv);
- if (!render_params || !frame) {
- LOG_E( "!render_params || !frame\n");
- return MIX_RESULT_NULL_PTR;
- }
+ if (!render_params || !frame) {
+ LOG_E( "!render_params || !frame\n");
+ return MIX_RESULT_NULL_PTR;
+ }
- /* Is this render param valid? */
- if (!MIX_IS_VIDEORENDERPARAMS(render_params)) {
- LOG_E("Not MixVideoRenderParams\n");
- return MIX_RESULT_INVALID_PARAM;
- }
+ /* Is this render param valid? */
+ if (!MIX_IS_VIDEORENDERPARAMS(render_params)) {
+ LOG_E("Not MixVideoRenderParams\n");
+ return MIX_RESULT_INVALID_PARAM;
+ }
- /*
- * We don't need lock here. priv->va_display may be the only variable
- * seems need to be protected. But, priv->va_display is initialized
- * when mixvideo object is initialized, and it keeps
- * the same value thoughout the life of mixvideo.
- */
+ /*
+ * We don't need a lock here. priv->va_display may be the only variable
+ * that needs to be protected. But priv->va_display is initialized
+ * when the mixvideo object is initialized, and it keeps
+ * the same value throughout the life of mixvideo.
+ */
#if 0
- /* ---------------------- begin lock --------------------- */
- priv->objlock.lock();
+ /* ---------------------- begin lock --------------------- */
+ priv->objlock.lock();
#endif
- /* get MixDisplay prop from render param */
- ret = mix_videorenderparams_get_display(render_params, &mix_display);
- if (ret != MIX_RESULT_SUCCESS) {
- LOG_E("Failed to get mix_display\n");
- goto cleanup;
- }
+ /* get MixDisplay prop from render param */
+ ret = mix_videorenderparams_get_display(render_params, &mix_display);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get mix_display\n");
+ goto cleanup;
+ }
- /* Is this MixDisplayX11 ?
*/ + /* TODO: we shall also support MixDisplay other than MixDisplayX11 */ + if (!MIX_IS_DISPLAYX11(mix_display)) { + ret = MIX_RESULT_INVALID_PARAM; + LOG_E( "Not MixDisplayX11\n"); + goto cleanup; + } - /* cast MixDisplay to MixDisplayX11 */ - mix_display_x11 = MIX_DISPLAYX11(mix_display); + /* cast MixDisplay to MixDisplayX11 */ + mix_display_x11 = MIX_DISPLAYX11(mix_display); - /* Get Drawable */ - ret = mix_displayx11_get_drawable(mix_display_x11, &drawable); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Failed to get drawable\n"); - goto cleanup; - } + /* Get Drawable */ + ret = mix_displayx11_get_drawable(mix_display_x11, &drawable); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Failed to get drawable\n"); + goto cleanup; + } - /* Get Display */ - ret = mix_displayx11_get_display(mix_display_x11, &display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Failed to get display\n"); - goto cleanup; - } + /* Get Display */ + ret = mix_displayx11_get_display(mix_display_x11, &display); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Failed to get display\n"); + goto cleanup; + } - /* get src_rect */ - ret = mix_videorenderparams_get_src_rect(render_params, &src_rect); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get SOURCE src_rect\n"); - goto cleanup; - } + /* get src_rect */ + ret = mix_videorenderparams_get_src_rect(render_params, &src_rect); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get SOURCE src_rect\n"); + goto cleanup; + } - /* get dst_rect */ - ret = mix_videorenderparams_get_dest_rect(render_params, &dst_rect); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Failed to get dst_rect\n"); - goto cleanup; - } + /* get dst_rect */ + ret = mix_videorenderparams_get_dest_rect(render_params, &dst_rect); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Failed to get dst_rect\n"); + goto cleanup; + } - /* get va_cliprects */ - ret = mix_videorenderparams_get_cliprects_internal(render_params, - &va_cliprects, &number_of_cliprects); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get va_cliprects\n"); - goto cleanup; - } + /* get va_cliprects */ + ret = mix_videorenderparams_get_cliprects_internal(render_params, + &va_cliprects, &number_of_cliprects); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get va_cliprects\n"); + goto cleanup; + } - /* get surface id from frame */ - ret = mix_videoframe_get_frame_id(frame, &va_surface_id); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get va_surface_id\n"); - goto cleanup; - } - guint64 timestamp = 0; - mix_videoframe_get_timestamp(frame, ×tamp); - LOG_V( "Displaying surface ID %d, timestamp %"G_GINT64_FORMAT"\n", (int)va_surface_id, timestamp); + /* get surface id from frame */ + ret = mix_videoframe_get_frame_id(frame, &va_surface_id); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get va_surface_id\n"); + goto cleanup; + } + uint64 timestamp = 0; + mix_videoframe_get_timestamp(frame, ×tamp); + LOG_V( "Displaying surface ID %d, timestamp %"UINT64_FORMAT"\n", (int)va_surface_id, timestamp); - guint32 frame_structure = 0; - mix_videoframe_get_frame_structure(frame, &frame_structure); + uint32 frame_structure = 0; + mix_videoframe_get_frame_structure(frame, &frame_structure); - ret = mix_videoframe_get_sync_flag(frame, &sync_flag); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get sync_flag\n"); - goto cleanup; - } + ret = mix_videoframe_get_sync_flag(frame, &sync_flag); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to get sync_flag\n"); + goto cleanup; + } - if (!sync_flag) { - ret = 
mix_videoframe_set_sync_flag(frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - goto cleanup; - } - - va_status = vaSyncSurface(priv->va_display, va_surface_id); - if (va_status != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed vaSyncSurface() : va_status = 0x%x\n", va_status); - goto cleanup; - } - } + if (!sync_flag) { + ret = mix_videoframe_set_sync_flag(frame, TRUE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + goto cleanup; + } + + va_status = vaSyncSurface(priv->va_display, va_surface_id); + if (va_status != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Failed vaSyncSurface() : va_status = 0x%x\n", va_status); + goto cleanup; + } + } - /* TODO: the last param of vaPutSurface is de-interlacing flags, - what is value shall be*/ - va_status = vaPutSurface(priv->va_display, (VASurfaceID) va_surface_id, - drawable, src_rect.x, src_rect.y, src_rect.width, src_rect.height, - dst_rect.x, dst_rect.y, dst_rect.width, dst_rect.height, - va_cliprects, number_of_cliprects, frame_structure); + /* TODO: the last param of vaPutSurface is de-interlacing flags, + what is value shall be*/ + va_status = vaPutSurface(priv->va_display, (VASurfaceID) va_surface_id, + drawable, src_rect.x, src_rect.y, src_rect.width, src_rect.height, + dst_rect.x, dst_rect.y, dst_rect.width, dst_rect.height, + va_cliprects, number_of_cliprects, frame_structure); - if (va_status != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed vaPutSurface() : va_status = 0x%x\n", va_status); - goto cleanup; - } + if (va_status != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("Failed vaPutSurface() : va_status = 0x%x\n", va_status); + goto cleanup; + } - ret = MIX_RESULT_SUCCESS; + ret = MIX_RESULT_SUCCESS; - cleanup: +cleanup: - MIXUNREF(mix_display, mix_display_unref) - /* MIXUNREF(render_params, mix_videorenderparams_unref)*/ + MIXUNREF(mix_display, mix_display_unref) + /* MIXUNREF(render_params, mix_videorenderparams_unref)*/ #if 0 - /* ---------------------- end lock --------------------- */ - priv->objlock.unlock(); + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); #endif - LOG_V( "End\n"); + LOG_V( "End\n"); - return ret; + return ret; } #endif /* ANDROID */ -#if MIXVIDEO_ENCODE_ENABLE + MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { + int bufincnt, MixIOVec * iovout[], int iovoutcnt, + MixVideoEncodeParams * encode_params) { - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - CHECK_INIT_CONFIG(mix, priv); - if(!bufin || !bufincnt) { //we won't check encode_params here, it's just a placeholder - LOG_E( "!bufin || !bufincnt\n"); - return MIX_RESULT_NULL_PTR; - } + CHECK_INIT_CONFIG(mix, priv); + if (!bufin || !bufincnt) { //we won't check encode_params here, it's just a placeholder + LOG_E( "!bufin || !bufincnt\n"); + return MIX_RESULT_NULL_PTR; + } - //First check that we have surfaces available for decode - ret = mix_surfacepool_check_available(priv->surface_pool); + //First check that we have surfaces available for decode + ret = mix_surfacepool_check_available(priv->surface_pool); - if (ret == MIX_RESULT_POOLEMPTY) { - LOG_I( "Out of surface\n"); - return MIX_RESULT_OUTOFSURFACES; - } + if (ret == MIX_RESULT_POOLEMPTY) { + LOG_I( "Out 
of surface\n"); + return MIX_RESULT_OUTOFSURFACES; + } - priv->objlock.lock(); + priv->objlock.lock(); - ret = mix_videofmtenc_encode(priv->video_format_enc, bufin, bufincnt, - iovout, iovoutcnt, encode_params); + ret = mix_videofmtenc_encode(priv->video_format_enc, bufin, bufincnt, + iovout, iovoutcnt, encode_params); - priv->objlock.unlock(); + priv->objlock.unlock(); - LOG_V( "End\n"); - return ret; + LOG_V( "End\n"); + return ret; } -#endif + MIX_RESULT mix_video_flush_default(MixVideo * mix) { - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - CHECK_INIT_CONFIG(mix, priv); + CHECK_INIT_CONFIG(mix, priv); - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); - if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) { - ret = mix_videofmt_flush(priv->video_format); + if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) { + ret = mix_videofmt_flush(priv->video_format); - ret = mix_framemanager_flush(priv->frame_manager); - } -#if MIXVIDEO_ENCODE_ENABLE - else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE - && priv->video_format_enc != NULL) { - /*No framemanager for encoder now*/ - ret = mix_videofmtenc_flush(priv->video_format_enc); - } -#endif - else { - priv->objlock.unlock(); - LOG_E("Invalid video_format/video_format_enc Pointer\n"); - return MIX_RESULT_NULL_PTR; - } + ret = mix_framemanager_flush(priv->frame_manager); + } + else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE + && priv->video_format_enc != NULL) { + /*No framemanager for encoder now*/ + ret = mix_videofmtenc_flush(priv->video_format_enc); + } + else { + priv->objlock.unlock(); + LOG_E("Invalid video_format/video_format_enc Pointer\n"); + return MIX_RESULT_NULL_PTR; + } - /* ---------------------- end lock --------------------- */ - priv->objlock.unlock(); + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); - LOG_V( "End\n"); + LOG_V( "End\n"); - return ret; + return ret; } MIX_RESULT mix_video_eos_default(MixVideo * mix) { - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - CHECK_INIT_CONFIG(mix, priv); + CHECK_INIT_CONFIG(mix, priv); - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); + /* ---------------------- begin lock --------------------- */ + priv->objlock.lock(); - if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) { - ret = mix_videofmt_eos(priv->video_format); + if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) { + ret = mix_videofmt_eos(priv->video_format); - /* We should not call mix_framemanager_eos() here. - * MixVideoFormat* is responsible to call this function. - * Commnet the function call here! 
- */
- /* frame manager will set EOS flag to be TRUE */
- /* ret = mix_framemanager_eos(priv->frame_manager); */
- }
-#if MIXVIDEO_ENCODE_ENABLE
- else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE
- && priv->video_format_enc != NULL) {
- /*No framemanager now*/
- ret = mix_videofmtenc_eos(priv->video_format_enc);
- }
-#endif
- else {
- priv->objlock.unlock();
- LOG_E("Invalid video_format/video_format_enc Pointer\n");
- return MIX_RESULT_NULL_PTR;
- }
+ /* We should not call mix_framemanager_eos() here.
+ * MixVideoFormat* is responsible for calling this function.
+ * Comment out the function call here!
+ */
+ /* frame manager will set EOS flag to be TRUE */
+ /* ret = mix_framemanager_eos(priv->frame_manager); */
+ }
+ else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE
+ && priv->video_format_enc != NULL) {
+ /*No framemanager now*/
+ ret = mix_videofmtenc_eos(priv->video_format_enc);
+ }
+ else {
+ priv->objlock.unlock();
+ LOG_E("Invalid video_format/video_format_enc Pointer\n");
+ return MIX_RESULT_NULL_PTR;
+ }
- /* ---------------------- end lock --------------------- */
- priv->objlock.unlock();
+ /* ---------------------- end lock --------------------- */
+ priv->objlock.unlock();
- LOG_V( "End\n");
+ LOG_V( "End\n");
- return ret;
+ return ret;
 }
 MIX_RESULT mix_video_get_state_default(MixVideo * mix, MixState * state) {
- MixVideoPrivate *priv = NULL;
+ MixVideoPrivate *priv = NULL;
- LOG_V( "Begin\n");
+ LOG_V( "Begin\n");
- CHECK_INIT_CONFIG(mix, priv);
+ CHECK_INIT_CONFIG(mix, priv);
- if (!state) {
- LOG_E( "!state\n");
- return MIX_RESULT_NULL_PTR;
- }
+ if (!state) {
+ LOG_E( "!state\n");
+ return MIX_RESULT_NULL_PTR;
+ }
- *state = MIX_STATE_CONFIGURED;
+ *state = MIX_STATE_CONFIGURED;
- LOG_V( "End\n");
+ LOG_V( "End\n");
- return MIX_RESULT_SUCCESS;
+ return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_video_get_mixbuffer_default(MixVideo * mix, MixBuffer ** buf) {
- MIX_RESULT ret = MIX_RESULT_FAIL;
- MixVideoPrivate *priv = NULL;
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+ MixVideoPrivate *priv = NULL;
- LOG_V( "Begin\n");
+ LOG_V( "Begin\n");
- CHECK_INIT_CONFIG(mix, priv);
+ CHECK_INIT_CONFIG(mix, priv);
- if (!buf) {
- LOG_E( "!buf\n");
- return MIX_RESULT_INVALID_PARAM;
- }
+ if (!buf) {
+ LOG_E( "!buf\n");
+ return MIX_RESULT_INVALID_PARAM;
+ }
- /* ---------------------- begin lock --------------------- */
- priv->objlock.lock();
+ /* ---------------------- begin lock --------------------- */
+ priv->objlock.lock();
- ret = mix_bufferpool_get(priv->buffer_pool, buf);
+ ret = mix_bufferpool_get(priv->buffer_pool, buf);
- /* ---------------------- end lock --------------------- */
- priv->objlock.unlock();
+ /* ---------------------- end lock --------------------- */
+ priv->objlock.unlock();
- LOG_V( "End ret = 0x%x\n", ret);
+ LOG_V( "End ret = 0x%x\n", ret);
- return ret;
+ return ret;
 }
 MIX_RESULT mix_video_release_mixbuffer_default(MixVideo * mix, MixBuffer * buf) {
- MIX_RESULT ret = MIX_RESULT_FAIL;
- MixVideoPrivate *priv = NULL;
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+ MixVideoPrivate *priv = NULL;
- LOG_V( "Begin\n");
+ LOG_V( "Begin\n");
- CHECK_INIT_CONFIG(mix, priv);
+ CHECK_INIT_CONFIG(mix, priv);
- if (!buf) {
- LOG_E( "!buf\n");
- return MIX_RESULT_INVALID_PARAM;
- }
+ if (!buf) {
+ LOG_E( "!buf\n");
+ return MIX_RESULT_INVALID_PARAM;
+ }
- /* ---------------------- begin lock --------------------- */
- priv->objlock.lock();
+ /* ---------------------- begin lock --------------------- */
+ priv->objlock.lock();
- mix_buffer_unref(buf);
+ mix_buffer_unref(buf);
- /*
---------------------- end lock --------------------- */ - priv->objlock.unlock(); + /* ---------------------- end lock --------------------- */ + priv->objlock.unlock(); - LOG_V( "End\n"); - return ret; + LOG_V( "End\n"); + return ret; } -#if MIXVIDEO_ENCODE_ENABLE -MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, guint *max_size) + +MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, uint *max_size) { - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - if (!mix || !max_size) /* TODO: add other parameter NULL checking */ - { - LOG_E( "!mix || !bufsize\n"); - return MIX_RESULT_NULL_PTR; - } + if (!mix || !max_size) /* TODO: add other parameter NULL checking */ + { + LOG_E( "!mix || !bufsize\n"); + return MIX_RESULT_NULL_PTR; + } - CHECK_INIT_CONFIG(mix, priv); + CHECK_INIT_CONFIG(mix, priv); - priv->objlock.lock(); + priv->objlock.lock(); - ret = mix_videofmtenc_get_max_coded_buffer_size(priv->video_format_enc, max_size); + ret = mix_videofmtenc_get_max_coded_buffer_size(priv->video_format_enc, max_size); - priv->objlock.unlock(); + priv->objlock.unlock(); - LOG_V( "End\n"); - return ret; + LOG_V( "End\n"); + return ret; } MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, - MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) + MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) { - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; + MIX_RESULT ret = MIX_RESULT_FAIL; + MixVideoPrivate *priv = NULL; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - CHECK_INIT_CONFIG(mix, priv); + CHECK_INIT_CONFIG(mix, priv); - if (dynamic_params == NULL) { - LOG_E( - "dynamic_params == NULL\n"); - return MIX_RESULT_FAIL; - } + if (dynamic_params == NULL) { + LOG_E( + "dynamic_params == NULL\n"); + return MIX_RESULT_FAIL; + } - MixVideoConfigParamsEnc *priv_config_params_enc = NULL; - if (priv->config_params) { - /* - * FIXME: It would be better to use ref/unref - */ - priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params; - //priv_config_params_enc = mix_videoconfigparamsenc_ref (priv->config_params); - } - else { - LOG_E( - "priv->config_params is invalid\n"); - return MIX_RESULT_FAIL; - } + MixVideoConfigParamsEnc *priv_config_params_enc = NULL; + if (priv->config_params) { + /* + * FIXME: It would be better to use ref/unref + */ + priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params; + //priv_config_params_enc = mix_videoconfigparamsenc_ref (priv->config_params); + } + else { + LOG_E( + "priv->config_params is invalid\n"); + return MIX_RESULT_FAIL; + } - priv->objlock.lock(); - - switch (params_type) { - case MIX_ENC_PARAMS_BITRATE: - { - ret = mix_videoconfigparamsenc_set_bit_rate (priv_config_params_enc, dynamic_params->bitrate); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_bit_rate\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_INIT_QP: - { - ret = mix_videoconfigparamsenc_set_init_qp (priv_config_params_enc, dynamic_params->init_QP); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_init_qp\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_MIN_QP: - { - ret = mix_videoconfigparamsenc_set_min_qp (priv_config_params_enc, dynamic_params->min_QP); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_min_qp\n"); - goto 
cleanup; - } - } - break; - - case MIX_ENC_PARAMS_WINDOW_SIZE: - { - ret = mix_videoconfigparamsenc_set_window_size (priv_config_params_enc, dynamic_params->window_size); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_window_size\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_TARGET_PERCENTAGE: - { - ret = mix_videoconfigparamsenc_set_target_percentage (priv_config_params_enc, dynamic_params->target_percentage); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_target_percentage\n"); - goto cleanup; - } - } - break; - - - case MIX_ENC_PARAMS_MTU_SLICE_SIZE: - { - ret = mix_videoconfigparamsenc_set_max_slice_size(priv_config_params_enc, dynamic_params->max_slice_size); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_max_slice_size\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_SLICE_NUM: - { - /* - */ - MixVideoConfigParamsEncH264 * config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); - - ret = mix_videoconfigparamsenc_h264_set_slice_num (config_params_enc_h264, dynamic_params->slice_num); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_h264_set_slice_num\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_I_SLICE_NUM: - { - /* - */ - MixVideoConfigParamsEncH264 * config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); - - ret = mix_videoconfigparamsenc_h264_set_I_slice_num (config_params_enc_h264, dynamic_params->I_slice_num); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_h264_set_I_slice_num\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_P_SLICE_NUM: - { - /* - */ - MixVideoConfigParamsEncH264 * config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); - - ret = mix_videoconfigparamsenc_h264_set_P_slice_num (config_params_enc_h264, dynamic_params->P_slice_num); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_h264_set_P_slice_num\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_IDR_INTERVAL: - { - MixVideoConfigParamsEncH264 * config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); - - ret = mix_videoconfigparamsenc_h264_set_IDR_interval(config_params_enc_h264, dynamic_params->idr_interval); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_h264_set_IDR_interval\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_RC_MODE: - case MIX_ENC_PARAMS_RESOLUTION: - { - /* - * Step 1: Release videofmtenc Object - */ - if (priv->video_format_enc) { - mix_videofmtenc_deinitialize(priv->video_format_enc); - } - - MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref) - - //priv->alloc_surface_cnt = 0; //Surfaces are also released, we need to set alloc_surface_cnt to 0 - - /* - * Please note there maybe issue here for usrptr shared buffer mode - */ - - /* - * Step 2: Change configuration parameters (frame size) - */ - - if (params_type == MIX_ENC_PARAMS_RESOLUTION) { - ret = mix_videoconfigparamsenc_set_picture_res (priv_config_params_enc, dynamic_params->width, dynamic_params->height); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_picture_res\n"); - goto cleanup; - } - } - else if (params_type == MIX_ENC_PARAMS_RC_MODE) { - ret = mix_videoconfigparamsenc_set_rate_control(priv_config_params_enc, dynamic_params->rc_mode); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed 
mix_videoconfigparamsenc_set_rate_control\n"); - goto cleanup; - } - } - - - /* - * Step 3: Renew mixvideofmtenc object - */ - - MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264; - - ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc, - &encode_format); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get target format\n"); - goto cleanup; - } - - if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264 - && MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) { - - MixVideoFormatEnc_H264 *video_format_enc = - mix_videoformatenc_h264_new(); - - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n"); - goto cleanup; - } - - /* work specific to h264 encode */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_MPEG4 - && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) { - - MixVideoFormatEnc_MPEG4 *video_format_enc = mix_videoformatenc_mpeg4_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create mpeg-4:2 video format\n"); - goto cleanup; - } - - /* work specific to mpeg4 */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263 - && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) { - - MixVideoFormatEnc_H263 *video_format_enc = mix_videoformatenc_h263_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create h.263 video format\n"); - goto cleanup; - } - - /* work specific to h.263 */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW - && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) { - - MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "mix_video_configure_encode: Failed to create preview video format\n"); - goto cleanup; - } - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - else { - - /*unsupported format */ - ret = MIX_RESULT_NOT_SUPPORTED; - LOG_E("Unknown format, we can't handle it\n"); - goto cleanup; - } - - - /* - * Step 4: Re-initialize and start a new encode session, of course with new resolution value - */ - - /* - * Initialize MixVideoEncFormat - */ - - /* - * If we are using usrptr shared buffer mode, alloc_surfaces/usrptr/alloc_surface_cnt - * will be re-requested by v4l2camsrc, how to differetiate old surface pools and new one - * is a problem. - */ - - /* - * priv->alloc_surface_cnt already been reset to 0 after calling mix_videofmtenc_initialize - * For dynamic frame size change, upstream element need to re-call buffer allocation method - * and priv->alloc_surface_cnt will get a new value. 
- */ - //priv->alloc_surface_cnt = 5; - ret = mix_videofmtenc_initialize(priv->video_format_enc, - priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool, - priv->va_display/*, priv->alloc_surfaces, priv->usrptr, priv->alloc_surface_cnt*/); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed initialize video format\n"); - goto cleanup; - } - - mix_surfacepool_ref(priv->surface_pool); - - - } - break; - case MIX_ENC_PARAMS_GOP_SIZE: - { - ret = mix_videoconfigparamsenc_set_intra_period (priv_config_params_enc, dynamic_params->intra_period); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_intra_period\n"); - goto cleanup; - } - - } - break; - case MIX_ENC_PARAMS_FRAME_RATE: - { - ret = mix_videoconfigparamsenc_set_frame_rate (priv_config_params_enc, dynamic_params->frame_rate_num, dynamic_params->frame_rate_denom); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_frame_rate\n"); - goto cleanup; - } - } - break; - case MIX_ENC_PARAMS_FORCE_KEY_FRAME: - { - /* - * nothing to be done now. - */ - } - break; - - case MIX_ENC_PARAMS_REFRESH_TYPE: - { - ret = mix_videoconfigparamsenc_set_refresh_type(priv_config_params_enc, dynamic_params->refresh_type); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_refresh_type\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_AIR: - { - ret = mix_videoconfigparamsenc_set_AIR_params(priv_config_params_enc, dynamic_params->air_params); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_AIR_params\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_CIR_FRAME_CNT: - { - ret = mix_videoconfigparamsenc_set_CIR_frame_cnt (priv_config_params_enc, dynamic_params->CIR_frame_cnt); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_CIR_frame_cnt\n"); - goto cleanup; - } - - } - break; - - default: - break; - } + priv->objlock.lock(); + + switch (params_type) { + case MIX_ENC_PARAMS_BITRATE: + { + ret = mix_videoconfigparamsenc_set_bit_rate (priv_config_params_enc, dynamic_params->bitrate); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_bit_rate\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_INIT_QP: + { + ret = mix_videoconfigparamsenc_set_init_qp (priv_config_params_enc, dynamic_params->init_QP); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_init_qp\n"); + goto cleanup; + } + } + break; - ret = mix_videofmtenc_set_dynamic_enc_config (priv->video_format_enc, priv_config_params_enc, params_type); + case MIX_ENC_PARAMS_MIN_QP: + { + ret = mix_videoconfigparamsenc_set_min_qp (priv_config_params_enc, dynamic_params->min_QP); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_min_qp\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_WINDOW_SIZE: + { + ret = mix_videoconfigparamsenc_set_window_size (priv_config_params_enc, dynamic_params->window_size); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_window_size\n"); + goto cleanup; + } + } + break; + + case MIX_ENC_PARAMS_TARGET_PERCENTAGE: + { + ret = mix_videoconfigparamsenc_set_target_percentage (priv_config_params_enc, dynamic_params->target_percentage); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_set_target_percentage\n"); + goto cleanup; + } + } + break; + + + case MIX_ENC_PARAMS_MTU_SLICE_SIZE: + { + ret = 
mix_videoconfigparamsenc_set_max_slice_size(priv_config_params_enc, dynamic_params->max_slice_size);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed mix_videoconfigparamsenc_set_max_slice_size\n");
+                goto cleanup;
+            }
+        }
+        break;
+
+    case MIX_ENC_PARAMS_SLICE_NUM:
+        {
+            MixVideoConfigParamsEncH264 * config_params_enc_h264 =
+                MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params);
+
+            ret = mix_videoconfigparamsenc_h264_set_slice_num (config_params_enc_h264, dynamic_params->slice_num);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed mix_videoconfigparamsenc_h264_set_slice_num\n");
+                goto cleanup;
+            }
+        }
+        break;
+
+    case MIX_ENC_PARAMS_I_SLICE_NUM:
+        {
+            MixVideoConfigParamsEncH264 * config_params_enc_h264 =
+                MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params);
+
+            ret = mix_videoconfigparamsenc_h264_set_I_slice_num (config_params_enc_h264, dynamic_params->I_slice_num);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed mix_videoconfigparamsenc_h264_set_I_slice_num\n");
+                goto cleanup;
+            }
+        }
+        break;
+
+    case MIX_ENC_PARAMS_P_SLICE_NUM:
+        {
+            MixVideoConfigParamsEncH264 * config_params_enc_h264 =
+                MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params);
+
+            ret = mix_videoconfigparamsenc_h264_set_P_slice_num (config_params_enc_h264, dynamic_params->P_slice_num);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed mix_videoconfigparamsenc_h264_set_P_slice_num\n");
+                goto cleanup;
+            }
+        }
+        break;
+
+    case MIX_ENC_PARAMS_IDR_INTERVAL:
+        {
+            MixVideoConfigParamsEncH264 * config_params_enc_h264 =
+                MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params);
+
+            ret = mix_videoconfigparamsenc_h264_set_IDR_interval(config_params_enc_h264, dynamic_params->idr_interval);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed mix_videoconfigparamsenc_h264_set_IDR_interval\n");
+                goto cleanup;
+            }
+        }
+        break;
+
+    case MIX_ENC_PARAMS_RC_MODE:
+    case MIX_ENC_PARAMS_RESOLUTION:
+        {
+            /*
+             * Step 1: Release the videofmtenc object
+             */
+            if (priv->video_format_enc) {
+                mix_videofmtenc_deinitialize(priv->video_format_enc);
+            }
+
+            MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref)
+
+            //priv->alloc_surface_cnt = 0; //Surfaces are also released, we need to set alloc_surface_cnt to 0
+
+            /*
+             * Please note there may be issues here for usrptr shared buffer mode
+             */
+
+            /*
+             * Step 2: Change configuration parameters (frame size)
+             */
+            if (params_type == MIX_ENC_PARAMS_RESOLUTION) {
+                ret = mix_videoconfigparamsenc_set_picture_res (priv_config_params_enc, dynamic_params->width, dynamic_params->height);
+                if (ret != MIX_RESULT_SUCCESS) {
+                    LOG_E("Failed mix_videoconfigparamsenc_set_picture_res\n");
+                    goto cleanup;
+                }
+            }
+            else if (params_type == MIX_ENC_PARAMS_RC_MODE) {
+                LOG_E("set dynamic_params->rc_mode = %d", dynamic_params->rc_mode);
+                ret = mix_videoconfigparamsenc_set_rate_control(priv_config_params_enc, dynamic_params->rc_mode);
+                if (ret != MIX_RESULT_SUCCESS) {
+                    LOG_E("Failed mix_videoconfigparamsenc_set_rate_control\n");
+                    goto cleanup;
+                }
+            }
+
+            /*
+             * Step 3: Renew the mixvideofmtenc object
+             */
+            MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264;
+
+            ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc,
+                    &encode_format);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed to get target format\n");
+                goto cleanup;
+            }
+
+            if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264
+                    && MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) {
+
+                MixVideoFormatEnc_H264 *video_format_enc =
+                    mix_videoformatenc_h264_new();
+
+                if (!video_format_enc) {
+                    ret = MIX_RESULT_NO_MEMORY;
+                    LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n");
+                    goto cleanup;
+                }
+
+                /* work specific to h264 encode */
+
+                priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+            }
+            else if (encode_format == MIX_ENCODE_TARGET_FORMAT_MPEG4
+                    && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) {
+
+                MixVideoFormatEnc_MPEG4 *video_format_enc = mix_videoformatenc_mpeg4_new();
+                if (!video_format_enc) {
+                    ret = MIX_RESULT_NO_MEMORY;
+                    LOG_E("mix_video_configure_encode: Failed to create mpeg-4:2 video format\n");
+                    goto cleanup;
+                }
+
+                /* work specific to mpeg4 */
+
+                priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+            }
+            else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263
+                    && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) {
+
+                MixVideoFormatEnc_H263 *video_format_enc = mix_videoformatenc_h263_new();
+                if (!video_format_enc) {
+                    ret = MIX_RESULT_NO_MEMORY;
+                    LOG_E("mix_video_configure_encode: Failed to create h.263 video format\n");
+                    goto cleanup;
+                }
+
+                /* work specific to h.263 */
+
+                priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+            }
+            else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW
+                    && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) {
+
+                MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new();
+                if (!video_format_enc) {
+                    ret = MIX_RESULT_NO_MEMORY;
+                    LOG_E( "mix_video_configure_encode: Failed to create preview video format\n");
+                    goto cleanup;
+                }
+
+                priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+            }
+            else {
+                /* unsupported format */
+                ret = MIX_RESULT_NOT_SUPPORTED;
+                LOG_E("Unknown format, we can't handle it\n");
+                goto cleanup;
+            }
+
+            /*
+             * Step 4: Re-initialize and start a new encode session with the new resolution
+             */
+
+            /*
+             * Initialize MixVideoEncFormat
+             */
+
+            /*
+             * If we are using usrptr shared buffer mode, alloc_surfaces/usrptr/alloc_surface_cnt
+             * will be re-requested by v4l2camsrc; how to differentiate the old surface pool
+             * from the new one is a problem.
+             */
+
+            /*
+             * priv->alloc_surface_cnt has already been reset to 0 after calling mix_videofmtenc_initialize.
+             * For a dynamic frame size change, the upstream element needs to re-call the buffer
+             * allocation method, and priv->alloc_surface_cnt will get a new value.
+             */
+            //priv->alloc_surface_cnt = 5;
+            ret = mix_videofmtenc_initialize(priv->video_format_enc,
+                    priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool,
+                    &(priv->requested_surface_info), priv->va_display);
+
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed to initialize video format\n");
+                goto cleanup;
+            }
+
+            mix_surfacepool_ref(priv->surface_pool);
+        }
+        break;
+
+    case MIX_ENC_PARAMS_GOP_SIZE:
+        {
+            ret = mix_videoconfigparamsenc_set_intra_period (priv_config_params_enc, dynamic_params->intra_period);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed mix_videoconfigparamsenc_set_intra_period\n");
+                goto cleanup;
+            }
+        }
+        break;
+
+    case MIX_ENC_PARAMS_FRAME_RATE:
+        {
+            ret = mix_videoconfigparamsenc_set_frame_rate (priv_config_params_enc, dynamic_params->frame_rate_num, dynamic_params->frame_rate_denom);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed mix_videoconfigparamsenc_set_frame_rate\n");
+                goto cleanup;
+            }
+        }
+        break;
+
+    case MIX_ENC_PARAMS_FORCE_KEY_FRAME:
+        {
+            /*
+             * nothing to be done now.
+             */
+        }
+        break;
+
+    case MIX_ENC_PARAMS_REFRESH_TYPE:
+        {
+            ret = mix_videoconfigparamsenc_set_refresh_type(priv_config_params_enc, dynamic_params->refresh_type);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed mix_videoconfigparamsenc_set_refresh_type\n");
+                goto cleanup;
+            }
+        }
+        break;
+
+    case MIX_ENC_PARAMS_AIR:
+        {
+            ret = mix_videoconfigparamsenc_set_AIR_params(priv_config_params_enc, dynamic_params->air_params);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed mix_videoconfigparamsenc_set_AIR_params\n");
+                goto cleanup;
+            }
+        }
+        break;
+
+    case MIX_ENC_PARAMS_CIR_FRAME_CNT:
+        {
+            ret = mix_videoconfigparamsenc_set_CIR_frame_cnt (priv_config_params_enc, dynamic_params->CIR_frame_cnt);
+            if (ret != MIX_RESULT_SUCCESS) {
+                LOG_E("Failed mix_videoconfigparamsenc_set_CIR_frame_cnt\n");
+                goto cleanup;
+            }
+        }
+        break;
+
+    default:
+        break;
+    }
+
+    ret = mix_videofmtenc_set_dynamic_enc_config (priv->video_format_enc, priv_config_params_enc, params_type);
 
 cleanup:
-    priv->objlock.unlock();
+    priv->objlock.unlock();
-    LOG_V( "End ret = 0x%x\n", ret);
+    LOG_V( "End ret = 0x%x\n", ret);
-    return ret;
+    return ret;
 }
+
+
+MIX_RESULT mix_video_get_new_userptr_for_surface_buffer_default (MixVideo * mix, uint width, uint height, uint format,
+    uint expected_size, uint *outsize, uint * stride, uint8 **usrptr)
+{
+    MIX_RESULT ret = MIX_RESULT_FAIL;
+    MixVideoPrivate *priv = NULL;
+    VAStatus va_status = VA_STATUS_SUCCESS;
+
+    VASurfaceID surface = VA_INVALID_SURFACE;
+    VAImage image;
+    int index = 0;
+
+    LOG_V( "Begin\n");
+    CHECK_INIT(mix, priv);
+
+    /*
+     * If mixvideo has been configured, we cannot request surface creation anymore
+     */
+    if (priv->configured) {
+        LOG_E( "Already configured, cannot request VA surface anymore\n");
+        return MIX_RESULT_WRONG_STATE;
+    }
+
+    if (width <= 0 || height <= 0 || outsize == NULL || stride == NULL || usrptr == NULL) {
+        LOG_E("width<=0 || height<=0 || outsize == NULL || stride == NULL || usrptr == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    /*
+     * Currently only NV12 is supported by the VA API.
+     * The format tells us the number of planes.
+     */
+    if (format != MIX_STRING_TO_FOURCC("NV12")) {
+        LOG_W ("Format is not supported\n");
+        return MIX_RESULT_NOT_SUPPORTED;
+    }
+
+    priv->objlock.lock();
+
+    if (priv->requested_surface_info.surface_cnt >= MAX_ENC_SURFACE_COUNT) {
+        LOG_E("User-created surface count exceeds the max number!\n");
+        goto cleanup;
+    }
+
+#if 1
+    va_status = vaCreateSurfaces(priv->va_display, width,
+            height, VA_RT_FORMAT_YUV420,
+            1, &surface);
+#else
+    va_status = vaCreateSurfacesForUserPtr (
+        priv->va_display, width, height, VA_RT_FORMAT_YUV420, 1,
+        &surface, expected_size, VA_FOURCC_NV12, width, width, width,
+        0, width * height, width * height);
+#endif
+
+    if (va_status != VA_STATUS_SUCCESS) {
+        LOG_E("Failed vaCreateSurfaces\n");
+        goto cleanup;
+    }
+
+    va_status = vaDeriveImage(priv->va_display, surface, &image);
+    if (va_status != VA_STATUS_SUCCESS) {
+        LOG_E("Failed to vaDeriveImage\n");
+        goto cleanup;
+    }
+
+    LOG_V( "vaDeriveImage Done\n");
+
+    //priv->src_image [priv->alloc_surface_cnt] = image;
+
+    va_status = vaMapBuffer (priv->va_display, image.buf, (void **) usrptr);
+    if (va_status != VA_STATUS_SUCCESS) {
+        LOG_E( "Failed to vaMapBuffer\n");
+        goto cleanup;
+    }
+
+#if 1
+    /* touch one byte in each 4K page, presumably so the mapping is faulted in */
+    for (index = 0; index < image.data_size; index = index + 4096) {
+        unsigned char tmp = *(*usrptr + index);
+        if (tmp == 0)
+            *(*usrptr + index) = 0;
+    }
+#endif
+
+    *outsize = image.data_size;
+    *stride = image.pitches[0];
+
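+    /*
+     * Sizing note (a sketch, assuming the NV12 layout required above): the
+     * mapped buffer holds the Y plane followed by the interleaved UV plane,
+     * so a caller estimating expected_size up front would typically use
+     *
+     *     expected_size = width * height * 3 / 2;   // Y plane + half-size UV plane
+     *
+     * The driver may pad the pitch, which is why the real stride is returned
+     * from image.pitches[0] and why the check further down destroys the
+     * surface if the allocated size comes back smaller than expected_size.
+     */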
priv->requested_surface_info.surface_allocated[priv->requested_surface_info.surface_cnt] = surface; + priv->requested_surface_info.usrptr[priv->requested_surface_info.surface_cnt] = *usrptr; + + LOG_I( "surface = 0x%08x\n",(uint)surface); + LOG_I("image->pitches[0] = %d\n", image.pitches[0]); + LOG_I("image->pitches[1] = %d\n", image.pitches[1]); + LOG_I("image->offsets[0] = %d\n", image.offsets[0]); + LOG_I("image->offsets[1] = %d\n", image.offsets[1]); + LOG_I("image->num_planes = %d\n", image.num_planes); + LOG_I("image->width = %d\n", image.width); + LOG_I("image->height = %d\n", image.height); + LOG_I("data_size = %d\n", image.data_size); + LOG_I("usrptr = 0x%08x\n", *usrptr); + LOG_I("surface_cnt = %d\n", priv->requested_surface_info.surface_cnt); + LOG_I ("priv->usrptr[%d] = 0x%08x\n ", + priv->requested_surface_info.surface_cnt, + priv->requested_surface_info.usrptr[priv->requested_surface_info.surface_cnt]); + + va_status = vaUnmapBuffer(priv->va_display, image.buf); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaUnmapBuffer\n"); + goto cleanup; + } + + va_status = vaDestroyImage(priv->va_display, image.image_id); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaDestroyImage\n"); + goto cleanup; + } + + if (*outsize < expected_size) { + LOG_E ("Allocated buffer size is small than the expected size, destroy the surface"); + LOG_I ("Allocated size is %d, expected size is %d\n", *outsize, expected_size); + va_status = vaDestroySurfaces(priv->va_display, &surface, 1); + goto cleanup; + } + + priv->requested_surface_info.surface_cnt ++; + + ret = MIX_RESULT_SUCCESS; + +cleanup: + + priv->objlock.unlock(); + + LOG_V( "End\n"); + return ret; + +} /* * API functions */ @@ -1996,118 +2145,107 @@ cleanup: if (!MIX_IS_VIDEO(mix)) { \ LOG_E( "Not MixVideo\n"); \ return MIX_RESULT_INVALID_PARAM; \ - } + } -MIX_RESULT mix_video_get_version(MixVideo * mix, guint * major, guint * minor) { +MIX_RESULT mix_video_get_version(MixVideo * mix, uint * major, uint * minor) { - return mix->get_version_func(mix, major, minor); + return mix->get_version_func(mix, major, minor); } MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode, - MixVideoInitParams * init_params, MixDrmParams * drm_init_params) { + MixVideoInitParams * init_params, MixDrmParams * drm_init_params) { - return mix->initialize_func(mix, mode, init_params, drm_init_params); + return mix->initialize_func(mix, mode, init_params, drm_init_params); } MIX_RESULT mix_video_deinitialize(MixVideo * mix) { - return mix->deinitialize_func(mix); + return mix->deinitialize_func(mix); } MIX_RESULT mix_video_configure(MixVideo * mix, - MixVideoConfigParams * config_params, - MixDrmParams * drm_config_params) { + MixVideoConfigParams * config_params, + MixDrmParams * drm_config_params) { - return mix->configure_func(mix, config_params, drm_config_params); + return mix->configure_func(mix, config_params, drm_config_params); } MIX_RESULT mix_video_get_config(MixVideo * mix, - MixVideoConfigParams ** config_params_dec) { + MixVideoConfigParams ** config_params_dec) { - return mix->get_config_func(mix, config_params_dec); + return mix->get_config_func(mix, config_params_dec); } -MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, - MixVideoDecodeParams * decode_params) { +MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], int bufincnt, + MixVideoDecodeParams * decode_params) { - return mix->decode_func(mix, bufin, bufincnt, - decode_params); + return mix->decode_func(mix, bufin, 
bufincnt, + decode_params); } MIX_RESULT mix_video_get_frame(MixVideo * mix, MixVideoFrame ** frame) { - return mix->get_frame_func(mix, frame); + return mix->get_frame_func(mix, frame); } MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame) { - return mix->release_frame_func(mix, frame); + return mix->release_frame_func(mix, frame); } MIX_RESULT mix_video_render(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame) { + MixVideoRenderParams * render_params, MixVideoFrame *frame) { - return mix->render_func(mix, render_params, frame); + return mix->render_func(mix, render_params, frame); } -#if MIXVIDEO_ENCODE_ENABLE -MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, - MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { +MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], int bufincnt, + MixIOVec * iovout[], int iovoutcnt, + MixVideoEncodeParams * encode_params) { - return mix->encode_func(mix, bufin, bufincnt, iovout, iovoutcnt, - encode_params); + return mix->encode_func(mix, bufin, bufincnt, iovout, iovoutcnt, + encode_params); } -#else -MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt, - MixIOVec * iovout[], gint iovoutcnt, - MixParams * encode_params) { - return MIX_RESULT_NOT_SUPPORTED; -} -#endif MIX_RESULT mix_video_flush(MixVideo * mix) { - return mix->flush_func(mix); + return mix->flush_func(mix); } MIX_RESULT mix_video_eos(MixVideo * mix) { - return mix->eos_func(mix); + return mix->eos_func(mix); } MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state) { - return mix->get_state_func(mix, state); + return mix->get_state_func(mix, state); } MIX_RESULT mix_video_get_mixbuffer(MixVideo * mix, MixBuffer ** buf) { - return mix->get_mix_buffer_func(mix, buf); + return mix->get_mix_buffer_func(mix, buf); } MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf) { - return mix->release_mix_buffer_func(mix, buf); + return mix->release_mix_buffer_func(mix, buf); } -MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize) { -#if MIXVIDEO_ENCODE_ENABLE - return mix->get_max_coded_buffer_size_func(mix, bufsize); -#else - return MIX_RESULT_NOT_SUPPORTED; -#endif +MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, uint *bufsize) { + return mix->get_max_coded_buffer_size_func(mix, bufsize); } MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, - MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) { -#if MIXVIDEO_ENCODE_ENABLE - return mix->set_dynamic_enc_config_func(mix, params_type, dynamic_params); -#else - return MIX_RESULT_NOT_SUPPORTED; -#endif -} \ No newline at end of file + MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) { + return mix->set_dynamic_enc_config_func(mix, params_type, dynamic_params); +} +MIX_RESULT mix_video_get_new_userptr_for_surface_buffer (MixVideo * mix, uint width, uint height, uint format, + uint expected_size, uint *outsize, uint * stride, uint8 **usrptr) { + return mix->get_new_usrptr_for_surface_buffer(mix, width, height, format, expected_size, outsize, stride, usrptr); + +} diff --git a/mix_video/src/mixvideo.h b/mix_video/src/mixvideo.h index 93a3038..bf63b0c 100644 --- a/mix_video/src/mixvideo.h +++ b/mix_video/src/mixvideo.h @@ -9,16 +9,14 @@ #ifndef __MIX_VIDEO_H__ #define __MIX_VIDEO_H__ -#include + #include #include "mixvideoinitparams.h" #include "mixvideoconfigparamsdec.h" #include "mixvideodecodeparams.h" 
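Each mix_video_*() entry point in this API is a thin wrapper that forwards to a function pointer held on the MixVideo instance (the *_func members declared below). A minimal sketch of that dispatch pattern, using placeholder names rather than the real MixVideo types:

struct my_codec;                                  /* forward declaration */
typedef int (*flush_func_t)(struct my_codec *self);

struct my_codec {
    flush_func_t flush_func;   /* bound to a concrete implementation when the object is created */
};

int my_codec_flush(struct my_codec *self) {
    /* thin wrapper: validate the handle, then forward to the bound implementation */
    if (self == NULL || self->flush_func == NULL)
        return -1;
    return self->flush_func(self);
}

This is exactly how mix_video_flush() and its siblings in mixvideo.cpp behave: they do no work themselves beyond dispatching.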
-#if MIXVIDEO_ENCODE_ENABLE #include "mixvideoconfigparamsenc.h" #include "mixvideoencodeparams.h" -#endif #include "mixvideorenderparams.h" #include "mixvideocaps.h" #include "mixbuffer.h" @@ -30,38 +28,38 @@ class MixVideo; * Virtual methods typedef */ -typedef MIX_RESULT (*MixVideoGetVersionFunc)(MixVideo * mix, guint * major, - guint * minor); +typedef MIX_RESULT (*MixVideoGetVersionFunc)(MixVideo * mix, uint * major, + uint * minor); typedef MIX_RESULT (*MixVideoInitializeFunc)(MixVideo * mix, MixCodecMode mode, - MixVideoInitParams * init_params, MixDrmParams * drm_init_params); + MixVideoInitParams * init_params, MixDrmParams * drm_init_params); typedef MIX_RESULT (*MixVideoDeinitializeFunc)(MixVideo * mix); typedef MIX_RESULT (*MixVideoConfigureFunc)(MixVideo * mix, - MixVideoConfigParams * config_params, - MixDrmParams * drm_config_params); + MixVideoConfigParams * config_params, + MixDrmParams * drm_config_params); typedef MIX_RESULT (*MixVideoGetConfigFunc)(MixVideo * mix, - MixVideoConfigParams ** config_params); + MixVideoConfigParams ** config_params); typedef MIX_RESULT (*MixVideoDecodeFunc)(MixVideo * mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params); + int bufincnt, MixVideoDecodeParams * decode_params); typedef MIX_RESULT (*MixVideoGetFrameFunc)(MixVideo * mix, - MixVideoFrame ** frame); + MixVideoFrame ** frame); typedef MIX_RESULT (*MixVideoReleaseFrameFunc)(MixVideo * mix, - MixVideoFrame * frame); + MixVideoFrame * frame); typedef MIX_RESULT (*MixVideoRenderFunc)(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame); + MixVideoRenderParams * render_params, MixVideoFrame *frame); + -#if MIXVIDEO_ENCODE_ENABLE typedef MIX_RESULT (*MixVideoEncodeFunc)(MixVideo * mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params); -#endif + int bufincnt, MixIOVec * iovout[], int iovoutcnt, + MixVideoEncodeParams * encode_params); + typedef MIX_RESULT (*MixVideoFlushFunc)(MixVideo * mix); @@ -72,15 +70,19 @@ typedef MIX_RESULT (*MixVideoGetStateFunc)(MixVideo * mix, MixState * state); typedef MIX_RESULT (*MixVideoGetMixBufferFunc)(MixVideo * mix, MixBuffer ** buf); typedef MIX_RESULT (*MixVideoReleaseMixBufferFunc)(MixVideo * mix, - MixBuffer * buf); + MixBuffer * buf); typedef MIX_RESULT (*MixVideoGetMaxCodedBufferSizeFunc) (MixVideo * mix, - guint *max_size); + uint *max_size); + -#if MIXVIDEO_ENCODE_ENABLE typedef MIX_RESULT (*MixVideoSetDynamicEncConfigFunc) (MixVideo * mix, - MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); -#endif + MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); + +typedef MIX_RESULT (*MixVideoGetNewUsrptrForSurfaceBufferFunc) (MixVideo * mix, + uint width, uint height, uint format, uint expected_size, + uint *outsize, uint * stride, uint8 **usrptr); + /** * MixVideo: * @parent: Parent object. 
@@ -89,38 +91,35 @@ typedef MIX_RESULT (*MixVideoSetDynamicEncConfigFunc) (MixVideo * mix, */ class MixVideo { public: - MixVideo(); - ~MixVideo(); - + MixVideo(); + ~MixVideo(); + public: - /*< private > */ - gpointer context; - guint ref_count; + /*< private > */ + void* context; + uint ref_count; MixVideoPrivate mPriv; public: - /*< virtual public >*/ - MixVideoGetVersionFunc get_version_func; - MixVideoInitializeFunc initialize_func; - MixVideoDeinitializeFunc deinitialize_func; - MixVideoConfigureFunc configure_func; - MixVideoGetConfigFunc get_config_func; - MixVideoDecodeFunc decode_func; - MixVideoGetFrameFunc get_frame_func; - MixVideoReleaseFrameFunc release_frame_func; - MixVideoRenderFunc render_func; -#if MIXVIDEO_ENCODE_ENABLE - MixVideoEncodeFunc encode_func; -#endif - MixVideoFlushFunc flush_func; - MixVideoEOSFunc eos_func; - MixVideoGetStateFunc get_state_func; - MixVideoGetMixBufferFunc get_mix_buffer_func; - MixVideoReleaseMixBufferFunc release_mix_buffer_func; -#if MIXVIDEO_ENCODE_ENABLE - MixVideoGetMaxCodedBufferSizeFunc get_max_coded_buffer_size_func; - MixVideoSetDynamicEncConfigFunc set_dynamic_enc_config_func; -#endif + /*< virtual public >*/ + MixVideoGetVersionFunc get_version_func; + MixVideoInitializeFunc initialize_func; + MixVideoDeinitializeFunc deinitialize_func; + MixVideoConfigureFunc configure_func; + MixVideoGetConfigFunc get_config_func; + MixVideoDecodeFunc decode_func; + MixVideoGetFrameFunc get_frame_func; + MixVideoReleaseFrameFunc release_frame_func; + MixVideoRenderFunc render_func; + MixVideoEncodeFunc encode_func; + MixVideoFlushFunc flush_func; + MixVideoEOSFunc eos_func; + MixVideoGetStateFunc get_state_func; + MixVideoGetMixBufferFunc get_mix_buffer_func; + MixVideoReleaseMixBufferFunc release_mix_buffer_func; + MixVideoGetMaxCodedBufferSizeFunc get_max_coded_buffer_size_func; + MixVideoSetDynamicEncConfigFunc set_dynamic_enc_config_func; + MixVideoGetNewUsrptrForSurfaceBufferFunc get_new_usrptr_for_surface_buffer; }; /** @@ -160,7 +159,7 @@ mix_video_unref(MixVideo * mix) ; * * This function will return the major and minor version numbers of the library. */ -MIX_RESULT mix_video_get_version(MixVideo * mix, guint * major, guint * minor); +MIX_RESULT mix_video_get_version(MixVideo * mix, uint * major, uint * minor); @@ -180,7 +179,7 @@ MIX_RESULT mix_video_get_version(MixVideo * mix, guint * major, guint * minor); * This function will return the major and minor version numbers of the library. */ MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode, - MixVideoInitParams * init_params, MixDrmParams * drm_init_params); + MixVideoInitParams * init_params, MixDrmParams * drm_init_params); /** * mix_video_deinitialize: @@ -221,8 +220,8 @@ MIX_RESULT mix_video_deinitialize(MixVideo * mix); * This function can only be called after mix_video_initialize() has been called */ MIX_RESULT mix_video_configure(MixVideo * mix, - MixVideoConfigParams * config_params, - MixDrmParams * drm_config_params); + MixVideoConfigParams * config_params, + MixDrmParams * drm_config_params); /** @@ -244,7 +243,7 @@ MIX_RESULT mix_video_configure(MixVideo * mix, * */ MIX_RESULT mix_video_get_config(MixVideo * mix, - MixVideoConfigParams ** config_params); + MixVideoConfigParams ** config_params); /** * mix_video_decode: @@ -295,8 +294,8 @@ MIX_RESULT mix_video_get_config(MixVideo * mix, * of this pool, which is passed to mix_video_configure() in #the MixVideoConfigParams object. 
 *
 */
-MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt,
-    MixVideoDecodeParams * decode_params);
+MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], int bufincnt,
+    MixVideoDecodeParams * decode_params);
 
 /**
@@ -362,7 +361,7 @@ MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame);
  * The display is either an X11 Pixmap or an X11 Window using the overlay.
  */
 MIX_RESULT mix_video_render(MixVideo * mix,
-    MixVideoRenderParams * render_params, MixVideoFrame *frame);
+    MixVideoRenderParams * render_params, MixVideoFrame *frame);
 
 /**
@@ -438,15 +437,10 @@ MIX_RESULT mix_video_render(MixVideo * mix,
 *
 *
 */
-#if MIXVIDEO_ENCODE_ENABLE
-MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt,
-    MixIOVec * iovout[], gint iovoutcnt,
-    MixVideoEncodeParams * encode_params);
-#else
-MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt,
-    MixIOVec * iovout[], gint iovoutcnt,
-    MixParams * encode_params);
-#endif
+MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], int bufincnt,
+    MixIOVec * iovout[], int iovoutcnt,
+    MixVideoEncodeParams * encode_params);
+
 /**
 * mix_video_flush:
 * @mix: #MixVideo object.
@@ -525,7 +519,7 @@ MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf);
 /**
 * mix_video_get_max_coded_buffer_size:
 * @mix: #MixVideo object.
- * @bufsize: Pointer to guint.
+ * @bufsize: Pointer to uint.
 * @returns: Common Video Error Return Codes
 *
 *
@@ -536,7 +530,7 @@ MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf);
 * This function can only be called once mix_video_configure() has been called.
 *
 */
-MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize);
+MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, uint *bufsize);
 
 /**
@@ -555,7 +549,26 @@ MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize);
 */
 MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix,
-    MixEncParamsType params_type, MixEncDynamicParams * dynamic_params);
+    MixEncParamsType params_type, MixEncDynamicParams * dynamic_params);
+
+/**
+ * mix_video_get_new_userptr_for_surface_buffer:
+ * @mix: #MixVideo object.
+ * @width: Width of the new surface to be created
+ * @height: Height of the new surface to be created
+ * @format: Format of the new surface to be created
+ * @expected_size: Buffer size the caller expects for the given resolution and format
+ * @outsize: Actual size of the mapped buffer
+ * @stride: Stride (pitch) of the newly created surface
+ * @usrptr: User space pointer mapped from the newly created VA surface
+ * @returns: Common Video Error Return Codes
+ *
+ *
+ * This function can be used to create a new VA surface and map its physical address into user space
+ *
+ *
+ * Usually this function is called before the encoding session is started.
+ * + */ +MIX_RESULT mix_video_get_new_userptr_for_surface_buffer (MixVideo * mix, uint width, uint height, uint format, + uint expected_size, uint *outsize, uint * stride, uint8 **usrptr); #endif /* __MIX_VIDEO_H__ */ diff --git a/mix_video/src/mixvideo_private.h b/mix_video/src/mixvideo_private.h index eecd3ff..24bafe1 100644 --- a/mix_video/src/mixvideo_private.h +++ b/mix_video/src/mixvideo_private.h @@ -10,38 +10,38 @@ #define __MIX_VIDEO_PRIVATE_H__ #include "mixvideothread.h" +#include "mixvideoformatenc.h" class MixFrameManager; class MixVideoFormat; typedef struct _MixVideoPrivate MixVideoPrivate; struct _MixVideoPrivate { - /*< private > */ + /*< private > */ - MixVideoMutex objlock; - gboolean initialized; - gboolean configured; + MixVideoMutex objlock; + bool initialized; + bool configured; - VADisplay va_display; + VADisplay va_display; - int va_major_version; - int va_minor_version; + int va_major_version; + int va_minor_version; - MixCodecMode codec_mode; + MixCodecMode codec_mode; - MixVideoInitParams *init_params; - MixDrmParams *drm_params; + MixVideoInitParams *init_params; + MixDrmParams *drm_params; - MixVideoConfigParams *config_params; + MixVideoConfigParams *config_params; - MixFrameManager *frame_manager; - MixVideoFormat *video_format; -#if MIXVIDEO_ENCODE_ENABLE - MixVideoFormatEnc *video_format_enc; -#endif + MixFrameManager *frame_manager; + MixVideoFormat *video_format; + MixVideoFormatEnc *video_format_enc; - MixSurfacePool *surface_pool; - MixBufferPool *buffer_pool; + MixSurfacePool *surface_pool; + MixBufferPool *buffer_pool; + MixUsrReqSurfacesInfo requested_surface_info; }; diff --git a/mix_video/src/mixvideocaps.cpp b/mix_video/src/mixvideocaps.cpp index ce96873..c4e0d7a 100644 --- a/mix_video/src/mixvideocaps.cpp +++ b/mix_video/src/mixvideocaps.cpp @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
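The requested_surface_info member added to MixVideoPrivate above has type MixUsrReqSurfacesInfo, whose definition lives in mixvideoformatenc.h and is not part of this patch. Judging only from the accesses made in mix_video_get_new_userptr_for_surface_buffer_default() (surface_allocated[], usrptr[], surface_cnt, bounded by MAX_ENC_SURFACE_COUNT), a compatible shape would be the following sketch; the array bound and field order here are assumptions, not the actual definition:

#define MAX_ENC_SURFACE_COUNT 20    /* assumed value; the real constant is defined elsewhere */

typedef struct {
    VASurfaceID surface_allocated[MAX_ENC_SURFACE_COUNT];  /* VA surfaces created on user request */
    uint8      *usrptr[MAX_ENC_SURFACE_COUNT];             /* user-space pointers mapped from them */
    uint        surface_cnt;                               /* number of slots currently in use */
} MixUsrReqSurfacesInfo;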
@@ -15,6 +15,8 @@ No license under any patent, copyright, trade secret or other intellectual prope #include #include "mixvideocaps.h" +#include + #ifdef ANDROID #define mix_strcmp strcmp @@ -25,44 +27,44 @@ No license under any patent, copyright, trade secret or other intellectual prope #define MIX_VIDEOCAPS_SETTER_CHECK_INPUT(obj) \ if(!obj) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEOCAPS(obj)) return MIX_RESULT_FAIL; \ - + #define MIX_VIDEOCAPS_GETTER_CHECK_INPUT(obj, prop) \ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEOCAPS(obj)) return MIX_RESULT_FAIL; \ + - -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } +#define SAFE_FREE(p) if(p) {free(p); p = NULL; } MixVideoCaps::MixVideoCaps() - :mix_caps(NULL) - ,video_hw_caps(NULL) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) { + :mix_caps(NULL) + ,video_hw_caps(NULL) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { } MixVideoCaps::~MixVideoCaps() { - SAFE_FREE (this->mix_caps); - SAFE_FREE (this->video_hw_caps); + SAFE_FREE (this->mix_caps); + SAFE_FREE (this->video_hw_caps); } /** * mix_videocaps_dup: * @obj: a #MixVideoCaps object * @returns: a newly allocated duplicate of the object. -* +* * Copy duplicate of the object. */ MixParams* MixVideoCaps::dup() const { - MixParams *ret = new MixVideoCaps(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; + MixParams *ret = new MixVideoCaps(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; } /** @@ -70,93 +72,93 @@ MixParams* MixVideoCaps::dup() const { * @target: copy to target * @src: copy from src * @returns: boolean indicates if copy is successful. -* +* * Copy instance data from @src to @target. 
*/ -gboolean MixVideoCaps::copy (MixParams * target) const { - gboolean ret = FALSE; - MixVideoCaps * this_target = MIX_VIDEOCAPS(target); - if (NULL != this_target) { - // Free the existing properties - SAFE_FREE (this_target->mix_caps); - SAFE_FREE (this_target->video_hw_caps); - // Duplicate string - this_target->mix_caps = g_strdup (this->mix_caps); - this_target->video_hw_caps = g_strdup (this->video_hw_caps); - - // chain up base class - ret = MixParams::copy(target); - } - return ret; +bool MixVideoCaps::copy (MixParams * target) const { + bool ret = FALSE; + MixVideoCaps * this_target = MIX_VIDEOCAPS(target); + if (NULL != this_target) { + // Free the existing properties + SAFE_FREE (this_target->mix_caps); + SAFE_FREE (this_target->video_hw_caps); + // Duplicate string + this_target->mix_caps = strdup (this->mix_caps); + this_target->video_hw_caps = strdup (this->video_hw_caps); + + // chain up base class + ret = MixParams::copy(target); + } + return ret; } -gboolean MixVideoCaps::equal(MixParams* obj) const { - gboolean ret = FALSE; - MixVideoCaps * this_obj = MIX_VIDEOCAPS(obj); - if (NULL != this_obj) { - if ((mix_strcmp (this->mix_caps, this_obj->mix_caps) == 0) && - (mix_strcmp (this->video_hw_caps, this_obj->video_hw_caps) == 0)) { - ret = MixParams::equal(this_obj); - } - } - return ret; +bool MixVideoCaps::equal(MixParams* obj) const { + bool ret = FALSE; + MixVideoCaps * this_obj = MIX_VIDEOCAPS(obj); + if (NULL != this_obj) { + if ((mix_strcmp (this->mix_caps, this_obj->mix_caps) == 0) && + (mix_strcmp (this->video_hw_caps, this_obj->video_hw_caps) == 0)) { + ret = MixParams::equal(this_obj); + } + } + return ret; } MixVideoCaps * mix_videocaps_new (void) { - return new MixVideoCaps(); + return new MixVideoCaps(); } MixVideoCaps * mix_videocaps_ref (MixVideoCaps * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } /* TODO: Add getters and setters for other properties. The following is just an exmaple, not implemented yet. 
*/ MIX_RESULT mix_videocaps_set_mix_caps ( - MixVideoCaps * obj, gchar * mix_caps) { - MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj); - SAFE_FREE (obj->mix_caps); - obj->mix_caps = g_strdup (mix_caps); - if (NULL == obj->mix_caps && NULL != mix_caps) { - return MIX_RESULT_NO_MEMORY; - } - return MIX_RESULT_SUCCESS; + MixVideoCaps * obj, char * mix_caps) { + MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj); + SAFE_FREE (obj->mix_caps); + obj->mix_caps = strdup (mix_caps); + if (NULL == obj->mix_caps && NULL != mix_caps) { + return MIX_RESULT_NO_MEMORY; + } + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videocaps_get_mix_caps ( - MixVideoCaps * obj, gchar ** mix_caps) { - MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, mix_caps); - *mix_caps = g_strdup (obj->mix_caps); - if (NULL == *mix_caps && NULL != obj->mix_caps) { - return MIX_RESULT_NO_MEMORY; - } - return MIX_RESULT_SUCCESS; + MixVideoCaps * obj, char ** mix_caps) { + MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, mix_caps); + *mix_caps = strdup (obj->mix_caps); + if (NULL == *mix_caps && NULL != obj->mix_caps) { + return MIX_RESULT_NO_MEMORY; + } + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videocaps_set_video_hw_caps ( - MixVideoCaps * obj, gchar * video_hw_caps) { - MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj); - SAFE_FREE (obj->video_hw_caps); - obj->video_hw_caps = g_strdup (video_hw_caps); - if (NULL != video_hw_caps && NULL == obj->video_hw_caps) { - return MIX_RESULT_NO_MEMORY; - } - return MIX_RESULT_SUCCESS; + MixVideoCaps * obj, char * video_hw_caps) { + MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj); + SAFE_FREE (obj->video_hw_caps); + obj->video_hw_caps = strdup (video_hw_caps); + if (NULL != video_hw_caps && NULL == obj->video_hw_caps) { + return MIX_RESULT_NO_MEMORY; + } + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videocaps_get_video_hw_caps ( - MixVideoCaps * obj, gchar ** video_hw_caps) { - MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, video_hw_caps); - *video_hw_caps = g_strdup (obj->video_hw_caps); - if (NULL == *video_hw_caps && NULL != obj->video_hw_caps) { - return MIX_RESULT_NO_MEMORY; - } - return MIX_RESULT_SUCCESS; + MixVideoCaps * obj, char ** video_hw_caps) { + MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, video_hw_caps); + *video_hw_caps = strdup (obj->video_hw_caps); + if (NULL == *video_hw_caps && NULL != obj->video_hw_caps) { + return MIX_RESULT_NO_MEMORY; + } + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideocaps.h b/mix_video/src/mixvideocaps.h index dbf52b9..e760787 100644 --- a/mix_video/src/mixvideocaps.h +++ b/mix_video/src/mixvideocaps.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -21,7 +21,7 @@ No license under any patent, copyright, trade secret or other intellectual prope /** * MIX_IS_VIDEOCAPS: * @obj: an object. -* +* * Checks if the given object is an instance of #MixParams */ #define MIX_IS_VIDEOCAPS(obj) ((NULL != MIX_VIDEOCAPS(obj)) ? TRUE : FALSE) @@ -35,31 +35,31 @@ No license under any patent, copyright, trade secret or other intellectual prope class MixVideoCaps : public MixParams { public: - MixVideoCaps(); - virtual ~MixVideoCaps(); + MixVideoCaps(); + virtual ~MixVideoCaps(); - virtual gboolean copy(MixParams* target) const; - virtual MixParams *dup() const; - virtual gboolean equal(MixParams* obj) const; + virtual bool copy(MixParams* target) const; + virtual MixParams *dup() const; + virtual bool equal(MixParams* obj) const; public: - /*< public > */ - //MixParams parent; + /*< public > */ + //MixParams parent; - /*< public > */ - gchar *mix_caps; - gchar *video_hw_caps; + /*< public > */ + char *mix_caps; + char *video_hw_caps; - void *reserved1; - void *reserved2; - void *reserved3; - void *reserved4; + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; }; /** * mix_videocaps_new: * @returns: A newly allocated instance of #MixVideoCaps -* +* * Use this method to create new instance of #MixVideoCaps */ MixVideoCaps *mix_videocaps_new (void); @@ -67,7 +67,7 @@ MixVideoCaps *mix_videocaps_new (void); * mix_videocaps_ref: * @mix: object to add reference * @returns: the MixVideoCaps instance where reference count has been increased. -* +* * Add reference count. */ MixVideoCaps *mix_videocaps_ref (MixVideoCaps * mix); @@ -75,21 +75,21 @@ MixVideoCaps *mix_videocaps_ref (MixVideoCaps * mix); /** * mix_videocaps_unref: * @obj: object to unref. -* +* * Decrement reference count of the object. */ #define mix_videocaps_unref(obj) mix_params_unref(MIX_PARAMS(obj)) /* Class Methods */ -MIX_RESULT mix_videocaps_set_mix_caps (MixVideoCaps * obj, gchar * mix_caps); +MIX_RESULT mix_videocaps_set_mix_caps (MixVideoCaps * obj, char * mix_caps); MIX_RESULT mix_videocaps_get_mix_caps (MixVideoCaps * obj, - gchar ** mix_caps); + char ** mix_caps); MIX_RESULT mix_videocaps_set_video_hw_caps (MixVideoCaps * obj, - gchar * video_hw_caps); + char * video_hw_caps); MIX_RESULT mix_videocaps_get_video_hw_caps (MixVideoCaps * obj, - gchar ** video_hw_caps); + char ** video_hw_caps); #endif /* __MIX_VIDEOCAPS_H__ */ diff --git a/mix_video/src/mixvideoconfigparams.cpp b/mix_video/src/mixvideoconfigparams.cpp index 7668b4e..25c88dd 100644 --- a/mix_video/src/mixvideoconfigparams.cpp +++ b/mix_video/src/mixvideoconfigparams.cpp @@ -13,7 +13,7 @@ * * * A base object of MI-X video configuration parameter objects. - * + * * * The derived MixVideoConfigParams object is created by the MMF/App * and provided in the MixVideo mix_video_configure() function. The get and set @@ -21,7 +21,7 @@ * configuration time. It will also be created by MixVideo and returned from the * mix_video_get_config() function, whereupon the MMF/App can get the get methods to * obtain current configuration information. 
- * + * * * There are decode mode objects (for example, MixVideoConfigParamsDec) and encode * mode objects (for example, MixVideoConfigParamsEnc). Each of these types is refined @@ -29,7 +29,7 @@ * object to match the media format of the stream to be handled, e.g. if the media * format of the stream to be decoded is H.264, the application would create a * MixVideoConfigParamsDecH264 object for the mix_video_configure() call. - * + * */ #include @@ -37,50 +37,50 @@ #include "mixvideoconfigparams.h" MixVideoConfigParams::MixVideoConfigParams() - :reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) { + :reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { } MixVideoConfigParams::~MixVideoConfigParams() { } -gboolean MixVideoConfigParams::copy(MixParams *target) const { - gboolean ret = FALSE; - MixVideoConfigParams * this_target = MIX_VIDEOCONFIGPARAMS(target); - if (NULL != this_target) - ret = MixParams::copy(target); - return ret; +bool MixVideoConfigParams::copy(MixParams *target) const { + bool ret = FALSE; + MixVideoConfigParams * this_target = MIX_VIDEOCONFIGPARAMS(target); + if (NULL != this_target) + ret = MixParams::copy(target); + return ret; } -gboolean MixVideoConfigParams::equal(MixParams* obj) const { - gboolean ret = FALSE; - MixVideoConfigParams * this_obj = MIX_VIDEOCONFIGPARAMS(obj); - if (NULL != this_obj) - ret = MixParams::equal(this_obj); - return ret; +bool MixVideoConfigParams::equal(MixParams* obj) const { + bool ret = FALSE; + MixVideoConfigParams * this_obj = MIX_VIDEOCONFIGPARAMS(obj); + if (NULL != this_obj) + ret = MixParams::equal(this_obj); + return ret; } MixParams* MixVideoConfigParams::dup() const { - MixParams *ret = new MixVideoConfigParams(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; + MixParams *ret = new MixVideoConfigParams(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; } MixVideoConfigParams * mix_videoconfigparams_new(void) { - return new MixVideoConfigParams(); + return new MixVideoConfigParams(); } MixVideoConfigParams * mix_videoconfigparams_ref(MixVideoConfigParams * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } diff --git a/mix_video/src/mixvideoconfigparams.h b/mix_video/src/mixvideoconfigparams.h index 5e4d9ad..d0afa72 100644 --- a/mix_video/src/mixvideoconfigparams.h +++ b/mix_video/src/mixvideoconfigparams.h @@ -28,18 +28,18 @@ class MixVideoConfigParams : public MixParams { public: MixVideoConfigParams(); virtual ~MixVideoConfigParams(); - virtual gboolean copy(MixParams *target) const; - virtual gboolean equal(MixParams* obj) const; + virtual bool copy(MixParams *target) const; + virtual bool equal(MixParams* obj) const; virtual MixParams* dup() const; - /*< public > */ - //MixParams parent; + /*< public > */ + //MixParams parent; - /*< private > */ + /*< private > */ protected: - void *reserved1; - void *reserved2; - void *reserved3; - void *reserved4; + void *reserved1; + void *reserved2; + void *reserved3; + void *reserved4; }; diff --git a/mix_video/src/mixvideoconfigparamsdec.cpp b/mix_video/src/mixvideoconfigparamsdec.cpp index b34e9b3..3500b3c 100644 --- a/mix_video/src/mixvideoconfigparamsdec.cpp +++ b/mix_video/src/mixvideoconfigparamsdec.cpp @@ -16,483 +16,490 @@ #include #include "mixvideolog.h" #include "mixvideoconfigparamsdec.h" +#include #define MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT(obj) \ if(!obj) return 
MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \ - + #define MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT(obj, prop) \ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \ - + #define MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR(obj, prop, prop2) \ if(!obj || !prop || !prop2 ) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \ + + +MixVideoConfigParamsDec::MixVideoConfigParamsDec() + :frame_order_mode(MIX_FRAMEORDER_MODE_DISPLAYORDER) + ,mime_type(NULL) + ,frame_rate_num(0) + ,frame_rate_denom(0) + ,picture_width(0) + ,picture_height(0) + ,raw_format(0) + ,rate_control(0) + ,mixbuffer_pool_size(0) + ,extra_surface_allocation(0) + ,video_range(0) + ,color_matrix(0) + ,bit_rate(0) + ,par_num(0) + ,par_denom(0) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) - -MixVideoConfigParamsDec::MixVideoConfigParamsDec() - :frame_order_mode(MIX_FRAMEORDER_MODE_DISPLAYORDER) - ,mime_type(NULL) - ,frame_rate_num(0) - ,frame_rate_denom(0) - ,picture_width(0) - ,picture_height(0) - ,raw_format(0) - ,rate_control(0) - ,mixbuffer_pool_size(0) - ,extra_surface_allocation(0) - ,video_range(0) - ,color_matrix(0) - ,bit_rate(0) - ,par_num(0) - ,par_denom(0) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) - { - memset(&this->header, 0, sizeof(header)); + memset(&this->header, 0, sizeof(header)); } MixVideoConfigParamsDec::~MixVideoConfigParamsDec() { - /* free header */ - if (NULL != this->header.data) { - g_free(this->header.data); - memset(&this->header, 0, sizeof(this->header)); - } - - /* free mime_type */ - if (this->mime_type->str) - g_string_free(this->mime_type, TRUE); - else - g_string_free(this->mime_type, FALSE); -} - -gboolean MixVideoConfigParamsDec::copy(MixParams *target) const { - MIX_RESULT mix_result = MIX_RESULT_FAIL; - MixVideoConfigParamsDec *this_target = MIX_VIDEOCONFIGPARAMSDEC(target); - LOG_V( "Begin\n"); - - if (NULL != this_target) { - /* copy properties of primitive type */ - this_target->frame_order_mode = this->frame_order_mode; - this_target->frame_rate_num = this->frame_rate_num; - this_target->frame_rate_denom = this->frame_rate_denom; - this_target->picture_width = this->picture_width; - this_target->picture_height = this->picture_height; - this_target->raw_format = this->raw_format; - this_target->rate_control = this->rate_control; - this_target->mixbuffer_pool_size = this->mixbuffer_pool_size; - this_target->extra_surface_allocation = this->extra_surface_allocation; - this_target->video_range = this->video_range; - this_target->color_matrix = this->color_matrix; - this_target->bit_rate = this->bit_rate; - this_target->par_num = this->par_num; - this_target->par_denom = this->par_denom; - - /* copy properties of non-primitive */ - - /* copy header */ - mix_result = mix_videoconfigparamsdec_set_header(this_target, - const_cast(&this->header)); - - if (MIX_RESULT_SUCCESS != mix_result) { - LOG_E( "set_header failed: mix_result = 0x%x\n", mix_result); - return FALSE; - } - - /* copy mime_type */ - if (NULL != this->mime_type) { - mix_result = mix_videoconfigparamsdec_set_mime_type(this_target, - this->mime_type->str); - } else { - mix_result = mix_videoconfigparamsdec_set_mime_type(this_target, NULL); - } - - if (MIX_RESULT_SUCCESS != mix_result) { - LOG_E( "set_mime_type failed: mix_result = 0x%x\n", mix_result); - return FALSE; - } - - /* TODO: copy other properties if there's any */ 
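A recurring change in these files is the removal of GLib allocation calls: g_strdup() becomes strdup(), g_free() becomes free(), and g_memdup() becomes an explicit malloc()+memcpy() pair (the commented-out memdup() lines in set_header()/get_header() below mark where the old call used to be). The inlined pair is equivalent to a small helper along these lines; the helper itself is illustrative only, since the patch inlines the two calls instead of defining one:

#include <stdlib.h>
#include <string.h>

typedef unsigned char uchar;    /* matches the uchar typedef used in this file */

static uchar *mix_memdup(const uchar *src, size_t n) {
    /* duplicate n bytes of src on the heap; returns NULL on allocation failure */
    uchar *dst = (uchar *)malloc(n);
    if (dst != NULL)
        memcpy(dst, src, n);
    return dst;
}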
- - /* Now chainup base class */ - return MixVideoConfigParams::copy(target); - } - - LOG_V( "End\n"); - - return FALSE; -} - -gboolean MixVideoConfigParamsDec::equal(MixParams* obj) const { - gboolean ret = FALSE; - MixVideoConfigParamsDec *this_obj = MIX_VIDEOCONFIGPARAMSDEC(obj); - - if (NULL != this_obj) { - // Deep compare - - /* check the equalitiy of the primitive type properties */ - if (this->frame_order_mode != this_obj->frame_order_mode) { - goto not_equal; - } - - if ((this->frame_rate_num != this_obj->frame_rate_num) && - (this->frame_rate_denom != this_obj->frame_rate_denom)) { - goto not_equal; - } - - if ((this->picture_width != this_obj->picture_width) && - (this->picture_height != this_obj->picture_height)) { - goto not_equal; - } - - if (this->raw_format != this_obj->raw_format) { - goto not_equal; - } - - if (this->rate_control != this_obj->rate_control) { - goto not_equal; - } - - if (this->mixbuffer_pool_size != this_obj->mixbuffer_pool_size) { - goto not_equal; - } - - if (this->extra_surface_allocation != this_obj->extra_surface_allocation) { - goto not_equal; - } - - /* check the equalitiy of the none-primitive type properties */ - - /* MixIOVec header */ - - if (this->header.data_size != this_obj->header.data_size) { - goto not_equal; - } - - if (this->header.buffer_size != this_obj->header.buffer_size) { - goto not_equal; - } - - if (this->header.data && this_obj->header.data) { - if (memcmp(this->header.data, this_obj->header.data, - this_obj->header.data_size) != 0) { - goto not_equal; - } - } else if (!(!this->header.data && !this_obj->header.data)) { - goto not_equal; - } - - /* compare mime_type */ - if (this->mime_type && this_obj->mime_type) { - if (g_string_equal(this->mime_type, this_obj->mime_type) - != TRUE) { - goto not_equal; - } - } else if (!(!this->mime_type && !this_obj->mime_type)) { - goto not_equal; - } - - if (this->video_range != this_obj->video_range) { - goto not_equal; - } - - if (this->color_matrix != this_obj->color_matrix) { - goto not_equal; - } - - if (this->bit_rate != this_obj->bit_rate) { - goto not_equal; - } - - if (this->par_num != this_obj->par_num) { - goto not_equal; - } - - if (this->par_denom != this_obj->par_denom) { - goto not_equal; - } - ret = TRUE; + /* free header */ + if (NULL != this->header.data) { + free(this->header.data); + memset(&this->header, 0, sizeof(this->header)); + } + + /* free mime_type */ + if (this->mime_type) { + free(this->mime_type); + this->mime_type = NULL; + } +} + +bool MixVideoConfigParamsDec::copy(MixParams *target) const { + MIX_RESULT mix_result = MIX_RESULT_FAIL; + MixVideoConfigParamsDec *this_target = MIX_VIDEOCONFIGPARAMSDEC(target); + LOG_V( "Begin\n"); + + if (NULL != this_target) { + /* copy properties of primitive type */ + this_target->frame_order_mode = this->frame_order_mode; + this_target->frame_rate_num = this->frame_rate_num; + this_target->frame_rate_denom = this->frame_rate_denom; + this_target->picture_width = this->picture_width; + this_target->picture_height = this->picture_height; + this_target->raw_format = this->raw_format; + this_target->rate_control = this->rate_control; + this_target->mixbuffer_pool_size = this->mixbuffer_pool_size; + this_target->extra_surface_allocation = this->extra_surface_allocation; + this_target->video_range = this->video_range; + this_target->color_matrix = this->color_matrix; + this_target->bit_rate = this->bit_rate; + this_target->par_num = this->par_num; + this_target->par_denom = this->par_denom; + + /* copy properties of non-primitive 
*/ + + /* copy header */ + mix_result = mix_videoconfigparamsdec_set_header(this_target, + const_cast(&this->header)); + + if (MIX_RESULT_SUCCESS != mix_result) { + LOG_E( "set_header failed: mix_result = 0x%x\n", mix_result); + return FALSE; + } + + /* copy mime_type */ + if (NULL != mime_type) { + mix_result = mix_videoconfigparamsdec_set_mime_type(this_target, + this->mime_type); + } else { + mix_result = mix_videoconfigparamsdec_set_mime_type(this_target, NULL); + } + + if (MIX_RESULT_SUCCESS != mix_result) { + LOG_E( "set_mime_type failed: mix_result = 0x%x\n", mix_result); + return FALSE; + } + + /* TODO: copy other properties if there's any */ + + /* Now chainup base class */ + return MixVideoConfigParams::copy(target); + } + + LOG_V( "End\n"); + + return FALSE; +} + +bool MixVideoConfigParamsDec::equal(MixParams* obj) const { + bool ret = FALSE; + MixVideoConfigParamsDec *this_obj = MIX_VIDEOCONFIGPARAMSDEC(obj); + + if (NULL != this_obj) { + // Deep compare + + /* check the equalitiy of the primitive type properties */ + if (this->frame_order_mode != this_obj->frame_order_mode) { + goto not_equal; + } + + if ((this->frame_rate_num != this_obj->frame_rate_num) && + (this->frame_rate_denom != this_obj->frame_rate_denom)) { + goto not_equal; + } + + if ((this->picture_width != this_obj->picture_width) && + (this->picture_height != this_obj->picture_height)) { + goto not_equal; + } + + if (this->raw_format != this_obj->raw_format) { + goto not_equal; + } + + if (this->rate_control != this_obj->rate_control) { + goto not_equal; + } + + if (this->mixbuffer_pool_size != this_obj->mixbuffer_pool_size) { + goto not_equal; + } + + if (this->extra_surface_allocation != this_obj->extra_surface_allocation) { + goto not_equal; + } + + /* check the equalitiy of the none-primitive type properties */ + + /* MixIOVec header */ + + if (this->header.data_size != this_obj->header.data_size) { + goto not_equal; + } + + if (this->header.buffer_size != this_obj->header.buffer_size) { + goto not_equal; + } + + if (this->header.data && this_obj->header.data) { + if (memcmp(this->header.data, this_obj->header.data, + this_obj->header.data_size) != 0) { + goto not_equal; + } + } else if (!(!this->header.data && !this_obj->header.data)) { + goto not_equal; + } + + /* compare mime_type */ + if (this->mime_type && this_obj->mime_type) { + if (strcmp(this->mime_type, this_obj->mime_type) != 0) { + goto not_equal; + } + } else if (!(!this->mime_type && !this_obj->mime_type)) { + goto not_equal; + } + + if (this->video_range != this_obj->video_range) { + goto not_equal; + } + + if (this->color_matrix != this_obj->color_matrix) { + goto not_equal; + } + + if (this->bit_rate != this_obj->bit_rate) { + goto not_equal; + } + + if (this->par_num != this_obj->par_num) { + goto not_equal; + } + + if (this->par_denom != this_obj->par_denom) { + goto not_equal; + } + ret = TRUE; not_equal: - if (TRUE != ret) { - return ret; - } + if (TRUE != ret) { + return ret; + } - /* chaining up. */ - ret = MixVideoConfigParams::equal(obj); - } + /* chaining up. 
*/ + ret = MixVideoConfigParams::equal(obj); + } - return ret; + return ret; } MixParams* MixVideoConfigParamsDec::dup() const { - MixParams *ret = NULL; - MixVideoConfigParamsDec *duplicate = new MixVideoConfigParamsDec(); - if (FALSE != copy(duplicate)) { - ret = duplicate; - } else { - mix_videoconfigparamsdec_unref(duplicate); - } - return ret; + MixParams *ret = NULL; + MixVideoConfigParamsDec *duplicate = new MixVideoConfigParamsDec(); + if (FALSE != copy(duplicate)) { + ret = duplicate; + } else { + mix_videoconfigparamsdec_unref(duplicate); + } + return ret; } MixVideoConfigParamsDec * mix_videoconfigparamsdec_new(void) { - return new MixVideoConfigParamsDec(); + return new MixVideoConfigParamsDec(); } MixVideoConfigParamsDec * mix_videoconfigparamsdec_ref(MixVideoConfigParamsDec * mix) { - return (MixVideoConfigParamsDec *) mix_params_ref(MIX_PARAMS(mix)); + return (MixVideoConfigParamsDec *) mix_params_ref(MIX_PARAMS(mix)); } /* TODO: Add getters and setters for other properties. The following is incomplete */ MIX_RESULT mix_videoconfigparamsdec_set_frame_order_mode( - MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->frame_order_mode = frame_order_mode; - LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode); - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->frame_order_mode = frame_order_mode; + LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode); + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_get_frame_order_mode( - MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, frame_order_mode); - *frame_order_mode = obj->frame_order_mode; - LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode); - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, frame_order_mode); + *frame_order_mode = obj->frame_order_mode; + LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode); + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_set_header( - MixVideoConfigParamsDec * obj, MixIOVec * header) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - - if (!header) { - return MIX_RESULT_NULL_PTR; - } - - if (header->data && header->buffer_size) { - obj->header.data = (guchar*)g_memdup(header->data, header->buffer_size); - if (!obj->header.data) { - return MIX_RESULT_NO_MEMORY; - } - obj->header.buffer_size = header->buffer_size; - obj->header.data_size = header->data_size; - } - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, MixIOVec * header) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + + if (!header) { + return MIX_RESULT_NULL_PTR; + } + + if (header->data && header->buffer_size) { + obj->header.data = (uchar*)malloc(header->buffer_size); +// obj->header.data = (uchar*)memdup(header->data, header->buffer_size); + if (!obj->header.data) { + return MIX_RESULT_NO_MEMORY; + } + memcpy(obj->header.data,header->data,header->buffer_size); + obj->header.buffer_size = header->buffer_size; + obj->header.data_size = header->data_size; + } + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_get_header( - MixVideoConfigParamsDec * obj, MixIOVec ** header) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, header); - - if (obj->header.data && 
obj->header.buffer_size) { - *header = (MixIOVec*)g_malloc(sizeof(MixIOVec)); - if (*header == NULL) { - return MIX_RESULT_NO_MEMORY; - } - (*header)->data = (guchar*)g_memdup(obj->header.data, obj->header.buffer_size); - (*header)->buffer_size = obj->header.buffer_size; - (*header)->data_size = obj->header.data_size; - } else { - *header = NULL; - } - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, MixIOVec ** header) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, header); + + if (obj->header.data && obj->header.buffer_size) { + *header = (MixIOVec*)malloc(sizeof(MixIOVec)); + if (*header == NULL) { + return MIX_RESULT_NO_MEMORY; + } + (*header)->data = (uchar*)malloc(obj->header.buffer_size); + if ((*header)->data == NULL) { + free(*header); + *header = NULL; + return MIX_RESULT_NO_MEMORY; + } + memcpy((*header)->data,obj->header.data, obj->header.buffer_size); +// (*header)->data = (uchar*)memdup(obj->header.data, obj->header.buffer_size); + (*header)->buffer_size = obj->header.buffer_size; + (*header)->data_size = obj->header.data_size; + } else { + *header = NULL; + } + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_set_mime_type( - MixVideoConfigParamsDec * obj, const gchar * mime_type) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - if (!mime_type) { - return MIX_RESULT_NULL_PTR; - } - if (obj->mime_type) { - if (obj->mime_type->str) - g_string_free(obj->mime_type, TRUE); - else - g_string_free(obj->mime_type, FALSE); - } - obj->mime_type = g_string_new(mime_type); - if (!obj->mime_type) { - return MIX_RESULT_NO_MEMORY; - } - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, const char * mime_type) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + if (!mime_type) { + return MIX_RESULT_NULL_PTR; + } + if (obj->mime_type) { + free(obj->mime_type); + obj->mime_type = NULL; + } + obj->mime_type = strdup(mime_type); + if (!obj->mime_type) { + return MIX_RESULT_NO_MEMORY; + } + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_get_mime_type( - MixVideoConfigParamsDec * obj, gchar ** mime_type) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, mime_type); - if (!obj->mime_type) { - *mime_type = NULL; - return MIX_RESULT_SUCCESS; - } - *mime_type = g_strdup(obj->mime_type->str); - if (!*mime_type) { - return MIX_RESULT_NO_MEMORY; - } - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, char ** mime_type) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, mime_type); + if (!obj->mime_type) { + *mime_type = NULL; + return MIX_RESULT_SUCCESS; + } + *mime_type = strdup(obj->mime_type); + if (!*mime_type) { + return MIX_RESULT_NO_MEMORY; + } + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_set_frame_rate( - MixVideoConfigParamsDec * obj, guint frame_rate_num, - guint frame_rate_denom) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->frame_rate_num = frame_rate_num; - obj->frame_rate_denom = frame_rate_denom; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint frame_rate_num, + uint frame_rate_denom) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->frame_rate_num = frame_rate_num; + obj->frame_rate_denom = frame_rate_denom; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_get_frame_rate( - MixVideoConfigParamsDec * obj, guint * frame_rate_num, - guint * frame_rate_denom) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, frame_rate_num, frame_rate_denom); - *frame_rate_num = obj->frame_rate_num; - *frame_rate_denom = 
obj->frame_rate_denom; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint * frame_rate_num, + uint * frame_rate_denom) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, frame_rate_num, frame_rate_denom); + *frame_rate_num = obj->frame_rate_num; + *frame_rate_denom = obj->frame_rate_denom; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_set_picture_res( - MixVideoConfigParamsDec * obj, guint picture_width, - guint picture_height) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->picture_width = picture_width; - obj->picture_height = picture_height; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint picture_width, + uint picture_height) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->picture_width = picture_width; + obj->picture_height = picture_height; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_get_picture_res( - MixVideoConfigParamsDec * obj, guint * picture_width, - guint * picture_height) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, picture_width, picture_height); - *picture_width = obj->picture_width; - *picture_height = obj->picture_height; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint * picture_width, + uint * picture_height) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, picture_width, picture_height); + *picture_width = obj->picture_width; + *picture_height = obj->picture_height; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_set_raw_format( - MixVideoConfigParamsDec * obj, guint raw_format) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + MixVideoConfigParamsDec * obj, uint raw_format) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - /* TODO: check if the value of raw_format is valid */ - obj->raw_format = raw_format; - return MIX_RESULT_SUCCESS; + /* TODO: check if the value of raw_format is valid */ + obj->raw_format = raw_format; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_get_raw_format( - MixVideoConfigParamsDec * obj, guint *raw_format) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, raw_format); - *raw_format = obj->raw_format; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint *raw_format) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, raw_format); + *raw_format = obj->raw_format; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_set_rate_control( - MixVideoConfigParamsDec * obj, guint rate_control) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + MixVideoConfigParamsDec * obj, uint rate_control) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - /* TODO: check if the value of rate_control is valid */ - obj->rate_control = rate_control; - return MIX_RESULT_SUCCESS; + /* TODO: check if the value of rate_control is valid */ + obj->rate_control = rate_control; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_get_rate_control( - MixVideoConfigParamsDec * obj, guint *rate_control) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, rate_control); - *rate_control = obj->rate_control; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint *rate_control) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, rate_control); + *rate_control = obj->rate_control; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_set_buffer_pool_size( - MixVideoConfigParamsDec * obj, guint bufpoolsize) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->mixbuffer_pool_size = 
bufpoolsize; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint bufpoolsize) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->mixbuffer_pool_size = bufpoolsize; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_get_buffer_pool_size( - MixVideoConfigParamsDec * obj, guint *bufpoolsize) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, bufpoolsize); - *bufpoolsize = obj->mixbuffer_pool_size; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint *bufpoolsize) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, bufpoolsize); + *bufpoolsize = obj->mixbuffer_pool_size; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation( - MixVideoConfigParamsDec * obj, guint extra_surface_allocation) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->extra_surface_allocation = extra_surface_allocation; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint extra_surface_allocation) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->extra_surface_allocation = extra_surface_allocation; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation( - MixVideoConfigParamsDec * obj, guint *extra_surface_allocation) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, extra_surface_allocation); - *extra_surface_allocation = obj->extra_surface_allocation; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint *extra_surface_allocation) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, extra_surface_allocation); + *extra_surface_allocation = obj->extra_surface_allocation; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_set_video_range( - MixVideoConfigParamsDec * obj, guint8 video_range) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->video_range = video_range; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint8 video_range) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->video_range = video_range; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_get_video_range( - MixVideoConfigParamsDec * obj, guint8 *video_range) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, video_range); - *video_range = obj->video_range; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint8 *video_range) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, video_range); + *video_range = obj->video_range; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_set_color_matrix( - MixVideoConfigParamsDec * obj, guint8 color_matrix) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->color_matrix = color_matrix; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint8 color_matrix) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->color_matrix = color_matrix; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_get_color_matrix( - MixVideoConfigParamsDec * obj, guint8 *color_matrix) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, color_matrix); - *color_matrix = obj->color_matrix; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint8 *color_matrix) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, color_matrix); + *color_matrix = obj->color_matrix; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_set_bit_rate( - MixVideoConfigParamsDec * obj, guint bit_rate) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->bit_rate = bit_rate; - return 
MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint bit_rate) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->bit_rate = bit_rate; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_get_bit_rate( - MixVideoConfigParamsDec * obj, guint *bit_rate) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, bit_rate); - *bit_rate = obj->bit_rate; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint *bit_rate) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, bit_rate); + *bit_rate = obj->bit_rate; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_set_pixel_aspect_ratio( - MixVideoConfigParamsDec * obj, guint par_num, guint par_denom) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->par_num = par_num; - obj->par_denom = par_denom; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint par_num, uint par_denom) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->par_num = par_num; + obj->par_denom = par_denom; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_get_pixel_aspect_ratio( - MixVideoConfigParamsDec * obj, guint * par_num, guint * par_denom) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, par_num, par_denom); - *par_num = obj->par_num; - *par_denom = obj->par_denom; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec * obj, uint * par_num, uint * par_denom) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, par_num, par_denom); + *par_num = obj->par_num; + *par_denom = obj->par_denom; + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoconfigparamsdec.h b/mix_video/src/mixvideoconfigparamsdec.h index 1e8657f..f492128 100644 --- a/mix_video/src/mixvideoconfigparamsdec.h +++ b/mix_video/src/mixvideoconfigparamsdec.h @@ -36,81 +36,81 @@ class MixVideoConfigParamsDec : public MixVideoConfigParams { public: MixVideoConfigParamsDec(); ~MixVideoConfigParamsDec(); - virtual gboolean copy(MixParams *target) const; - virtual gboolean equal(MixParams* obj) const; + virtual bool copy(MixParams *target) const; + virtual bool equal(MixParams* obj) const; virtual MixParams* dup() const; public: - /*< public > */ - //MixVideoConfigParams parent; - - /*< public > */ - - /* Frame re-ordering mode */ - MixFrameOrderMode frame_order_mode; - - /* Stream header information, such as - * codec_data in GStreamer pipelines */ - MixIOVec header; - - /* Mime type */ - GString * mime_type; - - /* Frame rate numerator value */ - guint frame_rate_num; - - /* Frame rate denominator value */ - guint frame_rate_denom; - - /* Picture width */ - gulong picture_width; - - /* Picture height */ - gulong picture_height; - - /* Render target format */ - guint raw_format; - - /* Rate control: CBR, VBR, none. Only valid for encoding. - * This should be set to none for decoding. 
*/
- guint rate_control;
-
- /* Size of pool of MixBuffers to allocate */
- guint mixbuffer_pool_size;
-
- /* Extra surfaces for MixVideoFrame objects to be allocated */
- guint extra_surface_allocation;
+ /*< public > */
+ //MixVideoConfigParams parent;
+
+ /*< public > */
+
+ /* Frame re-ordering mode */
+ MixFrameOrderMode frame_order_mode;
+
+ /* Stream header information, such as
+ * codec_data in GStreamer pipelines */
+ MixIOVec header;
+
+ /* Mime type */
+ char * mime_type;
+
+ /* Frame rate numerator value */
+ uint frame_rate_num;
+
+ /* Frame rate denominator value */
+ uint frame_rate_denom;
+
+ /* Picture width */
+ ulong picture_width;
+
+ /* Picture height */
+ ulong picture_height;
+
+ /* Render target format */
+ uint raw_format;
+
+ /* Rate control: CBR, VBR, none. Only valid for encoding.
+ * This should be set to none for decoding. */
+ uint rate_control;
+
+ /* Size of pool of MixBuffers to allocate */
+ uint mixbuffer_pool_size;
+
+ /* Extra surfaces for MixVideoFrame objects to be allocated */
+ uint extra_surface_allocation;

 /* video range, 0 for short range and 1 for full range, output only */
- guint8 video_range;
+ uint8 video_range;

- /*
+ /*
 color matrix, output only. Possible values defined in va.h
 #define VA_SRC_BT601 0x00000010
 #define VA_SRC_BT709 0x00000020
 #define VA_SRC_SMPTE_240 0x00000040
 */
- guint8 color_matrix;
+ uint8 color_matrix;

 /* bit rate in bps, output only */
- guint8 bit_rate;
-
- /* Pixel aspect ratio numerator value */
- guint par_num;
-
- /* Pixel aspect ratio denominator value */
- guint par_denom;
-
- /* Reserved for future use */
- void *reserved1;
-
- /* Reserved for future use */
- void *reserved2;
-
- /* Reserved for future use */
- void *reserved3;
-
- /* Reserved for future use */
- void *reserved4;
+ uint bit_rate; /* uint, not uint8: a rate in bps does not fit in 8 bits,
+ and the accessors already take a uint */
+
+ /* Pixel aspect ratio numerator value */
+ uint par_num;
+
+ /* Pixel aspect ratio denominator value */
+ uint par_denom;
+
+ /* Reserved for future use */
+ void *reserved1;
+
+ /* Reserved for future use */
+ void *reserved2;
+
+ /* Reserved for future use */
+ void *reserved3;
+
+ /* Reserved for future use */
+ void *reserved4;
 };

 /**
@@ -157,7 +157,7 @@ MixVideoConfigParamsDec *mix_videoconfigparamsdec_ref(MixVideoConfigParamsDec *
 * Set frame order mode.
 */
 MIX_RESULT mix_videoconfigparamsdec_set_frame_order_mode(
- MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode);
+ MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode);

 /**
 * mix_videoconfigparamsdec_get_frame_order_mode:
@@ -168,7 +168,7 @@ MIX_RESULT mix_videoconfigparamsdec_set_frame_order_mode(
 * Get frame order mode.
 */
 MIX_RESULT mix_videoconfigparamsdec_get_frame_order_mode(
- MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode);
+ MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode);

 /**
 * mix_videoconfigparamsdec_set_header:
@@ -179,7 +179,7 @@ MIX_RESULT mix_videoconfigparamsdec_get_frame_order_mode(
 * Set stream header information.
*/ MIX_RESULT mix_videoconfigparamsdec_set_header(MixVideoConfigParamsDec * obj, - MixIOVec *header); + MixIOVec *header); /** * mix_videoconfigparamsdec_get_header: @@ -193,7 +193,7 @@ MIX_RESULT mix_videoconfigparamsdec_set_header(MixVideoConfigParamsDec * obj, * */ MIX_RESULT mix_videoconfigparamsdec_get_header(MixVideoConfigParamsDec * obj, - MixIOVec ** header); + MixIOVec ** header); /** * mix_videoconfigparamsdec_set_mime_type: @@ -204,7 +204,7 @@ MIX_RESULT mix_videoconfigparamsdec_get_header(MixVideoConfigParamsDec * obj, * Set stream mime type */ MIX_RESULT mix_videoconfigparamsdec_set_mime_type(MixVideoConfigParamsDec * obj, - const gchar * mime_type); + const char * mime_type); /** * mix_videoconfigparamsdec_get_mime_type: @@ -215,46 +215,46 @@ MIX_RESULT mix_videoconfigparamsdec_set_mime_type(MixVideoConfigParamsDec * obj, * Get mime type * * Caller is responsible to g_free *mime_type - * + * */ MIX_RESULT mix_videoconfigparamsdec_get_mime_type(MixVideoConfigParamsDec * obj, - gchar ** mime_type); + char ** mime_type); /** * mix_videoconfigparamsdec_set_frame_rate: * @obj: #MixVideoConfigParamsDec object * @frame_rate_num: Frame rate numerator value - * @frame_rate_denom: Frame rate denominator value * + * @frame_rate_denom: Frame rate denominator value * * @returns: Common Video Error Return Codes * * Set frame rate */ MIX_RESULT mix_videoconfigparamsdec_set_frame_rate(MixVideoConfigParamsDec * obj, - guint frame_rate_num, guint frame_rate_denom); + uint frame_rate_num, uint frame_rate_denom); /** * mix_videoconfigparamsdec_get_frame_rate: * @obj: #MixVideoConfigParamsDec object - * @frame_rate_num: Frame rate numerator value to be returned + * @frame_rate_num: Frame rate numerator value to be returned * @frame_rate_denom: Frame rate denominator value to be returned * @returns: Common Video Error Return Codes * * Get frame rate */ MIX_RESULT mix_videoconfigparamsdec_get_frame_rate(MixVideoConfigParamsDec * obj, - guint * frame_rate_num, guint * frame_rate_denom); + uint * frame_rate_num, uint * frame_rate_denom); /** * mix_videoconfigparamsdec_set_picture_res: * @obj: #MixVideoConfigParamsDec object - * @picture_width: Picture width + * @picture_width: Picture width * @picture_height: Picture height * @returns: Common Video Error Return Codes * * Set video resolution */ MIX_RESULT mix_videoconfigparamsdec_set_picture_res(MixVideoConfigParamsDec * obj, - guint picture_width, guint picture_height); + uint picture_width, uint picture_height); /** * mix_videoconfigparamsdec_get_picture_res: @@ -266,7 +266,7 @@ MIX_RESULT mix_videoconfigparamsdec_set_picture_res(MixVideoConfigParamsDec * ob * Get video resolution */ MIX_RESULT mix_videoconfigparamsdec_get_picture_res(MixVideoConfigParamsDec * obj, - guint * picture_width, guint * picture_height); + uint * picture_width, uint * picture_height); /** * mix_videoconfigparamsdec_set_raw_format: @@ -277,7 +277,7 @@ MIX_RESULT mix_videoconfigparamsdec_get_picture_res(MixVideoConfigParamsDec * ob * Set Render target format */ MIX_RESULT mix_videoconfigparamsdec_set_raw_format(MixVideoConfigParamsDec * obj, - guint raw_format); + uint raw_format); /** * mix_videoconfigparamsdec_get_raw_format: @@ -288,19 +288,19 @@ MIX_RESULT mix_videoconfigparamsdec_set_raw_format(MixVideoConfigParamsDec * obj * Get Render target format */ MIX_RESULT mix_videoconfigparamsdec_get_raw_format(MixVideoConfigParamsDec * obj, - guint *raw_format); + uint *raw_format); /** * mix_videoconfigparamsdec_set_rate_control: * @obj: #MixVideoConfigParamsDec object - * 
@rate_control: Rate control: CBR, VBR, none. Only valid for encoding. + * @rate_control: Rate control: CBR, VBR, none. Only valid for encoding. * This should be set to none for decoding. * @returns: Common Video Error Return Codes * * Set rate control */ MIX_RESULT mix_videoconfigparamsdec_set_rate_control(MixVideoConfigParamsDec * obj, - guint rate_control); + uint rate_control); /** * mix_videoconfigparamsdec_get_rate_control: @@ -311,7 +311,7 @@ MIX_RESULT mix_videoconfigparamsdec_set_rate_control(MixVideoConfigParamsDec * o * Get rate control */ MIX_RESULT mix_videoconfigparamsdec_get_rate_control(MixVideoConfigParamsDec * obj, - guint *rate_control); + uint *rate_control); /** * mix_videoconfigparamsdec_set_buffer_pool_size: @@ -322,7 +322,7 @@ MIX_RESULT mix_videoconfigparamsdec_get_rate_control(MixVideoConfigParamsDec * o * Set buffer pool size */ MIX_RESULT mix_videoconfigparamsdec_set_buffer_pool_size(MixVideoConfigParamsDec * obj, - guint bufpoolsize); + uint bufpoolsize); /** * mix_videoconfigparamsdec_get_buffer_pool_size: @@ -333,7 +333,7 @@ MIX_RESULT mix_videoconfigparamsdec_set_buffer_pool_size(MixVideoConfigParamsDec * Get buffer pool size */ MIX_RESULT mix_videoconfigparamsdec_get_buffer_pool_size(MixVideoConfigParamsDec * obj, - guint *bufpoolsize); + uint *bufpoolsize); /** * mix_videoconfigparamsdec_set_extra_surface_allocation: @@ -344,7 +344,7 @@ MIX_RESULT mix_videoconfigparamsdec_get_buffer_pool_size(MixVideoConfigParamsDec * Set extra surface allocation */ MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation(MixVideoConfigParamsDec * obj, - guint extra_surface_allocation); + uint extra_surface_allocation); /** * mix_videoconfigparamsdec_get_extra_surface_allocation: @@ -355,7 +355,7 @@ MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation(MixVideoConfigP * Get extra surface allocation */ MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation(MixVideoConfigParamsDec * obj, - guint *extra_surface_allocation); + uint *extra_surface_allocation); /** @@ -367,7 +367,7 @@ MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation(MixVideoConfigP * Set video range */ MIX_RESULT mix_videoconfigparamsdec_set_video_range(MixVideoConfigParamsDec * obj, - guint8 video_range); + uint8 video_range); /** * mix_videoconfigparamsdec_get_video_range: @@ -378,7 +378,7 @@ MIX_RESULT mix_videoconfigparamsdec_set_video_range(MixVideoConfigParamsDec * ob * Get video range */ MIX_RESULT mix_videoconfigparamsdec_get_video_range(MixVideoConfigParamsDec * obj, - guint8 *video_range); + uint8 *video_range); /** @@ -390,7 +390,7 @@ MIX_RESULT mix_videoconfigparamsdec_get_video_range(MixVideoConfigParamsDec * ob * Set color matrix */ MIX_RESULT mix_videoconfigparamsdec_set_color_matrix(MixVideoConfigParamsDec * obj, - guint8 color_matrix); + uint8 color_matrix); /** * mix_videoconfigparamsdec_get_color_matrix: @@ -401,7 +401,7 @@ MIX_RESULT mix_videoconfigparamsdec_set_color_matrix(MixVideoConfigParamsDec * o * Get color matrix */ MIX_RESULT mix_videoconfigparamsdec_get_color_matrix(MixVideoConfigParamsDec * obj, - guint8 *color_matrix); + uint8 *color_matrix); /** @@ -413,7 +413,7 @@ MIX_RESULT mix_videoconfigparamsdec_get_color_matrix(MixVideoConfigParamsDec * o * Set bit rate */ MIX_RESULT mix_videoconfigparamsdec_set_bit_rate(MixVideoConfigParamsDec * obj, - guint bit_rate); + uint bit_rate); /** * mix_videoconfigparamsdec_get_bit_rate: @@ -424,7 +424,7 @@ MIX_RESULT mix_videoconfigparamsdec_set_bit_rate(MixVideoConfigParamsDec * obj, * Get bit rate */ 
MIX_RESULT mix_videoconfigparamsdec_get_bit_rate(MixVideoConfigParamsDec * obj, - guint *bit_rate); + uint *bit_rate); @@ -432,26 +432,26 @@ MIX_RESULT mix_videoconfigparamsdec_get_bit_rate(MixVideoConfigParamsDec * obj, * mix_videoconfigparamsdec_set_pixel_aspect_ratio: * @obj: #MixVideoConfigParamsDec object * @par_num: Pixel aspect ratio numerator value - * @par_denom: Pixel aspect ratio denominator value * + * @par_denom: Pixel aspect ratio denominator value * * @returns: Common Video Error Return Codes * * Set pixel aspect ratio */ MIX_RESULT mix_videoconfigparamsdec_set_pixel_aspect_ratio(MixVideoConfigParamsDec * obj, - guint par_num, guint par_denom); + uint par_num, uint par_denom); /** * mix_videoconfigparamsdec_get_pixel_aspect_ratio: * @obj: #MixVideoConfigParamsDec object - * @par_num: Pixel aspect ratio numerator value to be returned + * @par_num: Pixel aspect ratio numerator value to be returned * @par_denom: Pixel aspect ratio denominator value to be returned * @returns: Common Video Error Return Codes * * Get pixel aspect ratio */ MIX_RESULT mix_videoconfigparamsdec_get_pixel_aspect_ratio(MixVideoConfigParamsDec * obj, - guint * par_num, guint * par_denom); - + uint * par_num, uint * par_denom); + /* TODO: Add getters and setters for other properties */ diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.cpp b/mix_video/src/mixvideoconfigparamsdec_h264.cpp index bd96340..64dfdf7 100644 --- a/mix_video/src/mixvideoconfigparamsdec_h264.cpp +++ b/mix_video/src/mixvideoconfigparamsdec_h264.cpp @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
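[For illustration only; not part of the patch. A minimal caller-side sketch of the de-GLib'd mime-type accessors declared in mixvideoconfigparamsdec.h above. The getter now hands back a strdup()'d copy, so callers release it with plain free() rather than g_free(); the mime string "video/h264" is a placeholder value.]

    #include <stdio.h>
    #include <stdlib.h>
    #include "mixvideoconfigparamsdec.h"

    static void example_mime_type_roundtrip(void) {
        MixVideoConfigParamsDec *params = mix_videoconfigparamsdec_new();
        char *mime = NULL;
        if (mix_videoconfigparamsdec_set_mime_type(params, "video/h264") == MIX_RESULT_SUCCESS &&
            mix_videoconfigparamsdec_get_mime_type(params, &mime) == MIX_RESULT_SUCCESS &&
            mime != NULL) {
            printf("mime type = %s\n", mime);
            free(mime);  /* strdup()'d copy: plain free(), not g_free() */
        }
        mix_videoconfigparamsdec_unref(params);
    }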
@@ -16,51 +16,51 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoconfigparamsdec_h264.h" MixVideoConfigParamsDecH264::MixVideoConfigParamsDecH264() - :reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) { + :reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { } MixVideoConfigParamsDecH264::~MixVideoConfigParamsDecH264() { } -gboolean MixVideoConfigParamsDecH264::copy(MixParams *target) const { - gboolean ret = FALSE; - MixVideoConfigParamsDecH264 * this_target = MIX_VIDEOCONFIGPARAMSDEC_H264(target); - if (NULL != this_target) - ret = MixVideoConfigParamsDec::copy(target); - return ret; +bool MixVideoConfigParamsDecH264::copy(MixParams *target) const { + bool ret = FALSE; + MixVideoConfigParamsDecH264 * this_target = MIX_VIDEOCONFIGPARAMSDEC_H264(target); + if (NULL != this_target) + ret = MixVideoConfigParamsDec::copy(target); + return ret; } -gboolean MixVideoConfigParamsDecH264::equal(MixParams* obj) const { - gboolean ret = FALSE; - MixVideoConfigParamsDecH264 * this_obj = MIX_VIDEOCONFIGPARAMSDEC_H264(obj); - if (NULL != this_obj) - ret = MixVideoConfigParamsDec::equal(this_obj); - return ret; +bool MixVideoConfigParamsDecH264::equal(MixParams* obj) const { + bool ret = FALSE; + MixVideoConfigParamsDecH264 * this_obj = MIX_VIDEOCONFIGPARAMSDEC_H264(obj); + if (NULL != this_obj) + ret = MixVideoConfigParamsDec::equal(this_obj); + return ret; } MixParams* MixVideoConfigParamsDecH264::dup() const { - MixParams *ret = new MixVideoConfigParamsDecH264(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; + MixParams *ret = new MixVideoConfigParamsDecH264(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; } MixVideoConfigParamsDecH264 * mix_videoconfigparamsdec_h264_new (void) { - return new MixVideoConfigParamsDecH264(); + return new MixVideoConfigParamsDecH264(); } MixVideoConfigParamsDecH264* mix_videoconfigparamsdec_h264_ref (MixVideoConfigParamsDecH264 * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } /* TODO: Add getters and setters for properties if any */ diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.h b/mix_video/src/mixvideoconfigparamsdec_h264.h index f445fa8..ee8b786 100644 --- a/mix_video/src/mixvideoconfigparamsdec_h264.h +++ b/mix_video/src/mixvideoconfigparamsdec_h264.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. 
Any license under such intellectual property rights must be express and approved by Intel in writing.
@@ -21,7 +21,7 @@ No license under any patent, copyright, trade secret or other intellectual prope
 /**
 * MIX_IS_VIDEOCONFIGPARAMSDEC_H264:
 * @obj: an object.
-*
+*
 * Checks if the given object is an instance of #MixVideoConfigParamsDecH264
 */
 #define MIX_IS_VIDEOCONFIGPARAMSDEC_H264(obj) ((NULL != MIX_VIDEOCONFIGPARAMSDEC_H264(obj)) ? TRUE : FALSE)
@@ -37,25 +37,25 @@ class MixVideoConfigParamsDecH264 : public MixVideoConfigParamsDec
 public:
 MixVideoConfigParamsDecH264();
 ~MixVideoConfigParamsDecH264();
- virtual gboolean copy(MixParams *target) const;
- virtual gboolean equal(MixParams* obj) const;
+ virtual bool copy(MixParams *target) const;
+ virtual bool equal(MixParams* obj) const;
 virtual MixParams* dup() const;
 public:
- /*< public > */
-
- /* TODO: Add H.264 configuration paramters */
-
- /* Reserved for future use */
- void *reserved1;
-
- /* Reserved for future use */
- void *reserved2;
-
- /* Reserved for future use */
- void *reserved3;
-
- /* Reserved for future use */
- void *reserved4;
+ /*< public > */
+
+ /* TODO: Add H.264 configuration parameters */
+
+ /* Reserved for future use */
+ void *reserved1;
+
+ /* Reserved for future use */
+ void *reserved2;
+
+ /* Reserved for future use */
+ void *reserved3;
+
+ /* Reserved for future use */
+ void *reserved4;
 };
@@ -63,7 +63,7 @@ public:
 /**
 * mix_videoconfigparamsdec_h264_get_type:
 * @returns: type
-*
+*
 * Get the type of object.
 */
 //GType mix_videoconfigparamsdec_h264_get_type (void);
@@ -71,7 +71,7 @@ public:
 /**
 * mix_videoconfigparamsdec_h264_new:
 * @returns: A newly allocated instance of #MixVideoConfigParamsDecH264
-*
+*
 * Use this method to create new instance of #MixVideoConfigParamsDecH264
 */
 MixVideoConfigParamsDecH264 *mix_videoconfigparamsdec_h264_new (void);
@@ -79,16 +79,16 @@ MixVideoConfigParamsDecH264 *mix_videoconfigparamsdec_h264_new (void);
 * mix_videoconfigparamsdec_h264_ref:
 * @mix: object to add reference
 * @returns: #the MixVideoConfigParamsDecH264 instance where reference count has been increased.
-*
+*
 * Add reference count.
 */
 MixVideoConfigParamsDecH264
- * mix_videoconfigparamsdec_h264_ref (MixVideoConfigParamsDecH264 * mix);
+* mix_videoconfigparamsdec_h264_ref (MixVideoConfigParamsDecH264 * mix);

 /**
 * mix_videoconfigparamsdec_h264_unref:
 * @obj: object to unref.
-*
+*
 * Decrement reference count of the object.
 */
 #define mix_videoconfigparamsdec_h264_unref(obj) mix_params_unref(MIX_PARAMS(obj))
diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.cpp b/mix_video/src/mixvideoconfigparamsdec_mp42.cpp
index c610411..98c564d 100644
--- a/mix_video/src/mixvideoconfigparamsdec_mp42.cpp
+++ b/mix_video/src/mixvideoconfigparamsdec_mp42.cpp
@@ -1,4 +1,4 @@
-/*
+/*
 INTEL CONFIDENTIAL
 Copyright 2009 Intel Corporation All Rights Reserved.
 The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
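[A hedged sketch, not part of the patch, of the reference-counting contract behind the _new/_ref/_unref trio declared in mixvideoconfigparamsdec_h264.h above, assuming _new() returns an object holding one reference, as the dup() implementations in this series suggest.]

    #include "mixvideoconfigparamsdec_h264.h"

    static void example_refcount(void) {
        MixVideoConfigParamsDecH264 *p = mix_videoconfigparamsdec_h264_new();
        /* _ref() bumps the count and returns the same pointer */
        MixVideoConfigParamsDecH264 *q = mix_videoconfigparamsdec_h264_ref(p);
        mix_videoconfigparamsdec_h264_unref(q);  /* one reference remains */
        mix_videoconfigparamsdec_h264_unref(p);  /* last reference: object freed */
    }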
@@ -20,93 +20,93 @@ #define MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT(obj) \ if(!obj) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) return MIX_RESULT_FAIL; \ - + #define MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT(obj, prop) \ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) return MIX_RESULT_FAIL; \ - + MixVideoConfigParamsDecMP42::MixVideoConfigParamsDecMP42() - :mpegversion(0) - ,divxversion(0) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) { + :mpegversion(0) + ,divxversion(0) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { } MixVideoConfigParamsDecMP42::~MixVideoConfigParamsDecMP42() { } -gboolean MixVideoConfigParamsDecMP42::copy(MixParams *target) const { - gboolean ret = FALSE; - MixVideoConfigParamsDecMP42 * this_target = MIX_VIDEOCONFIGPARAMSDEC_MP42(target); - if (NULL != this_target) { - this_target->mpegversion = this->mpegversion; - this_target->divxversion = this->divxversion; - ret = MixVideoConfigParamsDec::copy(target); - } - return ret; +bool MixVideoConfigParamsDecMP42::copy(MixParams *target) const { + bool ret = FALSE; + MixVideoConfigParamsDecMP42 * this_target = MIX_VIDEOCONFIGPARAMSDEC_MP42(target); + if (NULL != this_target) { + this_target->mpegversion = this->mpegversion; + this_target->divxversion = this->divxversion; + ret = MixVideoConfigParamsDec::copy(target); + } + return ret; } -gboolean MixVideoConfigParamsDecMP42::equal(MixParams* obj) const { - gboolean ret = FALSE; - MixVideoConfigParamsDecMP42 * this_obj = MIX_VIDEOCONFIGPARAMSDEC_MP42(obj); - if (NULL != this_obj) - ret = MixVideoConfigParamsDec::equal(this_obj); - return ret; +bool MixVideoConfigParamsDecMP42::equal(MixParams* obj) const { + bool ret = FALSE; + MixVideoConfigParamsDecMP42 * this_obj = MIX_VIDEOCONFIGPARAMSDEC_MP42(obj); + if (NULL != this_obj) + ret = MixVideoConfigParamsDec::equal(this_obj); + return ret; } MixParams* MixVideoConfigParamsDecMP42::dup() const { - MixParams *ret = new MixVideoConfigParamsDecMP42(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; + MixParams *ret = new MixVideoConfigParamsDecMP42(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; } MixVideoConfigParamsDecMP42 * mix_videoconfigparamsdec_mp42_new(void) { - return new MixVideoConfigParamsDecMP42(); + return new MixVideoConfigParamsDecMP42(); } MixVideoConfigParamsDecMP42 * mix_videoconfigparamsdec_mp42_ref(MixVideoConfigParamsDecMP42 * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } /* TODO: Add getters and setters for properties if any */ MIX_RESULT mix_videoconfigparamsdec_mp42_set_mpegversion( - MixVideoConfigParamsDecMP42 *obj, guint version) { - MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT (obj); - obj->mpegversion = version; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDecMP42 *obj, uint version) { + MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT (obj); + obj->mpegversion = version; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_mp42_get_mpegversion( - MixVideoConfigParamsDecMP42 *obj, guint *version) { - MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT (obj, version); - *version = obj->mpegversion; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDecMP42 *obj, uint *version) { + MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT (obj, version); + *version = 
obj->mpegversion; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion( - MixVideoConfigParamsDecMP42 *obj, guint version) { - MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT (obj); - obj->divxversion = version; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDecMP42 *obj, uint version) { + MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT (obj); + obj->divxversion = version; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion( - MixVideoConfigParamsDecMP42 *obj, guint *version) { - MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT (obj, version); - *version = obj->divxversion; - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDecMP42 *obj, uint *version) { + MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT (obj, version); + *version = obj->divxversion; + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.h b/mix_video/src/mixvideoconfigparamsdec_mp42.h index b4b73b7..5969079 100644 --- a/mix_video/src/mixvideoconfigparamsdec_mp42.h +++ b/mix_video/src/mixvideoconfigparamsdec_mp42.h @@ -1,4 +1,4 @@ -/* +/* INTEL CONFIDENTIAL Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
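[For illustration, a short sketch (not part of the patch) of the MP4/DivX version accessors implemented in mixvideoconfigparamsdec_mp42.cpp above; the version numbers are placeholder values.]

    #include "mixvideoconfigparamsdec_mp42.h"

    static MIX_RESULT example_configure_mp42(MixVideoConfigParamsDecMP42 *cfg) {
        MIX_RESULT res = mix_videoconfigparamsdec_mp42_set_mpegversion(cfg, 4);
        if (MIX_RESULT_SUCCESS != res)
            return res;
        /* placeholder DivX version; real values depend on the container */
        return mix_videoconfigparamsdec_mp42_set_divxversion(cfg, 5);
    }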
@@ -36,29 +36,29 @@ class MixVideoConfigParamsDecMP42 : public MixVideoConfigParamsDec { public: MixVideoConfigParamsDecMP42(); ~MixVideoConfigParamsDecMP42(); - virtual gboolean copy(MixParams *target) const; - virtual gboolean equal(MixParams* obj) const; + virtual bool copy(MixParams *target) const; + virtual bool equal(MixParams* obj) const; virtual MixParams* dup() const; public: - /*< public > */ - - /* MPEG version */ - guint mpegversion; - - /* DivX version */ - guint divxversion; - - /* Reserved for future use */ - void *reserved1; - - /* Reserved for future use */ - void *reserved2; - - /* Reserved for future use */ - void *reserved3; - - /* Reserved for future use */ - void *reserved4; + /*< public > */ + + /* MPEG version */ + uint mpegversion; + + /* DivX version */ + uint divxversion; + + /* Reserved for future use */ + void *reserved1; + + /* Reserved for future use */ + void *reserved2; + + /* Reserved for future use */ + void *reserved3; + + /* Reserved for future use */ + void *reserved4; }; /** @@ -108,7 +108,7 @@ MixVideoConfigParamsDecMP42 * Set MPEG version */ MIX_RESULT mix_videoconfigparamsdec_mp42_set_mpegversion( - MixVideoConfigParamsDecMP42 *obj, guint version); + MixVideoConfigParamsDecMP42 *obj, uint version); /** * mix_videoconfigparamsdec_mp42_get_mpegversion: @@ -119,7 +119,7 @@ MIX_RESULT mix_videoconfigparamsdec_mp42_set_mpegversion( * Get MPEG version */ MIX_RESULT mix_videoconfigparamsdec_mp42_get_mpegversion( - MixVideoConfigParamsDecMP42 *obj, guint *version); + MixVideoConfigParamsDecMP42 *obj, uint *version); /** * mix_videoconfigparamsdec_mp42_set_divxversion: @@ -130,7 +130,7 @@ MIX_RESULT mix_videoconfigparamsdec_mp42_get_mpegversion( * Set DivX version */ MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion( - MixVideoConfigParamsDecMP42 *obj, guint version); + MixVideoConfigParamsDecMP42 *obj, uint version); /** * mix_videoconfigparamsdec_mp42_set_divxversion: @@ -141,7 +141,7 @@ MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion( * Get DivX version */ MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion( - MixVideoConfigParamsDecMP42 *obj, guint *version); + MixVideoConfigParamsDecMP42 *obj, uint *version); diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.cpp b/mix_video/src/mixvideoconfigparamsdec_vc1.cpp index baf1acc..fd2e1c5 100644 --- a/mix_video/src/mixvideoconfigparamsdec_vc1.cpp +++ b/mix_video/src/mixvideoconfigparamsdec_vc1.cpp @@ -1,4 +1,4 @@ -/* +/* INTEL CONFIDENTIAL Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
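[The copy()/dup() chain-up convention these codec subclasses all follow, restated as a sketch for a hypothetical codec "Foo" (the class, cast macro, and field names below are invented for illustration): copy the subclass's own fields first, then defer to the base class so shared fields such as mime_type and header are copied exactly once.]

    bool MixVideoConfigParamsDecFoo::copy(MixParams *target) const {
        bool ret = FALSE;
        MixVideoConfigParamsDecFoo *t = MIX_VIDEOCONFIGPARAMSDEC_FOO(target);
        if (NULL != t) {
            t->foo_version = this->foo_version;          /* subclass-specific field */
            ret = MixVideoConfigParamsDec::copy(target); /* then chain up to the base */
        }
        return ret;
    }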
@@ -17,54 +17,54 @@ #include "mixvideoconfigparamsdec_vc1.h" MixVideoConfigParamsDecVC1::MixVideoConfigParamsDecVC1() - :reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) { + :reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { } MixVideoConfigParamsDecVC1::~MixVideoConfigParamsDecVC1() { } -gboolean MixVideoConfigParamsDecVC1::copy(MixParams *target) const { - gboolean ret = FALSE; - MixVideoConfigParamsDecVC1 * this_target = MIX_VIDEOCONFIGPARAMSDEC_VC1(target); - if (NULL != this_target) { - ret = MixVideoConfigParamsDec::copy(target); - } - return ret; +bool MixVideoConfigParamsDecVC1::copy(MixParams *target) const { + bool ret = FALSE; + MixVideoConfigParamsDecVC1 * this_target = MIX_VIDEOCONFIGPARAMSDEC_VC1(target); + if (NULL != this_target) { + ret = MixVideoConfigParamsDec::copy(target); + } + return ret; } -gboolean MixVideoConfigParamsDecVC1::equal(MixParams* obj) const { - gboolean ret = FALSE; - MixVideoConfigParamsDecVC1 * this_obj = MIX_VIDEOCONFIGPARAMSDEC_VC1(obj); - if (NULL != this_obj) - ret = MixVideoConfigParamsDec::equal(this_obj); - return ret; +bool MixVideoConfigParamsDecVC1::equal(MixParams* obj) const { + bool ret = FALSE; + MixVideoConfigParamsDecVC1 * this_obj = MIX_VIDEOCONFIGPARAMSDEC_VC1(obj); + if (NULL != this_obj) + ret = MixVideoConfigParamsDec::equal(this_obj); + return ret; } MixParams* MixVideoConfigParamsDecVC1::dup() const { - MixParams *ret = new MixVideoConfigParamsDecVC1(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; + MixParams *ret = new MixVideoConfigParamsDecVC1(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; } MixVideoConfigParamsDecVC1 * mix_videoconfigparamsdec_vc1_new(void) { - return new MixVideoConfigParamsDecVC1(); + return new MixVideoConfigParamsDecVC1(); } MixVideoConfigParamsDecVC1 * mix_videoconfigparamsdec_vc1_ref( - MixVideoConfigParamsDecVC1 * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + MixVideoConfigParamsDecVC1 * mix) { + if (NULL != mix) + mix->Ref(); + return mix; } diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.h b/mix_video/src/mixvideoconfigparamsdec_vc1.h index 559ab82..1397424 100644 --- a/mix_video/src/mixvideoconfigparamsdec_vc1.h +++ b/mix_video/src/mixvideoconfigparamsdec_vc1.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
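[What the *_SETTER_CHECK_INPUT macros used throughout these setters expand to, written out once for clarity; a sketch, not part of the patch, using the library's uint typedef and the vc1 type-check macro defined below.]

    MIX_RESULT example_set_wmv_version(MixVideoConfigParamsDecVC1 *obj, uint wmv_version) {
        if (!obj)
            return MIX_RESULT_NULL_PTR;              /* NULL object */
        if (!MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(obj))
            return MIX_RESULT_FAIL;                  /* wrong runtime type */
        obj->wmv_version = wmv_version;
        return MIX_RESULT_SUCCESS;
    }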
@@ -21,7 +21,7 @@ No license under any patent, copyright, trade secret or other intellectual prope
 /**
 * MIX_IS_VIDEOCONFIGPARAMSDEC_VC1:
 * @obj: an object.
-*
+*
 * Checks if the given object is an instance of #MixVideoConfigParamsDecVC1
 */
 #define MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(obj) ((NULL != MIX_VIDEOCONFIGPARAMSDEC_VC1(obj)) ? TRUE : FALSE)
@@ -36,38 +36,38 @@ class MixVideoConfigParamsDecVC1 : public MixVideoConfigParamsDec
 public:
 MixVideoConfigParamsDecVC1();
 ~MixVideoConfigParamsDecVC1();
- virtual gboolean copy(MixParams *target) const;
- virtual gboolean equal(MixParams* obj) const;
+ virtual bool copy(MixParams *target) const;
+ virtual bool equal(MixParams* obj) const;
 virtual MixParams* dup() const;
 public:
- /*< public > */
-
- /* TODO: Add VC1 configuration paramters */
- /* TODO: wmv_version and fourcc type might be changed later */
-
- /* WMV version */
- guint wmv_version;
-
- /* FourCC code */
- guint fourcc;
-
- /* Reserved for future use */
- void *reserved1;
-
- /* Reserved for future use */
- void *reserved2;
-
- /* Reserved for future use */
- void *reserved3;
-
- /* Reserved for future use */
- void *reserved4;
+ /*< public > */
+
+ /* TODO: Add VC1 configuration parameters */
+ /* TODO: wmv_version and fourcc type might be changed later */
+
+ /* WMV version */
+ uint wmv_version;
+
+ /* FourCC code */
+ uint fourcc;
+
+ /* Reserved for future use */
+ void *reserved1;
+
+ /* Reserved for future use */
+ void *reserved2;
+
+ /* Reserved for future use */
+ void *reserved3;
+
+ /* Reserved for future use */
+ void *reserved4;
 };

 /**
 * mix_videoconfigparamsdec_vc1_new:
 * @returns: A newly allocated instance of #MixVideoConfigParamsDecVC1
-*
+*
 * Use this method to create new instance of #MixVideoConfigParamsDecVC1
 */
 MixVideoConfigParamsDecVC1 *mix_videoconfigparamsdec_vc1_new (void);
@@ -75,16 +75,16 @@ MixVideoConfigParamsDecVC1 *mix_videoconfigparamsdec_vc1_new (void);
 * mix_videoconfigparamsdec_vc1_ref:
 * @mix: object to add reference
 * @returns: the #MixVideoConfigParamsDecVC1 instance where reference count has been increased.
-*
+*
 * Add reference count.
 */
 MixVideoConfigParamsDecVC1
- * mix_videoconfigparamsdec_vc1_ref (MixVideoConfigParamsDecVC1 * mix);
+* mix_videoconfigparamsdec_vc1_ref (MixVideoConfigParamsDecVC1 * mix);

 /**
 * mix_videoconfigparamsdec_vc1_unref:
 * @obj: object to unref.
-*
+*
 * Decrement reference count of the object.
*/
 #define mix_videoconfigparamsdec_vc1_unref(obj) mix_params_unref(MIX_PARAMS(obj))
diff --git a/mix_video/src/mixvideoconfigparamsenc.cpp b/mix_video/src/mixvideoconfigparamsenc.cpp
index efba46f..67a0c5f 100644
--- a/mix_video/src/mixvideoconfigparamsenc.cpp
+++ b/mix_video/src/mixvideoconfigparamsenc.cpp
@@ -17,498 +17,867 @@
 #include
 #include "mixvideolog.h"
 #include "mixvideoconfigparamsenc.h"
-
+#include <string.h>
 #define MDEBUG

-MixVideoConfigParamsEnc *
-mix_videoconfigparamsenc_new(void) {
- return new MixVideoConfigParamsEnc();
-}
-
-
+MixVideoConfigParamsEnc::MixVideoConfigParamsEnc()
+ :profile(MIX_PROFILE_H264BASELINE)
+ ,level(40)
+ ,raw_format(MIX_RAW_TARGET_FORMAT_YUV420)
+ ,rate_control(MIX_RATE_CONTROL_NONE)
+ ,bitrate(0)
+ ,frame_rate_num(30)
+ ,frame_rate_denom(1)
+ ,initial_qp(15)
+ ,min_qp(0)
+ ,target_percentage(95)
+ ,window_size(500)
+ ,intra_period(30)
+ ,picture_width(0)
+ ,picture_height(0)
+ ,mime_type(NULL)
+ ,encode_format(MIX_ENCODE_TARGET_FORMAT_MPEG4)
+ ,mixbuffer_pool_size(0)
+ ,share_buf_mode(FALSE)
+ ,ci_frame_id(NULL)
+ ,ci_frame_num(0)
+ ,CIR_frame_cnt(15)
+ ,max_slice_size(0) /* 0 disables the slice-size limit */
+ ,refresh_type(MIX_VIDEO_NONIR)
+ ,buffer_mode(MIX_BUFFER_SELF_ALLOC_SURFACE)
+ ,buf_info(NULL)
+ ,need_display(TRUE)
+ ,reserved1(NULL)
+ ,reserved2(NULL)
+ ,reserved3(NULL)
+ ,reserved4(NULL) {
+ air_params.air_MBs = 0;
+ air_params.air_threshold = 0;
+ air_params.air_auto = 0;
+}
+
+MixVideoConfigParamsEnc::~MixVideoConfigParamsEnc() {
+ /* free mime_type */
+ if (mime_type)
+ free(mime_type);
+
+ if (ci_frame_id)
+ delete[] ci_frame_id;
+
+ if (buffer_mode == MIX_BUFFER_UPSTREAM_ALLOC_CI) {
+ MixCISharedBufferInfo * ci_tmp = NULL;
+ if (buf_info) {
+ ci_tmp = (MixCISharedBufferInfo *) buf_info;
+ if (ci_tmp->ci_frame_id) {
+ delete [] ci_tmp->ci_frame_id;
+ ci_tmp->ci_frame_id = NULL;
+ }
+
+ delete ci_tmp;
+ ci_tmp = NULL;
+ buf_info = NULL;
+ }
+ }
+}
+
+bool MixVideoConfigParamsEnc::copy(MixParams *target) const {
+ bool ret = FALSE;
+ MixVideoConfigParamsEnc *this_target = MIX_VIDEOCONFIGPARAMSENC(target);
+ MIX_RESULT mix_result = MIX_RESULT_FAIL;
+
+ LOG_V( "Begin\n");
+
+ if (NULL != this_target) {
+ /* copy properties of primitive type */
+ this_target->bitrate = bitrate;
+ this_target->frame_rate_num = frame_rate_num;
+ this_target->frame_rate_denom = frame_rate_denom;
+ this_target->initial_qp = initial_qp;
+ this_target->min_qp = min_qp;
+ this_target->target_percentage = target_percentage;
+ this_target->window_size = window_size;
+ this_target->max_slice_size = max_slice_size;
+ this_target->intra_period = intra_period;
+ this_target->picture_width = picture_width;
+ this_target->picture_height = picture_height;
+ this_target->mixbuffer_pool_size = mixbuffer_pool_size;
+ this_target->share_buf_mode = share_buf_mode;
+ this_target->encode_format = encode_format;
+ this_target->ci_frame_num = ci_frame_num;
+ this_target->draw = draw;
+ this_target->need_display = need_display;
+ this_target->rate_control = rate_control;
+ this_target->raw_format = raw_format;
+ this_target->profile = profile;
+ this_target->level = level;
+ this_target->CIR_frame_cnt = CIR_frame_cnt;
+ this_target->refresh_type = refresh_type;
+ this_target->air_params.air_MBs = air_params.air_MBs;
+ this_target->air_params.air_threshold = air_params.air_threshold;
+ this_target->air_params.air_auto = air_params.air_auto;
+ this_target->buffer_mode = buffer_mode;
+
+ /* copy properties of non-primitive */
+ /* copy mime_type */
+ if (mime_type) {
+#ifdef MDEBUG
+ LOG_I( "mime_type = %s %p\n", mime_type, mime_type);
LOG_I( "mime_type = %s %x\n", mime_type, (unsigned int)mime_type); +#endif + mix_result = mix_videoconfigparamsenc_set_mime_type( + this_target,mime_type); + } else { + LOG_I( "mime_type = NULL\n"); + mix_result = mix_videoconfigparamsenc_set_mime_type(this_target, NULL); + } -MixVideoConfigParamsEnc * -mix_videoconfigparamsenc_ref(MixVideoConfigParamsEnc * mix) { - return (MixVideoConfigParamsEnc *) mix_params_ref(MIX_PARAMS(mix)); -} + if (mix_result != MIX_RESULT_SUCCESS) { + LOG_E( "Failed to mix_videoconfigparamsenc_set_mime_type\n"); + return FALSE; + } -/** - * mix_videoconfigparamsenc_dup: - * @obj: a #MixVideoConfigParamsEnc object - * @returns: a newly allocated duplicate of the object. - * - * Copy duplicate of the object. - */ -MixParams * -mix_videoconfigparamsenc_dup(const MixParams * obj) { - return NULL; -} + mix_result = mix_videoconfigparamsenc_set_ci_frame_info ( + this_target, ci_frame_id, ci_frame_num); + mix_result = mix_videoconfigparamsenc_set_upstream_buffer_info ( + this_target, this_target->buffer_mode, buf_info); -/** - * mix_videoconfigparamsenc_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -gboolean mix_videoconfigparamsenc_copy(MixParams * target, const MixParams * src) { + /* TODO: copy other properties if there's any */ + /* Now chainup base class */ + ret = MixVideoConfigParams::copy(target); + } - return FALSE; + return ret; } +bool MixVideoConfigParamsEnc::equal(MixParams* obj) const { + bool ret = TRUE; + MixVideoConfigParamsEnc *this_obj = MIX_VIDEOCONFIGPARAMSENC(obj); + if (NULL != this_obj) { + /* check the equalitiy of the primitive type properties */ + if (bitrate != this_obj->bitrate) { + goto not_equal; + } -/** - * mix_videoconfigparamsenc_: - * @first: first object to compare - * @second: seond object to compare - * @returns: boolean indicates if instance are equal. - * - * Copy instance data from @src to @target. 
- */
-gboolean mix_videoconfigparamsenc_equal(MixParams * first, MixParams * second) {
-
- gboolean ret = FALSE;
-
- MixVideoConfigParamsEnc *this_first, *this_second;
-
- if (NULL != first && NULL != second) {
-
- // Deep compare
- // Cast the base object to this child object
- this_first = MIX_VIDEOCONFIGPARAMSENC(first);
- this_second = MIX_VIDEOCONFIGPARAMSENC(second);
-
- /* check the equalitiy of the primitive type properties */
- if (this_first->bitrate != this_second->bitrate) {
- goto not_equal;
- }
+ if (frame_rate_num != this_obj->frame_rate_num) {
+ goto not_equal;
+ }
- if (this_first->frame_rate_num != this_second->frame_rate_num) {
- goto not_equal;
- }
+ if (frame_rate_denom != this_obj->frame_rate_denom) {
+ goto not_equal;
+ }
- if (this_first->frame_rate_denom != this_second->frame_rate_denom) {
- goto not_equal;
- }
+ if (initial_qp != this_obj->initial_qp) {
+ goto not_equal;
+ }
- if (this_first->initial_qp != this_second->initial_qp) {
- goto not_equal;
- }
+ if (min_qp != this_obj->min_qp) {
+ goto not_equal;
+ }
- if (this_first->min_qp != this_second->min_qp) {
- goto not_equal;
- }
+ if (target_percentage != this_obj->target_percentage) {
+ goto not_equal;
+ }
- if (this_first->target_percentage != this_second->target_percentage) {
- goto not_equal;
- }
+ if (window_size != this_obj->window_size) {
+ goto not_equal;
+ }
- if (this_first->window_size != this_second->window_size) {
- goto not_equal;
- }
+ if (max_slice_size != this_obj->max_slice_size) {
+ goto not_equal;
+ }
- if (this_first->max_slice_size != this_second->max_slice_size) {
- goto not_equal;
- }
+ if (intra_period != this_obj->intra_period) {
+ goto not_equal;
+ }
- if (this_first->intra_period != this_second->intra_period) {
- goto not_equal;
- }
+ /* either dimension differing makes the resolutions unequal, hence || */
+ if (picture_width != this_obj->picture_width ||
+ picture_height != this_obj->picture_height) {
+ goto not_equal;
+ }
- if (this_first->picture_width != this_second->picture_width
- && this_first->picture_height != this_second->picture_height) {
- goto not_equal;
- }
+ if (encode_format != this_obj->encode_format) {
+ goto not_equal;
+ }
- if (this_first->encode_format != this_second->encode_format) {
- goto not_equal;
- }
+ if (mixbuffer_pool_size != this_obj->mixbuffer_pool_size) {
+ goto not_equal;
+ }
- if (this_first->mixbuffer_pool_size != this_second->mixbuffer_pool_size) {
- goto not_equal;
- }
+ if (share_buf_mode != this_obj->share_buf_mode) {
+ goto not_equal;
+ }
- if (this_first->share_buf_mode != this_second->share_buf_mode) {
- goto not_equal;
- }
+ if (ci_frame_id != this_obj->ci_frame_id) {
+ goto not_equal;
+ }
- if (this_first->ci_frame_id != this_second->ci_frame_id) {
- goto not_equal;
- }
+ if (ci_frame_num != this_obj->ci_frame_num) {
+ goto not_equal;
+ }
- if (this_first->ci_frame_num != this_second->ci_frame_num) {
- goto not_equal;
- }
+ if (draw != this_obj->draw) {
+ goto not_equal;
+ }
- if (this_first->draw != this_second->draw) {
- goto not_equal;
- }
+ if (need_display != this_obj->need_display) {
+ goto not_equal;
+ }
- if (this_first->need_display!= this_second->need_display) {
- goto not_equal;
- }
+ if (rate_control != this_obj->rate_control) {
+ goto not_equal;
+ }
- if (this_first->rate_control != this_second->rate_control) {
+ if (raw_format != this_obj->raw_format) {
 goto not_equal;
 }
- if (this_first->raw_format != this_second->raw_format) {
+ if (profile != this_obj->profile) {
 goto not_equal;
 }
- if (this_first->profile != this_second->profile) {
+ if (level != this_obj->level) {
 goto not_equal;
 }
- if (this_first->level != this_second->level) {
+        if (CIR_frame_cnt != this_obj->CIR_frame_cnt) {
             goto not_equal;
-        }
+        }
-        if (this_first->CIR_frame_cnt != this_second->CIR_frame_cnt) {
+        if (refresh_type != this_obj->refresh_type) {
             goto not_equal;
         }
-        if (this_first->refresh_type != this_second->refresh_type) {
+        if (air_params.air_MBs != this_obj->air_params.air_MBs) {
             goto not_equal;
         }
-        if (this_first->air_params.air_MBs != this_second->air_params.air_MBs) {
+        if (air_params.air_threshold != this_obj->air_params.air_threshold) {
             goto not_equal;
         }
-        if (this_first->air_params.air_threshold != this_second->air_params.air_threshold) {
+        if (air_params.air_auto != this_obj->air_params.air_auto) {
             goto not_equal;
         }
-        if (this_first->air_params.air_auto != this_second->air_params.air_auto) {
+        if (buffer_mode != this_obj->buffer_mode) {
             goto not_equal;
         }
-        /* check the equalitiy of the none-primitive type properties */
+        /* check the equality of the non-primitive type properties */
-        /* compare mime_type */
+        /* compare mime_type */
+        if (mime_type && this_obj->mime_type) {
+            if (strcmp(mime_type, this_obj->mime_type) != 0) {
+                goto not_equal;
+            }
+        } else if (!(!mime_type && !this_obj->mime_type)) {
+            goto not_equal;
+        }
-        if (this_first->mime_type && this_second->mime_type) {
-            if (g_string_equal(this_first->mime_type, this_second->mime_type)
-                != TRUE) {
-                goto not_equal;
-            }
-        } else if (!(!this_first->mime_type && !this_second->mime_type)) {
-            goto not_equal;
-        }
+        /*
+         * TODO: Check the data inside data info
+         */
+        ret = TRUE;
-        ret = TRUE;
+not_equal:
-    not_equal:
+        if (ret != TRUE) {
+            return ret;
+        }
-        if (ret != TRUE) {
-            return ret;
-        }
+        /* chaining up. */
+        ret = MixVideoConfigParams::equal(obj);
-        /* chaining up. */
-        return TRUE;
-    }
+    }
+    return ret;
-    return ret;
+}
+
+MixParams* MixVideoConfigParamsEnc::dup() const {
+    MixParams *ret = new MixVideoConfigParamsEnc();
+    if (NULL != ret) {
+        if (FALSE == copy(ret)) {
+            ret->Unref();
+            ret = NULL;
+        }
+    }
+    return ret;
 }
+MixVideoConfigParamsEnc *
+mix_videoconfigparamsenc_new(void) {
+    return new MixVideoConfigParamsEnc();
+}
+
+MixVideoConfigParamsEnc *
+mix_videoconfigparamsenc_ref(MixVideoConfigParamsEnc * mix) {
+    if (NULL != mix)
+        mix->Ref();
+    return mix;
+}
+
+#define MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT(obj) \
+    if(!obj) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT(obj, prop) \
+    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR(obj, prop, prop2) \
+    if(!obj || !prop || !prop2 ) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \
+
+/* TODO: Add getters and setters for other properties. The following is incomplete */
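Every setter and getter below opens with one of these guard macros. Because MIX_VIDEOCONFIGPARAMSENC() is now a plain reinterpret_cast (see the header hunk further down), MIX_IS_VIDEOCONFIGPARAMSENC(obj) only rejects NULL pointers, so the MIX_RESULT_FAIL branch can no longer fire for a non-NULL object of the wrong dynamic type. In effect a setter guard expands to the following sketch:

    // Effective expansion of MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT(obj)
    // after the cast macro became a reinterpret_cast (no runtime type check).
    if (!obj)
        return MIX_RESULT_NULL_PTR;           // NULL object
    if (!MIX_IS_VIDEOCONFIGPARAMSENC(obj))
        return MIX_RESULT_FAIL;               // unreachable for non-NULL obj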
The following is incomplete */ MIX_RESULT mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj, - const gchar * mime_type) { + const char * mime_type) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); - return MIX_RESULT_SUCCESS; + if (!mime_type) { + return MIX_RESULT_NULL_PTR; + } + + LOG_I( "mime_type = %s %x\n", + mime_type, (unsigned int)mime_type); + + if (obj->mime_type) { + free(obj->mime_type); + obj->mime_type = NULL; + } + + + LOG_I( "mime_type = %s %x\n", + mime_type, (unsigned int)mime_type); + + obj->mime_type = strdup(mime_type); + if (!obj->mime_type) { + return MIX_RESULT_NO_MEMORY; + } + + + LOG_I( "mime_type = %s obj->mime_type = %s\n", + mime_type, obj->mime_type); + + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_get_mime_type(MixVideoConfigParamsEnc * obj, - gchar ** mime_type) { - + char ** mime_type) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, mime_type); + + if (!obj->mime_type) { + *mime_type = NULL; + return MIX_RESULT_SUCCESS; + } + *mime_type = strdup(obj->mime_type); + if (!*mime_type) { + return MIX_RESULT_NO_MEMORY; + } - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_set_frame_rate(MixVideoConfigParamsEnc * obj, - guint frame_rate_num, guint frame_rate_denom) { - - return MIX_RESULT_SUCCESS; + uint frame_rate_num, uint frame_rate_denom) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->frame_rate_num = frame_rate_num; + obj->frame_rate_denom = frame_rate_denom; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_get_frame_rate(MixVideoConfigParamsEnc * obj, - guint * frame_rate_num, guint * frame_rate_denom) { - - return MIX_RESULT_SUCCESS; + uint * frame_rate_num, uint * frame_rate_denom) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, frame_rate_num, frame_rate_denom); + *frame_rate_num = obj->frame_rate_num; + *frame_rate_denom = obj->frame_rate_denom; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_set_picture_res(MixVideoConfigParamsEnc * obj, - guint picture_width, guint picture_height) { - - return MIX_RESULT_SUCCESS; + uint picture_width, uint picture_height) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->picture_width = picture_width; + obj->picture_height = picture_height; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_get_picture_res(MixVideoConfigParamsEnc * obj, - guint * picture_width, guint * picture_height) { - - return MIX_RESULT_SUCCESS; + uint * picture_width, uint * picture_height) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, picture_width, picture_height); + *picture_width = obj->picture_width; + *picture_height = obj->picture_height; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_set_encode_format(MixVideoConfigParamsEnc * obj, - MixEncodeTargetFormat encode_format) { - - return MIX_RESULT_SUCCESS; + MixEncodeTargetFormat encode_format) { + MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj); + obj->encode_format = encode_format; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_get_encode_format (MixVideoConfigParamsEnc * obj, - MixEncodeTargetFormat* encode_format) { - - return MIX_RESULT_SUCCESS; + MixEncodeTargetFormat* encode_format) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, encode_format); + *encode_format = obj->encode_format; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc * obj, - guint bitrate) { - - return 
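Both mime_type paths now copy with strdup() instead of duplicating a GString, so a string obtained from mix_videoconfigparamsenc_get_mime_type() should be released with free(), not g_free() as the older header comment still suggests. A minimal caller sketch (the params object and surrounding error handling are hypothetical):

    // Hypothetical caller: fetch and release the mime type copy.
    char *mime = NULL;
    if (mix_videoconfigparamsenc_get_mime_type(params, &mime) == MIX_RESULT_SUCCESS
            && mime != NULL) {
        LOG_I( "mime_type = %s\n", mime);
        free(mime);   // the getter returned a strdup() copy
    }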
 MIX_RESULT mix_videoconfigparamsenc_set_frame_rate(MixVideoConfigParamsEnc * obj,
-    guint frame_rate_num, guint frame_rate_denom) {
-
-    return MIX_RESULT_SUCCESS;
+    uint frame_rate_num, uint frame_rate_denom) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->frame_rate_num = frame_rate_num;
+    obj->frame_rate_denom = frame_rate_denom;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_frame_rate(MixVideoConfigParamsEnc * obj,
-    guint * frame_rate_num, guint * frame_rate_denom) {
-
-    return MIX_RESULT_SUCCESS;
+    uint * frame_rate_num, uint * frame_rate_denom) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, frame_rate_num, frame_rate_denom);
+    *frame_rate_num = obj->frame_rate_num;
+    *frame_rate_denom = obj->frame_rate_denom;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_picture_res(MixVideoConfigParamsEnc * obj,
-    guint picture_width, guint picture_height) {
-
-    return MIX_RESULT_SUCCESS;
+    uint picture_width, uint picture_height) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->picture_width = picture_width;
+    obj->picture_height = picture_height;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_picture_res(MixVideoConfigParamsEnc * obj,
-    guint * picture_width, guint * picture_height) {
-
-    return MIX_RESULT_SUCCESS;
+    uint * picture_width, uint * picture_height) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, picture_width, picture_height);
+    *picture_width = obj->picture_width;
+    *picture_height = obj->picture_height;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_encode_format(MixVideoConfigParamsEnc * obj,
-    MixEncodeTargetFormat encode_format) {
-
-    return MIX_RESULT_SUCCESS;
+    MixEncodeTargetFormat encode_format) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->encode_format = encode_format;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_encode_format (MixVideoConfigParamsEnc * obj,
-    MixEncodeTargetFormat* encode_format) {
-
-    return MIX_RESULT_SUCCESS;
+    MixEncodeTargetFormat* encode_format) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, encode_format);
+    *encode_format = obj->encode_format;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc * obj,
-    guint bitrate) {
-
-    return MIX_RESULT_SUCCESS;
+    uint bitrate) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->bitrate = bitrate;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc * obj,
-    guint *bitrate) {
-
-    return MIX_RESULT_SUCCESS;
+    uint *bitrate) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, bitrate);
+    *bitrate = obj->bitrate;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj,
-    guint initial_qp) {
-
-    return MIX_RESULT_SUCCESS;
+    uint initial_qp) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->initial_qp = initial_qp;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc * obj,
-    guint *initial_qp) {
-
-    return MIX_RESULT_SUCCESS;
+    uint *initial_qp) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, initial_qp);
+    *initial_qp = obj->initial_qp;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj,
-    guint min_qp) {
-    return MIX_RESULT_SUCCESS;
+    uint min_qp) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->min_qp = min_qp;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj,
-    guint *min_qp) {
-    return MIX_RESULT_NOT_SUPPORTED;
+    uint *min_qp) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, min_qp);
+    *min_qp = obj->min_qp;
+
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_target_percentage (MixVideoConfigParamsEnc * obj,
-    guint target_percentage) {
+    uint target_percentage) {
-    return MIX_RESULT_SUCCESS;
-    }
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->target_percentage = target_percentage;
+    return MIX_RESULT_SUCCESS;
+}
 MIX_RESULT mix_videoconfigparamsenc_get_target_percentage(MixVideoConfigParamsEnc * obj,
-    guint *target_percentage) {
+    uint *target_percentage) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, target_percentage);
+    *target_percentage = obj->target_percentage;
     return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_window_size (MixVideoConfigParamsEnc * obj,
-    guint window_size) {
-
+    uint window_size) {
-    return MIX_RESULT_SUCCESS;
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->window_size = window_size;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_window_size (MixVideoConfigParamsEnc * obj,
-    guint *window_size) {
-
+    uint *window_size) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, window_size);
+    *window_size = obj->window_size;
     return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc * obj,
-    guint intra_period) {
-
+    uint intra_period) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->intra_period = intra_period;
-    return MIX_RESULT_SUCCESS;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_intra_period (MixVideoConfigParamsEnc * obj,
-    guint *intra_period) {
-
+    uint *intra_period) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, intra_period);
+    *intra_period = obj->intra_period;
-    return MIX_RESULT_SUCCESS;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size(
-    MixVideoConfigParamsEnc * obj, guint bufpoolsize) {
+    MixVideoConfigParamsEnc * obj, uint bufpoolsize) {
+
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    return MIX_RESULT_SUCCESS;
+    obj->mixbuffer_pool_size = bufpoolsize;
+    return MIX_RESULT_SUCCESS;
 }
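With the stub bodies filled in, configuring an encoder reduces to a chain of these setters on a fresh params object. A minimal sketch, assuming the MI-X video headers are available and using hypothetical 720p/30fps target values:

    // Hypothetical configuration sequence; every setter returns
    // MIX_RESULT_SUCCESS on success or MIX_RESULT_NULL_PTR on a NULL object.
    MixVideoConfigParamsEnc *cfg = mix_videoconfigparamsenc_new();
    if (cfg != NULL
            && mix_videoconfigparamsenc_set_picture_res(cfg, 1280, 720) == MIX_RESULT_SUCCESS
            && mix_videoconfigparamsenc_set_frame_rate(cfg, 30, 1) == MIX_RESULT_SUCCESS
            && mix_videoconfigparamsenc_set_bit_rate(cfg, 4000000) == MIX_RESULT_SUCCESS
            && mix_videoconfigparamsenc_set_init_qp(cfg, 26) == MIX_RESULT_SUCCESS) {
        // cfg is now ready to hand to the encoder configuration call
    }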
 MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size(
-    MixVideoConfigParamsEnc * obj, guint *bufpoolsize) {
+    MixVideoConfigParamsEnc * obj, uint *bufpoolsize) {
-    return MIX_RESULT_SUCCESS;
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, bufpoolsize);
+    *bufpoolsize = obj->mixbuffer_pool_size;
+    return MIX_RESULT_SUCCESS;
 }
-MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode (MixVideoConfigParamsEnc * obj,
-    gboolean share_buf_mod) {
+MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode (
+    MixVideoConfigParamsEnc * obj, bool share_buf_mod) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    return MIX_RESULT_SUCCESS;
+    obj->share_buf_mode = share_buf_mod;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode(MixVideoConfigParamsEnc * obj,
-    gboolean *share_buf_mod) {
+    bool *share_buf_mod) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, share_buf_mod);
-    return MIX_RESULT_SUCCESS;
+    *share_buf_mod = obj->share_buf_mode;
+    return MIX_RESULT_SUCCESS;
 }
-MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj,
-    gulong * ci_frame_id, guint ci_frame_num) {
+MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(
+    MixVideoConfigParamsEnc * obj, ulong * ci_frame_id, uint ci_frame_num) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+
+    if (!ci_frame_id || !ci_frame_num) {
+        obj->ci_frame_id = NULL;
+        obj->ci_frame_num = 0;
+        return MIX_RESULT_SUCCESS;
+    }
+
+    if (obj->ci_frame_id)
+        delete [] obj->ci_frame_id;
+
+    uint size = ci_frame_num * sizeof (ulong);
+    obj->ci_frame_num = ci_frame_num;
+    obj->ci_frame_id = new ulong[ci_frame_num];
+    if (!(obj->ci_frame_id)) {
+        return MIX_RESULT_NO_MEMORY;
+    }
-    return MIX_RESULT_SUCCESS;
+    memcpy (obj->ci_frame_id, ci_frame_id, size);
+
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * obj,
-    gulong * *ci_frame_id, guint *ci_frame_num) {
+    ulong * *ci_frame_id, uint *ci_frame_num) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, ci_frame_id, ci_frame_num);
+
+    *ci_frame_num = obj->ci_frame_num;
+
+    if (!obj->ci_frame_id) {
+        *ci_frame_id = NULL;
+        return MIX_RESULT_SUCCESS;
+    }
+
+    if (obj->ci_frame_num) {
+        *ci_frame_id = new ulong[obj->ci_frame_num];
+
+        if (!*ci_frame_id) {
+            return MIX_RESULT_NO_MEMORY;
+        }
-    return MIX_RESULT_SUCCESS;
+        memcpy (*ci_frame_id, obj->ci_frame_id, obj->ci_frame_num * sizeof (ulong));
+
+    } else {
+        *ci_frame_id = NULL;
+    }
+
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj,
-    gulong draw) {
+    ulong draw) {
-    return MIX_RESULT_SUCCESS;
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->draw = draw;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_drawable (MixVideoConfigParamsEnc * obj,
-    gulong *draw) {
-
+    ulong *draw) {
-    return MIX_RESULT_SUCCESS;
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, draw);
+    *draw = obj->draw;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_need_display (
-    MixVideoConfigParamsEnc * obj, gboolean need_display) {
+    MixVideoConfigParamsEnc * obj, bool need_display) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    return MIX_RESULT_SUCCESS;
+    obj->need_display = need_display;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * obj,
-    gboolean *need_display) {
+    bool *need_display) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, need_display);
-    return MIX_RESULT_SUCCESS;
+    *need_display = obj->need_display;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj,
-    MixRateControl rate_control) {
-
-    return MIX_RESULT_SUCCESS;
+    MixRateControl rate_control) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->rate_control = rate_control;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * obj,
-    MixRateControl * rate_control) {
-
-    return MIX_RESULT_SUCCESS;
+    MixRateControl * rate_control) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, rate_control);
+    *rate_control = obj->rate_control;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj,
-    MixRawTargetFormat raw_format) {
-
-    return MIX_RESULT_SUCCESS;
+    MixRawTargetFormat raw_format) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->raw_format = raw_format;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * obj,
-    MixRawTargetFormat * raw_format) {
-
-    return MIX_RESULT_SUCCESS;
+    MixRawTargetFormat * raw_format) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, raw_format);
+    *raw_format = obj->raw_format;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj,
-    MixProfile profile) {
-
-    return MIX_RESULT_SUCCESS;
+    MixProfile profile) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->profile = profile;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj,
-    MixProfile * profile) {
-
-    return MIX_RESULT_SUCCESS;
+    MixProfile * profile) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, profile);
+    *profile = obj->profile;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj,
-    guint8 level) {
-
-    return MIX_RESULT_SUCCESS;
+    uint8 level) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->level = level;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj,
-    guint8 * level) {
-
-    return MIX_RESULT_SUCCESS;
+    uint8 * level) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, level);
+    *level = obj->level;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj,
-    guint CIR_frame_cnt) {
-
-    return MIX_RESULT_SUCCESS;
+    uint CIR_frame_cnt) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->CIR_frame_cnt = CIR_frame_cnt;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj,
-    guint * CIR_frame_cnt) {
-
-    return MIX_RESULT_SUCCESS;
+    uint * CIR_frame_cnt) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, CIR_frame_cnt);
+    *CIR_frame_cnt = obj->CIR_frame_cnt;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_max_slice_size (MixVideoConfigParamsEnc * obj,
-    guint max_slice_size) {
-
-    return MIX_RESULT_SUCCESS;
+    uint max_slice_size) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->max_slice_size = max_slice_size;
+    return MIX_RESULT_SUCCESS;
 }
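mix_videoconfigparamsenc_set_ci_frame_info() above deep-copies the ID array with new ulong[], and the getter hands back another fresh copy, so (despite the g_free note still present in the header docs) the caller releases it with delete []. A round-trip sketch with hypothetical frame IDs, reusing the cfg object from the earlier sketch:

    // Hypothetical CI frame IDs; the library keeps its own private copy.
    ulong ids[2] = { 0x100, 0x101 };
    mix_videoconfigparamsenc_set_ci_frame_info(cfg, ids, 2);

    ulong *out_ids = NULL;
    uint out_num = 0;
    if (mix_videoconfigparamsenc_get_ci_frame_info(cfg, &out_ids, &out_num) == MIX_RESULT_SUCCESS
            && out_ids != NULL) {
        // use out_ids[0 .. out_num-1] ...
        delete [] out_ids;   // copy was allocated with new ulong[], not g_malloc
    }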
 MIX_RESULT mix_videoconfigparamsenc_get_max_slice_size (MixVideoConfigParamsEnc * obj,
-    guint * max_slice_size) {
-
-    return MIX_RESULT_SUCCESS;
+    uint * max_slice_size) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, max_slice_size);
+    *max_slice_size = obj->max_slice_size;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_refresh_type(MixVideoConfigParamsEnc * obj,
-    MixVideoIntraRefreshType refresh_type) {
-
-    return MIX_RESULT_SUCCESS;
+    MixVideoIntraRefreshType refresh_type) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->refresh_type = refresh_type;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_refresh_type (MixVideoConfigParamsEnc * obj,
-    MixVideoIntraRefreshType * refresh_type) {
-
-    return MIX_RESULT_SUCCESS;
+    MixVideoIntraRefreshType * refresh_type) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, refresh_type);
+    *refresh_type = obj->refresh_type;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_set_AIR_params (MixVideoConfigParamsEnc * obj,
-    MixAIRParams air_params) {
-
-    return MIX_RESULT_SUCCESS;
+    MixAIRParams air_params) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->air_params.air_MBs = air_params.air_MBs;
+    obj->air_params.air_threshold = air_params.air_threshold;
+    obj->air_params.air_auto = air_params.air_auto;
+    return MIX_RESULT_SUCCESS;
 }
 MIX_RESULT mix_videoconfigparamsenc_get_AIR_params (MixVideoConfigParamsEnc * obj,
-    MixAIRParams * air_params) {
+    MixAIRParams * air_params) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, air_params);
+    air_params->air_MBs = obj->air_params.air_MBs;
+    air_params->air_threshold = obj->air_params.air_threshold;
+    air_params->air_auto = obj->air_params.air_auto;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_set_buffer_mode (MixVideoConfigParamsEnc * obj,
+    MixBufferAllocationMode buffer_mode) {
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+    obj->buffer_mode = buffer_mode;
+    return MIX_RESULT_SUCCESS;
+}
-    return MIX_RESULT_SUCCESS;
+MIX_RESULT mix_videoconfigparamsenc_get_buffer_mode (MixVideoConfigParamsEnc * obj,
+    MixBufferAllocationMode * buffer_mode) {
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, buffer_mode);
+    *buffer_mode = obj->buffer_mode;
+    return MIX_RESULT_SUCCESS;
+}
+
+/*
+ * Currently we use void* for buf_info; this will change to the union already defined in mixvideodef.h.
+ */
+MIX_RESULT mix_videoconfigparamsenc_set_upstream_buffer_info (MixVideoConfigParamsEnc * obj,
+    MixBufferAllocationMode buffer_mode, void * buf_info) {
+
+    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+
+    if (!buf_info) {
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    switch (buffer_mode) {
+        case MIX_BUFFER_UPSTREAM_ALLOC_CI:
+        {
+            MixCISharedBufferInfo * ci_tmp = NULL;
+            MixCISharedBufferInfo * ci_info_in = (MixCISharedBufferInfo *) buf_info;
+
+            if (obj->buf_info) {
+                ci_tmp = (MixCISharedBufferInfo *) obj->buf_info;
+                if (ci_tmp->ci_frame_id) {
+                    delete [] ci_tmp->ci_frame_id;
+                    ci_tmp->ci_frame_id = NULL;
+                }
+
+                delete ci_tmp;
+                ci_tmp = NULL;
+                obj->buf_info = NULL;
+            }
+
+            ci_tmp = new MixCISharedBufferInfo;
+            if (!ci_tmp) {
+                return MIX_RESULT_NO_MEMORY;
+            }
+
+            ci_tmp->ci_frame_cnt = ci_info_in->ci_frame_cnt;
+            ci_tmp->ci_frame_id = NULL;
+
+            ci_tmp->ci_frame_id = new ulong[ci_tmp->ci_frame_cnt];
+            if (!ci_tmp->ci_frame_id) {
+                delete ci_tmp; /* do not leak the partially built descriptor */
+                return MIX_RESULT_NO_MEMORY;
+            }
+
+            memcpy (ci_tmp->ci_frame_id, ci_info_in->ci_frame_id, ci_tmp->ci_frame_cnt * sizeof (ulong));
+            obj->buf_info = (void *) ci_tmp;
+        }
+        break;
+        case MIX_BUFFER_UPSTREAM_ALLOC_V4L2:
+            break;
+        case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE:
+            break;
+        default:
+            return MIX_RESULT_FAIL; //FIXME
+    }
+    return MIX_RESULT_SUCCESS;
+}
+
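The CI branch above deep-copies the caller's MixCISharedBufferInfo (count plus ID array), so the caller keeps ownership of the descriptor it passes in; the V4L2 and surface branches are still empty. A hypothetical CI-mode setup, continuing with the cfg object from the earlier sketches:

    // Hypothetical upstream-CI setup; the setter stores a private deep copy.
    ulong frame_ids[2] = { 0x200, 0x201 };
    MixCISharedBufferInfo ci_info;
    ci_info.ci_frame_cnt = 2;
    ci_info.ci_frame_id = frame_ids;
    mix_videoconfigparamsenc_set_buffer_mode(cfg, MIX_BUFFER_UPSTREAM_ALLOC_CI);
    mix_videoconfigparamsenc_set_upstream_buffer_info(cfg, MIX_BUFFER_UPSTREAM_ALLOC_CI, &ci_info);

+MIX_RESULT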
mix_videoconfigparamsenc_get_upstream_buffer_info (MixVideoConfigParamsEnc * obj, + MixBufferAllocationMode buffer_mode, void ** buf_info) { + MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, buf_info); + + switch (buffer_mode) { + case MIX_BUFFER_UPSTREAM_ALLOC_CI: + { + MixCISharedBufferInfo * ci_tmp = (MixCISharedBufferInfo *) (obj->buf_info); + MixCISharedBufferInfo * ci_info_out = NULL; + + if (!ci_tmp) { + return MIX_RESULT_NULL_PTR; + } + + if (!(ci_tmp->ci_frame_id) || !(ci_tmp->ci_frame_cnt)) { + return MIX_RESULT_NULL_PTR; + } + + ci_info_out = new MixCISharedBufferInfo; + if (!ci_info_out) { + return MIX_RESULT_NO_MEMORY; + } + + ci_info_out->ci_frame_cnt = ci_tmp->ci_frame_cnt; + ci_info_out->ci_frame_id = NULL; + + ci_info_out->ci_frame_id = new ulong[ci_info_out->ci_frame_cnt]; + if (!ci_info_out->ci_frame_id) { + return MIX_RESULT_NO_MEMORY; + } + + memcpy (ci_info_out->ci_frame_id, ci_tmp->ci_frame_id, ci_info_out->ci_frame_cnt * sizeof (ulong)); + *buf_info = (MixCISharedBufferInfo *) ci_info_out; + } + break; + case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: + break; + case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: + break; + default: + return MIX_RESULT_FAIL; //FIXME + } + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoconfigparamsenc.h b/mix_video/src/mixvideoconfigparamsenc.h index ceac4a6..00d270d 100644 --- a/mix_video/src/mixvideoconfigparamsenc.h +++ b/mix_video/src/mixvideoconfigparamsenc.h @@ -16,7 +16,6 @@ * MIX_VIDEOCONFIGPARAMSENC: * @obj: object to be type-casted. */ -//#define MIX_VIDEOCONFIGPARAMSENC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEnc)) #define MIX_VIDEOCONFIGPARAMSENC(obj) (reinterpret_cast(obj)) /** @@ -25,29 +24,7 @@ * * Checks if the given object is an instance of #MixParams */ -//#define MIX_IS_VIDEOCONFIGPARAMSENC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC)) - -/** - * MIX_VIDEOCONFIGPARAMSENC_CLASS: - * @klass: class to be type-casted. - */ -//#define MIX_VIDEOCONFIGPARAMSENC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEncClass)) - -/** - * MIX_IS_VIDEOCONFIGPARAMSENC_CLASS: - * @klass: a class. - * - * Checks if the given class is #MixParamsClass - */ -//#define MIX_IS_VIDEOCONFIGPARAMSENC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC)) - -/** - * MIX_VIDEOCONFIGPARAMSENC_GET_CLASS: - * @obj: a #MixParams object. - * - * Get the class instance of the object. 
- */ -//#define MIX_VIDEOCONFIGPARAMSENC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEncClass)) +#define MIX_IS_VIDEOCONFIGPARAMSENC(obj) (NULL != MIX_VIDEOCONFIGPARAMSENC(obj)) @@ -57,107 +34,118 @@ * MI-X VideoConfig Parameter object */ class MixVideoConfigParamsEnc : public MixVideoConfigParams { - /*< public > */ - //MixIOVec header; - - /* the type of the following members will be changed after MIX API doc is ready */ public: - /* Encoding profile */ - MixProfile profile; + MixVideoConfigParamsEnc(); + virtual ~MixVideoConfigParamsEnc(); + virtual bool copy(MixParams *target) const; + virtual bool equal(MixParams* obj) const; + virtual MixParams* dup() const; +public: + /*< public > */ + //MixIOVec header; + + /* the type of the following members will be changed after MIX API doc is ready */ + + /* Encoding profile */ + MixProfile profile; - guint8 level; + uint8 level; - /* Raw format to be encoded */ - MixRawTargetFormat raw_format; + /* Raw format to be encoded */ + MixRawTargetFormat raw_format; - /* Rate control mode */ - MixRateControl rate_control; + /* Rate control mode */ + MixRateControl rate_control; - /* Bitrate when rate control is used */ - guint bitrate; + /* Bitrate when rate control is used */ + uint bitrate; - /* Numerator of frame rate */ - guint frame_rate_num; + /* Numerator of frame rate */ + uint frame_rate_num; - /* Denominator of frame rate */ - guint frame_rate_denom; + /* Denominator of frame rate */ + uint frame_rate_denom; - /* The initial QP value */ - guint initial_qp; + /* The initial QP value */ + uint initial_qp; - /* The minimum QP value */ - guint min_qp; + /* The minimum QP value */ + uint min_qp; - /* this is the bit-rate the rate control is targeting, as a percentage of the maximum bit-rate - * for example if target_percentage is 95 then the rate control will target a bit-rate that is - * 95% of the maximum bit-rate - */ - guint target_percentage; + /* this is the bit-rate the rate control is targeting, as a percentage of the maximum bit-rate + * for example if target_percentage is 95 then the rate control will target a bit-rate that is + * 95% of the maximum bit-rate + */ + uint target_percentage; - /* windows size in milliseconds. For example if this is set to 500, then the rate control will guarantee the */ - guint window_size; + /* windows size in milliseconds. 
For example if this is set to 500, then the rate control will guarantee the */ + uint window_size; - /* Number of frames between key frames (GOP size) */ - guint intra_period; + /* Number of frames between key frames (GOP size) */ + uint intra_period; - /* Width of video frame */ - guint16 picture_width; + /* Width of video frame */ + uint16 picture_width; - /* Height of the video frame */ - guint16 picture_height; + /* Height of the video frame */ + uint16 picture_height; - /* Mime type, reserved */ - GString * mime_type; + /* Mime type, reserved */ + char * mime_type; - /* Encode target format */ - MixEncodeTargetFormat encode_format; + /* Encode target format */ + MixEncodeTargetFormat encode_format; - /* Size of the pool of MixBuffer objects */ - guint mixbuffer_pool_size; + /* Size of the pool of MixBuffer objects */ + uint mixbuffer_pool_size; - /* Are buffers shared between capture and encoding drivers */ - gboolean share_buf_mode; + /* Are buffers shared between capture and encoding drivers */ + bool share_buf_mode; - /* Array of frame IDs created by capture library */ - gulong * ci_frame_id; + /* Array of frame IDs created by capture library */ + ulong * ci_frame_id; - /* Size of the array ci_frame_id */ - guint ci_frame_num; + /* Size of the array ci_frame_id */ + uint ci_frame_num; - guint CIR_frame_cnt; + uint CIR_frame_cnt; /* The maximum slice size to be set to video driver (in bits). * The encoder hardware will try to make sure the single slice does not exceed this size * If not, mix_video_encode() will report a specific error */ - guint max_slice_size; + uint max_slice_size; - MixVideoIntraRefreshType refresh_type; + MixVideoIntraRefreshType refresh_type; - MixAIRParams air_params; + MixAIRParams air_params; - /* < private > */ - gulong draw; + MixBufferAllocationMode buffer_mode; + void * buf_info; - /*< public > */ + /* < private > */ + ulong draw; - /* Indicates whether MixVideoFrames suitable for displaying - * need to be enqueued for retrieval using mix_video_get_frame() */ - gboolean need_display; + /*< public > */ - /* Reserved for future use */ - void *reserved1; + /* Indicates whether MixVideoFrames suitable for displaying + * need to be enqueued for retrieval using mix_video_get_frame() */ + bool need_display; - /* Reserved for future use */ - void *reserved2; + /* Reserved for future use */ + void *reserved1; - /* Reserved for future use */ - void *reserved3; + /* Reserved for future use */ + void *reserved2; - /* Reserved for future use */ - void *reserved4; + /* Reserved for future use */ + void *reserved3; + + /* Reserved for future use */ + void *reserved4; }; + /** * mix_videoconfigparamsenc_new: * @returns: A newly allocated instance of #MixVideoConfigParamsEnc @@ -165,6 +153,7 @@ public: * Use this method to create new instance of #MixVideoConfigParamsEnc */ MixVideoConfigParamsEnc *mix_videoconfigparamsenc_new(void); + /** * mix_videoconfigparamsenc_ref: * @mix: object to add reference @@ -192,8 +181,8 @@ MixVideoConfigParamsEnc *mix_videoconfigparamsenc_ref(MixVideoConfigParamsEnc * * * Set mime type */ -MIX_RESULT mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj, - const gchar * mime_type); +MIX_RESULT mix_videoconfigparamsenc_set_mime_type( + MixVideoConfigParamsEnc * obj, const char * mime_type); /** * mix_videoconfigparamsenc_get_mime_type: @@ -207,8 +196,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj, * Caller is responsible to g_free *mime_type * */ -MIX_RESULT 
mix_videoconfigparamsenc_get_mime_type(MixVideoConfigParamsEnc * obj, - gchar ** mime_type); +MIX_RESULT mix_videoconfigparamsenc_get_mime_type( + MixVideoConfigParamsEnc * obj, char ** mime_type); /** @@ -220,8 +209,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_mime_type(MixVideoConfigParamsEnc * obj, * * Set frame rate */ -MIX_RESULT mix_videoconfigparamsenc_set_frame_rate(MixVideoConfigParamsEnc * obj, - guint frame_rate_num, guint frame_rate_denom); +MIX_RESULT mix_videoconfigparamsenc_set_frame_rate( + MixVideoConfigParamsEnc * obj, uint frame_rate_num, uint frame_rate_denom); /** * mix_videoconfigparamsenc_get_frame_rate: @@ -232,8 +221,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_frame_rate(MixVideoConfigParamsEnc * obj * * Get frame rate */ -MIX_RESULT mix_videoconfigparamsenc_get_frame_rate(MixVideoConfigParamsEnc * obj, - guint * frame_rate_num, guint * frame_rate_denom); +MIX_RESULT mix_videoconfigparamsenc_get_frame_rate( + MixVideoConfigParamsEnc * obj, uint * frame_rate_num, uint * frame_rate_denom); /** * mix_videoconfigparamsenc_set_picture_res: @@ -244,8 +233,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_frame_rate(MixVideoConfigParamsEnc * obj * * Set width and height of video frame */ -MIX_RESULT mix_videoconfigparamsenc_set_picture_res(MixVideoConfigParamsEnc * obj, - guint picture_width, guint picture_height); +MIX_RESULT mix_videoconfigparamsenc_set_picture_res( + MixVideoConfigParamsEnc * obj, uint picture_width, uint picture_height); /** * mix_videoconfigparamsenc_get_picture_res: @@ -256,8 +245,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_picture_res(MixVideoConfigParamsEnc * ob * * Get width and height of video frame */ -MIX_RESULT mix_videoconfigparamsenc_get_picture_res(MixVideoConfigParamsEnc * obj, - guint * picture_width, guint * picture_height); +MIX_RESULT mix_videoconfigparamsenc_get_picture_res( + MixVideoConfigParamsEnc * obj, uint * picture_width, uint * picture_height); /** * mix_videoconfigparamsenc_set_encode_format: @@ -267,8 +256,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_picture_res(MixVideoConfigParamsEnc * ob * * Set Encode target format */ -MIX_RESULT mix_videoconfigparamsenc_set_encode_format (MixVideoConfigParamsEnc * obj, - MixEncodeTargetFormat encode_format); +MIX_RESULT mix_videoconfigparamsenc_set_encode_format ( + MixVideoConfigParamsEnc * obj, MixEncodeTargetFormat encode_format); /** * mix_videoconfigparamsenc_get_encode_format: @@ -278,8 +267,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_encode_format (MixVideoConfigParamsEnc * * * Get Encode target format */ -MIX_RESULT mix_videoconfigparamsenc_get_encode_format (MixVideoConfigParamsEnc * obj, - MixEncodeTargetFormat * encode_format); +MIX_RESULT mix_videoconfigparamsenc_get_encode_format ( + MixVideoConfigParamsEnc * obj, MixEncodeTargetFormat * encode_format); /** * mix_videoconfigparamsenc_set_bit_rate: @@ -289,8 +278,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_encode_format (MixVideoConfigParamsEnc * * * Set bitrate */ -MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc * obj, - guint bps); +MIX_RESULT mix_videoconfigparamsenc_set_bit_rate ( + MixVideoConfigParamsEnc * obj, uint bps); /** * mix_videoconfigparamsenc_get_bit_rate: @@ -300,8 +289,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc * obj, * * Get bitrate */ -MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc * obj, - guint *bps); +MIX_RESULT mix_videoconfigparamsenc_get_bit_rate ( + MixVideoConfigParamsEnc * obj, uint *bps); /** * 
mix_videoconfigparamsenc_set_init_qp: @@ -311,8 +300,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc * obj, * * Set The initial QP value */ -MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj, - guint initial_qp); +MIX_RESULT mix_videoconfigparamsenc_set_init_qp ( + MixVideoConfigParamsEnc * obj, uint initial_qp); /** * mix_videoconfigparamsenc_get_init_qp: @@ -322,8 +311,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj, * * Get The initial QP value */ -MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc * obj, - guint *initial_qp); +MIX_RESULT mix_videoconfigparamsenc_get_init_qp ( + MixVideoConfigParamsEnc * obj, uint *initial_qp); /** * mix_videoconfigparamsenc_set_min_qp: @@ -333,8 +322,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc * obj, * * Set The minimum QP value */ -MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj, - guint min_qp); +MIX_RESULT mix_videoconfigparamsenc_set_min_qp ( + MixVideoConfigParamsEnc * obj, uint min_qp); /** * mix_videoconfigparamsenc_get_min_qp: @@ -344,8 +333,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj, * * Get The minimum QP value */ -MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj, - guint *min_qp); +MIX_RESULT mix_videoconfigparamsenc_get_min_qp( + MixVideoConfigParamsEnc * obj, uint *min_qp); /** @@ -356,8 +345,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj, * * Set The target percentage value */ -MIX_RESULT mix_videoconfigparamsenc_set_target_percentage (MixVideoConfigParamsEnc * obj, - guint target_percentage); +MIX_RESULT mix_videoconfigparamsenc_set_target_percentage ( + MixVideoConfigParamsEnc * obj, uint target_percentage); /** * mix_videoconfigparamsenc_get_target_percentage: @@ -367,8 +356,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_target_percentage (MixVideoConfigParamsE * * Get The target percentage value */ -MIX_RESULT mix_videoconfigparamsenc_get_target_percentage(MixVideoConfigParamsEnc * obj, - guint *target_percentage); +MIX_RESULT mix_videoconfigparamsenc_get_target_percentage( + MixVideoConfigParamsEnc * obj, uint *target_percentage); /** * mix_videoconfigparamsenc_set_window_size: @@ -378,8 +367,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_target_percentage(MixVideoConfigParamsEn * * Set The window size value */ -MIX_RESULT mix_videoconfigparamsenc_set_window_size (MixVideoConfigParamsEnc * obj, - guint window_size); +MIX_RESULT mix_videoconfigparamsenc_set_window_size ( + MixVideoConfigParamsEnc * obj, uint window_size); /** * mix_videoconfigparamsenc_get_window_size: @@ -389,8 +378,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_window_size (MixVideoConfigParamsEnc * o * * Get The window size value */ -MIX_RESULT mix_videoconfigparamsenc_get_window_size (MixVideoConfigParamsEnc * obj, - guint *window_size); +MIX_RESULT mix_videoconfigparamsenc_get_window_size ( + MixVideoConfigParamsEnc * obj, uint *window_size); /** * mix_videoconfigparamsenc_set_intra_period: @@ -400,8 +389,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_window_size (MixVideoConfigParamsEnc * o * * Set Number of frames between key frames (GOP size) */ -MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc * obj, - guint intra_period); +MIX_RESULT mix_videoconfigparamsenc_set_intra_period ( + MixVideoConfigParamsEnc * obj, uint intra_period); /** * 
mix_videoconfigparamsenc_get_intra_period: @@ -411,8 +400,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc * * * Get Number of frames between key frames (GOP size) */ -MIX_RESULT mix_videoconfigparamsenc_get_intra_period (MixVideoConfigParamsEnc * obj, - guint *intra_period); +MIX_RESULT mix_videoconfigparamsenc_get_intra_period ( + MixVideoConfigParamsEnc * obj, uint *intra_period); /** * mix_videoconfigparamsenc_set_buffer_pool_size: @@ -422,8 +411,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_intra_period (MixVideoConfigParamsEnc * * * Set Size of the pool of #MixBuffer objects */ -MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size(MixVideoConfigParamsEnc * obj, - guint bufpoolsize); +MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size( + MixVideoConfigParamsEnc * obj, uint bufpoolsize); /** * mix_videoconfigparamsenc_set_buffer_pool_size: @@ -433,32 +422,32 @@ MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size(MixVideoConfigParamsEnc * * Get Size of the pool of #MixBuffer objects */ -MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size(MixVideoConfigParamsEnc * obj, - guint *bufpoolsize); +MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size( + MixVideoConfigParamsEnc * obj, uint *bufpoolsize); /** * mix_videoconfigparamsenc_set_share_buf_mode: * @obj: #MixVideoConfigParamsEnc object * @share_buf_mod: A flag to indicate whether buffers are shared - * between capture and encoding drivers or not + * between capture and encoding drivers or not * @returns: Common Video Error Return Codes * * Set the flag that indicates whether buffers are shared between capture and encoding drivers or not */ -MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode (MixVideoConfigParamsEnc * obj, - gboolean share_buf_mod); +MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode ( + MixVideoConfigParamsEnc * obj, bool share_buf_mod); /** * mix_videoconfigparamsenc_get_share_buf_mode: * @obj: #MixVideoConfigParamsEnc object * @share_buf_mod: the flag to be returned that indicates whether buffers - * are shared between capture and encoding drivers or not + * are shared between capture and encoding drivers or not * @returns: Common Video Error Return Codes * * Get the flag that indicates whether buffers are shared between capture and encoding drivers or not */ -MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode(MixVideoConfigParamsEnc * obj, - gboolean *share_buf_mod); +MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode( + MixVideoConfigParamsEnc * obj, bool *share_buf_mod); /** * mix_videoconfigparamsenc_set_ci_frame_info: @@ -469,8 +458,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode(MixVideoConfigParamsEnc * * * Set CI frame information */ -MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj, - gulong * ci_frame_id, guint ci_frame_num); +MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info( + MixVideoConfigParamsEnc * obj, ulong * ci_frame_id, uint ci_frame_num); /** * mix_videoconfigparamsenc_get_ci_frame_info: @@ -484,8 +473,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * * Caller is responsible to g_free *ci_frame_id * */ -MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * obj, - gulong * *ci_frame_id, guint *ci_frame_num); +MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info ( + MixVideoConfigParamsEnc * obj, ulong * *ci_frame_id, uint *ci_frame_num); /** @@ -496,8 +485,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info 
(MixVideoConfigParamsEnc * * * Set drawable */ -MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj, - gulong draw); +MIX_RESULT mix_videoconfigparamsenc_set_drawable ( + MixVideoConfigParamsEnc * obj, ulong draw); /** * mix_videoconfigparamsenc_get_drawable: @@ -507,35 +496,35 @@ MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj, * * Get drawable */ -MIX_RESULT mix_videoconfigparamsenc_get_drawable (MixVideoConfigParamsEnc * obj, - gulong *draw); +MIX_RESULT mix_videoconfigparamsenc_get_drawable ( + MixVideoConfigParamsEnc * obj, ulong *draw); /** * mix_videoconfigparamsenc_set_need_display: * @obj: #MixVideoConfigParamsEnc object * @need_display: Flag to indicates whether MixVideoFrames suitable for displaying - * need to be enqueued for retrieval using mix_video_get_frame() + * need to be enqueued for retrieval using mix_video_get_frame() * @returns: Common Video Error Return Codes * * Set the flag used to indicate whether MixVideoFrames suitable for displaying * need to be enqueued for retrieval using mix_video_get_frame() */ MIX_RESULT mix_videoconfigparamsenc_set_need_display ( - MixVideoConfigParamsEnc * obj, gboolean need_display); + MixVideoConfigParamsEnc * obj, bool need_display); /** * mix_videoconfigparamsenc_get_need_display: * @obj: #MixVideoConfigParamsEnc object * @need_display: A flag to be returned to indicates whether MixVideoFrames suitable for displaying - * need to be enqueued for retrieval using mix_video_get_frame() + * need to be enqueued for retrieval using mix_video_get_frame() * @returns: Common Video Error Return Codes * * Get the flag used to indicate whether MixVideoFrames suitable for displaying * need to be enqueued for retrieval using mix_video_get_frame() */ -MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * obj, - gboolean *need_display); +MIX_RESULT mix_videoconfigparamsenc_get_need_display( + MixVideoConfigParamsEnc * obj, bool *need_display); /** * mix_videoconfigparamsenc_set_rate_control: @@ -545,8 +534,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * o * * Set Rate control mode */ -MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj, - MixRateControl rcmode); +MIX_RESULT mix_videoconfigparamsenc_set_rate_control( + MixVideoConfigParamsEnc * obj, MixRateControl rcmode); /** * mix_videoconfigparamsenc_set_rate_control: @@ -556,8 +545,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * o * * Get Rate control mode */ -MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * obj, - MixRateControl * rcmode); +MIX_RESULT mix_videoconfigparamsenc_get_rate_control( + MixVideoConfigParamsEnc * obj, MixRateControl * rcmode); /** * mix_videoconfigparamsenc_set_raw_format: @@ -567,8 +556,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * o * * Set Raw format to be encoded */ -MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj, - MixRawTargetFormat raw_format); +MIX_RESULT mix_videoconfigparamsenc_set_raw_format ( + MixVideoConfigParamsEnc * obj, MixRawTargetFormat raw_format); /** * mix_videoconfigparamsenc_get_raw_format: @@ -578,8 +567,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * ob * * Get Raw format */ -MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * obj, - MixRawTargetFormat * raw_format); +MIX_RESULT 
mix_videoconfigparamsenc_get_raw_format ( + MixVideoConfigParamsEnc * obj, MixRawTargetFormat * raw_format); /** * mix_videoconfigparamsenc_set_profile: @@ -589,8 +578,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * ob * * Set Encoding profile */ -MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj, - MixProfile profile); +MIX_RESULT mix_videoconfigparamsenc_set_profile ( + MixVideoConfigParamsEnc * obj, MixProfile profile); /** * mix_videoconfigparamsenc_get_profile: @@ -600,8 +589,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj, * * Get Encoding profile */ -MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj, - MixProfile * profile); +MIX_RESULT mix_videoconfigparamsenc_get_profile ( + MixVideoConfigParamsEnc * obj, MixProfile * profile); /** @@ -612,8 +601,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj, * * Set Encoding level */ -MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, - guint8 level); +MIX_RESULT mix_videoconfigparamsenc_set_level ( + MixVideoConfigParamsEnc * obj, uint8 level); /** @@ -625,8 +614,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, * Get Encoding level */ -MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, - guint8 * level); +MIX_RESULT mix_videoconfigparamsenc_get_level ( + MixVideoConfigParamsEnc * obj, uint8 * level); /** @@ -637,8 +626,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, * * Set Encoding CIR frame count */ -MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, - guint CIR_frame_cnt); +MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt ( + MixVideoConfigParamsEnc * obj, uint CIR_frame_cnt); /** * mix_videoconfigparamsenc_set_CIR_frame_cnt: @@ -649,8 +638,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * * Get Encoding CIR frame count */ -MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, - guint * CIR_frame_cnt); +MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt ( + MixVideoConfigParamsEnc * obj, uint * CIR_frame_cnt); /** @@ -661,8 +650,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * * * Set Maximum encoded slice size */ -MIX_RESULT mix_videoconfigparamsenc_set_max_slice_size (MixVideoConfigParamsEnc * obj, - guint max_slice_size); +MIX_RESULT mix_videoconfigparamsenc_set_max_slice_size ( + MixVideoConfigParamsEnc * obj, uint max_slice_size); /** * mix_videoconfigparamsenc_get_max_slice_size: @@ -673,8 +662,8 @@ MIX_RESULT mix_videoconfigparamsenc_set_max_slice_size (MixVideoConfigParamsEnc * Get Maximum encoded slice size */ -MIX_RESULT mix_videoconfigparamsenc_get_max_slice_size (MixVideoConfigParamsEnc * obj, - guint * max_slice_size); +MIX_RESULT mix_videoconfigparamsenc_get_max_slice_size ( + MixVideoConfigParamsEnc * obj, uint * max_slice_size); /** @@ -685,8 +674,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_max_slice_size (MixVideoConfigParamsEnc * * Set Intra Refresh Type */ -MIX_RESULT mix_videoconfigparamsenc_set_refresh_type (MixVideoConfigParamsEnc * obj, - MixVideoIntraRefreshType refresh_type); +MIX_RESULT mix_videoconfigparamsenc_set_refresh_type ( + MixVideoConfigParamsEnc * obj, MixVideoIntraRefreshType refresh_type); /** * mix_videoconfigparamsenc_get_refresh_type: @@ -697,8 +686,8 @@ MIX_RESULT 
mix_videoconfigparamsenc_set_refresh_type (MixVideoConfigParamsEnc * * Get Intra Refresh Type */ -MIX_RESULT mix_videoconfigparamsenc_get_refresh_type (MixVideoConfigParamsEnc * obj, - MixVideoIntraRefreshType * refresh_type); +MIX_RESULT mix_videoconfigparamsenc_get_refresh_type ( + MixVideoConfigParamsEnc * obj, MixVideoIntraRefreshType * refresh_type); /** * mix_videoconfigparamsenc_set_AIR_params: @@ -708,8 +697,8 @@ MIX_RESULT mix_videoconfigparamsenc_get_refresh_type (MixVideoConfigParamsEnc * * * Set AIR parameters */ -MIX_RESULT mix_videoconfigparamsenc_set_AIR_params (MixVideoConfigParamsEnc * obj, - MixAIRParams air_params); +MIX_RESULT mix_videoconfigparamsenc_set_AIR_params ( + MixVideoConfigParamsEnc * obj, MixAIRParams air_params); /** * mix_videoconfigparamsenc_get_AIR_params: @@ -720,11 +709,57 @@ MIX_RESULT mix_videoconfigparamsenc_set_AIR_params (MixVideoConfigParamsEnc * ob * Get AIR parameters */ -MIX_RESULT mix_videoconfigparamsenc_get_AIR_params (MixVideoConfigParamsEnc * obj, - MixAIRParams * air_params); +MIX_RESULT mix_videoconfigparamsenc_get_AIR_params ( + MixVideoConfigParamsEnc * obj, MixAIRParams * air_params); -/* TODO: Add getters and setters for other properties */ +/** + * mix_videoconfigparamsenc_set_buffer_mode: + * @obj: #MixVideoConfigParamsEnc object + * @buffer_mode: Buffer allocation mode + * @returns: Common Video Error Return Codes + * + * Set buffer allocation mode + */ +MIX_RESULT mix_videoconfigparamsenc_set_buffer_mode ( + MixVideoConfigParamsEnc * obj, MixBufferAllocationMode buffer_mode); + +/** + * mix_videoconfigparamsenc_get_buffer_mode: + * @obj: #MixVideoConfigParamsEnc object + * @buffer_mode: Buffer allocation mode + * @returns: Common Video Error Return Codes + * + * Get buffer allocation mode + */ +MIX_RESULT mix_videoconfigparamsenc_get_buffer_mode ( + MixVideoConfigParamsEnc * obj, MixBufferAllocationMode * buffer_mode); +/** + * mix_videoconfigparamsenc_set_upstream_buffer_info: + * @obj: #MixVideoConfigParamsEnc object + * @buffer_mode: Buffer allocation mode + * @buf_info: Buffer information + * @returns: Common Video Error Return Codes + * + * Set buffer information according to the buffer mode + */ + +MIX_RESULT mix_videoconfigparamsenc_set_upstream_buffer_info ( + MixVideoConfigParamsEnc * obj, MixBufferAllocationMode buffer_mode, void * buf_info); + +/** + * mix_videoconfigparamsenc_get_upstream_buffer_info: + * @obj: #MixVideoConfigParamsEnc object + * @buffer_mode: Buffer allocation mode + * @buf_info: Buffer information + * @returns: Common Video Error Return Codes + * + * Get buffer information according to the buffer mode + */ +MIX_RESULT mix_videoconfigparamsenc_get_upstream_buffer_info ( + MixVideoConfigParamsEnc * obj, MixBufferAllocationMode buffer_mode, void ** buf_info); + +/* TODO: Add getters and setters for other properties */ #endif /* __MIX_VIDEOCONFIGPARAMSENC_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc_h263.cpp b/mix_video/src/mixvideoconfigparamsenc_h263.cpp index 35add90..f31a3e8 100644 --- a/mix_video/src/mixvideoconfigparamsenc_h263.cpp +++ b/mix_video/src/mixvideoconfigparamsenc_h263.cpp @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. 
The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
@@ -19,88 +19,115 @@ No license under any patent, copyright, trade secret or other intellectual prope
 #define MDEBUG
+
+MixVideoConfigParamsEncH263::MixVideoConfigParamsEncH263()
+    :slice_num(1)
+    ,disable_deblocking_filter_idc(0)
+    ,reserved1(NULL)
+    ,reserved2(NULL)
+    ,reserved3(NULL)
+    ,reserved4(NULL) {
+}
+MixVideoConfigParamsEncH263:: ~MixVideoConfigParamsEncH263() {
+}
-MixVideoConfigParamsEncH263 *
-mix_videoconfigparamsenc_h263_new (void)
-{
-    return new MixVideoConfigParamsEncH263();
+bool MixVideoConfigParamsEncH263:: copy(MixParams *target) const {
+    bool ret = FALSE;
+    MixVideoConfigParamsEncH263 * this_target = MIX_VIDEOCONFIGPARAMSENC_H263(target);
+    if (NULL != this_target) {
+        //add properties
+        this_target->slice_num = slice_num;
+        this_target->disable_deblocking_filter_idc = disable_deblocking_filter_idc;
+        ret = MixVideoConfigParamsEnc::copy(target);
+    }
+    return ret;
 }
+bool MixVideoConfigParamsEncH263:: equal(MixParams* obj) const {
+    bool ret = FALSE;
+    MixVideoConfigParamsEncH263 * this_first = MIX_VIDEOCONFIGPARAMSENC_H263(obj);
+    if (NULL == this_first) {
+        return FALSE;
+    }
+    if (this_first->slice_num != slice_num) {
+        goto not_equal;
+    }
-MixVideoConfigParamsEncH263
- * mix_videoconfigparamsenc_h263_ref (MixVideoConfigParamsEncH263 * mix)
-{
-    return (MixVideoConfigParamsEncH263 *) mix_params_ref (MIX_PARAMS (mix));
-}
+    if (this_first->disable_deblocking_filter_idc != disable_deblocking_filter_idc) {
+        goto not_equal;
+    }
-/**
-* mix_videoconfigparamsenc_h263_dup:
-* @obj: a #MixVideoConfigParams object
-* @returns: a newly allocated duplicate of the object.
-*
-* Copy duplicate of the object.
-*/
-MixParams *
-mix_videoconfigparamsenc_h263_dup (const MixParams * obj)
-{
-
-    return NULL;
+    ret = TRUE;
+
+not_equal:
+
+    if (ret != TRUE) {
+        return ret;
+    }
+    ret = MixVideoConfigParamsEnc::equal(obj);
+    return ret;
 }
-/**
-* mix_videoconfigparamsenc_h263_copy:
-* @target: copy to target
-* @src: copy from src
-* @returns: boolean indicates if copy is successful.
-*
-* Copy instance data from @src to @target.
-*/
-gboolean
-mix_videoconfigparamsenc_h263_copy (MixParams * target, const MixParams * src)
-{
-    return FALSE;
+MixParams*
+MixVideoConfigParamsEncH263::dup() const {
+    MixParams *ret = new MixVideoConfigParamsEncH263();
+    if (NULL != ret) {
+        if (FALSE == copy(ret)) {
+            ret->Unref();
+            ret = NULL;
+        }
+    }
+    return ret;
 }
-/**
-* mix_videoconfigparamsenc_h263:
-* @first: first object to compare
-* @second: seond object to compare
-* @returns: boolean indicates if instance are equal.
-*
-* Copy instance data from @src to @target.
-*/ -gboolean -mix_videoconfigparamsencenc_h263_equal (MixParams * first, MixParams * second) -{ - - return FALSE; +MixVideoConfigParamsEncH263 * +mix_videoconfigparamsenc_h263_new (void) { + return new MixVideoConfigParamsEncH263(); } +MixVideoConfigParamsEncH263* +mix_videoconfigparamsenc_h263_ref (MixVideoConfigParamsEncH263 * mix) { + if (NULL != mix) + mix->Ref(); + return mix; +} -MIX_RESULT mix_videoconfigparamsenc_h263_set_slice_num (MixVideoConfigParamsEncH263 * obj, - guint slice_num) { - return MIX_RESULT_SUCCESS; -} +/* TODO: Add getters and setters for properties if any */ -MIX_RESULT mix_videoconfigparamsenc_h263_get_slice_num (MixVideoConfigParamsEncH263 * obj, - guint * slice_num) { +#define MIX_VIDEOCONFIGPARAMSENC_H263_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOCONFIGPARAMSENC_H263_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj)) return MIX_RESULT_FAIL; \ + - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_h263_set_slice_num ( + MixVideoConfigParamsEncH263 * obj, uint slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H263_SETTER_CHECK_INPUT (obj); + obj->slice_num = slice_num; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_h263_set_dlk (MixVideoConfigParamsEncH263 * obj, - guint disable_deblocking_filter_idc) { - - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_h263_get_slice_num ( + MixVideoConfigParamsEncH263 * obj, uint * slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H263_GETTER_CHECK_INPUT (obj, slice_num); + *slice_num = obj->slice_num; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_h263_get_dlk (MixVideoConfigParamsEncH263 * obj, - guint * disable_deblocking_filter_idc) { +MIX_RESULT mix_videoconfigparamsenc_h263_set_dlk ( + MixVideoConfigParamsEncH263 * obj, uint disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_H263_SETTER_CHECK_INPUT (obj); + obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; +} - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_h263_get_dlk ( + MixVideoConfigParamsEncH263 * obj, uint * disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_H263_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); + *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoconfigparamsenc_h263.h b/mix_video/src/mixvideoconfigparamsenc_h263.h index 4dfd680..4e0d994 100644 --- a/mix_video/src/mixvideoconfigparamsenc_h263.h +++ b/mix_video/src/mixvideoconfigparamsenc_h263.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -12,84 +12,66 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoconfigparamsenc.h" #include "mixvideodef.h" + /** * MIX_VIDEOCONFIGPARAMSENC_H263: * @obj: object to be type-casted. */ -//#define MIX_VIDEOCONFIGPARAMSENC_H263(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263, MixVideoConfigParamsEncH263)) #define MIX_VIDEOCONFIGPARAMSENC_H263(obj) (reinterpret_cast(obj)) /** * MIX_IS_VIDEOCONFIGPARAMSENC_H263: * @obj: an object. -* +* * Checks if the given object is an instance of #MixVideoConfigParamsEncH263 */ -//#define MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263)) +#define MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj) ((NULL != MIX_VIDEOCONFIGPARAMSENC_H263(obj)) ? TRUE : FALSE) -/** -* MIX_VIDEOCONFIGPARAMSENC_H263_CLASS: -* @klass: class to be type-casted. -*/ -//#define MIX_VIDEOCONFIGPARAMSENC_H263_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263, MixVideoConfigParamsEncH263Class)) /** -* MIX_IS_VIDEOCONFIGPARAMSENC_H263_CLASS: -* @klass: a class. -* -* Checks if the given class is #MixVideoConfigParamsEncH263Class +* MixVideoConfigParamsEncH263: +* +* MI-X VideoConfig Parameter object */ -//#define MIX_IS_VIDEOCONFIGPARAMSENC_H263_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263)) +class MixVideoConfigParamsEncH263 : public MixVideoConfigParamsEnc { +public: + MixVideoConfigParamsEncH263(); + virtual ~MixVideoConfigParamsEncH263(); + virtual bool copy(MixParams *target) const; + virtual bool equal(MixParams* obj) const; + virtual MixParams* dup() const; -/** -* MIX_VIDEOCONFIGPARAMSENC_H263_GET_CLASS: -* @obj: a #MixParams object. -* -* Get the class instance of the object. 
-*/ -//#define MIX_VIDEOCONFIGPARAMSENC_H263_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H263, MixVideoConfigParamsEncH263Class)) +public: + /*< public > */ + /* TODO: Add H.263 configuration paramters */ + /* slice number in one picture */ + uint slice_num; + /* enable/disable deblocking */ + uint disable_deblocking_filter_idc; -/** -* MixVideoConfigParamsEncH263: -* -* MI-X VideoConfig Parameter object -*/ -class MixVideoConfigParamsEncH263 : public MixVideoConfigParamsEnc -{ - public: - - /*< public > */ - - /* TODO: Add H.263 configuration paramters */ - - /* slice number in one picture */ - guint slice_num; - - /* enable/disable deblocking */ - guint disable_deblocking_filter_idc; - - /* Reserved for future use */ - void *reserved1; - - /* Reserved for future use */ - void *reserved2; - - /* Reserved for future use */ - void *reserved3; - - /* Reserved for future use */ - void *reserved4; + /* Reserved for future use */ + void *reserved1; + + /* Reserved for future use */ + void *reserved2; + + /* Reserved for future use */ + void *reserved3; + + /* Reserved for future use */ + void *reserved4; }; + /** * mix_videoconfigparamsenc_h263_new: * @returns: A newly allocated instance of #MixVideoConfigParamsEncH263 -* +* * Use this method to create new instance of #MixVideoConfigParamsEncH263 */ MixVideoConfigParamsEncH263 *mix_videoconfigparamsenc_h263_new (void); @@ -97,16 +79,16 @@ MixVideoConfigParamsEncH263 *mix_videoconfigparamsenc_h263_new (void); * mix_videoconfigparamsenc_h263_ref: * @mix: object to add reference * @returns: the #MixVideoConfigParamsEncH263 instance where reference count has been increased. -* +* * Add reference count. */ -MixVideoConfigParamsEncH263 - * mix_videoconfigparamsenc_h263_ref (MixVideoConfigParamsEncH263 * mix); +MixVideoConfigParamsEncH263* +mix_videoconfigparamsenc_h263_ref (MixVideoConfigParamsEncH263 * mix); /** * mix_videoconfigparamsenc_h263_unref: * @obj: object to unref. -* +* * Decrement reference count of the object. */ #define mix_videoconfigparamsenc_h263_unref(obj) mix_params_unref(MIX_PARAMS(obj)) @@ -123,8 +105,8 @@ MixVideoConfigParamsEncH263 * * Set the The flag to enable/disable deblocking */ -MIX_RESULT mix_videoconfigparamsenc_h263_set_dlk (MixVideoConfigParamsEncH263 * obj, - guint disable_deblocking_filter_idc); +MIX_RESULT mix_videoconfigparamsenc_h263_set_dlk ( + MixVideoConfigParamsEncH263 * obj, uint disable_deblocking_filter_idc); /** * mix_videoconfigparamsenc_h263_get_dlk: @@ -134,19 +116,19 @@ MIX_RESULT mix_videoconfigparamsenc_h263_set_dlk (MixVideoConfigParamsEncH263 * * * Get the The flag to enable/disable deblocking */ -MIX_RESULT mix_videoconfigparamsenc_h263_get_dlk (MixVideoConfigParamsEncH263 * obj, - guint * disable_deblocking_filter_idc); +MIX_RESULT mix_videoconfigparamsenc_h263_get_dlk ( + MixVideoConfigParamsEncH263 * obj, uint * disable_deblocking_filter_idc); /** * mix_videoconfigparamsenc_h263_set_slice_num: * @obj: #MixVideoConfigParamsEncH263 object - * @slice_num: Number of slice in one picture encoded. + * @slice_num: Number of slice in one picture encoded. 
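[Editor's sketch, not part of the patch: the SETTER/GETTER_CHECK_INPUT macros shown in the .cpp reject a NULL object or property pointer with MIX_RESULT_NULL_PTR and a failed type check with MIX_RESULT_FAIL, so callers can rely on the return code rather than crashing. Illustrative usage:]

    MixVideoConfigParamsEncH263 *p = mix_videoconfigparamsenc_h263_new();
    uint n = 0;
    if (MIX_RESULT_SUCCESS == mix_videoconfigparamsenc_h263_set_slice_num(p, 2) &&
        MIX_RESULT_SUCCESS == mix_videoconfigparamsenc_h263_get_slice_num(p, &n)) {
        // n == 2 here
    }
    // A NULL property pointer is caught by the getter macro:
    // mix_videoconfigparamsenc_h263_get_slice_num(p, NULL) returns MIX_RESULT_NULL_PTR
    mix_videoconfigparamsenc_h263_unref(p);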
* @returns: Common Video Error Return Codes * * Set slice_num */ -MIX_RESULT mix_videoconfigparamsenc_h263_set_slice_num (MixVideoConfigParamsEncH263 * obj, - guint slice_num); +MIX_RESULT mix_videoconfigparamsenc_h263_set_slice_num ( + MixVideoConfigParamsEncH263 * obj, uint slice_num); /** * mix_videoconfigparamsenc_h263_get_slice_num: @@ -156,8 +138,9 @@ MIX_RESULT mix_videoconfigparamsenc_h263_set_slice_num (MixVideoConfigParamsEncH * * Get slice_num */ -MIX_RESULT mix_videoconfigparamsenc_h263_get_slice_num (MixVideoConfigParamsEncH263 * obj, - guint * slice_num); +MIX_RESULT mix_videoconfigparamsenc_h263_get_slice_num ( + MixVideoConfigParamsEncH263 * obj, uint * slice_num); + #endif /* __MIX_VIDEOCONFIGPARAMSENC_H263_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.cpp b/mix_video/src/mixvideoconfigparamsenc_h264.cpp index 3219718..9114fdf 100644 --- a/mix_video/src/mixvideoconfigparamsenc_h264.cpp +++ b/mix_video/src/mixvideoconfigparamsenc_h264.cpp @@ -19,154 +19,238 @@ No license under any patent, copyright, trade secret or other intellectual prope #define MDEBUG +MixVideoConfigParamsEncH264::MixVideoConfigParamsEncH264() + :basic_unit_size(0) + ,slice_num(1) + ,I_slice_num(1) + ,P_slice_num(1) + ,disable_deblocking_filter_idc(0) + ,vui_flag(0) + ,delimiter_type(MIX_DELIMITER_LENGTHPREFIX) + ,idr_interval(2) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { +} +MixVideoConfigParamsEncH264::~MixVideoConfigParamsEncH264() { +} - - -MixVideoConfigParamsEncH264 * -mix_videoconfigparamsenc_h264_new (void) -{ - - - return new MixVideoConfigParamsEncH264(); +bool MixVideoConfigParamsEncH264::copy(MixParams *target) const { + bool ret = FALSE; + MixVideoConfigParamsEncH264 * this_target = MIX_VIDEOCONFIGPARAMSENC_H264(target); + if (NULL != this_target) { + this_target->basic_unit_size = basic_unit_size; + this_target->slice_num = slice_num; + this_target->I_slice_num = I_slice_num; + this_target->P_slice_num = P_slice_num; + this_target->disable_deblocking_filter_idc = disable_deblocking_filter_idc; + this_target->vui_flag = vui_flag; + this_target->delimiter_type = delimiter_type; + this_target->idr_interval = idr_interval; + ret = MixVideoConfigParamsEnc::copy(target); + } + return ret; } +bool MixVideoConfigParamsEncH264::equal(MixParams* obj) const { + bool ret = FALSE; + MixVideoConfigParamsEncH264 * this_obj = MIX_VIDEOCONFIGPARAMSENC_H264(obj); + if (NULL == this_obj) + return ret; -MixVideoConfigParamsEncH264 - * mix_videoconfigparamsenc_h264_ref (MixVideoConfigParamsEncH264 * mix) -{ - return (MixVideoConfigParamsEncH264 *) mix_params_ref (MIX_PARAMS (mix)); -} + if (this_obj->basic_unit_size != basic_unit_size) { + goto not_equal; + } -/** -* mix_videoconfigparamsenc_h264_dup: -* @obj: a #MixVideoConfigParams object -* @returns: a newly allocated duplicate of the object. -* -* Copy duplicate of the object. -*/ -MixParams * -mix_videoconfigparamsenc_h264_dup (const MixParams * obj) -{ - - return NULL; -} + if (this_obj->slice_num != slice_num) { + goto not_equal; + } -/** -* mix_videoconfigparamsenc_h264_copy: -* @target: copy to target -* @src: copy from src -* @returns: boolean indicates if copy is successful. -* -* Copy instance data from @src to @target. 
-*/ -gboolean -mix_videoconfigparamsenc_h264_copy (MixParams * target, const MixParams * src) -{ + if (this_obj->I_slice_num != I_slice_num) { + goto not_equal; + } - return FALSE; -} + if (this_obj->P_slice_num !=P_slice_num) { + goto not_equal; + } -/** -* mix_videoconfigparamsenc_h264: -* @first: first object to compare -* @second: seond object to compare -* @returns: boolean indicates if instance are equal. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videoconfigparamsencenc_h264_equal (MixParams * first, MixParams * second) -{ - gboolean ret = FALSE; + if (this_obj->disable_deblocking_filter_idc != disable_deblocking_filter_idc) { + goto not_equal; + } - return ret; -} + if (this_obj->vui_flag !=vui_flag) { + goto not_equal; + } + + if (this_obj->delimiter_type != delimiter_type) { + goto not_equal; + } + if (this_obj->idr_interval != idr_interval) { + goto not_equal; + } + ret = TRUE; +not_equal: -MIX_RESULT mix_videoconfigparamsenc_h264_set_bus (MixVideoConfigParamsEncH264 * obj, - guint basic_unit_size) { + if (ret != TRUE) { + return ret; + } - return MIX_RESULT_SUCCESS; + ret = MixVideoConfigParamsEnc::equal(this_obj); + return ret; } -MIX_RESULT mix_videoconfigparamsenc_h264_get_bus (MixVideoConfigParamsEncH264 * obj, - guint * basic_unit_size) { +MixParams* MixVideoConfigParamsEncH264::dup() const { + MixParams *ret = new MixVideoConfigParamsEncH264(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; +} - return MIX_RESULT_SUCCESS; +MixVideoConfigParamsEncH264 * +mix_videoconfigparamsenc_h264_new (void) { + return new MixVideoConfigParamsEncH264(); } -MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (MixVideoConfigParamsEncH264 * obj, - guint disable_deblocking_filter_idc) { - return MIX_RESULT_SUCCESS; +MixVideoConfigParamsEncH264* +mix_videoconfigparamsenc_h264_ref (MixVideoConfigParamsEncH264 * mix) { + if (NULL != mix) + mix->Ref(); + return mix; } -MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk (MixVideoConfigParamsEncH264 * obj, - guint * disable_deblocking_filter_idc) { - - return MIX_RESULT_SUCCESS; -} +/* TODO: Add getters and setters for properties if any */ -MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(MixVideoConfigParamsEncH264 * obj, - guint slice_num) { +#define MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj)) return MIX_RESULT_FAIL; \ + - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_h264_set_bus ( + MixVideoConfigParamsEncH264 * obj, uint basic_unit_size) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->basic_unit_size = basic_unit_size; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num(MixVideoConfigParamsEncH264 * obj, - guint * slice_num) { - - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_h264_get_bus ( + MixVideoConfigParamsEncH264 * obj, uint * basic_unit_size) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, basic_unit_size); + *basic_unit_size = obj->basic_unit_size; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_h264_set_I_slice_num(MixVideoConfigParamsEncH264 * obj, - guint I_slice_num) { - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk 
( + MixVideoConfigParamsEncH264 * obj, uint disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_h264_get_I_slice_num(MixVideoConfigParamsEncH264 * obj, - guint * I_slice_num) { - - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk ( + MixVideoConfigParamsEncH264 * obj, uint * disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); + *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_h264_set_P_slice_num(MixVideoConfigParamsEncH264 * obj, - guint P_slice_num) { +MIX_RESULT mix_videoconfigparamsenc_h264_set_vui_flag ( + MixVideoConfigParamsEncH264 * obj, uint8 vui_flag) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->vui_flag = vui_flag; + return MIX_RESULT_SUCCESS; +} - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_h264_get_vui_flag ( + MixVideoConfigParamsEncH264 * obj, uint8 * vui_flag) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, vui_flag); + *vui_flag = obj->vui_flag; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_h264_get_P_slice_num(MixVideoConfigParamsEncH264 * obj, - guint * P_slice_num) { +MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num( + MixVideoConfigParamsEncH264 * obj, uint slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->slice_num = slice_num; + obj->I_slice_num = slice_num; + obj->P_slice_num = slice_num; + return MIX_RESULT_SUCCESS; +} - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num( + MixVideoConfigParamsEncH264 * obj, uint * slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, slice_num); + *slice_num = obj->slice_num; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParamsEncH264 * obj, - MixDelimiterType delimiter_type) { +MIX_RESULT mix_videoconfigparamsenc_h264_set_I_slice_num( + MixVideoConfigParamsEncH264 * obj, uint I_slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->I_slice_num = I_slice_num; + return MIX_RESULT_SUCCESS; +} - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_h264_get_I_slice_num( + MixVideoConfigParamsEncH264 * obj, uint * I_slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, I_slice_num); + *I_slice_num = obj->I_slice_num; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type (MixVideoConfigParamsEncH264 * obj, - MixDelimiterType * delimiter_type) { +MIX_RESULT mix_videoconfigparamsenc_h264_set_P_slice_num( + MixVideoConfigParamsEncH264 * obj, uint P_slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->P_slice_num = P_slice_num; + return MIX_RESULT_SUCCESS; +} - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_h264_get_P_slice_num( + MixVideoConfigParamsEncH264 * obj, uint * P_slice_num) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, P_slice_num); + *P_slice_num = obj->P_slice_num; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_h264_set_IDR_interval (MixVideoConfigParamsEncH264 * obj, - guint idr_interval) { +MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type ( + MixVideoConfigParamsEncH264 * obj, 
MixDelimiterType delimiter_type) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->delimiter_type = delimiter_type; + return MIX_RESULT_SUCCESS; +} - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type ( + MixVideoConfigParamsEncH264 * obj, MixDelimiterType * delimiter_type) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, delimiter_type); + *delimiter_type = obj->delimiter_type; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_h264_get_IDR_interval (MixVideoConfigParamsEncH264 * obj, - guint * idr_interval) { +MIX_RESULT mix_videoconfigparamsenc_h264_set_IDR_interval ( + MixVideoConfigParamsEncH264 * obj, uint idr_interval) { + MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); + obj->idr_interval = idr_interval; + return MIX_RESULT_SUCCESS; +} - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_h264_get_IDR_interval ( + MixVideoConfigParamsEncH264 * obj, uint * idr_interval) { + MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, idr_interval); + *idr_interval = obj->idr_interval; + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.h b/mix_video/src/mixvideoconfigparamsenc_h264.h index fe8fd16..09afb9a 100644 --- a/mix_video/src/mixvideoconfigparamsenc_h264.h +++ b/mix_video/src/mixvideoconfigparamsenc_h264.h @@ -13,95 +13,77 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideodef.h" - /** -* MIX_VIDEOCONFIGPARAMSENC_H264: -* @obj: object to be type-casted. +* MIX_TYPE_VIDEOCONFIGPARAMSENC_H264: +* +* Get type of class. */ -//#define MIX_VIDEOCONFIGPARAMSENC_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264)) #define MIX_VIDEOCONFIGPARAMSENC_H264(obj) (reinterpret_cast(obj)) + /** * MIX_IS_VIDEOCONFIGPARAMSENC_H264: * @obj: an object. * * Checks if the given object is an instance of #MixVideoConfigParamsEncH264 */ -//#define MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264)) - -/** -* MIX_VIDEOCONFIGPARAMSENC_H264_CLASS: -* @klass: class to be type-casted. -*/ -//#define MIX_VIDEOCONFIGPARAMSENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264Class)) - -/** -* MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS: -* @klass: a class. -* -* Checks if the given class is #MixVideoConfigParamsEncH264Class -*/ -//#define MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264)) - -/** -* MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS: -* @obj: a #MixParams object. -* -* Get the class instance of the object. -*/ -//#define MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264Class)) +#define MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj) ((NULL != MIX_VIDEOCONFIGPARAMSENC_H264(obj)) ? 
TRUE : FALSE) -//typedef struct _MixVideoConfigParamsEncH264Class MixVideoConfigParamsEncH264Class; - /** * MixVideoConfigParamsEncH264: * * MI-X VideoConfig Parameter object */ -class MixVideoConfigParamsEncH264 : public MixVideoConfigParamsEnc -{ +class MixVideoConfigParamsEncH264 : public MixVideoConfigParamsEnc { +public: + MixVideoConfigParamsEncH264(); + virtual ~MixVideoConfigParamsEncH264(); + virtual bool copy(MixParams *target) const; + virtual bool equal(MixParams* obj) const; + virtual MixParams* dup() const; + public: - /*< public > */ + /* TODO: Add H.264 configuration paramters */ - /* TODO: Add H.264 configuration paramters */ + /* The basic unit size used by rate control */ + uint basic_unit_size; - /* The basic unit size used by rate control */ - guint basic_unit_size; + /* Number of slices in one frame */ + uint slice_num; - /* Number of slices in one frame */ - guint slice_num; + /* Number of slices in one I frame */ + uint I_slice_num; - /* Number of slices in one I frame */ - guint I_slice_num; + /* Number of slices in one P frame */ + uint P_slice_num; - /* Number of slices in one P frame */ - guint P_slice_num; + /* enable/disable deblocking */ + uint8 disable_deblocking_filter_idc; - /* enable/disable deblocking */ - guint8 disable_deblocking_filter_idc; + /* enable/disable vui */ + uint8 vui_flag; - /* delimiter_type */ - MixDelimiterType delimiter_type; + /* delimiter_type */ + MixDelimiterType delimiter_type; - guint idr_interval; + uint idr_interval; - /* Reserved for future use */ - void *reserved1; + /* Reserved for future use */ + void *reserved1; - /* Reserved for future use */ - void *reserved2; + /* Reserved for future use */ + void *reserved2; - /* Reserved for future use */ - void *reserved3; + /* Reserved for future use */ + void *reserved3; - /* Reserved for future use */ - void *reserved4; + /* Reserved for future use */ + void *reserved4; }; - /** * mix_videoconfigparamsenc_h264_new: * @returns: A newly allocated instance of #MixVideoConfigParamsEncH264 @@ -116,8 +98,8 @@ MixVideoConfigParamsEncH264 *mix_videoconfigparamsenc_h264_new (void); * * Add reference count. 
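[Editor's sketch, not part of the patch: with the GObject machinery gone, reference counting goes through the object's own Ref()/Unref(), which _ref() and the _unref() macro wrap. Note also, from the .cpp above, that mix_videoconfigparamsenc_h264_set_slice_num() assigns I_slice_num and P_slice_num as well as slice_num; use the dedicated setters to make them diverge.]

    MixVideoConfigParamsEncH264 *cfg = mix_videoconfigparamsenc_h264_new();
    MixVideoConfigParamsEncH264 *alias = mix_videoconfigparamsenc_h264_ref(cfg); // count: 2
    mix_videoconfigparamsenc_h264_set_slice_num(cfg, 4);   // also sets I/P slice counts
    mix_videoconfigparamsenc_h264_set_I_slice_num(cfg, 2); // then override I frames only
    mix_videoconfigparamsenc_h264_unref(alias);            // count: 1
    mix_videoconfigparamsenc_h264_unref(cfg);              // object destroyed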
*/ -MixVideoConfigParamsEncH264 - * mix_videoconfigparamsenc_h264_ref (MixVideoConfigParamsEncH264 * mix); +MixVideoConfigParamsEncH264* +mix_videoconfigparamsenc_h264_ref (MixVideoConfigParamsEncH264 * mix); /** * mix_videoconfigparamsenc_h264_unref: @@ -140,8 +122,8 @@ MixVideoConfigParamsEncH264 * * Set The basic unit size used by rate control */ -MIX_RESULT mix_videoconfigparamsenc_h264_set_bus (MixVideoConfigParamsEncH264 * obj, - guint basic_unit_size); +MIX_RESULT mix_videoconfigparamsenc_h264_set_bus ( + MixVideoConfigParamsEncH264 * obj, uint basic_unit_size); /** * mix_videoconfigparamsenc_h264_get_bus: @@ -151,8 +133,8 @@ MIX_RESULT mix_videoconfigparamsenc_h264_set_bus (MixVideoConfigParamsEncH264 * * * Get The basic unit size used by rate control */ -MIX_RESULT mix_videoconfigparamsenc_h264_get_bus (MixVideoConfigParamsEncH264 * obj, - guint * basic_unit_size); +MIX_RESULT mix_videoconfigparamsenc_h264_get_bus ( + MixVideoConfigParamsEncH264 * obj, uint * basic_unit_size); /** * mix_videoconfigparamsenc_h264_set_dlk: @@ -162,8 +144,8 @@ MIX_RESULT mix_videoconfigparamsenc_h264_get_bus (MixVideoConfigParamsEncH264 * * * Set the The flag to enable/disable deblocking */ -MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (MixVideoConfigParamsEncH264 * obj, - guint disable_deblocking_filter_idc); +MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk ( + MixVideoConfigParamsEncH264 * obj, uint disable_deblocking_filter_idc); /** * mix_videoconfigparamsenc_h264_get_dlk: @@ -173,8 +155,31 @@ MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (MixVideoConfigParamsEncH264 * * * Get the The flag to enable/disable deblocking */ -MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk (MixVideoConfigParamsEncH264 * obj, - guint * disable_deblocking_filter_idc); +MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk ( + MixVideoConfigParamsEncH264 * obj, uint * disable_deblocking_filter_idc); + +/** + * mix_videoconfigparamsenc_h264_set_vui_flag: + * @obj: #MixVideoConfigParamsEncH264 object + * @vui_flag: The flag to enable/disable vui + * @returns: Common Video Error Return Codes + * + * Set the The flag to enable/disable vui + */ +MIX_RESULT mix_videoconfigparamsenc_h264_set_vui_flag ( + MixVideoConfigParamsEncH264 * obj, uint8 vui_flag); + +/** + * mix_videoconfigparamsenc_h264_get_vui_flag + * @obj: #MixVideoConfigParamsEncH264 object + * @vui_flag: vui_flag to be returned + * @returns: Common Video Error Return Codes + * + * Get the The flag to enable/disable vui_flag + */ +MIX_RESULT mix_videoconfigparamsenc_h264_get_vui_flag ( + MixVideoConfigParamsEncH264 * obj, uint8 * vui_flag); + /** * mix_videoconfigparamsenc_h264_set_slice_num: @@ -184,8 +189,8 @@ MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk (MixVideoConfigParamsEncH264 * * * Set the Number of slices in one frame */ -MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(MixVideoConfigParamsEncH264 * obj, - guint slice_num); +MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num( + MixVideoConfigParamsEncH264 * obj, uint slice_num); /** * mix_videoconfigparamsenc_h264_get_slice_num: @@ -195,8 +200,8 @@ MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(MixVideoConfigParamsEncH2 * * Get the Number of slices in one frame */ -MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num(MixVideoConfigParamsEncH264 * obj, - guint * slice_num); +MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num( + MixVideoConfigParamsEncH264 * obj, uint * slice_num); /** @@ -207,8 +212,8 @@ MIX_RESULT 
mix_videoconfigparamsenc_h264_get_slice_num(MixVideoConfigParamsEncH2 * * Set the Number of slices in one I frame */ -MIX_RESULT mix_videoconfigparamsenc_h264_set_I_slice_num(MixVideoConfigParamsEncH264 * obj, - guint I_slice_num); +MIX_RESULT mix_videoconfigparamsenc_h264_set_I_slice_num( + MixVideoConfigParamsEncH264 * obj, uint I_slice_num); /** * mix_videoconfigparamsenc_h264_get_I_slice_num: @@ -218,8 +223,8 @@ MIX_RESULT mix_videoconfigparamsenc_h264_set_I_slice_num(MixVideoConfigParamsEnc * * Get the Number of slices in one I frame */ -MIX_RESULT mix_videoconfigparamsenc_h264_get_I_slice_num(MixVideoConfigParamsEncH264 * obj, - guint * I_slice_num); +MIX_RESULT mix_videoconfigparamsenc_h264_get_I_slice_num( + MixVideoConfigParamsEncH264 * obj, uint * I_slice_num); /** * mix_videoconfigparamsenc_h264_set_P_slice_num: @@ -229,8 +234,8 @@ MIX_RESULT mix_videoconfigparamsenc_h264_get_I_slice_num(MixVideoConfigParamsEnc * * Set the Number of slices in one P frame */ -MIX_RESULT mix_videoconfigparamsenc_h264_set_P_slice_num(MixVideoConfigParamsEncH264 * obj, - guint P_slice_num); +MIX_RESULT mix_videoconfigparamsenc_h264_set_P_slice_num( + MixVideoConfigParamsEncH264 * obj, uint P_slice_num); /** * mix_videoconfigparamsenc_h264_get_P_slice_num: @@ -240,8 +245,8 @@ MIX_RESULT mix_videoconfigparamsenc_h264_set_P_slice_num(MixVideoConfigParamsEnc * * Get the Number of slices in one P frame */ -MIX_RESULT mix_videoconfigparamsenc_h264_get_P_slice_num(MixVideoConfigParamsEncH264 * obj, - guint * P_slice_num); +MIX_RESULT mix_videoconfigparamsenc_h264_get_P_slice_num( + MixVideoConfigParamsEncH264 * obj, uint * P_slice_num); /** * mix_videoconfigparamsenc_h264_set_delimiter_type: @@ -251,8 +256,8 @@ MIX_RESULT mix_videoconfigparamsenc_h264_get_P_slice_num(MixVideoConfigParamsEnc * * Set Delimiter type */ -MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParamsEncH264 * obj, - MixDelimiterType delimiter_type); +MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type ( + MixVideoConfigParamsEncH264 * obj, MixDelimiterType delimiter_type); /** * mix_videoconfigparamsenc_h264_get_delimiter_type: @@ -262,8 +267,8 @@ MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParam * * Get Delimiter type */ -MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type (MixVideoConfigParamsEncH264 * obj, - MixDelimiterType * delimiter_type); +MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type ( + MixVideoConfigParamsEncH264 * obj, MixDelimiterType * delimiter_type); /** @@ -274,8 +279,8 @@ MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type (MixVideoConfigParam * * Set IDR interval */ -MIX_RESULT mix_videoconfigparamsenc_h264_set_IDR_interval (MixVideoConfigParamsEncH264 * obj, - guint idr_interval); +MIX_RESULT mix_videoconfigparamsenc_h264_set_IDR_interval ( + MixVideoConfigParamsEncH264 * obj, uint idr_interval); /** @@ -286,8 +291,9 @@ MIX_RESULT mix_videoconfigparamsenc_h264_set_IDR_interval (MixVideoConfigParamsE * * Get IDR interval */ -MIX_RESULT mix_videoconfigparamsenc_h264_get_IDR_interval (MixVideoConfigParamsEncH264 * obj, - guint * idr_interval); +MIX_RESULT mix_videoconfigparamsenc_h264_get_IDR_interval ( + MixVideoConfigParamsEncH264 * obj, uint * idr_interval); + #endif /* __MIX_VIDEOCONFIGPARAMSENC_H264_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp b/mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp index 6e11d22..9ce152c 100644 --- a/mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp +++ 
b/mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -19,231 +19,71 @@ No license under any patent, copyright, trade secret or other intellectual prope #define MDEBUG - -static GType _mix_videoconfigparamsenc_mpeg4_type = 0; -static MixVideoConfigParamsEncClass *parent_class = NULL; - -#define _do_init { _mix_videoconfigparamsenc_mpeg4_type = g_define_type_id; } - -gboolean mix_videoconfigparamsenc_mpeg4_copy (MixParams * target, - const MixParams * src); -MixParams *mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj); -gboolean mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, - MixParams * second); -static void mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncMPEG4, /* The name of the new type, in Camel case */ - mix_videoconfigparamsenc_mpeg4, /* The name of the new type in lowercase */ - MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ - _do_init); - -void -_mix_videoconfigparamsenc_mpeg4_initialize (void) -{ - /* the MixParams types need to be class_ref'd once before it can be - * done from multiple threads; - * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ - g_type_class_ref (mix_videoconfigparamsenc_mpeg4_get_type ()); -} - -static void -mix_videoconfigparamsenc_mpeg4_init (MixVideoConfigParamsEncMPEG4 * self) -{ - /* initialize properties here */ - /* TODO: initialize properties */ - - self->fixed_vop_time_increment = 3; - self->profile_and_level_indication = 3; - self->disable_deblocking_filter_idc = 0; - - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; -} - -static void -mix_videoconfigparamsenc_mpeg4_class_init (MixVideoConfigParamsEncMPEG4Class * klass) -{ - MixVideoConfigParamsEncClass *this_parent_class = - MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); - MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); - - /* setup static parent class */ - parent_class = - (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); - - this_root_class->finalize = mix_videoconfigparamsenc_mpeg4_finalize; - this_root_class->copy = - (MixParamsCopyFunction) mix_videoconfigparamsenc_mpeg4_copy; - this_root_class->dup = - (MixParamsDupFunction) mix_videoconfigparamsenc_mpeg4_dup; - this_root_class->equal = - (MixParamsEqualFunction) mix_videoconfigparamsencenc_mpeg4_equal; 
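[Editor's note with a sketch, not part of the patch: the conversion pattern repeated for each params type swaps the GType registration and class_init vtable wiring deleted above for plain C++ virtual methods. Schematically, with "Foo" as a placeholder name:]

    // Placeholder class name; every converted params class takes this shape.
    class MixVideoConfigParamsEncFoo : public MixVideoConfigParamsEnc {
    public:
        MixVideoConfigParamsEncFoo();                // replaces _init()
        virtual ~MixVideoConfigParamsEncFoo();       // replaces finalize()
        virtual bool copy(MixParams *target) const;  // was this_root_class->copy
        virtual bool equal(MixParams *obj) const;    // was this_root_class->equal
        virtual MixParams* dup() const;              // was this_root_class->dup
    };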
+MixVideoConfigParamsEncMPEG4::MixVideoConfigParamsEncMPEG4() + :profile_and_level_indication(3) + ,fixed_vop_time_increment(3) + ,disable_deblocking_filter_idc(0) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { } -MixVideoConfigParamsEncMPEG4 * -mix_videoconfigparamsenc_mpeg4_new (void) -{ - MixVideoConfigParamsEncMPEG4 *ret = (MixVideoConfigParamsEncMPEG4 *) - g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4); - - return ret; +MixVideoConfigParamsEncMPEG4::~MixVideoConfigParamsEncMPEG4() { } -void -mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj) -{ - /* MixVideoConfigParamsEncMPEG4 *this_obj = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (obj); */ - MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); - - /* TODO: cleanup resources allocated */ - /* Chain up parent */ +bool MixVideoConfigParamsEncMPEG4::copy(MixParams *target) const { + bool ret = FALSE; + MixVideoConfigParamsEncMPEG4 *this_target = MIX_VIDEOCONFIGPARAMSENC_MPEG4(target); + if (NULL != this_target) { + //add properties + this_target->profile_and_level_indication= profile_and_level_indication; + this_target->fixed_vop_time_increment= fixed_vop_time_increment; + this_target->disable_deblocking_filter_idc = disable_deblocking_filter_idc; - if (root_class->finalize) - { - root_class->finalize (obj); + // Now chainup base class + ret = MixVideoConfigParamsEnc::copy(target); } + return ret; } -MixVideoConfigParamsEncMPEG4 - * mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix) -{ - return (MixVideoConfigParamsEncMPEG4 *) mix_params_ref (MIX_PARAMS (mix)); -} - -/** -* mix_videoconfigparamsenc_mpeg4_dup: -* @obj: a #MixVideoConfigParams object -* @returns: a newly allocated duplicate of the object. -* -* Copy duplicate of the object. -*/ -MixParams * -mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (obj)) - { - MixVideoConfigParamsEncMPEG4 *duplicate = mix_videoconfigparamsenc_mpeg4_new (); - if (mix_videoconfigparamsenc_mpeg4_copy - (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) - { - ret = MIX_PARAMS (duplicate); - } - else - { - mix_videoconfigparamsenc_mpeg4_unref (duplicate); - } +bool MixVideoConfigParamsEncMPEG4::equal(MixParams* obj) const { + bool ret = FALSE; + MixVideoConfigParamsEncMPEG4 *this_obj = MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj); + if ((NULL != this_obj) && + (profile_and_level_indication == this_obj->profile_and_level_indication) && + (fixed_vop_time_increment == this_obj->fixed_vop_time_increment) && + (disable_deblocking_filter_idc == this_obj->disable_deblocking_filter_idc)) { + ret = MixVideoConfigParamsEnc::equal(obj); } - return ret; + return ret; } -/** -* mix_videoconfigparamsenc_mpeg4_copy: -* @target: copy to target -* @src: copy from src -* @returns: boolean indicates if copy is successful. -* -* Copy instance data from @src to @target. 
-*/ -gboolean -mix_videoconfigparamsenc_mpeg4_copy (MixParams * target, const MixParams * src) -{ - MixVideoConfigParamsEncMPEG4 *this_target, *this_src; - MixParamsClass *root_class; - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (target) - && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (src)) - { - // Cast the base object to this child object - this_target = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (target); - this_src = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (src); - - //add properties - this_target->profile_and_level_indication= this_src->profile_and_level_indication; - this_target->fixed_vop_time_increment= this_src->fixed_vop_time_increment; - this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc; - - // Now chainup base class - root_class = MIX_PARAMS_CLASS (parent_class); - - if (root_class->copy) - { - return root_class->copy (MIX_PARAMS_CAST (target), - MIX_PARAMS_CAST (src)); - } - else - { - return TRUE; - } +MixParams* MixVideoConfigParamsEncMPEG4::dup() const { + MixParams *ret = NULL; + MixVideoConfigParamsEncMPEG4 *duplicate = new MixVideoConfigParamsEncMPEG4(); + if (TRUE == copy(duplicate)) { + ret = duplicate; + } else { + if (NULL != duplicate) + duplicate->Unref(); } - return FALSE; + return ret; } -/** -* mix_videoconfigparamsenc_mpeg4: -* @first: first object to compare -* @second: seond object to compare -* @returns: boolean indicates if instance are equal. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, MixParams * second) -{ - gboolean ret = FALSE; - MixVideoConfigParamsEncMPEG4 *this_first, *this_second; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (first) - && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (second)) - { - // Cast the base object to this child object - - this_first = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (first); - this_second = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (second); - - if (this_first->profile_and_level_indication!= this_second->profile_and_level_indication) { - goto not_equal; - } - if (this_first->fixed_vop_time_increment!= this_second->fixed_vop_time_increment) { - goto not_equal; - } - - if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) { - goto not_equal; - } - - - ret = TRUE; - - not_equal: - - if (ret != TRUE) { - return ret; - } +MixVideoConfigParamsEncMPEG4 * +mix_videoconfigparamsenc_mpeg4_new (void) { + return new MixVideoConfigParamsEncMPEG4(); +} - /* TODO: add comparison for properties */ - { - // members within this scope equal. chaining up. 
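[Editor's sketch, not part of the patch: the new equal() above replaces the goto/not_equal ladder with one short-circuit condition over the three MPEG-4 fields, chaining up to MixVideoConfigParamsEnc::equal() only when they all match. For example:]

    MixVideoConfigParamsEncMPEG4 *a = mix_videoconfigparamsenc_mpeg4_new();
    MixVideoConfigParamsEncMPEG4 *b = mix_videoconfigparamsenc_mpeg4_new();
    bool same = a->equal(b);            // TRUE while both keep the defaults (3, 3, 0)
    b->fixed_vop_time_increment = 5;
    same = a->equal(b);                 // FALSE: a local field differs; no chain-up
    mix_videoconfigparamsenc_mpeg4_unref(a);
    mix_videoconfigparamsenc_mpeg4_unref(b);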
- MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); - if (klass->equal) - { - ret = klass->equal (first, second); - } - else - { - ret = TRUE; - } - } - } - return ret; +MixVideoConfigParamsEncMPEG4 * +mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix) { + if (NULL != mix) + mix->Ref(); + return mix; } /* TODO: Add getters and setters for properties if any */ @@ -251,51 +91,51 @@ mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, MixParams * second) #define MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT(obj) \ if(!obj) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \ - + #define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT(obj, prop) \ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \ + - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParamsEncMPEG4 * obj, - guchar profile_and_level_indication) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); - obj->profile_and_level_indication = profile_and_level_indication; - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level ( + MixVideoConfigParamsEncMPEG4 * obj, uchar profile_and_level_indication) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); + obj->profile_and_level_indication = profile_and_level_indication; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (MixVideoConfigParamsEncMPEG4 * obj, - guchar * profile_and_level_indication) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, profile_and_level_indication); - *profile_and_level_indication = obj->profile_and_level_indication; - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level ( + MixVideoConfigParamsEncMPEG4 * obj, uchar * profile_and_level_indication) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, profile_and_level_indication); + *profile_and_level_indication = obj->profile_and_level_indication; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, - guint fixed_vop_time_increment) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); - obj->fixed_vop_time_increment = fixed_vop_time_increment; - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti ( + MixVideoConfigParamsEncMPEG4 * obj, uint fixed_vop_time_increment) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); + obj->fixed_vop_time_increment = fixed_vop_time_increment; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, - guint * fixed_vop_time_increment) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, fixed_vop_time_increment); - *fixed_vop_time_increment = obj->fixed_vop_time_increment; - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti ( + MixVideoConfigParamsEncMPEG4 * obj, uint * fixed_vop_time_increment) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, fixed_vop_time_increment); + *fixed_vop_time_increment = obj->fixed_vop_time_increment; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 * obj, - guint disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); - obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; - return 
MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk ( + MixVideoConfigParamsEncMPEG4 * obj, uint disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); + obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (MixVideoConfigParamsEncMPEG4 * obj, - guint * disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); - *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk ( + MixVideoConfigParamsEncMPEG4 * obj, uint * disable_deblocking_filter_idc) { + MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); + *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h index 733b34b..5efc6ea 100644 --- a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h +++ b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -13,99 +13,68 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideodef.h" - -/** -* MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4: -* -* Get type of class. -*/ -//#define MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4 (mix_videoconfigparamsenc_mpeg4_get_type ()) - /** * MIX_VIDEOCONFIGPARAMSENC_MPEG4: * @obj: object to be type-casted. */ -//#define MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4)) #define MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj) (reinterpret_cast(obj)) /** * MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4: * @obj: an object. -* +* * Checks if the given object is an instance of #MixVideoConfigParamsEncMPEG4 */ -//#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4)) - -/** -* MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS: -* @klass: class to be type-casted. -*/ -//#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class)) - -/** -* MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS: -* @klass: a class. 
-* -* Checks if the given class is #MixVideoConfigParamsEncMPEG4Class -*/ -//#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4)) - -/** -* MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS: -* @obj: a #MixParams object. -* -* Get the class instance of the object. -*/ -//#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class)) +#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj) (NULL != MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj)) -//typedef struct _MixVideoConfigParamsEncMPEG4Class MixVideoConfigParamsEncMPEG4Class; - /** * MixVideoConfigParamsEncMPEG4: * * MI-X VideoConfig Parameter object */ -class MixVideoConfigParamsEncMPEG4 : public MixVideoConfigParamsEnc -{ +class MixVideoConfigParamsEncMPEG4 : public MixVideoConfigParamsEnc { public: - /*< public > */ - - /* TODO: Add MPEG-4 configuration paramters */ - - /* Indicate profile and level. - * Default value is 3. - * Can be ignored (refer to encoding - * specification for more info). */ - guchar profile_and_level_indication; - - /* Number of ticks between two successive VOPs - * in display order. Default value is 3. - * Can be ignored (refer to encoding specification - * for more info) */ - guint fixed_vop_time_increment; - - /* enable/disable deblocking */ - guint disable_deblocking_filter_idc; - - /* Reserved for future use */ - void *reserved1; - - /* Reserved for future use */ - void *reserved2; - - /* Reserved for future use */ - void *reserved3; - - /* Reserved for future use */ - void *reserved4; -}; + MixVideoConfigParamsEncMPEG4(); + virtual ~MixVideoConfigParamsEncMPEG4(); + virtual bool copy(MixParams *target) const; + virtual bool equal(MixParams* obj) const; + virtual MixParams* dup() const; + +public: + /* TODO: Add MPEG-4 configuration paramters */ + /* Indicate profile and level. + * Default value is 3. + * Can be ignored (refer to encoding + * specification for more info). */ + uchar profile_and_level_indication; + + /* Number of ticks between two successive VOPs + * in display order. Default value is 3. + * Can be ignored (refer to encoding specification + * for more info) */ + uint fixed_vop_time_increment; + + /* enable/disable deblocking */ + uint disable_deblocking_filter_idc; + + /* Reserved for future use */ + void *reserved1; + /* Reserved for future use */ + void *reserved2; + + /* Reserved for future use */ + void *reserved3; + + /* Reserved for future use */ + void *reserved4; +}; /** * mix_videoconfigparamsenc_mpeg4_new: * @returns: A newly allocated instance of #MixVideoConfigParamsEncMPEG4 -* +* * Use this method to create new instance of #MixVideoConfigParamsEncMPEG4 */ MixVideoConfigParamsEncMPEG4 *mix_videoconfigparamsenc_mpeg4_new (void); @@ -113,16 +82,15 @@ MixVideoConfigParamsEncMPEG4 *mix_videoconfigparamsenc_mpeg4_new (void); * mix_videoconfigparamsenc_mpeg4_ref: * @mix: object to add reference * @returns: the #MixVideoConfigParamsEncMPEG4 instance where reference count has been increased. -* +* * Add reference count. */ -MixVideoConfigParamsEncMPEG4 - * mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix); +MixVideoConfigParamsEncMPEG4* mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix); /** * mix_videoconfigparamsenc_mpeg4_unref: * @obj: object to unref. -* +* * Decrement reference count of the object. 
*/ #define mix_videoconfigparamsenc_mpeg4_unref(obj) mix_params_unref(MIX_PARAMS(obj)) @@ -139,8 +107,8 @@ MixVideoConfigParamsEncMPEG4 * * Set the The flag to enable/disable deblocking */ -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 * obj, - guint disable_deblocking_filter_idc); +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk ( + MixVideoConfigParamsEncMPEG4 * obj, uint disable_deblocking_filter_idc); /** * mix_videoconfigparamsenc_mpeg4_get_dlk: @@ -150,21 +118,21 @@ MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 * * Get the The flag to enable/disable deblocking */ -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (MixVideoConfigParamsEncMPEG4 * obj, - guint * disable_deblocking_filter_idc); +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk ( + MixVideoConfigParamsEncMPEG4 * obj, uint * disable_deblocking_filter_idc); /** * mix_videoconfigparamsenc_mpeg4_set_profile_level: * @obj: #MixVideoConfigParamsEncMPEG4 object - * @profile_and_level_indication: Indicate profile and level. Default value is 3. - * Can be ignored (refer to encoding specification + * @profile_and_level_indication: Indicate profile and level. Default value is 3. + * Can be ignored (refer to encoding specification * for more info). * @returns: Common Video Error Return Codes * * Set profile_and_level_indication */ -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParamsEncMPEG4 * obj, - guchar profile_and_level_indication); +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level ( + MixVideoConfigParamsEncMPEG4 * obj, uchar profile_and_level_indication); /** * mix_videoconfigparamsenc_mpeg4_get_profile_level: @@ -174,21 +142,21 @@ MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParam * * Get profile_and_level_indication */ -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (MixVideoConfigParamsEncMPEG4 * obj, - guchar * profile_and_level_indication); +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level ( + MixVideoConfigParamsEncMPEG4 * obj, uchar * profile_and_level_indication); /** * mix_videoconfigparamsenc_mpeg4_get_profile_level: * @obj: #MixVideoConfigParamsEncMPEG4 object - * @fixed_vop_time_increment: Number of ticks between two successive VOPs in display order. - * Default value is 3. Can be ignored (refer to encoding specification + * @fixed_vop_time_increment: Number of ticks between two successive VOPs in display order. + * Default value is 3. 
Can be ignored (refer to encoding specification * for more info) * @returns: Common Video Error Return Codes * * Set fixed_vop_time_increment */ -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, - guint fixed_vop_time_increment); +MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti ( + MixVideoConfigParamsEncMPEG4 * obj, uint fixed_vop_time_increment); /** * mix_videoconfigparamsenc_mpeg4_get_fixed_vti: @@ -198,9 +166,8 @@ MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEnc * * Get fixed_vop_time_increment */ -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj, - guint * fixed_vop_time_increment); - +MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti ( + MixVideoConfigParamsEncMPEG4 * obj, uint * fixed_vop_time_increment); #endif /* __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.cpp b/mix_video/src/mixvideoconfigparamsenc_preview.cpp index 890aaac..ecf73d2 100644 --- a/mix_video/src/mixvideoconfigparamsenc_preview.cpp +++ b/mix_video/src/mixvideoconfigparamsenc_preview.cpp @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
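[Editor's sketch, not part of the patch: putting the MPEG-4 setters from the header above together. The values mirror the documented defaults and are illustrative only.]

    MixVideoConfigParamsEncMPEG4 *m4 = mix_videoconfigparamsenc_mpeg4_new();
    mix_videoconfigparamsenc_mpeg4_set_profile_level(m4, 3); // profile/level indication
    mix_videoconfigparamsenc_mpeg4_set_fixed_vti(m4, 3);     // ticks between VOPs
    mix_videoconfigparamsenc_mpeg4_set_dlk(m4, 0);           // disable_deblocking_filter_idc
    mix_videoconfigparamsenc_mpeg4_unref(m4);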
@@ -18,200 +18,30 @@ No license under any patent, copyright, trade secret or other intellectual prope #define MDEBUG - -static GType _mix_videoconfigparamsenc_preview_type = 0; -static MixVideoConfigParamsEncClass *parent_class = NULL; - -#define _do_init { _mix_videoconfigparamsenc_preview_type = g_define_type_id; } - -gboolean mix_videoconfigparamsenc_preview_copy (MixParams * target, - const MixParams * src); -MixParams *mix_videoconfigparamsenc_preview_dup (const MixParams * obj); -gboolean mix_videoconfigparamsencenc_preview_equal (MixParams * first, - MixParams * second); -static void mix_videoconfigparamsenc_preview_finalize (MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncPreview, /* The name of the new type, in Camel case */ - mix_videoconfigparamsenc_preview, /* The name of the new type in lowercase */ - MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */ - _do_init); - -void -_mix_videoconfigparamsenc_preview_initialize (void) -{ - /* the MixParams types need to be class_ref'd once before it can be - * done from multiple threads; - * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */ - g_type_class_ref (mix_videoconfigparamsenc_preview_get_type ()); -} - -static void -mix_videoconfigparamsenc_preview_init (MixVideoConfigParamsEncPreview * self) -{ - /* initialize properties here */ - /* TODO: initialize properties */ -} - -static void -mix_videoconfigparamsenc_preview_class_init (MixVideoConfigParamsEncPreviewClass * klass) -{ - MixVideoConfigParamsEncClass *this_parent_class = - MIX_VIDEOCONFIGPARAMSENC_CLASS (klass); - MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class); - - /* setup static parent class */ - parent_class = - (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass); - - this_root_class->finalize = mix_videoconfigparamsenc_preview_finalize; - this_root_class->copy = - (MixParamsCopyFunction) mix_videoconfigparamsenc_preview_copy; - this_root_class->dup = - (MixParamsDupFunction) mix_videoconfigparamsenc_preview_dup; - this_root_class->equal = - (MixParamsEqualFunction) mix_videoconfigparamsencenc_preview_equal; +MixVideoConfigParamsEncPreview::MixVideoConfigParamsEncPreview() { } -MixVideoConfigParamsEncPreview * -mix_videoconfigparamsenc_preview_new (void) -{ - MixVideoConfigParamsEncPreview *ret = (MixVideoConfigParamsEncPreview *) - g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW); - - return ret; -} - -void -mix_videoconfigparamsenc_preview_finalize (MixParams * obj) -{ - /* MixVideoConfigParamsEncPreview *this_obj = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (obj); */ - MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class); - - /* TODO: cleanup resources allocated */ - - /* Chain up parent */ - - if (root_class->finalize) - { - root_class->finalize (obj); +MixParams* MixVideoConfigParamsEncPreview::dup() const { + MixParams *ret = NULL; + MixVideoConfigParamsEncPreview *duplicate = new MixVideoConfigParamsEncPreview(); + if (TRUE == copy(duplicate)) { + ret = duplicate; + } else { + if (NULL != duplicate) + duplicate->Unref(); } + return ret; } -MixVideoConfigParamsEncPreview - * mix_videoconfigparamsenc_preview_ref (MixVideoConfigParamsEncPreview * mix) -{ - return (MixVideoConfigParamsEncPreview *) mix_params_ref (MIX_PARAMS (mix)); -} - -/** -* mix_videoconfigparamsenc_preview_dup: -* @obj: a #MixVideoConfigParams object -* @returns: a newly allocated duplicate of the object. -* -* Copy duplicate of the object. 
-*/ -MixParams * -mix_videoconfigparamsenc_preview_dup (const MixParams * obj) -{ - MixParams *ret = NULL; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (obj)) - { - MixVideoConfigParamsEncPreview *duplicate = mix_videoconfigparamsenc_preview_new (); - if (mix_videoconfigparamsenc_preview_copy - (MIX_PARAMS (duplicate), MIX_PARAMS (obj))) - { - ret = MIX_PARAMS (duplicate); - } - else - { - mix_videoconfigparamsenc_preview_unref (duplicate); - } - } - return ret; -} - -/** -* mix_videoconfigparamsenc_preview_copy: -* @target: copy to target -* @src: copy from src -* @returns: boolean indicates if copy is successful. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videoconfigparamsenc_preview_copy (MixParams * target, const MixParams * src) -{ - MixVideoConfigParamsEncPreview *this_target, *this_src; - MixParamsClass *root_class; - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (target) - && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (src)) - { - // Cast the base object to this child object - this_target = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (target); - this_src = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (src); - - // Now chainup base class - root_class = MIX_PARAMS_CLASS (parent_class); - - if (root_class->copy) - { - return root_class->copy (MIX_PARAMS_CAST (target), - MIX_PARAMS_CAST (src)); - } - else - { - return TRUE; - } - } - return FALSE; +MixVideoConfigParamsEncPreview * +mix_videoconfigparamsenc_preview_new (void) { + return new MixVideoConfigParamsEncPreview(); } -/** -* mix_videoconfigparamsenc_preview: -* @first: first object to compare -* @second: seond object to compare -* @returns: boolean indicates if instance are equal. -* -* Copy instance data from @src to @target. -*/ -gboolean -mix_videoconfigparamsencenc_preview_equal (MixParams * first, MixParams * second) -{ - gboolean ret = FALSE; - MixVideoConfigParamsEncPreview *this_first, *this_second; - - if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (first) - && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (second)) - { - // Cast the base object to this child object - - this_first = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (first); - this_second = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (second); - +MixVideoConfigParamsEncPreview* +mix_videoconfigparamsenc_preview_ref (MixVideoConfigParamsEncPreview * mix) { + if (NULL != mix) + mix->Ref(); + return mix; - ret = TRUE; - - - /* TODO: add comparison for properties */ - { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class); - if (klass->equal) - { - ret = klass->equal (first, second); - } - else - { - ret = TRUE; - } - } - } - - return ret; } - -/* TODO: Add getters and setters for properties if any */ diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.h b/mix_video/src/mixvideoconfigparamsenc_preview.h index ddfe075..9ea2a67 100644 --- a/mix_video/src/mixvideoconfigparamsenc_preview.h +++ b/mix_video/src/mixvideoconfigparamsenc_preview.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. 
The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -12,113 +12,62 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoconfigparamsenc.h" #include "mixvideodef.h" -G_BEGIN_DECLS - -/** -* MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW: -* -* Get type of class. -*/ -#define MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW (mix_videoconfigparamsenc_preview_get_type ()) /** * MIX_VIDEOCONFIGPARAMSENC_PREVIEW: * @obj: object to be type-casted. */ -#define MIX_VIDEOCONFIGPARAMSENC_PREVIEW(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW, MixVideoConfigParamsEncPreview)) +#define MIX_VIDEOCONFIGPARAMSENC_PREVIEW(obj) (reinterpret_cast(obj)) /** * MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW: * @obj: an object. -* +* * Checks if the given object is an instance of #MixVideoConfigParamsEncPreview */ -#define MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW)) +#define MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(obj) (NULL != MIX_VIDEOCONFIGPARAMSENC_PREVIEW(obj)) -/** -* MIX_VIDEOCONFIGPARAMSENC_PREVIEW_CLASS: -* @klass: class to be type-casted. -*/ -#define MIX_VIDEOCONFIGPARAMSENC_PREVIEW_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW, MixVideoConfigParamsEncPreviewClass)) - -/** -* MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW_CLASS: -* @klass: a class. -* -* Checks if the given class is #MixVideoConfigParamsEncPreviewClass -*/ -#define MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW)) - -/** -* MIX_VIDEOCONFIGPARAMSENC_PREVIEW_GET_CLASS: -* @obj: a #MixParams object. -* -* Get the class instance of the object. -*/ -#define MIX_VIDEOCONFIGPARAMSENC_PREVIEW_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW, MixVideoConfigParamsEncPreviewClass)) -typedef struct _MixVideoConfigParamsEncPreview MixVideoConfigParamsEncPreview; -typedef struct _MixVideoConfigParamsEncPreviewClass MixVideoConfigParamsEncPreviewClass; /** * MixVideoConfigParamsEncPreview: * * MI-X VideoConfig Parameter object */ -struct _MixVideoConfigParamsEncPreview -{ - /*< public > */ - MixVideoConfigParamsEnc parent; - +class MixVideoConfigParamsEncPreview : public MixVideoConfigParamsEnc { +public: + MixVideoConfigParamsEncPreview(); + virtual MixParams* dup() const; }; -/** -* MixVideoConfigParamsEncPreviewClass: -* -* MI-X VideoConfig object class -*/ -struct _MixVideoConfigParamsEncPreviewClass -{ - /*< public > */ - MixVideoConfigParamsEncClass parent_class; - /* class members */ -}; - -/** -* mix_videoconfigparamsenc_preview_get_type: -* @returns: type -* -* Get the type of object. 
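
One behavioral consequence of the cast-based macros above: with G_TYPE_CHECK_INSTANCE_TYPE gone, MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(obj) reduces to a NULL test on a reinterpret_cast, so it no longer verifies the dynamic type. A small illustration (not_a_params_object is hypothetical):

    int not_a_params_object = 0;
    // reinterpret_cast never yields NULL for a non-NULL input, so this
    // "type check" passes for any non-NULL pointer:
    bool looks_ok = MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(&not_a_params_object);  // TRUE

Where a real runtime check is still wanted, the same patch uses dynamic_cast instead (see the MIX_VIDEOFORMAT macro in mixvideoformat.h below), which does return NULL on a type mismatch.
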
-*/ -GType mix_videoconfigparamsenc_preview_get_type (void); /** * mix_videoconfigparamsenc_preview_new: * @returns: A newly allocated instance of #MixVideoConfigParamsEncPreview -* +* * Use this method to create new instance of #MixVideoConfigParamsEncPreview */ -MixVideoConfigParamsEncPreview *mix_videoconfigparamsenc_preview_new (void); +MixVideoConfigParamsEncPreview* mix_videoconfigparamsenc_preview_new (void); /** * mix_videoconfigparamsenc_preview_ref: * @mix: object to add reference * @returns: the MixVideoConfigParamsEncPreview instance where reference count has been increased. -* +* * Add reference count. */ -MixVideoConfigParamsEncPreview - * mix_videoconfigparamsenc_preview_ref (MixVideoConfigParamsEncPreview * mix); +MixVideoConfigParamsEncPreview* mix_videoconfigparamsenc_preview_ref ( + MixVideoConfigParamsEncPreview * mix); /** * mix_videoconfigparamsenc_preview_unref: * @obj: object to unref. -* +* * Decrement reference count of the object. */ #define mix_videoconfigparamsenc_preview_unref(obj) mix_params_unref(MIX_PARAMS(obj)) -G_END_DECLS + #endif /* __MIX_VIDEOCONFIGPARAMSENC_PREVIEW_H__ */ diff --git a/mix_video/src/mixvideodecodeparams.cpp b/mix_video/src/mixvideodecodeparams.cpp index 7ac2179..a0965eb 100644 --- a/mix_video/src/mixvideodecodeparams.cpp +++ b/mix_video/src/mixvideodecodeparams.cpp @@ -10,7 +10,7 @@ * SECTION:mixvideodecodeparams * @short_description: MI-X Video Decode Paramters * - * The #MixVideoDecodeParams object will be created by the MMF/App + * The #MixVideoDecodeParams object will be created by the MMF/App * and provided to MixVideo in the MixVideo mix_video_decode() function. */ @@ -19,107 +19,107 @@ #define MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT(obj) \ if(!obj) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEODECODEPARAMS(obj)) return MIX_RESULT_FAIL; \ - + #define MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT(obj, prop) \ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEODECODEPARAMS(obj)) return MIX_RESULT_FAIL; \ - + MixVideoDecodeParams::MixVideoDecodeParams() - :timestamp(0) - ,discontinuity(FALSE) - ,new_sequence(FALSE) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) { + :timestamp(0) + ,discontinuity(FALSE) + ,new_sequence(FALSE) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { } MixVideoDecodeParams::~MixVideoDecodeParams() { } -gboolean MixVideoDecodeParams::copy(MixParams *target) const { - gboolean ret = FALSE; - MixVideoDecodeParams * this_target = MIX_VIDEODECODEPARAMS(target); - if (NULL != this_target) { - // chain up base class - ret = MixParams::copy(target); - } - return ret; +bool MixVideoDecodeParams::copy(MixParams *target) const { + bool ret = FALSE; + MixVideoDecodeParams * this_target = MIX_VIDEODECODEPARAMS(target); + if (NULL != this_target) { + // chain up base class + ret = MixParams::copy(target); + } + return ret; } -gboolean MixVideoDecodeParams::equal(MixParams* obj) const { - gboolean ret = FALSE; - MixVideoDecodeParams * this_obj = MIX_VIDEODECODEPARAMS(obj); - if (NULL != this_obj) - ret = MixParams::equal(this_obj); - return ret; +bool MixVideoDecodeParams::equal(MixParams* obj) const { + bool ret = FALSE; + MixVideoDecodeParams * this_obj = MIX_VIDEODECODEPARAMS(obj); + if (NULL != this_obj) + ret = MixParams::equal(this_obj); + return ret; } MixParams* MixVideoDecodeParams::dup() const { - MixParams *ret = new MixVideoDecodeParams(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; + 
MixParams *ret = new MixVideoDecodeParams(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; } MixVideoDecodeParams * mix_videodecodeparams_new(void) { - return new MixVideoDecodeParams(); + return new MixVideoDecodeParams(); } MixVideoDecodeParams * mix_videodecodeparams_ref(MixVideoDecodeParams * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } /* TODO: Add getters and setters for properties. */ MIX_RESULT mix_videodecodeparams_set_timestamp( - MixVideoDecodeParams * obj, guint64 timestamp) { - MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->timestamp = timestamp; - return MIX_RESULT_SUCCESS; + MixVideoDecodeParams * obj, uint64 timestamp) { + MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->timestamp = timestamp; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videodecodeparams_get_timestamp( - MixVideoDecodeParams * obj, guint64 * timestamp) { - MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, timestamp); - *timestamp = obj->timestamp; - return MIX_RESULT_SUCCESS; + MixVideoDecodeParams * obj, uint64 * timestamp) { + MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, timestamp); + *timestamp = obj->timestamp; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videodecodeparams_set_discontinuity( - MixVideoDecodeParams * obj, gboolean discontinuity) { - MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->discontinuity = discontinuity; - return MIX_RESULT_SUCCESS; + MixVideoDecodeParams * obj, bool discontinuity) { + MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->discontinuity = discontinuity; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videodecodeparams_get_discontinuity( - MixVideoDecodeParams * obj, gboolean *discontinuity) { - MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity); - *discontinuity = obj->discontinuity; - return MIX_RESULT_SUCCESS; + MixVideoDecodeParams * obj, bool *discontinuity) { + MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity); + *discontinuity = obj->discontinuity; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videodecodeparams_set_new_sequence( - MixVideoDecodeParams * obj, gboolean new_sequence) { - MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->new_sequence = new_sequence; - return MIX_RESULT_SUCCESS; + MixVideoDecodeParams * obj, bool new_sequence) { + MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->new_sequence = new_sequence; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videodecodeparams_get_new_sequence( - MixVideoDecodeParams * obj, gboolean *new_sequence) { - MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, new_sequence); - *new_sequence = obj->new_sequence; - return MIX_RESULT_SUCCESS; + MixVideoDecodeParams * obj, bool *new_sequence) { + MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, new_sequence); + *new_sequence = obj->new_sequence; + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideodecodeparams.h b/mix_video/src/mixvideodecodeparams.h index 3d4d72f..1da4cdb 100644 --- a/mix_video/src/mixvideodecodeparams.h +++ b/mix_video/src/mixvideodecodeparams.h @@ -36,34 +36,34 @@ class MixVideoDecodeParams : public MixParams { public: MixVideoDecodeParams(); ~MixVideoDecodeParams(); - virtual gboolean copy(MixParams *target) const; - virtual gboolean equal(MixParams* obj) const; + virtual bool copy(MixParams *target) const; + virtual bool equal(MixParams* obj) const; virtual MixParams* dup() const; public: - /*< public > */ - /* TODO: Add properties */ - - /* Presentation timestamp for the 
video - * frame data, in milliseconds */ - guint64 timestamp; - - /* Indicates a discontinuity in the stream */ - gboolean discontinuity; + /*< public > */ + /* TODO: Add properties */ + + /* Presentation timestamp for the video + * frame data, in milliseconds */ + uint64 timestamp; + + /* Indicates a discontinuity in the stream */ + bool discontinuity; /* output only, indicate if stream contains a new sequence */ - gboolean new_sequence; - - /* Reserved for future use */ - void *reserved1; - - /* Reserved for future use */ - void *reserved2; - - /* Reserved for future use */ - void *reserved3; - - /* Reserved for future use */ - void *reserved4; + bool new_sequence; + + /* Reserved for future use */ + void *reserved1; + + /* Reserved for future use */ + void *reserved2; + + /* Reserved for future use */ + void *reserved3; + + /* Reserved for future use */ + void *reserved4; }; /** @@ -98,71 +98,71 @@ MixVideoDecodeParams *mix_videodecodeparams_ref(MixVideoDecodeParams * mix); /** * mix_videodecodeparams_set_timestamp: * @obj: #MixVideoDecodeParams object - * @timestamp: Presentation timestamp for the video frame data, in milliseconds + * @timestamp: Presentation timestamp for the video frame data, in milliseconds * @returns: Common Video Error Return Codes * - * Set Presentation timestamp + * Set Presentation timestamp */ MIX_RESULT mix_videodecodeparams_set_timestamp(MixVideoDecodeParams * obj, - guint64 timestamp); + uint64 timestamp); /** * mix_videodecodeparams_get_timestamp: * @obj: #MixVideoDecodeParams object - * @timestamp: Presentation timestamp for the video frame data, in milliseconds to be returned. + * @timestamp: Presentation timestamp for the video frame data, in milliseconds to be returned. * @returns: Common Video Error Return Codes * - * Get Presentation timestamp + * Get Presentation timestamp */ MIX_RESULT mix_videodecodeparams_get_timestamp(MixVideoDecodeParams * obj, - guint64 * timestamp); + uint64 * timestamp); /** * mix_videodecodeparams_set_discontinuity: * @obj: #MixVideoDecodeParams object - * @discontinuity: Flag to indicate a discontinuity in the stream. + * @discontinuity: Flag to indicate a discontinuity in the stream. * @returns: Common Video Error Return Codes * * Set discontinuity flag */ MIX_RESULT mix_videodecodeparams_set_discontinuity(MixVideoDecodeParams * obj, - gboolean discontinuity); + bool discontinuity); /** * mix_videodecodeparams_get_discontinuity: * @obj: #MixVideoDecodeParams object - * @discontinuity: Discontinuity flag to be returned + * @discontinuity: Discontinuity flag to be returned * @returns: Common Video Error Return Codes * * Get discontinuity flag */ MIX_RESULT mix_videodecodeparams_get_discontinuity(MixVideoDecodeParams * obj, - gboolean *discontinuity); + bool *discontinuity); /** * mix_videodecodeparams_set_new_sequence: * @obj: #MixVideoDecodeParams object - * @new_sequence: Flag to indicate if stream contains a new sequence. + * @new_sequence: Flag to indicate if stream contains a new sequence. 
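
The accessor pairs above keep the C-style API while the storage moves from GLib types to plain C++ types. A minimal usage sketch, assuming the declarations from mixvideodecodeparams.h (setup_decode_params is a hypothetical helper):

    #include "mixvideodecodeparams.h"

    // Sketch: fill in per-frame decode parameters. Timestamp is in
    // milliseconds, per the field comment above.
    static MIX_RESULT setup_decode_params(MixVideoDecodeParams *params, uint64 ts) {
        // Accessors NULL-check their arguments and report failure through
        // MIX_RESULT instead of crashing.
        MIX_RESULT res = mix_videodecodeparams_set_timestamp(params, ts);
        if (MIX_RESULT_SUCCESS != res)
            return res;
        return mix_videodecodeparams_set_discontinuity(params, FALSE);
    }
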
* @returns: Common Video Error Return Codes * * Set new_sequence flag */ MIX_RESULT mix_videodecodeparams_set_new_sequence(MixVideoDecodeParams * obj, - gboolean new_sequence); + bool new_sequence); /** * mix_videodecodeparams_get_new_sequence: * @obj: #MixVideoDecodeParams object - * @new_sequence: new_sequence flag to be returned + * @new_sequence: new_sequence flag to be returned * @returns: Common Video Error Return Codes * * Get new_sequence flag */ MIX_RESULT mix_videodecodeparams_get_new_sequence(MixVideoDecodeParams * obj, - gboolean *new_sequence); + bool *new_sequence); #endif /* __MIX_VIDEODECODEPARAMS_H__ */ diff --git a/mix_video/src/mixvideodef.h b/mix_video/src/mixvideodef.h index bac6e8c..aea6ad6 100644 --- a/mix_video/src/mixvideodef.h +++ b/mix_video/src/mixvideodef.h @@ -33,57 +33,59 @@ #include +#define MAX_ENC_SURFACE_COUNT 20 +#define MIX_STRING_TO_FOURCC(format) ((uint32)(((format)[0])|((format)[1]<<8)|((format)[2]<<16)|((format)[3]<<24))) /* * MI-X video error code */ typedef enum { - MIX_RESULT_FRAME_NOTAVAIL = MIX_RESULT_ERROR_VIDEO_START + 1, - MIX_RESULT_EOS, - MIX_RESULT_POOLEMPTY, - MIX_RESULT_OUTOFSURFACES, - MIX_RESULT_DROPFRAME, - MIX_RESULT_NOTIMPL, - MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW, - MIX_RESULT_NOT_PERMITTED, - MIX_RESULT_ERROR_PROCESS_STREAM, - MIX_RESULT_MISSING_CONFIG, - MIX_RESULT_VIDEO_LAST + MIX_RESULT_FRAME_NOTAVAIL = MIX_RESULT_ERROR_VIDEO_START + 1, + MIX_RESULT_EOS, + MIX_RESULT_POOLEMPTY, + MIX_RESULT_OUTOFSURFACES, + MIX_RESULT_DROPFRAME, + MIX_RESULT_NOTIMPL, + MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW, + MIX_RESULT_NOT_PERMITTED, + MIX_RESULT_ERROR_PROCESS_STREAM, + MIX_RESULT_MISSING_CONFIG, + MIX_RESULT_VIDEO_LAST } MIX_VIDEO_ERROR_CODE; /* MixCodecMode */ typedef enum { - MIX_CODEC_MODE_ENCODE = 0, - MIX_CODEC_MODE_DECODE, - MIX_CODEC_MODE_LAST + MIX_CODEC_MODE_ENCODE = 0, + MIX_CODEC_MODE_DECODE, + MIX_CODEC_MODE_LAST } MixCodecMode; typedef enum { - MIX_FRAMEORDER_MODE_DISPLAYORDER = 0, - MIX_FRAMEORDER_MODE_DECODEORDER, - MIX_FRAMEORDER_MODE_LAST + MIX_FRAMEORDER_MODE_DISPLAYORDER = 0, + MIX_FRAMEORDER_MODE_DECODEORDER, + MIX_FRAMEORDER_MODE_LAST } MixFrameOrderMode; typedef struct _MixIOVec { - guchar *data; - gint buffer_size; - gint data_size; + uchar *data; + int buffer_size; + int data_size; } MixIOVec; typedef struct _MixRect { - gshort x; - gshort y; - gushort width; - gushort height; + short x; + short y; + ushort width; + ushort height; } MixRect; typedef enum { - MIX_STATE_UNINITIALIZED = 0, - MIX_STATE_INITIALIZED, - MIX_STATE_CONFIGURED, - MIX_STATE_LAST + MIX_STATE_UNINITIALIZED = 0, + MIX_STATE_INITIALIZED, + MIX_STATE_CONFIGURED, + MIX_STATE_LAST } MixState; @@ -93,6 +95,7 @@ typedef enum MIX_RAW_TARGET_FORMAT_YUV420 = 1, MIX_RAW_TARGET_FORMAT_YUV422 = 2, MIX_RAW_TARGET_FORMAT_YUV444 = 4, + MIX_RAW_TARGET_FORMAT_NV12 = 8, MIX_RAW_TARGET_FORMAT_PROTECTED = 0x80000000, MIX_RAW_TARGET_FORMAT_LAST } MixRawTargetFormat; @@ -149,57 +152,106 @@ typedef enum { typedef struct _MixAIRParams { - guint air_MBs; - guint air_threshold; - guint air_auto; + uint air_MBs; + uint air_threshold; + uint air_auto; } MixAIRParams; typedef enum { - MIX_ENC_PARAMS_START_UNUSED = 0x01000000, - MIX_ENC_PARAMS_BITRATE, - MIX_ENC_PARAMS_INIT_QP, - MIX_ENC_PARAMS_MIN_QP, - MIX_ENC_PARAMS_WINDOW_SIZE, - MIX_ENC_PARAMS_TARGET_PERCENTAGE, - MIX_ENC_PARAMS_SLICE_NUM, - MIX_ENC_PARAMS_I_SLICE_NUM, - MIX_ENC_PARAMS_P_SLICE_NUM, - MIX_ENC_PARAMS_RESOLUTION, - MIX_ENC_PARAMS_GOP_SIZE, - MIX_ENC_PARAMS_FRAME_RATE, - MIX_ENC_PARAMS_FORCE_KEY_FRAME, - 
MIX_ENC_PARAMS_IDR_INTERVAL, - MIX_ENC_PARAMS_RC_MODE, - MIX_ENC_PARAMS_MTU_SLICE_SIZE, - MIX_ENC_PARAMS_REFRESH_TYPE, - MIX_ENC_PARAMS_AIR, - MIX_ENC_PARAMS_CIR_FRAME_CNT, - MIX_ENC_PARAMS_LAST + MIX_ENC_PARAMS_START_UNUSED = 0x01000000, + MIX_ENC_PARAMS_BITRATE, + MIX_ENC_PARAMS_INIT_QP, + MIX_ENC_PARAMS_MIN_QP, + MIX_ENC_PARAMS_WINDOW_SIZE, + MIX_ENC_PARAMS_TARGET_PERCENTAGE, + MIX_ENC_PARAMS_SLICE_NUM, + MIX_ENC_PARAMS_I_SLICE_NUM, + MIX_ENC_PARAMS_P_SLICE_NUM, + MIX_ENC_PARAMS_RESOLUTION, + MIX_ENC_PARAMS_GOP_SIZE, + MIX_ENC_PARAMS_FRAME_RATE, + MIX_ENC_PARAMS_FORCE_KEY_FRAME, + MIX_ENC_PARAMS_IDR_INTERVAL, + MIX_ENC_PARAMS_RC_MODE, + MIX_ENC_PARAMS_MTU_SLICE_SIZE, + MIX_ENC_PARAMS_REFRESH_TYPE, + MIX_ENC_PARAMS_AIR, + MIX_ENC_PARAMS_CIR_FRAME_CNT, + MIX_ENC_PARAMS_LAST } MixEncParamsType; typedef struct _MixEncDynamicParams { - guint bitrate; - guint init_QP; - guint min_QP; - guint window_size; - guint target_percentage; - guint slice_num; - guint I_slice_num; - guint P_slice_num; - guint width; - guint height; - guint frame_rate_num; - guint frame_rate_denom; - guint intra_period; - guint idr_interval; - guint CIR_frame_cnt; - guint max_slice_size; - gboolean force_idr; - MixRateControl rc_mode; - MixVideoIntraRefreshType refresh_type; - MixAIRParams air_params; + uint bitrate; + uint init_QP; + uint min_QP; + uint window_size; + uint target_percentage; + uint slice_num; + uint I_slice_num; + uint P_slice_num; + uint width; + uint height; + uint frame_rate_num; + uint frame_rate_denom; + uint intra_period; + uint idr_interval; + uint CIR_frame_cnt; + uint max_slice_size; + bool force_idr; + MixRateControl rc_mode; + MixVideoIntraRefreshType refresh_type; + MixAIRParams air_params; } MixEncDynamicParams; +typedef enum +{ + MIX_BUFFER_ALLOC_NORMAL = 1, //Means non shared buffer mode + MIX_BUFFER_UPSTREAM_ALLOC_CI = 2, + MIX_BUFFER_UPSTREAM_ALLOC_V4L2 = 4, + MIX_BUFFER_UPSTREAM_ALLOC_SURFACE = 8, + MIX_BUFFER_SELF_ALLOC_SURFACE = 16, + MIX_BUFFER_LAST +} MixBufferAllocationMode; + +typedef enum +{ + MIX_OUTPUT_BUFFER_NORMAL = 0, //Output whatever driver generates + MIX_OUTPUT_BUFFER_SEPARATED_HEADER = 1, //Applications need to pass at least two buffers, one for header and the other for data. 
And once this output mode is used, we will generate a new header + MIX_OUTPUT_BUFFER_LAST +} MixOutputEncBufferMode; + +typedef struct _MixCISharedBufferInfo { + uint ci_frame_cnt; + ulong * ci_frame_id; +} MixCISharedBufferInfo ; + +typedef struct _MixV4l2SharedBufferInfo { + int v4l2_fd; + uint v4l2_buf_cnt; + void * v4l2_fmt; + void ** v4l2_buf; +} MixV4l2SharedBufferInfo; + +typedef struct _MixSurfaceSharedBufferInfo { + uint surface_cnt; + ulong *surface; +} MixSurfaceSharedBufferInfo; + +/* + * This union is defined for upstreamer buffer info Set/Get + */ +typedef union _MixSharedBufferInfo { + MixCISharedBufferInfo ci_buffer_info; + MixV4l2SharedBufferInfo v4l2_buffer_info; + MixSurfaceSharedBufferInfo surface_buffer_info; +} MixSharedBufferInfo; + + +typedef struct _MixUserReqSurfacesInfo { + uint surface_allocated[MAX_ENC_SURFACE_COUNT]; + uint8 * usrptr[MAX_ENC_SURFACE_COUNT]; + uint surface_cnt; +} MixUsrReqSurfacesInfo; #endif /* __MIX_VIDEO_DEF_H__ */ diff --git a/mix_video/src/mixvideoencodeparams.cpp b/mix_video/src/mixvideoencodeparams.cpp index 52be78f..6c8a32a 100644 --- a/mix_video/src/mixvideoencodeparams.cpp +++ b/mix_video/src/mixvideoencodeparams.cpp @@ -10,200 +10,129 @@ * SECTION:mixvideoencodeparams * @short_description: MI-X Video Encode Parameters * - * The #MixVideoEncodeParams object will be created by - * the MMF/App and provided to #MixVideo in the #MixVideo - * mix_video_encode() function. Get methods for the - * properties will be available for the caller to - * retrieve configuration information. Currently this + * The #MixVideoEncodeParams object will be created by + * the MMF/App and provided to #MixVideo in the #MixVideo + * mix_video_encode() function. Get methods for the + * properties will be available for the caller to + * retrieve configuration information. Currently this * object is reserved for future use. 
*/ #include "mixvideoencodeparams.h" -static GType _mix_videoencodeparams_type = 0; -static MixParamsClass *parent_class = NULL; - -#define _do_init { _mix_videoencodeparams_type = g_define_type_id; } - -gboolean mix_videoencodeparams_copy(MixParams * target, const MixParams * src); -MixParams *mix_videoencodeparams_dup(const MixParams * obj); -gboolean mix_videoencodeparams_equal(MixParams * first, MixParams * second); -static void mix_videoencodeparams_finalize(MixParams * obj); - -G_DEFINE_TYPE_WITH_CODE (MixVideoEncodeParams, mix_videoencodeparams, - MIX_TYPE_PARAMS, _do_init); - -static void mix_videoencodeparams_init(MixVideoEncodeParams * self) { - /* initialize properties here */ - - /* TODO: initialize properties */ - - self->timestamp = 0; - self->discontinuity = FALSE; - self->reserved1 = NULL; - self->reserved2 = NULL; - self->reserved3 = NULL; - self->reserved4 = NULL; +#define MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT(obj) \ + if(!obj) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \ + +#define MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT(obj, prop) \ + if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ + if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \ + + +MixVideoEncodeParams::MixVideoEncodeParams() + :timestamp(0) + ,discontinuity(FALSE) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { } -static void mix_videoencodeparams_class_init(MixVideoEncodeParamsClass * klass) { - MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixParamsClass *) g_type_class_peek_parent(klass); - - mixparams_class->finalize = mix_videoencodeparams_finalize; - mixparams_class->copy = (MixParamsCopyFunction) mix_videoencodeparams_copy; - mixparams_class->dup = (MixParamsDupFunction) mix_videoencodeparams_dup; - mixparams_class->equal - = (MixParamsEqualFunction) mix_videoencodeparams_equal; +MixVideoEncodeParams::~MixVideoEncodeParams() { } + MixVideoEncodeParams * mix_videoencodeparams_new(void) { - MixVideoEncodeParams *ret = - (MixVideoEncodeParams *) g_type_create_instance( - MIX_TYPE_VIDEOENCODEPARAMS); - - return ret; + return new MixVideoEncodeParams(); } -void mix_videoencodeparams_finalize(MixParams * obj) { - /* clean up here. */ - /* TODO: cleanup resources allocated */ - - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } -} MixVideoEncodeParams * mix_videoencodeparams_ref(MixVideoEncodeParams * mix) { - return (MixVideoEncodeParams *) mix_params_ref(MIX_PARAMS(mix)); + if (NULL != mix) + mix->Ref(); + return mix; } /** - * mix_videoencodeparams_dup: - * @obj: a #MixVideoEncodeParams object + * dup: * @returns: a newly allocated duplicate of the object. * * Copy duplicate of the object. */ -MixParams * -mix_videoencodeparams_dup(const MixParams * obj) { - MixParams *ret = NULL; - - if (MIX_IS_VIDEOENCODEPARAMS(obj)) { - MixVideoEncodeParams *duplicate = mix_videoencodeparams_new(); - if (mix_videoencodeparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) { - ret = MIX_PARAMS(duplicate); - } else { - mix_videoencodeparams_unref(duplicate); - } - } - return ret; +MixParams *MixVideoEncodeParams::dup() const { + MixParams *ret = new MixVideoEncodeParams(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; } /** - * mix_videoencodeparams_copy: + * copy: * @target: copy to target - * @src: copy from src * @returns: boolean indicates if copy is successful. 
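
A note on the NULL test in dup() above: plain operator new throws std::bad_alloc on failure rather than returning NULL, so the check is only reachable with a non-throwing allocation. A sketch of the same pattern with std::nothrow, offered as an assumption about the intent here, not as part of the patch:

    #include <new>  // std::nothrow

    MixParams *MixVideoEncodeParams::dup() const {
        // Non-throwing form: ret really can be NULL on allocation failure.
        MixVideoEncodeParams *ret = new (std::nothrow) MixVideoEncodeParams();
        if (NULL != ret && FALSE == copy(ret)) {
            ret->Unref();
            ret = NULL;
        }
        return ret;
    }
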
* * Copy instance data from @src to @target. */ -gboolean mix_videoencodeparams_copy(MixParams * target, const MixParams * src) { - MixVideoEncodeParams *this_target, *this_src; - - if (MIX_IS_VIDEOENCODEPARAMS(target) && MIX_IS_VIDEOENCODEPARAMS(src)) { - // Cast the base object to this child object - this_target = MIX_VIDEOENCODEPARAMS(target); - this_src = MIX_VIDEOENCODEPARAMS(src); - - // TODO: copy properties */ - - // Now chainup base class - if (parent_class->copy) { - return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST( - src)); - } else { - return TRUE; - } - } - return FALSE; +bool MixVideoEncodeParams::copy(MixParams * target) const { + bool ret = FALSE; + MixVideoEncodeParams *this_target = MIX_VIDEOENCODEPARAMS(target); + if (NULL!= this_target) { + // chain up base class + ret = MixParams::copy(this_target); + } + return ret; } /** - * mix_videoencodeparams_: - * @first: first object to compare - * @second: seond object to compare + * equal: + * @obj: the object to compare * @returns: boolean indicates if instance are equal. * * Copy instance data from @src to @target. */ -gboolean mix_videoencodeparams_equal(MixParams * first, MixParams * second) { - gboolean ret = FALSE; - MixVideoEncodeParams *this_first, *this_second; - - if (MIX_IS_VIDEOENCODEPARAMS(first) && MIX_IS_VIDEOENCODEPARAMS(second)) { - // Deep compare - // Cast the base object to this child object - - this_first = MIX_VIDEOENCODEPARAMS(first); - this_second = MIX_VIDEOENCODEPARAMS(second); - - /* TODO: add comparison for properties */ - /* if ( first properties == sencod properties) */ - { - // members within this scope equal. chaining up. - MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - } - } - - return ret; +bool MixVideoEncodeParams::equal(MixParams * obj) const { + bool ret = FALSE; + MixVideoEncodeParams * this_obj = MIX_VIDEOENCODEPARAMS(obj); + if (NULL != this_obj) + ret = MixParams::equal(this_obj); + return ret; } -#define MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \ /* TODO: Add getters and setters for properties. 
*/ -MIX_RESULT mix_videoencodeparams_set_timestamp(MixVideoEncodeParams * obj, - guint64 timestamp) { - MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->timestamp = timestamp; - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoencodeparams_set_timestamp( + MixVideoEncodeParams * obj, uint64 timestamp) { + MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->timestamp = timestamp; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoencodeparams_get_timestamp(MixVideoEncodeParams * obj, - guint64 * timestamp) { - MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, timestamp); - *timestamp = obj->timestamp; - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoencodeparams_get_timestamp( + MixVideoEncodeParams * obj, uint64 * timestamp) { + MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, timestamp); + *timestamp = obj->timestamp; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoencodeparams_set_discontinuity(MixVideoEncodeParams * obj, - gboolean discontinuity) { - MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->discontinuity = discontinuity; - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoencodeparams_set_discontinuity( + MixVideoEncodeParams * obj, bool discontinuity) { + MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj); + obj->discontinuity = discontinuity; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videoencodeparams_get_discontinuity(MixVideoEncodeParams * obj, - gboolean *discontinuity) { - MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity); - *discontinuity = obj->discontinuity; - return MIX_RESULT_SUCCESS; +MIX_RESULT mix_videoencodeparams_get_discontinuity( + MixVideoEncodeParams * obj, bool *discontinuity) { + MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity); + *discontinuity = obj->discontinuity; + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoencodeparams.h b/mix_video/src/mixvideoencodeparams.h index ac8e6c2..f2de76c 100644 --- a/mix_video/src/mixvideoencodeparams.h +++ b/mix_video/src/mixvideoencodeparams.h @@ -12,20 +12,12 @@ #include #include "mixvideodef.h" -G_BEGIN_DECLS - -/** - * MIX_TYPE_VIDEOENCODEPARAMS: - * - * Get type of class. - */ -#define MIX_TYPE_VIDEOENCODEPARAMS (mix_videoencodeparams_get_type ()) /** * MIX_VIDEOENCODEPARAMS: * @obj: object to be type-casted. */ -#define MIX_VIDEOENCODEPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOENCODEPARAMS, MixVideoEncodeParams)) +#define MIX_VIDEOENCODEPARAMS(obj) (reinterpret_cast(obj)) /** * MIX_IS_VIDEOENCODEPARAMS: @@ -33,84 +25,43 @@ G_BEGIN_DECLS * * Checks if the given object is an instance of #MixParams */ -#define MIX_IS_VIDEOENCODEPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOENCODEPARAMS)) +#define MIX_IS_VIDEOENCODEPARAMS(obj) ((NULL !=MIX_VIDEOENCODEPARAMS(obj)) ? TRUE : FALSE) -/** - * MIX_VIDEOENCODEPARAMS_CLASS: - * @klass: class to be type-casted. - */ -#define MIX_VIDEOENCODEPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOENCODEPARAMS, MixVideoEncodeParamsClass)) /** - * MIX_IS_VIDEOENCODEPARAMS_CLASS: - * @klass: a class. 
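
Reference counting for the converted params classes now lives on the C++ object itself, with the C entry points as thin wrappers, as mix_videoencodeparams_ref() above shows. A short lifetime sketch using only calls visible in this patch (share_encode_params is hypothetical):

    #include "mixvideoencodeparams.h"

    static void share_encode_params(void) {
        MixVideoEncodeParams *p = mix_videoencodeparams_new();
        if (NULL == p)
            return;
        MixVideoEncodeParams *q = mix_videoencodeparams_ref(p);  // count: 2
        mix_videoencodeparams_set_timestamp(q, 0);
        // Each owner drops its reference independently; the object is
        // destroyed when the count reaches zero.
        q->Unref();
        p->Unref();
    }
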
+ * MixVideoEncodeParams: * - * Checks if the given class is #MixParamsClass + * MI-X VideoDecode Parameter object */ -#define MIX_IS_VIDEOENCODEPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOENCODEPARAMS)) +class MixVideoEncodeParams :public MixParams { +public: + MixVideoEncodeParams(); + virtual ~MixVideoEncodeParams(); + virtual bool copy(MixParams * target) const; + virtual MixParams *dup() const; + virtual bool equal(MixParams * obj) const; -/** - * MIX_VIDEOENCODEPARAMS_GET_CLASS: - * @obj: a #MixParams object. - * - * Get the class instance of the object. - */ -#define MIX_VIDEOENCODEPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOENCODEPARAMS, MixVideoEncodeParamsClass)) +public: + /* TODO: Add properties */ + /* < private > */ + uint64 timestamp; + bool discontinuity; -typedef struct _MixVideoEncodeParams MixVideoEncodeParams; -typedef struct _MixVideoEncodeParamsClass MixVideoEncodeParamsClass; + /* < public > */ -/** - * MixVideoEncodeParams: - * - * MI-X VideoDecode Parameter object - */ -struct _MixVideoEncodeParams { - /*< public > */ - MixParams parent; - - /*< public > */ - - /* TODO: Add properties */ - - /* < private > */ - guint64 timestamp; - gboolean discontinuity; - - /* < public > */ - - /* Reserved for future use */ - void *reserved1; - - /* Reserved for future use */ - void *reserved2; - - /* Reserved for future use */ - void *reserved3; - - /* Reserved for future use */ - void *reserved4; -}; + /* Reserved for future use */ + void *reserved1; -/** - * MixVideoEncodeParamsClass: - * - * MI-X VideoDecode object class - */ -struct _MixVideoEncodeParamsClass { - /*< public > */ - MixParamsClass parent_class; + /* Reserved for future use */ + void *reserved2; - /* class members */ + /* Reserved for future use */ + void *reserved3; + + /* Reserved for future use */ + void *reserved4; }; -/** - * mix_videoencodeparams_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_videoencodeparams_get_type(void); /** * mix_videoencodeparams_new: @@ -139,17 +90,14 @@ MixVideoEncodeParams *mix_videoencodeparams_ref(MixVideoEncodeParams * mix); /* Class Methods */ /* TODO: Add getters and setters for properties */ -MIX_RESULT mix_videoencodeparams_set_timestamp(MixVideoEncodeParams * obj, - guint64 timestamp); -MIX_RESULT mix_videoencodeparams_get_timestamp(MixVideoEncodeParams * obj, - guint64 * timestamp); - -MIX_RESULT mix_videoencodeparams_set_discontinuity(MixVideoEncodeParams * obj, - gboolean discontinuity); -MIX_RESULT mix_videoencodeparams_get_discontinuity(MixVideoEncodeParams * obj, - gboolean *discontinuity); - -G_END_DECLS +MIX_RESULT mix_videoencodeparams_set_timestamp( + MixVideoEncodeParams * obj, uint64 timestamp); +MIX_RESULT mix_videoencodeparams_get_timestamp( + MixVideoEncodeParams * obj, uint64 * timestamp); +MIX_RESULT mix_videoencodeparams_set_discontinuity( + MixVideoEncodeParams * obj, bool discontinuity); +MIX_RESULT mix_videoencodeparams_get_discontinuity( + MixVideoEncodeParams * obj, bool *discontinuity); #endif /* __MIX_VIDEOENCODEPARAMS_H__ */ diff --git a/mix_video/src/mixvideoformat.cpp b/mix_video/src/mixvideoformat.cpp index 9c7ff42..09984cb 100644 --- a/mix_video/src/mixvideoformat.cpp +++ b/mix_video/src/mixvideoformat.cpp @@ -5,289 +5,279 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. 
Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#include + #include "mixvideolog.h" #include "mixvideoformat.h" +#include +#include #define MIXUNREF(obj, unref) if(obj) { unref(obj); obj = NULL; } MixVideoFormat::MixVideoFormat() - :mLock() - ,initialized(FALSE) - ,va_initialized(FALSE) - ,framemgr(NULL) - ,surfacepool(NULL) - ,inputbufpool(NULL) - ,inputbufqueue(NULL) - ,va_display(NULL) - ,va_context(VA_INVALID_ID) - ,va_config(VA_INVALID_ID) - ,va_surfaces(NULL) - ,va_num_surfaces(0) - ,mime_type(NULL) - ,frame_rate_num(0) - ,frame_rate_denom(0) - ,picture_width(0) - ,picture_height(0) - ,parse_in_progress(FALSE) - ,current_timestamp((guint64)-1) - ,end_picture_pending(FALSE) - ,video_frame(NULL) - ,extra_surfaces(0) - ,config_params(NULL) - ,ref_count(1) { + :mLock() + ,initialized(FALSE) + ,va_initialized(FALSE) + ,framemgr(NULL) + ,surfacepool(NULL) + ,inputbufpool(NULL) + ,inputbufqueue(NULL) + ,va_display(NULL) + ,va_context(VA_INVALID_ID) + ,va_config(VA_INVALID_ID) + ,va_surfaces(NULL) + ,va_num_surfaces(0) + ,mime_type(NULL) + ,frame_rate_num(0) + ,frame_rate_denom(0) + ,picture_width(0) + ,picture_height(0) + ,parse_in_progress(FALSE) + ,current_timestamp((uint64)-1) + ,end_picture_pending(FALSE) + ,video_frame(NULL) + ,extra_surfaces(0) + ,config_params(NULL) + ,ref_count(1) { } -MixVideoFormat::~MixVideoFormat(){ - /* clean up here. */ - VAStatus va_status; - MixInputBufferEntry *buf_entry = NULL; - - if (this->mime_type) { - if (this->mime_type->str) - g_string_free(this->mime_type, TRUE); - else - g_string_free(this->mime_type, FALSE); - } - - //MiVideo object calls the _deinitialize() for frame manager - MIXUNREF(this->framemgr, mix_framemanager_unref); - - if (this->surfacepool) { - mix_surfacepool_deinitialize(this->surfacepool); - MIXUNREF(this->surfacepool, mix_surfacepool_unref); - } - - if (this->config_params) { - mix_videoconfigparams_unref(this->config_params); - this->config_params = NULL; - } - - //libVA cleanup (vaTerminate is called from MixVideo object) - if (this->va_display) { - if (this->va_context != VA_INVALID_ID) { - va_status = vaDestroyContext(this->va_display, this->va_context); - if (va_status != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaDestroyContext\n"); - } - this->va_context = VA_INVALID_ID; - } - if (this->va_config != VA_INVALID_ID) { - va_status = vaDestroyConfig(this->va_display, this->va_config); - if (va_status != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaDestroyConfig\n"); - } - this->va_config = VA_INVALID_ID; - } - if (this->va_surfaces) { - va_status = vaDestroySurfaces(this->va_display, this->va_surfaces, this->va_num_surfaces); - if (va_status != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaDestroySurfaces\n"); - } - g_free(this->va_surfaces); - this->va_surfaces = NULL; - this->va_num_surfaces = 0; - } - } - - if (this->video_frame) { - mix_videoframe_unref(this->video_frame); - this->video_frame = NULL; - } - - //Deinit input buffer queue - while (!g_queue_is_empty(this->inputbufqueue)) { - buf_entry = reinterpret_cast(g_queue_pop_head(this->inputbufqueue)); - mix_buffer_unref(buf_entry->buf); - g_free(buf_entry); - } - - g_queue_free(this->inputbufqueue); - - //MixBuffer pool is deallocated in MixVideo object - this->inputbufpool = NULL; +MixVideoFormat::~MixVideoFormat() { + /* clean up here. 
*/ + VAStatus va_status; + MixInputBufferEntry *buf_entry = NULL; + + if (this->mime_type) { + free(this->mime_type); + } + + //MiVideo object calls the _deinitialize() for frame manager + MIXUNREF(this->framemgr, mix_framemanager_unref); + + if (this->surfacepool) { + mix_surfacepool_deinitialize(this->surfacepool); + MIXUNREF(this->surfacepool, mix_surfacepool_unref); + } + + if (this->config_params) { + mix_videoconfigparams_unref(this->config_params); + this->config_params = NULL; + } + + //libVA cleanup (vaTerminate is called from MixVideo object) + if (this->va_display) { + if (this->va_context != VA_INVALID_ID) { + va_status = vaDestroyContext(this->va_display, this->va_context); + if (va_status != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaDestroyContext\n"); + } + this->va_context = VA_INVALID_ID; + } + if (this->va_config != VA_INVALID_ID) { + va_status = vaDestroyConfig(this->va_display, this->va_config); + if (va_status != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaDestroyConfig\n"); + } + this->va_config = VA_INVALID_ID; + } + if (this->va_surfaces) { + va_status = vaDestroySurfaces(this->va_display, this->va_surfaces, this->va_num_surfaces); + if (va_status != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaDestroySurfaces\n"); + } + free(this->va_surfaces); + this->va_surfaces = NULL; + this->va_num_surfaces = 0; + } + } + + if (this->video_frame) { + mix_videoframe_unref(this->video_frame); + this->video_frame = NULL; + } + + //Deinit input buffer queue + while (!j_queue_is_empty(this->inputbufqueue)) { + buf_entry = reinterpret_cast(j_queue_pop_head(this->inputbufqueue)); + mix_buffer_unref(buf_entry->buf); + free(buf_entry); + } + + j_queue_free(this->inputbufqueue); + + //MixBuffer pool is deallocated in MixVideo object + this->inputbufpool = NULL; } -MIX_RESULT MixVideoFormat::GetCaps(GString *msg) { - g_print("mix_videofmt_getcaps_default\n"); - return MIX_RESULT_SUCCESS; +MIX_RESULT MixVideoFormat::GetCaps(char *msg) { + LOG_V("mix_videofmt_getcaps_default\n"); + return MIX_RESULT_SUCCESS; } MIX_RESULT MixVideoFormat::Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - - LOG_V( "Begin\n"); - MIX_RESULT res = MIX_RESULT_SUCCESS; - MixInputBufferEntry *buf_entry = NULL; - - if (!config_params || !frame_mgr || !input_buf_pool || !surface_pool || !va_display) { - LOG_E( "NUll pointer passed in\n"); - return (MIX_RESULT_NULL_PTR); - } - - Lock(); - - //Clean up any previous framemgr - MIXUNREF(this->framemgr, mix_framemanager_unref); - this->framemgr = frame_mgr; - mix_framemanager_ref(this->framemgr); - if (this->config_params) { - mix_videoconfigparams_unref(this->config_params); - } - this->config_params = config_params; - mix_videoconfigparams_ref(reinterpret_cast(this->config_params)); - - this->va_display = va_display; - - //Clean up any previous mime_type - if (this->mime_type) { - if (this->mime_type->str) - g_string_free(this->mime_type, TRUE); - else - g_string_free(this->mime_type, FALSE); - } - gchar *mime_tmp = NULL; - res = mix_videoconfigparamsdec_get_mime_type(config_params, &mime_tmp); - if (NULL != mime_tmp) { - this->mime_type = g_string_new(mime_tmp); - g_free(mime_tmp); - if (NULL == this->mime_type) {//new failed - res = MIX_RESULT_NO_MEMORY; - LOG_E( "Could not duplicate mime_type\n"); - goto cleanup; - } - }//else there is no mime_type; leave as NULL - - res = mix_videoconfigparamsdec_get_frame_rate(config_params, 
&(this->frame_rate_num), &(this->frame_rate_denom)); - if (res != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting frame_rate\n"); - goto cleanup; - } - res = mix_videoconfigparamsdec_get_picture_res(config_params, &(this->picture_width), &(this->picture_height)); - if (res != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting picture_res\n"); - goto cleanup; - } - - if (this->inputbufqueue) { - //Deinit previous input buffer queue - while (!g_queue_is_empty(this->inputbufqueue)) { - buf_entry = reinterpret_cast(g_queue_pop_head(this->inputbufqueue)); - mix_buffer_unref(buf_entry->buf); - g_free(buf_entry); - } - g_queue_free(this->inputbufqueue); - } - - //MixBuffer pool is cleaned up in MixVideo object - this->inputbufpool = NULL; - - this->inputbufpool = input_buf_pool; - this->inputbufqueue = g_queue_new(); - if (NULL == this->inputbufqueue) {//New failed - res = MIX_RESULT_NO_MEMORY; - LOG_E( "Could not duplicate mime_type\n"); - goto cleanup; - } - - // surface pool, VA context/config and parser handle are initialized by - // derived classes - - + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display) { + + LOG_V( "Begin\n"); + MIX_RESULT res = MIX_RESULT_SUCCESS; + MixInputBufferEntry *buf_entry = NULL; + + if (!config_params || !frame_mgr || !input_buf_pool || !surface_pool || !va_display) { + LOG_E( "NUll pointer passed in\n"); + return (MIX_RESULT_NULL_PTR); + } + + Lock(); + + //Clean up any previous framemgr + MIXUNREF(this->framemgr, mix_framemanager_unref); + this->framemgr = frame_mgr; + mix_framemanager_ref(this->framemgr); + if (this->config_params) { + mix_videoconfigparams_unref(this->config_params); + } + this->config_params = config_params; + mix_videoconfigparams_ref(reinterpret_cast(this->config_params)); + + this->va_display = va_display; + + //Clean up any previous mime_type + if (this->mime_type) { + free(this->mime_type); + this->mime_type = NULL; + } + + res = mix_videoconfigparamsdec_get_mime_type(config_params, &this->mime_type); + if (NULL == this->mime_type) { + res = MIX_RESULT_NO_MEMORY; + LOG_E( "Could not duplicate mime_type\n"); + goto cleanup; + }//else there is no mime_type; leave as NULL + + res = mix_videoconfigparamsdec_get_frame_rate(config_params, &(this->frame_rate_num), &(this->frame_rate_denom)); + if (res != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting frame_rate\n"); + goto cleanup; + } + res = mix_videoconfigparamsdec_get_picture_res(config_params, &(this->picture_width), &(this->picture_height)); + if (res != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting picture_res\n"); + goto cleanup; + } + + if (this->inputbufqueue) { + //Deinit previous input buffer queue + while (!j_queue_is_empty(this->inputbufqueue)) { + buf_entry = reinterpret_cast(j_queue_pop_head(this->inputbufqueue)); + mix_buffer_unref(buf_entry->buf); + free(buf_entry); + } + j_queue_free(this->inputbufqueue); + } + + //MixBuffer pool is cleaned up in MixVideo object + this->inputbufpool = NULL; + + this->inputbufpool = input_buf_pool; + this->inputbufqueue = j_queue_new(); + if (NULL == this->inputbufqueue) {//New failed + res = MIX_RESULT_NO_MEMORY; + LOG_E( "Could not duplicate mime_type\n"); + goto cleanup; + } + + // surface pool, VA context/config and parser handle are initialized by + // derived classes + + cleanup: - if (res != MIX_RESULT_SUCCESS) { - MIXUNREF(this->framemgr, mix_framemanager_unref); - if (this->mime_type) { - if (this->mime_type->str) - 
g_string_free(this->mime_type, TRUE); - else - g_string_free(this->mime_type, FALSE); - this->mime_type = NULL; - } - Unlock(); - this->frame_rate_num = 0; - this->frame_rate_denom = 1; - this->picture_width = 0; - this->picture_height = 0; - } else {//Normal unlock - Unlock(); - } - - LOG_V( "End\n"); - - return res; + if (res != MIX_RESULT_SUCCESS) { + MIXUNREF(this->framemgr, mix_framemanager_unref); + if (this->mime_type) { + free(this->mime_type); + this->mime_type = NULL; + } + Unlock(); + this->frame_rate_num = 0; + this->frame_rate_denom = 1; + this->picture_width = 0; + this->picture_height = 0; + } else {//Normal unlock + Unlock(); + } + + LOG_V( "End\n"); + + return res; } MIX_RESULT MixVideoFormat::Decode( - MixBuffer * bufin[], gint bufincnt, - MixVideoDecodeParams * decode_params) { - return MIX_RESULT_SUCCESS; + MixBuffer * bufin[], int bufincnt, + MixVideoDecodeParams * decode_params) { + return MIX_RESULT_SUCCESS; } MIX_RESULT MixVideoFormat::Flush() { - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT MixVideoFormat::EndOfStream() { - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } MIX_RESULT MixVideoFormat::Deinitialize() { - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } -MixVideoFormat* mix_videoformat_unref(MixVideoFormat* mix){ - if (NULL != mix) - return mix->Unref(); - else - return NULL; +MixVideoFormat* mix_videoformat_unref(MixVideoFormat* mix) { + if (NULL != mix) + return mix->Unref(); + else + return NULL; } MixVideoFormat * mix_videoformat_new(void) { - return new MixVideoFormat(); + return new MixVideoFormat(); } MixVideoFormat * mix_videoformat_ref(MixVideoFormat * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } /* mixvideoformat class methods implementation */ -MIX_RESULT mix_videofmt_getcaps(MixVideoFormat *mix, GString *msg) { - return mix->GetCaps(msg); +MIX_RESULT mix_videofmt_getcaps(MixVideoFormat *mix, char *msg) { + return mix->GetCaps(msg); } MIX_RESULT mix_videofmt_initialize( - MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - return mix->Initialize(config_params, frame_mgr, - input_buf_pool, surface_pool, va_display); + MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display) { + return mix->Initialize(config_params, frame_mgr, + input_buf_pool, surface_pool, va_display); } MIX_RESULT mix_videofmt_decode( - MixVideoFormat *mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params) { - return mix->Decode(bufin, bufincnt, decode_params); + MixVideoFormat *mix, MixBuffer * bufin[], + int bufincnt, MixVideoDecodeParams * decode_params) { + return mix->Decode(bufin, bufincnt, decode_params); } MIX_RESULT mix_videofmt_flush(MixVideoFormat *mix) { - return mix->Flush(); + return mix->Flush(); } MIX_RESULT mix_videofmt_eos(MixVideoFormat *mix) { - return mix->EndOfStream(); + return mix->EndOfStream(); } MIX_RESULT mix_videofmt_deinitialize(MixVideoFormat *mix) { - return mix->Deinitialize(); + return mix->Deinitialize(); } diff --git a/mix_video/src/mixvideoformat.h b/mix_video/src/mixvideoformat.h index 26f91f6..cda1804 100644 --- a/mix_video/src/mixvideoformat.h +++ b/mix_video/src/mixvideoformat.h @@ -10,7 +10,7 @@ #define __MIX_VIDEOFORMAT_H__ #include -#include + 
extern "C" { #include "vbp_loader.h" @@ -27,6 +27,8 @@ extern "C" { #include "mixbufferpool.h" #include "mixvideoformatqueue.h" #include "mixvideothread.h" +#include + // Redefine the Handle defined in vbp_loader.h #define VBPhandle Handle @@ -36,91 +38,91 @@ class MixVideoFormat; #define MIX_VIDEOFORMAT(obj) (dynamic_cast(obj)) /* vmethods typedef */ -typedef MIX_RESULT (*MixVideoFmtGetCapsFunc)(MixVideoFormat *mix, GString *msg); +typedef MIX_RESULT (*MixVideoFmtGetCapsFunc)(MixVideoFormat *mix, char *msg); typedef MIX_RESULT (*MixVideoFmtInitializeFunc)(MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); -typedef MIX_RESULT (*MixVideoFmtDecodeFunc)(MixVideoFormat *mix, - MixBuffer * bufin[], gint bufincnt, - MixVideoDecodeParams * decode_params); + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); +typedef MIX_RESULT (*MixVideoFmtDecodeFunc)(MixVideoFormat *mix, + MixBuffer * bufin[], int bufincnt, + MixVideoDecodeParams * decode_params); typedef MIX_RESULT (*MixVideoFmtFlushFunc)(MixVideoFormat *mix); typedef MIX_RESULT (*MixVideoFmtEndOfStreamFunc)(MixVideoFormat *mix); typedef MIX_RESULT (*MixVideoFmtDeinitializeFunc)(MixVideoFormat *mix); class MixVideoFormat { - /*< public > */ + /*< public > */ public: - MixVideoFormat(); - virtual ~MixVideoFormat(); - - - virtual MIX_RESULT GetCaps(GString *msg); - virtual MIX_RESULT Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); - virtual MIX_RESULT Decode( - MixBuffer * bufin[], gint bufincnt, - MixVideoDecodeParams * decode_params); - virtual MIX_RESULT Flush(); - virtual MIX_RESULT EndOfStream(); - virtual MIX_RESULT Deinitialize(); - - - void Lock() { - mLock.lock(); - } - - void Unlock() { - mLock.unlock(); - } - - MixVideoFormat* Ref() { - ++ref_count; - return this; - } - MixVideoFormat* Unref() { - if (0 == (--ref_count)) { - delete this; - return NULL; - } else { - return this; - } - } + MixVideoFormat(); + virtual ~MixVideoFormat(); + + + virtual MIX_RESULT GetCaps(char *msg); + virtual MIX_RESULT Initialize( + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); + virtual MIX_RESULT Decode( + MixBuffer * bufin[], int bufincnt, + MixVideoDecodeParams * decode_params); + virtual MIX_RESULT Flush(); + virtual MIX_RESULT EndOfStream(); + virtual MIX_RESULT Deinitialize(); + + + void Lock() { + mLock.lock(); + } + + void Unlock() { + mLock.unlock(); + } + + MixVideoFormat* Ref() { + ++ref_count; + return this; + } + MixVideoFormat* Unref() { + if (0 == (--ref_count)) { + delete this; + return NULL; + } else { + return this; + } + } public: - /*< private > */ - MixVideoMutex mLock; - gboolean initialized; - MixFrameManager *framemgr; - MixSurfacePool *surfacepool; - VADisplay va_display; - VAContextID va_context; - VAConfigID va_config; - VASurfaceID *va_surfaces; - guint va_num_surfaces; - VBPhandle parser_handle; - GString *mime_type; - guint frame_rate_num; - guint frame_rate_denom; - guint picture_width; - guint picture_height; - gboolean parse_in_progress; - gboolean discontinuity_frame_in_progress; - guint64 current_timestamp; - MixBufferPool 
*inputbufpool; - GQueue *inputbufqueue; - gboolean va_initialized; - gboolean end_picture_pending; - MixVideoFrame* video_frame; - guint extra_surfaces; - MixVideoConfigParamsDec * config_params; - guint ref_count ; + /*< private > */ + MixVideoMutex mLock; + bool initialized; + MixFrameManager *framemgr; + MixSurfacePool *surfacepool; + VADisplay va_display; + VAContextID va_context; + VAConfigID va_config; + VASurfaceID *va_surfaces; + uint va_num_surfaces; + VBPhandle parser_handle; + char *mime_type; + uint frame_rate_num; + uint frame_rate_denom; + uint picture_width; + uint picture_height; + bool parse_in_progress; + bool discontinuity_frame_in_progress; + uint64 current_timestamp; + MixBufferPool *inputbufpool; + JQueue *inputbufqueue; + bool va_initialized; + bool end_picture_pending; + MixVideoFrame* video_frame; + uint extra_surfaces; + MixVideoConfigParamsDec * config_params; + uint ref_count ; }; @@ -151,19 +153,19 @@ MixVideoFormat* mix_videoformat_unref(MixVideoFormat* mix); /* Class Methods */ -MIX_RESULT mix_videofmt_getcaps(MixVideoFormat *mix, GString *msg); +MIX_RESULT mix_videofmt_getcaps(MixVideoFormat *mix, char *msg); MIX_RESULT mix_videofmt_initialize( - MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); + MixVideoFormat *mix, + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); MIX_RESULT mix_videofmt_decode( - MixVideoFormat *mix, MixBuffer * bufin[], - gint bufincnt, MixVideoDecodeParams * decode_params); + MixVideoFormat *mix, MixBuffer * bufin[], + int bufincnt, MixVideoDecodeParams * decode_params); MIX_RESULT mix_videofmt_flush(MixVideoFormat *mix); diff --git a/mix_video/src/mixvideoformat_h264.cpp b/mix_video/src/mixvideoformat_h264.cpp index 27e1447..08d8e78 100644 --- a/mix_video/src/mixvideoformat_h264.cpp +++ b/mix_video/src/mixvideoformat_h264.cpp @@ -5,7 +5,7 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#include + #include #ifndef ANDROID #include @@ -26,1090 +26,1091 @@ static int mix_video_h264_counter = 0; * can be used for chaining method call if needed. */ MixVideoFormat_H264::MixVideoFormat_H264() - :dpb_surface_table(NULL) + :dpb_surface_table(NULL) #ifdef DECODER_ROBUSTNESS - ,last_decoded_frame(NULL) + ,last_decoded_frame(NULL) #endif {} -MixVideoFormat_H264::~MixVideoFormat_H264() { - gint32 pret = VBP_OK; - /* clean up here. 
*/ - //surfacepool is deallocated by parent - //inputbufqueue is deallocated by parent - //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces - - if (this->dpb_surface_table) { - //Free the DPB surface table - //First remove all the entries (frames will be unrefed) - g_hash_table_remove_all(this->dpb_surface_table); - //Then unref the table - g_hash_table_unref(this->dpb_surface_table); - this->dpb_surface_table = NULL; - } - - Lock(); - this->initialized = TRUE; - this->parse_in_progress = FALSE; - - //Close the parser - pret = vbp_close(this->parser_handle); - this->parser_handle = NULL; - if (pret != VBP_OK) { - LOG_E( "Error closing parser\n"); - } - Unlock(); +MixVideoFormat_H264::~MixVideoFormat_H264() { + int32 pret = VBP_OK; + /* clean up here. */ + //surfacepool is deallocated by parent + //inputbufqueue is deallocated by parent + //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces + + if (this->dpb_surface_table) { + //Free the DPB surface table + //First remove all the entries (frames will be unrefed) + j_hash_table_remove_all(this->dpb_surface_table); + //Then unref the table + j_hash_table_unref(this->dpb_surface_table); + this->dpb_surface_table = NULL; + } + + Lock(); + this->initialized = TRUE; + this->parse_in_progress = FALSE; + + //Close the parser + pret = vbp_close(this->parser_handle); + this->parser_handle = NULL; + if (pret != VBP_OK) { + LOG_E( "Error closing parser\n"); + } + Unlock(); } MIX_RESULT MixVideoFormat_H264::Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - - uint32 pret = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - enum _vbp_parser_type ptype = VBP_H264; - vbp_data_h264 *data = NULL; - MixIOVec *header = NULL; - - if (config_params == NULL || frame_mgr == NULL || - input_buf_pool == NULL || va_display == NULL) { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - // chain up parent method - MixVideoFormat::Initialize(config_params, frame_mgr, input_buf_pool, - surface_pool, va_display); - - /* Chainup parent method. 
*/ - LOG_V( "Locking\n"); - //From now on, we exit this function through cleanup: - Lock(); - - this->surfacepool = mix_surfacepool_new(); - *surface_pool = this->surfacepool; - - if (NULL == this->surfacepool) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "parent->surfacepool == NULL.\n"); - goto CLEAN_UP; - } - - //Create our table of Decoded Picture Buffer "in use" surfaces - this->dpb_surface_table = g_hash_table_new_full(NULL, NULL, - mix_videofmt_h264_destroy_DPB_key, mix_videofmt_h264_destroy_DPB_value); - - if (NULL == this->dpb_surface_table) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating dbp surface table\n"); - goto CLEAN_UP; - } - - ret = mix_videoconfigparamsdec_get_extra_surface_allocation( - config_params, &(this->extra_surfaces)); - - if (ret != MIX_RESULT_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot get extra surface allocation setting\n"); - goto CLEAN_UP; - } - - LOG_V( "Before vbp_open\n"); - //Load the bitstream parser - pret = vbp_open(ptype, &(this->parser_handle)); - LOG_V( "After vbp_open\n"); - - if (VBP_OK != pret) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error opening parser\n"); - goto CLEAN_UP; - } - LOG_V( "Opened parser\n"); - - ret = mix_videoconfigparamsdec_get_header(config_params, &header); - - if ((MIX_RESULT_SUCCESS != ret) || (NULL == header)) { - // Delay initializing VA if codec configuration data is not ready, but don't return an error. - ret = MIX_RESULT_SUCCESS; - LOG_W( "Codec data is not available in the configuration parameter.\n"); - goto CLEAN_UP; - } - - LOG_V( "Calling parse on header data, handle %d\n", (int)(this->parser_handle)); - - pret = vbp_parse(this->parser_handle, header->data, header->data_size, TRUE); - - if ((VBP_OK != pret) && (VBP_DONE != pret)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error parsing header data\n"); - goto CLEAN_UP; - } - - LOG_V( "Parsed header\n"); - - //Get the header data and save - pret = vbp_query(this->parser_handle, (void **)&data); - - if ((VBP_OK != pret) || (NULL == data)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error reading parsed header data\n"); - goto CLEAN_UP; - } - - LOG_V( "Queried parser for header data\n"); - - _update_config_params(data); - - ret = _initialize_va(data); - if (MIX_RESULT_SUCCESS != ret) { - LOG_E( "Error initializing va. \n"); - goto CLEAN_UP; - } + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display) { + + uint32 pret = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + enum _vbp_parser_type ptype = VBP_H264; + vbp_data_h264 *data = NULL; + MixIOVec *header = NULL; + + if (config_params == NULL || frame_mgr == NULL || + input_buf_pool == NULL || va_display == NULL) { + LOG_E( "NUll pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + // chain up parent method + MixVideoFormat::Initialize(config_params, frame_mgr, input_buf_pool, + surface_pool, va_display); + + /* Chainup parent method. 
*/ + LOG_V( "Locking\n"); + //From now on, we exit this function through cleanup: + Lock(); + + this->surfacepool = mix_surfacepool_new(); + *surface_pool = this->surfacepool; + + if (NULL == this->surfacepool) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "parent->surfacepool == NULL.\n"); + goto CLEAN_UP; + } + + //Create our table of Decoded Picture Buffer "in use" surfaces + this->dpb_surface_table = j_hash_table_new_full(NULL, NULL, + mix_videofmt_h264_destroy_DPB_key, mix_videofmt_h264_destroy_DPB_value); + + if (NULL == this->dpb_surface_table) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating dbp surface table\n"); + goto CLEAN_UP; + } + + ret = mix_videoconfigparamsdec_get_extra_surface_allocation( + config_params, &(this->extra_surfaces)); + + if (ret != MIX_RESULT_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get extra surface allocation setting\n"); + goto CLEAN_UP; + } + + LOG_V( "Before vbp_open\n"); + //Load the bitstream parser + pret = vbp_open(ptype, &(this->parser_handle)); + LOG_V( "After vbp_open\n"); + + if (VBP_OK != pret) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error opening parser\n"); + goto CLEAN_UP; + } + LOG_V( "Opened parser\n"); + + ret = mix_videoconfigparamsdec_get_header(config_params, &header); + + if ((MIX_RESULT_SUCCESS != ret) || (NULL == header)) { + // Delay initializing VA if codec configuration data is not ready, but don't return an error. + ret = MIX_RESULT_SUCCESS; + LOG_W( "Codec data is not available in the configuration parameter.\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling parse on header data, handle %d\n", (int)(this->parser_handle)); + + pret = vbp_parse(this->parser_handle, header->data, header->data_size, TRUE); + + if ((VBP_OK != pret) && (VBP_DONE != pret)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error parsing header data\n"); + goto CLEAN_UP; + } + + LOG_V( "Parsed header\n"); + + //Get the header data and save + pret = vbp_query(this->parser_handle, (void **)&data); + + if ((VBP_OK != pret) || (NULL == data)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error reading parsed header data\n"); + goto CLEAN_UP; + } + + LOG_V( "Queried parser for header data\n"); + + _update_config_params(data); + + ret = _initialize_va(data); + if (MIX_RESULT_SUCCESS != ret) { + LOG_E( "Error initializing va. 
\n"); + goto CLEAN_UP; + } CLEAN_UP: - if (MIX_RESULT_SUCCESS != ret) { - pret = vbp_close(this->parser_handle); - this->parser_handle = NULL; - this->initialized = FALSE; - } else { - this->initialized = TRUE; - } - if (NULL != header) { - if (NULL != header->data) - g_free(header->data); - g_free(header); - header = NULL; - } - LOG_V( "Unlocking\n"); - Unlock(); - return ret; - + if (MIX_RESULT_SUCCESS != ret) { + pret = vbp_close(this->parser_handle); + this->parser_handle = NULL; + this->initialized = FALSE; + } else { + this->initialized = TRUE; + } + if (NULL != header) { + if (NULL != header->data) + free(header->data); + free(header); + header = NULL; + } + LOG_V( "Unlocking\n"); + Unlock(); + return ret; + } MIX_RESULT MixVideoFormat_H264::Decode( - MixBuffer * bufin[], gint bufincnt, - MixVideoDecodeParams * decode_params) { + MixBuffer * bufin[], int bufincnt, + MixVideoDecodeParams * decode_params) { - int i = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - guint64 ts = 0; - gboolean discontinuity = FALSE; + int i = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + uint64 ts = 0; + bool discontinuity = FALSE; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - if (NULL == bufin || NULL == decode_params || 0 == bufincnt) { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } + if (NULL == bufin || NULL == decode_params || 0 == bufincnt) { + LOG_E( "NUll pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } - /* Chainup parent method. - We are not chaining up to parent method for now. - */ + /* Chainup parent method. + We are not chaining up to parent method for now. + */ #if 0 - MixVideoFormat::Decode(bufin, bufincnt, decode_params); + MixVideoFormat::Decode(bufin, bufincnt, decode_params); #endif - ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); - if (ret != MIX_RESULT_SUCCESS) { - // never happen - return MIX_RESULT_FAIL; - } + ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); + if (ret != MIX_RESULT_SUCCESS) { + // never happen + return MIX_RESULT_FAIL; + } - ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); - if (ret != MIX_RESULT_SUCCESS) { - // never happen - return MIX_RESULT_FAIL; - } + ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); + if (ret != MIX_RESULT_SUCCESS) { + // never happen + return MIX_RESULT_FAIL; + } - decode_params->new_sequence = FALSE; + decode_params->new_sequence = FALSE; - //From now on, we exit this function through cleanup: - LOG_V( "Locking\n"); - Lock(); + //From now on, we exit this function through cleanup: + LOG_V( "Locking\n"); + Lock(); - LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_h264_counter++, ts); + LOG_V( "Starting current frame %d, timestamp %"UINT64_FORMAT"\n", mix_video_h264_counter++, ts); - for (i = 0; i < bufincnt; i++) { - LOG_V( "Decoding a buf %x, size %d\n", (guint)bufin[i]->data, bufin[i]->size); - // decode a buffer at a time - ret = _decode_a_buffer(bufin[i], ts, discontinuity, decode_params); - if (MIX_RESULT_SUCCESS != ret) { - LOG_E("mix_videofmt_h264_decode_a_buffer failed.\n"); - goto CLEAN_UP; - } - } + for (i = 0; i < bufincnt; i++) { + LOG_V( "Decoding a buf %x, size %d\n", (uint)bufin[i]->data, bufin[i]->size); + // decode a buffer at a time + ret = _decode_a_buffer(bufin[i], ts, discontinuity, decode_params); + if (MIX_RESULT_SUCCESS != ret) { + LOG_E("mix_videofmt_h264_decode_a_buffer failed.\n"); + goto CLEAN_UP; + } + } CLEAN_UP: - LOG_V( "Unlocking\n"); - Unlock(); - LOG_V( "End\n"); - 
return ret; + LOG_V( "Unlocking\n"); + Unlock(); + LOG_V( "End\n"); + return ret; } MIX_RESULT MixVideoFormat_H264::Flush() { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V( "Begin\n"); - uint32 pret = 0; - /* Chainup parent method. - We are not chaining up to parent method for now. - */ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V( "Begin\n"); + uint32 pret = 0; + /* Chainup parent method. + We are not chaining up to parent method for now. + */ #if 0 - MixVideoFormat::Flush(); + MixVideoFormat::Flush(); #endif - Lock(); - // drop any decode-pending picture, and ignore return value - _decode_end(TRUE); + Lock(); + // drop any decode-pending picture, and ignore return value + _decode_end(TRUE); - //Clear parse_in_progress flag and current timestamp - this->parse_in_progress = FALSE; - this->discontinuity_frame_in_progress = FALSE; - this->current_timestamp = (guint64)-1; + //Clear parse_in_progress flag and current timestamp + this->parse_in_progress = FALSE; + this->discontinuity_frame_in_progress = FALSE; + this->current_timestamp = (uint64)-1; - //Clear the DPB surface table - g_hash_table_remove_all(this->dpb_surface_table); + //Clear the DPB surface table + j_hash_table_remove_all(this->dpb_surface_table); - //Call parser flush - pret = vbp_flush(this->parser_handle); - if (VBP_OK != pret) - ret = MIX_RESULT_FAIL; + //Call parser flush + pret = vbp_flush(this->parser_handle); + if (VBP_OK != pret) + ret = MIX_RESULT_FAIL; - Unlock(); - LOG_V( "End\n"); - return ret; + Unlock(); + LOG_V( "End\n"); + return ret; } MIX_RESULT MixVideoFormat_H264::EndOfStream() { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V( "Begin\n"); - /* Chainup parent method. - We are not chaining up to parent method for now. - */ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V( "Begin\n"); + /* Chainup parent method. + We are not chaining up to parent method for now. + */ #if 0 - if (parent_class->eos) { - return parent_class->eos(mix, msg); - } + if (parent_class->eos) { + return parent_class->eos(mix, msg); + } #endif - Lock(); - // finished decoding the pending frame - _decode_end(FALSE); - Unlock(); - //Call Frame Manager with _eos() - ret = mix_framemanager_eos(this->framemgr); - LOG_V( "End\n"); - return ret; + Lock(); + // finished decoding the pending frame + _decode_end(FALSE); + Unlock(); + //Call Frame Manager with _eos() + ret = mix_framemanager_eos(this->framemgr); + LOG_V( "End\n"); + return ret; } MixVideoFormat_H264 * mix_videoformat_h264_new(void) { - return new MixVideoFormat_H264(); + return new MixVideoFormat_H264(); } MixVideoFormat_H264 *mix_videoformat_h264_ref(MixVideoFormat_H264 * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } MixVideoFormat_H264 *mix_videoformat_h264_unref(MixVideoFormat_H264 *mix) { - if (NULL != mix) - return MIX_VIDEOFORMAT_H264(mix->Unref()); - else - return mix; + if (NULL != mix) + return MIX_VIDEOFORMAT_H264(mix->Unref()); + else + return mix; } MIX_RESULT MixVideoFormat_H264::_update_config_params(vbp_data_h264 *data) { - if (0 == this->picture_width || 0 == this->picture_height || data->new_sps) { - this->picture_width = - (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16; - this->picture_height = - (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16; - - mix_videoconfigparamsdec_set_picture_res( - this->config_params, this->picture_width, this->picture_height); - } - - // video_range has default value of 0. 
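For reference, the dimension math in _update_config_params converts the parser's macroblock counts (signalled as count minus one) into pixels. A minimal standalone sketch of just that arithmetic, independent of the library (the helper name is illustrative, not part of MI-X):

    #include <cstdint>
    #include <cstdio>

    // H.264 codes picture dimensions in 16x16 macroblock units, minus one;
    // the decoder expands picture_{width,height}_in_mbs_minus1 to pixels.
    static uint32_t mbs_minus1_to_pixels(uint32_t mbs_minus1) {
        return (mbs_minus1 + 1) * 16;
    }

    int main() {
        // 1080p content: 119 -> 1920 wide, 67 -> 1088 high (the coded height
        // rounds up to a multiple of 16; cropping trims it back to 1080).
        printf("%u x %u\n", mbs_minus1_to_pixels(119), mbs_minus1_to_pixels(67));
        return 0;
    }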
- mix_videoconfigparamsdec_set_video_range(this->config_params, - data->codec_data->video_full_range_flag); - - uint8 color_matrix; - switch (data->codec_data->matrix_coefficients) { - case 1: - color_matrix = VA_SRC_BT709; - break; - // ITU-R Recommendation BT.470-6 System B, G (MP4), same as - // SMPTE 170M/BT601 - case 5: - case 6: - color_matrix = VA_SRC_BT601; - break; - default: - // unknown color matrix, set to 0 so color space flag will not be set. - color_matrix = 0; - break; - } - mix_videoconfigparamsdec_set_color_matrix(this->config_params, color_matrix); - mix_videoconfigparamsdec_set_pixel_aspect_ratio( - this->config_params, - data->codec_data->sar_width, - data->codec_data->sar_height); - mix_videoconfigparamsdec_set_bit_rate( - this->config_params, data->codec_data->bit_rate); - return MIX_RESULT_SUCCESS; + if (0 == this->picture_width || 0 == this->picture_height || data->new_sps) { + this->picture_width = + (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16; + this->picture_height = + (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16; + + mix_videoconfigparamsdec_set_picture_res( + this->config_params, this->picture_width, this->picture_height); + } + + // video_range has default value of 0. + mix_videoconfigparamsdec_set_video_range(this->config_params, + data->codec_data->video_full_range_flag); + + uint8 color_matrix; + switch (data->codec_data->matrix_coefficients) { + case 1: + color_matrix = VA_SRC_BT709; + break; + // ITU-R Recommendation BT.470-6 System B, G (MP4), same as + // SMPTE 170M/BT601 + case 5: + case 6: + color_matrix = VA_SRC_BT601; + break; + default: + // unknown color matrix, set to 0 so color space flag will not be set. + color_matrix = 0; + break; + } + mix_videoconfigparamsdec_set_color_matrix(this->config_params, color_matrix); + mix_videoconfigparamsdec_set_pixel_aspect_ratio( + this->config_params, + data->codec_data->sar_width, + data->codec_data->sar_height); + mix_videoconfigparamsdec_set_bit_rate( + this->config_params, data->codec_data->bit_rate); + return MIX_RESULT_SUCCESS; } MIX_RESULT MixVideoFormat_H264::_initialize_va(vbp_data_h264 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - VAConfigAttrib attrib; - if (this->va_initialized) { - LOG_W("va already initialized.\n"); - return MIX_RESULT_SUCCESS; - } - LOG_V( "Begin\n"); - //We are requesting RT attributes - attrib.type = VAConfigAttribRTFormat; - attrib.value = VA_RT_FORMAT_YUV420; - - //Initialize and save the VA config ID - //We use high profile for all kinds of H.264 profiles (baseline, main and high) - vret = vaCreateConfig(this->va_display, VAProfileH264High, - VAEntrypointVLD, &attrib, 1, &(this->va_config)); - - if (VA_STATUS_SUCCESS != vret) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E("vaCreateConfig failed\n"); - return ret; - } - - LOG_V( "Codec data says num_ref_frames is %d\n", data->codec_data->num_ref_frames); - - // handle both frame and field coding for interlaced content - int num_ref_pictures = data->codec_data->num_ref_frames; - - //Adding 1 to work around VBLANK issue, and another 1 to compensate cached frame that - // will not start decoding until a new frame is received. - this->va_num_surfaces = 1 + 1 + this->extra_surfaces + - (((num_ref_pictures + 3) < MIX_VIDEO_H264_SURFACE_NUM) ? 
(num_ref_pictures + 3) : MIX_VIDEO_H264_SURFACE_NUM); - - this->va_surfaces = - reinterpret_cast<VASurfaceID *>(g_malloc(sizeof(VASurfaceID)*this->va_num_surfaces)); - if (NULL == this->va_surfaces){ - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "parent->va_surfaces == NULL. \n"); - return ret; - } - - LOG_V( "Codec data says picture size is %d x %d\n", - (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, - (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16); - LOG_V( "getcaps says picture size is %d x %d\n", this->picture_width, this->picture_height); - - vret = vaCreateSurfaces( - this->va_display, - this->picture_width, - this->picture_height, - VA_RT_FORMAT_YUV420, - this->va_num_surfaces, - this->va_surfaces); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Error allocating surfaces\n"); - return ret; - } - - LOG_V( "Created %d libva surfaces\n", this->va_num_surfaces); - - //Initialize the surface pool - ret = mix_surfacepool_initialize( - this->surfacepool, - this->va_surfaces, - this->va_num_surfaces, - this->va_display); - - switch (ret) { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing. - default: - ret = MIX_RESULT_ALREADY_INIT; - LOG_E( "Error init surface pool\n"); - return ret; - break; - } - - if (data->codec_data->pic_order_cnt_type == 0) { - int max = (int)pow(2, data->codec_data->log2_max_pic_order_cnt_lsb_minus4 + 4); - mix_framemanager_set_max_picture_number(this->framemgr, max); - } - - //Initialize and save the VA context ID - //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 - vret = vaCreateContext( - this->va_display, - this->va_config, - this->picture_width, - this->picture_height, - 0, // no flag set - this->va_surfaces, - this->va_num_surfaces, - &(this->va_context)); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Error initializing video driver\n"); - return ret; - } - - this->va_initialized = TRUE; - - return ret; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + VAConfigAttrib attrib; + if (this->va_initialized) { + LOG_W("va already initialized.\n"); + return MIX_RESULT_SUCCESS; + } + LOG_V( "Begin\n"); + //We are requesting RT attributes + attrib.type = VAConfigAttribRTFormat; + attrib.value = VA_RT_FORMAT_YUV420; + + //Initialize and save the VA config ID + //We use high profile for all kinds of H.264 profiles (baseline, main and high) + vret = vaCreateConfig(this->va_display, VAProfileH264High, + VAEntrypointVLD, &attrib, 1, &(this->va_config)); + + if (VA_STATUS_SUCCESS != vret) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E("vaCreateConfig failed\n"); + return ret; + } + + LOG_V( "Codec data says num_ref_frames is %d\n", data->codec_data->num_ref_frames); + + // handle both frame and field coding for interlaced content + int num_ref_pictures = data->codec_data->num_ref_frames; + + //Adding 1 to work around VBLANK issue, and another 1 to compensate for the cached frame that + // will not start decoding until a new frame is received. + this->va_num_surfaces = 1 + 1 + this->extra_surfaces + + (((num_ref_pictures + 3) < MIX_VIDEO_H264_SURFACE_NUM) ? 
(num_ref_pictures + 3) : MIX_VIDEO_H264_SURFACE_NUM); + + this->va_surfaces = + reinterpret_cast<VASurfaceID *>(malloc(sizeof(VASurfaceID)*this->va_num_surfaces)); + if (NULL == this->va_surfaces) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "parent->va_surfaces == NULL. \n"); + return ret; + } + + LOG_V( "Codec data says picture size is %d x %d\n", + (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, + (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16); + LOG_V( "getcaps says picture size is %d x %d\n", this->picture_width, this->picture_height); + + vret = vaCreateSurfaces( + this->va_display, + this->picture_width, + this->picture_height, + VA_RT_FORMAT_YUV420, + this->va_num_surfaces, + this->va_surfaces); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "Error allocating surfaces\n"); + return ret; + } + + LOG_V( "Created %d libva surfaces\n", this->va_num_surfaces); + + //Initialize the surface pool + ret = mix_surfacepool_initialize( + this->surfacepool, + this->va_surfaces, + this->va_num_surfaces, + this->va_display); + + switch (ret) { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing. + default: + ret = MIX_RESULT_ALREADY_INIT; + LOG_E( "Error init surface pool\n"); + return ret; + break; + } + + if (data->codec_data->pic_order_cnt_type == 0) { + int max = (int)pow(2, data->codec_data->log2_max_pic_order_cnt_lsb_minus4 + 4); + mix_framemanager_set_max_picture_number(this->framemgr, max); + } + + //Initialize and save the VA context ID + //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 + vret = vaCreateContext( + this->va_display, + this->va_config, + this->picture_width, + this->picture_height, + 0, // no flag set + this->va_surfaces, + this->va_num_surfaces, + &(this->va_context)); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + return ret; + } + + this->va_initialized = TRUE; + + return ret; } MIX_RESULT MixVideoFormat_H264::_handle_new_sequence(vbp_data_h264 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V("new sequence is received.\n"); + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V("new sequence is received.\n"); - // original picture resolution - uint32 width = this->picture_width; - uint32 height = this->picture_height; + // original picture resolution + uint32 width = this->picture_width; + uint32 height = this->picture_height; - _update_config_params(data); + _update_config_params(data); - if (width != this->picture_width || height != this->picture_height) { - // flush frame manager only if resolution is changed. - ret = mix_framemanager_flush(this->framemgr); - } - // TO DO: re-initialize VA - return ret; + if (width != this->picture_width || height != this->picture_height) { + // flush frame manager only if resolution is changed. 
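The surface budget computed in _initialize_va above reads as: one surface for the VBLANK workaround, one for the frame cached until the next frame arrives, the caller's extra_surfaces, plus the DPB requirement (num_ref_frames + 3) clamped to MIX_VIDEO_H264_SURFACE_NUM. A standalone sketch of the same formula (pool_limit stands in for that constant; the function name is illustrative):

    #include <algorithm>
    #include <cstdio>

    // Mirrors the va_num_surfaces computation above:
    // 1 (VBLANK workaround) + 1 (cached frame) + extras + min(refs + 3, limit).
    static int h264_surface_budget(int num_ref_frames, int extra_surfaces, int pool_limit) {
        return 1 + 1 + extra_surfaces + std::min(num_ref_frames + 3, pool_limit);
    }

    int main() {
        // Codec data reporting 4 reference frames, 4 extra surfaces requested,
        // and a pool limit of 20 yields a budget of 13 surfaces.
        printf("%d\n", h264_surface_budget(4, 4, 20));
        return 0;
    }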
+ ret = mix_framemanager_flush(this->framemgr); + } + // TO DO: re-initialize VA + return ret; } MIX_RESULT MixVideoFormat_H264::_update_ref_pic_list( - VAPictureParameterBufferH264* picture_params, - VASliceParameterBufferH264* slice_params) { - //Do slice parameters - //First patch up the List0 and List1 surface IDs - uint32 j = 0; - guint poc = 0; - gpointer video_frame = NULL; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - for (; j <= slice_params->num_ref_idx_l0_active_minus1; j++) { - if (!(slice_params->RefPicList0[j].flags & VA_PICTURE_H264_INVALID)) { - poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList0[j])); - video_frame = g_hash_table_lookup(this->dpb_surface_table, (gpointer)poc); - if (video_frame == NULL) { - LOG_E("unable to find surface of picture %d (current picture %d).", - poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic))); - ret = MIX_RESULT_DROPFRAME; //return non-fatal error - return ret; - } else { - slice_params->RefPicList0[j].picture_id = - ((MixVideoFrame *)video_frame)->frame_id; - } - } - } - - if ((slice_params->slice_type == 1) || (slice_params->slice_type == 6)) { - for (j = 0; j <= slice_params->num_ref_idx_l1_active_minus1; j++) { - if (!(slice_params->RefPicList1[j].flags & VA_PICTURE_H264_INVALID)) { - poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList1[j])); - video_frame = g_hash_table_lookup(this->dpb_surface_table, (gpointer)poc); - if (video_frame == NULL) { - LOG_E("unable to find surface of picture %d (current picture %d).", - poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic))); - ret = MIX_RESULT_DROPFRAME; //return non-fatal error - return ret; - } else { - slice_params->RefPicList1[j].picture_id = - ((MixVideoFrame *)video_frame)->frame_id; - } - } - } - } - return ret; + VAPictureParameterBufferH264* picture_params, + VASliceParameterBufferH264* slice_params) { + //Do slice parameters + //First patch up the List0 and List1 surface IDs + uint32 j = 0; + uint poc = 0; + void* video_frame = NULL; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + for (; j <= slice_params->num_ref_idx_l0_active_minus1; j++) { + if (!(slice_params->RefPicList0[j].flags & VA_PICTURE_H264_INVALID)) { + poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList0[j])); + video_frame = j_hash_table_lookup(this->dpb_surface_table, (void*)poc); + if (video_frame == NULL) { + LOG_E("unable to find surface of picture %d (current picture %d).", + poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic))); + ret = MIX_RESULT_DROPFRAME; //return non-fatal error + return ret; + } else { + slice_params->RefPicList0[j].picture_id = + ((MixVideoFrame *)video_frame)->frame_id; + } + } + } + + if ((slice_params->slice_type == 1) || (slice_params->slice_type == 6)) { + for (j = 0; j <= slice_params->num_ref_idx_l1_active_minus1; j++) { + if (!(slice_params->RefPicList1[j].flags & VA_PICTURE_H264_INVALID)) { + poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList1[j])); + video_frame = j_hash_table_lookup(this->dpb_surface_table, (void*)poc); + if (video_frame == NULL) { + LOG_E("unable to find surface of picture %d (current picture %d).", + poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic))); + ret = MIX_RESULT_DROPFRAME; //return non-fatal error + return ret; + } else { + slice_params->RefPicList1[j].picture_id = + ((MixVideoFrame *)video_frame)->frame_id; + } + } + } + } + return ret; } MIX_RESULT MixVideoFormat_H264::_decode_a_slice( - vbp_data_h264 *data, int picture_index, int slice_index) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus 
vret = VA_STATUS_SUCCESS; - VADisplay vadisplay = NULL; - VAContextID vacontext; - guint buffer_id_cnt = 0; - - // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data - VABufferID buffer_ids[4]; - - LOG_V( "Begin\n"); - //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); - vbp_picture_data_h264* pic_data = &(data->pic_data[picture_index]); - vbp_slice_data_h264* slice_data = &(pic_data->slc_data[slice_index]); - VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms; - VASliceParameterBufferH264* slice_params = &(slice_data->slc_parms); - vadisplay = this->va_display; - vacontext = this->va_context; + vbp_data_h264 *data, int picture_index, int slice_index) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + VADisplay vadisplay = NULL; + VAContextID vacontext; + uint buffer_id_cnt = 0; + + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID buffer_ids[4]; + + LOG_V( "Begin\n"); + //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + vbp_picture_data_h264* pic_data = &(data->pic_data[picture_index]); + vbp_slice_data_h264* slice_data = &(pic_data->slc_data[slice_index]); + VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms; + VASliceParameterBufferH264* slice_params = &(slice_data->slc_parms); + vadisplay = this->va_display; + vacontext = this->va_context; #ifdef DECODER_ROBUSTNESS - if ((slice_params->first_mb_in_slice == 0) || (!this->end_picture_pending)) + if ((slice_params->first_mb_in_slice == 0) || (!this->end_picture_pending)) #else - if (slice_params->first_mb_in_slice == 0) + if (slice_params->first_mb_in_slice == 0) #endif - { - // this is the first slice of the picture - if (this->end_picture_pending) { - // interlace content, decoding the first field - vret = vaEndPicture(vadisplay, vacontext); - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("vaEndPicture failed.\n"); - LOG_V( "End\n"); - return ret; - } - // for interlace content, top field may be valid only after the second field is parsed - mix_videoframe_set_displayorder(this->video_frame, pic_params->CurrPic.TopFieldOrderCnt); - } - - gulong surface = 0; - LOG_V("mix->video_frame = 0x%x\n", (unsigned)this->video_frame); - - //Get our surface ID from the frame object - ret = mix_videoframe_get_frame_id(this->video_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting surface ID from frame object\n"); - LOG_V( "End\n"); - return ret; } + { + // this is the first slice of the picture + if (this->end_picture_pending) { + // interlace content, decoding the first field + vret = vaEndPicture(vadisplay, vacontext); + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("vaEndPicture failed.\n"); + LOG_V( "End\n"); + return ret; + } + // for interlace content, top field may be valid only after the second field is parsed + mix_videoframe_set_displayorder(this->video_frame, pic_params->CurrPic.TopFieldOrderCnt); + } + + ulong surface = 0; + LOG_V("mix->video_frame = 0x%x\n", (unsigned)this->video_frame); + + //Get our surface ID from the frame object + ret = mix_videoframe_get_frame_id(this->video_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting surface ID from frame object\n"); + LOG_V( "End\n"); + return ret; + } #ifdef DECODER_ROBUSTNESS - LOG_V( "Updating DPB for libva\n"); - //Now handle the reference frames and surface IDs for DPB and current frame - _handle_ref_frames(pic_params, 
this->video_frame); + LOG_V( "Updating DPB for libva\n"); + //Now handle the reference frames and surface IDs for DPB and current frame + _handle_ref_frames(pic_params, this->video_frame); #ifdef HACK_DPB - //We have to provide a hacked DPB rather than complete DPB for libva as workaround - ret = mix_videofmt_h264_hack_dpb(this, pic_data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error reference frame not found\n"); - //Need to remove the frame we inserted in _handle_ref_frames above, since we are not going to decode it - _cleanup_ref_frame(pic_params, this->frame); - LOG_V( "End\n"); - return ret; - } + //We have to provide a hacked DPB rather than complete DPB for libva as workaround + ret = mix_videofmt_h264_hack_dpb(this, pic_data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error reference frame not found\n"); + //Need to remove the frame we inserted in _handle_ref_frames above, since we are not going to decode it + _cleanup_ref_frame(pic_params, this->frame); + LOG_V( "End\n"); + return ret; + } #endif - LOG_V( "Calling vaBeginPicture\n"); - //Now we can begin the picture - vret = vaBeginPicture(vadisplay, vacontext, surface); - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaBeginPicture\n"); - LOG_V( "End\n"); - return ret; - } - // vaBeginPicture needs a matching vaEndPicture - this->end_picture_pending = TRUE; + LOG_V( "Calling vaBeginPicture\n"); + //Now we can begin the picture + vret = vaBeginPicture(vadisplay, vacontext, surface); + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaBeginPicture\n"); + LOG_V( "End\n"); + return ret; + } + // vaBeginPicture needs a matching vaEndPicture + this->end_picture_pending = TRUE; #else - LOG_V( "Calling vaBeginPicture\n"); - - //Now we can begin the picture - vret = vaBeginPicture(vadisplay, vacontext, surface); - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaBeginPicture\n"); - LOG_V( "End\n"); - return ret; - } - // vaBeginPicture needs a matching vaEndPicture - this->end_picture_pending = TRUE; - LOG_V( "Updating DPB for libva\n"); - //Now handle the reference frames and surface IDs for DPB and current frame - _handle_ref_frames(pic_params, this->video_frame); + LOG_V( "Calling vaBeginPicture\n"); + + //Now we can begin the picture + vret = vaBeginPicture(vadisplay, vacontext, surface); + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaBeginPicture\n"); + LOG_V( "End\n"); + return ret; + } + // vaBeginPicture needs a matching vaEndPicture + this->end_picture_pending = TRUE; + LOG_V( "Updating DPB for libva\n"); + //Now handle the reference frames and surface IDs for DPB and current frame + _handle_ref_frames(pic_params, this->video_frame); #ifdef HACK_DPB - //We have to provide a hacked DPB rather than complete DPB for libva as workaround - ret = mix_videofmt_h264_hack_dpb(this, pic_data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error reference frame not found\n"); - LOG_V( "End\n"); - return ret; - } + //We have to provide a hacked DPB rather than complete DPB for libva as workaround + ret = mix_videofmt_h264_hack_dpb(this, pic_data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error reference frame not found\n"); + LOG_V( "End\n"); + return ret; + } #endif #endif - //Libva buffer set up - LOG_V( "Creating libva picture parameter buffer\n"); - LOG_V( "picture parameter buffer shows num_ref_frames is 
%d\n", pic_params->num_ref_frames); - //First the picture parameter buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAPictureParameterBufferType, - sizeof(VAPictureParameterBufferH264), - 1, - pic_params, - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - LOG_V( "End\n"); - return ret; - } - - buffer_id_cnt++; - LOG_V( "Creating libva IQMatrix buffer\n"); - - //Then the IQ matrix buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAIQMatrixBufferType, - sizeof(VAIQMatrixBufferH264), - 1, - data->IQ_matrix_buf, - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - LOG_V( "End\n"); - return ret; - } - buffer_id_cnt++; - } + //Libva buffer set up + LOG_V( "Creating libva picture parameter buffer\n"); + LOG_V( "picture parameter buffer shows num_ref_frames is %d\n", pic_params->num_ref_frames); + //First the picture parameter buffer + vret = vaCreateBuffer( + vadisplay, + vacontext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferH264), + 1, + pic_params, + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + LOG_V( "End\n"); + return ret; + } + + buffer_id_cnt++; + LOG_V( "Creating libva IQMatrix buffer\n"); + + //Then the IQ matrix buffer + vret = vaCreateBuffer( + vadisplay, + vacontext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferH264), + 1, + data->IQ_matrix_buf, + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + LOG_V( "End\n"); + return ret; + } + buffer_id_cnt++; + } #ifndef DECODER_ROBUSTNESS - if (!this->end_picture_pending) { - LOG_E("first slice is lost??????????\n"); - ret = MIX_RESULT_DROPFRAME; - LOG_V( "End\n"); - return ret; + if (!this->end_picture_pending) { + LOG_E("first slice is lost??????????\n"); + ret = MIX_RESULT_DROPFRAME; + LOG_V( "End\n"); + return ret; - } + } #endif - //Now for slices + //Now for slices - ret = _update_ref_pic_list(pic_params, slice_params); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videofmt_h264_update_ref_pic_list failed.\n"); - LOG_V( "End\n"); - return ret; - } + ret = _update_ref_pic_list(pic_params, slice_params); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videofmt_h264_update_ref_pic_list failed.\n"); + LOG_V( "End\n"); + return ret; + } - LOG_V( "Creating libva slice parameter buffer\n"); + LOG_V( "Creating libva slice parameter buffer\n"); - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceParameterBufferType, - sizeof(VASliceParameterBufferH264), - 1, - slice_params, - &buffer_ids[buffer_id_cnt]); + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264), + 1, + slice_params, + &buffer_ids[buffer_id_cnt]); - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - LOG_V( "End\n"); - return ret; - } + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + LOG_V( "End\n"); + return ret; + } - buffer_id_cnt++; + 
buffer_id_cnt++; - //Do slice data + //Do slice data - //slice data buffer pointer - //Note that this is the original data buffer ptr; - // offset to the actual slice data is provided in - // slice_data_offset in VASliceParameterBufferH264 + //slice data buffer pointer + //Note that this is the original data buffer ptr; + // offset to the actual slice data is provided in + // slice_data_offset in VASliceParameterBufferH264 - LOG_V( "Creating libva slice data buffer, using slice address %x, with offset %d and size %u\n", - (guint)slice_data->buffer_addr, slice_params->slice_data_offset, slice_data->slice_size); + LOG_V( "Creating libva slice data buffer, using slice address %x, with offset %d and size %u\n", + (uint)slice_data->buffer_addr, slice_params->slice_data_offset, slice_data->slice_size); - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceDataBufferType, - slice_data->slice_size, //size - 1, //num_elements - slice_data->buffer_addr + slice_data->slice_offset, - &buffer_ids[buffer_id_cnt]); + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceDataBufferType, + slice_data->slice_size, //size + 1, //num_elements + slice_data->buffer_addr + slice_data->slice_offset, + &buffer_ids[buffer_id_cnt]); - buffer_id_cnt++; + buffer_id_cnt++; - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - LOG_V( "End\n"); - return ret; + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + LOG_V( "End\n"); + return ret; - } + } - LOG_V( "Calling vaRenderPicture\n"); + LOG_V( "Calling vaRenderPicture\n"); - //Render the picture - vret = vaRenderPicture( - vadisplay, - vacontext, - buffer_ids, - buffer_id_cnt); + //Render the picture + vret = vaRenderPicture( + vadisplay, + vacontext, + buffer_ids, + buffer_id_cnt); - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaRenderPicture\n"); - LOG_V( "End\n"); - return ret; - } + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaRenderPicture\n"); + LOG_V( "End\n"); + return ret; + } - LOG_V( "End\n"); - return ret; + LOG_V( "End\n"); + return ret; } -MIX_RESULT MixVideoFormat_H264::_decode_end(gboolean drop_picture) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; +MIX_RESULT MixVideoFormat_H264::_decode_end(bool drop_picture) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; #ifdef DECODER_ROBUSTNESS - //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); #else - //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); #endif - LOG_V("Begin\n"); - if (!this->end_picture_pending) { - if (this->video_frame) { - ret = MIX_RESULT_FAIL; - LOG_E("Unexpected: video_frame is not unreferenced.\n"); - } - goto CLEAN_UP; - } - - if (this->video_frame == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E("Unexpected: video_frame has been unreferenced.\n"); - goto CLEAN_UP; - } - - LOG_V( "Calling vaEndPicture\n"); - vret = vaEndPicture(this->va_display, this->va_context); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaEndPicture\n"); - goto CLEAN_UP; - } + LOG_V("Begin\n"); + if (!this->end_picture_pending) { + if (this->video_frame) { + ret 
= MIX_RESULT_FAIL; + LOG_E("Unexpected: video_frame is not unreferenced.\n"); + } + goto CLEAN_UP; + } + + if (this->video_frame == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E("Unexpected: video_frame has been unreferenced.\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling vaEndPicture\n"); + vret = vaEndPicture(this->va_display, this->va_context); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaEndPicture\n"); + goto CLEAN_UP; + } #if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ - LOG_V( "Calling vaSyncSurface\n"); + LOG_V( "Calling vaSyncSurface\n"); - //Decode the picture - vret = vaSyncSurface(parent->va_display, parent->video_frame->frame_id); + //Decode the picture + vret = vaSyncSurface(parent->va_display, parent->video_frame->frame_id); - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaSyncSurface\n"); - CLEAN_UP; - } + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaSyncSurface\n"); + CLEAN_UP; + } #endif - if (drop_picture) { - // we are asked to drop this decoded picture - mix_videoframe_unref(this->video_frame); - this->video_frame = NULL; - goto CLEAN_UP; - } + if (drop_picture) { + // we are asked to drop this decoded picture + mix_videoframe_unref(this->video_frame); + this->video_frame = NULL; + goto CLEAN_UP; + } - LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", - this->video_frame->timestamp); + LOG_V( "Enqueueing the frame with frame manager, timestamp %"UINT64_FORMAT"\n", + this->video_frame->timestamp); #ifdef DECODER_ROBUSTNESS - if (this->last_decoded_frame) - mix_videoframe_unref(this->last_decoded_frame); - this->last_decoded_frame = this->video_frame; - mix_videoframe_ref(this->last_decoded_frame); + if (this->last_decoded_frame) + mix_videoframe_unref(this->last_decoded_frame); + this->last_decoded_frame = this->video_frame; + mix_videoframe_ref(this->last_decoded_frame); #endif - //Enqueue the decoded frame using frame manager - ret = mix_framemanager_enqueue(this->framemgr, this->video_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error enqueuing frame object\n"); - goto CLEAN_UP; - } else { - // video frame is passed to frame manager - this->video_frame = NULL; - LOG_V("video_frame is assigned to be NULL !\n"); - } + //Enqueue the decoded frame using frame manager + ret = mix_framemanager_enqueue(this->framemgr, this->video_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error enqueuing frame object\n"); + goto CLEAN_UP; + } else { + // video frame is passed to frame manager + this->video_frame = NULL; + LOG_V("video_frame is assigned to be NULL !\n"); + } CLEAN_UP: - if (this->video_frame) { - /* this always indicates an error */ - mix_videoframe_unref(this->video_frame); - this->video_frame = NULL; - } - this->end_picture_pending = FALSE; - LOG_V("End\n"); - return ret; + if (this->video_frame) { + /* this always indicates an error */ + mix_videoframe_unref(this->video_frame); + this->video_frame = NULL; + } + this->end_picture_pending = FALSE; + LOG_V("End\n"); + return ret; } MIX_RESULT MixVideoFormat_H264::_decode_continue(vbp_data_h264 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - uint32 i, j; - vbp_picture_data_h264* pic_data = NULL; - LOG_V("Begin\n"); - for (i = 0; i < data->num_pictures; i++) { - pic_data = &(data->pic_data[i]); - if (pic_data->pic_parms == NULL) { - ret = MIX_RESULT_FAIL; - 
LOG_E("pic_data->pic_parms is NULL.\n"); - LOG_V("End\n"); - return ret; - } - - if (pic_data->slc_data == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E("pic_data->slc_data is NULL.\n"); - LOG_V("End\n"); - return ret; - } - - if (pic_data->num_slices == 0) { - ret = MIX_RESULT_FAIL; - LOG_E("pic_data->num_slices == 0.\n"); - LOG_V("End\n"); - return ret; - } - - LOG_V( "num_slices is %d\n", pic_data->num_slices); - for (j = 0; j < pic_data->num_slices; j++) { - LOG_V( "Decoding slice %d\n", j); - ret = _decode_a_slice(data, i, j); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "mix_videofmt_h264_decode_a_slice failed, error = %#X.", ret); - LOG_V("End\n"); - return ret; - } - } - } - - LOG_V("End\n"); - return ret; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + uint32 i, j; + vbp_picture_data_h264* pic_data = NULL; + LOG_V("Begin\n"); + for (i = 0; i < data->num_pictures; i++) { + pic_data = &(data->pic_data[i]); + if (pic_data->pic_parms == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E("pic_data->pic_parms is NULL.\n"); + LOG_V("End\n"); + return ret; + } + + if (pic_data->slc_data == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E("pic_data->slc_data is NULL.\n"); + LOG_V("End\n"); + return ret; + } + + if (pic_data->num_slices == 0) { + ret = MIX_RESULT_FAIL; + LOG_E("pic_data->num_slices == 0.\n"); + LOG_V("End\n"); + return ret; + } + + LOG_V( "num_slices is %d\n", pic_data->num_slices); + for (j = 0; j < pic_data->num_slices; j++) { + LOG_V( "Decoding slice %d\n", j); + ret = _decode_a_slice(data, i, j); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "mix_videofmt_h264_decode_a_slice failed, error = %#X.", ret); + LOG_V("End\n"); + return ret; + } + } + } + + LOG_V("End\n"); + return ret; } MIX_RESULT MixVideoFormat_H264::_set_frame_type(vbp_data_h264 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - //Set the picture type (I, B or P frame) - //For H.264 we use the first encountered slice type for this (check - may need to change later to search all slices for B type) - MixFrameType frame_type = TYPE_INVALID; - switch (data->pic_data[0].slc_data[0].slc_parms.slice_type) { - case 0: - case 3: - case 5: - case 8: - frame_type = TYPE_P; - break; - case 1: - case 6: - frame_type = TYPE_B; - break; - case 2: - case 4: - case 7: - case 9: - frame_type = TYPE_I; - break; - default: - break; - } - - //Do not have to check for B frames after a seek - //Note: Demux should seek to IDR (instantaneous decoding refresh) frame, otherwise - // DPB will not be correct and frames may come in with invalid references - // This will be detected when DPB is checked for valid mapped surfaces and - // error returned from there. 
- - LOG_V( "frame type is %d\n", frame_type); - - //Set the frame type for the frame object (used in reordering by frame manager) - ret = mix_videoframe_set_frame_type(this->video_frame, frame_type); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error setting frame type on frame\n"); - } - return ret; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + //Set the picture type (I, B or P frame) + //For H.264 we use the first encountered slice type for this (check - may need to change later to search all slices for B type) + MixFrameType frame_type = TYPE_INVALID; + switch (data->pic_data[0].slc_data[0].slc_parms.slice_type) { + case 0: + case 3: + case 5: + case 8: + frame_type = TYPE_P; + break; + case 1: + case 6: + frame_type = TYPE_B; + break; + case 2: + case 4: + case 7: + case 9: + frame_type = TYPE_I; + break; + default: + break; + } + + //Do not have to check for B frames after a seek + //Note: Demux should seek to IDR (instantaneous decoding refresh) frame, otherwise + // DPB will not be correct and frames may come in with invalid references + // This will be detected when DPB is checked for valid mapped surfaces and + // error returned from there. + + LOG_V( "frame type is %d\n", frame_type); + + //Set the frame type for the frame object (used in reordering by frame manager) + ret = mix_videoframe_set_frame_type(this->video_frame, frame_type); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error setting frame type on frame\n"); + } + return ret; } MIX_RESULT MixVideoFormat_H264::_set_frame_structure( - vbp_data_h264 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - if (data->pic_data[0].pic_parms->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD) { - mix_videoframe_set_frame_structure(this->video_frame, VA_BOTTOM_FIELD | VA_TOP_FIELD); - } else { - mix_videoframe_set_frame_structure(this->video_frame, VA_FRAME_PICTURE); - } - return ret; + vbp_data_h264 *data) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + if (data->pic_data[0].pic_parms->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD) { + mix_videoframe_set_frame_structure(this->video_frame, VA_BOTTOM_FIELD | VA_TOP_FIELD); + } else { + mix_videoframe_set_frame_structure(this->video_frame, VA_FRAME_PICTURE); + } + return ret; } MIX_RESULT MixVideoFormat_H264::_decode_begin(vbp_data_h264 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - //Get a frame from the surface pool - LOG_V("Begin\n"); - ret = mix_surfacepool_get(this->surfacepool, &(this->video_frame)); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting frame from surfacepool\n"); - return ret; - } - - /* the following calls will always succeed */ - // set frame type - ret = _set_frame_type(data); - // set frame structure - ret = _set_frame_structure(data); - //Set the discontinuity flag - mix_videoframe_set_discontinuity(this->video_frame, this->discontinuity_frame_in_progress); - //Set the timestamp - mix_videoframe_set_timestamp(this->video_frame, this->current_timestamp); - // Set displayorder - ret = mix_videoframe_set_displayorder(this->video_frame, - data->pic_data[0].pic_parms->CurrPic.TopFieldOrderCnt); - if(ret != MIX_RESULT_SUCCESS) { - LOG_E("Error setting displayorder\n"); - return ret; - } - ret = _decode_continue(data); - LOG_V("End\n"); - return ret; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + //Get a frame from the surface pool + LOG_V("Begin\n"); + ret = mix_surfacepool_get(this->surfacepool, &(this->video_frame)); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting frame from surfacepool\n"); + return ret; + } + + /* the following calls will always succeed */ + 
// set frame type + ret = _set_frame_type(data); + // set frame structure + ret = _set_frame_structure(data); + //Set the discontinuity flag + mix_videoframe_set_discontinuity(this->video_frame, this->discontinuity_frame_in_progress); + //Set the timestamp + mix_videoframe_set_timestamp(this->video_frame, this->current_timestamp); + // Set displayorder + ret = mix_videoframe_set_displayorder(this->video_frame, + data->pic_data[0].pic_parms->CurrPic.TopFieldOrderCnt); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Error setting displayorder\n"); + return ret; + } + ret = _decode_continue(data); + LOG_V("End\n"); + return ret; } MIX_RESULT MixVideoFormat_H264::_decode_a_buffer( - MixBuffer * bufin, - guint64 ts, - gboolean discontinuity, - MixVideoDecodeParams * decode_params) { - uint32 pret = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - vbp_data_h264 *data = NULL; - - LOG_V( "Begin\n"); - LOG_V( "Calling parse for current frame, parse handle %d\n", (int)this->parser_handle); - pret = vbp_parse(this->parser_handle, - bufin->data, - bufin->size, - FALSE); - - LOG_V( "Called parse for current frame\n"); - if ((pret != VBP_DONE) &&(pret != VBP_OK)) { - ret = MIX_RESULT_ERROR_PROCESS_STREAM; // MIX_RESULT_DROPFRAME; - LOG_E( "vbp_parse failed.\n"); - LOG_V("End\n"); - return ret; - } - - //query for data - pret = vbp_query(this->parser_handle, (void**)&data); - - if ((pret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "vbp_query failed.\n"); - LOG_V("End\n"); - return ret; - - } - LOG_V( "Called query for current frame\n"); - - if (data->has_sps == 0 || data->has_pps == 0) { - ret = MIX_RESULT_MISSING_CONFIG; // MIX_RESULT_SUCCESS; - LOG_V("SPS or PPS is not available.\n"); - LOG_V("End\n"); - return ret; - - } - - if (data->new_sps) { - decode_params->new_sequence = data->new_sps; - - ret = _handle_new_sequence(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_video_h264_handle_new_sequence failed.\n"); - LOG_V("End\n"); - return ret; - - } - } - - if (this->va_initialized == FALSE) { - _update_config_params(data); - - LOG_V("try initializing VA...\n"); - ret = _initialize_va(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_videofmt_h264_initialize_va failed.\n"); - LOG_V("End\n"); - return ret; - } - } - - // first pic_data always exists, check if any slice is parsed - if (data->pic_data[0].num_slices == 0) { - ret = MIX_RESULT_SUCCESS; - LOG_V("slice is not available.\n"); - LOG_V("End\n"); - return ret; - } - - guint64 last_ts = this->current_timestamp; - this->current_timestamp = ts; - this->discontinuity_frame_in_progress = discontinuity; - - LOG_V("ts = %lli last_ts = %lli\n", ts, last_ts); - - if (last_ts != ts) { - // finish decoding the last frame - ret = _decode_end(FALSE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_videofmt_h264_decode_end failed.\n"); - LOG_V("End\n"); - return ret; - } - - // start decoding a new frame - ret = _decode_begin(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_videofmt_h264_decode_begin failed.\n"); - LOG_V("End\n"); - return ret; - } - } else { - // parital frame - LOG_V("partial frame handling...\n"); - ret = _decode_continue(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_videofmt_h264_decode_continue failed.\n"); - LOG_V("End\n"); - return ret; - } - } - - LOG_V("End\n"); - return ret; + MixBuffer * bufin, + uint64 ts, + bool discontinuity, + MixVideoDecodeParams * decode_params) { + uint32 pret = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + vbp_data_h264 *data = NULL; + + LOG_V( "Begin\n"); + LOG_V( 
"Calling parse for current frame, parse handle %d\n", (int)this->parser_handle); + pret = vbp_parse(this->parser_handle, + bufin->data, + bufin->size, + FALSE); + + LOG_V( "Called parse for current frame\n"); + if ((pret != VBP_DONE) &&(pret != VBP_OK)) { + ret = MIX_RESULT_ERROR_PROCESS_STREAM; // MIX_RESULT_DROPFRAME; + LOG_E( "vbp_parse failed.\n"); + LOG_V("End\n"); + return ret; + } + + //query for data + pret = vbp_query(this->parser_handle, (void**)&data); + + if ((pret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "vbp_query failed.\n"); + LOG_V("End\n"); + return ret; + + } + LOG_V( "Called query for current frame\n"); + + if (data->has_sps == 0 || data->has_pps == 0) { + ret = MIX_RESULT_MISSING_CONFIG; // MIX_RESULT_SUCCESS; + LOG_V("SPS or PPS is not available.\n"); + LOG_V("End\n"); + return ret; + + } + + if (data->new_sps) { + decode_params->new_sequence = data->new_sps; + + ret = _handle_new_sequence(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_video_h264_handle_new_sequence failed.\n"); + LOG_V("End\n"); + return ret; + + } + } + + if (this->va_initialized == FALSE) { + _update_config_params(data); + + LOG_V("try initializing VA...\n"); + ret = _initialize_va(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_videofmt_h264_initialize_va failed.\n"); + LOG_V("End\n"); + return ret; + } + } + + // first pic_data always exists, check if any slice is parsed + if (data->pic_data[0].num_slices == 0) { + ret = MIX_RESULT_SUCCESS; + LOG_V("slice is not available.\n"); + LOG_V("End\n"); + return ret; + } + + uint64 last_ts = this->current_timestamp; + this->current_timestamp = ts; + this->discontinuity_frame_in_progress = discontinuity; + + LOG_V("ts = %lli last_ts = %lli\n", ts, last_ts); + + if (last_ts != ts) { + // finish decoding the last frame + ret = _decode_end(FALSE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_videofmt_h264_decode_end failed.\n"); + LOG_V("End\n"); + return ret; + } + + // start decoding a new frame + ret = _decode_begin(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_videofmt_h264_decode_begin failed.\n"); + LOG_V("End\n"); + return ret; + } + } else { + // parital frame + LOG_V("partial frame handling...\n"); + ret = _decode_continue(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_videofmt_h264_decode_continue failed.\n"); + LOG_V("End\n"); + return ret; + } + } + + LOG_V("End\n"); + return ret; } @@ -1117,277 +1118,277 @@ MIX_RESULT MixVideoFormat_H264::_decode_a_buffer( #define HACK_DPB #ifdef HACK_DPB static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, - vbp_picture_data_h264* pic_data - ) + vbp_picture_data_h264* pic_data + ) { - gboolean found = FALSE; - guint tflags = 0; - VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms; - VAPictureH264 *pRefList = NULL; - uint32 i = 0, j = 0, k = 0, list = 0; - - MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); - - //Set the surface ID for everything in the parser DPB to INVALID - for (i = 0; i < 16; i++) - { - pic_params->ReferenceFrames[i].picture_id = VA_INVALID_SURFACE; - pic_params->ReferenceFrames[i].frame_idx = -1; - pic_params->ReferenceFrames[i].TopFieldOrderCnt = -1; - pic_params->ReferenceFrames[i].BottomFieldOrderCnt = -1; - pic_params->ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID; //assuming we don't need to OR with existing flags - } - - pic_params->num_ref_frames = 0; - - for (i = 0; i < pic_data->num_slices; i++) - { - - //Copy from the List0 and List1 surface IDs - pRefList = 
pic_data->slc_data[i].slc_parms.RefPicList0; - for (list = 0; list < 2; list++) - { - for (j = 0; j < 32; j++) - { - if (pRefList[j].flags & VA_PICTURE_H264_INVALID) - { - break; //no more valid reference frames in this list - } - found = FALSE; - for (k = 0; k < pic_params->num_ref_frames; k++) - { - if (pic_params->ReferenceFrames[k].TopFieldOrderCnt == pRefList[j].TopFieldOrderCnt) - { - ///check for complementary field - tflags = pic_params->ReferenceFrames[k].flags | pRefList[j].flags; - //If both TOP and BOTTOM are set, we'll clear those flags - if ((tflags & VA_PICTURE_H264_TOP_FIELD) && - (tflags & VA_PICTURE_H264_TOP_FIELD)) - pic_params->ReferenceFrames[k].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; - found = TRUE; //already in the DPB; will not add this one - break; - } - } - if (!found) - { - guint poc = mix_videofmt_h264_get_poc(&(pRefList[j])); - gpointer video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc); + bool found = FALSE; + uint tflags = 0; + VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms; + VAPictureH264 *pRefList = NULL; + uint32 i = 0, j = 0, k = 0, list = 0; + + MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); + + //Set the surface ID for everything in the parser DPB to INVALID + for (i = 0; i < 16; i++) + { + pic_params->ReferenceFrames[i].picture_id = VA_INVALID_SURFACE; + pic_params->ReferenceFrames[i].frame_idx = -1; + pic_params->ReferenceFrames[i].TopFieldOrderCnt = -1; + pic_params->ReferenceFrames[i].BottomFieldOrderCnt = -1; + pic_params->ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID; //assuming we don't need to OR with existing flags + } + + pic_params->num_ref_frames = 0; + + for (i = 0; i < pic_data->num_slices; i++) + { + + //Copy from the List0 and List1 surface IDs + pRefList = pic_data->slc_data[i].slc_parms.RefPicList0; + for (list = 0; list < 2; list++) + { + for (j = 0; j < 32; j++) + { + if (pRefList[j].flags & VA_PICTURE_H264_INVALID) + { + break; //no more valid reference frames in this list + } + found = FALSE; + for (k = 0; k < pic_params->num_ref_frames; k++) + { + if (pic_params->ReferenceFrames[k].TopFieldOrderCnt == pRefList[j].TopFieldOrderCnt) + { + ///check for complementary field + tflags = pic_params->ReferenceFrames[k].flags | pRefList[j].flags; + //If both TOP and BOTTOM are set, we'll clear those flags + if ((tflags & VA_PICTURE_H264_TOP_FIELD) && + (tflags & VA_PICTURE_H264_BOTTOM_FIELD)) + pic_params->ReferenceFrames[k].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; + found = TRUE; //already in the DPB; will not add this one + break; + } + } + if (!found) + { + uint poc = mix_videofmt_h264_get_poc(&(pRefList[j])); + void* video_frame = j_hash_table_lookup(self->dpb_surface_table, (void*)poc); #ifdef DECODER_ROBUSTNESS - if (!video_frame) - { - if (!self->last_decoded_frame) - { - //No saved reference frame, can't recover this one - return MIX_RESULT_DROPFRAME; - } - - pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = - ((MixVideoFrame *)self->last_decoded_frame)->frame_id; - LOG_V( "Reference frame not found, substituting %d\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); - - } - else - { - pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = - ((MixVideoFrame *)video_frame)->frame_id; - } + if (!video_frame) + { + if (!self->last_decoded_frame) + { + //No saved reference frame, can't recover this one + return MIX_RESULT_DROPFRAME; + } + + pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = + 
((MixVideoFrame *)self->last_decoded_frame)->frame_id; + LOG_V( "Reference frame not found, substituting %d\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); + + } + else + { + pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = + ((MixVideoFrame *)video_frame)->frame_id; + } #else - if (!video_frame) return MIX_RESULT_DROPFRAME; //return non-fatal error + if (!video_frame) return MIX_RESULT_DROPFRAME; //return non-fatal error - pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = - ((MixVideoFrame *)video_frame)->frame_id; + pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = + ((MixVideoFrame *)video_frame)->frame_id; #endif - LOG_V( "Inserting frame id %d into DPB\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); + LOG_V( "Inserting frame id %d into DPB\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); - pic_params->ReferenceFrames[pic_params->num_ref_frames].flags = - pRefList[j].flags; - pic_params->ReferenceFrames[pic_params->num_ref_frames].frame_idx = - pRefList[j].frame_idx; - pic_params->ReferenceFrames[pic_params->num_ref_frames].TopFieldOrderCnt = - pRefList[j].TopFieldOrderCnt; - pic_params->ReferenceFrames[pic_params->num_ref_frames++].BottomFieldOrderCnt = - pRefList[j].BottomFieldOrderCnt; - } + pic_params->ReferenceFrames[pic_params->num_ref_frames].flags = + pRefList[j].flags; + pic_params->ReferenceFrames[pic_params->num_ref_frames].frame_idx = + pRefList[j].frame_idx; + pic_params->ReferenceFrames[pic_params->num_ref_frames].TopFieldOrderCnt = + pRefList[j].TopFieldOrderCnt; + pic_params->ReferenceFrames[pic_params->num_ref_frames++].BottomFieldOrderCnt = + pRefList[j].BottomFieldOrderCnt; + } - } - pRefList = pic_data->slc_data[i].slc_parms.RefPicList1; - } + } + pRefList = pic_data->slc_data[i].slc_parms.RefPicList1; + } - } - return MIX_RESULT_SUCCESS; + } + return MIX_RESULT_SUCCESS; } #endif MIX_RESULT MixVideoFormat_H264::_handle_ref_frames( - VAPictureParameterBufferH264* pic_params, - MixVideoFrame * current_frame) { + VAPictureParameterBufferH264* pic_params, + MixVideoFrame * current_frame) { + + uint poc = 0; + LOG_V( "Begin\n"); - guint poc = 0; - LOG_V( "Begin\n"); + if (current_frame == NULL || pic_params == NULL) { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } - if (current_frame == NULL || pic_params == NULL) { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d. Surface ID is %d\n", - pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, - pic_params->CurrPic.BottomFieldOrderCnt, (gint) current_frame->frame_id); + LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d. 
Surface ID is %d\n", + pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, + pic_params->CurrPic.BottomFieldOrderCnt, (int) current_frame->frame_id); #ifdef MIX_LOG_ENABLE - if (pic_params->CurrPic.flags & VA_PICTURE_H264_INVALID) - LOG_V( "Flags show VA_PICTURE_H264_INVALID\n"); + if (pic_params->CurrPic.flags & VA_PICTURE_H264_INVALID) + LOG_V( "Flags show VA_PICTURE_H264_INVALID\n"); - if (pic_params->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD) - LOG_V( "Flags show VA_PICTURE_H264_TOP_FIELD\n"); + if (pic_params->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD) + LOG_V( "Flags show VA_PICTURE_H264_TOP_FIELD\n"); - if (pic_params->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD) - LOG_V( "Flags show VA_PICTURE_H264_BOTTOM_FIELD\n"); + if (pic_params->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD) + LOG_V( "Flags show VA_PICTURE_H264_BOTTOM_FIELD\n"); - if (pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) - LOG_V( "Flags show VA_PICTURE_H264_SHORT_TERM_REFERENCE\n"); + if (pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) + LOG_V( "Flags show VA_PICTURE_H264_SHORT_TERM_REFERENCE\n"); - if (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE) - LOG_V( "Flags show VA_PICTURE_H264_LONG_TERM_REFERENCE\n"); + if (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE) + LOG_V( "Flags show VA_PICTURE_H264_LONG_TERM_REFERENCE\n"); #endif - //First we need to check the parser DBP against our DPB table - //So for each item in our DBP table, we look to see if it is in the parser DPB - //If it is not, it gets unrefed and removed + //First we need to check the parser DBP against our DPB table + //So for each item in our DBP table, we look to see if it is in the parser DPB + //If it is not, it gets unrefed and removed #ifdef MIX_LOG_ENABLE - guint num_removed = + uint num_removed = #endif - g_hash_table_foreach_remove(this->dpb_surface_table, mix_videofmt_h264_check_in_DPB, pic_params); - - LOG_V( "%d entries removed from DPB surface table at this frame\n", num_removed); - - - MixVideoFrame *mvf = NULL; - gboolean found = FALSE; - //Set the surface ID for everything in the parser DPB - int i = 0; - for (; i < 16; i++) { - if (!(pic_params->ReferenceFrames[i].flags & VA_PICTURE_H264_INVALID)) { - poc = mix_videofmt_h264_get_poc(&(pic_params->ReferenceFrames[i])); - LOG_V( "Looking up poc %d in dpb table\n", poc); - found = g_hash_table_lookup_extended(this->dpb_surface_table, - (gpointer)poc, NULL, (gpointer*)&mvf); - if (found) { - pic_params->ReferenceFrames[i].picture_id = mvf->frame_id; - LOG_V( "Looked up poc %d in dpb table found frame ID %d\n", poc, (gint)mvf->frame_id); - } else { - LOG_V( "Looking up poc %d in dpb table did not find value\n", poc); - } - LOG_V( "For poc %d, set surface id for DPB index %d to %d\n", - poc, i, (gint)pic_params->ReferenceFrames[i].picture_id); - } - } - //Set picture_id for current picture - pic_params->CurrPic.picture_id = current_frame->frame_id; - - //Check to see if current frame is a reference frame - if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || - (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { - //Get current frame's POC - poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); - //Increment the reference count for this frame - mix_videoframe_ref(current_frame); - LOG_V( "Inserting poc %d, surfaceID %d\n", poc, (gint)current_frame->frame_id); - //Add this frame to the DPB surface table - g_hash_table_insert(this->dpb_surface_table, (gpointer)poc, 
current_frame); - } - - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; + j_hash_table_foreach_remove(this->dpb_surface_table, mix_videofmt_h264_check_in_DPB, pic_params); + + LOG_V( "%d entries removed from DPB surface table at this frame\n", num_removed); + + + MixVideoFrame *mvf = NULL; + //bool found = FALSE; + //Set the surface ID for everything in the parser DPB + int i = 0; + for (; i < 16; i++) { + if (!(pic_params->ReferenceFrames[i].flags & VA_PICTURE_H264_INVALID)) { + poc = mix_videofmt_h264_get_poc(&(pic_params->ReferenceFrames[i])); + LOG_V( "Looking up poc %d in dpb table\n", poc); + //found = j_hash_table_lookup_extended(this->dpb_surface_table, (void*)poc, NULL, (void**)&mvf); + mvf = (MixVideoFrame*)j_hash_table_lookup( this->dpb_surface_table, (void *)poc); + if (NULL != mvf) { + pic_params->ReferenceFrames[i].picture_id = mvf->frame_id; + LOG_V( "Looked up poc %d in dpb table found frame ID %d\n", poc, (int)mvf->frame_id); + } else { + LOG_V( "Looking up poc %d in dpb table did not find value\n", poc); + } + LOG_V( "For poc %d, set surface id for DPB index %d to %d\n", + poc, i, (int)pic_params->ReferenceFrames[i].picture_id); + } + } + //Set picture_id for current picture + pic_params->CurrPic.picture_id = current_frame->frame_id; + + //Check to see if current frame is a reference frame + if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || + (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { + //Get current frame's POC + poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); + //Increment the reference count for this frame + mix_videoframe_ref(current_frame); + LOG_V( "Inserting poc %d, surfaceID %d\n", poc, (int)current_frame->frame_id); + //Add this frame to the DPB surface table + j_hash_table_insert(this->dpb_surface_table, (void*)poc, current_frame); + } + + LOG_V( "End\n"); + return MIX_RESULT_SUCCESS; } MIX_RESULT MixVideoFormat_H264::_cleanup_ref_frame( - VAPictureParameterBufferH264* pic_params, MixVideoFrame * current_frame) { - - guint poc = 0; - LOG_V( "Begin\n"); - - if (current_frame == NULL || pic_params == NULL) { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d. Surface ID is %d\n", - pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, - pic_params->CurrPic.BottomFieldOrderCnt, (gint) current_frame->frame_id); - - //Check to see if current frame is a reference frame - if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || - (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { - //Get current frame's POC - poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); - //We don't need to decrement the ref count for the video frame here; it's done elsewhere - LOG_V( "Removing poc %d, surfaceID %d\n", poc, (gint)current_frame->frame_id); - //Remove this frame from the DPB surface table - g_hash_table_remove(this->dpb_surface_table, (gpointer)poc); - } - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; + VAPictureParameterBufferH264* pic_params, MixVideoFrame * current_frame) { + + uint poc = 0; + LOG_V( "Begin\n"); + + if (current_frame == NULL || pic_params == NULL) { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d. 
Surface ID is %d\n", + pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, + pic_params->CurrPic.BottomFieldOrderCnt, (int) current_frame->frame_id); + + //Check to see if current frame is a reference frame + if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || + (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { + //Get current frame's POC + poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); + //We don't need to decrement the ref count for the video frame here; it's done elsewhere + LOG_V( "Removing poc %d, surfaceID %d\n", poc, (int)current_frame->frame_id); + //Remove this frame from the DPB surface table + j_hash_table_remove(this->dpb_surface_table, (void*)poc); + } + LOG_V( "End\n"); + return MIX_RESULT_SUCCESS; } -guint mix_videofmt_h264_get_poc(VAPictureH264 *pic) { - if (pic == NULL) - return 0; +uint mix_videofmt_h264_get_poc(VAPictureH264 *pic) { + if (pic == NULL) + return 0; - if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) - return pic->BottomFieldOrderCnt; + if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) + return pic->BottomFieldOrderCnt; - if (pic->flags & VA_PICTURE_H264_TOP_FIELD) - return pic->TopFieldOrderCnt; + if (pic->flags & VA_PICTURE_H264_TOP_FIELD) + return pic->TopFieldOrderCnt; - return pic->TopFieldOrderCnt; + return pic->TopFieldOrderCnt; } -gboolean mix_videofmt_h264_check_in_DPB( - gpointer key, gpointer value, gpointer user_data) { - gboolean ret = TRUE; - if ((value == NULL) || (user_data == NULL)) //Note that 0 is valid value for key - return FALSE; - - VAPictureH264* vaPic = NULL; - int i = 0; - for (; i < 16; i++) - { - vaPic = &(((VAPictureParameterBufferH264*)user_data)->ReferenceFrames[i]); - if (vaPic->flags & VA_PICTURE_H264_INVALID) - continue; - - if ((guint)key == vaPic->TopFieldOrderCnt || - (guint)key == vaPic->BottomFieldOrderCnt) - { - ret = FALSE; - break; - } - } - return ret; +int mix_videofmt_h264_check_in_DPB( + void* key, void* value, void* user_data) { + int ret = (!0); + if ((value == NULL) || (user_data == NULL)) //Note that 0 is valid value for key + return 0; + + VAPictureH264* vaPic = NULL; + int i = 0; + for (; i < 16; i++) + { + vaPic = &(((VAPictureParameterBufferH264*)user_data)->ReferenceFrames[i]); + if (vaPic->flags & VA_PICTURE_H264_INVALID) + continue; + + if ((uint)key == vaPic->TopFieldOrderCnt || + (uint)key == vaPic->BottomFieldOrderCnt) + { + ret = 0; + break; + } + } + return ret; } -void mix_videofmt_h264_destroy_DPB_key(gpointer data) +void mix_videofmt_h264_destroy_DPB_key(void* data) { - //TODO remove this method and don't register it with the hash table foreach call; it is no longer needed - LOG_V( "Begin, poc of %d\n", (guint)data); - LOG_V( "End\n"); - return; + //TODO remove this method and don't register it with the hash table foreach call; it is no longer needed + LOG_V( "Begin, poc of %d\n", (uint)data); + LOG_V( "End\n"); + return; } -void mix_videofmt_h264_destroy_DPB_value(gpointer data) +void mix_videofmt_h264_destroy_DPB_value(void* data) { - LOG_V( "Begin\n"); - if (data != NULL) { - mix_videoframe_unref((MixVideoFrame *)data); - } - LOG_V( "End\n"); - return; + LOG_V( "Begin\n"); + if (data != NULL) { + mix_videoframe_unref((MixVideoFrame *)data); + } + LOG_V( "End\n"); + return; } diff --git a/mix_video/src/mixvideoformat_h264.h b/mix_video/src/mixvideoformat_h264.h index e2ea007..ce16281 100644 --- a/mix_video/src/mixvideoformat_h264.h +++ b/mix_video/src/mixvideoformat_h264.h @@ -11,6 +11,7 @@ #include "mixvideoformat.h" #include 
"mixvideoframe_private.h" +#include #define DECODER_ROBUSTNESS @@ -23,52 +24,53 @@ + class MixVideoFormat_H264 : public MixVideoFormat { public: - MixVideoFormat_H264(); - virtual ~MixVideoFormat_H264(); - - virtual MIX_RESULT Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); - virtual MIX_RESULT Decode( - MixBuffer * bufin[], gint bufincnt, - MixVideoDecodeParams * decode_params); - virtual MIX_RESULT Flush(); - virtual MIX_RESULT EndOfStream(); + MixVideoFormat_H264(); + virtual ~MixVideoFormat_H264(); + + virtual MIX_RESULT Initialize( + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); + virtual MIX_RESULT Decode( + MixBuffer * bufin[], int bufincnt, + MixVideoDecodeParams * decode_params); + virtual MIX_RESULT Flush(); + virtual MIX_RESULT EndOfStream(); private: - // Local Help Func - MIX_RESULT _update_config_params(vbp_data_h264 *data); - MIX_RESULT _initialize_va(vbp_data_h264 *data); - MIX_RESULT _decode_a_buffer(MixBuffer * bufin, guint64 ts, - gboolean discontinuity, MixVideoDecodeParams * decode_params); - MIX_RESULT _decode_end(gboolean drop_picture); - MIX_RESULT _handle_new_sequence(vbp_data_h264 *data); - MIX_RESULT _decode_begin(vbp_data_h264 *data); - MIX_RESULT _decode_continue(vbp_data_h264 *data); - MIX_RESULT _set_frame_type(vbp_data_h264 *data); - MIX_RESULT _set_frame_structure(vbp_data_h264 *data); - MIX_RESULT _update_ref_pic_list(VAPictureParameterBufferH264* picture_params, - VASliceParameterBufferH264* slice_params); - MIX_RESULT _decode_a_slice(vbp_data_h264 *data, - int picture_index, int slice_index); - MIX_RESULT _cleanup_ref_frame( - VAPictureParameterBufferH264* pic_params, MixVideoFrame * current_frame); - MIX_RESULT _handle_ref_frames( - VAPictureParameterBufferH264* pic_params, - MixVideoFrame * current_frame); + // Local Help Func + MIX_RESULT _update_config_params(vbp_data_h264 *data); + MIX_RESULT _initialize_va(vbp_data_h264 *data); + MIX_RESULT _decode_a_buffer(MixBuffer * bufin, uint64 ts, + bool discontinuity, MixVideoDecodeParams * decode_params); + MIX_RESULT _decode_end(bool drop_picture); + MIX_RESULT _handle_new_sequence(vbp_data_h264 *data); + MIX_RESULT _decode_begin(vbp_data_h264 *data); + MIX_RESULT _decode_continue(vbp_data_h264 *data); + MIX_RESULT _set_frame_type(vbp_data_h264 *data); + MIX_RESULT _set_frame_structure(vbp_data_h264 *data); + MIX_RESULT _update_ref_pic_list(VAPictureParameterBufferH264* picture_params, + VASliceParameterBufferH264* slice_params); + MIX_RESULT _decode_a_slice(vbp_data_h264 *data, + int picture_index, int slice_index); + MIX_RESULT _cleanup_ref_frame( + VAPictureParameterBufferH264* pic_params, MixVideoFrame * current_frame); + MIX_RESULT _handle_ref_frames( + VAPictureParameterBufferH264* pic_params, + MixVideoFrame * current_frame); public: - /*< public > */ - /*< private > */ - GHashTable *dpb_surface_table; + /*< public > */ + /*< private > */ + JHashTable *dpb_surface_table; #ifdef DECODER_ROBUSTNESS - //Can improve which frame is used for this at a later time - MixVideoFrame *last_decoded_frame; //last surface decoded, to be used as reference frame when reference frames are missing + //Can improve which frame is used for this at a later time + MixVideoFrame *last_decoded_frame; //last surface decoded, to be used as reference frame when reference frames are 
missing #endif }; @@ -100,9 +102,9 @@ MixVideoFormat_H264* mix_videoformat_h264_unref(MixVideoFormat_H264 *mix); /* Helper functions to manage the DPB table */ -gboolean mix_videofmt_h264_check_in_DPB(gpointer key, gpointer value, gpointer user_data); -void mix_videofmt_h264_destroy_DPB_key(gpointer data); -void mix_videofmt_h264_destroy_DPB_value(gpointer data); -guint mix_videofmt_h264_get_poc(VAPictureH264 *pic); +int mix_videofmt_h264_check_in_DPB(void* key, void* value, void* user_data); +void mix_videofmt_h264_destroy_DPB_key(void* data); +void mix_videofmt_h264_destroy_DPB_value(void* data); +uint mix_videofmt_h264_get_poc(VAPictureH264 *pic); #endif /* __MIX_VIDEOFORMAT_H264_H__ */ diff --git a/mix_video/src/mixvideoformat_mp42.cpp b/mix_video/src/mixvideoformat_mp42.cpp index 29d460e..8264a38 100644 --- a/mix_video/src/mixvideoformat_mp42.cpp +++ b/mix_video/src/mixvideoformat_mp42.cpp @@ -5,7 +5,7 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#include + #include #include "mixvideolog.h" #include "mixvideoformat_mp42.h" @@ -13,1052 +13,1052 @@ // Value of VOP type defined here follows MP4 spec, and has the same value of corresponding frame type // defined in enumeration MixFrameType (except sprite (S)) enum { - MP4_VOP_TYPE_I = 0, - MP4_VOP_TYPE_P = 1, - MP4_VOP_TYPE_B = 2, - MP4_VOP_TYPE_S = 3, + MP4_VOP_TYPE_I = 0, + MP4_VOP_TYPE_P = 1, + MP4_VOP_TYPE_B = 2, + MP4_VOP_TYPE_S = 3, }; MixVideoFormat_MP42::MixVideoFormat_MP42() - :last_frame(NULL) - ,last_vop_coding_type(-1) - ,last_vop_time_increment(0) - ,next_nvop_for_PB_frame(FALSE) - ,iq_matrix_buf_sent(FALSE) { - this->reference_frames[0] = NULL; - this->reference_frames[1] = NULL; + :last_frame(NULL) + ,last_vop_coding_type(-1) + ,last_vop_time_increment(0) + ,next_nvop_for_PB_frame(FALSE) + ,iq_matrix_buf_sent(FALSE) { + this->reference_frames[0] = NULL; + this->reference_frames[1] = NULL; } MixVideoFormat_MP42::~MixVideoFormat_MP42() { - /* clean up here. */ - gint32 vbp_ret = VBP_OK; - LOG_V("Begin\n"); - //surfacepool is deallocated by parent - //inputbufqueue is deallocated by parent - //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces - - Lock(); - - /* unref reference frames */ - for (uint32 idx = 0; idx < 2; idx++) { - if (this->reference_frames[idx] != NULL) { - mix_videoframe_unref(this->reference_frames[idx]); - this->reference_frames[idx] = NULL; - } - } - if (this->last_frame) { - mix_videoframe_unref(this->last_frame); - this->last_frame = NULL; - } - this->next_nvop_for_PB_frame = FALSE; - this->iq_matrix_buf_sent = FALSE; - - /* Reset state */ - this->initialized = TRUE; - this->end_picture_pending = FALSE; - this->discontinuity_frame_in_progress = FALSE; - this->current_timestamp = (guint64)-1; - - /* Close the parser */ - if (this->parser_handle) { - vbp_ret = vbp_close(this->parser_handle); - this->parser_handle = NULL; - } - - Unlock(); - LOG_V("End\n"); + /* clean up here. 
*/ + int32 vbp_ret = VBP_OK; + LOG_V("Begin\n"); + //surfacepool is deallocated by parent + //inputbufqueue is deallocated by parent + //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces + + Lock(); + + /* unref reference frames */ + for (uint32 idx = 0; idx < 2; idx++) { + if (this->reference_frames[idx] != NULL) { + mix_videoframe_unref(this->reference_frames[idx]); + this->reference_frames[idx] = NULL; + } + } + if (this->last_frame) { + mix_videoframe_unref(this->last_frame); + this->last_frame = NULL; + } + this->next_nvop_for_PB_frame = FALSE; + this->iq_matrix_buf_sent = FALSE; + + /* Reset state */ + this->initialized = TRUE; + this->end_picture_pending = FALSE; + this->discontinuity_frame_in_progress = FALSE; + this->current_timestamp = (uint64)-1; + + /* Close the parser */ + if (this->parser_handle) { + vbp_ret = vbp_close(this->parser_handle); + this->parser_handle = NULL; + } + + Unlock(); + LOG_V("End\n"); } MixVideoFormat_MP42 *mix_videoformat_mp42_new(void) { - return new MixVideoFormat_MP42; + return new MixVideoFormat_MP42; } MixVideoFormat_MP42 * mix_videoformat_mp42_ref(MixVideoFormat_MP42 * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } -MixVideoFormat_MP42 *mix_videoformat_mp42_unref(MixVideoFormat_MP42 * mix){ - if (NULL != mix) - return MIX_VIDEOFORMAT_MP42(mix->Unref()); - else - return mix; +MixVideoFormat_MP42 *mix_videoformat_mp42_unref(MixVideoFormat_MP42 * mix) { + if (NULL != mix) + return MIX_VIDEOFORMAT_MP42(mix->Unref()); + else + return mix; } MIX_RESULT MixVideoFormat_MP42::_update_config_params( - vbp_data_mp42 *data) { - if (this->picture_width == 0 || - this->picture_height == 0 || - this->picture_width < data->codec_data.video_object_layer_width || - this->picture_height < data->codec_data.video_object_layer_height) { - this->picture_width = data->codec_data.video_object_layer_width; - this->picture_height = data->codec_data.video_object_layer_height; - mix_videoconfigparamsdec_set_picture_res( - this->config_params, this->picture_width, this->picture_height); - } - // video_range has default value of 0. Y ranges from 16 to 235. - mix_videoconfigparamsdec_set_video_range(this->config_params, data->codec_data.video_range); - uint8 color_matrix; - switch (data->codec_data.matrix_coefficients) { - case 1: - color_matrix = VA_SRC_BT709; - break; - // ITU-R Recommendation BT.470-6 System B, G (MP4), same as - // SMPTE 170M/BT601 - case 5: - case 6: - color_matrix = VA_SRC_BT601; - break; - default: - // unknown color matrix, set to 0 so color space flag will not be set. - color_matrix = 0; - break; - } - mix_videoconfigparamsdec_set_color_matrix(this->config_params, color_matrix); - - mix_videoconfigparamsdec_set_pixel_aspect_ratio( - this->config_params, data->codec_data.par_width, data->codec_data.par_height); - return MIX_RESULT_SUCCESS; + vbp_data_mp42 *data) { + if (this->picture_width == 0 || + this->picture_height == 0 || + this->picture_width < data->codec_data.video_object_layer_width || + this->picture_height < data->codec_data.video_object_layer_height) { + this->picture_width = data->codec_data.video_object_layer_width; + this->picture_height = data->codec_data.video_object_layer_height; + mix_videoconfigparamsdec_set_picture_res( + this->config_params, this->picture_width, this->picture_height); + } + // video_range has default value of 0. Y ranges from 16 to 235. 
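For reference, the video_range convention noted in the comment above is VA-API's usual one: 0 selects limited (TV) range and 1 selects full (PC) range. A tiny illustration, with a hypothetical helper name that is not part of this library:

/* Hypothetical illustration of the video_range convention:
 * 0 = limited range (Y spans 16..235), 1 = full range (Y spans 0..255). */
static inline void luma_range_for(int video_range, int *y_min, int *y_max)
{
    *y_min = video_range ? 0 : 16;
    *y_max = video_range ? 255 : 235;
}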
+ mix_videoconfigparamsdec_set_video_range(this->config_params, data->codec_data.video_range); + uint8 color_matrix; + switch (data->codec_data.matrix_coefficients) { + case 1: + color_matrix = VA_SRC_BT709; + break; + // ITU-R Recommendation BT.470-6 System B, G (MP4), same as + // SMPTE 170M/BT601 + case 5: + case 6: + color_matrix = VA_SRC_BT601; + break; + default: + // unknown color matrix, set to 0 so color space flag will not be set. + color_matrix = 0; + break; + } + mix_videoconfigparamsdec_set_color_matrix(this->config_params, color_matrix); + + mix_videoconfigparamsdec_set_pixel_aspect_ratio( + this->config_params, data->codec_data.par_width, data->codec_data.par_height); + return MIX_RESULT_SUCCESS; } MIX_RESULT MixVideoFormat_MP42::_initialize_va(vbp_data_mp42 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - VAConfigAttrib attrib; - VAProfile va_profile; - LOG_V( "Begin\n"); - if (this->va_initialized) { - LOG_W("va already initialized.\n"); - return MIX_RESULT_SUCCESS; - } - - //We are requesting RT attributes - attrib.type = VAConfigAttribRTFormat; - attrib.value = VA_RT_FORMAT_YUV420; - - //Initialize and save the VA config ID - if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) { - va_profile = VAProfileMPEG4AdvancedSimple; - } else { - va_profile = VAProfileMPEG4Simple; - } - vret = vaCreateConfig( - this->va_display, - va_profile, - VAEntrypointVLD, - &attrib, - 1, - &(this->va_config)); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("vaCreateConfig failed\n"); - goto CLEAN_UP; - } - - // add 1 more surface for packed frame (PB frame), and another one - // for partial frame handling - this->va_num_surfaces = this->extra_surfaces + 4 + 1 + 1; - //if (parent->va_num_surfaces > MIX_VIDEO_MP42_SURFACE_NUM) - // parent->va_num_surfaces = MIX_VIDEO_MP42_SURFACE_NUM; - - this->va_surfaces = reinterpret_cast(g_malloc(sizeof(VASurfaceID)*this->va_num_surfaces)); - if (this->va_surfaces == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E( "parent->va_surfaces == NULL. \n"); - goto CLEAN_UP; - } - - vret = vaCreateSurfaces( - this->va_display, - this->picture_width, - this->picture_height, - VA_RT_FORMAT_YUV420, - this->va_num_surfaces, - this->va_surfaces); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error allocating surfaces\n"); - goto CLEAN_UP; - } - - LOG_V( "Created %d libva surfaces\n", this->va_num_surfaces); - - //Initialize the surface pool - ret = mix_surfacepool_initialize( - this->surfacepool, - this->va_surfaces, - this->va_num_surfaces, - this->va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing. 
- default:
- ret = MIX_RESULT_ALREADY_INIT;
- LOG_E( "Error init surface pool\n");
- goto CLEAN_UP;
- break;
- }
-
- //Initialize and save the VA context ID
- //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2
- vret = vaCreateContext(
- this->va_display,
- this->va_config,
- this->picture_width,
- this->picture_height,
- 0,
- this->va_surfaces,
- this->va_num_surfaces,
- &(this->va_context));
-
- if (vret != VA_STATUS_SUCCESS) {
- ret = MIX_RESULT_FAIL;
- LOG_E( "Error initializing video driver\n");
- goto CLEAN_UP;
- }
- this->va_initialized = TRUE;
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+ VAStatus vret = VA_STATUS_SUCCESS;
+ VAConfigAttrib attrib;
+ VAProfile va_profile;
+ LOG_V( "Begin\n");
+ if (this->va_initialized) {
+ LOG_W("va already initialized.\n");
+ return MIX_RESULT_SUCCESS;
+ }
+
+ //We are requesting RT attributes
+ attrib.type = VAConfigAttribRTFormat;
+ attrib.value = VA_RT_FORMAT_YUV420;
+
+ //Initialize and save the VA config ID
+ if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) {
+ va_profile = VAProfileMPEG4AdvancedSimple;
+ } else {
+ va_profile = VAProfileMPEG4Simple;
+ }
+ vret = vaCreateConfig(
+ this->va_display,
+ va_profile,
+ VAEntrypointVLD,
+ &attrib,
+ 1,
+ &(this->va_config));
+
+ if (vret != VA_STATUS_SUCCESS) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("vaCreateConfig failed\n");
+ goto CLEAN_UP;
+ }
+
+ // add 1 more surface for packed frame (PB frame), and another one
+ // for partial frame handling
+ this->va_num_surfaces = this->extra_surfaces + 4 + 1 + 1;
+ //if (parent->va_num_surfaces > MIX_VIDEO_MP42_SURFACE_NUM)
+ // parent->va_num_surfaces = MIX_VIDEO_MP42_SURFACE_NUM;
+
+ this->va_surfaces = reinterpret_cast<VASurfaceID*>(malloc(sizeof(VASurfaceID)*this->va_num_surfaces));
+ if (this->va_surfaces == NULL) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E( "parent->va_surfaces == NULL. \n");
+ goto CLEAN_UP;
+ }
+
+ vret = vaCreateSurfaces(
+ this->va_display,
+ this->picture_width,
+ this->picture_height,
+ VA_RT_FORMAT_YUV420,
+ this->va_num_surfaces,
+ this->va_surfaces);
+
+ if (vret != VA_STATUS_SUCCESS) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E( "Error allocating surfaces\n");
+ goto CLEAN_UP;
+ }
+
+ LOG_V( "Created %d libva surfaces\n", this->va_num_surfaces);
+
+ //Initialize the surface pool
+ ret = mix_surfacepool_initialize(
+ this->surfacepool,
+ this->va_surfaces,
+ this->va_num_surfaces,
+ this->va_display);
+
+ switch (ret)
+ {
+ case MIX_RESULT_SUCCESS:
+ break;
+ case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing.
+ default: + ret = MIX_RESULT_ALREADY_INIT; + LOG_E( "Error init surface pool\n"); + goto CLEAN_UP; + break; + } + + //Initialize and save the VA context ID + //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 + vret = vaCreateContext( + this->va_display, + this->va_config, + this->picture_width, + this->picture_height, + 0, + this->va_surfaces, + this->va_num_surfaces, + &(this->va_context)); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + goto CLEAN_UP; + } + this->va_initialized = TRUE; CLEAN_UP: - return ret; + return ret; } MIX_RESULT MixVideoFormat_MP42::_decode_a_slice( - vbp_data_mp42* data, vbp_picture_data_mp42* pic_data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - VADisplay vadisplay = NULL; - VAContextID vacontext; - guint buffer_id_cnt = 0; - gint frame_type = -1; - // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data - VABufferID buffer_ids[4]; - VAPictureParameterBufferMPEG4* pic_params = &(pic_data->picture_param); - vbp_slice_data_mp42* slice_data = &(pic_data->slice_data); - VASliceParameterBufferMPEG4* slice_params = &(slice_data->slice_param); - - LOG_V( "Begin\n"); - - vadisplay = this->va_display; - vacontext = this->va_context; - - if (!this->end_picture_pending) { - LOG_E("picture decoder is not started!\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - // update reference pictures - frame_type = pic_params->vop_fields.bits.vop_coding_type; - - switch (frame_type) { - case MP4_VOP_TYPE_I: - pic_params->forward_reference_picture = VA_INVALID_SURFACE; - pic_params->backward_reference_picture = VA_INVALID_SURFACE; - break; - - case MP4_VOP_TYPE_P: - pic_params-> forward_reference_picture - = this->reference_frames[0]->frame_id; - pic_params-> backward_reference_picture = VA_INVALID_SURFACE; - break; - - case MP4_VOP_TYPE_B: - pic_params->vop_fields.bits.backward_reference_vop_coding_type - = this->last_vop_coding_type; - pic_params->forward_reference_picture - = this->reference_frames[1]->frame_id; - pic_params->backward_reference_picture - = this->reference_frames[0]->frame_id; - break; - - case MP4_VOP_TYPE_S: - pic_params-> forward_reference_picture - = this->reference_frames[0]->frame_id; - pic_params-> backward_reference_picture = VA_INVALID_SURFACE; - break; - - default: - LOG_W("default, Will never reach here\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - break; - } - - //Now for slices - - LOG_V( "Creating libva picture parameter buffer\n"); - - //First the picture parameter buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAPictureParameterBufferType, - sizeof(VAPictureParameterBufferMPEG4), - 1, - pic_params, - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - - buffer_id_cnt++; - - if (pic_params->vol_fields.bits.quant_type && - this->iq_matrix_buf_sent == FALSE) { - LOG_V( "Creating libva IQMatrix buffer\n"); - // only send IQ matrix for the first slice in the picture - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAIQMatrixBufferType, - sizeof(VAIQMatrixBufferMPEG4), - 1, - &(data->iq_matrix_buffer), - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - this->iq_matrix_buf_sent = TRUE; - buffer_id_cnt++; - } - - vret = vaCreateBuffer( - 
vadisplay, - vacontext, - VASliceParameterBufferType, - sizeof(VASliceParameterBufferMPEG4), - 1, - slice_params, - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - - buffer_id_cnt++; - - - //Do slice data - //slice data buffer pointer - //Note that this is the original data buffer ptr; - // offset to the actual slice data is provided in - // slice_data_offset in VASliceParameterBufferMP42 - - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceDataBufferType, - slice_data->slice_size, //size - 1, //num_elements - slice_data->buffer_addr + slice_data->slice_offset, - &buffer_ids[buffer_id_cnt]); - - buffer_id_cnt++; - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - - LOG_V( "Calling vaRenderPicture\n"); - - //Render the picture - vret = vaRenderPicture( - vadisplay, - vacontext, - buffer_ids, - buffer_id_cnt); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaRenderPicture\n"); - goto CLEAN_UP; - } + vbp_data_mp42* data, vbp_picture_data_mp42* pic_data) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + VADisplay vadisplay = NULL; + VAContextID vacontext; + uint buffer_id_cnt = 0; + int frame_type = -1; + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID buffer_ids[4]; + VAPictureParameterBufferMPEG4* pic_params = &(pic_data->picture_param); + vbp_slice_data_mp42* slice_data = &(pic_data->slice_data); + VASliceParameterBufferMPEG4* slice_params = &(slice_data->slice_param); + + LOG_V( "Begin\n"); + + vadisplay = this->va_display; + vacontext = this->va_context; + + if (!this->end_picture_pending) { + LOG_E("picture decoder is not started!\n"); + ret = MIX_RESULT_FAIL; + goto CLEAN_UP; + } + + // update reference pictures + frame_type = pic_params->vop_fields.bits.vop_coding_type; + + switch (frame_type) { + case MP4_VOP_TYPE_I: + pic_params->forward_reference_picture = VA_INVALID_SURFACE; + pic_params->backward_reference_picture = VA_INVALID_SURFACE; + break; + + case MP4_VOP_TYPE_P: + pic_params-> forward_reference_picture + = this->reference_frames[0]->frame_id; + pic_params-> backward_reference_picture = VA_INVALID_SURFACE; + break; + + case MP4_VOP_TYPE_B: + pic_params->vop_fields.bits.backward_reference_vop_coding_type + = this->last_vop_coding_type; + pic_params->forward_reference_picture + = this->reference_frames[1]->frame_id; + pic_params->backward_reference_picture + = this->reference_frames[0]->frame_id; + break; + + case MP4_VOP_TYPE_S: + pic_params-> forward_reference_picture + = this->reference_frames[0]->frame_id; + pic_params-> backward_reference_picture = VA_INVALID_SURFACE; + break; + + default: + LOG_W("default, Will never reach here\n"); + ret = MIX_RESULT_FAIL; + goto CLEAN_UP; + break; + } + + //Now for slices + + LOG_V( "Creating libva picture parameter buffer\n"); + + //First the picture parameter buffer + vret = vaCreateBuffer( + vadisplay, + vacontext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferMPEG4), + 1, + pic_params, + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + + buffer_id_cnt++; + + if (pic_params->vol_fields.bits.quant_type && + 
this->iq_matrix_buf_sent == FALSE) { + LOG_V( "Creating libva IQMatrix buffer\n"); + // only send IQ matrix for the first slice in the picture + vret = vaCreateBuffer( + vadisplay, + vacontext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferMPEG4), + 1, + &(data->iq_matrix_buffer), + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + this->iq_matrix_buf_sent = TRUE; + buffer_id_cnt++; + } + + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferMPEG4), + 1, + slice_params, + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + + buffer_id_cnt++; + + + //Do slice data + //slice data buffer pointer + //Note that this is the original data buffer ptr; + // offset to the actual slice data is provided in + // slice_data_offset in VASliceParameterBufferMP42 + + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceDataBufferType, + slice_data->slice_size, //size + 1, //num_elements + slice_data->buffer_addr + slice_data->slice_offset, + &buffer_ids[buffer_id_cnt]); + + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling vaRenderPicture\n"); + + //Render the picture + vret = vaRenderPicture( + vadisplay, + vacontext, + buffer_ids, + buffer_id_cnt); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaRenderPicture\n"); + goto CLEAN_UP; + } CLEAN_UP: - LOG_V( "End\n"); - return ret; + LOG_V( "End\n"); + return ret; } -MIX_RESULT MixVideoFormat_MP42::_decode_end(gboolean drop_picture) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; +MIX_RESULT MixVideoFormat_MP42::_decode_end(bool drop_picture) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; - if (!this->end_picture_pending) { - if (this->video_frame) { - ret = MIX_RESULT_FAIL; - LOG_E("Unexpected: video_frame is not unreferenced.\n"); - } - goto CLEAN_UP; - } + if (!this->end_picture_pending) { + if (this->video_frame) { + ret = MIX_RESULT_FAIL; + LOG_E("Unexpected: video_frame is not unreferenced.\n"); + } + goto CLEAN_UP; + } - if (this->video_frame == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E("Unexpected: video_frame has been unreferenced.\n"); - goto CLEAN_UP; - } + if (this->video_frame == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E("Unexpected: video_frame has been unreferenced.\n"); + goto CLEAN_UP; + } - vret = vaEndPicture(this->va_display, this->va_context); + vret = vaEndPicture(this->va_display, this->va_context); - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_DROPFRAME; - LOG_E( "Video driver returned error from vaEndPicture\n"); - goto CLEAN_UP; - } + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_DROPFRAME; + LOG_E( "Video driver returned error from vaEndPicture\n"); + goto CLEAN_UP; + } #if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ - LOG_V( "Calling vaSyncSurface\n"); + LOG_V( "Calling vaSyncSurface\n"); - //Decode the picture - vret = vaSyncSurface(vadisplay, surface); + //Decode the picture + vret = vaSyncSurface(vadisplay, surface); - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from 
vaSyncSurface\n"); - CLEAN_UP; - } + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaSyncSurface\n"); + CLEAN_UP; + } #endif - if (drop_picture) { - // we are asked to drop this decoded picture - mix_videoframe_unref(this->video_frame); - this->video_frame = NULL; - goto CLEAN_UP; - } - - //Enqueue the decoded frame using frame manager - ret = mix_framemanager_enqueue(this->framemgr, this->video_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error enqueuing frame object\n"); - goto CLEAN_UP; - } else { - // video frame is passed to frame manager - this->video_frame = NULL; - } + if (drop_picture) { + // we are asked to drop this decoded picture + mix_videoframe_unref(this->video_frame); + this->video_frame = NULL; + goto CLEAN_UP; + } + + //Enqueue the decoded frame using frame manager + ret = mix_framemanager_enqueue(this->framemgr, this->video_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error enqueuing frame object\n"); + goto CLEAN_UP; + } else { + // video frame is passed to frame manager + this->video_frame = NULL; + } CLEAN_UP: - if (this->video_frame) { - /* this always indicates an error */ - mix_videoframe_unref(this->video_frame); - this->video_frame = NULL; - } - this->end_picture_pending = FALSE; - return ret; + if (this->video_frame) { + /* this always indicates an error */ + mix_videoframe_unref(this->video_frame); + this->video_frame = NULL; + } + this->end_picture_pending = FALSE; + return ret; } MIX_RESULT MixVideoFormat_MP42::_decode_continue(vbp_data_mp42 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - uint32 i; - gint frame_type = -1; - vbp_picture_data_mp42* pic_data = NULL; - VAPictureParameterBufferMPEG4* pic_params = NULL; - /* - Packed Frame Assumption: - - 1. In one packed frame, there's only one P or I frame and only one B frame. - 2. In packed frame, there's no skipped frame (vop_coded = 0) - 3. For one packed frame, there will be one N-VOP frame to follow the packed frame (may not immediately). - 4. N-VOP frame is the frame with vop_coded = 0. - 5. The timestamp of N-VOP frame will be used for P or I frame in the packed frame - - - I, P, {P, B}, B, N, P, N, I, ... - I, P, {P, B}, N, P, N, I, ... - - The first N is placeholder for P frame in the packed frame - The second N is a skipped frame - */ - - pic_data = data->picture_data; - for (i = 0; i < data->number_picture_data; i++, pic_data = pic_data->next_picture_data) { - pic_params = &(pic_data->picture_param); - frame_type = pic_params->vop_fields.bits.vop_coding_type; - if (frame_type == MP4_VOP_TYPE_S && - pic_params->no_of_sprite_warping_points > 1) { - // hardware only support up to one warping point (stationary or translation) - LOG_E("sprite with %d warping points is not supported by HW.\n", - pic_params->no_of_sprite_warping_points); - return MIX_RESULT_DROPFRAME; - } - - if (pic_data->vop_coded == 0) { - // this should never happen - LOG_E("VOP is not coded.\n"); - return MIX_RESULT_DROPFRAME; - } - - if (pic_data->new_picture_flag == 1 || - this->end_picture_pending == FALSE) { - if (pic_data->new_picture_flag == 0) { - LOG_W("First slice of picture is lost!\n"); - } - - gulong surface = 0; - if (this->end_picture_pending) - { - // this indicates the start of a new frame in the packed frame - LOG_V("packed frame is found.\n"); - - // Update timestamp for packed frame as timestamp is for the B frame! 
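Both the removed and the added code below rebuild the P frame's timestamp from the B frame's: the VOP time-increment delta wraps modulo vop_time_increment_resolution and is then scaled to nanoseconds. A worked sketch under those assumptions (the function name is hypothetical; integer math is used here, whereas the code below goes through a double intermediate):

#include <stdint.h>

/* Example: last_inc = 2, cur_inc = 1, res = 30 ticks/s:
 * (2 - 1 + 30) % 30 = 1 tick, so 1 * 1e9 / 30 ns (~33.3 ms) is added
 * to the B frame's timestamp to recover the P frame's. */
static uint64_t packed_frame_ts(uint64_t b_ts, int last_inc, int cur_inc, int res)
{
    uint64_t ticks = (uint64_t)(last_inc - cur_inc + res) % (uint64_t)res;
    return b_ts + ticks * 1000000000ULL / (uint64_t)res;
}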
- if (this->video_frame && pic_params->vop_time_increment_resolution) { - guint64 ts, ts_inc; - mix_videoframe_get_timestamp(this->video_frame, &ts); - ts_inc= this->last_vop_time_increment - pic_data->vop_time_increment + - pic_params->vop_time_increment_resolution; - ts_inc = ts_inc % pic_params->vop_time_increment_resolution; - LOG_V("timestamp is incremented by %d at %d resolution.\n", - ts_inc, pic_params->vop_time_increment_resolution); - // convert to nano-second - ts_inc = ts_inc * 1e9 / pic_params->vop_time_increment_resolution; - LOG_V("timestamp of P frame in packed frame is updated from %"G_GINT64_FORMAT" to %"G_GUINT64_FORMAT".\n", - ts, ts + ts_inc); - ts += ts_inc; - mix_videoframe_set_timestamp(this->video_frame, ts); - } - - _decode_end(FALSE); - this->next_nvop_for_PB_frame = TRUE; - } - - if (this->next_nvop_for_PB_frame == TRUE && - frame_type != MP4_VOP_TYPE_B) { - LOG_E("The second frame in the packed frame is not B frame.\n"); - this->next_nvop_for_PB_frame = FALSE; - return MIX_RESULT_DROPFRAME; - } - - //Get a frame from the surface pool - ret = mix_surfacepool_get(this->surfacepool, &(this->video_frame)); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting frame from surfacepool\n"); - return MIX_RESULT_FAIL; - } - - /* the following calls will always succeed */ - - // set frame type - if (frame_type == MP4_VOP_TYPE_S) { - // sprite is treated as P frame in the display order - mix_videoframe_set_frame_type(this->video_frame, (MixFrameType)MP4_VOP_TYPE_P); - } else { - mix_videoframe_set_frame_type(this->video_frame, (MixFrameType)frame_type); - } - - // set frame structure - if (pic_data->picture_param.vol_fields.bits.interlaced) { - // only MPEG-4 studio profile can have field coding. All other profiles - // use frame coding only, i.e, no field VOP. 
(see vop_structure in MP4 spec) - mix_videoframe_set_frame_structure( - this->video_frame, - VA_BOTTOM_FIELD | VA_TOP_FIELD); - LOG_W("Interlaced content, set frame structure to 3 (TOP | BOTTOM field) !\n"); - } else { - mix_videoframe_set_frame_structure(this->video_frame, VA_FRAME_PICTURE); - } - - //Set the discontinuity flag - mix_videoframe_set_discontinuity( - this->video_frame, - this->discontinuity_frame_in_progress); - - //Set the timestamp - mix_videoframe_set_timestamp(this->video_frame, this->current_timestamp); - - //Get our surface ID from the frame object - ret = mix_videoframe_get_frame_id(this->video_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting surface ID from frame object\n"); - goto CLEAN_UP; - } - - /* If I or P frame, update the reference array */ - if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) { - LOG_V("Updating forward/backward references for libva\n"); - this->last_vop_coding_type = frame_type; - this->last_vop_time_increment = pic_data->vop_time_increment; - _handle_ref_frames((_picture_type)frame_type, this->video_frame); - if (this->last_frame != NULL) { - mix_videoframe_unref(this->last_frame); - } - this->last_frame = this->video_frame; - mix_videoframe_ref(this->last_frame); - } - - //Now we can begin the picture - vret = vaBeginPicture(this->va_display, this->va_context, surface); - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaBeginPicture\n"); - goto CLEAN_UP; - } - - // vaBeginPicture needs a matching vaEndPicture - this->end_picture_pending = TRUE; - this->iq_matrix_buf_sent = FALSE; - } - - - ret = _decode_a_slice(data, pic_data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "_decode_a_slice failed, error = %#X.", ret); - goto CLEAN_UP; - } - } + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + uint32 i; + int frame_type = -1; + vbp_picture_data_mp42* pic_data = NULL; + VAPictureParameterBufferMPEG4* pic_params = NULL; + /* + Packed Frame Assumption: + + 1. In one packed frame, there's only one P or I frame and only one B frame. + 2. In packed frame, there's no skipped frame (vop_coded = 0) + 3. For one packed frame, there will be one N-VOP frame to follow the packed frame (may not immediately). + 4. N-VOP frame is the frame with vop_coded = 0. + 5. The timestamp of N-VOP frame will be used for P or I frame in the packed frame + + + I, P, {P, B}, B, N, P, N, I, ... + I, P, {P, B}, N, P, N, I, ... 
+ + The first N is placeholder for P frame in the packed frame + The second N is a skipped frame + */ + + pic_data = data->picture_data; + for (i = 0; i < data->number_picture_data; i++, pic_data = pic_data->next_picture_data) { + pic_params = &(pic_data->picture_param); + frame_type = pic_params->vop_fields.bits.vop_coding_type; + if (frame_type == MP4_VOP_TYPE_S && + pic_params->no_of_sprite_warping_points > 1) { + // hardware only support up to one warping point (stationary or translation) + LOG_E("sprite with %d warping points is not supported by HW.\n", + pic_params->no_of_sprite_warping_points); + return MIX_RESULT_DROPFRAME; + } + + if (pic_data->vop_coded == 0) { + // this should never happen + LOG_E("VOP is not coded.\n"); + return MIX_RESULT_DROPFRAME; + } + + if (pic_data->new_picture_flag == 1 || + this->end_picture_pending == FALSE) { + if (pic_data->new_picture_flag == 0) { + LOG_W("First slice of picture is lost!\n"); + } + + ulong surface = 0; + if (this->end_picture_pending) + { + // this indicates the start of a new frame in the packed frame + LOG_V("packed frame is found.\n"); + + // Update timestamp for packed frame as timestamp is for the B frame! + if (this->video_frame && pic_params->vop_time_increment_resolution) { + uint64 ts, ts_inc; + mix_videoframe_get_timestamp(this->video_frame, &ts); + ts_inc= this->last_vop_time_increment - pic_data->vop_time_increment + + pic_params->vop_time_increment_resolution; + ts_inc = ts_inc % pic_params->vop_time_increment_resolution; + LOG_V("timestamp is incremented by %"UINT64_FORMAT" at %d resolution.\n", + ts_inc, pic_params->vop_time_increment_resolution); + // convert to nano-second + ts_inc = ts_inc * 1e9 / pic_params->vop_time_increment_resolution; + LOG_V("timestamp of P frame in packed frame is updated from %"UINT64_FORMAT" to %"UINT64_FORMAT".\n", + ts, ts + ts_inc); + ts += ts_inc; + mix_videoframe_set_timestamp(this->video_frame, ts); + } + + _decode_end(FALSE); + this->next_nvop_for_PB_frame = TRUE; + } + + if (this->next_nvop_for_PB_frame == TRUE && + frame_type != MP4_VOP_TYPE_B) { + LOG_E("The second frame in the packed frame is not B frame.\n"); + this->next_nvop_for_PB_frame = FALSE; + return MIX_RESULT_DROPFRAME; + } + + //Get a frame from the surface pool + ret = mix_surfacepool_get(this->surfacepool, &(this->video_frame)); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting frame from surfacepool\n"); + return MIX_RESULT_FAIL; + } + + /* the following calls will always succeed */ + + // set frame type + if (frame_type == MP4_VOP_TYPE_S) { + // sprite is treated as P frame in the display order + mix_videoframe_set_frame_type(this->video_frame, (MixFrameType)MP4_VOP_TYPE_P); + } else { + mix_videoframe_set_frame_type(this->video_frame, (MixFrameType)frame_type); + } + + // set frame structure + if (pic_data->picture_param.vol_fields.bits.interlaced) { + // only MPEG-4 studio profile can have field coding. All other profiles + // use frame coding only, i.e, no field VOP. 
(see vop_structure in MP4 spec) + mix_videoframe_set_frame_structure( + this->video_frame, + VA_BOTTOM_FIELD | VA_TOP_FIELD); + LOG_W("Interlaced content, set frame structure to 3 (TOP | BOTTOM field) !\n"); + } else { + mix_videoframe_set_frame_structure(this->video_frame, VA_FRAME_PICTURE); + } + + //Set the discontinuity flag + mix_videoframe_set_discontinuity( + this->video_frame, + this->discontinuity_frame_in_progress); + + //Set the timestamp + mix_videoframe_set_timestamp(this->video_frame, this->current_timestamp); + + //Get our surface ID from the frame object + ret = mix_videoframe_get_frame_id(this->video_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting surface ID from frame object\n"); + goto CLEAN_UP; + } + + /* If I or P frame, update the reference array */ + if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) { + LOG_V("Updating forward/backward references for libva\n"); + this->last_vop_coding_type = frame_type; + this->last_vop_time_increment = pic_data->vop_time_increment; + _handle_ref_frames((_picture_type)frame_type, this->video_frame); + if (this->last_frame != NULL) { + mix_videoframe_unref(this->last_frame); + } + this->last_frame = this->video_frame; + mix_videoframe_ref(this->last_frame); + } + + //Now we can begin the picture + vret = vaBeginPicture(this->va_display, this->va_context, surface); + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaBeginPicture\n"); + goto CLEAN_UP; + } + + // vaBeginPicture needs a matching vaEndPicture + this->end_picture_pending = TRUE; + this->iq_matrix_buf_sent = FALSE; + } + + + ret = _decode_a_slice(data, pic_data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "_decode_a_slice failed, error = %#X.", ret); + goto CLEAN_UP; + } + } CLEAN_UP: - return ret; + return ret; } MIX_RESULT MixVideoFormat_MP42::_decode_begin(vbp_data_mp42* data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - gint frame_type = -1; - VAPictureParameterBufferMPEG4* pic_params = NULL; - vbp_picture_data_mp42 *pic_data = NULL; - pic_data = data->picture_data; - pic_params = &(pic_data->picture_param); - frame_type = pic_params->vop_fields.bits.vop_coding_type; - - if (this->next_nvop_for_PB_frame) { - // if we are waiting for n-vop for packed frame, and the new frame is coded, the coding type - // of this frame must be B. - // for example: {PB} B N P B B P... - if (pic_data->vop_coded == 1 && frame_type != MP4_VOP_TYPE_B) { - LOG_E("Invalid coding type while waiting for n-vop for packed frame.\n"); - // timestamp of P frame in the queue is not correct - mix_framemanager_flush(this->framemgr); - this->next_nvop_for_PB_frame = FALSE; - } - } - - if (pic_data->vop_coded == 0) { - if (this->last_frame == NULL) { - LOG_E("The forward reference frame is NULL, couldn't reconstruct skipped frame.\n"); - mix_framemanager_flush(this->framemgr); - this->next_nvop_for_PB_frame = FALSE; - return MIX_RESULT_DROPFRAME; - } - - if (this->next_nvop_for_PB_frame) { - // P frame is already in queue, just need to update time stamp. 
- mix_videoframe_set_timestamp(this->last_frame, this->current_timestamp); - this->next_nvop_for_PB_frame = FALSE; - } else { - // handle skipped frame - MixVideoFrame *skip_frame = NULL; - gulong frame_id = VA_INVALID_SURFACE; - - skip_frame = mix_videoframe_new(); - ret = mix_videoframe_set_is_skipped(skip_frame, TRUE); - ret = mix_videoframe_get_frame_id(this->last_frame, &frame_id); - ret = mix_videoframe_set_frame_id(skip_frame, frame_id); - ret = mix_videoframe_set_frame_type(skip_frame, (MixFrameType)MP4_VOP_TYPE_P); - ret = mix_videoframe_set_real_frame(skip_frame, this->last_frame); - // add a reference as skip_frame holds the last_frame. - mix_videoframe_ref(this->last_frame); - ret = mix_videoframe_set_timestamp(skip_frame, this->current_timestamp); - ret = mix_videoframe_set_discontinuity(skip_frame, FALSE); - - LOG_V("Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n", - (guint)skip_frame, (guint)frame_id, this->current_timestamp); - - /* Enqueue the skipped frame using frame manager */ - ret = mix_framemanager_enqueue(this->framemgr, skip_frame); - } - - if (data->number_picture_data > 1) { - LOG_E("Unexpected to have more picture data following a not-coded VOP.\n"); - //picture data is thrown away. No issue if picture data is for N-VOP. if picture data is for - // coded picture, a frame is lost. - } - return MIX_RESULT_SUCCESS; - } else { - /* - * Check for B frames after a seek - * We need to have both reference frames in hand before we can decode a B frame - * If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME - */ - if (frame_type == MP4_VOP_TYPE_B) { - if (this->reference_frames[0] == NULL || - this->reference_frames[1] == NULL) { - LOG_W("Insufficient reference frames for B frame\n"); - return MIX_RESULT_DROPFRAME; - } - } else if (frame_type == MP4_VOP_TYPE_P || frame_type == MP4_VOP_TYPE_S) { - if (this->reference_frames[0] == NULL) { - LOG_W("Reference frames for P/S frame is missing\n"); - return MIX_RESULT_DROPFRAME; - } - } - // all sanity check passes, continue decoding through mix_videofmt_mp42_decode_continue - ret = _decode_continue(data); - } - return ret; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + int frame_type = -1; + VAPictureParameterBufferMPEG4* pic_params = NULL; + vbp_picture_data_mp42 *pic_data = NULL; + pic_data = data->picture_data; + pic_params = &(pic_data->picture_param); + frame_type = pic_params->vop_fields.bits.vop_coding_type; + + if (this->next_nvop_for_PB_frame) { + // if we are waiting for n-vop for packed frame, and the new frame is coded, the coding type + // of this frame must be B. + // for example: {PB} B N P B B P... + if (pic_data->vop_coded == 1 && frame_type != MP4_VOP_TYPE_B) { + LOG_E("Invalid coding type while waiting for n-vop for packed frame.\n"); + // timestamp of P frame in the queue is not correct + mix_framemanager_flush(this->framemgr); + this->next_nvop_for_PB_frame = FALSE; + } + } + + if (pic_data->vop_coded == 0) { + if (this->last_frame == NULL) { + LOG_E("The forward reference frame is NULL, couldn't reconstruct skipped frame.\n"); + mix_framemanager_flush(this->framemgr); + this->next_nvop_for_PB_frame = FALSE; + return MIX_RESULT_DROPFRAME; + } + + if (this->next_nvop_for_PB_frame) { + // P frame is already in queue, just need to update time stamp. 
+ mix_videoframe_set_timestamp(this->last_frame, this->current_timestamp); + this->next_nvop_for_PB_frame = FALSE; + } else { + // handle skipped frame + MixVideoFrame *skip_frame = NULL; + ulong frame_id = VA_INVALID_SURFACE; + + skip_frame = mix_videoframe_new(); + ret = mix_videoframe_set_is_skipped(skip_frame, TRUE); + ret = mix_videoframe_get_frame_id(this->last_frame, &frame_id); + ret = mix_videoframe_set_frame_id(skip_frame, frame_id); + ret = mix_videoframe_set_frame_type(skip_frame, (MixFrameType)MP4_VOP_TYPE_P); + ret = mix_videoframe_set_real_frame(skip_frame, this->last_frame); + // add a reference as skip_frame holds the last_frame. + mix_videoframe_ref(this->last_frame); + ret = mix_videoframe_set_timestamp(skip_frame, this->current_timestamp); + ret = mix_videoframe_set_discontinuity(skip_frame, FALSE); + + LOG_V("Processing skipped frame %x, frame_id set to %d, ts %"UINT64_FORMAT"\n", + (uint)skip_frame, (uint)frame_id, this->current_timestamp); + + /* Enqueue the skipped frame using frame manager */ + ret = mix_framemanager_enqueue(this->framemgr, skip_frame); + } + + if (data->number_picture_data > 1) { + LOG_E("Unexpected to have more picture data following a not-coded VOP.\n"); + //picture data is thrown away. No issue if picture data is for N-VOP. if picture data is for + // coded picture, a frame is lost. + } + return MIX_RESULT_SUCCESS; + } else { + /* + * Check for B frames after a seek + * We need to have both reference frames in hand before we can decode a B frame + * If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME + */ + if (frame_type == MP4_VOP_TYPE_B) { + if (this->reference_frames[0] == NULL || + this->reference_frames[1] == NULL) { + LOG_W("Insufficient reference frames for B frame\n"); + return MIX_RESULT_DROPFRAME; + } + } else if (frame_type == MP4_VOP_TYPE_P || frame_type == MP4_VOP_TYPE_S) { + if (this->reference_frames[0] == NULL) { + LOG_W("Reference frames for P/S frame is missing\n"); + return MIX_RESULT_DROPFRAME; + } + } + // all sanity check passes, continue decoding through mix_videofmt_mp42_decode_continue + ret = _decode_continue(data); + } + return ret; } MIX_RESULT MixVideoFormat_MP42::_decode_a_buffer( - MixBuffer * bufin, guint64 ts, gboolean discontinuity) { - uint32 pret = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - vbp_data_mp42 *data = NULL; - guint64 last_ts = 0; - - LOG_V( "Begin\n"); - pret = vbp_parse(this->parser_handle, - bufin->data, - bufin->size, - FALSE); - - if (pret != VBP_OK) { - ret = MIX_RESULT_DROPFRAME; - LOG_E( "vbp_parse failed.\n"); - goto CLEAN_UP; - } - else { - LOG_V("vbp_parse succeeded.\n"); - } - //query for data - pret = vbp_query(this->parser_handle, (void **) &data); - - if ((pret != VBP_OK) || (data == NULL)) { - // never happen! 
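
// For reference: the parser is always driven in two steps, vbp_parse() to
// consume a buffer and vbp_query() to fetch the accumulated vbp_data_mp42.
// A minimal sketch of the pair, assuming a handle opened with
// vbp_open(VBP_MPEG4, ...) as in Initialize(); the handle type is not shown
// in this file, so void* is used here purely for illustration:

static MIX_RESULT parse_and_query(
    void *parser_handle, uint8 *buf, uint32 size, vbp_data_mp42 **data_out) {
    uint32 pret = vbp_parse(parser_handle, buf, size, FALSE); // feed one buffer
    if (pret != VBP_OK)
        return MIX_RESULT_DROPFRAME;  // same drop policy as _decode_a_buffer
    pret = vbp_query(parser_handle, (void **)data_out);       // fetch results
    if ((pret != VBP_OK) || (*data_out == NULL))
        return MIX_RESULT_FAIL;
    return MIX_RESULT_SUCCESS;
}
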
- ret = MIX_RESULT_FAIL; - LOG_E( "vbp_query failed.\n"); - goto CLEAN_UP; - } else { - LOG_V("vbp_query succeeded.\n"); - } - - if (this->va_initialized == FALSE) { - _update_config_params(data); - LOG_V("try initializing VA...\n"); - ret = _initialize_va(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_videofmt_mp42_initialize_va failed.\n"); - goto CLEAN_UP; - } - } - - // check if any slice is parsed, we may just receive configuration data - if (data->number_picture_data == 0) { - ret = MIX_RESULT_SUCCESS; - LOG_V("slice is not available.\n"); - goto CLEAN_UP; - } - - last_ts = this->current_timestamp; - this->current_timestamp = ts; - this->discontinuity_frame_in_progress = discontinuity; - - if (last_ts != ts) { - // finish decoding the last frame - ret = _decode_end(FALSE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_videofmt_mp42_decode_end failed.\n"); - goto CLEAN_UP; - } - - // start decoding a new frame - ret = _decode_begin(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_videofmt_mp42_decode_begin failed.\n"); - goto CLEAN_UP; - } - } else { - ret = _decode_continue(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_videofmt_mp42_decode_continue failed.\n"); - goto CLEAN_UP; - } - } + MixBuffer * bufin, uint64 ts, bool discontinuity) { + uint32 pret = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + vbp_data_mp42 *data = NULL; + uint64 last_ts = 0; + + LOG_V( "Begin\n"); + pret = vbp_parse(this->parser_handle, + bufin->data, + bufin->size, + FALSE); + + if (pret != VBP_OK) { + ret = MIX_RESULT_DROPFRAME; + LOG_E( "vbp_parse failed.\n"); + goto CLEAN_UP; + } + else { + LOG_V("vbp_parse succeeded.\n"); + } + //query for data + pret = vbp_query(this->parser_handle, (void **) &data); + + if ((pret != VBP_OK) || (data == NULL)) { + // never happen! 
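
// For reference: the timestamp comparison in _decode_a_buffer is the only
// frame-boundary signal this decoder has. A buffer carrying a new timestamp
// closes the picture in flight and opens the next one; a repeated timestamp
// contributes more slices to the same picture. Schematically (a sketch of
// the dispatch using this class's private methods):
//
//     if (last_ts != ts) {
//         ret = _decode_end(FALSE);      // flush the picture in progress
//         if (ret == MIX_RESULT_SUCCESS)
//             ret = _decode_begin(data); // begin picture at the new timestamp
//     } else {
//         ret = _decode_continue(data);  // more slices, same picture
//     }
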
+ ret = MIX_RESULT_FAIL; + LOG_E( "vbp_query failed.\n"); + goto CLEAN_UP; + } else { + LOG_V("vbp_query succeeded.\n"); + } + + if (this->va_initialized == FALSE) { + _update_config_params(data); + LOG_V("try initializing VA...\n"); + ret = _initialize_va(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_videofmt_mp42_initialize_va failed.\n"); + goto CLEAN_UP; + } + } + + // check if any slice is parsed, we may just receive configuration data + if (data->number_picture_data == 0) { + ret = MIX_RESULT_SUCCESS; + LOG_V("slice is not available.\n"); + goto CLEAN_UP; + } + + last_ts = this->current_timestamp; + this->current_timestamp = ts; + this->discontinuity_frame_in_progress = discontinuity; + + if (last_ts != ts) { + // finish decoding the last frame + ret = _decode_end(FALSE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_videofmt_mp42_decode_end failed.\n"); + goto CLEAN_UP; + } + + // start decoding a new frame + ret = _decode_begin(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_videofmt_mp42_decode_begin failed.\n"); + goto CLEAN_UP; + } + } else { + ret = _decode_continue(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V("mix_videofmt_mp42_decode_continue failed.\n"); + goto CLEAN_UP; + } + } CLEAN_UP: - LOG_V( "End\n"); - return ret; + LOG_V( "End\n"); + return ret; } MIX_RESULT MixVideoFormat_MP42::Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display ) { - - uint32 pret = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - enum _vbp_parser_type ptype = VBP_MPEG4; - vbp_data_mp42 *data = NULL; - MixIOVec *header = NULL; - - if (config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL) { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - // chain up parent method - MixVideoFormat::Initialize(config_params, frame_mgr, input_buf_pool, - surface_pool, va_display); - - if (ret != MIX_RESULT_SUCCESS){ - LOG_E( "Error initializing\n"); - return ret; - } - - LOG_V( "Locking\n"); - //From now on, we exit this function through cleanup: - Lock(); - - this->surfacepool = mix_surfacepool_new(); - *surface_pool = this->surfacepool; - - if (this->surfacepool == NULL) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "parent->surfacepool == NULL.\n"); - goto CLEAN_UP; - } - - ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, - &this->extra_surfaces); - - if (ret != MIX_RESULT_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot get extra surface allocation setting\n"); - goto CLEAN_UP; - } - - //Load the bitstream parser - pret = vbp_open(ptype, &(this->parser_handle)); - - if (!(pret == VBP_OK)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error opening parser\n"); - goto CLEAN_UP; - } - LOG_V( "Opened parser\n"); - - - ret = mix_videoconfigparamsdec_get_header(config_params, &header); - if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) { - // Delay initializing VA if codec configuration data is not ready, but don't return an error. 
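
// For reference: initialization is deliberately lazy. When no codec
// configuration data is supplied, Initialize() still returns success and VA
// bring-up is deferred until the first parsed buffer reveals the stream
// parameters (the va_initialized check in _decode_a_buffer above). A sketch
// of the guard, assuming _initialize_va() sets va_initialized on success:

if (!this->va_initialized) {
    _update_config_params(data);  // adopt width/height etc. from the stream
    ret = _initialize_va(data);   // create VA config, context and surfaces
    if (ret != MIX_RESULT_SUCCESS)
        goto CLEAN_UP;            // cannot decode without an accelerator
}
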
- ret = MIX_RESULT_SUCCESS; - LOG_W( "Codec data is not available in the configuration parameter.\n"); - goto CLEAN_UP; - } - - LOG_V( "Calling parse on header data, handle %d\n", (int)this->parser_handle); - - pret = vbp_parse(this->parser_handle, header->data, - header->data_size, TRUE); - - if (pret != VBP_OK) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error parsing header data\n"); - goto CLEAN_UP; - } - - LOG_V( "Parsed header\n"); - - //Get the header data and save - pret = vbp_query(this->parser_handle, (void **)&data); - - if ((pret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error reading parsed header data\n"); - goto CLEAN_UP; - } - - LOG_V( "Queried parser for header data\n"); - - _update_config_params(data); - - ret = _initialize_va(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error initializing va. \n"); - goto CLEAN_UP; - } + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display ) { + + uint32 pret = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + enum _vbp_parser_type ptype = VBP_MPEG4; + vbp_data_mp42 *data = NULL; + MixIOVec *header = NULL; + + if (config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL) { + LOG_E( "NUll pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + LOG_V( "Begin\n"); + + // chain up parent method + MixVideoFormat::Initialize(config_params, frame_mgr, input_buf_pool, + surface_pool, va_display); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error initializing\n"); + return ret; + } + + LOG_V( "Locking\n"); + //From now on, we exit this function through cleanup: + Lock(); + + this->surfacepool = mix_surfacepool_new(); + *surface_pool = this->surfacepool; + + if (this->surfacepool == NULL) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "parent->surfacepool == NULL.\n"); + goto CLEAN_UP; + } + + ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, + &this->extra_surfaces); + + if (ret != MIX_RESULT_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get extra surface allocation setting\n"); + goto CLEAN_UP; + } + + //Load the bitstream parser + pret = vbp_open(ptype, &(this->parser_handle)); + + if (!(pret == VBP_OK)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error opening parser\n"); + goto CLEAN_UP; + } + LOG_V( "Opened parser\n"); + + + ret = mix_videoconfigparamsdec_get_header(config_params, &header); + if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) { + // Delay initializing VA if codec configuration data is not ready, but don't return an error. + ret = MIX_RESULT_SUCCESS; + LOG_W( "Codec data is not available in the configuration parameter.\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling parse on header data, handle %d\n", (int)this->parser_handle); + + pret = vbp_parse(this->parser_handle, header->data, + header->data_size, TRUE); + + if (pret != VBP_OK) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error parsing header data\n"); + goto CLEAN_UP; + } + + LOG_V( "Parsed header\n"); + + //Get the header data and save + pret = vbp_query(this->parser_handle, (void **)&data); + + if ((pret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error reading parsed header data\n"); + goto CLEAN_UP; + } + + LOG_V( "Queried parser for header data\n"); + + _update_config_params(data); + + ret = _initialize_va(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error initializing va. 
\n"); + goto CLEAN_UP; + } CLEAN_UP: - if (ret != MIX_RESULT_SUCCESS) { - if (this->parser_handle) { - pret = vbp_close(this->parser_handle); - this->parser_handle = NULL; - } - this->initialized = FALSE; - } else { - this->initialized = TRUE; - } - if (header != NULL) { - if (header->data != NULL) - g_free(header->data); - g_free(header); - header = NULL; - } - LOG_V( "Unlocking\n"); - Unlock(); - return ret; + if (ret != MIX_RESULT_SUCCESS) { + if (this->parser_handle) { + pret = vbp_close(this->parser_handle); + this->parser_handle = NULL; + } + this->initialized = FALSE; + } else { + this->initialized = TRUE; + } + if (header != NULL) { + if (header->data != NULL) + free(header->data); + free(header); + header = NULL; + } + LOG_V( "Unlocking\n"); + Unlock(); + return ret; } MIX_RESULT MixVideoFormat_MP42::Decode( - MixBuffer * bufin[], gint bufincnt, MixVideoDecodeParams * decode_params) { + MixBuffer * bufin[], int bufincnt, MixVideoDecodeParams * decode_params) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - guint64 ts = 0; - gboolean discontinuity = FALSE; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + uint64 ts = 0; + bool discontinuity = FALSE; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - if (bufin == NULL || decode_params == NULL ) { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } + if (bufin == NULL || decode_params == NULL ) { + LOG_E( "NUll pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } - /* Chainup parent method. - We are not chaining up to parent method for now. - */ + /* Chainup parent method. + We are not chaining up to parent method for now. + */ #if 0 - if (parent_class->decode) { - return parent_class->decode(mix, bufin, bufincnt, decode_params); - } + if (parent_class->decode) { + return parent_class->decode(mix, bufin, bufincnt, decode_params); + } #endif - ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); - if (ret != MIX_RESULT_SUCCESS) { - // never happen - return MIX_RESULT_FAIL; - } - - ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); - if (ret != MIX_RESULT_SUCCESS) { - // never happen - return MIX_RESULT_FAIL; - } - - //From now on, we exit this function through cleanup: - LOG_V( "Locking\n"); - Lock(); - - LOG_I("ts after mix_videodecodeparams_get_timestamp() = %"G_GINT64_FORMAT"\n", ts); - - for (int i = 0; i < bufincnt; i++) { - LOG_V("decode buffer %d in total %d \n", i, bufincnt); - // decode a buffer at a time - ret = _decode_a_buffer(bufin[i], ts, discontinuity); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videofmt_mp42_decode_a_buffer failed.\n"); - break; - } - } - - LOG_V( "Unlocking\n"); - Unlock(); - LOG_V( "End\n"); - return ret; + ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); + if (ret != MIX_RESULT_SUCCESS) { + // never happen + return MIX_RESULT_FAIL; + } + + ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); + if (ret != MIX_RESULT_SUCCESS) { + // never happen + return MIX_RESULT_FAIL; + } + + //From now on, we exit this function through cleanup: + LOG_V( "Locking\n"); + Lock(); + + LOG_I("ts after mix_videodecodeparams_get_timestamp() = %"UINT64_FORMAT"\n", ts); + + for (int i = 0; i < bufincnt; i++) { + LOG_V("decode buffer %d in total %d \n", i, bufincnt); + // decode a buffer at a time + ret = _decode_a_buffer(bufin[i], ts, discontinuity); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videofmt_mp42_decode_a_buffer failed.\n"); + break; + } + } + + LOG_V( "Unlocking\n"); + Unlock(); + LOG_V( "End\n"); + return ret; } MIX_RESULT 
MixVideoFormat_MP42::Flush() { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V("Begin\n"); - - Lock(); - // drop any decode-pending picture, and ignore return value - _decode_end(TRUE); - - /* - * Clear parse_in_progress flag and current timestamp - */ - this->parse_in_progress = FALSE; - this->discontinuity_frame_in_progress = FALSE; - this->current_timestamp = (guint64)-1; - this->next_nvop_for_PB_frame = FALSE; - - for (gint idx = 0; idx < 2; idx++) { - if (this->reference_frames[idx] != NULL) { - mix_videoframe_unref(this->reference_frames[idx]); - this->reference_frames[idx] = NULL; - } - } - if (this->last_frame) { - mix_videoframe_unref(this->last_frame); - this->last_frame = NULL; - } - - /* Call parser flush */ - vbp_flush(this->parser_handle); - Unlock(); - LOG_V("End\n"); - return ret; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V("Begin\n"); + + Lock(); + // drop any decode-pending picture, and ignore return value + _decode_end(TRUE); + + /* + * Clear parse_in_progress flag and current timestamp + */ + this->parse_in_progress = FALSE; + this->discontinuity_frame_in_progress = FALSE; + this->current_timestamp = (uint64)-1; + this->next_nvop_for_PB_frame = FALSE; + + for (int idx = 0; idx < 2; idx++) { + if (this->reference_frames[idx] != NULL) { + mix_videoframe_unref(this->reference_frames[idx]); + this->reference_frames[idx] = NULL; + } + } + if (this->last_frame) { + mix_videoframe_unref(this->last_frame); + this->last_frame = NULL; + } + + /* Call parser flush */ + vbp_flush(this->parser_handle); + Unlock(); + LOG_V("End\n"); + return ret; } MIX_RESULT MixVideoFormat_MP42::EndOfStream() { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V("Begin\n"); - Lock(); - _decode_end(FALSE); - ret = mix_framemanager_eos(this->framemgr); - Unlock(); - LOG_V("End\n"); - return ret; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V("Begin\n"); + Lock(); + _decode_end(FALSE); + ret = mix_framemanager_eos(this->framemgr); + Unlock(); + LOG_V("End\n"); + return ret; } MIX_RESULT MixVideoFormat_MP42::_handle_ref_frames( - enum _picture_type frame_type, MixVideoFrame * current_frame) { - LOG_V("Begin\n"); - if (current_frame == NULL) { - return MIX_RESULT_NULL_PTR; - } - switch (frame_type) { - case MP4_VOP_TYPE_I: - case MP4_VOP_TYPE_P: - LOG_V("Refing reference frame %x\n", (guint) current_frame); - mix_videoframe_ref(current_frame); - - /* should only happen on first frame */ - if (this->reference_frames[0] == NULL) { - this->reference_frames[0] = current_frame; - /* should only happen on second frame */ - } else if (this->reference_frames[1] == NULL) { - this->reference_frames[1] = current_frame; - } else { - LOG_V("Releasing reference frame %x\n", - (guint) this->reference_frames[0]); - mix_videoframe_unref(this->reference_frames[0]); - this->reference_frames[0] = this->reference_frames[1]; - this->reference_frames[1] = current_frame; - } - break; - case MP4_VOP_TYPE_B: - case MP4_VOP_TYPE_S: - default: - break; - - } - - LOG_V("End\n"); - - return MIX_RESULT_SUCCESS; + enum _picture_type frame_type, MixVideoFrame * current_frame) { + LOG_V("Begin\n"); + if (current_frame == NULL) { + return MIX_RESULT_NULL_PTR; + } + switch (frame_type) { + case MP4_VOP_TYPE_I: + case MP4_VOP_TYPE_P: + LOG_V("Refing reference frame %x\n", (uint) current_frame); + mix_videoframe_ref(current_frame); + + /* should only happen on first frame */ + if (this->reference_frames[0] == NULL) { + this->reference_frames[0] = current_frame; + /* should only happen on second frame */ + } else if (this->reference_frames[1] == 
NULL) { + this->reference_frames[1] = current_frame; + } else { + LOG_V("Releasing reference frame %x\n", + (uint) this->reference_frames[0]); + mix_videoframe_unref(this->reference_frames[0]); + this->reference_frames[0] = this->reference_frames[1]; + this->reference_frames[1] = current_frame; + } + break; + case MP4_VOP_TYPE_B: + case MP4_VOP_TYPE_S: + default: + break; + + } + + LOG_V("End\n"); + + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoformat_mp42.h b/mix_video/src/mixvideoformat_mp42.h index 31267c2..e05d3be 100644 --- a/mix_video/src/mixvideoformat_mp42.h +++ b/mix_video/src/mixvideoformat_mp42.h @@ -23,47 +23,47 @@ class MixVideoFormat_MP42 : public MixVideoFormat { public: - MixVideoFormat_MP42(); - virtual ~MixVideoFormat_MP42(); - - virtual MIX_RESULT Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); - virtual MIX_RESULT Decode( - MixBuffer * bufin[], gint bufincnt, - MixVideoDecodeParams * decode_params); - virtual MIX_RESULT Flush(); - virtual MIX_RESULT EndOfStream(); + MixVideoFormat_MP42(); + virtual ~MixVideoFormat_MP42(); + + virtual MIX_RESULT Initialize( + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); + virtual MIX_RESULT Decode( + MixBuffer * bufin[], int bufincnt, + MixVideoDecodeParams * decode_params); + virtual MIX_RESULT Flush(); + virtual MIX_RESULT EndOfStream(); private: - MIX_RESULT _handle_ref_frames( - enum _picture_type frame_type, MixVideoFrame * current_frame); - MIX_RESULT _release_input_buffers(guint64 timestamp); - MIX_RESULT _update_config_params(vbp_data_mp42 *data); - MIX_RESULT _initialize_va(vbp_data_mp42 *data); - MIX_RESULT _decode_a_slice( - vbp_data_mp42* data, vbp_picture_data_mp42* pic_data); - MIX_RESULT _decode_end(gboolean drop_picture); - MIX_RESULT _decode_continue(vbp_data_mp42 *data); - MIX_RESULT _decode_begin(vbp_data_mp42* data); - MIX_RESULT _decode_a_buffer( - MixBuffer * bufin, guint64 ts, gboolean discontinuity); + MIX_RESULT _handle_ref_frames( + enum _picture_type frame_type, MixVideoFrame * current_frame); + MIX_RESULT _release_input_buffers(uint64 timestamp); + MIX_RESULT _update_config_params(vbp_data_mp42 *data); + MIX_RESULT _initialize_va(vbp_data_mp42 *data); + MIX_RESULT _decode_a_slice( + vbp_data_mp42* data, vbp_picture_data_mp42* pic_data); + MIX_RESULT _decode_end(bool drop_picture); + MIX_RESULT _decode_continue(vbp_data_mp42 *data); + MIX_RESULT _decode_begin(vbp_data_mp42* data); + MIX_RESULT _decode_a_buffer( + MixBuffer * bufin, uint64 ts, bool discontinuity); public: - /*< public > */ - MixVideoFrame * reference_frames[2]; - MixVideoFrame * last_frame; - gint last_vop_coding_type; - guint last_vop_time_increment; + /*< public > */ + MixVideoFrame * reference_frames[2]; + MixVideoFrame * last_frame; + int last_vop_coding_type; + uint last_vop_time_increment; - /* indicate if future n-vop is a placeholder of a packed frame */ - gboolean next_nvop_for_PB_frame; + /* indicate if future n-vop is a placeholder of a packed frame */ + bool next_nvop_for_PB_frame; - /* indicate if iq_matrix_buffer is sent to driver */ - gboolean iq_matrix_buf_sent; + /* indicate if iq_matrix_buffer is sent to driver */ + bool iq_matrix_buf_sent; }; diff --git a/mix_video/src/mixvideoformat_vc1.cpp b/mix_video/src/mixvideoformat_vc1.cpp index 2199ff6..342c49d 100644 
--- a/mix_video/src/mixvideoformat_vc1.cpp +++ b/mix_video/src/mixvideoformat_vc1.cpp @@ -5,7 +5,7 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#include + #include "mixvideolog.h" #include "mixvideoformat_vc1.h" @@ -19,7 +19,7 @@ #endif /* YUVDUMP */ #include - +#include #ifdef MIX_LOG_ENABLE static int mix_video_vc1_counter = 0; @@ -27,1338 +27,1340 @@ static int mix_video_vc1_counter = 0; MixVideoFormat_VC1::MixVideoFormat_VC1() { - this->reference_frames[0] = NULL; - this->reference_frames[1] = NULL; + this->reference_frames[0] = NULL; + this->reference_frames[1] = NULL; } MixVideoFormat_VC1::~MixVideoFormat_VC1() { - gint32 pret = VBP_OK; - /* clean up here. */ - Lock(); - //surfacepool is deallocated by parent - //inputbufqueue is deallocated by parent - //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces - //Unref our reference frames; - for (int i = 0; i < 2; i++) { - if (this->reference_frames[i] != NULL) - { - mix_videoframe_unref(this->reference_frames[i]); - this->reference_frames[i] = NULL; - } - } - - //Reset state - this->initialized = TRUE; - this->parse_in_progress = FALSE; - this->discontinuity_frame_in_progress = FALSE; - this->current_timestamp = (guint64)-1; - - //Close the parser - pret = vbp_close(this->parser_handle); - this->parser_handle = NULL; - if (pret != VBP_OK) { - LOG_E( "Error closing parser\n"); - } - - Unlock(); + int32 pret = VBP_OK; + /* clean up here. */ + Lock(); + //surfacepool is deallocated by parent + //inputbufqueue is deallocated by parent + //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces + //Unref our reference frames; + for (int i = 0; i < 2; i++) { + if (this->reference_frames[i] != NULL) + { + mix_videoframe_unref(this->reference_frames[i]); + this->reference_frames[i] = NULL; + } + } + + //Reset state + this->initialized = TRUE; + this->parse_in_progress = FALSE; + this->discontinuity_frame_in_progress = FALSE; + this->current_timestamp = (uint64)-1; + + //Close the parser + pret = vbp_close(this->parser_handle); + this->parser_handle = NULL; + if (pret != VBP_OK) { + LOG_E( "Error closing parser\n"); + } + + Unlock(); } MixVideoFormat_VC1 * mix_videoformat_vc1_new(void) { - return new MixVideoFormat_VC1(); + return new MixVideoFormat_VC1(); } MixVideoFormat_VC1 * mix_videoformat_vc1_ref(MixVideoFormat_VC1 * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } MixVideoFormat_VC1 *mix_videoformat_vc1_unref(MixVideoFormat_VC1 * mix) { - if (NULL != mix) - return MIX_VIDEOFORMAT_VC1(mix->Unref()); - else - return mix; + if (NULL != mix) + return MIX_VIDEOFORMAT_VC1(mix->Unref()); + else + return mix; } MIX_RESULT MixVideoFormat_VC1::_update_seq_header( - MixVideoConfigParamsDec* config_params, - MixIOVec *header) { - guint width = 0; - guint height = 0; - - gint i = 0; - guchar* p = NULL; - MIX_RESULT res = MIX_RESULT_SUCCESS; - - if (!config_params || !header) { - LOG_E( "NUll pointer passed in\n"); - return (MIX_RESULT_NULL_PTR); - } - - p = header->data; - - res = mix_videoconfigparamsdec_get_picture_res( - config_params, &width, &height); - - if (MIX_RESULT_SUCCESS != res) { - return res; - } - - /* Check for start codes. 
If one exist, then this is VC-1 and not WMV. */ - while (i < header->data_size - 2) { - if ((p[i] == 0) && (p[i + 1] == 0) && (p[i + 2] == 1)) { - return MIX_RESULT_SUCCESS; - } - i++; - } - - p = reinterpret_cast<guchar*>(g_malloc0(header->data_size + 9)); - - if (!p) { - LOG_E( "Cannot allocate memory\n"); - return MIX_RESULT_NO_MEMORY; - } - - /* If we get here we have 4+ bytes of codec data that must be formatted */ - /* to pass through as an RCV sequence header. */ - p[0] = 0; - p[1] = 0; - p[2] = 1; - p[3] = 0x0f; /* Start code. */ - - p[4] = (width >> 8) & 0x0ff; - p[5] = width & 0x0ff; - p[6] = (height >> 8) & 0x0ff; - p[7] = height & 0x0ff; - - memcpy(p + 8, header->data, header->data_size); - *(p + header->data_size + 8) = 0x80; - - g_free(header->data); - header->data = p; - header->data_size = header->data_size + 9; - - return MIX_RESULT_SUCCESS; + MixVideoConfigParamsDec* config_params, + MixIOVec *header) { + uint width = 0; + uint height = 0; + + int i = 0; + uchar* p = NULL; + MIX_RESULT res = MIX_RESULT_SUCCESS; + + if (!config_params || !header) { + LOG_E( "Null pointer passed in\n"); + return (MIX_RESULT_NULL_PTR); + } + + p = header->data; + + res = mix_videoconfigparamsdec_get_picture_res( + config_params, &width, &height); + + if (MIX_RESULT_SUCCESS != res) { + return res; + } + + /* Check for start codes. If one exists, then this is VC-1 and not WMV. */ + while (i < header->data_size - 2) { + if ((p[i] == 0) && (p[i + 1] == 0) && (p[i + 2] == 1)) { + return MIX_RESULT_SUCCESS; + } + i++; + } + +// p = reinterpret_cast<uchar*>(g_malloc0(header->data_size + 9)); + p = reinterpret_cast<uchar*>(malloc(header->data_size + 9)); + + if (!p) { + LOG_E( "Cannot allocate memory\n"); + return MIX_RESULT_NO_MEMORY; + } + memset(p, 0, header->data_size + 9); + + /* If we get here we have 4+ bytes of codec data that must be formatted */ + /* to pass through as an RCV sequence header. */ + p[0] = 0; + p[1] = 0; + p[2] = 1; + p[3] = 0x0f; /* Start code. */ + + p[4] = (width >> 8) & 0x0ff; + p[5] = width & 0x0ff; + p[6] = (height >> 8) & 0x0ff; + p[7] = height & 0x0ff; + + memcpy(p + 8, header->data, header->data_size); + *(p + header->data_size + 8) = 0x80; + + free(header->data); + header->data = p; + header->data_size = header->data_size + 9; + + return MIX_RESULT_SUCCESS; } MIX_RESULT MixVideoFormat_VC1::_update_config_params(vbp_data_vc1 *data) { - if (this->picture_width == 0 || - this->picture_height == 0) { - this->picture_width = data->se_data->CODED_WIDTH; - this->picture_height = data->se_data->CODED_HEIGHT; - mix_videoconfigparamsdec_set_picture_res( - this->config_params, - this->picture_width, - this->picture_height); - } - - // scaling has been performed on the decoded image. - mix_videoconfigparamsdec_set_video_range(this->config_params, 1); - uint8 color_matrix; - switch (data->se_data->MATRIX_COEF) { - case 1: - color_matrix = VA_SRC_BT709; - break; - // ITU-R BT.1700, ITU-R BT.601-5, and SMPTE 293M-1996. - case 6: - color_matrix = VA_SRC_BT601; - break; - default: - // unknown color matrix, set to 0 so color space flag will not be set. 
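
// For reference: the nine bytes prepended above turn bare WMV codec data
// into a sequence-layer header the VC-1 parser recognizes: a 00 00 01 0F
// start code, big-endian 16-bit coded width and height, the original codec
// data, and a trailing 0x80 byte. A standalone sketch of the same packing
// (assuming width and height fit in 16 bits, as the shifts above imply):

#include <stdlib.h>
#include <string.h>

// Returns a malloc'ed buffer of size (size + 9); the caller frees it.
static unsigned char *wrap_rcv_seq_header(
    const unsigned char *codec_data, int size,
    unsigned int width, unsigned int height, int *out_size) {
    unsigned char *p = (unsigned char *)malloc(size + 9);
    if (p == NULL)
        return NULL;
    p[0] = 0x00; p[1] = 0x00; p[2] = 0x01; p[3] = 0x0f;  // start code
    p[4] = (width >> 8) & 0xff;  p[5] = width & 0xff;    // coded width
    p[6] = (height >> 8) & 0xff; p[7] = height & 0xff;   // coded height
    memcpy(p + 8, codec_data, size);                     // original payload
    p[size + 8] = 0x80;                                  // terminator
    *out_size = size + 9;
    return p;
}
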
- color_matrix = 0; - break; - } - mix_videoconfigparamsdec_set_color_matrix(this->config_params, color_matrix); - mix_videoconfigparamsdec_set_pixel_aspect_ratio( - this->config_params, - data->se_data->ASPECT_HORIZ_SIZE, - data->se_data->ASPECT_VERT_SIZE); - return MIX_RESULT_SUCCESS; + if (this->picture_width == 0 || + this->picture_height == 0) { + this->picture_width = data->se_data->CODED_WIDTH; + this->picture_height = data->se_data->CODED_HEIGHT; + mix_videoconfigparamsdec_set_picture_res( + this->config_params, + this->picture_width, + this->picture_height); + } + + // scaling has been performed on the decoded image. + mix_videoconfigparamsdec_set_video_range(this->config_params, 1); + uint8 color_matrix; + switch (data->se_data->MATRIX_COEF) { + case 1: + color_matrix = VA_SRC_BT709; + break; + // ITU-R BT.1700, ITU-R BT.601-5, and SMPTE 293M-1996. + case 6: + color_matrix = VA_SRC_BT601; + break; + default: + // unknown color matrix, set to 0 so color space flag will not be set. + color_matrix = 0; + break; + } + mix_videoconfigparamsdec_set_color_matrix(this->config_params, color_matrix); + mix_videoconfigparamsdec_set_pixel_aspect_ratio( + this->config_params, + data->se_data->ASPECT_HORIZ_SIZE, + data->se_data->ASPECT_VERT_SIZE); + return MIX_RESULT_SUCCESS; } MIX_RESULT MixVideoFormat_VC1::Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - - uint32 pret = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - enum _vbp_parser_type ptype = VBP_VC1; - vbp_data_vc1 *data = NULL; - MixIOVec *header = NULL; - gint numprofs = 0, numactualprofs = 0; - gint numentrypts = 0, numactualentrypts = 0; - VADisplay vadisplay = NULL; - VAProfile *profiles = NULL; - VAEntrypoint *entrypts = NULL; - VAConfigAttrib attrib; - VAStatus vret = VA_STATUS_SUCCESS; - guint extra_surfaces = 0; - VASurfaceID *surfaces = NULL; - guint numSurfaces = 0; - gint vaentrypt = 0; - gint vaprof = 0; - - //TODO Partition this method into smaller methods - if (config_params == NULL || frame_mgr == NULL || - !input_buf_pool || !surface_pool || !va_display) { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - LOG_V( "Begin\n"); - - // chain up parent method - MixVideoFormat::Initialize(config_params, frame_mgr, input_buf_pool, - surface_pool, va_display); - - if (ret != MIX_RESULT_SUCCESS) { - return ret; - } - LOG_V( "Locking\n"); - //From now on, we exit this function through cleanup: - Lock(); - - //Load the bitstream parser - pret = vbp_open(ptype, &(this->parser_handle)); - - if (!(pret == VBP_OK)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error opening parser\n"); - goto CLEAN_UP; - } - - LOG_V( "Opened parser\n"); - - ret = mix_videoconfigparamsdec_get_header(config_params, - &header); - - if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot get header data\n"); - goto CLEAN_UP; - } - - ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, - &extra_surfaces); - - if (ret != MIX_RESULT_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot get extra surface allocation setting\n"); - goto CLEAN_UP; - } - - LOG_V( "Calling parse on header data, handle %d\n", (int)this->parser_handle); - - ret = _update_seq_header(config_params, header); - if (ret != MIX_RESULT_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error updating sequence header\n"); - goto CLEAN_UP; - } - - pret = vbp_parse(this->parser_handle, 
header->data, - header->data_size, TRUE); - - if (!((pret == VBP_OK) || (pret == VBP_DONE))) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error parsing header data, size %d\n", header->data_size); - goto CLEAN_UP; - } - - - LOG_V( "Parsed header\n"); - //Get the header data and save - pret = vbp_query(this->parser_handle, (void **)&data); - - if ((pret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error reading parsed header data\n"); - goto CLEAN_UP; - } - LOG_V( "Queried parser for header data\n"); - - _update_config_params(data); - - //Time for libva initialization - vadisplay = this->va_display; - numprofs = vaMaxNumProfiles(vadisplay); - profiles = reinterpret_cast<VAProfile*>(g_malloc(numprofs*sizeof(VAProfile))); - - if (!profiles) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating memory\n"); - goto CLEAN_UP; - } - - vret = vaQueryConfigProfiles(vadisplay, profiles, - &numactualprofs); - if (!(vret == VA_STATUS_SUCCESS)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing video driver\n"); - goto CLEAN_UP; - } - - //check the desired profile support - - VAProfile profile; - switch (data->se_data->PROFILE) { - case 0: - profile = VAProfileVC1Simple; - break; - case 1: - profile = VAProfileVC1Main; - break; - default: - profile = VAProfileVC1Advanced; - break; - } - - for (; vaprof < numactualprofs; vaprof++) { - if (profiles[vaprof] == profile) - break; - } - if (vaprof >= numprofs || profiles[vaprof] != profile) { - ret = MIX_RESULT_FAIL; - LOG_E( "Profile not supported by driver\n"); - goto CLEAN_UP; - } - - numentrypts = vaMaxNumEntrypoints(vadisplay); - entrypts = reinterpret_cast<VAEntrypoint*>(g_malloc(numentrypts*sizeof(VAEntrypoint))); - - if (!entrypts) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating memory\n"); - goto CLEAN_UP; - } - - vret = vaQueryConfigEntrypoints(vadisplay, profiles[vaprof], - entrypts, &numactualentrypts); - - if (!(vret == VA_STATUS_SUCCESS)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing driver\n"); - goto CLEAN_UP; - } - - for (; vaentrypt < numactualentrypts; vaentrypt++) { - if (entrypts[vaentrypt] == VAEntrypointVLD) - break; - } - if (vaentrypt >= numentrypts || entrypts[vaentrypt] != VAEntrypointVLD) { - ret = MIX_RESULT_FAIL; - LOG_E( "Entry point not supported by driver\n"); - goto CLEAN_UP; - } - - //We are requesting RT attributes - attrib.type = VAConfigAttribRTFormat; - - vret = vaGetConfigAttributes(vadisplay, profiles[vaprof], - entrypts[vaentrypt], &attrib, 1); - - //TODO Handle other values returned for RT format - // and check with requested format provided in config params - //Right now only YUV 4:2:0 is supported by libva - // and this is our default - if (((attrib.value & VA_RT_FORMAT_YUV420) == 0) || vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing driver\n"); - goto CLEAN_UP; - } - - //Initialize and save the VA config ID - vret = vaCreateConfig(vadisplay, profiles[vaprof], - entrypts[vaentrypt], &attrib, 1, &(this->va_config)); - - if (!(vret == VA_STATUS_SUCCESS)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing driver\n"); - goto CLEAN_UP; - } - - LOG_V( "Created libva config with profile %d\n", vaprof); - - //Check for loop filtering - if (data->se_data->LOOPFILTER == 1) - this->loopFilter = TRUE; - else - this->loopFilter = FALSE; - - LOG_V( "loop filter is %d, TFCNTRFLAG is %d\n", - data->se_data->LOOPFILTER, data->se_data->TFCNTRFLAG); - - //Initialize the surface pool - if ((data->se_data->MAXBFRAMES > 0) || - (data->se_data->PROFILE == 3) || - 
(data->se_data->PROFILE == 1)) - //If Advanced profile, have to assume B frames may be present, since MAXBFRAMES is not valid for this prof - this->haveBframes = TRUE; - else - this->haveBframes = FALSE; - - //Calculate VC1 numSurfaces based on max number of B frames or - // MIX_VIDEO_VC1_SURFACE_NUM, whichever is less - - //Adding 1 to work around VBLANK issue - this->va_num_surfaces = 1 + extra_surfaces + - ((3 + (this->haveBframes ? 1 : 0) < MIX_VIDEO_VC1_SURFACE_NUM) ? - (3 + (this->haveBframes ? 1 : 0)) : MIX_VIDEO_VC1_SURFACE_NUM); - numSurfaces = this->va_num_surfaces; - this->va_surfaces = reinterpret_cast<VASurfaceID*>(g_malloc(sizeof(VASurfaceID)*numSurfaces)); - surfaces = this->va_surfaces; - - if (surfaces == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot allocate temporary data\n"); - goto CLEAN_UP; - } - - vret = vaCreateSurfaces( - vadisplay, this->picture_width, - this->picture_height, entrypts[vaentrypt], - numSurfaces, surfaces); - if (!(vret == VA_STATUS_SUCCESS)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error allocating surfaces\n"); - goto CLEAN_UP; - } - - this->surfacepool = mix_surfacepool_new(); - *surface_pool = this->surfacepool; - if (this->surfacepool == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing surface pool\n"); - goto CLEAN_UP; - } - - - ret = mix_surfacepool_initialize(this->surfacepool, - surfaces, numSurfaces, vadisplay); - - switch (ret) { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - default: - ret = MIX_RESULT_ALREADY_INIT; - LOG_E( "Error init failure\n"); - goto CLEAN_UP; - break; - } - - LOG_V( "Created %d libva surfaces, MAXBFRAMES is %d\n", - numSurfaces, data->se_data->MAXBFRAMES); - - //Initialize and save the VA context ID - //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 - vret = vaCreateContext(vadisplay, this->va_config, - this->picture_width, this->picture_height, - 0, surfaces, numSurfaces, - &(this->va_context)); - if (!(vret == VA_STATUS_SUCCESS)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing video driver\n"); - goto CLEAN_UP; - } - - LOG_V( "Created libva context width %d, height %d\n", - this->picture_width, this->picture_height); - LOG_V( "mix_video vinfo: Content type %s, %s\n", - (header->data_size > 8) ? "VC-1" : "WMV", (data->se_data->INTERLACE) ? 
"interlaced" : "progressive"); - LOG_V( "mix_video vinfo: Content width %d, height %d\n", - this->picture_width, this->picture_height); - LOG_V( "mix_video vinfo: MAXBFRAMES %d (note that for Advanced profile, MAXBFRAMES can be zero and there still can be B frames in the content)\n", - data->se_data->MAXBFRAMES); - LOG_V( "mix_video vinfo: PROFILE %d, LEVEL %d\n", - data->se_data->PROFILE, data->se_data->LEVEL); + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display) { + + uint32 pret = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + enum _vbp_parser_type ptype = VBP_VC1; + vbp_data_vc1 *data = NULL; + MixIOVec *header = NULL; + int numprofs = 0, numactualprofs = 0; + int numentrypts = 0, numactualentrypts = 0; + VADisplay vadisplay = NULL; + VAProfile *profiles = NULL; + VAEntrypoint *entrypts = NULL; + VAConfigAttrib attrib; + VAStatus vret = VA_STATUS_SUCCESS; + uint extra_surfaces = 0; + VASurfaceID *surfaces = NULL; + uint numSurfaces = 0; + int vaentrypt = 0; + int vaprof = 0; + + //TODO Partition this method into smaller methods + if (config_params == NULL || frame_mgr == NULL || + !input_buf_pool || !surface_pool || !va_display) { + LOG_E( "NUll pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + LOG_V( "Begin\n"); + + // chain up parent method + MixVideoFormat::Initialize(config_params, frame_mgr, input_buf_pool, + surface_pool, va_display); + + if (ret != MIX_RESULT_SUCCESS) { + return ret; + } + LOG_V( "Locking\n"); + //From now on, we exit this function through cleanup: + Lock(); + + //Load the bitstream parser + pret = vbp_open(ptype, &(this->parser_handle)); + + if (!(pret == VBP_OK)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error opening parser\n"); + goto CLEAN_UP; + } + + LOG_V( "Opened parser\n"); + + ret = mix_videoconfigparamsdec_get_header(config_params, + &header); + + if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get header data\n"); + goto CLEAN_UP; + } + + ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, + &extra_surfaces); + + if (ret != MIX_RESULT_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get extra surface allocation setting\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling parse on header data, handle %d\n", (int)this->parser_handle); + + ret = _update_seq_header(config_params, header); + if (ret != MIX_RESULT_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error updating sequence header\n"); + goto CLEAN_UP; + } + + pret = vbp_parse(this->parser_handle, header->data, + header->data_size, TRUE); + + if (!((pret == VBP_OK) || (pret == VBP_DONE))) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error parsing header data, size %d\n", header->data_size); + goto CLEAN_UP; + } + + + LOG_V( "Parsed header\n"); + //Get the header data and save + pret = vbp_query(this->parser_handle, (void **)&data); + + if ((pret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error reading parsed header data\n"); + goto CLEAN_UP; + } + LOG_V( "Queried parser for header data\n"); + + _update_config_params(data); + + //Time for libva initialization + vadisplay = this->va_display; + numprofs = vaMaxNumProfiles(vadisplay); + profiles = reinterpret_cast(malloc(numprofs*sizeof(VAProfile))); + + if (!profiles) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating memory\n"); + goto CLEAN_UP; + } + + vret = vaQueryConfigProfiles(vadisplay, profiles, + &numactualprofs); + if (!(vret == 
VA_STATUS_SUCCESS)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + goto CLEAN_UP; + } + + //check the desired profile support + + VAProfile profile; + switch (data->se_data->PROFILE) { + case 0: + profile = VAProfileVC1Simple; + break; + case 1: + profile = VAProfileVC1Main; + break; + default: + profile = VAProfileVC1Advanced; + break; + } + + for (; vaprof < numactualprofs; vaprof++) { + if (profiles[vaprof] == profile) + break; + } + if (vaprof >= numprofs || profiles[vaprof] != profile) { + ret = MIX_RESULT_FAIL; + LOG_E( "Profile not supported by driver\n"); + goto CLEAN_UP; + } + + numentrypts = vaMaxNumEntrypoints(vadisplay); + entrypts = reinterpret_cast<VAEntrypoint*>(malloc(numentrypts*sizeof(VAEntrypoint))); + + if (!entrypts) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating memory\n"); + goto CLEAN_UP; + } + + vret = vaQueryConfigEntrypoints(vadisplay, profiles[vaprof], + entrypts, &numactualentrypts); + + if (!(vret == VA_STATUS_SUCCESS)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing driver\n"); + goto CLEAN_UP; + } + + for (; vaentrypt < numactualentrypts; vaentrypt++) { + if (entrypts[vaentrypt] == VAEntrypointVLD) + break; + } + if (vaentrypt >= numentrypts || entrypts[vaentrypt] != VAEntrypointVLD) { + ret = MIX_RESULT_FAIL; + LOG_E( "Entry point not supported by driver\n"); + goto CLEAN_UP; + } + + //We are requesting RT attributes + attrib.type = VAConfigAttribRTFormat; + + vret = vaGetConfigAttributes(vadisplay, profiles[vaprof], + entrypts[vaentrypt], &attrib, 1); + + //TODO Handle other values returned for RT format + // and check with requested format provided in config params + //Right now only YUV 4:2:0 is supported by libva + // and this is our default + if (((attrib.value & VA_RT_FORMAT_YUV420) == 0) || vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing driver\n"); + goto CLEAN_UP; + } + + //Initialize and save the VA config ID + vret = vaCreateConfig(vadisplay, profiles[vaprof], + entrypts[vaentrypt], &attrib, 1, &(this->va_config)); + + if (!(vret == VA_STATUS_SUCCESS)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing driver\n"); + goto CLEAN_UP; + } + + LOG_V( "Created libva config with profile %d\n", vaprof); + + //Check for loop filtering + if (data->se_data->LOOPFILTER == 1) + this->loopFilter = TRUE; + else + this->loopFilter = FALSE; + + LOG_V( "loop filter is %d, TFCNTRFLAG is %d\n", + data->se_data->LOOPFILTER, data->se_data->TFCNTRFLAG); + + //Initialize the surface pool + if ((data->se_data->MAXBFRAMES > 0) || + (data->se_data->PROFILE == 3) || + (data->se_data->PROFILE == 1)) + //If Advanced profile, have to assume B frames may be present, since MAXBFRAMES is not valid for this prof + this->haveBframes = TRUE; + else + this->haveBframes = FALSE; + + //Calculate VC1 numSurfaces based on max number of B frames or + // MIX_VIDEO_VC1_SURFACE_NUM, whichever is less + + //Adding 1 to work around VBLANK issue + this->va_num_surfaces = 1 + extra_surfaces + + ((3 + (this->haveBframes ? 1 : 0) < MIX_VIDEO_VC1_SURFACE_NUM) ? + (3 + (this->haveBframes ? 
1 : 0)) : MIX_VIDEO_VC1_SURFACE_NUM); + numSurfaces = this->va_num_surfaces; + this->va_surfaces = reinterpret_cast<VASurfaceID*>(malloc(sizeof(VASurfaceID)*numSurfaces)); + surfaces = this->va_surfaces; + + if (surfaces == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot allocate temporary data\n"); + goto CLEAN_UP; + } + + vret = vaCreateSurfaces( + vadisplay, this->picture_width, + this->picture_height, entrypts[vaentrypt], + numSurfaces, surfaces); + if (!(vret == VA_STATUS_SUCCESS)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error allocating surfaces\n"); + goto CLEAN_UP; + } + + this->surfacepool = mix_surfacepool_new(); + *surface_pool = this->surfacepool; + if (this->surfacepool == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing surface pool\n"); + goto CLEAN_UP; + } + + + ret = mix_surfacepool_initialize(this->surfacepool, + surfaces, numSurfaces, vadisplay); + + switch (ret) { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + default: + ret = MIX_RESULT_ALREADY_INIT; + LOG_E( "Error init failure\n"); + goto CLEAN_UP; + break; + } + + LOG_V( "Created %d libva surfaces, MAXBFRAMES is %d\n", + numSurfaces, data->se_data->MAXBFRAMES); + + //Initialize and save the VA context ID + //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 + vret = vaCreateContext(vadisplay, this->va_config, + this->picture_width, this->picture_height, + 0, surfaces, numSurfaces, + &(this->va_context)); + if (!(vret == VA_STATUS_SUCCESS)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + goto CLEAN_UP; + } + + LOG_V( "Created libva context width %d, height %d\n", + this->picture_width, this->picture_height); + LOG_V( "mix_video vinfo: Content type %s, %s\n", + (header->data_size > 8) ? "VC-1" : "WMV", (data->se_data->INTERLACE) ? 
"interlaced" : "progressive"); + LOG_V( "mix_video vinfo: Content width %d, height %d\n", + this->picture_width, this->picture_height); + LOG_V( "mix_video vinfo: MAXBFRAMES %d (note that for Advanced profile, MAXBFRAMES can be zero and there still can be B frames in the content)\n", + data->se_data->MAXBFRAMES); + LOG_V( "mix_video vinfo: PROFILE %d, LEVEL %d\n", + data->se_data->PROFILE, data->se_data->LEVEL); CLEAN_UP: - if (ret != MIX_RESULT_SUCCESS) { - pret = vbp_close(this->parser_handle); - this->parser_handle = NULL; - this->initialized = FALSE; - } else { - this->initialized = TRUE; - } - if (header != NULL) { - if (header->data != NULL) - g_free(header->data); - g_free(header); - header = NULL; - } - g_free(profiles); - g_free(entrypts); - this->lastFrame = NULL; - LOG_V( "Unlocking\n"); - Unlock(); - LOG_V( "End\n"); - return ret; + if (ret != MIX_RESULT_SUCCESS) { + pret = vbp_close(this->parser_handle); + this->parser_handle = NULL; + this->initialized = FALSE; + } else { + this->initialized = TRUE; + } + if (header != NULL) { + if (header->data != NULL) + free(header->data); + free(header); + header = NULL; + } + free(profiles); + free(entrypts); + this->lastFrame = NULL; + LOG_V( "Unlocking\n"); + Unlock(); + LOG_V( "End\n"); + return ret; } MIX_RESULT MixVideoFormat_VC1::Decode( - MixBuffer * bufin[], gint bufincnt, - MixVideoDecodeParams * decode_params) { - - uint32 pret = 0; - int i = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - guint64 ts = 0; - vbp_data_vc1 *data = NULL; - gboolean discontinuity = FALSE; - MixInputBufferEntry *bufentry = NULL; - if (bufin == NULL || decode_params == NULL) { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - //TODO remove iovout and iovoutcnt; they are not used (need to remove from MixVideo/MI-X API too) - LOG_V( "Begin\n"); - /* Chainup parent method. - We are not chaining up to parent method for now. - */ + MixBuffer * bufin[], int bufincnt, + MixVideoDecodeParams * decode_params) { + + uint32 pret = 0; + int i = 0; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + uint64 ts = 0; + vbp_data_vc1 *data = NULL; + bool discontinuity = FALSE; + MixInputBufferEntry *bufentry = NULL; + if (bufin == NULL || decode_params == NULL) { + LOG_E( "NUll pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + //TODO remove iovout and iovoutcnt; they are not used (need to remove from MixVideo/MI-X API too) + LOG_V( "Begin\n"); + /* Chainup parent method. + We are not chaining up to parent method for now. 
+ */ #if 0 - if (parent_class->decode) { - return parent_class->decode(mix, bufin, bufincnt, - decode_params); - } #endif - ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); - if (ret != MIX_RESULT_SUCCESS) { - return MIX_RESULT_FAIL; - } - - ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); - if (ret != MIX_RESULT_SUCCESS) { - return MIX_RESULT_FAIL; - } - - //From now on, we exit this function through cleanup: - LOG_V( "Locking\n"); - Lock(); - - //If this is a new frame and we haven't retrieved parser - // workload data from previous frame yet, do so - if ((ts != this->current_timestamp) && - (this->parse_in_progress)) { - //query for data - pret = vbp_query(this->parser_handle, (void **) &data); - if ((pret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing parser\n"); - goto CLEAN_UP; - } - LOG_V( "Queried for last frame data\n"); - //process and decode data - ret = _process_decode(data, this->current_timestamp, this->discontinuity_frame_in_progress); - - if (ret != MIX_RESULT_SUCCESS) { - //We log this but need to process the new frame data, so do not return - LOG_E( "process_decode failed.\n"); - } - LOG_V( "Called process and decode for last frame\n"); - this->parse_in_progress = FALSE; - } - - this->current_timestamp = ts; - this->discontinuity_frame_in_progress = discontinuity; - LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_vc1_counter++, ts); - - for (i = 0; i < bufincnt; i++) { - LOG_V( "Calling parse for current frame, parse handle %d, buf %x, size %d\n", - (int)this->parser_handle, (guint)bufin[i]->data, bufin[i]->size); - pret = vbp_parse(this->parser_handle, bufin[i]->data, bufin[i]->size, FALSE); - LOG_V( "Called parse for current frame\n"); - if (pret == VBP_DONE) { - //query for data - pret = vbp_query(this->parser_handle, (void **) &data); - if ((pret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error getting parser data\n"); - goto CLEAN_UP; - } - LOG_V( "Called query for current frame\n"); - //Increase the ref count of this input buffer - mix_buffer_ref(bufin[i]); - //Create a new MixInputBufferEntry - //TODO make this from a pool to optimize - bufentry = reinterpret_cast<MixInputBufferEntry*>(g_malloc(sizeof( - MixInputBufferEntry))); - if (bufentry == NULL) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating bufentry\n"); - goto CLEAN_UP; - } - - bufentry->buf = bufin[i]; - LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"G_GINT64_FORMAT"\n", - (guint)bufentry, (guint)bufentry->buf, ts); - bufentry->timestamp = ts; - - LOG_V( "Enqueue this input buffer for current frame\n"); - LOG_V( "bufentry->timestamp %"G_GINT64_FORMAT"\n", bufentry->timestamp); - - //Enqueue this input buffer - g_queue_push_tail(this->inputbufqueue, - (gpointer)bufentry); - - //process and decode data - ret = _process_decode(data, ts, discontinuity); - - if (ret != MIX_RESULT_SUCCESS) { - //We log this but continue since we need to complete our processing of input buffers - LOG_E( "Process_decode failed.\n"); - } - - LOG_V( "Called process and decode for current frame\n"); - this->parse_in_progress = FALSE; - } else if (pret != VBP_OK) { - //We log this but continue since we need to complete our processing of input buffers - LOG_E( "Parsing failed.\n"); - ret = MIX_RESULT_FAIL; - } else { - LOG_V( "Enqueuing buffer and going on to next (if any) for this frame\n"); - 
//Increase the ref count of this input buffer - mix_buffer_ref(bufin[i]); - //Create a new MixInputBufferEntry - //TODO make this from a pool to optimize - bufentry = reinterpret_cast<MixInputBufferEntry*>(g_malloc(sizeof(MixInputBufferEntry))); - if (bufentry == NULL) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating bufentry\n"); - goto CLEAN_UP; - } - bufentry->buf = bufin[i]; - bufentry->timestamp = ts; - - //Enqueue this input buffer - g_queue_push_tail(this->inputbufqueue, - (gpointer)bufentry); - this->parse_in_progress = TRUE; - } - } + ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); + if (ret != MIX_RESULT_SUCCESS) { + return MIX_RESULT_FAIL; + } + + ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); + if (ret != MIX_RESULT_SUCCESS) { + return MIX_RESULT_FAIL; + } + + //From now on, we exit this function through cleanup: + LOG_V( "Locking\n"); + Lock(); + + //If this is a new frame and we haven't retrieved parser + // workload data from previous frame yet, do so + if ((ts != this->current_timestamp) && + (this->parse_in_progress)) { + //query for data + pret = vbp_query(this->parser_handle, (void **) &data); + if ((pret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing parser\n"); + goto CLEAN_UP; + } + LOG_V( "Queried for last frame data\n"); + //process and decode data + ret = _process_decode(data, this->current_timestamp, this->discontinuity_frame_in_progress); + + if (ret != MIX_RESULT_SUCCESS) { + //We log this but need to process the new frame data, so do not return + LOG_E( "process_decode failed.\n"); + } + LOG_V( "Called process and decode for last frame\n"); + this->parse_in_progress = FALSE; + } + + this->current_timestamp = ts; + this->discontinuity_frame_in_progress = discontinuity; + LOG_V( "Starting current frame %d, timestamp %"UINT64_FORMAT"\n", mix_video_vc1_counter++, ts); + + for (i = 0; i < bufincnt; i++) { + LOG_V( "Calling parse for current frame, parse handle %d, buf %x, size %d\n", + (int)this->parser_handle, (uint)bufin[i]->data, bufin[i]->size); + pret = vbp_parse(this->parser_handle, bufin[i]->data, bufin[i]->size, FALSE); + LOG_V( "Called parse for current frame\n"); + if (pret == VBP_DONE) { + //query for data + pret = vbp_query(this->parser_handle, (void **) &data); + if ((pret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error getting parser data\n"); + goto CLEAN_UP; + } + LOG_V( "Called query for current frame\n"); + //Increase the ref count of this input buffer + mix_buffer_ref(bufin[i]); + //Create a new MixInputBufferEntry + //TODO make this from a pool to optimize + bufentry = reinterpret_cast<MixInputBufferEntry*>(malloc(sizeof( + MixInputBufferEntry))); + if (bufentry == NULL) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating bufentry\n"); + goto CLEAN_UP; + } + + bufentry->buf = bufin[i]; + LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"UINT64_FORMAT"\n", + (uint)bufentry, (uint)bufentry->buf, ts); + bufentry->timestamp = ts; + + LOG_V( "Enqueue this input buffer for current frame\n"); + LOG_V( "bufentry->timestamp %"UINT64_FORMAT"\n", bufentry->timestamp); + + //Enqueue this input buffer + j_queue_push_tail(this->inputbufqueue, + (void*)bufentry); + + //process and decode data + ret = _process_decode(data, ts, discontinuity); + + if (ret != MIX_RESULT_SUCCESS) { + //We log this but continue since we need to complete our processing of input buffers + LOG_E( "Process_decode failed.\n"); + } + + LOG_V( "Called process and decode for current frame\n"); + 
this->parse_in_progress = FALSE; + } else if (pret != VBP_OK) { + //We log this but continue since we need to complete our processing of input buffers + LOG_E( "Parsing failed.\n"); + ret = MIX_RESULT_FAIL; + } else { + LOG_V( "Enqueuing buffer and going on to next (if any) for this frame\n"); + //Increase the ref count of this input buffer + mix_buffer_ref(bufin[i]); + //Create a new MixInputBufferEntry + //TODO make this from a pool to optimize + bufentry = reinterpret_cast<MixInputBufferEntry*>(malloc(sizeof(MixInputBufferEntry))); + if (bufentry == NULL) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating bufentry\n"); + goto CLEAN_UP; + } + bufentry->buf = bufin[i]; + bufentry->timestamp = ts; + + //Enqueue this input buffer + j_queue_push_tail(this->inputbufqueue, + (void*)bufentry); + this->parse_in_progress = TRUE; + } + } CLEAN_UP: - LOG_V( "Unlocking\n"); - Unlock(); - LOG_V( "End\n"); - return ret; + LOG_V( "Unlocking\n"); + Unlock(); + LOG_V( "End\n"); + return ret; } #ifdef YUVDUMP //TODO Complete this YUVDUMP code and move into base class MIX_RESULT MixVideoFormat_VC1::_get_Img_from_surface (MixVideoFrame * frame) { - VAStatus vaStatus = VA_STATUS_SUCCESS; - VAImageFormat va_image_format; - VAImage va_image; - unsigned char* pBuffer; - unsigned int ui32SrcWidth = this->picture_width; - unsigned int ui32SrcHeight = this->picture_height; - unsigned int ui32Stride; - unsigned int ui32ChromaOffset; - FILE *fp = NULL; - int r = 0; - int i; - g_print ("_get_Img_from_surface \n"); - - if (NULL == frame) { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - fp = fopen("yuvdump.yuv", "a+"); - - static int have_va_image = 0; - - if (!have_va_image) { - va_image_format.fourcc = VA_FOURCC_NV12; - //va_image_format.fourcc = VA_FOURCC_YV12; - vaStatus = vaCreateImage( - this->va_display, &va_image_format, - ui32SrcWidth, ui32SrcHeight, &va_image); - have_va_image = 1; - } - - vaStatus = vaGetImage( - this->va_display, frame->frame_id, 0, 0, - ui32SrcWidth, ui32SrcHeight, va_image.image_id ); - vaStatus = vaMapBuffer(this->va_display, va_image.buf, (void **) &pBuffer); - ui32ChromaOffset = va_image.offsets[1]; - ui32Stride = va_image.pitches[0]; - - if (VA_STATUS_SUCCESS != vaStatus) { - g_print ("VideoProcessBlt: Unable to copy surface\n\r"); - return vaStatus; - } - - { - g_print ("before copy memory....\n"); - g_print ("width = %d, height = %d\n", ui32SrcWidth, ui32SrcHeight); - g_print ("data_size = %d\n", va_image.data_size); - g_print ("num_planes = %d\n", va_image.num_planes); - g_print ("va_image.pitches[0] = %d\n", va_image.pitches[0]); - g_print ("va_image.pitches[1] = %d\n", va_image.pitches[1]); - g_print ("va_image.pitches[2] = %d\n", va_image.pitches[2]); - g_print ("va_image.offsets[0] = %d\n", va_image.offsets[0]); - g_print ("va_image.offsets[1] = %d\n", va_image.offsets[1]); - g_print ("va_image.offsets[2] = %d\n", va_image.offsets[2]); - // r = fwrite (pBuffer, 1, va_image.offsets[1], fp); - - r = fwrite (pBuffer, va_image.offsets[1], 1, fp); - - for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2) - r = fwrite (pBuffer + va_image.offsets[1] + i / 2, 1, 1, fp); - - for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2) - r = fwrite (pBuffer + va_image.offsets[1] + i / 2 + 1, 1, 1, fp); - - g_print ("ui32ChromaOffset = %d, ui32Stride = %d\n", ui32ChromaOffset, ui32Stride); - - } - - vaStatus = vaUnmapBuffer(this->va_display, va_image.buf); - return vaStatus; + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAImageFormat va_image_format; + VAImage va_image; + 
unsigned char* pBuffer; + unsigned int ui32SrcWidth = this->picture_width; + unsigned int ui32SrcHeight = this->picture_height; + unsigned int ui32Stride; + unsigned int ui32ChromaOffset; + FILE *fp = NULL; + int r = 0; + int i; + g_print ("_get_Img_from_surface \n"); + + if (NULL == frame) { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + fp = fopen("yuvdump.yuv", "a+"); + + static int have_va_image = 0; + + if (!have_va_image) { + va_image_format.fourcc = VA_FOURCC_NV12; + //va_image_format.fourcc = VA_FOURCC_YV12; + vaStatus = vaCreateImage( + this->va_display, &va_image_format, + ui32SrcWidth, ui32SrcHeight, &va_image); + have_va_image = 1; + } + + vaStatus = vaGetImage( + this->va_display, frame->frame_id, 0, 0, + ui32SrcWidth, ui32SrcHeight, va_image.image_id ); + vaStatus = vaMapBuffer(this->va_display, va_image.buf, (void **) &pBuffer); + ui32ChromaOffset = va_image.offsets[1]; + ui32Stride = va_image.pitches[0]; + + if (VA_STATUS_SUCCESS != vaStatus) { + g_print ("VideoProcessBlt: Unable to copy surface\n\r"); + return vaStatus; + } + + { + g_print ("before copy memory....\n"); + g_print ("width = %d, height = %d\n", ui32SrcWidth, ui32SrcHeight); + g_print ("data_size = %d\n", va_image.data_size); + g_print ("num_planes = %d\n", va_image.num_planes); + g_print ("va_image.pitches[0] = %d\n", va_image.pitches[0]); + g_print ("va_image.pitches[1] = %d\n", va_image.pitches[1]); + g_print ("va_image.pitches[2] = %d\n", va_image.pitches[2]); + g_print ("va_image.offsets[0] = %d\n", va_image.offsets[0]); + g_print ("va_image.offsets[1] = %d\n", va_image.offsets[1]); + g_print ("va_image.offsets[2] = %d\n", va_image.offsets[2]); + // r = fwrite (pBuffer, 1, va_image.offsets[1], fp); + + r = fwrite (pBuffer, va_image.offsets[1], 1, fp); + + for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2) + r = fwrite (pBuffer + va_image.offsets[1] + i / 2, 1, 1, fp); + + for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2) + r = fwrite (pBuffer + va_image.offsets[1] + i / 2 + 1, 1, 1, fp); + + g_print ("ui32ChromaOffset = %d, ui32Stride = %d\n", ui32ChromaOffset, ui32Stride); + + } + + vaStatus = vaUnmapBuffer(this->va_display, va_image.buf); + return vaStatus; } #endif /* YUVDUMP */ MIX_RESULT MixVideoFormat_VC1::_decode_a_picture( - vbp_data_vc1 *data, int pic_index, MixVideoFrame *frame) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - VADisplay vadisplay = NULL; - VAContextID vacontext; - guint buffer_id_cnt = 0; - VABufferID *buffer_ids = NULL; - vbp_picture_data_vc1* pic_data = &(data->pic_data[pic_index]); - VAPictureParameterBufferVC1 *pic_params = pic_data->pic_parms; - enum _picture_type frame_type = VC1_PTYPE_I; - gulong surface = 0; - - if (pic_params == NULL) { - ret = MIX_RESULT_NULL_PTR; - LOG_E( "Error reading parser data\n"); - goto CLEAN_UP; - } - - LOG_V( "num_slices is %d, allocating %d buffer_ids\n", pic_data->num_slices, (pic_data->num_slices * 2) + 2); - - //Set up reference frames for the picture parameter buffer - //Set the picture type (I, B or P frame) - frame_type = (_picture_type)pic_params->picture_fields.bits.picture_type; - - //Check for B frames after a seek - //We need to have both reference frames in hand before we can decode a B frame - //If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME - //Note: demuxer should do the right thing and only seek to I frame, so we should - // not get P frame first, but may get B frames after the first I frame - if (frame_type == VC1_PTYPE_B) { - 
if (this->reference_frames[1] == NULL) { - LOG_E( "Insufficient reference frames for B frame\n"); - ret = MIX_RESULT_DROPFRAME; - goto CLEAN_UP; - } - } - - buffer_ids = reinterpret_cast<VABufferID *>(g_malloc(sizeof(VABufferID) * ((pic_data->num_slices * 2) + 2))); - if (buffer_ids == NULL) { - LOG_E( "Cannot allocate buffer IDs\n"); - ret = MIX_RESULT_NO_MEMORY; - goto CLEAN_UP; - } - - LOG_V( "Getting a new surface\n"); - LOG_V( "frame type is %d\n", frame_type); - - //Get our surface ID from the frame object - ret = mix_videoframe_get_frame_id(frame, &surface); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting surface ID from frame object\n"); - goto CLEAN_UP; - } - - //Get a frame from the surface pool - if (0 == pic_index) { - //Set the frame type for the frame object (used in reordering by frame manager) - switch (frame_type) { - case VC1_PTYPE_I: // I frame type - case VC1_PTYPE_P: // P frame type - case VC1_PTYPE_B: // B frame type - ret = mix_videoframe_set_frame_type(frame, (MixFrameType)frame_type); - break; - case VC1_PTYPE_BI: // BI frame type - ret = mix_videoframe_set_frame_type(frame, TYPE_B); - break; - //Not indicated here case VC1_PTYPE_SKIPPED: - default: - break; - } - } - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error setting frame type on frame\n"); - goto CLEAN_UP; - } - - LOG_V( "Setting reference frames in picparams, frame_type = %d\n", frame_type); - //TODO Check if we need to add more handling of B or P frames when reference frames are not set up (such as after flush/seek) - - switch (frame_type) { - case VC1_PTYPE_I: // I frame type - /* forward and backward reference pictures are not used but just set to current - surface to be consistent with the test suite - */ - pic_params->forward_reference_picture = surface; - pic_params->backward_reference_picture = surface; - LOG_V( "I frame, surface ID %u\n", (guint)frame->frame_id); - LOG_V( "mix_video vinfo: Frame type is I\n"); - break; - case VC1_PTYPE_P: // P frame type - // check REFDIST in the picture parameter buffer - if (0 != pic_params->reference_fields.bits.reference_distance_flag && - 0 != pic_params->reference_fields.bits.reference_distance) { - /* The previous decoded frame (distance is up to 16 but not 0) is used - for reference, as we don't allocate that many surfaces so the reference picture - could have been overwritten and hence not available for reference. - */ - LOG_E( "reference distance is not 0!"); - ret = MIX_RESULT_DROPFRAME; - goto CLEAN_UP; - } - if (1 == pic_index) { - // handle interlace field coding case - if (1 == pic_params->reference_fields.bits.num_reference_pictures || - 1 == pic_params->reference_fields.bits.reference_field_pic_indicator) { - /* two reference fields or the second closest I/P field is used for - prediction. Set forward reference picture to INVALID so it will be - updated to a valid previous reconstructed reference frame later. - */ - pic_params->forward_reference_picture = VA_INVALID_SURFACE; - } else { - /* the closest I/P is used for reference so it must be the - complementary field in the same surface.
- */ - pic_params->forward_reference_picture = surface; - } - } - if (VA_INVALID_SURFACE == pic_params->forward_reference_picture) { - if (this->reference_frames[1]) { - pic_params->forward_reference_picture = this->reference_frames[1]->frame_id; - } else if (this->reference_frames[0]) { - pic_params->forward_reference_picture = this->reference_frames[0]->frame_id; - } else { - ret = MIX_RESULT_DROPFRAME; - LOG_E( "Error could not find reference frames for P frame\n"); - goto CLEAN_UP; - } - } - pic_params->backward_reference_picture = VA_INVALID_SURFACE; + vbp_data_vc1 *data, int pic_index, MixVideoFrame *frame) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + VADisplay vadisplay = NULL; + VAContextID vacontext; + uint buffer_id_cnt = 0; + VABufferID *buffer_ids = NULL; + vbp_picture_data_vc1* pic_data = &(data->pic_data[pic_index]); + VAPictureParameterBufferVC1 *pic_params = pic_data->pic_parms; + enum _picture_type frame_type = VC1_PTYPE_I; + ulong surface = 0; + + if (pic_params == NULL) { + ret = MIX_RESULT_NULL_PTR; + LOG_E( "Error reading parser data\n"); + goto CLEAN_UP; + } + + LOG_V( "num_slices is %d, allocating %d buffer_ids\n", pic_data->num_slices, (pic_data->num_slices * 2) + 2); + + //Set up reference frames for the picture parameter buffer + //Set the picture type (I, B or P frame) + frame_type = (_picture_type)pic_params->picture_fields.bits.picture_type; + + //Check for B frames after a seek + //We need to have both reference frames in hand before we can decode a B frame + //If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME + //Note: demuxer should do the right thing and only seek to I frame, so we should + // not get P frame first, but may get B frames after the first I frame + if (frame_type == VC1_PTYPE_B) { + if (this->reference_frames[1] == NULL) { + LOG_E( "Insufficient reference frames for B frame\n"); + ret = MIX_RESULT_DROPFRAME; + goto CLEAN_UP; + } + } + + buffer_ids = reinterpret_cast<VABufferID *>(malloc(sizeof(VABufferID) * ((pic_data->num_slices * 2) + 2))); + if (buffer_ids == NULL) { + LOG_E( "Cannot allocate buffer IDs\n"); + ret = MIX_RESULT_NO_MEMORY; + goto CLEAN_UP; + } + + LOG_V( "Getting a new surface\n"); + LOG_V( "frame type is %d\n", frame_type); + + //Get our surface ID from the frame object + ret = mix_videoframe_get_frame_id(frame, &surface); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting surface ID from frame object\n"); + goto CLEAN_UP; + } + + //Get a frame from the surface pool + if (0 == pic_index) { + //Set the frame type for the frame object (used in reordering by frame manager) + switch (frame_type) { + case VC1_PTYPE_I: // I frame type + case VC1_PTYPE_P: // P frame type + case VC1_PTYPE_B: // B frame type + ret = mix_videoframe_set_frame_type(frame, (MixFrameType)frame_type); + break; + case VC1_PTYPE_BI: // BI frame type + ret = mix_videoframe_set_frame_type(frame, TYPE_B); + break; + //Not indicated here case VC1_PTYPE_SKIPPED: + default: + break; + } + } + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error setting frame type on frame\n"); + goto CLEAN_UP; + } + + LOG_V( "Setting reference frames in picparams, frame_type = %d\n", frame_type); + //TODO Check if we need to add more handling of B or P frames when reference frames are not set up (such as after flush/seek) + + switch (frame_type) { + case VC1_PTYPE_I: // I frame type + /* forward and backward reference pictures are not used but just set to current + surface to be consistent with the test suite + */ +
pic_params->forward_reference_picture = surface; + pic_params->backward_reference_picture = surface; + LOG_V( "I frame, surface ID %u\n", (uint)frame->frame_id); + LOG_V( "mix_video vinfo: Frame type is I\n"); + break; + case VC1_PTYPE_P: // P frame type + // check REFDIST in the picture parameter buffer + if (0 != pic_params->reference_fields.bits.reference_distance_flag && + 0 != pic_params->reference_fields.bits.reference_distance) { + /* The previous decoded frame (distance is up to 16 but not 0) is used + for reference, as we don't allocate that many surfaces so the reference picture + could have been overwritten and hence not available for reference. + */ + LOG_E( "reference distance is not 0!"); + ret = MIX_RESULT_DROPFRAME; + goto CLEAN_UP; + } + if (1 == pic_index) { + // handle interlace field coding case + if (1 == pic_params->reference_fields.bits.num_reference_pictures || + 1 == pic_params->reference_fields.bits.reference_field_pic_indicator) { + /* two reference fields or the second closest I/P field is used for + prediction. Set forward reference picture to INVALID so it will be + updated to a valid previous reconstructed reference frame later. + */ + pic_params->forward_reference_picture = VA_INVALID_SURFACE; + } else { + /* the closest I/P is used for reference so it must be the + complementary field in the same surface. + */ + pic_params->forward_reference_picture = surface; + } + } + if (VA_INVALID_SURFACE == pic_params->forward_reference_picture) { + if (this->reference_frames[1]) { + pic_params->forward_reference_picture = this->reference_frames[1]->frame_id; + } else if (this->reference_frames[0]) { + pic_params->forward_reference_picture = this->reference_frames[0]->frame_id; + } else { + ret = MIX_RESULT_DROPFRAME; + LOG_E( "Error could not find reference frames for P frame\n"); + goto CLEAN_UP; + } + } + pic_params->backward_reference_picture = VA_INVALID_SURFACE; #ifdef MIX_LOG_ENABLE /* this is to fix a crash when MIX_LOG_ENABLE is set */ - if(this->reference_frames[0] && frame) { - LOG_V( "P frame, surface ID %u, forw ref frame is %u\n", - (guint)frame->frame_id, (guint)this->reference_frames[0]->frame_id); - } + if (this->reference_frames[0] && frame) { + LOG_V( "P frame, surface ID %u, forw ref frame is %u\n", + (uint)frame->frame_id, (uint)this->reference_frames[0]->frame_id); + } #endif - LOG_V( "mix_video vinfo: Frame type is P\n"); - break; - - case VC1_PTYPE_B: // B frame type - LOG_V( "B frame, forw ref %d, back ref %d\n", - (guint)this->reference_frames[0]->frame_id, - (guint)this->reference_frames[1]->frame_id); - - if (!this->haveBframes) {//We don't expect B frames and have not allocated a surface - // for the extra ref frame so this is an error - ret = MIX_RESULT_DROPFRAME; - LOG_E( "Unexpected B frame, cannot process\n"); - goto CLEAN_UP; - } - - pic_params->forward_reference_picture = this->reference_frames[0]->frame_id; - pic_params->backward_reference_picture = this->reference_frames[1]->frame_id; - - LOG_V( "B frame, surface ID %u, forw ref %d, back ref %d\n", - (guint)frame->frame_id, (guint)this->reference_frames[0]->frame_id, - (guint)this->reference_frames[1]->frame_id); - LOG_V( "mix_video vinfo: Frame type is B\n"); - break; - case VC1_PTYPE_BI: - pic_params->forward_reference_picture = VA_INVALID_SURFACE; - pic_params->backward_reference_picture = VA_INVALID_SURFACE; - LOG_V( "BI frame\n"); - LOG_V( "mix_video vinfo: Frame type is BI\n"); - break; - case VC1_PTYPE_SKIPPED: - //Will never happen here - break; - default: - LOG_V( "Hit
default\n"); - break; - } - - //Loop filter handling - if (this->loopFilter) { - LOG_V( "Setting in loop decoded picture to current frame\n"); - LOG_V( "Double checking picparams inloop filter is %d\n", - pic_params->entrypoint_fields.bits.loopfilter); - pic_params->inloop_decoded_picture = frame->frame_id; - } else { - LOG_V( "Setting in loop decoded picture to invalid\n"); - pic_params->inloop_decoded_picture = VA_INVALID_SURFACE; - } - //Libva buffer set up - vadisplay = this->va_display; - vacontext = this->va_context; - - LOG_V( "Creating libva picture parameter buffer\n"); - - //First the picture parameter buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAPictureParameterBufferType, - sizeof(VAPictureParameterBufferVC1), - 1, - pic_params, - &buffer_ids[buffer_id_cnt]); - - buffer_id_cnt++; - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - LOG_V( "Creating libva bitplane buffer\n"); - - if (pic_params->bitplane_present.value) { - //Then the bitplane buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VABitPlaneBufferType, - pic_data->size_bitplanes, - 1, - pic_data->packed_bitplanes, - &buffer_ids[buffer_id_cnt]); - buffer_id_cnt++; - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - } - - //Now for slices - for (uint32 i = 0; i < pic_data->num_slices; i++) { - LOG_V( "Creating libva slice parameter buffer, for slice %d\n", i); - - //Do slice parameters - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceParameterBufferType, - sizeof(VASliceParameterBufferVC1), - 1, - &(pic_data->slc_data[i].slc_parms), - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - - buffer_id_cnt++; - - LOG_V( "Creating libva slice data buffer for slice %d, using slice address %x, with offset %d and size %u\n", i, (guint)pic_data->slc_data[i].buffer_addr, pic_data->slc_data[i].slc_parms.slice_data_offset, pic_data->slc_data[i].slice_size); - - - //Do slice data - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceDataBufferType, - //size - pic_data->slc_data[i].slice_size, - //num_elements - 1, - //slice data buffer pointer - //Note that this is the original data buffer ptr; - // offset to the actual slice data is provided in - // slice_data_offset in VASliceParameterBufferVC1 - pic_data->slc_data[i].buffer_addr + pic_data->slc_data[i].slice_offset, - &buffer_ids[buffer_id_cnt]); - - buffer_id_cnt++; - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - } - - - LOG_V( "Calling vaBeginPicture\n"); - - //Now we can begin the picture - vret = vaBeginPicture(vadisplay, vacontext, surface); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaBeginPicture\n"); - goto CLEAN_UP; - } - - LOG_V( "Calling vaRenderPicture\n"); - - //Render the picture - vret = vaRenderPicture( - vadisplay, - vacontext, - buffer_ids, - buffer_id_cnt); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaRenderPicture\n"); - goto CLEAN_UP; - } - - LOG_V( "Calling vaEndPicture\n"); - - //End picture - vret = vaEndPicture(vadisplay, vacontext); - - if (vret != VA_STATUS_SUCCESS) { - ret = 
MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaEndPicture\n"); - goto CLEAN_UP; - } + LOG_V( "mix_video vinfo: Frame type is P\n"); + break; + + case VC1_PTYPE_B: // B frame type + LOG_V( "B frame, forw ref %d, back ref %d\n", + (uint)this->reference_frames[0]->frame_id, + (uint)this->reference_frames[1]->frame_id); + + if (!this->haveBframes) {//We don't expect B frames and have not allocated a surface + // for the extra ref frame so this is an error + ret = MIX_RESULT_DROPFRAME; + LOG_E( "Unexpected B frame, cannot process\n"); + goto CLEAN_UP; + } + + pic_params->forward_reference_picture = this->reference_frames[0]->frame_id; + pic_params->backward_reference_picture = this->reference_frames[1]->frame_id; + + LOG_V( "B frame, surface ID %u, forw ref %d, back ref %d\n", + (uint)frame->frame_id, (uint)this->reference_frames[0]->frame_id, + (uint)this->reference_frames[1]->frame_id); + LOG_V( "mix_video vinfo: Frame type is B\n"); + break; + case VC1_PTYPE_BI: + pic_params->forward_reference_picture = VA_INVALID_SURFACE; + pic_params->backward_reference_picture = VA_INVALID_SURFACE; + LOG_V( "BI frame\n"); + LOG_V( "mix_video vinfo: Frame type is BI\n"); + break; + case VC1_PTYPE_SKIPPED: + //Will never happen here + break; + default: + LOG_V( "Hit default\n"); + break; + } + + //Loop filter handling + if (this->loopFilter) { + LOG_V( "Setting in loop decoded picture to current frame\n"); + LOG_V( "Double checking picparams inloop filter is %d\n", + pic_params->entrypoint_fields.bits.loopfilter); + pic_params->inloop_decoded_picture = frame->frame_id; + } else { + LOG_V( "Setting in loop decoded picture to invalid\n"); + pic_params->inloop_decoded_picture = VA_INVALID_SURFACE; + } + //Libva buffer set up + vadisplay = this->va_display; + vacontext = this->va_context; + + LOG_V( "Creating libva picture parameter buffer\n"); + + //First the picture parameter buffer + vret = vaCreateBuffer( + vadisplay, + vacontext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferVC1), + 1, + pic_params, + &buffer_ids[buffer_id_cnt]); + + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + LOG_V( "Creating libva bitplane buffer\n"); + + if (pic_params->bitplane_present.value) { + //Then the bitplane buffer + vret = vaCreateBuffer( + vadisplay, + vacontext, + VABitPlaneBufferType, + pic_data->size_bitplanes, + 1, + pic_data->packed_bitplanes, + &buffer_ids[buffer_id_cnt]); + buffer_id_cnt++; + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + } + + //Now for slices + for (uint32 i = 0; i < pic_data->num_slices; i++) { + LOG_V( "Creating libva slice parameter buffer, for slice %d\n", i); + + //Do slice parameters + vret = vaCreateBuffer( + vadisplay, + vacontext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferVC1), + 1, + &(pic_data->slc_data[i].slc_parms), + &buffer_ids[buffer_id_cnt]); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + + buffer_id_cnt++; + + LOG_V( "Creating libva slice data buffer for slice %d, using slice address %x, with offset %d and size %u\n", i, (uint)pic_data->slc_data[i].buffer_addr, pic_data->slc_data[i].slc_parms.slice_data_offset, pic_data->slc_data[i].slice_size); + + + //Do slice data + vret = vaCreateBuffer( + vadisplay, + vacontext, 
+ VASliceDataBufferType, + //size + pic_data->slc_data[i].slice_size, + //num_elements + 1, + //slice data buffer pointer + //Note that this is the original data buffer ptr; + // offset to the actual slice data is provided in + // slice_data_offset in VASliceParameterBufferVC1 + pic_data->slc_data[i].buffer_addr + pic_data->slc_data[i].slice_offset, + &buffer_ids[buffer_id_cnt]); + + buffer_id_cnt++; + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaCreateBuffer\n"); + goto CLEAN_UP; + } + } + + + LOG_V( "Calling vaBeginPicture\n"); + + //Now we can begin the picture + vret = vaBeginPicture(vadisplay, vacontext, surface); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaBeginPicture\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling vaRenderPicture\n"); + + //Render the picture + vret = vaRenderPicture( + vadisplay, + vacontext, + buffer_ids, + buffer_id_cnt); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaRenderPicture\n"); + goto CLEAN_UP; + } + + LOG_V( "Calling vaEndPicture\n"); + + //End picture + vret = vaEndPicture(vadisplay, vacontext); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaEndPicture\n"); + goto CLEAN_UP; + } #if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ - LOG_V( "Calling vaSyncSurface\n"); + LOG_V( "Calling vaSyncSurface\n"); - //Decode the picture - vret = vaSyncSurface(vadisplay, surface); + //Decode the picture + vret = vaSyncSurface(vadisplay, surface); - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaSyncSurface\n"); - goto CLEAN_UP; - } + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaSyncSurface\n"); + goto CLEAN_UP; + } #endif CLEAN_UP: - if (NULL != buffer_ids) - g_free(buffer_ids); - return ret; + if (NULL != buffer_ids) + free(buffer_ids); + return ret; } MIX_RESULT MixVideoFormat_VC1::Flush() { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V( "Begin\n"); - uint32 pret = 0; - MixInputBufferEntry *bufentry = NULL; - /* Chainup parent method. - We are not chaining up to parent method for now. - */ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V( "Begin\n"); + uint32 pret = 0; + MixInputBufferEntry *bufentry = NULL; + /* Chainup parent method. + We are not chaining up to parent method for now. 
+ */ #if 0 - if (parent_class->flush) - { - return parent_class->flush(mix, msg); - } + if (parent_class->flush) + { + return parent_class->flush(mix, msg); + } #endif - Lock(); - - //Clear the contents of inputbufqueue - while (!g_queue_is_empty(this->inputbufqueue)) { - bufentry = (MixInputBufferEntry *) g_queue_pop_head(this->inputbufqueue); - if (bufentry == NULL) - continue; - mix_buffer_unref(bufentry->buf); - g_free(bufentry); - } - - //Clear parse_in_progress flag and current timestamp - this->parse_in_progress = FALSE; - this->discontinuity_frame_in_progress = FALSE; - this->current_timestamp = (guint64)-1; - - int i = 0; - for (; i < 2; i++) { - if (this->reference_frames[i] != NULL) { - mix_videoframe_unref(this->reference_frames[i]); - this->reference_frames[i] = NULL; - } - } - - //Call parser flush - pret = vbp_flush(this->parser_handle); - if (pret != VBP_OK) - ret = MIX_RESULT_FAIL; - - Unlock(); - LOG_V( "End\n"); - return ret; + Lock(); + + //Clear the contents of inputbufqueue + while (!j_queue_is_empty(this->inputbufqueue)) { + bufentry = (MixInputBufferEntry *) j_queue_pop_head(this->inputbufqueue); + if (bufentry == NULL) + continue; + mix_buffer_unref(bufentry->buf); + free(bufentry); + } + + //Clear parse_in_progress flag and current timestamp + this->parse_in_progress = FALSE; + this->discontinuity_frame_in_progress = FALSE; + this->current_timestamp = (uint64)-1; + + int i = 0; + for (; i < 2; i++) { + if (this->reference_frames[i] != NULL) { + mix_videoframe_unref(this->reference_frames[i]); + this->reference_frames[i] = NULL; + } + } + + //Call parser flush + pret = vbp_flush(this->parser_handle); + if (pret != VBP_OK) + ret = MIX_RESULT_FAIL; + + Unlock(); + LOG_V( "End\n"); + return ret; } MIX_RESULT MixVideoFormat_VC1::EndOfStream() { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - vbp_data_vc1 *data = NULL; - uint32 pret = 0; - LOG_V( "Begin\n"); - /* Chainup parent method. - We are not chaining up to parent method for now. - */ + MIX_RESULT ret = MIX_RESULT_SUCCESS; + vbp_data_vc1 *data = NULL; + uint32 pret = 0; + LOG_V( "Begin\n"); + /* Chainup parent method. + We are not chaining up to parent method for now. 
+ */ #if 0 - if (parent_class->eos) - { - return parent_class->eos(mix, msg); - } + if (parent_class->eos) + { + return parent_class->eos(mix, msg); + } #endif - Lock(); - //if a frame is in progress, process the frame - if (this->parse_in_progress) { - //query for data - pret = vbp_query(this->parser_handle, (void **) &data); - if ((pret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error getting last parse data\n"); - goto CLEAN_UP; - } - - //process and decode data - ret = _process_decode(data, this->current_timestamp, this->discontinuity_frame_in_progress); - this->parse_in_progress = FALSE; - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error processing last frame\n"); - goto CLEAN_UP; - } - } + Lock(); + //if a frame is in progress, process the frame + if (this->parse_in_progress) { + //query for data + pret = vbp_query(this->parser_handle, (void **) &data); + if ((pret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error getting last parse data\n"); + goto CLEAN_UP; + } + + //process and decode data + ret = _process_decode(data, this->current_timestamp, this->discontinuity_frame_in_progress); + this->parse_in_progress = FALSE; + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error processing last frame\n"); + goto CLEAN_UP; + } + } CLEAN_UP: - Unlock(); - ret = mix_framemanager_eos(this->framemgr); - LOG_V( "End\n"); - return ret; + Unlock(); + ret = mix_framemanager_eos(this->framemgr); + LOG_V( "End\n"); + return ret; } MIX_RESULT MixVideoFormat_VC1::_handle_ref_frames( - enum _picture_type frame_type, MixVideoFrame * current_frame) { - LOG_V( "Begin\n"); - if (NULL == current_frame) { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - switch (frame_type) { - case VC1_PTYPE_I: // I frame type - case VC1_PTYPE_P: // P frame type - LOG_V( "Refing reference frame %x\n", (guint) current_frame); - mix_videoframe_ref(current_frame); - - //If we have B frames, we need to keep forward and backward reference frames - if (this->haveBframes) { - if (this->reference_frames[0] == NULL) { //should only happen on first frame - this->reference_frames[0] = current_frame; - //this->reference_frames[1] = NULL; - } else if (this->reference_frames[1] == NULL) {//should only happen on second frame - this->reference_frames[1] = current_frame; - } else { - LOG_V( "Releasing reference frame %x\n", (guint) this->reference_frames[0]); - mix_videoframe_unref(this->reference_frames[0]); - this->reference_frames[0] = this->reference_frames[1]; - this->reference_frames[1] = current_frame; - } - }else {//No B frames in this content, only need to keep the forward reference frame - LOG_V( "Releasing reference frame %x\n", (guint) this->reference_frames[0]); - if (this->reference_frames[0] != NULL) - mix_videoframe_unref(this->reference_frames[0]); - this->reference_frames[0] = current_frame; - } - break; - case VC1_PTYPE_B: // B or BI frame type (should not happen) - case VC1_PTYPE_BI: - default: - LOG_E( "Wrong frame type for handling reference frames\n"); - return MIX_RESULT_FAIL; - break; - - } - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; + enum _picture_type frame_type, MixVideoFrame * current_frame) { + LOG_V( "Begin\n"); + if (NULL == current_frame) { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + switch (frame_type) { + case VC1_PTYPE_I: // I frame type + case VC1_PTYPE_P: // P frame type + LOG_V( "Refing reference frame %x\n", (uint) current_frame); + mix_videoframe_ref(current_frame); + + //If we have B frames, we need 
to keep forward and backward reference frames + if (this->haveBframes) { + if (this->reference_frames[0] == NULL) { //should only happen on first frame + this->reference_frames[0] = current_frame; + //this->reference_frames[1] = NULL; + } else if (this->reference_frames[1] == NULL) {//should only happen on second frame + this->reference_frames[1] = current_frame; + } else { + LOG_V( "Releasing reference frame %x\n", (uint) this->reference_frames[0]); + mix_videoframe_unref(this->reference_frames[0]); + this->reference_frames[0] = this->reference_frames[1]; + this->reference_frames[1] = current_frame; + } + } else {//No B frames in this content, only need to keep the forward reference frame + LOG_V( "Releasing reference frame %x\n", (uint) this->reference_frames[0]); + if (this->reference_frames[0] != NULL) + mix_videoframe_unref(this->reference_frames[0]); + this->reference_frames[0] = current_frame; + } + break; + case VC1_PTYPE_B: // B or BI frame type (should not happen) + case VC1_PTYPE_BI: + default: + LOG_E( "Wrong frame type for handling reference frames\n"); + return MIX_RESULT_FAIL; + break; + + } + LOG_V( "End\n"); + return MIX_RESULT_SUCCESS; } MIX_RESULT MixVideoFormat_VC1::_process_decode( - vbp_data_vc1 *data, guint64 timestamp, gboolean discontinuity) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - gboolean unrefVideoFrame = FALSE; - MixVideoFrame *frame = NULL; - int num_pictures = 0; - enum _picture_type frame_type = VC1_PTYPE_I; - - //TODO Partition this method into smaller methods - LOG_V( "Begin\n"); - if (NULL == data) { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - if (0 == data->num_pictures || NULL == data->pic_data) { - return MIX_RESULT_INVALID_PARAM; - } - - //Check for skipped frame - //For skipped frames, we will reuse the last P or I frame surface and treat as P frame - if (data->pic_data[0].picture_is_skipped == VC1_PTYPE_SKIPPED) { - LOG_V( "mix_video vinfo: Frame type is SKIPPED\n"); - if (this->lastFrame == NULL) { - //we shouldn't get a skipped frame before we are able to get a real frame - LOG_E( "Error for skipped frame, prev frame is NULL\n"); - ret = MIX_RESULT_DROPFRAME; - goto CLEAN_UP; - } - - //We don't worry about this memory allocation because SKIPPED is not a common case - //Doing the allocation on the fly is a more efficient choice than trying to manage yet another pool - MixVideoFrame *skip_frame = mix_videoframe_new(); - if (skip_frame == NULL) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating new video frame object for skipped frame\n"); - goto CLEAN_UP; - } - - mix_videoframe_set_is_skipped(skip_frame, TRUE); - //mix_videoframe_ref(skip_frame); - mix_videoframe_ref(this->lastFrame); - gulong frameid = VA_INVALID_SURFACE; - mix_videoframe_get_frame_id(this->lastFrame, &frameid); - mix_videoframe_set_frame_id(skip_frame, frameid); - mix_videoframe_set_frame_type(skip_frame, (MixFrameType)VC1_PTYPE_P); - mix_videoframe_set_real_frame(skip_frame, this->lastFrame); - mix_videoframe_set_timestamp(skip_frame, timestamp); - mix_videoframe_set_discontinuity(skip_frame, FALSE); - LOG_V( "Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n", - (guint)skip_frame, (guint)frameid, timestamp); - //Process reference frames - LOG_V( "Updating skipped frame forward/backward references for libva\n"); - _handle_ref_frames(VC1_PTYPE_P, skip_frame); - //Enqueue the skipped frame using frame manager - ret = mix_framemanager_enqueue(this->framemgr, skip_frame); - goto CLEAN_UP; - } - - ret = 
mix_surfacepool_get(this->surfacepool, &frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting frame from surfacepool\n"); - goto CLEAN_UP; - } - unrefVideoFrame = TRUE; - - // TO DO: handle multiple frames parsed from a sample buffer - num_pictures = (data->num_pictures > 1) ? 2 : 1; - for (int index = 0; index < num_pictures; index++) { - ret = _decode_a_picture(data, index, frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Failed to decode a picture.\n"); - goto CLEAN_UP; - } - } - - //Set the discontinuity flag - mix_videoframe_set_discontinuity(frame, discontinuity); - - //Set the timestamp - mix_videoframe_set_timestamp(frame, timestamp); - - // setup frame structure - if (data->num_pictures > 1) { - if (data->pic_data[0].pic_parms->picture_fields.bits.is_first_field) - mix_videoframe_set_frame_structure(frame, VA_TOP_FIELD); - else - mix_videoframe_set_frame_structure(frame, VA_BOTTOM_FIELD); - } else { - mix_videoframe_set_frame_structure(frame, VA_FRAME_PICTURE); - } - - frame_type = (_picture_type)data->pic_data[0].pic_parms->picture_fields.bits.picture_type; - - //For I or P frames - //Save this frame off for skipped frame handling - if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P)) { - if (this->lastFrame != NULL) { - mix_videoframe_unref(this->lastFrame); - } - this->lastFrame = frame; - mix_videoframe_ref(frame); - } - - //Update the references frames for the current frame - if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P)) {//If I or P frame, update the reference array - LOG_V( "Updating forward/backward references for libva\n"); - _handle_ref_frames(frame_type, frame); - } + vbp_data_vc1 *data, uint64 timestamp, bool discontinuity) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + bool unrefVideoFrame = FALSE; + MixVideoFrame *frame = NULL; + int num_pictures = 0; + enum _picture_type frame_type = VC1_PTYPE_I; + + //TODO Partition this method into smaller methods + LOG_V( "Begin\n"); + if (NULL == data) { + LOG_E( "Null pointer passed in\n"); + return MIX_RESULT_NULL_PTR; + } + + if (0 == data->num_pictures || NULL == data->pic_data) { + return MIX_RESULT_INVALID_PARAM; + } + + //Check for skipped frame + //For skipped frames, we will reuse the last P or I frame surface and treat as P frame + if (data->pic_data[0].picture_is_skipped == VC1_PTYPE_SKIPPED) { + LOG_V( "mix_video vinfo: Frame type is SKIPPED\n"); + if (this->lastFrame == NULL) { + //we shouldn't get a skipped frame before we are able to get a real frame + LOG_E( "Error for skipped frame, prev frame is NULL\n"); + ret = MIX_RESULT_DROPFRAME; + goto CLEAN_UP; + } + + //We don't worry about this memory allocation because SKIPPED is not a common case + //Doing the allocation on the fly is a more efficient choice than trying to manage yet another pool + MixVideoFrame *skip_frame = mix_videoframe_new(); + if (skip_frame == NULL) { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "Error allocating new video frame object for skipped frame\n"); + goto CLEAN_UP; + } + + mix_videoframe_set_is_skipped(skip_frame, TRUE); + //mix_videoframe_ref(skip_frame); + mix_videoframe_ref(this->lastFrame); + ulong frameid = VA_INVALID_SURFACE; + mix_videoframe_get_frame_id(this->lastFrame, &frameid); + mix_videoframe_set_frame_id(skip_frame, frameid); + mix_videoframe_set_frame_type(skip_frame, (MixFrameType)VC1_PTYPE_P); + mix_videoframe_set_real_frame(skip_frame, this->lastFrame); + mix_videoframe_set_timestamp(skip_frame, timestamp); + mix_videoframe_set_discontinuity(skip_frame, FALSE); + LOG_V( 
"Processing skipped frame %x, frame_id set to %d, ts %"UINT64_FORMAT"\n", + (uint)skip_frame, (uint)frameid, timestamp); + //Process reference frames + LOG_V( "Updating skipped frame forward/backward references for libva\n"); + _handle_ref_frames(VC1_PTYPE_P, skip_frame); + //Enqueue the skipped frame using frame manager + ret = mix_framemanager_enqueue(this->framemgr, skip_frame); + goto CLEAN_UP; + } + + ret = mix_surfacepool_get(this->surfacepool, &frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error getting frame from surfacepool\n"); + goto CLEAN_UP; + } + unrefVideoFrame = TRUE; + + // TO DO: handle multiple frames parsed from a sample buffer + num_pictures = (data->num_pictures > 1) ? 2 : 1; + for (int index = 0; index < num_pictures; index++) { + ret = _decode_a_picture(data, index, frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Failed to decode a picture.\n"); + goto CLEAN_UP; + } + } + + //Set the discontinuity flag + mix_videoframe_set_discontinuity(frame, discontinuity); + + //Set the timestamp + mix_videoframe_set_timestamp(frame, timestamp); + + // setup frame structure + if (data->num_pictures > 1) { + if (data->pic_data[0].pic_parms->picture_fields.bits.is_first_field) + mix_videoframe_set_frame_structure(frame, VA_TOP_FIELD); + else + mix_videoframe_set_frame_structure(frame, VA_BOTTOM_FIELD); + } else { + mix_videoframe_set_frame_structure(frame, VA_FRAME_PICTURE); + } + + frame_type = (_picture_type)data->pic_data[0].pic_parms->picture_fields.bits.picture_type; + + //For I or P frames + //Save this frame off for skipped frame handling + if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P)) { + if (this->lastFrame != NULL) { + mix_videoframe_unref(this->lastFrame); + } + this->lastFrame = frame; + mix_videoframe_ref(frame); + } + + //Update the references frames for the current frame + if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P)) {//If I or P frame, update the reference array + LOG_V( "Updating forward/backward references for libva\n"); + _handle_ref_frames(frame_type, frame); + } //TODO Complete YUVDUMP code and move into base class #ifdef YUVDUMP - if (mix_video_vc1_counter < 10) - ret = _get_Img_from_surface(frame); - //g_usleep(5000000); + if (mix_video_vc1_counter < 10) + ret = _get_Img_from_surface(frame); + //g_usleep(5000000); #endif /* YUVDUMP */ - LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", timestamp); + LOG_V( "Enqueueing the frame with frame manager, timestamp %"UINT64_FORMAT"\n", timestamp); + + //Enqueue the decoded frame using frame manager + ret = mix_framemanager_enqueue(this->framemgr, frame); - //Enqueue the decoded frame using frame manager - ret = mix_framemanager_enqueue(this->framemgr, frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error enqueuing frame object\n"); + goto CLEAN_UP; + } + unrefVideoFrame = FALSE; - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error enqueuing frame object\n"); - goto CLEAN_UP; - } - unrefVideoFrame = FALSE; - CLEAN_UP: - _release_input_buffers(timestamp); - if (unrefVideoFrame) - mix_videoframe_unref(frame); - LOG_V( "End\n"); - return ret; + _release_input_buffers(timestamp); + if (unrefVideoFrame) + mix_videoframe_unref(frame); + LOG_V( "End\n"); + return ret; } -MIX_RESULT MixVideoFormat_VC1::_release_input_buffers(guint64 timestamp) { - MixInputBufferEntry *bufentry = NULL; - gboolean done = FALSE; - LOG_V( "Begin\n"); - - //Dequeue and release all input buffers for this frame - LOG_V( "Releasing all the MixBuffers for this 
frame\n"); - //While the head of the queue has timestamp == current ts - //dequeue the entry, unref the MixBuffer, and free the struct - done = FALSE; - while (!done) { - bufentry = (MixInputBufferEntry *) g_queue_peek_head(this->inputbufqueue); - if (bufentry == NULL) - break; - LOG_V( "head of queue buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", - (guint)bufentry->buf, timestamp, bufentry->timestamp); - if (bufentry->timestamp != timestamp) { - LOG_V( "buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", - (guint)bufentry->buf, timestamp, bufentry->timestamp); - done = TRUE; - break; - } - bufentry = (MixInputBufferEntry *) g_queue_pop_head(this->inputbufqueue); - LOG_V( "Unref this MixBuffers %x\n", (guint)bufentry->buf); - mix_buffer_unref(bufentry->buf); - g_free(bufentry); - } - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; +MIX_RESULT MixVideoFormat_VC1::_release_input_buffers(uint64 timestamp) { + MixInputBufferEntry *bufentry = NULL; + bool done = FALSE; + LOG_V( "Begin\n"); + + //Dequeue and release all input buffers for this frame + LOG_V( "Releasing all the MixBuffers for this frame\n"); + //While the head of the queue has timestamp == current ts + //dequeue the entry, unref the MixBuffer, and free the struct + done = FALSE; + while (!done) { + bufentry = (MixInputBufferEntry *) j_queue_peek_head(this->inputbufqueue); + if (bufentry == NULL) + break; + LOG_V( "head of queue buf %x, timestamp %"UINT64_FORMAT", buffer timestamp %"UINT64_FORMAT"\n", + (uint)bufentry->buf, timestamp, bufentry->timestamp); + if (bufentry->timestamp != timestamp) { + LOG_V( "buf %x, timestamp %"UINT64_FORMAT", buffer timestamp %"UINT64_FORMAT"\n", + (uint)bufentry->buf, timestamp, bufentry->timestamp); + done = TRUE; + break; + } + bufentry = (MixInputBufferEntry *) j_queue_pop_head(this->inputbufqueue); + LOG_V( "Unref this MixBuffers %x\n", (uint)bufentry->buf); + mix_buffer_unref(bufentry->buf); + free(bufentry); + } + LOG_V( "End\n"); + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoformat_vc1.h b/mix_video/src/mixvideoformat_vc1.h index 2171e00..8ec0eea 100644 --- a/mix_video/src/mixvideoformat_vc1.h +++ b/mix_video/src/mixvideoformat_vc1.h @@ -24,45 +24,45 @@ class MixVideoFormat_VC1 : public MixVideoFormat { public: - MixVideoFormat_VC1(); - virtual ~MixVideoFormat_VC1(); - - virtual MIX_RESULT Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); - virtual MIX_RESULT Decode( - MixBuffer * bufin[], gint bufincnt, - MixVideoDecodeParams * decode_params); - virtual MIX_RESULT Flush(); - virtual MIX_RESULT EndOfStream(); + MixVideoFormat_VC1(); + virtual ~MixVideoFormat_VC1(); + + virtual MIX_RESULT Initialize( + MixVideoConfigParamsDec * config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + VADisplay va_display); + virtual MIX_RESULT Decode( + MixBuffer * bufin[], int bufincnt, + MixVideoDecodeParams * decode_params); + virtual MIX_RESULT Flush(); + virtual MIX_RESULT EndOfStream(); private: - MIX_RESULT _handle_ref_frames( - enum _picture_type frame_type, MixVideoFrame * current_frame); - MIX_RESULT _process_decode( - vbp_data_vc1 *data, guint64 timestamp, gboolean discontinuity); - MIX_RESULT _release_input_buffers(guint64 timestamp); - MIX_RESULT _update_seq_header( - MixVideoConfigParamsDec* config_params, MixIOVec *header); - 
MIX_RESULT _update_config_params(vbp_data_vc1 *data); - MIX_RESULT _decode_a_picture( - vbp_data_vc1 *data, int pic_index, MixVideoFrame *frame); + MIX_RESULT _handle_ref_frames( + enum _picture_type frame_type, MixVideoFrame * current_frame); + MIX_RESULT _process_decode( + vbp_data_vc1 *data, uint64 timestamp, bool discontinuity); + MIX_RESULT _release_input_buffers(uint64 timestamp); + MIX_RESULT _update_seq_header( + MixVideoConfigParamsDec* config_params, MixIOVec *header); + MIX_RESULT _update_config_params(vbp_data_vc1 *data); + MIX_RESULT _decode_a_picture( + vbp_data_vc1 *data, int pic_index, MixVideoFrame *frame); #ifdef YUVDUMP - MIX_RESULT _get_Img_from_surface (MixVideoFrame * frame); + MIX_RESULT _get_Img_from_surface (MixVideoFrame * frame); #endif public: - /*< public > */ + /*< public > */ - /*< private > */ - MixVideoFrame * reference_frames[2]; - gboolean haveBframes; - gboolean loopFilter; - MixVideoFrame * lastFrame; + /*< private > */ + MixVideoFrame * reference_frames[2]; + bool haveBframes; + bool loopFilter; + MixVideoFrame * lastFrame; }; /** diff --git a/mix_video/src/mixvideoformatenc.cpp b/mix_video/src/mixvideoformatenc.cpp index f76a39d..b8e1e30 100644 --- a/mix_video/src/mixvideoformatenc.cpp +++ b/mix_video/src/mixvideoformatenc.cpp @@ -5,880 +5,786 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#include <glib.h> +#include <string.h> #include "mixvideolog.h" #include "mixvideoformatenc.h" //#define MDEBUG -/* Default vmethods implementation */ -static MIX_RESULT mix_videofmtenc_getcaps_default(MixVideoFormatEnc *mix, - GString *msg); -static MIX_RESULT mix_videofmtenc_initialize_default(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay vadisplay); - -static MIX_RESULT -mix_videofmtenc_encode_default(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params); -static MIX_RESULT mix_videofmtenc_flush_default(MixVideoFormatEnc *mix); -static MIX_RESULT mix_videofmtenc_eos_default(MixVideoFormatEnc *mix); -static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix); -static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default( - MixVideoFormatEnc *mix, guint *max_size); -MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixEncParamsType params_type); - - -static GObjectClass *parent_class = NULL; - -static void mix_videoformatenc_finalize(GObject * obj); -G_DEFINE_TYPE (MixVideoFormatEnc, mix_videoformatenc, G_TYPE_OBJECT); - -static void mix_videoformatenc_init(MixVideoFormatEnc * self) { - /* TODO: public member initialization */ - - /* TODO: private member initialization */ - - self->objectlock = g_mutex_new(); - - self->initialized = FALSE; - self->framemgr = NULL; - self->surfacepool = NULL; - self->inputbufpool = NULL; - self->inputbufqueue = NULL; - self->va_display = NULL; - self->va_context = 0; - self->va_config = 0; - self->mime_type = NULL; - self->frame_rate_num= 0; - self->frame_rate_denom = 1; - self->picture_width = 0; - self->picture_height = 0; - - /*
- * bitrate control - */ - self->initial_qp = 0; - self->min_qp = 0; - self->target_percentage = 95; - self->window_size = 500; - self->bitrate = 0; - - self->intra_period = 0; - self->share_buf_mode = FALSE; - self->ci_frame_id = NULL; - self->ci_frame_num = 0; - self->drawable = 0x0; - self->need_display = TRUE; - - self->va_rcmode = VA_RC_NONE; - self->va_format = VA_RT_FORMAT_YUV420; - self->va_entrypoint = VAEntrypointEncSlice; - self->va_profile = VAProfileH264Baseline; - self->level = 30; - - self->refresh_type = MIX_VIDEO_NONIR; - self->CIR_frame_cnt = 15; //default value - - /* - * Parameters for AIR intra refresh mode - */ - self->air_params.air_MBs = 0; - self->air_params.air_threshold = 0; - self->air_params.air_auto = 0; - - self->max_slice_size = 0; - - self->force_key_frame = FALSE; - self->new_header_required = FALSE; - self->render_mss_required = FALSE; - self->render_QP_required = FALSE; - self->render_AIR_required = FALSE; - self->render_framerate_required = FALSE; - self->render_bitrate_required = FALSE; - - //add more properties here +MixVideoFormatEnc::MixVideoFormatEnc() + :mLock() + ,initialized(FALSE) + ,framemgr(NULL) + ,surfacepool(NULL) + ,va_display(NULL) + ,va_context(0) + ,va_config(0) + ,mime_type(NULL) + ,frame_rate_num(0) + ,frame_rate_denom(1) + ,picture_width(0) + ,picture_height(0) + ,intra_period(0) + ,initial_qp(0) + ,min_qp(0) + ,bitrate(0) + ,target_percentage(95) + ,window_size(500) + ,share_buf_mode(FALSE) + ,ci_frame_id(NULL) + ,ci_frame_num(0) + ,force_key_frame(FALSE) + ,new_header_required(FALSE) + ,refresh_type(MIX_VIDEO_NONIR) + ,CIR_frame_cnt(15) + ,max_slice_size(0) + ,render_mss_required(FALSE) + ,render_QP_required (FALSE) + ,render_AIR_required(FALSE) + ,render_framerate_required(FALSE) + ,render_bitrate_required(FALSE) + ,drawable(0X0) + ,need_display(TRUE) + ,va_profile(VAProfileH264Baseline) + ,va_entrypoint(VAEntrypointEncSlice) + ,va_format(VA_RT_FORMAT_YUV420) + ,va_rcmode(VA_RC_NONE) + ,level(40) + ,buffer_mode(MIX_BUFFER_ALLOC_NORMAL) + ,buf_info(NULL) + ,inputbufpool(NULL) + ,inputbufqueue(NULL) + ,ref_count(1) { + air_params.air_MBs = 0; + air_params.air_threshold = 0; + air_params.air_auto = 0; } -static void mix_videoformatenc_class_init(MixVideoFormatEncClass * klass) { - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* parent class for later use */ - parent_class = reinterpret_cast<GObjectClass *>(g_type_class_peek_parent(klass)); - - gobject_class->finalize = mix_videoformatenc_finalize; - - /* setup vmethods with base implementation */ - klass->getcaps = mix_videofmtenc_getcaps_default; - klass->initialize = mix_videofmtenc_initialize_default; - klass->encode = mix_videofmtenc_encode_default; - klass->flush = mix_videofmtenc_flush_default; - klass->eos = mix_videofmtenc_eos_default; - klass->deinitialize = mix_videofmtenc_deinitialize_default; - klass->getmaxencodedbufsize = mix_videofmtenc_get_max_coded_buffer_size_default; - klass->set_dynamic_config = mix_videofmtenc_set_dynamic_enc_config_default; -} - -MixVideoFormatEnc * -mix_videoformatenc_new(void) { - MixVideoFormatEnc *ret = reinterpret_cast<MixVideoFormatEnc *>(g_object_new(MIX_TYPE_VIDEOFORMATENC, NULL)); - - return ret; -} - -void mix_videoformatenc_finalize(GObject * obj) { - /* clean up here.
*/ - - if (obj == NULL) { - LOG_E( "obj == NULL\n"); - return; - } - - MixVideoFormatEnc *mix = MIX_VIDEOFORMATENC(obj); - +MixVideoFormatEnc::~MixVideoFormatEnc() { LOG_V( "\n"); - - if(mix->objectlock) { - g_mutex_free(mix->objectlock); - mix->objectlock = NULL; + //MiVideo object calls the _deinitialize() for frame manager + if (this->framemgr) { + mix_framemanager_unref(this->framemgr); + this->framemgr = NULL; } - //MiVideo object calls the _deinitialize() for frame manager - if (mix->framemgr) - { - mix_framemanager_unref(mix->framemgr); - mix->framemgr = NULL; - } - - if (mix->mime_type) - { - if (mix->mime_type->str) - g_string_free(mix->mime_type, TRUE); - else - g_string_free(mix->mime_type, FALSE); + if (this->mime_type) { + free(this->mime_type); } - if (mix->ci_frame_id) - g_free (mix->ci_frame_id); - + if (this->ci_frame_id) + free (this->ci_frame_id); - if (mix->surfacepool) - { - mix_surfacepool_deinitialize(mix->surfacepool); - mix_surfacepool_unref(mix->surfacepool); - mix->surfacepool = NULL; + if (this->surfacepool) { + mix_surfacepool_deinitialize(this->surfacepool); + mix_surfacepool_unref(this->surfacepool); + this->surfacepool = NULL; } + if (this->buffer_mode == MIX_BUFFER_UPSTREAM_ALLOC_CI) { + MixCISharedBufferInfo * ci_tmp = NULL; + if (this->buf_info) { + ci_tmp = reinterpret_cast<MixCISharedBufferInfo *> (this->buf_info); + if (ci_tmp->ci_frame_id) { + free (ci_tmp->ci_frame_id); + ci_tmp->ci_frame_id = NULL; + } + free (ci_tmp); + ci_tmp = NULL; + this->buf_info = NULL; + } + } +} - /* TODO: cleanup here */ - /* Chain up parent */ - if (parent_class->finalize) { - parent_class->finalize(obj); - } +MixVideoFormatEnc * +mix_videoformatenc_new(void) { + return new MixVideoFormatEnc(); } + MixVideoFormatEnc * mix_videoformatenc_ref(MixVideoFormatEnc * mix) { - return (MixVideoFormatEnc *) g_object_ref(G_OBJECT(mix)); + if (NULL != mix) + return mix->Ref(); + else + return NULL; +} + +MixVideoFormatEnc * +mix_videoformatenc_unref(MixVideoFormatEnc * mix) { + if (NULL != mix) + return mix->Unref(); + else + return NULL; } -/* Default vmethods implementation */ -static MIX_RESULT mix_videofmtenc_getcaps_default(MixVideoFormatEnc *mix, - GString *msg) { +MIX_RESULT +MixVideoFormatEnc::GetCaps(char *msg) { LOG_V( "Begin\n"); return MIX_RESULT_SUCCESS; } -static MIX_RESULT mix_videofmtenc_initialize_default(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { +MIX_RESULT +MixVideoFormatEnc::Initialize( + MixVideoConfigParamsEnc* config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + MixUsrReqSurfacesInfo * requested_surface_info, + VADisplay va_display) { LOG_V( "Begin\n"); - if (mix == NULL ||config_params_enc == NULL) { - LOG_E( - "!mix || config_params_enc == NULL\n"); + if (config_params_enc == NULL) { + LOG_E("config_params_enc == NULL\n"); return MIX_RESULT_NULL_PTR; } - MIX_RESULT ret = MIX_RESULT_SUCCESS; + //TODO check return values of getter fns for config_params - //TODO check return values of getter fns for config_params - - g_mutex_lock(mix->objectlock); + this->Lock(); + this->framemgr = frame_mgr; + mix_framemanager_ref(this->framemgr); - mix->framemgr = frame_mgr; - mix_framemanager_ref(mix->framemgr); + this->va_display = va_display; - mix->va_display = va_display; - - LOG_V( - "Start to get properties from parent params\n"); + LOG_V("Start to get properties from parent
params\n"); /* get properties from param (parent) Object*/ - ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc, - &(mix->bitrate)); + ret = mix_videoconfigparamsenc_get_bit_rate ( + config_params_enc, &(this->bitrate)); + if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_bps\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_bps\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_frame_rate (config_params_enc, - &(mix->frame_rate_num), &(mix->frame_rate_denom)); + ret = mix_videoconfigparamsenc_get_frame_rate ( + config_params_enc, &(this->frame_rate_num), &(this->frame_rate_denom)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_frame_rate\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_frame_rate\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, - &(mix->initial_qp)); + ret = mix_videoconfigparamsenc_get_init_qp ( + config_params_enc, &(this->initial_qp)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_init_qp\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_init_qp\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - - ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc, - &(mix->min_qp)); + ret = mix_videoconfigparamsenc_get_min_qp ( + config_params_enc, &(this->min_qp)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_min_qp\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_min_qp\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_target_percentage(config_params_enc, - &(mix->target_percentage)); + ret = mix_videoconfigparamsenc_get_target_percentage( + config_params_enc, &(this->target_percentage)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_target_percentage\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_target_percentage\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_window_size (config_params_enc, - &(mix->window_size)); + ret = mix_videoconfigparamsenc_get_window_size ( + config_params_enc, &(this->window_size)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_window_size\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_window_size\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc, - &(mix->intra_period)); + ret = mix_videoconfigparamsenc_get_intra_period ( + config_params_enc, &(this->intra_period)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_intra_period\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_intra_period\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc, - &(mix->picture_width), &(mix->picture_height)); + ret = mix_videoconfigparamsenc_get_picture_res ( + config_params_enc, &(this->picture_width), &(this->picture_height)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to 
mix_videoconfigparamsenc_get_picture_res\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_picture_res\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_share_buf_mode (config_params_enc, - &(mix->share_buf_mode)); + ret = mix_videoconfigparamsenc_get_share_buf_mode ( + config_params_enc, &(this->share_buf_mode)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_share_buf_mode\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_share_buf_mode\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_ci_frame_info (config_params_enc, - &(mix->ci_frame_id), &(mix->ci_frame_num)); + ret = mix_videoconfigparamsenc_get_ci_frame_info ( + config_params_enc, &(this->ci_frame_id), &(this->ci_frame_num)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_ci_frame_info\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_ci_frame_info\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_drawable (config_params_enc, - &(mix->drawable)); + /* + * temporarily code here for compatibility with old CI shared buffer solution + */ + + if (this->share_buf_mode) { + ret = mix_videoconfigparamsenc_set_buffer_mode (config_params_enc, MIX_BUFFER_UPSTREAM_ALLOC_CI); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E("Failed to mix_videoconfigparamsenc_set_buffer_mode\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } + } + + if (this->share_buf_mode && this->ci_frame_id && this->ci_frame_num) { + + MixCISharedBufferInfo * ci_tmp = NULL; + //ci_tmp = (MixCISharedBufferInfo *) g_malloc (sizeof (MixCISharedBufferInfo)); + ci_tmp = (MixCISharedBufferInfo *) new MixCISharedBufferInfo; + if (!ci_tmp) { + return MIX_RESULT_NO_MEMORY; + } + ci_tmp->ci_frame_cnt = this->ci_frame_num; + //ci_tmp->ci_frame_id = g_malloc (ci_tmp->ci_frame_cnt * sizeof (gulong)); + ci_tmp->ci_frame_id = new ulong [ci_tmp->ci_frame_cnt]; + if (!ci_tmp->ci_frame_id) { + return MIX_RESULT_NO_MEMORY; + } + + memcpy (ci_tmp->ci_frame_id, this->ci_frame_id, ci_tmp->ci_frame_cnt * sizeof (ulong)); + ret = mix_videoconfigparamsenc_set_upstream_buffer_info (config_params_enc, MIX_BUFFER_UPSTREAM_ALLOC_CI, (void*)ci_tmp); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + LOG_E("Failed to mix_videoconfigparamsenc_set_upstream_buffer_info\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } + + free (ci_tmp->ci_frame_id); + ci_tmp->ci_frame_id = NULL; + free (ci_tmp); + ci_tmp = NULL; + + } + + /* + * temporarily code done + */ + + ret = mix_videoconfigparamsenc_get_drawable ( + config_params_enc, &(this->drawable)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_drawable\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_drawable\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_need_display (config_params_enc, - &(mix->need_display)); + ret = mix_videoconfigparamsenc_get_need_display ( + config_params_enc, &(this->need_display)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_drawable\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_drawable\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = 
mix_videoconfigparamsenc_get_rate_control (config_params_enc, - (MixRateControl*)&(mix->va_rcmode)); + ret = mix_videoconfigparamsenc_get_rate_control ( + config_params_enc,(MixRateControl*)&(this->va_rcmode)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_rc_mode\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_rc_mode\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_raw_format (config_params_enc, - (MixRawTargetFormat*)&(mix->va_format)); + ret = mix_videoconfigparamsenc_get_raw_format ( + config_params_enc, &(this->raw_format)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_format\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_format\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_profile (config_params_enc, - (MixProfile *) &(mix->va_profile)); + ret = mix_videoconfigparamsenc_get_profile ( + config_params_enc, (MixProfile *) &(this->va_profile)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_profile\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_profile\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_level (config_params_enc, - &(mix->level)); + ret = mix_videoconfigparamsenc_get_level ( + config_params_enc, &(this->level)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_level\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_level\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_CIR_frame_cnt(config_params_enc, - &(mix->CIR_frame_cnt)); + ret = mix_videoconfigparamsenc_get_CIR_frame_cnt( + config_params_enc, &(this->CIR_frame_cnt)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_max_slice_size(config_params_enc, - &(mix->max_slice_size)); + ret = mix_videoconfigparamsenc_get_max_slice_size( + config_params_enc, &(this->max_slice_size)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_max_slice_size\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_max_slice_size\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - - ret = mix_videoconfigparamsenc_get_refresh_type(config_params_enc, - &(mix->refresh_type)); + ret = mix_videoconfigparamsenc_get_refresh_type( + config_params_enc, &(this->refresh_type)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_refresh_type\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to mix_videoconfigparamsenc_get_refresh_type\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - ret = mix_videoconfigparamsenc_get_AIR_params(config_params_enc, - &(mix->air_params)); + ret = mix_videoconfigparamsenc_get_AIR_params( + config_params_enc, &(this->air_params)); if (ret != MIX_RESULT_SUCCESS) { //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_AIR_params\n"); - g_mutex_unlock(mix->objectlock); + LOG_E("Failed to 
mix_videoconfigparamsenc_get_AIR_params\n"); + this->Unlock(); return MIX_RESULT_FAIL; } - LOG_V( - "======Video Encode Parent Object properities======:\n"); - - LOG_I( "mix->bitrate = %d\n", - mix->bitrate); - LOG_I( "mix->frame_rate = %d\n", - mix->frame_rate_denom / mix->frame_rate_denom); - LOG_I( "mix->initial_qp = %d\n", - mix->initial_qp); - LOG_I( "mix->min_qp = %d\n", - mix->min_qp); - LOG_I( "mix->intra_period = %d\n", - mix->intra_period); - LOG_I( "mix->picture_width = %d\n", - mix->picture_width); - LOG_I( "mix->picture_height = %d\n", - mix->picture_height); - LOG_I( "mix->share_buf_mode = %d\n", - mix->share_buf_mode); - LOG_I( "mix->ci_frame_id = 0x%08x\n", - mix->ci_frame_id); - LOG_I( "mix->ci_frame_num = %d\n", - mix->ci_frame_num); - LOG_I( "mix->drawable = 0x%08x\n", - mix->drawable); - LOG_I( "mix->need_display = %d\n", - mix->need_display); - LOG_I( "mix->va_format = %d\n", - mix->va_format); - LOG_I( "mix->va_profile = %d\n", - mix->va_profile); - LOG_I( "mix->va_rcmode = %d\n\n", - mix->va_rcmode); - LOG_I( "mix->CIR_frame_cnt = %d\n\n", - mix->CIR_frame_cnt); - LOG_I( "mix->max_slice_size = %d\n\n", - mix->max_slice_size); - - g_mutex_unlock(mix->objectlock); + ret = mix_videoconfigparamsenc_get_buffer_mode( + config_params_enc, &(this->buffer_mode)); + + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + LOG_E("Failed to mix_videoconfigparamsenc_get_buffer_mode\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } + + if (this->buffer_mode == MIX_BUFFER_UPSTREAM_ALLOC_CI) { + ret = mix_videoconfigparamsenc_get_upstream_buffer_info ( + config_params_enc, this->buffer_mode, &(this->buf_info)); + if (ret != MIX_RESULT_SUCCESS) { + LOG_V ("ret = %d\n", ret); + LOG_E("Failed to mix_videoconfigparamsenc_get_upstream_buffer_info\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } + } + + LOG_V("======Video Encode Parent Object properities======:\n"); + LOG_I( "mix->bitrate = %d\n", this->bitrate); + LOG_I( "mix->frame_rate = %d\n", this->frame_rate_denom / this->frame_rate_denom); + LOG_I( "mix->initial_qp = %d\n", this->initial_qp); + LOG_I( "mix->min_qp = %d\n", this->min_qp); + LOG_I( "mix->intra_period = %d\n", this->intra_period); + LOG_I( "mix->picture_width = %d\n", this->picture_width); + LOG_I( "mix->picture_height = %d\n", this->picture_height); + LOG_I( "mix->share_buf_mode = %d\n", this->share_buf_mode); + LOG_I( "mix->ci_frame_id = 0x%08x\n", this->ci_frame_id); + LOG_I( "mix->ci_frame_num = %d\n", this->ci_frame_num); + LOG_I( "mix->drawable = 0x%08x\n", this->drawable); + LOG_I( "mix->need_display = %d\n", this->need_display); + LOG_I( "mix->va_format = %d\n", this->va_format); + LOG_I( "mix->va_profile = %d\n", this->va_profile); + LOG_I( "mix->va_rcmode = %d\n\n", this->va_rcmode); + LOG_I( "mix->CIR_frame_cnt = %d\n\n", this->CIR_frame_cnt); + LOG_I( "mix->max_slice_size = %d\n\n", this->max_slice_size); + + //g_mutex_unlock(mix->objectlock); + this->Unlock(); LOG_V( "end\n"); return MIX_RESULT_SUCCESS; } -static MIX_RESULT mix_videofmtenc_encode_default (MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { +MIX_RESULT +MixVideoFormatEnc:: Encode( + MixBuffer * bufin[], int bufincnt, MixIOVec * iovout[], + int iovoutcnt, MixVideoEncodeParams * encode_params) { return MIX_RESULT_SUCCESS; } -static MIX_RESULT mix_videofmtenc_flush_default(MixVideoFormatEnc *mix) { + +MIX_RESULT +MixVideoFormatEnc::Flush() { return MIX_RESULT_SUCCESS; } -static MIX_RESULT 
mix_videofmtenc_eos_default(MixVideoFormatEnc *mix) { - return MIX_RESULT_SUCCESS; +MIX_RESULT +MixVideoFormatEnc::EndOfStream() { + return MIX_RESULT_SUCCESS; } -static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix) { - - //TODO decide whether to put any of the teardown from _finalize() here +MIX_RESULT MixVideoFormatEnc::Deinitialize() { + return MIX_RESULT_SUCCESS; +} - return MIX_RESULT_SUCCESS; +MIX_RESULT MixVideoFormatEnc::GetMaxEncodedBufSize (uint *max_size) { + return MIX_RESULT_SUCCESS; } -static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default( - MixVideoFormatEnc *mix, guint *max_size) { +MIX_RESULT MixVideoFormatEnc::SetDynamicEncConfig ( + MixVideoConfigParamsEnc * config_params_enc, + MixEncParamsType params_type) { - return MIX_RESULT_SUCCESS; -} + MIX_RESULT ret = MIX_RESULT_SUCCESS; -MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixEncParamsType params_type) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (mix == NULL ||config_params_enc == NULL) { - LOG_E( - "!mix || config_params_enc == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - - - g_mutex_lock(mix->objectlock); - - mix->new_header_required = FALSE; - - switch (params_type) { - case MIX_ENC_PARAMS_BITRATE: - { - ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc, &(mix->bitrate)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E( - "Failed to mix_videoconfigparamsenc_get_bit_rate\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_INIT_QP: - { - ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, &(mix->initial_qp)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_init_qp\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_MIN_QP: - { - ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc, &(mix->min_qp)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_min_qp\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_WINDOW_SIZE: - { - ret = mix_videoconfigparamsenc_get_window_size (config_params_enc, &(mix->window_size)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to MIX_ENC_PARAMS_WINDOW_SIZE\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_TARGET_PERCENTAGE: - { - ret = mix_videoconfigparamsenc_get_target_percentage (config_params_enc, &(mix->target_percentage)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to MIX_ENC_PARAMS_TARGET_PERCENTAGE\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_MTU_SLICE_SIZE: - { - ret = mix_videoconfigparamsenc_get_max_slice_size(config_params_enc, &(mix->max_slice_size)); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_get_max_slice_size\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_mss_required = TRUE; - - } - - case 
MIX_ENC_PARAMS_SLICE_NUM: - { - /* - * This type of dynamic control will be handled in H.264 override method - */ - } - break; - - case MIX_ENC_PARAMS_RC_MODE: - { - ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc, (MixRateControl*)&(mix->va_rcmode)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_rate_control\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - /* - * We only can change the RC mode to re-start encoding session - */ - - } - break; - - case MIX_ENC_PARAMS_RESOLUTION: - { - - ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc, &(mix->picture_width), &(mix->picture_height)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_picture_res\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->new_header_required = TRUE; - } - break; - case MIX_ENC_PARAMS_GOP_SIZE: - { - - ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc, &(mix->intra_period)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_intra_period\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->new_header_required = TRUE; - - } - break; - case MIX_ENC_PARAMS_FRAME_RATE: - { - ret = mix_videoconfigparamsenc_get_frame_rate (config_params_enc, &(mix->frame_rate_num), &(mix->frame_rate_denom)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_frame_rate\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_framerate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_FORCE_KEY_FRAME: - { - mix->new_header_required = TRUE; - - } - break; - - case MIX_ENC_PARAMS_REFRESH_TYPE: - { - ret = mix_videoconfigparamsenc_get_refresh_type(config_params_enc, &(mix->refresh_type)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_refresh_type\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - } - break; - - case MIX_ENC_PARAMS_AIR: - { - ret = mix_videoconfigparamsenc_get_AIR_params(config_params_enc, &(mix->air_params)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_AIR_params\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - - mix->render_AIR_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_CIR_FRAME_CNT: - { - ret = mix_videoconfigparamsenc_get_CIR_frame_cnt (config_params_enc, &(mix->CIR_frame_cnt)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E( - "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); - g_mutex_unlock(mix->objectlock); - return MIX_RESULT_FAIL; - } - } - break; - - default: - break; - } - - g_mutex_unlock(mix->objectlock); + if (config_params_enc == NULL) { + LOG_E(" config_params_enc == NULL\n"); + return MIX_RESULT_NULL_PTR; + } - return MIX_RESULT_SUCCESS; -} + this->Lock(); + this->new_header_required = FALSE; -/* mixvideoformatenc class methods implementation */ + switch (params_type) { + case MIX_ENC_PARAMS_BITRATE: + { + ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc, &(this->bitrate)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + LOG_E("Failed to mix_videoconfigparamsenc_get_bit_rate\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } + this->render_bitrate_required = TRUE; + } + break; + + case MIX_ENC_PARAMS_INIT_QP: + { + 
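// Note: this case (and MIN_QP below) pushes the QP change through the
+ // rate-control path by raising render_bitrate_required; the separate
+ // render_QP_required flag declared on the class is not set here.
+ 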
ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, &(this->initial_qp)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup -MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg) { - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); + LOG_E("Failed to mix_videoconfigparamsenc_get_init_qp\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } - LOG_V( "Begin\n"); + this->render_bitrate_required = TRUE; + } + break;
+ + case MIX_ENC_PARAMS_MIN_QP: + { + ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc, &(this->min_qp)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E("Failed to mix_videoconfigparamsenc_get_min_qp\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } - if (klass->getcaps) { - return klass->getcaps(mix, msg); + this->render_bitrate_required = TRUE; } - return MIX_RESULT_NOTIMPL; -} + break;
+ + case MIX_ENC_PARAMS_WINDOW_SIZE: + { + ret = mix_videoconfigparamsenc_get_window_size (config_params_enc, &(this->window_size)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup -MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); + LOG_E("Failed to MIX_ENC_PARAMS_WINDOW_SIZE\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } - /*frame_mgr and input_buf_pool is reserved for future use*/ - if (klass->initialize) { - return klass->initialize(mix, config_params_enc, frame_mgr, - input_buf_pool, surface_pool, va_display); + this->render_bitrate_required = TRUE; } + break; - return MIX_RESULT_FAIL;
+ case MIX_ENC_PARAMS_TARGET_PERCENTAGE: + { + ret = mix_videoconfigparamsenc_get_target_percentage (config_params_enc, &(this->target_percentage)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup -} + LOG_E("Failed to MIX_ENC_PARAMS_TARGET_PERCENTAGE\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } + + this->render_bitrate_required = TRUE; + } + break;
+ + case MIX_ENC_PARAMS_MTU_SLICE_SIZE: + { + ret = mix_videoconfigparamsenc_get_max_slice_size(config_params_enc, &(this->max_slice_size)); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videoconfigparamsenc_get_max_slice_size\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } -MIX_RESULT mix_videofmtenc_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { + this->render_mss_required = TRUE; - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - if (klass->encode) { - return klass->encode(mix, bufin, bufincnt, iovout, iovoutcnt, encode_params); } + break; - return MIX_RESULT_FAIL; -}
+ case MIX_ENC_PARAMS_SLICE_NUM: + { + /* + * This type of dynamic control will be handled in H.264 override method + */ + } + break;
+ + case MIX_ENC_PARAMS_RC_MODE: + { + ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc, (MixRateControl*)&(this->va_rcmode)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E("Failed to mix_videoconfigparamsenc_get_rate_control\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } + + /* + * The RC mode can only be changed by restarting the encoding session + */ -MIX_RESULT mix_videofmtenc_flush(MixVideoFormatEnc *mix) { - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - if (klass->flush) { - return klass->flush(mix); } + break; - return MIX_RESULT_FAIL; 
-} + case MIX_ENC_PARAMS_RESOLUTION: + { -MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix) { - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - if (klass->eos) { - return klass->eos(mix); + ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc, &(this->picture_width), &(this->picture_height)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E("Failed to mix_videoconfigparamsenc_get_picture_res\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } + + this->new_header_required = TRUE; } + break; + case MIX_ENC_PARAMS_GOP_SIZE: + { - return MIX_RESULT_FAIL; -} + ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc, &(this->intra_period)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E("Failed to mix_videoconfigparamsenc_get_intra_period\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } + + this->new_header_required = TRUE; -MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix) { - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - if (klass->deinitialize) { - return klass->deinitialize(mix); } + break; + case MIX_ENC_PARAMS_FRAME_RATE: + { + ret = mix_videoconfigparamsenc_get_frame_rate (config_params_enc, &(this->frame_rate_num), &(this->frame_rate_denom)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup - return MIX_RESULT_FAIL; -} + LOG_E("Failed to mix_videoconfigparamsenc_get_frame_rate\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } -MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, guint * max_size) { + this->render_framerate_required = TRUE; + } + break; + + case MIX_ENC_PARAMS_FORCE_KEY_FRAME: + { + this->new_header_required = TRUE; - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - if (klass->encode) { - return klass->getmaxencodedbufsize(mix, max_size); } + break; - return MIX_RESULT_FAIL; -} + case MIX_ENC_PARAMS_REFRESH_TYPE: + { + ret = mix_videoconfigparamsenc_get_refresh_type(config_params_enc, &(this->refresh_type)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E("Failed to mix_videoconfigparamsenc_get_refresh_type\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } + } + break; + + case MIX_ENC_PARAMS_AIR: + { + ret = mix_videoconfigparamsenc_get_AIR_params(config_params_enc, &(this->air_params)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup + + LOG_E("Failed to mix_videoconfigparamsenc_get_AIR_params\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } + + this->render_AIR_required = TRUE; + } + break; + + case MIX_ENC_PARAMS_CIR_FRAME_CNT: + { + ret = mix_videoconfigparamsenc_get_CIR_frame_cnt (config_params_enc, &(this->CIR_frame_cnt)); + if (ret != MIX_RESULT_SUCCESS) { + //TODO cleanup -MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixEncParamsType params_type) { + LOG_E("Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); + this->Unlock(); + return MIX_RESULT_FAIL; + } + } + break; - MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix); - if (klass->set_dynamic_config) { - return klass->set_dynamic_config(mix, config_params_enc, params_type); + default: + break; } + this->Unlock(); + return MIX_RESULT_SUCCESS; +} + +/* mixvideoformatenc class methods implementation */ + +MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, char *msg) { + LOG_V( "Begin\n"); + if (NULL != mix) + return mix->GetCaps(msg); + else + return MIX_RESULT_NOTIMPL; +} + +MIX_RESULT mix_videofmtenc_initialize( 
+ MixVideoFormatEnc *mix, + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + MixUsrReqSurfacesInfo * requested_surface_info, + VADisplay va_display) { + + if (NULL != mix) + return mix->Initialize( + config_params_enc, + frame_mgr, + input_buf_pool, + surface_pool, + requested_surface_info, + va_display); + else + return MIX_RESULT_FAIL; +} + +MIX_RESULT mix_videofmtenc_encode( + MixVideoFormatEnc *mix, MixBuffer * bufin[], + int bufincnt, MixIOVec * iovout[], int iovoutcnt, + MixVideoEncodeParams * encode_params) { + if (NULL != mix) + return mix->Encode(bufin, bufincnt, iovout, iovoutcnt, encode_params); + else + return MIX_RESULT_FAIL; +} + +MIX_RESULT mix_videofmtenc_flush(MixVideoFormatEnc *mix) { + if (NULL != mix) + return mix->Flush(); + else + return MIX_RESULT_FAIL; +} + +MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix) { + if (NULL != mix) + return mix->EndOfStream(); + else + return MIX_RESULT_FAIL; +} + +MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix) { + if (NULL != mix) + return mix->Deinitialize(); + else + return MIX_RESULT_FAIL; +} - return MIX_RESULT_FAIL; +MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size( + MixVideoFormatEnc *mix, uint * max_size) { + if (NULL != mix) + return mix->GetMaxEncodedBufSize(max_size); + else + return MIX_RESULT_FAIL; +} + +MIX_RESULT mix_videofmtenc_set_dynamic_enc_config ( + MixVideoFormatEnc * mix, + MixVideoConfigParamsEnc * config_params_enc, + MixEncParamsType params_type) { + if (NULL != mix) + return mix->SetDynamicEncConfig(config_params_enc, params_type); + else + return MIX_RESULT_FAIL; } diff --git a/mix_video/src/mixvideoformatenc.h b/mix_video/src/mixvideoformatenc.h index 3b208b4..b08bf70 100644 --- a/mix_video/src/mixvideoformatenc.h +++ b/mix_video/src/mixvideoformatenc.h @@ -10,7 +10,6 @@ #define __MIX_VIDEOFORMATENC_H__ #include -#include #include "mixvideodef.h" #include #include "mixvideoconfigparamsenc.h" @@ -21,139 +20,142 @@ #include "mixbufferpool.h" #include "mixvideoformatqueue.h" #include "mixvideoencodeparams.h" +#include +class MixVideoFormatEnc; -G_BEGIN_DECLS -/* - * Type macros. 
- */ -#define MIX_TYPE_VIDEOFORMATENC (mix_videoformatenc_get_type ()) -#define MIX_VIDEOFORMATENC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC, MixVideoFormatEnc)) -#define MIX_IS_VIDEOFORMATENC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC)) -#define MIX_VIDEOFORMATENC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC, MixVideoFormatEncClass)) -#define MIX_IS_VIDEOFORMATENC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC)) -#define MIX_VIDEOFORMATENC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC, MixVideoFormatEncClass)) -typedef struct _MixVideoFormatEnc MixVideoFormatEnc; -typedef struct _MixVideoFormatEncClass MixVideoFormatEncClass; +#define MIX_VIDEOFORMATENC(obj) (reinterpret_cast(obj)) +#define MIX_IS_VIDEOFORMATENC(obj) (NULL != MIX_VIDEOFORMATENC(obj)) /* vmethods typedef */ /* TODO: change return type and method parameters */ -typedef MIX_RESULT (*MixVideoFmtEncGetCapsFunc)(MixVideoFormatEnc *mix, GString *msg); +typedef MIX_RESULT (*MixVideoFmtEncGetCapsFunc)(MixVideoFormatEnc *mix, char *msg); typedef MIX_RESULT (*MixVideoFmtEncInitializeFunc)(MixVideoFormatEnc *mix, MixVideoConfigParamsEnc* config_params_enc, MixFrameManager * frame_mgr, MixBufferPool * input_buf_pool, MixSurfacePool ** surface_pool, + MixUsrReqSurfacesInfo * requested_surface_info, VADisplay va_display); typedef MIX_RESULT (*MixVideoFmtEncodeFunc)(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, + int bufincnt, MixIOVec * iovout[], int iovoutcnt, MixVideoEncodeParams * encode_params); typedef MIX_RESULT (*MixVideoFmtEncFlushFunc)(MixVideoFormatEnc *mix); typedef MIX_RESULT (*MixVideoFmtEncEndOfStreamFunc)(MixVideoFormatEnc *mix); typedef MIX_RESULT (*MixVideoFmtEncDeinitializeFunc)(MixVideoFormatEnc *mix); -typedef MIX_RESULT (*MixVideoFmtEncGetMaxEncodedBufSizeFunc) (MixVideoFormatEnc *mix, guint *max_size); +typedef MIX_RESULT (*MixVideoFmtEncGetMaxEncodedBufSizeFunc) (MixVideoFormatEnc *mix, uint *max_size); typedef MIX_RESULT (*MixVideoFmtEncSetDynamicEncConfigFunc) (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params, - MixEncParamsType params_type); + MixVideoConfigParamsEnc * config_params, + MixEncParamsType params_type); -struct _MixVideoFormatEnc { - /*< public > */ - GObject parent; +class MixVideoFormatEnc { +public: + MixVideoFormatEnc(); + virtual ~MixVideoFormatEnc(); - /*< public > */ - - /*< private > */ - GMutex *objectlock; - gboolean initialized; + virtual MIX_RESULT GetCaps(char *msg); + virtual MIX_RESULT Initialize( + MixVideoConfigParamsEnc* config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + MixUsrReqSurfacesInfo * requested_surface_info, + VADisplay va_display); + virtual MIX_RESULT Encode( MixBuffer * bufin[], + int bufincnt, MixIOVec * iovout[], int iovoutcnt, + MixVideoEncodeParams * encode_params); + virtual MIX_RESULT Flush(); + virtual MIX_RESULT EndOfStream(); + virtual MIX_RESULT Deinitialize(); + virtual MIX_RESULT GetMaxEncodedBufSize (uint *max_size); + virtual MIX_RESULT SetDynamicEncConfig ( + MixVideoConfigParamsEnc * config_params, MixEncParamsType params_type); + + void Lock() { + mLock.lock(); + } + void Unlock() { + mLock.unlock(); + } + + MixVideoFormatEnc* Ref() { + ++ref_count; + return this; + } + + MixVideoFormatEnc* Unref() { + if (0 == (--ref_count)) { + delete this; + return NULL; + } else { + return this; + } + } 
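+ // Note: ref_count is a plain uint modified without atomics and without taking
+ // mLock, so Ref()/Unref() as written assume that callers serialize access to
+ // the object externally.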
+ +public: + + MixVideoMutex mLock; + bool initialized; MixFrameManager *framemgr; MixSurfacePool *surfacepool; VADisplay va_display; VAContextID va_context; VAConfigID va_config; - GString *mime_type; - - guint frame_rate_num; - guint frame_rate_denom; - guint picture_width; - guint picture_height; - - guint intra_period; - + char *mime_type; + MixRawTargetFormat raw_format; + uint frame_rate_num; + uint frame_rate_denom; + uint picture_width; + uint picture_height; + uint intra_period; /* * Following is for bitrate control */ - guint initial_qp; - guint min_qp; - guint bitrate; - guint target_percentage; - guint window_size; + uint initial_qp; + uint min_qp; + uint bitrate; + uint target_percentage; + uint window_size; - gboolean share_buf_mode; - gulong * ci_frame_id; - guint ci_frame_num; + bool share_buf_mode; + ulong * ci_frame_id; + uint ci_frame_num; - gboolean force_key_frame; - gboolean new_header_required; + bool force_key_frame; + bool new_header_required; MixVideoIntraRefreshType refresh_type; - guint CIR_frame_cnt; + uint CIR_frame_cnt; MixAIRParams air_params; - guint max_slice_size; + uint max_slice_size; + bool render_mss_required; + bool render_QP_required; + bool render_AIR_required; + bool render_framerate_required; + bool render_bitrate_required; - gboolean render_mss_required; - gboolean render_QP_required; - gboolean render_AIR_required; - gboolean render_framerate_required; - gboolean render_bitrate_required; - - gulong drawable; - gboolean need_display; + ulong drawable; + bool need_display; VAProfile va_profile; VAEntrypoint va_entrypoint; - guint va_format; - guint va_rcmode; - guint8 level; + uint va_format; + uint va_rcmode; + uint8 level; + MixBufferAllocationMode buffer_mode; + void * buf_info; MixBufferPool *inputbufpool; - GQueue *inputbufqueue; -}; - -/** - * MixVideoFormatEncClass: - * - * MI-X Video object class - */ -struct _MixVideoFormatEncClass { - /*< public > */ - GObjectClass parent_class; - - /* class members */ - - /*< public > */ - MixVideoFmtEncGetCapsFunc getcaps; - MixVideoFmtEncInitializeFunc initialize; - MixVideoFmtEncodeFunc encode; - MixVideoFmtEncFlushFunc flush; - MixVideoFmtEncEndOfStreamFunc eos; - MixVideoFmtEncDeinitializeFunc deinitialize; - MixVideoFmtEncGetMaxEncodedBufSizeFunc getmaxencodedbufsize; - MixVideoFmtEncSetDynamicEncConfigFunc set_dynamic_config; + JQueue *inputbufqueue; + uint ref_count ; }; -/** - * mix_videoformatenc_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_videoformatenc_get_type(void); /** * mix_videoformatenc_new: @@ -178,23 +180,26 @@ MixVideoFormatEnc *mix_videoformatenc_ref(MixVideoFormatEnc * mix); * * Decrement reference count of the object. 
*/ -#define mix_videoformatenc_unref(obj) g_object_unref (G_OBJECT(obj)) +MixVideoFormatEnc *mix_videoformatenc_unref(MixVideoFormatEnc * mix); + /* Class Methods */ /* TODO: change method parameter list */ -MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg); +MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, char *msg); MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * enc_config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); - -MIX_RESULT mix_videofmtenc_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params); + MixVideoConfigParamsEnc * enc_config_params, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + MixUsrReqSurfacesInfo * requested_surface_info, + VADisplay va_display); + +MIX_RESULT mix_videofmtenc_encode( + MixVideoFormatEnc *mix, MixBuffer * bufin[], + int bufincnt, MixIOVec * iovout[], int iovoutcnt, + MixVideoEncodeParams * encode_params); MIX_RESULT mix_videofmtenc_flush(MixVideoFormatEnc *mix); @@ -203,11 +208,11 @@ MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix); MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix); MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, - guint *max_size); + uint *max_size); MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params, - MixEncParamsType params_type); + MixVideoConfigParamsEnc * config_params, + MixEncParamsType params_type); + -G_END_DECLS #endif /* __MIX_VIDEOFORMATENC_H__ */ diff --git a/mix_video/src/mixvideoformatenc_h263.cpp b/mix_video/src/mixvideoformatenc_h263.cpp index f7b3626..78aec17 100644 --- a/mix_video/src/mixvideoformatenc_h263.cpp +++ b/mix_video/src/mixvideoformatenc_h263.cpp @@ -5,7 +5,7 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#include + #include #include @@ -21,1341 +21,1435 @@ Window win = 0; #endif /* SHOW_SRC */ - -/* The parent class. The pointer will be saved - * in this class's initialization. The pointer - * can be used for chaining method call if needed. 
- */ -static MixVideoFormatEncClass *parent_class = NULL; - -static void mix_videoformatenc_h263_finalize(GObject * obj); - -/* - * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC - */ -G_DEFINE_TYPE (MixVideoFormatEnc_H263, mix_videoformatenc_h263, MIX_TYPE_VIDEOFORMATENC); - -static void mix_videoformatenc_h263_init(MixVideoFormatEnc_H263 * self) { - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); - - /* member initialization */ - self->encoded_frames = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - self->cur_frame = NULL; - self->ref_frame = NULL; - self->rec_frame = NULL; -#ifdef ANDROID - self->last_mix_buffer = NULL; -#endif - self->ci_shared_surfaces = NULL; - self->surfaces= NULL; - self->surface_num = 0; - self->coded_buf_index = 0; - - parent->initialized = FALSE; - +MixVideoFormatEnc_H263::MixVideoFormatEnc_H263() + :encoded_frames(0) + ,pic_skipped(FALSE) + ,is_intra(TRUE) + ,cur_frame(NULL) + ,ref_frame(NULL) + ,rec_frame(NULL) + ,lookup_frame(NULL) + ,last_mix_buffer(NULL) + ,shared_surfaces(NULL) + ,surfaces(NULL) + ,surface_num(0) + ,shared_surfaces_cnt(0) + ,precreated_surfaces_cnt(0) + ,usrptr(NULL) + ,coded_buf_index(0) + ,coded_buf_size(0) { } -static void mix_videoformatenc_h263_class_init( - MixVideoFormatEnc_H263Class * klass) { - - /* root class */ - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* direct parent class */ - MixVideoFormatEncClass *video_formatenc_class = - MIX_VIDEOFORMATENC_CLASS(klass); - - /* parent class for later use */ - parent_class = reinterpret_cast(g_type_class_peek_parent(klass)); - - /* setup finializer */ - gobject_class->finalize = mix_videoformatenc_h263_finalize; - - /* setup vmethods with base implementation */ - video_formatenc_class->getcaps = mix_videofmtenc_h263_getcaps; - video_formatenc_class->initialize = mix_videofmtenc_h263_initialize; - video_formatenc_class->encode = mix_videofmtenc_h263_encode; - video_formatenc_class->flush = mix_videofmtenc_h263_flush; - video_formatenc_class->eos = mix_videofmtenc_h263_eos; - video_formatenc_class->deinitialize = mix_videofmtenc_h263_deinitialize; - video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_h263_get_max_encoded_buf_size; +MixVideoFormatEnc_H263::~MixVideoFormatEnc_H263() { } + MixVideoFormatEnc_H263 * mix_videoformatenc_h263_new(void) { - MixVideoFormatEnc_H263 *ret = reinterpret_cast( - g_object_new(MIX_TYPE_VIDEOFORMATENC_H263, NULL)); - - return ret; + return new MixVideoFormatEnc_H263(); } -void mix_videoformatenc_h263_finalize(GObject * obj) { - /* clean up here. 
*/ - - /*MixVideoFormatEnc_H263 *mix = MIX_VIDEOFORMATENC_H263(obj); */ - GObjectClass *root_class = (GObjectClass *) parent_class; - - LOG_V( "\n"); - - /* Chain up parent */ - if (root_class->finalize) { - root_class->finalize(obj); - } -} MixVideoFormatEnc_H263 * mix_videoformatenc_h263_ref(MixVideoFormatEnc_H263 * mix) { - return (MixVideoFormatEnc_H263 *) g_object_ref(G_OBJECT(mix)); + if (NULL != mix) + mix->Ref(); + return mix; }
-/*H263 vmethods implementation */ -MIX_RESULT mix_videofmtenc_h263_getcaps(MixVideoFormatEnc *mix, GString *msg) { - - LOG_V( "mix_videofmtenc_h263_getcaps\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - if (parent_class->getcaps) { - return parent_class->getcaps(mix, msg); - } - return MIX_RESULT_SUCCESS; +MixVideoFormatEnc_H263 * +mix_videoformatenc_h263_unref(MixVideoFormatEnc_H263 * mix) { + if (NULL != mix) + return MIX_VIDEOFORMATENC_H263(mix->Unref()); + else + return mix; }
-#define CLEAN_UP {\ - if(ret == MIX_RESULT_SUCCESS) {\ - parent->initialized = TRUE;\ - }\ - /*free profiles and entrypoints*/\ - if(va_profiles)\ - g_free(va_profiles);\ - if(va_entrypoints)\ - g_free(va_entrypoints);\ - if(surfaces)\ - g_free(surfaces);\ - g_mutex_unlock(parent->objectlock);\ - LOG_V( "end\n"); \ - return ret;}
-MIX_RESULT mix_videofmtenc_h263_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display ) { - +MIX_RESULT MixVideoFormatEnc_H263::Initialize( + MixVideoConfigParamsEnc* config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + MixUsrReqSurfacesInfo * requested_surface_info, + VADisplay va_display) { MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; MixVideoConfigParamsEncH263 * config_params_enc_h263; - + VAStatus va_status = VA_STATUS_SUCCESS; VASurfaceID * surfaces = NULL; - - gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - gint va_num_profiles, va_num_entrypoints; + + int va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; + int va_num_profiles, va_num_entrypoints; VAProfile *va_profiles = NULL; VAEntrypoint *va_entrypoints = NULL; - VAConfigAttrib va_attrib[2]; - guint index; - + VAConfigAttrib va_attrib[2]; + uint index; + uint max_size = 0;
+ + /* + * Different MIX buffer modes require different surface handling approaches + */ + + + uint normal_surfaces_cnt = 2; + + /* + * shared_surfaces_cnt is for upstream buffer allocation case + */ + uint shared_surfaces_cnt = 0; + + /* + * precreated_surfaces_cnt is for self buffer allocation case + */ + uint precreated_surfaces_cnt = 0; + + MixCISharedBufferInfo * ci_info = NULL;
+ /*frame_mgr and input_buf_pool are reserved for future use*/ - - if (mix == NULL || config_params_enc == NULL || va_display == NULL) { - LOG_E( - "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); + if ( config_params_enc == NULL || va_display == NULL || requested_surface_info == NULL) { + LOG_E( + " config_params_enc == NULL || va_display == NULL || requested_surface_info == NULL\n"); + return MIX_RESULT_NULL_PTR; + }
+ + + /* + * Additional checks on requested_surface_info + */ + if (requested_surface_info->surface_cnt != 0 && + (requested_surface_info->surface_allocated == NULL || requested_surface_info->usrptr == NULL)) { + LOG_E( + "surface_cnt != 0 && (surface_allocated == NULL || usrptr == NULL)\n"); return MIX_RESULT_NULL_PTR; }
+ if (requested_surface_info->surface_cnt > MAX_ENC_SURFACE_COUNT) { + LOG_E ("Something is wrong, we have to quit now!\n"); + return MIX_RESULT_FAIL; + }
+ LOG_V( "begin\n"); /* Chainup parent method. */ - if (parent_class->initialize) { - ret = parent_class->initialize(mix, config_params_enc, - frame_mgr, input_buf_pool, surface_pool, - va_display); - } - + ret = MixVideoFormatEnc::Initialize(config_params_enc, frame_mgr, input_buf_pool, surface_pool, requested_surface_info, va_display); + + if (ret != MIX_RESULT_SUCCESS) { return ret; }
- if (!MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263(mix); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H263 (config_params_enc)) { - config_params_enc_h263 = - MIX_VIDEOCONFIGPARAMSENC_H263 (config_params_enc); - } else { - LOG_V( - "mix_videofmtenc_h263_initialize: no h263 config params found\n"); - return MIX_RESULT_FAIL; - } - - g_mutex_lock(parent->objectlock); - LOG_V( - "Start to get properities from H263 params\n"); + if (MIX_IS_VIDEOCONFIGPARAMSENC_H263 (config_params_enc)) { + config_params_enc_h263 = + MIX_VIDEOCONFIGPARAMSENC_H263 (config_params_enc); + } else { + LOG_V( + "mix_videofmtenc_h263_initialize: no h263 config params found\n"); + return MIX_RESULT_FAIL; + }
// g_mutex_lock(parent->objectlock); + Lock(); - ret = mix_videoconfigparamsenc_h263_get_slice_num (config_params_enc_h263, - &self->slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h263_get_slice_num\n"); - CLEAN_UP; - } - - ret = mix_videoconfigparamsenc_h263_get_dlk (config_params_enc_h263, - &(self->disable_deblocking_filter_idc)); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h263_get_dlk\n"); - CLEAN_UP; - } - - - LOG_V( - "======H263 Encode Object properities======:\n"); - - LOG_I( "self->slice_num = %d\n", - self->slice_num); - LOG_I( "self->disabled_deblocking_filter_idc = %d\n\n", - self->disable_deblocking_filter_idc); - - LOG_V( - "Get properities from params done\n"); - - parent->va_display = va_display; - - LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", - (guint)va_display); + LOG_V( + "Start to get properties from H263 params\n");
-#if 0 - /* query the vender information, can ignore*/ - va_vendor = vaQueryVendorString (va_display); - LOG_I( "Vendor = %s\n", - va_vendor); -#endif - - /*get the max number for profiles/entrypoints/attribs*/ - va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); - - va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); - - va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - - va_profiles = reinterpret_cast(g_malloc(sizeof(VAProfile)*va_max_num_profiles)); - va_entrypoints = reinterpret_cast(g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints)); - - if (va_profiles == NULL || va_entrypoints ==NULL) - { - LOG_E( - "!va_profiles || !va_entrypoints\n"); - ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; - } + /* get properties from H263 params Object, which is special to H263 format*/ - LOG_I( - "va_profiles = 0x%08x\n", (guint)va_profiles); - - LOG_V( "vaQueryConfigProfiles\n"); - - - va_status = vaQueryConfigProfiles 
(va_display, va_profiles, &va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigProfiles\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - LOG_V( "vaQueryConfigProfiles Done\n"); - - - - /*check whether profile is supported*/ - for(index= 0; index < va_num_profiles; index++) { - if(parent->va_profile == va_profiles[index]) - break; - } - - if(index == va_num_profiles) - { - LOG_E( "Profile not supported\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } + ret = mix_videoconfigparamsenc_h263_get_slice_num (config_params_enc_h263, + &this->slice_num); - LOG_V( "vaQueryConfigEntrypoints\n"); - - - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints(va_display, - parent->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - for (index = 0; index < va_num_entrypoints; index ++) { - if (va_entrypoints[index] == VAEntrypointEncSlice) { - break; - } - } - - if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - va_attrib[0].type = VAConfigAttribRTFormat; - va_attrib[1].type = VAConfigAttribRateControl; - - LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaGetConfigAttributes\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - if ((va_attrib[0].value & parent->va_format) == 0) { - LOG_E( "Matched format not found\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - - if ((va_attrib[1].value & parent->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = parent->va_rcmode; - - LOG_V( "======VA Configuration======\n"); - - LOG_I( "profile = %d\n", - parent->va_profile); - LOG_I( "va_entrypoint = %d\n", - parent->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", - va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", - va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", - va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) = %d\n", - va_attrib[1].value); - - LOG_V( "vaCreateConfig\n"); - - va_status = vaCreateConfig(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2, &(parent->va_config)); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaCreateConfig\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h263_get_slice_num\n"); + goto cleanup; + } - /*TODO: compute the surface number*/ - int numSurfaces; - - if (parent->share_buf_mode) { - numSurfaces = 2; - } - else { - numSurfaces = 8; - parent->ci_frame_num = 0; - } - - self->surface_num = numSurfaces + parent->ci_frame_num; - - surfaces = reinterpret_cast(g_malloc(sizeof(VASurfaceID)*numSurfaces)); - - if (surfaces == NULL) - { - LOG_E( - "Failed allocate surface\n"); - ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; - } - - LOG_V( "vaCreateSurfaces\n"); - - va_status = vaCreateSurfaces(va_display, parent->picture_width, - parent->picture_height, parent->va_format, - numSurfaces, surfaces); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaCreateSurfaces\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } + ret = mix_videoconfigparamsenc_h263_get_dlk 
(config_params_enc_h263, + &(this->disable_deblocking_filter_idc)); - if (parent->share_buf_mode) { - - LOG_V( - "We are in share buffer mode!\n"); - self->ci_shared_surfaces = reinterpret_cast( - g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num)); - - if (self->ci_shared_surfaces == NULL) - { - LOG_E( - "Failed allocate shared surface\n"); - ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; - } - - guint index; - for(index = 0; index < parent->ci_frame_num; index++) { - - LOG_I( "ci_frame_id = %lu\n", - parent->ci_frame_id[index]); - - LOG_V( - "vaCreateSurfaceFromCIFrame\n"); - - va_status = vaCreateSurfaceFromCIFrame(va_display, - (gulong) (parent->ci_frame_id[index]), - &self->ci_shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateSurfaceFromCIFrame\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - } - - LOG_V( - "vaCreateSurfaceFromCIFrame Done\n"); - - }// if (parent->share_buf_mode) - - self->surfaces = reinterpret_cast(g_malloc(sizeof(VASurfaceID) * self->surface_num)); - - if (self->surfaces == NULL) - { - LOG_E( - "Failed allocate private surface\n"); - ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; - } - - if (parent->share_buf_mode) { - /*shared surfaces should be put in pool first, - * because we will get it accoring to CI index*/ - for(index = 0; index < parent->ci_frame_num; index++) - self->surfaces[index] = self->ci_shared_surfaces[index]; - } - - for(index = 0; index < numSurfaces; index++) { - self->surfaces[index + parent->ci_frame_num] = surfaces[index]; - } + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h263_get_dlk\n"); + goto cleanup; + } - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", - numSurfaces + parent->ci_frame_num); - -#if 0 //current put this in gst - images = g_malloc(sizeof(VAImage)*numSurfaces); - if (images == NULL) - { - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < numSurfaces; index++) { - //Derive an VAImage from an existing surface. 
- //The image buffer can then be mapped/unmapped for CPU access - va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); - } -#endif - - LOG_V( "mix_surfacepool_new\n"); - parent->surfacepool = mix_surfacepool_new(); - if (surface_pool) - *surface_pool = parent->surfacepool; - //which is useful to check before encode + LOG_V( + "======H263 Encode Object properities======:\n"); - if (parent->surfacepool == NULL) - { - LOG_E( - "Failed to mix_surfacepool_new\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } + LOG_I( "this->slice_num = %d\n", + this->slice_num); + LOG_I( "this->disabled_deblocking_filter_idc = %d\n\n", + this->disable_deblocking_filter_idc); - LOG_V( - "mix_surfacepool_initialize\n"); - - ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces, va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: + LOG_V( + "Get properities from params done\n"); - LOG_E("Error init failure\n"); + this->va_display = va_display; - ret = MIX_RESULT_ALREADY_INIT; - CLEAN_UP; - default: - break; - } + LOG_V( "Get Display\n"); + LOG_I( "Display = 0x%08x\n", + (uint)va_display); - - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext(va_display, parent->va_config, - parent->picture_width, parent->picture_height, - VA_PROGRESSIVE, self->surfaces, parent->ci_frame_num + numSurfaces, - &(parent->va_context)); - - LOG_I( - "Created libva context width %d, height %d\n", - parent->picture_width, parent->picture_height); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", - (guint)va_status); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } +#if 0 + /* query the vender information, can ignore*/ + va_vendor = vaQueryVendorString (va_display); + LOG_I( "Vendor = %s\n", + va_vendor); +#endif - guint max_size = 0; - ret = mix_videofmtenc_h263_get_max_encoded_buf_size (parent, &max_size); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videofmtenc_h263_get_max_encoded_buf_size\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &self->coded_buf[0]); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } + /*get the max number for profiles/entrypoints/attribs*/ + va_max_num_profiles = vaMaxNumProfiles(va_display); + LOG_I( "va_max_num_profiles = %d\n", + va_max_num_profiles); + va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); + LOG_I( "va_max_num_entrypoints = %d\n", + va_max_num_entrypoints); - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &(self->coded_buf[1])); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - -#ifdef SHOW_SRC - Display * display = XOpenDisplay (NULL); - - LOG_I( "display = 0x%08x\n", - (guint) display); - win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, - parent->picture_width, parent->picture_height, 0, 0, - WhitePixel(display, 0)); - XMapWindow(display, win); - XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - - XSync(display, False); - LOG_I( "va_display = 
0x%08x\n", - (guint) va_display); - -#endif /* SHOW_SRC */ - - CLEAN_UP; -} -#undef CLEAN_UP - -#define CLEAN_UP {\ - LOG_V( "UnLocking\n"); \ - g_mutex_unlock(parent->objectlock);\ - LOG_V( "end\n"); \ - return ret;} - -MIX_RESULT mix_videofmtenc_h263_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - - LOG_V( "Begin\n"); - - /*currenly only support one input and output buffer*/ + va_max_num_attribs = vaMaxNumConfigAttributes(va_display); + LOG_I( "va_max_num_attribs = %d\n", + va_max_num_attribs); - if (bufincnt != 1 || iovoutcnt != 1) { - LOG_E( - "buffer count not equel to 1\n"); - LOG_E( - "maybe some exception occurs\n"); - } - - if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { - LOG_E( - "!mix || !bufin[0] ||!iovout[0]\n"); - return MIX_RESULT_NULL_PTR; - } - -#if 0 - if (parent_class->encode) { - return parent_class->encode(mix, bufin, bufincnt, iovout, - iovoutcnt, encode_params); +// va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); + va_profiles = new VAProfile[va_max_num_profiles]; +// va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); + va_entrypoints = new VAEntrypoint[va_max_num_entrypoints]; + + if (va_profiles == NULL || va_entrypoints ==NULL) + { + LOG_E( + "!va_profiles || !va_entrypoints\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; } -#endif - - if (! MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263 (mix); - - LOG_V( "Locking\n"); - g_mutex_lock(parent->objectlock); - - - //TODO: also we could move some encode Preparation work to here - - LOG_V( - "mix_videofmtenc_h263_process_encode\n"); + LOG_I( + "va_profiles = 0x%08x\n", (uint)va_profiles); - ret = mix_videofmtenc_h263_process_encode (self, - bufin[0], iovout[0]); - if (ret != MIX_RESULT_SUCCESS) + LOG_V( "vaQueryConfigProfiles\n"); + + + va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); + + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed mix_videofmtenc_h263_process_encode\n"); - CLEAN_UP; + LOG_E( + "Failed to call vaQueryConfigProfiles\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } - CLEAN_UP; -} -#undef CLEAN_UP + LOG_V( "vaQueryConfigProfiles Done\n"); -MIX_RESULT mix_videofmtenc_h263_flush(MixVideoFormatEnc *mix) { - - //MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - /*not chain to parent flush func*/ -#if 0 - if (parent_class->flush) { - return parent_class->flush(mix, msg); + + + /*check whether profile is supported*/ + for (index= 0; index < va_num_profiles; index++) { + if (this->va_profile == va_profiles[index]) + break; } -#endif - - if(!MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; - MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263(mix); - - g_mutex_lock(mix->objectlock); - - /*unref the current source surface*/ - if (self->cur_frame != NULL) + if (index == va_num_profiles) { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; + LOG_E( "Profile not supported\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } - - /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) + + LOG_V( "vaQueryConfigEntrypoints\n"); + + + /*Check entry point*/ + va_status = 
vaQueryConfigEntrypoints(va_display, + this->va_profile, + va_entrypoints, &va_num_entrypoints); + + if (va_status != VA_STATUS_SUCCESS) { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; + LOG_E( + "Failed to call vaQueryConfigEntrypoints\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } - /*unref the reference surface*/ - if (self->ref_frame != NULL) - { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; + for (index = 0; index < va_num_entrypoints; index ++) { + if (va_entrypoints[index] == VAEntrypointEncSlice) { + break; + } + } + + if (index == va_num_entrypoints) { + LOG_E( "Entrypoint not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } -#ifdef ANDROID - if(self->last_mix_buffer) { - mix_buffer_unref(self->last_mix_buffer); - self->last_mix_buffer = NULL; - } -#endif - /*reset the properities*/ - self->encoded_frames = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - - g_mutex_unlock(mix->objectlock); - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} -MIX_RESULT mix_videofmtenc_h263_eos(MixVideoFormatEnc *mix) { + va_attrib[0].type = VAConfigAttribRTFormat; + va_attrib[1].type = VAConfigAttribRateControl; - LOG_V( "\n"); + LOG_V( "vaGetConfigAttributes\n"); - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } + va_status = vaGetConfigAttributes(va_display, this->va_profile, + this->va_entrypoint, + &va_attrib[0], 2); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to call vaGetConfigAttributes\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } - if (parent_class->eos) { - return parent_class->eos(mix); + if ((va_attrib[0].value & this->va_format) == 0) { + LOG_E( "Matched format not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } - return MIX_RESULT_SUCCESS; -} -#define CLEAN_UP {\ - parent->initialized = TRUE;\ - g_mutex_unlock(parent->objectlock); \ - LOG_V( "end\n"); \ - return ret;} -MIX_RESULT mix_videofmtenc_h263_deinitialize(MixVideoFormatEnc *mix) { - - MixVideoFormatEnc *parent = NULL; - VAStatus va_status; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); + if ((va_attrib[1].value & this->va_rcmode) == 0) { + LOG_E( "RC mode not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } + va_attrib[0].value = this->va_format; //VA_RT_FORMAT_YUV420; + va_attrib[1].value = this->va_rcmode; - if(!MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; + LOG_V( "======VA Configuration======\n"); - if(parent_class->deinitialize) { - ret = parent_class->deinitialize(mix); - } + LOG_I( "profile = %d\n", + this->va_profile); + LOG_I( "va_entrypoint = %d\n", + this->va_entrypoint); + LOG_I( "va_attrib[0].type = %d\n", + va_attrib[0].type); + LOG_I( "va_attrib[1].type = %d\n", + va_attrib[1].type); + LOG_I( "va_attrib[0].value (Format) = %d\n", + va_attrib[0].value); + LOG_I( "va_attrib[1].value (RC mode) = %d\n", + va_attrib[1].value); - if(ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263(mix); - - LOG_V( "Release frames\n"); + LOG_V( "vaCreateConfig\n"); - g_mutex_lock(parent->objectlock); + va_status = vaCreateConfig(va_display, this->va_profile, + this->va_entrypoint, + &va_attrib[0], 2, &(this->va_config)); -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) + if (va_status != VA_STATUS_SUCCESS) { - mix_videoframe_unref (self->cur_frame); - 
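
The attribute handshake here is two-phase: vaGetConfigAttributes() reports the driver's supported values as a bitmask, the caller tests its requested bits, then narrows each attribute to the single value it wants before vaCreateConfig(). In isolation (a sketch assuming VAProfileH263Baseline, VAEntrypointEncSlice, and CBR rate control; dpy is a valid VADisplay):

    VAConfigAttrib attrib[2];
    attrib[0].type = VAConfigAttribRTFormat;
    attrib[1].type = VAConfigAttribRateControl;

    // Phase 1: the driver ORs every supported value into .value
    vaGetConfigAttributes(dpy, VAProfileH263Baseline, VAEntrypointEncSlice,
                          attrib, 2);
    if (!(attrib[0].value & VA_RT_FORMAT_YUV420) ||
        !(attrib[1].value & VA_RC_CBR))
        return MIX_RESULT_FAIL;       // requested format or RC mode unsupported

    // Phase 2: narrow to the values actually wanted, then create the config
    attrib[0].value = VA_RT_FORMAT_YUV420;
    attrib[1].value = VA_RC_CBR;
    VAConfigID config;
    vaCreateConfig(dpy, VAProfileH263Baseline, VAEntrypointEncSlice,
                   attrib, 2, &config);
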
self->cur_frame = NULL;
+        LOG_E( "Failed vaCreateConfig\n");
+        ret = MIX_RESULT_FAIL;
+        goto cleanup;
     }
-#endif
-
-    /*unref the reconstructed surface*/
-    if (self->rec_frame != NULL)
-    {
-        mix_videoframe_unref (self->rec_frame);
-        self->rec_frame = NULL;
+
+    if (this->va_rcmode == VA_RC_VCM) {
+
+        /*
+         * The following three features are only enabled in VCM mode
+         */
+        this->render_mss_required = TRUE;
+        this->render_AIR_required = TRUE;
+        this->render_bitrate_required = TRUE;
     }
-    /*unref the reference surface*/
-    if (self->ref_frame != NULL)
-    {
-        mix_videoframe_unref (self->ref_frame);
-        self->ref_frame = NULL;
-    }
+    /*
+     * When the upstream component allocates the buffers, setting the buffer
+     * mode is mandatory; for everything else it is optional
+     */
+
+
+
+
+    LOG_I ("alloc_surface_cnt = %d\n", requested_surface_info->surface_cnt);
+
+    if (requested_surface_info->surface_cnt == 0) {
+        switch (this->buffer_mode) {
+        case MIX_BUFFER_UPSTREAM_ALLOC_CI:
+            ci_info = (MixCISharedBufferInfo *) (this->buf_info);
+            shared_surfaces_cnt = ci_info->ci_frame_cnt;
+            normal_surfaces_cnt = 2;
+            break;
+        case MIX_BUFFER_UPSTREAM_ALLOC_V4L2:
+            /*
+             * To be developed
+             */
+            break;
+        case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE:
+            /*
+             * To be developed
+             */
+            break;
+        default:
+            this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL;
+            normal_surfaces_cnt = 8;
+            break;
+        }
+    }
+    else if (requested_surface_info->surface_cnt == 1) {
+        /*
+         * Abnormal case, TBD
+         */
+        this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL;
+        normal_surfaces_cnt = 8;
+    }
+    else {
+        this->buffer_mode = MIX_BUFFER_SELF_ALLOC_SURFACE;
+        precreated_surfaces_cnt = requested_surface_info->surface_cnt;
+        this->alloc_surface_cnt = requested_surface_info->surface_cnt;
+
+//      self->usrptr = g_malloc (requested_surface_info->surface_cnt * sizeof (uint8 *));
+        this->usrptr = new uint8 *[requested_surface_info->surface_cnt] ;
+        if (this->usrptr == NULL) {
+            LOG_E("Failed to allocate memory\n");
+            ret = MIX_RESULT_NO_MEMORY;
+            goto cleanup;
+        }
-    LOG_V( "Release surfaces\n");
+        memcpy (this->usrptr, requested_surface_info->usrptr, requested_surface_info->surface_cnt * sizeof (uint8 *));
-    if (self->ci_shared_surfaces)
-    {
-        g_free (self->ci_shared_surfaces);
-        self->ci_shared_surfaces = NULL;
+    }
-    if (self->surfaces)
-    {
-        g_free (self->surfaces);
-        self->surfaces = NULL;
-    }
-
-    LOG_V( "vaDestroyContext\n");
-
-    va_status = vaDestroyContext (parent->va_display, parent->va_context);
-    if (va_status != VA_STATUS_SUCCESS)
-    {
-        LOG_E(
-            "Failed vaDestroyContext\n");
-        ret = MIX_RESULT_FAIL;
-        CLEAN_UP;
-    }
+    LOG_I ("buffer_mode = %d\n", this->buffer_mode);
-    LOG_V( "vaDestroyConfig\n");
-
-    va_status = vaDestroyConfig (parent->va_display, parent->va_config);
-    if (va_status != VA_STATUS_SUCCESS)
-    {
-        LOG_E(
-            "Failed vaDestroyConfig\n");
-        ret = MIX_RESULT_FAIL;
-        CLEAN_UP;
-    }
+    this->shared_surfaces_cnt = shared_surfaces_cnt;
+    this->precreated_surfaces_cnt = precreated_surfaces_cnt;
+
+#if 0
+
+    int ii = 0;
+    for (ii=0; ii < alloc_surface_cnt; ii++) {
+
+        g_print ("self->usrptr[%d] = 0x%08x\n", ii, self->usrptr[ii]);
+        g_print ("usrptr[%d] = 0x%08x\n", ii, usrptr[ii]);
-    CLEAN_UP;
-}
-#undef CLEAN_UP
-MIX_RESULT mix_videofmtenc_h263_send_seq_params (MixVideoFormatEnc_H263 *mix)
-{
-
-    VAStatus va_status;
-    VAEncSequenceParameterBufferH263 h263_seq_param;
-    VABufferID seq_para_buf_id;
-
-
-    MixVideoFormatEnc *parent = NULL;
-
-    if (mix == NULL) {
-        LOG_E("mix = NULL\n");
-        return MIX_RESULT_NULL_PTR;
    }
-
-    LOG_V( "Begin\n\n");
-
-    if (!MIX_IS_VIDEOFORMATENC_H263(mix))
-        return 
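
The surface accounting above reduces to summing three allocation classes, at most one of which is normally non-zero for a given buffer_mode. A hypothetical restatement (SurfaceBudget and budget_for are illustrative names, not part of the patch):

    struct SurfaceBudget {
        uint normal;        // created below via vaCreateSurfaces()
        uint shared;        // imported from camera (CI) frames
        uint precreated;    // handed in by the app (self-alloc mode)
        uint total() const { return normal + shared + precreated; }
    };

    static SurfaceBudget budget_for(int buffer_mode, uint requested) {
        SurfaceBudget b = { 0, 0, 0 };
        switch (buffer_mode) {
        case MIX_BUFFER_UPSTREAM_ALLOC_CI:  b.normal = 2; b.shared = requested; break;
        case MIX_BUFFER_SELF_ALLOC_SURFACE: b.precreated = requested;           break;
        default: /* MIX_BUFFER_ALLOC_NORMAL */ b.normal = 8;                    break;
        }
        return b;
    }
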
MIX_RESULT_INVALID_PARAM; - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - /*set up the sequence params for HW*/ - h263_seq_param.bits_per_second= parent->bitrate; - h263_seq_param.frame_rate = 30; //hard-coded, driver need; - //(unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; - h263_seq_param.initial_qp = parent->initial_qp; - h263_seq_param.min_qp = parent->min_qp; - h263_seq_param.intra_period = parent->intra_period; - - //h263_seq_param.fixed_vop_rate = 30; + /*TODO: compute the surface number*/ + int numSurfaces; - LOG_V( - "===h263 sequence params===\n"); - - LOG_I( "bitrate = %d\n", - h263_seq_param.bits_per_second); - LOG_I( "frame_rate = %d\n", - h263_seq_param.frame_rate); - LOG_I( "initial_qp = %d\n", - h263_seq_param.initial_qp); - LOG_I( "min_qp = %d\n", - h263_seq_param.min_qp); - LOG_I( "intra_period = %d\n\n", - h263_seq_param.intra_period); - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncSequenceParameterBufferType, - sizeof(h263_seq_param), - 1, &h263_seq_param, - &seq_para_buf_id); - if (va_status != VA_STATUS_SUCCESS) + if (parent->share_buf_mode) { + numSurfaces = 2; + } + else { + numSurfaces = 2; + parent->ci_frame_num = 0; + } + + //self->surface_num = numSurfaces + parent->ci_frame_num; +#endif + + this->surface_num = normal_surfaces_cnt + shared_surfaces_cnt + precreated_surfaces_cnt; + + //surfaces = g_malloc(sizeof(VASurfaceID)*normal_surfaces_cnt); + surfaces = new VASurfaceID[normal_surfaces_cnt] ; + if (surfaces == NULL) { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; + LOG_E( + "Failed allocate surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &seq_para_buf_id, 1); - if (va_status != VA_STATUS_SUCCESS) + + //self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); + this->surfaces = new VASurfaceID[this->surface_num] ; + if (this->surfaces == NULL) { - LOG_E( - "Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); - return MIX_RESULT_FAIL; + LOG_E( + "Failed allocate private surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; } - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; - -} -MIX_RESULT mix_videofmtenc_h263_send_picture_parameter (MixVideoFormatEnc_H263 *mix) -{ - VAStatus va_status; - VAEncPictureParameterBufferH263 h263_pic_param; - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) { - LOG_E("mix = NULL\n"); - return MIX_RESULT_NULL_PTR; + LOG_V( "vaCreateSurfaces\n"); + + va_status = vaCreateSurfaces(va_display, this->picture_width, + this->picture_height, this->va_format, + normal_surfaces_cnt, surfaces); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaCreateSurfaces\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } - LOG_V( "Begin\n\n"); - -#if 0 //not needed currently - MixVideoConfigParamsEncH263 * params_h263 - = MIX_VIDEOCONFIGPARAMSENC_H263 (config_params_enc); -#endif - - if (! MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - /*set picture params for HW*/ - h263_pic_param.reference_picture = mix->ref_frame->frame_id; - h263_pic_param.reconstructed_picture = mix->rec_frame->frame_id; - h263_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; - h263_pic_param.picture_width = parent->picture_width; - h263_pic_param.picture_height = parent->picture_height; - h263_pic_param.picture_type = mix->is_intra ? 
VAEncPictureTypeIntra : VAEncPictureTypePredictive; - - - - LOG_V( - "======h263 picture params======\n"); - LOG_I( "reference_picture = 0x%08x\n", - h263_pic_param.reference_picture); - LOG_I( "reconstructed_picture = 0x%08x\n", - h263_pic_param.reconstructed_picture); - LOG_I( "coded_buf = 0x%08x\n", - h263_pic_param.coded_buf); - LOG_I( "coded_buf_index = %d\n", - mix->coded_buf_index); - LOG_I( "picture_width = %d\n", - h263_pic_param.picture_width); - LOG_I( "picture_height = %d\n", - h263_pic_param.picture_height); - LOG_I( "picture_type = %d\n\n", - h263_pic_param.picture_type); - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncPictureParameterBufferType, - sizeof(h263_pic_param), - 1,&h263_pic_param, - &mix->pic_param_buf); - - if (va_status != VA_STATUS_SUCCESS) + if (shared_surfaces_cnt != 0) { +// self->shared_surfaces =g_malloc(sizeof(VASurfaceID) * shared_surfaces_cnt); + this->shared_surfaces =new VASurfaceID[shared_surfaces_cnt] ; + if (this->shared_surfaces == NULL) { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; + LOG_E( + "Failed allocate shared surface\n"); + + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + } + + + switch (this->buffer_mode) { + case MIX_BUFFER_UPSTREAM_ALLOC_CI: + { + for (index = 0; index < this->shared_surfaces_cnt; index++) { + + va_status = vaCreateSurfaceFromCIFrame(va_display, + (ulong) (ci_info->ci_frame_id[index]), + &this->shared_surfaces[index]); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E("Failed to vaCreateSurfaceFromCIFrame\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + this->surfaces[index] = this->shared_surfaces[index]; + } + } + break; + case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: + /*To be develped*/ + break; + case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: + /*To be develped*/ + break; + case MIX_BUFFER_ALLOC_NORMAL: + break; + case MIX_BUFFER_SELF_ALLOC_SURFACE: + { + for (index = 0; index < requested_surface_info->surface_cnt; index ++) { + this->surfaces[index] = requested_surface_info->surface_allocated[index]; + } + } + break; + default: + break; + } + + for (index = 0; index < normal_surfaces_cnt; index++) { + this->surfaces[precreated_surfaces_cnt + shared_surfaces_cnt + index] = surfaces[index]; + } + + LOG_V( "assign surface Done\n"); + LOG_I( "Created %d libva surfaces\n", this->surface_num); + + +#if 0 //current put this in gst + images = g_malloc(sizeof(VAImage)*numSurfaces); + if (images == NULL) + { + g_mutex_unlock(parent->objectlock); + return MIX_RESULT_FAIL; + } + + for (index = 0; index < numSurfaces; index++) { + //Derive an VAImage from an existing surface. 
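
For reference, the derive/map round-trip the disabled block here describes, in isolation (standard libva calls; va_display and surface are assumed valid):

    VAImage image;
    uint8 *pixels = NULL;
    if (vaDeriveImage(va_display, surface, &image) == VA_STATUS_SUCCESS) {
        if (vaMapBuffer(va_display, image.buf, (void **)&pixels) == VA_STATUS_SUCCESS) {
            // pixels is CPU-visible here; the layout is described by
            // image.pitches[] and image.offsets[]
            vaUnmapBuffer(va_display, image.buf);
        }
        vaDestroyImage(va_display, image.image_id);  // derived images must be destroyed
    }
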
+ //The image buffer can then be mapped/unmapped for CPU access + va_status = vaDeriveImage(va_display, surfaces[index], + &images[index]); + } +#endif + + LOG_V( "mix_surfacepool_new\n"); + + this->surfacepool = mix_surfacepool_new(); + if (surface_pool) + *surface_pool = this->surfacepool; + //which is useful to check before encode + + if (this->surfacepool == NULL) + { + LOG_E( + "Failed to mix_surfacepool_new\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "mix_surfacepool_initialize\n"); + + ret = mix_surfacepool_initialize(this->surfacepool, + this->surfaces, this->surface_num, va_display); + + switch (ret) + { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + + LOG_E("Error init failure\n"); + + ret = MIX_RESULT_ALREADY_INIT; + goto cleanup; + default: + break; + } + + + //Initialize and save the VA context ID + LOG_V( "vaCreateContext\n"); + + va_status = vaCreateContext(va_display, this->va_config, + this->picture_width, this->picture_height, + VA_PROGRESSIVE, this->surfaces, this->surface_num, + &(this->va_context)); + + LOG_I( + "Created libva context width %d, height %d\n", + this->picture_width, this->picture_height); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateContext\n"); + LOG_I( "va_status = %d\n", + (uint)va_status); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + ret = GetMaxEncodedBufSize(&max_size); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videofmtenc_h263_get_max_encoded_buf_size\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, this->va_context, + VAEncCodedBufferType, + this->coded_buf_size, // + 1, NULL, + &this->coded_buf[0]); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + /*Create coded buffer for output*/ + va_status = vaCreateBuffer (va_display, this->va_context, + VAEncCodedBufferType, + this->coded_buf_size, // + 1, NULL, + &(this->coded_buf[1])); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + +#ifdef SHOW_SRC + Display * display = XOpenDisplay (NULL); + + LOG_I( "display = 0x%08x\n", + (uint) display); + win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, + parent->picture_width, parent->picture_height, 0, 0, + WhitePixel(display, 0)); + XMapWindow(display, win); + XSelectInput(display, win, KeyPressMask | StructureNotifyMask); + + XSync(display, False); + LOG_I( "va_display = 0x%08x\n", + (uint) va_display); + +#endif /* SHOW_SRC */ + +cleanup: + + if (ret == MIX_RESULT_SUCCESS) { + this->initialized = TRUE; + } + + /*free profiles and entrypoints*/ + if (va_profiles) +// g_free(va_profiles); + delete[]va_profiles; + if (va_entrypoints) +// g_free(va_entrypoints); + delete[]va_entrypoints; + if (surfaces) +// g_free(surfaces); + delete[]surfaces; + +// g_mutex_unlock(parent->objectlock); + Unlock(); + + LOG_V( "end\n"); + + return ret; + +} + +MIX_RESULT MixVideoFormatEnc_H263::Encode( + MixBuffer * bufin[], int bufincnt, + MixIOVec * iovout[], int iovoutcnt, + MixVideoEncodeParams * encode_params) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + /*currenly only support one input and output buffer*/ + + if (bufincnt != 1 || iovoutcnt != 1) { + LOG_E( + "buffer count not equel to 1\n"); + LOG_E( + "maybe some exception occurs\n"); + } + + if 
(bufin[0] == NULL || iovout[0] == NULL) { + LOG_E( + "!bufin[0] ||!iovout[0]\n"); + return MIX_RESULT_NULL_PTR; + } + +#if 0 + if (parent_class->encode) { + return parent_class->encode(mix, bufin, bufincnt, iovout, + iovoutcnt, encode_params); + } +#endif + + + LOG_V( "Locking\n"); +// g_mutex_lock(parent->objectlock); + Lock(); + + + //TODO: also we could move some encode Preparation work to here + + LOG_V( + "mix_videofmtenc_h263_process_encode\n"); + + ret = _process_encode(bufin[0], iovout[0]); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed mix_videofmtenc_h263_process_encode\n"); + goto cleanup; + } + +cleanup: + + LOG_V( "UnLocking\n"); + +// g_mutex_unlock(parent->objectlock); + Unlock(); + + LOG_V( "end\n"); + + return ret; + +} + +MIX_RESULT MixVideoFormatEnc_H263::Flush() { + //MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + /*not chain to parent flush func*/ +#if 0 + if (parent_class->flush) { + return parent_class->flush(mix, msg); + } +#endif + +// g_mutex_lock(mix->objectlock); + Lock(); + + /*unref the current source surface*/ + if (this->cur_frame != NULL) + { + mix_videoframe_unref (this->cur_frame); + this->cur_frame = NULL; + } + + /*unref the reconstructed surface*/ + if (this->rec_frame != NULL) + { + mix_videoframe_unref (this->rec_frame); + this->rec_frame = NULL; + } + + /*unref the reference surface*/ + if (this->ref_frame != NULL) + { + mix_videoframe_unref (this->ref_frame); + this->ref_frame = NULL; + } + + if (this->last_mix_buffer) { + mix_buffer_unref(this->last_mix_buffer); + this->last_mix_buffer = NULL; + } + + /*reset the properities*/ + this->encoded_frames = 0; + this->pic_skipped = FALSE; + this->is_intra = TRUE; + +// g_mutex_unlock(mix->objectlock); + Unlock(); + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT MixVideoFormatEnc_H263::Deinitialize() { + VAStatus va_status; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + + LOG_V( "Begin\n"); + + MixVideoFormatEnc::Deinitialize(); + + if (ret != MIX_RESULT_SUCCESS) + { + return ret; + } + + + LOG_V( "Release frames\n"); + +// g_mutex_lock(parent->objectlock); + Lock(); + +#if 0 + /*unref the current source surface*/ + if (self->cur_frame != NULL) + { + mix_videoframe_unref (self->cur_frame); + self->cur_frame = NULL; + } +#endif + + /*unref the reconstructed surface*/ + if (this->rec_frame != NULL) + { + mix_videoframe_unref (this->rec_frame); + this->rec_frame = NULL; + } + + /*unref the reference surface*/ + if (this->ref_frame != NULL) + { + mix_videoframe_unref (this->ref_frame); + this->ref_frame = NULL; + } + + if (this->lookup_frame != NULL) + { + mix_videoframe_unref (this->lookup_frame); + this->lookup_frame = NULL; + } + + if (this->last_mix_buffer) { + mix_buffer_unref(this->last_mix_buffer); + this->last_mix_buffer = NULL; + } + + LOG_V( "Release surfaces\n"); + + if (this->shared_surfaces) + { +// g_free (self->shared_surfaces); + delete[]this->shared_surfaces; + this->shared_surfaces = NULL; + } + + if (this->surfaces) + { +// g_free (self->surfaces); + delete[]this->surfaces; + this->surfaces = NULL; + } + + if (this->usrptr) { +// g_free (self->usrptr); + delete[]this->usrptr; + this->usrptr = NULL; + } + + LOG_V( "vaDestroyContext\n"); + + va_status = vaDestroyContext (this->va_display, this->va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaDestroyContext\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "vaDestroyConfig\n"); + + va_status = vaDestroyConfig (this->va_display, this->va_config); + if (va_status != 
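
Every early exit in Encode(), Flush(), and Deinitialize() must balance Lock() with Unlock(), which the goto cleanup pattern makes easy to miss. A scope guard is one hedged alternative (sketch only, not what the patch does; it assumes the Lock()/Unlock() members introduced by this series are accessible):

    class ScopedLock {
    public:
        explicit ScopedLock(MixVideoFormatEnc *enc) : enc_(enc) { enc_->Lock(); }
        ~ScopedLock() { enc_->Unlock(); }    // runs on every return path
    private:
        MixVideoFormatEnc *enc_;
    };

    // e.g. at the top of Encode(): ScopedLock guard(this);
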
VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+            "Failed vaDestroyConfig\n");
+        ret = MIX_RESULT_FAIL;
+        goto cleanup;
+    }
+
+cleanup:
+
+    this->initialized = TRUE;
+
+//  g_mutex_unlock(parent->objectlock);
+    Unlock();
+
+    LOG_V( "end\n");
+
+    return ret;
+}
+
+MIX_RESULT MixVideoFormatEnc_H263::GetMaxEncodedBufSize (uint *max_size) {
+
+
+    LOG_V( "Begin\n");
+
+    if (MIX_IS_VIDEOFORMATENC_H263(this)) {
+
+        if (this->coded_buf_size > 0) {
+            *max_size = this->coded_buf_size;
+            LOG_V ("Already calculated the max encoded size, returning the value directly");
+            return MIX_RESULT_SUCCESS;
+        }
+
+        /*based on the rate control mode, calculate the default encoded buffer size*/
+        if (this->va_rcmode_h263 == VA_RC_NONE) {
+            this->coded_buf_size =
+                (this->picture_width* this->picture_height * 830) / (16 * 16);
+            // set to value according to QP
+        }
+        else {
+            this->coded_buf_size = this->bitrate/ 4;
+        }
+
+        this->coded_buf_size =
+            max (this->coded_buf_size ,
+                (this->picture_width* this->picture_height * 830) / (16 * 16));
+
+        /*in case we got a very large user input bit rate value*/
+        this->coded_buf_size =
+            max(this->coded_buf_size,
+                (this->picture_width * this->picture_height * 1.5 * 8));
+        this->coded_buf_size = (this->coded_buf_size + 15) &(~15);
+    }
+    else
+    {
+        LOG_E(
+            "not an H263 video encode object\n");
+        return MIX_RESULT_INVALID_PARAM;
+    }
+
+    *max_size = this->coded_buf_size;
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+
+}
+
+MIX_RESULT MixVideoFormatEnc_H263::_process_encode (MixBuffer * bufin,
+    MixIOVec * iovout) {
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    VAStatus va_status = VA_STATUS_SUCCESS;
+    VADisplay va_display = NULL;
+    VAContextID va_context;
+    ulong surface = 0;
+    uint16 width, height;
+
+    MixVideoFrame * tmp_frame;
+    uint8 *buf;
+    VACodedBufferSegment *coded_seg = NULL;
+    int num_seg = 0;
+    uint total_size = 0;
+    uint size = 0;
+
+    if ((bufin == NULL) || (iovout == NULL)) {
+        LOG_E(
+            "bufin == NULL || iovout == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "Begin\n");
+
+
+    va_display = this->va_display;
+    va_context = this->va_context;
+    width = this->picture_width;
+    height = this->picture_height;
+
+
+    LOG_I( "encoded_frames = %d\n",
+        this->encoded_frames);
+    LOG_I( "is_intra = %d\n",
+        this->is_intra);
+    LOG_I( "ci_frame_id = 0x%08x\n",
+        (uint) this->ci_frame_id);
+
+    /* determine the picture type*/
+    if ((this->encoded_frames % this->intra_period) == 0) {
+        this->is_intra = TRUE;
+    } else {
+        this->is_intra = FALSE;
+    }
+
+    LOG_I( "is_intra_picture = %d\n",
+        this->is_intra);
+
+    LOG_V(
+        "Get Surface from the pool\n");
+
+    /*currently we use one surface for source data,
+     * one for reference and one for reconstructed*/
+    /*TODO, could be refined here*/
+
+
+
+    switch (this->buffer_mode) {
+    case MIX_BUFFER_UPSTREAM_ALLOC_CI:
+    {
+
+
+        //MixVideoFrame * frame = mix_videoframe_new();
+        if (this->lookup_frame == NULL)
+        {
+            this->lookup_frame = mix_videoframe_new ();
+            if (this->lookup_frame == NULL)
+            {
+                LOG_E("mix_videoframe_new() failed!\n");
+                ret = MIX_RESULT_NO_MEMORY;
+                goto cleanup;
+            }
+        }
+
+        if (this->ref_frame == NULL)
+        {
+            ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E(
+                    "mix_videoframe_set_ci_frame_idx failed\n");
+                goto cleanup;
+            }
+
+            ret = mix_surfacepool_get_frame_with_ci_frameidx
+                (this->surfacepool, &this->ref_frame, this->lookup_frame);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E(
+                    "get reference surface from pool failed\n");
+                goto cleanup;
+            }
+        }
-
-
-    va_status = 
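
Concretely, the sizing in GetMaxEncodedBufSize works out as follows for 640x480 at 2 Mbit/s (a worked restatement of the arithmetic above; note the uncompressed-frame floor of width * height * 1.5 * 8 always dominates the 830/256 ≈ 3.24 per-pixel term, so the first max() appears to be a no-op):

    uint w = 640, h = 480;
    uint by_qp      = (w * h * 830) / (16 * 16);   // 996000
    uint by_bitrate = 2000000 / 4;                 // 500000  (bitrate / 4)
    uint raw_floor  = (uint)(w * h * 1.5 * 8);     // 3686400
    uint buf_size   = max(max(by_qp, by_bitrate), raw_floor);
    buf_size = (buf_size + 15) & (~15);            // 3686400, already 16-aligned
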
vaRenderPicture(parent->va_display, parent->va_context, - &mix->pic_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) + + if (this->rec_frame == NULL) { - LOG_E( - "Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; - -} + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->rec_frame, this->lookup_frame); -MIX_RESULT mix_videofmtenc_h263_send_slice_parameter (MixVideoFormatEnc_H263 *mix) -{ - VAStatus va_status; - - guint slice_num; - guint slice_height; - guint slice_index; - guint slice_height_in_mb; - - if (mix == NULL) { - LOG_E("mix = NULL\n"); - return MIX_RESULT_NULL_PTR; - } + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get recontructed surface from pool failed\n"); + goto cleanup; + } + } - LOG_V("Begin\n\n"); - - - MixVideoFormatEnc *parent = NULL; - - if (! MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; + //mix_videoframe_unref (mix->cur_frame); - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - //slice_num = mix->slice_num; - slice_num = 1; // one slice per picture; - slice_height = parent->picture_height / slice_num; - - slice_height += 15; - slice_height &= (~15); - - va_status = vaCreateBuffer (parent->va_display, parent->va_context, - VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), - slice_num, NULL, - &mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E("Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; + if (this->need_display) { + this->cur_frame = NULL; } - - VAEncSliceParameterBuffer *slice_param, *current_slice; - - va_status = vaMapBuffer(parent->va_display, - mix->slice_param_buf, - (void **)&slice_param); - - if (va_status != VA_STATUS_SUCCESS) + + if (this->cur_frame == NULL) { - LOG_E("Failed to vaMapBuffer\n"); - return MIX_RESULT_FAIL; - } - - current_slice = slice_param; - - for (slice_index = 0; slice_index < slice_num; slice_index++) { - current_slice = slice_param + slice_index; - slice_height_in_mb = - min (slice_height, parent->picture_height - - slice_index * slice_height) / 16; - - // starting MB row number for this slice - current_slice->start_row_number = slice_index * slice_height / 16; - // slice height measured in MB - current_slice->slice_height = slice_height_in_mb; - current_slice->slice_flags.bits.is_intra = mix->is_intra; - current_slice->slice_flags.bits.disable_deblocking_filter_idc - = mix->disable_deblocking_filter_idc; - - LOG_V("======h263 slice params======\n"); - - LOG_I("slice_index = %d\n", - (gint) slice_index); - LOG_I("start_row_number = %d\n", - (gint) current_slice->start_row_number); - LOG_I("slice_height_in_mb = %d\n", - (gint) current_slice->slice_height); - LOG_I("slice.is_intra = %d\n", - (gint) current_slice->slice_flags.bits.is_intra); - LOG_I("disable_deblocking_filter_idc = %d\n\n", - (gint) mix->disable_deblocking_filter_idc); - + uint ci_idx; +#ifndef ANDROID + memcpy (&ci_idx, bufin->data, bufin->size); +#else + memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); +#endif + + LOG_I( + "surface_num = %d\n", this->surface_num); + LOG_I( + "ci_frame_idx = %d\n", ci_idx); + + if (ci_idx > this->surface_num - 2) { + LOG_E( + "the CI frame idx is too bigger than CI frame number\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + + 
} + + + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, ci_idx); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->cur_frame, this->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get current working surface from pool failed\n"); + goto cleanup; + + } } - - va_status = vaUnmapBuffer(parent->va_display, mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) + + ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); + + } + + /* + * end of CI buffer allocation mode + */ + + break; + case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: + break; + + case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: + + break; + + case MIX_BUFFER_SELF_ALLOC_SURFACE: + { + if (this->lookup_frame == NULL) { - LOG_E("Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } + this->lookup_frame = mix_videoframe_new (); + if (this->lookup_frame == NULL) + { + LOG_E("mix_videoframe_new() failed!\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + } + + uint surface_idx = (uint) -1; //fixme, temp use a big value + uint idx = 0; + + LOG_I ("bufin->data = 0x%08x\n", bufin->data); + + for (idx = 0; idx < this->alloc_surface_cnt; idx++) { - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->slice_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) + LOG_I ("this->usrptr[%d] = 0x%08x\n", idx, this->usrptr[idx]); + + if (bufin->data == this->usrptr[idx]) + surface_idx = idx; + } + + LOG_I( + "surface_num = %d\n", this->surface_num); + LOG_I( + "surface_idx = %d\n", surface_idx); + + if (surface_idx > this->surface_num - 2) { + LOG_W( + "the Surface idx is too big, most likely the buffer passed in is not allocated by us\n"); + ret = MIX_RESULT_FAIL; + goto no_share_mode; + + } + + if (this->ref_frame == NULL) { - LOG_E("Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - LOG_V("end\n"); - - return MIX_RESULT_SUCCESS; -} - - -#define CLEAN_UP {\ - if(ret != MIX_RESULT_SUCCESS) {\ - if(iovout->data) {\ - g_free(iovout->data);\ - iovout->data = NULL;\ - }\ - }\ - LOG_V( "end\n"); \ - return MIX_RESULT_SUCCESS;} - -MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, - MixBuffer * bufin, MixIOVec * iovout) -{ - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus va_status = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; - VAContextID va_context; - gulong surface = 0; - guint16 width, height; - - MixVideoFrame * tmp_frame; - guint8 *buf; - - if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { - LOG_E( - "mix == NUL) || bufin == NULL || iovout == NULL\n"); - return MIX_RESULT_NULL_PTR; - } + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } - LOG_V( "Begin\n"); - - if (! 
MIX_IS_VIDEOFORMATENC_H263(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - va_display = parent->va_display; - va_context = parent->va_context; - width = parent->picture_width; - height = parent->picture_height; - - - LOG_I( "encoded_frames = %d\n", - mix->encoded_frames); - LOG_I( "is_intra = %d\n", - mix->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", - (guint) parent->ci_frame_id); - - /* determine the picture type*/ - if ((mix->encoded_frames % parent->intra_period) == 0) { - mix->is_intra = TRUE; - } else { - mix->is_intra = FALSE; - } - - LOG_I( "is_intra_picture = %d\n", - mix->is_intra); - - LOG_V( - "Get Surface from the pool\n"); - - /*current we use one surface for source data, - * one for reference and one for reconstructed*/ - /*TODO, could be refine here*/ - - if (!parent->share_buf_mode) { - LOG_V( - "We are NOT in share buffer mode\n"); - - if (mix->ref_frame == NULL) + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->ref_frame, this->lookup_frame); + if (ret != MIX_RESULT_SUCCESS) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - CLEAN_UP; - } + LOG_E( + "get reference surface from pool failed\n"); + goto cleanup; } - - if (mix->rec_frame == NULL) + } + + if (this->rec_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - CLEAN_UP; - } + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; } - if (parent->need_display) { - mix->cur_frame = NULL; + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->rec_frame, this->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get recontructed surface from pool failed\n"); + goto cleanup; } - - if (mix->cur_frame == NULL) + } + + //mix_videoframe_unref (mix->cur_frame); + + if (this->need_display) { + this->cur_frame = NULL; + } + + if (this->cur_frame == NULL) + { + + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, surface_idx); + if (ret != MIX_RESULT_SUCCESS) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - CLEAN_UP; - } + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; } - - LOG_V( "Get Surface Done\n"); - - - VAImage src_image; - guint8 *pvbuf; - guint8 *dst_y; - guint8 *dst_uv; - int i,j; - - LOG_V( - "map source data to surface\n"); - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->cur_frame, this->lookup_frame); + if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); - CLEAN_UP; + LOG_E( + "get current working surface from pool failed\n"); + goto cleanup; + } - - - LOG_I( - "surface id = 0x%08x\n", (guint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); - //need to destroy - - if (va_status != VA_STATUS_SUCCESS) + } + + ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); + + } + + break; + /* + * end of Self buffer allocation mode + */ + + case MIX_BUFFER_ALLOC_NORMAL: + { + +no_share_mode: + + LOG_V( + "We 
are NOT in share buffer mode\n"); + + if (this->ref_frame == NULL) + { + ret = mix_surfacepool_get(this->surfacepool, &this->ref_frame); + if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used { - LOG_E( - "Failed to vaDeriveImage\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; + LOG_E( + "Failed to mix_surfacepool_get\n"); + goto cleanup; } - - VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); - - - va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) + } + + if (this->rec_frame == NULL) + { + ret = mix_surfacepool_get(this->surfacepool, &this->rec_frame); + if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Failed to vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - LOG_V( - "vaImage information\n"); - LOG_I( - "image->pitches[0] = %d\n", image->pitches[0]); - LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); - LOG_I( - "image->offsets[0] = %d\n", image->offsets[0]); - LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); - LOG_I( - "image->num_planes = %d\n", image->num_planes); - LOG_I( - "image->width = %d\n", image->width); - LOG_I( - "image->height = %d\n", image->height); - - LOG_I( - "input buf size = %d\n", bufin->size); - - guint8 *inbuf = bufin->data; - -#ifndef ANDROID -#define USE_SRC_FMT_YUV420 -#else -#define USE_SRC_FMT_NV21 + LOG_E( + "Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + if (this->need_display) { + this->cur_frame = NULL; + } + + if (this->cur_frame == NULL) + { + ret = mix_surfacepool_get(this->surfacepool, &this->cur_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + LOG_V( "Get Surface Done\n"); + + + VAImage src_image; + uint8 *pvbuf; + uint8 *dst_y; + uint8 *dst_uv; + int i,j; + + LOG_V( + "map source data to surface\n"); + + ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed to mix_videoframe_get_frame_id\n"); + goto cleanup; + } + + + LOG_I( + "surface id = 0x%08x\n", (uint) surface); + + va_status = vaDeriveImage(va_display, surface, &src_image); + //need to destroy + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDeriveImage\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + VAImage *image = &src_image; + + LOG_V( "vaDeriveImage Done\n"); + + + va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed to vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "vaImage information\n"); + LOG_I( + "image->pitches[0] = %d\n", image->pitches[0]); + LOG_I( + "image->pitches[1] = %d\n", image->pitches[1]); + LOG_I( + "image->offsets[0] = %d\n", image->offsets[0]); + LOG_I( + "image->offsets[1] = %d\n", image->offsets[1]); + LOG_I( + "image->num_planes = %d\n", image->num_planes); + LOG_I( + "image->width = %d\n", image->width); + LOG_I( + "image->height = %d\n", image->height); + + LOG_I( + "input buf size = %d\n", bufin->size); + + uint8 *inbuf = bufin->data; + +#ifdef ANDROID +#define USE_SRC_FMT_NV12 +#endif + int offset_uv = width * height; + uint8 *inbuf_uv = inbuf + offset_uv; + int height_uv = height / 2; + int width_uv = width; + +#ifdef ANDROID + //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 + + dst_y = pvbuf + image->offsets[0]; + for (i = 0; i < height; i++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + +#ifdef USE_SRC_FMT_NV12 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i 
< height_uv; i++) { + memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); + dst_uv += image->pitches[1]; + } +#else //USE_SRC_FMT_NV21 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i ++) { + for (j = 0; j < width_uv; j += 2) { + dst_uv[j] = inbuf_uv[j+1]; //u + dst_uv[j+1] = inbuf_uv[j]; //v + } + dst_uv += image->pitches[1]; + inbuf_uv += width_uv; + } #endif -#ifdef USE_SRC_FMT_YUV420 - /*need to convert YUV420 to NV12*/ +#else + + if (this->raw_format == MIX_RAW_TARGET_FORMAT_YUV420) { dst_y = pvbuf +image->offsets[0]; - + for (i = 0; i < height; i ++) { memcpy (dst_y, inbuf + i * width, width); dst_y += image->pitches[0]; } - + dst_uv = pvbuf + image->offsets[1]; - + for (i = 0; i < height / 2; i ++) { for (j = 0; j < width; j+=2) { dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = + dst_uv [j + 1] = inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; } dst_uv += image->pitches[1]; } - -#else //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 - int offset_uv = width * height; - guint8 *inbuf_uv = inbuf + offset_uv; - int height_uv = height / 2; - int width_uv = width; + } + + else if (this->raw_format == MIX_RAW_TARGET_FORMAT_NV12) { dst_y = pvbuf + image->offsets[0]; for (i = 0; i < height; i++) { @@ -1363,500 +1457,717 @@ MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, dst_y += image->pitches[0]; } -#ifdef USE_SRC_FMT_NV12 dst_uv = pvbuf + image->offsets[1]; for (i = 0; i < height_uv; i++) { memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); dst_uv += image->pitches[1]; } -#else //USE_SRC_FMT_NV21 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i ++) { - for (j = 0; j < width_uv; j += 2) { - dst_uv[j] = inbuf_uv[j+1]; //u - dst_uv[j+1] = inbuf_uv[j]; //v - } - dst_uv += image->pitches[1]; - inbuf_uv += width_uv; - } -#endif + } + else { + LOG_E("Raw format not supoort\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + #endif //USE_SRC_FMT_YUV420 - - va_status = vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDestroyImage\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - LOG_V( - "Map source data to surface done\n"); - + + va_status = vaUnmapBuffer(va_display, image->buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + va_status = vaDestroyImage(va_display, src_image.image_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaDestroyImage\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( + "Map source data to surface done\n"); + } + break; + default: + break; + + } + + LOG_V( "vaBeginPicture\n"); + LOG_I( "va_context = 0x%08x\n",(uint)va_context); + LOG_I( "surface = 0x%08x\n",(uint)surface); + LOG_I( "va_display = 0x%08x\n",(uint)va_display); + + + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaBeginPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + ret = _send_encode_command (); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); + goto cleanup; + } + + + if ((this->va_rcmode == VA_RC_NONE) || this->encoded_frames == 0) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != 
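
The YUV420 branch above is a planar-to-NV12 repack. The same transform in isolation, for a tightly packed I420 input (sketch; memcpy is from <string.h>):

    static void i420_to_nv12(const uint8 *src, uint8 *dst_y, int y_pitch,
                             uint8 *dst_uv, int uv_pitch, int w, int h) {
        const uint8 *u = src + w * h;              // I420: Y plane, then U, then V
        const uint8 *v = u + (w / 2) * (h / 2);
        for (int i = 0; i < h; i++)                // luma copies straight across
            memcpy(dst_y + i * y_pitch, src + i * w, w);
        for (int i = 0; i < h / 2; i++) {          // chroma interleaves U,V pairs
            uint8 *row = dst_uv + i * uv_pitch;
            for (int j = 0; j < w / 2; j++) {
                row[2 * j]     = u[i * (w / 2) + j];
                row[2 * j + 1] = v[i * (w / 2) + j];
            }
        }
    }
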
VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + } + + if (this->encoded_frames == 0) { + this->encoded_frames ++; + this->last_coded_buf = this->coded_buf[this->coded_buf_index]; + this->coded_buf_index ++; + this->coded_buf_index %=2; + + this->last_frame = this->cur_frame; + + + /* determine the picture type*/ + if ((this->encoded_frames % this->intra_period) == 0) { + this->is_intra = TRUE; + } else { + this->is_intra = FALSE; + } + + tmp_frame = this->rec_frame; + this->rec_frame= this->ref_frame; + this->ref_frame = tmp_frame; + + + } + + LOG_V( "vaSyncSurface\n"); + + va_status = vaSyncSurface(va_display, this->last_frame->frame_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaSyncSurface\n"); + //return MIX_RESULT_FAIL; + } + + + LOG_V( + "Start to get encoded data\n"); + + /*get encoded data from the VA buffer*/ + va_status = vaMapBuffer (va_display, this->last_coded_buf, (void **)&buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + + coded_seg = (VACodedBufferSegment *)buf; + num_seg = 1; + + while (1) { + total_size += coded_seg->size; + + if (coded_seg->next == NULL) + break; + + coded_seg = (VACodedBufferSegment *)coded_seg->next; + num_seg ++; + } + + +#if 0 + // first 4 bytes is the size of the buffer + memcpy (&(iovout->data_size), (void*)buf, 4); + //size = (uint*) buf; +#endif + + iovout->data_size = total_size; + + if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. + + //iovout->data = g_malloc (iovout->data_size); + iovout->data = new uchar[iovout->data_size]; + if (iovout->data == NULL) { + LOG_E( "iovout->data == NULL\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; } - - else {//if (!parent->share_buf_mode) - - MixVideoFrame * frame = mix_videoframe_new(); - - if (mix->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - CLEAN_UP; - } + } - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_frame, frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "get reference surface from pool failed\n"); - CLEAN_UP; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - CLEAN_UP; - } + //memcpy (iovout->data, buf + 16, iovout->data_size); - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_frame, frame); + coded_seg = (VACodedBufferSegment *)buf; + total_size = 0; - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get recontructed surface from pool failed\n"); - CLEAN_UP; - } - } + while (1) { - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - guint ci_idx; -#ifndef ANDROID - memcpy (&ci_idx, bufin->data, bufin->size); -#else - memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); -#endif - - LOG_I( - "surface_num = %d\n", mix->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > mix->surface_num - 2) { - LOG_E( - "the CI frame idx is too bigger than CI frame number\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - - ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - 
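
The two-pass walk above — size first, then copy — is the standard way to drain a VACodedBufferSegment chain. In isolation (mapped stands for the pointer returned by vaMapBuffer on the coded buffer):

    size_t total = 0;
    for (VACodedBufferSegment *s = (VACodedBufferSegment *)mapped;
         s != NULL; s = (VACodedBufferSegment *)s->next)
        total += s->size;                      // pass 1: total output size

    uint8 *out = new uint8[total];
    size_t off = 0;
    for (VACodedBufferSegment *s = (VACodedBufferSegment *)mapped;
         s != NULL; s = (VACodedBufferSegment *)s->next) {
        memcpy(out + off, s->buf, s->size);    // pass 2: concatenate segments
        off += s->size;
    }
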
"mix_videoframe_set_ci_frame_idx failed\n"); - CLEAN_UP; - } + memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); + total_size += coded_seg->size; - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_frame, frame); + if (coded_seg->next == NULL) + break; - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - CLEAN_UP; - } - } - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - + coded_seg = (VACodedBufferSegment *)coded_seg->next; + } + + iovout->buffer_size = iovout->data_size; + + LOG_I( + "out size is = %d\n", iovout->data_size); + + va_status = vaUnmapBuffer (va_display, this->last_coded_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "get encoded data done\n"); + + if (!((this->va_rcmode == VA_RC_NONE) || this->encoded_frames == 1)) { + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( "Failed vaEndPicture\n"); + return MIX_RESULT_FAIL; } - - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(guint)va_context); - LOG_I( "surface = 0x%08x\n",(guint)surface); - LOG_I( "va_display = 0x%08x\n",(guint)va_display); - + } + if (this->encoded_frames == 1) { va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) + if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaBeginPicture\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - ret = mix_videofmtenc_h263_send_encode_command (mix); + goto cleanup; + } + + ret = _send_encode_command (); if (ret != MIX_RESULT_SUCCESS) { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - CLEAN_UP; - } - - - if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } + LOG_E ( + "Failed mix_videofmtenc_h264_send_encode_command\n"); + goto cleanup; } - - if (mix->encoded_frames == 0) { - mix->encoded_frames ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - - mix->last_frame = mix->cur_frame; - - - /* determine the picture type*/ - if ((mix->encoded_frames % parent->intra_period) == 0) { - mix->is_intra = TRUE; - } else { - mix->is_intra = FALSE; - } - - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - - - } - - LOG_V( "vaSyncSurface\n"); - - va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaSyncSurface\n"); - //return MIX_RESULT_FAIL; - } - - - LOG_V( - "Start to get encoded data\n"); - - /*get encoded data from the VA buffer*/ - va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); - if (va_status != VA_STATUS_SUCCESS) + + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaMapBuffer\n"); + LOG_E( "Failed vaEndPicture\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; - } + goto cleanup; + } + + } - VACodedBufferSegment *coded_seg = NULL; - int num_seg = 0; - guint total_size = 0; - guint size = 0; + VASurfaceStatus status; - coded_seg = (VACodedBufferSegment *)buf; - num_seg = 1; + /*query the status of current surface*/ + va_status = vaQuerySurfaceStatus(va_display, surface, &status); + if (va_status != 
VA_STATUS_SUCCESS) + { + LOG_E( + "Failed vaQuerySurfaceStatus\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + this->pic_skipped = status & VASurfaceSkipped; - while (1) { - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; + //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame); - coded_seg = reinterpret_cast(coded_seg->next); - num_seg ++; + if (this->need_display) { + ret = mix_videoframe_set_sync_flag(this->cur_frame, TRUE); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to set sync_flag\n"); + goto cleanup; } + ret = mix_framemanager_enqueue(this->framemgr, this->cur_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_framemanager_enqueue\n"); + goto cleanup; + } + } + + /*update the reference surface and reconstructed surface */ + if (!this->pic_skipped) { + tmp_frame = this->rec_frame; + this->rec_frame= this->ref_frame; + this->ref_frame = tmp_frame; + } -#if 0 - // first 4 bytes is the size of the buffer - memcpy (&(iovout->data_size), (void*)buf, 4); - //size = (guint*) buf; -#endif - iovout->data_size = total_size; - - if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. - - iovout->data = (guchar*)g_malloc (iovout->data_size); - if (iovout->data == NULL) { - LOG_E( "iovout->data == NULL\n"); - ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; - } - } - - //memcpy (iovout->data, buf + 16, iovout->data_size); - - coded_seg = (VACodedBufferSegment *)buf; - total_size = 0; - - while (1) { - - memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - - coded_seg = reinterpret_cast(coded_seg->next); - } - - iovout->buffer_size = iovout->data_size; - - LOG_I( - "out size is = %d\n", iovout->data_size); - - va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - LOG_V( "get encoded data done\n"); - - if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - return MIX_RESULT_FAIL; - } - } - - if (mix->encoded_frames == 1) { - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - ret = mix_videofmtenc_h263_send_encode_command (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - CLEAN_UP; - } - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - } - - VASurfaceStatus status; - - /*query the status of current surface*/ - va_status = vaQuerySurfaceStatus(va_display, surface, &status); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaQuerySurfaceStatus\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - mix->pic_skipped = status & VASurfaceSkipped; - - //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame); - - if (parent->need_display) { - ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - CLEAN_UP; - } - - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed 
mix_framemanager_enqueue\n"); - CLEAN_UP; - } - } - - /*update the reference surface and reconstructed surface */ - if (!mix->pic_skipped) { - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - } - - #if 0 - if (mix->ref_frame != NULL) - mix_videoframe_unref (mix->ref_frame); - mix->ref_frame = mix->rec_frame; - - mix_videoframe_unref (mix->cur_frame); -#endif - - mix->encoded_frames ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - mix->last_frame = mix->cur_frame; - -#ifdef ANDROID - if(mix->last_mix_buffer) { - LOG_V("calls to mix_buffer_unref \n"); - LOG_V("refcount = %d\n", MIX_PARAMS(mix->last_mix_buffer)->refcount); - mix_buffer_unref(mix->last_mix_buffer); - } + if (this->ref_frame != NULL) + mix_videoframe_unref (this->ref_frame); + this->ref_frame = this->rec_frame; - LOG_V("ref the current bufin\n"); - mix->last_mix_buffer = mix_buffer_ref(bufin); + mix_videoframe_unref (this->cur_frame); #endif - if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_frame); - mix->cur_frame = NULL; - } - CLEAN_UP; -} -#undef CLEAN_UP + this->encoded_frames ++; + this->last_coded_buf = this->coded_buf[this->coded_buf_index]; + this->coded_buf_index ++; + this->coded_buf_index %=2; + this->last_frame = this->cur_frame; -MIX_RESULT mix_videofmtenc_h263_get_max_encoded_buf_size ( - MixVideoFormatEnc *mix, guint * max_size) -{ + if (this->last_mix_buffer) { + LOG_V("calls to mix_buffer_unref \n"); + LOG_V("refcount = %d\n", MIX_PARAMS(this->last_mix_buffer)->GetRefCount()); + mix_buffer_unref(this->last_mix_buffer); + } - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) - { - LOG_E( - "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; + LOG_V("ref the current bufin\n"); + this->last_mix_buffer = mix_buffer_ref(bufin); + + if (!(this->need_display)) { + mix_videoframe_unref (this->cur_frame); + this->cur_frame = NULL; } - - LOG_V( "Begin\n"); - - parent = MIX_VIDEOFORMATENC(mix); - MixVideoFormatEnc_H263 *self = MIX_VIDEOFORMATENC_H263 (mix); - if (MIX_IS_VIDEOFORMATENC_H263(self)) { +cleanup: - if (self->coded_buf_size > 0) { - *max_size = self->coded_buf_size; - LOG_V ("Already calculate the max encoded size, get the value directly"); - return MIX_RESULT_SUCCESS; - } - - /*base on the rate control mode to calculate the defaule encoded buffer size*/ - if (self->va_rcmode == VA_RC_NONE) { - self->coded_buf_size = - (parent->picture_width* parent->picture_height * 830) / (16 * 16); - // set to value according to QP - } - else { - self->coded_buf_size = parent->bitrate/ 4; + if (ret != MIX_RESULT_SUCCESS) { + if (iovout->data) { + //g_free(iovout->data); + delete[]iovout->data; + iovout->data = NULL; } - - self->coded_buf_size = - max (self->coded_buf_size , - (parent->picture_width* parent->picture_height * 830) / (16 * 16)); - - /*in case got a very large user input bit rate value*/ - self->coded_buf_size = - max(self->coded_buf_size, - (parent->picture_width * parent->picture_height * 1.5 * 8)); - self->coded_buf_size = (self->coded_buf_size + 15) &(~15); - } - else - { - LOG_E( - "not H263 video encode Object\n"); - return MIX_RESULT_INVALID_PARAM; } - *max_size = self->coded_buf_size; - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videofmtenc_h263_send_encode_command (MixVideoFormatEnc_H263 *mix) -{ +MIX_RESULT MixVideoFormatEnc_H263::_send_encode_command () { MIX_RESULT ret = MIX_RESULT_SUCCESS; - 
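
The three _send_* helpers below all submit one parameter buffer with the same create-then-render shape. The common pattern in isolation (sketch; seq_param stands for any of the VAEnc*ParameterBufferH263 payloads filled in by the helper):

    VABufferID buf_id;
    va_status = vaCreateBuffer(this->va_display, this->va_context,
                               VAEncSequenceParameterBufferType,
                               sizeof(seq_param), 1, &seq_param, &buf_id);
    if (va_status != VA_STATUS_SUCCESS)
        return MIX_RESULT_FAIL;
    // queue it against the picture currently between vaBeginPicture/vaEndPicture
    va_status = vaRenderPicture(this->va_display, this->va_context, &buf_id, 1);
    if (va_status != VA_STATUS_SUCCESS)
        return MIX_RESULT_FAIL;
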
LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - if (MIX_IS_VIDEOFORMATENC_H263(mix)) + if (MIX_IS_VIDEOFORMATENC_H263(this)) { - if (mix->encoded_frames == 0) { - ret = mix_videofmtenc_h263_send_seq_params (mix); + if (this->encoded_frames == 0) { + ret = _send_seq_params (); if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed mix_videofmtenc_h263_send_seq_params\n"); + LOG_E( + "Failed SendSeqParams\n"); return MIX_RESULT_FAIL; } } - - ret = mix_videofmtenc_h263_send_picture_parameter (mix); - + + ret = _send_picture_parameter (); + if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed mix_videofmtenc_h263_send_picture_parameter\n"); + LOG_E( + "Failed SendPictureParameter\n"); return MIX_RESULT_FAIL; } - - ret = mix_videofmtenc_h263_send_slice_parameter (mix); + + ret = _send_slice_parameter (); if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h263_send_slice_parameter\n"); + { + LOG_E( + "Failed SendSliceParameter\n"); return MIX_RESULT_FAIL; - } - + } + } else { LOG_E( - "not H263 video encode Object\n"); + "not H263 video encode Object\n"); return MIX_RESULT_INVALID_PARAM; } - LOG_V( "End\n"); + LOG_V( "End\n"); + + return MIX_RESULT_SUCCESS; +} + + +MIX_RESULT MixVideoFormatEnc_H263::_send_seq_params () { + VAStatus va_status; + VAEncSequenceParameterBufferH263 h263_seq_param; + VABufferID seq_para_buf_id; + + + LOG_V( "Begin\n\n"); + + + /*set up the sequence params for HW*/ + h263_seq_param.bits_per_second= this->bitrate; + h263_seq_param.frame_rate = 30; //hard-coded, driver need; + //(unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; + h263_seq_param.initial_qp = this->initial_qp; + h263_seq_param.min_qp = this->min_qp; + h263_seq_param.intra_period = this->intra_period; + + //h263_seq_param.fixed_vop_rate = 30; + + LOG_V( + "===h263 sequence params===\n"); + + LOG_I( "bitrate = %d\n", + h263_seq_param.bits_per_second); + LOG_I( "frame_rate = %d\n", + h263_seq_param.frame_rate); + LOG_I( "initial_qp = %d\n", + h263_seq_param.initial_qp); + LOG_I( "min_qp = %d\n", + h263_seq_param.min_qp); + LOG_I( "intra_period = %d\n\n", + h263_seq_param.intra_period); + + va_status = vaCreateBuffer(this->va_display, this->va_context, + VAEncSequenceParameterBufferType, + sizeof(h263_seq_param), + 1, &h263_seq_param, + &seq_para_buf_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(this->va_display, this->va_context, + &seq_para_buf_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT MixVideoFormatEnc_H263::_send_picture_parameter () { + + VAStatus va_status; + VAEncPictureParameterBufferH263 h263_pic_param; + + LOG_V( "Begin\n\n"); + +#if 0 //not needed currently + MixVideoConfigParamsEncH263 * params_h263 + = MIX_VIDEOCONFIGPARAMSENC_H263 (config_params_enc); +#endif + + + /*set picture params for HW*/ + h263_pic_param.reference_picture = this->ref_frame->frame_id; + h263_pic_param.reconstructed_picture = this->rec_frame->frame_id; + h263_pic_param.coded_buf = this->coded_buf[this->coded_buf_index]; + h263_pic_param.picture_width = this->picture_width; + h263_pic_param.picture_height = this->picture_height; + h263_pic_param.picture_type = this->is_intra ? 
VAEncPictureTypeIntra : VAEncPictureTypePredictive; + + + + LOG_V( + "======h263 picture params======\n"); + LOG_I( "reference_picture = 0x%08x\n", + h263_pic_param.reference_picture); + LOG_I( "reconstructed_picture = 0x%08x\n", + h263_pic_param.reconstructed_picture); + LOG_I( "coded_buf = 0x%08x\n", + h263_pic_param.coded_buf); + LOG_I( "coded_buf_index = %d\n", + this->coded_buf_index); + LOG_I( "picture_width = %d\n", + h263_pic_param.picture_width); + LOG_I( "picture_height = %d\n", + h263_pic_param.picture_height); + LOG_I( "picture_type = %d\n\n", + h263_pic_param.picture_type); + + va_status = vaCreateBuffer(this->va_display, this->va_context, + VAEncPictureParameterBufferType, + sizeof(h263_pic_param), + 1,&h263_pic_param, + &this->pic_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + + va_status = vaRenderPicture(this->va_display, this->va_context, + &this->pic_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); + return MIX_RESULT_FAIL; + } + + LOG_V( "end\n"); + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT MixVideoFormatEnc_H263::_send_slice_parameter () { + VAStatus va_status; + + uint slice_num; + uint slice_height; + uint slice_index; + uint slice_height_in_mb; + + + + LOG_V("Begin\n\n"); + + + //slice_num = mix->slice_num; + slice_num = 1; // one slice per picture; + slice_height = this->picture_height / slice_num; + + slice_height += 15; + slice_height &= (~15); + + va_status = vaCreateBuffer (this->va_display, this->va_context, + VAEncSliceParameterBufferType, + sizeof(VAEncSliceParameterBuffer), + slice_num, NULL, + &this->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E("Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + VAEncSliceParameterBuffer *slice_param, *current_slice; + + va_status = vaMapBuffer(this->va_display, + this->slice_param_buf, + (void **)&slice_param); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E("Failed to vaMapBuffer\n"); + return MIX_RESULT_FAIL; + } + + current_slice = slice_param; + + for (slice_index = 0; slice_index < slice_num; slice_index++) { + current_slice = slice_param + slice_index; + slice_height_in_mb = + min (slice_height, this->picture_height + - slice_index * slice_height) / 16; + + // starting MB row number for this slice + current_slice->start_row_number = slice_index * slice_height / 16; + // slice height measured in MB + current_slice->slice_height = slice_height_in_mb; + current_slice->slice_flags.bits.is_intra = this->is_intra; + current_slice->slice_flags.bits.disable_deblocking_filter_idc + = this->disable_deblocking_filter_idc; + + LOG_V("======h263 slice params======\n"); + + LOG_I("slice_index = %d\n", + (int) slice_index); + LOG_I("start_row_number = %d\n", + (int) current_slice->start_row_number); + LOG_I("slice_height_in_mb = %d\n", + (int) current_slice->slice_height); + LOG_I("slice.is_intra = %d\n", + (int) current_slice->slice_flags.bits.is_intra); + LOG_I("disable_deblocking_filter_idc = %d\n\n", + (int) this->disable_deblocking_filter_idc); + + } + + va_status = vaUnmapBuffer(this->va_display, this->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E("Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(this->va_display, this->va_context, + &this->slice_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E("Failed to 
vaRenderPicture\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V("end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT MixVideoFormatEnc_H263::_send_dynamic_bitrate () {
+    VAStatus va_status;
+
+    LOG_V( "Begin\n\n");
+
+    if (this->va_rcmode != MIX_RATE_CONTROL_VCM) {
+        LOG_W ("Not in VCM mode, but SendDynamicBitrate was called\n");
+        return MIX_RESULT_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer * misc_enc_param_buf;
+    VAEncMiscParameterRateControl * bitrate_control_param;
+    VABufferID misc_param_buffer_id;
+
+    va_status = vaCreateBuffer(this->va_display, this->va_context,
+            VAEncMiscParameterBufferType,
+            sizeof(VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl),
+            1, NULL,
+            &misc_param_buffer_id);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaCreateBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    va_status = vaMapBuffer (this->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaMapBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    misc_enc_param_buf->type = VAEncMiscParameterTypeRateControl;
+    bitrate_control_param = (VAEncMiscParameterRateControl *)misc_enc_param_buf->data;
+
+    bitrate_control_param->bits_per_second = this->bitrate;
+    bitrate_control_param->initial_qp = this->initial_qp;
+    bitrate_control_param->min_qp = this->min_qp;
+    bitrate_control_param->target_percentage = this->target_percentage;
+    bitrate_control_param->window_size = this->window_size;
+
+    va_status = vaUnmapBuffer(this->va_display, misc_param_buffer_id);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaUnmapBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    va_status = vaRenderPicture(this->va_display, this->va_context,
+            &misc_param_buffer_id, 1);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaRenderPicture\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT MixVideoFormatEnc_H263::_send_dynamic_framerate() {
+    VAStatus va_status;
+
+    LOG_V( "Begin\n\n");
+
+    if (this->va_rcmode != MIX_RATE_CONTROL_VCM) {
+        LOG_W ("Not in VCM mode, but SendDynamicFramerate was called\n");
+        return MIX_RESULT_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer * misc_enc_param_buf;
+    VAEncMiscParameterFrameRate * framerate_param;
+    VABufferID misc_param_buffer_id;
+
+    va_status = vaCreateBuffer(this->va_display, this->va_context,
+            VAEncMiscParameterBufferType,
+            sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterFrameRate),
+            1, NULL,
+            &misc_param_buffer_id);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaCreateBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    va_status = vaMapBuffer (this->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaMapBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    misc_enc_param_buf->type = VAEncMiscParameterTypeFrameRate;
+    framerate_param = (VAEncMiscParameterFrameRate *)misc_enc_param_buf->data;
+    framerate_param->framerate =
+        (unsigned int) (this->frame_rate_num + this->frame_rate_denom /2 ) / this->frame_rate_denom;
+
+    LOG_I( "frame rate = %d\n",
+            framerate_param->framerate);
+
+    va_status = vaUnmapBuffer(this->va_display, misc_param_buffer_id);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaUnmapBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    va_status = vaRenderPicture(this->va_display, this->va_context,
+            &misc_param_buffer_id, 1);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaRenderPicture\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    return MIX_RESULT_SUCCESS;
+}
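Both VCM-only helpers above follow the same VA-API idiom: create a misc-parameter buffer sized for the header plus one payload struct, map it, fill the payload through the mapping, unmap, then render it into the current picture. Below is a condensed sketch of that sequence, assuming only libva's public entry points from <va/va.h>; the helper name and single-field payload are illustrative, not this library's code.

#include <va/va.h>

static VAStatus send_dynamic_bitrate_sketch(VADisplay dpy, VAContextID ctx,
                                            unsigned int bits_per_second) {
    VABufferID buf_id;
    VAEncMiscParameterBuffer *hdr = NULL;

    VAStatus st = vaCreateBuffer(dpy, ctx, VAEncMiscParameterBufferType,
                                 sizeof(VAEncMiscParameterBuffer) +
                                     sizeof(VAEncMiscParameterRateControl),
                                 1, NULL, &buf_id);
    if (st != VA_STATUS_SUCCESS)
        return st;

    st = vaMapBuffer(dpy, buf_id, (void **)&hdr);
    if (st != VA_STATUS_SUCCESS)
        return st;

    hdr->type = VAEncMiscParameterTypeRateControl;
    VAEncMiscParameterRateControl *rc =
        (VAEncMiscParameterRateControl *)hdr->data;
    rc->bits_per_second = bits_per_second;  // payload written through the mapping

    vaUnmapBuffer(dpy, buf_id);   // hdr/rc must not be dereferenced after this
    return vaRenderPicture(dpy, ctx, &buf_id, 1);
}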
+
+

diff --git a/mix_video/src/mixvideoformatenc_h263.h b/mix_video/src/mixvideoformatenc_h263.h
index f13db38..90ef29d 100644
--- a/mix_video/src/mixvideoformatenc_h263.h
+++ b/mix_video/src/mixvideoformatenc_h263.h
@@ -12,7 +12,6 @@
 #include "mixvideoformatenc.h"
 #include "mixvideoframe_private.h"
 
-G_BEGIN_DECLS
 
 #define MIX_VIDEO_ENC_H263_SURFACE_NUM 20
 
@@ -22,75 +21,78 @@ G_BEGIN_DECLS
 /*
  * Type macros.
  */
-#define MIX_TYPE_VIDEOFORMATENC_H263 (mix_videoformatenc_h263_get_type ())
-#define MIX_VIDEOFORMATENC_H263(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC_H263, MixVideoFormatEnc_H263))
-#define MIX_IS_VIDEOFORMATENC_H263(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC_H263))
-#define MIX_VIDEOFORMATENC_H263_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC_H263, MixVideoFormatEnc_H263Class))
-#define MIX_IS_VIDEOFORMATENC_H263_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC_H263))
-#define MIX_VIDEOFORMATENC_H263_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC_H263, MixVideoFormatEnc_H263Class))
-
-typedef struct _MixVideoFormatEnc_H263 MixVideoFormatEnc_H263;
-typedef struct _MixVideoFormatEnc_H263Class MixVideoFormatEnc_H263Class;
-
-struct _MixVideoFormatEnc_H263 {
-    /*< public > */
-    MixVideoFormatEnc parent;
-
-    VABufferID coded_buf[2];
-    VABufferID last_coded_buf;
-    VABufferID seq_param_buf;
-    VABufferID pic_param_buf;
-    VABufferID slice_param_buf;
-    VASurfaceID * ci_shared_surfaces;
-    VASurfaceID * surfaces;
-    guint surface_num;
-
-    MixVideoFrame *cur_frame;  //current input frame to be encoded;
-    MixVideoFrame *ref_frame;  //reference frame
-    MixVideoFrame *rec_frame;  //reconstructed frame;
-    MixVideoFrame *last_frame; //last frame;
-#ifdef ANDROID
-    MixBuffer *last_mix_buffer;
-#endif
-
-    guint disable_deblocking_filter_idc;
-    guint slice_num;
-    guint va_rcmode;
-
-    guint encoded_frames;
-    gboolean pic_skipped;
-
-    gboolean is_intra;
-
-    guint coded_buf_size;
-    guint coded_buf_index;
-
-    /*< public > */
+#define MIX_VIDEOFORMATENC_H263(obj) (reinterpret_cast<MixVideoFormatEnc_H263*>(obj))
+#define MIX_IS_VIDEOFORMATENC_H263(obj) ((NULL != MIX_VIDEOFORMATENC_H263(obj)) ?
TRUE : FALSE) + +class MixVideoFormatEnc_H263 : public MixVideoFormatEnc { +public: + MixVideoFormatEnc_H263(); + virtual ~MixVideoFormatEnc_H263(); + + virtual MIX_RESULT Initialize( + MixVideoConfigParamsEnc* config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + MixUsrReqSurfacesInfo * requested_surface_info, + VADisplay va_display); + + virtual MIX_RESULT Encode( MixBuffer * bufin[], + int bufincnt, MixIOVec * iovout[], int iovoutcnt, + MixVideoEncodeParams * encode_params); + + virtual MIX_RESULT Flush(); + + virtual MIX_RESULT Deinitialize(); + + virtual MIX_RESULT GetMaxEncodedBufSize (uint *max_size); + + /* Local Methods */ +private: + MIX_RESULT _process_encode (MixBuffer * bufin, MixIOVec * iovout); + MIX_RESULT _send_encode_command(); + MIX_RESULT _send_seq_params(); + MIX_RESULT _send_picture_parameter(); + MIX_RESULT _send_slice_parameter(); + MIX_RESULT _send_dynamic_bitrate(); + MIX_RESULT _send_dynamic_framerate(); + +public: + VABufferID coded_buf[2]; + VABufferID last_coded_buf; + VABufferID seq_param_buf; + VABufferID pic_param_buf; + VABufferID slice_param_buf; + VASurfaceID * shared_surfaces; + VASurfaceID * surfaces; + uint surface_num; + uint shared_surfaces_cnt; + uint precreated_surfaces_cnt; + + MixVideoFrame *cur_frame; //current input frame to be encoded; + MixVideoFrame *ref_frame; //reference frame + MixVideoFrame *rec_frame; //reconstructed frame; + MixVideoFrame *last_frame; //last frame; + MixVideoFrame *lookup_frame; + MixBuffer *last_mix_buffer; + + uint disable_deblocking_filter_idc; + uint slice_num; + uint va_rcmode_h263; + + uint encoded_frames; + bool pic_skipped; + + bool is_intra; + + uint coded_buf_size; + uint coded_buf_index; + + uint8 ** usrptr; + uint alloc_surface_cnt; }; -/** - * MixVideoFormatEnc_H263Class: - * - * MI-X Video object class - */ -struct _MixVideoFormatEnc_H263Class { - /*< public > */ - MixVideoFormatEncClass parent_class; - /* class members */ - - /*< public > */ -}; - -/** - * mix_videoformatenc_h263_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_videoformatenc_h263_get_type(void); /** * mix_videoformatenc_h263_new: @@ -115,33 +117,8 @@ MixVideoFormatEnc_H263 *mix_videoformatenc_h263_ref(MixVideoFormatEnc_H263 * mix * * Decrement reference count of the object. 
*/ -#define mix_videoformatenc_h263_unref(obj) g_object_unref (G_OBJECT(obj)) - -/* Class Methods */ - -/* MPEG-4:2 vmethods */ -MIX_RESULT mix_videofmtenc_h263_getcaps(MixVideoFormatEnc *mix, GString *msg); -MIX_RESULT mix_videofmtenc_h263_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); -MIX_RESULT mix_videofmtenc_h263_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params); -MIX_RESULT mix_videofmtenc_h263_flush(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_h263_eos(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_h263_deinitialize(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_h263_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint * max_size); - -/* Local Methods */ - -MIX_RESULT mix_videofmtenc_h263_process_encode (MixVideoFormatEnc_H263 *mix, MixBuffer * bufin, - MixIOVec * iovout); -MIX_RESULT mix_videofmtenc_h263_send_encode_command (MixVideoFormatEnc_H263 *mix); - -G_END_DECLS +//#define mix_videoformatenc_h263_unref(obj) g_object_unref (G_OBJECT(obj)) +MixVideoFormatEnc_H263 *mix_videoformatenc_h263_unref(MixVideoFormatEnc_H263 * mix); #endif /* __MIX_VIDEOFORMATENC_H263_H__ */ diff --git a/mix_video/src/mixvideoformatenc_h264.cpp b/mix_video/src/mixvideoformatenc_h264.cpp index cb354ae..71d4282 100644 --- a/mix_video/src/mixvideoformatenc_h264.cpp +++ b/mix_video/src/mixvideoformatenc_h264.cpp @@ -5,7 +5,7 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#include + #include #include @@ -21,324 +21,293 @@ Window win = 0; #endif /* SHOW_SRC */ - -/* The parent class. The pointer will be saved - * in this class's initialization. The pointer - * can be used for chaining method call if needed. 
- */ -static MixVideoFormatEncClass *parent_class = NULL; - -static void mix_videoformatenc_h264_finalize(GObject * obj); - -/* - * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC - */ -G_DEFINE_TYPE (MixVideoFormatEnc_H264, mix_videoformatenc_h264, MIX_TYPE_VIDEOFORMATENC); - -static void mix_videoformatenc_h264_init(MixVideoFormatEnc_H264 * self) { - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); - - /* member initialization */ - self->encoded_frames = 0; - self->frame_num = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - self->cur_frame = NULL; - self->ref_frame = NULL; - self->rec_frame = NULL; - self->lookup_frame = NULL; -#ifdef ANDROID - self->last_mix_buffer = NULL; +MixVideoFormatEnc_H264::MixVideoFormatEnc_H264() + :MixVideoFormatEnc() + ,encoded_frames(0) + ,frame_num(0) + ,pic_skipped(FALSE) + ,is_intra(TRUE) + ,cur_frame(NULL) + ,ref_frame(NULL) + ,rec_frame(NULL) + ,lookup_frame(NULL) +#if 1 + ,last_mix_buffer ( NULL) #endif - - self->ci_shared_surfaces = NULL; - self->surfaces= NULL; - self->surface_num = 0; - - self->coded_buf_index = 0; - parent->initialized = FALSE; + ,shared_surfaces(NULL) + ,surfaces(NULL) + ,surface_num(0) + ,shared_surfaces_cnt(0) + ,precreated_surfaces_cnt(0) + ,usrptr(NULL) + ,coded_buf_index(0) + ,coded_buf_size(0) { } -static void mix_videoformatenc_h264_class_init( - MixVideoFormatEnc_H264Class * klass) { - - /* root class */ - GObjectClass *gobject_class = (GObjectClass *) klass; - /* direct parent class */ - MixVideoFormatEncClass *video_formatenc_class = - MIX_VIDEOFORMATENC_CLASS(klass); - - /* parent class for later use */ - parent_class = reinterpret_cast(g_type_class_peek_parent(klass)); - - /* setup finializer */ - gobject_class->finalize = mix_videoformatenc_h264_finalize; - - /* setup vmethods with base implementation */ - video_formatenc_class->getcaps = mix_videofmtenc_h264_getcaps; - video_formatenc_class->initialize = mix_videofmtenc_h264_initialize; - video_formatenc_class->encode = mix_videofmtenc_h264_encode; - video_formatenc_class->flush = mix_videofmtenc_h264_flush; - video_formatenc_class->eos = mix_videofmtenc_h264_eos; - video_formatenc_class->deinitialize = mix_videofmtenc_h264_deinitialize; - video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_h264_get_max_encoded_buf_size; - video_formatenc_class->set_dynamic_config = mix_videofmtenc_h264_set_dynamic_enc_config; +MixVideoFormatEnc_H264::~MixVideoFormatEnc_H264() { } + MixVideoFormatEnc_H264 * mix_videoformatenc_h264_new(void) { - MixVideoFormatEnc_H264 *ret = reinterpret_cast( - g_object_new(MIX_TYPE_VIDEOFORMATENC_H264, NULL)); - - return ret; -} - -void mix_videoformatenc_h264_finalize(GObject * obj) { - /* clean up here. 
*/ - - /*MixVideoFormatEnc_H264 *mix = MIX_VIDEOFORMATENC_H264(obj); */ - GObjectClass *root_class = (GObjectClass *) parent_class; - - LOG_V( "\n"); - - /* Chain up parent */ - if (root_class->finalize) { - root_class->finalize(obj); - } + return new MixVideoFormatEnc_H264(); } MixVideoFormatEnc_H264 * mix_videoformatenc_h264_ref(MixVideoFormatEnc_H264 * mix) { - return (MixVideoFormatEnc_H264 *) g_object_ref(G_OBJECT(mix)); + if (NULL != mix) + mix->Ref(); + return mix; } -/*H.264 vmethods implementation */ -MIX_RESULT mix_videofmtenc_h264_getcaps(MixVideoFormatEnc *mix, GString *msg) { - - LOG_V( "mix_videofmtenc_h264_getcaps\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - if (parent_class->getcaps) { - return parent_class->getcaps(mix, msg); - } - return MIX_RESULT_SUCCESS; +MixVideoFormatEnc_H264 * +mix_videoformatenc_h264_unref(MixVideoFormatEnc_H264 * mix) { + if (NULL != mix) + return MIX_VIDEOFORMATENC_H264(mix->Unref()); + else + return mix; } -#define CLEAN_UP {\ - if (ret == MIX_RESULT_SUCCESS) {\ - parent->initialized = TRUE;\ - }\ - if (va_profiles)\ - g_free(va_profiles);\ - if (va_entrypoints)\ - g_free (va_entrypoints);\ - if (surfaces)\ - g_free (surfaces);\ - g_mutex_unlock(parent->objectlock);\ - LOG_V( "end\n");\ - return ret;} - -MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display ) { +MIX_RESULT +MixVideoFormatEnc_H264::Initialize( + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + MixUsrReqSurfacesInfo * requested_surface_info, + VADisplay va_display ) { MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; +// MixVideoFormatEnc *parent = NULL; MixVideoConfigParamsEncH264 * config_params_enc_h264; VAStatus va_status = VA_STATUS_SUCCESS; VASurfaceID * surfaces = NULL; - gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - gint va_num_profiles, va_num_entrypoints; + int va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; + int va_num_profiles, va_num_entrypoints; VAProfile *va_profiles = NULL; VAEntrypoint *va_entrypoints = NULL; VAConfigAttrib va_attrib[2]; - guint index; + uint index; + uint max_size = 0; + /* + * For upstream allocates buffer, it is mandatory to set buffer mode + * and for other stuff, it is optional + */ + + + /* + * Different MIX buffer mode will have different surface handling approach + */ + + + uint normal_surfaces_cnt = 2; + /* + * shared_surfaces_cnt is for upstream buffer allocation case + */ + uint shared_surfaces_cnt = 0; + + /* + * precreated_surfaces_cnt is for self buffer allocation case + */ + uint precreated_surfaces_cnt = 0; + + MixCISharedBufferInfo * ci_info = NULL; /*frame_mgr and input_buf_pool is reservered for future use*/ - if (mix == NULL || config_params_enc == NULL || va_display == NULL) { + if ( config_params_enc == NULL || va_display == NULL || requested_surface_info == NULL) { LOG_E( - "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); + " config_params_enc == NULL || va_display == NULL || requested_surface_info == NULL\n"); return MIX_RESULT_NULL_PTR; } - LOG_V( "begin\n"); - + /* + * Check more for requested_surface_info + */ + if (requested_surface_info->surface_cnt != 0 && + (requested_surface_info->surface_allocated == NULL 
|| requested_surface_info->usrptr == NULL)) { + LOG_E( + "surface_cnt != 0 && (surface_allocated == NULL || usrptr == NULL)\n"); + return MIX_RESULT_NULL_PTR; + } - /* Chainup parent method. */ - if (parent_class->initialize) { - ret = parent_class->initialize(mix, config_params_enc, - frame_mgr, input_buf_pool, surface_pool, - va_display); + if (requested_surface_info->surface_cnt > MAX_ENC_SURFACE_COUNT) { + LOG_E ("Something wrong, we have to quite now!\n"); + return MIX_RESULT_FAIL; } + LOG_V( "begin\n"); + + ret = MixVideoFormatEnc::Initialize(config_params_enc,frame_mgr,input_buf_pool,surface_pool,requested_surface_info,va_display); + if (ret != MIX_RESULT_SUCCESS) { return ret; } - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); +// parent = MIX_VIDEOFORMATENC(this); +// MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) { config_params_enc_h264 = MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc); } else { LOG_V( - "mix_videofmtenc_h264_initialize: no h264 config params found\n"); + "mix_videofmtenc_h264_initialize: no h264 config params found\n"); return MIX_RESULT_FAIL; } - g_mutex_lock(parent->objectlock); +// g_mutex_lock(parent->objectlock); + Lock(); LOG_V( - "Start to get properities from h.264 params\n"); + "Start to get properities from h.264 params\n"); /* get properties from H264 params Object, which is special to H264 format*/ ret = mix_videoconfigparamsenc_h264_get_bus (config_params_enc_h264, - &self->basic_unit_size); + &this->basic_unit_size); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_bus\n"); - CLEAN_UP; + "Failed to mix_videoconfigparamsenc_h264_get_bus\n"); + goto CLEAN_UP; } ret = mix_videoconfigparamsenc_h264_get_dlk (config_params_enc_h264, - &self->disable_deblocking_filter_idc); + &this->disable_deblocking_filter_idc); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_dlk\n"); + goto CLEAN_UP; + } + + ret = mix_videoconfigparamsenc_h264_get_vui_flag (config_params_enc_h264, + &this->vui_flag); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_dlk\n"); - CLEAN_UP; + "Failed to mix_videoconfigparamsenc_h264_get_vui_flag\n"); + goto CLEAN_UP; } ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, - &self->slice_num); + &this->slice_num); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); - CLEAN_UP; + "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); + goto CLEAN_UP; } ret = mix_videoconfigparamsenc_h264_get_I_slice_num (config_params_enc_h264, - &self->I_slice_num); + &this->I_slice_num); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n"); - CLEAN_UP; + "Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n"); + goto CLEAN_UP; } ret = mix_videoconfigparamsenc_h264_get_P_slice_num (config_params_enc_h264, - &self->P_slice_num); + &this->P_slice_num); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n"); - CLEAN_UP; + "Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n"); + goto CLEAN_UP; } ret = mix_videoconfigparamsenc_h264_get_delimiter_type (config_params_enc_h264, - &self->delimiter_type); + &this->delimiter_type); if (ret != 
MIX_RESULT_SUCCESS) { LOG_E ( - "Failed to mix_videoconfigparamsenc_h264_get_delimiter_type\n"); - CLEAN_UP; + "Failed to mix_videoconfigparamsenc_h264_get_delimiter_type\n"); + goto CLEAN_UP; } ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264, - &self->idr_interval); + &this->idr_interval); if (ret != MIX_RESULT_SUCCESS) { LOG_E ( - "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); - CLEAN_UP; + "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); + goto CLEAN_UP; } LOG_V( - "======H264 Encode Object properities======:\n"); - - LOG_I( "self->basic_unit_size = %d\n", - self->basic_unit_size); - LOG_I( "self->disable_deblocking_filter_idc = %d\n", - self->disable_deblocking_filter_idc); - LOG_I( "self->slice_num = %d\n", - self->slice_num); - LOG_I( "self->I_slice_num = %d\n", - self->I_slice_num); - LOG_I( "self->P_slice_num = %d\n", - self->P_slice_num); - LOG_I ("self->delimiter_type = %d\n", - self->delimiter_type); - LOG_I ("self->idr_interval = %d\n", - self->idr_interval); + "======H264 Encode Object properities======:\n"); + + LOG_I( "this->basic_unit_size = %d\n", + this->basic_unit_size); + LOG_I( "this->disable_deblocking_filter_idc = %d\n", + this->disable_deblocking_filter_idc); + LOG_I( "this->slice_num = %d\n", + this->slice_num); + LOG_I( "this->I_slice_num = %d\n", + this->I_slice_num); + LOG_I( "this->P_slice_num = %d\n", + this->P_slice_num); + LOG_I ("this->delimiter_type = %d\n", + this->delimiter_type); + LOG_I ("this->idr_interval = %d\n", + this->idr_interval); LOG_V( - "Get properities from params done\n"); + "Get properities from params done\n"); - parent->va_display = va_display; + this->va_display = va_display; LOG_V( "Get Display\n"); LOG_I( "Display = 0x%08x\n", - (guint)va_display); + (uint)va_display); #if 0 /* query the vender information, can ignore*/ va_vendor = vaQueryVendorString (va_display); LOG_I( "Vendor = %s\n", - va_vendor); + va_vendor); #endif /*get the max number for profiles/entrypoints/attribs*/ va_max_num_profiles = vaMaxNumProfiles(va_display); LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); + va_max_num_profiles); va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); + va_max_num_entrypoints); va_max_num_attribs = vaMaxNumConfigAttributes(va_display); LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - - va_profiles = reinterpret_cast(g_malloc(sizeof(VAProfile)*va_max_num_profiles)); - va_entrypoints = reinterpret_cast(g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints)); + va_max_num_attribs); +// va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); + va_profiles = new VAProfile[va_max_num_profiles]; +// va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); + va_entrypoints = new VAEntrypoint[va_max_num_entrypoints]; if (va_profiles == NULL || va_entrypoints ==NULL) { LOG_E( - "!va_profiles || !va_entrypoints\n"); + "!va_profiles || !va_entrypoints\n"); ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; + goto CLEAN_UP; } LOG_I( - "va_profiles = 0x%08x\n", (guint)va_profiles); + "va_profiles = 0x%08x\n", (uint)va_profiles); LOG_V( "vaQueryConfigProfiles\n"); @@ -348,10 +317,10 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to call vaQueryConfigProfiles\n"); + "Failed to call vaQueryConfigProfiles\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } LOG_V( "vaQueryConfigProfiles Done\n"); @@ -359,16 
+328,16 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, /*check whether profile is supported*/ - for(index= 0; index < va_num_profiles; index++) { - if(parent->va_profile == va_profiles[index]) + for (index= 0; index < va_num_profiles; index++) { + if (this->va_profile == va_profiles[index]) break; } - if(index == va_num_profiles) + if (index == va_num_profiles) { LOG_E( "Profile not supported\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } LOG_V( "vaQueryConfigEntrypoints\n"); @@ -376,15 +345,15 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, /*Check entry point*/ va_status = vaQueryConfigEntrypoints(va_display, - parent->va_profile, - va_entrypoints, &va_num_entrypoints); + this->va_profile, + va_entrypoints, &va_num_entrypoints); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); + "Failed to call vaQueryConfigEntrypoints\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } for (index = 0; index < va_num_entrypoints; index ++) { @@ -396,7 +365,7 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, if (index == va_num_entrypoints) { LOG_E( "Entrypoint not found\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } va_attrib[0].type = VAConfigAttribRTFormat; @@ -404,74 +373,140 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, LOG_V( "vaGetConfigAttributes\n"); - va_status = vaGetConfigAttributes(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2); + va_status = vaGetConfigAttributes(va_display, this->va_profile, + this->va_entrypoint, + &va_attrib[0], 2); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to call vaGetConfigAttributes\n"); + "Failed to call vaGetConfigAttributes\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } - if ((va_attrib[0].value & parent->va_format) == 0) { + if ((va_attrib[0].value & this->va_format) == 0) { LOG_E( "Matched format not found\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } - - if ((va_attrib[1].value & parent->va_rcmode) == 0) { + LOG_E( "RC mode va_attrib[1].value=%d, this->va_rcmode=%d",va_attrib[1].value, this->va_rcmode); + if ((va_attrib[1].value & this->va_rcmode) == 0) { LOG_E( "RC mode not found\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } - va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = parent->va_rcmode; + va_attrib[0].value = this->va_format; //VA_RT_FORMAT_YUV420; + va_attrib[1].value = this->va_rcmode; LOG_V( "======VA Configuration======\n"); LOG_I( "profile = %d\n", - parent->va_profile); + this->va_profile); LOG_I( "va_entrypoint = %d\n", - parent->va_entrypoint); + this->va_entrypoint); LOG_I( "va_attrib[0].type = %d\n", - va_attrib[0].type); + va_attrib[0].type); LOG_I( "va_attrib[1].type = %d\n", - va_attrib[1].type); + va_attrib[1].type); LOG_I( "va_attrib[0].value (Format) = %d\n", - va_attrib[0].value); + va_attrib[0].value); LOG_I( "va_attrib[1].value (RC mode) = %d\n", - va_attrib[1].value); + va_attrib[1].value); LOG_V( "vaCreateConfig\n"); - va_status = vaCreateConfig(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2, &(parent->va_config)); + va_status = vaCreateConfig(va_display, this->va_profile, + this->va_entrypoint, + &va_attrib[0], 2, &(this->va_config)); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaCreateConfig\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; + } + + + if (this->va_rcmode == VA_RC_VCM) { + + /* + * Following 
three features are only enabled in VCM mode + */ + this->render_mss_required = TRUE; + this->render_AIR_required = TRUE; + this->render_bitrate_required = TRUE; + this->slice_num = (this->picture_height + 15) / 16; //if we are in VCM, we will set slice num to max value } - if (parent->va_rcmode == VA_RC_VCM) { - /* - * Following three features are only enabled in VCM mode - */ - parent->render_mss_required = TRUE; - parent->render_AIR_required = TRUE; - parent->render_bitrate_required = TRUE; - self->slice_num = (parent->picture_height + 15) / 16; //if we are in VCM, we will set slice num to max value + LOG_I ("alloc_surface_cnt = %d\n", requested_surface_info->surface_cnt); + + if (requested_surface_info->surface_cnt == 0) { + switch (this->buffer_mode) { + case MIX_BUFFER_UPSTREAM_ALLOC_CI: + ci_info = (MixCISharedBufferInfo *) (this->buf_info); + shared_surfaces_cnt = ci_info->ci_frame_cnt; + normal_surfaces_cnt = 2; + break; + case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: + /* + * To be develped + */ + break; + case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: + /* + * To be develped + */ + break; + default: + this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL; + normal_surfaces_cnt = 8; + break; + } + } + else if (requested_surface_info->surface_cnt == 1) { + /* + * Un-normal case, TBD + */ + this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL; + normal_surfaces_cnt = 8; + } + else { + this->buffer_mode = MIX_BUFFER_SELF_ALLOC_SURFACE; + precreated_surfaces_cnt = requested_surface_info->surface_cnt; + this->alloc_surface_cnt = requested_surface_info->surface_cnt; + + //this->usrptr = g_malloc (requested_surface_info->surface_cnt * sizeof (uint8 *)); + this->usrptr = new uint8 *[requested_surface_info->surface_cnt]; + if (this->usrptr == NULL) { + LOG_E("Failed allocate memory\n"); + ret = MIX_RESULT_NO_MEMORY; + goto CLEAN_UP; + } + + memcpy (this->usrptr, requested_surface_info->usrptr, requested_surface_info->surface_cnt * sizeof (uint8 *)); + } + LOG_I ("buffer_mode = %d\n", this->buffer_mode); + + this->shared_surfaces_cnt = shared_surfaces_cnt; + this->precreated_surfaces_cnt = precreated_surfaces_cnt; + +#if 0 + + int ii = 0; + for (ii=0; ii < alloc_surface_cnt; ii++) { + + g_print ("self->usrptr[%d] = 0x%08x\n", ii, self->usrptr[ii]); + g_print ("usrptr[%d] = 0x%08x\n", ii, usrptr[ii]); + + + } /*TODO: compute the surface number*/ int numSurfaces; @@ -480,102 +515,111 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, numSurfaces = 2; } else { - numSurfaces = 8; + numSurfaces = 2; parent->ci_frame_num = 0; } - self->surface_num = numSurfaces + parent->ci_frame_num; + //self->surface_num = numSurfaces + parent->ci_frame_num; +#endif - surfaces = reinterpret_cast(g_malloc(sizeof(VASurfaceID)*numSurfaces)); + this->surface_num = normal_surfaces_cnt + shared_surfaces_cnt + precreated_surfaces_cnt; + // surfaces = g_malloc(sizeof(VASurfaceID)*normal_surfaces_cnt); + surfaces = new VASurfaceID[normal_surfaces_cnt]; if (surfaces == NULL) { LOG_E( - "Failed allocate surface\n"); + "Failed allocate surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto CLEAN_UP; + } + + //this->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); + this->surfaces = new VASurfaceID[this->surface_num] ; + if (this->surfaces == NULL) + { + LOG_E( + "Failed allocate private surface\n"); ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; + goto CLEAN_UP; } LOG_V( "vaCreateSurfaces\n"); - va_status = vaCreateSurfaces(va_display, parent->picture_width, - parent->picture_height, parent->va_format, - numSurfaces, surfaces); + 
va_status = vaCreateSurfaces(va_display, this->picture_width, + this->picture_height, this->va_format, + normal_surfaces_cnt, surfaces); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed vaCreateSurfaces\n"); + "Failed vaCreateSurfaces\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } - if (parent->share_buf_mode) { - LOG_V( - "We are in share buffer mode!\n"); - self->ci_shared_surfaces = reinterpret_cast( - g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num)); + if (shared_surfaces_cnt != 0) { +// this->shared_surfaces = +// g_malloc(sizeof(VASurfaceID) * shared_surfaces_cnt); + this->shared_surfaces = + new VASurfaceID[shared_surfaces_cnt]; - if (self->ci_shared_surfaces == NULL) + if (this->shared_surfaces == NULL) { LOG_E( - "Failed allocate shared surface\n"); + "Failed allocate shared surface\n"); ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; + goto CLEAN_UP; } + } - guint index; - for(index = 0; index < parent->ci_frame_num; index++) { - - LOG_I( "ci_frame_id = %lu\n", - parent->ci_frame_id[index]); - LOG_V( - "vaCreateSurfaceFromCIFrame\n"); + switch (this->buffer_mode) { + case MIX_BUFFER_UPSTREAM_ALLOC_CI: + { + for (index = 0; index < this->shared_surfaces_cnt; index++) { va_status = vaCreateSurfaceFromCIFrame(va_display, - (gulong) (parent->ci_frame_id[index]), - &self->ci_shared_surfaces[index]); + (ulong) (ci_info->ci_frame_id[index]), + &this->shared_surfaces[index]); if (va_status != VA_STATUS_SUCCESS) { - LOG_E( - "Failed to vaCreateSurfaceFromCIFrame\n"); + LOG_E("Failed to vaCreateSurfaceFromCIFrame\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } - } - - LOG_V( - "vaCreateSurfaceFromCIFrame Done\n"); - - }// if (parent->share_buf_mode) - self->surfaces = reinterpret_cast(g_malloc(sizeof(VASurfaceID) * self->surface_num)); - - if (self->surfaces == NULL) - { - LOG_E( - "Failed allocate private surface\n"); - ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; + this->surfaces[index] = this->shared_surfaces[index]; + } } - - if (parent->share_buf_mode) { - /*shared surfaces should be put in pool first, - because we will get it accoring to CI index*/ - for(index = 0; index < parent->ci_frame_num; index++) - self->surfaces[index] = self->ci_shared_surfaces[index]; + break; + case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: + /*To be develped*/ + break; + case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: + /*To be develped*/ + break; + case MIX_BUFFER_ALLOC_NORMAL: + break; + case MIX_BUFFER_SELF_ALLOC_SURFACE: + { + for (index = 0; index < requested_surface_info->surface_cnt; index ++) { + this->surfaces[index] = requested_surface_info->surface_allocated[index]; + } + } + break; + default: + break; } - for(index = 0; index < numSurfaces; index++) { - self->surfaces[index + parent->ci_frame_num] = surfaces[index]; + for (index = 0; index < normal_surfaces_cnt; index++) { + this->surfaces[precreated_surfaces_cnt + shared_surfaces_cnt + index] = surfaces[index]; } LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", - numSurfaces + parent->ci_frame_num); + LOG_I( "Created %d libva surfaces\n", this->surface_num); #if 0 //current put this in gst images = g_malloc(sizeof(VAImage)*numSurfaces); @@ -589,137 +633,154 @@ MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, //Derive an VAImage from an existing surface. 
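// For reference, the surface table assembled in this function always has one
// fixed layout: upstream/shared surfaces first (so they can be looked up by
// CI index), then pre-created surfaces, then the locally created ones. A
// small sketch of that indexing, assuming VASurfaceID from <va/va.h>; the
// function name and parameters are hypothetical:
static void assemble_surface_table(VASurfaceID *table,
                                   const VASurfaceID *shared, unsigned shared_cnt,
                                   const VASurfaceID *precreated, unsigned precreated_cnt,
                                   const VASurfaceID *created, unsigned created_cnt) {
    unsigned i;
    for (i = 0; i < shared_cnt; i++)
        table[i] = shared[i];                                 // e.g. CI frames
    for (i = 0; i < precreated_cnt; i++)
        table[shared_cnt + i] = precreated[i];                // self-allocated
    for (i = 0; i < created_cnt; i++)
        table[shared_cnt + precreated_cnt + i] = created[i];  // vaCreateSurfaces output
    // the table must hold shared_cnt + precreated_cnt + created_cnt entries,
    // which matches surface_num in the surrounding code
}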
//The image buffer can then be mapped/unmapped for CPU access va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); + &images[index]); } #endif LOG_V( "mix_surfacepool_new\n"); - parent->surfacepool = mix_surfacepool_new(); + this->surfacepool = mix_surfacepool_new(); if (surface_pool) - *surface_pool = parent->surfacepool; + *surface_pool = this->surfacepool; //which is useful to check before encode - if (parent->surfacepool == NULL) + if (this->surfacepool == NULL) { LOG_E( - "Failed to mix_surfacepool_new\n"); + "Failed to mix_surfacepool_new\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } LOG_V( - "mix_surfacepool_initialize\n"); + "mix_surfacepool_initialize\n"); - ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces, va_display); + ret = mix_surfacepool_initialize(this->surfacepool, + this->surfaces, this->surface_num, va_display); switch (ret) { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - LOG_E( "Error init failure\n"); - ret = MIX_RESULT_ALREADY_INIT; - CLEAN_UP; - default: - break; + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + LOG_E( "Error init failure\n"); + ret = MIX_RESULT_ALREADY_INIT; + goto CLEAN_UP; + default: + break; } //Initialize and save the VA context ID LOG_V( "vaCreateContext\n"); - va_status = vaCreateContext(va_display, parent->va_config, - parent->picture_width, parent->picture_height, - 0, self->surfaces, parent->ci_frame_num + numSurfaces, - &(parent->va_context)); + va_status = vaCreateContext(va_display, this->va_config, + this->picture_width, this->picture_height, + 0, this->surfaces, this->surface_num, + &(this->va_context)); LOG_I( - "Created libva context width %d, height %d\n", - parent->picture_width, parent->picture_height); + "Created libva context width %d, height %d\n", + this->picture_width, this->picture_height); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaCreateContext\n"); + "Failed to vaCreateContext\n"); LOG_I( "va_status = %d\n", - (guint)va_status); + (uint)va_status); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } - guint max_size = 0; - ret = mix_videofmtenc_h264_get_max_encoded_buf_size (parent, &max_size); + + ret = GetMaxEncodedBufSize(&max_size); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to mix_videofmtenc_h264_get_max_encoded_buf_size\n"); - CLEAN_UP; + "Failed to mix_videofmtenc_h264_get_max_encoded_buf_size\n"); + goto CLEAN_UP; } /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &(self->coded_buf[0])); + va_status = vaCreateBuffer (va_display, this->va_context, + VAEncCodedBufferType, + this->coded_buf_size, // + 1, NULL, + &(this->coded_buf[0])); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &(self->coded_buf[1])); + va_status = vaCreateBuffer (va_display, this->va_context, + VAEncCodedBufferType, + this->coded_buf_size, // + 1, NULL, + &(this->coded_buf[1])); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); ret = MIX_RESULT_FAIL; - 
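// The refactor in this hunk replaces the old CLEAN_UP statement macro with a
// real label plus "goto", the classic single-exit cleanup idiom. A generic
// sketch of the shape (all names hypothetical, not from this library):
#include <new>

static int init_with_cleanup(void) {
    int ret = 0;
    int *profiles = new (std::nothrow) int[8];
    int *entrypoints = new (std::nothrow) int[8];
    if (profiles == NULL || entrypoints == NULL) {
        ret = -1;
        goto CLEAN_UP;        // every failure path funnels to one exit point
    }
    // ... work that can fail at many points, each doing "goto CLEAN_UP" ...
CLEAN_UP:
    delete [] profiles;       // delete[] on NULL is a harmless no-op
    delete [] entrypoints;
    return ret;
}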
CLEAN_UP; + goto CLEAN_UP; } #ifdef SHOW_SRC Display * display = XOpenDisplay (NULL); LOG_I( "display = 0x%08x\n", - (guint) display); + (uint) display); win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, - parent->picture_width, parent->picture_height, 0, 0, - WhitePixel(display, 0)); + this->picture_width, this->picture_height, 0, 0, + WhitePixel(display, 0)); XMapWindow(display, win); XSelectInput(display, win, KeyPressMask | StructureNotifyMask); XSync(display, False); LOG_I( "va_display = 0x%08x\n", - (guint) va_display); + (uint) va_display); #endif /* SHOW_SRC */ - CLEAN_UP; -} -#undef CLEAN_UP -#define CLEAN_UP {\ - LOG_V( "UnLocking\n");\ - g_mutex_unlock(parent->objectlock);\ - LOG_V( "end\n");\ - return ret;} +CLEAN_UP: + + + if (ret == MIX_RESULT_SUCCESS) { + initialized = TRUE; + } + + /*free profiles and entrypoints*/ + if (va_profiles) + delete [] va_profiles; -MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { + if (va_entrypoints) + delete [] va_entrypoints; + + if (surfaces) + delete []surfaces; + +// g_mutex_unlock(parent->objectlock); + Unlock(); + + LOG_V( "end\n"); + + return ret; +} + +MIX_RESULT +MixVideoFormatEnc_H264::Encode( + MixBuffer * bufin[], int bufincnt, + MixIOVec * iovout[], int iovoutcnt, + MixVideoEncodeParams * encode_params) { MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; LOG_V( "Begin\n"); @@ -727,63 +788,62 @@ MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin if (bufincnt != 1 || iovoutcnt != 1) { LOG_E( - "buffer count not equel to 1\n"); + "buffer count not equel to 1\n"); LOG_E( - "maybe some exception occurs\n"); + "maybe some exception occurs\n"); } - if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { + if (bufin[0] == NULL || iovout[0] == NULL) { LOG_E( - "!mix || !bufin[0] ||!iovout[0]\n"); + " !bufin[0] ||!iovout[0]\n"); return MIX_RESULT_NULL_PTR; } #if 0 if (parent_class->encode) { return parent_class->encode(mix, bufin, bufincnt, iovout, - iovoutcnt, encode_params); + iovoutcnt, encode_params); } #endif - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264 (mix); LOG_V( "Locking\n"); - g_mutex_lock(parent->objectlock); +// g_mutex_lock(parent->objectlock); + Lock(); //TODO: also we could move some encode Preparation work to here LOG_V( - "mix_videofmtenc_h264_process_encode\n"); + "ProcessEncode\n"); - ret = mix_videofmtenc_h264_process_encode (self, - bufin[0], iovout[0]); + ret = _process_encode ( + bufin[0], iovout[0]); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed mix_videofmtenc_h264_process_encode\n"); - CLEAN_UP; + "Failed ProcessEncode\n"); + goto CLEAN_UP; } - CLEAN_UP; +CLEAN_UP: + + LOG_V( "UnLocking\n"); + +// g_mutex_unlock(parent->objectlock); + Unlock(); + + LOG_V( "end\n"); + + return ret; } -#undef CLEAN_UP -MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { +MIX_RESULT MixVideoFormatEnc_H264::Flush() { //MIX_RESULT ret = MIX_RESULT_SUCCESS; LOG_V( "Begin\n"); - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - /*not chain to parent flush func*/ #if 0 @@ -792,12 +852,8 @@ MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { } #endif - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc_H264 
*self = MIX_VIDEOFORMATENC_H264(mix); - - g_mutex_lock(mix->objectlock); +// g_mutex_lock(mix->objectlock); + Lock(); #if 0 /*unref the current source surface*/ @@ -809,91 +865,64 @@ MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) { #endif /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) + if (this->rec_frame != NULL) { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; + mix_videoframe_unref (this->rec_frame); + this->rec_frame = NULL; } /*unref the reference surface*/ - if (self->ref_frame != NULL) + if (this->ref_frame != NULL) { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; + mix_videoframe_unref (this->ref_frame); + this->ref_frame = NULL; } -#ifdef ANDROID - if(self->last_mix_buffer) { - mix_buffer_unref(self->last_mix_buffer); - self->last_mix_buffer = NULL; +//#ifdef ANDROID +#if 1 + if (this->last_mix_buffer) { + mix_buffer_unref(this->last_mix_buffer); + this->last_mix_buffer = NULL; } #endif /*reset the properities*/ - self->encoded_frames = 0; - self->frame_num = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; + this->encoded_frames = 0; + this->frame_num = 0; + this->pic_skipped = FALSE; + this->is_intra = TRUE; - g_mutex_unlock(mix->objectlock); +// g_mutex_unlock(mix->objectlock); + Unlock(); LOG_V( "end\n"); return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix) { - - LOG_V( "\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (parent_class->eos) { - return parent_class->eos(mix); - } - return MIX_RESULT_SUCCESS; -} -#define CLEAN_UP {\ - parent->initialized = FALSE;\ - g_mutex_unlock(parent->objectlock);\ - LOG_V( "end\n");\ - return ret;} -MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) { +MIX_RESULT +MixVideoFormatEnc_H264::Deinitialize() { - MixVideoFormatEnc *parent = NULL; VAStatus va_status; MIX_RESULT ret = MIX_RESULT_SUCCESS; LOG_V( "Begin\n"); - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - if (parent_class->deinitialize) { - ret = parent_class->deinitialize(mix); - } + ret = MixVideoFormatEnc::Deinitialize(); if (ret != MIX_RESULT_SUCCESS) { return ret; } - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); LOG_V( "Release frames\n"); - g_mutex_lock(parent->objectlock); +// g_mutex_lock(parent->objectlock); + Lock(); #if 0 /*unref the current source surface*/ @@ -905,509 +934,626 @@ MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) { #endif /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) + if (this->rec_frame != NULL) { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; + mix_videoframe_unref (this->rec_frame); + this->rec_frame = NULL; } /*unref the reference surface*/ - if (self->ref_frame != NULL) + if (this->ref_frame != NULL) { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; + mix_videoframe_unref (this->ref_frame); + this->ref_frame = NULL; } - if (self->lookup_frame != NULL) + if (this->lookup_frame != NULL) { - mix_videoframe_unref (self->lookup_frame); - self->lookup_frame = NULL; + mix_videoframe_unref (this->lookup_frame); + this->lookup_frame = NULL; + } + + if (this->last_mix_buffer) { + mix_buffer_unref(this->last_mix_buffer); + this->last_mix_buffer = NULL; } LOG_V( "Release surfaces\n"); - if (self->ci_shared_surfaces) + if 
(this->shared_surfaces) { - g_free (self->ci_shared_surfaces); - self->ci_shared_surfaces = NULL; +// g_free (self->shared_surfaces); + delete []this->shared_surfaces; + this->shared_surfaces = NULL; } - if (self->surfaces) + if (this->surfaces) { - g_free (self->surfaces); - self->surfaces = NULL; +// g_free (self->surfaces); + delete[] this->surfaces; + this->surfaces = NULL; + } + + if (this->usrptr) { +// g_free (self->usrptr); + delete[]this->usrptr; + this->usrptr = NULL; } LOG_V( "vaDestroyContext\n"); - va_status = vaDestroyContext (parent->va_display, parent->va_context); + va_status = vaDestroyContext (this->va_display, this->va_context); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed vaDestroyContext\n"); + "Failed vaDestroyContext\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } LOG_V( "vaDestroyConfig\n"); - va_status = vaDestroyConfig (parent->va_display, parent->va_config); + va_status = vaDestroyConfig (this->va_display, this->va_config); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed vaDestroyConfig\n"); + "Failed vaDestroyConfig\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } - CLEAN_UP; -} -#undef CLEAN_UP +CLEAN_UP: + this->initialized = FALSE; -MIX_RESULT mix_videofmtenc_h264_send_seq_params (MixVideoFormatEnc_H264 *mix) -{ + //g_mutex_unlock(parent->objectlock); + Unlock(); + + LOG_V( "end\n"); + + return ret; +} - VAStatus va_status; - VAEncSequenceParameterBufferH264 h264_seq_param; - MixVideoFormatEnc *parent = NULL; +MIX_RESULT +MixVideoFormatEnc_H264::GetMaxEncodedBufSize (uint *max_size) { - if (mix == NULL) { - LOG_E("mix == NULL\n"); + if (max_size == NULL) + { + LOG_E( + "max_size == NULL\n"); return MIX_RESULT_NULL_PTR; } - LOG_V( "Begin\n\n"); - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; + LOG_V( "Begin\n"); - parent = MIX_VIDEOFORMATENC(&(mix->parent)); + if (MIX_IS_VIDEOFORMATENC_H264(this)) { - /*set up the sequence params for HW*/ - h264_seq_param.level_idc = 30; //TODO, hard code now - h264_seq_param.intra_period = parent->intra_period; - h264_seq_param.intra_idr_period = mix->idr_interval; - h264_seq_param.picture_width_in_mbs = parent->picture_width / 16; - h264_seq_param.picture_height_in_mbs = parent->picture_height/ 16; - h264_seq_param.bits_per_second = parent->bitrate; - h264_seq_param.frame_rate = - (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; - h264_seq_param.initial_qp = parent->initial_qp; - h264_seq_param.min_qp = parent->min_qp; - h264_seq_param.basic_unit_size = mix->basic_unit_size; //for rate control usage - h264_seq_param.intra_period = parent->intra_period; - //h264_seq_param.vui_flag = 248; - //h264_seq_param.seq_parameter_set_id = 176; + if (this->coded_buf_size > 0) { + *max_size = this->coded_buf_size; + LOG_V ("Already calculate the max encoded size, get the value directly"); + return MIX_RESULT_SUCCESS; + } - LOG_V( - "===h264 sequence params===\n"); + /*base on the rate control mode to calculate the defaule encoded buffer size*/ + if (this->va_rcmode_h264 == VA_RC_NONE) { + this->coded_buf_size = + (this->picture_width* this->picture_height * 400) / (16 * 16); + // set to value according to QP + } + else { + this->coded_buf_size = this->bitrate/ 4; + } - LOG_I( "seq_parameter_set_id = %d\n", - (guint)h264_seq_param.seq_parameter_set_id); - LOG_I( "level_idc = %d\n", - (guint)h264_seq_param.level_idc); - LOG_I( "intra_period = %d\n", - h264_seq_param.intra_period); - LOG_I( "idr_interval = %d\n", - 
h264_seq_param.intra_idr_period); - LOG_I( "picture_width_in_mbs = %d\n", - h264_seq_param.picture_width_in_mbs); - LOG_I( "picture_height_in_mbs = %d\n", - h264_seq_param.picture_height_in_mbs); - LOG_I( "bitrate = %d\n", - h264_seq_param.bits_per_second); - LOG_I( "frame_rate = %d\n", - h264_seq_param.frame_rate); - LOG_I( "initial_qp = %d\n", - h264_seq_param.initial_qp); - LOG_I( "min_qp = %d\n", - h264_seq_param.min_qp); - LOG_I( "basic_unit_size = %d\n", - h264_seq_param.basic_unit_size); - LOG_I( "vui_flag = %d\n\n", - h264_seq_param.vui_flag); + this->coded_buf_size = + max (this->coded_buf_size , + (this->picture_width* this->picture_height * 400) / (16 * 16)); - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncSequenceParameterBufferType, - sizeof(h264_seq_param), - 1, &h264_seq_param, - &mix->seq_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; + /*in case got a very large user input bit rate value*/ + this->coded_buf_size = + min(this->coded_buf_size, + (this->picture_width * this->picture_height * 1.5 * 8)); + this->coded_buf_size = (this->coded_buf_size + 15) &(~15); } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->seq_param_buf, 1); - if (va_status != VA_STATUS_SUCCESS) + else { LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; + "not H264 video encode Object\n"); + return MIX_RESULT_INVALID_PARAM; } + *max_size = this->coded_buf_size; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videofmtenc_h264_send_picture_parameter (MixVideoFormatEnc_H264 *mix) -{ - VAStatus va_status; - VAEncPictureParameterBufferH264 h264_pic_param; - MixVideoFormatEnc *parent = NULL; - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; +MIX_RESULT MixVideoFormatEnc_H264::SetDynamicEncConfig(MixVideoConfigParamsEnc * config_params, + MixEncParamsType params_type) { + + MIX_RESULT ret = MIX_RESULT_SUCCESS; + MixVideoConfigParamsEncH264 * config_params_enc_h264; + + LOG_V( "Begin\n"); + + + if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params)) { + config_params_enc_h264 = + MIX_VIDEOCONFIGPARAMSENC_H264 (config_params); + } else { + LOG_V( + "mix_videofmtenc_h264_initialize: no h264 config params found\n"); + return MIX_RESULT_FAIL; } - LOG_V( "Begin\n\n"); + /* + * For case params_type == MIX_ENC_PARAMS_SLICE_NUM + * we don't need to chain up to parent method, as we will handle + * dynamic slice height change inside this method, and other dynamic + * controls will be handled in parent method. 
+ */ + if (params_type == MIX_ENC_PARAMS_SLICE_NUM) { - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; +// g_mutex_lock(parent->objectlock); + Lock(); - parent = MIX_VIDEOFORMATENC(&(mix->parent)); + ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, + &this->slice_num); - /*set picture params for HW*/ - h264_pic_param.reference_picture = mix->ref_frame->frame_id; - h264_pic_param.reconstructed_picture = mix->rec_frame->frame_id; - h264_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; - h264_pic_param.picture_width = parent->picture_width; - h264_pic_param.picture_height = parent->picture_height; - h264_pic_param.last_picture = 0; + this->I_slice_num = this->P_slice_num = this->slice_num; + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); - LOG_V( - "======h264 picture params======\n"); - LOG_I( "reference_picture = 0x%08x\n", - h264_pic_param.reference_picture); - LOG_I( "reconstructed_picture = 0x%08x\n", - h264_pic_param.reconstructed_picture); - LOG_I( "coded_buf_index = %d\n", - mix->coded_buf_index); - LOG_I( "coded_buf = 0x%08x\n", - h264_pic_param.coded_buf); - LOG_I( "picture_width = %d\n", - h264_pic_param.picture_width); - LOG_I( "picture_height = %d\n\n", - h264_pic_param.picture_height); +// g_mutex_unlock(parent->objectlock); + Unlock(); + return ret; + } - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncPictureParameterBufferType, - sizeof(h264_pic_param), - 1,&h264_pic_param, - &mix->pic_param_buf); + //g_mutex_unlock(parent->objectlock); + Unlock(); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; } + else if (params_type == MIX_ENC_PARAMS_I_SLICE_NUM) { + //g_mutex_lock(parent->objectlock); + Lock(); - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->pic_param_buf, 1); + ret = mix_videoconfigparamsenc_h264_get_I_slice_num (config_params_enc_h264, + &this->I_slice_num); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; - -} + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n"); + //g_mutex_unlock(parent->objectlock); + Unlock(); -MIX_RESULT mix_videofmtenc_h264_send_slice_parameter (MixVideoFormatEnc_H264 *mix) -{ - VAStatus va_status; + return ret; + } - guint slice_num; - guint slice_height; - guint slice_index; - guint slice_height_in_mb; - guint max_slice_num; - guint min_slice_num; + //g_mutex_unlock(parent->objectlock); + Unlock(); - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; } + else if (params_type == MIX_ENC_PARAMS_P_SLICE_NUM) { - LOG_V( "Begin\n\n"); - - - MixVideoFormatEnc *parent = NULL; - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - max_slice_num = (parent->picture_height + 15) / 16; - min_slice_num = 1; - - if (mix->is_intra) { - slice_num = mix->I_slice_num; - } - else { - slice_num = mix->P_slice_num; - } + //g_mutex_lock(parent->objectlock); + Lock(); - if (slice_num < min_slice_num) { - LOG_W ("Slice Number is too small"); - slice_num = min_slice_num; - } + ret = mix_videoconfigparamsenc_h264_get_P_slice_num (config_params_enc_h264, + &this->P_slice_num); - if (slice_num > max_slice_num) { - LOG_W ("Slice Number is too big"); - slice_num = 
max_slice_num; - } - - slice_height = parent->picture_height / slice_num; - - slice_height += 15; - slice_height &= (~15); - - slice_num = mix->slice_num = (parent->picture_height + 15) / slice_height; - -#if 1 - va_status = vaCreateBuffer (parent->va_display, parent->va_context, - VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), - slice_num, NULL, - &mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n"); - VAEncSliceParameterBuffer *slice_param, *current_slice; + //g_mutex_unlock(parent->objectlock); + Unlock(); - va_status = vaMapBuffer(parent->va_display, - mix->slice_param_buf, - (void **)&slice_param); + return ret; + } - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaMapBuffer\n"); - return MIX_RESULT_FAIL; - } + //g_mutex_unlock(parent->objectlock); + Unlock(); - current_slice = slice_param; + } else if (params_type == MIX_ENC_PARAMS_IDR_INTERVAL) { + //g_mutex_lock(parent->objectlock); + Lock(); - for (slice_index = 0; slice_index < slice_num; slice_index++) { - current_slice = slice_param + slice_index; - slice_height_in_mb = - min (slice_height, parent->picture_height - - slice_index * slice_height) / 16; + ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264, + &this->idr_interval); - // starting MB row number for this slice - current_slice->start_row_number = slice_index * slice_height / 16; - // slice height measured in MB - current_slice->slice_height = slice_height_in_mb; - current_slice->slice_flags.bits.is_intra = mix->is_intra; - current_slice->slice_flags.bits.disable_deblocking_filter_idc - = mix->disable_deblocking_filter_idc; + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( + "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); - LOG_V( - "======h264 slice params======\n"); + //g_mutex_unlock(parent->objectlock); + Unlock(); - LOG_I( "slice_index = %d\n", - (gint) slice_index); - LOG_I( "start_row_number = %d\n", - (gint) current_slice->start_row_number); - LOG_I( "slice_height_in_mb = %d\n", - (gint) current_slice->slice_height); - LOG_I( "slice.is_intra = %d\n", - (gint) current_slice->slice_flags.bits.is_intra); - LOG_I( - "disable_deblocking_filter_idc = %d\n\n", - (gint) mix->disable_deblocking_filter_idc); + return ret; + } - } + this->new_header_required = TRUE; - va_status = vaUnmapBuffer(parent->va_display, mix->slice_param_buf); + //g_mutex_unlock(parent->objectlock); + Unlock(); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } -#endif + } else { -#if 0 - VAEncSliceParameterBuffer slice_param; - slice_index = 0; - slice_height_in_mb = slice_height / 16; - slice_param.start_row_number = 0; - slice_param.slice_height = slice_height / 16; - slice_param.slice_flags.bits.is_intra = mix->is_intra; - slice_param.slice_flags.bits.disable_deblocking_filter_idc - = mix->disable_deblocking_filter_idc; - - va_status = vaCreateBuffer (parent->va_display, parent->va_context, - VAEncSliceParameterBufferType, - sizeof(slice_param), - slice_num, &slice_param, - &mix->slice_param_buf); + /* Chainup parent method. 
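+ * The remaining dynamic controls, e.g. bitrate, frame rate and AIR + * settings, are covered by MixVideoFormatEnc::SetDynamicEncConfig.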
*/ + ret = MixVideoFormatEnc::SetDynamicEncConfig(config_params,params_type); - if (va_status != VA_STATUS_SUCCESS) + if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; + LOG_V( + "chain up to parent method (SetDynamicEncConfig) failed\n"); + return ret; } -#endif - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->slice_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; } - LOG_V( "end\n"); + LOG_V( "End\n"); return MIX_RESULT_SUCCESS; } -#define CLEAN_UP {\ - if (ret != MIX_RESULT_SUCCESS) {\ - if (iovout->data) {\ - g_free (iovout->data);\ - iovout->data = NULL;\ - }\ - }\ - LOG_V( "end\n");\ - /* - * The error level of MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW - * is lower than other errors, so if any other errors happen, we won't - * return slice size overflow - */\ - if (ret == MIX_RESULT_SUCCESS && slice_size_overflow)\ - ret = MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW;\ - return ret;} -MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, - MixBuffer * bufin, MixIOVec * iovout) -{ +MIX_RESULT MixVideoFormatEnc_H264::_process_encode( MixBuffer * bufin, + MixIOVec * iovout) { MIX_RESULT ret = MIX_RESULT_SUCCESS; VAStatus va_status = VA_STATUS_SUCCESS; VADisplay va_display = NULL; VAContextID va_context; - gulong surface = 0; - guint16 width, height; + ulong surface = 0; + uint16 width, height; + bool usingMixDataBuffer = FALSE; MixVideoFrame * tmp_frame; - guint8 *buf; - gboolean slice_size_overflow = FALSE; + uint8 *buf; + + VACodedBufferSegment *coded_seg = NULL; + int num_seg = 0; + uint total_size = 0; + uint size = 0; + uint status = 0; + bool slice_size_overflow = FALSE; - if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { + if ((bufin == NULL) || (iovout == NULL)) { LOG_E( - "mix == NUL) || bufin == NULL || iovout == NULL\n"); + "bufin == NULL || iovout == NULL\n"); return MIX_RESULT_NULL_PTR; } LOG_V( "Begin\n"); - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - va_display = parent->va_display; - va_context = parent->va_context; - width = parent->picture_width; - height = parent->picture_height; + va_display = this->va_display; + va_context = this->va_context; + width = this->picture_width; + height = this->picture_height; LOG_I( "encoded_frames = %d\n", - mix->encoded_frames); + this->encoded_frames); LOG_I( "frame_num = %d\n", - mix->frame_num); + this->frame_num); LOG_I( "is_intra = %d\n", - mix->is_intra); + this->is_intra); LOG_I( "ci_frame_id = 0x%08x\n", - (guint) parent->ci_frame_id); + (uint) this->ci_frame_id); - if (parent->new_header_required) { - mix->frame_num = 0; + if (this->new_header_required) { + this->frame_num = 0; } /* determine the picture type*/ //if ((mix->encoded_frames % parent->intra_period) == 0) { - if ((mix->frame_num % parent->intra_period) == 0) { - mix->is_intra = TRUE; + if (this->intra_period == 0) { + if (this->frame_num == 0) + this->is_intra = TRUE; + else + this->is_intra = FALSE; + } + else if ((this->frame_num % this->intra_period) == 0) { + this->is_intra = TRUE; } else { - mix->is_intra = FALSE; + this->is_intra = FALSE; } LOG_I( "is_intra_picture = %d\n", - mix->is_intra); + this->is_intra); LOG_V( - "Get Surface from the pool\n"); + "Get Surface from the pool\n"); /*currently we use one surface for source data, * one for reference and one for reconstructed*/
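+ /* The switch below picks the source surface according to this->buffer_mode: + * a CI, V4L2 or surface handle supplied upstream, a surface we allocated + * ourselves (MIX_BUFFER_SELF_ALLOC_SURFACE), or a plain copy into a pool + * surface (MIX_BUFFER_ALLOC_NORMAL). */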
/*TODO, could be refined here*/ - if (!parent->share_buf_mode) { + + + switch (this->buffer_mode) { + case MIX_BUFFER_UPSTREAM_ALLOC_CI: + { + //MixVideoFrame * frame = mix_videoframe_new(); + if (this->lookup_frame == NULL) + { + this->lookup_frame = mix_videoframe_new (); + if (this->lookup_frame == NULL) + { + LOG_E("mix_videoframe_new() failed!\n"); + ret = MIX_RESULT_NO_MEMORY; + goto CLEAN_UP; + } + } + + if (this->ref_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto CLEAN_UP; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->ref_frame, this->lookup_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get reference surface from pool failed\n"); + goto CLEAN_UP; + } + } + + if (this->rec_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto CLEAN_UP; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->rec_frame, this->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get reconstructed surface from pool failed\n"); + goto CLEAN_UP; + } + } + + //mix_videoframe_unref (mix->cur_frame); + + if (this->need_display) { + this->cur_frame = NULL; + } + + if (this->cur_frame == NULL) + { + uint ci_idx; +#ifndef ANDROID + memcpy (&ci_idx, bufin->data, bufin->size); +#else + memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); +#endif + + LOG_I( + "surface_num = %d\n", this->surface_num); + LOG_I( + "ci_frame_idx = %d\n", ci_idx); + + if (ci_idx > this->surface_num - 2) { + LOG_E( + "the CI frame idx is bigger than the CI frame number\n"); + ret = MIX_RESULT_FAIL; + goto CLEAN_UP; + + } + + + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, ci_idx); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto CLEAN_UP; + } + + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->cur_frame, this->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get current working surface from pool failed\n"); + goto CLEAN_UP; + + } + } + + ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); + } + + /* + * end of CI buffer allocation mode + */ + + break; + case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: + break; + + case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: + + break; + + case MIX_BUFFER_SELF_ALLOC_SURFACE: + { + if (this->lookup_frame == NULL) + { + this->lookup_frame = mix_videoframe_new (); + if (this->lookup_frame == NULL) + { + LOG_E("mix_videoframe_new() failed!\n"); + ret = MIX_RESULT_NO_MEMORY; + goto CLEAN_UP; + } + } + + uint surface_idx = (uint) -1; //fixme, temp use a big value + uint idx = 0; + + LOG_I ("bufin->data = 0x%08x\n", bufin->data); + + for (idx = 0; idx < this->alloc_surface_cnt; idx++) { + LOG_I ("this->usrptr[%d] = 0x%08x\n", idx, this->usrptr[idx]); + + if (bufin->data == this->usrptr[idx]) + surface_idx = idx; + } + + LOG_I( + "surface_num = %d\n", this->surface_num); + LOG_I( + "surface_idx = %d\n", surface_idx); + + if (surface_idx > this->surface_num - 2) { + LOG_W( + "the Surface idx is too big, most likely the buffer passed in is not allocated by us\n"); + ret = MIX_RESULT_FAIL; + goto no_share_mode; + + } + + if (this->ref_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, 
this->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto CLEAN_UP; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->ref_frame, this->lookup_frame); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get reference surface from pool failed\n"); + goto CLEAN_UP; + } + } + + if (this->rec_frame == NULL) + { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto CLEAN_UP; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->rec_frame, this->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get reconstructed surface from pool failed\n"); + goto CLEAN_UP; + } + } + + //mix_videoframe_unref (mix->cur_frame); + + if (this->need_display) { + this->cur_frame = NULL; + } + + if (this->cur_frame == NULL) + { + + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, surface_idx); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "mix_videoframe_set_ci_frame_idx failed\n"); + goto CLEAN_UP; + } + + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->cur_frame, this->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "get current working surface from pool failed\n"); + goto CLEAN_UP; + + } + } + + ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); + } + + break; + /* + * end of Self buffer allocation mode + */ + + case MIX_BUFFER_ALLOC_NORMAL: + { + +no_share_mode: + LOG_V( - "We are NOT in share buffer mode\n"); + "We are NOT in share buffer mode\n"); - if (mix->ref_frame == NULL) + if (this->ref_frame == NULL) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); + ret = mix_surfacepool_get(this->surfacepool, &this->ref_frame); if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used { LOG_E( - "Failed to mix_surfacepool_get\n"); - CLEAN_UP; + "Failed to mix_surfacepool_get\n"); + goto CLEAN_UP; } } - if (mix->rec_frame == NULL) + if (this->rec_frame == NULL) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); + ret = mix_surfacepool_get(this->surfacepool, &this->rec_frame); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to mix_surfacepool_get\n"); - CLEAN_UP; + "Failed to mix_surfacepool_get\n"); + goto CLEAN_UP; } } - if (parent->need_display) { - mix->cur_frame = NULL; + if (this->need_display) { + this->cur_frame = NULL; } - if (mix->cur_frame == NULL) + if (this->cur_frame == NULL) { - ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); + ret = mix_surfacepool_get(this->surfacepool, &this->cur_frame); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to mix_surfacepool_get\n"); - CLEAN_UP; + "Failed to mix_surfacepool_get\n"); + goto CLEAN_UP; } } @@ -1415,25 +1561,25 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, VAImage src_image; - guint8 *pvbuf; - guint8 *dst_y; - guint8 *dst_uv; + uint8 *pvbuf; + uint8 *dst_y; + uint8 *dst_uv; int i,j; LOG_V( - "map source data to surface\n"); + "map source data to surface\n"); - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); - CLEAN_UP; + "Failed to mix_videoframe_get_frame_id\n"); + goto CLEAN_UP; } LOG_I( - "surface id = 0x%08x\n", (guint) surface); + "surface id = 0x%08x\n", 
(uint) surface); va_status = vaDeriveImage(va_display, surface, &src_image); //need to destroy @@ -1441,9 +1587,9 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaDeriveImage\n"); + "Failed to vaDeriveImage\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } VAImage *image = &src_image; @@ -1456,61 +1602,89 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, { LOG_E( "Failed to vaMapBuffer\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } LOG_V( - "vaImage information\n"); + "vaImage information\n"); LOG_I( - "image->pitches[0] = %d\n", image->pitches[0]); + "image->pitches[0] = %d\n", image->pitches[0]); LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); + "image->pitches[1] = %d\n", image->pitches[1]); LOG_I( - "image->offsets[0] = %d\n", image->offsets[0]); + "image->offsets[0] = %d\n", image->offsets[0]); LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); + "image->offsets[1] = %d\n", image->offsets[1]); LOG_I( - "image->num_planes = %d\n", image->num_planes); + "image->num_planes = %d\n", image->num_planes); LOG_I( - "image->width = %d\n", image->width); + "image->width = %d\n", image->width); LOG_I( - "image->height = %d\n", image->height); + "image->height = %d\n", image->height); LOG_I( - "input buf size = %d\n", bufin->size); + "input buf size = %d\n", bufin->size); - guint8 *inbuf = bufin->data; + uint8 *inbuf = bufin->data; -#ifndef ANDROID -#define USE_SRC_FMT_YUV420 -#else -#define USE_SRC_FMT_NV21 +#ifdef ANDROID +#define USE_SRC_FMT_NV12 #endif + int offset_uv = width * height; + uint8 *inbuf_uv = inbuf + offset_uv; + int height_uv = height / 2; + int width_uv = width; -#ifdef USE_SRC_FMT_YUV420 - /*need to convert YUV420 to NV12*/ - dst_y = pvbuf +image->offsets[0]; +#ifdef ANDROID + //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 - for (i = 0; i < height; i ++) { + dst_y = pvbuf + image->offsets[0]; + for (i = 0; i < height; i++) { memcpy (dst_y, inbuf + i * width, width); dst_y += image->pitches[0]; } +#ifdef USE_SRC_FMT_NV12 dst_uv = pvbuf + image->offsets[1]; - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = - inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; + for (i = 0; i < height_uv; i++) { + memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); + dst_uv += image->pitches[1]; + } +#else //USE_SRC_FMT_NV21 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i ++) { + for (j = 0; j < width_uv; j += 2) { + dst_uv[j] = inbuf_uv[j+1]; //u + dst_uv[j+1] = inbuf_uv[j]; //v } dst_uv += image->pitches[1]; + inbuf_uv += width_uv; } -#else //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 - int offset_uv = width * height; - guint8 *inbuf_uv = inbuf + offset_uv; - int height_uv = height / 2; - int width_uv = width; +#endif + +#else + + if (this->raw_format == MIX_RAW_TARGET_FORMAT_YUV420) { + dst_y = pvbuf +image->offsets[0]; + + for (i = 0; i < height; i ++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + + dst_uv = pvbuf + image->offsets[1]; + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; + dst_uv [j + 1] = + inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; + } + dst_uv += image->pitches[1]; + } + } + + else if (this->raw_format == MIX_RAW_TARGET_FORMAT_NV12) { dst_y = pvbuf + image->offsets[0]; for (i = 
0; i < height; i++) { @@ -1518,165 +1692,55 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, dst_y += image->pitches[0]; } -#ifdef USE_SRC_FMT_NV12 dst_uv = pvbuf + image->offsets[1]; for (i = 0; i < height_uv; i++) { memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); dst_uv += image->pitches[1]; } -#else //USE_SRC_FMT_NV21 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i ++) { - for (j = 0; j < width_uv; j += 2) { - dst_uv[j] = inbuf_uv[j+1]; //u - dst_uv[j+1] = inbuf_uv[j]; //v - } - dst_uv += image->pitches[1]; - inbuf_uv += width_uv; - } -#endif + } + else { + LOG_E("Raw format not supported\n"); + ret = MIX_RESULT_FAIL; + goto CLEAN_UP; + } + #endif //USE_SRC_FMT_YUV420 va_status = vaUnmapBuffer(va_display, image->buf); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaUnmapBuffer\n"); + "Failed to vaUnmapBuffer\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } va_status = vaDestroyImage(va_display, src_image.image_id); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaDestroyImage\n"); + "Failed to vaDestroyImage\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } LOG_V( - "Map source data to surface done\n"); - + "Map source data to surface done\n"); } - - else {//if (!parent->share_buf_mode) - - //MixVideoFrame * frame = mix_videoframe_new(); - if (mix->lookup_frame == NULL) - { - mix->lookup_frame = mix_videoframe_new (); - if (mix->lookup_frame == NULL) - { - LOG_E("mix_videoframe_new() failed!\n"); - ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; - } - } - - if (mix->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (mix->lookup_frame, mix->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - CLEAN_UP; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_frame, mix->lookup_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get reference surface from pool failed\n"); - CLEAN_UP; - } - } - - if (mix->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (mix->lookup_frame, mix->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - CLEAN_UP; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_frame, mix->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get recontructed surface from pool failed\n"); - CLEAN_UP; - } - } - - //mix_videoframe_unref (mix->cur_frame); - - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - guint ci_idx; -#ifndef ANDROID - memcpy (&ci_idx, bufin->data, bufin->size); -#else - memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); -#endif - - LOG_I( - "surface_num = %d\n", mix->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > mix->surface_num - 2) { - LOG_E( - "the CI frame idx is too bigger than CI frame number\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - - } - - - ret = mix_videoframe_set_ci_frame_idx (mix->lookup_frame, ci_idx); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - CLEAN_UP; - } - - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_frame, mix->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - CLEAN_UP; - - } - } - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); + break; + default: + break; } - /** + /* * 
Start encoding process **/ LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(guint)va_context); - LOG_I( "surface = 0x%08x\n",(guint)surface); - LOG_I( "va_display = 0x%08x\n",(guint)va_display); + LOG_I( "va_context = 0x%08x\n",(uint)va_context); + LOG_I( "surface = 0x%08x\n",(uint)surface); + LOG_I( "va_display = 0x%08x\n",(uint)va_display); va_status = vaBeginPicture(va_display, va_context, surface); @@ -1684,52 +1748,55 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, { LOG_E( "Failed vaBeginPicture\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } - ret = mix_videofmtenc_h264_send_encode_command (mix); + ret = _send_encode_command (); if (ret != MIX_RESULT_SUCCESS) { LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - CLEAN_UP; + "Failed SendEncodeCommand\n"); + goto CLEAN_UP; } - if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { + if ((this->va_rcmode == VA_RC_NONE) || this->encoded_frames == 0) { va_status = vaEndPicture (va_display, va_context); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaEndPicture\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } } LOG_V( "vaEndPicture\n"); - if (mix->encoded_frames == 0) { - mix->encoded_frames ++; - mix->frame_num ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; + if (this->encoded_frames == 0) { + this->encoded_frames ++; + this->frame_num ++; + this->last_coded_buf = this->coded_buf[this->coded_buf_index]; + this->coded_buf_index ++; + this->coded_buf_index %=2; - mix->last_frame = mix->cur_frame; + this->last_frame = this->cur_frame; /* determine the picture type*/ //if ((mix->encoded_frames % parent->intra_period) == 0) { - if ((mix->frame_num % parent->intra_period) == 0) { - mix->is_intra = TRUE; + if (this->intra_period == 0) { + this->is_intra = FALSE; //Here this->frame_num is bigger than 0 + } + else if ((this->frame_num % this->intra_period) == 0) { + this->is_intra = TRUE; } else { - mix->is_intra = FALSE; + this->is_intra = FALSE; } - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; + tmp_frame = this->rec_frame; + this->rec_frame= this->ref_frame; + this->ref_frame = tmp_frame; } @@ -1737,7 +1804,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, LOG_V( "vaSyncSurface\n"); - va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); + va_status = vaSyncSurface(va_display, this->last_frame->frame_id); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaSyncSurface\n"); @@ -1746,23 +1813,19 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, } LOG_V( - "Start to get encoded data\n"); + "Start to get encoded data\n"); /*get encoded data from the VA buffer*/ - va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); + va_status = vaMapBuffer (va_display, this->last_coded_buf, (void **)&buf); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaMapBuffer\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; - } + goto CLEAN_UP; + } + - VACodedBufferSegment *coded_seg = NULL; - int num_seg = 0; - guint total_size = 0; - guint size = 0; - guint status = 0; coded_seg = (VACodedBufferSegment *)buf; num_seg = 1; @@ -1774,13 +1837,13 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, if (!slice_size_overflow) { - slice_size_overflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK; + slice_size_overflow = status & 
VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK; } if (coded_seg->next == NULL) break; - coded_seg = reinterpret_cast(coded_seg->next); + coded_seg = (VACodedBufferSegment *)coded_seg->next; num_seg ++; } @@ -1789,9 +1852,9 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, #if 0 // first 4 bytes is the size of the buffer memcpy (&(iovout->data_size), (void*)buf, 4); - //size = (guint*) buf; + //size = (uint*) buf; - guint size = iovout->data_size + 100; + uint size = iovout->data_size + 100; #endif iovout->data_size = total_size; @@ -1804,18 +1867,20 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. - iovout->data = reinterpret_cast(g_malloc (size)); // In case we have lots of 0x000001 start code, and we replace them with 4 bytes length prefixed + usingMixDataBuffer = TRUE; + //iovout->data = g_malloc (size); // In case we have lots of 0x000001 start codes and we replace them with a 4-byte length prefix + iovout->data = new uchar[size]; if (iovout->data == NULL) { LOG_E( "iovout->data == NULL\n"); ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; + goto CLEAN_UP; } } coded_seg = (VACodedBufferSegment *)buf; total_size = 0; - if (mix->delimiter_type == MIX_DELIMITER_ANNEXB) { + if (this->delimiter_type == MIX_DELIMITER_ANNEXB) { while (1) { @@ -1825,7 +1890,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, if (coded_seg->next == NULL) break; - coded_seg = reinterpret_cast(coded_seg->next); + coded_seg = (VACodedBufferSegment *)coded_seg->next; } //memcpy (iovout->data, buf + 16, iovout->data_size); //payload starts from the 17th byte } else { - guint pos = 0; - guint zero_byte_count = 0; - guint prefix_length = 0; - guint8 nal_unit_type = 0; - //guint8 * payload = buf + 16; - guint8 * payload = reinterpret_cast(coded_seg->buf); + uint pos = 0; + uint zero_byte_count = 0; + uint prefix_length = 0; + uint8 nal_unit_type = 0; + //uint8 * payload = buf + 16; + uint8 * payload = ( uint8 *)coded_seg->buf; while ((payload[pos++] == 0x00)) { zero_byte_count ++; @@ -1846,15 +1911,18 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, break; } - nal_unit_type = (guint8)(payload[pos] & 0x1f); + nal_unit_type = (uint8)(payload[pos] & 0x1f); prefix_length = zero_byte_count + 1; + /*prefix_length won't be bigger than the total size, so no need to check here*/ + LOG_I ("nal_unit_type = %d\n", nal_unit_type); LOG_I ("zero_byte_count = %d\n", zero_byte_count); + LOG_I ("data_size = %d\n", iovout->data_size); size = iovout->data_size; - if ((payload [pos - 1] & 0x01) && mix->slice_num == 1 && nal_unit_type == 1 && num_seg == 1) { + if ((payload [pos - 1] & 0x01) && this->slice_num == 1 && nal_unit_type == 1 && num_seg == 1) { iovout->data[0] = ((size - prefix_length) >> 24) & 0xff; iovout->data[1] = ((size - prefix_length) >> 16) & 0xff; iovout->data[2] = ((size - prefix_length) >> 8) & 0xff; @@ -1863,26 +1931,29 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, //memcpy (iovout->data + 4, buf + 16 + prefix_length, size - prefix_length); memcpy (iovout->data + 4, coded_seg->buf + prefix_length, size - prefix_length); LOG_V ("We only have one start code, copy directly\n"); + + iovout->data_size = size - prefix_length + 4; } else { if (num_seg == 1) { - ret = 
mix_videofmtenc_h264_AnnexB_to_length_prefixed (reinterpret_cast(coded_seg->buf), coded_seg->size, iovout->data, &size); + ret = _AnnexB_to_length_prefixed ( (uint8*)coded_seg->buf, coded_seg->size, iovout->data, &size); if (ret != MIX_RESULT_SUCCESS) { LOG_E ( - "Failed mix_videofmtenc_h264_AnnexB_to_length_prefixed\n"); - CLEAN_UP; + "Failed AnnexBtoLengthPrefixed\n"); + goto CLEAN_UP; } } else { - guint8 * tem_buf = NULL; - tem_buf = reinterpret_cast(g_malloc (size)); + uint8 * tem_buf = NULL; + //tem_buf = g_malloc (size); + tem_buf = new uint8[size]; if (tem_buf == NULL) { LOG_E( "tem_buf == NULL\n"); ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; + goto CLEAN_UP; } while (1) { @@ -1893,61 +1964,64 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, if (coded_seg->next == NULL) break; - coded_seg = reinterpret_cast(coded_seg->next); + coded_seg = (VACodedBufferSegment *)coded_seg->next; } - ret = mix_videofmtenc_h264_AnnexB_to_length_prefixed (tem_buf, iovout->data_size, iovout->data, &size); + ret = _AnnexB_to_length_prefixed (tem_buf, iovout->data_size, iovout->data, &size); if (ret != MIX_RESULT_SUCCESS) { - LOG_E ( - "Failed mix_videofmtenc_h264_AnnexB_to_length_prefixed\n"); - CLEAN_UP; + LOG_E ("Failed AnnexBtoLengthPrefixed\n"); + //g_free (tem_buf); + delete[] tem_buf; + goto CLEAN_UP; } - g_free (tem_buf); + //g_free (tem_buf); + delete[] tem_buf; } + iovout->data_size = size; } } LOG_I( - "out size is = %d\n", iovout->data_size); + "out size is = %d\n", iovout->data_size); - va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); + va_status = vaUnmapBuffer (va_display, this->last_coded_buf); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaUnmapBuffer\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } LOG_V( "get encoded data done\n"); - if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { + if (!((this->va_rcmode == VA_RC_NONE) || this->encoded_frames == 1)) { va_status = vaEndPicture (va_display, va_context); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaEndPicture\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } } - if (mix->encoded_frames == 1) { + if (this->encoded_frames == 1) { va_status = vaBeginPicture(va_display, va_context, surface); if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed vaBeginPicture\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } - ret = mix_videofmtenc_h264_send_encode_command (mix); + ret = _send_encode_command (); if (ret != MIX_RESULT_SUCCESS) { LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - CLEAN_UP; + "Failed SendEncodeCommand\n"); + goto CLEAN_UP; } va_status = vaEndPicture (va_display, va_context); @@ -1955,7 +2029,7 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, { LOG_E( "Failed vaEndPicture\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } } @@ -1967,34 +2041,34 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed vaQuerySurfaceStatus\n"); + "Failed vaQuerySurfaceStatus\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto CLEAN_UP; } - mix->pic_skipped = va_surface_status & VASurfaceSkipped; + this->pic_skipped = va_surface_status & VASurfaceSkipped; - if (parent->need_display) { - ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); + if (this->need_display) { + ret = mix_videoframe_set_sync_flag(this->cur_frame, TRUE); if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed to set sync_flag\n"); - CLEAN_UP; + goto 
CLEAN_UP; } - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); + ret = mix_framemanager_enqueue(this->framemgr, this->cur_frame); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed mix_framemanager_enqueue\n"); - CLEAN_UP; + "Failed mix_framemanager_enqueue\n"); + goto CLEAN_UP; } } /*update the reference surface and reconstructed surface */ - if (!mix->pic_skipped) { - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; + if (!this->pic_skipped) { + tmp_frame = this->rec_frame; + this->rec_frame= this->ref_frame; + this->ref_frame = tmp_frame; } #if 0 @@ -2005,114 +2079,80 @@ MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, mix_videoframe_unref (mix->cur_frame); #endif - mix->encoded_frames ++; - mix->frame_num ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - mix->last_frame = mix->cur_frame; + this->encoded_frames ++; + this->frame_num ++; + this->last_coded_buf = this->coded_buf[this->coded_buf_index]; + this->coded_buf_index ++; + this->coded_buf_index %=2; + this->last_frame = this->cur_frame; -#ifdef ANDROID - if(mix->last_mix_buffer) { - LOG_V("calls to mix_buffer_unref \n"); - LOG_V("refcount = %d\n", MIX_PARAMS(mix->last_mix_buffer)->refcount); - mix_buffer_unref(mix->last_mix_buffer); +//#ifdef ANDROID +#if 1 + if (this->last_mix_buffer) { + LOG_V("calls to mix_buffer_unref \n"); + LOG_V("refcount = %d\n", MIX_PARAMS(this->last_mix_buffer)->GetRefCount()); + mix_buffer_unref(this->last_mix_buffer); } LOG_V("ref the current bufin\n"); - mix->last_mix_buffer = mix_buffer_ref(bufin); -#endif - - if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_frame); - mix->cur_frame = NULL; - } - CLEAN_UP; -} -#undef CLEAN_UP - -MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size ( - MixVideoFormatEnc *mix, guint *max_size) -{ - - MixVideoFormatEnc *parent = NULL; + this->last_mix_buffer = mix_buffer_ref(bufin); +#endif - if (mix == NULL || max_size == NULL) - { - LOG_E( - "mix == NULL || max_size == NULL\n"); - return MIX_RESULT_NULL_PTR; + if (!(this->need_display)) { + mix_videoframe_unref (this->cur_frame); + this->cur_frame = NULL; } - parent = MIX_VIDEOFORMATENC(mix); - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); +CLEAN_UP: - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOFORMATENC_H264(self)) { - - if (self->coded_buf_size > 0) { - *max_size = self->coded_buf_size; - LOG_V ("Already calculate the max encoded size, get the value directly"); - return MIX_RESULT_SUCCESS; - } - - /*base on the rate control mode to calculate the defaule encoded buffer size*/ - if (self->va_rcmode == VA_RC_NONE) { - self->coded_buf_size = - (parent->picture_width* parent->picture_height * 400) / (16 * 16); - // set to value according to QP - } - else { - self->coded_buf_size = parent->bitrate/ 4; + if (ret != MIX_RESULT_SUCCESS) { + if (iovout->data && (usingMixDataBuffer == TRUE)) { + //g_free (iovout->data); + delete[] iovout->data; + iovout->data = NULL; + usingMixDataBuffer = FALSE; } + } - self->coded_buf_size = - max (self->coded_buf_size , - (parent->picture_width* parent->picture_height * 400) / (16 * 16)); + LOG_V( "end\n"); - /*in case got a very large user input bit rate value*/ - self->coded_buf_size = - min(self->coded_buf_size, - (parent->picture_width * parent->picture_height * 1.5 * 8)); - self->coded_buf_size = (self->coded_buf_size + 15) &(~15); - } - else - { - LOG_E( - "not H264 video encode Object\n"); - 
return MIX_RESULT_INVALID_PARAM; - } + /* + * The error level of MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW + * is lower than other errors, so if any other errors happen, we won't + * return slice size overflow + */ + if (ret == MIX_RESULT_SUCCESS && slice_size_overflow) + ret = MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW; - *max_size = self->coded_buf_size; + return ret; - return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed ( - guint8 * bufin, guint bufin_len, guint8* bufout, guint * bufout_len) -{ +MIX_RESULT +MixVideoFormatEnc_H264::_AnnexB_to_length_prefixed ( + uint8 * bufin, uint bufin_len, uint8* bufout, uint *bufout_len) { - guint pos = 0; - guint last_pos = 0; - guint zero_byte_count = 0; - guint nal_size = 0; - guint prefix_length = 0; - guint size_copied = 0; - guint leading_zero_count = 0; + uint pos = 0; + uint last_pos = 0; + + uint zero_byte_count = 0; + uint nal_size = 0; + uint prefix_length = 0; + uint size_copied = 0; + uint leading_zero_count = 0; if (bufin == NULL || bufout == NULL || bufout_len == NULL) { LOG_E( - "bufin == NULL || bufout == NULL || bufout_len = NULL\n"); + "bufin == NULL || bufout == NULL || bufout_len = NULL\n"); return MIX_RESULT_NULL_PTR; } if (bufin_len <= 0 || *bufout_len <= 0) { LOG_E( - "bufin_len <= 0 || *bufout_len <= 0\n"); + "bufin_len <= 0 || *bufout_len <= 0\n"); return MIX_RESULT_FAIL; } @@ -2223,553 +2263,715 @@ MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed ( } -MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix) -{ +MIX_RESULT +MixVideoFormatEnc_H264::_send_encode_command () { MIX_RESULT ret = MIX_RESULT_SUCCESS; LOG_V( "Begin\n"); - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; //if (mix->encoded_frames == 0 || parent->new_header_required) { - if (mix->frame_num == 0 || parent->new_header_required) { - ret = mix_videofmtenc_h264_send_seq_params (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_seq_params\n"); - return MIX_RESULT_FAIL; - } - - parent->new_header_required = FALSE; //Set to require new header filed to FALSE - } - - if (parent->render_mss_required && parent->max_slice_size != 0) { - ret = mix_videofmtenc_h264_send_max_slice_size(mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_max_slice_size\n"); - return MIX_RESULT_FAIL; - } - - parent->render_mss_required = FALSE; - } - - if (parent->render_bitrate_required) { - ret = mix_videofmtenc_h264_send_dynamic_bitrate(mix); + if (this->frame_num == 0 || this->new_header_required) { + ret = _send_seq_params (); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed mix_videofmtenc_h264_send_dynamic_bitrate\n"); + "Failed SendSeqParams\n"); return MIX_RESULT_FAIL; } - parent->render_bitrate_required = FALSE; + this->new_header_required = FALSE; //Set the new-header-required field to FALSE } - if (parent->render_AIR_required && - (parent->refresh_type == MIX_VIDEO_AIR || parent->refresh_type == MIX_VIDEO_BOTH)) - { - - ret = mix_videofmtenc_h264_send_AIR (mix); + if (this->render_mss_required && this->max_slice_size != 0) { + ret = _send_max_slice_size(); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed mix_videofmtenc_h264_send_AIR\n"); + "Failed SendMaxSliceSize\n"); return MIX_RESULT_FAIL; } - parent->render_AIR_required = FALSE; + this->render_mss_required = FALSE; } - if (parent->render_framerate_required) { - - ret = 
mix_videofmtenc_h264_send_dynamic_framerate (mix); + if (this->render_bitrate_required) { + ret = _send_dynamic_bitrate(); if (ret != MIX_RESULT_SUCCESS) { LOG_E( - "Failed mix_videofmtenc_h264_send_dynamic_framerate\n"); + "Failed SendDynamicBitrate\n"); return MIX_RESULT_FAIL; } - parent->render_framerate_required = FALSE; + this->render_bitrate_required = FALSE; } - ret = mix_videofmtenc_h264_send_picture_parameter (mix); - - if (ret != MIX_RESULT_SUCCESS) + if (this->render_AIR_required && + (this->refresh_type == MIX_VIDEO_AIR || this->refresh_type == MIX_VIDEO_BOTH)) { - LOG_E( - "Failed mix_videofmtenc_h264_send_picture_parameter\n"); - return MIX_RESULT_FAIL; - } - - ret = mix_videofmtenc_h264_send_slice_parameter (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_slice_parameter\n"); - return MIX_RESULT_FAIL; - } - - - LOG_V( "End\n"); - - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixEncParamsType params_type) { - - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - MixVideoConfigParamsEncH264 * config_params_enc_h264; - - LOG_V( "Begin\n"); - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) { - config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc); - } else { - LOG_V( - "mix_videofmtenc_h264_initialize: no h264 config params found\n"); - return MIX_RESULT_FAIL; - } - - /* - * For case params_type == MIX_ENC_PARAMS_SLICE_NUM - * we don't need to chain up to parent method, as we will handle - * dynamic slice height change inside this method, and other dynamic - * controls will be handled in parent method. 
- */ - if (params_type == MIX_ENC_PARAMS_SLICE_NUM) { - - g_mutex_lock(parent->objectlock); - - ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, - &self->slice_num); - - self->I_slice_num = self->P_slice_num = self->slice_num; - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); - - g_mutex_unlock(parent->objectlock); - - return ret; - } - - g_mutex_unlock(parent->objectlock); - - } - else if (params_type == MIX_ENC_PARAMS_I_SLICE_NUM) { - - g_mutex_lock(parent->objectlock); - - ret = mix_videoconfigparamsenc_h264_get_I_slice_num (config_params_enc_h264, - &self->I_slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n"); - - g_mutex_unlock(parent->objectlock); - - return ret; - } - - g_mutex_unlock(parent->objectlock); - - } - else if (params_type == MIX_ENC_PARAMS_P_SLICE_NUM) { - - g_mutex_lock(parent->objectlock); - - ret = mix_videoconfigparamsenc_h264_get_P_slice_num (config_params_enc_h264, - &self->P_slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n"); - - g_mutex_unlock(parent->objectlock); - - return ret; - } - - g_mutex_unlock(parent->objectlock); - - } else if (params_type == MIX_ENC_PARAMS_IDR_INTERVAL) { - - g_mutex_lock(parent->objectlock); - ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264, - &self->idr_interval); + ret = _send_AIR (); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed SendAIR\n"); + return MIX_RESULT_FAIL; + } - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); + this->render_AIR_required = FALSE; + } - g_mutex_unlock(parent->objectlock); + if (this->render_framerate_required) { - return ret; - } + ret = _send_dynamic_framerate (); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed SendDynamicFramerate\n"); + return MIX_RESULT_FAIL; + } - parent->new_header_required = TRUE; + this->render_framerate_required = FALSE; + } - g_mutex_unlock(parent->objectlock); + ret = _send_picture_parameter (); - } else{ + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed SendPictureParameter\n"); + return MIX_RESULT_FAIL; + } - /* Chainup parent method. 
*/ - if (parent_class->set_dynamic_config) { - ret = parent_class->set_dynamic_config(mix, config_params_enc, - params_type); - } + ret = _send_slice_parameter (); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( + "Failed SendSliceParameter\n"); + return MIX_RESULT_FAIL; + } - if (ret != MIX_RESULT_SUCCESS) - { - LOG_V( - "chainup parent method (set_dynamic_config) failed \n"); - return ret; - } - } + LOG_V( "End\n"); - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videofmtenc_h264_send_dynamic_bitrate (MixVideoFormatEnc_H264 *mix) -{ - VAStatus va_status; - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } +MIX_RESULT +MixVideoFormatEnc_H264::_send_dynamic_bitrate () { + VAStatus va_status; LOG_V( "Begin\n\n"); - MixVideoFormatEnc *parent = NULL; - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); + if (this->va_rcmode != MIX_RATE_CONTROL_VCM) { - if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { - - LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_dynamic_bitrate\n"); - return VA_STATUS_SUCCESS; + LOG_W ("Not in VCM mode, but call SendDynamicBitrate\n"); + return VA_STATUS_SUCCESS; } VAEncMiscParameterBuffer * misc_enc_param_buf; VAEncMiscParameterRateControl * bitrate_control_param; VABufferID misc_param_buffer_id; - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncMiscParameterBufferType, - sizeof(VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl), - 1, NULL, - &misc_param_buffer_id); + va_status = vaCreateBuffer(this->va_display, this->va_context, + VAEncMiscParameterBufferType, + sizeof(VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl), + 1, NULL, + &misc_param_buffer_id); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaCreateBuffer\n"); + "Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; } - va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); + va_status = vaMapBuffer (this->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaCreateBuffer\n"); + "Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; } misc_enc_param_buf->type = VAEncMiscParameterTypeRateControl; bitrate_control_param = (VAEncMiscParameterRateControl *)misc_enc_param_buf->data; - bitrate_control_param->bits_per_second = parent->bitrate; - bitrate_control_param->initial_qp = parent->initial_qp; - bitrate_control_param->min_qp = parent->min_qp; - bitrate_control_param->target_percentage = parent->target_percentage; - bitrate_control_param->window_size = parent->window_size; + bitrate_control_param->bits_per_second = this->bitrate; + bitrate_control_param->initial_qp = this->initial_qp; + bitrate_control_param->min_qp = this->min_qp; + bitrate_control_param->target_percentage = this->target_percentage; + bitrate_control_param->window_size = this->window_size; - va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); + va_status = vaUnmapBuffer(this->va_display, misc_param_buffer_id); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaUnmapBuffer\n"); + "Failed to vaUnmapBuffer\n"); return MIX_RESULT_FAIL; } - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &misc_param_buffer_id, 1); + va_status = vaRenderPicture(this->va_display, this->va_context, + &misc_param_buffer_id, 1); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed 
to vaRenderPicture\n"); + "Failed to vaRenderPicture\n"); return MIX_RESULT_FAIL; } return MIX_RESULT_SUCCESS; - } -MIX_RESULT mix_videofmtenc_h264_send_max_slice_size (MixVideoFormatEnc_H264 *mix) -{ +MIX_RESULT +MixVideoFormatEnc_H264::_send_max_slice_size () { VAStatus va_status; - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - LOG_V( "Begin\n\n"); - MixVideoFormatEnc *parent = NULL; - - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { + if (this->va_rcmode != MIX_RATE_CONTROL_VCM) { - LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_max_slice_size\n"); - return VA_STATUS_SUCCESS; + LOG_W ("Not in VCM mode, but call send_max_slice_size\n"); + return VA_STATUS_SUCCESS; } VAEncMiscParameterBuffer * misc_enc_param_buf; VAEncMiscParameterMaxSliceSize * max_slice_size_param; VABufferID misc_param_buffer_id; - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncMiscParameterBufferType, - sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterMaxSliceSize), - 1, NULL, - &misc_param_buffer_id); + va_status = vaCreateBuffer(this->va_display, this->va_context, + VAEncMiscParameterBufferType, + sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterMaxSliceSize), + 1, NULL, + &misc_param_buffer_id); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaCreateBuffer\n"); + "Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; } - va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); + va_status = vaMapBuffer (this->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaCreateBuffer\n"); + "Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; } misc_enc_param_buf->type = VAEncMiscParameterTypeMaxSliceSize; max_slice_size_param = (VAEncMiscParameterMaxSliceSize *)misc_enc_param_buf->data; - max_slice_size_param->max_slice_size = parent->max_slice_size; + max_slice_size_param->max_slice_size = this->max_slice_size; - va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); + va_status = vaUnmapBuffer(this->va_display, misc_param_buffer_id); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaUnmapBuffer\n"); + "Failed to vaUnmapBuffer\n"); return MIX_RESULT_FAIL; } LOG_I( "max slice size = %d\n", - max_slice_size_param->max_slice_size); + max_slice_size_param->max_slice_size); - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &misc_param_buffer_id, 1); + va_status = vaRenderPicture(this->va_display, this->va_context, + &misc_param_buffer_id, 1); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaRenderPicture\n"); + "Failed to vaRenderPicture\n"); return MIX_RESULT_FAIL; } return MIX_RESULT_SUCCESS; + } +MIX_RESULT +MixVideoFormatEnc_H264::_send_dynamic_framerate () { -MIX_RESULT mix_videofmtenc_h264_send_AIR (MixVideoFormatEnc_H264 *mix) -{ VAStatus va_status; - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; + + if (this->va_rcmode != MIX_RATE_CONTROL_VCM) { + + LOG_W ("Not in VCM mode, but call SendDynamicFramerate\n"); + return VA_STATUS_SUCCESS; } - LOG_V( "Begin\n\n"); + VAEncMiscParameterBuffer * misc_enc_param_buf; + VAEncMiscParameterFrameRate * framerate_param; + VABufferID misc_param_buffer_id; + + va_status = vaCreateBuffer(this->va_display, this->va_context, + VAEncMiscParameterBufferType, + 
sizeof(misc_enc_param_buf) + sizeof(VAEncMiscParameterFrameRate), + 1, NULL, + &misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } - MixVideoFormatEnc *parent = NULL; + va_status = vaMapBuffer (this->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + misc_enc_param_buf->type = VAEncMiscParameterTypeFrameRate; + framerate_param = (VAEncMiscParameterFrameRate *)misc_enc_param_buf->data; + framerate_param->framerate = + (unsigned int) (this->frame_rate_num + this->frame_rate_denom /2 ) / this->frame_rate_denom; + + va_status = vaUnmapBuffer(this->va_display, misc_param_buffer_id); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaUnmapBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(this->va_display, this->va_context, + &misc_param_buffer_id, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } - if (!MIX_IS_VIDEOFORMATENC_H264(mix)) - return MIX_RESULT_INVALID_PARAM; - parent = MIX_VIDEOFORMATENC(&(mix->parent)); + LOG_I( "frame rate = %d\n", + framerate_param->framerate); + + return MIX_RESULT_SUCCESS; + +} + +MIX_RESULT +MixVideoFormatEnc_H264::_send_AIR () { + + VAStatus va_status; + LOG_V( "Begin\n\n"); - if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) { + if (this->va_rcmode != MIX_RATE_CONTROL_VCM) { - LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_AIR\n"); - return VA_STATUS_SUCCESS; + LOG_W ("Not in VCM mode, but call send_AIR\n"); + return VA_STATUS_SUCCESS; } VAEncMiscParameterBuffer * misc_enc_param_buf; VAEncMiscParameterAIR * air_param; VABufferID misc_param_buffer_id; - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncMiscParameterBufferType, - sizeof(misc_enc_param_buf) + sizeof(VAEncMiscParameterAIR), - 1, NULL, - &misc_param_buffer_id); + va_status = vaCreateBuffer(this->va_display, this->va_context, + VAEncMiscParameterBufferType, + sizeof(misc_enc_param_buf) + sizeof(VAEncMiscParameterAIR), + 1, NULL, + &misc_param_buffer_id); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaCreateBuffer\n"); + "Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; } - va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); + va_status = vaMapBuffer (this->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaCreateBuffer\n"); + "Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; } misc_enc_param_buf->type = VAEncMiscParameterTypeAIR; air_param = (VAEncMiscParameterAIR *)misc_enc_param_buf->data; - air_param->air_auto = parent->air_params.air_auto; - air_param->air_num_mbs = parent->air_params.air_MBs; - air_param->air_threshold = parent->air_params.air_threshold; + air_param->air_auto = this->air_params.air_auto; + air_param->air_num_mbs = this->air_params.air_MBs; + air_param->air_threshold = this->air_params.air_threshold; - va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id); + va_status = vaUnmapBuffer(this->va_display, misc_param_buffer_id); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaUnmapBuffer\n"); + "Failed to vaUnmapBuffer\n"); return MIX_RESULT_FAIL; } - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &misc_param_buffer_id, 1); + va_status = 
vaRenderPicture(this->va_display, this->va_context, + &misc_param_buffer_id, 1); if (va_status != VA_STATUS_SUCCESS) { LOG_E( - "Failed to vaRenderPicture\n"); + "Failed to vaRenderPicture\n"); return MIX_RESULT_FAIL; } LOG_I( "air_threshold = %d\n", - air_param->air_threshold); + air_param->air_threshold); return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videofmtenc_h264_send_dynamic_framerate (MixVideoFormatEnc_H264 *mix) +int +MixVideoFormatEnc_H264::_calc_level(int nummbs) { + int level = 30; + + if (nummbs < 3600) + { + level = 30; + } + else if (nummbs < 5120) + { + level = 31; + } + else if (nummbs < 8192) + { + level = 32; + } + else if (nummbs < 8704) + { + level = 40; + } + else if (nummbs < 22080) + { + level = 42; + } + else if (nummbs < 36864) + { + level = 50; + } + else + { + level = 51; + } + return level; +} + +MIX_RESULT +MixVideoFormatEnc_H264::_send_seq_params () { VAStatus va_status; + VAEncSequenceParameterBufferH264 h264_seq_param; + int level; - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; + LOG_V( "Begin\n\n"); + + /*set up the sequence params for HW*/ +// h264_seq_param.level_idc = 30; //TODO, hard code now + h264_seq_param.intra_period = this->intra_period; + h264_seq_param.intra_idr_period = this->idr_interval; + h264_seq_param.picture_width_in_mbs = (this->picture_width + 15) / 16; + h264_seq_param.picture_height_in_mbs = (this->picture_height + 15) / 16; + + level = _calc_level( + h264_seq_param.picture_width_in_mbs * h264_seq_param.picture_height_in_mbs); + + h264_seq_param.level_idc = level; + + h264_seq_param.bits_per_second = this->bitrate; + h264_seq_param.frame_rate = + (unsigned int) (this->frame_rate_num + this->frame_rate_denom /2 ) / this->frame_rate_denom; + h264_seq_param.initial_qp = this->initial_qp; + h264_seq_param.min_qp = this->min_qp; + h264_seq_param.basic_unit_size = this->basic_unit_size; //for rate control usage + h264_seq_param.intra_period = this->intra_period; + h264_seq_param.vui_flag = this->vui_flag; + //h264_seq_param.vui_flag = 248; + //h264_seq_param.seq_parameter_set_id = 176; + + // This is a temporary fix suggested by Binglin for bad encoding quality issue + //h264_seq_param.max_num_ref_frames = 1; // TODO: We need a long term design for this field + + LOG_V( + "===h264 sequence params===\n"); + + LOG_I( "seq_parameter_set_id = %d\n", + (uint)h264_seq_param.seq_parameter_set_id); + LOG_I( "level_idc = %d\n", + (uint)h264_seq_param.level_idc); + LOG_I( "intra_period = %d\n", + h264_seq_param.intra_period); + LOG_I( "idr_interval = %d\n", + h264_seq_param.intra_idr_period); + LOG_I( "picture_width_in_mbs = %d\n", + h264_seq_param.picture_width_in_mbs); + LOG_I( "picture_height_in_mbs = %d\n", + h264_seq_param.picture_height_in_mbs); + LOG_I( "bitrate = %d\n", + h264_seq_param.bits_per_second); + LOG_I( "frame_rate = %d\n", + h264_seq_param.frame_rate); + LOG_I( "initial_qp = %d\n", + h264_seq_param.initial_qp); + LOG_I( "min_qp = %d\n", + h264_seq_param.min_qp); + LOG_I( "basic_unit_size = %d\n", + h264_seq_param.basic_unit_size); + LOG_I( "vui_flag = %d\n\n", + h264_seq_param.vui_flag); + + va_status = vaCreateBuffer(this->va_display, this->va_context, + VAEncSequenceParameterBufferType, + sizeof(h264_seq_param), + 1, &h264_seq_param, + &this->seq_param_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + + va_status = vaRenderPicture(this->va_display, this->va_context, + &this->seq_param_buf, 1); + if (va_status != 
+MIX_RESULT
+MixVideoFormatEnc_H264::_send_seq_params () {
    VAStatus va_status;
+    VAEncSequenceParameterBufferH264 h264_seq_param;
+    int level;
-    if (mix == NULL) {
-        LOG_E("mix == NULL\n");
-        return MIX_RESULT_NULL_PTR;
+    LOG_V( "Begin\n\n");
+
+    /*zero the struct so fields that are not set below (e.g.
+     * seq_parameter_set_id, which is logged later) are deterministic*/
+    memset(&h264_seq_param, 0, sizeof(h264_seq_param));
+
+    /*set up the sequence params for HW*/
+//    h264_seq_param.level_idc = 30;  //TODO, hard code now
+    h264_seq_param.intra_period = this->intra_period;
+    h264_seq_param.intra_idr_period = this->idr_interval;
+    h264_seq_param.picture_width_in_mbs = (this->picture_width + 15) / 16;
+    h264_seq_param.picture_height_in_mbs = (this->picture_height + 15) / 16;
+
+    level = _calc_level(
+        h264_seq_param.picture_width_in_mbs * h264_seq_param.picture_height_in_mbs);
+
+    h264_seq_param.level_idc = level;
+
+    h264_seq_param.bits_per_second = this->bitrate;
+    h264_seq_param.frame_rate =
+        (unsigned int) (this->frame_rate_num + this->frame_rate_denom /2 ) / this->frame_rate_denom;
+    h264_seq_param.initial_qp = this->initial_qp;
+    h264_seq_param.min_qp = this->min_qp;
+    h264_seq_param.basic_unit_size = this->basic_unit_size; //for rate control usage
+    h264_seq_param.intra_period = this->intra_period;
+    h264_seq_param.vui_flag = this->vui_flag;
+    //h264_seq_param.vui_flag = 248;
+    //h264_seq_param.seq_parameter_set_id = 176;
+
+    // This is a temporary fix suggested by Binglin for bad encoding quality issue
+    //h264_seq_param.max_num_ref_frames = 1; // TODO: We need a long term design for this field
+
+    LOG_V(
+        "===h264 sequence params===\n");
+
+    LOG_I( "seq_parameter_set_id = %d\n",
+           (uint)h264_seq_param.seq_parameter_set_id);
+    LOG_I( "level_idc = %d\n",
+           (uint)h264_seq_param.level_idc);
+    LOG_I( "intra_period = %d\n",
+           h264_seq_param.intra_period);
+    LOG_I( "idr_interval = %d\n",
+           h264_seq_param.intra_idr_period);
+    LOG_I( "picture_width_in_mbs = %d\n",
+           h264_seq_param.picture_width_in_mbs);
+    LOG_I( "picture_height_in_mbs = %d\n",
+           h264_seq_param.picture_height_in_mbs);
+    LOG_I( "bitrate = %d\n",
+           h264_seq_param.bits_per_second);
+    LOG_I( "frame_rate = %d\n",
+           h264_seq_param.frame_rate);
+    LOG_I( "initial_qp = %d\n",
+           h264_seq_param.initial_qp);
+    LOG_I( "min_qp = %d\n",
+           h264_seq_param.min_qp);
+    LOG_I( "basic_unit_size = %d\n",
+           h264_seq_param.basic_unit_size);
+    LOG_I( "vui_flag = %d\n\n",
+           h264_seq_param.vui_flag);
+
+    va_status = vaCreateBuffer(this->va_display, this->va_context,
+            VAEncSequenceParameterBufferType,
+            sizeof(h264_seq_param),
+            1, &h264_seq_param,
+            &this->seq_param_buf);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaCreateBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    va_status = vaRenderPicture(this->va_display, this->va_context,
+            &this->seq_param_buf, 1);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaRenderPicture\n");
+        return MIX_RESULT_FAIL;
    }
+    return MIX_RESULT_SUCCESS;
+
+}
+
+
+MIX_RESULT
+MixVideoFormatEnc_H264::_send_picture_parameter () {
+    VAStatus va_status;
+    VAEncPictureParameterBufferH264 h264_pic_param;
+    LOG_V( "Begin\n\n");
-    MixVideoFormatEnc *parent = NULL;
+    /*set picture params for HW*/
+    h264_pic_param.reference_picture = this->ref_frame->frame_id;
+    h264_pic_param.reconstructed_picture = this->rec_frame->frame_id;
+    h264_pic_param.coded_buf = this->coded_buf[this->coded_buf_index];
+    h264_pic_param.picture_width = this->picture_width;
+    h264_pic_param.picture_height = this->picture_height;
+    h264_pic_param.last_picture = 0;
+
-    if (!MIX_IS_VIDEOFORMATENC_H264(mix))
-        return MIX_RESULT_INVALID_PARAM;
+    LOG_V(
+        "======h264 picture params======\n");
+    LOG_I( "reference_picture = 0x%08x\n",
+           h264_pic_param.reference_picture);
+    LOG_I( "reconstructed_picture = 0x%08x\n",
+           h264_pic_param.reconstructed_picture);
+    LOG_I( "coded_buf_index = %d\n",
+           this->coded_buf_index);
+    LOG_I( "coded_buf = 0x%08x\n",
+           h264_pic_param.coded_buf);
+    LOG_I( "picture_width = %d\n",
+           h264_pic_param.picture_width);
+    LOG_I( "picture_height = %d\n\n",
+           h264_pic_param.picture_height);
+
+    va_status = vaCreateBuffer(this->va_display, this->va_context,
+            VAEncPictureParameterBufferType,
+            sizeof(h264_pic_param),
+            1,&h264_pic_param,
+            &this->pic_param_buf);
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaCreateBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
-    parent = MIX_VIDEOFORMATENC(&(mix->parent));
-    if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) {
+    va_status = vaRenderPicture(this->va_display, this->va_context,
+            &this->pic_param_buf, 1);
-        LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_dynamic_framerate\n");
-        return VA_STATUS_SUCCESS;
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaRenderPicture\n");
+        return MIX_RESULT_FAIL;
    }
-    VAEncMiscParameterBuffer * misc_enc_param_buf;
-    VAEncMiscParameterFrameRate * framerate_param;
-    VABufferID misc_param_buffer_id;
+    LOG_V( "end\n");
+    return MIX_RESULT_SUCCESS;
+
+}
+
+
+MIX_RESULT
+MixVideoFormatEnc_H264::_send_slice_parameter () {
+    VAStatus va_status;
+
+    uint slice_num;
+    uint slice_height;
+    uint slice_index;
+    uint slice_height_in_mb;
+    uint max_slice_num;
+    uint min_slice_num;
+
+    int actual_slice_height_in_mb;
+    int start_row_in_mb;
+    int modulus;
+
+    LOG_V( "Begin\n\n");
+
+
+    max_slice_num = (this->picture_height + 15) / 16;
+    min_slice_num = 1;
+
+    if (this->is_intra) {
+        slice_num = this->I_slice_num;
+    }
+    else {
+        slice_num = this->P_slice_num;
+    }
+
+    if (slice_num < min_slice_num) {
+        LOG_W ("Slice Number is too small");
+        slice_num = min_slice_num;
+    }
+
+    if (slice_num > max_slice_num) {
+        LOG_W ("Slice Number is too big");
+        slice_num = max_slice_num;
+    }
+
+    this->slice_num = slice_num;
+    modulus = max_slice_num % slice_num;
+    slice_height_in_mb = (max_slice_num - modulus) / slice_num ;
+
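+    // How the split works (illustrative numbers, not from the original
+    // patch): the picture is cut into slice_num slices of
+    // slice_height_in_mb MB rows each, and the first `modulus` slices
+    // carry one extra row. For example, 45 MB rows over slice_num = 4
+    // gives modulus = 1 and slice_height_in_mb = 11, i.e. slices of
+    // 12, 11, 11 and 11 rows.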
+#if 1
+    va_status = vaCreateBuffer (this->va_display, this->va_context,
+            VAEncSliceParameterBufferType,
+            sizeof(VAEncSliceParameterBuffer),
+            slice_num, NULL,
+            &this->slice_param_buf);
-    va_status = vaCreateBuffer(parent->va_display, parent->va_context,
-            VAEncMiscParameterBufferType,
-            sizeof(misc_enc_param_buf) + sizeof(VAEncMiscParameterFrameRate),
-            1, NULL,
-            &misc_param_buffer_id);
    if (va_status != VA_STATUS_SUCCESS)
    {
        LOG_E(
-                "Failed to vaCreateBuffer\n");
+                "Failed to vaCreateBuffer\n");
        return MIX_RESULT_FAIL;
    }
-    va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf);
+    VAEncSliceParameterBuffer *slice_param, *current_slice;
+
+    va_status = vaMapBuffer(this->va_display,
+            this->slice_param_buf,
+            (void **)&slice_param);
+
    if (va_status != VA_STATUS_SUCCESS)
    {
        LOG_E(
-                "Failed to vaCreateBuffer\n");
+                "Failed to vaMapBuffer\n");
        return MIX_RESULT_FAIL;
    }
-    misc_enc_param_buf->type = VAEncMiscParameterTypeFrameRate;
-    framerate_param = (VAEncMiscParameterFrameRate *)misc_enc_param_buf->data;
-    framerate_param->framerate =
-        (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom;
+    current_slice = slice_param;
+    start_row_in_mb = 0;
+    for (slice_index = 0; slice_index < slice_num; slice_index++) {
+        current_slice = slice_param + slice_index;
+
+        actual_slice_height_in_mb = slice_height_in_mb;
+        if (slice_index < modulus) {
+            actual_slice_height_in_mb ++;
+        }
+
+        // starting MB row number for this slice
+        current_slice->start_row_number = start_row_in_mb;
+        // slice height measured in MB
+        current_slice->slice_height = actual_slice_height_in_mb;
+        current_slice->slice_flags.bits.is_intra = this->is_intra;
+        current_slice->slice_flags.bits.disable_deblocking_filter_idc
+            = this->disable_deblocking_filter_idc;
+
+        // This is a temporary fix suggested by Binglin for bad encoding quality issue
+        //current_slice->slice_flags.bits.uses_long_term_ref = 0; // TODO: We need a long term design for this field
+        //current_slice->slice_flags.bits.is_long_term_ref = 0; // TODO: We need a long term design for this field
+
+        LOG_V(
+            "======h264 slice params======\n");
+
+        LOG_I( "slice_index = %d\n",
+               (int) slice_index);
+        LOG_I( "start_row_number = %d\n",
+               (int) current_slice->start_row_number);
+        LOG_I( "slice_height_in_mb = %d\n",
+               (int) current_slice->slice_height);
+        LOG_I( "slice.is_intra = %d\n",
+               (int) current_slice->slice_flags.bits.is_intra);
+        LOG_I(
+            "disable_deblocking_filter_idc = %d\n\n",
+            (int) this->disable_deblocking_filter_idc);
+
+        start_row_in_mb += actual_slice_height_in_mb;
+    }
+
+    va_status = vaUnmapBuffer(this->va_display, this->slice_param_buf);
-    va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id);
    if (va_status != VA_STATUS_SUCCESS)
    {
        LOG_E(
-                "Failed to vaUnmapBuffer\n");
+                "Failed to vaUnmapBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+#endif
+
+#if 0
+    VAEncSliceParameterBuffer slice_param;
+    slice_index = 0;
+    slice_height_in_mb = slice_height / 16;
+    slice_param.start_row_number = 0;
+    slice_param.slice_height = slice_height / 16;
+    slice_param.slice_flags.bits.is_intra = mix->is_intra;
+    slice_param.slice_flags.bits.disable_deblocking_filter_idc
+        = mix->disable_deblocking_filter_idc;
+
+    va_status = vaCreateBuffer (parent->va_display, parent->va_context,
+            VAEncSliceParameterBufferType,
+            sizeof(slice_param),
+            slice_num, &slice_param,
+            &mix->slice_param_buf);
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaCreateBuffer\n");
        return MIX_RESULT_FAIL;
    }
+#endif
+
+    va_status = vaRenderPicture(this->va_display, this->va_context,
+            &this->slice_param_buf, 1);
-    va_status = vaRenderPicture(parent->va_display, parent->va_context,
-            &misc_param_buffer_id, 1);
    if (va_status != VA_STATUS_SUCCESS)
    {
        LOG_E(
-                "Failed to vaRenderPicture\n");
+                "Failed to vaRenderPicture\n");
        return MIX_RESULT_FAIL;
    }
-    LOG_I( "frame rate = %d\n",
-            framerate_param->framerate);
+    LOG_V( "end\n");
    return MIX_RESULT_SUCCESS;
-
}
+
+
diff --git a/mix_video/src/mixvideoformatenc_h264.h b/mix_video/src/mixvideoformatenc_h264.h
index 6cd9d83..29871ad 100644
--- a/mix_video/src/mixvideoformatenc_h264.h
+++ b/mix_video/src/mixvideoformatenc_h264.h
@@ -12,8 +12,6 @@
 #include "mixvideoformatenc.h"
 #include "mixvideoframe_private.h"
-G_BEGIN_DECLS
-
 #define MIX_VIDEO_ENC_H264_SURFACE_NUM 20
 #define min(X,Y) (((X) < (Y)) ? (X) : (Y))
@@ -22,80 +20,98 @@ G_BEGIN_DECLS
 /*
 * Type macros.
 */
-#define MIX_TYPE_VIDEOFORMATENC_H264 (mix_videoformatenc_h264_get_type ())
-#define MIX_VIDEOFORMATENC_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC_H264, MixVideoFormatEnc_H264))
-#define MIX_IS_VIDEOFORMATENC_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC_H264))
-#define MIX_VIDEOFORMATENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC_H264, MixVideoFormatEnc_H264Class))
-#define MIX_IS_VIDEOFORMATENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC_H264))
-#define MIX_VIDEOFORMATENC_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC_H264, MixVideoFormatEnc_H264Class))
-
-typedef struct _MixVideoFormatEnc_H264 MixVideoFormatEnc_H264;
-typedef struct _MixVideoFormatEnc_H264Class MixVideoFormatEnc_H264Class;
-
-struct _MixVideoFormatEnc_H264 {
-    /*< public > */
-    MixVideoFormatEnc parent;
-
-    VABufferID coded_buf[2];
-    VABufferID last_coded_buf;
-    VABufferID seq_param_buf;
-    VABufferID pic_param_buf;
-    VABufferID slice_param_buf;
-    VASurfaceID * ci_shared_surfaces;
-    VASurfaceID * surfaces;
-    guint surface_num;
-
-    MixVideoFrame *cur_frame;  //current input frame to be encoded;
-    MixVideoFrame *ref_frame;  //reference frame
-    MixVideoFrame *rec_frame;  //reconstructed frame;
-    MixVideoFrame *last_frame; //last frame;
-    MixVideoFrame *lookup_frame;
-#ifdef ANDROID
-    MixBuffer *last_mix_buffer;
-#endif
-
-    guint basic_unit_size;  //for rate control
-    guint disable_deblocking_filter_idc;
-    MixDelimiterType delimiter_type;
-    guint idr_interval;
-    guint slice_num;
-    guint I_slice_num;
-    guint P_slice_num;
-    guint va_rcmode;
+#define MIX_VIDEOFORMATENC_H264(obj) (reinterpret_cast<MixVideoFormatEnc_H264*>(obj))
+#define MIX_IS_VIDEOFORMATENC_H264(obj) ((NULL != MIX_VIDEOFORMATENC_H264(obj)) ?
TRUE : FALSE) - guint encoded_frames; - guint frame_num; - gboolean pic_skipped; +class MixVideoFormatEnc_H264 : public MixVideoFormatEnc { - gboolean is_intra; +public: + MixVideoFormatEnc_H264(); + ~MixVideoFormatEnc_H264(); - guint coded_buf_size; - guint coded_buf_index; + virtual MIX_RESULT Initialize( + MixVideoConfigParamsEnc* config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + MixUsrReqSurfacesInfo * requested_surface_info, + VADisplay va_display); - /*< public > */ -}; + virtual MIX_RESULT Encode( + MixBuffer * bufin[], int bufincnt, + MixIOVec * iovout[], int iovoutcnt, + MixVideoEncodeParams * encode_params); -/** - * MixVideoFormatEnc_H264Class: - * - * MI-X Video object class - */ -struct _MixVideoFormatEnc_H264Class { - /*< public > */ - MixVideoFormatEncClass parent_class; + virtual MIX_RESULT Flush(); + + virtual MIX_RESULT Deinitialize(); + + virtual MIX_RESULT GetMaxEncodedBufSize (uint *max_size); + + virtual MIX_RESULT SetDynamicEncConfig ( + MixVideoConfigParamsEnc * config_params, MixEncParamsType params_type); + +private: + /* Local Methods */ + MIX_RESULT _process_encode (MixBuffer * bufin, MixIOVec * iovout); + MIX_RESULT _AnnexB_to_length_prefixed ( + uint8 * bufin, uint bufin_len, + uint8* bufout, uint *bufout_len); + MIX_RESULT _send_encode_command (); + MIX_RESULT _send_dynamic_bitrate (); + MIX_RESULT _send_max_slice_size (); + MIX_RESULT _send_dynamic_framerate (); + MIX_RESULT _send_AIR (); + MIX_RESULT _send_seq_params (); + MIX_RESULT _send_picture_parameter (); + MIX_RESULT _send_slice_parameter (); + int _calc_level(int nummbs); + + +public: + VABufferID coded_buf[2]; + VABufferID last_coded_buf; + VABufferID seq_param_buf; + VABufferID pic_param_buf; + VABufferID slice_param_buf; + //VASurfaceID * ci_shared_surfaces; + VASurfaceID * shared_surfaces; + VASurfaceID * surfaces; + uint surface_num; + uint shared_surfaces_cnt; + uint precreated_surfaces_cnt; + + MixVideoFrame *cur_frame; //current input frame to be encoded; + MixVideoFrame *ref_frame; //reference frame + MixVideoFrame *rec_frame; //reconstructed frame; + MixVideoFrame *last_frame; //last frame; + MixVideoFrame *lookup_frame; + MixBuffer *last_mix_buffer; + + uint basic_unit_size; //for rate control + uint disable_deblocking_filter_idc; + uint8 vui_flag; + MixDelimiterType delimiter_type; + uint idr_interval; + uint slice_num; + uint I_slice_num; + uint P_slice_num; + uint va_rcmode_h264; - /* class members */ + uint encoded_frames; + uint frame_num; + bool pic_skipped; + + bool is_intra; + + uint coded_buf_size; + uint coded_buf_index; + uint8 ** usrptr; + uint alloc_surface_cnt; - /*< public > */ }; -/** - * mix_videoformatenc_h264_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_videoformatenc_h264_get_type(void); + /** * mix_videoformatenc_h264_new: @@ -120,43 +136,6 @@ MixVideoFormatEnc_H264 *mix_videoformatenc_h264_ref(MixVideoFormatEnc_H264 * mix * * Decrement reference count of the object. 
*/ -#define mix_videoformatenc_h264_unref(obj) g_object_unref (G_OBJECT(obj)) - -/* Class Methods */ - -/* H.264 vmethods */ -MIX_RESULT mix_videofmtenc_h264_getcaps(MixVideoFormatEnc *mix, GString *msg); -MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); -MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params); -MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixEncParamsType params_type); - -/* Local Methods */ - -MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint *max_size); -MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, MixBuffer * bufin, - MixIOVec * iovout); -MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed ( - guint8 * bufin, guint bufin_len, guint8* bufout, guint *bufout_len); - -MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix); - -MIX_RESULT mix_videofmtenc_h264_send_dynamic_bitrate (MixVideoFormatEnc_H264 *mix); -MIX_RESULT mix_videofmtenc_h264_send_max_slice_size (MixVideoFormatEnc_H264 *mix); -MIX_RESULT mix_videofmtenc_h264_send_dynamic_framerate (MixVideoFormatEnc_H264 *mix); -MIX_RESULT mix_videofmtenc_h264_send_AIR (MixVideoFormatEnc_H264 *mix); - -G_END_DECLS +MixVideoFormatEnc_H264 *mix_videoformatenc_h264_unref(MixVideoFormatEnc_H264 * mix); #endif /* __MIX_VIDEOFORMATENC_H264_H__ */ diff --git a/mix_video/src/mixvideoformatenc_mpeg4.cpp b/mix_video/src/mixvideoformatenc_mpeg4.cpp index c74ed8f..1f76307 100644 --- a/mix_video/src/mixvideoformatenc_mpeg4.cpp +++ b/mix_video/src/mixvideoformatenc_mpeg4.cpp @@ -5,7 +5,7 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#include + #include #include @@ -21,711 +21,657 @@ Window win = 0; #endif /* SHOW_SRC */ - -/* The parent class. The pointer will be saved - * in this class's initialization. The pointer - * can be used for chaining method call if needed. 
- */
-static MixVideoFormatEncClass *parent_class = NULL;
-
-static void mix_videoformatenc_mpeg4_finalize(GObject * obj);
-
-/*
- * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC
- */
-G_DEFINE_TYPE (MixVideoFormatEnc_MPEG4, mix_videoformatenc_mpeg4, MIX_TYPE_VIDEOFORMATENC);
-
-static void mix_videoformatenc_mpeg4_init(MixVideoFormatEnc_MPEG4 * self) {
-    MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self);
-
-    /*member initialization */
-    self->encoded_frames = 0;
-    self->pic_skipped = FALSE;
-    self->is_intra = TRUE;
-    self->cur_frame = NULL;
-    self->ref_frame = NULL;
-    self->rec_frame = NULL;
-
-    self->ci_shared_surfaces = NULL;
-    self->surfaces= NULL;
-    self->surface_num = 0;
-    self->coded_buf_index = 0;
-
-    parent->initialized = FALSE;
-
+MixVideoFormatEnc_MPEG4::MixVideoFormatEnc_MPEG4()
+        :shared_surfaces(NULL)
+        ,surfaces(NULL)
+        ,surface_num(0)
+        ,shared_surfaces_cnt(0)
+        ,precreated_surfaces_cnt(0)
+        ,cur_frame(NULL)
+        ,ref_frame(NULL)
+        ,rec_frame(NULL)
+        ,lookup_frame(NULL)
+        ,last_mix_buffer(NULL)
+        ,encoded_frames(0)
+        ,pic_skipped(FALSE)
+        ,is_intra(TRUE)
+        ,coded_buf_size(0)
+        ,coded_buf_index(0)
+        ,usrptr(NULL) {
}
-static void mix_videoformatenc_mpeg4_class_init(
-        MixVideoFormatEnc_MPEG4Class * klass) {
-
-    /* root class */
-    GObjectClass *gobject_class = (GObjectClass *) klass;
-
-    /* direct parent class */
-    MixVideoFormatEncClass *video_formatenc_class =
-        MIX_VIDEOFORMATENC_CLASS(klass);
-
-    /* parent class for later use */
-    parent_class = reinterpret_cast<MixVideoFormatEncClass*>(g_type_class_peek_parent(klass));
-
-    /* setup finializer */
-    gobject_class->finalize = mix_videoformatenc_mpeg4_finalize;
-
-    /* setup vmethods with base implementation */
-    video_formatenc_class->getcaps = mix_videofmtenc_mpeg4_getcaps;
-    video_formatenc_class->initialize = mix_videofmtenc_mpeg4_initialize;
-    video_formatenc_class->encode = mix_videofmtenc_mpeg4_encode;
-    video_formatenc_class->flush = mix_videofmtenc_mpeg4_flush;
-    video_formatenc_class->eos = mix_videofmtenc_mpeg4_eos;
-    video_formatenc_class->deinitialize = mix_videofmtenc_mpeg4_deinitialize;
-    video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_mpeg4_get_max_encoded_buf_size;
+MixVideoFormatEnc_MPEG4::~MixVideoFormatEnc_MPEG4() {
}
 MixVideoFormatEnc_MPEG4 *
 mix_videoformatenc_mpeg4_new(void) {
-    MixVideoFormatEnc_MPEG4 *ret = reinterpret_cast<MixVideoFormatEnc_MPEG4*>(
-            g_object_new(MIX_TYPE_VIDEOFORMATENC_MPEG4, NULL));
-
-    return ret;
+    return new MixVideoFormatEnc_MPEG4();
}
-void mix_videoformatenc_mpeg4_finalize(GObject * obj) {
-    /* clean up here.
*/ - - /*MixVideoFormatEnc_MPEG4 *mix = MIX_VIDEOFORMATENC_MPEG4(obj); */ - GObjectClass *root_class = (GObjectClass *) parent_class; - - LOG_V( "\n"); - - /* Chain up parent */ - if (root_class->finalize) { - root_class->finalize(obj); +MixVideoFormatEnc_MPEG4 * +mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix) { + if (NULL != mix) { + mix->Ref(); + return mix; + } + else { + return NULL; } } MixVideoFormatEnc_MPEG4 * -mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix) { - return (MixVideoFormatEnc_MPEG4 *) g_object_ref(G_OBJECT(mix)); +mix_videoformatenc_mpeg4_unref(MixVideoFormatEnc_MPEG4 * mix) { + if (NULL!=mix) + if (NULL != mix->Unref()) + return mix; + else + return NULL; + else + return NULL; } -/*MPEG-4:2 vmethods implementation */ -MIX_RESULT mix_videofmtenc_mpeg4_getcaps(MixVideoFormatEnc *mix, GString *msg) { - - LOG_V( "mix_videofmtenc_mpeg4_getcaps\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - if (parent_class->getcaps) { - return parent_class->getcaps(mix, msg); - } - return MIX_RESULT_SUCCESS; -} -#define CLEAN_UP {\ - if (ret == MIX_RESULT_SUCCESS) {\ - parent->initialized = TRUE; \ - }\ - /*free profiles and entrypoints*/\ - if (va_profiles) \ - g_free(va_profiles);\ - if (va_entrypoints)\ - g_free (va_entrypoints); \ - if (surfaces) \ - g_free (surfaces);\ - g_mutex_unlock(parent->objectlock); \ - LOG_V( "end\n"); \ - return ret;} - -MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display ) { +/*MPEG-4:2 vmethods implementation */ +MIX_RESULT MixVideoFormatEnc_MPEG4::Initialize( + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + MixUsrReqSurfacesInfo * requested_surface_info, + VADisplay va_display ) { MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; MixVideoConfigParamsEncMPEG4 * config_params_enc_mpeg4; - + VAStatus va_status = VA_STATUS_SUCCESS; VASurfaceID * surfaces = NULL; - - gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - gint va_num_profiles, va_num_entrypoints; + + int va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; + int va_num_profiles, va_num_entrypoints; VAProfile *va_profiles = NULL; VAEntrypoint *va_entrypoints = NULL; - VAConfigAttrib va_attrib[2]; - guint index; - + VAConfigAttrib va_attrib[2]; + uint index; + uint max_size = 0; + + /* + * Different MIX buffer mode will have different surface handling approach + */ + uint normal_surfaces_cnt = 2; + + /* + * shared_surfaces_cnt is for upstream buffer allocation case + */ + uint shared_surfaces_cnt = 0; + + /* + * precreated_surfaces_cnt is for self buffer allocation case + */ + uint precreated_surfaces_cnt = 0; + + MixCISharedBufferInfo * ci_info = NULL; + /*frame_mgr and input_buf_pool is reservered for future use*/ - - if (mix == NULL || config_params_enc == NULL || va_display == NULL) { - LOG_E( - "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); + if (config_params_enc == NULL || va_display == NULL || requested_surface_info == NULL) { + LOG_E("config_params_enc == NULL || va_display == NULL || requested_surface_info == NULL\n"); return MIX_RESULT_NULL_PTR; } - LOG_V( "begin\n"); + /* + * Check more for requested_surface_info + */ + if (requested_surface_info->surface_cnt != 0 
&&
+            (requested_surface_info->surface_allocated == NULL || requested_surface_info->usrptr == NULL)) {
+        LOG_E("surface_cnt != 0 && (surface_allocated == NULL || usrptr == NULL)\n");
+        return MIX_RESULT_NULL_PTR;
+    }
-
+    if (requested_surface_info->surface_cnt > MAX_ENC_SURFACE_COUNT) {
+        LOG_E ("Something is wrong, we have to quit now!\n");
+        return MIX_RESULT_FAIL;
+    }
-    /* Chainup parent method. */
+    LOG_V( "begin\n");
-    if (parent_class->initialize) {
-        ret = parent_class->initialize(mix, config_params_enc,
-                frame_mgr, input_buf_pool, surface_pool,
-                va_display);
-    }
-
-    if (ret != MIX_RESULT_SUCCESS)
-    {
+    /* Chainup parent method. */
+    ret = MixVideoFormatEnc::Initialize(
+            config_params_enc,
+            frame_mgr,
+            input_buf_pool,
+            surface_pool,
+            requested_surface_info,
+            va_display);
+    if (ret != MIX_RESULT_SUCCESS) {
        return ret;
    }
-
-    if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix))
-        return MIX_RESULT_INVALID_PARAM;
-
-    parent = MIX_VIDEOFORMATENC(&(mix->parent));
-    MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix);
    if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc)) {
-        config_params_enc_mpeg4 =
-            MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc);
+        config_params_enc_mpeg4 = MIX_VIDEOCONFIGPARAMSENC_MPEG4(config_params_enc);
    } else {
-        LOG_V(
-            "mix_videofmtenc_mpeg4_initialize: no mpeg4 config params found\n");
+        LOG_V("mix_videofmtenc_mpeg4_initialize: no mpeg4 config params found\n");
        return MIX_RESULT_FAIL;
    }
-
-    g_mutex_lock(parent->objectlock);
-    LOG_V(
-        "Start to get properities from MPEG-4:2 params\n");
+    Lock();
+
+    LOG_V("Start to get properties from MPEG-4:2 params\n");
    /* get properties from MPEG4 params Object, which is special to MPEG4 format*/
+    ret = mix_videoconfigparamsenc_mpeg4_get_profile_level (
+            config_params_enc_mpeg4, &this->profile_and_level_indication);
-    ret = mix_videoconfigparamsenc_mpeg4_get_profile_level (config_params_enc_mpeg4,
-            &self->profile_and_level_indication);
-
    if (ret != MIX_RESULT_SUCCESS) {
-        LOG_E(
-            "Failed to mix_videoconfigparamsenc_mpeg4_get_profile_level\n");
-        CLEAN_UP;
-    }
-
-    ret = mix_videoconfigparamsenc_mpeg4_get_fixed_vti (config_params_enc_mpeg4,
-            &(self->fixed_vop_time_increment));
-
+        LOG_E("Failed to mix_videoconfigparamsenc_mpeg4_get_profile_level\n");
+        goto cleanup;
+    }
+
+    ret = mix_videoconfigparamsenc_mpeg4_get_fixed_vti (
+            config_params_enc_mpeg4, &(this->fixed_vop_time_increment));
+
    if (ret != MIX_RESULT_SUCCESS) {
-        LOG_E(
-            "Failed to mix_videoconfigparamsenc_mpeg4_get_fixed_vti\n");
-        CLEAN_UP;
-    }
-
-    ret = mix_videoconfigparamsenc_mpeg4_get_dlk (config_params_enc_mpeg4,
-            &(self->disable_deblocking_filter_idc));
-
+        LOG_E("Failed to mix_videoconfigparamsenc_mpeg4_get_fixed_vti\n");
+        goto cleanup;
+    }
+
+    ret = mix_videoconfigparamsenc_mpeg4_get_dlk (
+            config_params_enc_mpeg4, &(this->disable_deblocking_filter_idc));
+
    if (ret != MIX_RESULT_SUCCESS) {
-        LOG_E(
-            "Failed to config_params_enc_mpeg4\n");
-        CLEAN_UP;
-    }
-
-
-    LOG_V(
-        "======MPEG4 Encode Object properities======:\n");
-
-    LOG_I( "self->profile_and_level_indication = %d\n",
-        self->profile_and_level_indication);
-    LOG_I( "self->fixed_vop_time_increment = %d\n\n",
-        self->fixed_vop_time_increment);
-
-    LOG_V(
-        "Get properities from params done\n");
-
-
-    parent->va_display = va_display;
-
-    LOG_V( "Get Display\n");
-    LOG_I( "Display = 0x%08x\n",
-        (guint)va_display);
+        LOG_E("Failed to mix_videoconfigparamsenc_mpeg4_get_dlk\n");
+        goto cleanup;
+    }
+    LOG_V("======MPEG4 Encode Object properties======:\n");
LOG_I("self->profile_and_level_indication = %d\n", this->profile_and_level_indication); + LOG_I( "self->fixed_vop_time_increment = %d\n\n", this->fixed_vop_time_increment); + LOG_V("Get properities from params done\n"); + this->va_display = va_display; + + LOG_V( "Get Display\n"); + LOG_I( "Display = 0x%08x\n", (uint)va_display); #if 0 /* query the vender information, can ignore*/ va_vendor = vaQueryVendorString (va_display); - LOG_I( "Vendor = %s\n", - va_vendor); -#endif - + LOG_I( "Vendor = %s\n", + va_vendor); +#endif + /*get the max number for profiles/entrypoints/attribs*/ va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); - + LOG_I( "va_max_num_profiles = %d\n", va_max_num_profiles); + va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); - + LOG_I( "va_max_num_entrypoints = %d\n", va_max_num_entrypoints); + va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - - va_profiles = reinterpret_cast(g_malloc(sizeof(VAProfile)*va_max_num_profiles)); - va_entrypoints = reinterpret_cast(g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints)); - - if (va_profiles == NULL || va_entrypoints ==NULL) - { - LOG_E( - "!va_profiles || !va_entrypoints\n"); + LOG_I( "va_max_num_attribs = %d\n", va_max_num_attribs); + + va_profiles = new VAProfile[va_max_num_profiles]; + va_entrypoints = new VAEntrypoint[va_max_num_entrypoints]; + + if (va_profiles == NULL || va_entrypoints ==NULL) { + LOG_E("!va_profiles || !va_entrypoints\n"); ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; + goto cleanup; } - LOG_I( - "va_profiles = 0x%08x\n", (guint)va_profiles); - + LOG_I("va_profiles = 0x%08x\n", (uint)va_profiles); LOG_V( "vaQueryConfigProfiles\n"); - - - va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigProfiles\n"); + va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to call vaQueryConfigProfiles\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto cleanup; } - LOG_V( "vaQueryConfigProfiles Done\n"); - - /*check whether profile is supported*/ - for(index= 0; index < va_num_profiles; index++) { - if(parent->va_profile == va_profiles[index]) + for (index= 0; index < va_num_profiles; index++) { + if (this->va_profile == va_profiles[index]) break; } - - if(index == va_num_profiles) - { - LOG_E( "Profile not supported\n"); + if (index == va_num_profiles) { + LOG_E( "Profile not supported\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto cleanup; } LOG_V( "vaQueryConfigEntrypoints\n"); - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints(va_display, - parent->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); + va_status = vaQueryConfigEntrypoints( + va_display, + this->va_profile, + va_entrypoints, &va_num_entrypoints); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to call vaQueryConfigEntrypoints\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto cleanup; } - + for (index = 0; index < va_num_entrypoints; index ++) { if (va_entrypoints[index] == VAEntrypointEncSlice) { break; } } - + if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); + LOG_E( "Entrypoint not found\n"); ret = MIX_RESULT_FAIL; - 
-        CLEAN_UP;
-    }
-
-
+        goto cleanup;
+    }
+
    va_attrib[0].type = VAConfigAttribRTFormat;
    va_attrib[1].type = VAConfigAttribRateControl;
-
+
    LOG_V( "vaGetConfigAttributes\n");
-
-    va_status = vaGetConfigAttributes(va_display, parent->va_profile,
-            parent->va_entrypoint,
-            &va_attrib[0], 2);
-
-    if (va_status != VA_STATUS_SUCCESS)
-    {
-        LOG_E(
-            "Failed to call vaGetConfigAttributes\n");
+
+    va_status = vaGetConfigAttributes(
+            va_display,
+            this->va_profile,
+            this->va_entrypoint,
+            &va_attrib[0], 2);
+
+    if (va_status != VA_STATUS_SUCCESS) {
+        LOG_E("Failed to call vaGetConfigAttributes\n");
        ret = MIX_RESULT_FAIL;
-        CLEAN_UP;
+        goto cleanup;
    }
-
-    if ((va_attrib[0].value & parent->va_format) == 0) {
-        LOG_E( "Matched format not found\n");
-        ret = MIX_RESULT_FAIL;
-        CLEAN_UP;
-    }
-
-
-    if ((va_attrib[1].value & parent->va_rcmode) == 0) {
-        LOG_E( "RC mode not found\n");
+
+    if ((va_attrib[0].value & this->va_format) == 0) {
+        LOG_E( "Matched format not found\n");
        ret = MIX_RESULT_FAIL;
-        CLEAN_UP;
+        goto cleanup;
    }
-
-    va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420;
-    va_attrib[1].value = parent->va_rcmode;
-    LOG_V( "======VA Configuration======\n");
+    if ((va_attrib[1].value & this->va_rcmode) == 0) {
+        LOG_E( "RC mode not found\n");
+        ret = MIX_RESULT_FAIL;
+        goto cleanup;
    }
-    LOG_I( "profile = %d\n",
-        parent->va_profile);
-    LOG_I( "va_entrypoint = %d\n",
-        parent->va_entrypoint);
-    LOG_I( "va_attrib[0].type = %d\n",
-        va_attrib[0].type);
-    LOG_I( "va_attrib[1].type = %d\n",
-        va_attrib[1].type);
-    LOG_I( "va_attrib[0].value (Format) = %d\n",
-        va_attrib[0].value);
-    LOG_I( "va_attrib[1].value (RC mode) = %d\n",
-        va_attrib[1].value);
+    va_attrib[0].value = this->va_format; //VA_RT_FORMAT_YUV420;
+    va_attrib[1].value = this->va_rcmode;
+    LOG_V( "======VA Configuration======\n");
+    LOG_I( "profile = %d\n", this->va_profile);
+    LOG_I( "va_entrypoint = %d\n", this->va_entrypoint);
+    LOG_I( "va_attrib[0].type = %d\n", va_attrib[0].type);
+    LOG_I( "va_attrib[1].type = %d\n", va_attrib[1].type);
+    LOG_I( "va_attrib[0].value (Format) = %d\n", va_attrib[0].value);
+    LOG_I( "va_attrib[1].value (RC mode) = %d\n", va_attrib[1].value);
    LOG_V( "vaCreateConfig\n");
-    va_status = vaCreateConfig(va_display, parent->va_profile,
-            parent->va_entrypoint,
-            &va_attrib[0], 2, &(parent->va_config));
-
-    if (va_status != VA_STATUS_SUCCESS)
-    {
-        LOG_E( "Failed vaCreateConfig\n");
+    va_status = vaCreateConfig(
+            va_display, this->va_profile, this->va_entrypoint,
+            &va_attrib[0], 2, &(this->va_config));
+
+    if (va_status != VA_STATUS_SUCCESS) {
+        LOG_E( "Failed vaCreateConfig\n");
        ret = MIX_RESULT_FAIL;
-        CLEAN_UP;
+        goto cleanup;
+    }
+
+    if (this->va_rcmode == VA_RC_VCM) {
+        /*
+         * Following three features are only enabled in VCM mode
+         */
+        this->render_mss_required = TRUE;
+        this->render_AIR_required = TRUE;
+        this->render_bitrate_required = TRUE;
+    }
+
+    /*
+     * When the upstream component allocates the buffers, setting the
+     * buffer mode is mandatory; for the other modes it is optional.
+     */
+
+    LOG_I ("alloc_surface_cnt = %d\n", requested_surface_info->surface_cnt);
+
+    if (requested_surface_info->surface_cnt == 0) {
+        switch (this->buffer_mode) {
+        case MIX_BUFFER_UPSTREAM_ALLOC_CI:
+            ci_info = (MixCISharedBufferInfo *) (this->buf_info);
+            shared_surfaces_cnt = ci_info->ci_frame_cnt;
+            normal_surfaces_cnt = 2;
+            break;
+        case MIX_BUFFER_UPSTREAM_ALLOC_V4L2:
+            /*
+             * To be developed
+             */
+            break;
+        case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE:
+            /*
+             * To be developed
+             */
+            break;
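+        // A summary of the modes handled by this switch, as understood
+        // from the code (the V4L2 and SURFACE paths are still stubs):
+        // MIX_BUFFER_UPSTREAM_ALLOC_CI imports ci_frame_cnt camera (CI)
+        // frames as shared surfaces and creates 2 local ones, while the
+        // default below falls back to plain local allocation of 8 surfaces.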
+        default:
+            this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL;
+            normal_surfaces_cnt = 8;
+            break;
+        }
+    }
+    else if (requested_surface_info->surface_cnt == 1) {
+        /*
+         * Unusual case, TBD
+         */
+        this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL;
+        normal_surfaces_cnt = 8;
+    }
+    else {
+        this->buffer_mode = MIX_BUFFER_SELF_ALLOC_SURFACE;
+        precreated_surfaces_cnt = requested_surface_info->surface_cnt;
+        this->alloc_surface_cnt = requested_surface_info->surface_cnt;
+        this->usrptr = new uint8*[requested_surface_info->surface_cnt];
+        if (this->usrptr == NULL) {
+            LOG_E("Failed allocate memory\n");
+            ret = MIX_RESULT_NO_MEMORY;
+            goto cleanup;
+        }
+
+        memcpy (this->usrptr,
+                requested_surface_info->usrptr,
+                requested_surface_info->surface_cnt * sizeof (uint8 *));
+    }
+
+    LOG_I ("buffer_mode = %d\n", this->buffer_mode);
+
+    this->shared_surfaces_cnt = shared_surfaces_cnt;
+    this->precreated_surfaces_cnt = precreated_surfaces_cnt;
+
+#if 0
+
+    int ii = 0;
+    for (ii=0; ii < alloc_surface_cnt; ii++) {
+
+        g_print ("self->usrptr[%d] = 0x%08x\n", ii, self->usrptr[ii]);
+        g_print ("usrptr[%d] = 0x%08x\n", ii, usrptr[ii]);
+
+    }
    /*TODO: compute the surface number*/
    int numSurfaces;
-
+
    if (parent->share_buf_mode) {
        numSurfaces = 2;
    }
    else {
-        numSurfaces = 8;
-        parent->ci_frame_num = 0;
-    }
-
-    self->surface_num = numSurfaces + parent->ci_frame_num;
-
-    surfaces = reinterpret_cast<VASurfaceID*>(g_malloc(sizeof(VASurfaceID)*numSurfaces));
-
-    if (surfaces == NULL)
-    {
-        LOG_E(
-            "Failed allocate surface\n");
+        numSurfaces = 2;
+        parent->ci_frame_num = 0;
+    }
+
+    //self->surface_num = numSurfaces + parent->ci_frame_num;
+#endif
+
+    this->surface_num = normal_surfaces_cnt + shared_surfaces_cnt + precreated_surfaces_cnt;
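+    // Pool accounting (illustrative): surface_num is the sum of the
+    // surfaces created below (normal_surfaces_cnt), those imported from
+    // the upstream component (shared_surfaces_cnt) and those pre-created
+    // by the caller (precreated_surfaces_cnt). In MIX_BUFFER_ALLOC_NORMAL
+    // mode this works out to 8 + 0 + 0 = 8.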
+
+    surfaces = new VASurfaceID[normal_surfaces_cnt];
+
+    if (surfaces == NULL) {
+        LOG_E("Failed allocate surface\n");
+        ret = MIX_RESULT_NO_MEMORY;
+        goto cleanup;
+    }
+
+    this->surfaces = new VASurfaceID[this->surface_num];
+
+    if (this->surfaces == NULL) {
+        LOG_E("Failed allocate private surface\n");
        ret = MIX_RESULT_NO_MEMORY;
-        CLEAN_UP;
+        goto cleanup;
    }
-
+
    LOG_V( "vaCreateSurfaces\n");
-
-    va_status = vaCreateSurfaces(va_display, parent->picture_width,
-            parent->picture_height, parent->va_format,
-            numSurfaces, surfaces);
-    if (va_status != VA_STATUS_SUCCESS)
-    {
-        LOG_E(
-            "Failed vaCreateSurfaces\n");
+    va_status = vaCreateSurfaces(
+            va_display, this->picture_width,
+            this->picture_height, this->va_format,
+            normal_surfaces_cnt, surfaces);
+
+    if (va_status != VA_STATUS_SUCCESS) {
+        LOG_E("Failed vaCreateSurfaces\n");
        ret = MIX_RESULT_FAIL;
-        CLEAN_UP;
+        goto cleanup;
    }
-    if (parent->share_buf_mode) {
-
-        LOG_V(
-            "We are in share buffer mode!\n");
-        self->ci_shared_surfaces = reinterpret_cast<VASurfaceID*>(
-                g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num));
-
-        if (self->ci_shared_surfaces == NULL)
-        {
-            LOG_E(
-                "Failed allocate shared surface\n");
+    if (shared_surfaces_cnt != 0) {
+        this->shared_surfaces = new VASurfaceID[shared_surfaces_cnt];
+        if (this->shared_surfaces == NULL) {
+            LOG_E("Failed allocate shared surface\n");
            ret = MIX_RESULT_NO_MEMORY;
-            CLEAN_UP;
+            goto cleanup;
        }
-
-        guint index;
-        for(index = 0; index < parent->ci_frame_num; index++) {
-
-            LOG_I( "ci_frame_id = %lu\n",
-                parent->ci_frame_id[index]);
-
-            LOG_V(
-                "vaCreateSurfaceFromCIFrame\n");
-
-            va_status = vaCreateSurfaceFromCIFrame(va_display,
-                    (gulong) (parent->ci_frame_id[index]),
-                    &self->ci_shared_surfaces[index]);
-            if (va_status != VA_STATUS_SUCCESS)
-            {
-                LOG_E(
-                    "Failed to vaCreateSurfaceFromCIFrame\n");
+    }
+
+    switch (this->buffer_mode) {
+    case MIX_BUFFER_UPSTREAM_ALLOC_CI:
+    {
+        for (index = 0; index < this->shared_surfaces_cnt; index++) {
+            va_status = vaCreateSurfaceFromCIFrame(
+                    va_display,
+                    (ulong) (ci_info->ci_frame_id[index]),
+                    &this->shared_surfaces[index]);
+            if (va_status != VA_STATUS_SUCCESS) {
+                LOG_E("Failed to vaCreateSurfaceFromCIFrame\n");
                ret = MIX_RESULT_FAIL;
-                CLEAN_UP;
-            }
+                goto cleanup;
+            }
+            this->surfaces[index] = this->shared_surfaces[index];
        }
-
-        LOG_V(
-            "vaCreateSurfaceFromCIFrame Done\n");
-
-    }// if (parent->share_buf_mode)
-
-    self->surfaces = reinterpret_cast<VASurfaceID*>(g_malloc(sizeof(VASurfaceID) * self->surface_num));
-
-    if (self->surfaces == NULL)
+    }
+    break;
+    case MIX_BUFFER_UPSTREAM_ALLOC_V4L2:
+        /*To be developed*/
+        break;
+    case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE:
+        /*To be developed*/
+        break;
+    case MIX_BUFFER_ALLOC_NORMAL:
+        break;
+    case MIX_BUFFER_SELF_ALLOC_SURFACE:
    {
-        LOG_E(
-            "Failed allocate private surface\n");
-        ret = MIX_RESULT_NO_MEMORY;
-        CLEAN_UP;
-    }
-
-    if (parent->share_buf_mode) {
-        /*shared surfaces should be put in pool first,
-         * because we will get it accoring to CI index*/
-        for(index = 0; index < parent->ci_frame_num; index++)
-            self->surfaces[index] = self->ci_shared_surfaces[index];
+        for (index = 0; index < requested_surface_info->surface_cnt; index ++) {
+            this->surfaces[index] = requested_surface_info->surface_allocated[index];
+        }
+    }
+    break;
+    default:
+        break;
    }
-
-    for(index = 0; index < numSurfaces; index++) {
-        self->surfaces[index + parent->ci_frame_num] = surfaces[index];
+
+    for (index = 0; index < normal_surfaces_cnt; index++) {
+        this->surfaces[precreated_surfaces_cnt + shared_surfaces_cnt + index] = surfaces[index];
    }
-    LOG_V( "assign surface Done\n");
-    LOG_I( "Created %d libva surfaces\n",
-        numSurfaces + parent->ci_frame_num);
+    LOG_V( "assign surface Done\n");
+    LOG_I( "Created %d libva surfaces\n", this->surface_num);
+
#if 0  //current put this in gst
-    images = g_malloc(sizeof(VAImage)*numSurfaces);
+    images = g_malloc(sizeof(VAImage)*numSurfaces);
    if (images == NULL)
    {
-        g_mutex_unlock(parent->objectlock);
+        g_mutex_unlock(parent->objectlock);
        return MIX_RESULT_FAIL;
-    }
-
-    for (index = 0; index < numSurfaces; index++) {
-        //Derive an VAImage from an existing surface.
+    }
+
+    for (index = 0; index < numSurfaces; index++) {
+        //Derive an VAImage from an existing surface.
//The image buffer can then be mapped/unmapped for CPU access va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); + &images[index]); } -#endif - - LOG_V( "mix_surfacepool_new\n"); +#endif + + LOG_V( "mix_surfacepool_new\n"); - parent->surfacepool = mix_surfacepool_new(); + this->surfacepool = mix_surfacepool_new(); if (surface_pool) - *surface_pool = parent->surfacepool; + *surface_pool = this->surfacepool; //which is useful to check before encode - if (parent->surfacepool == NULL) - { - LOG_E( - "Failed to mix_surfacepool_new\n"); + if (this->surfacepool == NULL) { + LOG_E("Failed to mix_surfacepool_new\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto cleanup; } - LOG_V( - "mix_surfacepool_initialize\n"); - - ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces, va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - LOG_E( "Error init failure\n"); - ret = MIX_RESULT_ALREADY_INIT; - CLEAN_UP; - default: - break; + LOG_V("mix_surfacepool_initialize\n"); + + ret = mix_surfacepool_initialize( + this->surfacepool, this->surfaces, + this->surface_num, va_display); + + switch (ret) { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + LOG_E( "Error init failure\n"); + ret = MIX_RESULT_ALREADY_INIT; + goto cleanup; + default: + break; } - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext(va_display, parent->va_config, - parent->picture_width, parent->picture_height, - VA_PROGRESSIVE, self->surfaces, parent->ci_frame_num + numSurfaces, - &(parent->va_context)); - - LOG_I( - "Created libva context width %d, height %d\n", - parent->picture_width, parent->picture_height); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", - (guint)va_status); + LOG_V( "vaCreateContext\n"); + + va_status = vaCreateContext( + va_display, this->va_config, + this->picture_width, this->picture_height, + VA_PROGRESSIVE, this->surfaces, this->surface_num, + &(this->va_context)); + + LOG_I("Created libva context width %d, height %d\n", + this->picture_width, this->picture_height); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaCreateContext\n"); + LOG_I( "va_status = %d\n", (uint)va_status); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto cleanup; } - guint max_size = 0; - ret = mix_videofmtenc_mpeg4_get_max_encoded_buf_size (parent, &max_size); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videofmtenc_mpeg4_get_max_encoded_buf_size\n"); - CLEAN_UP; - + ret = GetMaxEncodedBufSize(&max_size); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to mix_videofmtenc_mpeg4_get_max_encoded_buf_size\n"); + goto cleanup; } /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &self->coded_buf[0]); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + va_status = vaCreateBuffer ( + va_display, this->va_context, + VAEncCodedBufferType, + this->coded_buf_size, + 1, NULL, + &this->coded_buf[0]); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaCreateBuffer: VAEncCodedBufferType\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto cleanup; } - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 
1, NULL,
-            &(self->coded_buf[1]));
-
-    if (va_status != VA_STATUS_SUCCESS)
-    {
-        LOG_E(
-            "Failed to vaCreateBuffer: VAEncCodedBufferType\n");
+    va_status = vaCreateBuffer (
+            va_display, this->va_context,
+            VAEncCodedBufferType,
+            this->coded_buf_size,
+            1, NULL,
+            &(this->coded_buf[1]));
+
+    if (va_status != VA_STATUS_SUCCESS) {
+        LOG_E("Failed to vaCreateBuffer: VAEncCodedBufferType\n");
        ret = MIX_RESULT_FAIL;
-        CLEAN_UP;
+        goto cleanup;
    }
#ifdef SHOW_SRC
    Display * display = XOpenDisplay (NULL);
-
-    LOG_I( "display = 0x%08x\n",
-        (guint) display);
-    win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0,
-            parent->picture_width, parent->picture_height, 0, 0,
-            WhitePixel(display, 0));
+    LOG_I( "display = 0x%08x\n", (uint) display);
+    win = XCreateSimpleWindow(
+            display, RootWindow(display, 0), 0, 0,
+            this->picture_width, this->picture_height, 0, 0,
+            WhitePixel(display, 0));
    XMapWindow(display, win);
    XSelectInput(display, win, KeyPressMask | StructureNotifyMask);
-
    XSync(display, False);
-    LOG_I( "va_display = 0x%08x\n",
-        (guint) va_display);
-
-#endif /* SHOW_SRC */
-    CLEAN_UP;
+    LOG_I( "va_display = 0x%08x\n", (uint) va_display);
+#endif /* SHOW_SRC */
+
+cleanup:
+
+    if (ret == MIX_RESULT_SUCCESS) {
+        this->initialized = TRUE;
+    }
+
+    /*free profiles and entrypoints*/
+    if (va_profiles)
+        delete [] va_profiles;
+
+    if (va_entrypoints)
+        delete [] va_entrypoints;
+
+    if (surfaces)
+        delete [] surfaces;
+
+    Unlock();
+    LOG_V( "end\n");
+    return ret;
}
-#undef CLEAN_UP
-
-#define CLEAN_UP {\
-    LOG_V( "UnLocking\n"); \
-    g_mutex_unlock(parent->objectlock);\
-    LOG_V( "end\n"); \
-    return ret;}
-
-MIX_RESULT mix_videofmtenc_mpeg4_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[],
-        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
-        MixVideoEncodeParams * encode_params) {
-
+
+MIX_RESULT MixVideoFormatEnc_MPEG4::Encode(
+        MixBuffer * bufin[], int bufincnt,
+        MixIOVec * iovout[], int iovoutcnt,
+        MixVideoEncodeParams * encode_params) {
+
    MIX_RESULT ret = MIX_RESULT_SUCCESS;
-    MixVideoFormatEnc *parent = NULL;
-
-    LOG_V( "Begin\n");
-
+    LOG_V( "Begin\n");
    /*currently only support one input and output buffer*/
    if (bufincnt != 1 || iovoutcnt != 1) {
-        LOG_E(
-            "buffer count not equel to 1\n");
-        LOG_E(
-            "maybe some exception occurs\n");
-    }
-
-    if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) {
-        LOG_E(
-            "!mix || !bufin[0] ||!iovout[0]\n");
+        LOG_E("buffer count not equal to 1\n");
+        LOG_E("an exception may have occurred\n");
+    }
+
+    if (bufin[0] == NULL || iovout[0] == NULL) {
+        LOG_E("!bufin[0] ||!iovout[0]\n");
        return MIX_RESULT_NULL_PTR;
    }
+
#if 0
    if (parent_class->encode) {
        return parent_class->encode(mix, bufin, bufincnt, iovout,
-                iovoutcnt, encode_params);
+                iovoutcnt, encode_params);
    }
#endif
-
-    if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix))
-        return MIX_RESULT_INVALID_PARAM;
-
-    parent = MIX_VIDEOFORMATENC(&(mix->parent));
-    MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix);
-
-    LOG_V( "Locking\n");
-    g_mutex_lock(parent->objectlock);
-
-
-    //TODO: also we could move some encode Preparation work to here
-    LOG_V(
-        "mix_videofmtenc_mpeg4_process_encode\n");
+    LOG_V( "Locking\n");
+    Lock();
-    ret = mix_videofmtenc_mpeg4_process_encode (self,
-            bufin[0], iovout[0]);
-    if (ret != MIX_RESULT_SUCCESS)
-    {
-        LOG_E(
-            "Failed mix_videofmtenc_mpeg4_process_encode\n");
-        CLEAN_UP;
+    //TODO: also we could move some encode Preparation work to here
+    LOG_V("mix_videofmtenc_mpeg4_process_encode\n");
+    ret = _process_encode(bufin[0], iovout[0]);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E("Failed mix_videofmtenc_mpeg4_process_encode\n");
+        goto cleanup;
    }
-    CLEAN_UP;
+
+cleanup:
+
+    LOG_V( "UnLocking\n");
+    Unlock();
+    LOG_V( "end\n");
+    return ret;
}
-#undef CLEAN_UP
-
-MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix) {
-
-    //MIX_RESULT ret = MIX_RESULT_SUCCESS;
-
-    LOG_V( "Begin\n");
-
-    if (mix == NULL) {
-        LOG_E( "mix == NULL\n");
-        return MIX_RESULT_NULL_PTR;
-    }
-
-
+
+MIX_RESULT MixVideoFormatEnc_MPEG4::Flush() {
+    LOG_V( "Begin\n");
    /*not chain to parent flush func*/
#if 0
    if (parent_class->flush) {
@@ -733,1081 +679,984 @@ MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix) {
    }
#endif
-    if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix))
-        return MIX_RESULT_INVALID_PARAM;
-
-    MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix);
-
-    g_mutex_lock(mix->objectlock);
-
-    /*unref the current source surface*/
-    if (self->cur_frame != NULL)
-    {
-        mix_videoframe_unref (self->cur_frame);
-        self->cur_frame = NULL;
-    }
-
-    /*unref the reconstructed surface*/
-    if (self->rec_frame != NULL)
-    {
-        mix_videoframe_unref (self->rec_frame);
-        self->rec_frame = NULL;
+    Lock();
+
+    /*unref the current source surface*/
+    if (this->cur_frame != NULL) {
+        mix_videoframe_unref (this->cur_frame);
+        this->cur_frame = NULL;
    }
-    /*unref the reference surface*/
-    if (self->ref_frame != NULL)
-    {
-        mix_videoframe_unref (self->ref_frame);
-        self->ref_frame = NULL;
-    }
-
-    /*reset the properities*/
-    self->encoded_frames = 0;
-    self->pic_skipped = FALSE;
-    self->is_intra = TRUE;
-
-    g_mutex_unlock(mix->objectlock);
-
-    LOG_V( "end\n");
-
-    return MIX_RESULT_SUCCESS;
-}
+    /*unref the reconstructed surface*/
+    if (this->rec_frame != NULL) {
+        mix_videoframe_unref (this->rec_frame);
+        this->rec_frame = NULL;
+    }
-MIX_RESULT mix_videofmtenc_mpeg4_eos(MixVideoFormatEnc *mix) {
+    /*unref the reference surface*/
+    if (this->ref_frame != NULL) {
+        mix_videoframe_unref (this->ref_frame);
+        this->ref_frame = NULL;
+    }
-    LOG_V( "\n");
+    if (this->last_mix_buffer) {
+        mix_buffer_unref(this->last_mix_buffer);
+        this->last_mix_buffer = NULL;
+    }
-    if (mix == NULL) {
-        LOG_E( "mix == NULL\n");
-        return MIX_RESULT_NULL_PTR;
-    }
+    /*reset the properties*/
+    this->encoded_frames = 0;
+    this->pic_skipped = FALSE;
+    this->is_intra = TRUE;
-    if (parent_class->eos) {
-        return parent_class->eos(mix);
-    }
+    Unlock();
+    LOG_V( "end\n");
    return MIX_RESULT_SUCCESS;
}
-#define CLEAN_UP {\
-    parent->initialized = TRUE;\
-    g_mutex_unlock(parent->objectlock); \
-    LOG_V( "end\n"); \
-    return ret;}
-MIX_RESULT mix_videofmtenc_mpeg4_deinitialize(MixVideoFormatEnc *mix) {
-
-    MixVideoFormatEnc *parent = NULL;
+MIX_RESULT MixVideoFormatEnc_MPEG4::Deinitialize() {
    VAStatus va_status;
-    MIX_RESULT ret = MIX_RESULT_SUCCESS;
-
-
-    LOG_V( "Begin\n");
-
-    if (mix == NULL) {
-        LOG_E( "mix == NULL\n");
-        return MIX_RESULT_NULL_PTR;
-    }
-
-    if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix))
-        return MIX_RESULT_INVALID_PARAM;
-
-
-    if (parent_class->deinitialize) {
-        ret = parent_class->deinitialize(mix);
-    }
-
-    if (ret != MIX_RESULT_SUCCESS)
-    {
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    LOG_V( "Begin\n");
+    ret = MixVideoFormatEnc::Deinitialize();
+    if (ret != MIX_RESULT_SUCCESS) {
        return ret;
-    }
-
-    parent = MIX_VIDEOFORMATENC(&(mix->parent));
-    MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix);
-
-    LOG_V( "Release frames\n");
+    }
-    g_mutex_lock(parent->objectlock);
+    LOG_V( "Release frames\n");
+    Lock();
#if 0
-    /*unref the current source surface*/
    if (self->cur_frame != NULL)
    {
        mix_videoframe_unref (self->cur_frame);
        self->cur_frame = NULL;
    }
-#endif
-
-    /*unref the reconstructed surface*/
-    if (self->rec_frame != NULL)
-    {
-        mix_videoframe_unref (self->rec_frame);
-        self->rec_frame = NULL;
+#endif
+
+    /*unref the reconstructed surface*/
+    if (this->rec_frame != NULL) {
+        mix_videoframe_unref (this->rec_frame);
+        this->rec_frame = NULL;
    }
-    /*unref the reference surface*/
-    if (self->ref_frame != NULL)
-    {
-        mix_videoframe_unref (self->ref_frame);
-        self->ref_frame = NULL;
-    }
+    /*unref the reference surface*/
+    if (this->ref_frame != NULL) {
+        mix_videoframe_unref (this->ref_frame);
+        this->ref_frame = NULL;
+    }
-    LOG_V( "Release surfaces\n");
+    if (this->lookup_frame != NULL) {
+        mix_videoframe_unref (this->lookup_frame);
+        this->lookup_frame = NULL;
+    }
-    if (self->ci_shared_surfaces)
-    {
-        g_free (self->ci_shared_surfaces);
-        self->ci_shared_surfaces = NULL;
+    if (this->last_mix_buffer) {
+        mix_buffer_unref(this->last_mix_buffer);
+        this->last_mix_buffer = NULL;
    }
-    if (self->surfaces)
-    {
-        g_free (self->surfaces);
-        self->surfaces = NULL;
-    }
-
-    LOG_V( "vaDestroyContext\n");
-
-    va_status = vaDestroyContext (parent->va_display, parent->va_context);
-    if (va_status != VA_STATUS_SUCCESS)
-    {
-        LOG_E(
-            "Failed vaDestroyContext\n");
+    LOG_V( "Release surfaces\n");
+
+    if (this->shared_surfaces) {
+        delete [] this->shared_surfaces;
+        this->shared_surfaces = NULL;
+    }
+
+    if (this->surfaces) {
+        delete [] this->surfaces;
+        this->surfaces = NULL;
+    }
+
+    if (this->usrptr) {
+        delete [] this->usrptr;
+        this->usrptr = NULL;
+    }
+
+    LOG_V( "vaDestroyContext\n");
+
+    va_status = vaDestroyContext (this->va_display, this->va_context);
+    if (va_status != VA_STATUS_SUCCESS) {
+        LOG_E("Failed vaDestroyContext\n");
        ret = MIX_RESULT_FAIL;
-        CLEAN_UP;
-    }
+        goto cleanup;
+    }
-    LOG_V( "vaDestroyConfig\n");
-
-    va_status = vaDestroyConfig (parent->va_display, parent->va_config);
-    if (va_status != VA_STATUS_SUCCESS)
-    {
-        LOG_E(
-            "Failed vaDestroyConfig\n");
+    LOG_V( "vaDestroyConfig\n");
+
+    va_status = vaDestroyConfig (this->va_display, this->va_config);
+    if (va_status != VA_STATUS_SUCCESS) {
+        LOG_E("Failed vaDestroyConfig\n");
        ret = MIX_RESULT_FAIL;
-        CLEAN_UP;
-    }
-    CLEAN_UP;
+        goto cleanup;
+    }
+
+cleanup:
+
+    /*the encoder has been torn down at this point*/
+    this->initialized = FALSE;
+    Unlock();
+    LOG_V( "end\n");
+    return ret;
}
-#undef CLEAN_UP
-MIX_RESULT mix_videofmtenc_mpeg4_send_seq_params (MixVideoFormatEnc_MPEG4 *mix)
-{
-
+MIX_RESULT MixVideoFormatEnc_MPEG4::_send_seq_params () {
    VAStatus va_status;
    VAEncSequenceParameterBufferMPEG4 mpeg4_seq_param;
-    VABufferID seq_para_buf_id;
-
-
-    MixVideoFormatEnc *parent = NULL;
-
-    if (mix == NULL) {
-        LOG_E("mix == NULL\n");
-        return MIX_RESULT_NULL_PTR;
-    }
-
-    LOG_V( "Begin\n\n");
-
-    if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix))
-        return MIX_RESULT_INVALID_PARAM;
-
-    parent = MIX_VIDEOFORMATENC(&(mix->parent));
-
+    VABufferID seq_para_buf_id;
+
+    LOG_V( "Begin\n\n");
+
+    /*zero-init so fields left unset (e.g. fixed_vop_rate, which is
+     * logged below) hold a deterministic value*/
+    memset(&mpeg4_seq_param, 0, sizeof(mpeg4_seq_param));
+
    /*set up the sequence params for HW*/
-    mpeg4_seq_param.profile_and_level_indication = mix->profile_and_level_indication;  //TODO, hard code now
-    mpeg4_seq_param.video_object_layer_width= parent->picture_width;
-    mpeg4_seq_param.video_object_layer_height= parent->picture_height;
+    mpeg4_seq_param.profile_and_level_indication = this->profile_and_level_indication;  //TODO, hard code now
+    mpeg4_seq_param.video_object_layer_width= this->picture_width;
+    mpeg4_seq_param.video_object_layer_height= this->picture_height;
    mpeg4_seq_param.vop_time_increment_resolution =
-        (unsigned
int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; - mpeg4_seq_param.fixed_vop_time_increment= mix->fixed_vop_time_increment; - mpeg4_seq_param.bits_per_second= parent->bitrate; + (unsigned int) (this->frame_rate_num + this->frame_rate_denom /2 ) / this->frame_rate_denom; + mpeg4_seq_param.fixed_vop_time_increment= this->fixed_vop_time_increment; + mpeg4_seq_param.bits_per_second= this->bitrate; mpeg4_seq_param.frame_rate = - (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; - mpeg4_seq_param.initial_qp = parent->initial_qp; - mpeg4_seq_param.min_qp = parent->min_qp; - mpeg4_seq_param.intra_period = parent->intra_period; - - + (unsigned int) (this->frame_rate_num + this->frame_rate_denom /2 ) / this->frame_rate_denom; + mpeg4_seq_param.initial_qp = this->initial_qp; + mpeg4_seq_param.min_qp = this->min_qp; + mpeg4_seq_param.intra_period = this->intra_period; //mpeg4_seq_param.fixed_vop_rate = 30; - - - LOG_V( - "===mpeg4 sequence params===\n"); - - LOG_I( "profile_and_level_indication = %d\n", - (guint)mpeg4_seq_param.profile_and_level_indication); - LOG_I( "intra_period = %d\n", - mpeg4_seq_param.intra_period); - LOG_I( "video_object_layer_width = %d\n", - mpeg4_seq_param.video_object_layer_width); - LOG_I( "video_object_layer_height = %d\n", - mpeg4_seq_param.video_object_layer_height); - LOG_I( "vop_time_increment_resolution = %d\n", - mpeg4_seq_param.vop_time_increment_resolution); - LOG_I( "fixed_vop_rate = %d\n", - mpeg4_seq_param.fixed_vop_rate); - LOG_I( "fixed_vop_time_increment = %d\n", - mpeg4_seq_param.fixed_vop_time_increment); - LOG_I( "bitrate = %d\n", - mpeg4_seq_param.bits_per_second); - LOG_I( "frame_rate = %d\n", - mpeg4_seq_param.frame_rate); - LOG_I( "initial_qp = %d\n", - mpeg4_seq_param.initial_qp); - LOG_I( "min_qp = %d\n", - mpeg4_seq_param.min_qp); - LOG_I( "intra_period = %d\n\n", - mpeg4_seq_param.intra_period); - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncSequenceParameterBufferType, - sizeof(mpeg4_seq_param), - 1, &mpeg4_seq_param, - &seq_para_buf_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); + LOG_V("===mpeg4 sequence params===\n"); + LOG_I("profile_and_level_indication = %d\n", (uint)mpeg4_seq_param.profile_and_level_indication); + LOG_I("intra_period = %d\n", mpeg4_seq_param.intra_period); + LOG_I("video_object_layer_width = %d\n", mpeg4_seq_param.video_object_layer_width); + LOG_I("video_object_layer_height = %d\n", mpeg4_seq_param.video_object_layer_height); + LOG_I("vop_time_increment_resolution = %d\n", mpeg4_seq_param.vop_time_increment_resolution); + LOG_I("fixed_vop_rate = %d\n", mpeg4_seq_param.fixed_vop_rate); + LOG_I("fixed_vop_time_increment = %d\n", mpeg4_seq_param.fixed_vop_time_increment); + LOG_I("bitrate = %d\n", mpeg4_seq_param.bits_per_second); + LOG_I("frame_rate = %d\n", mpeg4_seq_param.frame_rate); + LOG_I("initial_qp = %d\n", mpeg4_seq_param.initial_qp); + LOG_I("min_qp = %d\n", mpeg4_seq_param.min_qp); + LOG_I("intra_period = %d\n\n", mpeg4_seq_param.intra_period); + + va_status = vaCreateBuffer( + this->va_display, this->va_context, + VAEncSequenceParameterBufferType, + sizeof(mpeg4_seq_param), + 1, &mpeg4_seq_param, + &seq_para_buf_id); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &seq_para_buf_id, 1); - if (va_status != VA_STATUS_SUCCESS) - 
{ - LOG_E( - "Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); + + va_status = vaRenderPicture( + this->va_display, this->va_context, &seq_para_buf_id, 1); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - + } + + LOG_V( "end\n"); return MIX_RESULT_SUCCESS; - - } -MIX_RESULT mix_videofmtenc_mpeg4_send_picture_parameter (MixVideoFormatEnc_MPEG4 *mix) -{ +MIX_RESULT MixVideoFormatEnc_MPEG4::_send_picture_parameter() { VAStatus va_status; VAEncPictureParameterBufferMPEG4 mpeg4_pic_param; - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - + LOG_V( "Begin\n\n"); + #if 0 //not needed currently MixVideoConfigParamsEncMPEG4 * params_mpeg4 - = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc); -#endif - - if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - return MIX_RESULT_INVALID_PARAM; - - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - + = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc); +#endif /*set picture params for HW*/ - mpeg4_pic_param.reference_picture = mix->ref_frame->frame_id; - mpeg4_pic_param.reconstructed_picture = mix->rec_frame->frame_id; - mpeg4_pic_param.coded_buf = mix->coded_buf[mix->coded_buf_index]; - mpeg4_pic_param.picture_width = parent->picture_width; - mpeg4_pic_param.picture_height = parent->picture_height; - mpeg4_pic_param.vop_time_increment= mix->encoded_frames; - mpeg4_pic_param.picture_type = mix->is_intra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; - - - - LOG_V( - "======mpeg4 picture params======\n"); - LOG_I( "reference_picture = 0x%08x\n", - mpeg4_pic_param.reference_picture); - LOG_I( "reconstructed_picture = 0x%08x\n", - mpeg4_pic_param.reconstructed_picture); - LOG_I( "coded_buf = 0x%08x\n", - mpeg4_pic_param.coded_buf); - LOG_I( "coded_buf_index = %d\n", - mix->coded_buf_index); - LOG_I( "picture_width = %d\n", - mpeg4_pic_param.picture_width); - LOG_I( "picture_height = %d\n", - mpeg4_pic_param.picture_height); - LOG_I( "vop_time_increment = %d\n", - mpeg4_pic_param.vop_time_increment); - LOG_I( "picture_type = %d\n\n", - mpeg4_pic_param.picture_type); - - va_status = vaCreateBuffer(parent->va_display, parent->va_context, - VAEncPictureParameterBufferType, - sizeof(mpeg4_pic_param), - 1,&mpeg4_pic_param, - &mix->pic_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); + mpeg4_pic_param.reference_picture = this->ref_frame->frame_id; + mpeg4_pic_param.reconstructed_picture = this->rec_frame->frame_id; + mpeg4_pic_param.coded_buf = this->coded_buf[this->coded_buf_index]; + mpeg4_pic_param.picture_width = this->picture_width; + mpeg4_pic_param.picture_height = this->picture_height; + mpeg4_pic_param.vop_time_increment= this->encoded_frames; + mpeg4_pic_param.picture_type = this->is_intra ? 
VAEncPictureTypeIntra : VAEncPictureTypePredictive; + + LOG_V("======mpeg4 picture params======\n"); + LOG_I("reference_picture = 0x%08x\n", mpeg4_pic_param.reference_picture); + LOG_I("reconstructed_picture = 0x%08x\n", mpeg4_pic_param.reconstructed_picture); + LOG_I("coded_buf = 0x%08x\n", mpeg4_pic_param.coded_buf); + LOG_I("coded_buf_index = %d\n", this->coded_buf_index); + LOG_I("picture_width = %d\n", mpeg4_pic_param.picture_width); + LOG_I("picture_height = %d\n", mpeg4_pic_param.picture_height); + LOG_I("vop_time_increment = %d\n", mpeg4_pic_param.vop_time_increment); + LOG_I("picture_type = %d\n\n", mpeg4_pic_param.picture_type); + + va_status = vaCreateBuffer( + this->va_display, this->va_context, + VAEncPictureParameterBufferType, + sizeof(mpeg4_pic_param), + 1,&mpeg4_pic_param, + &this->pic_param_buf); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; } - - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->pic_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); - return MIX_RESULT_FAIL; - } - return MIX_RESULT_SUCCESS; - + va_status = vaRenderPicture( + this->va_display, this->va_context, + &this->pic_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaRenderPicture\n"); + LOG_I( "va_status = %d\n", va_status); + return MIX_RESULT_FAIL; + } + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videofmtenc_mpeg4_send_slice_parameter (MixVideoFormatEnc_MPEG4 *mix) -{ +MIX_RESULT MixVideoFormatEnc_MPEG4::_send_slice_parameter() { VAStatus va_status; + uint slice_height; + uint slice_index; + uint slice_height_in_mb; - guint slice_height; - guint slice_index; - guint slice_height_in_mb; - - if (mix == NULL) { - LOG_E("mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n\n"); - - - MixVideoFormatEnc *parent = NULL; - - if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - slice_height = parent->picture_height; - + LOG_V( "Begin\n\n"); + + slice_height = this->picture_height; slice_height += 15; slice_height &= (~15); - VAEncSliceParameterBuffer slice_param; slice_index = 0; slice_height_in_mb = slice_height / 16; - slice_param.start_row_number = 0; - slice_param.slice_height = slice_height / 16; - slice_param.slice_flags.bits.is_intra = mix->is_intra; + slice_param.start_row_number = 0; + slice_param.slice_height = slice_height / 16; + slice_param.slice_flags.bits.is_intra = this->is_intra; slice_param.slice_flags.bits.disable_deblocking_filter_idc - = mix->disable_deblocking_filter_idc; - - LOG_V( - "======mpeg4 slice params======\n"); - - LOG_I( "start_row_number = %d\n", - (gint) slice_param.start_row_number); - LOG_I( "slice_height_in_mb = %d\n", - (gint) slice_param.slice_height); - LOG_I( "slice.is_intra = %d\n", - (gint) slice_param.slice_flags.bits.is_intra); - LOG_I( - "disable_deblocking_filter_idc = %d\n\n", - (gint) mix->disable_deblocking_filter_idc); - - va_status = vaCreateBuffer (parent->va_display, parent->va_context, - VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), - 1, &slice_param, - &mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); + = this->disable_deblocking_filter_idc; + + LOG_V("======mpeg4 slice params======\n"); + LOG_I( "start_row_number = %d\n", (int) slice_param.start_row_number); + LOG_I( 
"slice_height_in_mb = %d\n", (int) slice_param.slice_height); + LOG_I( "slice.is_intra = %d\n", (int) slice_param.slice_flags.bits.is_intra); + LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) this->disable_deblocking_filter_idc); + + va_status = vaCreateBuffer ( + this->va_display, this->va_context, + VAEncSliceParameterBufferType, + sizeof(VAEncSliceParameterBuffer), + 1, &slice_param, + &this->slice_param_buf); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaCreateBuffer\n"); return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(parent->va_display, parent->va_context, - &mix->slice_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); + } + + va_status = vaRenderPicture( + this->va_display, this->va_context, + &this->slice_param_buf, 1); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaRenderPicture\n"); return MIX_RESULT_FAIL; - } + } - LOG_V( "end\n"); - + LOG_V( "end\n"); return MIX_RESULT_SUCCESS; } -#define CLEAN_UP {\ - if (ret != MIX_RESULT_SUCCESS) {\ - if (iovout->data) {\ - g_free (iovout->data);\ - iovout->data = NULL;\ - }\ - } \ - LOG_V( "end\n"); \ - return ret;} - -MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, - MixBuffer * bufin, MixIOVec * iovout) -{ - +MIX_RESULT MixVideoFormatEnc_MPEG4::_process_encode ( + MixBuffer * bufin, MixIOVec * iovout) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; VAStatus va_status = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; + VADisplay va_display = NULL; VAContextID va_context; - gulong surface = 0; - guint16 width, height; - + ulong surface = 0; + uint16 width, height; + MixVideoFrame * tmp_frame; - guint8 *buf; - - if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { - LOG_E( - "mix == NUL) || bufin == NULL || iovout == NULL\n"); + uint8 *buf; + + VACodedBufferSegment *coded_seg = NULL; + int num_seg = 0; + uint total_size = 0; + uint size = 0; + + if ((bufin == NULL) || (iovout == NULL)) { + LOG_E("mix == NUL) || bufin == NULL || iovout == NULL\n"); return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - if (!MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - va_display = parent->va_display; - va_context = parent->va_context; - width = parent->picture_width; - height = parent->picture_height; - - - LOG_I( "encoded_frames = %d\n", - mix->encoded_frames); - LOG_I( "is_intra = %d\n", - mix->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", - (guint) parent->ci_frame_id); + } + + LOG_V( "Begin\n"); + + va_display = this->va_display; + va_context = this->va_context; + width = this->picture_width; + height = this->picture_height; + + LOG_I( "encoded_frames = %d\n", this->encoded_frames); + LOG_I( "is_intra = %d\n", this->is_intra); + LOG_I( "ci_frame_id = 0x%08x\n", (uint) this->ci_frame_id); /* determine the picture type*/ - if ((mix->encoded_frames % parent->intra_period) == 0) { - mix->is_intra = TRUE; + if ((this->encoded_frames % this->intra_period) == 0) { + this->is_intra = TRUE; } else { - mix->is_intra = FALSE; - } - - LOG_I( "is_intra_picture = %d\n", - mix->is_intra); - - LOG_V( - "Get Surface from the pool\n"); - - /*current we use one surface for source data, + this->is_intra = FALSE; + } + + LOG_I( "is_intra_picture = %d\n", this->is_intra); + LOG_V("Get Surface from the pool\n"); + + /*current we use one surface for source data, * one for reference and one for reconstructed*/ /*TODO, could be refine here*/ - if 
(!parent->share_buf_mode) { - LOG_V( - "We are NOT in share buffer mode\n"); - - if (mix->ref_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - CLEAN_UP; + switch (this->buffer_mode) { + case MIX_BUFFER_UPSTREAM_ALLOC_CI: + { + //MixVideoFrame * frame = mix_videoframe_new(); + if (this->lookup_frame == NULL) { + this->lookup_frame = mix_videoframe_new (); + if (this->lookup_frame == NULL) { + LOG_E("mix_videoframe_new() failed!\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + } + + if (this->ref_frame == NULL) { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->ref_frame, this->lookup_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("get reference surface from pool failed\n"); + goto cleanup; + } + } + + if (this->rec_frame == NULL) { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->rec_frame, this->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("get reconstructed surface from pool failed\n"); + goto cleanup; } } - - if (mix->rec_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - CLEAN_UP; + + //mix_videoframe_unref (mix->cur_frame); + + if (this->need_display) { + this->cur_frame = NULL; + } + + if (this->cur_frame == NULL) { + uint ci_idx; +#ifndef ANDROID + memcpy (&ci_idx, bufin->data, bufin->size); +#else + memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); +#endif + + LOG_I("surface_num = %d\n", this->surface_num); + LOG_I("ci_frame_idx = %d\n", ci_idx); + + if (ci_idx > this->surface_num - 2) { + LOG_E("the CI frame idx is bigger than the CI frame count\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, ci_idx); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->cur_frame, this->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("get current working surface from pool failed\n"); + goto cleanup; } } - if (parent->need_display) { - mix->cur_frame = NULL; + ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); + } + /* + * end of CI buffer allocation mode + */ + break; + case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: + break; + case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: + break; + case MIX_BUFFER_SELF_ALLOC_SURFACE: + { + if (this->lookup_frame == NULL) { + this->lookup_frame = mix_videoframe_new (); + if (this->lookup_frame == NULL) { + LOG_E("mix_videoframe_new() failed!\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } } - if (mix->cur_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - CLEAN_UP; - } + uint surface_idx = (uint) -1; //fixme, temp use a big value + uint idx = 0; + + LOG_I ("bufin->data = 0x%08x\n",
bufin->data); + + for (idx = 0; idx < this->alloc_surface_cnt; idx++) { + LOG_I ("mix->usrptr[%d] = 0x%08x\n", idx, this->usrptr[idx]); + if (bufin->data == this->usrptr[idx]) + surface_idx = idx; } - - LOG_V( "Get Surface Done\n"); - + LOG_I("surface_num = %d\n", this->surface_num); + LOG_I("surface_idx = %d\n", surface_idx); + if (surface_idx > this->surface_num - 2) { + LOG_W("the Surface idx is too big, most likely the buffer passed in is not allocated by us\n"); + ret = MIX_RESULT_FAIL; + goto no_share_mode; + } + + if (this->ref_frame == NULL) { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->ref_frame, this->lookup_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("get reference surface from pool failed\n"); + goto cleanup; + } + } + + if (this->rec_frame == NULL) { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->rec_frame, this->lookup_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("get reconstructed surface from pool failed\n"); + goto cleanup; + } + } + + //mix_videoframe_unref (mix->cur_frame); + + if (this->need_display) { + this->cur_frame = NULL; + } + + if (this->cur_frame == NULL) { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, surface_idx); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->cur_frame, this->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("get current working surface from pool failed\n"); + goto cleanup; + } + } + + ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); + } + + break; + /* + * end of Self buffer allocation mode + */ + + case MIX_BUFFER_ALLOC_NORMAL: + { + +no_share_mode: + + LOG_V("We are NOT in share buffer mode\n"); + + if (this->ref_frame == NULL) { + ret = mix_surfacepool_get(this->surfacepool, &this->ref_frame); + if (ret != MIX_RESULT_SUCCESS) {//#ifdef SLEEP_SURFACE not used + LOG_E("Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + if (this->rec_frame == NULL) { + ret = mix_surfacepool_get(this->surfacepool, &this->rec_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + if (this->need_display) { + this->cur_frame = NULL; + } + + if (this->cur_frame == NULL) { + ret = mix_surfacepool_get(this->surfacepool, &this->cur_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + LOG_V( "Get Surface Done\n"); + VAImage src_image; - guint8 *pvbuf; - guint8 *dst_y; - guint8 *dst_uv; + uint8 *pvbuf; + uint8 *dst_y; + uint8 *dst_uv; int i,j; - - LOG_V( - "map source data to surface\n"); - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); - CLEAN_UP; + + LOG_V("map source data to surface\n"); + + ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to mix_videoframe_get_frame_id\n"); + goto cleanup; } - - - LOG_I( - "surface id =
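/* A minimal sketch (not from the patch; uint/uint8 follow this patch's
 * typedefs) of the self-alloc lookup above: in MIX_BUFFER_SELF_ALLOC_SURFACE
 * mode an input buffer is mapped back to its pre-created surface by comparing
 * its data pointer against the user pointers recorded at Initialize() time;
 * a miss falls back to the normal copy path (no_share_mode). */
static int find_precreated_surface(uint8 *data, uint8 **usrptr, uint cnt) {
    for (uint i = 0; i < cnt; i++) {
        if (data == usrptr[i])
            return (int)i; // index of the caller-allocated surface
    }
    return -1; // not one of ours
}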
0x%08x\n", (guint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); + + LOG_I("surface id = 0x%08x\n", (uint) surface); + + va_status = vaDeriveImage(va_display, surface, &src_image); //need to destroy - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDeriveImage\n"); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaDeriveImage\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; + goto cleanup; } - + VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); - + LOG_V( "vaDeriveImage Done\n"); + va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) - { + if (va_status != VA_STATUS_SUCCESS) { LOG_E( "Failed to vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - LOG_V( - "vaImage information\n"); - LOG_I( - "image->pitches[0] = %d\n", image->pitches[0]); - LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); - LOG_I( - "image->offsets[0] = %d\n", image->offsets[0]); - LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); - LOG_I( - "image->num_planes = %d\n", image->num_planes); - LOG_I( - "image->width = %d\n", image->width); - LOG_I( - "image->height = %d\n", image->height); - - LOG_I( - "input buf size = %d\n", bufin->size); - - guint8 *inbuf = bufin->data; - - /*need to convert YUV420 to NV12*/ - dst_y = pvbuf +image->offsets[0]; - - for (i = 0; i < height; i ++) { + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V("vaImage information\n"); + LOG_I("image->pitches[0] = %d\n", image->pitches[0]); + LOG_I("image->pitches[1] = %d\n", image->pitches[1]); + LOG_I("image->offsets[0] = %d\n", image->offsets[0]); + LOG_I("image->offsets[1] = %d\n", image->offsets[1]); + LOG_I("image->num_planes = %d\n", image->num_planes); + LOG_I("image->width = %d\n", image->width); + LOG_I("image->height = %d\n", image->height); + LOG_I("input buf size = %d\n", bufin->size); + + uint8 *inbuf = bufin->data; +#ifdef ANDROID +#define USE_SRC_FMT_NV12 +#endif + int offset_uv = width * height; + uint8 *inbuf_uv = inbuf + offset_uv; + int height_uv = height / 2; + int width_uv = width; + +#ifdef ANDROID + //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 + dst_y = pvbuf + image->offsets[0]; + for (i = 0; i < height; i++) { memcpy (dst_y, inbuf + i * width, width); dst_y += image->pitches[0]; } - + +#ifdef USE_SRC_FMT_NV12 dst_uv = pvbuf + image->offsets[1]; - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = - inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; + for (i = 0; i < height_uv; i++) { + memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); + dst_uv += image->pitches[1]; + } +#else //USE_SRC_FMT_NV21 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i ++) { + for (j = 0; j < width_uv; j += 2) { + dst_uv[j] = inbuf_uv[j+1]; //u + dst_uv[j+1] = inbuf_uv[j]; //v } dst_uv += image->pitches[1]; + inbuf_uv += width_uv; } - - va_status = vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); +#endif + +#else + + if (this->raw_format == MIX_RAW_TARGET_FORMAT_YUV420) { + dst_y = pvbuf +image->offsets[0]; + + for (i = 0; i < height; i ++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + + dst_uv = pvbuf + image->offsets[1]; + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; + dst_uv [j + 1] = + inbuf 
[width * height * 5 / 4 + i * width / 2 + j / 2]; + } + dst_uv += image->pitches[1]; + } + } + else if (this->raw_format == MIX_RAW_TARGET_FORMAT_NV12) { + + dst_y = pvbuf + image->offsets[0]; + for (i = 0; i < height; i++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; + } + + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i++) { + memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); + dst_uv += image->pitches[1]; + } + } + else { + LOG_E("Raw format not supported\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + +#endif //USE_SRC_FMT_YUV420 + + va_status = vaUnmapBuffer(va_display, image->buf); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaUnmapBuffer\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - + goto cleanup; + } + va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDestroyImage\n"); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaDestroyImage\n"); ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - LOG_V( - "Map source data to surface done\n"); - - } - - else {//if (!parent->share_buf_mode) - - MixVideoFrame * frame = mix_videoframe_new(); - - if (mix->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - CLEAN_UP; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_frame, frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "get reference surface from pool failed\n"); - CLEAN_UP; - } + goto cleanup; } - - if (mix->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - CLEAN_UP; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_frame, frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get recontructed surface from pool failed\n"); - CLEAN_UP; - } + LOG_V("Map source data to surface done\n"); + } + break; + default: + break; + + } + + LOG_V( "vaBeginPicture\n"); + LOG_I( "va_context = 0x%08x\n",(uint)va_context); + LOG_I( "surface = 0x%08x\n",(uint)surface); + LOG_I( "va_display = 0x%08x\n",(uint)va_display); + + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E( "Failed vaBeginPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + ret = _send_encode_command(); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E ("Failed _send_encode_command\n"); + goto cleanup; + } + + + if ((this->va_rcmode == VA_RC_NONE) || this->encoded_frames == 0) { + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } + } - if (parent->need_display) { - mix->cur_frame = NULL; - } - - if (mix->cur_frame == NULL) - { - guint ci_idx; - memcpy (&ci_idx, bufin->data, bufin->size); - - LOG_I( - "surface_num = %d\n", mix->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > mix->surface_num - 2) { - LOG_E( - "the CI frame idx is too bigger than CI frame number\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - - ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - CLEAN_UP; - } - - ret =
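/* A minimal sketch (not from the patch; uint8 follows this patch's typedefs)
 * of the chroma copy in the YUV420 branch above: I420 stores U and V in two
 * half-resolution planes, while NV12 interleaves them into a single UV plane
 * whose stride is the driver-reported image->pitches[1]. */
static void i420_uv_to_nv12(const uint8 *src_u, const uint8 *src_v,
                            uint8 *dst_uv, int width, int height, int uv_pitch) {
    for (int i = 0; i < height / 2; i++) {
        for (int j = 0; j < width / 2; j++) {
            dst_uv[2 * j]     = src_u[i * (width / 2) + j]; // U sample
            dst_uv[2 * j + 1] = src_v[i * (width / 2) + j]; // V sample
        }
        dst_uv += uv_pitch; // the pitch may be larger than the visible width
    }
}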
mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_frame, frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - CLEAN_UP; - } + if (this->encoded_frames == 0) { + this->encoded_frames ++; + this->last_coded_buf = this->coded_buf[this->coded_buf_index]; + this->coded_buf_index ++; + this->coded_buf_index %=2; + this->last_frame = this->cur_frame; + /* determine the picture type*/ + if ((this->encoded_frames % this->intra_period) == 0) { + this->is_intra = TRUE; + } else { + this->is_intra = FALSE; } - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - + tmp_frame = this->rec_frame; + this->rec_frame= this->ref_frame; + this->ref_frame = tmp_frame; } - - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(guint)va_context); - LOG_I( "surface = 0x%08x\n",(guint)surface); - LOG_I( "va_display = 0x%08x\n",(guint)va_display); + LOG_V( "vaSyncSurface\n"); - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - ret = mix_videofmtenc_mpeg4_send_encode_command (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - CLEAN_UP; - } - - - if ((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 0) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } + va_status = vaSyncSurface(va_display, this->last_frame->frame_id); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E( "Failed vaSyncSurface\n"); + //return MIX_RESULT_FAIL; + } + + LOG_V("Start to get encoded data\n"); + + /*get encoded data from the VA buffer*/ + va_status = vaMapBuffer (va_display, this->last_coded_buf, (void **)&buf); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E( "Failed vaMapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + coded_seg = (VACodedBufferSegment *)buf; + num_seg = 1; + + while (1) { + total_size += coded_seg->size; + if (coded_seg->next == NULL) + break; + coded_seg = (VACodedBufferSegment*)coded_seg->next; + num_seg ++; + } + + +#if 0 + // first 4 bytes is the size of the buffer + memcpy (&(iovout->data_size), (void*)buf, 4); + //size = (uint*) buf; +#endif + + iovout->data_size = total_size; + + if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. 
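/* A minimal sketch (not from the patch) of the sizing pass above: a VA coded
 * buffer is exposed as a linked list of VACodedBufferSegment nodes whose next
 * field is a void*, so the total bitstream size is accumulated segment by
 * segment before any allocation or copy is done. */
static uint coded_buffer_total_size(VACodedBufferSegment *seg) {
    uint total = 0;
    for (; seg != NULL; seg = (VACodedBufferSegment *)seg->next)
        total += seg->size; // payload bytes held by this segment
    return total;
}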
+ iovout->data = new uchar[iovout->data_size]; + if (iovout->data == NULL) { + LOG_E( "iovout->data == NULL\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; } - - if (mix->encoded_frames == 0) { - mix->encoded_frames ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - - mix->last_frame = mix->cur_frame; - - - /* determine the picture type*/ - if ((mix->encoded_frames % parent->intra_period) == 0) { - mix->is_intra = TRUE; - } else { - mix->is_intra = FALSE; - } - - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - - + } + + //memcpy (iovout->data, buf + 16, iovout->data_size); + + coded_seg = (VACodedBufferSegment *)buf; + total_size = 0; + + while (1) { + memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); + total_size += coded_seg->size; + if (coded_seg->next == NULL) + break; + coded_seg = (VACodedBufferSegment *)coded_seg->next; + } + + iovout->buffer_size = iovout->data_size; + + LOG_I("out size is = %d\n", iovout->data_size); + + va_status = vaUnmapBuffer (va_display, this->last_coded_buf); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E( "Failed vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + LOG_V( "get encoded data done\n"); + + if (!((this->va_rcmode == VA_RC_NONE) || this->encoded_frames == 1)) { + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } - - LOG_V( "vaSyncSurface\n"); - - va_status = vaSyncSurface(va_display, mix->last_frame->frame_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaSyncSurface\n"); - //return MIX_RESULT_FAIL; - } - - - LOG_V( - "Start to get encoded data\n"); - - /*get encoded data from the VA buffer*/ - va_status = vaMapBuffer (va_display, mix->last_coded_buf, (void **)&buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - VACodedBufferSegment *coded_seg = NULL; - int num_seg = 0; - guint total_size = 0; - guint size = 0; - - coded_seg = (VACodedBufferSegment *)buf; - num_seg = 1; - - while (1) { - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - - coded_seg = reinterpret_cast(coded_seg->next); - num_seg ++; + } + + if (this->encoded_frames == 1) { + va_status = vaBeginPicture(va_display, va_context, surface); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E( "Failed vaBeginPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } - -#if 0 - // first 4 bytes is the size of the buffer - memcpy (&(iovout->data_size), (void*)buf, 4); - //size = (guint*) buf; -#endif + ret = _send_encode_command (); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E ("Failed _send_encode_command\n"); + goto cleanup; + } - iovout->data_size = total_size; - - if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it.
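/* A minimal sketch (not from the patch; assumes <cstring> for memcpy) of the
 * copy pass above: the same segment list is walked a second time to flatten
 * the bitstream into the contiguous iovout->data buffer that the first,
 * size-only pass dimensioned. */
static void coded_buffer_flatten(VACodedBufferSegment *seg, unsigned char *dst) {
    uint offset = 0;
    for (; seg != NULL; seg = (VACodedBufferSegment *)seg->next) {
        memcpy(dst + offset, seg->buf, seg->size); // append this segment's payload
        offset += seg->size;
    }
}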
- - iovout->data = (guchar*)g_malloc (iovout->data_size); - if (iovout->data == NULL) { - LOG_E( "iovout->data == NULL\n"); - ret = MIX_RESULT_NO_MEMORY; - CLEAN_UP; - } + va_status = vaEndPicture (va_display, va_context); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E( "Failed vaEndPicture\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } - - //memcpy (iovout->data, buf + 16, iovout->data_size); - - coded_seg = (VACodedBufferSegment *)buf; - total_size = 0; - - while (1) { - - memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - - coded_seg = reinterpret_cast(coded_seg->next); - } - - iovout->buffer_size = iovout->data_size; - - LOG_I( - "out size is = %d\n", iovout->data_size); - - va_status = vaUnmapBuffer (va_display, mix->last_coded_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - LOG_V( "get encoded data done\n"); - - if (!((parent->va_rcmode == VA_RC_NONE) || mix->encoded_frames == 1)) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - } - - if (mix->encoded_frames == 1) { - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - ret = mix_videofmtenc_mpeg4_send_encode_command (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - CLEAN_UP; - } - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - - } + } VASurfaceStatus status; - + /*query the status of current surface*/ va_status = vaQuerySurfaceStatus(va_display, surface, &status); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaQuerySurfaceStatus\n"); - ret = MIX_RESULT_FAIL; - CLEAN_UP; - } - mix->pic_skipped = status & VASurfaceSkipped; + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed vaQuerySurfaceStatus\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + this->pic_skipped = status & VASurfaceSkipped; - //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame); + //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame); - if (parent->need_display) { - ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); + if (this->need_display) { + ret = mix_videoframe_set_sync_flag(this->cur_frame, TRUE); if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed to set sync_flag\n"); - CLEAN_UP; + goto cleanup; } - - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); + + ret = mix_framemanager_enqueue(this->framemgr, this->cur_frame); if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed mix_framemanager_enqueue\n"); - CLEAN_UP; + goto cleanup; } } - + /*update the reference surface and reconstructed surface */ - if (!mix->pic_skipped) { - tmp_frame = mix->rec_frame; - mix->rec_frame= mix->ref_frame; - mix->ref_frame = tmp_frame; - } - + if (!this->pic_skipped) { + tmp_frame = this->rec_frame; + this->rec_frame= this->ref_frame; + this->ref_frame = tmp_frame; + } + #if 0 - if (mix->ref_frame != NULL) - mix_videoframe_unref (mix->ref_frame); - mix->ref_frame = mix->rec_frame; - - mix_videoframe_unref (mix->cur_frame); -#endif + if (this->ref_frame != NULL) + mix_videoframe_unref (this->ref_frame); + 
this->ref_frame = this->rec_frame; - mix->encoded_frames ++; - mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index]; - mix->coded_buf_index ++; - mix->coded_buf_index %=2; - mix->last_frame = mix->cur_frame; + mix_videoframe_unref (this->cur_frame); +#endif - if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_frame); - mix->cur_frame = NULL; + this->encoded_frames ++; + this->last_coded_buf = this->coded_buf[this->coded_buf_index]; + this->coded_buf_index ++; + this->coded_buf_index %=2; + this->last_frame = this->cur_frame; + if (this->last_mix_buffer) { + LOG_V("calling mix_buffer_unref\n"); + LOG_V("refcount = %d\n", MIX_PARAMS(this->last_mix_buffer)->GetRefCount()); + mix_buffer_unref(this->last_mix_buffer); } - CLEAN_UP; -} -#undef CLEAN_UP + LOG_V("ref the current bufin\n"); + + this->last_mix_buffer = mix_buffer_ref(bufin); + if (!(this->need_display)) { + mix_videoframe_unref (this->cur_frame); + this->cur_frame = NULL; + } -MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size ( - MixVideoFormatEnc *mix, guint * max_size) -{ +cleanup: - MixVideoFormatEnc *parent = NULL; - - if (mix == NULL) - { - LOG_E( - "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - parent = MIX_VIDEOFORMATENC(mix); - MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix); - - if (MIX_IS_VIDEOFORMATENC_MPEG4(self)) { - - if (self->coded_buf_size > 0) { - *max_size = self->coded_buf_size; - LOG_V ("Already calculate the max encoded size, get the value directly"); - return MIX_RESULT_SUCCESS; - } - - /*base on the rate control mode to calculate the defaule encoded buffer size*/ - if (self->va_rcmode == VA_RC_NONE) { - self->coded_buf_size = - (parent->picture_width* parent->picture_height * 400) / (16 * 16); - // set to value according to QP - } - else { - self->coded_buf_size = parent->bitrate/ 4; + if (ret != MIX_RESULT_SUCCESS) { + if (iovout->data) { + delete [] iovout->data; + iovout->data = NULL; } - - self->coded_buf_size = - max (self->coded_buf_size , - (parent->picture_width* parent->picture_height * 400) / (16 * 16)); - - /*in case got a very large user input bit rate value*/ - self->coded_buf_size = - max(self->coded_buf_size, - (parent->picture_width * parent->picture_height * 1.5 * 8)); - self->coded_buf_size = (self->coded_buf_size + 15) &(~15); - } - else - { - LOG_E( - "not MPEG4 video encode Object\n"); - return MIX_RESULT_INVALID_PARAM; + } + LOG_V( "end\n"); + return ret; +} + +MIX_RESULT MixVideoFormatEnc_MPEG4::GetMaxEncodedBufSize( + uint * max_size) { + LOG_V( "Begin\n"); + + if (this->coded_buf_size > 0) { + *max_size = this->coded_buf_size; + LOG_V ("Already calculated the max encoded size, returning the value directly"); + return MIX_RESULT_SUCCESS; + } + + /*based on the rate control mode, calculate the default encoded buffer size*/ + if (this->va_rcmode_mpeg4 == VA_RC_NONE) { + this->coded_buf_size = (this->picture_width* this->picture_height * 400) / (16 * 16); + // set to value according to QP + } + else { + this->coded_buf_size = this->bitrate / 4; } - *max_size = self->coded_buf_size; - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; + this->coded_buf_size = max( + this->coded_buf_size , + (this->picture_width* this->picture_height * 400) / (16 * 16)); + + /*in case we got a very large user input bit rate value*/ + this->coded_buf_size = max( + this->coded_buf_size, (this->picture_width * this->picture_height * 1.5 * 8)); + this->coded_buf_size = (this->coded_buf_size + 15) & (~15); + + *max_size = this->coded_buf_size; + LOG_V(
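/* A minimal sketch (not from the patch; max() is the macro from the header
 * below) of the sizing policy in GetMaxEncodedBufSize() above: start from
 * ~400 bytes per 16x16 macroblock (QP-driven mode) or bitrate/4 (rate
 * control), never drop below the macroblock estimate or the raw-frame guard
 * against tiny user bitrates, then align to 16 bytes. */
static uint worst_case_coded_buf_size(uint width, uint height, uint bitrate, bool rc_none) {
    uint size = rc_none ? (width * height * 400) / (16 * 16) : bitrate / 4;
    size = max(size, (width * height * 400) / (16 * 16));
    size = max(size, (uint)(width * height * 1.5 * 8)); // raw-frame guard value
    return (size + 15) & ~15u;                          // 16-byte alignment
}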
"end\n"); + return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videofmtenc_mpeg4_send_encode_command (MixVideoFormatEnc_MPEG4 *mix) -{ +MIX_RESULT MixVideoFormatEnc_MPEG4::_send_encode_command () { MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V( "Begin\n"); - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) - { - if (mix->encoded_frames == 0) { - ret = mix_videofmtenc_mpeg4_send_seq_params (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_seq_params\n"); - return MIX_RESULT_FAIL; - } - } - - ret = mix_videofmtenc_mpeg4_send_picture_parameter (mix); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_picture_parameter\n"); + if (this->encoded_frames == 0) { + ret = _send_seq_params (); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videofmtenc_h264_send_seq_params\n"); return MIX_RESULT_FAIL; } - - ret = mix_videofmtenc_mpeg4_send_slice_parameter (mix); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h264_send_slice_parameter\n"); - return MIX_RESULT_FAIL; - } - } - else - { - LOG_E( - "not MPEG4 video encode Object\n"); - return MIX_RESULT_INVALID_PARAM; - } - LOG_V( "End\n"); + ret = _send_picture_parameter (); - return MIX_RESULT_SUCCESS; + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videofmtenc_h264_send_picture_parameter\n"); + return MIX_RESULT_FAIL; + } + + ret = _send_slice_parameter (); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videofmtenc_h264_send_slice_parameter\n"); + return MIX_RESULT_FAIL; + } + + LOG_V( "End\n"); + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoformatenc_mpeg4.h b/mix_video/src/mixvideoformatenc_mpeg4.h index 7e8e29b..6c5241d 100644 --- a/mix_video/src/mixvideoformatenc_mpeg4.h +++ b/mix_video/src/mixvideoformatenc_mpeg4.h @@ -12,9 +12,9 @@ #include "mixvideoformatenc.h" #include "mixvideoframe_private.h" -G_BEGIN_DECLS -#define MIX_VIDEO_ENC_MPEG4_SURFACE_NUM 20 + +#define MIX_VIDEO_ENC_MPEG4_SURFACE_NUM 20 #define min(X,Y) (((X) < (Y)) ? (X) : (Y)) #define max(X,Y) (((X) > (Y)) ? (X) : (Y)) @@ -22,76 +22,75 @@ G_BEGIN_DECLS /* * Type macros. 
*/ -#define MIX_TYPE_VIDEOFORMATENC_MPEG4 (mix_videoformatenc_mpeg4_get_type ()) -#define MIX_VIDEOFORMATENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4)) -#define MIX_IS_VIDEOFORMATENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4)) -#define MIX_VIDEOFORMATENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4Class)) -#define MIX_IS_VIDEOFORMATENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC_MPEG4)) -#define MIX_VIDEOFORMATENC_MPEG4_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4Class)) - -typedef struct _MixVideoFormatEnc_MPEG4 MixVideoFormatEnc_MPEG4; -typedef struct _MixVideoFormatEnc_MPEG4Class MixVideoFormatEnc_MPEG4Class; - -struct _MixVideoFormatEnc_MPEG4 { - /*< public > */ - MixVideoFormatEnc parent; - - - VABufferID coded_buf[2]; - VABufferID last_coded_buf; - VABufferID seq_param_buf; - VABufferID pic_param_buf; - VABufferID slice_param_buf; - VASurfaceID * ci_shared_surfaces; - VASurfaceID * surfaces; - guint surface_num; - - MixVideoFrame *cur_frame; //current input frame to be encoded; - MixVideoFrame *ref_frame; //reference frame - MixVideoFrame *rec_frame; //reconstructed frame; - MixVideoFrame *last_frame; //last frame; - - - guchar profile_and_level_indication; - guint fixed_vop_time_increment; - guint disable_deblocking_filter_idc; - - guint va_rcmode; - - guint encoded_frames; - gboolean pic_skipped; - - gboolean is_intra; - - guint coded_buf_size; - guint coded_buf_index; - - - /*< public > */ +#define MIX_VIDEOFORMATENC_MPEG4(obj) (reinterpret_cast(obj)) +#define MIX_IS_VIDEOFORMATENC_MPEG4(obj) (NULL != MIX_VIDEOFORMATENC_MPEG4(obj)) + +class MixVideoFormatEnc_MPEG4 : public MixVideoFormatEnc { +public: + MixVideoFormatEnc_MPEG4(); + virtual ~MixVideoFormatEnc_MPEG4(); + + /* MPEG-4:2 vmethods */ + virtual MIX_RESULT Initialize( + MixVideoConfigParamsEnc* config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + MixUsrReqSurfacesInfo * requested_surface_info, + VADisplay va_display); + virtual MIX_RESULT Encode( MixBuffer * bufin[], + int bufincnt, MixIOVec * iovout[], int iovoutcnt, + MixVideoEncodeParams * encode_params); + virtual MIX_RESULT Flush(); + virtual MIX_RESULT Deinitialize(); + virtual MIX_RESULT GetMaxEncodedBufSize (uint *max_size); + +protected: + /* Local Methods */ + MIX_RESULT _process_encode (MixBuffer * bufin, MixIOVec * iovout); + MIX_RESULT _send_encode_command (); + MIX_RESULT _send_seq_params (); + MIX_RESULT _send_picture_parameter(); + MIX_RESULT _send_slice_parameter(); + +public: + VABufferID coded_buf[2]; + VABufferID last_coded_buf; + VABufferID seq_param_buf; + VABufferID pic_param_buf; + VABufferID slice_param_buf; + VASurfaceID * shared_surfaces; + VASurfaceID * surfaces; + uint surface_num; + uint shared_surfaces_cnt; + uint precreated_surfaces_cnt; + + MixVideoFrame *cur_frame; //current input frame to be encoded; + MixVideoFrame *ref_frame; //reference frame + MixVideoFrame *rec_frame; //reconstructed frame; + MixVideoFrame *last_frame; //last frame; + MixVideoFrame *lookup_frame; + + MixBuffer *last_mix_buffer; + + uchar profile_and_level_indication; + uint fixed_vop_time_increment; + uint disable_deblocking_filter_idc; + + uint va_rcmode_mpeg4; + + uint encoded_frames; + bool pic_skipped; + + bool is_intra; + + uint coded_buf_size; + 
uint coded_buf_index; + + uint8 ** usrptr; + uint alloc_surface_cnt; }; -/** - * MixVideoFormatEnc_MPEG4Class: - * - * MI-X Video object class - */ -struct _MixVideoFormatEnc_MPEG4Class { - /*< public > */ - MixVideoFormatEncClass parent_class; - - /* class members */ - - /*< public > */ -}; - -/** - * mix_videoformatenc_mpeg4_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_videoformatenc_mpeg4_get_type(void); - /** * mix_videoformatenc_mpeg4_new: * @returns: A newly allocated instance of #MixVideoFormatEnc_MPEG4 @@ -115,33 +114,6 @@ MixVideoFormatEnc_MPEG4 *mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * * * Decrement reference count of the object. */ -#define mix_videoformatenc_mpeg4_unref(obj) g_object_unref (G_OBJECT(obj)) - -/* Class Methods */ - -/* MPEG-4:2 vmethods */ -MIX_RESULT mix_videofmtenc_mpeg4_getcaps(MixVideoFormatEnc *mix, GString *msg); -MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); -MIX_RESULT mix_videofmtenc_mpeg4_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params); -MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_mpeg4_eos(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_mpeg4_deinitialize(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint * max_size); - -/* Local Methods */ - -MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, MixBuffer * bufin, - MixIOVec * iovout); -MIX_RESULT mix_videofmtenc_mpeg4_send_encode_command (MixVideoFormatEnc_MPEG4 *mix); - -G_END_DECLS +MixVideoFormatEnc_MPEG4 *mix_videoformatenc_mpeg4_unref(MixVideoFormatEnc_MPEG4 * mix); #endif /* __MIX_VIDEOFORMATENC_MPEG4_H__ */ - diff --git a/mix_video/src/mixvideoformatenc_preview.cpp b/mix_video/src/mixvideoformatenc_preview.cpp index 3444437..ae2f7bb 100644 --- a/mix_video/src/mixvideoformatenc_preview.cpp +++ b/mix_video/src/mixvideoformatenc_preview.cpp @@ -5,7 +5,7 @@ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. */ -#include + #include #include @@ -21,1167 +21,1098 @@ Window win = 0; #endif /* SHOW_SRC */ - -/* The parent class. The pointer will be saved - * in this class's initialization. The pointer - * can be used for chaining method call if needed. 
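/* A minimal sketch (not from the patch) of what the new type macros in the
 * header above amount to: with GType gone, MIX_VIDEOFORMATENC_MPEG4() is a
 * bare reinterpret_cast and the IS_ check only tests for NULL, so it no
 * longer verifies the dynamic type. dynamic_cast would be one way to keep a
 * real runtime check (assumes RTTI; the base class is polymorphic since its
 * Initialize/Encode/Flush methods are virtual): */
static MixVideoFormatEnc_MPEG4 *as_mpeg4_enc(MixVideoFormatEnc *enc) {
    return dynamic_cast<MixVideoFormatEnc_MPEG4 *>(enc); // NULL when enc is another subtype
}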
- */ -static MixVideoFormatEncClass *parent_class = NULL; - -static void mix_videoformatenc_preview_finalize(GObject * obj); - -/* - * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC - */ -G_DEFINE_TYPE (MixVideoFormatEnc_Preview, mix_videoformatenc_preview, MIX_TYPE_VIDEOFORMATENC); - -static void mix_videoformatenc_preview_init(MixVideoFormatEnc_Preview * self) { - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self); - - - /* member variable initialization */ - self->encoded_frames = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - self->cur_frame = NULL; - self->ref_frame = NULL; - self->rec_frame = NULL; - - self->ci_shared_surfaces = NULL; - self->surfaces= NULL; - self->surface_num = 0; - - parent->initialized = FALSE; +MixVideoFormatEnc_Preview::MixVideoFormatEnc_Preview() + :shared_surfaces(NULL) + ,surfaces(NULL) + ,surface_num(0) + ,shared_surfaces_cnt(0) + ,precreated_surfaces_cnt(0) + ,cur_frame(NULL) + ,ref_frame(NULL) + ,rec_frame(NULL) + ,lookup_frame(NULL) + ,encoded_frames(0) + ,pic_skipped(FALSE) + ,is_intra(TRUE) + ,usrptr(NULL) { } -static void mix_videoformatenc_preview_class_init( - MixVideoFormatEnc_PreviewClass * klass) { - - /* root class */ - GObjectClass *gobject_class = (GObjectClass *) klass; - - /* direct parent class */ - MixVideoFormatEncClass *video_formatenc_class = - MIX_VIDEOFORMATENC_CLASS(klass); - - /* parent class for later use */ - parent_class = reinterpret_cast(g_type_class_peek_parent(klass)); - - /* setup finializer */ - gobject_class->finalize = mix_videoformatenc_preview_finalize; - - /* setup vmethods with base implementation */ - video_formatenc_class->getcaps = mix_videofmtenc_preview_getcaps; - video_formatenc_class->initialize = mix_videofmtenc_preview_initialize; - video_formatenc_class->encode = mix_videofmtenc_preview_encode; - video_formatenc_class->flush = mix_videofmtenc_preview_flush; - video_formatenc_class->eos = mix_videofmtenc_preview_eos; - video_formatenc_class->deinitialize = mix_videofmtenc_preview_deinitialize; +MixVideoFormatEnc_Preview::~MixVideoFormatEnc_Preview() { } MixVideoFormatEnc_Preview * mix_videoformatenc_preview_new(void) { - MixVideoFormatEnc_Preview *ret = reinterpret_cast( - g_object_new(MIX_TYPE_VIDEOFORMATENC_PREVIEW, NULL)); - - return ret; + return new MixVideoFormatEnc_Preview(); } -void mix_videoformatenc_preview_finalize(GObject * obj) { - /* clean up here. 
*/ - /*MixVideoFormatEnc_Preview *mix = MIX_VIDEOFORMATENC_PREVIEW(obj); */ - GObjectClass *root_class = (GObjectClass *) parent_class; - - LOG_V( "\n"); - - /* Chain up parent */ - if (root_class->finalize) { - root_class->finalize(obj); +MixVideoFormatEnc_Preview * +mix_videoformatenc_preview_ref(MixVideoFormatEnc_Preview * mix) { + if (NULL != mix) { + mix->Ref(); + return mix; + } + else { + return NULL; } } MixVideoFormatEnc_Preview * -mix_videoformatenc_preview_ref(MixVideoFormatEnc_Preview * mix) { - return (MixVideoFormatEnc_Preview *) g_object_ref(G_OBJECT(mix)); +mix_videoformatenc_preview_unref(MixVideoFormatEnc_Preview * mix) { + if (NULL!=mix) + if (NULL != mix->Unref()) + return mix; + else + return NULL; + else + return NULL; } -/*Preview vmethods implementation */ -MIX_RESULT mix_videofmtenc_preview_getcaps(MixVideoFormatEnc *mix, GString *msg) { - - LOG_V( "mix_videofmtenc_preview_getcaps\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - if (parent_class->getcaps) { - return parent_class->getcaps(mix, msg); - } - return MIX_RESULT_SUCCESS; -} -MIX_RESULT mix_videofmtenc_preview_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display ) { +MIX_RESULT MixVideoFormatEnc_Preview::Initialize( + MixVideoConfigParamsEnc * config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + MixUsrReqSurfacesInfo * requested_surface_info, + VADisplay va_display ) { MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; MixVideoConfigParamsEncPreview * config_params_enc_preview; - + VAStatus va_status = VA_STATUS_SUCCESS; VASurfaceID * surfaces = NULL; - - gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - gint va_num_profiles, va_num_entrypoints; + + int va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; + int va_num_profiles, va_num_entrypoints; VAProfile *va_profiles = NULL; VAEntrypoint *va_entrypoints = NULL; - VAConfigAttrib va_attrib[2]; - guint index; - + VAConfigAttrib va_attrib[2]; + uint index; + + /* + * Different MIX buffer mode will have different surface handling approach + */ + uint normal_surfaces_cnt = 2; + + /* + * shared_surfaces_cnt is for upstream buffer allocation case + */ + uint shared_surfaces_cnt = 0; + + /* + * precreated_surfaces_cnt is for self buffer allocation case + */ + uint precreated_surfaces_cnt = 0; + + MixCISharedBufferInfo * ci_info = NULL; /*frame_mgr and input_buf_pool is reservered for future use*/ - - if (mix == NULL || config_params_enc == NULL || va_display == NULL) { - LOG_E( - "mix == NULL || config_params_enc == NULL || va_display == NULL\n"); + if (config_params_enc == NULL || va_display == NULL) { + LOG_E("config_params_enc == NULL || va_display == NULL\n"); return MIX_RESULT_NULL_PTR; } LOG_V( "begin\n"); - /* Chainup parent method. 
*/ - if (parent_class->initialize) { - ret = parent_class->initialize(mix, config_params_enc, - frame_mgr, input_buf_pool, surface_pool, - va_display); - } - - if (ret != MIX_RESULT_SUCCESS) - { + ret = MixVideoFormatEnc::Initialize( + config_params_enc, frame_mgr, input_buf_pool, + surface_pool, requested_surface_info, va_display); + if (ret != MIX_RESULT_SUCCESS) { return ret; } - - - if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) - return MIX_RESULT_INVALID_PARAM; - - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc)) { - config_params_enc_preview = - MIX_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc); + if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(config_params_enc)) { + config_params_enc_preview = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc); } else { - LOG_V( - "mix_videofmtenc_preview_initialize: no preview config params found\n"); + LOG_V("mix_videofmtenc_preview_initialize: no preview config params found\n"); return MIX_RESULT_FAIL; } - - g_mutex_lock(parent->objectlock); - - LOG_V( - "Get properities from params done\n"); + Lock(); + + LOG_V("Get properities from params done\n"); + + this->va_display = va_display; - parent->va_display = va_display; - LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", - (guint)va_display); + LOG_I( "Display = 0x%08x\n", (uint)va_display); - /*get the max number for profiles/entrypoints/attribs*/ va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); - + LOG_I( "va_max_num_profiles = %d\n", va_max_num_profiles); + va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); - + LOG_I( "va_max_num_entrypoints = %d\n", va_max_num_entrypoints); + va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - - va_profiles = reinterpret_cast(g_malloc(sizeof(VAProfile)*va_max_num_profiles)); - va_entrypoints = reinterpret_cast(g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints)); - - if (va_profiles == NULL || va_entrypoints ==NULL) - { - LOG_E( - "!va_profiles || !va_entrypoints\n"); + LOG_I( "va_max_num_attribs = %d\n", va_max_num_attribs); + + va_profiles = new VAProfile[va_max_num_profiles]; + va_entrypoints = new VAEntrypoint[va_max_num_entrypoints]; + + if (va_profiles == NULL || va_entrypoints ==NULL) { + LOG_E("!va_profiles || !va_entrypoints\n"); ret = MIX_RESULT_NO_MEMORY; - goto cleanup; + goto cleanup; } - LOG_I( - "va_profiles = 0x%08x\n", (guint)va_profiles); + LOG_I("va_profiles = 0x%08x\n", (uint)va_profiles); + LOG_V("vaQueryConfigProfiles\n"); - LOG_V( "vaQueryConfigProfiles\n"); - - va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigProfiles\n"); - ret = MIX_RESULT_FAIL; + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to call vaQueryConfigProfiles\n"); + ret = MIX_RESULT_FAIL; goto cleanup; } - LOG_V( "vaQueryConfigProfiles Done\n"); - - /*check whether profile is supported*/ - for(index= 0; index < va_num_profiles; index++) { - if(parent->va_profile == va_profiles[index]) + for (index= 0; index < va_num_profiles; index++) { + if (this->va_profile == va_profiles[index]) break; } - - if(index == va_num_profiles) - { - LOG_E( "Profile not supported\n"); + + if (index == va_num_profiles) { + LOG_E( 
"Profile not supported\n"); ret = MIX_RESULT_FAIL; goto cleanup; } LOG_V( "vaQueryConfigEntrypoints\n"); - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints(va_display, - parent->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); + va_status = vaQueryConfigEntrypoints( + va_display, + this->va_profile, + va_entrypoints, &va_num_entrypoints); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to call vaQueryConfigEntrypoints\n"); ret = MIX_RESULT_FAIL; goto cleanup; } - + for (index = 0; index < va_num_entrypoints; index ++) { if (va_entrypoints[index] == VAEntrypointEncSlice) { break; } } - + if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); - ret = MIX_RESULT_FAIL; + LOG_E( "Entrypoint not found\n"); + ret = MIX_RESULT_FAIL; goto cleanup; - } - - + } + va_attrib[0].type = VAConfigAttribRTFormat; va_attrib[1].type = VAConfigAttribRateControl; - + LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaGetConfigAttributes\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if ((va_attrib[0].value & parent->va_format) == 0) { - LOG_E( "Matched format not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - if ((va_attrib[1].value & parent->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = parent->va_rcmode; - LOG_V( "======VA Configuration======\n"); + va_status = vaGetConfigAttributes( + va_display, this->va_profile, + this->va_entrypoint, &va_attrib[0], 2); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to call vaGetConfigAttributes\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + if ((va_attrib[0].value & this->va_format) == 0) { + LOG_E( "Matched format not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + + if ((va_attrib[1].value & this->va_rcmode) == 0) { + LOG_E( "RC mode not found\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } - LOG_I( "profile = %d\n", - parent->va_profile); - LOG_I( "va_entrypoint = %d\n", - parent->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", - va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", - va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", - va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) = %d\n", - va_attrib[1].value); + va_attrib[0].value = this->va_format; //VA_RT_FORMAT_YUV420; + va_attrib[1].value = this->va_rcmode; + LOG_V( "======VA Configuration======\n"); + LOG_I( "profile = %d\n", this->va_profile); + LOG_I( "va_entrypoint = %d\n", this->va_entrypoint); + LOG_I( "va_attrib[0].type = %d\n", va_attrib[0].type); + LOG_I( "va_attrib[1].type = %d\n", va_attrib[1].type); + LOG_I( "va_attrib[0].value (Format) = %d\n", va_attrib[0].value); + LOG_I( "va_attrib[1].value (RC mode) = %d\n", va_attrib[1].value); LOG_V( "vaCreateConfig\n"); - va_status = vaCreateConfig(va_display, parent->va_profile, - parent->va_entrypoint, - &va_attrib[0], 2, &(parent->va_config)); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaCreateConfig\n"); + va_status = vaCreateConfig( + va_display, this->va_profile, + this->va_entrypoint, + &va_attrib[0], 2, &(this->va_config)); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E( "Failed 
vaCreateConfig\n"); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; + } + + /* + * For upstream allocates buffer, it is mandatory to set buffer mode + * and for other stuff, it is optional + */ + + LOG_I ("alloc_surface_cnt = %d\n", requested_surface_info->surface_cnt); + + if (requested_surface_info->surface_cnt == 0) { + switch (this->buffer_mode) { + case MIX_BUFFER_UPSTREAM_ALLOC_CI: + ci_info = (MixCISharedBufferInfo *) (this->buf_info); + shared_surfaces_cnt = ci_info->ci_frame_cnt; + normal_surfaces_cnt = 2; + break; + case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: + /* + * To be develped + */ + break; + case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: + /* + * To be develped + */ + break; + default: + this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL; + normal_surfaces_cnt = 8; + break; + } + } + else if (requested_surface_info->surface_cnt == 1) { + /* + * Un-normal case, TBD + */ + this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL; + normal_surfaces_cnt = 8; + } + else { + this->buffer_mode = MIX_BUFFER_SELF_ALLOC_SURFACE; + precreated_surfaces_cnt = requested_surface_info->surface_cnt; + this->alloc_surface_cnt = requested_surface_info->surface_cnt; + this->usrptr = new uint8*[requested_surface_info->surface_cnt]; + if (this->usrptr == NULL) { + LOG_E("Failed allocate memory\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + + memcpy (this->usrptr, requested_surface_info->usrptr, requested_surface_info->surface_cnt * sizeof (uint8 *)); + + } + + LOG_I ("buffer_mode = %d\n", this->buffer_mode); + + this->shared_surfaces_cnt = shared_surfaces_cnt; + this->precreated_surfaces_cnt = precreated_surfaces_cnt; + +#if 0 + + int ii = 0; + for (ii=0; ii < alloc_surface_cnt; ii++) { + + g_print ("self->usrptr[%d] = 0x%08x\n", ii, self->usrptr[ii]); + g_print ("usrptr[%d] = 0x%08x\n", ii, usrptr[ii]); + + } - /*compute the surface number*/ + /*TODO: compute the surface number*/ int numSurfaces; - + if (parent->share_buf_mode) { numSurfaces = 2; } else { - numSurfaces = 8; - parent->ci_frame_num = 0; - } - - self->surface_num = numSurfaces + parent->ci_frame_num; - - surfaces = reinterpret_cast(g_malloc(sizeof(VASurfaceID)*numSurfaces)); - - if (surfaces == NULL) - { - LOG_E( - "Failed allocate surface\n"); + numSurfaces = 2; + parent->ci_frame_num = 0; + } + + //self->surface_num = numSurfaces + parent->ci_frame_num; +#endif + + this->surface_num = normal_surfaces_cnt + shared_surfaces_cnt + precreated_surfaces_cnt; + surfaces = new VASurfaceID[normal_surfaces_cnt]; + + if (surfaces == NULL) { + LOG_E("Failed allocate surface\n"); ret = MIX_RESULT_NO_MEMORY; - goto cleanup; + goto cleanup; } - + + this->surfaces = new VASurfaceID[this->surface_num]; + + if (this->surfaces == NULL) { + LOG_E("Failed allocate private surface\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + LOG_V( "vaCreateSurfaces\n"); - - va_status = vaCreateSurfaces(va_display, parent->picture_width, - parent->picture_height, parent->va_format, - numSurfaces, surfaces); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaCreateSurfaces\n"); + va_status = vaCreateSurfaces( + va_display, this->picture_width, + this->picture_height, this->va_format, + normal_surfaces_cnt, surfaces); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed vaCreateSurfaces\n"); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - if (parent->share_buf_mode) { - - LOG_V( - "We are in share buffer mode!\n"); - self->ci_shared_surfaces = reinterpret_cast( - g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num)); - - if 
- if (parent->share_buf_mode) { - - LOG_V( - "We are in share buffer mode!\n"); - self->ci_shared_surfaces = reinterpret_cast<VASurfaceID*>( - g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num)); - - if (self->ci_shared_surfaces == NULL) - { - LOG_E( - "Failed allocate shared surface\n"); + if (shared_surfaces_cnt != 0) { + this->shared_surfaces = new VASurfaceID[shared_surfaces_cnt]; + if (this->shared_surfaces == NULL) { + LOG_E("Failed allocate shared surface\n"); ret = MIX_RESULT_NO_MEMORY; - goto cleanup; + goto cleanup; } - - guint index; - for(index = 0; index < parent->ci_frame_num; index++) { - - LOG_I( "ci_frame_id = %lu\n", - parent->ci_frame_id[index]); - - LOG_V( - "vaCreateSurfaceFromCIFrame\n"); - - va_status = vaCreateSurfaceFromCIFrame(va_display, - (gulong) (parent->ci_frame_id[index]), - &self->ci_shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateSurfaceFromCIFrame\n"); + } + + + switch (this->buffer_mode) { + case MIX_BUFFER_UPSTREAM_ALLOC_CI: + { + for (index = 0; index < this->shared_surfaces_cnt; index++) { + va_status = vaCreateSurfaceFromCIFrame( + va_display, (ulong) (ci_info->ci_frame_id[index]), + &this->shared_surfaces[index]); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaCreateSurfaceFromCIFrame\n"); ret = MIX_RESULT_FAIL; goto cleanup; - - } + } + this->surfaces[index] = this->shared_surfaces[index]; } - - LOG_V( - "vaCreateSurfaceFromCIFrame Done\n"); - - }// if (parent->share_buf_mode) - - self->surfaces = reinterpret_cast<VASurfaceID*>(g_malloc(sizeof(VASurfaceID) * self->surface_num)); - - if (self->surfaces == NULL) + } + break; + case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: + /*To be developed*/ + break; + case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: + /*To be developed*/ + break; + case MIX_BUFFER_ALLOC_NORMAL: + break; + case MIX_BUFFER_SELF_ALLOC_SURFACE: { - LOG_E( - "Failed allocate private surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; + for (index = 0; index < requested_surface_info->surface_cnt; index ++) { + this->surfaces[index] = requested_surface_info->surface_allocated[index]; + } + } + break; + default: + break; + } - } - if (parent->share_buf_mode) { - /*shared surfaces should be put in pool first, - * because we will get it accoring to CI index*/ - for(index = 0; index < parent->ci_frame_num; index++) - self->surfaces[index] = self->ci_shared_surfaces[index]; - } - - for(index = 0; index < numSurfaces; index++) { - self->surfaces[index + parent->ci_frame_num] = surfaces[index]; + for (index = 0; index < normal_surfaces_cnt; index++) { + this->surfaces[precreated_surfaces_cnt + shared_surfaces_cnt + index] = surfaces[index]; } - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", - numSurfaces + parent->ci_frame_num); - + LOG_V( "assign surface Done\n"); + LOG_I( "Created %d libva surfaces\n", this->surface_num); + #if 0 //currently put this in gst - images = g_malloc(sizeof(VAImage)*numSurfaces); + images = g_malloc(sizeof(VAImage)*numSurfaces); if (images == NULL) { - g_mutex_unlock(parent->objectlock); + g_mutex_unlock(parent->objectlock); return MIX_RESULT_FAIL; - } - - for (index = 0; index < numSurfaces; index++) { - //Derive a VAImage from an existing surface.
//The image buffer can then be mapped/unmapped for CPU access va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); + &images[index]); } -#endif - - LOG_V( "mix_surfacepool_new\n"); +#endif - parent->surfacepool = mix_surfacepool_new(); + LOG_V( "mix_surfacepool_new\n"); + + this->surfacepool = mix_surfacepool_new(); if (surface_pool) - *surface_pool = parent->surfacepool; + *surface_pool = this->surfacepool; //which is useful to check before encode - if (parent->surfacepool == NULL) - { - LOG_E( - "Failed to mix_surfacepool_new\n"); - + if (this->surfacepool == NULL) { + LOG_E("Failed to mix_surfacepool_new\n"); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - LOG_V( - "mix_surfacepool_initialize\n"); - - ret = mix_surfacepool_initialize(parent->surfacepool, - self->surfaces, parent->ci_frame_num + numSurfaces, va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - ret = MIX_RESULT_ALREADY_INIT; - goto cleanup; + LOG_V("mix_surfacepool_initialize\n"); - default: - break; + ret = mix_surfacepool_initialize( + this->surfacepool, this->surfaces, + this->surface_num, va_display); + + switch (ret) { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: + ret = MIX_RESULT_ALREADY_INIT; + goto cleanup; + default: + break; } - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext(va_display, parent->va_config, - parent->picture_width, parent->picture_height, - 0, self->surfaces, parent->ci_frame_num + numSurfaces, - &(parent->va_context)); - - LOG_I( - "Created libva context width %d, height %d\n", - parent->picture_width, parent->picture_height); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", - (guint)va_status); + LOG_V( "vaCreateContext\n"); + va_status = vaCreateContext( + va_display, this->va_config, + this->picture_width, this->picture_height, + 0, this->surfaces, this->surface_num, + &(this->va_context)); + + LOG_I("Created libva context width %d, height %d\n", + this->picture_width, this->picture_height); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaCreateContext\n"); + LOG_I( "va_status = %d\n", (uint)va_status); ret = MIX_RESULT_FAIL; goto cleanup; - } - self->coded_buf_size = 4; + this->coded_buf_size = 4; /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, parent->va_context, - VAEncCodedBufferType, - self->coded_buf_size, // - 1, NULL, - &self->coded_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); + va_status = vaCreateBuffer ( + va_display, this->va_context, + VAEncCodedBufferType, + this->coded_buf_size, // + 1, NULL, + &this->coded_buf); + + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaCreateBuffer: VAEncCodedBufferType\n"); ret = MIX_RESULT_FAIL; goto cleanup; } - +
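Taken together, the calls above are the standard libva encode bring-up; stripped of the MI-X pool bookkeeping, the sequence reduces to the following (sketch only, error handling elided; the signatures follow the libva version this patch targets):

    VAConfigID  cfg;  VAContextID ctx;  VABufferID coded;
    vaCreateConfig(va_display, profile, VAEntrypointEncSlice, attribs, 2, &cfg);
    vaCreateSurfaces(va_display, width, height, format, n, surfaces);
    vaCreateContext(va_display, cfg, width, height, 0, surfaces, n, &ctx);
    // The coded buffer starts tiny (4 bytes) here; a concrete encoder would
    // size it properly once the stream parameters are known.
    vaCreateBuffer(va_display, ctx, VAEncCodedBufferType, coded_buf_size,
                   1, NULL, &coded);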
#ifdef SHOW_SRC Display * display = XOpenDisplay (NULL); - - LOG_I( "display = 0x%08x\n", - (guint) display); - win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, - parent->picture_width, parent->picture_height, 0, 0, - WhitePixel(display, 0)); + LOG_I("display = 0x%08x\n", (uint) display); + win = XCreateSimpleWindow( + display, RootWindow(display, 0), 0, 0, + this->picture_width, this->picture_height, 0, 0, + WhitePixel(display, 0)); XMapWindow(display, win); XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - XSync(display, False); - LOG_I( "va_display = 0x%08x\n", - (guint) va_display); - -#endif /* SHOW_SRC */ + LOG_I( "va_display = 0x%08x\n", (uint) va_display); +#endif /* SHOW_SRC */ - - LOG_V( "end\n"); + LOG_V("end\n"); cleanup: - if (ret == MIX_RESULT_SUCCESS) { - parent->initialized = TRUE; + this->initialized = TRUE; } - /*free profiles and entrypoints*/ - if (va_profiles) - g_free(va_profiles); - + /*free profiles and entrypoints*/ + if (va_profiles) + delete [] va_profiles; + if (va_entrypoints) - g_free (va_entrypoints); + delete [] va_entrypoints; - if (surfaces) - g_free (surfaces); + if (surfaces) + delete [] surfaces; - g_mutex_unlock(parent->objectlock); - + Unlock(); return ret; } -MIX_RESULT mix_videofmtenc_preview_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params) { - +MIX_RESULT MixVideoFormatEnc_Preview::Encode( + MixBuffer * bufin[], int bufincnt, + MixIOVec * iovout[], int iovoutcnt, + MixVideoEncodeParams * encode_params) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoFormatEnc *parent = NULL; - - LOG_V( "Begin\n"); - + LOG_V( "Begin\n"); /*currently only support one input and output buffer*/ - if (bufincnt != 1 || iovoutcnt != 1) { - LOG_E( - "buffer count not equel to 1\n"); - LOG_E( - "maybe some exception occurs\n"); - } - - if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) { - LOG_E( - "!mix || !bufin[0] ||!iovout[0]\n"); + if (bufincnt != 1 || iovoutcnt != 1) { + LOG_E("buffer count not equal to 1\n"); + LOG_E("maybe some exception occurs\n"); + } + + if (bufin[0] == NULL || iovout[0] == NULL) { + LOG_E("!bufin[0] ||!iovout[0]\n"); return MIX_RESULT_NULL_PTR; } - - #if 0 if (parent_class->encode) { return parent_class->encode(mix, bufin, bufincnt, iovout, - iovoutcnt, encode_params); + iovoutcnt, encode_params); } #endif - - if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) - return MIX_RESULT_INVALID_PARAM; - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW (mix); - - LOG_V( "Locking\n"); - g_mutex_lock(parent->objectlock); - - - //TODO: we also could move some encode Preparation work to here + LOG_V( "Locking\n"); + Lock(); - LOG_V( - "mix_videofmtenc_preview_process_encode\n"); + //TODO: we also could move some encode Preparation work to here + LOG_V("mix_videofmtenc_preview_process_encode\n"); - ret = mix_videofmtenc_preview_process_encode (self, - bufin[0], iovout[0]); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_preview_process_encode\n"); + ret = _process_encode (bufin[0], iovout[0]); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_videofmtenc_preview_process_encode\n"); goto cleanup; } - cleanup: - - LOG_V( "UnLocking\n"); - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - + + LOG_V( "UnLocking\n"); + Unlock(); + LOG_V( "end\n"); return ret; }
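The Lock()/Unlock() pairing in Encode() (and the goto cleanup paths elsewhere in this file) is easy to get wrong when new early returns are added. A scoped guard is one way to make it robust; this is a hypothetical helper, not part of the patch, and it assumes Lock()/Unlock() remain public on the base class:

    class ScopedEncLock {
    public:
        explicit ScopedEncLock(MixVideoFormatEnc *enc) : mEnc(enc) { mEnc->Lock(); }
        ~ScopedEncLock() { mEnc->Unlock(); }   // unlocks on every return path
    private:
        MixVideoFormatEnc *mEnc;
    };
    // usage inside Encode():
    //   ScopedEncLock guard(this);
    //   return _process_encode(bufin[0], iovout[0]);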
-MIX_RESULT mix_videofmtenc_preview_flush(MixVideoFormatEnc *mix) { - - //MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - +MIX_RESULT MixVideoFormatEnc_Preview::Flush() { + + LOG_V( "Begin\n"); /*not chain to parent flush func*/ #if 0 if (parent_class->flush) { return parent_class->flush(mix, msg); } #endif - - if (!MIX_IS_VIDEOFORMATENC_PREVIEW (mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); - - g_mutex_lock(mix->objectlock); - -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { + Lock(); +#if 0 + /*unref the current source surface*/ + if (self->cur_frame != NULL) { mix_videoframe_unref (self->cur_frame); self->cur_frame = NULL; } -#endif - - /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) - { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; - } +#endif - /*unref the reference surface*/ - if (self->ref_frame != NULL) - { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; - } - - /*reset the properities*/ - self->encoded_frames = 0; - self->pic_skipped = FALSE; - self->is_intra = TRUE; - - g_mutex_unlock(mix->objectlock); - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} + /*unref the reconstructed surface*/ + if (this->rec_frame != NULL) { + mix_videoframe_unref (this->rec_frame); + this->rec_frame = NULL; + } -MIX_RESULT mix_videofmtenc_preview_eos(MixVideoFormatEnc *mix) { - - LOG_V( "\n"); + /*unref the reference surface*/ + if (this->ref_frame != NULL) { + mix_videoframe_unref (this->ref_frame); + this->ref_frame = NULL; + } - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } + /*reset the properties*/ + this->encoded_frames = 0; + this->pic_skipped = FALSE; + this->is_intra = TRUE; - if (parent_class->eos) { - return parent_class->eos(mix); - } + Unlock(); + LOG_V( "end\n"); return MIX_RESULT_SUCCESS; } -MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix) { - - MixVideoFormatEnc *parent = NULL; - VAStatus va_status; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (mix == NULL) { - LOG_E( "mix == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) - return MIX_RESULT_INVALID_PARAM; - - if (parent_class->deinitialize) { - ret = parent_class->deinitialize(mix); - } - if (ret != MIX_RESULT_SUCCESS) - { +MIX_RESULT MixVideoFormatEnc_Preview::Deinitialize() { + VAStatus va_status; + MIX_RESULT ret = MIX_RESULT_SUCCESS; + LOG_V( "Begin\n"); + ret = MixVideoFormatEnc::Deinitialize(); + if (ret != MIX_RESULT_SUCCESS) { return ret; - } - - - parent = MIX_VIDEOFORMATENC(&(mix->parent)); - MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix); - - LOG_V( "Release frames\n"); + } - g_mutex_lock(parent->objectlock); + LOG_V( "Release frames\n"); + Lock(); #if 0 - /*unref the current source surface*/ + /*unref the current source surface*/ if (self->cur_frame != NULL) { mix_videoframe_unref (self->cur_frame); self->cur_frame = NULL; } -#endif - - /*unref the reconstructed surface*/ - if (self->rec_frame != NULL) - { - mix_videoframe_unref (self->rec_frame); - self->rec_frame = NULL; +#endif + + /*unref the reconstructed surface*/ + if (this->rec_frame != NULL) { + mix_videoframe_unref (this->rec_frame); + this->rec_frame = NULL; } - /*unref the reference surface*/ - if (self->ref_frame != NULL) - { - mix_videoframe_unref (self->ref_frame); - self->ref_frame = NULL; - } + /*unref the reference surface*/ + if (this->ref_frame != NULL) { + mix_videoframe_unref (this->ref_frame); + this->ref_frame = NULL; + } + + if (this->lookup_frame != NULL) { + mix_videoframe_unref (this->lookup_frame); + this->lookup_frame = NULL; + } - LOG_V( "Release surfaces\n"); + LOG_V( "Release surfaces\n"); - if (self->ci_shared_surfaces) - { - g_free (self->ci_shared_surfaces); - self->ci_shared_surfaces = NULL; + if (this->shared_surfaces) { + delete [] this->shared_surfaces; + this->shared_surfaces = NULL; } - if (self->surfaces) - { - g_free (self->surfaces); - self->surfaces = NULL; - } - - LOG_V( "vaDestroyContext\n"); - - va_status =
vaDestroyContext (parent->va_display, parent->va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyContext\n"); + if (this->surfaces) { + delete [] this->surfaces; + this->surfaces = NULL; + } + + if (this->usrptr) { + delete [] this->usrptr; + this->usrptr = NULL; + } + + LOG_V( "vaDestroyContext\n"); + + va_status = vaDestroyContext (this->va_display, this->va_context); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed vaDestroyContext\n"); ret = MIX_RESULT_FAIL; goto cleanup; - } + } - LOG_V( "vaDestroyConfig\n"); - - va_status = vaDestroyConfig (parent->va_display, parent->va_config); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyConfig\n"); + LOG_V( "vaDestroyConfig\n"); + + va_status = vaDestroyConfig (this->va_display, this->va_config); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed vaDestroyConfig\n"); ret = MIX_RESULT_FAIL; goto cleanup; - } + } cleanup: - parent->initialized = FALSE; - - g_mutex_unlock(parent->objectlock); - - LOG_V( "end\n"); - + this->initialized = FALSE; + Unlock(); + LOG_V( "end\n"); return ret; }
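Deinitialize() tears things down strictly in reverse order of creation: frame references go back to the pool first, then the ID arrays are freed, and only then are the VA context and config destroyed. Just the libva part, condensed into an illustrative wrapper (the two calls themselves are real libva API):

    static MIX_RESULT destroy_va_objects(VADisplay dpy, VAContextID ctx,
                                         VAConfigID cfg) {
        // Context first: it was created from the config and the surfaces.
        if (vaDestroyContext(dpy, ctx) != VA_STATUS_SUCCESS)
            return MIX_RESULT_FAIL;
        // Config last, once nothing refers to it any more.
        if (vaDestroyConfig(dpy, cfg) != VA_STATUS_SUCCESS)
            return MIX_RESULT_FAIL;
        return MIX_RESULT_SUCCESS;
    }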
-MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mix, - MixBuffer * bufin, MixIOVec * iovout) -{ - +MIX_RESULT MixVideoFormatEnc_Preview::_process_encode ( + MixBuffer * bufin, MixIOVec * iovout) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; VAStatus va_status = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; + VADisplay va_display = NULL; VAContextID va_context; - gulong surface = 0; - guint16 width, height; - + ulong surface = 0; + uint16 width, height; + + uint surface_idx = (uint) -1; //fixme, temp use a big value + uint idx = 0; + + VAImage src_image; + uint8 *pvbuf; + uint8 *dst_y; + uint8 *dst_uv; + int i,j; + //MixVideoFrame * tmp_frame; - //guint8 *buf; - - if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) { - LOG_E( - "mix == NUL) || bufin == NULL || iovout == NULL\n"); + //uint8 *buf; + if ((bufin == NULL) || (iovout == NULL)) { + LOG_E("bufin == NULL || iovout == NULL\n"); return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - if (!MIX_IS_VIDEOFORMATENC_PREVIEW(mix)) - return MIX_RESULT_INVALID_PARAM; - - MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent)); - - va_display = parent->va_display; - va_context = parent->va_context; - width = parent->picture_width; - height = parent->picture_height; - - - LOG_I( "encoded_frames = %d\n", - mix->encoded_frames); - LOG_I( "is_intra = %d\n", - mix->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", - (guint) parent->ci_frame_id); - - LOG_V( - "Get Surface from the pool\n"); - - if (!parent->share_buf_mode) { - LOG_V( - "We are NOT in share buffer mode\n"); - - if (mix->ref_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "Failed to mix_surfacepool_get\n"); + } + + LOG_V( "Begin\n"); + va_display = this->va_display; + va_context = this->va_context; + width = this->picture_width; + height = this->picture_height; + + LOG_I( "encoded_frames = %d\n", this->encoded_frames); + LOG_I( "is_intra = %d\n", this->is_intra); + LOG_I( "ci_frame_id = 0x%08x\n", (uint) this->ci_frame_id); + LOG_V("Get Surface from the pool\n"); + + switch (this->buffer_mode) { + case MIX_BUFFER_UPSTREAM_ALLOC_CI: + { + //MixVideoFrame * frame = mix_videoframe_new(); + if (this->lookup_frame == NULL) { + this->lookup_frame = mix_videoframe_new (); + if (this->lookup_frame == NULL) { + LOG_E("mix_videoframe_new() failed!\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + } + + if (this->ref_frame == NULL) { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->ref_frame, this->lookup_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("get reference surface from pool failed\n"); + goto cleanup; + } + } + + if (this->rec_frame == NULL) { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->rec_frame, this->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("get reconstructed surface from pool failed\n"); + goto cleanup; + } + } + + //mix_videoframe_unref (mix->cur_frame); + if (this->need_display) { + this->cur_frame = NULL; + } + + if (this->cur_frame == NULL) { + uint ci_idx; +#ifndef ANDROID + memcpy (&ci_idx, bufin->data, bufin->size); +#else + memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); +#endif + + LOG_I("surface_num = %d\n", this->surface_num); + LOG_I("ci_frame_idx = %d\n", ci_idx); + + if (ci_idx > this->surface_num - 2) { + LOG_E("the CI frame idx is bigger than the CI frame number\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; + } + + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, ci_idx); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->cur_frame, this->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("get current working surface from pool failed\n"); + goto cleanup; + } + } + + ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); + } + + /* + * end of CI buffer allocation mode + */ + break; + case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: + break; + case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: + break; + case MIX_BUFFER_SELF_ALLOC_SURFACE: + { + if (this->lookup_frame == NULL) { + this->lookup_frame = mix_videoframe_new (); + if (this->lookup_frame == NULL) { + LOG_E("mix_videoframe_new() failed!\n"); + ret = MIX_RESULT_NO_MEMORY; + goto cleanup; + } + } + + LOG_I("bufin->data = 0x%08x\n", bufin->data); + for (idx = 0; idx < this->alloc_surface_cnt; idx++) { + LOG_I ("mix->usrptr[%d] = 0x%08x\n", idx, this->usrptr[idx]); + if (bufin->data == this->usrptr[idx]) + surface_idx = idx; + } + + LOG_I("surface_num = %d\n", this->surface_num); + LOG_I("surface_idx = %d\n", surface_idx); + + if (surface_idx > this->surface_num - 2) { + LOG_W("the Surface idx is too big, most likely the buffer passed in is not allocated by us\n"); + ret = MIX_RESULT_FAIL; + goto no_share_mode; + } + + if (this->ref_frame == NULL) { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->ref_frame, this->lookup_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("get reference surface from pool failed\n"); goto cleanup; } }
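In self-alloc mode the incoming MixBuffer is identified purely by pointer equality against the user pointers recorded at Initialize() time; anything that does not match falls back to the copy path via the no_share_mode label. The lookup, compressed into a hypothetical helper with the same logic:

    // Returns the index of the surface backing 'data', or (uint)-1 when the
    // buffer was not allocated by us; the unsigned sentinel also makes the
    // 'surface_idx > surface_num - 2' range check above fail, as intended.
    static uint find_surface_idx(uint8 **usrptr, uint cnt, const uint8 *data) {
        for (uint i = 0; i < cnt; i++)
            if (data == usrptr[i])
                return i;
        return (uint) -1;
    }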
- - if (mix->rec_frame == NULL) - { - ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); + + if (this->rec_frame == NULL) { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); + goto cleanup; + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->rec_frame, this->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("get reconstructed surface from pool failed\n"); + goto cleanup; + } + } + + //mix_videoframe_unref (this->cur_frame); + + if (this->need_display) { + this->cur_frame = NULL; + } + + if (this->cur_frame == NULL) { + ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, surface_idx); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); goto cleanup; - } + } + + ret = mix_surfacepool_get_frame_with_ci_frameidx + (this->surfacepool, &this->cur_frame, this->lookup_frame); + + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("get current working surface from pool failed\n"); + goto cleanup; + } } - - LOG_V( "Get Surface Done\n"); - - - VAImage src_image; - guint8 *pvbuf; - guint8 *dst_y; - guint8 *dst_uv; - int i,j; - - LOG_V( - "map source data to surface\n"); - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); - goto cleanup; + + ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); + } + + break; + /* + * end of Self buffer allocation mode + */ + case MIX_BUFFER_ALLOC_NORMAL: + { + +no_share_mode: + + LOG_V("We are NOT in share buffer mode\n"); + if (this->ref_frame == NULL) { + ret = mix_surfacepool_get(this->surfacepool, &this->ref_frame); + if (ret != MIX_RESULT_SUCCESS) {//#ifdef SLEEP_SURFACE not used + LOG_E("Failed to mix_surfacepool_get\n"); + goto cleanup; + } } - - - LOG_I( - "surface id = 0x%08x\n", (guint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); + + if (this->rec_frame == NULL) { + ret = mix_surfacepool_get(this->surfacepool, &this->rec_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + if (this->need_display) { + this->cur_frame = NULL; + } + + if (this->cur_frame == NULL) { + ret = mix_surfacepool_get(this->surfacepool, &this->cur_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to mix_surfacepool_get\n"); + goto cleanup; + } + } + + LOG_V( "Get Surface Done\n"); + LOG_V("map source data to surface\n"); + ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed to mix_videoframe_get_frame_id\n"); + goto cleanup; + } + + LOG_I("surface id = 0x%08x\n", (uint) surface); + va_status = vaDeriveImage(va_display, surface, &src_image); //need to destroy - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDeriveImage\n"); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaDeriveImage\n"); ret = MIX_RESULT_FAIL; - goto cleanup; + goto cleanup; } - - VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); - + VAImage *image = &src_image; + LOG_V( "vaDeriveImage Done\n"); va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) - { + if (va_status != VA_STATUS_SUCCESS) {
LOG_E( "Failed to vaMapBuffer\n"); ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "vaImage information\n"); - LOG_I( - "image->pitches[0] = %d\n", image->pitches[0]); - LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); - LOG_I( - "image->offsets[0] = %d\n", image->offsets[0]); - LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); - LOG_I( - "image->num_planes = %d\n", image->num_planes); - LOG_I( - "image->width = %d\n", image->width); - LOG_I( - "image->height = %d\n", image->height); - - LOG_I( - "input buf size = %d\n", bufin->size); - - guint8 *inbuf = bufin->data; - + goto cleanup; + } + LOG_V("vaImage information\n"); + LOG_I("image->pitches[0] = %d\n", image->pitches[0]); + LOG_I("image->pitches[1] = %d\n", image->pitches[1]); + LOG_I("image->offsets[0] = %d\n", image->offsets[0]); + LOG_I("image->offsets[1] = %d\n", image->offsets[1]); + LOG_I("image->num_planes = %d\n", image->num_planes); + LOG_I("image->width = %d\n", image->width); + LOG_I("image->height = %d\n", image->height); + LOG_I("input buf size = %d\n", bufin->size); + uint8 *inbuf = bufin->data; + +#ifndef ANDROID +#define USE_SRC_FMT_YUV420 +#else +#define USE_SRC_FMT_NV21 +#endif + +#ifdef USE_SRC_FMT_YUV420 /*need to convert YUV420 to NV12*/ dst_y = pvbuf +image->offsets[0]; - + for (i = 0; i < height; i ++) { memcpy (dst_y, inbuf + i * width, width); dst_y += image->pitches[0]; } - + dst_uv = pvbuf + image->offsets[1]; - + for (i = 0; i < height / 2; i ++) { for (j = 0; j < width; j+=2) { dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = - inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; + dst_uv [j + 1] = inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; } dst_uv += image->pitches[1]; } - - va_status = vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDestroyImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "Map source data to surface done\n"); - - } - - else {//if (!parent->share_buf_mode) - - MixVideoFrame * frame = mix_videoframe_new(); - - if (mix->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->ref_frame, frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "get reference surface from pool failed\n"); - goto cleanup; - } +#else //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 + int offset_uv = width * height; + uint8 *inbuf_uv = inbuf + offset_uv; + int height_uv = height / 2; + int width_uv = width; + + dst_y = pvbuf + image->offsets[0]; + for (i = 0; i < height; i++) { + memcpy (dst_y, inbuf + i * width, width); + dst_y += image->pitches[0]; } - - if (mix->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->rec_frame, frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get recontructed surface from pool failed\n"); - goto cleanup; +#ifdef USE_SRC_FMT_NV12 + dst_uv = pvbuf + 
image->offsets[1]; + for (i = 0; i < height_uv; i++) { + memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); + dst_uv += image->pitches[1]; + } +#else //USE_SRC_FMT_NV21 + dst_uv = pvbuf + image->offsets[1]; + for (i = 0; i < height_uv; i ++) { + for (j = 0; j < width_uv; j += 2) { + dst_uv[j] = inbuf_uv[j+1]; //u + dst_uv[j+1] = inbuf_uv[j]; //v } + dst_uv += image->pitches[1]; + inbuf_uv += width_uv; } - - //mix_videoframe_unref (mix->cur_frame); - - if (parent->need_display) { - mix->cur_frame = NULL; +#endif +#endif //USE_SRC_FMT_YUV420 + va_status = vaUnmapBuffer(va_display, image->buf); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaUnmapBuffer\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } - - if (mix->cur_frame == NULL) - { - guint ci_idx; - memcpy (&ci_idx, bufin->data, bufin->size); - - LOG_I( - "surface_num = %d\n", mix->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > mix->surface_num - 2) { - LOG_E( - "the CI frame idx is too bigger than CI frame number\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx); - if (ret != MIX_RESULT_SUCCESS) - - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (parent->surfacepool, &mix->cur_frame, frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - goto cleanup; - } + va_status = vaDestroyImage(va_display, src_image.image_id); + if (va_status != VA_STATUS_SUCCESS) { + LOG_E("Failed to vaDestroyImage\n"); + ret = MIX_RESULT_FAIL; + goto cleanup; } - - ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_get_frame_id failed\n"); - goto cleanup; - } - + LOG_V("Map source data to surface done\n"); + } + break; + default: + break; + }
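All three input paths above differ only in chroma layout: I420 carries separate U and V planes, NV12 already matches the surface (interleaved UV rows, plain row copy), and NV21 needs its VU pairs byte-swapped. As a self-contained reference, the I420 case compresses to the following (illustrative function, same arithmetic as the loops above; assumes even width/height and the pitches reported by vaDeriveImage):

    #include <string.h>
    #include <stdint.h>

    static void i420_to_nv12(const uint8_t *src, uint8_t *dst_y, int pitch_y,
                             uint8_t *dst_uv, int pitch_uv, int w, int h) {
        const uint8_t *src_u = src + w * h;          // U plane follows luma
        const uint8_t *src_v = src + w * h * 5 / 4;  // V plane follows U
        for (int i = 0; i < h; i++)                  // luma: row-wise copy
            memcpy(dst_y + i * pitch_y, src + i * w, w);
        for (int i = 0; i < h / 2; i++)              // chroma: interleave U,V
            for (int j = 0; j < w; j += 2) {
                dst_uv[i * pitch_uv + j]     = src_u[i * (w / 2) + j / 2];
                dst_uv[i * pitch_uv + j + 1] = src_v[i * (w / 2) + j / 2];
            }
    }

The NV21 branch is the same loop shape with dst_uv[j] = src_uv[j+1] and dst_uv[j+1] = src_uv[j]; only the byte swap differs.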
- - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(guint)va_context); - LOG_I( "surface = 0x%08x\n",(guint)surface); - LOG_I( "va_display = 0x%08x\n",(guint)va_display); + + LOG_V( "vaBeginPicture\n"); + LOG_I( "va_context = 0x%08x\n",(uint)va_context); + LOG_I( "surface = 0x%08x\n",(uint)surface); + LOG_I( "va_display = 0x%08x\n",(uint)va_display); iovout->data_size = 4; - iovout->data = (guchar*)g_malloc (iovout->data_size); + iovout->data = new uchar[iovout->data_size]; if (iovout->data == NULL) { ret = MIX_RESULT_NO_MEMORY; goto cleanup; - } - - memset (iovout->data, 0, iovout->data_size); + } + memset (iovout->data, 0, iovout->data_size); iovout->buffer_size = iovout->data_size; - - if (parent->need_display) { - ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE); + if (this->need_display) { + ret = mix_videoframe_set_sync_flag(this->cur_frame, TRUE); if (ret != MIX_RESULT_SUCCESS) { LOG_E("Failed to set sync_flag\n"); goto cleanup; } - - ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_framemanager_enqueue\n"); + + ret = mix_framemanager_enqueue(this->framemgr, this->cur_frame); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E("Failed mix_framemanager_enqueue\n"); goto cleanup; - } - } - - - if (!(parent->need_display)) { - mix_videoframe_unref (mix->cur_frame); - mix->cur_frame = NULL; - } - mix->encoded_frames ++; - + } + } + + if (!(this->need_display)) { + mix_videoframe_unref (this->cur_frame); + this->cur_frame = NULL; + } + this->encoded_frames ++; + cleanup: if (ret != MIX_RESULT_SUCCESS) { if (iovout->data) { - g_free (iovout->data); + delete [] iovout->data; iovout->data = NULL; } - } - - LOG_V( "end\n"); - + } + LOG_V( "end\n"); return ret; } diff --git a/mix_video/src/mixvideoformatenc_preview.h b/mix_video/src/mixvideoformatenc_preview.h index 09bc149..bb3db71 100644 --- a/mix_video/src/mixvideoformatenc_preview.h +++ b/mix_video/src/mixvideoformatenc_preview.h @@ -12,8 +12,6 @@ #include "mixvideoformatenc.h" #include "mixvideoframe_private.h" -G_BEGIN_DECLS - #define MIX_VIDEO_ENC_PREVIEW_SURFACE_NUM 20 #define min(X,Y) (((X) < (Y)) ? (X) : (Y)) @@ -22,69 +20,67 @@ G_BEGIN_DECLS /* * Type macros. */ -#define MIX_TYPE_VIDEOFORMATENC_PREVIEW (mix_videoformatenc_preview_get_type ()) -#define MIX_VIDEOFORMATENC_PREVIEW(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC_PREVIEW, MixVideoFormatEnc_Preview)) -#define MIX_IS_VIDEOFORMATENC_PREVIEW(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC_PREVIEW)) -#define MIX_VIDEOFORMATENC_PREVIEW_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC_PREVIEW, MixVideoFormatEnc_PreviewClass)) -#define MIX_IS_VIDEOFORMATENC_PREVIEW_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC_PREVIEW)) -#define MIX_VIDEOFORMATENC_PREVIEW_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC_PREVIEW, MixVideoFormatEnc_PreviewClass)) - -typedef struct _MixVideoFormatEnc_Preview MixVideoFormatEnc_Preview; -typedef struct _MixVideoFormatEnc_PreviewClass MixVideoFormatEnc_PreviewClass; +#define MIX_VIDEOFORMATENC_PREVIEW(obj) (dynamic_cast<MixVideoFormatEnc_Preview*>(obj)) +#define MIX_IS_VIDEOFORMATENC_PREVIEW(obj) (NULL != MIX_VIDEOFORMATENC_PREVIEW(obj)) -struct _MixVideoFormatEnc_Preview { - /*< public > */ - MixVideoFormatEnc parent; +class MixVideoFormatEnc_Preview : public MixVideoFormatEnc { +public: + MixVideoFormatEnc_Preview(); + virtual ~MixVideoFormatEnc_Preview(); - VABufferID coded_buf; - VABufferID seq_param_buf; - VABufferID pic_param_buf; - VABufferID slice_param_buf; - VASurfaceID * ci_shared_surfaces; - VASurfaceID * surfaces; - guint surface_num; + virtual MIX_RESULT Initialize( + MixVideoConfigParamsEnc* config_params_enc, + MixFrameManager * frame_mgr, + MixBufferPool * input_buf_pool, + MixSurfacePool ** surface_pool, + MixUsrReqSurfacesInfo * requested_surface_info, + VADisplay va_display); + virtual MIX_RESULT Encode( MixBuffer * bufin[], + int bufincnt, MixIOVec * iovout[], int iovoutcnt, + MixVideoEncodeParams * encode_params); + virtual MIX_RESULT Flush(); + virtual MIX_RESULT Deinitialize(); - MixVideoFrame *cur_frame; //current input frame to be encoded; - MixVideoFrame *ref_frame; //reference frame - MixVideoFrame *rec_frame; //reconstructed frame; +private: + /* Local Methods */ + MIX_RESULT _process_encode (MixBuffer * bufin, MixIOVec * iovout); - guint basic_unit_size; //for rate control - guint disable_deblocking_filter_idc; - guint slice_num; - guint va_rcmode; +public: + VABufferID coded_buf; + VABufferID seq_param_buf; + VABufferID pic_param_buf; + VABufferID slice_param_buf; + VASurfaceID * shared_surfaces; + VASurfaceID * surfaces; + uint surface_num; + uint shared_surfaces_cnt; + uint precreated_surfaces_cnt; + MixVideoFrame *cur_frame; //current input frame to be encoded; + MixVideoFrame *ref_frame; //reference frame + MixVideoFrame *rec_frame; //reconstructed frame; + MixVideoFrame *lookup_frame; - guint encoded_frames; - gboolean pic_skipped; + uint basic_unit_size; //for rate control + uint disable_deblocking_filter_idc; + uint slice_num; + uint va_rcmode_preview; -
gboolean is_intra; - guint coded_buf_size; + uint encoded_frames; + bool pic_skipped; - /*< public > */ -}; + bool is_intra; -/** - * MixVideoFormatEnc_PreviewClass: - * - * MI-X Video object class - */ -struct _MixVideoFormatEnc_PreviewClass { - /*< public > */ - MixVideoFormatEncClass parent_class; + uint coded_buf_size; - /* class members */ + uint8 ** usrptr; + uint alloc_surface_cnt; - /*< public > */ + /*< public > */ }; -/** - * mix_videoformatenc_preview_get_type: - * @returns: type - * - * Get the type of object. - */ -GType mix_videoformatenc_preview_get_type(void); + /** * mix_videoformatenc_preview_new: @@ -109,29 +105,6 @@ MixVideoFormatEnc_Preview *mix_videoformatenc_preview_ref(MixVideoFormatEnc_Prev * * Decrement reference count of the object. */ -#define mix_videoformatenc_preview_unref(obj) g_object_unref (G_OBJECT(obj)) - -/* Class Methods */ - -/* Pure preview vmethods */ -MIX_RESULT mix_videofmtenc_preview_getcaps(MixVideoFormatEnc *mix, GString *msg); -MIX_RESULT mix_videofmtenc_preview_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); -MIX_RESULT mix_videofmtenc_preview_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[], - gint bufincnt, MixIOVec * iovout[], gint iovoutcnt, - MixVideoEncodeParams * encode_params); -MIX_RESULT mix_videofmtenc_preview_flush(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_preview_eos(MixVideoFormatEnc *mix); -MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix); - -/* Local Methods */ -MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mix, MixBuffer * bufin, - MixIOVec * iovout); - -G_END_DECLS +MixVideoFormatEnc_Preview * mix_videoformatenc_preview_unref(MixVideoFormatEnc_Preview * mix); #endif /* __MIX_VIDEOFORMATENC_PREVIEW_H__ */ diff --git a/mix_video/src/mixvideoformatqueue.h b/mix_video/src/mixvideoformatqueue.h index 97c2b08..b917313 100644 --- a/mix_video/src/mixvideoformatqueue.h +++ b/mix_video/src/mixvideoformatqueue.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
@@ -15,9 +15,9 @@ typedef struct _MixInputBufferEntry MixInputBufferEntry; struct _MixInputBufferEntry { - /*< private > */ - MixBuffer *buf; - guint64 timestamp; + /*< private > */ + MixBuffer *buf; + uint64 timestamp; }; #endif /* __MIX_VIDEOFORMATQUEUE_H__ */ diff --git a/mix_video/src/mixvideoframe.cpp b/mix_video/src/mixvideoframe.cpp index 82f774b..a5be31a 100644 --- a/mix_video/src/mixvideoframe.cpp +++ b/mix_video/src/mixvideoframe.cpp @@ -8,24 +8,24 @@ /** * SECTION:mixvideoframe - * @short_description: MI-X Video Frame Object - * + * @short_description: MI-X Video Frame Object + * * - * The MixVideoFrame object will be created by - * MixVideo and provided to the MMF/App in the + * The MixVideoFrame object will be created by + * MixVideo and provided to the MMF/App in the * MixVideo mix_video_get_frame() function. * - * - * mix_video_release_frame() must be used - * to release frame object returned from - * mix_video_get_frame(). Caller must not - * use mix_videoframe_ref() or mix_videoframe_unref() - * or adjust the reference count directly in any way. - * This object can be supplied in the mix_video_render() - * function to render the associated video frame. - * The MMF/App can release this object when it no longer + * + * mix_video_release_frame() must be used + * to release frame object returned from + * mix_video_get_frame(). Caller must not + * use mix_videoframe_ref() or mix_videoframe_unref() + * or adjust the reference count directly in any way. + * This object can be supplied in the mix_video_render() + * function to render the associated video frame. + * The MMF/App can release this object when it no longer * needs to display/re-display this frame. - * + * */ @@ -38,324 +38,326 @@ #include "mixsurfacepool.h" #include "mixvideoframe.h" -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } +#define SAFE_FREE(p) if(p) { free(p); p = NULL; } #define MIX_VIDEOFRAME_SETTER_CHECK_INPUT(obj) \ if(!obj) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEOFRAME(obj)) return MIX_RESULT_FAIL; \ - + #define MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, prop) \ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEOFRAME(obj)) return MIX_RESULT_FAIL; \ - + MixVideoFrame::MixVideoFrame() - :frame_id(VA_INVALID_SURFACE) - ,timestamp(0) - ,discontinuity(FALSE) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) - ,pool(NULL) - ,is_skipped(FALSE) - ,real_frame(NULL) - ,sync_flag(FALSE) - ,frame_structure(VA_FRAME_PICTURE) - ,va_display(NULL){ - g_static_rec_mutex_init (&lock); + :frame_id(VA_INVALID_SURFACE) + ,timestamp(0) + ,discontinuity(FALSE) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) + ,pool(NULL) + ,is_skipped(FALSE) + ,real_frame(NULL) + ,sync_flag(FALSE) + ,frame_structure(VA_FRAME_PICTURE) + ,va_display(NULL) { +// g_static_rec_mutex_init (&lock); } MixVideoFrame::~MixVideoFrame() { - g_static_rec_mutex_free (&lock); -} - -gboolean MixVideoFrame::copy(MixParams *target) const { - gboolean ret = FALSE; - MixVideoFrame * this_target = MIX_VIDEOFRAME(target); - if (NULL != this_target) { - this_target->frame_id = this->frame_id; - this_target->timestamp = this->timestamp; - this_target->discontinuity = this->discontinuity; - // chain up base class - ret = MixParams::copy(target); - } - return ret; -} - -gboolean MixVideoFrame::equal(MixParams* obj) const { - gboolean ret = FALSE; - MixVideoFrame * this_obj = MIX_VIDEOFRAME(obj); - if (NULL != this_obj) { - /* TODO: add comparison for other properties */ - if (this->frame_id == 
this_obj->frame_id && - this->timestamp == this_obj->timestamp && - this->discontinuity == this_obj->discontinuity) { - ret = MixParams::equal(this_obj); - } - } - return ret; +// g_static_rec_mutex_free (&lock); +} + +bool MixVideoFrame::copy(MixParams *target) const { + bool ret = FALSE; + MixVideoFrame * this_target = MIX_VIDEOFRAME(target); + if (NULL != this_target) { + this_target->frame_id = this->frame_id; + this_target->timestamp = this->timestamp; + this_target->discontinuity = this->discontinuity; + // chain up base class + ret = MixParams::copy(target); + } + return ret; +} + +bool MixVideoFrame::equal(MixParams* obj) const { + bool ret = FALSE; + MixVideoFrame * this_obj = MIX_VIDEOFRAME(obj); + if (NULL != this_obj) { + /* TODO: add comparison for other properties */ + if (this->frame_id == this_obj->frame_id && + this->timestamp == this_obj->timestamp && + this->discontinuity == this_obj->discontinuity) { + ret = MixParams::equal(this_obj); + } + } + return ret; } MixParams* MixVideoFrame::dup() const { - MixParams *ret = new MixVideoFrame(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; + MixParams *ret = new MixVideoFrame(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; } void MixVideoFrame::Lock() { - g_static_rec_mutex_lock(&lock); +// g_static_rec_mutex_lock(&lock); + mLock.lock(); } void MixVideoFrame::Unlock() { - g_static_rec_mutex_unlock (&lock); +// g_static_rec_mutex_unlock (&lock); + mLock.unlock(); } MixVideoFrame * mix_videoframe_new(void) { - return new MixVideoFrame(); + return new MixVideoFrame(); } MixVideoFrame * mix_videoframe_ref(MixVideoFrame * obj) { - if (NULL != obj) { - obj->Lock(); - LOG_I("obj %x, new refcount is %d\n", (guint) obj, - obj->GetRefCount() + 1); - obj->Ref(); - obj->Unlock(); - } - return obj; + if (NULL != obj) { + obj->Lock(); + LOG_I("obj %x, new refcount is %d\n", (uint) obj, + obj->GetRefCount() + 1); + obj->Ref(); + obj->Unlock(); + } + return obj; } void mix_videoframe_unref(MixVideoFrame * obj) { - if(NULL == obj) { - LOG_E("obj is NULL\n"); - return; - } - - obj->Lock(); - LOG_I("obj %x, frame id %d, new refcount is %d\n", (guint) obj, - (guint) obj->frame_id, obj->GetRefCount() - 1); - - // Check if we have reduced to 1, in which case we add ourselves to free pool - // but only do this for real frames, not skipped frames - if (((obj->GetRefCount() - 1) == 1) && (!(obj->is_skipped))) { - LOG_I("Adding obj %x, frame id %d back to pool\n", (guint) obj, - (guint) obj->frame_id); - MixSurfacePool *pool = obj->pool; - if(pool == NULL) { - LOG_E("pool is NULL\n"); - obj->Unlock(); - return; - } - mix_videoframe_reset(obj); - mix_surfacepool_put(pool, obj); - } - - //If this is a skipped frame that is being deleted, release the real frame - if (((obj->GetRefCount() - 1) == 0) && (obj->is_skipped)) { - LOG_I("skipped frame obj %x, releasing real frame %x \n", - (guint) obj, (guint) obj->real_frame); - mix_videoframe_unref(obj->real_frame); - } - - // Unref through base class - obj->Unref(); - obj->Unlock(); + if (NULL == obj) { + LOG_E("obj is NULL\n"); + return; + } + + obj->Lock(); + LOG_I("obj %x, frame id %d, new refcount is %d\n", (uint) obj, + (uint) obj->frame_id, obj->GetRefCount() - 1); + + // Check if we have reduced to 1, in which case we add ourselves to free pool + // but only do this for real frames, not skipped frames + if (((obj->GetRefCount() - 1) == 1) && (!(obj->is_skipped))) { + LOG_I("Adding obj %x, 
frame id %d back to pool\n", (uint) obj, + (uint) obj->frame_id); + MixSurfacePool *pool = obj->pool; + if (pool == NULL) { + LOG_E("pool is NULL\n"); + obj->Unlock(); + return; + } + mix_videoframe_reset(obj); + mix_surfacepool_put(pool, obj); + } + + //If this is a skipped frame that is being deleted, release the real frame + if (((obj->GetRefCount() - 1) == 0) && (obj->is_skipped)) { + LOG_I("skipped frame obj %x, releasing real frame %x \n", + (uint) obj, (uint) obj->real_frame); + mix_videoframe_unref(obj->real_frame); + } + + // Unref through base class + obj->Unref(); + obj->Unlock(); } /* TODO: Add getters and setters for other properties. The following is just an example, not implemented yet. */ MIX_RESULT mix_videoframe_set_frame_id( - MixVideoFrame * obj, gulong frame_id) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->frame_id = frame_id; - return MIX_RESULT_SUCCESS; + MixVideoFrame * obj, ulong frame_id) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->frame_id = frame_id; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_get_frame_id( - MixVideoFrame * obj, gulong * frame_id) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_id); - *frame_id = obj->frame_id; - return MIX_RESULT_SUCCESS; + MixVideoFrame * obj, ulong * frame_id) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_id); + *frame_id = obj->frame_id; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_set_ci_frame_idx ( - MixVideoFrame * obj, guint ci_frame_idx) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->ci_frame_idx = ci_frame_idx; - return MIX_RESULT_SUCCESS; + MixVideoFrame * obj, uint ci_frame_idx) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->ci_frame_idx = ci_frame_idx; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_get_ci_frame_idx ( - MixVideoFrame * obj, guint * ci_frame_idx) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, ci_frame_idx); - *ci_frame_idx = obj->ci_frame_idx; - return MIX_RESULT_SUCCESS; + MixVideoFrame * obj, uint * ci_frame_idx) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, ci_frame_idx); + *ci_frame_idx = obj->ci_frame_idx; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_set_timestamp( - MixVideoFrame * obj, guint64 timestamp) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->timestamp = timestamp; - return MIX_RESULT_SUCCESS; + MixVideoFrame * obj, uint64 timestamp) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->timestamp = timestamp; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_get_timestamp( - MixVideoFrame * obj, guint64 * timestamp) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, timestamp); - *timestamp = obj->timestamp; - return MIX_RESULT_SUCCESS; + MixVideoFrame * obj, uint64 * timestamp) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, timestamp); + *timestamp = obj->timestamp; + return MIX_RESULT_SUCCESS; }
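One detail in mix_videoframe_unref() above is worth making explicit: the surface pool itself holds one reference to every live frame, so a frame whose count is about to drop to 1 has no outside users left and is recycled rather than destroyed. A sketch of that invariant (simplified pseudocode; the ordering mirrors the function above):

    // refcount 2 -> 1 : last external user gone, recycle into the pool.
    // refcount 1 -> 0 : only reached by skipped frames, which bypass the
    //                   pool and instead release the real frame they wrap.
    if (obj->GetRefCount() - 1 == 1 && !obj->is_skipped) {
        mix_videoframe_reset(obj);            // clear timestamp, flags, ...
        mix_surfacepool_put(obj->pool, obj);  // surface back on the free list
    }
    if (obj->GetRefCount() - 1 == 0 && obj->is_skipped)
        mix_videoframe_unref(obj->real_frame);
    obj->Unref();                             // chain to base-class refcount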
MIX_RESULT mix_videoframe_set_discontinuity( - MixVideoFrame * obj, gboolean discontinuity) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->discontinuity = discontinuity; - return MIX_RESULT_SUCCESS; + MixVideoFrame * obj, bool discontinuity) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->discontinuity = discontinuity; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_get_discontinuity( - MixVideoFrame * obj, gboolean * discontinuity) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, discontinuity); - *discontinuity = obj->discontinuity; - return MIX_RESULT_SUCCESS; + MixVideoFrame * obj, bool * discontinuity) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, discontinuity); + *discontinuity = obj->discontinuity; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_set_frame_structure( - MixVideoFrame * obj, guint32 frame_structure) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->frame_structure = frame_structure; - return MIX_RESULT_SUCCESS; + MixVideoFrame * obj, uint32 frame_structure) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->frame_structure = frame_structure; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_get_frame_structure( - MixVideoFrame * obj, guint32* frame_structure) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_structure); - *frame_structure = obj->frame_structure; - return MIX_RESULT_SUCCESS; + MixVideoFrame * obj, uint32* frame_structure) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_structure); + *frame_structure = obj->frame_structure; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_set_pool( - MixVideoFrame * obj, MixSurfacePool * pool) { - /* set pool pointer in private structure */ - obj->pool = pool; - return MIX_RESULT_SUCCESS; + MixVideoFrame * obj, MixSurfacePool * pool) { + /* set pool pointer in private structure */ + obj->pool = pool; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_set_frame_type( - MixVideoFrame *obj, MixFrameType frame_type) { - obj->frame_type = frame_type; - return MIX_RESULT_SUCCESS; + MixVideoFrame *obj, MixFrameType frame_type) { + obj->frame_type = frame_type; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_get_frame_type( - MixVideoFrame *obj, MixFrameType *frame_type) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, frame_type); - *frame_type = obj->frame_type; - return MIX_RESULT_SUCCESS; + MixVideoFrame *obj, MixFrameType *frame_type) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, frame_type); + *frame_type = obj->frame_type; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_set_is_skipped( - MixVideoFrame *obj, gboolean is_skipped) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->is_skipped = is_skipped; - return MIX_RESULT_SUCCESS; + MixVideoFrame *obj, bool is_skipped) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->is_skipped = is_skipped; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_get_is_skipped( - MixVideoFrame *obj, gboolean *is_skipped) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, is_skipped); - *is_skipped = obj->is_skipped; - return MIX_RESULT_SUCCESS; + MixVideoFrame *obj, bool *is_skipped) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, is_skipped); + *is_skipped = obj->is_skipped; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_set_real_frame( - MixVideoFrame *obj, MixVideoFrame *real) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->real_frame = real; - return MIX_RESULT_SUCCESS; + MixVideoFrame *obj, MixVideoFrame *real) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->real_frame = real; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_get_real_frame( - MixVideoFrame *obj, MixVideoFrame **real) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, real); - *real = obj->real_frame; - return MIX_RESULT_SUCCESS; + MixVideoFrame *obj, MixVideoFrame **real) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, real); + *real = obj->real_frame; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_reset(MixVideoFrame *obj) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->timestamp = 0; - obj->discontinuity = FALSE;
+ obj->is_skipped = FALSE; + obj->real_frame = NULL; + obj->sync_flag = FALSE; + obj->frame_structure = VA_FRAME_PICTURE; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_set_sync_flag( - MixVideoFrame *obj, gboolean sync_flag) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->sync_flag = sync_flag; - if (obj->real_frame && obj->real_frame != obj) { - mix_videoframe_set_sync_flag(obj->real_frame, sync_flag); - } - return MIX_RESULT_SUCCESS; + MixVideoFrame *obj, bool sync_flag) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->sync_flag = sync_flag; + if (obj->real_frame && obj->real_frame != obj) { + mix_videoframe_set_sync_flag(obj->real_frame, sync_flag); + } + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_get_sync_flag( - MixVideoFrame *obj, gboolean *sync_flag) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, sync_flag); - if (obj->real_frame && obj->real_frame != obj) { - return mix_videoframe_get_sync_flag(obj->real_frame, sync_flag); - } else { - *sync_flag = obj -> sync_flag; - } - return MIX_RESULT_SUCCESS; + MixVideoFrame *obj, bool *sync_flag) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, sync_flag); + if (obj->real_frame && obj->real_frame != obj) { + return mix_videoframe_get_sync_flag(obj->real_frame, sync_flag); + } else { + *sync_flag = obj -> sync_flag; + } + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_set_vadisplay( - MixVideoFrame * obj, void *va_display) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->va_display = va_display; - if (obj->real_frame && obj->real_frame != obj) { - mix_videoframe_set_vadisplay(obj->real_frame, va_display); - } - return MIX_RESULT_SUCCESS; + MixVideoFrame * obj, void *va_display) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->va_display = va_display; + if (obj->real_frame && obj->real_frame != obj) { + mix_videoframe_set_vadisplay(obj->real_frame, va_display); + } + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_get_vadisplay( - MixVideoFrame * obj, void **va_display) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, va_display); - if (obj->real_frame && obj->real_frame != obj) { - return mix_videoframe_get_vadisplay(obj->real_frame, va_display); - } else { - *va_display = obj->va_display; - } - return MIX_RESULT_SUCCESS; + MixVideoFrame * obj, void **va_display) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, va_display); + if (obj->real_frame && obj->real_frame != obj) { + return mix_videoframe_get_vadisplay(obj->real_frame, va_display); + } else { + *va_display = obj->va_display; + } + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_set_displayorder( - MixVideoFrame *obj, guint32 displayorder) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->displayorder = displayorder; - return MIX_RESULT_SUCCESS; + MixVideoFrame *obj, uint32 displayorder) { + MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); + obj->displayorder = displayorder; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videoframe_get_displayorder( - MixVideoFrame *obj, guint32 *displayorder) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, displayorder); - *displayorder = obj->displayorder; - return MIX_RESULT_SUCCESS; + MixVideoFrame *obj, uint32 *displayorder) { + MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, displayorder); + *displayorder = obj->displayorder; + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoframe.h b/mix_video/src/mixvideoframe.h index 9c5c084..fd58bbd 100644 --- a/mix_video/src/mixvideoframe.h +++ b/mix_video/src/mixvideoframe.h @@ -11,6 +11,7 @@ #include #include "mixvideodef.h" +#include "mixvideothread.h" class MixSurfacePool; 
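The commented-out GStaticRecMutex calls above are replaced by the MixVideoMutex member that mixvideothread.h (newly included here) provides. For orientation, a minimal pthread-based wrapper with the same two operations might look like the following; this is a sketch only, and the real MixVideoMutex may differ. Note that the GLib mutex it replaces was recursive, so if Lock() can nest, the mutex should be initialized with the PTHREAD_MUTEX_RECURSIVE attribute:

    #include <pthread.h>

    class MixVideoMutex {
    public:
        MixVideoMutex()  { pthread_mutex_init(&mMutex, NULL); }
        ~MixVideoMutex() { pthread_mutex_destroy(&mMutex); }
        void lock()      { pthread_mutex_lock(&mMutex); }
        void unlock()    { pthread_mutex_unlock(&mMutex); }
    private:
        pthread_mutex_t mMutex;  // plain (non-recursive) by default
    };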
@@ -31,10 +32,10 @@ class MixSurfacePool; typedef enum _MixFrameType { - TYPE_I, - TYPE_P, - TYPE_B, - TYPE_INVALID + TYPE_I, + TYPE_P, + TYPE_B, + TYPE_INVALID } MixFrameType; /** @@ -44,56 +45,57 @@ typedef enum _MixFrameType { */ class MixVideoFrame : public MixParams { public: - MixVideoFrame(); - ~MixVideoFrame(); - virtual gboolean copy(MixParams *target) const; - virtual gboolean equal(MixParams* obj) const; - virtual MixParams* dup() const; - void Lock(); - void Unlock(); + MixVideoFrame(); + ~MixVideoFrame(); + virtual bool copy(MixParams *target) const; + virtual bool equal(MixParams* obj) const; + virtual MixParams* dup() const; + void Lock(); + void Unlock(); public: - /* ID associated with the decoded frame */ - gulong frame_id; - - /* ID associated with the CI frame - * (used for encode only) */ - guint ci_frame_idx; - - /* 64 bit timestamp. For decode, - * this is preserved from the corresponding - * MixVideoDecodeParams field. For encode, - * this is created during encoding. */ - guint64 timestamp; - - /* Flag indicating whether there - * is a discontinuity. For decode, - * this is preserved from the corresponding - * MixVideoDecodeParams field. */ - gboolean discontinuity; - - /* Reserved for future use */ - void *reserved1; - - /* Reserved for future use */ - void *reserved2; - - /* Reserved for future use */ - void *reserved3; - - /* Reserved for future use */ - void *reserved4; + /* ID associated with the decoded frame */ + ulong frame_id; + + /* ID associated with the CI frame + * (used for encode only) */ + uint ci_frame_idx; + + /* 64 bit timestamp. For decode, + * this is preserved from the corresponding + * MixVideoDecodeParams field. For encode, + * this is created during encoding. */ + uint64 timestamp; + + /* Flag indicating whether there + * is a discontinuity. For decode, + * this is preserved from the corresponding + * MixVideoDecodeParams field. 
*/ + bool discontinuity; + + /* Reserved for future use */ + void *reserved1; + + /* Reserved for future use */ + void *reserved2; + + /* Reserved for future use */ + void *reserved3; + + /* Reserved for future use */ + void *reserved4; public: - // from structure MixVideoFramePrivate - MixSurfacePool *pool; - MixFrameType frame_type; - gboolean is_skipped; - MixVideoFrame *real_frame; - GStaticRecMutex lock; - gboolean sync_flag; - guint32 frame_structure; // 0: frame, 1: top field, 2: bottom field - void *va_display; - guint32 displayorder; + // from structure MixVideoFramePrivate + MixSurfacePool *pool; + MixFrameType frame_type; + bool is_skipped; + MixVideoFrame *real_frame; +// GStaticRecMutex lock; + mutable MixVideoMutex mLock; + bool sync_flag; + uint32 frame_structure; // 0: frame, 1: top field, 2: bottom field + void *va_display; + uint32 displayorder; }; @@ -126,82 +128,82 @@ void mix_videoframe_unref(MixVideoFrame * obj); /** * mix_videoframe_set_frame_id: * @obj: #MixVideoFrame object - * @frame_id: ID associated with the decoded frame + * @frame_id: ID associated with the decoded frame * @returns: Common Video Error Return Codes * - * Set Frame ID + * Set Frame ID */ -MIX_RESULT mix_videoframe_set_frame_id(MixVideoFrame * obj, gulong frame_id); +MIX_RESULT mix_videoframe_set_frame_id(MixVideoFrame * obj, ulong frame_id); /** * mix_videoframe_get_frame_id: * @obj: #MixVideoFrame object - * @frame_id: frame ID to be returned + * @frame_id: frame ID to be returned * @returns: Common Video Error Return Codes * - * Get Frame ID + * Get Frame ID */ -MIX_RESULT mix_videoframe_get_frame_id(MixVideoFrame * obj, gulong * frame_id); +MIX_RESULT mix_videoframe_get_frame_id(MixVideoFrame * obj, ulong * frame_id); /** * mix_videoframe_set_ci_frame_idx: * @obj: #MixVideoFrame object - * @ci_frame_idx: ID associated with the CI frame (used for encode only) + * @ci_frame_idx: ID associated with the CI frame (used for encode only) * @returns: Common Video Error Return Codes * - * Set CI Frame ID + * Set CI Frame ID */ -MIX_RESULT mix_videoframe_set_ci_frame_idx(MixVideoFrame * obj, guint ci_frame_idx); +MIX_RESULT mix_videoframe_set_ci_frame_idx(MixVideoFrame * obj, uint ci_frame_idx); /** * mix_videoframe_get_ci_frame_idx: * @obj: #MixVideoFrame object - * @ci_frame_idx: CI Frame ID to be returned + * @ci_frame_idx: CI Frame ID to be returned * @returns: Common Video Error Return Codes * - * Get CI Frame ID + * Get CI Frame ID */ -MIX_RESULT mix_videoframe_get_ci_frame_idx(MixVideoFrame * obj, guint * ci_frame_idx); +MIX_RESULT mix_videoframe_get_ci_frame_idx(MixVideoFrame * obj, uint * ci_frame_idx); /** * mix_videoframe_set_timestamp: * @obj: #MixVideoFrame object - * @timestamp: Frame timestamp + * @timestamp: Frame timestamp * @returns: Common Video Error Return Codes * - * Set Frame timestamp + * Set Frame timestamp */ -MIX_RESULT mix_videoframe_set_timestamp(MixVideoFrame * obj, guint64 timestamp); +MIX_RESULT mix_videoframe_set_timestamp(MixVideoFrame * obj, uint64 timestamp); /** * mix_videoframe_get_timestamp: * @obj: #MixVideoFrame object - * @timestamp: Frame timestamp to be returned + * @timestamp: Frame timestamp to be returned * @returns: Common Video Error Return Codes * - * Get Frame timestamp + * Get Frame timestamp */ -MIX_RESULT mix_videoframe_get_timestamp(MixVideoFrame * obj, guint64 * timestamp); +MIX_RESULT mix_videoframe_get_timestamp(MixVideoFrame * obj, uint64 * timestamp); /** * mix_videoframe_set_discontinuity: * @obj: #MixVideoFrame object - * @discontinuity: 
Discontinuity flag + * @discontinuity: Discontinuity flag * @returns: Common Video Error Return Codes * - * Get discontinuity flag + * Set discontinuity flag */ -MIX_RESULT mix_videoframe_set_discontinuity(MixVideoFrame * obj, gboolean discontinuity); +MIX_RESULT mix_videoframe_set_discontinuity(MixVideoFrame * obj, bool discontinuity); /** * mix_videoframe_get_discontinuity: * @obj: #MixVideoFrame object - * @discontinuity: Discontinuity flag to be returned + * @discontinuity: Discontinuity flag to be returned * @returns: Common Video Error Return Codes * - * Get discontinuity flag + * Get discontinuity flag */ -MIX_RESULT mix_videoframe_get_discontinuity(MixVideoFrame * obj, gboolean * discontinuity); +MIX_RESULT mix_videoframe_get_discontinuity(MixVideoFrame * obj, bool * discontinuity); /** * TODO: Add documentation for the following 2 functions @@ -209,7 +211,7 @@ MIX_RESULT mix_videoframe_get_discontinuity(MixVideoFrame * obj, gboolean * disc */ MIX_RESULT mix_videoframe_set_vadisplay(MixVideoFrame * obj, void *va_display); MIX_RESULT mix_videoframe_get_vadisplay(MixVideoFrame * obj, void **va_display); -MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, guint32* frame_structure); +MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, uint32* frame_structure); // from private structure MixVideoFramePrivate /* Private functions */ @@ -223,10 +225,10 @@ MIX_RESULT mix_videoframe_get_frame_type (MixVideoFrame *obj, MixFrameType *frame_type); MIX_RESULT -mix_videoframe_set_is_skipped (MixVideoFrame *obj, gboolean is_skipped); +mix_videoframe_set_is_skipped (MixVideoFrame *obj, bool is_skipped); MIX_RESULT -mix_videoframe_get_is_skipped (MixVideoFrame *obj, gboolean *is_skipped); +mix_videoframe_get_is_skipped (MixVideoFrame *obj, bool *is_skipped); MIX_RESULT mix_videoframe_set_real_frame (MixVideoFrame *obj, MixVideoFrame *real); @@ -238,18 +240,18 @@ MIX_RESULT mix_videoframe_reset(MixVideoFrame *obj); MIX_RESULT -mix_videoframe_set_sync_flag(MixVideoFrame *obj, gboolean sync_flag); +mix_videoframe_set_sync_flag(MixVideoFrame *obj, bool sync_flag); MIX_RESULT -mix_videoframe_get_sync_flag(MixVideoFrame *obj, gboolean *sync_flag); +mix_videoframe_get_sync_flag(MixVideoFrame *obj, bool *sync_flag); -MIX_RESULT -mix_videoframe_set_frame_structure(MixVideoFrame * obj, guint32 frame_structure); +MIX_RESULT +mix_videoframe_set_frame_structure(MixVideoFrame * obj, uint32 frame_structure); MIX_RESULT -mix_videoframe_set_displayorder(MixVideoFrame *obj, guint32 displayorder); +mix_videoframe_set_displayorder(MixVideoFrame *obj, uint32 displayorder); MIX_RESULT -mix_videoframe_get_displayorder(MixVideoFrame *obj, guint32 *displayorder); +mix_videoframe_get_displayorder(MixVideoFrame *obj, uint32 *displayorder); #endif /* __MIX_VIDEOFRAME_H__ */ diff --git a/mix_video/src/mixvideoframe_private.h b/mix_video/src/mixvideoframe_private.h index 54d8a3e..96c22bd 100644 --- a/mix_video/src/mixvideoframe_private.h +++ b/mix_video/src/mixvideoframe_private.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. +Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors.
The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -14,22 +14,22 @@ No license under any patent, copyright, trade secret or other intellectual prope typedef enum _MixFrameType { - TYPE_I, - TYPE_P, - TYPE_B, - TYPE_INVALID + TYPE_I, + TYPE_P, + TYPE_B, + TYPE_INVALID } MixFrameType; -class MixVideoFramePrivate +class MixVideoFramePrivate { public: - MixVideoFramePrivate() - :pool(NULL) - ,is_skipped(FALSE) - ,real_frame(NULL) - ,sync_flag(FALSE) - ,frame_structure(VA_FRAME_PICTURE) - ,va_display(NULL) + MixVideoFramePrivate() + :pool(NULL) + ,is_skipped(FALSE) + ,real_frame(NULL) + ,sync_flag(FALSE) + ,frame_structure(VA_FRAME_PICTURE) + ,va_display(NULL) {} public: /*< private > */ @@ -50,10 +50,10 @@ MIX_RESULT mix_videoframe_get_frame_type (MixVideoFrame *obj, MixFrameType *frame_type); MIX_RESULT -mix_videoframe_set_is_skipped (MixVideoFrame *obj, gboolean is_skipped); +mix_videoframe_set_is_skipped (MixVideoFrame *obj, bool is_skipped); MIX_RESULT -mix_videoframe_get_is_skipped (MixVideoFrame *obj, gboolean *is_skipped); +mix_videoframe_get_is_skipped (MixVideoFrame *obj, bool *is_skipped); MIX_RESULT mix_videoframe_set_real_frame (MixVideoFrame *obj, MixVideoFrame *real); @@ -65,19 +65,19 @@ MIX_RESULT mix_videoframe_reset(MixVideoFrame *obj); MIX_RESULT -mix_videoframe_set_sync_flag(MixVideoFrame *obj, gboolean sync_flag); +mix_videoframe_set_sync_flag(MixVideoFrame *obj, bool sync_flag); MIX_RESULT -mix_videoframe_get_sync_flag(MixVideoFrame *obj, gboolean *sync_flag); +mix_videoframe_get_sync_flag(MixVideoFrame *obj, bool *sync_flag); -MIX_RESULT -mix_videoframe_set_frame_structure(MixVideoFrame * obj, guint32 frame_structure); +MIX_RESULT +mix_videoframe_set_frame_structure(MixVideoFrame * obj, uint32 frame_structure); MIX_RESULT -mix_videoframe_set_displayorder(MixVideoFrame *obj, guint32 displayorder); +mix_videoframe_set_displayorder(MixVideoFrame *obj, uint32 displayorder); MIX_RESULT -mix_videoframe_get_displayorder(MixVideoFrame *obj, guint32 *displayorder); +mix_videoframe_get_displayorder(MixVideoFrame *obj, uint32 *displayorder); #endif #endif /* __MIX_VIDEOFRAME_PRIVATE_H__ */ diff --git a/mix_video/src/mixvideoinitparams.cpp b/mix_video/src/mixvideoinitparams.cpp index bcc282c..cba548a 100644 --- a/mix_video/src/mixvideoinitparams.cpp +++ b/mix_video/src/mixvideoinitparams.cpp @@ -10,117 +10,117 @@ * SECTION:mixvideoinitparams * @short_description: MI-X Video Initialization Parameters * - * The MixVideoInitParams object will be created by the MMF/App - * and provided in the mix_video_initialize() function. - * The get and set methods for the properties will be available for + * The MixVideoInitParams object will be created by the MMF/App + * and provided in the mix_video_initialize() function. + * The get and set methods for the properties will be available for * the caller to set and get information used at initialization time. 
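 *
 * A minimal usage sketch (illustrative only; assumes a valid MixDisplay such
 * as a MixDisplayX11 named x11, and omits error checks):
 * |[
 * MixVideoInitParams *ip = mix_videoinitparams_new();
 * mix_videoinitparams_set_display(ip, MIX_DISPLAY(x11));
 * // ... pass ip to mix_video_initialize() ...
 * mix_videoinitparams_unref(ip);
 * ]|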
*/ #include "mixvideoinitparams.h" -#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; } +#define SAFE_FREE(p) if(p) { free(p); p = NULL; } #define MIX_VIDEOINITPARAMS_SETTER_CHECK_INPUT(obj) \ if(!obj) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEOINITPARAMS(obj)) return MIX_RESULT_FAIL; \ - + #define MIX_VIDEOINITPARAMS_GETTER_CHECK_INPUT(obj, prop) \ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEOINITPARAMS(obj)) return MIX_RESULT_FAIL; \ - + MixVideoInitParams::MixVideoInitParams() - :display(NULL) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) { + :display(NULL) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { } MixVideoInitParams::~MixVideoInitParams() { - /* unref display */ - if (this->display) { - mix_display_unref(this->display); - this->display = NULL; - } + /* unref display */ + if (this->display) { + mix_display_unref(this->display); + this->display = NULL; + } } -gboolean MixVideoInitParams::copy(MixParams *target) const { - gboolean ret = FALSE; - MixVideoInitParams * this_target = MIX_VIDEOINITPARAMS(target); - if (NULL != this_target) { - /* duplicate display */ - this_target->display = mix_display_dup(this->display); - // chain up base class - ret = MixParams::copy(target); - } - return ret; +bool MixVideoInitParams::copy(MixParams *target) const { + bool ret = FALSE; + MixVideoInitParams * this_target = MIX_VIDEOINITPARAMS(target); + if (NULL != this_target) { + /* duplicate display */ + this_target->display = mix_display_dup(this->display); + // chain up base class + ret = MixParams::copy(target); + } + return ret; } -gboolean MixVideoInitParams::equal(MixParams* obj) const { - gboolean ret = FALSE; - MixVideoInitParams * this_obj = MIX_VIDEOINITPARAMS(obj); - if (NULL != this_obj) { - /* TODO: add comparison for other properties */ - if ((NULL == this->display && NULL == this_obj->display) || - mix_display_equal(this->display, this_obj->display)) { - ret = MixParams::equal(this_obj); - } - } - return ret; +bool MixVideoInitParams::equal(MixParams* obj) const { + bool ret = FALSE; + MixVideoInitParams * this_obj = MIX_VIDEOINITPARAMS(obj); + if (NULL != this_obj) { + /* TODO: add comparison for other properties */ + if ((NULL == this->display && NULL == this_obj->display) || + mix_display_equal(this->display, this_obj->display)) { + ret = MixParams::equal(this_obj); + } + } + return ret; } MixParams* MixVideoInitParams::dup() const { - MixParams *ret = new MixVideoInitParams(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; + MixParams *ret = new MixVideoInitParams(); + if (NULL != ret) { + if (FALSE == copy(ret)) { + ret->Unref(); + ret = NULL; + } + } + return ret; } MixVideoInitParams * mix_videoinitparams_new(void) { - return new MixVideoInitParams(); + return new MixVideoInitParams(); } MixVideoInitParams * mix_videoinitparams_ref(MixVideoInitParams * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } MIX_RESULT mix_videoinitparams_set_display( - MixVideoInitParams * obj, MixDisplay * display) { - MIX_VIDEOINITPARAMS_SETTER_CHECK_INPUT (obj); - if(obj->display) { - mix_display_unref(obj->display); - } - obj->display = NULL; - if(display) { - /* obj->display = mix_display_dup(display); - if(!obj->display) { - return MIX_RESULT_NO_MEMORY; - }*/ - - obj->display = mix_display_ref(display); - } - return MIX_RESULT_SUCCESS; + MixVideoInitParams * obj, MixDisplay * display) { + 
MIX_VIDEOINITPARAMS_SETTER_CHECK_INPUT (obj); + if (obj->display) { + mix_display_unref(obj->display); + } + obj->display = NULL; + if (display) { + /* obj->display = mix_display_dup(display); + if(!obj->display) { + return MIX_RESULT_NO_MEMORY; + }*/ + + obj->display = mix_display_ref(display); + } + return MIX_RESULT_SUCCESS; } /* Caller is responsible for using g_free to free the memory */ MIX_RESULT mix_videoinitparams_get_display( - MixVideoInitParams * obj, MixDisplay ** display) { - MIX_VIDEOINITPARAMS_GETTER_CHECK_INPUT (obj, display); - *display = NULL; - if(obj->display) { - /* *display = mix_display_dup(obj->display); - if(!*display) { - return MIX_RESULT_NO_MEMORY; - }*/ - *display = mix_display_ref(obj->display); - } - return MIX_RESULT_SUCCESS; + MixVideoInitParams * obj, MixDisplay ** display) { + MIX_VIDEOINITPARAMS_GETTER_CHECK_INPUT (obj, display); + *display = NULL; + if (obj->display) { + /* *display = mix_display_dup(obj->display); + if(!*display) { + return MIX_RESULT_NO_MEMORY; + }*/ + *display = mix_display_ref(obj->display); + } + return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoinitparams.h b/mix_video/src/mixvideoinitparams.h index 8d8dbec..000257c 100644 --- a/mix_video/src/mixvideoinitparams.h +++ b/mix_video/src/mixvideoinitparams.h @@ -1,6 +1,6 @@ -/* +/* INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. + Copyright 2009 Intel Corporation All Rights Reserved. The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. @@ -22,7 +22,7 @@ /** * MIX_IS_VIDEOINITPARAMS: * @obj: an object. - * + * * Checks if the given object is an instance of #MixVideoInitParams */ #define MIX_IS_VIDEOINITPARAMS(obj) ((NULL != MIX_VIDEOINITPARAMS(obj)) ?
TRUE : FALSE) @@ -34,35 +34,35 @@ */ class MixVideoInitParams : public MixParams { public: - MixVideoInitParams(); - ~MixVideoInitParams(); - virtual gboolean copy(MixParams *target) const; - virtual gboolean equal(MixParams* obj) const; - virtual MixParams* dup() const; + MixVideoInitParams(); + ~MixVideoInitParams(); + virtual bool copy(MixParams *target) const; + virtual bool equal(MixParams* obj) const; + virtual MixParams* dup() const; public: - /*< public > */ + /*< public > */ - /* Pointer to a MixDisplay object - * such as MixDisplayX11 */ - MixDisplay *display; + /* Pointer to a MixDisplay object + * such as MixDisplayX11 */ + MixDisplay *display; - /* Reserved for future use */ - void *reserved1; + /* Reserved for future use */ + void *reserved1; - /* Reserved for future use */ - void *reserved2; + /* Reserved for future use */ + void *reserved2; - /* Reserved for future use */ - void *reserved3; + /* Reserved for future use */ + void *reserved3; - /* Reserved for future use */ - void *reserved4; + /* Reserved for future use */ + void *reserved4; }; /** * mix_videoinitparams_new: * @returns: A newly allocated instance of #MixVideoInitParams - * + * * Use this method to create new instance of #MixVideoInitParams */ MixVideoInitParams *mix_videoinitparams_new (void); @@ -70,7 +70,7 @@ MixVideoInitParams *mix_videoinitparams_new (void); * mix_videoinitparams_ref: * @mix: object to add reference * @returns: the #MixVideoInitParams instance where reference count has been increased. - * + * * Add reference count. */ MixVideoInitParams *mix_videoinitparams_ref (MixVideoInitParams * mix); @@ -78,7 +78,7 @@ MixVideoInitParams *mix_videoinitparams_ref (MixVideoInitParams * mix); /** * mix_videoinitparams_unref: * @obj: object to unref. - * + * * Decrement reference count of the object. */ #define mix_videoinitparams_unref(obj) mix_params_unref(MIX_PARAMS(obj)) @@ -89,24 +89,24 @@ MixVideoInitParams *mix_videoinitparams_ref (MixVideoInitParams * mix); /** * mix_videoinitparams_set_display: * @obj: #MixVideoInitParams object - * @display: Pointer to a MixDisplay object such as MixDisplayX11 + * @display: Pointer to a MixDisplay object such as MixDisplayX11 * @returns: Common Video Error Return Codes * - * Set MixDisplay object + * Set MixDisplay object */ MIX_RESULT mix_videoinitparams_set_display ( - MixVideoInitParams * obj, MixDisplay * display); + MixVideoInitParams * obj, MixDisplay * display); /** * mix_videoinitparams_get_display: * @obj: #MixVideoInitParams object - * @dislay: Pointer to pointer of a MixDisplay object such as MixDisplayX11 + * @display: Pointer to pointer of a MixDisplay object such as MixDisplayX11 * @returns: Common Video Error Return Codes * - * Get MixDisplay object + * Get MixDisplay object */ MIX_RESULT mix_videoinitparams_get_display ( - MixVideoInitParams * obj, MixDisplay ** dislay); + MixVideoInitParams * obj, MixDisplay ** display); diff --git a/mix_video/src/mixvideorenderparams.cpp b/mix_video/src/mixvideorenderparams.cpp index 65e5183..063c32b 100644 --- a/mix_video/src/mixvideorenderparams.cpp +++ b/mix_video/src/mixvideorenderparams.cpp @@ -10,7 +10,7 @@ * SECTION:mixvideorenderparams * @short_description: MI-X Video Render Parameters * - * The #MixVideoRenderParams object will be created by the MMF/App + * The #MixVideoRenderParams object will be created by the MMF/App * and provided to #MixVideo in the #MixVideo mix_video_render() function.
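 *
 * Typical call sequence (illustrative only; src and dst are MixRect values
 * filled in by the app, x11 is an existing MixDisplayX11):
 * |[
 * MixVideoRenderParams *rp = mix_videorenderparams_new();
 * mix_videorenderparams_set_display(rp, MIX_DISPLAY(x11));
 * mix_videorenderparams_set_src_rect(rp, src);
 * mix_videorenderparams_set_dest_rect(rp, dst);
 * // ... hand rp to mix_video_render(), then ...
 * mix_videorenderparams_unref(rp);
 * ]|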
*/ @@ -23,277 +23,279 @@ #define MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT(obj) \ if(!obj) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEORENDERPARAMS(obj)) return MIX_RESULT_FAIL; \ - + #define MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT(obj, prop) \ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ if(!MIX_IS_VIDEORENDERPARAMS(obj)) return MIX_RESULT_FAIL; \ - -gboolean mix_rect_equal(MixRect rc1, MixRect rc2); + +bool mix_rect_equal(MixRect rc1, MixRect rc2); MixVideoRenderParams::MixVideoRenderParams() - :display(NULL) - ,clipping_rects(NULL) - ,number_of_clipping_rects(0) - ,reserved(NULL) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) - ,mVa_cliprects(NULL) { - /* initialize properties here */ - memset(&src_rect, 0, sizeof(MixRect)); - memset(&dst_rect, 0, sizeof(MixRect)); + :display(NULL) + ,clipping_rects(NULL) + ,number_of_clipping_rects(0) + ,reserved(NULL) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) + ,mVa_cliprects(NULL) { + /* initialize properties here */ + memset(&src_rect, 0, sizeof(MixRect)); + memset(&dst_rect, 0, sizeof(MixRect)); } MixVideoRenderParams::~MixVideoRenderParams() { - if (NULL != clipping_rects) { - g_free(clipping_rects); - clipping_rects = NULL; - } - if(NULL != mVa_cliprects) { - g_free(mVa_cliprects); - mVa_cliprects = NULL; - } - number_of_clipping_rects = 0; - if (NULL != display) { - mix_display_unref(display); - display = NULL; - } + if (NULL != clipping_rects) { + free(clipping_rects); + clipping_rects = NULL; + } + if (NULL != mVa_cliprects) { + free(mVa_cliprects); + mVa_cliprects = NULL; + } + number_of_clipping_rects = 0; + if (NULL != display) { + mix_display_unref(display); + display = NULL; + } } -gboolean MixVideoRenderParams::copy(MixParams *target) const { - if (NULL == target) return FALSE; - MixVideoRenderParams *this_target = MIX_VIDEORENDERPARAMS(target); - MIX_RESULT mix_result = MIX_RESULT_FAIL; - - if (this_target == this) { - return TRUE; - } - - if(NULL != this_target) { - mix_result = mix_videorenderparams_set_display(this_target, display); - if (MIX_RESULT_SUCCESS != mix_result) { - return FALSE; - } - - mix_result = mix_videorenderparams_set_clipping_rects(this_target, - clipping_rects, number_of_clipping_rects); - - if (MIX_RESULT_SUCCESS != mix_result) { - return FALSE; - } - - this_target->src_rect = src_rect; - this_target->dst_rect = dst_rect; - - } - return MixParams::copy(target); +bool MixVideoRenderParams::copy(MixParams *target) const { + if (NULL == target) return FALSE; + MixVideoRenderParams *this_target = MIX_VIDEORENDERPARAMS(target); + MIX_RESULT mix_result = MIX_RESULT_FAIL; + + if (this_target == this) { + return TRUE; + } + + if (NULL != this_target) { + mix_result = mix_videorenderparams_set_display(this_target, display); + if (MIX_RESULT_SUCCESS != mix_result) { + return FALSE; + } + + mix_result = mix_videorenderparams_set_clipping_rects(this_target, + clipping_rects, number_of_clipping_rects); + + if (MIX_RESULT_SUCCESS != mix_result) { + return FALSE; + } + + this_target->src_rect = src_rect; + this_target->dst_rect = dst_rect; + + } + return MixParams::copy(target); } -gboolean MixVideoRenderParams::equal(MixParams* obj) const { - gboolean ret = FALSE; - MixVideoRenderParams *this_obj = MIX_VIDEORENDERPARAMS(obj); - if (NULL != this_obj) { - // Deep compare - if (mix_display_equal(MIX_DISPLAY(display), MIX_DISPLAY( - this_obj->display)) && mix_rect_equal(src_rect, - this_obj->src_rect) && mix_rect_equal(dst_rect, - this_obj->dst_rect) && 
number_of_clipping_rects - == this_obj->number_of_clipping_rects && memcmp( - (guchar *) number_of_clipping_rects, - (guchar *) this_obj->number_of_clipping_rects, - number_of_clipping_rects) == 0) { - // members within this scope equal. chaining up. - ret = MixParams::equal(obj); - } - } - return ret; +bool MixVideoRenderParams::equal(MixParams* obj) const { + bool ret = FALSE; + MixVideoRenderParams *this_obj = MIX_VIDEORENDERPARAMS(obj); + if (NULL != this_obj) { + // Deep compare + if (mix_display_equal(MIX_DISPLAY(display), MIX_DISPLAY( + this_obj->display)) && mix_rect_equal(src_rect, + this_obj->src_rect) && mix_rect_equal(dst_rect, + this_obj->dst_rect) && number_of_clipping_rects + == this_obj->number_of_clipping_rects && memcmp( + (uchar *) clipping_rects, + (uchar *) this_obj->clipping_rects, + number_of_clipping_rects * sizeof(MixRect)) == 0) { + // members within this scope equal. chaining up. + ret = MixParams::equal(obj); + } + } + return ret; } MixParams* MixVideoRenderParams::dup() const { - MixParams *ret = NULL; - MixVideoRenderParams *duplicate = mix_videorenderparams_new(); - if (copy(duplicate)) { - ret = duplicate; - } else { - mix_videorenderparams_unref(duplicate); - } - return ret; + MixParams *ret = NULL; + MixVideoRenderParams *duplicate = mix_videorenderparams_new(); + if (copy(duplicate)) { + ret = duplicate; + } else { + mix_videorenderparams_unref(duplicate); + } + return ret; } MIX_RESULT MixVideoRenderParams::set_clipping_rects( - MixRect* clipping_rects, - guint number_of_clipping_rects) { - - if (this->clipping_rects) { - g_free(this->clipping_rects); - this->clipping_rects = NULL; - this->number_of_clipping_rects = 0; - } - - if(this->mVa_cliprects) { - g_free(this->mVa_cliprects); - this->mVa_cliprects = NULL; - } - - if ((NULL == clipping_rects) && (0 != number_of_clipping_rects)) { - this->clipping_rects = reinterpret_cast<MixRect*>(g_memdup(clipping_rects, - number_of_clipping_rects * sizeof(MixRect))); - if (NULL == this->clipping_rects) { - return MIX_RESULT_NO_MEMORY; - } - this->number_of_clipping_rects = number_of_clipping_rects; - - /* create VARectangle list */ - this->mVa_cliprects = reinterpret_cast<VARectangle*>(g_malloc(number_of_clipping_rects * sizeof(VARectangle))); - if (NULL == this->mVa_cliprects) { - return MIX_RESULT_NO_MEMORY; - } - - for (guint idx = 0; idx < number_of_clipping_rects; ++idx) { - this->mVa_cliprects[idx].x = clipping_rects[idx].x; - this->mVa_cliprects[idx].y = clipping_rects[idx].y; - this->mVa_cliprects[idx].width = clipping_rects[idx].width; - this->mVa_cliprects[idx].height = clipping_rects[idx].height; - } - } - - return MIX_RESULT_SUCCESS; + MixRect* clipping_rects, + uint number_of_clipping_rects) { + + if (this->clipping_rects) { + free(this->clipping_rects); + this->clipping_rects = NULL; + this->number_of_clipping_rects = 0; + } + + if (this->mVa_cliprects) { + free(this->mVa_cliprects); + this->mVa_cliprects = NULL; + } + + if ((NULL != clipping_rects) && (0 != number_of_clipping_rects)) { +// this->clipping_rects = reinterpret_cast<MixRect*>(g_memdup(clipping_rects, number_of_clipping_rects * sizeof(MixRect))); + this->clipping_rects = (MixRect*)malloc(number_of_clipping_rects * sizeof(MixRect)); + if (NULL == this->clipping_rects) { + return MIX_RESULT_NO_MEMORY; + } + memcpy(this->clipping_rects, clipping_rects, number_of_clipping_rects * sizeof(MixRect)); + this->number_of_clipping_rects = number_of_clipping_rects; + + /* create VARectangle list */ + this->mVa_cliprects = reinterpret_cast<VARectangle*>(malloc(number_of_clipping_rects * sizeof(VARectangle))); + if (NULL == this->mVa_cliprects) { + return MIX_RESULT_NO_MEMORY; + } + + for (uint idx = 0; idx < number_of_clipping_rects; ++idx) { + this->mVa_cliprects[idx].x = clipping_rects[idx].x; + this->mVa_cliprects[idx].y = clipping_rects[idx].y; + this->mVa_cliprects[idx].width = clipping_rects[idx].width; + this->mVa_cliprects[idx].height = clipping_rects[idx].height; + } + } + + return MIX_RESULT_SUCCESS; } -MIX_RESULT MixVideoRenderParams::get_clipping_rects(MixRect ** clipping_rects, - guint* number_of_clipping_rects) { - if (NULL == clipping_rects || NULL == number_of_clipping_rects) - return MIX_RESULT_NULL_PTR; - - *clipping_rects = NULL; - *number_of_clipping_rects = 0; - - if ((NULL != this->clipping_rects) && (0 != this->number_of_clipping_rects)) { - *clipping_rects = reinterpret_cast<MixRect*>(g_memdup(this->clipping_rects, - this->number_of_clipping_rects * sizeof(MixRect))); - if (NULL == *clipping_rects) { - return MIX_RESULT_NO_MEMORY; - } - *number_of_clipping_rects = this->number_of_clipping_rects; - } - return MIX_RESULT_SUCCESS; +MIX_RESULT MixVideoRenderParams::get_clipping_rects(MixRect ** clipping_rects, + uint* number_of_clipping_rects) { + if (NULL == clipping_rects || NULL == number_of_clipping_rects) + return MIX_RESULT_NULL_PTR; + + *clipping_rects = NULL; + *number_of_clipping_rects = 0; + + if ((NULL != this->clipping_rects) && (0 != this->number_of_clipping_rects)) { +// *clipping_rects = reinterpret_cast<MixRect*>(g_memdup(this->clipping_rects, this->number_of_clipping_rects * sizeof(MixRect))); + *clipping_rects = (MixRect*)malloc(this->number_of_clipping_rects * sizeof(MixRect)); + if (NULL == *clipping_rects) { + return MIX_RESULT_NO_MEMORY; + } + memcpy(*clipping_rects, this->clipping_rects, this->number_of_clipping_rects * sizeof(MixRect)); + *number_of_clipping_rects = this->number_of_clipping_rects; + } + return MIX_RESULT_SUCCESS; } -MIX_RESULT MixVideoRenderParams::get_va_cliprects(VARectangle ** va_cliprects, - guint* number_of_cliprects) { - if (NULL == va_cliprects || NULL == number_of_cliprects) - return MIX_RESULT_NULL_PTR; +MIX_RESULT MixVideoRenderParams::get_va_cliprects(VARectangle ** va_cliprects, + uint* number_of_cliprects) { + if (NULL == va_cliprects || NULL == number_of_cliprects) + return MIX_RESULT_NULL_PTR; - *va_cliprects = NULL; - *number_of_cliprects = 0; + *va_cliprects = NULL; + *number_of_cliprects = 0; - if ((NULL != mVa_cliprects) && (0 != number_of_clipping_rects)) { - *va_cliprects = mVa_cliprects; - *number_of_cliprects = number_of_clipping_rects; - } - return MIX_RESULT_SUCCESS; + if ((NULL != mVa_cliprects) && (0 != number_of_clipping_rects)) { + *va_cliprects = mVa_cliprects; + *number_of_cliprects = number_of_clipping_rects; + } + return MIX_RESULT_SUCCESS; } MixVideoRenderParams * mix_videorenderparams_new(void) { - return new MixVideoRenderParams(); + return new MixVideoRenderParams(); } MixVideoRenderParams * mix_videorenderparams_ref(MixVideoRenderParams * mix) { - if (NULL != mix) - mix->Ref(); - return mix; + if (NULL != mix) + mix->Ref(); + return mix; } -gboolean mix_rect_equal(MixRect rc1, MixRect rc2) { - if (rc1.x == rc2.x && rc1.y == rc2.y && rc1.width == rc2.width - && rc1.height == rc2.height) { - return TRUE; - } - return FALSE; +bool mix_rect_equal(MixRect rc1, MixRect rc2) { + if (rc1.x == rc2.x && rc1.y == rc2.y && rc1.width == rc2.width + && rc1.height == rc2.height) { + return TRUE; + } + return FALSE; } /* TODO: Add getters and setters for other properties. */
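/* Illustrative sketch of the clip-rect path implemented above (hypothetical
   caller named rp, error checks omitted): set_clipping_rects() deep-copies the
   MixRect array and mirrors it into a VARectangle array, so the internal
   getter can hand the list straight to libva:

       MixRect rc;
       rc.x = 0; rc.y = 0; rc.width = 320; rc.height = 240;
       mix_videorenderparams_set_clipping_rects(rp, &rc, 1);
       VARectangle *va_rects = NULL;
       uint n = 0;
       mix_videorenderparams_get_cliprects_internal(rp, &va_rects, &n);
       // va_rects aliases the internal copy owned by rp; do not free it.
*/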
/* The following is just an example, not implemented yet. */ MIX_RESULT mix_videorenderparams_set_display( - MixVideoRenderParams * obj, MixDisplay * display) { - MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj); - if (obj->display) { - mix_display_unref(obj->display); - obj->display = NULL; - } - /* dup */ - if (display) { - obj->display = mix_display_ref(display); - } - return MIX_RESULT_SUCCESS; + MixVideoRenderParams * obj, MixDisplay * display) { + MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj); + if (obj->display) { + mix_display_unref(obj->display); + obj->display = NULL; + } + /* dup */ + if (display) { + obj->display = mix_display_ref(display); + } + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videorenderparams_get_display( - MixVideoRenderParams * obj, MixDisplay ** display) { - MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, display); - /* dup? */ - if (obj->display) { - *display = mix_display_ref(obj->display); - } - return MIX_RESULT_SUCCESS; + MixVideoRenderParams * obj, MixDisplay ** display) { + MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, display); + /* dup? */ + if (obj->display) { + *display = mix_display_ref(obj->display); + } + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videorenderparams_set_src_rect( - MixVideoRenderParams * obj, MixRect src_rect) { - MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj); - obj->src_rect = src_rect; - return MIX_RESULT_SUCCESS; + MixVideoRenderParams * obj, MixRect src_rect) { + MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj); + obj->src_rect = src_rect; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videorenderparams_get_src_rect( - MixVideoRenderParams * obj, MixRect * src_rect) { - MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, src_rect); - *src_rect = obj->src_rect; - return MIX_RESULT_SUCCESS; + MixVideoRenderParams * obj, MixRect * src_rect) { + MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, src_rect); + *src_rect = obj->src_rect; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videorenderparams_set_dest_rect( - MixVideoRenderParams * obj, MixRect dst_rect) { - MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj); - obj->dst_rect = dst_rect; - return MIX_RESULT_SUCCESS; + MixVideoRenderParams * obj, MixRect dst_rect) { + MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj); + obj->dst_rect = dst_rect; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videorenderparams_get_dest_rect( - MixVideoRenderParams * obj, MixRect * dst_rect) { - MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, dst_rect); - *dst_rect = obj->dst_rect; - return MIX_RESULT_SUCCESS; + MixVideoRenderParams * obj, MixRect * dst_rect) { + MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, dst_rect); + *dst_rect = obj->dst_rect; + return MIX_RESULT_SUCCESS; } MIX_RESULT mix_videorenderparams_set_clipping_rects( - MixVideoRenderParams * obj, MixRect* clipping_rects, - guint number_of_clipping_rects) { - if (NULL == obj) - return MIX_RESULT_NULL_PTR; - return obj->set_clipping_rects(clipping_rects, number_of_clipping_rects); + MixVideoRenderParams * obj, MixRect* clipping_rects, + uint number_of_clipping_rects) { + if (NULL == obj) + return MIX_RESULT_NULL_PTR; + return obj->set_clipping_rects(clipping_rects, number_of_clipping_rects); } MIX_RESULT mix_videorenderparams_get_clipping_rects( - MixVideoRenderParams * obj, MixRect ** clipping_rects, - guint* number_of_clipping_rects) { - if (NULL == obj) - return MIX_RESULT_NULL_PTR; - return obj->get_clipping_rects(clipping_rects, number_of_clipping_rects); + MixVideoRenderParams * obj, MixRect ** clipping_rects, + uint* number_of_clipping_rects) { + if
(NULL == obj) + return MIX_RESULT_NULL_PTR; + return obj->get_clipping_rects(clipping_rects, number_of_clipping_rects); } /* The mixvideo internal method */ MIX_RESULT mix_videorenderparams_get_cliprects_internal( - MixVideoRenderParams * obj, VARectangle ** va_cliprects, - guint* number_of_cliprects) { - if (NULL == obj) - return MIX_RESULT_NULL_PTR; - return obj->get_va_cliprects(va_cliprects, number_of_cliprects);; + MixVideoRenderParams * obj, VARectangle ** va_cliprects, + uint* number_of_cliprects) { + if (NULL == obj) + return MIX_RESULT_NULL_PTR; + return obj->get_va_cliprects(va_cliprects, number_of_cliprects); } /* TODO: implement properties' setters and getters */ diff --git a/mix_video/src/mixvideorenderparams.h b/mix_video/src/mixvideorenderparams.h index f841cb6..39d2683 100644 --- a/mix_video/src/mixvideorenderparams.h +++ b/mix_video/src/mixvideorenderparams.h @@ -37,62 +37,62 @@ */ class MixVideoRenderParams : public MixParams { public: - MixVideoRenderParams(); - virtual ~MixVideoRenderParams(); - virtual gboolean copy(MixParams *target) const; - virtual gboolean equal(MixParams*) const; - virtual MixParams* dup() const; - - MIX_RESULT set_clipping_rects(MixRect* clipping_rects, - guint number_of_clipping_rects); - MIX_RESULT get_clipping_rects(MixRect ** clipping_rects, - guint* number_of_clipping_rects); - MIX_RESULT get_va_cliprects(VARectangle ** va_cliprects, - guint* number_of_cliprects); + MixVideoRenderParams(); + virtual ~MixVideoRenderParams(); + virtual bool copy(MixParams *target) const; + virtual bool equal(MixParams*) const; + virtual MixParams* dup() const; + + MIX_RESULT set_clipping_rects(MixRect* clipping_rects, + uint number_of_clipping_rects); + MIX_RESULT get_clipping_rects(MixRect ** clipping_rects, + uint* number_of_clipping_rects); + MIX_RESULT get_va_cliprects(VARectangle ** va_cliprects, + uint* number_of_cliprects); public: - /*< public > */ - /* Pointer to a MixDisplay object - * such as MixDisplayX11 */ - MixDisplay *display; - - /* MixRect object to define offset, - * height and width of source image */ - MixRect src_rect; - - /* MixRect object to define offset, - * height and width of the display - * destination */ - MixRect dst_rect; - - /* Array of clipping rectangles - * to be applied */ - MixRect *clipping_rects; - - /* Number of clipping rectangles - * in clipping_rects */ - guint number_of_clipping_rects; - - /* Post processing parameters */ - guint post_proc; - - /* Reserved */ - gpointer reserved; - - /* Reserved for future use */ - gpointer reserved1; - - /* Reserved for future use */ - gpointer reserved2; - - /* Reserved for future use */ - gpointer reserved3; - - /* Reserved for future use */ - gpointer reserved4; + /*< public > */ + /* Pointer to a MixDisplay object + * such as MixDisplayX11 */ + MixDisplay *display; + + /* MixRect object to define offset, + * height and width of source image */ + MixRect src_rect; + + /* MixRect object to define offset, + * height and width of the display + * destination */ + MixRect dst_rect; + + /* Array of clipping rectangles + * to be applied */ + MixRect *clipping_rects; + + /* Number of clipping rectangles + * in clipping_rects */ + uint number_of_clipping_rects; + + /* Post processing parameters */ + uint post_proc; + + /* Reserved */ + void* reserved; + + /* Reserved for future use */ + void* reserved1; + + /* Reserved for future use */ + void* reserved2; + + /* Reserved for future use */ + void* reserved3; + + /* Reserved for future use */ + void* reserved4; private: - VARectangle
*mVa_cliprects; + VARectangle *mVa_cliprects; }; @@ -128,96 +128,96 @@ MixVideoRenderParams *mix_videorenderparams_ref(MixVideoRenderParams * mix); /** * mix_videorenderparams_set_display: * @obj: #MixVideoRenderParams object - * @display: #MixDisplay object + * @display: #MixDisplay object * @returns: Common Video Error Return Codes * - * Set MixDisplay Object + * Set MixDisplay Object */ MIX_RESULT mix_videorenderparams_set_display( - MixVideoRenderParams * obj, MixDisplay * display); + MixVideoRenderParams * obj, MixDisplay * display); /** * mix_videorenderparams_get_display: * @obj: #MixVideoRenderParams object - * @display: pointer to #MixDisplay object + * @display: pointer to #MixDisplay object * @returns: Common Video Error Return Codes * - * Get MixDisplay Object + * Get MixDisplay Object */ MIX_RESULT mix_videorenderparams_get_display( - MixVideoRenderParams * obj, MixDisplay ** display); + MixVideoRenderParams * obj, MixDisplay ** display); /** * mix_videorenderparams_set_src_rect: * @obj: #MixVideoRenderParams object - * @src_rect: MixRect object to define offset, height and width of source image + * @src_rect: MixRect object to define offset, height and width of source image * @returns: Common Video Error Return Codes * - * Set source rectangle + * Set source rectangle */ MIX_RESULT mix_videorenderparams_set_src_rect( - MixVideoRenderParams * obj, MixRect src_rect); + MixVideoRenderParams * obj, MixRect src_rect); /** * mix_videorenderparams_get_src_rect: * @obj: #MixVideoRenderParams object - * @src_rect: Source rectangle to be returned + * @src_rect: Source rectangle to be returned * @returns: Common Video Error Return Codes * - * Get source rectangle + * Get source rectangle */ MIX_RESULT mix_videorenderparams_get_src_rect( - MixVideoRenderParams * obj, MixRect * src_rect); + MixVideoRenderParams * obj, MixRect * src_rect); /** * mix_videorenderparams_set_dest_rect: * @obj: #MixVideoRenderParams object - * @dst_rect: MixRect object to define offset, height and width of the display destination + * @dst_rect: MixRect object to define offset, height and width of the display destination * @returns: Common Video Error Return Codes * - * Set destination rectangle + * Set destination rectangle */ MIX_RESULT mix_videorenderparams_set_dest_rect( - MixVideoRenderParams * obj, MixRect dst_rect); + MixVideoRenderParams * obj, MixRect dst_rect); /** * mix_videorenderparams_get_dest_rect: * @obj: #MixVideoRenderParams object - * @dst_rect: MixRect object to define offset, height and width of the display destination + * @dst_rect: MixRect object to define offset, height and width of the display destination * @returns: Common Video Error Return Codes * - * Get destination rectangle + * Get destination rectangle */ MIX_RESULT mix_videorenderparams_get_dest_rect( - MixVideoRenderParams * obj, MixRect * dst_rect); + MixVideoRenderParams * obj, MixRect * dst_rect); /** * mix_videorenderparams_set_clipping_rects: * @obj: #MixVideoRenderParams object - * @clipping_rects: Array of clipping rectangles to be applied - * @number_of_clipping_rects: Number of clipping rectangles in clipping_rects + * @clipping_rects: Array of clipping rectangles to be applied + * @number_of_clipping_rects: Number of clipping rectangles in clipping_rects * @returns: Common Video Error Return Codes * - * Set clipping rectangles + * Set clipping rectangles */ MIX_RESULT mix_videorenderparams_set_clipping_rects( - MixVideoRenderParams * obj, MixRect* clipping_rects, guint number_of_clipping_rects); + MixVideoRenderParams
* obj, MixRect* clipping_rects, uint number_of_clipping_rects); /** * mix_videorenderparams_get_clipping_rects: * @obj: #MixVideoRenderParams object - * @clipping_rects: Array of clipping rectangles returned - * @number_of_clipping_rects: Number of clipping rectangles in clipping_rects returned + * @clipping_rects: Array of clipping rectangles returned + * @number_of_clipping_rects: Number of clipping rectangles in clipping_rects returned * @returns: Common Video Error Return Codes * * Get clipping rectangles - * - * + * + * * DO NOT free clipping_rects! - * + * */ MIX_RESULT mix_videorenderparams_get_clipping_rects( - MixVideoRenderParams * obj, MixRect ** clipping_rects, guint* number_of_clipping_rects); + MixVideoRenderParams * obj, MixRect ** clipping_rects, uint* number_of_clipping_rects); /* TODO: Add getters and setters for other properties */ diff --git a/mix_video/src/mixvideorenderparams_internal.h b/mix_video/src/mixvideorenderparams_internal.h index 14215a0..b1b3620 100644 --- a/mix_video/src/mixvideorenderparams_internal.h +++ b/mix_video/src/mixvideorenderparams_internal.h @@ -13,9 +13,9 @@ /* Internal function */ MIX_RESULT mix_videorenderparams_get_cliprects_internal( - MixVideoRenderParams * obj, - VARectangle ** va_cliprects, - guint* number_of_cliprects); + MixVideoRenderParams * obj, + VARectangle ** va_cliprects, + uint* number_of_cliprects); diff --git a/mix_video/src/mixvideothread.cpp b/mix_video/src/mixvideothread.cpp index b9a92e0..6ee1524 100644 --- a/mix_video/src/mixvideothread.cpp +++ b/mix_video/src/mixvideothread.cpp @@ -10,41 +10,41 @@ * SECTION:mixvideoinitparams * @short_description: MI-X Video Initialization Parameters * - * The MixVideoInitParams object will be created by the MMF/App - * and provided in the mix_video_initialize() function. - * The get and set methods for the properties will be available for + * The MixVideoInitParams object will be created by the MMF/App + * and provided in the mix_video_initialize() function. + * The get and set methods for the properties will be available for * the caller to set and get information used at initialization time. 
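 *
 * The MixVideoMutex defined below is a thin pthread wrapper; a minimal usage
 * sketch (illustrative only):
 * |[
 * MixVideoMutex m;
 * m.lock();
 * // ... touch state shared with another thread ...
 * m.unlock();
 * if (m.tryLock() == 0) { // 0 on success, a negative errno otherwise
 *     m.unlock();
 * }
 * ]|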
*/ #include "mixvideothread.h" MixVideoMutex::MixVideoMutex() { - pthread_mutex_init(&mMutex, NULL); + pthread_mutex_init(&mMutex, NULL); } MixVideoMutex::MixVideoMutex(const char* name) { - pthread_mutex_init(&mMutex, NULL); + pthread_mutex_init(&mMutex, NULL); } MixVideoMutex::MixVideoMutex(int type, const char* name) { - if (type == SHARED) { - pthread_mutexattr_t attr; - pthread_mutexattr_init(&attr); - pthread_mutexattr_setpshared(&attr, PTHREAD_PROCESS_SHARED); - pthread_mutex_init(&mMutex, &attr); - pthread_mutexattr_destroy(&attr); - } else { - pthread_mutex_init(&mMutex, NULL); - } + if (type == SHARED) { + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_setpshared(&attr, PTHREAD_PROCESS_SHARED); + pthread_mutex_init(&mMutex, &attr); + pthread_mutexattr_destroy(&attr); + } else { + pthread_mutex_init(&mMutex, NULL); + } } MixVideoMutex::~MixVideoMutex() { - pthread_mutex_destroy(&mMutex); + pthread_mutex_destroy(&mMutex); } int MixVideoMutex::lock() { - return -pthread_mutex_lock(&mMutex); + return -pthread_mutex_lock(&mMutex); } void MixVideoMutex::unlock() { - pthread_mutex_unlock(&mMutex); + pthread_mutex_unlock(&mMutex); } int MixVideoMutex::tryLock() { - return -pthread_mutex_trylock(&mMutex); + return -pthread_mutex_trylock(&mMutex); } diff --git a/mix_video/src/mixvideothread.h b/mix_video/src/mixvideothread.h index 5ad36a6..feb9406 100644 --- a/mix_video/src/mixvideothread.h +++ b/mix_video/src/mixvideothread.h @@ -14,28 +14,28 @@ class MixVideoMutex { public: - enum { - PRIVATE = 0, - SHARED = 1 - }; - - MixVideoMutex(); - MixVideoMutex(const char* name); - MixVideoMutex(int type, const char* name = NULL); - ~MixVideoMutex(); - - // lock or unlock the mutex - int lock(); - void unlock(); - // lock if possible; returns 0 on success, error otherwise - int tryLock(); + enum { + PRIVATE = 0, + SHARED = 1 + }; + + MixVideoMutex(); + MixVideoMutex(const char* name); + MixVideoMutex(int type, const char* name = NULL); + ~MixVideoMutex(); + + // lock or unlock the mutex + int lock(); + void unlock(); + // lock if possible; returns 0 on success, error otherwise + int tryLock(); private: - // A mutex cannot be copied - MixVideoMutex(const MixVideoMutex&); - MixVideoMutex& operator = (const MixVideoMutex&); + // A mutex cannot be copied + MixVideoMutex(const MixVideoMutex&); + MixVideoMutex& operator = (const MixVideoMutex&); private: - pthread_mutex_t mMutex; + pthread_mutex_t mMutex; }; diff --git a/mix_video/src/test.cpp b/mix_video/src/test.cpp index 8f9aee5..76bba31 100644 --- a/mix_video/src/test.cpp +++ b/mix_video/src/test.cpp @@ -1,5 +1,5 @@ #include -#include + #include #include "mixvideo.h" #include "mixdisplayx11.h" @@ -7,81 +7,81 @@ int main (int argc, char **argv) { - MIX_RESULT ret; + MIX_RESULT ret; - g_type_init (); + g_type_init (); -/* test MixDisplay */ - { + /* test MixDisplay */ + { - MixDisplayX11 *x11_clone = NULL; - MixDisplayX11 *x11 = mix_displayx11_new (); + MixDisplayX11 *x11_clone = NULL; + MixDisplayX11 *x11 = mix_displayx11_new (); - MixDisplay *base = MIX_DISPLAY (x11); + MixDisplay *base = MIX_DISPLAY (x11); - gboolean flag = MIX_IS_DISPLAYX11 (base); + bool flag = MIX_IS_DISPLAYX11 (base); - Drawable drawable = 1024; + Drawable drawable = 1024; - mix_displayx11_set_drawable (x11, drawable); + mix_displayx11_set_drawable (x11, drawable); -/* clone x11 */ + /* clone x11 */ - x11_clone = (MixDisplayX11 *) mix_display_dup (MIX_DISPLAY (x11)); + x11_clone = (MixDisplayX11 *) mix_display_dup (MIX_DISPLAY (x11)); - base = 
MIX_DISPLAY (x11_clone); + base = MIX_DISPLAY (x11_clone); - flag = MIX_IS_DISPLAYX11 (base); + flag = MIX_IS_DISPLAYX11 (base); - mix_displayx11_get_drawable (x11_clone, &drawable); + mix_displayx11_get_drawable (x11_clone, &drawable); -/* TODO: add more test cases */ + /* TODO: add more test cases */ -/* release */ - mix_display_unref (MIX_DISPLAY (x11)); - mix_display_unref (MIX_DISPLAY (x11_clone)); - g_print ("MixDisplayX11 test is done!\n"); - } + /* release */ + mix_display_unref (MIX_DISPLAY (x11)); + mix_display_unref (MIX_DISPLAY (x11_clone)); + g_print ("MixDisplayX11 test is done!\n"); + } -/* test MixVideoInitParams */ - { - MixVideoInitParams *init_params = mix_videoinitparams_new (); + /* test MixVideoInitParams */ + { + MixVideoInitParams *init_params = mix_videoinitparams_new (); - MixDisplayX11 *x11 = mix_displayx11_new (); - mix_displayx11_set_drawable (x11, 1024); + MixDisplayX11 *x11 = mix_displayx11_new (); + mix_displayx11_set_drawable (x11, 1024); - mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11)); + mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11)); -/* release */ - mix_params_unref (MIX_PARAMS (init_params)); - mix_display_unref (MIX_DISPLAY (x11)); + /* release */ + mix_params_unref (MIX_PARAMS (init_params)); + mix_display_unref (MIX_DISPLAY (x11)); - g_print ("MixVideoInitParams test is done!\n"); - } + g_print ("MixVideoInitParams test is done!\n"); + } -/* test MixVideo */ + /* test MixVideo */ - { - MixVideo *video = mix_video_new (); - MixVideoInitParams *init_params = mix_videoinitparams_new (); - MixDisplayX11 *x11 = mix_displayx11_new (); - MixDrmParams *drm = mix_drmparams_new (); - MixCodecMode mode = MIX_CODEC_MODE_DECODE; + { + MixVideo *video = mix_video_new (); + MixVideoInitParams *init_params = mix_videoinitparams_new (); + MixDisplayX11 *x11 = mix_displayx11_new (); + MixDrmParams *drm = mix_drmparams_new (); + MixCodecMode mode = MIX_CODEC_MODE_DECODE; - mix_displayx11_set_drawable (x11, 1024); - mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11)); + mix_displayx11_set_drawable (x11, 1024); + mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11)); - mix_video_initialize (video, mode, init_params, drm); + mix_video_initialize (video, mode, init_params, drm); -/* TODO: add more test cases */ + /* TODO: add more test cases */ -/* unref the objects. */ + /* unref the objects. 
*/ - mix_params_unref (MIX_PARAMS (init_params)); - mix_params_unref (MIX_PARAMS (drm)); - mix_display_unref (MIX_DISPLAY (x11)); - g_object_unref (G_OBJECT (video)); + mix_params_unref (MIX_PARAMS (init_params)); + mix_params_unref (MIX_PARAMS (drm)); + mix_display_unref (MIX_DISPLAY (x11)); + g_object_unref (G_OBJECT (video)); - g_print ("MixVideo test is done!\n"); - } + g_print ("MixVideo test is done!\n"); + } } diff --git a/mix_video/test/autogen.sh b/mix_video/test/autogen.sh deleted file mode 100644 index 79033fb..0000000 --- a/mix_video/test/autogen.sh +++ /dev/null @@ -1 +0,0 @@ -autoreconf diff --git a/mix_video/test/src/test_framemanager.cpp b/mix_video/test/src/test_framemanager.cpp index f4b8be9..c930737 100644 --- a/mix_video/test/src/test_framemanager.cpp +++ b/mix_video/test/src/test_framemanager.cpp @@ -1,200 +1,200 @@ #include "../../src/mixframemanager.h" -gboolean stop_thread = FALSE; +bool stop_thread = FALSE; GCond* data_cond = NULL; GMutex* data_mutex = NULL; void *deque_function(void *data) { - MixFrameManager *fm = (MixFrameManager *) data; - MIX_RESULT mixresult; - MixVideoFrame *mvf = NULL; - guint64 pts; - while(!stop_thread) { + MixFrameManager *fm = (MixFrameManager *) data; + MIX_RESULT mixresult; + MixVideoFrame *mvf = NULL; + uint64 pts; + while (!stop_thread) { - g_mutex_lock (data_mutex); + g_mutex_lock (data_mutex); - mixresult = mix_framemanager_dequeue(fm, &mvf); - if(mixresult == MIX_RESULT_SUCCESS) { - mixresult = mix_videoframe_get_timestamp(mvf, &pts); - g_print("dequeued timestamp = %"G_GINT64_FORMAT"\n", pts); - /* mix_videoframe_unref(mvf); */ - } else if(mixresult == MIX_RESULT_FRAME_NOTAVAIL) { - g_print("mixresult == MIX_RESULT_FRAME_NOTAVAIL\n"); - g_cond_wait (data_cond, data_mutex); - } + mixresult = mix_framemanager_dequeue(fm, &mvf); + if (mixresult == MIX_RESULT_SUCCESS) { + mixresult = mix_videoframe_get_timestamp(mvf, &pts); + g_print("dequeued timestamp = %"UINT64_FORMAT"\n", pts); + /* mix_videoframe_unref(mvf); */ + } else if (mixresult == MIX_RESULT_FRAME_NOTAVAIL) { + g_print("mixresult == MIX_RESULT_FRAME_NOTAVAIL\n"); + g_cond_wait (data_cond, data_mutex); + } - g_mutex_unlock (data_mutex); + g_mutex_unlock (data_mutex); - } + } } void shuffle(GPtrArray *list) { - guint idx, jdx; - guint len = list->len; - for (idx = 0; idx < len - 1; idx++) { - jdx = rand() % len; - if (idx != jdx) { - gpointer tmp = g_ptr_array_index(list, jdx); - g_ptr_array_index(list, jdx) = g_ptr_array_index(list, idx); - g_ptr_array_index(list, idx) = tmp; - } - } + uint idx, jdx; + uint len = list->len; + for (idx = 0; idx < len - 1; idx++) { + jdx = rand() % len; + if (idx != jdx) { + void* tmp = g_ptr_array_index(list, jdx); + g_ptr_array_index(list, jdx) = g_ptr_array_index(list, idx); + g_ptr_array_index(list, idx) = tmp; + } + } } int main() { - MIX_RESULT mixresult; - - gint fps_n = 24000; - gint fps_d = 1001; - -/* - gint fps_n = 2500000; - gint fps_d = 104297; -*/ - GPtrArray *fa = NULL; - MixFrameManager *fm = NULL; - MixVideoFrame *mvf = NULL; - MixVideoFrame *mvf_1st = NULL; - - gint idx = 0; - guint64 pts = 0; - - GThread *deque_thread = NULL; - GError *deque_thread_error = NULL; - - /* first ting first */ - g_type_init(); - - /* create frame manager */ - fm = mix_framemanager_new(); - if (!fm) { - goto cleanup; - } + MIX_RESULT mixresult; + + int fps_n = 24000; + int fps_d = 1001; + + /* + int fps_n = 2500000; + int fps_d = 104297; + */ + GPtrArray *fa = NULL; + MixFrameManager *fm = NULL; + MixVideoFrame *mvf = NULL; + MixVideoFrame 
*mvf_1st = NULL; + + int idx = 0; + uint64 pts = 0; + + GThread *deque_thread = NULL; + GError *deque_thread_error = NULL; + + /* first things first */ + g_type_init(); + + /* create frame manager */ + fm = mix_framemanager_new(); + if (!fm) { + goto cleanup; + } - /* initialize frame manager */ - mixresult = mix_framemanager_initialize(fm, - MIX_FRAMEORDER_MODE_DISPLAYORDER, fps_n, fps_d); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } + /* initialize frame manager */ + mixresult = mix_framemanager_initialize(fm, + MIX_FRAMEORDER_MODE_DISPLAYORDER, fps_n, fps_d); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } - /* create frame_array */ - fa = g_ptr_array_sized_new(64); - if (!fa) { - goto cleanup; - } + /* create frame_array */ + fa = g_ptr_array_sized_new(64); + if (!fa) { + goto cleanup; + } - for (idx = 0; idx < 16; idx++) { - /* generate MixVideoFrame */ - mvf = mix_videoframe_new(); - if (!mvf) { - goto cleanup; - } + for (idx = 0; idx < 16; idx++) { + /* generate MixVideoFrame */ + mvf = mix_videoframe_new(); + if (!mvf) { + goto cleanup; + } - pts = idx * G_USEC_PER_SEC * G_GINT64_CONSTANT(1000) * fps_d / fps_n; - mixresult = mix_videoframe_set_timestamp(mvf, pts); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } + pts = idx * G_USEC_PER_SEC * INT64_CONSTANT(1000) * fps_d / fps_n; + mixresult = mix_videoframe_set_timestamp(mvf, pts); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } - g_print("original timestamp = %"G_GINT64_FORMAT"\n", pts); + g_print("original timestamp = %"UINT64_FORMAT"\n", pts); - if (idx == 0) { - mvf_1st = mvf; - } else { - g_ptr_array_add(fa, (gpointer) mvf); - } - } + if (idx == 0) { + mvf_1st = mvf; + } else { + g_ptr_array_add(fa, (void*) mvf); + } + } - /* shuffle the array */ - shuffle( fa); + /* shuffle the array */ + shuffle( fa); - data_mutex = g_mutex_new (); - if(!data_mutex) { - goto cleanup; - } + data_mutex = g_mutex_new (); + if (!data_mutex) { + goto cleanup; + } - data_cond = g_cond_new(); - if(!data_cond) { - goto cleanup; - } + data_cond = g_cond_new(); + if (!data_cond) { + goto cleanup; + } - /* create another thread to dequeue */ - deque_thread = g_thread_create((GThreadFunc) deque_function, (void *) fm, - TRUE, &deque_thread_error); - if (!deque_thread) { - goto cleanup; - } + /* create another thread to dequeue */ + deque_thread = g_thread_create((GThreadFunc) deque_function, (void *) fm, + TRUE, &deque_thread_error); + if (!deque_thread) { + goto cleanup; + } - /* enqueue */ - mixresult = mix_framemanager_enqueue(fm, mvf_1st); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } + /* enqueue */ + mixresult = mix_framemanager_enqueue(fm, mvf_1st); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } - mixresult = mix_videoframe_get_timestamp(mvf_1st, &pts); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } - g_print("shuffled timestamp = %"G_GINT64_FORMAT"\n", pts); + mixresult = mix_videoframe_get_timestamp(mvf_1st, &pts); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } + g_print("shuffled timestamp = %"UINT64_FORMAT"\n", pts); - for (idx = 0; idx < fa->len; idx++) { + for (idx = 0; idx < fa->len; idx++) { - g_mutex_lock (data_mutex); + g_mutex_lock (data_mutex); - /* wait for 100ms to enqueue another frame */ - g_usleep(G_USEC_PER_SEC / 10 ); + /* wait for 100ms to enqueue another frame */ + g_usleep(G_USEC_PER_SEC / 10 ); - mvf =
(MixVideoFrame *) g_ptr_array_index(fa, idx); + mixresult = mix_framemanager_enqueue(fm, mvf); - /* wake up deque thread */ - g_cond_signal (data_cond); + /* wake up deque thread */ + g_cond_signal (data_cond); - g_mutex_unlock (data_mutex); + g_mutex_unlock (data_mutex); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } - mixresult = mix_videoframe_get_timestamp(mvf, &pts); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } + mixresult = mix_videoframe_get_timestamp(mvf, &pts); + if (mixresult != MIX_RESULT_SUCCESS) { + goto cleanup; + } - g_print("shuffled timestamp = %"G_GINT64_FORMAT"\n", pts); - } + g_print("shuffled timestamp = %"UINT64_FORMAT"\n", pts); + } - getchar(); + getchar(); - stop_thread = TRUE; + stop_thread = TRUE; - /* wake up deque thread */ - g_cond_signal (data_cond); + /* wake up deque thread */ + g_cond_signal (data_cond); - g_thread_join(deque_thread); + g_thread_join(deque_thread); cleanup: - if(data_mutex) { - g_mutex_free(data_mutex); - } + if (data_mutex) { + g_mutex_free(data_mutex); + } - if(data_cond) { - g_cond_free(data_cond); - } + if (data_cond) { + g_cond_free(data_cond); + } - if (fm) { - mix_framemanager_unref(fm); - } + if (fm) { + mix_framemanager_unref(fm); + } - if (fa) { - g_ptr_array_free(fa, TRUE); - } + if (fa) { + g_ptr_array_free(fa, TRUE); + } - return 0; + return 0; } -- cgit v1.2.3 From 93b1226654de3e476f839ade24aa75d5bff809d9 Mon Sep 17 00:00:00 2001 From: "Liu, Shuo" Date: Thu, 17 Mar 2011 11:29:33 +0800 Subject: add assist files for libmix Change-Id: I0b64c4d245f9f753ec6ae986d92bbf52b999715a Signed-off-by: Liu, Shuo --- mix_common/src/j_hashtable.cpp | 321 +++++++++++++++++++++++++++++++++++++++++ mix_common/src/j_hashtable.h | 52 +++++++ mix_common/src/j_queue.cpp | 131 +++++++++++++++++ mix_common/src/j_queue.h | 33 +++++ mix_common/src/j_slist.cpp | 211 +++++++++++++++++++++++++++ mix_common/src/j_slist.h | 41 ++++++ mix_common/src/mixtypes.h | 49 +++++++ 7 files changed, 838 insertions(+) create mode 100644 mix_common/src/j_hashtable.cpp create mode 100644 mix_common/src/j_hashtable.h create mode 100644 mix_common/src/j_queue.cpp create mode 100644 mix_common/src/j_queue.h create mode 100644 mix_common/src/j_slist.cpp create mode 100644 mix_common/src/j_slist.h create mode 100644 mix_common/src/mixtypes.h diff --git a/mix_common/src/j_hashtable.cpp b/mix_common/src/j_hashtable.cpp new file mode 100644 index 0000000..e086c54 --- /dev/null +++ b/mix_common/src/j_hashtable.cpp @@ -0,0 +1,321 @@ +#include <stdlib.h> +#include <string.h> + +#include <assert.h> + + +/* + * Notice: this is not thread-safe but re-entrant API.
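+ *
+ * A minimal usage sketch (illustrative only; with a NULL hash function the
+ * key pointer itself is used as the hash, as in the lookup/insert paths
+ * below):
+ *
+ * JHashTable *t = j_hash_table_new_full(NULL, NULL, NULL, free);
+ * int *v = (int*)malloc(sizeof(int));
+ * *v = 42;
+ * j_hash_table_insert(t, v, v);
+ * int *found = (int*)j_hash_table_lookup(t, v);
+ * j_hash_table_unref(t); // frees v via the value destroy function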
+ */ +JHashTable* j_hash_table_new_full(JHashFunc hash_func, + JEqualFunc key_equal_func, JDestroyNotify key_destroy_func, + JDestroyNotify value_destroy_func) +{ + JHashTable *pTable = (JHashTable*)malloc(sizeof (JHashTable)); + assert(pTable != NULL); + pTable->hash_func = hash_func; + pTable->key_equal_func = key_equal_func; + pTable->key_destroy_func = key_destroy_func; + pTable->value_destroy_func = value_destroy_func; + + pTable->table_size = INIT_TABLE_SIZE; + pTable->hash_table = (JHashItem**) malloc(sizeof (JHashItem*) * pTable->table_size); + + memset(pTable->hash_table, 0, sizeof(JHashItem*) * pTable->table_size); + + assert(pTable->hash_table != NULL); + pTable->ref_count = 1; + return pTable; +} + +void j_hash_table_unref(JHashTable* pTable) { + assert(pTable != NULL); + assert(pTable->hash_table != NULL); + + pTable->ref_count --; + if (pTable->ref_count == 0) { + j_hash_table_remove_all(pTable); + free(pTable->hash_table); + free(pTable); + } +} + +void j_hash_table_remove_all(JHashTable *pTable) { + int i; + + JHashItem *pItem = NULL; + JHashItem *next = NULL; + + assert(pTable != NULL); + + for (i = 0; i < pTable->table_size; i ++) { + pItem = pTable->hash_table[i]; + while (pItem != NULL) { + next = pItem->next; + if (pTable->key_destroy_func != NULL) pTable->key_destroy_func(pItem->key); + if (pTable->value_destroy_func != NULL) pTable->value_destroy_func(pItem->data); + free(pItem); + pItem = next; + } + pTable->hash_table[i] = NULL; + } +} + +void * j_hash_table_lookup(JHashTable *pTable, void * key) +{ + int i; + int hash_key; + int index; + + assert(pTable != NULL); + assert(pTable->hash_table != NULL); + + JHashItem *pItem = NULL; + JHashItem *next = NULL; + + if (pTable->hash_func != NULL) { + hash_key = pTable->hash_func(key); + } else { + hash_key = (int)key; + } + + index = hash_key % pTable->table_size; + + pItem = pTable->hash_table[index]; + + while (pItem != NULL) { + if (key == pItem->key) break; + pItem = pItem->next; + } + + if (pItem == NULL) return NULL; + + return pItem->data; +} + +void j_hash_table_insert(JHashTable *pTable, void * key, void * data) { + JHashItem *pItem = (JHashItem*) malloc (sizeof (JHashItem)); + JHashItem *pExistItem = NULL; + + int hash_key; + unsigned int index; + + assert (pItem != NULL); + + pItem->key = key; + pItem->data = data; + + if (pTable->hash_func != NULL) { + hash_key = pTable->hash_func(key); + } else { + hash_key = (int)key; + } + + index = hash_key % pTable->table_size; + + pExistItem = pTable->hash_table[index]; + + pItem->next = pExistItem; + + pTable->hash_table[index] = pItem; +} + +int j_hash_table_remove(JHashTable *pTable, void *key) +{ + JHashItem *pItem = NULL; + JHashItem *pPrevItem = NULL; + + int hash_key; + int index; + + assert(pTable != NULL); + + if (pTable->hash_func != NULL) { + hash_key = pTable->hash_func(key); + } else { + hash_key = (int)key; + } + + index = hash_key % pTable->table_size; + + pPrevItem = pItem = pTable->hash_table[index]; + + while (pItem != NULL) { + if (pItem->key == key) break; + pPrevItem = pItem; + pItem = pItem->next; + } + + if (pItem == NULL) { + // not found + return 0; + } + + if (pItem == pTable->hash_table[index]) { + pTable->hash_table[index] = pItem->next; + } else { + pPrevItem->next = pItem->next; + } + + if (pTable->key_destroy_func) { + pTable->key_destroy_func(pItem->key); + } + + if (pTable->value_destroy_func) { + pTable->value_destroy_func(pItem->data); + } + + free(pItem); + return 1; +} + +int j_hash_table_lookup_extended(JHashTable *pTable, + void* 
key, void *orig_key, void *value) +{/* + int i; + int hash_key; + int index; + int j=0; + + assert(pTable != NULL); + assert(pTable->hash_table != NULL); + + JHashItem *pItem = NULL; + JHashItem *next = NULL; + + if (pTable->hash_func != NULL) { + hash_key = pTable->hash_func(key); + } else { + hash_key = key; + } + + index = hash_key % pTable->table_size; + + pItem = pTable->hash_table[index]; + + while (pItem != NULL) { + if (key == pItem->key) break; + pItem = pItem->next; + } + + + if (pItem) + { + if (orig_key) + *orig_key = (void *)pItem->key; + if (value) + *value = (void *)pItem->data; + j = 1; + } + else + j = 0; + */ // Priya: We don't need this implementation for now as we can replace with _lookup instead. + return 0; + +} + +unsigned int j_hash_table_foreach_remove(JHashTable *pTable, JHRFunc func, void *user_data) +{ + JHashItem *pItem = NULL; + JHashItem *pPrevItem = NULL; + + int hash_key; + int i; + unsigned int num_item_removed = 0; + + assert(pTable != NULL); + assert(func != NULL); + + for (i = 0; i < pTable->table_size; i ++ ) { + pPrevItem = pItem = pTable->hash_table[i]; + while (pItem != NULL) { + if (func(pItem->key, pItem->data, user_data)) { + //prev item is same + if (pItem == pTable->hash_table[i]) { + pTable->hash_table[i] = pItem->next; + pPrevItem = NULL; + } else { + pPrevItem->next = pItem->next; + } + + if (pTable->key_destroy_func) { + pTable->key_destroy_func(pItem->key); + } + + if (pTable->value_destroy_func) { + pTable->value_destroy_func(pItem->data); + } + + free(pItem); + num_item_removed ++; + } else { + pPrevItem = pItem; + } + + if (pPrevItem != NULL) { + pItem = pPrevItem->next; + } else { + pItem = pPrevItem = pTable->hash_table[i]; + } + + } + } + + return num_item_removed; +} + + +#ifdef _J_HASH_TABLE_UT_ +#include + +void DestroyKey(void* data) +{ + printf("%d is destroied\n", (int) data); +} + +void DestroyData(void* data) +{ + printf("0x%x(%d) is destroied\n", data, *(int*)data); + free(data); +} + +int testKeynData(void* key, void* data, void* user_data) +{ + return (0 == (((int)*(int*)data) % (unsigned int)user_data)); +} + +int main() { + JHashTable *pTable = j_hash_table_new_full(NULL, + NULL, DestroyKey, DestroyData); + int i; + void *data; + int *p; +#define KEY_TABLE_SIZE (INIT_TABLE_SIZE * 2 - 1) + void* key_table[KEY_TABLE_SIZE]; + for (i = 0; i < KEY_TABLE_SIZE; i ++) { + p = malloc(sizeof(int)); + *p = i; + j_hash_table_insert(pTable, p, p); + key_table[i] = p; + } + + for (i = 0; i < KEY_TABLE_SIZE; i ++) { + data = j_hash_table_lookup(pTable, key_table[i]); + printf("found 0x%x(%d)\n", data, *(int*)data); + } + + int num_elem = 0; + num_elem = j_hash_table_foreach_remove(pTable, testKeynData, 10); + printf("%d elements are removed\n", num_elem); + + int ret; + for (i = 0; i < 10; i ++) { + ret = j_hash_table_remove(pTable, key_table[i]); + printf("key[%d]:0x%x is removed(%d)\n", i, data, ret); + } + + j_hash_table_remove_all(pTable); + j_hash_table_unref(pTable); + return 0; +} +#endif diff --git a/mix_common/src/j_hashtable.h b/mix_common/src/j_hashtable.h new file mode 100644 index 0000000..eb08b1a --- /dev/null +++ b/mix_common/src/j_hashtable.h @@ -0,0 +1,52 @@ +#ifndef __J_HASH_TABLE__ +#define __J_HASH_TABLE__ +#ifdef __cplusplus +extern "C" { +#endif + + typedef unsigned int (*JHashFunc)(void *key); + typedef unsigned int (*JEqualFunc) (void *a, void *b); + typedef void (*JDestroyNotify) (void *data); + + typedef int (*JHRFunc) (void *key, void *value, void *user_data); + + typedef struct JHashItem_s { + struct 
JHashItem_s *next; + void* data; + void* key; + } JHashItem; + +#define INIT_TABLE_SIZE 256 + typedef struct JHashTable_s { + int ref_count; + int table_size; + + JHashFunc hash_func; + JEqualFunc key_equal_func; + JDestroyNotify key_destroy_func; + JDestroyNotify value_destroy_func; + JHashItem **hash_table; + } JHashTable; + + JHashTable* j_hash_table_new_full(JHashFunc hash_func, + JEqualFunc key_equal_func, JDestroyNotify key_destroy_func, + JDestroyNotify value_destroy_func); + + void j_hash_table_unref(JHashTable* pTable); + void j_hash_table_remove_all(JHashTable *pTable); + + void * j_hash_table_lookup(JHashTable *pTable, void * key); + + void j_hash_table_insert(JHashTable *pTable, void * key, void * data); + int j_hash_table_remove(JHashTable *pTable, void *key); + + int j_hash_table_lookup_extended(JHashTable *pTable, + void* lookup_key, void* orig_key, void* value); + + unsigned int j_hash_table_foreach_remove(JHashTable *pTable, + JHRFunc func, void *user_data); + +#ifdef __cplusplus +} +#endif +#endif diff --git a/mix_common/src/j_queue.cpp b/mix_common/src/j_queue.cpp new file mode 100644 index 0000000..301815a --- /dev/null +++ b/mix_common/src/j_queue.cpp @@ -0,0 +1,131 @@ +#include +#include +#include + +int j_queue_is_empty(JQueue* queue) +{ + assert (queue); + assert((((queue->tail + queue->room_size) - queue->head) % queue->room_size) + == queue->element_count); + + return (queue->element_count == 0); +} + +void* j_queue_pop_head(JQueue* queue) +{ + void *ret; + assert (queue); + assert((((queue->tail + queue->room_size) - queue->head) % queue->room_size) + == queue->element_count); + + if (queue->element_count == 0) return NULL; + + ret = queue->rooms[queue->head]; + + queue->head = (queue->head + 1) % queue->room_size; + queue->element_count --; + + if (queue->element_count == 0) { + queue->head = queue->tail = 0; + } + return ret; +} + +void *j_queue_peek_head(JQueue* queue) +{ + void *ret; + assert (queue); + assert((((queue->tail + queue->room_size) - queue->head) % queue->room_size) + == queue->element_count); + + if (queue->element_count == 0) return NULL; + + ret = queue->rooms[queue->head]; + return ret; +} + +void j_queue_free(JQueue* queue) +{ + assert (queue); + assert (queue->rooms); + assert((((queue->tail + queue->room_size) - queue->head) % queue->room_size) + == queue->element_count); + free(queue->rooms); + queue->rooms = NULL; + free(queue); +} + +JQueue* j_queue_new() +{ + JQueue *queue = (JQueue*) malloc(sizeof(JQueue)); + assert (queue != NULL); + queue->room_size = INIT_ROOM_SIZE; + + queue->rooms = (void**) malloc(sizeof(void*) * queue->room_size); + assert (queue->rooms); + queue->head = queue->tail = 0; + queue->element_count = 0; + return queue; +} + +void j_queue_push_tail(JQueue *queue, void *data) +{ + assert((((queue->tail + queue->room_size) - queue->head) % queue->room_size) + == queue->element_count); + + if (queue->element_count == (queue->room_size -1)) { + queue->rooms = (void**) realloc(queue->rooms, sizeof(void*) * queue->room_size * 2); + if (queue->head > queue->tail) { + memcpy(&queue->rooms[0], &queue->rooms[queue->element_count], queue->tail); + queue->tail = queue->head + queue->element_count; + } + + queue->room_size = queue->room_size * 2; + assert(queue->rooms); + } + + queue->rooms[queue->tail] = data; + + queue->element_count ++; + queue->tail = (queue->tail + 1 + queue->room_size) % queue->room_size; +} + +#ifdef _J_QUEUE_UT_ +#include + +int main() { + JQueue *pQueue = j_queue_new(); + int i; + void *data; + int *p; 
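+    /* Note: the asserts throughout j_queue.cpp all check the ring-buffer
+     * invariant element_count == ((tail + room_size) - head) % room_size.
+     * ELEM_TABLE_SIZE below is INIT_ROOM_SIZE * 2 - 1, so the test forces
+     * exactly one realloc-based doubling of room_size before draining. */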
+#define ELEM_TABLE_SIZE (INIT_ROOM_SIZE * 2 - 1) + + for (i = 0; i < ELEM_TABLE_SIZE; i ++) { + j_queue_push_tail(pQueue, i); + } + + printf("queue is empty(%d)\n", j_queue_is_empty(pQueue)); + + for (i = 0; i < ELEM_TABLE_SIZE; i ++) { + data = j_queue_pop_head(pQueue); + printf("elements(%d) poped %d\n", i, (int)data); + } + + printf("queue is empty(%d)\n", j_queue_is_empty(pQueue)); + + int j; + for (j = 0; j < ELEM_TABLE_SIZE; j ++) { + for (i = 0; i < 5; i ++) { + j_queue_push_tail(pQueue, i); + } + + for (i = 0; i < 4; i ++) { + data = j_queue_pop_head(pQueue); + printf("elements(%d) poped %d\n", i, (int)data); + } + } + + j_queue_free(pQueue); + return 0; +} +#endif diff --git a/mix_common/src/j_queue.h b/mix_common/src/j_queue.h new file mode 100644 index 0000000..cebf21e --- /dev/null +++ b/mix_common/src/j_queue.h @@ -0,0 +1,33 @@ +#ifndef __J_QUEUE__ +#define __J_QUEUE__ +#ifdef __cplusplus +extern "C" { +#endif + +#define INIT_ROOM_SIZE 64 + typedef struct JQueue_s { + unsigned int room_size; + void **rooms; + + //point to position for fetch + unsigned int head; + + //point to position for fill + unsigned int tail; + + //to double check the "element number" + unsigned int element_count; + } JQueue; + + int j_queue_is_empty(JQueue* queue); + void* j_queue_pop_head(JQueue* queue); + void *j_queue_peek_head(JQueue* queue); + void j_queue_free(JQueue* queue); + JQueue* j_queue_new(); + void j_queue_push_tail(JQueue *queue, void *data); + +#ifdef __cplusplus +} +#endif +#endif + diff --git a/mix_common/src/j_slist.cpp b/mix_common/src/j_slist.cpp new file mode 100644 index 0000000..3168689 --- /dev/null +++ b/mix_common/src/j_slist.cpp @@ -0,0 +1,211 @@ +#include +#include +#include + +JSList* j_slist_append (JSList* list, void* data) +{ + JSList *item = (JSList*) malloc(sizeof(JSList)); + item->data = data; + item->next = NULL; + + if (list == NULL) { + return item; + } + + JSList *traverse_item = list; + JSList *tail = NULL; + + while (traverse_item != NULL) { + tail = traverse_item; + traverse_item = traverse_item->next; + } + tail->next = item; + + return list; +} + +JSList* j_slist_find (JSList *list, void* data) +{ + JSList *traverse_item = list; + while (traverse_item != NULL) { + if (traverse_item->data == data) break; + traverse_item = traverse_item->next; + } + + return traverse_item; +} + +JSList* j_slist_remove(JSList *list, void* data) +{ + JSList *traverse_item = list; + JSList *prev_item = NULL; + + if (list->data == data) { + list = list->next; + free(traverse_item); + return list; + } + + while (traverse_item != NULL) { + if (traverse_item->data == data) break; + prev_item = traverse_item; + traverse_item = traverse_item->next; + } + + if (traverse_item != NULL) { + assert(prev_item != NULL); // as 1st element is processed @ beginning + prev_item->next = traverse_item->next; + traverse_item->next = NULL; + free(traverse_item); + } + + return list; +} + + +JSList* j_slist_remove_link(JSList *list, JSList* link) +{ + JSList *traverse_item = list; + JSList *tmp; + if (list == link) { + tmp = list->next; + link->next = NULL; +// TED return link->next; + return tmp; + } + + while (traverse_item != NULL) { + if (traverse_item->next == link) break; + traverse_item = traverse_item->next; + } + + if (traverse_item != NULL) { + traverse_item->next = link->next; + } + + link->next = NULL; + return list; +} + +JSList *j_slist_delete_link(JSList *list, JSList *link) +{ + list = j_slist_remove_link(list, link); + free(link); + return list; +} + +JSList *j_slist_concat(JSList* 
list1, JSList *list2)
+{
+    JSList *traverse_item = list1;
+    if (list1 == NULL) {
+        return list2;
+    }
+
+    while (traverse_item->next != NULL) {
+        traverse_item = traverse_item->next;
+    }
+
+    traverse_item->next = list2;
+
+    return list1;
+}
+
+unsigned int j_slist_length (JSList *list)
+{
+    unsigned int list_length = 0;
+    JSList *traverse_item = list;
+    while (traverse_item != NULL) {
+        list_length ++;
+        traverse_item = traverse_item->next;
+    }
+    return list_length;
+}
+
+void *j_slist_nth_data(JSList *list, unsigned int n)
+{
+    unsigned int count = n;
+    JSList *traverse_item = list;
+    while (traverse_item != NULL) {
+        if (count == 0) break;
+        traverse_item = traverse_item->next;
+        count --;
+    }
+    return traverse_item? traverse_item->data : NULL;
+}
+
+JSList* j_slist_find_custom(JSList *list, void* data, JCompareFunc func)
+{
+    JSList *traverse_item = list;
+    while (traverse_item != NULL) {
+        if (func(traverse_item->data, data) != 0) break;
+        traverse_item = traverse_item->next;
+    }
+
+    return traverse_item;
+}
+
+void j_slist_foreach(JSList *list, JFunc func, void* userdata)
+{
+    JSList *traverse_item = list;
+    while (traverse_item != NULL) {
+        func(traverse_item->data, userdata);
+        traverse_item = traverse_item->next;
+    }
+}
+
+#ifdef _J_SLIST_UT_
+#include <stdio.h>
+
+void testData(void* data, void* user_data)
+{
+    printf("test (%d)\n", (int) data);
+}
+
+int main() {
+    JSList *pList = NULL;
+    JSList *pList2 = NULL;
+    int i;
+
+#define KEY_TABLE_SIZE 20
+    for (i = 0; i < KEY_TABLE_SIZE; i ++) {
+        pList = j_slist_append(pList, (void*) i);
+    }
+
+    assert(KEY_TABLE_SIZE == j_slist_length(pList));
+    pList2 = NULL;
+    for (i = 0; i < KEY_TABLE_SIZE; i ++) {
+        pList2 = j_slist_find(pList, (void*) i);
+
+        if (pList2) {
+            printf("Found data(%d)\n", i);
+        }
+    }
+
+    for (i = 0; i < KEY_TABLE_SIZE; i ++) {
+        /* j_slist_nth_data() returns the stored data pointer, not a node */
+        void *pData = j_slist_nth_data(pList, i);
+        if (pData) {
+            printf("Found data(%d) @ %d\n", (int) pData, i);
+        }
+    }
+
+    pList2 = NULL;
+    for (i = KEY_TABLE_SIZE; i > 0; i --) {
+        pList2 = j_slist_append(pList2, (void*) i);
+    }
+
+    j_slist_foreach(pList, testData, 0);
+    printf("*************************************************************\n");
+    pList = j_slist_concat(pList, pList2);
+
+    j_slist_foreach(pList, testData, 0);
+    printf("*************************************************************\n");
+    for (i = KEY_TABLE_SIZE; i > 0; i --) {
+        pList = j_slist_remove(pList, (void*) i);
+    }
+
+    j_slist_foreach(pList, testData, 0);
+
+    return 0;
+}
+#endif
diff --git a/mix_common/src/j_slist.h b/mix_common/src/j_slist.h
new file mode 100644
index 0000000..d2f866b
--- /dev/null
+++ b/mix_common/src/j_slist.h
@@ -0,0 +1,41 @@
+#ifndef __J_SLIST_H__
+#define __J_SLIST_H__
+#ifdef __cplusplus
extern "C" {
+#endif
+
+    typedef struct JSList_s {
+        struct JSList_s *next;
+        void* data;
+    } JSList;
+
+    typedef int (*JCompareFunc)(void* data1, void* data2);
+    typedef void (*JFunc)(void* data, void* userdata);
+
+    JSList* j_slist_append (JSList* list, void* data);
+
+    JSList* j_slist_find (JSList *list, void* data);
+
+    JSList* j_slist_remove(JSList *list, void* data);
+
+    JSList* j_slist_remove_link(JSList *list, JSList* link);
+
+    JSList *j_slist_delete_link(JSList *list, JSList *link);
+
+    JSList *j_slist_concat(JSList* list1, JSList *list2);
+
+    unsigned int j_slist_length (JSList *list);
+
+    void *j_slist_nth_data(JSList *list, unsigned int n);
+
+    JSList* j_slist_find_custom(JSList *list, void* data, JCompareFunc func);
+
+    void j_slist_foreach(JSList *list, JFunc func, void* userdata);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
+
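A minimal usage sketch of the j_slist API above (not part of the patch; it assumes only j_slist.h plus linking against j_slist.cpp, and mirrors the unit tests' 32-bit idiom of storing small integers directly in the void* data field). j_slist_find compares the stored pointers themselves, which is why the integer-in-pointer idiom works here:

#include <stdio.h>
#include "j_slist.h"

static void print_node(void* data, void* userdata)
{
    printf("%d ", (int) data);          /* int stored directly in the pointer */
}

int main(void)
{
    JSList *list = NULL;
    int i;

    for (i = 1; i <= 4; i++)
        list = j_slist_append(list, (void*) i);     /* list is now 1 2 3 4 */

    printf("length = %u\n", j_slist_length(list));  /* 4 */

    if (j_slist_find(list, (void*) 3) != NULL)      /* pointer-equality find */
        printf("3 is in the list\n");

    printf("third element = %d\n", (int) j_slist_nth_data(list, 2));  /* 3 */

    list = j_slist_remove(list, (void*) 1);         /* unlinks and frees head */

    j_slist_foreach(list, print_node, NULL);        /* 2 3 4 */
    printf("\n");

    while (list != NULL)                            /* drain to free all nodes */
        list = j_slist_remove(list, list->data);

    return 0;
}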
+ diff --git a/mix_common/src/mixtypes.h b/mix_common/src/mixtypes.h new file mode 100644 index 0000000..330c623 --- /dev/null +++ b/mix_common/src/mixtypes.h @@ -0,0 +1,49 @@ +#ifndef __MIX_TYPES_H__ +#define __MIX_TYPES_H__ + +/* Provide type definitions for commonly used types. + * These are useful because a "int8" can be adjusted + * to be 1 byte (8 bits) on all platforms. Similarly and + * more importantly, "int32" can be adjusted to be + * 4 bytes (32 bits) on all platforms. + */ +typedef unsigned char uchar; +typedef unsigned short ushort; +typedef unsigned long ulong; +typedef unsigned int uint; + +typedef signed char int8; +typedef unsigned char uint8; +typedef signed short int16; +typedef unsigned short uint16; + +typedef signed int int32; +typedef unsigned int uint32; + +typedef signed long long int64; +typedef unsigned long long uint64; + +#define TRUE true +#define FALSE false + +#define return_if_fail(expr) do{ (void)0; }while(0) +#define return_val_if_fail(expr,val) do{ (void)0; }while(0) + +#define INT64_CONSTANT(val) (val##LL) + +#define INT64_FORMAT "lli" +#define UINT64_FORMAT "llu" + +#undef CLAMP +#define CLAMP(x, low, high) (((x) > (high)) ? (high) : (((x) < (low)) ? (low) : (x))) + +#ifndef NULL +#ifdef __cplusplus +#define NULL (0L) +#else /* !__cplusplus */ +#define NULL ((void*) 0) +#endif /* !__cplusplus */ +#endif + + +#endif /* __MIX_TYPES_H__ */ -- cgit v1.2.3 From a1f02eb448f46977b0d7d48e66391292cdb79f57 Mon Sep 17 00:00:00 2001 From: xli111 Date: Tue, 22 Mar 2011 17:53:17 +0800 Subject: Enable buffer sharing in libmix and close log Change-Id: I09cf415f82e0490fb0d887f7207875ed150b8126 Signed-off-by: xli111 --- mix_common/src/j_slist.cpp | 2 +- mix_video/src/Android.mk | 2 +- mix_video/src/mixvideo.cpp | 7 +------ mix_video/src/mixvideoformatenc_h264.cpp | 6 +++--- 4 files changed, 6 insertions(+), 11 deletions(-) diff --git a/mix_common/src/j_slist.cpp b/mix_common/src/j_slist.cpp index 3168689..26435d8 100644 --- a/mix_common/src/j_slist.cpp +++ b/mix_common/src/j_slist.cpp @@ -137,7 +137,7 @@ JSList* j_slist_find_custom(JSList *list, void* data, JCompareFunc func) { JSList *traverse_item = list; while (traverse_item != NULL) { - if (func(traverse_item->data, data) != 0) break; + if (func(traverse_item->data, data) == 0) break; traverse_item = traverse_item->next; } diff --git a/mix_video/src/Android.mk b/mix_video/src/Android.mk index 39c057d..74e9849 100644 --- a/mix_video/src/Android.mk +++ b/mix_video/src/Android.mk @@ -1,7 +1,7 @@ LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) -MIXVIDEO_LOG_ENABLE := true +MIXVIDEO_LOG_ENABLE := false LOCAL_SRC_FILES := \ mixvideothread.cpp \ diff --git a/mix_video/src/mixvideo.cpp b/mix_video/src/mixvideo.cpp index 5053595..c240350 100644 --- a/mix_video/src/mixvideo.cpp +++ b/mix_video/src/mixvideo.cpp @@ -2042,16 +2042,11 @@ MIX_RESULT mix_video_get_new_userptr_for_surface_buffer_default (MixVideo * mix, goto cleanup; } -#if 1 - va_status = vaCreateSurfaces(priv->va_display, width, - height, VA_RT_FORMAT_YUV420, - 1, &surface); -#else va_status = vaCreateSurfacesForUserPtr ( priv->va_display, width, height, VA_RT_FORMAT_YUV420, 1, &surface, expected_size, VA_FOURCC_NV12, width, width, width, 0, width * height, width * height); -#endif + if (va_status != VA_STATUS_SUCCESS) { LOG_E("Failed vaCreateSurfaces\n"); diff --git a/mix_video/src/mixvideoformatenc_h264.cpp b/mix_video/src/mixvideoformatenc_h264.cpp index 71d4282..6c2374a 100644 --- a/mix_video/src/mixvideoformatenc_h264.cpp +++ 
b/mix_video/src/mixvideoformatenc_h264.cpp @@ -2696,7 +2696,7 @@ MixVideoFormatEnc_H264::_send_seq_params () { //h264_seq_param.seq_parameter_set_id = 176; // This is a temporary fix suggested by Binglin for bad encoding quality issue - //h264_seq_param.max_num_ref_frames = 1; // TODO: We need a long term design for this field + h264_seq_param.max_num_ref_frames = 1; // TODO: We need a long term design for this field LOG_V( "===h264 sequence params===\n"); @@ -2902,8 +2902,8 @@ MixVideoFormatEnc_H264::_send_slice_parameter () { = this->disable_deblocking_filter_idc; // This is a temporary fix suggested by Binglin for bad encoding quality issue - //current_slice->slice_flags.bits.uses_long_term_ref = 0; // TODO: We need a long term design for this field - //current_slice->slice_flags.bits.is_long_term_ref = 0; // TODO: We need a long term design for this field + current_slice->slice_flags.bits.uses_long_term_ref = 0; // TODO: We need a long term design for this field + current_slice->slice_flags.bits.is_long_term_ref = 0; // TODO: We need a long term design for this field LOG_V( "======h264 slice params======\n"); -- cgit v1.2.3 From c3365d590274f40f8c5dedb2ef0435d8103ea7d0 Mon Sep 17 00:00:00 2001 From: xli111 Date: Wed, 23 Mar 2011 09:58:25 +0800 Subject: Delete all unused makefile Change-Id: I6b4e3d782ccc7cbbd978babb7f60004cd09e946c Signed-off-by: xli111 --- mix_common/Makefile.am | 10 -- mix_common/configure.ac | 39 ----- mix_common/mixcommon.spec | 43 ------ mix_common/src/Makefile.am | 23 --- mix_vbp/Makefile.am | 9 -- mix_vbp/configure.ac | 77 ---------- mix_vbp/m4/Makefile.am | 1 - mix_vbp/mixvbp.spec | 68 --------- mix_vbp/viddec_fw/fw/parser/Makefile.am | 203 -------------------------- mix_video/Makefile.am | 9 -- mix_video/configure.ac | 137 ----------------- mix_video/docs/Makefile.am | 4 - mix_video/docs/reference/Makefile.am | 4 - mix_video/docs/reference/MixVideo/Makefile.am | 116 --------------- mix_video/m4/Makefile.am | 1 - mix_video/mixvideo.spec | 81 ---------- mix_video/src/Makefile.am | 139 ------------------ mix_video/src/Makefile.old | 40 ----- mix_video/test/Makefile.am | 2 - mix_video/test/configure.ac | 53 ------- mix_video/test/src/Makefile.am | 22 --- 21 files changed, 1081 deletions(-) delete mode 100644 mix_common/Makefile.am delete mode 100644 mix_common/configure.ac delete mode 100644 mix_common/mixcommon.spec delete mode 100644 mix_common/src/Makefile.am delete mode 100644 mix_vbp/Makefile.am delete mode 100644 mix_vbp/configure.ac delete mode 100644 mix_vbp/m4/Makefile.am delete mode 100644 mix_vbp/mixvbp.spec delete mode 100644 mix_vbp/viddec_fw/fw/parser/Makefile.am delete mode 100644 mix_video/Makefile.am delete mode 100644 mix_video/configure.ac delete mode 100644 mix_video/docs/Makefile.am delete mode 100644 mix_video/docs/reference/Makefile.am delete mode 100644 mix_video/docs/reference/MixVideo/Makefile.am delete mode 100644 mix_video/m4/Makefile.am delete mode 100644 mix_video/mixvideo.spec delete mode 100644 mix_video/src/Makefile.am delete mode 100644 mix_video/src/Makefile.old delete mode 100644 mix_video/test/Makefile.am delete mode 100644 mix_video/test/configure.ac delete mode 100644 mix_video/test/src/Makefile.am diff --git a/mix_common/Makefile.am b/mix_common/Makefile.am deleted file mode 100644 index f5b19ff..0000000 --- a/mix_common/Makefile.am +++ /dev/null @@ -1,10 +0,0 @@ -SUBDIRS = src - -#ACLOCAL_AMFLAGS=-I m4 -#Uncomment the following line if building documentation using gtkdoc -#SUBDIRS += docs - -pkgconfigdir = 
$(libdir)/pkgconfig -pkgconfig_DATA=mixcommon.pc -EXTRA_DIST = autogen.sh m4 -DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc diff --git a/mix_common/configure.ac b/mix_common/configure.ac deleted file mode 100644 index 2dfa6aa..0000000 --- a/mix_common/configure.ac +++ /dev/null @@ -1,39 +0,0 @@ -AC_INIT("", "", [khanh.v.nguyen@intel.com]) - -AC_CONFIG_MACRO_DIR(m4) - -AS_MIX_VERSION(mixcommon, MIXCOMMON, 0, 1, 8) - -AM_INIT_AUTOMAKE($PACKAGE, $VERSION) -#AM_INIT_AUTOMAKE([-Wall -Werror foreign]) - -AC_PROG_CC -AC_PROG_LIBTOOL - -AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes , no) - -dnl Give error and exit if we don't have pkgconfig -if test "x$HAVE_PKGCONFIG" = "xno"; then - AC_MSG_ERROR(you need to have pkgconfig installed !) -fi - -GLIB_REQ=2.16 -dnl Check for glib2 without extra fat, useful for the unversioned tool frontends -dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) -PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) -if test "x$HAVE_GLIB" = "xno"; then - AC_MSG_ERROR(You need glib development packages installed !) -fi - -PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no) -if test "x$HAVE_GOBJECT" = "xno"; then - AC_MSG_ERROR(You need glib development packages installed !) -fi - -AC_CONFIG_HEADERS([config.h]) -AC_CONFIG_FILES([ - mixcommon.pc - Makefile - src/Makefile -]) -AC_OUTPUT diff --git a/mix_common/mixcommon.spec b/mix_common/mixcommon.spec deleted file mode 100644 index be17602..0000000 --- a/mix_common/mixcommon.spec +++ /dev/null @@ -1,43 +0,0 @@ -Summary: MIX Common -Name: mixcommon -Version: 0.1.8 -Release: 1 -Source0: %{name}-%{version}.tar.gz -NoSource: 0 -License: Proprietary -Group: System Environment/Libraries -BuildRoot: %{_tmppath}/%{name}-root -ExclusiveArch: i586 - -%description -MIX Common contains common classes, datatype, header files used by other MIX components - -%package devel -Summary: Libraries include files -Group: Development/Libraries -Requires: %{name} = %{version} - -%description devel -The %{name}-devel package contains the header files and static libraries for building applications which use %{name}. - -%prep -%setup -q -%build -./autogen.sh -./configure --prefix=%{_prefix} -make -%install -rm -rf $RPM_BUILD_ROOT -make DESTDIR=$RPM_BUILD_ROOT install -%clean -rm -rf $RPM_BUILD_ROOT -%files -%defattr(-,root,root) -%{_prefix}/lib/*.so* - -%files devel -%defattr(-,root,root) -%{_prefix}/include -%{_prefix}/lib/*.la -%{_prefix}/lib/pkgconfig/mixcommon.pc -%doc COPYING diff --git a/mix_common/src/Makefile.am b/mix_common/src/Makefile.am deleted file mode 100644 index 199c509..0000000 --- a/mix_common/src/Makefile.am +++ /dev/null @@ -1,23 +0,0 @@ -#INTEL CONFIDENTIAL -#Copyright 2009 Intel Corporation All Rights Reserved. -#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
- -#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -# - -lib_LTLIBRARIES = libmixcommon.la - -############################################################################## -# sources used to compile -libmixcommon_la_SOURCES = mixparams.c mixlog.c mixdrmparams.c - -# flags used to compile this plugin -# add other _CFLAGS and _LIBS as needed -libmixcommon_la_CFLAGS = $(GLIB_CFLAGS) $(GOBJECT_CFLAGS) -libmixcommon_la_LIBADD = $(GLIB_LIBS) $(GOBJECT_LIBS) -libmixcommon_la_LDFLAGS = $(GLIB_LIBS) $(GOBJECT_LIBS) -version-info @MIXCOMMON_CURRENT@:@MIXCOMMON_REVISION@:@MIXCOMMON_AGE@ -libmixcommon_la_LIBTOOLFLAGS = --tag=disable-static - -include_HEADERS = mixparams.h mixresult.h mixlog.h mixdrmparams.h -#mixcommonincludedir = $(includedir) -#mixcommoninclude_HEADERS = mixparams.h mixresult.h diff --git a/mix_vbp/Makefile.am b/mix_vbp/Makefile.am deleted file mode 100644 index a8b59cd..0000000 --- a/mix_vbp/Makefile.am +++ /dev/null @@ -1,9 +0,0 @@ -SUBDIRS = viddec_fw/fw/parser - -#Uncomment the following line if building documentation using gtkdoc -#SUBDIRS += docs - -pkgconfigdir = $(libdir)/pkgconfig -pkgconfig_DATA=mixvbp.pc -EXTRA_DIST = autogen.sh mixvbp.spec -DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc diff --git a/mix_vbp/configure.ac b/mix_vbp/configure.ac deleted file mode 100644 index 68e5d09..0000000 --- a/mix_vbp/configure.ac +++ /dev/null @@ -1,77 +0,0 @@ -AC_INIT([""],[""],[linda.s.cline@intel.com]) - -AC_CONFIG_MACRO_DIR(m4) - -UMG_MIX_VERSION(mixvbp, MIXVBP, 0, 1, 20) - -dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode -AM_MAINTAINER_MODE - -AM_INIT_AUTOMAKE($PACKAGE, $VERSION) - -dnl make aclocal work in maintainer mode -AC_SUBST(ACLOCAL_AMFLAGS, "-I m4") - -AC_CONFIG_HEADERS([config.h]) - -dnl check for tools -AC_PROG_CC -LT_INIT - -MIX_CFLAGS="-Wall -Werror" - -dnl decide on error flags -dnl AS_COMPILER_FLAG(-Wall, MIX_ERROR="$MIX_ERROR -Wall", MIX_ERROR="$MIX_ERROR") -dnl AS_COMPILER_FLAG(-Werror,MIX_ERROR="$MIX_ERROR -Werror",MIX_ERROR="$MIX_ERROR") - -dnl Check for pkgconfig first -AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes, no) - -dnl Give error and exit if we don't have pkgconfig -if test "x$HAVE_PKGCONFIG" = "xno"; then - AC_MSG_ERROR(you need to have pkgconfig installed !) -fi - -dnl GLib -dnl FIXME: need to align with moblin glib version -dnl FIXME: currently using an earlier version so it can be built on dev box. -GLIB_REQ=2.16 - -dnl Check for glib2 without extra fat, useful for the unversioned tool frontends -dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) -PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) -if test "x$HAVE_GLIB" = "xno"; then - AC_MSG_ERROR(You need glib development packages installed !) -fi - -PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no) -if test "x$HAVE_GOBJECT" = "xno"; then - AC_MSG_ERROR(You need glib development packages installed !) -fi - -PKG_CHECK_MODULES(GTHREAD, gthread-2.0 >= $GLIB_REQ,HAVE_GTHREAD=yes,HAVE_GTHREAD=no) -if test "x$HAVE_GTHREAD" = "xno"; then - AC_MSG_ERROR(You need glib development packages installed !) 
-fi - -dnl Check for documentation xrefs -dnl GLIB_PREFIX="`$PKG_CONFIG --variable=prefix glib-2.0`" -dnl AC_SUBST(GLIB_PREFIX) - -AC_SUBST(GLIB_CFLAGS) -AC_SUBST(GLIB_LIBS) -AC_SUBST(GOBJECT_CFLAGS) -AC_SUBST(GOBJECT_LIBS) -AC_SUBST(MIX_CFLAGS) -AC_SUBST(GTHREAD_CFLAGS) -AC_SUBST(GTHREAD_LIBS) - -AC_CONFIG_FILES([ -mixvbp.pc -Makefile -viddec_fw/fw/parser/Makefile -]) - -AC_OUTPUT - - diff --git a/mix_vbp/m4/Makefile.am b/mix_vbp/m4/Makefile.am deleted file mode 100644 index 66381d4..0000000 --- a/mix_vbp/m4/Makefile.am +++ /dev/null @@ -1 +0,0 @@ -EXTRA_DIST += diff --git a/mix_vbp/mixvbp.spec b/mix_vbp/mixvbp.spec deleted file mode 100644 index 1b6a353..0000000 --- a/mix_vbp/mixvbp.spec +++ /dev/null @@ -1,68 +0,0 @@ -# INTEL CONFIDENTIAL -# Copyright 2009 Intel Corporation All Rights Reserved. -# The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. -# -# No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - -Summary: MIX Video Bitstream Parser -Name: mixvbp -Version: 0.1.20 -Release: 1 -Source0: %{name}-%{version}.tar.bz2 -NoSource: 0 -License: Proprietary -Group: System Environment/Libraries -BuildRoot: %{_tmppath}/%{name}-root -ExclusiveArch: i586 -BuildRequires: glib2-devel libva-devel - -%description -MIX Video Bitstream Parser is an user library interface for various video format bitstream parsing - -%package devel -Summary: Libraries include files -Group: Development/Libraries -Requires: %{name} = %{version} - -%description devel -The %{name}-devel package contains the header files and static libraries for building applications which use %{name}. - -%prep -%setup -q - -%build -./autogen.sh -./configure --prefix=%{_prefix} -make - -%install -make DESTDIR=$RPM_BUILD_ROOT install -rm -f $RPM_BUILD_ROOT/%{_prefix}/lib/libmixvbp_mpeg2* - -%post -p /sbin/ldconfig -%postun -p /sbin/ldconfig - -%clean -rm -rf $RPM_BUILD_ROOT -%files -%defattr(-,root,root) -%{_prefix}/lib/libmixvbp.so.* -%{_prefix}/lib/libmixvbp_vc1.so.* -%{_prefix}/lib/libmixvbp_h264.so.* -%{_prefix}/lib/libmixvbp_mpeg4.so.* - -%files devel -%defattr(-,root,root) -%{_prefix}/include/mixvbp -%{_prefix}/lib/*.la -%{_prefix}/lib/pkgconfig/mixvbp.pc -%{_prefix}/lib/libmixvbp.so -%{_prefix}/lib/libmixvbp_vc1.so -%{_prefix}/lib/libmixvbp_h264.so -%{_prefix}/lib/libmixvbp_mpeg4.so - - -%changelog -* Mon Sep 13 2010 John Q Public 0.0 -- Dummy changelog to satisfy rpmlint. 
- diff --git a/mix_vbp/viddec_fw/fw/parser/Makefile.am b/mix_vbp/viddec_fw/fw/parser/Makefile.am deleted file mode 100644 index 89c995c..0000000 --- a/mix_vbp/viddec_fw/fw/parser/Makefile.am +++ /dev/null @@ -1,203 +0,0 @@ -#INTEL CONFIDENTIAL -#Copyright 2009 Intel Corporation All Rights Reserved. -#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -# -VC1PATH=./../codecs/vc1/parser -MP2PATH=./../codecs/mp2/parser -MP4PATH=./../codecs/mp4/parser -H264PATH=./../codecs/h264/parser - -PARSER_INCLUDE_PATH=-I./include \ - -I../include \ - -I../../include \ - -I./vc1/include \ - -I../codecs/vc1/include \ - -I../codecs/mp2/include \ - -I../codecs/mp4/include \ - -I../codecs/h264/include \ - -I../codecs/vc1/parser - - -PARSER_MACROS= -DVBP \ - -DHOST_ONLY \ - -DG_LOG_DOMAIN=\"vbp\" - - -la_CFLAGS = $(GLIB_CFLAGS) \ - $(MIX_CFLAGS) \ - $(GOBJECT_CFLAGS) \ - $(GTHREAD_CFLAGS) \ - $(PARSER_INCLUDE_PATH) \ - $(PARSER_MACROS) \ - -DMIXVBP_CURRENT=@MIXVBP_CURRENT@ \ - -DMIXVBP_AGE=@MIXVBP_AGE@ \ - -DMIXVBP_REVISION=@MIXVBP_REVISION@ - -la_LIBADD = $(GLIB_LIBS) \ - $(GOBJECT_LIBS) \ - $(GTHREAD_LIBS) - -la_LDFLAGS = $(GLIB_LIBS) \ - $(GOBJECT_LIBS) \ - $(GTHREAD_LIBS) \ - -version-info @MIXVBP_CURRENT@:@MIXVBP_REVISION@:@MIXVBP_AGE@ - -lib_LTLIBRARIES = libmixvbp.la \ - libmixvbp_vc1.la \ - libmixvbp_mpeg2.la \ - libmixvbp_mpeg4.la \ - libmixvbp_h264.la - - -###################################### vbp loader ######################################## - -# sources used to compile -libmixvbp_la_SOURCES = vbp_loader.c \ - vbp_utils.c \ - vbp_trace.c \ - vbp_h264_parser.c \ - vbp_vc1_parser.c \ - vbp_mp42_parser.c \ - viddec_pm.c \ - viddec_pm_stubs.c \ - viddec_pm_parser_ops.c \ - viddec_pm_utils_bstream.c \ - viddec_pm_tags.c \ - viddec_emit.c \ - viddec_pm_utils_list.c \ - viddec_parse_sc.c \ - viddec_parse_sc_stub.c - -libmixvbp_la_CFLAGS = $(la_CFLAGS) -libmixvbp_la_LIBADD = $(la_LIBADD) -libmixvbp_la_LDFLAGS = $(la_LDFLAGS) -libmixvbp_la_LIBTOOLFLAGS = --tag=disable-static - -###################################### VC-1 parser ######################################## - -libmixvbp_vc1_la_SOURCES = $(VC1PATH)/vc1parse.c \ - $(VC1PATH)/vc1parse_bitplane.c \ - $(VC1PATH)/vc1parse_bpic.c \ - $(VC1PATH)/vc1parse_bpic_adv.c \ - $(VC1PATH)/vc1parse_common_tables.c \ - $(VC1PATH)/vc1parse_huffman.c \ - $(VC1PATH)/vc1parse_ipic.c \ - $(VC1PATH)/vc1parse_ipic_adv.c \ - $(VC1PATH)/vc1parse_mv_com.c \ - $(VC1PATH)/vc1parse_pic_com.c \ - $(VC1PATH)/vc1parse_pic_com_adv.c \ - $(VC1PATH)/vc1parse_ppic.c \ - $(VC1PATH)/vc1parse_ppic_adv.c \ - $(VC1PATH)/vc1parse_vopdq.c \ - 
$(VC1PATH)/viddec_vc1_parse.c \ - $(VC1PATH)/mix_vbp_vc1_stubs.c - -libmixvbp_vc1_la_CFLAGS = $(la_CFLAGS) -libmixvbp_vc1_la_LIBADD = $(la_LIBADD) libmixvbp.la -libmixvbp_vc1_la_LDFLAGS = $(la_LDFLAGS) -libmixvbp_vc1_la_LIBTOOLFLAGS = --tag=disable-static - -###################################### MPEG-2 parser ######################################## - -libmixvbp_mpeg2_la_SOURCES = $(MP2PATH)/viddec_mpeg2_metadata.c \ - $(MP2PATH)/viddec_mpeg2_parse.c \ - $(MP2PATH)/mix_vbp_mpeg2_stubs.c - -libmixvbp_mpeg2_la_CFLAGS = $(la_CFLAGS) -libmixvbp_mpeg2_la_LIBADD = $(la_LIBADD) libmixvbp.la -libmixvbp_mpeg2_la_LDFLAGS = $(la_LDFLAGS) -libmixvbp_mpeg2_la_LIBTOOLFLAGS = --tag=disable-static - -###################################### MPEG-4 parser ######################################## - -libmixvbp_mpeg4_la_SOURCES = $(MP4PATH)/viddec_mp4_parse.c \ - $(MP4PATH)/viddec_parse_sc_mp4.c \ - $(MP4PATH)/viddec_mp4_visualobject.c \ - $(MP4PATH)/viddec_mp4_videoobjectplane.c \ - $(MP4PATH)/viddec_mp4_shortheader.c \ - $(MP4PATH)/viddec_mp4_videoobjectlayer.c \ - $(MP4PATH)/viddec_mp4_decodevideoobjectplane.c - -libmixvbp_mpeg4_la_CFLAGS = $(la_CFLAGS) -libmixvbp_mpeg4_la_LIBADD = $(la_LIBADD) libmixvbp.la -libmixvbp_mpeg4_la_LDFLAGS = $(la_LDFLAGS) -libmixvbp_mpeg4_la_LIBTOOLFLAGS = --tag=disable-static - -###################################### H.264 parser ######################################## - -libmixvbp_h264_la_SOURCES = $(H264PATH)/h264parse.c \ - $(H264PATH)/h264parse_bsd.c \ - $(H264PATH)/h264parse_math.c \ - $(H264PATH)/h264parse_mem.c \ - $(H264PATH)/h264parse_sei.c \ - $(H264PATH)/h264parse_sh.c \ - $(H264PATH)/h264parse_pps.c \ - $(H264PATH)/h264parse_sps.c \ - $(H264PATH)/h264parse_dpb.c \ - $(H264PATH)/viddec_h264_parse.c \ - $(H264PATH)/mix_vbp_h264_stubs.c - -libmixvbp_h264_la_CFLAGS = $(la_CFLAGS) -libmixvbp_h264_la_LIBADD = $(la_LIBADD) libmixvbp.la -libmixvbp_h264_la_LDFLAGS = $(la_LDFLAGS) -libmixvbp_h264_la_LIBTOOLFLAGS = --tag=disable-static - -############################################################################################## - -# headers we need but don't want installed -noinst_HEADERS = ./vbp_h264_parser.h \ - ./vbp_mp42_parser.h \ - ./vbp_vc1_parser.h \ - ./vbp_trace.h \ - ./vbp_loader.h \ - ./vbp_utils.h \ - ./include/fw_pvt.h \ - ./include/ipc_fw_custom.h \ - ./include/viddec_emitter.h \ - ./include/viddec_fw_debug.h \ - ./include/viddec_fw_parser_fw_ipc.h \ - ./include/viddec_h264_parse.h \ - ./include/viddec_mp4_parse.h \ - ./include/viddec_mpeg2_parse.h \ - ./include/viddec_parser_ops.h \ - ./include/viddec_pm.h \ - ./include/viddec_pm_parse.h \ - ./include/viddec_pm_tags.h \ - ./include/viddec_pm_utils_bstream.h \ - ./include/viddec_pm_utils_list.h \ - ./include/viddec_vc1_parse.h \ - ../include/viddec_debug.h \ - ../include/viddec_fw_version.h \ - ../../include/viddec_fw_common_defs.h \ - ../../include/viddec_fw_decoder_host.h \ - ../../include/viddec_fw_frame_attr.h \ - ../../include/viddec_fw_item_types.h \ - ../../include/viddec_fw_parser_host.h \ - ../../include/viddec_fw_workload.h \ - ../../fw/include/viddec_debug.h \ - ../../fw/include/viddec_fw_version.h \ - ../../fw/codecs/h264/include/h264.h \ - ../../fw/codecs/h264/include/h264parse.h \ - ../../fw/codecs/h264/include/h264parse_dpb.h \ - ../../fw/codecs/h264/include/h264parse_sei.h \ - ../../fw/codecs/mp2/include/mpeg2.h \ - ../../fw/codecs/mp2/include/viddec_mpeg2.h \ - ../../fw/codecs/mp4/include/viddec_fw_mp4.h \ - ../../fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h \ - 
../../fw/codecs/mp4/parser/viddec_mp4_parse.h \ - ../../fw/codecs/mp4/parser/viddec_mp4_shortheader.h \ - ../../fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h \ - ../../fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h \ - ../../fw/codecs/mp4/parser/viddec_mp4_visualobject.h \ - ../../fw/codecs/vc1/include/vc1common.h \ - ../../fw/codecs/vc1/parser/vc1.h \ - ../../fw/codecs/vc1/parser/vc1parse.h \ - ../../fw/codecs/vc1/parser/vc1parse_common_defs.h - - -mixincludedir=$(includedir)/mixvbp -mixinclude_HEADERS = vbp_loader.h - -############################################################################################## diff --git a/mix_video/Makefile.am b/mix_video/Makefile.am deleted file mode 100644 index c6339cf..0000000 --- a/mix_video/Makefile.am +++ /dev/null @@ -1,9 +0,0 @@ -SUBDIRS = src - -#Uncomment the following line if building documentation using gtkdoc -#SUBDIRS += docs - -pkgconfigdir = $(libdir)/pkgconfig -pkgconfig_DATA=mixvideo.pc mixvideoint.pc -EXTRA_DIST = autogen.sh mixvideo.spec -DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc diff --git a/mix_video/configure.ac b/mix_video/configure.ac deleted file mode 100644 index 93f2986..0000000 --- a/mix_video/configure.ac +++ /dev/null @@ -1,137 +0,0 @@ -AC_INIT([""],[""],[linda.s.cline@intel.com]) - -AC_CONFIG_MACRO_DIR(m4) - -UMG_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 24) - -dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode -AM_MAINTAINER_MODE - -AM_INIT_AUTOMAKE($PACKAGE, $VERSION) -#AM_INIT_AUTOMAKE([-Wall -Werror foreign]) - -dnl make aclocal work in maintainer mode -AC_SUBST(ACLOCAL_AMFLAGS, "-I m4") - -AC_CONFIG_HEADERS([config.h]) - -dnl check for tools -AC_PROG_CC -LT_INIT - -#MIX_CFLAGS="-Wall -Werror" -MIX_CFLAGS="-Wall" - -AC_ARG_ENABLE(mixlog, - AS_HELP_STRING([--enable-mixlog], - [Enable mixlog (default=no)]), - [mixlog_enabled=$enableval], - [mixlog_enabled=no]) -AC_MSG_RESULT($mixlog_enabled) -AM_CONDITIONAL(MIXLOG_ENABLED, test "$mixlog_enabled" = "yes") - - -dnl decide on error flags -dnl AS_COMPILER_FLAG(-Wall, MIX_ERROR="$MIX_ERROR -Wall", MIX_ERROR="$MIX_ERROR") -dnl AS_COMPILER_FLAG(-Werror,MIX_ERROR="$MIX_ERROR -Werror",MIX_ERROR="$MIX_ERROR") - -dnl Check for pkgconfig first -AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes, no) - -dnl Give error and exit if we don't have pkgconfig -if test "x$HAVE_PKGCONFIG" = "xno"; then - AC_MSG_ERROR(you need to have pkgconfig installed !) -fi - -dnl GLib -dnl FIXME: need to align with moblin glib version -dnl FIXME: currently using an earlier version so it can be built on dev box. -GLIB_REQ=2.16 - -dnl Check for glib2 without extra fat, useful for the unversioned tool frontends -dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) -PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) -if test "x$HAVE_GLIB" = "xno"; then - AC_MSG_ERROR(You need glib development packages installed !) -fi - -PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no) -if test "x$HAVE_GOBJECT" = "xno"; then - AC_MSG_ERROR(You need glib development packages installed !) -fi - -PKG_CHECK_MODULES(GTHREAD, gthread-2.0 >= $GLIB_REQ,HAVE_GTHREAD=yes,HAVE_GTHREAD=no) -if test "x$HAVE_GTHREAD" = "xno"; then - AC_MSG_ERROR(You need glib development packages installed !) -fi - -LIBVA_REQ=0.31 -PKG_CHECK_MODULES(LIBVA, libva >= $LIBVA_REQ,HAVE_LIBVA=yes,HAVE_LIBVA=no) -if test "x$HAVE_LIBVA" = "xno"; then - AC_MSG_ERROR(You need libva development package installed !) 
-fi -#LIBVA_CFLAGS="-I/usr/local/include" -#LIBVA_LIBS="-lva" - - -LIBVA_X11_REQ=0.31 -PKG_CHECK_MODULES(LIBVA_X11, libva-x11 >= $LIBVA_X11_REQ,HAVE_LIBVA_X11=yes,HAVE_LIBVA_X11=no) -if test "x$HAVE_LIBVA_X11" = "xno"; then - AC_MSG_ERROR(You need libva development package installed !) -fi -#LIBVA_X11_CFLAGS="-I/usr/local/include" -#LIBVA_X11LIBS="-lva-x11" - - -MIXCOMMON_REQ=0.1 -PKG_CHECK_MODULES(MIXCOMMON, mixcommon >= $MIXCOMMON_REQ, HAVE_MIXCOMMON=yes, HAVE_MIXCOMMON=no) -if test "x$HAVE_MIXCOMMON" = "xno"; then - AC_MSG_ERROR(You need mixcommon development package installed !) -fi - -MIXVBP_REQ=0.1 -PKG_CHECK_MODULES(MIXVBP, mixvbp >= $MIXVBP_REQ, HAVE_MIXVBP=yes, HAVE_MIXVBP=no) -if test "x$HAVE_MIXVBP" = "xno"; then - AC_MSG_ERROR(You need mixvbp development package installed !) -fi - -dnl Check for documentation xrefs -dnl GLIB_PREFIX="`$PKG_CONFIG --variable=prefix glib-2.0`" -dnl AC_SUBST(GLIB_PREFIX) - -AC_SUBST(GLIB_CFLAGS) -AC_SUBST(GLIB_LIBS) -AC_SUBST(GOBJECT_CFLAGS) -AC_SUBST(GOBJECT_LIBS) -AC_SUBST(MIX_CFLAGS) -AC_SUBST(GTHREAD_CFLAGS) -AC_SUBST(GTHREAD_LIBS) -AC_SUBST(LIBVA_CFLAGS) -AC_SUBST(LIBVA_LIBS) -AC_SUBST(MIXCOMMON_CFLAGS) -AC_SUBST(MIXCOMMON_LIBS) -AC_SUBST(MIXVBP_CFLAGS) -AC_SUBST(MIXVBP_LIBS) - - -dnl check for gtkdoc. Uncomment the following line to build gtkdoc -dnl GTK_DOC_CHECK(1.9) - -AC_CONFIG_FILES([ -mixvideo.pc -mixvideoint.pc -Makefile -src/Makefile -]) - -dnl Additional Makefiles if we are building document with gtkdoc. -dnl Un-comment this section to enable building of documentation. -dnl AC_CONFIG_FILES( -dnl docs/Makefile -dnl docs/reference/Makefile -dnl docs/reference/MixVideo/Makefile -dnl ) - -AC_OUTPUT - - diff --git a/mix_video/docs/Makefile.am b/mix_video/docs/Makefile.am deleted file mode 100644 index 621e3f7..0000000 --- a/mix_video/docs/Makefile.am +++ /dev/null @@ -1,4 +0,0 @@ -SUBDIRS = reference - -DIST_SUBDIRS = reference - diff --git a/mix_video/docs/reference/Makefile.am b/mix_video/docs/reference/Makefile.am deleted file mode 100644 index 39b3000..0000000 --- a/mix_video/docs/reference/Makefile.am +++ /dev/null @@ -1,4 +0,0 @@ -SUBDIRS = MixVideo - -DIST_SUBDIRS = MixVideo - diff --git a/mix_video/docs/reference/MixVideo/Makefile.am b/mix_video/docs/reference/MixVideo/Makefile.am deleted file mode 100644 index def9d68..0000000 --- a/mix_video/docs/reference/MixVideo/Makefile.am +++ /dev/null @@ -1,116 +0,0 @@ -#INTEL CONFIDENTIAL -#Copyright 2009 Intel Corporation All Rights Reserved. -#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
-# - - -## Process this file with automake to produce Makefile.in - -# We require automake 1.6 at least. -AUTOMAKE_OPTIONS = 1.6 - -# This is a blank Makefile.am for using gtk-doc. -# Copy this to your project's API docs directory and modify the variables to -# suit your project. See the GTK+ Makefiles in gtk+/docs/reference for examples -# of using the various options. - -# The name of the module, e.g. 'glib'. -DOC_MODULE=MixVideo - -# The top-level SGML file. You can change this if you want to. -DOC_MAIN_SGML_FILE=$(DOC_MODULE)-docs.sgml - -# The directory containing the source code. Relative to $(srcdir). -# gtk-doc will search all .c & .h files beneath here for inline comments -# documenting the functions and macros. -# e.g. DOC_SOURCE_DIR=../../../gtk -DOC_SOURCE_DIR=$(top_srcdir)/src - -# Extra options to pass to gtkdoc-scangobj. Not normally needed. -#SCANGOBJ_OPTIONS=--type-init-func="g_init(NULL,NULL)" - -# Extra options to supply to gtkdoc-scan. -# e.g. SCAN_OPTIONS=--deprecated-guards="GTK_DISABLE_DEPRECATED" -#SCAN_OPTIONS=--rebuild-sections --rebuild-types -#SCAN_OPTIONS=--rebuild-sections -#SCAN_OPTIONS=--rebuild-types - -# Extra options to supply to gtkdoc-mkdb. -# e.g. MKDB_OPTIONS=--sgml-mode --output-format=xml -MKDB_OPTIONS=--sgml-mode --output-format=xml - -# Extra options to supply to gtkdoc-mktmpl -# e.g. MKTMPL_OPTIONS=--only-section-tmpl -MKTMPL_OPTIONS= - -# Extra options to supply to gtkdoc-fixref. Not normally needed. -# e.g. FIXXREF_OPTIONS=--extra-dir=../gdk-pixbuf/html --extra-dir=../gdk/html -FIXXREF_OPTIONS= - -# Used for dependencies. The docs will be rebuilt if any of these change. -# e.g. HFILE_GLOB=$(top_srcdir)/gtk/*.h -# e.g. CFILE_GLOB=$(top_srcdir)/gtk/*.c -HFILE_GLOB=$(top_srcdir)/src/*.h -CFILE_GLOB=$(top_srcdir)/src/*.c - -# Header files to ignore when scanning. -# e.g. IGNORE_HFILES=gtkdebug.h gtkintl.h -IGNORE_HFILES=*~ \ - mixbufferpool.h \ - mixbuffer_private.h \ - mixframemanager.h \ - mixsurfacepool.h \ - mixvideocaps.h \ - mixvideoformatenc.h \ - mixvideoformatenc_h264.h \ - mixvideoformatenc_mpeg4.h \ - mixvideoformatenc_preview.h \ - mixvideoformat.h \ - mixvideoformat_h264.h \ - mixvideoformat_mp42.h \ - mixvideoformatqueue.h \ - mixvideoformat_vc1.h \ - mixvideoframe_private.h \ - mixvideolog.h \ - mixvideo_private.h \ - mixvideorenderparams_internal.h \ - mixvideoconfigparamsenc_preview.h - -# Images to copy into HTML directory. -# e.g. HTML_IMAGES=$(top_srcdir)/gtk/stock-icons/stock_about_24.png -HTML_IMAGES= - -# Extra SGML files that are included by $(DOC_MAIN_SGML_FILE). -# e.g. content_files=running.sgml building.sgml changes-2.0.sgml -content_files= - -# SGML files where gtk-doc abbrevations (#GtkWidget) are expanded -# These files must be listed here *and* in content_files -# e.g. expand_content_files=running.sgml -expand_content_files= - -# CFLAGS and LDFLAGS for compiling gtkdoc-scangobj with your library. -# Only needed if you are using gtkdoc-scangobj to dynamically query widget -# signals and properties. -# e.g. INCLUDES=-I$(top_srcdir) -I$(top_builddir) $(GTK_DEBUG_FLAGS) -# e.g. GTKDOC_LIBS=$(top_builddir)/gtk/$(gtktargetlib) -AM_CFLAGS=$(GLIB_CFLAGS) $(GOBJECT_CFLAGS) $(MIXCOMMON_CFLAGS) -GTKDOC_LIBS=$(GLIB_LIBS) $(GOBJECT_LIBS) $(MIXCOMMON_LIBS) $(top_srcdir)/src/libmixvideo.la - -# This includes the standard gtk-doc make rules, copied by gtkdocize. -include $(top_srcdir)/gtk-doc.make - -# Other files to distribute -# e.g. 
EXTRA_DIST += version.xml.in -EXTRA_DIST += - -# Files not to distribute -# for --rebuild-types in $(SCAN_OPTIONS), e.g. $(DOC_MODULE).types -# for --rebuild-sections in $(SCAN_OPTIONS) e.g. $(DOC_MODULE)-sections.txt -#DISTCLEANFILES = - -# Comment this out if you want your docs-status tested during 'make check' -#TESTS = $(GTKDOC_CHECK) - diff --git a/mix_video/m4/Makefile.am b/mix_video/m4/Makefile.am deleted file mode 100644 index 66381d4..0000000 --- a/mix_video/m4/Makefile.am +++ /dev/null @@ -1 +0,0 @@ -EXTRA_DIST += diff --git a/mix_video/mixvideo.spec b/mix_video/mixvideo.spec deleted file mode 100644 index dec7602..0000000 --- a/mix_video/mixvideo.spec +++ /dev/null @@ -1,81 +0,0 @@ -# INTEL CONFIDENTIAL -# Copyright 2009 Intel Corporation All Rights Reserved. -# The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. -# -# No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - -Summary: MIX Video -Name: mixvideo -Version: 0.1.24 -Release: 1 -Source0: %{name}-%{version}.tar.bz2 -NoSource: 0 -License: Proprietary -Group: System Environment/Libraries -BuildRoot: %{_tmppath}/%{name}-root -ExclusiveArch: i586 -Requires: glib2 , mixcommon, mixvbp, libva, libX11 -BuildRequires: glib2-devel mixcommon-devel mixvbp-devel libva-devel libX11-devel - -%description -MIX Video is an user library interface for various video codecs available on the platform. - -## devel package ## - -%package devel -Summary: Libraries include files -Group: Development/Libraries -Requires: %{name} = %{version}, mixcommon-devel , glib2-devel - -%description devel -The %{name}-devel package contains the header files and static libraries for building applications which use %{name}. - -## internal devel package ## - -%package int-devel -Summary: Libraries include files -Group: Development/Libraries -Requires: %{name} = %{version}, mixcommon-devel , glib2-devel, mixvbp-devel - -%description int-devel -The %{name}-int-devel package contains the header files and static libraries for building applications which use %{name}. 
- -%prep -%setup -q - -%build -./autogen.sh -./configure --prefix=%{_prefix} -make - -%install -make DESTDIR=$RPM_BUILD_ROOT install - -%clean -rm -rf $RPM_BUILD_ROOT - -%post -p /sbin/ldconfig - -%postun -p /sbin/ldconfig - -%files -%defattr(-,root,root) -%{_prefix}/lib/libmixvideo.so.* - -%files devel -%defattr(-,root,root) -%{_prefix}/include/mix -%{_prefix}/lib/libmixvideo.so -%{_prefix}/lib/*.la -%{_prefix}/lib/pkgconfig/mixvideo.pc - -%files int-devel -%defattr(-,root,root) -%{_prefix}/lib/libmixvideo.so -%{_prefix}/include/mixvideoint -%{_prefix}/lib/pkgconfig/mixvideoint.pc - -%changelog -* Mon Sep 13 2010 John Q Public 0.0 -- Dummy changelog to satisfy rpmlint. - diff --git a/mix_video/src/Makefile.am b/mix_video/src/Makefile.am deleted file mode 100644 index cbbe071..0000000 --- a/mix_video/src/Makefile.am +++ /dev/null @@ -1,139 +0,0 @@ -#INTEL CONFIDENTIAL -#Copyright 2009 Intel Corporation All Rights Reserved. -#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
-# - -lib_LTLIBRARIES = libmixvideo.la - -############################################################################## -# sources used to compile -libmixvideo_la_SOURCES = mixdisplay.c \ - mixvideo.c \ - mixvideoconfigparams.c \ - mixvideoconfigparamsdec.c \ - mixvideoconfigparamsdec_vc1.c \ - mixvideoconfigparamsdec_h264.c \ - mixvideoconfigparamsdec_mp42.c \ - mixvideoframe.c \ - mixvideorenderparams.c \ - mixdisplayx11.c \ - mixvideocaps.c \ - mixvideodecodeparams.c \ - mixvideoinitparams.c \ - mixvideoformat.c \ - mixvideoformat_h264.c \ - mixvideoformat_vc1.c \ - mixvideoformat_mp42.c \ - mixsurfacepool.c \ - mixframemanager.c \ - mixbufferpool.c \ - mixbuffer.c \ - mixvideoformatenc.c \ - mixvideoformatenc_h264.c \ - mixvideoformatenc_mpeg4.c \ - mixvideoformatenc_preview.c \ - mixvideoformatenc_h263.c \ - mixvideoconfigparamsenc.c \ - mixvideoconfigparamsenc_h264.c \ - mixvideoconfigparamsenc_mpeg4.c \ - mixvideoconfigparamsenc_preview.c \ - mixvideoconfigparamsenc_h263.c \ - mixvideoencodeparams.c - -if MIXLOG_ENABLED -MIXLOG_CFLAGS = -DMIX_LOG_ENABLE -else -MIXLOG_CFLAGS = -endif - - -# flags used to compile this plugin -# add other _CFLAGS and _LIBS as needed -libmixvideo_la_CFLAGS = $(GLIB_CFLAGS) \ - $(MIX_CFLAGS) \ - $(MIXLOG_CFLAGS) \ - $(GOBJECT_CFLAGS) \ - $(GTHREAD_CFLAGS) \ - $(LIBVA_CFLAGS) \ - $(LIBVA_X11_CFLAGS) \ - $(MIXCOMMON_CFLAGS) \ - $(MIXVBP_CFLAGS) \ - -DMIXVIDEO_CURRENT=@MIXVIDEO_CURRENT@ \ - -DMIXVIDEO_AGE=@MIXVIDEO_AGE@ \ - -DMIXVIDEO_REVISION=@MIXVIDEO_REVISION@ - -libmixvideo_la_LIBADD = $(GLIB_LIBS) \ - $(GOBJECT_LIBS) \ - $(GTHREAD_LIBS) \ - $(LIBVA_LIBS) \ - $(LIBVA_X11_LIBS) \ - $(MIXCOMMON_LIBS) \ - $(MIXVBP_LIBS) - -libmixvideo_la_LDFLAGS = $(GLIB_LIBS) \ - $(GOBJECT_LIBS) \ - $(GTHREAD_LIBS) \ - $(LIBVA_LIBS) \ - $(LIBVA_X11_LIBS) \ - $(MIXCOMMON_LIBS) \ - $(MIXVBP_LIBS) \ - -lva-tpi \ - -version-info @MIXVIDEO_CURRENT@:@MIXVIDEO_REVISION@:@MIXVIDEO_AGE@ - -libmixvideo_la_LIBTOOLFLAGS = --tag=disable-static - -# headers we need but don't want installed -noinst_HEADERS = mixvideoformat.h \ - mixvideoformat_h264.h \ - mixvideoformat_vc1.h \ - mixvideoformat_mp42.h \ - mixsurfacepool.h \ - mixvideoframe_private.h \ - mixbuffer_private.h \ - mixframemanager.h \ - mixbufferpool.h \ - mixvideoformatqueue.h \ - mixvideo_private.h \ - mixvideorenderparams_internal.h \ - mixvideoformatenc_h264.h \ - mixvideoformatenc_mpeg4.h \ - mixvideoformatenc_preview.h \ - mixvideoformatenc_h263.h \ - mixvideoformatenc.h \ - mixvideolog.h - -# TODO: decide whehter a /usr/include/mix is needed for mix headers -mixincludedir=$(includedir)/mix -mixinclude_HEADERS = mixvideodef.h \ - mixdisplayx11.h \ - mixvideoconfigparams.h \ - mixvideoconfigparamsdec.h \ - mixvideoconfigparamsdec_vc1.h \ - mixvideoconfigparamsdec_h264.h \ - mixvideoconfigparamsdec_mp42.h \ - mixvideoframe.h \ - mixvideoinitparams.h \ - mixdisplay.h \ - mixvideocaps.h \ - mixvideodecodeparams.h \ - mixvideoencodeparams.h \ - mixvideo.h \ - mixvideorenderparams.h \ - mixbuffer.h \ - mixvideoconfigparamsenc_h264.h \ - mixvideoconfigparamsenc_mpeg4.h \ - mixvideoconfigparamsenc_preview.h \ - mixvideoconfigparamsenc_h263.h \ - mixvideoconfigparamsenc.h - - -mixintincludedir=$(includedir)/mixvideoint -mixintinclude_HEADERS = mixvideoformat.h \ - mixframemanager.h \ - mixsurfacepool.h \ - mixbufferpool.h \ - mixvideoformatqueue.h \ - mixvideoframe_private.h \ - mixvideoformat_vc1.h diff --git a/mix_video/src/Makefile.old b/mix_video/src/Makefile.old deleted file mode 100644 index 2bc3f79..0000000 --- 
a/mix_video/src/Makefile.old +++ /dev/null @@ -1,40 +0,0 @@ -INCL_CONFIG = `pkg-config --cflags glib-2.0 gobject-2.0` -g -LIB_CONFIG = `pkg-config --libs glib-2.0 gobject-2.0` -lgthread-2.0 -g - -all: - gcc -c mixparams.c -o mixparams.o $(INCL_CONFIG) - gcc -c mixdrmparams.c -o mixdrmparams.o $(INCL_CONFIG) - gcc -c mixvideocaps.c -o mixvideocaps.o $(INCL_CONFIG) - gcc -c mixdisplay.c -o mixdisplay.o $(INCL_CONFIG) - gcc -c mixdisplayx11.c -o mixdisplayx11.o $(INCL_CONFIG) - gcc -c mixvideoinitparams.c -o mixvideoinitparams.o $(INCL_CONFIG) - gcc -c mixvideoconfigparams.c -o mixvideoconfigparams.o $(INCL_CONFIG) - gcc -c mixvideoconfigparams_h264.c -o mixvideoconfigparams_h264.o $(INCL_CONFIG) - gcc -c mixvideoconfigparams_vc1.c -o mixvideoconfigparams_vc1.o $(INCL_CONFIG) - gcc -c mixvideodecodeparams.c -o mixvideodecodeparams.o $(INCL_CONFIG) - gcc -c mixvideorenderparams.c -o mixvideorenderparams.o $(INCL_CONFIG) - gcc -c mixvideoframe.c -o mixvideoframe.o $(INCL_CONFIG) - gcc -c mixvideo.c -o mixvideo.o $(INCL_CONFIG) - gcc -c test.c -o test.o $(INCL_CONFIG) - gcc test.o \ - mixvideo.o \ - mixparams.o \ - mixdrmparams.o \ - mixvideorenderparams.o \ - mixvideodecodeparams.o \ - mixvideoconfigparams.o \ - mixvideoconfigparams_vc1.o \ - mixvideoconfigparams_h264.o \ - mixvideoinitparams.o \ - mixdisplay.o \ - mixdisplayx11.o \ - mixvideocaps.o \ - mixvideoframe.o \ - -o test $(LIB_CONFIG) - -clean: - rm *~ - rm *.o - rm test - - diff --git a/mix_video/test/Makefile.am b/mix_video/test/Makefile.am deleted file mode 100644 index aa58280..0000000 --- a/mix_video/test/Makefile.am +++ /dev/null @@ -1,2 +0,0 @@ -SUBDIRS = src -EXTRA_DIST = autogen.sh diff --git a/mix_video/test/configure.ac b/mix_video/test/configure.ac deleted file mode 100644 index 82235ad..0000000 --- a/mix_video/test/configure.ac +++ /dev/null @@ -1,53 +0,0 @@ - -AC_INIT([testmixvideo],[0.1],[tao.q.tao@intel.com]) - -dnl AC_CONFIG_MACRO_DIR([m4]) - -AM_INIT_AUTOMAKE($PACKAGE, $VERSION) -AM_INIT_AUTOMAKE([-Wall -Werror foreign]) - -AC_PROG_CC -AC_PROG_LIBTOOL - -AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes , no) - -dnl Give error and exit if we don't have pkgconfig -if test "x$HAVE_PKGCONFIG" = "xno"; then - AC_MSG_ERROR(you need to have pkgconfig installed !) -fi - -GLIB_REQ=2.18 -dnl Check for glib2 without extra fat, useful for the unversioned tool frontends -dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) -PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no) -if test "x$HAVE_GLIB" = "xno"; then - AC_MSG_ERROR(You need glib development packages installed !) -fi - -PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no) -if test "x$HAVE_GOBJECT" = "xno"; then - AC_MSG_ERROR(You need glib development packages installed !) -fi - -MIXVIDEO_REQ=0.5 -PKG_CHECK_MODULES(MIXVIDEO, mixvideo >= $MIXVIDEO_REQ,HAVE_MIXVIDEO=yes,HAVE_MIXVIDEO=no) -if test "x$HAVE_MIXVIDEO" = "xno"; then - AC_MSG_ERROR(You need mixvideo development packages installed !) -fi - -AC_ARG_ENABLE(optimization, AS_HELP_STRING([ --disable-optimization],[Do not optimize the library for speed. 
Might be required for debugging.]))
-AC_ARG_ENABLE(debuginfo, AS_HELP_STRING([ --enable-debuginfo ],[add -g to the compiler flags (to create debug information)]))
-
-if test "$enable_optimization" = "no" ; then
-  DEBUG=true
-else
-  DEBUG=false
-fi
-
-
-AC_CONFIG_HEADERS([config.h])
-AC_CONFIG_FILES([
-  Makefile
-  src/Makefile
-])
-AC_OUTPUT
diff --git a/mix_video/test/src/Makefile.am b/mix_video/test/src/Makefile.am
deleted file mode 100644
index 2c98fa4..0000000
--- a/mix_video/test/src/Makefile.am
+++ /dev/null
@@ -1,22 +0,0 @@
-#INTEL CONFIDENTIAL
-#Copyright 2009 Intel Corporation All Rights Reserved.
-#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
-#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
-#
-
-noinst_PROGRAMS = test_framemanager
-
-##############################################################################
-# sources used to compile
-test_framemanager_SOURCES = test_framemanager.c
-
-test_framemanager_CFLAGS = $(GLIB_CFLAGS) $(GOBJECT_CFLAGS) $(MIXVIDEO_CFLAGS)
-test_framemanager_LDADD = $(GLIB_LIBS) $(GOBJECT_LIBS) $(MIXVIDEO_LIBS)
-test_framemanager_LIBTOOLFLAGS = --tag=disable-static
-
-# headers we need but don't want installed
-noinst_HEADERS =
-
-
--
cgit v1.2.3


From 0fbcfceea9863411ecbe2af0aa625d38a759e084 Mon Sep 17 00:00:00 2001
From: Yong Yao
Date: Wed, 6 Apr 2011 10:25:25 +0800
Subject: Fix MPEG4 HD videos intermittently getting stuck during playback.

0001334 and 0001343 - MPEG4 HD videos could stall mid-playback: the
start-code scanner also matched the masked H.263 short start code in streams
that do not use short video headers, so ordinary payload bytes could be
misread as start codes. Search for short (SVH) start codes only when the
stream actually uses a short video header.
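The gated check, as a minimal sketch (a hypothetical helper distilled from
vbp_get_sc_pos_mp42; SHORT_THIRD_STARTCODE_BYTE is assumed to be 0x80, the
third byte of an H.263 picture start code):

#include <stdint.h>

#define THIRD_STARTCODE_BYTE        0x01
#define SHORT_THIRD_STARTCODE_BYTE  0x80  /* assumed value; H.263 PSC is 0000 0000 0000 0000 1000 00xx */

/* After two zero bytes have been seen (phase == 2), classify the third byte.
 * Short (SVH) start codes are searched only when the stream uses short video
 * headers; matching them in ordinary MPEG-4 payload produced the false hits
 * that stalled HD clips. */
static int classify_third_byte(uint8_t b, int svh_search,
                               uint8_t *normal_sc, uint8_t *short_sc)
{
    *normal_sc = (b == THIRD_STARTCODE_BYTE);                                 /* 00 00 01 xx */
    *short_sc  = svh_search ? ((b & 0xFC) == SHORT_THIRD_STARTCODE_BYTE) : 0; /* 00 00 8x    */
    return *normal_sc || *short_sc;
}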
Change-Id: Icba179cb081a16459e7da417cc30bee9f02bc97f --- mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index a4783ee..740235d 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -45,7 +45,8 @@ uint32 vbp_get_sc_pos_mp42( uint32 length, uint32 *sc_end_pos, uint8 *is_normal_sc, - uint8* resync_marker); + uint8* resync_marker, + const bool svh_search); void vbp_on_vop_mp42(vbp_context *pcontext, int list_index); void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index); @@ -249,7 +250,7 @@ uint32 vbp_parse_start_code_mp42(vbp_context *pcontext) while (1) { found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size- bytes_parsed, - &sc_end_pos, &is_normal_sc, &resync_marker); + &sc_end_pos, &is_normal_sc, &resync_marker,short_video_header); VTRACE("buf=%x, bytes_parsed=%d, unparsed=%d", (uint32)buf, bytes_parsed, size- bytes_parsed); VTRACE("found_sc=%d, cxt->list.num_items=%d, resync_marker=%d, ", @@ -632,7 +633,8 @@ uint32 vbp_get_sc_pos_mp42( uint32 length, uint32 *sc_end_pos, uint8 *is_normal_sc, - uint8 *resync_marker) + uint8 *resync_marker, + const bool svh_search) { uint8 *ptr = buf; uint32 size; @@ -713,8 +715,9 @@ uint32 vbp_get_sc_pos_mp42( if (phase == 2) { normal_sc = (*ptr == THIRD_STARTCODE_BYTE); - short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC)); - + if (svh_search) { + short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC)); + } *is_normal_sc = normal_sc; // at least 16-bit 0, may be GOB start code or -- cgit v1.2.3 From 487b4d18dad4946e5b762c8d4c74214468008387 Mon Sep 17 00:00:00 2001 From: xli111 Date: Thu, 14 Apr 2011 17:51:38 +0800 Subject: [BZ1235] sync libmix to froyo stable branch Change-Id: If25ea8cf0730289d0ae029f5e7462bd7051905d5 Signed-off-by: xli111 --- mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h | 4 + .../fw/codecs/h264/parser/h264parse_sps.c | 128 +++- mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c | 40 ++ .../fw/codecs/vc1/parser/vc1parse_common_defs.h | 23 +- .../fw/codecs/vc1/parser/viddec_vc1_parse.c | 49 ++ mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 55 +- mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 16 +- mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 2 + mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c | 8 + mix_video/src/mixframemanager.cpp | 36 +- mix_video/src/mixsurfacepool.cpp | 4 +- mix_video/src/mixvideoconfigparamsdec.cpp | 109 +++- mix_video/src/mixvideoconfigparamsdec.h | 61 +- mix_video/src/mixvideoconfigparamsdec_h264.cpp | 49 +- mix_video/src/mixvideoconfigparamsdec_h264.h | 24 +- mix_video/src/mixvideodecodeparams.cpp | 30 +- mix_video/src/mixvideodecodeparams.h | 27 + mix_video/src/mixvideodef.h | 1 + mix_video/src/mixvideoformat.cpp | 50 +- mix_video/src/mixvideoformat.h | 3 +- mix_video/src/mixvideoformat_h264.cpp | 140 +++- mix_video/src/mixvideoformat_h264.h | 2 +- mix_video/src/mixvideoformat_mp42.cpp | 23 +- mix_video/src/mixvideoformat_mp42.h | 2 +- mix_video/src/mixvideoformat_vc1.cpp | 709 +++++++-------------- mix_video/src/mixvideoformat_vc1.h | 1 + 26 files changed, 1028 insertions(+), 568 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h index 1976567..a3490b4 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h +++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h @@ -505,6 +505,7 @@ extern "C" { 
#ifdef VBP uint8_t video_full_range_flag; // u(1) uint8_t matrix_coefficients; // u(8) + uint32_t bit_rate_value; #endif uint8_t colour_description_present_flag; // u(1) @@ -785,6 +786,9 @@ extern "C" { uint8_t mb_adaptive_frame_field_flag; uint8_t direct_8x8_inference_flag; uint8_t frame_cropping_flag; +#ifdef VBP + uint8_t separate_colour_plane_flag; +#endif uint16_t vui_parameters_present_flag; uint16_t chroma_format_idc; diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c index 8cedd1f..4a329b0 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c @@ -3,6 +3,9 @@ #include "h264.h" #include "h264parse.h" +#ifdef VBP +#include +#endif /// SPS extension unit (unit_type = 13) @@ -300,11 +303,20 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param //SPS->constraint_set2_flag = h264_GetBits(pInfo, 1, "constraint_set2_flag"); //SPS->constraint_set3_flag = h264_GetBits(pInfo, 1, "constraint_set3_flag"); +#ifdef VBP + viddec_pm_get_bits(parent, &code, 5); //constraint flag set0...set4 (h.264 Spec v2009) + SPS->constraint_set_flags = (uint8_t)code; + + //// reserved_zero_3bits + viddec_pm_get_bits(parent, (uint32_t *)&code, 3); //3bits zero reserved (h.264 Spec v2009) +#else + viddec_pm_get_bits(parent, &code, 4); SPS->constraint_set_flags = (uint8_t)code; //// reserved_zero_4bits viddec_pm_get_bits(parent, (uint32_t *)&code, 4); +#endif viddec_pm_get_bits(parent, &code, 8); SPS->level_idc = (uint8_t)code; @@ -338,6 +350,9 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param //// seq_parameter_set_id ---[0,31] if (SPS->seq_parameter_set_id > MAX_NUM_SPS -1) break; +#ifdef VBP + SPS->sps_disp.separate_colour_plane_flag = 0; +#endif if ((SPS->profile_idc == h264_ProfileHigh) || (SPS->profile_idc == h264_ProfileHigh10) || (SPS->profile_idc == h264_ProfileHigh422) || (SPS->profile_idc == h264_ProfileHigh444) ) @@ -349,6 +364,12 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param SPS->sps_disp.chroma_format_idc = (uint8_t)data; //if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) {} +#ifdef VBP + if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) { + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.separate_colour_plane_flag = (uint8_t)data; + } +#endif //// bit_depth_luma_minus8 ---[0,4], -----only support 8-bit pixel data = h264_GetVLCElement(parent, pInfo, false); if ( data) @@ -502,13 +523,112 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param if (SPS->sps_disp.vui_parameters_present_flag) { -#ifndef VBP // Ignore VUI parsing result - ret = +#ifndef VBP + ret = h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used); +#else + // Ignore VUI parsing result + h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used); + if (SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag) + { + i = SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; + uint32_t bit_rate_value = 0; + bit_rate_value = pVUI_Seq_Not_Used->nal_hrd_parameters.bit_rate_value_minus1[i] + 1; + bit_rate_value *= pow(2, 6 + pVUI_Seq_Not_Used->nal_hrd_bit_rate_scale); + SPS->sps_disp.vui_seq_parameters.bit_rate_value = bit_rate_value; + } + /* + else if (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag) + { + i = SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; + uint32_t bit_rate_value = 0; + 
bit_rate_value = pVUI_Seq_Not_Used->vcl_hrd_parameters.bit_rate_value_minus1[i] + 1; + bit_rate_value *= pow(2, 6 + pVUI_Seq_Not_Used->vcl_hrd_bit_rate_scale); + SPS->sps_disp.vui_seq_parameters.bit_rate_value = bit_rate_value; + }*/ + #endif - h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used); } - } while (0); +#ifdef VBP + if (SPS->sps_disp.vui_seq_parameters.bit_rate_value == 0) + { + int maxBR = 0; + switch(SPS->level_idc) + { + case h264_Level1: + maxBR = 64; + break; + + case h264_Level1b: + maxBR = 128; + break; + + case h264_Level11: + maxBR = 192; + break; + + case h264_Level12: + maxBR = 384; + break; + + case h264_Level13: + maxBR = 768; + break; + + case h264_Level2: + maxBR = 2000; + break; + + case h264_Level21: + case h264_Level22: + maxBR = 4000; + break; + + case h264_Level3: + maxBR = 10000; + break; + + case h264_Level31: + maxBR = 14000; + break; + + case h264_Level32: + case h264_Level4: + maxBR = 20000; + break; + + case h264_Level41: + case h264_Level42: + maxBR = 50000; + break; + + case h264_Level5: + maxBR = 135000; + break; + + case h264_Level51: + maxBR = 240000; + break; + } + + uint32_t cpbBrVclFactor = 1200; + if (SPS->profile_idc == 100) + { + cpbBrVclFactor = 1500; // HIGH + } + else if (SPS->profile_idc == 110) + { + cpbBrVclFactor = 3600; // HIGH 10 + } + else if (SPS->profile_idc == 122 || + SPS->profile_idc == 144) + { + cpbBrVclFactor = 4800; // HIGH 4:2:2 and HIGH 4:4:4 + } + + SPS->sps_disp.vui_seq_parameters.bit_rate_value = maxBR * cpbBrVclFactor; + } +#endif //h264_Parse_rbsp_trailing_bits(pInfo); diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c index fb59613..0fe26c9 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c @@ -37,6 +37,13 @@ vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo) result = viddec_pm_get_bits(ctxt, &rcv.struct_a_rcv, 32); md->width = rcv.struct_a.HORIZ_SIZE; md->height = rcv.struct_a.VERT_SIZE; +#ifdef VBP + //The HRD rate and HRD buffer size may be encoded according to a 64 bit sequence header data structure B + //if there is no data strcuture B metadata contained in the bitstream, we will not be able to get the + //bitrate data, hence we set it to 0 for now + md->HRD_NUM_LEAKY_BUCKETS = 0; + md->hrd_initial_state.sLeakyBucket[0].HRD_RATE = 0; +#endif result = viddec_pm_get_bits(ctxt, &rcv.struct_c_rcv, 32); md->PROFILE = rcv.struct_c.PROFILE >> 2; @@ -203,11 +210,44 @@ vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) result = viddec_pm_get_bits(ctxt, &tempValue, 5); sh.HRD_NUM_LEAKY_BUCKETS = tempValue; md->HRD_NUM_LEAKY_BUCKETS = sh.HRD_NUM_LEAKY_BUCKETS; +#ifndef VBP // Skip the rest of the parsing - hrdinfo is not required for decode or for attributes +#else + { + uint8_t count; + uint8_t bitRateExponent; + uint8_t bufferSizeExponent; + + /* bit_rate_exponent */ + result = viddec_pm_get_bits(ctxt, &tempValue, 4); + bitRateExponent = (uint8_t)(tempValue + 6); + + /* buffer_size_exponent */ + result = viddec_pm_get_bits(ctxt, &tempValue, 4); + bufferSizeExponent = (uint8_t)(tempValue + 4); + md->hrd_initial_state.BUFFER_SIZE_EXPONENT = bufferSizeExponent; + + for(count = 0; count < sh.HRD_NUM_LEAKY_BUCKETS; count++) + { + /* hrd_rate */ + result = viddec_pm_get_bits(ctxt, &tempValue, 16); + md->hrd_initial_state.sLeakyBucket[count].HRD_RATE = + (uint32_t)(tempValue + 1) << bitRateExponent; + + /* hrd_buffer */ + result = viddec_pm_get_bits(ctxt, 
&tempValue, 16); + md->hrd_initial_state.sLeakyBucket[count].HRD_BUFFER = + (uint32_t)(tempValue + 1) << bufferSizeExponent; + } + } +#endif } else { md->HRD_NUM_LEAKY_BUCKETS = 0; +#ifdef VBP + md->hrd_initial_state.sLeakyBucket[0].HRD_RATE = 0; +#endif } md->widthMB = (((md->width + 1) * 2) + 15) / VC1_PIXEL_IN_LUMA; diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h index 821df32..e474800 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h @@ -141,6 +141,27 @@ extern "C" { uint32_t *databits; } vc1_Bitplane; +#ifdef VBP +#define VC1_MAX_HRD_NUM_LEAKY_BUCKETS 32 + + typedef struct + { + uint32_t HRD_RATE; /** Maximum bit rate in bits per second */ + uint32_t HRD_BUFFER; /** Buffer size in bits */ + uint32_t HRD_FULLNESS; /** Buffer fullness in complete bits */ + uint32_t HRD_FULLFRACTION; /** Numerator of fractional bit buffer fullness count */ + uint32_t HRD_FULLDENOMINATOR; /** Denominator of fractional bit buffer fullness count */ + } vc1_leaky_bucket; + + typedef struct _vc1_hrd_state + { + uint8_t BIT_RATE_EXPONENT; /** Buckets + (0 if none specified) */ + uint8_t BUFFER_SIZE_EXPONENT; + vc1_leaky_bucket sLeakyBucket[VC1_MAX_HRD_NUM_LEAKY_BUCKETS]; /** Per-bucket information */ + } vc1_hrd_state, *vc1_hrd_state_ptr; +#endif + /** This structure represents all bitstream metadata needed for register programming. */ typedef struct { @@ -214,7 +235,7 @@ extern "C" { uint8_t ASPECT_RATIO; uint8_t ASPECT_HORIZ_SIZE; uint8_t ASPECT_VERT_SIZE; - + vc1_hrd_state hrd_initial_state; #endif } vc1_metadata_t; diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c index a1b47ff..8a90804 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c @@ -119,6 +119,55 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) parser->sc_seen_since_last_wkld |= VC1_SC_SEQ; #ifdef VBP parser->start_code = VC1_SC_SEQ; + if (parser->info.metadata.HRD_NUM_LEAKY_BUCKETS == 0) + { + if (parser->info.metadata.PROFILE == VC1_PROFILE_SIMPLE) + { + switch(parser->info.metadata.LEVEL) + { + case 0: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 96000; + break; + case 1: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 384000; + break; + } + } + else if (parser->info.metadata.PROFILE == VC1_PROFILE_MAIN) + { + switch(parser->info.metadata.LEVEL) + { + case 0: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 2000000; + break; + case 1: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 10000000; + break; + case 2: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 20000000; + break; + } + } + else if (parser->info.metadata.PROFILE == VC1_PROFILE_ADVANCED) + { + switch(parser->info.metadata.LEVEL) + { + case 0: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 2000000; + break; + case 1: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 10000000; + break; + case 2: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 20000000; + break; + case 3: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 45000000; + break; + } + } + } + #endif break; } diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c 
b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index dd3c12d..f01dc3b 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -792,8 +792,12 @@ static void vbp_set_codec_data_h264( codec_data->level_idc = parser->info.active_SPS.level_idc; - codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2; - + /*constraint flag sets (h.264 Spec v2009)*/ + codec_data->constraint_set0_flag = (parser->info.active_SPS.constraint_set_flags & 0x10) >> 4; + codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x8) >> 3; + codec_data->constraint_set2_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2; + codec_data->constraint_set3_flag = (parser->info.active_SPS.constraint_set_flags & 0x2) >> 1; + codec_data->constraint_set4_flag = parser->info.active_SPS.constraint_set_flags & 0x1; /* reference frames */ codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames; @@ -817,6 +821,41 @@ static void vbp_set_codec_data_h264( codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16; + /* cropping information */ + codec_data->crop_left = 0; + codec_data->crop_right = 0; + codec_data->crop_top = 0; + codec_data->crop_bottom = 0; + if(parser->info.active_SPS.sps_disp.frame_cropping_flag) { + int CropUnitX = 0, CropUnitY = 0, SubWidthC = 0, SubHeightC = 0; + int ChromaArrayType = 0; + if(parser->info.active_SPS.sps_disp.separate_colour_plane_flag == 0) { + if(parser->info.active_SPS.sps_disp.chroma_format_idc == 1) { + SubWidthC = 2; + SubHeightC = 2; + } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 2) { + SubWidthC = 2; + SubHeightC = 1; + } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 3) { + SubWidthC = 1; + SubHeightC = 1; + } + ChromaArrayType = parser->info.active_SPS.sps_disp.chroma_format_idc; + } + + if(ChromaArrayType == 0) { + CropUnitX = 1; + CropUnitY = 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + } else { + CropUnitX = SubWidthC; + CropUnitY = SubHeightC * ( 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag); + } + + codec_data->crop_left = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset; + codec_data->crop_right = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset; // + 1; + codec_data->crop_top = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset; + codec_data->crop_bottom = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset; // + 1; + } /* aspect ratio */ if (parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag) @@ -878,6 +917,7 @@ static void vbp_set_codec_data_h264( codec_data->matrix_coefficients = 2; } + codec_data->bit_rate = parser->info.active_SPS.sps_disp.vui_seq_parameters.bit_rate_value; /* picture order type and count */ codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; @@ -1340,6 +1380,9 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext) int i = 0; viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + //Enable emulation prevention + cxt->getbits.is_emul_reqd = 1; + /* check if configuration data is start code prefix */ viddec_sc_parse_cubby_cxt_t cubby = cxt->parse_cubby; viddec_parser_ops_t *ops = pcontext->parser_ops; @@ -1725,17 +1768,15 @@ uint32 vbp_process_parsing_result_h264( vbp_context 
*pcontext, int i) break; case h264_NAL_UNIT_TYPE_SPS: - if (query_data->has_sps) - query_data->new_sps = 1; + + query_data->new_sps = 1; query_data->has_sps = 1; query_data->has_pps = 0; ITRACE("SPS header is parsed."); break; case h264_NAL_UNIT_TYPE_PPS: - if (query_data->has_pps || query_data->new_sps) - query_data->new_pps = 1; - + query_data->new_pps = 1; query_data->has_pps = 1; ITRACE("PPS header is parsed."); break; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index 0ef4fbf..ccf8e00 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -61,6 +61,8 @@ typedef struct _vbp_codec_data_mp42 uint8 par_width; uint8 par_height; + // bit rate + int bit_rate; } vbp_codec_data_mp42; typedef struct _vbp_slice_data_mp42 @@ -108,7 +110,12 @@ typedef struct _vbp_codec_data_h264 uint8 profile_idc; uint8 level_idc; + /*constraint flag sets (h.264 Spec v2009)*/ + uint8 constraint_set0_flag; uint8 constraint_set1_flag; + uint8 constraint_set2_flag; + uint8 constraint_set3_flag; + uint8 constraint_set4_flag; uint8 num_ref_frames; uint8 gaps_in_frame_num_value_allowed_flag; @@ -126,6 +133,12 @@ typedef struct _vbp_codec_data_h264 uint16 sar_width; uint16 sar_height; + /* cropping information */ + int crop_top; + int crop_bottom; + int crop_left; + int crop_right; + /* video fromat */ // default 5 unspecified @@ -254,7 +267,8 @@ typedef struct _vbp_codec_data_vc1 uint8 ASPECT_HORIZ_SIZE; uint8 ASPECT_VERT_SIZE; - + // bit rate + int bit_rate; } vbp_codec_data_vc1; typedef struct _vbp_slice_data_vc1 diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index 740235d..e1a0829 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -346,6 +346,8 @@ void vbp_fill_codec_data(vbp_context *pcontext) vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; vbp_codec_data_mp42* codec_data = &(query_data->codec_data); + codec_data->bit_rate = parser->info.VisualObject.VideoObject.VOLControlParameters.bit_rate; + codec_data->profile_and_level_indication = parser->info.profile_and_level_indication; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c index 21c431a..f84dbd7 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c @@ -692,6 +692,14 @@ uint32 vbp_populate_query_data_vc1(vbp_context *pcontext) /* first we get the SH/EP data. Can we cut down on this? */ vbp_codec_data_vc1 *se_data = query_data->se_data; + + + uint32_t curHrdNum = seqLayerHeader->HRD_NUM_LEAKY_BUCKETS; + + se_data->bit_rate = curHrdNum ? 
+ seqLayerHeader->hrd_initial_state.sLeakyBucket[curHrdNum -1].HRD_RATE : + seqLayerHeader->hrd_initial_state.sLeakyBucket[0].HRD_RATE; + se_data->PROFILE = seqLayerHeader->PROFILE; se_data->LEVEL = seqLayerHeader->LEVEL; se_data->POSTPROCFLAG = seqLayerHeader->POSTPROCFLAG; diff --git a/mix_video/src/mixframemanager.cpp b/mix_video/src/mixframemanager.cpp index 31b84fc..adfc53e 100644 --- a/mix_video/src/mixframemanager.cpp +++ b/mix_video/src/mixframemanager.cpp @@ -236,10 +236,13 @@ MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER) { uint32 num; mix_videoframe_get_displayorder(mvf, &num); - LOG_V("pic %d is enqueued.\n", num); + uint64 ts; + mix_videoframe_get_timestamp(mvf, &ts); + LOG_V("pic %d is enqueued, ts = %"INT64_FORMAT"\n", num, ts); } - if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP) { + else// if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP) { + { uint64 ts; mix_videoframe_get_timestamp(mvf, &ts); LOG_V("ts %"UINT64_FORMAT" is enqueued.\n", ts); @@ -337,6 +340,11 @@ MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoF { first_i_or_p = p; } + + // When there are more than 1 i or p frame in queue, + // we shouldn't update the B frame's time stamp again. + if (num_i_or_p > 1) + break; } // if there are more than one reference frame in the list, the first one is dequeued. @@ -461,12 +469,17 @@ MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVide uint32 picnum; uint32 next_picnum_pending; + int least_poc_index; + uint32 least_poc; + len = j_slist_length(fm->frame_list); retry: next_picnum_pending = (uint32)-1; + least_poc_index = -1; + least_poc = (uint32)-1; - for (i = 0; i < len; i++) + for (i = 0; i < len; ) { p = (MixVideoFrame*)j_slist_nth_data(fm->frame_list, i); mix_videoframe_get_displayorder(p, &picnum); @@ -482,6 +495,23 @@ retry: return MIX_RESULT_SUCCESS; } + if(picnum == 0) { + if(i == 0) { + fm->next_frame_picnumber = 0; + } else { + fm->next_frame_picnumber = least_poc; + i = least_poc_index; + } + continue; + } + if(picnum < least_poc) { + least_poc = picnum; + least_poc_index = i; + LOG_V("least_poc_index = %d\n", least_poc_index); + } + + ++i; + if (picnum > fm->next_frame_picnumber && picnum < next_picnum_pending) { diff --git a/mix_video/src/mixsurfacepool.cpp b/mix_video/src/mixsurfacepool.cpp index 5c6899c..d5328bc 100644 --- a/mix_video/src/mixsurfacepool.cpp +++ b/mix_video/src/mixsurfacepool.cpp @@ -392,9 +392,9 @@ MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool * obj, Mix obj->mLock.unlock(); - LOG_E( "Element is null\n"); + LOG_E( "Element associated with the given frame index is null\n"); - return MIX_RESULT_FAIL; + return MIX_RESULT_DROPFRAME; } else { //Concat the element to the in_use_list obj->in_use_list = j_slist_concat(obj->in_use_list, element); diff --git a/mix_video/src/mixvideoconfigparamsdec.cpp b/mix_video/src/mixvideoconfigparamsdec.cpp index 3500b3c..0d6e72f 100644 --- a/mix_video/src/mixvideoconfigparamsdec.cpp +++ b/mix_video/src/mixvideoconfigparamsdec.cpp @@ -32,25 +32,30 @@ MixVideoConfigParamsDec::MixVideoConfigParamsDec() - :frame_order_mode(MIX_FRAMEORDER_MODE_DISPLAYORDER) - ,mime_type(NULL) - ,frame_rate_num(0) - ,frame_rate_denom(0) - ,picture_width(0) - ,picture_height(0) - ,raw_format(0) - ,rate_control(0) - ,mixbuffer_pool_size(0) - ,extra_surface_allocation(0) - ,video_range(0) - ,color_matrix(0) - ,bit_rate(0) - ,par_num(0) - ,par_denom(0) - ,reserved1(NULL) - ,reserved2(NULL) - 
,reserved3(NULL) - ,reserved4(NULL) + :frame_order_mode(MIX_FRAMEORDER_MODE_DISPLAYORDER) + ,mime_type(NULL) + ,frame_rate_num(1) + ,frame_rate_denom(1) + ,picture_width(0) + ,picture_height(0) + ,raw_format(0) + ,rate_control(0) + ,mixbuffer_pool_size(0) + ,extra_surface_allocation(0) + ,video_range(0) + ,color_matrix(0) + ,bit_rate(0) + ,par_num(0) + ,par_denom(0) + ,crop_left(0) + ,crop_right(0) + ,crop_top(0) + ,crop_bottom(0) + ,error_concealment(TRUE) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { memset(&this->header, 0, sizeof(header)); @@ -91,6 +96,11 @@ bool MixVideoConfigParamsDec::copy(MixParams *target) const { this_target->bit_rate = this->bit_rate; this_target->par_num = this->par_num; this_target->par_denom = this->par_denom; + this_target->crop_left = this->crop_left; + this_target->crop_right = this->crop_right; + this_target->crop_top = this->crop_top; + this_target->crop_bottom = this->crop_bottom; + this_target->error_concealment = this->error_concealment; /* copy properties of non-primitive */ @@ -214,6 +224,27 @@ bool MixVideoConfigParamsDec::equal(MixParams* obj) const { if (this->par_denom != this_obj->par_denom) { goto not_equal; } + + if (this->crop_left != this_obj->crop_left) + { + goto not_equal; + } + if (this->crop_right != this_obj->crop_right) + { + goto not_equal; + } + if (this->crop_top != this_obj->crop_top) + { + goto not_equal; + } + if (this->crop_bottom != this_obj->crop_bottom) + { + goto not_equal; + } + if (this->error_concealment != this_obj->error_concealment) + { + goto not_equal; + } ret = TRUE; not_equal: @@ -502,4 +533,44 @@ MIX_RESULT mix_videoconfigparamsdec_get_pixel_aspect_ratio( return MIX_RESULT_SUCCESS; } +MIX_RESULT mix_videoconfigparamsdec_set_cropping_info(MixVideoConfigParamsDec * obj, + uint crop_left, uint crop_right, uint crop_top, uint crop_bottom) +{ + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + obj->crop_left = crop_left; + obj->crop_right = crop_right; + obj->crop_top = crop_top; + obj->crop_bottom = crop_bottom; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_cropping_info(MixVideoConfigParamsDec * obj, + uint *crop_left, uint *crop_right, uint *crop_top, uint *crop_bottom) +{ + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, crop_left, crop_right); + if(!crop_top || !crop_bottom ) { + return MIX_RESULT_NULL_PTR; + } + *crop_left = obj->crop_left; + *crop_right = obj->crop_right; + *crop_top = obj->crop_top; + *crop_bottom = obj->crop_bottom; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_set_error_concealment ( + MixVideoConfigParamsDec * obj, bool error_concealment) { + MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); + + obj->error_concealment = error_concealment; + return MIX_RESULT_SUCCESS; +} + +MIX_RESULT mix_videoconfigparamsdec_get_error_concealment( + MixVideoConfigParamsDec * obj, bool *error_concealment) { + MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, error_concealment); + + *error_concealment = obj->error_concealment; + return MIX_RESULT_SUCCESS; +} diff --git a/mix_video/src/mixvideoconfigparamsdec.h b/mix_video/src/mixvideoconfigparamsdec.h index f492128..a3778ee 100644 --- a/mix_video/src/mixvideoconfigparamsdec.h +++ b/mix_video/src/mixvideoconfigparamsdec.h @@ -92,7 +92,7 @@ public: uint8 color_matrix; /* bit rate in bps, output only */ - uint8 bit_rate; + uint bit_rate; /* Pixel aspect ratio numerator value */ uint par_num; @@ -100,6 +100,14 @@ public: /* Pixel aspect ratio denominator value */ 
uint par_denom; + uint crop_left; + uint crop_right; + uint crop_top; + uint crop_bottom; + + /* Error concealment enabled/disabled */ + bool error_concealment; + /* Reserved for future use */ void *reserved1; @@ -452,6 +460,57 @@ MIX_RESULT mix_videoconfigparamsdec_set_pixel_aspect_ratio(MixVideoConfigParamsD MIX_RESULT mix_videoconfigparamsdec_get_pixel_aspect_ratio(MixVideoConfigParamsDec * obj, uint * par_num, uint * par_denom); +/** + * mix_videoconfigparamsdec_set_cropping_info: + * @obj: #MixVideoConfigParamsDec object + * @crop_left: left cropping value + * @crop_right: right cropping value + * @crop_top: top cropping value + * @crop_bottom: bottom cropping value + * @returns: Common Video Error Return Codes + * + * Set cropping information + */ +MIX_RESULT mix_videoconfigparamsdec_set_cropping_info(MixVideoConfigParamsDec * obj, + uint crop_left, uint crop_right, uint crop_top, uint crop_bottom); + +/** + * mix_videoconfigparamsdec_get_cropping_info: + * @obj: #MixVideoConfigParamsDec object + * @crop_left: left cropping value + * @crop_right: right cropping value + * @crop_top: top cropping value + * @crop_bottom: bottom cropping value + * @returns: Common Video Error Return Codes + * + * Get cropping information + */ +MIX_RESULT mix_videoconfigparamsdec_get_cropping_info(MixVideoConfigParamsDec * obj, + uint *crop_left, uint *crop_right, uint *crop_top, uint *crop_bottom); + + +/** + * mix_videoconfigparamsdec_set_error_concealment: + * @obj: #MixVideoConfigParamsDec object + * @error_concealment: A flag to indicate whether error concealment is enabled for decoder + * @returns: Common Video Error Return Codes + * + * Set the flag that indicates whether error concealment is enabled + */ +MIX_RESULT mix_videoconfigparamsdec_set_error_concealment (MixVideoConfigParamsDec * obj, + bool error_concealment); + +/** + * mix_videoconfigparamsdec_get_error_concealment: + * @obj: #MixVideoConfigParamsDec object + * @error_concealment: the flag to be returned that indicates error concealment is enabled for decoder + * @returns: Common Video Error Return Codes + * + * Get the flag that indicates whether error concealment is enabled + */ +MIX_RESULT mix_videoconfigparamsdec_get_error_concealment(MixVideoConfigParamsDec * obj, + bool *error_concealment); + /* TODO: Add getters and setters for other properties */ diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.cpp b/mix_video/src/mixvideoconfigparamsdec_h264.cpp index 64dfdf7..bfbcacc 100644 --- a/mix_video/src/mixvideoconfigparamsdec_h264.cpp +++ b/mix_video/src/mixvideoconfigparamsdec_h264.cpp @@ -16,10 +16,11 @@ No license under any patent, copyright, trade secret or other intellectual prope #include "mixvideoconfigparamsdec_h264.h" MixVideoConfigParamsDecH264::MixVideoConfigParamsDecH264() - :reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) { + :va_setup_flag(FALSE) + ,reserved1(NULL) + ,reserved2(NULL) + ,reserved3(NULL) + ,reserved4(NULL) { } MixVideoConfigParamsDecH264::~MixVideoConfigParamsDecH264() { } @@ -27,6 +28,8 @@ MixVideoConfigParamsDecH264::~MixVideoConfigParamsDecH264() { bool MixVideoConfigParamsDecH264::copy(MixParams *target) const { bool ret = FALSE; MixVideoConfigParamsDecH264 * this_target = MIX_VIDEOCONFIGPARAMSDEC_H264(target); + + this_target->va_setup_flag = this->va_setup_flag; if (NULL != this_target) ret = MixVideoConfigParamsDec::copy(target); return ret; @@ -35,6 +38,18 @@ bool MixVideoConfigParamsDecH264::copy(MixParams *target) const { bool 
MixVideoConfigParamsDecH264::equal(MixParams* obj) const {
    bool ret = FALSE;
    MixVideoConfigParamsDecH264 * this_obj = MIX_VIDEOCONFIGPARAMSDEC_H264(obj);
+
+    if (this->va_setup_flag != this_obj->va_setup_flag) {
+        goto not_equal;
+    }
+
+    ret = TRUE;
+
+not_equal:
+
+    if (ret != TRUE) {
+        return ret;
+    }
    if (NULL != this_obj)
        ret = MixVideoConfigParamsDec::equal(this_obj);
    return ret;
@@ -63,4 +78,28 @@ mix_videoconfigparamsdec_h264_ref (MixVideoConfigParamsDecH264 * mix) {
    return mix;
}

-/* TODO: Add getters and setters for properties if any */
+#define MIX_VIDEOCONFIGPARAMSDEC_H264_SETTER_CHECK_INPUT(obj) \
+    if(!obj) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEOCONFIGPARAMSDEC_H264(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOCONFIGPARAMSDEC_H264_GETTER_CHECK_INPUT(obj, prop) \
+    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+    if(!MIX_IS_VIDEOCONFIGPARAMSDEC_H264(obj)) return MIX_RESULT_FAIL; \
+
+
+MIX_RESULT mix_videoconfigparamsdec_h264_set_va_setup_flag (MixVideoConfigParamsDecH264 * obj,
+        bool va_setup_flag) {
+
+    MIX_VIDEOCONFIGPARAMSDEC_H264_SETTER_CHECK_INPUT (obj);
+    obj->va_setup_flag = va_setup_flag;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_va_setup_flag (MixVideoConfigParamsDecH264 * obj,
+        bool *va_setup_flag) {
+
+    MIX_VIDEOCONFIGPARAMSDEC_H264_GETTER_CHECK_INPUT (obj, va_setup_flag);
+    *va_setup_flag = obj->va_setup_flag;
+    return MIX_RESULT_SUCCESS;
+}
+
diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.h b/mix_video/src/mixvideoconfigparamsdec_h264.h
index ee8b786..e461765 100644
--- a/mix_video/src/mixvideoconfigparamsdec_h264.h
+++ b/mix_video/src/mixvideoconfigparamsdec_h264.h
@@ -44,6 +44,7 @@ public:
    /*< public > */

    /* TODO: Add H.264 configuration paramters */
+    bool va_setup_flag;

    /* Reserved for future use */
    void *reserved1;
@@ -95,5 +96,26 @@ MixVideoConfigParamsDecH264

/* Class Methods */

-/* TODO: Add getters and setters for other properties */
+/**
+ * mix_videoconfigparamsdec_h264_set_va_setup_flag:
+ * @obj: #MixVideoConfigParamsDecH264 object
+ * @va_setup_flag: The flag to enable/disable setting up VA directly
+ * @returns: Common Video Error Return Codes
+ *
+ * Set the flag to enable/disable setting up VA directly
+ */
+MIX_RESULT mix_videoconfigparamsdec_h264_set_va_setup_flag (MixVideoConfigParamsDecH264 * obj,
+        bool va_setup_flag);
+
+/**
+ * mix_videoconfigparamsenc_h264_get_va_setup_flag:
+ * @obj: #MixVideoConfigParamsDecH264 object
+ * @va_setup_flag: The flag to enable/disable setting up VA directly
+ * @returns: Common Video Error Return Codes
+ *
+ * Get the flag to enable/disable setting up VA directly
+ */
+MIX_RESULT mix_videoconfigparamsenc_h264_get_va_setup_flag (MixVideoConfigParamsDecH264 * obj,
+        bool *va_setup_flag);
+
#endif /* __MIX_VIDEOCONFIGPARAMSDEC_H264_H__ */
diff --git a/mix_video/src/mixvideodecodeparams.cpp b/mix_video/src/mixvideodecodeparams.cpp
index a0965eb..a6979f8 100644
--- a/mix_video/src/mixvideodecodeparams.cpp
+++ b/mix_video/src/mixvideodecodeparams.cpp
@@ -26,13 +26,14 @@

MixVideoDecodeParams::MixVideoDecodeParams()
-        :timestamp(0)
-        ,discontinuity(FALSE)
-        ,new_sequence(FALSE)
-        ,reserved1(NULL)
-        ,reserved2(NULL)
-        ,reserved3(NULL)
-        ,reserved4(NULL) {
+        :timestamp(0)
+        ,discontinuity(FALSE)
+        ,new_sequence(FALSE)
+        ,complete_frame(FALSE)
+        ,reserved1(NULL)
+        ,reserved2(NULL)
+        ,reserved3(NULL)
+        ,reserved4(NULL) {
}

MixVideoDecodeParams::~MixVideoDecodeParams() {
@@ -123,3 +124,18 @@ MIX_RESULT mix_videodecodeparams_get_new_sequence(
    return MIX_RESULT_SUCCESS;
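/* Usage sketch (not in the original sources; the decode entry point and
 * variable names are assumed): a caller that knows its buffers end a frame
 * can ask for immediate frame completion:
 *
 *     mix_videodecodeparams_set_complete_frame(decode_params, TRUE);
 *     mix_video_decode(video, buffers, 1, decode_params);
 *
 * With the flag set, the H.264 and MP42 formats run _decode_end() after the
 * last buffer instead of holding the picture until the next frame arrives. */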
}
+MIX_RESULT mix_videodecodeparams_set_complete_frame(MixVideoDecodeParams * obj,
+        bool complete_frame) {
+    MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj);
+    obj->complete_frame = complete_frame;
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videodecodeparams_get_complete_frame(MixVideoDecodeParams * obj,
+        bool *complete_frame) {
+    MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, complete_frame);
+    *complete_frame = obj->complete_frame;
+    return MIX_RESULT_SUCCESS;
+}
+
+
diff --git a/mix_video/src/mixvideodecodeparams.h b/mix_video/src/mixvideodecodeparams.h
index 1da4cdb..aa5e799 100644
--- a/mix_video/src/mixvideodecodeparams.h
+++ b/mix_video/src/mixvideodecodeparams.h
@@ -53,6 +53,9 @@ public:
    /* output only, indicate if stream contains a new sequence */
    bool new_sequence;

+    /* Indicates a complete frame */
+    bool complete_frame;
+
    /* Reserved for future use */
    void *reserved1;

@@ -164,5 +167,29 @@ MIX_RESULT mix_videodecodeparams_set_new_sequence(MixVideoDecodeParams * obj,
MIX_RESULT mix_videodecodeparams_get_new_sequence(MixVideoDecodeParams * obj,
        bool *new_sequence);

+/**
+ * mix_videodecodeparams_get_complete_frame:
+ * @obj: #MixVideoDecodeParams object
+ * @complete_frame: complete_frame flag to be returned
+ * @returns: Common Video Error Return Codes
+ *
+ * Get complete_frame flag
+ */
+MIX_RESULT mix_videodecodeparams_get_complete_frame(MixVideoDecodeParams * obj,
+        bool *complete_frame);
+
+
+/**
+ * mix_videodecodeparams_set_complete_frame:
+ * @obj: #MixVideoDecodeParams object
+ * @complete_frame: Flag to indicate if frame is a complete frame or not
+ * @returns: Common Video Error Return Codes
+ *
+ * Set complete_frame flag
+ */
+MIX_RESULT mix_videodecodeparams_set_complete_frame(MixVideoDecodeParams * obj,
+        bool complete_frame);
+
+
#endif /* __MIX_VIDEODECODEPARAMS_H__ */
diff --git a/mix_video/src/mixvideodef.h b/mix_video/src/mixvideodef.h
index aea6ad6..1c0aa02 100644
--- a/mix_video/src/mixvideodef.h
+++ b/mix_video/src/mixvideodef.h
@@ -72,6 +72,7 @@ typedef struct _MixIOVec {
    uchar *data;
    int buffer_size;
    int data_size;
+    bool is_key_frame;
} MixIOVec;

typedef struct _MixRect {
diff --git a/mix_video/src/mixvideoformat.cpp b/mix_video/src/mixvideoformat.cpp
index 09984cb..a566694 100644
--- a/mix_video/src/mixvideoformat.cpp
+++ b/mix_video/src/mixvideoformat.cpp
@@ -15,30 +15,32 @@
#define MIXUNREF(obj, unref) if(obj) { unref(obj); obj = NULL; }

MixVideoFormat::MixVideoFormat()
-    :mLock()
-    ,initialized(FALSE)
-    ,va_initialized(FALSE)
-    ,framemgr(NULL)
-    ,surfacepool(NULL)
-    ,inputbufpool(NULL)
-    ,inputbufqueue(NULL)
-    ,va_display(NULL)
-    ,va_context(VA_INVALID_ID)
-    ,va_config(VA_INVALID_ID)
-    ,va_surfaces(NULL)
-    ,va_num_surfaces(0)
-    ,mime_type(NULL)
-    ,frame_rate_num(0)
-    ,frame_rate_denom(0)
-    ,picture_width(0)
-    ,picture_height(0)
-    ,parse_in_progress(FALSE)
-    ,current_timestamp((uint64)-1)
-    ,end_picture_pending(FALSE)
-    ,video_frame(NULL)
-    ,extra_surfaces(0)
-    ,config_params(NULL)
-    ,ref_count(1) {
+    :mLock()
+    ,initialized(FALSE)
+    ,va_initialized(FALSE)
+    ,framemgr(NULL)
+    ,surfacepool(NULL)
+    ,inputbufpool(NULL)
+    ,inputbufqueue(NULL)
+    ,va_display(NULL)
+    ,va_context(VA_INVALID_ID)
+    ,va_config(VA_INVALID_ID)
+    ,va_surfaces(NULL)
+    ,va_num_surfaces(0)
+    ,mime_type(NULL)
+    ,frame_rate_num(0)
+    ,frame_rate_denom(0)
+    ,picture_width(0)
+    ,picture_height(0)
+    ,parse_in_progress(FALSE)
+    ,current_timestamp((uint64)-1)
+    ,end_picture_pending(FALSE)
+    ,video_frame(NULL)
+    ,extra_surfaces(0)
+    ,config_params(NULL)
+    ,error_concealment(TRUE)
+
,ref_count(1) +{ } MixVideoFormat::~MixVideoFormat() { diff --git a/mix_video/src/mixvideoformat.h b/mix_video/src/mixvideoformat.h index cda1804..bfaa440 100644 --- a/mix_video/src/mixvideoformat.h +++ b/mix_video/src/mixvideoformat.h @@ -121,8 +121,9 @@ public: bool end_picture_pending; MixVideoFrame* video_frame; uint extra_surfaces; + bool error_concealment; MixVideoConfigParamsDec * config_params; - uint ref_count ; + int ref_count; }; diff --git a/mix_video/src/mixvideoformat_h264.cpp b/mix_video/src/mixvideoformat_h264.cpp index 08d8e78..0bbe19a 100644 --- a/mix_video/src/mixvideoformat_h264.cpp +++ b/mix_video/src/mixvideoformat_h264.cpp @@ -13,11 +13,18 @@ #include "mixvideolog.h" #include "mixvideoformat_h264.h" +#include "mixvideoconfigparamsdec_h264.h" #ifdef MIX_LOG_ENABLE static int mix_video_h264_counter = 0; #endif /* MIX_LOG_ENABLE */ +#define HACK_DPB +#ifdef HACK_DPB +static inline MIX_RESULT mix_videofmt_h264_hack_dpb( MixVideoFormat *mix, vbp_picture_data_h264* pic_data); +#endif + + // Local Help Funcs @@ -74,6 +81,9 @@ MIX_RESULT MixVideoFormat_H264::Initialize( vbp_data_h264 *data = NULL; MixIOVec *header = NULL; + MixVideoConfigParamsDecH264 *config_params_h264 = NULL; + bool va_setup_flag = FALSE; + if (config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL) { LOG_E( "NUll pointer passed in\n"); @@ -118,6 +128,28 @@ MIX_RESULT MixVideoFormat_H264::Initialize( goto CLEAN_UP; } + ret = mix_videoconfigparamsdec_get_error_concealment( + config_params, + &this->error_concealment); + + if (ret != MIX_RESULT_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get error_concealment flag\n"); + goto CLEAN_UP; + } + + config_params_h264 = MIX_VIDEOCONFIGPARAMSDEC_H264(config_params); + ret = mix_videoconfigparamsenc_h264_get_va_setup_flag(config_params_h264, &va_setup_flag); + if (ret != MIX_RESULT_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Failed to get va_setup_flag\n"); + return ret; + } + + LOG_V("va_setup_flag = %d\n", va_setup_flag); + LOG_V( "Before vbp_open\n"); //Load the bitstream parser pret = vbp_open(ptype, &(this->parser_handle)); @@ -130,6 +162,17 @@ MIX_RESULT MixVideoFormat_H264::Initialize( } LOG_V( "Opened parser\n"); + if(va_setup_flag) { + LOG_V("calling to mix_videofmt_h264_initialize_va(mix, NULL)\n"); + ret = _initialize_va( NULL); + LOG_V("ret = 0x%x\n", ret); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_E( "Error initializing va. 
\n"); + } + goto CLEAN_UP; + } + ret = mix_videoconfigparamsdec_get_header(config_params, &header); if ((MIX_RESULT_SUCCESS != ret) || (NULL == header)) { @@ -143,7 +186,7 @@ MIX_RESULT MixVideoFormat_H264::Initialize( pret = vbp_parse(this->parser_handle, header->data, header->data_size, TRUE); - if ((VBP_OK != pret) && (VBP_DONE != pret)) { + if (VBP_OK != pret) { ret = MIX_RESULT_FAIL; LOG_E( "Error parsing header data\n"); goto CLEAN_UP; @@ -154,7 +197,7 @@ MIX_RESULT MixVideoFormat_H264::Initialize( //Get the header data and save pret = vbp_query(this->parser_handle, (void **)&data); - if ((VBP_OK != pret) || (NULL == data)) { + if (VBP_OK != pret) { ret = MIX_RESULT_FAIL; LOG_E( "Error reading parsed header data\n"); goto CLEAN_UP; @@ -236,7 +279,13 @@ MIX_RESULT MixVideoFormat_H264::Decode( for (i = 0; i < bufincnt; i++) { LOG_V( "Decoding a buf %x, size %d\n", (uint)bufin[i]->data, bufin[i]->size); // decode a buffer at a time - ret = _decode_a_buffer(bufin[i], ts, discontinuity, decode_params); + ret = _decode_a_buffer( + bufin[i], + ts, + discontinuity, + decode_params, + (i == bufincnt-1 ? decode_params->complete_frame : 0)); + if (MIX_RESULT_SUCCESS != ret) { LOG_E("mix_videofmt_h264_decode_a_buffer failed.\n"); goto CLEAN_UP; @@ -361,6 +410,17 @@ MIX_RESULT MixVideoFormat_H264::_update_config_params(vbp_data_h264 *data) { data->codec_data->sar_height); mix_videoconfigparamsdec_set_bit_rate( this->config_params, data->codec_data->bit_rate); + + LOG_V("crop_left = %d crop_right = %d crop_top = %d crop_bottom = %d\n", + data->codec_data->crop_left, data->codec_data->crop_right, + data->codec_data->crop_top, data->codec_data->crop_bottom); + + mix_videoconfigparamsdec_set_cropping_info( + this->config_params, + data->codec_data->crop_left, + data->codec_data->crop_right, + data->codec_data->crop_top, + data->codec_data->crop_bottom); return MIX_RESULT_SUCCESS; } @@ -379,20 +439,50 @@ MIX_RESULT MixVideoFormat_H264::_initialize_va(vbp_data_h264 *data) { attrib.value = VA_RT_FORMAT_YUV420; //Initialize and save the VA config ID - //We use high profile for all kinds of H.264 profiles (baseline, main and high) - vret = vaCreateConfig(this->va_display, VAProfileH264High, - VAEntrypointVLD, &attrib, 1, &(this->va_config)); - +#ifdef ANDROID + if((this->error_concealment == TRUE) && (data == NULL || (data != NULL && ((data->codec_data->profile_idc == 66) || (data->codec_data->constraint_set0_flag == 1)) && + (data->codec_data->constraint_set1_flag == 1)))) + { + //it is constrained baseline profile according to subclause A.2.1.1 in H.264 Spec v200903 + vret = vaCreateConfig( + this->va_display, + VAProfileH264ConstrainedBaseline, + VAEntrypointVLD, + &attrib, + 1, + &(this->va_config)); + } + else + { +#endif + //We use high profile for all kinds of H.264 profiles (baseline, main and high) except for constrained baseline + vret = vaCreateConfig( + this->va_display, + VAProfileH264High, + VAEntrypointVLD, + &attrib, + 1, + &(this->va_config)); +#ifdef ANDROID + } +#endif if (VA_STATUS_SUCCESS != vret) { ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; LOG_E("vaCreateConfig failed\n"); return ret; } - LOG_V( "Codec data says num_ref_frames is %d\n", data->codec_data->num_ref_frames); +#ifdef MIX_LOG_ENABLE + if(data) { + LOG_V( "Codec data says num_ref_frames is %d\n", data->codec_data->num_ref_frames); + } +#endif // handle both frame and field coding for interlaced content - int num_ref_pictures = data->codec_data->num_ref_frames; + int num_ref_pictures = 0; + if(data) { + 
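/* Guarded because data may legitimately be NULL here: when va_setup_flag
 * is set, Initialize() calls _initialize_va(NULL) before any SPS has been
 * parsed, so codec data is not yet available and num_ref_pictures keeps
 * its 0 default; the surface budget then relies on extra_surfaces plus
 * the fixed headroom added below. */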
num_ref_pictures = data->codec_data->num_ref_frames; + } //Adding 1 to work around VBLANK issue, and another 1 to compensate cached frame that // will not start decoding until a new frame is received. @@ -407,10 +497,14 @@ MIX_RESULT MixVideoFormat_H264::_initialize_va(vbp_data_h264 *data) { return ret; } - LOG_V( "Codec data says picture size is %d x %d\n", - (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, - (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16); - LOG_V( "getcaps says picture size is %d x %d\n", this->picture_width, this->picture_height); +#ifdef MIX_LOG_ENABLE + if(data) { + LOG_V( "Codec data says picture size is %d x %d\n", + (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, + (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16); + LOG_V( "getcaps says picture size is %d x %d\n", this->picture_width, this->picture_height); + } +#endif vret = vaCreateSurfaces( this->va_display, @@ -445,11 +539,13 @@ MIX_RESULT MixVideoFormat_H264::_initialize_va(vbp_data_h264 *data) { return ret; break; } +#if 0 // NOTE: We don't use the value in frame manager, comment out the following lines if (data->codec_data->pic_order_cnt_type == 0) { int max = (int)pow(2, data->codec_data->log2_max_pic_order_cnt_lsb_minus4 + 4); mix_framemanager_set_max_picture_number(this->framemgr, max); } +#endif //Initialize and save the VA context ID //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 @@ -601,7 +697,7 @@ MIX_RESULT MixVideoFormat_H264::_decode_a_slice( if (ret != MIX_RESULT_SUCCESS) { LOG_E( "Error reference frame not found\n"); //Need to remove the frame we inserted in _handle_ref_frames above, since we are not going to decode it - _cleanup_ref_frame(pic_params, this->frame); + _cleanup_ref_frame(pic_params, this->video_frame); LOG_V( "End\n"); return ret; } @@ -1004,7 +1100,8 @@ MIX_RESULT MixVideoFormat_H264::_decode_a_buffer( MixBuffer * bufin, uint64 ts, bool discontinuity, - MixVideoDecodeParams * decode_params) { + MixVideoDecodeParams * decode_params, + bool complete_frame) { uint32 pret = 0; MIX_RESULT ret = MIX_RESULT_SUCCESS; vbp_data_h264 *data = NULL; @@ -1017,7 +1114,7 @@ MIX_RESULT MixVideoFormat_H264::_decode_a_buffer( FALSE); LOG_V( "Called parse for current frame\n"); - if ((pret != VBP_DONE) &&(pret != VBP_OK)) { + if (pret != VBP_OK) { ret = MIX_RESULT_ERROR_PROCESS_STREAM; // MIX_RESULT_DROPFRAME; LOG_E( "vbp_parse failed.\n"); LOG_V("End\n"); @@ -1108,6 +1205,16 @@ MIX_RESULT MixVideoFormat_H264::_decode_a_buffer( return ret; } } + if (complete_frame) + { + // finish decoding current frame + ret = _decode_end(FALSE); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_V("mix_videofmt_h264_decode_end failed.\n"); + return ret; + } + } LOG_V("End\n"); return ret; @@ -1115,7 +1222,6 @@ MIX_RESULT MixVideoFormat_H264::_decode_a_buffer( -#define HACK_DPB #ifdef HACK_DPB static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, vbp_picture_data_h264* pic_data diff --git a/mix_video/src/mixvideoformat_h264.h b/mix_video/src/mixvideoformat_h264.h index ce16281..b85d6df 100644 --- a/mix_video/src/mixvideoformat_h264.h +++ b/mix_video/src/mixvideoformat_h264.h @@ -47,7 +47,7 @@ private: MIX_RESULT _update_config_params(vbp_data_h264 *data); MIX_RESULT _initialize_va(vbp_data_h264 *data); MIX_RESULT _decode_a_buffer(MixBuffer * bufin, uint64 ts, - bool discontinuity, MixVideoDecodeParams * decode_params); + bool discontinuity, MixVideoDecodeParams * decode_params,bool complete_frame); MIX_RESULT 
_decode_end(bool drop_picture); MIX_RESULT _handle_new_sequence(vbp_data_h264 *data); MIX_RESULT _decode_begin(vbp_data_h264 *data); diff --git a/mix_video/src/mixvideoformat_mp42.cpp b/mix_video/src/mixvideoformat_mp42.cpp index 8264a38..755501a 100644 --- a/mix_video/src/mixvideoformat_mp42.cpp +++ b/mix_video/src/mixvideoformat_mp42.cpp @@ -120,6 +120,11 @@ MIX_RESULT MixVideoFormat_MP42::_update_config_params( mix_videoconfigparamsdec_set_pixel_aspect_ratio( this->config_params, data->codec_data.par_width, data->codec_data.par_height); + + mix_videoconfigparamsdec_set_bit_rate( + this->config_params, + data->codec_data.bit_rate); + return MIX_RESULT_SUCCESS; } @@ -717,7 +722,7 @@ MIX_RESULT MixVideoFormat_MP42::_decode_begin(vbp_data_mp42* data) { } MIX_RESULT MixVideoFormat_MP42::_decode_a_buffer( - MixBuffer * bufin, uint64 ts, bool discontinuity) { + MixBuffer * bufin, uint64 ts, bool discontinuity,bool complete_frame) { uint32 pret = 0; MIX_RESULT ret = MIX_RESULT_SUCCESS; vbp_data_mp42 *data = NULL; @@ -791,6 +796,16 @@ MIX_RESULT MixVideoFormat_MP42::_decode_a_buffer( goto CLEAN_UP; } } + if (complete_frame) + { + // finish decoding current frame + ret = _decode_end(FALSE); + if (ret != MIX_RESULT_SUCCESS) + { + LOG_V("mix_videofmt_mp42_decode_end failed.\n"); + goto CLEAN_UP; + } + } CLEAN_UP: LOG_V( "End\n"); @@ -964,7 +979,11 @@ MIX_RESULT MixVideoFormat_MP42::Decode( for (int i = 0; i < bufincnt; i++) { LOG_V("decode buffer %d in total %d \n", i, bufincnt); // decode a buffer at a time - ret = _decode_a_buffer(bufin[i], ts, discontinuity); + ret = _decode_a_buffer( + bufin[i], + ts, + discontinuity, + ((i == bufincnt - 1) ? decode_params->complete_frame : 0)); if (ret != MIX_RESULT_SUCCESS) { LOG_E("mix_videofmt_mp42_decode_a_buffer failed.\n"); break; diff --git a/mix_video/src/mixvideoformat_mp42.h b/mix_video/src/mixvideoformat_mp42.h index e05d3be..9d00d1a 100644 --- a/mix_video/src/mixvideoformat_mp42.h +++ b/mix_video/src/mixvideoformat_mp42.h @@ -50,7 +50,7 @@ private: MIX_RESULT _decode_continue(vbp_data_mp42 *data); MIX_RESULT _decode_begin(vbp_data_mp42* data); MIX_RESULT _decode_a_buffer( - MixBuffer * bufin, uint64 ts, bool discontinuity); + MixBuffer * bufin, uint64 ts, bool discontinuity,bool complete_frame); public: /*< public > */ diff --git a/mix_video/src/mixvideoformat_vc1.cpp b/mix_video/src/mixvideoformat_vc1.cpp index 342c49d..cd672d9 100644 --- a/mix_video/src/mixvideoformat_vc1.cpp +++ b/mix_video/src/mixvideoformat_vc1.cpp @@ -13,13 +13,9 @@ #include #endif -#ifdef YUVDUMP -//TODO Complete YUVDUMP code and move into base class -#include -#endif /* YUVDUMP */ #include -#include + #ifdef MIX_LOG_ENABLE static int mix_video_vc1_counter = 0; @@ -32,7 +28,6 @@ MixVideoFormat_VC1::MixVideoFormat_VC1() { } MixVideoFormat_VC1::~MixVideoFormat_VC1() { - int32 pret = VBP_OK; /* clean up here. 
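Teardown below tolerates a parser that was never opened: vbp_close() is now
called only when parser_handle is non-NULL, and the handle is cleared right
afterwards, so destroying a partially initialized VC-1 format object cannot
close the parser twice.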
*/ Lock(); //surfacepool is deallocated by parent @@ -54,10 +49,10 @@ MixVideoFormat_VC1::~MixVideoFormat_VC1() { this->current_timestamp = (uint64)-1; //Close the parser - pret = vbp_close(this->parser_handle); - this->parser_handle = NULL; - if (pret != VBP_OK) { - LOG_E( "Error closing parser\n"); + if (this->parser_handle) + { + vbp_close(this->parser_handle); + this->parser_handle = NULL; } Unlock(); @@ -176,9 +171,155 @@ MIX_RESULT MixVideoFormat_VC1::_update_config_params(vbp_data_vc1 *data) { this->config_params, data->se_data->ASPECT_HORIZ_SIZE, data->se_data->ASPECT_VERT_SIZE); + + mix_videoconfigparamsdec_set_bit_rate( + this->config_params, + data->se_data->bit_rate); return MIX_RESULT_SUCCESS; } +MIX_RESULT MixVideoFormat_VC1::_initialize_va(vbp_data_vc1 *data) { + MIX_RESULT ret = MIX_RESULT_SUCCESS; + VAStatus vret = VA_STATUS_SUCCESS; + VAConfigAttrib attrib; + VAProfile va_profile; + + LOG_V( "Begin\n"); + if (this->va_initialized) { + LOG_W("va already initialized.\n"); + return MIX_RESULT_SUCCESS; + } + + //We are requesting RT attributes + attrib.type = VAConfigAttribRTFormat; + attrib.value = VA_RT_FORMAT_YUV420; + + //Initialize and save the VA config ID + switch (data->se_data->PROFILE) { + case 0: + va_profile = VAProfileVC1Simple; + break; + case 1: + va_profile = VAProfileVC1Main; + break; + + default: + va_profile = VAProfileVC1Advanced; + break; + } + + vret = vaCreateConfig( + this->va_display, + va_profile, + VAEntrypointVLD, + &attrib, + 1, + &(this->va_config)); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E("vaCreateConfig failed\n"); + goto cleanup; + } + + + //Check for loop filtering + if (data->se_data->LOOPFILTER == 1) + this->loopFilter = TRUE; + else + this->loopFilter = FALSE; + + LOG_V( "loop filter is %d, TFCNTRFLAG is %d\n", data->se_data->LOOPFILTER, data->se_data->TFCNTRFLAG); + + if ((data->se_data->MAXBFRAMES > 0) || (data->se_data->PROFILE == 3) || (data->se_data->PROFILE == 1)) { + //If Advanced profile, have to assume B frames may be present, since MAXBFRAMES is not valid for this prof + this->haveBframes = TRUE; + } + else { + this->haveBframes = FALSE; + } + + //Calculate VC1 numSurfaces based on max number of B frames or + // MIX_VIDEO_VC1_SURFACE_NUM, whichever is less + + //Adding 1 to work around VBLANK issue + this->va_num_surfaces = 1 + this->extra_surfaces + ((3 + (this->haveBframes ? 1 : 0) < + MIX_VIDEO_VC1_SURFACE_NUM) ? + (3 + (this->haveBframes ? 1 : 0)) + : MIX_VIDEO_VC1_SURFACE_NUM); + + this->va_surfaces = new VASurfaceID[this->va_num_surfaces]; + if (this->va_surfaces == NULL) { + ret = MIX_RESULT_FAIL; + LOG_E( "parent->va_surfaces == NULL. \n"); + goto cleanup; + } + + vret = vaCreateSurfaces( + this->va_display, + this->picture_width, + this->picture_height, + VA_RT_FORMAT_YUV420, + this->va_num_surfaces, + this->va_surfaces); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error allocating surfaces\n"); + goto cleanup; + } + + LOG_V( "Created %d libva surfaces\n", this->va_num_surfaces); + + //Initialize the surface pool + ret = mix_surfacepool_initialize( + this->surfacepool, + this->va_surfaces, + this->va_num_surfaces, + this->va_display); + + switch (ret) { + case MIX_RESULT_SUCCESS: + break; + case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing. 
+ default: + ret = MIX_RESULT_ALREADY_INIT; + LOG_E( "Error init surface pool\n"); + goto cleanup; + break; + } + + //Initialize and save the VA context ID + //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 + vret = vaCreateContext( + this->va_display, + this->va_config, + this->picture_width, + this->picture_height, + 0, + this->va_surfaces, + this->va_num_surfaces, + &(this->va_context)); + + if (vret != VA_STATUS_SUCCESS) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error initializing video driver\n"); + goto cleanup; + } + + LOG_V( "mix_video vinfo: Content type %s\n", (data->se_data->INTERLACE) ? "interlaced" : "progressive"); + LOG_V( "mix_video vinfo: Content width %d, height %d\n", this->picture_width, this->picture_height); + LOG_V( "mix_video vinfo: MAXBFRAMES %d (note that for Advanced profile, MAXBFRAMES can be zero and there still can be B frames in the content)\n", data->se_data->MAXBFRAMES); + LOG_V( "mix_video vinfo: PROFILE %d, LEVEL %d\n", data->se_data->PROFILE, data->se_data->LEVEL); + + this->va_initialized = TRUE; +cleanup: + /* nothing to clean up */ + + return ret; + +} + MIX_RESULT MixVideoFormat_VC1::Initialize( MixVideoConfigParamsDec * config_params, MixFrameManager * frame_mgr, @@ -191,18 +332,6 @@ MIX_RESULT MixVideoFormat_VC1::Initialize( enum _vbp_parser_type ptype = VBP_VC1; vbp_data_vc1 *data = NULL; MixIOVec *header = NULL; - int numprofs = 0, numactualprofs = 0; - int numentrypts = 0, numactualentrypts = 0; - VADisplay vadisplay = NULL; - VAProfile *profiles = NULL; - VAEntrypoint *entrypts = NULL; - VAConfigAttrib attrib; - VAStatus vret = VA_STATUS_SUCCESS; - uint extra_surfaces = 0; - VASurfaceID *surfaces = NULL; - uint numSurfaces = 0; - int vaentrypt = 0; - int vaprof = 0; //TODO Partition this method into smaller methods if (config_params == NULL || frame_mgr == NULL || @@ -223,13 +352,34 @@ MIX_RESULT MixVideoFormat_VC1::Initialize( //From now on, we exit this function through cleanup: Lock(); + this->surfacepool = mix_surfacepool_new(); + *surface_pool = this->surfacepool; + + if (this->surfacepool == NULL) + { + ret = MIX_RESULT_NO_MEMORY; + LOG_E( "parent->surfacepool == NULL.\n"); + goto cleanup; + } + + ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, + &this->extra_surfaces); + + if (ret != MIX_RESULT_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Cannot get extra surface allocation setting\n"); + goto cleanup; + } + + //Load the bitstream parser pret = vbp_open(ptype, &(this->parser_handle)); if (!(pret == VBP_OK)) { ret = MIX_RESULT_FAIL; LOG_E( "Error opening parser\n"); - goto CLEAN_UP; + goto cleanup; } LOG_V( "Opened parser\n"); @@ -238,36 +388,29 @@ MIX_RESULT MixVideoFormat_VC1::Initialize( &header); if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot get header data\n"); - goto CLEAN_UP; - } - - ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, - &extra_surfaces); + ret = MIX_RESULT_SUCCESS; + LOG_W( "Codec data is not available in the configuration parameter.\n"); - if (ret != MIX_RESULT_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot get extra surface allocation setting\n"); - goto CLEAN_UP; + goto cleanup; } LOG_V( "Calling parse on header data, handle %d\n", (int)this->parser_handle); + LOG_V( "mix_video vinfo: Content type %s, %s\n", (header->data_size > 8) ? "VC-1" : "WMV", (data->se_data->INTERLACE) ? 
"interlaced" : "progressive"); ret = _update_seq_header(config_params, header); if (ret != MIX_RESULT_SUCCESS) { ret = MIX_RESULT_FAIL; LOG_E( "Error updating sequence header\n"); - goto CLEAN_UP; + goto cleanup; } pret = vbp_parse(this->parser_handle, header->data, header->data_size, TRUE); - if (!((pret == VBP_OK) || (pret == VBP_DONE))) { + if ((pret != VBP_OK)) { ret = MIX_RESULT_FAIL; LOG_E( "Error parsing header data, size %d\n", header->data_size); - goto CLEAN_UP; + goto cleanup; } @@ -278,207 +421,19 @@ MIX_RESULT MixVideoFormat_VC1::Initialize( if ((pret != VBP_OK) || (data == NULL)) { ret = MIX_RESULT_FAIL; LOG_E( "Error reading parsed header data\n"); - goto CLEAN_UP; + goto cleanup; } LOG_V( "Queried parser for header data\n"); _update_config_params(data); - //Time for libva initialization - vadisplay = this->va_display; - numprofs = vaMaxNumProfiles(vadisplay); - profiles = reinterpret_cast(malloc(numprofs*sizeof(VAProfile))); - - if (!profiles) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating memory\n"); - goto CLEAN_UP; - } - - vret = vaQueryConfigProfiles(vadisplay, profiles, - &numactualprofs); - if (!(vret == VA_STATUS_SUCCESS)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing video driver\n"); - goto CLEAN_UP; - } - - //check the desired profile support - - VAProfile profile; - switch (data->se_data->PROFILE) { - case 0: - profile = VAProfileVC1Simple; - break; - case 1: - profile = VAProfileVC1Main; - break; - default: - profile = VAProfileVC1Advanced; - break; - } - - for (; vaprof < numactualprofs; vaprof++) { - if (profiles[vaprof] == profile) - break; - } - if (vaprof >= numprofs || profiles[vaprof] != profile) { - ret = MIX_RESULT_FAIL; - LOG_E( "Profile not supported by driver\n"); - goto CLEAN_UP; - } - - numentrypts = vaMaxNumEntrypoints(vadisplay); - entrypts = reinterpret_cast(malloc(numentrypts*sizeof(VAEntrypoint))); - - if (!entrypts) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating memory\n"); - goto CLEAN_UP; - } - - vret = vaQueryConfigEntrypoints(vadisplay, profiles[vaprof], - entrypts, &numactualentrypts); - - if (!(vret == VA_STATUS_SUCCESS)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing driver\n"); - goto CLEAN_UP; - } - - for (; vaentrypt < numactualentrypts; vaentrypt++) { - if (entrypts[vaentrypt] == VAEntrypointVLD) - break; - } - if (vaentrypt >= numentrypts || entrypts[vaentrypt] != VAEntrypointVLD) { - ret = MIX_RESULT_FAIL; - LOG_E( "Entry point not supported by driver\n"); - goto CLEAN_UP; - } - - //We are requesting RT attributes - attrib.type = VAConfigAttribRTFormat; - - vret = vaGetConfigAttributes(vadisplay, profiles[vaprof], - entrypts[vaentrypt], &attrib, 1); - - //TODO Handle other values returned for RT format - // and check with requested format provided in config params - //Right now only YUV 4:2:0 is supported by libva - // and this is our default - if (((attrib.value & VA_RT_FORMAT_YUV420) == 0) || vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing driver\n"); - goto CLEAN_UP; - } - - //Initialize and save the VA config ID - vret = vaCreateConfig(vadisplay, profiles[vaprof], - entrypts[vaentrypt], &attrib, 1, &(this->va_config)); - - if (!(vret == VA_STATUS_SUCCESS)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing driver\n"); - goto CLEAN_UP; - } - - LOG_V( "Created libva config with profile %d\n", vaprof); - - //Check for loop filtering - if (data->se_data->LOOPFILTER == 1) - this->loopFilter = TRUE; - else - this->loopFilter = FALSE; - - LOG_V( "loop 
filter is %d, TFCNTRFLAG is %d\n", - data->se_data->LOOPFILTER, data->se_data->TFCNTRFLAG); - - //Initialize the surface pool - if ((data->se_data->MAXBFRAMES > 0) || - (data->se_data->PROFILE == 3) || - (data->se_data->PROFILE == 1)) - //If Advanced profile, have to assume B frames may be present, since MAXBFRAMES is not valid for this prof - this->haveBframes = TRUE; - else - this->haveBframes = FALSE; - - //Calculate VC1 numSurfaces based on max number of B frames or - // MIX_VIDEO_VC1_SURFACE_NUM, whichever is less - - //Adding 1 to work around VBLANK issue - this->va_num_surfaces = 1 + extra_surfaces + - ((3 + (this->haveBframes ? 1 : 0) < MIX_VIDEO_VC1_SURFACE_NUM) ? - (3 + (this->haveBframes ? 1 : 0)) : MIX_VIDEO_VC1_SURFACE_NUM); - numSurfaces = this->va_num_surfaces; - this->va_surfaces = reinterpret_cast(malloc(sizeof(VASurfaceID)*numSurfaces)); - surfaces = this->va_surfaces; - - if (surfaces == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot allocate temporary data\n"); - goto CLEAN_UP; - } - - vret = vaCreateSurfaces( - vadisplay, this->picture_width, - this->picture_height, entrypts[vaentrypt], - numSurfaces, surfaces); - if (!(vret == VA_STATUS_SUCCESS)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error allocating surfaces\n"); - goto CLEAN_UP; - } - - this->surfacepool = mix_surfacepool_new(); - *surface_pool = this->surfacepool; - if (this->surfacepool == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing surface pool\n"); - goto CLEAN_UP; - } - - - ret = mix_surfacepool_initialize(this->surfacepool, - surfaces, numSurfaces, vadisplay); - - switch (ret) { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - default: - ret = MIX_RESULT_ALREADY_INIT; - LOG_E( "Error init failure\n"); - goto CLEAN_UP; - break; - } - - LOG_V( "Created %d libva surfaces, MAXBFRAMES is %d\n", - numSurfaces, data->se_data->MAXBFRAMES); - - //Initialize and save the VA context ID - //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 - vret = vaCreateContext(vadisplay, this->va_config, - this->picture_width, this->picture_height, - 0, surfaces, numSurfaces, - &(this->va_context)); - if (!(vret == VA_STATUS_SUCCESS)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing video driver\n"); - goto CLEAN_UP; + ret = _initialize_va(data); + if (ret != MIX_RESULT_SUCCESS) { + LOG_E( "Error initializing va. \n"); + goto cleanup; } - LOG_V( "Created libva context width %d, height %d\n", - this->picture_width, this->picture_height); - LOG_V( "mix_video vinfo: Content type %s, %s\n", - (header->data_size > 8) ? "VC-1" : "WMV", (data->se_data->INTERLACE) ? 
"interlaced" : "progressive"); - LOG_V( "mix_video vinfo: Content width %d, height %d\n", - this->picture_width, this->picture_height); - LOG_V( "mix_video vinfo: MAXBFRAMES %d (note that for Advanced profile, MAXBFRAMES can be zero and there still can be B frames in the content)\n", - data->se_data->MAXBFRAMES); - LOG_V( "mix_video vinfo: PROFILE %d, LEVEL %d\n", - data->se_data->PROFILE, data->se_data->LEVEL); - -CLEAN_UP: +cleanup: if (ret != MIX_RESULT_SUCCESS) { pret = vbp_close(this->parser_handle); this->parser_handle = NULL; @@ -486,14 +441,14 @@ CLEAN_UP: } else { this->initialized = TRUE; } + if (header != NULL) { if (header->data != NULL) - free(header->data); - free(header); + delete[](header->data); + delete(header); header = NULL; } - free(profiles); - free(entrypts); + this->lastFrame = NULL; LOG_V( "Unlocking\n"); Unlock(); @@ -511,7 +466,6 @@ MIX_RESULT MixVideoFormat_VC1::Decode( uint64 ts = 0; vbp_data_vc1 *data = NULL; bool discontinuity = FALSE; - MixInputBufferEntry *bufentry = NULL; if (bufin == NULL || decode_params == NULL) { LOG_E( "NUll pointer passed in\n"); return MIX_RESULT_NULL_PTR; @@ -542,29 +496,6 @@ MIX_RESULT MixVideoFormat_VC1::Decode( LOG_V( "Locking\n"); Lock(); - //If this is a new frame and we haven't retrieved parser - // workload data from previous frame yet, do so - if ((ts != this->current_timestamp) && - (this->parse_in_progress)) { - //query for data - pret = vbp_query(this->parser_handle, (void **) &data); - if ((pret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing parser\n"); - goto CLEAN_UP; - } - LOG_V( "Queried for last frame data\n"); - //process and decode data - ret = _process_decode(data, this->current_timestamp, this->discontinuity_frame_in_progress); - - if (ret != MIX_RESULT_SUCCESS) { - //We log this but need to process the new frame data, so do not return - LOG_E( "process_decode failed.\n"); - } - LOG_V( "Called process and decode for last frame\n"); - this->parse_in_progress = FALSE; - } - this->current_timestamp = ts; this->discontinuity_frame_in_progress = discontinuity; LOG_V( "Starting current frame %d, timestamp %"UINT64_FORMAT"\n", mix_video_vc1_counter++, ts); @@ -574,74 +505,42 @@ MIX_RESULT MixVideoFormat_VC1::Decode( (int)this->parser_handle, (uint)bufin[i]->data, bufin[i]->size); pret = vbp_parse(this->parser_handle, bufin[i]->data, bufin[i]->size, FALSE); LOG_V( "Called parse for current frame\n"); - if (pret == VBP_DONE) { - //query for data - pret = vbp_query(this->parser_handle, (void **) &data); - if ((pret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error getting parser data\n"); - goto CLEAN_UP; - } - LOG_V( "Called query for current frame\n"); - //Increase the ref count of this input buffer - mix_buffer_ref(bufin[i]); - //Create a new MixInputBufferEntry - //TODO make this from a pool to optimize - bufentry = reinterpret_cast(malloc(sizeof( - MixInputBufferEntry))); - if (bufentry == NULL) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating bufentry\n"); - goto CLEAN_UP; - } - - bufentry->buf = bufin[i]; - LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"UINT64_FORMAT"\n", - (uint)bufentry, (uint)bufentry->buf, ts); - bufentry->timestamp = ts; - - LOG_V( "Enqueue this input buffer for current frame\n"); - LOG_V( "bufentry->timestamp %"UINT64_FORMAT"\n", bufentry->timestamp); - - //Enqueue this input buffer - j_queue_push_tail(this->inputbufqueue, - (void*)bufentry); - - //process and decode data - ret = _process_decode(data, ts, 
discontinuity); + if (pret != VBP_OK) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error parsing data\n"); + goto CLEAN_UP; + } + //query for data + pret = vbp_query(this->parser_handle, (void **) &data); + if ((pret != VBP_OK) || (data == NULL)) { + ret = MIX_RESULT_FAIL; + LOG_E( "Error getting parser data\n"); + goto CLEAN_UP; + } + if (this->va_initialized == FALSE) { + _update_config_params(data); + LOG_V("try initializing VA...\n"); + ret = _initialize_va(data); if (ret != MIX_RESULT_SUCCESS) { - //We log this but continue since we need to complete our processing of input buffers - LOG_E( "Process_decode failed.\n"); - } - - LOG_V( "Called process and decode for current frame\n"); - this->parse_in_progress = FALSE; - } else if (pret != VBP_OK) { - //We log this but continue since we need to complete our processing of input buffers - LOG_E( "Parsing failed.\n"); - ret = MIX_RESULT_FAIL; - } else { - LOG_V( "Enqueuing buffer and going on to next (if any) for this frame\n"); - //Increase the ref count of this input buffer - mix_buffer_ref(bufin[i]); - //Create a new MixInputBufferEntry - //TODO make this from a pool to optimize - bufentry = reinterpret_cast(malloc(sizeof(MixInputBufferEntry))); - if (bufentry == NULL) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating bufentry\n"); + LOG_V("mix_videofmt_vc1_initialize_va failed.\n"); goto CLEAN_UP; } - bufentry->buf = bufin[i]; - bufentry->timestamp = ts; + } - //Enqueue this input buffer - j_queue_push_tail(this->inputbufqueue, - (void*)bufentry); - this->parse_in_progress = TRUE; + LOG_V( "Called query for current frame\n"); + + //process and decode data + ret = _process_decode(data, ts, discontinuity); + if (ret != MIX_RESULT_SUCCESS) + { + //We log this but continue since we need to complete our processing of input buffers + LOG_E( "Process_decode failed.\n"); + goto CLEAN_UP; } + } + CLEAN_UP: LOG_V( "Unlocking\n"); Unlock(); @@ -650,81 +549,6 @@ CLEAN_UP: } -#ifdef YUVDUMP -//TODO Complete this YUVDUMP code and move into base class -MIX_RESULT MixVideoFormat_VC1::_get_Img_from_surface (MixVideoFrame * frame) { - VAStatus vaStatus = VA_STATUS_SUCCESS; - VAImageFormat va_image_format; - VAImage va_image; - unsigned char* pBuffer; - unsigned int ui32SrcWidth = this->picture_width; - unsigned int ui32SrcHeight = this->picture_height; - unsigned int ui32Stride; - unsigned int ui32ChromaOffset; - FILE *fp = NULL; - int r = 0; - int i; - g_print ("_get_Img_from_surface \n"); - - if (NULL == frame) { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - fp = fopen("yuvdump.yuv", "a+"); - - static int have_va_image = 0; - - if (!have_va_image) { - va_image_format.fourcc = VA_FOURCC_NV12; - //va_image_format.fourcc = VA_FOURCC_YV12; - vaStatus = vaCreateImage( - this->va_display, &va_image_format, - ui32SrcWidth, ui32SrcHeight, &va_image); - have_va_image = 1; - } - - vaStatus = vaGetImage( - this->va_display, frame->frame_id, 0, 0, - ui32SrcWidth, ui32SrcHeight, va_image.image_id ); - vaStatus = vaMapBuffer(this->va_display, va_image.buf, (void **) &pBuffer); - ui32ChromaOffset = va_image.offsets[1]; - ui32Stride = va_image.pitches[0]; - - if (VA_STATUS_SUCCESS != vaStatus) { - g_print ("VideoProcessBlt: Unable to copy surface\n\r"); - return vaStatus; - } - - { - g_print ("before copy memory....\n"); - g_print ("width = %d, height = %d\n", ui32SrcWidth, ui32SrcHeight); - g_print ("data_size = %d\n", va_image.data_size); - g_print ("num_planes = %d\n", va_image.num_planes); - g_print ("va_image.pitches[0] = %d\n", 
va_image.pitches[0]); - g_print ("va_image.pitches[1] = %d\n", va_image.pitches[1]); - g_print ("va_image.pitches[2] = %d\n", va_image.pitches[2]); - g_print ("va_image.offsets[0] = %d\n", va_image.offsets[0]); - g_print ("va_image.offsets[1] = %d\n", va_image.offsets[1]); - g_print ("va_image.offsets[2] = %d\n", va_image.offsets[2]); - // r = fwrite (pBuffer, 1, va_image.offsets[1], fp); - - r = fwrite (pBuffer, va_image.offsets[1], 1, fp); - - for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2) - r = fwrite (pBuffer + va_image.offsets[1] + i / 2, 1, 1, fp); - - for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2) - r = fwrite (pBuffer + va_image.offsets[1] + i / 2 + 1, 1, 1, fp); - - g_print ("ui32ChromaOffset = %d, ui32Stride = %d\n", ui32ChromaOffset, ui32Stride); - - } - - vaStatus = vaUnmapBuffer(this->va_display, va_image.buf); - return vaStatus; -} -#endif /* YUVDUMP */ - MIX_RESULT MixVideoFormat_VC1::_decode_a_picture( vbp_data_vc1 *data, int pic_index, MixVideoFrame *frame) { MIX_RESULT ret = MIX_RESULT_SUCCESS; @@ -792,7 +616,8 @@ MIX_RESULT MixVideoFormat_VC1::_decode_a_picture( case VC1_PTYPE_BI: // BI frame type ret = mix_videoframe_set_frame_type(frame, TYPE_B); break; - //Not indicated here case VC1_PTYPE_SKIPPED: + //Not indicated here + case VC1_PTYPE_SKIPPED: default: break; } @@ -913,6 +738,17 @@ MIX_RESULT MixVideoFormat_VC1::_decode_a_picture( //Libva buffer set up vadisplay = this->va_display; vacontext = this->va_context; + LOG_V( "Calling vaBeginPicture\n"); + + //Now we can begin the picture + vret = vaBeginPicture(vadisplay, vacontext, surface); + + if (vret != VA_STATUS_SUCCESS) + { + ret = MIX_RESULT_FAIL; + LOG_E( "Video driver returned error from vaBeginPicture\n"); + goto CLEAN_UP; + } LOG_V( "Creating libva picture parameter buffer\n"); @@ -1003,18 +839,6 @@ MIX_RESULT MixVideoFormat_VC1::_decode_a_picture( } } - - LOG_V( "Calling vaBeginPicture\n"); - - //Now we can begin the picture - vret = vaBeginPicture(vadisplay, vacontext, surface); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaBeginPicture\n"); - goto CLEAN_UP; - } - LOG_V( "Calling vaRenderPicture\n"); //Render the picture @@ -1065,7 +889,6 @@ MIX_RESULT MixVideoFormat_VC1::Flush() { MIX_RESULT ret = MIX_RESULT_SUCCESS; LOG_V( "Begin\n"); uint32 pret = 0; - MixInputBufferEntry *bufentry = NULL; /* Chainup parent method. We are not chaining up to parent method for now. */ @@ -1078,16 +901,7 @@ MIX_RESULT MixVideoFormat_VC1::Flush() { Lock(); //Clear the contents of inputbufqueue - while (!j_queue_is_empty(this->inputbufqueue)) { - bufentry = (MixInputBufferEntry *) j_queue_pop_head(this->inputbufqueue); - if (bufentry == NULL) - continue; - mix_buffer_unref(bufentry->buf); - free(bufentry); - } - //Clear parse_in_progress flag and current timestamp - this->parse_in_progress = FALSE; this->discontinuity_frame_in_progress = FALSE; this->current_timestamp = (uint64)-1; @@ -1111,8 +925,7 @@ MIX_RESULT MixVideoFormat_VC1::Flush() { MIX_RESULT MixVideoFormat_VC1::EndOfStream() { MIX_RESULT ret = MIX_RESULT_SUCCESS; - vbp_data_vc1 *data = NULL; - uint32 pret = 0; + LOG_V( "Begin\n"); /* Chainup parent method. We are not chaining up to parent method for now. 
@@ -1123,27 +936,8 @@ MIX_RESULT MixVideoFormat_VC1::EndOfStream() { return parent_class->eos(mix, msg); } #endif - Lock(); - //if a frame is in progress, process the frame - if (this->parse_in_progress) { - //query for data - pret = vbp_query(this->parser_handle, (void **) &data); - if ((pret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error getting last parse data\n"); - goto CLEAN_UP; - } - //process and decode data - ret = _process_decode(data, this->current_timestamp, this->discontinuity_frame_in_progress); - this->parse_in_progress = FALSE; - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error processing last frame\n"); - goto CLEAN_UP; - } - } -CLEAN_UP: - Unlock(); + //Call Frame Manager with _eos() ret = mix_framemanager_eos(this->framemgr); LOG_V( "End\n"); return ret; @@ -1306,12 +1100,6 @@ MIX_RESULT MixVideoFormat_VC1::_process_decode( _handle_ref_frames(frame_type, frame); } -//TODO Complete YUVDUMP code and move into base class -#ifdef YUVDUMP - if (mix_video_vc1_counter < 10) - ret = _get_Img_from_surface(frame); - //g_usleep(5000000); -#endif /* YUVDUMP */ LOG_V( "Enqueueing the frame with frame manager, timestamp %"UINT64_FORMAT"\n", timestamp); @@ -1325,7 +1113,7 @@ MIX_RESULT MixVideoFormat_VC1::_process_decode( unrefVideoFrame = FALSE; CLEAN_UP: - _release_input_buffers(timestamp); + if (unrefVideoFrame) mix_videoframe_unref(frame); LOG_V( "End\n"); @@ -1333,32 +1121,11 @@ CLEAN_UP: } MIX_RESULT MixVideoFormat_VC1::_release_input_buffers(uint64 timestamp) { - MixInputBufferEntry *bufentry = NULL; - bool done = FALSE; + LOG_V( "Begin\n"); - //Dequeue and release all input buffers for this frame - LOG_V( "Releasing all the MixBuffers for this frame\n"); - //While the head of the queue has timestamp == current ts - //dequeue the entry, unref the MixBuffer, and free the struct - done = FALSE; - while (!done) { - bufentry = (MixInputBufferEntry *) j_queue_peek_head(this->inputbufqueue); - if (bufentry == NULL) - break; - LOG_V( "head of queue buf %x, timestamp %"UINT64_FORMAT", buffer timestamp %"UINT64_FORMAT"\n", - (uint)bufentry->buf, timestamp, bufentry->timestamp); - if (bufentry->timestamp != timestamp) { - LOG_V( "buf %x, timestamp %"UINT64_FORMAT", buffer timestamp %"UINT64_FORMAT"\n", - (uint)bufentry->buf, timestamp, bufentry->timestamp); - done = TRUE; - break; - } - bufentry = (MixInputBufferEntry *) j_queue_pop_head(this->inputbufqueue); - LOG_V( "Unref this MixBuffers %x\n", (uint)bufentry->buf); - mix_buffer_unref(bufentry->buf); - free(bufentry); - } + // Nothing to release. Deprecated. + LOG_V( "End\n"); return MIX_RESULT_SUCCESS; } diff --git a/mix_video/src/mixvideoformat_vc1.h b/mix_video/src/mixvideoformat_vc1.h index 8ec0eea..80659f3 100644 --- a/mix_video/src/mixvideoformat_vc1.h +++ b/mix_video/src/mixvideoformat_vc1.h @@ -50,6 +50,7 @@ private: MIX_RESULT _update_config_params(vbp_data_vc1 *data); MIX_RESULT _decode_a_picture( vbp_data_vc1 *data, int pic_index, MixVideoFrame *frame); + MIX_RESULT _initialize_va(vbp_data_vc1 *data); #ifdef YUVDUMP MIX_RESULT _get_Img_from_surface (MixVideoFrame * frame); #endif -- cgit v1.2.3 From 40016f07c72ec97761793bef760388a43d6b251c Mon Sep 17 00:00:00 2001 From: xli111 Date: Mon, 25 Apr 2011 10:05:09 +0800 Subject: [BZ669] Ignore unimportant bits in h263 picture layer. Frames fail to show up on some Level>=30 H263 media clips. Root Cause: Mixvbp does not handle custom picture clock frequency in OPPTYPE of PLUSTYPE in h263 picture layer. 
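In outline, the fix (described under "Solution" below) consumes the extra picture-layer bits before reading the quantizer. A minimal stand-alone sketch follows; the bit reader here is hypothetical, while the real parser uses viddec_pm_get_bits as shown in the diff below:

    #include <stdint.h>

    /* Hypothetical MSB-first bit reader, only for this sketch. */
    typedef struct {
        const uint8_t *buf;
        uint32_t bitpos;
    } bitreader_t;

    static uint32_t br_get(bitreader_t *br, int nbits)
    {
        uint32_t v = 0;
        while (nbits-- > 0) {
            v = (v << 1) | ((br->buf[br->bitpos >> 3] >> (7 - (br->bitpos & 7))) & 1u);
            br->bitpos++;
        }
        return v;
    }

    /* Bit 7 of the OPPTYPE optional-indicators byte signals a custom picture
     * clock frequency; when set, 10 extra bits (8 + 2, matching the two
     * viddec_pm_get_bits calls added by this commit) precede vop_quant and
     * must be skipped before the 5-bit quantizer is read. */
    static uint8_t read_vop_quant_svh(bitreader_t *br, uint8_t optional_indicators)
    {
        if (optional_indicators & 0x80) {
            (void)br_get(br, 8);
            (void)br_get(br, 2);
        }
        return (uint8_t)(br_get(br, 5) & 0x1f);  /* vop_quant */
    }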
Solution: When Mixvbp detects that the custom picture clock frequency flag is set, it skips 10 bits to reach the vop_quant value.

Change-Id: I31091635dd3f937d3135ee23cc9e83bdc424ffda
Signed-off-by: xli111
---
 .../viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c
index 32d6d93..2c19add 100644
--- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c
@@ -25,6 +25,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p
     mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263);
     int32_t getbits = 0;
     uint8_t pei = 0;
+    uint8_t optional_indicators_8bits = 0;
 
     do
     {
@@ -116,11 +117,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p
             //optional indicators
             getbits = viddec_pm_get_bits(parent, &data, 8);
             BREAK_GETBITS_REQD_MISSING(getbits, ret);
-            if ( 0 != (data & 0xff))
-            {
-                ret = MP4_STATUS_PARSE_ERROR;
-                break;
-            }
+            optional_indicators_8bits = data;
             //reserved zero bits
             getbits = viddec_pm_get_bits(parent, &data, 3);
             BREAK_GETBITS_REQD_MISSING(getbits, ret);
@@ -224,6 +221,12 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p
             BREAK_GETBITS_REQD_MISSING(getbits, ret);
             svh->picture_height_indication = (data & 0x1ff);
         }
+
+        if (optional_indicators_8bits & 0x80) {
+            viddec_pm_get_bits(parent, &data, 8);
+            viddec_pm_get_bits(parent, &data, 2);
+        }
+
         viddec_pm_get_bits(parent, &data, 5);
         BREAK_GETBITS_REQD_MISSING(getbits, ret);
         svh->vop_quant = (data & 0x1f);
--
cgit v1.2.3


From a0d0bee589c8cad2295373ecef1655b71b4272a4 Mon Sep 17 00:00:00 2001
From: xli111
Date: Mon, 25 Apr 2011 16:50:01 +0800
Subject: [BZ1129] Change the quantization table for MPEG4 decoder.
Change-Id: Ieef014bc5d9a142cb7ad581d0d4e6df6e8c0d88f
Signed-off-by: xli111
---
 .../mp4/parser/viddec_mp4_videoobjectlayer.c  | 27 +++++++++++++++++++++-
 mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c |  6 ++---
 2 files changed, 29 insertions(+), 4 deletions(-)

diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c
index 5ef0960..f5784c3 100644
--- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c
@@ -1,6 +1,6 @@
 #include
 #include "viddec_mp4_videoobjectlayer.h"
-
+#ifndef VBP
 const unsigned char mp4_DefaultIntraQuantMatrix[64] = {
     8, 17, 18, 19, 21, 23, 25, 27,
     17, 18, 19, 21, 23, 25, 27, 28,
@@ -21,6 +21,31 @@ const unsigned char mp4_DefaultNonIntraQuantMatrix[64] = {
     22, 23, 24, 26, 27, 28, 30, 31,
     23, 24, 25, 27, 28, 30, 31, 33
 };
+
+#else
+const unsigned char mp4_DefaultIntraQuantMatrix[64] = {
+    8, 17, 17, 20, 18, 18, 19, 19,
+    21, 21, 22, 22, 22, 21, 21, 23,
+    23, 23, 23, 23, 23, 25, 24, 24,
+    24, 24, 25, 25, 27, 27, 26, 26,
+    26, 26, 26, 27, 28, 28, 28, 28,
+    28, 28, 28, 30, 30, 30, 30, 30,
+    30, 32, 32, 32, 32, 32, 35, 35,
+    35, 35, 38, 38, 38, 41, 41, 45
+};
+
+const unsigned char mp4_DefaultNonIntraQuantMatrix[64] = {
+    16, 17, 17, 18, 18, 18, 19, 19,
+    19, 19, 20, 20, 20, 20, 20, 21,
+    21, 21, 21, 21, 21, 22, 22, 22,
+    22, 22, 22, 22, 23, 23, 23, 23,
+    23, 23, 23, 23, 24, 24, 24, 25,
+    24, 24, 24, 25, 26, 26, 26, 26,
+    25, 27, 27, 27, 27, 27, 28, 28,
+    28, 28, 30, 30, 30, 31, 31, 33
+};
+
+#endif
 const unsigned char mp4_ClassicalZigzag[64] = {
     0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5,
     12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28,
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
index e1a0829..97c0304 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
@@ -606,9 +606,9 @@ void vbp_fill_iq_matrix_buffer(vbp_context *pcontext)
 
     iq_matrix = &(query_data->iq_matrix_buffer);
 
-    iq_matrix->load_intra_quant_mat = quant_mat_info->load_intra_quant_mat;
-    iq_matrix->load_non_intra_quant_mat
-        = quant_mat_info->load_nonintra_quant_mat;
+    iq_matrix->load_intra_quant_mat = 1; //quant_mat_info->load_intra_quant_mat;
+    iq_matrix->load_non_intra_quant_mat = 1;
+    // = quant_mat_info->load_nonintra_quant_mat;
     memcpy(iq_matrix->intra_quant_mat, quant_mat_info->intra_quant_mat, 64);
     memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat, 64);
 }
--
cgit v1.2.3


From 4aa8d988a71ea74845116863b6505661c4f9f311 Mon Sep 17 00:00:00 2001
From: Shuo Liu
Date: Tue, 10 May 2011 15:13:15 +0800
Subject: [BZ1652]: H263 video couldn't seek

root cause: The bug is caused by special clips that use a P frame as the key frame.

solution: All of the macroblocks of the key-P frame are intra coded, so although it is marked as an inter frame, it can be decoded independently. Remove the reference frame check for P frames within MIX so that the key frame can pass the decoding process when seeking.
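Condensed, the relaxed gate looks like the following sketch (a hypothetical free function, not the actual MixVideoFormat_MP42 method; the real change is the #if 0 block in the diff below):

    /* Before the fix, a P/S frame with no stored reference frame was dropped
     * with MIX_RESULT_DROPFRAME; after it, the frame is allowed through, since
     * a key-P frame whose macroblocks are all intra coded needs no reference. */
    static int p_or_s_frame_can_begin_decode(const void *reference_frame0)
    {
        (void)reference_frame0;  /* reference check intentionally bypassed */
        return 1;
    }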
Change-Id: I3a007732779433de83ba207f3a1ca9baf53532b8
Signed-off-by: Shuo Liu
---
 mix_video/src/mixvideoformat_mp42.cpp | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/mix_video/src/mixvideoformat_mp42.cpp b/mix_video/src/mixvideoformat_mp42.cpp
index 755501a..b3b6e25 100644
--- a/mix_video/src/mixvideoformat_mp42.cpp
+++ b/mix_video/src/mixvideoformat_mp42.cpp
@@ -710,10 +710,16 @@ MIX_RESULT MixVideoFormat_MP42::_decode_begin(vbp_data_mp42* data) {
             return MIX_RESULT_DROPFRAME;
         }
     } else if (frame_type == MP4_VOP_TYPE_P || frame_type == MP4_VOP_TYPE_S) {
+#if 0
+        /*
+         * For special clips using a P frame (a special P frame with all MBs intra coded) as the key frame,
+         * we need to skip the reference check to enable the seek
+         */
         if (this->reference_frames[0] == NULL) {
             LOG_W("Reference frames for P/S frame is missing\n");
             return MIX_RESULT_DROPFRAME;
         }
+#endif
     }
     // all sanity check passes, continue decoding through mix_videofmt_mp42_decode_continue
     ret = _decode_continue(data);
--
cgit v1.2.3


From 8a43d0ac2e2bd385a572b05b683bffd86198220c Mon Sep 17 00:00:00 2001
From: Shuo Liu
Date: Mon, 30 May 2011 09:46:18 +0800
Subject: libmix: override AVC's DPB size from VUI data

BZ: 1360

For some incorrectly coded clips, the reference frame count in the SPS is smaller than the DPB length described in the VUI data, which is in fact not permitted by the H.264 spec. Sizing the DPB by the SPS's reference frame count leaves the DPB too short, so reference frames are handled incorrectly, causing corruption and mosaics in decoded pictures.

Enlarge the DPB length when the VUI's DPB length exceeds the SPS's reference frame count.

Change-Id: I27c0bec82aa19647c676c59a80a2aecdfe0a0f75
Signed-off-by: Shuo Liu
---
 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
index 25ca059..6d52307 100644
--- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
@@ -2908,6 +2908,10 @@ void h264_dpb_idr_memory_management (h264_Info * pInfo,seq_param_set_used_ptr ac
     if (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > p_dpb->BumpLevel)
     {
         //MFD_PARSER_DEBUG(ERROR_H264_DPB);
         //// err handling here
+
+        //// For some illegal clips, the max dpb length described in the vui might exceed the sps's value
+        //// To guarantee normal playback, just select the vui value to override
+        p_dpb->BumpLevel = active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering;
     }
     else {
         p_dpb->BumpLevel = (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > 1) ?
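Condensed, the override amounts to the sketch below (dpb_bump_level is a hypothetical helper; the seeding of BumpLevel from the SPS reference frame count is assumed here and happens elsewhere in h264parse_dpb.c):

    #include <stdint.h>

    /* Example: num_ref_frames = 2 in the SPS but max_dec_frame_buffering = 4
     * in the VUI gives a DPB depth of 4, so frames that are still referenced
     * or awaiting output are not evicted early. */
    static uint32_t dpb_bump_level(uint32_t sps_num_ref_frames,
                                   uint32_t vui_max_dec_frame_buffering)
    {
        uint32_t bump_level = sps_num_ref_frames;      /* assumed initial seed */
        if (vui_max_dec_frame_buffering > bump_level)
            bump_level = vui_max_dec_frame_buffering;  /* larger VUI value wins */
        return bump_level;
    }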
-- cgit v1.2.3 From f6fc06387218978a10722194c491f846d1d800f4 Mon Sep 17 00:00:00 2001 From: Andy Qiu Date: Tue, 14 Jun 2011 22:28:27 -0700 Subject: new asf parser and video decoder libraries with several fixes in mix_vbp Change-Id: Iacff5d468a1525cb33568478eef934688b1d02a7 BZ: 3372 --- Android.mk | 2 + asfparser/Android.mk | 20 + asfparser/AsfDataParser.cpp | 608 ++++++++++++++++ asfparser/AsfDataParser.h | 254 +++++++ asfparser/AsfGuids.cpp | 30 + asfparser/AsfGuids.h | 192 +++++ asfparser/AsfHeaderParser.cpp | 396 ++++++++++ asfparser/AsfHeaderParser.h | 78 ++ asfparser/AsfIndexParser.cpp | 135 ++++ asfparser/AsfIndexParser.h | 61 ++ asfparser/AsfObjects.h | 309 ++++++++ asfparser/AsfParserDefs.h | 116 +++ asfparser/AsfStreamParser.cpp | 189 +++++ asfparser/AsfStreamParser.h | 88 +++ mix_vbp/Android.mk | 1 + mix_vbp/viddec_fw/fw/codecs/vc1/parser/Android.mk | 37 + mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 286 +++----- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h | 29 +- mix_vbp/viddec_fw/fw/parser/vbp_loader.c | 37 +- mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 87 ++- mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 281 ++++---- mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h | 29 +- mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 96 +-- mix_vbp/viddec_fw/fw/parser/vbp_utils.h | 57 +- mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c | 53 +- mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h | 30 +- videodecoder/Android.mk | 40 + videodecoder/VideoDecoderAVC.cpp | 768 ++++++++++++++++++++ videodecoder/VideoDecoderAVC.h | 83 +++ videodecoder/VideoDecoderBase.cpp | 842 ++++++++++++++++++++++ videodecoder/VideoDecoderBase.h | 134 ++++ videodecoder/VideoDecoderDefs.h | 167 +++++ videodecoder/VideoDecoderHost.cpp | 68 ++ videodecoder/VideoDecoderHost.h | 37 + videodecoder/VideoDecoderInterface.h | 42 ++ videodecoder/VideoDecoderMPEG4.cpp | 497 +++++++++++++ videodecoder/VideoDecoderMPEG4.h | 71 ++ videodecoder/VideoDecoderTrace.cpp | 45 ++ videodecoder/VideoDecoderTrace.h | 94 +++ videodecoder/VideoDecoderVP8.cpp | 85 +++ videodecoder/VideoDecoderVP8.h | 50 ++ videodecoder/VideoDecoderWMV.cpp | 475 ++++++++++++ videodecoder/VideoDecoderWMV.h | 63 ++ 43 files changed, 6591 insertions(+), 471 deletions(-) create mode 100644 asfparser/Android.mk create mode 100644 asfparser/AsfDataParser.cpp create mode 100644 asfparser/AsfDataParser.h create mode 100644 asfparser/AsfGuids.cpp create mode 100644 asfparser/AsfGuids.h create mode 100644 asfparser/AsfHeaderParser.cpp create mode 100644 asfparser/AsfHeaderParser.h create mode 100644 asfparser/AsfIndexParser.cpp create mode 100644 asfparser/AsfIndexParser.h create mode 100644 asfparser/AsfObjects.h create mode 100644 asfparser/AsfParserDefs.h create mode 100644 asfparser/AsfStreamParser.cpp create mode 100644 asfparser/AsfStreamParser.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/vc1/parser/Android.mk create mode 100644 videodecoder/Android.mk create mode 100644 videodecoder/VideoDecoderAVC.cpp create mode 100644 videodecoder/VideoDecoderAVC.h create mode 100644 videodecoder/VideoDecoderBase.cpp create mode 100644 videodecoder/VideoDecoderBase.h create mode 100644 videodecoder/VideoDecoderDefs.h create mode 100644 videodecoder/VideoDecoderHost.cpp create mode 100644 videodecoder/VideoDecoderHost.h create mode 100644 videodecoder/VideoDecoderInterface.h create mode 100644 videodecoder/VideoDecoderMPEG4.cpp create mode 100644 videodecoder/VideoDecoderMPEG4.h create mode 100644 videodecoder/VideoDecoderTrace.cpp create mode 100644 videodecoder/VideoDecoderTrace.h 
create mode 100644 videodecoder/VideoDecoderVP8.cpp create mode 100644 videodecoder/VideoDecoderVP8.h create mode 100644 videodecoder/VideoDecoderWMV.cpp create mode 100644 videodecoder/VideoDecoderWMV.h diff --git a/Android.mk b/Android.mk index 135c4c4..1a56908 100644 --- a/Android.mk +++ b/Android.mk @@ -7,3 +7,5 @@ include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_common/src/Android.mk #include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_audio/src/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_video/src/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_vbp/Android.mk +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk diff --git a/asfparser/Android.mk b/asfparser/Android.mk new file mode 100644 index 0000000..83fafea --- /dev/null +++ b/asfparser/Android.mk @@ -0,0 +1,20 @@ +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + + +LOCAL_SRC_FILES := \ + AsfStreamParser.cpp \ + AsfDataParser.cpp \ + AsfHeaderParser.cpp \ + AsfIndexParser.cpp \ + AsfGuids.cpp + + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH) + + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libasfparser + +include $(BUILD_SHARED_LIBRARY) diff --git a/asfparser/AsfDataParser.cpp b/asfparser/AsfDataParser.cpp new file mode 100644 index 0000000..1982179 --- /dev/null +++ b/asfparser/AsfDataParser.cpp @@ -0,0 +1,608 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+*
+*/
+
+
+
+#include "AsfDataParser.h"
+#include "AsfGuids.h"
+#include "AsfObjects.h"
+#include <string.h>
+
+
+// Helper functions
+
+static inline uint8_t lengthType2Bytes(uint8_t lengthType) {
+    // lengthType: 0 1 2  3
+    // bits:       0 8 16 32
+    // bytes:      0 1 2  4
+    return 4 >> (3 - (lengthType & 0x03));
+}
+
+static inline uint32_t getModuleValue(uint32_t value, uint8_t lengthType) {
+    switch (lengthType) {
+    case 0:
+        return 0;  // field does not exist
+    case 1:
+        return value % 0x100;    // (BYTE)
+    case 2:
+        return value % 0x10000;  // (WORD)
+    case 3:
+        return value;            // (DWORD)
+    }
+    return value;
+}
+
+static inline uint32_t getFieldValue(uint8_t *buffer, uint8_t lengthType) {
+    switch (lengthType) {
+    case 0:
+        return 0;  // field does not exist
+    case 1:
+        return *buffer;
+    case 2:
+        return *(uint16_t*)buffer;
+    case 3:
+        return *(uint32_t*)buffer;
+    }
+    // This line should not be reached
+    return 0xffffffff;
+}
+
+static void freePayloadDataInfo(AsfPayloadDataInfo *header) {
+    while (header) {
+        AsfPayloadDataInfo *next = header->next;
+        delete header;
+        header = next;
+    }
+}
+AsfPayloadDataInfoPool::AsfPayloadDataInfoPool()
+    : mFirstDataInfo(NULL),
+      mLastDataInfo(NULL) {
+}
+
+AsfPayloadDataInfoPool::~AsfPayloadDataInfoPool() {
+    freePayloadDataInfo(mFirstDataInfo);
+}
+
+void AsfPayloadDataInfoPool::releasePayloadDataInfo(AsfPayloadDataInfo *info) {
+    if (info == NULL) {
+        return;
+    }
+
+    if (mFirstDataInfo == NULL) {
+        mFirstDataInfo = info;
+    } else {
+        mLastDataInfo->next = info;
+    }
+    while (info->next != NULL) {
+        info = info->next;
+    }
+    mLastDataInfo = info;
+}
+
+AsfPayloadDataInfo* AsfPayloadDataInfoPool::getPayloadDataInfo() {
+    AsfPayloadDataInfo *entry;
+
+    if (mFirstDataInfo == NULL) {
+        entry = new AsfPayloadDataInfo;
+        if (entry == NULL) {
+            return NULL;
+        }
+    } else {
+        entry = mFirstDataInfo;
+        mFirstDataInfo = mFirstDataInfo->next;
+        if (mFirstDataInfo == NULL) {
+            mLastDataInfo = NULL;
+        }
+    }
+    memset(entry, 0, sizeof(AsfPayloadDataInfo));
+    return entry;
+}
+
+
+int AsfErrorCorrectionData::parse(uint8_t *buffer, uint32_t size) {
+    errorCorrectionFlags.value = *buffer;
+
+    blockSize = 0;
+    if (errorCorrectionFlags.bits.errorCorrectionPresent == 0) {
+        return ASF_PARSER_SUCCESS;
+    }
+
+    blockSize = 1;
+    // determine if Error Correction Data Length is valid
+    if (errorCorrectionFlags.bits.errorCorrectionLengthType == 0) {
+        // Error Correction Data Length is valid only if the value of the Error Correction Length Type is 00
+
+        // Error Correction Data Length should be 0010
+        // Opaque Data Present should be set to 0
+        blockSize += errorCorrectionFlags.bits.errorCorrectionDataLength;
+        return ASF_PARSER_SUCCESS;
+    }
+
+    // if Error Correction Length Type is different than 00, Error Correction Data Length shall be zero.
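+    // Added note: concretely, lengthType 00 is the only case where the 4-bit
+    // errorCorrectionDataLength field is meaningful (it is added to blockSize
+    // above); for any other lengthType the field must read zero, otherwise
+    // the check below rejects the packet as ASF_PARSER_BAD_VALUE.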
+ if (errorCorrectionFlags.bits.errorCorrectionDataLength == 0) { + return ASF_PARSER_SUCCESS; + } + + return ASF_PARSER_BAD_VALUE; +} + + +int AsfPayloadParsingInformation::parse(uint8_t *buffer, uint32_t size) { + lengthTypeFlags.value = *buffer; + propertyFlags.value = *(buffer + 1); + + // lengthTypeFlags: + // sequence type should be set to 00 + // packet length type should be set to 00 when creating content + // propertyFlags: + // replicated data length type should be set to 01 (BYTE) + // offset into media object shall be set to 11 (DWORD) + // media object number length type shall be set to 01 (BYTE) + // stream number length type shalll be set to 01 (BYTE) + + blockSize = 2; + packetLength = getFieldValue(buffer + blockSize, lengthTypeFlags.bits.packetLengthType); + blockSize += lengthType2Bytes(lengthTypeFlags.bits.packetLengthType); + + sequence = getFieldValue(buffer + blockSize, lengthTypeFlags.bits.sequenceType); + blockSize += lengthType2Bytes(lengthTypeFlags.bits.sequenceType); + + paddingLength = getFieldValue(buffer + blockSize, lengthTypeFlags.bits.paddingLengthType); + blockSize += lengthType2Bytes(lengthTypeFlags.bits.paddingLengthType); + + sendTime = *(uint32_t*)(buffer + blockSize); + blockSize += 4; + + duration = *(uint16_t*)(buffer + blockSize); + blockSize += 2; + + return ASF_PARSER_SUCCESS; +} + + +int AsfSinglePayloadUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out) { + // initialize output + *out = NULL; + streamNumber.value = *buffer; + blockSize = 1; + + mediaObjectNumber = getFieldValue(buffer + blockSize, ppi->propertyFlags.bits.mediaObjectNumberLengthType); + blockSize += lengthType2Bytes(ppi->propertyFlags.bits.mediaObjectNumberLengthType); + + offsetIntoMediaObject = getFieldValue(buffer + blockSize, ppi->propertyFlags.bits.offsetIntoMediaObjectLengthType); + blockSize += lengthType2Bytes(ppi->propertyFlags.bits.offsetIntoMediaObjectLengthType); + + replicatedDataLength = getFieldValue(buffer + blockSize, ppi->propertyFlags.bits.replicatedDataLengthType); + blockSize += lengthType2Bytes(ppi->propertyFlags.bits.replicatedDataLengthType); + + if (replicatedDataLength == 1) { + // compressed payload + blockSize == 0; + return ASF_PARSER_COMPRESSED_PAYLOAD; + } + + if (replicatedDataLength == 0) { + // TODO: + return ASF_PARSER_UNEXPECTED_VALUE; + } + + if (replicatedDataLength < 8) { + return ASF_PARSER_BAD_VALUE; + } + + AsfPayloadDataInfo *obj = pool->getPayloadDataInfo(); + if (obj == NULL) { + return ASF_PARSER_NO_MEMORY; + } + + // Replicated data, at least 8 bytes + obj->mediaObjectLength = *(uint32_t*)(buffer + blockSize); + obj->presentationTime = *(uint32_t*)(buffer + blockSize + 4); + + blockSize += replicatedDataLength; + + obj->payloadData = buffer + blockSize; + + // size = packet length - packet header length + // payload size = size - payload header size (blockSize) - padding length + obj->payloadSize = size - blockSize - ppi->paddingLength; + if ((int)obj->payloadSize <= 0) { + delete obj; + return ASF_PARSER_BAD_VALUE; + } + obj->offsetIntoMediaObject = offsetIntoMediaObject; + obj->streamNumber = streamNumber.bits.streamNumber; + obj->mediaObjectNumber = mediaObjectNumber; + obj->keyframe = streamNumber.bits.keyFrameBit; + obj->next = NULL; + + // skip padding data + blockSize += ppi->paddingLength; + *out = obj; + return ASF_PARSER_SUCCESS; +} + + +int AsfSinglePayloadCompressed::parse(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out) { + // initialize output + *out = NULL; + streamNumber.value = 
*buffer; + blockSize = 1; + + mediaObjectNumber = getFieldValue(buffer + blockSize, ppi->propertyFlags.bits.mediaObjectNumberLengthType); + blockSize += lengthType2Bytes(ppi->propertyFlags.bits.mediaObjectNumberLengthType); + + // presentation time is coded using the value of Offset Into Media Object Length Type + presentationTime= getFieldValue(buffer + blockSize, ppi->propertyFlags.bits.offsetIntoMediaObjectLengthType); + blockSize += lengthType2Bytes(ppi->propertyFlags.bits.offsetIntoMediaObjectLengthType); + + // must be 1 + replicatedDataLength = getFieldValue(buffer + blockSize, ppi->propertyFlags.bits.replicatedDataLengthType); + blockSize += lengthType2Bytes(ppi->propertyFlags.bits.replicatedDataLengthType); + + presentationTimeDelta = *(buffer + blockSize); + blockSize++; + + int payloadLenRemaining = size - blockSize - ppi->paddingLength; + if (payloadLenRemaining <= 0) { + return ASF_PARSER_BAD_VALUE; + } + + uint32_t pts = presentationTime; + uint32_t objNumber = mediaObjectNumber; + + uint8_t subPayloadDataLength; + AsfPayloadDataInfo *first = NULL, *next = NULL, *last = NULL; + + while (payloadLenRemaining > 0) { + subPayloadDataLength = *(buffer + blockSize); + blockSize++; + payloadLenRemaining -= 1; + + next = pool->getPayloadDataInfo(); + if (next == NULL) { + freePayloadDataInfo(first); + return ASF_PARSER_NO_MEMORY; + } + + next->payloadData = buffer + blockSize; + next->payloadSize = subPayloadDataLength; + next->presentationTime = pts; + next->offsetIntoMediaObject = 0; + next->mediaObjectLength = subPayloadDataLength; + next->streamNumber = streamNumber.bits.streamNumber; + next->mediaObjectNumber = getModuleValue(objNumber, ppi->propertyFlags.bits.mediaObjectNumberLengthType); + next->keyframe = streamNumber.bits.keyFrameBit; + next->next = NULL; + + if (first == NULL) { + first = next; + last = next; + } else { + last->next = next; + last = next; + } + + pts += presentationTimeDelta; + objNumber++; + blockSize += subPayloadDataLength; + payloadLenRemaining -= subPayloadDataLength; + } + + + if (payloadLenRemaining != 0) { + // TODO: + freePayloadDataInfo(first); + return ASF_PARSER_BAD_VALUE; + } + + // skip padding data + blockSize += ppi->paddingLength; + *out = first; + return ASF_PARSER_SUCCESS; +} + + +int AsfMultiplePayloadsHeader::parse(uint8_t *buffer, uint32_t size) { + payloadFlags.value = *buffer; + blockSize = 1; + + // number of payloads must not be 0 + if (payloadFlags.bits.numberOfPayloads == 0) { + return ASF_PARSER_BAD_VALUE; + } + + // payload length type should be set to 10 (WORD) + return ASF_PARSER_SUCCESS; +} + + +int AsfMultiplePayloadsUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out) { + // initialize output + *out = NULL; + streamNumber.value = *buffer; + blockSize = 1; + + mediaObjectNumber = getFieldValue(buffer + blockSize, ppi->propertyFlags.bits.mediaObjectNumberLengthType); + blockSize += lengthType2Bytes(ppi->propertyFlags.bits.mediaObjectNumberLengthType); + + offsetIntoMediaObject = getFieldValue(buffer + blockSize, ppi->propertyFlags.bits.offsetIntoMediaObjectLengthType); + blockSize += lengthType2Bytes(ppi->propertyFlags.bits.offsetIntoMediaObjectLengthType); + + replicatedDataLength = getFieldValue(buffer + blockSize, ppi->propertyFlags.bits.replicatedDataLengthType); + blockSize += lengthType2Bytes(ppi->propertyFlags.bits.replicatedDataLengthType); + + if (replicatedDataLength == 1) { + // compressed payload + blockSize == 0; + return ASF_PARSER_COMPRESSED_PAYLOAD; + } + + if (replicatedDataLength == 
0) { + // TODO: + return ASF_PARSER_UNEXPECTED_VALUE; + } + + if (replicatedDataLength < 8) { + return ASF_PARSER_BAD_VALUE; + } + + AsfPayloadDataInfo *obj = pool->getPayloadDataInfo(); + if (obj == NULL) { + return ASF_PARSER_NO_MEMORY; + } + + // at least 8 bytes replicated data + obj->mediaObjectLength = *(uint32_t *)(buffer + blockSize); + obj->presentationTime = *(uint32_t *)(buffer + blockSize + 4); + + blockSize += replicatedDataLength; + + // payload length must not be 0 + payloadLength = getFieldValue(buffer + blockSize, mpHeader->payloadFlags.bits.payloadLengthType); + blockSize += lengthType2Bytes(mpHeader->payloadFlags.bits.payloadLengthType); + + if (payloadLength == 0 || payloadLength + blockSize > size) { + delete obj; + return ASF_PARSER_BAD_VALUE; + } + + obj->payloadData = buffer + blockSize; + obj->payloadSize = payloadLength; + + obj->offsetIntoMediaObject = offsetIntoMediaObject; + obj->streamNumber = streamNumber.bits.streamNumber; + obj->mediaObjectNumber = mediaObjectNumber; + obj->keyframe = streamNumber.bits.keyFrameBit; + obj->next = NULL; + + // skip payload data + blockSize += payloadLength; + *out = obj; + return ASF_PARSER_SUCCESS; +} + + +int AsfMultiplePayloadsCompressed::parse(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out) { + // initialize output + *out = NULL; + streamNumber.value = *buffer; + blockSize = 1; + + mediaObjectNumber = getFieldValue(buffer + blockSize, ppi->propertyFlags.bits.mediaObjectNumberLengthType); + blockSize += lengthType2Bytes(ppi->propertyFlags.bits.mediaObjectNumberLengthType); + + // presentation time is coded using the value of Offset Into Media Object Length Type + presentationTime= getFieldValue(buffer + blockSize, ppi->propertyFlags.bits.offsetIntoMediaObjectLengthType); + blockSize += lengthType2Bytes(ppi->propertyFlags.bits.offsetIntoMediaObjectLengthType); + + // must be 1 + replicatedDataLength = getFieldValue(buffer + blockSize, ppi->propertyFlags.bits.replicatedDataLengthType); + blockSize += lengthType2Bytes(ppi->propertyFlags.bits.replicatedDataLengthType); + + presentationTimeDelta = *(buffer + blockSize); + blockSize++; + + // payload length must not be 0 + payloadLength = getFieldValue(buffer + blockSize, mpHeader->payloadFlags.bits.payloadLengthType); + blockSize += lengthType2Bytes(mpHeader->payloadFlags.bits.payloadLengthType); + if (payloadLength == 0 || blockSize + payloadLength > size) { + return ASF_PARSER_BAD_VALUE; + } + + // safe to case from uint32_t to int. 
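+    // Sub-payload layout consumed by the loop below: each sub-payload is a
+    // 1-byte length followed by that many data bytes, repeated until
+    // payloadLenRemaining is exhausted. Each sub-payload is one complete
+    // media object, and its presentation time advances by
+    // presentationTimeDelta from the previous one.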
+ int payloadLenRemaining = (int)payloadLength; + uint32_t pts = presentationTime; + uint32_t objNumber = mediaObjectNumber; + uint8_t subPayloadDataLength; + AsfPayloadDataInfo *first = NULL, *next = NULL, *last = NULL; + + while (payloadLenRemaining > 0) { + subPayloadDataLength = *(buffer + blockSize); + blockSize++; + payloadLenRemaining -= 1; + + next = pool->getPayloadDataInfo(); + if (next == NULL) { + freePayloadDataInfo(first); + return ASF_PARSER_NO_MEMORY; + } + + next->payloadData = buffer + blockSize; + next->payloadSize = subPayloadDataLength; + next->presentationTime = pts; + next->offsetIntoMediaObject = 0; + next->mediaObjectLength = subPayloadDataLength; + next->streamNumber = streamNumber.bits.streamNumber; + next->mediaObjectNumber = getModuleValue(objNumber, ppi->propertyFlags.bits.mediaObjectNumberLengthType); + next->keyframe = streamNumber.bits.keyFrameBit; + next->next = NULL; + + if (first == NULL) { + first = next; + last = next; + } else { + last->next = next; + last = next; + } + + pts += presentationTimeDelta; + objNumber++; + blockSize += subPayloadDataLength; + payloadLenRemaining -= subPayloadDataLength; + } + + + if (payloadLenRemaining < 0) { + // TODO: + freePayloadDataInfo(first); + return ASF_PARSER_BAD_VALUE; + } + + // blockSize stays as it is + *out = first; + return ASF_PARSER_SUCCESS; +} + + +AsfDataParser::AsfDataParser(void) + : mTotalDataPackets(0) { + mSPUncompressed.ppi = &mPPI; + mSPCompressed.ppi = &mPPI; + mMPHeader.ppi = &mPPI; + mMPUncompressed.ppi = &mPPI; + mMPCompressed.ppi = &mPPI; + + mMPUncompressed.mpHeader = &mMPHeader; + mMPCompressed.mpHeader = &mMPHeader; + + mSPUncompressed.pool = &mPool; + mSPCompressed.pool = &mPool; + mMPUncompressed.pool = &mPool; + mMPCompressed.pool = &mPool; +} + + +AsfDataParser::~AsfDataParser(void) { +} + +int AsfDataParser::parseHeader(uint8_t *buffer, uint32_t size) { + if (size < sizeof(AsfDataObject)) { + return ASF_PARSER_BAD_DATA; + } + AsfDataObject *obj = (AsfDataObject*)buffer; + mTotalDataPackets = obj->totalDataPackets; + return ASF_PARSER_SUCCESS; +} + +uint64_t AsfDataParser::getTotalDataPackets() { + return mTotalDataPackets; +} + +int AsfDataParser::parsePacket(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out) { + int status; + AsfPayloadDataInfo *first = NULL; + + status = mECD.parse(buffer, size); + if (status != ASF_PARSER_SUCCESS) { + return status; + } + + buffer += mECD.blockSize; + size -= mECD.blockSize; + status = mPPI.parse(buffer, size); + if (status != ASF_PARSER_SUCCESS) { + return status; + } + + buffer += mPPI.blockSize; + size -= mPPI.blockSize; + + if (mPPI.lengthTypeFlags.bits.multiplePayloadsPresent) { + status = mMPHeader.parse(buffer, size); + if (status != ASF_PARSER_SUCCESS) { + return status; + } + buffer += mMPHeader.blockSize; + size -= mMPHeader.blockSize; + + AsfPayloadDataInfo *last = NULL, *next = NULL; + for (int i = 0; i < mMPHeader.payloadFlags.bits.numberOfPayloads; i++) { + status = mMPUncompressed.parse(buffer, size, &next); + + if (status == ASF_PARSER_SUCCESS) { + buffer += mMPUncompressed.blockSize; + size -= mMPUncompressed.blockSize; + } else if (status == ASF_PARSER_COMPRESSED_PAYLOAD) { + status = mMPCompressed.parse(buffer, size, &next); + if (status != ASF_PARSER_SUCCESS) { + break; + } + buffer += mMPCompressed.blockSize; + size -= mMPCompressed.blockSize; + } + else { + break; + } + + if ((int)size < 0) { + status = ASF_PARSER_BAD_VALUE; + break; + } + // concatenate the payloads. 
+ if (first == NULL) { + first = next; + last = next; + } + else { + while (last->next != NULL) { + last = last->next; + } + last->next = next; + last = next; + } + } + } + else { + status = mSPUncompressed.parse(buffer, size, &first); + + if (status == ASF_PARSER_COMPRESSED_PAYLOAD) { + status = mSPCompressed.parse(buffer, size, &first); + } + } + + if (status != ASF_PARSER_SUCCESS) { + freePayloadDataInfo(first); + return status; + } + + *out = first; + return ASF_PARSER_SUCCESS; +} + +void AsfDataParser::releasePayloadDataInfo(AsfPayloadDataInfo *info) { + mPool.releasePayloadDataInfo(info); +} + + diff --git a/asfparser/AsfDataParser.h b/asfparser/AsfDataParser.h new file mode 100644 index 0000000..a32924a --- /dev/null +++ b/asfparser/AsfDataParser.h @@ -0,0 +1,254 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + + + +#ifndef ASF_DATA_PARSER_H_ +#define ASF_DATA_PARSER_H_ + +#include "AsfParserDefs.h" + +class AsfPayloadDataInfoPool { +public: + AsfPayloadDataInfoPool(); + ~AsfPayloadDataInfoPool(); + + // put payload data info to internal queue for reuse. 
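+    // The pool keeps a singly linked free list (mFirstDataInfo through
+    // mLastDataInfo): released descriptors are chained for reuse, so
+    // parsePacket does not allocate a fresh AsfPayloadDataInfo per payload.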
+ void releasePayloadDataInfo(AsfPayloadDataInfo *info); + inline AsfPayloadDataInfo* getPayloadDataInfo(); + +private: + AsfPayloadDataInfo *mFirstDataInfo; + AsfPayloadDataInfo *mLastDataInfo; +}; + + +struct AsfErrorCorrectionData { + int parse(uint8_t *buffer, uint32_t size); + + union { + struct { + uint8_t errorCorrectionDataLength :4; + uint8_t opaqueDataPresent :1; + uint8_t errorCorrectionLengthType :2; + uint8_t errorCorrectionPresent :1; + } bits; + uint8_t value; + } errorCorrectionFlags; + + // size of this data block, 0 if Error Correction Present is 0 + uint32_t blockSize; +}; + +struct AsfPayloadParsingInformation { + int parse(uint8_t *buffer, uint32_t size); + + union { + struct { + uint8_t multiplePayloadsPresent :1; + uint8_t sequenceType :2; + uint8_t paddingLengthType :2; + uint8_t packetLengthType :2; + uint8_t errorCorrectionPresent :1; + } bits; + uint8_t value; + } lengthTypeFlags; + + union { + struct { + uint8_t replicatedDataLengthType :2; + uint8_t offsetIntoMediaObjectLengthType :2; + uint8_t mediaObjectNumberLengthType :2; + uint8_t streamNumberLengthType :2; + } bits; + uint8_t value; + } propertyFlags; + + + uint32_t packetLength; // Variable length: 0, 8, 16, 32 + uint32_t sequence; // Variable length: 0, 8, 16, 32 + uint32_t paddingLength; // Variable length: 0, 8, 16, 32 + uint32_t sendTime; + uint16_t duration; + + // size of this data block + uint32_t blockSize; +}; + + +struct AsfSinglePayloadUncompressed { + int parse(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out); + + union { + struct { + uint8_t streamNumber :7; + uint8_t keyFrameBit :1; + } bits; + uint8_t value; + } streamNumber; + + uint32_t mediaObjectNumber; // Variable length: 0, 8, 16, 32 + uint32_t offsetIntoMediaObject; // Variable length: 0, 8, 16, 32 + uint32_t replicatedDataLength; // Variable length: 0, 8, 16, 32 + //BYTE replicatedData[]; + //BYTE payloadData[]; + + // size of this data block including padding data + uint32_t blockSize; + AsfPayloadParsingInformation *ppi; + AsfPayloadDataInfoPool *pool; +}; + + +struct AsfSinglePayloadCompressed { + int parse(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out); + + union { + struct { + uint8_t streamNumber :7; + uint8_t keyFrameBit :1; + } bits; + uint8_t value; + } streamNumber; + + uint32_t mediaObjectNumber; // Variable length: 0, 8, 16, 32 + uint32_t presentationTime; // Variable length: 0, 8, 16, 32 + uint32_t replicatedDataLength; // Variable length: 0, 8, 16, 32 + uint8_t presentationTimeDelta; + //BYTE subPayload #0 data length + //BYTE subPayload #0 data + //BYTE subPayload #1 data length + //BYTE subPayload #1 data + + // size of this block including padding data + uint32_t blockSize; + AsfPayloadParsingInformation *ppi; + AsfPayloadDataInfoPool *pool; +}; + + +struct AsfMultiplePayloadsHeader { + int parse(uint8_t *buffer, uint32_t size); + + union { + struct { + uint8_t numberOfPayloads :6; + uint8_t payloadLengthType :2; + } bits; + uint8_t value; + } payloadFlags; + + // BYTES payloads[]; + + // size of this header block, must be 1 + uint32_t blockSize; + AsfPayloadParsingInformation *ppi; +}; + + +struct AsfMultiplePayloadsUncompressed { + int parse(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out); + + union { + struct { + uint8_t streamNumber :7; + uint8_t keyFrameBit :1; + } bits; + uint8_t value; + } streamNumber; + + uint32_t mediaObjectNumber; // Variable length: 0, 8, 16, 32 + uint32_t offsetIntoMediaObject; // Variable length: 0, 8, 16, 32 + uint32_t replicatedDataLength; // Variable
length: 0, 8, 16, 32 + //BYTE replicatedData[]; + uint32_t payloadLength; // Variable length: 8, 16, 32 + //BYTE payloadData[]; + + // size of this single uncompressed payload block in the multiple payloads packet + uint32_t blockSize; + AsfPayloadParsingInformation *ppi; + AsfMultiplePayloadsHeader *mpHeader; + AsfPayloadDataInfoPool *pool; +}; + + +struct AsfMultiplePayloadsCompressed { + int parse(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out); + + union { + struct { + uint8_t streamNumber :7; + uint8_t keyFrameBit :1; + } bits; + uint8_t value; + } streamNumber; + uint32_t mediaObjectNumber; // Variable length: 0, 8, 16, 32 + uint32_t presentationTime; // Variable length: 0, 8, 16, 32 + uint32_t replicatedDataLength; // Variable length: 0, 8, 16, 32 + uint8_t presentationTimeDelta; + uint32_t payloadLength; // Variable length: 8, 16, 32 + //BYTE subPayload #0 data length + //BYTE subPayload #0 data + //BYTE subPayload #1 data length + //BYTE subPayload #1 data + + // size of this single compressed payload block in the multiple payloads packet. + uint32_t blockSize; + AsfPayloadParsingInformation *ppi; + AsfMultiplePayloadsHeader *mpHeader; + AsfPayloadDataInfoPool *pool; +}; + +class AsfDataParser { +public: + AsfDataParser(void); + ~AsfDataParser(void); + +public: + int parseHeader(uint8_t *buffer, uint32_t size); + + uint64_t getTotalDataPackets(); + // buffer must contain a complete data packet and only one packet + int parsePacket(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out); + // put payload data info to internal queue for reuse. + void releasePayloadDataInfo(AsfPayloadDataInfo *info); + +private: + inline AsfPayloadDataInfo* getPayloadDataInfo(); + +private: + uint64_t mTotalDataPackets; + AsfErrorCorrectionData mECD; + AsfPayloadParsingInformation mPPI; + AsfSinglePayloadUncompressed mSPUncompressed; + AsfSinglePayloadCompressed mSPCompressed; + AsfMultiplePayloadsHeader mMPHeader; + AsfMultiplePayloadsUncompressed mMPUncompressed; + AsfMultiplePayloadsCompressed mMPCompressed; + AsfPayloadDataInfoPool mPool; +}; + +#endif + diff --git a/asfparser/AsfGuids.cpp b/asfparser/AsfGuids.cpp new file mode 100644 index 0000000..ae330a4 --- /dev/null +++ b/asfparser/AsfGuids.cpp @@ -0,0 +1,30 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing.
+* +*/ + + +#define INITGUID + +#include "AsfGuids.h" + + diff --git a/asfparser/AsfGuids.h b/asfparser/AsfGuids.h new file mode 100644 index 0000000..5ab07ae --- /dev/null +++ b/asfparser/AsfGuids.h @@ -0,0 +1,192 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + + +#ifndef ASF_GUIDS_H_ +#define ASF_GUIDS_H_ + + +typedef struct _GUID { + unsigned long Data1; + unsigned short Data2; + unsigned short Data3; + unsigned char Data4[8]; +} GUID; + +inline bool operator == (const GUID& a, const GUID& b) { + return a.Data1 == b.Data1 + && a.Data2 == b.Data2 + && a.Data3 == b.Data3 + && a.Data4[0] == b.Data4[0] + && a.Data4[1] == b.Data4[1] + && a.Data4[2] == b.Data4[2] + && a.Data4[3] == b.Data4[3] + && a.Data4[4] == b.Data4[4] + && a.Data4[5] == b.Data4[5] + && a.Data4[6] == b.Data4[6] + && a.Data4[7] == b.Data4[7]; +} + + +#ifdef INITGUID + #define DEFINE_GUID(name, l, w1, w2, b1, b2, b3, b4, b5, b6, b7, b8) \ + extern const GUID name = { l, w1, w2, { b1, b2, b3, b4, b5, b6, b7, b8 } } + +#else + #define DEFINE_GUID(name, l, w1, w2, b1, b2, b3, b4, b5, b6, b7, b8) \ + extern const GUID name +#endif + + +DEFINE_GUID(ASF_Null_Object, + 0x00000000, 0x0000, 0x0000, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00); +DEFINE_GUID(ASF_Header_Object, + 0x75B22630, 0x668E, 0x11CF, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C); +DEFINE_GUID(ASF_Data_Object, + 0x75B22636, 0x668E, 0x11CF, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C); +DEFINE_GUID(ASF_Simple_Index_Object, + 0x33000890, 0xE5B1, 0x11CF, 0x89, 0xF4, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xCB); +DEFINE_GUID(ASF_Index_Object, + 0xD6E229D3, 0x35DA, 0x11D1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE); +DEFINE_GUID(ASF_Media_Object_Index_Object, + 0xFEB103F8, 0x12AD, 0x4C64, 0x84, 0x0F, 0x2A, 0x1D, 0x2F, 0x7A, 0xD4, 0x8C); +DEFINE_GUID(ASF_Timecode_Index_Object, + 0x3CB73FD0, 0x0C4A, 0x4803, 0x95, 0x3D, 0xED, 0xF7, 0xB6, 0x22, 0x8F, 0x0C); +DEFINE_GUID(ASF_File_Properties_Object, + 0x8CABDCA1, 0xA947, 0x11CF, 0x8E, 0xE4, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65); +DEFINE_GUID(ASF_Stream_Properties_Object, + 0xB7DC0791, 0xA9B7, 0x11CF, 0x8E, 0xE6, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65); +DEFINE_GUID(ASF_Header_Extension_Object, + 0x5FBF03B5, 0xA92E, 0x11CF, 0x8E, 0xE3, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65); +DEFINE_GUID(ASF_Codec_List_Object, + 0x86D15240, 0x311D, 0x11D0, 0xA3, 0xA4, 0x00, 0xA0, 0xC9, 0x03, 0x48, 0xF6); +DEFINE_GUID(ASF_Script_Command_Object, + 0x1EFB1A30, 
0x0B62, 0x11D0, 0xA3, 0x9B, 0x00, 0xA0, 0xC9, 0x03, 0x48, 0xF6); +DEFINE_GUID(ASF_Marker_Object, + 0xF487CD01, 0xA951, 0x11CF, 0x8E, 0xE6, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65); +DEFINE_GUID(ASF_Bitrate_Mutual_Exclusion_Object, + 0xD6E229DC, 0x35DA, 0x11D1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE); +DEFINE_GUID(ASF_Error_Correction_Object, + 0x75B22635, 0x668E, 0x11CF, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C); +DEFINE_GUID(ASF_Content_Description_Object, + 0x75B22633, 0x668E, 0x11CF, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C); +DEFINE_GUID(ASF_Extended_Content_Description_Object, + 0xD2D0A440, 0xE307, 0x11D2, 0x97, 0xF0, 0x00, 0xA0, 0xC9, 0x5E, 0xA8, 0x50); +DEFINE_GUID(ASF_Content_Branding_Object, + 0x2211B3FA, 0xBD23, 0x11D2, 0xB4, 0xB7, 0x00, 0xA0, 0xC9, 0x55, 0xFC, 0x6E); +DEFINE_GUID(ASF_Stream_Bitrate_Properties_Object, + 0x7BF875CE, 0x468D, 0x11D1, 0x8D, 0x82, 0x00, 0x60, 0x97, 0xC9, 0xA2, 0xB2); +DEFINE_GUID(ASF_Content_Encryption_Object, + 0x2211B3FB, 0xBD23, 0x11D2, 0xB4, 0xB7, 0x00, 0xA0, 0xC9, 0x55, 0xFC, 0x6E); +DEFINE_GUID(ASF_Extended_Content_Encryption_Object, + 0x298AE614, 0x2622, 0x4C17, 0xB9, 0x35, 0xDA, 0xE0, 0x7E, 0xE9, 0x28, 0x9C); +DEFINE_GUID(ASF_Digital_Signature_Object, + 0x2211B3FC, 0xBD23, 0x11D2, 0xB4, 0xB7, 0x00, 0xA0, 0xC9, 0x55, 0xFC, 0x6E); +DEFINE_GUID(ASF_Padding_Object, + 0x1806D474, 0xCADF, 0x4509, 0xA4, 0xBA, 0x9A, 0xAB, 0xCB, 0x96, 0xAA, 0xE8); +DEFINE_GUID(ASF_Extended_Stream_Properties_Object, + 0x14E6A5CB, 0xC672, 0x4332, 0x83, 0x99, 0xA9, 0x69, 0x52, 0x06, 0x5B, 0x5A); +DEFINE_GUID(ASF_Advanced_Mutual_Exclusion_Object, + 0xA08649CF, 0x4775, 0x4670, 0x8A, 0x16, 0x6E, 0x35, 0x35, 0x75, 0x66, 0xCD); +DEFINE_GUID(ASF_Group_Mutual_Exclusion_Object, + 0xD1465A40, 0x5A79, 0x4338, 0xB7, 0x1B, 0xE3, 0x6B, 0x8F, 0xD6, 0xC2, 0x49); +DEFINE_GUID(ASF_Stream_Prioritization_Object, + 0xD4FED15B, 0x88D3, 0x454F, 0x81, 0xF0, 0xED, 0x5C, 0x45, 0x99, 0x9E, 0x24); +DEFINE_GUID(ASF_Bandwidth_Sharing_Object, + 0xA69609E6, 0x517B, 0x11D2, 0xB6, 0xAF, 0x00, 0xC0, 0x4F, 0xD9, 0x08, 0xE9); +DEFINE_GUID(ASF_Language_List_Object, + 0x7C4346A9, 0xEFE0, 0x4BFC, 0xB2, 0x29, 0x39, 0x3E, 0xDE, 0x41, 0x5C, 0x85); +DEFINE_GUID(ASF_Metadata_Object, + 0xC5F8CBEA, 0x5BAF, 0x4877, 0x84, 0x67, 0xAA, 0x8C, 0x44, 0xFA, 0x4C, 0xCA); +DEFINE_GUID(ASF_Metadata_Library_Object, + 0x44231C94, 0x9498, 0x49D1, 0xA1, 0x41, 0x1D, 0x13, 0x4E, 0x45, 0x70, 0x54); +DEFINE_GUID(ASF_Index_Parameters_Object, + 0xD6E229DF, 0x35DA, 0x11D1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE); +DEFINE_GUID(ASF_Media_Object_Index_Parameters_Object, + 0x6B203BAD, 0x3F11, 0x48E4, 0xAC, 0xA8, 0xD7, 0x61, 0x3D, 0xE2, 0xCF, 0xA7); +DEFINE_GUID(ASF_Timecode_Index_Parameters_Object, + 0xF55E496D, 0x9797, 0x4B5D, 0x8C, 0x8B, 0x60, 0x4D, 0xFE, 0x9B, 0xFB, 0x24); +DEFINE_GUID(ASF_Compatibility_Object, + 0x26F18B5D, 0x4584, 0x47EC, 0x9F, 0x5F, 0x0E, 0x65, 0x1F, 0x04, 0x52, 0xC9); +DEFINE_GUID(ASF_Advanced_Content_Encryption_Object, + 0x43058533, 0x6981, 0x49E6, 0x9B, 0x74, 0xAD, 0x12, 0xCB, 0x86, 0xD5, 0x8C); +DEFINE_GUID(ASF_Audio_Media, + 0xF8699E40, 0x5B4D, 0x11CF, 0xA8, 0xFD, 0x00, 0x80, 0x5F, 0x5C, 0x44, 0x2B); +DEFINE_GUID(ASF_Video_Media, + 0xBC19EFC0, 0x5B4D, 0x11CF, 0xA8, 0xFD, 0x00, 0x80, 0x5F, 0x5C, 0x44, 0x2B); +DEFINE_GUID(ASF_Command_Media, + 0x59DACFC0, 0x59E6, 0x11D0, 0xA3, 0xAC, 0x00, 0xA0, 0xC9, 0x03, 0x48, 0xF6); +DEFINE_GUID(ASF_JFIF_Media, + 0xB61BE100, 0x5B4E, 0x11CF, 0xA8, 0xFD, 0x00, 0x80, 0x5F, 0x5C, 0x44, 0x2B); +DEFINE_GUID(ASF_Degradable_JPEG_Media, + 0x35907DE0, 0xE415, 0x11CF, 
0xA9, 0x17, 0x00, 0x80, 0x5F, 0x5C, 0x44, 0x2B); +DEFINE_GUID(ASF_File_Transfer_Media, + 0x91BD222C, 0xF21C, 0x497A, 0x8B, 0x6D, 0x5A, 0xA8, 0x6B, 0xFC, 0x01, 0x85); +DEFINE_GUID(ASF_Binary_Media, + 0x3AFB65E2, 0x47EF, 0x40F2, 0xAC, 0x2C, 0x70, 0xA9, 0x0D, 0x71, 0xD3, 0x43); +DEFINE_GUID(ASF_Web_Stream_Media_Subtype, + 0x776257D4, 0xC627, 0x41CB, 0x8F, 0x81, 0x7A, 0xC7, 0xFF, 0x1C, 0x40, 0xCC); +DEFINE_GUID(ASF_Web_Stream_Format, + 0xDA1E6B13, 0x8359, 0x4050, 0xB3, 0x98, 0x38, 0x8E, 0x96, 0x5B, 0xF0, 0x0C); +DEFINE_GUID(ASF_No_Error_Correction, + 0x20FB5700, 0x5B55, 0x11CF, 0xA8, 0xFD, 0x00, 0x80, 0x5F, 0x5C, 0x44, 0x2B); +DEFINE_GUID(ASF_Audio_Spread, + 0xBFC3CD50, 0x618F, 0x11CF, 0x8B, 0xB2, 0x00, 0xAA, 0x00, 0xB4, 0xE2, 0x20); +DEFINE_GUID(ASF_Reserved_1, + 0xABD3D211, 0xA9BA, 0x11cf, 0x8E, 0xE6, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65); +DEFINE_GUID(ASF_Content_Encryption_System_Windows_Media_DRM_Network_Devices, + 0x7A079BB6, 0xDAA4, 0x4e12, 0xA5, 0xCA, 0x91, 0xD3, 0x8D, 0xC1, 0x1A, 0x8D); +DEFINE_GUID(ASF_Reserved_2, + 0x86D15241, 0x311D, 0x11D0, 0xA3, 0xA4, 0x00, 0xA0, 0xC9, 0x03, 0x48, 0xF6); +DEFINE_GUID(ASF_Reserved_3, + 0x4B1ACBE3, 0x100B, 0x11D0, 0xA3, 0x9B, 0x00, 0xA0, 0xC9, 0x03, 0x48, 0xF6); +DEFINE_GUID(ASF_Reserved_4, + 0x4CFEDB20, 0x75F6, 0x11CF, 0x9C, 0x0F, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xCB); +DEFINE_GUID(ASF_Mutex_Language, + 0xD6E22A00, 0x35DA, 0x11D1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE); +DEFINE_GUID(ASF_Mutex_Bitrate, + 0xD6E22A01, 0x35DA, 0x11D1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE); +DEFINE_GUID(ASF_Mutex_Unknown, + 0xD6E22A02, 0x35DA, 0x11D1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE); +DEFINE_GUID(ASF_Bandwidth_Sharing_Exclusive, + 0xAF6060AA, 0x5197, 0x11D2, 0xB6, 0xAF, 0x00, 0xC0, 0x4F, 0xD9, 0x08, 0xE9); +DEFINE_GUID(ASF_Bandwidth_Sharing_Partial, + 0xAF6060AB, 0x5197, 0x11D2, 0xB6, 0xAF, 0x00, 0xC0, 0x4F, 0xD9, 0x08, 0xE9); +DEFINE_GUID(ASF_Payload_Extension_System_Timecode, + 0x399595EC, 0x8667, 0x4E2D, 0x8F, 0xDB, 0x98, 0x81, 0x4C, 0xE7, 0x6C, 0x1E); +DEFINE_GUID(ASF_Payload_Extension_System_File_Name, + 0xE165EC0E, 0x19ED, 0x45D7, 0xB4, 0xA7, 0x25, 0xCB, 0xD1, 0xE2, 0x8E, 0x9B); +DEFINE_GUID(ASF_Payload_Extension_System_Content_Type, + 0xD590DC20, 0x07BC, 0x436C, 0x9C, 0xF7, 0xF3, 0xBB, 0xFB, 0xF1, 0xA4, 0xDC); +DEFINE_GUID(ASF_Payload_Extension_System_Pixel_Aspect_Ratio, + 0x1B1EE554, 0xF9EA, 0x4BC8, 0x82, 0x1A, 0x37, 0x6B, 0x74, 0xE4, 0xC4, 0xB8); +DEFINE_GUID(ASF_Payload_Extension_System_Sample_Duration, + 0xC6BD9450, 0x867F, 0x4907, 0x83, 0xA3, 0xC7, 0x79, 0x21, 0xB7, 0x33, 0xAD); +DEFINE_GUID(ASF_Payload_Extension_System_Encryption_Sample_ID, + 0x6698B84E, 0x0AFA, 0x4330, 0xAE, 0xB2, 0x1C, 0x0A, 0x98, 0xD7, 0xA4, 0x4D); + + +#endif + diff --git a/asfparser/AsfHeaderParser.cpp b/asfparser/AsfHeaderParser.cpp new file mode 100644 index 0000000..4665d76 --- /dev/null +++ b/asfparser/AsfHeaderParser.cpp @@ -0,0 +1,396 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. 
No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + + + +#include "AsfHeaderParser.h" +#include <string.h> + + +AsfHeaderParser::AsfHeaderParser(void) + : mAudioInfo(NULL), + mVideoInfo(NULL), + mFileInfo(NULL), + mNumObjectParsed(0), + mNumberofHeaderObjects(0) { + mFileInfo = new AsfFileMediaInfo; + memset(mFileInfo, 0, sizeof(AsfFileMediaInfo)); +} + +AsfHeaderParser::~AsfHeaderParser(void) { + delete mFileInfo; + + resetStreamInfo(); +} + +AsfAudioStreamInfo* AsfHeaderParser::getAudioInfo() const { + return mAudioInfo; +} + +AsfVideoStreamInfo* AsfHeaderParser::getVideoInfo() const { + return mVideoInfo; +} + +AsfFileMediaInfo* AsfHeaderParser::getFileInfo() const { + return mFileInfo; +} + +uint64_t AsfHeaderParser::getDuration() { + return mFileInfo->duration - mFileInfo->preroll * ASF_SCALE_MS_TO_100NANOSEC; +} + +uint32_t AsfHeaderParser::getDataPacketSize() { + return mFileInfo->packetSize; +} + +uint32_t AsfHeaderParser::getPreroll() { + // in millisecond units + return mFileInfo->preroll; +} + +uint64_t AsfHeaderParser::getTimeOffset() { + // in 100-nanosecond units + if (mAudioInfo) { + return mAudioInfo->timeOffset; + } + + if (mVideoInfo) { + return mVideoInfo->timeOffset; + } + + return 0; +} + +bool AsfHeaderParser::hasVideo() { + return mVideoInfo != NULL; +} + +bool AsfHeaderParser::hasAudio() { + return mAudioInfo != NULL; +} + +bool AsfHeaderParser::isSeekable() { + return mFileInfo->seekable; +} + +int AsfHeaderParser::parse(uint8_t *buffer, uint32_t size) { + int status = ASF_PARSER_SUCCESS; + + // reset parser's status + mNumObjectParsed = 0; + resetStreamInfo(); + memset(mFileInfo, 0, sizeof(AsfFileMediaInfo)); + + do { + if (size < sizeof(AsfObject)) { + return ASF_PARSER_BAD_DATA; + } + + AsfObject *obj = (AsfObject*)buffer; + if (obj->objectSize > size) { + return ASF_PARSER_BAD_VALUE; + } + + if (obj->objectID == ASF_Header_Object) { + if (size < sizeof(AsfHeaderObject)) { + return ASF_PARSER_BAD_DATA; + } + AsfHeaderObject *headerObj = (AsfHeaderObject*)buffer; + mNumberofHeaderObjects = headerObj->numberofHeaderObjects; + size -= sizeof(AsfHeaderObject); + buffer += sizeof(AsfHeaderObject); + } else { + if(obj->objectID == ASF_File_Properties_Object) { + status = onFilePropertiesObject(buffer, size); + } else if(obj->objectID == ASF_Stream_Properties_Object) { + status = onStreamPropertiesObject(buffer, size); + } else if(obj->objectID == ASF_Header_Extension_Object) { + //AsfHeaderExtensionObject *headerExtObj = (AsfHeaderExtensionObject*)buffer; + if (size < sizeof(AsfHeaderExtensionObject)) { + return ASF_PARSER_BAD_DATA; + } + status = parseHeaderExtensionObject( + buffer + sizeof(AsfHeaderExtensionObject), + size - sizeof(AsfHeaderExtensionObject)); + } else if(obj->objectID == ASF_Codec_List_Object) { + } else if(obj->objectID == ASF_Script_Command_Object) { + } else if(obj->objectID == ASF_Marker_Object) { + } else if(obj->objectID == ASF_Bitrate_Mutual_Exclusion_Object) { + } else if(obj->objectID == ASF_Error_Correction_Object) { + } else
if(obj->objectID == ASF_Content_Description_Object) { + } else if(obj->objectID == ASF_Extended_Content_Description_Object) { + } else if(obj->objectID == ASF_Stream_Bitrate_Properties_Object) { + } else if(obj->objectID == ASF_Content_Branding_Object) { + } else if(obj->objectID == ASF_Content_Encryption_Object) { + } else if(obj->objectID == ASF_Extended_Content_Encryption_Object) { + } else if(obj->objectID == ASF_Digital_Signature_Object) { + } else if(obj->objectID == ASF_Padding_Object) { + } else { + } + if (status != ASF_PARSER_SUCCESS) { + return status; + } + size -= (uint32_t)obj->objectSize; + buffer += obj->objectSize; + mNumObjectParsed++; + if (mNumObjectParsed == mNumberofHeaderObjects) { + return ASF_PARSER_SUCCESS; + } + } + } + while (status == ASF_PARSER_SUCCESS); + + return status; +} + + +int AsfHeaderParser::onFilePropertiesObject(uint8_t *buffer, uint32_t size) { + if (size < sizeof(AsfFilePropertiesObject)) { + return ASF_PARSER_BAD_DATA; + } + + AsfFilePropertiesObject *obj = (AsfFilePropertiesObject*)buffer; + mFileInfo->dataPacketsCount = obj->dataPacketsCount; + mFileInfo->duration = obj->playDuration; + mFileInfo->fileSize = obj->fileSize; + mFileInfo->packetSize = obj->maximumDataPacketSize; + if (mFileInfo->packetSize != obj->minimumDataPacketSize) { + return ASF_PARSER_BAD_VALUE; + } + mFileInfo->preroll = obj->preroll; + mFileInfo->seekable = obj->flags.bits.seekableFlag; + if (obj->flags.bits.broadcastFlag) { + // turn off seeking + mFileInfo->seekable = false; + } + return ASF_PARSER_SUCCESS; +} + +int AsfHeaderParser::onStreamPropertiesObject(uint8_t *buffer, uint32_t size) { + int status; + if (size < sizeof(AsfStreamPropertiesObject)) { + return ASF_PARSER_BAD_DATA; + } + + AsfStreamPropertiesObject *obj = (AsfStreamPropertiesObject*)buffer; + if (obj->typeSpecificDataLength + obj->errorCorrectionDataLength > + size - sizeof(AsfStreamPropertiesObject)) { + return ASF_PARSER_BAD_VALUE; + } + uint8_t *typeSpecificData = buffer + sizeof(AsfStreamPropertiesObject); + if (obj->streamType == ASF_Video_Media) { + status = onVideoSpecificData(obj, typeSpecificData); + } else if (obj->streamType == ASF_Audio_Media) { + status = onAudioSpecificData(obj, typeSpecificData); + } else { + // ignore other media specific data + status = ASF_PARSER_SUCCESS; + } + return status; +} + +int AsfHeaderParser::onVideoSpecificData(AsfStreamPropertiesObject *obj, uint8_t *data) { + // size of codec specific data is obj->typeSpecificDataLength + uint32_t headerLen = sizeof(AsfVideoInfoHeader) + sizeof(AsfBitmapInfoHeader); + if (obj->typeSpecificDataLength < headerLen) { + return ASF_PARSER_BAD_DATA; + } + AsfVideoInfoHeader *info = (AsfVideoInfoHeader*)data; + AsfBitmapInfoHeader *bmp = (AsfBitmapInfoHeader*)(data + sizeof(AsfVideoInfoHeader)); + + if (info->formatDataSize < sizeof(AsfBitmapInfoHeader)) { + return ASF_PARSER_BAD_VALUE; + } + + if (bmp->formatDataSize - sizeof(AsfBitmapInfoHeader) > + obj->typeSpecificDataLength - headerLen) { + + // codec specific data is invalid + return ASF_PARSER_BAD_VALUE; + } + + AsfVideoStreamInfo *videoInfo = new AsfVideoStreamInfo; + if (videoInfo == NULL) { + return ASF_PARSER_NO_MEMORY; + } + videoInfo->streamNumber = obj->flags.bits.streamNumber; + videoInfo->encryptedContentFlag = obj->flags.bits.encryptedContentFlag; + videoInfo->timeOffset = obj->timeOffset; + videoInfo->width = info->encodedImageWidth; + videoInfo->height = info->encodedImageHeight; + videoInfo->fourCC = bmp->compressionID; + + // TODO: get aspect ratio from 
video meta data + videoInfo->aspectX = 1; + videoInfo->aspectY = 1; + + videoInfo->codecDataSize = bmp->formatDataSize - sizeof(AsfBitmapInfoHeader); + if (videoInfo->codecDataSize) { + videoInfo->codecData = new uint8_t [videoInfo->codecDataSize]; + if (videoInfo->codecData == NULL) { + delete videoInfo; + return ASF_PARSER_NO_MEMORY; + } + memcpy(videoInfo->codecData, + data + headerLen, + videoInfo->codecDataSize); + } else { + videoInfo->codecData = NULL; + } + + videoInfo->next = NULL; + if (mVideoInfo == NULL) { + mVideoInfo = videoInfo; + } else { + AsfVideoStreamInfo *last = mVideoInfo; + while (last->next != NULL) { + last = last->next; + } + last->next = videoInfo; + } + + return ASF_PARSER_SUCCESS; +} + +int AsfHeaderParser::onAudioSpecificData(AsfStreamPropertiesObject *obj, uint8_t *data) { + if (obj->typeSpecificDataLength < sizeof(AsfWaveFormatEx)) { + return ASF_PARSER_BAD_DATA; + } + + AsfWaveFormatEx *format = (AsfWaveFormatEx*)data; + if (format->codecSpecificDataSize > + obj->typeSpecificDataLength - sizeof(AsfWaveFormatEx)) { + return ASF_PARSER_BAD_VALUE; + } + + AsfAudioStreamInfo *audioInfo = new AsfAudioStreamInfo; + if (audioInfo == NULL) { + return ASF_PARSER_NO_MEMORY; + } + audioInfo->streamNumber = obj->flags.bits.streamNumber; + audioInfo->encryptedContentFlag = obj->flags.bits.encryptedContentFlag; + audioInfo->timeOffset = obj->timeOffset; + audioInfo->codecID = format->codecIDFormatTag; + audioInfo->numChannels = format->numberOfChannels; + audioInfo->sampleRate = format->samplesPerSecond; + audioInfo->avgByteRate = format->averageNumberOfBytesPerSecond; + audioInfo->blockAlignment = format->blockAlignment; + audioInfo->bitsPerSample = format->bitsPerSample; + audioInfo->codecDataSize = format->codecSpecificDataSize; + if (audioInfo->codecDataSize) { + audioInfo->codecData = new uint8_t [audioInfo->codecDataSize]; + if (audioInfo->codecData == NULL) { + delete audioInfo; + return ASF_PARSER_NO_MEMORY; + } + memcpy(audioInfo->codecData, + data + sizeof(AsfWaveFormatEx), + audioInfo->codecDataSize); + } else { + audioInfo->codecData = NULL; + } + + audioInfo->next = NULL; + + if (mAudioInfo == NULL) { + mAudioInfo = audioInfo; + } else { + AsfAudioStreamInfo *last = mAudioInfo; + while (last->next != NULL) { + last = last->next; + } + last->next = audioInfo; + } + + return ASF_PARSER_SUCCESS; +} + + +int AsfHeaderParser::onExtendedStreamPropertiesObject(uint8_t *buffer, uint32_t size) { + return ASF_PARSER_SUCCESS; +} + +int AsfHeaderParser::parseHeaderExtensionObject(uint8_t* buffer, uint32_t size) { + // No empty space, padding, leading, or trailing bytes are allowed in the extension data + int status = ASF_PARSER_SUCCESS; // objects that are recognized but skipped below leave status untouched, so it must start as success + do { + if (size < sizeof(AsfObject)) { + return ASF_PARSER_BAD_DATA; + } + + AsfObject *obj = (AsfObject *)buffer; + if (obj->objectSize > size) { + return ASF_PARSER_BAD_VALUE; + } + + if(obj->objectID == ASF_Extended_Stream_Properties_Object) { + status = onExtendedStreamPropertiesObject(buffer, size); + } else if(obj->objectID == ASF_Advanced_Mutual_Exclusion_Object) { + } else if(obj->objectID == ASF_Group_Mutual_Exclusion_Object) { + } else if(obj->objectID == ASF_Stream_Prioritization_Object) { + } else if(obj->objectID == ASF_Bandwidth_Sharing_Object) { + } else if(obj->objectID == ASF_Language_List_Object) { + } else if(obj->objectID == ASF_Metadata_Object) { + } else if(obj->objectID == ASF_Metadata_Library_Object) { + } else if(obj->objectID == ASF_Index_Parameters_Object) { + } else if(obj->objectID == ASF_Media_Object_Index_Parameters_Object) {
+ } else if(obj->objectID == ASF_Timecode_Index_Parameters_Object) { + } else if(obj->objectID == ASF_Compatibility_Object) { + } else if(obj->objectID == ASF_Advanced_Content_Encryption_Object) { + } else { + } + + if (status != ASF_PARSER_SUCCESS) { + break; + } + + size -= (uint32_t)obj->objectSize; + buffer += obj->objectSize; + + if (size == 0) { + break; + } + } + while (status == ASF_PARSER_SUCCESS); + + return status; +} + +void AsfHeaderParser::resetStreamInfo() { + while (mAudioInfo) { + AsfAudioStreamInfo *next = mAudioInfo->next; + delete [] mAudioInfo->codecData; + delete mAudioInfo; + mAudioInfo = next; + } + + while (mVideoInfo) { + AsfVideoStreamInfo *next = mVideoInfo->next; + delete [] mVideoInfo->codecData; + delete mVideoInfo; + mVideoInfo = next; + } +} + diff --git a/asfparser/AsfHeaderParser.h b/asfparser/AsfHeaderParser.h new file mode 100644 index 0000000..1e1f821 --- /dev/null +++ b/asfparser/AsfHeaderParser.h @@ -0,0 +1,78 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +*/ + + + + +#ifndef ASF_HEADER_PARSER_H_ +#define ASF_HEADER_PARSER_H_ + +#include "AsfParserDefs.h" +#include "AsfObjects.h" +#include "AsfGuids.h" + +class AsfHeaderParser { +public: + AsfHeaderParser(void); + ~AsfHeaderParser(void); + +public: + // buffer must contain a complete header object + int parse(uint8_t* buffer, uint32_t size); + + AsfAudioStreamInfo* getAudioInfo() const; + AsfVideoStreamInfo* getVideoInfo() const; + AsfFileMediaInfo* getFileInfo() const; + // return duration in 100-nanosecond units, readable when header object is parsed + uint64_t getDuration(); + // return data packet size, readable when header object is parsed + uint32_t getDataPacketSize(); + // return preroll in milliseconds + uint32_t getPreroll(); + // return Time Offset in any stream properties in 100-nanosecond units + // Time Offset must be equal for all the stream properties objects + uint64_t getTimeOffset(); + bool hasVideo(); + bool hasAudio(); + bool isSeekable(); + +private: + int onFilePropertiesObject(uint8_t *buffer, uint32_t size); + int onStreamPropertiesObject(uint8_t *buffer, uint32_t size); + int onVideoSpecificData(AsfStreamPropertiesObject *obj, uint8_t *data); + int onAudioSpecificData(AsfStreamPropertiesObject *obj, uint8_t *data); + int onExtendedStreamPropertiesObject(uint8_t *buffer, uint32_t size); + int parseHeaderExtensionObject(uint8_t *buffer, uint32_t size); + void resetStreamInfo(); + +private: + AsfAudioStreamInfo *mAudioInfo; + AsfVideoStreamInfo *mVideoInfo; + AsfFileMediaInfo *mFileInfo; + uint32_t mNumObjectParsed; + uint32_t mNumberofHeaderObjects; +}; + +#endif + diff --git a/asfparser/AsfIndexParser.cpp b/asfparser/AsfIndexParser.cpp new file mode 100644 index 0000000..e80f529 --- /dev/null +++ b/asfparser/AsfIndexParser.cpp @@ -0,0 +1,135 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing.
+* +*/ + + + +#include "AsfIndexParser.h" +#include "AsfObjects.h" +#include <string.h> + + +AsfSimpleIndexParser::AsfSimpleIndexParser(void) + : mIndexInfo(NULL) { +} + +AsfSimpleIndexParser::~AsfSimpleIndexParser(void) { + resetIndexInfo(); +} + +AsfSimpleIndexInfo* AsfSimpleIndexParser::getIndexInfo() const { + return mIndexInfo; +} + +int AsfSimpleIndexParser::parse(uint8_t *buffer, uint32_t size) { + // reset parser status + resetIndexInfo(); + + if (size <= sizeof(AsfSimpleIndexObject)) { + return ASF_PARSER_BAD_DATA; + } + AsfSimpleIndexObject *obj = (AsfSimpleIndexObject*)buffer; + if (obj->objectSize != size) { + return ASF_PARSER_BAD_VALUE; + } + + if (obj->indexEntryTimeInterval == 0) { + return ASF_PARSER_BAD_VALUE; + } + + mIndexInfo = new AsfSimpleIndexInfo; + if (mIndexInfo == NULL) { + return ASF_PARSER_NO_MEMORY; + } + + mIndexInfo->indexSize = size - sizeof(AsfSimpleIndexObject); + mIndexInfo->indexData = new uint8_t [mIndexInfo->indexSize]; + + if (mIndexInfo->indexData == NULL) { + delete mIndexInfo; + return ASF_PARSER_NO_MEMORY; + } + + memcpy(mIndexInfo->indexData, + buffer + sizeof(AsfSimpleIndexObject), + size - sizeof(AsfSimpleIndexObject)); + + mIndexInfo->indexEntryTimeInterval = obj->indexEntryTimeInterval; + mIndexInfo->maximumPacketCount = obj->maximumPacketCount; + mIndexInfo->indexEntriesCount = obj->indexEntriesCount; + return ASF_PARSER_SUCCESS; +} + + +int AsfSimpleIndexParser::seek( + uint64_t seekTime, + bool nextSync, + uint32_t& packetNumber, + uint64_t& targetTime) { + if (mIndexInfo == NULL) { + return ASF_PARSER_INVALID_STATE; + } + + // calculate offset of index entry in 6-byte unit + uint32_t offset; + if (nextSync) { + offset = (seekTime + mIndexInfo->indexEntryTimeInterval - 1)/mIndexInfo->indexEntryTimeInterval; + } else { + offset = seekTime/mIndexInfo->indexEntryTimeInterval; + } + + if (offset >= mIndexInfo->indexEntriesCount) { + return ASF_PARSER_BAD_VALUE; + } + + if (INDEX_ENTRY_SIZE * offset > mIndexInfo->indexSize - INDEX_ENTRY_SIZE) { + return ASF_PARSER_BAD_VALUE; + } + + targetTime = offset * mIndexInfo->indexEntryTimeInterval; + uint8_t *data = mIndexInfo->indexData + INDEX_ENTRY_SIZE * offset; + // packet number 4 bytes + // packet count 2 bytes + packetNumber = *(uint32_t*)data; + + return ASF_PARSER_SUCCESS; +} + +uint32_t AsfSimpleIndexParser::getMaximumPacketCount() { + if (mIndexInfo == NULL) + return 0; + + return mIndexInfo->maximumPacketCount; +} + +void AsfSimpleIndexParser::resetIndexInfo() { + if (mIndexInfo) { + if (mIndexInfo->indexData) { + delete [] mIndexInfo->indexData; + } + delete mIndexInfo; + } + + mIndexInfo = NULL; +} + diff --git a/asfparser/AsfIndexParser.h b/asfparser/AsfIndexParser.h new file mode 100644 index 0000000..6a0b15c --- /dev/null +++ b/asfparser/AsfIndexParser.h @@ -0,0 +1,61 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions.
No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + + + +#ifndef ASF_INDEX_PARSER_H_ +#define ASF_INDEX_PARSER_H_ + +#include "AsfParserDefs.h" + +class AsfSimpleIndexParser { +public: + AsfSimpleIndexParser(void); + ~AsfSimpleIndexParser(void); + +public: + AsfSimpleIndexInfo* getIndexInfo() const; + + // buffer must contain a complete simple index object + int parse(uint8_t *buffer, uint32_t size); + // seek to the closest previous or next sync packet. time stamp is in 100-nanosecond units + int seek(uint64_t seekTime, bool nextSync, uint32_t& packetNumber, uint64_t& targetTime); + // return maximum video packet count per object, readable when simple index object is parsed. + // If simple index object is not parsed or is not available, 0 is returned + uint32_t getMaximumPacketCount(); + +private: + void resetIndexInfo(); + +private: + enum { + // 4 bytes of "packet number" plus 2 bytes of "packet count" + INDEX_ENTRY_SIZE = 6, + }; + AsfSimpleIndexInfo *mIndexInfo; +}; + +#endif + diff --git a/asfparser/AsfObjects.h b/asfparser/AsfObjects.h new file mode 100644 index 0000000..1942c8d --- /dev/null +++ b/asfparser/AsfObjects.h @@ -0,0 +1,309 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +*/ + + + + +#ifndef ASF_OBJECTS_H_ +#define ASF_OBJECTS_H_ + +#include "AsfParserDefs.h" +#include "AsfGuids.h" + +#pragma pack(push, 1) + +struct AsfObject { + GUID objectID; + uint64_t objectSize; +}; + +struct AsfHeaderObject : AsfObject { + uint32_t numberofHeaderObjects; + uint8_t reserved1; + uint8_t reserved2; +}; + + +struct AsfFilePropertiesObject : AsfObject { + GUID fileID; + uint64_t fileSize; + uint64_t creationDate; + uint64_t dataPacketsCount; + uint64_t playDuration; + uint64_t sendDuration; + uint64_t preroll; + union { + struct { + uint32_t broadcastFlag :1; + uint32_t seekableFlag :1; + uint32_t reserved :30; + } bits; + uint32_t value; + } flags; + uint32_t minimumDataPacketSize; + uint32_t maximumDataPacketSize; + uint32_t maximumBitrate; +}; + +struct AsfStreamPropertiesObject : AsfObject { + GUID streamType; + GUID errorCorrectionType; + uint64_t timeOffset; + uint32_t typeSpecificDataLength; + uint32_t errorCorrectionDataLength; + union { + struct { + uint16_t streamNumber :7; + uint16_t reserved :8; + uint16_t encryptedContentFlag :1; + } bits; + uint16_t value; + } flags; + uint32_t reserved; + //type-Specific Data; + //error Correction Data; +}; + +struct AsfHeaderExtensionObject : AsfObject { + GUID clockType; // Reserved Field 1 + uint16_t clockSize; // Reserved Field 2 + uint32_t headerExtensionDataSize; + //header Extension Data; +}; + + +struct AsfCodecListObject : AsfObject { + // TODO: +}; + +struct AsfScriptCommandObject : AsfObject { + // TODO: +}; + +struct AsfMarkerObject : AsfObject { + // TODO: +}; + +struct AsfBitrateMutualExclusionObject : AsfObject { + // TODO: +}; + +struct AsfErrorCorrectionObject : AsfObject { + // TODO: +}; + +struct AsfContentDescriptionObject : AsfObject { + // TODO: +}; + +struct AsfExtendedContentDescriptionObject : AsfObject { + // TODO: +}; + +struct AsfStreamBitratePropertiesObject : AsfObject { + // TODO: +}; + +struct AsfContentBrandingObject : AsfObject { + // TODO: +}; + +struct AsfContentEncryptionObject : AsfObject { + // TODO: +}; + +struct AsfExtendedContentEncryptionObject : AsfObject { + // TODO: +}; + +struct AsfDigitalSignatureObject : AsfObject { + // TODO: +}; + +struct AsfPaddingObject : AsfObject { + // TODO: +}; + +// objects in the ASF Header Extension object +struct AsfExtendedStreamPropertiesObject : AsfObject { + uint64_t startTime; + uint64_t endTime; + uint32_t dataBitrate; + uint32_t bufferSize; + uint32_t initialBufferFullness; + uint32_t alternateDataBitrate; + uint32_t alternateBufferSize; + uint32_t alternateInitialBufferFullness; + uint32_t maximumObjectSize; + union { + struct { + uint32_t reliableFlag :1; + uint32_t seekableFlag :1; + uint32_t noCleanpointsFlag :1; + uint32_t resendLiveCleanpointsFlag :1; + uint32_t reservedFlags :28; + } bits; + uint32_t value; + } flags; + uint16_t streamNumber; + uint16_t streamLanguageIDIndex; + uint64_t averageTimePerFrame; + uint16_t streamNameCount; + uint16_t payloadExtensionSystemCount; + //Stream Names - variable length + //Payload Extension Systems - variable length + //Stream Properties Object - variable length +}; + + +struct AsfAdvancedMutualExclusionObject : AsfObject { + // TODO: +}; + +struct AsfGroupMutualExclusionObject : AsfObject { + // TODO: +}; + +struct AsfStreamPrioritizationObject : AsfObject { + // TODO: +}; + +struct AsfBandwidthSharingObject : AsfObject { + // TODO: +}; + +struct AsfLanguageListObject : AsfObject { + // TODO: +}; + +struct AsfMetadataObject : AsfObject { + // TODO: +}; + +struct AsfMetadataLibraryObject : 
AsfObject { + // TODO: +}; + +struct AsfIndexParametersObject : AsfObject { + // TODO: +}; + +struct AsfMediaObjectIndexParametersObject : AsfObject { + // TODO: +}; + +struct AsfTimeCodeIndexParametersObject : AsfObject { + // TODO: +}; + +struct AsfCompatibilityObject : AsfObject { + // TODO: +}; + +struct AsfAdvancedContentEncryptionObject : AsfObject { + // TODO: +}; + + +// ASF top-level data object + +struct AsfDataObject : AsfObject { + GUID fileID; + uint64_t totalDataPackets; + uint16_t reserved; + //Data Packets; +}; + + +// ASF top-level index objects + +struct AsfSimpleIndexObject : AsfObject { + GUID fileID; + // in 100-nanosecond units + uint64_t indexEntryTimeInterval; + uint32_t maximumPacketCount; + uint32_t indexEntriesCount; + //packet number for entry #0 (4 bytes) + //packet count for entry #0 (2 bytes) + //packet number for entry #1 + //packet count for entry #1 +}; + +struct AsfIndexObject : AsfObject { + // TODO: +}; + +struct AsfMediaObjectIndexObject : AsfObject { + // TODO: +}; + + +struct AsfTimecodeIndexObject : AsfObject { + // TODO: +}; + + +// media specific data structure + +struct AsfWaveFormatEx { + uint16_t codecIDFormatTag; + uint16_t numberOfChannels; + uint32_t samplesPerSecond; + uint32_t averageNumberOfBytesPerSecond; + uint16_t blockAlignment; + uint16_t bitsPerSample; + uint16_t codecSpecificDataSize; + //uint8_t codecSpecificData[]; +}; + +struct AsfVideoInfoHeader { + uint32_t encodedImageWidth; + uint32_t encodedImageHeight; + uint8_t reservedFlags; + uint16_t formatDataSize; + //FormatData formatData[]; +}; + +struct AsfBitmapInfoHeader { + uint32_t formatDataSize; + int32_t imageWidth; + int32_t imageHeight; + uint16_t reserved; + uint16_t bitsPerPixelCount; + uint32_t compressionID; + uint32_t imageSize; + int32_t horizontalPixelsPerMeter; + int32_t verticalPixelsPerMeter; + uint32_t colorsUsedCount; + uint32_t importantColorsCount; + //uint8_t codecSpecificData[]; +}; + +#pragma pack(pop) + +#endif + + + + diff --git a/asfparser/AsfParserDefs.h b/asfparser/AsfParserDefs.h new file mode 100644 index 0000000..27ff623 --- /dev/null +++ b/asfparser/AsfParserDefs.h @@ -0,0 +1,116 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +*/ + + + +#ifndef ASF_PARSER_DEFS_H_ +#define ASF_PARSER_DEFS_H_ + +#include <stdint.h> + +#ifndef NULL +//#define NULL (void*) 0 +#define NULL 0 +#endif + +// data object header size is 50 bytes +#define ASF_DATA_OBJECT_HEADER_SIZE 50 +#define ASF_SIMPLE_INDEX_OBJECT_HEADER_SIZE 56 + +// 1 millisecond = 10,000 units of 100 nanoseconds +#define ASF_SCALE_MS_TO_100NANOSEC 10000 + +// ASF parser error codes +enum { + ASF_PARSER_NULL_POINTER = -7, + ASF_PARSER_INVALID_STATE = -6, + ASF_PARSER_UNEXPECTED_VALUE = -5, + ASF_PARSER_BAD_VALUE = -4, + ASF_PARSER_BAD_DATA = -3, + ASF_PARSER_NO_MEMORY = -2, + ASF_PARSER_FAILED = -1, + ASF_PARSER_COMPRESSED_PAYLOAD = 0, + ASF_PARSER_SUCCESS = 1, +}; + +struct AsfAudioStreamInfo { + uint8_t streamNumber; + uint8_t encryptedContentFlag; + uint64_t timeOffset; // in 100-nanosecond units + uint32_t codecID; + uint32_t numChannels; + uint32_t sampleRate; + uint32_t avgByteRate; + uint32_t blockAlignment; + uint32_t bitsPerSample; + uint32_t codecDataSize; + uint8_t *codecData; + AsfAudioStreamInfo *next; +}; + +struct AsfVideoStreamInfo { + uint8_t streamNumber; + uint8_t encryptedContentFlag; + uint64_t timeOffset; // in 100-nanosecond units + uint32_t width; + uint32_t height; + uint32_t fourCC; + uint32_t aspectX; + uint32_t aspectY; + uint32_t codecDataSize; + uint8_t *codecData; + AsfVideoStreamInfo *next; +}; + +struct AsfFileMediaInfo { + uint64_t fileSize; + uint64_t dataPacketsCount; + uint64_t duration; // 100-nanosecond units + uint64_t preroll; // in millisecond units. + uint32_t packetSize; + bool seekable; +}; + +struct AsfSimpleIndexInfo { + uint8_t *indexData; + uint32_t indexSize; + uint64_t indexEntryTimeInterval; // in 100-nanosecond units + uint32_t maximumPacketCount; + uint32_t indexEntriesCount; +}; + +struct AsfPayloadDataInfo { + const uint8_t *payloadData; + uint32_t payloadSize; + uint32_t presentationTime; // in milliseconds + uint32_t offsetIntoMediaObject; + uint32_t mediaObjectLength; + uint8_t streamNumber; + uint8_t mediaObjectNumber; + bool keyframe; + AsfPayloadDataInfo *next; +}; + +#endif // ASF_PARSER_DEFS_H_ + diff --git a/asfparser/AsfStreamParser.cpp b/asfparser/AsfStreamParser.cpp new file mode 100644 index 0000000..b9210ca --- /dev/null +++ b/asfparser/AsfStreamParser.cpp @@ -0,0 +1,189 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing.
+* +*/ + + + +#include "AsfHeaderParser.h" +#include "AsfDataParser.h" +#include "AsfIndexParser.h" +#include "AsfStreamParser.h" +#include + + +AsfStreamParser::AsfStreamParser(void) + : mDataPacketSize(0), + mTimeOffsetMs(0), + mHeaderParsed(false) { + mHeaderParser = new AsfHeaderParser; + mDataParser = new AsfDataParser; + mSimpleIndexParser = new AsfSimpleIndexParser; +} + +AsfStreamParser::~AsfStreamParser(void) { + delete mHeaderParser; + delete mDataParser; + delete mSimpleIndexParser; +} + +bool AsfStreamParser::isSimpleIndexObject(uint8_t *guid) { + GUID *id = (GUID *)guid; + return (*id == ASF_Simple_Index_Object); +} + +bool AsfStreamParser::isHeaderObject(uint8_t *guid) { + GUID *id = (GUID *)guid; + return (*id == ASF_Header_Object); +} + +int AsfStreamParser::parseHeaderObject(uint8_t *buffer, uint32_t size) { + int status = mHeaderParser->parse(buffer, size); + if (status != ASF_PARSER_SUCCESS) { + return status; + } + + mDataPacketSize = mHeaderParser->getDataPacketSize(); + mTimeOffsetMs = mHeaderParser->getTimeOffset() / ASF_SCALE_MS_TO_100NANOSEC; + + if (mTimeOffsetMs == 0) { + // offset of PTS in milliseconds due to buffering + mTimeOffsetMs = mHeaderParser->getPreroll(); + } + mHeaderParsed = true; + return ASF_PARSER_SUCCESS; +} + +AsfAudioStreamInfo* AsfStreamParser::getAudioInfo() const { + return mHeaderParser->getAudioInfo(); +} + +AsfVideoStreamInfo* AsfStreamParser::getVideoInfo() const { + return mHeaderParser->getVideoInfo(); +} + +AsfFileMediaInfo* AsfStreamParser::getFileInfo() const { + return mHeaderParser->getFileInfo(); +} + +uint64_t AsfStreamParser::getDuration() { + return mHeaderParser->getDuration(); +} + +uint32_t AsfStreamParser::getDataPacketSize() { + return mHeaderParser->getDataPacketSize(); +} + +bool AsfStreamParser::hasVideo() { + return mHeaderParser->hasVideo(); +} + +bool AsfStreamParser::hasAudio() { + return mHeaderParser->hasAudio(); +} + +int AsfStreamParser::parseDataObjectHeader(uint8_t *buffer, uint32_t size) { + if (mHeaderParsed == false) { + return ASF_PARSER_INVALID_STATE; + } + return mDataParser->parseHeader(buffer, size); +} + +int AsfStreamParser::parseDataPacket(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out) { + if (mHeaderParsed == false) { + return ASF_PARSER_INVALID_STATE; + } + + if (size != mDataPacketSize) { + return ASF_PARSER_BAD_DATA; + } + + if (out == NULL) { + return ASF_PARSER_NULL_POINTER; + } + + int status = mDataParser->parsePacket(buffer, size, out); + if (status != ASF_PARSER_SUCCESS) { + return status; + } + + if (mTimeOffsetMs == 0) { + return ASF_PARSER_SUCCESS; + } + + // update presentation time stamp + AsfPayloadDataInfo *next = *out; + while (next) { + if (next->presentationTime >= mTimeOffsetMs) { + next->presentationTime -= mTimeOffsetMs; + } + else { + // TODO: + next->presentationTime = 0; + //return ASF_PARSER_BAD_VALUE; + } + next = next->next; + } + return status; +} + +void AsfStreamParser::releasePayloadDataInfo(AsfPayloadDataInfo *info) { + mDataParser->releasePayloadDataInfo(info); +} + + +int AsfStreamParser::parseSimpleIndexObject(uint8_t *buffer, uint32_t size) { + if (mHeaderParsed == false) { + return ASF_PARSER_INVALID_STATE; + } + + if (mHeaderParser->isSeekable() == false) { + return ASF_PARSER_FAILED; + } + + return mSimpleIndexParser->parse(buffer, size); +} + +AsfSimpleIndexInfo* AsfStreamParser::getIndexInfo() const { + return mSimpleIndexParser->getIndexInfo(); +} + +int AsfStreamParser::seek( + uint64_t seekTime, + bool nextSync, + uint32_t& packetNumber, + 
uint64_t& targetTime) { + if (mHeaderParsed == false) { + return ASF_PARSER_INVALID_STATE; + } + + if (mHeaderParser->isSeekable() == false) { + return ASF_PARSER_FAILED; + } + + return mSimpleIndexParser->seek(seekTime, nextSync, packetNumber, targetTime); +} + +uint32_t AsfStreamParser::getMaxObjectSize() { + return mSimpleIndexParser->getMaximumPacketCount() * mDataPacketSize; +} + diff --git a/asfparser/AsfStreamParser.h b/asfparser/AsfStreamParser.h new file mode 100644 index 0000000..91d21ef --- /dev/null +++ b/asfparser/AsfStreamParser.h @@ -0,0 +1,88 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + + +#ifndef ASF_STREAM_PARSER_H_ +#define ASF_STREAM_PARSER_H_ + +#include "AsfParserDefs.h" + +class AsfStreamParser { +public: + AsfStreamParser(void); + ~AsfStreamParser(void); + +public: + static bool isSimpleIndexObject(uint8_t *guid); + static bool isHeaderObject(uint8_t *guid); + + // buffer must contain a complete header object + int parseHeaderObject(uint8_t *buffer, uint32_t size); + AsfAudioStreamInfo* getAudioInfo() const; + AsfVideoStreamInfo* getVideoInfo() const; + AsfFileMediaInfo* getFileInfo() const; + + // return duration in 100-nanosecond units, readable when header object is parsed + uint64_t getDuration(); + // return data packet size, readable when header object is parsed + uint32_t getDataPacketSize(); + bool hasVideo(); + bool hasAudio(); + // buffer must contain a complete data object header + int parseDataObjectHeader(uint8_t *buffer, uint32_t size); + // buffer must contain a complete data packet and only one packet + int parseDataPacket(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out); + + // caller must release AsfPayloadDataInfo using this method + void releasePayloadDataInfo(AsfPayloadDataInfo *info); + + // buffer must contain a complete simple index object + int parseSimpleIndexObject(uint8_t *buffer, uint32_t size); + + AsfSimpleIndexInfo* getIndexInfo() const; + + // seek to the closest previous or next sync packet. time stamp is in 100-nanosecond units + int seek(uint64_t seekTime, bool nextSync, uint32_t& packetNumber, uint64_t& targetTime); + + // return maximum video object size, readable when simple index object is parsed.
+ // If simple index object is not parsed or is not available, 0 is returned + uint32_t getMaxObjectSize(); + +private: + // fixed data packet size + uint32_t mDataPacketSize; + // offset of PTS in milliseconds (converted from original 100-nanoseconds unit) due to cut/edit. + // all stream properties must have same "Time Offset". + // If value is zero, it will be set to "Preroll" value in the File Properties object. + // Preroll value is used for data buffering. + uint32_t mTimeOffsetMs; + bool mHeaderParsed; + class AsfHeaderParser *mHeaderParser; + class AsfDataParser *mDataParser; + class AsfSimpleIndexParser *mSimpleIndexParser; +}; + +#endif + diff --git a/mix_vbp/Android.mk b/mix_vbp/Android.mk index 7d5d2a8..42082ef 100644 --- a/mix_vbp/Android.mk +++ b/mix_vbp/Android.mk @@ -6,3 +6,4 @@ VENDORS_INTEL_MRST_MIXVBP_ROOT := $(LOCAL_PATH) include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/Android.mk include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/h264/parser/Android.mk include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/parser/Android.mk +include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vc1/parser/Android.mk diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/Android.mk b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/Android.mk new file mode 100644 index 0000000..e2479fe --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/Android.mk @@ -0,0 +1,37 @@ +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + mix_vbp_vc1_stubs.c \ + vc1parse_bpic_adv.c \ + vc1parse_huffman.c \ + vc1parse_mv_com.c \ + vc1parse_ppic_adv.c \ + viddec_vc1_parse.c \ + vc1parse_bpic.c \ + vc1parse_common_tables.c \ + vc1parse_ipic_adv.c \ + vc1parse_pic_com_adv.c \ + vc1parse_ppic.c \ + vc1parse_bitplane.c \ + vc1parse.c \ + vc1parse_ipic.c \ + vc1parse_pic_com.c \ + vc1parse_vopdq.c + +LOCAL_CFLAGS := -DVBP -DHOST_ONLY + +LOCAL_C_INCLUDES := \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vc1/include + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libmixvbp_vc1 + +LOCAL_SHARED_LIBRARIES := \ + libmixvbp + +include $(BUILD_SHARED_LIBRARY) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index f01dc3b..ac4d13b 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -1,12 +1,28 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. 
+* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ -//#include #include #include "h264.h" @@ -14,6 +30,7 @@ #include "vbp_utils.h" #include "vbp_h264_parser.h" +typedef struct vbp_h264_parser_private_t vbp_h264_parser_private; typedef enum { @@ -22,16 +39,18 @@ typedef enum H264_BS_SINGLE_NAL } H264_BS_PATTERN; -/* number of bytes used to encode length of NAL payload. If parser does not receive configuration data -and NAL_length_size is equal to zero when bitstream parsing begins, we assume bitstream is in AnnexB -byte stream format. */ -static int NAL_length_size = 0; - -/* indicate if stream is length prefixed */ -static int length_prefix_verified = 0; +struct vbp_h264_parser_private_t +{ + /* number of bytes used to encode length of NAL payload. If parser does not receive configuration data + and NAL_length_size is equal to zero when bitstream parsing begins, we assume bitstream is in AnnexB + byte stream format. 
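+       For illustration: an AnnexB byte stream separates NAL units with 00 00 01
+       start codes (possibly preceded by an extra zero byte), while a length-prefixed
+       stream stores each NAL's size big-endian in the first NAL_length_size bytes,
+       immediately followed by the NAL payload.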
*/ + int NAL_length_size; -static H264_BS_PATTERN bitstream_pattern = H264_BS_SC_PREFIXED; + /* indicate if stream is length prefixed */ + int length_prefix_verified; + H264_BS_PATTERN bitstream_pattern; +}; /* default scaling list table */ unsigned char Default_4x4_Intra[16] = @@ -185,7 +204,7 @@ uint32 vbp_allocate_query_data_h264(vbp_context *pcontext) pcontext->query_data = NULL; vbp_data_h264 *query_data = NULL; - query_data = g_try_new0(vbp_data_h264, 1); + query_data = vbp_malloc_set0(vbp_data_h264, 1); if (NULL == query_data) { goto cleanup; @@ -194,7 +213,7 @@ uint32 vbp_allocate_query_data_h264(vbp_context *pcontext) /* assign the pointer */ pcontext->query_data = (void *)query_data; - query_data->pic_data = g_try_new0(vbp_picture_data_h264, MAX_NUM_PICTURES); + query_data->pic_data = vbp_malloc_set0(vbp_picture_data_h264, MAX_NUM_PICTURES); if (NULL == query_data->pic_data) { goto cleanup; @@ -203,13 +222,13 @@ uint32 vbp_allocate_query_data_h264(vbp_context *pcontext) int i; for (i = 0; i < MAX_NUM_PICTURES; i++) { - query_data->pic_data[i].pic_parms = g_try_new0(VAPictureParameterBufferH264, 1); + query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferH264, 1); if (NULL == query_data->pic_data[i].pic_parms) { goto cleanup; } query_data->pic_data[i].num_slices = 0; - query_data->pic_data[i].slc_data = g_try_new0(vbp_slice_data_h264, MAX_NUM_SLICES); + query_data->pic_data[i].slc_data = vbp_malloc_set0(vbp_slice_data_h264, MAX_NUM_SLICES); if (NULL == query_data->pic_data[i].slc_data) { goto cleanup; @@ -217,18 +236,36 @@ uint32 vbp_allocate_query_data_h264(vbp_context *pcontext) } - query_data->IQ_matrix_buf = g_try_new0(VAIQMatrixBufferH264, 1); + query_data->IQ_matrix_buf = vbp_malloc_set0(VAIQMatrixBufferH264, 1); if (NULL == query_data->IQ_matrix_buf) { goto cleanup; } - query_data->codec_data = g_try_new0(vbp_codec_data_h264, 1); + query_data->codec_data = vbp_malloc_set0(vbp_codec_data_h264, 1); if (NULL == query_data->codec_data) { goto cleanup; } + pcontext->parser_private = NULL; + vbp_h264_parser_private *parser_private = NULL; + + parser_private = vbp_malloc_set0(vbp_h264_parser_private, 1); + if (NULL == parser_private) + { + goto cleanup; + } + + /* assign the pointer */ + pcontext->parser_private = (void *)parser_private; + + /* init the pointer */ + parser_private->NAL_length_size = 0; + + parser_private->length_prefix_verified = 0; + + parser_private->bitstream_pattern = H264_BS_SC_PREFIXED; return VBP_OK; cleanup: @@ -239,6 +276,12 @@ cleanup: uint32 vbp_free_query_data_h264(vbp_context *pcontext) { + if (NULL != pcontext->parser_private) + { + free(pcontext->parser_private); + pcontext->parser_private = NULL; + } + if (NULL == pcontext->query_data) { return VBP_OK; @@ -264,10 +307,6 @@ uint32 vbp_free_query_data_h264(vbp_context *pcontext) pcontext->query_data = NULL; - NAL_length_size = 0; - length_prefix_verified = 0; - bitstream_pattern = H264_BS_SC_PREFIXED; - return VBP_OK; } @@ -354,8 +393,8 @@ static inline void vbp_set_slice_ref_list_h264( refPicListX = (i == 0) ? 
&(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]); if ((i == 0) && - ((h264_PtypeB == slice_header->slice_type) || - (h264_PtypeP == slice_header->slice_type))) + ((h264_PtypeB == slice_header->slice_type) || + (h264_PtypeP == slice_header->slice_type))) { num_ref_idx_active = slice_header->num_ref_idx_l0_active; if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag) @@ -412,10 +451,10 @@ static inline void vbp_set_pre_weight_table_h264( int i, j; if ((((h264_PtypeP == slice_header->slice_type) || - (h264_PtypeB == slice_header->slice_type)) && - h264_parser->info.active_PPS.weighted_pred_flag) || - ((h264_PtypeB == slice_header->slice_type) && - (1 == h264_parser->info.active_PPS.weighted_bipred_idc))) + (h264_PtypeB == slice_header->slice_type)) && + h264_parser->info.active_PPS.weighted_pred_flag) || + ((h264_PtypeB == slice_header->slice_type) && + (1 == h264_parser->info.active_PPS.weighted_bipred_idc))) { slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom; slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom; @@ -803,7 +842,7 @@ static void vbp_set_codec_data_h264( codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames; if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag && - !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag) + !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag) { /* no longer necessary: two fields share the same interlaced surface */ /* codec_data->num_ref_frames *= 2; */ @@ -1074,148 +1113,6 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index) return VBP_OK; } -#if 0 -static inline void vbp_update_reference_frames_h264_methodA(vbp_picture_data_h264* pic_data) -{ - VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms; - - char is_used[16]; - memset(is_used, 0, sizeof(is_used)); - - int ref_list; - int slice_index; - int i, j; - VAPictureH264* pRefList = NULL; - - for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++) - { - VASliceParameterBufferH264* slice_parms = - &(pic_data->slc_data[slice_index].slc_parms); - - for (ref_list = 0; ref_list < 2; ref_list++) - { - if (0 == ref_list) - pRefList = slice_parms->RefPicList0; - else - pRefList = slice_parms->RefPicList1; - - for (i = 0; i < 32; i++, pRefList++) - { - if (VA_PICTURE_H264_INVALID == pRefList->flags) - break; - - for (j = 0; j < 16; j++) - { - if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt == - pRefList->TopFieldOrderCnt) - { - is_used[j] = 1; - break; - } - } - } - } - } - - int frame_idx = 0; - VAPictureH264* pRefFrame = pic_parms->ReferenceFrames; - for (i = 0; i < 16; i++) - { - if (is_used[i]) - { - memcpy(pRefFrame, - &(pic_parms->ReferenceFrames[i]), - sizeof(VAPictureH264)); - - pRefFrame++; - frame_idx++; - } - } - pic_parms->num_ref_frames = frame_idx; - - for (; frame_idx < 16; frame_idx++) - { - pRefFrame->picture_id = VA_INVALID_SURFACE; - pRefFrame->frame_idx = -1; - pRefFrame->flags = VA_PICTURE_H264_INVALID; - pRefFrame->TopFieldOrderCnt = -1; - pRefFrame->BottomFieldOrderCnt = -1; - pRefFrame++; - } -} -#endif - -#if 0 -static inline void vbp_update_reference_frames_h264_methodB(vbp_picture_data_h264* pic_data) -{ - VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms; - int i; - VAPictureH264* pRefFrame = pic_parms->ReferenceFrames; - for (i = 0; i < 16; i++) - { - pRefFrame->picture_id = VA_INVALID_SURFACE; - pRefFrame->frame_idx = -1; - pRefFrame->flags = VA_PICTURE_H264_INVALID; - 
pRefFrame->TopFieldOrderCnt = -1;
-        pRefFrame->BottomFieldOrderCnt = -1;
-        pRefFrame++;
-    }
-
-    pic_parms->num_ref_frames = 0;
-
-
-    int ref_list;
-    int slice_index;
-    int j;
-    VAPictureH264* pRefList = NULL;
-
-    for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++)
-    {
-        VASliceParameterBufferH264* slice_parms =
-            &(pic_data->slc_data[slice_index].slc_parms);
-
-        for (ref_list = 0; ref_list < 2; ref_list++)
-        {
-            if (0 == ref_list)
-                pRefList = slice_parms->RefPicList0;
-            else
-                pRefList = slice_parms->RefPicList1;
-
-            for (i = 0; i < 32; i++, pRefList++)
-            {
-                if (VA_PICTURE_H264_INVALID == pRefList->flags)
-                    break;
-
-                for (j = 0; j < 16; j++)
-                {
-                    if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt ==
-                        pRefList->TopFieldOrderCnt)
-                    {
-                        pic_parms->ReferenceFrames[j].flags |=
-                            pRefList->flags;
-
-                        if ((pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_TOP_FIELD) &&
-                            (pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_BOTTOM_FIELD))
-                        {
-                            pic_parms->ReferenceFrames[j].flags = 0;
-                        }
-                        break;
-                    }
-                }
-                if (j == 16)
-                {
-                    memcpy(&(pic_parms->ReferenceFrames[pic_parms->num_ref_frames++]),
-                        pRefList,
-                        sizeof(VAPictureH264));
-                }
-
-            }
-        }
-    }
-}
-#endif
-
-
 static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index)
 {
     viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
@@ -1380,6 +1277,7 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext)
     int i = 0;
     viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    vbp_h264_parser_private *parser_private = (vbp_h264_parser_private *)pcontext->parser_private;

     //Enable emulation prevention
     cxt->getbits.is_emul_reqd = 1;
@@ -1392,7 +1290,7 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext)
     if (ret == 1)
     {
         WTRACE("configuration data is start-code prefixed.\n");
-        bitstream_pattern = H264_BS_SC_PREFIXED;
+        parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
         return vbp_parse_start_code_h264(pcontext);
     }

@@ -1423,7 +1321,7 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext)
         WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1);
     }

-    NAL_length_size = length_size_minus_one + 1;
+    parser_private->NAL_length_size = length_size_minus_one + 1;

     cur_data++;

@@ -1525,13 +1423,13 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext)
             cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf));
     }

-    bitstream_pattern = H264_BS_LENGTH_PREFIXED;
+    parser_private->bitstream_pattern = H264_BS_LENGTH_PREFIXED;
     return VBP_OK;
 }

-static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p)
+static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p, int *NAL_length_size)
 {
-    switch (NAL_length_size)
+    switch (*NAL_length_size)
     {
         case 4:
             return vbp_utils_ntohl(p);
@@ -1551,7 +1449,7 @@
         default:
-            WTRACE("invalid NAL_length_size: %d.", NAL_length_size);
+            WTRACE("invalid NAL_length_size: %d.", *NAL_length_size);
             /* default to 4 bytes for length */
-            NAL_length_size = 4;
+            *NAL_length_size = 4;
             return vbp_utils_ntohl(p);
     }
 }

@@ -1564,6 +1462,7 @@ static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p)
 uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
 {
     viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    vbp_h264_parser_private *parser_private = (vbp_h264_parser_private *)pcontext->parser_private;

     /* reset query data for the new sample buffer */
     vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
@@ -1593,7 +1492,7 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
     /* start code emulation prevention byte is present in NAL */
     cxt->getbits.is_emul_reqd = 1;

-    if (bitstream_pattern == H264_BS_LENGTH_PREFIXED)
+    if (parser_private->bitstream_pattern == H264_BS_LENGTH_PREFIXED)
     {
         viddec_sc_parse_cubby_cxt_t* cubby = NULL;
         int32_t size_left = 0;
@@ -1604,11 +1503,16 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
         size_left = cubby->size;

-        while (size_left >= NAL_length_size)
+        while (size_left >= parser_private->NAL_length_size)
         {
-            NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed);
+            NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed, &parser_private->NAL_length_size);
+            if (NAL_length <= 0 || NAL_length > size_left - parser_private->NAL_length_size)
+            {
+                ETRACE("Invalid NAL_length parsed.");
+                break;
+            }

-            size_parsed += NAL_length_size;
+            size_parsed += parser_private->NAL_length_size;
             cxt->list.data[cxt->list.num_items].stpos = size_parsed;
             size_parsed += NAL_length; /* skip NAL bytes */
             /* end position is exclusive */
@@ -1623,14 +1527,14 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
             size_left = cubby->size - size_parsed;
         }

-        if (size_left != 0 && length_prefix_verified == 0)
+        if (size_left != 0 && parser_private->length_prefix_verified == 0)
         {
             WTRACE("Elementary stream is not aligned (%d).", size_left);

            /* attempt to correct length prefix to start-code prefix only once; if it succeeds, we will
             * always treat the bit stream as start-code prefixed; otherwise, treat the bit stream as length prefixed */
-            length_prefix_verified = 1;
+            parser_private->length_prefix_verified = 1;

             viddec_sc_parse_cubby_cxt_t temp_cubby = cxt->parse_cubby;
             viddec_parser_ops_t *ops = pcontext->parser_ops;
@@ -1642,8 +1546,8 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
             if (ret == 1)
             {
                 WTRACE("Stream was supposed to be length prefixed, but actually is start-code prefixed.");
-                NAL_length_size = 0;
-                bitstream_pattern = H264_BS_SC_PREFIXED;
+                parser_private->NAL_length_size = 0;
+                parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
                 /* reset parsing data */
                 for (i = 0; i < MAX_NUM_PICTURES; i++)
                 {
@@ -1656,7 +1560,7 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
     }

-    if (bitstream_pattern == H264_BS_SC_PREFIXED)
+    if (parser_private->bitstream_pattern == H264_BS_SC_PREFIXED)
     {
         viddec_sc_parse_cubby_cxt_t cubby;
         /* memory copy without updating cxt->parse_cubby */
@@ -1705,7 +1609,7 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
         if (cxt->list.num_items == 0)
         {
             cxt->list.num_items = 1;
-            bitstream_pattern = H264_BS_SINGLE_NAL;
+            parser_private->bitstream_pattern = H264_BS_SINGLE_NAL;
             WTRACE("Stream was supposed to be SC prefixed, but actually contains a single NAL.");
         }
         cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
@@ -1715,7 +1619,7 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
     }

-    if (bitstream_pattern == H264_BS_SINGLE_NAL)
+    if (parser_private->bitstream_pattern == H264_BS_SINGLE_NAL)
     {
         cxt->list.num_items = 1;
         cxt->list.data[0].stpos = 0;
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h
index 673b3bd..0094edb 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h
@@ -1,10 +1,27 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors.
The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ #ifndef VBP_H264_PARSER_H #define VBP_H264_PARSER_H diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c index 2dc9a48..7797a78 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c @@ -1,12 +1,27 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. 
No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -//#include #include "vbp_loader.h" #include "vbp_utils.h" @@ -28,10 +43,8 @@ uint32 vbp_open(uint32 parser_type, Handle *hcontext) ppcontext = (vbp_context **)hcontext; - /** - * TO DO: - * check if vbp context has been created. - */ + // TODO: check if vbp context has been created. + error = vbp_utils_create_context(parser_type, ppcontext); if (VBP_OK != error) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index ccf8e00..c690e88 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -1,10 +1,27 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+*
+*/
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */

 #ifndef VBP_LOADER_H
 #define VBP_LOADER_H
@@ -105,53 +122,53 @@ typedef struct _vbp_data_mp42

 typedef struct _vbp_codec_data_h264
 {
-    uint8 pic_parameter_set_id;
-    uint8 seq_parameter_set_id;
+    uint8  pic_parameter_set_id;
+    uint8  seq_parameter_set_id;

-    uint8 profile_idc;
-    uint8 level_idc;
+    uint8  profile_idc;
+    uint8  level_idc;

     /*constraint flag sets (h.264 Spec v2009)*/
-    uint8 constraint_set0_flag;
-    uint8 constraint_set1_flag;
-    uint8 constraint_set2_flag;
-    uint8 constraint_set3_flag;
-    uint8 constraint_set4_flag;
+    uint8  constraint_set0_flag;
+    uint8  constraint_set1_flag;
+    uint8  constraint_set2_flag;
+    uint8  constraint_set3_flag;
+    uint8  constraint_set4_flag;

-    uint8 num_ref_frames;
-    uint8 gaps_in_frame_num_value_allowed_flag;
+    uint8  num_ref_frames;
+    uint8  gaps_in_frame_num_value_allowed_flag;

-    uint8 frame_mbs_only_flag;
-    uint8 mb_adaptive_frame_field_flag;
+    uint8  frame_mbs_only_flag;
+    uint8  mb_adaptive_frame_field_flag;

-    int frame_width;
-    int frame_height;
+    int    frame_width;
+    int    frame_height;

-    uint8 vui_parameters_present_flag;
+    uint8  vui_parameters_present_flag;

     /* aspect ratio */
-    uint8 aspect_ratio_idc;
-    uint16 sar_width;
-    uint16 sar_height;
+    uint8  aspect_ratio_idc;
+    uint16 sar_width;
+    uint16 sar_height;

     /* cropping information */
-    int crop_top;
-    int crop_bottom;
-    int crop_left;
-    int crop_right;
+    int    crop_top;
+    int    crop_bottom;
+    int    crop_left;
+    int    crop_right;

     /* video format */
     // default 5 unspecified
-    uint8 video_format;
-    uint8 video_full_range_flag;
+    uint8  video_format;
+    uint8  video_full_range_flag;

     // default 2 unspecified
-    uint8 matrix_coefficients;
+    uint8  matrix_coefficients;

-    uint8 pic_order_cnt_type;
-    int log2_max_pic_order_cnt_lsb_minus4;
+    uint8  pic_order_cnt_type;
+    int    log2_max_pic_order_cnt_lsb_minus4;

-    int bit_rate;
+    int    bit_rate;

 } vbp_codec_data_h264;
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
index 97c0304..ca1d2e1 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
@@ -1,13 +1,29 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.
Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ -//#include #include #include @@ -18,7 +34,12 @@ -static bool short_video_header = TRUE; +typedef struct vbp_mp42_parser_private_t vbp_mp42_parser_private; + +struct vbp_mp42_parser_private_t +{ + bool short_video_header; +}; static uint8 mp4_aspect_ratio_table[][2] = { @@ -131,6 +152,7 @@ uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index) vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + vbp_mp42_parser_private *parser_private = (vbp_mp42_parser_private *)pcontext->parser_private; uint8 is_svh = 0; uint32 current_sc = parser->current_sc; @@ -145,21 +167,21 @@ uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index) case MP4_SC_VISUAL_OBJECT_SEQUENCE: VTRACE ("Visual Object Sequence is parsed.\n"); query_data->codec_data.profile_and_level_indication - = parser->info.profile_and_level_indication; + = parser->info.profile_and_level_indication; VTRACE ("profile_and_level_indication = 0x%x\n", parser->info.profile_and_level_indication); break; case MP4_SC_VIDEO_OBJECT_PLANE: - VTRACE ("Video Object Plane is parsed.\n"); + //VTRACE ("Video Object Plane is parsed.\n"); vbp_on_vop_mp42(pcontext, list_index); break; default: if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && - (current_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) + (current_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) { VTRACE ("Video Object Layer is parsed\n"); - short_video_header = FALSE; + parser_private->short_video_header = FALSE; vbp_fill_codec_data(pcontext); } else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX && @@ -179,7 +201,7 @@ uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index) { if (parser->sc_seen == MP4_SC_SEEN_SVH) { - VTRACE ("Short video header is parsed.\n"); + //VTRACE ("Short video header is parsed.\n"); vbp_on_vop_svh_mp42(pcontext, list_index); } } @@ -224,6 +246,7 @@ uint32 vbp_parse_start_code_mp42(vbp_context *pcontext) uint32 bytes_parsed = 0; viddec_mp4_parser_t *pinfo = NULL; vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + vbp_mp42_parser_private *parser_private = (vbp_mp42_parser_private *)pcontext->parser_private; // reset query data for the new sample buffer @@ -249,21 +272,20 @@ uint32 
vbp_parse_start_code_mp42(vbp_context *pcontext)

     while (1)
     {
-        found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size- bytes_parsed,
-                &sc_end_pos, &is_normal_sc, &resync_marker,short_video_header);
-
-        VTRACE("buf=%x, bytes_parsed=%d, unparsed=%d", (uint32)buf, bytes_parsed, size- bytes_parsed);
-        VTRACE("found_sc=%d, cxt->list.num_items=%d, resync_marker=%d, ",
-                found_sc, cxt->list.num_items, resync_marker);
+        found_sc = vbp_get_sc_pos_mp42(
+                buf + bytes_parsed,
+                size - bytes_parsed,
+                &sc_end_pos,
+                &is_normal_sc,
+                &resync_marker,
+                parser_private->short_video_header);

         if (found_sc)
         {
-            cxt->list.data[cxt->list.num_items].stpos = bytes_parsed
-                    + sc_end_pos - 3;
+            cxt->list.data[cxt->list.num_items].stpos = bytes_parsed + sc_end_pos - 3;
             if (cxt->list.num_items != 0)
             {
-                cxt->list.data[cxt->list.num_items - 1].edpos = bytes_parsed
-                        + sc_end_pos - 3;
+                cxt->list.data[cxt->list.num_items - 1].edpos = bytes_parsed + sc_end_pos - 3;
             }
             bytes_parsed += sc_end_pos;
@@ -274,8 +296,7 @@ uint32 vbp_parse_start_code_mp42(vbp_context *pcontext)
         {
             if (cxt->list.num_items != 0)
             {
-                cxt->list.data[cxt->list.num_items - 1].edpos
-                        = cxt->parse_cubby.size;
+                cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
                 break;
             }
             else
@@ -288,7 +309,7 @@ uint32 vbp_parse_start_code_mp42(vbp_context *pcontext)
     if (resync_marker)
     {
         // either the first slice (GOB) is lost or parser receives a single slice (GOB)
-        if (short_video_header)
+        if (parser_private->short_video_header)
         {
             // TODO: revisit whether HW supports GOB layer decoding for h.263
             WTRACE("Partial frame: GOB buffer.\n");
@@ -342,25 +363,26 @@ vbp_picture_data_mp42* vbp_get_mp42_picture_data(vbp_data_mp42 * query_data)

 void vbp_fill_codec_data(vbp_context *pcontext)
 {
     viddec_mp4_parser_t *parser =
-        (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+            (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);

     vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
     vbp_codec_data_mp42* codec_data = &(query_data->codec_data);
+    vbp_mp42_parser_private *parser_private = (vbp_mp42_parser_private *)pcontext->parser_private;

     codec_data->bit_rate = parser->info.VisualObject.VideoObject.VOLControlParameters.bit_rate;

     codec_data->profile_and_level_indication
-        = parser->info.profile_and_level_indication;
+            = parser->info.profile_and_level_indication;

     codec_data->video_object_layer_width =
-        parser->info.VisualObject.VideoObject.video_object_layer_width;
+            parser->info.VisualObject.VideoObject.video_object_layer_width;

     codec_data->video_object_layer_height =
-        parser->info.VisualObject.VideoObject.video_object_layer_height;
+            parser->info.VisualObject.VideoObject.video_object_layer_height;

     if (parser->info.VisualObject.VideoSignalType.is_video_signal_type)
     {
         codec_data->video_format =
-            parser->info.VisualObject.VideoSignalType.video_format;
+                parser->info.VisualObject.VideoSignalType.video_format;
     }
     else
     {
@@ -369,14 +391,14 @@ void vbp_fill_codec_data(vbp_context *pcontext)
     }

     codec_data->video_range =
-        parser->info.VisualObject.VideoSignalType.video_range;
+            parser->info.VisualObject.VideoSignalType.video_range;

     if (parser->info.VisualObject.VideoSignalType.is_colour_description)
     {
         codec_data->matrix_coefficients =
-            parser->info.VisualObject.VideoSignalType.matrix_coefficients;
+                parser->info.VisualObject.VideoSignalType.matrix_coefficients;
     }
-    else if (short_video_header)
+    else if (parser_private->short_video_header)
     {
         // SMPTE 170M
         codec_data->matrix_coefficients = 6;
@@ -387,7 +409,7 @@ void
vbp_fill_codec_data(vbp_context *pcontext) codec_data->matrix_coefficients = 1; } - codec_data->short_video_header = short_video_header; + codec_data->short_video_header = parser_private->short_video_header; // aspect ratio codec_data->aspect_ratio_info = parser->info.VisualObject.VideoObject.aspect_ratio_info; @@ -411,7 +433,7 @@ void vbp_fill_codec_data(vbp_context *pcontext) void vbp_fill_slice_data(vbp_context *pcontext, int list_index) { viddec_mp4_parser_t *parser = - (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); if (!parser->info.VisualObject.VideoObject.short_video_header) { @@ -426,7 +448,7 @@ void vbp_fill_slice_data(vbp_context *pcontext, int list_index) void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) { viddec_mp4_parser_t *parser = - (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; vbp_picture_data_mp42 *picture_data = NULL; @@ -443,7 +465,7 @@ void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) // first entry if (picture_data == NULL) { - picture_data = (vbp_picture_data_mp42*)g_try_new0(vbp_picture_data_mp42, 1); + picture_data = vbp_malloc_set0(vbp_picture_data_mp42, 1); query_data->picture_data = picture_data; } query_data->number_picture_data = 1; @@ -459,7 +481,7 @@ void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) } if (picture_data->next_picture_data == NULL) { - picture_data->next_picture_data = g_try_new0(vbp_picture_data_mp42, 1); + picture_data->next_picture_data = vbp_malloc_set0(vbp_picture_data_mp42, 1); } query_data->number_picture_data++; @@ -474,13 +496,12 @@ void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) picture_data->new_picture_flag = new_picture_flag; picture_data->vop_coded - = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded; + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded; - VTRACE ("vop_coded = %d\n", picture_data->vop_coded); picture_data->vop_time_increment = - parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_time_increment; + parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_time_increment; // fill picture_param @@ -490,9 +511,9 @@ void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) * to VOL->video_object_layer_width and VOL->video_object_layer_height */ picture_param->vop_width - = parser->info.VisualObject.VideoObject.video_object_layer_width; + = parser->info.VisualObject.VideoObject.video_object_layer_width; picture_param->vop_height - = parser->info.VisualObject.VideoObject.video_object_layer_height; + = parser->info.VisualObject.VideoObject.video_object_layer_height; picture_param->forward_reference_picture = VA_INVALID_SURFACE; picture_param->backward_reference_picture = VA_INVALID_SURFACE; @@ -500,45 +521,44 @@ void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) // Fill VAPictureParameterBufferMPEG4::vol_fields picture_param->vol_fields.bits.short_video_header - = parser->info.VisualObject.VideoObject.short_video_header; + = parser->info.VisualObject.VideoObject.short_video_header; picture_param->vol_fields.bits.chroma_format - = parser->info.VisualObject.VideoObject.VOLControlParameters.chroma_format; + = parser->info.VisualObject.VideoObject.VOLControlParameters.chroma_format; - /* TODO: find out why testsuite 
always set this value to be 0 */ + // TODO: find out why testsuite always set this value to be 0 picture_param->vol_fields.bits.chroma_format = 0; picture_param->vol_fields.bits.interlaced - = parser->info.VisualObject.VideoObject.interlaced; + = parser->info.VisualObject.VideoObject.interlaced; picture_param->vol_fields.bits.obmc_disable - = parser->info.VisualObject.VideoObject.obmc_disable; + = parser->info.VisualObject.VideoObject.obmc_disable; picture_param->vol_fields.bits.sprite_enable - = parser->info.VisualObject.VideoObject.sprite_enable; + = parser->info.VisualObject.VideoObject.sprite_enable; picture_param->vol_fields.bits.sprite_warping_accuracy - = parser->info.VisualObject.VideoObject.sprite_info.sprite_warping_accuracy; + = parser->info.VisualObject.VideoObject.sprite_info.sprite_warping_accuracy; picture_param->vol_fields.bits.quant_type - = parser->info.VisualObject.VideoObject.quant_type; + = parser->info.VisualObject.VideoObject.quant_type; picture_param->vol_fields.bits.quarter_sample - = parser->info.VisualObject.VideoObject.quarter_sample; + = parser->info.VisualObject.VideoObject.quarter_sample; picture_param->vol_fields.bits.data_partitioned - = parser->info.VisualObject.VideoObject.data_partitioned; + = parser->info.VisualObject.VideoObject.data_partitioned; picture_param->vol_fields.bits.reversible_vlc - = parser->info.VisualObject.VideoObject.reversible_vlc; + = parser->info.VisualObject.VideoObject.reversible_vlc; picture_param->vol_fields.bits.resync_marker_disable - = parser->info.VisualObject.VideoObject.resync_marker_disable; - + = parser->info.VisualObject.VideoObject.resync_marker_disable; picture_param->no_of_sprite_warping_points - = parser->info.VisualObject.VideoObject.sprite_info.no_of_sprite_warping_points; + = parser->info.VisualObject.VideoObject.sprite_info.no_of_sprite_warping_points; for (idx = 0; idx < 3; idx++) { picture_param->sprite_trajectory_du[idx] - = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_du[idx]; + = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_du[idx]; picture_param->sprite_trajectory_dv[idx] - = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_dv[idx]; + = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_dv[idx]; } picture_param->quant_precision - = parser->info.VisualObject.VideoObject.quant_precision; + = parser->info.VisualObject.VideoObject.quant_precision; // fill VAPictureParameterBufferMPEG4::vop_fields @@ -546,47 +566,44 @@ void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) if (!parser->info.VisualObject.VideoObject.short_video_header) { picture_param->vop_fields.bits.vop_coding_type - = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type; + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type; } else { picture_param->vop_fields.bits.vop_coding_type - = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.picture_coding_type; + = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.picture_coding_type; } - /* - * TODO: - * fill picture_param->vop_fields.bits.backward_reference_vop_coding_type - * This shall be done in mixvideoformat_mp42. See M42 spec 7.6.7 - */ + // TODO: fill picture_param->vop_fields.bits.backward_reference_vop_coding_type + // This shall be done in mixvideoformat_mp42. 
See M42 spec 7.6.7 if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B) { picture_param->vop_fields.bits.backward_reference_vop_coding_type - = picture_param->vop_fields.bits.vop_coding_type; + = picture_param->vop_fields.bits.vop_coding_type; } picture_param->vop_fields.bits.vop_rounding_type - = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_rounding_type; + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_rounding_type; picture_param->vop_fields.bits.intra_dc_vlc_thr - = parser->info.VisualObject.VideoObject.VideoObjectPlane.intra_dc_vlc_thr; + = parser->info.VisualObject.VideoObject.VideoObjectPlane.intra_dc_vlc_thr; picture_param->vop_fields.bits.top_field_first - = parser->info.VisualObject.VideoObject.VideoObjectPlane.top_field_first; + = parser->info.VisualObject.VideoObject.VideoObjectPlane.top_field_first; picture_param->vop_fields.bits.alternate_vertical_scan_flag - = parser->info.VisualObject.VideoObject.VideoObjectPlane.alternate_vertical_scan_flag; + = parser->info.VisualObject.VideoObject.VideoObjectPlane.alternate_vertical_scan_flag; picture_param->vop_fcode_forward - = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_forward; + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_forward; picture_param->vop_fcode_backward - = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_backward; + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_backward; picture_param->vop_time_increment_resolution - = parser->info.VisualObject.VideoObject.vop_time_increment_resolution; + = parser->info.VisualObject.VideoObject.vop_time_increment_resolution; // short header related picture_param->num_gobs_in_vop - = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_gobs_in_vop; + = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_gobs_in_vop; picture_param->num_macroblocks_in_gob - = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_macroblocks_in_gob; + = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_macroblocks_in_gob; // for direct mode prediction picture_param->TRB = parser->info.VisualObject.VideoObject.TRB; @@ -596,19 +613,18 @@ void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) void vbp_fill_iq_matrix_buffer(vbp_context *pcontext) { viddec_mp4_parser_t *parser = - (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; mp4_VOLQuant_mat_t *quant_mat_info = - &(parser->info.VisualObject.VideoObject.quant_mat_info); + &(parser->info.VisualObject.VideoObject.quant_mat_info); VAIQMatrixBufferMPEG4 *iq_matrix = NULL; iq_matrix = &(query_data->iq_matrix_buffer); iq_matrix->load_intra_quant_mat = 1; //quant_mat_info->load_intra_quant_mat; - iq_matrix->load_non_intra_quant_mat = 1; - // = quant_mat_info->load_nonintra_quant_mat; + iq_matrix->load_non_intra_quant_mat = 1; // = quant_mat_info->load_nonintra_quant_mat; memcpy(iq_matrix->intra_quant_mat, quant_mat_info->intra_quant_mat, 64); memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat, 64); } @@ -636,7 +652,7 @@ uint32 vbp_get_sc_pos_mp42( uint32 *sc_end_pos, uint8 *is_normal_sc, uint8 *resync_marker, - const bool svh_search) + const bool svh_search) { uint8 *ptr = buf; uint32 size; @@ -717,9 +733,10 @@ uint32 vbp_get_sc_pos_mp42( if (phase == 2) { normal_sc = (*ptr == THIRD_STARTCODE_BYTE); - if 
(svh_search) { + if (svh_search) + { short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC)); - } + } *is_normal_sc = normal_sc; // at least 16-bit 0, may be GOB start code or @@ -778,7 +795,7 @@ uint32 vbp_parse_video_packet_header_mp42( mp4_Info_t *pInfo = &(parser_cxt->info); mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject); mp4_VideoObjectPlane_t *vidObjPlane = - &(pInfo->VisualObject.VideoObject.VideoObjectPlane); + &(pInfo->VisualObject.VideoObject.VideoObjectPlane); uint32 code = 0; int32_t getbits = 0; @@ -867,20 +884,19 @@ uint32 vbp_parse_video_packet_header_mp42( vidObjPlane->intra_dc_vlc_thr = code; if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && - (vop_coding_type == MP4_VOP_TYPE_S) && - (vidObjLay->sprite_info.no_of_sprite_warping_points> 0)) + (vop_coding_type == MP4_VOP_TYPE_S) && + (vidObjLay->sprite_info.no_of_sprite_warping_points> 0)) { - if (vbp_sprite_trajectory_mp42(parent, vidObjLay, - vidObjPlane) != VBP_OK) + if (vbp_sprite_trajectory_mp42(parent, vidObjLay, vidObjPlane) != VBP_OK) { break; } } if (vidObjLay->reduced_resolution_vop_enable && - (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) && - ((vop_coding_type == MP4_VOP_TYPE_I) || - (vop_coding_type == MP4_VOP_TYPE_P))) + (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) && + ((vop_coding_type == MP4_VOP_TYPE_I) || + (vop_coding_type == MP4_VOP_TYPE_P))) { // vop_reduced_resolution getbits = viddec_pm_get_bits(parent, &code, 1); @@ -925,7 +941,7 @@ uint32 vbp_resync_marker_Length_mp42(viddec_mp4_parser_t *parser_cxt) { mp4_Info_t *pInfo = &(parser_cxt->info); mp4_VideoObjectPlane_t *vidObjPlane = - &(pInfo->VisualObject.VideoObject.VideoObjectPlane); + &(pInfo->VisualObject.VideoObject.VideoObjectPlane); uint32 resync_marker_length = 0; if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) @@ -959,7 +975,7 @@ uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index) vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; viddec_pm_cxt_t *parent = pcontext->parser_cxt; viddec_mp4_parser_t *parser_cxt = - (viddec_mp4_parser_t *) &(parent->codec_data[0]); + (viddec_mp4_parser_t *) &(parent->codec_data[0]); vbp_picture_data_mp42 *picture_data = vbp_get_mp42_picture_data(query_data); vbp_slice_data_mp42 *slice_data = &(picture_data->slice_data); @@ -974,10 +990,10 @@ uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index) slice_data->buffer_addr = parent->parse_cubby.buf; - slice_data->slice_offset = byte_offset - + parent->list.data[list_index].stpos; - slice_data->slice_size = parent->list.data[list_index].edpos - - parent->list.data[list_index].stpos - byte_offset; + slice_data->slice_offset = + byte_offset + parent->list.data[list_index].stpos; + slice_data->slice_size = + parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset; slice_param->slice_data_size = slice_data->slice_size; slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; @@ -985,7 +1001,7 @@ uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index) slice_param->macroblock_offset = bit_offset; slice_param->macroblock_number = 0; slice_param->quant_scale - = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant; + = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant; return ret; } @@ -1010,39 +1026,18 @@ uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) int32_t getbits = 0; uint32 resync_marker_length = 0; -#ifdef VBP_TRACE - uint32 
list_size_at_index = parent->list.data[list_index].edpos - - parent->list.data[list_index].stpos; - - VTRACE ("list_index = %d list_size_at_index = %d\n", list_index, - list_size_at_index); - - VTRACE ("list_index = %d edpos = %d stpos = %d\n", list_index, - parent->list.data[list_index].edpos, - parent->list.data[list_index].stpos); -#endif - /* The offsets are relative to parent->parse_cubby.buf */ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); -#if 0 - if (is_emul) { - g_print("*** emul != 0\n"); - /*byte_offset += 1;*/ - } -#endif - - picture_data = vbp_get_mp42_picture_data(query_data); slice_data = &(picture_data->slice_data); slice_param = &(slice_data->slice_param); slice_data->buffer_addr = parent->parse_cubby.buf; - slice_data->slice_offset = byte_offset - + parent->list.data[list_index].stpos; - slice_data->slice_size = parent->list.data[list_index].edpos - - parent->list.data[list_index].stpos - byte_offset; + slice_data->slice_offset = byte_offset + parent->list.data[list_index].stpos; + slice_data->slice_size = + parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset; slice_param->slice_data_size = slice_data->slice_size; slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; @@ -1050,7 +1045,7 @@ uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) slice_param->macroblock_offset = bit_offset; slice_param->macroblock_number = 0; slice_param->quant_scale - = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane.vop_quant; + = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane.vop_quant; if (parser_cxt->info.VisualObject.VideoObject.resync_marker_disable) { @@ -1094,8 +1089,8 @@ uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); // update slice data as we found resync_marker - slice_data->slice_size -= (parent->list.data[list_index].edpos - - parent->list.data[list_index].stpos - byte_offset); + slice_data->slice_size -= + (parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset); slice_param->slice_data_size = slice_data->slice_size; // skip resync marker @@ -1126,10 +1121,10 @@ uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) slice_data->buffer_addr = parent->parse_cubby.buf; - slice_data->slice_offset = byte_offset - + parent->list.data[list_index].stpos; - slice_data->slice_size = parent->list.data[list_index].edpos - - parent->list.data[list_index].stpos - byte_offset; + slice_data->slice_offset = + byte_offset + parent->list.data[list_index].stpos; + slice_data->slice_size = + parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset; slice_param->slice_data_size = slice_data->slice_size; slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; @@ -1221,8 +1216,7 @@ uint32 vbp_process_video_packet_mp42(vbp_context *pcontext) uint32 macroblock_number = 0; // parse video_packet_header - vbp_parse_video_packet_header_mp42(parent, parser_cxt, - &quant_scale, ¯oblock_number); + vbp_parse_video_packet_header_mp42(parent, parser_cxt, &quant_scale, ¯oblock_number); // new_picture_flag = 0, this is not the first slice of a picture vbp_fill_picture_param(pcontext, 0); @@ -1376,6 +1370,11 @@ uint32 vbp_free_query_data_mp42(vbp_context *pcontext) vbp_picture_data_mp42* current = NULL; vbp_picture_data_mp42* next = NULL; + if (pcontext->parser_private) + { + free(pcontext->parser_private); + pcontext->parser_private = NULL; + } if 
(query_data) { current = query_data->picture_data; @@ -1401,7 +1400,7 @@ uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext) vbp_data_mp42 *query_data; pcontext->query_data = NULL; - query_data = g_try_new0(vbp_data_mp42, 1); + query_data = vbp_malloc_set0(vbp_data_mp42, 1); if (query_data == NULL) { goto cleanup; @@ -1412,6 +1411,20 @@ uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext) query_data->number_picture_data = 0; query_data->number_pictures = 0; + pcontext->parser_private = NULL; + vbp_mp42_parser_private *parser_private = NULL; + + parser_private = vbp_malloc_set0(vbp_mp42_parser_private, 1); + if (NULL == parser_private) + { + goto cleanup; + } + + /* assign the pointer */ + pcontext->parser_private = (void *)parser_private; + + /* init the pointer */ + parser_private->short_video_header = TRUE; return VBP_OK; cleanup: diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h index c0deaa4..93416b7 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h @@ -1,10 +1,27 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
- */ #ifndef VBP_MP42_PARSER_H #define VBP_MP42_PARSER_H diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c index a527607..237a02f 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c @@ -1,12 +1,28 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ -//#include #include #include "vc1.h" @@ -18,17 +34,17 @@ #include "vbp_mp42_parser.h" -void* g_try_malloc0(uint32 size) { +/* buffer counter */ +uint32 buffer_counter = 0; + + +void* vbp_try_malloc0(uint32 size) { void* pMem = malloc(size); if (pMem) memset(pMem, 0, size); return pMem; } -/* buffer counter */ -uint32 buffer_counter = 0; - - /** * * uninitialize parser context @@ -101,7 +117,7 @@ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) break; default: - //g_warning ("Warning! 
Unsupported parser type!"); + WTRACE("Unsupported parser type!"); return VBP_TYPE; } @@ -113,7 +129,7 @@ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) goto cleanup; } - pcontext->parser_ops = g_try_new(viddec_parser_ops_t, 1); + pcontext->parser_ops = vbp_malloc(viddec_parser_ops_t, 1); if (NULL == pcontext->parser_ops) { ETRACE("Failed to allocate memory"); @@ -122,15 +138,15 @@ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) } #define SET_FUNC_POINTER(X, Y)\ - case X:\ - pcontext->func_init_parser_entries = vbp_init_parser_entries_##Y;\ - pcontext->func_allocate_query_data = vbp_allocate_query_data_##Y;\ - pcontext->func_free_query_data = vbp_free_query_data_##Y;\ - pcontext->func_parse_init_data = vbp_parse_init_data_##Y;\ - pcontext->func_parse_start_code = vbp_parse_start_code_##Y;\ - pcontext->func_process_parsing_result = vbp_process_parsing_result_##Y;\ - pcontext->func_populate_query_data = vbp_populate_query_data_##Y;\ - break; + case X:\ + pcontext->func_init_parser_entries = vbp_init_parser_entries_##Y;\ + pcontext->func_allocate_query_data = vbp_allocate_query_data_##Y;\ + pcontext->func_free_query_data = vbp_free_query_data_##Y;\ + pcontext->func_parse_init_data = vbp_parse_init_data_##Y;\ + pcontext->func_parse_start_code = vbp_parse_start_code_##Y;\ + pcontext->func_process_parsing_result = vbp_process_parsing_result_##Y;\ + pcontext->func_populate_query_data = vbp_populate_query_data_##Y;\ + break; switch (pcontext->parser_type) { @@ -204,7 +220,7 @@ static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext) uint32 error = VBP_OK; viddec_parser_memory_sizes_t sizes; - pcontext->parser_cxt = g_try_new(viddec_pm_cxt_t, 1); + pcontext->parser_cxt = vbp_malloc(viddec_pm_cxt_t, 1); if (NULL == pcontext->parser_cxt) { ETRACE("Failed to allocate memory"); @@ -219,7 +235,7 @@ static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext) /* allocate persistent memory for parser */ if (sizes.persist_size) { - pcontext->persist_mem = g_try_malloc(sizes.persist_size); + pcontext->persist_mem = malloc(sizes.persist_size); if (NULL == pcontext->persist_mem) { ETRACE("Failed to allocate memory"); @@ -231,8 +247,8 @@ static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext) { /* OK for VC-1, MPEG2 and MPEG4. */ if ((VBP_VC1 == pcontext->parser_type) || - (VBP_MPEG2 == pcontext->parser_type) || - (VBP_MPEG4 == pcontext->parser_type)) + (VBP_MPEG2 == pcontext->parser_type) || + (VBP_MPEG4 == pcontext->parser_type)) { pcontext->persist_mem = NULL; } @@ -246,7 +262,7 @@ static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext) } /* allocate a new workload with 1000 items. */ - pcontext->workload1 = g_try_malloc(sizeof(viddec_workload_t) + + pcontext->workload1 = malloc(sizeof(viddec_workload_t) + (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t))); if (NULL == pcontext->workload1) { @@ -256,7 +272,7 @@ static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext) } /* allocate a second workload with 1000 items. 
*/ - pcontext->workload2 = g_try_malloc(sizeof(viddec_workload_t) + + pcontext->workload2 = malloc(sizeof(viddec_workload_t) + (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t))); if (NULL == pcontext->workload2) { @@ -336,10 +352,7 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f /* setup buffer pointer */ cxt->getbits.bstrm_buf.buf = cxt->parse_cubby.buf; - /* - * TO DO: - * check if cxt->getbits.is_emul_reqd is set properly - */ + // TODO: check if cxt->getbits.is_emul_reqd is set properly for (i = 0; i < cxt->list.num_items; i++) { @@ -364,16 +377,15 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0])); /* can't return error for now. Neet further investigation */ - - /*if (0 != error) +#if 0 + if (0 != error) { - ETRACE("failed to parse the syntax: %d!", error); - return error; - }*/ + ETRACE("failed to parse the syntax: %d!", error); + return error; + } +#endif - /* - * process parsing result - */ + /* process parsing result */ error = pcontext->func_process_parsing_result(pcontext, i); if (0 != error) @@ -400,7 +412,7 @@ uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext) /* prevention from the failure */ *ppcontext = NULL; - pcontext = g_try_new0(vbp_context, 1); + pcontext = vbp_malloc_set0(vbp_context, 1); if (NULL == pcontext) { error = VBP_MEM; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h index 08bb76f..073c2c2 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h @@ -1,10 +1,27 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. 
Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ #ifndef VBP_UTILS_H #define VBP_UTILS_H @@ -24,17 +41,14 @@ /* maximum two pictures per sample buffer */ #define MAX_NUM_PICTURES 2 -#define free free -#define g_try_malloc malloc -#define g_try_new(struct_type, n_structs) \ - ((struct_type *) g_try_malloc (sizeof (struct_type) * n_structs)) -#define g_try_new0(struct_type, n_structs) \ - ((struct_type *) g_try_malloc0 (sizeof (struct_type) * n_structs)) +#define vbp_malloc(struct_type, n_structs) \ + ((struct_type *) malloc(sizeof(struct_type) * n_structs)) +#define vbp_malloc_set0(struct_type, n_structs) \ + ((struct_type *) vbp_try_malloc0(sizeof(struct_type) * n_structs)) -void* g_try_malloc0(uint32 size); extern uint32 viddec_parse_sc(void *in, void *pcxt, void *sc_state); @@ -79,17 +93,22 @@ struct vbp_context_t /* format specific query data */ void *query_data; + /* parser type specific data*/ + void *parser_private; - function_init_parser_entries func_init_parser_entries; - function_allocate_query_data func_allocate_query_data; - function_free_query_data func_free_query_data; - function_parse_init_data func_parse_init_data; - function_parse_start_code func_parse_start_code; + function_init_parser_entries func_init_parser_entries; + function_allocate_query_data func_allocate_query_data; + function_free_query_data func_free_query_data; + function_parse_init_data func_parse_init_data; + function_parse_start_code func_parse_start_code; function_process_parsing_result func_process_parsing_result; - function_populate_query_data func_populate_query_data; + function_populate_query_data func_populate_query_data; }; + +void* vbp_try_malloc0(uint32 size); + /** * create VBP context */ diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c index f84dbd7..3ab3467 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c @@ -1,12 +1,27 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. 
All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ -//#include #include #include @@ -122,7 +137,7 @@ uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext) pcontext->query_data = NULL; vbp_data_vc1 *query_data = NULL; - query_data = g_try_new0(vbp_data_vc1, 1); + query_data = vbp_malloc_set0(vbp_data_vc1, 1); if (NULL == query_data) { return VBP_MEM; @@ -131,12 +146,12 @@ uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext) /* assign the pointer */ pcontext->query_data = (void *)query_data; - query_data->se_data = g_try_new0(vbp_codec_data_vc1, 1); + query_data->se_data = vbp_malloc_set0(vbp_codec_data_vc1, 1); if (NULL == query_data->se_data) { goto cleanup; } - query_data->pic_data = g_try_new0(vbp_picture_data_vc1, MAX_NUM_PICTURES); + query_data->pic_data = vbp_malloc_set0(vbp_picture_data_vc1, MAX_NUM_PICTURES); if (NULL == query_data->pic_data) { goto cleanup; @@ -145,19 +160,19 @@ uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext) int i; for (i = 0; i < MAX_NUM_PICTURES; i++) { - query_data->pic_data[i].pic_parms = g_try_new0(VAPictureParameterBufferVC1, 1); + query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferVC1, 1); if (NULL == query_data->pic_data[i].pic_parms) { goto cleanup; } - query_data->pic_data[i].packed_bitplanes = g_try_malloc0(MAX_BITPLANE_SIZE); + query_data->pic_data[i].packed_bitplanes = vbp_try_malloc0(MAX_BITPLANE_SIZE); if (NULL == query_data->pic_data[i].packed_bitplanes) { goto cleanup; } - query_data->pic_data[i].slc_data = g_try_malloc0(MAX_NUM_SLICES * sizeof(vbp_slice_data_vc1)); + query_data->pic_data[i].slc_data = vbp_try_malloc0(MAX_NUM_SLICES * sizeof(vbp_slice_data_vc1)); if (NULL == query_data->pic_data[i].slc_data) { goto cleanup; @@ -421,8 +436,7 @@ uint32_t vbp_parse_start_code_vc1(vbp_context *pcontext) } else { - /* WMV: vc1 simple or main profile. No start code present. - */ + /* WMV: vc1 simple or main profile. No start code present. */ /* must set is_emul_reqd to 0! 
*/ cxt->getbits.is_emul_reqd = 0; @@ -875,7 +889,7 @@ static void vbp_pack_picture_params_vc1( break; default: - /* to do: handle this case */ + // TODO: handle this case break; } pic_parms->picture_fields.bits.frame_coding_mode = picLayerHeader->FCM; @@ -1043,8 +1057,9 @@ uint32_t vbp_process_parsing_result_vc1(vbp_context *pcontext, int index) uint32 error = VBP_OK; vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data; - if (parser->start_code != VC1_SC_FRM && parser->start_code != VC1_SC_FLD && - parser->start_code != VC1_SC_SLC) + if (parser->start_code != VC1_SC_FRM && + parser->start_code != VC1_SC_FLD && + parser->start_code != VC1_SC_SLC) { /* only handle frame data, field data and slice data here */ diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h index 510e16c..aec7a56 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h @@ -1,10 +1,26 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +*/ #ifndef VBP_VC1_PARSER_H #define VBP_VC1_PARSER_H diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk new file mode 100644 index 0000000..63b8619 --- /dev/null +++ b/videodecoder/Android.mk @@ -0,0 +1,40 @@ +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + + +LOCAL_SRC_FILES := \ + VideoDecoderHost.cpp \ + VideoDecoderBase.cpp \ + VideoDecoderWMV.cpp \ + VideoDecoderMPEG4.cpp \ + VideoDecoderAVC.cpp \ + VideoDecoderVP8.cpp \ + VideoDecoderTrace.cpp + +# LOCAL_CFLAGS := + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH) \ + $(TARGET_OUT_HEADERS)/libva \ + $(TARGET_OUT_HEADERS)/libmixvbp + +#LOCAL_LDLIBS += -lpthread + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + libmixvbp \ + libva \ + libva-android \ + libva-tpi + + +#LOCAL_CFLAGS += -DANDROID + + +#LOCAL_SHARED_LIBRARIES += liblog + + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libva_videodecoder + +include $(BUILD_SHARED_LIBRARY) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp new file mode 100644 index 0000000..9bf06fb --- /dev/null +++ b/videodecoder/VideoDecoderAVC.cpp @@ -0,0 +1,768 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#include "VideoDecoderAVC.h" +#include "VideoDecoderTrace.h" +#include + +VideoDecoderAVC::VideoDecoderAVC(const char *mimeType) + : VideoDecoderBase(mimeType, VBP_H264), + mToggleDPB(0), + mErrorConcealment(false){ + + invalidateDPB(0); + invalidateDPB(1); + mLastPictureFlags = VA_PICTURE_H264_INVALID; +} + +VideoDecoderAVC::~VideoDecoderAVC() { + stop(); +} + +Decode_Status VideoDecoderAVC::start(VideoConfigBuffer *buffer) { + Decode_Status status; + + status = VideoDecoderBase::start(buffer); + CHECK_STATUS("VideoDecoderBase::start"); + + // We don't want base class to manage reference. 
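+ // (AVC tracks its own references in the two ping-pong DPBs (mDPBs); see updateDPB() and updateReferenceFrames().)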
+ VideoDecoderBase::ManageReference(false); + // output by picture order count + VideoDecoderBase::setOutputMethod(OUTPUT_BY_POC); + + mErrorConcealment = buffer->flag & WANT_ERROR_CONCEALMENT; + if (buffer->data == NULL || buffer->size == 0) { + WTRACE("No config data to start VA."); + if ((buffer->flag & HAS_SURFACE_NUMBER) && (buffer->flag & HAS_VA_PROFILE)) { + ITRACE("Using client-supplied profile and surface number to start VA."); + return VideoDecoderBase::setupVA(buffer->surfaceNumber, buffer->profile); + } + return DECODE_SUCCESS; + } + + vbp_data_h264 *data = NULL; + status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data); + CHECK_STATUS("VideoDecoderBase::parseBuffer"); + + status = startVA(data); + return status; +} + +void VideoDecoderAVC::stop(void) { + // drop the last frame and ignore return value + endDecodingFrame(true); + VideoDecoderBase::stop(); + invalidateDPB(0); + invalidateDPB(1); + mToggleDPB = 0; + mErrorConcealment = false; + mLastPictureFlags = VA_PICTURE_H264_INVALID; +} + +void VideoDecoderAVC::flush(void) { + // drop the frame and ignore return value + VideoDecoderBase::flush(); + invalidateDPB(0); + invalidateDPB(1); + mToggleDPB = 0; + mLastPictureFlags = VA_PICTURE_H264_INVALID; +} + +Decode_Status VideoDecoderAVC::decode(VideoDecodeBuffer *buffer) { + Decode_Status status; + vbp_data_h264 *data = NULL; + if (buffer == NULL) { + return DECODE_INVALID_DATA; + } + status = VideoDecoderBase::parseBuffer( + buffer->data, + buffer->size, + false, + (void**)&data); + CHECK_STATUS("VideoDecoderBase::parseBuffer"); + + if (!mVAStarted) { + if (data->has_sps && data->has_pps) { + status = startVA(data); + CHECK_STATUS("startVA"); + } else { + WTRACE("Can't start VA: SPS or PPS is not yet available."); + return DECODE_SUCCESS; + } + } + + status = decodeFrame(buffer, data); + return status; +} + +Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data) { + Decode_Status status; + if (data->has_sps == 0 || data->has_pps == 0) { + return DECODE_NO_CONFIG; + } + + // Don't remove the following code; it can be enabled for debugging the DPB.
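+ // (when enabled, it VTRACEs the POC, field flags, and reference status of each parsed picture.)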
+#if 0 + for (int i = 0; i < data->num_pictures; i++) { + VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic; + VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d, reference = %d", + i, + buffer->timeStamp/1E6, + pic.TopFieldOrderCnt, + pic.BottomFieldOrderCnt, + pic.flags, + (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || + (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)); + } +#endif + if (data->new_sps) { + status = handleNewSequence(data); + CHECK_STATUS("handleNewSequence"); + } + + // first pic_data always exists, check if any slice is parsed + if (data->pic_data[0].num_slices == 0) { + ITRACE("No slice available for decoding."); + return DECODE_SUCCESS; + } + + uint64_t lastPTS = mCurrentPTS; + mCurrentPTS = buffer->timeStamp; + + //if (lastPTS != mCurrentPTS) { + if (isNewFrame(data)) { + // finish decoding the last frame + status = endDecodingFrame(false); + CHECK_STATUS("endDecodingFrame"); + + // start decoding a new frame + status = beginDecodingFrame(data); + CHECK_STATUS("beginDecodingFrame"); + } else { + status = continueDecodingFrame(data); + CHECK_STATUS("continueDecodingFrame"); + } + + // HAS_COMPLETE_FRAME is not reliable as it may indicate end of a field + /* if (buffer->flag & HAS_COMPLETE_FRAME) { + // finish decoding current frame + status = endDecodingFrame(false); + CHECK_STATUS("endDecodingFrame"); + }*/ + + if (mSizeChanged) { + mSizeChanged = false; + return DECODE_FORMAT_CHANGE; + } + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderAVC::beginDecodingFrame(vbp_data_h264 *data) { + Decode_Status status; + + status = acquireSurfaceBuffer(); + CHECK_STATUS("acquireSurfaceBuffer"); + VAPictureH264 *picture = &(data->pic_data[0].pic_parms->CurrPic); + if ((picture->flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || + (picture->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { + mAcquiredBuffer->referenceFrame = true; + } else { + mAcquiredBuffer->referenceFrame = false; + } + // set asReference in updateDPB + + if (picture->flags & VA_PICTURE_H264_TOP_FIELD) { + mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD; + } else { + mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE; + } + + // TODO: Set the discontinuity flag + mAcquiredBuffer->renderBuffer.flag = 0; + mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS; + mAcquiredBuffer->pictureOrder = picture->TopFieldOrderCnt; + + status = continueDecodingFrame(data); + // surface buffer is released if decode fails + return status; +} + + +Decode_Status VideoDecoderAVC::continueDecodingFrame(vbp_data_h264 *data) { + Decode_Status status; + vbp_picture_data_h264 *picData = data->pic_data; + + // TODO: remove these debugging codes + if (mAcquiredBuffer == NULL || mAcquiredBuffer->renderBuffer.surface == VA_INVALID_SURFACE) { + ETRACE("mAcquiredBuffer is NULL. Implementation bug."); + return DECODE_FAIL; + } + for (uint32_t picIndex = 0; picIndex < data->num_pictures; picIndex++, picData++) { + // sanity check + if (picData == NULL || picData->pic_parms == NULL || picData->slc_data == NULL || picData->num_slices == 0) { + return DECODE_PARSER_FAIL; + } + + for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) { + status = decodeSlice(data, picIndex, sliceIndex); + if (status != DECODE_SUCCESS) { + endDecodingFrame(true); + // TODO: this is new code + // remove current frame from DPB as it can't be decoded. 
+ removeReferenceFromDPB(picData->pic_parms); + return status; + } + } + } + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderAVC::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { + Decode_Status status; + VAStatus vaStatus; + uint32_t bufferIDCount = 0; + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID bufferIDs[4]; + + vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); + vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); + VAPictureParameterBufferH264 *picParam = picData->pic_parms; + VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); + + if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { + // either condition indicates start of a new frame + if (sliceParam->first_mb_in_slice != 0) { + WTRACE("The first slice is lost."); + // TODO: handle the lost first slice + } + if (mDecodingFrame) { + // interlaced content, complete decoding the first field + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS("vaEndPicture"); + + // for interlaced content, top field may be valid only after the second field is parsed + mAcquiredBuffer->pictureOrder = picParam->CurrPic.TopFieldOrderCnt; + } + + // Check there is no reference frame loss before decoding a frame + + // Update the reference frames and surface IDs for DPB and current frame + status = updateDPB(picParam); + CHECK_STATUS("updateDPB"); + + // We have to provide a hacked DPB rather than the complete DPB to libva as a workaround + status = updateReferenceFrames(picData); + CHECK_STATUS("updateReferenceFrames"); + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); + + // start decoding a frame + mDecodingFrame = true; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferH264), + 1, + picParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferH264), + 1, + data->IQ_matrix_buf, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); + bufferIDCount++; + } + + status = setReference(sliceParam); + CHECK_STATUS("setReference"); + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264), + 1, + sliceParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + sliceData->slice_size, //size + 1, //num_elements + sliceData->buffer_addr + sliceData->slice_offset, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + bufferIDCount++; + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + bufferIDs, + bufferIDCount); + CHECK_VA_STATUS("vaRenderPicture"); + + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderAVC::setReference(VASliceParameterBufferH264 *sliceParam) { + int32_t numList = 1; + // TODO: set numList to 0 if it is I slice + if (sliceParam->slice_type == 1 || sliceParam->slice_type == 6) { + // B slice + numList = 2; + } + + int32_t activeMinus1 = sliceParam->num_ref_idx_l0_active_minus1; + VAPictureH264 *ref = sliceParam->RefPicList0; + + for (int32_t i = 0; i < numList; i++) { + if (activeMinus1 >= REF_LIST_SIZE) { + ETRACE("Invalid 
activeMinus1 (%d)", activeMinus1); + return DECODE_PARSER_FAIL; + } + for (int32_t j = 0; j <= activeMinus1; j++, ref++) { + if (!(ref->flags & VA_PICTURE_H264_INVALID)) { + ref->picture_id = findSurface(ref); + if (ref->picture_id == VA_INVALID_SURFACE) { + if (mLastReference) { + WTRACE("Reference frame %d is missing. Use last reference", getPOC(ref)); + ref->picture_id = mLastReference->renderBuffer.surface; + } else { + ETRACE("Reference frame %d is missing. Stop decoding.", getPOC(ref)); + return DECODE_NO_REFERENCE; + } + } + } + } + activeMinus1 = sliceParam->num_ref_idx_l1_active_minus1; + ref = sliceParam->RefPicList1; + } + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderAVC::updateDPB(VAPictureParameterBufferH264 *picParam) { + clearAsReference(mToggleDPB); + // pointer to toggled DPB (new) + DecodedPictureBuffer *dpb = mDPBs[!mToggleDPB]; + VAPictureH264 *ref = picParam->ReferenceFrames; + + // update current picture ID + picParam->CurrPic.picture_id = mAcquiredBuffer->renderBuffer.surface; + + // build new DPB + for (int32_t i = 0; i < DPB_SIZE; i++, ref++) { + if (ref->flags & VA_PICTURE_H264_INVALID) { + continue; + } + dpb->poc = getPOC(ref); + dpb->surfaceBuffer = findSurfaceBuffer(ref); + if (dpb->surfaceBuffer == NULL) { + ETRACE("Reference frame %d is missing for current frame %d", dpb->poc, getPOC(&(picParam->CurrPic))); + if (dpb->poc == getPOC(&(picParam->CurrPic))) { + WTRACE("updateDPB: Using the current picture for missing reference."); + dpb->surfaceBuffer = mAcquiredBuffer; + } else if (mLastReference) { + WTRACE("updateDPB: Use last reference frame %d for missing reference.", mLastReference->pictureOrder); + // TODO: this is new code for error resilience + dpb->surfaceBuffer = mLastReference; + } else { + WTRACE("updateDPB: Unable to recover the missing reference frame."); + // continue building the DPB without advancing the dpb pointer. + continue; + // continue building DPB as this reference may not be actually used. + // this especially happens after seeking to a non-IDR I frame. + //return DECODE_NO_REFERENCE; + } + } + if (dpb->surfaceBuffer) { + // this surface is used as reference + dpb->surfaceBuffer->asReferernce = true; + } + dpb++; + } + + // add current frame to DPB if it is a reference frame + if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || + (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { + dpb->poc = getPOC(&(picParam->CurrPic)); + dpb->surfaceBuffer = mAcquiredBuffer; + dpb->surfaceBuffer->asReferernce = true; + } + // invalidate the currently used DPB + invalidateDPB(mToggleDPB); + mToggleDPB = !mToggleDPB; + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderAVC::updateReferenceFrames(vbp_picture_data_h264 *picData) { + bool found = false; + uint32_t flags = 0; + VAPictureParameterBufferH264 *picParam = picData->pic_parms; + VASliceParameterBufferH264 *sliceParam = NULL; + uint8_t activeMinus1 = 0; + VAPictureH264 *refList = NULL; + VAPictureH264 *dpb = picParam->ReferenceFrames; + VAPictureH264 *refFrame = NULL; + + // invalidate DPB in the picture buffer + memset(picParam->ReferenceFrames, 0xFF, sizeof(picParam->ReferenceFrames)); + picParam->num_ref_frames = 0; + + // update DPB from the reference list in each slice. + for (uint32_t slice = 0; slice < picData->num_slices; slice++) { + sliceParam = &(picData->slc_data[slice].slc_parms); + for (int32_t list = 0; list < 2; list++) { + refList = (list == 0) ? sliceParam->RefPicList0 : + sliceParam->RefPicList1; + activeMinus1 = (list == 0) ? 
sliceParam->num_ref_idx_l0_active_minus1 : + sliceParam->num_ref_idx_l1_active_minus1; + if (activeMinus1 >= REF_LIST_SIZE) { + return DECODE_PARSER_FAIL; + } + for (uint8_t item = 0; item < (uint8_t)(activeMinus1 + 1); item++, refList++) { + if (refList->flags & VA_PICTURE_H264_INVALID) { + break; + } + found = false; + refFrame = picParam->ReferenceFrames; + for (uint8_t frame = 0; frame < picParam->num_ref_frames; frame++, refFrame++) { + if (refFrame->TopFieldOrderCnt == refList->TopFieldOrderCnt) { + // check for complementary field + flags = refFrame->flags | refList->flags; + //If both TOP and BOTTOM are set, we'll clear those flags + if ((flags & VA_PICTURE_H264_TOP_FIELD) && + (flags & VA_PICTURE_H264_BOTTOM_FIELD)) { + refFrame->flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; + } + found = true; //already in the DPB; will not add this one + break; + } + } + if (found == false) { + // add a new reference to the DPB + dpb->picture_id = findSurface(refList); + if (dpb->picture_id == VA_INVALID_SURFACE) { + if (mLastReference != NULL) { + dpb->picture_id = mLastReference->renderBuffer.surface; + } else { + ETRACE("Reference frame %d is missing. Stop updating reference frames.", getPOC(refList)); + return DECODE_NO_REFERENCE; + } + } + dpb->flags = refList->flags; + dpb->frame_idx = refList->frame_idx; + dpb->TopFieldOrderCnt = refList->TopFieldOrderCnt; + dpb->BottomFieldOrderCnt = refList->BottomFieldOrderCnt; + dpb++; + picParam->num_ref_frames++; + } + } + } + } + return DECODE_SUCCESS; +} + +void VideoDecoderAVC::removeReferenceFromDPB(VAPictureParameterBufferH264 *picParam) { + // remove the current frame from DPB as it can't be decoded. + if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || + (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { + DecodedPictureBuffer *dpb = mDPBs[mToggleDPB]; + uint32_t poc = getPOC(&(picParam->CurrPic)); + for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) { + if (poc == dpb->poc) { + dpb->poc = (uint32_t)-1; + if (dpb->surfaceBuffer) { + dpb->surfaceBuffer->asReferernce = false; + } + dpb->surfaceBuffer = NULL; + break; + } + } + } +} + +uint32_t VideoDecoderAVC::getPOC(VAPictureH264 *pic) { + if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) { + return pic->BottomFieldOrderCnt; + } + return pic->TopFieldOrderCnt; +} + +VASurfaceID VideoDecoderAVC::findSurface(VAPictureH264 *pic) { + VideoSurfaceBuffer *p = findSurfaceBuffer(pic); + if (p == NULL) { + ETRACE("Could not find surface for poc %d", getPOC(pic)); + return VA_INVALID_SURFACE; + } + return p->renderBuffer.surface; +} + +VideoSurfaceBuffer* VideoDecoderAVC::findSurfaceBuffer(VAPictureH264 *pic) { + DecodedPictureBuffer *dpb = mDPBs[mToggleDPB]; + for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) { + if (dpb->poc == pic->BottomFieldOrderCnt || + dpb->poc == pic->TopFieldOrderCnt) { + // TODO: remove this debugging code + if (dpb->surfaceBuffer == NULL) { + ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic)); + } + return dpb->surfaceBuffer; + } + } + ETRACE("Unable to find surface for poc %d", getPOC(pic)); + return NULL; +} + +void VideoDecoderAVC::invalidateDPB(int toggle) { + DecodedPictureBuffer* p = mDPBs[toggle]; + for (int i = 0; i < DPB_SIZE; i++) { + p->poc = (uint32_t) -1; + p->surfaceBuffer = NULL; + p++; + } +} + +void VideoDecoderAVC::clearAsReference(int toggle) { + DecodedPictureBuffer* p = mDPBs[toggle]; + for (int i = 0; i < DPB_SIZE; i++) { + if (p->surfaceBuffer) { + p->surfaceBuffer->asReferernce = false; + } + p++; + } +} +
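The DPB helpers above work as a ping-pong pair: updateDPB() builds the next DPB in the inactive slot of mDPBs while the active slot still describes the previous frame, then flips mToggleDPB. As a standalone illustration of that pattern, here is a minimal sketch; Entry and rebuild() are simplified, hypothetical stand-ins for DecodedPictureBuffer and the real update logic, not code from this patch.

#include <stdint.h>

struct Entry {
    uint32_t poc;      // picture order count, (uint32_t)-1 when empty
    void *surface;     // stand-in for VideoSurfaceBuffer*
};

class PingPongDPB {
public:
    enum { SIZE = 16 };    // mirrors DPB_SIZE above

    PingPongDPB() : mToggle(0) {
        invalidate(0);
        invalidate(1);
    }

    // Build the next DPB from the surviving references while the current
    // DPB is still intact, then make the new one current.
    void rebuild(const Entry *refs, int count) {
        Entry *next = mDPBs[!mToggle];
        for (int i = 0; i < count && i < SIZE; i++) {
            next[i] = refs[i];
        }
        invalidate(mToggle);   // old DPB becomes scratch for the next flip
        mToggle = !mToggle;
    }

private:
    void invalidate(int toggle) {
        for (int i = 0; i < SIZE; i++) {
            mDPBs[toggle][i].poc = (uint32_t)-1;
            mDPBs[toggle][i].surface = 0;
        }
    }

    Entry mDPBs[2][SIZE];
    int mToggle;           // index of the current DPB, 0 or 1
};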
+Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) { + updateFormatInfo(data); + + //Use high profile for all kinds of H.264 profiles (baseline, main and high) except for constrained baseline + VAProfile vaProfile = VAProfileH264High; + + // TODO: determine when to use VAProfileH264ConstrainedBaseline, set only if we are told to do so + if ((data->codec_data->profile_idc == 66 || data->codec_data->constraint_set0_flag == 1) && + data->codec_data->constraint_set1_flag == 1) { + if (mErrorConcealment) { + vaProfile = VAProfileH264ConstrainedBaseline; + } + } + // TODO: use maxDPBSize to set window size + // 1024 * MaxDPB / ( PicWidthInMbs * FrameHeightInMbs * 384 ), 16 + //VideoDecoderBase::setOutputWindowSize(getDPBSize(data)); + return VideoDecoderBase::setupVA(data->codec_data->num_ref_frames + AVC_EXTRA_SURFACE_NUMBER, vaProfile); +} + +void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { + // new video size + int width = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16; + int height = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16; + ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d", + mVideoFormatInfo.width, mVideoFormatInfo.height, width, height); + + if (mVideoFormatInfo.width != width || + mVideoFormatInfo.height!= height) { + mVideoFormatInfo.width = width; + mVideoFormatInfo.height = height; + mSizeChanged = true; + ITRACE("Video size is changed."); + } + + if (data->new_sps) { + mSizeChanged = true; + ITRACE("New sequence is received. Assuming video size is changed."); + } + + // video_range has default value of 0. + mVideoFormatInfo.videoRange = data->codec_data->video_full_range_flag; + + switch (data->codec_data->matrix_coefficients) { + case 1: + mVideoFormatInfo.colorMatrix = VA_SRC_BT709; + break; + + // ITU-R Recommendation BT.470-6 System B, G (MP4), same as + // SMPTE 170M/BT601 + case 5: + case 6: + mVideoFormatInfo.colorMatrix = VA_SRC_BT601; + break; + + default: + // unknown color matrix, set to 0 so color space flag will not be set. 
+ mVideoFormatInfo.colorMatrix = 0; + break; + } + mVideoFormatInfo.aspectX = data->codec_data->sar_width; + mVideoFormatInfo.aspectY = data->codec_data->sar_height; + mVideoFormatInfo.bitrate = data->codec_data->bit_rate; + mVideoFormatInfo.cropLeft = data->codec_data->crop_left; + mVideoFormatInfo.cropRight = data->codec_data->crop_right; + mVideoFormatInfo.cropTop = data->codec_data->crop_top; + mVideoFormatInfo.cropBottom = data->codec_data->crop_bottom; + + ITRACE("Cropping: left = %d, top = %d, right = %d, bottom = %d", + data->codec_data->crop_left, + data->codec_data->crop_top, + data->codec_data->crop_right, + data->codec_data->crop_bottom); + + mVideoFormatInfo.valid = true; +} + +Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) { + int width = mVideoFormatInfo.width; + int height = mVideoFormatInfo.height; + + updateFormatInfo(data); + if (mSizeChanged == false) { + return DECODE_SUCCESS; + } + + if (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth || + mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight) { + ETRACE("New video size %d x %d exceeds surface size %d x %d.", + mVideoFormatInfo.width, mVideoFormatInfo.height, + mVideoFormatInfo.surfaceWidth, mVideoFormatInfo.surfaceHeight); + return DECODE_NEED_RESTART; + } + + if (width == mVideoFormatInfo.width && + height == mVideoFormatInfo.height) { + ITRACE("New video sequence with the same resolution."); + } else { + WTRACE("Video size changed from %d x %d to %d x %d.", width, height, + mVideoFormatInfo.width, mVideoFormatInfo.height); + flush(); + } + return DECODE_SUCCESS; +} + +bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data) { + if (data->num_pictures == 0) { + LOGE("num_pictures == 0"); + return true; + } + + vbp_picture_data_h264* picData = data->pic_data; + if (picData->num_slices == 0) { + LOGE("num_slices == 0"); + return true; + } + + bool newFrame = false; + uint32_t fieldFlags = VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD; + + if (picData->slc_data[0].slc_parms.first_mb_in_slice != 0) { + // not the first slice, assume it is a continuation of a partial frame + // TODO: check if it is a new frame boundary, as the first slice may get lost in the streaming case. + LOGW("first_mb_in_slice != 0"); + } else { + if ((picData->pic_parms->CurrPic.flags & fieldFlags) == fieldFlags) { + LOGE("Current picture has both odd field and even field."); + } + // current picture is a field or a frame, and the buffer contains the first slice; check if the current picture and + // the last picture form an opposite field pair + if (((mLastPictureFlags | picData->pic_parms->CurrPic.flags) & fieldFlags) == fieldFlags) { + // opposite field + newFrame = false; + LOGW("current picture is not at frame boundary."); + mLastPictureFlags = 0; + } else { + newFrame = true; + mLastPictureFlags = 0; + for (uint32_t i = 0; i < data->num_pictures; i++) { + mLastPictureFlags |= data->pic_data[i].pic_parms->CurrPic.flags; + } + if ((mLastPictureFlags & fieldFlags) == fieldFlags) { + // current buffer contains both odd field and even field.
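+ // (the field pair completes within this buffer, so clear the flags and treat the next buffer as a new frame.)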
+ mLastPictureFlags = 0; + } + } + } + + return newFrame; +} + +int32_t VideoDecoderAVC::getDPBSize(vbp_data_h264 *data) { + struct DPBTable { + int32_t level; + float maxDPB; + } dpbTable[] = { + {9, 148.5}, + {10, 148.5}, + {11, 337.5}, + {12, 891.0}, + {13, 891.0}, + {20, 891.0}, + {21, 1782.0}, + {22, 3037.5}, + {30, 3037.5}, + {31, 6750.0}, + {32, 7680.0}, + {40, 12288.0}, + {41, 12288.0}, + {42, 13056.0}, + {50, 41400.0}, + {51, 69120.0} + }; + + int32_t count = sizeof(dpbTable)/sizeof(DPBTable); + float maxDPB = 0; + for (int32_t i = 0; i < count; i++) + { + if (dpbTable[i].level == data->codec_data->level_idc) { + maxDPB = dpbTable[i].maxDPB; + break; + } + } + + int32_t maxDPBSize = maxDPB * 1024 / ( + (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * + (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * + (data->codec_data->frame_mbs_only_flag ? 1 : 2) * + 384); + + if (maxDPBSize > 16) { + maxDPBSize = 16; + } else if (maxDPBSize == 0) { + maxDPBSize = 3; + } + + LOGI("maxDPBSize is %d", maxDPBSize); + return maxDPBSize; +} + diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h new file mode 100644 index 0000000..ff52486 --- /dev/null +++ b/videodecoder/VideoDecoderAVC.h @@ -0,0 +1,83 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +*/ + +#ifndef VIDEO_DECODER_AVC_H_ +#define VIDEO_DECODER_AVC_H_ + +#include "VideoDecoderBase.h" + + +class VideoDecoderAVC : public VideoDecoderBase { +public: + VideoDecoderAVC(const char *mimeType); + virtual ~VideoDecoderAVC(); + + virtual Decode_Status start(VideoConfigBuffer *buffer); + virtual void stop(void); + virtual void flush(void); + virtual Decode_Status decode(VideoDecodeBuffer *buffer); + +private: + Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data); + Decode_Status beginDecodingFrame(vbp_data_h264 *data); + Decode_Status continueDecodingFrame(vbp_data_h264 *data); + Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); + Decode_Status setReference(VASliceParameterBufferH264 *sliceParam); + Decode_Status updateDPB(VAPictureParameterBufferH264 *picParam); + Decode_Status updateReferenceFrames(vbp_picture_data_h264 *picData); + void removeReferenceFromDPB(VAPictureParameterBufferH264 *picParam); + inline uint32_t getPOC(VAPictureH264 *pic); // Picture Order Count + inline VASurfaceID findSurface(VAPictureH264 *pic); + inline VideoSurfaceBuffer* findSurfaceBuffer(VAPictureH264 *pic); + inline void invalidateDPB(int toggle); + inline void clearAsReference(int toggle); + Decode_Status startVA(vbp_data_h264 *data); + void updateFormatInfo(vbp_data_h264 *data); + Decode_Status handleNewSequence(vbp_data_h264 *data); + bool isNewFrame(vbp_data_h264 *data); + int32_t getDPBSize(vbp_data_h264 *data); + +private: + struct DecodedPictureBuffer { + VideoSurfaceBuffer *surfaceBuffer; + uint32_t poc; // Picture Order Count + }; + + enum { + AVC_EXTRA_SURFACE_NUMBER = 6, + // maximum DPB (Decoded Picture Buffer) size + DPB_SIZE = 16, + REF_LIST_SIZE = 32, + }; + + // maintain 2 ping-pong decoded picture buffers + DecodedPictureBuffer mDPBs[2][DPB_SIZE]; + uint8_t mToggleDPB; // 0 or 1 + bool mErrorConcealment; + uint32_t mLastPictureFlags; +}; + + + +#endif /* VIDEO_DECODER_AVC_H_ */ diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp new file mode 100644 index 0000000..876a143 --- /dev/null +++ b/videodecoder/VideoDecoderBase.cpp @@ -0,0 +1,842 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +*/ + +#include "VideoDecoderBase.h" +#include "VideoDecoderTrace.h" +#include +#include +#include + +#define INVALID_PTS ((uint64_t)-1) +#define INVALID_POC ((uint32_t)-1) +#define ANDROID_DISPLAY_HANDLE 0x18C34078 + +// TODO: check what is the best number. Must be at least 2 to support one backward reference frame. +// Currently set to 3 to support 2 backward reference frames. This value is used for AVC frame reordering only. +// e.g: +// POC: 4P, 8P, 10P, 6B and mNextOutputPOC = 5 +#define OUTPUT_WINDOW_SIZE 3 + + +VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type) + : mDisplay(NULL), + mVADisplay(NULL), + mVAContext(VA_INVALID_ID), + mVAConfig(VA_INVALID_ID), + + mVAStarted(false), + + mCurrentPTS(INVALID_PTS), + mAcquiredBuffer(NULL), + mLastReference(NULL), + mForwardReference(NULL), + mDecodingFrame(false), + mSizeChanged(false), + + // private member variables + mFirstFrame(true), + mLowDelay(false), + mRawOutput(false), + mManageReference(true), + mOutputMethod (OUTPUT_BY_PCT), + mOutputWindowSize(OUTPUT_WINDOW_SIZE), + mNumSurfaces(0), + mSurfaceBuffers(NULL), + mOutputHead(NULL), + mOutputTail(NULL), + mSurfaces(NULL), + mSurfaceUserPtr(NULL), + mSurfaceAcquirePos(0), + mNextOutputPOC(0), + mParserType(type), + mParserHandle(NULL) { + + memset(&mVideoFormatInfo, 0, sizeof(VideoFormatInfo)); + memset(&mConfigBuffer, 0, sizeof(mConfigBuffer)); + mVideoFormatInfo.mimeType = strdup(mimeType); +} + +VideoDecoderBase::~VideoDecoderBase() { + stop(); + free(mVideoFormatInfo.mimeType); +} + +Decode_Status VideoDecoderBase::start(VideoConfigBuffer *buffer) { + if (buffer == NULL) { + return DECODE_INVALID_DATA; + } + + if (mParserHandle != NULL) { + WTRACE("Decoder has already started."); + return DECODE_SUCCESS; + } + + if ((int32_t)mParserType != VBP_INVALID) { + if (vbp_open(mParserType, &mParserHandle) != VBP_OK) { + ETRACE("Failed to open VBP parser."); + return DECODE_NO_PARSER; + } + } + // keep a copy of configure buffer, meta data only. It can be used to override VA setup parameter. 
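+ // (data and size are cleared right below, so only flags, dimensions, and surface settings survive in the copy.)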
+ mConfigBuffer = *buffer; + mConfigBuffer.data = NULL; + mConfigBuffer.size = 0; + + mVideoFormatInfo.width = buffer->width; + mVideoFormatInfo.height = buffer->height; + mLowDelay = buffer->flag & WANT_LOW_DELAY; + mRawOutput = buffer->flag & WANT_RAW_OUTPUT; + if (mRawOutput) { + WTRACE("Output is raw data."); + } + + return DECODE_SUCCESS; +} + +void VideoDecoderBase::stop(void) { + terminateVA(); + + mCurrentPTS = INVALID_PTS; + mAcquiredBuffer = NULL; + mLastReference = NULL; + mForwardReference = NULL; + mDecodingFrame = false; + mSizeChanged = false; + + // private variables + mFirstFrame = true; + mLowDelay = false; + mRawOutput = false; + mNumSurfaces = 0; + mSurfaceAcquirePos = 0; + mNextOutputPOC = 0; + + mVideoFormatInfo.valid = false; + if (mParserHandle) { + vbp_close(mParserHandle); + mParserHandle = NULL; + } +} + +void VideoDecoderBase::flush(void) { + if (mVAStarted == false) { + // nothing to flush at this stage + return; + } + + endDecodingFrame(true); + + // avoid setting mSurfaceAcquirePos to 0 as it may cause tearing + // (surface is still being rendered) + mSurfaceAcquirePos = (mSurfaceAcquirePos + 1) % mNumSurfaces; + mNextOutputPOC = 0; + mCurrentPTS = INVALID_PTS; + mAcquiredBuffer = NULL; + mLastReference = NULL; + mForwardReference = NULL; + mOutputHead = NULL; + mOutputTail = NULL; + mDecodingFrame = false; + mSizeChanged = false; + + mFirstFrame = true; + // initialize surface buffer without resetting mapped/raw data + initSurfaceBuffer(false); +} + +const VideoFormatInfo* VideoDecoderBase::getFormatInfo(void) { + return &mVideoFormatInfo; +} + +const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { + if (mVAStarted == false) { + return NULL; + } + + if (draining) { + // complete decoding the last frame and ignore return + endDecodingFrame(false); + } + + if (mOutputHead == NULL) { + return NULL; + } + + // output by position (the first buffer) + VideoSurfaceBuffer *outputByPos = mOutputHead; + + if (mLowDelay || mFirstFrame) { + mOutputHead = mOutputHead->next; + if (mOutputHead == NULL) { + mOutputTail = NULL; + } + mFirstFrame = false; + mNextOutputPOC = outputByPos->pictureOrder + 1; + + //VTRACE("Output POC %u for display (pts = %.2f)", outputByPos->pictureOrder, outputByPos->renderBuffer.timeStamp/1E6); + return &(outputByPos->renderBuffer); + } + + // output by presentation time stamp (the smallest pts) + VideoSurfaceBuffer *outputByPts = NULL; + + // output by picture coding type (PCT) or by picture order count (POC) + // for output by PCT: + // if there is more than one reference frame, the first reference frame is output, otherwise, + // output non-reference frame if any.
+ // for output by POC: + // output the buffer whose POC equals mNextOutputPOC or, once the window is full, the smallest POC at or above it. + VideoSurfaceBuffer *output = NULL; + + VideoSurfaceBuffer *p = mOutputHead; + int32_t reference = 0; + int32_t count = 0; + uint64_t pts = INVALID_PTS; + uint32_t poc = INVALID_POC; + do { + if ((uint64_t)(p->renderBuffer.timeStamp) <= pts) { + // find buffer with the smallest PTS + pts = p->renderBuffer.timeStamp; + outputByPts = p; + } + + if (mOutputMethod == OUTPUT_BY_PCT) { + if (p->referenceFrame) { + reference++; + } else if (output == NULL) { + // first non-reference frame + output = p; + } + + if (reference > 1 && output == NULL) { + // first reference frame + output = outputByPos; + } + } else if (mOutputMethod == OUTPUT_BY_POC) { + count++; + if (p->pictureOrder == 0) { + // any picture before this POC (new IDR) must be output + if (output == NULL) { + output = p; + mNextOutputPOC = 1; + } else { + mNextOutputPOC = output->pictureOrder + 1; + } + break; + } + if (p->pictureOrder < poc && p->pictureOrder >= mNextOutputPOC) { + // this POC meets the output criteria. + poc = p->pictureOrder; + output = p; + } + if (poc == mNextOutputPOC || count == mOutputWindowSize) { + if (output != NULL) { + // this indicates two cases: + // 1) the next output POC is found. + // 2) output queue is full and there is at least one buffer meeting the output criteria. + mNextOutputPOC = output->pictureOrder + 1; + break; + } else { + // this indicates output queue is full and no buffer in the queue meets the output criteria + // restart processing as queue is FULL and output criteria is changed. (next output POC is 0) + mNextOutputPOC = 0; + count = 0; + reference = 0; + poc = INVALID_POC; + pts = INVALID_PTS; + p = mOutputHead; + continue; + } + } + if (p->next == NULL) { + output = NULL; + } + + } else { + ETRACE("Invalid output method."); + return NULL; + } + + p = p->next; + } while (p != NULL); + + if (output != NULL) { + if (output != outputByPts) { + // swap time stamp + uint64_t ts = output->renderBuffer.timeStamp; + output->renderBuffer.timeStamp = outputByPts->renderBuffer.timeStamp; + outputByPts->renderBuffer.timeStamp = ts; + } + + if (output != outputByPos) { + // remove this output from middle or end of the list + p = outputByPos; + while (p->next != output) { + p = p->next; + } + p->next = output->next; + if (mOutputTail == output) { + mOutputTail = p; + } + } else { + // remove this output from head of the list + mOutputHead = mOutputHead->next; + if (mOutputHead == NULL) { + mOutputTail = NULL; + } + } + + //VTRACE("Output POC %u for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6); + return &(output->renderBuffer); + } + + if (draining) { + // output buffer at the head of the list + mOutputHead = mOutputHead->next; + if (mOutputHead == NULL) { + mOutputTail = NULL; + } + return &(outputByPos->renderBuffer); + } + + return NULL; +} + + +Decode_Status VideoDecoderBase::acquireSurfaceBuffer(void) { + if (mVAStarted == false) { + return DECODE_FAIL; + } + + if (mAcquiredBuffer != NULL) { + ETRACE("mAcquiredBuffer is not NULL. Implementation bug."); + return DECODE_FAIL; + } + + int nextAcquire = mSurfaceAcquirePos; + VideoSurfaceBuffer *acquiredBuffer = NULL; + bool acquired = false; + while (acquired == false) { + acquiredBuffer = mSurfaceBuffers + nextAcquire; + if (acquiredBuffer->asReferernce == false && acquiredBuffer->renderBuffer.renderDone == true) { + // this is a potential buffer for acquisition.
Check if it is referenced by another surface for frame skipping + VideoSurfaceBuffer *temp; + acquired = true; + for (int i = 0; i < mNumSurfaces; i++) { + if (i == nextAcquire) { + continue; + } + temp = mSurfaceBuffers + i; + // use mSurfaces[nextAcquire] instead of acquiredBuffer->renderBuffer.surface as it's the actual surface to use. + if (temp->renderBuffer.surface == mSurfaces[nextAcquire] && + temp->renderBuffer.renderDone == false) { + ITRACE("Surface is referenced by another surface buffer."); + acquired = false; + break; + } + } + } + if (acquired) { + break; + } + nextAcquire++; + if (nextAcquire == mNumSurfaces) { + nextAcquire = 0; + } + if (nextAcquire == mSurfaceAcquirePos) { + return DECODE_NO_SURFACE; + } + } + + if (acquired == false) { + return DECODE_NO_SURFACE; + } + + mAcquiredBuffer = acquiredBuffer; + mSurfaceAcquirePos = nextAcquire; + + // set surface again as surface may be reset by a skipped frame. + // a skipped frame is a "non-coded frame" and the decoder needs to duplicate the previous reference frame as the output. + mAcquiredBuffer->renderBuffer.surface = mSurfaces[mSurfaceAcquirePos]; + if (mSurfaceUserPtr && mAcquiredBuffer->mappedData) { + mAcquiredBuffer->mappedData->data = mSurfaceUserPtr[mSurfaceAcquirePos]; + } + mAcquiredBuffer->renderBuffer.timeStamp = INVALID_PTS; + mAcquiredBuffer->renderBuffer.display = mVADisplay; + mAcquiredBuffer->renderBuffer.flag = 0; + mAcquiredBuffer->renderBuffer.renderDone = true; + mAcquiredBuffer->asReferernce = false; + + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderBase::outputSurfaceBuffer(void) { + Decode_Status status; + if (mAcquiredBuffer == NULL) { + ETRACE("mAcquiredBuffer is NULL. Implementation bug."); + return DECODE_FAIL; + } + + if (mRawOutput) { + status = getRawDataFromSurface(); + CHECK_STATUS(); + } + // frame is successfully decoded to the current surface; it is ready for output + mAcquiredBuffer->renderBuffer.renderDone = false; + // decoder must set "asReference and referenceFrame" flags properly + + // update reference frames + if (mAcquiredBuffer->referenceFrame) { + if (mManageReference) { + // managing references for MPEG4/H.263/WMV. + // AVC should manage reference frame in a different way + if (mForwardReference != NULL) { + // this forward reference is no longer needed + mForwardReference->asReferernce = false; + } + // Forward reference for either P or B frame prediction + mForwardReference = mLastReference; + mAcquiredBuffer->asReferernce = true; + } + + // the last reference frame. + mLastReference = mAcquiredBuffer; + } + // add to the output list + if (mOutputHead == NULL) { + mOutputHead = mAcquiredBuffer; + } else { + mOutputTail->next = mAcquiredBuffer; + } + mOutputTail = mAcquiredBuffer; + mOutputTail->next = NULL; + + //VTRACE("Pushing POC %u to queue (pts = %.2f)", mAcquiredBuffer->pictureOrder, mAcquiredBuffer->renderBuffer.timeStamp/1E6); + + mAcquiredBuffer = NULL; + mSurfaceAcquirePos = (mSurfaceAcquirePos + 1) % mNumSurfaces; + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderBase::releaseSurfaceBuffer(void) { + if (mAcquiredBuffer == NULL) { + // this is a harmless error + return DECODE_SUCCESS; + } + + // frame is not decoded to the acquired buffer; the current surface is invalid and can't be output.
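+ // (renderDone is set back to true below so acquireSurfaceBuffer() can hand this slot out again.)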
+ mAcquiredBuffer->asReferernce = false;
+ mAcquiredBuffer->renderBuffer.renderDone = true;
+ mAcquiredBuffer = NULL;
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) {
+ Decode_Status status = DECODE_SUCCESS;
+ VAStatus vaStatus;
+
+ if (mDecodingFrame == false) {
+ if (mAcquiredBuffer != NULL) {
+ //ETRACE("mAcquiredBuffer is not NULL. Implementation bug.");
+ releaseSurfaceBuffer();
+ status = DECODE_FAIL;
+ }
+ return status;
+ }
+ // return through exit label to reset mDecodingFrame
+ if (mAcquiredBuffer == NULL) {
+ ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
+ status = DECODE_FAIL;
+ goto exit;
+ }
+
+ vaStatus = vaEndPicture(mVADisplay, mVAContext);
+ if (vaStatus != VA_STATUS_SUCCESS) {
+ releaseSurfaceBuffer();
+ ETRACE("vaEndPicture failed. vaStatus = %d", vaStatus);
+ status = DECODE_DRIVER_FAIL;
+ goto exit;
+ }
+
+ if (dropFrame) {
+ // we are asked to drop this decoded picture
+ releaseSurfaceBuffer();
+ goto exit;
+ }
+
+ status = outputSurfaceBuffer();
+ // fall through
+exit:
+ mDecodingFrame = false;
+ return status;
+}
+
+
+Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) {
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ Decode_Status status;
+ VAConfigAttrib attrib;
+
+ if (mVAStarted) {
+ return DECODE_SUCCESS;
+ }
+
+ // TODO: validate profile
+ if (numSurface == 0) {
+ return DECODE_FAIL;
+ }
+
+ if (mConfigBuffer.flag & HAS_MINIMUM_SURFACE_NUMBER) {
+ if (numSurface < mConfigBuffer.surfaceNumber) {
+ LOGW("number of surfaces to allocate (%d) is less than the minimum required (%d)",
+ numSurface, mConfigBuffer.surfaceNumber);
+ numSurface = mConfigBuffer.surfaceNumber;
+ }
+ }
+
+ if (mVADisplay != NULL) {
+ ETRACE("VA is partially started.");
+ return DECODE_FAIL;
+ }
+
+ // Display is defined as "unsigned int"
+
+ mDisplay = new Display;
+ *mDisplay = ANDROID_DISPLAY_HANDLE;
+
+ mVADisplay = vaGetDisplay(mDisplay);
+ if (mVADisplay == NULL) {
+ ETRACE("vaGetDisplay failed.");
+ return DECODE_DRIVER_FAIL;
+ }
+
+ int majorVersion, minorVersion;
+ vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion);
+ CHECK_VA_STATUS("vaInitialize");
+
+ if ((int32_t)profile != VAProfileSoftwareDecoding) {
+ // We are requesting RT attributes
+ attrib.type = VAConfigAttribRTFormat;
+ attrib.value = VA_RT_FORMAT_YUV420;
+
+ vaStatus = vaCreateConfig(
+ mVADisplay,
+ profile,
+ VAEntrypointVLD,
+ &attrib,
+ 1,
+ &mVAConfig);
+ CHECK_VA_STATUS("vaCreateConfig");
+ }
+
+ mNumSurfaces = numSurface;
+ mSurfaces = new VASurfaceID [mNumSurfaces];
+ if (mSurfaces == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+
+ vaStatus = vaCreateSurfaces(
+ mVADisplay,
+ mVideoFormatInfo.width,
+ mVideoFormatInfo.height,
+ VA_RT_FORMAT_YUV420,
+ mNumSurfaces,
+ mSurfaces);
+ CHECK_VA_STATUS("vaCreateSurfaces");
+
+ mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width;
+ mVideoFormatInfo.surfaceHeight = mVideoFormatInfo.height;
+ mVideoFormatInfo.surfaceNumber = mNumSurfaces;
+
+ if ((int32_t)profile != VAProfileSoftwareDecoding) {
+ vaStatus = vaCreateContext(
+ mVADisplay,
+ mVAConfig,
+ mVideoFormatInfo.width,
+ mVideoFormatInfo.height,
+ 0,
+ mSurfaces,
+ mNumSurfaces,
+ &mVAContext);
+ CHECK_VA_STATUS("vaCreateContext");
+ }
+
+ mSurfaceBuffers = new VideoSurfaceBuffer [mNumSurfaces];
+ if (mSurfaceBuffers == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+
+ initSurfaceBuffer(true);
+
+ if ((int32_t)profile == VAProfileSoftwareDecoding) {
+ // derive user pointer from surface for direct access
+ status = mapSurface();
+
CHECK_STATUS("mapSurface") + } + + mVAStarted = true; + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderBase::terminateVA(void) { + if (mSurfaceBuffers) { + for (int32_t i = 0; i < mNumSurfaces; i++) { + if (mSurfaceBuffers[i].renderBuffer.rawData) { + if (mSurfaceBuffers[i].renderBuffer.rawData->data) { + delete [] mSurfaceBuffers[i].renderBuffer.rawData->data; + } + delete mSurfaceBuffers[i].renderBuffer.rawData; + } + if (mSurfaceBuffers[i].mappedData) { + // don't delete data pointer as it is mapped from surface + delete mSurfaceBuffers[i].mappedData; + } + } + delete [] mSurfaceBuffers; + mSurfaceBuffers = NULL; + } + + if (mSurfaceUserPtr) { + delete [] mSurfaceUserPtr; + mSurfaceUserPtr = NULL; + } + + if (mSurfaces) + { + vaDestroySurfaces(mVADisplay, mSurfaces, mNumSurfaces); + delete [] mSurfaces; + mSurfaces = NULL; + } + + if (mVAContext != VA_INVALID_ID) { + vaDestroyContext(mVADisplay, mVAContext); + mVAContext = VA_INVALID_ID; + } + + if (mVAConfig != VA_INVALID_ID) { + vaDestroyConfig(mVADisplay, mVAConfig); + mVAConfig = VA_INVALID_ID; + } + + if (mVADisplay) { + vaTerminate(mVADisplay); + mVADisplay = NULL; + } + + if (mDisplay) { + delete mDisplay; + mDisplay = NULL; + } + + mVAStarted = false; + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderBase::parseBuffer(uint8_t *buffer, int32_t size, bool config, void** vbpData) { + // DON'T check if mVAStarted == true + if (mParserHandle == NULL) { + return DECODE_NO_PARSER; + } + + uint32_t vbpStatus; + if (buffer == NULL || size <= 0) { + return DECODE_INVALID_DATA; + } + + uint8_t configFlag = config ? 1 : 0; + vbpStatus = vbp_parse(mParserHandle, buffer, size, configFlag); + CHECK_VBP_STATUS("vbp_parse"); + + vbpStatus = vbp_query(mParserHandle, vbpData); + CHECK_VBP_STATUS("vbp_query"); + + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderBase::mapSurface(void){ + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAImage image; + uint8_t *userPtr; + mSurfaceUserPtr = new uint8_t* [mNumSurfaces]; + if (mSurfaceUserPtr == NULL) { + return DECODE_MEMORY_FAIL; + } + + for (int32_t i = 0; i< mNumSurfaces; i++) { + vaStatus = vaDeriveImage(mVADisplay, mSurfaces[i], &image); + CHECK_VA_STATUS("vaDeriveImage"); + vaStatus = vaMapBuffer(mVADisplay, image.buf, (void**)&userPtr); + CHECK_VA_STATUS("vaMapBuffer"); + mSurfaceUserPtr[i] = userPtr; + mSurfaceBuffers[i].mappedData = new VideoFrameRawData; + if (mSurfaceBuffers[i].mappedData == NULL) { + return DECODE_MEMORY_FAIL; + } + mSurfaceBuffers[i].mappedData->own = false; // derived from surface so can't be released + mSurfaceBuffers[i].mappedData->data = NULL; // specified during acquireSurfaceBuffer + mSurfaceBuffers[i].mappedData->fourcc = image.format.fourcc; + mSurfaceBuffers[i].mappedData->width = mVideoFormatInfo.width; + mSurfaceBuffers[i].mappedData->height = mVideoFormatInfo.height; + mSurfaceBuffers[i].mappedData->size = image.data_size; + for (int pi = 0; pi < 3; pi++) { + mSurfaceBuffers[i].mappedData->pitch[pi] = image.pitches[pi]; + mSurfaceBuffers[i].mappedData->offset[pi] = image.offsets[pi]; + } + // debug information + if (image.pitches[0] != image.pitches[1] || + image.width != mVideoFormatInfo.width || + image.height != mVideoFormatInfo.height || + image.offsets[0] != 0) { + WTRACE("Unexpected VAImage format, w = %d, h = %d, offset = %d", image.width, image.height, image.offsets[0]); + } + // TODO: do we need to unmap buffer? 
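+ // (Presumably not while mSurfaceUserPtr[i] is still in use: the pointer
+ // returned by vaMapBuffer is only valid until vaUnmapBuffer, so unmapping
+ // here would invalidate the saved user pointer.)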
+ //vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
+ //CHECK_VA_STATUS("vaUnmapBuffer");
+ vaStatus = vaDestroyImage(mVADisplay, image.image_id);
+ CHECK_VA_STATUS("vaDestroyImage");
+
+ }
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderBase::getRawDataFromSurface(void) {
+ if (mAcquiredBuffer == NULL) {
+ return DECODE_FAIL;
+ }
+
+ VAStatus vaStatus;
+ VAImageFormat imageFormat;
+ VAImage vaImage;
+ vaStatus = vaSyncSurface(mVADisplay, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaSyncSurface");
+
+ vaImage.image_id = VA_INVALID_ID;
+ // driver currently only supports NV12 and IYUV format.
+ // byte_order information is from driver and hard-coded here
+ imageFormat.fourcc = VA_FOURCC_NV12;
+ imageFormat.byte_order = VA_LSB_FIRST;
+ imageFormat.bits_per_pixel = 16;
+ vaStatus = vaCreateImage(
+ mVADisplay,
+ &imageFormat,
+ mVideoFormatInfo.width,
+ mVideoFormatInfo.height,
+ &vaImage);
+ CHECK_VA_STATUS("vaCreateImage");
+
+ vaStatus = vaGetImage(
+ mVADisplay,
+ mAcquiredBuffer->renderBuffer.surface,
+ 0,
+ 0,
+ vaImage.width,
+ vaImage.height,
+ vaImage.image_id);
+ CHECK_VA_STATUS("vaGetImage");
+
+ void *pBuf = NULL;
+ vaStatus = vaMapBuffer(mVADisplay, vaImage.buf, &pBuf);
+ CHECK_VA_STATUS("vaMapBuffer");
+
+ VideoFrameRawData *rawData = NULL;
+ if (mAcquiredBuffer->renderBuffer.rawData == NULL) {
+ rawData = new VideoFrameRawData;
+ if (rawData == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+ memset(rawData, 0, sizeof(VideoFrameRawData));
+ mAcquiredBuffer->renderBuffer.rawData = rawData;
+ } else {
+ rawData = mAcquiredBuffer->renderBuffer.rawData;
+ }
+ // size in NV12 format
+ int32_t size = mVideoFormatInfo.width * mVideoFormatInfo.height * 3/2;
+ if (rawData->data != NULL && rawData->size != size) {
+ delete [] rawData->data;
+ rawData->data = NULL;
+ rawData->size = 0;
+ }
+ if (rawData->data == NULL) {
+ rawData->data = new uint8_t [size];
+ if (rawData->data == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+ }
+ rawData->own = true; // allocated by this library
+ rawData->width = mVideoFormatInfo.width;
+ rawData->height = mVideoFormatInfo.height;
+ rawData->pitch[0] = mVideoFormatInfo.width;
+ rawData->pitch[1] = mVideoFormatInfo.width;
+ rawData->pitch[2] = 0; // interleaved U/V, two planes
+ rawData->offset[0] = 0;
+ rawData->offset[1] = mVideoFormatInfo.width * mVideoFormatInfo.height;
+ rawData->offset[2] = mVideoFormatInfo.width * mVideoFormatInfo.height * 3/2;
+ rawData->size = size;
+ rawData->fourcc = VA_FOURCC_NV12;
+ if (size == (int32_t)vaImage.data_size) {
+ memcpy(rawData->data, pBuf, size);
+ } else {
+ // copy Y data
+ uint8_t *src = (uint8_t*)pBuf;
+ uint8_t *dst = rawData->data;
+ int32_t row = 0;
+ for (row = 0; row < mVideoFormatInfo.height; row++) {
+ memcpy(dst, src, mVideoFormatInfo.width);
+ dst += mVideoFormatInfo.width;
+ src += vaImage.pitches[0];
+ }
+ // copy interleaved V and U data
+ src = (uint8_t*)pBuf + vaImage.offsets[1];
+ for (row = 0; row < mVideoFormatInfo.height/2; row++) {
+ memcpy(dst, src, mVideoFormatInfo.width);
+ dst += mVideoFormatInfo.width;
+ src += vaImage.pitches[1];
+ }
+ }
+ // TODO: image may not get destroyed if error happens.
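+ // NV12 sizing recap for the copy above, using a hypothetical 320x240 frame:
+ //   Y plane: 320*240 = 76800 bytes at offset 0
+ //   interleaved UV plane: 320*120 = 38400 bytes at offset 76800
+ //   total: 320*240*3/2 = 115200 bytes (offset[2] marks the end of the buffer)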
+ if (vaImage.image_id != VA_INVALID_ID) { + vaDestroyImage(mVADisplay, vaImage.image_id); + } + return DECODE_SUCCESS; +} + +void VideoDecoderBase::initSurfaceBuffer(bool reset) { + for (int32_t i = 0; i < mNumSurfaces; i++) { + mSurfaceBuffers[i].renderBuffer.display = mVADisplay; + mSurfaceBuffers[i].renderBuffer.surface = VA_INVALID_SURFACE; // set in acquireSurfaceBuffer + mSurfaceBuffers[i].renderBuffer.flag = 0; + mSurfaceBuffers[i].renderBuffer.scanFormat = VA_FRAME_PICTURE; + mSurfaceBuffers[i].renderBuffer.timeStamp = 0; + mSurfaceBuffers[i].renderBuffer.renderDone = true; + mSurfaceBuffers[i].referenceFrame = false; + mSurfaceBuffers[i].asReferernce= false; + mSurfaceBuffers[i].pictureOrder = 0; + mSurfaceBuffers[i].next = NULL; + if (reset == true) { + mSurfaceBuffers[i].renderBuffer.rawData = NULL; + mSurfaceBuffers[i].mappedData = NULL; + } + } +} + diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h new file mode 100644 index 0000000..39ffe07 --- /dev/null +++ b/videodecoder/VideoDecoderBase.h @@ -0,0 +1,134 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef VIDEO_DECODER_BASE_H_ +#define VIDEO_DECODER_BASE_H_ + +#include +#include "VideoDecoderDefs.h" +#include "VideoDecoderInterface.h" + +extern "C" { +#include "vbp_loader.h" +} + +#ifndef Display +typedef unsigned int Display; +#endif + + +class VideoDecoderBase : public IVideoDecoder { +public: + VideoDecoderBase(const char *mimeType, _vbp_parser_type type); + virtual ~VideoDecoderBase(); + + virtual Decode_Status start(VideoConfigBuffer *buffer); + virtual void stop(void); + //virtual Decode_Status decode(VideoDecodeBuffer *buffer); + virtual void flush(void); + virtual const VideoRenderBuffer* getOutput(bool draining = false); + virtual const VideoFormatInfo* getFormatInfo(void); + +protected: + // each acquireSurfaceBuffer must be followed by a corresponding outputSurfaceBuffer or releaseSurfaceBuffer. 
+ // Only one surface buffer can be acquired at any given time
+ virtual Decode_Status acquireSurfaceBuffer(void);
+ // frame is successfully decoded to the acquired surface buffer and surface is ready for output
+ virtual Decode_Status outputSurfaceBuffer(void);
+ // the acquired surface buffer is not used and is returned to the free pool
+ virtual Decode_Status releaseSurfaceBuffer(void);
+ virtual Decode_Status endDecodingFrame(bool dropFrame);
+ virtual Decode_Status setupVA(int32_t numSurface, VAProfile profile);
+ virtual Decode_Status terminateVA(void);
+ virtual Decode_Status parseBuffer(uint8_t *buffer, int32_t size, bool config, void** vbpData);
+
+ static inline int32_t alignMB(int32_t a) {
+ return ((a + 15) & (~15));
+ }
+
+private:
+ Decode_Status mapSurface(void);
+ Decode_Status getRawDataFromSurface(void);
+ void initSurfaceBuffer(bool reset);
+
+protected:
+ VideoFormatInfo mVideoFormatInfo;
+ Display *mDisplay;
+ VADisplay mVADisplay;
+ VAContextID mVAContext;
+ VAConfigID mVAConfig;
+ bool mVAStarted;
+ uint64_t mCurrentPTS; // current presentation time stamp (unit is framework-dependent: GStreamer uses 100-nanosecond units, Android uses microseconds)
+ // the following three member variables should be set using
+ // acquireSurfaceBuffer/outputSurfaceBuffer/releaseSurfaceBuffer
+ VideoSurfaceBuffer *mAcquiredBuffer;
+ VideoSurfaceBuffer *mLastReference;
+ VideoSurfaceBuffer *mForwardReference;
+ VideoConfigBuffer mConfigBuffer; // only stores configuration metadata.
+ bool mDecodingFrame; // indicates whether a frame is being decoded
+ bool mSizeChanged; // indicates whether the video size has changed.
+
+ enum {
+ // TODO: move this to vbp_loader.h
+ VBP_INVALID = 0xFF,
+ // TODO: move this to va.h
+ VAProfileSoftwareDecoding = 0xFF,
+ };
+
+ enum OUTPUT_METHOD {
+ // output by Picture Coding Type (I, P, B)
+ OUTPUT_BY_PCT,
+ // output by Picture Order Count (for AVC only)
+ OUTPUT_BY_POC,
+ //OUTPUT_BY_POS,
+ //OUTPUT_BY_PTS,
+ };
+
+private:
+ bool mFirstFrame;
+ bool mLowDelay; // when true, decoded frame is immediately output for rendering
+ bool mRawOutput; // whether to output NV12 raw data
+ bool mManageReference; // this should stay true for VC1/MP4 decoders, and stay false for the AVC decoder; AVC manages reference frames itself using the DPB
+ OUTPUT_METHOD mOutputMethod;
+ int32_t mOutputWindowSize; // limits the number of outstanding frames held for output
+
+ int32_t mNumSurfaces;
+ VideoSurfaceBuffer *mSurfaceBuffers;
+ VideoSurfaceBuffer *mOutputHead; // head of output buffer list
+ VideoSurfaceBuffer *mOutputTail; // tail of output buffer list
+ VASurfaceID *mSurfaces; // surfaces array
+ uint8_t **mSurfaceUserPtr; // mapped user space pointer
+ int32_t mSurfaceAcquirePos; // position of surface to start acquiring
+ uint32_t mNextOutputPOC; // Picture order count of next output
+ _vbp_parser_type mParserType;
+ void *mParserHandle;
+
+protected:
+ void ManageReference(bool enable) {mManageReference = enable;}
+ void setOutputMethod(OUTPUT_METHOD method) {mOutputMethod = method;}
+ void setOutputWindowSize(int32_t size) {mOutputWindowSize = size;}
+};
+
+
+#endif // VIDEO_DECODER_BASE_H_
diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h
new file mode 100644
index 0000000..40754ab
--- /dev/null
+++ b/videodecoder/VideoDecoderDefs.h
@@ -0,0 +1,167 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#ifndef VIDEO_DECODER_DEFS_H_
+#define VIDEO_DECODER_DEFS_H_
+
+#include
+#include
+
+// format specific data, for future extension.
+struct VideoFormatSpecificData {
+ int32_t formatType;
+ int32_t formatSize;
+ uint8_t *formatData;
+};
+
+struct VideoFrameRawData {
+ int32_t width;
+ int32_t height;
+ int32_t pitch[3];
+ int32_t offset[3];
+ uint32_t fourcc; //NV12
+ int32_t size;
+ uint8_t *data;
+ bool own; // own data or derived from surface. If true, the library will release the memory during cleanup
+};
+
+// flags for VideoDecodeBuffer, VideoConfigBuffer and VideoRenderBuffer
+typedef enum {
+ // indicates the sample has a discontinuity in time stamp (usually happens after seeking)
+ HAS_DISCONTINUITY = 0x01,
+
+ // indicates whether the sample contains a complete frame or end of frame.
+ HAS_COMPLETE_FRAME = 0x02,
+
+ // indicates whether the surfaceNumber field in the VideoConfigBuffer is valid
+ HAS_SURFACE_NUMBER = 0x04,
+
+ // indicates whether the profile field in the VideoConfigBuffer is valid
+ HAS_VA_PROFILE = 0x08,
+
+ // indicates whether output order will be the same as decoding order
+ WANT_LOW_DELAY = 0x10, // make display order same as decoding order
+
+ // indicates whether the error concealment algorithm should be enabled to automatically conceal errors.
+ WANT_ERROR_CONCEALMENT = 0x20,
+
+ // indicates whether raw data should be output.
+ WANT_RAW_OUTPUT = 0x40,
+
+ // indicates the sample should be decoded but not displayed.
+ WANT_DECODE_ONLY = 0x80,
+
+ // indicates the surfaceNumber field is valid and contains the minimum number of surfaces to allocate.
+ HAS_MINIMUM_SURFACE_NUMBER = 0x100,
+} VIDEO_BUFFER_FLAG;
+
+struct VideoDecodeBuffer {
+ uint8_t *data;
+ int32_t size;
+ int64_t timeStamp;
+ uint32_t flag;
+ VideoFormatSpecificData *ext;
+};
+
+
+struct VideoConfigBuffer {
+ uint8_t *data;
+ int32_t size;
+ int32_t width;
+ int32_t height;
+ int32_t surfaceNumber;
+ VAProfile profile;
+ uint32_t flag;
+ VideoFormatSpecificData *ext;
+};
+
+struct VideoRenderBuffer {
+ VASurfaceID surface;
+ VADisplay display;
+ int32_t scanFormat; //progressive, top-field first, or bottom-field first
+ int64_t timeStamp; // presentation time stamp
+ mutable volatile bool renderDone; // indicates whether the frame has been rendered; this must be set to true by the client of this library once
+ // the surface is rendered. Not setting this flag will lead to DECODE_NO_SURFACE error.
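+ // renderDone usage sketch (hypothetical client loop; render() is a placeholder):
+ //   const VideoRenderBuffer *buf = decoder->getOutput();
+ //   if (buf) {
+ //       render(buf->surface);
+ //       buf->renderDone = true; // return the surface to the decoder's free pool
+ //   }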
+ uint32_t flag;
+ VideoFrameRawData *rawData;
+};
+
+
+struct VideoSurfaceBuffer {
+ VideoRenderBuffer renderBuffer;
+ uint32_t pictureOrder; // picture order count, valid only for AVC format
+ bool referenceFrame; // indicates whether the frame associated with this surface is a reference I/P frame
+ bool asReferernce; // indicates whether the frame is in use as a reference (as a result the surface cannot be used for decoding)
+ VideoFrameRawData *mappedData;
+ VideoSurfaceBuffer *next;
+};
+
+struct VideoFormatInfo {
+ bool valid; // indicates whether format info is valid. MimeType is always valid.
+ char *mimeType;
+ int32_t width;
+ int32_t height;
+ int32_t surfaceWidth;
+ int32_t surfaceHeight;
+ int32_t surfaceNumber;
+ int32_t aspectX;
+ int32_t aspectY;
+ int32_t cropLeft;
+ int32_t cropRight;
+ int32_t cropTop;
+ int32_t cropBottom;
+ int32_t colorMatrix;
+ int32_t videoRange;
+ int32_t bitrate;
+ int32_t framerateNom;
+ int32_t framerateDenom;
+ VideoFormatSpecificData *ext;
+};
+
+// TODO: categorize the following errors as fatal and non-fatal.
+typedef enum {
+ DECODE_NOT_STARTED = -10,
+ DECODE_NEED_RESTART = -9,
+ DECODE_NO_CONFIG = -8,
+ DECODE_NO_SURFACE = -7,
+ DECODE_NO_REFERENCE = -6,
+ DECODE_NO_PARSER = -5,
+ DECODE_INVALID_DATA = -4,
+ DECODE_DRIVER_FAIL = -3,
+ DECODE_PARSER_FAIL = -2,
+ DECODE_MEMORY_FAIL = -1,
+ DECODE_FAIL = 0,
+ DECODE_SUCCESS = 1,
+ DECODE_FORMAT_CHANGE = 2,
+ DECODE_FRAME_DROPPED = 3,
+} VIDEO_DECODE_STATUS;
+
+typedef int32_t Decode_Status;
+
+#ifndef NULL
+#define NULL 0
+#endif
+
+
+#endif // VIDEO_DECODER_DEFS_H_
diff --git a/videodecoder/VideoDecoderHost.cpp b/videodecoder/VideoDecoderHost.cpp
new file mode 100644
index 0000000..d946a2b
--- /dev/null
+++ b/videodecoder/VideoDecoderHost.cpp
@@ -0,0 +1,68 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+* +*/ + +#include "VideoDecoderWMV.h" +#include "VideoDecoderMPEG4.h" +#include "VideoDecoderAVC.h" +#include "VideoDecoderHost.h" +#include "VideoDecoderTrace.h" +#include + +IVideoDecoder* createVideoDecoder(const char* mimeType) { + if (mimeType == NULL) { + ETRACE("NULL mime type."); + return NULL; + } + + if (strcasecmp(mimeType, "video/wmv") == 0 || + strcasecmp(mimeType, "video/vc1") == 0) { + VideoDecoderWMV *p = new VideoDecoderWMV(mimeType); + return (IVideoDecoder *)p; + } else if (strcasecmp(mimeType, "video/avc") == 0 || + strcasecmp(mimeType, "video/h264") == 0) { + VideoDecoderAVC *p = new VideoDecoderAVC(mimeType); + return (IVideoDecoder *)p; + } else if (strcasecmp(mimeType, "video/mp4v-es") == 0 || + strcasecmp(mimeType, "video/mpeg4") == 0 || + strcasecmp(mimeType, "video/h263") == 0 || + strcasecmp(mimeType, "video/3gpp") == 0) { + VideoDecoderMPEG4 *p = new VideoDecoderMPEG4(mimeType); + return (IVideoDecoder *)p; + } else { + ETRACE("Unknown mime type: %s", mimeType); + } + return NULL; +} + +void releaseVideoDecoder(IVideoDecoder* p) { + if (p) { + const VideoFormatInfo *info = p->getFormatInfo(); + if (info && info->mimeType) { + ITRACE("Deleting decoder for %s", info->mimeType); + } + } + delete p; +} + + diff --git a/videodecoder/VideoDecoderHost.h b/videodecoder/VideoDecoderHost.h new file mode 100644 index 0000000..8062c42 --- /dev/null +++ b/videodecoder/VideoDecoderHost.h @@ -0,0 +1,37 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + +#ifndef VIDEO_DECODER_HOST_H_ +#define VIDEO_DECODER_HOST_H_ + + +#include "VideoDecoderInterface.h" + +IVideoDecoder* createVideoDecoder(const char* mimeType); +void releaseVideoDecoder(IVideoDecoder *p); + + + +#endif /* VIDEO_DECODER_HOST_H_ */ diff --git a/videodecoder/VideoDecoderInterface.h b/videodecoder/VideoDecoderInterface.h new file mode 100644 index 0000000..eb2d171 --- /dev/null +++ b/videodecoder/VideoDecoderInterface.h @@ -0,0 +1,42 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. 
The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + +#ifndef VIDEO_DECODER_INTERFACE_H_ +#define VIDEO_DECODER_INTERFACE_H_ + +#include "VideoDecoderDefs.h" + +class IVideoDecoder { +public: + virtual ~IVideoDecoder() {} + virtual Decode_Status start(VideoConfigBuffer *buffer) = 0; + virtual void stop(void) = 0; + virtual void flush() = 0; + virtual Decode_Status decode(VideoDecodeBuffer *buffer) = 0; + virtual const VideoRenderBuffer* getOutput(bool draining = false) = 0; + virtual const VideoFormatInfo* getFormatInfo(void) = 0; +}; + +#endif /* VIDEO_DECODER_INTERFACE_H_ */ diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp new file mode 100644 index 0000000..5975436 --- /dev/null +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -0,0 +1,497 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +*/ + +#include "VideoDecoderMPEG4.h" +#include "VideoDecoderTrace.h" +#include + +VideoDecoderMPEG4::VideoDecoderMPEG4(const char *mimeType) + : VideoDecoderBase(mimeType, VBP_MPEG4), + mLastVOPTimeIncrement(0), + mExpectingNVOP(false), + mSendIQMatrixBuf(false), + mLastVOPCodingType(MP4_VOP_TYPE_I) { +} + +VideoDecoderMPEG4::~VideoDecoderMPEG4() { + stop(); +} + +Decode_Status VideoDecoderMPEG4::start(VideoConfigBuffer *buffer) { + Decode_Status status; + + status = VideoDecoderBase::start(buffer); + CHECK_STATUS("VideoDecoderBase::start"); + + if (buffer->data == NULL || buffer->size == 0) { + WTRACE("No config data to start VA."); + return DECODE_SUCCESS; + } + + vbp_data_mp42 *data = NULL; + status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data); + CHECK_STATUS("VideoDecoderBase::parseBuffer"); + + status = startVA(data); + return status; +} + +void VideoDecoderMPEG4::stop(void) { + // drop the last frame and ignore return value + endDecodingFrame(true); + VideoDecoderBase::stop(); + + mLastVOPTimeIncrement = 0; + mExpectingNVOP = false; + mLastVOPCodingType = MP4_VOP_TYPE_I; +} + +Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) { + Decode_Status status; + vbp_data_mp42 *data = NULL; + if (buffer == NULL) { + return DECODE_INVALID_DATA; + } + status = VideoDecoderBase::parseBuffer( + buffer->data, + buffer->size, + false, + (void**)&data); + CHECK_STATUS("VideoDecoderBase::parseBuffer"); + + if (!mVAStarted) { + status = startVA(data); + CHECK_STATUS("startVA"); + } + + status = decodeFrame(buffer, data); + CHECK_STATUS("decodeFrame"); + if (mSizeChanged) { + mSizeChanged = false; + return DECODE_FORMAT_CHANGE; + } + + return status; +} + +void VideoDecoderMPEG4::flush(void) { + VideoDecoderBase::flush(); + + mExpectingNVOP = false; + mLastVOPTimeIncrement = 0; + mLastVOPCodingType = MP4_VOP_TYPE_I; +} + +Decode_Status VideoDecoderMPEG4::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mp42 *data) { + Decode_Status status; + // check if any slice is parsed, we may just receive configuration data + if (data->number_picture_data == 0) { + WTRACE("number_picture_data == 0"); + return DECODE_SUCCESS; + } + + uint64_t lastPTS = mCurrentPTS; + mCurrentPTS = buffer->timeStamp; + + if (lastPTS != mCurrentPTS) { + // finish decoding the last frame + status = endDecodingFrame(false); + CHECK_STATUS("endDecodingFrame"); + + // start decoding a new frame + status = beginDecodingFrame(data); + if (status != DECODE_SUCCESS) { + endDecodingFrame(true); + } + CHECK_STATUS("beginDecodingFrame"); + } else { + status = continueDecodingFrame(data); + if (status != DECODE_SUCCESS) { + endDecodingFrame(true); + } + CHECK_STATUS("continueDecodingFrame"); + } + + if (buffer->flag & HAS_COMPLETE_FRAME) { + // finish decoding current frame + status = endDecodingFrame(false); + CHECK_STATUS("endDecodingFrame"); + } + + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) { + Decode_Status status = DECODE_SUCCESS; + vbp_picture_data_mp42 *picData = data->picture_data; + VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param); + int codingType = picParam->vop_fields.bits.vop_coding_type; + + // start sanity checking + if (mExpectingNVOP) { + // if we are waiting for n-vop for packed frame, and the new frame is coded, the coding type + // of this frame must be B + // for example: {PB} B N P B B P... 
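+ // (Notation: {PB} is a packed frame carrying one P and one B VOP in a single
+ // sample; N is an N-VOP, i.e. a VOP with vop_coded == 0. See the packed-frame
+ // assumptions in continueDecodingFrame below.)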
+ if (picData->vop_coded == 1 && codingType != MP4_VOP_TYPE_B) {
+ WTRACE("Invalid coding type while waiting for n-vop for packed frame.");
+ // timestamp of P frame in the queue is not correct.
+ // TODO: handle timestamp
+ flush();
+ mExpectingNVOP = false;
+ }
+ }
+
+ // handle N-VOP picture; it could be a skipped frame or a simple placeholder for a packed frame
+ if (picData->vop_coded == 0) {
+ if (mLastReference == NULL) {
+ WTRACE("The last reference is unavailable to construct skipped frame.");
+ flush();
+ mExpectingNVOP = false;
+ // TODO: handle this case
+ return DECODE_SUCCESS;
+ }
+
+ if (mExpectingNVOP) {
+ // P frame is already in the queue, just need to update the time stamp.
+ mLastReference->renderBuffer.timeStamp = mCurrentPTS;
+ mExpectingNVOP = false;
+ }
+ else {
+ // this is a skipped frame; use the last reference frame as output
+ status = acquireSurfaceBuffer();
+ CHECK_STATUS("acquireSurfaceBuffer");
+ mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
+ mAcquiredBuffer->renderBuffer.flag = 0;
+ mAcquiredBuffer->renderBuffer.scanFormat = mLastReference->renderBuffer.scanFormat;
+ mAcquiredBuffer->renderBuffer.surface = mLastReference->renderBuffer.surface;
+ // No need to update mappedData for HW decoding
+ //mAcquiredBuffer->mappedData.data = mLastReference->mappedData.data;
+ mAcquiredBuffer->referenceFrame = true;
+ status = outputSurfaceBuffer();
+ CHECK_STATUS("outputSurfaceBuffer");
+ }
+
+ if (data->number_picture_data > 1) {
+ WTRACE("Unexpected to have more picture data following a non-coded VOP.");
+ // Picture data is thrown away. No issue if the picture data is for an N-VOP; if it is for a
+ // coded picture, a frame is lost.
+ // TODO: handle this case
+ // return DECODE_FAIL;
+ }
+ return DECODE_SUCCESS;
+ }
+ else {
+ // Check if we have reference frame(s) for decoding
+ if (codingType == MP4_VOP_TYPE_B) {
+ if (mForwardReference == NULL ||
+ mLastReference == NULL) {
+ return DECODE_NO_REFERENCE;
+ }
+ } else if (codingType == MP4_VOP_TYPE_P || codingType == MP4_VOP_TYPE_S) {
+ if (mLastReference == NULL) {
+ return DECODE_NO_REFERENCE;
+ }
+ }
+ // all sanity checks pass, continue decoding through continueDecodingFrame
+ status = continueDecodingFrame(data);
+ }
+ return status;
+}
+
+Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) {
+ Decode_Status status = DECODE_SUCCESS;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ /*
+ Packed Frame Assumption:
+
+ 1. In one packed frame, there's only one P or I frame and only one B frame.
+ 2. In a packed frame, there's no skipped frame (vop_coded = 0)
+ 3. For one packed frame, there will be one N-VOP frame to follow the packed frame (though possibly not immediately).
+ 4. The N-VOP frame is the frame with vop_coded = 0.
+ 5. The timestamp of the N-VOP frame will be used for the P or I frame in the packed frame
+
+
+ I, P, {P, B}, B, N, P, N, I, ...
+ I, P, {P, B}, N, P, N, I, ...
+
+ The first N is a placeholder for the P frame in the packed frame
+ The second N is a skipped frame
+ */
+
+ vbp_picture_data_mp42 *picData = data->picture_data;
+ for (uint32_t i = 0; i < data->number_picture_data; i++, picData = picData->next_picture_data) {
+ // each slice has its own picture data; a video_packet_header following a resync_marker may reset the picture header (see MP4 spec)
+ VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
+ int codingType = picParam->vop_fields.bits.vop_coding_type;
+ if (codingType == MP4_VOP_TYPE_S && picParam->no_of_sprite_warping_points > 1) {
+ WTRACE("Hardware only supports up to one warping point (stationary or translation)");
+ // TODO: we actually can't decode this frame
+ return DECODE_FAIL;
+ }
+
+ if (picData->vop_coded == 0) {
+ ETRACE("Unexpected to have non-coded VOP.");
+ return DECODE_FAIL;
+ }
+ if (picData->new_picture_flag == 1 || mDecodingFrame == false) {
+ // either condition indicates start of a new frame
+ if (picData->new_picture_flag == 0) {
+ WTRACE("First slice of picture is lost!");
+ // TODO: handle this case
+ }
+ if (mDecodingFrame) {
+ // this indicates the start of a new frame in the packed frame
+ // Update timestamp for P frame in the packed frame as timestamp here is for the B frame!
+ if (picParam->vop_time_increment_resolution)
+ {
+ uint64_t increment = mLastVOPTimeIncrement - picData->vop_time_increment +
+ picParam->vop_time_increment_resolution;
+ increment = increment % picParam->vop_time_increment_resolution;
+ // convert to nano-second
+ // TODO: time stamp unit varies across frameworks
+ increment = increment * 1e9 / picParam->vop_time_increment_resolution;
+ mAcquiredBuffer->renderBuffer.timeStamp += increment;
+ }
+ endDecodingFrame(false);
+ mExpectingNVOP = true;
+ }
+
+ if (mExpectingNVOP == true && codingType != MP4_VOP_TYPE_B) {
+ ETRACE("The second frame in the packed frame is not a B frame.");
+ mExpectingNVOP = false;
+ // TODO: should be able to continue
+ return DECODE_FAIL;
+ }
+ // acquire a new surface buffer
+ status = acquireSurfaceBuffer();
+ CHECK_STATUS("acquireSurfaceBuffer");
+
+ // a sprite is treated as a P frame in display order, so only B frames are not used as "reference"
+ mAcquiredBuffer->referenceFrame = (codingType != MP4_VOP_TYPE_B);
+ if (picData->picture_param.vol_fields.bits.interlaced) {
+ // only MPEG-4 studio profile can have field coding. All other profiles
+ // use frame coding only, i.e., there is no field VOP (see vop_structure in MP4 spec).
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
+ } else {
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
+ }
+ // TODO: set discontinuity flag
+ mAcquiredBuffer->renderBuffer.flag = 0;
+ mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
+
+ if (codingType == MP4_VOP_TYPE_I || codingType == MP4_VOP_TYPE_P) {
+ mLastVOPCodingType = codingType;
+ mLastVOPTimeIncrement = picData->vop_time_increment;
+ }
+
+ // start decoding a frame
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaBeginPicture");
+
+ mDecodingFrame = true;
+ mSendIQMatrixBuf = true;
+ }
+
+ status = decodeSlice(data, picData);
+ CHECK_STATUS("decodeSlice");
+ }
+
+ return DECODE_SUCCESS;
+}
+
+
+Decode_Status VideoDecoderMPEG4::decodeSlice(vbp_data_mp42 *data, vbp_picture_data_mp42 *picData) {
+ Decode_Status status;
+ VAStatus vaStatus;
+ uint32_t bufferIDCount = 0;
+ // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
+ VABufferID bufferIDs[4];
+
+ VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
+ vbp_slice_data_mp42 *sliceData = &(picData->slice_data);
+ VASliceParameterBufferMPEG4 *sliceParam = &(sliceData->slice_param);
+
+ // send picture parameter for each slice
+ status = setReference(picParam);
+ CHECK_STATUS("setReference");
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAPictureParameterBufferType,
+ sizeof(VAPictureParameterBufferMPEG4),
+ 1,
+ picParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
+
+ bufferIDCount++;
+ if (picParam->vol_fields.bits.quant_type && mSendIQMatrixBuf)
+ {
+ // only send IQ matrix for the first slice in the picture
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAIQMatrixBufferType,
+ sizeof(VAIQMatrixBufferMPEG4),
+ 1,
+ &(data->iq_matrix_buffer),
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
+
+ mSendIQMatrixBuf = false;
+ bufferIDCount++;
+ }
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferMPEG4),
+ 1,
+ sliceParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
+
+ bufferIDCount++;
+
+ //slice data buffer pointer
+ //Note that this is the original data buffer ptr;
+ // offset to the actual slice data is provided in
+ // slice_data_offset in VASliceParameterBufferMPEG4
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ sliceData->slice_size, //size
+ 1, //num_elements
+ sliceData->buffer_addr + sliceData->slice_offset,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+
+ bufferIDCount++;
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ bufferIDs,
+ bufferIDCount);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderMPEG4::setReference(VAPictureParameterBufferMPEG4 *picParam) {
+ switch (picParam->vop_fields.bits.vop_coding_type) {
+ case MP4_VOP_TYPE_I:
+ picParam->forward_reference_picture = VA_INVALID_SURFACE;
+ picParam->backward_reference_picture = VA_INVALID_SURFACE;
+ break;
+ case MP4_VOP_TYPE_P:
+ if (mLastReference == NULL) {
+ return DECODE_NO_REFERENCE;
+ }
+ picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
+ picParam->backward_reference_picture = VA_INVALID_SURFACE;
+ break;
+ case MP4_VOP_TYPE_B:
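+ // (Bookkeeping reminder: mLastReference is the most recently decoded anchor,
+ // i.e. the B-VOP's future anchor in display order, while mForwardReference
+ // holds the older, past anchor; the assignments below were flagged by the
+ // original author for review.)
+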
picParam->vop_fields.bits.backward_reference_vop_coding_type = mLastVOPCodingType; + // WEIRD, CHECK AGAIN !!!!!!! + picParam->forward_reference_picture = mLastReference->renderBuffer.surface; + picParam->backward_reference_picture = mForwardReference->renderBuffer.surface; + break; + case MP4_VOP_TYPE_S: + // WEIRD, CHECK AGAIN!!!! WAS using mForwardReference + if (mLastReference == NULL) { + return DECODE_NO_REFERENCE; + } + picParam->forward_reference_picture = mLastReference->renderBuffer.surface; + picParam->backward_reference_picture = VA_INVALID_SURFACE; + break; + + default: + // Will never reach here; + return DECODE_PARSER_FAIL; + } + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderMPEG4::startVA(vbp_data_mp42 *data) { + updateFormatInfo(data); + + VAProfile vaProfile; + + if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) { + vaProfile = VAProfileMPEG4AdvancedSimple; + } else { + vaProfile = VAProfileMPEG4Simple; + } + + return VideoDecoderBase::setupVA(MP4_SURFACE_NUMBER, vaProfile); +} + +void VideoDecoderMPEG4::updateFormatInfo(vbp_data_mp42 *data) { + ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d", + mVideoFormatInfo.width, mVideoFormatInfo.height, + data->codec_data.video_object_layer_width, + data->codec_data.video_object_layer_height); + + if (mVideoFormatInfo.width != (int32_t)data->codec_data.video_object_layer_width || + mVideoFormatInfo.height != (int32_t)data->codec_data.video_object_layer_height) { + // update encoded image size + mVideoFormatInfo.width = data->codec_data.video_object_layer_width; + mVideoFormatInfo.height = data->codec_data.video_object_layer_height; + mSizeChanged = true; + ITRACE("Video size is changed."); + } + + // video_range has default value of 0. Y ranges from 16 to 235. + mVideoFormatInfo.videoRange = data->codec_data.video_range; + + switch (data->codec_data.matrix_coefficients) { + case 1: + mVideoFormatInfo.colorMatrix = VA_SRC_BT709; + break; + + // ITU-R Recommendation BT.470-6 System B, G (MP4), same as + // SMPTE 170M/BT601 + case 5: + case 6: + mVideoFormatInfo.colorMatrix = VA_SRC_BT601; + break; + + default: + // unknown color matrix, set to 0 so color space flag will not be set. + mVideoFormatInfo.colorMatrix = 0; + break; + } + + mVideoFormatInfo.aspectX = data->codec_data.par_width; + mVideoFormatInfo.aspectY = data->codec_data.par_height; + //mVideoFormatInfo.bitrate = data->codec_data.bit_rate; + mVideoFormatInfo.valid = true; +} diff --git a/videodecoder/VideoDecoderMPEG4.h b/videodecoder/VideoDecoderMPEG4.h new file mode 100644 index 0000000..79e5b99 --- /dev/null +++ b/videodecoder/VideoDecoderMPEG4.h @@ -0,0 +1,71 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. 
+* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef VIDEO_DECODER_MPEG4_H_ +#define VIDEO_DECODER_MPEG4_H_ + +#include "VideoDecoderBase.h" + + +class VideoDecoderMPEG4 : public VideoDecoderBase { +public: + VideoDecoderMPEG4(const char *mimeType); + virtual ~VideoDecoderMPEG4(); + + virtual Decode_Status start(VideoConfigBuffer *buffer); + virtual void stop(void); + virtual void flush(void); + virtual Decode_Status decode(VideoDecodeBuffer *buffer); + +private: + Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mp42 *data); + Decode_Status beginDecodingFrame(vbp_data_mp42 *data); + Decode_Status continueDecodingFrame(vbp_data_mp42 *data); + Decode_Status decodeSlice(vbp_data_mp42 *data, vbp_picture_data_mp42 *picData); + Decode_Status setReference(VAPictureParameterBufferMPEG4 *picParam); + Decode_Status startVA(vbp_data_mp42 *data); + void updateFormatInfo(vbp_data_mp42 *data); + +private: + // Value of VOP type defined here follows MP4 spec + enum { + MP4_VOP_TYPE_I = 0, + MP4_VOP_TYPE_P = 1, + MP4_VOP_TYPE_B = 2, + MP4_VOP_TYPE_S = 3, + }; + + enum { + MP4_SURFACE_NUMBER = 10, + }; + + uint64_t mLastVOPTimeIncrement; + bool mExpectingNVOP; // indicate if future n-vop is a placeholder of a packed frame + bool mSendIQMatrixBuf; // indicate if iq_matrix_buffer is sent to driver + int32_t mLastVOPCodingType; +}; + + + +#endif /* VIDEO_DECODER_MPEG4_H_ */ diff --git a/videodecoder/VideoDecoderTrace.cpp b/videodecoder/VideoDecoderTrace.cpp new file mode 100644 index 0000000..62e9f1c --- /dev/null +++ b/videodecoder/VideoDecoderTrace.cpp @@ -0,0 +1,45 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + + +#include "VideoDecoderTrace.h" + +#ifdef ENABLE_VIDEO_DECODER_TRACE + +void TraceVideoDecoder(const char* cat, const char* fun, int line, const char* format, ...) 
+{ + if (NULL == cat || NULL == fun || NULL == format) + return; + + printf("%s %s(#%d): ", cat, fun, line); + va_list args; + va_start(args, format); + vprintf(format, args); + va_end(args); + printf("\n"); +} + +#endif + diff --git a/videodecoder/VideoDecoderTrace.h b/videodecoder/VideoDecoderTrace.h new file mode 100644 index 0000000..809a9ad --- /dev/null +++ b/videodecoder/VideoDecoderTrace.h @@ -0,0 +1,94 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + +#ifndef VIDEO_DECODER_TRACE_H_ +#define VIDEO_DECODER_TRACE_H_ + + +#define ENABLE_VIDEO_DECODER_TRACE +//#define ANDROID + + +#ifdef ENABLE_VIDEO_DECODER_TRACE + +#ifndef ANDROID + +#include +#include + +extern void TraceVideoDecoder(const char* cat, const char* fun, int line, const char* format, ...); +#define VIDEO_DECODER_TRACE(cat, format, ...) \ +TraceVideoDecoder(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) + +#else + +#include +#define VIDEO_DECODER_TRACE(cat, format, ...) \ +__android_log_print(ANDROID_LOG_VERBOSE, "VideoDecoder : "cat,format, ##__VA_ARGS__) + +#endif + + +#define ETRACE(format, ...) VIDEO_DECODER_TRACE("ERROR: ", format, ##__VA_ARGS__) +#define WTRACE(format, ...) VIDEO_DECODER_TRACE("WARNING: ", format, ##__VA_ARGS__) +#define ITRACE(format, ...) VIDEO_DECODER_TRACE("INFO: ", format, ##__VA_ARGS__) +#define VTRACE(format, ...) VIDEO_DECODER_TRACE("VERBOSE: ", format, ##__VA_ARGS__) + +#else + +#define ETRACE(format, ...) +#define WTRACE(format, ...) +#define ITRACE(format, ...) +#define VTRACE(format, ...) + + +#endif /* ENABLE_VIDEO_DECODER_TRACE*/ + + +#define CHECK_STATUS(FUNC)\ + if (status != DECODE_SUCCESS) {\ + if (status > DECODE_SUCCESS) {\ + WTRACE(FUNC" failed. status = %d", status);\ + } else {\ + ETRACE(FUNC" failed. status = %d", status);\ + }\ + return status;\ + } + +#define CHECK_VA_STATUS(FUNC)\ + if (vaStatus != VA_STATUS_SUCCESS) {\ + ETRACE(FUNC" failed. vaStatus = 0x%x", vaStatus);\ + return DECODE_DRIVER_FAIL;\ + } + +#define CHECK_VBP_STATUS(FUNC)\ + if (vbpStatus != VBP_OK) {\ + ETRACE(FUNC" failed. vbpStatus = %d", (int)vbpStatus);\ + return DECODE_PARSER_FAIL;\ + } + +#endif /*VIDEO_DECODER_TRACE_H_*/ + + diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp new file mode 100644 index 0000000..0329c2a --- /dev/null +++ b/videodecoder/VideoDecoderVP8.cpp @@ -0,0 +1,85 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. 
+* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#include "VideoDecoderVP8.h" +#include "VideoDecoderTrace.h" +#include + +VideoDecoderVP8::VideoDecoderVP8(const char *mimeType) + : VideoDecoderBase(mimeType, (_vbp_parser_type)VBP_INVALID) { +} + +VideoDecoderVP8::~VideoDecoderVP8() { + stop(); +} + +Decode_Status VideoDecoderVP8::start(VideoConfigBuffer *buffer) { + Decode_Status status; + + status = VideoDecoderBase::start(buffer); + CHECK_STATUS("VideoDecoderBase::start"); + + // config VP8 software decoder if necessary + // TODO: update mVideoFormatInfo here + + status = VideoDecoderBase::setupVA( + VP8_SURFACE_NUMBER, + (VAProfile)VAProfileSoftwareDecoding); + + return status; +} + +void VideoDecoderVP8::stop(void) { + VideoDecoderBase::stop(); +} + +void VideoDecoderVP8::flush(void) { + VideoDecoderBase::flush(); +} + +Decode_Status VideoDecoderVP8::decode(VideoDecodeBuffer *buffer) { + Decode_Status status; + + status = acquireSurfaceBuffer(); + CHECK_STATUS("acquireSurfaceBuffer"); + + // TODO: decode sample to mAcquiredBuffer->mappedAddr. + // make sure decoded output is in NV12 format. + // << add decoding codes here>> + + + // set referenceFrame to true if frame decoded is I/P frame, false otherwise. + mAcquiredBuffer->referenceFrame = true; + // assume it is frame picture. + mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE; + mAcquiredBuffer->renderBuffer.timeStamp = buffer->timeStamp; + mAcquiredBuffer->renderBuffer.flag = 0; + + // if sample is successfully decoded, call outputSurfaceBuffer(); otherwise + // call releaseSurfacebuffer(); + status = outputSurfaceBuffer(); + return status; +} + + diff --git a/videodecoder/VideoDecoderVP8.h b/videodecoder/VideoDecoderVP8.h new file mode 100644 index 0000000..676bd1a --- /dev/null +++ b/videodecoder/VideoDecoderVP8.h @@ -0,0 +1,50 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. 
No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef VIDEO_DECODER_VP8_H_ +#define VIDEO_DECODER_VP8_H_ + +#include "VideoDecoderBase.h" + + +class VideoDecoderVP8 : public VideoDecoderBase { +public: + VideoDecoderVP8(const char *mimeType); + virtual ~VideoDecoderVP8(); + + virtual Decode_Status start(VideoConfigBuffer *buffer); + virtual void stop(void); + virtual void flush(void); + virtual Decode_Status decode(VideoDecodeBuffer *buffer); + + +private: + enum { + VP8_SURFACE_NUMBER = 10, + }; +}; + + + +#endif /* VIDEO_DECODER_VP8_H_ */ diff --git a/videodecoder/VideoDecoderWMV.cpp b/videodecoder/VideoDecoderWMV.cpp new file mode 100644 index 0000000..f3d5fbc --- /dev/null +++ b/videodecoder/VideoDecoderWMV.cpp @@ -0,0 +1,475 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+*
+*/
+
+#include "VideoDecoderWMV.h"
+#include "VideoDecoderTrace.h"
+#include <string.h>
+
+VideoDecoderWMV::VideoDecoderWMV(const char *mimeType)
+    : VideoDecoderBase(mimeType, VBP_VC1),
+      mBufferIDs(NULL),
+      mNumBufferIDs(0),
+      mConfigDataParsed(false) {
+}
+
+
+VideoDecoderWMV::~VideoDecoderWMV() {
+    stop();
+}
+
+Decode_Status VideoDecoderWMV::start(VideoConfigBuffer *buffer) {
+    Decode_Status status;
+
+    status = VideoDecoderBase::start(buffer);
+    CHECK_STATUS("VideoDecoderBase::start");
+
+    if (buffer->data == NULL || buffer->size == 0) {
+        WTRACE("No config data to start VA.");
+        return DECODE_SUCCESS;
+    }
+
+    vbp_data_vc1 *data = NULL;
+    status = parseBuffer(buffer->data, buffer->size, &data);
+    CHECK_STATUS("parseBuffer");
+
+    status = startVA(data);
+    return status;
+}
+
+void VideoDecoderWMV::stop(void) {
+    if (mBufferIDs) {
+        delete [] mBufferIDs;
+        mBufferIDs = NULL;
+    }
+    mNumBufferIDs = 0;
+    mConfigDataParsed = false;
+
+    VideoDecoderBase::stop();
+}
+
+void VideoDecoderWMV::flush(void) {
+    VideoDecoderBase::flush();
+}
+
+Decode_Status VideoDecoderWMV::decode(VideoDecodeBuffer *buffer) {
+    Decode_Status status;
+    vbp_data_vc1 *data = NULL;
+    if (buffer == NULL) {
+        return DECODE_INVALID_DATA;
+    }
+
+    status = parseBuffer(buffer->data, buffer->size, &data);
+    CHECK_STATUS("parseBuffer");
+
+    if (!mVAStarted) {
+        status = startVA(data);
+        CHECK_STATUS("startVA");
+    }
+
+    status = decodeFrame(buffer, data);
+    CHECK_STATUS("decodeFrame");
+    if (mSizeChanged) {
+        mSizeChanged = false;
+        return DECODE_FORMAT_CHANGE;
+    }
+    return status;
+}
+
+Decode_Status VideoDecoderWMV::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_vc1 *data) {
+    Decode_Status status;
+    mCurrentPTS = buffer->timeStamp;
+    if (0 == data->num_pictures || NULL == data->pic_data) {
+        WTRACE("Number of pictures is 0, buffer contains configuration data only?");
+        return DECODE_SUCCESS;
+    }
+
+    if (data->pic_data[0].picture_is_skipped == VC1_PTYPE_SKIPPED) {
+        // use the last P or I frame surface for the skipped frame and treat it as a P frame
+        if (mLastReference == NULL) {
+            // TODO: handle this case
+            WTRACE("The last reference is unavailable to construct skipped frame.");
+            return DECODE_SUCCESS;
+        }
+
+        status = acquireSurfaceBuffer();
+        CHECK_STATUS("acquireSurfaceBuffer");
+        mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
+        mAcquiredBuffer->renderBuffer.flag = 0;
+        mAcquiredBuffer->renderBuffer.scanFormat = mLastReference->renderBuffer.scanFormat;
+        mAcquiredBuffer->renderBuffer.surface = mLastReference->renderBuffer.surface;
+        // No need to update mappedData for HW decoding
+        //mAcquiredBuffer->mappedData.data = mLastReference->mappedData.data;
+        mAcquiredBuffer->referenceFrame = true;
+        // let outputSurfaceBuffer handle "asReference" for VC1
+        status = outputSurfaceBuffer();
+        return status;
+    }
+
+    status = acquireSurfaceBuffer();
+    CHECK_STATUS("acquireSurfaceBuffer");
+
+    mAcquiredBuffer->renderBuffer.timeStamp = buffer->timeStamp;
+    if (buffer->flag & HAS_DISCONTINUITY) {
+        mAcquiredBuffer->renderBuffer.flag |= HAS_DISCONTINUITY;
+    }
+    if (buffer->flag & WANT_DECODE_ONLY) {
+        mAcquiredBuffer->renderBuffer.flag |= WANT_DECODE_ONLY;
+    }
+
+    if (data->num_pictures > 1) {
+        if (data->pic_data[0].pic_parms->picture_fields.bits.is_first_field) {
+            mAcquiredBuffer->renderBuffer.scanFormat = VA_TOP_FIELD;
+        } else {
+            mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD;
+        }
+    } else {
+        mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
+    }
+    int frameType = data->pic_data[0].pic_parms->picture_fields.bits.picture_type;
+    mAcquiredBuffer->referenceFrame = (frameType == VC1_PTYPE_I || frameType == VC1_PTYPE_P);
+
+    // TODO: handle multiple frames parsed from a sample buffer
+    int numPictures = (data->num_pictures > 1) ? 2 : 1;
+
+    for (int index = 0; index < numPictures; index++) {
+        status = decodePicture(data, index);
+        if (status != DECODE_SUCCESS) {
+            endDecodingFrame(true);
+            return status;
+        }
+    }
+
+    // let outputSurfaceBuffer handle "asReference" for VC1
+    status = outputSurfaceBuffer();
+    return status;
+}
+
+
+Decode_Status VideoDecoderWMV::decodePicture(vbp_data_vc1 *data, int32_t picIndex) {
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    Decode_Status status;
+    int32_t bufferIDCount = 0;
+    vbp_picture_data_vc1 *picData = &(data->pic_data[picIndex]);
+    VAPictureParameterBufferVC1 *picParams = picData->pic_parms;
+
+    if (picParams == NULL) {
+        return DECODE_PARSER_FAIL;
+    }
+
+    status = allocateVABufferIDs(picData->num_slices * 2 + 2);
+    CHECK_STATUS("allocateVABufferIDs");
+
+    status = setReference(picParams, picIndex, mAcquiredBuffer->renderBuffer.surface);
+    CHECK_STATUS("setReference");
+
+    if (data->se_data->LOOPFILTER) {
+        // Loop filter handling
+        picParams->inloop_decoded_picture = mAcquiredBuffer->renderBuffer.surface;
+    } else {
+        picParams->inloop_decoded_picture = VA_INVALID_SURFACE;
+    }
+
+    vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+    CHECK_VA_STATUS("vaBeginPicture");
+    // setting mDecodingFrame to true so vaEndPicture will be invoked to end the picture decoding.
+    mDecodingFrame = true;
+
+    vaStatus = vaCreateBuffer(
+        mVADisplay,
+        mVAContext,
+        VAPictureParameterBufferType,
+        sizeof(VAPictureParameterBufferVC1),
+        1,
+        picParams,
+        &mBufferIDs[bufferIDCount]);
+    CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
+    bufferIDCount++;
+
+    if (picParams->bitplane_present.value) {
+        vaStatus = vaCreateBuffer(
+            mVADisplay,
+            mVAContext,
+            VABitPlaneBufferType,
+            picData->size_bitplanes,
+            1,
+            picData->packed_bitplanes,
+            &mBufferIDs[bufferIDCount]);
+        CHECK_VA_STATUS("vaCreateBitPlaneBuffer");
+        bufferIDCount++;
+    }
+
+    for (uint32_t i = 0; i < picData->num_slices; i++) {
+        vaStatus = vaCreateBuffer(
+            mVADisplay,
+            mVAContext,
+            VASliceParameterBufferType,
+            sizeof(VASliceParameterBufferVC1),
+            1,
+            &(picData->slc_data[i].slc_parms),
+            &mBufferIDs[bufferIDCount]);
+        CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
+        bufferIDCount++;
+
+        vaStatus = vaCreateBuffer(
+            mVADisplay,
+            mVAContext,
+            VASliceDataBufferType,
+            //size
+            picData->slc_data[i].slice_size,
+            //num_elements
+            1,
+            //slice data buffer pointer
+            //Note that this is the original data buffer ptr;
+            // offset to the actual slice data is provided in
+            // slice_data_offset in VASliceParameterBufferVC1
+            picData->slc_data[i].buffer_addr + picData->slc_data[i].slice_offset,
+            &mBufferIDs[bufferIDCount]);
+        CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+        bufferIDCount++;
+    }
+
+    vaStatus = vaRenderPicture(
+        mVADisplay,
+        mVAContext,
+        mBufferIDs,
+        bufferIDCount);
+    CHECK_VA_STATUS("vaRenderPicture");
+
+    vaStatus = vaEndPicture(mVADisplay, mVAContext);
+    mDecodingFrame = false;
+    CHECK_VA_STATUS("vaEndPicture");
+
+    return DECODE_SUCCESS;
+}
+
+
+Decode_Status VideoDecoderWMV::setReference(
+    VAPictureParameterBufferVC1 *params,
+    int32_t picIndex,
+    VASurfaceID current) {
+    int frameType = params->picture_fields.bits.picture_type;
+    switch (frameType) {
+        case VC1_PTYPE_I:
+            params->forward_reference_picture = current;
+            params->backward_reference_picture = current;
+            break;
+        case VC1_PTYPE_P:
+            // check REFDIST in the picture parameter buffer
+            if (0 != params->reference_fields.bits.reference_distance_flag &&
+                0 != params->reference_fields.bits.reference_distance) {
+                /* The previous decoded frame (distance is up to 16 but not 0) is used
+                   for reference. Not supported here.
+                */
+                return DECODE_NO_REFERENCE;
+            }
+            if (1 == picIndex) {
+                // handle interlace field coding case
+                if (1 == params->reference_fields.bits.num_reference_pictures ||
+                    1 == params->reference_fields.bits.reference_field_pic_indicator) {
+                    /*
+                       two reference fields or the second closest I/P field is used for
+                       prediction. Set forward reference picture to INVALID so it will be
+                       updated to a valid previous reconstructed reference frame later.
+                    */
+                    params->forward_reference_picture = VA_INVALID_SURFACE;
+                } else {
+                    /* the closest I/P is used for reference so it must be the
+                       complementary field in the same surface.
+                    */
+                    params->forward_reference_picture = current;
+                }
+            }
+            if (VA_INVALID_SURFACE == params->forward_reference_picture) {
+                if (mLastReference == NULL) {
+                    return DECODE_NO_REFERENCE;
+                }
+                params->forward_reference_picture = mLastReference->renderBuffer.surface;
+            }
+            params->backward_reference_picture = VA_INVALID_SURFACE;
+            break;
+        case VC1_PTYPE_B:
+            if (mForwardReference == NULL || mLastReference == NULL) {
+                return DECODE_NO_REFERENCE;
+            }
+            params->forward_reference_picture = mForwardReference->renderBuffer.surface;
+            params->backward_reference_picture = mLastReference->renderBuffer.surface;
+            break;
+        case VC1_PTYPE_BI:
+            params->forward_reference_picture = VA_INVALID_SURFACE;
+            params->backward_reference_picture = VA_INVALID_SURFACE;
+            break;
+        case VC1_PTYPE_SKIPPED:
+            // will never happen here
+            break;
+        default:
+            break;
+    }
+    return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderWMV::updateConfigData(
+    uint8_t *configData,
+    int32_t configDataLen,
+    uint8_t **newConfigData,
+    int32_t* newConfigDataLen) {
+    int32_t i = 0;
+    uint8_t *p = configData;
+
+    /* Check for start codes. If one exists, then this is VC-1 and not WMV. */
+    while (i < configDataLen - 2) {
+        if ((p[i] == 0) &&
+            (p[i + 1] == 0) &&
+            (p[i + 2] == 1)) {
+            *newConfigData = NULL;
+            *newConfigDataLen = 0;
+            return DECODE_SUCCESS;
+        }
+        i++;
+    }
+
+    *newConfigDataLen = configDataLen + 9;
+    p = *newConfigData = new uint8_t [*newConfigDataLen];
+    if (!p) {
+        return DECODE_MEMORY_FAIL;
+    }
+
+    /* If we get here we have 4+ bytes of codec data that must be formatted */
+    /* to pass through as an RCV sequence header. */
+    p[0] = 0;
+    p[1] = 0;
+    p[2] = 1;
+    p[3] = 0x0f; /* Start code. */
+    p[4] = (mVideoFormatInfo.width >> 8) & 0x0ff;
+    p[5] = mVideoFormatInfo.width & 0x0ff;
+    p[6] = (mVideoFormatInfo.height >> 8) & 0x0ff;
+    p[7] = mVideoFormatInfo.height & 0x0ff;
+
+    memcpy(p + 8, configData, configDataLen);
+    *(p + configDataLen + 8) = 0x80;
+
+    return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderWMV::startVA(vbp_data_vc1 *data) {
+    updateFormatInfo(data);
+
+    VAProfile vaProfile;
+    switch (data->se_data->PROFILE) {
+        case 0:
+            vaProfile = VAProfileVC1Simple;
+            break;
+        case 1:
+            vaProfile = VAProfileVC1Main;
+            break;
+        default:
+            vaProfile = VAProfileVC1Advanced;
+            break;
+    }
+
+    return VideoDecoderBase::setupVA(VC1_SURFACE_NUMBER, vaProfile);
+}
+
+void VideoDecoderWMV::updateFormatInfo(vbp_data_vc1 *data) {
+    ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
+        mVideoFormatInfo.width, mVideoFormatInfo.height,
+        data->se_data->CODED_WIDTH, data->se_data->CODED_HEIGHT);
+
+    if (mVideoFormatInfo.width != data->se_data->CODED_WIDTH ||
+        mVideoFormatInfo.height != data->se_data->CODED_HEIGHT) {
+        // encoded image size
+        mVideoFormatInfo.width = data->se_data->CODED_WIDTH;
+        mVideoFormatInfo.height = data->se_data->CODED_HEIGHT;
+        mSizeChanged = true;
+        ITRACE("Video size is changed.");
+    }
+
+    // scaling has been performed on the decoded image.
+    mVideoFormatInfo.videoRange = 1;
+
+    switch (data->se_data->MATRIX_COEF) {
+        case 1:
+            mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
+            break;
+        // ITU-R BT.1700, ITU-R BT.601-5, and SMPTE 293M-1996.
+        case 6:
+            mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
+            break;
+        default:
+            // unknown color matrix, set to 0 so color space flag will not be set.
+            mVideoFormatInfo.colorMatrix = 0;
+            break;
+    }
+
+    mVideoFormatInfo.aspectX = data->se_data->ASPECT_HORIZ_SIZE;
+    mVideoFormatInfo.aspectY = data->se_data->ASPECT_VERT_SIZE;
+    mVideoFormatInfo.bitrate = 0; //data->se_data->bitrate;
+    mVideoFormatInfo.valid = true;
+}
+
+Decode_Status VideoDecoderWMV::allocateVABufferIDs(int32_t number) {
+    if (mNumBufferIDs > number) {
+        return DECODE_SUCCESS;
+    }
+    if (mBufferIDs) {
+        delete [] mBufferIDs;
+    }
+    mBufferIDs = NULL;
+    mNumBufferIDs = 0;
+    mBufferIDs = new VABufferID [number];
+    if (mBufferIDs == NULL) {
+        return DECODE_MEMORY_FAIL;
+    }
+    mNumBufferIDs = number;
+    return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderWMV::parseBuffer(uint8_t *data, int32_t size, vbp_data_vc1 **vbpData) {
+    Decode_Status status;
+
+    if (data == NULL || size == 0) {
+        return DECODE_INVALID_DATA;
+    }
+
+    if (mConfigDataParsed) {
+        status = VideoDecoderBase::parseBuffer(data, size, false, (void**)vbpData);
+        CHECK_STATUS("VideoDecoderBase::parseBuffer");
+    } else {
+        uint8_t *newData = NULL;
+        int32_t newSize = 0;
+        status = updateConfigData(data, size, &newData, &newSize);
+        CHECK_STATUS("updateConfigData");
+
+        if (newSize) {
+            status = VideoDecoderBase::parseBuffer(newData, newSize, true, (void**)vbpData);
+            delete [] newData;
+        } else {
+            status = VideoDecoderBase::parseBuffer(data, size, true, (void**)vbpData);
+        }
+        CHECK_STATUS("VideoDecoderBase::parseBuffer");
+        mConfigDataParsed = true;
+    }
+    return DECODE_SUCCESS;
+}
+
+
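To make updateConfigData() concrete: for an assumed 720x480 stream whose
codec data is a 4-byte STRUCT_C sequence header (values illustrative, not
taken from this patch), the rewrapped buffer it builds is

    00 00 01 0F   02 D0   01 E0   <4 bytes of codec data>   80
    start code    width   height  original config payload   end marker

with width 720 = 0x02D0 and height 480 = 0x01E0 stored as big-endian 16-bit
fields; the 8 prefix bytes plus the trailing 0x80 account for the
newConfigDataLen = configDataLen + 9 computed above.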
diff --git a/videodecoder/VideoDecoderWMV.h b/videodecoder/VideoDecoderWMV.h
new file mode 100644
index 0000000..e1b0f99
--- /dev/null
+++ b/videodecoder/VideoDecoderWMV.h
@@ -0,0 +1,63 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#ifndef VIDEO_DECODER_WMV_H_
+#define VIDEO_DECODER_WMV_H_
+
+#include "VideoDecoderBase.h"
+
+
+class VideoDecoderWMV : public VideoDecoderBase {
+public:
+    VideoDecoderWMV(const char *mimeType);
+    virtual ~VideoDecoderWMV();
+
+    virtual Decode_Status start(VideoConfigBuffer *buffer);
+    virtual void stop(void);
+    virtual void flush(void);
+    virtual Decode_Status decode(VideoDecodeBuffer *buffer);
+
+private:
+    Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_vc1 *data);
+    Decode_Status decodePicture(vbp_data_vc1 *data, int32_t picIndex);
+    Decode_Status setReference(VAPictureParameterBufferVC1 *params, int32_t picIndex, VASurfaceID current);
+    Decode_Status updateConfigData(uint8_t *configData, int32_t configDataLen, uint8_t **newConfigData, int32_t *newConfigDataLen);
+    Decode_Status startVA(vbp_data_vc1 *data);
+    void updateFormatInfo(vbp_data_vc1 *data);
+    inline Decode_Status allocateVABufferIDs(int32_t number);
+    Decode_Status parseBuffer(uint8_t *data, int32_t size, vbp_data_vc1 **vbpData);
+
+private:
+    enum {
+        VC1_SURFACE_NUMBER = 10,
+    };
+
+    VABufferID *mBufferIDs;
+    int32_t mNumBufferIDs;
+    bool mConfigDataParsed;
+};
+
+
+
+#endif /* VIDEO_DECODER_WMV_H_ */
-- cgit v1.2.3

From 71c3f6021df207b7ed81d3768e333cb2e52b0467 Mon Sep 17 00:00:00 2001
From: xli111
Date: Wed, 15 Jun 2011 15:40:07 +0800
Subject: libmix: correct the method to calculate timestamps for mpeg4 decode.

BZ: 3035

Some mpeg4 clips freeze during playback: the timestamp is calculated
incorrectly, and the framework keeps waiting to render the next frame
according to the wrong timestamp. When calculating the timestamp for a
new frame, nanoseconds were sometimes taken as the basic unit when it
should be microseconds.
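For illustration (stream values assumed, not taken from the bug report):
with vop_time_increment_resolution = 30000 and a per-frame increment of
1001 ticks, the correct timestamp step is

    ts_inc = 1001 * 1e6 / 30000 = 33366 us    (one ~33.4 ms frame)

while the old 1e9-based math produced 33366666; a framework that expects
microseconds reads that as roughly 33 seconds per frame, hence the freeze.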
Change nanosecond to microsecond as the basic unit for calculating timestamps.

Change-Id: I5bf57793900e0f60ceb849338b7929992297ea9f
Signed-off-by: xli111
---
 mix_video/src/mixvideoformat_mp42.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mix_video/src/mixvideoformat_mp42.cpp b/mix_video/src/mixvideoformat_mp42.cpp
index b3b6e25..e8c5fd1 100644
--- a/mix_video/src/mixvideoformat_mp42.cpp
+++ b/mix_video/src/mixvideoformat_mp42.cpp
@@ -532,8 +532,8 @@ MIX_RESULT MixVideoFormat_MP42::_decode_continue(vbp_data_mp42 *data) {
         ts_inc = ts_inc % pic_params->vop_time_increment_resolution;
         LOG_V("timestamp is incremented by %"UINT64_FORMAT" at %d resolution.\n", ts_inc, pic_params->vop_time_increment_resolution);
-        // convert to nano-second
-        ts_inc = ts_inc * 1e9 / pic_params->vop_time_increment_resolution;
+        // convert to microsecond; the timestamp takes microsecond as its basic unit.
+        ts_inc = ts_inc * 1e6 / pic_params->vop_time_increment_resolution;
         LOG_V("timestamp of P frame in packed frame is updated from %"UINT64_FORMAT" to %"UINT64_FORMAT".\n", ts, ts + ts_inc);
         ts += ts_inc;
-- cgit v1.2.3

From f41c0aa7ee63a9d53742423cdc700179b69f28b1 Mon Sep 17 00:00:00 2001
From: Yingjian He
Date: Mon, 16 May 2011 15:21:09 -0700
Subject: Added error detection code in the middleware.

All code is wrapped in an ifdef SW_ERROR_CONCEALMENT flag, and this flag
is not enabled for now to avoid disturbing normal operation.

upgrade to 3.3.4 (replace patch #8613)

BZ:2528

Change-Id: I787dd6981ab6a3f19b2c4bffccb18b750bfc0f92
Signed-off-by: Weian Chen
---
 mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h    |   9 +-
 .../fw/codecs/h264/parser/h264parse_dpb.c          |  22 +++-
 .../fw/codecs/h264/parser/h264parse_pps.c          |  70 ++++++++++++-
 .../viddec_fw/fw/codecs/h264/parser/h264parse_sh.c | 111 +++++++++++++++++++--
 .../fw/codecs/h264/parser/h264parse_sps.c          |  54 +++++++++-
 .../fw/codecs/h264/parser/viddec_h264_parse.c      |  13 +++
 6 files changed, 265 insertions(+), 14 deletions(-)
 mode change 100644 => 100755 mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h
 mode change 100644 => 100755 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
 mode change 100644 => 100755 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c
 mode change 100644 => 100755 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c
 mode change 100644 => 100755 mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c
 mode change 100644 => 100755 mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c

diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h
old mode 100644
new mode 100755
index a3490b4..1033948
--- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h
@@ -26,6 +26,9 @@
 #include "viddec_fw_workload.h"
 #include "h264parse_sei.h"

+#ifdef VBP
+//#define SW_ERROR_CONCEALEMNT
+#endif

 #ifdef WIN32
 #define mfd_printf OS_INFO
@@ -1022,7 +1025,11 @@
     uint32_t wl_err_curr;
     uint32_t wl_err_next;

-
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+    uint32_t sw_bail;
+#endif
+#endif
 } h264_Info;

diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
old mode 100644
new mode 100755
index 6d52307..7d26bce
--- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
@@ -2487,7 +2487,11 @@ void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo)
     // only invoke following process for a conforming
bitstream // when gaps_in_frame_num_value_allowed_flag is equal to 1 pInfo->img.gaps_in_frame_num = 0; - +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif //mfd_printf("ERROR STREAM??\n"); ////// Error handling here---- } @@ -4003,6 +4007,14 @@ void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame if (pInfo->SliceHeader.idr_flag) { pInfo->img.FrameNumOffset = 0; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (pInfo->img.frame_num) + { + pInfo->sw_bail = 1; + } +#endif +#endif } else { @@ -4103,6 +4115,14 @@ void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame pInfo->img.toppoc = 0; pInfo->img.bottompoc = 0; pInfo->img.ThisPOC = 0; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (pInfo->img.frame_num) + { + pInfo->sw_bail = 1; + } +#endif +#endif } else { diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c old mode 100644 new mode 100755 index 1719a04..17f0930 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c @@ -21,6 +21,14 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa } PictureParameterSet->pic_parameter_set_id = (uint8_t)code; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (code > 255) + { + pInfo->sw_bail = 1; + } +#endif +#endif code = h264_GetVLCElement(parent, pInfo, false); if (code > MAX_NUM_SPS-1) { @@ -28,6 +36,14 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa } PictureParameterSet->seq_parameter_set_id = (uint8_t)code; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (code > 31) + { + pInfo->sw_bail = 1; + } +#endif +#endif ///// entropy_coding_mode_flag viddec_pm_get_bits(parent, &code, 1); PictureParameterSet->entropy_coding_mode_flag = (uint8_t)code; @@ -37,6 +53,14 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa PictureParameterSet->num_slice_groups_minus1 = h264_GetVLCElement(parent, pInfo, false); +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (PictureParameterSet->num_slice_groups_minus1 > 8) + { + pInfo->sw_bail = 1; + } +#endif +#endif // // In main profile, FMO is excluded and num_slice_groups_minus1 should be 0 // @@ -49,6 +73,11 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa //// PPS->num_ref_idx_l0_active --- [0,32] if (((PictureParameterSet->num_ref_idx_l0_active) > MAX_NUM_REF_FRAMES) || ((PictureParameterSet->num_ref_idx_l1_active) > MAX_NUM_REF_FRAMES)) { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif break; } @@ -56,6 +85,14 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa viddec_pm_get_bits(parent, &code, 1); PictureParameterSet->weighted_pred_flag = (uint8_t)code; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (code > 2) + { + pInfo->sw_bail = 1; + } +#endif +#endif viddec_pm_get_bits(parent, &code, 2); PictureParameterSet->weighted_bipred_idc = (uint8_t)code; @@ -63,9 +100,24 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa PictureParameterSet->pic_init_qp_minus26 = h264_GetVLCElement(parent, pInfo, true); PictureParameterSet->pic_init_qs_minus26 = h264_GetVLCElement(parent, pInfo, true); if (((PictureParameterSet->pic_init_qp_minus26+26) > MAX_QP) || ((PictureParameterSet->pic_init_qs_minus26+26) > MAX_QP)) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif 
break; + } PictureParameterSet->chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true); +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if ((12 < PictureParameterSet->chroma_qp_index_offset) || (-12 > PictureParameterSet->chroma_qp_index_offset) ) + { + pInfo->sw_bail = 1; + } +#endif +#endif //// Deblocking ctl parameters viddec_pm_get_bits(parent, &code, 1); PictureParameterSet->deblocking_filter_control_present_flag = (uint8_t)code; @@ -77,6 +129,14 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa break; PictureParameterSet->redundant_pic_cnt_present_flag = (uint8_t)code; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (code && (pInfo->active_SPS.profile_idc != h264_ProfileBaseline)) + { + pInfo->sw_bail = 1; + } +#endif +#endif //// Check if have more RBSP Data for additional parameters if (h264_More_RBSP_Data(parent, pInfo)) { @@ -107,8 +167,14 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa } PictureParameterSet->second_chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true); //fix - //if((PictureParameterSet->second_chroma_qp_index_offset>12) || (PictureParameterSet->second_chroma_qp_index_offset < -12)) - // break; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if((PictureParameterSet->second_chroma_qp_index_offset>12) || (PictureParameterSet->second_chroma_qp_index_offset < -12)) + { + pInfo->sw_bail = 1; + } +#endif +#endif } else { diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c old mode 100644 new mode 100755 index de34811..189e57b --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c @@ -25,6 +25,14 @@ h264_Status h264_Parse_Slice_Header_1(void *parent,h264_Info* pInfo, h264_Slice_ ///// slice_type slice_type = h264_GetVLCElement(parent, pInfo, false); +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (slice_type > 9) + { + pInfo->sw_bail = 1; + } +#endif +#endif SliceHeader->slice_type = (slice_type%5); if (SliceHeader->slice_type > h264_PtypeI) { @@ -36,6 +44,11 @@ h264_Status h264_Parse_Slice_Header_1(void *parent,h264_Info* pInfo, h264_Slice_ ////// pic_parameter_id data = h264_GetVLCElement(parent, pInfo, false); if (data > MAX_PIC_PARAMS) { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif ret = H264_PPS_INVALID_PIC_ID; break; } @@ -109,6 +122,14 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) { SliceHeader->idr_pic_id = h264_GetVLCElement(parent, pInfo, false); +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (SliceHeader->idr_pic_id > 65535) + { + pInfo->sw_bail = 1; + } +#endif +#endif } if (pInfo->active_SPS.pic_order_cnt_type == 0) @@ -276,13 +297,24 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice if (SliceHeader->cabac_init_idc > 2) { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif break; } SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true); if ( (SliceHeader->slice_qp_delta > (25-pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26+pInfo->active_PPS.pic_init_qp_minus26))) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif break; - + } if ((SliceHeader->slice_type == h264_PtypeSP)|| (SliceHeader->slice_type == h264_PtypeSI) ) { @@ -295,9 +327,15 @@ h264_Status h264_Parse_Slice_Header_3(void 
*parent, h264_Info* pInfo, h264_Slice SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true); if ( (SliceHeader->slice_qs_delta > (25-pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif break; + } } - if (pInfo->active_PPS.deblocking_filter_control_present_flag) { SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false); @@ -305,13 +343,25 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice { SliceHeader->slice_alpha_c0_offset_div2 = h264_GetVLCElement(parent, pInfo, true); slice_alpha_c0_offset = SliceHeader->slice_alpha_c0_offset_div2 << 1; - if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12) { + if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif break; } SliceHeader->slice_beta_offset_div2 = h264_GetVLCElement(parent, pInfo, true); slice_beta_offset = SliceHeader->slice_beta_offset_div2 << 1; - if (slice_beta_offset < -12 || slice_beta_offset > 12) { + if (slice_beta_offset < -12 || slice_beta_offset > 12) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif break; } } @@ -430,16 +480,38 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(parent, pInfo, false); +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (SliceHeader->sh_predwttbl.luma_log2_weight_denom > 7) + { + pInfo->sw_bail = 1; + } +#endif +#endif if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) { SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(parent,pInfo, false); } - +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (SliceHeader->sh_predwttbl.chroma_log2_weight_denom > 7) + { + pInfo->sw_bail = 1; + } +#endif +#endif for (i=0; i< SliceHeader->num_ref_idx_l0_active; i++) { viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); SliceHeader->sh_predwttbl.luma_weight_l0_flag = flag; - +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if ((-128 > flag) || (127 < flag)) + { + pInfo->sw_bail = 1; + } +#endif +#endif if (SliceHeader->sh_predwttbl.luma_weight_l0_flag) { SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(parent, pInfo, true); @@ -455,7 +527,14 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli { viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); SliceHeader->sh_predwttbl.chroma_weight_l0_flag = flag; - +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if ((-128 > flag) || (127 < flag)) + { + pInfo->sw_bail = 1; + } +#endif +#endif if (SliceHeader->sh_predwttbl.chroma_weight_l0_flag) { for (j=0; j <2; j++) @@ -482,7 +561,14 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli { viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); SliceHeader->sh_predwttbl.luma_weight_l1_flag = flag; - +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if ((-128 > flag) || (127 < flag)) + { + pInfo->sw_bail = 1; + } +#endif +#endif if (SliceHeader->sh_predwttbl.luma_weight_l1_flag) { SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(parent, pInfo, true); @@ -498,7 +584,14 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli { viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); SliceHeader->sh_predwttbl.chroma_weight_l1_flag = flag; - +#ifdef VBP +#ifdef 
SW_ERROR_CONCEALEMNT + if ((-128 > flag) || (127 < flag)) + { + pInfo->sw_bail = 1; + } +#endif +#endif if (SliceHeader->sh_predwttbl.chroma_weight_l1_flag) { for (j=0; j <2; j++) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c old mode 100644 new mode 100755 index 4a329b0..431892b --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c @@ -294,6 +294,11 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param case h264_ProfileHigh: break; default: +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif return H264_SPS_INVALID_PROFILE; break; } @@ -317,7 +322,14 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param //// reserved_zero_4bits viddec_pm_get_bits(parent, (uint32_t *)&code, 4); #endif - +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (code != 0) + { + pInfo->sw_bail = 1; + } +#endif +#endif viddec_pm_get_bits(parent, &code, 8); SPS->level_idc = (uint8_t)code; @@ -341,6 +353,11 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param case h264_Level51: break; default: +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif return H264_SPS_INVALID_LEVEL; } @@ -349,7 +366,14 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param //// seq_parameter_set_id ---[0,31] if (SPS->seq_parameter_set_id > MAX_NUM_SPS -1) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif break; + } #ifdef VBP SPS->sps_disp.separate_colour_plane_flag = 0; #endif @@ -422,13 +446,27 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param //// log2_max_frame_num_minus4 ---[0,12] data = (h264_GetVLCElement(parent, pInfo, false)); if ( data > 12) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif break; + } SPS->log2_max_frame_num_minus4 = (uint8_t)data; //// pic_order_cnt_type ---- [0,2] data = h264_GetVLCElement(parent, pInfo, false); if ( data > 2) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif break; + } SPS->pic_order_cnt_type = (uint8_t)data; @@ -445,7 +483,14 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param //// num_ref_frames_in_pic_order_cnt_cycle ---- [0,255] data = h264_GetVLCElement(parent, pInfo, false); if ( data > 255) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif break; + } SPS->num_ref_frames_in_pic_order_cnt_cycle = (uint8_t)data; @@ -468,7 +513,14 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param //// num_ref_frames ---[0,16] data = h264_GetVLCElement(parent, pInfo, false); if ( data > 16) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif break; + } SPS->num_ref_frames = (uint8_t)data; viddec_pm_get_bits(parent, &code, 1); diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c old mode 100644 new mode 100755 index b5b6c87..5936014 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c @@ -28,6 +28,11 @@ static void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserv } /* picture level info which will always be initialized */ 
     h264_init_Info_under_sps_pps_level(pInfo);
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+    pInfo->sw_bail = 0;
+#endif
+#endif
     return;
 }
@@ -252,6 +257,14 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt)

             h264_dpb_update_ref_lists( pInfo);

+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            if ((pInfo->dpb.ltref_frames_in_buffer + pInfo->dpb.ref_frames_in_buffer ) > pInfo->active_SPS.num_ref_frames)
+            {
+                pInfo->sw_bail = 1;
+            }
+#endif
+#endif
 #ifdef DUMP_HEADER_INFO
             dump_ref_list(pInfo);
 #endif
-- cgit v1.2.3

From 83e91d9ee70cd42330c4fcb750749fef2af76262 Mon Sep 17 00:00:00 2001
From: Andy Qiu
Date: Mon, 27 Jun 2011 11:23:55 -0700
Subject: Copy headers to target out headers.

BZ: 4012

Copy private header files to the target output directory.
videodecoder include headers are copied to libmix_videodecoder;
asfparser include headers are copied to libmix_asfparser.

Change-Id: I1e50e1dd7551c440d47c5e18acf2151c2696fb76
Signed-off-by: Andy Qiu
---
 asfparser/Android.mk    | 5 +++++
 videodecoder/Android.mk | 6 ++++++
 2 files changed, 11 insertions(+)

diff --git a/asfparser/Android.mk b/asfparser/Android.mk
index 83fafea..3fecfbf 100644
--- a/asfparser/Android.mk
+++ b/asfparser/Android.mk
@@ -13,6 +13,11 @@ LOCAL_SRC_FILES := \
 LOCAL_C_INCLUDES := \
     $(LOCAL_PATH)

+LOCAL_COPY_HEADERS_TO := libmix_asfparser
+
+LOCAL_COPY_HEADERS := \
+    AsfParserDefs.h \
+    AsfStreamParser.h

 LOCAL_MODULE_TAGS := optional
 LOCAL_MODULE := libasfparser

diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk
index 63b8619..3dbe77c 100644
--- a/videodecoder/Android.mk
+++ b/videodecoder/Android.mk
@@ -33,6 +33,12 @@ LOCAL_SHARED_LIBRARIES := \

 #LOCAL_SHARED_LIBRARIES += liblog

+LOCAL_COPY_HEADERS_TO := libmix_videodecoder
+
+LOCAL_COPY_HEADERS := \
+    VideoDecoderHost.h \
+    VideoDecoderInterface.h \
+    VideoDecoderDefs.h

 LOCAL_MODULE_TAGS := optional
 LOCAL_MODULE := libva_videodecoder
-- cgit v1.2.3

From 909a220facc871a5c8904fb5dd92b99aa49dc743 Mon Sep 17 00:00:00 2001
From: Shuo Liu
Date: Fri, 1 Jul 2011 13:41:53 +0800
Subject: libmix: Distinguish fatal decoder errors from other decoder status

BZ: 4192

For decoder errors that are not fatal, the decoder client can continue
decoding without treating the abnormal status as a failure. Add logic to
distinguish fatal errors from those that can safely be ignored.

Change-Id: I4ea66c91f34ea7c4ae9477339aae996168f52b0d
Signed-off-by: Shuo Liu
---
 videodecoder/VideoDecoderDefs.h | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h
index 40754ab..ccf3b46 100644
--- a/videodecoder/VideoDecoderDefs.h
+++ b/videodecoder/VideoDecoderDefs.h
@@ -163,5 +163,17 @@ typedef int32_t Decode_Status;
 #define NULL 0
 #endif

+inline bool checkFatalDecoderError(Decode_Status status) {
+    if (status == DECODE_NOT_STARTED ||
+        status == DECODE_NEED_RESTART ||
+        status == DECODE_NO_PARSER ||
+        status == DECODE_INVALID_DATA ||
+        status == DECODE_MEMORY_FAIL ||
+        status == DECODE_FAIL) {
+        return true;
+    } else {
+        return false;
+    }
+}

 #endif  // VIDEO_DECODER_DEFS_H_
-- cgit v1.2.3
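A minimal caller sketch (illustrative only, not part of the patch) of how a
client is expected to use the new helper: keep decoding on non-fatal status
and tear the decoder down only on fatal errors.

    Decode_Status status = decoder->decode(&buffer);
    if (status == DECODE_FORMAT_CHANGE) {
        // non-fatal: reconfigure the output, then resume decoding
    } else if (checkFatalDecoderError(status)) {
        decoder->stop();  // unrecoverable; the decoder must be restarted
    }
    // any other non-fatal error: keep feeding buffers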
From 8bb5be45480cc72f5bbd69ccea63622912fc64b3 Mon Sep 17 00:00:00 2001
From: ywan171
Date: Mon, 4 Jul 2011 23:32:15 +0800
Subject: libmix: correct the DPB management when an IDR is encountered.

BZ: 3951

Change-Id: I9b4cdcd510c8cebcf8a8e255561c035ecce0db1b

The reference frame buffer should be cleared when an IDR is encountered,
according to the H.264 spec; otherwise a stale reference frame may be
misused and the decoded picture will be incorrect.

Signed-off-by: ywan171
---
 mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
index ac4d13b..2dc9723 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
@@ -1102,6 +1102,15 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index)
     vbp_set_reference_frames_h264(parser, pic_parms);
     if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
     {
+        int frame_idx;
+        for (frame_idx = 0; frame_idx < 16; frame_idx++)
+        {
+            pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE;
+            pic_parms->ReferenceFrames[frame_idx].frame_idx = 0;
+            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID;
+            pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0;
+            pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0;
+        }
         /* num of reference frame is 0 if current picture is IDR */
         pic_parms->num_ref_frames = 0;
     }
-- cgit v1.2.3

From e3ceeef3c73e3065f1a0ee28b6c8d8150e1af389 Mon Sep 17 00:00:00 2001
From: ywan171
Date: Mon, 25 Jul 2011 18:09:46 +0800
Subject: libmix: fix the frame ordering issue for AVC

BZ: 3255

POC should be a signed integer. Each time an IDR arrives, set
NextFramePOC to a negative value instead of forcing the IDR to be
output; frame output shouldn't be forced when the first-frame flag
is set.

Change-Id: I870d3cd05915f4fac82daf1aecd0facfad67921b
Signed-off-by: ywan171
---
 videodecoder/VideoDecoderAVC.cpp  |  35 +++--
 videodecoder/VideoDecoderBase.cpp | 314 +++++++++++++++++++++++++-------------
 videodecoder/VideoDecoderBase.h   |   6 +-
 videodecoder/VideoDecoderDefs.h   |   2 +-
 4 files changed, 233 insertions(+), 124 deletions(-)

diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp
index 9bf06fb..98f54ad 100644
--- a/videodecoder/VideoDecoderAVC.cpp
+++ b/videodecoder/VideoDecoderAVC.cpp
@@ -124,7 +124,7 @@ Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h

     // Don't remove the following codes, it can be enabled for debugging DPB.
#if 0 - for (int i = 0; i < data->num_pictures; i++) { + for (unsigned int i = 0; i < data->num_pictures; i++) { VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic; VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d, reference = %d", i, @@ -201,7 +201,7 @@ Decode_Status VideoDecoderAVC::beginDecodingFrame(vbp_data_h264 *data) { // TODO: Set the discontinuity flag mAcquiredBuffer->renderBuffer.flag = 0; mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS; - mAcquiredBuffer->pictureOrder = picture->TopFieldOrderCnt; + mAcquiredBuffer->pictureOrder = getPOC(picture); status = continueDecodingFrame(data); // surface buffer is released if decode fails @@ -262,7 +262,10 @@ Decode_Status VideoDecoderAVC::decodeSlice(vbp_data_h264 *data, uint32_t picInde CHECK_VA_STATUS("vaEndPicture"); // for interlace content, top field may be valid only after the second field is parsed - mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; + int32_t poc = getPOC(&(picParam->CurrPic)); + if (poc < mAcquiredBuffer->pictureOrder) { + mAcquiredBuffer->pictureOrder = poc; + } } // Check there is no reference frame loss before decoding a frame @@ -568,6 +571,7 @@ void VideoDecoderAVC::clearAsReference(int toggle) { } Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) { + int32_t DPBSize = getDPBSize(data); updateFormatInfo(data); //Use high profile for all kinds of H.264 profiles (baseline, main and high) except for constrained baseline @@ -580,10 +584,8 @@ Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) { vaProfile = VAProfileH264ConstrainedBaseline; } } - // TODO: use maxDPBSize to set window size - // 1024 * MaxDPB / ( PicWidthInMbs * FrameHeightInMbs * 384 ), 16 - //VideoDecoderBase::setOutputWindowSize(getDPBSize(data)); - return VideoDecoderBase::setupVA(data->codec_data->num_ref_frames + AVC_EXTRA_SURFACE_NUMBER, vaProfile); + VideoDecoderBase::setOutputWindowSize(DPBSize); + return VideoDecoderBase::setupVA(DPBSize + AVC_EXTRA_SURFACE_NUMBER, vaProfile); } void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { @@ -673,13 +675,13 @@ Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) { bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data) { if (data->num_pictures == 0) { - LOGE("num_pictures == 0"); + ETRACE("num_pictures == 0"); return true; } vbp_picture_data_h264* picData = data->pic_data; if (picData->num_slices == 0) { - LOGE("num_slices == 0"); + ETRACE("num_slices == 0"); return true; } @@ -689,17 +691,17 @@ bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data) { if (picData->slc_data[0].slc_parms.first_mb_in_slice != 0) { // not the first slice, assume it is continuation of a partial frame // TODO: check if it is new frame boundary as the first slice may get lost in streaming case. 
-        LOGW("first_mb_in_slice != 0");
+        WTRACE("first_mb_in_slice != 0");
     } else {
         if ((picData->pic_parms->CurrPic.flags & fieldFlags) == fieldFlags) {
-            LOGE("Current picture has both odd field and even field.");
+            ETRACE("Current picture has both odd field and even field.");
         }
         // current picture is a field or a frame, and buffer contains the first slice, check if the current picture and
         // the last picture form an opposite field pair
         if (((mLastPictureFlags | picData->pic_parms->CurrPic.flags) & fieldFlags) == fieldFlags) {
             // opposite field
             newFrame = false;
-            LOGW("current picture is not at frame boundary.");
+            WTRACE("current picture is not at frame boundary.");
             mLastPictureFlags = 0;
         } else {
             newFrame = true;
@@ -718,6 +720,7 @@
 }

 int32_t VideoDecoderAVC::getDPBSize(vbp_data_h264 *data) {
+    // 1024 * MaxDPB / ( PicWidthInMbs * FrameHeightInMbs * 384 ), 16
     struct DPBTable {
         int32_t level;
         float maxDPB;
@@ -753,7 +756,6 @@ int32_t VideoDecoderAVC::getDPBSize(vbp_data_h264 *data) {
     int32_t maxDPBSize = maxDPB * 1024 / (
         (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) *
         (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) *
-        (data->codec_data->frame_mbs_only_flag ? 1 : 2) *
         384);

     if (maxDPBSize > 16) {
@@ -761,8 +763,13 @@
         maxDPBSize = 16;
     } else if (maxDPBSize == 0) {
         maxDPBSize = 3;
     }
+    if(maxDPBSize < data->codec_data->num_ref_frames) {
+        maxDPBSize = data->codec_data->num_ref_frames;
+    }

-    LOGI("maxDPBSize is %d", maxDPBSize);
+    // add one extra frame for current frame.
+    maxDPBSize += 1;
+    ITRACE("maxDPBSize = %d, num_ref_frame = %d", maxDPBSize, data->codec_data->num_ref_frames);
     return maxDPBSize;
 }
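Worked example of the DPB sizing in getDPBSize() above (stream parameters
assumed for illustration): a 1920x1088 level-4.1 stream has MaxDPB = 12288
(in 1024-byte units, per H.264 Table A-1) and measures 120 x 68 macroblocks,
so

    maxDPBSize = 12288 * 1024 / (120 * 68 * 384) = 4

After the clamp to [3, 16], the max() against num_ref_frames (assuming
num_ref_frames <= 4 here) and the extra slot for the current frame,
setupVA() ends up being called with 5 + AVC_EXTRA_SURFACE_NUMBER surfaces.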
diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp
index 876a143..912c347 100644
--- a/videodecoder/VideoDecoderBase.cpp
+++ b/videodecoder/VideoDecoderBase.cpp
@@ -29,7 +29,8 @@
 #include
 #define INVALID_PTS ((uint64_t)-1)
-#define INVALID_POC ((uint32_t)-1)
+#define MAXIMUM_POC 0x7FFFFFFF
+#define MINIMUM_POC 0x80000000
 #define ANDROID_DISPLAY_HANDLE 0x18C34078

 // TODO: check what is the best number. Must be at least 2 to support one backward reference frame.
@@ -55,11 +56,10 @@ VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type)
     mSizeChanged(false),

     // private member variables
-    mFirstFrame(true),
     mLowDelay(false),
     mRawOutput(false),
     mManageReference(true),
-    mOutputMethod (OUTPUT_BY_PCT),
+    mOutputMethod(OUTPUT_BY_PCT),
     mOutputWindowSize(OUTPUT_WINDOW_SIZE),
     mNumSurfaces(0),
     mSurfaceBuffers(NULL),
@@ -68,7 +68,7 @@
     mSurfaces(NULL),
     mSurfaceUserPtr(NULL),
     mSurfaceAcquirePos(0),
-    mNextOutputPOC(0),
+    mNextOutputPOC(MINIMUM_POC),
     mParserType(type),
     mParserHandle(NULL) {

@@ -125,12 +125,11 @@ void VideoDecoderBase::stop(void) {
     mSizeChanged = false;

     // private variables
-    mFirstFrame = true;
     mLowDelay = false;
     mRawOutput = false;
     mNumSurfaces = 0;
     mSurfaceAcquirePos = 0;
-    mNextOutputPOC = 0;
+    mNextOutputPOC = MINIMUM_POC;
     mVideoFormatInfo.valid = false;

     if (mParserHandle){
@@ -150,7 +149,7 @@ void VideoDecoderBase::flush(void) {
     // avoid setting mSurfaceAcquirePos to 0 as it may cause tearing
     // (surface is still being rendered)
     mSurfaceAcquirePos = (mSurfaceAcquirePos + 1) % mNumSurfaces;
-    mNextOutputPOC = 0;
+    mNextOutputPOC = MINIMUM_POC;
     mCurrentPTS = INVALID_PTS;
     mAcquiredBuffer = NULL;
     mLastReference = NULL;
@@ -160,7 +159,6 @@ void VideoDecoderBase::flush(void) {
     mDecodingFrame = false;
     mSizeChanged = false;

-    mFirstFrame = true;
     // initialize surface buffer without resetting mapped/raw data
     initSurfaceBuffer(false);
 }
@@ -186,143 +184,244 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) {

     // output by position (the first buffer)
     VideoSurfaceBuffer *outputByPos = mOutputHead;

-    if (mLowDelay || mFirstFrame) {
+    if (mLowDelay) {
         mOutputHead = mOutputHead->next;
         if (mOutputHead == NULL) {
             mOutputTail = NULL;
         }
-        mFirstFrame = false;
-        mNextOutputPOC = outputByPos->pictureOrder + 1;
-
-        //VTRACE("Output POC %u for display (pts = %.2f)", outputByPos->pictureOrder, outputByPos->renderBuffer.timeStamp/1E6);
         return &(outputByPos->renderBuffer);
     }

     // output by presentation time stamp (the smallest pts)
-
- // for output by POC: - // VideoSurfaceBuffer *output = NULL; + if (mOutputMethod == OUTPUT_BY_POC) { + output = findOutputByPoc(draining); + } else if (mOutputMethod == OUTPUT_BY_PCT) { + output = findOutputByPct(draining); + } else { + ETRACE("Invalid output method."); + return NULL; + } + if (output == NULL) { + return NULL; + } + + if (output != outputByPts) { + // swap time stamp + uint64_t ts = output->renderBuffer.timeStamp; + output->renderBuffer.timeStamp = outputByPts->renderBuffer.timeStamp; + outputByPts->renderBuffer.timeStamp = ts; + } + + if (output != outputByPos) { + // remove this output from middle or end of the list + VideoSurfaceBuffer *p = outputByPos; + while (p->next != output) { + p = p->next; + } + p->next = output->next; + if (mOutputTail == output) { + mOutputTail = p; + } + } else { + // remove this output from head of the list + mOutputHead = mOutputHead->next; + if (mOutputHead == NULL) { + mOutputTail = NULL; + } + } + //VTRACE("Output POC %d for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6); + return &(output->renderBuffer); +} + +VideoSurfaceBuffer* VideoDecoderBase::findOutputByPts(bool draining) { + // output by presentation time stamp - buffer with the smallest time stamp is output VideoSurfaceBuffer *p = mOutputHead; - int32_t reference = 0; - int32_t count = 0; + VideoSurfaceBuffer *outputByPts = NULL; uint64_t pts = INVALID_PTS; - uint32_t poc = INVALID_POC; do { if ((uint64_t)(p->renderBuffer.timeStamp) <= pts) { // find buffer with the smallest PTS pts = p->renderBuffer.timeStamp; outputByPts = p; } + p = p->next; + } while (p != NULL); - if (mOutputMethod == OUTPUT_BY_PCT) { - if (p->referenceFrame) { - reference++; - } else if (output == NULL) { - // first non-reference frame - output = p; - } + return outputByPts; +} - if (reference > 1 && output == NULL) { - // first reference frame - output = outputByPos; - } - } else if (mOutputMethod == OUTPUT_BY_POC) { - count++; - if (p->pictureOrder == 0) { - // any picture before this POC (new IDR) must be output - if (output == NULL) { - output = p; - mNextOutputPOC = 1; - } else { - mNextOutputPOC = output->pictureOrder + 1; - } +VideoSurfaceBuffer* VideoDecoderBase::findOutputByPct(bool draining) { + // output by picture coding type (PCT) + // if there is more than one reference frame, the first reference frame is ouput, otherwise, + // output non-reference frame if there is any. + + VideoSurfaceBuffer *p = mOutputHead; + VideoSurfaceBuffer *outputByPct = NULL; + int32_t reference = 0; + do { + if (p->referenceFrame) { + reference++; + if (reference > 1) { + // mOutputHead must be a reference frame + outputByPct = mOutputHead; break; } - if (p->pictureOrder < poc && p->pictureOrder >= mNextOutputPOC) { - // this POC meets ouput criteria. - poc = p->pictureOrder; - output = p; - } - if (poc == mNextOutputPOC || count == mOutputWindowSize) { - if (output != NULL) { - // this indicates two cases: - // 1) the next output POC is found. - // 2) output queue is full and there is at least one buffer meeting the output criteria. - mNextOutputPOC = output->pictureOrder + 1; - break; - } else { - // this indicates output queue is full and no buffer in the queue meets the output criteria - // restart processing as queue is FULL and output criteria is changed. 
(next output POC is 0) - mNextOutputPOC = 0; - count = 0; - reference = 0; - poc = INVALID_POC; - pts = INVALID_PTS; - p = mOutputHead; - continue; - } - } - if (p->next == NULL) { - output = NULL; - } - } else { - ETRACE("Invalid output method."); - return NULL; + // first non-reference frame + outputByPct = p; + break; } - p = p->next; } while (p != NULL); - if (output != NULL) { - if (output != outputByPts) { - // swap time stamp - uint64_t ts = output->renderBuffer.timeStamp; - output->renderBuffer.timeStamp = outputByPts->renderBuffer.timeStamp; - outputByPts->renderBuffer.timeStamp = ts; + if (outputByPct == NULL && draining) { + outputByPct = mOutputHead; + } + return outputByPct; +} + +#if 0 +VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) { + // output by picture order count (POC) + // Output criteria: + // if there is IDR frame (POC == 0), all the frames before IDR must be output; + // Otherwise, if draining flag is set or list is full, frame with the least POC is output; + // Otherwise, NOTHING is output + + int32_t dpbFullness = 0; + for (int32_t i = 0; i < mNumSurfaces; i++) { + // count num of reference frames + if (mSurfaceBuffers[i].asReferernce) { + dpbFullness++; } + } - if (output != outputByPos) { - // remove this output from middle or end of the list - p = outputByPos; - while (p->next != output) { - p = p->next; - } - p->next = output->next; - if (mOutputTail == output) { - mOutputTail = p; - } - } else { - // remove this output from head of the list - mOutputHead = mOutputHead->next; - if (mOutputHead == NULL) { - mOutputTail = NULL; + if (mAcquiredBuffer && mAcquiredBuffer->asReferernce) { + // frame is being decoded and is not ready for output yet + dpbFullness--; + } + + VideoSurfaceBuffer *p = mOutputHead; + while (p != NULL) { + // count dpbFullness with non-reference frame in the output queue + if (p->asReferernce == false) { + dpbFullness++; + } + p = p->next; + } + +Retry: + p = mOutputHead; + VideoSurfaceBuffer *outputByPoc = NULL; + int32_t count = 0; + int32_t poc = MAXIMUM_POC; + + do { + if (p->pictureOrder == 0) { + // output picture with the least POC before IDR + if (outputByPoc != NULL) { + mNextOutputPOC = outputByPoc->pictureOrder + 1; + return outputByPoc; + } else { + mNextOutputPOC = MINIMUM_POC; } } - //VTRACE("Output POC %u for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6); - return &(output->renderBuffer); - } + // POC of the output candidate must not be less than mNextOutputPOC + if (p->pictureOrder < mNextOutputPOC) { + break; + } - if (draining){ - // output buffer in the head of list - mOutputHead = mOutputHead->next; - if (mOutputHead == NULL) { - mOutputTail = NULL; + if (p->pictureOrder < poc) { + // update the least POC. 
+ poc = p->pictureOrder; + outputByPoc = p; } - return &(outputByPos->renderBuffer); + count++; + p = p->next; + } while (p != NULL && count < mOutputWindowSize); + + if (draining == false && dpbFullness < mOutputWindowSize) { + // list is not full and we are not in draining state + // if DPB is already full, one frame must be output + return NULL; } - return NULL; + if (outputByPoc == NULL) { + mNextOutputPOC = MINIMUM_POC; + goto Retry; + } + + // for debugging purpose + if (outputByPoc->pictureOrder != 0 && outputByPoc->pictureOrder < mNextOutputPOC) { + ETRACE("Output POC is not incremental, expected %d, actual %d", mNextOutputPOC, outputByPoc->pictureOrder); + //gaps_in_frame_num_value_allowed_flag is not currently supported + } + + mNextOutputPOC = outputByPoc->pictureOrder + 1; + + return outputByPoc; } +#else +VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) { + VideoSurfaceBuffer *output = NULL; + VideoSurfaceBuffer *p = mOutputHead; + int32_t count = 0; + int32_t poc = MAXIMUM_POC; + VideoSurfaceBuffer *outputleastpoc = mOutputHead; + do { + count++; + if (p->pictureOrder == 0) { + // any picture before this POC (new IDR) must be output + if (output == NULL) { + mNextOutputPOC = MINIMUM_POC; + // looking for any POC with negative value + } else { + mNextOutputPOC = output->pictureOrder + 1; + break; + } + } + if (p->pictureOrder < poc && p->pictureOrder >= mNextOutputPOC) { + // this POC meets ouput criteria. + poc = p->pictureOrder; + output = p; + outputleastpoc = p; + } + if (poc == mNextOutputPOC || count == OUTPUT_WINDOW_SIZE) { + if (output != NULL) { + // this indicates two cases: + // 1) the next output POC is found. + // 2) output queue is full and there is at least one buffer meeting the output criteria. + mNextOutputPOC = output->pictureOrder + 1; + break; + } else { + // this indicates output queue is full and no buffer in the queue meets the output criteria + // restart processing as queue is FULL and output criteria is changed. 
(next output POC is 0) + mNextOutputPOC = MINIMUM_POC; + count = 0; + poc = MAXIMUM_POC; + p = mOutputHead; + continue; + } + } + if (p->next == NULL) { + output = NULL; + } + + p = p->next; + } while (p != NULL); + + if (draining == true && output == NULL) { + output = outputleastpoc; + } + return output; +} +#endif Decode_Status VideoDecoderBase::acquireSurfaceBuffer(void) { if (mVAStarted == false) { @@ -404,6 +503,7 @@ Decode_Status VideoDecoderBase::outputSurfaceBuffer(void) { } // frame is successfly decoded to the current surface, it is ready for output mAcquiredBuffer->renderBuffer.renderDone = false; + // decoder must set "asReference and referenceFrame" flags properly // update reference frames @@ -432,7 +532,7 @@ Decode_Status VideoDecoderBase::outputSurfaceBuffer(void) { mOutputTail = mAcquiredBuffer; mOutputTail->next = NULL; - //VTRACE("Pushing POC %u to queue (pts = %.2f)", mAcquiredBuffer->pictureOrder, mAcquiredBuffer->renderBuffer.timeStamp/1E6); + //VTRACE("Pushing POC %d to queue (pts = %.2f)", mAcquiredBuffer->pictureOrder, mAcquiredBuffer->renderBuffer.timeStamp/1E6); mAcquiredBuffer = NULL; mSurfaceAcquirePos = (mSurfaceAcquirePos + 1 ) % mNumSurfaces; diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 39ffe07..da1a655 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -59,6 +59,9 @@ protected: // acquired surface buffer is not used virtual Decode_Status releaseSurfaceBuffer(void); virtual Decode_Status endDecodingFrame(bool dropFrame); + virtual VideoSurfaceBuffer* findOutputByPoc(bool draining = false); + virtual VideoSurfaceBuffer* findOutputByPct(bool draining = false); + virtual VideoSurfaceBuffer* findOutputByPts(bool draining = false); virtual Decode_Status setupVA(int32_t numSurface, VAProfile profile); virtual Decode_Status terminateVA(void); virtual Decode_Status parseBuffer(uint8_t *buffer, int32_t size, bool config, void** vbpData); @@ -106,7 +109,6 @@ protected: }; private: - bool mFirstFrame; bool mLowDelay; // when true, decoded frame is immediately output for rendering bool mRawOutput; // whether to output NV12 raw data bool mManageReference; // this should stay true for VC1/MP4 decoder, and stay false for AVC decoder. 
AVC handles reference frame using DPB @@ -120,7 +122,7 @@ private: VASurfaceID *mSurfaces; // surfaces array uint8_t **mSurfaceUserPtr; // mapped user space pointer int32_t mSurfaceAcquirePos; // position of surface to start acquiring - uint32_t mNextOutputPOC; // Picture order count of next output + int32_t mNextOutputPOC; // Picture order count of next output _vbp_parser_type mParserType; void *mParserHandle; diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index ccf3b46..59a5e04 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -110,7 +110,7 @@ struct VideoRenderBuffer { struct VideoSurfaceBuffer { VideoRenderBuffer renderBuffer; - uint32_t pictureOrder; // picture order count, valid only for AVC format + int32_t pictureOrder; // picture order count, valid only for AVC format bool referenceFrame; // indicated whether frame associated with this surface is a reference I/P frame bool asReferernce; // indicated wheter frame is used as reference (as a result surface can not be used for decoding) VideoFrameRawData *mappedData; -- cgit v1.2.3 From 851de78423f3aca599fae2f427b8c985aa5a2f42 Mon Sep 17 00:00:00 2001 From: Linda Cline Date: Tue, 19 Jul 2011 13:57:14 -0700 Subject: Libmix: Fixed surface pool allocation BZ: 4928, 4144 Changes to frame manager and AVC object to correctly support AVC specification Change-Id: Id80920257f08fbec7b0c8d58eed12d98af8fe51c --- mix_video/src/mixframemanager.cpp | 97 +++++++++++++++++++---------------- mix_video/src/mixframemanager.h | 3 ++ mix_video/src/mixvideoformat_h264.cpp | 89 +++++++++++++++++++++++++++++++- 3 files changed, 143 insertions(+), 46 deletions(-) diff --git a/mix_video/src/mixframemanager.cpp b/mix_video/src/mixframemanager.cpp index adfc53e..cbd2439 100644 --- a/mix_video/src/mixframemanager.cpp +++ b/mix_video/src/mixframemanager.cpp @@ -44,6 +44,7 @@ MixFrameManager::MixFrameManager() ,next_frame_picnumber(0) ,max_enqueue_size(MIX_MAX_ENQUEUE_SIZE) ,max_picture_number((uint32)-1) + ,dpb_size((uint32)-1) ,ref_count(1) { } @@ -186,6 +187,15 @@ MIX_RESULT mix_framemanager_set_max_picture_number( return MIX_RESULT_SUCCESS; } +MIX_RESULT mix_framemanager_set_dpb_size( + MixFrameManager *fm, uint32 num) { + fm->mLock.lock(); + fm->dpb_size = num < MIX_MAX_ENQUEUE_SIZE ? num : MIX_MAX_ENQUEUE_SIZE; + LOG_V("dpb is %d\n", fm->dpb_size); + fm->mLock.unlock(); + return MIX_RESULT_SUCCESS; +} + MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) { MixVideoFrame *frame = NULL; @@ -466,11 +476,15 @@ MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVide { int i, len; MixVideoFrame* p; + MixVideoFrame* outp; + int outpicnum; + int prevpicnum; uint32 picnum; uint32 next_picnum_pending; int least_poc_index; uint32 least_poc; + uint32 maxframeinqueue; len = j_slist_length(fm->frame_list); @@ -479,61 +493,56 @@ retry: least_poc_index = -1; least_poc = (uint32)-1; - for (i = 0; i < len; ) + if ((fm->dpb_size == -1) || (fm->dpb_size == len)) { - p = (MixVideoFrame*)j_slist_nth_data(fm->frame_list, i); + maxframeinqueue = len; + maxframeinqueue = (maxframeinqueue < (MIX_MAX_ENQUEUE_SIZE + 1)) ? 
maxframeinqueue : (MIX_MAX_ENQUEUE_SIZE + 1); + } + else + { + maxframeinqueue = 0; + } + + if (maxframeinqueue) + { + p = (MixVideoFrame*)j_slist_nth_data(fm->frame_list, 0); mix_videoframe_get_displayorder(p, &picnum); - if (picnum == fm->next_frame_picnumber) + outpicnum = picnum; + prevpicnum = picnum; + outp = p; + + for (i = 1; i < maxframeinqueue;i++ ) { - fm->frame_list = j_slist_remove(fm->frame_list, p); - mix_framemanager_update_timestamp(fm, p); - *mvf = p; - LOG_V("frame is dequeued, poc = %d.\n", fm->next_frame_picnumber); - fm->next_frame_picnumber++; - //if (fm->next_frame_picnumber == fm->max_picture_number) - // fm->next_frame_picnumber = 0; - return MIX_RESULT_SUCCESS; - } + p = (MixVideoFrame*)j_slist_nth_data(fm->frame_list, i); + mix_videoframe_get_displayorder(p, &picnum); - if(picnum == 0) { - if(i == 0) { - fm->next_frame_picnumber = 0; - } else { - fm->next_frame_picnumber = least_poc; - i = least_poc_index; + if (picnum ==0) + { + break; + } + else if (picnum < outpicnum) + { + outpicnum = picnum; + outp = p; + prevpicnum = picnum; + } + else //(picnum >= outpicnum) + { + prevpicnum = picnum; } - continue; - } - if(picnum < least_poc) { - least_poc = picnum; - least_poc_index = i; - LOG_V("least_poc_index = %d\n", least_poc_index); } - ++i; + fm->frame_list = j_slist_remove(fm->frame_list, (void *)outp); + mix_framemanager_update_timestamp(fm, outp); + *mvf = outp; - if (picnum > fm->next_frame_picnumber && - picnum < next_picnum_pending) - { - next_picnum_pending = picnum; - } - if (picnum < fm->next_frame_picnumber && - fm->next_frame_picnumber - picnum < 8) - { - // the smallest value of "max_pic_order_cnt_lsb_minus4" is 16. If the distance of "next frame pic number" - // to the pic number in the list is less than half of 16, it is safe to assume that pic number - // is reset when a new IDR is encoded. (where pic numbfer of top or bottom field must be 0, subclause 8.2.1). - LOG_V("picture number is reset to %d, next pic number is %d, next pending number is %d.\n", - picnum, fm->next_frame_picnumber, next_picnum_pending); - break; - } + return MIX_RESULT_SUCCESS; } - if (len <= fm->max_enqueue_size && fm->eos == FALSE) { LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. 
(List size = %d)\n", - fm->next_frame_picnumber, next_picnum_pending, len); + fm->next_frame_picnumber, next_picnum_pending, len); return MIX_RESULT_FRAME_NOTAVAIL; } @@ -541,7 +550,7 @@ retry: if (next_picnum_pending != (uint32)-1) { LOG_V("picture number has gap, jumping from %d to %d.\n", - fm->next_frame_picnumber, next_picnum_pending); + fm->next_frame_picnumber, next_picnum_pending); fm->next_frame_picnumber = next_picnum_pending; goto retry; @@ -549,7 +558,7 @@ retry: // picture number roll-over LOG_V("picture number is rolled over, resetting next picnum from %d to 0.\n", - fm->next_frame_picnumber); + fm->next_frame_picnumber); fm->next_frame_picnumber = 0; goto retry; diff --git a/mix_video/src/mixframemanager.h b/mix_video/src/mixframemanager.h index 684857d..ca49ef0 100644 --- a/mix_video/src/mixframemanager.h +++ b/mix_video/src/mixframemanager.h @@ -52,6 +52,7 @@ public: int max_enqueue_size; uint32 max_picture_number; uint32 ref_count; + int dpb_size; }; @@ -119,6 +120,8 @@ MIX_RESULT mix_framemanager_set_max_enqueue_size( MIX_RESULT mix_framemanager_set_max_picture_number( MixFrameManager *fm, uint32 num); +MIX_RESULT mix_framemanager_set_dpb_size( + MixFrameManager *fm, uint32 num); /* * Get Display Order Mode diff --git a/mix_video/src/mixvideoformat_h264.cpp b/mix_video/src/mixvideoformat_h264.cpp index 0bbe19a..158c456 100644 --- a/mix_video/src/mixvideoformat_h264.cpp +++ b/mix_video/src/mixvideoformat_h264.cpp @@ -349,6 +349,7 @@ MIX_RESULT MixVideoFormat_H264::EndOfStream() { Unlock(); //Call Frame Manager with _eos() ret = mix_framemanager_eos(this->framemgr); + mix_framemanager_set_dpb_size(this->framemgr, -1); LOG_V( "End\n"); return ret; } @@ -484,10 +485,94 @@ MIX_RESULT MixVideoFormat_H264::_initialize_va(vbp_data_h264 *data) { num_ref_pictures = data->codec_data->num_ref_frames; } + int pic_size; + int size = 3; + if (data) + { + pic_size = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * (data->codec_data->frame_mbs_only_flag?1:2) * 384; + + switch (data->codec_data->level_idc) + { + case 9: + size = 152064; + break; + case 10: + size = 152064; + break; + case 11: + size = 345600; + break; + case 12: + size = 912384; + break; + case 13: + size = 912384; + break; + case 20: + size = 912384; + break; + case 21: + size = 1824768; + break; + case 22: + size = 3110400; + break; + case 30: + size = 3110400; + break; + case 31: + size = 6912000; + break; + case 32: + size = 7864320; + break; + case 40: + size = 12582912; + break; + case 41: + size = 12582912; + break; + case 42: + size = 13369344; + break; + case 50: + size = 42393600; + break; + case 51: + size = 70778880; + break; + default: + //error ("undefined level", 500); + break; + } + + if (pic_size) + { + size /= pic_size; + if (size == 0) + { + size = 3; + } + else if (size > 16) + { + size = 15; + } + } + else + { + size = 3; + } + } + + mix_framemanager_set_dpb_size(this->framemgr, size); + //Adding 1 to work around VBLANK issue, and another 1 to compensate cached frame that // will not start decoding until a new frame is received. - this->va_num_surfaces = 1 + 1 + this->extra_surfaces + - (((num_ref_pictures + 3) < MIX_VIDEO_H264_SURFACE_NUM) ? (num_ref_pictures + 3) : MIX_VIDEO_H264_SURFACE_NUM); + this->va_num_surfaces = 1 + 1 + this->extra_surfaces + (((size + 3) < + MIX_VIDEO_H264_SURFACE_NUM) ? + (size + 3) + : MIX_VIDEO_H264_SURFACE_NUM); + this->va_num_surfaces = this->va_num_surfaces > 24 ? 
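// Aside: the level_idc switch above is the MaxDPB table (in bytes, MaxDPB x
// 1024) from H.264 Annex A, and dividing MaxDPB by the per-frame storage
// yields the DPB depth in frames. A worked example, assuming a hypothetical
// 1920x1088 level-4.0 stream with frame_mbs_only_flag = 1:
//
//     pic_size = 120 * 68 * 384 = 3133440 bytes   (120x68 MBs, 384 bytes per MB)
//     size     = 12582912 / 3133440 = 4 frames    (integer division)
//
// so the frame manager is told a DPB depth of 4, and (assuming 4 + 3 is below
// MIX_VIDEO_H264_SURFACE_NUM) va_num_surfaces becomes
// 1 + 1 + extra_surfaces + (4 + 3), finally clamped by the line here to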
24 : this->va_num_surfaces; this->va_surfaces = reinterpret_cast(malloc(sizeof(VASurfaceID)*this->va_num_surfaces)); -- cgit v1.2.3 From 3d2fc8eaa2c3fc97aa4b48802022806e18caad88 Mon Sep 17 00:00:00 2001 From: Andy Qiu Date: Thu, 4 Aug 2011 16:37:08 -0700 Subject: Logging mechanism of video middleware is not Android conformant, which makes it difficult to capture or filter messages based on need. Change tracing utilities to use Android's logging utilities. BZ 6741 Change-Id: I9509612cfc502fe9fdef8de38e2c6f6ffe848aca Signed-off-by: Andy Qiu --- mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h | 10 ++++++++++ mix_vbp/viddec_fw/fw/parser/vbp_trace.h | 25 +++++++++++++++++-------- videodecoder/VideoDecoderTrace.h | 21 ++++++++++++++------- 3 files changed, 41 insertions(+), 15 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h index 21894c9..ca92d17 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h @@ -17,9 +17,11 @@ typedef unsigned int size_t; #define LOG(...) #else + #include #include #include +#ifndef VBP enum { NONE = 0, CRITICAL, @@ -33,6 +35,7 @@ enum { #define LOG( log_lev, format, args ... ) \ if (vc1_log_level >= log_lev) { OS_INFO("%s[%d]:: " format "\n", __FUNCTION__ , __LINE__ , ## args ); } #endif +#endif #include "viddec_fw_workload.h" #include "vc1parse_common_defs.h" @@ -42,10 +45,17 @@ enum { extern "C" { #endif +#ifndef VBP #define LOG_CRIT(format, args ... ) LOG( CRITICAL, format, ## args) #define LOG_WARN(format, args ... ) LOG( WARNING, format, ## args) #define LOG_INFO(format, args ... ) LOG( INFO, format, ## args) #define LOG_DEBUG(format, args ... ) LOG( DEBUG, format, ## args) +#else +#define LOG_CRIT(format, args ... ) +#define LOG_WARN(format, args ... ) +#define LOG_INFO(format, args ... ) +#define LOG_DEBUG(format, args ... ) +#endif // Seems to be hardware bug: DO NOT TRY TO SWAP BITPLANE0 and BITPLANE2 // Block Control Register at offset 222C uses Bitplane_raw_ID0 to indicate directmb/fieldtx while diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h index 253a85c..ba916b9 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h @@ -26,21 +26,30 @@ extern void vbp_trace_util(const char* cat, const char* fun, int line, const cha #define VBP_TRACE_UTIL(cat, format, ...) \ vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) + +#define ETRACE(format, ...) VBP_TRACE_UTIL("ERROR: ", format, ##__VA_ARGS__) +#define WTRACE(format, ...) VBP_TRACE_UTIL("WARNING: ", format, ##__VA_ARGS__) +#define ITRACE(format, ...) VBP_TRACE_UTIL("INFO: ", format, ##__VA_ARGS__) +#define VTRACE(format, ...) VBP_TRACE_UTIL("VERBOSE: ", format, ##__VA_ARGS__) + + #else +// For Android OS + +//#define LOG_NDEBUG 0 + +#define LOG_TAG "MixVBP" + #include -#define VBP_TRACE_UTIL(cat, format, ...) \ -__android_log_print(ANDROID_LOG_VERBOSE, "mixvbp : "cat, "%s() : %d: "format, \ -__FUNCTION__, __LINE__, ##__VA_ARGS__) +#define ETRACE(...) LOGE(__VA_ARGS__) +#define WTRACE(...) LOGW(__VA_ARGS__) +#define ITRACE(...) LOGI(__VA_ARGS__) +#define VTRACE(...) LOGV(__VA_ARGS__) #endif -#define ETRACE(format, ...) VBP_TRACE_UTIL("ERROR: ", format, ##__VA_ARGS__) -#define WTRACE(format, ...) VBP_TRACE_UTIL("WARNING: ", format, ##__VA_ARGS__) -#define ITRACE(format, ...) VBP_TRACE_UTIL("INFO: ", format, ##__VA_ARGS__) -#define VTRACE(format, ...) 
VBP_TRACE_UTIL("VERBOSE: ", format, ##__VA_ARGS__)
-
 #else /* if VBP_TRACE is not defined */
 
 #define ETRACE(format, ...)
diff --git a/videodecoder/VideoDecoderTrace.h b/videodecoder/VideoDecoderTrace.h
index 809a9ad..8de40e0 100644
--- a/videodecoder/VideoDecoderTrace.h
+++ b/videodecoder/VideoDecoderTrace.h
@@ -42,20 +42,27 @@ extern void TraceVideoDecoder(const char* cat, const char* fun, int line, const
 #define VIDEO_DECODER_TRACE(cat, format, ...) \
 TraceVideoDecoder(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__)
 
+#define ETRACE(format, ...) VIDEO_DECODER_TRACE("ERROR: ", format, ##__VA_ARGS__)
+#define WTRACE(format, ...) VIDEO_DECODER_TRACE("WARNING: ", format, ##__VA_ARGS__)
+#define ITRACE(format, ...) VIDEO_DECODER_TRACE("INFO: ", format, ##__VA_ARGS__)
+#define VTRACE(format, ...) VIDEO_DECODER_TRACE("VERBOSE: ", format, ##__VA_ARGS__)
+
 #else
+// for Android OS
+
+//#define LOG_NDEBUG 0
+
+#define LOG_TAG "VideoDecoder"
 
 #include <utils/Log.h>
 
-#define VIDEO_DECODER_TRACE(cat, format, ...) \
-__android_log_print(ANDROID_LOG_VERBOSE, "VideoDecoder : "cat,format, ##__VA_ARGS__)
+#define ETRACE(...) LOGE(__VA_ARGS__)
+#define WTRACE(...) LOGW(__VA_ARGS__)
+#define ITRACE(...) LOGI(__VA_ARGS__)
+#define VTRACE(...) LOGV(__VA_ARGS__)
 
 #endif
 
-#define ETRACE(format, ...) VIDEO_DECODER_TRACE("ERROR: ", format, ##__VA_ARGS__)
-#define WTRACE(format, ...) VIDEO_DECODER_TRACE("WARNING: ", format, ##__VA_ARGS__)
-#define ITRACE(format, ...) VIDEO_DECODER_TRACE("INFO: ", format, ##__VA_ARGS__)
-#define VTRACE(format, ...) VIDEO_DECODER_TRACE("VERBOSE: ", format, ##__VA_ARGS__)
-
 #else
 
 #define ETRACE(format, ...)
-- cgit v1.2.3

From f950b5880136acf1c35979d4648f8a03dce87724 Mon Sep 17 00:00:00 2001
From: ywan171
Date: Mon, 8 Aug 2011 22:28:09 +0800
Subject: libmix: limit the surface number for 1080p, considering the storage limitation

BZ: 6825

Considering the storage limitation in landscape mode, limit the surface
number for 1080p to no more than 19.

Change-Id: Ic779ee160dea3c48daa11ddb0cec89645e441f7f
Signed-off-by: ywan171
---
 videodecoder/VideoDecoderAVC.cpp | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp
index 98f54ad..4de8936 100644
--- a/videodecoder/VideoDecoderAVC.cpp
+++ b/videodecoder/VideoDecoderAVC.cpp
@@ -584,6 +584,10 @@ Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) {
             vaProfile = VAProfileH264ConstrainedBaseline;
         }
     }
+    // for 1080p, limit the total surface count to 19, according to the hardware limitation
+    if (mVideoFormatInfo.height == 1088 && DPBSize + AVC_EXTRA_SURFACE_NUMBER > 19) {
+        DPBSize = 19 - AVC_EXTRA_SURFACE_NUMBER;
+    }
     VideoDecoderBase::setOutputWindowSize(DPBSize);
     return VideoDecoderBase::setupVA(DPBSize + AVC_EXTRA_SURFACE_NUMBER, vaProfile);
 }
-- cgit v1.2.3

From 1f177a56d37a876429416791d675e2d78297f122 Mon Sep 17 00:00:00 2001
From: xli111
Date: Tue, 16 Aug 2011 17:01:19 +0800
Subject: libmix: ignore the mSizeChanged flag in flush() to avoid a resolution-change issue.

BZ: 5196

When the clip's resolution in the container and in the SPS differ, a
DECODE_FORMAT_CHANGE should be returned from the decoder, but flush()
would break this mechanism by modifying the mSizeChanged flag. So
ignore this flag in flush().
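To illustrate the mechanism this change preserves: when the parser reports
picture dimensions that differ from what the client configured, the decoder
latches that fact and keeps reporting a format change until the client
reconfigures. A minimal, hypothetical sketch (checkFormatChange() and the
exact check site are assumptions for illustration, not the shipping code):

    Decode_Status VideoDecoderBase::checkFormatChange(int32_t width, int32_t height) {
        if (width != mVideoFormatInfo.width || height != mVideoFormatInfo.height) {
            mSizeChanged = true;  // latch until the client reconfigures
        }
        return mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS;
    }

Clearing mSizeChanged in flush() would drop that latched state whenever the
client seeks, which is exactly the failure this patch avoids.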
Change-Id: Ia86c2631cb43165a5a52c5c15b0050a30ede205a Signed-off-by: xli111 --- videodecoder/VideoDecoderBase.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 912c347..1fc8835 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -157,7 +157,6 @@ void VideoDecoderBase::flush(void) { mOutputHead = NULL; mOutputTail = NULL; mDecodingFrame = false; - mSizeChanged = false; // initialize surface buffer without resetting mapped/raw data initSurfaceBuffer(false); -- cgit v1.2.3 From ef285e74b27df3f7268e815bf0b5ad69251b56d7 Mon Sep 17 00:00:00 2001 From: Shuduo Sang Date: Tue, 13 Sep 2011 17:24:59 +0800 Subject: [PORT FROM R1][libmix] new encoder library BZ: 5809 New encoder common library Change-Id: Ia3b61d5c0ffb6edfb32cb7be7daa42df6279959c Orig-Change-Id: I1c402c8d4b468011d892538f229d28f6f5abdcb0 Signed-off-by: Weian Chen Reviewed-on: http://android.intel.com:8080/18317 Tested-by: Sang, Shuduo Reviewed-by: Monnier, OlivierX Reviewed-by: buildbot Tested-by: buildbot --- Android.mk | 1 + 1 file changed, 1 insertion(+) diff --git a/Android.mk b/Android.mk index 1a56908..baf7b48 100644 --- a/Android.mk +++ b/Android.mk @@ -9,3 +9,4 @@ include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_video/src/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_vbp/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk -- cgit v1.2.3 From 685c8a5052a74ea2515f33fbe83b4ca6ab3fbeb1 Mon Sep 17 00:00:00 2001 From: Shuduo Sang Date: Tue, 13 Sep 2011 17:26:07 +0800 Subject: [PORT FROM R1][libmix] new encoder library BZ: 5809 New encoder common library Change-Id: I33a1896ac3efaf8697d9ee9e32b02932d9c2c896 Orig-Change-Id: I1c402c8d4b468011d892538f229d28f6f5abdcb0 Signed-off-by: Weian Chen Reviewed-on: http://android.intel.com:8080/18318 Tested-by: Sang, Shuduo Reviewed-by: Monnier, OlivierX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/Android.mk | 43 + videoencoder/VideoEncoderAVC.cpp | 890 ++++++++++++++++++ videoencoder/VideoEncoderAVC.h | 55 ++ videoencoder/VideoEncoderBase.cpp | 1694 ++++++++++++++++++++++++++++++++++ videoencoder/VideoEncoderBase.h | 144 +++ videoencoder/VideoEncoderDef.h | 435 +++++++++ videoencoder/VideoEncoderH263.cpp | 162 ++++ videoencoder/VideoEncoderH263.h | 45 + videoencoder/VideoEncoderHost.cpp | 43 + videoencoder/VideoEncoderHost.h | 17 + videoencoder/VideoEncoderInterface.h | 29 + videoencoder/VideoEncoderLog.h | 68 ++ videoencoder/VideoEncoderMP4.cpp | 311 +++++++ videoencoder/VideoEncoderMP4.h | 51 + 14 files changed, 3987 insertions(+) create mode 100644 videoencoder/Android.mk create mode 100644 videoencoder/VideoEncoderAVC.cpp create mode 100644 videoencoder/VideoEncoderAVC.h create mode 100644 videoencoder/VideoEncoderBase.cpp create mode 100644 videoencoder/VideoEncoderBase.h create mode 100644 videoencoder/VideoEncoderDef.h create mode 100644 videoencoder/VideoEncoderH263.cpp create mode 100644 videoencoder/VideoEncoderH263.h create mode 100644 videoencoder/VideoEncoderHost.cpp create mode 100644 videoencoder/VideoEncoderHost.h create mode 100644 videoencoder/VideoEncoderInterface.h create mode 100644 videoencoder/VideoEncoderLog.h create mode 100644 videoencoder/VideoEncoderMP4.cpp create mode 100644 videoencoder/VideoEncoderMP4.h diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk new file mode 
100644 index 0000000..9ee2e42 --- /dev/null +++ b/videoencoder/Android.mk @@ -0,0 +1,43 @@ +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +VIDEO_ENC_LOG_ENABLE := false + +LOCAL_SRC_FILES := \ + VideoEncoderBase.cpp \ + VideoEncoderAVC.cpp \ + VideoEncoderH263.cpp \ + VideoEncoderMP4.cpp \ + VideoEncoderHost.cpp + +# LOCAL_CFLAGS := + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH) \ + $(TARGET_OUT_HEADERS)/libva \ + +#LOCAL_LDLIBS += -lpthread + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + libva \ + libva-android \ + libva-tpi + +#LOCAL_CFLAGS += -DANDROID + +LOCAL_COPY_HEADERS_TO := libmix_videoencoder + +LOCAL_COPY_HEADERS := \ + VideoEncoderHost.h \ + VideoEncoderInterface.h \ + VideoEncoderDef.h + +ifeq ($(VIDEO_ENC_LOG_ENABLE),true) +LOCAL_CPPFLAGS += -DVIDEO_ENC_LOG_ENABLE +endif + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libva_videoencoder + +include $(BUILD_SHARED_LIBRARY) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp new file mode 100644 index 0000000..93951c6 --- /dev/null +++ b/videoencoder/VideoEncoderAVC.cpp @@ -0,0 +1,890 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+ */ + +#include +#include +#include "VideoEncoderLog.h" +#include "VideoEncoderAVC.h" +#include + +VideoEncoderAVC::VideoEncoderAVC() + :VideoEncoderBase() { + mVideoParamsAVC.basicUnitSize = 0; + mVideoParamsAVC.VUIFlag = 0; + mVideoParamsAVC.sliceNum.iSliceNum = 2; + mVideoParamsAVC.sliceNum.pSliceNum = 2; + mVideoParamsAVC.idrInterval = 2; + mVideoParamsAVC.maxSliceSize = 0; + mVideoParamsAVC.delimiterType = AVC_DELIMITER_ANNEXB; + mSliceNum = 2; +} + +Encode_Status VideoEncoderAVC::start() { + + Encode_Status ret = ENCODE_SUCCESS; + LOG_V( "Begin\n"); + + if (mComParams.rcMode == VA_RC_VCM) { + // If we are in VCM, we will set slice num to max value + mVideoParamsAVC.sliceNum.iSliceNum = (mComParams.resolution.height + 15) / 16; + mVideoParamsAVC.sliceNum.pSliceNum = mVideoParamsAVC.sliceNum.iSliceNum; + } + + ret = VideoEncoderBase::start (); + CHECK_ENCODE_STATUS_RETURN("VideoEncoderBase::start"); + + LOG_V( "end\n"); + return ret; +} + +Encode_Status VideoEncoderAVC::derivedSetParams(VideoParamConfigSet *videoEncParams) { + + CHECK_NULL_RETURN_IFFAIL(videoEncParams); + VideoParamsAVC *encParamsAVC = reinterpret_cast (videoEncParams); + + // AVC parames + if (encParamsAVC->size != sizeof (VideoParamsAVC)) { + return ENCODE_INVALID_PARAMS; + } + + mVideoParamsAVC = *encParamsAVC; + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderAVC:: derivedGetParams(VideoParamConfigSet *videoEncParams) { + + CHECK_NULL_RETURN_IFFAIL(videoEncParams); + VideoParamsAVC *encParamsAVC = reinterpret_cast (videoEncParams); + + // AVC parames + if (encParamsAVC->size != sizeof (VideoParamsAVC)) { + return ENCODE_INVALID_PARAMS; + } + + *encParamsAVC = mVideoParamsAVC; + return ENCODE_SUCCESS; + +} + +Encode_Status VideoEncoderAVC::derivedSetConfig(VideoParamConfigSet *videoEncConfig) { + + CHECK_NULL_RETURN_IFFAIL(videoEncConfig); + LOG_I("Config type = %d\n", (int)videoEncConfig->type); + + switch (videoEncConfig->type) { + case VideoConfigTypeAVCIntraPeriod: { + + VideoConfigAVCIntraPeriod *configAVCIntraPeriod = + reinterpret_cast (videoEncConfig); + // Config Intra Peroid + if (configAVCIntraPeriod->size != sizeof (VideoConfigAVCIntraPeriod)) { + return ENCODE_INVALID_PARAMS; + } + + mVideoParamsAVC.idrInterval = configAVCIntraPeriod->idrInterval; + mComParams.intraPeriod = configAVCIntraPeriod->intraPeriod; + mNewHeader = true; + break; + } + case VideoConfigTypeNALSize: { + // Config MTU + VideoConfigNALSize *configNALSize = + reinterpret_cast (videoEncConfig); + if (configNALSize->size != sizeof (VideoConfigNALSize)) { + return ENCODE_INVALID_PARAMS; + } + + mVideoParamsAVC.maxSliceSize = configNALSize->maxSliceSize; + mRenderMaxSliceSize = true; + break; + } + case VideoConfigTypeIDRRequest: { + + mNewHeader = true; + break; + } + case VideoConfigTypeSliceNum: { + + VideoConfigSliceNum *configSliceNum = + reinterpret_cast (videoEncConfig); + // Config Slice size + if (configSliceNum->size != sizeof (VideoConfigSliceNum)) { + return ENCODE_INVALID_PARAMS; + } + + mVideoParamsAVC.sliceNum = configSliceNum->sliceNum; + break; + } + default: { + LOG_E ("Invalid Config Type"); + break; + } + } + + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderAVC:: derivedGetConfig( + VideoParamConfigSet *videoEncConfig) { + + CHECK_NULL_RETURN_IFFAIL(videoEncConfig); + LOG_I("Config type = %d\n", (int)videoEncConfig->type); + + switch (videoEncConfig->type) { + + case VideoConfigTypeAVCIntraPeriod: { + + VideoConfigAVCIntraPeriod *configAVCIntraPeriod = + reinterpret_cast (videoEncConfig); + if 
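// Aside: every VideoParamConfigSet-derived struct is self-describing; the
// caller fills in ->size with sizeof of the concrete type, and each
// setter/getter validates it before casting, so a mismatched or truncated
// struct is rejected as ENCODE_INVALID_PARAMS rather than read out of bounds.
// Typical caller-side usage (hypothetical example values):
//
//     VideoConfigAVCIntraPeriod cfg;
//     cfg.size = sizeof(VideoConfigAVCIntraPeriod); // mandatory self-description
//     cfg.type = VideoConfigTypeAVCIntraPeriod;
//     cfg.intraPeriod = 30;                         // assumed values, for illustration
//     cfg.idrInterval = 1;
//     encoder->derivedSetConfig(&cfg);
//
// The same size check guards the getter path here: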
(configAVCIntraPeriod->size != sizeof (VideoConfigAVCIntraPeriod)) { + return ENCODE_INVALID_PARAMS; + } + + configAVCIntraPeriod->idrInterval = mVideoParamsAVC.idrInterval; + configAVCIntraPeriod->intraPeriod = mComParams.intraPeriod; + + break; + } + case VideoConfigTypeNALSize: { + + VideoConfigNALSize *configNALSize = + reinterpret_cast (videoEncConfig); + if (configNALSize->size != sizeof (VideoConfigNALSize)) { + return ENCODE_INVALID_PARAMS; + } + + configNALSize->maxSliceSize = mVideoParamsAVC.maxSliceSize; + break; + } + case VideoConfigTypeIDRRequest: { + break; + + } + case VideoConfigTypeSliceNum: { + + VideoConfigSliceNum *configSliceNum = + reinterpret_cast (videoEncConfig); + if (configSliceNum->size != sizeof (VideoConfigSliceNum)) { + return ENCODE_INVALID_PARAMS; + } + + configSliceNum->sliceNum = mVideoParamsAVC.sliceNum; + break; + } + default: { + LOG_E ("Invalid Config Type"); + break; + } + } + + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderAVC::getOutput(VideoEncOutputBuffer *outBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; + bool useLocalBuffer = false; + uint32_t nalType = 0; + uint32_t nalSize = 0; + uint32_t nalOffset = 0; + uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval; + + LOG_V("Begin\n"); + CHECK_NULL_RETURN_IFFAIL(outBuffer); + + if (mFrameNum > 2) { + if (idrPeroid != 0 && (((mFrameNum - 2) % idrPeroid) == 0)) { + mKeyFrame = true; + } else { + mKeyFrame = false; + } + } + + // prepare for output, map the coded buffer + ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer); + CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); + + switch (outBuffer->format) { + case OUTPUT_EVERYTHING: + case OUTPUT_FRAME_DATA: { + // Output whatever we have + ret = VideoEncoderBase::outputAllData(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); + break; + } + case OUTPUT_CODEC_DATA: { + // Output the codec data + ret = outputCodecData(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("outputCodecData"); + break; + } + + case OUTPUT_ONE_NAL: { + // Output only one NAL unit + ret = outputOneNALU(outBuffer, true); + CHECK_ENCODE_STATUS_CLEANUP("outputOneNALU"); + break; + } + + case OUTPUT_ONE_NAL_WITHOUT_STARTCODE: { + ret = outputOneNALU(outBuffer, false); + CHECK_ENCODE_STATUS_CLEANUP("outputOneNALU"); + break; + } + + case OUTPUT_LENGTH_PREFIXED: { + // Output length prefixed + ret = outputLengthPrefixed(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("outputLengthPrefixed"); + break; + } + + default: + LOG_E("Invalid buffer mode\n"); + ret = ENCODE_FAIL; + break; + } + + LOG_I("out size is = %d\n", outBuffer->dataSize); + + // cleanup, unmap the coded buffer if all + // data has been copied out + ret = VideoEncoderBase::cleanupForOutput(); + +CLEAN_UP: + + if (ret < ENCODE_SUCCESS) { + if (outBuffer->data && (useLocalBuffer == true)) { + delete[] outBuffer->data; + outBuffer->data = NULL; + useLocalBuffer = false; + } + + // error happens, unmap the buffer + if (mCodedBufferMapped) { + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + mCodedBufferMapped = false; + mCurSegment = NULL; + } + } + LOG_V("End\n"); + return ret; +} + +Encode_Status VideoEncoderAVC::getOneNALUnit( + uint8_t *inBuffer, uint32_t bufSize, uint32_t *nalSize, + uint32_t *nalType, uint32_t *nalOffset) { + uint32_t pos = 0; + uint32_t zeroByteCount = 0; + uint32_t prefixLength = 0; + uint32_t leadingZeroCnt = 0; + + // Don't need to check parameters here as we just checked by caller + while 
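// Aside: this scan walks an Annex B stream, where each NAL unit begins after
// a start code of two or more 0x00 bytes followed by 0x01, and the low five
// bits of the next byte give nal_unit_type (7 = SPS, 8 = PPS, 5 = IDR slice).
// An equivalent standalone scanner, as a hypothetical helper:
//
//     // returns the offset of the first byte after a start code, or -1 if none
//     static int32_t findStartCode(const uint8_t *buf, uint32_t size) {
//         uint32_t zeros = 0;
//         for (uint32_t i = 0; i < size; i++) {
//             if (buf[i] == 0x00) { zeros++; continue; }
//             if (buf[i] == 0x01 && zeros >= 2) return (int32_t)(i + 1);
//             zeros = 0;
//         }
//         return -1;
//     }
//
// The loop here performs the same zero-run count in place: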
((inBuffer[pos++] == 0x00)) { + zeroByteCount ++; + if (pos >= bufSize) //to make sure the buffer to be accessed is valid + break; + } + + if (inBuffer[pos - 1] != 0x01 || zeroByteCount < 2) { + LOG_E("The stream is not AnnexB format \n"); + return ENCODE_FAIL; //not AnnexB, we won't process it + } + + *nalType = (*(inBuffer + pos)) & 0x1F; + LOG_I ("NAL type = 0x%x\n", *nalType); + + zeroByteCount = 0; + *nalOffset = pos; + + while (pos < bufSize) { + + while (inBuffer[pos++] == 0) { + zeroByteCount ++; + if (pos >= bufSize) //to make sure the buffer to be accessed is valid + break; + } + + if (inBuffer[pos - 1] == 0x01 && zeroByteCount >= 2) { + if (zeroByteCount == 2) { + prefixLength = 3; + } else { + prefixLength = 4; + leadingZeroCnt = zeroByteCount - 3; + } + + LOG_V("leading_zero_count = %d\n", leadingZeroCnt); + *nalSize = pos - *nalOffset - prefixLength - leadingZeroCnt; + break; + } else if (pos == bufSize) { + LOG_V ("The last NALU\n"); + *nalSize = pos - *nalOffset; + } else { + zeroByteCount = 0; + leadingZeroCnt = 0; + } + } + + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderAVC::getHeader( + uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize) { + + uint32_t nalType = 0; + uint32_t nalSize = 0; + uint32_t nalOffset = 0; + uint32_t size = 0; + uint8_t *buf = inBuffer; + Encode_Status ret = ENCODE_SUCCESS; + + *headerSize = 0; + CHECK_NULL_RETURN_IFFAIL(inBuffer); + + if (bufSize == 0) { + //bufSize shoule not be 0, error happens + LOG_E("Buffer size is 0\n"); + return ENCODE_FAIL; + } + + while (1) { + nalType = nalSize = nalOffset = 0; + ret = getOneNALUnit(buf, bufSize, &nalSize, &nalType, &nalOffset); + CHECK_ENCODE_STATUS_RETURN("getOneNALUnit"); + + LOG_I("NAL type = %d, NAL size = %d, offset = %d\n", nalType, nalSize, nalOffset); + size = nalSize + nalOffset; + + // Codec_data should be SPS or PPS + if (nalType == 7 || nalType == 8) { + *headerSize += size; + buf += size; + bufSize -= size; + } else { + LOG_V("No header found or no header anymore\n"); + break; + } + } + + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderAVC::outputCodecData( + VideoEncOutputBuffer *outBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; + uint32_t headerSize = 0; + + ret = getHeader((uint8_t *)mCurSegment->buf + mOffsetInSeg, + mCurSegment->size - mOffsetInSeg, &headerSize); + CHECK_ENCODE_STATUS_RETURN("getHeader"); + if (headerSize == 0) { + outBuffer->dataSize = 0; + mCurSegment = NULL; + return ENCODE_NO_REQUEST_DATA; + } + + if (headerSize <= outBuffer->bufferSize) { + memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg, headerSize); + mTotalSizeCopied += headerSize; + mOffsetInSeg += headerSize; + outBuffer->dataSize = headerSize; + outBuffer->remainingSize = 0; + outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; + outBuffer->flag |= ENCODE_BUFFERFLAG_CODECCONFIG; + outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; + } else { + // we need a big enough buffer, otherwise we won't output anything + outBuffer->dataSize = 0; + outBuffer->remainingSize = headerSize; + outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID; + LOG_E("Buffer size too small\n"); + return ENCODE_BUFFER_TOO_SMALL; + } + + return ret; +} + +Encode_Status VideoEncoderAVC::outputOneNALU( + VideoEncOutputBuffer *outBuffer, bool startCode) { + + uint32_t nalType = 0; + uint32_t nalSize = 0; + uint32_t nalOffset = 0; + uint32_t sizeToBeCopied = 0; + + Encode_Status ret = ENCODE_SUCCESS; + CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf); + + ret = getOneNALUnit((uint8_t *)mCurSegment->buf + 
mOffsetInSeg,
+            mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset);
+    CHECK_ENCODE_STATUS_RETURN("getOneNALUnit");
+
+    // check if we need the start code along with the payload
+    if (startCode) {
+        sizeToBeCopied = nalSize + nalOffset;
+    } else {
+        sizeToBeCopied = nalSize;
+    }
+
+    if (sizeToBeCopied <= outBuffer->bufferSize) {
+        if (startCode) {
+            memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg, sizeToBeCopied);
+        } else {
+            memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg + nalOffset,
+                   sizeToBeCopied);
+        }
+        mTotalSizeCopied += sizeToBeCopied;
+        mOffsetInSeg += (nalSize + nalOffset);
+        outBuffer->dataSize = sizeToBeCopied;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
+        if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+        outBuffer->remainingSize = 0;
+    } else {
+        // nothing can be copied out, so flag the data as invalid
+        outBuffer->dataSize = 0;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
+        outBuffer->remainingSize = sizeToBeCopied;
+        LOG_W("Buffer size too small\n");
+        return ENCODE_BUFFER_TOO_SMALL;
+    }
+
+    // check if all data in the current segment has been copied out
+    if (mCurSegment->size == mOffsetInSeg) {
+        if (mCurSegment->next != NULL) {
+            mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+            mOffsetInSeg = 0;
+        } else {
+            LOG_V("End of stream\n");
+            outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+            if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+            mCurSegment = NULL;
+        }
+    }
+
+    return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuffer) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    uint32_t nalType = 0;
+    uint32_t nalSize = 0;
+    uint32_t nalOffset = 0;
+    uint32_t sizeCopiedHere = 0;
+
+    CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf);
+
+    while (1) {
+
+        if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) {
+            LOG_E("mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere\n");
+            return ENCODE_FAIL;
+        }
+
+        // we need to handle the whole bitstream NAL by NAL
+        ret = getOneNALUnit(
+                (uint8_t *)mCurSegment->buf + mOffsetInSeg,
+                mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset);
+        CHECK_ENCODE_STATUS_RETURN("getOneNALUnit");
+
+        if (nalSize + 4 <= outBuffer->bufferSize - sizeCopiedHere) {
+            // write the NAL length to the bit stream as a 4-byte big-endian value
+            outBuffer->data[sizeCopiedHere] = (nalSize >> 24) & 0xff;
+            outBuffer->data[sizeCopiedHere + 1] = (nalSize >> 16) & 0xff;
+            outBuffer->data[sizeCopiedHere + 2] = (nalSize >> 8) & 0xff;
+            outBuffer->data[sizeCopiedHere + 3] = nalSize & 0xff;
+
+            sizeCopiedHere += 4;
+            mTotalSizeCopied += 4;
+
+            // copy the NAL payload itself (nalSize bytes, skipping the start code)
+            memcpy(outBuffer->data + sizeCopiedHere,
+                   (uint8_t *)mCurSegment->buf + mOffsetInSeg + nalOffset, nalSize);
+
+            sizeCopiedHere += nalSize;
+            mTotalSizeCopied += nalSize;
+            mOffsetInSeg += (nalSize + nalOffset);
+
+        } else {
+            outBuffer->dataSize = sizeCopiedHere;
+            // the start code may be 3 bytes while the length prefix is always 4 bytes,
+            // so remainingSize may be larger than the actual remaining data size
+            outBuffer->remainingSize = mTotalSize - mTotalSizeCopied + 100;
+            outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
+            if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+            LOG_E("Buffer size too small\n");
+            return ENCODE_BUFFER_TOO_SMALL;
+        }
+
+        // check if all data in the current segment has been copied out
+        if (mCurSegment->size == mOffsetInSeg) {
+            if (mCurSegment->next != NULL) {
+                mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+                mOffsetInSeg = 0;
+            } else {
+                LOG_V("End of stream\n");
+                outBuffer->dataSize = sizeCopiedHere;
+                outBuffer->remainingSize = 0;
+                outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+                if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+                mCurSegment = NULL;
+                break;
+            }
+        }
+    }
+
+    return ENCODE_SUCCESS;
+}
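// Aside: outputLengthPrefixed() above converts Annex B framing into the
// length-prefixed (MP4/AVCC style) layout, where each NAL unit is preceded
// by its size as a 4-byte big-endian integer instead of a start code. The
// per-NAL write, as a standalone hypothetical helper:
//
//     // dst must have room for 4 + nalSize bytes
//     static void writeLengthPrefixed(uint8_t *dst, const uint8_t *nal, uint32_t nalSize) {
//         dst[0] = (nalSize >> 24) & 0xff; // big-endian 32-bit length
//         dst[1] = (nalSize >> 16) & 0xff;
//         dst[2] = (nalSize >> 8) & 0xff;
//         dst[3] = nalSize & 0xff;
//         memcpy(dst + 4, nal, nalSize);   // payload only, start code dropped
//     }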
+Encode_Status VideoEncoderAVC::sendEncodeCommand(void) {
+    Encode_Status ret = ENCODE_SUCCESS;
+
+    LOG_V( "Begin\n");
+
+    if (mFrameNum == 0 || mNewHeader) {
+        ret = renderSequenceParams();
+        CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
+        mNewHeader = false; // the request for a new header has been served
+    }
+
+    if (mRenderMaxSliceSize && mVideoParamsAVC.maxSliceSize != 0) {
+        ret = renderMaxSliceSize();
+        CHECK_ENCODE_STATUS_RETURN("renderMaxSliceSize");
+        mRenderMaxSliceSize = false;
+    }
+
+    if (mRenderBitRate) {
+        ret = VideoEncoderBase::renderDynamicBitrate();
+        CHECK_ENCODE_STATUS_RETURN("renderDynamicBitrate");
+
+        mRenderBitRate = false;
+    }
+
+    if (mRenderAIR &&
+        (mComParams.refreshType == VIDEO_ENC_AIR ||
+        mComParams.refreshType == VIDEO_ENC_BOTH)) {
+
+        ret = renderAIR();
+        CHECK_ENCODE_STATUS_RETURN("renderAIR");
+
+        mRenderAIR = false;
+    }
+
+    if (mRenderFrameRate) {
+
+        ret = VideoEncoderBase::renderDynamicFrameRate();
+        CHECK_ENCODE_STATUS_RETURN("renderDynamicFrameRate");
+
+        mRenderFrameRate = false;
+    }
+
+    ret = renderPictureParams();
+    CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
+
+    ret = renderSliceParams();
+    CHECK_ENCODE_STATUS_RETURN("renderSliceParams");
+
+    LOG_V( "End\n");
+    return ENCODE_SUCCESS;
+}
+
+
+Encode_Status VideoEncoderAVC::renderMaxSliceSize() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    Encode_Status ret = ENCODE_SUCCESS;
+    LOG_V( "Begin\n\n");
+
+    if (mComParams.rcMode != RATE_CONTROL_VCM) {
+        LOG_W("Not in VCM mode, but renderMaxSliceSize is called\n");
+        return ENCODE_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer *miscEncParamBuf;
+    VAEncMiscParameterMaxSliceSize *maxSliceSizeParam;
+    VABufferID miscParamBufferID;
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncMiscParameterBufferType,
+            sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterMaxSliceSize),
+            1, NULL, &miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    miscEncParamBuf->type = VAEncMiscParameterTypeMaxSliceSize;
+    maxSliceSizeParam = (VAEncMiscParameterMaxSliceSize *)miscEncParamBuf->data;
+
+    maxSliceSizeParam->max_slice_size = mVideoParamsAVC.maxSliceSize;
+
+    // log while the buffer is still mapped; the pointer is invalid after unmap
+    LOG_I( "max slice size = %d\n", maxSliceSizeParam->max_slice_size);
+
+    vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::renderAIR() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    LOG_V( "Begin\n\n");
+
+    if (mComParams.rcMode != RATE_CONTROL_VCM) {
+
+        LOG_W("Not in VCM mode, but renderAIR is called\n");
+        return ENCODE_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer *miscEncParamBuf;
+    VAEncMiscParameterAIR *airParams;
+    VABufferID miscParamBufferID;
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncMiscParameterBufferType,
+            // size the header struct itself, not the pointer to it
+            sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterAIR),
+            1, NULL, &miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
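// Aside: renderAIR() follows the same five-step libva misc-parameter pattern
// as renderMaxSliceSize() above: create a VAEncMiscParameterBufferType buffer
// sized for the header plus the payload, map it, set ->type and fill the
// payload living in ->data, unmap, then queue it with vaRenderPicture().
// Condensed template (PayloadT and payloadType are placeholders; error
// checks elided):
//
//     VAEncMiscParameterBuffer *misc = NULL;
//     VABufferID id;
//     vaCreateBuffer(dpy, ctx, VAEncMiscParameterBufferType,
//                    sizeof(VAEncMiscParameterBuffer) + sizeof(PayloadT),
//                    1, NULL, &id);
//     vaMapBuffer(dpy, id, (void **)&misc);
//     misc->type = payloadType;             // e.g. VAEncMiscParameterTypeAIR
//     PayloadT *p = (PayloadT *)misc->data; // payload follows the header
//     // ... fill *p ...
//     vaUnmapBuffer(dpy, id);
//     vaRenderPicture(dpy, ctx, &id, 1);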
+ vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + miscEncParamBuf->type = VAEncMiscParameterTypeAIR; + airParams = (VAEncMiscParameterAIR *)miscEncParamBuf->data; + + airParams->air_num_mbs = mComParams.airParams.airMBs; + airParams->air_threshold= mComParams.airParams.airThreshold; + airParams->air_auto = mComParams.airParams.airAuto; + + vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + LOG_I( "airThreshold = %d\n", airParams->air_threshold); + return ENCODE_SUCCESS; +} + +int VideoEncoderAVC::calcLevel(int numMbs) { + int level = 30; + + if (numMbs < 3600) { + level = 30; + } else if (numMbs < 5120) { + level = 31; + } else if (numMbs < 8192) { + level = 32; + } else if (numMbs < 8704) { + level = 40; + } else if (numMbs < 22080) { + level = 42; + } else if (numMbs < 36864) { + level = 50; + } else { + level = 51; + } + return level; +} + +Encode_Status VideoEncoderAVC::renderSequenceParams() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncSequenceParameterBufferH264 avcSeqParams; + int level; + uint32_t frameRateNum = mComParams.frameRate.frameRateNum; + uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom; + + LOG_V( "Begin\n\n"); + + // set up the sequence params for HW + // avcSeqParams.level_idc = mLevel; + avcSeqParams.intra_period = mComParams.intraPeriod; + avcSeqParams.intra_idr_period = mVideoParamsAVC.idrInterval; + avcSeqParams.picture_width_in_mbs = (mComParams.resolution.width + 15) / 16; + avcSeqParams.picture_height_in_mbs = (mComParams.resolution.height + 15) / 16; + + level = calcLevel (avcSeqParams.picture_width_in_mbs * avcSeqParams.picture_height_in_mbs); + avcSeqParams.level_idc = level; + avcSeqParams.bits_per_second = mComParams.rcParams.bitRate; + avcSeqParams.frame_rate = + (unsigned int) (frameRateNum + frameRateDenom /2 ) / frameRateDenom; + avcSeqParams.initial_qp = mComParams.rcParams.initQP; + avcSeqParams.min_qp = mComParams.rcParams.minQP; + avcSeqParams.basic_unit_size = mVideoParamsAVC.basicUnitSize; //for rate control usage + avcSeqParams.intra_period = mComParams.intraPeriod; + //avcSeqParams.vui_flag = 248; + avcSeqParams.seq_parameter_set_id = 8; + + // This is a temporary fix suggested by Binglin for bad encoding quality issue + avcSeqParams.max_num_ref_frames = 1; // TODO: We need a long term design for this field + + LOG_V("===h264 sequence params===\n"); + LOG_I( "seq_parameter_set_id = %d\n", (uint32_t)avcSeqParams.seq_parameter_set_id); + LOG_I( "level_idc = %d\n", (uint32_t)avcSeqParams.level_idc); + LOG_I( "intra_period = %d\n", avcSeqParams.intra_period); + LOG_I( "idr_interval = %d\n", avcSeqParams.intra_idr_period); + LOG_I( "picture_width_in_mbs = %d\n", avcSeqParams.picture_width_in_mbs); + LOG_I( "picture_height_in_mbs = %d\n", avcSeqParams.picture_height_in_mbs); + LOG_I( "bitrate = %d\n", avcSeqParams.bits_per_second); + LOG_I( "frame_rate = %d\n", avcSeqParams.frame_rate); + LOG_I( "initial_qp = %d\n", avcSeqParams.initial_qp); + LOG_I( "min_qp = %d\n", avcSeqParams.min_qp); + LOG_I( "basic_unit_size = %d\n", avcSeqParams.basic_unit_size); + + vaStatus = vaCreateBuffer( + mVADisplay, mVAContext, + VAEncSequenceParameterBufferType, + sizeof(avcSeqParams), 1, &avcSeqParams, + &mSeqParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = 
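// Aside: calcLevel() above picks the lowest H.264 level whose macroblock
// budget covers the frame, and the frame rate is rounded to the nearest
// integer before being handed to the driver. Worked examples:
//
//     720p:      (1280/16) * (720/16)  = 80 * 45  = 3600 MBs -> level 31
//     1080p:     (1920/16) * (1088/16) = 120 * 68 = 8160 MBs -> level 32
//     29.97 fps: (30000 + 1001/2) / 1001 = 30500 / 1001 = 30 (integer division)
//
// With the buffer created above, the sequence header is now queued via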
vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + return ENCODE_SUCCESS; +} + + +Encode_Status VideoEncoderAVC::renderPictureParams() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncPictureParameterBufferH264 avcPicParams; + + LOG_V( "Begin\n\n"); + // set picture params for HW + avcPicParams.reference_picture = mRefFrame->surface; + avcPicParams.reconstructed_picture = mRecFrame->surface; + avcPicParams.coded_buf = mVACodedBuffer [mCodedBufIndex]; + avcPicParams.picture_width = mComParams.resolution.width; + avcPicParams.picture_height = mComParams.resolution.height; + avcPicParams.last_picture = 0; + + LOG_V("======h264 picture params======\n"); + LOG_I( "reference_picture = 0x%08x\n", avcPicParams.reference_picture); + LOG_I( "reconstructed_picture = 0x%08x\n", avcPicParams.reconstructed_picture); + LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); + LOG_I( "coded_buf = 0x%08x\n", avcPicParams.coded_buf); + LOG_I( "picture_width = %d\n", avcPicParams.picture_width); + LOG_I( "picture_height = %d\n\n", avcPicParams.picture_height); + + vaStatus = vaCreateBuffer( + mVADisplay, mVAContext, + VAEncPictureParameterBufferType, + sizeof(avcPicParams), + 1,&avcPicParams, + &mPicParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + LOG_V( "end\n"); + return ENCODE_SUCCESS; +} + + +Encode_Status VideoEncoderAVC::renderSliceParams() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + + uint32_t sliceNum = 0; + uint32_t sliceHeight = 0; + uint32_t sliceIndex = 0; + uint32_t sliceHeightInMB = 0; + uint32_t maxSliceNum = 0; + uint32_t minSliceNum = 0; + int actualSliceHeightInMB = 0; + int startRowInMB = 0; + uint32_t modulus = 0; + + LOG_V( "Begin\n\n"); + + maxSliceNum = (mComParams.resolution.height + 15) / 16; + minSliceNum = 1; + + if (mIsIntra) { + sliceNum = mVideoParamsAVC.sliceNum.iSliceNum; + } else { + sliceNum = mVideoParamsAVC.sliceNum.pSliceNum; + } + + if (sliceNum < minSliceNum) { + LOG_W("Slice Number is too small"); + sliceNum = minSliceNum; + } + + if (sliceNum > maxSliceNum) { + LOG_W("Slice Number is too big"); + sliceNum = maxSliceNum; + } + + mSliceNum= sliceNum; + modulus = maxSliceNum % sliceNum; + sliceHeightInMB = (maxSliceNum - modulus) / sliceNum ; + + vaStatus = vaCreateBuffer( + mVADisplay, mVAContext, + VAEncSliceParameterBufferType, + sizeof(VAEncSliceParameterBuffer), + sliceNum, NULL, + &mSliceParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + VAEncSliceParameterBuffer *sliceParams, *currentSlice; + vaStatus = vaMapBuffer(mVADisplay, mSliceParamBuf, (void **)&sliceParams); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + currentSlice = sliceParams; + startRowInMB = 0; + for (sliceIndex = 0; sliceIndex < sliceNum; sliceIndex++) { + currentSlice = sliceParams + sliceIndex; + actualSliceHeightInMB = sliceHeightInMB; + if (sliceIndex < modulus) { + actualSliceHeightInMB ++; + } + + // starting MB row number for this slice + currentSlice->start_row_number = startRowInMB; + // slice height measured in MB + currentSlice->slice_height = actualSliceHeightInMB; + currentSlice->slice_flags.bits.is_intra = mIsIntra; + currentSlice->slice_flags.bits.disable_deblocking_filter_idc + = mComParams.disableDeblocking; + + // This is a temporary fix suggested by Binglin for bad encoding quality issue + // TODO: We need a long term design for this field + currentSlice->slice_flags.bits.uses_long_term_ref 
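// Aside: the loop above spreads macroblock rows as evenly as possible across
// slices: with 1080p (68 MB rows) and sliceNum = 2, modulus is 0 and each
// slice gets 34 rows; with 720p (45 MB rows) and sliceNum = 2, modulus is 1,
// so slice 0 covers rows 0..22 (23 rows) and slice 1 covers rows 23..44
// (22 rows). Both long-term-reference flags below are hard-wired to zero: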
= 0; + currentSlice->slice_flags.bits.is_long_term_ref = 0; + + LOG_V("======AVC slice params======\n"); + LOG_I( "slice_index = %d\n", (int) sliceIndex); + LOG_I( "start_row_number = %d\n", (int) currentSlice->start_row_number); + LOG_I( "slice_height_in_mb = %d\n", (int) currentSlice->slice_height); + LOG_I( "slice.is_intra = %d\n", (int) currentSlice->slice_flags.bits.is_intra); + LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) currentSlice->slice_flags.bits.disable_deblocking_filter_idc); + + startRowInMB += actualSliceHeightInMB; + } + + vaStatus = vaUnmapBuffer(mVADisplay, mSliceParamBuf); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + LOG_V( "end\n"); + return ENCODE_SUCCESS; +} diff --git a/videoencoder/VideoEncoderAVC.h b/videoencoder/VideoEncoderAVC.h new file mode 100644 index 0000000..c86b0b4 --- /dev/null +++ b/videoencoder/VideoEncoderAVC.h @@ -0,0 +1,55 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+ */ + +#ifndef __VIDEO_ENCODER_AVC_H__ +#define __VIDEO_ENCODER_AVC_H__ + +#include "VideoEncoderBase.h" + +class VideoEncoderAVC : public VideoEncoderBase { + +public: + VideoEncoderAVC(); + ~VideoEncoderAVC() {}; + + virtual Encode_Status start(); + virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); + + virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams); + virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams); + virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig); + virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig); + +protected: + + virtual Encode_Status sendEncodeCommand(void); + +private: + // Local Methods + + Encode_Status getOneNALUnit(uint8_t *inBuffer, uint32_t bufSize, uint32_t *nalSize, uint32_t *nalType, uint32_t *nalOffset); + Encode_Status getHeader(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize); + Encode_Status outputCodecData(VideoEncOutputBuffer *outBuffer); + Encode_Status outputOneNALU(VideoEncOutputBuffer *outBuffer, bool startCode); + Encode_Status outputLengthPrefixed(VideoEncOutputBuffer *outBuffer); + + Encode_Status renderMaxSliceSize(); + Encode_Status renderAIR(); + Encode_Status renderSequenceParams(); + Encode_Status renderPictureParams(); + Encode_Status renderSliceParams(); + int calcLevel(int numMbs); + +public: + + VideoParamsAVC mVideoParamsAVC; + uint32_t mSliceNum; + +}; + +#endif /* __VIDEO_ENCODER_AVC_H__ */ diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp new file mode 100644 index 0000000..4c6fe62 --- /dev/null +++ b/videoencoder/VideoEncoderBase.cpp @@ -0,0 +1,1694 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+ */ +#include +#include "VideoEncoderLog.h" +#include "VideoEncoderBase.h" +#include +#include + +VideoEncoderBase::VideoEncoderBase() + :mInitialized(false) + ,mVADisplay(NULL) + ,mVAContext(0) + ,mVAConfig(0) + ,mVAEntrypoint(VAEntrypointEncSlice) + ,mCurSegment(NULL) + ,mOffsetInSeg(0) + ,mTotalSize(0) + ,mTotalSizeCopied(0) + ,mBufferMode(BUFFER_SHARING_NONE) + ,mUpstreamBufferList(NULL) + ,mUpstreamBufferCnt(0) + ,mForceKeyFrame(false) + ,mNewHeader(false) + ,mFirstFrame (true) + ,mRenderMaxSliceSize(false) + ,mRenderQP (false) + ,mRenderAIR(false) + ,mRenderFrameRate(false) + ,mRenderBitRate(false) + ,mLastCodedBuffer(0) + ,mOutCodedBuffer(0) + ,mSeqParamBuf(0) + ,mPicParamBuf(0) + ,mSliceParamBuf(0) + ,mSharedSurfaces(NULL) + ,mSurfaces(NULL) + ,mSurfaceCnt(0) + ,mSharedSurfacesCnt(0) + ,mReqSurfacesCnt(0) + ,mUsrPtr(NULL) + ,mVideoSrcBufferList(NULL) + ,mCurFrame(NULL) + ,mRefFrame(NULL) + ,mRecFrame(NULL) + ,mLastFrame(NULL) + ,mLastInputRawBuffer(NULL) + ,mEncodedFrames(0) + ,mFrameNum(0) + ,mCodedBufSize(0) + ,mCodedBufIndex(0) + ,mPicSkipped(false) + ,mIsIntra(true) + ,mSliceSizeOverflow(false) + ,mCodedBufferMapped(false) + ,mDataCopiedOut(false) + ,mKeyFrame(true) { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + // here the display can be any value, use following one + // just for consistence purpose, so don't define it + unsigned int display = 0x18C34078; + int majorVersion = -1; + int minorVersion = -1; + + setDefaultParams(); + mVACodedBuffer [0] = 0; + mVACodedBuffer [1] = 0; + + LOG_V("vaGetDisplay \n"); + mVADisplay = vaGetDisplay(&display); + if (mVADisplay == NULL) { + LOG_E("vaGetDisplay failed."); + } + + vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion); + LOG_V("vaInitialize \n"); + if (vaStatus != VA_STATUS_SUCCESS) { + LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus); + } +} + +VideoEncoderBase::~VideoEncoderBase() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + vaStatus = vaTerminate(mVADisplay); + LOG_V( "vaTerminate\n"); + if (vaStatus != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaTerminate, vaStatus = %d\n", vaStatus); + } else { + mVADisplay = NULL; + } +} + +Encode_Status VideoEncoderBase::start() { + + Encode_Status ret = ENCODE_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; + VASurfaceID *surfaces = NULL; + + VAConfigAttrib vaAttrib[2]; + uint32_t index; + uint32_t maxSize = 0; + + VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL; + uint32_t normalSurfacesCnt = 2; + + if (mInitialized) { + LOG_V("Encoder has been started\n"); + return ENCODE_ALREADY_INIT; + } + + // For upstream allocates buffer, it is mandatory to set buffer mode + // and for other stuff, it is optional + // Different buffer mode will have different surface handling approach + + // mSharedSurfacesCnt is for upstream buffer allocation case + mSharedSurfacesCnt = 0; + + vaAttrib[0].type = VAConfigAttribRTFormat; + vaAttrib[1].type = VAConfigAttribRateControl; + vaAttrib[0].value = VA_RT_FORMAT_YUV420; + vaAttrib[1].value = mComParams.rcMode; + + LOG_V( "======VA Configuration======\n"); + + LOG_I( "profile = %d\n", mComParams.profile); + LOG_I( "mVAEntrypoint = %d\n", mVAEntrypoint); + LOG_I( "vaAttrib[0].type = %d\n", vaAttrib[0].type); + LOG_I( "vaAttrib[1].type = %d\n", vaAttrib[1].type); + LOG_I( "vaAttrib[0].value (Format) = %d\n", vaAttrib[0].value); + LOG_I( "vaAttrib[1].value (RC mode) = %d\n", vaAttrib[1].value); + + LOG_V( "vaCreateConfig\n"); + + vaStatus = vaCreateConfig( + mVADisplay, mComParams.profile, mVAEntrypoint, + &vaAttrib[0], 2, &(mVAConfig)); + 
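// Aside: the two VAConfigAttrib entries pin down the session: RTFormat
// selects YUV 4:2:0 surfaces and RateControl carries the chosen RC mode
// down to the driver. Equivalent minimal standalone setup (hypothetical
// snippet, error handling elided):
//
//     VAConfigAttrib attrib[2];
//     attrib[0].type  = VAConfigAttribRTFormat;
//     attrib[0].value = VA_RT_FORMAT_YUV420;
//     attrib[1].type  = VAConfigAttribRateControl;
//     attrib[1].value = VA_RC_CBR; // or VA_RC_VBR / VA_RC_VCM / VA_RC_NONE
//     VAConfigID cfg;
//     vaCreateConfig(dpy, VAProfileH264Baseline, VAEntrypointEncSlice,
//                    attrib, 2, &cfg);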
CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateConfig"); + + if (mComParams.rcMode == VA_RC_VCM) { + + // Following three features are only enabled in VCM mode + mRenderMaxSliceSize = true; + mRenderAIR = true; + mRenderBitRate = true; + } + + LOG_I("mReqSurfacesCnt = %d\n", mReqSurfacesCnt); + + if (mReqSurfacesCnt == 0) { + switch (mBufferMode) { + case BUFFER_SHARING_CI: { + mSharedSurfacesCnt = mUpstreamBufferCnt; + normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE; + + if (mSharedSurfacesCnt != 0) { + mSharedSurfaces = new VASurfaceID[mSharedSurfacesCnt]; + + if (mSharedSurfaces == NULL) { + LOG_E("Failed allocate shared surface\n"); + ret = ENCODE_NO_MEMORY; + goto CLEAN_UP; + } + } + } + break; + case BUFFER_SHARING_V4L2: + case BUFFER_SHARING_SURFACE: + // To be develped + break; + default: + mBufferMode = BUFFER_SHARING_NONE; + normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE; + break; + } + } else if (mReqSurfacesCnt == 1) { + // TODO: Un-normal case, + mBufferMode = BUFFER_SHARING_NONE; + normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE; + } else { + mBufferMode = BUFFER_SHARING_USRPTR; + mUsrPtr = new uint8_t *[mReqSurfacesCnt]; + if (mUsrPtr == NULL) { + LOG_E("Failed allocate memory\n"); + ret = ENCODE_NO_MEMORY; + goto CLEAN_UP; + } + } + + LOG_I("mBufferMode = %d\n", mBufferMode); + + mSurfaceCnt = normalSurfacesCnt + mSharedSurfacesCnt + mReqSurfacesCnt; + + surfaces = new VASurfaceID[normalSurfacesCnt]; + if (surfaces == NULL) { + LOG_E("Failed allocate surface\n"); + ret = ENCODE_NO_MEMORY; + goto CLEAN_UP; + } + + mSurfaces = new VASurfaceID[mSurfaceCnt] ; + if (mSurfaces == NULL) { + LOG_E("Failed allocate private surface\n"); + ret = ENCODE_NO_MEMORY; + goto CLEAN_UP; + } + + vaStatus = vaCreateSurfaces(mVADisplay, mComParams.resolution.width, + mComParams.resolution.height, VA_RT_FORMAT_YUV420, + normalSurfacesCnt, surfaces); + CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateSurfaces"); + + switch (mBufferMode) { + case BUFFER_SHARING_CI: { + for (index = 0; index < mSharedSurfacesCnt; index++) { + + vaStatus = vaCreateSurfaceFromCIFrame( + mVADisplay, (uint32_t)mUpstreamBufferCnt, &mSharedSurfaces[index]); + + CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateSurfaceFromCIFrame"); + + mSurfaces[index] = mSharedSurfaces[index]; + + videoSurfaceBuffer = new VideoEncSurfaceBuffer; + if (videoSurfaceBuffer == NULL) { + LOG_E( "new VideoEncSurfaceBuffer failed\n"); + return ENCODE_NO_MEMORY; + } + + videoSurfaceBuffer->surface = mSharedSurfaces[index]; + videoSurfaceBuffer->usrptr = NULL; + videoSurfaceBuffer->index = index; + videoSurfaceBuffer->bufAvailable = true; + videoSurfaceBuffer->next = NULL; + + mVideoSrcBufferList = appendVideoSurfaceBuffer + (mVideoSrcBufferList, videoSurfaceBuffer); + videoSurfaceBuffer = NULL; + } + } + break; + case BUFFER_SHARING_V4L2: + case BUFFER_SHARING_SURFACE: + // To be develped + break; + case BUFFER_SHARING_NONE: + break; + case BUFFER_SHARING_USRPTR: { + videoSurfaceBuffer = mVideoSrcBufferList; + index = 0; + while (videoSurfaceBuffer != NULL) { + mSurfaces[index] = videoSurfaceBuffer->surface; + mUsrPtr [index] = videoSurfaceBuffer->usrptr; + videoSurfaceBuffer = videoSurfaceBuffer->next; + index ++; + } + } + break; + default: + break; + } + + for (index = 0; index < normalSurfacesCnt; index++) { + mSurfaces[mReqSurfacesCnt + mSharedSurfacesCnt + index] = surfaces[index]; + + videoSurfaceBuffer = new VideoEncSurfaceBuffer; + if (videoSurfaceBuffer == NULL) { + LOG_E( "new VideoEncSurfaceBuffer failed\n"); + 
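// Aside: every surface, whether shared from upstream (CI), backed by a user
// pointer, or allocated internally, is wrapped in a VideoEncSurfaceBuffer
// node and appended to the mVideoSrcBufferList free list, from which encode()
// later draws the current/reference/reconstructed frames. The append helper's
// contract, as inferred from its call sites (a sketch, not the actual
// definition):
//
//     // returns the (possibly new) list head; appending keeps FIFO order
//     VideoEncSurfaceBuffer *appendVideoSurfaceBuffer(
//             VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *node) {
//         if (head == NULL) return node;
//         VideoEncSurfaceBuffer *p = head;
//         while (p->next != NULL) p = p->next;
//         p->next = node;
//         return head;
//     }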
return ENCODE_NO_MEMORY; + } + + videoSurfaceBuffer->surface = surfaces[index]; + videoSurfaceBuffer->usrptr = NULL; + videoSurfaceBuffer->index = mReqSurfacesCnt + mSharedSurfacesCnt + index; + videoSurfaceBuffer->bufAvailable = true; + videoSurfaceBuffer->next = NULL; + + mVideoSrcBufferList = appendVideoSurfaceBuffer(mVideoSrcBufferList, videoSurfaceBuffer); + + videoSurfaceBuffer = NULL; + } + + LOG_V( "assign surface Done\n"); + LOG_I( "Created %d libva surfaces\n", mSurfaceCnt); + + //Initialize and save the VA context ID + LOG_V( "vaCreateContext\n"); + + vaStatus = vaCreateContext(mVADisplay, mVAConfig, + mComParams.resolution.width, + mComParams.resolution.height, + 0, mSurfaces, mSurfaceCnt, + &(mVAContext)); + CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateContext"); + + LOG_I("Created libva context width %d, height %d\n", + mComParams.resolution.width, mComParams.resolution.height); + + ret = getMaxOutSize(&maxSize); + CHECK_ENCODE_STATUS_CLEANUP("getMaxOutSize"); + + // Create coded buffer for output + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncCodedBufferType, + mCodedBufSize, + 1, NULL, + &(mVACodedBuffer[0])); + + CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateBuffer::VAEncCodedBufferType"); + + // Create coded buffer for output + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncCodedBufferType, + mCodedBufSize, + 1, NULL, + &(mVACodedBuffer[1])); + + CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateBuffer::VAEncCodedBufferType"); + + mFirstFrame = true; + +CLEAN_UP: + + if (ret == ENCODE_SUCCESS) { + mInitialized = true; + } + + if (surfaces) delete []surfaces; + + LOG_V( "end\n"); + return ret; +} + +Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; + + if (!mInitialized) { + LOG_E("Encoder has not initialized yet\n"); + return ENCODE_NOT_INIT; + } + + CHECK_NULL_RETURN_IFFAIL(inBuffer); + + inBuffer->bufAvailable = false; + if (mNewHeader) mFrameNum = 0; + + // current we use one surface for source data, + // one for reference and one for reconstructed + decideFrameType(); + ret = manageSrcSurface(inBuffer); + CHECK_ENCODE_STATUS_RETURN("manageSrcSurface"); + + // Start encoding process + LOG_V( "vaBeginPicture\n"); + LOG_I( "mVAContext = 0x%08x\n",(uint32_t) mVAContext); + LOG_I( "Surface = 0x%08x\n",(uint32_t) mCurFrame->surface); + LOG_I( "mVADisplay = 0x%08x\n",(uint32_t)mVADisplay); + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurFrame->surface); + CHECK_VA_STATUS_RETURN("vaBeginPicture"); + + ret = sendEncodeCommand(); + CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand"); + + if ((mComParams.rcMode == VA_RC_NONE) || mFirstFrame) { + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS_RETURN("vaEndPicture"); + } + + LOG_V( "vaEndPicture\n"); + + if (mFirstFrame) { + updateProperities(); + decideFrameType(); + } + + LOG_I ("vaSyncSurface ID = 0x%08x\n", mLastFrame->surface); + vaStatus = vaSyncSurface(mVADisplay, mLastFrame->surface); + if (vaStatus != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaSyncSurface\n"); + } + + mOutCodedBuffer = mLastCodedBuffer; + + if (!((mComParams.rcMode == VA_RC_NONE) || mFirstFrame)) { + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS_RETURN("vaEndPicture"); + + } + + if (mFirstFrame) { + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurFrame->surface); + CHECK_VA_STATUS_RETURN("vaBeginPicture"); + + ret = sendEncodeCommand(); + CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand"); + + vaStatus = 
vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS_RETURN("vaEndPicture"); + + mKeyFrame = true; + } + + // Query the status of current surface + VASurfaceStatus vaSurfaceStatus; + vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastFrame->surface, &vaSurfaceStatus); + CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); + + mPicSkipped = vaSurfaceStatus & VASurfaceSkipped; + + if (!mFirstFrame) { + VideoEncoderBase::appendVideoSurfaceBuffer(mVideoSrcBufferList, mLastFrame); + } + + mLastFrame = NULL; + updateProperities(); + mCurFrame = NULL; + + if (mLastInputRawBuffer) mLastInputRawBuffer->bufAvailable = true; + + LOG_V("ref the current inBuffer\n"); + + mLastInputRawBuffer = inBuffer; + mFirstFrame = false; + + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; + bool useLocalBuffer = false; + + CHECK_NULL_RETURN_IFFAIL(outBuffer); + + LOG_V("Begin\n"); + + if (outBuffer->format != OUTPUT_EVERYTHING && outBuffer->format != OUTPUT_FRAME_DATA) { + LOG_E("Output buffer mode not supported\n"); + goto CLEAN_UP; + } + + // For first getOutput, the mFrameNum already increased to 2, and of course is key frame + // frame 0 is already encoded and will be outputed here + // frame 1 is encoding now, frame 2 will be sent to encoder for next encode() call + if (mFrameNum > 2) { + if (mComParams.intraPeriod != 0 && + (((mFrameNum - 2) % mComParams.intraPeriod) == 0)) { + mKeyFrame = true; + } else { + mKeyFrame = false; + } + } + + ret = prepareForOutput(outBuffer, &useLocalBuffer); + CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); + + ret = outputAllData(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); + + LOG_I("out size for this getOutput call = %d\n", outBuffer->dataSize); + + ret = cleanupForOutput(); + CHECK_ENCODE_STATUS_CLEANUP("cleanupForOutput"); + +CLEAN_UP: + + if (ret < ENCODE_SUCCESS) { + if (outBuffer->data && (useLocalBuffer == true)) { + delete[] outBuffer->data; + outBuffer->data = NULL; + useLocalBuffer = false; + } + + if (mCodedBufferMapped) { + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + mCodedBufferMapped = false; + mCurSegment = NULL; + } + } + + LOG_V("End\n"); + return ret; +} + + +void VideoEncoderBase::flush() { + + LOG_V( "Begin\n"); + + // put reconstructed surface back to list + if (mRecFrame != NULL) { + appendVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame); + mRecFrame = NULL; + } + + // put reference surface back to list + if (mRefFrame != NULL) { + appendVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame); + mRefFrame = NULL; + } + + // Here this raw buffer means the surface being encoding + if (mLastInputRawBuffer) { + mLastInputRawBuffer->bufAvailable = true; + mLastInputRawBuffer = NULL; + } + + // reset the properities + mEncodedFrames = 0; + mFrameNum = 0; + mPicSkipped = false; + mIsIntra = true; + + LOG_V( "end\n"); +} + +Encode_Status VideoEncoderBase::stop() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + Encode_Status ret = ENCODE_SUCCESS; + VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL; + VideoEncSurfaceBuffer *tmpBuffer = NULL; + + + LOG_V( "Begin\n"); + + if (mSharedSurfaces) { + delete [] mSharedSurfaces; + mSharedSurfaces = NULL; + } + + if (mSurfaces) { + delete [] mSurfaces; + mSurfaces = NULL; + } + + if (mUsrPtr) { + delete [] mUsrPtr; + mUsrPtr = NULL; + } + + if (mUpstreamBufferList) { + delete [] mUpstreamBufferList; + mUpstreamBufferList = NULL; + } + + // It is possible that above 
pointers have been allocated + // before we set mInitialized to true + if (!mInitialized) { + LOG_V("Encoder has been stopped\n"); + return ENCODE_SUCCESS; + } + + LOG_V( "Release frames\n"); + + // put reconstructed surface back to list + if (mRecFrame != NULL) { + appendVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame); + mRecFrame = NULL; + } + + // put reference surface back to list + if (mRefFrame != NULL) { + appendVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame); + mRefFrame = NULL; + } + + // put Source surface back to list + if (mLastFrame != NULL) { + appendVideoSurfaceBuffer(mVideoSrcBufferList, mLastFrame); + mLastFrame = NULL; + } + + LOG_V( "Release surfaces\n"); + + + LOG_V( "vaDestroyContext\n"); + vaStatus = vaDestroyContext(mVADisplay, mVAContext); + CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext"); + + LOG_V( "vaDestroyConfig\n"); + vaStatus = vaDestroyConfig(mVADisplay, mVAConfig); + CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig"); + + // Release Src Surface Buffer List + LOG_V( "Rlease Src Surface Buffer \n"); + + videoSurfaceBuffer = mVideoSrcBufferList; + + while (videoSurfaceBuffer != NULL) { + tmpBuffer = videoSurfaceBuffer; + videoSurfaceBuffer = videoSurfaceBuffer->next; + delete tmpBuffer; + } + +CLEAN_UP: + mInitialized = false; + LOG_V( "end\n"); + return ret; +} + + +Encode_Status VideoEncoderBase::prepareForOutput( + VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; + VACodedBufferSegment *vaCodedSeg = NULL; + uint32_t status = 0; + uint8_t *buf = NULL; + + LOG_V( "begin\n"); + // Won't check parameters here as the caller already checked them + // mCurSegment is NULL means it is first time to be here after finishing encoding a frame + if (mCurSegment == NULL && !mCodedBufferMapped) { + LOG_I ("Coded Buffer ID been mapped = 0x%08x\n", mOutCodedBuffer); + vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + CHECK_NULL_RETURN_IFFAIL(buf); + + mCodedBufferMapped = true; + mTotalSize = 0; + mOffsetInSeg = 0; + mTotalSizeCopied = 0; + vaCodedSeg = (VACodedBufferSegment *)buf; + mCurSegment = (VACodedBufferSegment *)buf; + + while (1) { + + mTotalSize += vaCodedSeg->size; + status = vaCodedSeg->status; + + if (!mSliceSizeOverflow) { + mSliceSizeOverflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK; + } + + if (vaCodedSeg->next == NULL) + break; + + vaCodedSeg = (VACodedBufferSegment *)vaCodedSeg->next; + } + } + + + // We will support two buffer allocation mode, + // one is application allocates the buffer and passes to encode, + // the other is encode allocate memory + + //means app doesn't allocate the buffer, so _encode will allocate it. 
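To make the ownership rules concrete before the branch below: getOutput() supports both caller-allocated and encoder-allocated output buffers. A minimal caller-side sketch of the two modes — illustrative only, assuming an already-started encoder reachable through a hypothetical pointer `encoder`, with a frame already submitted via encode(); error checking is omitted:

    VideoEncOutputBuffer out;
    memset(&out, 0, sizeof(out));
    out.format = OUTPUT_EVERYTHING;

    // Mode 1: encoder-allocated output. Leaving data == NULL takes the
    // local-buffer path in the branch below.
    out.data = NULL;
    Encode_Status status = encoder->getOutput(&out);

    // Mode 2 (alternative): caller-allocated output, sized via getMaxOutSize().
    uint32_t maxSize = 0;
    encoder->getMaxOutSize(&maxSize);
    out.data = new uint8_t[maxSize];
    out.bufferSize = maxSize;
    status = encoder->getOutput(&out);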
+ if (outBuffer->data == NULL) { + *useLocalBuffer = true; + outBuffer->data = new uint8_t[mTotalSize - mTotalSizeCopied + 100]; + if (outBuffer->data == NULL) { + LOG_E( "outBuffer->data == NULL\n"); + return ENCODE_NO_MEMORY; + } + outBuffer->bufferSize = mTotalSize + 100; + outBuffer->dataSize = 0; + } + + // Clear all flag for every call + outBuffer->flag = 0; + if (mSliceSizeOverflow) outBuffer->flag |= ENCODE_BUFFERFLAG_SLICEOVERFOLOW; + + if (mCurSegment->size < mOffsetInSeg) { + LOG_E("mCurSegment->size < mOffsetInSeg\n"); + return ENCODE_FAIL; + } + + // Make sure we have data in current segment + if (mCurSegment->size == mOffsetInSeg) { + if (mCurSegment->next != NULL) { + mCurSegment = (VACodedBufferSegment *)mCurSegment->next; + mOffsetInSeg = 0; + } else { + LOG_V("No more data available\n"); + outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID; + outBuffer->dataSize = 0; + mCurSegment = NULL; + return ENCODE_NO_REQUEST_DATA; + } + } + + LOG_V( "end\n"); + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderBase::cleanupForOutput() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + Encode_Status ret = ENCODE_SUCCESS; + + //mCurSegment is NULL means all data has been copied out + if (mCurSegment == NULL && mCodedBufferMapped) { + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + mCodedBufferMapped = false; + } + return ENCODE_SUCCESS; +} + + +Encode_Status VideoEncoderBase::outputAllData( + VideoEncOutputBuffer *outBuffer) { + + // Data size been copied for every single call + uint32_t sizeCopiedHere = 0; + uint32_t sizeToBeCopied = 0; + + CHECK_NULL_RETURN_IFFAIL(outBuffer->data); + + while (1) { + + LOG_I("mCurSegment->size = %d, mOffsetInSeg = %d\n", mCurSegment->size, mOffsetInSeg); + LOG_I("outBuffer->bufferSize = %d, sizeCopiedHere = %d, mTotalSizeCopied = %d\n", + outBuffer->bufferSize, sizeCopiedHere, mTotalSizeCopied); + + if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) { + LOG_E("mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere\n"); + return ENCODE_FAIL; + } + + if ((mCurSegment->size - mOffsetInSeg) <= outBuffer->bufferSize - sizeCopiedHere) { + sizeToBeCopied = mCurSegment->size - mOffsetInSeg; + memcpy(outBuffer->data + sizeCopiedHere, + (uint8_t *)mCurSegment->buf + mOffsetInSeg, sizeToBeCopied); + sizeCopiedHere += sizeToBeCopied; + mTotalSizeCopied += sizeToBeCopied; + mOffsetInSeg = 0; + } else { + sizeToBeCopied = outBuffer->bufferSize - sizeCopiedHere; + memcpy(outBuffer->data + sizeCopiedHere, + (uint8_t *)mCurSegment->buf + mOffsetInSeg, outBuffer->bufferSize - sizeCopiedHere); + mTotalSizeCopied += sizeToBeCopied; + mOffsetInSeg += sizeToBeCopied; + outBuffer->dataSize = outBuffer->bufferSize; + outBuffer->remainingSize = mTotalSize - mTotalSizeCopied; + outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME; + if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; + return ENCODE_BUFFER_TOO_SMALL; + } + + if (mCurSegment->next == NULL) { + outBuffer->dataSize = sizeCopiedHere; + outBuffer->remainingSize = 0; + outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; + if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; + mCurSegment = NULL; + return ENCODE_SUCCESS; + } + + mCurSegment = (VACodedBufferSegment *)mCurSegment->next; + mOffsetInSeg = 0; + } +} + +void VideoEncoderBase::setDefaultParams() { + + // Set default value for input parameters + mComParams.profile = VAProfileH264Baseline; + mComParams.level = 40; + mComParams.rawFormat = 
RAW_FORMAT_NV12; + mComParams.frameRate.frameRateNum = 30; + mComParams.frameRate.frameRateDenom = 1; + mComParams.resolution.width = 0; + mComParams.resolution.height = 0; + mComParams.intraPeriod = 30; + mComParams.rcMode = RATE_CONTROL_NONE; + mComParams.rcParams.initQP = 15; + mComParams.rcParams.minQP = 1; + mComParams.rcParams.bitRate = 640000; + mComParams.rcParams.targetPercentage = 95; + mComParams.rcParams.windowSize = 500; + mComParams.cyclicFrameInterval = 30; + mComParams.refreshType = VIDEO_ENC_NONIR; + mComParams.airParams.airMBs = 0; + mComParams.airParams.airThreshold = 0; + mComParams.airParams.airAuto = 1; + mComParams.disableDeblocking = 2; +} + +Encode_Status VideoEncoderBase::setParameters( + VideoParamConfigSet *videoEncParams) { + + Encode_Status ret = ENCODE_SUCCESS; + CHECK_NULL_RETURN_IFFAIL(videoEncParams); + LOG_I("Config type = %d\n", (int)videoEncParams->type); + + if (mInitialized) { + LOG_E("Encoder has been initialized, should use setConfig to change configurations\n"); + return ENCODE_ALREADY_INIT; + } + + switch (videoEncParams->type) { + case VideoParamsTypeCommon: { + + VideoParamsCommon *paramsCommon = + reinterpret_cast <VideoParamsCommon *> (videoEncParams); + + if (paramsCommon->size != sizeof (VideoParamsCommon)) { + return ENCODE_INVALID_PARAMS; + } + mComParams = *paramsCommon; + break; + } + + case VideoParamsTypeUpSteamBuffer: { + + VideoParamsUpstreamBuffer *upStreamBuffer = + reinterpret_cast <VideoParamsUpstreamBuffer *> (videoEncParams); + + if (upStreamBuffer->size != sizeof (VideoParamsUpstreamBuffer)) { + return ENCODE_INVALID_PARAMS; + } + + ret = setUpstreamBuffer( + upStreamBuffer->bufferMode, upStreamBuffer->bufList, upStreamBuffer->bufCnt); + break; + } + + case VideoParamsTypeUsrptrBuffer: { + + // the usrptr buffer can only be retrieved (via getParameters), + // so this case should not happen + break; + } + + case VideoParamsTypeAVC: + case VideoParamsTypeH263: + case VideoParamsTypeMP4: + case VideoParamsTypeVC1: { + ret = derivedSetParams(videoEncParams); + break; + } + + default: { + LOG_E ("Wrong ParamType here\n"); + break; + } + + } + + return ret; + +} + + +Encode_Status VideoEncoderBase::getParameters( + VideoParamConfigSet *videoEncParams) { + + Encode_Status ret = ENCODE_SUCCESS; + CHECK_NULL_RETURN_IFFAIL(videoEncParams); + LOG_I("Config type = %d\n", (int)videoEncParams->type); + + switch (videoEncParams->type) { + case VideoParamsTypeCommon: { + + VideoParamsCommon *paramsCommon = + reinterpret_cast <VideoParamsCommon *> (videoEncParams); + + if (paramsCommon->size != sizeof (VideoParamsCommon)) { + return ENCODE_INVALID_PARAMS; + } + *paramsCommon = mComParams; + break; + } + + case VideoParamsTypeUpSteamBuffer: { + + // Getting the upstream buffer is possible + // but not particularly meaningful + break; + } + + case VideoParamsTypeUsrptrBuffer: { + VideoParamsUsrptrBuffer *usrptrBuffer = + reinterpret_cast <VideoParamsUsrptrBuffer *> (videoEncParams); + + if (usrptrBuffer->size != sizeof (VideoParamsUsrptrBuffer)) { + return ENCODE_INVALID_PARAMS; + } + + ret = getNewUsrptrFromSurface( + usrptrBuffer->width, usrptrBuffer->height, usrptrBuffer->format, + usrptrBuffer->expectedSize, &(usrptrBuffer->actualSize), + &(usrptrBuffer->stride), &(usrptrBuffer->usrPtr)); + + break; + } + + case VideoParamsTypeAVC: + case VideoParamsTypeH263: + case VideoParamsTypeMP4: + case VideoParamsTypeVC1: { + ret = derivedGetParams(videoEncParams); + break; + } + + default: { + LOG_E ("Wrong ParamType here\n"); + break; + } + + } + return ret; +} + +Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { + + Encode_Status ret = ENCODE_SUCCESS; + 
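Before setConfig continues below, an illustrative caller of the setParameters()/getParameters() pair above — a minimal sketch with hypothetical values, assuming a hypothetical encoder pointer `encoder`. Note that setParameters() is only accepted before start(); afterwards changes must go through setConfig():

    VideoParamsCommon params;              // constructor fills in type and size
    encoder->getParameters(&params);       // start from the encoder defaults
    params.resolution.width  = 1280;       // hypothetical stream settings
    params.resolution.height = 720;
    params.rcMode = RATE_CONTROL_CBR;
    params.rcParams.bitRate = 4000000;
    Encode_Status status = encoder->setParameters(&params);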
CHECK_NULL_RETURN_IFFAIL(videoEncConfig); + LOG_I("Config type = %d\n", (int)videoEncConfig->type); + + if (!mInitialized) { + LOG_E("Encoder has not been initialized yet, can't call setConfig\n"); + return ENCODE_NOT_INIT; + } + + switch (videoEncConfig->type) { + case VideoConfigTypeFrameRate: { + VideoConfigFrameRate *configFrameRate = + reinterpret_cast <VideoConfigFrameRate *> (videoEncConfig); + + if (configFrameRate->size != sizeof (VideoConfigFrameRate)) { + return ENCODE_INVALID_PARAMS; + } + mComParams.frameRate = configFrameRate->frameRate; + mRenderFrameRate = true; + break; + } + + case VideoConfigTypeBitRate: { + VideoConfigBitRate *configBitRate = + reinterpret_cast <VideoConfigBitRate *> (videoEncConfig); + + if (configBitRate->size != sizeof (VideoConfigBitRate)) { + return ENCODE_INVALID_PARAMS; + } + mComParams.rcParams = configBitRate->rcParams; + + mRenderBitRate = true; + + break; + } + case VideoConfigTypeResolution: { + + // Not Implemented + break; + } + case VideoConfigTypeIntraRefreshType: { + + VideoConfigIntraRefreshType *configIntraRefreshType = + reinterpret_cast <VideoConfigIntraRefreshType *> (videoEncConfig); + + if (configIntraRefreshType->size != sizeof (VideoConfigIntraRefreshType)) { + return ENCODE_INVALID_PARAMS; + } + mComParams.refreshType = configIntraRefreshType->refreshType; + + break; + } + + case VideoConfigTypeCyclicFrameInterval: { + VideoConfigCyclicFrameInterval *configCyclicFrameInterval = + reinterpret_cast <VideoConfigCyclicFrameInterval *> (videoEncConfig); + if (configCyclicFrameInterval->size != sizeof (VideoConfigCyclicFrameInterval)) { + return ENCODE_INVALID_PARAMS; + } + + mComParams.cyclicFrameInterval = configCyclicFrameInterval->cyclicFrameInterval; + break; + } + + case VideoConfigTypeAIR: { + + VideoConfigAIR *configAIR = reinterpret_cast <VideoConfigAIR *> (videoEncConfig); + + if (configAIR->size != sizeof (VideoConfigAIR)) { + return ENCODE_INVALID_PARAMS; + } + + mComParams.airParams = configAIR->airParams; + + mRenderAIR = true; + + break; + } + case VideoConfigTypeAVCIntraPeriod: + case VideoConfigTypeNALSize: + case VideoConfigTypeIDRRequest: + case VideoConfigTypeSliceNum: { + + ret = derivedSetConfig(videoEncConfig); + + break; + } + default: { + LOG_E ("Wrong Config Type here\n"); + break; + } + } + + + return ret; +} + +Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) { + + Encode_Status ret = ENCODE_SUCCESS; + CHECK_NULL_RETURN_IFFAIL(videoEncConfig); + LOG_I("Config type = %d\n", (int)videoEncConfig->type); + + switch (videoEncConfig->type) { + case VideoConfigTypeFrameRate: { + VideoConfigFrameRate *configFrameRate = + reinterpret_cast <VideoConfigFrameRate *> (videoEncConfig); + + if (configFrameRate->size != sizeof (VideoConfigFrameRate)) { + return ENCODE_INVALID_PARAMS; + } + + configFrameRate->frameRate = mComParams.frameRate; + + break; + } + + case VideoConfigTypeBitRate: { + VideoConfigBitRate *configBitRate = + reinterpret_cast <VideoConfigBitRate *> (videoEncConfig); + + if (configBitRate->size != sizeof (VideoConfigBitRate)) { + return ENCODE_INVALID_PARAMS; + } + configBitRate->rcParams = mComParams.rcParams; + + + break; + } + case VideoConfigTypeResolution: { + // Not Implemented + break; + } + case VideoConfigTypeIntraRefreshType: { + + VideoConfigIntraRefreshType *configIntraRefreshType = + reinterpret_cast <VideoConfigIntraRefreshType *> (videoEncConfig); + + if (configIntraRefreshType->size != sizeof (VideoConfigIntraRefreshType)) { + return ENCODE_INVALID_PARAMS; + } + configIntraRefreshType->refreshType = mComParams.refreshType; + + break; + } + + case VideoConfigTypeCyclicFrameInterval: { + VideoConfigCyclicFrameInterval *configCyclicFrameInterval = + reinterpret_cast <VideoConfigCyclicFrameInterval *> 
(videoEncConfig); + if (configCyclicFrameInterval->size != sizeof (VideoConfigCyclicFrameInterval)) { + return ENCODE_INVALID_PARAMS; + } + + configCyclicFrameInterval->cyclicFrameInterval = mComParams.cyclicFrameInterval; + + break; + } + + case VideoConfigTypeAIR: { + + VideoConfigAIR *configAIR = reinterpret_cast (videoEncConfig); + + if (configAIR->size != sizeof (VideoConfigAIR)) { + return ENCODE_INVALID_PARAMS; + } + + configAIR->airParams = mComParams.airParams; + + break; + } + case VideoConfigTypeAVCIntraPeriod: + case VideoConfigTypeNALSize: + case VideoConfigTypeIDRRequest: + case VideoConfigTypeSliceNum: { + + ret = derivedGetConfig(videoEncConfig); + + break; + } + default: { + LOG_E ("Wrong ParamType here\n"); + break; + } + } + + return ret; +} + +void VideoEncoderBase:: decideFrameType () { + + LOG_I( "mEncodedFrames = %d\n", mEncodedFrames); + LOG_I( "mFrameNum = %d\n", mFrameNum); + LOG_I( "mIsIntra = %d\n", mIsIntra); + + // determine the picture type + if (mComParams.intraPeriod == 0) { + if (mFrameNum == 0) + mIsIntra = true; + else + mIsIntra = false; + } else if ((mFrameNum % mComParams.intraPeriod) == 0) { + mIsIntra = true; + } else { + mIsIntra = false; + } + + LOG_I( "mIsIntra = %d\n",mIsIntra); +} + + +void VideoEncoderBase:: updateProperities () { + + VideoEncSurfaceBuffer *tmpFrame = NULL; + LOG_V( "Begin\n"); + + mEncodedFrames ++; + mFrameNum ++; + mLastCodedBuffer = mVACodedBuffer[mCodedBufIndex]; + mCodedBufIndex ++; + mCodedBufIndex %=2; + + mLastFrame = mCurFrame; + + if (!mPicSkipped) { + tmpFrame = mRecFrame; + mRecFrame = mRefFrame; + mRefFrame = tmpFrame; + } + + LOG_V( "End\n"); +} + + +Encode_Status VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) { + + uint32_t size = mComParams.resolution.width * mComParams.resolution.height; + + if (maxSize == NULL) { + LOG_E("maxSize == NULL\n"); + return ENCODE_NULL_PTR; + } + + + LOG_V( "Begin\n"); + + + if (mCodedBufSize > 0) { + *maxSize = mCodedBufSize; + LOG_V ("Already calculate the max encoded size, get the value directly"); + return ENCODE_SUCCESS; + } + + // base on the rate control mode to calculate the defaule encoded buffer size + if (mComParams.rcMode == VA_RC_NONE) { + mCodedBufSize = (size * 400) / (16 * 16); + // set to value according to QP + } else { + mCodedBufSize = mComParams.rcParams.bitRate / 4; + } + + mCodedBufSize = + max (mCodedBufSize , (size * 400) / (16 * 16)); + + // in case got a very large user input bit rate value + mCodedBufSize = + min(mCodedBufSize, (size * 1.5 * 8)); + mCodedBufSize = (mCodedBufSize + 15) &(~15); + + *maxSize = mCodedBufSize; + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( + uint32_t width, uint32_t height, uint32_t format, + uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr) { + + Encode_Status ret = ENCODE_FAIL; + VAStatus vaStatus = VA_STATUS_SUCCESS; + + VASurfaceID surface = VA_INVALID_SURFACE; + VAImage image; + uint32_t index = 0; + + VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL; + + LOG_V( "Begin\n"); + + // If encode session has been configured, we can not request surface creation anymore + if (mInitialized) { + LOG_E( "Already Initialized, can not request VA surface anymore\n"); + return ENCODE_WRONG_STATE; + } + + if (width<=0 || height<=0 ||outsize == NULL ||stride == NULL || usrptr == NULL) { + LOG_E("width<=0 || height<=0 || outsize == NULL || stride == NULL ||usrptr == NULL\n"); + return ENCODE_NULL_PTR; + } + + // Current only NV12 is supported in VA API + // Through 
format we can get known the number of planes + if (format != STRING_TO_FOURCC("NV12")) { + + LOG_W ("Format is not supported\n"); + return ENCODE_NOT_SUPPORTED; + } + + vaStatus = vaCreateSurfacesForUserPtr(mVADisplay, width, height, VA_RT_FORMAT_YUV420, 1, + &surface, expectedSize, VA_FOURCC_NV12, width, width, width, + 0, width * height, width * height); + + CHECK_VA_STATUS_RETURN("vaCreateSurfacesForUserPtr"); + + vaStatus = vaDeriveImage(mVADisplay, surface, &image); + CHECK_VA_STATUS_RETURN("vaDeriveImage"); + + LOG_V( "vaDeriveImage Done\n"); + + vaStatus = vaMapBuffer(mVADisplay, image.buf, (void **) usrptr); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + // make sure the physical page been allocated + for (index = 0; index < image.data_size; index = index + 4096) { + unsigned char tmp = *(*usrptr + index); + if (tmp == 0) + *(*usrptr + index) = 0; + } + + *outsize = image.data_size; + *stride = image.pitches[0]; + + videoSurfaceBuffer = new VideoEncSurfaceBuffer; + if (videoSurfaceBuffer == NULL) { + LOG_E( "new VideoEncSurfaceBuffer failed\n"); + return ENCODE_NO_MEMORY; + } + + videoSurfaceBuffer->surface = surface; + videoSurfaceBuffer->usrptr = *usrptr; + videoSurfaceBuffer->index = mReqSurfacesCnt; + videoSurfaceBuffer->bufAvailable = true; + videoSurfaceBuffer->next = NULL; + + mVideoSrcBufferList = appendVideoSurfaceBuffer(mVideoSrcBufferList, videoSurfaceBuffer); + + LOG_I( "surface = 0x%08x\n",(uint32_t)surface); + LOG_I("image->pitches[0] = %d\n", image.pitches[0]); + LOG_I("image->pitches[1] = %d\n", image.pitches[1]); + LOG_I("image->offsets[0] = %d\n", image.offsets[0]); + LOG_I("image->offsets[1] = %d\n", image.offsets[1]); + LOG_I("image->num_planes = %d\n", image.num_planes); + LOG_I("image->width = %d\n", image.width); + LOG_I("image->height = %d\n", image.height); + + LOG_I ("data_size = %d\n", image.data_size); + LOG_I ("usrptr = 0x%p\n", *usrptr); + LOG_I ("mReqSurfacesCnt = %d\n", mReqSurfacesCnt); + LOG_I ("videoSurfaceBuffer->usrptr = 0x%p\n ", videoSurfaceBuffer->usrptr); + + videoSurfaceBuffer = NULL; + + vaStatus = vaUnmapBuffer(mVADisplay, image.buf); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + + + vaStatus = vaDestroyImage(mVADisplay, image.image_id); + CHECK_VA_STATUS_RETURN("vaDestroyImage"); + + if (*outsize < expectedSize) { + LOG_E ("Allocated buffer size is small than the expected size, destroy the surface"); + LOG_I ("Allocated size is %d, expected size is %d\n", *outsize, expectedSize); + vaStatus = vaDestroySurfaces(mVADisplay, &surface, 1); + CHECK_VA_STATUS_RETURN("vaDestroySurfaces"); + return ENCODE_FAIL; + } + + mReqSurfacesCnt ++; + ret = ENCODE_SUCCESS; + + return ret; +} + + +Encode_Status VideoEncoderBase::setUpstreamBuffer( + VideoBufferSharingMode bufferMode, uint32_t *bufList, uint32_t bufCnt) { + + CHECK_NULL_RETURN_IFFAIL(bufList); + if (bufCnt == 0) { + LOG_E("bufCnt == 0\n"); + return ENCODE_FAIL; + } + + if (mUpstreamBufferList) delete [] mUpstreamBufferList; + + mUpstreamBufferCnt = bufCnt; + mUpstreamBufferList = new uint32_t [bufCnt]; + if (!mUpstreamBufferList) { + LOG_E ("mUpstreamBufferList NULL\n"); + return ENCODE_NO_MEMORY; + } + + memcpy(mUpstreamBufferList, bufList, bufCnt * sizeof (uint32_t)); + return ENCODE_SUCCESS; + +} + + +Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; + + uint32_t idx = 0; + uint32_t bufIndex = 0; + + if (mBufferMode == BUFFER_SHARING_CI) { + + memcpy(&bufIndex, inBuffer->data, 
sizeof(unsigned int)); + // bufIndex = *(uint32_t*)inBuffer->data; + + LOG_I("mSurfaceCnt = %d\n", mSurfaceCnt); + LOG_I("bufIndex = %d\n", bufIndex); + + if (bufIndex > mSurfaceCnt - 2) { + LOG_E("the CI frame idx is bigger than total CI frame count\n"); + ret = ENCODE_FAIL; + return ret; + + } + + } else if (mBufferMode == BUFFER_SHARING_USRPTR) { + + bufIndex = (uint32_t) -1; //fixme, temp use a big value + + LOG_I("bufin->data = 0x%p \n", inBuffer->data); + + for (idx = 0; idx < mReqSurfacesCnt; idx++) { + LOG_I("mUsrPtr[%d] = 0x%p\n", idx, mUsrPtr[idx]); + + if (inBuffer->data == mUsrPtr[idx]) + bufIndex = idx; + } + + LOG_I("mSurfaceCnt = %d\n", mSurfaceCnt); + LOG_I("bufIndex = %d\n", bufIndex); + + if (bufIndex > mSurfaceCnt - 2) { + LOG_W("the Surface idx is too big, most likely the buffer passed in is not allocated by us\n"); + ret = ENCODE_FAIL; + goto no_share_mode; + + } + } + + + switch (mBufferMode) { + + case BUFFER_SHARING_CI: + case BUFFER_SHARING_USRPTR: { + + if (mRefFrame== NULL) { + mRefFrame = getVideoSurfaceBufferByIndex(mVideoSrcBufferList, mSurfaceCnt -1 ); + if (mRefFrame == NULL) { + LOG_E ("No Surface buffer available, something should be wrong\n"); + return ENCODE_FAIL; + } + mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame); + + } + + if (mRecFrame== NULL) { + mRecFrame = getVideoSurfaceBufferByIndex(mVideoSrcBufferList, mSurfaceCnt - 2); + if (mRecFrame == NULL) { + LOG_E ("No Surface buffer available, something should be wrong\n"); + return ENCODE_FAIL; + } + mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame); + + } + + if (mCurFrame== NULL) { + mCurFrame = getVideoSurfaceBufferByIndex(mVideoSrcBufferList, bufIndex); + if (mCurFrame == NULL) { + LOG_E ("No Surface buffer available, something should be wrong\n"); + return ENCODE_FAIL; + } + mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mCurFrame); + } + } + + break; + case BUFFER_SHARING_V4L2: + case BUFFER_SHARING_SURFACE: + LOG_E("Not Implemented\n"); + break; + + case BUFFER_SHARING_NONE: { +no_share_mode: + + if (mRefFrame== NULL) { + mRefFrame = mVideoSrcBufferList; + if (mRefFrame == NULL) { + LOG_E("No Surface buffer available, something should be wrong\n"); + return ENCODE_FAIL; + } + mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame); + + } + + if (mRecFrame== NULL) { + mRecFrame = mVideoSrcBufferList; + if (mRecFrame == NULL) { + LOG_E ("No Surface buffer available, something should be wrong\n"); + return ENCODE_FAIL; + } + mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame); + + } + + if (mCurFrame== NULL) { + mCurFrame = mVideoSrcBufferList; + if (mCurFrame == NULL) { + LOG_E ("No Surface buffer available, something should be wrong\n"); + return ENCODE_FAIL; + } + mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mCurFrame); + } + + LOG_V( "Get Surface Done\n"); + ret = uploadDataToSurface (inBuffer); + CHECK_ENCODE_STATUS_RETURN("uploadDataToSurface"); + } + break; + default: + break; + + } + + return ENCODE_SUCCESS; +} + +VideoEncSurfaceBuffer *VideoEncoderBase::appendVideoSurfaceBuffer( + VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer) { + + if (head == NULL) { + return buffer; + } + + VideoEncSurfaceBuffer *node = head; + VideoEncSurfaceBuffer *tail = NULL; + + while (node != NULL) { + tail = node; + node = node->next; + } + tail->next = buffer; + + return head; +} + +VideoEncSurfaceBuffer 
*VideoEncoderBase::removeVideoSurfaceBuffer( + VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer) { + + VideoEncSurfaceBuffer *node = head; + VideoEncSurfaceBuffer *tmpNode = NULL; + + if (head == buffer) { + tmpNode = head->next; + buffer->next = NULL; + return tmpNode; + } + + while (node != NULL) { + if (node->next == buffer) + break; + node = node->next; + } + + if (node != NULL) { + node->next = buffer->next; + } + + buffer->next = NULL; + return head; + +} + +VideoEncSurfaceBuffer *VideoEncoderBase::getVideoSurfaceBufferByIndex( + VideoEncSurfaceBuffer *head, uint32_t index) { + VideoEncSurfaceBuffer *node = head; + + while (node != NULL) { + if (node->index == index) + break; + node = node->next; + } + + return node; +} + +Encode_Status VideoEncoderBase::uploadDataToSurface(VideoEncRawBuffer *inBuffer) { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + + uint32_t width = mComParams.resolution.width; + uint32_t height = mComParams.resolution.height; + + VAImage srcImage; + uint8_t *pvBuffer; + uint8_t *dstY; + uint8_t *dstUV; + uint32_t i,j; + + uint8_t *inBuf = inBuffer->data; + VAImage *image = NULL; + + int uvOffset = width * height; + uint8_t *uvBufIn = inBuf + uvOffset; + uint32_t uvHeight = height / 2; + uint32_t uvWidth = width; + + LOG_V("map source data to surface\n"); + LOG_I("Surface ID = 0x%08x\n", (uint32_t) mCurFrame->surface); + + vaStatus = vaDeriveImage(mVADisplay, mCurFrame->surface, &srcImage); + CHECK_VA_STATUS_RETURN("vaDeriveImage"); + + LOG_V( "vaDeriveImage Done\n"); + + image = &srcImage; + + vaStatus = vaMapBuffer(mVADisplay, image->buf, (void **)&pvBuffer); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + LOG_V("vaImage information\n"); + LOG_I("image->pitches[0] = %d\n", image->pitches[0]); + LOG_I("image->pitches[1] = %d\n", image->pitches[1]); + LOG_I("image->offsets[0] = %d\n", image->offsets[0]); + LOG_I("image->offsets[1] = %d\n", image->offsets[1]); + LOG_I("image->num_planes = %d\n", image->num_planes); + LOG_I("image->width = %d\n", image->width); + LOG_I("image->height = %d\n", image->height); + + LOG_I("input buf size = %d\n", inBuffer->size); + + if (mComParams.rawFormat == RAW_FORMAT_YUV420) { + dstY = pvBuffer +image->offsets[0]; + + for (i = 0; i < height; i ++) { + memcpy(dstY, inBuf + i * width, width); + dstY += image->pitches[0]; + } + + dstUV = pvBuffer + image->offsets[1]; + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dstUV [j] = inBuf [width * height + i * width / 2 + j / 2]; + dstUV [j + 1] = + inBuf [width * height * 5 / 4 + i * width / 2 + j / 2]; + } + dstUV += image->pitches[1]; + } + } + + else if (mComParams.rawFormat == RAW_FORMAT_NV12) { + + dstY = pvBuffer + image->offsets[0]; + for (i = 0; i < height; i++) { + memcpy(dstY, inBuf + i * width, width); + dstY += image->pitches[0]; + } + + dstUV = pvBuffer + image->offsets[1]; + for (i = 0; i < uvHeight; i++) { + memcpy(dstUV, uvBufIn + i * uvWidth, uvWidth); + dstUV += image->pitches[1]; + } + } else { + LOG_E("Raw format not supoort\n"); + return ENCODE_FAIL; + } + + vaStatus = vaUnmapBuffer(mVADisplay, image->buf); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + + vaStatus = vaDestroyImage(mVADisplay, srcImage.image_id); + CHECK_VA_STATUS_RETURN("vaDestroyImage"); + + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderBase::renderDynamicBitrate() { + VAStatus vaStatus = VA_STATUS_SUCCESS; + + LOG_V( "Begin\n\n"); + + if (mComParams.rcMode != RATE_CONTROL_VCM) { + + LOG_W("Not in VCM mode, but call renderDynamicBitrate\n"); + return 
ENCODE_SUCCESS; + } + + VAEncMiscParameterBuffer *miscEncParamBuf; + VAEncMiscParameterRateControl *bitrateControlParam; + VABufferID miscParamBufferID; + + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncMiscParameterBufferType, + sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl), + 1, NULL, + &miscParamBufferID); + + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + miscEncParamBuf->type = VAEncMiscParameterTypeRateControl; + bitrateControlParam = (VAEncMiscParameterRateControl *)miscEncParamBuf->data; + + bitrateControlParam->bits_per_second = mComParams.rcParams.bitRate; + bitrateControlParam->initial_qp = mComParams.rcParams.initQP; + bitrateControlParam->min_qp = mComParams.rcParams.minQP; + bitrateControlParam->target_percentage = mComParams.rcParams.targetPercentage; + bitrateControlParam->window_size = mComParams.rcParams.windowSize; + + vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, + &miscParamBufferID, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + return ENCODE_SUCCESS; +} + + +Encode_Status VideoEncoderBase::renderDynamicFrameRate() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + + if (mComParams.rcMode != RATE_CONTROL_VCM) { + + LOG_W("Not in VCM mode, but call SendDynamicFramerate\n"); + return ENCODE_SUCCESS; + } + + VAEncMiscParameterBuffer *miscEncParamBuf; + VAEncMiscParameterFrameRate *frameRateParam; + VABufferID miscParamBufferID; + + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncMiscParameterBufferType, + sizeof(miscEncParamBuf) + sizeof(VAEncMiscParameterFrameRate), + 1, NULL, &miscParamBufferID); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + miscEncParamBuf->type = VAEncMiscParameterTypeFrameRate; + frameRateParam = (VAEncMiscParameterFrameRate *)miscEncParamBuf->data; + frameRateParam->framerate = + (unsigned int) (mComParams.frameRate.frameRateNum + mComParams.frameRate.frameRateDenom/2) + / mComParams.frameRate.frameRateDenom; + + vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + LOG_I( "frame rate = %d\n", frameRateParam->framerate); + return ENCODE_SUCCESS; +} diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h new file mode 100644 index 0000000..8ea052c --- /dev/null +++ b/videoencoder/VideoEncoderBase.h @@ -0,0 +1,144 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
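A note on the frame-rate computation in renderDynamicFrameRate() above: (frameRateNum + frameRateDenom/2) / frameRateDenom is integer division rounded to the nearest whole frame rate rather than truncated. For example, for the NTSC rate 30000/1001:

    unsigned int fps = (30000u + 1001u / 2) / 1001u;  // (30000 + 500) / 1001 == 30
    // plain truncation, 30000u / 1001u, would yield 29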
+ + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __VIDEO_ENCODER_BASE_H__ +#define __VIDEO_ENCODER_BASE_H__ + +#include +#include "VideoEncoderDef.h" +#include "VideoEncoderInterface.h" + +class VideoEncoderBase : IVideoEncoder { + +public: + VideoEncoderBase(); + virtual ~VideoEncoderBase(); + + virtual Encode_Status start(void); + virtual void flush(void); + virtual Encode_Status stop(void); + virtual Encode_Status encode(VideoEncRawBuffer *inBuffer); + + /* + * getOutput can be called several times for one frame (for example, the first call returns codec data and later calls return the rest) + * the encoder will provide encoded data according to the format (whole frame, codec_data, single NAL, etc.) + * If the buffer passed to getOutput is not big enough, this API call will return ENCODE_BUFFER_TOO_SMALL + * and the caller should provide a larger buffer and call again + */ + virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); + + + virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams); + virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams); + virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig); + virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig); + + virtual Encode_Status getMaxOutSize(uint32_t *maxSize); + + +protected: + virtual Encode_Status sendEncodeCommand(void) = 0; + virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) = 0; + virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams) = 0; + virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig) = 0; + virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) = 0; + + Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer); + Encode_Status cleanupForOutput(); + Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer); + Encode_Status renderDynamicFrameRate(); + Encode_Status renderDynamicBitrate(); + +private: + void setDefaultParams(void); + Encode_Status setUpstreamBuffer(VideoBufferSharingMode bufferMode, uint32_t *bufList, uint32_t bufCnt); + Encode_Status getNewUsrptrFromSurface(uint32_t width, uint32_t height, uint32_t format, + uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr); + + VideoEncSurfaceBuffer *appendVideoSurfaceBuffer( + VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer); + VideoEncSurfaceBuffer *removeVideoSurfaceBuffer( + VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer); + VideoEncSurfaceBuffer *getVideoSurfaceBufferByIndex( + VideoEncSurfaceBuffer *head, uint32_t index); + + Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer); + void updateProperities(void); + void decideFrameType(void); + Encode_Status uploadDataToSurface(VideoEncRawBuffer *inBuffer); + +protected: + + bool mInitialized; + VADisplay mVADisplay; + VAContextID mVAContext; + VAConfigID mVAConfig; + VAEntrypoint mVAEntrypoint; + + VACodedBufferSegment *mCurSegment; + uint32_t mOffsetInSeg; + uint32_t mTotalSize; + uint32_t mTotalSizeCopied; + + VideoParamsCommon mComParams; + + VideoBufferSharingMode mBufferMode; + uint32_t *mUpstreamBufferList; + uint32_t mUpstreamBufferCnt; + + bool mForceKeyFrame; + bool mNewHeader; + 
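    // A note on the pipeline state below (inferred from encode() above): the
    // encoder runs one frame behind. Two coded buffers are used in ping-pong
    // fashion (mVACodedBuffer[2], toggled through mCodedBufIndex), so while the
    // current frame is encoding, getOutput() drains the previous frame's coded
    // buffer (mOutCodedBuffer = mLastCodedBuffer). mFirstFrame marks the warm-up
    // pass, where the first frame is submitted twice before any output exists.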
bool mFirstFrame; + + bool mRenderMaxSliceSize; //Max Slice Size + bool mRenderQP; + bool mRenderAIR; + bool mRenderFrameRate; + bool mRenderBitRate; + + VABufferID mVACodedBuffer[2]; + VABufferID mLastCodedBuffer; + VABufferID mOutCodedBuffer; + VABufferID mSeqParamBuf; + VABufferID mPicParamBuf; + VABufferID mSliceParamBuf; + + VASurfaceID *mSharedSurfaces; + VASurfaceID *mSurfaces; + uint32_t mSurfaceCnt; + uint32_t mSharedSurfacesCnt; + uint32_t mReqSurfacesCnt; + uint8_t **mUsrPtr; + + VideoEncSurfaceBuffer *mVideoSrcBufferList; + VideoEncSurfaceBuffer *mCurFrame; //current input frame to be encoded; + VideoEncSurfaceBuffer *mRefFrame; //reference frame + VideoEncSurfaceBuffer *mRecFrame; //reconstructed frame; + VideoEncSurfaceBuffer *mLastFrame; //last frame; + + VideoEncRawBuffer *mLastInputRawBuffer; + + uint32_t mEncodedFrames; + uint32_t mFrameNum; + uint32_t mCodedBufSize; + uint32_t mCodedBufIndex; + + bool mPicSkipped; + bool mIsIntra; + bool mSliceSizeOverflow; + bool mCodedBufferMapped; + bool mDataCopiedOut; + bool mKeyFrame; + + // Constants + static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE = 2; + static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE = 8; +}; + + +#endif /* __VIDEO_ENCODER_BASE_H__ */ diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h new file mode 100644 index 0000000..1e90094 --- /dev/null +++ b/videoencoder/VideoEncoderDef.h @@ -0,0 +1,435 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __VIDEO_ENCODER_DEF_H__ +#define __VIDEO_ENCODER_DEF_H__ + +#include + +#define STRING_TO_FOURCC(format) ((uint32_t)(((format)[0])|((format)[1]<<8)|((format)[2]<<16)|((format)[3]<<24))) +#define min(X,Y) (((X) < (Y)) ? (X) : (Y)) +#define max(X,Y) (((X) > (Y)) ? 
(X) : (Y)) + +typedef int32_t Encode_Status; + +// Video encode error code +enum { + ENCODE_NO_REQUEST_DATA = -10, + ENCODE_WRONG_STATE = -9, + ENCODE_NOTIMPL = -8, + ENCODE_NO_MEMORY = -7, + ENCODE_NOT_INIT = -6, + ENCODE_DRIVER_FAIL = -5, + ENCODE_INVALID_PARAMS = -4, + ENCODE_NOT_SUPPORTED = -3, + ENCODE_NULL_PTR = -2, + ENCODE_FAIL = -1, + ENCODE_SUCCESS = 0, + ENCODE_ALREADY_INIT = 1, + ENCODE_SLICESIZE_OVERFLOW = 2, + ENCODE_BUFFER_TOO_SMALL = 3 // The buffer passed to encode is too small to contain encoded data +}; + +typedef enum { + OUTPUT_EVERYTHING = 0, //Output whatever driver generates + OUTPUT_CODEC_DATA = 1, + OUTPUT_FRAME_DATA = 2, //Equal to OUTPUT_EVERYTHING when no header along with the frame data + OUTPUT_ONE_NAL = 4, + OUTPUT_ONE_NAL_WITHOUT_STARTCODE = 8, + OUTPUT_LENGTH_PREFIXED = 16, + OUTPUT_BUFFER_LAST +} VideoOutputFormat; + +typedef enum { + RAW_FORMAT_NONE = 0, + RAW_FORMAT_YUV420 = 1, + RAW_FORMAT_YUV422 = 2, + RAW_FORMAT_YUV444 = 4, + RAW_FORMAT_NV12 = 8, + RAW_FORMAT_PROTECTED = 0x80000000, + RAW_FORMAT_LAST +} VideoRawFormat; + +typedef enum { + RATE_CONTROL_NONE = 1, + RATE_CONTROL_CBR = 2, + RATE_CONTROL_VBR = 4, + RATE_CONTROL_VCM = 8, + RATE_CONTROL_LAST +} VideoRateControl; + +typedef enum { + PROFILE_MPEG2SIMPLE = 0, + PROFILE_MPEG2MAIN, + PROFILE_MPEG4SIMPLE, + PROFILE_MPEG4ADVANCEDSIMPLE, + PROFILE_MPEG4MAIN, + PROFILE_H264BASELINE, + PROFILE_H264MAIN, + PROFILE_H264HIGH, + PROFILE_VC1SIMPLE, + PROFILE_VC1MAIN, + PROFILE_VC1ADVANCED, + PROFILE_H263BASELINE +} VideoProfile; + +typedef enum { + AVC_DELIMITER_LENGTHPREFIX = 0, + AVC_DELIMITER_ANNEXB +} AVCDelimiterType; + +typedef enum { + VIDEO_ENC_NONIR, // Non intra refresh + VIDEO_ENC_CIR, // Cyclic intra refresh + VIDEO_ENC_AIR, // Adaptive intra refresh + VIDEO_ENC_BOTH, + VIDEO_ENC_LAST +} VideoIntraRefreshType; + +enum VideoBufferSharingMode { + BUFFER_SHARING_NONE = 1, //Means non shared buffer mode + BUFFER_SHARING_CI = 2, + BUFFER_SHARING_V4L2 = 4, + BUFFER_SHARING_SURFACE = 8, + BUFFER_SHARING_USRPTR = 16, + BUFFER_LAST +}; + +// Output buffer flag +#define ENCODE_BUFFERFLAG_ENDOFFRAME 0x00000001 +#define ENCODE_BUFFERFLAG_PARTIALFRAME 0x00000002 +#define ENCODE_BUFFERFLAG_SYNCFRAME 0x00000004 +#define ENCODE_BUFFERFLAG_CODECCONFIG 0x00000008 +#define ENCODE_BUFFERFLAG_DATACORRUPT 0x00000010 +#define ENCODE_BUFFERFLAG_DATAINVALID 0x00000020 +#define ENCODE_BUFFERFLAG_SLICEOVERFOLOW 0x00000040 + +typedef struct { + uint8_t *data; + uint32_t bufferSize; //buffer size + uint32_t dataSize; //actuall size + uint32_t remainingSize; + int flag; //Key frame, Codec Data etc + VideoOutputFormat format; //output format + uint64_t timeStamp; //reserved +} VideoEncOutputBuffer; + +typedef struct { + uint8_t *data; + uint32_t size; + bool bufAvailable; //To indicate whether this buffer can be reused + uint64_t timeStamp; //reserved +} VideoEncRawBuffer; + +struct VideoEncSurfaceBuffer { + VASurfaceID surface; + uint8_t *usrptr; + uint32_t index; + bool bufAvailable; + VideoEncSurfaceBuffer *next; +}; + +struct AirParams { + uint32_t airMBs; + uint32_t airThreshold; + uint32_t airAuto; + + AirParams &operator=(const AirParams &other) { + if (this == &other) return *this; + + this->airMBs= other.airMBs; + this->airThreshold= other.airThreshold; + this->airAuto = other.airAuto; + return *this; + } +}; + +struct VideoFrameRate { + uint32_t frameRateNum; + uint32_t frameRateDenom; + + VideoFrameRate &operator=(const VideoFrameRate &other) { + if (this == &other) return *this; + + this->frameRateNum = 
other.frameRateNum; + this->frameRateDenom = other.frameRateDenom; + return *this; + } +}; + +struct VideoResolution { + uint32_t width; + uint32_t height; + + VideoResolution &operator=(const VideoResolution &other) { + if (this == &other) return *this; + + this->width = other.width; + this->height = other.height; + return *this; + } +}; + +struct VideoRateControlParams { + uint32_t bitRate; + uint32_t initQP; + uint32_t minQP; + uint32_t windowSize; + uint32_t targetPercentage; + + VideoRateControlParams &operator=(const VideoRateControlParams &other) { + if (this == &other) return *this; + + this->bitRate = other.bitRate; + this->initQP = other.initQP; + this->minQP = other.minQP; + this->windowSize = other.windowSize; + this->targetPercentage = other.targetPercentage; + return *this; + } +}; + +struct SliceNum { + uint32_t iSliceNum; + uint32_t pSliceNum; + + SliceNum &operator=(const SliceNum &other) { + if (this == &other) return *this; + + this->iSliceNum = other.iSliceNum; + this->pSliceNum= other.pSliceNum; + return *this; + } +}; + +enum VideoParamConfigType { + VideoParamsTypeStartUnused = 0x01000000, + VideoParamsTypeCommon, + VideoParamsTypeAVC, + VideoParamsTypeH263, + VideoParamsTypeMP4, + VideoParamsTypeVC1, + VideoParamsTypeUpSteamBuffer, + VideoParamsTypeUsrptrBuffer, + + VideoConfigTypeFrameRate, + VideoConfigTypeBitRate, + VideoConfigTypeResolution, + VideoConfigTypeIntraRefreshType, + VideoConfigTypeAIR, + VideoConfigTypeCyclicFrameInterval, + VideoConfigTypeAVCIntraPeriod, + VideoConfigTypeNALSize, + VideoConfigTypeIDRRequest, + VideoConfigTypeSliceNum, + + VideoParamsConfigExtension +}; + +struct VideoParamConfigSet { + VideoParamConfigType type; + uint32_t size; + + VideoParamConfigSet &operator=(const VideoParamConfigSet &other) { + if (this == &other) return *this; + this->type = other.type; + this->size = other.size; + return *this; + } +}; + +struct VideoParamsCommon : VideoParamConfigSet { + + VAProfile profile; + uint8_t level; + VideoRawFormat rawFormat; + VideoResolution resolution; + VideoFrameRate frameRate; + int32_t intraPeriod; + VideoRateControl rcMode; + VideoRateControlParams rcParams; + VideoIntraRefreshType refreshType; + int32_t cyclicFrameInterval; + AirParams airParams; + uint32_t disableDeblocking; + + VideoParamsCommon() { + type = VideoParamsTypeCommon; + size = sizeof(VideoParamsCommon); + } + + VideoParamsCommon &operator=(const VideoParamsCommon &other) { + if (this == &other) return *this; + + VideoParamConfigSet::operator=(other); + this->profile = other.profile; + this->level = other.level; + this->rawFormat = other.rawFormat; + this->resolution = other.resolution; + this->frameRate = other.frameRate; + this->intraPeriod = other.intraPeriod; + this->rcMode = other.rcMode; + this->rcParams = other.rcParams; + this->refreshType = other.refreshType; + this->cyclicFrameInterval = other.cyclicFrameInterval; + this->airParams = other.airParams; + this->disableDeblocking = other.disableDeblocking; + return *this; + } +}; + +struct VideoParamsAVC : VideoParamConfigSet { + uint32_t basicUnitSize; //for rate control + uint8_t VUIFlag; + int32_t maxSliceSize; + uint32_t idrInterval; + SliceNum sliceNum; + AVCDelimiterType delimiterType; + + VideoParamsAVC() { + type = VideoParamsTypeAVC; + size = sizeof(VideoParamsAVC); + } + + VideoParamsAVC &operator=(const VideoParamsAVC &other) { + if (this == &other) return *this; + + VideoParamConfigSet::operator=(other); + this->basicUnitSize = other.basicUnitSize; + this->VUIFlag = other.VUIFlag; + 
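    // (The assignment continues below.) Note the pattern shared by every struct
    // derived from VideoParamConfigSet: the constructor stamps the type and size
    // fields, and setParameters()/getParameters()/setConfig()/getConfig() above
    // dispatch on type and return ENCODE_INVALID_PARAMS when size does not match
    // the expected struct.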
this->maxSliceSize = other.maxSliceSize; + this->idrInterval = other.idrInterval; + this->sliceNum = other.sliceNum; + this->delimiterType = other.delimiterType; + + return *this; + } +}; + +struct VideoParamsUpstreamBuffer : VideoParamConfigSet { + + VideoParamsUpstreamBuffer() { + type = VideoParamsTypeUpSteamBuffer; + size = sizeof(VideoParamsUpstreamBuffer); + } + + VideoBufferSharingMode bufferMode; + uint32_t *bufList; + uint32_t bufCnt; +}; + +struct VideoParamsUsrptrBuffer : VideoParamConfigSet { + + VideoParamsUsrptrBuffer() { + type = VideoParamsTypeUsrptrBuffer; + size = sizeof(VideoParamsUsrptrBuffer); + } + + //input + uint32_t width; + uint32_t height; + uint32_t format; + uint32_t expectedSize; + + //output + uint32_t actualSize; + uint32_t stride; + uint8_t *usrPtr; +}; + +struct VideoConfigFrameRate : VideoParamConfigSet { + + VideoConfigFrameRate() { + type = VideoConfigTypeFrameRate; + size = sizeof(VideoConfigFrameRate); + } + + VideoFrameRate frameRate; +}; + +struct VideoConfigBitRate : VideoParamConfigSet { + + VideoConfigBitRate() { + type = VideoConfigTypeBitRate; + size = sizeof(VideoConfigBitRate); + } + + VideoRateControlParams rcParams; +}; + +struct VideoConfigAVCIntraPeriod : VideoParamConfigSet { + + VideoConfigAVCIntraPeriod() { + type = VideoConfigTypeAVCIntraPeriod; + size = sizeof(VideoConfigAVCIntraPeriod); + } + + uint32_t idrInterval; //How many Intra frame will have a IDR frame + uint32_t intraPeriod; +}; + +struct VideoConfigNALSize : VideoParamConfigSet { + + VideoConfigNALSize() { + type = VideoConfigTypeNALSize; + size = sizeof(VideoConfigNALSize); + } + + uint32_t maxSliceSize; +}; + +struct VideoConfigResoltuion : VideoParamConfigSet { + + VideoConfigResoltuion() { + type = VideoConfigTypeResolution; + size = sizeof(VideoConfigResoltuion); + } + + VideoResolution resolution; +}; + +struct VideoConfigIntraRefreshType : VideoParamConfigSet { + + VideoConfigIntraRefreshType() { + type = VideoConfigTypeIntraRefreshType; + size = sizeof(VideoConfigIntraRefreshType); + } + + VideoIntraRefreshType refreshType; +}; + +struct VideoConfigCyclicFrameInterval : VideoParamConfigSet { + + VideoConfigCyclicFrameInterval() { + type = VideoConfigTypeCyclicFrameInterval; + size = sizeof(VideoConfigCyclicFrameInterval); + } + + int32_t cyclicFrameInterval; +}; + +struct VideoConfigAIR : VideoParamConfigSet { + + VideoConfigAIR() { + type = VideoConfigTypeAIR; + size = sizeof(VideoConfigAIR); + } + + AirParams airParams; +}; + +struct VideoConfigSliceNum : VideoParamConfigSet { + + VideoConfigSliceNum() { + type = VideoConfigTypeSliceNum; + size = sizeof(VideoConfigSliceNum); + } + + SliceNum sliceNum; +}; +#endif /* __VIDEO_ENCODER_DEF_H__ */ diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp new file mode 100644 index 0000000..6fb510b --- /dev/null +++ b/videoencoder/VideoEncoderH263.cpp @@ -0,0 +1,162 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#include +#include +#include "VideoEncoderLog.h" +#include "VideoEncoderH263.h" +#include + +VideoEncoderH263::VideoEncoderH263() { + mComParams.profile = (VAProfile)PROFILE_H263BASELINE; +} + +Encode_Status VideoEncoderH263::sendEncodeCommand(void) { + + Encode_Status ret = ENCODE_SUCCESS; + LOG_V( "Begin\n"); + + if (mFrameNum == 0) { + ret = renderSequenceParams(); + CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); + } + + ret = renderPictureParams(); + CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); + + ret = renderSliceParams(); + CHECK_ENCODE_STATUS_RETURN("renderSliceParams"); + + LOG_V( "End\n"); + return ENCODE_SUCCESS; +} + + +Encode_Status VideoEncoderH263::renderSequenceParams() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncSequenceParameterBufferH263 h263SequenceParam; + + LOG_V( "Begin\n\n"); + + //set up the sequence params for HW + h263SequenceParam.bits_per_second= mComParams.rcParams.bitRate; + h263SequenceParam.frame_rate = 30; //hard-coded, driver need; + h263SequenceParam.initial_qp = mComParams.rcParams.initQP; + h263SequenceParam.min_qp = mComParams.rcParams.minQP; + h263SequenceParam.intra_period = mComParams.intraPeriod; + + //h263_seq_param.fixed_vop_rate = 30; + + LOG_V("===h263 sequence params===\n"); + LOG_I( "bitrate = %d\n", h263SequenceParam.bits_per_second); + LOG_I( "frame_rate = %d\n", h263SequenceParam.frame_rate); + LOG_I( "initial_qp = %d\n", h263SequenceParam.initial_qp); + LOG_I( "min_qp = %d\n", h263SequenceParam.min_qp); + LOG_I( "intra_period = %d\n\n", h263SequenceParam.intra_period); + + vaStatus = vaCreateBuffer( + mVADisplay, mVAContext, + VAEncSequenceParameterBufferType, + sizeof(h263SequenceParam), + 1, &h263SequenceParam, + &mSeqParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + LOG_V( "end\n"); + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderH263::renderPictureParams() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncPictureParameterBufferH263 h263PictureParams; + + LOG_V( "Begin\n\n"); + + // set picture params for HW + h263PictureParams.reference_picture = mRefFrame->surface; + h263PictureParams.reconstructed_picture = mRecFrame->surface; + h263PictureParams.coded_buf = mVACodedBuffer [mCodedBufIndex]; + h263PictureParams.picture_width = mComParams.resolution.width; + h263PictureParams.picture_height = mComParams.resolution.height; + h263PictureParams.picture_type = mIsIntra ? 
VAEncPictureTypeIntra : VAEncPictureTypePredictive; + + LOG_V("======h263 picture params======\n"); + LOG_I( "reference_picture = 0x%08x\n", h263PictureParams.reference_picture); + LOG_I( "reconstructed_picture = 0x%08x\n", h263PictureParams.reconstructed_picture); + LOG_I( "coded_buf = 0x%08x\n", h263PictureParams.coded_buf); + LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); + LOG_I( "picture_width = %d\n", h263PictureParams.picture_width); + LOG_I( "picture_height = %d\n",h263PictureParams.picture_height); + LOG_I( "picture_type = %d\n\n",h263PictureParams.picture_type); + + vaStatus = vaCreateBuffer( + mVADisplay, mVAContext, + VAEncPictureParameterBufferType, + sizeof(h263PictureParams), + 1,&h263PictureParams, + &mPicParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf , 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + LOG_V( "end\n"); + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderH263::renderSliceParams() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + uint32_t sliceNum; + uint32_t sliceHeight; + uint32_t sliceHeightInMB; + + LOG_V("Begin\n\n"); + + sliceHeight = mComParams.resolution.height; + sliceHeight += 15; + sliceHeight &= (~15); + sliceHeightInMB = sliceHeight / 16; + + vaStatus = vaCreateBuffer( + mVADisplay, mVAContext, + VAEncSliceParameterBufferType, + sizeof(VAEncSliceParameterBuffer), + 1, NULL, &mSliceParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + VAEncSliceParameterBuffer *sliceParams; + vaStatus = vaMapBuffer(mVADisplay, mSliceParamBuf, (void **)&sliceParams); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + // starting MB row number for this slice + sliceParams->start_row_number = 0; + // slice height measured in MB + sliceParams->slice_height = sliceHeightInMB; + sliceParams->slice_flags.bits.is_intra = mIsIntra; + sliceParams->slice_flags.bits.disable_deblocking_filter_idc = 0; + + LOG_V("======h263 slice params======\n"); + LOG_I("start_row_number = %d\n", (int) sliceParams->start_row_number); + LOG_I("slice_height_in_mb = %d\n", (int) sliceParams->slice_height); + LOG_I("slice.is_intra = %d\n", (int) sliceParams->slice_flags.bits.is_intra); + + vaStatus = vaUnmapBuffer(mVADisplay, mSliceParamBuf); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + LOG_V("end\n"); + return ENCODE_SUCCESS; +} diff --git a/videoencoder/VideoEncoderH263.h b/videoencoder/VideoEncoderH263.h new file mode 100644 index 0000000..2113e2f --- /dev/null +++ b/videoencoder/VideoEncoderH263.h @@ -0,0 +1,45 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
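As an aside on renderSliceParams() above: the slice-height arithmetic rounds the frame height up to a whole number of 16-pixel macroblock rows before programming the slice buffer. A worked example, with the input values purely illustrative:

// (height + 15) & ~15 rounds up to the next multiple of 16:
//   height = 1080  ->  1080 + 15 = 1095  ->  1095 & ~15 = 1088  ->  1088 / 16 = 68 MB rows
//   height =  480  ->  already aligned, stays 480               ->   480 / 16 = 30 MB rows
uint32_t alignedHeight = (height + 15) & ~15;  // 'height' stands in for mComParams.resolution.height
uint32_t mbRows = alignedHeight / 16;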
+ + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __VIDEO_ENCODER_H263_H__ +#define __VIDEO_ENCODER_H263_H__ + +#include "VideoEncoderBase.h" + +/** + * H.263 Encoder class, derived from VideoEncoderBase + */ +class VideoEncoderH263: public VideoEncoderBase { +public: + VideoEncoderH263(); + virtual ~VideoEncoderH263() {}; + +protected: + virtual Encode_Status sendEncodeCommand(void); + virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) { + return ENCODE_SUCCESS; + } + virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams) { + return ENCODE_SUCCESS; + } + virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig) { + return ENCODE_SUCCESS; + } + virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) { + return ENCODE_SUCCESS; + } + + // Local Methods +private: + Encode_Status renderSequenceParams(); + Encode_Status renderPictureParams(); + Encode_Status renderSliceParams(); +}; + +#endif /* __VIDEO_ENCODER_H263_H__ */ + diff --git a/videoencoder/VideoEncoderHost.cpp b/videoencoder/VideoEncoderHost.cpp new file mode 100644 index 0000000..aed2bb9 --- /dev/null +++ b/videoencoder/VideoEncoderHost.cpp @@ -0,0 +1,43 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intels prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
+ */ + +#include "VideoEncoderMP4.h" +#include "VideoEncoderH263.h" +#include "VideoEncoderAVC.h" +#include "VideoEncoderHost.h" +#include "VideoEncoderLog.h" +#include + +IVideoEncoder *createVideoEncoder(const char *mimeType) { + + if (mimeType == NULL) { + LOG_E("NULL mime type"); + return NULL; + } + + if (strcasecmp(mimeType, "video/avc") == 0 || + strcasecmp(mimeType, "video/h264") == 0) { + VideoEncoderAVC *p = new VideoEncoderAVC(); + return (IVideoEncoder *)p; + } else if (strcasecmp(mimeType, "video/h263") == 0) { + VideoEncoderH263 *p = new VideoEncoderH263(); + return (IVideoEncoder *)p; + } else if (strcasecmp(mimeType, "video/mpeg4") == 0 || + strcasecmp(mimeType, "video/mp4v-es") == 0) { + VideoEncoderMP4 *p = new VideoEncoderMP4(); + return (IVideoEncoder *)p; + } else { + LOG_E ("Unknown mime type: %s", mimeType); + } + return NULL; +} + +void releaseVideoEncoder(IVideoEncoder *p) { + if (p) delete p; +} + diff --git a/videoencoder/VideoEncoderHost.h b/videoencoder/VideoEncoderHost.h new file mode 100644 index 0000000..cd39dc3 --- /dev/null +++ b/videoencoder/VideoEncoderHost.h @@ -0,0 +1,17 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intels prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef VIDEO_ENCODER_HOST_H_ +#define VIDEO_ENCODER_HOST_H_ + +#include "VideoEncoderInterface.h" + +IVideoEncoder *createVideoEncoder(const char *mimeType); +void releaseVideoEncoder(IVideoEncoder *p); + +#endif /* VIDEO_ENCODER_HOST_H_ */ \ No newline at end of file diff --git a/videoencoder/VideoEncoderInterface.h b/videoencoder/VideoEncoderInterface.h new file mode 100644 index 0000000..416c29d --- /dev/null +++ b/videoencoder/VideoEncoderInterface.h @@ -0,0 +1,29 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intels prior express written permission. 
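A sketch of the intended call sequence for this factory, written against the IVideoEncoder interface declared in VideoEncoderInterface.h below; buffer setup and error handling are elided, and the flow is an assumption for illustration rather than code taken from this tree:

IVideoEncoder *encoder = createVideoEncoder("video/h263");
if (encoder != NULL) {
    // setParameters()/setConfig() calls would normally go here
    if (encoder->start() == ENCODE_SUCCESS) {
        encoder->encode(&rawBuffer);      // rawBuffer: a filled VideoEncRawBuffer (hypothetical)
        encoder->getOutput(&outBuffer);   // outBuffer: a prepared VideoEncOutputBuffer (hypothetical)
        encoder->stop();
    }
    releaseVideoEncoder(encoder);
}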
+ + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef VIDEO_ENCODER_INTERFACE_H_ +#define VIDEO_ENCODER_INTERFACE_H_ + +#include "VideoEncoderDef.h" + +class IVideoEncoder { +public: + virtual ~IVideoEncoder() {}; + virtual Encode_Status start(void) = 0; + virtual Encode_Status stop(void) = 0; + virtual void flush(void) = 0; + virtual Encode_Status encode(VideoEncRawBuffer *inBuffer) = 0; + virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer) = 0; + virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams) = 0; + virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams) = 0; + virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig) = 0; + virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig) = 0; + virtual Encode_Status getMaxOutSize(uint32_t *maxSize) = 0; +}; + +#endif /* VIDEO_ENCODER_INTERFACE_H_ */ diff --git a/videoencoder/VideoEncoderLog.h b/videoencoder/VideoEncoderLog.h new file mode 100644 index 0000000..4c1e982 --- /dev/null +++ b/videoencoder/VideoEncoderLog.h @@ -0,0 +1,68 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __VIDEO_ENCODER_LOG_H__ +#define __VIDEO_ENCODER_LOG_H__ + +// Components +#define VIDEO_ENC_COMP "VideoEncoder" + +#include + +#define VIDEO_ENC_LOG_LEVEL_ERROR ANDROID_LOG_ERROR +#define VIDEO_ENC_LOG_LEVEL_WARNING ANDROID_LOG_WARN +#define VIDEO_ENC_LOG_LEVEL_INFO ANDROID_LOG_INFO +#define VIDEO_ENC_LOG_LEVEL_VERBOSE ANDROID_LOG_VERBOSE + +#define mix_log(comp, level, format, ...) \ + __android_log_print(level, comp, "%s():%d: "format, \ + __FUNCTION__, __LINE__, ##__VA_ARGS__) + +#ifdef VIDEO_ENC_LOG_ENABLE +#define LOG_V(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__) +#define LOG_I(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_INFO, format, ##__VA_ARGS__) +#define LOG_W(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_WARNING, format, ##__VA_ARGS__) +#else +#define LOG_V(format, ...) +#define LOG_I(format, ...) +#define LOG_W(format, ...) +#endif + +#define LOG_E(format, ...) 
mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_ERROR, format, ##__VA_ARGS__) + +#define CHECK_VA_STATUS_RETURN(FUNC)\ + if (vaStatus != VA_STATUS_SUCCESS) {\ + LOG_E(FUNC" failed. vaStatus = %d\n", vaStatus);\ + return ENCODE_DRIVER_FAIL;\ + } + +#define CHECK_VA_STATUS_GOTO_CLEANUP(FUNC)\ + if (vaStatus != VA_STATUS_SUCCESS) {\ + LOG_E(FUNC" failed. vaStatus = %d\n", vaStatus);\ + ret = ENCODE_DRIVER_FAIL; \ + goto CLEAN_UP;\ + } + +#define CHECK_ENCODE_STATUS_RETURN(FUNC)\ + if (ret != ENCODE_SUCCESS) { \ + LOG_E(FUNC"Failed. ret = 0x%08x\n", ret); \ + return ret; \ + } + +#define CHECK_ENCODE_STATUS_CLEANUP(FUNC)\ + if (ret != ENCODE_SUCCESS) { \ + LOG_E(FUNC"Failed, ret = 0x%08x\n", ret); \ + goto CLEAN_UP;\ + } + +#define CHECK_NULL_RETURN_IFFAIL(POINTER)\ + if (POINTER == NULL) { \ + LOG_E("Invalid pointer\n"); \ + return ENCODE_NULL_PTR;\ + } +#endif /* __VIDEO_ENCODER_LOG_H__ */ diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp new file mode 100644 index 0000000..37dce53 --- /dev/null +++ b/videoencoder/VideoEncoderMP4.cpp @@ -0,0 +1,311 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
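One note on the CHECK_* macros in VideoEncoderLog.h above: they rely on the FUNC argument being a quoted string literal, since FUNC" failed. ..." uses compile-time literal concatenation; passing a non-literal would not compile. For example, CHECK_VA_STATUS_RETURN("vaCreateBuffer") expands to roughly:

if (vaStatus != VA_STATUS_SUCCESS) {
    // "vaCreateBuffer" " failed. vaStatus = %d\n" pastes into one format string
    LOG_E("vaCreateBuffer failed. vaStatus = %d\n", vaStatus);
    return ENCODE_DRIVER_FAIL;
}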
+ */ + +#include +#include + +#include "VideoEncoderLog.h" +#include "VideoEncoderMP4.h" +#include + +VideoEncoderMP4::VideoEncoderMP4() + :mProfileLevelIndication(3) + ,mFixedVOPTimeIncrement(0) { + mComParams.profile = (VAProfile)PROFILE_MPEG4SIMPLE; +} + +Encode_Status VideoEncoderMP4::getHeaderPos( + uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize) { + + uint8_t *buf = inBuffer; + uint32_t bytesLeft = bufSize; + Encode_Status ret = ENCODE_SUCCESS; + + *headerSize = 0; + CHECK_NULL_RETURN_IFFAIL(inBuffer); + + if (bufSize < 4) { + // bufSize should not be less than 4 + LOG_E("Buffer size too small\n"); + return ENCODE_FAIL; + } + + while (bytesLeft > 4 && + (memcmp("\x00\x00\x01\xB6", &inBuffer[bufSize - bytesLeft], 4) && + memcmp("\x00\x00\x01\xB3", &inBuffer[bufSize - bytesLeft], 4))) { + --bytesLeft; + } + + if (bytesLeft <= 4) { + LOG_E("No header found\n"); + *headerSize = 0; + } else { + *headerSize = bufSize - bytesLeft; + } + + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderMP4::outputConfigData( + VideoEncOutputBuffer *outBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; + uint32_t headerSize = 0; + + ret = getHeaderPos((uint8_t *)mCurSegment->buf + mOffsetInSeg, + mCurSegment->size - mOffsetInSeg, &headerSize); + CHECK_ENCODE_STATUS_RETURN("getHeaderPos"); + if (headerSize == 0) { + outBuffer->dataSize = 0; + mCurSegment = NULL; + return ENCODE_NO_REQUEST_DATA; + } + + if (headerSize <= outBuffer->bufferSize) { + memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg, headerSize); + mTotalSizeCopied += headerSize; + mOffsetInSeg += headerSize; + outBuffer->dataSize = headerSize; + outBuffer->remainingSize = 0; + outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; + outBuffer->flag |= ENCODE_BUFFERFLAG_CODECCONFIG; + outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; + } else { + // we need a big enough buffer, otherwise we won't output anything + outBuffer->dataSize = 0; + outBuffer->remainingSize = headerSize; + outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID; + LOG_E("Buffer size too small\n"); + return ENCODE_BUFFER_TOO_SMALL; + } + + return ret; +} + + +Encode_Status VideoEncoderMP4::getOutput(VideoEncOutputBuffer *outBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; + bool useLocalBuffer = false; + + LOG_V("Begin\n"); + CHECK_NULL_RETURN_IFFAIL(outBuffer); + + if (mFrameNum > 2) { + if (mComParams.intraPeriod != 0 && + (((mFrameNum - 2) % mComParams.intraPeriod) == 0)) { + mKeyFrame = true; + } else { + mKeyFrame = false; + } + } + + // prepare for output, map the coded buffer + ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer); + CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); + + switch (outBuffer->format) { + case OUTPUT_EVERYTHING: + case OUTPUT_FRAME_DATA: { + // Output whatever we have + ret = VideoEncoderBase::outputAllData(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); + break; + } + case OUTPUT_CODEC_DATA: { + // Output the codec config data + ret = outputConfigData(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("outputConfigData"); + break; + } + default: + LOG_E("Invalid buffer mode for MPEG-4:2\n"); + ret = ENCODE_FAIL; + break; + } + + LOG_I("out size = %d\n", outBuffer->dataSize); + + // cleanup, unmap the coded buffer if all + // data has been copied out + ret = VideoEncoderBase::cleanupForOutput(); + +CLEAN_UP: + + if (ret < ENCODE_SUCCESS) { + if (outBuffer->data && (useLocalBuffer == true)) { + delete[] outBuffer->data; + outBuffer->data = NULL; + useLocalBuffer =
false; + } + + // error happens, unmap the buffer + if (mCodedBufferMapped) { + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + mCodedBufferMapped = false; + mCurSegment = NULL; + } + } + LOG_V("End\n"); + return ret; +} + + +Encode_Status VideoEncoderMP4::renderSequenceParams() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncSequenceParameterBufferMPEG4 mp4SequenceParams; + + uint32_t frameRateNum = mComParams.frameRate.frameRateNum; + uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom; + + LOG_V( "Begin\n\n"); + + // set up the sequence params for HW + mp4SequenceParams.profile_and_level_indication = mProfileLevelIndication; + mp4SequenceParams.video_object_layer_width= mComParams.resolution.width; + mp4SequenceParams.video_object_layer_height= mComParams.resolution.height; + mp4SequenceParams.vop_time_increment_resolution = + (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom; + mp4SequenceParams.fixed_vop_time_increment= mFixedVOPTimeIncrement; + mp4SequenceParams.bits_per_second= mComParams.rcParams.bitRate; + mp4SequenceParams.frame_rate = + (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom; + mp4SequenceParams.initial_qp = mComParams.rcParams.initQP; + mp4SequenceParams.min_qp = mComParams.rcParams.minQP; + mp4SequenceParams.intra_period = mComParams.intraPeriod; + //mpeg4_seq_param.fixed_vop_rate = 30; + + LOG_V("===mpeg4 sequence params===\n"); + LOG_I("profile_and_level_indication = %d\n", (uint32_t)mp4SequenceParams.profile_and_level_indication); + LOG_I("intra_period = %d\n", mp4SequenceParams.intra_period); + LOG_I("video_object_layer_width = %d\n", mp4SequenceParams.video_object_layer_width); + LOG_I("video_object_layer_height = %d\n", mp4SequenceParams.video_object_layer_height); + LOG_I("vop_time_increment_resolution = %d\n", mp4SequenceParams.vop_time_increment_resolution); + LOG_I("fixed_vop_rate = %d\n", mp4SequenceParams.fixed_vop_rate); + LOG_I("fixed_vop_time_increment = %d\n", mp4SequenceParams.fixed_vop_time_increment); + LOG_I("bitrate = %d\n", mp4SequenceParams.bits_per_second); + LOG_I("frame_rate = %d\n", mp4SequenceParams.frame_rate); + LOG_I("initial_qp = %d\n", mp4SequenceParams.initial_qp); + LOG_I("min_qp = %d\n", mp4SequenceParams.min_qp); + LOG_I("intra_period = %d\n\n", mp4SequenceParams.intra_period); + + vaStatus = vaCreateBuffer( + mVADisplay, mVAContext, + VAEncSequenceParameterBufferType, + sizeof(mp4SequenceParams), + 1, &mp4SequenceParams, + &mSeqParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + LOG_V( "end\n"); + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderMP4::renderPictureParams() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncPictureParameterBufferMPEG4 mpeg4_pic_param; + LOG_V( "Begin\n\n"); + + // set picture params for HW + mpeg4_pic_param.reference_picture = mRefFrame->surface; + mpeg4_pic_param.reconstructed_picture = mRecFrame->surface; + mpeg4_pic_param.coded_buf = mVACodedBuffer[mCodedBufIndex]; + mpeg4_pic_param.picture_width = mComParams.resolution.width; + mpeg4_pic_param.picture_height = mComParams.resolution.height; + mpeg4_pic_param.vop_time_increment= mFrameNum; + mpeg4_pic_param.picture_type = mIsIntra ? 
VAEncPictureTypeIntra : VAEncPictureTypePredictive; + + LOG_V("======mpeg4 picture params======\n"); + LOG_I("reference_picture = 0x%08x\n", mpeg4_pic_param.reference_picture); + LOG_I("reconstructed_picture = 0x%08x\n", mpeg4_pic_param.reconstructed_picture); + LOG_I("coded_buf = 0x%08x\n", mpeg4_pic_param.coded_buf); + LOG_I("coded_buf_index = %d\n", mCodedBufIndex); + LOG_I("picture_width = %d\n", mpeg4_pic_param.picture_width); + LOG_I("picture_height = %d\n", mpeg4_pic_param.picture_height); + LOG_I("vop_time_increment = %d\n", mpeg4_pic_param.vop_time_increment); + LOG_I("picture_type = %d\n\n", mpeg4_pic_param.picture_type); + + vaStatus = vaCreateBuffer( + mVADisplay, mVAContext, + VAEncPictureParameterBufferType, + sizeof(mpeg4_pic_param), + 1, &mpeg4_pic_param, + &mPicParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + return ENCODE_SUCCESS; +} + + +Encode_Status VideoEncoderMP4::renderSliceParams() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + uint32_t sliceHeight; + uint32_t sliceHeightInMB; + + VAEncSliceParameterBuffer sliceParams; + + LOG_V( "Begin\n\n"); + + sliceHeight = mComParams.resolution.height; + sliceHeight += 15; + sliceHeight &= (~15); + sliceHeightInMB = sliceHeight / 16; + + sliceParams.start_row_number = 0; + sliceParams.slice_height = sliceHeightInMB; + sliceParams.slice_flags.bits.is_intra = mIsIntra; + sliceParams.slice_flags.bits.disable_deblocking_filter_idc = 0; + + LOG_V("======mpeg4 slice params======\n"); + LOG_I( "start_row_number = %d\n", (int) sliceParams.start_row_number); + LOG_I( "sliceHeightInMB = %d\n", (int) sliceParams.slice_height); + LOG_I( "is_intra = %d\n", (int) sliceParams.slice_flags.bits.is_intra); + + vaStatus = vaCreateBuffer( + mVADisplay, mVAContext, + VAEncSliceParameterBufferType, + sizeof(VAEncSliceParameterBuffer), + 1, &sliceParams, + &mSliceParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + LOG_V( "end\n"); + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderMP4::sendEncodeCommand(void) { + Encode_Status ret = ENCODE_SUCCESS; + LOG_V( "Begin\n"); + + if (mFrameNum == 0) { + ret = renderSequenceParams(); + CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); + } + + ret = renderPictureParams(); + CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); + + ret = renderSliceParams(); + CHECK_ENCODE_STATUS_RETURN("renderSliceParams"); + + LOG_V( "End\n"); + return ENCODE_SUCCESS; +} diff --git a/videoencoder/VideoEncoderMP4.h b/videoencoder/VideoEncoderMP4.h new file mode 100644 index 0000000..b453023 --- /dev/null +++ b/videoencoder/VideoEncoderMP4.h @@ -0,0 +1,51 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions.
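One detail from renderSequenceParams() above worth spelling out: both vop_time_increment_resolution and frame_rate use round-to-nearest integer division on the frame-rate fraction. For an NTSC-style rate the arithmetic works out as follows (the input values are illustrative):

// frameRateNum = 30000, frameRateDenom = 1001  (29.97 fps)
// (30000 + 1001 / 2) / 1001 = (30000 + 500) / 1001 = 30500 / 1001 = 30
uint32_t fps = (frameRateNum + frameRateDenom / 2) / frameRateDenom;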
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __VIDEO_ENCODER__MPEG4_H__ +#define __VIDEO_ENCODER__MPEG4_H__ + +#include "VideoEncoderBase.h" + +/** + * MPEG-4:2 Encoder class, derived from VideoEncoderBase + */ +class VideoEncoderMP4: public VideoEncoderBase { +public: + VideoEncoderMP4(); + virtual ~VideoEncoderMP4() {}; + + Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); + +protected: + virtual Encode_Status sendEncodeCommand(void); + + virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) { + return ENCODE_SUCCESS; + } + virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams) { + return ENCODE_SUCCESS; + } + virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig) { + return ENCODE_SUCCESS; + } + virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) { + return ENCODE_SUCCESS; + } + // Local Methods +private: + Encode_Status getHeaderPos(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize); + Encode_Status outputConfigData(VideoEncOutputBuffer *outBuffer); + Encode_Status renderSequenceParams(); + Encode_Status renderPictureParams(); + Encode_Status renderSliceParams(); + + unsigned char mProfileLevelIndication; + uint32_t mFixedVOPTimeIncrement; +}; + +#endif /* __VIDEO_ENCODER__MPEG4_H__ */ -- cgit v1.2.3 From 7032eb6c2d11e2419380e7a4837667c2829c6d76 Mon Sep 17 00:00:00 2001 From: Shuduo Sang Date: Tue, 13 Sep 2011 17:27:39 +0800 Subject: [PORT FROM R1][libmix] fix issue - green line displays on the left side of thumbnail for the 1080p recorded video clip BZ: 7906 remove the cropped area when copying out the raw data Change-Id: I440ea210b4d2fca07e0678b7f31c513dc5145088 Orig-Change-Id: Ib07a9bb382859d0fabb937b26a22f64bcd1e84a7 Signed-off-by: Weian Chen Reviewed-on: http://android.intel.com:8080/18319 Tested-by: Sang, Shuduo Reviewed-by: Monnier, OlivierX Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 1fc8835..3964ef6 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -869,8 +869,12 @@ Decode_Status VideoDecoderBase::getRawDataFromSurface(void) { } else { rawData = mAcquiredBuffer->renderBuffer.rawData; } + // size in NV12 format - int32_t size = mVideoFormatInfo.width * mVideoFormatInfo.height * 3/2; + uint32_t cropWidth = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight); + uint32_t cropHeight = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop); + int32_t size = cropWidth * cropHeight * 3 / 2; + if (rawData->data != NULL && rawData->size != size) { delete [] rawData->data; rawData->data = NULL; @@ -883,15 +887,15 @@ Decode_Status VideoDecoderBase::getRawDataFromSurface(void) { } } rawData->own = true; // allocated by this library - rawData->width = mVideoFormatInfo.width; - 
rawData->height = mVideoFormatInfo.height; - rawData->pitch[0] = mVideoFormatInfo.width; - rawData->pitch[1] = mVideoFormatInfo.width; + rawData->width = cropWidth; + rawData->height = cropHeight; + rawData->pitch[0] = cropWidth; + rawData->pitch[1] = cropWidth; rawData->pitch[2] = 0; // interleaved U/V, two planes rawData->offset[0] = 0; - rawData->offset[1] = mVideoFormatInfo.width * mVideoFormatInfo.height; - rawData->offset[2] = mVideoFormatInfo.width * mVideoFormatInfo.height * 3/2; - rawData->size = size;; + rawData->offset[1] = cropWidth * cropHeight; + rawData->offset[2] = cropWidth * cropHeight * 3 / 2; + rawData->size = size; rawData->fourcc = 'NV12'; if (size == (int32_t)vaImage.data_size) { memcpy(rawData->data, pBuf, size); @@ -900,16 +904,16 @@ Decode_Status VideoDecoderBase::getRawDataFromSurface(void) { uint8_t *src = (uint8_t*)pBuf; uint8_t *dst = rawData->data; int32_t row = 0; - for (row = 0; row < mVideoFormatInfo.height; row++) { - memcpy(dst, src, mVideoFormatInfo.width); - dst += mVideoFormatInfo.width; + for (row = 0; row < cropHeight; row++) { + memcpy(dst, src, cropWidth); + dst += cropWidth; src += vaImage.pitches[0]; } // copy interleaved V and U data src = (uint8_t*)pBuf + vaImage.offsets[1]; - for (row = 0; row < mVideoFormatInfo.height/2; row++) { - memcpy(dst, src, mVideoFormatInfo.width); - dst += mVideoFormatInfo.width; + for (row = 0; row < cropHeight / 2; row++) { + memcpy(dst, src, cropWidth); + dst += cropWidth; src += vaImage.pitches[1]; } } -- cgit v1.2.3 From 00fe22856c23e1f4109dfa11a034be6c336d7c18 Mon Sep 17 00:00:00 2001 From: Shuduo Sang Date: Tue, 13 Sep 2011 17:28:57 +0800 Subject: [PORT FROM R1][libmix] asf parser: take the preroll into consideration when seeking BZ: 8009 Take the preroll of the audio/video elementary streams of the ASF bit stream into consideration when calculating the actual seek point. Change-Id: Ie6b4813c5ee73cd4d6038fbe7501f64ba73318ba Orig-Change-Id: I0c52877fca53dfdcc3fe2d587f6c2cb64b52a585 Signed-off-by: Shuo Liu Reviewed-on: http://android.intel.com:8080/18320 Tested-by: Sang, Shuduo Reviewed-by: Monnier, OlivierX Reviewed-by: buildbot Tested-by: buildbot --- asfparser/AsfStreamParser.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/asfparser/AsfStreamParser.cpp b/asfparser/AsfStreamParser.cpp index b9210ca..647b2e8 100644 --- a/asfparser/AsfStreamParser.cpp +++ b/asfparser/AsfStreamParser.cpp @@ -180,6 +180,8 @@ int AsfStreamParser::seek( return ASF_PARSER_FAILED; } + seekTime += mHeaderParser->getPreroll()*ASF_SCALE_MS_TO_100NANOSEC; //add preroll start time + return mSimpleIndexParser->seek(seekTime, nextSync, packetNumber, targetTime); } -- cgit v1.2.3 From a6a49e2d86fee539430dba76296d6b0884b5bb48 Mon Sep 17 00:00:00 2001 From: Shuduo Sang Date: Tue, 20 Sep 2011 17:40:27 +0800 Subject: [PORT FROM R1][Video] libmix: set correct reference POC for interlaced H264 stream.
BZ: 6801 Change-Id: I72cc0f90b061191dc4a2800a27ae01732c2ef626 Orig-Change-Id: I740fd15604444a638d9a516201200d8be0d1f4b8 Reviewed-on: http://android.intel.com:8080/19119 Reviewed-by: Sang, Shuduo Tested-by: Sang, Shuduo Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index 2dc9723..c04726b 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -337,8 +337,8 @@ static inline void vbp_set_VAPicture_h264( WTRACE("Reference picture structure is not frame for current frame picture!"); } pic->flags = 0; - pic->TopFieldOrderCnt = store->frame.poc; - pic->BottomFieldOrderCnt = store->frame.poc; + pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; } else { @@ -546,8 +546,8 @@ static inline void vbp_set_reference_frames_h264( pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; if (FRAME == parser->info.img.structure) { - pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc; - pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc; + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; } else { -- cgit v1.2.3 From 571795ecd0c1168be6cc02f2f2cb4e95d8d1860f Mon Sep 17 00:00:00 2001 From: Shuduo Sang Date: Tue, 20 Sep 2011 13:44:52 +0800 Subject: [PORT FROM R1][Video] libmix: work around gfx memory shortage issue BZ: 9134 limit the surface number to 10 to work around a memory shortage issue on the gfx side Signed-off-by: Weian Chen Change-Id: Ia0c1f840de5094d08c9ffccd4ce659501364e601 Orig-Change-Id: Id6677baeaff90886fb8cf25f03ee6868948710e8 Reviewed-on: http://android.intel.com:8080/19061 Reviewed-by: Sang, Shuduo Tested-by: Sang, Shuduo Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderAVC.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 4de8936..a415a20 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -585,8 +585,9 @@ Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) { } } // for 1080p, limit the total surface to 19, according the hardware limitation - if(mVideoFormatInfo.height == 1088 && DPBSize + AVC_EXTRA_SURFACE_NUMBER > 19) { - DPBSize = 19 - AVC_EXTRA_SURFACE_NUMBER; + // change the max surface number from 19->10 to workaround memory shortage + if(mVideoFormatInfo.height == 1088 && DPBSize + AVC_EXTRA_SURFACE_NUMBER > 10) { + DPBSize = 10 - AVC_EXTRA_SURFACE_NUMBER; } VideoDecoderBase::setOutputWindowSize(DPBSize); return VideoDecoderBase::setupVA(DPBSize + AVC_EXTRA_SURFACE_NUMBER, vaProfile); -- cgit v1.2.3 From a518446523b6fdc4fdb4b27c9f0f27256a7388c5 Mon Sep 17 00:00:00 2001 From: Shuduo Sang Date: Tue, 27 Sep 2011 14:55:29 +0800 Subject: [PORT FROM R1][Video]libmix: add 16-byte alignment for H.263 decoder BZ: 8355 add 16-byte alignment to the H.263 parser to fix a flicker issue at the bottom of 360p clips Change-Id: I27a7c48058ee2ff98858dbb1431b753098e003ba Orig-Change-Id: I51fc1143382c2b8abae3e685a0d358ca77f40885 Signed-off-by: Weian Chen Reviewed-on: http://android.intel.com:8080/19890 Reviewed-by: Sang, Shuduo Tested-by: Sang, Shuduo Reviewed-by:
buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c index 2c19add..4125a6c 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c @@ -317,8 +317,8 @@ mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser { k = 4; } - svh->num_macroblocks_in_gob = (vol->video_object_layer_width/16)*k; - svh->num_gobs_in_vop = (vol->video_object_layer_height)/(16*k); + svh->num_macroblocks_in_gob = (((vol->video_object_layer_width + 15) & ~15) /16)*k; + svh->num_gobs_in_vop = (((vol->video_object_layer_height + 15) & ~15)/(16*k)); svh->num_rows_in_gob = k; } else -- cgit v1.2.3 From cf5ce7a99b5df90830e032fd592203848418b74e Mon Sep 17 00:00:00 2001 From: Andy Qiu Date: Tue, 13 Sep 2011 10:48:37 -0700 Subject: Support playing protected contents. BZ: 7358 Add VideoDecoderPAVC class for playing protected contents. Change-Id: Ie3acefdd5605578ac494841fc6fabf1b324fb9ca Signed-off-by: Andy Qiu Reviewed-on: http://android.intel.com:8080/20278 Reviewed-by: Sun, Hang L Tested-by: Sun, Hang L Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/Android.mk | 1 + videodecoder/VideoDecoderAVC.h | 6 +- videodecoder/VideoDecoderBase.cpp | 10 +- videodecoder/VideoDecoderDefs.h | 13 ++- videodecoder/VideoDecoderHost.cpp | 6 +- videodecoder/VideoDecoderPAVC.cpp | 186 ++++++++++++++++++++++++++++++++++++++ videodecoder/VideoDecoderPAVC.h | 59 ++++++++++++ 7 files changed, 274 insertions(+), 7 deletions(-) create mode 100644 videodecoder/VideoDecoderPAVC.cpp create mode 100644 videodecoder/VideoDecoderPAVC.h diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 3dbe77c..b075586 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -9,6 +9,7 @@ LOCAL_SRC_FILES := \ VideoDecoderMPEG4.cpp \ VideoDecoderAVC.cpp \ VideoDecoderVP8.cpp \ + VideoDecoderPAVC.cpp \ VideoDecoderTrace.cpp # LOCAL_CFLAGS := diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h index ff52486..2d8d1b6 100644 --- a/videodecoder/VideoDecoderAVC.h +++ b/videodecoder/VideoDecoderAVC.h @@ -1,5 +1,5 @@ /* INTEL CONFIDENTIAL -* Copyright (c) 2009 Intel Corporation. All rights reserved. +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. 
* * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -38,11 +38,11 @@ public: virtual void flush(void); virtual Decode_Status decode(VideoDecodeBuffer *buffer); -private: +protected: Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data); Decode_Status beginDecodingFrame(vbp_data_h264 *data); Decode_Status continueDecodingFrame(vbp_data_h264 *data); - Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); + virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); Decode_Status setReference(VASliceParameterBufferH264 *sliceParam); Decode_Status updateDPB(VAPictureParameterBufferH264 *picParam); Decode_Status updateReferenceFrames(vbp_picture_data_h264 *picData); diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 3964ef6..0a68d68 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -1,5 +1,5 @@ /* INTEL CONFIDENTIAL -* Copyright (c) 2009 Intel Corporation. All rights reserved. +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -655,11 +655,17 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { return DECODE_MEMORY_FAIL; } + int32_t format = VA_RT_FORMAT_YUV420; + if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) { + format |= VA_RT_FORMAT_PROTECTED; + LOGW("Surface is protected."); + } + vaStatus = vaCreateSurfaces( mVADisplay, mVideoFormatInfo.width, mVideoFormatInfo.height, - VA_RT_FORMAT_YUV420, + format, mNumSurfaces, mSurfaces); CHECK_VA_STATUS("vaCreateSurfaces"); diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 59a5e04..a715d7c 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -1,5 +1,5 @@ /* INTEL CONFIDENTIAL -* Copyright (c) 2009 Intel Corporation. All rights reserved. +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -28,6 +28,7 @@ #include #include + // format specific data, for future extension. struct VideoFormatSpecificData { int32_t formatType; @@ -74,6 +75,16 @@ typedef enum { // indicate surfaceNumber field is valid and it contains minimum surface number to allocate. HAS_MINIMUM_SURFACE_NUMBER = 0x100, + + // indicates surface created will be protected + WANT_SURFACE_PROTECTION = 0x400, + + // indicates if extra data is appended at end of buffer + HAS_EXTRADATA = 0x800, + + // indicates if buffer contains codec data + HAS_CODECDATA = 0x1000, + } VIDEO_BUFFER_FLAG; struct VideoDecodeBuffer { diff --git a/videodecoder/VideoDecoderHost.cpp b/videodecoder/VideoDecoderHost.cpp index d946a2b..b3e3039 100644 --- a/videodecoder/VideoDecoderHost.cpp +++ b/videodecoder/VideoDecoderHost.cpp @@ -1,5 +1,5 @@ /* INTEL CONFIDENTIAL -* Copyright (c) 2009 Intel Corporation. All rights reserved. +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. 
* * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -25,6 +25,7 @@ #include "VideoDecoderWMV.h" #include "VideoDecoderMPEG4.h" #include "VideoDecoderAVC.h" +#include "VideoDecoderPAVC.h" #include "VideoDecoderHost.h" #include "VideoDecoderTrace.h" #include @@ -49,6 +50,9 @@ IVideoDecoder* createVideoDecoder(const char* mimeType) { strcasecmp(mimeType, "video/3gpp") == 0) { VideoDecoderMPEG4 *p = new VideoDecoderMPEG4(mimeType); return (IVideoDecoder *)p; + } else if (strcasecmp(mimeType, "video/pavc") == 0) { + VideoDecoderAVC *p = new VideoDecoderPAVC(mimeType); + return (IVideoDecoder *)p; } else { ETRACE("Unknown mime type: %s", mimeType); } diff --git a/videodecoder/VideoDecoderPAVC.cpp b/videodecoder/VideoDecoderPAVC.cpp new file mode 100644 index 0000000..c05330a --- /dev/null +++ b/videodecoder/VideoDecoderPAVC.cpp @@ -0,0 +1,186 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +*/ + +#include "VideoDecoderPAVC.h" +#include "VideoDecoderTrace.h" +#include + +VideoDecoderPAVC::VideoDecoderPAVC(const char *mimeType) + : VideoDecoderAVC(mimeType), + mMetadata(NULL) { +} + +VideoDecoderPAVC::~VideoDecoderPAVC() { +} + +Decode_Status VideoDecoderPAVC::decode(VideoDecodeBuffer *buffer) { + // TODO: preprocessing protected content here + + mMetadata = NULL; + + if (buffer->flag & HAS_EXTRADATA) { + mMetadata = buffer->data + buffer->size; + } + + return VideoDecoderAVC::decode(buffer); +} + + +Decode_Status VideoDecoderPAVC::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { + if (mMetadata == NULL) { + // non-protected content playback path + return VideoDecoderAVC::decodeSlice(data, picIndex, sliceIndex); + } + + Decode_Status status; + VAStatus vaStatus; + uint32_t bufferIDCount = 0; + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID bufferIDs[4]; + + vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); + vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); + VAPictureParameterBufferH264 *picParam = picData->pic_parms; + VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); + + if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { + // either condition indicates start of a new frame + if (sliceParam->first_mb_in_slice != 0) { + WTRACE("The first slice is lost."); + // TODO: handle the first slice lost + } + if (mDecodingFrame) { + // interlace content, complete decoding the first field + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS("vaEndPicture"); + + // for interlace content, top field may be valid only after the second field is parsed + mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; + } + + // Check there is no reference frame loss before decoding a frame + + // Update the reference frames and surface IDs for DPB and current frame + status = updateDPB(picParam); + CHECK_STATUS("updateDPB"); + + //We have to provide a hacked DPB rather than complete DPB for libva as workaround + status = updateReferenceFrames(picData); + CHECK_STATUS("updateReferenceFrames"); + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); + + // start decoding a frame + mDecodingFrame = true; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferH264), + 1, + picParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferH264), + 1, + data->IQ_matrix_buf, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); + bufferIDCount++; + } + + status = setReference(sliceParam); + CHECK_STATUS("setReference"); + + // find which medata is correlated to current slice + PAVCMetadata *pMetadata = (PAVCMetadata*)mMetadata; + uint32_t accumulatedClearNALUSize = 0; + uint32_t clearNALUSize = 0; + do { + clearNALUSize = pMetadata->clearHeaderSize + pMetadata->decryptionDataSize; + if (clearNALUSize == 0) { + LOGE("Could not find meta data for current NAL unit."); + return DECODE_INVALID_DATA; + } + + if (accumulatedClearNALUSize + clearNALUSize > sliceData->slice_offset) { + break; + } + accumulatedClearNALUSize += clearNALUSize; + pMetadata++; + } while (1); + + // add bytes that are encrypted + 
sliceParam->slice_data_size += pMetadata->encryptionDataSize; + sliceData->slice_size = sliceParam->slice_data_size; + + // no need to update: + // sliceParam->slice_data_offset - 0 always + // sliceParam->slice_data_bit_offset - relative to sliceData->slice_offset + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264), + 1, + sliceParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); + bufferIDCount++; + + // sliceData->slice_offset - accumulatedClearNALUSize is the absolute offset to start codes of current NAL unit + // offset points to first byte of NAL unit + uint32_t offset = pMetadata->clearHeaderIMROffset + sliceData->slice_offset - accumulatedClearNALUSize; + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + //VASliceDataBufferType, + VAProtectedSliceDataBufferType, + sliceData->slice_size, //size + 1, //num_elements + //sliceData->buffer_addr + sliceData->slice_offset, + (uint8_t*)offset, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + bufferIDCount++; + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + bufferIDs, + bufferIDCount); + CHECK_VA_STATUS("vaRenderPicture"); + + return DECODE_SUCCESS; +} + diff --git a/videodecoder/VideoDecoderPAVC.h b/videodecoder/VideoDecoderPAVC.h new file mode 100644 index 0000000..195c07d --- /dev/null +++ b/videodecoder/VideoDecoderPAVC.h @@ -0,0 +1,59 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef VIDEO_DECODER_PAVC_H_ +#define VIDEO_DECODER_PAVC_H_ + +#include "VideoDecoderAVC.h" + + +class VideoDecoderPAVC : public VideoDecoderAVC { +public: + VideoDecoderPAVC(const char *mimeType); + virtual ~VideoDecoderPAVC(); + + // data in the decoded buffer only contains clearHeader and decrypted data. + // encrypted data is not included in the buffer as it may contain start code emulation bytes. + virtual Decode_Status decode(VideoDecodeBuffer *buffer); + +private: + virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); + + // structure PAVCMetadata is appended after the VideodecodeBuffer::data + VideoDecoderBuffer::size + // number of structures is equal to number of nal units in the buffer. 
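// Illustrative buffer layout (sizes hypothetical, not from a real stream)
// for an input carrying two NAL units, given sizeof(PAVCMetadata) == 16:
//
//   data[0 .. size)             clear headers plus decrypted payload
//   data[size .. size+16)       PAVCMetadata for NAL unit 0
//   data[size+16 .. size+32)    PAVCMetadata for NAL unit 1
//   data[size+32 .. size+48)    terminator entry with clearHeaderSize == 0
//
// decodeSlice() above walks this array, accumulating
// clearHeaderSize + decryptionDataSize until the running total would pass
// the current slice_offset; that entry describes the slice being submitted.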
+ struct PAVCMetadata + { + uint32_t clearHeaderSize; // 0 means no more meta data + uint32_t decryptionDataSize; + uint32_t encryptionDataSize; + uint32_t clearHeaderIMROffset; // always points to clear header in the IMR + }; + +private: + uint8_t *mMetadata; // pointer to metadata appended at end of buffer +}; + + + +#endif /* VIDEO_DECODER_PAVC_H_ */ -- cgit v1.2.3 From c957f88ad880c7f6fdcb84e3c9fe10141589b1d5 Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Thu, 29 Sep 2011 11:30:20 -0400 Subject: libmix: fix video corruption in WiDi ext mode BZ: 11037 fix video corruption in WiDi ext mode by getting skip frameflag and set reference frame correctly Change-Id: Iad0610606573e2614a986a0e2b44ce70e6a673a4 Signed-off-by: Weian Chen Reviewed-on: http://android.intel.com:8080/20215 Reviewed-by: Tao, Tao Q Reviewed-by: Marotte, Fabien Tested-by: Sun, Hang L Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 4c6fe62..5fe041b 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -340,6 +340,7 @@ Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer) { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; + uint8_t *buf = NULL; if (!mInitialized) { LOG_E("Encoder has not initialized yet\n"); @@ -389,6 +390,12 @@ Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer) { mOutCodedBuffer = mLastCodedBuffer; + // Need map buffer before calling query surface below to get + // the right skip frame flag for current frame + // It is a requirement of video driver + vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + if (!((mComParams.rcMode == VA_RC_NONE) || mFirstFrame)) { vaStatus = vaEndPicture(mVADisplay, mVAContext); CHECK_VA_STATUS_RETURN("vaEndPicture"); @@ -410,7 +417,7 @@ Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer) { // Query the status of current surface VASurfaceStatus vaSurfaceStatus; - vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastFrame->surface, &vaSurfaceStatus); + vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurFrame->surface, &vaSurfaceStatus); CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); mPicSkipped = vaSurfaceStatus & VASurfaceSkipped; -- cgit v1.2.3 From fb575c6ba27470b03c8ccf9da0987743e54c08d8 Mon Sep 17 00:00:00 2001 From: Linda Cline Date: Wed, 28 Sep 2011 18:52:57 -0700 Subject: libmix: mix_vbp - Fix green artifacts in CNN videos BZ: 9679 Flash video streamed from CNN site can have errors in the bitstream, missing reference frames, etc, causing green artifacts in the decoded video. This fix will skip video frames for which reference frames are not available, displaying only frames decoded correctly and increasing robustness. Change to mix_vbp to return NAL unit type so that reference IDR frames can be tracked in MixVideo. Consequences are that black video will be displayed until a correct video bitstream is received from the server. 
Change-Id: Ie03ef8f7bf3ac96bb4bb683659236ad2b7afbf3f Signed-off-by: Linda Cline Reviewed-on: http://android.intel.com:8080/20165 Reviewed-by: Qiu, Junhai Reviewed-by: Chen, Weian Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 77 +++++++++++---------------- mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 2 + 2 files changed, 34 insertions(+), 45 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index c04726b..eae56a9 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -49,6 +49,12 @@ struct vbp_h264_parser_private_t /* indicate if stream is length prefixed */ int length_prefix_verified; + /* active sequence parameter set id */ + uint8 seq_parameter_set_id; + + /* active picture parameter set id */ + uint8 pic_parameter_set_id; + H264_BS_PATTERN bitstream_pattern; }; @@ -266,6 +272,12 @@ uint32 vbp_allocate_query_data_h264(vbp_context *pcontext) parser_private->length_prefix_verified = 0; parser_private->bitstream_pattern = H264_BS_SC_PREFIXED; + + /* range from 0 to 31 inclusive */ + parser_private->seq_parameter_set_id = 0xff; + + /* range from 0 to 255 inclusive */ + parser_private->pic_parameter_set_id = 0xff; return VBP_OK; cleanup: @@ -1154,33 +1166,8 @@ static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index) /* bit: bits parsed within the current parsing position */ viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul); + slc_data->nal_unit_type = h264_parser->info.nal_unit_type; -#if 0 - /* add 4 bytes of start code prefix */ - slc_parms->slice_data_size = slc_data->slice_size = - pcontext->parser_cxt->list.data[index].edpos - - pcontext->parser_cxt->list.data[index].stpos + 4; - - slc_data->slice_offset = pcontext->parser_cxt->list.data[index].stpos - 4; - - /* overwrite the "length" bytes to start code (0x00000001) */ - *(slc_data->buffer_addr + slc_data->slice_offset) = 0; - *(slc_data->buffer_addr + slc_data->slice_offset + 1) = 0; - *(slc_data->buffer_addr + slc_data->slice_offset + 2) = 0; - *(slc_data->buffer_addr + slc_data->slice_offset + 3) = 1; - - - /* the offset to the NAL start code for this slice */ - slc_parms->slice_data_offset = 0; - - /* whole slice is in this buffer */ - slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; - - /* bit offset from NAL start code to the beginning of slice data */ - /* slc_parms->slice_data_bit_offset = bit;*/ - slc_parms->slice_data_bit_offset = (byte + 4)* 8 + bit; - -#else slc_parms->slice_data_size = slc_data->slice_size = pcontext->parser_cxt->list.data[index].edpos - pcontext->parser_cxt->list.data[index].stpos; @@ -1194,7 +1181,7 @@ static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index) /* bit offset from NAL start code to the beginning of slice data */ slc_parms->slice_data_bit_offset = bit + byte * 8; -#endif + if (is_emul) { @@ -1483,16 +1470,6 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) } query_data->num_pictures = 0; - if (query_data->new_sps && !query_data->has_pps) - { - // we are waiting for a new pps, so should net reset new_sps flag - } - else - { - query_data->new_sps = 0; - } - query_data->new_pps = 0; - cxt->list.num_items = 0; /* reset start position of first item to 0 in case there is only one item */ @@ -1675,22 +1652,15 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) error = vbp_add_slice_data_h264(pcontext, i); } break; - case 
h264_NAL_UNIT_TYPE_SEI: //ITRACE("SEI header is parsed."); break; case h264_NAL_UNIT_TYPE_SPS: - - query_data->new_sps = 1; - query_data->has_sps = 1; - query_data->has_pps = 0; ITRACE("SPS header is parsed."); break; case h264_NAL_UNIT_TYPE_PPS: - query_data->new_pps = 1; - query_data->has_pps = 1; ITRACE("PPS header is parsed."); break; @@ -1703,7 +1673,7 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) break; case h264_NAL_UNIT_TYPE_EOstream: - ITRACE("EOStream is parsed."); + ITRACE("EOStream is parsed"); break; default: @@ -1722,9 +1692,11 @@ uint32 vbp_populate_query_data_h264(vbp_context *pcontext) { vbp_data_h264 *query_data = NULL; struct h264_viddec_parser *parser = NULL; + struct vbp_h264_parser_private_t* private = NULL; parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data; query_data = (vbp_data_h264 *)pcontext->query_data; + private = (struct vbp_h264_parser_private_t *)pcontext->parser_private; vbp_set_codec_data_h264(parser, query_data->codec_data); @@ -1748,6 +1720,21 @@ uint32 vbp_populate_query_data_h264(vbp_context *pcontext) */ vbp_add_pic_data_h264(pcontext, 0); } + + query_data->new_pps = 0; + query_data->new_sps = 0; + if (private->seq_parameter_set_id != 0xff) + { + query_data->new_pps = (private->pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 1 : 0; + query_data->new_sps = (private->seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0; + } + + private->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id; + private->seq_parameter_set_id = parser->info.active_PPS.seq_parameter_set_id; + + query_data->has_sps = parser->info.active_SPS.seq_parameter_set_id != 0xff; + query_data->has_pps = parser->info.active_PPS.seq_parameter_set_id != 0xff; + return VBP_OK; } diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index c690e88..38e2a05 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -180,6 +180,8 @@ typedef struct _vbp_slice_data_h264 uint32 slice_size; /* slice data size */ + uint8 nal_unit_type; + VASliceParameterBufferH264 slc_parms; } vbp_slice_data_h264; -- cgit v1.2.3 From ada45d1adc65599cb938dd1b87f730ed834e6c30 Mon Sep 17 00:00:00 2001 From: Tang Guifang Date: Wed, 28 Sep 2011 09:38:38 +0800 Subject: libmix: increase DPB_SIZE to handle H264 stream with 16 reference frames. BZ: 10408 Current DPB_SIZE for H264 is set to 16, if reference frame number reaches maximum 16 and the frame to be decoded is a referencs as well, no left buffer node for current frame and one reference frame will be replaced, which leads to decoding error due to missing reference. 
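The sizing logic behind the diff below, spelled out: H.264 permits up to 16 reference frames in the DPB, and the frame currently being decoded may itself become a reference, so the buffer-node array needs one slot more than the reference count:

// MAX_REF_NUMBER = 16   maximum H.264 reference frames
// DPB_SIZE       = 17   MAX_REF_NUMBER + 1: one extra node for the current
//                       frame while all 16 references are still live
// Loops that walk existing references remain bounded by MAX_REF_NUMBER.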
Change-Id: If88bb5cc17384f1c5dcec21fdf5b517625b48c17 Reviewed-on: http://android.intel.com:8080/19993 Reviewed-by: Tang, Guifang Reviewed-by: Qiu, Junhai Reviewed-by: Chen, Weian Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderAVC.cpp | 2 +- videodecoder/VideoDecoderAVC.h | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index a415a20..27c1e1d 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -388,7 +388,7 @@ Decode_Status VideoDecoderAVC::updateDPB(VAPictureParameterBufferH264 *picParam) picParam->CurrPic.picture_id = mAcquiredBuffer->renderBuffer.surface; // build new DPB - for (int32_t i = 0; i < DPB_SIZE; i++, ref++) { + for (int32_t i = 0; i < MAX_REF_NUMBER; i++, ref++) { if (ref->flags & VA_PICTURE_H264_INVALID) { continue; } diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h index 2d8d1b6..c3a1fe5 100644 --- a/videodecoder/VideoDecoderAVC.h +++ b/videodecoder/VideoDecoderAVC.h @@ -67,7 +67,8 @@ private: enum { AVC_EXTRA_SURFACE_NUMBER = 6, // maximum DPB (Decoded Picture Buffer) size - DPB_SIZE = 16, + MAX_REF_NUMBER = 16, + DPB_SIZE = 17, // DPB_SIZE = MAX_REF_NUMBER + 1, REF_LIST_SIZE = 32, }; -- cgit v1.2.3 From 778c180b6762b92a1c2281307113c958072bf20a Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Thu, 20 Oct 2011 10:25:55 -0400 Subject: libmix: remove the workaround for gfx memory shortage issue as they have a real fix BZ: 9134 Remove the workaround for the gfx memory shortage issue, as a real fix is now in place. Signed-off-by: Weian Chen Change-Id: I597aec141c700a13cc7d6bdc95b598294757f15b Reviewed-on: http://android.intel.com:8080/22086 Reviewed-by: Yuan, Shengquan Tested-by: Ding, Haitao Reviewed-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderAVC.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 27c1e1d..7ceb4a6 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -586,8 +586,9 @@ Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) { } // for 1080p, limit the total surface to 19, according the hardware limitation // change the max surface number from 19->10 to workaround memory shortage - if(mVideoFormatInfo.height == 1088 && DPBSize + AVC_EXTRA_SURFACE_NUMBER > 10) { - DPBSize = 10 - AVC_EXTRA_SURFACE_NUMBER; + // remove the workaround + if(mVideoFormatInfo.height == 1088 && DPBSize + AVC_EXTRA_SURFACE_NUMBER > 19) { + DPBSize = 19 - AVC_EXTRA_SURFACE_NUMBER; } VideoDecoderBase::setOutputWindowSize(DPBSize); return VideoDecoderBase::setupVA(DPBSize + AVC_EXTRA_SURFACE_NUMBER, vaProfile); -- cgit v1.2.3 From bc54fa93e07fb3ba5aeda25093c7b0208246c287 Mon Sep 17 00:00:00 2001 From: Linda Cline Date: Wed, 28 Sep 2011 19:34:54 -0700 Subject: libmix: mix_video - Fix green artifacts in CNN videos BZ: 9679 Flash video streamed from the CNN site can have errors in the bitstream, missing reference frames, etc., causing green artifacts in the decoded video. This fix skips video frames for which reference frames are not available, displaying only correctly decoded frames and increasing robustness. It changes mix_video to track reference IDR frames. The consequence is that black video will be displayed until a correct video bitstream is received from the server.
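In outline, the gate this change adds to MixVideoFormat_H264::_decode_a_buffer behaves like the sketch below. SliceData, decodeFrame, decodeNormally and the simplified result codes are stand-ins for the real call path, which appears in the diff that follows.

    struct SliceData { int nal_unit_type; };            // stand-in type
    enum { MIX_RESULT_OK = 0, MIX_RESULT_MISSING_IDR }; // simplified result codes

    int decodeNormally(const SliceData &) { return MIX_RESULT_OK; } // stand-in

    bool missing_idr = true; // also reset to true whenever a new SPS arrives

    int decodeFrame(const SliceData &first_slice) {
        if (first_slice.nal_unit_type == 5) { // NAL unit type 5 is an IDR slice
            missing_idr = false;              // valid random-access point seen
        } else if (missing_idr) {
            return MIX_RESULT_MISSING_IDR;    // skip: references are unreliable
        }
        return decodeNormally(first_slice);
    }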
Change-Id: I4a2cd044c9e30a324c2e987d91df9f14e873f8a8 Signed-off-by: Linda Cline Reviewed-on: http://android.intel.com:8080/20167 Reviewed-by: Chen, Weian Tested-by: Sun, Hang L Reviewed-by: buildbot Tested-by: buildbot --- mix_video/src/mixvideodef.h | 1 + mix_video/src/mixvideoformat_h264.cpp | 13 +++++++++++++ mix_video/src/mixvideoformat_h264.h | 1 + 3 files changed, 15 insertions(+) mode change 100644 => 100755 mix_video/src/mixvideoformat_h264.cpp mode change 100644 => 100755 mix_video/src/mixvideoformat_h264.h diff --git a/mix_video/src/mixvideodef.h b/mix_video/src/mixvideodef.h index 1c0aa02..464bc14 100644 --- a/mix_video/src/mixvideodef.h +++ b/mix_video/src/mixvideodef.h @@ -50,6 +50,7 @@ typedef enum { MIX_RESULT_NOT_PERMITTED, MIX_RESULT_ERROR_PROCESS_STREAM, MIX_RESULT_MISSING_CONFIG, + MIX_RESULT_MISSING_IDR, MIX_RESULT_VIDEO_LAST } MIX_VIDEO_ERROR_CODE; diff --git a/mix_video/src/mixvideoformat_h264.cpp b/mix_video/src/mixvideoformat_h264.cpp old mode 100644 new mode 100755 index 158c456..a8213a7 --- a/mix_video/src/mixvideoformat_h264.cpp +++ b/mix_video/src/mixvideoformat_h264.cpp @@ -80,6 +80,7 @@ MIX_RESULT MixVideoFormat_H264::Initialize( enum _vbp_parser_type ptype = VBP_H264; vbp_data_h264 *data = NULL; MixIOVec *header = NULL; + missing_idr = true; MixVideoConfigParamsDecH264 *config_params_h264 = NULL; bool va_setup_flag = FALSE; @@ -1228,6 +1229,7 @@ MIX_RESULT MixVideoFormat_H264::_decode_a_buffer( if (data->new_sps) { decode_params->new_sequence = data->new_sps; + missing_idr = true; ret = _handle_new_sequence(data); if (ret != MIX_RESULT_SUCCESS) { @@ -1258,6 +1260,17 @@ MIX_RESULT MixVideoFormat_H264::_decode_a_buffer( return ret; } + // Check if first slice is IDR (5) + if (data->pic_data->slc_data->nal_unit_type == 5) { + missing_idr = false; + LOG_V("Received IDR.\n"); + } + else if (missing_idr) { + LOG_V("Missing IDR.\n"); + LOG_V("End\n"); + return MIX_RESULT_MISSING_IDR; + } + uint64 last_ts = this->current_timestamp; this->current_timestamp = ts; this->discontinuity_frame_in_progress = discontinuity; diff --git a/mix_video/src/mixvideoformat_h264.h b/mix_video/src/mixvideoformat_h264.h old mode 100644 new mode 100755 index b85d6df..a7d6479 --- a/mix_video/src/mixvideoformat_h264.h +++ b/mix_video/src/mixvideoformat_h264.h @@ -72,6 +72,7 @@ public: //Can improve which frame is used for this at a later time MixVideoFrame *last_decoded_frame; //last surface decoded, to be used as reference frame when reference frames are missing #endif + bool missing_idr; }; -- cgit v1.2.3 From 38c7c944473633cf85944560fc6182b733e3a575 Mon Sep 17 00:00:00 2001 From: Ola Olsson Date: Mon, 24 Oct 2011 10:48:17 -0700 Subject: Removing global variable because of a race condition. BZ: 10581 When different threads in the same process spawn multiple instances of the AVCDecoder, we still use the same active_fs variable for all instances even though each instance should have a separate one.
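To illustrate the hazard, here is a stripped-down before/after sketch; the types are simplified stand-ins for frame_store and h264_DecodedPictureBuffer, and the pool size is a placeholder for NUM_DPB_FRAME_STORES.

    struct frame_store { int fs_idc; };

    // Before: one file-scope pointer for the whole process. Two threads,
    // each driving its own decoder instance, race on this single global:
    //
    //     frame_store *active_fs;
    //     void set_active_fs(DPB *p_dpb, int i) { active_fs = &p_dpb->fs[i]; }

    // After: the pointer lives inside each decoder's DPB context, so
    // concurrent instances no longer interfere with each other.
    struct DPB {
        frame_store fs[17];     // placeholder size
        frame_store *active_fs; // per-instance, formerly a global
    };

    void set_active_fs(DPB *p_dpb, int index) {
        p_dpb->active_fs = &p_dpb->fs[index];
    }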
This is fixed by this patch Change-Id: Icd3c8d3b0efd3243e89fb088824928f669ca0201 Signed-off-by: Ola Olsson Reviewed-on: http://android.intel.com:8080/22447 Reviewed-by: Qiu, Junhai Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h | 1 + .../fw/codecs/h264/include/h264parse_dpb.h | 8 +- .../fw/codecs/h264/parser/h264parse_dpb.c | 749 ++++++++++----------- 3 files changed, 375 insertions(+), 383 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h index 1033948..d6261d2 100755 --- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h +++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h @@ -411,6 +411,7 @@ extern "C" { int32_t FrameHeightInMbs; frame_store fs[NUM_DPB_FRAME_STORES]; + frame_store* active_fs; uint8_t fs_ref_idc[16]; uint8_t fs_ltref_idc[16]; diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h index 002818b..f7935a4 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h +++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h @@ -46,7 +46,7 @@ extern "C" { extern void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc); extern void h264_dpb_update_queue_dangling_field(h264_Info * pInfo); - extern void h264_dpb_is_used_for_reference(int32_t * flag); + extern void h264_dpb_is_used_for_reference(h264_DecodedPictureBuffer * p_dpb, int32_t * flag); extern void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb,int32_t index); @@ -76,8 +76,8 @@ extern "C" { int32_t frame_request, int32_t num_ref_frames); - extern void h264_dpb_split_field (h264_Info * pInfo); - extern void h264_dpb_combine_field(int32_t use_old); + extern void h264_dpb_split_field (h264_DecodedPictureBuffer *p_dpb, h264_Info * pInfo); + extern void h264_dpb_combine_field(h264_DecodedPictureBuffer *p_dpb, int32_t use_old); extern void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo, int32_t used_for_reference, @@ -98,8 +98,6 @@ extern "C" { extern void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int32_t * existing); //////////////////////////////////////////////////////////// Globals - extern frame_store *active_fs; - #ifdef __cplusplus } diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c index 7d26bce..55047f1 100755 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c @@ -13,7 +13,6 @@ #include "viddec_h264_parse.h" - //#include #include "h264parse.h" #include "h264parse_dpb.h" @@ -26,12 +25,6 @@ //#define NULL 0 //#endif -////////////////////////// Declare Globals/////////////////////////////// -frame_store *active_fs; - -/* ------------------------------------------------------------------------------------------ */ -/* ------------------------------------------------------------------------------------------ */ -/* ------------------------------------------------------------------------------------------ */ ///////////////////////// DPB init ////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////// // Init DPB @@ -109,36 +102,36 @@ void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb, int32_t NonExi h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); //if(active_fs->is_reference) - 
if (active_fs->frame.used_for_reference) + if (p_dpb->active_fs->frame.used_for_reference) { - if (viddec_h264_get_is_long_term(active_fs)) + if (viddec_h264_get_is_long_term(p_dpb->active_fs)) { - if (viddec_h264_get_dec_structure(active_fs) == FRAME) - h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc); + if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME) + h264_dpb_add_ltref_list(p_dpb, p_dpb->active_fs->fs_idc); else { uint32_t found_in_list = 0, i = 0; for (i = 0; (i < p_dpb->ltref_frames_in_buffer) && (found_in_list == 0); i++) { - if (p_dpb->fs_ltref_idc[i] == active_fs->fs_idc) found_in_list = 1; + if (p_dpb->fs_ltref_idc[i] == p_dpb->active_fs->fs_idc) found_in_list = 1; } - if (found_in_list == 0) h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc); + if (found_in_list == 0) h264_dpb_add_ltref_list(p_dpb, p_dpb->active_fs->fs_idc); } } else { - if (viddec_h264_get_dec_structure(active_fs) == FRAME) { - h264_dpb_add_ref_list(p_dpb, active_fs->fs_idc); + if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME) { + h264_dpb_add_ref_list(p_dpb, p_dpb->active_fs->fs_idc); } else { uint32_t found_in_list = 0, i = 0; for (i = 0; (i < p_dpb->ref_frames_in_buffer) && (found_in_list == 0); i++) { - if (p_dpb->fs_ref_idc[i] == active_fs->fs_idc) found_in_list = 1; + if (p_dpb->fs_ref_idc[i] == p_dpb->active_fs->fs_idc) found_in_list = 1; } - if (found_in_list == 0) h264_dpb_add_ref_list(p_dpb, active_fs->fs_idc); + if (found_in_list == 0) h264_dpb_add_ref_list(p_dpb, p_dpb->active_fs->fs_idc); } } } @@ -156,7 +149,7 @@ void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb, int32_t NonExi void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb, int32_t index) { - active_fs = &p_dpb->fs[index]; + p_dpb->active_fs = &p_dpb->fs[index]; } /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -198,11 +191,11 @@ void h264_list_sort(uint8_t *list, int32_t *sort_indices, int32_t size, int32_t // Used to sort a list based on a corresponding sort indices ////////////////////////////////////////////////////////////////////////////// -int32_t h264_dpb_pic_is_bottom_field_ref(int32_t long_term) +int32_t h264_dpb_pic_is_bottom_field_ref(h264_DecodedPictureBuffer * p_dpb, int32_t long_term) { int32_t temp; - if (long_term) temp = ((active_fs->bottom_field.used_for_reference) && (active_fs->bottom_field.is_long_term)) ? 1 : 0; - else temp = ((active_fs->bottom_field.used_for_reference) && !(active_fs->bottom_field.is_long_term)) ? 1 : 0; + if (long_term) temp = ((p_dpb->active_fs->bottom_field.used_for_reference) && (p_dpb->active_fs->bottom_field.is_long_term)) ? 1 : 0; + else temp = ((p_dpb->active_fs->bottom_field.used_for_reference) && !(p_dpb->active_fs->bottom_field.is_long_term)) ? 1 : 0; return temp; } @@ -215,13 +208,13 @@ int32_t h264_dpb_pic_is_bottom_field_ref(int32_t long_term) // Used to sort a list based on a corresponding sort indices ////////////////////////////////////////////////////////////////////////////// -int32_t h264_dpb_pic_is_top_field_ref(int32_t long_term) +int32_t h264_dpb_pic_is_top_field_ref(h264_DecodedPictureBuffer * p_dpb, int32_t long_term) { int32_t temp; if (long_term) - temp = ((active_fs->top_field.used_for_reference) && (active_fs->top_field.is_long_term)) ? 1 : 0; + temp = ((p_dpb->active_fs->top_field.used_for_reference) && (p_dpb->active_fs->top_field.is_long_term)) ? 
1 : 0; else - temp = ((active_fs->top_field.used_for_reference) && !(active_fs->top_field.is_long_term)) ? 1 : 0; + temp = ((p_dpb->active_fs->top_field.used_for_reference) && !(p_dpb->active_fs->top_field.is_long_term)) ? 1 : 0; return temp; } @@ -258,9 +251,9 @@ int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, while ((top_idx < list_size) & ~got_pic) { h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]); - if ((viddec_h264_get_is_used(active_fs))&0x1) + if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x1) { - if (h264_dpb_pic_is_top_field_ref(long_term)) + if (h264_dpb_pic_is_top_field_ref(p_dpb, long_term)) { pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field list_idx++; @@ -275,9 +268,9 @@ int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, while ((bot_idx < list_size) & ~got_pic) { h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]); - if ((viddec_h264_get_is_used(active_fs))&0x2) + if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x2) { - if (h264_dpb_pic_is_bottom_field_ref(long_term)) + if (h264_dpb_pic_is_bottom_field_ref(p_dpb, long_term)) { pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field list_idx++; @@ -298,8 +291,8 @@ int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, while ((bot_idx < list_size) && (!(got_pic))) { h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]); - if ((viddec_h264_get_is_used(active_fs))&0x2) { - if (h264_dpb_pic_is_bottom_field_ref(long_term)) { + if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x2) { + if (h264_dpb_pic_is_bottom_field_ref(p_dpb, long_term)) { // short term ref pic pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field list_idx++; @@ -314,8 +307,8 @@ int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, while ((top_idx < list_size) && (!(got_pic))) { h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]); - if ((viddec_h264_get_is_used(active_fs))&0x1) { - if (h264_dpb_pic_is_top_field_ref(long_term)) { + if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x1) { + if (h264_dpb_pic_is_top_field_ref(p_dpb, long_term)) { // short term ref pic pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field list_idx++; @@ -451,20 +444,20 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - if ((viddec_h264_get_is_used(active_fs) == 3)&&(active_fs->frame.used_for_reference == 3)) + if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3)&&(p_dpb->active_fs->frame.used_for_reference == 3)) { - if (active_fs->frame_num > pInfo->img.frame_num) - active_fs->frame_num_wrap = active_fs->frame_num - MaxFrameNum; + if (p_dpb->active_fs->frame_num > pInfo->img.frame_num) + p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num - MaxFrameNum; else - active_fs->frame_num_wrap = active_fs->frame_num; + p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num; - active_fs->frame.pic_num = active_fs->frame_num_wrap; + p_dpb->active_fs->frame.pic_num = p_dpb->active_fs->frame_num_wrap; // Use this opportunity to sort list for a p-frame if (pInfo->SliceHeader.slice_type == h264_PtypeP) { sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; - list_sort_number[list0idx] = active_fs->frame.pic_num; + list_sort_number[list0idx] = 
p_dpb->active_fs->frame.pic_num; list0idx++; } } @@ -483,14 +476,14 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); - if ((viddec_h264_get_is_used(active_fs) == 3) && (viddec_h264_get_is_long_term(active_fs) == 3) && (active_fs->frame.used_for_reference == 3)) + if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3) && (viddec_h264_get_is_long_term(p_dpb->active_fs) == 3) && (p_dpb->active_fs->frame.used_for_reference == 3)) { - active_fs->frame.long_term_pic_num = active_fs->frame.long_term_frame_idx; + p_dpb->active_fs->frame.long_term_pic_num = p_dpb->active_fs->frame.long_term_frame_idx; if (pInfo->SliceHeader.slice_type == h264_PtypeP) { sort_fs_idc[list0idx-p_dpb->listXsize[0]] = p_dpb->fs_ltref_idc[idx]; - list_sort_number[list0idx-p_dpb->listXsize[0]] = active_fs->frame.long_term_pic_num; + list_sort_number[list0idx-p_dpb->listXsize[0]] = p_dpb->active_fs->frame.long_term_pic_num; list0idx++; } } @@ -522,25 +515,25 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - if (active_fs->frame.used_for_reference) + if (p_dpb->active_fs->frame.used_for_reference) { - if (active_fs->frame_num > pInfo->SliceHeader.frame_num) { - active_fs->frame_num_wrap = active_fs->frame_num - MaxFrameNum; + if (p_dpb->active_fs->frame_num > pInfo->SliceHeader.frame_num) { + p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num - MaxFrameNum; } else { - active_fs->frame_num_wrap = active_fs->frame_num; + p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num; } - if ((active_fs->frame.used_for_reference)&0x1) { - active_fs->top_field.pic_num = (active_fs->frame_num_wrap << 1) + add_top; + if ((p_dpb->active_fs->frame.used_for_reference)&0x1) { + p_dpb->active_fs->top_field.pic_num = (p_dpb->active_fs->frame_num_wrap << 1) + add_top; } - if ((active_fs->frame.used_for_reference)&0x2) { - active_fs->bottom_field.pic_num = (active_fs->frame_num_wrap << 1) + add_bottom; + if ((p_dpb->active_fs->frame.used_for_reference)&0x2) { + p_dpb->active_fs->bottom_field.pic_num = (p_dpb->active_fs->frame_num_wrap << 1) + add_bottom; } if (pInfo->SliceHeader.slice_type == h264_PtypeP) { sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; - list_sort_number[list0idx] = active_fs->frame_num_wrap; + list_sort_number[list0idx] = p_dpb->active_fs->frame_num_wrap; list0idx++; } } @@ -567,18 +560,18 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); - if (viddec_h264_get_is_long_term(active_fs)&0x1) { - active_fs->top_field.long_term_pic_num = (active_fs->top_field.long_term_frame_idx << 1) + add_top; + if (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x1) { + p_dpb->active_fs->top_field.long_term_pic_num = (p_dpb->active_fs->top_field.long_term_frame_idx << 1) + add_top; } - if (viddec_h264_get_is_long_term(active_fs)&0x2) { - active_fs->bottom_field.long_term_pic_num = (active_fs->bottom_field.long_term_frame_idx << 1) + add_bottom; + if (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2) { + p_dpb->active_fs->bottom_field.long_term_pic_num = (p_dpb->active_fs->bottom_field.long_term_frame_idx << 1) + add_bottom; } if (pInfo->SliceHeader.slice_type == h264_PtypeP) { sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx]; - list_sort_number[listltidx] = active_fs->long_term_frame_idx; + list_sort_number[listltidx] = 
p_dpb->active_fs->long_term_frame_idx; listltidx++; } } @@ -631,22 +624,22 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - if (viddec_h264_get_is_used(active_fs) == 3) + if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) { if (check_non_existing) { - if (viddec_h264_get_is_non_existent(active_fs)) skip_picture = 1; + if (viddec_h264_get_is_non_existent(p_dpb->active_fs)) skip_picture = 1; else skip_picture = 0; } if (skip_picture == 0) { - if ((active_fs->frame.used_for_reference==3) && (!(active_fs->frame.is_long_term))) + if ((p_dpb->active_fs->frame.used_for_reference==3) && (!(p_dpb->active_fs->frame.is_long_term))) { - if (pInfo->img.framepoc >= active_fs->frame.poc) + if (pInfo->img.framepoc >= p_dpb->active_fs->frame.poc) { sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; - list_sort_number[list0idx] = active_fs->frame.poc; + list_sort_number[list0idx] = p_dpb->active_fs->frame.poc; list0idx++; } } @@ -666,22 +659,22 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - if (viddec_h264_get_is_used(active_fs) == 3) + if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) { if (check_non_existing) { - if (viddec_h264_get_is_non_existent(active_fs)) skip_picture = 1; + if (viddec_h264_get_is_non_existent(p_dpb->active_fs)) skip_picture = 1; else skip_picture = 0; } if (skip_picture == 0) { - if ((active_fs->frame.used_for_reference) && (!(active_fs->frame.is_long_term))) + if ((p_dpb->active_fs->frame.used_for_reference) && (!(p_dpb->active_fs->frame.is_long_term))) { - if (pInfo->img.framepoc < active_fs->frame.poc) + if (pInfo->img.framepoc < p_dpb->active_fs->frame.poc) { sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx]; - list_sort_number[list0idx-list0idx_1] = active_fs->frame.poc; + list_sort_number[list0idx-list0idx_1] = p_dpb->active_fs->frame.poc; list0idx++; } } @@ -713,11 +706,11 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); - if ((viddec_h264_get_is_used(active_fs) == 3) && (viddec_h264_get_is_long_term(active_fs) == 3)) + if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3) && (viddec_h264_get_is_long_term(p_dpb->active_fs) == 3)) { // if we have two fields, both must be long-term sort_fs_idc[list0idx] = p_dpb->fs_ltref_idc[idx]; - list_sort_number[list0idx] = active_fs->frame.long_term_pic_num; + list_sort_number[list0idx] = p_dpb->active_fs->frame.long_term_pic_num; list0idx++; } } @@ -738,18 +731,18 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - if (viddec_h264_get_is_used(active_fs)) { + if (viddec_h264_get_is_used(p_dpb->active_fs)) { if (check_non_existing) { - if (viddec_h264_get_is_non_existent(active_fs)) + if (viddec_h264_get_is_non_existent(p_dpb->active_fs)) skip_picture = 1; else skip_picture = 0; } if (skip_picture == 0) { - if (pInfo->img.ThisPOC >= active_fs->frame.poc) { + if (pInfo->img.ThisPOC >= p_dpb->active_fs->frame.poc) { sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; - list_sort_number[list0idx] = active_fs->frame.poc; + list_sort_number[list0idx] = p_dpb->active_fs->frame.poc; list0idx++; } } @@ -767,19 +760,19 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - if (viddec_h264_get_is_used(active_fs)) + if 
(viddec_h264_get_is_used(p_dpb->active_fs)) { if (check_non_existing) { - if (viddec_h264_get_is_non_existent(active_fs)) + if (viddec_h264_get_is_non_existent(p_dpb->active_fs)) skip_picture = 1; else skip_picture = 0; } if (skip_picture == 0) { - if (pInfo->img.ThisPOC < active_fs->frame.poc) { + if (pInfo->img.ThisPOC < p_dpb->active_fs->frame.poc) { sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx]; - list_sort_number[list0idx-list0idx_1] = active_fs->frame.poc; + list_sort_number[list0idx-list0idx_1] = p_dpb->active_fs->frame.poc; list0idx++; } } @@ -817,7 +810,7 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx]; - list_sort_number[listltidx] = active_fs->long_term_frame_idx; + list_sort_number[listltidx] = p_dpb->active_fs->long_term_frame_idx; listltidx++; } @@ -1426,7 +1419,7 @@ static int32_t avc_dpb_get_non_output_frame_number(h264_Info * pInfo) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - if (viddec_h264_get_is_output(active_fs) == 0) + if (viddec_h264_get_is_output(p_dpb->active_fs) == 0) { (number)++; } @@ -1473,24 +1466,24 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin //used_for_reference = (use_old) ? !(old_pInfo->img.old_disposable_flag) : !(pInfo->img.disposable_flag); used_for_reference = (use_old) ? !(pInfo->old_slice.nal_ref_idc==0) : !(pInfo->SliceHeader.nal_ref_idc==0); - switch (viddec_h264_get_dec_structure(active_fs)) + switch (viddec_h264_get_dec_structure(p_dpb->active_fs)) { case(TOP_FIELD) : { - active_fs->top_field.used_for_reference = used_for_reference; - viddec_h264_set_is_top_used(active_fs, 1); - //active_fs->crc_field_coded = 1; + p_dpb->active_fs->top_field.used_for_reference = used_for_reference; + viddec_h264_set_is_top_used(p_dpb->active_fs, 1); + //p_dpb->active_fs->crc_field_coded = 1; } break; case(BOTTOM_FIELD): { - active_fs->bottom_field.used_for_reference = used_for_reference << 1; - viddec_h264_set_is_bottom_used(active_fs, 1); - //active_fs->crc_field_coded = 1; + p_dpb->active_fs->bottom_field.used_for_reference = used_for_reference << 1; + viddec_h264_set_is_bottom_used(p_dpb->active_fs, 1); + //p_dpb->active_fs->crc_field_coded = 1; } break; default: { - active_fs->frame.used_for_reference = used_for_reference?3:0; - viddec_h264_set_is_frame_used(active_fs, 3); - //if(pInfo->img.MbaffFrameFlag) active_fs->crc_field_coded = 1; + p_dpb->active_fs->frame.used_for_reference = used_for_reference?3:0; + viddec_h264_set_is_frame_used(p_dpb->active_fs, 3); + //if(pInfo->img.MbaffFrameFlag) p_dpb->active_fs->crc_field_coded = 1; } break; @@ -1511,7 +1504,7 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin // Reset the active frame store - could have changed in mem management ftns h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - if ((viddec_h264_get_dec_structure(active_fs) == TOP_FIELD)||(viddec_h264_get_dec_structure(active_fs) == BOTTOM_FIELD)) + if ((viddec_h264_get_dec_structure(p_dpb->active_fs) == TOP_FIELD)||(viddec_h264_get_dec_structure(p_dpb->active_fs) == BOTTOM_FIELD)) { // check for frame store with same pic_number -- always true in my case, YH // when we allocate frame store for the second field, we make sure the frame store for the second @@ -1520,7 +1513,7 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin // In this way we don't need to move image data around and can reduce memory bandwidth. 
// simply check if the check if the other field has been decoded or not - if (viddec_h264_get_is_used(active_fs) != 0) + if (viddec_h264_get_is_used(p_dpb->active_fs) != 0) { if (pInfo->img.second_field) { @@ -1534,9 +1527,9 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin { // Set up locals for non-existing frames used_for_reference = 1; - active_fs->frame.used_for_reference = used_for_reference?3:0; - viddec_h264_set_is_frame_used(active_fs, 3); - viddec_h264_set_dec_structure(active_fs, FRAME); + p_dpb->active_fs->frame.used_for_reference = used_for_reference?3:0; + viddec_h264_set_is_frame_used(p_dpb->active_fs, 3); + viddec_h264_set_dec_structure(p_dpb->active_fs, FRAME); pInfo->img.structure = FRAME; } @@ -1548,7 +1541,7 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin // non-reference frames may be output directly h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - if ((used_for_reference == 0) && (viddec_h264_get_is_used(active_fs) == 3)) + if ((used_for_reference == 0) && (viddec_h264_get_is_used(p_dpb->active_fs) == 3)) { h264_dpb_get_smallest_poc (p_dpb, &poc, &pos); h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); @@ -1688,77 +1681,77 @@ void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference if (NonExisting == 0) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - active_fs->frame_num = (use_old) ? pInfo->old_slice.frame_num : pInfo->SliceHeader.frame_num; + p_dpb->active_fs->frame_num = (use_old) ? pInfo->old_slice.frame_num : pInfo->SliceHeader.frame_num; } else { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); - active_fs->frame_num = active_fs->frame.pic_num; + p_dpb->active_fs->frame_num = p_dpb->active_fs->frame.pic_num; } if (add2dpb) { - p_dpb->fs_dpb_idc[p_dpb->used_size] = active_fs->fs_idc; + p_dpb->fs_dpb_idc[p_dpb->used_size] = p_dpb->active_fs->fs_idc; p_dpb->used_size++; } - switch (viddec_h264_get_dec_structure(active_fs)) + switch (viddec_h264_get_dec_structure(p_dpb->active_fs)) { case FRAME : { - viddec_h264_set_is_frame_used(active_fs, 3); - active_fs->frame.used_for_reference = used_for_reference?3:0; + viddec_h264_set_is_frame_used(p_dpb->active_fs, 3); + p_dpb->active_fs->frame.used_for_reference = used_for_reference?3:0; if (used_for_reference) { - active_fs->frame.used_for_reference = 3; - if (active_fs->frame.is_long_term) - viddec_h264_set_is_frame_long_term(active_fs, 3); + p_dpb->active_fs->frame.used_for_reference = 3; + if (p_dpb->active_fs->frame.is_long_term) + viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 3); } // Split frame to 2 fields for prediction - h264_dpb_split_field(pInfo); + h264_dpb_split_field(p_dpb, pInfo); } break; case TOP_FIELD : { - viddec_h264_set_is_top_used(active_fs, 1); + viddec_h264_set_is_top_used(p_dpb->active_fs, 1); - active_fs->top_field.used_for_reference = used_for_reference; + p_dpb->active_fs->top_field.used_for_reference = used_for_reference; if (used_for_reference) { - active_fs->frame.used_for_reference |= 0x1; - if (active_fs->top_field.is_long_term) + p_dpb->active_fs->frame.used_for_reference |= 0x1; + if (p_dpb->active_fs->top_field.is_long_term) { - viddec_h264_set_is_top_long_term(active_fs, 1); - active_fs->long_term_frame_idx = active_fs->top_field.long_term_frame_idx; + viddec_h264_set_is_top_long_term(p_dpb->active_fs, 1); + p_dpb->active_fs->long_term_frame_idx = p_dpb->active_fs->top_field.long_term_frame_idx; } } - if (viddec_h264_get_is_used(active_fs) == 3) { - 
h264_dpb_combine_field(use_old); // generate frame view + if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) { + h264_dpb_combine_field(p_dpb, use_old); // generate frame view } else { - active_fs->frame.poc = active_fs->top_field.poc; + p_dpb->active_fs->frame.poc = p_dpb->active_fs->top_field.poc; } } break; case BOTTOM_FIELD : { - viddec_h264_set_is_bottom_used(active_fs, 1); + viddec_h264_set_is_bottom_used(p_dpb->active_fs, 1); - active_fs->bottom_field.used_for_reference = (used_for_reference<<1); + p_dpb->active_fs->bottom_field.used_for_reference = (used_for_reference<<1); if (used_for_reference) { - active_fs->frame.used_for_reference |= 0x2; - if (active_fs->bottom_field.is_long_term) + p_dpb->active_fs->frame.used_for_reference |= 0x2; + if (p_dpb->active_fs->bottom_field.is_long_term) { - viddec_h264_set_is_bottom_long_term(active_fs, 1); - active_fs->long_term_frame_idx = active_fs->bottom_field.long_term_frame_idx; + viddec_h264_set_is_bottom_long_term(p_dpb->active_fs, 1); + p_dpb->active_fs->long_term_frame_idx = p_dpb->active_fs->bottom_field.long_term_frame_idx; } } - if (viddec_h264_get_is_used(active_fs) == 3) { - h264_dpb_combine_field(use_old); // generate frame view + if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) { + h264_dpb_combine_field(p_dpb, use_old); // generate frame view } else { - active_fs->frame.poc = active_fs->bottom_field.poc; + p_dpb->active_fs->frame.poc = p_dpb->active_fs->bottom_field.poc; } } @@ -1767,7 +1760,7 @@ void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference /* if ( gRestartMode.LastRestartType == RESTART_SEI ) { - if ( active_fs->open_gop_entry ) dpb.WaitSeiRecovery = 1; + if ( p_dpb->active_fs->open_gop_entry ) dpb.WaitSeiRecovery = 1; } gRestartMode.LastRestartType = 0xFFFF; @@ -1813,11 +1806,11 @@ void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t diff no need to continue to check - hence set unmark_done */ - if ((active_fs->frame.used_for_reference == 3) && (viddec_h264_get_is_long_term(active_fs) == 0) && - (active_fs->frame.pic_num == picNumX)) + if ((p_dpb->active_fs->frame.used_for_reference == 3) && (viddec_h264_get_is_long_term(p_dpb->active_fs) == 0) && + (p_dpb->active_fs->frame.pic_num == picNumX)) { - h264_dpb_unmark_for_reference(p_dpb, active_fs->fs_idc); - h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); + h264_dpb_unmark_for_reference(p_dpb, p_dpb->active_fs->fs_idc); + h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc); unmark_done = 1; } } @@ -1833,28 +1826,28 @@ void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t diff How will this affect the reference list update ftn coming after?? */ - if ((active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(active_fs)&0x01))&& - (active_fs->top_field.pic_num == picNumX) ) + if ((p_dpb->active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x01))&& + (p_dpb->active_fs->top_field.pic_num == picNumX) ) { - active_fs->top_field.used_for_reference = 0; - active_fs->frame.used_for_reference &= 2; + p_dpb->active_fs->top_field.used_for_reference = 0; + p_dpb->active_fs->frame.used_for_reference &= 2; unmark_done = 1; //Check if other field is used for short-term reference, if not remove from list... 
- if (active_fs->bottom_field.used_for_reference == 0) + if (p_dpb->active_fs->bottom_field.used_for_reference == 0) h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); } - if ((active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(active_fs)&0x2)) && - (active_fs->bottom_field.pic_num == picNumX) ) + if ((p_dpb->active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2)) && + (p_dpb->active_fs->bottom_field.pic_num == picNumX) ) { - active_fs->bottom_field.used_for_reference = 0; - active_fs->frame.used_for_reference &= 1; + p_dpb->active_fs->bottom_field.used_for_reference = 0; + p_dpb->active_fs->frame.used_for_reference &= 1; unmark_done = 1; //Check if other field is used for reference, if not remove from list... - if (active_fs->top_field.used_for_reference == 0) + if (p_dpb->active_fs->top_field.used_for_reference == 0) h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); } } @@ -1894,8 +1887,8 @@ void h264_dpb_mm_unmark_long_term_for_reference (h264_Info * pInfo, int32_t long if (pInfo->img.structure == FRAME) { - if ((active_fs->frame.used_for_reference==3) && (viddec_h264_get_is_long_term(active_fs)==3) && - (active_fs->frame.long_term_pic_num == long_term_pic_num)) + if ((p_dpb->active_fs->frame.used_for_reference==3) && (viddec_h264_get_is_long_term(p_dpb->active_fs)==3) && + (p_dpb->active_fs->frame.long_term_pic_num == long_term_pic_num)) { h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]); h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); @@ -1905,33 +1898,33 @@ void h264_dpb_mm_unmark_long_term_for_reference (h264_Info * pInfo, int32_t long else { /// Check top field - if ((active_fs->frame.used_for_reference&0x1) && (viddec_h264_get_is_long_term(active_fs)&0x1) && - (active_fs->top_field.long_term_pic_num == long_term_pic_num) ) + if ((p_dpb->active_fs->frame.used_for_reference&0x1) && (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x1) && + (p_dpb->active_fs->top_field.long_term_pic_num == long_term_pic_num) ) { - active_fs->top_field.used_for_reference = 0; - active_fs->top_field.is_long_term = 0; - active_fs->frame.used_for_reference &= 2; - viddec_h264_set_is_frame_long_term(active_fs, 2); + p_dpb->active_fs->top_field.used_for_reference = 0; + p_dpb->active_fs->top_field.is_long_term = 0; + p_dpb->active_fs->frame.used_for_reference &= 2; + viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 2); unmark_done = 1; //Check if other field is used for long term reference, if not remove from list... 
- if ((active_fs->bottom_field.used_for_reference == 0) || (active_fs->bottom_field.is_long_term == 0)) + if ((p_dpb->active_fs->bottom_field.used_for_reference == 0) || (p_dpb->active_fs->bottom_field.is_long_term == 0)) h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); } /// Check Bottom field - if ((active_fs->frame.used_for_reference&0x2) && (viddec_h264_get_is_long_term(active_fs)&0x2) && - (active_fs->bottom_field.long_term_pic_num == long_term_pic_num) ) + if ((p_dpb->active_fs->frame.used_for_reference&0x2) && (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2) && + (p_dpb->active_fs->bottom_field.long_term_pic_num == long_term_pic_num) ) { - active_fs->bottom_field.used_for_reference = 0; - active_fs->bottom_field.is_long_term = 0; - active_fs->frame.used_for_reference &= 1; - viddec_h264_set_is_frame_long_term(active_fs, 1); + p_dpb->active_fs->bottom_field.used_for_reference = 0; + p_dpb->active_fs->bottom_field.is_long_term = 0; + p_dpb->active_fs->frame.used_for_reference &= 1; + viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 1); unmark_done = 1; //Check if other field is used for long term reference, if not remove from list... - if ((active_fs->top_field.used_for_reference == 0) || (active_fs->top_field.is_long_term == 0)) + if ((p_dpb->active_fs->top_field.used_for_reference == 0) || (p_dpb->active_fs->top_field.is_long_term == 0)) { h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); } @@ -1961,15 +1954,15 @@ int32_t h264_dpb_get_pic_struct_by_pic_num(h264_DecodedPictureBuffer *p_dpb, int { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - if ((active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(active_fs)&0x01))&& - (active_fs->top_field.pic_num == picNumX) ) + if ((p_dpb->active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x01))&& + (p_dpb->active_fs->top_field.pic_num == picNumX) ) { found = 1; pic_struct = TOP_FIELD; } - if ((active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(active_fs)&0x2)) && - (active_fs->bottom_field.pic_num == picNumX) ) + if ((p_dpb->active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2)) && + (p_dpb->active_fs->bottom_field.pic_num == picNumX) ) { found = 1; pic_struct = BOTTOM_FIELD; @@ -2011,7 +2004,7 @@ void h264_dpb_mm_assign_long_term_frame_idx(h264_Info * pInfo, int32_t differenc polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX); if (polarity != INVALID) - h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, active_fs->fs_idc, polarity); + h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, p_dpb->active_fs->fs_idc, polarity); } h264_dpb_mark_pic_long_term(pInfo, long_term_frame_idx, picNumX); @@ -2046,7 +2039,7 @@ void h264_dpb_mm_update_max_long_term_frame_idx(h264_DecodedPictureBuffer *p_dpb idx2 = idx - removed_count; h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx2]); - if (active_fs->long_term_frame_idx > p_dpb->max_long_term_pic_idx) + if (p_dpb->active_fs->long_term_frame_idx > p_dpb->max_long_term_pic_idx) { removed_count++; h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx2]); @@ -2091,36 +2084,36 @@ void h264_dpb_mm_mark_current_picture_long_term(h264_DecodedPictureBuffer *p_dpb int32_t picNumX; h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - if (viddec_h264_get_dec_structure(active_fs) == FRAME) + if (viddec_h264_get_dec_structure(p_dpb->active_fs) == 
FRAME) { h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx); h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - active_fs->frame.is_long_term = 1; - active_fs->frame.long_term_frame_idx = long_term_frame_idx; - active_fs->frame.long_term_pic_num = long_term_frame_idx; + p_dpb->active_fs->frame.is_long_term = 1; + p_dpb->active_fs->frame.long_term_frame_idx = long_term_frame_idx; + p_dpb->active_fs->frame.long_term_pic_num = long_term_frame_idx; } else { - if (viddec_h264_get_dec_structure(active_fs) == TOP_FIELD) + if (viddec_h264_get_dec_structure(p_dpb->active_fs) == TOP_FIELD) { - picNumX = (active_fs->top_field.pic_num << 1) + 1; - active_fs->top_field.is_long_term = 1; - active_fs->top_field.long_term_frame_idx = long_term_frame_idx; + picNumX = (p_dpb->active_fs->top_field.pic_num << 1) + 1; + p_dpb->active_fs->top_field.is_long_term = 1; + p_dpb->active_fs->top_field.long_term_frame_idx = long_term_frame_idx; // Assign long-term pic num - active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + 1; + p_dpb->active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + 1; } else { - picNumX = (active_fs->bottom_field.pic_num << 1) + 1; - active_fs->bottom_field.is_long_term = 1; - active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; + picNumX = (p_dpb->active_fs->bottom_field.pic_num << 1) + 1; + p_dpb->active_fs->bottom_field.is_long_term = 1; + p_dpb->active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; // Assign long-term pic num - active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + 1; + p_dpb->active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + 1; } - h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, p_dpb->fs_dec_idc, viddec_h264_get_dec_structure(active_fs)); + h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, p_dpb->fs_dec_idc, viddec_h264_get_dec_structure(p_dpb->active_fs)); } // Add to long term list //h264_dpb_add_ltref_list(p_dpb->fs_dec_idc); @@ -2144,7 +2137,7 @@ void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPict { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); - if (active_fs->long_term_frame_idx == long_term_frame_idx) + if (p_dpb->active_fs->long_term_frame_idx == long_term_frame_idx) { h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]); h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); @@ -2173,15 +2166,15 @@ void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPict for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (found == 0); idx++) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); - if (active_fs->long_term_frame_idx == long_term_frame_idx) + if (p_dpb->active_fs->long_term_frame_idx == long_term_frame_idx) { - if (active_fs->fs_idc == fs_idc) + if (p_dpb->active_fs->fs_idc == fs_idc) { // Again these seem like redundant checks but for safety while until JM is updated if (polarity == TOP_FIELD) - is_complement = (active_fs->bottom_field.is_long_term)? 1:0; + is_complement = (p_dpb->active_fs->bottom_field.is_long_term)? 1:0; else if (polarity == BOTTOM_FIELD) - is_complement = (active_fs->top_field.is_long_term) ? 1:0; + is_complement = (p_dpb->active_fs->top_field.is_long_term) ? 
1:0; } found = 1; } @@ -2242,26 +2235,26 @@ void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - if (active_fs->frame.used_for_reference == 3) + if (p_dpb->active_fs->frame.used_for_reference == 3) { - if ((!(active_fs->frame.is_long_term))&&(active_fs->frame.pic_num == picNumX)) + if ((!(p_dpb->active_fs->frame.is_long_term))&&(p_dpb->active_fs->frame.pic_num == picNumX)) { - active_fs->long_term_frame_idx = long_term_frame_idx; - active_fs->frame.long_term_frame_idx = long_term_frame_idx; - active_fs->top_field.long_term_frame_idx = long_term_frame_idx; - active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; + p_dpb->active_fs->long_term_frame_idx = long_term_frame_idx; + p_dpb->active_fs->frame.long_term_frame_idx = long_term_frame_idx; + p_dpb->active_fs->top_field.long_term_frame_idx = long_term_frame_idx; + p_dpb->active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; - active_fs->frame.is_long_term = 1; - active_fs->top_field.is_long_term = 1; - active_fs->bottom_field.is_long_term = 1; + p_dpb->active_fs->frame.is_long_term = 1; + p_dpb->active_fs->top_field.is_long_term = 1; + p_dpb->active_fs->bottom_field.is_long_term = 1; - viddec_h264_set_is_frame_long_term(active_fs, 3); + viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 3); mark_done = 1; // Assign long-term pic num - active_fs->frame.long_term_pic_num = long_term_frame_idx; - active_fs->top_field.long_term_pic_num = long_term_frame_idx; - active_fs->bottom_field.long_term_pic_num = long_term_frame_idx; + p_dpb->active_fs->frame.long_term_pic_num = long_term_frame_idx; + p_dpb->active_fs->top_field.long_term_pic_num = long_term_frame_idx; + p_dpb->active_fs->bottom_field.long_term_pic_num = long_term_frame_idx; // Add to long term list h264_dpb_add_ltref_list(p_dpb, p_dpb->fs_ref_idc[idx]); // Remove from short-term list @@ -2273,44 +2266,44 @@ void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, else { polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX); - active_fs->long_term_frame_idx = long_term_frame_idx; /////BUG + p_dpb->active_fs->long_term_frame_idx = long_term_frame_idx; /////BUG if (polarity == TOP_FIELD) { - active_fs->top_field.long_term_frame_idx = long_term_frame_idx; - active_fs->top_field.is_long_term = 1; - viddec_h264_set_is_top_long_term(active_fs, 1); + p_dpb->active_fs->top_field.long_term_frame_idx = long_term_frame_idx; + p_dpb->active_fs->top_field.is_long_term = 1; + viddec_h264_set_is_top_long_term(p_dpb->active_fs, 1); // Assign long-term pic num - active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == TOP_FIELD) ? 1 : 0); + p_dpb->active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == TOP_FIELD) ? 1 : 0); } else if (polarity == BOTTOM_FIELD) { - active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; - active_fs->bottom_field.is_long_term = 1; - viddec_h264_set_is_bottom_long_term(active_fs, 1); + p_dpb->active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; + p_dpb->active_fs->bottom_field.is_long_term = 1; + viddec_h264_set_is_bottom_long_term(p_dpb->active_fs, 1); // Assign long-term pic num - active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == BOTTOM_FIELD) ? 1 : 0); + p_dpb->active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == BOTTOM_FIELD) ? 
1 : 0); } - if (viddec_h264_get_is_long_term(active_fs) == 3) + if (viddec_h264_get_is_long_term(p_dpb->active_fs) == 3) { - active_fs->frame.is_long_term = 1; - active_fs->frame.long_term_frame_idx = long_term_frame_idx; - h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); + p_dpb->active_fs->frame.is_long_term = 1; + p_dpb->active_fs->frame.long_term_frame_idx = long_term_frame_idx; + h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc); } else { // We need to add this idc to the long term ref list... - h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc); + h264_dpb_add_ltref_list(p_dpb, p_dpb->active_fs->fs_idc); // If the opposite field is not a short term reference, remove it from the // short term list. Since we know top field is a reference but both are not long term // we can simply check that both fields are not references... - if (active_fs->frame.used_for_reference != 3) - h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); + if (p_dpb->active_fs->frame.used_for_reference != 3) + h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc); } } return; @@ -2383,25 +2376,25 @@ void h264_dpb_adaptive_memory_management (h264_Info * pInfo) pInfo->SliceHeader.frame_num=0; h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - if (viddec_h264_get_dec_structure(active_fs) == FRAME) + if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME) { - pInfo->img.bottompoc -= active_fs->frame.poc; - pInfo->img.toppoc -= active_fs->frame.poc; + pInfo->img.bottompoc -= p_dpb->active_fs->frame.poc; + pInfo->img.toppoc -= p_dpb->active_fs->frame.poc; - active_fs->frame.poc = 0; - active_fs->frame.pic_num = 0; - active_fs->frame_num = 0; + p_dpb->active_fs->frame.poc = 0; + p_dpb->active_fs->frame.pic_num = 0; + p_dpb->active_fs->frame_num = 0; } - else if (viddec_h264_get_dec_structure(active_fs) == TOP_FIELD) + else if (viddec_h264_get_dec_structure(p_dpb->active_fs) == TOP_FIELD) { - active_fs->top_field.poc = active_fs->top_field.pic_num = 0; - pInfo->img.toppoc = active_fs->top_field.poc; + p_dpb->active_fs->top_field.poc = p_dpb->active_fs->top_field.pic_num = 0; + pInfo->img.toppoc = p_dpb->active_fs->top_field.poc; } - else if (viddec_h264_get_dec_structure(active_fs) == BOTTOM_FIELD) + else if (viddec_h264_get_dec_structure(p_dpb->active_fs) == BOTTOM_FIELD) { - active_fs->bottom_field.poc = active_fs->bottom_field.pic_num = 0; + p_dpb->active_fs->bottom_field.poc = p_dpb->active_fs->bottom_field.pic_num = 0; pInfo->img.bottompoc = 0; } @@ -2446,7 +2439,7 @@ void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) if (prev_idc != MPD_DPB_FS_NULL_IDC) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - active_fs->frame_num =0; + p_dpb->active_fs->frame_num =0; } } pInfo->img.PreviousFrameNumOffset = 0; @@ -2511,8 +2504,8 @@ void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) if (prev_idc != MPD_DPB_FS_NULL_IDC) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - if (viddec_h264_get_is_used(active_fs) != 3) { - h264_dpb_mark_dangling_field(p_dpb, active_fs->fs_idc); //, DANGLING_TYPE_GAP_IN_FRAME + if (viddec_h264_get_is_used(p_dpb->active_fs) != 3) { + h264_dpb_mark_dangling_field(p_dpb, p_dpb->active_fs->fs_idc); //, DANGLING_TYPE_GAP_IN_FRAME } } } @@ -2522,15 +2515,15 @@ void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) h264_dpb_assign_frame_store(pInfo, 1); // Set up initial markings - not sure if all are needed - viddec_h264_set_dec_structure(active_fs, FRAME); + viddec_h264_set_dec_structure(p_dpb->active_fs, FRAME); if 
(MaxFrameNum) ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp); - active_fs->frame.pic_num = temp; - active_fs->long_term_frame_idx = 0; - active_fs->frame.long_term_pic_num = 0; - viddec_h264_set_is_frame_long_term(active_fs, 0); + p_dpb->active_fs->frame.pic_num = temp; + p_dpb->active_fs->long_term_frame_idx = 0; + p_dpb->active_fs->frame.long_term_pic_num = 0; + viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 0); // Note the call below will overwrite some aspects of the img structure with info relating to the // non-existent picture @@ -2540,7 +2533,7 @@ void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) h264_hdr_decoding_poc(pInfo, 1, temp); pInfo->img.structure = FRAME; - active_fs->frame.poc = pInfo->img.framepoc; + p_dpb->active_fs->frame.poc = pInfo->img.framepoc; // call store_picture_in_dpb @@ -2566,11 +2559,11 @@ void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_ { h264_dpb_set_active_fs(p_dpb, fs_idc); - if (viddec_h264_get_is_used(active_fs)&0x1) active_fs->top_field.used_for_reference = 0; - if (viddec_h264_get_is_used(active_fs)&0x2) active_fs->bottom_field.used_for_reference = 0; - if (viddec_h264_get_is_used(active_fs) == 3) active_fs->frame.used_for_reference = 0; + if (viddec_h264_get_is_used(p_dpb->active_fs)&0x1) p_dpb->active_fs->top_field.used_for_reference = 0; + if (viddec_h264_get_is_used(p_dpb->active_fs)&0x2) p_dpb->active_fs->bottom_field.used_for_reference = 0; + if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) p_dpb->active_fs->frame.used_for_reference = 0; - active_fs->frame.used_for_reference = 0; + p_dpb->active_fs->frame.used_for_reference = 0; return; } @@ -2589,25 +2582,25 @@ void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, i { h264_dpb_set_active_fs(p_dpb, fs_idc); - if (viddec_h264_get_is_used(active_fs)&0x1) + if (viddec_h264_get_is_used(p_dpb->active_fs)&0x1) { - active_fs->top_field.used_for_reference = 0; - active_fs->top_field.is_long_term = 0; + p_dpb->active_fs->top_field.used_for_reference = 0; + p_dpb->active_fs->top_field.is_long_term = 0; } - if (viddec_h264_get_is_used(active_fs)&0x2) + if (viddec_h264_get_is_used(p_dpb->active_fs)&0x2) { - active_fs->bottom_field.used_for_reference = 0; - active_fs->bottom_field.is_long_term = 0; + p_dpb->active_fs->bottom_field.used_for_reference = 0; + p_dpb->active_fs->bottom_field.is_long_term = 0; } - if (viddec_h264_get_is_used(active_fs) == 3) + if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) { - active_fs->frame.used_for_reference = 0; - active_fs->frame.is_long_term = 0; + p_dpb->active_fs->frame.used_for_reference = 0; + p_dpb->active_fs->frame.is_long_term = 0; } - active_fs->frame.used_for_reference = 0; - viddec_h264_set_is_frame_long_term(active_fs, 0); + p_dpb->active_fs->frame.used_for_reference = 0; + viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 0); return; } @@ -2638,20 +2631,20 @@ void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_i as a dangler twice which would upset the HW dpb_disp_q count */ - if (viddec_h264_get_is_dangling(active_fs) == 0) + if (viddec_h264_get_is_dangling(p_dpb->active_fs) == 0) { - switch (viddec_h264_get_dec_structure(active_fs)) + switch (viddec_h264_get_dec_structure(p_dpb->active_fs)) { case TOP_FIELD: - viddec_h264_set_is_dangling(active_fs, 1); - //PRINTF(MFD_NONE, "FN:%d fs_idc=%d FRAME_FLAG_DANGLING_TOP_FIELD\n ", (h264_frame_number+1), active_fs->fs_idc); + 
viddec_h264_set_is_dangling(p_dpb->active_fs, 1); + //PRINTF(MFD_NONE, "FN:%d fs_idc=%d FRAME_FLAG_DANGLING_TOP_FIELD\n ", (h264_frame_number+1), p_dpb->active_fs->fs_idc); break; case BOTTOM_FIELD: - //PRINTF(MFD_NONE, " FN:%d fs_idc=%d FRAME_FLAG_DANGLING_BOTTOM_FIELD \n ", (h264_frame_number+1), active_fs->fs_idc); - viddec_h264_set_is_dangling(active_fs, 1); + //PRINTF(MFD_NONE, " FN:%d fs_idc=%d FRAME_FLAG_DANGLING_BOTTOM_FIELD \n ", (h264_frame_number+1), p_dpb->active_fs->fs_idc); + viddec_h264_set_is_dangling(p_dpb->active_fs, 1); break; default: - //PRINTF(MFD_NONE, "FN:%d fs_idc=%d DANGLING: FATAL_ERROR\n ", (h264_frame_number+1), active_fs->fs_idc); + //PRINTF(MFD_NONE, "FN:%d fs_idc=%d DANGLING: FATAL_ERROR\n ", (h264_frame_number+1), p_dpb->active_fs->fs_idc); break; } @@ -2670,21 +2663,21 @@ void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_i // // Check if one of the frames/fields in active_fs is used for reference // -void h264_dpb_is_used_for_reference(int32_t * flag) +void h264_dpb_is_used_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t * flag) { /* Check out below for embedded */ *flag = 0; - if (active_fs->frame.used_for_reference) + if (p_dpb->active_fs->frame.used_for_reference) *flag = 1; - else if (viddec_h264_get_is_used(active_fs) ==3) // frame - *flag = active_fs->frame.used_for_reference; + else if (viddec_h264_get_is_used(p_dpb->active_fs) ==3) // frame + *flag = p_dpb->active_fs->frame.used_for_reference; else { - if (viddec_h264_get_is_used(active_fs)&0x1) // top field - *flag = active_fs->top_field.used_for_reference; - if (viddec_h264_get_is_used(active_fs)&0x2) // bottom field - *flag = *flag || active_fs->bottom_field.used_for_reference; + if (viddec_h264_get_is_used(p_dpb->active_fs)&0x1) // top field + *flag = p_dpb->active_fs->top_field.used_for_reference; + if (viddec_h264_get_is_used(p_dpb->active_fs)&0x2) // bottom field + *flag = *flag || p_dpb->active_fs->bottom_field.used_for_reference; } } @@ -2723,17 +2716,17 @@ void h264_dpb_idr_memory_management (h264_Info * pInfo,seq_param_set_used_ptr ac { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - //mfd_printf(" directly freeing fs_idc = %d DSN = 0x%x \n",active_fs->fs_idc, active_fs->first_dsn); - viddec_h264_set_is_frame_used(active_fs, 0); - //if( (active_fs->frame_sent == 0x01) && (active_fs->is_output == 0x0)) + //mfd_printf(" directly freeing fs_idc = %d DSN = 0x%x \n",p_dpb->active_fs->fs_idc, p_dpb->active_fs->first_dsn); + viddec_h264_set_is_frame_used(p_dpb->active_fs, 0); + //if( (p_dpb->active_fs->frame_sent == 0x01) && (p_dpb->active_fs->is_output == 0x0)) { //DECODED_FRAME sent but not DISPLAY_FRAME - h264_dpb_unmark_for_reference(p_dpb, active_fs->fs_idc); - h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc); + h264_dpb_unmark_for_reference(p_dpb, p_dpb->active_fs->fs_idc); + h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc); //h264_send_new_display_frame(0x01); //send ignore_frame signal to Host /// Add into drop-out list for all frms in dpb without display - if (!(viddec_h264_get_is_non_existent(active_fs))) { + if (!(viddec_h264_get_is_non_existent(p_dpb->active_fs))) { if ( viddec_h264_get_is_output(&(p_dpb->fs[p_dpb->fs_dpb_idc[idx]])) ) { //// This frame has been displayed but not released p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs_dpb_idc[idx]; p_dpb->frame_numbers_need_to_be_removed ++; @@ -2775,21 +2768,21 @@ void h264_dpb_idr_memory_management (h264_Info * 
pInfo,seq_param_set_used_ptr ac if (pInfo->img.long_term_reference_flag) { p_dpb->max_long_term_pic_idx = 0; - switch (viddec_h264_get_dec_structure(active_fs)) + switch (viddec_h264_get_dec_structure(p_dpb->active_fs)) { case FRAME : - active_fs->frame.is_long_term = 1; + p_dpb->active_fs->frame.is_long_term = 1; case TOP_FIELD : - active_fs->top_field.is_long_term = 1; + p_dpb->active_fs->top_field.is_long_term = 1; case BOTTOM_FIELD : - active_fs->bottom_field.is_long_term = 1; + p_dpb->active_fs->bottom_field.is_long_term = 1; } - active_fs->long_term_frame_idx = 0; + p_dpb->active_fs->long_term_frame_idx = 0; } else { p_dpb->max_long_term_pic_idx = MPD_DPB_FS_NULL_IDC; - viddec_h264_set_is_frame_long_term(active_fs, 0); + viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 0); } } @@ -2949,10 +2942,10 @@ void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t id fs_idc = p_dpb->fs_dpb_idc[idx]; h264_dpb_set_active_fs(p_dpb, fs_idc); - viddec_h264_set_is_frame_used(active_fs, 0); + viddec_h264_set_is_frame_used(p_dpb->active_fs, 0); //add to support frame relocation interface to host - if (!(viddec_h264_get_is_non_existent(active_fs))) + if (!(viddec_h264_get_is_non_existent(p_dpb->active_fs))) { p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs[fs_idc].fs_idc; p_dpb->frame_numbers_need_to_be_removed ++; @@ -3002,7 +2995,7 @@ void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int for (idx = 0; (idx < p_dpb->used_size) && (*flag == 0); idx++) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - h264_dpb_is_used_for_reference(&used_for_reference); + h264_dpb_is_used_for_reference(p_dpb, &used_for_reference); //if( (used_for_reference == 0x0 ) && active_fs->is_output && active_fs->is_non_existent == 0x0) //{ @@ -3010,14 +3003,14 @@ void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int //dpb_release_fb(&h264_dpb, active_fs->fb_id, 1); //} - if (viddec_h264_get_is_output(active_fs) && (used_for_reference == 0)) + if (viddec_h264_get_is_output(p_dpb->active_fs) && (used_for_reference == 0)) { h264_dpb_remove_frame_from_dpb(p_dpb, idx); *flag = 1; } /* /////// Removed following OLO source (Sodaville H.D) - else if ( (first_non_exist_valid == 0x0) && active_fs->is_non_existent ) + else if ( (first_non_exist_valid == 0x0) && p_dpb->active_fs->is_non_existent ) { first_non_exist_valid = 0x01; non_exist_idx = idx; @@ -3054,32 +3047,32 @@ void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, i *pos = MPD_DPB_FS_NULL_IDC; h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[0]); - poc_int = active_fs->frame.poc; + poc_int = p_dpb->active_fs->frame.poc; for (idx = 0; idx < p_dpb->used_size; idx++) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - if (viddec_h264_get_is_output(active_fs) == 0) + if (viddec_h264_get_is_output(p_dpb->active_fs) == 0) { //PRINTF(MFD_NONE, " active_fs->fs_idc = %d active_fs->is_used = %d, active_fs->is_dangling = %d , active_fs->poc = %d \n", active_fs->fs_idc, active_fs->is_used, active_fs->is_dangling, active_fs->poc); - if ((viddec_h264_get_is_used(active_fs) == 3) || (viddec_h264_get_is_dangling(active_fs))) + if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3) || (viddec_h264_get_is_dangling(p_dpb->active_fs))) { if (first_non_output) { *pos = idx; first_non_output = 0; - poc_int = active_fs->frame.poc; + poc_int = p_dpb->active_fs->frame.poc; } - else if (poc_int > active_fs->frame.poc) + else if (poc_int > 
p_dpb->active_fs->frame.poc) { - poc_int = active_fs->frame.poc; + poc_int = p_dpb->active_fs->frame.poc; *pos = idx; } } else if (p_dpb->used_size == 1) { - poc_int = active_fs->frame.poc; + poc_int = p_dpb->active_fs->frame.poc; *pos = idx; } } @@ -3099,24 +3092,24 @@ void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, i // Extract field information from a frame ////////////////////////////////////////////////////////////////////////////// -void h264_dpb_split_field (h264_Info * pInfo) +void h264_dpb_split_field (h264_DecodedPictureBuffer *p_dpb, h264_Info * pInfo) { - //active_fs->frame.poc = active_fs->frame.poc; - // active_fs->top_field.poc = active_fs->frame.poc; + //p_dpb->active_fs->frame.poc = p_dpb->active_fs->frame.poc; + // p_dpb->active_fs->top_field.poc = p_dpb->active_fs->frame.poc; // This line changed on 11/05/05 KMc - active_fs->top_field.poc = pInfo->img.toppoc; - active_fs->bottom_field.poc = pInfo->img.bottompoc; + p_dpb->active_fs->top_field.poc = pInfo->img.toppoc; + p_dpb->active_fs->bottom_field.poc = pInfo->img.bottompoc; - active_fs->top_field.used_for_reference = active_fs->frame.used_for_reference & 1; - active_fs->bottom_field.used_for_reference = active_fs->frame.used_for_reference >> 1; + p_dpb->active_fs->top_field.used_for_reference = p_dpb->active_fs->frame.used_for_reference & 1; + p_dpb->active_fs->bottom_field.used_for_reference = p_dpb->active_fs->frame.used_for_reference >> 1; - active_fs->top_field.is_long_term = active_fs->frame.is_long_term; - active_fs->bottom_field.is_long_term = active_fs->frame.is_long_term; + p_dpb->active_fs->top_field.is_long_term = p_dpb->active_fs->frame.is_long_term; + p_dpb->active_fs->bottom_field.is_long_term = p_dpb->active_fs->frame.is_long_term; - active_fs->long_term_frame_idx = active_fs->frame.long_term_frame_idx; - active_fs->top_field.long_term_frame_idx = active_fs->frame.long_term_frame_idx; - active_fs->bottom_field.long_term_frame_idx = active_fs->frame.long_term_frame_idx; + p_dpb->active_fs->long_term_frame_idx = p_dpb->active_fs->frame.long_term_frame_idx; + p_dpb->active_fs->top_field.long_term_frame_idx = p_dpb->active_fs->frame.long_term_frame_idx; + p_dpb->active_fs->bottom_field.long_term_frame_idx = p_dpb->active_fs->frame.long_term_frame_idx; // Assign field mvs attached to MB-Frame buffer to the proper buffer @@ -3136,24 +3129,24 @@ void h264_dpb_split_field (h264_Info * pInfo) // Generate a frame from top and bottom fields ////////////////////////////////////////////////////////////////////////////// -void h264_dpb_combine_field(int32_t use_old) +void h264_dpb_combine_field(h264_DecodedPictureBuffer *p_dpb, int32_t use_old) { //remove warning use_old = use_old; - active_fs->frame.poc = (active_fs->top_field.poc < active_fs->bottom_field.poc)? - active_fs->top_field.poc: active_fs->bottom_field.poc; + p_dpb->active_fs->frame.poc = (p_dpb->active_fs->top_field.poc < p_dpb->active_fs->bottom_field.poc)? 
+        p_dpb->active_fs->top_field.poc: p_dpb->active_fs->bottom_field.poc;
-    //active_fs->frame.poc = active_fs->poc;
+    //p_dpb->active_fs->frame.poc = p_dpb->active_fs->poc;
-    active_fs->frame.used_for_reference = active_fs->top_field.used_for_reference |(active_fs->bottom_field.used_for_reference);
+    p_dpb->active_fs->frame.used_for_reference = p_dpb->active_fs->top_field.used_for_reference |(p_dpb->active_fs->bottom_field.used_for_reference);
-    active_fs->frame.is_long_term = active_fs->top_field.is_long_term |(active_fs->bottom_field.is_long_term <<1);
+    p_dpb->active_fs->frame.is_long_term = p_dpb->active_fs->top_field.is_long_term |(p_dpb->active_fs->bottom_field.is_long_term <<1);
-    if (active_fs->frame.is_long_term)
-        active_fs->frame.long_term_frame_idx = active_fs->long_term_frame_idx;
+    if (p_dpb->active_fs->frame.is_long_term)
+        p_dpb->active_fs->frame.long_term_frame_idx = p_dpb->active_fs->long_term_frame_idx;
     return;
@@ -3192,7 +3185,7 @@ void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb,
         if (NonExisting == 0) {
             h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
-            viddec_h264_set_is_frame_long_term(active_fs, 0);
+            viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 0);
         }
     }
 }
@@ -3233,11 +3226,11 @@ void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int
     if (pInfo->sei_information.disp_frozen)
     {
         // check pocs
-        if (active_fs->top_field.poc >= pInfo->sei_information.freeze_POC)
+        if (p_dpb->active_fs->top_field.poc >= pInfo->sei_information.freeze_POC)
         {
-            if (active_fs->top_field.poc < pInfo->sei_information.release_POC)
+            if (p_dpb->active_fs->top_field.poc < pInfo->sei_information.release_POC)
             {
-                viddec_h264_set_is_top_skipped(active_fs, 1);
+                viddec_h264_set_is_top_skipped(p_dpb->active_fs, 1);
             }
             else
             {
@@ -3245,11 +3238,11 @@ void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int
             }
         }
-        if (active_fs->bottom_field.poc >= pInfo->sei_information.freeze_POC)
+        if (p_dpb->active_fs->bottom_field.poc >= pInfo->sei_information.freeze_POC)
         {
-            if (active_fs->bottom_field.poc < pInfo->sei_information.release_POC)
+            if (p_dpb->active_fs->bottom_field.poc < pInfo->sei_information.release_POC)
             {
-                viddec_h264_set_is_bottom_skipped(active_fs, 1);
+                viddec_h264_set_is_bottom_skipped(p_dpb->active_fs, 1);
             }
             else
             {
@@ -3258,14 +3251,14 @@ void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int
             }
         }
-    if ( viddec_h264_get_broken_link_picture(active_fs) )
+    if ( viddec_h264_get_broken_link_picture(p_dpb->active_fs) )
         pInfo->sei_information.broken_link = 1;
     if ( pInfo->sei_information.broken_link)
     {
         // Check if this was the recovery point picture - going to have recovery point on
         // a frame basis
-        if (viddec_h264_get_recovery_pt_picture(active_fs))
+        if (viddec_h264_get_recovery_pt_picture(p_dpb->active_fs))
        {
             pInfo->sei_information.broken_link = 0;
             // Also reset wait on sei recovery point picture
@@ -3273,7 +3266,7 @@ void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int
         }
         else
         {
-            viddec_h264_set_is_frame_skipped(active_fs, 3);
+            viddec_h264_set_is_frame_skipped(p_dpb->active_fs, 3);
         }
     }
     else
@@ -3282,34 +3275,34 @@ void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int
     // Did we use SEI recovery point for the last restart?
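// An aside on the flag convention in play here (a sketch, not from the patch
// itself): the is_used / skipped fields are two-bit field masks -- bit 0 is
// the top field, bit 1 the bottom field -- which is why whole-frame operations
// pass or test the value 3, matching the viddec_h264_get_is_used(...) & 0x1,
// & 0x2 and == 3 checks earlier in this file. Illustrated with hypothetical
// macro names:
//
//   #define H264_FIELD_TOP    0x1   /* bit 0: top field    */
//   #define H264_FIELD_BOTTOM 0x2   /* bit 1: bottom field */
//
//   /* skip display of both fields until a recovery-point (WaitSeiRecovery)
//      or open-GOP entry (SuspendOutput) picture clears the gates below */
//   viddec_h264_set_is_frame_skipped(p_dpb->active_fs,
//                                    H264_FIELD_TOP | H264_FIELD_BOTTOM);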
if ( p_dpb->WaitSeiRecovery ) { - if ( viddec_h264_get_recovery_pt_picture(active_fs) ) { + if ( viddec_h264_get_recovery_pt_picture(p_dpb->active_fs) ) { p_dpb->WaitSeiRecovery = 0; } else { - viddec_h264_set_is_frame_skipped(active_fs, 3); + viddec_h264_set_is_frame_skipped(p_dpb->active_fs, 3); } } } if ( p_dpb->SuspendOutput ) { - if ( viddec_h264_get_open_gop_entry(active_fs) ) { + if ( viddec_h264_get_open_gop_entry(p_dpb->active_fs) ) { p_dpb->SuspendOutput = 0; } else { - viddec_h264_set_is_frame_skipped(active_fs, 3); + viddec_h264_set_is_frame_skipped(p_dpb->active_fs, 3); } } //h264_send_new_display_frame(0x0); - viddec_h264_set_is_output(active_fs, 1); + viddec_h264_set_is_output(p_dpb->active_fs, 1); - if (viddec_h264_get_is_non_existent(active_fs) == 0) + if (viddec_h264_get_is_non_existent(p_dpb->active_fs) == 0) { *existing = 1; - p_dpb->frame_id_need_to_be_displayed[p_dpb->frame_numbers_need_to_be_displayed]=active_fs->fs_idc; + p_dpb->frame_id_need_to_be_displayed[p_dpb->frame_numbers_need_to_be_displayed]=p_dpb->active_fs->fs_idc; p_dpb->frame_numbers_need_to_be_displayed++; //if(direct) - //h264_dpb_remove_frame_from_dpb(p_dpb, active_fs->fs_idc); // Remove dpb.fs_dpb_idc[pos] + //h264_dpb_remove_frame_from_dpb(p_dpb, p_dpb->active_fs->fs_idc); // Remove dpb.fs_dpb_idc[pos] } else { @@ -3317,11 +3310,11 @@ void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int } if (direct) { - viddec_h264_set_is_frame_used(active_fs, 0); - active_fs->frame.used_for_reference = 0; - active_fs->top_field.used_for_reference = 0; - active_fs->bottom_field.used_for_reference = 0; - active_fs->fs_idc = MPD_DPB_FS_NULL_IDC; + viddec_h264_set_is_frame_used(p_dpb->active_fs, 0); + p_dpb->active_fs->frame.used_for_reference = 0; + p_dpb->active_fs->top_field.used_for_reference = 0; + p_dpb->active_fs->bottom_field.used_for_reference = 0; + p_dpb->active_fs->fs_idc = MPD_DPB_FS_NULL_IDC; } return; } ///////// End of dpb frame output @@ -3376,7 +3369,7 @@ int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo,int32_t direct, int3 if (existing) is_pushed = 1; // If non-reference, free frame store and move empty store to end of buffer - h264_dpb_is_used_for_reference(&used_for_reference); + h264_dpb_is_used_for_reference(p_dpb, &used_for_reference); if (!(used_for_reference)) h264_dpb_remove_frame_from_dpb(p_dpb, pos); // Remove dpb.fs_dpb_idc[pos] } @@ -3396,7 +3389,7 @@ int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo,int32_t direct, int3 for (idx = 0; idx < p_dpb->used_size; idx++) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - h264_dpb_is_used_for_reference(&used_for_reference); + h264_dpb_is_used_for_reference(p_dpb, &used_for_reference); if (used_for_reference) { break; @@ -3537,14 +3530,14 @@ void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHe { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - if (viddec_h264_get_is_used(active_fs) != 3) - h264_dpb_mark_dangling_field(p_dpb, active_fs->fs_idc); //, DANGLING_TYPE_DPB_RESET + if (viddec_h264_get_is_used(p_dpb->active_fs) != 3) + h264_dpb_mark_dangling_field(p_dpb, p_dpb->active_fs->fs_idc); //, DANGLING_TYPE_DPB_RESET } } // initialize software DPB - if (active_fs) { - viddec_h264_set_dec_structure(active_fs, INVALID); + if (p_dpb->active_fs) { + viddec_h264_set_dec_structure(p_dpb->active_fs, INVALID); } h264_dpb_idr_memory_management(pInfo, &pInfo->active_SPS, no_output_of_prior_pics_flag); // implied no_output_of_prior_pics_flag==1 @@ -3665,12 +3658,12 
@@ int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting) ///////////////////////////////h264_dpb_reset_fs(); h264_dpb_set_active_fs(p_dpb, idc); - active_fs->fs_flag_1 = 0; - active_fs->fs_flag_2 = 0; - viddec_h264_set_is_non_existent(active_fs, NonExisting); - viddec_h264_set_is_output(active_fs, (NonExisting?1:0)); + p_dpb->active_fs->fs_flag_1 = 0; + p_dpb->active_fs->fs_flag_2 = 0; + viddec_h264_set_is_non_existent(p_dpb->active_fs, NonExisting); + viddec_h264_set_is_output(p_dpb->active_fs, (NonExisting?1:0)); - active_fs->pic_type = ((FRAME_TYPE_INVALID<active_fs->pic_type = ((FRAME_TYPE_INVALID<top.poc would also not be overwritten until a new valid value comes along, // but I don't think it is used before then so no need to reset - //active_fs->is_long_term = 0; - active_fs->frame.used_for_reference = 0; - active_fs->frame.poc = 0; + //p_dpb->active_fs->is_long_term = 0; + p_dpb->active_fs->frame.used_for_reference = 0; + p_dpb->active_fs->frame.poc = 0; return 1; } @@ -3706,7 +3699,7 @@ void h264_dpb_update_queue_dangling_field(h264_Info * pInfo) if (dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1] != MPD_DPB_FS_NULL_IDC) { h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]); - if (viddec_h264_get_is_used(active_fs) != 3) + if (viddec_h264_get_is_used(dpb_ptr->active_fs) != 3) { prev_pic_unpaired_field = 1; } @@ -3765,9 +3758,9 @@ void h264_dpb_init_frame_store(h264_Info * pInfo) if (prev_idc != MPD_DPB_FS_NULL_IDC) { h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]); - if (viddec_h264_get_is_used(active_fs) != 3) + if (viddec_h264_get_is_used(dpb_ptr->active_fs) != 3) { - //PRINTF(MFD_NONE, " FN: %d active_fs->is_used = %d \n", (h264_frame_number+1), active_fs->is_used); + //PRINTF(MFD_NONE, " FN: %d p_dpb->active_fs->is_used = %d \n", (h264_frame_number+1), p_dpb->active_fs->is_used); prev_pic_unpaired_field = 1; } } @@ -3784,11 +3777,11 @@ void h264_dpb_init_frame_store(h264_Info * pInfo) // If we establish the previous pic was an unpaired field and this picture is not // its complement, the previous picture was a dangling field if (pInfo->img.second_field == 0) - h264_dpb_mark_dangling_field(dpb_ptr, active_fs->fs_idc); //, DANGLING_TYPE_FIELD + h264_dpb_mark_dangling_field(dpb_ptr, dpb_ptr->active_fs->fs_idc); //, DANGLING_TYPE_FIELD } } else if (prev_pic_unpaired_field) { - h264_dpb_mark_dangling_field(dpb_ptr, active_fs->fs_idc); //, DANGLING_TYPE_FRAME + h264_dpb_mark_dangling_field(dpb_ptr, dpb_ptr->active_fs->fs_idc); //, DANGLING_TYPE_FRAME } free_fs_found = 0; @@ -3807,79 +3800,79 @@ void h264_dpb_init_frame_store(h264_Info * pInfo) ////////////// TODO: THe following init #if 1 if ( pInfo->img.second_field) { - //active_fs->second_dsn = pInfo->img.dsn; - //active_fs->prev_dsn = pInfo->img.prev_dsn; - if (active_fs->pic_type == FRAME_TYPE_IDR || - active_fs->pic_type == FRAME_TYPE_I) { + //p_dpb->active_fs->second_dsn = pInfo->img.dsn; + //p_dpb->active_fs->prev_dsn = pInfo->img.prev_dsn; + if (dpb_ptr->active_fs->pic_type == FRAME_TYPE_IDR || + dpb_ptr->active_fs->pic_type == FRAME_TYPE_I) { - viddec_h264_set_first_field_intra(active_fs, 1); + viddec_h264_set_first_field_intra(dpb_ptr->active_fs, 1); } else { - viddec_h264_set_first_field_intra(active_fs, 0); + viddec_h264_set_first_field_intra(dpb_ptr->active_fs, 0); } } else { - //active_fs->first_dsn = pInfo->img.dsn; - //active_fs->prev_dsn = pInfo->img.prev_dsn; - viddec_h264_set_first_field_intra(active_fs, 0); + //p_dpb->active_fs->first_dsn = 
pInfo->img.dsn; + //p_dpb->active_fs->prev_dsn = pInfo->img.prev_dsn; + viddec_h264_set_first_field_intra(dpb_ptr->active_fs, 0); } if (pInfo->img.structure == FRAME) { - //active_fs->second_dsn = 0x0; + //dpb_ptr->active_fs->second_dsn = 0x0; } if ( pInfo->sei_information.broken_link_pic ) { - viddec_h264_set_broken_link_picture(active_fs, 1); + viddec_h264_set_broken_link_picture(dpb_ptr->active_fs, 1); pInfo->sei_information.broken_link_pic = 0; } if ((pInfo->img.frame_num == pInfo->sei_information.recovery_frame_num)&&(pInfo->SliceHeader.nal_ref_idc != 0)) - viddec_h264_set_recovery_pt_picture(active_fs, 1); + viddec_h264_set_recovery_pt_picture(dpb_ptr->active_fs, 1); //if ((( gRestartMode.aud ) || ( gRestartMode.sei )) && ( !gRestartMode.idr)) if (pInfo->img.recovery_point_found == 6) { - viddec_h264_set_open_gop_entry(active_fs, 1); + viddec_h264_set_open_gop_entry(dpb_ptr->active_fs, 1); pInfo->dpb.SuspendOutput = 1; } #endif if ((pInfo->img.second_field) || (free_fs_found)) { - viddec_h264_set_dec_structure(active_fs, pInfo->img.structure); - viddec_h264_set_is_output(active_fs, 0); + viddec_h264_set_dec_structure(dpb_ptr->active_fs, pInfo->img.structure); + viddec_h264_set_is_output(dpb_ptr->active_fs, 0); switch (pInfo->img.structure) { case (FRAME) : { - active_fs->frame.pic_num = pInfo->img.frame_num; - active_fs->frame.long_term_frame_idx = 0; - active_fs->frame.long_term_pic_num = 0; - active_fs->frame.used_for_reference = 0; - active_fs->frame.is_long_term = 0; - //active_fs->frame.structure = pInfo->img.structure; - active_fs->frame.poc = pInfo->img.framepoc; + dpb_ptr->active_fs->frame.pic_num = pInfo->img.frame_num; + dpb_ptr->active_fs->frame.long_term_frame_idx = 0; + dpb_ptr->active_fs->frame.long_term_pic_num = 0; + dpb_ptr->active_fs->frame.used_for_reference = 0; + dpb_ptr->active_fs->frame.is_long_term = 0; + //dpb_ptr->active_fs->frame.structure = pInfo->img.structure; + dpb_ptr->active_fs->frame.poc = pInfo->img.framepoc; } break; case (TOP_FIELD) : { - active_fs->top_field.pic_num = pInfo->img.frame_num; - active_fs->top_field.long_term_frame_idx = 0; - active_fs->top_field.long_term_pic_num = 0; - active_fs->top_field.used_for_reference = 0; - active_fs->top_field.is_long_term = 0; - //active_fs->top_field.structure = pInfo->img.structure; - active_fs->top_field.poc = pInfo->img.toppoc; + dpb_ptr->active_fs->top_field.pic_num = pInfo->img.frame_num; + dpb_ptr->active_fs->top_field.long_term_frame_idx = 0; + dpb_ptr->active_fs->top_field.long_term_pic_num = 0; + dpb_ptr->active_fs->top_field.used_for_reference = 0; + dpb_ptr->active_fs->top_field.is_long_term = 0; + //dpb_ptr->active_fs->top_field.structure = pInfo->img.structure; + dpb_ptr->active_fs->top_field.poc = pInfo->img.toppoc; } break; case(BOTTOM_FIELD) : { - active_fs->bottom_field.pic_num = pInfo->img.frame_num; - active_fs->bottom_field.long_term_frame_idx = 0; - active_fs->bottom_field.long_term_pic_num = 0; - active_fs->bottom_field.used_for_reference = 0; - active_fs->bottom_field.is_long_term = 0; - //active_fs->bottom_field.structure = pInfo->img.structure; - active_fs->bottom_field.poc = pInfo->img.bottompoc; + dpb_ptr->active_fs->bottom_field.pic_num = pInfo->img.frame_num; + dpb_ptr->active_fs->bottom_field.long_term_frame_idx = 0; + dpb_ptr->active_fs->bottom_field.long_term_pic_num = 0; + dpb_ptr->active_fs->bottom_field.used_for_reference = 0; + dpb_ptr->active_fs->bottom_field.is_long_term = 0; + //dpb_ptr->active_fs->bottom_field.structure = pInfo->img.structure; + 
dpb_ptr->active_fs->bottom_field.poc = pInfo->img.bottompoc; } break; } -- cgit v1.2.3 From b8709d4c3941630cbbe1914cf9f514608276f3cd Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Sat, 5 Nov 2011 00:55:03 -0400 Subject: libmix: to correct wrong buffer length in length prefixed path BZ: 13281 Fix a wrong buffer length issue in length prefixed path Change-Id: If57111da0b80e4e12502e322e7c05534a866e59b Signed-off-by: Weian Chen Reviewed-on: http://android.intel.com:8080/23223 Reviewed-by: Fang, Yanlong Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderAVC.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 93951c6..5c95f79 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -506,7 +506,7 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf mTotalSizeCopied += 4; memcpy(outBuffer->data + sizeCopiedHere, - (uint8_t *)mCurSegment->buf + mOffsetInSeg + nalOffset, sizeToBeCopied); + (uint8_t *)mCurSegment->buf + mOffsetInSeg + nalOffset, nalSize); sizeCopiedHere += nalSize; mTotalSizeCopied += nalSize; -- cgit v1.2.3 From 7fbd666ee644365a05553f51d4479c48d597ef8e Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Tue, 8 Nov 2011 08:26:38 -0500 Subject: libmix: quality issue fix from Tao - set correct level BZ: 13940 fix the quality issue found in WiDi, no impact to normal video recording Signed-off-by: Weian Chen Change-Id: Ic9db19878e7c65407c4ecf3686f4ea4aa68a6719 Signed-off-by: Weian Chen Reviewed-on: http://android.intel.com:8080/23532 Reviewed-by: Veeramani, Karthik Reviewed-by: Tao, Tao Q Tested-by: Sun, Hang L Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderAVC.cpp | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 5c95f79..2f0cb62 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -31,8 +31,8 @@ Encode_Status VideoEncoderAVC::start() { if (mComParams.rcMode == VA_RC_VCM) { // If we are in VCM, we will set slice num to max value - mVideoParamsAVC.sliceNum.iSliceNum = (mComParams.resolution.height + 15) / 16; - mVideoParamsAVC.sliceNum.pSliceNum = mVideoParamsAVC.sliceNum.iSliceNum; + // mVideoParamsAVC.sliceNum.iSliceNum = (mComParams.resolution.height + 15) / 16; + // mVideoParamsAVC.sliceNum.pSliceNum = mVideoParamsAVC.sliceNum.iSliceNum; } ret = VideoEncoderBase::start (); @@ -682,19 +682,22 @@ Encode_Status VideoEncoderAVC::renderAIR() { int VideoEncoderAVC::calcLevel(int numMbs) { int level = 30; - if (numMbs < 3600) { + if (numMbs < 1620) { level = 30; - } else if (numMbs < 5120) { + } else if (numMbs < 3600) { level = 31; - } else if (numMbs < 8192) { + } else if (numMbs < 5120) { level = 32; + } else if (numMbs < 8192) { + level = 41; } else if (numMbs < 8704) { - level = 40; - } else if (numMbs < 22080) { level = 42; - } else if (numMbs < 36864) { + } else if (numMbs < 22080) { level = 50; + } else if (numMbs < 36864) { + level = 51; } else { + LOG_W("No such level can support that resolution"); level = 51; } return level; -- cgit v1.2.3 From f2336c948ada27e99d1085888fb1f9d465096865 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Tue, 20 Dec 2011 01:50:53 +0800 Subject: libmix:enable native buffer in libmix BZ: 18370 add native buffer support in libmix Signed-off-by: ywan171 Change-Id: 
I424d0f11aae17bf9f18590ad7a5193ea966cde5f Reviewed-on: http://android.intel.com:8080/27897 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- mix_video/src/mixvideoformat_h264.cpp | 2 +- mix_video/src/mixvideoformat_mp42.cpp | 3 +- mix_video/src/mixvideoformat_vc1.cpp | 2 +- mix_video/src/mixvideoformatenc_h263.cpp | 2 +- mix_video/src/mixvideoformatenc_h264.cpp | 2 +- mix_video/src/mixvideoformatenc_mpeg4.cpp | 2 +- mix_video/src/mixvideoformatenc_preview.cpp | 2 +- videodecoder/VideoDecoderBase.cpp | 174 +++++++++++++++++++++++++++- videodecoder/VideoDecoderBase.h | 12 ++ videodecoder/VideoDecoderDefs.h | 10 ++ videodecoder/VideoDecoderInterface.h | 3 + videoencoder/VideoEncoderBase.cpp | 2 +- 12 files changed, 203 insertions(+), 13 deletions(-) diff --git a/mix_video/src/mixvideoformat_h264.cpp b/mix_video/src/mixvideoformat_h264.cpp index a8213a7..7a43869 100755 --- a/mix_video/src/mixvideoformat_h264.cpp +++ b/mix_video/src/mixvideoformat_h264.cpp @@ -598,7 +598,7 @@ MIX_RESULT MixVideoFormat_H264::_initialize_va(vbp_data_h264 *data) { this->picture_height, VA_RT_FORMAT_YUV420, this->va_num_surfaces, - this->va_surfaces); + this->va_surfaces, NULL, 0); if (vret != VA_STATUS_SUCCESS) { ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; diff --git a/mix_video/src/mixvideoformat_mp42.cpp b/mix_video/src/mixvideoformat_mp42.cpp index e8c5fd1..627401e 100644 --- a/mix_video/src/mixvideoformat_mp42.cpp +++ b/mix_video/src/mixvideoformat_mp42.cpp @@ -182,7 +182,8 @@ MIX_RESULT MixVideoFormat_MP42::_initialize_va(vbp_data_mp42 *data) { this->picture_height, VA_RT_FORMAT_YUV420, this->va_num_surfaces, - this->va_surfaces); + this->va_surfaces, + NULL, 0); if (vret != VA_STATUS_SUCCESS) { ret = MIX_RESULT_FAIL; diff --git a/mix_video/src/mixvideoformat_vc1.cpp b/mix_video/src/mixvideoformat_vc1.cpp index cd672d9..82b5a78 100644 --- a/mix_video/src/mixvideoformat_vc1.cpp +++ b/mix_video/src/mixvideoformat_vc1.cpp @@ -261,7 +261,7 @@ MIX_RESULT MixVideoFormat_VC1::_initialize_va(vbp_data_vc1 *data) { this->picture_height, VA_RT_FORMAT_YUV420, this->va_num_surfaces, - this->va_surfaces); + this->va_surfaces, NULL , 0); if (vret != VA_STATUS_SUCCESS) { ret = MIX_RESULT_FAIL; diff --git a/mix_video/src/mixvideoformatenc_h263.cpp b/mix_video/src/mixvideoformatenc_h263.cpp index 78aec17..a2f9854 100644 --- a/mix_video/src/mixvideoformatenc_h263.cpp +++ b/mix_video/src/mixvideoformatenc_h263.cpp @@ -475,7 +475,7 @@ MIX_RESULT MixVideoFormatEnc_H263::Initialize( va_status = vaCreateSurfaces(va_display, this->picture_width, this->picture_height, this->va_format, - normal_surfaces_cnt, surfaces); + normal_surfaces_cnt, surfaces, NULL, 0); if (va_status != VA_STATUS_SUCCESS) { diff --git a/mix_video/src/mixvideoformatenc_h264.cpp b/mix_video/src/mixvideoformatenc_h264.cpp index 6c2374a..f227c8d 100644 --- a/mix_video/src/mixvideoformatenc_h264.cpp +++ b/mix_video/src/mixvideoformatenc_h264.cpp @@ -548,7 +548,7 @@ MixVideoFormatEnc_H264::Initialize( va_status = vaCreateSurfaces(va_display, this->picture_width, this->picture_height, this->va_format, - normal_surfaces_cnt, surfaces); + normal_surfaces_cnt, surfaces, NULL, 0); if (va_status != VA_STATUS_SUCCESS) { diff --git a/mix_video/src/mixvideoformatenc_mpeg4.cpp b/mix_video/src/mixvideoformatenc_mpeg4.cpp index 1f76307..4dcf9c6 100644 --- a/mix_video/src/mixvideoformatenc_mpeg4.cpp +++ b/mix_video/src/mixvideoformatenc_mpeg4.cpp @@ -433,7 +433,7 @@ MIX_RESULT MixVideoFormatEnc_MPEG4::Initialize( va_status = 
vaCreateSurfaces( va_display, this->picture_width, this->picture_height, this->va_format, - normal_surfaces_cnt, surfaces); + normal_surfaces_cnt, surfaces, NULL, 0); if (va_status != VA_STATUS_SUCCESS) { LOG_E("Failed vaCreateSurfaces\n"); diff --git a/mix_video/src/mixvideoformatenc_preview.cpp b/mix_video/src/mixvideoformatenc_preview.cpp index ae2f7bb..a1cdcbb 100644 --- a/mix_video/src/mixvideoformatenc_preview.cpp +++ b/mix_video/src/mixvideoformatenc_preview.cpp @@ -363,7 +363,7 @@ MIX_RESULT MixVideoFormatEnc_Preview::Initialize( va_status = vaCreateSurfaces( va_display, this->picture_width, this->picture_height, this->va_format, - normal_surfaces_cnt, surfaces); + normal_surfaces_cnt, surfaces, NULL, 0 ); if (va_status != VA_STATUS_SUCCESS) { LOG_E("Failed vaCreateSurfaces\n"); diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 0a68d68..c71eff6 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -66,18 +66,27 @@ VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type) mOutputHead(NULL), mOutputTail(NULL), mSurfaces(NULL), + mVASurfaceAttrib(NULL), + mVAExternalMemoryBuffers(NULL), mSurfaceUserPtr(NULL), mSurfaceAcquirePos(0), mNextOutputPOC(MINIMUM_POC), mParserType(type), - mParserHandle(NULL) { + mParserHandle(NULL), + initialized(false), + mSignalBufferSize(0){ memset(&mVideoFormatInfo, 0, sizeof(VideoFormatInfo)); memset(&mConfigBuffer, 0, sizeof(mConfigBuffer)); + for(int i =0 ; i < MAX_GRAPHIC_NUM ; i++) { + mSignalBufferPre[i] = NULL; + } + pthread_mutex_init(&mLock, NULL); mVideoFormatInfo.mimeType = strdup(mimeType); } VideoDecoderBase::~VideoDecoderBase() { + pthread_mutex_destroy(&mLock); stop(); free(mVideoFormatInfo.mimeType); } @@ -160,6 +169,7 @@ void VideoDecoderBase::flush(void) { // initialize surface buffer without resetting mapped/raw data initSurfaceBuffer(false); + } const VideoFormatInfo* VideoDecoderBase::getFormatInfo(void) { @@ -170,6 +180,7 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { if (mVAStarted == false) { return NULL; } + bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER; if (draining) { // complete decoding the last frame and ignore return @@ -188,6 +199,9 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { if (mOutputHead == NULL) { mOutputTail = NULL; } + if(useGraphicBuffer ) { + vaSyncSurface(mVADisplay,outputByPos->renderBuffer.surface); + } return &(outputByPos->renderBuffer); } @@ -232,6 +246,9 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { mOutputTail = NULL; } } + if(useGraphicBuffer ) { + vaSyncSurface(mVADisplay,output->renderBuffer.surface); + } //VTRACE("Output POC %d for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6); return &(output->renderBuffer); } @@ -601,6 +618,9 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { return DECODE_SUCCESS; } + if(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){ + numSurface = mConfigBuffer.surfaceNumber; + } // TODO: validate profile if (numSurface == 0) { return DECODE_FAIL; @@ -660,14 +680,52 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { format |= VA_RT_FORMAT_PROTECTED; LOGW("Surface is protected."); } - - vaStatus = vaCreateSurfaces( + if(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) { + mVASurfaceAttrib = new VASurfaceAttrib; + if (mVASurfaceAttrib == NULL) { + return DECODE_MEMORY_FAIL; + } + 
mVAExternalMemoryBuffers = new VAExternalMemoryBuffers; + if (mVAExternalMemoryBuffers == NULL) { + return DECODE_MEMORY_FAIL; + } + mVAExternalMemoryBuffers->buffers= (unsigned int *)malloc(sizeof(unsigned int)*mNumSurfaces); + if (mVAExternalMemoryBuffers->buffers == NULL) { + return DECODE_MEMORY_FAIL; + } + mVAExternalMemoryBuffers->count = mNumSurfaces; + mVAExternalMemoryBuffers->luma_stride= mConfigBuffer.graphicBufferStride; + mVAExternalMemoryBuffers->pixel_format = mConfigBuffer.graphicBufferColorFormat; + mVAExternalMemoryBuffers->width = mVideoFormatInfo.width; + mVAExternalMemoryBuffers->height = mVideoFormatInfo.height; + mVAExternalMemoryBuffers->type = VAExternalMemoryAndroidGrallocBuffer; + for(int i=0; i < mNumSurfaces; i++) { + mVAExternalMemoryBuffers->buffers[i] = (unsigned int )mConfigBuffer.graphicBufferHandler[i]; + } + mVASurfaceAttrib->flags = VA_SURFACE_ATTRIB_SETTABLE; + mVASurfaceAttrib->type = VASurfaceAttribNativeHandle; + mVASurfaceAttrib->value.type =VAGenericValueTypePointer; + mVASurfaceAttrib->value.value.p_val= (void *)mVAExternalMemoryBuffers; + vaStatus = vaCreateSurfaces( mVADisplay, mVideoFormatInfo.width, mVideoFormatInfo.height, format, mNumSurfaces, - mSurfaces); + mSurfaces, + mVASurfaceAttrib, + 1); + } else { + vaStatus = vaCreateSurfaces( + mVADisplay, + mVideoFormatInfo.width, + mVideoFormatInfo.height, + format, + mNumSurfaces, + mSurfaces, + NULL, + 0); + } CHECK_VA_STATUS("vaCreateSurfaces"); mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width; @@ -722,6 +780,22 @@ Decode_Status VideoDecoderBase::terminateVA(void) { mSurfaceBuffers = NULL; } + + if(mVAExternalMemoryBuffers) { + if(mVAExternalMemoryBuffers->buffers) { + free(mVAExternalMemoryBuffers->buffers); + mVAExternalMemoryBuffers->buffers= NULL; + } + delete mVAExternalMemoryBuffers; + mVAExternalMemoryBuffers = NULL; + } + + if(mVASurfaceAttrib) { + delete mVASurfaceAttrib; + mVASurfaceAttrib = NULL; + } + + if (mSurfaceUserPtr) { delete [] mSurfaceUserPtr; mSurfaceUserPtr = NULL; @@ -931,13 +1005,16 @@ Decode_Status VideoDecoderBase::getRawDataFromSurface(void) { } void VideoDecoderBase::initSurfaceBuffer(bool reset) { + bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER; + if(useGraphicBuffer && reset){ + pthread_mutex_lock(&mLock); + } for (int32_t i = 0; i < mNumSurfaces; i++) { mSurfaceBuffers[i].renderBuffer.display = mVADisplay; mSurfaceBuffers[i].renderBuffer.surface = VA_INVALID_SURFACE; // set in acquireSurfaceBuffer mSurfaceBuffers[i].renderBuffer.flag = 0; mSurfaceBuffers[i].renderBuffer.scanFormat = VA_FRAME_PICTURE; mSurfaceBuffers[i].renderBuffer.timeStamp = 0; - mSurfaceBuffers[i].renderBuffer.renderDone = true; mSurfaceBuffers[i].referenceFrame = false; mSurfaceBuffers[i].asReferernce= false; mSurfaceBuffers[i].pictureOrder = 0; @@ -946,6 +1023,93 @@ void VideoDecoderBase::initSurfaceBuffer(bool reset) { mSurfaceBuffers[i].renderBuffer.rawData = NULL; mSurfaceBuffers[i].mappedData = NULL; } + if (useGraphicBuffer){ + if(reset){ + mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = mConfigBuffer.graphicBufferHandler[i]; + mSurfaceBuffers[i].renderBuffer.renderDone = false; //default false + for(int j =0; j < mSignalBufferSize; j++){ + if(mSignalBufferPre[j] != NULL && mSignalBufferPre[j] == mSurfaceBuffers[i].renderBuffer.graphicBufferHandle){ + mSurfaceBuffers[i].renderBuffer.renderDone = true; + VTRACE("initSurfaceBuffer set renderDone = true index=%d",i); + mSignalBufferPre[j] = NULL; + break; + } + } + } + else{ + 
mSurfaceBuffers[i].renderBuffer.renderDone = false; + } + + } else { + mSurfaceBuffers[i].renderBuffer.graphicBufferHandle= NULL; + mSurfaceBuffers[i].renderBuffer.renderDone = true; + } + mSurfaceBuffers[i].renderBuffer.acquirePos = i; + } + + if(useGraphicBuffer && reset){ + initialized = true; + mSignalBufferSize = 0; + pthread_mutex_unlock(&mLock); } } +Decode_Status VideoDecoderBase::SignalRenderDoneFlag(void * graphichandler) { + + if (graphichandler == NULL) { + return DECODE_SUCCESS; + } + pthread_mutex_lock(&mLock); + int i = 0; + if(!initialized){ + mSignalBufferPre[mSignalBufferSize++] = graphichandler; + VTRACE("SignalRenderDoneFlag initialized = false graphichandler = %p, mSignalBufferSize=%d",graphichandler,mSignalBufferSize); + if(mSignalBufferSize > MAX_GRAPHIC_NUM) + return DECODE_INVALID_DATA; + } + else{ + if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) { + return DECODE_SUCCESS; + } + for (i = 0; i < mNumSurfaces; i++) { + if (mSurfaceBuffers[i].renderBuffer.graphicBufferHandle== graphichandler) { + mSurfaceBuffers[i].renderBuffer.renderDone = true; + VTRACE("SignalRenderDoneFlag initialized = true index =%d",i); + break; + } + } + } + pthread_mutex_unlock(&mLock); + + return DECODE_SUCCESS; + +} + +Decode_Status VideoDecoderBase::GetNativeBufferStatus(void * graphichandler, bool* used) { + bool inuse = false; + if(!initialized) { + *used == false; + return DECODE_NOT_STARTED; + } + + for (int32_t i = 0; i < mNumSurfaces; i++) { + if (mSurfaceBuffers[i].renderBuffer.graphicBufferHandle == graphichandler) { + if (mSurfaceBuffers[i].asReferernce == true ||mSurfaceBuffers + i == mAcquiredBuffer ) { + inuse = true; + } + break; + } + } + VideoSurfaceBuffer *p = mOutputHead; + + while (p!=NULL) { + if(p ->renderBuffer.graphicBufferHandle == graphichandler) { + inuse = true; + break; + } + p = p->next; + } + + *used = inuse; + return DECODE_SUCCESS; +} diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index da1a655..3df3e3f 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -28,6 +28,8 @@ #include #include "VideoDecoderDefs.h" #include "VideoDecoderInterface.h" +#include + extern "C" { #include "vbp_loader.h" @@ -48,6 +50,8 @@ public: //virtual Decode_Status decode(VideoDecodeBuffer *buffer); virtual void flush(void); virtual const VideoRenderBuffer* getOutput(bool draining = false); + virtual Decode_Status SignalRenderDoneFlag(void * graphichandler); + virtual Decode_Status GetNativeBufferStatus(void * graphichandler, bool* used); virtual const VideoFormatInfo* getFormatInfo(void); protected: @@ -75,6 +79,10 @@ private: Decode_Status getRawDataFromSurface(void); void initSurfaceBuffer(bool reset); + bool initialized; + pthread_mutex_t mLock; + + protected: VideoFormatInfo mVideoFormatInfo; Display *mDisplay; @@ -120,11 +128,15 @@ private: VideoSurfaceBuffer *mOutputHead; // head of output buffer list VideoSurfaceBuffer *mOutputTail; // tail of output buffer list VASurfaceID *mSurfaces; // surfaces array + VASurfaceAttrib *mVASurfaceAttrib; + VAExternalMemoryBuffers *mVAExternalMemoryBuffers; uint8_t **mSurfaceUserPtr; // mapped user space pointer int32_t mSurfaceAcquirePos; // position of surface to start acquiring int32_t mNextOutputPOC; // Picture order count of next output _vbp_parser_type mParserType; void *mParserHandle; + void *mSignalBufferPre[MAX_GRAPHIC_NUM]; + uint32 mSignalBufferSize; protected: void ManageReference(bool enable) {mManageReference = enable;} diff --git 
a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index a715d7c..5f1eac4 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -85,6 +85,9 @@ typedef enum { // indicates if buffer contains codec data HAS_CODECDATA = 0x1000, + // indicate if it use graphic buffer. + USE_NATIVE_GRAPHIC_BUFFER = 0x2000, + } VIDEO_BUFFER_FLAG; struct VideoDecodeBuffer { @@ -96,6 +99,8 @@ struct VideoDecodeBuffer { }; +#define MAX_GRAPHIC_NUM 16+1+6 // max DPB +1+AVC_EXTRA_NUM + struct VideoConfigBuffer { uint8_t *data; int32_t size; @@ -104,6 +109,9 @@ struct VideoConfigBuffer { int32_t surfaceNumber; VAProfile profile; uint32_t flag; + void *graphicBufferHandler[ MAX_GRAPHIC_NUM ]; + uint32_t graphicBufferStride; + uint32_t graphicBufferColorFormat; VideoFormatSpecificData *ext; }; @@ -114,6 +122,8 @@ struct VideoRenderBuffer { int64_t timeStamp; // presentation time stamp mutable volatile bool renderDone; // indicated whether frame is rendered, this must be set to false by the client of this library once // surface is rendered. Not setting this flag will lead to DECODE_NO_SURFACE error. + void * graphicBufferHandle; + int32_t acquirePos; //the acquirepos in graphichandle array uint32_t flag; VideoFrameRawData *rawData; }; diff --git a/videodecoder/VideoDecoderInterface.h b/videodecoder/VideoDecoderInterface.h index eb2d171..6b54deb 100644 --- a/videodecoder/VideoDecoderInterface.h +++ b/videodecoder/VideoDecoderInterface.h @@ -37,6 +37,9 @@ public: virtual Decode_Status decode(VideoDecodeBuffer *buffer) = 0; virtual const VideoRenderBuffer* getOutput(bool draining = false) = 0; virtual const VideoFormatInfo* getFormatInfo(void) = 0; + virtual Decode_Status SignalRenderDoneFlag(void * graphichandler) = 0; + virtual Decode_Status GetNativeBufferStatus(void * graphichandler, bool* used) = 0; + }; #endif /* VIDEO_DECODER_INTERFACE_H_ */ diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 5fe041b..0378af6 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -212,7 +212,7 @@ Encode_Status VideoEncoderBase::start() { vaStatus = vaCreateSurfaces(mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, - normalSurfacesCnt, surfaces); + normalSurfacesCnt, surfaces, NULL , 0); CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateSurfaces"); switch (mBufferMode) { -- cgit v1.2.3 From a6cb41555ddf01590bf37407d48a5e9219fc6c06 Mon Sep 17 00:00:00 2001 From: Ji Guoliang Date: Wed, 28 Dec 2011 15:47:29 -0500 Subject: libmix-videoencode: port the videoencode of libmix from R2 to R3 BZ: 18528 merge/port the changes of R2 to R3 Change-Id: Iea4f47decf70c81598659a0294ac5e0d79f63bcb Signed-off-by: Ji Guoliang Reviewed-on: http://android.intel.com:8080/30075 Reviewed-by: Yuan, Shengquan Reviewed-by: Chang, Ying Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderAVC.cpp | 107 +++++--- videoencoder/VideoEncoderAVC.h | 4 +- videoencoder/VideoEncoderBase.cpp | 500 +++++++++++++++++++++++++++++++------- videoencoder/VideoEncoderBase.h | 15 +- videoencoder/VideoEncoderDef.h | 19 ++ videoencoder/VideoEncoderH263.cpp | 5 +- videoencoder/VideoEncoderMP4.cpp | 9 +- 7 files changed, 525 insertions(+), 134 deletions(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 2f0cb62..5f65835 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -198,13 +198,7 @@ 
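// The hunk below drops the inline IDR-cadence check in favor of the shared
// setKeyFrame() helper added later in this series. A condensed sketch of the
// logic being factored out (names as they appear in the patch; in the default
// async mode the pipeline runs one frame ahead, so the first getOutput() call
// already sees mFrameNum == 2 and the period test is anchored at mFrameNum - 2):
//
//   if (mFrameNum > 2)
//       mKeyFrame = (keyFramePeriod != 0) &&
//                   (((mFrameNum - 2) % keyFramePeriod) == 0);
//   else if (mFrameNum == 2)
//       mKeyFrame = true;   // frame 0, the first frame output, is the IDR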
Encode_Status VideoEncoderAVC::getOutput(VideoEncOutputBuffer *outBuffer) { LOG_V("Begin\n"); CHECK_NULL_RETURN_IFFAIL(outBuffer); - if (mFrameNum > 2) { - if (idrPeroid != 0 && (((mFrameNum - 2) % idrPeroid) == 0)) { - mKeyFrame = true; - } else { - mKeyFrame = false; - } - } + setKeyFrame(idrPeroid); // prepare for output, map the coded buffer ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer); @@ -279,11 +273,14 @@ CLEAN_UP: Encode_Status VideoEncoderAVC::getOneNALUnit( uint8_t *inBuffer, uint32_t bufSize, uint32_t *nalSize, - uint32_t *nalType, uint32_t *nalOffset) { + uint32_t *nalType, uint32_t *nalOffset, uint32_t status) { uint32_t pos = 0; uint32_t zeroByteCount = 0; uint32_t prefixLength = 0; uint32_t leadingZeroCnt = 0; + uint32_t singleByteTable[3][2] = {{1,0},{2,0},{2,3}}; + uint32_t dataRemaining = 0; + uint8_t *dataPtr; // Don't need to check parameters here as we just checked by caller while ((inBuffer[pos++] == 0x00)) { @@ -303,39 +300,75 @@ Encode_Status VideoEncoderAVC::getOneNALUnit( zeroByteCount = 0; *nalOffset = pos; - while (pos < bufSize) { + if (status & VA_CODED_BUF_STATUS_AVC_SINGLE_NALU) { + *nalSize = bufSize - pos; + return ENCODE_SUCCESS; + } - while (inBuffer[pos++] == 0) { - zeroByteCount ++; - if (pos >= bufSize) //to make sure the buffer to be accessed is valid + dataPtr = inBuffer + pos; + dataRemaining = bufSize - pos + 1; + + while ((dataRemaining > 0) && (zeroByteCount < 3)) { + if (((((uint32_t)dataPtr) & 0xF ) == 0) && (0 == zeroByteCount) + && (dataRemaining > 0xF)) { + __asm__( + //Data input + "movl %1, %%ecx\n\t"//data_ptr=>ecx + "movl %0, %%eax\n\t"//data_remaing=>eax + //Main compare loop + "MATCH_8_ZERO:\n\t" + "pxor %%xmm0,%%xmm0\n\t"//set 0=>xmm0 + "pcmpeqb (%%ecx),%%xmm0\n\t"//data_ptr=xmm0,(byte==0)?0xFF:0x00 + "pmovmskb %%xmm0, %%edx\n\t"//edx[0]=xmm0[7],edx[1]=xmm0[15],...,edx[15]=xmm0[127] + "test $0xAAAA, %%edx\n\t"//edx& 1010 1010 1010 1010b + "jnz DATA_RET\n\t"//Not equal to zero means that at least one byte 0x00 + + "PREPARE_NEXT_MATCH:\n\t" + "sub $0x10, %%eax\n\t"//16 + ecx --> ecx + "add $0x10, %%ecx\n\t"//eax-16 --> eax + "cmp $0x10, %%eax\n\t" + "jge MATCH_8_ZERO\n\t"//search next 16 bytes + + "DATA_RET:\n\t" + "movl %%ecx, %1\n\t"//output ecx->data_ptr + "movl %%eax, %0\n\t"//output eax->data_remaining + : "+m"(dataRemaining), "+m"(dataPtr) + : + :"eax", "ecx", "edx", "xmm0" + ); + + if (0 >= dataRemaining) { break; - } - - if (inBuffer[pos - 1] == 0x01 && zeroByteCount >= 2) { - if (zeroByteCount == 2) { - prefixLength = 3; - } else { - prefixLength = 4; - leadingZeroCnt = zeroByteCount - 3; } - LOG_V("leading_zero_count = %d\n", leadingZeroCnt); - *nalSize = pos - *nalOffset - prefixLength - leadingZeroCnt; - break; - } else if (pos == bufSize) { - LOG_V ("The last NALU\n"); - *nalSize = pos - *nalOffset; - } else { + } + //check the value of each byte + if ((*dataPtr) >= 2) { + zeroByteCount = 0; - leadingZeroCnt = 0; + } + else { + zeroByteCount = singleByteTable[zeroByteCount][*dataPtr]; + } + + dataPtr ++; + dataRemaining --; } + if ((3 == zeroByteCount) && (dataRemaining > 0)) { + + *nalSize = bufSize - dataRemaining - *nalOffset - 3; + + } else if (0 == dataRemaining) { + + *nalSize = bufSize - *nalOffset; + } return ENCODE_SUCCESS; } Encode_Status VideoEncoderAVC::getHeader( - uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize) { + uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize, uint32_t status) { uint32_t nalType = 0; uint32_t nalSize = 0; @@ -355,7 +388,7 @@ Encode_Status 
VideoEncoderAVC::getHeader( while (1) { nalType = nalSize = nalOffset = 0; - ret = getOneNALUnit(buf, bufSize, &nalSize, &nalType, &nalOffset); + ret = getOneNALUnit(buf, bufSize, &nalSize, &nalType, &nalOffset, status); CHECK_ENCODE_STATUS_RETURN("getOneNALUnit"); LOG_I("NAL type = %d, NAL size = %d, offset = %d\n", nalType, nalSize, nalOffset); @@ -382,7 +415,7 @@ Encode_Status VideoEncoderAVC::outputCodecData( uint32_t headerSize = 0; ret = getHeader((uint8_t *)mCurSegment->buf + mOffsetInSeg, - mCurSegment->size - mOffsetInSeg, &headerSize); + mCurSegment->size - mOffsetInSeg, &headerSize, mCurSegment->status); CHECK_ENCODE_STATUS_RETURN("getHeader"); if (headerSize == 0) { outBuffer->dataSize = 0; @@ -423,7 +456,7 @@ Encode_Status VideoEncoderAVC::outputOneNALU( CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf); ret = getOneNALUnit((uint8_t *)mCurSegment->buf + mOffsetInSeg, - mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset); + mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset, mCurSegment->status); CHECK_ENCODE_STATUS_RETURN("getOneNALUnit"); // check if we need startcode along with the payload @@ -492,7 +525,7 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf // we need to handle the whole bitstream NAL by NAL ret = getOneNALUnit( (uint8_t *)mCurSegment->buf + mOffsetInSeg, - mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset); + mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset, mCurSegment->status); CHECK_ENCODE_STATUS_RETURN("getOneNALUnit"); if (nalSize + 4 <= outBuffer->bufferSize - sizeCopiedHere) { @@ -549,6 +582,13 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(void) { LOG_V( "Begin\n"); if (mFrameNum == 0 || mNewHeader) { + + if (mRenderHrd) { + ret = renderHrd(); + mRenderHrd = false; + CHECK_ENCODE_STATUS_RETURN("renderHrd"); + } + ret = renderSequenceParams(); CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); mNewHeader = false; //Set to require new header filed to false @@ -730,6 +770,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { avcSeqParams.basic_unit_size = mVideoParamsAVC.basicUnitSize; //for rate control usage avcSeqParams.intra_period = mComParams.intraPeriod; //avcSeqParams.vui_flag = 248; + avcSeqParams.vui_flag = mVideoParamsAVC.VUIFlag; avcSeqParams.seq_parameter_set_id = 8; // This is a temporary fix suggested by Binglin for bad encoding quality issue diff --git a/videoencoder/VideoEncoderAVC.h b/videoencoder/VideoEncoderAVC.h index c86b0b4..b57ef67 100644 --- a/videoencoder/VideoEncoderAVC.h +++ b/videoencoder/VideoEncoderAVC.h @@ -32,8 +32,8 @@ protected: private: // Local Methods - Encode_Status getOneNALUnit(uint8_t *inBuffer, uint32_t bufSize, uint32_t *nalSize, uint32_t *nalType, uint32_t *nalOffset); - Encode_Status getHeader(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize); + Encode_Status getOneNALUnit(uint8_t *inBuffer, uint32_t bufSize, uint32_t *nalSize, uint32_t *nalType, uint32_t *nalOffset, uint32_t status); + Encode_Status getHeader(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize, uint32_t status); Encode_Status outputCodecData(VideoEncOutputBuffer *outBuffer); Encode_Status outputOneNALU(VideoEncOutputBuffer *outBuffer, bool startCode); Encode_Status outputLengthPrefixed(VideoEncOutputBuffer *outBuffer); diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 0378af6..7e4a09f 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -11,9 +11,48 @@ 
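// Note on the hunk below: vaLockSurface() and vaCreateSurfaceFromKBuf() are
// declared in a local extern "C" block rather than included from a header --
// presumably because they are platform-specific libva extensions for sharing
// kernel-buffer (kbuf) backed surfaces that the stock va.h of this era did
// not declare. The cost of this pattern is that the prototypes must be kept
// in sync with the driver by hand; a mismatch compiles cleanly and fails only
// at run time. For example, the simplest of the three (signature as given in
// the patch):
//
//   extern "C" VAStatus vaUnlockSurface(VADisplay dpy, VASurfaceID surface);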
#include #include +#undef DUMP_SRC_DATA // To dump source data +// API declaration +extern "C" { +VAStatus vaLockSurface(VADisplay dpy, + VASurfaceID surface, + unsigned int *fourcc, + unsigned int *luma_stride, + unsigned int *chroma_u_stride, + unsigned int *chroma_v_stride, + unsigned int *luma_offset, + unsigned int *chroma_u_offset, + unsigned int *chroma_v_offset, + unsigned int *buffer_name, + void **buffer +); + +VAStatus vaUnlockSurface(VADisplay dpy, + VASurfaceID surface +); + +VAStatus vaCreateSurfaceFromKBuf( + VADisplay dpy, + int width, + int height, + int format, + VASurfaceID *surface, /* out */ + unsigned int kbuf_handle, /* kernel buffer handle*/ + unsigned size, /* kernel buffer size */ + unsigned int kBuf_fourcc, /* expected fourcc */ + unsigned int luma_stride, /* luma stride, could be width aligned with a special value */ + unsigned int chroma_u_stride, /* chroma stride */ + unsigned int chroma_v_stride, + unsigned int luma_offset, /* could be 0 */ + unsigned int chroma_u_offset, /* UV offset from the beginning of the memory */ + unsigned int chroma_v_offset +); +} + VideoEncoderBase::VideoEncoderBase() :mInitialized(false) ,mVADisplay(NULL) + ,mVADecoderDisplay(NULL) ,mVAContext(0) ,mVAConfig(0) ,mVAEntrypoint(VAEntrypointEncSlice) @@ -32,6 +71,7 @@ VideoEncoderBase::VideoEncoderBase() ,mRenderAIR(false) ,mRenderFrameRate(false) ,mRenderBitRate(false) + ,mRenderHrd(false) ,mLastCodedBuffer(0) ,mOutCodedBuffer(0) ,mSeqParamBuf(0) @@ -151,10 +191,13 @@ Encode_Status VideoEncoderBase::start() { } LOG_I("mReqSurfacesCnt = %d\n", mReqSurfacesCnt); + LOG_I("mUpstreamBufferCnt = %d\n", mUpstreamBufferCnt); if (mReqSurfacesCnt == 0) { switch (mBufferMode) { - case BUFFER_SHARING_CI: { + case BUFFER_SHARING_CI: + case BUFFER_SHARING_V4L2: + case BUFFER_SHARING_SURFACE: { mSharedSurfacesCnt = mUpstreamBufferCnt; normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE; @@ -166,13 +209,13 @@ Encode_Status VideoEncoderBase::start() { ret = ENCODE_NO_MEMORY; goto CLEAN_UP; } + } else { + LOG_E("Set to upstream mode, but no upstream info, something is wrong"); + ret = ENCODE_FAIL; + goto CLEAN_UP; } } break; - case BUFFER_SHARING_V4L2: - case BUFFER_SHARING_SURFACE: - // To be develped - break; default: mBufferMode = BUFFER_SHARING_NONE; normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE; @@ -184,7 +227,7 @@ Encode_Status VideoEncoderBase::start() { normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE; } else { mBufferMode = BUFFER_SHARING_USRPTR; - mUsrPtr = new uint8_t *[mReqSurfacesCnt]; + mUsrPtr = new uint8_t *[mReqSurfacesCnt]; if (mUsrPtr == NULL) { LOG_E("Failed allocate memory\n"); ret = ENCODE_NO_MEMORY; @@ -216,38 +259,17 @@ Encode_Status VideoEncoderBase::start() { CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateSurfaces"); switch (mBufferMode) { - case BUFFER_SHARING_CI: { - for (index = 0; index < mSharedSurfacesCnt; index++) { - - vaStatus = vaCreateSurfaceFromCIFrame( - mVADisplay, (uint32_t)mUpstreamBufferCnt, &mSharedSurfaces[index]); - - CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateSurfaceFromCIFrame"); - - mSurfaces[index] = mSharedSurfaces[index]; - - videoSurfaceBuffer = new VideoEncSurfaceBuffer; - if (videoSurfaceBuffer == NULL) { - LOG_E( "new VideoEncSurfaceBuffer failed\n"); - return ENCODE_NO_MEMORY; - } - - videoSurfaceBuffer->surface = mSharedSurfaces[index]; - videoSurfaceBuffer->usrptr = NULL; - videoSurfaceBuffer->index = index; - videoSurfaceBuffer->bufAvailable = true; - videoSurfaceBuffer->next = NULL; - - mVideoSrcBufferList 
= appendVideoSurfaceBuffer - (mVideoSrcBufferList, videoSurfaceBuffer); - videoSurfaceBuffer = NULL; - } - } + case BUFFER_SHARING_CI: + ret = surfaceMappingForCIFrameList(); + CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForCIFrameList"); break; case BUFFER_SHARING_V4L2: - case BUFFER_SHARING_SURFACE: // To be develped - break; + break; + case BUFFER_SHARING_SURFACE: + ret = surfaceMappingForSurfaceList(); + CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForSurfaceList"); + break; case BUFFER_SHARING_NONE: break; case BUFFER_SHARING_USRPTR: { @@ -338,16 +360,26 @@ CLEAN_UP: Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer) { - Encode_Status ret = ENCODE_SUCCESS; - VAStatus vaStatus = VA_STATUS_SUCCESS; - uint8_t *buf = NULL; - if (!mInitialized) { LOG_E("Encoder has not initialized yet\n"); return ENCODE_NOT_INIT; } CHECK_NULL_RETURN_IFFAIL(inBuffer); + if (mComParams.syncEncMode) { + LOG_I("Sync Enocde Mode, no optimization, no one frame delay\n"); + return syncEncode(inBuffer); + } else { + LOG_I("Async Enocde Mode, HW/SW works in parallel, introduce one frame delay\n"); + return asyncEncode(inBuffer); + } +} + +Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; + uint8_t *buf = NULL; inBuffer->bufAvailable = false; if (mNewHeader) mFrameNum = 0; @@ -364,6 +396,51 @@ Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer) { LOG_I( "Surface = 0x%08x\n",(uint32_t) mCurFrame->surface); LOG_I( "mVADisplay = 0x%08x\n",(uint32_t)mVADisplay); +#ifdef DUMP_SRC_DATA + + if (mBufferMode == BUFFER_SHARING_SURFACE && mFirstFrame){ + + FILE *fp = fopen("/data/data/dump_encoder.yuv", "wb"); + VAImage image; + uint8_t *usrptr = NULL; + uint32_t stride = 0; + uint32_t frameSize = 0; + + vaStatus = vaDeriveImage(mVADisplay, mCurFrame->surface, &image); + CHECK_VA_STATUS_RETURN("vaDeriveImage"); + + LOG_V( "vaDeriveImage Done\n"); + + frameSize = image.data_size; + stride = image.pitches[0]; + + LOG_I("Source Surface/Image information --- start ---- :"); + LOG_I("surface = 0x%08x\n",(uint32_t)mCurFrame->surface); + LOG_I("image->pitches[0] = %d\n", image.pitches[0]); + LOG_I("image->pitches[1] = %d\n", image.pitches[1]); + LOG_I("image->offsets[0] = %d\n", image.offsets[0]); + LOG_I("image->offsets[1] = %d\n", image.offsets[1]); + LOG_I("image->num_planes = %d\n", image.num_planes); + LOG_I("image->width = %d\n", image.width); + LOG_I("image->height = %d\n", image.height); + LOG_I ("frameSize= %d\n", image.data_size); + LOG_I("Source Surface/Image information ----end ----"); + + vaStatus = vaMapBuffer(mVADisplay, image.buf, (void **) &usrptr); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + fwrite(usrptr, frameSize, 1, fp); + fflush(fp); + fclose(fp); + + vaStatus = vaUnmapBuffer(mVADisplay, image.buf); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + + vaStatus = vaDestroyImage(mVADisplay, image.image_id); + CHECK_VA_STATUS_RETURN("vaDestroyImage"); + } +#endif + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurFrame->surface); CHECK_VA_STATUS_RETURN("vaBeginPicture"); @@ -440,6 +517,98 @@ Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer) { return ENCODE_SUCCESS; } +Encode_Status VideoEncoderBase::syncEncode(VideoEncRawBuffer *inBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; + uint8_t *buf = NULL; + VideoEncSurfaceBuffer *tmpFrame = NULL; + + inBuffer->bufAvailable = false; + if (mNewHeader) mFrameNum = 0; + + // 
current we use one surface for source data, + // one for reference and one for reconstructed + decideFrameType(); + ret = manageSrcSurface(inBuffer); + CHECK_ENCODE_STATUS_RETURN("manageSrcSurface"); + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurFrame->surface); + CHECK_VA_STATUS_RETURN("vaBeginPicture"); + + ret = sendEncodeCommand(); + CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand"); + + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS_RETURN("vaEndPicture"); + + LOG_I ("vaSyncSurface ID = 0x%08x\n", mCurFrame->surface); + vaStatus = vaSyncSurface(mVADisplay, mCurFrame->surface); + if (vaStatus != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaSyncSurface\n"); + } + + mOutCodedBuffer = mVACodedBuffer[mCodedBufIndex]; + + // Need map buffer before calling query surface below to get + // the right skip frame flag for current frame + // It is a requirement of video driver + vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + + // Query the status of current surface + VASurfaceStatus vaSurfaceStatus; + vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurFrame->surface, &vaSurfaceStatus); + CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); + + mPicSkipped = vaSurfaceStatus & VASurfaceSkipped; + + VideoEncoderBase::appendVideoSurfaceBuffer(mVideoSrcBufferList, mCurFrame); + mCurFrame = NULL; + + mEncodedFrames ++; + mFrameNum ++; + + if (!mPicSkipped) { + tmpFrame = mRecFrame; + mRecFrame = mRefFrame; + mRefFrame = tmpFrame; + } + + inBuffer->bufAvailable = true; + return ENCODE_SUCCESS; +} + +void VideoEncoderBase::setKeyFrame(int32_t keyFramePeriod) { + + // For first getOutput async mode, the mFrameNum already increased to 2, and of course is key frame + // frame 0 is already encoded and will be outputed here + // frame 1 is encoding now, frame 2 will be sent to encoder for next encode() call + if (!mComParams.syncEncMode) { + if (mFrameNum > 2) { + if (keyFramePeriod != 0 && + (((mFrameNum - 2) % keyFramePeriod) == 0)) { + mKeyFrame = true; + } else { + mKeyFrame = false; + } + } else if (mFrameNum == 2) { + mKeyFrame = true; + } + } else { + if (mFrameNum > 1) { + if (keyFramePeriod != 0 && + (((mFrameNum - 1) % keyFramePeriod) == 0)) { + mKeyFrame = true; + } else { + mKeyFrame = false; + } + } else if (mFrameNum == 1) { + mKeyFrame = true; + } + } +} + Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer) { Encode_Status ret = ENCODE_SUCCESS; @@ -455,17 +624,7 @@ Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer) { goto CLEAN_UP; } - // For first getOutput, the mFrameNum already increased to 2, and of course is key frame - // frame 0 is already encoded and will be outputed here - // frame 1 is encoding now, frame 2 will be sent to encoder for next encode() call - if (mFrameNum > 2) { - if (mComParams.intraPeriod != 0 && - (((mFrameNum - 2) % mComParams.intraPeriod) == 0)) { - mKeyFrame = true; - } else { - mKeyFrame = false; - } - } + setKeyFrame(mComParams.intraPeriod); ret = prepareForOutput(outBuffer, &useLocalBuffer); CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); @@ -588,8 +747,6 @@ Encode_Status VideoEncoderBase::stop() { } LOG_V( "Release surfaces\n"); - - LOG_V( "vaDestroyContext\n"); vaStatus = vaDestroyContext(mVADisplay, mVAContext); CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext"); @@ -615,7 +772,6 @@ CLEAN_UP: return ret; } - Encode_Status VideoEncoderBase::prepareForOutput( VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer) { @@ 
-657,7 +813,6 @@ Encode_Status VideoEncoderBase::prepareForOutput( } } - // We will support two buffer allocation mode, // one is application allocates the buffer and passes to encode, // the other is encode allocate memory @@ -787,12 +942,18 @@ void VideoEncoderBase::setDefaultParams() { mComParams.rcParams.bitRate = 640000; mComParams.rcParams.targetPercentage= 95; mComParams.rcParams.windowSize = 500; + mComParams.rcParams.disableFrameSkip = 0; + mComParams.rcParams.disableBitsStuffing = 1; mComParams.cyclicFrameInterval = 30; mComParams.refreshType = VIDEO_ENC_NONIR; mComParams.airParams.airMBs = 0; mComParams.airParams.airThreshold = 0; mComParams.airParams.airAuto = 1; mComParams.disableDeblocking = 2; + mComParams.syncEncMode = false; + + mHrdParam.bufferSize = 0; + mHrdParam.initBufferFullness = 0; } Encode_Status VideoEncoderBase::setParameters( @@ -830,7 +991,7 @@ Encode_Status VideoEncoderBase::setParameters( } ret = setUpstreamBuffer( - upStreamBuffer->bufferMode, upStreamBuffer->bufList, upStreamBuffer->bufCnt); + upStreamBuffer->bufferMode, upStreamBuffer->bufList, upStreamBuffer->bufCnt, (VADisplay)upStreamBuffer->display); break; } @@ -841,6 +1002,21 @@ Encode_Status VideoEncoderBase::setParameters( break; } + case VideoParamsTypeHRD: { + VideoParamsHRD *hrd = + reinterpret_cast (videoEncParams); + + if (hrd->size != sizeof (VideoParamsHRD)) { + return ENCODE_INVALID_PARAMS; + } + + mHrdParam.bufferSize = hrd->bufferSize; + mHrdParam.initBufferFullness = hrd->initBufferFullness; + mRenderHrd = true; + + break; + } + case VideoParamsTypeAVC: case VideoParamsTypeH263: case VideoParamsTypeMP4: @@ -853,14 +1029,10 @@ Encode_Status VideoEncoderBase::setParameters( LOG_E ("Wrong ParamType here\n"); break; } - } - return ret; - } - Encode_Status VideoEncoderBase::getParameters( VideoParamConfigSet *videoEncParams) { @@ -904,6 +1076,20 @@ Encode_Status VideoEncoderBase::getParameters( break; } + case VideoParamsTypeHRD: { + VideoParamsHRD *hrd = + reinterpret_cast (videoEncParams); + + if (hrd->size != sizeof (VideoParamsHRD)) { + return ENCODE_INVALID_PARAMS; + } + + hrd->bufferSize = mHrdParam.bufferSize; + hrd->initBufferFullness = mHrdParam.initBufferFullness; + + break; + } + case VideoParamsTypeAVC: case VideoParamsTypeH263: case VideoParamsTypeMP4: @@ -927,10 +1113,13 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { CHECK_NULL_RETURN_IFFAIL(videoEncConfig); LOG_I("Config type = %d\n", (int)videoEncConfig->type); + // workaround +#if 0 if (!mInitialized) { LOG_E("Encoder has not initialized yet, can't call setConfig\n"); return ENCODE_NOT_INIT; } +#endif switch (videoEncConfig->type) { case VideoConfigTypeFrameRate: { @@ -953,9 +1142,7 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { return ENCODE_INVALID_PARAMS; } mComParams.rcParams = configBitRate->rcParams; - mRenderBitRate = true; - break; } case VideoConfigTypeResolution: { @@ -972,7 +1159,6 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { return ENCODE_INVALID_PARAMS; } mComParams.refreshType = configIntraRefreshType->refreshType; - break; } @@ -996,9 +1182,7 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { } mComParams.airParams = configAIR->airParams; - mRenderAIR = true; - break; } case VideoConfigTypeAVCIntraPeriod: @@ -1007,7 +1191,6 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { case VideoConfigTypeSliceNum: { ret = 
derivedSetConfig(videoEncConfig); - break; } default: { @@ -1015,8 +1198,6 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { break; } } - - return ret; } @@ -1036,7 +1217,6 @@ Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) { } configFrameRate->frameRate = mComParams.frameRate; - break; } @@ -1065,7 +1245,6 @@ Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) { return ENCODE_INVALID_PARAMS; } configIntraRefreshType->refreshType = mComParams.refreshType; - break; } @@ -1077,7 +1256,6 @@ Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) { } configCyclicFrameInterval->cyclicFrameInterval = mComParams.cyclicFrameInterval; - break; } @@ -1090,7 +1268,6 @@ Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) { } configAIR->airParams = mComParams.airParams; - break; } case VideoConfigTypeAVCIntraPeriod: @@ -1099,7 +1276,6 @@ Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) { case VideoConfigTypeSliceNum: { ret = derivedGetConfig(videoEncConfig); - break; } default: { @@ -1107,7 +1283,6 @@ Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) { break; } } - return ret; } @@ -1165,10 +1340,8 @@ Encode_Status VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) { return ENCODE_NULL_PTR; } - LOG_V( "Begin\n"); - if (mCodedBufSize > 0) { *maxSize = mCodedBufSize; LOG_V ("Already calculate the max encoded size, get the value directly"); @@ -1306,7 +1479,7 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( Encode_Status VideoEncoderBase::setUpstreamBuffer( - VideoBufferSharingMode bufferMode, uint32_t *bufList, uint32_t bufCnt) { + VideoBufferSharingMode bufferMode, uint32_t *bufList, uint32_t bufCnt, VADisplay display) { CHECK_NULL_RETURN_IFFAIL(bufList); if (bufCnt == 0) { @@ -1317,6 +1490,8 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer( if (mUpstreamBufferList) delete [] mUpstreamBufferList; mUpstreamBufferCnt = bufCnt; + mVADecoderDisplay = display; + mBufferMode = bufferMode; mUpstreamBufferList = new uint32_t [bufCnt]; if (!mUpstreamBufferList) { LOG_E ("mUpstreamBufferList NULL\n"); @@ -1325,9 +1500,103 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer( memcpy(mUpstreamBufferList, bufList, bufCnt * sizeof (uint32_t)); return ENCODE_SUCCESS; +} + + +Encode_Status VideoEncoderBase::generateVideoBufferAndAttachToList(uint32_t index, uint8_t *usrptr) { + + VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL; + videoSurfaceBuffer = new VideoEncSurfaceBuffer; + if (videoSurfaceBuffer == NULL) { + LOG_E( "new VideoEncSurfaceBuffer failed\n"); + return ENCODE_NO_MEMORY; + } + videoSurfaceBuffer->surface = mSharedSurfaces[index]; + videoSurfaceBuffer->usrptr = NULL; + videoSurfaceBuffer->index = index; + videoSurfaceBuffer->bufAvailable = true; + videoSurfaceBuffer->next = NULL; + + mVideoSrcBufferList = appendVideoSurfaceBuffer + (mVideoSrcBufferList, videoSurfaceBuffer); + videoSurfaceBuffer = NULL; + + return ENCODE_SUCCESS; } +Encode_Status VideoEncoderBase::surfaceMappingForSurfaceList() { + + uint32_t index; + VAStatus vaStatus = VA_STATUS_SUCCESS; + Encode_Status ret = ENCODE_SUCCESS; + + uint32_t fourCC = 0; + uint32_t lumaStride = 0; + uint32_t chromaUStride = 0; + uint32_t chromaVStride = 0; + uint32_t lumaOffset = 0; + uint32_t chromaUOffset = 0; + uint32_t chromaVOffset = 0; + uint32_t kBufHandle = 0; + + for (index = 0; index < mSharedSurfacesCnt; index++) { + + vaStatus = 
vaLockSurface( + mVADecoderDisplay, (VASurfaceID)mUpstreamBufferList[index], + &fourCC, &lumaStride, &chromaUStride, &chromaVStride, + &lumaOffset, &chromaUOffset, &chromaVOffset, &kBufHandle, NULL); + + CHECK_VA_STATUS_RETURN("vaLockSurface"); + LOG_I("Surface incoming = 0x%08x", mUpstreamBufferList[index]); + LOG_I("lumaStride = %d", lumaStride); + LOG_I("chromaUStride = %d", chromaUStride); + LOG_I("chromaVStride = %d", chromaVStride); + LOG_I("lumaOffset = %d", lumaOffset); + LOG_I("chromaUOffset = %d", chromaUOffset); + LOG_I("chromaVOffset = %d", chromaVOffset); + LOG_I("kBufHandle = 0x%08x", kBufHandle); + LOG_I("fourCC = %d", fourCC); + + vaStatus = vaUnlockSurface(mVADecoderDisplay, (VASurfaceID)mUpstreamBufferList[index]); + CHECK_VA_STATUS_RETURN("vaUnlockSurface"); + + vaStatus = vaCreateSurfaceFromKBuf( + mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, + (VASurfaceID *)&mSharedSurfaces[index], kBufHandle, lumaStride * mComParams.resolution.height * 3 / 2, + fourCC, lumaStride, chromaUStride, chromaVStride, lumaOffset, chromaUOffset, chromaVOffset); + + CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf"); + + LOG_I("Surface ID created from Kbuf = 0x%08x", mSharedSurfaces[index]); + + mSurfaces[index] = mSharedSurfaces[index]; + ret = generateVideoBufferAndAttachToList(index, NULL); + CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList"); + } + + return ret; +} + +Encode_Status VideoEncoderBase::surfaceMappingForCIFrameList() { + uint32_t index; + VAStatus vaStatus = VA_STATUS_SUCCESS; + Encode_Status ret = ENCODE_SUCCESS; + + for (index = 0; index < mSharedSurfacesCnt; index++) { + + vaStatus = vaCreateSurfaceFromCIFrame( + mVADisplay, (uint32_t)mUpstreamBufferCnt, &mSharedSurfaces[index]); + + CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromCIFrame"); + + mSurfaces[index] = mSharedSurfaces[index]; + + ret = generateVideoBufferAndAttachToList(index, NULL); + CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList") + } + return ret; +} Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { @@ -1336,6 +1605,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { uint32_t idx = 0; uint32_t bufIndex = 0; + uint32_t data = 0; if (mBufferMode == BUFFER_SHARING_CI) { @@ -1352,11 +1622,33 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { } - } else if (mBufferMode == BUFFER_SHARING_USRPTR) { + } else if (mBufferMode == BUFFER_SHARING_SURFACE) { + + bufIndex = (uint32_t) -1; + data = *(uint32_t*)inBuffer->data; + + LOG_I("data = 0x%08x\n", data); + + for (idx = 0; idx < mSharedSurfacesCnt; idx++) { + + LOG_I("mUpstreamBufferList[%d] = 0x%08x\n", idx, mUpstreamBufferList[idx]); + if (data == mUpstreamBufferList[idx]) + bufIndex = idx; + } + + LOG_I("mSurfaceCnt = %d\n", mSurfaceCnt); + LOG_I("bufIndex = %d\n", bufIndex); + + if (bufIndex > mSurfaceCnt - 2) { + LOG_E("Can't find the surface in our list\n"); + ret = ENCODE_FAIL; + return ret; + } + }else if (mBufferMode == BUFFER_SHARING_USRPTR) { bufIndex = (uint32_t) -1; //fixme, temp use a big value - LOG_I("bufin->data = 0x%p \n", inBuffer->data); + LOG_I("bufin->data = 0x%p\n", inBuffer->data); for (idx = 0; idx < mReqSurfacesCnt; idx++) { LOG_I("mUsrPtr[%d] = 0x%p\n", idx, mUsrPtr[idx]); @@ -1372,7 +1664,6 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { LOG_W("the Surface idx is too big, most likely the buffer passed in is not allocated by us\n"); ret = 
ENCODE_FAIL; goto no_share_mode; - } } @@ -1380,6 +1671,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { switch (mBufferMode) { case BUFFER_SHARING_CI: + case BUFFER_SHARING_SURFACE: case BUFFER_SHARING_USRPTR: { if (mRefFrame== NULL) { @@ -1414,7 +1706,6 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { break; case BUFFER_SHARING_V4L2: - case BUFFER_SHARING_SURFACE: LOG_E("Not Implemented\n"); break; @@ -1618,12 +1909,7 @@ Encode_Status VideoEncoderBase::renderDynamicBitrate() { VAStatus vaStatus = VA_STATUS_SUCCESS; LOG_V( "Begin\n\n"); - - if (mComParams.rcMode != RATE_CONTROL_VCM) { - - LOG_W("Not in VCM mode, but call renderDynamicBitrate\n"); - return ENCODE_SUCCESS; - } + // disable bits stuffing and skip frame apply to all rate control mode VAEncMiscParameterBuffer *miscEncParamBuf; VAEncMiscParameterRateControl *bitrateControlParam; @@ -1648,6 +1934,16 @@ Encode_Status VideoEncoderBase::renderDynamicBitrate() { bitrateControlParam->min_qp = mComParams.rcParams.minQP; bitrateControlParam->target_percentage = mComParams.rcParams.targetPercentage; bitrateControlParam->window_size = mComParams.rcParams.windowSize; + bitrateControlParam->rc_flags.bits.disable_frame_skip = mComParams.rcParams.disableFrameSkip; + bitrateControlParam->rc_flags.bits.disable_bit_stuffing = mComParams.rcParams.disableBitsStuffing; + + LOG_I("bits_per_second = %d\n", bitrateControlParam->bits_per_second); + LOG_I("initial_qp = %d\n", bitrateControlParam->initial_qp); + LOG_I("min_qp = %d\n", bitrateControlParam->min_qp); + LOG_I("target_percentage = %d\n", bitrateControlParam->target_percentage); + LOG_I("window_size = %d\n", bitrateControlParam->window_size); + LOG_I("disable_frame_skip = %d\n", bitrateControlParam->rc_flags.bits.disable_frame_skip); + LOG_I("disable_bit_stuffing = %d\n", bitrateControlParam->rc_flags.bits.disable_bit_stuffing); vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID); CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); @@ -1699,3 +1995,35 @@ Encode_Status VideoEncoderBase::renderDynamicFrameRate() { LOG_I( "frame rate = %d\n", frameRateParam->framerate); return ENCODE_SUCCESS; } + +Encode_Status VideoEncoderBase::renderHrd() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + + VAEncMiscParameterBuffer *miscEncParamBuf; + VAEncMiscParameterHRD *hrdParam; + VABufferID miscParamBufferID; + + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncMiscParameterBufferType, + sizeof(miscEncParamBuf) + sizeof(VAEncMiscParameterHRD), + 1, NULL, &miscParamBufferID); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + miscEncParamBuf->type = VAEncMiscParameterTypeHRD; + hrdParam = (VAEncMiscParameterHRD *)miscEncParamBuf->data; + + hrdParam->buffer_size = mHrdParam.bufferSize; + hrdParam->initial_buffer_fullness = mHrdParam.initBufferFullness; + + vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + return ENCODE_SUCCESS; +} diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 8ea052c..6d68ff7 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -12,7 +12,6 @@ #include #include "VideoEncoderDef.h" #include "VideoEncoderInterface.h" - class VideoEncoderBase : IVideoEncoder { 
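// renderHrd() above follows the same libva misc-parameter pattern as
// renderDynamicBitrate() and renderDynamicFrameRate(): create a buffer of
// VAEncMiscParameterBufferType sized for the header plus the typed payload,
// map it, set the type, fill the payload that starts at data[], unmap, then
// render it into the current context. A minimal sketch of that pattern as a
// generic helper; submitMiscParam is an illustrative name, not part of this
// library's API, and error paths are simplified:
template <typename Payload>
VAStatus submitMiscParam(VADisplay dpy, VAContextID ctx,
                         VAEncMiscParameterType type, const Payload &p) {
    VABufferID bufId;
    VAEncMiscParameterBuffer *misc = NULL;
    // One buffer carries the header and the typed payload back to back.
    VAStatus s = vaCreateBuffer(dpy, ctx, VAEncMiscParameterBufferType,
            sizeof(VAEncMiscParameterBuffer) + sizeof(Payload), 1, NULL, &bufId);
    if (s != VA_STATUS_SUCCESS) return s;
    s = vaMapBuffer(dpy, bufId, (void **)&misc);
    if (s != VA_STATUS_SUCCESS) return s;
    misc->type = type;              // e.g. VAEncMiscParameterTypeHRD
    *(Payload *)misc->data = p;     // payload begins at the data[] field
    vaUnmapBuffer(dpy, bufId);
    return vaRenderPicture(dpy, ctx, &bufId, 1);
}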
public: @@ -32,7 +31,6 @@ public: */ virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); - virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams); virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams); virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig); @@ -40,7 +38,6 @@ public: virtual Encode_Status getMaxOutSize(uint32_t *maxSize); - protected: virtual Encode_Status sendEncodeCommand(void) = 0; virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) = 0; @@ -53,12 +50,17 @@ protected: Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer); Encode_Status renderDynamicFrameRate(); Encode_Status renderDynamicBitrate(); + Encode_Status renderHrd(); + void setKeyFrame(int32_t keyFramePeriod); private: void setDefaultParams(void); - Encode_Status setUpstreamBuffer(VideoBufferSharingMode bufferMode, uint32_t *bufList, uint32_t bufCnt); + Encode_Status setUpstreamBuffer(VideoBufferSharingMode bufferMode, uint32_t *bufList, uint32_t bufCnt, VADisplay display); Encode_Status getNewUsrptrFromSurface(uint32_t width, uint32_t height, uint32_t format, uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr); + Encode_Status generateVideoBufferAndAttachToList(uint32_t index, uint8_t *usrptr); + Encode_Status surfaceMappingForSurfaceList(); + Encode_Status surfaceMappingForCIFrameList(); VideoEncSurfaceBuffer *appendVideoSurfaceBuffer( VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer); @@ -71,11 +73,14 @@ private: void updateProperities(void); void decideFrameType(void); Encode_Status uploadDataToSurface(VideoEncRawBuffer *inBuffer); + Encode_Status syncEncode(VideoEncRawBuffer *inBuffer); + Encode_Status asyncEncode(VideoEncRawBuffer *inBuffer); protected: bool mInitialized; VADisplay mVADisplay; + VADisplay mVADecoderDisplay; VAContextID mVAContext; VAConfigID mVAConfig; VAEntrypoint mVAEntrypoint; @@ -86,6 +91,7 @@ protected: uint32_t mTotalSizeCopied; VideoParamsCommon mComParams; + VideoParamsHRD mHrdParam; VideoBufferSharingMode mBufferMode; uint32_t *mUpstreamBufferList; @@ -100,6 +106,7 @@ protected: bool mRenderAIR; bool mRenderFrameRate; bool mRenderBitRate; + bool mRenderHrd; VABufferID mVACodedBuffer[2]; VABufferID mLastCodedBuffer; diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 1e90094..6100789 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -181,6 +181,8 @@ struct VideoRateControlParams { uint32_t minQP; uint32_t windowSize; uint32_t targetPercentage; + uint32_t disableFrameSkip; + uint32_t disableBitsStuffing; VideoRateControlParams &operator=(const VideoRateControlParams &other) { if (this == &other) return *this; @@ -190,6 +192,8 @@ struct VideoRateControlParams { this->minQP = other.minQP; this->windowSize = other.windowSize; this->targetPercentage = other.targetPercentage; + this->disableFrameSkip = other.disableFrameSkip; + this->disableBitsStuffing = other.disableBitsStuffing; return *this; } }; @@ -216,6 +220,7 @@ enum VideoParamConfigType { VideoParamsTypeVC1, VideoParamsTypeUpSteamBuffer, VideoParamsTypeUsrptrBuffer, + VideoParamsTypeHRD, VideoConfigTypeFrameRate, VideoConfigTypeBitRate, @@ -257,6 +262,7 @@ struct VideoParamsCommon : VideoParamConfigSet { int32_t cyclicFrameInterval; AirParams airParams; uint32_t disableDeblocking; + bool syncEncMode; VideoParamsCommon() { type = VideoParamsTypeCommon; @@ -279,6 +285,7 @@ struct VideoParamsCommon : VideoParamConfigSet { 
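// The syncEncMode flag propagated below chooses between the two encode
// paths added in VideoEncoderBase.cpp above: syncEncode() blocks in
// vaSyncSurface() so the frame is ready without the one-frame delay, while
// asyncEncode() overlaps HW and SW at the cost of one frame of latency.
// A hedged client-side sketch of enabling sync mode, with all other
// required fields elided:
//
//     VideoParamsCommon common;           // type/size preset by constructor
//     encoder->getParameters(&common);    // read back current settings
//     common.syncEncMode = true;          // favor latency over throughput
//     encoder->setParameters(&common);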
this->cyclicFrameInterval = other.cyclicFrameInterval; this->airParams = other.airParams; this->disableDeblocking = other.disableDeblocking; + this->syncEncMode = other.syncEncMode; return *this; } }; @@ -321,6 +328,7 @@ struct VideoParamsUpstreamBuffer : VideoParamConfigSet { VideoBufferSharingMode bufferMode; uint32_t *bufList; uint32_t bufCnt; + void *display; }; struct VideoParamsUsrptrBuffer : VideoParamConfigSet { @@ -342,6 +350,17 @@ struct VideoParamsUsrptrBuffer : VideoParamConfigSet { uint8_t *usrPtr; }; +struct VideoParamsHRD : VideoParamConfigSet { + + VideoParamsHRD() { + type = VideoParamsTypeHRD; + size = sizeof(VideoParamsHRD); + } + + uint32_t bufferSize; + uint32_t initBufferFullness; +}; + struct VideoConfigFrameRate : VideoParamConfigSet { VideoConfigFrameRate() { diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp index 6fb510b..2aed78f 100644 --- a/videoencoder/VideoEncoderH263.cpp +++ b/videoencoder/VideoEncoderH263.cpp @@ -41,12 +41,15 @@ Encode_Status VideoEncoderH263::renderSequenceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncSequenceParameterBufferH263 h263SequenceParam; + uint32_t frameRateNum = mComParams.frameRate.frameRateNum; + uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom; LOG_V( "Begin\n\n"); //set up the sequence params for HW h263SequenceParam.bits_per_second= mComParams.rcParams.bitRate; - h263SequenceParam.frame_rate = 30; //hard-coded, driver need; + h263SequenceParam.frame_rate = + (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom; //hard-coded, driver need; h263SequenceParam.initial_qp = mComParams.rcParams.initQP; h263SequenceParam.min_qp = mComParams.rcParams.minQP; h263SequenceParam.intra_period = mComParams.intraPeriod; diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp index 37dce53..a220563 100644 --- a/videoencoder/VideoEncoderMP4.cpp +++ b/videoencoder/VideoEncoderMP4.cpp @@ -97,14 +97,7 @@ Encode_Status VideoEncoderMP4::getOutput(VideoEncOutputBuffer *outBuffer) { LOG_V("Begin\n"); CHECK_NULL_RETURN_IFFAIL(outBuffer); - if (mFrameNum > 2) { - if (mComParams.intraPeriod != 0 && - (((mFrameNum - 2) % mComParams.intraPeriod) == 0)) { - mKeyFrame = true; - } else { - mKeyFrame = false; - } - } + setKeyFrame(mComParams.intraPeriod); // prepare for output, map the coded buffer ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer); -- cgit v1.2.3 From 80b80b7bae806f958afa8f8b123f5cba92adee16 Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Tue, 27 Dec 2011 09:28:40 -0500 Subject: libmix/videodecoder: Patches porting from R2 to R3 BZ: 18443 original patches: 27557 - libmix: fix issue when B frame number between 2 P frames is bigger than 2 (BZ: 17193) 26595 - decoder: expose VA context surfaces vie getInfo() (BZ: 14001) 24710 - libmix: if the frame is sync frame in container, skip the reference frame check (BZ: 8998) 24457 - Flush decoded buffer while resolution is changed. 
(BZ: 14251) Signed-off-by: Weian Chen Change-Id: Ic954d2a7972649e8e21021b6547b31c2e0d78e57 Reviewed-on: http://android.intel.com:8080/30008 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderAVC.cpp | 8 +++++--- videodecoder/VideoDecoderBase.cpp | 14 ++++++++++++++ videodecoder/VideoDecoderBase.h | 2 ++ videodecoder/VideoDecoderDefs.h | 4 ++++ videodecoder/VideoDecoderMPEG4.cpp | 15 ++++++++++++--- videodecoder/VideoDecoderMPEG4.h | 1 + 6 files changed, 38 insertions(+), 6 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 7ceb4a6..6613935 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -136,7 +136,7 @@ Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)); } #endif - if (data->new_sps) { + if (data->new_sps || data->new_pps) { status = handleNewSequence(data); CHECK_STATUS("handleNewSequence"); } @@ -144,7 +144,9 @@ Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h // first pic_data always exists, check if any slice is parsed if (data->pic_data[0].num_slices == 0) { ITRACE("No slice available for decoding."); - return DECODE_SUCCESS; + status = mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS; + mSizeChanged = false; + return status; } uint64_t lastPTS = mCurrentPTS; @@ -674,7 +676,7 @@ Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) { } else { WTRACE("Video size changed from %d x %d to %d x %d.", width, height, mVideoFormatInfo.width, mVideoFormatInfo.height); - flush(); + flushSurfaceBuffers(); } return DECODE_SUCCESS; } diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index c71eff6..0f7f4ae 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -568,6 +568,19 @@ Decode_Status VideoDecoderBase::releaseSurfaceBuffer(void) { return DECODE_SUCCESS; } +void VideoDecoderBase::flushSurfaceBuffers(void) { + endDecodingFrame(true); + VideoSurfaceBuffer *p = NULL; + while (mOutputHead) { + mOutputHead->renderBuffer.renderDone = true; + p = mOutputHead; + mOutputHead = mOutputHead->next; + p->next = NULL; + } + mOutputHead = NULL; + mOutputTail = NULL; +} + Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) { Decode_Status status = DECODE_SUCCESS; VAStatus vaStatus; @@ -731,6 +744,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width; mVideoFormatInfo.surfaceHeight = mVideoFormatInfo.height; mVideoFormatInfo.surfaceNumber = mNumSurfaces; + mVideoFormatInfo.ctxSurfaces = mSurfaces; if ((int32_t)profile != VAProfileSoftwareDecoding) { vaStatus = vaCreateContext( diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 3df3e3f..8cf4579 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -62,6 +62,8 @@ protected: virtual Decode_Status outputSurfaceBuffer(void); // acquired surface buffer is not used virtual Decode_Status releaseSurfaceBuffer(void); + // flush all decoded but not rendered buffers + virtual void flushSurfaceBuffers(void); virtual Decode_Status endDecodingFrame(bool dropFrame); virtual VideoSurfaceBuffer* findOutputByPoc(bool draining = false); virtual VideoSurfaceBuffer* findOutputByPct(bool draining = false); diff --git a/videodecoder/VideoDecoderDefs.h 
b/videodecoder/VideoDecoderDefs.h index 5f1eac4..92a5668 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -88,6 +88,9 @@ typedef enum { // indicate if it use graphic buffer. USE_NATIVE_GRAPHIC_BUFFER = 0x2000, + // indicate whether it is a sync frame in container + IS_SYNC_FRAME = 0x4000, + } VIDEO_BUFFER_FLAG; struct VideoDecodeBuffer { @@ -146,6 +149,7 @@ struct VideoFormatInfo { int32_t surfaceWidth; int32_t surfaceHeight; int32_t surfaceNumber; + VASurfaceID *ctxSurfaces; int32_t aspectX; int32_t aspectY; int32_t cropLeft; diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index 5975436..2abcf37 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -73,6 +73,11 @@ Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) { if (buffer == NULL) { return DECODE_INVALID_DATA; } + if (buffer->flag & IS_SYNC_FRAME) { + mIsSyncFrame = true; + } else { + mIsSyncFrame = false; + } status = VideoDecoderBase::parseBuffer( buffer->data, buffer->size, @@ -209,7 +214,7 @@ Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) { return DECODE_NO_REFERENCE; } } else if (codingType == MP4_VOP_TYPE_P || codingType == MP4_VOP_TYPE_S) { - if (mLastReference == NULL) { + if (mLastReference == NULL&& mIsSyncFrame == false) { return DECODE_NO_REFERENCE; } } @@ -412,10 +417,14 @@ Decode_Status VideoDecoderMPEG4::setReference(VAPictureParameterBufferMPEG4 *pic picParam->backward_reference_picture = VA_INVALID_SURFACE; break; case MP4_VOP_TYPE_P: - if (mLastReference == NULL) { + if (mLastReference == NULL&& mIsSyncFrame == false) { return DECODE_NO_REFERENCE; } - picParam->forward_reference_picture = mLastReference->renderBuffer.surface; + if (mLastReference != NULL) { + picParam->forward_reference_picture = mLastReference->renderBuffer.surface; + } else { + picParam->forward_reference_picture = VA_INVALID_SURFACE; + } picParam->backward_reference_picture = VA_INVALID_SURFACE; break; case MP4_VOP_TYPE_B: diff --git a/videodecoder/VideoDecoderMPEG4.h b/videodecoder/VideoDecoderMPEG4.h index 79e5b99..300b583 100644 --- a/videodecoder/VideoDecoderMPEG4.h +++ b/videodecoder/VideoDecoderMPEG4.h @@ -64,6 +64,7 @@ private: bool mExpectingNVOP; // indicate if future n-vop is a placeholder of a packed frame bool mSendIQMatrixBuf; // indicate if iq_matrix_buffer is sent to driver int32_t mLastVOPCodingType; + bool mIsSyncFrame; // indicate if it is SyncFrame in container }; -- cgit v1.2.3 From 15b84426eae21f4c5e31746a7deac767c85ed7f0 Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Tue, 27 Dec 2011 06:51:55 -0500 Subject: mix_vbp: Porting patches from R2 to R3 BZ: 18443 --------------- patches history --------------- commit b649c693a0ee8cde9377f64f3874cce9063e0ae4 Author: Andy Qiu Date: Mon Nov 14 07:54:33 2011 -0800 Extended video middleware to support dynamic resolution change. BZ 14251 vbp_parser only reports new sequence when there is change in SPS or PPS id. For some contents, SPS and PPS stays the same but actual resolution is changed. Change vbp parser to compare current resolution with the previous resolution to detect if there is a new sequence. 
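In essence, the parser caches the frame dimensions it last reported in its codec data and now forces a new-sequence indication whenever the dimensions derived from the active SPS differ, even when the SPS/PPS ids are unchanged. A minimal sketch of that check, using the vbp_codec_data_h264 fields visible in the diff below (prev_w/prev_h stand for the dimensions cached before codec_data is refilled):

    /* Force new_sps/new_pps when the resolution changes under the same ids. */
    static void check_resolution_change(vbp_data_h264 *query_data,
                                        int prev_w, int prev_h)
    {
        vbp_codec_data_h264 *cd = query_data->codec_data;
        if (prev_w != cd->frame_width || prev_h != cd->frame_height)
        {
            query_data->new_sps = 1;
            query_data->new_pps = 1;
        }
    }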
Change-Id: Iafae7f0e0610df98edc77101b9a9df3341beed47 Signed-off-by: Andy Qiu Reviewed-on: http://android.intel.com:8080/24045 Reviewed-by: Chen, Weian Reviewed-by: Fang, Yanlong Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot commit 74c19eb381782be1e04684efb4856c77b4fa12b2 Author: Richard Tang Date: Thu Nov 3 17:58:56 2011 +0800 mix_vbp: viddec_parse_sc optimization BZ: 5661 viddec_parse_sc function is to parse SC code 0x00 0x00 0x01 from buffer. And it will also change the 32-bits algorithm to 128-bits algorithm. This patch will change algorithm to handle little endian directly. Change-Id: I3787b9c094159e8c8e671737ebb180746ff0b0bd Signed-off-by: Richard Tang Reviewed-on: http://android.intel.com:8080/21889 Reviewed-by: Qiu, Junhai Reviewed-by: Chen, Weian Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot Change-Id: I0cfa84bd28ca8e472f8de97018ea8ff3bea8710e Signed-off-by: Weian Chen Reviewed-on: http://android.intel.com:8080/30000 Reviewed-by: Tang, Richard Reviewed-by: Qiu, Junhai Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 52 +++++++------- mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c | 98 ++++++++++++++++++++++++++- 2 files changed, 118 insertions(+), 32 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index eae56a9..9ed4285 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -49,12 +49,6 @@ struct vbp_h264_parser_private_t /* indicate if stream is length prefixed */ int length_prefix_verified; - /* active sequence parameter set id */ - uint8 seq_parameter_set_id; - - /* active picture parameter set id */ - uint8 pic_parameter_set_id; - H264_BS_PATTERN bitstream_pattern; }; @@ -273,11 +267,6 @@ uint32 vbp_allocate_query_data_h264(vbp_context *pcontext) parser_private->bitstream_pattern = H264_BS_SC_PREFIXED; - /* range from 0 to 31 inclusive */ - parser_private->seq_parameter_set_id = 0xff; - - /* range from 0 to 255 inclusive */ - parser_private->pic_parameter_set_id = 0xff; return VBP_OK; cleanup: @@ -832,8 +821,16 @@ static inline void vbp_set_scaling_list_h264( static void vbp_set_codec_data_h264( struct h264_viddec_parser *parser, - vbp_codec_data_h264* codec_data) + vbp_data_h264 *query_data) { + vbp_codec_data_h264* codec_data = query_data->codec_data; + + /* The following variables are used to detect if there is new SPS or PPS */ + uint8 seq_parameter_set_id = codec_data->seq_parameter_set_id; + uint8 pic_parameter_set_id = codec_data->pic_parameter_set_id; + int frame_width = codec_data->frame_width; + int frame_height = codec_data->frame_height; + /* parameter id */ codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id; codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id; @@ -878,7 +875,7 @@ static void vbp_set_codec_data_h264( codec_data->crop_top = 0; codec_data->crop_bottom = 0; if(parser->info.active_SPS.sps_disp.frame_cropping_flag) { - int CropUnitX = 0, CropUnitY = 0, SubWidthC = 0, SubHeightC = 0; + int CropUnitX = 0, CropUnitY = 0, SubWidthC = 0, SubHeightC = 0; int ChromaArrayType = 0; if(parser->info.active_SPS.sps_disp.separate_colour_plane_flag == 0) { if(parser->info.active_SPS.sps_disp.chroma_format_idc == 1) { @@ -903,7 +900,7 @@ static void vbp_set_codec_data_h264( } codec_data->crop_left = CropUnitX * 
parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset; - codec_data->crop_right = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset; // + 1; + codec_data->crop_right = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset; // + 1; codec_data->crop_top = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset; codec_data->crop_bottom = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset; // + 1; } @@ -974,6 +971,17 @@ static void vbp_set_codec_data_h264( codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; codec_data->pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type; + + /* udpate sps and pps status */ + query_data->new_sps = (seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0; + query_data->new_pps = (pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 1 : 0; + query_data->has_sps = parser->info.active_SPS.seq_parameter_set_id != 0xff; + query_data->has_pps = parser->info.active_PPS.seq_parameter_set_id != 0xff; + if ( frame_width != codec_data->frame_width || frame_height != codec_data->frame_height) + { + query_data->new_sps = 1; + query_data->new_pps = 1; + } } @@ -1698,7 +1706,7 @@ uint32 vbp_populate_query_data_h264(vbp_context *pcontext) query_data = (vbp_data_h264 *)pcontext->query_data; private = (struct vbp_h264_parser_private_t *)pcontext->parser_private; - vbp_set_codec_data_h264(parser, query_data->codec_data); + vbp_set_codec_data_h264(parser, query_data); /* buffer number */ query_data->buf_number = buffer_counter; @@ -1721,20 +1729,6 @@ uint32 vbp_populate_query_data_h264(vbp_context *pcontext) vbp_add_pic_data_h264(pcontext, 0); } - query_data->new_pps = 0; - query_data->new_sps = 0; - if (private->seq_parameter_set_id != 0xff) - { - query_data->new_pps = (private->pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 1 : 0; - query_data->new_sps = (private->seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0; - } - - private->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id; - private->seq_parameter_set_id = parser->info.active_PPS.seq_parameter_set_id; - - query_data->has_sps = parser->info.active_SPS.seq_parameter_set_id != 0xff; - query_data->has_pps = parser->info.active_PPS.seq_parameter_set_id != 0xff; - return VBP_OK; } diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c index 6f5aae0..510349c 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c @@ -1,6 +1,100 @@ #include "viddec_pm_parse.h" #include "viddec_fw_debug.h" +#ifndef MFDBIGENDIAN +uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state) +{ + uint8_t *ptr; + uint32_t data_left=0, phase = 0, ret = 0; + uint32_t single_byte_table[3][2] = {{1, 0}, {2, 0}, {2, 3}}; + viddec_sc_parse_cubby_cxt_t *cxt; + /* What is phase?: phase is a value between [0-4], we keep track of consecutive '0's with this. + Any time a '0' is found its incremented by 1(uptp 2) and reset to '0' if a zero not found. + if 0xXX code is found and current phase is 2, its changed to 3 which means we found the pattern + we are looking for. 
Its incremented to 4 once we see a byte after this pattern */ + cxt = ( viddec_sc_parse_cubby_cxt_t *)in; + data_left = cxt->size; + ptr = cxt->buf; + phase = cxt->phase; + cxt->sc_end_pos = -1; + pcxt=pcxt; + + /* parse until there is more data and start code not found */ + while ((data_left > 0) && (phase < 3)) + { + /* Check if we are 16 bytes aligned & phase=0 & more than 16 bytes left, + if thats the case we can check work at a time instead of byte */ + + if (((((uint32_t)ptr) & 0xF) == 0) && (phase == 0) && (data_left > 0xF)) + { + // 15 14 13 12 11 10 09 08 07 06 05 04 03 02 01 00 -- check 16 bytes at one time + // 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? -- if no 00 at byte position: 15,13,11,09,07,05,03,01 + // it is impossible to have 0x010000 at these 16 bytes. + // so we cound drop 16 bytes one time (increase ptr, decrease data_left and keep phase = 0) + __asm__( + //Data input + "movl %1, %%ecx\n\t" //ptr-->ecx + "movl %0, %%eax\n\t" //data_left-->eax + + //Main compare loop + "MATCH_8_ZERO:\n\t" + "pxor %%xmm0,%%xmm0\n\t" //0 --> xmm0 + "pcmpeqb (%%ecx),%%xmm0\n\t" //uint128_data[ptr] eq xmm0 --> xmm0 , For each byte do calculation, (byte == 0x00)?0xFF:0x00 + "pmovmskb %%xmm0, %%edx\n\t" //xmm0(128)-->edx(32), edx[0]=xmm0[7], edx[1]=xmm0[15], ... , edx[15]=xmm0[127], edx[31-16]=0x0000 + "test $0xAAAA, %%edx\n\t" //edx& 1010 1010 1010 1010b + "jnz DATA_RET\n\t" //Not equal to zero means that at least one byte is 0x00. + + "PREPARE_NEXT_MATCH:\n\t" + "add $0x10, %%ecx\n\t" //16 + ecx --> ecx + "sub $0x10, %%eax\n\t" //eax-16 --> eax + "cmp $0x10, %%eax\n\t" //eax >= 16? + "jge MATCH_8_ZERO\n\t" //search next 16 bytes + + "DATA_RET:\n\t" + "movl %%ecx, %1\n\t" //ecx --> ptr + "movl %%eax, %0\n\t" //eax --> data_left + : "+m"(data_left), "+m"(ptr) //data_left --> eax, ptr -> ecx + : + :"eax", "ecx", "edx", "xmm0" + ); + + if (data_left <= 0) + { + break; + } + } + + //check byte one by one + // (*ptr) 0 1 >=2 + // phase=0 1 0 0 + // phase=1 2 0 0 + // phase=2 2 3 0 + if (*ptr >= 2) + { + phase = 0; + } + else + { + phase = single_byte_table[phase][*ptr]; + } + ptr ++; + data_left --; + } + if ((data_left > 0) && (phase == 3)) + { + viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state; + cxt->sc_end_pos = cxt->size - data_left; + state->next_sc = cxt->buf[cxt->sc_end_pos]; + state->second_scprfx_length = 3; + phase++; + ret = 1; + } + cxt->phase = phase; + /* Return SC found only if phase is 4, else always success */ + return ret; +} + +#else #define FIRST_STARTCODE_BYTE 0x00 #define SECOND_STARTCODE_BYTE 0x00 #define THIRD_STARTCODE_BYTE 0x01 @@ -45,9 +139,6 @@ uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state) char mask1 = 0, mask2=0; data = *((uint32_t *)ptr); -#ifndef MFDBIGENDIAN - data = SWAP_WORD(data); -#endif mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0)); mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1)); /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need @@ -125,3 +216,4 @@ uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state) /* Return SC found only if phase is 4, else always success */ return ret; } +#endif -- cgit v1.2.3 From 8bec736c285115754c8cd91629a8c3522f67465d Mon Sep 17 00:00:00 2001 From: Jason Hu Date: Fri, 30 Dec 2011 20:51:56 +0800 Subject: Pass down NativeWindow BZ: 15626 Change-Id: I78f69aa13367c2e59a20ef565ee3a6b7d192b9c5 Signed-off-by: Jason Hu Reviewed-on: http://android.intel.com:8080/30227 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao 
Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 1 + videodecoder/VideoDecoderDefs.h | 1 + 2 files changed, 2 insertions(+) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 0f7f4ae..15df3ca 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -709,6 +709,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { mVAExternalMemoryBuffers->count = mNumSurfaces; mVAExternalMemoryBuffers->luma_stride= mConfigBuffer.graphicBufferStride; mVAExternalMemoryBuffers->pixel_format = mConfigBuffer.graphicBufferColorFormat; + mVAExternalMemoryBuffers->native_window = mConfigBuffer.nativeWindow; mVAExternalMemoryBuffers->width = mVideoFormatInfo.width; mVAExternalMemoryBuffers->height = mVideoFormatInfo.height; mVAExternalMemoryBuffers->type = VAExternalMemoryAndroidGrallocBuffer; diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 92a5668..0e9949e 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -116,6 +116,7 @@ struct VideoConfigBuffer { uint32_t graphicBufferStride; uint32_t graphicBufferColorFormat; VideoFormatSpecificData *ext; + void* nativeWindow; }; struct VideoRenderBuffer { -- cgit v1.2.3 From fa6d499f8dd18fc96153088f5597f17871126e60 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Sun, 1 Jan 2012 21:15:02 +0800 Subject: mix_vbp: Porting patch#30260 from R2 to R3 BZ: 20200 --------------- patches history --------------- commit ac65b13110c4dea833a8c2ce5e1874fca212cdff Author: Dan Liang Date: Sun Jan 1 21:15:02 2012 +0800 mix_vbp: fix vbp parser issue when mpeg-4 resync marker is embedded in 00 00 03 byte sequence pattern. BZ: 18166 mpeg-4 spec doesn't support the mechanism of emulation prevention byte insertion, so no need to remove 0x03 from the bitstream. Otherwise the resync marker may not be found and cause slice data loss. 
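For context, H.264 inserts an emulation-prevention byte (0x03) after any 00 00 pair inside a NAL unit so that 00 00 01 can only ever mean a start code, and parsers strip that byte while reading; MPEG-4 Part 2 defines no such mechanism, so a 00 00 03 pattern there is ordinary payload, and removing the 0x03 shifts every following bit, which is how the resync marker was lost. A hedged sketch of the H.264-style stripping that must stay disabled (is_emul_reqd = 0) for MPEG-4:

    #include <stddef.h>
    #include <stdint.h>

    /* H.264-style removal: 00 00 03 xx -> 00 00 xx. Running this over an
       MPEG-4 bitstream corrupts payload that legitimately contains 0x03. */
    static size_t strip_emulation_bytes(const uint8_t *in, size_t len, uint8_t *out)
    {
        size_t o = 0;
        int zeros = 0;
        for (size_t i = 0; i < len; i++)
        {
            if (zeros == 2 && in[i] == 0x03)
            {
                zeros = 0;  /* drop the emulation-prevention byte */
                continue;
            }
            zeros = (in[i] == 0x00) ? (zeros < 2 ? zeros + 1 : 2) : 0;
            out[o++] = in[i];
        }
        return o;
    }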
Change-Id: I2b6314ce8ab2cf712461bed5a85c55898fd72426 Signed-off-by: Dan Liang Reviewed-on: http://android.intel.com:8080/30260 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot Change-Id: I4ad93d02428608e7f4ff481e350b6f4066bf0642 Reviewed-on: http://android.intel.com:8080/32268 Reviewed-by: Liang, Dan Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 4 ++-- mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index ca1d2e1..249a9f8 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -253,8 +253,8 @@ uint32 vbp_parse_start_code_mp42(vbp_context *pcontext) query_data->number_picture_data= 0; query_data->number_pictures = 0; - // emulation prevention byte is always present - cxt->getbits.is_emul_reqd = 1; + // emulation prevention byte is not needed + cxt->getbits.is_emul_reqd = 0; cxt->list.num_items = 0; cxt->list.data[0].stpos = 0; diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c index 693741f..4130234 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c @@ -155,7 +155,7 @@ static inline int32_t viddec_pm_utils_getbytes(viddec_pm_utils_bstream_buf_cxt_t while (valid_bytes_read < num_bytes) { cur_byte = bstream->buf[bstream->buf_index + *act_bytes]; - if ((cur_byte == 0x3) &&(*phase == 2)) + if (emul_reqd && (cur_byte == 0x3) &&(*phase == 2)) {/* skip emulation byte. 
we update the phase only if emulation prevention is enabled */ *phase = 0; } -- cgit v1.2.3 From cb7c4a02023784717a3b2746ff2642aced76c28d Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Mon, 30 Jan 2012 11:08:02 -0500 Subject: libmix: enable gfx handle buffer sharing mode in encoder library BZ: 21566 enable gfx handle buffer sharing mode, specific for WiDi ext video mode so far Signed-off-by: Weian Chen Change-Id: I809cddb17a57c6016eee8e3e2b602a0bf431a9c7 Reviewed-on: http://android.intel.com:8080/33189 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 146 +++++++++++++++++++++++++++++++++----- videoencoder/VideoEncoderBase.h | 4 +- videoencoder/VideoEncoderDef.h | 10 +++ 3 files changed, 141 insertions(+), 19 deletions(-) mode change 100644 => 100755 videoencoder/VideoEncoderBase.cpp diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp old mode 100644 new mode 100755 index 7e4a09f..2f45064 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -63,6 +63,7 @@ VideoEncoderBase::VideoEncoderBase() ,mBufferMode(BUFFER_SHARING_NONE) ,mUpstreamBufferList(NULL) ,mUpstreamBufferCnt(0) + ,mBufAttrib(NULL) ,mForceKeyFrame(false) ,mNewHeader(false) ,mFirstFrame (true) @@ -197,7 +198,9 @@ Encode_Status VideoEncoderBase::start() { switch (mBufferMode) { case BUFFER_SHARING_CI: case BUFFER_SHARING_V4L2: - case BUFFER_SHARING_SURFACE: { + case BUFFER_SHARING_SURFACE: + case BUFFER_SHARING_GFXHANDLE: + { mSharedSurfacesCnt = mUpstreamBufferCnt; normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE; @@ -214,12 +217,15 @@ Encode_Status VideoEncoderBase::start() { ret = ENCODE_FAIL; goto CLEAN_UP; } + break; } - break; + default: + { mBufferMode = BUFFER_SHARING_NONE; normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE; break; + } } } else if (mReqSurfacesCnt == 1) { // TODO: Un-normal case, @@ -262,14 +268,18 @@ Encode_Status VideoEncoderBase::start() { case BUFFER_SHARING_CI: ret = surfaceMappingForCIFrameList(); CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForCIFrameList"); - break; + break; case BUFFER_SHARING_V4L2: // To be develped - break; + break; case BUFFER_SHARING_SURFACE: ret = surfaceMappingForSurfaceList(); CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForSurfaceList"); - break; + break; + case BUFFER_SHARING_GFXHANDLE: + ret = surfaceMappingForGfxHandle(); + CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForGfxHandle"); + break; case BUFFER_SHARING_NONE: break; case BUFFER_SHARING_USRPTR: { @@ -282,7 +292,7 @@ Encode_Status VideoEncoderBase::start() { index ++; } } - break; + break; default: break; } @@ -719,6 +729,11 @@ Encode_Status VideoEncoderBase::stop() { mUpstreamBufferList = NULL; } + if (mBufAttrib) { + delete mBufAttrib; + mBufAttrib = NULL; + } + // It is possible that above pointers have been allocated // before we set mInitialized to true if (!mInitialized) { @@ -990,8 +1005,7 @@ Encode_Status VideoEncoderBase::setParameters( return ENCODE_INVALID_PARAMS; } - ret = setUpstreamBuffer( - upStreamBuffer->bufferMode, upStreamBuffer->bufList, upStreamBuffer->bufCnt, (VADisplay)upStreamBuffer->display); + ret = setUpstreamBuffer(upStreamBuffer); break; } @@ -1478,27 +1492,41 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( } -Encode_Status VideoEncoderBase::setUpstreamBuffer( - VideoBufferSharingMode bufferMode, uint32_t *bufList, uint32_t bufCnt, VADisplay display) { +Encode_Status 
VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer) { - CHECK_NULL_RETURN_IFFAIL(bufList); - if (bufCnt == 0) { + CHECK_NULL_RETURN_IFFAIL(upStreamBuffer); + if (upStreamBuffer->bufCnt == 0) { LOG_E("bufCnt == 0\n"); return ENCODE_FAIL; } if (mUpstreamBufferList) delete [] mUpstreamBufferList; + if (mBufAttrib) delete mBufAttrib; + + mUpstreamBufferCnt = upStreamBuffer->bufCnt; + mVADecoderDisplay = upStreamBuffer->display; + mBufferMode = upStreamBuffer->bufferMode; + mBufAttrib = new ExternalBufferAttrib; + if (!mBufAttrib) { + LOG_E ("mBufAttrib NULL\n"); + return ENCODE_NO_MEMORY; + } + + if (upStreamBuffer->bufAttrib) { + mBufAttrib->format = upStreamBuffer->bufAttrib->format; + mBufAttrib->lumaStride = upStreamBuffer->bufAttrib->lumaStride; + } else { + LOG_E ("Buffer Attrib doesn't set by client, return error"); + return ENCODE_INVALID_PARAMS; + } - mUpstreamBufferCnt = bufCnt; - mVADecoderDisplay = display; - mBufferMode = bufferMode; - mUpstreamBufferList = new uint32_t [bufCnt]; + mUpstreamBufferList = new uint32_t [upStreamBuffer->bufCnt]; if (!mUpstreamBufferList) { LOG_E ("mUpstreamBufferList NULL\n"); return ENCODE_NO_MEMORY; } - memcpy(mUpstreamBufferList, bufList, bufCnt * sizeof (uint32_t)); + memcpy(mUpstreamBufferList, upStreamBuffer->bufList, upStreamBuffer->bufCnt * sizeof (uint32_t)); return ENCODE_SUCCESS; } @@ -1578,6 +1606,87 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurfaceList() { return ret; } +Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle() { + + uint32_t index; + VAStatus vaStatus = VA_STATUS_SUCCESS; + Encode_Status ret = ENCODE_SUCCESS; + + VASurfaceAttrib * vaSurfaceAttrib = new VASurfaceAttrib; + if (vaSurfaceAttrib == NULL) { + LOG_E("Failed to allocate VASurfaceAttrib\n"); + return ENCODE_NO_MEMORY; + } + + VAExternalMemoryBuffers *vaExternalMemoryBuffers = new VAExternalMemoryBuffers; + if (vaExternalMemoryBuffers == NULL) { + LOG_E("Failed to allocate VAExternalMemoryBuffers\n"); + return ENCODE_NO_MEMORY; + } + + vaExternalMemoryBuffers->buffers = new uint32_t[mSharedSurfacesCnt]; + if (vaExternalMemoryBuffers->buffers == NULL) { + LOG_E("Failed to allocate buffers for VAExternalMemoryBuffers\n"); + return ENCODE_NO_MEMORY; + } + + LOG_I("mSharedSurfacesCnt = %d\n", mSharedSurfacesCnt); + LOG_I("lumaStride = %d\n", mBufAttrib->lumaStride); + LOG_I("format = 0x%08x\n", mBufAttrib->format); + LOG_I("width = %d\n", mComParams.resolution.width); + LOG_I("height = %d\n", mComParams.resolution.height); + + vaExternalMemoryBuffers->count = mSharedSurfacesCnt; + vaExternalMemoryBuffers->luma_stride = mBufAttrib->lumaStride; + vaExternalMemoryBuffers->pixel_format = mBufAttrib->format; + vaExternalMemoryBuffers->width = mComParams.resolution.width; + vaExternalMemoryBuffers->height = mComParams.resolution.height; + vaExternalMemoryBuffers->type = VAExternalMemoryAndroidGrallocBuffer; + for(index = 0; index < mSharedSurfacesCnt; index++) { + vaExternalMemoryBuffers->buffers[index] = (uint32_t) mUpstreamBufferList[index]; + LOG_I("NativeHandleList[%d] = 0x%08x", index, mUpstreamBufferList[index]); + } + vaSurfaceAttrib->flags = VA_SURFACE_ATTRIB_SETTABLE; + vaSurfaceAttrib->type = VASurfaceAttribNativeHandle; + vaSurfaceAttrib->value.type = VAGenericValueTypePointer; + vaSurfaceAttrib->value.value.p_val = (void *)vaExternalMemoryBuffers; + vaStatus = vaCreateSurfaces( + mVADisplay, + mComParams.resolution.width, + mComParams.resolution.height, + VA_RT_FORMAT_YUV420, + mSharedSurfacesCnt, + mSharedSurfaces, + 
vaSurfaceAttrib, + 1); + + CHECK_VA_STATUS_RETURN("vaCreateSurfaces"); + LOG_V("Successfully create surfaces from native hanle"); + + for(index = 0; index < mSharedSurfacesCnt; index++) { + mSurfaces[index] = mSharedSurfaces[index]; + ret = generateVideoBufferAndAttachToList(index, NULL); + LOG_I("mSurfaces[%d] = %08x", index, mSurfaces[index]); + CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList"); + } + if(vaExternalMemoryBuffers) { + if(vaExternalMemoryBuffers->buffers) { + delete [] vaExternalMemoryBuffers->buffers; + vaExternalMemoryBuffers->buffers= NULL; + } + delete vaExternalMemoryBuffers; + vaExternalMemoryBuffers = NULL; + } + + if(vaSurfaceAttrib) { + delete vaSurfaceAttrib; + vaSurfaceAttrib = NULL; + } + + LOG_V("surfaceMappingForGfxHandle: Done"); + return ret; +} + Encode_Status VideoEncoderBase::surfaceMappingForCIFrameList() { uint32_t index; VAStatus vaStatus = VA_STATUS_SUCCESS; @@ -1622,7 +1731,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { } - } else if (mBufferMode == BUFFER_SHARING_SURFACE) { + } else if (mBufferMode == BUFFER_SHARING_SURFACE || mBufferMode == BUFFER_SHARING_GFXHANDLE) { bufIndex = (uint32_t) -1; data = *(uint32_t*)inBuffer->data; @@ -1672,6 +1781,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { case BUFFER_SHARING_CI: case BUFFER_SHARING_SURFACE: + case BUFFER_SHARING_GFXHANDLE: case BUFFER_SHARING_USRPTR: { if (mRefFrame== NULL) { diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 6d68ff7..a03f0dd 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -55,11 +55,12 @@ protected: private: void setDefaultParams(void); - Encode_Status setUpstreamBuffer(VideoBufferSharingMode bufferMode, uint32_t *bufList, uint32_t bufCnt, VADisplay display); + Encode_Status setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer); Encode_Status getNewUsrptrFromSurface(uint32_t width, uint32_t height, uint32_t format, uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr); Encode_Status generateVideoBufferAndAttachToList(uint32_t index, uint8_t *usrptr); Encode_Status surfaceMappingForSurfaceList(); + Encode_Status surfaceMappingForGfxHandle(); Encode_Status surfaceMappingForCIFrameList(); VideoEncSurfaceBuffer *appendVideoSurfaceBuffer( @@ -96,6 +97,7 @@ protected: VideoBufferSharingMode mBufferMode; uint32_t *mUpstreamBufferList; uint32_t mUpstreamBufferCnt; + ExternalBufferAttrib *mBufAttrib; bool mForceKeyFrame; bool mNewHeader; diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 6100789..91657d0 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -97,6 +97,7 @@ enum VideoBufferSharingMode { BUFFER_SHARING_V4L2 = 4, BUFFER_SHARING_SURFACE = 8, BUFFER_SHARING_USRPTR = 16, + BUFFER_SHARING_GFXHANDLE = 32, BUFFER_LAST }; @@ -211,6 +212,14 @@ struct SliceNum { } }; +typedef struct { + uint32_t width; + uint32_t height; + uint32_t lumaStride; + uint32_t chromStride; + uint32_t format; +} ExternalBufferAttrib; + enum VideoParamConfigType { VideoParamsTypeStartUnused = 0x01000000, VideoParamsTypeCommon, @@ -328,6 +337,7 @@ struct VideoParamsUpstreamBuffer : VideoParamConfigSet { VideoBufferSharingMode bufferMode; uint32_t *bufList; uint32_t bufCnt; + ExternalBufferAttrib *bufAttrib; void *display; }; -- cgit v1.2.3 From 3e43556d34927ce2d5f33337928eb2f9557c324a Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Wed, 1 Feb 2012 08:37:44 
-0500 Subject: [patch porting R2->R3] for bug 17183 - video screen shaking (libmix part) BZ: 21825 solution reworked as code base changed Signed-off-by: Weian Chen Change-Id: Ie53c7c639d9a152c487a8f0e8ad1d707bf5d5cf3 Reviewed-on: http://android.intel.com:8080/33455 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderAVC.h | 2 +- videodecoder/VideoDecoderBase.cpp | 4 ++-- videodecoder/VideoDecoderDefs.h | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h index c3a1fe5..1794e03 100644 --- a/videodecoder/VideoDecoderAVC.h +++ b/videodecoder/VideoDecoderAVC.h @@ -65,7 +65,7 @@ private: }; enum { - AVC_EXTRA_SURFACE_NUMBER = 6, + AVC_EXTRA_SURFACE_NUMBER = 11, // maximum DPB (Decoded Picture Buffer) size MAX_REF_NUMBER = 16, DPB_SIZE = 17, // DPB_SIZE = MAX_REF_NUMBER + 1, diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 15df3ca..1cf8e25 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -34,10 +34,10 @@ #define ANDROID_DISPLAY_HANDLE 0x18C34078 // TODO: check what is the best number. Must be at least 2 to support one backward reference frame. -// Currently set to 3 to support 2 backward reference frames. This value is used for AVC frame reordering only. +// Currently set to 8 to support 7 backward reference frames. This value is used for AVC frame reordering only. // e.g: // POC: 4P, 8P, 10P, 6B and mNextOutputPOC = 5 -#define OUTPUT_WINDOW_SIZE 3 +#define OUTPUT_WINDOW_SIZE 8 VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type) diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 0e9949e..f4cea28 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -102,7 +102,7 @@ struct VideoDecodeBuffer { }; -#define MAX_GRAPHIC_NUM 16+1+6 // max DPB +1+AVC_EXTRA_NUM +#define MAX_GRAPHIC_NUM 16 + 1 + 11 // max DPB + 1 + AVC_EXTRA_NUM struct VideoConfigBuffer { uint8_t *data; -- cgit v1.2.3 From 2061f2a5428d577e9e422b7d6f7a27477332f871 Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Fri, 3 Feb 2012 10:52:08 -0500 Subject: libmix: fix the buffer status messy issue BZ: 20262 18785 when getting new sps/pps, but the resolution is not changed, we won't sent out format_changed, it is a workaround here Signed-off-by: Weian Chen Change-Id: I670b329df58c5fa494a9e5bb69baee92798dbc62 Reviewed-on: http://android.intel.com:8080/33841 Reviewed-by: Wang, Yi A Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderAVC.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 6613935..dbb13d4 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -673,6 +673,7 @@ Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) { if (width == mVideoFormatInfo.width && height == mVideoFormatInfo.height) { ITRACE("New video sequence with the same resolution."); + mSizeChanged = false; } else { WTRACE("Video size changed from %d x %d to %d x %d.", width, height, mVideoFormatInfo.width, mVideoFormatInfo.height); -- cgit v1.2.3 From fea762d9c60f79378c3cb7b5777fb3af160cc6cf Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Wed, 15 Feb 2012 04:01:27 -0500 Subject: libmix: fix the WMV skip frame handling issue BZ: 22714 we do nothing for skip frame 
as the framework will render the last frame natively Change-Id: I54a0c39b82511ae865e65acb99ed79546fb3c0a7 Signed-off-by: Weian Chen Reviewed-on: http://android.intel.com:8080/35355 Reviewed-by: Guo, Nana N Reviewed-by: Jiang, Fei Reviewed-by: Qiu, Junhai Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderWMV.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/videodecoder/VideoDecoderWMV.cpp b/videodecoder/VideoDecoderWMV.cpp index f3d5fbc..b32edf4 100644 --- a/videodecoder/VideoDecoderWMV.cpp +++ b/videodecoder/VideoDecoderWMV.cpp @@ -105,6 +105,11 @@ Decode_Status VideoDecoderWMV::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_v } if (data->pic_data[0].picture_is_skipped == VC1_PTYPE_SKIPPED) { + + // Do nothing for a skip frame, as the last frame will be rendered again natively + // No need to handle the reference frame either + return DECODE_SUCCESS; +#if 0 //use the last P or I frame surface for skipped frame and treat it as P frame if (mLastReference == NULL) { // TODO: handle this case @@ -124,6 +129,7 @@ Decode_Status VideoDecoderWMV::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_v // let outputSurfaceBuffer handle "asReference" for VC1 status = outputSurfaceBuffer(); return status; +#endif } status = acquireSurfaceBuffer(); -- cgit v1.2.3 From f49b5958f5c4402cb768bede0860b58cd517909b Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Mon, 20 Feb 2012 04:51:44 -0500 Subject: libmix: fix corruption issue for MPEG-4:2 clips BZ: 23492 Fix the corruption issue for MPEG-4:2 clips by handling skip frames correctly Signed-off-by: Weian Chen Change-Id: I64a8caebbca9ad8b38b9eb4d29de5b42083597b3 Reviewed-on: http://android.intel.com:8080/35821 Reviewed-by: Qiu, Junhai Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderMPEG4.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index 2abcf37..c77f24c 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -183,6 +183,9 @@ Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) { mExpectingNVOP = false; } else { + // Do nothing for a skip frame, as the last frame will be rendered again natively + // No need to handle the reference frame either +#if 0 // this is skipped frame, use the last reference frame as output status = acquireSurfaceBuffer(); CHECK_STATUS("acquireSurfaceBuffer"); @@ -195,6 +198,7 @@ Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) { mAcquiredBuffer->referenceFrame = true; status = outputSurfaceBuffer(); CHECK_STATUS("outputSurfaceBuffer"); +#endif } if (data->number_picture_data > 1) { -- cgit v1.2.3 From 8e2885633f3220923c7e181b2397daa6e537f34d Mon Sep 17 00:00:00 2001 From: xiao Date: Tue, 14 Feb 2012 11:29:30 +0800 Subject: VideoDecoderMPEG4: Support multiple frames in decode buffer BZ: 21585 Add mix handling for multiple frames in a decode buffer. Return the next frame timestamp, offset and DECODE_MULTIPLE_FRAME to OMX IL for handling. 
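Concretely, the parser records the size of the first frame it finds in the buffer, and the decoder reports DECODE_MULTIPLE_FRAME together with the offset and timestamp of the next frame so the client can resubmit the remainder. A hedged caller-side sketch, assuming the client advances the buffer itself (decoder, data, size and pts are illustrative names; hasNext, offSet and nextTimeStamp are the VideoDecodeBuffer fields added in the diff below):

    // Feed one input buffer that may carry several MPEG-4 frames,
    // resubmitting the unconsumed tail after each DECODE_MULTIPLE_FRAME.
    VideoDecodeBuffer b = {};
    b.data = data;                       // compressed input, may hold >1 frame
    b.size = size;
    b.timeStamp = pts;
    Decode_Status s = decoder->decode(&b);
    while (s == DECODE_MULTIPLE_FRAME && b.hasNext) {
        b.data += b.offSet;              // skip the frame already decoded
        b.size -= b.offSet;
        b.timeStamp = b.nextTimeStamp;   // timestamp reported by the parser
        s = decoder->decode(&b);
    }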
Signed-off-by: xiao Change-Id: Ic64e5c24cd5f72ab2a7007f8ab274af2993f8cb3 Reviewed-on: http://android.intel.com:8080/35189 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 1 + mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 7 +++ videodecoder/VideoDecoderBase.cpp | 1 + videodecoder/VideoDecoderDefs.h | 4 ++ videodecoder/VideoDecoderMPEG4.cpp | 81 +++++++++++++++++++-------- videodecoder/VideoDecoderMPEG4.h | 4 +- 6 files changed, 72 insertions(+), 26 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index 38e2a05..7037fd0 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -113,6 +113,7 @@ typedef struct _vbp_data_mp42 uint32 number_pictures; vbp_picture_data_mp42 *picture_data; + uint32 frameSize; // first frame size in the buffer; used when a buffer holds multiple frames } vbp_data_mp42; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index 249a9f8..ccc0ab5 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -632,6 +632,13 @@ void vbp_fill_iq_matrix_buffer(vbp_context *pcontext) void vbp_on_vop_mp42(vbp_context *pcontext, int list_index) { + if(list_index == 0) { + // for the first list item + viddec_pm_cxt_t *parent = pcontext->parser_cxt; + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + query_data->frameSize = parent->list.total_bytes; // record the first item's frame size + } + vbp_fill_codec_data(pcontext); vbp_fill_picture_param(pcontext, 1); vbp_fill_iq_matrix_buffer(pcontext); diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 1cf8e25..341a092 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -610,6 +610,7 @@ Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) { if (dropFrame) { // we are asked to drop this decoded picture + LOGW("Frame dropped"); releaseSurfaceBuffer(); goto exit; } diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index f4cea28..561a1aa 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -99,6 +99,9 @@ struct VideoDecodeBuffer { int64_t timeStamp; uint32_t flag; VideoFormatSpecificData *ext; + bool hasNext; // for multiple frames in a buffer + int64_t nextTimeStamp; // next frame's timestamp + int32_t offSet; // next frame's offset }; @@ -181,6 +184,7 @@ typedef enum { DECODE_SUCCESS = 1, DECODE_FORMAT_CHANGE = 2, DECODE_FRAME_DROPPED = 3, + DECODE_MULTIPLE_FRAME = 4, } VIDEO_DECODE_STATUS; typedef int32_t Decode_Status; diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index c77f24c..4bb73f4 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -125,14 +125,14 @@ Decode_Status VideoDecoderMPEG4::decodeFrame(VideoDecodeBuffer *buffer, vbp_data CHECK_STATUS("endDecodingFrame"); // start decoding a new frame - status = beginDecodingFrame(data); - if (status != DECODE_SUCCESS) { + status = beginDecodingFrame(data, buffer); + if (status != DECODE_SUCCESS && status != DECODE_MULTIPLE_FRAME) { endDecodingFrame(true); } CHECK_STATUS("beginDecodingFrame"); } else { - status = continueDecodingFrame(data); - if (status != DECODE_SUCCESS) { + status = continueDecodingFrame(data, buffer); + if (status != DECODE_SUCCESS && status !=
DECODE_MULTIPLE_FRAME) { endDecodingFrame(true); } CHECK_STATUS("continueDecodingFrame"); @@ -147,7 +147,8 @@ Decode_Status VideoDecoderMPEG4::decodeFrame(VideoDecodeBuffer *buffer, vbp_data return DECODE_SUCCESS; } -Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) { +Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer) { + Decode_Status status = DECODE_SUCCESS; vbp_picture_data_mp42 *picData = data->picture_data; VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param); @@ -223,14 +224,15 @@ Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) { } } // all sanity checks pass, continue decoding through continueDecodingFrame - status = continueDecodingFrame(data); + status = continueDecodingFrame(data, buffer); } return status; } -Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) { +Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer) { Decode_Status status = DECODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; + bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER; /* Packed Frame Assumption: @@ -271,28 +273,59 @@ Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) { // TODO: handle this case } if (mDecodingFrame) { - // this indicates the start of a new frame in the packed frame - // Update timestamp for P frame in the packed frame as timestamp here is for the B frame! - if (picParam->vop_time_increment_resolution) - { - uint64_t increment = mLastVOPTimeIncrement - picData->vop_time_increment + - picParam->vop_time_increment_resolution; - increment = increment % picParam->vop_time_increment_resolution; - // convert to nano-second - // TODO: unit of time stamp varies on different frame work - increment = increment * 1e9 / picParam->vop_time_increment_resolution; - mAcquiredBuffer->renderBuffer.timeStamp += increment; + if (codingType == MP4_VOP_TYPE_B){ + // this indicates the start of a new frame in the packed frame + // Update timestamp for P frame in the packed frame as timestamp here is for the B frame! + if (picParam->vop_time_increment_resolution){ + uint64_t increment = mLastVOPTimeIncrement - picData->vop_time_increment + + picParam->vop_time_increment_resolution; + increment = increment % picParam->vop_time_increment_resolution; + // convert to micro-second + // TODO: unit of time stamp varies on different frame work + increment = increment * 1e6 / picParam->vop_time_increment_resolution; + mAcquiredBuffer->renderBuffer.timeStamp += increment; + if (useGraphicBuffer){ + buffer->nextTimeStamp = mCurrentPTS; + mCurrentPTS = mAcquiredBuffer->renderBuffer.timeStamp; + } + } + } else { + // this indicates the start of a new frame in the packed frame. 
No B frame in the packet. + // Update the timestamp according to the increment + if (picParam->vop_time_increment_resolution){ + int64_t increment = picData->vop_time_increment - mLastVOPTimeIncrement + picParam->vop_time_increment_resolution; + increment = increment % picParam->vop_time_increment_resolution; + //convert to micro-second + increment = increment * 1e6 / picParam->vop_time_increment_resolution; + if (useGraphicBuffer) { + buffer->nextTimeStamp = mCurrentPTS + increment; + } + else { + mCurrentPTS += increment; + } + + } else { + if (useGraphicBuffer) { + buffer->nextTimeStamp = mCurrentPTS + 30000; + } + else { + mCurrentPTS += 30000; + } + } } endDecodingFrame(false); mExpectingNVOP = true; + if (codingType != MP4_VOP_TYPE_B) { + mExpectingNVOP = false; + } + if (useGraphicBuffer) { + buffer->hasNext = true; + buffer->offSet = data->frameSize; + VTRACE("Report OMX to handle for Multiple frame offset=%d time=%lld",data->frameSize,buffer->nextTimeStamp); + return DECODE_MULTIPLE_FRAME; + } } - if (mExpectingNVOP == true && codingType != MP4_VOP_TYPE_B) { - ETRACE("The second frame in the packed frame is not B frame."); - mExpectingNVOP = false; - // TODO: should be able to continue - return DECODE_FAIL; - } // acquire a new surface buffer status = acquireSurfaceBuffer(); CHECK_STATUS("acquireSurfaceBuffer"); diff --git a/videodecoder/VideoDecoderMPEG4.h b/videodecoder/VideoDecoderMPEG4.h index 300b583..bfa8ca2 100644 --- a/videodecoder/VideoDecoderMPEG4.h +++ b/videodecoder/VideoDecoderMPEG4.h @@ -40,8 +40,8 @@ public: private: Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mp42 *data); - Decode_Status beginDecodingFrame(vbp_data_mp42 *data); - Decode_Status continueDecodingFrame(vbp_data_mp42 *data); + Decode_Status beginDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer); + Decode_Status continueDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer); Decode_Status decodeSlice(vbp_data_mp42 *data, vbp_picture_data_mp42 *picData); Decode_Status setReference(VAPictureParameterBufferMPEG4 *picParam); Decode_Status startVA(vbp_data_mp42 *data); -- cgit v1.2.3 From 7f3b1d7673a673506b1f7bf8f4d6ee55c9eec165 Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Tue, 21 Feb 2012 11:32:27 -0500 Subject: libmix: code cleanup along with a new API to support one-queue buffer management BZ: 24552 Code cleanup along with a new API to support one-queue buffer management (from Andy). Change-Id: I1424915b52da05e38a72f2c04adc88cdc988f14d Reviewed-on: http://android.intel.com:8080/36031 Reviewed-by: Chen, Weian Reviewed-by: Qiu, Junhai Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 134 +++++++++++++++++------------------ videodecoder/VideoDecoderBase.h | 6 +- videodecoder/VideoDecoderInterface.h | 4 +- 3 files changed, 70 insertions(+), 74 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 341a092..bb7dce4 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -73,12 +73,12 @@ VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type) mNextOutputPOC(MINIMUM_POC), mParserType(type), mParserHandle(NULL), - initialized(false), - mSignalBufferSize(0){ + mInitialized(false), + mSignalBufferSize(0) { memset(&mVideoFormatInfo, 0, sizeof(VideoFormatInfo)); memset(&mConfigBuffer, 0, sizeof(mConfigBuffer)); - for(int i =0 ; i < MAX_GRAPHIC_NUM ; i++) { + for (int i =0; i < MAX_GRAPHIC_NUM; i++) {
mSignalBufferPre[i] = NULL; } pthread_mutex_init(&mLock, NULL); @@ -199,8 +199,8 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { if (mOutputHead == NULL) { mOutputTail = NULL; } - if(useGraphicBuffer ) { - vaSyncSurface(mVADisplay,outputByPos->renderBuffer.surface); + if (useGraphicBuffer ) { + vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface); } return &(outputByPos->renderBuffer); } @@ -246,8 +246,8 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { mOutputTail = NULL; } } - if(useGraphicBuffer ) { - vaSyncSurface(mVADisplay,output->renderBuffer.surface); + if (useGraphicBuffer ) { + vaSyncSurface(mVADisplay, output->renderBuffer.surface); } //VTRACE("Output POC %d for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6); return &(output->renderBuffer); @@ -439,6 +439,30 @@ VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) { } #endif +bool VideoDecoderBase::checkBufferAvail(void) { + if (!mInitialized) { + if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) == 0) { + return true; + } + for (int i = 0; i < MAX_GRAPHIC_NUM; i++) { + if (mSignalBufferPre[i] != NULL) { + return true; + } + } + return false; + } + // check whether there is buffer available for decoding + // TODO: check frame being referenced for frame skipping + VideoSurfaceBuffer *buffer = NULL; + for (int32_t i = 0; i < mNumSurfaces; i++) { + buffer = mSurfaceBuffers + i; + if (buffer->asReferernce == false && buffer->renderBuffer.renderDone == true) { + return true; + } + } + return false; +} + Decode_Status VideoDecoderBase::acquireSurfaceBuffer(void) { if (mVAStarted == false) { return DECODE_FAIL; @@ -500,7 +524,7 @@ Decode_Status VideoDecoderBase::acquireSurfaceBuffer(void) { mAcquiredBuffer->renderBuffer.timeStamp = INVALID_PTS; mAcquiredBuffer->renderBuffer.display = mVADisplay; mAcquiredBuffer->renderBuffer.flag = 0; - mAcquiredBuffer->renderBuffer.renderDone = true; + mAcquiredBuffer->renderBuffer.renderDone = false; mAcquiredBuffer->asReferernce = false; return DECODE_SUCCESS; @@ -632,7 +656,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { return DECODE_SUCCESS; } - if(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){ + if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){ numSurface = mConfigBuffer.surfaceNumber; } // TODO: validate profile @@ -642,7 +666,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { if (mConfigBuffer.flag & HAS_MINIMUM_SURFACE_NUMBER) { if (numSurface < mConfigBuffer.surfaceNumber) { - LOGW("surface to allocated %d is less than minimum number required %d", + WTRACE("surface to allocated %d is less than minimum number required %d", numSurface, mConfigBuffer.surfaceNumber); numSurface = mConfigBuffer.surfaceNumber; } @@ -692,9 +716,9 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { int32_t format = VA_RT_FORMAT_YUV420; if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) { format |= VA_RT_FORMAT_PROTECTED; - LOGW("Surface is protected."); + WTRACE("Surface is protected."); } - if(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) { + if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) { mVASurfaceAttrib = new VASurfaceAttrib; if (mVASurfaceAttrib == NULL) { return DECODE_MEMORY_FAIL; @@ -708,19 +732,19 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { return DECODE_MEMORY_FAIL; } mVAExternalMemoryBuffers->count = mNumSurfaces; - 
mVAExternalMemoryBuffers->luma_stride= mConfigBuffer.graphicBufferStride; + mVAExternalMemoryBuffers->luma_stride = mConfigBuffer.graphicBufferStride; mVAExternalMemoryBuffers->pixel_format = mConfigBuffer.graphicBufferColorFormat; mVAExternalMemoryBuffers->native_window = mConfigBuffer.nativeWindow; mVAExternalMemoryBuffers->width = mVideoFormatInfo.width; mVAExternalMemoryBuffers->height = mVideoFormatInfo.height; mVAExternalMemoryBuffers->type = VAExternalMemoryAndroidGrallocBuffer; - for(int i=0; i < mNumSurfaces; i++) { + for (int i = 0; i < mNumSurfaces; i++) { mVAExternalMemoryBuffers->buffers[i] = (unsigned int )mConfigBuffer.graphicBufferHandler[i]; } mVASurfaceAttrib->flags = VA_SURFACE_ATTRIB_SETTABLE; mVASurfaceAttrib->type = VASurfaceAttribNativeHandle; - mVASurfaceAttrib->value.type =VAGenericValueTypePointer; - mVASurfaceAttrib->value.value.p_val= (void *)mVAExternalMemoryBuffers; + mVASurfaceAttrib->value.type = VAGenericValueTypePointer; + mVASurfaceAttrib->value.value.p_val = (void *)mVAExternalMemoryBuffers; vaStatus = vaCreateSurfaces( mVADisplay, mVideoFormatInfo.width, @@ -797,16 +821,16 @@ Decode_Status VideoDecoderBase::terminateVA(void) { } - if(mVAExternalMemoryBuffers) { - if(mVAExternalMemoryBuffers->buffers) { + if (mVAExternalMemoryBuffers) { + if (mVAExternalMemoryBuffers->buffers) { free(mVAExternalMemoryBuffers->buffers); - mVAExternalMemoryBuffers->buffers= NULL; + mVAExternalMemoryBuffers->buffers = NULL; } delete mVAExternalMemoryBuffers; - mVAExternalMemoryBuffers = NULL; + mVAExternalMemoryBuffers = NULL; } - if(mVASurfaceAttrib) { + if (mVASurfaceAttrib) { delete mVASurfaceAttrib; mVASurfaceAttrib = NULL; } @@ -845,6 +869,7 @@ Decode_Status VideoDecoderBase::terminateVA(void) { } mVAStarted = false; + mInitialized = false; return DECODE_SUCCESS; } @@ -1022,9 +1047,9 @@ Decode_Status VideoDecoderBase::getRawDataFromSurface(void) { void VideoDecoderBase::initSurfaceBuffer(bool reset) { bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER; - if(useGraphicBuffer && reset){ + if (useGraphicBuffer && reset) { pthread_mutex_lock(&mLock); - } + } for (int32_t i = 0; i < mNumSurfaces; i++) { mSurfaceBuffers[i].renderBuffer.display = mVADisplay; mSurfaceBuffers[i].renderBuffer.surface = VA_INVALID_SURFACE; // set in acquireSurfaceBuffer @@ -1040,13 +1065,13 @@ void VideoDecoderBase::initSurfaceBuffer(bool reset) { mSurfaceBuffers[i].mappedData = NULL; } if (useGraphicBuffer){ - if(reset){ + if (reset) { mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = mConfigBuffer.graphicBufferHandler[i]; mSurfaceBuffers[i].renderBuffer.renderDone = false; //default false - for(int j =0; j < mSignalBufferSize; j++){ - if(mSignalBufferPre[j] != NULL && mSignalBufferPre[j] == mSurfaceBuffers[i].renderBuffer.graphicBufferHandle){ + for (int j = 0; j < mSignalBufferSize; j++) { + if(mSignalBufferPre[j] != NULL && mSignalBufferPre[j] == mSurfaceBuffers[i].renderBuffer.graphicBufferHandle) { mSurfaceBuffers[i].renderBuffer.renderDone = true; - VTRACE("initSurfaceBuffer set renderDone = true index=%d",i); + VTRACE("initSurfaceBuffer set renderDone = true index = %d", i); mSignalBufferPre[j] = NULL; break; } @@ -1057,40 +1082,38 @@ void VideoDecoderBase::initSurfaceBuffer(bool reset) { } } else { - mSurfaceBuffers[i].renderBuffer.graphicBufferHandle= NULL; + mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = NULL; mSurfaceBuffers[i].renderBuffer.renderDone = true; } mSurfaceBuffers[i].renderBuffer.acquirePos = i; } - if(useGraphicBuffer && reset){ - 
initialized = true; + if (useGraphicBuffer && reset) { + mInitialized = true; mSignalBufferSize = 0; pthread_mutex_unlock(&mLock); } } -Decode_Status VideoDecoderBase::SignalRenderDoneFlag(void * graphichandler) { - +Decode_Status VideoDecoderBase::signalRenderDone(void * graphichandler) { if (graphichandler == NULL) { return DECODE_SUCCESS; } pthread_mutex_lock(&mLock); int i = 0; - if(!initialized){ - mSignalBufferPre[mSignalBufferSize++] = graphichandler; - VTRACE("SignalRenderDoneFlag initialized = false graphichandler = %p, mSignalBufferSize=%d",graphichandler,mSignalBufferSize); - if(mSignalBufferSize > MAX_GRAPHIC_NUM) - return DECODE_INVALID_DATA; - } - else{ + if (!mInitialized) { + mSignalBufferPre[mSignalBufferSize++] = graphichandler; + VTRACE("SignalRenderDoneFlag mInitialized = false graphichandler = %p, mSignalBufferSize = %d", graphichandler, mSignalBufferSize); + if (mSignalBufferSize > MAX_GRAPHIC_NUM) + return DECODE_INVALID_DATA; + } else { if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) { return DECODE_SUCCESS; } for (i = 0; i < mNumSurfaces; i++) { - if (mSurfaceBuffers[i].renderBuffer.graphicBufferHandle== graphichandler) { - mSurfaceBuffers[i].renderBuffer.renderDone = true; - VTRACE("SignalRenderDoneFlag initialized = true index =%d",i); + if (mSurfaceBuffers[i].renderBuffer.graphicBufferHandle == graphichandler) { + mSurfaceBuffers[i].renderBuffer.renderDone = true; + VTRACE("SignalRenderDoneFlag mInitialized = true index = %d", i); break; } } @@ -1101,31 +1124,4 @@ Decode_Status VideoDecoderBase::SignalRenderDoneFlag(void * graphichandler) { } -Decode_Status VideoDecoderBase::GetNativeBufferStatus(void * graphichandler, bool* used) { - bool inuse = false; - if(!initialized) { - *used == false; - return DECODE_NOT_STARTED; - } - - for (int32_t i = 0; i < mNumSurfaces; i++) { - if (mSurfaceBuffers[i].renderBuffer.graphicBufferHandle == graphichandler) { - if (mSurfaceBuffers[i].asReferernce == true ||mSurfaceBuffers + i == mAcquiredBuffer ) { - inuse = true; - } - break; - } - } - VideoSurfaceBuffer *p = mOutputHead; - while (p!=NULL) { - if(p ->renderBuffer.graphicBufferHandle == graphichandler) { - inuse = true; - break; - } - p = p->next; - } - - *used = inuse; - return DECODE_SUCCESS; -} diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 8cf4579..5e09a14 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -50,9 +50,9 @@ public: //virtual Decode_Status decode(VideoDecodeBuffer *buffer); virtual void flush(void); virtual const VideoRenderBuffer* getOutput(bool draining = false); - virtual Decode_Status SignalRenderDoneFlag(void * graphichandler); - virtual Decode_Status GetNativeBufferStatus(void * graphichandler, bool* used); + virtual Decode_Status signalRenderDone(void * graphichandler); virtual const VideoFormatInfo* getFormatInfo(void); + virtual bool checkBufferAvail(); protected: // each acquireSurfaceBuffer must be followed by a corresponding outputSurfaceBuffer or releaseSurfaceBuffer. 
@@ -81,7 +81,7 @@ private: Decode_Status getRawDataFromSurface(void); void initSurfaceBuffer(bool reset); - bool initialized; + bool mInitialized; pthread_mutex_t mLock; diff --git a/videodecoder/VideoDecoderInterface.h b/videodecoder/VideoDecoderInterface.h index 6b54deb..e3cf728 100644 --- a/videodecoder/VideoDecoderInterface.h +++ b/videodecoder/VideoDecoderInterface.h @@ -37,8 +37,8 @@ public: virtual Decode_Status decode(VideoDecodeBuffer *buffer) = 0; virtual const VideoRenderBuffer* getOutput(bool draining = false) = 0; virtual const VideoFormatInfo* getFormatInfo(void) = 0; - virtual Decode_Status SignalRenderDoneFlag(void * graphichandler) = 0; - virtual Decode_Status GetNativeBufferStatus(void * graphichandler, bool* used) = 0; + virtual Decode_Status signalRenderDone(void * graphichandler) = 0; + virtual bool checkBufferAvail() = 0; }; -- cgit v1.2.3 From c5d8d50c943a552170b5f96cc5084c1c020c024f Mon Sep 17 00:00:00 2001 From: nguo Date: Thu, 23 Feb 2012 17:06:41 +0800 Subject: libmix: Critical klocwork issues BZ: 23921 added code for double check Signed-off-by: nguo Change-Id: Ibc43039b29efe8d5c39ffc180da849d6d9387336 Reviewed-on: http://android.intel.com:8080/36376 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- mix_common/src/j_slist.cpp | 11 ++++-- .../fw/codecs/h264/parser/h264parse_dpb.c | 23 +++++++----- .../fw/codecs/h264/parser/h264parse_sei.c | 12 ++++-- .../viddec_fw/fw/codecs/h264/parser/h264parse_sh.c | 43 ++++++++++++---------- .../fw/codecs/mp4/parser/viddec_fw_mp4_workload.c | 9 +++-- mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c | 11 ++++-- mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c | 11 ++++-- 7 files changed, 72 insertions(+), 48 deletions(-) diff --git a/mix_common/src/j_slist.cpp b/mix_common/src/j_slist.cpp index 26435d8..a794211 100644 --- a/mix_common/src/j_slist.cpp +++ b/mix_common/src/j_slist.cpp @@ -53,10 +53,13 @@ JSList* j_slist_remove(JSList *list, void* data) } if (traverse_item != NULL) { - assert(prev_item != NULL); // as 1st element is processed @ beginning - prev_item->next = traverse_item->next; - traverse_item->next = NULL; - free(traverse_item); + if (prev_item != NULL) { + assert(prev_item != NULL); // as 1st element is processed @ beginning + prev_item->next = traverse_item->next; + traverse_item->next = NULL; + free(traverse_item); + } + } return list; diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c index 55047f1..704c180 100755 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c @@ -1064,9 +1064,11 @@ static void linked_list_reorder (struct linked_list_t *lp, int32_t list_value) lvp = lvp->next; } lvp->value = list_value; // force end matches - - // remove lvp from the list - lvp_prev->next = lvp->next; + if (lvp_prev != NULL) + { + // remove lvp from the list + lvp_prev->next = lvp->next; + } if (lvp==lp->end) lp->end = lvp_prev; // insert lvp in front of lp->entry @@ -2037,13 +2039,16 @@ void h264_dpb_mm_update_max_long_term_frame_idx(h264_DecodedPictureBuffer *p_dpb for (idx = 0; idx < temp; idx++) { idx2 = idx - removed_count; - h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx2]); - - if (p_dpb->active_fs->long_term_frame_idx > p_dpb->max_long_term_pic_idx) + if (idx2 < 16 && idx2 > 0) { - removed_count++; - h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx2]); - h264_dpb_remove_ltref_list(p_dpb, 
p_dpb->fs_ltref_idc[idx2]); + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx2]); + + if (p_dpb->active_fs->long_term_frame_idx > p_dpb->max_long_term_pic_idx) + { + removed_count++; + h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx2]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx2]); + } } } return; diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c index 43655dd..f70e64c 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c @@ -379,8 +379,10 @@ h264_Status h264_sei_userdata_reg(void *parent,h264_Info* pInfo, uint32_t payloa { viddec_pm_get_bits(parent, (uint32_t *)&byte, 8); - - wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; + if (wi.user_data.size < 11) + { + wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; + } wi.user_data.size++; if (11 == wi.user_data.size) @@ -439,8 +441,10 @@ h264_Status h264_sei_userdata_unreg(void *parent, h264_Info* pInfo, uint32_t pay { viddec_pm_get_bits(parent, (uint32_t *)&byte, 8); - - wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; + if (wi.user_data.size < 11) + { + wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; + } wi.user_data.size++; if (11 == wi.user_data.size) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c index 189e57b..9db8cee 100755 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c @@ -787,33 +787,36 @@ h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent, h264_Info* pInfo,h264_S { do { - SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = h264_GetVLCElement(parent, pInfo, false); - if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3)) + if (i < NUM_MMCO_OPERATIONS) { - SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = h264_GetVLCElement(parent, pInfo, false); - } + SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = h264_GetVLCElement(parent, pInfo, false); + if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3)) + { + SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + } - if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2) - { - SliceHeader->sh_dec_refpic.long_term_pic_num[i] = h264_GetVLCElement(parent, pInfo, false); - } + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2) + { + SliceHeader->sh_dec_refpic.long_term_pic_num[i] = h264_GetVLCElement(parent, pInfo, false); + } - if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6)) - { - SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = h264_GetVLCElement(parent, pInfo, false); - } + if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6)) + { + SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = h264_GetVLCElement(parent, pInfo, false); + } - if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4) - { - 
SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = h264_GetVLCElement(parent, pInfo, false); - } + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4) + { + SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = h264_GetVLCElement(parent, pInfo, false); + } - if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5) - { - pInfo->img.curr_has_mmco_5 = 1; + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5) + { + pInfo->img.curr_has_mmco_5 = 1; + } } - if (i>NUM_MMCO_OPERATIONS) { + if (i >= NUM_MMCO_OPERATIONS) { return H264_STATUS_ERROR; } diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c index eaed197..7c7eaa8 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c @@ -181,9 +181,12 @@ uint32_t viddec_fw_mp4_insert_sprite_workitem(void *parent, viddec_mp4_parser_t { if (warp_index < vol->sprite_info.no_of_sprite_warping_points) { - viddec_fw_mp4_set_warping_point_index(sprite_info.warping_mv_code[j], warp_index); - viddec_fw_mp4_set_warping_mv_code_du(sprite_info.warping_mv_code[j], vop->warping_mv_code_du[warp_index]); - viddec_fw_mp4_set_warping_mv_code_dv(sprite_info.warping_mv_code[j], vop->warping_mv_code_dv[warp_index]); + if (warp_index < 4) + { + viddec_fw_mp4_set_warping_point_index(sprite_info.warping_mv_code[j], warp_index); + viddec_fw_mp4_set_warping_mv_code_du(sprite_info.warping_mv_code[j], vop->warping_mv_code_du[warp_index]); + viddec_fw_mp4_set_warping_mv_code_dv(sprite_info.warping_mv_code[j], vop->warping_mv_code_dv[warp_index]); + } } else { diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c index d7fa5dd..f33961a 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c @@ -142,10 +142,13 @@ void viddec_pm_add_tags_to_pendinglist(viddec_pm_cxt_t *cxt, uint32_t ignore_fir } while ( (index < list->num_items) && (list->data[index].edpos <= (uint32_t)list->total_bytes)) - {/* walk through consumed buffers and buffer id's in pending list */ - pend->pending_tags[t_index] = list->sc_ibuf[index].id; - index++; - t_index++; + { + if (t_index < MAX_IBUFS_PER_SC) + { /* walk through consumed buffers and buffer id's in pending list */ + pend->pending_tags[t_index] = list->sc_ibuf[index].id; + index++; + t_index++; + } } if ( (index < list->num_items) && (list->data[index].stpos < (uint32_t)list->total_bytes)) {/* If last item is partially consumed still add it to pending tags since tag association is based on start of ES buffer */ diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c index 1561449..1641c6c 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c @@ -200,10 +200,13 @@ void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint list->start_offset = byte_pos - list->data[items].stpos; while (items < list->num_items) { - es_buf = &(list->sc_ibuf[items]); - list->sc_ibuf[index] = *es_buf; - index++; - items++; + if (index < MAX_IBUFS_PER_SC) + { + es_buf = &(list->sc_ibuf[items]); + list->sc_ibuf[index] = *es_buf; + index++; + items++; + } } list->num_items = index; } -- cgit v1.2.3 From 8346d8f8f62d0d6980e912524300290d4820fb01 Mon Sep 17 00:00:00 2001 
From: Miguel Verdu Date: Wed, 29 Feb 2012 14:36:20 +0200 Subject: libmix: decoder: check driver render status before acquiring output surfaces BZ: 25331 The video decoder now checks the driver's rendering status before acquiring output surfaces. The normal renderDone flag is used to track the surface status from the OMX client's point of view. There are cases where the Android rendering pipeline thinks a frame is free to be reused, but that's not the case; the video driver knows better, which is why we check with it. Among those cases are the Widi and HDMI extended video modes. Change-Id: I89db1ed8ff6244bffe3bec3bcc6867937bd920de Signed-off-by: Miguel Verdu Reviewed-on: http://android.intel.com:8080/37262 Reviewed-by: Qiu, Junhai Reviewed-by: Tao, Tao Q Reviewed-by: Chen, Weian Tested-by: Sun, Hang L Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 27 +++++++++++++++++++++++++-- videodecoder/VideoDecoderBase.h | 1 + videodecoder/VideoDecoderDefs.h | 1 + 3 files changed, 27 insertions(+), 2 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index bb7dce4..68a83d0 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -456,7 +456,11 @@ bool VideoDecoderBase::checkBufferAvail(void) { VideoSurfaceBuffer *buffer = NULL; for (int32_t i = 0; i < mNumSurfaces; i++) { buffer = mSurfaceBuffers + i; - if (buffer->asReferernce == false && buffer->renderBuffer.renderDone == true) { + + querySurfaceRenderStatus(buffer); + if (buffer->asReferernce == false && + buffer->renderBuffer.renderDone == true && + buffer->renderBuffer.driverRenderDone == true) { return true; } } @@ -476,9 +480,13 @@ Decode_Status VideoDecoderBase::acquireSurfaceBuffer(void) { int nextAcquire = mSurfaceAcquirePos; VideoSurfaceBuffer *acquiredBuffer = NULL; bool acquired = false; + while (acquired == false) { acquiredBuffer = mSurfaceBuffers + nextAcquire; - if (acquiredBuffer->asReferernce == false && acquiredBuffer->renderBuffer.renderDone == true) { + + querySurfaceRenderStatus(acquiredBuffer); + + if (acquiredBuffer->asReferernce == false && acquiredBuffer->renderBuffer.renderDone == true && acquiredBuffer->renderBuffer.driverRenderDone == true) { // this is a potential buffer for acquisition.
Check if it is referenced by other surface for frame skipping VideoSurfaceBuffer *temp; acquired = true; @@ -1124,4 +1132,19 @@ Decode_Status VideoDecoderBase::signalRenderDone(void * graphichandler) { } +void VideoDecoderBase::querySurfaceRenderStatus(VideoSurfaceBuffer* surface) { + VASurfaceStatus surfStat = VASurfaceReady; + VAStatus vaStat = VA_STATUS_SUCCESS; + + surface->renderBuffer.driverRenderDone = true; + if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) { + + vaStat = vaQuerySurfaceStatus(mVADisplay, surface->renderBuffer.surface, &surfStat); + + if ((vaStat == VA_STATUS_SUCCESS) && (surfStat != VASurfaceReady)) + surface->renderBuffer.driverRenderDone = false; + + } + +} diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 5e09a14..83b868f 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -144,6 +144,7 @@ protected: void ManageReference(bool enable) {mManageReference = enable;} void setOutputMethod(OUTPUT_METHOD method) {mOutputMethod = method;} void setOutputWindowSize(int32_t size) {mOutputWindowSize = size;} + void querySurfaceRenderStatus(VideoSurfaceBuffer* surface); }; diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 561a1aa..219a559 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -132,6 +132,7 @@ struct VideoRenderBuffer { void * graphicBufferHandle; int32_t acquirePos; //the acquirepos in graphichandle array uint32_t flag; + mutable volatile bool driverRenderDone; VideoFrameRawData *rawData; }; -- cgit v1.2.3 From a2cdd9bcdd6109c55ca226775f5c79927d59f43f Mon Sep 17 00:00:00 2001 From: xiao Date: Tue, 6 Mar 2012 09:30:43 +0800 Subject: Disable frame dropped log when mix flushing BZ: 25719 Change LOGW to VTRACE Change-Id: I5348cd6ec0deda12a769c27b36a1382e2387bf66 Signed-off-by: xiao Reviewed-on: http://android.intel.com:8080/37618 Reviewed-by: Liang, Dan Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 68a83d0..22d32b0 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -642,7 +642,7 @@ Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) { if (dropFrame) { // we are asked to drop this decoded picture - LOGW("Frame dropped"); + VTRACE("Frame dropped in endDecodingFrame"); releaseSurfaceBuffer(); goto exit; } -- cgit v1.2.3 From e9295dd53d598e4c2b0b5be181b69f377ddb4924 Mon Sep 17 00:00:00 2001 From: xiao Date: Thu, 8 Mar 2012 13:40:24 +0800 Subject: Refine multiple frame decoder support code BZ: 26883 Add PACKED_FRAME_TYPE for VideoExtensionBuffer type. 
Move next frame info to PackedFrameData(from Andy) Change-Id: I0964f122bb0eecc5f56355cdb47bd7561cc19251 Signed-off-by: xiao Reviewed-on: http://android.intel.com:8080/37901 Reviewed-by: Qiu, Junhai Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderDefs.h | 26 ++++++++++++++++---------- videodecoder/VideoDecoderMPEG4.cpp | 36 +++++++++++++++++++++++------------- videodecoder/VideoDecoderMPEG4.h | 6 ++++-- 3 files changed, 43 insertions(+), 25 deletions(-) diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 219a559..c13966e 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -30,12 +30,16 @@ // format specific data, for future extension. -struct VideoFormatSpecificData { - int32_t formatType; - int32_t formatSize; - uint8_t *formatData; +struct VideoExtensionBuffer { + int32_t extType; + int32_t extSize; + uint8_t *extData; }; +typedef enum { + PACKED_FRAME_TYPE, +} VIDEO_EXTENSION_TYPE; + struct VideoFrameRawData { int32_t width; int32_t height; @@ -47,6 +51,11 @@ struct VideoFrameRawData { bool own; // own data or derived from surface. If true, the library will release the memory during clearnup }; +struct PackedFrameData { + int64_t timestamp; + int32_t offSet; +}; + // flags for VideoDecodeBuffer, VideoConfigBuffer and VideoRenderBuffer typedef enum { // indicates if sample has discontinuity in time stamp (happen after seeking usually) @@ -98,10 +107,7 @@ struct VideoDecodeBuffer { int32_t size; int64_t timeStamp; uint32_t flag; - VideoFormatSpecificData *ext; - bool hasNext; // for multiple frame in a buffer - int64_t nextTimeStamp; // next frame timestamp - int32_t offSet; // next frame offset + VideoExtensionBuffer *ext; }; @@ -118,7 +124,7 @@ struct VideoConfigBuffer { void *graphicBufferHandler[ MAX_GRAPHIC_NUM ]; uint32_t graphicBufferStride; uint32_t graphicBufferColorFormat; - VideoFormatSpecificData *ext; + VideoExtensionBuffer *ext; void* nativeWindow; }; @@ -166,7 +172,7 @@ struct VideoFormatInfo { int32_t bitrate; int32_t framerateNom; int32_t framerateDenom; - VideoFormatSpecificData *ext; + VideoExtensionBuffer *ext; }; // TODO: categorize the follow errors as fatal and non-fatal. 
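For illustration, a sketch of how a client could pick up the packed-frame information through the extension types defined in the hunk above; resubmit() is a hypothetical helper, not part of this patch:

    // Hypothetical client-side use of the new extension (sketch only):
    if (status == DECODE_MULTIPLE_FRAME && buffer->ext != NULL &&
            buffer->ext->extType == PACKED_FRAME_TYPE) {
        PackedFrameData *next = (PackedFrameData *)buffer->ext->extData;
        // next->offSet is the byte offset of the second frame in this input
        // buffer; next->timestamp is the timestamp to attach when resubmitting.
        resubmit(buffer->data + next->offSet, buffer->size - next->offSet,
                 next->timestamp);
    }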
diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index 4bb73f4..d8243be 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -78,6 +78,7 @@ Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) { } else { mIsSyncFrame = false; } + buffer->ext = NULL; status = VideoDecoderBase::parseBuffer( buffer->data, buffer->size, @@ -125,14 +126,24 @@ Decode_Status VideoDecoderMPEG4::decodeFrame(VideoDecodeBuffer *buffer, vbp_data CHECK_STATUS("endDecodingFrame"); // start decoding a new frame - status = beginDecodingFrame(data, buffer); - if (status != DECODE_SUCCESS && status != DECODE_MULTIPLE_FRAME) { + status = beginDecodingFrame(data); + if (status == DECODE_MULTIPLE_FRAME) { + buffer->ext = &mExtensionBuffer; + mExtensionBuffer.extType = PACKED_FRAME_TYPE; + mExtensionBuffer.extSize = sizeof(mPackedFrame); + mExtensionBuffer.extData = (uint8_t*)&mPackedFrame; + } else if (status != DECODE_SUCCESS) { endDecodingFrame(true); } CHECK_STATUS("beginDecodingFrame"); } else { - status = continueDecodingFrame(data, buffer); - if (status != DECODE_SUCCESS && status != DECODE_MULTIPLE_FRAME) { + status = continueDecodingFrame(data); + if (status == DECODE_MULTIPLE_FRAME) { + buffer->ext = &mExtensionBuffer; + mExtensionBuffer.extType = PACKED_FRAME_TYPE; + mExtensionBuffer.extSize = sizeof(mPackedFrame); + mExtensionBuffer.extData = (uint8_t*)&mPackedFrame; + } else if (status != DECODE_SUCCESS) { endDecodingFrame(true); } CHECK_STATUS("continueDecodingFrame"); @@ -147,7 +158,7 @@ Decode_Status VideoDecoderMPEG4::decodeFrame(VideoDecodeBuffer *buffer, vbp_data return DECODE_SUCCESS; } -Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer) { +Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) { Decode_Status status = DECODE_SUCCESS; vbp_picture_data_mp42 *picData = data->picture_data; @@ -224,12 +235,12 @@ Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data, VideoDe } } // all sanity checks pass, continue decoding through continueDecodingFrame - status = continueDecodingFrame(data, buffer); + status = continueDecodingFrame(data); } return status; } -Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer) { +Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) { Decode_Status status = DECODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER; @@ -285,7 +296,7 @@ Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data, Vide increment = increment * 1e6 / picParam->vop_time_increment_resolution; mAcquiredBuffer->renderBuffer.timeStamp += increment; if (useGraphicBuffer){ - buffer->nextTimeStamp = mCurrentPTS; + mPackedFrame.timestamp = mCurrentPTS; mCurrentPTS = mAcquiredBuffer->renderBuffer.timeStamp; } } @@ -298,7 +309,7 @@ Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data, Vide //convert to micro-second increment = increment * 1e6 / picParam->vop_time_increment_resolution; if (useGraphicBuffer) { - buffer->nextTimeStamp = mCurrentPTS + increment; + mPackedFrame.timestamp = mCurrentPTS + increment; } else { mCurrentPTS += increment; @@ -306,7 +317,7 @@ Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data, Vide } else { if (useGraphicBuffer) { - buffer->nextTimeStamp = mCurrentPTS + 30000; + mPackedFrame.timestamp = mCurrentPTS + 30000; 
} else { mCurrentPTS += 30000; @@ -319,9 +330,8 @@ Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data, Vide mExpectingNVOP = false; } if (useGraphicBuffer) { - buffer->hasNext = true; - buffer->offSet = data->frameSize; - VTRACE("Report OMX to handle for Multiple frame offset=%d time=%lld",data->frameSize,buffer->nextTimeStamp); + mPackedFrame.offSet = data->frameSize; + VTRACE("Report OMX to handle for Multiple frame offset=%d time=%lld",data->frameSize,mPackedFrame.timestamp); return DECODE_MULTIPLE_FRAME; } } diff --git a/videodecoder/VideoDecoderMPEG4.h b/videodecoder/VideoDecoderMPEG4.h index bfa8ca2..234eaac 100644 --- a/videodecoder/VideoDecoderMPEG4.h +++ b/videodecoder/VideoDecoderMPEG4.h @@ -40,8 +40,8 @@ public: private: Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mp42 *data); - Decode_Status beginDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer); - Decode_Status continueDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer); + Decode_Status beginDecodingFrame(vbp_data_mp42 *data); + Decode_Status continueDecodingFrame(vbp_data_mp42 *data); Decode_Status decodeSlice(vbp_data_mp42 *data, vbp_picture_data_mp42 *picData); Decode_Status setReference(VAPictureParameterBufferMPEG4 *picParam); Decode_Status startVA(vbp_data_mp42 *data); @@ -65,6 +65,8 @@ private: bool mSendIQMatrixBuf; // indicate if iq_matrix_buffer is sent to driver int32_t mLastVOPCodingType; bool mIsSyncFrame; // indicate if it is SyncFrame in container + VideoExtensionBuffer mExtensionBuffer; + PackedFrameData mPackedFrame; }; -- cgit v1.2.3 From 8b1c26c4c63e2971c86154a39436cc7c9a1d3dc3 Mon Sep 17 00:00:00 2001 From: xiao Date: Fri, 24 Feb 2012 11:43:33 +0800 Subject: Set video rotation info to VA BZ: 23620 Pass down video clip rotation to psb_video in order to implement hw rotation feature. 
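The mechanism, visible in the diff that follows, is a VA display attribute set before decoding starts. As a standalone sketch of the same idea, using only standard libva calls (the helper name is an assumption, not code from this patch):

    // Minimal sketch: pass a rotation angle to the driver via libva,
    // mirroring what setupVA() does in the diff below.
    static VAStatus setRotation(VADisplay dpy, uint32_t degrees) {
        VADisplayAttribute rotate;
        rotate.type = VADisplayAttribRotation;
        switch (degrees) {
            case 90:  rotate.value = VA_ROTATION_90;  break;
            case 180: rotate.value = VA_ROTATION_180; break;
            case 270: rotate.value = VA_ROTATION_270; break;
            default:  rotate.value = VA_ROTATION_NONE; break;
        }
        return vaSetDisplayAttributes(dpy, &rotate, 1);
    }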
Change-Id: I25f66ae17ea746ac46e7b577bf1bdf9b0467b257 Signed-off-by: xiao Reviewed-on: http://android.intel.com:8080/36474 Reviewed-by: Hu, Jason Reviewed-by: Liang, Dan Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 14 ++++++++++++++ videodecoder/VideoDecoderDefs.h | 1 + 2 files changed, 15 insertions(+) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 22d32b0..60e5a72 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -806,6 +806,20 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { CHECK_STATUS("mapSurface") } + VADisplayAttribute rotate; + rotate.type = VADisplayAttribRotation; + rotate.value = VA_ROTATION_NONE; + if (mConfigBuffer.rotationDegrees == 0) + rotate.value = VA_ROTATION_NONE; + else if (mConfigBuffer.rotationDegrees == 90) + rotate.value = VA_ROTATION_90; + else if (mConfigBuffer.rotationDegrees == 180) + rotate.value = VA_ROTATION_180; + else if (mConfigBuffer.rotationDegrees == 270) + rotate.value = VA_ROTATION_270; + + vaStatus = vaSetDisplayAttributes(mVADisplay, &rotate, 1); + mVAStarted = true; return DECODE_SUCCESS; } diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index c13966e..1eed64b 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -126,6 +126,7 @@ struct VideoConfigBuffer { uint32_t graphicBufferColorFormat; VideoExtensionBuffer *ext; void* nativeWindow; + uint32_t rotationDegrees; }; struct VideoRenderBuffer { -- cgit v1.2.3 From 713e237082c7d13d0bde789c1151732ea7c358b2 Mon Sep 17 00:00:00 2001 From: hding3 Date: Tue, 20 Mar 2012 18:45:23 +0800 Subject: [PORT FROM R2]AVCE: Export API for custom crop and SAR settings. BZ: 20965 Extend struct VideoParamsAVC to accommodate crop and SAR settings.
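For illustration, a sketch of how an encoder client might fill the new fields defined in the diff below; the values are examples only, and how the struct reaches the encoder is outside this patch:

    // Hypothetical client-side setup of the new crop/SAR fields (sketch):
    VideoParamsAVC params;
    params.crop.LeftOffset = 0;
    params.crop.RightOffset = 0;
    params.crop.TopOffset = 0;
    params.crop.BottomOffset = 4;  // example value; semantics follow the H.264
                                   // frame_crop_*_offset fields signaled in the SPS
    params.SAR.SarWidth = 16;      // example Extended_SAR of 16:11, written to the VUI
    params.SAR.SarHeight = 11;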
Orig-Change-Id: I2b182dd35bb6680e0aea4002aae5fb452247730e Change-Id: I1c9c3ceee2ab23e7583a9f498fccb4ace60e41f2 Signed-off-by: hding3 Reviewed-on: http://android.intel.com:8080/39743 Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderAVC.cpp | 23 +++++++++++++++++++++++ videoencoder/VideoEncoderDef.h | 38 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 5f65835..4297a8a 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -22,6 +22,12 @@ VideoEncoderAVC::VideoEncoderAVC() mVideoParamsAVC.maxSliceSize = 0; mVideoParamsAVC.delimiterType = AVC_DELIMITER_ANNEXB; mSliceNum = 2; + mVideoParamsAVC.crop.LeftOffset = 0; + mVideoParamsAVC.crop.RightOffset = 0; + mVideoParamsAVC.crop.TopOffset = 0; + mVideoParamsAVC.crop.BottomOffset = 0; + mVideoParamsAVC.SAR.SarWidth = 0; + mVideoParamsAVC.SAR.SarHeight = 0; } Encode_Status VideoEncoderAVC::start() { @@ -772,6 +778,23 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { //avcSeqParams.vui_flag = 248; avcSeqParams.vui_flag = mVideoParamsAVC.VUIFlag; avcSeqParams.seq_parameter_set_id = 8; + if (mVideoParamsAVC.crop.LeftOffset || + mVideoParamsAVC.crop.RightOffset || + mVideoParamsAVC.crop.TopOffset || + mVideoParamsAVC.crop.BottomOffset) { + avcSeqParams.frame_cropping_flag = true; + avcSeqParams.frame_crop_left_offset = mVideoParamsAVC.crop.LeftOffset; + avcSeqParams.frame_crop_right_offset = mVideoParamsAVC.crop.RightOffset; + avcSeqParams.frame_crop_top_offset = mVideoParamsAVC.crop.TopOffset; + avcSeqParams.frame_crop_bottom_offset = mVideoParamsAVC.crop.BottomOffset; + } else + avcSeqParams.frame_cropping_flag = false; + if(avcSeqParams.vui_flag && (mVideoParamsAVC.SAR.SarWidth || mVideoParamsAVC.SAR.SarHeight)) { + avcSeqParams.aspect_ratio_info_present_flag = true; + avcSeqParams.aspect_ratio_idc = 0xff /* Extended_SAR */; + avcSeqParams.sar_width = mVideoParamsAVC.SAR.SarWidth; + avcSeqParams.sar_height = mVideoParamsAVC.SAR.SarHeight; + } // This is a temporary fix suggested by Binglin for bad encoding quality issue avcSeqParams.max_num_ref_frames = 1; // TODO: We need a long term design for this field diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 91657d0..67a2443 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -220,6 +220,36 @@ typedef struct { uint32_t format; } ExternalBufferAttrib; +struct Cropping { + uint32_t LeftOffset; + uint32_t RightOffset; + uint32_t TopOffset; + uint32_t BottomOffset; + + Cropping &operator=(const Cropping &other) { + if (this == &other) return *this; + + this->LeftOffset = other.LeftOffset; + this->RightOffset = other.RightOffset; + this->TopOffset = other.TopOffset; + this->BottomOffset = other.BottomOffset; + return *this; + } +}; + +struct SamplingAspectRatio { + uint16_t SarWidth; + uint16_t SarHeight; + + SamplingAspectRatio &operator=(const SamplingAspectRatio &other) { + if (this == &other) return *this; + + this->SarWidth = other.SarWidth; + this->SarHeight = other.SarHeight; + return *this; + } +}; + enum VideoParamConfigType { VideoParamsTypeStartUnused = 0x01000000, VideoParamsTypeCommon, @@ -306,6 +336,8 @@ struct VideoParamsAVC : VideoParamConfigSet { uint32_t idrInterval; SliceNum sliceNum; AVCDelimiterType delimiterType; + Cropping crop; + SamplingAspectRatio SAR; VideoParamsAVC() { type = VideoParamsTypeAVC; @@ -322,6 +354,12 @@ struct VideoParamsAVC : 
VideoParamConfigSet { this->idrInterval = other.idrInterval; this->sliceNum = other.sliceNum; this->delimiterType = other.delimiterType; + this->crop.LeftOffset = other.crop.LeftOffset; + this->crop.RightOffset = other.crop.RightOffset; + this->crop.TopOffset = other.crop.TopOffset; + this->crop.BottomOffset = other.crop.BottomOffset; + this->SAR.SarWidth = other.SAR.SarWidth; + this->SAR.SarHeight = other.SAR.SarHeight; return *this; } -- cgit v1.2.3 From 066798eb961014c74583d3f8c80ffd9c8d3645f4 Mon Sep 17 00:00:00 2001 From: xiao Date: Tue, 27 Mar 2012 10:31:19 +0800 Subject: VideoDecoderAVC: Add timestamp checking when first slice is lost. BZ: 28186 In a streaming case the first slice may be lost, so mix does not treat the buffer as a new frame and then calls continueDecodingFrame, causing a decode failure. Add timestamp checking for when the first slice is lost: if the timestamp differs, treat the buffer as a new frame. This is a workaround. Change-Id: I053d016aef83816d49cb31f965e54d2f2433ba45 Signed-off-by: xiao Reviewed-on: http://android.intel.com:8080/40784 Reviewed-by: Qiu, Junhai Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderAVC.cpp | 9 +++++++-- videodecoder/VideoDecoderAVC.h | 2 +- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index dbb13d4..2fd539f 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -153,7 +153,7 @@ Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h mCurrentPTS = buffer->timeStamp; //if (lastPTS != mCurrentPTS) { - if (isNewFrame(data)) { + if (isNewFrame(data, lastPTS == mCurrentPTS)) { // finish decoding the last frame status = endDecodingFrame(false); CHECK_STATUS("endDecodingFrame"); @@ -682,7 +682,7 @@ Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) { return DECODE_SUCCESS; } -bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data) { +bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data, bool equalPTS) { if (data->num_pictures == 0) { ETRACE("num_pictures == 0"); return true; @@ -701,6 +701,11 @@ // not the first slice, assume it is continuation of a partial frame // TODO: check if it is new frame boundary as the first slice may get lost in streaming case. WTRACE("first_mb_in_slice != 0");
WTRACE("first_mb_in_slice != 0"); + if (!equalPTS) { + // return true if different timestamp, it is a workaround here for a streaming case + WTRACE("different PTS, treat it as a new frame"); + return true; + } } else { if ((picData->pic_parms->CurrPic.flags & fieldFlags) == fieldFlags) { ETRACE("Current picture has both odd field and even field."); diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h index 1794e03..339e05a 100644 --- a/videodecoder/VideoDecoderAVC.h +++ b/videodecoder/VideoDecoderAVC.h @@ -55,7 +55,7 @@ protected: Decode_Status startVA(vbp_data_h264 *data); void updateFormatInfo(vbp_data_h264 *data); Decode_Status handleNewSequence(vbp_data_h264 *data); - bool isNewFrame(vbp_data_h264 *data); + bool isNewFrame(vbp_data_h264 *data, bool equalPTS); int32_t getDPBSize(vbp_data_h264 *data); private: -- cgit v1.2.3 From d305c9562a3bdbd1747dc49130057624dc611fcd Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Thu, 15 Mar 2012 06:42:02 -0400 Subject: libmix: support Kbuf handle buffer sharing mode BZ: 27501 support Kbuf handle buffer sharing mode Signed-off-by: Weian Chen Change-Id: Ic766a4e0d91cb9d8ebe48cea5a82c65fe7676d42 Reviewed-on: http://android.intel.com:8080/39155 Reviewed-by: Tao, Tao Q Reviewed-by: Verdu, Miguel Reviewed-by: Poornachandran, Rajesh Reviewed-by: Chen, Weian Tested-by: Sun, Hang L Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 42 ++++++++++++++++++++++++++++++++++++--- videoencoder/VideoEncoderBase.h | 1 + videoencoder/VideoEncoderDef.h | 5 +++-- 3 files changed, 43 insertions(+), 5 deletions(-) mode change 100755 => 100644 videoencoder/VideoEncoderBase.cpp diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp old mode 100755 new mode 100644 index 2f45064..7a3b6a8 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -200,6 +200,7 @@ Encode_Status VideoEncoderBase::start() { case BUFFER_SHARING_V4L2: case BUFFER_SHARING_SURFACE: case BUFFER_SHARING_GFXHANDLE: + case BUFFER_SHARING_KBUFHANDLE: { mSharedSurfacesCnt = mUpstreamBufferCnt; normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE; @@ -280,6 +281,10 @@ Encode_Status VideoEncoderBase::start() { ret = surfaceMappingForGfxHandle(); CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForGfxHandle"); break; + case BUFFER_SHARING_KBUFHANDLE: + ret = surfaceMappingForKbufHandle(); + CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForKbufHandle"); + break; case BUFFER_SHARING_NONE: break; case BUFFER_SHARING_USRPTR: { @@ -1513,8 +1518,7 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS } if (upStreamBuffer->bufAttrib) { - mBufAttrib->format = upStreamBuffer->bufAttrib->format; - mBufAttrib->lumaStride = upStreamBuffer->bufAttrib->lumaStride; + memcpy(mBufAttrib, upStreamBuffer->bufAttrib, sizeof(ExternalBufferAttrib)); } else { LOG_E ("Buffer Attrib doesn't set by client, return error"); return ENCODE_INVALID_PARAMS; @@ -1687,6 +1691,35 @@ Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle() { return ret; } +Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle() { + + uint32_t index; + VAStatus vaStatus = VA_STATUS_SUCCESS; + Encode_Status ret = ENCODE_SUCCESS; + + uint32_t lumaOffset = 0; + uint32_t chromaUOffset = mBufAttrib->realHeight * mBufAttrib->lumaStride; + uint32_t chromaVOffset = chromaUOffset + 1; + + for (index = 0; index < mSharedSurfacesCnt; index++) { + + vaStatus = vaCreateSurfaceFromKBuf( + mVADisplay, mComParams.resolution.width, 
mComParams.resolution.height, VA_RT_FORMAT_YUV420, + (VASurfaceID *)&mSharedSurfaces[index], mUpstreamBufferList[index], mBufAttrib->lumaStride * mComParams.resolution.height * 3 / 2, + mBufAttrib->format, mBufAttrib->lumaStride, mBufAttrib->chromStride, mBufAttrib->chromStride, lumaOffset, chromaUOffset, chromaVOffset); + + CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf"); + + LOG_I("Surface ID created from Kbuf = 0x%08x", mSharedSurfaces[index]); + + mSurfaces[index] = mSharedSurfaces[index]; + ret = generateVideoBufferAndAttachToList(index, NULL); + CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList"); + } + + return ret; +} + Encode_Status VideoEncoderBase::surfaceMappingForCIFrameList() { uint32_t index; VAStatus vaStatus = VA_STATUS_SUCCESS; @@ -1731,7 +1764,9 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { } - } else if (mBufferMode == BUFFER_SHARING_SURFACE || mBufferMode == BUFFER_SHARING_GFXHANDLE) { + } else if (mBufferMode == BUFFER_SHARING_SURFACE || + mBufferMode == BUFFER_SHARING_GFXHANDLE || + mBufferMode == BUFFER_SHARING_KBUFHANDLE) { bufIndex = (uint32_t) -1; data = *(uint32_t*)inBuffer->data; @@ -1782,6 +1817,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { case BUFFER_SHARING_CI: case BUFFER_SHARING_SURFACE: case BUFFER_SHARING_GFXHANDLE: + case BUFFER_SHARING_KBUFHANDLE: case BUFFER_SHARING_USRPTR: { if (mRefFrame== NULL) { diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index a03f0dd..4b11253 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -62,6 +62,7 @@ private: Encode_Status surfaceMappingForSurfaceList(); Encode_Status surfaceMappingForGfxHandle(); Encode_Status surfaceMappingForCIFrameList(); + Encode_Status surfaceMappingForKbufHandle(); VideoEncSurfaceBuffer *appendVideoSurfaceBuffer( VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer); diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 67a2443..d5f10b5 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -98,6 +98,7 @@ enum VideoBufferSharingMode { BUFFER_SHARING_SURFACE = 8, BUFFER_SHARING_USRPTR = 16, BUFFER_SHARING_GFXHANDLE = 32, + BUFFER_SHARING_KBUFHANDLE = 64, BUFFER_LAST }; @@ -213,8 +214,8 @@ struct SliceNum { }; typedef struct { - uint32_t width; - uint32_t height; + uint32_t realWidth; + uint32_t realHeight; uint32_t lumaStride; uint32_t chromStride; uint32_t format; -- cgit v1.2.3 From 83dc04eedbf4f1111ef48b764b2bd5ba4548f2a4 Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Fri, 30 Mar 2012 11:30:05 -0400 Subject: libmix: check if the new width/height equals 0 when a resolution change happens BZ: 29395 If the new width or height equals 0, we don't treat it as a resolution change; since some issues may happen in that case, we try to use the old resolution and continue decoding. Change-Id: I9460f1350f0851c2b4e018b80ea7763f62fe4192 Signed-off-by: Weian Chen Reviewed-on: http://android.intel.com:8080/41858 Reviewed-by: Qiu, Junhai Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderAVC.cpp | 5 +++-- videodecoder/VideoDecoderMPEG4.cpp | 6 ++++-- videodecoder/VideoDecoderWMV.cpp | 6 ++++-- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 2fd539f..dff124e 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -603,8 +603,9 @@
void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d", mVideoFormatInfo.width, mVideoFormatInfo.height, width, height); - if (mVideoFormatInfo.width != width || - mVideoFormatInfo.height!= height) { + if ((mVideoFormatInfo.width != width || + mVideoFormatInfo.height != height) && + width && height) { mVideoFormatInfo.width = width; mVideoFormatInfo.height = height; mSizeChanged = true; diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index d8243be..cce2e64 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -516,8 +516,10 @@ void VideoDecoderMPEG4::updateFormatInfo(vbp_data_mp42 *data) { data->codec_data.video_object_layer_width, data->codec_data.video_object_layer_height); - if (mVideoFormatInfo.width != (int32_t)data->codec_data.video_object_layer_width || - mVideoFormatInfo.height != (int32_t)data->codec_data.video_object_layer_height) { + if ((mVideoFormatInfo.width != (int32_t)data->codec_data.video_object_layer_width || + mVideoFormatInfo.height != (int32_t)data->codec_data.video_object_layer_height) && + data->codec_data.video_object_layer_width && + data->codec_data.video_object_layer_height) { // update encoded image size mVideoFormatInfo.width = data->codec_data.video_object_layer_width; mVideoFormatInfo.height = data->codec_data.video_object_layer_height; diff --git a/videodecoder/VideoDecoderWMV.cpp b/videodecoder/VideoDecoderWMV.cpp index b32edf4..85a0d28 100644 --- a/videodecoder/VideoDecoderWMV.cpp +++ b/videodecoder/VideoDecoderWMV.cpp @@ -401,8 +401,10 @@ void VideoDecoderWMV::updateFormatInfo(vbp_data_vc1 *data) { mVideoFormatInfo.width, mVideoFormatInfo.height, data->se_data->CODED_WIDTH, data->se_data->CODED_HEIGHT); - if (mVideoFormatInfo.width != data->se_data->CODED_WIDTH || - mVideoFormatInfo.height!= data->se_data->CODED_HEIGHT) { + if ((mVideoFormatInfo.width != data->se_data->CODED_WIDTH || + mVideoFormatInfo.height != data->se_data->CODED_HEIGHT) && + data->se_data->CODED_WIDTH && + data->se_data->CODED_HEIGHT) { // encoded image size mVideoFormatInfo.width = data->se_data->CODED_WIDTH; mVideoFormatInfo.height = data->se_data->CODED_HEIGHT; -- cgit v1.2.3 From ac49fd199e3ef19224fdd26d7b567a74526986ae Mon Sep 17 00:00:00 2001 From: Jason Hu Date: Fri, 6 Apr 2012 18:22:01 +0800 Subject: libmix: optimize video performance, specifically for high-bitrate streams. BZ: 20854 The current buffer sync in the decoder pipeline blocks the video render thread and causes serious frame drops in the video framework. So move the video buffer sync wait from libmix to hwc; that removes the blocking when popping an output buffer from OpenMax.
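The diff that follows simply deletes the vaSyncSurface() calls from getOutput(); the wait is assumed to move to the consumer side. A minimal sketch of that consumer-side wait (hypothetical renderer/HWC caller code, not part of this patch):

    // Hypothetical consumer side (e.g. HWC/renderer), assuming a
    // VideoDecoderBase-derived decoder and a valid VADisplay:
    const VideoRenderBuffer *buf = decoder->getOutput(false /*draining*/);
    if (buf != NULL) {
        // Block this thread, not the decoder output path, until the
        // surface is fully decoded, then compose/flip it.
        VAStatus vaStatus = vaSyncSurface(vaDisplay, buf->surface);
        // check vaStatus before scanning out the buffer
    }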
Change-Id: I60cebc63c02ab6c5f455107af38431ec60428afe Signed-off-by: Jason Hu Reviewed-on: http://android.intel.com:8080/42680 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 6 ------ 1 file changed, 6 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 60e5a72..e567bcf 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -199,9 +199,6 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { if (mOutputHead == NULL) { mOutputTail = NULL; } - if (useGraphicBuffer ) { - vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface); - } return &(outputByPos->renderBuffer); } @@ -246,9 +243,6 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { mOutputTail = NULL; } } - if (useGraphicBuffer ) { - vaSyncSurface(mVADisplay, output->renderBuffer.surface); - } //VTRACE("Output POC %d for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6); return &(output->renderBuffer); } -- cgit v1.2.3 From 091ce4ac89e1d78799c82f710a66d44afa26b37d Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Tue, 27 Mar 2012 06:09:04 -0400 Subject: libmix: take fps info to set render mode BZ: 29113 Take the fps info into account when setting the render mode, to support 1080p@60fps. Signed-off-by: Weian Chen Change-Id: I6db3fad3dee07a8e226d7a2b8fbe92db6bd233e4 Reviewed-on: http://android.intel.com:8080/40781 Reviewed-by: Qiu, Junhai Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 11 +++++++++++ videodecoder/VideoDecoderDefs.h | 1 + 2 files changed, 12 insertions(+) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index e567bcf..b0a115e 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -694,6 +694,17 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion); CHECK_VA_STATUS("vaInitialize"); + if (mConfigBuffer.frameRate > 45 && mVideoFormatInfo.height >= 1080) { + // ugly workaround here + // for fps > 45 and height >= 1080, we will force to + // use surfaceTexture render mode due to a performance issue + VADisplayAttribute renderMode; + renderMode.type = VADisplayAttribRenderMode; + renderMode.value = VA_RENDER_MODE_EXTERNAL_GPU; + vaStatus = vaSetDisplayAttributes(mVADisplay, &renderMode, 1); + CHECK_VA_STATUS("vaSetDisplayAttributes"); + } + if ((int32_t)profile != VAProfileSoftwareDecoding) { //We are requesting RT attributes attrib.type = VAConfigAttribRTFormat; diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 1eed64b..61c6c9e 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -118,6 +118,7 @@ struct VideoConfigBuffer { int32_t size; int32_t width; int32_t height; + int32_t frameRate; int32_t surfaceNumber; VAProfile profile; uint32_t flag; -- cgit v1.2.3 From 2bd17f6c8199aa4af03a75e9a301a6764be218ae Mon Sep 17 00:00:00 2001 From: Elaine Wang Date: Fri, 13 Apr 2012 12:00:01 +0800 Subject: Optimize libmix encoder in async mode. BZ: 30377 Before vaSyncSurface(frame N), vaEndPicture(frame N-1) should be invoked, which prevents TopazSC from entering D0i3 while there is an upcoming encoding request.
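In effect, the patch below pins down this call ordering in asyncEncode() (a simplified sketch using the names from the diff; one frame of pipelining is assumed):

    // Simplified ordering after this patch (names from the diff below):
    ret = sendEncodeCommand();                        // queue the current frame
    vaStatus = vaEndPicture(mVADisplay, mVAContext);  // submit it unconditionally, up front
    // ...later in the same call, consume the previous frame's output:
    vaStatus = vaMapBuffer(mVADisplay, mOutCodedBuffer, (void **)&buf);  // implicitly waits on the previous frame
    vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
    vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastFrame->surface, &vaSurfaceStatus);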
Signed-off-by: Elaine Wang Change-Id: I9d91d778eb83d555c37ea4a7d9e379afaf29db63 Reviewed-on: http://android.intel.com:8080/43342 Reviewed-by: Wang, Elaine Reviewed-by: Yuan, Shengquan Reviewed-by: Tao, Tao Q Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 7a3b6a8..7f7584e 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -462,10 +462,8 @@ Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { ret = sendEncodeCommand(); CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand"); - if ((mComParams.rcMode == VA_RC_NONE) || mFirstFrame) { - vaStatus = vaEndPicture(mVADisplay, mVAContext); - CHECK_VA_STATUS_RETURN("vaEndPicture"); - } + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS_RETURN("vaEndPicture"); LOG_V( "vaEndPicture\n"); @@ -488,12 +486,6 @@ Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - if (!((mComParams.rcMode == VA_RC_NONE) || mFirstFrame)) { - vaStatus = vaEndPicture(mVADisplay, mVAContext); - CHECK_VA_STATUS_RETURN("vaEndPicture"); - - } - if (mFirstFrame) { vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurFrame->surface); CHECK_VA_STATUS_RETURN("vaBeginPicture"); @@ -509,7 +501,7 @@ Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { // Query the status of current surface VASurfaceStatus vaSurfaceStatus; - vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurFrame->surface, &vaSurfaceStatus); + vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastFrame->surface, &vaSurfaceStatus); CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); mPicSkipped = vaSurfaceStatus & VASurfaceSkipped; -- cgit v1.2.3 From 445be2530e19403d8e2a5b7abab06bb9aa4f1419 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Wed, 18 Apr 2012 09:39:09 +0800 Subject: libmix: enhance libmix to handle the case where the resolution in the container differs from the ES resolution BZ: 25724 29844 28316 Enhance libmix to handle the case where the resolution in the container differs from the ES resolution: 1: if the resolution in the container is larger than the real ES resolution, return the correct crop info to omxcodec to set the proper region for display; 2: if the resolution in the container is smaller than the real ES resolution, the graphic buffers in omxcodec should be reallocated, the graphic-related info should be reset in OMX IL and libmix, and VA should be restarted (see the worked example after this message).
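The crop computation this patch adds to the decoders' updateFormatInfo() is plain subtraction; a worked example with illustrative numbers (not taken from the patch):

    // Illustrative numbers: the container reports 1920x1080
    // (mVideoFormatInfo), while the ES is actually coded at 1920x1088
    // (macroblock-aligned height).
    //   cropBottom = (1088 > 1080) ? (1088 - 1080) : 0;   // = 8 rows
    //   cropRight  = (1920 > 1920) ? (1920 - 1920) : 0;   // = 0 columns
    // OMX then displays only the 1920x1080 region of the decoded surface.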
Signed-off-by: ywan171 Change-Id: Iaf97c822d66b0a978661c6c469b682713b009ad5 Reviewed-on: http://android.intel.com:8080/42767 Reviewed-by: Qiu, Junhai Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderAVC.cpp | 4 +++ videodecoder/VideoDecoderBase.cpp | 55 +++++++++++++++++++++++++++++++----- videodecoder/VideoDecoderBase.h | 1 + videodecoder/VideoDecoderDefs.h | 2 ++ videodecoder/VideoDecoderInterface.h | 1 + videodecoder/VideoDecoderMPEG4.cpp | 5 ++++ videodecoder/VideoDecoderWMV.cpp | 7 ++++- 7 files changed, 67 insertions(+), 8 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index dff124e..adc6b2a 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -661,6 +661,10 @@ Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) { updateFormatInfo(data); if (mSizeChanged == false) { return DECODE_SUCCESS; + } else if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){ + mSizeChanged = false; + flushSurfaceBuffers(); + return DECODE_FORMAT_CHANGE; } if (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth || diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index b0a115e..3e816b8 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -114,6 +114,10 @@ Decode_Status VideoDecoderBase::start(VideoConfigBuffer *buffer) { mVideoFormatInfo.width = buffer->width; mVideoFormatInfo.height = buffer->height; + if (buffer->flag & USE_NATIVE_GRAPHIC_BUFFER) { + mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth; + mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight; + } mLowDelay = buffer->flag & WANT_LOW_DELAY; mRawOutput = buffer->flag & WANT_RAW_OUTPUT; if (mRawOutput) { @@ -123,6 +127,35 @@ Decode_Status VideoDecoderBase::start(VideoConfigBuffer *buffer) { return DECODE_SUCCESS; } + +Decode_Status VideoDecoderBase::reset(VideoConfigBuffer *buffer) { + if (buffer == NULL) { + return DECODE_INVALID_DATA; + } + + // if VA is already started, terminate VA as graphic buffers are reallocated by omxcodec + terminateVA(); + + // reset the mconfigBuffer to pass it for startVA. 
+ mConfigBuffer = *buffer; + mConfigBuffer.data = NULL; + mConfigBuffer.size = 0; + + mVideoFormatInfo.width = buffer->width; + mVideoFormatInfo.height = buffer->height; + mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth; + mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight; + mLowDelay = buffer->flag & WANT_LOW_DELAY; + mRawOutput = buffer->flag & WANT_RAW_OUTPUT; + mSignalBufferSize = 0; + if (mRawOutput) { + WTRACE("Output is raw data."); + } + return DECODE_SUCCESS; +} + + + void VideoDecoderBase::stop(void) { terminateVA(); @@ -659,7 +692,15 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { } if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){ - numSurface = mConfigBuffer.surfaceNumber; + numSurface = mConfigBuffer.surfaceNumber; + // if format has been changed in USE_NATIVE_GRAPHIC_BUFFER mode, + // we can not setupVA here when the graphic buffer resolution is smaller than the resolution decoder really needs + if (mSizeChanged) { + if (mVideoFormatInfo.surfaceWidth < mVideoFormatInfo.width || mVideoFormatInfo.surfaceHeight < mVideoFormatInfo.height) { + mSizeChanged = false; + return DECODE_FORMAT_CHANGE; + } + } } // TODO: validate profile if (numSurface == 0) { @@ -748,8 +789,8 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { mVAExternalMemoryBuffers->luma_stride = mConfigBuffer.graphicBufferStride; mVAExternalMemoryBuffers->pixel_format = mConfigBuffer.graphicBufferColorFormat; mVAExternalMemoryBuffers->native_window = mConfigBuffer.nativeWindow; - mVAExternalMemoryBuffers->width = mVideoFormatInfo.width; - mVAExternalMemoryBuffers->height = mVideoFormatInfo.height; + mVAExternalMemoryBuffers->width = mVideoFormatInfo.surfaceWidth; + mVAExternalMemoryBuffers->height = mVideoFormatInfo.surfaceHeight; mVAExternalMemoryBuffers->type = VAExternalMemoryAndroidGrallocBuffer; for (int i = 0; i < mNumSurfaces; i++) { mVAExternalMemoryBuffers->buffers[i] = (unsigned int )mConfigBuffer.graphicBufferHandler[i]; @@ -760,8 +801,8 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { mVASurfaceAttrib->value.value.p_val = (void *)mVAExternalMemoryBuffers; vaStatus = vaCreateSurfaces( mVADisplay, - mVideoFormatInfo.width, - mVideoFormatInfo.height, + mVideoFormatInfo.surfaceWidth, + mVideoFormatInfo.surfaceHeight, format, mNumSurfaces, mSurfaces, @@ -777,11 +818,11 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { mSurfaces, NULL, 0); + mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width; + mVideoFormatInfo.surfaceHeight = mVideoFormatInfo.height; } CHECK_VA_STATUS("vaCreateSurfaces"); - mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width; - mVideoFormatInfo.surfaceHeight = mVideoFormatInfo.height; mVideoFormatInfo.surfaceNumber = mNumSurfaces; mVideoFormatInfo.ctxSurfaces = mSurfaces; diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 83b868f..c80367a 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -46,6 +46,7 @@ public: virtual ~VideoDecoderBase(); virtual Decode_Status start(VideoConfigBuffer *buffer); + virtual Decode_Status reset(VideoConfigBuffer *buffer) ; virtual void stop(void); //virtual Decode_Status decode(VideoDecodeBuffer *buffer); virtual void flush(void); diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 61c6c9e..dc72a6f 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -125,6 +125,8 
@@ struct VideoConfigBuffer { void *graphicBufferHandler[ MAX_GRAPHIC_NUM ]; uint32_t graphicBufferStride; uint32_t graphicBufferColorFormat; + uint32_t graphicBufferWidth; + uint32_t graphicBufferHeight; VideoExtensionBuffer *ext; void* nativeWindow; uint32_t rotationDegrees; diff --git a/videodecoder/VideoDecoderInterface.h b/videodecoder/VideoDecoderInterface.h index e3cf728..66d62e3 100644 --- a/videodecoder/VideoDecoderInterface.h +++ b/videodecoder/VideoDecoderInterface.h @@ -32,6 +32,7 @@ class IVideoDecoder { public: virtual ~IVideoDecoder() {} virtual Decode_Status start(VideoConfigBuffer *buffer) = 0; + virtual Decode_Status reset(VideoConfigBuffer *buffer) = 0; virtual void stop(void) = 0; virtual void flush() = 0; virtual Decode_Status decode(VideoDecodeBuffer *buffer) = 0; diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index cce2e64..050ab9a 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -516,6 +516,11 @@ void VideoDecoderMPEG4::updateFormatInfo(vbp_data_mp42 *data) { data->codec_data.video_object_layer_width, data->codec_data.video_object_layer_height); + mVideoFormatInfo.cropBottom = data->codec_data.video_object_layer_height > mVideoFormatInfo.height ? + data->codec_data.video_object_layer_height - mVideoFormatInfo.height : 0; + mVideoFormatInfo.cropRight = data->codec_data.video_object_layer_width > mVideoFormatInfo.width ? + data->codec_data.video_object_layer_width - mVideoFormatInfo.width : 0; + if ((mVideoFormatInfo.width != (int32_t)data->codec_data.video_object_layer_width || mVideoFormatInfo.height != (int32_t)data->codec_data.video_object_layer_height) && data->codec_data.video_object_layer_width && diff --git a/videodecoder/VideoDecoderWMV.cpp b/videodecoder/VideoDecoderWMV.cpp index 85a0d28..7bb9ece 100644 --- a/videodecoder/VideoDecoderWMV.cpp +++ b/videodecoder/VideoDecoderWMV.cpp @@ -401,7 +401,12 @@ void VideoDecoderWMV::updateFormatInfo(vbp_data_vc1 *data) { mVideoFormatInfo.width, mVideoFormatInfo.height, data->se_data->CODED_WIDTH, data->se_data->CODED_HEIGHT); - if ((mVideoFormatInfo.width != data->se_data->CODED_WIDTH || + mVideoFormatInfo.cropBottom = data->se_data->CODED_HEIGHT > mVideoFormatInfo.height ? + data->se_data->CODED_HEIGHT - mVideoFormatInfo.height : 0; + mVideoFormatInfo.cropRight = data->se_data->CODED_WIDTH > mVideoFormatInfo.width ? + data->se_data->CODED_WIDTH - mVideoFormatInfo.width : 0; + + if ((mVideoFormatInfo.width != data->se_data->CODED_WIDTH || mVideoFormatInfo.height != data->se_data->CODED_HEIGHT) && data->se_data->CODED_WIDTH && data->se_data->CODED_HEIGHT) { -- cgit v1.2.3 From 1c4bdecf4d40c94ee9fad3a2f843d50f5ed6f7df Mon Sep 17 00:00:00 2001 From: Andy Qiu Date: Tue, 28 Feb 2012 17:56:12 -0800 Subject: Extend middleware to support secure video playback. 
BZ: 29937 Change-Id: Ia8d59ff6285395efbfbd85d56b7fe70298aa6bb2 Signed-off-by: Andy Qiu Reviewed-on: http://android.intel.com:8080/36931 Reviewed-by: buildbot Reviewed-by: Vehmanen, Kai Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao --- videodecoder/Android.mk | 1 + videodecoder/VideoDecoderAVC.cpp | 6 +- videodecoder/VideoDecoderAVCSecure.cpp | 515 +++++++++++++++++++++++++++++++++ videodecoder/VideoDecoderAVCSecure.h | 83 ++++++ videodecoder/VideoDecoderBase.cpp | 8 +- videodecoder/VideoDecoderBase.h | 2 +- videodecoder/VideoDecoderDefs.h | 10 +- videodecoder/VideoDecoderHost.cpp | 4 + 8 files changed, 618 insertions(+), 11 deletions(-) create mode 100644 videodecoder/VideoDecoderAVCSecure.cpp create mode 100644 videodecoder/VideoDecoderAVCSecure.h diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index b075586..26fa709 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -10,6 +10,7 @@ LOCAL_SRC_FILES := \ VideoDecoderAVC.cpp \ VideoDecoderVP8.cpp \ VideoDecoderPAVC.cpp \ + VideoDecoderAVCSecure.cpp \ VideoDecoderTrace.cpp # LOCAL_CFLAGS := diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index adc6b2a..243ca80 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -167,11 +167,13 @@ Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h } // HAS_COMPLETE_FRAME is not reliable as it may indicate end of a field - /* if (buffer->flag & HAS_COMPLETE_FRAME) { +#if 0 + if (buffer->flag & HAS_COMPLETE_FRAME) { // finish decoding current frame status = endDecodingFrame(false); CHECK_STATUS("endDecodingFrame"); - }*/ + } +#endif if (mSizeChanged) { mSizeChanged = false; diff --git a/videodecoder/VideoDecoderAVCSecure.cpp b/videodecoder/VideoDecoderAVCSecure.cpp new file mode 100644 index 0000000..3bcfd70 --- /dev/null +++ b/videodecoder/VideoDecoderAVCSecure.cpp @@ -0,0 +1,515 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +*/ + +#include "VideoDecoderAVCSecure.h" +#include "VideoDecoderTrace.h" +#include + + +#define STARTCODE_00 0x00 +#define STARTCODE_01 0x01 +#define STARTCODE_PREFIX_LEN 3 +#define NALU_TYPE_MASK 0x1F + + +// mask for little endian, to mast the second and fourth bytes in the byte stream +#define STARTCODE_MASK0 0xFF000000 //0x00FF0000 +#define STARTCODE_MASK1 0x0000FF00 //0x000000FF + + +typedef enum { + NAL_UNIT_TYPE_unspecified0 = 0, + NAL_UNIT_TYPE_SLICE, + NAL_UNIT_TYPE_DPA, + NAL_UNIT_TYPE_DPB, + NAL_UNIT_TYPE_DPC, + NAL_UNIT_TYPE_IDR, + NAL_UNIT_TYPE_SEI, + NAL_UNIT_TYPE_SPS, + NAL_UNIT_TYPE_PPS, + NAL_UNIT_TYPE_Acc_unit_delimiter, + NAL_UNIT_TYPE_EOSeq, + NAL_UNIT_TYPE_EOstream, + NAL_UNIT_TYPE_filler_data, + NAL_UNIT_TYPE_SPS_extension, + NAL_UNIT_TYPE_Reserved14, + NAL_UNIT_TYPE_Reserved15, + NAL_UNIT_TYPE_Reserved16, + NAL_UNIT_TYPE_Reserved17, + NAL_UNIT_TYPE_Reserved18, + NAL_UNIT_TYPE_ACP, + NAL_UNIT_TYPE_Reserved20, + NAL_UNIT_TYPE_Reserved21, + NAL_UNIT_TYPE_Reserved22, + NAL_UNIT_TYPE_Reserved23, + NAL_UNIT_TYPE_unspecified24, +} NAL_UNIT_TYPE; + +#ifndef min +#define min(X, Y) ((X) <(Y) ? (X) : (Y)) +#endif + + +static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01}; + + +VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) + : VideoDecoderAVC(mimeType), + mNaluHeaderBuffer(NULL), + mInputBuffer(NULL) { + + memset(&mMetadata, 0, sizeof(NaluMetadata)); + memset(&mByteStream, 0, sizeof(NaluByteStream)); +} + +VideoDecoderAVCSecure::~VideoDecoderAVCSecure() { +} + +Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { + Decode_Status status = VideoDecoderAVC::start(buffer); + if (status != DECODE_SUCCESS) { + return status; + } + + mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER]; + mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER]; + mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; + + if (mMetadata.naluInfo == NULL || + mByteStream.byteStream == NULL || + mNaluHeaderBuffer == NULL) { + ETRACE("Failed to allocate memory."); + // TODO: release all allocated memory + return DECODE_MEMORY_FAIL; + } + return status; +} + +void VideoDecoderAVCSecure::stop(void) { + VideoDecoderAVC::stop(); + + if (mMetadata.naluInfo) { + delete [] mMetadata.naluInfo; + mMetadata.naluInfo = NULL; + } + + if (mByteStream.byteStream) { + delete [] mByteStream.byteStream; + mByteStream.byteStream = NULL; + } + + if (mNaluHeaderBuffer) { + delete [] mNaluHeaderBuffer; + mNaluHeaderBuffer = NULL; + } +} + +Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { + Decode_Status status; + int32_t sizeAccumulated = 0; + int32_t sizeLeft = 0; + uint8_t *pByteStream = NULL; + NaluInfo *pNaluInfo = mMetadata.naluInfo; + + if (buffer->flag & IS_SECURE_DATA) { + pByteStream = buffer->data; + sizeLeft = buffer->size; + mInputBuffer = NULL; + } else { + status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream); + CHECK_STATUS("parseAnnexBStream"); + pByteStream = mByteStream.byteStream; + sizeLeft = mByteStream.streamPos; + mInputBuffer = buffer->data; + } + if (sizeLeft < 4) { + ETRACE("Not enough data to read number of NALU."); + return DECODE_INVALID_DATA; + } + + // read number of NALU + memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t)); + pByteStream += 4; + sizeLeft -= 4; + + if (mMetadata.naluNumber == 0) { + WTRACE("Number of NALU is ZERO!"); + return DECODE_SUCCESS; + } + + for (int32_t i = 0; i < mMetadata.naluNumber; i++) { + if (sizeLeft < 12) { + ETRACE("Not enough data to 
parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft); + return DECODE_INVALID_DATA; + } + sizeLeft -= 12; + // read NALU offset + memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU size + memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU header length + memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + if (sizeLeft < pNaluInfo->naluHeaderLen) { + ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen); + return DECODE_INVALID_DATA; + } + + sizeLeft -= pNaluInfo->naluHeaderLen; + + if (pNaluInfo->naluHeaderLen) { + // copy start code prefix to buffer + memcpy(mNaluHeaderBuffer + sizeAccumulated, + startcodePrefix, + STARTCODE_PREFIX_LEN); + sizeAccumulated += STARTCODE_PREFIX_LEN; + + // copy NALU header + memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen); + pByteStream += pNaluInfo->naluHeaderLen; + + sizeAccumulated += pNaluInfo->naluHeaderLen; + } else { + WTRACE("header len is zero for NALU %d", i); + } + + // for next NALU + pNaluInfo++; + } + + buffer->data = mNaluHeaderBuffer; + buffer->size = sizeAccumulated; + + return VideoDecoderAVC::decode(buffer); +} + + +Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { + + Decode_Status status; + VAStatus vaStatus; + uint32_t bufferIDCount = 0; + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID bufferIDs[4]; + + vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); + vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); + VAPictureParameterBufferH264 *picParam = picData->pic_parms; + VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); + + if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { + // either condition indicates start of a new frame + if (sliceParam->first_mb_in_slice != 0) { + WTRACE("The first slice is lost."); + // TODO: handle the first slice lost + } + if (mDecodingFrame) { + // interlace content, complete decoding the first field + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS("vaEndPicture"); + + // for interlace content, top field may be valid only after the second field is parsed + mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; + } + + // Check there is no reference frame loss before decoding a frame + + // Update the reference frames and surface IDs for DPB and current frame + status = updateDPB(picParam); + CHECK_STATUS("updateDPB"); + + //We have to provide a hacked DPB rather than complete DPB for libva as workaround + status = updateReferenceFrames(picData); + CHECK_STATUS("updateReferenceFrames"); + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); + + // start decoding a frame + mDecodingFrame = true; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferH264), + 1, + picParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferH264), + 1, + data->IQ_matrix_buf, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); + 
bufferIDCount++; + } + + status = setReference(sliceParam); + CHECK_STATUS("setReference"); + + // find which naluinfo is correlated to current slice + int naluIndex = 0; + uint32_t accumulatedHeaderLen = 0; + uint32_t headerLen = 0; + for (; naluIndex < mMetadata.naluNumber; naluIndex++) { + headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen; + if (headerLen == 0) { + WTRACE("lenght of current NAL unit is 0."); + continue; + } + accumulatedHeaderLen += STARTCODE_PREFIX_LEN; + if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) { + break; + } + accumulatedHeaderLen += headerLen; + } + + if (sliceData->slice_offset != accumulatedHeaderLen) { + WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen); + } + + sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen; + sliceData->slice_size = sliceParam->slice_data_size; + + // no need to update: + // sliceParam->slice_data_offset - 0 always + // sliceParam->slice_data_bit_offset - relative to sliceData->slice_offset + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264), + 1, + sliceParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); + bufferIDCount++; + + // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to start codes of current NAL unit + // offset points to first byte of NAL unit + uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset; + if (mInputBuffer != NULL) { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + sliceData->slice_size, //size + 1, //num_elements + mInputBuffer + sliceOffset, + &bufferIDs[bufferIDCount]); + } else { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAProtectedSliceDataBufferType, + sliceData->slice_size, //size + 1, //num_elements + (uint8_t*)sliceOffset, // IMR offset + &bufferIDs[bufferIDCount]); + } + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + bufferIDCount++; + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + bufferIDs, + bufferIDCount); + CHECK_VA_STATUS("vaRenderPicture"); + + return DECODE_SUCCESS; +} + + +// Parse byte string pattern "0x000001" (3 bytes) in the current buffer. +// Returns offset of position following the pattern in the buffer if pattern is found or -1 if not found. +int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) { + uint8_t *ptr; + uint32_t left = 0, data = 0, phase = 0; + uint8_t mask1 = 0, mask2 = 0; + + /* Meaning of phase: + 0: initial status, "0x000001" bytes are not found so far; + 1: one "0x00" byte is found; + 2: two or more consecutive "0x00" bytes" are found; + 3: "0x000001" patten is found ; + 4: if there is one more byte after "0x000001"; + */ + + left = length; + ptr = (uint8_t *) (stream + offset); + phase = 0; + + // parse until there is more data and start code not found + while ((left > 0) && (phase < 3)) { + // Check if the address is 32-bit aligned & phase=0, if thats the case we can check 4 bytes instead of one byte at a time. + if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) { + while (left > 3) { + data = *((uint32_t *)ptr); + mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0)); + mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1)); + // If second byte and fourth byte are not zero's then we cannot have a start code here, + // as we need two consecutive zero bytes for a start code pattern. 
+ if (mask1 && mask2) { + // skip 4 bytes and start over + ptr += 4; + left -=4; + continue; + } else { + break; + } + } + } + + // At this point either data is not on a 32-bit boundary or phase > 0 so we look at one byte at a time + if (left > 0) { + if (*ptr == STARTCODE_00) { + phase++; + if (phase > 2) { + // more than 2 consecutive '0x00' bytes is found + phase = 2; + } + } else if ((*ptr == STARTCODE_01) && (phase == 2)) { + // start code is found + phase = 3; + } else { + // reset lookup + phase = 0; + } + ptr++; + left--; + } + } + + if ((left > 0) && (phase == 3)) { + phase = 4; + // return offset of position following the pattern in the buffer which matches "0x000001" byte string + return (int32_t)(ptr - stream); + } + return -1; +} + + +Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) { + uint8_t naluType; + int32_t naluHeaderLen; + + naluType = *(uint8_t *)(stream + naluStream->naluOffset); + naluType &= NALU_TYPE_MASK; + // first update nalu header length based on nalu type + if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) { + // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes + naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE); + } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) { + //sps, pps, sei, etc, return the entire NAL unit in clear + naluHeaderLen = naluStream->naluLen; + } else { + return DECODE_FRAME_DROPPED; + } + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t)); + naluStream->streamPos += 4; + + if (naluHeaderLen) { + memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen); + naluStream->streamPos += naluHeaderLen; + } + return DECODE_SUCCESS; +} + + +// parse start-code prefixed stream, also knowns as Annex B byte stream, commonly used in AVI, ES, MPEG2 TS container +Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) { + int32_t naluOffset, offset, left; + NaluInfo *info; + uint32_t ret = DECODE_SUCCESS; + + naluOffset = 0; + offset = 0; + left = length; + + // leave 4 bytes to copy nalu count + naluStream->streamPos = 4; + naluStream->naluCount = 0; + memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER); + + for (; ;) { + naluOffset = findNalUnitOffset(stream, offset, left); + if (naluOffset == -1) { + break; + } + + if (naluStream->naluCount == 0) { + naluStream->naluOffset = naluOffset; + } else { + naluStream->naluLen = naluOffset - naluStream->naluOffset - STARTCODE_PREFIX_LEN; + ret = copyNaluHeader(stream, naluStream); + if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) { + LOGW("copyNaluHeader returned %d", ret); + return ret; + } + // starting position for next NALU + naluStream->naluOffset = naluOffset; + } + + if (ret == DECODE_SUCCESS) { + naluStream->naluCount++; + } + + // update next lookup position and length + offset = naluOffset + 1; // skip one byte of NAL unit type + left = length - offset; + } + + if (naluStream->naluCount > 0) { + naluStream->naluLen = length - naluStream->naluOffset; + memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t)); + // ignore return value, 
either DECODE_SUCCESS or DECODE_FRAME_DROPPED + copyNaluHeader(stream, naluStream); + return DECODE_SUCCESS; + } + + LOGW("number of valid NALU is 0!"); + return DECODE_SUCCESS; +} + diff --git a/videodecoder/VideoDecoderAVCSecure.h b/videodecoder/VideoDecoderAVCSecure.h new file mode 100644 index 0000000..af5ae44 --- /dev/null +++ b/videodecoder/VideoDecoderAVCSecure.h @@ -0,0 +1,83 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef VIDEO_DECODER_AVC_SECURE_H_ +#define VIDEO_DECODER_AVC_SECURE_H_ + +#include "VideoDecoderAVC.h" + + +class VideoDecoderAVCSecure : public VideoDecoderAVC { +public: + VideoDecoderAVCSecure(const char *mimeType); + virtual ~VideoDecoderAVCSecure(); + + virtual Decode_Status start(VideoConfigBuffer *buffer); + virtual void stop(void); + + // data in the decoded buffer is all encrypted. 
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer); + +private: + enum { + MAX_SLICE_HEADER_SIZE = 30, + MAX_NALU_HEADER_BUFFER = 8192, + MAX_NALU_NUMBER = 400, // > 4096/12 + }; + + // Information of Network Abstraction Layer Unit + struct NaluInfo { + int32_t naluOffset; // offset of NAL unit in the firewalled buffer + int32_t naluLen; // length of NAL unit + int32_t naluHeaderLen; // length of NAL unit header + }; + + struct NaluMetadata { + NaluInfo *naluInfo; + int32_t naluNumber; // number of NAL units + }; + + struct NaluByteStream { + int32_t naluOffset; + int32_t naluLen; + int32_t streamPos; + uint8_t *byteStream; // 4 bytes of naluCount, 4 bytes of naluOffset, 4 bytes of naulLen, 4 bytes of naluHeaderLen, followed by naluHeaderData + int32_t naluCount; + }; + + virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); + int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length); + Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream); + Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream); + +private: + NaluMetadata mMetadata; + NaluByteStream mByteStream; + uint8_t *mNaluHeaderBuffer; + uint8_t *mInputBuffer; +}; + + + +#endif /* VIDEO_DECODER_AVC_SECURE_H_ */ diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 3e816b8..29c7cec 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -78,7 +78,7 @@ VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type) memset(&mVideoFormatInfo, 0, sizeof(VideoFormatInfo)); memset(&mConfigBuffer, 0, sizeof(mConfigBuffer)); - for (int i =0; i < MAX_GRAPHIC_NUM; i++) { + for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) { mSignalBufferPre[i] = NULL; } pthread_mutex_init(&mLock, NULL); @@ -471,7 +471,7 @@ bool VideoDecoderBase::checkBufferAvail(void) { if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) == 0) { return true; } - for (int i = 0; i < MAX_GRAPHIC_NUM; i++) { + for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) { if (mSignalBufferPre[i] != NULL) { return true; } @@ -1153,7 +1153,7 @@ void VideoDecoderBase::initSurfaceBuffer(bool reset) { mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = NULL; mSurfaceBuffers[i].renderBuffer.renderDone = true; } - mSurfaceBuffers[i].renderBuffer.acquirePos = i; + mSurfaceBuffers[i].renderBuffer.graphicBufferIndex = i; } if (useGraphicBuffer && reset) { @@ -1172,7 +1172,7 @@ Decode_Status VideoDecoderBase::signalRenderDone(void * graphichandler) { if (!mInitialized) { mSignalBufferPre[mSignalBufferSize++] = graphichandler; VTRACE("SignalRenderDoneFlag mInitialized = false graphichandler = %p, mSignalBufferSize = %d", graphichandler, mSignalBufferSize); - if (mSignalBufferSize > MAX_GRAPHIC_NUM) + if (mSignalBufferSize > MAX_GRAPHIC_BUFFER_NUM) return DECODE_INVALID_DATA; } else { if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) { diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index c80367a..ece3a44 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -138,7 +138,7 @@ private: int32_t mNextOutputPOC; // Picture order count of next output _vbp_parser_type mParserType; void *mParserHandle; - void *mSignalBufferPre[MAX_GRAPHIC_NUM]; + void *mSignalBufferPre[MAX_GRAPHIC_BUFFER_NUM]; uint32 mSignalBufferSize; protected: diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index dc72a6f..54ce618 100644 
--- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -28,7 +28,6 @@ #include #include - // format specific data, for future extension. struct VideoExtensionBuffer { int32_t extType; @@ -100,6 +99,9 @@ typedef enum { // indicate whether it is a sync frame in container IS_SYNC_FRAME = 0x4000, + // indicate whether video decoder buffer contains secure data + IS_SECURE_DATA = 0x8000, + } VIDEO_BUFFER_FLAG; struct VideoDecodeBuffer { @@ -111,7 +113,7 @@ }; -#define MAX_GRAPHIC_NUM 16 + 1 + 11 // max DPB + 1 + AVC_EXTRA_NUM +#define MAX_GRAPHIC_BUFFER_NUM (16 + 1 + 11) // max DPB + 1 + AVC_EXTRA_NUM struct VideoConfigBuffer { uint8_t *data; @@ -122,7 +124,7 @@ int32_t surfaceNumber; VAProfile profile; uint32_t flag; - void *graphicBufferHandler[ MAX_GRAPHIC_NUM ]; + void *graphicBufferHandler[MAX_GRAPHIC_BUFFER_NUM]; uint32_t graphicBufferStride; uint32_t graphicBufferColorFormat; uint32_t graphicBufferWidth; @@ -140,7 +142,7 @@ struct VideoRenderBuffer { mutable volatile bool renderDone; // indicated whether frame is rendered, this must be set to false by the client of this library once // surface is rendered. Not setting this flag will lead to DECODE_NO_SURFACE error. void * graphicBufferHandle; - int32_t acquirePos; //the acquirepos in graphichandle array + int32_t graphicBufferIndex; //the index in graphichandle array uint32_t flag; mutable volatile bool driverRenderDone; VideoFrameRawData *rawData; diff --git a/videodecoder/VideoDecoderHost.cpp b/videodecoder/VideoDecoderHost.cpp index b3e3039..4e6b1b4 100644 --- a/videodecoder/VideoDecoderHost.cpp +++ b/videodecoder/VideoDecoderHost.cpp @@ -26,6 +26,7 @@ #include "VideoDecoderMPEG4.h" #include "VideoDecoderAVC.h" #include "VideoDecoderPAVC.h" +#include "VideoDecoderAVCSecure.h" #include "VideoDecoderHost.h" #include "VideoDecoderTrace.h" #include @@ -53,6 +54,9 @@ IVideoDecoder* createVideoDecoder(const char* mimeType) { } else if (strcasecmp(mimeType, "video/pavc") == 0) { VideoDecoderAVC *p = new VideoDecoderPAVC(mimeType); return (IVideoDecoder *)p; + } else if (strcasecmp(mimeType, "video/avc-secure") == 0) { + VideoDecoderAVC *p = new VideoDecoderAVCSecure(mimeType); + return (IVideoDecoder *)p; } else { ETRACE("Unknown mime type: %s", mimeType); } -- cgit v1.2.3 From c0b3c18befef72a81c79b871a4f800c0a6430e74 Mon Sep 17 00:00:00 2001 From: wgu11 Date: Wed, 2 May 2012 14:27:46 +0800 Subject: libmix: add a parameter check to avoid a mediaserver crash BZ: 29342 When the MPEG4 parser gets invalid parameters, add a check and return an error to OMX to avoid a mediaserver crash. Change-Id: I3c0f71b036bd8c630e245366a5fe1d6390695317 Signed-off-by: wgu11 Reviewed-on: http://android.intel.com:8080/46945 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderMPEG4.cpp | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index 050ab9a..2ddc5ff 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -86,6 +86,14 @@ Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) { (void**)&data); CHECK_STATUS("VideoDecoderBase::parseBuffer"); + // When the MPEG4 parser gets the invalid parameters, add the check
+ if (data && data->picture_data && + (data->picture_data->picture_param.vop_width == 0 + || data->picture_data->picture_param.vop_height == 0)) { + return DECODE_FAIL; + } + if (!mVAStarted) { status = startVA(data); CHECK_STATUS("startVA"); -- cgit v1.2.3 From 2725fd2d4628331d10d9139b56f967b63b1e252d Mon Sep 17 00:00:00 2001 From: Manjunath Date: Thu, 10 May 2012 18:56:57 +0530 Subject: [Redridge] Support for WMA9 audio decoder BZ: 32562 This patch contains changes related to AsfStreamParser to enable seek for wma/wmv files. Signed-off-by: Manjunath Change-Id: I7359a77289844261e18da9c4fb9e7ea2bef47689 Reviewed-on: http://android.intel.com:8080/48247 Reviewed-by: Sidorov, Sergey Reviewed-by: B S, ManjunathX Reviewed-by: Shashkina, Julia Reviewed-by: Kandasamy, Muthukumar Reviewed-by: Krishna, Samaga Reviewed-by: Gupta, ArvindX K Reviewed-by: Hibare, PramodX Tested-by: Hibare, PramodX Reviewed-by: Sikkandar D, Madar Reviewed-by: buildbot Tested-by: buildbot --- asfparser/AsfStreamParser.cpp | 57 ++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 53 insertions(+), 4 deletions(-) diff --git a/asfparser/AsfStreamParser.cpp b/asfparser/AsfStreamParser.cpp index 647b2e8..f7e611c 100644 --- a/asfparser/AsfStreamParser.cpp +++ b/asfparser/AsfStreamParser.cpp @@ -37,7 +37,7 @@ AsfStreamParser::AsfStreamParser(void) mHeaderParsed(false) { mHeaderParser = new AsfHeaderParser; mDataParser = new AsfDataParser; - mSimpleIndexParser = new AsfSimpleIndexParser; + mSimpleIndexParser = NULL; } AsfStreamParser::~AsfStreamParser(void) { @@ -160,10 +160,27 @@ int AsfStreamParser::parseSimpleIndexObject(uint8_t *buffer, uint32_t size) { return ASF_PARSER_FAILED; } - return mSimpleIndexParser->parse(buffer, size); + if (mSimpleIndexParser) { + delete mSimpleIndexParser; + mSimpleIndexParser = NULL; + } + + mSimpleIndexParser = new AsfSimpleIndexParser; + + if (mSimpleIndexParser == NULL) return ASF_PARSER_FAILED; + + if (ASF_PARSER_SUCCESS != mSimpleIndexParser->parse(buffer, size)) { + delete mSimpleIndexParser; + mSimpleIndexParser = NULL; + return ASF_PARSER_FAILED; + } + + return ASF_PARSER_SUCCESS; } AsfSimpleIndexInfo* AsfStreamParser::getIndexInfo() const { + if (!mSimpleIndexParser) return NULL; + return mSimpleIndexParser->getIndexInfo(); } @@ -180,9 +197,41 @@ int AsfStreamParser::seek( return ASF_PARSER_FAILED; } - seekTime += mHeaderParser->getPreroll()*ASF_SCALE_MS_TO_100NANOSEC; //add preroll start time + if (mSimpleIndexParser) { + seekTime += mHeaderParser->getPreroll()*ASF_SCALE_MS_TO_100NANOSEC; //add preroll start time + return mSimpleIndexParser->seek(seekTime, nextSync, packetNumber, targetTime); + } + else { + // no index object, need to seek using average bitrate method + + if (mHeaderParser->hasVideo()){ + return ASF_PARSER_FAILED; + } + + if (!mHeaderParser->hasAudio()) { + return ASF_PARSER_FAILED; + } + + int totalByteRate=0; + AsfAudioStreamInfo* audioInfo = mHeaderParser->getAudioInfo(); + while (audioInfo != NULL) { + totalByteRate += audioInfo->avgByteRate; + audioInfo = audioInfo->next; + } + + if (totalByteRate == 0) { + return ASF_PARSER_FAILED; + } + + uint32_t packetSize = mHeaderParser->getDataPacketSize(); + if (packetSize <= 0) { + return ASF_PARSER_FAILED; + } - return mSimpleIndexParser->seek(seekTime, nextSync, packetNumber, targetTime); + packetNumber = seekTime/10000000 * totalByteRate / packetSize; + targetTime = seekTime; + return ASF_PARSER_SUCCESS; + } } uint32_t AsfStreamParser::getMaxObjectSize() { -- cgit v1.2.3 From 
9d3f84dfa18e028cf375fe21cbc0993abc9bbdcd Mon Sep 17 00:00:00 2001 From: msun9 Date: Sat, 12 May 2012 00:39:38 +0800 Subject: libmix: mix_vbp - remove the handler of pInfo->img.recovery_point_found equal to 0 BZ: 34362 If a clip has only one IDR frame, frames would not be parsed after resuming, or after restarting playback from the beginning, because wl_err_curr would be set to VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE. Change-Id: I503edf18398d204ac0edfcb89ce31776a9ce787b Signed-off-by: msun9 Reviewed-on: http://android.intel.com:8080/48305 Reviewed-by: Qiu, Junhai Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c index 5936014..4cc58d3 100755 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c @@ -113,12 +113,14 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) +#ifndef VBP if (pInfo->img.recovery_point_found == 0) { pInfo->img.structure = FRAME; pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); break; } +#endif //////////////////////////////////////////////////////////////////////////// // Step 2: Parsing slice header -- cgit v1.2.3 From 3f07377d11403431ac9c721bd306479c0117dfd8 Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Mon, 14 May 2012 05:35:23 -0400 Subject: libmix: avoid passing invalid surface to video driver BZ: 36120 Check the surface status in the MW to avoid passing an invalid surface to the video driver. Signed-off-by: Weian Chen Change-Id: I37dfe6647fe728cf6a09c4981a462a90e80462a0 Reviewed-on: http://android.intel.com:8080/48613 Reviewed-by: Chen, Weian Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 29c7cec..fe43045 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -1196,8 +1196,13 @@ void VideoDecoderBase::querySurfaceRenderStatus(VideoSurfaceBuffer* surface) { VASurfaceStatus surfStat = VASurfaceReady; VAStatus vaStat = VA_STATUS_SUCCESS; + if (!surface) { + LOGW("SurfaceBuffer not ready yet"); + return; + } surface->renderBuffer.driverRenderDone = true; - if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) { + if (surface->renderBuffer.surface != VA_INVALID_SURFACE && + (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) { vaStat = vaQuerySurfaceStatus(mVADisplay, surface->renderBuffer.surface, &surfStat); -- cgit v1.2.3 From 9b0d65113ab1ea0ddbd8ce98788ee60311b8d046 Mon Sep 17 00:00:00 2001 From: Andy Qiu Date: Thu, 17 May 2012 09:19:19 -0700 Subject: Ensure video decoding completely stops when pipeline is being flushed. BZ: 35857 When the pipeline is flushed, video decoding needs to stop completely.
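The one-line fix in the hunk below waits for the hardware to finish with the dropped picture before its surface is recycled; the pattern, slightly simplified from the diff:

    // From endDecodingFrame(), when asked to drop the decoded picture:
    // make sure the GPU is done with the surface before it is recycled.
    if (dropFrame) {
        vaStatus = vaSyncSurface(mVADisplay, mAcquiredBuffer->renderBuffer.surface);
        releaseSurfaceBuffer();   // now safe to return the surface to the pool
        goto exit;
    }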
Change-Id: I15a418dfa4048e24a1db405af1f569f64da05d18 Signed-off-by: Andy Qiu Reviewed-on: http://android.intel.com:8080/49253 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index fe43045..36d7d04 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -670,6 +670,7 @@ Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) { if (dropFrame) { // we are asked to drop this decoded picture VTRACE("Frame dropped in endDecodingFrame"); + vaStatus = vaSyncSurface(mVADisplay, mAcquiredBuffer->renderBuffer.surface); releaseSurfaceBuffer(); goto exit; } -- cgit v1.2.3 From 196b6e20139b9966f4a2d95f8bdf086cb4d19297 Mon Sep 17 00:00:00 2001 From: Marie-Celine Dubut Date: Tue, 29 May 2012 12:53:30 +0200 Subject: Revert "Ensure video decoding completely stops when pipeline is being flushed." BZ: 35857 This reverts commit bdbe4bf7af2d7498f5209f98e0c0ec13553d6cba. This reverts patch http://android.intel.com:8080/#/c/49253/ Change-Id: Ife1bc668c5805c93a07cd82d3abf60072289861d Signed-off-by: Marie-Celine Dubut Reviewed-on: http://android.intel.com:8080/50509 --- videodecoder/VideoDecoderBase.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 36d7d04..fe43045 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -670,7 +670,6 @@ Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) { if (dropFrame) { // we are asked to drop this decoded picture VTRACE("Frame dropped in endDecodingFrame"); - vaStatus = vaSyncSurface(mVADisplay, mAcquiredBuffer->renderBuffer.surface); releaseSurfaceBuffer(); goto exit; } -- cgit v1.2.3 From ea0f3b778b051679298f9d03e6fc41ca37ff2bea Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Fri, 25 May 2012 10:52:59 -0400 Subject: [PORT FROM R3] libmix: set timestamp to video driver BZ: 37057 Set the timestamp to the video driver through a private API. Signed-off-by: Weian Chen Change-Id: I096c7d91ce8f9fdebaa47894624866dc25bfa2f8 Orig-Change-Id: I4ae3d6aa57cdc9c77f264f99cd66cd7a32614db6 Reviewed-on: http://android.intel.com:8080/50119 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index fe43045..e5027b8 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -39,7 +39,6 @@ // POC: 4P, 8P, 10P, 6B and mNextOutputPOC = 5 #define OUTPUT_WINDOW_SIZE 8 - VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type) : mDisplay(NULL), mVADisplay(NULL), @@ -210,6 +209,7 @@ const VideoFormatInfo* VideoDecoderBase::getFormatInfo(void) { } const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { + VAStatus vaStatus; if (mVAStarted == false) { return NULL; } @@ -232,6 +232,7 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { if (mOutputHead == NULL) { mOutputTail = NULL; } + vaStatus = vaSetTimestampForSurface(mVADisplay, outputByPos->renderBuffer.surface, outputByPos->renderBuffer.timeStamp); return &(outputByPos->renderBuffer); } @@ -277,6 +278,7 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { } } //VTRACE("Output POC %d
for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6); + vaStatus = vaSetTimestampForSurface(mVADisplay, output->renderBuffer.surface, output->renderBuffer.timeStamp); return &(output->renderBuffer); } -- cgit v1.2.3 From 34d153ae59c521639457fd12cb8b49e390674779 Mon Sep 17 00:00:00 2001 From: tianmi Date: Fri, 25 May 2012 17:09:54 +0800 Subject: [PORT FROM R3] asfparser: Add sanity check when entering getMaxObjectSize() BZ: 38144 Add a sanity check to avoid a crash. Change-Id: I9bdb0143b5f37ec02ca73b3a5f3b9b3fe34843f3 Signed-off-by: tianmi Reviewed-on: http://android.intel.com:8080/50173 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- asfparser/AsfStreamParser.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/asfparser/AsfStreamParser.cpp b/asfparser/AsfStreamParser.cpp index f7e611c..f2b45f4 100644 --- a/asfparser/AsfStreamParser.cpp +++ b/asfparser/AsfStreamParser.cpp @@ -235,6 +235,7 @@ uint32_t AsfStreamParser::getMaxObjectSize() { + if (!mSimpleIndexParser) return NULL; return mSimpleIndexParser->getMaximumPacketCount() * mDataPacketSize; } -- cgit v1.2.3 From d4e5ba8dcfd6b1f8e3a5b4dfda68799e5a2924c5 Mon Sep 17 00:00:00 2001 From: Andy Qiu Date: Thu, 17 May 2012 09:19:19 -0700 Subject: [PORT FROM R3] Ensure video decoding completely stops when pipeline is being flushed. BZ: 35857 When the pipeline is flushed, video decoding needs to stop completely. Change-Id: I8bb596bafdfe58858534921708523daf3a9f006c Signed-off-by: Andy Qiu Reviewed-on: http://android.intel.com:8080/50574 Reviewed-by: Poornachandran, Rajesh Reviewed-by: Saffores, Ryan D Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index e5027b8..ba38e11 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -672,6 +672,7 @@ Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) { if (dropFrame) { // we are asked to drop this decoded picture VTRACE("Frame dropped in endDecodingFrame"); + vaStatus = vaSyncSurface(mVADisplay, mAcquiredBuffer->renderBuffer.surface); releaseSurfaceBuffer(); goto exit; } -- cgit v1.2.3 From 973d5c3158dbeb2c5c178c2e0e391f4ac2f699ae Mon Sep 17 00:00:00 2001 From: fxiao4X Date: Wed, 30 May 2012 16:04:55 +0800 Subject: [PORT FROM R3] Avoid array out-of-boundary access and make sure the lock is unlocked before the function returns BZ: 38732 Refine these in the signalRenderDone function (see the RAII sketch after this message).
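The hunks below add an explicit pthread_mutex_unlock() on each early-return path. A common alternative (an editorial sketch assuming C++ and the member names from the diff, not what the patch itself does) is an RAII guard so every return path unlocks automatically:

    // Hypothetical RAII alternative to the explicit unlocks added below.
    class ScopedLock {
    public:
        explicit ScopedLock(pthread_mutex_t *m) : mMutex(m) { pthread_mutex_lock(mMutex); }
        ~ScopedLock() { pthread_mutex_unlock(mMutex); }  // runs on every return path
    private:
        pthread_mutex_t *mMutex;
    };

    Decode_Status VideoDecoderBase::signalRenderDone(void *graphichandler) {
        ScopedLock lock(&mLock);  // no manual unlock needed below
        if (!mInitialized) {
            if (mSignalBufferSize >= MAX_GRAPHIC_BUFFER_NUM) {
                return DECODE_INVALID_DATA;
            }
            // ...
        }
        // ...
        return DECODE_SUCCESS;
    }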
Change-Id: I22cd14471071537821a9ea4f030358e6619646a5 Signed-off-by: fxiao4X Reviewed-on: http://android.intel.com:8080/50612 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index ba38e11..10a456f 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -1173,12 +1173,15 @@ Decode_Status VideoDecoderBase::signalRenderDone(void * graphichandler) { pthread_mutex_lock(&mLock); int i = 0; if (!mInitialized) { + if (mSignalBufferSize >= MAX_GRAPHIC_BUFFER_NUM) { + pthread_mutex_unlock(&mLock); + return DECODE_INVALID_DATA; + } mSignalBufferPre[mSignalBufferSize++] = graphichandler; VTRACE("SignalRenderDoneFlag mInitialized = false graphichandler = %p, mSignalBufferSize = %d", graphichandler, mSignalBufferSize); - if (mSignalBufferSize > MAX_GRAPHIC_BUFFER_NUM) - return DECODE_INVALID_DATA; } else { if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) { + pthread_mutex_unlock(&mLock); return DECODE_SUCCESS; } for (i = 0; i < mNumSurfaces; i++) { -- cgit v1.2.3 From 6822e8139209dc169bf2f62824d421292fa09f63 Mon Sep 17 00:00:00 2001 From: hding3 Date: Mon, 11 Jun 2012 21:18:05 +0800 Subject: [video-libmix] porting MRFL branch to ICS mainline BZ: 35390 porting MRFL branch to ICS mainline. Change-Id: I1d250e2ebc62ac79dd3ab770d9f962e2842d8598 Signed-off-by: hding3 Reviewed-on: http://android.intel.com:8080/52340 Reviewed-by: buildbot Tested-by: buildbot --- mix_video/src/mixvideo.cpp | 4 +- mix_video/src/mixvideoformat_h264.cpp | 6 +- mix_video/src/mixvideoformat_mp42.cpp | 4 +- mix_video/src/mixvideoformat_vc1.cpp | 6 +- mix_video/src/mixvideoformatenc_h263.cpp | 8 +- mix_video/src/mixvideoformatenc_h264.cpp | 69 +++++++--- mix_video/src/mixvideoformatenc_h264.h | 1 + mix_video/src/mixvideoformatenc_mpeg4.cpp | 8 +- mix_video/src/mixvideoformatenc_preview.cpp | 8 +- videodecoder/VideoDecoderBase.cpp | 53 +++----- videodecoder/VideoDecoderBase.h | 4 +- videoencoder/Android.mk | 2 +- videoencoder/VideoEncoderAVC.cpp | 86 +++++++++--- videoencoder/VideoEncoderBase.cpp | 202 +++++++++++++++++----------- videoencoder/VideoEncoderBase.h | 3 + 15 files changed, 287 insertions(+), 177 deletions(-) diff --git a/mix_video/src/mixvideo.cpp b/mix_video/src/mixvideo.cpp index c240350..d21c5c8 100644 --- a/mix_video/src/mixvideo.cpp +++ b/mix_video/src/mixvideo.cpp @@ -2041,12 +2041,12 @@ MIX_RESULT mix_video_get_new_userptr_for_surface_buffer_default (MixVideo * mix, LOG_E("Usr created Surface count is exceed max number!\n"); goto cleanup; } - +#if 0 //jgl va_status = vaCreateSurfacesForUserPtr ( priv->va_display, width, height, VA_RT_FORMAT_YUV420, 1, &surface, expected_size, VA_FOURCC_NV12, width, width, width, 0, width * height, width * height); - +#endif if (va_status != VA_STATUS_SUCCESS) { LOG_E("Failed vaCreateSurfaces\n"); diff --git a/mix_video/src/mixvideoformat_h264.cpp b/mix_video/src/mixvideoformat_h264.cpp index 7a43869..8aa87fe 100755 --- a/mix_video/src/mixvideoformat_h264.cpp +++ b/mix_video/src/mixvideoformat_h264.cpp @@ -594,11 +594,11 @@ MIX_RESULT MixVideoFormat_H264::_initialize_va(vbp_data_h264 *data) { vret = vaCreateSurfaces( this->va_display, + VA_RT_FORMAT_YUV420, this->picture_width, this->picture_height, - VA_RT_FORMAT_YUV420, - this->va_num_surfaces, - this->va_surfaces, NULL, 0); + this->va_surfaces , + this->va_num_surfaces, 
NULL, 0); if (vret != VA_STATUS_SUCCESS) { ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; diff --git a/mix_video/src/mixvideoformat_mp42.cpp b/mix_video/src/mixvideoformat_mp42.cpp index 627401e..8a90e30 100644 --- a/mix_video/src/mixvideoformat_mp42.cpp +++ b/mix_video/src/mixvideoformat_mp42.cpp @@ -178,11 +178,11 @@ MIX_RESULT MixVideoFormat_MP42::_initialize_va(vbp_data_mp42 *data) { vret = vaCreateSurfaces( this->va_display, + VA_RT_FORMAT_YUV420, this->picture_width, this->picture_height, - VA_RT_FORMAT_YUV420, + this->va_surfaces , this->va_num_surfaces, - this->va_surfaces, NULL, 0); if (vret != VA_STATUS_SUCCESS) { diff --git a/mix_video/src/mixvideoformat_vc1.cpp b/mix_video/src/mixvideoformat_vc1.cpp index 82b5a78..eeb5bf9 100644 --- a/mix_video/src/mixvideoformat_vc1.cpp +++ b/mix_video/src/mixvideoformat_vc1.cpp @@ -257,11 +257,11 @@ MIX_RESULT MixVideoFormat_VC1::_initialize_va(vbp_data_vc1 *data) { vret = vaCreateSurfaces( this->va_display, + VA_RT_FORMAT_YUV420, this->picture_width, this->picture_height, - VA_RT_FORMAT_YUV420, - this->va_num_surfaces, - this->va_surfaces, NULL , 0); + this->va_surfaces , + this->va_num_surfaces, NULL , 0); if (vret != VA_STATUS_SUCCESS) { ret = MIX_RESULT_FAIL; diff --git a/mix_video/src/mixvideoformatenc_h263.cpp b/mix_video/src/mixvideoformatenc_h263.cpp index a2f9854..7ba2c05 100644 --- a/mix_video/src/mixvideoformatenc_h263.cpp +++ b/mix_video/src/mixvideoformatenc_h263.cpp @@ -473,9 +473,9 @@ MIX_RESULT MixVideoFormatEnc_H263::Initialize( LOG_V( "vaCreateSurfaces\n"); - va_status = vaCreateSurfaces(va_display, this->picture_width, - this->picture_height, this->va_format, - normal_surfaces_cnt, surfaces, NULL, 0); + va_status = vaCreateSurfaces(va_display, this->va_format, + this->picture_width, this->picture_height, + surfaces, normal_surfaces_cnt, NULL, 0); if (va_status != VA_STATUS_SUCCESS) { @@ -503,6 +503,7 @@ MIX_RESULT MixVideoFormatEnc_H263::Initialize( switch (this->buffer_mode) { case MIX_BUFFER_UPSTREAM_ALLOC_CI: { + #if 0 for (index = 0; index < this->shared_surfaces_cnt; index++) { va_status = vaCreateSurfaceFromCIFrame(va_display, @@ -517,6 +518,7 @@ MIX_RESULT MixVideoFormatEnc_H263::Initialize( this->surfaces[index] = this->shared_surfaces[index]; } + #endif } break; case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: diff --git a/mix_video/src/mixvideoformatenc_h264.cpp b/mix_video/src/mixvideoformatenc_h264.cpp index f227c8d..34b30ba 100644 --- a/mix_video/src/mixvideoformatenc_h264.cpp +++ b/mix_video/src/mixvideoformatenc_h264.cpp @@ -14,6 +14,7 @@ #include "mixvideoformatenc_h264.h" #include "mixvideoconfigparamsenc_h264.h" #include +#include #undef SHOW_SRC @@ -546,9 +547,9 @@ MixVideoFormatEnc_H264::Initialize( LOG_V( "vaCreateSurfaces\n"); - va_status = vaCreateSurfaces(va_display, this->picture_width, - this->picture_height, this->va_format, - normal_surfaces_cnt, surfaces, NULL, 0); + va_status = vaCreateSurfaces(va_display, this->va_format, + this->picture_width, this->picture_height, + surfaces,normal_surfaces_cnt, NULL, 0); if (va_status != VA_STATUS_SUCCESS) { @@ -579,8 +580,8 @@ MixVideoFormatEnc_H264::Initialize( switch (this->buffer_mode) { case MIX_BUFFER_UPSTREAM_ALLOC_CI: { +#if 0 for (index = 0; index < this->shared_surfaces_cnt; index++) { - va_status = vaCreateSurfaceFromCIFrame(va_display, (ulong) (ci_info->ci_frame_id[index]), &this->shared_surfaces[index]); @@ -593,7 +594,9 @@ MixVideoFormatEnc_H264::Initialize( this->surfaces[index] = this->shared_surfaces[index]; } +#endif } + break; case 
MIX_BUFFER_UPSTREAM_ALLOC_V4L2: /*To be develped*/ @@ -2668,6 +2671,8 @@ MIX_RESULT MixVideoFormatEnc_H264::_send_seq_params () { VAStatus va_status; VAEncSequenceParameterBufferH264 h264_seq_param; + VAEncMiscParameterRateControl rc_misc_param; + VAEncMiscParameterFrameRate frame_rate_param; int level; LOG_V( "Begin\n\n"); @@ -2685,13 +2690,13 @@ MixVideoFormatEnc_H264::_send_seq_params () { h264_seq_param.level_idc = level; h264_seq_param.bits_per_second = this->bitrate; - h264_seq_param.frame_rate = + frame_rate_param.framerate = (unsigned int) (this->frame_rate_num + this->frame_rate_denom /2 ) / this->frame_rate_denom; - h264_seq_param.initial_qp = this->initial_qp; - h264_seq_param.min_qp = this->min_qp; - h264_seq_param.basic_unit_size = this->basic_unit_size; //for rate control usage + rc_misc_param.initial_qp = this->initial_qp; + rc_misc_param.min_qp = this->min_qp; + rc_misc_param.basic_unit_size = this->basic_unit_size; //for rate control usage h264_seq_param.intra_period = this->intra_period; - h264_seq_param.vui_flag = this->vui_flag; + h264_seq_param.vui_parameters_present_flag = this->vui_flag; //h264_seq_param.vui_flag = 248; //h264_seq_param.seq_parameter_set_id = 176; @@ -2716,15 +2721,15 @@ MixVideoFormatEnc_H264::_send_seq_params () { LOG_I( "bitrate = %d\n", h264_seq_param.bits_per_second); LOG_I( "frame_rate = %d\n", - h264_seq_param.frame_rate); + frame_rate_param.frame_rate); LOG_I( "initial_qp = %d\n", - h264_seq_param.initial_qp); + rc_misc_param.initial_qp); LOG_I( "min_qp = %d\n", - h264_seq_param.min_qp); + rc_misc_param.min_qp); LOG_I( "basic_unit_size = %d\n", - h264_seq_param.basic_unit_size); + rc_misc_param.basic_unit_size); LOG_I( "vui_flag = %d\n\n", - h264_seq_param.vui_flag); + h264_seq_param.vui_parameters_present_flag); va_status = vaCreateBuffer(this->va_display, this->va_context, VAEncSequenceParameterBufferType, @@ -2738,6 +2743,18 @@ MixVideoFormatEnc_H264::_send_seq_params () { return MIX_RESULT_FAIL; } + va_status = vaCreateBuffer(this->va_display, this->va_context, + VAEncMiscParameterBufferType, + sizeof(rc_misc_param), + 1, &h264_seq_param, + &this->rc_param_buf); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaCreateBuffer\n"); + return MIX_RESULT_FAIL; + } + va_status = vaRenderPicture(this->va_display, this->va_context, &this->seq_param_buf, 1); if (va_status != VA_STATUS_SUCCESS) @@ -2746,7 +2763,14 @@ MixVideoFormatEnc_H264::_send_seq_params () { "Failed to vaRenderPicture\n"); return MIX_RESULT_FAIL; } - + va_status = vaRenderPicture(this->va_display, this->va_context, + &this->rc_param_buf, 1); + if (va_status != VA_STATUS_SUCCESS) + { + LOG_E( + "Failed to vaRenderPicture\n"); + return MIX_RESULT_FAIL; + } return MIX_RESULT_SUCCESS; } @@ -2761,29 +2785,30 @@ MixVideoFormatEnc_H264::_send_picture_parameter () { /*set picture params for HW*/ - h264_pic_param.reference_picture = this->ref_frame->frame_id; - h264_pic_param.reconstructed_picture = this->rec_frame->frame_id; + h264_pic_param.ReferenceFrames[0].picture_id= this->ref_frame->frame_id; + h264_pic_param.CurrPic.picture_id= this->rec_frame->frame_id; h264_pic_param.coded_buf = this->coded_buf[this->coded_buf_index]; - h264_pic_param.picture_width = this->picture_width; - h264_pic_param.picture_height = this->picture_height; + //h264_pic_param.picture_width = this->picture_width; + //h264_pic_param.picture_height = this->picture_height; h264_pic_param.last_picture = 0; LOG_V( "======h264 picture params======\n"); LOG_I( "reference_picture = 0x%08x\n", - 
h264_pic_param.reference_picture); + h264_pic_param.ReferenceFrames[0].picture_id); LOG_I( "reconstructed_picture = 0x%08x\n", - h264_pic_param.reconstructed_picture); + h264_pic_param.CurrPic.picture_id); LOG_I( "coded_buf_index = %d\n", this->coded_buf_index); LOG_I( "coded_buf = 0x%08x\n", h264_pic_param.coded_buf); + /* LOG_I( "picture_width = %d\n", h264_pic_param.picture_width); LOG_I( "picture_height = %d\n\n", h264_pic_param.picture_height); - + */ va_status = vaCreateBuffer(this->va_display, this->va_context, VAEncPictureParameterBufferType, sizeof(h264_pic_param), diff --git a/mix_video/src/mixvideoformatenc_h264.h b/mix_video/src/mixvideoformatenc_h264.h index 29871ad..c5e4412 100644 --- a/mix_video/src/mixvideoformatenc_h264.h +++ b/mix_video/src/mixvideoformatenc_h264.h @@ -72,6 +72,7 @@ public: VABufferID coded_buf[2]; VABufferID last_coded_buf; VABufferID seq_param_buf; + VABufferID rc_param_buf; VABufferID pic_param_buf; VABufferID slice_param_buf; //VASurfaceID * ci_shared_surfaces; diff --git a/mix_video/src/mixvideoformatenc_mpeg4.cpp b/mix_video/src/mixvideoformatenc_mpeg4.cpp index 4dcf9c6..a608ef5 100644 --- a/mix_video/src/mixvideoformatenc_mpeg4.cpp +++ b/mix_video/src/mixvideoformatenc_mpeg4.cpp @@ -431,9 +431,9 @@ MIX_RESULT MixVideoFormatEnc_MPEG4::Initialize( LOG_V( "vaCreateSurfaces\n"); va_status = vaCreateSurfaces( - va_display, this->picture_width, - this->picture_height, this->va_format, - normal_surfaces_cnt, surfaces, NULL, 0); + va_display, this->va_format, + this->picture_width, this->picture_height, + surfaces, normal_surfaces_cnt, NULL, 0); if (va_status != VA_STATUS_SUCCESS) { LOG_E("Failed vaCreateSurfaces\n"); @@ -453,6 +453,7 @@ MIX_RESULT MixVideoFormatEnc_MPEG4::Initialize( switch (this->buffer_mode) { case MIX_BUFFER_UPSTREAM_ALLOC_CI: { + #if 0 for (index = 0; index < this->shared_surfaces_cnt; index++) { va_status = vaCreateSurfaceFromCIFrame( va_display, @@ -465,6 +466,7 @@ MIX_RESULT MixVideoFormatEnc_MPEG4::Initialize( } this->surfaces[index] = this->shared_surfaces[index]; } + #endif } break; case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: diff --git a/mix_video/src/mixvideoformatenc_preview.cpp b/mix_video/src/mixvideoformatenc_preview.cpp index a1cdcbb..e1b7be9 100644 --- a/mix_video/src/mixvideoformatenc_preview.cpp +++ b/mix_video/src/mixvideoformatenc_preview.cpp @@ -361,9 +361,9 @@ MIX_RESULT MixVideoFormatEnc_Preview::Initialize( LOG_V( "vaCreateSurfaces\n"); va_status = vaCreateSurfaces( - va_display, this->picture_width, - this->picture_height, this->va_format, - normal_surfaces_cnt, surfaces, NULL, 0 ); + va_display, this->va_format, + this->picture_width, this->picture_height, + surfaces, normal_surfaces_cnt, NULL, 0 ); if (va_status != VA_STATUS_SUCCESS) { LOG_E("Failed vaCreateSurfaces\n"); @@ -384,6 +384,7 @@ MIX_RESULT MixVideoFormatEnc_Preview::Initialize( switch (this->buffer_mode) { case MIX_BUFFER_UPSTREAM_ALLOC_CI: { + #if 0 for (index = 0; index < this->shared_surfaces_cnt; index++) { va_status = vaCreateSurfaceFromCIFrame( va_display, (ulong) (ci_info->ci_frame_id[index]), @@ -395,6 +396,7 @@ MIX_RESULT MixVideoFormatEnc_Preview::Initialize( } this->surfaces[index] = this->shared_surfaces[index]; } + #endif } break; case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 10a456f..69912da 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -66,7 +66,6 @@ VideoDecoderBase::VideoDecoderBase(const char *mimeType, 
_vbp_parser_type type) mOutputTail(NULL), mSurfaces(NULL), mVASurfaceAttrib(NULL), - mVAExternalMemoryBuffers(NULL), mSurfaceUserPtr(NULL), mSurfaceAcquirePos(0), mNextOutputPOC(MINIMUM_POC), @@ -776,49 +775,43 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { WTRACE("Surface is protected."); } if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) { - mVASurfaceAttrib = new VASurfaceAttrib; + mVASurfaceAttrib = new VASurfaceAttributeTPI; if (mVASurfaceAttrib == NULL) { return DECODE_MEMORY_FAIL; } - mVAExternalMemoryBuffers = new VAExternalMemoryBuffers; - if (mVAExternalMemoryBuffers == NULL) { - return DECODE_MEMORY_FAIL; - } - mVAExternalMemoryBuffers->buffers= (unsigned int *)malloc(sizeof(unsigned int)*mNumSurfaces); - if (mVAExternalMemoryBuffers->buffers == NULL) { + + mVASurfaceAttrib->buffers= (unsigned int *)malloc(sizeof(unsigned int)*mNumSurfaces); + if (mVASurfaceAttrib->buffers == NULL) { return DECODE_MEMORY_FAIL; } - mVAExternalMemoryBuffers->count = mNumSurfaces; - mVAExternalMemoryBuffers->luma_stride = mConfigBuffer.graphicBufferStride; - mVAExternalMemoryBuffers->pixel_format = mConfigBuffer.graphicBufferColorFormat; - mVAExternalMemoryBuffers->native_window = mConfigBuffer.nativeWindow; - mVAExternalMemoryBuffers->width = mVideoFormatInfo.surfaceWidth; - mVAExternalMemoryBuffers->height = mVideoFormatInfo.surfaceHeight; - mVAExternalMemoryBuffers->type = VAExternalMemoryAndroidGrallocBuffer; + mVASurfaceAttrib->count = mNumSurfaces; + mVASurfaceAttrib->luma_stride = mConfigBuffer.graphicBufferStride; + mVASurfaceAttrib->pixel_format = mConfigBuffer.graphicBufferColorFormat; + mVASurfaceAttrib->width = mVideoFormatInfo.width; + mVASurfaceAttrib->height = mVideoFormatInfo.height; + mVASurfaceAttrib->type = VAExternalMemoryAndroidGrallocBuffer; + mVASurfaceAttrib->reserved[0] = (unsigned int)mConfigBuffer.nativeWindow; + for (int i = 0; i < mNumSurfaces; i++) { - mVAExternalMemoryBuffers->buffers[i] = (unsigned int )mConfigBuffer.graphicBufferHandler[i]; + mVASurfaceAttrib->buffers[i] = (unsigned int )mConfigBuffer.graphicBufferHandler[i]; } - mVASurfaceAttrib->flags = VA_SURFACE_ATTRIB_SETTABLE; - mVASurfaceAttrib->type = VASurfaceAttribNativeHandle; - mVASurfaceAttrib->value.type = VAGenericValueTypePointer; - mVASurfaceAttrib->value.value.p_val = (void *)mVAExternalMemoryBuffers; - vaStatus = vaCreateSurfaces( + + vaStatus = vaCreateSurfacesWithAttribute( mVADisplay, mVideoFormatInfo.surfaceWidth, mVideoFormatInfo.surfaceHeight, format, mNumSurfaces, mSurfaces, - mVASurfaceAttrib, - 1); + mVASurfaceAttrib); } else { vaStatus = vaCreateSurfaces( mVADisplay, + format, mVideoFormatInfo.width, mVideoFormatInfo.height, - format, - mNumSurfaces, mSurfaces, + mNumSurfaces, NULL, 0); mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width; @@ -891,16 +884,6 @@ Decode_Status VideoDecoderBase::terminateVA(void) { mSurfaceBuffers = NULL; } - - if (mVAExternalMemoryBuffers) { - if (mVAExternalMemoryBuffers->buffers) { - free(mVAExternalMemoryBuffers->buffers); - mVAExternalMemoryBuffers->buffers = NULL; - } - delete mVAExternalMemoryBuffers; - mVAExternalMemoryBuffers = NULL; - } - if (mVASurfaceAttrib) { delete mVASurfaceAttrib; mVASurfaceAttrib = NULL; diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index ece3a44..8ebd067 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -26,6 +26,7 @@ #define VIDEO_DECODER_BASE_H_ #include +#include #include "VideoDecoderDefs.h" #include 
"VideoDecoderInterface.h" #include @@ -131,8 +132,7 @@ private: VideoSurfaceBuffer *mOutputHead; // head of output buffer list VideoSurfaceBuffer *mOutputTail; // tail of output buffer list VASurfaceID *mSurfaces; // surfaces array - VASurfaceAttrib *mVASurfaceAttrib; - VAExternalMemoryBuffers *mVAExternalMemoryBuffers; + VASurfaceAttributeTPI *mVASurfaceAttrib; uint8_t **mSurfaceUserPtr; // mapped user space pointer int32_t mSurfaceAcquirePos; // position of surface to start acquiring int32_t mNextOutputPOC; // Picture order count of next output diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk index 9ee2e42..0333bc9 100644 --- a/videoencoder/Android.mk +++ b/videoencoder/Android.mk @@ -1,7 +1,7 @@ LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) -VIDEO_ENC_LOG_ENABLE := false +VIDEO_ENC_LOG_ENABLE := true LOCAL_SRC_FILES := \ VideoEncoderBase.cpp \ diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 4297a8a..2ee25f8 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -11,6 +11,7 @@ #include "VideoEncoderLog.h" #include "VideoEncoderAVC.h" #include +#include VideoEncoderAVC::VideoEncoderAVC() :VideoEncoderBase() { @@ -306,7 +307,7 @@ Encode_Status VideoEncoderAVC::getOneNALUnit( zeroByteCount = 0; *nalOffset = pos; - if (status & VA_CODED_BUF_STATUS_AVC_SINGLE_NALU) { + if (status & VA_CODED_BUF_STATUS_SINGLE_NALU) { *nalSize = bufSize - pos; return ENCODE_SUCCESS; } @@ -753,30 +754,63 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncSequenceParameterBufferH264 avcSeqParams; + VAEncMiscParameterBuffer *miscEncRCParamBuf; + VAEncMiscParameterBuffer *miscEncFrameRateParamBuf; + VAEncMiscParameterRateControl *rcMiscParam; + VAEncMiscParameterFrameRate *framerateParam; int level; uint32_t frameRateNum = mComParams.frameRate.frameRateNum; uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom; + const char* device_info; LOG_V( "Begin\n\n"); - + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncMiscParameterBufferType, + sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl), + 1, NULL, + &mRcParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + vaStatus = vaMapBuffer(mVADisplay, mRcParamBuf, (void **)&miscEncRCParamBuf); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncMiscParameterBufferType, + sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterFrameRate), + 1, NULL, + &mFrameRateParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + vaStatus = vaMapBuffer(mVADisplay, mFrameRateParamBuf, (void **)&miscEncFrameRateParamBuf); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + miscEncRCParamBuf->type = VAEncMiscParameterTypeRateControl; + rcMiscParam = (VAEncMiscParameterRateControl *)miscEncRCParamBuf->data; + miscEncFrameRateParamBuf->type = VAEncMiscParameterTypeFrameRate; + framerateParam = (VAEncMiscParameterFrameRate *)miscEncFrameRateParamBuf->data; // set up the sequence params for HW // avcSeqParams.level_idc = mLevel; avcSeqParams.intra_period = mComParams.intraPeriod; avcSeqParams.intra_idr_period = mVideoParamsAVC.idrInterval; avcSeqParams.picture_width_in_mbs = (mComParams.resolution.width + 15) / 16; avcSeqParams.picture_height_in_mbs = (mComParams.resolution.height + 15) / 16; + if((avcSeqParams.picture_width_in_mbs >=1920)|| (avcSeqParams.picture_height_in_mbs >=1080)) + { + device_info = vaQueryVendorString(mVADisplay); + if(strstr(device_info, 
"LEXINGTON")) + return ENCODE_INVALID_PARAMS; + } level = calcLevel (avcSeqParams.picture_width_in_mbs * avcSeqParams.picture_height_in_mbs); avcSeqParams.level_idc = level; avcSeqParams.bits_per_second = mComParams.rcParams.bitRate; - avcSeqParams.frame_rate = + framerateParam->framerate = (unsigned int) (frameRateNum + frameRateDenom /2 ) / frameRateDenom; - avcSeqParams.initial_qp = mComParams.rcParams.initQP; - avcSeqParams.min_qp = mComParams.rcParams.minQP; - avcSeqParams.basic_unit_size = mVideoParamsAVC.basicUnitSize; //for rate control usage + rcMiscParam->initial_qp = mComParams.rcParams.initQP; + rcMiscParam->min_qp = mComParams.rcParams.minQP; + rcMiscParam->window_size = mComParams.rcParams.windowSize; + rcMiscParam->basic_unit_size = mVideoParamsAVC.basicUnitSize; //for rate control usage avcSeqParams.intra_period = mComParams.intraPeriod; //avcSeqParams.vui_flag = 248; - avcSeqParams.vui_flag = mVideoParamsAVC.VUIFlag; + avcSeqParams.vui_parameters_present_flag = mVideoParamsAVC.VUIFlag; avcSeqParams.seq_parameter_set_id = 8; if (mVideoParamsAVC.crop.LeftOffset || mVideoParamsAVC.crop.RightOffset || @@ -789,8 +823,8 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { avcSeqParams.frame_crop_bottom_offset = mVideoParamsAVC.crop.BottomOffset; } else avcSeqParams.frame_cropping_flag = false; - if(avcSeqParams.vui_flag && (mVideoParamsAVC.SAR.SarWidth || mVideoParamsAVC.SAR.SarHeight)) { - avcSeqParams.aspect_ratio_info_present_flag = true; + if(avcSeqParams.vui_parameters_present_flag && (mVideoParamsAVC.SAR.SarWidth || mVideoParamsAVC.SAR.SarHeight)) { + avcSeqParams.vui_fields.bits.aspect_ratio_info_present_flag = true; avcSeqParams.aspect_ratio_idc = 0xff /* Extended_SAR */; avcSeqParams.sar_width = mVideoParamsAVC.SAR.SarWidth; avcSeqParams.sar_height = mVideoParamsAVC.SAR.SarHeight; @@ -807,18 +841,26 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { LOG_I( "picture_width_in_mbs = %d\n", avcSeqParams.picture_width_in_mbs); LOG_I( "picture_height_in_mbs = %d\n", avcSeqParams.picture_height_in_mbs); LOG_I( "bitrate = %d\n", avcSeqParams.bits_per_second); - LOG_I( "frame_rate = %d\n", avcSeqParams.frame_rate); - LOG_I( "initial_qp = %d\n", avcSeqParams.initial_qp); - LOG_I( "min_qp = %d\n", avcSeqParams.min_qp); - LOG_I( "basic_unit_size = %d\n", avcSeqParams.basic_unit_size); + LOG_I( "frame_rate = %d\n", framerateParam->framerate); + LOG_I( "initial_qp = %d\n", rcMiscParam->initial_qp); + LOG_I( "min_qp = %d\n", rcMiscParam->min_qp); + LOG_I( "basic_unit_size = %d\n", rcMiscParam->basic_unit_size); + vaStatus = vaUnmapBuffer(mVADisplay, mRcParamBuf); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + vaStatus = vaUnmapBuffer(mVADisplay, mFrameRateParamBuf); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); vaStatus = vaCreateBuffer( mVADisplay, mVAContext, VAEncSequenceParameterBufferType, sizeof(avcSeqParams), 1, &avcSeqParams, &mSeqParamBuf); CHECK_VA_STATUS_RETURN("vaCreateBuffer"); - + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mRcParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mFrameRateParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1); CHECK_VA_STATUS_RETURN("vaRenderPicture"); @@ -833,20 +875,20 @@ Encode_Status VideoEncoderAVC::renderPictureParams() { LOG_V( "Begin\n\n"); // set picture params for HW - avcPicParams.reference_picture = mRefFrame->surface; - avcPicParams.reconstructed_picture = mRecFrame->surface; 
+ avcPicParams.ReferenceFrames[0].picture_id= mRefFrame->surface; + avcPicParams.CurrPic.picture_id= mRecFrame->surface; avcPicParams.coded_buf = mVACodedBuffer [mCodedBufIndex]; - avcPicParams.picture_width = mComParams.resolution.width; - avcPicParams.picture_height = mComParams.resolution.height; + //avcPicParams.picture_width = mComParams.resolution.width; + //avcPicParams.picture_height = mComParams.resolution.height; avcPicParams.last_picture = 0; LOG_V("======h264 picture params======\n"); - LOG_I( "reference_picture = 0x%08x\n", avcPicParams.reference_picture); - LOG_I( "reconstructed_picture = 0x%08x\n", avcPicParams.reconstructed_picture); + LOG_I( "reference_picture = 0x%08x\n", avcPicParams.ReferenceFrames[0].picture_id); + LOG_I( "reconstructed_picture = 0x%08x\n", avcPicParams.CurrPic.picture_id); LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); LOG_I( "coded_buf = 0x%08x\n", avcPicParams.coded_buf); - LOG_I( "picture_width = %d\n", avcPicParams.picture_width); - LOG_I( "picture_height = %d\n\n", avcPicParams.picture_height); + //LOG_I( "picture_width = %d\n", avcPicParams.picture_width); + //LOG_I( "picture_height = %d\n\n", avcPicParams.picture_height); vaStatus = vaCreateBuffer( mVADisplay, mVAContext, diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 7f7584e..287b8c2 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -30,25 +30,7 @@ VAStatus vaLockSurface(VADisplay dpy, VAStatus vaUnlockSurface(VADisplay dpy, VASurfaceID surface ); - -VAStatus vaCreateSurfaceFromKBuf( - VADisplay dpy, - int width, - int height, - int format, - VASurfaceID *surface, /* out */ - unsigned int kbuf_handle, /* kernel buffer handle*/ - unsigned size, /* kernel buffer size */ - unsigned int kBuf_fourcc, /* expected fourcc */ - unsigned int luma_stride, /* luma stride, could be width aligned with a special value */ - unsigned int chroma_u_stride, /* chroma stride */ - unsigned int chroma_v_stride, - unsigned int luma_offset, /* could be 0 */ - unsigned int chroma_u_offset, /* UV offset from the beginning of the memory */ - unsigned int chroma_v_offset -); } - VideoEncoderBase::VideoEncoderBase() :mInitialized(false) ,mVADisplay(NULL) @@ -242,7 +224,7 @@ Encode_Status VideoEncoderBase::start() { } } - LOG_I("mBufferMode = %d\n", mBufferMode); + LOG_E("mBufferMode = %d\n", mBufferMode); mSurfaceCnt = normalSurfacesCnt + mSharedSurfacesCnt + mReqSurfacesCnt; @@ -260,9 +242,9 @@ Encode_Status VideoEncoderBase::start() { goto CLEAN_UP; } - vaStatus = vaCreateSurfaces(mVADisplay, mComParams.resolution.width, - mComParams.resolution.height, VA_RT_FORMAT_YUV420, - normalSurfacesCnt, surfaces, NULL , 0); + vaStatus = vaCreateSurfaces(mVADisplay,VA_RT_FORMAT_YUV420, mComParams.resolution.width, + mComParams.resolution.height, + surfaces, normalSurfacesCnt, NULL , 0); CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateSurfaces"); switch (mBufferMode) { @@ -1412,12 +1394,21 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( LOG_W ("Format is not supported\n"); return ENCODE_NOT_SUPPORTED; } + VASurfaceAttributeTPI *attribute_tpi = new VASurfaceAttributeTPI; - vaStatus = vaCreateSurfacesForUserPtr(mVADisplay, width, height, VA_RT_FORMAT_YUV420, 1, - &surface, expectedSize, VA_FOURCC_NV12, width, width, width, - 0, width * height, width * height); + attribute_tpi->size = expectedSize; + attribute_tpi->luma_stride = width; + attribute_tpi->chroma_u_stride = width; + attribute_tpi->chroma_v_stride = width; + attribute_tpi->luma_offset = 
0; + attribute_tpi->chroma_u_offset = width*height; + attribute_tpi->chroma_v_offset = width*height; + attribute_tpi->pixel_format = VA_FOURCC_NV12; + attribute_tpi->type = VAExternalMemoryNULL; - CHECK_VA_STATUS_RETURN("vaCreateSurfacesForUserPtr"); + vaCreateSurfacesWithAttribute(mVADisplay, width, height, VA_RT_FORMAT_YUV420, + 1, &surface, attribute_tpi); + CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); vaStatus = vaDeriveImage(mVADisplay, surface, &image); CHECK_VA_STATUS_RETURN("vaDeriveImage"); @@ -1484,6 +1475,10 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( mReqSurfacesCnt ++; ret = ENCODE_SUCCESS; + if(attribute_tpi) { + delete attribute_tpi; + attribute_tpi = NULL; + } return ret; } @@ -1550,7 +1545,6 @@ Encode_Status VideoEncoderBase::generateVideoBufferAndAttachToList(uint32_t inde } Encode_Status VideoEncoderBase::surfaceMappingForSurfaceList() { - uint32_t index; VAStatus vaStatus = VA_STATUS_SUCCESS; Encode_Status ret = ENCODE_SUCCESS; @@ -1564,6 +1558,13 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurfaceList() { uint32_t chromaVOffset = 0; uint32_t kBufHandle = 0; + VASurfaceAttributeTPI * vaSurfaceAttrib = new VASurfaceAttributeTPI; + if (vaSurfaceAttrib == NULL) { + LOG_E("Failed to allocate VASurfaceAttrib\n"); + return ENCODE_NO_MEMORY; + } + vaSurfaceAttrib->buffers = new uint32_t[mSharedSurfacesCnt]; + for (index = 0; index < mSharedSurfacesCnt; index++) { vaStatus = vaLockSurface( @@ -1585,10 +1586,20 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurfaceList() { vaStatus = vaUnlockSurface(mVADecoderDisplay, (VASurfaceID)mUpstreamBufferList[index]); CHECK_VA_STATUS_RETURN("vaUnlockSurface"); - vaStatus = vaCreateSurfaceFromKBuf( + vaSurfaceAttrib->size = mComParams.resolution.width*mComParams.resolution.height*1.5; + vaSurfaceAttrib->luma_stride = lumaStride; + vaSurfaceAttrib->chroma_u_stride = chromaUStride; + vaSurfaceAttrib->chroma_v_stride = chromaVStride; + vaSurfaceAttrib->luma_offset = lumaOffset; + vaSurfaceAttrib->chroma_u_offset = chromaUOffset; + vaSurfaceAttrib->chroma_v_offset = chromaVOffset; + vaSurfaceAttrib->buffers[0] = kBufHandle; + vaSurfaceAttrib->pixel_format = fourCC; + vaSurfaceAttrib->type = VAExternalMemoryKernelDRMBufffer; + + vaStatus = vaCreateSurfacesWithAttribute( mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, - (VASurfaceID *)&mSharedSurfaces[index], kBufHandle, lumaStride * mComParams.resolution.height * 3 / 2, - fourCC, lumaStride, chromaUStride, chromaVStride, lumaOffset, chromaUOffset, chromaVOffset); + 1 /*mSharedSurfacesCnt*/, &mSharedSurfaces[index], vaSurfaceAttrib); CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf"); @@ -1598,7 +1609,15 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurfaceList() { ret = generateVideoBufferAndAttachToList(index, NULL); CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList"); } - + + if(vaSurfaceAttrib) { + if(vaSurfaceAttrib->buffers) { + delete [] vaSurfaceAttrib->buffers; + vaSurfaceAttrib->buffers= NULL; + } + delete vaSurfaceAttrib; + vaSurfaceAttrib = NULL; + } return ret; } @@ -1608,21 +1627,15 @@ Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle() { VAStatus vaStatus = VA_STATUS_SUCCESS; Encode_Status ret = ENCODE_SUCCESS; - VASurfaceAttrib * vaSurfaceAttrib = new VASurfaceAttrib; + VASurfaceAttributeTPI * vaSurfaceAttrib = new VASurfaceAttributeTPI; if (vaSurfaceAttrib == NULL) { LOG_E("Failed to allocate VASurfaceAttrib\n"); return ENCODE_NO_MEMORY; } - VAExternalMemoryBuffers 
*vaExternalMemoryBuffers = new VAExternalMemoryBuffers; - if (vaExternalMemoryBuffers == NULL) { - LOG_E("Failed to allocate VAExternalMemoryBuffers\n"); - return ENCODE_NO_MEMORY; - } - - vaExternalMemoryBuffers->buffers = new uint32_t[mSharedSurfacesCnt]; - if (vaExternalMemoryBuffers->buffers == NULL) { - LOG_E("Failed to allocate buffers for VAExternalMemoryBuffers\n"); + vaSurfaceAttrib->buffers = new uint32_t[mSharedSurfacesCnt]; + if (vaSurfaceAttrib->buffers == NULL) { + LOG_E("Failed to allocate buffers for vaSurfaceAttrib\n"); return ENCODE_NO_MEMORY; } @@ -1632,31 +1645,27 @@ Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle() { LOG_I("width = %d\n", mComParams.resolution.width); LOG_I("height = %d\n", mComParams.resolution.height); - vaExternalMemoryBuffers->count = mSharedSurfacesCnt; - vaExternalMemoryBuffers->luma_stride = mBufAttrib->lumaStride; - vaExternalMemoryBuffers->pixel_format = mBufAttrib->format; - vaExternalMemoryBuffers->width = mComParams.resolution.width; - vaExternalMemoryBuffers->height = mComParams.resolution.height; - vaExternalMemoryBuffers->type = VAExternalMemoryAndroidGrallocBuffer; + vaSurfaceAttrib->count = mSharedSurfacesCnt; + vaSurfaceAttrib->luma_stride = mBufAttrib->lumaStride; + vaSurfaceAttrib->pixel_format = mBufAttrib->format; + vaSurfaceAttrib->width = mComParams.resolution.width; + vaSurfaceAttrib->height = mComParams.resolution.height; + vaSurfaceAttrib->type = VAExternalMemoryAndroidGrallocBuffer; for(index = 0; index < mSharedSurfacesCnt; index++) { - vaExternalMemoryBuffers->buffers[index] = (uint32_t) mUpstreamBufferList[index]; + vaSurfaceAttrib->buffers[index] = (uint32_t) mUpstreamBufferList[index]; LOG_I("NativeHandleList[%d] = 0x%08x", index, mUpstreamBufferList[index]); } - vaSurfaceAttrib->flags = VA_SURFACE_ATTRIB_SETTABLE; - vaSurfaceAttrib->type = VASurfaceAttribNativeHandle; - vaSurfaceAttrib->value.type = VAGenericValueTypePointer; - vaSurfaceAttrib->value.value.p_val = (void *)vaExternalMemoryBuffers; - vaStatus = vaCreateSurfaces( + + vaStatus = vaCreateSurfacesWithAttribute( mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, mSharedSurfacesCnt, mSharedSurfaces, - vaSurfaceAttrib, - 1); + vaSurfaceAttrib); - CHECK_VA_STATUS_RETURN("vaCreateSurfaces"); + CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); LOG_V("Successfully create surfaces from native hanle"); for(index = 0; index < mSharedSurfacesCnt; index++) { @@ -1665,16 +1674,12 @@ Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle() { LOG_I("mSurfaces[%d] = %08x", index, mSurfaces[index]); CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList"); } - if(vaExternalMemoryBuffers) { - if(vaExternalMemoryBuffers->buffers) { - delete [] vaExternalMemoryBuffers->buffers; - vaExternalMemoryBuffers->buffers= NULL; - } - delete vaExternalMemoryBuffers; - vaExternalMemoryBuffers = NULL; - } if(vaSurfaceAttrib) { + if(vaSurfaceAttrib->buffers) { + delete [] vaSurfaceAttrib->buffers; + vaSurfaceAttrib->buffers= NULL; + } delete vaSurfaceAttrib; vaSurfaceAttrib = NULL; } @@ -1692,13 +1697,29 @@ Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle() { uint32_t lumaOffset = 0; uint32_t chromaUOffset = mBufAttrib->realHeight * mBufAttrib->lumaStride; uint32_t chromaVOffset = chromaUOffset + 1; - + + VASurfaceAttributeTPI * vaSurfaceAttrib = new VASurfaceAttributeTPI; + if (vaSurfaceAttrib == NULL) { + LOG_E("Failed to allocate VASurfaceAttrib\n"); + return ENCODE_NO_MEMORY; + } + vaSurfaceAttrib->buffers 
= new uint32_t[mSharedSurfacesCnt]; + for (index = 0; index < mSharedSurfacesCnt; index++) { - - vaStatus = vaCreateSurfaceFromKBuf( + vaSurfaceAttrib->size = mBufAttrib->lumaStride * mComParams.resolution.height * 3 / 2; + vaSurfaceAttrib->luma_stride = mBufAttrib->lumaStride; + vaSurfaceAttrib->chroma_u_stride = mBufAttrib->chromStride; + vaSurfaceAttrib->chroma_v_stride = mBufAttrib->chromStride; + vaSurfaceAttrib->luma_offset = lumaOffset; + vaSurfaceAttrib->chroma_u_offset = chromaUOffset; + vaSurfaceAttrib->chroma_v_offset = chromaVOffset; + vaSurfaceAttrib->buffers[0] = mUpstreamBufferList[index]; + vaSurfaceAttrib->pixel_format = mBufAttrib->format; + vaSurfaceAttrib->type = VAExternalMemoryKernelDRMBufffer; + + vaStatus = vaCreateSurfacesWithAttribute( mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, - (VASurfaceID *)&mSharedSurfaces[index], mUpstreamBufferList[index], mBufAttrib->lumaStride * mComParams.resolution.height * 3 / 2, - mBufAttrib->format, mBufAttrib->lumaStride, mBufAttrib->chromStride, mBufAttrib->chromStride, lumaOffset, chromaUOffset, chromaVOffset); + 1 /*mSharedSurfacesCnt*/, &mSharedSurfaces[index], vaSurfaceAttrib); CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf"); @@ -1708,6 +1729,15 @@ Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle() { ret = generateVideoBufferAndAttachToList(index, NULL); CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList"); } + + if(vaSurfaceAttrib) { + if(vaSurfaceAttrib->buffers) { + delete [] vaSurfaceAttrib->buffers; + vaSurfaceAttrib->buffers= NULL; + } + delete vaSurfaceAttrib; + vaSurfaceAttrib = NULL; + } return ret; } @@ -1716,19 +1746,39 @@ Encode_Status VideoEncoderBase::surfaceMappingForCIFrameList() { uint32_t index; VAStatus vaStatus = VA_STATUS_SUCCESS; Encode_Status ret = ENCODE_SUCCESS; - + VASurfaceAttributeTPI * vaSurfaceAttrib = new VASurfaceAttributeTPI; + if (vaSurfaceAttrib == NULL) { + LOG_E("Failed to allocate VASurfaceAttrib\n"); + return ENCODE_NO_MEMORY; + } + vaSurfaceAttrib->type = VAExternalMemoryCIFrame; + vaSurfaceAttrib->buffers = new uint32_t[mSharedSurfacesCnt]; + for (index = 0; index < mSharedSurfacesCnt; index++) { - - vaStatus = vaCreateSurfaceFromCIFrame( - mVADisplay, (uint32_t)mUpstreamBufferCnt, &mSharedSurfaces[index]); - - CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromCIFrame"); - + vaSurfaceAttrib->buffers[0] = (uint32_t)mUpstreamBufferCnt; + vaStatus = vaCreateSurfacesWithAttribute( + mVADisplay, + mComParams.resolution.width, + mComParams.resolution.height, + VA_RT_FORMAT_YUV420, + 1 /*mSharedSurfacesCnt*/, + &mSharedSurfaces[index], + vaSurfaceAttrib); + CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); mSurfaces[index] = mSharedSurfaces[index]; ret = generateVideoBufferAndAttachToList(index, NULL); CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList") } + + if(vaSurfaceAttrib) { + if(vaSurfaceAttrib->buffers) { + delete [] vaSurfaceAttrib->buffers; + vaSurfaceAttrib->buffers= NULL; + } + delete vaSurfaceAttrib; + vaSurfaceAttrib = NULL; + } return ret; } diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 4b11253..7cc8f62 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -10,6 +10,7 @@ #define __VIDEO_ENCODER_BASE_H__ #include +#include #include "VideoEncoderDef.h" #include "VideoEncoderInterface.h" class VideoEncoderBase : IVideoEncoder { @@ -115,6 +116,8 @@ protected: VABufferID mLastCodedBuffer; VABufferID mOutCodedBuffer; VABufferID 
mSeqParamBuf; + VABufferID mRcParamBuf; + VABufferID mFrameRateParamBuf; VABufferID mPicParamBuf; VABufferID mSliceParamBuf; -- cgit v1.2.3 From bb8d22dde8cd726f7ad320fc260d624a42f3f3f3 Mon Sep 17 00:00:00 2001 From: hding3 Date: Tue, 26 Jun 2012 15:16:35 +0800 Subject: Commit new libMIX changes for video encode enhancement. BZ: 43450 Commit new libMIX changes for video encode enhancement. Change-Id: Ia0cf7beb7404a878b44751ff287fa60a1942429e Signed-off-by: Zhao Liang Signed-off-by: hding3 Reviewed-on: http://android.intel.com:8080/54087 Reviewed-by: Yuan, Shengquan Reviewed-by: buildbot Tested-by: buildbot --- test/Android.mk | 53 ++ test/btest.cpp | 78 +++ test/mix_encoder.cpp | 781 ++++++++++++++++++++++ videoencoder/Android.mk | 33 +- videoencoder/IntelMetadataBuffer.cpp | 259 ++++++++ videoencoder/IntelMetadataBuffer.h | 106 +++ videoencoder/VideoEncoderAVC.cpp | 4 +- videoencoder/VideoEncoderBase.cpp | 1221 ++++++++++++++++------------------ videoencoder/VideoEncoderBase.h | 67 +- videoencoder/VideoEncoderDef.h | 22 + videoencoder/VideoEncoderH263.cpp | 4 +- videoencoder/VideoEncoderInterface.h | 1 + videoencoder/VideoEncoderLog.h | 7 + videoencoder/VideoEncoderMP4.cpp | 4 +- 14 files changed, 1948 insertions(+), 692 deletions(-) create mode 100644 test/Android.mk create mode 100644 test/btest.cpp create mode 100644 test/mix_encoder.cpp create mode 100644 videoencoder/IntelMetadataBuffer.cpp create mode 100644 videoencoder/IntelMetadataBuffer.h diff --git a/test/Android.mk b/test/Android.mk new file mode 100644 index 0000000..2f4d6a8 --- /dev/null +++ b/test/Android.mk @@ -0,0 +1,53 @@ +LOCAL_PATH := $(call my-dir) + +# For intelmetadatabuffer test +# ===================================================== + +include $(CLEAR_VARS) + +#VIDEO_ENC_LOG_ENABLE := true + +LOCAL_SRC_FILES := \ + btest.cpp + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH) \ + $(TARGET_OUT_HEADERS)/libmix_videoencoder \ + +LOCAL_SHARED_LIBRARIES := \ + libintelmetadatabuffer + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := btest + +include $(BUILD_EXECUTABLE) + +# For mix_encoder +# ===================================================== + +include $(CLEAR_VARS) + +#VIDEO_ENC_LOG_ENABLE := true + +LOCAL_SRC_FILES := \ + mix_encoder.cpp + +LOCAL_C_INCLUDES := \ + $(TARGET_OUT_HEADERS)/libva \ + $(TARGET_OUT_HEADERS)/libmix_videoencoder \ + $(TOP)/frameworks/base/include/display \ + $(LOCAL_PATH) + +LOCAL_SHARED_LIBRARIES := \ + libintelmetadatabuffer \ + libva_videoencoder \ + libva \ + libva-android \ + libva-tpi \ + libgui \ + libbinder + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := mix_encoder + +include $(BUILD_EXECUTABLE) diff --git a/test/btest.cpp b/test/btest.cpp new file mode 100644 index 0000000..26f104f --- /dev/null +++ b/test/btest.cpp @@ -0,0 +1,78 @@ +#include "IntelMetadataBuffer.h" +#include +#include +#include + +#define SUCCESS "PASS IntelMetadataBuffer Unit Test\n" +#define FAIL "Fail IntelMetadataBuffer Unit Test\n" + +int main(int argc, char* argv[]) +{ + IntelMetadataBuffer *mb1, *mb2; + uint8_t* bytes; + uint32_t size; + IMB_Result ret; + + MetadataBufferType t1 = MetadataBufferTypeCameraSource; + MetadataBufferType t2; + int32_t v1 = 0x00000010; + int32_t v2 = 0; + ValueInfo vi1, *vi2 = NULL; + int32_t ev1[10]; + int32_t *ev2 = NULL; + unsigned int count; + + if (argc > 1) + t1 = (MetadataBufferType) atoi(argv[1]); + + if (argc > 2) + v1 = atoi(argv[2]); + + memset(&vi1, 0, sizeof(ValueInfo)); + + mb1 = new IntelMetadataBuffer(); + ret = mb1->SetType(t1); + ret = mb1->SetValue(v1); + if (t1 != 
MetadataBufferTypeGrallocSource) { + ret = mb1->SetValueInfo(&vi1); + ret = mb1->SetExtraValues(ev1, 10); + } + ret = mb1->GetBytes(bytes, size); + printf("assembling IntelMetadataBuffer %s, ret = %d\n", (ret == IMB_SUCCESS)?"Success":"Fail", ret ); + + printf("size = %d, bytes = ", size); + for(int i=0; iSetBytes(bytes, size); + printf("parsing IntelMetadataBuffer %s, ret = %d\n", (ret == IMB_SUCCESS)?"Success":"Fail", ret ); + + ret = mb2->GetType(t2); + ret = mb2->GetValue(v2); + ret = mb2->GetValueInfo(vi2); + ret = mb2->GetExtraValues(ev2, count); + + printf("t2=%d, v2=%d, vi2=%x, ev2=%x\n", t2, v2, vi2, ev2); + if (v1 == v2 && t1 == t2 ) { + if (vi2) { + if (memcmp(&vi1, vi2, sizeof(ValueInfo)) == 0) { + if (ev2) { + if (memcmp(ev1, ev2, count) == 0) + printf(SUCCESS); + else + printf(FAIL); + }else + printf(SUCCESS); + }else + printf(FAIL); + }else + printf(SUCCESS); + }else + printf(SUCCESS); + + return 1; +} diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp new file mode 100644 index 0000000..a228b71 --- /dev/null +++ b/test/mix_encoder.cpp @@ -0,0 +1,781 @@ +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include + +#include +#include + +#define CHECK_ENCODE_STATUS(FUNC)\ + if (ret < ENCODE_SUCCESS) { \ + printf(FUNC" Failed. ret = 0x%08x\n", ret); \ + return -1; \ + } + +static const char *AVC_MIME_TYPE = "video/h264"; +static const char *MPEG4_MIME_TYPE = "video/mpeg4"; +static const char *H263_MIME_TYPE = "video/h263"; +static const int box_width = 128; + +static IVideoEncoder *gVideoEncoder = NULL; +static VideoParamsCommon gEncoderParams; +static VideoParamsStoreMetaDataInBuffers gStoreMetaDataInBuffers; +static VideoRateControl gRC = RATE_CONTROL_CBR; + +static int gCodec = 0; //0: H264, 1: MPEG4, 2: H263 +static int gRCMode = 1; //0: NO_RC, 1: CBR, 2: VBR, 3: VCM +static int gBitrate = 1280000; + +static bool gSyncEncMode = false; +static uint32_t gEncFrames = 15; +static const int gSrcFrames = 15; + +static uint32_t gAllocatedSize; +static uint32_t gWidth = 1280; +static uint32_t gHeight = 720; +static uint32_t gStride = 1280; +static uint32_t gFrameRate = 30; + +static char* gFile = (char*)"out.264"; + +static uint32_t gMode = 0; //0:Camera malloc , 1: WiDi clone, 2: WiDi ext, 3: WiDi user, 4: Raw, 5: SurfaceMediaSource +static const char* gModeString[7] = {"Camera malloc", "WiDi clone", "WiDi ext", "WiDi user", "Raw", "GrallocSource(Composer)", "GrallocSource(Gralloc)"}; +static const char* gRCModeString[4] ={"NO_RC", "CBR", "VBR", "VCM"}; + +//for uploading src pictures, also for Camera malloc, WiDi clone, raw mode usrptr storage +static uint8_t* gUsrptr[gSrcFrames]; + +//for metadatabuffer transfer +static IntelMetadataBuffer* gIMB[gSrcFrames] = {NULL}; + +//for WiDi user mode +static VADisplay gVADisplay; +static VASurfaceID gSurface[gSrcFrames]; + +//for WiDi ext mode +static uint32_t gkBufHandle[gSrcFrames]; + +//for gfxhandle +static sp gGraphicBufferAlloc; +static sp gGraphicBuffer[gSrcFrames]; + +extern "C" { +VAStatus vaLockSurface(VADisplay dpy, + VASurfaceID surface, + unsigned int *fourcc, + unsigned int *luma_stride, + unsigned int *chroma_u_stride, + unsigned int *chroma_v_stride, + unsigned int *luma_offset, + unsigned int *chroma_u_offset, + unsigned int *chroma_v_offset, + unsigned int *buffer_name, + void **buffer +); + +VAStatus vaUnlockSurface(VADisplay dpy, + VASurfaceID surface +); +} + +static hw_module_t const *gModule; +static gralloc_module_t const *gAllocMod; /* get by force 
hw_module_t */ +static alloc_device_t *gAllocDev; /* get by gralloc_open */ + +static void gfx_init() +{ + int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &gModule); + if (err) { + printf("FATAL: can't find the %s module", GRALLOC_HARDWARE_MODULE_ID); + exit(-1); + } + + gAllocMod = (gralloc_module_t const *)gModule; + + err = gralloc_open(gModule, &gAllocDev); + if (err) { + printf("FATAL: gralloc open failed\n"); + exit(-1); + } + +} + +static int gfx_alloc(uint32_t w, uint32_t h, int format, + int usage, buffer_handle_t* handle, int32_t* stride) +{ + int err; + + err = gAllocDev->alloc(gAllocDev, w, h, format, usage, handle, stride); + if (err) { + printf("alloc(%u, %u, %d, %08x, ...) failed %d (%s)\n", + w, h, format, usage, err, strerror(-err)); + exit(-1); + } + + return err; +} + +static int gfx_free(buffer_handle_t handle) +{ + int err; + + err = gAllocDev->free(gAllocDev, handle); + if (err) { + printf("free(...) failed %d (%s)\n", err, strerror(-err)); + exit(-1); + } + + return err; +} + +static int gfx_lock(buffer_handle_t handle, + int usage, int left, int top, int width, int height, + void** vaddr) +{ + int err; + + err = gAllocMod->lock(gAllocMod, handle, usage, + left, top, width, height, + vaddr); + + if (err){ + printf("lock(...) failed %d (%s)", err, strerror(-err)); + exit(-1); + } + + return err; +} + + +static int gfx_unlock(buffer_handle_t handle) +{ + int err; + + err = gAllocMod->unlock(gAllocMod, handle); + if (err) { + printf("unlock(...) failed %d (%s)", err, strerror(-err)); + exit(-1); + } + + return err; +} + +Encode_Status SetVideoEncoderParam() { + + Encode_Status ret = ENCODE_SUCCESS; + + ret = gVideoEncoder->getParameters(&gEncoderParams); + CHECK_ENCODE_STATUS("getParameters"); + + gEncoderParams.resolution.height = gHeight; + gEncoderParams.resolution.width = gWidth; + gEncoderParams.frameRate.frameRateDenom = 1; + gEncoderParams.frameRate.frameRateNum = gFrameRate; + gEncoderParams.rcMode = gRC; + gEncoderParams.syncEncMode = gSyncEncMode; + + switch(gCodec) + { + case 0: + break; + case 1: + gEncoderParams.profile = (VAProfile)VAProfileMPEG4Simple; + break; + case 2: + gEncoderParams.profile = (VAProfile)VAProfileH263Baseline; + break; + default: + break; + } + + gEncoderParams.rcParams.bitRate = gBitrate; +#if 0 + gEncoderParams->intraPeriod = 15; + gEncoderParams->rawFormat = RAW_FORMAT_NV12; + gEncoderParams->rcParams.initQP = 0; + gEncoderParams->rcParams.minQP = 0; + gEncoderParams->rcParams.windowSize = 0; + gEncoderParams->rcParams.targetPercentage = 0; + gEncoderParams->rcParams.bitRate = 10000; + gEncoderParams->rcMode = RATE_CONTROL_CBR; + gEncoderParams->refreshType = VIDEO_ENC_NONIR; +#endif + + ret = gVideoEncoder->setParameters(&gEncoderParams); + CHECK_ENCODE_STATUS("setParameters VideoParamsCommon"); + + if (gMode != 4) + { + gStoreMetaDataInBuffers.isEnabled = true; + + ret = gVideoEncoder->setParameters(&gStoreMetaDataInBuffers); + CHECK_ENCODE_STATUS("setParameters StoreMetaDataInBuffers"); + } + + return ret; +} + +static int YUV_generator_planar(int width, int height, + unsigned char *Y_start, int Y_pitch, + unsigned char *U_start, int U_pitch, + unsigned char *V_start, int V_pitch, + int UV_interleave) +{ + static int row_shift = 0; + int row; + + /* copy Y plane */ + for (row=0;rowmode = MEM_MODE_MALLOC; + vinfo->handle = 0; + vinfo->size = size; + vinfo->width = gWidth; + vinfo->height = gHeight; + vinfo->lumaStride = gStride; + vinfo->chromStride = gStride; + vinfo->format = STRING_TO_FOURCC("NV12"); + vinfo->s3dformat = 
0xFFFFFFFF; + + for(int i = 0; i < gSrcFrames; i ++) + { + gUsrptr[i] = (uint8_t*)malloc(size); + + gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); + + gIMB[i]->SetValueInfo(vinfo); + } + delete vinfo; +} + +//apply memory from encoder, and get usrptr to upload pictures +void GetAllUsrptr() +{ + Encode_Status ret = ENCODE_SUCCESS; + VideoParamsUsrptrBuffer paramsUsrptrBuffer; + + paramsUsrptrBuffer.type = VideoParamsTypeUsrptrBuffer; + paramsUsrptrBuffer.size = sizeof(VideoParamsUsrptrBuffer); + paramsUsrptrBuffer.expectedSize = gWidth * gHeight * 3 / 2; + paramsUsrptrBuffer.format = STRING_TO_FOURCC("NV12"); + paramsUsrptrBuffer.width = gWidth; + paramsUsrptrBuffer.height = gHeight; + + for(int i = 0; i < gSrcFrames; i ++) + { + ret = gVideoEncoder->getParameters(¶msUsrptrBuffer); + if(ret != ENCODE_SUCCESS ) { + printf("could not allocate input surface from the encoder %d", ret); + ret = ENCODE_NO_MEMORY; + break; + } + gAllocatedSize = paramsUsrptrBuffer.actualSize; + gUsrptr[i] = paramsUsrptrBuffer.usrPtr; + gStride = paramsUsrptrBuffer.stride; + + gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeEncoder, (int32_t)gUsrptr[i]); + } + +} + +void CreateUserSurfaces(int mode) +{ + unsigned int display = 0; + int majorVersion = -1; + int minorVersion = -1; + VAStatus vaStatus; + + gVADisplay = vaGetDisplay(&display); + + if (gVADisplay == NULL) { + printf("vaGetDisplay failed."); + } + + vaStatus = vaInitialize(gVADisplay, &majorVersion, &minorVersion); + if (vaStatus != VA_STATUS_SUCCESS) { + printf( "Failed vaInitialize, vaStatus = %d\n", vaStatus); + } + + VASurfaceAttributeTPI attribute_tpi; + + attribute_tpi.size = gWidth * gHeight * 3 /2; + attribute_tpi.luma_stride = gWidth; + attribute_tpi.chroma_u_stride = gWidth; + attribute_tpi.chroma_v_stride = gWidth; + attribute_tpi.luma_offset = 0; + attribute_tpi.chroma_u_offset = gWidth * gHeight; + attribute_tpi.chroma_v_offset = gWidth * gHeight; + attribute_tpi.pixel_format = VA_FOURCC_NV12; + attribute_tpi.type = VAExternalMemoryNULL; + + vaStatus = vaCreateSurfacesWithAttribute(gVADisplay, gWidth, gHeight, VA_RT_FORMAT_YUV420, + gSrcFrames, gSurface, &attribute_tpi); + + if (vaStatus != VA_STATUS_SUCCESS) { + printf( "Failed vaCreateSurfaces, vaStatus = %d\n", vaStatus); + } + + VideoParamsUpstreamBuffer upstreamParam; + if (mode == 0) + upstreamParam.bufferMode = BUFFER_SHARING_SURFACE; + else + upstreamParam.bufferMode = BUFFER_SHARING_KBUFHANDLE; + + ExternalBufferAttrib attrib; + attrib.realWidth = gWidth; + attrib.realHeight = gHeight; + attrib.lumaStride = gStride; + attrib.chromStride = gStride; + attrib.format = VA_FOURCC_NV12; + upstreamParam.bufAttrib = &attrib; + + uint32_t *list = new uint32_t[gSrcFrames]; + if (mode == 1){ + uint32_t fourCC = 0; + uint32_t lumaStride = 0; + uint32_t chromaUStride = 0; + uint32_t chromaVStride = 0; + uint32_t lumaOffset = 0; + uint32_t chromaUOffset = 0; + uint32_t chromaVOffset = 0; + + for(int i = 0; i < gSrcFrames; i++) { + vaStatus = vaLockSurface( + gVADisplay, (VASurfaceID)gSurface[i], + &fourCC, &lumaStride, &chromaUStride, &chromaVStride, + &lumaOffset, &chromaUOffset, &chromaVOffset, &gkBufHandle[i], NULL); + if (vaStatus != VA_STATUS_SUCCESS) { + printf( "Failed vaLockSurface, vaStatus = %d\n", vaStatus); + } +#if 0 + printf("lumaStride = %d", lumaStride); + printf("chromaUStride = %d", chromaUStride); + printf("chromaVStride = %d", chromaVStride); + printf("lumaOffset = %d", lumaOffset); + printf("chromaUOffset = %d", chromaUOffset); + 
printf("chromaVOffset = %d", chromaVOffset); + printf("kBufHandle = 0x%08x", gkBufHandle[i]); + printf("fourCC = %d\n", fourCC); +#endif + vaStatus = vaUnlockSurface(gVADisplay, (VASurfaceID)gSurface[i]); + list[i] = gkBufHandle[i]; + } + + }else{ + + for (int i = 0; i < gSrcFrames; i++) + list[i] = gSurface[i]; + } + + upstreamParam.bufList = list; + upstreamParam.bufCnt = gSrcFrames; + upstreamParam.display = gVADisplay; + Encode_Status ret; + ret = gVideoEncoder->setParameters((VideoParamConfigSet *)&upstreamParam); + if (ret != ENCODE_SUCCESS) { + printf("Failed setParameters, Status = %d\n", ret); + } + delete list; + + //get usrptr for uploading src pictures + VAImage surface_image; + for (int i=0; i composer(ComposerService::getComposerService()); + gGraphicBufferAlloc = composer->createGraphicBufferAlloc(); + + uint32_t usage = GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_WRITE_OFTEN | GraphicBuffer::USAGE_SW_READ_OFTEN; // | GraphicBuffer::USAGE_HW_COMPOSER; + int format = HAL_PIXEL_FORMAT_NV12_VED; //HAL_PIXEL_FORMAT_RGBA_8888 + int32_t error; + + int adjusted_width, adjusted_height; + if (0) { + ; + } else if (512 >= gWidth) { + adjusted_width = 512; + } else if (1024 >= gWidth) { + adjusted_width = 1024; + } else if (1280 >= gWidth) { + adjusted_width = 1280; + } else if (2048 >= gWidth) { + adjusted_width = 2048; + } else if (4096 >= gWidth) { + adjusted_width = 4096; + } else { + adjusted_width = (gWidth + 0x1f) & ~0x1f; + } + + adjusted_height = (gHeight + 0x1f) & ~0x1f; + +printf("adjust width=%d, height=%d\n", adjusted_width, adjusted_height); + for(int i = 0; i < gSrcFrames; i ++) + { + sp graphicBuffer( + gGraphicBufferAlloc->createGraphicBuffer( +// gWidth, gHeight, format, usage, &error)); + adjusted_width, adjusted_height, format, usage, &error)); + + gGraphicBuffer[i] = graphicBuffer; + graphicBuffer->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&gUsrptr[i])); + + gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)gGraphicBuffer[i]->handle); + graphicBuffer->unlock(); + } + +} + +void CreateGralloc() +{ + int usage = GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_HW_TEXTURE; + int format = HAL_PIXEL_FORMAT_NV12_VED; + + gfx_init(); + + void* vaddr; + buffer_handle_t handle; + + for(int i = 0; i < gSrcFrames; i ++) + { + gfx_alloc(gWidth, gHeight, format, usage, &handle, (int32_t*)&gStride); + gfx_lock(handle, usage, 0, 0, gWidth, gHeight, &vaddr); + printf("vaddr= %p\n", vaddr); + gUsrptr[i] = (uint8_t*)vaddr; + gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)handle); + gfx_unlock(handle); + } + +} + +int CheckArgs(int argc, char* argv[]) +{ + char c; + + while ((c =getopt(argc, argv,"b:c:r:w:h:m:f:n:s:?") ) != EOF) { + switch (c) { + case 'w': + gWidth = atoi(optarg); + gStride = gWidth; + break; + case 'h': + gHeight = atoi(optarg); + break; + case 'n': + gEncFrames = atoi(optarg); + break; + case 'm': + gMode = atoi(optarg); + break; + case 'f': + gFile = optarg; + break; + case 'c': + gCodec = atoi(optarg); + break; + case 'r': + gRCMode = atoi(optarg); + break; + case 'b': + gBitrate = atoi(optarg); + break; + case 's': + gSyncEncMode = atoi(optarg); + break; + case '?': + default: + printf("\n./mix_encode -c -b -r -w -h -n -m -s -f \n"); + printf("\nCodec:\n"); + printf("0: H264 (default)\n1: MPEG4\n2: H263\n"); + printf("\nRate control:\n"); + printf("0: NO_RC \n1: CBR (default)\n2: VBR\n3: VCM\n"); + printf("\nMode:\n"); + printf("0: Camera malloc (default)\n1: WiDi 
clone\n2: WiDi ext\n3: WiDi user\n4: Raw\n5: GrallocSource(Composer)\n6: GrallocSource(Gralloc)\n"); + exit(0); + } + } + + return 0; +} + +int main(int argc, char* argv[]) +{ + Encode_Status ret; + const char *codec; + + CheckArgs(argc, argv); + + sp proc(ProcessState::self()); + + ProcessState::self()->startThreadPool(); + + switch(gCodec) + { + case 0: + codec = AVC_MIME_TYPE; + break; + case 1: + codec = MPEG4_MIME_TYPE; + break; + case 2: + codec = H263_MIME_TYPE; + break; + default: + printf("Not support this type codec\n"); + return 1; + } + + switch(gRCMode) + { + case 0: + gRC = RATE_CONTROL_NONE; + break; + case 1: + gRC = RATE_CONTROL_CBR; + break; + case 2: + gRC = RATE_CONTROL_VBR; + break; + case 3: + gRC = RATE_CONTROL_VCM; + break; + default: + printf("Not support this rate control mode\n"); + return 1; + } + + printf("\nStart %s Encoding ....\n", codec); + printf("Mode is %s, RC mode is %s, Width=%d, Height=%d, Bitrate=%dbps, EncodeFrames=%d, SyncMode=%d, out file is %s\n\n", gModeString[gMode], gRCModeString[gRCMode], gWidth, gHeight, gBitrate, gEncFrames, gSyncEncMode, gFile); + +//sleep(10); + +for(int i=0; i<1; i++) +{ + gVideoEncoder = createVideoEncoder(codec); + + //set parameter + SetVideoEncoderParam(); + + //prepare src pictures, get user ptrs for uploading picture and prepare metadatabuffer in different mode + + switch (gMode) + { + case 0: //Camera malloc + MallocExternalMemory(); + break; + case 1: //WiDi clone + GetAllUsrptr(); + break; + case 2: //WiDi ext + CreateUserSurfaces(1); + break; + case 3: //WiDi user + CreateUserSurfaces(0); + break; + case 4: //Raw + MallocExternalMemory(); + break; + case 5: //SurfaceMediaSource + CreateGfxhandle(); + break; + case 6: //Gralloc + CreateGralloc(); + break; + default: + break; + } + +//sleep(10); + + //upload src data + for(int i=0; istart(); + CHECK_ENCODE_STATUS("start"); + + //open out file + FILE* file = fopen(gFile, "w"); + if (!file) + { + printf("create out file failed\n"); + return 1; + } + + //input buffers + VideoEncRawBuffer InBuf; + uint8_t *data; + uint32_t size; + + //output buffers + VideoEncOutputBuffer OutBuf; + uint32_t maxsize; + gVideoEncoder->getMaxOutSize(&maxsize); + uint8_t out[maxsize]; + OutBuf.bufferSize = maxsize; + OutBuf.dataSize = 0; + OutBuf.data = out; + OutBuf.format = OUTPUT_EVERYTHING; + + printf("\n"); + for(unsigned int i=0; iGetBytes(data, size); + // printf("srcno =%d, data=%x, size=%d\n", i % gSrcFrames, data, size); + }else + { + data = gUsrptr[i % gSrcFrames]; + size = gWidth * gHeight * 3 /2; + } + InBuf.data = data; + InBuf.size = size; + InBuf.bufAvailable = true; + + ret = gVideoEncoder->encode(&InBuf); + CHECK_ENCODE_STATUS("encode"); + + ret = gVideoEncoder->getOutput(&OutBuf); + CHECK_ENCODE_STATUS("getOutput"); + // printf("OutBuf.dataSize = %d .........\n", OutBuf.dataSize); + fwrite(OutBuf.data, 1, OutBuf.dataSize, file); + + printf("Encoding %d Frames \r", i+1); + fflush(stdout); + } + fclose(file); + + VideoStatistics stat; + gVideoEncoder->getStatistics(&stat); + printf("\nVideoStatistics\n"); + printf("Encoded %d frames, Skip %d frames, encode time: average( %d us), max( %d us/Frame %d), min( %d us/Frame %d)\n", stat.total_frames, stat.skipped_frames, stat.average_encode_time, stat.max_encode_time, stat.max_encode_frame, stat.min_encode_time, stat.min_encode_frame ); + if(gVideoEncoder) { + releaseVideoEncoder(gVideoEncoder); + gVideoEncoder = NULL; + } + + + switch(gMode) + { + case 0: //camera malloc + case 4: //Raw + for(int i=0; 
iGetValue((int32_t&)handle); + gfx_free(handle); + } + } + break; + } + + for(int i=0; i +#include + +IntelMetadataBuffer::IntelMetadataBuffer() +{ + mType = MetadataBufferTypeCameraSource; + mValue = 0; + mInfo = NULL; + mExtraValues = NULL; + mExtraValues_Count = 0; + mBytes = NULL; + mSize = 0; +} + +IntelMetadataBuffer::IntelMetadataBuffer(MetadataBufferType type, int32_t value) +{ + mType = type; + mValue = value; + mInfo = NULL; + mExtraValues = NULL; + mExtraValues_Count = 0; + mBytes = NULL; + mSize = 0; +} + +IntelMetadataBuffer::~IntelMetadataBuffer() +{ + if (mInfo) + delete mInfo; + + if (mExtraValues) + delete[] mExtraValues; + + if (mBytes) + delete[] mBytes; +} + +IMB_Result IntelMetadataBuffer::GetType(MetadataBufferType& type) +{ + type = mType; + + return IMB_SUCCESS; +} + +IMB_Result IntelMetadataBuffer::SetType(MetadataBufferType type) +{ + if (type < MetadataBufferTypeLast) + mType = type; + else + return IMB_INVAL_PARAM; + + return IMB_SUCCESS; +} + +IMB_Result IntelMetadataBuffer::GetValue(int32_t& value) +{ + value = mValue; + + return IMB_SUCCESS; +} + +IMB_Result IntelMetadataBuffer::SetValue(int32_t value) +{ + mValue = value; + + return IMB_SUCCESS; +} + +IMB_Result IntelMetadataBuffer::GetValueInfo(ValueInfo* &info) +{ + info = mInfo; + + return IMB_SUCCESS; +} + +IMB_Result IntelMetadataBuffer::SetValueInfo(ValueInfo* info) +{ + if (info) + { + if (mInfo == NULL) + mInfo = new ValueInfo; + + memcpy(mInfo, info, sizeof(ValueInfo)); + } + else + return IMB_INVAL_PARAM; + + return IMB_SUCCESS; +} + +IMB_Result IntelMetadataBuffer::GetExtraValues(int32_t* &values, uint32_t& num) +{ + values = mExtraValues; + num = mExtraValues_Count; + + return IMB_SUCCESS; +} + +IMB_Result IntelMetadataBuffer::SetExtraValues(int32_t* values, uint32_t num) +{ + if (values && num > 0) + { + if (mExtraValues && mExtraValues_Count != num) + { + delete[] mExtraValues; + mExtraValues = NULL; + } + + if (mExtraValues == NULL) + mExtraValues = new int32_t[num]; + + memcpy(mExtraValues, values, sizeof(int32_t) * num); + mExtraValues_Count = num; + } + else + return IMB_INVAL_PARAM; + + return IMB_SUCCESS; +} + +IMB_Result IntelMetadataBuffer::SetBytes(uint8_t* data, uint32_t size) +{ + if (!data || size == 0) + return IMB_INVAL_PARAM; + + MetadataBufferType type; + int32_t value; + uint32_t extrasize = size - 8; + ValueInfo* info = NULL; + int32_t* ExtraValues = NULL; + uint32_t ExtraValues_Count = 0; + + memcpy(&type, data, 4); + data += 4; + memcpy(&value, data, 4); + data += 4; + + switch (type) + { + case MetadataBufferTypeCameraSource: + case MetadataBufferTypeEncoder: + case MetadataBufferTypeUser: + { + if (extrasize >0 && extrasize < sizeof(ValueInfo)) + return IMB_INVAL_BUFFER; + + if (extrasize > sizeof(ValueInfo)) //has extravalues + { + if ( (extrasize - sizeof(ValueInfo)) % 4 != 0 ) + return IMB_INVAL_BUFFER; + ExtraValues_Count = (extrasize - sizeof(ValueInfo)) / 4; + } + + if (extrasize > 0) + { + info = new ValueInfo; + memcpy(info, data, sizeof(ValueInfo)); + data += sizeof(ValueInfo); + } + + if (ExtraValues_Count > 0) + { + ExtraValues = new int32_t[ExtraValues_Count]; + memcpy(ExtraValues, data, ExtraValues_Count * 4); + } + + break; + } + case MetadataBufferTypeGrallocSource: + if (extrasize > 0) + return IMB_INVAL_BUFFER; + + break; + default: + return IMB_INVAL_BUFFER; + } + + //store data + mType = type; + mValue = value; + if (mInfo) + delete mInfo; + mInfo = info; + if (mExtraValues) + delete[] mExtraValues; + mExtraValues = ExtraValues; + mExtraValues_Count = 
ExtraValues_Count; + + return IMB_SUCCESS; +} + +IMB_Result IntelMetadataBuffer::GetBytes(uint8_t* &data, uint32_t& size) +{ + if (mBytes == NULL) + { + if (mType == MetadataBufferTypeGrallocSource && mInfo) + return IMB_INVAL_PARAM; + + //assemble bytes according members + mSize = 8; + if (mInfo) + { + mSize += sizeof(ValueInfo); + if (mExtraValues) + mSize += 4 * mExtraValues_Count; + } + + mBytes = new uint8_t[mSize]; + uint8_t *ptr = mBytes; + memcpy(ptr, &mType, 4); + ptr += 4; + memcpy(ptr, &mValue, 4); + ptr += 4; + + if (mInfo) + { + memcpy(ptr, mInfo, sizeof(ValueInfo)); + ptr += sizeof(ValueInfo); + + if (mExtraValues) + memcpy(ptr, mExtraValues, mExtraValues_Count * 4); + } + } + + data = mBytes; + size = mSize; + + return IMB_SUCCESS; +} + +uint32_t IntelMetadataBuffer::GetMaxBufferSize() +{ + return 256; +} diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h new file mode 100644 index 0000000..802ef7b --- /dev/null +++ b/videoencoder/IntelMetadataBuffer.h @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2007 Intel Corporation. All Rights Reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sub license, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice (including the + * next paragraph) shall be included in all copies or substantial portions + * of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. + * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR + * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +#ifndef _INTEL_METADATA_BUFFER_H_ +#define _INTEL_METADATA_BUFFER_H_ + +#include + +#define STRING_TO_FOURCC(format) ((uint32_t)(((format)[0])|((format)[1]<<8)|((format)[2]<<16)|((format)[3]<<24))) + +typedef enum { + IMB_SUCCESS = 0, + IMB_INVAL_PARAM = 1, + IMB_INVAL_BUFFER = 2, +}IMB_Result; + +typedef enum { + MEM_MODE_MALLOC = 1, + MEM_MODE_CI = 2, + MEM_MODE_V4L2 = 4, + MEM_MODE_SURFACE = 8, + MEM_MODE_USRPTR = 16, + MEM_MODE_GFXHANDLE = 32, + MEM_MODE_KBUFHANDLE = 64, + MEM_MODE_ION = 128, +}MemMode; + +typedef struct { + MemMode mode; //memory type, vasurface/malloc/gfx/ion/v4l2/ci etc + uint32_t handle; //handle + uint32_t size; //memory size + uint32_t width; //picture width + uint32_t height; //picture height + uint32_t lumaStride; //picture luma stride + uint32_t chromStride; //picture chrom stride + uint32_t format; //color format + uint32_t s3dformat; //S3D format +}ValueInfo; + +typedef enum { + MetadataBufferTypeCameraSource = 0, //for CameraSource + MetadataBufferTypeGrallocSource = 1, //for SurfaceMediaSource + MetadataBufferTypeEncoder = 2, //for WiDi clone mode + MetadataBufferTypeUser = 3, //for WiDi user mode + MetadataBufferTypeLast = 4, //type number +}MetadataBufferType; + +class IntelMetadataBuffer { +public: + IntelMetadataBuffer(); //for generator + IntelMetadataBuffer(MetadataBufferType type, int32_t value); //for quick generator + ~IntelMetadataBuffer(); + + IMB_Result GetType(MetadataBufferType &type); + IMB_Result SetType(MetadataBufferType type); + IMB_Result GetValue(int32_t &value); + IMB_Result SetValue(int32_t value); + IMB_Result GetValueInfo(ValueInfo* &info); + IMB_Result SetValueInfo(ValueInfo *info); + IMB_Result GetExtraValues(int32_t* &values, uint32_t &num); + IMB_Result SetExtraValues(int32_t *values, uint32_t num); + + //for bytes input, also for parser + IMB_Result SetBytes(uint8_t* data, uint32_t size); + + //for bytes output, also for generator + IMB_Result GetBytes(uint8_t* &data, uint32_t& size); + + //Static, for get max IntelMetadataBuffer size + static uint32_t GetMaxBufferSize(); + +private: + MetadataBufferType mType; + int32_t mValue; + ValueInfo* mInfo; + + int32_t* mExtraValues; + uint32_t mExtraValues_Count; + + uint8_t* mBytes; + uint32_t mSize; +}; + +#endif + diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 2ee25f8..9930b99 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -875,8 +875,8 @@ Encode_Status VideoEncoderAVC::renderPictureParams() { LOG_V( "Begin\n\n"); // set picture params for HW - avcPicParams.ReferenceFrames[0].picture_id= mRefFrame->surface; - avcPicParams.CurrPic.picture_id= mRecFrame->surface; + avcPicParams.ReferenceFrames[0].picture_id= mRefSurface; + avcPicParams.CurrPic.picture_id= mRecSurface; avcPicParams.coded_buf = mVACodedBuffer [mCodedBufIndex]; //avcPicParams.picture_width = mComParams.resolution.width; //avcPicParams.picture_height = mComParams.resolution.height; diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 287b8c2..2dea07e 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -8,6 +8,7 @@ #include #include "VideoEncoderLog.h" #include "VideoEncoderBase.h" +#include "IntelMetadataBuffer.h" #include #include @@ -34,7 +35,6 @@ VAStatus vaUnlockSurface(VADisplay dpy, VideoEncoderBase::VideoEncoderBase() :mInitialized(false) ,mVADisplay(NULL) - ,mVADecoderDisplay(NULL) ,mVAContext(0) ,mVAConfig(0) ,mVAEntrypoint(VAEntrypointEncSlice) @@ 
-42,10 +42,6 @@ VideoEncoderBase::VideoEncoderBase() ,mOffsetInSeg(0) ,mTotalSize(0) ,mTotalSizeCopied(0) - ,mBufferMode(BUFFER_SHARING_NONE) - ,mUpstreamBufferList(NULL) - ,mUpstreamBufferCnt(0) - ,mBufAttrib(NULL) ,mForceKeyFrame(false) ,mNewHeader(false) ,mFirstFrame (true) @@ -60,17 +56,13 @@ VideoEncoderBase::VideoEncoderBase() ,mSeqParamBuf(0) ,mPicParamBuf(0) ,mSliceParamBuf(0) - ,mSharedSurfaces(NULL) ,mSurfaces(NULL) ,mSurfaceCnt(0) - ,mSharedSurfacesCnt(0) - ,mReqSurfacesCnt(0) - ,mUsrPtr(NULL) - ,mVideoSrcBufferList(NULL) - ,mCurFrame(NULL) - ,mRefFrame(NULL) - ,mRecFrame(NULL) - ,mLastFrame(NULL) + ,mSrcSurfaceMapList(NULL) + ,mCurSurface(VA_INVALID_SURFACE) + ,mRefSurface(VA_INVALID_SURFACE) + ,mRecSurface(VA_INVALID_SURFACE) + ,mLastSurface(VA_INVALID_SURFACE) ,mLastInputRawBuffer(NULL) ,mEncodedFrames(0) ,mFrameNum(0) @@ -105,6 +97,12 @@ VideoEncoderBase::VideoEncoderBase() if (vaStatus != VA_STATUS_SUCCESS) { LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus); } + +#ifdef VIDEO_ENC_STATISTICS_ENABLE + memset(&mVideoStat, 0, sizeof(VideoStatistics)); + mVideoStat.min_encode_time = 0xFFFFFFFF; +#endif + } VideoEncoderBase::~VideoEncoderBase() { @@ -123,27 +121,18 @@ Encode_Status VideoEncoderBase::start() { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; - VASurfaceID *surfaces = NULL; + VASurfaceID surfaces[2]; + int32_t index = -1; + SurfaceMap *map = mSrcSurfaceMapList; VAConfigAttrib vaAttrib[2]; - uint32_t index; uint32_t maxSize = 0; - VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL; - uint32_t normalSurfacesCnt = 2; - if (mInitialized) { LOG_V("Encoder has been started\n"); return ENCODE_ALREADY_INIT; } - // For upstream allocates buffer, it is mandatory to set buffer mode - // and for other stuff, it is optional - // Different buffer mode will have different surface handling approach - - // mSharedSurfacesCnt is for upstream buffer allocation case - mSharedSurfacesCnt = 0; - vaAttrib[0].type = VAConfigAttribRTFormat; vaAttrib[1].type = VAConfigAttribRateControl; vaAttrib[0].value = VA_RT_FORMAT_YUV420; @@ -173,139 +162,31 @@ Encode_Status VideoEncoderBase::start() { mRenderBitRate = true; } - LOG_I("mReqSurfacesCnt = %d\n", mReqSurfacesCnt); - LOG_I("mUpstreamBufferCnt = %d\n", mUpstreamBufferCnt); - - if (mReqSurfacesCnt == 0) { - switch (mBufferMode) { - case BUFFER_SHARING_CI: - case BUFFER_SHARING_V4L2: - case BUFFER_SHARING_SURFACE: - case BUFFER_SHARING_GFXHANDLE: - case BUFFER_SHARING_KBUFHANDLE: - { - mSharedSurfacesCnt = mUpstreamBufferCnt; - normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE; - - if (mSharedSurfacesCnt != 0) { - mSharedSurfaces = new VASurfaceID[mSharedSurfacesCnt]; - - if (mSharedSurfaces == NULL) { - LOG_E("Failed allocate shared surface\n"); - ret = ENCODE_NO_MEMORY; - goto CLEAN_UP; - } - } else { - LOG_E("Set to upstream mode, but no upstream info, something is wrong"); - ret = ENCODE_FAIL; - goto CLEAN_UP; - } - break; - } - - default: - { - mBufferMode = BUFFER_SHARING_NONE; - normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE; - break; - } - } - } else if (mReqSurfacesCnt == 1) { - // TODO: Un-normal case, - mBufferMode = BUFFER_SHARING_NONE; - normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE; - } else { - mBufferMode = BUFFER_SHARING_USRPTR; - mUsrPtr = new uint8_t *[mReqSurfacesCnt]; - if (mUsrPtr == NULL) { - LOG_E("Failed allocate memory\n"); - ret = ENCODE_NO_MEMORY; - goto CLEAN_UP; - } - } - - LOG_E("mBufferMode = %d\n", mBufferMode); - - 
mSurfaceCnt = normalSurfacesCnt + mSharedSurfacesCnt + mReqSurfacesCnt; - - surfaces = new VASurfaceID[normalSurfacesCnt]; - if (surfaces == NULL) { - LOG_E("Failed allocate surface\n"); - ret = ENCODE_NO_MEMORY; - goto CLEAN_UP; - } - - mSurfaces = new VASurfaceID[mSurfaceCnt] ; - if (mSurfaces == NULL) { - LOG_E("Failed allocate private surface\n"); - ret = ENCODE_NO_MEMORY; - goto CLEAN_UP; - } - + LOG_V( "======VA CreateSurfaces for Rec/Ref frames ======\n"); vaStatus = vaCreateSurfaces(mVADisplay,VA_RT_FORMAT_YUV420, mComParams.resolution.width, mComParams.resolution.height, - surfaces, normalSurfacesCnt, NULL , 0); + surfaces, 2, NULL , 0); CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateSurfaces"); + mRefSurface = surfaces[0]; + mRecSurface = surfaces[1]; - switch (mBufferMode) { - case BUFFER_SHARING_CI: - ret = surfaceMappingForCIFrameList(); - CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForCIFrameList"); - break; - case BUFFER_SHARING_V4L2: - // To be develped - break; - case BUFFER_SHARING_SURFACE: - ret = surfaceMappingForSurfaceList(); - CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForSurfaceList"); - break; - case BUFFER_SHARING_GFXHANDLE: - ret = surfaceMappingForGfxHandle(); - CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForGfxHandle"); - break; - case BUFFER_SHARING_KBUFHANDLE: - ret = surfaceMappingForKbufHandle(); - CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForKbufHandle"); - break; - case BUFFER_SHARING_NONE: - break; - case BUFFER_SHARING_USRPTR: { - videoSurfaceBuffer = mVideoSrcBufferList; - index = 0; - while (videoSurfaceBuffer != NULL) { - mSurfaces[index] = videoSurfaceBuffer->surface; - mUsrPtr [index] = videoSurfaceBuffer->usrptr; - videoSurfaceBuffer = videoSurfaceBuffer->next; - index ++; - } - } - break; - default: - break; + //count total surface id already allocated + mSurfaceCnt = 2; + + while(map) { + mSurfaceCnt ++; + map = map->next; } - for (index = 0; index < normalSurfacesCnt; index++) { - mSurfaces[mReqSurfacesCnt + mSharedSurfacesCnt + index] = surfaces[index]; - - videoSurfaceBuffer = new VideoEncSurfaceBuffer; - if (videoSurfaceBuffer == NULL) { - LOG_E( "new VideoEncSurfaceBuffer failed\n"); - return ENCODE_NO_MEMORY; - } - - videoSurfaceBuffer->surface = surfaces[index]; - videoSurfaceBuffer->usrptr = NULL; - videoSurfaceBuffer->index = mReqSurfacesCnt + mSharedSurfacesCnt + index; - videoSurfaceBuffer->bufAvailable = true; - videoSurfaceBuffer->next = NULL; - - mVideoSrcBufferList = appendVideoSurfaceBuffer(mVideoSrcBufferList, videoSurfaceBuffer); - - videoSurfaceBuffer = NULL; + mSurfaces = new VASurfaceID[mSurfaceCnt]; + map = mSrcSurfaceMapList; + while(map) { + mSurfaces[++index] = map->surface; + map->added = true; + map = map->next; } - - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", mSurfaceCnt); + mSurfaces[++index] = mRefSurface; + mSurfaces[++index] = mRecSurface; //Initialize and save the VA context ID LOG_V( "vaCreateContext\n"); @@ -349,8 +230,6 @@ CLEAN_UP: mInitialized = true; } - if (surfaces) delete []surfaces; - LOG_V( "end\n"); return ret; } @@ -363,13 +242,42 @@ Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer) { } CHECK_NULL_RETURN_IFFAIL(inBuffer); + +#ifdef VIDEO_ENC_STATISTICS_ENABLE + struct timespec ts1; + clock_gettime(CLOCK_MONOTONIC, &ts1); + +#endif + + Encode_Status status; + if (mComParams.syncEncMode) { LOG_I("Sync Enocde Mode, no optimization, no one frame delay\n"); - return syncEncode(inBuffer); + status = syncEncode(inBuffer); } else { LOG_I("Async Enocde Mode, HW/SW works 
in parallel, introduce one frame delay\n"); - return asyncEncode(inBuffer); + status = asyncEncode(inBuffer); + } + +#ifdef VIDEO_ENC_STATISTICS_ENABLE + struct timespec ts2; + clock_gettime(CLOCK_MONOTONIC, &ts2); + + uint32_t encode_time = (ts2.tv_sec - ts1.tv_sec) * 1000000 + (ts2.tv_nsec - ts1.tv_nsec) / 1000; + if (encode_time > mVideoStat.max_encode_time) { + mVideoStat.max_encode_time = encode_time; + mVideoStat.max_encode_frame = mFrameNum; } + + if (encode_time < mVideoStat.min_encode_time) { + mVideoStat.min_encode_time = encode_time; + mVideoStat.min_encode_frame = mFrameNum; + } + + mVideoStat.average_encode_time += encode_time; +#endif + + return status; } Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { @@ -390,9 +298,10 @@ Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { // Start encoding process LOG_V( "vaBeginPicture\n"); LOG_I( "mVAContext = 0x%08x\n",(uint32_t) mVAContext); - LOG_I( "Surface = 0x%08x\n",(uint32_t) mCurFrame->surface); + LOG_I( "Surface = 0x%08x\n",(uint32_t) mCurSurface); LOG_I( "mVADisplay = 0x%08x\n",(uint32_t)mVADisplay); +#if 0 #ifdef DUMP_SRC_DATA if (mBufferMode == BUFFER_SHARING_SURFACE && mFirstFrame){ @@ -403,7 +312,7 @@ Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { uint32_t stride = 0; uint32_t frameSize = 0; - vaStatus = vaDeriveImage(mVADisplay, mCurFrame->surface, &image); + vaStatus = vaDeriveImage(mVADisplay, mCurSurface, &image); CHECK_VA_STATUS_RETURN("vaDeriveImage"); LOG_V( "vaDeriveImage Done\n"); @@ -436,9 +345,10 @@ Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { vaStatus = vaDestroyImage(mVADisplay, image.image_id); CHECK_VA_STATUS_RETURN("vaDestroyImage"); } +#endif #endif - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurFrame->surface); + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface); CHECK_VA_STATUS_RETURN("vaBeginPicture"); ret = sendEncodeCommand(); @@ -454,8 +364,8 @@ Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { decideFrameType(); } - LOG_I ("vaSyncSurface ID = 0x%08x\n", mLastFrame->surface); - vaStatus = vaSyncSurface(mVADisplay, mLastFrame->surface); + LOG_I ("vaSyncSurface ID = 0x%08x\n", mLastSurface); + vaStatus = vaSyncSurface(mVADisplay, mLastSurface); if (vaStatus != VA_STATUS_SUCCESS) { LOG_W( "Failed vaSyncSurface\n"); } @@ -469,7 +379,7 @@ Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); if (mFirstFrame) { - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurFrame->surface); + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface); CHECK_VA_STATUS_RETURN("vaBeginPicture"); ret = sendEncodeCommand(); @@ -483,18 +393,19 @@ Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { // Query the status of current surface VASurfaceStatus vaSurfaceStatus; - vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastFrame->surface, &vaSurfaceStatus); + vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastSurface, &vaSurfaceStatus); CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); mPicSkipped = vaSurfaceStatus & VASurfaceSkipped; - if (!mFirstFrame) { - VideoEncoderBase::appendVideoSurfaceBuffer(mVideoSrcBufferList, mLastFrame); - } +#ifdef VIDEO_ENC_STATISTICS_ENABLE + if (mPicSkipped) + mVideoStat.skipped_frames ++; +#endif - mLastFrame = NULL; + mLastSurface = VA_INVALID_SURFACE; updateProperities(); - mCurFrame = NULL; + mCurSurface = VA_INVALID_SURFACE; if 
(mLastInputRawBuffer) mLastInputRawBuffer->bufAvailable = true; @@ -511,7 +422,7 @@ Encode_Status VideoEncoderBase::syncEncode(VideoEncRawBuffer *inBuffer) { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; uint8_t *buf = NULL; - VideoEncSurfaceBuffer *tmpFrame = NULL; + VASurfaceID tmpSurface = VA_INVALID_SURFACE; inBuffer->bufAvailable = false; if (mNewHeader) mFrameNum = 0; @@ -522,7 +433,7 @@ Encode_Status VideoEncoderBase::syncEncode(VideoEncRawBuffer *inBuffer) { ret = manageSrcSurface(inBuffer); CHECK_ENCODE_STATUS_RETURN("manageSrcSurface"); - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurFrame->surface); + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface); CHECK_VA_STATUS_RETURN("vaBeginPicture"); ret = sendEncodeCommand(); @@ -531,8 +442,8 @@ Encode_Status VideoEncoderBase::syncEncode(VideoEncRawBuffer *inBuffer) { vaStatus = vaEndPicture(mVADisplay, mVAContext); CHECK_VA_STATUS_RETURN("vaEndPicture"); - LOG_I ("vaSyncSurface ID = 0x%08x\n", mCurFrame->surface); - vaStatus = vaSyncSurface(mVADisplay, mCurFrame->surface); + LOG_I ("vaSyncSurface ID = 0x%08x\n", mCurSurface); + vaStatus = vaSyncSurface(mVADisplay, mCurSurface); if (vaStatus != VA_STATUS_SUCCESS) { LOG_W( "Failed vaSyncSurface\n"); } @@ -547,23 +458,27 @@ Encode_Status VideoEncoderBase::syncEncode(VideoEncRawBuffer *inBuffer) { // Query the status of current surface VASurfaceStatus vaSurfaceStatus; - vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurFrame->surface, &vaSurfaceStatus); + vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurSurface, &vaSurfaceStatus); CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); mPicSkipped = vaSurfaceStatus & VASurfaceSkipped; - VideoEncoderBase::appendVideoSurfaceBuffer(mVideoSrcBufferList, mCurFrame); - mCurFrame = NULL; + mCurSurface = 0; mEncodedFrames ++; mFrameNum ++; if (!mPicSkipped) { - tmpFrame = mRecFrame; - mRecFrame = mRefFrame; - mRefFrame = tmpFrame; + tmpSurface = mRecSurface; + mRecSurface = mRefSurface; + mRefSurface = tmpSurface; } +#ifdef VIDEO_ENC_STATISTICS_ENABLE + if (mPicSkipped) + mVideoStat.skipped_frames ++; +#endif + inBuffer->bufAvailable = true; return ENCODE_SUCCESS; } @@ -651,24 +566,6 @@ void VideoEncoderBase::flush() { LOG_V( "Begin\n"); - // put reconstructed surface back to list - if (mRecFrame != NULL) { - appendVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame); - mRecFrame = NULL; - } - - // put reference surface back to list - if (mRefFrame != NULL) { - appendVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame); - mRefFrame = NULL; - } - - // Here this raw buffer means the surface being encoding - if (mLastInputRawBuffer) { - mLastInputRawBuffer->bufAvailable = true; - mLastInputRawBuffer = NULL; - } - // reset the properities mEncodedFrames = 0; mFrameNum = 0; @@ -682,37 +579,15 @@ Encode_Status VideoEncoderBase::stop() { VAStatus vaStatus = VA_STATUS_SUCCESS; Encode_Status ret = ENCODE_SUCCESS; - VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL; - VideoEncSurfaceBuffer *tmpBuffer = NULL; - + SurfaceMap *map = NULL; LOG_V( "Begin\n"); - if (mSharedSurfaces) { - delete [] mSharedSurfaces; - mSharedSurfaces = NULL; - } - if (mSurfaces) { delete [] mSurfaces; mSurfaces = NULL; } - if (mUsrPtr) { - delete [] mUsrPtr; - mUsrPtr = NULL; - } - - if (mUpstreamBufferList) { - delete [] mUpstreamBufferList; - mUpstreamBufferList = NULL; - } - - if (mBufAttrib) { - delete mBufAttrib; - mBufAttrib = NULL; - } - // It is possible that above pointers have been allocated // before we set mInitialized to true if 
(!mInitialized) { @@ -720,27 +595,6 @@ Encode_Status VideoEncoderBase::stop() { return ENCODE_SUCCESS; } - LOG_V( "Release frames\n"); - - // put reconstructed surface back to list - if (mRecFrame != NULL) { - appendVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame); - mRecFrame = NULL; - } - - // put reference surface back to list - if (mRefFrame != NULL) { - appendVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame); - mRefFrame = NULL; - } - - // put Source surface back to list - if (mLastFrame != NULL) { - appendVideoSurfaceBuffer(mVideoSrcBufferList, mLastFrame); - mLastFrame = NULL; - } - - LOG_V( "Release surfaces\n"); LOG_V( "vaDestroyContext\n"); vaStatus = vaDestroyContext(mVADisplay, mVAContext); CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext"); @@ -749,19 +603,28 @@ Encode_Status VideoEncoderBase::stop() { vaStatus = vaDestroyConfig(mVADisplay, mVAConfig); CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig"); - // Release Src Surface Buffer List - LOG_V( "Rlease Src Surface Buffer \n"); - - videoSurfaceBuffer = mVideoSrcBufferList; + // Release Src Surface Buffer Map + LOG_V( "Rlease Src Surface Map\n"); - while (videoSurfaceBuffer != NULL) { - tmpBuffer = videoSurfaceBuffer; - videoSurfaceBuffer = videoSurfaceBuffer->next; - delete tmpBuffer; + map = mSrcSurfaceMapList; + while(map) { + if (! map->added) { + //destroy surface by itself + LOG_V( "Rlease Src Surface Buffer not added into vaContext\n"); + vaDestroySurfaces(mVADisplay, &map->surface, 1); + } + SurfaceMap *tmp = map; + map = map->next; + delete tmp; } CLEAN_UP: mInitialized = false; + +#ifdef VIDEO_ENC_STATISTICS_ENABLE + memset(&mVideoStat, 0, sizeof(VideoStatistics)); + mVideoStat.min_encode_time = 0xFFFFFFFF; +#endif LOG_V( "end\n"); return ret; } @@ -948,6 +811,8 @@ void VideoEncoderBase::setDefaultParams() { mHrdParam.bufferSize = 0; mHrdParam.initBufferFullness = 0; + + mStoreMetaDataInBuffers.isEnabled = false; } Encode_Status VideoEncoderBase::setParameters( @@ -1010,6 +875,19 @@ Encode_Status VideoEncoderBase::setParameters( break; } + case VideoParamsTypeStoreMetaDataInBuffers: { + VideoParamsStoreMetaDataInBuffers *metadata = + reinterpret_cast (videoEncParams); + + if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) { + return ENCODE_INVALID_PARAMS; + } + + mStoreMetaDataInBuffers.isEnabled = metadata->isEnabled; + + break; + } + case VideoParamsTypeAVC: case VideoParamsTypeH263: case VideoParamsTypeMP4: @@ -1020,7 +898,7 @@ Encode_Status VideoEncoderBase::setParameters( default: { LOG_E ("Wrong ParamType here\n"); - break; + return ENCODE_INVALID_PARAMS; } } return ret; @@ -1083,6 +961,19 @@ Encode_Status VideoEncoderBase::getParameters( break; } + case VideoParamsTypeStoreMetaDataInBuffers: { + VideoParamsStoreMetaDataInBuffers *metadata = + reinterpret_cast (videoEncParams); + + if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) { + return ENCODE_INVALID_PARAMS; + } + + metadata->isEnabled = mStoreMetaDataInBuffers.isEnabled; + + break; + } + case VideoParamsTypeAVC: case VideoParamsTypeH263: case VideoParamsTypeMP4: @@ -1303,7 +1194,7 @@ void VideoEncoderBase:: decideFrameType () { void VideoEncoderBase:: updateProperities () { - VideoEncSurfaceBuffer *tmpFrame = NULL; + VASurfaceID tmp = VA_INVALID_SURFACE; LOG_V( "Begin\n"); mEncodedFrames ++; @@ -1312,12 +1203,12 @@ void VideoEncoderBase:: updateProperities () { mCodedBufIndex ++; mCodedBufIndex %=2; - mLastFrame = mCurFrame; + mLastSurface = mCurSurface; if (!mPicSkipped) { - tmpFrame = mRecFrame; - mRecFrame = mRefFrame; - 
mRefFrame = tmpFrame; + tmp = mRecSurface; + mRecSurface = mRefSurface; + mRefSurface = tmp; } LOG_V( "End\n"); @@ -1361,6 +1252,25 @@ Encode_Status VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) { return ENCODE_SUCCESS; } +Encode_Status VideoEncoderBase::getStatistics (VideoStatistics *videoStat) { + +#ifdef VIDEO_ENC_STATISTICS_ENABLE + if (videoStat != NULL) { + videoStat->total_frames = mEncodedFrames; + videoStat->skipped_frames = mVideoStat.skipped_frames; + videoStat->average_encode_time = mVideoStat.average_encode_time / mEncodedFrames; + videoStat->max_encode_time = mVideoStat.max_encode_time; + videoStat->max_encode_frame = mVideoStat.max_encode_frame; + videoStat->min_encode_time = mVideoStat.min_encode_time; + videoStat->min_encode_frame = mVideoStat.min_encode_frame; + } + + return ENCODE_SUCCESS; +#else + return ENCODE_NOT_SUPPORTED; +#endif +} + Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( uint32_t width, uint32_t height, uint32_t format, uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr) { @@ -1372,7 +1282,7 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( VAImage image; uint32_t index = 0; - VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL; + SurfaceMap *map = NULL; LOG_V( "Begin\n"); @@ -1390,24 +1300,24 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( // Current only NV12 is supported in VA API // Through format we can get known the number of planes if (format != STRING_TO_FOURCC("NV12")) { - LOG_W ("Format is not supported\n"); return ENCODE_NOT_SUPPORTED; } - VASurfaceAttributeTPI *attribute_tpi = new VASurfaceAttributeTPI; - attribute_tpi->size = expectedSize; - attribute_tpi->luma_stride = width; - attribute_tpi->chroma_u_stride = width; - attribute_tpi->chroma_v_stride = width; - attribute_tpi->luma_offset = 0; - attribute_tpi->chroma_u_offset = width*height; - attribute_tpi->chroma_v_offset = width*height; - attribute_tpi->pixel_format = VA_FOURCC_NV12; - attribute_tpi->type = VAExternalMemoryNULL; + VASurfaceAttributeTPI attribute_tpi; + + attribute_tpi.size = expectedSize; + attribute_tpi.luma_stride = width; + attribute_tpi.chroma_u_stride = width; + attribute_tpi.chroma_v_stride = width; + attribute_tpi.luma_offset = 0; + attribute_tpi.chroma_u_offset = width*height; + attribute_tpi.chroma_v_offset = width*height; + attribute_tpi.pixel_format = VA_FOURCC_NV12; + attribute_tpi.type = VAExternalMemoryNULL; vaCreateSurfacesWithAttribute(mVADisplay, width, height, VA_RT_FORMAT_YUV420, - 1, &surface, attribute_tpi); + 1, &surface, &attribute_tpi); CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); vaStatus = vaDeriveImage(mVADisplay, surface, &image); @@ -1428,19 +1338,28 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( *outsize = image.data_size; *stride = image.pitches[0]; - videoSurfaceBuffer = new VideoEncSurfaceBuffer; - if (videoSurfaceBuffer == NULL) { - LOG_E( "new VideoEncSurfaceBuffer failed\n"); + map = new SurfaceMap; + if (map == NULL) { + LOG_E( "new SurfaceMap failed\n"); return ENCODE_NO_MEMORY; } - videoSurfaceBuffer->surface = surface; - videoSurfaceBuffer->usrptr = *usrptr; - videoSurfaceBuffer->index = mReqSurfacesCnt; - videoSurfaceBuffer->bufAvailable = true; - videoSurfaceBuffer->next = NULL; + map->surface = surface; + map->type = MetadataBufferTypeEncoder; + map->value = (int32_t)*usrptr; + map->vinfo.mode = (MemMode)MEM_MODE_USRPTR; + map->vinfo.handle = 0; + map->vinfo.size = 0; + map->vinfo.width = width; + map->vinfo.height = height; + 
map->vinfo.lumaStride = width; + map->vinfo.chromStride = width; + map->vinfo.format = VA_FOURCC_NV12; + map->vinfo.s3dformat = 0xffffffff; + map->added = false; + map->next = NULL; - mVideoSrcBufferList = appendVideoSurfaceBuffer(mVideoSrcBufferList, videoSurfaceBuffer); + mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); LOG_I( "surface = 0x%08x\n",(uint32_t)surface); LOG_I("image->pitches[0] = %d\n", image.pitches[0]); @@ -1453,10 +1372,7 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( LOG_I ("data_size = %d\n", image.data_size); LOG_I ("usrptr = 0x%p\n", *usrptr); - LOG_I ("mReqSurfacesCnt = %d\n", mReqSurfacesCnt); - LOG_I ("videoSurfaceBuffer->usrptr = 0x%p\n ", videoSurfaceBuffer->usrptr); - - videoSurfaceBuffer = NULL; + LOG_I ("map->value = 0x%p\n ", (void *)map->value); vaStatus = vaUnmapBuffer(mVADisplay, image.buf); CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); @@ -1473,12 +1389,7 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( return ENCODE_FAIL; } - mReqSurfacesCnt ++; ret = ENCODE_SUCCESS; - if(attribute_tpi) { - delete attribute_tpi; - attribute_tpi = NULL; - } return ret; } @@ -1486,68 +1397,63 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer) { + Encode_Status status = ENCODE_SUCCESS; + CHECK_NULL_RETURN_IFFAIL(upStreamBuffer); if (upStreamBuffer->bufCnt == 0) { LOG_E("bufCnt == 0\n"); return ENCODE_FAIL; } - if (mUpstreamBufferList) delete [] mUpstreamBufferList; - if (mBufAttrib) delete mBufAttrib; - - mUpstreamBufferCnt = upStreamBuffer->bufCnt; - mVADecoderDisplay = upStreamBuffer->display; - mBufferMode = upStreamBuffer->bufferMode; - mBufAttrib = new ExternalBufferAttrib; - if (!mBufAttrib) { - LOG_E ("mBufAttrib NULL\n"); - return ENCODE_NO_MEMORY; - } - - if (upStreamBuffer->bufAttrib) { - memcpy(mBufAttrib, upStreamBuffer->bufAttrib, sizeof(ExternalBufferAttrib)); - } else { + if (upStreamBuffer->bufAttrib == NULL) { LOG_E ("Buffer Attrib doesn't set by client, return error"); return ENCODE_INVALID_PARAMS; } - mUpstreamBufferList = new uint32_t [upStreamBuffer->bufCnt]; - if (!mUpstreamBufferList) { - LOG_E ("mUpstreamBufferList NULL\n"); - return ENCODE_NO_MEMORY; + for(unsigned int i=0; i < upStreamBuffer->bufCnt; i++) { + if (findSurfaceMapByValue(mSrcSurfaceMapList, upStreamBuffer->bufList[i]) != NULL) //already mapped + continue; + + //wrap upstream buffer into vaSurface + SurfaceMap *map = new SurfaceMap; + + map->type = MetadataBufferTypeUser; + map->value = upStreamBuffer->bufList[i]; + map->vinfo.mode = (MemMode)upStreamBuffer->bufferMode; + map->vinfo.handle = (uint32_t)upStreamBuffer->display; + map->vinfo.size = 0; + map->vinfo.width = upStreamBuffer->bufAttrib->realWidth; + map->vinfo.height = upStreamBuffer->bufAttrib->realHeight; + map->vinfo.lumaStride = upStreamBuffer->bufAttrib->lumaStride; + map->vinfo.chromStride = upStreamBuffer->bufAttrib->chromStride; + map->vinfo.format = upStreamBuffer->bufAttrib->format; + map->vinfo.s3dformat = 0xFFFFFFFF; + map->added = false; + map->next = NULL; + status = surfaceMapping(map); + + if (status == ENCODE_SUCCESS) + mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); + else + delete map; + + if (mSrcSurfaceMapList == NULL) { + LOG_E ("mSrcSurfaceMapList should not be NULL now, maybe meet mapping error\n"); + return ENCODE_NO_MEMORY; + } } - memcpy(mUpstreamBufferList, upStreamBuffer->bufList, upStreamBuffer->bufCnt * sizeof (uint32_t)); - return 
ENCODE_SUCCESS; + return status; } +Encode_Status VideoEncoderBase::surfaceMappingForSurface(SurfaceMap *map) { -Encode_Status VideoEncoderBase::generateVideoBufferAndAttachToList(uint32_t index, uint8_t *usrptr) { - - VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL; - videoSurfaceBuffer = new VideoEncSurfaceBuffer; - if (videoSurfaceBuffer == NULL) { - LOG_E( "new VideoEncSurfaceBuffer failed\n"); - return ENCODE_NO_MEMORY; - } - - videoSurfaceBuffer->surface = mSharedSurfaces[index]; - videoSurfaceBuffer->usrptr = NULL; - videoSurfaceBuffer->index = index; - videoSurfaceBuffer->bufAvailable = true; - videoSurfaceBuffer->next = NULL; - - mVideoSrcBufferList = appendVideoSurfaceBuffer - (mVideoSrcBufferList, videoSurfaceBuffer); - videoSurfaceBuffer = NULL; - - return ENCODE_SUCCESS; -} + if (!map) + return ENCODE_NULL_PTR; -Encode_Status VideoEncoderBase::surfaceMappingForSurfaceList() { - uint32_t index; VAStatus vaStatus = VA_STATUS_SUCCESS; Encode_Status ret = ENCODE_SUCCESS; + VASurfaceID surface; uint32_t fourCC = 0; uint32_t lumaStride = 0; @@ -1558,442 +1464,443 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurfaceList() { uint32_t chromaVOffset = 0; uint32_t kBufHandle = 0; - VASurfaceAttributeTPI * vaSurfaceAttrib = new VASurfaceAttributeTPI; - if (vaSurfaceAttrib == NULL) { - LOG_E("Failed to allocate VASurfaceAttrib\n"); - return ENCODE_NO_MEMORY; - } - vaSurfaceAttrib->buffers = new uint32_t[mSharedSurfacesCnt]; + VASurfaceAttributeTPI vaSurfaceAttrib; + uint32_t buf; - for (index = 0; index < mSharedSurfacesCnt; index++) { - - vaStatus = vaLockSurface( - mVADecoderDisplay, (VASurfaceID)mUpstreamBufferList[index], - &fourCC, &lumaStride, &chromaUStride, &chromaVStride, - &lumaOffset, &chromaUOffset, &chromaVOffset, &kBufHandle, NULL); - - CHECK_VA_STATUS_RETURN("vaLockSurface"); - LOG_I("Surface incoming = 0x%08x", mUpstreamBufferList[index]); - LOG_I("lumaStride = %d", lumaStride); - LOG_I("chromaUStride = %d", chromaUStride); - LOG_I("chromaVStride = %d", chromaVStride); - LOG_I("lumaOffset = %d", lumaOffset); - LOG_I("chromaUOffset = %d", chromaUOffset); - LOG_I("chromaVOffset = %d", chromaVOffset); - LOG_I("kBufHandle = 0x%08x", kBufHandle); - LOG_I("fourCC = %d", fourCC); - - vaStatus = vaUnlockSurface(mVADecoderDisplay, (VASurfaceID)mUpstreamBufferList[index]); - CHECK_VA_STATUS_RETURN("vaUnlockSurface"); - - vaSurfaceAttrib->size = mComParams.resolution.width*mComParams.resolution.height*1.5; - vaSurfaceAttrib->luma_stride = lumaStride; - vaSurfaceAttrib->chroma_u_stride = chromaUStride; - vaSurfaceAttrib->chroma_v_stride = chromaVStride; - vaSurfaceAttrib->luma_offset = lumaOffset; - vaSurfaceAttrib->chroma_u_offset = chromaUOffset; - vaSurfaceAttrib->chroma_v_offset = chromaVOffset; - vaSurfaceAttrib->buffers[0] = kBufHandle; - vaSurfaceAttrib->pixel_format = fourCC; - vaSurfaceAttrib->type = VAExternalMemoryKernelDRMBufffer; - - vaStatus = vaCreateSurfacesWithAttribute( - mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, - 1 /*mSharedSurfacesCnt*/, &mSharedSurfaces[index], vaSurfaceAttrib); - - CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf"); - - LOG_I("Surface ID created from Kbuf = 0x%08x", mSharedSurfaces[index]); - - mSurfaces[index] = mSharedSurfaces[index]; - ret = generateVideoBufferAndAttachToList(index, NULL); - CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList"); - } + vaSurfaceAttrib.buffers = &buf; + + vaStatus = vaLockSurface( + (VADisplay)map->vinfo.handle, (VASurfaceID)map->value, + 
&fourCC, &lumaStride, &chromaUStride, &chromaVStride, + &lumaOffset, &chromaUOffset, &chromaVOffset, &kBufHandle, NULL); + + CHECK_VA_STATUS_RETURN("vaLockSurface"); + LOG_I("Surface incoming = 0x%08x", map->value); + LOG_I("lumaStride = %d", lumaStride); + LOG_I("chromaUStride = %d", chromaUStride); + LOG_I("chromaVStride = %d", chromaVStride); + LOG_I("lumaOffset = %d", lumaOffset); + LOG_I("chromaUOffset = %d", chromaUOffset); + LOG_I("chromaVOffset = %d", chromaVOffset); + LOG_I("kBufHandle = 0x%08x", kBufHandle); + LOG_I("fourCC = %d", fourCC); + + vaStatus = vaUnlockSurface((VADisplay)map->vinfo.handle, (VASurfaceID)map->value); + CHECK_VA_STATUS_RETURN("vaUnlockSurface"); + + vaSurfaceAttrib.count = 1; + vaSurfaceAttrib.size = mComParams.resolution.width * mComParams.resolution.height * 3 /2; + vaSurfaceAttrib.luma_stride = lumaStride; + vaSurfaceAttrib.chroma_u_stride = chromaUStride; + vaSurfaceAttrib.chroma_v_stride = chromaVStride; + vaSurfaceAttrib.luma_offset = lumaOffset; + vaSurfaceAttrib.chroma_u_offset = chromaUOffset; + vaSurfaceAttrib.chroma_v_offset = chromaVOffset; + vaSurfaceAttrib.buffers[0] = kBufHandle; + vaSurfaceAttrib.pixel_format = fourCC; + vaSurfaceAttrib.type = VAExternalMemoryKernelDRMBufffer; + + vaStatus = vaCreateSurfacesWithAttribute( + mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, + 1, &surface, &vaSurfaceAttrib); + + CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf"); + + LOG_I("Surface ID created from Kbuf = 0x%08x", surface); + + map->surface = surface; - if(vaSurfaceAttrib) { - if(vaSurfaceAttrib->buffers) { - delete [] vaSurfaceAttrib->buffers; - vaSurfaceAttrib->buffers= NULL; - } - delete vaSurfaceAttrib; - vaSurfaceAttrib = NULL; - } return ret; } -Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle() { +Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle(SurfaceMap *map) { + + if (!map) + return ENCODE_NULL_PTR; - uint32_t index; VAStatus vaStatus = VA_STATUS_SUCCESS; Encode_Status ret = ENCODE_SUCCESS; + VASurfaceID surface; - VASurfaceAttributeTPI * vaSurfaceAttrib = new VASurfaceAttributeTPI; - if (vaSurfaceAttrib == NULL) { - LOG_E("Failed to allocate VASurfaceAttrib\n"); - return ENCODE_NO_MEMORY; - } + VASurfaceAttributeTPI vaSurfaceAttrib; + uint32_t buf; - vaSurfaceAttrib->buffers = new uint32_t[mSharedSurfacesCnt]; - if (vaSurfaceAttrib->buffers == NULL) { - LOG_E("Failed to allocate buffers for vaSurfaceAttrib\n"); - return ENCODE_NO_MEMORY; - } + vaSurfaceAttrib.buffers = &buf; - LOG_I("mSharedSurfacesCnt = %d\n", mSharedSurfacesCnt); - LOG_I("lumaStride = %d\n", mBufAttrib->lumaStride); - LOG_I("format = 0x%08x\n", mBufAttrib->format); + LOG_I("surfaceMappingForGfxHandle ......\n"); + LOG_I("lumaStride = %d\n", map->vinfo.lumaStride); + LOG_I("format = 0x%08x\n", map->vinfo.format); LOG_I("width = %d\n", mComParams.resolution.width); LOG_I("height = %d\n", mComParams.resolution.height); + LOG_I("gfxhandle = %d\n", map->value); - vaSurfaceAttrib->count = mSharedSurfacesCnt; - vaSurfaceAttrib->luma_stride = mBufAttrib->lumaStride; - vaSurfaceAttrib->pixel_format = mBufAttrib->format; - vaSurfaceAttrib->width = mComParams.resolution.width; - vaSurfaceAttrib->height = mComParams.resolution.height; - vaSurfaceAttrib->type = VAExternalMemoryAndroidGrallocBuffer; - for(index = 0; index < mSharedSurfacesCnt; index++) { - vaSurfaceAttrib->buffers[index] = (uint32_t) mUpstreamBufferList[index]; - LOG_I("NativeHandleList[%d] = 0x%08x", index, mUpstreamBufferList[index]); - } + 
vaSurfaceAttrib.count = 1; + vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride; + vaSurfaceAttrib.pixel_format = map->vinfo.format; + vaSurfaceAttrib.width = mComParams.resolution.width; + vaSurfaceAttrib.height = mComParams.resolution.height; + vaSurfaceAttrib.type = VAExternalMemoryAndroidGrallocBuffer; + vaSurfaceAttrib.buffers[0] = (uint32_t) map->value; vaStatus = vaCreateSurfacesWithAttribute( mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, - mSharedSurfacesCnt, - mSharedSurfaces, - vaSurfaceAttrib); + 1, + &surface, + &vaSurfaceAttrib); CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); LOG_V("Successfully create surfaces from native hanle"); - for(index = 0; index < mSharedSurfacesCnt; index++) { - mSurfaces[index] = mSharedSurfaces[index]; - ret = generateVideoBufferAndAttachToList(index, NULL); - LOG_I("mSurfaces[%d] = %08x", index, mSurfaces[index]); - CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList"); - } - - if(vaSurfaceAttrib) { - if(vaSurfaceAttrib->buffers) { - delete [] vaSurfaceAttrib->buffers; - vaSurfaceAttrib->buffers= NULL; - } - delete vaSurfaceAttrib; - vaSurfaceAttrib = NULL; - } + map->surface = surface; LOG_V("surfaceMappingForGfxHandle: Done"); return ret; } -Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle() { +Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle(SurfaceMap *map) { - uint32_t index; + if (!map) + return ENCODE_NULL_PTR; + + LOG_I("surfaceMappingForKbufHandle value=%d\n", map->value); VAStatus vaStatus = VA_STATUS_SUCCESS; Encode_Status ret = ENCODE_SUCCESS; + VASurfaceID surface; uint32_t lumaOffset = 0; - uint32_t chromaUOffset = mBufAttrib->realHeight * mBufAttrib->lumaStride; + uint32_t chromaUOffset = map->vinfo.height * map->vinfo.lumaStride; uint32_t chromaVOffset = chromaUOffset + 1; - VASurfaceAttributeTPI * vaSurfaceAttrib = new VASurfaceAttributeTPI; - if (vaSurfaceAttrib == NULL) { - LOG_E("Failed to allocate VASurfaceAttrib\n"); - return ENCODE_NO_MEMORY; - } - vaSurfaceAttrib->buffers = new uint32_t[mSharedSurfacesCnt]; - - for (index = 0; index < mSharedSurfacesCnt; index++) { - vaSurfaceAttrib->size = mBufAttrib->lumaStride * mComParams.resolution.height * 3 / 2; - vaSurfaceAttrib->luma_stride = mBufAttrib->lumaStride; - vaSurfaceAttrib->chroma_u_stride = mBufAttrib->chromStride; - vaSurfaceAttrib->chroma_v_stride = mBufAttrib->chromStride; - vaSurfaceAttrib->luma_offset = lumaOffset; - vaSurfaceAttrib->chroma_u_offset = chromaUOffset; - vaSurfaceAttrib->chroma_v_offset = chromaVOffset; - vaSurfaceAttrib->buffers[0] = mUpstreamBufferList[index]; - vaSurfaceAttrib->pixel_format = mBufAttrib->format; - vaSurfaceAttrib->type = VAExternalMemoryKernelDRMBufffer; - - vaStatus = vaCreateSurfacesWithAttribute( - mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, - 1 /*mSharedSurfacesCnt*/, &mSharedSurfaces[index], vaSurfaceAttrib); - - CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf"); - - LOG_I("Surface ID created from Kbuf = 0x%08x", mSharedSurfaces[index]); - - mSurfaces[index] = mSharedSurfaces[index]; - ret = generateVideoBufferAndAttachToList(index, NULL); - CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList"); - } + VASurfaceAttributeTPI vaSurfaceAttrib; + uint32_t buf; + + vaSurfaceAttrib.buffers = &buf; - if(vaSurfaceAttrib) { - if(vaSurfaceAttrib->buffers) { - delete [] vaSurfaceAttrib->buffers; - vaSurfaceAttrib->buffers= NULL; - } - delete vaSurfaceAttrib; - vaSurfaceAttrib = NULL; - } + 
vaSurfaceAttrib.count = 1; + vaSurfaceAttrib.size = map->vinfo.lumaStride * mComParams.resolution.height * 3 / 2; + vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride; + vaSurfaceAttrib.chroma_u_stride = map->vinfo.chromStride; + vaSurfaceAttrib.chroma_v_stride = map->vinfo.chromStride; + vaSurfaceAttrib.luma_offset = lumaOffset; + vaSurfaceAttrib.chroma_u_offset = chromaUOffset; + vaSurfaceAttrib.chroma_v_offset = chromaVOffset; + vaSurfaceAttrib.buffers[0] = map->value; + vaSurfaceAttrib.pixel_format = map->vinfo.format; + vaSurfaceAttrib.type = VAExternalMemoryKernelDRMBufffer; - return ret; -} + vaStatus = vaCreateSurfacesWithAttribute( + mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, + 1, &surface, &vaSurfaceAttrib); -Encode_Status VideoEncoderBase::surfaceMappingForCIFrameList() { - uint32_t index; - VAStatus vaStatus = VA_STATUS_SUCCESS; - Encode_Status ret = ENCODE_SUCCESS; - VASurfaceAttributeTPI * vaSurfaceAttrib = new VASurfaceAttributeTPI; - if (vaSurfaceAttrib == NULL) { - LOG_E("Failed to allocate VASurfaceAttrib\n"); - return ENCODE_NO_MEMORY; - } - vaSurfaceAttrib->type = VAExternalMemoryCIFrame; - vaSurfaceAttrib->buffers = new uint32_t[mSharedSurfacesCnt]; - - for (index = 0; index < mSharedSurfacesCnt; index++) { - vaSurfaceAttrib->buffers[0] = (uint32_t)mUpstreamBufferCnt; - vaStatus = vaCreateSurfacesWithAttribute( - mVADisplay, - mComParams.resolution.width, - mComParams.resolution.height, - VA_RT_FORMAT_YUV420, - 1 /*mSharedSurfacesCnt*/, - &mSharedSurfaces[index], - vaSurfaceAttrib); - CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); - mSurfaces[index] = mSharedSurfaces[index]; - - ret = generateVideoBufferAndAttachToList(index, NULL); - CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList") - } - - if(vaSurfaceAttrib) { - if(vaSurfaceAttrib->buffers) { - delete [] vaSurfaceAttrib->buffers; - vaSurfaceAttrib->buffers= NULL; - } - delete vaSurfaceAttrib; - vaSurfaceAttrib = NULL; - } + CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf"); + + LOG_I("Surface ID created from Kbuf = 0x%08x", map->value); + + map->surface = surface; + return ret; } -Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { +Encode_Status VideoEncoderBase::surfaceMappingForCI(SurfaceMap *map) { + + if (!map) + return ENCODE_NULL_PTR; - Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; + Encode_Status ret = ENCODE_SUCCESS; + VASurfaceID surface; - uint32_t idx = 0; - uint32_t bufIndex = 0; - uint32_t data = 0; + VASurfaceAttributeTPI vaSurfaceAttrib; + uint32_t buf; - if (mBufferMode == BUFFER_SHARING_CI) { + vaSurfaceAttrib.buffers = &buf; - memcpy(&bufIndex, inBuffer->data, sizeof(unsigned int)); - // bufIndex = *(uint32_t*)inBuffer->data; + vaSurfaceAttrib.count = 1; + vaSurfaceAttrib.type = VAExternalMemoryCIFrame; + vaSurfaceAttrib.buffers[0] = (uint32_t)map->value; + vaStatus = vaCreateSurfacesWithAttribute( + mVADisplay, + mComParams.resolution.width, + mComParams.resolution.height, + VA_RT_FORMAT_YUV420, + 1, + &surface, + &vaSurfaceAttrib); + CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); - LOG_I("mSurfaceCnt = %d\n", mSurfaceCnt); - LOG_I("bufIndex = %d\n", bufIndex); + map->surface = surface; + + return ret; +} - if (bufIndex > mSurfaceCnt - 2) { - LOG_E("the CI frame idx is bigger than total CI frame count\n"); - ret = ENCODE_FAIL; - return ret; +Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { - } + if (!map) + return ENCODE_NULL_PTR; 
- } else if (mBufferMode == BUFFER_SHARING_SURFACE || - mBufferMode == BUFFER_SHARING_GFXHANDLE || - mBufferMode == BUFFER_SHARING_KBUFHANDLE) { + VAStatus vaStatus = VA_STATUS_SUCCESS; + Encode_Status ret = ENCODE_SUCCESS; + VASurfaceID surface; - bufIndex = (uint32_t) -1; - data = *(uint32_t*)inBuffer->data; + VASurfaceAttributeTPI vaSurfaceAttrib; + uint32_t buf; - LOG_I("data = 0x%08x\n", data); + vaSurfaceAttrib.buffers = &buf; - for (idx = 0; idx < mSharedSurfacesCnt; idx++) { + vaSurfaceAttrib.count = 1; + vaSurfaceAttrib.width = map->vinfo.width; + vaSurfaceAttrib.height = map->vinfo.height; + vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride; + vaSurfaceAttrib.buffers[0] = map->value; + vaSurfaceAttrib.pixel_format = map->vinfo.format; + vaSurfaceAttrib.type = VAExternalMemoryUserPointer; - LOG_I("mUpstreamBufferList[%d] = 0x%08x\n", idx, mUpstreamBufferList[idx]); - if (data == mUpstreamBufferList[idx]) - bufIndex = idx; - } + vaStatus = vaCreateSurfacesWithAttribute( + mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, + 1, &surface, &vaSurfaceAttrib); - LOG_I("mSurfaceCnt = %d\n", mSurfaceCnt); - LOG_I("bufIndex = %d\n", bufIndex); + CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromMalloc"); - if (bufIndex > mSurfaceCnt - 2) { - LOG_E("Can't find the surface in our list\n"); - ret = ENCODE_FAIL; - return ret; - } - }else if (mBufferMode == BUFFER_SHARING_USRPTR) { + LOG_I("Surface ID created from Malloc = 0x%08x", map->value); - bufIndex = (uint32_t) -1; //fixme, temp use a big value + map->surface = surface; - LOG_I("bufin->data = 0x%p\n", inBuffer->data); + return ret; +} - for (idx = 0; idx < mReqSurfacesCnt; idx++) { - LOG_I("mUsrPtr[%d] = 0x%p\n", idx, mUsrPtr[idx]); +Encode_Status VideoEncoderBase::surfaceMapping(SurfaceMap *map) { - if (inBuffer->data == mUsrPtr[idx]) - bufIndex = idx; - } + if (!map) + return ENCODE_NULL_PTR; - LOG_I("mSurfaceCnt = %d\n", mSurfaceCnt); - LOG_I("bufIndex = %d\n", bufIndex); + Encode_Status status; - if (bufIndex > mSurfaceCnt - 2) { - LOG_W("the Surface idx is too big, most likely the buffer passed in is not allocated by us\n"); - ret = ENCODE_FAIL; - goto no_share_mode; - } +LOG_I("surfaceMapping mode=%d, format=%d, lumaStride=%d, width=%d, heith=%d, value=%x\n", map->vinfo.mode, map->vinfo.format, map->vinfo.lumaStride, map->vinfo.width, map->vinfo.height, map->value); + switch (map->vinfo.mode) { + case MEM_MODE_CI: + status = surfaceMappingForCI(map); + break; + case MEM_MODE_SURFACE: + status = surfaceMappingForSurface(map); + break; + case MEM_MODE_GFXHANDLE: + status = surfaceMappingForGfxHandle(map); + break; + case MEM_MODE_KBUFHANDLE: + status = surfaceMappingForKbufHandle(map); + break; + case MEM_MODE_MALLOC: + status = surfaceMappingForMalloc(map); + break; + case MEM_MODE_ION: + case MEM_MODE_V4L2: + case MEM_MODE_USRPTR: + default: + status = ENCODE_NOT_SUPPORTED; + break; } + return status; +} - switch (mBufferMode) { - - case BUFFER_SHARING_CI: - case BUFFER_SHARING_SURFACE: - case BUFFER_SHARING_GFXHANDLE: - case BUFFER_SHARING_KBUFHANDLE: - case BUFFER_SHARING_USRPTR: { - - if (mRefFrame== NULL) { - mRefFrame = getVideoSurfaceBufferByIndex(mVideoSrcBufferList, mSurfaceCnt -1 ); - if (mRefFrame == NULL) { - LOG_E ("No Surface buffer available, something should be wrong\n"); - return ENCODE_FAIL; - } - mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame); - - } - - if (mRecFrame== NULL) { - mRecFrame = getVideoSurfaceBufferByIndex(mVideoSrcBufferList, 
mSurfaceCnt - 2); - if (mRecFrame == NULL) { - LOG_E ("No Surface buffer available, something should be wrong\n"); - return ENCODE_FAIL; - } - mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame); +Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { - } + Encode_Status ret = ENCODE_SUCCESS; + MetadataBufferType type; + int32_t value; + ValueInfo vinfo; + ValueInfo *pvinfo = &vinfo; + int32_t *extravalues = NULL; + unsigned int extravalues_count = 0; - if (mCurFrame== NULL) { - mCurFrame = getVideoSurfaceBufferByIndex(mVideoSrcBufferList, bufIndex); - if (mCurFrame == NULL) { - LOG_E ("No Surface buffer available, something should be wrong\n"); - return ENCODE_FAIL; - } - mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mCurFrame); - } + IntelMetadataBuffer *imb = new IntelMetadataBuffer; + SurfaceMap *map = NULL; + + if (mStoreMetaDataInBuffers.isEnabled) { + //metadatabuffer mode + LOG_I("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); + if (imb->SetBytes(inBuffer->data, inBuffer->size) != IMB_SUCCESS) { + //fail to parse buffer + delete imb; + return ENCODE_NO_REQUEST_DATA; } - break; - case BUFFER_SHARING_V4L2: - LOG_E("Not Implemented\n"); - break; - - case BUFFER_SHARING_NONE: { -no_share_mode: + imb->GetType(type); + imb->GetValue(value); + } else { + //raw mode + LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); + if (! inBuffer->data || inBuffer->size == 0) { + delete imb; + return ENCODE_NULL_PTR; + } - if (mRefFrame== NULL) { - mRefFrame = mVideoSrcBufferList; - if (mRefFrame == NULL) { - LOG_E("No Surface buffer available, something should be wrong\n"); - return ENCODE_FAIL; - } - mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame); + type = MetadataBufferTypeUser; + value = (int32_t)inBuffer->data; + } + + //find if mapped + map = findSurfaceMapByValue(mSrcSurfaceMapList, value); + + if (map) { + //has mapped, get surfaceID directly + LOG_I("direct find surface %d from value %x\n", map->surface, value); + mCurSurface = map->surface; + + delete imb; + return ret; + } + + //if no found from list, then try to map value with parameters + LOG_I("not find surface from cache with value %x, start mapping if enough information\n", value); + + if (mStoreMetaDataInBuffers.isEnabled) { + + //if type is MetadataBufferTypeGrallocSource, use default parameters + if (type == MetadataBufferTypeGrallocSource) { + vinfo.mode = MEM_MODE_GFXHANDLE; + vinfo.handle = 0; + vinfo.size = 0; + vinfo.width = mComParams.resolution.width; + vinfo.height = mComParams.resolution.height; + vinfo.lumaStride = mComParams.resolution.width; + vinfo.chromStride = mComParams.resolution.width; + vinfo.format = VA_FOURCC_NV12; + vinfo.s3dformat = 0xFFFFFFFF; + } else { + //get all info mapping needs + imb->GetValueInfo(pvinfo); + imb->GetExtraValues(extravalues, extravalues_count); + } + + } else { - } + //raw mode + vinfo.mode = MEM_MODE_MALLOC; + vinfo.handle = 0; + vinfo.size = inBuffer->size; + vinfo.width = mComParams.resolution.width; + vinfo.height = mComParams.resolution.height; + vinfo.lumaStride = mComParams.resolution.width; + vinfo.chromStride = mComParams.resolution.width; + vinfo.format = VA_FOURCC_NV12; + vinfo.s3dformat = 0xFFFFFFFF; + } + + /* Start mapping, if pvinfo is not NULL, then have enough info to map; + * if extravalues is not NULL, then need to do more times mapping + */ + if (pvinfo){ + //map according info, and add to surfacemap list + map = 
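/* Client-side counterpart of this mapping step (a sketch, assuming only the
 * APIs added in this patch; 'encoder', 'handle' and 'vinfo' are illustrative
 * names, not values from the source):
 *
 *   VideoParamsStoreMetaDataInBuffers meta;
 *   meta.isEnabled = true;                   // switch encode() into metadata mode
 *   encoder->setParameters(&meta);
 *
 *   IntelMetadataBuffer imb(MetadataBufferTypeUser, handle);
 *   imb.SetValueInfo(&vinfo);                // mode/stride/size needed for mapping
 *   uint8_t *data; uint32_t size;
 *   imb.GetBytes(data, size);                // serialize into the input buffer
 *
 *   VideoEncRawBuffer in;
 *   in.data = data;
 *   in.size = size;
 *   encoder->encode(&in);                    // lands here: handle -> VASurfaceID
 */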
new SurfaceMap; + map->type = type; + map->value = value; + memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo)); + map->added = false; + map->next = NULL; + + ret = surfaceMapping(map); + if (ret == ENCODE_SUCCESS) { + LOG_I("surface mapping success, map value %x into surface %d\n", value, map->surface); + mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); + } else { + delete map; + delete imb; + LOG_E("surface mapping failed, wrong info or meet serious error\n"); + return ret; + } - if (mRecFrame== NULL) { - mRecFrame = mVideoSrcBufferList; - if (mRecFrame == NULL) { - LOG_E ("No Surface buffer available, something should be wrong\n"); - return ENCODE_FAIL; - } - mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame); + mCurSurface = map->surface; + } else { + //can't map due to no info + delete imb; + LOG_E("surface mapping failed, missing information\n"); + return ENCODE_NO_REQUEST_DATA; + } + + if (extravalues) { + //map more using same ValueInfo + for(unsigned int i=0; itype = type; + map->value = extravalues[i]; + memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo)); + map->added = false; + map->next = NULL; + + ret = surfaceMapping(map); + if (ret == ENCODE_SUCCESS) { + LOG_I("surface mapping extravalue success, map value %x into surface %d\n", extravalues[i], map->surface); + mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); + } else { + delete map; + map = NULL; + LOG_E( "surface mapping extravalue failed, extravalue is %x\n", extravalues[i]); } - - if (mCurFrame== NULL) { - mCurFrame = mVideoSrcBufferList; - if (mCurFrame == NULL) { - LOG_E ("No Surface buffer available, something should be wrong\n"); - return ENCODE_FAIL; - } - mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mCurFrame); - } - - LOG_V( "Get Surface Done\n"); - ret = uploadDataToSurface (inBuffer); - CHECK_ENCODE_STATUS_RETURN("uploadDataToSurface"); } - break; - default: - break; - } - - return ENCODE_SUCCESS; + + delete imb; + + return ret; } -VideoEncSurfaceBuffer *VideoEncoderBase::appendVideoSurfaceBuffer( - VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer) { +SurfaceMap *VideoEncoderBase::appendSurfaceMap( + SurfaceMap *head, SurfaceMap *map) { if (head == NULL) { - return buffer; + return map; } - VideoEncSurfaceBuffer *node = head; - VideoEncSurfaceBuffer *tail = NULL; + SurfaceMap *node = head; + SurfaceMap *tail = NULL; while (node != NULL) { tail = node; node = node->next; } - tail->next = buffer; + tail->next = map; return head; } -VideoEncSurfaceBuffer *VideoEncoderBase::removeVideoSurfaceBuffer( - VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer) { +SurfaceMap *VideoEncoderBase::removeSurfaceMap( + SurfaceMap *head, SurfaceMap *map) { - VideoEncSurfaceBuffer *node = head; - VideoEncSurfaceBuffer *tmpNode = NULL; + SurfaceMap *node = head; + SurfaceMap *tmpNode = NULL; - if (head == buffer) { + if (head == map) { tmpNode = head->next; - buffer->next = NULL; + map->next = NULL; return tmpNode; } while (node != NULL) { - if (node->next == buffer) + if (node->next == map) break; node = node->next; } if (node != NULL) { - node->next = buffer->next; + node->next = map->next; } - buffer->next = NULL; + map->next = NULL; return head; - } -VideoEncSurfaceBuffer *VideoEncoderBase::getVideoSurfaceBufferByIndex( - VideoEncSurfaceBuffer *head, uint32_t index) { - VideoEncSurfaceBuffer *node = head; +SurfaceMap *VideoEncoderBase::findSurfaceMapByValue( + SurfaceMap *head, int32_t value) { + + SurfaceMap *node = head; while (node != 
NULL) { - if (node->index == index) + if (node->value == value) break; node = node->next; } @@ -2001,6 +1908,7 @@ VideoEncSurfaceBuffer *VideoEncoderBase::getVideoSurfaceBufferByIndex( return node; } +#if 0 Encode_Status VideoEncoderBase::uploadDataToSurface(VideoEncRawBuffer *inBuffer) { VAStatus vaStatus = VA_STATUS_SUCCESS; @@ -2023,9 +1931,9 @@ Encode_Status VideoEncoderBase::uploadDataToSurface(VideoEncRawBuffer *inBuffer) uint32_t uvWidth = width; LOG_V("map source data to surface\n"); - LOG_I("Surface ID = 0x%08x\n", (uint32_t) mCurFrame->surface); + LOG_I("Surface ID = 0x%08x\n", (uint32_t) mCurSurface); - vaStatus = vaDeriveImage(mVADisplay, mCurFrame->surface, &srcImage); + vaStatus = vaDeriveImage(mVADisplay, mCurSurface, &srcImage); CHECK_VA_STATUS_RETURN("vaDeriveImage"); LOG_V( "vaDeriveImage Done\n"); @@ -2092,6 +2000,7 @@ Encode_Status VideoEncoderBase::uploadDataToSurface(VideoEncRawBuffer *inBuffer) return ENCODE_SUCCESS; } +#endif Encode_Status VideoEncoderBase::renderDynamicBitrate() { VAStatus vaStatus = VA_STATUS_SUCCESS; diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 7cc8f62..cf65085 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -13,6 +13,18 @@ #include #include "VideoEncoderDef.h" #include "VideoEncoderInterface.h" +#include "IntelMetadataBuffer.h" + +struct SurfaceMap { + VASurfaceID surface; + MetadataBufferType type; + int32_t value; + ValueInfo vinfo; + uint32_t index; + bool added; + SurfaceMap *next; +}; + class VideoEncoderBase : IVideoEncoder { public: @@ -38,6 +50,7 @@ public: virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig); virtual Encode_Status getMaxOutSize(uint32_t *maxSize); + virtual Encode_Status getStatistics(VideoStatistics *videoStat); protected: virtual Encode_Status sendEncodeCommand(void) = 0; @@ -59,23 +72,24 @@ private: Encode_Status setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer); Encode_Status getNewUsrptrFromSurface(uint32_t width, uint32_t height, uint32_t format, uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr); - Encode_Status generateVideoBufferAndAttachToList(uint32_t index, uint8_t *usrptr); - Encode_Status surfaceMappingForSurfaceList(); - Encode_Status surfaceMappingForGfxHandle(); - Encode_Status surfaceMappingForCIFrameList(); - Encode_Status surfaceMappingForKbufHandle(); - - VideoEncSurfaceBuffer *appendVideoSurfaceBuffer( - VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer); - VideoEncSurfaceBuffer *removeVideoSurfaceBuffer( - VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer); - VideoEncSurfaceBuffer *getVideoSurfaceBufferByIndex( - VideoEncSurfaceBuffer *head, uint32_t index); + Encode_Status surfaceMappingForSurface(SurfaceMap *map); + Encode_Status surfaceMappingForGfxHandle(SurfaceMap *map); + Encode_Status surfaceMappingForCI(SurfaceMap *map); + Encode_Status surfaceMappingForKbufHandle(SurfaceMap *map); + Encode_Status surfaceMappingForMalloc(SurfaceMap *map); + Encode_Status surfaceMapping(SurfaceMap *map); + + SurfaceMap *appendSurfaceMap( + SurfaceMap *head, SurfaceMap *map); + SurfaceMap *removeSurfaceMap( + SurfaceMap *head, SurfaceMap *map); + SurfaceMap *findSurfaceMapByValue( + SurfaceMap *head, int32_t value); Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer); void updateProperities(void); void decideFrameType(void); - Encode_Status uploadDataToSurface(VideoEncRawBuffer *inBuffer); +// Encode_Status uploadDataToSurface(VideoEncRawBuffer 
*inBuffer); Encode_Status syncEncode(VideoEncRawBuffer *inBuffer); Encode_Status asyncEncode(VideoEncRawBuffer *inBuffer); @@ -83,7 +97,6 @@ protected: bool mInitialized; VADisplay mVADisplay; - VADisplay mVADecoderDisplay; VAContextID mVAContext; VAConfigID mVAConfig; VAEntrypoint mVAEntrypoint; @@ -95,11 +108,7 @@ protected: VideoParamsCommon mComParams; VideoParamsHRD mHrdParam; - - VideoBufferSharingMode mBufferMode; - uint32_t *mUpstreamBufferList; - uint32_t mUpstreamBufferCnt; - ExternalBufferAttrib *mBufAttrib; + VideoParamsStoreMetaDataInBuffers mStoreMetaDataInBuffers; bool mForceKeyFrame; bool mNewHeader; @@ -121,18 +130,16 @@ protected: VABufferID mPicParamBuf; VABufferID mSliceParamBuf; - VASurfaceID *mSharedSurfaces; VASurfaceID *mSurfaces; uint32_t mSurfaceCnt; - uint32_t mSharedSurfacesCnt; - uint32_t mReqSurfacesCnt; - uint8_t **mUsrPtr; - VideoEncSurfaceBuffer *mVideoSrcBufferList; - VideoEncSurfaceBuffer *mCurFrame; //current input frame to be encoded; - VideoEncSurfaceBuffer *mRefFrame; //reference frame - VideoEncSurfaceBuffer *mRecFrame; //reconstructed frame; - VideoEncSurfaceBuffer *mLastFrame; //last frame; + SurfaceMap *mSrcSurfaceMapList; + + //for new design + VASurfaceID mCurSurface; //current input surface to be encoded + VASurfaceID mRefSurface; //reference surface + VASurfaceID mRecSurface; //reconstructed surface + VASurfaceID mLastSurface; //last surface VideoEncRawBuffer *mLastInputRawBuffer; @@ -148,6 +155,10 @@ protected: bool mDataCopiedOut; bool mKeyFrame; +#ifdef VIDEO_ENC_STATISTICS_ENABLE + VideoStatistics mVideoStat; +#endif + // Constants static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE = 2; static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE = 8; diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index d5f10b5..b9feca2 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -261,6 +261,7 @@ enum VideoParamConfigType { VideoParamsTypeUpSteamBuffer, VideoParamsTypeUsrptrBuffer, VideoParamsTypeHRD, + VideoParamsTypeStoreMetaDataInBuffers, VideoConfigTypeFrameRate, VideoConfigTypeBitRate, @@ -410,6 +411,16 @@ struct VideoParamsHRD : VideoParamConfigSet { uint32_t initBufferFullness; }; +struct VideoParamsStoreMetaDataInBuffers : VideoParamConfigSet { + + VideoParamsStoreMetaDataInBuffers() { + type = VideoParamsTypeStoreMetaDataInBuffers; + size = sizeof(VideoParamsStoreMetaDataInBuffers); + } + + bool isEnabled; +}; + struct VideoConfigFrameRate : VideoParamConfigSet { VideoConfigFrameRate() { @@ -500,4 +511,15 @@ struct VideoConfigSliceNum : VideoParamConfigSet { SliceNum sliceNum; }; + +typedef struct { + uint32_t total_frames; + uint32_t skipped_frames; + uint32_t average_encode_time; + uint32_t max_encode_time; + uint32_t max_encode_frame; + uint32_t min_encode_time; + uint32_t min_encode_frame; +}VideoStatistics; + #endif /* __VIDEO_ENCODER_DEF_H__ */ diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp index 2aed78f..68f8741 100644 --- a/videoencoder/VideoEncoderH263.cpp +++ b/videoencoder/VideoEncoderH263.cpp @@ -86,8 +86,8 @@ Encode_Status VideoEncoderH263::renderPictureParams() { LOG_V( "Begin\n\n"); // set picture params for HW - h263PictureParams.reference_picture = mRefFrame->surface; - h263PictureParams.reconstructed_picture = mRecFrame->surface; + h263PictureParams.reference_picture = mRefSurface; + h263PictureParams.reconstructed_picture = mRecSurface; h263PictureParams.coded_buf = mVACodedBuffer [mCodedBufIndex]; 
h263PictureParams.picture_width = mComParams.resolution.width; h263PictureParams.picture_height = mComParams.resolution.height; diff --git a/videoencoder/VideoEncoderInterface.h b/videoencoder/VideoEncoderInterface.h index 416c29d..243e4a1 100644 --- a/videoencoder/VideoEncoderInterface.h +++ b/videoencoder/VideoEncoderInterface.h @@ -24,6 +24,7 @@ public: virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig) = 0; virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig) = 0; virtual Encode_Status getMaxOutSize(uint32_t *maxSize) = 0; + virtual Encode_Status getStatistics(VideoStatistics *videoStat) = 0; }; #endif /* VIDEO_ENCODER_INTERFACE_H_ */ diff --git a/videoencoder/VideoEncoderLog.h b/videoencoder/VideoEncoderLog.h index 4c1e982..49c34df 100644 --- a/videoencoder/VideoEncoderLog.h +++ b/videoencoder/VideoEncoderLog.h @@ -23,6 +23,7 @@ __android_log_print(level, comp, "%s():%d: "format, \ __FUNCTION__, __LINE__, ##__VA_ARGS__) +#if 1 #ifdef VIDEO_ENC_LOG_ENABLE #define LOG_V(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__) #define LOG_I(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_INFO, format, ##__VA_ARGS__) @@ -34,6 +35,12 @@ #endif #define LOG_E(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_ERROR, format, ##__VA_ARGS__) +#else +#define LOG_V printf +#define LOG_I printf +#define LOG_W printf +#define LOG_E printf +#endif #define CHECK_VA_STATUS_RETURN(FUNC)\ if (vaStatus != VA_STATUS_SUCCESS) {\ diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp index a220563..51068f4 100644 --- a/videoencoder/VideoEncoderMP4.cpp +++ b/videoencoder/VideoEncoderMP4.cpp @@ -211,8 +211,8 @@ Encode_Status VideoEncoderMP4::renderPictureParams() { LOG_V( "Begin\n\n"); // set picture params for HW - mpeg4_pic_param.reference_picture = mRefFrame->surface; - mpeg4_pic_param.reconstructed_picture = mRecFrame->surface; + mpeg4_pic_param.reference_picture = mRefSurface; + mpeg4_pic_param.reconstructed_picture = mRecSurface; mpeg4_pic_param.coded_buf = mVACodedBuffer[mCodedBufIndex]; mpeg4_pic_param.picture_width = mComParams.resolution.width; mpeg4_pic_param.picture_height = mComParams.resolution.height; -- cgit v1.2.3 From e53ae3b54a3f668e2d38bb80500313f29b4a2350 Mon Sep 17 00:00:00 2001 From: Sergey Melnikov Date: Tue, 15 May 2012 16:09:06 +0400 Subject: icc:Uninitialized variable BZ:36480 Usage of uninitialized variables Change-Id: Ie93d4281aad4ff11ab985cd18ccca6ee2d8d3a99 Signed-off-by: Sergey Melnikov Reviewed-on: http://android.intel.com:8080/49556 Reviewed-by: Beare, Bruce J Reviewed-by: Sun, Hang L Reviewed-by: Chupin, Pavel V Reviewed-by: Ding, Haitao Reviewed-by: Smith, Geoff Tested-by: Ng, Cheon-woei Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c index bbf2835..1037958 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c @@ -130,7 +130,7 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) { viddec_workload_item_t wi; - h264_slice_data slice_data; + h264_slice_data slice_data = {}; uint32_t i=0, nitems=0, data=0; uint32_t bits_offset =0, byte_offset =0; -- cgit v1.2.3 From 25933e9c3f5a5e8227c1c2c06f50104fe2fa9cb4 Mon Sep 17 
00:00:00 2001 From: hding3 Date: Thu, 5 Jul 2012 11:24:27 +0800 Subject: [libmix] Add one log for further bz40478 analysis BZ: 45166 Add one log for further bz40478 analysis Change-Id: I4ef85c5bc582248047b61fc02a01fb4ebc29ac26 Signed-off-by: Zhao Liang Signed-off-by: hding3 Reviewed-on: http://android.intel.com:8080/55364 Reviewed-by: Yuan, Shengquan Reviewed-by: buildbot Tested-by: buildbot --- test/mix_encoder.cpp | 10 +++++++--- videoencoder/VideoEncoderAVC.cpp | 1 + 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index a228b71..3198054 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -721,9 +721,13 @@ for(int i=0; i<1; i++) fclose(file); VideoStatistics stat; - gVideoEncoder->getStatistics(&stat); - printf("\nVideoStatistics\n"); - printf("Encoded %d frames, Skip %d frames, encode time: average( %d us), max( %d us/Frame %d), min( %d us/Frame %d)\n", stat.total_frames, stat.skipped_frames, stat.average_encode_time, stat.max_encode_time, stat.max_encode_frame, stat.min_encode_time, stat.min_encode_frame ); + if (gVideoEncoder->getStatistics(&stat) == ENCODE_SUCCESS) + { + printf("\nVideoStatistics\n"); + printf("Encoded %d frames, Skip %d frames, encode time: average( %d us), max( %d us/Frame %d), min( %d us/Frame %d)\n", \ + stat.total_frames, stat.skipped_frames, stat.average_encode_time, stat.max_encode_time, stat.max_encode_frame, \ + stat.min_encode_time, stat.min_encode_frame ); + } if(gVideoEncoder) { releaseVideoEncoder(gVideoEncoder); gVideoEncoder = NULL; diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 9930b99..8833548 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -298,6 +298,7 @@ Encode_Status VideoEncoderAVC::getOneNALUnit( if (inBuffer[pos - 1] != 0x01 || zeroByteCount < 2) { LOG_E("The stream is not AnnexB format \n"); + LOG_E("segment status is %x \n", status); return ENCODE_FAIL; //not AnnexB, we won't process it } -- cgit v1.2.3 From 611427a217c0ed24b1c0e87975dab66cdd7ce1be Mon Sep 17 00:00:00 2001 From: Bruce Beare Date: Fri, 13 Jul 2012 20:49:35 -0700 Subject: build: make various subsystems selectable BZ: 46710 For bringup and support of various hardware, we need to be able to disable intel add-on's in a convenient way via BoardConfig.mk. This commit adds required fixes to various Android.mk files. 
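For reference, the AnnexB convention behind the getOneNALUnit check above: every NAL unit must be preceded by a start code of at least two zero bytes followed by 0x01 (00 00 01, or 00 00 00 01). A minimal sketch of that test (hypothetical helper, not code from any commit in this series):

#include <stdint.h>

// Illustrative only: mirrors the zeroByteCount test in getOneNALUnit.
// Returns true when buf begins with an AnnexB start code.
static bool hasAnnexBStartCode(const uint8_t *buf, uint32_t len) {
    uint32_t zeros = 0;
    while (zeros < len && buf[zeros] == 0x00) {
        zeros++;
    }
    return (zeros >= 2) && (zeros < len) && (buf[zeros] == 0x01);
}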
BoardConfig.mk is also changed disable a number of subsystems for bring-up of JB Change-Id: Id976736d07966e8948256ed462ad148246a05bbb Signed-off-by: Bruce Beare Reviewed-on: http://android.intel.com:8080/56382 Reviewed-by: Li, XiaojingX Tested-by: Li, XiaojingX --- Android.mk | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/Android.mk b/Android.mk index baf7b48..0ed1c8a 100644 --- a/Android.mk +++ b/Android.mk @@ -1,6 +1,8 @@ LOCAL_PATH := $(call my-dir) -include $(CLEAR_VARS) +ifeq ($(INTEL_VA),true) + +include $(CLEAR_VARS) VENDORS_INTEL_MRST_LIBMIX_ROOT := $(LOCAL_PATH) include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_common/src/Android.mk @@ -10,3 +12,5 @@ include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_vbp/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk + +endif -- cgit v1.2.3 From f6e141d1c4162c7bc594ecd29a28c820e019f43a Mon Sep 17 00:00:00 2001 From: Xiaolin Zhang Date: Fri, 13 Jul 2012 21:37:21 +0800 Subject: [libmix] fixed JB build issue by rewriting AVC asm code. BZ: 47236 fixed JB build issue by rewriting AVC asm code due to gcc update. Change-Id: Ia31aa32237f4a1e4fe59322dbf9c2e21a72d4021 Signed-off-by: Xiaolin Zhang Reviewed-on: http://android.intel.com:8080/56583 Reviewed-by: Beare, Bruce J --- videoencoder/VideoEncoderAVC.cpp | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 8833548..5922e6a 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -319,32 +319,33 @@ Encode_Status VideoEncoderAVC::getOneNALUnit( while ((dataRemaining > 0) && (zeroByteCount < 3)) { if (((((uint32_t)dataPtr) & 0xF ) == 0) && (0 == zeroByteCount) && (dataRemaining > 0xF)) { - __asm__( + + __asm__ ( //Data input "movl %1, %%ecx\n\t"//data_ptr=>ecx "movl %0, %%eax\n\t"//data_remaing=>eax //Main compare loop - "MATCH_8_ZERO:\n\t" + // + "0:\n\t" //MATCH_8_ZERO: "pxor %%xmm0,%%xmm0\n\t"//set 0=>xmm0 "pcmpeqb (%%ecx),%%xmm0\n\t"//data_ptr=xmm0,(byte==0)?0xFF:0x00 "pmovmskb %%xmm0, %%edx\n\t"//edx[0]=xmm0[7],edx[1]=xmm0[15],...,edx[15]=xmm0[127] "test $0xAAAA, %%edx\n\t"//edx& 1010 1010 1010 1010b - "jnz DATA_RET\n\t"//Not equal to zero means that at least one byte 0x00 + "jnz 2f\n\t"//Not equal to zero means that at least one byte 0x00 - "PREPARE_NEXT_MATCH:\n\t" + "1:\n\t" //PREPARE_NEXT_MATCH: "sub $0x10, %%eax\n\t"//16 + ecx --> ecx "add $0x10, %%ecx\n\t"//eax-16 --> eax "cmp $0x10, %%eax\n\t" - "jge MATCH_8_ZERO\n\t"//search next 16 bytes + "jge 0b\n\t"//search next 16 bytes - "DATA_RET:\n\t" + "2:\n\t" //DATA_RET: "movl %%ecx, %1\n\t"//output ecx->data_ptr "movl %%eax, %0\n\t"//output eax->data_remaining : "+m"(dataRemaining), "+m"(dataPtr) : :"eax", "ecx", "edx", "xmm0" ); - if (0 >= dataRemaining) { break; } -- cgit v1.2.3 From 6c7580bddb43168d83b9709775ba5530ba883bed Mon Sep 17 00:00:00 2001 From: ywan171 Date: Wed, 25 Jul 2012 08:13:22 +0800 Subject: libmix: port WMV related patch BZ:47670 original patch: 51112:libstagefright: Fix an issue that thumbnail for WMV file can not be generated 42039: libstagefright: Ignore seek in case of no index object within asf file 30078:libstagefright: Porting patches from R2 to R3 Signed-off-by: ywan171 Change-Id: I13ccf6d9c4194bf74dc6f708a9a5a18f83e8d4bd Reviewed-on: http://android.intel.com:8080/58297 Reviewed-by: Feng, Wei Reviewed-by: Zhang, 
Xiaolin Tested-by: Zhang, Xiaolin --- Android.mk | 2 +- frameworks/asf_extractor/Android.mk | 31 ++ frameworks/asf_extractor/AsfExtractor.cpp | 718 +++++++++++++++++++++++++++ frameworks/asf_extractor/AsfExtractor.h | 122 +++++ frameworks/asf_extractor/MediaBufferPool.cpp | 105 ++++ frameworks/asf_extractor/MediaBufferPool.h | 55 ++ frameworks/asf_extractor/MetaDataExt.h | 53 ++ 7 files changed, 1085 insertions(+), 1 deletion(-) create mode 100644 frameworks/asf_extractor/Android.mk create mode 100644 frameworks/asf_extractor/AsfExtractor.cpp create mode 100644 frameworks/asf_extractor/AsfExtractor.h create mode 100644 frameworks/asf_extractor/MediaBufferPool.cpp create mode 100644 frameworks/asf_extractor/MediaBufferPool.h create mode 100644 frameworks/asf_extractor/MetaDataExt.h diff --git a/Android.mk b/Android.mk index 0ed1c8a..b73ddce 100644 --- a/Android.mk +++ b/Android.mk @@ -12,5 +12,5 @@ include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_vbp/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk - +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/asf_extractor/Android.mk endif diff --git a/frameworks/asf_extractor/Android.mk b/frameworks/asf_extractor/Android.mk new file mode 100644 index 0000000..78afb17 --- /dev/null +++ b/frameworks/asf_extractor/Android.mk @@ -0,0 +1,31 @@ +ifeq ($(strip $(USE_INTEL_ASF_EXTRACTOR)),true) + +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + + +LOCAL_SRC_FILES := \ + AsfExtractor.cpp \ + MediaBufferPool.cpp + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH) \ + $(TARGET_OUT_HEADERS)/libmix_asfparser \ + $(TOP)/frameworks/av/media/libstagefright/include \ + $(TOP)/frameworks/native/include/media/openmax + +LOCAL_COPY_HEADERS_TO := libmix_asf_extractor + +LOCAL_COPY_HEADERS := \ + AsfExtractor.h \ + MetaDataExt.h \ + MediaBufferPool.h + +LOCAL_CPPFLAGS += -DUSE_INTEL_ASF_EXTRACTOR +LOCAL_MODULE := libasfextractor +LOCAL_MODULE_TAGS := optional + +include $(BUILD_STATIC_LIBRARY) + + +endif diff --git a/frameworks/asf_extractor/AsfExtractor.cpp b/frameworks/asf_extractor/AsfExtractor.cpp new file mode 100644 index 0000000..9a10581 --- /dev/null +++ b/frameworks/asf_extractor/AsfExtractor.cpp @@ -0,0 +1,718 @@ +/* +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + + +//#define LOG_NDEBUG 0 +#define LOG_TAG "AsfExtractor" +#include + +#include + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "MetaDataExt.h" +#include "MediaBufferPool.h" +#include "AsfStreamParser.h" +#include "AsfExtractor.h" + + +namespace android { + +class ASFSource : public MediaSource { +public: + ASFSource(const sp &extractor, int trackIndex) + : mExtractor(extractor), + mTrackIndex(trackIndex) { + } + + virtual status_t start(MetaData *params = NULL) { + return OK; + } + + virtual status_t stop() { + return OK; + } + + virtual sp getFormat() { + return mExtractor->getTrackMetaData(mTrackIndex, 0); + } + + virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL) { + return mExtractor->read(mTrackIndex, buffer, options); + } + +protected: + virtual ~ASFSource() { + mExtractor = NULL; + } + +private: + sp mExtractor; + int mTrackIndex; + + ASFSource(const ASFSource &); + ASFSource &operator=(const ASFSource &); +}; + + +AsfExtractor::AsfExtractor(const sp &source) + : mDataSource(source), + mInitialized(false), + mHasIndexObject(false), + mFirstTrack(NULL), + mLastTrack(NULL), + mReadLock(), + mFileMetaData(new MetaData), + mParser(NULL), + mHeaderObjectSize(0), + mDataObjectSize(0), + mDataPacketBeginOffset(0), + mDataPacketEndOffset(0), + mDataPacketCurrentOffset(0), + mDataPacketSize(0), + mDataPacketData(NULL) { + mParser = new AsfStreamParser; +} + +AsfExtractor::~AsfExtractor() { + uninitialize(); + mDataSource = NULL; + mFileMetaData = NULL; + delete mParser; + mParser = NULL; +} + +sp AsfExtractor::getMetaData() { + status_t err = initialize(); + if (err != OK) { + return new MetaData; + } + + return mFileMetaData; +} + +size_t AsfExtractor::countTracks() { + status_t err = initialize(); + if (err != OK) { + return 0; + } + + size_t n = 0; + Track *track = mFirstTrack; + while (track) { + ++n; + track = track->next; + } + + ALOGV("track count is %d", n); + return n; +} + +sp AsfExtractor::getTrackMetaData(size_t index, uint32_t flags) { + status_t err = initialize(); + if (err != OK) { + return NULL; + } + + Track *track = getTrackByTrackIndex(index); + if (track == NULL) { + return NULL; + } + + // There is no thumbnail data so ignore flags: kIncludeExtensiveMetaData + return track->meta; +} + +sp AsfExtractor::getTrack(size_t index) { + status_t err; + if ((err = initialize()) != OK) { + return NULL; + } + + Track *track = getTrackByTrackIndex(index); + if (track == NULL) { + return NULL; + } + + // Assume this track is active + track->skipTrack = false; + return new ASFSource(this, index); +} + +status_t AsfExtractor::read( + int trackIndex, + MediaBuffer **buffer, + const MediaSource::ReadOptions *options) { + Track *track = getTrackByTrackIndex(trackIndex); + if (track == NULL) { + return BAD_VALUE; + } + + if (!mParser->hasVideo() || (mParser->hasVideo() && mHasIndexObject)) { + if (options != NULL) { + status_t err = seek_l(track, options); + if (err != OK) { + return err; + } + } + } else { + ALOGW("No index object. Seek may not be supported!!!"); + } + + return read_l(track, buffer); +} + +status_t AsfExtractor::initialize() { + if (mInitialized) { + return OK; + } + + status_t status = OK; + // header object is the first mandatory object. 
The first 16 bytes + // is GUID of object, the following 8 bytes is size of object + if (mDataSource->readAt(16, &mHeaderObjectSize, 8) != 8) { + return ERROR_IO; + } + + uint8_t* headerObjectData = new uint8_t [mHeaderObjectSize]; + if (headerObjectData == NULL) { + return NO_MEMORY; + } + + if (mDataSource->readAt(0, headerObjectData, mHeaderObjectSize) != mHeaderObjectSize) { + return ERROR_IO; + } + status = mParser->parseHeaderObject(headerObjectData, mHeaderObjectSize); + if (status != ASF_PARSER_SUCCESS) { + ALOGE("Failed to parse header object."); + return ERROR_MALFORMED; + } + + delete [] headerObjectData; + headerObjectData = NULL; + + uint8_t dataObjectHeaderData[ASF_DATA_OBJECT_HEADER_SIZE]; + if (mDataSource->readAt(mHeaderObjectSize, dataObjectHeaderData, ASF_DATA_OBJECT_HEADER_SIZE) + != ASF_DATA_OBJECT_HEADER_SIZE) { + return ERROR_IO; + } + status = mParser->parseDataObjectHeader(dataObjectHeaderData, ASF_DATA_OBJECT_HEADER_SIZE); + if (status != ASF_PARSER_SUCCESS) { + ALOGE("Failed to parse data object header."); + return ERROR_MALFORMED; + } + + // first 16 bytes is GUID of data object + mDataObjectSize = *(uint64_t*)(dataObjectHeaderData + 16); + mDataPacketBeginOffset = mHeaderObjectSize + ASF_DATA_OBJECT_HEADER_SIZE; + mDataPacketEndOffset = mHeaderObjectSize + mDataObjectSize; + mDataPacketCurrentOffset = mDataPacketBeginOffset; + + // allocate memory for data packet + mDataPacketSize = mParser->getDataPacketSize(); + mDataPacketData = new uint8_t [mDataPacketSize]; + if (mDataPacketData == NULL) { + return NO_MEMORY; + } + + const AsfFileMediaInfo *fileMediaInfo = mParser->getFileInfo(); + if (fileMediaInfo && fileMediaInfo->seekable) { + uint64_t offset = mDataPacketEndOffset; + + // Find simple index object for time seeking. + // object header include 16 bytes of object GUID and 8 bytes of object size. + uint8_t objectHeader[24]; + int64_t objectSize; + for (;;) { + if (mDataSource->readAt(offset, objectHeader, 24) != 24) { + break; + } + + objectSize = *(int64_t *)(objectHeader + 16); + if (!AsfStreamParser::isSimpleIndexObject(objectHeader)) { + offset += objectSize; + continue; + } + mHasIndexObject = true; + uint8_t* indexObjectData = new uint8_t [objectSize]; + if (indexObjectData == NULL) { + // don't report as error, we just lose time seeking capability. 
+ break; + } + if (mDataSource->readAt(offset, indexObjectData, objectSize) == objectSize) { + // Ignore return value + mParser->parseSimpleIndexObject(indexObjectData, objectSize); + } + delete [] indexObjectData; + break; + } + } + + if (mParser->hasVideo() || mParser->hasAudio()) { + ALOGV("MEDIA_MIMETYPE_CONTAINER_ASF"); + mFileMetaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_ASF); + } else { + ALOGE("Content does not have neither audio nor video."); + return ERROR_UNSUPPORTED; + } + + // duration returned from parser is in 100-nanosecond unit, converted it to microseconds (us) + ALOGV("Duration is %.2f (sec)", mParser->getDuration()/1E7); + mFileMetaData->setInt64(kKeyDuration, mParser->getDuration() / SCALE_100_NANOSEC_TO_USEC); + + setupTracks(); + mInitialized = true; + return OK; +} + +void AsfExtractor::uninitialize() { + if (mDataPacketData) { + delete [] mDataPacketData; + mDataPacketData = NULL; + } + mDataPacketSize = 0; + + Track* track = mFirstTrack; + MediaBuffer* p; + while (track != NULL) { + track->meta = NULL; + if (track->bufferActive) { + track->bufferActive->release(); + track->bufferActive = NULL; + } + + int size = track->bufferQueue.size(); + for (int i = 0; i < size; i++) { + p = track->bufferQueue.editItemAt(i); + p->release(); + } + + track->bufferQueue.clear(); + delete track->bufferPool; + + track->meta = NULL; + mFirstTrack = track->next; + delete track; + track = mFirstTrack; + } + mFirstTrack = NULL; + mLastTrack = NULL; +} + +static const char* FourCC2MIME(uint32_t fourcc) { + // The first charater of FOURCC characters appears in the least-significant byte + // WVC1 => 0x31435657 + switch (fourcc) { + //case FOURCC('W', 'M', 'V', '1'): + //case FOURCC('W', 'M', 'V', '2'): + //case FOURCC('W', 'M', 'V', 'A'): + case FOURCC('1', 'V', 'M', 'W'): + ALOGW("WMV1 format is not supported."); + return "video/wmv1"; + case FOURCC('2', 'V', 'M', 'W'): + ALOGW("WMV2 format is not supported."); + return "video/wmv2"; + case FOURCC('A', 'V', 'M', 'W'): + ALOGW("WMV Advanced profile, assuming as WVC1 for now"); + return MEDIA_MIMETYPE_VIDEO_WMV; + //case FOURCC('W', 'M', 'V', '3'): + //case FOURCC('W', 'V', 'C', '1'): + case FOURCC('3', 'V', 'M', 'W'): + case FOURCC('1', 'C', 'V', 'W'): + return MEDIA_MIMETYPE_VIDEO_WMV; + default: + ALOGE("Unknown video format."); + return "video/unknown-type"; + } +} + +static const char* CodecID2MIME(uint32_t codecID) { + switch (codecID) { + // WMA version 1 + case 0x0160: + // WMA version 2 (7, 8, 9 series) + case 0x0161: + // WMA 9/10 profressional (WMA version 3) + case 0x0162: + return MEDIA_MIMETYPE_AUDIO_WMA; + // WMA 9 lossless + case 0x0163: + //return MEDIA_MIMETYPE_AUDIO_WMA_LOSSLESS; + return MEDIA_MIMETYPE_AUDIO_WMA; + // WMA voice 9 + case 0x000A: + // WMA voice 10 + case 0x000B: + ALOGW("WMA voice 9/10 is not supported."); + return "audio/wma-voice"; + default: + ALOGE("Unsupported Audio codec ID: %#x", codecID); + return "audio/unknown-type"; + } +} + + +status_t AsfExtractor::setupTracks() { + ALOGW("Audio is temporarily disabled!!!!!!!!!!!!!!"); + AsfAudioStreamInfo* audioInfo = NULL;//mParser->getAudioInfo(); + AsfVideoStreamInfo* videoInfo = mParser->getVideoInfo(); + Track* track; + while (audioInfo || videoInfo) { + track = new Track; + if (mLastTrack == NULL) { + mFirstTrack = track; + mLastTrack = track; + } else { + mLastTrack->next = track; + mLastTrack = track; + } + + // this flag will be set to false within getTrack + track->skipTrack = true; + track->seekCompleted = false; + track->next = 
NULL; + track->meta = new MetaData; + track->bufferActive = NULL; + track->bufferPool = new MediaBufferPool; + + if (audioInfo) { + track->streamNumber = audioInfo->streamNumber; + track->encrypted = audioInfo->encryptedContentFlag; + track->meta->setInt32(kKeyChannelCount, audioInfo->numChannels); + track->meta->setInt32(kKeySampleRate, audioInfo->sampleRate); + + if (audioInfo->codecDataSize) { + track->meta->setData( + kKeyConfigData, + kTypeConfigData, + audioInfo->codecData, + audioInfo->codecDataSize); + } + // duration returned is in 100-nanosecond unit + track->meta->setInt64(kKeyDuration, mParser->getDuration() / SCALE_100_NANOSEC_TO_USEC); + track->meta->setCString(kKeyMIMEType, CodecID2MIME(audioInfo->codecID)); + track->meta->setInt32(kKeySuggestedBufferSize, mParser->getDataPacketSize()); + audioInfo = audioInfo->next; + } else { + track->streamNumber = videoInfo->streamNumber; + track->encrypted = videoInfo->encryptedContentFlag; + track->meta->setInt32(kKeyWidth, videoInfo->width); + track->meta->setInt32(kKeyHeight, videoInfo->height); + if (videoInfo->codecDataSize) { + track->meta->setData( + kKeyConfigData, + kTypeConfigData, + videoInfo->codecData, + videoInfo->codecDataSize); + } + // duration returned is in 100-nanosecond unit + track->meta->setInt64(kKeyDuration, mParser->getDuration() / SCALE_100_NANOSEC_TO_USEC); + track->meta->setCString(kKeyMIMEType, FourCC2MIME(videoInfo->fourCC)); + int maxSize = mParser->getMaxObjectSize(); + if (maxSize == 0) { + // estimated maximum packet size. + maxSize = 10 * mParser->getDataPacketSize(); + } + track->meta->setInt32(kKeySuggestedBufferSize, maxSize); + if (mHasIndexObject) { + // set arbitary thumbnail time + track->meta->setInt64(kKeyThumbnailTime, mParser->getDuration() / (SCALE_100_NANOSEC_TO_USEC * 2)); + } else { + track->meta->setInt64(kKeyThumbnailTime, 0); + } + videoInfo = videoInfo->next; + } + } + + return OK; +} + +status_t AsfExtractor::seek_l(Track* track, const MediaSource::ReadOptions *options) { + Mutex::Autolock lockSeek(mReadLock); + + // It is expected seeking will happen on all the tracks with the same seeking options. + // Only the first track receiving the seeking command will perform seeking and all other + // tracks just siliently ignore it. + + // TODO: potential problems in the following case: + // audio seek + // video read + // video seek + // video read + + if (track->seekCompleted) { + // seeking is completed through a different track + track->seekCompleted = false; + return OK; + } + int64_t seekTimeUs; + MediaSource::ReadOptions::SeekMode mode; + if(!options->getSeekTo(&seekTimeUs,&mode)) { + return OK; + } + + uint64_t targetSampleTimeUs = 0; + + // seek to next sync sample or previous sync sample + bool nextSync = false; + switch (mode) { + case MediaSource::ReadOptions::SEEK_NEXT_SYNC: + nextSync = true; + break; + // Always seek to the closest previous sync frame + case MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC: + case MediaSource::ReadOptions::SEEK_CLOSEST_SYNC: + + // Not supported, already seek to sync frame, so will not set kKeyTargetTime on bufferActive. + case MediaSource::ReadOptions::SEEK_CLOSEST: + default: + break; + } + + uint32_t packetNumber; + uint64_t targetTime; + // parser takes seek time in 100-nanosecond unit and returns target time in 100-nanosecond as well. 
+ if (!mParser->seek(seekTimeUs * SCALE_100_NANOSEC_TO_USEC, nextSync, packetNumber, targetTime)) { + ALOGV("Seeking failed."); + return ERROR_END_OF_STREAM; + } + ALOGV("seek time = %.2f secs, actual time = %.2f secs", seekTimeUs/1E6, targetTime / 1E7); + + // convert to microseconds + targetSampleTimeUs = targetTime / SCALE_100_NANOSEC_TO_USEC; + mDataPacketCurrentOffset = mDataPacketBeginOffset + packetNumber * mDataPacketSize; + ALOGV("data packet offset = %lld", mDataPacketCurrentOffset); + + // flush all pending buffers on all the tracks + Track* temp = mFirstTrack; + while (temp != NULL) { + Mutex::Autolock lockTrack(temp->lock); + if (temp->bufferActive) { + temp->bufferActive->release(); + temp->bufferActive = NULL; + } + + int size = temp->bufferQueue.size(); + for (int i = 0; i < size; i++) { + MediaBuffer* buffer = temp->bufferQueue.editItemAt(i); + buffer->release(); + } + temp->bufferQueue.clear(); + + if (temp != track) { + // notify all other tracks seeking is completed. + // this flag is reset when seeking request is made on each track. + // don't set this flag on the driving track so a new seek can be made. + temp->seekCompleted = true; + } + temp = temp->next; + } + + return OK; +} + +status_t AsfExtractor::read_l(Track *track, MediaBuffer **buffer) { + status_t err = OK; + while (err == OK) { + Mutex::Autolock lock(track->lock); + if (track->bufferQueue.size() != 0) { + *buffer = track->bufferQueue[0]; + track->bufferQueue.removeAt(0); + return OK; + } + track->lock.unlock(); + + err = readPacket(); + } + ALOGE("read_l failed."); + return err; +} + +status_t AsfExtractor::readPacket() { + Mutex::Autolock lock(mReadLock); + if (mDataPacketCurrentOffset + mDataPacketSize > mDataPacketEndOffset) { + ALOGI("readPacket hits end of stream."); + return ERROR_END_OF_STREAM; + } + + if (mDataSource->readAt(mDataPacketCurrentOffset, mDataPacketData, mDataPacketSize) != + mDataPacketSize) { + return ERROR_END_OF_STREAM; + } + + // update next read position + mDataPacketCurrentOffset += mDataPacketSize; + AsfPayloadDataInfo *payloads = NULL; + int status = mParser->parseDataPacket(mDataPacketData, mDataPacketSize, &payloads); + if (status != ASF_PARSER_SUCCESS || payloads == NULL) { + ALOGE("Failed to parse data packet. 
status = %d", status); + return ERROR_END_OF_STREAM; + } + + AsfPayloadDataInfo* payload = payloads; + while (payload) { + Track* track = getTrackByStreamNumber(payload->streamNumber); + if (track == NULL || track->skipTrack) { + payload = payload->next; + continue; + } + if (payload->mediaObjectLength == payload->payloadSize || + payload->offsetIntoMediaObject == 0) { + // a comple object or the first payload of fragmented object + MediaBuffer *buffer = NULL; + status = track->bufferPool->acquire_buffer( + payload->mediaObjectLength, &buffer); + if (status != OK) { + ALOGE("Failed to acquire buffer."); + mParser->releasePayloadDataInfo(payloads); + return status; + } + memcpy(buffer->data(), + payload->payloadData, + payload->payloadSize); + + buffer->set_range(0, payload->mediaObjectLength); + // kKeyTime is in microsecond unit (usecs) + // presentationTime is in mililsecond unit (ms) + buffer->meta_data()->setInt64(kKeyTime, payload->presentationTime * 1000); + + if (payload->keyframe) { + buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); + } + + if (payload->mediaObjectLength == payload->payloadSize) { + Mutex::Autolock lockTrack(track->lock); + // a complete object + track->bufferQueue.push(buffer); + } else { + // the first payload of a fragmented object + track->bufferActive = buffer; + if (track->encrypted) { + Mutex::Autolock lockTrack(track->lock); + MediaBuffer* copy = NULL; + track->bufferPool->acquire_buffer(payload->payloadSize, ©); + copy->meta_data()->setInt64(kKeyTime, payload->presentationTime * 1000); + memcpy(copy->data(), payload->payloadData, payload->payloadSize); + copy->set_range(0, payload->payloadSize); + track->bufferQueue.push(copy); + } + } + } else { + if (track->bufferActive == NULL) { + ALOGE("Receiving corrupt or discontinuous data packet."); + payload = payload->next; + continue; + } + // TODO: check object number and buffer size!!!!!!!!!!!!!! + // the last payload or the middle payload of a fragmented object + memcpy( + (uint8_t*)track->bufferActive->data() + payload->offsetIntoMediaObject, + payload->payloadData, + payload->payloadSize); + + if (payload->offsetIntoMediaObject + payload->payloadSize == + payload->mediaObjectLength) { + // the last payload of a fragmented object + // for encrypted content, push a cloned media buffer to vector instead. 
+ if (!track->encrypted) + { + Mutex::Autolock lockTrack(track->lock); + track->bufferQueue.push(track->bufferActive); + track->bufferActive = NULL; + } else { + Mutex::Autolock lockTrack(track->lock); + track->bufferActive->set_range(payload->offsetIntoMediaObject, payload->payloadSize); + track->bufferQueue.push(track->bufferActive); + track->bufferActive = NULL; + } + } else { + // middle payload of a fragmented object + if (track->encrypted) { + Mutex::Autolock lockTrack(track->lock); + MediaBuffer* copy = NULL; + int64_t keytime; + track->bufferPool->acquire_buffer(payload->payloadSize, ©); + track->bufferActive->meta_data()->findInt64(kKeyTime, &keytime); + copy->meta_data()->setInt64(kKeyTime, keytime); + memcpy(copy->data(), payload->payloadData, payload->payloadSize); + copy->set_range(0, payload->payloadSize); + track->bufferQueue.push(copy); + } + } + } + payload = payload->next; + }; + + mParser->releasePayloadDataInfo(payloads); + return OK; +} + +AsfExtractor::Track* AsfExtractor::getTrackByTrackIndex(int index) { + Track *track = mFirstTrack; + while (index > 0) { + if (track == NULL) { + return NULL; + } + + track = track->next; + --index; + } + return track; +} + +AsfExtractor::Track* AsfExtractor::getTrackByStreamNumber(int stream) { + Track *track = mFirstTrack; + while (track != NULL) { + if (track->streamNumber == stream) { + return track; + } + track = track->next; + } + return NULL; +} + +bool SniffAsf( + const sp &source, + String8 *mimeType, + float *confidence, + sp *) { + uint8_t guid[16]; + if (source->readAt(0, guid, 16) != 16) { + return false; + } + if (!AsfStreamParser::isHeaderObject(guid)) { + return false; + } + + *mimeType = MEDIA_MIMETYPE_CONTAINER_ASF; + *confidence = 0.4f; + return true; +} + +} // namespace android + diff --git a/frameworks/asf_extractor/AsfExtractor.h b/frameworks/asf_extractor/AsfExtractor.h new file mode 100644 index 0000000..adb8e9d --- /dev/null +++ b/frameworks/asf_extractor/AsfExtractor.h @@ -0,0 +1,122 @@ +/* +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + + +#ifndef ASF_EXTRACTOR_H_ +#define ASF_EXTRACTOR_H_ + +#include +#include +#include +#include + + +namespace android { + +struct AMessage; +class DataSource; +//class String8; + + +class AsfExtractor : public MediaExtractor { +public: + // Extractor assumes ownership of "source". 
+ AsfExtractor(const sp &source); + virtual ~AsfExtractor(); + + virtual size_t countTracks(); + virtual sp getTrack(size_t index); + virtual sp getTrackMetaData(size_t index, uint32_t flags); + virtual sp getMetaData(); + +private: + status_t read( + int trackIndex, + MediaBuffer **buffer, + const MediaSource::ReadOptions *options = NULL); + + friend class ASFSource; + +private: + struct Track { + Track *next; + sp meta; + bool skipTrack; + bool seekCompleted; + bool encrypted; + uint8_t streamNumber; + + // outgoing buffer queue (ready for decoding) + Vector bufferQueue; + + // buffer pool + class MediaBufferPool *bufferPool; + + // buffer currently being used to read payload data + MediaBuffer *bufferActive; + Mutex lock; + }; + + sp mDataSource; + bool mInitialized; + bool mHasIndexObject; + Track *mFirstTrack; + Track *mLastTrack; + + Mutex mReadLock; + sp mFileMetaData; + class AsfStreamParser *mParser; + + int64_t mHeaderObjectSize; + int64_t mDataObjectSize; + + int64_t mDataPacketBeginOffset; + int64_t mDataPacketEndOffset; + int64_t mDataPacketCurrentOffset; + + int64_t mDataPacketSize; + uint8_t *mDataPacketData; + + enum { + // 100 nano seconds to micro second + SCALE_100_NANOSEC_TO_USEC = 10, + }; + + AsfExtractor(const AsfExtractor &); + AsfExtractor &operator=(const AsfExtractor &); + +private: + struct Track; + status_t initialize(); + void uninitialize(); + status_t setupTracks(); + inline Track* getTrackByTrackIndex(int index); + inline Track* getTrackByStreamNumber(int stream); + status_t seek_l(Track* track, const MediaSource::ReadOptions *options); + status_t read_l(Track *track, MediaBuffer **buffer); + status_t readPacket(); +}; + + +bool SniffAsf( + const sp &source, + String8 *mimeType, + float *confidence, + sp *); + +} // namespace android + +#endif // ASF_EXTRACTOR_H_ diff --git a/frameworks/asf_extractor/MediaBufferPool.cpp b/frameworks/asf_extractor/MediaBufferPool.cpp new file mode 100644 index 0000000..22b25e0 --- /dev/null +++ b/frameworks/asf_extractor/MediaBufferPool.cpp @@ -0,0 +1,105 @@ +/* +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + + +#define LOG_TAG "MediaBufferPool" +#include + +#include +#include +#include "MediaBufferPool.h" + +#define DEFAULT_PAGE_SIZE 4096 + +namespace android { + +MediaBufferPool::MediaBufferPool() + : mMaxBufferSize(0), + mFirstBuffer(NULL), + mLastBuffer(NULL) { +} + +MediaBufferPool::~MediaBufferPool() { + MediaBuffer *next; + for (MediaBuffer *buffer = mFirstBuffer; buffer != NULL; + buffer = next) { + next = buffer->nextBuffer(); + + CHECK_EQ(buffer->refcount(), 0); + + buffer->setObserver(NULL); + buffer->release(); + } +} + +status_t MediaBufferPool::acquire_buffer(int size, MediaBuffer **out) { + Mutex::Autolock autoLock(mLock); + + MediaBuffer *next = NULL; + while (mFirstBuffer) { + if ((int)mFirstBuffer->size() >= size) { + next = mFirstBuffer->nextBuffer(); + + // pop first buffer out of list + *out = mFirstBuffer; + mFirstBuffer->add_ref(); + mFirstBuffer->reset(); + + mFirstBuffer = next; + if (mFirstBuffer == NULL) { + mLastBuffer = NULL; + } + return OK; + } else { + // delete the first buffer from the list + next = mFirstBuffer->nextBuffer(); + mFirstBuffer->setObserver(NULL); + mFirstBuffer->release(); + mFirstBuffer = next; + } + } + + // not a single buffer matches the requirement. Allocating a new buffer. + + mFirstBuffer = NULL; + mLastBuffer = NULL; + + size = ((size + DEFAULT_PAGE_SIZE - 1)/DEFAULT_PAGE_SIZE) * DEFAULT_PAGE_SIZE; + if (size < mMaxBufferSize) { + size = mMaxBufferSize; + } else { + mMaxBufferSize = size; + } + MediaBuffer *p = new MediaBuffer(size); + *out = p; + return (p != NULL) ? OK : NO_MEMORY; +} + +void MediaBufferPool::signalBufferReturned(MediaBuffer *buffer) { + Mutex::Autolock autoLock(mLock); + + buffer->setObserver(this); + + if (mLastBuffer) { + mLastBuffer->setNextBuffer(buffer); + } else { + mFirstBuffer = buffer; + } + + mLastBuffer = buffer; +} + +} // namespace android diff --git a/frameworks/asf_extractor/MediaBufferPool.h b/frameworks/asf_extractor/MediaBufferPool.h new file mode 100644 index 0000000..2e35e0a --- /dev/null +++ b/frameworks/asf_extractor/MediaBufferPool.h @@ -0,0 +1,55 @@ +/* +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + + +#ifndef MEDIA_BUFFER_POOL_H_ + +#define MEDIA_BUFFER_POOL_H_ + +#include +#include +#include + +namespace android { + +class MediaBuffer; +class MetaData; + +class MediaBufferPool : public MediaBufferObserver { +public: + MediaBufferPool(); + ~MediaBufferPool(); + + status_t acquire_buffer(int size, MediaBuffer **buffer); + +protected: + virtual void signalBufferReturned(MediaBuffer *buffer); + +private: + friend class MediaBuffer; + + Mutex mLock; + int mMaxBufferSize; + + MediaBuffer *mFirstBuffer, *mLastBuffer; + + MediaBufferPool(const MediaBufferPool &); + MediaBufferPool &operator=(const MediaBufferPool &); +}; + +} // namespace android + +#endif // MEDIA_BUFFER_POOL_H_ diff --git a/frameworks/asf_extractor/MetaDataExt.h b/frameworks/asf_extractor/MetaDataExt.h new file mode 100644 index 0000000..6476b9b --- /dev/null +++ b/frameworks/asf_extractor/MetaDataExt.h @@ -0,0 +1,53 @@ +/* +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + + +#ifndef META_DATA_EXT_H_ +#define META_DATA_EXT_H_ + +#include + +namespace android { + +#define MEDIA_MIMETYPE_AUDIO_WMA "audio/wma" +#define MEDIA_MIMETYPE_AUDIO_AC3 "audio/ac3" +#define MEDIA_MIMETYPE_VIDEO_WMV "video/wmv" +#define MEDIA_MIMETYPE_CONTAINER_ASF "video/x-ms-asf" +#define MEDIA_MIMETYPE_VIDEO_VA "video/x-va" +#define MEDIA_MIMETYPE_AUDIO_WMA_VOICE "audio/wma-voice" + + +enum +{ + // value by default takes int32_t unless specified + kKeyConfigData = 'kcfg', // raw data + kKeyProtected = 'prot', // int32_t (bool) + kKeyCropLeft = 'clft', + kKeyCropRight = 'crit', + kKeyCropTop = 'ctop', + kKeyCropBottom = 'cbtm', + kKeySuggestedBufferSize = 'sgbz', + kKeyWantRawOutput = 'rawo' +}; + +enum +{ + kTypeConfigData = 'tcfg', +}; + +} // namespace android + +#endif // META_DATA_EXT_H_ -- cgit v1.2.3 From 52ca8ce27e6273468324ddac35d042db1c64a58c Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Thu, 26 Jul 2012 08:05:50 +0800 Subject: Enable VAVideoDecoder as thumbnail generation path (Video HAL) BZ: 48580 Use Intel HW decoder as the thumbnail generation path to improve performance. 
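For context on the MediaBufferPool introduced above: acquire_buffer() reuses the first queued buffer that is large enough, otherwise it allocates a new one rounded up to a 4096-byte page; once the pool has been set as a buffer's observer, dropping the last reference re-queues the buffer through signalBufferReturned() instead of freeing it. A minimal usage sketch (hypothetical caller, assuming only the interfaces shown above):

#include <stdint.h>
#include <string.h>
#include <media/stagefright/MediaBuffer.h>
#include "MediaBufferPool.h"

// Illustrative only, not part of the patch series.
void copyIntoPooledBuffer(android::MediaBufferPool &pool,
                          const uint8_t *src, int size) {
    android::MediaBuffer *buffer = NULL;
    // the requested size is rounded up to a multiple of DEFAULT_PAGE_SIZE (4096)
    if (pool.acquire_buffer(size, &buffer) != android::OK) {
        return; // NO_MEMORY
    }
    memcpy(buffer->data(), src, size);
    buffer->set_range(0, size);
    // if the pool observes this buffer, release() hands it back via
    // signalBufferReturned(); otherwise the buffer is freed
    buffer->release();
}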
Change-Id: Ifd6e966cdea40e8cc038963b31ab64ed4bd74551 Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/58656 Reviewed-by: buildbot Reviewed-by: Zhang, Xiaolin Tested-by: Zhang, Xiaolin --- Android.mk | 1 + frameworks/vavideodecoder/Android.mk | 27 +++ frameworks/vavideodecoder/VAVideoDecoder.cpp | 340 +++++++++++++++++++++++++++ frameworks/vavideodecoder/VAVideoDecoder.h | 65 +++++ 4 files changed, 433 insertions(+) create mode 100644 frameworks/vavideodecoder/Android.mk create mode 100644 frameworks/vavideodecoder/VAVideoDecoder.cpp create mode 100644 frameworks/vavideodecoder/VAVideoDecoder.h diff --git a/Android.mk b/Android.mk index b73ddce..1056c24 100644 --- a/Android.mk +++ b/Android.mk @@ -13,4 +13,5 @@ include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/asf_extractor/Android.mk +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/vavideodecoder/Android.mk endif diff --git a/frameworks/vavideodecoder/Android.mk b/frameworks/vavideodecoder/Android.mk new file mode 100644 index 0000000..1a9faaf --- /dev/null +++ b/frameworks/vavideodecoder/Android.mk @@ -0,0 +1,27 @@ +ifeq ($(strip $(USE_INTEL_ASF_EXTRACTOR)),true) + +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_COPY_HEADERS_TO := libvavideodecoder +LOCAL_COPY_HEADERS := VAVideoDecoder.h + +LOCAL_SRC_FILES := \ + VAVideoDecoder.cpp \ + +#LOCAL_SHARED_LIBRARIES += libasfparser + +LOCAL_C_INCLUDES := \ + $(TOP)/frameworks/av/media/libstagefright/include \ + $(TOP)/frameworks/native/include/media/openmax \ + $(TARGET_OUT_HEADERS)/libmix_videodecoder \ + $(TARGET_OUT_HEADERS)/libmix_asf_extractor \ + $(TARGET_OUT_HEADERS)/libva/ + + +LOCAL_MODULE:= libvavideodecoder +LOCAL_MODULE_TAGS := optional + +include $(BUILD_STATIC_LIBRARY) + +endif diff --git a/frameworks/vavideodecoder/VAVideoDecoder.cpp b/frameworks/vavideodecoder/VAVideoDecoder.cpp new file mode 100644 index 0000000..a860e86 --- /dev/null +++ b/frameworks/vavideodecoder/VAVideoDecoder.cpp @@ -0,0 +1,340 @@ +/* +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + + + +//#define LOG_NDEBUG 0 +#define LOG_TAG "VAVideoDecoder" +#include + +#include +//#include +#include +#include +#include +#include +#include +#include "MetaDataExt.h" +#include "VAVideoDecoder.h" +#include "VideoDecoderInterface.h" +#include "VideoDecoderHost.h" +#include +namespace android { + +VAVideoDecoder::VAVideoDecoder(const sp &source) + : mSource(source), + mStarted(false), + mRawOutput(false), + mInputBuffer(NULL), + mTargetTimeUs(-1), + mFrameIndex(0), + mErrorCount(0), + mDecoder(NULL) { + + const char *mime; + CHECK(source->getFormat()->findCString(kKeyMIMEType, &mime)); + mDecoder = createVideoDecoder(mime); + if (mDecoder == NULL) { + LOGE("Failed to create video decoder for %s", mime); + } + + mFormat = new MetaData; + mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VA); + + int32_t width, height; + CHECK(mSource->getFormat()->findInt32(kKeyWidth, &width)); + CHECK(mSource->getFormat()->findInt32(kKeyHeight, &height)); + mFormat->setInt32(kKeyWidth, width); + mFormat->setInt32(kKeyHeight, height); + mFormat->setInt32(kKeyColorFormat, OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar); + mFormat->setCString(kKeyDecoderComponent, "VAVideoDecoder"); + + int64_t durationUs; + if (mSource->getFormat()->findInt64(kKeyDuration, &durationUs)) { + mFormat->setInt64(kKeyDuration, durationUs); + } + +} + +VAVideoDecoder::~VAVideoDecoder() { + if (mStarted) { + stop(); + } + releaseVideoDecoder(mDecoder); + mDecoder = NULL; +} + +status_t VAVideoDecoder::start(MetaData *params) { + CHECK(!mStarted); + + if (mDecoder == NULL) { + LOGE("Decoder is not created."); + return UNKNOWN_ERROR; + } + + int32_t ret; + char str[255]; + sprintf(str, "%d", gettid()); + ret = setenv("PSB_VIDEO_THUMBNAIL", str, 1); + if (ret) { + LOGW("Set environmnet'PSB_VIDEO_SURFACE_MMU' fail\n"); + } + uint32_t type; + const void *data; + size_t size; + sp meta = mSource->getFormat(); + VideoConfigBuffer configBuffer; + memset(&configBuffer, 0, sizeof(VideoConfigBuffer)); + + if (meta->findData(kKeyConfigData, &type, &data, &size) || + meta->findData(kKeyESDS, &type, &data, &size) || + meta->findData(kKeyAVCC, &type, &data, &size)) { + configBuffer.data = (uint8_t*)data; + configBuffer.size = size; + } else { + LOGW("No configuration data found!"); + } + + configBuffer.flag |= WANT_RAW_OUTPUT; + mFormat->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420SemiPlanar); + mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); + mRawOutput = true; + LOGW("Decoder will output raw data."); + + mFormat->findInt32(kKeyWidth, &configBuffer.width); + mFormat->findInt32(kKeyHeight, &configBuffer.height); + + Decode_Status res = mDecoder->start(&configBuffer); + if (res != DECODE_SUCCESS) { + LOGE("Failed to start decoder. Error = %d", res); + return UNKNOWN_ERROR; + } + + // TODO: update format meta, including frame cropping information. 
+ + // create MediaBuffer pool only when output is VASurface + if (mRawOutput == false) { + for (int32_t i = 0; i < NUM_OF_MEDIA_BUFFER; ++i) { + MediaBuffer *buffer = new MediaBuffer(sizeof(VideoRenderBuffer)); + buffer->setObserver(this); + // output is unreadable VASurface + buffer->meta_data()->setInt32(kKeyIsUnreadable, 1); + buffer->meta_data()->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VA); + buffer->meta_data()->setInt32(kKeyColorFormat,OMX_COLOR_FormatYUV420SemiPlanar); + mFrames.push(buffer); + } + } + + mSource->start(); + unsetenv("PSB_VIDEO_THUMBNAIL"); + + mFrameIndex = 0; + mErrorCount = 0; + mTargetTimeUs = -1; + mStarted = true; + return OK; +} + + +status_t VAVideoDecoder::stop() { + CHECK(mStarted); + + if (mInputBuffer) { + mInputBuffer->release(); + mInputBuffer = NULL; + } + + for (size_t i = 0; i < mFrames.size(); ++i) { + MediaBuffer *buffer = mFrames.editItemAt(i); + buffer->setObserver(NULL); + buffer->release(); + } + mFrames.clear(); + mSource->stop(); + mDecoder->stop(); + + mFrameIndex = 0; + mErrorCount = 0; + mRawOutput = false; + mStarted = false; + + return OK; +} + +sp VAVideoDecoder::getFormat() { + return mFormat; +} + +MediaBuffer *VAVideoDecoder::getOutputBuffer(bool bDraining) { + const VideoRenderBuffer* buffer = mDecoder->getOutput(bDraining); + if (buffer == NULL) { + return NULL; + } + // indicate buffer is rendered + buffer->renderDone = true; + + if (mTargetTimeUs >= 0) { + CHECK(buffer->timeStamp <= mTargetTimeUs); + if (buffer->timeStamp < mTargetTimeUs) { + // We're still waiting for the frame with the matching + // timestamp and we won't return the current one. + LOGV("skipping frame at %lld us", buffer->timeStamp); + return NULL; + } else { + LOGV("found target frame at %lld us", buffer->timeStamp); + mTargetTimeUs = -1; + } + } + + MediaBuffer *mbuf = NULL; + if (mRawOutput == false) { + mbuf = mFrames.editItemAt(mFrameIndex); + mFrameIndex++; + if (mFrameIndex >= mFrames.size()) { + mFrameIndex = 0; + } + memcpy(mbuf->data(), buffer, sizeof(VideoRenderBuffer)); + mbuf->meta_data()->setInt64(kKeyTime, buffer->timeStamp); + mbuf->set_range(0, mbuf->size()); + mbuf->add_ref(); + } else { + mbuf = new MediaBuffer(buffer->rawData->data, buffer->rawData->size); + mbuf->meta_data()->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); + mbuf->meta_data()->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420SemiPlanar); + mbuf->meta_data()->setInt64(kKeyTime, buffer->timeStamp); + } + + return mbuf; +} + +status_t VAVideoDecoder::read(MediaBuffer **out, const ReadOptions *options) { + *out = NULL; + if (mDecoder == NULL) { + LOGE("Decoder is not created."); + return UNKNOWN_ERROR; + } + + int64_t seekTimeUs; + ReadOptions::SeekMode mode; + ReadOptions seekOptions; + bool seeking = false; + + if (options && options->getSeekTo(&seekTimeUs, &mode)) { + LOGV("seek requested to %lld us (%.2f secs)", seekTimeUs, seekTimeUs / 1E6); + + if (seekTimeUs < 0) { + LOGE("Invalid seek time : %ld", (long int32_t)seekTimeUs); + seekTimeUs = 0; + //return ERROR_END_OF_STREAM; + } + //CHECK(seekTimeUs >= 0); + + seekOptions.setSeekTo(seekTimeUs, mode); + mDecoder->flush(); + seeking = true; + } + + for (;;) { + status_t err = mSource->read(&mInputBuffer, &seekOptions); + seekOptions.clearSeekTo(); + + if (err != OK) { + LOGE("Failed to read buffer from source extractor."); + // drain the output buffer when end of stream + *out = getOutputBuffer(true); + return (*out == NULL) ? 
err : (status_t)OK; + } + + if (mInputBuffer->range_length() > 0) { + break; + } + + mInputBuffer->release(); + mInputBuffer = NULL; + } + + if (mInputBuffer == NULL) { + LOGE("Unexpected NULL input buffer."); + return ERROR_END_OF_STREAM; + } + + if (seeking) { + int64_t targetTimeUs; + if (mInputBuffer->meta_data()->findInt64(kKeyTargetTime, &targetTimeUs) && targetTimeUs >= 0) { + mTargetTimeUs = targetTimeUs; + } else { + mTargetTimeUs = -1; + } + } + + status_t err = UNKNOWN_ERROR; + + // prepare decoding buffer + VideoDecodeBuffer decodeBuffer; + memset(&decodeBuffer, 0, sizeof(decodeBuffer)); + decodeBuffer.data = (uint8_t*)mInputBuffer->data() + mInputBuffer->range_offset(); + decodeBuffer.size = mInputBuffer->range_length(); + decodeBuffer.flag = seeking ? HAS_DISCONTINUITY : 0; + mInputBuffer->meta_data()->findInt64(kKeyTime, &decodeBuffer.timeStamp); + Decode_Status res = mDecoder->decode(&decodeBuffer); + + mInputBuffer->release(); + mInputBuffer = NULL; + + if (res == DECODE_FORMAT_CHANGE) { + LOGW("Format changed."); + // drain all the frames. + MediaBuffer *mbuf = NULL; + while ((mbuf = getOutputBuffer(true)) != NULL) { + mbuf->release(); + } + const VideoFormatInfo *info = mDecoder->getFormatInfo(); + uint32_t cropWidth, cropHeight; + if (info != NULL) { + cropWidth = info->width - (info->cropLeft + info->cropRight); + cropHeight = info->height - (info->cropBottom + info->cropTop); + mFormat->setInt32(kKeyWidth, cropWidth); + mFormat->setInt32(kKeyHeight, cropHeight); + } + // TODO: handle format change + err = INFO_FORMAT_CHANGED; + } + else if (res == DECODE_SUCCESS) { + mErrorCount = 0; + err = OK; + MediaBuffer *mbuf = getOutputBuffer(); + if (mbuf == NULL) { + *out = new MediaBuffer(0); + } else { + *out = mbuf; + } + } else { + mErrorCount++; + LOGE("Failed to decode buffer (#%d). Error = %d", mErrorCount, res); + if (checkFatalDecoderError(res)) { + err = UNKNOWN_ERROR; + } else { + // For decoder errors that could be omitted, not throw error and continue to decode. + err = OK; + *out = new MediaBuffer(0); + } + } + + return err; +} + +}// namespace android + diff --git a/frameworks/vavideodecoder/VAVideoDecoder.h b/frameworks/vavideodecoder/VAVideoDecoder.h new file mode 100644 index 0000000..911b0dc --- /dev/null +++ b/frameworks/vavideodecoder/VAVideoDecoder.h @@ -0,0 +1,65 @@ +/* +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + + +#ifndef VA_VIDEO_DECODER_H_ +#define VA_VIDEO_DECODER_H_ + +#include +#include +#include + +class IVideoDecoder; + +namespace android { + +struct VAVideoDecoder : public MediaSource, + public MediaBufferObserver { + VAVideoDecoder(const sp &source); + + virtual status_t start(MetaData *params); + virtual status_t stop(); + virtual sp getFormat(); + virtual status_t read(MediaBuffer **buffer, const ReadOptions *options); + virtual void signalBufferReturned(MediaBuffer* buffer) {} + +protected: + virtual ~VAVideoDecoder(); + +private: + MediaBuffer *getOutputBuffer(bool bDraining = false); + VAVideoDecoder(const VAVideoDecoder &); + VAVideoDecoder &operator=(const VAVideoDecoder &); + +private: + enum { + NUM_OF_MEDIA_BUFFER = 20, + }; + sp mSource; + bool mStarted; + bool mRawOutput; + sp mFormat; + Vector mFrames; + MediaBuffer *mInputBuffer; + int64_t mTargetTimeUs; + uint32_t mFrameIndex; + uint32_t mErrorCount; + IVideoDecoder *mDecoder; +}; + +} // namespace android + +#endif // VA_VIDEO_DECODER_H_ -- cgit v1.2.3 From d2255c529aaf2625144ebd5078a17e4925df5bb7 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Fri, 27 Jul 2012 07:25:55 +0800 Subject: To enable video editor on Jelly Bean BZ: 48987 Add color convert codes to support convert from NV12 to I420 on video editor decoder side and from I420 to NV12 on video editor encoder side. Change-Id: I8df003c41622be29c7ba69eb899926ff87ecdc44 Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/59022 Reviewed-by: buildbot Reviewed-by: Zhang, Xiaolin Tested-by: Zhang, Xiaolin --- Android.mk | 1 + frameworks/libI420colorconvert/Android.mk | 19 ++++ frameworks/libI420colorconvert/ColorConvert.cpp | 114 ++++++++++++++++++++++++ 3 files changed, 134 insertions(+) create mode 100644 frameworks/libI420colorconvert/Android.mk create mode 100644 frameworks/libI420colorconvert/ColorConvert.cpp diff --git a/Android.mk b/Android.mk index 1056c24..369ab19 100644 --- a/Android.mk +++ b/Android.mk @@ -14,4 +14,5 @@ include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/asf_extractor/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/vavideodecoder/Android.mk +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/libI420colorconvert/Android.mk endif diff --git a/frameworks/libI420colorconvert/Android.mk b/frameworks/libI420colorconvert/Android.mk new file mode 100644 index 0000000..ee84680 --- /dev/null +++ b/frameworks/libI420colorconvert/Android.mk @@ -0,0 +1,19 @@ +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + ColorConvert.cpp + +LOCAL_C_INCLUDES:= \ + $(TOP)/frameworks/native/include/media/openmax \ + $(TOP)/frameworks/native/include/media/editor + +LOCAL_SHARED_LIBRARIES := \ + +LOCAL_MODULE_TAGS := optional + +LOCAL_MODULE := libI420colorconvert + +include $(BUILD_SHARED_LIBRARY) + + diff --git a/frameworks/libI420colorconvert/ColorConvert.cpp b/frameworks/libI420colorconvert/ColorConvert.cpp new file mode 100644 index 0000000..5f2c8a1 --- /dev/null +++ b/frameworks/libI420colorconvert/ColorConvert.cpp @@ -0,0 +1,114 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include + +static int getDecoderOutputFormat() { + return OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar; +} + +static int convertDecoderOutputToI420( + void* srcBits, int srcWidth, int srcHeight, ARect srcRect, void* dstBits) { + + const uint8_t *pSrc_y = (const uint8_t *)srcBits + + srcWidth * srcRect.top + srcRect.left; + const uint8_t *pSrc_uv = (const uint8_t *)pSrc_y + + srcWidth * (srcHeight - srcRect.top / 2); + + int dstWidth = srcRect.right - srcRect.left + 1; + int dstHeight = srcRect.bottom - srcRect.top + 1; + size_t dst_y_size = dstWidth * dstHeight; + size_t dst_uv_stride = dstWidth / 2; + size_t dst_uv_size = dstWidth / 2 * dstHeight / 2; + uint8_t *pDst_y = (uint8_t *)dstBits; + uint8_t *pDst_u = pDst_y + dst_y_size; + uint8_t *pDst_v = pDst_u + dst_uv_size; + + for (int y = 0; y < dstHeight; ++y) { + memcpy(pDst_y, pSrc_y, dstWidth); + pSrc_y += srcWidth; + pDst_y += dstWidth; + } + + size_t tmp = (dstWidth + 1) / 2; + for (int y = 0; y < (dstHeight + 1) / 2; ++y) { + for (size_t x = 0; x < tmp; ++x) { + pDst_u[x] = pSrc_uv[2 * x]; + pDst_v[x] = pSrc_uv[2 * x + 1]; + } + pSrc_uv += srcWidth; + pDst_u += dst_uv_stride; + pDst_v += dst_uv_stride; + } + return 0; +} + +static int getEncoderInputFormat() { + return OMX_COLOR_FormatYUV420SemiPlanar; +} + +static int convertI420ToEncoderInput( + void* srcBits, int srcWidth, int srcHeight, + int dstWidth, int dstHeight, ARect dstRect, + void* dstBits) { + uint8_t *pSrc_y = (uint8_t*) srcBits; + uint8_t *pDst_y = (uint8_t*) dstBits; + for(int i=0; i < srcHeight; i++) { + memcpy(pDst_y, pSrc_y, srcWidth); + pSrc_y += srcWidth; + pDst_y += dstWidth; + } + uint8_t* pSrc_u = (uint8_t*)srcBits + (srcWidth * srcHeight); + uint8_t* pSrc_v = (uint8_t*)pSrc_u + (srcWidth / 2) * (srcHeight / 2); + uint8_t* pDst_uv = (uint8_t*)dstBits + dstWidth * dstHeight; + + for(int i=0; i < srcHeight / 2; i++) { + for(int j=0, k=0; j < srcWidth / 2; j++, k+=2) { + pDst_uv[k] = pSrc_u[j]; + pDst_uv[k+1] = pSrc_v[j]; + } + pDst_uv += dstWidth; + pSrc_u += srcWidth / 2; + pSrc_v += srcWidth / 2; + } + return 0; +} + +static int getEncoderInputBufferInfo( + int actualWidth, int actualHeight, + int* encoderWidth, int* encoderHeight, + ARect* encoderRect, int* encoderBufferSize) { + + *encoderWidth = actualWidth; + *encoderHeight = actualHeight; + encoderRect->left = 0; + encoderRect->top = 0; + encoderRect->right = actualWidth - 1; + encoderRect->bottom = actualHeight - 1; + *encoderBufferSize = (actualWidth * actualHeight * 3 / 2); + + return 0; +} + +extern "C" void getI420ColorConverter(II420ColorConverter *converter) { + converter->getDecoderOutputFormat = getDecoderOutputFormat; + converter->convertDecoderOutputToI420 = convertDecoderOutputToI420; + converter->getEncoderInputFormat = getEncoderInputFormat; + converter->convertI420ToEncoderInput = convertI420ToEncoderInput; + converter->getEncoderInputBufferInfo = getEncoderInputBufferInfo; +} -- cgit v1.2.3 From 964ba517dcd9e09ede6e5c3b2ab4f146a44f59d5 Mon Sep 17 00:00:00 2001 From: fxiao4X Date: Fri, 27 Jul 2012 15:54:04 +0800 Subject: [PORT FROM R3] Fix 
MPEG4 multiple frame issue.

BZ: 43475

Streams may contain a VOL header, so the first item is not the VOP; in that
case frameSize won't be set. Use the offset and size of the last picture
data to calculate the next frame offset.

Original Change-Id: If06e4bfbfc66dbd57839d8a10c60f11013626fc5

Change-Id: I94060ff7c5dfc780a913fd5cd25d4a6457a8eac6
Signed-off-by: fxiao4X
Reviewed-on: http://android.intel.com:8080/59193
Reviewed-by: buildbot
Reviewed-by: Zhang, Xiaolin
Tested-by: Zhang, Xiaolin
---
 mix_vbp/viddec_fw/fw/parser/vbp_loader.h      |  1 -
 mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c |  7 -------
 videodecoder/VideoDecoderMPEG4.cpp            | 13 +++++++++++--
 3 files changed, 11 insertions(+), 10 deletions(-)

diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
index 7037fd0..38e2a05 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
@@ -113,7 +113,6 @@ typedef struct _vbp_data_mp42
     uint32 number_pictures;
     vbp_picture_data_mp42 *picture_data;
-    uint32 frameSize; // fist frame size in buffer. Use for multiple frame in a buffer
 } vbp_data_mp42;

diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
index ccc0ab5..249a9f8 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
@@ -632,13 +632,6 @@ void vbp_fill_iq_matrix_buffer(vbp_context *pcontext)
 void vbp_on_vop_mp42(vbp_context *pcontext, int list_index)
 {
-    if(list_index == 0) {
-        // for the fist list item
-        viddec_pm_cxt_t *parent = pcontext->parser_cxt;
-        vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
-        query_data->frameSize = parent->list.total_bytes; //record the first item frame size
-    }
-
     vbp_fill_codec_data(pcontext);
     vbp_fill_picture_param(pcontext, 1);
     vbp_fill_iq_matrix_buffer(pcontext);

diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp
index 2ddc5ff..5499fbb 100644
--- a/videodecoder/VideoDecoderMPEG4.cpp
+++ b/videodecoder/VideoDecoderMPEG4.cpp
@@ -338,8 +338,17 @@ Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) {
             mExpectingNVOP = false;
         }
         if (useGraphicBuffer) {
-            mPackedFrame.offSet = data->frameSize;
-            VTRACE("Report OMX to handle for Multiple frame offset=%d time=%lld",data->frameSize,mPackedFrame.timestamp);
+            int32_t count = i - 1;
+            if (count < 0) {
+                WTRACE("Should not be here!");
+                return DECODE_SUCCESS;
+            }
+            vbp_picture_data_mp42 *lastpic = data->picture_data;
+            for (int k = 0; k < count; k++) {
+                lastpic = lastpic->next_picture_data;
+            }
+            mPackedFrame.offSet = lastpic->slice_data.slice_offset + lastpic->slice_data.slice_size;
+            VTRACE("Report OMX to handle for Multiple frame offset=%d time=%lld",mPackedFrame.offSet,mPackedFrame.timestamp);
+            return DECODE_MULTIPLE_FRAME;
         }
     }
-- 
cgit v1.2.3

From 752d650567a63396d9074b591c6bfd6d3d149293 Mon Sep 17 00:00:00 2001
From: wfeng6
Date: Mon, 30 Jul 2012 18:09:05 +0800
Subject: To Enable Video Editor Encoder Buffer Sharing

BZ: 49310

The patch links the Intel encoder buffer sharing static library into the
video editor. The implementation is placed in the video HAL layer.
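For orientation: the buffer-sharing handshake that this patch wires up runs
in a fixed order across the source and encoder shells. A condensed sketch,
not part of the patch itself; it assumes the libsharedbuffer calls exactly
as they appear in the sources below, and the sp<BufferShareRegistry> template
argument is restored by assumption from how the registry is used:

    // Sketch of the handshake implemented by IntelVideoEditorEncoderSource
    // and IntelVideoEditorAVCEncoder below.
    static bool enterSharingMode(const sp<BufferShareRegistry> &r,
                                 SharedBufferType *bufs, int cnt) {
        // Source side, from IntelVideoEditorEncoderSource::start():
        if (r->sourceRequestToEnableSharingMode() != BS_SUCCESS) return false;
        // Encoder side, from getSharedBuffers()/setSharedBuffers():
        if (r->encoderRequestToEnableSharingMode() != BS_SUCCESS) return false;
        // ...allocate 'cnt' user-pointer surfaces via mVAEncoder->getParameters()...
        if (r->encoderSetSharedBuffer(bufs, cnt) != BS_SUCCESS) return false;
        if (r->encoderEnterSharingMode() != BS_SUCCESS) return false;
        // Source side again, on its first requestBuffer():
        return r->sourceEnterSharingMode() == BS_SUCCESS &&
               r->sourceGetSharedBuffer(bufs, &cnt) == BS_SUCCESS;
    }
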
Original patches: R3: 38063 BZ: 25225 54576 BZ: 43092 49988 BZ: 34584 35665 Change-Id: I8a1fb9208eeda66c470a29785cdb0a1acd289b5f Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/59446 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- Android.mk | 1 + frameworks/videoedit/Android.mk | 1 + frameworks/videoedit/stagefrightshells/Android.mk | 75 + .../IntelVideoEditorAVCEncoder.cpp | 468 +++++ .../stagefrightshells/IntelVideoEditorAVCEncoder.h | 85 + .../IntelVideoEditorEncoderSource.cpp | 278 +++ .../IntelVideoEditorEncoderSource.h | 92 + .../IntelVideoEditorH263Encoder.cpp | 400 ++++ .../IntelVideoEditorH263Encoder.h | 84 + .../stagefrightshells/IntelVideoEditorUtils.cpp | 511 +++++ .../stagefrightshells/IntelVideoEditorUtils.h | 105 + .../stagefrightshells/MediaBufferPuller.cpp | 179 ++ .../stagefrightshells/MediaBufferPuller.h | 90 + .../stagefrightshells/VideoEditor3gpReader.cpp | 2030 ++++++++++++++++++++ .../stagefrightshells/VideoEditorAudioDecoder.cpp | 991 ++++++++++ .../stagefrightshells/VideoEditorAudioEncoder.cpp | 755 ++++++++ .../stagefrightshells/VideoEditorBuffer.c | 265 +++ .../stagefrightshells/VideoEditorMp3Reader.cpp | 803 ++++++++ .../stagefrightshells/VideoEditorVideoDecoder.cpp | 1778 +++++++++++++++++ .../stagefrightshells/VideoEditorVideoEncoder.cpp | 1295 +++++++++++++ 20 files changed, 10286 insertions(+) create mode 100644 frameworks/videoedit/Android.mk create mode 100644 frameworks/videoedit/stagefrightshells/Android.mk create mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp create mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.h create mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.cpp create mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.h create mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.cpp create mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.h create mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp create mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h create mode 100644 frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp create mode 100644 frameworks/videoedit/stagefrightshells/MediaBufferPuller.h create mode 100644 frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp create mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp create mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorAudioEncoder.cpp create mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorBuffer.c create mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorMp3Reader.cpp create mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp create mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp diff --git a/Android.mk b/Android.mk index 369ab19..825caaa 100644 --- a/Android.mk +++ b/Android.mk @@ -15,4 +15,5 @@ include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/asf_extractor/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/vavideodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/libI420colorconvert/Android.mk +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/videoedit/Android.mk endif diff --git a/frameworks/videoedit/Android.mk 
b/frameworks/videoedit/Android.mk new file mode 100644 index 0000000..5053e7d --- /dev/null +++ b/frameworks/videoedit/Android.mk @@ -0,0 +1 @@ +include $(call all-subdir-makefiles) diff --git a/frameworks/videoedit/stagefrightshells/Android.mk b/frameworks/videoedit/stagefrightshells/Android.mk new file mode 100644 index 0000000..2e7c5ef --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/Android.mk @@ -0,0 +1,75 @@ +# +# Copyright (C) 2011 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + MediaBufferPuller.cpp \ + VideoEditorVideoDecoder.cpp \ + VideoEditorAudioDecoder.cpp \ + VideoEditorMp3Reader.cpp \ + VideoEditor3gpReader.cpp \ + VideoEditorBuffer.c \ + VideoEditorVideoEncoder.cpp \ + VideoEditorAudioEncoder.cpp \ + IntelVideoEditorUtils.cpp \ + IntelVideoEditorEncoderSource.cpp \ + IntelVideoEditorAVCEncoder.cpp \ + IntelVideoEditorH263Encoder.cpp + +LOCAL_C_INCLUDES += \ + $(TOP)/frameworks/av/media/libmediaplayerservice \ + $(TOP)/frameworks/av/media/libstagefright \ + $(TOP)/frameworks/av/media/libstagefright/include \ + $(TOP)/frameworks/av/media/libstagefright/rtsp \ + $(call include-path-for, corecg graphics) \ + $(TOP)/frameworks/av/libvideoeditor/lvpp \ + $(TOP)/frameworks/av/libvideoeditor/osal/inc \ + $(TOP)/frameworks/av/libvideoeditor/vss/inc \ + $(TOP)/frameworks/av/libvideoeditor/vss/common/inc \ + $(TOP)/frameworks/av/libvideoeditor/vss/mcs/inc \ + $(TOP)/frameworks/av/libvideoeditor/vss/stagefrightshells/inc \ + $(TOP)/frameworks/native/include/media/editor \ + $(TOP)/frameworks/native/include/media/openmax \ + $(TARGET_OUT_HEADERS)/libsharedbuffer \ + $(TARGET_OUT_HEADERS)/libmix_videoencoder \ + $(TARGET_OUT_HEADERS)/libva + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + libutils \ + libmedia \ + libbinder \ + libstagefright \ + libstagefright_foundation \ + libstagefright_omx \ + libgui \ + libvideoeditor_osal \ + libvideoeditorplayer \ + libsharedbuffer + +LOCAL_CFLAGS += \ + +LOCAL_STATIC_LIBRARIES := \ + libstagefright_color_conversion + + +LOCAL_MODULE:= libvideoeditor_stagefrightshells_intel + +LOCAL_MODULE_TAGS := optional + +include $(BUILD_STATIC_LIBRARY) diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp new file mode 100644 index 0000000..9681bba --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp @@ -0,0 +1,468 @@ +/* + * INTEL CONFIDENTIAL + * Copyright 2010-2011 Intel Corporation All Rights Reserved. + + * The source code, information and material ("Material") contained herein is owned + * by Intel Corporation or its suppliers or licensors, and title to such Material + * remains with Intel Corporation or its suppliers or licensors. The Material contains + * proprietary information of Intel or its suppliers and licensors. The Material is + * protected by worldwide copyright laws and treaty provisions. 
No part of the Material + * may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, + * distributed or disclosed in any way without Intel's prior express written permission. + * No license under any patent, copyright or other intellectual property rights in the + * Material is granted to or conferred upon you, either expressly, by implication, inducement, + * estoppel or otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + + * Unless otherwise agreed by Intel in writing, you may not remove or alter this notice or any + * other notice embedded in Materials by Intel or Intel's suppliers or licensors in any way. + */ + +#define LOG_NDEBUG 1 +#define LOG_TAG "IntelVideoEditorAVCEncoder" +#include +#include "OMX_Video.h" +#include +#include +#include +#include +#include +#include "IntelVideoEditorAVCEncoder.h" +#include + +#define INIT_BUF_FULLNESS_RATIO 0.6 +#define MIN_INTRA_PERIOD 30 +#define SHORT_INTRA_PERIOD (mVideoFrameRate) +#define MEDIUM_INTRA_PERIOD (2*mVideoFrameRate) +#define LONG_INTRA_PERIOD (4*mVideoFrameRate) +#define LOW_QUALITY_BITRATE 2000000 +#define MEDIUM_QUALITY_BITRATE 5000000 +#define BITRATE_1M 1000000 +#define BITRATE_2M 2000000 +#define BITRATE_4M 4000000 +#define BITRATE_5M 5000000 + +namespace android { + +IntelVideoEditorAVCEncoder::IntelVideoEditorAVCEncoder( + const sp& source, + const sp& meta) + : mSource(source), + mMeta(meta), + mUseSyncMode(0), + mStarted(false), + mFirstFrame(true), + mFrameCount(0), + mVAEncoder(NULL), + mOutBufGroup(NULL), + mLastInputBuffer(NULL) { + + LOGV("Construct IntelVideoEditorAVCEncoder"); +} + +IntelVideoEditorAVCEncoder::~IntelVideoEditorAVCEncoder() { + LOGV("Destruct IntelVideoEditorAVCEncoder"); + if (mStarted) { + stop(); + } +} + +status_t IntelVideoEditorAVCEncoder::initCheck(const sp& meta) { + LOGV("initCheck"); + + Encode_Status encStatus; + + sp sourceFormat = mSource->getFormat(); + + CHECK(sourceFormat->findInt32(kKeyWidth, &mVideoWidth)); + CHECK(sourceFormat->findInt32(kKeyHeight, &mVideoHeight)); + CHECK(sourceFormat->findInt32(kKeyFrameRate, &mVideoFrameRate)); + CHECK(sourceFormat->findInt32(kKeyColorFormat, &mVideoColorFormat)); + + CHECK(sourceFormat->findInt32(kKeyBitRate, &mVideoBitRate)); + + // Tune the output bitrates to improve the quality + if (mVideoBitRate < BITRATE_1M) { + mVideoBitRate = BITRATE_1M; + if (mVideoHeight > 720) { + mVideoBitRate = BITRATE_4M; + } + else if (mVideoHeight > 480) + { + mVideoBitRate = BITRATE_2M; + } + } + else if (mVideoBitRate < BITRATE_4M) { + if (mVideoHeight > 720) { + mVideoBitRate = BITRATE_5M; + } + else if (mVideoHeight > 480) { + mVideoBitRate = BITRATE_4M; + } + } + + LOGI("mVideoWidth = %d, mVideoHeight = %d, mVideoFrameRate = %d, mVideoColorFormat = %d, mVideoBitRate = %d", + mVideoWidth, mVideoHeight, mVideoFrameRate, mVideoColorFormat, mVideoBitRate); + + if (mVideoColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) { + LOGE("Color format %d is not supported", mVideoColorFormat); + return BAD_VALUE; + } + + mFrameSize = mVideoHeight* mVideoWidth* 1.5; + + /* + * SET PARAMS FOR THE ENCODER BASED ON THE METADATA + * */ + encStatus = mVAEncoder->getParameters(&mEncParamsCommon); + CHECK(encStatus == ENCODE_SUCCESS); + LOGV("got encoder params"); + + mEncParamsCommon.resolution.width = mVideoWidth; + mEncParamsCommon.resolution.height= mVideoHeight; + mEncParamsCommon.frameRate.frameRateNum = mVideoFrameRate; + mEncParamsCommon.frameRate.frameRateDenom = 1; + 
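+    // VBR rate control at the (possibly adjusted) target bitrate; raw input
+    // is NV12 to match the shared surfaces allocated in getSharedBuffers().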
mEncParamsCommon.rcMode = RATE_CONTROL_VBR; + mEncParamsCommon.rcParams.bitRate = mVideoBitRate; + mEncParamsCommon.rawFormat = RAW_FORMAT_NV12; + + mEncParamsCommon.rcParams.minQP = 0; + mEncParamsCommon.rcParams.initQP = 0; + + if (mVideoBitRate < LOW_QUALITY_BITRATE) { + mEncParamsCommon.intraPeriod = LONG_INTRA_PERIOD; + } + else if (mVideoBitRate < MEDIUM_QUALITY_BITRATE) { + mEncParamsCommon.intraPeriod = MEDIUM_INTRA_PERIOD; + } + else { + mEncParamsCommon.intraPeriod = SHORT_INTRA_PERIOD; + } + if (mEncParamsCommon.intraPeriod < MIN_INTRA_PERIOD) { + mEncParamsCommon.intraPeriod = MIN_INTRA_PERIOD; + } + + mEncParamsCommon.syncEncMode = mUseSyncMode; + mFrameCount = 0; + + // All luma and chroma block edges of the slice are filtered + mEncParamsCommon.disableDeblocking = 0; + + encStatus = mVAEncoder->setParameters(&mEncParamsCommon); + CHECK(encStatus == ENCODE_SUCCESS); + LOGV("new encoder params set"); + + encStatus = mVAEncoder->getParameters(&mEncParamsH264); + CHECK(encStatus == ENCODE_SUCCESS); + LOGV("got H264 encoder params "); + + mEncParamsH264.idrInterval = 1; + mEncParamsH264.sliceNum.iSliceNum = 2; + mEncParamsH264.sliceNum.pSliceNum = 2; + + // If the bitrate is low, we set the slice number to 1 in one frame to avoid visible boundary + if (mVideoBitRate < LOW_QUALITY_BITRATE) { + mEncParamsH264.sliceNum.iSliceNum = 1; + mEncParamsH264.sliceNum.pSliceNum = 1; + } + mEncParamsH264.VUIFlag = 0; + + encStatus = mVAEncoder->setParameters(&mEncParamsH264); + CHECK(encStatus == ENCODE_SUCCESS); + LOGV("new H264 encoder params set"); + + VideoParamsHRD hrdParam; + encStatus = mVAEncoder->getParameters(&hrdParam); + CHECK(encStatus == ENCODE_SUCCESS); + LOGV("got encoder hrd params "); + + hrdParam.bufferSize = mVideoBitRate; + hrdParam.initBufferFullness = hrdParam.bufferSize * INIT_BUF_FULLNESS_RATIO; + + encStatus = mVAEncoder->setParameters(&hrdParam); + CHECK(encStatus == ENCODE_SUCCESS); + LOGV("new encoder hard params set"); + + mOutBufGroup = new MediaBufferGroup(); + CHECK(mOutBufGroup != NULL); + + return OK; +} + +status_t IntelVideoEditorAVCEncoder::start(MetaData *params) { + LOGV("start"); + status_t ret = OK; + + if (mStarted) { + LOGW("Call start() when encoder already started"); + return OK; + } + + mSource->start(params); + + mVAEncoder = createVideoEncoder(MEDIA_MIMETYPE_VIDEO_AVC); + + if (mVAEncoder == NULL) { + LOGE("Fail to create video encoder"); + return NO_MEMORY; + } + mInitCheck = initCheck(mMeta); + + if (mInitCheck != OK) { + return mInitCheck; + } + + uint32_t maxSize; + mVAEncoder->getMaxOutSize(&maxSize); + + LOGV("allocating output buffers of size %d",maxSize); + for (int i = 0; i < OUTPUT_BUFFERS; i++ ) { + mOutBufGroup->add_buffer(new MediaBuffer(maxSize)); + } + + if (OK != getSharedBuffers()) { + LOGE("Failed to get the shared buffers from encoder "); + return UNKNOWN_ERROR; + } + + Encode_Status err; + err = mVAEncoder->start(); + if (err!= ENCODE_SUCCESS) { + LOGE("Failed to initialize the encoder: %d", err); + return UNKNOWN_ERROR; + } + + if (OK != setSharedBuffers()) { + LOGE("Failed to setup the shared buffers"); + return UNKNOWN_ERROR; + } + + mStarted = true; + LOGV("start- DONE"); + return OK; +} + +int IntelVideoEditorAVCEncoder::SBShutdownFunc(void* arg) +{ + LOGV("IntelVideoEditorAVCEncoder::SBShutdownFunc begin()"); + sp r = BufferShareRegistry::getInstance(); + int error = r->sourceExitSharingMode(); + LOGV("sourceExitSharingMode returns %d",error); + return 0; +} + +status_t IntelVideoEditorAVCEncoder::stop() { + 
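+    // Tear down in stages: free buffered output, release the last input
+    // buffer, then unwind buffer sharing on both sides before releasing
+    // the VA encoder.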
LOGV("stop"); + if (!mStarted) { + LOGW("Call stop() when encoder has not started"); + return OK; + } + + if (mOutBufGroup) { + delete mOutBufGroup; + mOutBufGroup = NULL; + } + if (mLastInputBuffer!=NULL) { + mLastInputBuffer->release(); + } + mLastInputBuffer = NULL; + + /* call mSource->stop in a new thread, so the source + can do its end of shared buffer shutdown */ + + androidCreateThread(SBShutdownFunc,this); + LOGV("Successfull create thread!"); + + /* do encoder's buffer sharing shutdown */ + sp r = BufferShareRegistry::getInstance(); + int err = r->encoderExitSharingMode(); + LOGV("encoderExitSharingMode returned %d\n", err); + + mSource->stop(); + + err = r->encoderRequestToDisableSharingMode(); + LOGV("encoderRequestToDisableSharingMode returned %d\n", err); + + /* libsharedbuffer wants the source to call this after the encoder calls + * encoderRequestToDisableSharingMode. Instead of doing complicated + * synchronization, let's just call this ourselves on the source's + * behalf. */ + err = r->sourceRequestToDisableSharingMode(); + LOGV("sourceRequestToDisableSharingMode returned %d\n", err); + + releaseVideoEncoder(mVAEncoder); + mVAEncoder = NULL; + + mStarted = false; + LOGV("stop - DONE"); + + return OK; +} + +sp IntelVideoEditorAVCEncoder::getFormat() { + + sp format = new MetaData; + format->setInt32(kKeyWidth, mVideoWidth); + format->setInt32(kKeyHeight, mVideoHeight); + format->setInt32(kKeyBitRate, mVideoBitRate); + format->setInt32(kKeySampleRate, mVideoFrameRate); + format->setInt32(kKeyColorFormat, mVideoColorFormat); + format->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC); + format->setCString(kKeyDecoderComponent, "IntelVideoEditorAVCEncoder"); + return format; +} + +status_t IntelVideoEditorAVCEncoder::read(MediaBuffer **out, const ReadOptions *options) { + + status_t err; + Encode_Status encRet; + MediaBuffer *tmpIn; + int64_t timestamp = 0; + CHECK(!options); + mReadOptions = options; + *out = NULL; + + LOGV("IntelVideoEditorAVCEncoder::read start"); + + do { + err = mSource->read(&tmpIn, NULL); + if (err == INFO_FORMAT_CHANGED) { + stop(); + start(NULL); + } + } while (err == INFO_FORMAT_CHANGED); + + if (err == ERROR_END_OF_STREAM) { + return err; + } + else if (err != OK) { + LOGE("Failed to read input video frame: %d", err); + return err; + } + + + VideoEncRawBuffer vaInBuf; + + vaInBuf.data = (uint8_t *)tmpIn->data(); + vaInBuf.size = tmpIn->size(); + + tmpIn->meta_data()->findInt64(kKeyTime, (int64_t *)&(vaInBuf.timeStamp)); + LOGV("Encoding: buffer %p, size = %d, ts= %llu",vaInBuf.data, vaInBuf.size, vaInBuf.timeStamp); + + encRet = mVAEncoder->encode(&vaInBuf); + if (encRet != ENCODE_SUCCESS) { + LOGE("Failed to encode input video frame: %d", encRet); + tmpIn->release(); + return UNKNOWN_ERROR; + } + + if (mLastInputBuffer != NULL) { + mLastInputBuffer->release(); + mLastInputBuffer = NULL; + } + mLastInputBuffer = tmpIn; + + LOGV("Encoding Done, getting output buffer "); + MediaBuffer *outputBuffer; + + CHECK(mOutBufGroup->acquire_buffer(&outputBuffer) == OK); + LOGV("Waiting for outputbuffer"); + VideoEncOutputBuffer vaOutBuf; + vaOutBuf.bufferSize = outputBuffer->size(); + vaOutBuf.dataSize = 0; + vaOutBuf.data = (uint8_t *) outputBuffer->data(); + vaOutBuf.format = OUTPUT_EVERYTHING; + + if (mFirstFrame) { + LOGV("mFirstFrame\n"); + encRet = mVAEncoder->getOutput(&vaOutBuf); + if (encRet != ENCODE_SUCCESS) { + LOGE("Failed to retrieve encoded video frame: %d", encRet); + outputBuffer->release(); + return UNKNOWN_ERROR; + } + 
outputBuffer->meta_data()->setInt32(kKeyIsCodecConfig,true); + outputBuffer->meta_data()->setInt32(kKeyIsSyncFrame,true); + mFirstFrame = false; + } else { + vaOutBuf.format = OUTPUT_EVERYTHING; + encRet = mVAEncoder->getOutput(&vaOutBuf); + if (encRet != ENCODE_SUCCESS) { + LOGE("Failed to retrieve encoded video frame: %d", encRet); + outputBuffer->release(); + return UNKNOWN_ERROR; + } + if (vaOutBuf.flag & ENCODE_BUFFERFLAG_SYNCFRAME) { + outputBuffer->meta_data()->setInt32(kKeyIsSyncFrame,true); + } + } + timestamp = vaInBuf.timeStamp; + + LOGV("Got it! data= %p, ts=%llu size =%d", vaOutBuf.data, timestamp, vaOutBuf.dataSize); + + outputBuffer->set_range(0, vaOutBuf.dataSize); + outputBuffer->meta_data()->setInt64(kKeyTime,timestamp); + *out = outputBuffer; + + LOGV("IntelVideoEditorAVCEncoder::read end"); + return OK; +} + +status_t IntelVideoEditorAVCEncoder::getSharedBuffers() { + + LOGV("getSharedBuffers begin"); + Encode_Status encRet; + status_t ret = OK; + + sp r = BufferShareRegistry::getInstance(); + + if (r->encoderRequestToEnableSharingMode() == BS_SUCCESS) { + LOGV("Shared buffer mode available\n"); + } + else { + LOGE("Request to enable sharing failed \n"); + return UNKNOWN_ERROR; + } + + for(int i = 0; i < INPUT_SHARED_BUFFERS; i++) { + VideoParamsUsrptrBuffer paramsUsrptrBuffer; + paramsUsrptrBuffer.type = VideoParamsTypeUsrptrBuffer; + paramsUsrptrBuffer.size = sizeof(VideoParamsUsrptrBuffer); + paramsUsrptrBuffer.expectedSize = mFrameSize; + paramsUsrptrBuffer.format = STRING_TO_FOURCC("NV12"); + paramsUsrptrBuffer.width = mVideoWidth; + paramsUsrptrBuffer.height = mVideoHeight; + LOGV("Share buffer request="); + encRet = mVAEncoder->getParameters(¶msUsrptrBuffer); + if (encRet != ENCODE_SUCCESS ) { + LOGE("could not allocate input surface from the encoder %d", encRet); + ret = NO_MEMORY; + break; + } + mSharedBufs[i].allocatedSize = paramsUsrptrBuffer.actualSize; + mSharedBufs[i].height = mVideoHeight; + mSharedBufs[i].width = mVideoWidth; + mSharedBufs[i].pointer = paramsUsrptrBuffer.usrPtr; + mSharedBufs[i].stride = paramsUsrptrBuffer.stride; + } + LOGV("getSharedBuffers end"); + return ret; +} + +status_t IntelVideoEditorAVCEncoder::setSharedBuffers() { + LOGV("setSharedBuffers"); + sp r = BufferShareRegistry::getInstance(); + + if (r->encoderSetSharedBuffer(mSharedBufs,INPUT_SHARED_BUFFERS) != BS_SUCCESS) { + LOGE("encoderSetSharedBuffer failed \n"); + return UNKNOWN_ERROR; + } + + if (r->encoderEnterSharingMode() != BS_SUCCESS) { + LOGE("sourceEnterSharingMode failed\n"); + return UNKNOWN_ERROR; + } + return OK; +} + +} diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.h b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.h new file mode 100644 index 0000000..29fd1c7 --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.h @@ -0,0 +1,85 @@ +/* + * INTEL CONFIDENTIAL + * Copyright 2010-2011 Intel Corporation All Rights Reserved. + + * The source code, information and material ("Material") contained herein is owned + * by Intel Corporation or its suppliers or licensors, and title to such Material + * remains with Intel Corporation or its suppliers or licensors. The Material contains + * proprietary information of Intel or its suppliers and licensors. The Material is + * protected by worldwide copyright laws and treaty provisions. 
No part of the Material + * may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, + * distributed or disclosed in any way without Intel's prior express written permission. + * No license under any patent, copyright or other intellectual property rights in the + * Material is granted to or conferred upon you, either expressly, by implication, inducement, + * estoppel or otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + + * Unless otherwise agreed by Intel in writing, you may not remove or alter this notice or any + * other notice embedded in Materials by Intel or Intel's suppliers or licensors in any way. + */ + +#ifndef INTELVIDEOEDITORAVCENCODER_H +#define INTELVIDEOEDITORAVCENCODER_H + +#include +#include +#include +#include +#include "va/va.h" +#include "VideoEncoderHost.h" +#include + +namespace android { +struct IntelVideoEditorAVCEncoder : public MediaSource { + IntelVideoEditorAVCEncoder(const sp &source, + const sp& meta); + + virtual status_t start(MetaData *params); + virtual status_t stop(); + + virtual sp getFormat(); + + virtual status_t read(MediaBuffer **buffer, const ReadOptions *options); + + +protected: + virtual ~IntelVideoEditorAVCEncoder(); + +private: + sp mSource; + sp mMeta; + + int32_t mVideoWidth; + int32_t mVideoHeight; + int32_t mFrameSize; + int32_t mVideoFrameRate; + int32_t mVideoBitRate; + int32_t mVideoColorFormat; + int32_t mUseSyncMode; + status_t mInitCheck; + bool mStarted; + bool mFirstFrame; + int32_t mFrameCount; + static const int OUTPUT_BUFFERS = 6; + static const int INPUT_SHARED_BUFFERS = 8; + IVideoEncoder *mVAEncoder; + VideoParamsCommon mEncParamsCommon; + VideoParamsAVC mEncParamsH264; + SharedBufferType mSharedBufs[INPUT_SHARED_BUFFERS]; + const ReadOptions *mReadOptions; + MediaBufferGroup *mOutBufGroup; /* group of output buffers*/ + MediaBuffer *mLastInputBuffer; + +private: + status_t initCheck(const sp& meta); + int32_t calcBitrate(int width, int height); + status_t getSharedBuffers(); + status_t setSharedBuffers(); + static int SBShutdownFunc(void* arg); + + IntelVideoEditorAVCEncoder(const IntelVideoEditorAVCEncoder &); + IntelVideoEditorAVCEncoder &operator=(const IntelVideoEditorAVCEncoder &); +}; +}; +#endif + diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.cpp new file mode 100644 index 0000000..e2c16ed --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.cpp @@ -0,0 +1,278 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * INTEL CONFIDENTIAL + * Copyright 2010-2011 Intel Corporation All Rights Reserved. 
+ + * The source code, information and material ("Material") contained herein is owned + * by Intel Corporation or its suppliers or licensors, and title to such Material + * remains with Intel Corporation or its suppliers or licensors. The Material contains + * proprietary information of Intel or its suppliers and licensors. The Material is + * protected by worldwide copyright laws and treaty provisions. No part of the Material + * may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, + * distributed or disclosed in any way without Intel's prior express written permission. + * No license under any patent, copyright or other intellectual property rights in the + * Material is granted to or conferred upon you, either expressly, by implication, inducement, + * estoppel or otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + + * Unless otherwise agreed by Intel in writing, you may not remove or alter this notice or any + * other notice embedded in Materials by Intel or Intel's suppliers or licensors in any way. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "IntelVideoEditorEncoderSource" +#include "utils/Log.h" + +#include "IntelVideoEditorEncoderSource.h" +#include "utils/Vector.h" +#include +#include +#include +#include +#include +#include + +namespace android { +sp IntelVideoEditorEncoderSource::Create( + const sp &format) { + + sp aSource = + new IntelVideoEditorEncoderSource(format); + return aSource; +} + +IntelVideoEditorEncoderSource::IntelVideoEditorEncoderSource( + const sp &format): + mGroup(NULL), + mUseSharedBuffers(false), + mFirstBufferLink(NULL), + mLastBufferLink(NULL), + mNbBuffer(0), + mIsEOS(false), + mState(CREATED), + mEncFormat(format) { + LOGV("IntelVideoEditorEncoderSource::IntelVideoEditorEncoderSource"); +} + +IntelVideoEditorEncoderSource::~IntelVideoEditorEncoderSource() { + + // Safety clean up + if( STARTED == mState ) { + stop(); + } +} + +status_t IntelVideoEditorEncoderSource::start(MetaData *meta) { + Mutex::Autolock autolock(mLock); + status_t err = OK; + + LOGV("IntelVideoEditorEncoderSource::start() begin"); + + if( CREATED != mState ) { + LOGV("IntelVideoEditorEncoderSource::start: invalid state %d", mState); + return UNKNOWN_ERROR; + } + mState = STARTED; + sp r = BufferShareRegistry::getInstance(); + if (r->sourceRequestToEnableSharingMode() == BS_SUCCESS) { + LOGI("Shared buffer mode available\n"); + mUseSharedBuffers = true; + mGroup = NULL; + } + else + { + LOGE("Shared buffer mode not available\n"); + return UNKNOWN_ERROR; + } + LOGV("IntelVideoEditorEncoderSource::start() END (0x%x)", err); + return err; +} + +status_t IntelVideoEditorEncoderSource::getSharedBuffers() +{ + Mutex::Autolock autolock(mLock); + + LOGV("IntelVideoEditorEncoderSource::getSharedBuffers begin"); + sp r = BufferShareRegistry::getInstance(); + SharedBufferType *bufs = NULL; + int buf_cnt = 0; + + if (r->sourceEnterSharingMode() != BS_SUCCESS) { + LOGE("sourceEnterSharingMode failed\n"); + return UNKNOWN_ERROR; + } + + if (r->sourceGetSharedBuffer(NULL, &buf_cnt) != BS_SUCCESS) { + LOGE("sourceGetSharedBuffer failed, unable to get buffer count\n"); + return UNKNOWN_ERROR; + } + + bufs = new SharedBufferType[buf_cnt]; + if (r->sourceGetSharedBuffer(bufs, &buf_cnt) != BS_SUCCESS) { + LOGE("sourceGetSharedBuffer failed, unable to retrieve buffers\n"); + delete [] bufs; + return UNKNOWN_ERROR; + } + + mGroup = new MediaBufferGroup(); + + for (int n = 0; n < buf_cnt; n++) + { + 
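+        // Wrap each shared buffer in a MediaBuffer so it can be served
+        // through the standard MediaBufferGroup acquire/release path.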
mGroup->add_buffer(new MediaBuffer(bufs[n].pointer, bufs[n].allocatedSize)); + } + + delete [] bufs; + + LOGV("IntelVideoEditorAVCEncoderSource::getSharedBuffers end"); + return OK; +} + + +status_t IntelVideoEditorEncoderSource::stop() { + + LOGV("IntelVideoEditorAVCEncoderSource::stop() begin"); + status_t err = OK; + + if( STARTED != mState ) { + LOGV("IntelVideoEditorAVCEncoderSource::stop: invalid state %d", mState); + return UNKNOWN_ERROR; + } + + if (mUseSharedBuffers) { + if (mGroup) { + delete mGroup; + mGroup = NULL; + } + mUseSharedBuffers = false; + } + + int32_t i = 0; + MediaBufferChain* tmpLink = NULL; + while( mFirstBufferLink ) { + i++; + tmpLink = mFirstBufferLink; + mFirstBufferLink = mFirstBufferLink->nextLink; + delete tmpLink; + } + LOGV("IntelVideoEditorEncoderSource::stop : %d buffer remained", i); + mFirstBufferLink = NULL; + mLastBufferLink = NULL; + mState = CREATED; + + LOGV("IntelVideoEditorEncoderSource::stop() END (0x%x)", err); + return err; +} + +sp IntelVideoEditorEncoderSource::getFormat() { + + LOGV("IntelVideoEditorEncoderSource::getFormat"); + return mEncFormat; +} + +status_t IntelVideoEditorEncoderSource::read(MediaBuffer **buffer, + const ReadOptions *options) { + Mutex::Autolock autolock(mLock); + + LOGV("IntelVideoEditorEncoderSource::read() begin"); + + MediaSource::ReadOptions readOptions; + status_t err = OK; + MediaBufferChain* tmpLink = NULL; + + if ( STARTED != mState ) { + LOGV("IntelVideoEditorEncoderSource::read: invalid state %d", mState); + return UNKNOWN_ERROR; + } + + while (mFirstBufferLink == NULL && !mIsEOS) { + LOGV("Wait for buffer in IntelVideoEditorEncoderSource::read()"); + mBufferCond.wait(mLock); + } + + LOGV("Get the buffer in IntelVideoEditorEncoderSource::read()!"); + + // End of stream? 
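+    // (mIsEOS is set by storeBuffer(NULL); reaching here with an empty
+    // chain therefore really means end of stream.)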
+ if (mFirstBufferLink == NULL) { + *buffer = NULL; + LOGV("IntelVideoEditorEncoderSource::read : EOS"); + return ERROR_END_OF_STREAM; + } + + // Get a buffer from the chain + *buffer = mFirstBufferLink->buffer; + tmpLink = mFirstBufferLink; + mFirstBufferLink = mFirstBufferLink->nextLink; + + if ( NULL == mFirstBufferLink) { + mLastBufferLink = NULL; + } + delete tmpLink; + mNbBuffer--; + + LOGV("IntelVideoEditorEncoderSource::read() END (0x%x)", err); + return err; +} + +int32_t IntelVideoEditorEncoderSource::storeBuffer(MediaBuffer *buffer) { + Mutex::Autolock autolock(mLock); + + LOGV("IntelVideoEditorEncoderSource::storeBuffer() begin"); + + status_t err = OK; + + if( NULL == buffer ) { + LOGV("IntelVideoEditorEncoderSource::storeBuffer : reached EOS"); + mIsEOS = true; + } else { + MediaBufferChain* newLink = new MediaBufferChain; + newLink->buffer = buffer; + newLink->nextLink = NULL; + if( NULL != mLastBufferLink ) { + mLastBufferLink->nextLink = newLink; + } else { + mFirstBufferLink = newLink; + } + mLastBufferLink = newLink; + mNbBuffer++; + } + mBufferCond.signal(); + LOGV("IntelVideoEditorEncoderSource::storeBuffer() end"); + return mNbBuffer; +} + +int32_t IntelVideoEditorEncoderSource::requestBuffer(MediaBuffer **buffer) { + status_t err = OK; + LOGV("IntelVideoEditorEncoderSource::requestBuffer() begin"); + if (!mGroup && mUseSharedBuffers) { + err = getSharedBuffers(); + if (err != OK) { + LOGE("shared buffer setup failed\n"); + return err; + } + } + + err = mGroup->acquire_buffer(buffer); + LOGV("requestBuffer buffer addr = 0x%p",(uint8_t *)(*buffer)->data()); + if (err != OK) { + LOGE("Fail to get shared buffers"); + return UNKNOWN_ERROR; + } + LOGV("IntelVideoEditorEncoderSource::requestBuffer() end"); + return err; +} +} diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.h b/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.h new file mode 100644 index 0000000..2f76051 --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.h @@ -0,0 +1,92 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * INTEL CONFIDENTIAL + * Copyright 2010-2011 Intel Corporation All Rights Reserved. + + * The source code, information and material ("Material") contained herein is owned + * by Intel Corporation or its suppliers or licensors, and title to such Material + * remains with Intel Corporation or its suppliers or licensors. The Material contains + * proprietary information of Intel or its suppliers and licensors. The Material is + * protected by worldwide copyright laws and treaty provisions. No part of the Material + * may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, + * distributed or disclosed in any way without Intel's prior express written permission. 
+ * No license under any patent, copyright or other intellectual property rights in the + * Material is granted to or conferred upon you, either expressly, by implication, inducement, + * estoppel or otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + + * Unless otherwise agreed by Intel in writing, you may not remove or alter this notice or any + * other notice embedded in Materials by Intel or Intel's suppliers or licensors in any way. + */ + +#ifndef INTELVIDEOEDITORENCODERSOURCE_H +#define INTELVIDEOEDITORENCODERSOURCE_H + +#include +#include +#include +#include + +namespace android { +struct IntelVideoEditorEncoderSource : public MediaSource { + public: + static sp Create( + const sp &format); + virtual status_t start(MetaData *params = NULL); + virtual status_t stop(); + virtual sp getFormat(); + virtual status_t read(MediaBuffer **buffer, + const ReadOptions *options = NULL); + virtual int32_t storeBuffer(MediaBuffer *buffer); + virtual int32_t requestBuffer(MediaBuffer **buffer); + + protected: + virtual ~IntelVideoEditorEncoderSource(); + + private: + status_t getSharedBuffers(); + MediaBufferGroup* mGroup; + bool mUseSharedBuffers; + + struct MediaBufferChain { + MediaBuffer* buffer; + MediaBufferChain* nextLink; + }; + enum State { + CREATED, + STARTED, + ERROR + }; + IntelVideoEditorEncoderSource(const sp &format); + + // Don't call me + IntelVideoEditorEncoderSource(const IntelVideoEditorEncoderSource &); + IntelVideoEditorEncoderSource &operator=( + const IntelVideoEditorEncoderSource &); + + MediaBufferChain* mFirstBufferLink; + MediaBufferChain* mLastBufferLink; + int32_t mNbBuffer; + bool mIsEOS; + State mState; + sp mEncFormat; + Mutex mLock; + Condition mBufferCond; +}; +} +#endif diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.cpp new file mode 100644 index 0000000..02f91f3 --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.cpp @@ -0,0 +1,400 @@ +/* + * INTEL CONFIDENTIAL + * Copyright 2010-2011 Intel Corporation All Rights Reserved. + + * The source code, information and material ("Material") contained herein is owned + * by Intel Corporation or its suppliers or licensors, and title to such Material + * remains with Intel Corporation or its suppliers or licensors. The Material contains + * proprietary information of Intel or its suppliers and licensors. The Material is + * protected by worldwide copyright laws and treaty provisions. No part of the Material + * may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, + * distributed or disclosed in any way without Intel's prior express written permission. + * No license under any patent, copyright or other intellectual property rights in the + * Material is granted to or conferred upon you, either expressly, by implication, inducement, + * estoppel or otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + + * Unless otherwise agreed by Intel in writing, you may not remove or alter this notice or any + * other notice embedded in Materials by Intel or Intel's suppliers or licensors in any way. 
+ */
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "IntelVideoEditorH263Encoder"
+#include
+#include "OMX_Video.h"
+#include
+#include
+#include
+#include
+#include
+#include "IntelVideoEditorH263Encoder.h"
+#define INIT_BUF_FULLNESS_RATIO 0.8125
+#define INITIAL_INTRA_PERIOD (mVideoFrameRate * 2 / 3)
+#define NORMAL_INTRA_PERIOD (mVideoFrameRate * 3)
+
+namespace android {
+
+IntelVideoEditorH263Encoder::IntelVideoEditorH263Encoder(
+        const sp<MediaSource>& source,
+        const sp<MetaData>& meta)
+    : mSource(source),
+      mMeta(meta),
+      mUseSyncMode(0),
+      mStarted(false),
+      mFirstFrame(true),
+      mFrameCount(0),
+      mVAEncoder(NULL),
+      mOutBufGroup(NULL),
+      mLastInputBuffer(NULL) {
+
+    LOGV("Construct IntelVideoEditorH263Encoder");
+}
+
+IntelVideoEditorH263Encoder::~IntelVideoEditorH263Encoder() {
+    LOGV("Destruct IntelVideoEditorH263Encoder");
+    if (mStarted) {
+        stop();
+    }
+}
+
+status_t IntelVideoEditorH263Encoder::initCheck(const sp<MetaData>& meta) {
+    LOGV("initCheck");
+
+    Encode_Status encStatus;
+
+    sp<MetaData> sourceFormat = mSource->getFormat();
+
+    CHECK(sourceFormat->findInt32(kKeyWidth, &mVideoWidth));
+    CHECK(sourceFormat->findInt32(kKeyHeight, &mVideoHeight));
+    CHECK(sourceFormat->findInt32(kKeyFrameRate, &mVideoFrameRate));
+    CHECK(sourceFormat->findInt32(kKeyColorFormat, &mVideoColorFormat));
+
+    CHECK(sourceFormat->findInt32(kKeyBitRate, &mVideoBitRate));
+    LOGV("mVideoWidth = %d, mVideoHeight = %d, mVideoFrameRate = %d, mVideoColorFormat = %d, mVideoBitRate = %d",
+        mVideoWidth, mVideoHeight, mVideoFrameRate, mVideoColorFormat, mVideoBitRate);
+    if (mVideoColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) {
+        LOGE("Color format %d is not supported", mVideoColorFormat);
+        return BAD_VALUE;
+    }
+    mFrameSize = mVideoHeight * mVideoWidth * 1.5;
+    /*
+     * SET PARAMS FOR THE ENCODER BASED ON THE METADATA
+     */
+    encStatus = mVAEncoder->getParameters(&mEncParamsCommon);
+    CHECK(encStatus == ENCODE_SUCCESS);
+    LOGV("got encoder params");
+
+    mEncParamsCommon.resolution.width = mVideoWidth;
+    mEncParamsCommon.resolution.height= mVideoHeight;
+    mEncParamsCommon.frameRate.frameRateNum = mVideoFrameRate;
+    mEncParamsCommon.frameRate.frameRateDenom = 1;
+    mEncParamsCommon.rcMode = RATE_CONTROL_VBR;
+    mEncParamsCommon.rcParams.bitRate = mVideoBitRate;
+    mEncParamsCommon.rawFormat = RAW_FORMAT_NV12;
+
+    // Set the intra period to a small value so that more IDR frames are
+    // generated at the beginning of encoding, then switch to the larger
+    // NORMAL_INTRA_PERIOD for the rest of the session. This works around
+    // slow video start-up after clone / extended mode switching: during the
+    // switch, the Widi stack sends an RTSP command to set the adaptor jitter
+    // buffer size, and the adaptor may miss the first IDR while that setting
+    // is in flight. If the first IDR is missed, the adaptor must wait for
+    // the next IDR before decoding can start, so a long intra period would
+    // delay the first displayed frame.
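+    // For example, at 30 fps INITIAL_INTRA_PERIOD is 30 * 2 / 3 = 20 frames,
+    // while NORMAL_INTRA_PERIOD is 30 * 3 = 90 frames.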
+    mEncParamsCommon.intraPeriod = INITIAL_INTRA_PERIOD;
+
+    mEncParamsCommon.rcParams.minQP = 1;
+    mEncParamsCommon.rcParams.initQP = 24;
+
+    mEncParamsCommon.syncEncMode = mUseSyncMode;
+    mFrameCount = 0;
+
+    encStatus = mVAEncoder->setParameters(&mEncParamsCommon);
+    CHECK(encStatus == ENCODE_SUCCESS);
+    LOGV("new encoder params set");
+
+    VideoParamsHRD hrdParam;
+    encStatus = mVAEncoder->getParameters(&hrdParam);
+    CHECK(encStatus == ENCODE_SUCCESS);
+    LOGV("got encoder HRD params");
+
+    hrdParam.bufferSize = mVideoBitRate;
+    hrdParam.initBufferFullness = hrdParam.bufferSize * INIT_BUF_FULLNESS_RATIO;
+
+    encStatus = mVAEncoder->setParameters(&hrdParam);
+    CHECK(encStatus == ENCODE_SUCCESS);
+    LOGV("new encoder HRD params set");
+
+    mOutBufGroup = new MediaBufferGroup();
+    CHECK(mOutBufGroup != NULL);
+
+    return OK;
+}
+
+status_t IntelVideoEditorH263Encoder::start(MetaData *params) {
+    LOGV("start");
+    status_t ret = OK;
+
+    if (mStarted) {
+        LOGW("Call start() when encoder already started");
+        return OK;
+    }
+
+    mSource->start(params);
+
+    mVAEncoder = createVideoEncoder("video/h263");
+
+    if (mVAEncoder == NULL) {
+        LOGE("Fail to create video encoder");
+        return NO_MEMORY;
+    }
+    mInitCheck = initCheck(mMeta);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    uint32_t maxSize;
+    mVAEncoder->getMaxOutSize(&maxSize);
+
+    LOGV("allocating output buffers of size %d", maxSize);
+    for (int i = 0; i < OUTPUT_BUFFERS; i++) {
+        mOutBufGroup->add_buffer(new MediaBuffer(maxSize));
+    }
+
+    if (OK != getSharedBuffers()) {
+        LOGE("Failed to get the shared buffers from encoder");
+        return UNKNOWN_ERROR;
+    }
+
+    Encode_Status err;
+    err = mVAEncoder->start();
+    if (err != ENCODE_SUCCESS) {
+        LOGE("Failed to initialize the encoder: %d", err);
+        return UNKNOWN_ERROR;
+    }
+
+    if (OK != setSharedBuffers()) {
+        LOGE("Failed to setup the shared buffers");
+        return UNKNOWN_ERROR;
+    }
+
+    mStarted = true;
+    LOGV("start - DONE");
+    return OK;
+}
+
+int IntelVideoEditorH263Encoder::SBShutdownFunc(void* arg)
+{
+    LOGV("IntelVideoEditorH263Encoder::SBShutdownFunc begin()");
+    sp<BufferShareRegistry> r = BufferShareRegistry::getInstance();
+    int error = r->sourceExitSharingMode();
+    LOGV("sourceExitSharingMode returns %d", error);
+    return 0;
+}
+
+status_t IntelVideoEditorH263Encoder::stop() {
+    LOGV("stop");
+    if (!mStarted) {
+        LOGW("Call stop() when encoder has not started");
+        return OK;
+    }
+
+    if (mOutBufGroup) {
+        delete mOutBufGroup;
+        mOutBufGroup = NULL;
+    }
+    if (mLastInputBuffer != NULL) {
+        mLastInputBuffer->release();
+    }
+    mLastInputBuffer = NULL;
+
+    /* Call mSource->stop in a new thread, so the source
+       can do its end of the shared buffer shutdown. */
+
+    androidCreateThread(SBShutdownFunc, this);
+    LOGV("Successfully created shutdown thread");
+
+    /* do encoder's buffer sharing shutdown */
+    sp<BufferShareRegistry> r = BufferShareRegistry::getInstance();
+    int err = r->encoderExitSharingMode();
+    LOGV("encoderExitSharingMode returned %d\n", err);
+
+    mSource->stop();
+
+    err = r->encoderRequestToDisableSharingMode();
+    LOGV("encoderRequestToDisableSharingMode returned %d\n", err);
+
+    /* libsharedbuffer wants the source to call this after the encoder calls
+     * encoderRequestToDisableSharingMode. Instead of doing complicated
+     * synchronization, let's just call this ourselves on the source's
+     * behalf.
*/ + err = r->sourceRequestToDisableSharingMode(); + LOGV("sourceRequestToDisableSharingMode returned %d\n", err); + + releaseVideoEncoder(mVAEncoder); + mVAEncoder = NULL; + + mStarted = false; + LOGV("stop - DONE"); + + return OK; +} + +sp IntelVideoEditorH263Encoder::getFormat() { + LOGV("getFormat"); + + sp format = new MetaData; + format->setInt32(kKeyWidth, mVideoWidth); + format->setInt32(kKeyHeight, mVideoHeight); + format->setInt32(kKeyBitRate, mVideoBitRate); + format->setInt32(kKeySampleRate, mVideoFrameRate); + format->setInt32(kKeyColorFormat, mVideoColorFormat); + format->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263); + format->setCString(kKeyDecoderComponent, "IntelVideoEditorH263Encoder"); + return format; +} + +status_t IntelVideoEditorH263Encoder::read(MediaBuffer **out, const ReadOptions *options) { + + status_t err; + Encode_Status encRet; + MediaBuffer *tmpIn; + int64_t timestamp = 0; + CHECK(!options); + mReadOptions = options; + *out = NULL; + + LOGV("IntelVideoEditorAVCEncoder::read start"); + + do { + err = mSource->read(&tmpIn, NULL); + if (err == INFO_FORMAT_CHANGED) { + stop(); + start(NULL); + } + } while (err == INFO_FORMAT_CHANGED); + + if (err == ERROR_END_OF_STREAM) { + return err; + } + else if (err != OK) { + LOGE("Failed to read input video frame: %d", err); + return err; + } + + VideoEncRawBuffer vaInBuf; + + vaInBuf.data = (uint8_t *)tmpIn->data(); + vaInBuf.size = tmpIn->size(); + + tmpIn->meta_data()->findInt64(kKeyTime, (int64_t *)&(vaInBuf.timeStamp)); + LOGV("Encoding: buffer %p, size = %d, ts= %llu",vaInBuf.data, vaInBuf.size, vaInBuf.timeStamp); + + encRet = mVAEncoder->encode(&vaInBuf); + if (encRet != ENCODE_SUCCESS) { + LOGE("Failed to encode input video frame: %d", encRet); + tmpIn->release(); + return UNKNOWN_ERROR; + } + + if (mLastInputBuffer != NULL) { + mLastInputBuffer->release(); + mLastInputBuffer = NULL; + } + mLastInputBuffer = tmpIn; + + LOGV("Encoding Done, getting output buffer "); + MediaBuffer *outputBuffer; + + CHECK(mOutBufGroup->acquire_buffer(&outputBuffer) == OK); + LOGV("Waiting for outputbuffer"); + VideoEncOutputBuffer vaOutBuf; + vaOutBuf.bufferSize = outputBuffer->size(); + vaOutBuf.dataSize = 0; + vaOutBuf.data = (uint8_t *) outputBuffer->data(); + vaOutBuf.format = OUTPUT_EVERYTHING; + + vaOutBuf.format = OUTPUT_EVERYTHING; + encRet = mVAEncoder->getOutput(&vaOutBuf); + if (encRet != ENCODE_SUCCESS) { + LOGE("Failed to retrieve encoded video frame: %d", encRet); + outputBuffer->release(); + return UNKNOWN_ERROR; + } + if (vaOutBuf.flag & ENCODE_BUFFERFLAG_SYNCFRAME) { + outputBuffer->meta_data()->setInt32(kKeyIsSyncFrame,true); + } + + timestamp = vaInBuf.timeStamp; + + LOGV("Got it! 
data= %p, ts=%llu size =%d", vaOutBuf.data, timestamp, vaOutBuf.dataSize); + + outputBuffer->set_range(0, vaOutBuf.dataSize); + outputBuffer->meta_data()->setInt64(kKeyTime,timestamp); + *out = outputBuffer; + LOGV("IntelVideoEditorAVCEncoder::read end"); + return OK; +} + +status_t IntelVideoEditorH263Encoder::getSharedBuffers() { + + LOGV("getSharedBuffers begin"); + Encode_Status encRet; + status_t ret = OK; + + sp r = BufferShareRegistry::getInstance(); + + if (r->encoderRequestToEnableSharingMode() == BS_SUCCESS) { + LOGI("Shared buffer mode available\n"); + } + else { + LOGE("Request to enable sharing failed \n"); + return UNKNOWN_ERROR; + } + + for(int i = 0; i < INPUT_SHARED_BUFFERS; i++) { + VideoParamsUsrptrBuffer paramsUsrptrBuffer; + paramsUsrptrBuffer.type = VideoParamsTypeUsrptrBuffer; + paramsUsrptrBuffer.size = sizeof(VideoParamsUsrptrBuffer); + paramsUsrptrBuffer.expectedSize = mFrameSize; + paramsUsrptrBuffer.format = STRING_TO_FOURCC("NV12"); + paramsUsrptrBuffer.width = mVideoWidth; + paramsUsrptrBuffer.height = mVideoHeight; + LOGV("Share buffer request="); + encRet = mVAEncoder->getParameters(¶msUsrptrBuffer); + if (encRet != ENCODE_SUCCESS ) { + LOGE("could not allocate input surface from the encoder %d", encRet); + ret = NO_MEMORY; + break; + } + mSharedBufs[i].allocatedSize = paramsUsrptrBuffer.actualSize; + mSharedBufs[i].height = mVideoHeight; + mSharedBufs[i].width = mVideoWidth; + mSharedBufs[i].pointer = paramsUsrptrBuffer.usrPtr; + mSharedBufs[i].stride = paramsUsrptrBuffer.stride; + } + LOGV("getSharedBuffers end"); + return ret; +} + +status_t IntelVideoEditorH263Encoder::setSharedBuffers() { + + LOGV("setSharedBuffers"); + sp r = BufferShareRegistry::getInstance(); + + if (r->encoderSetSharedBuffer(mSharedBufs,INPUT_SHARED_BUFFERS) != BS_SUCCESS) { + LOGE("encoderSetSharedBuffer failed \n"); + return UNKNOWN_ERROR; + } + + if (r->encoderEnterSharingMode() != BS_SUCCESS) { + LOGE("sourceEnterSharingMode failed\n"); + return UNKNOWN_ERROR; + } + return OK; +} + +} diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.h b/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.h new file mode 100644 index 0000000..0c4bee6 --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.h @@ -0,0 +1,84 @@ +/* + * INTEL CONFIDENTIAL + * Copyright 2010-2011 Intel Corporation All Rights Reserved. + + * The source code, information and material ("Material") contained herein is owned + * by Intel Corporation or its suppliers or licensors, and title to such Material + * remains with Intel Corporation or its suppliers or licensors. The Material contains + * proprietary information of Intel or its suppliers and licensors. The Material is + * protected by worldwide copyright laws and treaty provisions. No part of the Material + * may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, + * distributed or disclosed in any way without Intel's prior express written permission. + * No license under any patent, copyright or other intellectual property rights in the + * Material is granted to or conferred upon you, either expressly, by implication, inducement, + * estoppel or otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + + * Unless otherwise agreed by Intel in writing, you may not remove or alter this notice or any + * other notice embedded in Materials by Intel or Intel's suppliers or licensors in any way. 
+ */ + +#ifndef INTELVIDEOEDITORH263ENCODER_H +#define INTELVIDEOEDITORH263ENCODER_H + +#include +#include +#include +#include +#include "va/va.h" +#include "VideoEncoderHost.h" +#include + +namespace android { +struct IntelVideoEditorH263Encoder : public MediaSource { + IntelVideoEditorH263Encoder(const sp &source, + const sp& meta); + + virtual status_t start(MetaData *params); + virtual status_t stop(); + + virtual sp getFormat(); + + virtual status_t read(MediaBuffer **buffer, const ReadOptions *options); + + +protected: + virtual ~IntelVideoEditorH263Encoder(); + +private: + sp mSource; + sp mMeta; + + int32_t mVideoWidth; + int32_t mVideoHeight; + int32_t mFrameSize; + int32_t mVideoFrameRate; + int32_t mVideoBitRate; + int32_t mVideoColorFormat; + int32_t mUseSyncMode; + status_t mInitCheck; + bool mStarted; + bool mFirstFrame; + int32_t mFrameCount; + static const int OUTPUT_BUFFERS = 6; + static const int INPUT_SHARED_BUFFERS = 8; + IVideoEncoder *mVAEncoder; + VideoParamsCommon mEncParamsCommon; + SharedBufferType mSharedBufs[INPUT_SHARED_BUFFERS]; + const ReadOptions *mReadOptions; + MediaBufferGroup *mOutBufGroup; /* group of output buffers*/ + MediaBuffer *mLastInputBuffer; + +private: + status_t initCheck(const sp& meta); + int32_t calcBitrate(int width, int height); + status_t getSharedBuffers(); + status_t setSharedBuffers(); + static int SBShutdownFunc(void* arg); + + IntelVideoEditorH263Encoder(const IntelVideoEditorH263Encoder &); + IntelVideoEditorH263Encoder &operator=(const IntelVideoEditorH263Encoder &); +}; +}; +#endif + diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp new file mode 100644 index 0000000..299d82a --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp @@ -0,0 +1,511 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp
new file mode 100644
index 0000000..299d82a
--- /dev/null
+++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp
@@ -0,0 +1,511 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+*************************************************************************
+* @file IntelVideoEditorUtils.cpp
+* @brief StageFright shell Utilities
+*************************************************************************
+*/
+#define LOG_NDEBUG 1
+#define LOG_TAG "SF_utils"
+#include "utils/Log.h"
+
+#include "IntelVideoEditorUtils.h"
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+/* Android includes*/
+#include
+#include
+
+/*---------------------*/
+/* DEBUG LEVEL SETUP   */
+/*---------------------*/
+#define LOG1 ALOGE    /*ERRORS Logging*/
+#define LOG2 ALOGI    /*WARNING Logging*/
+#define LOG3 //ALOGV  /*COMMENTS Logging*/
+
+namespace android {
+
+void displayMetaData(const sp<MetaData> meta) {
+
+    const char* charData;
+    int32_t int32Data;
+    int64_t int64Data;
+    uint32_t type;
+    const void* data;
+    void* ptr;
+    size_t size;
+
+    if (meta->findCString(kKeyMIMEType, &charData)) {
+        LOG1("displayMetaData kKeyMIMEType %s", charData);
+    }
+    if (meta->findInt32(kKeyWidth, &int32Data)) {
+        LOG1("displayMetaData kKeyWidth %d", int32Data);
+    }
+    if (meta->findInt32(kKeyHeight, &int32Data)) {
+        LOG1("displayMetaData kKeyHeight %d", int32Data);
+    }
+    if (meta->findInt32(kKeyIFramesInterval, &int32Data)) {
+        LOG1("displayMetaData kKeyIFramesInterval %d", int32Data);
+    }
+    if (meta->findInt32(kKeyStride, &int32Data)) {
+        LOG1("displayMetaData kKeyStride %d", int32Data);
+    }
+    if (meta->findInt32(kKeySliceHeight, &int32Data)) {
+        LOG1("displayMetaData kKeySliceHeight %d", int32Data);
+    }
+    if (meta->findInt32(kKeyChannelCount, &int32Data)) {
+        LOG1("displayMetaData kKeyChannelCount %d", int32Data);
+    }
+    if (meta->findInt32(kKeySampleRate, &int32Data)) {
+        LOG1("displayMetaData kKeySampleRate %d", int32Data);
+    }
+    if (meta->findInt32(kKeyBitRate, &int32Data)) {
+        LOG1("displayMetaData kKeyBitRate %d", int32Data);
+    }
+    if (meta->findData(kKeyESDS, &type, &data, &size)) {
+        LOG1("displayMetaData kKeyESDS type=%d size=%d", type, size);
+    }
+    if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+        LOG1("displayMetaData kKeyAVCC data=0x%X type=%d size=%d",
+            *((unsigned int*)data), type, size);
+    }
+    if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) {
+        LOG1("displayMetaData kKeyVorbisInfo type=%d size=%d", type, size);
+    }
+    if (meta->findData(kKeyVorbisBooks, &type, &data, &size)) {
+        LOG1("displayMetaData kKeyVorbisBooks type=%d size=%d", type, size);
+    }
+    if (meta->findInt32(kKeyWantsNALFragments, &int32Data)) {
+        LOG1("displayMetaData kKeyWantsNALFragments %d", int32Data);
+    }
+    if (meta->findInt32(kKeyIsSyncFrame, &int32Data)) {
+        LOG1("displayMetaData kKeyIsSyncFrame %d", int32Data);
+    }
+    if (meta->findInt32(kKeyIsCodecConfig, &int32Data)) {
+        LOG1("displayMetaData kKeyIsCodecConfig %d", int32Data);
+    }
+    if (meta->findInt64(kKeyTime, &int64Data)) {
+        LOG1("displayMetaData kKeyTime %lld", int64Data);
+    }
+    if (meta->findInt32(kKeyDuration, &int32Data)) {
+        LOG1("displayMetaData kKeyDuration %d", int32Data);
+    }
+    if (meta->findInt32(kKeyColorFormat, &int32Data)) {
+        LOG1("displayMetaData kKeyColorFormat %d", int32Data);
+    }
+    if (meta->findPointer(kKeyPlatformPrivate, &ptr)) {
+        LOG1("displayMetaData kKeyPlatformPrivate pointer=0x%x", (int32_t) ptr);
+    }
+    if (meta->findCString(kKeyDecoderComponent, &charData)) {
+        LOG1("displayMetaData kKeyDecoderComponent %s", charData);
+    }
+    if (meta->findInt32(kKeyBufferID, &int32Data)) {
+        LOG1("displayMetaData kKeyBufferID %d", int32Data);
+    }
+    if (meta->findInt32(kKeyMaxInputSize, &int32Data)) {
+        LOG1("displayMetaData kKeyMaxInputSize %d", int32Data);
+    }
+    if (meta->findInt64(kKeyThumbnailTime, &int64Data)) {
+        LOG1("displayMetaData kKeyThumbnailTime %lld", int64Data);
+    }
+    if (meta->findCString(kKeyAlbum, &charData)) {
+        LOG1("displayMetaData kKeyAlbum %s", charData);
+    }
+    if (meta->findCString(kKeyArtist, &charData)) {
+        LOG1("displayMetaData kKeyArtist %s", charData);
+    }
+    if (meta->findCString(kKeyAlbumArtist, &charData)) {
+        LOG1("displayMetaData kKeyAlbumArtist %s", charData);
+    }
+    if (meta->findCString(kKeyComposer, &charData)) {
+        LOG1("displayMetaData kKeyComposer %s", charData);
+    }
+    if (meta->findCString(kKeyGenre, &charData)) {
+        LOG1("displayMetaData kKeyGenre %s", charData);
+    }
+    if (meta->findCString(kKeyTitle, &charData)) {
+        LOG1("displayMetaData kKeyTitle %s", charData);
+    }
+    if (meta->findCString(kKeyYear, &charData)) {
+        LOG1("displayMetaData kKeyYear %s", charData);
+    }
+    if (meta->findData(kKeyAlbumArt, &type, &data, &size)) {
+        LOG1("displayMetaData kKeyAlbumArt type=%d size=%d", type, size);
+    }
+    if (meta->findCString(kKeyAlbumArtMIME, &charData)) {
+        LOG1("displayMetaData kKeyAlbumArtMIME %s", charData);
+    }
+    if (meta->findCString(kKeyAuthor, &charData)) {
+        LOG1("displayMetaData kKeyAuthor %s", charData);
+    }
+    if (meta->findCString(kKeyCDTrackNumber, &charData)) {
+        LOG1("displayMetaData kKeyCDTrackNumber %s", charData);
+    }
+    if (meta->findCString(kKeyDiscNumber, &charData)) {
+        LOG1("displayMetaData kKeyDiscNumber %s", charData);
+    }
+    if (meta->findCString(kKeyDate, &charData)) {
+        LOG1("displayMetaData kKeyDate %s", charData);
+    }
+    if (meta->findCString(kKeyWriter, &charData)) {
+        LOG1("displayMetaData kKeyWriter %s", charData);
+    }
+    if (meta->findInt32(kKeyTimeScale, &int32Data)) {
+        LOG1("displayMetaData kKeyTimeScale %d", int32Data);
+    }
+    if (meta->findInt32(kKeyVideoProfile, &int32Data)) {
+        LOG1("displayMetaData kKeyVideoProfile %d", int32Data);
+    }
+    if (meta->findInt32(kKeyVideoLevel, &int32Data)) {
+        LOG1("displayMetaData kKeyVideoLevel %d", int32Data);
+    }
+    if (meta->findInt32(kKey64BitFileOffset, &int32Data)) {
+        LOG1("displayMetaData kKey64BitFileOffset %d", int32Data);
+    }
+    if (meta->findInt32(kKeyFileType, &int32Data)) {
+        LOG1("displayMetaData kKeyFileType %d", int32Data);
+    }
+    if (meta->findInt64(kKeyTrackTimeStatus, &int64Data)) {
+        LOG1("displayMetaData kKeyTrackTimeStatus %lld", int64Data);
+    }
+    if (meta->findInt32(kKeyNotRealTime, &int32Data)) {
+        LOG1("displayMetaData kKeyNotRealTime %d", int32Data);
+    }
+}
+
+/**
+ * This code was extracted from StageFright MPEG4 writer
+ * It is used to parse and format the AVC codec specific info received
+ * from StageFright encoders
+ */
+static const uint8_t kNalUnitTypeSeqParamSet = 0x07;
+static const uint8_t kNalUnitTypePicParamSet = 0x08;
+struct AVCParamSet {
+    AVCParamSet(uint16_t length, const uint8_t *data)
+        : mLength(length), mData(data) {}
+
+    uint16_t mLength;
+    const uint8_t *mData;
+};
+struct AVCCodecSpecificContext {
+    List<AVCParamSet> mSeqParamSets;
+    List<AVCParamSet> mPicParamSets;
+    uint8_t mProfileIdc;
+    uint8_t mProfileCompatible;
+    uint8_t mLevelIdc;
+};
+
+const uint8_t *parseParamSet(AVCCodecSpecificContext* pC,
+        const uint8_t *data, size_t length, int type, size_t *paramSetLen) {
+    CHECK(type == kNalUnitTypeSeqParamSet ||
+        type == kNalUnitTypePicParamSet);
+
+    size_t bytesLeft = length;
+    while (bytesLeft > 4 &&
+        memcmp("\x00\x00\x00\x01", &data[length - bytesLeft], 4)) {
+        --bytesLeft;
+    }
+    if (bytesLeft <= 4) {
+        bytesLeft = 0; // Last parameter set
+    }
+    const uint8_t *nextStartCode = &data[length - bytesLeft];
+    *paramSetLen = nextStartCode - data;
+    if (*paramSetLen == 0) {
+        ALOGE("Param set is malformed, since its length is 0");
+        return NULL;
+    }
+
+    AVCParamSet paramSet(*paramSetLen, data);
+    if (type == kNalUnitTypeSeqParamSet) {
+        if (*paramSetLen < 4) {
+            ALOGE("Seq parameter set malformed");
+            return NULL;
+        }
+        if (pC->mSeqParamSets.empty()) {
+            pC->mProfileIdc = data[1];
+            pC->mProfileCompatible = data[2];
+            pC->mLevelIdc = data[3];
+        } else {
+            if (pC->mProfileIdc != data[1] ||
+                pC->mProfileCompatible != data[2] ||
+                pC->mLevelIdc != data[3]) {
+                ALOGV("Inconsistent profile/level found in seq parameter sets");
+                return NULL;
+            }
+        }
+        pC->mSeqParamSets.push_back(paramSet);
+    } else {
+        pC->mPicParamSets.push_back(paramSet);
+    }
+    return nextStartCode;
+}
+
+status_t buildAVCCodecSpecificData(uint8_t **pOutputData, size_t *pOutputSize,
+        const uint8_t *data, size_t size, MetaData *param)
+{
+    //ALOGV("buildAVCCodecSpecificData");
+
+    if ( (pOutputData == NULL) || (pOutputSize == NULL) ) {
+        ALOGE("output is invalid");
+        return ERROR_MALFORMED;
+    }
+
+    if (*pOutputData != NULL) {
+        ALOGE("Already have codec specific data");
+        return ERROR_MALFORMED;
+    }
+
+    if (size < 4) {
+        ALOGE("Codec specific data length too short: %d", size);
+        return ERROR_MALFORMED;
+    }
+
+    // Data is in the form of AVCCodecSpecificData
+    if (memcmp("\x00\x00\x00\x01", data, 4)) {
+        // 2 bytes for each of the parameter set length field
+        // plus the 7 bytes for the header
+        if (size < 4 + 7) {
+            ALOGE("Codec specific data length too short: %d", size);
+            return ERROR_MALFORMED;
+        }
+
+        *pOutputSize = size;
+        *pOutputData = (uint8_t*)malloc(size);
+        memcpy(*pOutputData, data, size);
+        return OK;
+    }
+
+    AVCCodecSpecificContext ctx;
+    uint8_t *outputData = NULL;
+    size_t outputSize = 0;
+
+    // Check if the data is valid
+    uint8_t type = kNalUnitTypeSeqParamSet;
+    bool gotSps = false;
+    bool gotPps = false;
+    const uint8_t *tmp = data;
+    const uint8_t *nextStartCode = data;
+    size_t bytesLeft = size;
+    size_t paramSetLen = 0;
+    outputSize = 0;
+    while (bytesLeft > 4 && !memcmp("\x00\x00\x00\x01", tmp, 4)) {
+        type = (*(tmp + 4)) & 0x1F;
+        if (type == kNalUnitTypeSeqParamSet) {
+            if (gotPps) {
+                ALOGE("SPS must come before PPS");
+                return ERROR_MALFORMED;
+            }
+            if (!gotSps) {
+                gotSps = true;
+            }
+            nextStartCode = parseParamSet(&ctx, tmp + 4, bytesLeft - 4, type,
+                &paramSetLen);
+        } else if (type == kNalUnitTypePicParamSet) {
+            if (!gotSps) {
+                ALOGE("SPS must come before PPS");
+                return ERROR_MALFORMED;
+            }
+            if (!gotPps) {
+                gotPps = true;
+            }
+            nextStartCode = parseParamSet(&ctx, tmp + 4, bytesLeft - 4, type,
+                &paramSetLen);
+        } else {
+            ALOGE("Only SPS and PPS Nal units are expected");
+            return ERROR_MALFORMED;
+        }
+
+        if (nextStartCode == NULL) {
+            return ERROR_MALFORMED;
+        }
+
+        // Move on to find the next parameter set
+        bytesLeft -= nextStartCode - tmp;
+        tmp = nextStartCode;
+        outputSize += (2 + paramSetLen);
+
+        if (gotSps && gotPps) {
+            break;
+        }
+    }
+
+    {
+        // Check on the number of seq parameter sets
+        size_t nSeqParamSets = ctx.mSeqParamSets.size();
+        if (nSeqParamSets == 0) {
+            ALOGE("Could not find sequence parameter set");
+            return ERROR_MALFORMED;
+        }
+
+        if (nSeqParamSets > 0x1F) {
+            ALOGE("Too many seq parameter sets (%d) found", nSeqParamSets);
+            return ERROR_MALFORMED;
+        }
+    }
+
+    {
+        // Check on the number of pic parameter sets
+        size_t nPicParamSets = ctx.mPicParamSets.size();
+        if (nPicParamSets == 0) {
+            ALOGE("Could not find picture parameter set");
+            return ERROR_MALFORMED;
+        }
+        if (nPicParamSets > 0xFF) {
+            ALOGE("Too many pic parameter sets (%d) found", nPicParamSets);
+            return ERROR_MALFORMED;
+        }
+    }
+
+    // ISO 14496-15: AVC file format
+    outputSize += 7;  // 7 more bytes in the header
+    outputData = (uint8_t *)malloc(outputSize);
+    uint8_t *header = outputData;
+    header[0] = 1;                      // version
+    header[1] = ctx.mProfileIdc;        // profile indication
+    header[2] = ctx.mProfileCompatible; // profile compatibility
+    header[3] = ctx.mLevelIdc;
+
+    // 6-bit '111111' followed by 2-bit lengthSizeMinusOne
+    int32_t use2ByteNalLength = 0;
+    if (param &&
+        param->findInt32(kKey2ByteNalLength, &use2ByteNalLength) &&
+        use2ByteNalLength) {
+        header[4] = 0xfc | 1;  // length size == 2 bytes
+    } else {
+        header[4] = 0xfc | 3;  // length size == 4 bytes
+    }
+
+    // 3-bit '111' followed by 5-bit numSequenceParameterSets
+    int nSequenceParamSets = ctx.mSeqParamSets.size();
+    header[5] = 0xe0 | nSequenceParamSets;
+    header += 6;
+    for (List<AVCParamSet>::iterator it = ctx.mSeqParamSets.begin();
+         it != ctx.mSeqParamSets.end(); ++it) {
+        // 16-bit sequence parameter set length
+        uint16_t seqParamSetLength = it->mLength;
+        header[0] = seqParamSetLength >> 8;
+        header[1] = seqParamSetLength & 0xff;
+        //ALOGE("### SPS %d %d %d", seqParamSetLength, header[0], header[1]);
+
+        // SPS NAL unit (sequence parameter length bytes)
+        memcpy(&header[2], it->mData, seqParamSetLength);
+        header += (2 + seqParamSetLength);
+    }
+
+    // 8-bit nPictureParameterSets
+    int nPictureParamSets = ctx.mPicParamSets.size();
+    header[0] = nPictureParamSets;
+    header += 1;
+    for (List<AVCParamSet>::iterator it = ctx.mPicParamSets.begin();
+         it != ctx.mPicParamSets.end(); ++it) {
+        // 16-bit picture parameter set length
+        uint16_t picParamSetLength = it->mLength;
+        header[0] = picParamSetLength >> 8;
+        header[1] = picParamSetLength & 0xff;
+        //ALOGE("### PPS %d %d %d", picParamSetLength, header[0], header[1]);
+
+        // PPS Nal unit (picture parameter set length bytes)
+        memcpy(&header[2], it->mData, picParamSetLength);
+        header += (2 + picParamSetLength);
+    }
+
+    *pOutputSize = outputSize;
+    *pOutputData = outputData;
+    return OK;
+}
+
+status_t removeAVCCodecSpecificData(uint8_t **pOutputData, size_t *pOutputSize,
+        const uint8_t *data, size_t size, MetaData *param)
+{
+    LOGV("removeAVCCodecSpecificData begin");
+    LOGV("Inputdataaddr = %p, Inputsize = %d", data, size);
+    if ( (pOutputData == NULL) || (pOutputSize == NULL) ) {
+        LOGE("output is invalid");
+        return ERROR_MALFORMED;
+    }
+
+    if (size < 4) {
+        LOGE("Codec specific data length too short: %d", size);
+        return ERROR_MALFORMED;
+    }
+
+    AVCCodecSpecificContext ctx;
+    uint8_t *outputData = NULL;
+    size_t outputSize = 0;
+
+    // Check if the data is valid
+    uint8_t type = kNalUnitTypeSeqParamSet;
+    bool gotSps = false;
+    bool gotPps = false;
+    const uint8_t *tmp = data;
+    const uint8_t *nextStartCode = data;
+    size_t bytesLeft = size;
+    size_t paramSetLen = 0;
+    outputSize = 0;
+
+    while (bytesLeft > 4 && !memcmp("\x00\x00\x00\x01", tmp, 4)) {
+        type = (*(tmp + 4)) & 0x1F;
+        if (type == kNalUnitTypeSeqParamSet) {
+            if (gotPps) {
+                LOGE("SPS must come before PPS");
+                return ERROR_MALFORMED;
+            }
+            if (!gotSps) {
+                gotSps = true;
+            }
+            nextStartCode = parseParamSet(&ctx, tmp + 4, bytesLeft - 4, type,
+                &paramSetLen);
+        } else if (type == kNalUnitTypePicParamSet) {
+            if (!gotSps) {
+                LOGE("SPS must come before PPS");
+                return ERROR_MALFORMED;
+            }
+            if (!gotPps) {
+                gotPps = true;
+            }
+            nextStartCode = parseParamSet(&ctx, tmp + 4, bytesLeft - 4, type,
+                &paramSetLen);
+        } else {
LOGE("Only SPS and PPS Nal units are expected"); + return ERROR_MALFORMED; + } + if (nextStartCode == NULL) { + return ERROR_MALFORMED; + } + bytesLeft -= nextStartCode - tmp; + tmp = nextStartCode; + outputSize += (4 + paramSetLen); + LOGV("DSI size is %d!",outputSize); + if(gotSps && gotPps) + { + break; + } + } + *pOutputData = (uint8_t *)(data + outputSize); + *pOutputSize = outputSize; + LOGV("Outputdataaddr = %p,Outputsize = %d", *pOutputData, *pOutputSize); + LOGV("removeAVCCodecSpecificData end"); + return OK; +} +}// namespace android diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h new file mode 100644 index 0000000..81acf17 --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h @@ -0,0 +1,105 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** +************************************************************************* +* @file VideoEditorUtils.cpp +* @brief StageFright shell Utilities +************************************************************************* +*/ +#ifndef ANDROID_UTILS_H_ +#define ANDROID_UTILS_H_ + +/******************* + * HEADERS * + *******************/ + +#include "M4OSA_Debug.h" + +#include "utils/Log.h" +#include +#include +#include +#include + +/** + ************************************************************************* + * VIDEOEDITOR_CHECK(test, errCode) + * @note This macro displays an error message and goes to function cleanUp label + * if the test fails. + ************************************************************************* + */ +#define VIDEOEDITOR_CHECK(test, errCode) \ +{ \ + if( !(test) ) { \ + ALOGV("!!! %s (L%d) check failed : " #test ", yields error 0x%.8x", \ + __FILE__, __LINE__, errCode); \ + err = (errCode); \ + goto cleanUp; \ + } \ +} + +/** + ************************************************************************* + * SAFE_FREE(p) + * @note This macro calls free and makes sure the pointer is set to NULL. + ************************************************************************* + */ +#define SAFE_FREE(p) \ +{ \ + if(M4OSA_NULL != (p)) { \ + free((p)) ; \ + (p) = M4OSA_NULL ; \ + } \ +} + +/** + ************************************************************************* + * SAFE_MALLOC(p, type, count, comment) + * @note This macro allocates a buffer, checks for success and fills the buffer + * with 0. 
diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h
new file mode 100644
index 0000000..81acf17
--- /dev/null
+++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+*************************************************************************
+* @file IntelVideoEditorUtils.h
+* @brief StageFright shell Utilities
+*************************************************************************
+*/
+#ifndef ANDROID_UTILS_H_
+#define ANDROID_UTILS_H_
+
+/*******************
+ *     HEADERS     *
+ *******************/
+
+#include "M4OSA_Debug.h"
+
+#include "utils/Log.h"
+#include
+#include
+#include
+#include
+
+/**
+ *************************************************************************
+ * VIDEOEDITOR_CHECK(test, errCode)
+ * @note This macro displays an error message and goes to function cleanUp label
+ *       if the test fails.
+ *************************************************************************
+ */
+#define VIDEOEDITOR_CHECK(test, errCode) \
+{ \
+    if( !(test) ) { \
+        ALOGV("!!! %s (L%d) check failed : " #test ", yields error 0x%.8x", \
+            __FILE__, __LINE__, errCode); \
+        err = (errCode); \
+        goto cleanUp; \
+    } \
+}
+
+/**
+ *************************************************************************
+ * SAFE_FREE(p)
+ * @note This macro calls free and makes sure the pointer is set to NULL.
+ *************************************************************************
+ */
+#define SAFE_FREE(p) \
+{ \
+    if(M4OSA_NULL != (p)) { \
+        free((p)) ; \
+        (p) = M4OSA_NULL ; \
+    } \
+}
+
+/**
+ *************************************************************************
+ * SAFE_MALLOC(p, type, count, comment)
+ * @note This macro allocates a buffer, checks for success and fills the buffer
+ *       with 0.
+ *************************************************************************
+ */
+#define SAFE_MALLOC(p, type, count, comment) \
+{ \
+    (p) = (type*)M4OSA_32bitAlignedMalloc(sizeof(type)*(count), 0xFF,(M4OSA_Char*)comment);\
+    VIDEOEDITOR_CHECK(M4OSA_NULL != (p), M4ERR_ALLOC); \
+    memset((void *)(p), 0,sizeof(type)*(count)); \
+}
+
+
+    /********************
+     *    UTILITIES     *
+     ********************/
+
+
+namespace android {
+
+/*--------------------------*/
+/* DISPLAY METADATA CONTENT */
+/*--------------------------*/
+void displayMetaData(const sp<MetaData> meta);
+
+// Build the AVC codec specific info from the StageFright encoders output
+status_t buildAVCCodecSpecificData(uint8_t **outputData, size_t *outputSize,
+    const uint8_t *data, size_t size, MetaData *param);
+
+// Remove the AVC codec specific info from the StageFright encoders output
+status_t removeAVCCodecSpecificData(uint8_t **outputData, size_t *outputSize,
+    const uint8_t *data, size_t size, MetaData *param);
+}//namespace android
+
+
+#endif //ANDROID_UTILS_H_
diff --git a/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp b/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp
new file mode 100644
index 0000000..acc8268
--- /dev/null
+++ b/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaBufferPuller"
+#include
+
+#include
+#include
+#include
+#include "MediaBufferPuller.h"
+
+namespace android {
+
+
+MediaBufferPuller::MediaBufferPuller(const sp<MediaSource>& source)
+    : mSource(source),
+      mAskToStart(false),
+      mAskToStop(false),
+      mAcquireStopped(false),
+      mReleaseStopped(false),
+      mSourceError(OK) {
+
+    androidCreateThread(acquireThreadStart, this);
+    androidCreateThread(releaseThreadStart, this);
+}
+
+MediaBufferPuller::~MediaBufferPuller() {
+    stop();
+}
+
+bool MediaBufferPuller::hasMediaSourceReturnedError() const {
+    Mutex::Autolock autolock(mLock);
+    return mSourceError != OK;
+}
+void MediaBufferPuller::start() {
+    Mutex::Autolock autolock(mLock);
+    mAskToStart = true;
+    mAcquireCond.signal();
+    mReleaseCond.signal();
+}
+
+void MediaBufferPuller::stop() {
+    Mutex::Autolock autolock(mLock);
+    mAskToStop = true;
+    mAcquireCond.signal();
+    mReleaseCond.signal();
+    while (!mAcquireStopped || !mReleaseStopped) {
+        mUserCond.wait(mLock);
+    }
+
+    // Release remaining buffers
+    for (size_t i = 0; i < mBuffers.size(); i++) {
+        mBuffers.itemAt(i)->release();
+    }
+
+    for (size_t i = 0; i < mReleaseBuffers.size(); i++) {
+        mReleaseBuffers.itemAt(i)->release();
+    }
+
+    mBuffers.clear();
+    mReleaseBuffers.clear();
+}
+
+MediaBuffer* MediaBufferPuller::getBufferNonBlocking() {
+    Mutex::Autolock autolock(mLock);
+    if (mBuffers.empty()) {
+        return NULL;
+    } else {
+        MediaBuffer* b = mBuffers.itemAt(0);
+        mBuffers.removeAt(0);
+        return b;
+    }
+}
+
+MediaBuffer* MediaBufferPuller::getBufferBlocking() {
+    Mutex::Autolock autolock(mLock);
+    while (mBuffers.empty() && !mAcquireStopped) {
+        mUserCond.wait(mLock);
+    }
+
+    if (mBuffers.empty()) {
+        return NULL;
+    } else {
+        MediaBuffer* b = mBuffers.itemAt(0);
+        mBuffers.removeAt(0);
+        return b;
+    }
+}
+
+void MediaBufferPuller::putBuffer(MediaBuffer* buffer) {
+    Mutex::Autolock autolock(mLock);
+    mReleaseBuffers.push(buffer);
+    mReleaseCond.signal();
+}
+
+int MediaBufferPuller::acquireThreadStart(void* arg) {
+    MediaBufferPuller* self = (MediaBufferPuller*)arg;
+    self->acquireThreadFunc();
+    return 0;
+}
+
+int MediaBufferPuller::releaseThreadStart(void* arg) {
+    MediaBufferPuller* self = (MediaBufferPuller*)arg;
+    self->releaseThreadFunc();
+    return 0;
+}
+
+void MediaBufferPuller::acquireThreadFunc() {
+    mLock.lock();
+
+    // Wait for the start signal
+    while (!mAskToStart && !mAskToStop) {
+        mAcquireCond.wait(mLock);
+    }
+
+    // Loop until we are asked to stop, or there is nothing more to read
+    while (!mAskToStop) {
+        MediaBuffer* pBuffer;
+        mLock.unlock();
+        status_t result = mSource->read(&pBuffer, NULL);
+        mLock.lock();
+        mSourceError = result;
+        if (result != OK) {
+            break;
+        }
+        mBuffers.push(pBuffer);
+        mUserCond.signal();
+    }
+
+    mAcquireStopped = true;
+    mUserCond.signal();
+    mLock.unlock();
+}
+
+void MediaBufferPuller::releaseThreadFunc() {
+    mLock.lock();
+
+    // Wait for the start signal
+    while (!mAskToStart && !mAskToStop) {
+        mReleaseCond.wait(mLock);
+    }
+
+    // Loop until we are asked to stop
+    while (1) {
+        if (mReleaseBuffers.empty()) {
+            if (mAskToStop) {
+                break;
+            } else {
+                mReleaseCond.wait(mLock);
+                continue;
+            }
+        }
+        MediaBuffer* pBuffer = mReleaseBuffers.itemAt(0);
+        mReleaseBuffers.removeAt(0);
+        mLock.unlock();
+        pBuffer->release();
+        mLock.lock();
+    }
+
+    mReleaseStopped = true;
+    mUserCond.signal();
+    mLock.unlock();
+}
+
+};  // namespace android
diff --git a/frameworks/videoedit/stagefrightshells/MediaBufferPuller.h b/frameworks/videoedit/stagefrightshells/MediaBufferPuller.h
new file mode 100644
index 0000000..ed72a53
--- /dev/null
+++ b/frameworks/videoedit/stagefrightshells/MediaBufferPuller.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MEDIA_BUFFER_PULLER_H
+#define _MEDIA_BUFFER_PULLER_H
+
+#include
+#include
+
+
+namespace android {
+
+struct MediaSource;
+struct MediaBuffer;
+
+/*
+ * An object of this class can pull a list of media buffers
+ * from a MediaSource repeatedly. The user can then get the
+ * buffers from that list.
+ */
+struct MediaBufferPuller {
+public:
+    MediaBufferPuller(const sp<MediaSource>& source);
+    ~MediaBufferPuller();
+
+    // Start to build up the list of the buffers.
+    void start();
+
+    // Release the list of the available buffers, and stop
+    // pulling buffers from the MediaSource.
+    void stop();
+
+    // Get a buffer from the list. If there is no buffer available
+    // at the time this method is called, it blocks waiting for
+    // a buffer to become available or until stop() is called.
+    MediaBuffer* getBufferBlocking();
+
+    // Get a buffer from the list. If there is no buffer available
+    // at the time this method is called, NULL is returned.
+    MediaBuffer* getBufferNonBlocking();
+
+    // Add a buffer to the end of the list of available media buffers
+    void putBuffer(MediaBuffer* buffer);
+
+    // Check whether the source returned an error or not.
+    bool hasMediaSourceReturnedError() const;
+
+private:
+    static int acquireThreadStart(void* arg);
+    void acquireThreadFunc();
+
+    static int releaseThreadStart(void* arg);
+    void releaseThreadFunc();
+
+    sp<MediaSource> mSource;
+    Vector<MediaBuffer*> mBuffers;
+    Vector<MediaBuffer*> mReleaseBuffers;
+
+    mutable Mutex mLock;
+    Condition mUserCond;     // for the user of this class
+    Condition mAcquireCond;  // for the acquire thread
+    Condition mReleaseCond;  // for the release thread
+
+    bool mAskToStart;      // Asks the threads to start
+    bool mAskToStop;       // Asks the threads to stop
+    bool mAcquireStopped;  // The acquire thread has stopped
+    bool mReleaseStopped;  // The release thread has stopped
+    status_t mSourceError; // Error returned by MediaSource read
+
+    // Don't call me!
+    MediaBufferPuller(const MediaBufferPuller&);
+    MediaBufferPuller& operator=(const MediaBufferPuller&);
+};
+
+} // namespace android
+
+#endif // _MEDIA_BUFFER_PULLER_H
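The intended calling pattern for MediaBufferPuller, pieced together from the declarations above; the decoder source and the consume step are assumptions, not code from this tree:

    // Sketch only, not patch content.
    void drainSource(const sp<MediaSource>& decoder) {
        MediaBufferPuller puller(decoder);  // constructor spawns both worker threads
        puller.start();                     // lets the acquire thread begin read()ing

        while (!puller.hasMediaSourceReturnedError()) {
            MediaBuffer* buf = puller.getBufferBlocking();  // waits for a buffer
            if (buf == NULL) break;         // acquire thread stopped (EOS or error)
            // ... consume buf->data() / buf->range_length() ...
            puller.putBuffer(buf);          // released off-thread by the release thread
        }
        puller.stop();                      // waits for both threads, frees leftovers
    }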
diff --git a/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp b/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp
new file mode 100644
index 0000000..5026073
--- /dev/null
+++ b/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp
@@ -0,0 +1,2030 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+*************************************************************************
+* @file VideoEditor3gpReader.cpp
+* @brief StageFright shell 3GP Reader
+*************************************************************************
+*/
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "VIDEOEDITOR_3GPREADER"
+
+/**
+ * HEADERS
+ *
+ */
+#define VIDEOEDITOR_BITSTREAM_PARSER
+
+#include "M4OSA_Debug.h"
+#include "VideoEditor3gpReader.h"
+#include "M4SYS_AccessUnit.h"
+#include "VideoEditorUtils.h"
+#include "M4READER_3gpCom.h"
+#include "M4_Common.h"
+#include "M4OSA_FileWriter.h"
+
+#ifdef VIDEOEDITOR_BITSTREAM_PARSER
+#include "M4OSA_CoreID.h"
+#include "M4OSA_Error.h"
+#include "M4OSA_Memory.h"
+#include "M4_Utils.h"
+#endif
+
+#include "ESDS.h"
+#include "utils/Log.h"
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+/**
+ * SOURCE CLASS
+ */
+namespace android {
+/**
+ * ENGINE INTERFACE
+ */
+
+/**
+ ************************************************************************
+ * @brief   Array of AMR NB/WB bitrates
+ * @note    Array to match the mode and the bit rate
+ ************************************************************************
+*/
+const M4OSA_UInt32 VideoEditor3gpReader_AmrBitRate [2 /* 8kHz / 16kHz */]
+                                                   [9 /* the bitrate mode */] =
+{
+    {4750, 5150, 5900,  6700,  7400,  7950,  10200, 12200, 0},
+    {6600, 8850, 12650, 14250, 15850, 18250, 19850, 23050, 23850}
+};
+
+/**
+ *******************************************************************************
+ * structure VideoEditor3gpReader_Context
+ * @brief: This structure defines the context of the StageFright 3GP shell Reader
+ *******************************************************************************
+*/
+typedef struct {
+    sp<DataSource>     mDataSource;
+    sp<MediaExtractor> mExtractor;
+    sp<MediaSource>    mAudioSource;
+    sp<MediaSource>    mVideoSource;
+    M4_StreamHandler*  mAudioStreamHandler;
+    M4_StreamHandler*  mVideoStreamHandler;
+    M4SYS_AccessUnit   mAudioAu;
+    M4SYS_AccessUnit   mVideoAu;
+    M4OSA_Time         mMaxDuration;
+    int64_t            mFileSize;
+    M4_StreamType      mStreamType;
+    M4OSA_UInt32       mStreamId;
+    int32_t            mTracks;
+    int32_t            mCurrTrack;
+    M4OSA_Bool         mAudioSeeking;
+    M4OSA_Time         mAudioSeekTime;
+    M4OSA_Bool         mVideoSeeking;
+    M4OSA_Time         mVideoSeekTime;
+
+} VideoEditor3gpReader_Context;
+
+#ifdef VIDEOEDITOR_BITSTREAM_PARSER
+/**
+ ************************************************************************
+ * structure VideoEditor3gpReader_BitStreamParserContext
+ * @brief Internal BitStreamParser context
+ ************************************************************************
+*/
+typedef struct {
+    M4OSA_UInt32* mPbitStream;  /**< bitstream pointer (32bits aligned) */
+    M4OSA_Int32   mSize;        /**< bitstream size in bytes */
+    M4OSA_Int32   mIndex;       /**< byte index */
+    M4OSA_Int32   mBitIndex;    /**< bit index */
+    M4OSA_Int32   mStructSize;  /**< size of structure */
+} VideoEditor3gpReader_BitStreamParserContext;
+
+/**
+ *******************************************************************************
+ * @brief Allocates the context and initializes internal data.
+ * @param pContext (OUT) Pointer to the BitStreamParser context to create.
+ * @param bitStream A pointer to the bitstream
+ * @param size The size of the bitstream in bytes
+ *******************************************************************************
+*/
+static void VideoEditor3gpReader_BitStreamParserInit(void** pContext,
+        void* pBitStream, M4OSA_Int32 size) {
+    VideoEditor3gpReader_BitStreamParserContext* pStreamContext;
+
+    *pContext=M4OSA_NULL;
+    pStreamContext = (VideoEditor3gpReader_BitStreamParserContext*)M4OSA_32bitAlignedMalloc(
+        sizeof(VideoEditor3gpReader_BitStreamParserContext), M4READER_3GP,
+        (M4OSA_Char*)"3GP BitStreamParser Context");
+    if (M4OSA_NULL == pStreamContext) {
+        return;
+    }
+    pStreamContext->mPbitStream=(M4OSA_UInt32*)pBitStream;
+    pStreamContext->mSize=size;
+    pStreamContext->mIndex=0;
+    pStreamContext->mBitIndex=0;
+    pStreamContext->mStructSize =
+        sizeof(VideoEditor3gpReader_BitStreamParserContext);
+
+    *pContext=pStreamContext;
+}
+/**
+ **********************************************************************
+ * @brief Clean up context
+ * @param pContext (IN/OUT) BitStreamParser context.
+ **********************************************************************
+*/
+static void VideoEditor3gpReader_BitStreamParserCleanUp(void* pContext) {
+    free((M4OSA_Int32*)pContext);
+}
+/**
+ *****************************************************************************
+ * @brief Read the next bits in the bitstream.
+ * @note The function does not update the bitstream pointer.
+ * @param pContext (IN/OUT) BitStreamParser context.
+ * @param length (IN) The number of bits to extract from the bitstream
+ * @return the read bits
+ *****************************************************************************
+*/
+static M4OSA_UInt32 VideoEditor3gpReader_BitStreamParserShowBits(void* pContext,
+        M4OSA_Int32 length) {
+    VideoEditor3gpReader_BitStreamParserContext* pStreamContext =
+        (VideoEditor3gpReader_BitStreamParserContext*)pContext;
+
+    M4OSA_UInt32 u_mask;
+    M4OSA_UInt32 retval;
+    M4OSA_Int32 i_ovf;
+
+    M4OSA_DEBUG_IF1((M4OSA_NULL==pStreamContext), 0,
+        "VideoEditor3gpReader_BitStreamParserShowBits: invalid context pointer");
+
+    retval=(M4OSA_UInt32)GET_MEMORY32(pStreamContext->\
+        mPbitStream[ pStreamContext->mIndex ]);
+    i_ovf = pStreamContext->mBitIndex + length - 32;
+    u_mask = (length >= 32) ? 0xffffffff: (1 << length) - 1;
+
+    /* do we have enough bits available in the current word (32 bits)? */
+    if (i_ovf <= 0) {
+        retval=(retval >> (- i_ovf)) & u_mask;
+    } else {
+        M4OSA_UInt32 u_nextword = (M4OSA_UInt32)GET_MEMORY32(
+            pStreamContext->mPbitStream[ pStreamContext->mIndex + 1 ]);
+        M4OSA_UInt32 u_msb_mask, u_msb_value, u_lsb_mask, u_lsb_value;
+
+        u_msb_mask = ((1 << (32 - pStreamContext->mBitIndex)) - 1) << i_ovf;
+        u_msb_value = retval << i_ovf;
+        u_lsb_mask = (1 << i_ovf) - 1;
+        u_lsb_value = u_nextword >> (32 - i_ovf);
+        retval= (u_msb_value & u_msb_mask ) | (u_lsb_value & u_lsb_mask);
+    }
+    /* return the bits...*/
+    return retval;
+}
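ShowBits() peeks without advancing the cursor, so a caller can inspect a field before committing to consume it with FlushBits(). A hedged sketch of the pairing (the descriptor-tag value is an arbitrary example):

    // Sketch only, not patch content; 'parser' comes from
    // VideoEditor3gpReader_BitStreamParserInit() above.
    static void peekThenConsume(void* parser) {
        M4OSA_UInt32 tag = VideoEditor3gpReader_BitStreamParserShowBits(parser, 8);
        if (tag == 0x04) {  // e.g. an MPEG-4 DecoderConfigDescriptor tag
            VideoEditor3gpReader_BitStreamParserFlushBits(parser, 8);  // commit the 8 bits
            // The next ShowBits() continues 8 bits further on; reads that straddle
            // a 32-bit word boundary take the i_ovf path in ShowBits() above.
        }
    }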
+/**
+ ************************************************************************
+ * @brief Advance the bitstream pointer by the given number of bits.
+ * @param pContext (IN/OUT) BitStreamParser context.
+ * @param length (IN) The number of bits to shift the bitstream
+ ************************************************************************
+*/
+static void VideoEditor3gpReader_BitStreamParserFlushBits(void* pContext,
+        M4OSA_Int32 length) {
+    VideoEditor3gpReader_BitStreamParserContext* pStreamContext=(
+        VideoEditor3gpReader_BitStreamParserContext*)pContext;
+    M4OSA_Int32 val;
+
+    if (M4OSA_NULL == pStreamContext) {
+        return;
+    }
+    val=pStreamContext->mBitIndex + length;
+    /* update the bits...*/
+    pStreamContext->mBitIndex += length;
+
+    if (val - 32 >= 0) {
+        /* update the bits...*/
+        pStreamContext->mBitIndex -= 32;
+        /* update the words*/
+        pStreamContext->mIndex++;
+    }
+}
+
+static M4OSA_UInt32 VideoEditor3gpReader_BitStreamParserGetBits(
+        void* pContext, M4OSA_Int32 bitPos, M4OSA_Int32 bitLength) {
+    VideoEditor3gpReader_BitStreamParserContext* pStreamContext =
+        (VideoEditor3gpReader_BitStreamParserContext*)pContext;
+
+    M4OSA_Int32 bitLocation, bitIndex;
+    M4OSA_UInt32 retval=0;
+
+    M4OSA_DEBUG_IF1((M4OSA_NULL==pStreamContext), 0,
+        "VideoEditor3gpReader_BitStreamParserGetBits: invalid context pointer");
+
+    /* computes the word location*/
+    bitLocation=bitPos/32;
+    bitIndex=(bitPos) % 32;
+
+    if (bitLocation < pStreamContext->mSize) {
+        M4OSA_UInt32 u_mask;
+        M4OSA_Int32 i_ovf = bitIndex + bitLength - 32;
+        retval=(M4OSA_UInt32)GET_MEMORY32(
+            pStreamContext->mPbitStream[ bitLocation ]);
+
+        u_mask = (bitLength >= 32) ? 0xffffffff: (1 << bitLength) - 1;
+
+        if (i_ovf <= 0) {
+            retval=(retval >> (- i_ovf)) & u_mask;
+        } else {
+            M4OSA_UInt32 u_nextword = (M4OSA_UInt32)GET_MEMORY32(
+                pStreamContext->mPbitStream[ bitLocation + 1 ]);
+            M4OSA_UInt32 u_msb_mask, u_msb_value, u_lsb_mask, u_lsb_value;
+
+            u_msb_mask = ((1 << (32 - bitIndex)) - 1) << i_ovf;
+            u_msb_value = retval << i_ovf;
+            u_lsb_mask = (1 << i_ovf) - 1;
+            u_lsb_value = u_nextword >> (32 - i_ovf);
+            retval= (u_msb_value & u_msb_mask ) | (u_lsb_value & u_lsb_mask);
+        }
+    }
+    return retval;
+}
+
+static void VideoEditor3gpReader_BitStreamParserRestart(void* pContext) {
+    VideoEditor3gpReader_BitStreamParserContext* pStreamContext =
+        (VideoEditor3gpReader_BitStreamParserContext*)pContext;
+
+    if (M4OSA_NULL == pStreamContext) {
+        return;
+    }
+    /* resets the bitstream pointers*/
+    pStreamContext->mIndex=0;
+    pStreamContext->mBitIndex=0;
+}
+/**
+ *******************************************************************************
+ * @brief Get a pointer to the current byte pointed by the bitstream pointer.
+ * @note It should be used carefully as the pointer is in the bitstream itself
+ * and no copy is made.
+ * @param pContext (IN/OUT) BitStreamParser context.
+ * @return Pointer to the current location in the bitstream
+ *******************************************************************************
+*/
+static M4OSA_UInt8* VideoEditor3gpReader_GetCurrentbitStreamPointer(
+        void* pContext) {
+    VideoEditor3gpReader_BitStreamParserContext* pStreamContext =
+        (VideoEditor3gpReader_BitStreamParserContext*)pContext;
+    M4OSA_DEBUG_IF1((M4OSA_NULL==pStreamContext), 0, "invalid context pointer");
+
+    return (M4OSA_UInt8*)((M4OSA_UInt8*)pStreamContext->mPbitStream + \
+        pStreamContext->mIndex * sizeof(M4OSA_UInt32) + \
+        pStreamContext->mBitIndex/8) ;
+}
+
+static M4OSA_Int32 VideoEditor3gpReader_BitStreamParserGetSize(void* pContext) {
+    VideoEditor3gpReader_BitStreamParserContext* pStreamContext =
+        (VideoEditor3gpReader_BitStreamParserContext*)pContext;
+    M4OSA_DEBUG_IF1((M4OSA_NULL==pStreamContext), 0, "invalid context pointer");
+
+    return pStreamContext->mSize;
+}
+
+
+static void VideoEditor3gpReader_MPEG4BitStreamParserInit(void** pContext,
+        void* pBitStream, M4OSA_Int32 size) {
+    VideoEditor3gpReader_BitStreamParserInit(pContext, pBitStream, size);
+}
+static M4OSA_Int32 VideoEditor3gpReader_GetMpegLengthFromInteger(void* pContext,
+        M4OSA_UInt32 val) {
+    M4OSA_UInt32 length=0;
+    M4OSA_UInt32 numBytes=0;
+    M4OSA_UInt32 b=0;
+
+    M4OSA_DEBUG_IF1((M4OSA_NULL==pContext), 0, "invalid context pointer");
+
+    /* the length is encoded as a sequence of bytes. The highest bit is used
+       to indicate that the length continues on the next byte.
+
+       The length can be: 0x80 0x80 0x80 0x22
+       or just 0x22 (highest bit not set)
+
+    */
+
+    do {
+        b=(val & ((0xff)<< (8 * numBytes)))>> (8 * numBytes);
+        length=(length << 7) | (b & 0x7f);
+        numBytes++;
+    } while ((b & 0x80) && numBytes < 4);
+
+    return length;
+}
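A worked example of the SDL length encoding unpacked above: each byte contributes 7 payload bits and the high bit flags a continuation, so 0x80 0x80 0x80 0x22 and a bare 0x22 both decode to 0x22. The sketch below mirrors the loop in GetMpegLengthFromInteger():

    // Sketch only, not patch content.
    static M4OSA_UInt32 sdlLengthExample(void) {
        const M4OSA_UInt8 bytes[] = { 0x80, 0x80, 0x80, 0x22 };  // same value as plain 0x22
        M4OSA_UInt32 length = 0;
        for (M4OSA_UInt32 i = 0; i < sizeof(bytes); i++) {
            length = (length << 7) | (bytes[i] & 0x7f);  // append 7 payload bits
            if (!(bytes[i] & 0x80)) break;               // high bit clear: last byte
        }
        return length;  // 0x22
    }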
+/**
+ *******************************************************************************
+ * @brief Decode an MPEG4 Systems descriptor size from an encoded SDL size data
+ * @note The value is read from the current bitstream location.
+ * @param pContext (IN/OUT) BitStreamParser context.
+ * @return Size in a human readable form
+ *******************************************************************************
+*/
+static M4OSA_Int32 VideoEditor3gpReader_GetMpegLengthFromStream(void* pContext){
+    M4OSA_UInt32 length=0;
+    M4OSA_UInt32 numBytes=0;
+    M4OSA_UInt32 b=0;
+
+    M4OSA_DEBUG_IF1((M4OSA_NULL==pContext), 0, "invalid context pointer");
+
+    /* the length is encoded as a sequence of bytes. The highest bit is used
+       to indicate that the length continues on the next byte.
+
+       The length can be: 0x80 0x80 0x80 0x22
+       or just 0x22 (highest bit not set)
+    */
+
+    do {
+        b=VideoEditor3gpReader_BitStreamParserShowBits(pContext, 8);
+        VideoEditor3gpReader_BitStreamParserFlushBits(pContext, 8);
+        length=(length << 7) | (b & 0x7f);
+        numBytes++;
+    } while ((b & 0x80) && numBytes < 4);
+
+    return length;
+}
+#endif /* VIDEOEDITOR_BITSTREAM_PARSER */
+/**
+************************************************************************
+* @brief create an instance of the 3gp reader
+ * @note allocates the context
+ *
+ * @param pContext: (OUT) pointer on a reader context
+ *
+ * @return M4NO_ERROR there is no error
+ * @return M4ERR_ALLOC a memory allocation has failed
+ * @return M4ERR_PARAMETER at least one parameter is not valid
+************************************************************************
+*/
+
+M4OSA_ERR VideoEditor3gpReader_create(M4OSA_Context *pContext) {
+    VideoEditor3gpReader_Context* pC = NULL;
+    M4OSA_ERR err = M4NO_ERROR;
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext , M4ERR_PARAMETER);
+
+    ALOGV("VideoEditor3gpReader_create begin");
+
+    /* Context allocation & initialization */
+    SAFE_MALLOC(pC, VideoEditor3gpReader_Context, 1, "VideoEditor3gpReader");
+
+    memset(pC, 0, sizeof(VideoEditor3gpReader_Context));
+
+    pC->mAudioStreamHandler = M4OSA_NULL;
+    pC->mAudioAu.dataAddress = M4OSA_NULL;
+    pC->mVideoStreamHandler = M4OSA_NULL;
+    pC->mVideoAu.dataAddress = M4OSA_NULL;
+
+    pC->mAudioSeeking = M4OSA_FALSE;
+    pC->mAudioSeekTime = 0;
+
+    pC->mVideoSeeking = M4OSA_FALSE;
+    pC->mVideoSeekTime = 0;
+
+    pC->mMaxDuration = 0;
+
+    *pContext=pC;
+
+cleanUp:
+    if ( M4NO_ERROR == err ) {
+        ALOGV("VideoEditor3gpReader_create no error");
+    } else {
+        ALOGV("VideoEditor3gpReader_create ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditor3gpReader_create end ");
+    return err;
+}
+
+/**
+**************************************************************************
+* @brief destroy the instance of the 3gp reader
+* @note after this call the context is invalid
+* @param context: (IN) Context of the reader
+* @return M4NO_ERROR there is no error
+* @return M4ERR_PARAMETER pContext parameter is not properly set
+**************************************************************************
+*/
+
+M4OSA_ERR VideoEditor3gpReader_destroy(M4OSA_Context pContext) {
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditor3gpReader_Context* pC = M4OSA_NULL;
+
+    ALOGV("VideoEditor3gpReader_destroy begin");
+
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+    pC = (VideoEditor3gpReader_Context*)pContext;
+
+    SAFE_FREE(pC->mAudioAu.dataAddress);
+    pC->mAudioAu.dataAddress = M4OSA_NULL;
+    SAFE_FREE(pC->mVideoAu.dataAddress);
+    pC->mVideoAu.dataAddress = M4OSA_NULL;
+    SAFE_FREE(pC);
+    pContext = M4OSA_NULL;
+
+cleanUp:
+    if ( M4NO_ERROR == err ) {
+        ALOGV("VideoEditor3gpReader_destroy no error");
+    } else {
+        ALOGV("VideoEditor3gpReader_destroy ERROR 0x%X", err);
+    }
+
+    ALOGV("VideoEditor3gpReader_destroy end ");
+    return err;
+}
+
+/**
+************************************************************************
+* @brief open the reader and initializes its created instance
+* @note this function opens the media file
+* @param context: (IN) Context of the reader
+* @param pFileDescriptor: (IN) Pointer to proprietary data identifying
+* the media to open
+* @return M4NO_ERROR there is no error
+* @return M4ERR_PARAMETER the context is NULL
+* @return M4ERR_UNSUPPORTED_MEDIA_TYPE
+* the media is DRM protected
+************************************************************************
+*/
+
+M4OSA_ERR VideoEditor3gpReader_open(M4OSA_Context pContext,
+        M4OSA_Void* pFileDescriptor) {
+    VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)pContext;
+    M4OSA_ERR err = M4NO_ERROR;
+
+    ALOGV("VideoEditor3gpReader_open start ");
+    M4OSA_DEBUG_IF1((M4OSA_NULL == pC), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_open: invalid context pointer");
+    M4OSA_DEBUG_IF1((M4OSA_NULL == pFileDescriptor), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_open: invalid pointer pFileDescriptor");
+
+    ALOGV("VideoEditor3gpReader_open Datasource start %s",
+        (char*)pFileDescriptor);
+    //pC->mDataSource = DataSource::CreateFromURI((char*)pFileDescriptor);
+    pC->mDataSource = new FileSource ((char*)pFileDescriptor);
+
+    if (pC->mDataSource == NULL) {
+        ALOGV("VideoEditor3gpReader_open Datasource error");
+        return M4ERR_PARAMETER;
+    }
+
+    pC->mExtractor = MediaExtractor::Create(pC->mDataSource,
+        MEDIA_MIMETYPE_CONTAINER_MPEG4);
+
+    if (pC->mExtractor == NULL) {
+        ALOGV("VideoEditor3gpReader_open extractor error");
+        return M4ERR_PARAMETER;
+    }
+
+    int32_t isDRMProtected = 0;
+    sp<MetaData> meta = pC->mExtractor->getMetaData();
+    meta->findInt32(kKeyIsDRM, &isDRMProtected);
+    if (isDRMProtected) {
+        ALOGV("VideoEditor3gpReader_open error - DRM Protected");
+        return M4ERR_UNSUPPORTED_MEDIA_TYPE;
+    }
+
+    ALOGV("VideoEditor3gpReader_open end ");
+    return err;
+}
+
+/**
+************************************************************************
+* @brief close the reader
+* @note close the 3GP file
+* @param context: (IN) Context of the reader
+* @return M4NO_ERROR there is no error
+* @return M4ERR_PARAMETER the context is NULL
+* @return M4ERR_BAD_CONTEXT provided context is not a valid one
+************************************************************************
+*/
+M4OSA_ERR VideoEditor3gpReader_close(M4OSA_Context context) {
+    VideoEditor3gpReader_Context *pC = (VideoEditor3gpReader_Context*)context;
+    M4READER_AudioSbrUserdata *pAudioSbrUserData;
+    M4_AccessUnit *pAU;
+    M4OSA_ERR err = M4NO_ERROR;
+
+    ALOGV("VideoEditor3gpReader_close begin");
+
+    M4OSA_DEBUG_IF1((M4OSA_NULL == pC), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_close: invalid context pointer");
+
+    if (pC->mAudioStreamHandler) {
+        ALOGV("VideoEditor3gpReader_close Audio");
+
+        if (M4OSA_NULL != pC->mAudioStreamHandler->m_pDecoderSpecificInfo) {
+            free(pC->mAudioStreamHandler->\
+                m_pDecoderSpecificInfo);
+            pC->mAudioStreamHandler->m_decoderSpecificInfoSize = 0;
+            pC->mAudioStreamHandler->m_pDecoderSpecificInfo = M4OSA_NULL;
+        }
+
+        if ((M4DA_StreamTypeAudioAac == pC->mAudioStreamHandler->m_streamType)
+            && (M4OSA_NULL != pC->mAudioStreamHandler->m_pUserData)) {
+            pAudioSbrUserData = (M4READER_AudioSbrUserdata*)(\
+                pC->mAudioStreamHandler->m_pUserData);
+
+            pAU = (M4_AccessUnit*)pAudioSbrUserData->m_pFirstAU;
+            if (M4OSA_NULL != pAU) {
+                free(pAU);
+            }
+
+            if (M4OSA_NULL != pAudioSbrUserData->m_pAacDecoderUserConfig) {
+                free(pAudioSbrUserData->\
+                    m_pAacDecoderUserConfig);
+            }
+            free(pAudioSbrUserData);
+            pC->mAudioStreamHandler->m_pUserData = M4OSA_NULL;
+        }
+
+        if (pC->mAudioStreamHandler->m_pESDSInfo != M4OSA_NULL) {
+            free(pC->mAudioStreamHandler->m_pESDSInfo);
+            pC->mAudioStreamHandler->m_pESDSInfo = M4OSA_NULL;
+            pC->mAudioStreamHandler->m_ESDSInfoSize = 0;
+        }
+        /* Finally destroy the stream handler */
+        free(pC->mAudioStreamHandler);
+        pC->mAudioStreamHandler = M4OSA_NULL;
+
+        pC->mAudioSource->stop();
+        pC->mAudioSource.clear();
+    }
+    if (pC->mVideoStreamHandler) {
+        ALOGV("VideoEditor3gpReader_close Video ");
+
+        if (M4OSA_NULL != pC->mVideoStreamHandler->m_pDecoderSpecificInfo) {
+            free(pC->mVideoStreamHandler->\
+                m_pDecoderSpecificInfo);
+            pC->mVideoStreamHandler->m_decoderSpecificInfoSize = 0;
+            pC->mVideoStreamHandler->m_pDecoderSpecificInfo = M4OSA_NULL;
+        }
+
+        if (M4OSA_NULL != pC->mVideoStreamHandler->m_pH264DecoderSpecificInfo) {
+            free(pC->mVideoStreamHandler->\
+                m_pH264DecoderSpecificInfo);
+            pC->mVideoStreamHandler->m_H264decoderSpecificInfoSize = 0;
+            pC->mVideoStreamHandler->m_pH264DecoderSpecificInfo = M4OSA_NULL;
+        }
+
+        if (pC->mVideoStreamHandler->m_pESDSInfo != M4OSA_NULL) {
+            free(pC->mVideoStreamHandler->m_pESDSInfo);
+            pC->mVideoStreamHandler->m_pESDSInfo = M4OSA_NULL;
+            pC->mVideoStreamHandler->m_ESDSInfoSize = 0;
+        }
+
+        /* Finally destroy the stream handler */
+        free(pC->mVideoStreamHandler);
+        pC->mVideoStreamHandler = M4OSA_NULL;
+
+        pC->mVideoSource->stop();
+        pC->mVideoSource.clear();
+    }
+    pC->mExtractor.clear();
+    pC->mDataSource.clear();
+
+    ALOGV("VideoEditor3gpReader_close end");
+    return err;
+}
+
+/**
+************************************************************************
+* @brief get an option from the 3gp reader
+* @note it allows the caller to retrieve a property value:
+*
+* @param context: (IN) Context of the reader
+* @param optionId: (IN) indicates the option to get
+* @param pValue: (OUT) pointer to structure or value (allocated
+* by user) where option is stored
+*
+* @return M4NO_ERROR there is no error
+* @return M4ERR_BAD_CONTEXT provided context is not a valid one
+* @return M4ERR_PARAMETER at least one parameter is not properly set
+* @return M4ERR_BAD_OPTION_ID when the option ID is not a valid one
+* @return M4ERR_VIDEO_NOT_H263 No video stream H263 in file.
+* @return M4ERR_NO_VIDEO_STREAM_RETRIEVED_YET
+* Function 3gpReader_getNextStreamHandler must be called before
+************************************************************************
+*/
+M4OSA_ERR VideoEditor3gpReader_getOption(M4OSA_Context context,
+        M4OSA_OptionID optionId, M4OSA_DataOption pValue) {
+    VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)context;
+    M4OSA_ERR err = M4NO_ERROR;
+
+    ALOGV("VideoEditor3gpReader_getOption begin %d", optionId);
+
+    M4OSA_DEBUG_IF1((M4OSA_NULL == pC), M4ERR_PARAMETER,
+        "invalid context pointer");
+    M4OSA_DEBUG_IF1((M4OSA_NULL == pValue), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_getOption: invalid pointer on value");
+
+    switch (optionId) {
+    case M4READER_kOptionID_Duration:
+        {
+            ALOGV("VideoEditor3gpReader_getOption duration %d", pC->mMaxDuration);
+            *(M4OSA_Time*)pValue = pC->mMaxDuration;
+        }
+        break;
+    case M4READER_kOptionID_Version:
+        /* not used */
+        ALOGV("VideoEditor3gpReader_getOption: M4READER_kOptionID_Version");
+        break;
+
+    case M4READER_kOptionID_Copyright:
+        /* not used */
+        ALOGV(">>>>>>>   M4READER_kOptionID_Copyright");
+        break;
+
+    case M4READER_kOptionID_CreationTime:
+        /* not used */
+        ALOGV("VideoEditor3gpReader_getOption M4READER_kOptionID_CreationTime");
+        break;
+
+    case M4READER_kOptionID_Bitrate:
+        {
+            M4OSA_UInt32* pBitrate = (M4OSA_UInt32*)pValue;
+
+            if (pC->mMaxDuration != 0) {
+                *pBitrate = (M4OSA_UInt32)(pC->mFileSize * 8000.0 / pC->mMaxDuration);
+            }
+            ALOGV("VideoEditor3gpReader_getOption bitrate %ld", *pBitrate);
+        }
+        break;
+    case M4READER_3GP_kOptionID_H263Properties:
+        {
+            if (M4OSA_NULL == pC->mVideoStreamHandler) {
+                ALOGV("VideoEditor3gpReader_getOption no videoStream retrieved");
+
+                err = M4ERR_NO_VIDEO_STREAM_RETRIEVED_YET;
+                break;
+            }
+            if ((M4DA_StreamTypeVideoH263 != pC->mVideoStreamHandler->\
+                m_streamType) || (pC->mVideoStreamHandler->\
+                m_decoderSpecificInfoSize < 7)) {
+                ALOGV("VideoEditor3gpReader_getOption DSI Size %d",
+                    pC->mVideoStreamHandler->m_decoderSpecificInfoSize);
+
+                err = M4ERR_VIDEO_NOT_H263;
+                break;
+            }
+
+            /* MAGICAL in the decoder config H263: the 7th byte is the profile
+             * number, 6th byte is the level number */
+            ((M4READER_3GP_H263Properties *)pValue)->uiProfile =
+                pC->mVideoStreamHandler->m_pDecoderSpecificInfo[6];
+            ((M4READER_3GP_H263Properties *)pValue)->uiLevel =
+                pC->mVideoStreamHandler->m_pDecoderSpecificInfo[5];
+            ALOGV("VideoEditor3gpReader_getOption M4READER_3GP_kOptionID_\
+                H263Properties end");
+        }
+        break;
+    case M4READER_3GP_kOptionID_PurpleLabsDrm:
+        ALOGV("VideoEditor3gpReader_getOption M4READER_3GP_kOptionID_PurpleLabsDrm");
+        /* not used */
+        break;
+
+    case M4READER_kOptionID_GetNumberOfAudioAu:
+        /* not used */
+        ALOGV("VideoEditor3gpReader_getOption M4READER_kOptionID_GetNumberOfAudioAu");
+        break;
+
+    case M4READER_kOptionID_GetNumberOfVideoAu:
+        /* not used */
+        ALOGV("VideoEditor3gpReader_getOption :GetNumberOfVideoAu");
+        break;
+
+    case M4READER_kOptionID_GetMetadata:
+        /* not used */
+        ALOGV("VideoEditor3gpReader_getOption M4READER_kOptionID_GetMetadata");
+        break;
+
+    case M4READER_kOptionID_3gpFtypBox:
+        /* used only for SEMC */
+        ALOGV("VideoEditor3gpReader_getOption M4READER_kOptionID_3gpFtypBox");
+        err = M4ERR_BAD_OPTION_ID; //check this
+        break;
+
+#ifdef OPTIONID_GET_NEXT_VIDEO_CTS
+    case M4READER_3GP_kOptionID_getNextVideoCTS:
+        /* not used */
+        ALOGV("VideoEditor3gpReader_getOption: getNextVideoCTS");
+        break;
+#endif
+    default:
+        {
+            err = M4ERR_BAD_OPTION_ID;
+            ALOGV("VideoEditor3gpReader_getOption M4ERR_BAD_OPTION_ID");
+        }
+        break;
+    }
+    ALOGV("VideoEditor3gpReader_getOption end: optionID: x%x", optionId);
+    return err;
+}
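Typical use of the option interface above, once the stream handlers have been retrieved; a hedged sketch with error handling elided (not part of the patch):

    /* Sketch only, not patch content. */
    static void queryClipProperties(M4OSA_Context readerCtx) {
        M4OSA_Time durationMs = 0;
        M4OSA_UInt32 bitrate = 0;

        /* mMaxDuration is filled in while the streams are parsed. */
        VideoEditor3gpReader_getOption(readerCtx, M4READER_kOptionID_Duration,
            (M4OSA_DataOption)&durationMs);

        /* Average bitrate = file size * 8000 / duration, as computed above. */
        VideoEditor3gpReader_getOption(readerCtx, M4READER_kOptionID_Bitrate,
            (M4OSA_DataOption)&bitrate);
    }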
+/**
+************************************************************************
+* @brief set an option on the 3gp reader
+* @note No option can be set yet.
+* @param context: (IN) Context of the reader
+* @param optionId: (IN) indicates the option to set
+* @param pValue: (IN) pointer to structure or value (allocated
+* by user) where option is stored
+* @return M4NO_ERROR there is no error
+* @return M4ERR_BAD_CONTEXT provided context is not a valid one
+* @return M4ERR_PARAMETER at least one parameter is not properly set
+* @return M4ERR_BAD_OPTION_ID when the option ID is not a valid one
+************************************************************************
+*/
+M4OSA_ERR VideoEditor3gpReader_setOption(M4OSA_Context context,
+        M4OSA_OptionID optionId, M4OSA_DataOption pValue) {
+    VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)context;
+    M4OSA_ERR err = M4NO_ERROR;
+
+    /* Check function parameters */
+    M4OSA_DEBUG_IF1((M4OSA_NULL == pC), M4ERR_PARAMETER,
+        "invalid context pointer");
+    M4OSA_DEBUG_IF1((M4OSA_NULL == pValue), M4ERR_PARAMETER,
+        "invalid value pointer");
+
+    ALOGV("VideoEditor3gpReader_setOption begin %d", optionId);
+
+    switch (optionId) {
+        case M4READER_kOptionID_SetOsaFileReaderFctsPtr:
+            break;
+
+        case M4READER_3GP_kOptionID_AudioOnly:
+            break;
+
+        case M4READER_3GP_kOptionID_VideoOnly:
+            break;
+
+        case M4READER_3GP_kOptionID_FastOpenMode:
+            break;
+
+        case M4READER_kOptionID_MaxMetadataSize:
+            break;
+
+        default:
+        {
+            ALOGV("VideoEditor3gpReader_setOption: returns M4ERR_BAD_OPTION_ID");
+            err = M4ERR_BAD_OPTION_ID;
+        }
+        break;
+    }
+    ALOGV("VideoEditor3gpReader_setOption end ");
+    return err;
+}
+/**
+ ************************************************************************
+ * @brief fill the access unit structure with initialization values
+ * @param context: (IN) Context of the reader
+ * @param pStreamHandler: (IN) pointer to the stream handler to which
+ * the access unit will be associated
+ * @param pAccessUnit: (IN/OUT) pointer to the access unit (allocated
+ * by the caller) to initialize
+ * @return M4NO_ERROR there is no error
+ * @return M4ERR_PARAMETER at least one parameter is not properly set
+ ************************************************************************
+*/
+M4OSA_ERR VideoEditor3gpReader_fillAuStruct(M4OSA_Context context,
+        M4_StreamHandler *pStreamHandler, M4_AccessUnit *pAccessUnit) {
+    VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)context;
+    M4OSA_ERR err= M4NO_ERROR;
+
+    M4OSA_DEBUG_IF1((pC == 0), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_fillAuStruct: invalid context");
+    M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_fillAuStruct: invalid pointer to M4_StreamHandler");
+    M4OSA_DEBUG_IF1((pAccessUnit == 0), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_fillAuStruct: invalid pointer to M4_AccessUnit");
+
+    ALOGV("VideoEditor3gpReader_fillAuStruct begin");
+
+    /* Initialize pAccessUnit structure */
+    pAccessUnit->m_size = 0;
+    pAccessUnit->m_CTS = 0;
+    pAccessUnit->m_DTS = 0;
+    pAccessUnit->m_attribute = 0;
+    pAccessUnit->m_dataAddress = M4OSA_NULL;
+    pAccessUnit->m_maxsize = pStreamHandler->m_maxAUSize;
+    pAccessUnit->m_streamID = pStreamHandler->m_streamId;
+    pAccessUnit->m_structSize = sizeof(M4_AccessUnit);
+
+    ALOGV("VideoEditor3gpReader_fillAuStruct end");
+    return M4NO_ERROR;
+}
+
+/**
+********************************************************************************
+* @brief jump into the stream at the specified time
+* @note
+* @param context: (IN) Context of the reader
+* @param pStreamHandler (IN) the stream handler of the stream to make jump
+* @param pTime (I/O)IN the time to jump to (in ms)
+*                          OUT the time to which the stream really jumped
+* @return M4NO_ERROR there is no error
+* @return M4ERR_PARAMETER at least one parameter is not properly set
+********************************************************************************
+*/
+M4OSA_ERR VideoEditor3gpReader_jump(M4OSA_Context context,
+        M4_StreamHandler *pStreamHandler, M4OSA_Int32* pTime) {
+    VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)context;
+    M4OSA_ERR err = M4NO_ERROR;
+    M4SYS_AccessUnit* pAu;
+    M4OSA_Time time64;
+
+    M4OSA_DEBUG_IF1((pC == 0), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_jump: invalid context");
+    M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_jump: invalid pointer to M4_StreamHandler");
+    M4OSA_DEBUG_IF1((pTime == 0), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_jump: invalid time pointer");
+
+    ALOGV("VideoEditor3gpReader_jump begin");
+
+    if (*pTime == (pStreamHandler->m_duration)) {
+        *pTime -= 1;
+    }
+    time64 = (M4OSA_Time)*pTime;
+
+    ALOGV("VideoEditor3gpReader_jump time ms %ld ", time64);
+
+    if ((pC->mAudioStreamHandler != M4OSA_NULL) &&
+        (pStreamHandler->m_streamId == pC->mAudioStreamHandler->m_streamId))
+    {
+        pAu = &pC->mAudioAu;
+        pAu->CTS = time64;
+        pAu->DTS = time64;
+
+        time64 = time64 * 1000; /* Convert the time into micro sec */
+        pC->mAudioSeeking = M4OSA_TRUE;
+        pC->mAudioSeekTime = time64;
+        ALOGV("VideoEditor3gpReader_jump AUDIO time us %ld ", time64);
+    } else if ((pC->mVideoStreamHandler != M4OSA_NULL) &&
+        (pStreamHandler->m_streamId == pC->mVideoStreamHandler->m_streamId))
+    {
+        pAu = &pC->mVideoAu;
+        pAu->CTS = time64;
+        pAu->DTS = time64;
+
+        time64 = time64 * 1000; /* Convert the time into micro sec */
+        pC->mVideoSeeking = M4OSA_TRUE;
+        pC->mVideoSeekTime = time64;
+        ALOGV("VideoEditor3gpReader_jump VIDEO time us %ld ", time64);
+    } else {
+        ALOGV("VideoEditor3gpReader_jump passed StreamHandler is not known\n");
+        return M4ERR_PARAMETER;
+    }
+    time64 = time64 / 1000; /* Convert the time into milli sec */
+    ALOGV("VideoEditor3gpReader_jump time ms before seekset %ld ", time64);
+
+    *pTime = (M4OSA_Int32)time64;
+
+    ALOGV("VideoEditor3gpReader_jump end");
+    err = M4NO_ERROR;
+    return err;
+}
+/**
+********************************************************************************
+* @brief reset the stream, that is seek it to beginning and make it ready
+* @note
+* @param context: (IN) Context of the reader
+* @param pStreamHandler (IN) The stream handler of the stream to reset
+* @return M4NO_ERROR there is no error
+* @return M4ERR_PARAMETER at least one parameter is not properly set
+********************************************************************************
+*/
+M4OSA_ERR VideoEditor3gpReader_reset(M4OSA_Context context,
+        M4_StreamHandler *pStreamHandler) {
+    VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)context;
+    M4OSA_ERR err = M4NO_ERROR;
+    M4SYS_StreamID streamIdArray[2];
+    M4SYS_AccessUnit* pAu;
+    M4OSA_Time time64 = 0;
+
+    M4OSA_DEBUG_IF1((pC == 0), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_reset: invalid context");
+    M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_reset: invalid pointer to M4_StreamHandler");
+
+    ALOGV("VideoEditor3gpReader_reset begin");
+
+    if (pStreamHandler == (M4_StreamHandler*)pC->mAudioStreamHandler) {
+        pAu = &pC->mAudioAu;
+    } else if (pStreamHandler == (M4_StreamHandler*)pC->mVideoStreamHandler) {
+        pAu = &pC->mVideoAu;
+    } else {
+        ALOGV("VideoEditor3gpReader_reset passed StreamHandler is not known\n");
+        return M4ERR_PARAMETER;
+    }
+
+    pAu->CTS = time64;
+    pAu->DTS = time64;
+
+    ALOGV("VideoEditor3gpReader_reset end");
+    return err;
+}
+
+/**
+********************************************************************************
+* @brief   Gets an access unit (AU) from the stream handler source.
+* @note    An AU is the smallest amount of data that can be decoded by a
+*          decoder.
+* @param   context:        (IN) Context of the reader
+* @param   pStreamHandler  (IN) The stream handler of the stream to read from
+* @param   pAccessUnit     (IO) Pointer to the access unit to fill with read data
+* @return  M4NO_ERROR           there is no error
+* @return  M4ERR_PARAMETER      at least one parameter is not properly set
+* @return  M4ERR_ALLOC          memory allocation failed
+* @return  M4WAR_NO_MORE_AU     there are no more access units in the stream
+********************************************************************************
+*/
+M4OSA_ERR VideoEditor3gpReader_getNextAu(M4OSA_Context context,
+        M4_StreamHandler *pStreamHandler, M4_AccessUnit *pAccessUnit) {
+    VideoEditor3gpReader_Context* pC=(VideoEditor3gpReader_Context*)context;
+    M4OSA_ERR err = M4NO_ERROR;
+    M4SYS_AccessUnit* pAu;
+    int64_t tempTime64 = 0;
+    MediaBuffer *mMediaBuffer = NULL;
+    MediaSource::ReadOptions options;
+    M4OSA_Bool flag = M4OSA_FALSE;
+    status_t error;
+    int32_t i32Tmp = 0;
+
+    M4OSA_DEBUG_IF1((pC == 0), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_getNextAu: invalid context");
+    M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_getNextAu: invalid pointer to M4_StreamHandler");
+    M4OSA_DEBUG_IF1((pAccessUnit == 0), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_getNextAu: invalid pointer to M4_AccessUnit");
+
+    ALOGV("VideoEditor3gpReader_getNextAu begin");
+
+    if (pStreamHandler == (M4_StreamHandler*)pC->mAudioStreamHandler) {
+        ALOGV("VideoEditor3gpReader_getNextAu audio stream");
+        pAu = &pC->mAudioAu;
+        if (pC->mAudioSeeking == M4OSA_TRUE) {
+            ALOGV("VideoEditor3gpReader_getNextAu audio seek time: %ld",
+                pC->mAudioSeekTime);
+            options.setSeekTo(pC->mAudioSeekTime);
+            pC->mAudioSource->read(&mMediaBuffer, &options);
+
+            mMediaBuffer->meta_data()->findInt64(kKeyTime,
+                (int64_t*)&tempTime64);
+            options.clearSeekTo();
+            pC->mAudioSeeking = M4OSA_FALSE;
+            flag = M4OSA_TRUE;
+        } else {
+            ALOGV("VideoEditor3gpReader_getNextAu audio no seek:");
+            pC->mAudioSource->read(&mMediaBuffer, &options);
+            if (mMediaBuffer != NULL) {
+                mMediaBuffer->meta_data()->findInt64(kKeyTime,
+                    (int64_t*)&tempTime64);
+            }
+        }
+    } else if (pStreamHandler == (M4_StreamHandler*)pC->mVideoStreamHandler) {
+        ALOGV("VideoEditor3gpReader_getNextAu video stream");
+        pAu = &pC->mVideoAu;
+        if(pC->mVideoSeeking == M4OSA_TRUE) {
+            flag = M4OSA_TRUE;
+            ALOGV("VideoEditor3gpReader_getNextAu seek: %ld",pC->mVideoSeekTime);
+            options.setSeekTo(pC->mVideoSeekTime,
+                MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
+            do
+            {
+                if (mMediaBuffer != NULL) {
+                    ALOGV("VideoEditor3gpReader_getNextAu free the MediaBuffer");
+                    mMediaBuffer->release();
+                }
+                error = pC->mVideoSource->read(&mMediaBuffer, &options);
+                ALOGV("VE3gpReader_getNextAu MediaBuffer %x , error %d",
+                    mMediaBuffer, error);
+                if (mMediaBuffer != NULL)
+                {
+                    if (mMediaBuffer->meta_data()->findInt32(kKeyIsSyncFrame,
+                        &i32Tmp) && i32Tmp) {
+                        ALOGV("SYNC FRAME FOUND--%d", i32Tmp);
+                        pAu->attribute = AU_RAP;
+                    }
+                    else {
+                        pAu->attribute = AU_P_Frame;
+                    }
+                    mMediaBuffer->meta_data()->findInt64(kKeyTime,
+                        (int64_t*)&tempTime64);
+                } else {
+                    break;
+                }
+                options.clearSeekTo();
+            } while(tempTime64 < pC->mVideoSeekTime);
+
+            ALOGV("VE3gpReader_getNextAu: video time with seek = %lld:",
+                tempTime64);
+            pC->mVideoSeeking = M4OSA_FALSE;
+        } else {
+            ALOGV("VideoEditor3gpReader_getNextAu video no seek:");
+            pC->mVideoSource->read(&mMediaBuffer, &options);
+
+            if(mMediaBuffer != NULL) {
+                if (mMediaBuffer->meta_data()->findInt32(kKeyIsSyncFrame,
+                    &i32Tmp) && i32Tmp) {
+                    ALOGV("SYNC FRAME FOUND--%d", i32Tmp);
+                    pAu->attribute = AU_RAP;
+                }
+                else {
+                    pAu->attribute = AU_P_Frame;
+                }
+                mMediaBuffer->meta_data()->findInt64(kKeyTime,
+                    (int64_t*)&tempTime64);
+                ALOGV("VE3gpReader_getNextAu: video no seek time = %lld:",
+                    tempTime64);
+            } else {
+                ALOGV("VE3gpReader_getNextAu:video no seek time buffer is NULL");
+            }
+        }
+    } else {
+        ALOGV("VideoEditor3gpReader_getNextAu M4ERR_PARAMETER");
+        return M4ERR_PARAMETER;
+    }
+
+    if (mMediaBuffer != NULL) {
+        if( (pAu->dataAddress == NULL) || (pAu->size <
+            mMediaBuffer->range_length())) {
+            if(pAu->dataAddress != NULL) {
+                free((M4OSA_Int32*)pAu->dataAddress);
+                pAu->dataAddress = NULL;
+            }
+            ALOGV("Buffer length = %d ,%d",(mMediaBuffer->range_length() +
+                3) & ~0x3,(mMediaBuffer->range_length()));
+
+            pAu->dataAddress = (M4OSA_Int32*)M4OSA_32bitAlignedMalloc(
+                (mMediaBuffer->range_length() + 3) & ~0x3,M4READER_3GP,
+                (M4OSA_Char*)"pAccessUnit->m_dataAddress" );
+            if(pAu->dataAddress == NULL) {
+                ALOGV("VideoEditor3gpReader_getNextAu malloc failed");
+                return M4ERR_ALLOC;
+            }
+        }
+        pAu->size = mMediaBuffer->range_length();
+
+        memcpy((void *)pAu->dataAddress,
+            (void *)((const char *)mMediaBuffer->data() +
+            mMediaBuffer->range_offset()),
+            mMediaBuffer->range_length());
+
+        if( (pStreamHandler == (M4_StreamHandler*)pC->mVideoStreamHandler) &&
+            (pStreamHandler->m_streamType == M4DA_StreamTypeVideoMpeg4Avc) ) {
+            M4OSA_UInt32 size = mMediaBuffer->range_length();
+            M4OSA_UInt8 *lbuffer;
+
+            lbuffer = (M4OSA_UInt8 *) pAu->dataAddress;
+            ALOGV("pAccessUnit->m_dataAddress size = %x",size);
+
+            lbuffer[0] = (size >> 24) & 0xFF;
+            lbuffer[1] = (size >> 16) & 0xFF;
+            lbuffer[2] = (size >> 8) & 0xFF;
+            lbuffer[3] = (size) & 0xFF;
+        }
+
+        pAu->CTS = tempTime64;
+
+        pAu->CTS = pAu->CTS / 1000; // convert microseconds to milliseconds
+        ALOGV("VideoEditor3gpReader_getNextAu CTS = %ld",pAu->CTS);
+
+        pAu->DTS = pAu->CTS;
+        if (pStreamHandler == (M4_StreamHandler*)pC->mAudioStreamHandler) {
+            pAu->attribute = M4SYS_kFragAttrOk;
+        }
+        mMediaBuffer->release();
+
+        pAccessUnit->m_dataAddress = (M4OSA_Int8*) pAu->dataAddress;
+        pAccessUnit->m_size = pAu->size;
+        pAccessUnit->m_maxsize = pAu->size;
+        pAccessUnit->m_CTS = pAu->CTS;
+        pAccessUnit->m_DTS = pAu->DTS;
+        pAccessUnit->m_attribute = pAu->attribute;
+
+    } else {
+        ALOGV("VideoEditor3gpReader_getNextAu: M4WAR_NO_MORE_AU (EOS) reached");
+        pAccessUnit->m_size = 0;
+        err = M4WAR_NO_MORE_AU;
+    }
+    options.clearSeekTo();
+
+    pAu->nbFrag = 0;
+    mMediaBuffer = NULL;
+    ALOGV("VideoEditor3gpReader_getNextAu end ");
+
+    return err;
+}
+/**
+ *******************************************************************************
+ * @brief   Split the AVC DSI into its different components and write them
+ *          into ONE memory buffer
+ * @note
+ * @param   pStreamHandler:         (IN/OUT) The MPEG4-AVC stream
+ * @param   pDecoderConfigLocal:    (IN) The DSI buffer
+ * @param   decoderConfigSizeLocal: (IN) The DSI buffer size
+ * @return  M4NO_ERROR                              there is no error
+ * @return  M4ERR_READER3GP_DECODER_CONFIG_ERROR    pDecoderConfigLocal is NULL
+ *******************************************************************************
+*/
+static M4OSA_ERR VideoEditor3gpReader_AnalyseAvcDsi(
+        M4_StreamHandler *pStreamHandler, M4OSA_Int32* pDecoderConfigLocal,
+        M4OSA_Int32 decoderConfigSizeLocal) {
+    struct _avcSpecificInfo *pAvcSpecInfo = M4OSA_NULL;
+    M4OSA_UInt32 uiSpecInfoSize;
+    M4OSA_Context pBitParserContext = M4OSA_NULL;
+    M4OSA_MemAddr8 pPos;
+
+    /**
+     * First parsing pass to get the total allocation size (we must not do
+     * multiple mallocs, but only one) */
+    {
+        M4OSA_Int32 val;
+        M4OSA_UInt32 i,j;
+        M4OSA_UInt8 nalUnitLength;
+        M4OSA_UInt8 numOfSequenceParameterSets;
+        M4OSA_UInt32 uiTotalSizeOfSPS = 0;
+        M4OSA_UInt8 numOfPictureParameterSets;
+        M4OSA_UInt32 uiTotalSizeOfPPS = 0;
+        M4OSA_UInt32 uiSize;
+        struct _avcSpecificInfo avcSpIf;
+
+        avcSpIf.m_nalUnitLength = 0;
+
+        if (M4OSA_NULL == pDecoderConfigLocal) {
+            return M4ERR_READER3GP_DECODER_CONFIG_ERROR;
+        }
+
+        VideoEditor3gpReader_MPEG4BitStreamParserInit(&pBitParserContext,
+            pDecoderConfigLocal, decoderConfigSizeLocal);
+
+        if (M4OSA_NULL == pBitParserContext) {
+            return M4ERR_ALLOC;
+        }
+
+        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+                                   /* 8 bits -- configuration version */
+        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+                                   /* 8 bits -- avc profile indication */
+        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+                                   /* 8 bits -- profile compatibility */
+        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+                                   /* 8 bits -- avc level indication */
+        val=VideoEditor3gpReader_BitStreamParserShowBits(pBitParserContext, 8);
+                 /* 6 bits reserved 111111b, 2 bits NALU length size minus one */
+        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+                                   /* m_nalUnitLength */
+
+        nalUnitLength = (M4OSA_UInt8)((val & 0x03) + 1);
+        if (nalUnitLength > 4) {
+            pStreamHandler->m_decoderSpecificInfoSize = 0;
+            pStreamHandler->m_pDecoderSpecificInfo = M4OSA_NULL;
+            VideoEditor3gpReader_BitStreamParserCleanUp(pBitParserContext);
+        } else {
+            /**
+             * SPS table */
+            val=VideoEditor3gpReader_BitStreamParserShowBits(pBitParserContext,
+                8); /* 3 bits reserved 111b, 5 bits number of sequence parameter sets */
+            numOfSequenceParameterSets = val & 0x1F;
+            VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+            for (i=0; i < numOfSequenceParameterSets; i++) {
+                /**
+                 * Get the size of this element */
+                uiSize =
+                    (M4OSA_UInt32)VideoEditor3gpReader_BitStreamParserShowBits(
+                    pBitParserContext, 16);
+                uiTotalSizeOfSPS += uiSize;
+                VideoEditor3gpReader_BitStreamParserFlushBits(
+                    pBitParserContext, 16);
+                /**
+                 * Read the element (don't keep it, we only want its size now) */
+                for (j=0; j<uiSize; j++) {
+                    VideoEditor3gpReader_BitStreamParserFlushBits(
+                        pBitParserContext, 8);
+                }
+            }
+
+            /**
+             * PPS table */
+            val=VideoEditor3gpReader_BitStreamParserShowBits(pBitParserContext,
+                8); /* 8 bits -- number of picture parameter sets */
+            numOfPictureParameterSets = (M4OSA_UInt8)(val & 0xFF);
+            VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+            for (i=0; i < numOfPictureParameterSets; i++) {
+                /**
+                 * Get the size of this element */
+                uiSize =
+                    (M4OSA_UInt32)VideoEditor3gpReader_BitStreamParserShowBits(
+                    pBitParserContext, 16);
+                uiTotalSizeOfPPS += uiSize;
+                VideoEditor3gpReader_BitStreamParserFlushBits(
+                    pBitParserContext, 16);
+                /**
+                 * Read the element (don't keep it, we only want its size now) */
+                for (j=0; j<uiSize; j++) {
+                    VideoEditor3gpReader_BitStreamParserFlushBits(
+                        pBitParserContext, 8);
+                }
+            }
+
+            /**
+             * Compute the size of the full buffer: the header structure, the
+             * two parameter-set tables, then the raw SPS and PPS bytes */
+            uiSpecInfoSize = sizeof(struct _avcSpecificInfo) +
+                numOfSequenceParameterSets * sizeof(struct _parameterSet) +
+                numOfPictureParameterSets * sizeof(struct _parameterSet) +
+                uiTotalSizeOfSPS + uiTotalSizeOfPPS;
+
+            /**
+             * Allocate the buffer */
+            pAvcSpecInfo = (struct _avcSpecificInfo*)M4OSA_32bitAlignedMalloc(
+                uiSpecInfoSize, M4READER_3GP, (M4OSA_Char*)"Avc Specific Info");
+            if (M4OSA_NULL == pAvcSpecInfo) {
+                VideoEditor3gpReader_BitStreamParserCleanUp(pBitParserContext);
+                return M4ERR_ALLOC;
+            }
+
+            /**
+             * Fill the header of the structure */
+            pAvcSpecInfo->m_nalUnitLength = nalUnitLength;
+            pAvcSpecInfo->m_numOfSequenceParameterSets =
+                numOfSequenceParameterSets;
+            pAvcSpecInfo->m_numOfPictureParameterSets =
+                numOfPictureParameterSets;
+
+            /* We place the SPS param sets table after m_pPictureParameterSet */
+            pAvcSpecInfo->m_pSequenceParameterSet= (struct _parameterSet*)(
+                (M4OSA_MemAddr8)(&pAvcSpecInfo->m_pPictureParameterSet) +
+                sizeof(pAvcSpecInfo->m_pPictureParameterSet));
+            /*We place the PPS param sets table after the SPS param sets table*/
+            pAvcSpecInfo->m_pPictureParameterSet = (struct _parameterSet*)(
+                (M4OSA_MemAddr8)(pAvcSpecInfo->m_pSequenceParameterSet) +
+                (numOfSequenceParameterSets * sizeof(struct _parameterSet)));
+            /**< The data will be placed after the PPS param sets table */
+            pPos = (M4OSA_MemAddr8)pAvcSpecInfo->m_pPictureParameterSet +
+                (numOfPictureParameterSets * sizeof(struct _parameterSet));
+
+            /**
+             * reset the bit parser */
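+            /* (two-pass layout: this first pass only accumulated sizes so
+               that a single allocation can hold the header, both parameter
+               set tables and the raw SPS/PPS bytes; the second pass below
+               re-reads the DSI and copies the data into that buffer) */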
+            VideoEditor3gpReader_BitStreamParserCleanUp(pBitParserContext);
+        }
+    }
+
+    /**
+     * Second parsing pass to copy the data */
+    if (M4OSA_NULL != pAvcSpecInfo) {
+        M4OSA_Int32 i,j;
+
+        VideoEditor3gpReader_MPEG4BitStreamParserInit(&pBitParserContext,
+            pDecoderConfigLocal, decoderConfigSizeLocal);
+
+        if (M4OSA_NULL == pBitParserContext) {
+            free(pAvcSpecInfo);
+            return M4ERR_ALLOC;
+        }
+
+        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+                                   /* 8 bits -- configuration version */
+        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+                                   /* 8 bits -- avc profile indication */
+        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+                                   /* 8 bits -- profile compatibility */
+        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+                                   /* 8 bits -- avc level indication */
+        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+                                   /* m_nalUnitLength */
+        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+            /* 3 bits reserved 111b, 5 bits number of sequence parameter sets */
+
+        for (i=0; i < pAvcSpecInfo->m_numOfSequenceParameterSets; i++) {
+            pAvcSpecInfo->m_pSequenceParameterSet[i].m_length =
+                (M4OSA_UInt16)VideoEditor3gpReader_BitStreamParserShowBits(
+                pBitParserContext, 16);
+            VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext,16);
+
+            pAvcSpecInfo->m_pSequenceParameterSet[i].m_pParameterSetUnit =
+                (M4OSA_UInt8*)pPos;  /**< current position in the buffer */
+            pPos += pAvcSpecInfo->m_pSequenceParameterSet[i].m_length;
+                                 /**< increment the position in the buffer */
+            for (j=0; j<pAvcSpecInfo->m_pSequenceParameterSet[i].m_length;j++){
+                pAvcSpecInfo->m_pSequenceParameterSet[i].m_pParameterSetUnit[j]=
+                    (M4OSA_UInt8)VideoEditor3gpReader_BitStreamParserShowBits(
+                    pBitParserContext, 8);
+                VideoEditor3gpReader_BitStreamParserFlushBits(
+                    pBitParserContext, 8);
+            }
+        }
+
+        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
+                                   /* number of picture parameter sets */
+
+        for (i=0; i < pAvcSpecInfo->m_numOfPictureParameterSets; i++) {
+            pAvcSpecInfo->m_pPictureParameterSet[i].m_length =
+                (M4OSA_UInt16)VideoEditor3gpReader_BitStreamParserShowBits(
+                pBitParserContext, 16);
+            VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext,16);
+
+            pAvcSpecInfo->m_pPictureParameterSet[i].m_pParameterSetUnit =
+                (M4OSA_UInt8*)pPos;  /**< current position in the buffer */
+            pPos += pAvcSpecInfo->m_pPictureParameterSet[i].m_length;
+                                 /**< increment the position in the buffer */
+            for (j=0; j<pAvcSpecInfo->m_pPictureParameterSet[i].m_length; j++) {
+                pAvcSpecInfo->m_pPictureParameterSet[i].m_pParameterSetUnit[j] =
+                    (M4OSA_UInt8)VideoEditor3gpReader_BitStreamParserShowBits(
+                    pBitParserContext, 8);
+                VideoEditor3gpReader_BitStreamParserFlushBits(
+                    pBitParserContext, 8);
+            }
+        }
+        VideoEditor3gpReader_BitStreamParserCleanUp(pBitParserContext);
+        pStreamHandler->m_decoderSpecificInfoSize = uiSpecInfoSize;
+        pStreamHandler->m_pDecoderSpecificInfo = (M4OSA_UInt8*)pAvcSpecInfo;
+    }
+    pStreamHandler->m_H264decoderSpecificInfoSize = decoderConfigSizeLocal;
+    pStreamHandler->m_pH264DecoderSpecificInfo =
+        (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(decoderConfigSizeLocal,
+        M4READER_3GP, (M4OSA_Char*)"MPEG-4 AVC DecoderSpecific");
+    if (M4OSA_NULL == pStreamHandler->m_pH264DecoderSpecificInfo) {
+        goto cleanup;
+    }
+
+    memcpy((void *)pStreamHandler->m_pH264DecoderSpecificInfo,
+        (void *)pDecoderConfigLocal,
+        pStreamHandler->m_H264decoderSpecificInfoSize);
+    return M4NO_ERROR;
+cleanup:
+    VideoEditor3gpReader_BitStreamParserCleanUp(pBitParserContext);
+    return M4ERR_READER3GP_DECODER_CONFIG_ERROR;
+}
+/**
+********************************************************************************
+* @brief   Get the next stream found in the 3gp file
+* @note
+* @param   context:        (IN)  Context of the reader
+* @param   pMediaFamily:   (OUT) pointer to a user allocated
+*                                M4READER_MediaFamily that will be filled
+*                                with the media family of the found stream
+* @param   pStreamHandler: (OUT) pointer to a StreamHandler that will be
+*                                allocated and filled with the found stream
+*                                description
+* @return  M4NO_ERROR              there is no error
+* @return  M4ERR_BAD_CONTEXT       provided context is not a valid one
+* @return  M4ERR_PARAMETER         at least one parameter is not properly set
+* @return  M4WAR_NO_MORE_STREAM    no more available streams in the media
+********************************************************************************
+*/
+M4OSA_ERR VideoEditor3gpReader_getNextStreamHandler(M4OSA_Context context,
+        M4READER_MediaFamily *pMediaFamily,
+        M4_StreamHandler **pStreamHandler) {
+    VideoEditor3gpReader_Context* pC=(VideoEditor3gpReader_Context*)context;
+    M4OSA_ERR err = M4NO_ERROR;
+    M4SYS_StreamID streamIdArray[2];
+    M4SYS_StreamDescription streamDesc;
+    M4_AudioStreamHandler* pAudioStreamHandler;
+    M4_VideoStreamHandler* pVideoStreamHandler;
+    M4OSA_Int8 *DecoderSpecificInfo = M4OSA_NULL;
+    M4OSA_Int32 decoderSpecificInfoSize =0, maxAUSize = 0;
+
+    M4_StreamType streamType = M4DA_StreamTypeUnknown;
+    M4OSA_UInt8 temp, i, trackCount;
+    M4OSA_Bool haveAudio = M4OSA_FALSE;
+    M4OSA_Bool haveVideo = M4OSA_FALSE;
+    sp<MetaData> meta = NULL;
+    int64_t Duration = 0;
+    M4OSA_UInt8* DecoderSpecific = M4OSA_NULL;
+    uint32_t type;
+    const void *data;
+    size_t size;
+    const void *codec_specific_data;
+    size_t codec_specific_data_size;
+    M4OSA_Int32 ptempTime;
+    M4OSA_Int32 avgFPS=0;
+
+    ALOGV("VideoEditor3gpReader_getNextStreamHandler begin");
+
+    M4OSA_DEBUG_IF1((pC == 0), M4ERR_PARAMETER,
+        "VideoEditor3gpReader_getNextStreamHandler: invalid context");
+    M4OSA_DEBUG_IF1((pMediaFamily == 0), M4ERR_PARAMETER,
+        "getNextStreamHandler: invalid pointer to MediaFamily");
+    M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
+        "getNextStreamHandler: invalid pointer to StreamHandler");
+
+    trackCount = pC->mExtractor->countTracks();
+    temp = pC->mCurrTrack;
+
+    if(temp >= trackCount) {
+        ALOGV("VideoEditor3gpReader_getNextStreamHandler error = %d",
+            M4WAR_NO_MORE_STREAM);
+        return (M4WAR_NO_MORE_STREAM);
+    } else {
+        const char *mime;
+        meta = pC->mExtractor->getTrackMetaData(temp);
+        CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
+            pC->mVideoSource = pC->mExtractor->getTrack(temp);
+            pC->mVideoSource->start();
+
+            *pMediaFamily = M4READER_kMediaFamilyVideo;
+            haveVideo = true;
+            ALOGV("VideoEditor3gpReader_getNextStreamHandler getTrack called");
+            if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
+                streamType = M4DA_StreamTypeVideoMpeg4Avc;
+            } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
+                streamType = M4DA_StreamTypeVideoH263;
+            } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)) {
+                streamType = M4DA_StreamTypeVideoMpeg4;
+            } else {
+                ALOGV("VideoEditor3gpReader_getNextStreamHandler streamTypeNONE");
+            }
+            ALOGV("VideoEditor3gpReader_getNextStreamHandler: stream type: %d ",
+                streamType);
+
+            if(streamType != M4DA_StreamTypeUnknown) {
+                pC->mStreamType = streamType;
+                pC->mStreamId = pC->mCurrTrack;
+
+                pVideoStreamHandler =
(M4_VideoStreamHandler*)M4OSA_32bitAlignedMalloc + (sizeof(M4_VideoStreamHandler), M4READER_3GP, + (M4OSA_Char*)"M4_VideoStreamHandler"); + if (M4OSA_NULL == pVideoStreamHandler) { + return M4ERR_ALLOC; + } + pVideoStreamHandler->m_structSize=sizeof(M4_VideoStreamHandler); + + meta->findInt32(kKeyWidth, + (int32_t*)&(pVideoStreamHandler->m_videoWidth)); + meta->findInt32(kKeyHeight, + (int32_t*)&(pVideoStreamHandler->m_videoHeight)); + + (*pStreamHandler) = (M4_StreamHandler*)(pVideoStreamHandler); + meta->findInt64(kKeyDuration, + (int64_t*)&(Duration)); + ((*pStreamHandler)->m_duration) = + (int32_t)((Duration)/1000); // conversion to mS + pC->mMaxDuration = ((*pStreamHandler)->m_duration); + ALOGV("VideoEditor3gpReader_getNextStreamHandler m_duration %d", + (*pStreamHandler)->m_duration); + + off64_t fileSize = 0; + pC->mDataSource->getSize(&fileSize); + pC->mFileSize = fileSize; + + ALOGV("VideoEditor3gpReader_getNextStreamHandler m_fileSize %d", + pC->mFileSize); + + meta->findInt32(kKeyMaxInputSize, (int32_t*)&(maxAUSize)); + if(maxAUSize == 0) { + maxAUSize = 70000; + } + (*pStreamHandler)->m_maxAUSize = maxAUSize; + ALOGV("<<<<<<<<<< video: mMaxAUSize from MP4 extractor: %d", + (*pStreamHandler)->m_maxAUSize); + + ((M4_StreamHandler*)pVideoStreamHandler)->m_averageBitRate = + (pC->mFileSize * 8000)/pC->mMaxDuration; + ALOGV("VideoEditor3gpReader_getNextStreamHandler m_averageBitrate %d", + ((M4_StreamHandler*)pVideoStreamHandler)->m_averageBitRate); + + + meta->findInt32(kKeyFrameRate, + (int32_t*)&(avgFPS)); + ALOGV("<<<<<<<<<< video: Average FPS from MP4 extractor: %d", + avgFPS); + + pVideoStreamHandler->m_averageFrameRate =(M4OSA_Float) avgFPS; + ALOGV("<<<<<<<<<< video: Average FPS from MP4 extractor in FLOAT: %f", + pVideoStreamHandler->m_averageFrameRate); + + // Get the video rotation degree + int32_t rotationDegree; + if(!meta->findInt32(kKeyRotation, &rotationDegree)) { + rotationDegree = 0; + } + pVideoStreamHandler->videoRotationDegrees = rotationDegree; + + pC->mVideoStreamHandler = + (M4_StreamHandler*)(pVideoStreamHandler); + + /* Get the DSI info */ + if(M4DA_StreamTypeVideoH263 == streamType) { + if (meta->findData(kKeyD263, &type, &data, &size)) { + (*pStreamHandler)->m_decoderSpecificInfoSize = size; + if ((*pStreamHandler)->m_decoderSpecificInfoSize != 0) { + DecoderSpecific = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( + (*pStreamHandler)->m_decoderSpecificInfoSize, + M4READER_3GP,(M4OSA_Char*)"H263 DSI"); + if (M4OSA_NULL == DecoderSpecific) { + return M4ERR_ALLOC; + } + memcpy((void *)DecoderSpecific, + (void *)data, size); + (*pStreamHandler)->m_pDecoderSpecificInfo = + DecoderSpecific; + } + else { + (*pStreamHandler)->m_pDecoderSpecificInfo = + M4OSA_NULL; + (*pStreamHandler)->m_decoderSpecificInfoSize = 0; + } + (*pStreamHandler)->m_pESDSInfo = M4OSA_NULL; + (*pStreamHandler)->m_ESDSInfoSize = 0; + (*pStreamHandler)->m_pH264DecoderSpecificInfo = M4OSA_NULL; + (*pStreamHandler)->m_H264decoderSpecificInfoSize = 0; + } else { + ALOGV("VE_getNextStreamHandler: H263 dsi not found"); + (*pStreamHandler)->m_pDecoderSpecificInfo = M4OSA_NULL; + (*pStreamHandler)->m_decoderSpecificInfoSize = 0; + (*pStreamHandler)->m_H264decoderSpecificInfoSize = 0; + (*pStreamHandler)->m_pH264DecoderSpecificInfo = + M4OSA_NULL; + (*pStreamHandler)->m_pESDSInfo = M4OSA_NULL; + (*pStreamHandler)->m_ESDSInfoSize = 0; + } + } + else if(M4DA_StreamTypeVideoMpeg4Avc == streamType) { + if(meta->findData(kKeyAVCC, &type, &data, &size)) { + decoderSpecificInfoSize = size; + if 
(decoderSpecificInfoSize != 0) {
+                        DecoderSpecificInfo =
+                            (M4OSA_Int8*)M4OSA_32bitAlignedMalloc(
+                            decoderSpecificInfoSize, M4READER_3GP,
+                            (M4OSA_Char*)"H264 DecoderSpecific" );
+                        if (M4OSA_NULL == DecoderSpecificInfo) {
+                            ALOGV("VideoEditor3gp_getNextStream is NULL ");
+                            return M4ERR_ALLOC;
+                        }
+                        memcpy((void *)DecoderSpecificInfo,
+                            (void *)data, decoderSpecificInfoSize);
+                    } else {
+                        ALOGV("DSI Size %d", decoderSpecificInfoSize);
+                        DecoderSpecificInfo = M4OSA_NULL;
+                    }
+                }
+                (*pStreamHandler)->m_pESDSInfo = M4OSA_NULL;
+                (*pStreamHandler)->m_ESDSInfoSize = 0;
+
+                err = VideoEditor3gpReader_AnalyseAvcDsi(*pStreamHandler,
+                    (M4OSA_Int32*)DecoderSpecificInfo, decoderSpecificInfoSize);
+
+                if (M4NO_ERROR != err) {
+                    return err;
+                }
+                ALOGV("decsize %d, h264decsize %d",
+                    (*pStreamHandler)->m_decoderSpecificInfoSize,
+                    (*pStreamHandler)->m_H264decoderSpecificInfoSize);
+
+                if(M4OSA_NULL != DecoderSpecificInfo) {
+                    free(DecoderSpecificInfo);
+                    DecoderSpecificInfo = M4OSA_NULL;
+                }
+            } else if( (M4DA_StreamTypeVideoMpeg4 == streamType) ) {
+                if (meta->findData(kKeyESDS, &type, &data, &size)) {
+                    ESDS esds((const char *)data, size);
+                    CHECK_EQ(esds.InitCheck(), (status_t)OK);
+
+                    (*pStreamHandler)->m_ESDSInfoSize = size;
+                    (*pStreamHandler)->m_pESDSInfo =
+                        (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(
+                        (*pStreamHandler)->m_ESDSInfoSize,
+                        M4READER_3GP, (M4OSA_Char*)"M4V DecoderSpecific" );
+                    if (M4OSA_NULL == (*pStreamHandler)->m_pESDSInfo) {
+                        return M4ERR_ALLOC;
+                    }
+                    memcpy((void *)(*pStreamHandler)->m_pESDSInfo,
+                        (void *)data, size);
+
+                    esds.getCodecSpecificInfo(&codec_specific_data,
+                        &codec_specific_data_size);
+                    ALOGV("VE MP4 dsisize: %d, %x", codec_specific_data_size,
+                        codec_specific_data);
+
+                    (*pStreamHandler)->m_decoderSpecificInfoSize =
+                        codec_specific_data_size;
+                    if ((*pStreamHandler)->m_decoderSpecificInfoSize != 0) {
+                        DecoderSpecific =
+                            (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(
+                            (*pStreamHandler)->m_decoderSpecificInfoSize,
+                            M4READER_3GP, (M4OSA_Char*)" DecoderSpecific" );
+                        if (M4OSA_NULL == DecoderSpecific) {
+                            return M4ERR_ALLOC;
+                        }
+                        memcpy((void *)DecoderSpecific,
+                            (void *)codec_specific_data,
+                            codec_specific_data_size);
+                        (*pStreamHandler)->m_pDecoderSpecificInfo =
+                            DecoderSpecific;
+                    }
+                    else {
+                        (*pStreamHandler)->m_pDecoderSpecificInfo =
+                            M4OSA_NULL;
+                    }
+                    (*pStreamHandler)->m_pH264DecoderSpecificInfo =
+                        M4OSA_NULL;
+                    (*pStreamHandler)->m_H264decoderSpecificInfoSize = 0;
+                }
+            } else {
+                ALOGV("VideoEditor3gpReader_getNextStream NO video stream");
+                return M4ERR_READER_UNKNOWN_STREAM_TYPE;
+            }
+            }
+            else {
+                ALOGV("VideoEditor3gpReader_getNextStream NO video stream");
+                return M4ERR_READER_UNKNOWN_STREAM_TYPE;
+            }
+
+        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
+            ALOGV("VideoEditor3gpReader_getNextStream audio getTrack called");
+            pC->mAudioSource = pC->mExtractor->getTrack(pC->mCurrTrack);
+            pC->mAudioSource->start();
+            *pMediaFamily = M4READER_kMediaFamilyAudio;
+
+            if(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
+                streamType = M4DA_StreamTypeAudioAmrNarrowBand;
+            } else if(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
+                streamType = M4DA_StreamTypeAudioAmrWideBand;
+            }
+            else if(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
+                streamType = M4DA_StreamTypeAudioAac;
+            } else {
+                ALOGV("VideoEditor3gpReader_getNextStream streamtype Unknown ");
+            }
+            if(streamType != M4DA_StreamTypeUnknown) {
+                pC->mStreamType = streamType;
+                pC->mStreamId = pC->mCurrTrack;
+
+                ALOGV("VE streamtype %d ,id %d", streamType, pC->mCurrTrack);
+
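+                /* (allocate and initialise the audio stream handler here; for
+                    an AMR track that carries no ESDS atom, a 9-byte "PHLP" DSI
+                    is synthesised further down, its last byte selecting
+                    narrow-band (0x01) or wide-band (0x02)) */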
pAudioStreamHandler = (M4_AudioStreamHandler*)M4OSA_32bitAlignedMalloc + (sizeof(M4_AudioStreamHandler), M4READER_3GP, + (M4OSA_Char*)"M4_AudioStreamHandler"); + if (M4OSA_NULL == pAudioStreamHandler) { + return M4ERR_ALLOC; + } + pAudioStreamHandler->m_structSize=sizeof(M4_AudioStreamHandler); + pAudioStreamHandler->m_byteSampleSize = 0; + pAudioStreamHandler->m_nbChannels = 0; + pAudioStreamHandler->m_samplingFrequency= 0; + pAudioStreamHandler->m_byteFrameLength = 0; + + (*pStreamHandler) = (M4_StreamHandler*)(pAudioStreamHandler); + pC->mAudioStreamHandler = + (M4_StreamHandler*)(pAudioStreamHandler); + (*pStreamHandler)->m_averageBitRate = 0; + haveAudio = true; + pC->mAudioStreamHandler=(M4_StreamHandler*)pAudioStreamHandler; + pC->mAudioStreamHandler->m_pESDSInfo = M4OSA_NULL; + pC->mAudioStreamHandler->m_ESDSInfoSize = 0; + + meta->findInt32(kKeyMaxInputSize, (int32_t*)&(maxAUSize)); + if(maxAUSize == 0) { + maxAUSize = 70000; + } + (*pStreamHandler)->m_maxAUSize = maxAUSize; + ALOGV("VE Audio mMaxAUSize from MP4 extractor: %d", maxAUSize); + } + if((M4DA_StreamTypeAudioAmrNarrowBand == streamType) || + (M4DA_StreamTypeAudioAmrWideBand == streamType)) { + M4OSA_UInt32 freqIndex = 0; /**< AMR NB */ + M4OSA_UInt32 modeSet; + M4OSA_UInt32 i; + M4OSA_Context pBitParserContext = M4OSA_NULL; + + if(M4DA_StreamTypeAudioAmrWideBand == streamType) { + freqIndex = 1; /**< AMR WB */ + } + + if (meta->findData(kKeyESDS, &type, &data, &size)) { + ESDS esds((const char *)data, size); + CHECK_EQ(esds.InitCheck(), (status_t)OK); + + esds.getCodecSpecificInfo(&codec_specific_data, + &codec_specific_data_size); + (*pStreamHandler)->m_decoderSpecificInfoSize = + codec_specific_data_size; + + if ((*pStreamHandler)->m_decoderSpecificInfoSize != 0) { + DecoderSpecific = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( + (*pStreamHandler)->m_decoderSpecificInfoSize, + M4READER_3GP, (M4OSA_Char*)"AMR DecoderSpecific" ); + if (M4OSA_NULL == DecoderSpecific) { + return M4ERR_ALLOC; + } + memcpy((void *)DecoderSpecific, + (void *)codec_specific_data, + codec_specific_data_size); + (*pStreamHandler)->m_pDecoderSpecificInfo = + DecoderSpecific; + } else { + (*pStreamHandler)->m_pDecoderSpecificInfo = M4OSA_NULL; + } + } else { + M4OSA_UChar AmrDsi[] = + {'P','H','L','P',0x00, 0x00, 0x80, 0x00, 0x01,}; + (*pStreamHandler)->m_decoderSpecificInfoSize = 9; + DecoderSpecific = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( + (*pStreamHandler)->m_decoderSpecificInfoSize, + M4READER_3GP, (M4OSA_Char*)"PHLP DecoderSpecific" ); + if (M4OSA_NULL == DecoderSpecific) { + return M4ERR_ALLOC; + } + if(freqIndex ==0) { + AmrDsi[8] = 0x01; + } else { + AmrDsi[8] = 0x02; + } + for(i = 0; i< 9; i++) { + DecoderSpecific[i] = AmrDsi[i]; + } + (*pStreamHandler)->m_pDecoderSpecificInfo = DecoderSpecific; + } + (*pStreamHandler)->m_averageBitRate = + VideoEditor3gpReader_AmrBitRate[freqIndex][7]; + } else if((M4DA_StreamTypeAudioAac == streamType)) { + if (meta->findData(kKeyESDS, &type, &data, &size)) { + ESDS esds((const char *)data, size); + CHECK_EQ(esds.InitCheck(), (status_t)OK); + + (*pStreamHandler)->m_ESDSInfoSize = size; + (*pStreamHandler)->m_pESDSInfo = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( + (*pStreamHandler)->m_ESDSInfoSize, M4READER_3GP, + (M4OSA_Char*)"AAC DecoderSpecific" ); + if (M4OSA_NULL == (*pStreamHandler)->m_pESDSInfo) { + return M4ERR_ALLOC; + } + memcpy((void *)(*pStreamHandler)->m_pESDSInfo, + (void *)data, size); + esds.getCodecSpecificInfo(&codec_specific_data, + &codec_specific_data_size); + + ALOGV("VEdsi 
%d,%x",codec_specific_data_size, + codec_specific_data); + + (*pStreamHandler)->m_decoderSpecificInfoSize = + codec_specific_data_size; + if ((*pStreamHandler)->m_decoderSpecificInfoSize != 0) { + DecoderSpecific = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( + (*pStreamHandler)->m_decoderSpecificInfoSize, + M4READER_3GP, (M4OSA_Char*)"AAC DecoderSpecific" ); + if (M4OSA_NULL == DecoderSpecific) { + return M4ERR_ALLOC; + } + memcpy((void *)DecoderSpecific, + (void *)codec_specific_data, + codec_specific_data_size); + (*pStreamHandler)->m_pDecoderSpecificInfo = + DecoderSpecific; + } else { + (*pStreamHandler)->m_pDecoderSpecificInfo = M4OSA_NULL; + } + } + } else { + ALOGV("VideoEditor3gpReader_getNextStream mStreamType: none "); + return M4ERR_READER_UNKNOWN_STREAM_TYPE; + } + } else { + ALOGV("VE noaudio-video stream:pC->mCurrTrack = %d ",pC->mCurrTrack); + pC->mCurrTrack++; //Increment current track to get the next track + return M4ERR_READER_UNKNOWN_STREAM_TYPE; + } + ALOGV("VE StreamType: %d, stremhandler %x",streamType, *pStreamHandler ); + (*pStreamHandler)->m_streamType = streamType; + (*pStreamHandler)->m_streamId = pC->mStreamId; + (*pStreamHandler)->m_pUserData = M4OSA_NULL; + (*pStreamHandler)->m_structSize = sizeof(M4_StreamHandler); + (*pStreamHandler)->m_bStreamIsOK = M4OSA_TRUE; + + meta->findInt64(kKeyDuration, + (int64_t*)&(Duration)); + + (*pStreamHandler)->m_duration = (int32_t)(Duration / 1000); + + pC->mMaxDuration = ((*pStreamHandler)->m_duration); + ALOGV("VE str duration duration: %d ", (*pStreamHandler)->m_duration); + + /* In AAC case: Put the first AU in pAudioStreamHandler->m_pUserData + *since decoder has to know if stream contains SBR data(Implicit sig) */ + if(M4DA_StreamTypeAudioAac == (*pStreamHandler)->m_streamType) { + M4READER_AudioSbrUserdata* pAudioSbrUserdata; + + pAudioSbrUserdata = (M4READER_AudioSbrUserdata*)M4OSA_32bitAlignedMalloc( + sizeof(M4READER_AudioSbrUserdata),M4READER_3GP, + (M4OSA_Char*)"M4READER_AudioSbrUserdata"); + if (M4OSA_NULL == pAudioSbrUserdata) { + err = M4ERR_ALLOC; + goto Error; + } + (*pStreamHandler)->m_pUserData = pAudioSbrUserdata; + pAudioSbrUserdata->m_bIsSbrEnabled = M4OSA_FALSE; + + pAudioSbrUserdata->m_pFirstAU = (M4_AccessUnit*)M4OSA_32bitAlignedMalloc( + sizeof(M4_AccessUnit),M4READER_3GP, (M4OSA_Char*)"1st AAC AU"); + if (M4OSA_NULL == pAudioSbrUserdata->m_pFirstAU) { + pAudioSbrUserdata->m_pAacDecoderUserConfig = M4OSA_NULL; + err = M4ERR_ALLOC; + goto Error; + } + pAudioSbrUserdata->m_pAacDecoderUserConfig = (M4_AacDecoderConfig*)\ + M4OSA_32bitAlignedMalloc(sizeof(M4_AacDecoderConfig),M4READER_3GP, + (M4OSA_Char*)"m_pAacDecoderUserConfig"); + if (M4OSA_NULL == pAudioSbrUserdata->m_pAacDecoderUserConfig) { + err = M4ERR_ALLOC; + goto Error; + } + } + if(M4DA_StreamTypeAudioAac == (*pStreamHandler)->m_streamType) { + M4_AudioStreamHandler* pAudioStreamHandler = + (M4_AudioStreamHandler*)(*pStreamHandler); + M4READER_AudioSbrUserdata* pUserData = (M4READER_AudioSbrUserdata*)\ + (pAudioStreamHandler->m_basicProperties.m_pUserData); + + err = VideoEditor3gpReader_fillAuStruct(pC, (*pStreamHandler), + (M4_AccessUnit*)pUserData->m_pFirstAU); + if (M4NO_ERROR != err) { + goto Error; + } + err = VideoEditor3gpReader_getNextAu(pC, (*pStreamHandler), + (M4_AccessUnit*)pUserData->m_pFirstAU); + + /* + * 1. "M4WAR_NO_MORE_AU == err" indicates that there is no more + * access unit from the current track. 
In other words, there + * is only a single access unit from the current track, and + * the parsing of this track has reached EOS. The reason why + * the first access unit needs to be parsed here is because for + * some audio codec (like AAC), the very first access unit + * must be decoded before its configuration/encoding parameters + * (such as # of channels and sample rate) can be correctly + * determined. + * + * 2. "trackCount > pC->mCurrTrack" indicates that there are other + * tracks to be parsed, in addition to the current track. + * + * When both conditions 1 & 2 hold, other tracks should be + * parsed. Thus, we should not bail out. + */ + if (M4WAR_NO_MORE_AU == err && trackCount > pC->mCurrTrack) { + err = M4NO_ERROR; + } + + if (M4NO_ERROR != err) { + goto Error; + } + err = VideoEditor3gpReader_reset(pC, (*pStreamHandler)); + if (M4NO_ERROR != err) { + goto Error; + } + } + } + pC->mCurrTrack++; //Increment the current track to get next track + ALOGV("pC->mCurrTrack = %d",pC->mCurrTrack); + + if (!haveAudio && !haveVideo) { + *pMediaFamily=M4READER_kMediaFamilyUnknown; + return M4ERR_READER_UNKNOWN_STREAM_TYPE; + } +Error: + ALOGV("VideoEditor3gpReader_getNextStreamHandler end error = %d",err); + return err; +} + +M4OSA_ERR VideoEditor3gpReader_getPrevRapTime(M4OSA_Context context, + M4_StreamHandler *pStreamHandler, M4OSA_Int32* pTime) +{ + VideoEditor3gpReader_Context *pC = (VideoEditor3gpReader_Context*)context; + M4OSA_ERR err = M4NO_ERROR; + MediaBuffer *mMediaBuffer = M4OSA_NULL; + MediaSource::ReadOptions options; + M4OSA_Time time64; + int64_t tempTime64 = 0; + status_t error; + + ALOGV("VideoEditor3gpReader_getPrevRapTime begin"); + + M4OSA_DEBUG_IF1((pC == 0), M4ERR_PARAMETER, + "VideoEditor3gpReader_getPrevRapTime: invalid context"); + M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER, + "VideoEditor3gpReader_getPrevRapTime invalid pointer to StreamHandler"); + M4OSA_DEBUG_IF1((pTime == 0), M4ERR_PARAMETER, + "VideoEditor3gpReader_getPrevRapTime: invalid time pointer"); + if (*pTime == (pStreamHandler->m_duration)) { + *pTime -= 1; + } + + time64 = (M4OSA_Time)*pTime * 1000; + + ALOGV("VideoEditor3gpReader_getPrevRapTime seek time: %ld",time64); + options.setSeekTo(time64, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC); + error = pC->mVideoSource->read(&mMediaBuffer, &options); + if (error != OK) { + //Can not get the previous Sync. + //Must be end of stream. 
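+        //(the read above asked for SEEK_PREVIOUS_SYNC, so a failure here
+        // means no sync sample exists at or before the requested time and
+        // the stream is treated as exhausted)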
+ return M4WAR_NO_MORE_AU; + } + + mMediaBuffer->meta_data()->findInt64(kKeyTime, (int64_t*)&tempTime64); + ALOGV("VideoEditor3gpReader_getPrevRapTime read time %ld, %x", tempTime64, + mMediaBuffer); + + *pTime = (M4OSA_Int32)(tempTime64 / 1000); + + if(mMediaBuffer != M4OSA_NULL) { + ALOGV(" mMediaBuffer size = %d length %d", mMediaBuffer->size(), + mMediaBuffer->range_length()); + mMediaBuffer->release(); + mMediaBuffer = M4OSA_NULL; + } + options.clearSeekTo(); + + if(error != OK) { + ALOGV("VideoEditor3gpReader_getPrevRapTime end \ + M4WAR_READER_INFORMATION_NOT_PRESENT"); + return M4WAR_READER_INFORMATION_NOT_PRESENT; + } else { + ALOGV("VideoEditor3gpReader_getPrevRapTime end: err %x", err); + err = M4NO_ERROR; + return err; + } +} + +extern "C" { +M4OSA_ERR VideoEditor3gpReader_getInterface(M4READER_MediaType *pMediaType, + M4READER_GlobalInterface **pRdrGlobalInterface, + M4READER_DataInterface **pRdrDataInterface) { + + M4OSA_ERR err = M4NO_ERROR; + + VIDEOEDITOR_CHECK(M4OSA_NULL != pMediaType, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pRdrGlobalInterface, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pRdrDataInterface, M4ERR_PARAMETER); + + ALOGV("VideoEditor3gpReader_getInterface begin"); + ALOGV("VideoEditor3gpReader_getInterface %d 0x%x 0x%x", *pMediaType, + *pRdrGlobalInterface,*pRdrDataInterface); + + SAFE_MALLOC(*pRdrGlobalInterface, M4READER_GlobalInterface, 1, + "VideoEditor3gpReader_getInterface"); + SAFE_MALLOC(*pRdrDataInterface, M4READER_DataInterface, 1, + "VideoEditor3gpReader_getInterface"); + + *pMediaType = M4READER_kMediaType3GPP; + + (*pRdrGlobalInterface)->m_pFctCreate = VideoEditor3gpReader_create; + (*pRdrGlobalInterface)->m_pFctDestroy = VideoEditor3gpReader_destroy; + (*pRdrGlobalInterface)->m_pFctOpen = VideoEditor3gpReader_open; + (*pRdrGlobalInterface)->m_pFctClose = VideoEditor3gpReader_close; + (*pRdrGlobalInterface)->m_pFctGetOption = VideoEditor3gpReader_getOption; + (*pRdrGlobalInterface)->m_pFctSetOption = VideoEditor3gpReader_setOption; + (*pRdrGlobalInterface)->m_pFctGetNextStream = + VideoEditor3gpReader_getNextStreamHandler; + (*pRdrGlobalInterface)->m_pFctFillAuStruct = + VideoEditor3gpReader_fillAuStruct; + (*pRdrGlobalInterface)->m_pFctStart = M4OSA_NULL; + (*pRdrGlobalInterface)->m_pFctStop = M4OSA_NULL; + (*pRdrGlobalInterface)->m_pFctJump = VideoEditor3gpReader_jump; + (*pRdrGlobalInterface)->m_pFctReset = VideoEditor3gpReader_reset; + (*pRdrGlobalInterface)->m_pFctGetPrevRapTime = + VideoEditor3gpReader_getPrevRapTime; + (*pRdrDataInterface)->m_pFctGetNextAu = VideoEditor3gpReader_getNextAu; + (*pRdrDataInterface)->m_readerContext = M4OSA_NULL; + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditor3gpReader_getInterface no error"); + } else { + SAFE_FREE(*pRdrGlobalInterface); + SAFE_FREE(*pRdrDataInterface); + + ALOGV("VideoEditor3gpReader_getInterface ERROR 0x%X", err); + } + ALOGV("VideoEditor3gpReader_getInterface end"); + return err; +} + +} /* extern "C" */ + +} /* namespace android */ + + diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp new file mode 100644 index 0000000..9b35d07 --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp @@ -0,0 +1,991 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+*************************************************************************
+* @file   VideoEditorAudioDecoder.cpp
+* @brief  StageFright shell Audio Decoder
+*************************************************************************
+*/
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "VIDEOEDITOR_AUDIODECODER"
+
+#include "M4OSA_Debug.h"
+#include "VideoEditorAudioDecoder.h"
+#include "VideoEditorUtils.h"
+#include "M4MCS_InternalTypes.h"
+
+#include "utils/Log.h"
+#include "utils/Vector.h"
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/OMXCodec.h>
+
+/********************
+ *   DEFINITIONS    *
+ ********************/
+// Version
+#define VIDEOEDITOR_AUDIO_DECODER_VERSION_MAJOR 1
+#define VIDEOEDITOR_AUDIO_DECODER_VERSION_MINOR 0
+#define VIDEOEDITOR_AUDIO_DECODER_VERSION_REV   0
+
+// Force using the software decoder as the engine does not support prefetch
+#define VIDEOEDITOR_FORCECODEC kSoftwareCodecsOnly
+
+namespace android {
+
+struct VideoEditorAudioDecoderSource : public MediaSource {
+    public:
+        static sp<VideoEditorAudioDecoderSource> Create(
+                const sp<MetaData>& format, void *decoderShellContext);
+        virtual status_t start(MetaData *params = NULL);
+        virtual status_t stop();
+        virtual sp<MetaData> getFormat();
+        virtual status_t read(MediaBuffer **buffer,
+                const ReadOptions *options = NULL);
+        virtual void storeBuffer(MediaBuffer *buffer);
+
+    protected:
+        virtual ~VideoEditorAudioDecoderSource();
+
+    private:
+        enum State {
+            CREATED,
+            STARTED,
+            ERROR
+        };
+        VideoEditorAudioDecoderSource(const sp<MetaData>& format,
+                void *decoderShellContext);
+        sp<MetaData> mFormat;
+        Vector<MediaBuffer*> mBuffers;
+        Mutex mLock;  // protects mBuffers
+        bool mIsEOS;
+        State mState;
+        void* mDecShellContext;
+        // Don't call me. 
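+        // (standard pre-C++11 idiom: declared private and never defined, so
+        //  any attempt to copy the source fails to compile or link)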
+        VideoEditorAudioDecoderSource(const VideoEditorAudioDecoderSource&);
+        VideoEditorAudioDecoderSource& operator=(
+                const VideoEditorAudioDecoderSource &);
+};
+
+/**
+ ******************************************************************************
+ * structure VideoEditorAudioDecoder_Context
+ * @brief   This structure defines the context of the StageFright audio
+ *          decoder shell
+ ******************************************************************************
+*/
+
+typedef struct {
+    M4AD_Type mDecoderType;
+    M4_AudioStreamHandler* mAudioStreamHandler;
+    sp<VideoEditorAudioDecoderSource> mDecoderSource;
+    OMXClient mClient;
+    sp<MediaSource> mDecoder;
+    int32_t mNbOutputChannels;
+    uint32_t mNbInputFrames;
+    uint32_t mNbOutputFrames;
+    M4READER_DataInterface *m_pReader;
+    M4_AccessUnit* m_pNextAccessUnitToDecode;
+    M4OSA_ERR readerErrCode;
+    int32_t timeStampMs;
+
+} VideoEditorAudioDecoder_Context;
+
+sp<VideoEditorAudioDecoderSource> VideoEditorAudioDecoderSource::Create(
+        const sp<MetaData>& format, void *decoderShellContext) {
+
+    sp<VideoEditorAudioDecoderSource> aSource =
+        new VideoEditorAudioDecoderSource(format, decoderShellContext);
+
+    return aSource;
+}
+
+VideoEditorAudioDecoderSource::VideoEditorAudioDecoderSource(
+        const sp<MetaData>& format, void* decoderShellContext):
+        mFormat(format),
+        mIsEOS(false),
+        mState(CREATED),
+        mDecShellContext(decoderShellContext) {
+}
+
+VideoEditorAudioDecoderSource::~VideoEditorAudioDecoderSource() {
+
+    if( STARTED == mState ) {
+        stop();
+    }
+}
+
+status_t VideoEditorAudioDecoderSource::start(MetaData *meta) {
+    status_t err = OK;
+
+    if( CREATED != mState ) {
+        ALOGV("VideoEditorAudioDecoderSource::start: invalid state %d", mState);
+        return UNKNOWN_ERROR;
+    }
+
+    mState = STARTED;
+
+cleanUp:
+    ALOGV("VideoEditorAudioDecoderSource::start END (0x%x)", err);
+    return err;
+}
+
+status_t VideoEditorAudioDecoderSource::stop() {
+    Mutex::Autolock autolock(mLock);
+    status_t err = OK;
+
+    ALOGV("VideoEditorAudioDecoderSource::stop begin");
+
+    if( STARTED != mState ) {
+        ALOGV("VideoEditorAudioDecoderSource::stop: invalid state %d", mState);
+        return UNKNOWN_ERROR;
+    }
+
+    if (!mBuffers.empty()) {
+        int n = mBuffers.size();
+        for (int i = 0; i < n; i++) {
+            mBuffers.itemAt(i)->release();
+        }
+        ALOGW("VideoEditorAudioDecoderSource::stop : %d buffers remained", n);
+        mBuffers.clear();
+    }
+
+    mState = CREATED;
+
+    ALOGV("VideoEditorAudioDecoderSource::stop END (0x%x)", err);
+    return err;
+}
+
+sp<MetaData> VideoEditorAudioDecoderSource::getFormat() {
+
+    ALOGV("VideoEditorAudioDecoderSource::getFormat");
+    return mFormat;
+}
+
+static MediaBuffer* readBufferFromReader(
+        VideoEditorAudioDecoder_Context* pDecContext) {
+    M4OSA_ERR lerr = M4NO_ERROR;
+    M4_AccessUnit* pAccessUnit = pDecContext->m_pNextAccessUnitToDecode;
+
+    // Get next AU from reader. 
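+    // (the decoder source owns no parser of its own: encoded AUs are pulled
+    //  through the reader interface's m_pFctGetNextAu hook, and
+    //  M4WAR_NO_MORE_AU from the hook is mapped to a NULL buffer, i.e.
+    //  end of stream for the codec)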
+    lerr = pDecContext->m_pReader->m_pFctGetNextAu(
+        pDecContext->m_pReader->m_readerContext,
+        (M4_StreamHandler*)pDecContext->mAudioStreamHandler,
+        pAccessUnit);
+
+    if (lerr == M4WAR_NO_MORE_AU) {
+        ALOGV("readBufferFromReader : EOS");
+        return NULL;
+    }
+
+    pDecContext->timeStampMs = pAccessUnit->m_CTS;
+
+    MediaBuffer* newBuffer = new MediaBuffer((size_t)pAccessUnit->m_size);
+    memcpy((void *)((M4OSA_Int8*)newBuffer->data() + newBuffer->range_offset()),
+        (void *)pAccessUnit->m_dataAddress, pAccessUnit->m_size);
+    newBuffer->meta_data()->setInt64(kKeyTime, (pAccessUnit->m_CTS * 1000LL));
+    return newBuffer;
+}
+
+status_t VideoEditorAudioDecoderSource::read(MediaBuffer **buffer,
+        const ReadOptions *options) {
+    Mutex::Autolock autolock(mLock);
+    MediaSource::ReadOptions readOptions;
+
+    VideoEditorAudioDecoder_Context* pDecContext =
+        (VideoEditorAudioDecoder_Context *)mDecShellContext;
+
+    if ( STARTED != mState ) {
+        ALOGV("VideoEditorAudioDecoderSource::read invalid state %d", mState);
+        return UNKNOWN_ERROR;
+    }
+
+    // Get a buffer from the reader if we don't have any
+    if(mBuffers.empty()) {
+        MediaBuffer* newBuffer = readBufferFromReader(pDecContext);
+        if (!newBuffer) {
+            *buffer = NULL;
+            pDecContext->readerErrCode = M4WAR_NO_MORE_AU;
+            return ERROR_END_OF_STREAM;
+        }
+        mBuffers.push(newBuffer);
+    }
+    *buffer = mBuffers.itemAt(0);
+    mBuffers.removeAt(0);
+
+    return OK;
+}
+
+void VideoEditorAudioDecoderSource::storeBuffer(MediaBuffer *buffer) {
+    Mutex::Autolock autolock(mLock);
+    VideoEditorAudioDecoder_Context* pDecContext =
+        (VideoEditorAudioDecoder_Context *)mDecShellContext;
+
+    ALOGV("VideoEditorAudioDecoderSource::storeBuffer begin");
+
+    // If the user didn't give us a buffer, get it from the reader.
+    if(buffer == NULL) {
+        MediaBuffer* newBuffer = readBufferFromReader(pDecContext);
+        if (!newBuffer) {
+            pDecContext->readerErrCode = M4WAR_NO_MORE_AU;
+            return;
+        }
+        buffer = newBuffer;
+    }
+
+    mBuffers.push(buffer);
+    ALOGV("VideoEditorAudioDecoderSource::storeBuffer END");
+}
+
+/********************
+ *      TOOLS       *
+ ********************/
+
+M4OSA_ERR VideoEditorAudioDecoder_getBits(M4OSA_Int8* pData,
+    M4OSA_UInt32 dataSize, M4OSA_UInt8 nbBits, M4OSA_Int32* pResult,
+    M4OSA_UInt32* pOffset) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+    M4OSA_UInt32 startByte = 0;
+    M4OSA_UInt32 startBit = 0;
+    M4OSA_UInt32 endByte = 0;
+    M4OSA_UInt32 endBit = 0;
+    M4OSA_UInt32 currentByte = 0;
+    M4OSA_UInt32 result = 0;
+    M4OSA_UInt32 ui32Tmp = 0;
+    M4OSA_UInt32 ui32Mask = 0;
+
+    // Input parameters check
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pData, M4ERR_PARAMETER);
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pOffset, M4ERR_PARAMETER);
+    VIDEOEDITOR_CHECK(32 >= nbBits, M4ERR_PARAMETER);
+    VIDEOEDITOR_CHECK((*pOffset + nbBits) <= 8*dataSize, M4ERR_PARAMETER);
+
+    ALOGV("VideoEditorAudioDecoder_getBits begin");
+
+    startByte   = (*pOffset) >> 3;
+    endByte     = (*pOffset + nbBits) >> 3;
+    startBit    = (*pOffset) % 8;
+    endBit      = (*pOffset + nbBits) % 8;
+    currentByte = startByte;
+
+    // Extract the requested number of bits from memory
+    while( currentByte <= endByte) {
+        ui32Mask = 0x000000FF;
+        if( currentByte == startByte ) {
+            ui32Mask >>= startBit;
+        }
+        ui32Tmp = ui32Mask & ((M4OSA_UInt32)pData[currentByte]);
+        if( currentByte == endByte ) {
+            ui32Tmp >>= (8-endBit);
+            result <<= endBit;
+        } else {
+            result <<= 8;
+        }
+        result |= ui32Tmp;
+        currentByte++;
+    }
+
+    *pResult = result;
+    *pOffset += nbBits;
+
+cleanUp:
+    if( M4NO_ERROR == err ) {
+        ALOGV("VideoEditorAudioDecoder_getBits no error");
+    } else {
ALOGV("VideoEditorAudioDecoder_getBits ERROR 0x%X", err); + } + ALOGV("VideoEditorAudioDecoder_getBits end"); + return err; +} + + +#define FREQ_TABLE_SIZE 16 +const M4OSA_UInt32 AD_AAC_FREQ_TABLE[FREQ_TABLE_SIZE] = + {96000, 88200, 64000, 48000, 44100, + 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350, 0, 0, 0}; + + +M4OSA_ERR VideoEditorAudioDecoder_parse_AAC_DSI(M4OSA_Int8* pDSI, + M4OSA_UInt32 dsiSize, AAC_DEC_STREAM_PROPS* pProperties) { + + M4OSA_ERR err = M4NO_ERROR; + M4OSA_UInt32 offset = 0; + M4OSA_Int32 result = 0; + + ALOGV("VideoEditorAudioDecoder_parse_AAC_DSI begin"); + + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pDSI, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pProperties, M4ERR_PARAMETER); + + // Get the object type + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 5, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + switch( result ) { + case 2: + /* Audio Object Type is 2 (AAC Low Complexity) */ + pProperties->aPSPresent = 0; + pProperties->aSBRPresent = 0; + break; + case 5: + /* Audio Object Type is 5 (Spectral Band Replication) */ + pProperties->aPSPresent = 0; + pProperties->aSBRPresent = 1; + break; + case 29: + /* Audio Object Type is 29 (Parametric Stereo) */ + pProperties->aPSPresent = 1; + pProperties->aSBRPresent = 1; + break; + default: + ALOGV("parse_AAC_DSI ERROR : object type %d is not supported", + result); + VIDEOEDITOR_CHECK(!"invalid AAC object type", M4ERR_BAD_OPTION_ID); + break; + } + pProperties->aAudioObjectType = (M4OSA_Int32)result; + + // Get the frequency index + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 4, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + VIDEOEDITOR_CHECK((0 <= result) && (FREQ_TABLE_SIZE > result), + M4ERR_PARAMETER); + pProperties->aSampFreq = AD_AAC_FREQ_TABLE[result]; + pProperties->aExtensionSampFreq = 0; + + // Get the number of channels + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 4, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + pProperties->aNumChan = (M4OSA_UInt32)result; + + // Set the max PCM samples per channel + pProperties->aMaxPCMSamplesPerCh = (pProperties->aSBRPresent) ? 
2048 : 1024;
+
+cleanUp:
+    if( M4NO_ERROR == err ) {
+        ALOGV("VideoEditorAudioDecoder_parse_AAC_DSI no error");
+    } else {
+        ALOGV("VideoEditorAudioDecoder_parse_AAC_DSI ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorAudioDecoder_parse_AAC_DSI end");
+    return err;
+}
+
+/********************
+ * ENGINE INTERFACE *
+ ********************/
+
+M4OSA_ERR VideoEditorAudioDecoder_destroy(M4AD_Context pContext) {
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL;
+
+    ALOGV("VideoEditorAudioDecoder_destroy begin");
+    // Input parameters check
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+
+    pDecoderContext = (VideoEditorAudioDecoder_Context*)pContext;
+
+    // Stop the graph
+    if( M4OSA_NULL != pDecoderContext->mDecoder.get() ) {
+        pDecoderContext->mDecoder->stop();
+    }
+
+    // Destroy the graph
+    pDecoderContext->mDecoderSource.clear();
+    pDecoderContext->mDecoder.clear();
+    pDecoderContext->mClient.disconnect();
+
+    SAFE_FREE(pDecoderContext);
+    pContext = M4OSA_NULL;
+    ALOGV("VideoEditorAudioDecoder_destroy : DONE");
+
+cleanUp:
+    if( M4NO_ERROR == err ) {
+        ALOGV("VideoEditorAudioDecoder_destroy no error");
+    } else {
+        ALOGV("VideoEditorAudioDecoder_destroy ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorAudioDecoder_destroy : end");
+    return err;
+}
+
+M4OSA_ERR VideoEditorAudioDecoder_create(M4AD_Type decoderType,
+        M4AD_Context* pContext, M4_AudioStreamHandler* pStreamHandler,
+        void* pUserData) {
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL;
+    AAC_DEC_STREAM_PROPS aacProperties;
+    status_t result = OK;
+    sp<MetaData> decoderMetaData = NULL;
+    const char* mime = NULL;
+    uint32_t codecFlags = 0;
+
+    ALOGV("VideoEditorAudioDecoder_create begin: decoderType %d", decoderType);
+
+    // Input parameters check
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pStreamHandler, M4ERR_PARAMETER);
+
+    // Context allocation & initialization
+    SAFE_MALLOC(pDecoderContext, VideoEditorAudioDecoder_Context, 1,
+        "AudioDecoder");
+    pDecoderContext->mDecoderType = decoderType;
+    pDecoderContext->mAudioStreamHandler = pStreamHandler;
+
+    pDecoderContext->mNbInputFrames  = 0;
+    pDecoderContext->mNbOutputFrames = 0;
+    pDecoderContext->readerErrCode = M4NO_ERROR;
+    pDecoderContext->timeStampMs = -1;
+
+    ALOGV("VideoEditorAudioDecoder_create : maxAUSize %d",
+        pDecoderContext->mAudioStreamHandler->m_basicProperties.m_maxAUSize);
+
+    // Create the meta data for the decoder
+    decoderMetaData = new MetaData;
+    switch( pDecoderContext->mDecoderType ) {
+        case M4AD_kTypeAMRNB:
+            // StageFright parameters
+            mime = MEDIA_MIMETYPE_AUDIO_AMR_NB;
+            // Engine parameters
+            pDecoderContext->mAudioStreamHandler->m_byteFrameLength = 160;
+            // Number of bytes per sample
+            pDecoderContext->mAudioStreamHandler->m_byteSampleSize = 2;
+            pDecoderContext->mAudioStreamHandler->m_samplingFrequency = 8000;
+            pDecoderContext->mAudioStreamHandler->m_nbChannels = 1;
+            break;
+
+        case M4AD_kTypeAMRWB:
+            // StageFright parameters
+            mime = MEDIA_MIMETYPE_AUDIO_AMR_WB;
+
+            pDecoderContext->mAudioStreamHandler->m_byteFrameLength = 160;
+            // Number of bytes per sample
+            pDecoderContext->mAudioStreamHandler->m_byteSampleSize = 2;
+            pDecoderContext->mAudioStreamHandler->m_samplingFrequency = 16000;
+            pDecoderContext->mAudioStreamHandler->m_nbChannels = 1;
+            break;
+
+        case M4AD_kTypeAAC:
+            // Reject ADTS & ADIF (or any incorrect type)
+            VIDEOEDITOR_CHECK(M4DA_StreamTypeAudioAac ==
pDecoderContext->mAudioStreamHandler->\ + m_basicProperties.m_streamType,M4ERR_PARAMETER); + + // StageFright parameters + mime = MEDIA_MIMETYPE_AUDIO_AAC; + + decoderMetaData->setData(kKeyESDS, kTypeESDS, + pStreamHandler->m_basicProperties.m_pESDSInfo, + pStreamHandler->m_basicProperties.m_ESDSInfoSize); + + // Engine parameters + // Retrieve sampling frequency and number of channels from the DSI + err = VideoEditorAudioDecoder_parse_AAC_DSI( + (M4OSA_Int8*)pStreamHandler->m_basicProperties.\ + m_pDecoderSpecificInfo, + pStreamHandler->m_basicProperties.m_decoderSpecificInfoSize, + &aacProperties); + + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + pDecoderContext->mAudioStreamHandler->m_byteFrameLength = 1024; + // Number of bytes per sample + pDecoderContext->mAudioStreamHandler->m_byteSampleSize = 2; + pDecoderContext->mAudioStreamHandler->m_samplingFrequency = + aacProperties.aSampFreq; + pDecoderContext->mAudioStreamHandler->m_nbChannels = + aacProperties.aNumChan; + + // Copy the stream properties into userdata + if( M4OSA_NULL != pUserData ) { + memcpy((void *)pUserData, + (void *)&aacProperties, + sizeof(AAC_DEC_STREAM_PROPS)); + } + break; + + case M4AD_kTypeMP3: + // StageFright parameters + mime = MEDIA_MIMETYPE_AUDIO_MPEG; + break; + + default: + VIDEOEDITOR_CHECK(!"AudioDecoder_open : incorrect input format", + M4ERR_STATE); + break; + } + decoderMetaData->setCString(kKeyMIMEType, mime); + decoderMetaData->setInt32(kKeySampleRate, + (int32_t)pDecoderContext->mAudioStreamHandler->m_samplingFrequency); + decoderMetaData->setInt32(kKeyChannelCount, + pDecoderContext->mAudioStreamHandler->m_nbChannels); + decoderMetaData->setInt64(kKeyDuration, + (int64_t)pDecoderContext->mAudioStreamHandler->\ + m_basicProperties.m_duration); + + // Create the decoder source + pDecoderContext->mDecoderSource = VideoEditorAudioDecoderSource::Create( + decoderMetaData, (void *)pDecoderContext); + VIDEOEDITOR_CHECK(NULL != pDecoderContext->mDecoderSource.get(), + M4ERR_STATE); + + // Connect to the OMX client + result = pDecoderContext->mClient.connect(); + VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); + + // Create the OMX codec +#ifdef VIDEOEDITOR_FORCECODEC + codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC; +#endif /* VIDEOEDITOR_FORCECODEC */ + + pDecoderContext->mDecoder = OMXCodec::Create(pDecoderContext->\ + mClient.interface(), + decoderMetaData, false, pDecoderContext->mDecoderSource, NULL, + codecFlags); + VIDEOEDITOR_CHECK(NULL != pDecoderContext->mDecoder.get(), M4ERR_STATE); + + // Get the output channels, the decoder might overwrite the input metadata + pDecoderContext->mDecoder->getFormat()->findInt32(kKeyChannelCount, + &pDecoderContext->mNbOutputChannels); + ALOGV("VideoEditorAudioDecoder_create : output chan %d", + pDecoderContext->mNbOutputChannels); + + // Start the decoder + result = pDecoderContext->mDecoder->start(); + VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); + + *pContext = pDecoderContext; + ALOGV("VideoEditorAudioDecoder_create : DONE"); + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorAudioDecoder_create no error"); + } else { + VideoEditorAudioDecoder_destroy(pDecoderContext); + *pContext = M4OSA_NULL; + ALOGV("VideoEditorAudioDecoder_create ERROR 0x%X", err); + } + return err; +} + +M4OSA_ERR VideoEditorAudioDecoder_create_AAC(M4AD_Context* pContext, + M4_AudioStreamHandler* pStreamHandler, void* pUserData) { + + return VideoEditorAudioDecoder_create( + M4AD_kTypeAAC, pContext, pStreamHandler,pUserData); +} + + +M4OSA_ERR 
VideoEditorAudioDecoder_create_AMRNB(M4AD_Context* pContext, + M4_AudioStreamHandler* pStreamHandler, void* pUserData) { + + return VideoEditorAudioDecoder_create( + M4AD_kTypeAMRNB, pContext, pStreamHandler, pUserData); +} + + +M4OSA_ERR VideoEditorAudioDecoder_create_AMRWB(M4AD_Context* pContext, + M4_AudioStreamHandler* pStreamHandler, void* pUserData) { + + return VideoEditorAudioDecoder_create( + M4AD_kTypeAMRWB, pContext, pStreamHandler, pUserData); +} + + +M4OSA_ERR VideoEditorAudioDecoder_create_MP3(M4AD_Context* pContext, + M4_AudioStreamHandler* pStreamHandler, void* pUserData) { + + return VideoEditorAudioDecoder_create( + M4AD_kTypeMP3, pContext, pStreamHandler, pUserData); +} + +M4OSA_ERR VideoEditorAudioDecoder_processInputBuffer( + M4AD_Context pContext, M4AD_Buffer* pInputBuffer) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL; + MediaBuffer* buffer = NULL; + + ALOGV("VideoEditorAudioDecoder_processInputBuffer begin"); + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + + + pDecoderContext = (VideoEditorAudioDecoder_Context*)pContext; + + if( M4OSA_NULL != pInputBuffer ) { + buffer = new MediaBuffer((size_t)pInputBuffer->m_bufferSize); + memcpy((void *)((M4OSA_Int8*)buffer->data() + buffer->range_offset()), + (void *)pInputBuffer->m_dataAddress, pInputBuffer->m_bufferSize); + buffer->meta_data()->setInt64(kKeyTime, pInputBuffer->m_timeStampUs); + } + pDecoderContext->mDecoderSource->storeBuffer(buffer); + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorAudioDecoder_processInputBuffer no error"); + } else { + ALOGV("VideoEditorAudioDecoder_processInputBuffer ERROR 0x%X", err); + } + ALOGV("VideoEditorAudioDecoder_processInputBuffer end"); + return err; +} + +M4OSA_ERR VideoEditorAudioDecoder_processOutputBuffer(M4AD_Context pContext, + MediaBuffer* buffer, M4AD_Buffer* pOuputBuffer) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL; + int32_t i32Tmp = 0; + int64_t i64Tmp = 0; + status_t result = OK; + + ALOGV("VideoEditorAudioDecoder_processOutputBuffer begin"); + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != buffer, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pOuputBuffer, M4ERR_PARAMETER); + + pDecoderContext = (VideoEditorAudioDecoder_Context*)pContext; + + // Process the returned data + if( 0 == buffer->range_length() ) { + // Decoder has no data yet, nothing unusual + goto cleanUp; + } + + pDecoderContext->mNbOutputFrames++; + + if( pDecoderContext->mAudioStreamHandler->m_nbChannels == + (M4OSA_UInt32)pDecoderContext->mNbOutputChannels ) { + // Just copy the PCMs + pOuputBuffer->m_bufferSize = (M4OSA_UInt32)buffer->range_length(); + memcpy((void *)pOuputBuffer->m_dataAddress, + (void *)(((M4OSA_MemAddr8)buffer->data())+buffer->range_offset()), + buffer->range_length()); + } else if( pDecoderContext->mAudioStreamHandler->m_nbChannels < + (M4OSA_UInt32)pDecoderContext->mNbOutputChannels ) { + // The decoder forces stereo output, downsample + pOuputBuffer->m_bufferSize = (M4OSA_UInt32)(buffer->range_length()/2); + M4OSA_Int16* pDataIn = ((M4OSA_Int16*)buffer->data()) + + buffer->range_offset(); + M4OSA_Int16* pDataOut = (M4OSA_Int16*)pOuputBuffer->m_dataAddress; + M4OSA_Int16* pDataEnd = pDataIn + \ + (buffer->range_length()/sizeof(M4OSA_Int16)); + while( pDataIn < pDataEnd ) { + *pDataOut = *pDataIn; + pDataIn+=2; + pDataOut++; + } + } else { + // 
The decoder forces mono output, not supported
+        VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER);
+    }
+
+cleanUp:
+    // Release the buffer
+    buffer->release();
+    if( M4NO_ERROR == err ) {
+        ALOGV("VideoEditorAudioDecoder_processOutputBuffer no error");
+    } else {
+        pOuputBuffer->m_bufferSize = 0;
+        ALOGV("VideoEditorAudioDecoder_processOutputBuffer ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorAudioDecoder_processOutputBuffer end");
+    return err;
+}
+
+M4OSA_ERR VideoEditorAudioDecoder_step(M4AD_Context pContext,
+        M4AD_Buffer* pInputBuffer, M4AD_Buffer* pOutputBuffer,
+        M4OSA_Bool bJump) {
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL;
+    status_t result = OK;
+    MediaBuffer* outputBuffer = NULL;
+
+    ALOGV("VideoEditorAudioDecoder_step begin");
+    // Input parameters check
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+
+    pDecoderContext = (VideoEditorAudioDecoder_Context*)pContext;
+    pDecoderContext->mNbInputFrames++;
+
+    // Push the input buffer to the decoder source
+    err = VideoEditorAudioDecoder_processInputBuffer(pDecoderContext,
+        pInputBuffer);
+    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
+
+    // Read
+    result = pDecoderContext->mDecoder->read(&outputBuffer, NULL);
+    if (INFO_FORMAT_CHANGED == result) {
+        ALOGV("VideoEditorAudioDecoder_step: Audio decoder \
+            returned INFO_FORMAT_CHANGED");
+        CHECK(outputBuffer == NULL);
+        sp<MetaData> meta = pDecoderContext->mDecoder->getFormat();
+        int32_t sampleRate, channelCount;
+
+        CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+        CHECK(meta->findInt32(kKeyChannelCount, &channelCount));
+        ALOGV("VideoEditorAudioDecoder_step: samplingFreq = %d", sampleRate);
+        ALOGV("VideoEditorAudioDecoder_step: channelCnt = %d", channelCount);
+        pDecoderContext->mAudioStreamHandler->m_samplingFrequency =
+            (uint32_t)sampleRate;
+        pDecoderContext->mAudioStreamHandler->m_nbChannels =
+            (uint32_t)channelCount;
+        pDecoderContext->mNbOutputChannels = channelCount;
+
+        return M4WAR_INFO_FORMAT_CHANGE;
+    } else if (ERROR_END_OF_STREAM == result) {
+        ALOGV("VideoEditorAudioDecoder_step: Audio decoder \
+            returned ERROR_END_OF_STREAM");
+        pDecoderContext->readerErrCode = M4WAR_NO_MORE_AU;
+        return M4WAR_NO_MORE_AU;
+    } else if (OK != result) {
+        return M4ERR_STATE;
+    }
+
+    // Convert the PCM buffer
+    err = VideoEditorAudioDecoder_processOutputBuffer(pDecoderContext,
+        outputBuffer, pOutputBuffer);
+    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
+
+cleanUp:
+    if( M4NO_ERROR == err ) {
+        ALOGV("VideoEditorAudioDecoder_step no error");
+    } else {
+        ALOGV("VideoEditorAudioDecoder_step ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorAudioDecoder_step end");
+    return err;
+}
+
+M4OSA_ERR VideoEditorAudioDecoder_getVersion(M4_VersionInfo* pVersionInfo) {
+    M4OSA_ERR err = M4NO_ERROR;
+
+    ALOGV("VideoEditorAudioDecoder_getVersion begin");
+    // Input parameters check
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pVersionInfo, M4ERR_PARAMETER);
+
+    pVersionInfo->m_major = VIDEOEDITOR_AUDIO_DECODER_VERSION_MAJOR;
+    pVersionInfo->m_minor = VIDEOEDITOR_AUDIO_DECODER_VERSION_MINOR;
+    pVersionInfo->m_revision = VIDEOEDITOR_AUDIO_DECODER_VERSION_REV;
+    pVersionInfo->m_structSize = sizeof(M4_VersionInfo);
+
+cleanUp:
+    if( M4NO_ERROR == err ) {
+        ALOGV("VideoEditorAudioDecoder_getVersion no error");
+    } else {
+        ALOGV("VideoEditorAudioDecoder_getVersion ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorAudioDecoder_getVersion end");
+    return err;
+}
+
+M4OSA_ERR VideoEditorAudioDecoder_setOption(M4AD_Context pContext,
+        M4OSA_UInt32 optionID, M4OSA_DataOption
optionValue) { + + M4OSA_ERR err = M4NO_ERROR; + VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL; + + ALOGV("VideoEditorAudioDecoder_setOption begin 0x%X", optionID); + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + + pDecoderContext = (VideoEditorAudioDecoder_Context*)pContext; + + switch( optionID ) { + case M4AD_kOptionID_UserParam: + ALOGV("VideoEditorAudioDecodersetOption UserParam is not supported"); + err = M4ERR_NOT_IMPLEMENTED; + break; + + case M4AD_kOptionID_3gpReaderInterface: + ALOGV("VideoEditorAudioDecodersetOption 3gpReaderInterface"); + pDecoderContext->m_pReader = + (M4READER_DataInterface *)optionValue; + break; + + case M4AD_kOptionID_AudioAU: + ALOGV("VideoEditorAudioDecodersetOption AudioAU"); + pDecoderContext->m_pNextAccessUnitToDecode = + (M4_AccessUnit *)optionValue; + break; + + default: + ALOGV("VideoEditorAudioDecoder_setOption unsupported optionId 0x%X", + optionID); + VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); + break; + } + +cleanUp: + if( ((M4OSA_UInt32)M4NO_ERROR == err) || ((M4OSA_UInt32)M4ERR_NOT_IMPLEMENTED == err) ) { + ALOGV("VideoEditorAudioDecoder_setOption error 0x%X", err); + } else { + ALOGV("VideoEditorAudioDecoder_setOption ERROR 0x%X", err); + } + ALOGV("VideoEditorAudioDecoder_setOption end"); + return err; +} + +M4OSA_ERR VideoEditorAudioDecoder_getOption(M4AD_Context pContext, + M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) { + + M4OSA_ERR err = M4NO_ERROR; + VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL; + + ALOGV("VideoEditorAudioDecoder_getOption begin: optionID 0x%X", optionID); + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + + pDecoderContext = (VideoEditorAudioDecoder_Context*)pContext; + + switch( optionID ) { + + case M4AD_kOptionID_GetAudioAUErrCode: + *(uint32_t *)optionValue = pDecoderContext->readerErrCode; + break; + + case M4AD_kOptionID_AudioNbChannels: + *(uint32_t *)optionValue = + pDecoderContext->mAudioStreamHandler->m_nbChannels; + break; + + case M4AD_kOptionID_AudioSampFrequency: + *(uint32_t *)optionValue = + pDecoderContext->mAudioStreamHandler->m_samplingFrequency; + break; + + case M4AD_kOptionID_AuCTS: + *(uint32_t *)optionValue = pDecoderContext->timeStampMs; + break; + + default: + ALOGV("VideoEditorAudioDecoder_getOption unsupported optionId 0x%X", + optionID); + VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); + break; + } + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorAudioDecoder_getOption no error"); + } else { + ALOGV("VideoEditorAudioDecoder_getOption ERROR 0x%X", err); + } + ALOGV("VideoEditorAudioDecoder_getOption end"); + return err; +} + +M4OSA_ERR VideoEditorAudioDecoder_getInterface(M4AD_Type decoderType, + M4AD_Type* pDecoderType, M4AD_Interface** pDecoderInterface) { + + M4OSA_ERR err = M4NO_ERROR; + + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pDecoderType, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pDecoderInterface, M4ERR_PARAMETER); + + ALOGV("VideoEditorAudioDecoder_getInterface begin %d 0x%x 0x%x", + decoderType, pDecoderType, pDecoderInterface); + + SAFE_MALLOC(*pDecoderInterface, M4AD_Interface, 1, + "VideoEditorAudioDecoder"); + + *pDecoderType = decoderType; + + switch( decoderType ) { + case M4AD_kTypeAMRNB: + (*pDecoderInterface)->m_pFctCreateAudioDec = + VideoEditorAudioDecoder_create_AMRNB; + break; + case M4AD_kTypeAMRWB: + (*pDecoderInterface)->m_pFctCreateAudioDec = + VideoEditorAudioDecoder_create_AMRWB; + 
break; + case M4AD_kTypeAAC: + (*pDecoderInterface)->m_pFctCreateAudioDec = + VideoEditorAudioDecoder_create_AAC; + break; + case M4AD_kTypeMP3: + (*pDecoderInterface)->m_pFctCreateAudioDec = + VideoEditorAudioDecoder_create_MP3; + break; + default: + ALOGV("VEAD_getInterface ERROR: unsupported type %d", decoderType); + VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); + break; + } + (*pDecoderInterface)->m_pFctDestroyAudioDec = + VideoEditorAudioDecoder_destroy; + (*pDecoderInterface)->m_pFctResetAudioDec = M4OSA_NULL; + (*pDecoderInterface)->m_pFctStartAudioDec = M4OSA_NULL; + (*pDecoderInterface)->m_pFctStepAudioDec = + VideoEditorAudioDecoder_step; + (*pDecoderInterface)->m_pFctGetVersionAudioDec = + VideoEditorAudioDecoder_getVersion; + (*pDecoderInterface)->m_pFctSetOptionAudioDec = + VideoEditorAudioDecoder_setOption; + (*pDecoderInterface)->m_pFctGetOptionAudioDec = + VideoEditorAudioDecoder_getOption; + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorAudioDecoder_getInterface no error"); + } else { + *pDecoderInterface = M4OSA_NULL; + ALOGV("VideoEditorAudioDecoder_getInterface ERROR 0x%X", err); + } + ALOGV("VideoEditorAudioDecoder_getInterface end"); + return err; +} + + +extern "C" { + +M4OSA_ERR VideoEditorAudioDecoder_getInterface_AAC(M4AD_Type* pDecoderType, + M4AD_Interface** pDecoderInterface) { + ALOGV("TEST: AAC VideoEditorAudioDecoder_getInterface no error"); + return VideoEditorAudioDecoder_getInterface( + M4AD_kTypeAAC, pDecoderType, pDecoderInterface); +} + +M4OSA_ERR VideoEditorAudioDecoder_getInterface_AMRNB(M4AD_Type* pDecoderType, + M4AD_Interface** pDecoderInterface) { + ALOGV("TEST: AMR VideoEditorAudioDecoder_getInterface no error"); + return VideoEditorAudioDecoder_getInterface( + M4AD_kTypeAMRNB, pDecoderType, pDecoderInterface); +} + +M4OSA_ERR VideoEditorAudioDecoder_getInterface_AMRWB(M4AD_Type* pDecoderType, + M4AD_Interface** pDecoderInterface) { + + return VideoEditorAudioDecoder_getInterface( + M4AD_kTypeAMRWB, pDecoderType, pDecoderInterface); +} + +M4OSA_ERR VideoEditorAudioDecoder_getInterface_MP3(M4AD_Type* pDecoderType, + M4AD_Interface** pDecoderInterface) { + + return VideoEditorAudioDecoder_getInterface( + M4AD_kTypeMP3, pDecoderType, pDecoderInterface); +} + +} // extern "C" + +} // namespace android diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorAudioEncoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorAudioEncoder.cpp new file mode 100644 index 0000000..a91f3ee --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/VideoEditorAudioEncoder.cpp @@ -0,0 +1,755 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+/**
+*************************************************************************
+* @file   VideoEditorAudioEncoder.cpp
+* @brief  StageFright shell Audio Encoder
+*************************************************************************
+*/
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "VIDEOEDITOR_AUDIOENCODER"
+
+#include "M4OSA_Debug.h"
+#include "VideoEditorAudioEncoder.h"
+#include "VideoEditorUtils.h"
+
+#include "utils/Log.h"
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/OMXCodec.h>
+
+/*** DEFINITIONS ***/
+// Force using software encoder as engine does not support prefetch
+#define VIDEOEDITOR_FORCECODEC kSoftwareCodecsOnly
+
+namespace android {
+struct VideoEditorAudioEncoderSource : public MediaSource {
+    public:
+        static sp<VideoEditorAudioEncoderSource> Create(
+            const sp<MetaData> &format);
+        virtual status_t start(MetaData *params = NULL);
+        virtual status_t stop();
+        virtual sp<MetaData> getFormat();
+        virtual status_t read(MediaBuffer **buffer,
+            const ReadOptions *options = NULL);
+        virtual int32_t storeBuffer(MediaBuffer *buffer);
+
+    protected:
+        virtual ~VideoEditorAudioEncoderSource();
+
+    private:
+        struct MediaBufferChain {
+            MediaBuffer* buffer;
+            MediaBufferChain* nextLink;
+        };
+        enum State {
+            CREATED,
+            STARTED,
+            ERROR
+        };
+
+        MediaBufferChain* mFirstBufferLink;
+        MediaBufferChain* mLastBufferLink;
+        int32_t mNbBuffer;
+        State mState;
+        sp<MetaData> mEncFormat;
+
+        VideoEditorAudioEncoderSource(const sp<MetaData> &format);
+
+        // Don't call me.
+        VideoEditorAudioEncoderSource(const VideoEditorAudioEncoderSource&);
+        VideoEditorAudioEncoderSource& operator=(
+            const VideoEditorAudioEncoderSource&);
+};
+
+sp<VideoEditorAudioEncoderSource> VideoEditorAudioEncoderSource::Create(
+        const sp<MetaData> &format) {
+
+    ALOGV("VideoEditorAudioEncoderSource::Create");
+    sp<VideoEditorAudioEncoderSource> aSource =
+        new VideoEditorAudioEncoderSource(format);
+
+    return aSource;
+}
+
+VideoEditorAudioEncoderSource::VideoEditorAudioEncoderSource(
+        const sp<MetaData> &format):
+        mFirstBufferLink(NULL),
+        mLastBufferLink(NULL),
+        mNbBuffer(0),
+        mState(CREATED),
+        mEncFormat(format) {
+    ALOGV("VideoEditorAudioEncoderSource::VideoEditorAudioEncoderSource");
+}
+
+VideoEditorAudioEncoderSource::~VideoEditorAudioEncoderSource() {
+    ALOGV("VideoEditorAudioEncoderSource::~VideoEditorAudioEncoderSource");
+
+    if( STARTED == mState ) {
+        stop();
+    }
+}
+
+status_t VideoEditorAudioEncoderSource::start(MetaData *meta) {
+    status_t err = OK;
+
+    ALOGV("VideoEditorAudioEncoderSource::start");
+
+    if( CREATED != mState ) {
+        ALOGV("VideoEditorAudioEncoderSource::start ERROR : invalid state %d",
+            mState);
+        return UNKNOWN_ERROR;
+    }
+
+    mState = STARTED;
+
+cleanUp:
+    ALOGV("VideoEditorAudioEncoderSource::start END (0x%x)", err);
+    return err;
+}
+
+status_t VideoEditorAudioEncoderSource::stop() {
+    status_t err = OK;
+
+    ALOGV("VideoEditorAudioEncoderSource::stop");
+
+    if( STARTED != mState ) {
+        ALOGV("VideoEditorAudioEncoderSource::stop ERROR: invalid state %d",
+            mState);
+        return UNKNOWN_ERROR;
+    }
+
+    int32_t i = 0;
+    MediaBufferChain* tmpLink = NULL;
+    while( mFirstBufferLink ) {
+        i++;
+        tmpLink = mFirstBufferLink;
+        mFirstBufferLink = mFirstBufferLink->nextLink;
+        delete tmpLink;
+    }
+    ALOGV("VideoEditorAudioEncoderSource::stop : %d buffer remained", i);
+    mFirstBufferLink = NULL;
+    mLastBufferLink = NULL;
+
+    mState = CREATED;
+
+    ALOGV("VideoEditorAudioEncoderSource::stop END (0x%x)", err);
+    return err;
+}
+
+sp<MetaData> VideoEditorAudioEncoderSource::getFormat() {
+    ALOGV("VideoEditorAudioEncoderSource::getFormat");
+    return mEncFormat;
+}
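+
+/* Usage sketch (illustrative only, not part of this patch): the source above
+ * is a plain FIFO of MediaBuffers feeding the encoder graph. Names below
+ * other than the methods defined in this file are hypothetical.
+ *
+ *   sp<VideoEditorAudioEncoderSource> src =
+ *       VideoEditorAudioEncoderSource::Create(encoderMetadata);
+ *   src->start();
+ *   src->storeBuffer(pcmBuffer);    // append at mLastBufferLink
+ *   MediaBuffer* out = NULL;
+ *   src->read(&out);                // pop mFirstBufferLink, or
+ *                                   // ERROR_END_OF_STREAM when empty
+ *   src->stop();                    // discards any links still queued
+ */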
+
+status_t VideoEditorAudioEncoderSource::read(MediaBuffer **buffer,
+        const ReadOptions *options) {
+    MediaSource::ReadOptions readOptions;
+    status_t err = OK;
+    MediaBufferChain* tmpLink = NULL;
+
+    ALOGV("VideoEditorAudioEncoderSource::read");
+
+    if ( STARTED != mState ) {
+        ALOGV("VideoEditorAudioEncoderSource::read ERROR : invalid state %d",
+            mState);
+        return UNKNOWN_ERROR;
+    }
+
+    if( NULL == mFirstBufferLink ) {
+        *buffer = NULL;
+        ALOGV("VideoEditorAudioEncoderSource::read : EOS");
+        return ERROR_END_OF_STREAM;
+    }
+    *buffer = mFirstBufferLink->buffer;
+
+    tmpLink = mFirstBufferLink;
+    mFirstBufferLink = mFirstBufferLink->nextLink;
+    if( NULL == mFirstBufferLink ) {
+        mLastBufferLink = NULL;
+    }
+    delete tmpLink;
+    mNbBuffer--;
+
+    ALOGV("VideoEditorAudioEncoderSource::read END (0x%x)", err);
+    return err;
+}
+
+int32_t VideoEditorAudioEncoderSource::storeBuffer(MediaBuffer *buffer) {
+    status_t err = OK;
+
+    ALOGV("VideoEditorAudioEncoderSource::storeBuffer");
+
+    MediaBufferChain* newLink = new MediaBufferChain;
+    newLink->buffer = buffer;
+    newLink->nextLink = NULL;
+    if( NULL != mLastBufferLink ) {
+        mLastBufferLink->nextLink = newLink;
+    } else {
+        mFirstBufferLink = newLink;
+    }
+    mLastBufferLink = newLink;
+    mNbBuffer++;
+
+    ALOGV("VideoEditorAudioEncoderSource::storeBuffer END");
+    return mNbBuffer;
+}
+
+/********************
+ * ENGINE INTERFACE *
+ ********************/
+/**
+ ******************************************************************************
+ * structure VideoEditorAudioEncoder_Context
+ * @brief    This structure defines the context of the StageFright audio
+ *           encoder shell
+ ******************************************************************************
+*/
+typedef struct {
+    M4ENCODER_AudioFormat mFormat;
+    M4ENCODER_AudioParams* mCodecParams;
+    M4ENCODER_AudioDecSpecificInfo mDSI;
+    sp<VideoEditorAudioEncoderSource> mEncoderSource;
+    OMXClient mClient;
+    sp<MediaSource> mEncoder;
+    uint32_t mNbInputFrames;
+    uint32_t mNbOutputFrames;
+    int64_t mFirstOutputCts;
+    int64_t mLastOutputCts;
+} VideoEditorAudioEncoder_Context;
+
+M4OSA_ERR VideoEditorAudioEncoder_cleanup(M4OSA_Context pContext) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL;
+
+    ALOGV("VideoEditorAudioEncoder_cleanup begin");
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+    pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext;
+
+    SAFE_FREE(pEncoderContext->mDSI.pInfo);
+    SAFE_FREE(pEncoderContext);
+    pContext = M4OSA_NULL;
+
+cleanUp:
+    if( M4NO_ERROR == err ) {
+        ALOGV("VideoEditorAudioEncoder_cleanup no error");
+    } else {
+        ALOGV("VideoEditorAudioEncoder_cleanup ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorAudioEncoder_cleanup end");
+    return err;
+}
+
+M4OSA_ERR VideoEditorAudioEncoder_init(M4ENCODER_AudioFormat format,
+        M4OSA_Context* pContext, M4OSA_Void* pUserData) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL;
+
+    ALOGV(" VideoEditorAudioEncoder_init begin: format %d", format);
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+
+    SAFE_MALLOC(pEncoderContext, VideoEditorAudioEncoder_Context, 1,
+        "VideoEditorAudioEncoder");
+    pEncoderContext->mFormat = format;
+
+    *pContext = pEncoderContext;
+
+cleanUp:
+    if( M4NO_ERROR == err ) {
+        ALOGV("VideoEditorAudioEncoder_init no error");
+    } else {
+        VideoEditorAudioEncoder_cleanup(pEncoderContext);
+        *pContext = M4OSA_NULL;
+        ALOGV("VideoEditorAudioEncoder_init ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorAudioEncoder_init end");
+    return err;
+}
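+
+/* The wrappers below only bind a fixed M4ENCODER_AudioFormat to the generic
+ * init above; the engine reaches them through the function-pointer table
+ * filled in by VideoEditorAudioEncoder_getInterface(). Illustrative call:
+ *
+ *   M4OSA_Context ctx = M4OSA_NULL;
+ *   M4OSA_ERR e = VideoEditorAudioEncoder_init_AAC(&ctx, M4OSA_NULL);
+ */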
+M4OSA_ERR VideoEditorAudioEncoder_init_AAC(M4OSA_Context* pContext,
+        M4OSA_Void* pUserData) {
+    return VideoEditorAudioEncoder_init(M4ENCODER_kAAC, pContext, pUserData);
+}
+
+M4OSA_ERR VideoEditorAudioEncoder_init_AMRNB(M4OSA_Context* pContext,
+        M4OSA_Void* pUserData) {
+    return VideoEditorAudioEncoder_init(M4ENCODER_kAMRNB, pContext, pUserData);
+}
+
+M4OSA_ERR VideoEditorAudioEncoder_init_MP3(M4OSA_Context* pContext,
+        M4OSA_Void* pUserData) {
+    return VideoEditorAudioEncoder_init(M4ENCODER_kMP3, pContext, pUserData);
+}
+
+M4OSA_ERR VideoEditorAudioEncoder_close(M4OSA_Context pContext) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL;
+
+    ALOGV("VideoEditorAudioEncoder_close begin");
+
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+    pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext;
+
+    SAFE_FREE(pEncoderContext->mCodecParams);
+
+    pEncoderContext->mEncoder->stop();
+    pEncoderContext->mEncoder.clear();
+    pEncoderContext->mClient.disconnect();
+    pEncoderContext->mEncoderSource.clear();
+
+    ALOGV("AudioEncoder_close:IN %d frames,OUT %d frames from %lld to %lld",
+        pEncoderContext->mNbInputFrames,
+        pEncoderContext->mNbOutputFrames, pEncoderContext->mFirstOutputCts,
+        pEncoderContext->mLastOutputCts);
+
+    if( pEncoderContext->mNbInputFrames != pEncoderContext->mNbOutputFrames ) {
+        ALOGV("VideoEditorAudioEncoder_close:some frames were not encoded %d %d",
+            pEncoderContext->mNbInputFrames, pEncoderContext->mNbOutputFrames);
+    }
+
+cleanUp:
+    if( M4NO_ERROR == err ) {
+        ALOGV("VideoEditorAudioEncoder_close no error");
+    } else {
+        ALOGV("VideoEditorAudioEncoder_close ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorAudioEncoder_close end");
+    return err;
+}
+
+M4OSA_ERR VideoEditorAudioEncoder_open(M4OSA_Context pContext,
+        M4ENCODER_AudioParams *pParams, M4ENCODER_AudioDecSpecificInfo *pDSI,
+        M4OSA_Context pGrabberContext) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL;
+    status_t result = OK;
+    sp<MetaData> encoderMetadata = NULL;
+    const char* mime = NULL;
+    int32_t iNbChannel = 0;
+    uint32_t codecFlags = 0;
+
+    ALOGV("VideoEditorAudioEncoder_open begin");
+
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pParams, M4ERR_PARAMETER);
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pDSI, M4ERR_PARAMETER);
+
+    pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext;
+    pDSI->pInfo = M4OSA_NULL;
+    pDSI->infoSize = 0;
+
+    pEncoderContext->mNbInputFrames = 0;
+    pEncoderContext->mNbOutputFrames = 0;
+    pEncoderContext->mFirstOutputCts = -1;
+    pEncoderContext->mLastOutputCts = -1;
+
+    // Allocate & initialize the encoding parameters
+    ALOGV("VideoEditorAudioEncoder_open : params F=%d CN=%d BR=%d F=%d",
+        pParams->Frequency, pParams->ChannelNum, pParams->Bitrate,
+        pParams->Format);
+    SAFE_MALLOC(pEncoderContext->mCodecParams, M4ENCODER_AudioParams, 1,
+        "VIDEOEDITOR CodecParams");
+    pEncoderContext->mCodecParams->Frequency = pParams->Frequency;
+    pEncoderContext->mCodecParams->ChannelNum = pParams->ChannelNum;
+    pEncoderContext->mCodecParams->Bitrate = pParams->Bitrate;
+    pEncoderContext->mCodecParams->Format = pParams->Format;
+
+    // Check output format consistency
+    VIDEOEDITOR_CHECK(pEncoderContext->mCodecParams->Format ==
+        pEncoderContext->mFormat, M4ERR_PARAMETER);
+
+    /**
+     * StageFright graph building
+     */
+    // Create the meta data for the encoder
+    encoderMetadata = new MetaData;
+    switch( pEncoderContext->mCodecParams->Format ) {
+        case M4ENCODER_kAAC:
+        {
+            mime = MEDIA_MIMETYPE_AUDIO_AAC;
+            break;
+        }
+        case
M4ENCODER_kAMRNB: + { + mime = MEDIA_MIMETYPE_AUDIO_AMR_NB; + break; + } + default: + { + VIDEOEDITOR_CHECK(!"AudioEncoder_open : incorrect input format", + M4ERR_PARAMETER); + break; + } + } + encoderMetadata->setCString(kKeyMIMEType, mime); + encoderMetadata->setInt32(kKeySampleRate, + (int32_t)pEncoderContext->mCodecParams->Frequency); + encoderMetadata->setInt32(kKeyBitRate, + (int32_t)pEncoderContext->mCodecParams->Bitrate); + + switch( pEncoderContext->mCodecParams->ChannelNum ) { + case M4ENCODER_kMono: + { + iNbChannel = 1; + break; + } + case M4ENCODER_kStereo: + { + iNbChannel = 2; + break; + } + default: + { + VIDEOEDITOR_CHECK(!"AudioEncoder_open : incorrect channel number", + M4ERR_STATE); + break; + } + } + encoderMetadata->setInt32(kKeyChannelCount, iNbChannel); + + // Create the encoder source + pEncoderContext->mEncoderSource = VideoEditorAudioEncoderSource::Create( + encoderMetadata); + VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoderSource.get(), + M4ERR_STATE); + + // Connect to the OMX client + result = pEncoderContext->mClient.connect(); + VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); + + // Create the OMX codec +#ifdef VIDEOEDITOR_FORCECODEC + codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC; +#endif /* VIDEOEDITOR_FORCECODEC */ + // FIXME: + // We are moving away to use software AACEncoder and instead use OMX-based + // software AAC audio encoder. We want to use AACEncoder for now. After we + // fix the interface issue with the OMX-based AAC audio encoder, we should + // then set the component name back to NULL to allow the system to pick up + // the right AAC audio encoder. + pEncoderContext->mEncoder = OMXCodec::Create( + pEncoderContext->mClient.interface(), encoderMetadata, true, + pEncoderContext->mEncoderSource, "AACEncoder" /* component name */, + codecFlags); + VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoder.get(), M4ERR_STATE); + + // Start the graph + result = pEncoderContext->mEncoder->start(); + VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); + + // Get AAC DSI, this code can only work with software encoder + if( M4ENCODER_kAAC == pEncoderContext->mCodecParams->Format ) { + int32_t isCodecConfig = 0; + MediaBuffer* buffer = NULL; + + // Read once to get the DSI + result = pEncoderContext->mEncoder->read(&buffer, NULL); + VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); + VIDEOEDITOR_CHECK(buffer->meta_data()->findInt32(kKeyIsCodecConfig, + &isCodecConfig) && isCodecConfig, M4ERR_STATE); + + // Save the DSI + pEncoderContext->mDSI.infoSize = (M4OSA_UInt32)buffer->range_length(); + SAFE_MALLOC(pEncoderContext->mDSI.pInfo, M4OSA_Int8, + pEncoderContext->mDSI.infoSize, "Encoder header"); + + memcpy((void *)pEncoderContext->mDSI.pInfo, + (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->range_offset()), + pEncoderContext->mDSI.infoSize); + + buffer->release(); + *pDSI = pEncoderContext->mDSI; + } + ALOGV("VideoEditorAudioEncoder_open : DONE"); + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorAudioEncoder_open no error"); + } else { + VideoEditorAudioEncoder_close(pEncoderContext); + ALOGV("VideoEditorAudioEncoder_open ERROR 0x%X", err); + } + ALOGV("VideoEditorAudioEncoder_open end"); + return err; +} + +M4OSA_ERR VideoEditorAudioEncoder_processInputBuffer(M4OSA_Context pContext, + M4ENCODER_AudioBuffer* pInBuffer) { + + M4OSA_ERR err = M4NO_ERROR; + VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL; + M4OSA_Int8* pData = M4OSA_NULL; + MediaBuffer* buffer = NULL; + int32_t nbBuffer = 0; + + 
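+    /* The engine retains ownership of pInBuffer, so the PCM payload is
+     * copied into a fresh MediaBuffer below; the MediaBuffer then owns the
+     * copy and the encoder source releases it once it has been consumed. */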
ALOGV("VideoEditorAudioEncoder_processInputBuffer begin"); + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + + pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext; + + switch( pEncoderContext->mCodecParams->ChannelNum ) { + case M4ENCODER_kMono: + case M4ENCODER_kStereo: + // Let the MediaBuffer own the data so we don't have to free it + buffer = new MediaBuffer((size_t)pInBuffer->pTableBufferSize[0]); + pData = (M4OSA_Int8*)buffer->data() + buffer->range_offset(); + memcpy((void *)pData, (void *)pInBuffer->pTableBuffer[0], + pInBuffer->pTableBufferSize[0]); + break; + default: + ALOGV("VEAE_processInputBuffer unsupported channel configuration %d", + pEncoderContext->mCodecParams->ChannelNum); + VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); + break; + } + + ALOGV("VideoEditorAudioEncoder_processInputBuffer : store %d bytes", + buffer->range_length()); + // Push the buffer to the source + nbBuffer = pEncoderContext->mEncoderSource->storeBuffer(buffer); + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorAudioEncoder_processInputBuffer no error"); + } else { + if( NULL != buffer ) { + buffer->release(); + } + ALOGV("VideoEditorAudioEncoder_processInputBuffer ERROR 0x%X", err); + } + ALOGV("VideoEditorAudioEncoder_processInputBuffer end"); + return err; +} + +M4OSA_ERR VideoEditorAudioEncoder_processOutputBuffer(M4OSA_Context pContext, + MediaBuffer* buffer, M4ENCODER_AudioBuffer* pOutBuffer) { + + M4OSA_ERR err = M4NO_ERROR; + VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL; + M4OSA_UInt32 Cts = 0; + int32_t i32Tmp = 0; + int64_t i64Tmp = 0; + status_t result = OK; + + ALOGV("VideoEditorAudioEncoder_processOutputBuffer begin"); + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != buffer, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pOutBuffer, M4ERR_PARAMETER); + + pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext; + + // Process the returned AU + if( 0 == buffer->range_length() ) { + // Encoder has no data yet, nothing unusual + ALOGV("VideoEditorAudioEncoder_processOutputBuffer : buffer is empty"); + pOutBuffer->pTableBufferSize[0] = 0; + goto cleanUp; + } + if( buffer->meta_data()->findInt32(kKeyIsCodecConfig, &i32Tmp) && i32Tmp ) { + /* This should not happen with software encoder, + * DSI was retrieved beforehand */ + VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_STATE); + } else { + // Check the CTS + VIDEOEDITOR_CHECK(buffer->meta_data()->findInt64(kKeyTime, &i64Tmp), + M4ERR_STATE); + Cts = (M4OSA_Int32)(i64Tmp/1000); + + pEncoderContext->mNbOutputFrames++; + if( 0 > pEncoderContext->mFirstOutputCts ) { + pEncoderContext->mFirstOutputCts = i64Tmp; + } + pEncoderContext->mLastOutputCts = i64Tmp; + + // Format the AU + memcpy((void *)pOutBuffer->pTableBuffer[0], + (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->range_offset()), + buffer->range_length()); + pOutBuffer->pTableBufferSize[0] = (M4OSA_UInt32)buffer->range_length(); + } + +cleanUp: + // Release the buffer + buffer->release(); + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorAudioEncoder_processOutputBuffer no error"); + } else { + ALOGV("VideoEditorAudioEncoder_processOutputBuffer ERROR 0x%X", err); + } + ALOGV("VideoEditorAudioEncoder_processOutputBuffer end"); + return err; +} + +M4OSA_ERR VideoEditorAudioEncoder_step(M4OSA_Context pContext, + M4ENCODER_AudioBuffer* pInBuffer, M4ENCODER_AudioBuffer* pOutBuffer) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL; + status_t result 
= OK; + MediaBuffer* buffer = NULL; + + ALOGV("VideoEditorAudioEncoder_step begin"); + + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pInBuffer, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pOutBuffer, M4ERR_PARAMETER); + + pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext; + pEncoderContext->mNbInputFrames++; + + // Push the input buffer to the encoder source + err = VideoEditorAudioEncoder_processInputBuffer(pEncoderContext,pInBuffer); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + + // Read + result = pEncoderContext->mEncoder->read(&buffer, NULL); + VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); + + // Provide the encoded AU to the writer + err = VideoEditorAudioEncoder_processOutputBuffer(pEncoderContext, buffer, + pOutBuffer); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorAudioEncoder_step no error"); + } else { + ALOGV("VideoEditorAudioEncoder_step ERROR 0x%X", err); + } + ALOGV("VideoEditorAudioEncoder_step end"); + return err; +} + +M4OSA_ERR VideoEditorAudioEncoder_getOption(M4OSA_Context pContext, + M4OSA_OptionID optionID, M4OSA_DataOption* optionValue) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL; + + ALOGV("VideoEditorAudioEncoder_getOption begin optionID 0x%X", optionID); + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + + pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext; + + switch( optionID ) { + default: + ALOGV("VideoEditorAudioEncoder_getOption: unsupported optionId 0x%X", + optionID); + VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); + break; + } + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorAudioEncoder_getOption no error"); + } else { + ALOGV("VideoEditorAudioEncoder_getOption ERROR 0x%X", err); + } + ALOGV("VideoEditorAudioEncoder_getOption end"); + return err; +} + +M4OSA_ERR VideoEditorAudioEncoder_getInterface( + M4ENCODER_AudioFormat format, M4ENCODER_AudioFormat* pFormat, + M4ENCODER_AudioGlobalInterface** pEncoderInterface) { + M4OSA_ERR err = M4NO_ERROR; + + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pFormat, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pEncoderInterface, M4ERR_PARAMETER); + + ALOGV("VideoEditorAudioEncoder_getInterface 0x%x 0x%x",pFormat, + pEncoderInterface); + SAFE_MALLOC(*pEncoderInterface, M4ENCODER_AudioGlobalInterface, 1, + "AudioEncoder"); + + *pFormat = format; + + switch( format ) { + case M4ENCODER_kAAC: + { + (*pEncoderInterface)->pFctInit = VideoEditorAudioEncoder_init_AAC; + break; + } + case M4ENCODER_kAMRNB: + { + (*pEncoderInterface)->pFctInit = VideoEditorAudioEncoder_init_AMRNB; + break; + } + case M4ENCODER_kMP3: + { + (*pEncoderInterface)->pFctInit = VideoEditorAudioEncoder_init_MP3; + break; + } + default: + { + ALOGV("VideoEditorAudioEncoder_getInterface: unsupported format %d", + format); + VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); + break; + } + } + (*pEncoderInterface)->pFctCleanUp = VideoEditorAudioEncoder_cleanup; + (*pEncoderInterface)->pFctOpen = VideoEditorAudioEncoder_open; + (*pEncoderInterface)->pFctClose = VideoEditorAudioEncoder_close; + (*pEncoderInterface)->pFctStep = VideoEditorAudioEncoder_step; + (*pEncoderInterface)->pFctGetOption = VideoEditorAudioEncoder_getOption; + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorAudioEncoder_getInterface no error"); + } else { + *pEncoderInterface = M4OSA_NULL; + ALOGV("VideoEditorAudioEncoder_getInterface 
ERROR 0x%X", err); + } + return err; +} +extern "C" { + +M4OSA_ERR VideoEditorAudioEncoder_getInterface_AAC( + M4ENCODER_AudioFormat* pFormat, + M4ENCODER_AudioGlobalInterface** pEncoderInterface) { + return VideoEditorAudioEncoder_getInterface( + M4ENCODER_kAAC, pFormat, pEncoderInterface); +} + +M4OSA_ERR VideoEditorAudioEncoder_getInterface_AMRNB( + M4ENCODER_AudioFormat* pFormat, + M4ENCODER_AudioGlobalInterface** pEncoderInterface) { + + return VideoEditorAudioEncoder_getInterface( + M4ENCODER_kAMRNB, pFormat, pEncoderInterface); +} + +M4OSA_ERR VideoEditorAudioEncoder_getInterface_MP3( + M4ENCODER_AudioFormat* pFormat, + M4ENCODER_AudioGlobalInterface** pEncoderInterface) { + ALOGV("VideoEditorAudioEncoder_getInterface_MP3 no error"); + + return VideoEditorAudioEncoder_getInterface( + M4ENCODER_kMP3, pFormat, pEncoderInterface); +} + +} // extern "C" + +} // namespace android diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.c b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.c new file mode 100644 index 0000000..98919d2 --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.c @@ -0,0 +1,265 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+/**
+*************************************************************************
+* @file   VideoEditorBuffer.c
+* @brief  StageFright shell Buffer
+*************************************************************************
+*/
+#undef M4OSA_TRACE_LEVEL
+#define M4OSA_TRACE_LEVEL 1
+
+#include "VideoEditorBuffer.h"
+#include "utils/Log.h"
+
+#define VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE 40
+
+#define VIDEOEDITOR_SAFE_FREE(p) \
+{ \
+    if(M4OSA_NULL != p) \
+    { \
+        free(p); \
+        p = M4OSA_NULL; \
+    } \
+}
+
+/**
+ ************************************************************************
+ M4OSA_ERR VIDEOEDITOR_BUFFER_allocatePool(VIDEOEDITOR_BUFFER_Pool** ppool,
+ *         M4OSA_UInt32 nbBuffers)
+ * @brief   Allocate a pool of nbBuffers buffers
+ *
+ * @param   ppool      : IN The buffer pool to create
+ * @param   nbBuffers  : IN The number of buffers in the pool
+ * @param   poolName   : IN a name given to the pool
+ * @return  Error code
+ ************************************************************************
+*/
+M4OSA_ERR VIDEOEDITOR_BUFFER_allocatePool(VIDEOEDITOR_BUFFER_Pool** ppool,
+        M4OSA_UInt32 nbBuffers, M4OSA_Char* poolName)
+{
+    M4OSA_ERR lerr = M4NO_ERROR;
+    VIDEOEDITOR_BUFFER_Pool* pool;
+
+    ALOGV("VIDEOEDITOR_BUFFER_allocatePool : ppool = 0x%x nbBuffers = %d ",
+        ppool, nbBuffers);
+
+    pool = M4OSA_NULL;
+    pool = (VIDEOEDITOR_BUFFER_Pool*)M4OSA_32bitAlignedMalloc(
+            sizeof(VIDEOEDITOR_BUFFER_Pool), VIDEOEDITOR_BUFFER_EXTERNAL,
+            (M4OSA_Char*)("VIDEOEDITOR_BUFFER_allocatePool: pool"));
+    if (M4OSA_NULL == pool)
+    {
+        lerr = M4ERR_ALLOC;
+        goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup;
+    }
+
+    ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Allocating Pool buffers");
+    pool->pNXPBuffer = M4OSA_NULL;
+    pool->pNXPBuffer = (VIDEOEDITOR_BUFFER_Buffer*)M4OSA_32bitAlignedMalloc(
+            sizeof(VIDEOEDITOR_BUFFER_Buffer)*nbBuffers,
+            VIDEOEDITOR_BUFFER_EXTERNAL,
+            (M4OSA_Char*)("BUFFER_allocatePool: pNXPBuffer"));
+    if(M4OSA_NULL == pool->pNXPBuffer)
+    {
+        lerr = M4ERR_ALLOC;
+        goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup;
+    }
+
+    ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Allocating Pool name buffer");
+    pool->poolName = M4OSA_NULL;
+    pool->poolName = (M4OSA_Char*)M4OSA_32bitAlignedMalloc(
+            VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE,VIDEOEDITOR_BUFFER_EXTERNAL,
+            (M4OSA_Char*)("VIDEOEDITOR_BUFFER_allocatePool: poolname"));
+    if(pool->poolName == M4OSA_NULL)
+    {
+        lerr = M4ERR_ALLOC;
+        goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup;
+    }
+
+    ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Assigning Pool name buffer");
+
+    memset((void *)pool->poolName, 0,VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE);
+    memcpy((void *)pool->poolName, (void *)poolName,
+        VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE-1);
+
+    pool->NB = nbBuffers;
+
+VIDEOEDITOR_BUFFER_allocatePool_Cleanup:
+    if(M4NO_ERROR != lerr)
+    {
+        /* Do not dereference pool when its own allocation failed */
+        if(M4OSA_NULL != pool)
+        {
+            VIDEOEDITOR_SAFE_FREE(pool->pNXPBuffer);
+            VIDEOEDITOR_SAFE_FREE(pool->poolName);
+        }
+        VIDEOEDITOR_SAFE_FREE(pool);
+    }
+    *ppool = pool;
+    ALOGV("VIDEOEDITOR_BUFFER_allocatePool END");
+
+    return lerr;
+}
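+
+/* Typical pool lifecycle as driven by the decoder shells (illustrative
+ * sketch; the pool size and buffer size are example values, not values
+ * mandated by this file):
+ *
+ *   VIDEOEDITOR_BUFFER_Pool* pool = M4OSA_NULL;
+ *   VIDEOEDITOR_BUFFER_Buffer* buff = M4OSA_NULL;
+ *   VIDEOEDITOR_BUFFER_allocatePool(&pool, 5, (M4OSA_Char*)"decoded frames");
+ *   VIDEOEDITOR_BUFFER_initPoolBuffers(pool, 4096);
+ *   VIDEOEDITOR_BUFFER_getBuffer(pool, VIDEOEDITOR_BUFFER_kEmpty, &buff);
+ *   // ...fill buff->pData, then update buff->state and buff->buffCTS...
+ *   VIDEOEDITOR_BUFFER_freePool(pool);
+ */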
+
+/**
+ ************************************************************************
+ M4OSA_ERR VIDEOEDITOR_BUFFER_freePool(VIDEOEDITOR_BUFFER_Pool* ppool)
+ * @brief   Deallocate a buffer pool
+ *
+ * @param   ppool      : IN The buffer pool to free
+ * @return  Error code
+ ************************************************************************
+*/
+M4OSA_ERR VIDEOEDITOR_BUFFER_freePool(VIDEOEDITOR_BUFFER_Pool* ppool)
+{
+    M4OSA_ERR err;
+    M4OSA_UInt32 j = 0;
+
+    ALOGV("VIDEOEDITOR_BUFFER_freePool : ppool = 0x%x", ppool);
+
+    err = M4NO_ERROR;
+
+    /* Check ppool before it is dereferenced in the loop below */
+    if(ppool != M4OSA_NULL)
+    {
+        for (j = 0; j < ppool->NB; j++)
+        {
+            if(M4OSA_NULL != ppool->pNXPBuffer[j].pData)
+            {
+                free(ppool->pNXPBuffer[j].pData);
+                ppool->pNXPBuffer[j].pData = M4OSA_NULL;
+            }
+        }
+
+        VIDEOEDITOR_SAFE_FREE(ppool->pNXPBuffer);
+        VIDEOEDITOR_SAFE_FREE(ppool->poolName);
+        VIDEOEDITOR_SAFE_FREE(ppool);
+    }
+
+    return(err);
+}
+
+/**
+ ************************************************************************
+ M4OSA_ERR VIDEOEDITOR_BUFFER_getBuffer(VIDEOEDITOR_BUFFER_Pool* ppool,
+ *         VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer)
+ * @brief   Returns a buffer in a given state
+ *
+ * @param   ppool        : IN The buffer pool
+ * @param   desiredState : IN The buffer state
+ * @param   pNXPBuffer   : IN The selected buffer
+ * @return  Error code
+ ************************************************************************
+*/
+M4OSA_ERR VIDEOEDITOR_BUFFER_getBuffer(VIDEOEDITOR_BUFFER_Pool* ppool,
+        VIDEOEDITOR_BUFFER_State desiredState,
+        VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer)
+{
+    M4OSA_ERR err = M4NO_ERROR;
+    M4OSA_Bool bFound = M4OSA_FALSE;
+    M4OSA_UInt32 i, ibuf;
+
+    ALOGV("VIDEOEDITOR_BUFFER_getBuffer from %s in state=%d",
+        ppool->poolName, desiredState);
+
+    ibuf = 0;
+
+    for (i=0; i < ppool->NB; i++)
+    {
+        bFound = (ppool->pNXPBuffer[i].state == desiredState);
+        if (bFound)
+        {
+            ibuf = i;
+            break;
+        }
+    }
+
+    if(!bFound)
+    {
+        ALOGV("VIDEOEDITOR_BUFFER_getBuffer No buffer available in state %d",
+            desiredState);
+        *pNXPBuffer = M4OSA_NULL;
+        return M4ERR_NO_BUFFER_AVAILABLE;
+    }
+
+    /* case where a buffer has been found */
+    *pNXPBuffer = &(ppool->pNXPBuffer[ibuf]);
+
+    ALOGV("VIDEOEDITOR_BUFFER_getBuffer: idx = %d", ibuf);
+
+    return(err);
+}
+
+M4OSA_ERR VIDEOEDITOR_BUFFER_initPoolBuffers(VIDEOEDITOR_BUFFER_Pool* pool,
+        M4OSA_UInt32 lSize)
+{
+    M4OSA_ERR err = M4NO_ERROR;
+    M4OSA_UInt32 index, j;
+
+    /**
+     * Initialize all the buffers in the pool */
+    for(index = 0; index < pool->NB; index++)
+    {
+        pool->pNXPBuffer[index].pData = M4OSA_NULL;
+        pool->pNXPBuffer[index].pData = (M4OSA_Void*)M4OSA_32bitAlignedMalloc(
+            lSize, VIDEOEDITOR_BUFFER_EXTERNAL,
+            (M4OSA_Char*)("BUFFER_initPoolBuffers: Buffer data"));
+        if(M4OSA_NULL == pool->pNXPBuffer[index].pData)
+        {
+            for (j = 0; j < index; j++)
+            {
+                if(M4OSA_NULL != pool->pNXPBuffer[j].pData)
+                {
+                    free(pool->pNXPBuffer[j].pData);
+                    pool->pNXPBuffer[j].pData = M4OSA_NULL;
+                }
+            }
+            err = M4ERR_ALLOC;
+            return err;
+        }
+        pool->pNXPBuffer[index].size = 0;
+        pool->pNXPBuffer[index].state = VIDEOEDITOR_BUFFER_kEmpty;
+        pool->pNXPBuffer[index].idx = index;
+        pool->pNXPBuffer[index].buffCTS = -1;
+    }
+    return err;
+}
+
+M4OSA_ERR VIDEOEDITOR_BUFFER_getOldestBuffer(VIDEOEDITOR_BUFFER_Pool *pool,
+        VIDEOEDITOR_BUFFER_State desiredState,
+        VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer)
+{
+    M4OSA_ERR err = M4NO_ERROR;
+    M4OSA_UInt32 index, j;
+    M4_MediaTime candidateTimeStamp = (M4_MediaTime)0x7ffffff;
+    M4OSA_Bool bFound = M4OSA_FALSE;
+
+    *pNXPBuffer = M4OSA_NULL;
+    for(index = 0; index< pool->NB; index++)
+    {
+        if(pool->pNXPBuffer[index].state == desiredState)
+        {
+            if(pool->pNXPBuffer[index].buffCTS <= candidateTimeStamp)
+            {
+                bFound = M4OSA_TRUE;
+                candidateTimeStamp = pool->pNXPBuffer[index].buffCTS;
+                *pNXPBuffer = &(pool->pNXPBuffer[index]);
+            }
+        }
+    }
+    if(M4OSA_FALSE == bFound)
+    {
+        ALOGV("VIDEOEDITOR_BUFFER_getOldestBuffer WARNING no buffer available");
+        err = M4ERR_NO_BUFFER_AVAILABLE;
+    }
+    return err;
+}
diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorMp3Reader.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorMp3Reader.cpp
new file mode 100644
index 0000000..af53c54
--- /dev/null
+++ b/frameworks/videoedit/stagefrightshells/VideoEditorMp3Reader.cpp
@@ -0,0 +1,803 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+*************************************************************************
+* @file   VideoEditorMp3Reader.cpp
+* @brief  StageFright shell MP3 Reader
+*************************************************************************
+*/
+#define LOG_NDEBUG 1
+#define LOG_TAG "VIDEOEDITOR_MP3READER"
+
+/**
+ * HEADERS
+ *
+ */
+#include "M4OSA_Debug.h"
+#include "M4SYS_AccessUnit.h"
+#include "VideoEditorMp3Reader.h"
+#include "VideoEditorUtils.h"
+
+#include "utils/Log.h"
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+
+/**
+ * SOURCE CLASS
+ */
+
+namespace android {
+/**
+ * ENGINE INTERFACE
+ */
+
+/**
+ **************************************************************************
+ * structure VideoEditorMp3Reader_Context
+ * @brief    This structure defines the context of the SF MP3 reader shell.
+ **************************************************************************
+ */
+typedef struct {
+    sp<DataSource>         mDataSource;
+    sp<MediaExtractor>     mExtractor;
+    sp<MediaSource>        mMediaSource;
+    M4_AudioStreamHandler* mAudioStreamHandler;
+    M4SYS_AccessUnit       mAudioAu;
+    M4OSA_Time             mMaxDuration;
+    M4OSA_UInt8            mStreamNumber;
+    M4OSA_Bool             mSeeking;
+    M4OSA_Time             mSeekTime;
+    uint32_t               mExtractorFlags;
+} VideoEditorMp3Reader_Context;
+
+/**
+ ****************************************************************************
+ * @brief    create an instance of the MP3 reader
+ * @note     allocates the context
+ *
+ * @param    pContext:        (OUT)  pointer on a reader context
+ *
+ * @return   M4NO_ERROR       there is no error
+ * @return   M4ERR_ALLOC      a memory allocation has failed
+ * @return   M4ERR_PARAMETER  at least one parameter is not valid
+ ****************************************************************************
+*/
+M4OSA_ERR VideoEditorMp3Reader_create(M4OSA_Context *pContext) {
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorMp3Reader_Context *pReaderContext = M4OSA_NULL;
+
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+
+    ALOGV("VideoEditorMp3Reader_create begin");
+
+    /* Context allocation & initialization */
+    SAFE_MALLOC(pReaderContext, VideoEditorMp3Reader_Context, 1,
+        "VideoEditorMp3Reader");
+
+    pReaderContext->mAudioStreamHandler  = M4OSA_NULL;
+    pReaderContext->mAudioAu.dataAddress = M4OSA_NULL;
+    pReaderContext->mMaxDuration = 0;
+    *pContext = pReaderContext;
+
+cleanUp:
+    if (M4NO_ERROR == err) {
+        ALOGV("VideoEditorMp3Reader_create no error");
+    } else {
+        ALOGV("VideoEditorMp3Reader_create ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorMp3Reader_create end");
+    return err;
+}
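+
+/* Shell lifecycle for reference (illustrative; the engine normally invokes
+ * these through the M4READER function-pointer table rather than directly,
+ * and the file path below is a placeholder):
+ *
+ *   M4OSA_Context ctx = M4OSA_NULL;
+ *   VideoEditorMp3Reader_create(&ctx);
+ *   VideoEditorMp3Reader_open(ctx, (M4OSA_Void*)"/path/to/clip.mp3");
+ *   // ...getNextStream / fillAuStruct / jump...
+ *   VideoEditorMp3Reader_close(ctx);
+ *   VideoEditorMp3Reader_destroy(ctx);
+ */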
+
+/**
+ *******************************************************************************
+ * @brief    destroy the instance of the MP3 reader
+ * @note     after this call the context is invalid
+ * @param    context:         (IN) Context of the reader
+ * @return   M4NO_ERROR       there is no error
+ * @return   M4ERR_PARAMETER  The input parameter is not properly set
+ *******************************************************************************
+*/
+M4OSA_ERR VideoEditorMp3Reader_destroy(M4OSA_Context pContext) {
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorMp3Reader_Context *pReaderContext =
+        (VideoEditorMp3Reader_Context*)pContext;
+
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pReaderContext, M4ERR_PARAMETER);
+    ALOGV("VideoEditorMp3Reader_destroy begin");
+
+    SAFE_FREE(pReaderContext);
+cleanUp:
+    if (M4NO_ERROR == err) {
+        ALOGV("VideoEditorMp3Reader_destroy no error");
+    } else {
+        ALOGV("VideoEditorMp3Reader_destroy ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorMp3Reader_destroy end");
+    return err;
+}
+
+/**
+ ******************************************************************************
+ * @brief    open the reader and initializes its created instance
+ * @note     this function opens the MP3 file
+ * @param    context:          (IN) Context of the reader
+ * @param    pFileDescriptor:  (IN) Pointer to proprietary data identifying
+ *                                  the media to open
+ *
+ * @return   M4NO_ERROR                    there is no error
+ * @return   M4ERR_PARAMETER               the context is NULL
+ * @return   M4ERR_BAD_CONTEXT             provided context is not a valid one
+ * @return   M4ERR_UNSUPPORTED_MEDIA_TYPE  the media is DRM protected
+ ******************************************************************************
+*/
+M4OSA_ERR VideoEditorMp3Reader_open(M4OSA_Context context,
+        M4OSA_Void* pFileDescriptor){
+    VideoEditorMp3Reader_Context *pReaderContext =
+        (VideoEditorMp3Reader_Context*)context;
+    M4OSA_ERR err = M4NO_ERROR;
+
+    ALOGV("VideoEditorMp3Reader_open begin");
+    /* Check function parameters*/
+    M4OSA_DEBUG_IF1((M4OSA_NULL == pReaderContext), M4ERR_PARAMETER,
+        "VideoEditorMp3Reader_open: invalid context pointer");
+    M4OSA_DEBUG_IF1((M4OSA_NULL == pFileDescriptor), M4ERR_PARAMETER,
+        "VideoEditorMp3Reader_open: invalid pointer pFileDescriptor");
+
+    ALOGV("VideoEditorMp3Reader_open Datasource start %s",
+        (char*)pFileDescriptor);
+    pReaderContext->mDataSource = new FileSource ((char*)pFileDescriptor);
+    ALOGV("VideoEditorMp3Reader_open Datasource end");
+
+    if (pReaderContext->mDataSource == NULL) {
+        ALOGV("VideoEditorMp3Reader_open Datasource error");
+        return UNKNOWN_ERROR;
+    }
+
+    ALOGV("VideoEditorMp3Reader_open extractor start");
+    pReaderContext->mExtractor = MediaExtractor::Create(
+        pReaderContext->mDataSource,MEDIA_MIMETYPE_AUDIO_MPEG);
+    ALOGV("VideoEditorMp3Reader_open extractor end");
+
+    if (pReaderContext->mExtractor == NULL) {
+        ALOGV("VideoEditorMp3Reader_open extractor error");
+        return UNKNOWN_ERROR;
+    }
+    pReaderContext->mStreamNumber = 0;
+
+    int32_t isDRMProtected = 0;
+    sp<MetaData> meta = pReaderContext->mExtractor->getMetaData();
+    meta->findInt32(kKeyIsDRM, &isDRMProtected);
+    if (isDRMProtected) {
+        ALOGV("VideoEditorMp3Reader_open error - DRM Protected");
+        return M4ERR_UNSUPPORTED_MEDIA_TYPE;
+    }
+
+    ALOGV("VideoEditorMp3Reader_open end");
+    return err;
+}
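+
+/* Note on the extractor created above: passing MEDIA_MIMETYPE_AUDIO_MPEG to
+ * MediaExtractor::Create() pins the MP3 extractor instead of letting
+ * stagefright sniff the container (which is what a NULL mime argument would
+ * request); stated here as background on the stagefright contract. */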
+
+/**
+ **************************************************************************
+ * @brief    close the reader
+ * @note     this function closes the MP3 reader
+ * @param    context:         (IN) Context of the reader
+ * @return   M4NO_ERROR       there is no error
+ * @return   M4ERR_PARAMETER  the context is NULL
+ **************************************************************************
+*/
+M4OSA_ERR VideoEditorMp3Reader_close(M4OSA_Context context) {
+    VideoEditorMp3Reader_Context *pReaderContext =
+        (VideoEditorMp3Reader_Context*)context;
+    M4OSA_ERR err = M4NO_ERROR;
+
+    ALOGV("VideoEditorMp3Reader_close begin");
+    /* Check function parameters */
+    M4OSA_DEBUG_IF1((M4OSA_NULL == pReaderContext), M4ERR_PARAMETER,
+        "VideoEditorMp3Reader_close: invalid context pointer");
+
+    if (pReaderContext->mAudioStreamHandler != NULL) {
+        if (M4OSA_NULL != pReaderContext->mAudioStreamHandler->\
+            m_basicProperties.m_pDecoderSpecificInfo) {
+            free(pReaderContext->mAudioStreamHandler->\
+                m_basicProperties.m_pDecoderSpecificInfo);
+            pReaderContext->mAudioStreamHandler->m_basicProperties.\
+                m_decoderSpecificInfoSize = 0;
+            pReaderContext->mAudioStreamHandler->m_basicProperties.\
+                m_pDecoderSpecificInfo = M4OSA_NULL;
+        }
+
+        /* Finally destroy the stream handler */
+        free(pReaderContext->mAudioStreamHandler);
+        pReaderContext->mAudioStreamHandler = M4OSA_NULL;
+
+        if (pReaderContext->mAudioAu.dataAddress != NULL) {
+            free(pReaderContext->mAudioAu.dataAddress);
+            pReaderContext->mAudioAu.dataAddress = NULL;
+        }
+    }
+
+    pReaderContext->mMediaSource->stop();
+    pReaderContext->mMediaSource.clear();
+    pReaderContext->mExtractor.clear();
+    pReaderContext->mDataSource.clear();
+
+    ALOGV("VideoEditorMp3Reader_close end ");
+    return err;
+}
+/**
+ ******************************************************************************
+ * @brief   get an option value from the reader
+ * @note
+ *          it allows the caller to retrieve a property value:
+ *
+ * @param   context:    (IN)    Context of the reader
+ * @param   optionId:   (IN)    indicates the option to get
+ * @param   pValue:     (OUT)   pointer to structure or value (allocated
+ *                              by user) where option is stored
+ *
+ * @return  M4NO_ERROR           there is no error
+ * @return  M4ERR_PARAMETER      at least one parameter is not properly set
+ * @return  M4ERR_BAD_OPTION_ID  when the option ID is not a valid one
+ ******************************************************************************
+*/
+M4OSA_ERR VideoEditorMp3Reader_getOption(M4OSA_Context context,
+        M4OSA_OptionID optionId, M4OSA_DataOption pValue) {
+    VideoEditorMp3Reader_Context *pReaderContext =
+        (VideoEditorMp3Reader_Context*)context;
+    M4OSA_ERR err = M4NO_ERROR;
+
+    ALOGV("VideoEditorMp3Reader_getOption begin: optionId= %d ",(int)optionId);
+
+    M4OSA_DEBUG_IF1((M4OSA_NULL == pReaderContext), M4ERR_PARAMETER,
+        "invalid context pointer");
+    M4OSA_DEBUG_IF1((M4OSA_NULL == pValue), M4ERR_PARAMETER,
+        "invalid value pointer");
+
+    switch(optionId) {
+        case M4READER_kOptionID_Duration:
+        {
+            ALOGV("Mp3Reader duration=%ld",pReaderContext->mMaxDuration);
+            *(M4OSA_Time*)pValue = pReaderContext->mMaxDuration;
+        }
+        break;
+
+        case M4READER_kOptionID_Bitrate:
+        {
+            M4OSA_UInt32* pBitrate = (M4OSA_UInt32*)pValue;
+            if (M4OSA_NULL != pReaderContext->mAudioStreamHandler) {
+                *pBitrate = pReaderContext->mAudioStreamHandler->\
+                    m_basicProperties.m_averageBitRate;
+            } else {
+                *pBitrate = 0;
+                err = M4ERR_PARAMETER;
+            }
+        }
+        break;
+
+        case M4READER_kOptionID_Mp3Id3v1Tag:
+        break;
+
+        case M4READER_kOptionID_Mp3Id3v2Tag:
+        break;
+
+        case M4READER_kOptionID_GetMetadata:
+        break;
+
+        default :
+        {
+            ALOGV("VideoEditorMp3Reader_getOption: M4ERR_BAD_OPTION_ID");
+            err = M4ERR_BAD_OPTION_ID;
+        }
+    }
+    ALOGV("VideoEditorMp3Reader_getOption end ");
+    return err;
+}
+/**
+ ******************************************************************************
+ * @brief   set an option value of the reader
+ * @note
+ *          it allows the caller to set a property value:
+ *
+ * @param   context:    (IN)    Context of the reader
+ * @param   optionId:   (IN)    Identifier indicating the option to set
+ * @param   pValue:     (IN)    Pointer to structure or value (allocated
+ *                              by user) where option is stored
+ *
+ * @return  M4NO_ERROR      There is
no error + * @return M4ERR_BAD_OPTION_ID The option ID is not a valid one + * @return M4ERR_STATE State automaton is not applied + * @return M4ERR_PARAMETER The option parameter is invalid + ****************************************************************************** +*/ +M4OSA_ERR VideoEditorMp3Reader_setOption(M4OSA_Context context, + M4OSA_OptionID optionId, M4OSA_DataOption pValue) { + VideoEditorMp3Reader_Context *pReaderContext = + (VideoEditorMp3Reader_Context*)context; + M4OSA_ERR err = M4NO_ERROR; + + ALOGV("VideoEditorMp3Reader_Context begin: optionId: %d Value: %d ", + (int)optionId,(int)pValue); + + M4OSA_DEBUG_IF1((M4OSA_NULL == pReaderContext), M4ERR_PARAMETER, + "invalid context pointer"); + M4OSA_DEBUG_IF1((M4OSA_NULL == pValue), M4ERR_PARAMETER, + "invalid value pointer"); + + switch(optionId) { + case M4READER_kOptionID_SetOsaFileReaderFctsPtr: + default : + { + err = M4NO_ERROR; + } + } + ALOGV("VideoEditorMp3Reader_Context end "); + return err; +} +/** + ****************************************************************************** + * @brief jump into the stream at the specified time + * @note + * @param context: (IN) Context of the reader + * @param pStreamHandler(IN) stream description of the stream to make jump + * @param pTime (I/O)IN:the time to jump to (in ms) + * OUT: the time to which the stream really jumped + * @return M4NO_ERROR there is no error + * @return M4ERR_PARAMETER at least one parameter is not properly set + ****************************************************************************** +*/ +M4OSA_ERR VideoEditorMp3Reader_jump(M4OSA_Context context, + M4_StreamHandler *pStreamHandler, M4OSA_Int32* pTime) { + VideoEditorMp3Reader_Context *pReaderContext = + (VideoEditorMp3Reader_Context*)context; + M4SYS_StreamID streamIdArray[2]; + M4OSA_ERR err = M4NO_ERROR; + M4SYS_AccessUnit* pAu; + M4OSA_Time time64 = (M4OSA_Time)*pTime; + + ALOGV("VideoEditorMp3Reader_jump begin"); + M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER, + "VideoEditorMp3Reader_jump: invalid context"); + M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER, + "VideoEditorMp3Reader_jump: invalid pointer to M4_StreamHandler"); + M4OSA_DEBUG_IF1((pTime == 0), M4ERR_PARAMETER, + "VideoEditorMp3Reader_jump: invalid time pointer"); + + if(pStreamHandler == (M4_StreamHandler*)pReaderContext->\ + mAudioStreamHandler){ + pAu = &pReaderContext->mAudioAu; + } else { + ALOGV("VideoEditorMp3Reader_jump: passed StreamHandler is not known"); + return M4ERR_PARAMETER; + } + + streamIdArray[0] = pStreamHandler->m_streamId; + streamIdArray[1] = 0; + + ALOGV("VideoEditorMp3Reader_jump time ms %ld ", time64); + + pAu->CTS = time64; + pAu->DTS = time64; + + time64 = time64 * 1000; /* Convert the time into micro sec */ + ALOGV("VideoEditorMp3Reader_jump time us %ld ", time64); + + pReaderContext->mSeeking = M4OSA_TRUE; + pReaderContext->mSeekTime = time64; + + time64 = time64 / 1000; /* Convert the time into milli sec */ + *pTime = (M4OSA_Int32)time64; + ALOGV("VideoEditorMp3Reader_jump end "); + return err; +} +/** + ******************************************************************************* + * @brief Get the next stream found in the media file + * + * @param context: (IN) Context of the reader + * @param pMediaFamily: (OUT) pointer to a user allocated + * M4READER_MediaFamily that will be filled with + * the media family of the found stream + * @param pStreamHandler: (OUT) pointer to a stream handler that will be + * allocated and filled with stream description + * + * @return M4NO_ERROR 
there is no error
+ * @return   M4WAR_NO_MORE_STREAM  no more available stream in the media
+ * @return   M4ERR_PARAMETER       at least one parameter is not properly set
+ *******************************************************************************
+*/
+M4OSA_ERR VideoEditorMp3Reader_getNextStream(M4OSA_Context context,
+        M4READER_MediaFamily *pMediaFamily,
+        M4_StreamHandler **pStreamHandlerParam) {
+    VideoEditorMp3Reader_Context *pReaderContext =
+        (VideoEditorMp3Reader_Context*)context;
+    M4OSA_ERR err = M4NO_ERROR;
+    M4SYS_StreamID streamIdArray[2];
+    M4SYS_StreamDescription streamDesc;
+    M4_AudioStreamHandler* pAudioStreamHandler;
+    M4_StreamHandler* pStreamHandler;
+    M4OSA_UInt8 type, temp;
+    M4OSA_Bool haveAudio = M4OSA_FALSE;
+    sp<MetaData> meta = NULL;
+    int64_t Duration;
+
+    ALOGV("VideoEditorMp3Reader_getNextStream begin");
+    M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER,
+        "VideoEditorMp3Reader_getNextStream: invalid context");
+    M4OSA_DEBUG_IF1((pMediaFamily == 0), M4ERR_PARAMETER,
+        "VideoEditorMp3Reader_getNextStream: invalid pointer to MediaFamily");
+    M4OSA_DEBUG_IF1((pStreamHandlerParam == 0), M4ERR_PARAMETER,
+        "VideoEditorMp3Reader_getNextStream: invalid pointer to StreamHandler");
+
+    ALOGV("VideoEditorMp3Reader_getNextStream stream number = %d",
+        pReaderContext->mStreamNumber);
+    if (pReaderContext->mStreamNumber >= 1) {
+        ALOGV("VideoEditorMp3Reader_getNextStream max number of stream reached");
+        return M4WAR_NO_MORE_STREAM;
+    }
+    pReaderContext->mStreamNumber = pReaderContext->mStreamNumber + 1;
+    ALOGV("VideoEditorMp3Reader_getNextStream number of Tracks%d",
+        pReaderContext->mExtractor->countTracks());
+    for (temp = 0; temp < pReaderContext->mExtractor->countTracks(); temp++) {
+        meta = pReaderContext->mExtractor->getTrackMetaData(temp);
+        const char *mime;
+        CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+        if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
+            pReaderContext->mMediaSource =
+                pReaderContext->mExtractor->getTrack(temp);
+            pReaderContext->mMediaSource->start();
+            haveAudio = true;
+        }
+
+        if (haveAudio) {
+            break;
+        }
+    }
+
+    if (!haveAudio) {
+        ALOGV("VideoEditorMp3Reader_getNextStream no more stream ");
+        pReaderContext->mDataSource.clear();
+        return M4WAR_NO_MORE_STREAM;
+    }
+
+    pReaderContext->mExtractorFlags = pReaderContext->mExtractor->flags();
+    *pMediaFamily = M4READER_kMediaFamilyAudio;
+
+    meta->findInt64(kKeyDuration, &Duration);
+    streamDesc.duration = (M4OSA_Time)Duration/1000;
+
+    meta->findInt32(kKeyBitRate, (int32_t*)&streamDesc.averageBitrate);
+    meta->findInt32(kKeySampleRate, (int32_t*)&streamDesc.timeScale);
+    ALOGV("Bitrate = %d, SampleRate = %d duration = %lld",
+        streamDesc.averageBitrate,streamDesc.timeScale,Duration/1000);
+
+    streamDesc.streamType = M4SYS_kMP3;
+    streamDesc.profileLevel = 0xFF;
+    streamDesc.streamID = pReaderContext->mStreamNumber;
+    streamDesc.decoderSpecificInfo = M4OSA_NULL;
+    streamDesc.decoderSpecificInfoSize = 0;
+    streamDesc.maxBitrate = streamDesc.averageBitrate;
+
+    /* Allocate the audio stream handler and set its parameters */
+    pAudioStreamHandler = (M4_AudioStreamHandler*)M4OSA_32bitAlignedMalloc(
+        sizeof(M4_AudioStreamHandler), M4READER_MP3,
+        (M4OSA_Char*)"M4_AudioStreamHandler");
+
+    if (pAudioStreamHandler == M4OSA_NULL) {
+        ALOGV("VideoEditorMp3Reader_getNextStream malloc failed");
+        pReaderContext->mMediaSource->stop();
+        pReaderContext->mMediaSource.clear();
+        pReaderContext->mDataSource.clear();
+
+        return M4ERR_ALLOC;
+    }
+    pStreamHandler
=(M4_StreamHandler*)(pAudioStreamHandler); + *pStreamHandlerParam = pStreamHandler; + pReaderContext->mAudioStreamHandler = pAudioStreamHandler; + + pAudioStreamHandler->m_structSize = sizeof(M4_AudioStreamHandler); + + if (meta == NULL) { + ALOGV("VideoEditorMp3Reader_getNextStream meta is NULL"); + } + + pAudioStreamHandler->m_samplingFrequency = streamDesc.timeScale; + pStreamHandler->m_pDecoderSpecificInfo = + (M4OSA_UInt8*)(streamDesc.decoderSpecificInfo); + pStreamHandler->m_decoderSpecificInfoSize = + streamDesc.decoderSpecificInfoSize; + + meta->findInt32(kKeyChannelCount, + (int32_t*)&pAudioStreamHandler->m_nbChannels); + pAudioStreamHandler->m_byteFrameLength = 1152; + pAudioStreamHandler->m_byteSampleSize = 2; + + pStreamHandler->m_pUserData = NULL; + pStreamHandler->m_streamId = streamDesc.streamID; + pStreamHandler->m_duration = streamDesc.duration; + pReaderContext->mMaxDuration = streamDesc.duration; + pStreamHandler->m_averageBitRate = streamDesc.averageBitrate; + + pStreamHandler->m_maxAUSize = 0; + pStreamHandler->m_streamType = M4DA_StreamTypeAudioMp3; + + ALOGV("VideoEditorMp3Reader_getNextStream end "); + return err; +} + +/** + ******************************************************************************* + * @brief fill the access unit structure with initialization values + * @param context: (IN) Context of the reader + * @param pStreamHandler: (IN) pointer to the stream handler to which + * the access unit will be associated + * @param pAccessUnit: (IN/OUT) pointer to the access unit (allocated by + * the caller) to initialize + * @return M4NO_ERROR there is no error + * @return M4ERR_PARAMETER at least one parameter is not properly set + ******************************************************************************* +*/ +M4OSA_ERR VideoEditorMp3Reader_fillAuStruct(M4OSA_Context context, + M4_StreamHandler *pStreamHandler, M4_AccessUnit *pAccessUnit) { + VideoEditorMp3Reader_Context *pReaderContext = + (VideoEditorMp3Reader_Context*)context; + M4SYS_AccessUnit *pAu; + + M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER, + "VideoEditorMp3Reader_fillAuStruct: invalid context"); + M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER, + "VideoEditorMp3Reader_fillAuStruct invalid pointer to StreamHandler"); + M4OSA_DEBUG_IF1((pAccessUnit == 0), M4ERR_PARAMETER, + "VideoEditorMp3Reader_fillAuStruct: invalid pointer to M4_AccessUnit"); + + ALOGV("VideoEditorMp3Reader_fillAuStruct start "); + if(pStreamHandler == (M4_StreamHandler*)pReaderContext->\ + mAudioStreamHandler){ + pAu = &pReaderContext->mAudioAu; + } else { + ALOGV("VideoEditorMp3Reader_fillAuStruct StreamHandler is not known"); + return M4ERR_PARAMETER; + } + + /* Initialize pAu structure */ + pAu->dataAddress = M4OSA_NULL; + pAu->size = 0; + pAu->CTS = 0; + pAu->DTS = 0; + pAu->attribute = 0; + pAu->nbFrag = 0; + + /* Initialize pAccessUnit structure */ + pAccessUnit->m_size = 0; + pAccessUnit->m_CTS = 0; + pAccessUnit->m_DTS = 0; + pAccessUnit->m_attribute = 0; + pAccessUnit->m_dataAddress = M4OSA_NULL; + pAccessUnit->m_maxsize = pStreamHandler->m_maxAUSize; + pAccessUnit->m_streamID = pStreamHandler->m_streamId; + pAccessUnit->m_structSize = sizeof(M4_AccessUnit); + + ALOGV("VideoEditorMp3Reader_fillAuStruct end"); + return M4NO_ERROR; +} + +/** + ******************************************************************************* + * @brief reset the stream, i.e seek it to the beginning + * @note + * @param context: (IN) Context of the reader + * @param pStreamHandler (IN) The stream handler of the stream 
to reset + * @return M4NO_ERROR there is no error + * @return M4ERR_PARAMETER at least one parameter is not properly set + ******************************************************************************* +*/ +M4OSA_ERR VideoEditorMp3Reader_reset(M4OSA_Context context, + M4_StreamHandler *pStreamHandler) { + VideoEditorMp3Reader_Context *pReaderContext = + (VideoEditorMp3Reader_Context*)context; + + M4OSA_ERR err = M4NO_ERROR; + M4SYS_StreamID streamIdArray[2]; + M4SYS_AccessUnit* pAu; + M4OSA_Time time64 = 0; + + ALOGV("VideoEditorMp3Reader_reset start"); + M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER, + "VideoEditorMp3Reader_reset: invalid context"); + M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER, + "VideoEditorMp3Reader_reset: invalid pointer to M4_StreamHandler"); + + if (pStreamHandler == (M4_StreamHandler*)pReaderContext->\ + mAudioStreamHandler) { + pAu = &pReaderContext->mAudioAu; + } else { + ALOGV("VideoEditorMp3Reader_reset StreamHandler is not known"); + return M4ERR_PARAMETER; + } + streamIdArray[0] = pStreamHandler->m_streamId; + streamIdArray[1] = 0; + pAu->CTS = time64; + pAu->DTS = time64; + + pReaderContext->mSeeking = M4OSA_TRUE; + pReaderContext->mSeekTime = time64; + + ALOGV("VideoEditorMp3Reader_reset end"); + return err; +} +/** + ******************************************************************************* + * @brief Gets an access unit (AU) from the stream handler source. + * @note AU is the smallest possible amount of data to be decoded by decoder + * + * @param context: (IN) Context of the reader + * @param pStreamHandler (IN) The stream handler of the stream to make jump + * @param pAccessUnit (I/O)Pointer to an access unit to fill with read data + * @return M4NO_ERROR there is no error + * @return M4ERR_PARAMETER at least one parameter is not properly set + * @returns M4ERR_ALLOC memory allocation failed + * @returns M4WAR_NO_MORE_AU there are no more access unit in the stream + ******************************************************************************* +*/ +M4OSA_ERR VideoEditorMp3Reader_getNextAu(M4OSA_Context context, + M4_StreamHandler *pStreamHandler, M4_AccessUnit *pAccessUnit) { + VideoEditorMp3Reader_Context *pReaderContext = + (VideoEditorMp3Reader_Context*)context; + M4OSA_ERR err = M4NO_ERROR; + M4SYS_AccessUnit* pAu; + MediaBuffer *mAudioBuffer; + MediaSource::ReadOptions options; + + ALOGV("VideoEditorMp3Reader_getNextAu start"); + M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER, + "VideoEditorMp3Reader_getNextAu: invalid context"); + M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER, + "VideoEditorMp3Reader_getNextAu: invalid pointer to M4_StreamHandler"); + M4OSA_DEBUG_IF1((pAccessUnit == 0), M4ERR_PARAMETER, + "VideoEditorMp3Reader_getNextAu: invalid pointer to M4_AccessUnit"); + + if (pStreamHandler == (M4_StreamHandler*)pReaderContext->\ + mAudioStreamHandler) { + pAu = &pReaderContext->mAudioAu; + } else { + ALOGV("VideoEditorMp3Reader_getNextAu: StreamHandler is not known\n"); + return M4ERR_PARAMETER; + } + + if (pReaderContext->mSeeking) { + options.setSeekTo(pReaderContext->mSeekTime); + } + + pReaderContext->mMediaSource->read(&mAudioBuffer, &options); + + if (mAudioBuffer != NULL) { + if ((pAu->dataAddress == NULL) || + (pAu->size < mAudioBuffer->range_length())) { + if (pAu->dataAddress != NULL) { + free((M4OSA_Int32*)pAu->dataAddress); + pAu->dataAddress = NULL; + } + pAu->dataAddress = (M4OSA_Int32*)M4OSA_32bitAlignedMalloc( + (mAudioBuffer->range_length() + 3) & ~0x3, + M4READER_MP3, 
(M4OSA_Char*)"pAccessUnit->m_dataAddress" ); + + if (pAu->dataAddress == NULL) { + ALOGV("VideoEditorMp3Reader_getNextAu malloc failed"); + pReaderContext->mMediaSource->stop(); + pReaderContext->mMediaSource.clear(); + pReaderContext->mDataSource.clear(); + + return M4ERR_ALLOC; + } + } + pAu->size = mAudioBuffer->range_length(); + memcpy((M4OSA_MemAddr8)pAu->dataAddress, + (const char *)mAudioBuffer->data() + mAudioBuffer->range_offset(), + mAudioBuffer->range_length()); + + mAudioBuffer->meta_data()->findInt64(kKeyTime, (int64_t*)&pAu->CTS); + + + pAu->CTS = pAu->CTS / 1000; /*converting the microsec to millisec */ + pAu->DTS = pAu->CTS; + pAu->attribute = M4SYS_kFragAttrOk; + mAudioBuffer->release(); + + ALOGV("VideoEditorMp3Reader_getNextAu AU CTS = %ld",pAu->CTS); + + pAccessUnit->m_dataAddress = (M4OSA_Int8*) pAu->dataAddress; + pAccessUnit->m_size = pAu->size; + pAccessUnit->m_CTS = pAu->CTS; + pAccessUnit->m_DTS = pAu->DTS; + pAccessUnit->m_attribute = pAu->attribute; + } else { + ALOGV("VideoEditorMp3Reader_getNextAu EOS reached."); + pAccessUnit->m_size=0; + err = M4WAR_NO_MORE_AU; + } + pAu->nbFrag = 0; + + options.clearSeekTo(); + pReaderContext->mSeeking = M4OSA_FALSE; + mAudioBuffer = NULL; + ALOGV("VideoEditorMp3Reader_getNextAu end"); + + return err; +} + +extern "C" { + +M4OSA_ERR VideoEditorMp3Reader_getInterface( + M4READER_MediaType *pMediaType, + M4READER_GlobalInterface **pRdrGlobalInterface, + M4READER_DataInterface **pRdrDataInterface) { + M4OSA_ERR err = M4NO_ERROR; + + ALOGV("VideoEditorMp3Reader_getInterface: begin"); + /* Input parameters check */ + VIDEOEDITOR_CHECK(M4OSA_NULL != pMediaType, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pRdrGlobalInterface, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pRdrDataInterface, M4ERR_PARAMETER); + + SAFE_MALLOC(*pRdrGlobalInterface, M4READER_GlobalInterface, 1, + "VideoEditorMp3Reader_getInterface"); + SAFE_MALLOC(*pRdrDataInterface, M4READER_DataInterface, 1, + "VideoEditorMp3Reader_getInterface"); + + *pMediaType = M4READER_kMediaTypeMP3; + + (*pRdrGlobalInterface)->m_pFctCreate = VideoEditorMp3Reader_create; + (*pRdrGlobalInterface)->m_pFctDestroy = VideoEditorMp3Reader_destroy; + (*pRdrGlobalInterface)->m_pFctOpen = VideoEditorMp3Reader_open; + (*pRdrGlobalInterface)->m_pFctClose = VideoEditorMp3Reader_close; + (*pRdrGlobalInterface)->m_pFctGetOption = VideoEditorMp3Reader_getOption; + (*pRdrGlobalInterface)->m_pFctSetOption = VideoEditorMp3Reader_setOption; + (*pRdrGlobalInterface)->m_pFctGetNextStream = + VideoEditorMp3Reader_getNextStream; + (*pRdrGlobalInterface)->m_pFctFillAuStruct = + VideoEditorMp3Reader_fillAuStruct; + (*pRdrGlobalInterface)->m_pFctStart = M4OSA_NULL; + (*pRdrGlobalInterface)->m_pFctStop = M4OSA_NULL; + (*pRdrGlobalInterface)->m_pFctJump = VideoEditorMp3Reader_jump; + (*pRdrGlobalInterface)->m_pFctReset = VideoEditorMp3Reader_reset; + (*pRdrGlobalInterface)->m_pFctGetPrevRapTime = M4OSA_NULL; + + (*pRdrDataInterface)->m_pFctGetNextAu = VideoEditorMp3Reader_getNextAu; + (*pRdrDataInterface)->m_readerContext = M4OSA_NULL; + +cleanUp: + if( M4NO_ERROR == err ) + { + ALOGV("VideoEditorMp3Reader_getInterface no error"); + } + else + { + SAFE_FREE(*pRdrGlobalInterface); + SAFE_FREE(*pRdrDataInterface); + + ALOGV("VideoEditorMp3Reader_getInterface ERROR 0x%X", err); + } + ALOGV("VideoEditorMp3Reader_getInterface: end"); + return err; +} +} /* extern "C" */ +} /* namespace android */ diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp 
b/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp new file mode 100644 index 0000000..21d3c30 --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp @@ -0,0 +1,1778 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** +************************************************************************* +* @file VideoEditorVideoDecoder.cpp +* @brief StageFright shell video decoder +************************************************************************* +*/ +#define LOG_NDEBUG 1 +#define LOG_TAG "VIDEOEDITOR_VIDEODECODER" +/******************* + * HEADERS * + *******************/ + +#include "VideoEditorVideoDecoder_internal.h" +#include "VideoEditorUtils.h" +#include "M4VD_Tools.h" + +#include +#include +#include +/******************** + * DEFINITIONS * + ********************/ +#define MAX_DEC_BUFFERS 10 + +/******************** + * SOURCE CLASS * + ********************/ +using namespace android; +static M4OSA_ERR copyBufferToQueue( + VideoEditorVideoDecoder_Context* pDecShellContext, + MediaBuffer* pDecodedBuffer); + +class VideoEditorVideoDecoderSource : public MediaSource { + public: + + VideoEditorVideoDecoderSource( + const sp<MetaData> &format, + VIDEOEDITOR_CodecType codecType, + void *decoderShellContext); + + virtual status_t start(MetaData *params = NULL); + virtual status_t stop(); + virtual sp<MetaData> getFormat(); + virtual status_t read( + MediaBuffer **buffer, const ReadOptions *options = NULL); + + protected : + virtual ~VideoEditorVideoDecoderSource(); + + private: + sp<MetaData> mFormat; + MediaBuffer* mBuffer; + MediaBufferGroup* mGroup; + Mutex mLock; + VideoEditorVideoDecoder_Context* mpDecShellContext; + int32_t mMaxAUSize; + bool mStarted; + VIDEOEDITOR_CodecType mCodecType; + + // Don't call me + VideoEditorVideoDecoderSource(const VideoEditorVideoDecoderSource &); + VideoEditorVideoDecoderSource &operator=( + const VideoEditorVideoDecoderSource &); +}; + +VideoEditorVideoDecoderSource::VideoEditorVideoDecoderSource( + const sp<MetaData> &format, VIDEOEDITOR_CodecType codecType, + void *decoderShellContext) : + mFormat(format), + mBuffer(NULL), + mGroup(NULL), + mStarted(false), + mCodecType(codecType) { + mpDecShellContext = (VideoEditorVideoDecoder_Context*) decoderShellContext; +} + +VideoEditorVideoDecoderSource::~VideoEditorVideoDecoderSource() { + if (mStarted == true) { + stop(); + } +} + +status_t VideoEditorVideoDecoderSource::start( + MetaData *params) { + + if (!mStarted) { + if (mFormat->findInt32(kKeyMaxInputSize, &mMaxAUSize) == false) { + ALOGE("Could not find kKeyMaxInputSize"); + return ERROR_MALFORMED; + } + + mGroup = new MediaBufferGroup; + if (mGroup == NULL) { + ALOGE("FATAL: memory limitation ! "); + return NO_MEMORY; + } + + mGroup->add_buffer(new MediaBuffer(mMaxAUSize)); + + mStarted = true; + } + return OK; +} + +status_t VideoEditorVideoDecoderSource::stop() { + if (mStarted) { + if (mBuffer != NULL) { + + // FIXME: + // Why do we need to check on the ref count?
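// Background for the FIXME above: MediaBuffer is reference-counted, and a
// buffer is only handed back to its MediaBufferGroup once the count drops
// to zero, so draining every outstanding reference guarantees the buffer is
// back in mGroup before the `delete mGroup` below. A minimal sketch of the
// conventional single-owner release, assuming read() held the only
// reference (illustrative only, not what this shell does):
#if 0
        if (mBuffer != NULL) {
            mBuffer->release(); // count 1 -> 0, buffer returns to mGroup
            mBuffer = NULL;
        }
#endif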
+ int ref_count = mBuffer->refcount(); + ALOGV("MediaBuffer refcount is %d",ref_count); + for (int i = 0; i < ref_count; ++i) { + mBuffer->release(); + } + + mBuffer = NULL; + } + delete mGroup; + mGroup = NULL; + mStarted = false; + } + return OK; +} + +sp<MetaData> VideoEditorVideoDecoderSource::getFormat() { + Mutex::Autolock autolock(mLock); + + return mFormat; +} + +status_t VideoEditorVideoDecoderSource::read(MediaBuffer** buffer_out, + const ReadOptions *options) { + + Mutex::Autolock autolock(mLock); + if (options != NULL) { + int64_t time_us; + MediaSource::ReadOptions::SeekMode mode; + options->getSeekTo(&time_us, &mode); + if (mode != MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC) { + ALOGE("Unexpected read options"); + return BAD_VALUE; + } + + M4OSA_ERR err; + M4OSA_Int32 rapTime = time_us / 1000; + + /*--- Retrieve the previous RAP time ---*/ + err = mpDecShellContext->m_pReaderGlobal->m_pFctGetPrevRapTime( + mpDecShellContext->m_pReader->m_readerContext, + (M4_StreamHandler*)mpDecShellContext->m_pVideoStreamhandler, + &rapTime); + + if (err == M4WAR_READER_INFORMATION_NOT_PRESENT) { + /* No RAP table, jump backward and predecode */ + rapTime -= 40000; + if(rapTime < 0) rapTime = 0; + } else if (err != OK) { + ALOGE("get rap time error = 0x%x\n", (uint32_t)err); + return UNKNOWN_ERROR; + } + + err = mpDecShellContext->m_pReaderGlobal->m_pFctJump( + mpDecShellContext->m_pReader->m_readerContext, + (M4_StreamHandler*)mpDecShellContext->m_pVideoStreamhandler, + &rapTime); + + if (err != OK) { + ALOGE("jump err = 0x%x\n", (uint32_t)err); + return BAD_VALUE; + } + } + + *buffer_out = NULL; + + M4OSA_ERR lerr = mGroup->acquire_buffer(&mBuffer); + if (lerr != OK) { + return lerr; + } + mBuffer->meta_data()->clear(); // clear all the meta data + + if (mStarted) { + //getNext AU from reader. + M4_AccessUnit* pAccessUnit = mpDecShellContext->m_pNextAccessUnitToDecode; + lerr = mpDecShellContext->m_pReader->m_pFctGetNextAu( + mpDecShellContext->m_pReader->m_readerContext, + (M4_StreamHandler*)mpDecShellContext->m_pVideoStreamhandler, + pAccessUnit); + if (lerr == M4WAR_NO_DATA_YET || lerr == M4WAR_NO_MORE_AU) { + *buffer_out = NULL; + return ERROR_END_OF_STREAM; + } + + //copy the reader AU buffer to mBuffer + M4OSA_UInt32 lSize = (pAccessUnit->m_size > (M4OSA_UInt32)mMaxAUSize)\ + ? (M4OSA_UInt32)mMaxAUSize : pAccessUnit->m_size; + memcpy((void *)mBuffer->data(),(void *)pAccessUnit->m_dataAddress, + lSize); + + mBuffer->set_range(0, lSize); + int64_t frameTimeUs = (int64_t) (pAccessUnit->m_CTS * 1000); + mBuffer->meta_data()->setInt64(kKeyTime, frameTimeUs); + + // Replace the AU start code for H264 + if (VIDEOEDITOR_kH264VideoDec == mCodecType) { + uint8_t *data =(uint8_t *)mBuffer->data() + mBuffer->range_offset(); + data[0]=0; + data[1]=0; + data[2]=0; + data[3]=1; + } + mBuffer->meta_data()->setInt32(kKeyIsSyncFrame, + (pAccessUnit->m_attribute == 0x04)?
1 : 0); + *buffer_out = mBuffer; + } + return OK; +} + +static M4OSA_UInt32 VideoEditorVideoDecoder_GetBitsFromMemory( + VIDEOEDITOR_VIDEO_Bitstream_ctxt* parsingCtxt, M4OSA_UInt32 nb_bits) { + return (M4VD_Tools_GetBitsFromMemory((M4VS_Bitstream_ctxt*) parsingCtxt, + nb_bits)); +} + +M4OSA_ERR VideoEditorVideoDecoder_internalParseVideoDSI(M4OSA_UInt8* pVol, + M4OSA_Int32 aVolSize, M4DECODER_MPEG4_DecoderConfigInfo* pDci, + M4DECODER_VideoSize* pVideoSize) { + + VIDEOEDITOR_VIDEO_Bitstream_ctxt parsingCtxt; + M4OSA_UInt32 code, j; + M4OSA_MemAddr8 start; + M4OSA_UInt8 i; + M4OSA_UInt32 time_incr_length; + M4OSA_UInt8 vol_verid=0, b_hierarchy_type; + + /* Parsing variables */ + M4OSA_UInt8 video_object_layer_shape = 0; + M4OSA_UInt8 sprite_enable = 0; + M4OSA_UInt8 reduced_resolution_vop_enable = 0; + M4OSA_UInt8 scalability = 0; + M4OSA_UInt8 enhancement_type = 0; + M4OSA_UInt8 complexity_estimation_disable = 0; + M4OSA_UInt8 interlaced = 0; + M4OSA_UInt8 sprite_warping_points = 0; + M4OSA_UInt8 sprite_brightness_change = 0; + M4OSA_UInt8 quant_precision = 0; + + /* Fill the structure with default parameters */ + pVideoSize->m_uiWidth = 0; + pVideoSize->m_uiHeight = 0; + + pDci->uiTimeScale = 0; + pDci->uiProfile = 0; + pDci->uiUseOfResynchMarker = 0; + pDci->bDataPartition = M4OSA_FALSE; + pDci->bUseOfRVLC = M4OSA_FALSE; + + /* Reset the bitstream context */ + parsingCtxt.stream_byte = 0; + parsingCtxt.stream_index = 8; + parsingCtxt.in = (M4OSA_MemAddr8) pVol; + + start = (M4OSA_MemAddr8) pVol; + + /* Start parsing */ + while (parsingCtxt.in - start < aVolSize) { + code = VideoEditorVideoDecoder_GetBitsFromMemory(&parsingCtxt, 8); + if (code == 0) { + code = VideoEditorVideoDecoder_GetBitsFromMemory(&parsingCtxt, 8); + if (code == 0) { + code = VideoEditorVideoDecoder_GetBitsFromMemory(&parsingCtxt,8); + if (code == 1) { + /* start code found */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 8); + + /* ----- 0x20..0x2F : video_object_layer_start_code ----- */ + + if ((code > 0x1F) && (code < 0x30)) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1); + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 8); + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1); + if (code == 1) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 4); + vol_verid = (M4OSA_UInt8)code; + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 3); + } + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 4); + if (code == 15) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 16); + } + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1); + if (code == 1) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 3); + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1); + if (code == 1) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 32); + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 31); + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 16); + } + } + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 2); + /* Need to save it for vop parsing */ + video_object_layer_shape = (M4OSA_UInt8)code; + + if (code != 0) { + return 0; /* only rectangular case supported */ + } + + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1); + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 16); + pDci->uiTimeScale = code; + + /* Computes 
time increment length */ + j = code - 1; + for (i = 0; (i < 32) && (j != 0); j >>=1) { + i++; + } + time_incr_length = (i == 0) ? 1 : i; + + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1); + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1); + if (code == 1) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, time_incr_length); + } + + if(video_object_layer_shape != 1) { /* 1 = Binary */ + if(video_object_layer_shape == 0) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* Marker bit */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 13);/* Width */ + pVideoSize->m_uiWidth = code; + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* Marker bit */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 13);/* Height */ + pVideoSize->m_uiHeight = code; + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* Marker bit */ + } + } + + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* interlaced */ + interlaced = (M4OSA_UInt8)code; + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* OBMC disable */ + + if(vol_verid == 1) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* sprite enable */ + sprite_enable = (M4OSA_UInt8)code; + } else { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 2);/* sprite enable */ + sprite_enable = (M4OSA_UInt8)code; + } + if ((sprite_enable == 1) || (sprite_enable == 2)) { + if (sprite_enable != 2) { + + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 13);/* sprite width */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* Marker bit */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 13);/* sprite height */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* Marker bit */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 13);/* sprite l coordinate */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* Marker bit */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 13);/* sprite top coordinate */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* Marker bit */ + } + + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 6);/* sprite warping points */ + sprite_warping_points = (M4OSA_UInt8)code; + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 2);/* sprite warping accuracy */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* sprite brightness change */ + sprite_brightness_change = (M4OSA_UInt8)code; + if (sprite_enable != 2) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1); + } + } + if ((vol_verid != 1) && (video_object_layer_shape != 0)){ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* sadct disable */ + } + + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1); /* not 8 bits */ + if (code) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 4);/* quant precision */ + quant_precision = (M4OSA_UInt8)code; + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 4);/* bits per pixel */ + } + + /* greyscale not supported */ + if(video_object_layer_shape == 3) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 3); + } + + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* quant type 
*/ + if (code) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* load intra quant mat */ + if (code) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 8);/* */ + i = 1; + while (i < 64) { + code = + VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 8); + if (code == 0) { + break; + } + i++; + } + } + + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* load non intra quant mat */ + if (code) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 8);/* */ + i = 1; + while (i < 64) { + code = + VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 8); + if (code == 0) { + break; + } + i++; + } + } + } + + if (vol_verid != 1) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* quarter sample */ + } + + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* complexity estimation disable */ + complexity_estimation_disable = (M4OSA_UInt8)code; + if (!code) { + //return M4ERR_NOT_IMPLEMENTED; + } + + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* resync marker disable */ + pDci->uiUseOfResynchMarker = (code) ? 0 : 1; + + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* data partitionned */ + pDci->bDataPartition = (code) ? M4OSA_TRUE : M4OSA_FALSE; + if (code) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* reversible VLC */ + pDci->bUseOfRVLC = (code) ? M4OSA_TRUE : M4OSA_FALSE; + } + + if (vol_verid != 1) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* newpred */ + if (code) { + //return M4ERR_PARAMETER; + } + + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1); + reduced_resolution_vop_enable = (M4OSA_UInt8)code; + } + + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* scalability */ + scalability = (M4OSA_UInt8)code; + if (code) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* hierarchy type */ + b_hierarchy_type = (M4OSA_UInt8)code; + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 4);/* ref layer id */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* ref sampling direct */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 5);/* hor sampling factor N */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 5);/* hor sampling factor M */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 5);/* vert sampling factor N */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 5);/* vert sampling factor M */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* enhancement type */ + enhancement_type = (M4OSA_UInt8)code; + if ((!b_hierarchy_type) && + (video_object_layer_shape == 1)) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* use ref shape */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* use ref texture */ + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 5); + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 5); + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 5); + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 5); + } + } + break; + } + + /* ----- 0xB0 : visual_object_sequence_start_code ----- */ + + else if(code == 0xB0) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 8);/* profile_and_level_indication */ + 
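/* Start-code layout this parser relies on (ISO/IEC 14496-2): each syntax
 * element begins with a 00 00 01 prefix, and the byte that follows selects
 * the element: 0x20..0x2F video_object_layer_start_code (VOL header),
 * 0xB0 visual_object_sequence_start_code (profile/level, handled here),
 * 0xB5 visual_object_start_code (version id). A minimal sketch of the same
 * byte-wise prefix scan as a standalone helper; `findStartCode` is a
 * hypothetical name, not a function in this file: */
#if 0
static const uint8_t* findStartCode(const uint8_t* p, const uint8_t* end) {
    uint32_t window = 0xFFFFFFFF;           // rolling byte window
    while (p < end) {
        window = (window << 8) | *p++;
        if ((window & 0x00FFFFFF) == 0x000001) {
            return p;                       // byte after the 00 00 01 prefix
        }
    }
    return NULL;                            // no start code in [p, end)
}
#endif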
pDci->uiProfile = (M4OSA_UInt8)code; + } + + /* ----- 0xB5 : visual_object_start_code ----- */ + + else if(code == 0xB5) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 1);/* is object layer identifier */ + if (code == 1) { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 4); /* visual object verid */ + vol_verid = (M4OSA_UInt8)code; + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 3); + } else { + code = VideoEditorVideoDecoder_GetBitsFromMemory( + &parsingCtxt, 7); /* Realign on byte */ + vol_verid = 1; + } + } + + /* ----- end ----- */ + } else { + if ((code >> 2) == 0x20) { + /* H263 ...-> wrong*/ + break; + } + } + } + } + } + return M4NO_ERROR; +} + +M4VIFI_UInt8 M4VIFI_SemiplanarYVU420toYUV420(void *user_data, + M4VIFI_UInt8 *inyuv, M4VIFI_ImagePlane *PlaneOut ) { + M4VIFI_UInt8 return_code = M4VIFI_OK; + M4VIFI_UInt8 *outyuv = + ((M4VIFI_UInt8*)&(PlaneOut[0].pac_data[PlaneOut[0].u_topleft])); + int32_t width = PlaneOut[0].u_width; + int32_t height = PlaneOut[0].u_height; + + int32_t outYsize = width * height; + uint32_t *outy = (uint32_t *) outyuv; + uint16_t *outcb = + (uint16_t *) &(PlaneOut[1].pac_data[PlaneOut[1].u_topleft]); + uint16_t *outcr = + (uint16_t *) &(PlaneOut[2].pac_data[PlaneOut[2].u_topleft]); + + /* Y copying */ + memcpy((void *)outy, (void *)inyuv, outYsize); + + /* U & V copying */ + uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize); + for (int32_t i = height >> 1; i > 0; --i) { + for (int32_t j = width >> 2; j > 0; --j) { + uint32_t temp = *inyuv_4++; + uint32_t tempU = temp & 0xFF; + tempU = tempU | ((temp >> 8) & 0xFF00); + + uint32_t tempV = (temp >> 8) & 0xFF; + tempV = tempV | ((temp >> 16) & 0xFF00); + + // Flip U and V + *outcb++ = tempV; + *outcr++ = tempU; + } + } + return return_code; +} +void logSupportDecodersAndCapabilities(M4DECODER_VideoDecoders* decoders) { + VideoDecoder *pDecoder; + VideoComponentCapabilities *pOmxComponents = NULL; + VideoProfileLevel *pProfileLevel = NULL; + pDecoder = decoders->decoder; + for (size_t i = 0; i< decoders->decoderNumber; i++) { + ALOGV("Supported Codec[%d] :%d", i, pDecoder->codec); + pOmxComponents = pDecoder->component; + for(size_t j = 0; j < pDecoder->componentNumber; j++) { + pProfileLevel = pOmxComponents->profileLevel; + ALOGV("-->component %d", j); + for(size_t k = 0; k < pOmxComponents->profileNumber; k++) { + ALOGV("-->profile:%ld maxLevel:%ld", pProfileLevel->mProfile, + pProfileLevel->mLevel); + pProfileLevel++; + } + pOmxComponents++; + } + pDecoder++; + } +} + +M4OSA_ERR queryVideoDecoderCapabilities + (M4DECODER_VideoDecoders** decoders) { + M4OSA_ERR err = M4NO_ERROR; + const char *kMimeTypes[] = { + MEDIA_MIMETYPE_VIDEO_AVC, MEDIA_MIMETYPE_VIDEO_MPEG4, + MEDIA_MIMETYPE_VIDEO_H263 + }; + + int32_t supportFormats = sizeof(kMimeTypes) / sizeof(kMimeTypes[0]); + M4DECODER_VideoDecoders *pDecoders; + VideoDecoder *pDecoder; + VideoComponentCapabilities *pOmxComponents = NULL; + VideoProfileLevel *pProfileLevel = NULL; + OMXClient client; + status_t status = OK; + SAFE_MALLOC(pDecoders, M4DECODER_VideoDecoders, 1, "VideoDecoders"); + SAFE_MALLOC(pDecoder, VideoDecoder, supportFormats, + "VideoDecoder"); + pDecoders->decoder = pDecoder; + + pDecoders->decoderNumber= supportFormats; + status = client.connect(); + CHECK(status == OK); + for (size_t k = 0; k < sizeof(kMimeTypes) / sizeof(kMimeTypes[0]); + ++k) { + Vector<CodecCapabilities> results; + CHECK_EQ(QueryCodecs(client.interface(), kMimeTypes[k], + true, // queryDecoders + &results), (status_t)OK); + 
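/* Sketch of the per-MIME result shape consumed below (only the fields this
 * loop actually reads; the type comes from stagefright's OMXCodec.h):
 *   results[i].mComponentName             -- decoder component name (String8)
 *   results[i].mProfileLevels[j].mProfile -- OMX profile constant
 *   results[i].mProfileLevels[j].mLevel   -- OMX level constant
 * Illustrative dump, assuming the query above succeeded: */
#if 0
        for (size_t i = 0; i < results.size(); ++i) {
            ALOGV("decoder %s advertises %d profile/level pairs",
                    results[i].mComponentName.string(),
                    (int)results[i].mProfileLevels.size());
        }
#endif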
+ if (results.size()) { + SAFE_MALLOC(pOmxComponents, VideoComponentCapabilities, + results.size(), "VideoComponentCapabilities"); + ALOGV("K=%d",k); + pDecoder->component = pOmxComponents; + pDecoder->componentNumber = results.size(); + } + + for (size_t i = 0; i < results.size(); ++i) { + ALOGV(" decoder '%s' supports ", + results[i].mComponentName.string()); + + if (results[i].mProfileLevels.size() == 0) { + ALOGV("NOTHING.\n"); + continue; + } + +#if 0 + // FIXME: + // We should ignore the software codecs and make IsSoftwareCodec() + // part of pubic API from OMXCodec.cpp + if (IsSoftwareCodec(results[i].mComponentName.string())) { + ALOGV("Ignore software codec %s", results[i].mComponentName.string()); + continue; + } +#endif + + // Count the supported profiles + int32_t profileNumber = 0; + int32_t profile = -1; + for (size_t j = 0; j < results[i].mProfileLevels.size(); ++j) { + const CodecProfileLevel &profileLevel = + results[i].mProfileLevels[j]; + // FIXME: assume that the profiles are ordered + if (profileLevel.mProfile != profile) { + profile = profileLevel.mProfile; + profileNumber++; + } + } + SAFE_MALLOC(pProfileLevel, VideoProfileLevel, + profileNumber, "VideoProfileLevel"); + pOmxComponents->profileLevel = pProfileLevel; + pOmxComponents->profileNumber = profileNumber; + + // Get the max Level for each profile. + int32_t maxLevel = -1; + profile = -1; + profileNumber = 0; + for (size_t j = 0; j < results[i].mProfileLevels.size(); ++j) { + const CodecProfileLevel &profileLevel = + results[i].mProfileLevels[j]; + if (profile == -1 && maxLevel == -1) { + profile = profileLevel.mProfile; + maxLevel = profileLevel.mLevel; + pProfileLevel->mProfile = profile; + pProfileLevel->mLevel = maxLevel; + ALOGV("%d profile: %ld, max level: %ld", + __LINE__, pProfileLevel->mProfile, pProfileLevel->mLevel); + } + if (profileLevel.mProfile != profile) { + profile = profileLevel.mProfile; + maxLevel = profileLevel.mLevel; + profileNumber++; + pProfileLevel++; + pProfileLevel->mProfile = profile; + pProfileLevel->mLevel = maxLevel; + ALOGV("%d profile: %ld, max level: %ld", + __LINE__, pProfileLevel->mProfile, pProfileLevel->mLevel); + } else if (profileLevel.mLevel > maxLevel) { + maxLevel = profileLevel.mLevel; + pProfileLevel->mLevel = maxLevel; + ALOGV("%d profile: %ld, max level: %ld", + __LINE__, pProfileLevel->mProfile, pProfileLevel->mLevel); + } + + } + pOmxComponents++; + } + if (!strcmp(MEDIA_MIMETYPE_VIDEO_AVC, kMimeTypes[k])) + pDecoder->codec = M4DA_StreamTypeVideoMpeg4Avc; + if (!strcmp(MEDIA_MIMETYPE_VIDEO_MPEG4, kMimeTypes[k])) + pDecoder->codec = M4DA_StreamTypeVideoMpeg4; + if (!strcmp(MEDIA_MIMETYPE_VIDEO_H263, kMimeTypes[k])) + pDecoder->codec = M4DA_StreamTypeVideoH263; + + pDecoder++; + } + + logSupportDecodersAndCapabilities(pDecoders); + *decoders = pDecoders; +cleanUp: + return err; +} +/******************** + * ENGINE INTERFACE * + ********************/ +M4OSA_ERR VideoEditorVideoDecoder_configureFromMetadata(M4OSA_Context pContext, + MetaData* meta) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorVideoDecoder_Context* pDecShellContext = M4OSA_NULL; + bool success = OK; + int32_t width = 0; + int32_t height = 0; + int32_t frameSize = 0; + int32_t vWidth, vHeight; + int32_t cropLeft, cropTop, cropRight, cropBottom; + + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != meta, M4ERR_PARAMETER); + + ALOGV("VideoEditorVideoDecoder_configureFromMetadata begin"); + + pDecShellContext = (VideoEditorVideoDecoder_Context*)pContext; + + 
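/* The metadata read below separates the decoder's padded output size
 * (kKeyWidth/kKeyHeight, stored as mGivenWidth/mGivenHeight) from the
 * displayed region (kKeyCropRect). Worked sizing example, assuming a
 * 176x144 QCIF stream with no crop rect:
 *     width = 176, height = 144
 *     frameSize = (176 * 144 * 3) / 2 = 38016 bytes   (YUV420, 1.5 B/pixel)
 *     pool buffer size = frameSize + mGivenWidth * 2 = 38368 bytes
 * matching the frameSize computation and the
 * VIDEOEDITOR_BUFFER_initPoolBuffers() call further down. */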
success = meta->findInt32(kKeyWidth, &vWidth); + VIDEOEDITOR_CHECK(TRUE == success, M4ERR_PARAMETER); + success = meta->findInt32(kKeyHeight, &vHeight); + VIDEOEDITOR_CHECK(TRUE == success, M4ERR_PARAMETER); + + ALOGV("vWidth = %d, vHeight = %d", vWidth, vHeight); + + pDecShellContext->mGivenWidth = vWidth; + pDecShellContext->mGivenHeight = vHeight; + + if (!meta->findRect( + kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) { + + cropLeft = cropTop = 0; + cropRight = vWidth - 1; + cropBottom = vHeight - 1; + + ALOGV("got dimensions only %d x %d", width, height); + } else { + ALOGV("got crop rect %d, %d, %d, %d", + cropLeft, cropTop, cropRight, cropBottom); + } + + pDecShellContext->mCropRect.left = cropLeft; + pDecShellContext->mCropRect.right = cropRight; + pDecShellContext->mCropRect.top = cropTop; + pDecShellContext->mCropRect.bottom = cropBottom; + + width = cropRight - cropLeft + 1; + height = cropBottom - cropTop + 1; + + ALOGV("VideoDecoder_configureFromMetadata : W=%d H=%d", width, height); + VIDEOEDITOR_CHECK((0 != width) && (0 != height), M4ERR_PARAMETER); + + if( (M4OSA_NULL != pDecShellContext->m_pDecBufferPool) && + (pDecShellContext->m_pVideoStreamhandler->m_videoWidth == \ + (uint32_t)width) && + (pDecShellContext->m_pVideoStreamhandler->m_videoHeight == \ + (uint32_t)height) ) { + // No need to reconfigure + goto cleanUp; + } + ALOGV("VideoDecoder_configureFromMetadata reset: W=%d H=%d", width, height); + // Update the stream handler parameters + pDecShellContext->m_pVideoStreamhandler->m_videoWidth = width; + pDecShellContext->m_pVideoStreamhandler->m_videoHeight = height; + frameSize = (width * height * 3) / 2; + + // Configure the buffer pool + if( M4OSA_NULL != pDecShellContext->m_pDecBufferPool ) { + ALOGV("VideoDecoder_configureFromMetadata : reset the buffer pool"); + VIDEOEDITOR_BUFFER_freePool(pDecShellContext->m_pDecBufferPool); + pDecShellContext->m_pDecBufferPool = M4OSA_NULL; + } + err = VIDEOEDITOR_BUFFER_allocatePool(&pDecShellContext->m_pDecBufferPool, + MAX_DEC_BUFFERS, (M4OSA_Char*)"VIDEOEDITOR_DecodedBufferPool"); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + err = VIDEOEDITOR_BUFFER_initPoolBuffers(pDecShellContext->m_pDecBufferPool, + frameSize + pDecShellContext->mGivenWidth * 2); + + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorVideoDecoder_configureFromMetadata no error"); + } else { + if( M4OSA_NULL != pDecShellContext->m_pDecBufferPool ) { + VIDEOEDITOR_BUFFER_freePool(pDecShellContext->m_pDecBufferPool); + pDecShellContext->m_pDecBufferPool = M4OSA_NULL; + } + ALOGV("VideoEditorVideoDecoder_configureFromMetadata ERROR 0x%X", err); + } + ALOGV("VideoEditorVideoDecoder_configureFromMetadata end"); + return err; +} + +M4OSA_ERR VideoEditorVideoDecoder_destroy(M4OSA_Context pContext) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorVideoDecoder_Context* pDecShellContext = + (VideoEditorVideoDecoder_Context*)pContext; + + // Input parameters check + ALOGV("VideoEditorVideoDecoder_destroy begin"); + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + + // Release the color converter + delete pDecShellContext->mI420ColorConverter; + + // Destroy the graph + if( pDecShellContext->mVideoDecoder != NULL ) { + ALOGV("### VideoEditorVideoDecoder_destroy : releasing decoder"); + pDecShellContext->mVideoDecoder->stop(); + pDecShellContext->mVideoDecoder.clear(); + } + pDecShellContext->mClient.disconnect(); + pDecShellContext->mReaderSource.clear(); + + // Release memory + if( 
pDecShellContext->m_pDecBufferPool != M4OSA_NULL ) { + VIDEOEDITOR_BUFFER_freePool(pDecShellContext->m_pDecBufferPool); + pDecShellContext->m_pDecBufferPool = M4OSA_NULL; + } + SAFE_FREE(pDecShellContext); + pContext = NULL; + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorVideoDecoder_destroy no error"); + } else { + ALOGV("VideoEditorVideoDecoder_destroy ERROR 0x%X", err); + } + ALOGV("VideoEditorVideoDecoder_destroy end"); + return err; +} + +M4OSA_ERR VideoEditorVideoDecoder_create(M4OSA_Context *pContext, + M4_StreamHandler *pStreamHandler, + M4READER_GlobalInterface *pReaderGlobalInterface, + M4READER_DataInterface *pReaderDataInterface, + M4_AccessUnit *pAccessUnit, M4OSA_Void *pUserData) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorVideoDecoder_Context* pDecShellContext = M4OSA_NULL; + status_t status = OK; + bool success = TRUE; + int32_t colorFormat = 0; + M4OSA_UInt32 size = 0; + sp<MetaData> decoderMetadata = NULL; + int decoderOutput = OMX_COLOR_FormatYUV420Planar; + + ALOGV("VideoEditorVideoDecoder_create begin"); + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pStreamHandler, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pReaderDataInterface, M4ERR_PARAMETER); + + // Context allocation & initialization + SAFE_MALLOC(pDecShellContext, VideoEditorVideoDecoder_Context, 1, + "VideoEditorVideoDecoder"); + pDecShellContext->m_pVideoStreamhandler = + (M4_VideoStreamHandler*)pStreamHandler; + pDecShellContext->m_pNextAccessUnitToDecode = pAccessUnit; + pDecShellContext->m_pReaderGlobal = pReaderGlobalInterface; + pDecShellContext->m_pReader = pReaderDataInterface; + pDecShellContext->m_lastDecodedCTS = -1; + pDecShellContext->m_lastRenderCts = -1; + switch( pStreamHandler->m_streamType ) { + case M4DA_StreamTypeVideoH263: + pDecShellContext->mDecoderType = VIDEOEDITOR_kH263VideoDec; + break; + case M4DA_StreamTypeVideoMpeg4: + pDecShellContext->mDecoderType = VIDEOEDITOR_kMpeg4VideoDec; + // Parse the VOL header + err = VideoEditorVideoDecoder_internalParseVideoDSI( + (M4OSA_UInt8*)pDecShellContext->m_pVideoStreamhandler->\ + m_basicProperties.m_pDecoderSpecificInfo, + pDecShellContext->m_pVideoStreamhandler->\ + m_basicProperties.m_decoderSpecificInfoSize, + &pDecShellContext->m_Dci, &pDecShellContext->m_VideoSize); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + break; + case M4DA_StreamTypeVideoMpeg4Avc: + pDecShellContext->mDecoderType = VIDEOEDITOR_kH264VideoDec; + break; + default: + VIDEOEDITOR_CHECK(!"VideoDecoder_create : incorrect stream type", + M4ERR_PARAMETER); + break; + } + + pDecShellContext->mNbInputFrames = 0; + pDecShellContext->mFirstInputCts = -1.0; + pDecShellContext->mLastInputCts = -1.0; + pDecShellContext->mNbRenderedFrames = 0; + pDecShellContext->mFirstRenderedCts = -1.0; + pDecShellContext->mLastRenderedCts = -1.0; + pDecShellContext->mNbOutputFrames = 0; + pDecShellContext->mFirstOutputCts = -1; + pDecShellContext->mLastOutputCts = -1; + pDecShellContext->m_pDecBufferPool = M4OSA_NULL; + + // Calculate the interval between two video frames.
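/* Worked example of the interval computed just below, assuming a 29.97 fps
 * stream: mFrameIntervalMs = 1000.0 / 29.97 ~ 33.37 ms. decode() later uses
 * this interval (plus the caller's tolerance) when deciding whether a
 * decoded frame is close enough to the requested time to be queued. */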
+ CHECK(pDecShellContext->m_pVideoStreamhandler->m_averageFrameRate > 0); + pDecShellContext->mFrameIntervalMs = + 1000.0 / pDecShellContext->m_pVideoStreamhandler->m_averageFrameRate; + + /** + * StageFright graph building + */ + decoderMetadata = new MetaData; + switch( pDecShellContext->mDecoderType ) { + case VIDEOEDITOR_kH263VideoDec: + decoderMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263); + break; + case VIDEOEDITOR_kMpeg4VideoDec: + decoderMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4); + decoderMetadata->setData(kKeyESDS, kTypeESDS, + pStreamHandler->m_pESDSInfo, + pStreamHandler->m_ESDSInfoSize); + break; + case VIDEOEDITOR_kH264VideoDec: + decoderMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC); + decoderMetadata->setData(kKeyAVCC, kTypeAVCC, + pStreamHandler->m_pH264DecoderSpecificInfo, + pStreamHandler->m_H264decoderSpecificInfoSize); + break; + default: + VIDEOEDITOR_CHECK(!"VideoDecoder_create : incorrect stream type", + M4ERR_PARAMETER); + break; + } + + decoderMetadata->setInt32(kKeyMaxInputSize, pStreamHandler->m_maxAUSize); + decoderMetadata->setInt32(kKeyWidth, + pDecShellContext->m_pVideoStreamhandler->m_videoWidth); + decoderMetadata->setInt32(kKeyHeight, + pDecShellContext->m_pVideoStreamhandler->m_videoHeight); + + // Create the decoder source + pDecShellContext->mReaderSource = new VideoEditorVideoDecoderSource( + decoderMetadata, pDecShellContext->mDecoderType, + (void *)pDecShellContext); + VIDEOEDITOR_CHECK(NULL != pDecShellContext->mReaderSource.get(), + M4ERR_SF_DECODER_RSRC_FAIL); + + // Connect to the OMX client + status = pDecShellContext->mClient.connect(); + VIDEOEDITOR_CHECK(OK == status, M4ERR_SF_DECODER_RSRC_FAIL); + + // Create the decoder + pDecShellContext->mVideoDecoder = OMXCodec::Create( + pDecShellContext->mClient.interface(), + decoderMetadata, false, pDecShellContext->mReaderSource); + VIDEOEDITOR_CHECK(NULL != pDecShellContext->mVideoDecoder.get(), + M4ERR_SF_DECODER_RSRC_FAIL); + + + // Get the output color format + success = pDecShellContext->mVideoDecoder->getFormat()->findInt32( + kKeyColorFormat, &colorFormat); + VIDEOEDITOR_CHECK(TRUE == success, M4ERR_PARAMETER); + pDecShellContext->decOuputColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; + + pDecShellContext->mVideoDecoder->getFormat()->setInt32(kKeyWidth, + pDecShellContext->m_pVideoStreamhandler->m_videoWidth); + pDecShellContext->mVideoDecoder->getFormat()->setInt32(kKeyHeight, + pDecShellContext->m_pVideoStreamhandler->m_videoHeight); + + // Get the color converter + pDecShellContext->mI420ColorConverter = new I420ColorConverter; + if (pDecShellContext->mI420ColorConverter->isLoaded()) { + decoderOutput = pDecShellContext->mI420ColorConverter->getDecoderOutputFormat(); + } + + if (decoderOutput == OMX_COLOR_FormatYUV420Planar) { + delete pDecShellContext->mI420ColorConverter; + pDecShellContext->mI420ColorConverter = NULL; + } + + ALOGI("decoder output format = 0x%X\n", decoderOutput); + + // Configure the buffer pool from the metadata + err = VideoEditorVideoDecoder_configureFromMetadata(pDecShellContext, + pDecShellContext->mVideoDecoder->getFormat().get()); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + + // Start the graph + status = pDecShellContext->mVideoDecoder->start(); + VIDEOEDITOR_CHECK(OK == status, M4ERR_SF_DECODER_RSRC_FAIL); + + *pContext = (M4OSA_Context)pDecShellContext; + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorVideoDecoder_create no error"); + } else { + 
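/* Failure path for the error-handling idiom used throughout this file:
 * VIDEOEDITOR_CHECK() is expected to set `err` and jump to cleanUp when its
 * condition fails, so everything allocated above is released exactly once
 * here. Hand-expanded form of one such check, as a sketch of the assumed
 * macro behavior: */
#if 0
        // status = pDecShellContext->mClient.connect();
        if (OK != status) {
            err = M4ERR_SF_DECODER_RSRC_FAIL;  // value passed to the macro
            goto cleanUp;
        }
#endif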
VideoEditorVideoDecoder_destroy(pDecShellContext); + *pContext = M4OSA_NULL; + ALOGV("VideoEditorVideoDecoder_create ERROR 0x%X", err); + } + ALOGV("VideoEditorVideoDecoder_create : DONE"); + return err; +} + +M4OSA_ERR VideoEditorVideoSoftwareDecoder_create(M4OSA_Context *pContext, + M4_StreamHandler *pStreamHandler, + M4READER_GlobalInterface *pReaderGlobalInterface, + M4READER_DataInterface *pReaderDataInterface, + M4_AccessUnit *pAccessUnit, M4OSA_Void *pUserData) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorVideoDecoder_Context* pDecShellContext = M4OSA_NULL; + status_t status = OK; + bool success = TRUE; + int32_t colorFormat = 0; + M4OSA_UInt32 size = 0; + sp<MetaData> decoderMetadata = NULL; + + ALOGV("VideoEditorVideoDecoder_create begin"); + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pStreamHandler, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pReaderDataInterface, M4ERR_PARAMETER); + + // Context allocation & initialization + SAFE_MALLOC(pDecShellContext, VideoEditorVideoDecoder_Context, 1, + "VideoEditorVideoDecoder"); + pDecShellContext->m_pVideoStreamhandler = + (M4_VideoStreamHandler*)pStreamHandler; + pDecShellContext->m_pNextAccessUnitToDecode = pAccessUnit; + pDecShellContext->m_pReaderGlobal = pReaderGlobalInterface; + pDecShellContext->m_pReader = pReaderDataInterface; + pDecShellContext->m_lastDecodedCTS = -1; + pDecShellContext->m_lastRenderCts = -1; + switch( pStreamHandler->m_streamType ) { + case M4DA_StreamTypeVideoH263: + pDecShellContext->mDecoderType = VIDEOEDITOR_kH263VideoDec; + break; + case M4DA_StreamTypeVideoMpeg4: + pDecShellContext->mDecoderType = VIDEOEDITOR_kMpeg4VideoDec; + // Parse the VOL header + err = VideoEditorVideoDecoder_internalParseVideoDSI( + (M4OSA_UInt8*)pDecShellContext->m_pVideoStreamhandler->\ + m_basicProperties.m_pDecoderSpecificInfo, + pDecShellContext->m_pVideoStreamhandler->\ + m_basicProperties.m_decoderSpecificInfoSize, + &pDecShellContext->m_Dci, &pDecShellContext->m_VideoSize); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + break; + case M4DA_StreamTypeVideoMpeg4Avc: + pDecShellContext->mDecoderType = VIDEOEDITOR_kH264VideoDec; + break; + default: + VIDEOEDITOR_CHECK(!"VideoDecoder_create : incorrect
stream type", + M4ERR_PARAMETER); + break; + } + + decoderMetadata->setInt32(kKeyMaxInputSize, pStreamHandler->m_maxAUSize); + decoderMetadata->setInt32(kKeyWidth, + pDecShellContext->m_pVideoStreamhandler->m_videoWidth); + decoderMetadata->setInt32(kKeyHeight, + pDecShellContext->m_pVideoStreamhandler->m_videoHeight); + + // Create the decoder source + pDecShellContext->mReaderSource = new VideoEditorVideoDecoderSource( + decoderMetadata, pDecShellContext->mDecoderType, + (void *)pDecShellContext); + VIDEOEDITOR_CHECK(NULL != pDecShellContext->mReaderSource.get(), + M4ERR_SF_DECODER_RSRC_FAIL); + + // Connect to the OMX client + status = pDecShellContext->mClient.connect(); + VIDEOEDITOR_CHECK(OK == status, M4ERR_SF_DECODER_RSRC_FAIL); + + ALOGI("Using software codecs only"); + // Create the decoder + pDecShellContext->mVideoDecoder = OMXCodec::Create( + pDecShellContext->mClient.interface(), + decoderMetadata, false, pDecShellContext->mReaderSource,NULL,OMXCodec::kSoftwareCodecsOnly); + VIDEOEDITOR_CHECK(NULL != pDecShellContext->mVideoDecoder.get(), + M4ERR_SF_DECODER_RSRC_FAIL); + + // Get the output color format + success = pDecShellContext->mVideoDecoder->getFormat()->findInt32( + kKeyColorFormat, &colorFormat); + VIDEOEDITOR_CHECK(TRUE == success, M4ERR_PARAMETER); + pDecShellContext->decOuputColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; + + pDecShellContext->mVideoDecoder->getFormat()->setInt32(kKeyWidth, + pDecShellContext->m_pVideoStreamhandler->m_videoWidth); + pDecShellContext->mVideoDecoder->getFormat()->setInt32(kKeyHeight, + pDecShellContext->m_pVideoStreamhandler->m_videoHeight); + + // Configure the buffer pool from the metadata + err = VideoEditorVideoDecoder_configureFromMetadata(pDecShellContext, + pDecShellContext->mVideoDecoder->getFormat().get()); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + + // Start the graph + status = pDecShellContext->mVideoDecoder->start(); + VIDEOEDITOR_CHECK(OK == status, M4ERR_SF_DECODER_RSRC_FAIL); + + *pContext = (M4OSA_Context)pDecShellContext; + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorVideoDecoder_create no error"); + } else { + VideoEditorVideoDecoder_destroy(pDecShellContext); + *pContext = M4OSA_NULL; + ALOGV("VideoEditorVideoDecoder_create ERROR 0x%X", err); + } + ALOGV("VideoEditorVideoDecoder_create : DONE"); + return err; +} + + +M4OSA_ERR VideoEditorVideoDecoder_getOption(M4OSA_Context context, + M4OSA_OptionID optionId, M4OSA_DataOption pValue) { + M4OSA_ERR lerr = M4NO_ERROR; + VideoEditorVideoDecoder_Context* pDecShellContext = + (VideoEditorVideoDecoder_Context*) context; + M4_VersionInfo* pVersionInfo; + M4DECODER_VideoSize* pVideoSize; + M4OSA_UInt32* pNextFrameCts; + M4OSA_UInt32 *plastDecodedFrameCts; + M4DECODER_AVCProfileLevel* profile; + M4DECODER_MPEG4_DecoderConfigInfo* pDecConfInfo; + + ALOGV("VideoEditorVideoDecoder_getOption begin"); + + switch (optionId) { + case M4DECODER_kOptionID_AVCLastDecodedFrameCTS: + plastDecodedFrameCts = (M4OSA_UInt32 *) pValue; + *plastDecodedFrameCts = pDecShellContext->m_lastDecodedCTS; + break; + + case M4DECODER_kOptionID_Version: + pVersionInfo = (M4_VersionInfo*)pValue; + + pVersionInfo->m_major = VIDEOEDITOR_VIDEC_SHELL_VER_MAJOR; + pVersionInfo->m_minor= VIDEOEDITOR_VIDEC_SHELL_VER_MINOR; + pVersionInfo->m_revision = VIDEOEDITOR_VIDEC_SHELL_VER_REVISION; + pVersionInfo->m_structSize=sizeof(M4_VersionInfo); + break; + + case M4DECODER_kOptionID_VideoSize: + /** Only VPS uses this Option ID. 
*/ + pVideoSize = (M4DECODER_VideoSize*)pValue; + pDecShellContext->mVideoDecoder->getFormat()->findInt32(kKeyWidth, + (int32_t*)(&pVideoSize->m_uiWidth)); + pDecShellContext->mVideoDecoder->getFormat()->findInt32(kKeyHeight, + (int32_t*)(&pVideoSize->m_uiHeight)); + ALOGV("VideoEditorVideoDecoder_getOption : W=%d H=%d", + pVideoSize->m_uiWidth, pVideoSize->m_uiHeight); + break; + + case M4DECODER_kOptionID_NextRenderedFrameCTS: + /** How to get this information. SF decoder does not provide this. * + ** Let us provide last decoded frame CTS as of now. * + ** Only VPS uses this Option ID. */ + pNextFrameCts = (M4OSA_UInt32 *)pValue; + *pNextFrameCts = pDecShellContext->m_lastDecodedCTS; + break; + case M4DECODER_MPEG4_kOptionID_DecoderConfigInfo: + if(pDecShellContext->mDecoderType == VIDEOEDITOR_kMpeg4VideoDec) { + (*(M4DECODER_MPEG4_DecoderConfigInfo*)pValue) = + pDecShellContext->m_Dci; + } + break; + default: + lerr = M4ERR_BAD_OPTION_ID; + break; + + } + + ALOGV("VideoEditorVideoDecoder_getOption: end with err = 0x%x", lerr); + return lerr; +} + +M4OSA_ERR VideoEditorVideoDecoder_setOption(M4OSA_Context context, + M4OSA_OptionID optionId, M4OSA_DataOption pValue) { + M4OSA_ERR lerr = M4NO_ERROR; + VideoEditorVideoDecoder_Context *pDecShellContext = + (VideoEditorVideoDecoder_Context*) context; + + ALOGV("VideoEditorVideoDecoder_setOption begin"); + + switch (optionId) { + case M4DECODER_kOptionID_OutputFilter: { + M4DECODER_OutputFilter* pOutputFilter = + (M4DECODER_OutputFilter*) pValue; + pDecShellContext->m_pFilter = + (M4VIFI_PlanConverterFunctionType*)pOutputFilter->\ + m_pFilterFunction; + pDecShellContext->m_pFilterUserData = + pOutputFilter->m_pFilterUserData; + } + break; + case M4DECODER_kOptionID_DeblockingFilter: + break; + default: + lerr = M4ERR_BAD_CONTEXT; + break; + } + + ALOGV("VideoEditorVideoDecoder_setOption: end with err = 0x%x", lerr); + return lerr; +} + +M4OSA_ERR VideoEditorVideoDecoder_decode(M4OSA_Context context, + M4_MediaTime* pTime, M4OSA_Bool bJump, M4OSA_UInt32 tolerance) { + M4OSA_ERR lerr = M4NO_ERROR; + VideoEditorVideoDecoder_Context* pDecShellContext = + (VideoEditorVideoDecoder_Context*) context; + int64_t lFrameTime; + MediaBuffer* pDecoderBuffer = NULL; + MediaBuffer* pNextBuffer = NULL; + status_t errStatus; + bool needSeek = bJump; + + ALOGV("VideoEditorVideoDecoder_decode begin"); + + if( M4OSA_TRUE == pDecShellContext->mReachedEOS ) { + // Do not call read(), it could lead to a freeze + ALOGV("VideoEditorVideoDecoder_decode : EOS already reached"); + lerr = M4WAR_NO_MORE_AU; + goto VIDEOEDITOR_VideoDecode_cleanUP; + } + if(pDecShellContext->m_lastDecodedCTS >= *pTime) { + ALOGV("VideoDecoder_decode: Already decoded up to this time CTS = %lf.", + pDecShellContext->m_lastDecodedCTS); + goto VIDEOEDITOR_VideoDecode_cleanUP; + } + if(M4OSA_TRUE == bJump) { + ALOGV("VideoEditorVideoDecoder_decode: Jump called"); + pDecShellContext->m_lastDecodedCTS = -1; + pDecShellContext->m_lastRenderCts = -1; + } + + pDecShellContext->mNbInputFrames++; + if (0 > pDecShellContext->mFirstInputCts){ + pDecShellContext->mFirstInputCts = *pTime; + } + pDecShellContext->mLastInputCts = *pTime; + + while (pDecoderBuffer == NULL || pDecShellContext->m_lastDecodedCTS + tolerance < *pTime) { + ALOGV("VideoEditorVideoDecoder_decode, frameCTS = %lf, DecodeUpTo = %lf", + pDecShellContext->m_lastDecodedCTS, *pTime); + + // Read the buffer from the stagefright decoder + if (needSeek) { + MediaSource::ReadOptions options; + int64_t time_us = *pTime * 1000; + 
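/* Jump handling: the requested time is converted to microseconds and the
 * seek below lands on the closest sync frame at or before that time; the
 * enclosing while loop then decodes forward until m_lastDecodedCTS catches
 * up with *pTime. Illustrative timeline, assuming 33 ms frames with a sync
 * frame every 10th frame: *pTime = 500 ms -> the seek lands on the sync
 * frame at 330 ms and decoding proceeds from there toward the 500 ms
 * target. */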
options.setSeekTo(time_us, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC); + errStatus = pDecShellContext->mVideoDecoder->read(&pNextBuffer, &options); + needSeek = false; + } else { + errStatus = pDecShellContext->mVideoDecoder->read(&pNextBuffer); + } + + // Handle EOS and format change + if (errStatus == ERROR_END_OF_STREAM) { + ALOGV("End of stream reached, returning M4WAR_NO_MORE_AU "); + pDecShellContext->mReachedEOS = M4OSA_TRUE; + lerr = M4WAR_NO_MORE_AU; + // If we decoded a buffer before EOS, we still need to put it + // into the queue. + if (pDecoderBuffer && bJump) { + copyBufferToQueue(pDecShellContext, pDecoderBuffer); + } + goto VIDEOEDITOR_VideoDecode_cleanUP; + } else if (INFO_FORMAT_CHANGED == errStatus) { + ALOGV("VideoDecoder_decode : source returns INFO_FORMAT_CHANGED"); + lerr = VideoEditorVideoDecoder_configureFromMetadata( + pDecShellContext, + pDecShellContext->mVideoDecoder->getFormat().get()); + if( M4NO_ERROR != lerr ) { + ALOGV("!!! VideoEditorVideoDecoder_decode ERROR : " + "VideoDecoder_configureFromMetadata returns 0x%X", lerr); + break; + } + continue; + } else if (errStatus != OK) { + ALOGE("VideoEditorVideoDecoder_decode ERROR:0x%x(%d)", + errStatus,errStatus); + lerr = errStatus; + goto VIDEOEDITOR_VideoDecode_cleanUP; + } + + // The OMXCodec client should expect to receive 0-length buffers + // and drop the 0-length buffers. + if (pNextBuffer->range_length() == 0) { + pNextBuffer->release(); + continue; + } + + // Now we have a good next buffer, release the previous one. + if (pDecoderBuffer != NULL) { + pDecoderBuffer->release(); + pDecoderBuffer = NULL; + } + pDecoderBuffer = pNextBuffer; + + // Record the timestamp of last decoded buffer + pDecoderBuffer->meta_data()->findInt64(kKeyTime, &lFrameTime); + pDecShellContext->m_lastDecodedCTS = (M4_MediaTime)(lFrameTime/1000); + ALOGV("VideoEditorVideoDecoder_decode,decoded frametime = %lf,size = %d", + (M4_MediaTime)lFrameTime, pDecoderBuffer->size() ); + + /* + * We need to save a buffer if bJump == false to a queue. These + * buffers have a timestamp >= the target time, *pTime (for instance, + * the transition between two videos, or a trimming postion inside + * one video), since they are part of the transition clip or the + * trimmed video. + * + * If *pTime does not have the same value as any of the existing + * video frames, we would like to get the buffer right before *pTime + * and in the transcoding phrase, this video frame will be encoded + * as a key frame and becomes the first video frame for the transition or the + * trimmed video to be generated. This buffer must also be queued. 
+ * + */ + int64_t targetTimeMs = + pDecShellContext->m_lastDecodedCTS + + pDecShellContext->mFrameIntervalMs + + tolerance; + if (!bJump || targetTimeMs > *pTime) { + lerr = copyBufferToQueue(pDecShellContext, pDecoderBuffer); + if (lerr != M4NO_ERROR) { + goto VIDEOEDITOR_VideoDecode_cleanUP; + } + } + } + + pDecShellContext->mNbOutputFrames++; + if ( 0 > pDecShellContext->mFirstOutputCts ) { + pDecShellContext->mFirstOutputCts = *pTime; + } + pDecShellContext->mLastOutputCts = *pTime; + +VIDEOEDITOR_VideoDecode_cleanUP: + *pTime = pDecShellContext->m_lastDecodedCTS; + if (pDecoderBuffer != NULL) { + pDecoderBuffer->release(); + pDecoderBuffer = NULL; + } + + ALOGV("VideoEditorVideoDecoder_decode: end with 0x%x", lerr); + return lerr; +} + +static M4OSA_ERR copyBufferToQueue( + VideoEditorVideoDecoder_Context* pDecShellContext, + MediaBuffer* pDecoderBuffer) { + + M4OSA_ERR lerr = M4NO_ERROR; + VIDEOEDITOR_BUFFER_Buffer* tmpDecBuffer; + + // Get a buffer from the queue + lerr = VIDEOEDITOR_BUFFER_getBuffer(pDecShellContext->m_pDecBufferPool, + VIDEOEDITOR_BUFFER_kEmpty, &tmpDecBuffer); + if (lerr == (M4OSA_UInt32)M4ERR_NO_BUFFER_AVAILABLE) { + lerr = VIDEOEDITOR_BUFFER_getOldestBuffer( + pDecShellContext->m_pDecBufferPool, + VIDEOEDITOR_BUFFER_kFilled, &tmpDecBuffer); + tmpDecBuffer->state = VIDEOEDITOR_BUFFER_kEmpty; + lerr = M4NO_ERROR; + } + + if (lerr != M4NO_ERROR) return lerr; + + // Color convert or copy from the given MediaBuffer to our buffer + if (pDecShellContext->mI420ColorConverter) { + if (pDecShellContext->mI420ColorConverter->convertDecoderOutputToI420( + (uint8_t *)pDecoderBuffer->data(),// ?? + pDecoderBuffer->range_offset(), // decoderBits + pDecShellContext->mGivenWidth, // decoderWidth + pDecShellContext->mGivenHeight, // decoderHeight + pDecShellContext->mCropRect, // decoderRect + tmpDecBuffer->pData /* dstBits */) < 0) { + ALOGE("convertDecoderOutputToI420 failed"); + lerr = M4ERR_NOT_IMPLEMENTED; + } + } else if (pDecShellContext->decOuputColorFormat == OMX_COLOR_FormatYUV420Planar) { + int32_t width = pDecShellContext->m_pVideoStreamhandler->m_videoWidth; + int32_t height = pDecShellContext->m_pVideoStreamhandler->m_videoHeight; + int32_t yPlaneSize = width * height; + int32_t uvPlaneSize = width * height / 4; + int32_t offsetSrc = 0; + + if (( width == pDecShellContext->mGivenWidth ) && + ( height == pDecShellContext->mGivenHeight )) + { + M4OSA_MemAddr8 pTmpBuff = (M4OSA_MemAddr8)pDecoderBuffer->data() + pDecoderBuffer->range_offset(); + + memcpy((void *)tmpDecBuffer->pData, (void *)pTmpBuff, yPlaneSize); + + offsetSrc += pDecShellContext->mGivenWidth * pDecShellContext->mGivenHeight; + memcpy((void *)((M4OSA_MemAddr8)tmpDecBuffer->pData + yPlaneSize), + (void *)(pTmpBuff + offsetSrc), uvPlaneSize); + + offsetSrc += (pDecShellContext->mGivenWidth >> 1) * (pDecShellContext->mGivenHeight >> 1); + memcpy((void *)((M4OSA_MemAddr8)tmpDecBuffer->pData + yPlaneSize + uvPlaneSize), + (void *)(pTmpBuff + offsetSrc), uvPlaneSize); + } + else + { + M4OSA_MemAddr8 pTmpBuff = (M4OSA_MemAddr8)pDecoderBuffer->data() + pDecoderBuffer->range_offset(); + M4OSA_MemAddr8 pTmpBuffDst = (M4OSA_MemAddr8)tmpDecBuffer->pData; + int32_t index; + + for ( index = 0; index < height; index++) + { + memcpy((void *)pTmpBuffDst, (void *)pTmpBuff, width); + pTmpBuffDst += width; + pTmpBuff += pDecShellContext->mGivenWidth; + } + + pTmpBuff += (pDecShellContext->mGivenWidth * ( pDecShellContext->mGivenHeight - height)); + for ( index = 0; index < height >> 1; index++) + { + memcpy((void 
*)pTmpBuffDst, (void *)pTmpBuff, width >> 1); + pTmpBuffDst += width >> 1; + pTmpBuff += pDecShellContext->mGivenWidth >> 1; + } + + pTmpBuff += ((pDecShellContext->mGivenWidth * (pDecShellContext->mGivenHeight - height)) / 4); + for ( index = 0; index < height >> 1; index++) + { + memcpy((void *)pTmpBuffDst, (void *)pTmpBuff, width >> 1); + pTmpBuffDst += width >> 1; + pTmpBuff += pDecShellContext->mGivenWidth >> 1; + } + } + } else { + ALOGE("VideoDecoder_decode: unexpected color format 0x%X", + pDecShellContext->decOuputColorFormat); + lerr = M4ERR_PARAMETER; + } + + tmpDecBuffer->buffCTS = pDecShellContext->m_lastDecodedCTS; + tmpDecBuffer->state = VIDEOEDITOR_BUFFER_kFilled; + tmpDecBuffer->size = pDecoderBuffer->size(); + + return lerr; +} + +M4OSA_ERR VideoEditorVideoDecoder_render(M4OSA_Context context, + M4_MediaTime* pTime, M4VIFI_ImagePlane* pOutputPlane, + M4OSA_Bool bForceRender) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorVideoDecoder_Context* pDecShellContext = + (VideoEditorVideoDecoder_Context*) context; + M4OSA_UInt32 lindex, i; + M4OSA_UInt8* p_buf_src, *p_buf_dest; + M4VIFI_ImagePlane tmpPlaneIn, tmpPlaneOut; + VIDEOEDITOR_BUFFER_Buffer* pTmpVIDEOEDITORBuffer, *pRenderVIDEOEDITORBuffer + = M4OSA_NULL; + M4_MediaTime candidateTimeStamp = -1; + M4OSA_Bool bFound = M4OSA_FALSE; + + ALOGV("VideoEditorVideoDecoder_render begin"); + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != context, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pTime, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pOutputPlane, M4ERR_PARAMETER); + + // The output buffer is already allocated, just copy the data + if ( (*pTime <= pDecShellContext->m_lastRenderCts) && + (M4OSA_FALSE == bForceRender) ) { + ALOGV("VIDEOEDITOR_VIDEO_render Frame in the past"); + err = M4WAR_VIDEORENDERER_NO_NEW_FRAME; + goto cleanUp; + } + ALOGV("VideoDecoder_render: lastRendered time = %lf,requested render time = " + "%lf", pDecShellContext->m_lastRenderCts, *pTime); + + /** + * Find the buffer appropriate for rendering. */ + for (i=0; i < pDecShellContext->m_pDecBufferPool->NB; i++) { + pTmpVIDEOEDITORBuffer = &pDecShellContext->m_pDecBufferPool\ + ->pNXPBuffer[i]; + if (pTmpVIDEOEDITORBuffer->state == VIDEOEDITOR_BUFFER_kFilled) { + /** Free all those buffers older than last rendered frame. 
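+             ** Their timestamps precede m_lastRenderCts, so they can never
+             ** be requested again; recycle them as empty.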
+             */
+            if (pTmpVIDEOEDITORBuffer->buffCTS < pDecShellContext->m_lastRenderCts) {
+                pTmpVIDEOEDITORBuffer->state = VIDEOEDITOR_BUFFER_kEmpty;
+            }
+
+            /** Get the buffer with appropriate timestamp */
+            if ( (pTmpVIDEOEDITORBuffer->buffCTS >= pDecShellContext->m_lastRenderCts) &&
+                    (pTmpVIDEOEDITORBuffer->buffCTS <= *pTime) &&
+                    (pTmpVIDEOEDITORBuffer->buffCTS > candidateTimeStamp)) {
+                bFound = M4OSA_TRUE;
+                pRenderVIDEOEDITORBuffer = pTmpVIDEOEDITORBuffer;
+                candidateTimeStamp = pTmpVIDEOEDITORBuffer->buffCTS;
+                ALOGV("VideoDecoder_render: found a buffer with timestamp = %lf",
+                    candidateTimeStamp);
+            }
+        }
+    }
+    if (M4OSA_FALSE == bFound) {
+        err = M4WAR_VIDEORENDERER_NO_NEW_FRAME;
+        goto cleanUp;
+    }
+
+    ALOGV("VideoEditorVideoDecoder_render 3 output %d %d %d %d",
+        pOutputPlane[0].u_width, pOutputPlane[0].u_height,
+        pOutputPlane[0].u_topleft, pOutputPlane[0].u_stride);
+
+    pDecShellContext->m_lastRenderCts = candidateTimeStamp;
+
+    if( M4OSA_NULL != pDecShellContext->m_pFilter ) {
+        // Filtering was requested
+        M4VIFI_ImagePlane tmpPlane[3];
+        // Prepare the output image for conversion
+        tmpPlane[0].u_width =
+            pDecShellContext->m_pVideoStreamhandler->m_videoWidth;
+        tmpPlane[0].u_height =
+            pDecShellContext->m_pVideoStreamhandler->m_videoHeight;
+        tmpPlane[0].u_topleft = 0;
+        tmpPlane[0].u_stride = tmpPlane[0].u_width;
+        tmpPlane[0].pac_data = (M4VIFI_UInt8*)pRenderVIDEOEDITORBuffer->pData;
+        tmpPlane[1].u_width = tmpPlane[0].u_width/2;
+        tmpPlane[1].u_height = tmpPlane[0].u_height/2;
+        tmpPlane[1].u_topleft = 0;
+        tmpPlane[1].u_stride = tmpPlane[0].u_stride/2;
+        tmpPlane[1].pac_data = tmpPlane[0].pac_data +
+            (tmpPlane[0].u_stride * tmpPlane[0].u_height);
+        tmpPlane[2].u_width = tmpPlane[1].u_width;
+        tmpPlane[2].u_height = tmpPlane[1].u_height;
+        tmpPlane[2].u_topleft = 0;
+        tmpPlane[2].u_stride = tmpPlane[1].u_stride;
+        tmpPlane[2].pac_data = tmpPlane[1].pac_data +
+            (tmpPlane[1].u_stride * tmpPlane[1].u_height);
+
+        ALOGV("VideoEditorVideoDecoder_render w = %d H = %d",
+            tmpPlane[0].u_width, tmpPlane[0].u_height);
+        pDecShellContext->m_pFilter(M4OSA_NULL, &tmpPlane[0], pOutputPlane);
+    } else {
+        // Just copy the YUV420P buffer
+        M4OSA_MemAddr8 tempBuffPtr =
+            (M4OSA_MemAddr8)pRenderVIDEOEDITORBuffer->pData;
+        M4OSA_UInt32 tempWidth =
+            pDecShellContext->m_pVideoStreamhandler->m_videoWidth;
+        M4OSA_UInt32 tempHeight =
+            pDecShellContext->m_pVideoStreamhandler->m_videoHeight;
+
+        memcpy((void *) pOutputPlane[0].pac_data, (void *)tempBuffPtr,
+            tempWidth * tempHeight);
+        tempBuffPtr += (tempWidth * tempHeight);
+        memcpy((void *) pOutputPlane[1].pac_data, (void *)tempBuffPtr,
+            (tempWidth/2) * (tempHeight/2));
+        tempBuffPtr += ((tempWidth/2) * (tempHeight/2));
+        memcpy((void *) pOutputPlane[2].pac_data, (void *)tempBuffPtr,
+            (tempWidth/2) * (tempHeight/2));
+    }
+
+    pDecShellContext->mNbRenderedFrames++;
+    if ( 0 > pDecShellContext->mFirstRenderedCts ) {
+        pDecShellContext->mFirstRenderedCts = *pTime;
+    }
+    pDecShellContext->mLastRenderedCts = *pTime;
+
+cleanUp:
+    if( M4NO_ERROR == err ) {
+        *pTime = pDecShellContext->m_lastRenderCts;
+        ALOGV("VideoEditorVideoDecoder_render no error");
+    } else {
+        ALOGV("VideoEditorVideoDecoder_render ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorVideoDecoder_render end");
+    return err;
+}
+
+M4OSA_ERR VideoEditorVideoDecoder_getInterface(M4DECODER_VideoType decoderType,
+        M4DECODER_VideoType *pDecoderType, M4OSA_Context *pDecInterface) {
+    M4DECODER_VideoInterface* pDecoderInterface = M4OSA_NULL;
+
+    pDecoderInterface = 
(M4DECODER_VideoInterface*)M4OSA_32bitAlignedMalloc( + sizeof(M4DECODER_VideoInterface), M4DECODER_EXTERNAL, + (M4OSA_Char*)"VideoEditorVideoDecoder_getInterface" ); + if (M4OSA_NULL == pDecoderInterface) { + return M4ERR_ALLOC; + } + + *pDecoderType = decoderType; + + pDecoderInterface->m_pFctCreate = VideoEditorVideoDecoder_create; + pDecoderInterface->m_pFctDestroy = VideoEditorVideoDecoder_destroy; + pDecoderInterface->m_pFctGetOption = VideoEditorVideoDecoder_getOption; + pDecoderInterface->m_pFctSetOption = VideoEditorVideoDecoder_setOption; + pDecoderInterface->m_pFctDecode = VideoEditorVideoDecoder_decode; + pDecoderInterface->m_pFctRender = VideoEditorVideoDecoder_render; + + *pDecInterface = (M4OSA_Context)pDecoderInterface; + return M4NO_ERROR; +} + +M4OSA_ERR VideoEditorVideoDecoder_getSoftwareInterface(M4DECODER_VideoType decoderType, + M4DECODER_VideoType *pDecoderType, M4OSA_Context *pDecInterface) { + M4DECODER_VideoInterface* pDecoderInterface = M4OSA_NULL; + + pDecoderInterface = (M4DECODER_VideoInterface*)M4OSA_32bitAlignedMalloc( + sizeof(M4DECODER_VideoInterface), M4DECODER_EXTERNAL, + (M4OSA_Char*)"VideoEditorVideoDecoder_getInterface" ); + if (M4OSA_NULL == pDecoderInterface) { + return M4ERR_ALLOC; + } + + *pDecoderType = decoderType; + + pDecoderInterface->m_pFctCreate = VideoEditorVideoSoftwareDecoder_create; + pDecoderInterface->m_pFctDestroy = VideoEditorVideoDecoder_destroy; + pDecoderInterface->m_pFctGetOption = VideoEditorVideoDecoder_getOption; + pDecoderInterface->m_pFctSetOption = VideoEditorVideoDecoder_setOption; + pDecoderInterface->m_pFctDecode = VideoEditorVideoDecoder_decode; + pDecoderInterface->m_pFctRender = VideoEditorVideoDecoder_render; + + *pDecInterface = (M4OSA_Context)pDecoderInterface; + return M4NO_ERROR; +} +extern "C" { + +M4OSA_ERR VideoEditorVideoDecoder_getInterface_MPEG4( + M4DECODER_VideoType *pDecoderType, M4OSA_Context *pDecInterface) { + return VideoEditorVideoDecoder_getInterface(M4DECODER_kVideoTypeMPEG4, + pDecoderType, pDecInterface); +} + +M4OSA_ERR VideoEditorVideoDecoder_getInterface_H264( + M4DECODER_VideoType *pDecoderType, M4OSA_Context *pDecInterface) { + return VideoEditorVideoDecoder_getInterface(M4DECODER_kVideoTypeAVC, + pDecoderType, pDecInterface); + +} + +M4OSA_ERR VideoEditorVideoDecoder_getSoftwareInterface_MPEG4( + M4DECODER_VideoType *pDecoderType, M4OSA_Context *pDecInterface) { + return VideoEditorVideoDecoder_getSoftwareInterface(M4DECODER_kVideoTypeMPEG4, + pDecoderType, pDecInterface); +} + +M4OSA_ERR VideoEditorVideoDecoder_getSoftwareInterface_H264( + M4DECODER_VideoType *pDecoderType, M4OSA_Context *pDecInterface) { + return VideoEditorVideoDecoder_getSoftwareInterface(M4DECODER_kVideoTypeAVC, + pDecoderType, pDecInterface); + +} + +M4OSA_ERR VideoEditorVideoDecoder_getVideoDecodersAndCapabilities( + M4DECODER_VideoDecoders** decoders) { + return queryVideoDecoderCapabilities(decoders); +} + +} // extern "C" diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp new file mode 100644 index 0000000..1abc447 --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp @@ -0,0 +1,1295 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+*************************************************************************
+* @file   VideoEditorVideoEncoder.cpp
+* @brief  StageFright shell video encoder
+*************************************************************************
+*/
+#define LOG_NDEBUG 1
+#define LOG_TAG "VIDEOEDITOR_VIDEOENCODER"
+
+/*******************
+ *     HEADERS     *
+ *******************/
+#include "M4OSA_Debug.h"
+#include "M4SYS_AccessUnit.h"
+#include "VideoEditorVideoEncoder.h"
+#include "MediaBufferPuller.h"
+#include <I420ColorConverter.h>
+
+#include <unistd.h>
+#include "utils/Log.h"
+#include "utils/Vector.h"
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/OMXCodec.h>
+#include <media/MediaProfiles.h>
+#include "OMX_Video.h"
+
+#include "IntelVideoEditorEncoderSource.h"
+#include "IntelVideoEditorAVCEncoder.h"
+#include "IntelVideoEditorH263Encoder.h"
+#include "IntelVideoEditorUtils.h"
+
+/********************
+ *   DEFINITIONS    *
+ ********************/
+
+// Force using hardware encoder
+#define VIDEOEDITOR_FORCECODEC kHardwareCodecsOnly
+
+#if !defined(VIDEOEDITOR_FORCECODEC)
+    #error "Cannot force DSI retrieval if codec type is not fixed"
+#endif
+
+/********************
+ *   SOURCE CLASS   *
+ ********************/
+
+namespace android {
+
+struct VideoEditorVideoEncoderSource : public MediaSource {
+    public:
+        static sp<VideoEditorVideoEncoderSource> Create(
+            const sp<MetaData> &format);
+        virtual status_t start(MetaData *params = NULL);
+        virtual status_t stop();
+        virtual sp<MetaData> getFormat();
+        virtual status_t read(MediaBuffer **buffer,
+            const ReadOptions *options = NULL);
+        virtual int32_t storeBuffer(MediaBuffer *buffer);
+        virtual int32_t getNumberOfBuffersInQueue();
+
+    protected:
+        virtual ~VideoEditorVideoEncoderSource();
+
+    private:
+        struct MediaBufferChain {
+            MediaBuffer* buffer;
+            MediaBufferChain* nextLink;
+        };
+        enum State {
+            CREATED,
+            STARTED,
+            ERROR
+        };
+        VideoEditorVideoEncoderSource(const sp<MetaData> &format);
+
+        // Don't call me
+        VideoEditorVideoEncoderSource(const VideoEditorVideoEncoderSource &);
+        VideoEditorVideoEncoderSource &operator=(
+            const VideoEditorVideoEncoderSource &);
+
+        MediaBufferChain* mFirstBufferLink;
+        MediaBufferChain* mLastBufferLink;
+        int32_t           mNbBuffer;
+        bool              mIsEOS;
+        State             mState;
+        sp<MetaData>      mEncFormat;
+        Mutex             mLock;
+        Condition         mBufferCond;
+};
+
+sp<VideoEditorVideoEncoderSource> VideoEditorVideoEncoderSource::Create(
+        const sp<MetaData> &format) {
+
+    sp<VideoEditorVideoEncoderSource> aSource =
+        new VideoEditorVideoEncoderSource(format);
+    return aSource;
+}
+
+VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource(
+        const sp<MetaData> &format):
+        mFirstBufferLink(NULL),
+        mLastBufferLink(NULL),
+        mNbBuffer(0),
+        mIsEOS(false),
+        mState(CREATED),
+        mEncFormat(format) {
+    ALOGV("VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource");
+}
+
+VideoEditorVideoEncoderSource::~VideoEditorVideoEncoderSource() {
+
+    // Safety clean up
+    if( STARTED == mState ) {
+        stop();
+    }
+}
+
+status_t VideoEditorVideoEncoderSource::start(MetaData *meta) {
+    status_t err = OK;
+
+    ALOGV("VideoEditorVideoEncoderSource::start() begin");
+
+    if( CREATED != mState ) {
+        ALOGV("VideoEditorVideoEncoderSource::start: invalid state %d", mState);
+        return UNKNOWN_ERROR;
+    }
+    mState = STARTED;
+
+    
ALOGV("VideoEditorVideoEncoderSource::start() END (0x%x)", err); + return err; +} + +status_t VideoEditorVideoEncoderSource::stop() { + status_t err = OK; + + ALOGV("VideoEditorVideoEncoderSource::stop() begin"); + + if( STARTED != mState ) { + ALOGV("VideoEditorVideoEncoderSource::stop: invalid state %d", mState); + return UNKNOWN_ERROR; + } + + // Release the buffer chain + int32_t i = 0; + MediaBufferChain* tmpLink = NULL; + while( mFirstBufferLink ) { + i++; + tmpLink = mFirstBufferLink; + mFirstBufferLink = mFirstBufferLink->nextLink; + delete tmpLink; + } + ALOGV("VideoEditorVideoEncoderSource::stop : %d buffer remained", i); + mFirstBufferLink = NULL; + mLastBufferLink = NULL; + + mState = CREATED; + + ALOGV("VideoEditorVideoEncoderSource::stop() END (0x%x)", err); + return err; +} + +sp VideoEditorVideoEncoderSource::getFormat() { + + ALOGV("VideoEditorVideoEncoderSource::getFormat"); + return mEncFormat; +} + +status_t VideoEditorVideoEncoderSource::read(MediaBuffer **buffer, + const ReadOptions *options) { + Mutex::Autolock autolock(mLock); + MediaSource::ReadOptions readOptions; + status_t err = OK; + MediaBufferChain* tmpLink = NULL; + + ALOGV("VideoEditorVideoEncoderSource::read() begin"); + + if ( STARTED != mState ) { + ALOGV("VideoEditorVideoEncoderSource::read: invalid state %d", mState); + return UNKNOWN_ERROR; + } + + while (mFirstBufferLink == NULL && !mIsEOS) { + mBufferCond.wait(mLock); + } + + // End of stream? + if (mFirstBufferLink == NULL) { + *buffer = NULL; + ALOGV("VideoEditorVideoEncoderSource::read : EOS"); + return ERROR_END_OF_STREAM; + } + + // Get a buffer from the chain + *buffer = mFirstBufferLink->buffer; + tmpLink = mFirstBufferLink; + mFirstBufferLink = mFirstBufferLink->nextLink; + + if ( NULL == mFirstBufferLink ) { + mLastBufferLink = NULL; + } + delete tmpLink; + mNbBuffer--; + + ALOGV("VideoEditorVideoEncoderSource::read() END (0x%x)", err); + return err; +} + +int32_t VideoEditorVideoEncoderSource::storeBuffer(MediaBuffer *buffer) { + Mutex::Autolock autolock(mLock); + status_t err = OK; + + ALOGV("VideoEditorVideoEncoderSource::storeBuffer() begin"); + + if( NULL == buffer ) { + ALOGV("VideoEditorVideoEncoderSource::storeBuffer : reached EOS"); + mIsEOS = true; + } else { + MediaBufferChain* newLink = new MediaBufferChain; + newLink->buffer = buffer; + newLink->nextLink = NULL; + if( NULL != mLastBufferLink ) { + mLastBufferLink->nextLink = newLink; + } else { + mFirstBufferLink = newLink; + } + mLastBufferLink = newLink; + mNbBuffer++; + } + mBufferCond.signal(); + ALOGV("VideoEditorVideoEncoderSource::storeBuffer() end"); + return mNbBuffer; +} + +int32_t VideoEditorVideoEncoderSource::getNumberOfBuffersInQueue() { + Mutex::Autolock autolock(mLock); + return mNbBuffer; +} + +/** + ****************************************************************************** + * structure VideoEditorVideoEncoder_Context + * @brief This structure defines the context of the StageFright video encoder + * shell + ****************************************************************************** +*/ +typedef enum { + CREATED = 0x1, + OPENED = 0x2, + STARTED = 0x4, + BUFFERING = 0x8, + READING = 0x10 +} VideoEditorVideoEncoder_State; + +typedef struct { + VideoEditorVideoEncoder_State mState; + M4ENCODER_Format mFormat; + M4WRITER_DataInterface* mWriterDataInterface; + M4VPP_apply_fct* mPreProcFunction; + M4VPP_Context mPreProcContext; + M4SYS_AccessUnit* mAccessUnit; + M4ENCODER_Params* mCodecParams; + M4ENCODER_Header mHeader; + H264MCS_ProcessEncodedNALU_fct* 
        mH264NALUPostProcessFct;
+    M4OSA_Context                     mH264NALUPostProcessCtx;
+    M4OSA_UInt32                      mLastCTS;
+    sp<IntelVideoEditorEncoderSource> mEncoderSource;
+    OMXClient                         mClient;
+    sp<MediaSource>                   mEncoder;
+    OMX_COLOR_FORMATTYPE              mEncoderColorFormat;
+    MediaBufferPuller*                mPuller;
+    I420ColorConverter*               mI420ColorConverter;
+
+    uint32_t                          mNbInputFrames;
+    double                            mFirstInputCts;
+    double                            mLastInputCts;
+    uint32_t                          mNbOutputFrames;
+    int64_t                           mFirstOutputCts;
+    int64_t                           mLastOutputCts;
+
+    MediaProfiles *mVideoEditorProfile;
+    int32_t mMaxPrefetchFrames;
+} VideoEditorVideoEncoder_Context;
+
+/********************
+ *      TOOLS       *
+ ********************/
+
+M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext,
+        sp<MetaData> metaData) {
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
+    status_t result = OK;
+    int32_t nbBuffer = 0;
+    int32_t stride = 0;
+    int32_t height = 0;
+    int32_t framerate = 0;
+    int32_t isCodecConfig = 0;
+    size_t size = 0;
+    uint32_t codecFlags = 0;
+    MediaBuffer* inputBuffer = NULL;
+    MediaBuffer* outputBuffer = NULL;
+    sp<IntelVideoEditorEncoderSource> encoderSource = NULL;
+    sp<MediaSource> encoder = NULL;
+    OMXClient client;
+
+    ALOGV("VideoEditorVideoEncoder_getDSI begin");
+    // Input parameters check
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+    VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER);
+
+    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
+    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);
+
+    // Create the encoder source
+    encoderSource = IntelVideoEditorEncoderSource::Create(metaData);
+    VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE);
+
+    // Create the hardware encoder
+    encoder = new IntelVideoEditorAVCEncoder(encoderSource, metaData);
+    VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE);
+
+    /**
+     * Send fake frames and retrieve the DSI
+     */
+    // Send a fake frame to the source
+    metaData->findInt32(kKeyStride, &stride);
+    metaData->findInt32(kKeyHeight, &height);
+    metaData->findInt32(kKeySampleRate, &framerate);
+
+    result = encoder->start();
+    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
+
+    encoderSource->requestBuffer(&inputBuffer);
+
+    inputBuffer->meta_data()->setInt64(kKeyTime, 0);
+    nbBuffer = encoderSource->storeBuffer(inputBuffer);
+    encoderSource->storeBuffer(NULL); // Signal EOS
+
+    // Call read once to get the DSI
+    result = encoder->read(&outputBuffer, NULL);
+    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
+    VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32(
+        kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE);
+
+    VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE);
+    if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) {
+        // For H264, format the DSI
+        LOGV("outputBuffer->range_offset() = %d, outputBuffer->range_length() = %d",
+            outputBuffer->range_offset(), outputBuffer->range_length());
+        result = buildAVCCodecSpecificData(
+            (uint8_t**)(&(pEncoderContext->mHeader.pBuf)),
+            (size_t*)(&(pEncoderContext->mHeader.Size)),
+            (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(),
+            outputBuffer->range_length(), encoder->getFormat().get());
+        outputBuffer->release();
+        VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
+    } else {
+        // For MPEG4, just copy the DSI
+        pEncoderContext->mHeader.Size =
+            (M4OSA_UInt32)outputBuffer->range_length();
+        SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8,
+            pEncoderContext->mHeader.Size, "Encoder header");
+        memcpy((void *)pEncoderContext->mHeader.pBuf,
+            (void
*)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()), + pEncoderContext->mHeader.Size); + outputBuffer->release(); + } + + result = encoder->stop(); + VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); + +cleanUp: + // Destroy the graph + if ( encoder != NULL ) { encoder.clear(); } + //client.disconnect(); + if ( encoderSource != NULL ) { encoderSource.clear(); } + if ( M4NO_ERROR == err ) { + ALOGV("VideoEditorVideoEncoder_getDSI no error"); + } else { + ALOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err); + } + ALOGV("VideoEditorVideoEncoder_getDSI end"); + return err; +} +/******************** + * ENGINE INTERFACE * + ********************/ + +M4OSA_ERR VideoEditorVideoEncoder_cleanup(M4ENCODER_Context pContext) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; + + ALOGV("VideoEditorVideoEncoder_cleanup begin"); + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + + pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; + VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); + + // Release memory + SAFE_FREE(pEncoderContext->mHeader.pBuf); + SAFE_FREE(pEncoderContext); + pContext = M4OSA_NULL; + +cleanUp: + if ( M4NO_ERROR == err ) { + ALOGV("VideoEditorVideoEncoder_cleanup no error"); + } else { + ALOGV("VideoEditorVideoEncoder_cleanup ERROR 0x%X", err); + } + ALOGV("VideoEditorVideoEncoder_cleanup end"); + return err; +} + +M4OSA_ERR VideoEditorVideoEncoder_init(M4ENCODER_Format format, + M4ENCODER_Context* pContext, + M4WRITER_DataInterface* pWriterDataInterface, + M4VPP_apply_fct* pVPPfct, M4VPP_Context pVPPctxt, + M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) { + + M4OSA_ERR err = M4NO_ERROR; + VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; + int encoderInput = OMX_COLOR_FormatYUV420Planar; + + ALOGV("VideoEditorVideoEncoder_init begin: format %d", format); + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pWriterDataInterface, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPfct, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPctxt, M4ERR_PARAMETER); + + // Context allocation & initialization + SAFE_MALLOC(pEncoderContext, VideoEditorVideoEncoder_Context, 1, + "VideoEditorVideoEncoder"); + pEncoderContext->mState = CREATED; + pEncoderContext->mFormat = format; + pEncoderContext->mWriterDataInterface = pWriterDataInterface; + pEncoderContext->mPreProcFunction = pVPPfct; + pEncoderContext->mPreProcContext = pVPPctxt; + pEncoderContext->mPuller = NULL; + + // Get color converter and determine encoder input format + pEncoderContext->mI420ColorConverter = new I420ColorConverter; + if (pEncoderContext->mI420ColorConverter->isLoaded()) { + encoderInput = pEncoderContext->mI420ColorConverter->getEncoderInputFormat(); + } + if (encoderInput == OMX_COLOR_FormatYUV420Planar) { + delete pEncoderContext->mI420ColorConverter; + pEncoderContext->mI420ColorConverter = NULL; + } + pEncoderContext->mEncoderColorFormat = (OMX_COLOR_FORMATTYPE)encoderInput; + ALOGI("encoder input format = 0x%X\n", encoderInput); + + *pContext = pEncoderContext; + +cleanUp: + if ( M4NO_ERROR == err ) { + ALOGV("VideoEditorVideoEncoder_init no error"); + } else { + VideoEditorVideoEncoder_cleanup(pEncoderContext); + *pContext = M4OSA_NULL; + ALOGV("VideoEditorVideoEncoder_init ERROR 0x%X", err); + } + ALOGV("VideoEditorVideoEncoder_init end"); + return err; +} + +M4OSA_ERR 
VideoEditorVideoEncoder_init_H263(M4ENCODER_Context* pContext,
+        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
+        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
+        {
+
+    return VideoEditorVideoEncoder_init(M4ENCODER_kH263, pContext,
+        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
+}
+
+
+M4OSA_ERR VideoEditorVideoEncoder_init_MPEG4(M4ENCODER_Context* pContext,
+        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
+        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
+        {
+
+    return VideoEditorVideoEncoder_init(M4ENCODER_kMPEG4, pContext,
+        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
+}
+
+
+M4OSA_ERR VideoEditorVideoEncoder_init_H264(M4ENCODER_Context* pContext,
+        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
+        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
+        {
+
+    return VideoEditorVideoEncoder_init(M4ENCODER_kH264, pContext,
+        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
+}
+
+M4OSA_ERR VideoEditorVideoEncoder_close(M4ENCODER_Context pContext) {
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
+
+    ALOGV("VideoEditorVideoEncoder_close begin");
+    // Input parameters check
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+
+    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
+    VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE);
+
+    // Release memory
+    SAFE_FREE(pEncoderContext->mCodecParams);
+
+    // Destroy the graph
+    pEncoderContext->mEncoder.clear();
+    // pEncoderContext->mClient.disconnect();
+    pEncoderContext->mEncoderSource.clear();
+
+    delete pEncoderContext->mPuller;
+    pEncoderContext->mPuller = NULL;
+
+    delete pEncoderContext->mI420ColorConverter;
+    pEncoderContext->mI420ColorConverter = NULL;
+
+    // Set the new state
+    pEncoderContext->mState = CREATED;
+
+cleanUp:
+    if( M4NO_ERROR == err ) {
+        ALOGV("VideoEditorVideoEncoder_close no error");
+    } else {
+        ALOGV("VideoEditorVideoEncoder_close ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorVideoEncoder_close end");
+    return err;
+}
+
+
+M4OSA_ERR VideoEditorVideoEncoder_open(M4ENCODER_Context pContext,
+        M4SYS_AccessUnit* pAU, M4OSA_Void* pParams) {
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
+    M4ENCODER_Params* pCodecParams = M4OSA_NULL;
+    status_t result = OK;
+    sp<MetaData> encoderMetadata = NULL;
+    const char* mime = NULL;
+    int32_t iProfile = 0;
+    int32_t iLevel = 0;
+
+    int32_t iFrameRate = 0;
+    uint32_t codecFlags = 0;
+
+    ALOGV(">>> VideoEditorVideoEncoder_open begin");
+    // Input parameters check
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pAU, M4ERR_PARAMETER);
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pParams, M4ERR_PARAMETER);
+
+    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
+    pCodecParams = (M4ENCODER_Params*)pParams;
+    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);
+
+    // Context initialization
+    pEncoderContext->mAccessUnit = pAU;
+    pEncoderContext->mVideoEditorProfile = MediaProfiles::getInstance();
+    pEncoderContext->mMaxPrefetchFrames =
+        pEncoderContext->mVideoEditorProfile->getVideoEditorCapParamByName(
+        "maxPrefetchYUVFrames");
+
+    // Allocate & initialize the encoding parameters
+    SAFE_MALLOC(pEncoderContext->mCodecParams, M4ENCODER_Params, 1,
+        "VideoEditorVideoEncoder");
+
+
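+    // Copy the caller's encoding parameters into the context's own allocation.
+    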
pEncoderContext->mCodecParams->InputFormat = pCodecParams->InputFormat; + pEncoderContext->mCodecParams->InputFrameWidth = + pCodecParams->InputFrameWidth; + pEncoderContext->mCodecParams->InputFrameHeight = + pCodecParams->InputFrameHeight; + pEncoderContext->mCodecParams->FrameWidth = pCodecParams->FrameWidth; + pEncoderContext->mCodecParams->FrameHeight = pCodecParams->FrameHeight; + pEncoderContext->mCodecParams->Bitrate = pCodecParams->Bitrate; + pEncoderContext->mCodecParams->FrameRate = pCodecParams->FrameRate; + pEncoderContext->mCodecParams->Format = pCodecParams->Format; + pEncoderContext->mCodecParams->videoProfile = pCodecParams->videoProfile; + pEncoderContext->mCodecParams->videoLevel= pCodecParams->videoLevel; + + // Check output format consistency and resolution + VIDEOEDITOR_CHECK( + pEncoderContext->mCodecParams->Format == pEncoderContext->mFormat, + M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameWidth % 16, + M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameHeight % 16, + M4ERR_PARAMETER); + + /** + * StageFright graph building + */ + + // Create the meta data for the encoder + encoderMetadata = new MetaData; + switch( pEncoderContext->mCodecParams->Format ) { + case M4ENCODER_kH263: + mime = MEDIA_MIMETYPE_VIDEO_H263; + break; + case M4ENCODER_kMPEG4: + mime = MEDIA_MIMETYPE_VIDEO_MPEG4; + break; + case M4ENCODER_kH264: + mime = MEDIA_MIMETYPE_VIDEO_AVC; + break; + default: + VIDEOEDITOR_CHECK(!"VideoEncoder_open : incorrect input format", + M4ERR_PARAMETER); + break; + } + iProfile = pEncoderContext->mCodecParams->videoProfile; + iLevel = pEncoderContext->mCodecParams->videoLevel; + ALOGV("Encoder mime %s profile %d, level %d", + mime,iProfile, iLevel); + ALOGV("Encoder w %d, h %d, bitrate %d, fps %d", + pEncoderContext->mCodecParams->FrameWidth, + pEncoderContext->mCodecParams->FrameHeight, + pEncoderContext->mCodecParams->Bitrate, + pEncoderContext->mCodecParams->FrameRate); + CHECK(iProfile != 0x7fffffff); + CHECK(iLevel != 0x7fffffff); + + encoderMetadata->setCString(kKeyMIMEType, mime); + encoderMetadata->setInt32(kKeyVideoProfile, iProfile); + //FIXME: + // Temp: Do not set the level for Mpeg4 / H.263 Enc + // as OMX.Nvidia.mp4.encoder and OMX.Nvidia.h263.encoder + // return 0x80001019 + if (pEncoderContext->mCodecParams->Format == M4ENCODER_kH264) { + encoderMetadata->setInt32(kKeyVideoLevel, iLevel); + } + encoderMetadata->setInt32(kKeyWidth, + (int32_t)pEncoderContext->mCodecParams->FrameWidth); + encoderMetadata->setInt32(kKeyStride, + (int32_t)pEncoderContext->mCodecParams->FrameWidth); + encoderMetadata->setInt32(kKeyHeight, + (int32_t)pEncoderContext->mCodecParams->FrameHeight); + encoderMetadata->setInt32(kKeySliceHeight, + (int32_t)pEncoderContext->mCodecParams->FrameHeight); + + switch( pEncoderContext->mCodecParams->FrameRate ) { + case M4ENCODER_k5_FPS: iFrameRate = 5; break; + case M4ENCODER_k7_5_FPS: iFrameRate = 8; break; + case M4ENCODER_k10_FPS: iFrameRate = 10; break; + case M4ENCODER_k12_5_FPS: iFrameRate = 13; break; + case M4ENCODER_k15_FPS: iFrameRate = 15; break; + case M4ENCODER_k20_FPS: iFrameRate = 20; break; + case M4ENCODER_k25_FPS: iFrameRate = 25; break; + case M4ENCODER_k30_FPS: iFrameRate = 30; break; + case M4ENCODER_kVARIABLE_FPS: + iFrameRate = 30; + ALOGI("Frame rate set to M4ENCODER_kVARIABLE_FPS: set to 30"); + break; + case M4ENCODER_kUSE_TIMESCALE: + iFrameRate = 30; + ALOGI("Frame rate set to M4ENCODER_kUSE_TIMESCALE: set to 30"); + break; + + default: + 
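// Unrecognized frame rate value
+            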
VIDEOEDITOR_CHECK(!"VideoEncoder_open:incorrect framerate", + M4ERR_STATE); + break; + } + encoderMetadata->setInt32(kKeyFrameRate, iFrameRate); + encoderMetadata->setInt32(kKeyBitRate, + (int32_t)pEncoderContext->mCodecParams->Bitrate); + encoderMetadata->setInt32(kKeyIFramesInterval, 1); + + encoderMetadata->setInt32(kKeyColorFormat, + pEncoderContext->mEncoderColorFormat); + + if (pEncoderContext->mCodecParams->Format != M4ENCODER_kH263) { + // Get the encoder DSI + err = VideoEditorVideoEncoder_getDSI(pEncoderContext, encoderMetadata); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + } + + // Create the encoder source + pEncoderContext->mEncoderSource = IntelVideoEditorEncoderSource::Create( + encoderMetadata); + VIDEOEDITOR_CHECK( + NULL != pEncoderContext->mEncoderSource.get(), M4ERR_STATE); + + // Create the HW Encoder + if (pEncoderContext->mCodecParams->Format == M4ENCODER_kH264) { + pEncoderContext->mEncoder = new IntelVideoEditorAVCEncoder( + pEncoderContext->mEncoderSource, encoderMetadata); + } else if (pEncoderContext->mCodecParams->Format == M4ENCODER_kH263) { + pEncoderContext->mEncoder = new IntelVideoEditorH263Encoder( + pEncoderContext->mEncoderSource, encoderMetadata); + } + VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoder.get(), M4ERR_STATE); + ALOGV("VideoEditorVideoEncoder_open : DONE"); + pEncoderContext->mPuller = new MediaBufferPuller( + pEncoderContext->mEncoder); + + // Set the new state + pEncoderContext->mState = OPENED; + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorVideoEncoder_open no error"); + } else { + VideoEditorVideoEncoder_close(pEncoderContext); + ALOGV("VideoEditorVideoEncoder_open ERROR 0x%X", err); + } + ALOGV("VideoEditorVideoEncoder_open end"); + return err; +} + +M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer( + M4ENCODER_Context pContext, M4OSA_Double Cts, + M4OSA_Bool bReachedEOS) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; + M4VIFI_ImagePlane pOutPlane[3]; + MediaBuffer* buffer = NULL; + int32_t nbBuffer = 0; + + ALOGV("VideoEditorVideoEncoder_processInputBuffer begin: cts %f", Cts); + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + + pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; + pOutPlane[0].pac_data = M4OSA_NULL; + pOutPlane[1].pac_data = M4OSA_NULL; + pOutPlane[2].pac_data = M4OSA_NULL; + + if ( M4OSA_FALSE == bReachedEOS ) { + M4OSA_UInt32 sizeY = pEncoderContext->mCodecParams->FrameWidth * + pEncoderContext->mCodecParams->FrameHeight; + M4OSA_UInt32 sizeU = sizeY >> 2; + M4OSA_UInt32 size = sizeY + 2*sizeU; + M4OSA_UInt8* pData = M4OSA_NULL; + buffer = new MediaBuffer((size_t)size); + pData = (M4OSA_UInt8*)buffer->data() + buffer->range_offset(); + + // Prepare the output image for pre-processing + pOutPlane[0].u_width = pEncoderContext->mCodecParams->FrameWidth; + pOutPlane[0].u_height = pEncoderContext->mCodecParams->FrameHeight; + pOutPlane[0].u_topleft = 0; + pOutPlane[0].u_stride = pOutPlane[0].u_width; + pOutPlane[1].u_width = pOutPlane[0].u_width/2; + pOutPlane[1].u_height = pOutPlane[0].u_height/2; + pOutPlane[1].u_topleft = 0; + pOutPlane[1].u_stride = pOutPlane[0].u_stride/2; + pOutPlane[2].u_width = pOutPlane[1].u_width; + pOutPlane[2].u_height = pOutPlane[1].u_height; + pOutPlane[2].u_topleft = 0; + pOutPlane[2].u_stride = pOutPlane[1].u_stride; + + pOutPlane[0].pac_data = pData; + pOutPlane[1].pac_data = pData + sizeY; + pOutPlane[2].pac_data = pData + sizeY + sizeU; + + // Apply 
pre-processing + err = pEncoderContext->mPreProcFunction( + pEncoderContext->mPreProcContext, M4OSA_NULL, pOutPlane); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + + // Convert MediaBuffer to the encoder input format if necessary + if (pEncoderContext->mI420ColorConverter) { + I420ColorConverter* converter = pEncoderContext->mI420ColorConverter; + int actualWidth = pEncoderContext->mCodecParams->FrameWidth; + int actualHeight = pEncoderContext->mCodecParams->FrameHeight; + + int encoderWidth, encoderHeight; + ARect encoderRect; + int encoderBufferSize; + + if (converter->getEncoderInputBufferInfo( + actualWidth, actualHeight, + &encoderWidth, &encoderHeight, + &encoderRect, &encoderBufferSize) == 0) { + + MediaBuffer* newBuffer; + pEncoderContext->mEncoderSource->requestBuffer(&newBuffer); + if (converter->convertI420ToEncoderInput( + pData, // srcBits + actualWidth, actualHeight, + encoderWidth, encoderHeight, + encoderRect, + (uint8_t*)newBuffer->data() + newBuffer->range_offset()) < 0) { + ALOGE("convertI420ToEncoderInput failed"); + } + + // switch to new buffer + buffer->release(); + buffer = newBuffer; + } + } + + // Set the metadata + buffer->meta_data()->setInt64(kKeyTime, (int64_t)(Cts*1000)); + } + + // Push the buffer to the source, a NULL buffer, notifies the source of EOS + nbBuffer = pEncoderContext->mEncoderSource->storeBuffer(buffer); + +cleanUp: + if ( M4NO_ERROR == err ) { + ALOGV("VideoEditorVideoEncoder_processInputBuffer error 0x%X", err); + } else { + if( NULL != buffer ) { + buffer->release(); + } + ALOGV("VideoEditorVideoEncoder_processInputBuffer ERROR 0x%X", err); + } + ALOGV("VideoEditorVideoEncoder_processInputBuffer end"); + return err; +} + +M4OSA_ERR VideoEditorVideoEncoder_processOutputBuffer( + M4ENCODER_Context pContext, MediaBuffer* buffer) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; + M4OSA_UInt32 Cts = 0; + uint8_t *data; + uint32_t length; + int32_t i32Tmp = 0; + int64_t i64Tmp = 0; + status_t result = OK; + LOGV("VideoEditorVideoEncoder_processOutputBuffer begin"); + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != buffer, M4ERR_PARAMETER); + + pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; + + // Process the returned AU + if ( 0 == buffer->range_length() ) { + // Encoder has no data yet, nothing unusual + LOGV("VideoEditorVideoEncoder_processOutputBuffer : buffer is empty"); + goto cleanUp; + } + VIDEOEDITOR_CHECK(0 == ((M4OSA_UInt32)buffer->data())%4, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(buffer->meta_data().get(), M4ERR_PARAMETER); + if ( buffer->meta_data()->findInt32(kKeyIsCodecConfig, &i32Tmp) && i32Tmp ){ + LOGV("VideoEditorVideoEncoder_processOutputBuffer DSI %d",buffer->range_length()); + removeAVCCodecSpecificData(&data,&length,(const uint8_t*) buffer->data(),buffer->range_length(),NULL); + buffer->set_range(buffer->range_offset() + length, buffer->range_length() - length); + } + + // Check the CTS + VIDEOEDITOR_CHECK(buffer->meta_data()->findInt64(kKeyTime, &i64Tmp), + M4ERR_STATE); + + pEncoderContext->mNbOutputFrames++; + if ( 0 > pEncoderContext->mFirstOutputCts ) { + pEncoderContext->mFirstOutputCts = i64Tmp; + } + pEncoderContext->mLastOutputCts = i64Tmp; + + Cts = (M4OSA_Int32)(i64Tmp/1000); + LOGV("[TS_CHECK] VI/ENC WRITE frame %d @ %lld -> %d (last %d)", + pEncoderContext->mNbOutputFrames, i64Tmp, Cts, + pEncoderContext->mLastCTS); + if ( Cts < pEncoderContext->mLastCTS ) { + 
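// The encoder produced a CTS smaller than the last one written;
+        // writing it would break the writer's monotonic timeline, so this
+        // AU is dropped.
+        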
LOGV("VideoEncoder_processOutputBuffer WARNING : Cts is going " + "backwards %d < %d", Cts, pEncoderContext->mLastCTS); + goto cleanUp; + } + LOGV("VideoEditorVideoEncoder_processOutputBuffer : %d %d", + Cts, pEncoderContext->mLastCTS); + + // Retrieve the AU container + err = pEncoderContext->mWriterDataInterface->pStartAU( + pEncoderContext->mWriterDataInterface->pWriterContext, + pEncoderContext->mAccessUnit->stream->streamID, + pEncoderContext->mAccessUnit); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + + // Format the AU + VIDEOEDITOR_CHECK( + buffer->range_length() <= pEncoderContext->mAccessUnit->size, + M4ERR_PARAMETER); + // Remove H264 AU start code + if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) { + if (!memcmp((const uint8_t *)buffer->data() + \ + buffer->range_offset(), "\x00\x00\x00\x01", 4) ) { + buffer->set_range(buffer->range_offset() + 4, + buffer->range_length() - 4); + } + } + + if ( (M4ENCODER_kH264 == pEncoderContext->mFormat) && + (M4OSA_NULL != pEncoderContext->mH264NALUPostProcessFct) ) { + // H264 trimming case, NALU post processing is needed + M4OSA_Int32 outputSize = pEncoderContext->mAccessUnit->size; + err = pEncoderContext->mH264NALUPostProcessFct( + pEncoderContext->mH264NALUPostProcessCtx, + (M4OSA_UInt8*)buffer->data()+buffer->range_offset(), + buffer->range_length(), + (M4OSA_UInt8*)pEncoderContext->mAccessUnit->dataAddress, + &outputSize); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + pEncoderContext->mAccessUnit->size = (M4OSA_UInt32)outputSize; + } else { + // The AU can just be copied + memcpy((void *)pEncoderContext->mAccessUnit->\ + dataAddress, (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->\ + range_offset()), buffer->range_length()); + pEncoderContext->mAccessUnit->size = + (M4OSA_UInt32)buffer->range_length(); + } + + if ( buffer->meta_data()->findInt32(kKeyIsSyncFrame,&i32Tmp) && i32Tmp){ + pEncoderContext->mAccessUnit->attribute = AU_RAP; + } else { + pEncoderContext->mAccessUnit->attribute = AU_P_Frame; + } + pEncoderContext->mLastCTS = Cts; + pEncoderContext->mAccessUnit->CTS = Cts; + pEncoderContext->mAccessUnit->DTS = Cts; + + LOGV("VideoEditorVideoEncoder_processOutputBuffer: AU @ 0x%X 0x%X %d %d", + pEncoderContext->mAccessUnit->dataAddress, + *pEncoderContext->mAccessUnit->dataAddress, + pEncoderContext->mAccessUnit->size, + pEncoderContext->mAccessUnit->CTS); + + // Write the AU + err = pEncoderContext->mWriterDataInterface->pProcessAU( + pEncoderContext->mWriterDataInterface->pWriterContext, + pEncoderContext->mAccessUnit->stream->streamID, + pEncoderContext->mAccessUnit); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + +cleanUp: + if( M4NO_ERROR == err ) { + LOGV("VideoEditorVideoEncoder_processOutputBuffer no error"); + } else { + if (pEncoderContext != NULL) { + SAFE_FREE(pEncoderContext->mHeader.pBuf); + pEncoderContext->mHeader.Size = 0; + } + LOGV("VideoEditorVideoEncoder_processOutputBuffer ERROR 0x%X", err); + } + LOGV("VideoEditorVideoEncoder_processOutputBuffer end"); + return err; +} + +M4OSA_ERR VideoEditorVideoEncoder_encode(M4ENCODER_Context pContext, + M4VIFI_ImagePlane* pInPlane, M4OSA_Double Cts, + M4ENCODER_FrameMode FrameMode) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; + status_t result = OK; + MediaBuffer* outputBuffer = NULL; + + ALOGV("VideoEditorVideoEncoder_encode 0x%X %f %d", pInPlane, Cts, FrameMode); + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + + pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; + if ( STARTED == 
pEncoderContext->mState ) { + pEncoderContext->mState = BUFFERING; + } + VIDEOEDITOR_CHECK( + (BUFFERING | READING) & pEncoderContext->mState, M4ERR_STATE); + + pEncoderContext->mNbInputFrames++; + if ( 0 > pEncoderContext->mFirstInputCts ) { + pEncoderContext->mFirstInputCts = Cts; + } + pEncoderContext->mLastInputCts = Cts; + + ALOGV("VideoEditorVideoEncoder_encode 0x%X %d %f (%d)", pInPlane, FrameMode, + Cts, pEncoderContext->mLastCTS); + + // Push the input buffer to the encoder source + err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, Cts, + M4OSA_FALSE); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + + // Notify the source in case of EOS + if ( M4ENCODER_kLastFrame == FrameMode ) { + err = VideoEditorVideoEncoder_processInputBuffer( + pEncoderContext, 0, M4OSA_TRUE); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + } + + if ( BUFFERING == pEncoderContext->mState ) { + // Prefetch is complete, start reading + pEncoderContext->mState = READING; + } + // Read + while (1) { + MediaBuffer *outputBuffer = + pEncoderContext->mPuller->getBufferNonBlocking(); + + if (outputBuffer == NULL) { + break; + } else { + // Provide the encoded AU to the writer + err = VideoEditorVideoEncoder_processOutputBuffer(pEncoderContext, + outputBuffer); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + + pEncoderContext->mPuller->putBuffer(outputBuffer); + } + } + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorVideoEncoder_encode no error"); + } else { + ALOGV("VideoEditorVideoEncoder_encode ERROR 0x%X", err); + } + ALOGV("VideoEditorVideoEncoder_encode end"); + return err; +} + +M4OSA_ERR VideoEditorVideoEncoder_start(M4ENCODER_Context pContext) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; + status_t result = OK; + + ALOGV("VideoEditorVideoEncoder_start begin"); + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + + pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; + VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE); + + pEncoderContext->mNbInputFrames = 0; + pEncoderContext->mFirstInputCts = -1.0; + pEncoderContext->mLastInputCts = -1.0; + pEncoderContext->mNbOutputFrames = 0; + pEncoderContext->mFirstOutputCts = -1; + pEncoderContext->mLastOutputCts = -1; + + result = pEncoderContext->mEncoder->start(); + VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); + + pEncoderContext->mPuller->start(); + + // Set the new state + pEncoderContext->mState = STARTED; + +cleanUp: + if ( M4NO_ERROR == err ) { + ALOGV("VideoEditorVideoEncoder_start no error"); + } else { + ALOGV("VideoEditorVideoEncoder_start ERROR 0x%X", err); + } + ALOGV("VideoEditorVideoEncoder_start end"); + return err; +} + +M4OSA_ERR VideoEditorVideoEncoder_stop(M4ENCODER_Context pContext) { + M4OSA_ERR err = M4NO_ERROR; + VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; + MediaBuffer* outputBuffer = NULL; + status_t result = OK; + + ALOGV("VideoEditorVideoEncoder_stop begin"); + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); + pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; + + // Send EOS again to make sure the source doesn't block. 
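+    // (Passing NULL to storeBuffer() marks EOS and signals the buffer
+    // condition, unblocking any pending read().)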
+    err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, 0,
+        M4OSA_TRUE);
+    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
+
+    // Process the remaining buffers if necessary
+    if ( (BUFFERING | READING) & pEncoderContext->mState ) {
+        while (1) {
+            MediaBuffer *outputBuffer =
+                pEncoderContext->mPuller->getBufferBlocking();
+
+            if (outputBuffer == NULL) break;
+
+            err = VideoEditorVideoEncoder_processOutputBuffer(
+                pEncoderContext, outputBuffer);
+            VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
+
+            pEncoderContext->mPuller->putBuffer(outputBuffer);
+        }
+
+        pEncoderContext->mState = STARTED;
+    }
+
+    // Stop the graph module if necessary
+    if ( STARTED == pEncoderContext->mState ) {
+        pEncoderContext->mPuller->stop();
+        pEncoderContext->mEncoder->stop();
+        pEncoderContext->mState = OPENED;
+    }
+
+    if (pEncoderContext->mNbInputFrames != pEncoderContext->mNbOutputFrames) {
+        ALOGW("Some frames were not encoded: input(%d) != output(%d)",
+            pEncoderContext->mNbInputFrames, pEncoderContext->mNbOutputFrames);
+    }
+
+cleanUp:
+    if ( M4NO_ERROR == err ) {
+        ALOGV("VideoEditorVideoEncoder_stop no error");
+    } else {
+        ALOGV("VideoEditorVideoEncoder_stop ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorVideoEncoder_stop end");
+    return err;
+}
+
+M4OSA_ERR VideoEditorVideoEncoder_regulBitRate(M4ENCODER_Context pContext) {
+    ALOGW("regulBitRate is not implemented");
+    return M4NO_ERROR;
+}
+
+M4OSA_ERR VideoEditorVideoEncoder_setOption(M4ENCODER_Context pContext,
+        M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) {
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
+
+    ALOGV("VideoEditorVideoEncoder_setOption start optionID 0x%X", optionID);
+    // Input parameters check
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+
+    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
+
+    switch( optionID ) {
+        case M4ENCODER_kOptionID_SetH264ProcessNALUfctsPtr:
+            pEncoderContext->mH264NALUPostProcessFct =
+                (H264MCS_ProcessEncodedNALU_fct*)optionValue;
+            break;
+        case M4ENCODER_kOptionID_H264ProcessNALUContext:
+            pEncoderContext->mH264NALUPostProcessCtx =
+                (M4OSA_Context)optionValue;
+            break;
+        default:
+            ALOGV("VideoEditorVideoEncoder_setOption: unsupported optionId 0x%X",
+                optionID);
+            VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
+            break;
+    }
+
+cleanUp:
+    if ( M4NO_ERROR == err ) {
+        ALOGV("VideoEditorVideoEncoder_setOption no error");
+    } else {
+        ALOGV("VideoEditorVideoEncoder_setOption ERROR 0x%X", err);
+    }
+    ALOGV("VideoEditorVideoEncoder_setOption end");
+    return err;
+}
+
+M4OSA_ERR VideoEditorVideoEncoder_getOption(M4ENCODER_Context pContext,
+        M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) {
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
+
+    ALOGV("VideoEditorVideoEncoder_getOption begin optionId 0x%X", optionID);
+    // Input parameters check
+    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
+    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
+
+    switch( optionID ) {
+        case M4ENCODER_kOptionID_EncoderHeader:
+            VIDEOEDITOR_CHECK(
+                M4OSA_NULL != pEncoderContext->mHeader.pBuf, M4ERR_STATE);
+            *(M4ENCODER_Header**)optionValue = &(pEncoderContext->mHeader);
+            break;
+        default:
+            ALOGV("VideoEditorVideoEncoder_getOption: unsupported optionId 0x%X",
+                optionID);
+            VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
+            break;
+    }
+
+cleanUp:
+    if ( M4NO_ERROR == err ) {
+        ALOGV("VideoEditorVideoEncoder_getOption no error");
+    } else {
+        
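// err carries M4ERR_PARAMETER, M4ERR_STATE, or M4ERR_BAD_OPTION_ID
+        // from the VIDEOEDITOR_CHECK macros above.
+        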
ALOGV("VideoEditorVideoEncoder_getOption ERROR 0x%X", err); + } + return err; +} + +M4OSA_ERR VideoEditorVideoEncoder_getInterface(M4ENCODER_Format format, + M4ENCODER_Format* pFormat, + M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ + M4OSA_ERR err = M4NO_ERROR; + + // Input parameters check + VIDEOEDITOR_CHECK(M4OSA_NULL != pFormat, M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(M4OSA_NULL != pEncoderInterface, M4ERR_PARAMETER); + + ALOGV("VideoEditorVideoEncoder_getInterface begin 0x%x 0x%x %d", pFormat, + pEncoderInterface, mode); + + SAFE_MALLOC(*pEncoderInterface, M4ENCODER_GlobalInterface, 1, + "VideoEditorVideoEncoder"); + + *pFormat = format; + + switch( format ) { + case M4ENCODER_kH263: + { + (*pEncoderInterface)->pFctInit = + VideoEditorVideoEncoder_init_H263; + break; + } + case M4ENCODER_kMPEG4: + { + (*pEncoderInterface)->pFctInit = + VideoEditorVideoEncoder_init_MPEG4; + break; + } + case M4ENCODER_kH264: + { + (*pEncoderInterface)->pFctInit = + VideoEditorVideoEncoder_init_H264; + break; + } + default: + ALOGV("VideoEditorVideoEncoder_getInterface : unsupported format %d", + format); + VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); + break; + } + (*pEncoderInterface)->pFctOpen = VideoEditorVideoEncoder_open; + (*pEncoderInterface)->pFctStart = VideoEditorVideoEncoder_start; + (*pEncoderInterface)->pFctStop = VideoEditorVideoEncoder_stop; + (*pEncoderInterface)->pFctPause = M4OSA_NULL; + (*pEncoderInterface)->pFctResume = M4OSA_NULL; + (*pEncoderInterface)->pFctClose = VideoEditorVideoEncoder_close; + (*pEncoderInterface)->pFctCleanup = VideoEditorVideoEncoder_cleanup; + (*pEncoderInterface)->pFctRegulBitRate = + VideoEditorVideoEncoder_regulBitRate; + (*pEncoderInterface)->pFctEncode = VideoEditorVideoEncoder_encode; + (*pEncoderInterface)->pFctSetOption = VideoEditorVideoEncoder_setOption; + (*pEncoderInterface)->pFctGetOption = VideoEditorVideoEncoder_getOption; + +cleanUp: + if( M4NO_ERROR == err ) { + ALOGV("VideoEditorVideoEncoder_getInterface no error"); + } else { + *pEncoderInterface = M4OSA_NULL; + ALOGV("VideoEditorVideoEncoder_getInterface ERROR 0x%X", err); + } + return err; +} + +extern "C" { + +M4OSA_ERR VideoEditorVideoEncoder_getInterface_H263(M4ENCODER_Format* pFormat, + M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ + LOGI("VideoEditorVideoEncoder_getInterface_H263: Intel Version"); + return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH263, pFormat, + pEncoderInterface, mode); +} + +M4OSA_ERR VideoEditorVideoEncoder_getInterface_MPEG4(M4ENCODER_Format* pFormat, + M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ + LOGI("VideoEditorVideoEncoder_getInterface_MPEG4: Intel Version"); + return VideoEditorVideoEncoder_getInterface(M4ENCODER_kMPEG4, pFormat, + pEncoderInterface, mode); +} + +M4OSA_ERR VideoEditorVideoEncoder_getInterface_H264(M4ENCODER_Format* pFormat, + M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ + LOGI("VideoEditorVideoEncoder_getInterface_H264: Intel Version"); + return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH264, pFormat, + pEncoderInterface, mode); + +} + +} // extern "C" + +} // namespace android -- cgit v1.2.3 From 49f891f586cfd2bc1933d61dce9251001f234930 Mon Sep 17 00:00:00 2001 From: hding3 Date: Mon, 16 Jul 2012 11:45:41 +0800 Subject: [PORT FROM ICS] Support dynamic resolution change for omx working in raw data mode. BZ: 40303 Support dynamic resolution change for omx working in raw data mode. 
Change-Id: I7d7e501f960e6bcb07cd9dd5b864233d04db90e6 Signed-off-by: fxiao4X Signed-off-by: hding3 Reviewed-on: http://android.intel.com:8080/59881 Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderAVC.cpp | 24 +----------------------- videodecoder/VideoDecoderBase.cpp | 7 +++++-- videodecoder/VideoDecoderMPEG4.cpp | 1 + 3 files changed, 7 insertions(+), 25 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 243ca80..2ef67c3 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -657,36 +657,14 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { } Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) { - int width = mVideoFormatInfo.width; - int height = mVideoFormatInfo.height; - updateFormatInfo(data); if (mSizeChanged == false) { return DECODE_SUCCESS; - } else if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){ + } else { mSizeChanged = false; flushSurfaceBuffers(); return DECODE_FORMAT_CHANGE; } - - if (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth || - mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight) { - ETRACE("New video size %d x %d exceeds surface size %d x %d.", - mVideoFormatInfo.width, mVideoFormatInfo.height, - mVideoFormatInfo.surfaceWidth, mVideoFormatInfo.surfaceHeight); - return DECODE_NEED_RESTART; - } - - if (width == mVideoFormatInfo.width && - height == mVideoFormatInfo.height) { - ITRACE("New video sequence with the same resolution."); - mSizeChanged = false; - } else { - WTRACE("Video size changed from %d x %d to %d x %d.", width, height, - mVideoFormatInfo.width, mVideoFormatInfo.height); - flushSurfaceBuffers(); - } - return DECODE_SUCCESS; } bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data, bool equalPTS) { diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 69912da..c89b9b7 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -141,8 +141,10 @@ Decode_Status VideoDecoderBase::reset(VideoConfigBuffer *buffer) { mVideoFormatInfo.width = buffer->width; mVideoFormatInfo.height = buffer->height; - mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth; - mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight; + if (buffer->flag & USE_NATIVE_GRAPHIC_BUFFER) { + mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth; + mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight; + } mLowDelay = buffer->flag & WANT_LOW_DELAY; mRawOutput = buffer->flag & WANT_RAW_OUTPUT; mSignalBufferSize = 0; @@ -704,6 +706,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { } } } + // TODO: validate profile if (numSurface == 0) { return DECODE_FAIL; diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index 5499fbb..2ec6123 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -103,6 +103,7 @@ Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) { CHECK_STATUS("decodeFrame"); if (mSizeChanged) { mSizeChanged = false; + flushSurfaceBuffers(); return DECODE_FORMAT_CHANGE; } -- cgit v1.2.3 From b407009e29c6c71b85426ed3f858f99369f24bfd Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Fri, 3 Aug 2012 15:18:46 +0800 Subject: Enable encoder test tools build, use new API to create ref/rec frames to improve mem usage BZ: 50429 Enable encoder test tools build; Use new API to create ref/rec frames to improve mem usage; Add statistics log output; 
remove unused code Change-Id: I5b9a96777cadc452f0c639c377950425f2b46086 Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/60387 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- test/mix_encoder.cpp | 27 ++++---- videoencoder/VideoEncoderBase.cpp | 125 ++++++++------------------------------ 2 files changed, 38 insertions(+), 114 deletions(-) diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index 3198054..38fbc00 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -5,9 +5,10 @@ #include #include -#include -#include -#include +#include +#include +#include +#include #include #include @@ -460,9 +461,10 @@ void CreateGfxhandle() gGraphicBufferAlloc = composer->createGraphicBufferAlloc(); uint32_t usage = GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_WRITE_OFTEN | GraphicBuffer::USAGE_SW_READ_OFTEN; // | GraphicBuffer::USAGE_HW_COMPOSER; - int format = HAL_PIXEL_FORMAT_NV12_VED; //HAL_PIXEL_FORMAT_RGBA_8888 +// int format = HAL_PIXEL_FORMAT_YV12; + int format = 0x7FA00E00; // = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar in OMX_IVCommon.h int32_t error; - +/* int adjusted_width, adjusted_height; if (0) { ; @@ -483,26 +485,26 @@ void CreateGfxhandle() adjusted_height = (gHeight + 0x1f) & ~0x1f; printf("adjust width=%d, height=%d\n", adjusted_width, adjusted_height); +*/ for(int i = 0; i < gSrcFrames; i ++) { sp graphicBuffer( gGraphicBufferAlloc->createGraphicBuffer( -// gWidth, gHeight, format, usage, &error)); - adjusted_width, adjusted_height, format, usage, &error)); + gWidth, gHeight, format, usage, &error)); +// adjusted_width, adjusted_height, format, usage, &error)); gGraphicBuffer[i] = graphicBuffer; - graphicBuffer->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&gUsrptr[i])); - + graphicBuffer->lock(GraphicBuffer::USAGE_SW_WRITE_OFTEN, (void**)(&gUsrptr[i])); gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)gGraphicBuffer[i]->handle); graphicBuffer->unlock(); } - } void CreateGralloc() { int usage = GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_HW_TEXTURE; - int format = HAL_PIXEL_FORMAT_NV12_VED; +// int format = HAL_PIXEL_FORMAT_YV12; + int format = 0x7FA00E00; // = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar in OMX_IVCommon.h gfx_init(); @@ -512,13 +514,12 @@ void CreateGralloc() for(int i = 0; i < gSrcFrames; i ++) { gfx_alloc(gWidth, gHeight, format, usage, &handle, (int32_t*)&gStride); - gfx_lock(handle, usage, 0, 0, gWidth, gHeight, &vaddr); + gfx_lock(handle, GRALLOC_USAGE_SW_WRITE_OFTEN, 0, 0, gWidth, gHeight, &vaddr); printf("vaddr= %p\n", vaddr); gUsrptr[i] = (uint8_t*)vaddr; gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)handle); gfx_unlock(handle); } - } int CheckArgs(int argc, char* argv[]) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 2dea07e..b574d2c 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -162,11 +162,24 @@ Encode_Status VideoEncoderBase::start() { mRenderBitRate = true; } - LOG_V( "======VA CreateSurfaces for Rec/Ref frames ======\n"); - vaStatus = vaCreateSurfaces(mVADisplay,VA_RT_FORMAT_YUV420, mComParams.resolution.width, - mComParams.resolution.height, - surfaces, 2, NULL , 0); - CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateSurfaces"); + LOG_V( "======VA Create Surfaces for Rec/Ref frames ======\n"); + + VASurfaceAttributeTPI attribute_tpi; + + attribute_tpi.size = mComParams.resolution.width * 
mComParams.resolution.height * 3 / 2; + attribute_tpi.luma_stride = mComParams.resolution.width; + attribute_tpi.chroma_u_stride = mComParams.resolution.width; + attribute_tpi.chroma_v_stride = mComParams.resolution.width; + attribute_tpi.luma_offset = 0; + attribute_tpi.chroma_u_offset = mComParams.resolution.width * mComParams.resolution.height; + attribute_tpi.chroma_v_offset = mComParams.resolution.width * mComParams.resolution.height; + attribute_tpi.pixel_format = VA_FOURCC_NV12; + attribute_tpi.type = VAExternalMemoryNULL; + + vaCreateSurfacesWithAttribute(mVADisplay, mComParams.resolution.width, mComParams.resolution.height, + VA_RT_FORMAT_YUV420, 2, surfaces, &attribute_tpi); + CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); + mRefSurface = surfaces[0]; mRecSurface = surfaces[1]; @@ -301,7 +314,6 @@ Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { LOG_I( "Surface = 0x%08x\n",(uint32_t) mCurSurface); LOG_I( "mVADisplay = 0x%08x\n",(uint32_t)mVADisplay); -#if 0 #ifdef DUMP_SRC_DATA if (mBufferMode == BUFFER_SHARING_SURFACE && mFirstFrame){ @@ -345,7 +357,6 @@ Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { vaStatus = vaDestroyImage(mVADisplay, image.image_id); CHECK_VA_STATUS_RETURN("vaDestroyImage"); } -#endif #endif vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface); @@ -622,6 +633,12 @@ CLEAN_UP: mInitialized = false; #ifdef VIDEO_ENC_STATISTICS_ENABLE + LOG_V("Encoder Statistics:\n"); + LOG_V(" %d frames Encoded, %d frames Skipped\n", mEncodedFrames, mVideoStat.skipped_frames); + LOG_V(" Encode time: Average(%d us), Max(%d us @Frame No.%d), Min(%d us @Frame No.%d)\n", \ + mVideoStat.average_encode_time / mEncodedFrames, mVideoStat.max_encode_time, \ + mVideoStat.max_encode_frame, mVideoStat.min_encode_time, mVideoStat.min_encode_frame); + memset(&mVideoStat, 0, sizeof(VideoStatistics)); mVideoStat.min_encode_time = 0xFFFFFFFF; #endif @@ -1908,100 +1925,6 @@ SurfaceMap *VideoEncoderBase::findSurfaceMapByValue( return node; } -#if 0 -Encode_Status VideoEncoderBase::uploadDataToSurface(VideoEncRawBuffer *inBuffer) { - - VAStatus vaStatus = VA_STATUS_SUCCESS; - - uint32_t width = mComParams.resolution.width; - uint32_t height = mComParams.resolution.height; - - VAImage srcImage; - uint8_t *pvBuffer; - uint8_t *dstY; - uint8_t *dstUV; - uint32_t i,j; - - uint8_t *inBuf = inBuffer->data; - VAImage *image = NULL; - - int uvOffset = width * height; - uint8_t *uvBufIn = inBuf + uvOffset; - uint32_t uvHeight = height / 2; - uint32_t uvWidth = width; - - LOG_V("map source data to surface\n"); - LOG_I("Surface ID = 0x%08x\n", (uint32_t) mCurSurface); - - vaStatus = vaDeriveImage(mVADisplay, mCurSurface, &srcImage); - CHECK_VA_STATUS_RETURN("vaDeriveImage"); - - LOG_V( "vaDeriveImage Done\n"); - - image = &srcImage; - - vaStatus = vaMapBuffer(mVADisplay, image->buf, (void **)&pvBuffer); - CHECK_VA_STATUS_RETURN("vaMapBuffer"); - - LOG_V("vaImage information\n"); - LOG_I("image->pitches[0] = %d\n", image->pitches[0]); - LOG_I("image->pitches[1] = %d\n", image->pitches[1]); - LOG_I("image->offsets[0] = %d\n", image->offsets[0]); - LOG_I("image->offsets[1] = %d\n", image->offsets[1]); - LOG_I("image->num_planes = %d\n", image->num_planes); - LOG_I("image->width = %d\n", image->width); - LOG_I("image->height = %d\n", image->height); - - LOG_I("input buf size = %d\n", inBuffer->size); - - if (mComParams.rawFormat == RAW_FORMAT_YUV420) { - dstY = pvBuffer +image->offsets[0]; - - for (i = 0; i < height; i ++) { - memcpy(dstY, 
inBuf + i * width, width); - dstY += image->pitches[0]; - } - - dstUV = pvBuffer + image->offsets[1]; - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dstUV [j] = inBuf [width * height + i * width / 2 + j / 2]; - dstUV [j + 1] = - inBuf [width * height * 5 / 4 + i * width / 2 + j / 2]; - } - dstUV += image->pitches[1]; - } - } - - else if (mComParams.rawFormat == RAW_FORMAT_NV12) { - - dstY = pvBuffer + image->offsets[0]; - for (i = 0; i < height; i++) { - memcpy(dstY, inBuf + i * width, width); - dstY += image->pitches[0]; - } - - dstUV = pvBuffer + image->offsets[1]; - for (i = 0; i < uvHeight; i++) { - memcpy(dstUV, uvBufIn + i * uvWidth, uvWidth); - dstUV += image->pitches[1]; - } - } else { - LOG_E("Raw format not supoort\n"); - return ENCODE_FAIL; - } - - vaStatus = vaUnmapBuffer(mVADisplay, image->buf); - CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); - - vaStatus = vaDestroyImage(mVADisplay, srcImage.image_id); - CHECK_VA_STATUS_RETURN("vaDestroyImage"); - - return ENCODE_SUCCESS; -} -#endif - Encode_Status VideoEncoderBase::renderDynamicBitrate() { VAStatus vaStatus = VA_STATUS_SUCCESS; -- cgit v1.2.3 From 5c28f14e531a024ba8f6f87a9a4ad7690f42c4c7 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Mon, 6 Aug 2012 16:54:40 +0800 Subject: fix libmix encoder klockwork issues, rename IntelMetadatabuffer APIs BZ: 50965 fix libmix encoder klockwork issues, remove all unused variables; rename IntelMetadatabuffer APIs, rename SetBytes to UnSerialize, GetBytes to Serialize, SetBytes/GetBytes will be dropped later; Refine encoder code to avoid dynamic memory allocation. Change-Id: I44344bb6d2e8536e65ecee8957e67722c1af21e5 Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/60688 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- test/btest.cpp | 4 +-- test/mix_encoder.cpp | 9 +++--- videoencoder/IntelMetadataBuffer.cpp | 63 ++++++++++++++++++++++++++++++++++-- videoencoder/IntelMetadataBuffer.h | 12 +++++-- videoencoder/VideoEncoderAVC.cpp | 14 -------- videoencoder/VideoEncoderBase.cpp | 21 ++++-------- videoencoder/VideoEncoderH263.cpp | 1 - videoencoder/VideoEncoderMP4.cpp | 2 -- 8 files changed, 82 insertions(+), 44 deletions(-) diff --git a/test/btest.cpp b/test/btest.cpp index 26f104f..13b790b 100644 --- a/test/btest.cpp +++ b/test/btest.cpp @@ -37,7 +37,7 @@ int main(int argc, char* argv[]) ret = mb1->SetValueInfo(&vi1); ret = mb1->SetExtraValues(ev1, 10); } - ret = mb1->GetBytes(bytes, size); + ret = mb1->Serialize(bytes, size); printf("assembling IntelMetadataBuffer %s, ret = %d\n", (ret == IMB_SUCCESS)?"Success":"Fail", ret ); printf("size = %d, bytes = ", size); @@ -48,7 +48,7 @@ int main(int argc, char* argv[]) printf("\n"); mb2 = new IntelMetadataBuffer(); - ret = mb2->SetBytes(bytes, size); + ret = mb2->UnSerialize(bytes, size); printf("parsing IntelMetadataBuffer %s, ret = %d\n", (ret == IMB_SUCCESS)?"Success":"Fail", ret ); ret = mb2->GetType(t2); diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index 38fbc00..d4b88c0 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -697,7 +697,7 @@ for(int i=0; i<1; i++) { if (gMode != 4) { - gIMB[i % gSrcFrames]->GetBytes(data, size); + gIMB[i % gSrcFrames]->Serialize(data, size); // printf("srcno =%d, data=%x, size=%d\n", i % gSrcFrames, data, size); }else { @@ -729,11 +729,10 @@ for(int i=0; i<1; i++) stat.total_frames, stat.skipped_frames, stat.average_encode_time, stat.max_encode_time, stat.max_encode_frame, \ stat.min_encode_time, 
stat.min_encode_frame ); } - if(gVideoEncoder) { - releaseVideoEncoder(gVideoEncoder); - gVideoEncoder = NULL; - } + gVideoEncoder->stop(); + releaseVideoEncoder(gVideoEncoder); + gVideoEncoder = NULL; switch(gMode) { diff --git a/videoencoder/IntelMetadataBuffer.cpp b/videoencoder/IntelMetadataBuffer.cpp index 531d0ca..eb9fe43 100644 --- a/videoencoder/IntelMetadataBuffer.cpp +++ b/videoencoder/IntelMetadataBuffer.cpp @@ -60,6 +60,55 @@ IntelMetadataBuffer::~IntelMetadataBuffer() delete[] mBytes; } + +IntelMetadataBuffer::IntelMetadataBuffer(const IntelMetadataBuffer& imb) + :mType(imb.mType), mValue(imb.mValue), mInfo(NULL), mExtraValues(NULL), + mExtraValues_Count(imb.mExtraValues_Count), mBytes(NULL), mSize(imb.mSize) +{ + if (imb.mInfo) + mInfo = new ValueInfo(*imb.mInfo); + + if (imb.mExtraValues) + { + mExtraValues = new int32_t[mExtraValues_Count]; + memcpy(mExtraValues, imb.mExtraValues, 4 * mExtraValues_Count); + } + + if (imb.mBytes) + { + mBytes = new uint8_t[mSize]; + memcpy(mBytes, imb.mBytes, mSize); + } +} + +const IntelMetadataBuffer& IntelMetadataBuffer::operator=(const IntelMetadataBuffer& imb) +{ + mType = imb.mType; + mValue = imb.mValue; + mInfo = NULL; + mExtraValues = NULL; + mExtraValues_Count = imb.mExtraValues_Count; + mBytes = NULL; + mSize = imb.mSize; + + if (imb.mInfo) + mInfo = new ValueInfo(*imb.mInfo); + + if (imb.mExtraValues) + { + mExtraValues = new int32_t[mExtraValues_Count]; + memcpy(mExtraValues, imb.mExtraValues, 4 * mExtraValues_Count); + } + + if (imb.mBytes) + { + mBytes = new uint8_t[mSize]; + memcpy(mBytes, imb.mBytes, mSize); + } + + return *this; +} + IMB_Result IntelMetadataBuffer::GetType(MetadataBufferType& type) { type = mType; @@ -143,7 +192,7 @@ IMB_Result IntelMetadataBuffer::SetExtraValues(int32_t* values, uint32_t num) return IMB_SUCCESS; } -IMB_Result IntelMetadataBuffer::SetBytes(uint8_t* data, uint32_t size) +IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size) { if (!data || size == 0) return IMB_INVAL_PARAM; @@ -214,7 +263,12 @@ IMB_Result IntelMetadataBuffer::SetBytes(uint8_t* data, uint32_t size) return IMB_SUCCESS; } -IMB_Result IntelMetadataBuffer::GetBytes(uint8_t* &data, uint32_t& size) +IMB_Result IntelMetadataBuffer::SetBytes(uint8_t* data, uint32_t size) +{ + return UnSerialize(data, size); +} + +IMB_Result IntelMetadataBuffer::Serialize(uint8_t* &data, uint32_t& size) { if (mBytes == NULL) { @@ -253,6 +307,11 @@ IMB_Result IntelMetadataBuffer::GetBytes(uint8_t* &data, uint32_t& size) return IMB_SUCCESS; } +IMB_Result IntelMetadataBuffer::GetBytes(uint8_t* &data, uint32_t& size) +{ + return Serialize(data, size); +} + uint32_t IntelMetadataBuffer::GetMaxBufferSize() { return 256; diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h index 802ef7b..2eacc5f 100644 --- a/videoencoder/IntelMetadataBuffer.h +++ b/videoencoder/IntelMetadataBuffer.h @@ -72,6 +72,9 @@ public: IntelMetadataBuffer(MetadataBufferType type, int32_t value); //for quick generator ~IntelMetadataBuffer(); + IntelMetadataBuffer(const IntelMetadataBuffer& imb); + const IntelMetadataBuffer& operator=(const IntelMetadataBuffer& imb); + IMB_Result GetType(MetadataBufferType &type); IMB_Result SetType(MetadataBufferType type); IMB_Result GetValue(int32_t &value); @@ -81,12 +84,15 @@ public: IMB_Result GetExtraValues(int32_t* &values, uint32_t &num); IMB_Result SetExtraValues(int32_t *values, uint32_t num); - //for bytes input, also for parser + //for bytes input, also for parser, will be obsoleted 
IMB_Result SetBytes(uint8_t* data, uint32_t size); - - //for bytes output, also for generator + //for bytes output, also for generator, will be obsoleted IMB_Result GetBytes(uint8_t* &data, uint32_t& size); + //New API for bytes input/ouput, UnSerialize=SetBytes, Serialize=GetBytes + IMB_Result UnSerialize(uint8_t* data, uint32_t size); + IMB_Result Serialize(uint8_t* &data, uint32_t& size); + //Static, for get max IntelMetadataBuffer size static uint32_t GetMaxBufferSize(); diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 5922e6a..738eefa 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -197,9 +197,6 @@ Encode_Status VideoEncoderAVC::getOutput(VideoEncOutputBuffer *outBuffer) { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; bool useLocalBuffer = false; - uint32_t nalType = 0; - uint32_t nalSize = 0; - uint32_t nalOffset = 0; uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval; LOG_V("Begin\n"); @@ -283,8 +280,6 @@ Encode_Status VideoEncoderAVC::getOneNALUnit( uint32_t *nalType, uint32_t *nalOffset, uint32_t status) { uint32_t pos = 0; uint32_t zeroByteCount = 0; - uint32_t prefixLength = 0; - uint32_t leadingZeroCnt = 0; uint32_t singleByteTable[3][2] = {{1,0},{2,0},{2,3}}; uint32_t dataRemaining = 0; uint8_t *dataPtr; @@ -520,7 +515,6 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf uint32_t nalSize = 0; uint32_t nalOffset = 0; uint32_t sizeCopiedHere = 0; - uint32_t sizeToBeCopied = 0; CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf); @@ -648,7 +642,6 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(void) { Encode_Status VideoEncoderAVC::renderMaxSliceSize() { VAStatus vaStatus = VA_STATUS_SUCCESS; - Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n\n"); if (mComParams.rcMode != RATE_CONTROL_VCM) { @@ -794,12 +787,6 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { avcSeqParams.intra_idr_period = mVideoParamsAVC.idrInterval; avcSeqParams.picture_width_in_mbs = (mComParams.resolution.width + 15) / 16; avcSeqParams.picture_height_in_mbs = (mComParams.resolution.height + 15) / 16; - if((avcSeqParams.picture_width_in_mbs >=1920)|| (avcSeqParams.picture_height_in_mbs >=1080)) - { - device_info = vaQueryVendorString(mVADisplay); - if(strstr(device_info, "LEXINGTON")) - return ENCODE_INVALID_PARAMS; - } level = calcLevel (avcSeqParams.picture_width_in_mbs * avcSeqParams.picture_height_in_mbs); avcSeqParams.level_idc = level; @@ -913,7 +900,6 @@ Encode_Status VideoEncoderAVC::renderSliceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; uint32_t sliceNum = 0; - uint32_t sliceHeight = 0; uint32_t sliceIndex = 0; uint32_t sliceHeightInMB = 0; uint32_t maxSliceNum = 0; diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index b574d2c..f4df5e2 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -649,7 +649,6 @@ CLEAN_UP: Encode_Status VideoEncoderBase::prepareForOutput( VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer) { - Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; VACodedBufferSegment *vaCodedSeg = NULL; uint32_t status = 0; @@ -733,7 +732,6 @@ Encode_Status VideoEncoderBase::prepareForOutput( Encode_Status VideoEncoderBase::cleanupForOutput() { VAStatus vaStatus = VA_STATUS_SUCCESS; - Encode_Status ret = ENCODE_SUCCESS; //mCurSegment is NULL means all data has been copied out if (mCurSegment == NULL && mCodedBufferMapped) { @@ 
-1733,25 +1731,23 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { int32_t *extravalues = NULL; unsigned int extravalues_count = 0; - IntelMetadataBuffer *imb = new IntelMetadataBuffer; + IntelMetadataBuffer imb; SurfaceMap *map = NULL; if (mStoreMetaDataInBuffers.isEnabled) { //metadatabuffer mode LOG_I("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); - if (imb->SetBytes(inBuffer->data, inBuffer->size) != IMB_SUCCESS) { + if (imb.UnSerialize(inBuffer->data, inBuffer->size) != IMB_SUCCESS) { //fail to parse buffer - delete imb; return ENCODE_NO_REQUEST_DATA; } - imb->GetType(type); - imb->GetValue(value); + imb.GetType(type); + imb.GetValue(value); } else { //raw mode LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); if (! inBuffer->data || inBuffer->size == 0) { - delete imb; return ENCODE_NULL_PTR; } @@ -1767,7 +1763,6 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { LOG_I("direct find surface %d from value %x\n", map->surface, value); mCurSurface = map->surface; - delete imb; return ret; } @@ -1789,8 +1784,8 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { vinfo.s3dformat = 0xFFFFFFFF; } else { //get all info mapping needs - imb->GetValueInfo(pvinfo); - imb->GetExtraValues(extravalues, extravalues_count); + imb.GetValueInfo(pvinfo); + imb.GetExtraValues(extravalues, extravalues_count); } } else { @@ -1825,7 +1820,6 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); } else { delete map; - delete imb; LOG_E("surface mapping failed, wrong info or meet serious error\n"); return ret; } @@ -1834,7 +1828,6 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { } else { //can't map due to no info - delete imb; LOG_E("surface mapping failed, missing information\n"); return ENCODE_NO_REQUEST_DATA; } @@ -1861,8 +1854,6 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { } } - delete imb; - return ret; } diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp index 68f8741..cbe1e09 100644 --- a/videoencoder/VideoEncoderH263.cpp +++ b/videoencoder/VideoEncoderH263.cpp @@ -120,7 +120,6 @@ Encode_Status VideoEncoderH263::renderPictureParams() { Encode_Status VideoEncoderH263::renderSliceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; - uint32_t sliceNum; uint32_t sliceHeight; uint32_t sliceHeightInMB; diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp index 51068f4..d93d774 100644 --- a/videoencoder/VideoEncoderMP4.cpp +++ b/videoencoder/VideoEncoderMP4.cpp @@ -22,9 +22,7 @@ VideoEncoderMP4::VideoEncoderMP4() Encode_Status VideoEncoderMP4::getHeaderPos( uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize) { - uint8_t *buf = inBuffer; uint32_t bytesLeft = bufSize; - Encode_Status ret = ENCODE_SUCCESS; *headerSize = 0; CHECK_NULL_RETURN_IFFAIL(inBuffer); -- cgit v1.2.3 From 27ed4a76d6d4500151b1eb30338f0c83edb80f58 Mon Sep 17 00:00:00 2001 From: Elaine Wang Date: Tue, 31 Jul 2012 15:37:23 +0800 Subject: [PORT FROM R3] Change the frame skip query BZ: 47812 44763 For better performance, in async mode, libmix needs to know if frame N is skipped before frame N-1 is completely encoded. So the video driver stores the skip flag in frame N-2 instead of frame N-1. See video driver patch 47812. 
In sync mode, libmix also needs to query the last frame to check if its next frame is skipped. Signed-off-by: Elaine Wang Change-Id: If3e163cbb47bf482e7e5a447d76398aaa1b68433 Reviewed-on: http://android.intel.com:8080/61656 Reviewed-by: Wang, Elaine Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index f4df5e2..72b4778 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -402,7 +402,7 @@ Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { mKeyFrame = true; } - // Query the status of current surface + // Query the status of last surface to check if its next frame is skipped VASurfaceStatus vaSurfaceStatus; vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastSurface, &vaSurfaceStatus); CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); @@ -467,14 +467,17 @@ Encode_Status VideoEncoderBase::syncEncode(VideoEncRawBuffer *inBuffer) { vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - // Query the status of current surface - VASurfaceStatus vaSurfaceStatus; - vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurSurface, &vaSurfaceStatus); - CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); - - mPicSkipped = vaSurfaceStatus & VASurfaceSkipped; + mPicSkipped = false; + if (!mFirstFrame) { + // Query the status of last surface to check if its next frame is skipped + VASurfaceStatus vaSurfaceStatus; + vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastSurface, &vaSurfaceStatus); + CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); + mPicSkipped = vaSurfaceStatus & VASurfaceSkipped; + } - mCurSurface = 0; + mLastSurface = mCurSurface; + mCurSurface = VA_INVALID_SURFACE; mEncodedFrames ++; mFrameNum ++; -- cgit v1.2.3 From d2152628a2d4fa6084f9a779cfbb1cadf8f9d494 Mon Sep 17 00:00:00 2001 From: Manjunath Date: Tue, 14 Aug 2012 11:30:49 +0530 Subject: JB: Enabling WMA9 audio decoding support BZ: 50306 part-2 of the patchset - This patch contains changes in AsfExtractor in order to support WMA decoding. 
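For reference, a minimal standalone sketch of the format-tag-to-MIME mapping this change introduces in AsfExtractor.cpp. This is illustrative only and not part of the patch: the helper name wmaTagToMime is hypothetical, while the tag values are the standard Microsoft WAVE format tags mirrored by the WMAAudioFormats enum added below.

#include <stdint.h>
#include <stdio.h>

/* Map a WMA-family WAVE format tag to the MIME type the extractor reports. */
static const char* wmaTagToMime(uint32_t tag) {
    switch (tag) {
    case 0x0160:                   /* WMA version 1 (WAVE_FORMAT_MSAUDIO1) */
    case 0x0161:                   /* WMA version 2, i.e. WMA 7/8/9 (WAVE_FORMAT_WMAUDIO2) */
    case 0x0162:                   /* WMA 9/10 Professional (WAVE_FORMAT_WMAUDIO3X) */
    case 0x0163:                   /* WMA 9 Lossless (WAVE_FORMAT_WMAUDIO_LOSSLESS) */
        return "audio/x-ms-wma";
    case 0x000A:                   /* WMA Voice 9 */
    case 0x000B:                   /* WMA Voice 10 */
        return "audio/wma-voice";  /* reported, but flagged as unsupported */
    default:
        return 0;                  /* unknown tag: caller rejects the track */
    }
}

int main(void) {
    printf("tag 0x161 -> %s\n", wmaTagToMime(0x0161));  /* audio/x-ms-wma */
    return 0;
}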
Change-Id: Ic066f1fae0a103ad05b576780261a748bff95733 Signed-off-by: Manjunath Reviewed-on: http://android.intel.com:8080/61986 Reviewed-by: Sikkandar D, Madar Reviewed-by: Kandasamy, Muthukumar Tested-by: Gupta, ArvindX K Reviewed-by: P C, SreekanthX Reviewed-by: buildbot Tested-by: buildbot --- frameworks/asf_extractor/AsfExtractor.cpp | 57 ++++++++++++++++++++++--------- frameworks/asf_extractor/AsfExtractor.h | 2 +- frameworks/asf_extractor/MetaDataExt.h | 2 +- 3 files changed, 42 insertions(+), 19 deletions(-) diff --git a/frameworks/asf_extractor/AsfExtractor.cpp b/frameworks/asf_extractor/AsfExtractor.cpp index 9a10581..1bdf0a0 100644 --- a/frameworks/asf_extractor/AsfExtractor.cpp +++ b/frameworks/asf_extractor/AsfExtractor.cpp @@ -43,6 +43,17 @@ namespace android { +// The audio format tags that represent the input categories supported +// by the Windows Media Audio decoder, don't change it +enum WMAAudioFormats { + WAVE_FORMAT_MSAUDIO1 = 0x160, + WAVE_FORMAT_WMAUDIO2 = 0x161, + WAVE_FORMAT_WMAUDIO3X = 0x162, + WAVE_FORMAT_WMAUDIO_LOSSLESS = 0x163, + WAVE_FORMAT_WMAVOICE9 = 0x000A, + WAVE_FORMAT_WMAVOICE10 = 0x000B, +}; + class ASFSource : public MediaSource { public: ASFSource(const sp &extractor, int trackIndex) @@ -173,9 +184,11 @@ status_t AsfExtractor::read( return BAD_VALUE; } + int64_t seekTimeUs; + MediaSource::ReadOptions::SeekMode mode; if (!mParser->hasVideo() || (mParser->hasVideo() && mHasIndexObject)) { - if (options != NULL) { - status_t err = seek_l(track, options); + if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) { + status_t err = seek_l(track, seekTimeUs, mode); if (err != OK) { return err; } @@ -273,9 +286,13 @@ status_t AsfExtractor::initialize() { } } - if (mParser->hasVideo() || mParser->hasAudio()) { + if (mParser->hasVideo()) { ALOGV("MEDIA_MIMETYPE_CONTAINER_ASF"); mFileMetaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_ASF); + } else if (mParser->hasAudio() && mParser->getAudioInfo()->codecID >= WAVE_FORMAT_MSAUDIO1 && + mParser->getAudioInfo()->codecID <= WAVE_FORMAT_WMAUDIO_LOSSLESS) { + LOGV("MEDIA_MIMETYPE_AUDIO_WMA", mParser->getAudioInfo()->codecID); + mFileMetaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_WMA); } else { ALOGE("Content does not have neither audio nor video."); return ERROR_UNSUPPORTED; @@ -354,20 +371,20 @@ static const char* FourCC2MIME(uint32_t fourcc) { static const char* CodecID2MIME(uint32_t codecID) { switch (codecID) { // WMA version 1 - case 0x0160: + case WAVE_FORMAT_MSAUDIO1: // WMA version 2 (7, 8, 9 series) - case 0x0161: + case WAVE_FORMAT_WMAUDIO2: // WMA 9/10 profressional (WMA version 3) - case 0x0162: + case WAVE_FORMAT_WMAUDIO3X: return MEDIA_MIMETYPE_AUDIO_WMA; // WMA 9 lossless - case 0x0163: + case WAVE_FORMAT_WMAUDIO_LOSSLESS: //return MEDIA_MIMETYPE_AUDIO_WMA_LOSSLESS; return MEDIA_MIMETYPE_AUDIO_WMA; // WMA voice 9 - case 0x000A: + case WAVE_FORMAT_WMAVOICE9: // WMA voice 10 - case 0x000B: + case WAVE_FORMAT_WMAVOICE10: ALOGW("WMA voice 9/10 is not supported."); return "audio/wma-voice"; default: @@ -378,8 +395,7 @@ static const char* CodecID2MIME(uint32_t codecID) { status_t AsfExtractor::setupTracks() { - ALOGW("Audio is temporarily disabled!!!!!!!!!!!!!!"); - AsfAudioStreamInfo* audioInfo = NULL;//mParser->getAudioInfo(); + AsfAudioStreamInfo* audioInfo = mParser->getAudioInfo(); AsfVideoStreamInfo* videoInfo = mParser->getVideoInfo(); Track* track; while (audioInfo || videoInfo) { @@ -401,10 +417,22 @@ status_t AsfExtractor::setupTracks() { track->bufferPool = new 
MediaBufferPool; if (audioInfo) { + LOGV("streamNumber = %d\n, encryptedContentFlag= %d\n, timeOffset = %lld\n, + codecID = %d\n, numChannels=%d\n, sampleRate=%d\n, avgBitRate = %d\n, + blockAlignment =%d\n, bitsPerSample=%d\n, codecDataSize=%d\n", + audioInfo->streamNumber, audioInfo->encryptedContentFlag, + audioInfo->timeOffset, audioInfo->codecID, audioInfo->numChannels, + audioInfo->sampleRate, audioInfo->avgByteRate*8, audioInfo->blockAlignment, + audioInfo->bitsPerSample, audioInfo->codecDataSize); + track->streamNumber = audioInfo->streamNumber; track->encrypted = audioInfo->encryptedContentFlag; track->meta->setInt32(kKeyChannelCount, audioInfo->numChannels); track->meta->setInt32(kKeySampleRate, audioInfo->sampleRate); + track->meta->setInt32(kKeyWmaBlockAlign, audioInfo->blockAlignment); + track->meta->setInt32(kKeyBitPerSample, audioInfo->bitsPerSample); + track->meta->setInt32(kKeyBitRate, audioInfo->avgByteRate*8); + track->meta->setInt32(kKeyWmaFormatTag, audioInfo->codecID); if (audioInfo->codecDataSize) { track->meta->setData( @@ -452,7 +480,7 @@ status_t AsfExtractor::setupTracks() { return OK; } -status_t AsfExtractor::seek_l(Track* track, const MediaSource::ReadOptions *options) { +status_t AsfExtractor::seek_l(Track* track, int64_t seekTimeUs, MediaSource::ReadOptions::SeekMode mode) { Mutex::Autolock lockSeek(mReadLock); // It is expected seeking will happen on all the tracks with the same seeking options. @@ -470,11 +498,6 @@ status_t AsfExtractor::seek_l(Track* track, const MediaSource::ReadOptions *opti track->seekCompleted = false; return OK; } - int64_t seekTimeUs; - MediaSource::ReadOptions::SeekMode mode; - if(!options->getSeekTo(&seekTimeUs,&mode)) { - return OK; - } uint64_t targetSampleTimeUs = 0; diff --git a/frameworks/asf_extractor/AsfExtractor.h b/frameworks/asf_extractor/AsfExtractor.h index adb8e9d..4e17083 100644 --- a/frameworks/asf_extractor/AsfExtractor.h +++ b/frameworks/asf_extractor/AsfExtractor.h @@ -105,7 +105,7 @@ private: status_t setupTracks(); inline Track* getTrackByTrackIndex(int index); inline Track* getTrackByStreamNumber(int stream); - status_t seek_l(Track* track, const MediaSource::ReadOptions *options); + status_t seek_l(Track* track, int64_t seekTimeUs, MediaSource::ReadOptions::SeekMode mode); status_t read_l(Track *track, MediaBuffer **buffer); status_t readPacket(); }; diff --git a/frameworks/asf_extractor/MetaDataExt.h b/frameworks/asf_extractor/MetaDataExt.h index 6476b9b..bee1431 100644 --- a/frameworks/asf_extractor/MetaDataExt.h +++ b/frameworks/asf_extractor/MetaDataExt.h @@ -22,7 +22,7 @@ namespace android { -#define MEDIA_MIMETYPE_AUDIO_WMA "audio/wma" +#define MEDIA_MIMETYPE_AUDIO_WMA "audio/x-ms-wma" #define MEDIA_MIMETYPE_AUDIO_AC3 "audio/ac3" #define MEDIA_MIMETYPE_VIDEO_WMV "video/wmv" #define MEDIA_MIMETYPE_CONTAINER_ASF "video/x-ms-asf" -- cgit v1.2.3 From 62215b5d8cff0699444eb4f7207fce19ded336dd Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Tue, 14 Aug 2012 22:59:01 +0800 Subject: Enable Video Editor NV12 Processing on JB (BSP) BZ: 51587 In the video editor's original design, only IYUV420 processing is supported. But since the video format of the Intel HW encoder and decoder is NV12, we have to convert NV12 to IYUV420 on the decoder side, and IYUV420 to NV12 on the encoder side. This is a bottleneck for the video editor, which slows down the video export process. In this patch, we add NV12 support to the video editor code, which eliminates the color conversion process. This significantly accelerates video export. 
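For context, a minimal sketch (editorial, not part of the patch) of why the NV12 path reduces to a single copy, assuming a packed frame whose stride equals its width. An NV12 frame is a w*h Y plane followed by a w*h/2 interleaved UV plane, i.e. w*h*3/2 contiguous bytes, which is the size used by the memcpy in the VIDEOEDITOR_INTEL_NV12_VERSION branches below; I420 keeps separate U and V planes, so converting costs a per-pixel de-interleave.

#include <stdint.h>
#include <string.h>

/* NV12 -> NV12: what the new passthrough branch amounts to. */
static void copy_nv12(uint8_t *dst, const uint8_t *src, size_t w, size_t h) {
    memcpy(dst, src, w * h * 3 / 2);   /* Y plane + interleaved UV plane */
}

/* NV12 -> I420: the conversion the patch eliminates. */
static void nv12_to_i420(uint8_t *dst, const uint8_t *src, size_t w, size_t h) {
    memcpy(dst, src, w * h);           /* Y plane is laid out identically */
    const uint8_t *uv = src + w * h;   /* interleaved UVUVUV... */
    uint8_t *u = dst + w * h;
    uint8_t *v = u + w * h / 4;        /* 4:2:0 chroma: (w/2)*(h/2) samples per plane */
    for (size_t i = 0; i < w * h / 4; i++) {
        u[i] = uv[2 * i];              /* even bytes carry U */
        v[i] = uv[2 * i + 1];          /* odd bytes carry V */
    }
}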
Change-Id: Iabd328de127b3a3b7224d6e6614666a00ec29df6 Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/62018 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- frameworks/libI420colorconvert/Android.mk | 4 + frameworks/libI420colorconvert/ColorConvert.cpp | 14 +- frameworks/videoedit/lvpp/Android.mk | 55 + frameworks/videoedit/lvpp/VideoEditorToolsNV12.c | 1590 +++++++++ frameworks/videoedit/lvpp/VideoEditorToolsNV12.h | 51 + .../stagefrightshells/VideoEditorBuffer.c | 7 +- .../stagefrightshells/VideoEditorVideoDecoder.cpp | 23 +- .../stagefrightshells/VideoEditorVideoEncoder.cpp | 46 +- frameworks/videoedit/vss/Android.mk | 67 + frameworks/videoedit/vss/EditVideo_NV12.h | 39 + frameworks/videoedit/vss/M4AIR_API_NV12.c | 1181 +++++++ frameworks/videoedit/vss/M4AIR_API_NV12.h | 112 + frameworks/videoedit/vss/M4MCS_NV12.h | 22 + .../videoedit/vss/M4MCS_VideoPreProcessing_NV12.c | 396 +++ .../videoedit/vss/M4VSS3GPP_EditVideo_NV12.c | 640 ++++ frameworks/videoedit/vss/M4xVSS_NV12.h | 63 + frameworks/videoedit/vss/M4xVSS_internal_NV12.c | 3512 ++++++++++++++++++++ 17 files changed, 7760 insertions(+), 62 deletions(-) create mode 100644 frameworks/videoedit/lvpp/Android.mk create mode 100644 frameworks/videoedit/lvpp/VideoEditorToolsNV12.c create mode 100644 frameworks/videoedit/lvpp/VideoEditorToolsNV12.h create mode 100644 frameworks/videoedit/vss/Android.mk create mode 100644 frameworks/videoedit/vss/EditVideo_NV12.h create mode 100644 frameworks/videoedit/vss/M4AIR_API_NV12.c create mode 100644 frameworks/videoedit/vss/M4AIR_API_NV12.h create mode 100644 frameworks/videoedit/vss/M4MCS_NV12.h create mode 100644 frameworks/videoedit/vss/M4MCS_VideoPreProcessing_NV12.c create mode 100644 frameworks/videoedit/vss/M4VSS3GPP_EditVideo_NV12.c create mode 100644 frameworks/videoedit/vss/M4xVSS_NV12.h create mode 100644 frameworks/videoedit/vss/M4xVSS_internal_NV12.c diff --git a/frameworks/libI420colorconvert/Android.mk b/frameworks/libI420colorconvert/Android.mk index ee84680..5abf9bc 100644 --- a/frameworks/libI420colorconvert/Android.mk +++ b/frameworks/libI420colorconvert/Android.mk @@ -14,6 +14,10 @@ LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libI420colorconvert +ifeq ($(USE_VIDEOEDITOR_INTEL_NV12_VERSION),true) +LOCAL_CFLAGS += -DVIDEOEDITOR_INTEL_NV12_VERSION +endif + include $(BUILD_SHARED_LIBRARY) diff --git a/frameworks/libI420colorconvert/ColorConvert.cpp b/frameworks/libI420colorconvert/ColorConvert.cpp index 5f2c8a1..4b5b343 100644 --- a/frameworks/libI420colorconvert/ColorConvert.cpp +++ b/frameworks/libI420colorconvert/ColorConvert.cpp @@ -29,10 +29,11 @@ static int convertDecoderOutputToI420( srcWidth * srcRect.top + srcRect.left; const uint8_t *pSrc_uv = (const uint8_t *)pSrc_y + srcWidth * (srcHeight - srcRect.top / 2); - int dstWidth = srcRect.right - srcRect.left + 1; int dstHeight = srcRect.bottom - srcRect.top + 1; size_t dst_y_size = dstWidth * dstHeight; + +#ifndef VIDEOEDITOR_INTEL_NV12_VERSION size_t dst_uv_stride = dstWidth / 2; size_t dst_uv_size = dstWidth / 2 * dstHeight / 2; uint8_t *pDst_y = (uint8_t *)dstBits; @@ -55,6 +56,11 @@ static int convertDecoderOutputToI420( pDst_u += dst_uv_stride; pDst_v += dst_uv_stride; } +#else + uint8_t *pDst_y = (uint8_t *)dstBits; + memcpy(pDst_y,pSrc_y,dst_y_size*3/2); +#endif + return 0; } @@ -68,6 +74,8 @@ static int convertI420ToEncoderInput( void* dstBits) { uint8_t *pSrc_y = (uint8_t*) srcBits; uint8_t *pDst_y = (uint8_t*) dstBits; + +#ifndef 
VIDEOEDITOR_INTEL_NV12_VERSION for(int i=0; i < srcHeight; i++) { memcpy(pDst_y, pSrc_y, srcWidth); pSrc_y += srcWidth; @@ -86,6 +94,10 @@ static int convertI420ToEncoderInput( pSrc_u += srcWidth / 2; pSrc_v += srcWidth / 2; } +#else + memcpy(pDst_y,pSrc_y,dstWidth*dstHeight*3/2); +#endif + return 0; } diff --git a/frameworks/videoedit/lvpp/Android.mk b/frameworks/videoedit/lvpp/Android.mk new file mode 100644 index 0000000..1a70db4 --- /dev/null +++ b/frameworks/videoedit/lvpp/Android.mk @@ -0,0 +1,55 @@ +# +# Copyright (C) 2011 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_MODULE:= liblvpp_intel + +LOCAL_COPY_HEADERS_TO := videoeditornv12 + +LOCAL_COPY_HEADERS := VideoEditorToolsNV12.h + +LOCAL_SRC_FILES:= \ + VideoEditorToolsNV12.c + +LOCAL_MODULE_TAGS := optional + + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + libutils \ + libvideoeditor_osal \ + + +LOCAL_C_INCLUDES += \ + $(TOP)/frameworks/av/libvideoeditor/osal/inc \ + $(TOP)/frameworks/av/libvideoeditor/vss/common/inc \ + $(TOP)/frameworks/av/libvideoeditor/vss/mcs/inc \ + $(TOP)/frameworks/av/libvideoeditor/vss/inc \ + $(TOP)/frameworks/av/libvideoeditor/vss/stagefrightshells/inc \ + $(TOP)/frameworks/av/libvideoeditor/lvpp \ + + +LOCAL_SHARED_LIBRARIES += libdl + +# All of the shared libraries we link against. +LOCAL_LDLIBS := \ + -lpthread -ldl + +include $(BUILD_STATIC_LIBRARY) + diff --git a/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c b/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c new file mode 100644 index 0000000..1cc86ab --- /dev/null +++ b/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c @@ -0,0 +1,1590 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_NDEBUG 1 +#define LOG_TAG "VideoEditorToolsNV12" +#include + +#include "VideoEditorToolsNV12.h" +#define M4VIFI_ALLOC_FAILURE 10 + +static M4VIFI_UInt8 M4VIFI_SemiplanarYUV420toYUV420_X86(void *user_data, + M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut ) +{ + + M4VIFI_UInt32 i; + M4VIFI_UInt8 *p_buf_src, *p_buf_dest, *p_buf_src_u, *p_buf_src_v; + M4VIFI_UInt8 *p_buf_dest_u,*p_buf_dest_v,*p_buf_src_uv; + M4VIFI_UInt8 return_code = M4VIFI_OK; + + /* the filter is implemented with the assumption that the width is equal to stride */ + if(PlaneIn[0].u_width != PlaneIn[0].u_stride) + return M4VIFI_INVALID_PARAM; + + /* The input Y Plane is the same as the output Y Plane */ + p_buf_src = &(PlaneIn[0].pac_data[PlaneIn[0].u_topleft]); + p_buf_dest = &(PlaneOut[0].pac_data[PlaneOut[0].u_topleft]); + memcpy((void *)p_buf_dest,(void *)p_buf_src , + PlaneOut[0].u_width * PlaneOut[0].u_height); + + /* The U and V components are planar. The need to be made interleaved */ + p_buf_src_uv = &(PlaneIn[1].pac_data[PlaneIn[1].u_topleft]); + p_buf_dest_u = &(PlaneOut[1].pac_data[PlaneOut[1].u_topleft]); + p_buf_dest_v = &(PlaneOut[2].pac_data[PlaneOut[2].u_topleft]); + + for(i = 0; i < PlaneOut[1].u_width*PlaneOut[1].u_height; i++) + { + *p_buf_dest_u++ = *p_buf_src_uv++; + *p_buf_dest_v++ = *p_buf_src_uv++; + } + return return_code; +} + +/** + *********************************************************************************************** + * M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toYUV420_X86(void *pUserData, M4VIFI_ImagePlane *pPlaneIn, + * M4VIFI_ImagePlane *pPlaneOut) + * @author David Dana (PHILIPS Software) + * @brief Resizes YUV420 Planar plane. + * @note Basic structure of the function + * Loop on each row (step 2) + * Loop on each column (step 2) + * Get four Y samples and 1 U & V sample + * Resize the Y with corresponing U and V samples + * Place the YUV in the ouput plane + * end loop column + * end loop row + * For resizing bilinear interpolation linearly interpolates along + * each row, and then uses that result in a linear interpolation down each column. + * Each estimated pixel in the output image is a weighted + * combination of its four neighbours. The ratio of compression + * or dilatation is estimated using input and output sizes. 
+ * @param pUserData: (IN) User Data + * @param pPlaneIn: (IN) Pointer to YUV420 (Planar) plane buffer + * @param pPlaneOut: (OUT) Pointer to YUV420 (Planar) plane + * @return M4VIFI_OK: there is no error + * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: Error in height + * @return M4VIFI_ILLEGAL_FRAME_WIDTH: Error in width + *********************************************************************************************** +*/ + +static M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toYUV420_X86(void *pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) +{ + M4VIFI_UInt8 *pu8_data_in, *pu8_data_out, *pu8dum; + M4VIFI_UInt32 u32_plane; + M4VIFI_UInt32 u32_width_in, u32_width_out, u32_height_in, u32_height_out; + M4VIFI_UInt32 u32_stride_in, u32_stride_out; + M4VIFI_UInt32 u32_x_inc, u32_y_inc; + M4VIFI_UInt32 u32_x_accum, u32_y_accum, u32_x_accum_start; + M4VIFI_UInt32 u32_width, u32_height; + M4VIFI_UInt32 u32_y_frac; + M4VIFI_UInt32 u32_x_frac; + M4VIFI_UInt32 u32_temp_value; + M4VIFI_UInt8 *pu8_src_top; + M4VIFI_UInt8 *pu8_src_bottom; + + M4VIFI_UInt8 u8Wflag = 0; + M4VIFI_UInt8 u8Hflag = 0; + M4VIFI_UInt32 loop = 0; + + /* + If input width is equal to output width and input height equal to + output height then M4VIFI_YUV420toYUV420 is called. + */ + if ((pPlaneIn[0].u_height == pPlaneOut[0].u_height) && + (pPlaneIn[0].u_width == pPlaneOut[0].u_width)) + { + return M4VIFI_YUV420toYUV420(pUserData, pPlaneIn, pPlaneOut); + } + + /* Check for the YUV width and height are even */ + if ((IS_EVEN(pPlaneIn[0].u_height) == FALSE) || + (IS_EVEN(pPlaneOut[0].u_height) == FALSE)) + { + return M4VIFI_ILLEGAL_FRAME_HEIGHT; + } + + if ((IS_EVEN(pPlaneIn[0].u_width) == FALSE) || + (IS_EVEN(pPlaneOut[0].u_width) == FALSE)) + { + return M4VIFI_ILLEGAL_FRAME_WIDTH; + } + + /* Loop on planes */ + for(u32_plane = 0;u32_plane < PLANES;u32_plane++) + { + /* Set the working pointers at the beginning of the input/output data field */ + pu8_data_in = pPlaneIn[u32_plane].pac_data + pPlaneIn[u32_plane].u_topleft; + pu8_data_out = pPlaneOut[u32_plane].pac_data + pPlaneOut[u32_plane].u_topleft; + + /* Get the memory jump corresponding to a row jump */ + u32_stride_in = pPlaneIn[u32_plane].u_stride; + u32_stride_out = pPlaneOut[u32_plane].u_stride; + + /* Set the bounds of the active image */ + u32_width_in = pPlaneIn[u32_plane].u_width; + u32_height_in = pPlaneIn[u32_plane].u_height; + + u32_width_out = pPlaneOut[u32_plane].u_width; + u32_height_out = pPlaneOut[u32_plane].u_height; + + /* + For the case , width_out = width_in , set the flag to avoid + accessing one column beyond the input width.In this case the last + column is replicated for processing + */ + if (u32_width_out == u32_width_in) { + u32_width_out = u32_width_out-1; + u8Wflag = 1; + } + + /* Compute horizontal ratio between src and destination width.*/ + if (u32_width_out >= u32_width_in) + { + u32_x_inc = ((u32_width_in-1) * MAX_SHORT) / (u32_width_out-1); + } + else + { + u32_x_inc = (u32_width_in * MAX_SHORT) / (u32_width_out); + } + + /* + For the case , height_out = height_in , set the flag to avoid + accessing one row beyond the input height.In this case the last + row is replicated for processing + */ + if (u32_height_out == u32_height_in) { + u32_height_out = u32_height_out-1; + u8Hflag = 1; + } + + /* Compute vertical ratio between src and destination height.*/ + if (u32_height_out >= u32_height_in) + { + u32_y_inc = ((u32_height_in - 1) * MAX_SHORT) / (u32_height_out-1); + } + else + { + u32_y_inc = (u32_height_in * MAX_SHORT) / 
(u32_height_out); + } + + /* + Calculate initial accumulator value : u32_y_accum_start. + u32_y_accum_start is coded on 15 bits, and represents a value + between 0 and 0.5 + */ + if (u32_y_inc >= MAX_SHORT) + { + /* + Keep the fractionnal part, assimung that integer part is coded + on the 16 high bits and the fractional on the 15 low bits + */ + u32_y_accum = u32_y_inc & 0xffff; + + if (!u32_y_accum) + { + u32_y_accum = MAX_SHORT; + } + + u32_y_accum >>= 1; + } + else + { + u32_y_accum = 0; + } + + /* + Calculate initial accumulator value : u32_x_accum_start. + u32_x_accum_start is coded on 15 bits, and represents a value + between 0 and 0.5 + */ + if (u32_x_inc >= MAX_SHORT) + { + u32_x_accum_start = u32_x_inc & 0xffff; + + if (!u32_x_accum_start) + { + u32_x_accum_start = MAX_SHORT; + } + + u32_x_accum_start >>= 1; + } + else + { + u32_x_accum_start = 0; + } + + u32_height = u32_height_out; + + /* + Bilinear interpolation linearly interpolates along each row, and + then uses that result in a linear interpolation donw each column. + Each estimated pixel in the output image is a weighted combination + of its four neighbours according to the formula: + F(p',q')=f(p,q)R(-a)R(b)+f(p,q-1)R(-a)R(b-1)+f(p+1,q)R(1-a)R(b)+ + f(p+&,q+1)R(1-a)R(b-1) with R(x) = / x+1 -1 =< x =< 0 \ 1-x + 0 =< x =< 1 and a (resp. b)weighting coefficient is the distance + from the nearest neighbor in the p (resp. q) direction + */ + + do { /* Scan all the row */ + + /* Vertical weight factor */ + u32_y_frac = (u32_y_accum>>12)&15; + + /* Reinit accumulator */ + u32_x_accum = u32_x_accum_start; + + u32_width = u32_width_out; + + do { /* Scan along each row */ + pu8_src_top = pu8_data_in + (u32_x_accum >> 16); + pu8_src_bottom = pu8_src_top + u32_stride_in; + u32_x_frac = (u32_x_accum >> 12)&15; /* Horizontal weight factor */ + + /* Weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + + pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[0]*(16-u32_x_frac) + + pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8); + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update horizontal accumulator */ + u32_x_accum += u32_x_inc; + } while(--u32_width); + + /* + This u8Wflag flag gets in to effect if input and output + width is same, and height may be different. So previous + pixel is replicated here + */ + if (u8Wflag) { + *pu8_data_out = (M4VIFI_UInt8)u32_temp_value; + } + + pu8dum = (pu8_data_out-u32_width_out); + pu8_data_out = pu8_data_out + u32_stride_out - u32_width_out; + + /* Update vertical accumulator */ + u32_y_accum += u32_y_inc; + if (u32_y_accum>>16) { + pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * u32_stride_in; + u32_y_accum &= 0xffff; + } + } while(--u32_height); + + /* + This u8Hflag flag gets in to effect if input and output height + is same, and width may be different. So previous pixel row is + replicated here + */ + if (u8Hflag) { + for(loop =0; loop < (u32_width_out+u8Wflag); loop++) { + *pu8_data_out++ = (M4VIFI_UInt8)*pu8dum++; + } + } + } + + return M4VIFI_OK; +} + +/** + ********************************************************************************************* + * M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toBGR565_X86(void *pContext, M4VIFI_ImagePlane *pPlaneIn, + * M4VIFI_ImagePlane *pPlaneOut) + * @brief Resize YUV420 plane and converts to BGR565 with +90 rotation. 
+ * @note Basic sturture of the function + * Loop on each row (step 2) + * Loop on each column (step 2) + * Get four Y samples and 1 u & V sample + * Resize the Y with corresponing U and V samples + * Compute the four corresponding R G B values + * Place the R G B in the ouput plane in rotated fashion + * end loop column + * end loop row + * For resizing bilinear interpolation linearly interpolates along + * each row, and then uses that result in a linear interpolation down each column. + * Each estimated pixel in the output image is a weighted + * combination of its four neighbours. The ratio of compression + * or dilatation is estimated using input and output sizes. + * @param pPlaneIn: (IN) Pointer to YUV plane buffer + * @param pContext: (IN) Context Pointer + * @param pPlaneOut: (OUT) Pointer to BGR565 Plane + * @return M4VIFI_OK: there is no error + * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: YUV Plane height is ODD + * @return M4VIFI_ILLEGAL_FRAME_WIDTH: YUV Plane width is ODD + ********************************************************************************************* +*/ +static M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toBGR565_X86(void* pContext, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) +{ + M4VIFI_UInt8 *pu8_data_in[PLANES], *pu8_data_in1[PLANES],*pu8_data_out; + M4VIFI_UInt32 *pu32_rgb_data_current, *pu32_rgb_data_next, *pu32_rgb_data_start; + + M4VIFI_UInt32 u32_width_in[PLANES], u32_width_out, u32_height_in[PLANES], u32_height_out; + M4VIFI_UInt32 u32_stride_in[PLANES]; + M4VIFI_UInt32 u32_stride_out, u32_stride2_out, u32_width2_RGB, u32_height2_RGB; + M4VIFI_UInt32 u32_x_inc[PLANES], u32_y_inc[PLANES]; + M4VIFI_UInt32 u32_x_accum_Y, u32_x_accum_U, u32_x_accum_start; + M4VIFI_UInt32 u32_y_accum_Y, u32_y_accum_U; + M4VIFI_UInt32 u32_x_frac_Y, u32_x_frac_U, u32_y_frac_Y,u32_y_frac_U; + M4VIFI_Int32 U_32, V_32, Y_32, Yval_32; + M4VIFI_UInt8 u8_Red, u8_Green, u8_Blue; + M4VIFI_UInt32 u32_row, u32_col; + + M4VIFI_UInt32 u32_plane; + M4VIFI_UInt32 u32_rgb_temp1, u32_rgb_temp2; + M4VIFI_UInt32 u32_rgb_temp3,u32_rgb_temp4; + M4VIFI_UInt32 u32_check_size; + + M4VIFI_UInt8 *pu8_src_top_Y,*pu8_src_top_U,*pu8_src_top_V ; + M4VIFI_UInt8 *pu8_src_bottom_Y, *pu8_src_bottom_U, *pu8_src_bottom_V; + + /* Check for the YUV width and height are even */ + u32_check_size = IS_EVEN(pPlaneIn[0].u_height); + if( u32_check_size == FALSE ) + { + return M4VIFI_ILLEGAL_FRAME_HEIGHT; + } + u32_check_size = IS_EVEN(pPlaneIn[0].u_width); + if (u32_check_size == FALSE ) + { + return M4VIFI_ILLEGAL_FRAME_WIDTH; + + } + /* Make the ouput width and height as even */ + pPlaneOut->u_height = pPlaneOut->u_height & 0xFFFFFFFE; + pPlaneOut->u_width = pPlaneOut->u_width & 0xFFFFFFFE; + pPlaneOut->u_stride = pPlaneOut->u_stride & 0xFFFFFFFC; + + /* Assignment of output pointer */ + pu8_data_out = pPlaneOut->pac_data + pPlaneOut->u_topleft; + /* Assignment of output width(rotated) */ + u32_width_out = pPlaneOut->u_width; + /* Assignment of output height(rotated) */ + u32_height_out = pPlaneOut->u_height; + + u32_width2_RGB = pPlaneOut->u_width >> 1; + u32_height2_RGB = pPlaneOut->u_height >> 1; + + u32_stride_out = pPlaneOut->u_stride >> 1; + u32_stride2_out = pPlaneOut->u_stride >> 2; + + for(u32_plane = 0; u32_plane < PLANES; u32_plane++) + { + /* Set the working pointers at the beginning of the input/output data field */ + pu8_data_in[u32_plane] = pPlaneIn[u32_plane].pac_data + pPlaneIn[u32_plane].u_topleft; + + /* Get the memory jump corresponding to a row jump */ + u32_stride_in[u32_plane] = 
pPlaneIn[u32_plane].u_stride; + + /* Set the bounds of the active image */ + u32_width_in[u32_plane] = pPlaneIn[u32_plane].u_width; + u32_height_in[u32_plane] = pPlaneIn[u32_plane].u_height; + } + /* Compute horizontal ratio between src and destination width for Y Plane. */ + if (u32_width_out >= u32_width_in[YPlane]) + { + u32_x_inc[YPlane] = ((u32_width_in[YPlane]-1) * MAX_SHORT) / (u32_width_out-1); + } + else + { + u32_x_inc[YPlane] = (u32_width_in[YPlane] * MAX_SHORT) / (u32_width_out); + } + + /* Compute vertical ratio between src and destination height for Y Plane.*/ + if (u32_height_out >= u32_height_in[YPlane]) + { + u32_y_inc[YPlane] = ((u32_height_in[YPlane]-1) * MAX_SHORT) / (u32_height_out-1); + } + else + { + u32_y_inc[YPlane] = (u32_height_in[YPlane] * MAX_SHORT) / (u32_height_out); + } + + /* Compute horizontal ratio between src and destination width for U and V Planes. */ + if (u32_width2_RGB >= u32_width_in[UPlane]) + { + u32_x_inc[UPlane] = ((u32_width_in[UPlane]-1) * MAX_SHORT) / (u32_width2_RGB-1); + } + else + { + u32_x_inc[UPlane] = (u32_width_in[UPlane] * MAX_SHORT) / (u32_width2_RGB); + } + + /* Compute vertical ratio between src and destination height for U and V Planes. */ + + if (u32_height2_RGB >= u32_height_in[UPlane]) + { + u32_y_inc[UPlane] = ((u32_height_in[UPlane]-1) * MAX_SHORT) / (u32_height2_RGB-1); + } + else + { + u32_y_inc[UPlane] = (u32_height_in[UPlane] * MAX_SHORT) / (u32_height2_RGB); + } + + u32_y_inc[VPlane] = u32_y_inc[UPlane]; + u32_x_inc[VPlane] = u32_x_inc[UPlane]; + + /* + Calculate initial accumulator value : u32_y_accum_start. + u32_y_accum_start is coded on 15 bits,and represents a value between 0 and 0.5 + */ + if (u32_y_inc[YPlane] > MAX_SHORT) + { + /* + Keep the fractionnal part, assimung that integer part is coded on the 16 high bits, + and the fractionnal on the 15 low bits + */ + u32_y_accum_Y = u32_y_inc[YPlane] & 0xffff; + u32_y_accum_U = u32_y_inc[UPlane] & 0xffff; + + if (!u32_y_accum_Y) + { + u32_y_accum_Y = MAX_SHORT; + u32_y_accum_U = MAX_SHORT; + } + u32_y_accum_Y >>= 1; + u32_y_accum_U >>= 1; + } + else + { + u32_y_accum_Y = 0; + u32_y_accum_U = 0; + + } + + /* + Calculate initial accumulator value : u32_x_accum_start. + u32_x_accum_start is coded on 15 bits, and represents a value between 0 and 0.5 + */ + if (u32_x_inc[YPlane] > MAX_SHORT) + { + u32_x_accum_start = u32_x_inc[YPlane] & 0xffff; + + if (!u32_x_accum_start) + { + u32_x_accum_start = MAX_SHORT; + } + + u32_x_accum_start >>= 1; + } + else + { + u32_x_accum_start = 0; + } + + pu32_rgb_data_start = (M4VIFI_UInt32*)pu8_data_out; + + /* + Bilinear interpolation linearly interpolates along each row, and then uses that + result in a linear interpolation donw each column. Each estimated pixel in the + output image is a weighted combination of its four neighbours according to the formula : + F(p',q')=f(p,q)R(-a)R(b)+f(p,q-1)R(-a)R(b-1)+f(p+1,q)R(1-a)R(b)+f(p+&,q+1)R(1-a)R(b-1) + with R(x) = / x+1 -1 =< x =< 0 \ 1-x 0 =< x =< 1 and a (resp. b) weighting coefficient + is the distance from the nearest neighbor in the p (resp. 
q) direction + */ + for (u32_row = u32_height_out; u32_row != 0; u32_row -= 2) + { + u32_x_accum_Y = u32_x_accum_start; + u32_x_accum_U = u32_x_accum_start; + + /* Vertical weight factor */ + u32_y_frac_Y = (u32_y_accum_Y >> 12) & 15; + u32_y_frac_U = (u32_y_accum_U >> 12) & 15; + + /* RGB current line position pointer */ + pu32_rgb_data_current = pu32_rgb_data_start ; + + /* RGB next line position pointer */ + pu32_rgb_data_next = pu32_rgb_data_current + (u32_stride2_out); + + /* Y Plane next row pointer */ + pu8_data_in1[YPlane] = pu8_data_in[YPlane]; + + u32_rgb_temp3 = u32_y_accum_Y + (u32_y_inc[YPlane]); + if (u32_rgb_temp3 >> 16) + { + pu8_data_in1[YPlane] = pu8_data_in[YPlane] + + (u32_rgb_temp3 >> 16) * (u32_stride_in[YPlane]); + u32_rgb_temp3 &= 0xffff; + } + u32_rgb_temp4 = (u32_rgb_temp3 >> 12) & 15; + + for (u32_col = u32_width_out; u32_col != 0; u32_col -= 2) + { + + /* Input Y plane elements */ + pu8_src_top_Y = pu8_data_in[YPlane] + (u32_x_accum_Y >> 16); + pu8_src_bottom_Y = pu8_src_top_Y + u32_stride_in[YPlane]; + + /* Input U Plane elements */ + pu8_src_top_U = pu8_data_in[UPlane] + (u32_x_accum_U >> 16); + pu8_src_bottom_U = pu8_src_top_U + u32_stride_in[UPlane]; + + pu8_src_top_V = pu8_data_in[VPlane] + (u32_x_accum_U >> 16); + pu8_src_bottom_V = pu8_src_top_V + u32_stride_in[VPlane]; + + /* Horizontal weight factor for Y plane */ + u32_x_frac_Y = (u32_x_accum_Y >> 12)&15; + /* Horizontal weight factor for U and V planes */ + u32_x_frac_U = (u32_x_accum_U >> 12)&15; + + /* Weighted combination */ + U_32 = (((pu8_src_top_U[0]*(16-u32_x_frac_U) + pu8_src_top_U[1]*u32_x_frac_U) + *(16-u32_y_frac_U) + (pu8_src_bottom_U[0]*(16-u32_x_frac_U) + + pu8_src_bottom_U[1]*u32_x_frac_U)*u32_y_frac_U ) >> 8); + + V_32 = (((pu8_src_top_V[0]*(16-u32_x_frac_U) + pu8_src_top_V[1]*u32_x_frac_U) + *(16-u32_y_frac_U)+ (pu8_src_bottom_V[0]*(16-u32_x_frac_U) + + pu8_src_bottom_V[1]*u32_x_frac_U)*u32_y_frac_U ) >> 8); + + Y_32 = (((pu8_src_top_Y[0]*(16-u32_x_frac_Y) + pu8_src_top_Y[1]*u32_x_frac_Y) + *(16-u32_y_frac_Y) + (pu8_src_bottom_Y[0]*(16-u32_x_frac_Y) + + pu8_src_bottom_Y[1]*u32_x_frac_Y)*u32_y_frac_Y ) >> 8); + + u32_x_accum_U += (u32_x_inc[UPlane]); + + /* YUV to RGB */ + #ifdef __RGB_V1__ + Yval_32 = Y_32*37; + #else /* __RGB_V1__v */ + Yval_32 = Y_32*0x2568; + #endif /* __RGB_V1__v */ + + DEMATRIX(u8_Red,u8_Green,u8_Blue,Yval_32,U_32,V_32); + + /* Pack 8 bit R,G,B to RGB565 */ + #ifdef LITTLE_ENDIAN + u32_rgb_temp1 = PACK_BGR565(0,u8_Red,u8_Green,u8_Blue); + #else /* LITTLE_ENDIAN */ + u32_rgb_temp1 = PACK_BGR565(16,u8_Red,u8_Green,u8_Blue); + #endif /* LITTLE_ENDIAN */ + + + pu8_src_top_Y = pu8_data_in1[YPlane]+(u32_x_accum_Y >> 16); + pu8_src_bottom_Y = pu8_src_top_Y + u32_stride_in[YPlane]; + + /* Weighted combination */ + Y_32 = (((pu8_src_top_Y[0]*(16-u32_x_frac_Y) + pu8_src_top_Y[1]*u32_x_frac_Y) + *(16-u32_rgb_temp4) + (pu8_src_bottom_Y[0]*(16-u32_x_frac_Y) + + pu8_src_bottom_Y[1]*u32_x_frac_Y)*u32_rgb_temp4 ) >> 8); + + u32_x_accum_Y += u32_x_inc[YPlane]; + + /* Horizontal weight factor */ + u32_x_frac_Y = (u32_x_accum_Y >> 12)&15; + + /* YUV to RGB */ + #ifdef __RGB_V1__ + Yval_32 = Y_32*37; + #else /* __RGB_V1__v */ + Yval_32 = Y_32*0x2568; + #endif /* __RGB_V1__v */ + + DEMATRIX(u8_Red,u8_Green,u8_Blue,Yval_32,U_32,V_32); + + /* Pack 8 bit R,G,B to RGB565 */ + #ifdef LITTLE_ENDIAN + u32_rgb_temp2 = PACK_BGR565(0,u8_Red,u8_Green,u8_Blue); + #else /* LITTLE_ENDIAN */ + u32_rgb_temp2 = PACK_BGR565(16,u8_Red,u8_Green,u8_Blue); + #endif /* LITTLE_ENDIAN */ + + + pu8_src_top_Y 
= pu8_data_in[YPlane] + (u32_x_accum_Y >> 16) ; + pu8_src_bottom_Y = pu8_src_top_Y + u32_stride_in[YPlane]; + + /* Weighted combination */ + Y_32 = (((pu8_src_top_Y[0]*(16-u32_x_frac_Y) + pu8_src_top_Y[1]*u32_x_frac_Y) + *(16-u32_y_frac_Y) + (pu8_src_bottom_Y[0]*(16-u32_x_frac_Y) + + pu8_src_bottom_Y[1]*u32_x_frac_Y)*u32_y_frac_Y ) >> 8); + + /* YUV to RGB */ + #ifdef __RGB_V1__ + Yval_32 = Y_32*37; + #else /* __RGB_V1__v */ + Yval_32 = Y_32*0x2568; + #endif /* __RGB_V1__v */ + + DEMATRIX(u8_Red,u8_Green,u8_Blue,Yval_32,U_32,V_32); + + /* Pack 8 bit R,G,B to RGB565 */ + #ifdef LITTLE_ENDIAN + *(pu32_rgb_data_current)++ = u32_rgb_temp1 | + PACK_BGR565(16,u8_Red,u8_Green,u8_Blue); + #else /* LITTLE_ENDIAN */ + *(pu32_rgb_data_current)++ = u32_rgb_temp1 | + PACK_BGR565(0,u8_Red,u8_Green,u8_Blue); + #endif /* LITTLE_ENDIAN */ + + + pu8_src_top_Y = pu8_data_in1[YPlane]+ (u32_x_accum_Y >> 16); + pu8_src_bottom_Y = pu8_src_top_Y + u32_stride_in[YPlane]; + + /* Weighted combination */ + Y_32 = (((pu8_src_top_Y[0]*(16-u32_x_frac_Y) + pu8_src_top_Y[1]*u32_x_frac_Y) + *(16-u32_rgb_temp4) + (pu8_src_bottom_Y[0]*(16-u32_x_frac_Y) + + pu8_src_bottom_Y[1]*u32_x_frac_Y)*u32_rgb_temp4 )>>8); + + u32_x_accum_Y += u32_x_inc[YPlane]; + /* YUV to RGB */ + #ifdef __RGB_V1__ + Yval_32=Y_32*37; + #else /* __RGB_V1__v */ + Yval_32=Y_32*0x2568; + #endif /* __RGB_V1__v */ + + DEMATRIX(u8_Red,u8_Green,u8_Blue,Yval_32,U_32,V_32); + + /* Pack 8 bit R,G,B to RGB565 */ + #ifdef LITTLE_ENDIAN + *(pu32_rgb_data_next)++ = u32_rgb_temp2 | + PACK_BGR565(16,u8_Red,u8_Green,u8_Blue); + #else /* LITTLE_ENDIAN */ + *(pu32_rgb_data_next)++ = u32_rgb_temp2 | + PACK_BGR565(0,u8_Red,u8_Green,u8_Blue); + #endif /* LITTLE_ENDIAN */ + + } /* End of horizontal scanning */ + + u32_y_accum_Y = u32_rgb_temp3 + (u32_y_inc[YPlane]); + u32_y_accum_U += (u32_y_inc[UPlane]); + + /* Y plane row update */ + if (u32_y_accum_Y >> 16) + { + pu8_data_in[YPlane] = pu8_data_in1[YPlane] + + ((u32_y_accum_Y >> 16) * (u32_stride_in[YPlane])); + u32_y_accum_Y &= 0xffff; + } + else + { + pu8_data_in[YPlane] = pu8_data_in1[YPlane]; + } + /* U and V planes row update */ + if (u32_y_accum_U >> 16) + { + pu8_data_in[UPlane] = pu8_data_in[UPlane] + + (u32_y_accum_U >> 16) * (u32_stride_in[UPlane]); + pu8_data_in[VPlane] = pu8_data_in[VPlane] + + (u32_y_accum_U >> 16) * (u32_stride_in[VPlane]); + u32_y_accum_U &= 0xffff; + } + /* BGR pointer Update */ + pu32_rgb_data_start += u32_stride_out; + + } /* End of vertical scanning */ + return M4VIFI_OK; +} + +/*************************************************************************** +Proto: +M4VIFI_UInt8 M4VIFI_RGB888toNV12(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[2]); +Author: Patrice Martinez / Philips Digital Networks - MP4Net +Purpose: filling of the NV12 plane from a BGR24 plane +Abstract: Loop on each row ( 2 rows by 2 rows ) + Loop on each column ( 2 col by 2 col ) + Get 4 BGR samples from input data and build 4 output Y samples and each single U & V data + end loop on col + end loop on row + +In: RGB24 plane +InOut: none +Out: array of 3 M4VIFI_ImagePlane structures +Modified: ML: RGB function modified to BGR. 
+***************************************************************************/ +M4VIFI_UInt8 M4VIFI_RGB888toNV12(void *pUserData, + M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut) +{ + + M4VIFI_UInt32 u32_width, u32_height; + M4VIFI_UInt32 u32_stride_Y, u32_stride2_Y, u32_stride_UV, u32_stride_rgb, u32_stride_2rgb; + M4VIFI_UInt32 u32_col, u32_row; + + M4VIFI_Int32 i32_r00, i32_r01, i32_r10, i32_r11; + M4VIFI_Int32 i32_g00, i32_g01, i32_g10, i32_g11; + M4VIFI_Int32 i32_b00, i32_b01, i32_b10, i32_b11; + M4VIFI_Int32 i32_y00, i32_y01, i32_y10, i32_y11; + M4VIFI_Int32 i32_u00, i32_u01, i32_u10, i32_u11; + M4VIFI_Int32 i32_v00, i32_v01, i32_v10, i32_v11; + M4VIFI_UInt8 *pu8_yn, *pu8_ys, *pu8_u, *pu8_v; + M4VIFI_UInt8 *pu8_y_data, *pu8_u_data, *pu8_v_data; + M4VIFI_UInt8 *pu8_rgbn_data, *pu8_rgbn; + + /* check sizes */ + if( (PlaneIn->u_height != PlaneOut[0].u_height) || + (PlaneOut[0].u_height != (PlaneOut[1].u_height<<1))) + return M4VIFI_ILLEGAL_FRAME_HEIGHT; + + if( (PlaneIn->u_width != PlaneOut[0].u_width) || + (PlaneOut[0].u_width != PlaneOut[1].u_width)) + return M4VIFI_ILLEGAL_FRAME_WIDTH; + + + /* set the pointer to the beginning of the output data buffers */ + pu8_y_data = PlaneOut[0].pac_data + PlaneOut[0].u_topleft; + pu8_u_data = PlaneOut[1].pac_data + PlaneOut[1].u_topleft; + pu8_v_data = pu8_u_data + 1; + + /* idem for input buffer */ + pu8_rgbn_data = PlaneIn->pac_data + PlaneIn->u_topleft; + + /* get the size of the output image */ + u32_width = PlaneOut[0].u_width; + u32_height = PlaneOut[0].u_height; + + /* set the size of the memory jumps corresponding to row jump in each output plane */ + u32_stride_Y = PlaneOut[0].u_stride; + u32_stride2_Y= u32_stride_Y << 1; + u32_stride_UV = PlaneOut[1].u_stride; + + /* idem for input plane */ + u32_stride_rgb = PlaneIn->u_stride; + u32_stride_2rgb = u32_stride_rgb << 1; + + /* loop on each row of the output image, input coordinates are estimated from output ones */ + /* two YUV rows are computed at each pass */ + for (u32_row = u32_height ;u32_row != 0; u32_row -=2) + { + /* update working pointers */ + pu8_yn = pu8_y_data; + pu8_ys = pu8_yn + u32_stride_Y; + + pu8_u = pu8_u_data; + pu8_v = pu8_v_data; + + pu8_rgbn= pu8_rgbn_data; + + /* loop on each column of the output image*/ + for (u32_col = u32_width; u32_col != 0 ; u32_col -=2) + { + /* get RGB samples of 4 pixels */ + GET_RGB24(i32_r00, i32_g00, i32_b00, pu8_rgbn, 0); + GET_RGB24(i32_r10, i32_g10, i32_b10, pu8_rgbn, CST_RGB_24_SIZE); + GET_RGB24(i32_r01, i32_g01, i32_b01, pu8_rgbn, u32_stride_rgb); + GET_RGB24(i32_r11, i32_g11, i32_b11, pu8_rgbn, u32_stride_rgb + CST_RGB_24_SIZE); + + i32_u00 = U24(i32_r00, i32_g00, i32_b00); + i32_v00 = V24(i32_r00, i32_g00, i32_b00); + i32_y00 = Y24(i32_r00, i32_g00, i32_b00); /* matrix luminance */ + pu8_yn[0]= (M4VIFI_UInt8)i32_y00; + + i32_u10 = U24(i32_r10, i32_g10, i32_b10); + i32_v10 = V24(i32_r10, i32_g10, i32_b10); + i32_y10 = Y24(i32_r10, i32_g10, i32_b10); + pu8_yn[1]= (M4VIFI_UInt8)i32_y10; + + i32_u01 = U24(i32_r01, i32_g01, i32_b01); + i32_v01 = V24(i32_r01, i32_g01, i32_b01); + i32_y01 = Y24(i32_r01, i32_g01, i32_b01); + pu8_ys[0]= (M4VIFI_UInt8)i32_y01; + + i32_u11 = U24(i32_r11, i32_g11, i32_b11); + i32_v11 = V24(i32_r11, i32_g11, i32_b11); + i32_y11 = Y24(i32_r11, i32_g11, i32_b11); + pu8_ys[1] = (M4VIFI_UInt8)i32_y11; + + *pu8_u = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2); + *pu8_v = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2); + + pu8_rgbn += (CST_RGB_24_SIZE<<1); + pu8_yn 
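+            /* Note (editorial): the "+ 2" before ">> 2" in the chroma
+               averages above makes the 2x2 U/V means round to nearest
+               rather than truncate. */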
+= 2;
+        pu8_ys += 2;
+
+        pu8_u += 2;
+        pu8_v += 2;
+    } /* end of horizontal scanning */
+
+    pu8_y_data += u32_stride2_Y;
+    pu8_u_data += u32_stride_UV;
+    pu8_v_data += u32_stride_UV;
+    pu8_rgbn_data += u32_stride_2rgb;
+
+    } /* End of vertical scanning */
+
+    return M4VIFI_OK;
+}
+
+/** NV12 to NV12 */
+/**
+ *******************************************************************************************
+ * M4VIFI_UInt8 M4VIFI_NV12toNV12 (void *pUserData,
+ *                                 M4VIFI_ImagePlane *pPlaneIn,
+ *                                 M4VIFI_ImagePlane *pPlaneOut)
+ * @brief   Copies an NV12 image into an NV12 image (plain row-by-row copy).
+ * @param   pUserData: (IN) User Specific Data (unused - may be NULL)
+ * @param   pPlaneIn: (IN) Pointer to the NV12 plane buffer
+ * @param   pPlaneOut: (OUT) Pointer to the NV12 plane
+ * @return  M4VIFI_OK: there is no error
+ * @return  M4VIFI_ILLEGAL_FRAME_HEIGHT: error in plane height
+ * @return  M4VIFI_ILLEGAL_FRAME_WIDTH: error in plane width
+ *******************************************************************************************
+ */
+M4VIFI_UInt8 M4VIFI_NV12toNV12(void *user_data,
+    M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut)
+{
+    M4VIFI_Int32 plane_number;
+    M4VIFI_UInt32 i;
+    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
+
+    for (plane_number = 0; plane_number < 2; plane_number++)
+    {
+        p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);
+        p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);
+        for (i = 0; i < PlaneOut[plane_number].u_height; i++)
+        {
+            memcpy((void *)p_buf_dest, (void *)p_buf_src, PlaneOut[plane_number].u_width);
+            p_buf_src += PlaneIn[plane_number].u_stride;
+            p_buf_dest += PlaneOut[plane_number].u_stride;
+        }
+    }
+    return M4VIFI_OK;
+}
+
+/**
+ ***********************************************************************************************
+ * M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toNV12(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
+ *                                              M4VIFI_ImagePlane *pPlaneOut)
+ * @author  David Dana (PHILIPS Software)
+ * @brief   Resizes an NV12 (semi-planar) frame.
+ * @note    Basic structure of the function:
+ *          Loop on each row (step 2)
+ *            Loop on each column (step 2)
+ *              Get four Y samples and one U & V sample
+ *              Resize the Y with the corresponding U and V samples
+ *              Place the NV12 in the output plane
+ *            end loop column
+ *          end loop row
+ *          For resizing, bilinear interpolation linearly interpolates along
+ *          each row, and then uses that result in a linear interpolation down each column.
+ *          Each estimated pixel in the output image is a weighted
+ *          combination of its four neighbours. The ratio of compression
+ *          or dilation is estimated using the input and output sizes.
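+ *          Worked example (editorial, assuming MAX_SHORT == 0x10000 as
+ *          the 16.16 fixed-point arithmetic below implies): upscaling a
+ *          320-wide Y row to 640 gives
+ *          u32_x_inc = ((320-1)*0x10000)/(640-1) = 0x7FCC, i.e. the
+ *          source x position advances by about 0.4993 pixel per output
+ *          pixel.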
+ * @param pUserData: (IN) User Data + * @param pPlaneIn: (IN) Pointer to NV12 (Planar) plane buffer + * @param pPlaneOut: (OUT) Pointer to NV12 (Planar) plane + * @return M4VIFI_OK: there is no error + * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: Error in height + * @return M4VIFI_ILLEGAL_FRAME_WIDTH: Error in width + *********************************************************************************************** +*/ +M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toNV12(void *pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) +{ + M4VIFI_UInt8 *pu8_data_in, *pu8_data_out, *pu8dum; + M4VIFI_UInt32 u32_plane; + M4VIFI_UInt32 u32_width_in, u32_width_out, u32_height_in, u32_height_out; + M4VIFI_UInt32 u32_stride_in, u32_stride_out; + M4VIFI_UInt32 u32_x_inc, u32_y_inc; + M4VIFI_UInt32 u32_x_accum, u32_y_accum, u32_x_accum_start; + M4VIFI_UInt32 u32_width, u32_height; + M4VIFI_UInt32 u32_y_frac; + M4VIFI_UInt32 u32_x_frac; + M4VIFI_UInt32 u32_temp_value,u32_temp_value1; + M4VIFI_UInt8 *pu8_src_top; + M4VIFI_UInt8 *pu8_src_bottom; + + M4VIFI_UInt8 u8Wflag = 0; + M4VIFI_UInt8 u8Hflag = 0; + M4VIFI_UInt32 loop = 0; + + LOGV("M4VIFI_ResizeBilinearNV12toNV12 begin"); + /* + If input width is equal to output width and input height equal to + output height then M4VIFI_NV12toNV12 is called. + */ + + LOGV("pPlaneIn[0].u_height = %d, pPlaneIn[0].u_width = %d,\ + pPlaneOut[0].u_height = %d, pPlaneOut[0].u_width = %d", + pPlaneIn[0].u_height, pPlaneIn[0].u_width, + pPlaneOut[0].u_height, pPlaneOut[0].u_width + ); + LOGV("pPlaneIn[1].u_height = %d, pPlaneIn[1].u_width = %d,\ + pPlaneOut[1].u_height = %d, pPlaneOut[1].u_width = %d", + pPlaneIn[1].u_height, pPlaneIn[1].u_width, + pPlaneOut[1].u_height, pPlaneOut[1].u_width + ); + if ((pPlaneIn[0].u_height == pPlaneOut[0].u_height) && + (pPlaneIn[0].u_width == pPlaneOut[0].u_width)) + { + return M4VIFI_NV12toNV12(pUserData, pPlaneIn, pPlaneOut); + } + + /* Check for the YUV width and height are even */ + if ((IS_EVEN(pPlaneIn[0].u_height) == FALSE) || + (IS_EVEN(pPlaneOut[0].u_height) == FALSE)) + { + return M4VIFI_ILLEGAL_FRAME_HEIGHT; + } + + if ((IS_EVEN(pPlaneIn[0].u_width) == FALSE) || + (IS_EVEN(pPlaneOut[0].u_width) == FALSE)) + { + return M4VIFI_ILLEGAL_FRAME_WIDTH; + } + + /* Loop on planes */ + for(u32_plane = 0;u32_plane < 2;u32_plane++) + { + /* Get the memory jump corresponding to a row jump */ + u32_stride_in = pPlaneIn[u32_plane].u_stride; + u32_stride_out = pPlaneOut[u32_plane].u_stride; + + /* Set the bounds of the active image */ + u32_width_in = pPlaneIn[u32_plane].u_width; + u32_height_in = pPlaneIn[u32_plane].u_height; + + u32_width_out = pPlaneOut[u32_plane].u_width; + u32_height_out = pPlaneOut[u32_plane].u_height; + + /* + For the case , width_out = width_in , set the flag to avoid + accessing one column beyond the input width.In this case the last + column is replicated for processing + */ + if (u32_width_out == u32_width_in) { + u32_width_out = u32_width_out - 1 - u32_plane; + u8Wflag = 1; + } + + /* Compute horizontal ratio between src and destination width.*/ + if (u32_width_out >= u32_width_in) + { + u32_x_inc = ((u32_width_in -1 -u32_plane) * MAX_SHORT)/(u32_width_out -1 -u32_plane); + } + else + { + u32_x_inc = (u32_width_in * MAX_SHORT) / (u32_width_out); + } + + /* + For the case , height_out = height_in , set the flag to avoid + accessing one row beyond the input height.In this case the last + row is replicated for processing + */ + if (u32_height_out == u32_height_in) { + u32_height_out = u32_height_out-1; + u8Hflag = 
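+    /* Note (editorial): when the heights match, the loop count is cut by
+       one and the last row is replicated after the main loop (see the
+       u8Hflag handling below), so the bottom interpolation taps never
+       read past the input plane. */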
1;
+    }
+
+    /* Compute vertical ratio between source and destination height. */
+    if (u32_height_out >= u32_height_in)
+    {
+        u32_y_inc = ((u32_height_in - 1) * MAX_SHORT) / (u32_height_out - 1);
+    }
+    else
+    {
+        u32_y_inc = (u32_height_in * MAX_SHORT) / (u32_height_out);
+    }
+
+    /*
+    Calculate the initial accumulator value: u32_y_accum_start.
+    u32_y_accum_start is coded on 15 bits, and represents a value
+    between 0 and 0.5
+    */
+    if (u32_y_inc >= MAX_SHORT)
+    {
+        /*
+        Keep the fractional part, assuming that the integer part is coded
+        on the 16 high bits and the fractional part on the 16 low bits
+        */
+        u32_y_accum = u32_y_inc & 0xffff;
+
+        if (!u32_y_accum)
+        {
+            u32_y_accum = MAX_SHORT;
+        }
+
+        u32_y_accum >>= 1;
+    }
+    else
+    {
+        u32_y_accum = 0;
+    }
+
+    /*
+    Calculate the initial accumulator value: u32_x_accum_start.
+    u32_x_accum_start is coded on 15 bits, and represents a value
+    between 0 and 0.5
+    */
+    if (u32_x_inc >= MAX_SHORT)
+    {
+        u32_x_accum_start = u32_x_inc & 0xffff;
+
+        if (!u32_x_accum_start)
+        {
+            u32_x_accum_start = MAX_SHORT;
+        }
+
+        u32_x_accum_start >>= 1;
+    }
+    else
+    {
+        u32_x_accum_start = 0;
+    }
+
+    u32_height = u32_height_out;
+
+    /*
+    Bilinear interpolation linearly interpolates along each row, and
+    then uses that result in a linear interpolation down each column.
+    Each estimated pixel in the output image is a weighted combination
+    of its four neighbours according to the formula:
+        F(p',q') = f(p,q)R(-a)R(b) + f(p,q+1)R(-a)R(b-1)
+                 + f(p+1,q)R(1-a)R(b) + f(p+1,q+1)R(1-a)R(b-1)
+    with R(x) = x+1 for -1 <= x <= 0 and R(x) = 1-x for 0 <= x <= 1,
+    where a (resp. b) is the distance from the nearest neighbour in
+    the p (resp. q) direction
+    */
+
+    if (u32_plane == 0)
+    {
+        /* Set the working pointers at the beginning of the input/output data field */
+        pu8_data_in = pPlaneIn[u32_plane].pac_data + pPlaneIn[u32_plane].u_topleft;
+        pu8_data_out = pPlaneOut[u32_plane].pac_data + pPlaneOut[u32_plane].u_topleft;
+
+        do { /* Scan all the rows */
+
+            /* Vertical weight factor */
+            u32_y_frac = (u32_y_accum>>12)&15;
+
+            /* Reinit accumulator */
+            u32_x_accum = u32_x_accum_start;
+
+            u32_width = u32_width_out;
+
+            do { /* Scan along each row */
+                pu8_src_top = pu8_data_in + (u32_x_accum >> 16);
+                pu8_src_bottom = pu8_src_top + u32_stride_in;
+                u32_x_frac = (u32_x_accum >> 12)&15; /* Horizontal weight factor */
+
+                /* Weighted combination */
+                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +
+                    pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +
+                    (pu8_src_bottom[0]*(16-u32_x_frac) +
+                    pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);
+
+                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;
+
+                /* Update horizontal accumulator */
+                u32_x_accum += u32_x_inc;
+            } while(--u32_width);
+
+            /*
+            The u8Wflag flag takes effect when the input and output
+            widths are the same (heights may differ); the previous
+            pixel is replicated here
+            */
+            if (u8Wflag) {
+                *pu8_data_out = (M4VIFI_UInt8)u32_temp_value;
+            }
+
+            pu8dum = (pu8_data_out - u32_width_out);
+            pu8_data_out = pu8_data_out + u32_stride_out - u32_width_out;
+
+            /* Update vertical accumulator */
+            u32_y_accum += u32_y_inc;
+            if (u32_y_accum>>16) {
+                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * u32_stride_in;
+                u32_y_accum &= 0xffff;
+            }
+        } while(--u32_height);
+
+        /*
+        The u8Hflag flag takes effect when the input and output heights
+        are the same (widths may differ).
So previous pixel row is + replicated here + */ + if (u8Hflag) { + memcpy((void *)pu8_data_out,(void *)pu8dum,u32_width_out+u8Wflag); + } + } + else + { + /* Set the working pointers at the beginning of the input/output data field */ + pu8_data_in = pPlaneIn[u32_plane].pac_data + pPlaneIn[u32_plane].u_topleft; + pu8_data_out = pPlaneOut[u32_plane].pac_data + pPlaneOut[u32_plane].u_topleft; + + do { /* Scan all the row */ + + /* Vertical weight factor */ + u32_y_frac = (u32_y_accum>>12)&15; + + /* Reinit accumulator */ + u32_x_accum = u32_x_accum_start; + + u32_width = u32_width_out; + + do { /* Scan along each row */ + pu8_src_top = pu8_data_in + ((u32_x_accum >> 16) << 1); + pu8_src_bottom = pu8_src_top + u32_stride_in; + u32_x_frac = (u32_x_accum >> 12)&15; + + /* U planar weighted combination */ + u32_temp_value1 = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + + pu8_src_top[2]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[0]*(16-u32_x_frac) + + pu8_src_bottom[2]*u32_x_frac)*u32_y_frac )>>8); + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value1; + + pu8_src_top = pu8_src_top + 1; + pu8_src_bottom = pu8_src_bottom + 1; + + /* V planar weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + + pu8_src_top[2]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[0]*(16-u32_x_frac) + + pu8_src_bottom[2]*u32_x_frac)*u32_y_frac )>>8); + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update horizontal accumulator */ + u32_x_accum += u32_x_inc; + u32_width -= 2; + } while(u32_width); + + /* + This u8Wflag flag gets in to effect if input and output + width is same, and height may be different. So previous + pixel is replicated here + */ + if (u8Wflag) { + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value1; + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + } + + pu8dum = (pu8_data_out - u32_width_out); + pu8_data_out = pu8_data_out + u32_stride_out - u32_width_out; + + /* Update vertical accumulator */ + u32_y_accum += u32_y_inc; + if (u32_y_accum>>16) { + pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * u32_stride_in; + u32_y_accum &= 0xffff; + } + } while(--u32_height); + + /* + This u8Hflag flag gets in to effect if input and output height + is same, and width may be different. So previous pixel row is + replicated here + */ + if (u8Hflag) { + memcpy((void *)pu8_data_out,(void *)pu8dum,u32_width_out+u8Wflag+1); + } + } + } + LOGV("M4VIFI_ResizeBilinearNV12toNV12 end"); + return M4VIFI_OK; +} + +M4VIFI_UInt8 M4VIFI_Rotate90LeftNV12toNV12(void* pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) +{ + M4VIFI_Int32 plane_number; + M4VIFI_UInt32 i,j, u_stride; + M4VIFI_UInt8 *p_buf_src, *p_buf_dest; + + /**< Loop on Y,U and V planes */ + for (plane_number = 0; plane_number < 2; plane_number++) { + /**< Get adresses of first valid pixel in input and output buffer */ + /**< As we have a -90. 
rotation, first needed pixel is the upper-right one */ + if (plane_number == 0) { + p_buf_src = + &(pPlaneIn[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]) + + pPlaneOut[plane_number].u_height - 1 ; + p_buf_dest = + &(pPlaneOut[plane_number].pac_data[pPlaneOut[plane_number].u_topleft]); + u_stride = pPlaneIn[plane_number].u_stride; + /**< Loop on output rows */ + for (i = pPlaneOut[plane_number].u_height; i != 0; i--) { + /**< Loop on all output pixels in a row */ + for (j = pPlaneOut[plane_number].u_width; j != 0; j--) { + *p_buf_dest++= *p_buf_src; + p_buf_src += u_stride; /**< Go to the next row */ + } + + /**< Go on next row of the output frame */ + p_buf_dest += + pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; + /**< Go to next pixel in the last row of the input frame*/ + p_buf_src -= + pPlaneIn[plane_number].u_stride * pPlaneOut[plane_number].u_width + 1 ; + } + } else { + p_buf_src = + &(pPlaneIn[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]) + + pPlaneIn[plane_number].u_width - 2 ; + p_buf_dest = + &(pPlaneOut[plane_number].pac_data[pPlaneOut[plane_number].u_topleft]); + u_stride = pPlaneIn[plane_number].u_stride; + /**< Loop on output rows */ + for (i = pPlaneOut[plane_number].u_height; i != 0 ; i--) { + /**< Loop on all output pixels in a row */ + for (j = (pPlaneOut[plane_number].u_width >> 1); j != 0 ; j--) { + *p_buf_dest++= *p_buf_src++; + *p_buf_dest++= *p_buf_src--; + p_buf_src += u_stride; /**< Go to the next row */ + } + + /**< Go on next row of the output frame */ + p_buf_dest += + pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; + /**< Go to next pixel in the last row of the input frame*/ + p_buf_src -= + pPlaneIn[plane_number].u_stride * pPlaneIn[plane_number].u_height + 2 ; + } + } + } + + return M4VIFI_OK; +} + +M4VIFI_UInt8 M4VIFI_Rotate90RightNV12toNV12(void* pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) +{ + M4VIFI_Int32 plane_number; + M4VIFI_UInt32 i,j, u_stride; + M4VIFI_UInt8 *p_buf_src, *p_buf_dest; + + /**< Loop on Y,U and V planes */ + for (plane_number = 0; plane_number < 2; plane_number++) { + /**< Get adresses of first valid pixel in input and output buffer */ + /**< As we have a +90 rotation, first needed pixel is the left-down one */ + p_buf_src = + &(pPlaneIn[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]) + + (pPlaneIn[plane_number].u_stride * (pPlaneIn[plane_number].u_height - 1)); + p_buf_dest = + &(pPlaneOut[plane_number].pac_data[pPlaneOut[plane_number].u_topleft]); + u_stride = pPlaneIn[plane_number].u_stride; + if (plane_number == 0) { + /**< Loop on output rows */ + for (i = pPlaneOut[plane_number].u_height; i != 0 ; i--) { + /**< Loop on all output pixels in a row */ + for (j = pPlaneOut[plane_number].u_width; j != 0 ; j--) { + *p_buf_dest++= *p_buf_src; + p_buf_src -= u_stride; /**< Go to the previous row */ + } + + /**< Go on next row of the output frame */ + p_buf_dest += + pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; + /**< Go to next pixel in the last row of the input frame*/ + p_buf_src += + pPlaneIn[plane_number].u_stride * pPlaneOut[plane_number].u_width + 1 ; + } + } else { + /**< Loop on output rows */ + for (i = pPlaneOut[plane_number].u_height; i != 0 ; i--) { + /**< Loop on all output pixels in a row */ + for (j = (pPlaneOut[plane_number].u_width >> 1); j != 0 ; j--) { + *p_buf_dest++= *p_buf_src++; + *p_buf_dest++= *p_buf_src--; + p_buf_src -= u_stride; /**< Go to the previous row */ + } + + /**< Go on next 
row of the output frame */ + p_buf_dest += + pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; + /**< Go to next pixel in the last row of the input frame*/ + p_buf_src += + pPlaneIn[plane_number].u_stride * pPlaneIn[plane_number].u_height + 2 ; + } + } + } + + return M4VIFI_OK; +} + +M4VIFI_UInt8 M4VIFI_Rotate180NV12toNV12(void* pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) +{ + M4VIFI_Int32 plane_number; + M4VIFI_UInt32 i,j; + M4VIFI_UInt8 *p_buf_src, *p_buf_dest, temp_pix1; + M4VIFI_UInt16 *p16_buf_src, *p16_buf_dest, temp_pix2; + + /**< Loop on Y,U and V planes */ + for (plane_number = 0; plane_number < 2; plane_number++) { + /**< Get adresses of first valid pixel in input and output buffer */ + p_buf_src = + &(pPlaneIn[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]); + p_buf_dest = + &(pPlaneOut[plane_number].pac_data[pPlaneOut[plane_number].u_topleft]); + + if (plane_number == 0) { + /**< If pPlaneIn = pPlaneOut, the algorithm will be different */ + if (p_buf_src == p_buf_dest) { + /**< Get Address of last pixel in the last row of the frame */ + p_buf_dest += + pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height-1) + + pPlaneOut[plane_number].u_width - 1; + + /**< We loop (height/2) times on the rows. + * In case u_height is odd, the row at the middle of the frame + * has to be processed as must be mirrored */ + for (i = (pPlaneOut[plane_number].u_height>>1); i != 0; i--) { + for (j = pPlaneOut[plane_number].u_width; j != 0 ; j--) { + temp_pix1= *p_buf_dest; + *p_buf_dest--= *p_buf_src; + *p_buf_src++ = temp_pix1; + } + /**< Go on next row in top of frame */ + p_buf_src += + pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; + /**< Go to the last pixel in previous row in bottom of frame*/ + p_buf_dest -= + pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; + } + + /**< Mirror middle row in case height is odd */ + if ((pPlaneOut[plane_number].u_height%2)!= 0) { + p_buf_src = + &(pPlaneOut[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]); + p_buf_src += + pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height>>1); + p_buf_dest = + p_buf_src + pPlaneOut[plane_number].u_width; + + /**< We loop u_width/2 times on this row. 
+ * In case u_width is odd, the pixel at the middle of this row + * remains unchanged */ + for (j = (pPlaneOut[plane_number].u_width>>1); j != 0 ; j--) { + temp_pix1= *p_buf_dest; + *p_buf_dest--= *p_buf_src; + *p_buf_src++ = temp_pix1; + } + } + } else { + /**< Get Address of last pixel in the last row of the output frame */ + p_buf_dest += + pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height-1) + + pPlaneIn[plane_number].u_width - 1; + + /**< Loop on rows */ + for (i = pPlaneOut[plane_number].u_height; i != 0 ; i--) { + for (j = pPlaneOut[plane_number].u_width; j != 0 ; j--) { + *p_buf_dest--= *p_buf_src++; + } + + /**< Go on next row in top of input frame */ + p_buf_src += + pPlaneIn[plane_number].u_stride - pPlaneOut[plane_number].u_width; + /**< Go to last pixel of previous row in bottom of input frame*/ + p_buf_dest -= + pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; + } + } + } else { + /**< If pPlaneIn = pPlaneOut, the algorithm will be different */ + if (p_buf_src == p_buf_dest) { + p16_buf_src = (M4VIFI_UInt16 *)p_buf_src; + p16_buf_dest = (M4VIFI_UInt16 *)p_buf_dest; + /**< Get Address of last pixel in the last row of the frame */ + p16_buf_dest += + ((pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height-1) + + pPlaneOut[plane_number].u_width)>>1) - 1; + + /**< We loop (height/2) times on the rows. + * In case u_height is odd, the row at the middle of the frame + * has to be processed as must be mirrored */ + for (i = (pPlaneOut[plane_number].u_height >> 1); i != 0 ; i--) { + for (j = (pPlaneOut[plane_number].u_width >> 1); j != 0 ; j--) { + temp_pix2 = *p16_buf_dest; + *p16_buf_dest--= *p16_buf_src; + *p16_buf_src++ = temp_pix2; + } + /**< Go on next row in top of frame */ + p16_buf_src += + ((pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width)>>1); + /**< Go to the last pixel in previous row in bottom of frame*/ + p16_buf_dest -= + ((pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width)>>1); + } + + /**< Mirror middle row in case height is odd */ + if ((pPlaneOut[plane_number].u_height%2)!= 0) { + p_buf_src = + &(pPlaneOut[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]); + p_buf_src += + pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height>>1); + p16_buf_src = (M4VIFI_UInt16 *)p_buf_src; + p_buf_dest = + p_buf_src + pPlaneOut[plane_number].u_width - 1; + p16_buf_dest = (M4VIFI_UInt16 *)p_buf_dest; + + /**< We loop u_width/2 times on this row. 
+ * In case u_width is odd, the pixel at the middle of this row + * remains unchanged */ + for (j = (pPlaneOut[plane_number].u_width>>2); j != 0 ; j--) { + temp_pix2= *p16_buf_dest; + *p16_buf_dest--= *p16_buf_src; + *p16_buf_src++ = temp_pix2; + } + } + } else { + /**< Get Address of last pixel in the last row of the output frame */ + p_buf_dest += + pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height-1) + + pPlaneIn[plane_number].u_width - 2; + p16_buf_dest = (M4VIFI_UInt16 *)p_buf_dest; + p16_buf_src = (M4VIFI_UInt16 *)p_buf_src; + + /**< Loop on rows */ + for (i = pPlaneOut[plane_number].u_height; i != 0 ; i--) { + for (j = (pPlaneOut[plane_number].u_width >> 1); j != 0 ; j--) { + *p16_buf_dest--= *p16_buf_src++; + } + + /**< Go on next row in top of input frame */ + p16_buf_src += + ((pPlaneIn[plane_number].u_stride - pPlaneOut[plane_number].u_width)>>1); + /**< Go to last pixel of previous row in bottom of input frame*/ + p16_buf_dest -= + ((pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width)>>1); + } + } + } + } + + return M4VIFI_OK; +} + + +M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toYUV420(void *pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) +{ + + LOGV("M4VIFI_ResizeBilinearNV12toYUV420 begin"); + + M4VIFI_ImagePlane pPlaneTmp[3]; + M4OSA_UInt32 mVideoWidth, mVideoHeight; + M4OSA_UInt32 mFrameSize; + + mVideoWidth = pPlaneIn[0].u_width; + mVideoHeight = pPlaneIn[0].u_height; + mFrameSize = mVideoWidth * mVideoHeight * 3/2; + + M4OSA_UInt8 *pData = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc( + mFrameSize, + 12420, + (M4OSA_Char*)("M4VIFI_ResizeBilinearNV12toYUV420: tempBuffer") + ); + + if (NULL == pData) + { + LOGE("Error: Fail to allocate tempBuffer!"); + return M4VIFI_ALLOC_FAILURE; + } + + pPlaneTmp[0].pac_data = pData; + pPlaneTmp[0].u_height = pPlaneIn[0].u_height; + pPlaneTmp[0].u_width = pPlaneIn[0].u_width; + pPlaneTmp[0].u_stride = pPlaneIn[0].u_stride; + pPlaneTmp[0].u_topleft = pPlaneIn[0].u_topleft; + + pPlaneTmp[1].pac_data = (M4OSA_UInt8 *)(pData + mVideoWidth*mVideoHeight); + pPlaneTmp[1].u_height = pPlaneTmp[0].u_height/2; + pPlaneTmp[1].u_width = pPlaneTmp[0].u_width/2; + pPlaneTmp[1].u_stride = pPlaneTmp[0].u_stride/2; + pPlaneTmp[1].u_topleft = pPlaneTmp[0].u_topleft; + + pPlaneTmp[2].pac_data = (M4OSA_UInt8 *)(pData + mVideoWidth*mVideoHeight*5/4); + pPlaneTmp[2].u_height = pPlaneTmp[0].u_height/2; + pPlaneTmp[2].u_width = pPlaneTmp[0].u_width/2; + pPlaneTmp[2].u_stride = pPlaneTmp[0].u_stride/2; + pPlaneTmp[2].u_topleft = pPlaneTmp[0].u_topleft; + + M4VIFI_UInt8 err; + err = M4VIFI_SemiplanarYUV420toYUV420_X86(pUserData, pPlaneIn,&pPlaneTmp[0]); + + if(err != M4VIFI_OK) + { + LOGE("Error: M4VIFI_SemiplanarYUV420toYUV420 fails!"); + free(pData); + return err; + } + + err = M4VIFI_ResizeBilinearYUV420toYUV420_X86(pUserData,&pPlaneTmp[0],pPlaneOut); + + free(pData); + LOGV("M4VIFI_ResizeBilinearNV12toYUV420 end"); + return err; + +} + +M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toBGR565(void *pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) +{ + LOGV("M4VIFI_ResizeBilinearNV12toBGR565 begin"); + + M4VIFI_ImagePlane pPlaneTmp[3]; + M4OSA_UInt32 mVideoWidth, mVideoHeight; + M4OSA_UInt32 mFrameSize; + + mVideoWidth = pPlaneIn[0].u_width; + mVideoHeight = pPlaneIn[0].u_height; + mFrameSize = mVideoWidth * mVideoHeight * 3/2; + + M4OSA_UInt8 *pData = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc( + mFrameSize, + 12420, + (M4OSA_Char*)("M4VIFI_ResizeBilinearNV12toYUV420:tempBuffer") + ); + if (NULL == pData) 
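+    /* Note (editorial): mFrameSize = width*height*3/2 is the planar
+       YUV420 size: width*height luma bytes plus two quarter-size chroma
+       planes placed at offsets w*h and w*h*5/4 below. The constant 12420
+       appears to be an allocation-tracking core ID and is left as-is. */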
+ { + LOGE("Error: Fail to allocate tempBuffer!"); + return M4VIFI_ALLOC_FAILURE; + } + pPlaneTmp[0].pac_data = pData; + pPlaneTmp[0].u_height = pPlaneIn[0].u_height; + pPlaneTmp[0].u_width = pPlaneIn[0].u_width; + pPlaneTmp[0].u_stride = pPlaneIn[0].u_stride; + pPlaneTmp[0].u_topleft = pPlaneIn[0].u_topleft; + + pPlaneTmp[1].pac_data = (M4OSA_UInt8 *)(pData + mVideoWidth*mVideoHeight); + pPlaneTmp[1].u_height = pPlaneTmp[0].u_height/2; + pPlaneTmp[1].u_width = pPlaneTmp[0].u_width/2; + pPlaneTmp[1].u_stride = pPlaneTmp[0].u_stride/2; + pPlaneTmp[1].u_topleft = pPlaneTmp[0].u_topleft; + + pPlaneTmp[2].pac_data = (M4OSA_UInt8 *)(pData + mVideoWidth*mVideoHeight*5/4); + pPlaneTmp[2].u_height = pPlaneTmp[0].u_height/2; + pPlaneTmp[2].u_width = pPlaneTmp[0].u_width/2; + pPlaneTmp[2].u_stride = pPlaneTmp[0].u_stride/2; + pPlaneTmp[2].u_topleft = pPlaneTmp[0].u_topleft; + + M4VIFI_UInt8 err; + err = M4VIFI_SemiplanarYUV420toYUV420_X86(pUserData, pPlaneIn,&pPlaneTmp[0]); + + if(err != M4VIFI_OK) + { + LOGE("Error: M4VIFI_SemiplanarYUV420toYUV420 fails!"); + free(pData); + return err; + } + + err = M4VIFI_ResizeBilinearYUV420toBGR565_X86(pUserData,&pPlaneTmp[0],pPlaneOut); + free(pData); + + LOGV("M4VIFI_ResizeBilinearNV12toBGR565 end"); + return err; +} + diff --git a/frameworks/videoedit/lvpp/VideoEditorToolsNV12.h b/frameworks/videoedit/lvpp/VideoEditorToolsNV12.h new file mode 100644 index 0000000..96bbd5b --- /dev/null +++ b/frameworks/videoedit/lvpp/VideoEditorToolsNV12.h @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef VIDEO_EDITOR_TOOLS_NV12_H +#define VIDEO_EDITOR_TOOLS_NV12_H + +#include "M4OSA_Types.h" +#include "M4VIFI_FiltersAPI.h" +#include "M4VIFI_Clip.h" +#include "M4VIFI_Defines.h" +#include "M4OSA_Memory.h" + +M4VIFI_UInt8 M4VIFI_RGB888toNV12(void *pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); + +M4VIFI_UInt8 M4VIFI_NV12toNV12(void *user_data, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); + +M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toNV12(void *pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); + +M4VIFI_UInt8 M4VIFI_Rotate90LeftNV12toNV12(void* pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); + +M4VIFI_UInt8 M4VIFI_Rotate90RightNV12toNV12(void* pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); + +M4VIFI_UInt8 M4VIFI_Rotate180NV12toNV12(void* pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); + +M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toBGR565(void *pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); + +M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toYUV420(void *pUserData, + M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); + +#endif + diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.c b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.c index 98919d2..4bed81f 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.c +++ b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.c @@ -93,9 +93,10 @@ M4OSA_ERR VIDEOEDITOR_BUFFER_allocatePool(VIDEOEDITOR_BUFFER_Pool** ppool, ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Assigning Pool name buffer"); memset((void *)pool->poolName, 0,VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE); - memcpy((void *)pool->poolName, (void *)poolName, - VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE-1); - + if (strlen((const char *)poolName) < VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE) { + memcpy((void *)pool->poolName, (void *)poolName, + strlen((const char *)poolName)); + } pool->NB = nbBuffers; VIDEOEDITOR_BUFFER_allocatePool_Cleanup: diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp index 21d3c30..9e7bafd 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp @@ -868,7 +868,8 @@ cleanUp: if( M4NO_ERROR == err ) { ALOGV("VideoEditorVideoDecoder_configureFromMetadata no error"); } else { - if( M4OSA_NULL != pDecShellContext->m_pDecBufferPool ) { + if( (M4OSA_NULL != pDecShellContext) && \ + (M4OSA_NULL != pDecShellContext->m_pDecBufferPool) ) { VIDEOEDITOR_BUFFER_freePool(pDecShellContext->m_pDecBufferPool); pDecShellContext->m_pDecBufferPool = M4OSA_NULL; } @@ -1626,7 +1627,7 @@ M4OSA_ERR VideoEditorVideoDecoder_render(M4OSA_Context context, goto cleanUp; } - ALOGV("VideoEditorVideoDecoder_render 3 ouput %d %d %d %d", + ALOGV("VideoEditorVideoDecoder_render 3 output %d %d %d %d", pOutputPlane[0].u_width, pOutputPlane[0].u_height, pOutputPlane[0].u_topleft, pOutputPlane[0].u_stride); @@ -1643,20 +1644,13 @@ M4OSA_ERR VideoEditorVideoDecoder_render(M4OSA_Context context, tmpPlane[0].u_topleft = 0; tmpPlane[0].u_stride = tmpPlane[0].u_width; tmpPlane[0].pac_data = (M4VIFI_UInt8*)pRenderVIDEOEDITORBuffer->pData; - tmpPlane[1].u_width = tmpPlane[0].u_width/2; + tmpPlane[1].u_width = tmpPlane[0].u_width; tmpPlane[1].u_height = tmpPlane[0].u_height/2; tmpPlane[1].u_topleft = 0; - tmpPlane[1].u_stride = 
tmpPlane[0].u_stride/2; + tmpPlane[1].u_stride = tmpPlane[0].u_stride; tmpPlane[1].pac_data = tmpPlane[0].pac_data + (tmpPlane[0].u_stride * tmpPlane[0].u_height); - tmpPlane[2].u_width = tmpPlane[1].u_width; - tmpPlane[2].u_height = tmpPlane[1].u_height; - tmpPlane[2].u_topleft = 0; - tmpPlane[2].u_stride = tmpPlane[1].u_stride; - tmpPlane[2].pac_data = tmpPlane[1].pac_data + - (tmpPlane[1].u_stride * tmpPlane[1].u_height); - - ALOGV("VideoEditorVideoDecoder_render w = %d H = %d", + ALOGV("VideoEditorVideoDecoder_render W = %d H = %d", tmpPlane[0].u_width,tmpPlane[0].u_height); pDecShellContext->m_pFilter(M4OSA_NULL, &tmpPlane[0], pOutputPlane); } else { @@ -1672,10 +1666,7 @@ M4OSA_ERR VideoEditorVideoDecoder_render(M4OSA_Context context, tempWidth * tempHeight); tempBuffPtr += (tempWidth * tempHeight); memcpy((void *) pOutputPlane[1].pac_data, (void *)tempBuffPtr, - (tempWidth/2) * (tempHeight/2)); - tempBuffPtr += ((tempWidth/2) * (tempHeight/2)); - memcpy((void *) pOutputPlane[2].pac_data, (void *)tempBuffPtr, - (tempWidth/2) * (tempHeight/2)); + tempWidth * tempHeight/2); } pDecShellContext->mNbRenderedFrames++; diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp index 1abc447..e22075e 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp @@ -737,7 +737,7 @@ M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer( M4OSA_Bool bReachedEOS) { M4OSA_ERR err = M4NO_ERROR; VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; - M4VIFI_ImagePlane pOutPlane[3]; + M4VIFI_ImagePlane pOutPlane[2]; MediaBuffer* buffer = NULL; int32_t nbBuffer = 0; @@ -748,7 +748,6 @@ M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer( pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; pOutPlane[0].pac_data = M4OSA_NULL; pOutPlane[1].pac_data = M4OSA_NULL; - pOutPlane[2].pac_data = M4OSA_NULL; if ( M4OSA_FALSE == bReachedEOS ) { M4OSA_UInt32 sizeY = pEncoderContext->mCodecParams->FrameWidth * @@ -756,7 +755,7 @@ M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer( M4OSA_UInt32 sizeU = sizeY >> 2; M4OSA_UInt32 size = sizeY + 2*sizeU; M4OSA_UInt8* pData = M4OSA_NULL; - buffer = new MediaBuffer((size_t)size); + pEncoderContext->mEncoderSource->requestBuffer(&buffer); pData = (M4OSA_UInt8*)buffer->data() + buffer->range_offset(); // Prepare the output image for pre-processing @@ -764,56 +763,19 @@ M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer( pOutPlane[0].u_height = pEncoderContext->mCodecParams->FrameHeight; pOutPlane[0].u_topleft = 0; pOutPlane[0].u_stride = pOutPlane[0].u_width; - pOutPlane[1].u_width = pOutPlane[0].u_width/2; + pOutPlane[1].u_width = pOutPlane[0].u_width; pOutPlane[1].u_height = pOutPlane[0].u_height/2; pOutPlane[1].u_topleft = 0; - pOutPlane[1].u_stride = pOutPlane[0].u_stride/2; - pOutPlane[2].u_width = pOutPlane[1].u_width; - pOutPlane[2].u_height = pOutPlane[1].u_height; - pOutPlane[2].u_topleft = 0; - pOutPlane[2].u_stride = pOutPlane[1].u_stride; + pOutPlane[1].u_stride = pOutPlane[0].u_stride; pOutPlane[0].pac_data = pData; pOutPlane[1].pac_data = pData + sizeY; - pOutPlane[2].pac_data = pData + sizeY + sizeU; // Apply pre-processing err = pEncoderContext->mPreProcFunction( pEncoderContext->mPreProcContext, M4OSA_NULL, pOutPlane); VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - // Convert MediaBuffer to the encoder input format if necessary - if 
(pEncoderContext->mI420ColorConverter) { - I420ColorConverter* converter = pEncoderContext->mI420ColorConverter; - int actualWidth = pEncoderContext->mCodecParams->FrameWidth; - int actualHeight = pEncoderContext->mCodecParams->FrameHeight; - - int encoderWidth, encoderHeight; - ARect encoderRect; - int encoderBufferSize; - - if (converter->getEncoderInputBufferInfo( - actualWidth, actualHeight, - &encoderWidth, &encoderHeight, - &encoderRect, &encoderBufferSize) == 0) { - - MediaBuffer* newBuffer; - pEncoderContext->mEncoderSource->requestBuffer(&newBuffer); - if (converter->convertI420ToEncoderInput( - pData, // srcBits - actualWidth, actualHeight, - encoderWidth, encoderHeight, - encoderRect, - (uint8_t*)newBuffer->data() + newBuffer->range_offset()) < 0) { - ALOGE("convertI420ToEncoderInput failed"); - } - - // switch to new buffer - buffer->release(); - buffer = newBuffer; - } - } - // Set the metadata buffer->meta_data()->setInt64(kKeyTime, (int64_t)(Cts*1000)); } diff --git a/frameworks/videoedit/vss/Android.mk b/frameworks/videoedit/vss/Android.mk new file mode 100644 index 0000000..cf3af92 --- /dev/null +++ b/frameworks/videoedit/vss/Android.mk @@ -0,0 +1,67 @@ +# +# Copyright (C) 2011 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_MODULE:= libvss_intel + +LOCAL_COPY_HEADERS_TO := videoeditornv12 + +LOCAL_COPY_HEADERS := M4xVSS_NV12.h \ + M4MCS_NV12.h \ + EditVideo_NV12.h + +LOCAL_SRC_FILES:= \ + M4AIR_API_NV12.c \ + M4MCS_VideoPreProcessing_NV12.c \ + M4VSS3GPP_EditVideo_NV12.c \ + M4xVSS_internal_NV12.c \ + +LOCAL_MODULE_TAGS := optional + + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + libutils \ + libvideoeditor_osal \ + +LOCAL_STATIC_LIBRARIES := \ + liblvpp_intel \ + + +LOCAL_C_INCLUDES += \ + $(TOP)/frameworks/av/libvideoeditor/osal/inc \ + $(TOP)/frameworks/av/libvideoeditor/vss/common/inc \ + $(TOP)/frameworks/av/libvideoeditor/vss/mcs/inc \ + $(TOP)/frameworks/av/libvideoeditor/vss/inc \ + $(TOP)/frameworks/av/libvideoeditor/vss/stagefrightshells/inc \ + $(TOP)/frameworks/av/libvideoeditor/lvpp \ + $(TOP)/frameworks/native/include/media/openmax \ + $(TARGET_OUT_HEADERS)/videoeditornv12 + +LOCAL_SHARED_LIBRARIES += libdl + +LOCAL_CFLAGS += -DDECODE_GIF_ON_SAVING \ + -DVIDEOEDITOR_INTEL_NV12_VERSION + +# All of the shared libraries we link against. +LOCAL_LDLIBS := \ + -lpthread -ldl + +include $(BUILD_STATIC_LIBRARY) + diff --git a/frameworks/videoedit/vss/EditVideo_NV12.h b/frameworks/videoedit/vss/EditVideo_NV12.h new file mode 100644 index 0000000..1af74bd --- /dev/null +++ b/frameworks/videoedit/vss/EditVideo_NV12.h @@ -0,0 +1,39 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#ifndef EDITVIDEO_NV12_H +#define EDITVIDEO_NV12_H + +M4OSA_ERR M4VSS3GPP_intSetNv12PlaneFromARGB888( + M4VSS3GPP_InternalEditContext *pC, M4VSS3GPP_ClipContext* pClipCtxt); + +M4OSA_ERR M4VSS3GPP_intRotateVideo_NV12(M4VIFI_ImagePlane* pPlaneIn, + M4OSA_UInt32 rotationDegree); + +M4OSA_ERR M4VSS3GPP_intApplyRenderingMode_NV12(M4VSS3GPP_InternalEditContext *pC, + M4xVSS_MediaRendering renderingMode, M4VIFI_ImagePlane* pInplane, + M4VIFI_ImagePlane* pOutplane); + +unsigned char M4VFL_modifyLumaWithScale_NV12(M4ViComImagePlane *plane_in, + M4ViComImagePlane *plane_out, unsigned long lum_factor, + void *user_data); + +unsigned char M4VIFI_ImageBlendingonNV12 (void *pUserData, + M4ViComImagePlane *pPlaneIn1, M4ViComImagePlane *pPlaneIn2, + M4ViComImagePlane *pPlaneOut, UInt32 Progress); + +#endif diff --git a/frameworks/videoedit/vss/M4AIR_API_NV12.c b/frameworks/videoedit/vss/M4AIR_API_NV12.c new file mode 100644 index 0000000..fd87c89 --- /dev/null +++ b/frameworks/videoedit/vss/M4AIR_API_NV12.c @@ -0,0 +1,1181 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + ************************************************************************* + * @file M4AIR_API_NV12.c + * @brief Area of Interest Resizer API + ************************************************************************* + */ + +#define M4AIR_YUV420_FORMAT_SUPPORTED +#define M4AIR_YUV420A_FORMAT_SUPPORTED +#define M4AIR_NV12_FORMAT_SUPPORTED +#define M4AIR_NV12A_FORMAT_SUPPORTED + +/******************************* INCLUDES *******************************/ +#include "M4OSA_Types.h" +#include "M4OSA_Error.h" +#include "M4OSA_CoreID.h" +#include "M4OSA_Mutex.h" +#include "M4OSA_Memory.h" +#include "M4VIFI_FiltersAPI.h" +#include "M4AIR_API.h" +#include "M4OSA_Debug.h" +#include "M4AIR_API_NV12.h" + +/** + ****************************************************************************** + * M4OSA_ERR M4AIR_create_NV12(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat) + * @brief This function initialize an instance of the AIR. + * @param pContext: (IN/OUT) Address of the context to create + * @param inputFormat: (IN) input format type. + * @return M4NO_ERROR: there is no error + * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). 
Invalid formatType + * @return M4ERR_ALLOC: No more memory is available + ****************************************************************************** + */ + +/************************ M4AIR INTERNAL TYPES DEFINITIONS ***********************/ + +/** + ****************************************************************************** + * enum M4AIR_States + * @brief The following enumeration defines the internal states of the AIR. + ****************************************************************************** + */ +typedef enum +{ + M4AIR_kCreated, /**< State after M4AIR_create has been called */ + M4AIR_kConfigured /**< State after M4AIR_configure has been called */ +}M4AIR_States; + + +/** + ****************************************************************************** + * struct M4AIR_InternalContext + * @brief The following structure is the internal context of the AIR. + ****************************************************************************** + */ +typedef struct +{ + M4AIR_States m_state; /**< Internal state */ + M4AIR_InputFormatType m_inputFormat; /**< Input format like YUV420Planar, + RGB565, JPG, etc ... */ + M4AIR_Params m_params; /**< Current input Parameter of the processing */ + M4OSA_UInt32 u32_x_inc[4]; /**< ratio between input and ouput width for YUV */ + M4OSA_UInt32 u32_y_inc[4]; /**< ratio between input and ouput height for YUV */ + M4OSA_UInt32 u32_x_accum_start[4]; /**< horizontal initial accumulator value */ + M4OSA_UInt32 u32_y_accum_start[4]; /**< Vertical initial accumulator value */ + M4OSA_UInt32 u32_x_accum[4]; /**< save of horizontal accumulator value */ + M4OSA_UInt32 u32_y_accum[4]; /**< save of vertical accumulator value */ + M4OSA_UInt8* pu8_data_in[4]; /**< Save of input plane pointers + in case of stripe mode */ + M4OSA_UInt32 m_procRows; /**< Number of processed rows, + used in stripe mode only */ + M4OSA_Bool m_bOnlyCopy; /**< Flag to know if we just perform a copy + or a bilinear interpolation */ + M4OSA_Bool m_bFlipX; /**< Depend on output orientation, used during + processing to revert processing order in X + coordinates */ + M4OSA_Bool m_bFlipY; /**< Depend on output orientation, used during + processing to revert processing order in Y + coordinates */ + M4OSA_Bool m_bRevertXY; /**< Depend on output orientation, used during + processing to revert X and Y processing order + (+-90?rotation) */ +}M4AIR_InternalContext; + +/********************************* MACROS *******************************/ +#define M4ERR_CHECK_NULL_RETURN_VALUE(retval, pointer)\ + if ((pointer) == M4OSA_NULL) return ((M4OSA_ERR)(retval)); + + +M4OSA_ERR M4AIR_create_NV12(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat) +{ + M4OSA_ERR err = M4NO_ERROR ; + M4AIR_InternalContext* pC = M4OSA_NULL ; + + /* Check that the address on the context is not NULL */ + M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ; + + *pContext = M4OSA_NULL ; + + /* Internal Context creation */ + pC = (M4AIR_InternalContext*)M4OSA_32bitAlignedMalloc(sizeof(M4AIR_InternalContext), + M4AIR,(M4OSA_Char *)"AIR internal context") ; + M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_ALLOC, pC) ; + + + /* Check if the input format is supported */ + switch(inputFormat) + { +#ifdef M4AIR_NV12_FORMAT_SUPPORTED + case M4AIR_kNV12P: + break ; +#endif +#ifdef M4AIR_NV12A_FORMAT_SUPPORTED + case M4AIR_kNV12AP: + break ; +#endif + +#ifdef M4AIR_YUV420_FORMAT_SUPPORTED + case M4AIR_kYUV420P: + break ; +#endif +#ifdef M4AIR_YUV420A_FORMAT_SUPPORTED + case M4AIR_kYUV420AP: + break ; +#endif + default: + err = 
M4ERR_AIR_FORMAT_NOT_SUPPORTED; + goto M4AIR_create_cleanup ; + } + + /**< Save input format and update state */ + pC->m_inputFormat = inputFormat; + pC->m_state = M4AIR_kCreated; + + /* Return the context to the caller */ + *pContext = pC ; + + return M4NO_ERROR ; + +M4AIR_create_cleanup: + /* Error management : we destroy the context if needed */ + if(M4OSA_NULL != pC) + { + free(pC) ; + } + + *pContext = M4OSA_NULL ; + + return err ; +} + + +/** + ****************************************************************************** + * M4OSA_ERR M4AIR_cleanUp_NV12(M4OSA_Context pContext) + * @brief This function destroys an instance of the AIR component + * @param pContext: (IN) Context identifying the instance to destroy + * @return M4NO_ERROR: there is no error + * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). + * @return M4ERR_STATE: Internal state is incompatible with this function call. + ****************************************************************************** + */ +M4OSA_ERR M4AIR_cleanUp_NV12(M4OSA_Context pContext) +{ + M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ; + + M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ; + + /**< Check state */ + if((M4AIR_kCreated != pC->m_state)&&(M4AIR_kConfigured != pC->m_state)) + { + return M4ERR_STATE; + } + free(pC) ; + + return M4NO_ERROR ; + +} + +/** + ****************************************************************************** + * M4OSA_ERR M4AIR_configure_NV12(M4OSA_Context pContext, M4AIR_Params* pParams) + * @brief This function will configure the AIR. + * @note It will set the input and output coordinates and sizes, + * and indicates if we will proceed in stripe or not. + * In case a M4AIR_get in stripe mode was on going, it will cancel this previous + * processing and reset the get process. + * @param pContext: (IN) Context identifying the instance + * @param pParams->m_bOutputStripe:(IN) Stripe mode. + * @param pParams->m_inputCoord: (IN) X,Y coordinates of the first valid pixel in input. + * @param pParams->m_inputSize: (IN) input ROI size. + * @param pParams->m_outputSize: (IN) output size. + * @return M4NO_ERROR: there is no error + * @return M4ERR_ALLOC: No more memory space to add a new effect. + * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). + * @return M4ERR_AIR_FORMAT_NOT_SUPPORTED: the requested input format is not supported. 
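+ * Illustrative call (editorial; the numeric values are made up):
+ *     M4AIR_Params p;
+ *     p.m_inputCoord.m_x = 0;  p.m_inputCoord.m_y = 0;
+ *     p.m_inputSize.m_width = 640;  p.m_inputSize.m_height = 480;
+ *     p.m_outputSize.m_width = 320; p.m_outputSize.m_height = 240;
+ *     p.m_bOutputStripe = M4OSA_FALSE;
+ *     p.m_outputOrientation = M4COMMON_kOrientationTopLeft;
+ *     err = M4AIR_configure_NV12(pContext, &p);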
+ ****************************************************************************** + */ +M4OSA_ERR M4AIR_configure_NV12(M4OSA_Context pContext, M4AIR_Params* pParams) +{ + M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ; + M4OSA_UInt32 i,u32_width_in, u32_width_out, u32_height_in, u32_height_out; + M4OSA_UInt32 nb_planes; + + M4OSA_TRACE1_0("M4AIR_configure_NV12 start"); + M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext); + + if(M4AIR_kNV12P == pC->m_inputFormat) + { + nb_planes = 2; + } + else + { + nb_planes = 3; + } + + /**< Check state */ + if((M4AIR_kCreated != pC->m_state)&&(M4AIR_kConfigured != pC->m_state)) + { + return M4ERR_STATE; + } + + /** Save parameters */ + pC->m_params = *pParams; + + /* Check for the input&output width and height are even */ + if(((pC->m_params.m_inputSize.m_height)&0x1) || + ((pC->m_params.m_outputSize.m_height)&0x1)) + { + return M4ERR_AIR_ILLEGAL_FRAME_SIZE; + } + + if(((pC->m_params.m_inputSize.m_width)&0x1)|| + ((pC->m_params.m_outputSize.m_width)&0x1)) + { + return M4ERR_AIR_ILLEGAL_FRAME_SIZE; + } + if(((pC->m_params.m_inputSize.m_width) == (pC->m_params.m_outputSize.m_width)) + &&((pC->m_params.m_inputSize.m_height) == (pC->m_params.m_outputSize.m_height))) + { + /**< No resize in this case, we will just copy input in output */ + pC->m_bOnlyCopy = M4OSA_TRUE; + } + else + { + pC->m_bOnlyCopy = M4OSA_FALSE; + + /**< Initialize internal variables used for resize filter */ + for(i=0;im_params.m_inputSize.m_width; + u32_height_in = ((i==0)||(i==2))?pC->m_params.m_inputSize.m_height:\ + (pC->m_params.m_inputSize.m_height+1)>>1; + u32_width_out = pC->m_params.m_outputSize.m_width; + u32_height_out = ((i==0)||(i==2))?pC->m_params.m_outputSize.m_height:\ + (pC->m_params.m_outputSize.m_height+1)>>1; + + M4OSA_TRACE1_4("u32_width_in =%d, u32_height_in = %d, u32_width_out = %d, u32_height_out = %d",\ + u32_width_in, u32_height_in, u32_width_out, u32_height_out); + + /* Compute horizontal ratio between src and destination width.*/ + if (u32_width_out >= u32_width_in) + { + if (i == 1) + { + pC->u32_x_inc[i] = ((u32_width_in-2) * 0x10000) / (u32_width_out-2); + } + else + { + pC->u32_x_inc[i] = ((u32_width_in-1) * 0x10000) / (u32_width_out-1); + } + } + else + { + pC->u32_x_inc[i] = (u32_width_in * 0x10000) / (u32_width_out); + } + + /* Compute vertical ratio between src and destination height.*/ + if (u32_height_out >= u32_height_in) + { + pC->u32_y_inc[i] = ((u32_height_in - 1) * 0x10000) / (u32_height_out-1); + } + else + { + pC->u32_y_inc[i] = (u32_height_in * 0x10000) / (u32_height_out); + } + + /* + Calculate initial accumulator value : u32_y_accum_start. + u32_y_accum_start is coded on 15 bits, and represents a value between 0 and 0.5 + */ + if (pC->u32_y_inc[i] >= 0x10000) + { + /* + Keep the fractionnal part, assimung that integer part is coded + on the 16 high bits and the fractionnal on the 15 low bits + */ + pC->u32_y_accum_start[i] = pC->u32_y_inc[i] & 0xffff; + + if (!pC->u32_y_accum_start[i]) + { + pC->u32_y_accum_start[i] = 0x10000; + } + + pC->u32_y_accum_start[i] >>= 1; + } + else + { + pC->u32_y_accum_start[i] = 0; + } + /**< Take into account that Y coordinate can be odd + in this case we have to put a 0.5 offset + for U and V plane as there a 2 times sub-sampled vs Y*/ + if((pC->m_params.m_inputCoord.m_y&0x1)&&(i==1)) + { + pC->u32_y_accum_start[i] += 0x8000; + } + + /* + Calculate initial accumulator value : u32_x_accum_start. 
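+        (Editorial worked example: a 1.5x horizontal downscale gives
+        u32_x_inc = 0x18000; its fractional part is 0x8000, and after
+        the >>1 below the starting accumulator is 0x4000, i.e. 0.25 in
+        16.16 fixed point.)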
+ u32_x_accum_start is coded on 15 bits, and represents a value between + 0 and 0.5 + */ + + if (pC->u32_x_inc[i] >= 0x10000) + { + pC->u32_x_accum_start[i] = pC->u32_x_inc[i] & 0xffff; + + if (!pC->u32_x_accum_start[i]) + { + pC->u32_x_accum_start[i] = 0x10000; + } + + pC->u32_x_accum_start[i] >>= 1; + } + else + { + pC->u32_x_accum_start[i] = 0; + } + /**< Take into account that X coordinate can be odd + in this case we have to put a 0.5 offset + for U and V plane as there a 2 times sub-sampled vs Y*/ + if((pC->m_params.m_inputCoord.m_x&0x1)&&(i==1)) + { + pC->u32_x_accum_start[i] += 0x8000; + } + + M4OSA_TRACE1_4("u32_x_inc = 0x%x, u32_y_inc = 0x%x, u32_x_accum_start = 0x%x, u32_y_accum_start = 0x%x",\ + pC->u32_x_inc[i], pC->u32_y_inc[i], \ + pC->u32_x_accum_start[i], pC->u32_y_accum_start[i]); + } + } + + /**< Reset variable used for stripe mode */ + pC->m_procRows = 0; + + /**< Initialize var for X/Y processing order according to orientation */ + pC->m_bFlipX = M4OSA_FALSE; + pC->m_bFlipY = M4OSA_FALSE; + pC->m_bRevertXY = M4OSA_FALSE; + switch(pParams->m_outputOrientation) + { + case M4COMMON_kOrientationTopLeft: + break; + case M4COMMON_kOrientationTopRight: + pC->m_bFlipX = M4OSA_TRUE; + break; + case M4COMMON_kOrientationBottomRight: + pC->m_bFlipX = M4OSA_TRUE; + pC->m_bFlipY = M4OSA_TRUE; + break; + case M4COMMON_kOrientationBottomLeft: + pC->m_bFlipY = M4OSA_TRUE; + break; + case M4COMMON_kOrientationLeftTop: + pC->m_bRevertXY = M4OSA_TRUE; + break; + case M4COMMON_kOrientationRightTop: + pC->m_bRevertXY = M4OSA_TRUE; + pC->m_bFlipY = M4OSA_TRUE; + break; + case M4COMMON_kOrientationRightBottom: + pC->m_bRevertXY = M4OSA_TRUE; + pC->m_bFlipX = M4OSA_TRUE; + pC->m_bFlipY = M4OSA_TRUE; + break; + case M4COMMON_kOrientationLeftBottom: + pC->m_bRevertXY = M4OSA_TRUE; + pC->m_bFlipX = M4OSA_TRUE; + break; + default: + return M4ERR_PARAMETER; + } + /**< Update state */ + pC->m_state = M4AIR_kConfigured; + + return M4NO_ERROR ; +} + + +/** + ****************************************************************************** + * M4OSA_ERR M4AIR_get_NV12(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut) + * @brief This function will provide the requested resized area of interest according to + * settings provided in M4AIR_configure. + * @note In case the input format type is JPEG, input plane(s) + * in pIn is not used. In normal mode, dimension specified in output plane(s) structure + * must be the same than the one specified in M4AIR_configure. In stripe mode, only the + * width will be the same, height will be taken as the stripe height (typically 16). + * In normal mode, this function is call once to get the full output picture. + * In stripe mode, it is called for each stripe till the whole picture has been + * retrieved,and the position of the output stripe in the output picture + * is internally incremented at each step. + * Any call to M4AIR_configure during stripe process will reset this one to the + * beginning of the output picture. + * @param pContext: (IN) Context identifying the instance + * @param pIn: (IN) Plane structure containing input Plane(s). + * @param pOut: (IN/OUT) Plane structure containing output Plane(s). + * @return M4NO_ERROR: there is no error + * @return M4ERR_ALLOC: No more memory space to add a new effect. + * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). 
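+ * Stripe-mode example (editorial): with a 320x240 output retrieved in
+ * 16-line stripes, M4AIR_get_NV12 is called 240/16 = 15 times; each call
+ * fills pOut with one stripe and the saved input pointers
+ * (pu8_data_in[]) advance in the internal context between calls.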
+ ****************************************************************************** + */ +M4OSA_ERR M4AIR_get_NV12(M4OSA_Context pContext, + M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut) +{ + M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ; + M4OSA_UInt32 i,j,k,u32_x_frac,u32_y_frac,u32_x_accum,u32_y_accum,u32_shift; + M4OSA_UInt8 *pu8_data_in, *pu8_data_in_org, *pu8_data_in_tmp, *pu8_data_out; + M4OSA_UInt8 *pu8_src_top; + M4OSA_UInt8 *pu8_src_bottom; + M4OSA_UInt32 u32_temp_value; + M4OSA_Int32 i32_tmp_offset; + M4OSA_UInt32 nb_planes; + + M4OSA_TRACE1_0("M4AIR_get_NV12 start"); + + M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ; + + /**< Check state */ + if(M4AIR_kConfigured != pC->m_state) + { + return M4ERR_STATE; + } + + if(M4AIR_kNV12P == pC->m_inputFormat) + { + nb_planes = 2; + } + else + { + nb_planes = 3; + } + + /**< Loop on each Plane */ + for(i=0;im_params.m_inputCoord.m_x,pC->m_params.m_inputCoord.m_y); + + if((M4OSA_FALSE == pC->m_params.m_bOutputStripe)\ + ||((M4OSA_TRUE == pC->m_params.m_bOutputStripe)&&(0 == pC->m_procRows))) + { + /**< For input, take care about ROI */ + pu8_data_in = pIn[i].pac_data + ((pIn[i].u_topleft >> u32_shift) << u32_shift) + + ((pC->m_params.m_inputCoord.m_x >> u32_shift) << u32_shift) + + (pC->m_params.m_inputCoord.m_y >> u32_shift) * pIn[i].u_stride; + + /** Go at end of line/column in case X/Y scanning is flipped */ + if(M4OSA_TRUE == pC->m_bFlipX) + { + pu8_data_in += pC->m_params.m_inputSize.m_width -1 ; + } + if(M4OSA_TRUE == pC->m_bFlipY) + { + pu8_data_in += ((pC->m_params.m_inputSize.m_height>>u32_shift) -1)\ + * pIn[i].u_stride; + } + + /**< Initialize accumulators in case we are using it (bilinear interpolation) */ + if( M4OSA_FALSE == pC->m_bOnlyCopy) + { + pC->u32_x_accum[i] = pC->u32_x_accum_start[i]; + pC->u32_y_accum[i] = pC->u32_y_accum_start[i]; + } + + } + else + { + /**< In case of stripe mode for other than first stripe, we need to recover input + pointer from internal context */ + pu8_data_in = pC->pu8_data_in[i]; + } + + /**< In every mode, output data are at the beginning of the output plane */ + pu8_data_out = pOut[i].pac_data + ((pOut[i].u_topleft >> u32_shift) << u32_shift); + + M4OSA_TRACE1_2("pOut[%d].u_topleft = %d",i,pOut[i].u_topleft); + + /**< Initialize input offset applied after each pixel */ + if(M4OSA_FALSE == pC->m_bFlipY) + { + i32_tmp_offset = pIn[i].u_stride; + } + else + { + i32_tmp_offset = -pIn[i].u_stride; + } + + /**< In this case, no bilinear interpolation is needed as input and output dimensions + are the same */ + if( M4OSA_TRUE == pC->m_bOnlyCopy) + { + /**< No +-90?rotation */ + if(M4OSA_FALSE == pC->m_bRevertXY) + { + /**< No flip on X abscissa */ + if(M4OSA_FALSE == pC->m_bFlipX) + { + /**< Loop on each row */ + for(j=0;jm_bFlipY) + { + pu8_data_in += pIn[i].u_stride; + } + else + { + pu8_data_in -= pIn[i].u_stride; + } + } + } + else + { + /**< Loop on each row */ + for(j=0;jm_bFlipX) + { + pu8_data_in ++; + } + else + { + pu8_data_in --; + } + } + } + } + /**< Bilinear interpolation */ + else + { + if(0 == i) /**< Y plane */ + { + /**No +-90?rotation */ + if(M4OSA_FALSE == pC->m_bRevertXY) + { + /**< Loop on each row */ + for(j=0;ju32_y_accum[i]>>12)&15; + + /* Reinit horizontal weight factor */ + u32_x_accum = pC->u32_x_accum_start[i]; + + + if(M4OSA_TRUE == pC->m_bFlipX) + { + + /**< Loop on each output pixel in a row */ + for(k=0;k> 12)&15; /* Fraction of Horizontal + weight factor */ + + pu8_src_top = (pu8_data_in - (u32_x_accum >> 16)) -1 ; + + pu8_src_bottom = 
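+                        /* Note (editorial): in this flipped-X path the
+                           source pointer walks backwards, so the [1]/[0]
+                           taps are swapped relative to the non-flipped
+                           loop below; i32_tmp_offset is +/- the input
+                           stride depending on m_bFlipY. */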
pu8_src_top + i32_tmp_offset; + + /* Weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) + + pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[1]*(16-u32_x_frac) + + pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update horizontal accumulator */ + u32_x_accum += pC->u32_x_inc[i]; + } + } + else + { + /**< Loop on each output pixel in a row */ + for(k=0;k> 12)&15; /* Fraction of Horizontal + weight factor */ + + pu8_src_top = pu8_data_in + (u32_x_accum >> 16); + + pu8_src_bottom = pu8_src_top + i32_tmp_offset; + + /* Weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + + pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[0]*(16-u32_x_frac) + + pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8); + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update horizontal accumulator */ + u32_x_accum += pC->u32_x_inc[i]; + } + } + + pu8_data_out += pOut[i].u_stride - pOut[i].u_width; + + /* Update vertical accumulator */ + pC->u32_y_accum[i] += pC->u32_y_inc[i]; + if (pC->u32_y_accum[i]>>16) + { + pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset; + pC->u32_y_accum[i] &= 0xffff; + } + } + } + /** +-90?rotation */ + else + { + pu8_data_in_org = pu8_data_in; + + /**< Loop on each output row */ + for(j=0;ju32_x_accum[i]>>12)&15; + + /* Reinit accumulator */ + u32_y_accum = pC->u32_y_accum_start[i]; + + if (M4OSA_TRUE == pC->m_bFlipX) + { + /**< Loop on each output pixel in a row */ + for(k=0;k> 12)&15; /* Vertical weight factor */ + + pu8_src_top = (pu8_data_in - (pC->u32_x_accum[i] >> 16)) - 1; + + pu8_src_bottom = pu8_src_top + i32_tmp_offset; + + /* Weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) + + pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[1]*(16-u32_x_frac) + + pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update vertical accumulator */ + u32_y_accum += pC->u32_y_inc[i]; + if (u32_y_accum>>16) + { + pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset; + u32_y_accum &= 0xffff; + } + } + } + else + { + /**< Loop on each output pixel in a row */ + for(k=0;k> 12)&15; /* Vertical weight factor */ + + pu8_src_top = pu8_data_in + (pC->u32_x_accum[i] >> 16); + + pu8_src_bottom = pu8_src_top + i32_tmp_offset; + + /* Weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + + pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[0]*(16-u32_x_frac) + + pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8); + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update vertical accumulator */ + u32_y_accum += pC->u32_y_inc[i]; + if (u32_y_accum>>16) + { + pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset; + u32_y_accum &= 0xffff; + } + } + } + pu8_data_out += pOut[i].u_stride - pOut[i].u_width; + + /* Update horizontal accumulator */ + pC->u32_x_accum[i] += pC->u32_x_inc[i]; + pu8_data_in = pu8_data_in_org; + } + } + } + else if(1 == i) /**< U&V plane */ + { + /**No +-90?rotation */ + if(M4OSA_FALSE == pC->m_bRevertXY) + { + /**< Loop on each row */ + for(j=0;ju32_y_accum[i]>>12)&15; + + /* Reinit horizontal weight factor */ + u32_x_accum = pC->u32_x_accum_start[i]; + + if(M4OSA_TRUE == pC->m_bFlipX) + { + + /**< Loop on each output pixel in a row */ + for(k=0;k> 12)&15; /* Fraction of Horizontal + weight factor */ + + pu8_src_top = 
(pu8_data_in - ((u32_x_accum >> 16) << 1)) -2 ; + + pu8_src_bottom = pu8_src_top + i32_tmp_offset; + + /* U plane weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[2]*(16-u32_x_frac) + + pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[2]*(16-u32_x_frac) + + pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + + // handle V plane + pu8_src_top = pu8_src_top + 1 ; + + pu8_src_bottom = pu8_src_bottom + 1; + + /* V plane weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[2]*(16-u32_x_frac) + + pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[2]*(16-u32_x_frac) + + pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update horizontal accumulator */ + u32_x_accum += pC->u32_x_inc[i]; + } + } + + else + { + /**< Loop on each output pixel in a row */ + for(k=0;k> 12)&15; /* Fraction of Horizontal + weight factor */ + + pu8_src_top = pu8_data_in + ((u32_x_accum >> 16) << 1); + + pu8_src_bottom = pu8_src_top + i32_tmp_offset; + + /* U plane weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + + pu8_src_top[2]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[0]*(16-u32_x_frac) + + pu8_src_bottom[2]*u32_x_frac)*u32_y_frac )>>8); + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + // handle V plane + pu8_src_top = pu8_src_top + 1; + + pu8_src_bottom = pu8_src_bottom + 1; + + /* V plane weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + + pu8_src_top[2]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[0]*(16-u32_x_frac) + + pu8_src_bottom[2]*u32_x_frac)*u32_y_frac )>>8); + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update horizontal accumulator */ + u32_x_accum += pC->u32_x_inc[i]; + } + } + + pu8_data_out += pOut[i].u_stride - pOut[i].u_width; + + /* Update vertical accumulator */ + pC->u32_y_accum[i] += pC->u32_y_inc[i]; + if (pC->u32_y_accum[i]>>16) + { + pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset; + pC->u32_y_accum[i] &= 0xffff; + } + } + } + /** +-90?rotation */ + else + { + pu8_data_in_org = pu8_data_in; + + /**< Loop on each output row */ + for(j=0;ju32_x_accum[i]>>12)&15; + + /* Reinit accumulator */ + u32_y_accum = pC->u32_y_accum_start[i]; + + if(M4OSA_TRUE == pC->m_bFlipX) + { + + /**< Loop on each output pixel in a row */ + for(k=0;k> 12)&15; /* Vertical weight factor */ + + + pu8_src_top = (pu8_data_in - ((pC->u32_x_accum[i] >> 16) << 1)) - 2; + + pu8_src_bottom = pu8_src_top + i32_tmp_offset; + + /* Weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[2]*(16-u32_x_frac) + + pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[2]*(16-u32_x_frac) + + pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + // handle V plane + u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */ + + + pu8_src_top = pu8_src_top + 1; + + pu8_src_bottom = pu8_src_bottom + 1; + + /* Weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[2]*(16-u32_x_frac) + + pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[2]*(16-u32_x_frac) + + pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update vertical accumulator */ + u32_y_accum += pC->u32_y_inc[i]; + if (u32_y_accum>>16) + { + pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset; + 
u32_y_accum &= 0xffff; + } + + } + } + else + { + /**< Loop on each output pixel in a row */ + for(k=0;k> 12)&15; /* Vertical weight factor */ + + pu8_src_top = pu8_data_in + ((pC->u32_x_accum[i] >> 16) << 1); + + pu8_src_bottom = pu8_src_top + i32_tmp_offset; + + /* Weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + + pu8_src_top[2]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[0]*(16-u32_x_frac) + + pu8_src_bottom[2]*u32_x_frac)*u32_y_frac )>>8); + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + // handle V plane + pu8_src_top = pu8_src_top + 1; + + pu8_src_bottom = pu8_src_bottom + 1; + + /* Weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + + pu8_src_top[2]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[0]*(16-u32_x_frac) + + pu8_src_bottom[2]*u32_x_frac)*u32_y_frac )>>8); + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update vertical accumulator */ + u32_y_accum += pC->u32_y_inc[i]; + if (u32_y_accum>>16) + { + pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset; + u32_y_accum &= 0xffff; + } + } + } + pu8_data_out += pOut[i].u_stride - pOut[i].u_width; + + /* Update horizontal accumulator */ + pC->u32_x_accum[i] += pC->u32_x_inc[i]; + pu8_data_in = pu8_data_in_org; + } + } + } + else /**< alpha plane */ + { + /**No +-90?rotation */ + if(M4OSA_FALSE == pC->m_bRevertXY) + { + + /**< Loop on each row */ + for(j=0;ju32_y_accum[i]>>12)&15; + + /* Reinit horizontal weight factor */ + u32_x_accum = pC->u32_x_accum_start[i]; + + if(M4OSA_TRUE == pC->m_bFlipX) + { + + /**< Loop on each output pixel in a row */ + for(k=0;k> 12)&15; /* Fraction of Horizontal + weight factor */ + + pu8_src_top = (pu8_data_in - (u32_x_accum >> 16)) -1 ; + + pu8_src_bottom = pu8_src_top + i32_tmp_offset; + + /* Weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) + + pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[1]*(16-u32_x_frac) + + pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); + + u32_temp_value= (u32_temp_value >> 7)*0xff; + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update horizontal accumulator */ + u32_x_accum += pC->u32_x_inc[i]; + } + } + + else + { + /**< Loop on each output pixel in a row */ + for(k=0;k> 12)&15; /* Fraction of Horizontal + weight factor */ + + pu8_src_top = pu8_data_in + (u32_x_accum >> 16); + + pu8_src_bottom = pu8_src_top + i32_tmp_offset; + + /* Weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + + pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[0]*(16-u32_x_frac) + + pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8); + + u32_temp_value= (u32_temp_value >> 7)*0xff; + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update horizontal accumulator */ + u32_x_accum += pC->u32_x_inc[i]; + } + + } + + pu8_data_out += pOut[i].u_stride - pOut[i].u_width; + + /* Update vertical accumulator */ + pC->u32_y_accum[i] += pC->u32_y_inc[i]; + if (pC->u32_y_accum[i]>>16) + { + pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset; + pC->u32_y_accum[i] &= 0xffff; + } + } + + } /**< M4OSA_FALSE == pC->m_bRevertXY */ + /** +-90?rotation */ + else + { + pu8_data_in_org = pu8_data_in; + + /**< Loop on each output row */ + for(j=0;ju32_x_accum[i]>>12)&15; + + /* Reinit accumulator */ + u32_y_accum = pC->u32_y_accum_start[i]; + + if(M4OSA_TRUE == pC->m_bFlipX) + { + + /**< Loop on each output pixel in a row */ + for(k=0;k> 12)&15; /* Vertical 
weight factor */ + + pu8_src_top = (pu8_data_in - (pC->u32_x_accum[i] >> 16)) - 1; + + pu8_src_bottom = pu8_src_top + i32_tmp_offset; + + /* Weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) + + pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[1]*(16-u32_x_frac) + + pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); + + u32_temp_value= (u32_temp_value >> 7)*0xff; + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update vertical accumulator */ + u32_y_accum += pC->u32_y_inc[i]; + if (u32_y_accum>>16) + { + pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset; + u32_y_accum &= 0xffff; + } + } + } + else + { + /**< Loop on each output pixel in a row */ + for(k=0;k> 12)&15; /* Vertical weight factor */ + + pu8_src_top = pu8_data_in + (pC->u32_x_accum[i] >> 16); + + pu8_src_bottom = pu8_src_top + i32_tmp_offset; + + /* Weighted combination */ + u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + + pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) + + (pu8_src_bottom[0]*(16-u32_x_frac) + + pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8); + + u32_temp_value= (u32_temp_value >> 7)*0xff; + + *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + + /* Update vertical accumulator */ + u32_y_accum += pC->u32_y_inc[i]; + if (u32_y_accum>>16) + { + pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset; + u32_y_accum &= 0xffff; + } + } + } + pu8_data_out += pOut[i].u_stride - pOut[i].u_width; + + /* Update horizontal accumulator */ + pC->u32_x_accum[i] += pC->u32_x_inc[i]; + + pu8_data_in = pu8_data_in_org; + } + } /**< M4OSA_TRUE == pC->m_bRevertXY */ + }/** 2 == i */ + } + /**< In case of stripe mode, save current input pointer */ + if (M4OSA_TRUE == pC->m_params.m_bOutputStripe) + { + pC->pu8_data_in[i] = pu8_data_in; + } + } + + /**< Update number of processed rows, reset it if we have finished + with the whole processing */ + pC->m_procRows += pOut[0].u_height; + if(M4OSA_FALSE == pC->m_bRevertXY) + { + if(pC->m_params.m_outputSize.m_height <= pC->m_procRows) pC->m_procRows = 0; + } + else + { + if(pC->m_params.m_outputSize.m_width <= pC->m_procRows) pC->m_procRows = 0; + } + + return M4NO_ERROR ; + +} + + + diff --git a/frameworks/videoedit/vss/M4AIR_API_NV12.h b/frameworks/videoedit/vss/M4AIR_API_NV12.h new file mode 100644 index 0000000..aa04efc --- /dev/null +++ b/frameworks/videoedit/vss/M4AIR_API_NV12.h @@ -0,0 +1,112 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
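An illustrative aside on M4AIR_get_NV12 above: all of its scaling is driven by 16.16 fixed-point accumulators. Bits 31..16 of an accumulator select the source sample, and "(accum >> 12) & 15" extracts a 4-bit fraction used as a bilinear weight; because a horizontal and a vertical weight pair (each summing to 16) are multiplied together, the 2-D result is normalized with ">> 8". Below is a minimal one-dimensional sketch of that scheme, assuming a simple left-to-right walk; the helper name and the plain stdint types are invented for illustration and are not part of the patch.

    #include <stdint.h>

    /* 1-D sketch of the resizer's fixed-point walk: 'inc' is the 16.16
     * source step per output pixel, i.e. (src_width << 16) / dst_width.
     * The caller must guarantee src[idx + 1] stays in range, which the
     * real code arranges through its ROI and accumulator start values. */
    static void resample_row_16_16(const uint8_t *src, uint8_t *dst,
                                   uint32_t dst_width, uint32_t inc)
    {
        uint32_t accum = 0;
        for (uint32_t k = 0; k < dst_width; k++) {
            uint32_t idx  = accum >> 16;         /* integer source index  */
            uint32_t frac = (accum >> 12) & 15;  /* 4-bit bilinear weight */
            /* weighted combination of the two neighboring samples,
             * normalized by >> 4 because the two weights sum to 16 */
            dst[k] = (uint8_t)((src[idx] * (16 - frac) +
                                src[idx + 1] * frac) >> 4);
            accum += inc;  /* advance the source position */
        }
    }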
+ */
+/**
+*************************************************************************
+ * @file   M4AIR_API_NV12.h
+ * @brief  Area of Interest Resizer API
+ * @note
+*************************************************************************
+*/
+
+#ifndef M4AIR_API_NV12_H
+#define M4AIR_API_NV12_H
+/******************************* INCLUDES *******************************/
+#include "M4OSA_Types.h"
+#include "M4OSA_Error.h"
+#include "M4OSA_CoreID.h"
+#include "M4OSA_Mutex.h"
+#include "M4OSA_Memory.h"
+#include "M4VIFI_FiltersAPI.h"
+#include "M4Common_types.h"
+#include "M4AIR_API.h"
+
+
+/**
+ ******************************************************************************
+ * M4OSA_ERR M4AIR_create_NV12(M4OSA_Context* pContext, M4AIR_InputFormatType inputFormat);
+ * @brief   This function initializes an instance of the AIR.
+ * @param   pContext:    (IN/OUT) Address of the context to create
+ * @param   inputFormat: (IN) Input format type.
+ * @return  M4NO_ERROR: there is no error
+ * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only), or invalid formatType
+ * @return  M4ERR_ALLOC: no more memory is available
+ ******************************************************************************
+*/
+M4OSA_ERR M4AIR_create_NV12(M4OSA_Context* pContext, M4AIR_InputFormatType inputFormat);
+
+
+/**
+ ******************************************************************************
+ * M4OSA_ERR M4AIR_cleanUp_NV12(M4OSA_Context pContext)
+ * @brief   This function destroys an instance of the AIR component.
+ * @param   pContext: (IN) Context identifying the instance to destroy
+ * @return  M4NO_ERROR: there is no error
+ * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).
+ * @return  M4ERR_STATE: internal state is incompatible with this function call.
+ ******************************************************************************
+*/
+M4OSA_ERR M4AIR_cleanUp_NV12(M4OSA_Context pContext);
+
+
+/**
+ ******************************************************************************
+ * M4OSA_ERR M4AIR_configure_NV12(M4OSA_Context pContext, M4AIR_Params* pParams)
+ * @brief   This function configures the AIR.
+ * @note    It sets the input and output coordinates and sizes,
+ *          and indicates whether processing will proceed in stripe mode.
+ *          If an M4AIR_get in stripe mode was ongoing, it cancels that previous
+ *          processing and resets the get process.
+ * @param   pContext:                 (IN) Context identifying the instance
+ * @param   pParams->m_bOutputStripe: (IN) Stripe mode.
+ * @param   pParams->m_inputCoord:    (IN) X,Y coordinates of the first valid pixel in input.
+ * @param   pParams->m_inputSize:     (IN) Input ROI size.
+ * @param   pParams->m_outputSize:    (IN) Output size.
+ * @return  M4NO_ERROR: there is no error
+ * @return  M4ERR_ALLOC: no more memory space to add a new effect.
+ * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).
+ * @return  M4ERR_AIR_FORMAT_NOT_SUPPORTED: the requested input format is not supported.
+ ******************************************************************************
+*/
+M4OSA_ERR M4AIR_configure_NV12(M4OSA_Context pContext, M4AIR_Params* pParams);
+
+
+/**
+ ******************************************************************************
+ * M4OSA_ERR M4AIR_get_NV12(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut)
+ * @brief   This function provides the requested resized area of interest according to
+ *          the settings provided in M4AIR_configure.
+ * @note    In case the input format type is JPEG, the input plane(s)
+ *          in pIn are not used.
In normal mode, the dimensions specified in the output plane(s) structure
+ *          must be the same as those specified in M4AIR_configure. In stripe mode, only
+ *          the width will be the same; the height will be taken as the stripe height
+ *          (typically 16).
+ *          In normal mode, this function is called once to get the full output picture.
+ *          In stripe mode, it is called for each stripe until the whole picture has been
+ *          retrieved, and the position of the output stripe in the output picture is
+ *          internally incremented at each step.
+ *          Any call to M4AIR_configure during the stripe process resets it to the
+ *          beginning of the output picture.
+ * @param   pContext: (IN) Context identifying the instance
+ * @param   pIn:      (IN) Plane structure containing the input plane(s).
+ * @param   pOut:     (IN/OUT) Plane structure containing the output plane(s).
+ * @return  M4NO_ERROR: there is no error
+ * @return  M4ERR_ALLOC: no more memory space to add a new effect.
+ * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).
+ ******************************************************************************
+*/
+M4OSA_ERR M4AIR_get_NV12(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut);
+
+#endif
+
diff --git a/frameworks/videoedit/vss/M4AIR_API_NV12.h b/frameworks/videoedit/vss/M4MCS_NV12.h
new file mode 100644
index 0000000..6ad804c
--- /dev/null
+++ b/frameworks/videoedit/vss/M4MCS_NV12.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef M4MCS_NV12_H
+#define M4MCS_NV12_H
+
+M4OSA_ERR M4MCS_intApplyVPP_NV12(M4VPP_Context pContext,
+    M4VIFI_ImagePlane* pPlaneIn, M4VIFI_ImagePlane* pPlaneOut);
+
+#endif
diff --git a/frameworks/videoedit/vss/M4MCS_VideoPreProcessing_NV12.c b/frameworks/videoedit/vss/M4MCS_VideoPreProcessing_NV12.c
new file mode 100644
index 0000000..b9d5cc1
--- /dev/null
+++ b/frameworks/videoedit/vss/M4MCS_VideoPreProcessing_NV12.c
@@ -0,0 +1,396 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
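An illustrative aside on the AIR API declared above: in stripe mode, M4AIR_get_NV12 is called once per output stripe until the whole picture has been produced. A hypothetical caller might look like the sketch below; demo_air_stripe, its parameters, and the surrounding error handling are assumptions for illustration, while the M4AIR_*_NV12 calls and the M4AIR_Params fields are exactly those declared in this patch.

    /* Sketch: resize a full NV12 frame stripe by stripe. pOutStripe describes
     * one stripe-high output buffer (e.g. 16 rows) that is reused per call. */
    static M4OSA_ERR demo_air_stripe(M4VIFI_ImagePlane *pIn,
                                     M4VIFI_ImagePlane *pOutStripe,
                                     M4OSA_UInt32 outHeight)
    {
        M4OSA_Context ctx = M4OSA_NULL;
        M4AIR_Params params;
        M4OSA_ERR err;
        M4OSA_UInt32 row;

        err = M4AIR_create_NV12(&ctx, M4AIR_kNV12P);
        if (M4NO_ERROR != err) return err;

        params.m_inputCoord.m_x = 0;                 /* full-frame ROI */
        params.m_inputCoord.m_y = 0;
        params.m_inputSize.m_width   = pIn[0].u_width;
        params.m_inputSize.m_height  = pIn[0].u_height;
        params.m_outputSize.m_width  = pOutStripe[0].u_width;
        params.m_outputSize.m_height = outHeight;    /* total output height */
        params.m_bOutputStripe       = M4OSA_TRUE;   /* stripe-by-stripe */
        params.m_outputOrientation   = M4COMMON_kOrientationTopLeft;

        err = M4AIR_configure_NV12(ctx, &params);

        /* Each get fills one stripe; the output position advances internally. */
        for (row = 0; (M4NO_ERROR == err) && (row < outHeight);
             row += pOutStripe[0].u_height) {
            err = M4AIR_get_NV12(ctx, pIn, pOutStripe);
            /* ... consume the stripe here ... */
        }

        M4AIR_cleanUp_NV12(ctx);
        return err;
    }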
+ */
+
+/*
+ ******************************************************************************
+ * M4OSA_ERR M4MCS_intApplyVPP_NV12(M4VPP_Context pContext, M4VIFI_ImagePlane* pPlaneIn,
+ *                                  M4VIFI_ImagePlane* pPlaneOut)
+ * @brief   Do the video rendering and the resize (if needed)
+ * @note    It is called by the video encoder
+ * @param   pContext  (IN) VPP context, which actually is the MCS internal context in our case
+ * @param   pPlaneIn  (IN) Contains the image
+ * @param   pPlaneOut (IN/OUT) Pointer to an array of 2 planes that will contain the output
+ *                    NV12 image
+ * @return  M4NO_ERROR: No error
+ * @return  M4MCS_ERR_VIDEO_DECODE_ERROR: the video decoding failed
+ * @return  M4MCS_ERR_RESIZE_ERROR: the resizing failed
+ * @return  Any error returned by an underlying module
+ ******************************************************************************
+ */
+
+
+/**
+ ********************************************************************
+ * Includes
+ ********************************************************************
+ */
+/* OSAL headers */
+#include "M4OSA_Memory.h"   /* OSAL memory management */
+#include "M4OSA_Debug.h"    /* OSAL debug management */
+
+
+/* Core headers */
+#include "M4MCS_InternalTypes.h"
+#include "M4MCS_ErrorCodes.h"
+
+/**
+ * Video preprocessing interface definition */
+#include "M4VPP_API.h"
+
+/**
+ * Video filters */
+#include "M4VIFI_FiltersAPI.h"  /**< for M4VIFI_ResizeBilinearYUV420toYUV420() */
+#include "M4AIR_API_NV12.h"
+#include "VideoEditorToolsNV12.h"
+
+#define UV_PLANE_BORDER_VALUE 0x80
+
+M4OSA_ERR M4MCS_intApplyVPP_NV12(M4VPP_Context pContext,
+    M4VIFI_ImagePlane* pPlaneIn, M4VIFI_ImagePlane* pPlaneOut)
+{
+    M4OSA_ERR err = M4NO_ERROR;
+
+/* This part is used only if video codecs are compiled */
+#ifndef M4MCS_AUDIOONLY
+    /**
+     * The VPP context is actually the MCS context! */
+    M4MCS_InternalContext *pC = (M4MCS_InternalContext*)(pContext);
+
+    M4_MediaTime mtCts = pC->dViDecCurrentCts;
+
+    /**
+     * When closing after an error occurred, it may happen that pReaderVideoAU->m_dataAddress has
+     * not been allocated yet. When closing in pause mode, the decoder can be null.
+     * We don't want an error to be returned because it would interrupt the close process and
+     * thus some resources would be locked. So we return M4NO_ERROR.
+     */
+    /* Initialize the output plane to black if the media rendering
+       mode is black borders */
+    if(pC->MediaRendering == M4MCS_kBlackBorders)
+    {
+        memset((void *)pPlaneOut[0].pac_data, Y_PLANE_BORDER_VALUE,
+            (pPlaneOut[0].u_height*pPlaneOut[0].u_stride));
+        memset((void *)pPlaneOut[1].pac_data, UV_PLANE_BORDER_VALUE,
+            (pPlaneOut[1].u_height*pPlaneOut[1].u_stride));
+    }
+    else if ((M4OSA_NULL == pC->ReaderVideoAU.m_dataAddress) ||
+             (M4OSA_NULL == pC->pViDecCtxt))
+    {
+        /**
+         * We must fill the input of the encoder with a dummy image, because
+         * encoding noise leads to a huge video AU, and thus a writer buffer overflow.
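A quick aside on the fills used in this function: NV12 carries one full-resolution Y plane plus one half-height plane of interleaved U/V bytes, where 0x80 is the neutral chroma value. The black borders are therefore painted with Y = 0x00 and UV = 0x80 memsets, while the dummy frame is simply zero-filled. A self-contained sketch of the black fill, assuming only the M4VIFI_ImagePlane convention used in this patch (the helper name is invented):

    #include <string.h>

    /* Paint an NV12 frame black: Y = 0x00, interleaved UV = 0x80 (neutral). */
    static void nv12_fill_black(M4VIFI_ImagePlane *planes)
    {
        memset(planes[0].pac_data, 0x00,
               planes[0].u_stride * planes[0].u_height);  /* Y plane  */
        memset(planes[1].pac_data, 0x80,
               planes[1].u_stride * planes[1].u_height);  /* UV plane */
    }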
*/ + memset((void *)pPlaneOut[0].pac_data,0, + pPlaneOut[0].u_stride * pPlaneOut[0].u_height); + memset((void *)pPlaneOut[1].pac_data,0, + pPlaneOut[1].u_stride * pPlaneOut[1].u_height); + + M4OSA_TRACE1_0("M4MCS_intApplyVPP_NV12: pReaderVideoAU->m_dataAddress is M4OSA_NULL,\ + returning M4NO_ERROR"); + return M4NO_ERROR; + } + if(pC->isRenderDup == M4OSA_FALSE) + { + /** + * m_pPreResizeFrame different than M4OSA_NULL means that resizing is needed */ + if (M4OSA_NULL != pC->pPreResizeFrame) + { + /** FB 2008/10/20: + Used for cropping and black borders*/ + M4AIR_Params Params; + + M4OSA_TRACE3_0("M4MCS_intApplyVPP_NV12: Need to resize"); + err = pC->m_pVideoDecoder->m_pFctRender(pC->pViDecCtxt, &mtCts, + pC->pPreResizeFrame, M4OSA_TRUE); + if (M4NO_ERROR != err) + { + M4OSA_TRACE1_1("M4MCS_intApplyVPP_NV12: m_pFctRender returns 0x%x!", err); + return err; + } + + if(pC->MediaRendering == M4MCS_kResizing) + { + /* + * Call the resize filter. From the intermediate frame to the encoder + * image plane + */ + err = M4VIFI_ResizeBilinearNV12toNV12(M4OSA_NULL, + pC->pPreResizeFrame, pPlaneOut); + if (M4NO_ERROR != err) + { + M4OSA_TRACE1_1("M4MCS_intApplyVPP_NV12: M4ViFilResizeBilinearNV12toNV12\ + returns 0x%x!", err); + return err; + } + } + else + { + M4VIFI_ImagePlane pImagePlanesTemp[2]; + M4VIFI_ImagePlane* pPlaneTemp; + M4OSA_UInt8* pOutPlaneY = pPlaneOut[0].pac_data + + pPlaneOut[0].u_topleft; + M4OSA_UInt8* pOutPlaneUV = pPlaneOut[1].pac_data + + pPlaneOut[1].u_topleft; + M4OSA_UInt8* pInPlaneY = M4OSA_NULL; + M4OSA_UInt8* pInPlaneUV = M4OSA_NULL; + M4OSA_UInt32 i = 0; + + /*FB 2008/10/20: to keep media aspect ratio*/ + /*Initialize AIR Params*/ + Params.m_inputCoord.m_x = 0; + Params.m_inputCoord.m_y = 0; + Params.m_inputSize.m_height = pC->pPreResizeFrame->u_height; + Params.m_inputSize.m_width = pC->pPreResizeFrame->u_width; + Params.m_outputSize.m_width = pPlaneOut->u_width; + Params.m_outputSize.m_height = pPlaneOut->u_height; + Params.m_bOutputStripe = M4OSA_FALSE; + Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; + /** + Media rendering: Black borders*/ + if(pC->MediaRendering == M4MCS_kBlackBorders) + { + pImagePlanesTemp[0].u_width = pPlaneOut[0].u_width; + pImagePlanesTemp[0].u_height = pPlaneOut[0].u_height; + pImagePlanesTemp[0].u_stride = pPlaneOut[0].u_width; + pImagePlanesTemp[0].u_topleft = 0; + + pImagePlanesTemp[1].u_width = pPlaneOut[1].u_width; + pImagePlanesTemp[1].u_height = pPlaneOut[1].u_height; + pImagePlanesTemp[1].u_stride = pPlaneOut[1].u_width; + pImagePlanesTemp[1].u_topleft = 0; + + + /* Allocates plan in local image plane structure */ + pImagePlanesTemp[0].pac_data = + (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(pImagePlanesTemp[0]\ + .u_width * pImagePlanesTemp[0].u_height, M4VS, + (M4OSA_Char *)"M4xVSS_PictureCallbackFct: temporary plane bufferY") ; + if(pImagePlanesTemp[0].pac_data == M4OSA_NULL) + { + M4OSA_TRACE1_0("Error alloc in M4MCS_intApplyVPP_NV12"); + return M4ERR_ALLOC; + } + pImagePlanesTemp[1].pac_data = + (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(pImagePlanesTemp[1]\ + .u_width * pImagePlanesTemp[1].u_height, M4VS, + (M4OSA_Char *)"M4xVSS_PictureCallbackFct: temporary plane bufferU") ; + if(pImagePlanesTemp[1].pac_data == M4OSA_NULL) + { + M4OSA_TRACE1_0("Error alloc in M4MCS_intApplyVPP_NV12"); + return M4ERR_ALLOC; + } + + pInPlaneY = pImagePlanesTemp[0].pac_data ; + pInPlaneUV = pImagePlanesTemp[1].pac_data ; + + memset((void *)pImagePlanesTemp[0].pac_data,Y_PLANE_BORDER_VALUE, + 
(pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride)); + memset((void *)pImagePlanesTemp[1].pac_data,UV_PLANE_BORDER_VALUE, + (pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride)); + if((M4OSA_UInt32)((pC->pPreResizeFrame->u_height * pPlaneOut->u_width)\ + /pC->pPreResizeFrame->u_width) <= pPlaneOut->u_height) + //Params.m_inputSize.m_height < Params.m_inputSize.m_width) + { + /*it is height so black borders will be on the top and on the bottom side*/ + Params.m_outputSize.m_width = pPlaneOut->u_width; + Params.m_outputSize.m_height = + (M4OSA_UInt32) + ((pC->pPreResizeFrame->u_height * pPlaneOut->u_width)\ + /pC->pPreResizeFrame->u_width); + /*number of lines at the top*/ + pImagePlanesTemp[0].u_topleft = + (M4MCS_ABS((M4OSA_Int32) + (pImagePlanesTemp[0].u_height\ + -Params.m_outputSize.m_height)>>1)) * + pImagePlanesTemp[0].u_stride; + pImagePlanesTemp[0].u_height = Params.m_outputSize.m_height; + pImagePlanesTemp[1].u_topleft = + (M4MCS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height\ + -(Params.m_outputSize.m_height>>1)))>>1)\ + * pImagePlanesTemp[1].u_stride; + pImagePlanesTemp[1].u_height = Params.m_outputSize.m_height>>1; + + } + else + { + /*it is width so black borders will be on the left and right side*/ + Params.m_outputSize.m_height = pPlaneOut->u_height; + Params.m_outputSize.m_width = + (M4OSA_UInt32)((pC->pPreResizeFrame->u_width + * pPlaneOut->u_height)\ + /pC->pPreResizeFrame->u_height); + + pImagePlanesTemp[0].u_topleft = + (M4MCS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width-\ + Params.m_outputSize.m_width)>>1)); + pImagePlanesTemp[0].u_width = Params.m_outputSize.m_width; + pImagePlanesTemp[1].u_topleft = + (M4MCS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width-\ + Params.m_outputSize.m_width))>>1); + pImagePlanesTemp[1].u_width = Params.m_outputSize.m_width; + + } + + /*Width and height have to be even*/ + Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; + Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; + Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; + Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; + pImagePlanesTemp[0].u_width = (pImagePlanesTemp[0].u_width>>1)<<1; + pImagePlanesTemp[1].u_width = (pImagePlanesTemp[1].u_width>>1)<<1; + pImagePlanesTemp[0].u_height = (pImagePlanesTemp[0].u_height>>1)<<1; + pImagePlanesTemp[1].u_height = (pImagePlanesTemp[1].u_height>>1)<<1; + + + /*Check that values are coherent*/ + if(Params.m_inputSize.m_height == Params.m_outputSize.m_height) + { + Params.m_inputSize.m_width = Params.m_outputSize.m_width; + } + else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width) + { + Params.m_inputSize.m_height = Params.m_outputSize.m_height; + } + pPlaneTemp = pImagePlanesTemp; + } + /** + Media rendering: Cropping*/ + if(pC->MediaRendering == M4MCS_kCropping) + { + Params.m_outputSize.m_height = pPlaneOut->u_height; + Params.m_outputSize.m_width = pPlaneOut->u_width; + if((Params.m_outputSize.m_height * Params.m_inputSize.m_width)\ + /Params.m_outputSize.m_width>1)<<1; + Params.m_inputCoord.m_y = + (M4OSA_Int32)((M4OSA_Int32) + ((pC->pPreResizeFrame->u_height\ + - Params.m_inputSize.m_height))>>1); + } + else + { + /*width will be cropped*/ + Params.m_inputSize.m_width = + (M4OSA_UInt32)((Params.m_outputSize.m_width\ + * Params.m_inputSize.m_height) / + Params.m_outputSize.m_height); + Params.m_inputSize.m_width = + (Params.m_inputSize.m_width>>1)<<1; + Params.m_inputCoord.m_x = + (M4OSA_Int32)((M4OSA_Int32) + ((pC->pPreResizeFrame->u_width\ + - 
Params.m_inputSize.m_width))>>1); + } + pPlaneTemp = pPlaneOut; + } + /** + * Call AIR functions */ + if(M4OSA_NULL == pC->m_air_context) + { + err = M4AIR_create_NV12(&pC->m_air_context, M4AIR_kNV12P); + if(err != M4NO_ERROR) + { + M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\ + Error when initializing AIR_NV12: 0x%x", err); + return err; + } + } + + err = M4AIR_configure_NV12(pC->m_air_context, &Params); + if(err != M4NO_ERROR) + { + M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\ + Error when configuring AIR_NV12: 0x%x", err); + M4AIR_cleanUp_NV12(pC->m_air_context); + return err; + } + + err = M4AIR_get_NV12(pC->m_air_context, pC->pPreResizeFrame, + pPlaneTemp); + if(err != M4NO_ERROR) + { + M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\ + Error when getting AIR_NV12 plane: 0x%x", err); + M4AIR_cleanUp_NV12(pC->m_air_context); + return err; + } + if(pC->MediaRendering == M4MCS_kBlackBorders) + { + for(i=0; im_pVideoDecoder->m_pFctRender(pC->pViDecCtxt, + &mtCts, pPlaneOut, + M4OSA_TRUE); + if (M4NO_ERROR != err) + { + M4OSA_TRACE1_1("M4MCS_intApplyVPP_NV12: m_pFctRender returns 0x%x!", err); + return err; + } + } + pC->lastDecodedPlane = pPlaneOut; + } + else + { + /* Copy last decoded plane to output plane */ + memcpy((void *)pPlaneOut[0].pac_data, + (void *)pC->lastDecodedPlane[0].pac_data, + (pPlaneOut[0].u_height * pPlaneOut[0].u_width)); + memcpy((void *)pPlaneOut[1].pac_data, + (void *)pC->lastDecodedPlane[1].pac_data, + (pPlaneOut[1].u_height * pPlaneOut[1].u_width)); + + pC->lastDecodedPlane = pPlaneOut; + } + + +#endif /*M4MCS_AUDIOONLY*/ + M4OSA_TRACE3_0("M4MCS_intApplyVPP_NV12: returning M4NO_ERROR"); + return M4NO_ERROR; +} + diff --git a/frameworks/videoedit/vss/M4VSS3GPP_EditVideo_NV12.c b/frameworks/videoedit/vss/M4VSS3GPP_EditVideo_NV12.c new file mode 100644 index 0000000..581665c --- /dev/null +++ b/frameworks/videoedit/vss/M4VSS3GPP_EditVideo_NV12.c @@ -0,0 +1,640 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Our header */ +#include "M4VSS3GPP_API.h" +#include "M4VSS3GPP_InternalTypes.h" +#include "M4VSS3GPP_InternalFunctions.h" +#include "M4VSS3GPP_InternalConfig.h" +#include "M4VSS3GPP_ErrorCodes.h" + +// StageFright encoders require %16 resolution +#include "M4ENCODER_common.h" +/** + * OSAL headers */ +#include "M4OSA_Memory.h" /**< OSAL memory management */ +#include "M4OSA_Debug.h" /**< OSAL debug management */ + +#include "M4AIR_API_NV12.h" +#include "VideoEditorToolsNV12.h" + +#define M4xVSS_ABS(a) ( ( (a) < (0) ) ? 
(-(a)) : (a) )
+#define Y_PLANE_BORDER_VALUE 0x00
+#define UV_PLANE_BORDER_VALUE 0x80
+
+/**
+******************************************************************************
+* M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toNV12(M4OSA_Void* pFileIn,
+*                                          M4OSA_FileReadPointer* pFileReadPtr,
+*                                          M4VIFI_ImagePlane* pImagePlanes,
+*                                          M4OSA_UInt32 width,
+*                                          M4OSA_UInt32 height);
+* @brief  It converts and resizes an ARGB8888 image to NV12
+* @note
+* @param  pFileIn      (IN) The ARGB8888 input file
+* @param  pFileReadPtr (IN) Pointer to filesystem functions
+* @param  pImagePlanes (IN/OUT) Pointer to NV12 output planes allocated by the user.
+*                      The ARGB8888 image will be converted and resized to the output
+*                      NV12 plane size
+* @param  width        (IN) width of the ARGB8888
+* @param  height       (IN) height of the ARGB8888
+* @return M4NO_ERROR: No error
+* @return M4ERR_ALLOC: memory error
+* @return M4ERR_PARAMETER: At least one of the function parameters is null
+******************************************************************************
+*/
+
+M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toNV12(M4OSA_Void* pFileIn,
+    M4OSA_FileReadPointer* pFileReadPtr, M4VIFI_ImagePlane* pImagePlanes,
+    M4OSA_UInt32 width, M4OSA_UInt32 height)
+{
+    M4OSA_Context pARGBIn;
+    M4VIFI_ImagePlane rgbPlane1, rgbPlane2;
+    M4OSA_UInt32 frameSize_argb = width * height * 4;
+    M4OSA_UInt32 frameSize_rgb888 = width * height * 3;
+    M4OSA_UInt32 i = 0, j = 0;
+    M4OSA_ERR err = M4NO_ERROR;
+
+    M4OSA_UInt8 *pArgbPlane =
+        (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb,
+            M4VS, (M4OSA_Char*)"argb data");
+    if (pArgbPlane == M4OSA_NULL) {
+        M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toNV12: \
+            Failed to allocate memory for ARGB plane");
+        return M4ERR_ALLOC;
+    }
+
+    /* Open the input ARGB8888 file */
+    err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead);
+    if (err != M4NO_ERROR) {
+        M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toNV12: \
+            Can not open input ARGB8888 file %s, error: 0x%x\n", pFileIn, err);
+        free(pArgbPlane);
+        pArgbPlane = M4OSA_NULL;
+        goto cleanup;
+    }
+
+    err = pFileReadPtr->readData(pARGBIn, (M4OSA_MemAddr8)pArgbPlane,
+        &frameSize_argb);
+    if (err != M4NO_ERROR) {
+        M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toNV12: \
+            Can not read ARGB8888 file %s, error: 0x%x\n", pFileIn, err);
+        pFileReadPtr->closeRead(pARGBIn);
+        free(pArgbPlane);
+        pArgbPlane = M4OSA_NULL;
+        goto cleanup;
+    }
+
+    err = pFileReadPtr->closeRead(pARGBIn);
+    if (err != M4NO_ERROR) {
+        M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toNV12: \
+            Can not close ARGB8888 file %s, error: 0x%x\n", pFileIn, err);
+        free(pArgbPlane);
+        pArgbPlane = M4OSA_NULL;
+        goto cleanup;
+    }
+
+    rgbPlane1.pac_data =
+        (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888,
+            M4VS, (M4OSA_Char*)"RGB888 plane1");
+    if (rgbPlane1.pac_data == M4OSA_NULL) {
+        M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toNV12: \
+            Failed to allocate memory for rgb plane1");
+        free(pArgbPlane);
+        return M4ERR_ALLOC;
+    }
+    rgbPlane1.u_height = height;
+    rgbPlane1.u_width = width;
+    rgbPlane1.u_stride = width*3;
+    rgbPlane1.u_topleft = 0;
+
+
+    /** Remove the alpha channel */
+    for (i = 0, j = 0; i < frameSize_argb; i++) {
+        if ((i % 4) == 0) continue;
+        rgbPlane1.pac_data[j] = pArgbPlane[i];
+        j++;
+    }
+    free(pArgbPlane);
+
+    /**
+     * Check if resizing is required with color conversion */
+    if (width != pImagePlanes->u_width || height != pImagePlanes->u_height) {
+
+        frameSize_rgb888 = pImagePlanes->u_width * pImagePlanes->u_height *
3; + rgbPlane2.pac_data = + (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888, M4VS, + (M4OSA_Char*)"rgb Plane2"); + if(rgbPlane2.pac_data == M4OSA_NULL) { + M4OSA_TRACE1_0("Failed to allocate memory for rgb plane2"); + free(rgbPlane1.pac_data); + return M4ERR_ALLOC; + } + rgbPlane2.u_height = pImagePlanes->u_height; + rgbPlane2.u_width = pImagePlanes->u_width; + rgbPlane2.u_stride = pImagePlanes->u_width*3; + rgbPlane2.u_topleft = 0; + + /* Resizing */ + err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, + &rgbPlane1, &rgbPlane2); + free(rgbPlane1.pac_data); + if(err != M4NO_ERROR) { + M4OSA_TRACE1_1("error resizing RGB888 to RGB888: 0x%x\n", err); + free(rgbPlane2.pac_data); + return err; + } + + /*Converting Resized RGB888 to NV12 */ + err = M4VIFI_RGB888toNV12(M4OSA_NULL, &rgbPlane2, pImagePlanes); + free(rgbPlane2.pac_data); + if(err != M4NO_ERROR) { + M4OSA_TRACE1_1("error converting from RGB888 to NV12: 0x%x\n", err); + return err; + } + } else { + err = M4VIFI_RGB888toNV12(M4OSA_NULL, &rgbPlane1, pImagePlanes); + if(err != M4NO_ERROR) { + M4OSA_TRACE1_1("error when converting from RGB to NV12: 0x%x\n", err); + } + free(rgbPlane1.pac_data); + } +cleanup: + M4OSA_TRACE3_0("M4VSS3GPP_internalConvertAndResizeARGB8888toNV12 exit"); + return err; +} + + +M4OSA_ERR M4VSS3GPP_intApplyRenderingMode_NV12(M4VSS3GPP_InternalEditContext *pC, + M4xVSS_MediaRendering renderingMode, M4VIFI_ImagePlane* pInplane, + M4VIFI_ImagePlane* pOutplane) +{ + + M4OSA_ERR err = M4NO_ERROR; + M4AIR_Params airParams; + M4VIFI_ImagePlane pImagePlanesTemp[2]; + M4OSA_UInt32 i = 0; + + M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode_NV12 begin"); + + if (renderingMode == M4xVSS_kBlackBorders) { + memset((void *)pOutplane[0].pac_data, Y_PLANE_BORDER_VALUE, + (pOutplane[0].u_height*pOutplane[0].u_stride)); + memset((void *)pOutplane[1].pac_data, UV_PLANE_BORDER_VALUE, + (pOutplane[1].u_height*pOutplane[1].u_stride)); + } + + if (renderingMode == M4xVSS_kResizing) { + /** + * Call the resize filter. 
+ * From the intermediate frame to the encoder image plane */ + err = M4VIFI_ResizeBilinearNV12toNV12(M4OSA_NULL, + pInplane, pOutplane); + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ + M4ViFilResizeBilinearNV12toNV12 returns 0x%x!", err); + return err; + } + } else { + M4VIFI_ImagePlane* pPlaneTemp = M4OSA_NULL; + M4OSA_UInt8* pOutPlaneY = + pOutplane[0].pac_data + pOutplane[0].u_topleft; + M4OSA_UInt8* pOutPlaneUV = + pOutplane[1].pac_data + pOutplane[1].u_topleft; + + M4OSA_UInt8* pInPlaneY = M4OSA_NULL; + M4OSA_UInt8* pInPlaneUV = M4OSA_NULL; + + /* To keep media aspect ratio*/ + /* Initialize AIR Params*/ + airParams.m_inputCoord.m_x = 0; + airParams.m_inputCoord.m_y = 0; + airParams.m_inputSize.m_height = pInplane->u_height; + airParams.m_inputSize.m_width = pInplane->u_width; + airParams.m_outputSize.m_width = pOutplane->u_width; + airParams.m_outputSize.m_height = pOutplane->u_height; + airParams.m_bOutputStripe = M4OSA_FALSE; + airParams.m_outputOrientation = M4COMMON_kOrientationTopLeft; + + /** + Media rendering: Black borders*/ + if (renderingMode == M4xVSS_kBlackBorders) { + pImagePlanesTemp[0].u_width = pOutplane[0].u_width; + pImagePlanesTemp[0].u_height = pOutplane[0].u_height; + pImagePlanesTemp[0].u_stride = pOutplane[0].u_width; + pImagePlanesTemp[0].u_topleft = 0; + + pImagePlanesTemp[1].u_width = pOutplane[1].u_width; + pImagePlanesTemp[1].u_height = pOutplane[1].u_height; + pImagePlanesTemp[1].u_stride = pOutplane[1].u_width; + pImagePlanesTemp[1].u_topleft = 0; + + + /** + * Allocates plan in local image plane structure */ + pImagePlanesTemp[0].pac_data = + (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( + pImagePlanesTemp[0].u_width * pImagePlanesTemp[0].u_height, + M4VS, (M4OSA_Char *)"pImagePlaneTemp Y") ; + if (pImagePlanesTemp[0].pac_data == M4OSA_NULL) { + M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode_NV12: Alloc Error"); + return M4ERR_ALLOC; + } + pImagePlanesTemp[1].pac_data = + (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( + pImagePlanesTemp[1].u_width * pImagePlanesTemp[1].u_height, + M4VS, (M4OSA_Char *)"pImagePlaneTemp UV") ; + if (pImagePlanesTemp[1].pac_data == M4OSA_NULL) { + M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode_NV12: Alloc Error"); + free(pImagePlanesTemp[0].pac_data); + return M4ERR_ALLOC; + } + + pInPlaneY = pImagePlanesTemp[0].pac_data ; + pInPlaneUV = pImagePlanesTemp[1].pac_data ; + + memset((void *)pImagePlanesTemp[0].pac_data, Y_PLANE_BORDER_VALUE, + (pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride)); + memset((void *)pImagePlanesTemp[1].pac_data, UV_PLANE_BORDER_VALUE, + (pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride)); + + M4OSA_UInt32 height = + (pInplane->u_height * pOutplane->u_width) /pInplane->u_width; + + if (height <= pOutplane->u_height) { + /** + * Black borders will be on the top and the bottom side */ + airParams.m_outputSize.m_width = pOutplane->u_width; + airParams.m_outputSize.m_height = height; + /** + * Number of lines at the top */ + pImagePlanesTemp[0].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_height - + airParams.m_outputSize.m_height)>>1)) * + pImagePlanesTemp[0].u_stride; + pImagePlanesTemp[0].u_height = airParams.m_outputSize.m_height; + pImagePlanesTemp[1].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height - + (airParams.m_outputSize.m_height>>1)))>>1) * + pImagePlanesTemp[1].u_stride; + pImagePlanesTemp[1].u_topleft = ((pImagePlanesTemp[1].u_topleft>>1)<<1); + pImagePlanesTemp[1].u_height = + 
airParams.m_outputSize.m_height>>1; + + } else { + /** + * Black borders will be on the left and right side */ + airParams.m_outputSize.m_height = pOutplane->u_height; + airParams.m_outputSize.m_width = + (M4OSA_UInt32)((pInplane->u_width * pOutplane->u_height)/pInplane->u_height); + + pImagePlanesTemp[0].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width - + airParams.m_outputSize.m_width)>>1)); + pImagePlanesTemp[0].u_width = airParams.m_outputSize.m_width; + pImagePlanesTemp[1].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width - + airParams.m_outputSize.m_width))>>1); + pImagePlanesTemp[1].u_topleft = ((pImagePlanesTemp[1].u_topleft>>1)<<1); + pImagePlanesTemp[1].u_width = airParams.m_outputSize.m_width; + } + + /** + * Width and height have to be even */ + airParams.m_outputSize.m_width = + (airParams.m_outputSize.m_width>>1)<<1; + airParams.m_outputSize.m_height = + (airParams.m_outputSize.m_height>>1)<<1; + airParams.m_inputSize.m_width = + (airParams.m_inputSize.m_width>>1)<<1; + airParams.m_inputSize.m_height = + (airParams.m_inputSize.m_height>>1)<<1; + pImagePlanesTemp[0].u_width = + (pImagePlanesTemp[0].u_width>>1)<<1; + pImagePlanesTemp[1].u_width = + (pImagePlanesTemp[1].u_width>>1)<<1; + pImagePlanesTemp[0].u_height = + (pImagePlanesTemp[0].u_height>>1)<<1; + pImagePlanesTemp[1].u_height = + (pImagePlanesTemp[1].u_height>>1)<<1; + + /** + * Check that values are coherent */ + if (airParams.m_inputSize.m_height == + airParams.m_outputSize.m_height) { + airParams.m_inputSize.m_width = + airParams.m_outputSize.m_width; + } else if (airParams.m_inputSize.m_width == + airParams.m_outputSize.m_width) { + airParams.m_inputSize.m_height = + airParams.m_outputSize.m_height; + } + pPlaneTemp = pImagePlanesTemp; + } + + /** + * Media rendering: Cropping*/ + if (renderingMode == M4xVSS_kCropping) { + airParams.m_outputSize.m_height = pOutplane->u_height; + airParams.m_outputSize.m_width = pOutplane->u_width; + if ((airParams.m_outputSize.m_height * + airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width < + airParams.m_inputSize.m_height) { + /* Height will be cropped */ + airParams.m_inputSize.m_height = + (M4OSA_UInt32)((airParams.m_outputSize.m_height * + airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width); + airParams.m_inputSize.m_height = + (airParams.m_inputSize.m_height>>1)<<1; + airParams.m_inputCoord.m_y = + (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_height - + airParams.m_inputSize.m_height))>>1); + } else { + /* Width will be cropped */ + airParams.m_inputSize.m_width = + (M4OSA_UInt32)((airParams.m_outputSize.m_width * + airParams.m_inputSize.m_height)/airParams.m_outputSize.m_height); + airParams.m_inputSize.m_width = + (airParams.m_inputSize.m_width>>1)<<1; + airParams.m_inputCoord.m_x = + (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_width - + airParams.m_inputSize.m_width))>>1); + } + pPlaneTemp = pOutplane; + } + /** + * Call AIR functions */ + if (M4OSA_NULL == pC->m_air_context) { + err = M4AIR_create_NV12(&pC->m_air_context, M4AIR_kNV12P); + if(err != M4NO_ERROR) { + M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode_NV12: \ + M4AIR_create returned error 0x%x", err); + goto cleanUp; + } + } + + err = M4AIR_configure_NV12(pC->m_air_context, &airParams); + if (err != M4NO_ERROR) { + M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode_NV12: \ + Error when configuring AIR: 0x%x", err); + M4AIR_cleanUp_NV12(pC->m_air_context); + goto cleanUp; + } + + err = M4AIR_get_NV12(pC->m_air_context, pInplane, pPlaneTemp); + if (err != 
M4NO_ERROR) { + M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode_NV12: \ + Error when getting AIR plane: 0x%x", err); + M4AIR_cleanUp_NV12(pC->m_air_context); + goto cleanUp; + } + + if (renderingMode == M4xVSS_kBlackBorders) { + for (i=0; ipPlaneYuv = + (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc( + 2*sizeof(M4VIFI_ImagePlane), M4VS, + (M4OSA_Char*)"pPlaneYuv"); + + if (pClipCtxt->pPlaneYuv == M4OSA_NULL) { + return M4ERR_ALLOC; + } + + pClipCtxt->pPlaneYuv[0].u_height = + pClipCtxt->pSettings->ClipProperties.uiStillPicHeight; + pClipCtxt->pPlaneYuv[0].u_width = + pClipCtxt->pSettings->ClipProperties.uiStillPicWidth; + pClipCtxt->pPlaneYuv[0].u_stride = pClipCtxt->pPlaneYuv[0].u_width; + pClipCtxt->pPlaneYuv[0].u_topleft = 0; + + pClipCtxt->pPlaneYuv[0].pac_data = + (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc( + pClipCtxt->pPlaneYuv[0].u_height* + pClipCtxt->pPlaneYuv[0].u_width * 1.5, + M4VS, (M4OSA_Char*)"imageClip YUV data"); + if (pClipCtxt->pPlaneYuv[0].pac_data == M4OSA_NULL) { + free(pClipCtxt->pPlaneYuv); + return M4ERR_ALLOC; + } + + pClipCtxt->pPlaneYuv[1].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1; + pClipCtxt->pPlaneYuv[1].u_width = pClipCtxt->pPlaneYuv[0].u_width; + pClipCtxt->pPlaneYuv[1].u_stride = pClipCtxt->pPlaneYuv[1].u_width; + pClipCtxt->pPlaneYuv[1].u_topleft = 0; + pClipCtxt->pPlaneYuv[1].pac_data = (M4VIFI_UInt8*)( + pClipCtxt->pPlaneYuv[0].pac_data + + pClipCtxt->pPlaneYuv[0].u_height * + pClipCtxt->pPlaneYuv[0].u_width); + + + err = M4VSS3GPP_internalConvertAndResizeARGB8888toNV12 ( + pClipCtxt->pSettings->pFile, + pC->pOsaFileReadPtr, + pClipCtxt->pPlaneYuv, + pClipCtxt->pSettings->ClipProperties.uiStillPicWidth, + pClipCtxt->pSettings->ClipProperties.uiStillPicHeight); + if (M4NO_ERROR != err) { + free(pClipCtxt->pPlaneYuv[0].pac_data); + free(pClipCtxt->pPlaneYuv); + return err; + } + + // Set the YUV data to the decoder using setoption + err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption ( + pClipCtxt->pViDecCtxt, + M4DECODER_kOptionID_DecYuvData, + (M4OSA_DataOption)pClipCtxt->pPlaneYuv); // FIXME: not sure when call this + if (M4NO_ERROR != err) { + free(pClipCtxt->pPlaneYuv[0].pac_data); + free(pClipCtxt->pPlaneYuv); + return err; + } + + pClipCtxt->pSettings->ClipProperties.bSetImageData = M4OSA_TRUE; + + // Allocate Yuv plane with effect + pClipCtxt->pPlaneYuvWithEffect = + (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc( + 2*sizeof(M4VIFI_ImagePlane), M4VS, + (M4OSA_Char*)"pPlaneYuvWithEffect"); + if (pClipCtxt->pPlaneYuvWithEffect == M4OSA_NULL) { + free(pClipCtxt->pPlaneYuv[0].pac_data); + free(pClipCtxt->pPlaneYuv); + return M4ERR_ALLOC; + } + + pClipCtxt->pPlaneYuvWithEffect[0].u_height = pC->ewc.uiVideoHeight; + pClipCtxt->pPlaneYuvWithEffect[0].u_width = pC->ewc.uiVideoWidth; + pClipCtxt->pPlaneYuvWithEffect[0].u_stride = pC->ewc.uiVideoWidth; + pClipCtxt->pPlaneYuvWithEffect[0].u_topleft = 0; + + pClipCtxt->pPlaneYuvWithEffect[0].pac_data = + (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc( + pC->ewc.uiVideoHeight * pC->ewc.uiVideoWidth * 1.5, + M4VS, (M4OSA_Char*)"imageClip YUV data"); + if (pClipCtxt->pPlaneYuvWithEffect[0].pac_data == M4OSA_NULL) { + free(pClipCtxt->pPlaneYuv[0].pac_data); + free(pClipCtxt->pPlaneYuv); + free(pClipCtxt->pPlaneYuvWithEffect); + return M4ERR_ALLOC; + } + + pClipCtxt->pPlaneYuvWithEffect[1].u_height = + pClipCtxt->pPlaneYuvWithEffect[0].u_height >>1; + pClipCtxt->pPlaneYuvWithEffect[1].u_width = + pClipCtxt->pPlaneYuvWithEffect[0].u_width; + pClipCtxt->pPlaneYuvWithEffect[1].u_stride = + 
pClipCtxt->pPlaneYuvWithEffect[1].u_width; + pClipCtxt->pPlaneYuvWithEffect[1].u_topleft = 0; + pClipCtxt->pPlaneYuvWithEffect[1].pac_data = (M4VIFI_UInt8*)( + pClipCtxt->pPlaneYuvWithEffect[0].pac_data + + pClipCtxt->pPlaneYuvWithEffect[0].u_height * + pClipCtxt->pPlaneYuvWithEffect[0].u_width); + + err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption( + pClipCtxt->pViDecCtxt, M4DECODER_kOptionID_YuvWithEffectContiguous, + (M4OSA_DataOption)pClipCtxt->pPlaneYuvWithEffect); + if (M4NO_ERROR != err) { + free(pClipCtxt->pPlaneYuv[0].pac_data); + free(pClipCtxt->pPlaneYuv); + free(pClipCtxt->pPlaneYuvWithEffect); + return err; + } + + return M4NO_ERROR; +} + + +M4OSA_ERR M4VSS3GPP_intRotateVideo_NV12(M4VIFI_ImagePlane* pPlaneIn, + M4OSA_UInt32 rotationDegree) +{ + M4OSA_ERR err = M4NO_ERROR; + M4VIFI_ImagePlane outPlane[2]; + + if (rotationDegree != 180) { + // Swap width and height of in plane + outPlane[0].u_width = pPlaneIn[0].u_height; + outPlane[0].u_height = pPlaneIn[0].u_width; + outPlane[0].u_stride = outPlane[0].u_width; + outPlane[0].u_topleft = 0; + outPlane[0].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc( + (outPlane[0].u_stride*outPlane[0].u_height), M4VS, + (M4OSA_Char*)("out Y plane for rotation")); + if (outPlane[0].pac_data == M4OSA_NULL) { + return M4ERR_ALLOC; + } + + outPlane[1].u_width = outPlane[0].u_width; + outPlane[1].u_height = outPlane[0].u_height >> 1; + outPlane[1].u_stride = outPlane[1].u_width; + outPlane[1].u_topleft = 0; + outPlane[1].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc( + (outPlane[1].u_stride*outPlane[1].u_height), M4VS, + (M4OSA_Char*)("out U plane for rotation")); + if (outPlane[1].pac_data == M4OSA_NULL) { + free((void *)outPlane[0].pac_data); + return M4ERR_ALLOC; + } + } + + switch(rotationDegree) { + case 90: + M4VIFI_Rotate90RightNV12toNV12(M4OSA_NULL, pPlaneIn, outPlane); + break; + + case 180: + // In plane rotation, so planeOut = planeIn + M4VIFI_Rotate180NV12toNV12(M4OSA_NULL, pPlaneIn, pPlaneIn); + break; + + case 270: + M4VIFI_Rotate90LeftNV12toNV12(M4OSA_NULL, pPlaneIn, outPlane); + break; + + default: + M4OSA_TRACE1_1("invalid rotation param %d", (int)rotationDegree); + err = M4ERR_PARAMETER; + break; + } + + if (rotationDegree != 180) { + memset((void *)pPlaneIn[0].pac_data, 0, + (pPlaneIn[0].u_width*pPlaneIn[0].u_height)); + memset((void *)pPlaneIn[1].pac_data, 0, + (pPlaneIn[1].u_width*pPlaneIn[1].u_height)); + + // Copy Y, U and V planes + memcpy((void *)pPlaneIn[0].pac_data, (void *)outPlane[0].pac_data, + (pPlaneIn[0].u_width*pPlaneIn[0].u_height)); + memcpy((void *)pPlaneIn[1].pac_data, (void *)outPlane[1].pac_data, + (pPlaneIn[1].u_width*pPlaneIn[1].u_height)); + + free((void *)outPlane[0].pac_data); + free((void *)outPlane[1].pac_data); + + // Swap the width and height of the in plane + uint32_t temp = 0; + temp = pPlaneIn[0].u_width; + pPlaneIn[0].u_width = pPlaneIn[0].u_height; + pPlaneIn[0].u_height = temp; + pPlaneIn[0].u_stride = pPlaneIn[0].u_width; + + pPlaneIn[1].u_width = pPlaneIn[0].u_width; + pPlaneIn[1].u_height = pPlaneIn[0].u_height >> 1; + pPlaneIn[1].u_stride = pPlaneIn[1].u_width; + + } + + return err; +} + diff --git a/frameworks/videoedit/vss/M4xVSS_NV12.h b/frameworks/videoedit/vss/M4xVSS_NV12.h new file mode 100644 index 0000000..35f4c67 --- /dev/null +++ b/frameworks/videoedit/vss/M4xVSS_NV12.h @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef M4XVSS_NV12_H +#define M4XVSS_NV12_H + +M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toNV12(M4OSA_Void* pFileIn, + M4OSA_FileReadPointer* pFileReadPtr, M4VIFI_ImagePlane* pImagePlanes, + M4OSA_UInt32 width, M4OSA_UInt32 height); + +M4OSA_ERR M4xVSS_PictureCallbackFct_NV12(M4OSA_Void* pPictureCtxt, + M4VIFI_ImagePlane* pImagePlanes, M4OSA_Double* pPictureDuration); + +M4OSA_ERR M4VSS3GPP_externalVideoEffectColor_NV12(M4OSA_Void *pFunctionContext, + M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut, + M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind); + +M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming_NV12(M4OSA_Void *userData, + M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut, + M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind); + +M4OSA_ERR M4xVSS_internalConvertRGBtoNV12(M4xVSS_FramingStruct* framingCtx); + +M4OSA_ERR M4xVSS_AlphaMagic_NV12(M4OSA_Void *userData, M4VIFI_ImagePlane *PlaneIn1, + M4VIFI_ImagePlane *PlaneIn2, M4VIFI_ImagePlane *PlaneOut, + M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiTransitionKind); + +M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toNV12(M4OSA_Void* pFileIn, + M4OSA_FileReadPointer* pFileReadPtr, M4VIFI_ImagePlane* pImagePlanes, + M4OSA_UInt32 width,M4OSA_UInt32 height); + +M4OSA_ERR M4xVSS_internalConvertARGB888toNV12_FrammingEffect(M4OSA_Context pContext, + M4VSS3GPP_EffectSettings* pEffect, M4xVSS_FramingStruct* framingCtx, + M4VIDEOEDITING_VideoFrameSize OutputVideoResolution); + +M4OSA_ERR M4xVSS_AlphaMagicBlending_NV12(M4OSA_Void *userData, + M4VIFI_ImagePlane *PlaneIn1, M4VIFI_ImagePlane *PlaneIn2, + M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress, + M4OSA_UInt32 uiTransitionKind); + +M4OSA_ERR M4xVSS_SlideTransition_NV12(M4OSA_Void *userData, + M4VIFI_ImagePlane *PlaneIn1, M4VIFI_ImagePlane *PlaneIn2, + M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress, + M4OSA_UInt32 uiTransitionKind); + +M4OSA_ERR M4xVSS_FadeBlackTransition_NV12(M4OSA_Void *userData, + M4VIFI_ImagePlane *PlaneIn1, M4VIFI_ImagePlane *PlaneIn2, + M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress, + M4OSA_UInt32 uiTransitionKind); + +#endif diff --git a/frameworks/videoedit/vss/M4xVSS_internal_NV12.c b/frameworks/videoedit/vss/M4xVSS_internal_NV12.c new file mode 100644 index 0000000..310f49e --- /dev/null +++ b/frameworks/videoedit/vss/M4xVSS_internal_NV12.c @@ -0,0 +1,3512 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
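An illustrative aside before the converters that follow: M4VIFI_RGB565toNV12 below walks the image two rows and two columns at a time, producing four Y samples per 2x2 block plus a single interleaved U/V pair obtained by averaging the four chroma estimates with rounding ("+2 >> 2"). That averaging step in isolation, as a toy helper whose name and plain int types are assumptions, not patch code:

    /* Rounded mean of four chroma estimates from one 2x2 pixel block;
     * uv points at the U byte of an interleaved NV12 chroma pair. */
    static void store_uv_2x2(int u00, int u10, int u01, int u11,
                             int v00, int v10, int v01, int v11,
                             unsigned char *uv)
    {
        uv[0] = (unsigned char)((u00 + u01 + u10 + u11 + 2) >> 2);  /* U */
        uv[1] = (unsigned char)((v00 + v01 + v10 + v11 + 2) >> 2);  /* V */
    }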
+ */ +#include "M4OSA_Debug.h" +#include "M4OSA_CharStar.h" + +#include "NXPSW_CompilerSwitches.h" + +#include "M4VSS3GPP_API.h" +#include "M4VSS3GPP_ErrorCodes.h" + +#include "M4xVSS_API.h" +#include "M4xVSS_Internal.h" + + +/*for rgb16 color effect*/ +#include "M4VIFI_Defines.h" +#include "M4VIFI_Clip.h" + +/** + * component includes */ +#include "M4VFL_transition.h" /**< video effects */ + +/* Internal header file of VSS is included because of MMS use case */ +#include "M4VSS3GPP_InternalTypes.h" + +/*Exif header files to add image rendering support (cropping, black borders)*/ +#include "M4EXIFC_CommonAPI.h" +// StageFright encoders require %16 resolution +#include "M4ENCODER_common.h" + +#include "M4AIR_API_NV12.h" +#include "VideoEditorToolsNV12.h" + +#define TRANSPARENT_COLOR 0x7E0 +#define LUM_FACTOR_MAX 10 + +/** + ****************************************************************************** + * M4VIFI_UInt8 M4VIFI_RGB565toNV12 (void *pUserData, + * M4VIFI_ImagePlane *pPlaneIn, + * M4VIFI_ImagePlane *pPlaneOut) + * @brief transform RGB565 image to a NV12 image. + * @note Convert RGB565 to NV12, + * Loop on each row ( 2 rows by 2 rows ) + * Loop on each column ( 2 col by 2 col ) + * Get 4 RGB samples from input data and build 4 output Y samples + * and each single U & V data + * end loop on col + * end loop on row + * @param pUserData: (IN) User Specific Data + * @param pPlaneIn: (IN) Pointer to RGB565 Plane + * @param pPlaneOut: (OUT) Pointer to NV12 buffer Plane + * @return M4VIFI_OK: there is no error + * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: YUV Plane height is ODD + * @return M4VIFI_ILLEGAL_FRAME_WIDTH: YUV Plane width is ODD + ****************************************************************************** +*/ + +M4VIFI_UInt8 M4VIFI_RGB565toNV12(void *pUserData, M4VIFI_ImagePlane *pPlaneIn, + M4VIFI_ImagePlane *pPlaneOut) +{ + M4VIFI_UInt32 u32_width, u32_height; + M4VIFI_UInt32 u32_stride_Y, u32_stride2_Y, u32_stride_UV; + M4VIFI_UInt32 u32_stride_rgb, u32_stride_2rgb; + M4VIFI_UInt32 u32_col, u32_row; + + M4VIFI_Int32 i32_r00, i32_r01, i32_r10, i32_r11; + M4VIFI_Int32 i32_g00, i32_g01, i32_g10, i32_g11; + M4VIFI_Int32 i32_b00, i32_b01, i32_b10, i32_b11; + M4VIFI_Int32 i32_y00, i32_y01, i32_y10, i32_y11; + M4VIFI_Int32 i32_u00, i32_u01, i32_u10, i32_u11; + M4VIFI_Int32 i32_v00, i32_v01, i32_v10, i32_v11; + M4VIFI_UInt8 *pu8_yn, *pu8_ys, *pu8_u, *pu8_v; + M4VIFI_UInt8 *pu8_y_data, *pu8_u_data, *pu8_v_data; + M4VIFI_UInt8 *pu8_rgbn_data, *pu8_rgbn; + M4VIFI_UInt16 u16_pix1, u16_pix2, u16_pix3, u16_pix4; + + /* Check planes height are appropriate */ + if ((pPlaneIn->u_height != pPlaneOut[0].u_height) || + (pPlaneOut[0].u_height != (pPlaneOut[1].u_height<<1))) + { + return M4VIFI_ILLEGAL_FRAME_HEIGHT; + } + + /* Check planes width are appropriate */ + if ((pPlaneIn->u_width != pPlaneOut[0].u_width) || + (pPlaneOut[0].u_width != pPlaneOut[1].u_width)) + { + return M4VIFI_ILLEGAL_FRAME_WIDTH; + } + + /* Set the pointer to the beginning of the output data buffers */ + pu8_y_data = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft; + pu8_u_data = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft; + pu8_v_data = pu8_u_data + 1; + + /* Set the pointer to the beginning of the input data buffers */ + pu8_rgbn_data = pPlaneIn->pac_data + pPlaneIn->u_topleft; + + /* Get the size of the output image */ + u32_width = pPlaneOut[0].u_width; + u32_height = pPlaneOut[0].u_height; + + /* Set the size of the memory jumps corresponding to row jump in each output plane */ + u32_stride_Y = 
pPlaneOut[0].u_stride; + u32_stride2_Y = u32_stride_Y << 1; + u32_stride_UV = pPlaneOut[1].u_stride; + + /* Set the size of the memory jumps corresponding to row jump in input plane */ + u32_stride_rgb = pPlaneIn->u_stride; + u32_stride_2rgb = u32_stride_rgb << 1; + + + /* Loop on each row of the output image, input coordinates are estimated from output ones */ + /* Two YUV rows are computed at each pass */ + + for (u32_row = u32_height ;u32_row != 0; u32_row -=2) + { + /* Current Y plane row pointers */ + pu8_yn = pu8_y_data; + /* Next Y plane row pointers */ + pu8_ys = pu8_yn + u32_stride_Y; + /* Current U plane row pointer */ + pu8_u = pu8_u_data; + /* Current V plane row pointer */ + pu8_v = pu8_v_data; + + pu8_rgbn = pu8_rgbn_data; + + /* Loop on each column of the output image */ + for (u32_col = u32_width; u32_col != 0 ; u32_col -=2) + { + /* Get four RGB 565 samples from input data */ + u16_pix1 = *( (M4VIFI_UInt16 *) pu8_rgbn); + u16_pix2 = *( (M4VIFI_UInt16 *) (pu8_rgbn + CST_RGB_16_SIZE)); + u16_pix3 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb)); + u16_pix4 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb + CST_RGB_16_SIZE)); + + /* Unpack RGB565 to 8bit R, G, B */ + /* (x,y) */ + GET_RGB565(i32_r00,i32_g00,i32_b00,u16_pix1); + /* (x+1,y) */ + GET_RGB565(i32_r10,i32_g10,i32_b10,u16_pix2); + /* (x,y+1) */ + GET_RGB565(i32_r01,i32_g01,i32_b01,u16_pix3); + /* (x+1,y+1) */ + GET_RGB565(i32_r11,i32_g11,i32_b11,u16_pix4); + + /* Convert RGB value to YUV */ + i32_u00 = U16(i32_r00, i32_g00, i32_b00); + i32_v00 = V16(i32_r00, i32_g00, i32_b00); + /* luminance value */ + i32_y00 = Y16(i32_r00, i32_g00, i32_b00); + + i32_u10 = U16(i32_r10, i32_g10, i32_b10); + i32_v10 = V16(i32_r10, i32_g10, i32_b10); + /* luminance value */ + i32_y10 = Y16(i32_r10, i32_g10, i32_b10); + + i32_u01 = U16(i32_r01, i32_g01, i32_b01); + i32_v01 = V16(i32_r01, i32_g01, i32_b01); + /* luminance value */ + i32_y01 = Y16(i32_r01, i32_g01, i32_b01); + + i32_u11 = U16(i32_r11, i32_g11, i32_b11); + i32_v11 = V16(i32_r11, i32_g11, i32_b11); + /* luminance value */ + i32_y11 = Y16(i32_r11, i32_g11, i32_b11); + + /* Store luminance data */ + pu8_yn[0] = (M4VIFI_UInt8)i32_y00; + pu8_yn[1] = (M4VIFI_UInt8)i32_y10; + pu8_ys[0] = (M4VIFI_UInt8)i32_y01; + pu8_ys[1] = (M4VIFI_UInt8)i32_y11; + + /* Store chroma data */ + *pu8_u = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2); + *pu8_v = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2); + + /* Prepare for next column */ + pu8_rgbn += (CST_RGB_16_SIZE<<1); + /* Update current Y plane line pointer*/ + pu8_yn += 2; + /* Update next Y plane line pointer*/ + pu8_ys += 2; + /* Update U plane line pointer*/ + pu8_u += 2; + /* Update V plane line pointer*/ + pu8_v += 2; + } /* End of horizontal scanning */ + + /* Prepare pointers for the next row */ + pu8_y_data += u32_stride2_Y; + pu8_u_data += u32_stride_UV; + pu8_v_data += u32_stride_UV; + pu8_rgbn_data += u32_stride_2rgb; + + + } /* End of vertical scanning */ + + return M4VIFI_OK; +} + + +unsigned char M4VFL_modifyLumaWithScale_NV12(M4ViComImagePlane *plane_in, + M4ViComImagePlane *plane_out, unsigned long lum_factor, + void *user_data) +{ + unsigned short *p_src, *p_dest, *p_src_line, *p_dest_line; + unsigned char *p_csrc, *p_cdest, *p_csrc_line, *p_cdest_line; + unsigned long pix_src; + unsigned long u_outpx, u_outpx2; + unsigned long u_width, u_stride, u_stride_out,u_height, pix; + long i, j; + + /* copy or filter chroma */ + u_width = plane_in[1].u_width; + u_height = 
plane_in[1].u_height; + u_stride = plane_in[1].u_stride; + u_stride_out = plane_out[1].u_stride; + p_cdest_line = (unsigned char *) &plane_out[1].pac_data[plane_out[1].u_topleft]; + p_csrc_line = (unsigned char *) &plane_in[1].pac_data[plane_in[1].u_topleft]; + + if (lum_factor > 256) + { + /* copy chroma */ + for (j = u_height; j != 0; j--) + { + memcpy((void *)p_cdest_line, (void *)p_csrc_line, u_width); + p_cdest_line += u_stride_out; + p_csrc_line += u_stride; + } + } + else + { + /* filter chroma */ + pix = (1024 - lum_factor) << 7; + for (j = u_height; j != 0; j--) + { + p_cdest = p_cdest_line; + p_csrc = p_csrc_line; + for (i = u_width; i != 0; i--) + { + *p_cdest++ = ((pix + (*p_csrc++ & 0xFF) * lum_factor) >> LUM_FACTOR_MAX); + } + p_cdest_line += u_stride_out; + p_csrc_line += u_stride; + } + } + + /* apply luma factor */ + u_width = plane_in[0].u_width; + u_height = plane_in[0].u_height; + u_stride = (plane_in[0].u_stride >> 1); + u_stride_out = (plane_out[0].u_stride >> 1); + p_dest = (unsigned short *) &plane_out[0].pac_data[plane_out[0].u_topleft]; + p_src = (unsigned short *) &plane_in[0].pac_data[plane_in[0].u_topleft]; + p_dest_line = p_dest; + p_src_line = p_src; + + for (j = u_height; j != 0; j--) + { + p_dest = p_dest_line; + p_src = p_src_line; + for (i = (u_width >> 1); i != 0; i--) + { + pix_src = (unsigned long) *p_src++; + pix = pix_src & 0xFF; + u_outpx = ((pix * lum_factor) >> LUM_FACTOR_MAX); + pix = ((pix_src & 0xFF00) >> 8); + u_outpx2 = (((pix * lum_factor) >> LUM_FACTOR_MAX)<< 8) ; + *p_dest++ = (unsigned short) (u_outpx2 | u_outpx); + } + p_dest_line += u_stride_out; + p_src_line += u_stride; + } + return 0; +} + +unsigned char M4VIFI_ImageBlendingonNV12 (void *pUserData, + M4ViComImagePlane *pPlaneIn1, M4ViComImagePlane *pPlaneIn2, + M4ViComImagePlane *pPlaneOut, UInt32 Progress) +{ + UInt8 *pu8_data_Y_start1, *pu8_data_Y_start2, *pu8_data_Y_start3; + UInt8 *pu8_data_UV_start1, *pu8_data_UV_start2, *pu8_data_UV_start3; + UInt8 *pu8_data_Y_current1, *pu8_data_Y_next1; + UInt8 *pu8_data_Y_current2, *pu8_data_Y_next2; + UInt8 *pu8_data_Y_current3, *pu8_data_Y_next3; + UInt8 *pu8_data_UV1, *pu8_data_UV2, *pu8_data_UV3; + UInt32 u32_stride_Y1, u32_stride2_Y1, u32_stride_UV1; + UInt32 u32_stride_Y2, u32_stride2_Y2, u32_stride_UV2; + UInt32 u32_stride_Y3, u32_stride2_Y3, u32_stride_UV3; + UInt32 u32_height, u32_width; + UInt32 u32_blendfactor, u32_startA, u32_endA, u32_blend_inc, u32_x_accum; + UInt32 u32_col, u32_row, u32_rangeA, u32_progress; + UInt32 u32_U1,u32_V1,u32_U2,u32_V2, u32_Y1, u32_Y2; + + /* Check the Y plane height is EVEN and image plane heights are same */ + if( (IS_EVEN(pPlaneIn1[0].u_height) == FALSE) || + (IS_EVEN(pPlaneIn2[0].u_height) == FALSE) || + (IS_EVEN(pPlaneOut[0].u_height) == FALSE) || + (pPlaneIn1[0].u_height != pPlaneOut[0].u_height) || + (pPlaneIn2[0].u_height != pPlaneOut[0].u_height) ) + { + return M4VIFI_ILLEGAL_FRAME_HEIGHT; + } + + /* Check the Y plane width is EVEN and image plane widths are same */ + if( (IS_EVEN(pPlaneIn1[0].u_width) == FALSE) || + (IS_EVEN(pPlaneIn2[0].u_width) == FALSE) || + (IS_EVEN(pPlaneOut[0].u_width) == FALSE) || + (pPlaneIn1[0].u_width != pPlaneOut[0].u_width) || + (pPlaneIn2[0].u_width != pPlaneOut[0].u_width) ) + { + return M4VIFI_ILLEGAL_FRAME_WIDTH; + } + /* Set the pointer to the beginning of the input1 NV12 image planes */ + pu8_data_Y_start1 = pPlaneIn1[0].pac_data + pPlaneIn1[0].u_topleft; + pu8_data_UV_start1 = pPlaneIn1[1].pac_data + pPlaneIn1[1].u_topleft; + + /* Set the pointer to the 
beginning of the input2 NV12 image planes */ + pu8_data_Y_start2 = pPlaneIn2[0].pac_data + pPlaneIn2[0].u_topleft; + pu8_data_UV_start2 = pPlaneIn2[1].pac_data + pPlaneIn2[1].u_topleft; + + /* Set the pointer to the beginning of the output NV12 image planes */ + pu8_data_Y_start3 = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft; + pu8_data_UV_start3 = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft; + + /* Set the stride for the next row in each input1 NV12 plane */ + u32_stride_Y1 = pPlaneIn1[0].u_stride; + u32_stride_UV1 = pPlaneIn1[1].u_stride; + + /* Set the stride for the next row in each input2 NV12 plane */ + u32_stride_Y2 = pPlaneIn2[0].u_stride; + u32_stride_UV2 = pPlaneIn2[1].u_stride; + + /* Set the stride for the next row in each output NV12 plane */ + u32_stride_Y3 = pPlaneOut[0].u_stride; + u32_stride_UV3 = pPlaneOut[1].u_stride; + + u32_stride2_Y1 = u32_stride_Y1 << 1; + u32_stride2_Y2 = u32_stride_Y2 << 1; + u32_stride2_Y3 = u32_stride_Y3 << 1; + + /* Get the size of the output image */ + u32_height = pPlaneOut[0].u_height; + u32_width = pPlaneOut[0].u_width; + + /* User Specified Progress value */ + u32_progress = Progress; + + /* Map Progress value from (0 - 1000) to (0 - 1024) -> for optimisation */ + if(u32_progress < 1000) + u32_progress = ((u32_progress << 10) / 1000); + else + u32_progress = 1024; + + /* Set the range of blendingfactor */ + if(u32_progress <= 512) + { + u32_startA = 0; + u32_endA = (u32_progress << 1); + } + else /* u32_progress > 512 */ + { + u32_startA = (u32_progress - 512) << 1; + u32_endA = 1024; + } + u32_rangeA = u32_endA - u32_startA; + + /* Set the increment of blendingfactor for each element in the image row */ + if ((u32_width >= u32_rangeA) && (u32_rangeA > 0) ) + { + u32_blend_inc = ((u32_rangeA-1) * MAX_SHORT) / (u32_width - 1); + } + else /* (u32_width < u32_rangeA) || (u32_rangeA < 0) */ + { + u32_blend_inc = (u32_rangeA * MAX_SHORT) / (u32_width); + } + /* Two YUV420 rows are computed at each pass */ + for (u32_row = u32_height; u32_row != 0; u32_row -=2) + { + /* Set pointers to the beginning of the row for each input image1 plane */ + pu8_data_Y_current1 = pu8_data_Y_start1; + pu8_data_UV1 = pu8_data_UV_start1; + + /* Set pointers to the beginning of the row for each input image2 plane */ + pu8_data_Y_current2 = pu8_data_Y_start2; + pu8_data_UV2 = pu8_data_UV_start2; + + /* Set pointers to the beginning of the row for each output image plane */ + pu8_data_Y_current3 = pu8_data_Y_start3; + pu8_data_UV3 = pu8_data_UV_start3; + + /* Set pointers to the beginning of the next row for image luma plane */ + pu8_data_Y_next1 = pu8_data_Y_current1 + u32_stride_Y1; + pu8_data_Y_next2 = pu8_data_Y_current2 + u32_stride_Y2; + pu8_data_Y_next3 = pu8_data_Y_current3 + u32_stride_Y3; + + /* Initialise blendfactor */ + u32_blendfactor = u32_startA; + /* Blendfactor Increment accumulator */ + u32_x_accum = 0; + + /* Loop on each column of the output image */ + for (u32_col = u32_width; u32_col != 0 ; u32_col -=2) + { + /* Update the blending factor */ + u32_blendfactor = u32_startA + (u32_x_accum >> 16); + + /* Get Luma value (x,y) of input Image1 */ + u32_Y1 = *pu8_data_Y_current1++; + + /* Get chrominance2 value */ + u32_U1 = *pu8_data_UV1++; + u32_V1 = *pu8_data_UV1++; + + /* Get Luma value (x,y) of input Image2 */ + u32_Y2 = *pu8_data_Y_current2++; + + /* Get chrominance2 value */ + u32_U2 = *pu8_data_UV2++; + u32_V2 = *pu8_data_UV2++; + + /* Compute Luma value (x,y) of Output image */ + *pu8_data_Y_current3++ = (UInt8)((u32_blendfactor * u32_Y2 
+                + (1024 - u32_blendfactor)*u32_Y1) >> 10);
+            /* Compute chroma(U) value of Output image */
+            *pu8_data_UV3++ = (UInt8)((u32_blendfactor * u32_U2 +
+                (1024 - u32_blendfactor)*u32_U1) >> 10);
+            /* Compute chroma(V) value of Output image */
+            *pu8_data_UV3++ = (UInt8)((u32_blendfactor * u32_V2 +
+                (1024 - u32_blendfactor)*u32_V1) >> 10);
+
+            /* Get Luma value (x,y+1) of input Image1 */
+            u32_Y1 = *pu8_data_Y_next1++;
+
+            /* Get Luma value (x,y+1) of input Image2 */
+            u32_Y2 = *pu8_data_Y_next2++;
+
+            /* Compute Luma value (x,y+1) of Output image*/
+            *pu8_data_Y_next3++ = (UInt8)((u32_blendfactor * u32_Y2 +
+                (1024 - u32_blendfactor)*u32_Y1) >> 10);
+            /* Update accumulator */
+            u32_x_accum += u32_blend_inc;
+
+            /* Update the blending factor */
+            u32_blendfactor = u32_startA + (u32_x_accum >> 16);
+
+            /* Get Luma value (x+1,y) of input Image1 */
+            u32_Y1 = *pu8_data_Y_current1++;
+
+            /* Get Luma value (x+1,y) of input Image2 */
+            u32_Y2 = *pu8_data_Y_current2++;
+
+            /* Compute Luma value (x+1,y) of Output image*/
+            *pu8_data_Y_current3++ = (UInt8)((u32_blendfactor * u32_Y2 +
+                (1024 - u32_blendfactor)*u32_Y1) >> 10);
+
+            /* Get Luma value (x+1,y+1) of input Image1 */
+            u32_Y1 = *pu8_data_Y_next1++;
+
+            /* Get Luma value (x+1,y+1) of input Image2 */
+            u32_Y2 = *pu8_data_Y_next2++;
+
+            /* Compute Luma value (x+1,y+1) of Output image*/
+            *pu8_data_Y_next3++ = (UInt8)((u32_blendfactor * u32_Y2 +
+                (1024 - u32_blendfactor)*u32_Y1) >> 10);
+            /* Update accumulator */
+            u32_x_accum += u32_blend_inc;
+
+            /* Working pointers are incremented just after each storage */
+
+        }/* End of horizontal scanning */
+
+        /* Update working pointer of input image1 for next row */
+        pu8_data_Y_start1 += u32_stride2_Y1;
+        pu8_data_UV_start1 += u32_stride_UV1;
+
+        /* Update working pointer of input image2 for next row */
+        pu8_data_Y_start2 += u32_stride2_Y2;
+        pu8_data_UV_start2 += u32_stride_UV2;
+
+        /* Update working pointer of output image for next row */
+        pu8_data_Y_start3 += u32_stride2_Y3;
+        pu8_data_UV_start3 += u32_stride_UV3;
+
+    }/* End of vertical scanning */
+
+    return M4VIFI_OK;
+}
+
+
+/**
+ ******************************************************************************
+ * M4VIFI_UInt8 M4VIFI_xVSS_RGB565toNV12 (void *pUserData,
+ *                                        M4VIFI_ImagePlane *pPlaneIn,
+ *                                        M4VIFI_ImagePlane *pPlaneOut)
+ * @brief   transform RGB565 image to an NV12 image.
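+ * @note    xVSS variant: in addition to the conversion documented below, any input
+ *          pixel matching the RGB565 transparency key (R,G,B) = (0,63,0) (cf. the
+ *          TRANSPARENT_COLOR define above) is first forced to white (31,63,31).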
+ * @note Convert RGB565 to NV12, + * Loop on each row ( 2 rows by 2 rows ) + * Loop on each column ( 2 col by 2 col ) + * Get 4 RGB samples from input data and build 4 output Y samples + * and each single U & V data + * end loop on col + * end loop on row + * @param pUserData: (IN) User Specific Data + * @param pPlaneIn: (IN) Pointer to RGB565 Plane + * @param pPlaneOut: (OUT) Pointer to NV12 buffer Plane + * @return M4VIFI_OK: there is no error + * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: YUV Plane height is ODD + * @return M4VIFI_ILLEGAL_FRAME_WIDTH: YUV Plane width is ODD + ****************************************************************************** +*/ +M4VIFI_UInt8 M4VIFI_xVSS_RGB565toNV12(void *pUserData, M4VIFI_ImagePlane *pPlaneIn, + M4VIFI_ImagePlane *pPlaneOut) +{ + M4VIFI_UInt32 u32_width, u32_height; + M4VIFI_UInt32 u32_stride_Y, u32_stride2_Y, u32_stride_UV; + M4VIFI_UInt32 u32_stride_rgb, u32_stride_2rgb; + M4VIFI_UInt32 u32_col, u32_row; + + M4VIFI_Int32 i32_r00, i32_r01, i32_r10, i32_r11; + M4VIFI_Int32 i32_g00, i32_g01, i32_g10, i32_g11; + M4VIFI_Int32 i32_b00, i32_b01, i32_b10, i32_b11; + M4VIFI_Int32 i32_y00, i32_y01, i32_y10, i32_y11; + M4VIFI_Int32 i32_u00, i32_u01, i32_u10, i32_u11; + M4VIFI_Int32 i32_v00, i32_v01, i32_v10, i32_v11; + M4VIFI_UInt8 *pu8_yn, *pu8_ys, *pu8_u, *pu8_v; + M4VIFI_UInt8 *pu8_y_data, *pu8_u_data, *pu8_v_data; + M4VIFI_UInt8 *pu8_rgbn_data, *pu8_rgbn; + M4VIFI_UInt16 u16_pix1, u16_pix2, u16_pix3, u16_pix4; + M4VIFI_UInt8 count_null=0; + + /* Check planes height are appropriate */ + if( (pPlaneIn->u_height != pPlaneOut[0].u_height) || + (pPlaneOut[0].u_height != (pPlaneOut[1].u_height<<1))) + { + return M4VIFI_ILLEGAL_FRAME_HEIGHT; + } + + /* Check planes width are appropriate */ + if( (pPlaneIn->u_width != pPlaneOut[0].u_width) || + (pPlaneOut[0].u_width != pPlaneOut[1].u_width)) + { + return M4VIFI_ILLEGAL_FRAME_WIDTH; + } + + /* Set the pointer to the beginning of the output data buffers */ + pu8_y_data = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft; + pu8_u_data = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft; + pu8_v_data = pu8_u_data + 1; + + /* Set the pointer to the beginning of the input data buffers */ + pu8_rgbn_data = pPlaneIn->pac_data + pPlaneIn->u_topleft; + + /* Get the size of the output image */ + u32_width = pPlaneOut[0].u_width; + u32_height = pPlaneOut[0].u_height; + + /* Set the size of the memory jumps corresponding to row jump in each output plane */ + u32_stride_Y = pPlaneOut[0].u_stride; + u32_stride2_Y = u32_stride_Y << 1; + u32_stride_UV = pPlaneOut[1].u_stride; + + /* Set the size of the memory jumps corresponding to row jump in input plane */ + u32_stride_rgb = pPlaneIn->u_stride; + u32_stride_2rgb = u32_stride_rgb << 1; + + /* Loop on each row of the output image, input coordinates are estimated from output ones */ + /* Two YUV rows are computed at each pass */ + for (u32_row = u32_height ;u32_row != 0; u32_row -=2) + { + /* Current Y plane row pointers */ + pu8_yn = pu8_y_data; + /* Next Y plane row pointers */ + pu8_ys = pu8_yn + u32_stride_Y; + /* Current U plane row pointer */ + pu8_u = pu8_u_data; + /* Current V plane row pointer */ + pu8_v = pu8_v_data; + + pu8_rgbn = pu8_rgbn_data; + + /* Loop on each column of the output image */ + for (u32_col = u32_width; u32_col != 0 ; u32_col -=2) + { + /* Get four RGB 565 samples from input data */ + u16_pix1 = *( (M4VIFI_UInt16 *) pu8_rgbn); + u16_pix2 = *( (M4VIFI_UInt16 *) (pu8_rgbn + CST_RGB_16_SIZE)); + u16_pix3 = *( (M4VIFI_UInt16 *) (pu8_rgbn + 
u32_stride_rgb));
+            u16_pix4 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb + CST_RGB_16_SIZE));
+
+            /* Unpack RGB565 to 8bit R, G, B */
+            /* (x,y) */
+            GET_RGB565(i32_b00,i32_g00,i32_r00,u16_pix1);
+            /* (x+1,y) */
+            GET_RGB565(i32_b10,i32_g10,i32_r10,u16_pix2);
+            /* (x,y+1) */
+            GET_RGB565(i32_b01,i32_g01,i32_r01,u16_pix3);
+            /* (x+1,y+1) */
+            GET_RGB565(i32_b11,i32_g11,i32_r11,u16_pix4);
+            /* If RGB is transparent color (0, 63, 0), we transform it to white (31,63,31) */
+            if(i32_b00 == 0 && i32_g00 == 63 && i32_r00 == 0)
+            {
+                i32_b00 = 31;
+                i32_r00 = 31;
+            }
+            if(i32_b10 == 0 && i32_g10 == 63 && i32_r10 == 0)
+            {
+                i32_b10 = 31;
+                i32_r10 = 31;
+            }
+            if(i32_b01 == 0 && i32_g01 == 63 && i32_r01 == 0)
+            {
+                i32_b01 = 31;
+                i32_r01 = 31;
+            }
+            if(i32_b11 == 0 && i32_g11 == 63 && i32_r11 == 0)
+            {
+                i32_b11 = 31;
+                i32_r11 = 31;
+            }
+            /* Convert RGB value to YUV */
+            i32_u00 = U16(i32_r00, i32_g00, i32_b00);
+            i32_v00 = V16(i32_r00, i32_g00, i32_b00);
+            /* luminance value */
+            i32_y00 = Y16(i32_r00, i32_g00, i32_b00);
+
+            i32_u10 = U16(i32_r10, i32_g10, i32_b10);
+            i32_v10 = V16(i32_r10, i32_g10, i32_b10);
+            /* luminance value */
+            i32_y10 = Y16(i32_r10, i32_g10, i32_b10);
+
+            i32_u01 = U16(i32_r01, i32_g01, i32_b01);
+            i32_v01 = V16(i32_r01, i32_g01, i32_b01);
+            /* luminance value */
+            i32_y01 = Y16(i32_r01, i32_g01, i32_b01);
+
+            i32_u11 = U16(i32_r11, i32_g11, i32_b11);
+            i32_v11 = V16(i32_r11, i32_g11, i32_b11);
+            /* luminance value */
+            i32_y11 = Y16(i32_r11, i32_g11, i32_b11);
+            /* Store luminance data */
+            pu8_yn[0] = (M4VIFI_UInt8)i32_y00;
+            pu8_yn[1] = (M4VIFI_UInt8)i32_y10;
+            pu8_ys[0] = (M4VIFI_UInt8)i32_y01;
+            pu8_ys[1] = (M4VIFI_UInt8)i32_y11;
+            *pu8_u = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2);
+            *pu8_v = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2);
+            /* Prepare for next column */
+            pu8_rgbn += (CST_RGB_16_SIZE<<1);
+            /* Update current Y plane line pointer*/
+            pu8_yn += 2;
+            /* Update next Y plane line pointer*/
+            pu8_ys += 2;
+            /* Update U plane line pointer*/
+            pu8_u += 2;
+            /* Update V plane line pointer*/
+            pu8_v += 2;
+        } /* End of horizontal scanning */
+
+        /* Prepare pointers for the next row */
+        pu8_y_data += u32_stride2_Y;
+        pu8_u_data += u32_stride_UV;
+        pu8_v_data += u32_stride_UV;
+        pu8_rgbn_data += u32_stride_2rgb;
+
+
+    } /* End of vertical scanning */
+
+    return M4VIFI_OK;
+}
+
+
+/**
+ ******************************************************************************
+ * M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toNV12(M4OSA_Void* pFileIn,
+ *                                                 M4OSA_FileReadPointer* pFileReadPtr,
+ *                                                 M4VIFI_ImagePlane* pImagePlanes,
+ *                                                 M4OSA_UInt32 width,
+ *                                                 M4OSA_UInt32 height);
+ * @brief   It converts and resizes an ARGB8888 image to NV12
+ * @note
+ * @param   pFileIn         (IN) The Image input file
+ * @param   pFileReadPtr    (IN) Pointer on filesystem functions
+ * @param   pImagePlanes    (IN/OUT) Pointer on NV12 output planes allocated by the user;
+ *                          the ARGB8888 image will be converted and resized to the output
+ *                          NV12 plane size
+ * @param   width           (IN) width of the ARGB8888
+ * @param   height          (IN) height of the ARGB8888
+ * @return  M4NO_ERROR: No error
+ * @return  M4ERR_ALLOC: memory error
+ * @return  M4ERR_PARAMETER: At least one of the function parameters is null
+ ******************************************************************************
+ */
+
+M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toNV12(M4OSA_Void* pFileIn,
+    M4OSA_FileReadPointer* pFileReadPtr, M4VIFI_ImagePlane* pImagePlanes,
+    M4OSA_UInt32 width, M4OSA_UInt32 height)
+{
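+    /* Flow of the steps below: read the whole ARGB8888 file into a temporary buffer,
+     * strip the alpha channel to get a packed RGB888 plane, bilinear-resize it when
+     * the target plane size differs, then convert with M4VIFI_RGB888toNV12. */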
+    M4OSA_Context pARGBIn;
+    M4VIFI_ImagePlane rgbPlane1, rgbPlane2;
+    M4OSA_UInt32 frameSize_argb = (width * height * 4);
+    M4OSA_UInt32 frameSize = (width * height * 3); //Size of RGB888 data.
+    M4OSA_UInt32 i = 0, j = 0;
+    M4OSA_ERR err = M4NO_ERROR;
+
+
+    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb,
+        M4VS, (M4OSA_Char*)"Image argb data");
+    M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toNV12 Entering :");
+    if(pTmpData == M4OSA_NULL) {
+        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toNV12 :\
+            Failed to allocate memory for Image clip");
+        return M4ERR_ALLOC;
+    }
+
+    M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toNV12 :width and height %d %d",
+        width, height);
+    /* Get file size (mandatory for chunk decoding) */
+    err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead);
+    if(err != M4NO_ERROR)
+    {
+        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toNV12 :\
+            Can't open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
+        free(pTmpData);
+        pTmpData = M4OSA_NULL;
+        goto cleanup;
+    }
+
+    err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
+    if(err != M4NO_ERROR)
+    {
+        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toNV12 Can't read ARGB8888\
+            file %s, error: 0x%x\n",pFileIn, err);
+        pFileReadPtr->closeRead(pARGBIn);
+        free(pTmpData);
+        pTmpData = M4OSA_NULL;
+        goto cleanup;
+    }
+
+    err = pFileReadPtr->closeRead(pARGBIn);
+    if(err != M4NO_ERROR)
+    {
+        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toNV12 Can't close ARGB8888 \
+            file %s, error: 0x%x\n",pFileIn, err);
+        free(pTmpData);
+        pTmpData = M4OSA_NULL;
+        goto cleanup;
+    }
+
+    rgbPlane1.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
+        (M4OSA_Char*)"Image clip RGB888 data");
+    if(rgbPlane1.pac_data == M4OSA_NULL)
+    {
+        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toNV12 \
+            Failed to allocate memory for Image clip");
+        free(pTmpData);
+        return M4ERR_ALLOC;
+    }
+
+    rgbPlane1.u_height = height;
+    rgbPlane1.u_width = width;
+    rgbPlane1.u_stride = width*3;
+    rgbPlane1.u_topleft = 0;
+
+
+    /** Remove the alpha channel */
+    for (i = 0, j = 0; i < frameSize_argb; i++) {
+        if ((i % 4) == 0) continue;
+        rgbPlane1.pac_data[j] = pTmpData[i];
+        j++;
+    }
+    free(pTmpData);
+
+    /* Check if resizing is required along with the color conversion */
+    if(width != pImagePlanes->u_width || height != pImagePlanes->u_height)
+    {
+        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toNV12 Resizing :");
+        frameSize = ( pImagePlanes->u_width * pImagePlanes->u_height * 3);
+        rgbPlane2.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
+            (M4OSA_Char*)"Image clip RGB888 data");
+        if(rgbPlane2.pac_data == M4OSA_NULL)
+        {
+            M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
+            /* pTmpData was already released above: free the RGB888 plane instead */
+            free(rgbPlane1.pac_data);
+            return M4ERR_ALLOC;
+        }
+
+        rgbPlane2.u_height = pImagePlanes->u_height;
+        rgbPlane2.u_width = pImagePlanes->u_width;
+        rgbPlane2.u_stride = pImagePlanes->u_width*3;
+        rgbPlane2.u_topleft = 0;
+
+        /* Resizing RGB888 to RGB888 */
+        err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane1, &rgbPlane2);
+        if(err != M4NO_ERROR)
+        {
+            M4OSA_TRACE1_1("error when resizing RGB888 to RGB888: 0x%x\n", err);
+            free(rgbPlane2.pac_data);
+            free(rgbPlane1.pac_data);
+            return err;
+        }
+        /*Converting Resized RGB888 to NV12 */
+        err = M4VIFI_RGB888toNV12(M4OSA_NULL, &rgbPlane2, pImagePlanes);
+        if(err != M4NO_ERROR)
+        {
+            M4OSA_TRACE1_1("error when converting from RGB888 to NV12: 0x%x\n", err);
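+            /* conversion failed: release both intermediate RGB888 buffers before
+               propagating the error */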
+            free(rgbPlane2.pac_data);
+            free(rgbPlane1.pac_data);
+            return err;
+        }
+        free(rgbPlane2.pac_data);
+        free(rgbPlane1.pac_data);
+
+        M4OSA_TRACE1_0("RGB to YUV done");
+
+
+    }
+    else
+    {
+        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toNV12 NO Resizing :");
+        err = M4VIFI_RGB888toNV12(M4OSA_NULL, &rgbPlane1, pImagePlanes);
+        if(err != M4NO_ERROR)
+        {
+            M4OSA_TRACE1_1("error when converting from RGB888 to NV12: 0x%x\n", err);
+        }
+        free(rgbPlane1.pac_data);
+
+        M4OSA_TRACE1_0("RGB to YUV done");
+    }
+cleanup:
+    M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toNV12 leaving :");
+    return err;
+}
+
+
+/**
+ ******************************************************************************
+ * M4OSA_ERR M4xVSS_internalConvertARGB8888toNV12(M4OSA_Void* pFileIn,
+ *                                                M4OSA_FileReadPointer* pFileReadPtr,
+ *                                                M4VIFI_ImagePlane** pImagePlanes,
+ *                                                M4OSA_UInt32 width,
+ *                                                M4OSA_UInt32 height);
+ * @brief   It converts an ARGB8888 image to NV12
+ * @note
+ * @param   pFileIn         (IN) The Image input file
+ * @param   pFileReadPtr    (IN) Pointer on filesystem functions
+ * @param   pImagePlanes    (OUT) Address at which the pointer to the NV12 planes
+ *                          allocated by this function is returned; the ARGB8888
+ *                          image is converted into these planes at its own size
+ * @param   width           (IN) width of the ARGB8888
+ * @param   height          (IN) height of the ARGB8888
+ * @return  M4NO_ERROR: No error
+ * @return  M4ERR_ALLOC: memory error
+ * @return  M4ERR_PARAMETER: At least one of the function parameters is null
+ ******************************************************************************
+ */
+
+M4OSA_ERR M4xVSS_internalConvertARGB8888toNV12(M4OSA_Void* pFileIn,
+    M4OSA_FileReadPointer* pFileReadPtr, M4VIFI_ImagePlane** pImagePlanes,
+    M4OSA_UInt32 width, M4OSA_UInt32 height)
+{
+    M4OSA_ERR err = M4NO_ERROR;
+    M4VIFI_ImagePlane *yuvPlane = M4OSA_NULL;
+
+    yuvPlane = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(2*sizeof(M4VIFI_ImagePlane),
+        M4VS, (M4OSA_Char*)"M4xVSS_internalConvertARGB8888toNV12: Output plane NV12");
+    if(yuvPlane == M4OSA_NULL) {
+        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toNV12:\
+            Failed to allocate memory for Image clip");
+        return M4ERR_ALLOC;
+    }
+    yuvPlane[0].u_height = height;
+    yuvPlane[0].u_width = width;
+    yuvPlane[0].u_stride = width;
+    yuvPlane[0].u_topleft = 0;
+    yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(yuvPlane[0].u_height \
+        * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data");
+
+    if(yuvPlane[0].pac_data == M4OSA_NULL) {
+        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toNV12 \
+            Failed to allocate memory for Image clip");
+        free(yuvPlane);
+        return M4ERR_ALLOC;
+    }
+
+    yuvPlane[1].u_height = yuvPlane[0].u_height >>1;
+    yuvPlane[1].u_width = yuvPlane[0].u_width;
+    yuvPlane[1].u_stride = yuvPlane[1].u_width;
+    yuvPlane[1].u_topleft = 0;
+    yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height \
+        * yuvPlane[0].u_width);
+
+    err = M4xVSS_internalConvertAndResizeARGB8888toNV12(pFileIn, pFileReadPtr,
+        yuvPlane, width, height);
+    if(err != M4NO_ERROR)
+    {
+        M4OSA_TRACE1_1("M4xVSS_internalConvertAndResizeARGB8888toNV12 return error: 0x%x\n", err);
+        /* the NV12 buffer is owned by yuvPlane[0]: release it before the plane array */
+        free(yuvPlane[0].pac_data);
+        free(yuvPlane);
+        return err;
+    }
+
+    *pImagePlanes = yuvPlane;
+
+    M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toNV12: Leaving");
+    return err;
+
+}
+
+/**
+ ******************************************************************************
+ * M4OSA_ERR M4xVSS_PictureCallbackFct_NV12 (M4OSA_Void* pPictureCtxt,
+ *                                           M4VIFI_ImagePlane* pImagePlanes,
+ *                                           M4OSA_Double* pPictureDuration);
+ * @brief   It feeds the PTO3GPP
with NV12 pictures. + * @note This function is given to the PTO3GPP in the M4PTO3GPP_Params structure + * @param pContext (IN) The integrator own context + * @param pImagePlanes(IN/OUT) Pointer to an array of three valid image planes + * @param pPictureDuration(OUT) Duration of the returned picture + * + * @return M4NO_ERROR: No error + * @return M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one + * @return M4ERR_PARAMETER: At least one of the function parameters is null + ****************************************************************************** + */ +M4OSA_ERR M4xVSS_PictureCallbackFct_NV12(M4OSA_Void* pPictureCtxt, + M4VIFI_ImagePlane* pImagePlanes, M4OSA_Double* pPictureDuration) +{ + M4OSA_ERR err = M4NO_ERROR; + M4OSA_UInt8 last_frame_flag = 0; + M4xVSS_PictureCallbackCtxt* pC = (M4xVSS_PictureCallbackCtxt*) (pPictureCtxt); + + /*Used for pan&zoom*/ + M4OSA_UInt8 tempPanzoomXa = 0; + M4OSA_UInt8 tempPanzoomXb = 0; + M4AIR_Params Params; + /**/ + + /*Used for cropping and black borders*/ + M4OSA_Context pPictureContext = M4OSA_NULL; + M4OSA_FilePosition pictureSize = 0 ; + M4OSA_UInt8* pictureBuffer = M4OSA_NULL; + //M4EXIFC_Context pExifContext = M4OSA_NULL; + M4EXIFC_BasicTags pBasicTags; + M4VIFI_ImagePlane pImagePlanes1 = pImagePlanes[0]; + M4VIFI_ImagePlane pImagePlanes2 = pImagePlanes[1]; + + /**/ + + /** + * Check input parameters */ + M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureCtxt), M4ERR_PARAMETER, + "M4xVSS_PictureCallbackFct_NV12: pPictureCtxt is M4OSA_NULL"); + M4OSA_DEBUG_IF2((M4OSA_NULL==pImagePlanes), M4ERR_PARAMETER, + "M4xVSS_PictureCallbackFct_NV12: pImagePlanes is M4OSA_NULL"); + M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureDuration), M4ERR_PARAMETER, + "M4xVSS_PictureCallbackFct_NV12: pPictureDuration is M4OSA_NULL"); + M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct :Entering"); + /*PR P4ME00003181 In case the image number is 0, pan&zoom can not be used*/ + if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom && pC->m_NbImage == 0) + { + pC->m_pPto3GPPparams->isPanZoom = M4OSA_FALSE; + } + + /*If no cropping/black borders or pan&zoom, just decode and resize the picture*/ + if(pC->m_mediaRendering == M4xVSS_kResizing && M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) + { + /** + * Convert and resize input ARGB8888 file to NV12 */ + /*To support ARGB8888 : */ + M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct_NV12 1: width and height %d %d", + pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); + err = M4xVSS_internalConvertAndResizeARGB8888toNV12(pC->m_FileIn, + pC->m_pFileReadPtr, pImagePlanes,pC->m_pPto3GPPparams->width, + pC->m_pPto3GPPparams->height); + if(err != M4NO_ERROR) + { + M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct_NV12: Error when decoding JPEG: 0x%x\n", err); + return err; + } + } + /*In case of cropping, black borders or pan&zoom, call the EXIF reader and the AIR*/ + else + { + /** + * Computes ratios */ + if(pC->m_pDecodedPlane == M4OSA_NULL) + { + /** + * Convert input ARGB8888 file to NV12 */ + M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct_NV12 2: width and height %d %d", + pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); + err = M4xVSS_internalConvertARGB8888toNV12(pC->m_FileIn, pC->m_pFileReadPtr, + &(pC->m_pDecodedPlane),pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); + if(err != M4NO_ERROR) + { + M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct_NV12: Error when decoding JPEG: 0x%x\n", err); + if(pC->m_pDecodedPlane != M4OSA_NULL) + { + /* NV12 planar is returned but allocation is made only once + (contigous planes in memory) */ + 
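+                    /* (the Y and UV planes share the single buffer allocated in
+                       M4xVSS_internalConvertARGB8888toNV12, so freeing plane 0's
+                       pac_data releases both planes) */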
if(pC->m_pDecodedPlane->pac_data != M4OSA_NULL) + { + free(pC->m_pDecodedPlane->pac_data); + } + free(pC->m_pDecodedPlane); + pC->m_pDecodedPlane = M4OSA_NULL; + } + return err; + } + } + + /*Initialize AIR Params*/ + Params.m_inputCoord.m_x = 0; + Params.m_inputCoord.m_y = 0; + Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; + Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; + Params.m_outputSize.m_width = pImagePlanes->u_width; + Params.m_outputSize.m_height = pImagePlanes->u_height; + Params.m_bOutputStripe = M4OSA_FALSE; + Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; + + /*Initialize Exif params structure*/ + pBasicTags.orientation = M4COMMON_kOrientationUnknown; + + /** + Pan&zoom params*/ + if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom) + { + /*Save ratio values, they can be reused if the new ratios are 0*/ + tempPanzoomXa = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXa; + tempPanzoomXb = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXb; + + /*Check that the ratio is not 0*/ + /*Check (a) parameters*/ + if(pC->m_pPto3GPPparams->PanZoomXa == 0) + { + M4OSA_UInt8 maxRatio = 0; + if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= + pC->m_pPto3GPPparams->PanZoomTopleftYa) + { + /*The ratio is 0, that means the area of the picture defined with (a) + parameters is bigger than the image size*/ + if(pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa > 1000) + { + /*The oversize is maxRatio*/ + maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa - 1000; + } + } + else + { + /*The ratio is 0, that means the area of the picture defined with (a) + parameters is bigger than the image size*/ + if(pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa > 1000) + { + /*The oversize is maxRatio*/ + maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa - 1000; + } + } + /*Modify the (a) parameters:*/ + if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= maxRatio) + { + /*The (a) topleft parameters can be moved to keep the same area size*/ + pC->m_pPto3GPPparams->PanZoomTopleftXa -= maxRatio; + } + else + { + /*Move the (a) topleft parameter to 0 but the ratio will be also further + modified to match the image size*/ + pC->m_pPto3GPPparams->PanZoomTopleftXa = 0; + } + if(pC->m_pPto3GPPparams->PanZoomTopleftYa >= maxRatio) + { + /*The (a) topleft parameters can be moved to keep the same area size*/ + pC->m_pPto3GPPparams->PanZoomTopleftYa -= maxRatio; + } + else + { + /*Move the (a) topleft parameter to 0 but the ratio will be also further + modified to match the image size*/ + pC->m_pPto3GPPparams->PanZoomTopleftYa = 0; + } + /*The new ratio is the original one*/ + pC->m_pPto3GPPparams->PanZoomXa = tempPanzoomXa; + if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 1000) + { + /*Change the ratio if the area of the picture defined with (a) parameters is + bigger than the image size*/ + pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXa; + } + if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 1000) + { + /*Change the ratio if the area of the picture defined with (a) parameters is + bigger than the image size*/ + pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYa; + } + } + /*Check (b) parameters*/ + if(pC->m_pPto3GPPparams->PanZoomXb == 0) + { + M4OSA_UInt8 maxRatio = 0; + if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= + pC->m_pPto3GPPparams->PanZoomTopleftYb) + { + /*The ratio is 0, that means the area of the picture defined with (b) + 
parameters is bigger than the image size*/ + if(pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb > 1000) + { + /*The oversize is maxRatio*/ + maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb - 1000; + } + } + else + { + /*The ratio is 0, that means the area of the picture defined with (b) + parameters is bigger than the image size*/ + if(pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb > 1000) + { + /*The oversize is maxRatio*/ + maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb - 1000; + } + } + /*Modify the (b) parameters:*/ + if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= maxRatio) + { + /*The (b) topleft parameters can be moved to keep the same area size*/ + pC->m_pPto3GPPparams->PanZoomTopleftXb -= maxRatio; + } + else + { + /*Move the (b) topleft parameter to 0 but the ratio will be also further + modified to match the image size*/ + pC->m_pPto3GPPparams->PanZoomTopleftXb = 0; + } + if(pC->m_pPto3GPPparams->PanZoomTopleftYb >= maxRatio) + { + /*The (b) topleft parameters can be moved to keep the same area size*/ + pC->m_pPto3GPPparams->PanZoomTopleftYb -= maxRatio; + } + else + { + /*Move the (b) topleft parameter to 0 but the ratio will be also further + modified to match the image size*/ + pC->m_pPto3GPPparams->PanZoomTopleftYb = 0; + } + /*The new ratio is the original one*/ + pC->m_pPto3GPPparams->PanZoomXb = tempPanzoomXb; + if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 1000) + { + /*Change the ratio if the area of the picture defined with (b) parameters is + bigger than the image size*/ + pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXb; + } + if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 1000) + { + /*Change the ratio if the area of the picture defined with (b) parameters is + bigger than the image size*/ + pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYb; + } + } + + /** + * Computes AIR parameters */ +/* Params.m_inputCoord.m_x = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width * + (pC->m_pPto3GPPparams->PanZoomTopleftXa + + (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftXb \ + - pC->m_pPto3GPPparams->PanZoomTopleftXa) * + pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; + Params.m_inputCoord.m_y = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height * + (pC->m_pPto3GPPparams->PanZoomTopleftYa + + (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftYb\ + - pC->m_pPto3GPPparams->PanZoomTopleftYa) * + pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; + + Params.m_inputSize.m_width = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width * + (pC->m_pPto3GPPparams->PanZoomXa + + (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) * + pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; + + Params.m_inputSize.m_height = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height * + (pC->m_pPto3GPPparams->PanZoomXa + + (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) * + pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; + */ + // Instead of using pC->m_NbImage we have to use (pC->m_NbImage-1) as pC->m_ImageCounter + // will be x-1 max for x no. 
of frames + Params.m_inputCoord.m_x = (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width * + (pC->m_pPto3GPPparams->PanZoomTopleftXa + + (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftXb\ + - pC->m_pPto3GPPparams->PanZoomTopleftXa) * + pC->m_ImageCounter) / ((M4OSA_Double)pC->m_NbImage-1))) / 1000)); + Params.m_inputCoord.m_y = + (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height * + (pC->m_pPto3GPPparams->PanZoomTopleftYa + + (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftYb\ + - pC->m_pPto3GPPparams->PanZoomTopleftYa) * + pC->m_ImageCounter) / ((M4OSA_Double)pC->m_NbImage-1))) / 1000)); + + Params.m_inputSize.m_width = + (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width * + (pC->m_pPto3GPPparams->PanZoomXa + + (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb\ + - pC->m_pPto3GPPparams->PanZoomXa) * + pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000)); + + Params.m_inputSize.m_height = + (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height * + (pC->m_pPto3GPPparams->PanZoomXa + + (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb \ + - pC->m_pPto3GPPparams->PanZoomXa) * + pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000)); + + + if((Params.m_inputSize.m_width + Params.m_inputCoord.m_x)\ + > pC->m_pDecodedPlane->u_width) + { + Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width \ + - Params.m_inputCoord.m_x; + } + + if((Params.m_inputSize.m_height + Params.m_inputCoord.m_y)\ + > pC->m_pDecodedPlane->u_height) + { + Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height\ + - Params.m_inputCoord.m_y; + } + + Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; + Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; + } + + + + /** + Picture rendering: Black borders*/ + + if(pC->m_mediaRendering == M4xVSS_kBlackBorders) + { + memset((void *)pImagePlanes[0].pac_data,Y_PLANE_BORDER_VALUE, + (pImagePlanes[0].u_height*pImagePlanes[0].u_stride)); + memset((void *)pImagePlanes[1].pac_data,U_PLANE_BORDER_VALUE, + (pImagePlanes[1].u_height*pImagePlanes[1].u_stride)); + /** + First without pan&zoom*/ + M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: Black borders"); + if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) + { + M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: Black borders without panzoom"); + + switch(pBasicTags.orientation) + { + default: + case M4COMMON_kOrientationUnknown: + Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; + case M4COMMON_kOrientationTopLeft: + case M4COMMON_kOrientationTopRight: + case M4COMMON_kOrientationBottomRight: + case M4COMMON_kOrientationBottomLeft: + if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\ + /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height) + //Params.m_inputSize.m_height < Params.m_inputSize.m_width) + { + /*it is height so black borders will be on the top and on the bottom side*/ + Params.m_outputSize.m_width = pImagePlanes->u_width; + Params.m_outputSize.m_height = + (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height \ + * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width); + /*number of lines at the top*/ + pImagePlanes[0].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ + -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride; + pImagePlanes[0].u_height = Params.m_outputSize.m_height; + pImagePlanes[1].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ + 
-(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[1].u_stride; + pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1; + + } + else + { + /*it is width so black borders will be on the left and right side*/ + Params.m_outputSize.m_height = pImagePlanes->u_height; + Params.m_outputSize.m_width = + (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ + * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height); + + pImagePlanes[0].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ + -Params.m_outputSize.m_width))>>1); + pImagePlanes[0].u_width = Params.m_outputSize.m_width; + pImagePlanes[1].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)((pImagePlanes[1].u_width\ + -Params.m_outputSize.m_width)>>1))); + pImagePlanes[1].u_width = Params.m_outputSize.m_width; + + } + break; + case M4COMMON_kOrientationLeftTop: + case M4COMMON_kOrientationLeftBottom: + case M4COMMON_kOrientationRightTop: + case M4COMMON_kOrientationRightBottom: + if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ + /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height) + //Params.m_inputSize.m_height > Params.m_inputSize.m_width) + { + /*it is height so black borders will be on the top and on + the bottom side*/ + Params.m_outputSize.m_height = pImagePlanes->u_width; + Params.m_outputSize.m_width = + (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ + * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_height); + /*number of lines at the top*/ + pImagePlanes[0].u_topleft = + ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ + -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1; + pImagePlanes[0].u_height = Params.m_outputSize.m_width; + pImagePlanes[1].u_topleft = + ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ + -(Params.m_outputSize.m_width>>1)))>>1)\ + *pImagePlanes[1].u_stride)+1; + pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1; + + } + else + { + /*it is width so black borders will be on the left and right side*/ + Params.m_outputSize.m_width = pImagePlanes->u_height; + Params.m_outputSize.m_height = + (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\ + * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_width); + + pImagePlanes[0].u_topleft = + ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ + -Params.m_outputSize.m_height))>>1))+1; + pImagePlanes[0].u_width = Params.m_outputSize.m_height; + pImagePlanes[1].u_topleft = + ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ + -(Params.m_outputSize.m_height>>1)))))+1; + pImagePlanes[1].u_width = Params.m_outputSize.m_height; + + } + break; + } + } + + /** + Secondly with pan&zoom*/ + else + { + M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: Black borders with panzoom"); + switch(pBasicTags.orientation) + { + default: + case M4COMMON_kOrientationUnknown: + Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; + case M4COMMON_kOrientationTopLeft: + case M4COMMON_kOrientationTopRight: + case M4COMMON_kOrientationBottomRight: + case M4COMMON_kOrientationBottomLeft: + /*NO ROTATION*/ + if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\ + /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height) + //Params.m_inputSize.m_height < Params.m_inputSize.m_width) + { + /*Black borders will be on the top and bottom of the output video*/ + /*Maximum output height if the input image aspect ratio is kept and if + the output width is the screen width*/ + M4OSA_UInt32 tempOutputSizeHeight = + (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\ + * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width); + 
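+                        /* Letterbox height when the aspect ratio is kept at full output
+                           width: e.g. for a hypothetical 640x360 input on a 320x240
+                           output, 360*320/640 = 180 lines, i.e. 30 black lines at the
+                           top and at the bottom. */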
M4OSA_UInt32 tempInputSizeHeightMax = 0; + M4OSA_UInt32 tempFinalInputHeight = 0; + /*The output width is the screen width*/ + Params.m_outputSize.m_width = pImagePlanes->u_width; + tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1; + + /*Maximum input height according to the maximum output height + (proportional to the maximum output height)*/ + tempInputSizeHeightMax = (pImagePlanes->u_height\ + *Params.m_inputSize.m_height)/tempOutputSizeHeight; + tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1; + + /*Check if the maximum possible input height is contained into the + input image height*/ + if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_height) + { + /*The maximum possible input height is contained in the input + image height, + that means no black borders, the input pan zoom area will be extended + so that the input AIR height will be the maximum possible*/ + if(((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\ + <= Params.m_inputCoord.m_y + && ((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\ + <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y\ + + Params.m_inputSize.m_height)) + { + /*The input pan zoom area can be extended symmetrically on the + top and bottom side*/ + Params.m_inputCoord.m_y -= ((tempInputSizeHeightMax \ + - Params.m_inputSize.m_height)>>1); + } + else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\ + -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height)) + { + /*There is not enough place above the input pan zoom area to + extend it symmetrically, + so extend it to the maximum on the top*/ + Params.m_inputCoord.m_y = 0; + } + else + { + /*There is not enough place below the input pan zoom area to + extend it symmetrically, + so extend it to the maximum on the bottom*/ + Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height \ + - tempInputSizeHeightMax; + } + /*The input height of the AIR is the maximum possible height*/ + Params.m_inputSize.m_height = tempInputSizeHeightMax; + } + else + { + /*The maximum possible input height is greater than the input + image height, + that means black borders are necessary to keep aspect ratio + The input height of the AIR is all the input image height*/ + Params.m_outputSize.m_height = + (tempOutputSizeHeight*pC->m_pDecodedPlane->u_height)\ + /Params.m_inputSize.m_height; + Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; + Params.m_inputCoord.m_y = 0; + Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; + pImagePlanes[0].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ + -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride; + pImagePlanes[0].u_height = Params.m_outputSize.m_height; + pImagePlanes[1].u_topleft = + ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ + -(Params.m_outputSize.m_height>>1)))>>1)\ + *pImagePlanes[1].u_stride); + pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1; + + } + } + else + { + /*Black borders will be on the left and right side of the output video*/ + /*Maximum output width if the input image aspect ratio is kept and if the + output height is the screen height*/ + M4OSA_UInt32 tempOutputSizeWidth = + (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ + * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height); + M4OSA_UInt32 tempInputSizeWidthMax = 0; + M4OSA_UInt32 tempFinalInputWidth = 0; + /*The output height is the screen height*/ + Params.m_outputSize.m_height = pImagePlanes->u_height; + tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1; + + /*Maximum input width 
according to the maximum output width + (proportional to the maximum output width)*/ + tempInputSizeWidthMax = + (pImagePlanes->u_width*Params.m_inputSize.m_width)\ + /tempOutputSizeWidth; + tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1; + + /*Check if the maximum possible input width is contained into the input + image width*/ + if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_width) + { + /*The maximum possible input width is contained in the input + image width, + that means no black borders, the input pan zoom area will be extended + so that the input AIR width will be the maximum possible*/ + if(((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1) \ + <= Params.m_inputCoord.m_x + && ((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1)\ + <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \ + + Params.m_inputSize.m_width)) + { + /*The input pan zoom area can be extended symmetrically on the + right and left side*/ + Params.m_inputCoord.m_x -= ((tempInputSizeWidthMax\ + - Params.m_inputSize.m_width)>>1); + } + else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\ + -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width)) + { + /*There is not enough place above the input pan zoom area to + extend it symmetrically, + so extend it to the maximum on the left*/ + Params.m_inputCoord.m_x = 0; + } + else + { + /*There is not enough place below the input pan zoom area + to extend it symmetrically, + so extend it to the maximum on the right*/ + Params.m_inputCoord.m_x = pC->m_pDecodedPlane->u_width \ + - tempInputSizeWidthMax; + } + /*The input width of the AIR is the maximum possible width*/ + Params.m_inputSize.m_width = tempInputSizeWidthMax; + } + else + { + /*The maximum possible input width is greater than the input + image width, + that means black borders are necessary to keep aspect ratio + The input width of the AIR is all the input image width*/ + Params.m_outputSize.m_width =\ + (tempOutputSizeWidth*pC->m_pDecodedPlane->u_width)\ + /Params.m_inputSize.m_width; + Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; + Params.m_inputCoord.m_x = 0; + Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; + pImagePlanes[0].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ + -Params.m_outputSize.m_width))>>1); + pImagePlanes[0].u_width = Params.m_outputSize.m_width; + pImagePlanes[1].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)((pImagePlanes[1].u_width\ + -Params.m_outputSize.m_width)>>1))); + pImagePlanes[1].u_width = Params.m_outputSize.m_width; + + } + } + break; + case M4COMMON_kOrientationLeftTop: + case M4COMMON_kOrientationLeftBottom: + case M4COMMON_kOrientationRightTop: + case M4COMMON_kOrientationRightBottom: + /*ROTATION*/ + if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ + /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height) + //Params.m_inputSize.m_height > Params.m_inputSize.m_width) + { + /*Black borders will be on the left and right side of the output video*/ + /*Maximum output height if the input image aspect ratio is kept and if + the output height is the screen width*/ + M4OSA_UInt32 tempOutputSizeHeight = + (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ + /pC->m_pDecodedPlane->u_height); + M4OSA_UInt32 tempInputSizeHeightMax = 0; + M4OSA_UInt32 tempFinalInputHeight = 0; + /*The output width is the screen height*/ + Params.m_outputSize.m_height = pImagePlanes->u_width; + Params.m_outputSize.m_width= pImagePlanes->u_height; + tempOutputSizeHeight = 
(tempOutputSizeHeight>>1)<<1; + + /*Maximum input height according to the maximum output height + (proportional to the maximum output height)*/ + tempInputSizeHeightMax = + (pImagePlanes->u_height*Params.m_inputSize.m_width)\ + /tempOutputSizeHeight; + tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1; + + /*Check if the maximum possible input height is contained into the + input image width (rotation included)*/ + if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_width) + { + /*The maximum possible input height is contained in the input + image width (rotation included), + that means no black borders, the input pan zoom area will be extended + so that the input AIR width will be the maximum possible*/ + if(((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1) \ + <= Params.m_inputCoord.m_x + && ((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1)\ + <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \ + + Params.m_inputSize.m_width)) + { + /*The input pan zoom area can be extended symmetrically on the + right and left side*/ + Params.m_inputCoord.m_x -= ((tempInputSizeHeightMax \ + - Params.m_inputSize.m_width)>>1); + } + else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\ + -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width)) + { + /*There is not enough place on the left of the input pan + zoom area to extend it symmetrically, + so extend it to the maximum on the left*/ + Params.m_inputCoord.m_x = 0; + } + else + { + /*There is not enough place on the right of the input pan zoom + area to extend it symmetrically, + so extend it to the maximum on the right*/ + Params.m_inputCoord.m_x = + pC->m_pDecodedPlane->u_width - tempInputSizeHeightMax; + } + /*The input width of the AIR is the maximum possible width*/ + Params.m_inputSize.m_width = tempInputSizeHeightMax; + } + else + { + /*The maximum possible input height is greater than the input + image width (rotation included), + that means black borders are necessary to keep aspect ratio + The input width of the AIR is all the input image width*/ + Params.m_outputSize.m_width = + (tempOutputSizeHeight*pC->m_pDecodedPlane->u_width)\ + /Params.m_inputSize.m_width; + Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; + Params.m_inputCoord.m_x = 0; + Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; + pImagePlanes[0].u_topleft = + ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ + -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1; + pImagePlanes[0].u_height = Params.m_outputSize.m_width; + pImagePlanes[1].u_topleft = + ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ + -(Params.m_outputSize.m_width>>1)))>>1)\ + *pImagePlanes[1].u_stride)+1; + pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1; + + } + } + else + { + /*Black borders will be on the top and bottom of the output video*/ + /*Maximum output width if the input image aspect ratio is kept and if + the output width is the screen height*/ + M4OSA_UInt32 tempOutputSizeWidth = + (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_height)\ + /pC->m_pDecodedPlane->u_width); + M4OSA_UInt32 tempInputSizeWidthMax = 0; + M4OSA_UInt32 tempFinalInputWidth = 0, tempFinalOutputWidth = 0; + /*The output height is the screen width*/ + Params.m_outputSize.m_width = pImagePlanes->u_height; + Params.m_outputSize.m_height= pImagePlanes->u_width; + tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1; + + /*Maximum input width according to the maximum output width + (proportional to the maximum output 
width)*/ + tempInputSizeWidthMax = + (pImagePlanes->u_width*Params.m_inputSize.m_height)/tempOutputSizeWidth; + tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1; + + /*Check if the maximum possible input width is contained into the input + image height (rotation included)*/ + if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_height) + { + /*The maximum possible input width is contained in the input + image height (rotation included), + that means no black borders, the input pan zoom area will be extended + so that the input AIR height will be the maximum possible*/ + if(((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1) \ + <= Params.m_inputCoord.m_y + && ((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1)\ + <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y \ + + Params.m_inputSize.m_height)) + { + /*The input pan zoom area can be extended symmetrically on + the right and left side*/ + Params.m_inputCoord.m_y -= ((tempInputSizeWidthMax \ + - Params.m_inputSize.m_height)>>1); + } + else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\ + -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height)) + { + /*There is not enough place on the top of the input pan zoom + area to extend it symmetrically, + so extend it to the maximum on the top*/ + Params.m_inputCoord.m_y = 0; + } + else + { + /*There is not enough place on the bottom of the input pan zoom + area to extend it symmetrically, + so extend it to the maximum on the bottom*/ + Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height\ + - tempInputSizeWidthMax; + } + /*The input height of the AIR is the maximum possible height*/ + Params.m_inputSize.m_height = tempInputSizeWidthMax; + } + else + { + /*The maximum possible input width is greater than the input\ + image height (rotation included), + that means black borders are necessary to keep aspect ratio + The input height of the AIR is all the input image height*/ + Params.m_outputSize.m_height = + (tempOutputSizeWidth*pC->m_pDecodedPlane->u_height)\ + /Params.m_inputSize.m_height; + Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; + Params.m_inputCoord.m_y = 0; + Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; + pImagePlanes[0].u_topleft = + ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ + -Params.m_outputSize.m_height))>>1))+1; + pImagePlanes[0].u_width = Params.m_outputSize.m_height; + pImagePlanes[1].u_topleft = + ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ + -(Params.m_outputSize.m_height>>1)))))+1; + pImagePlanes[1].u_width = Params.m_outputSize.m_height; + + } + } + break; + } + } + + /*Width and height have to be even*/ + Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; + Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; + Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; + Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; + pImagePlanes[0].u_width = (pImagePlanes[0].u_width>>1)<<1; + pImagePlanes[1].u_width = (pImagePlanes[1].u_width>>1)<<1; + + pImagePlanes[0].u_height = (pImagePlanes[0].u_height>>1)<<1; + pImagePlanes[1].u_height = (pImagePlanes[1].u_height>>1)<<1; + + + /*Check that values are coherent*/ + if(Params.m_inputSize.m_height == Params.m_outputSize.m_height) + { + Params.m_inputSize.m_width = Params.m_outputSize.m_width; + } + else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width) + { + Params.m_inputSize.m_height = Params.m_outputSize.m_height; + } + } + + /** + Picture rendering: Resizing and 
Cropping*/
+    if(pC->m_mediaRendering != M4xVSS_kBlackBorders)
+    {
+        switch(pBasicTags.orientation)
+        {
+        default:
+        case M4COMMON_kOrientationUnknown:
+            Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
+        case M4COMMON_kOrientationTopLeft:
+        case M4COMMON_kOrientationTopRight:
+        case M4COMMON_kOrientationBottomRight:
+        case M4COMMON_kOrientationBottomLeft:
+            Params.m_outputSize.m_height = pImagePlanes->u_height;
+            Params.m_outputSize.m_width = pImagePlanes->u_width;
+            break;
+        case M4COMMON_kOrientationLeftTop:
+        case M4COMMON_kOrientationLeftBottom:
+        case M4COMMON_kOrientationRightTop:
+        case M4COMMON_kOrientationRightBottom:
+            Params.m_outputSize.m_height = pImagePlanes->u_width;
+            Params.m_outputSize.m_width = pImagePlanes->u_height;
+            break;
+        }
+    }
+
+    /**
+    Picture rendering: Cropping*/
+    if(pC->m_mediaRendering == M4xVSS_kCropping)
+    {
+        M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: Cropping");
+        if((Params.m_outputSize.m_height * Params.m_inputSize.m_width)\
+            /Params.m_outputSize.m_width<Params.m_inputSize.m_height)
+        {
+            M4OSA_UInt32 tempHeight = Params.m_inputSize.m_height;
+            /*height will be cropped*/
+            Params.m_inputSize.m_height = (M4OSA_UInt32)((Params.m_outputSize.m_height \
+                * Params.m_inputSize.m_width) /Params.m_outputSize.m_width);
+            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
+            if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
+            {
+                Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)\
+                    ((pC->m_pDecodedPlane->u_height - Params.m_inputSize.m_height))>>1);
+            }
+            else
+            {
+                Params.m_inputCoord.m_y += (M4OSA_Int32)((M4OSA_Int32)\
+                    ((tempHeight - Params.m_inputSize.m_height))>>1);
+            }
+        }
+        else
+        {
+            M4OSA_UInt32 tempWidth= Params.m_inputSize.m_width;
+            /*width will be cropped*/
+            Params.m_inputSize.m_width = (M4OSA_UInt32)((Params.m_outputSize.m_width \
+                * Params.m_inputSize.m_height) /Params.m_outputSize.m_height);
+            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
+            if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
+            {
+                Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)\
+                    ((pC->m_pDecodedPlane->u_width - Params.m_inputSize.m_width))>>1);
+            }
+            else
+            {
+                Params.m_inputCoord.m_x += (M4OSA_Int32)\
+                    (((M4OSA_Int32)(tempWidth - Params.m_inputSize.m_width))>>1);
+            }
+        }
+    }
+
+    M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: Before AIR functions");
+
+    /**
+     * Call AIR functions */
+    if(M4OSA_NULL == pC->m_air_context)
+    {
+        err = M4AIR_create_NV12(&pC->m_air_context, M4AIR_kNV12P);
+
+        M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: After M4AIR_create_NV12");
+
+        if(err != M4NO_ERROR)
+        {
+            free(pC->m_pDecodedPlane[0].pac_data);
+            free(pC->m_pDecodedPlane);
+            pC->m_pDecodedPlane = M4OSA_NULL;
+            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct_NV12:\
+                Error when initializing AIR: 0x%x", err);
+            return err;
+        }
+    }
+
+    err = M4AIR_configure_NV12(pC->m_air_context, &Params);
+
+    M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: After M4AIR_configure_NV12");
+
+    if(err != M4NO_ERROR)
+    {
+        M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct_NV12:\
+            Error when configuring AIR: 0x%x", err);
+        M4AIR_cleanUp_NV12(pC->m_air_context);
+        free(pC->m_pDecodedPlane[0].pac_data);
+        free(pC->m_pDecodedPlane);
+        pC->m_pDecodedPlane = M4OSA_NULL;
+        return err;
+    }
+
+    err = M4AIR_get_NV12(pC->m_air_context, pC->m_pDecodedPlane, pImagePlanes);
+    if(err != M4NO_ERROR)
+    {
+        M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct_NV12: Error when getting AIR plane: 0x%x", err);
+        M4AIR_cleanUp_NV12(pC->m_air_context);
+        free(pC->m_pDecodedPlane[0].pac_data);
+        free(pC->m_pDecodedPlane);
+        pC->m_pDecodedPlane = M4OSA_NULL;
+        return err;
+    }
+    pImagePlanes[0] = pImagePlanes1;
+    pImagePlanes[1] = pImagePlanes2;
+
+    }
+
+    /**
+     * Increment the image counter */
+    pC->m_ImageCounter++;
+
+    /**
+     * Check end of sequence */
+    last_frame_flag = (pC->m_ImageCounter >= pC->m_NbImage);
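+    /* At this point the decoded picture has been cropped/resized into
+       pImagePlanes by the M4AIR create/configure/get sequence above; the
+       counter and flag below decide whether this was the last picture. */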
+ + /** + * Keep the picture duration */ + *pPictureDuration = pC->m_timeDuration; + + if (1 == last_frame_flag) + { + if(M4OSA_NULL != pC->m_air_context) + { + err = M4AIR_cleanUp(pC->m_air_context); + if(err != M4NO_ERROR) + { + M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct_NV12: Error when cleaning AIR: 0x%x", err); + return err; + } + } + if(M4OSA_NULL != pC->m_pDecodedPlane) + { + free(pC->m_pDecodedPlane[0].pac_data); + free(pC->m_pDecodedPlane); + pC->m_pDecodedPlane = M4OSA_NULL; + } + return M4PTO3GPP_WAR_LAST_PICTURE; + } + + M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: Leaving "); + return M4NO_ERROR; +} + +/** + ****************************************************************************** + * prototype M4OSA_ERR M4xVSS_internalConvertRGBtoNV12(M4xVSS_FramingStruct* framingCtx) + * @brief This function converts an RGB565 plane to NV12 planar + * @note It is used only for framing effect + * It allocates output YUV planes + * @param framingCtx (IN) The framing struct containing input RGB565 plane + * + * @return M4NO_ERROR: No error + * @return M4ERR_PARAMETER: At least one of the function parameters is null + * @return M4ERR_ALLOC: Allocation error (no more memory) + ****************************************************************************** + */ +M4OSA_ERR M4xVSS_internalConvertRGBtoNV12(M4xVSS_FramingStruct* framingCtx) +{ + M4OSA_ERR err; + + /** + * Allocate output NV12 planes */ + framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(2*sizeof(M4VIFI_ImagePlane), + M4VS, (M4OSA_Char *)"M4xVSS_internalConvertRGBtoNV12: Output plane NV12"); + if(framingCtx->FramingYuv == M4OSA_NULL) + { + M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoNV12"); + return M4ERR_ALLOC; + } + framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width; + framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height; + framingCtx->FramingYuv[0].u_topleft = 0; + framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width; + framingCtx->FramingYuv[0].pac_data = + (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width\ + *framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char *)\ + "Alloc for the Convertion output YUV"); + + if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL) + { + M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoNV12"); + return M4ERR_ALLOC; + } + framingCtx->FramingYuv[1].u_width = framingCtx->FramingRgb->u_width; + framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1; + framingCtx->FramingYuv[1].u_topleft = 0; + framingCtx->FramingYuv[1].u_stride = framingCtx->FramingRgb->u_width; + framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data \ + + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height; + + /** + * Convert input RGB 565 to NV12 to be able to merge it with output video in framing + effect */ + err = M4VIFI_xVSS_RGB565toNV12(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv); + if(err != M4NO_ERROR) + { + M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoNV12:\ + error when converting from RGB to NV12: 0x%x\n", err); + } + + framingCtx->duration = 0; + framingCtx->previousClipTime = -1; + framingCtx->previewOffsetClipTime = -1; + + /** + * Only one element in the chained list (no animated image with RGB buffer...) 
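+     * (the single node below is made to point back to itself, so
+     * pCurrent/pNext traversal needs no special case for still images)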
*/
+    framingCtx->pCurrent = framingCtx;
+    framingCtx->pNext = framingCtx;
+
+    return M4NO_ERROR;
+}
+
+
+static M4OSA_ERR M4xVSS_internalProbeFramingBoundaryNV12(M4xVSS_FramingStruct *framingCtx)
+{
+    M4OSA_ERR err = M4NO_ERROR;
+    M4OSA_UInt32 topleft_x, topleft_y, botright_x, botright_y;
+    M4OSA_UInt32 isTopLeftFound, isBottomRightFound;
+    M4OSA_UInt32 u32_width, u32_height;
+    M4OSA_UInt32 u32_stride_rgb, u32_stride_2rgb;
+    M4OSA_UInt32 u32_col, u32_row;
+    M4OSA_UInt8 *pu8_rgbn_data, *pu8_rgbn;
+    M4OSA_UInt16 u16_pix1, u16_pix2, u16_pix3, u16_pix4;
+
+    M4OSA_TRACE1_0("M4xVSS_internalProbeFramingBoundary starts!");
+
+    if (!framingCtx->exportmode) {
+        M4OSA_TRACE1_0("Err: not in export mode!");
+        return err;
+    }
+    topleft_x = 0;
+    topleft_y = 0;
+    botright_x = 0;
+    botright_y = 0;
+    isTopLeftFound = 0;
+    isBottomRightFound = 0;
+    framingCtx->framing_topleft_x = 0;
+    framingCtx->framing_topleft_y = 0;
+    framingCtx->framing_bottomright_x = 0;
+    framingCtx->framing_bottomright_y = 0;
+
+    /* Set the pointer to the beginning of the input data buffers */
+    pu8_rgbn_data = framingCtx->FramingRgb->pac_data + framingCtx->FramingRgb->u_topleft;
+
+    u32_width = framingCtx->FramingRgb->u_width;
+    u32_height = framingCtx->FramingRgb->u_height;
+
+    /* Set the size of the memory jumps corresponding to row jump in input plane */
+    u32_stride_rgb = framingCtx->FramingRgb->u_stride;
+    u32_stride_2rgb = u32_stride_rgb << 1;
+
+    /* Loop on each row of the output image, input coordinates are estimated from output ones */
+    /* Two YUV rows are computed at each pass */
+    for (u32_row = u32_height ;u32_row != 0; u32_row -=2)
+    {
+        pu8_rgbn = pu8_rgbn_data;
+
+        /* Loop on each column of the output image */
+        for (u32_col = u32_width; u32_col != 0 ; u32_col -=2)
+        {
+            /* Get four RGB 565 samples from input data */
+            u16_pix1 = *( (M4VIFI_UInt16 *) pu8_rgbn);
+            u16_pix2 = *( (M4VIFI_UInt16 *) (pu8_rgbn + CST_RGB_16_SIZE));
+            u16_pix3 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb));
+            u16_pix4 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb + CST_RGB_16_SIZE));
+            M4OSA_TRACE1_4("u16_pix1 = 0x%x, u16_pix2 = 0x%x, u16_pix3 = 0x%x, u16_pix4 = 0x%x",
+                u16_pix1, u16_pix2, u16_pix3, u16_pix4);
+            if (u16_pix1 != 0xE007 && u16_pix2 != 0xE007 &&
+                u16_pix3 != 0xE007 && u16_pix4 != 0xE007 && !isTopLeftFound)
+            {
+                topleft_x = u32_width - (u32_col+1);
+                topleft_y = u32_height - (u32_row+1);
+                isTopLeftFound = 1;
+            }
+            if (u16_pix1 != 0xE007 && u16_pix2 != 0xE007 &&
+                u16_pix3 != 0xE007 && u16_pix4 != 0xE007)
+            {
+                botright_x = u32_width - (u32_col+1);
+                botright_y = u32_height - (u32_row+1);
+                isBottomRightFound = 1;
+            }
+
+            /* Prepare for next column */
+            pu8_rgbn += (CST_RGB_16_SIZE<<1);
+        } /* End of horizontal scanning */
+
+        /* Prepare pointers for the next row */
+        pu8_rgbn_data += u32_stride_2rgb;
+
+    }
+    M4OSA_TRACE1_2("isTopLeftFound = %d, isBottomRightFound = %d", isTopLeftFound, isBottomRightFound);
+    if (isTopLeftFound && isBottomRightFound)
+    {
+        if ((topleft_x < botright_x) && (topleft_y < botright_y))
+        {
+            framingCtx->framing_topleft_x = (((topleft_x + 1)>>1)<<1) + 2;
+            framingCtx->framing_topleft_y = (((topleft_y + 1)>>1)<<1) + 2;
+            framingCtx->framing_bottomright_x = (((botright_x- 1)>>1)<<1) - 1;
+            framingCtx->framing_bottomright_y = (((botright_y- 1)>>1)<<1) - 1;
+            M4OSA_TRACE1_2("framingCtx->framing_topleft_x = %d, framingCtx->framing_topleft_y = %d",
+                framingCtx->framing_topleft_x, framingCtx->framing_topleft_y);
+            M4OSA_TRACE1_2("framingCtx->framing_bottomright_x = %d, 
framingCtx->framing_bottomright_y = %d", + framingCtx->framing_bottomright_x, framingCtx->framing_bottomright_y); + } + else + { + M4OSA_TRACE1_0("Err: invalid topleft and bottomright!"); + } + } + else + { + M4OSA_TRACE1_0("Err: fail to find framing boundaries!"); + } + return M4NO_ERROR; +} + + +/** + ****************************************************************************** + * prototype M4OSA_ERR M4xVSS_internalConvertARBG888toNV12_FrammingEffect(M4OSA_Context pContext, + * M4VSS3GPP_EffectSettings* pEffect, + * M4xVSS_FramingStruct* framingCtx, + M4VIDEOEDITING_VideoFrameSize OutputVideoResolution) + * + * @brief This function converts ARGB8888 input file to NV12 when used for framming effect + * @note The input ARGB8888 file path is contained in the pEffect structure + * If the ARGB8888 must be resized to fit output video size, this function + * will do it. + * @param pContext (IN) The integrator own context + * @param pEffect (IN) The effect structure containing all informations on + * the file to decode, resizing ... + * @param framingCtx (IN/OUT) Structure in which the output RGB will be stored + * + * @return M4NO_ERROR: No error + * @return M4ERR_PARAMETER: At least one of the function parameters is null + * @return M4ERR_ALLOC: Allocation error (no more memory) + * @return M4ERR_FILE_NOT_FOUND: File not found. + ****************************************************************************** + */ + +M4OSA_ERR M4xVSS_internalConvertARGB888toNV12_FrammingEffect(M4OSA_Context pContext, + M4VSS3GPP_EffectSettings* pEffect, M4xVSS_FramingStruct* framingCtx, + M4VIDEOEDITING_VideoFrameSize OutputVideoResolution) +{ + M4OSA_ERR err = M4NO_ERROR; + M4OSA_Context pARGBIn; + M4OSA_UInt32 file_size; + M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; + M4OSA_UInt32 width, height, width_out, height_out; + M4OSA_Void* pFile = pEffect->xVSS.pFramingFilePath; + M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8); + M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR; + /*UTF conversion support*/ + M4OSA_Char* pDecodedPath = M4OSA_NULL; + M4OSA_UInt32 i = 0,j = 0; + M4VIFI_ImagePlane rgbPlane; + M4OSA_UInt32 frameSize_argb=(framingCtx->width * framingCtx->height * 4); + M4OSA_UInt32 frameSize; + M4OSA_UInt32 tempAlphaPercent = 0; + M4VIFI_UInt8* TempPacData = M4OSA_NULL; + M4OSA_UInt16 *ptr = M4OSA_NULL; + M4OSA_UInt32 z = 0; + + M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect: Entering "); + + M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toNV12_FrammingEffect width and height %d %d ", + framingCtx->width,framingCtx->height); + + M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, M4VS, (M4OSA_Char*)\ + "Image argb data"); + if(pTmpData == M4OSA_NULL) { + M4OSA_TRACE1_0("Failed to allocate memory for Image clip"); + return M4ERR_ALLOC; + } + /** + * UTF conversion: convert the file path into the customer format*/ + pDecodedPath = pFile; + + if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL + && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL) + { + M4OSA_UInt32 length = 0; + err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) pFile, + (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length); + if(err != M4NO_ERROR) + { + M4OSA_TRACE1_1("M4xVSS_internalDecodePNG:\ + M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err); + free(pTmpData); + pTmpData = M4OSA_NULL; + return err; + } + pDecodedPath = 
xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
+    }
+
+    /**
+     * End of the conversion, now use the decoded path*/
+
+    /* Open input ARGB8888 file and store it into memory */
+    err = xVSS_context->pFileReadPtr->openRead(&pARGBIn, pDecodedPath, M4OSA_kFileRead);
+
+    if(err != M4NO_ERROR)
+    {
+        M4OSA_TRACE1_2("Can't open input ARGB8888 file %s, error: 0x%x\n",pFile, err);
+        free(pTmpData);
+        pTmpData = M4OSA_NULL;
+        return err;
+    }
+
+    err = xVSS_context->pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
+    if(err != M4NO_ERROR)
+    {
+        xVSS_context->pFileReadPtr->closeRead(pARGBIn);
+        free(pTmpData);
+        pTmpData = M4OSA_NULL;
+        return err;
+    }
+
+    err = xVSS_context->pFileReadPtr->closeRead(pARGBIn);
+    if(err != M4NO_ERROR)
+    {
+        M4OSA_TRACE1_2("Can't close input ARGB8888 file %s, error: 0x%x\n",pFile, err);
+        free(pTmpData);
+        pTmpData = M4OSA_NULL;
+        return err;
+    }
+
+    rgbPlane.u_height = framingCtx->height;
+    rgbPlane.u_width = framingCtx->width;
+    rgbPlane.u_stride = rgbPlane.u_width*3;
+    rgbPlane.u_topleft = 0;
+
+    frameSize = (rgbPlane.u_width * rgbPlane.u_height * 3); //Size of RGB888 data
+    rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(((frameSize)+ (2 * framingCtx->width)),
+        M4VS, (M4OSA_Char*)"Image clip RGB888 data");
+    if(rgbPlane.pac_data == M4OSA_NULL)
+    {
+        M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
+        free(pTmpData);
+        return M4ERR_ALLOC;
+    }
+
+    M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect:\
+        Remove the alpha channel  ");
+
+    /* premultiplied alpha % on RGB */
+    for (i=0, j = 0; i < frameSize_argb; i += 4) {
+        /* this is alpha value */
+        if ((i % 4) == 0)
+        {
+            tempAlphaPercent = pTmpData[i];
+        }
+
+        /* R */
+        rgbPlane.pac_data[j] = pTmpData[i+1];
+        j++;
+
+        /* G */
+        if (tempAlphaPercent > 0) {
+            rgbPlane.pac_data[j] = pTmpData[i+2];
+            j++;
+        } else {/* In case of alpha value 0, make GREEN to 255 */
+            rgbPlane.pac_data[j] = 255; //pTmpData[i+2];
+            j++;
+        }
+
+        /* B */
+        rgbPlane.pac_data[j] = pTmpData[i+3];
+        j++;
+    }
+
+    free(pTmpData);
+    pTmpData = M4OSA_NULL;
+
+    /* convert RGB888 to RGB565 */
+
+    /* allocate temp RGB 565 buffer */
+    TempPacData = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize +
+        (4 * (framingCtx->width + framingCtx->height + 1)),
+        M4VS, (M4OSA_Char*)"Image clip RGB565 data");
+    if (TempPacData == M4OSA_NULL) {
+        M4OSA_TRACE1_0("Failed to allocate memory for Image clip RGB565 data");
+        free(rgbPlane.pac_data);
+        return M4ERR_ALLOC;
+    }
+
+    ptr = (M4OSA_UInt16 *)TempPacData;
+    z = 0;
+
+    for (i = 0; i < j ; i += 3)
+    {
+        ptr[z++] = PACK_RGB565(0, rgbPlane.pac_data[i],
+            rgbPlane.pac_data[i+1],
+            rgbPlane.pac_data[i+2]);
+    }
+
+    /* free the RBG888 and assign RGB565 */
+    free(rgbPlane.pac_data);
+    rgbPlane.pac_data = TempPacData;
+
+    /**
+     * Check if output sizes are odd */
+    if(rgbPlane.u_height % 2 != 0)
+    {
+        M4VIFI_UInt8* output_pac_data = rgbPlane.pac_data;
+        M4OSA_UInt32 i;
+        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect:\
+            output height is odd  ");
+        output_pac_data +=rgbPlane.u_width * rgbPlane.u_height*2;
+
+        for(i=0; i<rgbPlane.u_width; i++)
+        {
+            *output_pac_data++ = transparent1;
+            *output_pac_data++ = transparent2;
+        }
+
+        /* We just add a white line to the PNG that will be transparent */
+        rgbPlane.u_height++;
+    }
+    if(rgbPlane.u_width % 2 != 0)
+    {
+        /* We add a new column of white (=transparent), but we need to parse all RGB lines */
+        M4OSA_UInt32 i;
+        M4VIFI_UInt8* newRGBpac_data;
+        M4VIFI_UInt8* output_pac_data, *input_pac_data;
+
+        rgbPlane.u_width++;
+        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect:\
+            output width is odd  ");
+        /* We need to allocate a new RGB output buffer in which all decoded data
+            + white column will be copied */
+        newRGBpac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(rgbPlane.u_height\
+            *rgbPlane.u_width*2*sizeof(M4VIFI_UInt8), M4VS,
+            (M4OSA_Char *)"New Framing Output pac_data RGB");
+        if(newRGBpac_data == M4OSA_NULL)
+        {
+            M4OSA_TRACE1_0("Allocation error in \
+                M4xVSS_internalConvertARGB888toNV12_FrammingEffect");
+            free(rgbPlane.pac_data);
+            return M4ERR_ALLOC;
+        }
+
+        output_pac_data = newRGBpac_data;
+        input_pac_data = rgbPlane.pac_data;
+
+        for(i=0; i<rgbPlane.u_height; i++)
+        {
+            memcpy((void *)output_pac_data, (void *)input_pac_data,
+                (rgbPlane.u_width-1)*2);
+            output_pac_data += ((rgbPlane.u_width-1)*2);
+
+            /* Put the pixel to transparency color */
+            *output_pac_data++ = transparent1;
+            *output_pac_data++ = transparent2;
+
+            input_pac_data += ((rgbPlane.u_width-1)*2);
+        }
+        free(rgbPlane.pac_data);
+        rgbPlane.pac_data = newRGBpac_data;
+    }
+
+    framingCtx->duration = 0;
+    framingCtx->previousClipTime = -1;
+    framingCtx->previewOffsetClipTime = -1;
+
+    /**
+     * Only one element in the chained list (no animated image ...) 
*/ + framingCtx->pCurrent = framingCtx; + framingCtx->pNext = framingCtx; + + /** + * Get output width/height */ + switch(OutputVideoResolution) + //switch(xVSS_context->pSettings->xVSS.outputVideoSize) + { + case M4VIDEOEDITING_kSQCIF: + width_out = 128; + height_out = 96; + break; + case M4VIDEOEDITING_kQQVGA: + width_out = 160; + height_out = 120; + break; + case M4VIDEOEDITING_kQCIF: + width_out = 176; + height_out = 144; + break; + case M4VIDEOEDITING_kQVGA: + width_out = 320; + height_out = 240; + break; + case M4VIDEOEDITING_kCIF: + width_out = 352; + height_out = 288; + break; + case M4VIDEOEDITING_kVGA: + width_out = 640; + height_out = 480; + break; + case M4VIDEOEDITING_kWVGA: + width_out = 800; + height_out = 480; + break; + case M4VIDEOEDITING_kNTSC: + width_out = 720; + height_out = 480; + break; + case M4VIDEOEDITING_k640_360: + width_out = 640; + height_out = 360; + break; + case M4VIDEOEDITING_k854_480: + // StageFright encoders require %16 resolution + width_out = M4ENCODER_854_480_Width; + height_out = 480; + break; + case M4VIDEOEDITING_k1280_720: + width_out = 1280; + height_out = 720; + break; + case M4VIDEOEDITING_k1080_720: + // StageFright encoders require %16 resolution + width_out = M4ENCODER_1080_720_Width; + height_out = 720; + break; + case M4VIDEOEDITING_k960_720: + width_out = 960; + height_out = 720; + break; + case M4VIDEOEDITING_k1920_1080: + width_out = 1920; + height_out = M4ENCODER_1920_1080_Height; + break; + /** + * If output video size is not given, we take QCIF size, + * should not happen, because already done in M4xVSS_sendCommand */ + default: + width_out = 176; + height_out = 144; + break; + } + + /** + * Allocate output planes structures */ + framingCtx->FramingRgb = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(sizeof(M4VIFI_ImagePlane), M4VS, + (M4OSA_Char *)"Framing Output plane RGB"); + if(framingCtx->FramingRgb == M4OSA_NULL) + { + M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toNV12_FrammingEffect"); + return M4ERR_ALLOC; + } + /** + * Resize RGB if needed */ + if((pEffect->xVSS.bResize) && + (rgbPlane.u_width != width_out || rgbPlane.u_height != height_out)) + { + width = width_out; + height = height_out; + + M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toNV12_FrammingEffect: \ + New Width and height %d %d ",width,height); + + framingCtx->FramingRgb->u_height = height_out; + framingCtx->FramingRgb->u_width = width_out; + framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2; + framingCtx->FramingRgb->u_topleft = 0; + + framingCtx->FramingRgb->pac_data = + (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(framingCtx->FramingRgb->u_height*framingCtx->\ + FramingRgb->u_width*2*sizeof(M4VIFI_UInt8), M4VS, + (M4OSA_Char *)"Framing Output pac_data RGB"); + + if(framingCtx->FramingRgb->pac_data == M4OSA_NULL) + { + M4OSA_TRACE1_0("Allocation error in \ + M4xVSS_internalConvertARGB888toNV12_FrammingEffect"); + free(framingCtx->FramingRgb); + free(rgbPlane.pac_data); + return M4ERR_ALLOC; + } + + M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect: Resizing Needed "); + M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toNV12_FrammingEffect:\ + rgbPlane.u_height & rgbPlane.u_width %d %d",rgbPlane.u_height,rgbPlane.u_width); + + err = M4VIFI_ResizeBilinearRGB565toRGB565(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb); + + if(err != M4NO_ERROR) + { + M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toNV12_FrammingEffect :\ + when resizing RGB plane: 0x%x\n", err); + return err; + } + + if(rgbPlane.pac_data != M4OSA_NULL) 
+ { + free(rgbPlane.pac_data); + rgbPlane.pac_data = M4OSA_NULL; + } + } + else + { + + M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect:\ + Resizing Not Needed "); + + width = rgbPlane.u_width; + height = rgbPlane.u_height; + framingCtx->FramingRgb->u_height = height; + framingCtx->FramingRgb->u_width = width; + framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2; + framingCtx->FramingRgb->u_topleft = 0; + framingCtx->FramingRgb->pac_data = rgbPlane.pac_data; + } + + + if(pEffect->xVSS.bResize) + { + /** + * Force topleft to 0 for pure framing effect */ + framingCtx->topleft_x = 0; + framingCtx->topleft_y = 0; + } + + + /** + * Convert RGB output to NV12 to be able to merge it with output video in framing + effect */ + framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(2*sizeof(M4VIFI_ImagePlane), M4VS, + (M4OSA_Char *)"Framing Output plane NV12"); + if(framingCtx->FramingYuv == M4OSA_NULL) + { + M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toNV12_FrammingEffect"); + free(framingCtx->FramingRgb->pac_data); + return M4ERR_ALLOC; + } + + // Alloc for Y, U and V planes + framingCtx->FramingYuv[0].u_width = ((width+1)>>1)<<1; + framingCtx->FramingYuv[0].u_height = ((height+1)>>1)<<1; + framingCtx->FramingYuv[0].u_topleft = 0; + framingCtx->FramingYuv[0].u_stride = ((width+1)>>1)<<1; + framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc + ((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height), M4VS, + (M4OSA_Char *)"Alloc for the output Y"); + if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL) + { + M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toNV12_FrammingEffect"); + free(framingCtx->FramingYuv); + free(framingCtx->FramingRgb->pac_data); + return M4ERR_ALLOC; + } + framingCtx->FramingYuv[1].u_width = framingCtx->FramingYuv[0].u_width; + framingCtx->FramingYuv[1].u_height = (framingCtx->FramingYuv[0].u_height)>>1; + framingCtx->FramingYuv[1].u_topleft = 0; + framingCtx->FramingYuv[1].u_stride = framingCtx->FramingYuv[1].u_width; + + framingCtx->FramingYuv[1].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc( + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height, M4VS, + (M4OSA_Char *)"Alloc for the output U&V"); + if (framingCtx->FramingYuv[1].pac_data == M4OSA_NULL) { + free(framingCtx->FramingYuv[0].pac_data); + free(framingCtx->FramingYuv); + free(framingCtx->FramingRgb->pac_data); + return M4ERR_ALLOC; + } + + M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect:\ + convert RGB to YUV "); + + err = M4VIFI_RGB565toNV12(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv); + + if (framingCtx->exportmode) { + M4xVSS_internalProbeFramingBoundaryNV12(framingCtx); + } + + if (err != M4NO_ERROR) + { + M4OSA_TRACE1_1("SPS png: error when converting from RGB to NV12: 0x%x\n", err); + } + M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect: Leaving "); + return err; +} + +/** + ****************************************************************************** + * prototype M4VSS3GPP_externalVideoEffectColor_NV12(M4OSA_Void *pFunctionContext, + * M4VIFI_ImagePlane *PlaneIn, + * M4VIFI_ImagePlane *PlaneOut, + * M4VSS3GPP_ExternalProgress *pProgress, + * M4OSA_UInt32 uiEffectKind) + * + * @brief This function apply a color effect on an input NV12 planar frame + * @note + * @param pFunctionContext(IN) Contains which color to apply (not very clean ...) 
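+ * @note    NV12 layout assumed throughout: plane 0 is the full-size Y plane and
+ *          plane 1 is the half-height interleaved CbCr plane, which is why the
+ *          loop below only walks plane_number 0 and 1.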
+ * @param PlaneIn (IN) Input NV12 planar + * @param PlaneOut (IN/OUT) Output NV12 planar + * @param pProgress (IN/OUT) Progress indication (0-100) + * @param uiEffectKind (IN) Unused + * + * @return M4VIFI_OK: No error + ****************************************************************************** + */ +M4OSA_ERR M4VSS3GPP_externalVideoEffectColor_NV12(M4OSA_Void *pFunctionContext, + M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut, + M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind) +{ + M4VIFI_Int32 plane_number; + M4VIFI_UInt32 i,j,wStep; + M4VIFI_UInt8 *p_buf_src, *p_buf_dest,*p_buf_dest_uv; + M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext; + M4VIFI_UInt32 uvTmp,uvTmp1,uvTmp2,u_wTmp,u_wTmp1; + M4VIFI_UInt32 *p_buf_dest_uv32; + uvTmp = uvTmp1 = uvTmp2 = 0; + + + for (plane_number = 0; plane_number < 2; plane_number++) + { + p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]); + p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]); + + for (i = 0; i < PlaneOut[plane_number].u_height; i++) + { + /** + * Chrominance */ + if(plane_number==1) + { + p_buf_dest_uv32 = (M4VIFI_UInt32*)p_buf_dest; + //switch ((M4OSA_UInt32)pFunctionContext) + // commented because a structure for the effects context exist + switch (ColorContext->colorEffectType) + { + case M4xVSS_kVideoEffectType_BlackAndWhite: + memset((void *)p_buf_dest,128, + PlaneIn[plane_number].u_width); + break; + case M4xVSS_kVideoEffectType_Pink: + memset((void *)p_buf_dest,255, + PlaneIn[plane_number].u_width); + break; + case M4xVSS_kVideoEffectType_Green: + memset((void *)p_buf_dest,0, + PlaneIn[plane_number].u_width); + break; + case M4xVSS_kVideoEffectType_Sepia: + { + uvTmp1 = 139; + uvTmp2 = 117 | (uvTmp1 << 8); + uvTmp = uvTmp2 | (uvTmp2 << 16); + + u_wTmp = PlaneIn[plane_number].u_width; + + u_wTmp1 = u_wTmp >> 4; + for(wStep = 0; wStep < u_wTmp1; wStep++) + { + *p_buf_dest_uv32++ = uvTmp; + *p_buf_dest_uv32++ = uvTmp; + *p_buf_dest_uv32++ = uvTmp; + *p_buf_dest_uv32++ = uvTmp; + } + u_wTmp1 = u_wTmp - ((u_wTmp>>4)<<4); // equal to u_wTmp % 16 + p_buf_dest_uv = (M4VIFI_UInt8*)p_buf_dest_uv32; + for(j=0; j< u_wTmp1; j++) + { + if (j%2 == 0) + { + *p_buf_dest_uv++ = 117; + } + else + { + *p_buf_dest_uv++ = 139; + } + } + break; + } + case M4xVSS_kVideoEffectType_Negative: + memcpy((void *)p_buf_dest, + (void *)p_buf_src ,PlaneOut[plane_number].u_width); + break; + + case M4xVSS_kVideoEffectType_ColorRGB16: + { + M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0; + + /*first get the r, g, b*/ + b = (ColorContext->rgb16ColorData & 0x001f); + g = (ColorContext->rgb16ColorData & 0x07e0)>>5; + r = (ColorContext->rgb16ColorData & 0xf800)>>11; + + /*keep y, but replace u and v*/ + u = U16(r, g, b); + v = V16(r, g, b); + uvTmp1 = (M4OSA_UInt8)v; + uvTmp2 = ((M4OSA_UInt8)u) | (uvTmp1 << 8); + uvTmp = uvTmp2 | (uvTmp2 << 16); + + u_wTmp = PlaneIn[plane_number].u_width; + + u_wTmp1 = u_wTmp >> 2; + for(wStep = 0; wStep < u_wTmp1; wStep++) + { + *p_buf_dest_uv32++ = uvTmp; + } + u_wTmp1 = u_wTmp - ((u_wTmp>>2)<<2); // equal to u_wTmp % 4 + p_buf_dest_uv = (M4VIFI_UInt8*)p_buf_dest_uv32; + if(u_wTmp1 == 0) { + break; + } else if(u_wTmp1 == 1) { + *p_buf_dest_uv = (M4OSA_UInt8)u; + } else if(u_wTmp1 == 2) { + *p_buf_dest_uv++ = (M4OSA_UInt8)u; + *p_buf_dest_uv = (M4OSA_UInt8)v; + } else { + *p_buf_dest_uv++ = (M4OSA_UInt8)u; + *p_buf_dest_uv++ = (M4OSA_UInt8)v; + *p_buf_dest_uv = (M4OSA_UInt8)u; + } + break; + } + case 
M4xVSS_kVideoEffectType_Gradient: + { + M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0; + + /*first get the r, g, b*/ + b = (ColorContext->rgb16ColorData & 0x001f); + g = (ColorContext->rgb16ColorData & 0x07e0)>>5; + r = (ColorContext->rgb16ColorData & 0xf800)>>11; + + /*for color gradation*/ + b = (M4OSA_UInt16)( b - ((b*i)/PlaneIn[plane_number].u_height)); + g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height)); + r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height)); + + /*keep y, but replace u and v*/ + u = U16(r, g, b); + v = V16(r, g, b); + uvTmp1 = (M4OSA_UInt8)v; + uvTmp2 = ((M4OSA_UInt8)u) | (uvTmp1 << 8); + uvTmp = uvTmp2 | (uvTmp2 << 16); + + u_wTmp = PlaneIn[plane_number].u_width; + + u_wTmp1 = u_wTmp >> 2; + for(wStep = 0; wStep < u_wTmp1; wStep++) + { + *p_buf_dest_uv32++ = uvTmp; + } + u_wTmp1 = u_wTmp - ((u_wTmp>>2)<<2); // equal to u_wTmp % 4 + p_buf_dest_uv = (M4VIFI_UInt8*)p_buf_dest_uv32; + if(u_wTmp1 == 0) { + break; + } else if(u_wTmp1 == 1) { + *p_buf_dest_uv = (M4OSA_UInt8)u; + } else if(u_wTmp1 == 2) { + *p_buf_dest_uv++ = (M4OSA_UInt8)u; + *p_buf_dest_uv = (M4OSA_UInt8)v; + } else { + *p_buf_dest_uv++ = (M4OSA_UInt8)u; + *p_buf_dest_uv++ = (M4OSA_UInt8)v; + *p_buf_dest_uv = (M4OSA_UInt8)u; + } + break; + } + default: + break; + } + } + /** + * Luminance */ + else + { + //switch ((M4OSA_UInt32)pFunctionContext) + // commented because a structure for the effects context exist + switch (ColorContext->colorEffectType) + { + case M4xVSS_kVideoEffectType_Negative: + for(j=0; j>8); + M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR; + +#ifndef DECODE_GIF_ON_SAVING + Framing = (M4xVSS_FramingStruct *)userData; + currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent; + FramingRGB = Framing->FramingRgb->pac_data; +#endif /*DECODE_GIF_ON_SAVING*/ + + /*FB*/ +#ifdef DECODE_GIF_ON_SAVING + M4OSA_ERR err; + Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx; + currentFraming = (M4xVSS_FramingStruct *)Framing; + FramingRGB = Framing->FramingRgb->pac_data; +#endif /*DECODE_GIF_ON_SAVING*/ + /*end FB*/ + + /** + * Initialize input / output plane pointers */ + p_in_Y += PlaneIn[0].u_topleft; + p_in_UV += PlaneIn[1].u_topleft; + + p_out0 = PlaneOut[0].pac_data; + p_out1 = PlaneOut[1].pac_data; + + /** + * Depending on time, initialize Framing frame to use */ + if(Framing->previousClipTime == -1) + { + Framing->previousClipTime = pProgress->uiOutputTime; + } + + /** + * If the current clip time has reach the duration of one frame of the framing picture + * we need to step to next framing picture */ + + Framing->previousClipTime = pProgress->uiOutputTime; + FramingRGB = currentFraming->FramingRgb->pac_data; + topleft[0] = currentFraming->topleft_x; + topleft[1] = currentFraming->topleft_y; + + M4OSA_TRACE1_2("currentFraming->topleft_x = %d, currentFraming->topleft_y = %d", + currentFraming->topleft_x,currentFraming->topleft_y); + + topleft_x = currentFraming->framing_topleft_x; + topleft_y = currentFraming->framing_topleft_y; + botright_x = currentFraming->framing_bottomright_x; + botright_y = currentFraming->framing_bottomright_y; + + M4OSA_TRACE1_4("topleft_x = %d, topleft_y = %d, botright_x = %d, botright_y = %d", + topleft_x,topleft_y, botright_x, botright_y); + + /*Alpha blending support*/ + M4OSA_Float alphaBlending = 1; + + M4xVSS_internalEffectsAlphaBlending* alphaBlendingStruct =\ + (M4xVSS_internalEffectsAlphaBlending*)\ + ((M4xVSS_FramingContext*)userData)->alphaBlendingStruct; + + if(alphaBlendingStruct != 
M4OSA_NULL)
+    {
+        if(pProgress->uiProgress \
+            < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
+        {
+            if(alphaBlendingStruct->m_fadeInTime == 0) {
+                alphaBlending = alphaBlendingStruct->m_start / 100;
+            } else {
+                alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle\
+                    - alphaBlendingStruct->m_start)\
+                    *pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
+                alphaBlending += alphaBlendingStruct->m_start;
+                alphaBlending /= 100;
+            }
+        }
+        else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->\
+            m_fadeInTime*10) && pProgress->uiProgress < 1000\
+            - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
+        {
+            alphaBlending = (M4OSA_Float)\
+                ((M4OSA_Float)alphaBlendingStruct->m_middle/100);
+        }
+        else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)\
+            (alphaBlendingStruct->m_fadeOutTime*10))
+        {
+            if(alphaBlendingStruct->m_fadeOutTime == 0) {
+                alphaBlending = alphaBlendingStruct->m_end / 100;
+            } else {
+                alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle \
+                    - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)\
+                    /(alphaBlendingStruct->m_fadeOutTime*10);
+                alphaBlending += alphaBlendingStruct->m_end;
+                alphaBlending /= 100;
+            }
+        }
+    }
+
+    M4VIFI_UInt8 alphaBlending_int8 = (M4VIFI_UInt8)(alphaBlending * 255);
+
+    for( x=0 ; x < PlaneIn[0].u_height ; x++)
+    {
+        if((x < topleft[1] + topleft_y) || (x > topleft[1] + botright_y))
+        {
+            if(x&0x01)
+            {
+                memcpy(p_out0+x*PlaneOut[0].u_stride,
+                    p_in_Y+x*PlaneIn[0].u_stride,
+                    PlaneOut[0].u_width);
+            }
+            else
+            {
+                memcpy(p_out0+x*PlaneOut[0].u_stride,
+                    p_in_Y+x*PlaneIn[0].u_stride,
+                    PlaneOut[0].u_width);
+                memcpy(p_out1+(x>>1)*PlaneOut[1].u_stride,
+                    p_in_UV+(x>>1)*PlaneIn[1].u_stride,
+                    PlaneOut[1].u_width);
+            }
+        }
+        else
+        {
+            if(x&0x01)
+            {
+                for(y=0; y < PlaneIn[0].u_width ; y++)
+                {
+                    if((y>=topleft[0]+topleft_x) && (y<=topleft[0]+botright_x))
+                    {
+                        *( p_out0+y+x*PlaneOut[0].u_stride)=
+                            (M4VIFI_UInt8)(((*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])\
+                            +(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending_int8 +
+                            (*(p_in_Y+y+x*PlaneIn[0].u_stride))*(255-alphaBlending_int8))>>8);
+                    }
+                    else
+                    {
+                        *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
+                    }
+                }
+            }
+            else
+            {
+                for(y=0 ; y < PlaneIn[0].u_width ; y++)
+                {
+                    if((y>=topleft[0]+topleft_x) && (y<=topleft[0]+botright_x))
+                    {
+                        *( p_out0+y+x*PlaneOut[0].u_stride)=
+                            (M4VIFI_UInt8)(((*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])\
+                            +(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending_int8 +
+                            (*(p_in_Y+y+x*PlaneIn[0].u_stride))*(255-alphaBlending_int8))>>8);
+
+                        *( p_out1+y+(x>>1)*PlaneOut[1].u_stride)=
+                            (M4VIFI_UInt8)(((*(currentFraming->FramingYuv[1].pac_data+(y-topleft[0])\
+                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))\
+                            *alphaBlending_int8 +
+                            *(p_in_UV+y+(x>>1)*PlaneIn[1].u_stride)*(255-alphaBlending_int8))>>8);
+                    }
+                    else
+                    {
+                        *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
+                        *( p_out1+y+(x>>1)*PlaneOut[1].u_stride)= *(p_in_UV+y+(x>>1)*PlaneIn[1].u_stride);
+                    }
+                }
+            }
+        }
+    }
+    return M4VIFI_OK;
+}
+
+/**
+ ******************************************************************************
+ * prototype    M4xVSS_AlphaMagic_NV12( M4OSA_Void *userData,
+ *                                      M4VIFI_ImagePlane PlaneIn1[2],
+ *                                      M4VIFI_ImagePlane PlaneIn2[2],
+ *                                      M4VIFI_ImagePlane *PlaneOut,
+ *                                      M4VSS3GPP_ExternalProgress *pProgress,
+ *                                      M4OSA_UInt32 uiTransitionKind)
+ *
+ * @brief   This function applies an alpha magic transition between two input NV12 planar frames
+ * @note
+ * @param   userData    (IN) 
Contains a pointer on a settings structure + * @param PlaneIn1 (IN) Input NV12 planar from video 1 + * @param PlaneIn2 (IN) Input NV12 planar from video 2 + * @param PlaneOut (IN/OUT) Output NV12 planar + * @param pProgress (IN/OUT) Progress indication (0-100) + * @param uiTransitionKind(IN) Unused + * + * @return M4VIFI_OK: No error + ****************************************************************************** + */ +M4OSA_ERR M4xVSS_AlphaMagic_NV12( M4OSA_Void *userData, M4VIFI_ImagePlane *PlaneIn1, + M4VIFI_ImagePlane *PlaneIn2, M4VIFI_ImagePlane *PlaneOut, + M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiTransitionKind) +{ + M4OSA_ERR err; + M4xVSS_internal_AlphaMagicSettings* alphaContext; + M4VIFI_Int32 alphaProgressLevel; + + M4VIFI_ImagePlane* planeswap; + M4VIFI_UInt32 x,y; + + M4VIFI_UInt8 *p_out0; + M4VIFI_UInt8 *p_out1; + + M4VIFI_UInt8 *alphaMask; + /* "Old image" */ + M4VIFI_UInt8 *p_in1_Y; + M4VIFI_UInt8 *p_in1_UV; + /* "New image" */ + M4VIFI_UInt8 *p_in2_Y; + M4VIFI_UInt8 *p_in2_UV; + + M4OSA_TRACE1_0("M4xVSS_AlphaMagic_NV12 begin"); + + err = M4NO_ERROR; + + alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData; + + alphaProgressLevel = (pProgress->uiProgress * 128)/1000; + + if( alphaContext->isreverse != M4OSA_FALSE) + { + alphaProgressLevel = 128 - alphaProgressLevel; + planeswap = PlaneIn1; + PlaneIn1 = PlaneIn2; + PlaneIn2 = planeswap; + } + + p_out0 = PlaneOut[0].pac_data; + p_out1 = PlaneOut[1].pac_data; + + alphaMask = alphaContext->pPlane->pac_data; + + /* "Old image" */ + p_in1_Y = PlaneIn1[0].pac_data; + p_in1_UV = PlaneIn1[1].pac_data; + + /* "New image" */ + p_in2_Y = PlaneIn2[0].pac_data; + p_in2_UV = PlaneIn2[1].pac_data; + + /** + * For each column ... */ + for( y=0; yu_height; y++ ) + { + /** + * ... 
and each row of the alpha mask */ + for( x=0; xu_width; x++ ) + { + /** + * If the value of the current pixel of the alpha mask is > to the current time + * ( current time is normalized on [0-255] ) */ + if( alphaProgressLevel < alphaMask[x+y*PlaneOut->u_width] ) + { + /* We keep "old image" in output plane */ + *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride); + *( p_out1+x+(y>>1)*PlaneOut[1].u_stride)= + *(p_in1_UV+x+(y>>1)*PlaneIn1[1].u_stride); + } + else + { + /* We take "new image" in output plane */ + *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride); + *( p_out1+x+(y>>1)*PlaneOut[1].u_stride)= + *(p_in2_UV+x+(y>>1)*PlaneIn2[1].u_stride); + } + } + } + + M4OSA_TRACE1_0("M4xVSS_AlphaMagic_NV12 end"); + + return(err); +} + +/** + ****************************************************************************** + * prototype M4xVSS_AlphaMagicBlending_NV12( M4OSA_Void *userData, + * M4VIFI_ImagePlane PlaneIn1[2], + * M4VIFI_ImagePlane PlaneIn2[2], + * M4VIFI_ImagePlane *PlaneOut, + * M4VSS3GPP_ExternalProgress *pProgress, + * M4OSA_UInt32 uiTransitionKind) + * + * @brief This function apply a color effect on an input NV12 planar frame + * @note + * @param userData (IN) Contains a pointer on a settings structure + * @param PlaneIn1 (IN) Input NV12 planar from video 1 + * @param PlaneIn2 (IN) Input NV12 planar from video 2 + * @param PlaneOut (IN/OUT) Output NV12 planar + * @param pProgress (IN/OUT) Progress indication (0-100) + * @param uiTransitionKind(IN) Unused + * + * @return M4VIFI_OK: No error + ****************************************************************************** + */ +M4OSA_ERR M4xVSS_AlphaMagicBlending_NV12(M4OSA_Void *userData, + M4VIFI_ImagePlane *PlaneIn1, M4VIFI_ImagePlane *PlaneIn2, + M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress, + M4OSA_UInt32 uiTransitionKind) +{ + M4OSA_ERR err; + + M4xVSS_internal_AlphaMagicSettings* alphaContext; + M4VIFI_Int32 alphaProgressLevel; + M4VIFI_Int32 alphaBlendLevelMin; + M4VIFI_Int32 alphaBlendLevelMax; + M4VIFI_Int32 alphaBlendRange; + + M4VIFI_ImagePlane* planeswap; + M4VIFI_UInt32 x,y; + M4VIFI_Int32 alphaMaskValue; + + M4VIFI_UInt8 *p_out0; + M4VIFI_UInt8 *p_out1; + M4VIFI_UInt8 *alphaMask; + + /* "Old image" */ + M4VIFI_UInt8 *p_in1_Y; + M4VIFI_UInt8 *p_in1_UV; + + /* "New image" */ + M4VIFI_UInt8 *p_in2_Y; + M4VIFI_UInt8 *p_in2_UV; + + M4OSA_TRACE1_0("M4xVSS_AlphaMagicBlending_NV12 begin"); + + err = M4NO_ERROR; + + alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData; + + alphaProgressLevel = (pProgress->uiProgress * 128)/1000; + + if( alphaContext->isreverse != M4OSA_FALSE) + { + alphaProgressLevel = 128 - alphaProgressLevel; + planeswap = PlaneIn1; + PlaneIn1 = PlaneIn2; + PlaneIn2 = planeswap; + } + + alphaBlendLevelMin = alphaProgressLevel-alphaContext->blendingthreshold; + + alphaBlendLevelMax = alphaProgressLevel+alphaContext->blendingthreshold; + + alphaBlendRange = (alphaContext->blendingthreshold)*2; + + p_out0 = PlaneOut[0].pac_data; + p_out1 = PlaneOut[1].pac_data; + + + alphaMask = alphaContext->pPlane->pac_data; + + /* "Old image" */ + p_in1_Y = PlaneIn1[0].pac_data; + p_in1_UV = PlaneIn1[1].pac_data; + + /* "New image" */ + p_in2_Y = PlaneIn2[0].pac_data; + p_in2_UV = PlaneIn2[1].pac_data; + + + /* apply Alpha Magic on each pixel */ + for( y=0; yu_height; y++ ) + { + if (y%2 == 0) + { + for( x=0; xu_width; x++ ) + { + alphaMaskValue = alphaMask[x+y*PlaneOut->u_width]; + if( alphaBlendLevelMax < alphaMaskValue ) + { + /* We keep "old image" 
in output plane */ + *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride); + *( p_out1+x+(y>>1)*PlaneOut[1].u_stride)= + *(p_in1_UV+x+(y>>1)*PlaneIn1[1].u_stride); + } + else if( (alphaBlendLevelMin < alphaMaskValue)&& + (alphaMaskValue <= alphaBlendLevelMax ) ) + { + /* We blend "old and new image" in output plane */ + *( p_out0+x+y*PlaneOut[0].u_stride)=(M4VIFI_UInt8) + (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_Y+x+y*PlaneIn1[0].u_stride)) + +(alphaBlendLevelMax-alphaMaskValue)\ + *( *(p_in2_Y+x+y*PlaneIn2[0].u_stride)) )/alphaBlendRange ); + + *( p_out1+x+(y>>1)*PlaneOut[1].u_stride)=(M4VIFI_UInt8)\ + (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_UV+x+(y>>1)\ + *PlaneIn1[1].u_stride)) + +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_UV+x+(y>>1)\ + *PlaneIn2[1].u_stride)) )/alphaBlendRange ); + } + else + { + /* We take "new image" in output plane */ + *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride); + *( p_out1+x+(y>>1)*PlaneOut[1].u_stride)= + *(p_in2_UV+x+(y>>1)*PlaneIn2[1].u_stride); + + } + } + } + else + { + for( x=0; xu_width; x++ ) + { + alphaMaskValue = alphaMask[x+y*PlaneOut->u_width]; + if( alphaBlendLevelMax < alphaMaskValue ) + { + /* We keep "old image" in output plane */ + *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride); + } + else if( (alphaBlendLevelMin < alphaMaskValue)&& + (alphaMaskValue <= alphaBlendLevelMax ) ) + { + /* We blend "old and new image" in output plane */ + *( p_out0+x+y*PlaneOut[0].u_stride)=(M4VIFI_UInt8) + (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_Y+x+y*PlaneIn1[0].u_stride)) + +(alphaBlendLevelMax-alphaMaskValue)\ + *( *(p_in2_Y+x+y*PlaneIn2[0].u_stride)) )/alphaBlendRange ); + } + else + { + /* We take "new image" in output plane */ + *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride); + } + } + } + } + + M4OSA_TRACE1_0("M4xVSS_AlphaMagicBlending_NV12 end"); + + return(err); +} + + +#define M4XXX_SampleAddress_X86(plane, x, y) ( (plane).pac_data + (plane).u_topleft + (y)\ + * (plane).u_stride + (x) ) + +static void M4XXX_CopyPlane_X86(M4VIFI_ImagePlane* dest, M4VIFI_ImagePlane* source) +{ + M4OSA_UInt32 height, width, sourceStride, destStride, y; + M4OSA_MemAddr8 sourceWalk, destWalk; + + /* cache the vars used in the loop so as to avoid them being repeatedly fetched and + recomputed from memory. */ + height = dest->u_height; + width = dest->u_width; + + sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress_X86(*source, 0, 0); + sourceStride = source->u_stride; + + destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress_X86(*dest, 0, 0); + destStride = dest->u_stride; + + for (y=0; ydirection) + || (M4xVSS_SlideTransition_LeftOutRightIn == settings->direction) ) + { + /* horizontal slide */ + shiftUV = ((PlaneOut[1]).u_width/2 * pProgress->uiProgress)/1000; + M4OSA_TRACE1_2("M4xVSS_SlideTransition_NV12 upper: shiftUV = %d,progress = %d", + shiftUV,pProgress->uiProgress ); + if (M4xVSS_SlideTransition_RightOutLeftIn == settings->direction) + { + /* Put the previous clip frame right, the next clip frame left, and reverse shiftUV + (since it's a shift from the left frame) so that we start out on the right + (i.e. not left) frame, it + being from the previous clip. 
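+           In other words, the offset passed below is the complement of shiftUV
+           within the half-width of the UV plane.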
*/ + return M4xVSS_HorizontalSlideTransition_NV12(PlaneIn2, PlaneIn1, PlaneOut, + (PlaneOut[1]).u_width/2 - shiftUV); + } + else /* Left out, right in*/ + { + return M4xVSS_HorizontalSlideTransition_NV12(PlaneIn1, PlaneIn2, PlaneOut, shiftUV); + } + } + else + { + /* vertical slide */ + shiftUV = ((PlaneOut[1]).u_height * pProgress->uiProgress)/1000; + M4OSA_TRACE1_2("M4xVSS_SlideTransition_NV12 bottom: shiftUV = %d,progress = %d",shiftUV, + pProgress->uiProgress ); + if (M4xVSS_SlideTransition_TopOutBottomIn == settings->direction) + { + /* Put the previous clip frame top, the next clip frame bottom. */ + return M4xVSS_VerticalSlideTransition_NV12(PlaneIn1, PlaneIn2, PlaneOut, shiftUV); + } + else /* Bottom out, top in */ + { + return M4xVSS_VerticalSlideTransition_NV12(PlaneIn2, PlaneIn1, PlaneOut, + (PlaneOut[1]).u_height - shiftUV); + } + } +} + +/** + ****************************************************************************** + * prototype M4xVSS_FadeBlackTransition_NV12(M4OSA_Void *pFunctionContext, + * M4VIFI_ImagePlane *PlaneIn, + * M4VIFI_ImagePlane *PlaneOut, + * M4VSS3GPP_ExternalProgress *pProgress, + * M4OSA_UInt32 uiEffectKind) + * + * @brief This function apply a fade to black and then a fade from black + * @note + * @param pFunctionContext(IN) Contains which color to apply (not very clean ...) + * @param PlaneIn (IN) Input NV12 planar + * @param PlaneOut (IN/OUT) Output NV12 planar + * @param pProgress (IN/OUT) Progress indication (0-100) + * @param uiEffectKind (IN) Unused + * + * @return M4VIFI_OK: No error + ****************************************************************************** + */ +M4OSA_ERR M4xVSS_FadeBlackTransition_NV12(M4OSA_Void *userData, + M4VIFI_ImagePlane *PlaneIn1, M4VIFI_ImagePlane *PlaneIn2, + M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress, + M4OSA_UInt32 uiTransitionKind) +{ + M4OSA_Int32 tmp = 0; + M4OSA_ERR err = M4NO_ERROR; + + if((pProgress->uiProgress) < 500) + { + /** + * Compute where we are in the effect (scale is 0->1024) */ + tmp = (M4OSA_Int32)((1.0 - ((M4OSA_Float)(pProgress->uiProgress*2)/1000)) * 1024 ); + + /** + * Apply the darkening effect */ + err = M4VFL_modifyLumaWithScale_NV12((M4ViComImagePlane*)PlaneIn1, + (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL); + if (M4NO_ERROR != err) + { + M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition_NV12: M4VFL_modifyLumaWithScale_NV12 returns\ + error 0x%x, returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err); + return M4VSS3GPP_ERR_LUMA_FILTER_ERROR; + } + } + else + { + /** + * Compute where we are in the effect (scale is 0->1024). */ + tmp = (M4OSA_Int32)((((M4OSA_Float)(((pProgress->uiProgress-500)*2))/1000)) * 1024); + + /** + * Apply the darkening effect */ + err = M4VFL_modifyLumaWithScale_NV12((M4ViComImagePlane*)PlaneIn2, + (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL); + if (M4NO_ERROR != err) + { + M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition_NV12:\ + M4VFL_modifyLumaWithScale_NV12 returns error 0x%x,\ + returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err); + return M4VSS3GPP_ERR_LUMA_FILTER_ERROR; + } + } + + return M4VIFI_OK; +} + -- cgit v1.2.3 From 817c4c3d8596692783f3aef0af07379e9e9d9ff9 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Wed, 15 Aug 2012 16:00:43 +0800 Subject: make ref/rec surfaces stride/height aligned with 16 to avoid bad performance. BZ: 52316 It is found that if ref/rec surfaces stride/height not aligned with 16, the SyncSurface will become very slow, about 1 sec per time. so make all aligned. 
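For reference, the rounding used in the patch is the usual align-up idiom; a
minimal sketch (illustration only, not part of the change):

    /* Round x up to the next multiple of 16.
       ((x + 15) / 16) * 16, as written in the patch, computes the same value. */
    static inline uint32_t align16(uint32_t x) { return (x + 15) & ~15u; }

    /* Example: a 1920x1080 source maps to a 1920x1088 NV12 surface, whose
       size is stride * height * 3 / 2 bytes (Y plane plus interleaved UV). */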
Change-Id: Ia17c19b30890b0025d8809dcb53f60e229f3460e Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/62236 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 72b4778..417aab0 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -124,6 +124,8 @@ Encode_Status VideoEncoderBase::start() { VASurfaceID surfaces[2]; int32_t index = -1; SurfaceMap *map = mSrcSurfaceMapList; + uint32_t stride_aligned = 0; + uint32_t height_aligned = 0; VAConfigAttrib vaAttrib[2]; uint32_t maxSize = 0; @@ -166,17 +168,20 @@ Encode_Status VideoEncoderBase::start() { VASurfaceAttributeTPI attribute_tpi; - attribute_tpi.size = mComParams.resolution.width * mComParams.resolution.height * 3 / 2; - attribute_tpi.luma_stride = mComParams.resolution.width; - attribute_tpi.chroma_u_stride = mComParams.resolution.width; - attribute_tpi.chroma_v_stride = mComParams.resolution.width; + stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; + height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; + + attribute_tpi.size = stride_aligned * height_aligned * 3 / 2; + attribute_tpi.luma_stride = stride_aligned; + attribute_tpi.chroma_u_stride = stride_aligned; + attribute_tpi.chroma_v_stride = stride_aligned; attribute_tpi.luma_offset = 0; - attribute_tpi.chroma_u_offset = mComParams.resolution.width * mComParams.resolution.height; - attribute_tpi.chroma_v_offset = mComParams.resolution.width * mComParams.resolution.height; + attribute_tpi.chroma_u_offset = stride_aligned * height_aligned; + attribute_tpi.chroma_v_offset = stride_aligned * height_aligned; attribute_tpi.pixel_format = VA_FOURCC_NV12; attribute_tpi.type = VAExternalMemoryNULL; - vaCreateSurfacesWithAttribute(mVADisplay, mComParams.resolution.width, mComParams.resolution.height, + vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned, VA_RT_FORMAT_YUV420, 2, surfaces, &attribute_tpi); CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); -- cgit v1.2.3 From 7cdfc5ef9facdaaa896a1d079ddf16f048029804 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Wed, 15 Aug 2012 17:36:38 +0800 Subject: Revert "fix libmix encoder klockwork issues, rename IntelMetadatabuffer APIs" BZ: 50965 This reverts commit fefa62b938fb4764d49345f509f8718cd6a2a269. 
The reason is wrong BZ number Change-Id: I8938b2ad89d1b770a91387a919210b5d32eee8b8 Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/62247 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- test/btest.cpp | 4 +-- test/mix_encoder.cpp | 9 +++--- videoencoder/IntelMetadataBuffer.cpp | 63 ++---------------------------------- videoencoder/IntelMetadataBuffer.h | 12 ++----- videoencoder/VideoEncoderAVC.cpp | 14 ++++++++ videoencoder/VideoEncoderBase.cpp | 21 ++++++++---- videoencoder/VideoEncoderH263.cpp | 1 + videoencoder/VideoEncoderMP4.cpp | 2 ++ 8 files changed, 44 insertions(+), 82 deletions(-) diff --git a/test/btest.cpp b/test/btest.cpp index 13b790b..26f104f 100644 --- a/test/btest.cpp +++ b/test/btest.cpp @@ -37,7 +37,7 @@ int main(int argc, char* argv[]) ret = mb1->SetValueInfo(&vi1); ret = mb1->SetExtraValues(ev1, 10); } - ret = mb1->Serialize(bytes, size); + ret = mb1->GetBytes(bytes, size); printf("assembling IntelMetadataBuffer %s, ret = %d\n", (ret == IMB_SUCCESS)?"Success":"Fail", ret ); printf("size = %d, bytes = ", size); @@ -48,7 +48,7 @@ int main(int argc, char* argv[]) printf("\n"); mb2 = new IntelMetadataBuffer(); - ret = mb2->UnSerialize(bytes, size); + ret = mb2->SetBytes(bytes, size); printf("parsing IntelMetadataBuffer %s, ret = %d\n", (ret == IMB_SUCCESS)?"Success":"Fail", ret ); ret = mb2->GetType(t2); diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index d4b88c0..38fbc00 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -697,7 +697,7 @@ for(int i=0; i<1; i++) { if (gMode != 4) { - gIMB[i % gSrcFrames]->Serialize(data, size); + gIMB[i % gSrcFrames]->GetBytes(data, size); // printf("srcno =%d, data=%x, size=%d\n", i % gSrcFrames, data, size); }else { @@ -729,10 +729,11 @@ for(int i=0; i<1; i++) stat.total_frames, stat.skipped_frames, stat.average_encode_time, stat.max_encode_time, stat.max_encode_frame, \ stat.min_encode_time, stat.min_encode_frame ); } + if(gVideoEncoder) { + releaseVideoEncoder(gVideoEncoder); + gVideoEncoder = NULL; + } - gVideoEncoder->stop(); - releaseVideoEncoder(gVideoEncoder); - gVideoEncoder = NULL; switch(gMode) { diff --git a/videoencoder/IntelMetadataBuffer.cpp b/videoencoder/IntelMetadataBuffer.cpp index eb9fe43..531d0ca 100644 --- a/videoencoder/IntelMetadataBuffer.cpp +++ b/videoencoder/IntelMetadataBuffer.cpp @@ -60,55 +60,6 @@ IntelMetadataBuffer::~IntelMetadataBuffer() delete[] mBytes; } - -IntelMetadataBuffer::IntelMetadataBuffer(const IntelMetadataBuffer& imb) - :mType(imb.mType), mValue(imb.mValue), mInfo(NULL), mExtraValues(NULL), - mExtraValues_Count(imb.mExtraValues_Count), mBytes(NULL), mSize(imb.mSize) -{ - if (imb.mInfo) - mInfo = new ValueInfo(*imb.mInfo); - - if (imb.mExtraValues) - { - mExtraValues = new int32_t[mExtraValues_Count]; - memcpy(mExtraValues, imb.mExtraValues, 4 * mExtraValues_Count); - } - - if (imb.mBytes) - { - mBytes = new uint8_t[mSize]; - memcpy(mBytes, imb.mBytes, mSize); - } -} - -const IntelMetadataBuffer& IntelMetadataBuffer::operator=(const IntelMetadataBuffer& imb) -{ - mType = imb.mType; - mValue = imb.mValue; - mInfo = NULL; - mExtraValues = NULL; - mExtraValues_Count = imb.mExtraValues_Count; - mBytes = NULL; - mSize = imb.mSize; - - if (imb.mInfo) - mInfo = new ValueInfo(*imb.mInfo); - - if (imb.mExtraValues) - { - mExtraValues = new int32_t[mExtraValues_Count]; - memcpy(mExtraValues, imb.mExtraValues, 4 * mExtraValues_Count); - } - - if (imb.mBytes) - { - mBytes = new uint8_t[mSize]; - memcpy(mBytes, 
imb.mBytes, mSize); - } - - return *this; -} - IMB_Result IntelMetadataBuffer::GetType(MetadataBufferType& type) { type = mType; @@ -192,7 +143,7 @@ IMB_Result IntelMetadataBuffer::SetExtraValues(int32_t* values, uint32_t num) return IMB_SUCCESS; } -IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size) +IMB_Result IntelMetadataBuffer::SetBytes(uint8_t* data, uint32_t size) { if (!data || size == 0) return IMB_INVAL_PARAM; @@ -263,12 +214,7 @@ IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size) return IMB_SUCCESS; } -IMB_Result IntelMetadataBuffer::SetBytes(uint8_t* data, uint32_t size) -{ - return UnSerialize(data, size); -} - -IMB_Result IntelMetadataBuffer::Serialize(uint8_t* &data, uint32_t& size) +IMB_Result IntelMetadataBuffer::GetBytes(uint8_t* &data, uint32_t& size) { if (mBytes == NULL) { @@ -307,11 +253,6 @@ IMB_Result IntelMetadataBuffer::Serialize(uint8_t* &data, uint32_t& size) return IMB_SUCCESS; } -IMB_Result IntelMetadataBuffer::GetBytes(uint8_t* &data, uint32_t& size) -{ - return Serialize(data, size); -} - uint32_t IntelMetadataBuffer::GetMaxBufferSize() { return 256; diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h index 2eacc5f..802ef7b 100644 --- a/videoencoder/IntelMetadataBuffer.h +++ b/videoencoder/IntelMetadataBuffer.h @@ -72,9 +72,6 @@ public: IntelMetadataBuffer(MetadataBufferType type, int32_t value); //for quick generator ~IntelMetadataBuffer(); - IntelMetadataBuffer(const IntelMetadataBuffer& imb); - const IntelMetadataBuffer& operator=(const IntelMetadataBuffer& imb); - IMB_Result GetType(MetadataBufferType &type); IMB_Result SetType(MetadataBufferType type); IMB_Result GetValue(int32_t &value); @@ -84,14 +81,11 @@ public: IMB_Result GetExtraValues(int32_t* &values, uint32_t &num); IMB_Result SetExtraValues(int32_t *values, uint32_t num); - //for bytes input, also for parser, will be obsoleted + //for bytes input, also for parser IMB_Result SetBytes(uint8_t* data, uint32_t size); - //for bytes output, also for generator, will be obsoleted - IMB_Result GetBytes(uint8_t* &data, uint32_t& size); - //New API for bytes input/ouput, UnSerialize=SetBytes, Serialize=GetBytes - IMB_Result UnSerialize(uint8_t* data, uint32_t size); - IMB_Result Serialize(uint8_t* &data, uint32_t& size); + //for bytes output, also for generator + IMB_Result GetBytes(uint8_t* &data, uint32_t& size); //Static, for get max IntelMetadataBuffer size static uint32_t GetMaxBufferSize(); diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 738eefa..5922e6a 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -197,6 +197,9 @@ Encode_Status VideoEncoderAVC::getOutput(VideoEncOutputBuffer *outBuffer) { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; bool useLocalBuffer = false; + uint32_t nalType = 0; + uint32_t nalSize = 0; + uint32_t nalOffset = 0; uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval; LOG_V("Begin\n"); @@ -280,6 +283,8 @@ Encode_Status VideoEncoderAVC::getOneNALUnit( uint32_t *nalType, uint32_t *nalOffset, uint32_t status) { uint32_t pos = 0; uint32_t zeroByteCount = 0; + uint32_t prefixLength = 0; + uint32_t leadingZeroCnt = 0; uint32_t singleByteTable[3][2] = {{1,0},{2,0},{2,3}}; uint32_t dataRemaining = 0; uint8_t *dataPtr; @@ -515,6 +520,7 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf uint32_t nalSize = 0; uint32_t nalOffset = 0; uint32_t 
sizeCopiedHere = 0; + uint32_t sizeToBeCopied = 0; CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf); @@ -642,6 +648,7 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(void) { Encode_Status VideoEncoderAVC::renderMaxSliceSize() { VAStatus vaStatus = VA_STATUS_SUCCESS; + Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n\n"); if (mComParams.rcMode != RATE_CONTROL_VCM) { @@ -787,6 +794,12 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { avcSeqParams.intra_idr_period = mVideoParamsAVC.idrInterval; avcSeqParams.picture_width_in_mbs = (mComParams.resolution.width + 15) / 16; avcSeqParams.picture_height_in_mbs = (mComParams.resolution.height + 15) / 16; + if((avcSeqParams.picture_width_in_mbs >=1920)|| (avcSeqParams.picture_height_in_mbs >=1080)) + { + device_info = vaQueryVendorString(mVADisplay); + if(strstr(device_info, "LEXINGTON")) + return ENCODE_INVALID_PARAMS; + } level = calcLevel (avcSeqParams.picture_width_in_mbs * avcSeqParams.picture_height_in_mbs); avcSeqParams.level_idc = level; @@ -900,6 +913,7 @@ Encode_Status VideoEncoderAVC::renderSliceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; uint32_t sliceNum = 0; + uint32_t sliceHeight = 0; uint32_t sliceIndex = 0; uint32_t sliceHeightInMB = 0; uint32_t maxSliceNum = 0; diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 417aab0..ae29aef 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -657,6 +657,7 @@ CLEAN_UP: Encode_Status VideoEncoderBase::prepareForOutput( VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer) { + Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; VACodedBufferSegment *vaCodedSeg = NULL; uint32_t status = 0; @@ -740,6 +741,7 @@ Encode_Status VideoEncoderBase::prepareForOutput( Encode_Status VideoEncoderBase::cleanupForOutput() { VAStatus vaStatus = VA_STATUS_SUCCESS; + Encode_Status ret = ENCODE_SUCCESS; //mCurSegment is NULL means all data has been copied out if (mCurSegment == NULL && mCodedBufferMapped) { @@ -1739,23 +1741,25 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { int32_t *extravalues = NULL; unsigned int extravalues_count = 0; - IntelMetadataBuffer imb; + IntelMetadataBuffer *imb = new IntelMetadataBuffer; SurfaceMap *map = NULL; if (mStoreMetaDataInBuffers.isEnabled) { //metadatabuffer mode LOG_I("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); - if (imb.UnSerialize(inBuffer->data, inBuffer->size) != IMB_SUCCESS) { + if (imb->SetBytes(inBuffer->data, inBuffer->size) != IMB_SUCCESS) { //fail to parse buffer + delete imb; return ENCODE_NO_REQUEST_DATA; } - imb.GetType(type); - imb.GetValue(value); + imb->GetType(type); + imb->GetValue(value); } else { //raw mode LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); if (! 
inBuffer->data || inBuffer->size == 0) { + delete imb; return ENCODE_NULL_PTR; } @@ -1771,6 +1775,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { LOG_I("direct find surface %d from value %x\n", map->surface, value); mCurSurface = map->surface; + delete imb; return ret; } @@ -1792,8 +1797,8 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { vinfo.s3dformat = 0xFFFFFFFF; } else { //get all info mapping needs - imb.GetValueInfo(pvinfo); - imb.GetExtraValues(extravalues, extravalues_count); + imb->GetValueInfo(pvinfo); + imb->GetExtraValues(extravalues, extravalues_count); } } else { @@ -1828,6 +1833,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); } else { delete map; + delete imb; LOG_E("surface mapping failed, wrong info or meet serious error\n"); return ret; } @@ -1836,6 +1842,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { } else { //can't map due to no info + delete imb; LOG_E("surface mapping failed, missing information\n"); return ENCODE_NO_REQUEST_DATA; } @@ -1862,6 +1869,8 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { } } + delete imb; + return ret; } diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp index cbe1e09..68f8741 100644 --- a/videoencoder/VideoEncoderH263.cpp +++ b/videoencoder/VideoEncoderH263.cpp @@ -120,6 +120,7 @@ Encode_Status VideoEncoderH263::renderPictureParams() { Encode_Status VideoEncoderH263::renderSliceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; + uint32_t sliceNum; uint32_t sliceHeight; uint32_t sliceHeightInMB; diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp index d93d774..51068f4 100644 --- a/videoencoder/VideoEncoderMP4.cpp +++ b/videoencoder/VideoEncoderMP4.cpp @@ -22,7 +22,9 @@ VideoEncoderMP4::VideoEncoderMP4() Encode_Status VideoEncoderMP4::getHeaderPos( uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize) { + uint8_t *buf = inBuffer; uint32_t bytesLeft = bufSize; + Encode_Status ret = ENCODE_SUCCESS; *headerSize = 0; CHECK_NULL_RETURN_IFFAIL(inBuffer); -- cgit v1.2.3 From ed9195d084dfeba8400532e0363c50a0b80d1767 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Wed, 15 Aug 2012 17:44:19 +0800 Subject: fix libmix encoder Klocwork issues, rename IntelMetadataBuffer APIs BZ: 50695 Fix libmix encoder Klocwork issues and remove all unused variables; rename the IntelMetadataBuffer APIs: SetBytes becomes UnSerialize and GetBytes becomes Serialize (SetBytes/GetBytes will be dropped later); refine encoder code to avoid dynamic memory allocation.
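In outline, the rename keeps the old entry points alive as thin wrappers over the new ones, so existing callers keep compiling until SetBytes/GetBytes are dropped; a minimal sketch of the compatibility pattern, as it appears in the diff below:

    // Old names forward to the new serialization API; SetBytes parses
    // incoming bytes, GetBytes emits the assembled byte stream.
    IMB_Result IntelMetadataBuffer::SetBytes(uint8_t* data, uint32_t size)
    {
        return UnSerialize(data, size);   // bytes in: parse
    }

    IMB_Result IntelMetadataBuffer::GetBytes(uint8_t* &data, uint32_t& size)
    {
        return Serialize(data, size);     // bytes out: generate
    }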
Resubmit patch due to last patch's wrong BZ number issue Change-Id: Ib072e40aed440c6c9eb641992d631bb7e5f83483 Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/62249 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- test/btest.cpp | 4 +-- test/mix_encoder.cpp | 9 +++--- videoencoder/IntelMetadataBuffer.cpp | 63 ++++++++++++++++++++++++++++++++++-- videoencoder/IntelMetadataBuffer.h | 12 +++++-- videoencoder/VideoEncoderAVC.cpp | 14 -------- videoencoder/VideoEncoderBase.cpp | 21 ++++-------- videoencoder/VideoEncoderH263.cpp | 1 - videoencoder/VideoEncoderMP4.cpp | 2 -- 8 files changed, 82 insertions(+), 44 deletions(-) diff --git a/test/btest.cpp b/test/btest.cpp index 26f104f..13b790b 100644 --- a/test/btest.cpp +++ b/test/btest.cpp @@ -37,7 +37,7 @@ int main(int argc, char* argv[]) ret = mb1->SetValueInfo(&vi1); ret = mb1->SetExtraValues(ev1, 10); } - ret = mb1->GetBytes(bytes, size); + ret = mb1->Serialize(bytes, size); printf("assembling IntelMetadataBuffer %s, ret = %d\n", (ret == IMB_SUCCESS)?"Success":"Fail", ret ); printf("size = %d, bytes = ", size); @@ -48,7 +48,7 @@ int main(int argc, char* argv[]) printf("\n"); mb2 = new IntelMetadataBuffer(); - ret = mb2->SetBytes(bytes, size); + ret = mb2->UnSerialize(bytes, size); printf("parsing IntelMetadataBuffer %s, ret = %d\n", (ret == IMB_SUCCESS)?"Success":"Fail", ret ); ret = mb2->GetType(t2); diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index 38fbc00..d4b88c0 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -697,7 +697,7 @@ for(int i=0; i<1; i++) { if (gMode != 4) { - gIMB[i % gSrcFrames]->GetBytes(data, size); + gIMB[i % gSrcFrames]->Serialize(data, size); // printf("srcno =%d, data=%x, size=%d\n", i % gSrcFrames, data, size); }else { @@ -729,11 +729,10 @@ for(int i=0; i<1; i++) stat.total_frames, stat.skipped_frames, stat.average_encode_time, stat.max_encode_time, stat.max_encode_frame, \ stat.min_encode_time, stat.min_encode_frame ); } - if(gVideoEncoder) { - releaseVideoEncoder(gVideoEncoder); - gVideoEncoder = NULL; - } + gVideoEncoder->stop(); + releaseVideoEncoder(gVideoEncoder); + gVideoEncoder = NULL; switch(gMode) { diff --git a/videoencoder/IntelMetadataBuffer.cpp b/videoencoder/IntelMetadataBuffer.cpp index 531d0ca..eb9fe43 100644 --- a/videoencoder/IntelMetadataBuffer.cpp +++ b/videoencoder/IntelMetadataBuffer.cpp @@ -60,6 +60,55 @@ IntelMetadataBuffer::~IntelMetadataBuffer() delete[] mBytes; } + +IntelMetadataBuffer::IntelMetadataBuffer(const IntelMetadataBuffer& imb) + :mType(imb.mType), mValue(imb.mValue), mInfo(NULL), mExtraValues(NULL), + mExtraValues_Count(imb.mExtraValues_Count), mBytes(NULL), mSize(imb.mSize) +{ + if (imb.mInfo) + mInfo = new ValueInfo(*imb.mInfo); + + if (imb.mExtraValues) + { + mExtraValues = new int32_t[mExtraValues_Count]; + memcpy(mExtraValues, imb.mExtraValues, 4 * mExtraValues_Count); + } + + if (imb.mBytes) + { + mBytes = new uint8_t[mSize]; + memcpy(mBytes, imb.mBytes, mSize); + } +} + +const IntelMetadataBuffer& IntelMetadataBuffer::operator=(const IntelMetadataBuffer& imb) +{ + mType = imb.mType; + mValue = imb.mValue; + mInfo = NULL; + mExtraValues = NULL; + mExtraValues_Count = imb.mExtraValues_Count; + mBytes = NULL; + mSize = imb.mSize; + + if (imb.mInfo) + mInfo = new ValueInfo(*imb.mInfo); + + if (imb.mExtraValues) + { + mExtraValues = new int32_t[mExtraValues_Count]; + memcpy(mExtraValues, imb.mExtraValues, 4 * mExtraValues_Count); + } + + if (imb.mBytes) + { + mBytes = new 
uint8_t[mSize]; + memcpy(mBytes, imb.mBytes, mSize); + } + + return *this; +} + IMB_Result IntelMetadataBuffer::GetType(MetadataBufferType& type) { type = mType; @@ -143,7 +192,7 @@ IMB_Result IntelMetadataBuffer::SetExtraValues(int32_t* values, uint32_t num) return IMB_SUCCESS; } -IMB_Result IntelMetadataBuffer::SetBytes(uint8_t* data, uint32_t size) +IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size) { if (!data || size == 0) return IMB_INVAL_PARAM; @@ -214,7 +263,12 @@ IMB_Result IntelMetadataBuffer::SetBytes(uint8_t* data, uint32_t size) return IMB_SUCCESS; } -IMB_Result IntelMetadataBuffer::GetBytes(uint8_t* &data, uint32_t& size) +IMB_Result IntelMetadataBuffer::SetBytes(uint8_t* data, uint32_t size) +{ + return UnSerialize(data, size); +} + +IMB_Result IntelMetadataBuffer::Serialize(uint8_t* &data, uint32_t& size) { if (mBytes == NULL) { @@ -253,6 +307,11 @@ IMB_Result IntelMetadataBuffer::GetBytes(uint8_t* &data, uint32_t& size) return IMB_SUCCESS; } +IMB_Result IntelMetadataBuffer::GetBytes(uint8_t* &data, uint32_t& size) +{ + return Serialize(data, size); +} + uint32_t IntelMetadataBuffer::GetMaxBufferSize() { return 256; diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h index 802ef7b..2eacc5f 100644 --- a/videoencoder/IntelMetadataBuffer.h +++ b/videoencoder/IntelMetadataBuffer.h @@ -72,6 +72,9 @@ public: IntelMetadataBuffer(MetadataBufferType type, int32_t value); //for quick generator ~IntelMetadataBuffer(); + IntelMetadataBuffer(const IntelMetadataBuffer& imb); + const IntelMetadataBuffer& operator=(const IntelMetadataBuffer& imb); + IMB_Result GetType(MetadataBufferType &type); IMB_Result SetType(MetadataBufferType type); IMB_Result GetValue(int32_t &value); @@ -81,12 +84,15 @@ public: IMB_Result GetExtraValues(int32_t* &values, uint32_t &num); IMB_Result SetExtraValues(int32_t *values, uint32_t num); - //for bytes input, also for parser + //for bytes input, also for parser, will be obsoleted IMB_Result SetBytes(uint8_t* data, uint32_t size); - - //for bytes output, also for generator + //for bytes output, also for generator, will be obsoleted IMB_Result GetBytes(uint8_t* &data, uint32_t& size); + //New API for bytes input/ouput, UnSerialize=SetBytes, Serialize=GetBytes + IMB_Result UnSerialize(uint8_t* data, uint32_t size); + IMB_Result Serialize(uint8_t* &data, uint32_t& size); + //Static, for get max IntelMetadataBuffer size static uint32_t GetMaxBufferSize(); diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 5922e6a..738eefa 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -197,9 +197,6 @@ Encode_Status VideoEncoderAVC::getOutput(VideoEncOutputBuffer *outBuffer) { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; bool useLocalBuffer = false; - uint32_t nalType = 0; - uint32_t nalSize = 0; - uint32_t nalOffset = 0; uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval; LOG_V("Begin\n"); @@ -283,8 +280,6 @@ Encode_Status VideoEncoderAVC::getOneNALUnit( uint32_t *nalType, uint32_t *nalOffset, uint32_t status) { uint32_t pos = 0; uint32_t zeroByteCount = 0; - uint32_t prefixLength = 0; - uint32_t leadingZeroCnt = 0; uint32_t singleByteTable[3][2] = {{1,0},{2,0},{2,3}}; uint32_t dataRemaining = 0; uint8_t *dataPtr; @@ -520,7 +515,6 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf uint32_t nalSize = 0; uint32_t nalOffset = 0; uint32_t sizeCopiedHere = 0; - uint32_t 
sizeToBeCopied = 0; CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf); @@ -648,7 +642,6 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(void) { Encode_Status VideoEncoderAVC::renderMaxSliceSize() { VAStatus vaStatus = VA_STATUS_SUCCESS; - Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n\n"); if (mComParams.rcMode != RATE_CONTROL_VCM) { @@ -794,12 +787,6 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { avcSeqParams.intra_idr_period = mVideoParamsAVC.idrInterval; avcSeqParams.picture_width_in_mbs = (mComParams.resolution.width + 15) / 16; avcSeqParams.picture_height_in_mbs = (mComParams.resolution.height + 15) / 16; - if((avcSeqParams.picture_width_in_mbs >=1920)|| (avcSeqParams.picture_height_in_mbs >=1080)) - { - device_info = vaQueryVendorString(mVADisplay); - if(strstr(device_info, "LEXINGTON")) - return ENCODE_INVALID_PARAMS; - } level = calcLevel (avcSeqParams.picture_width_in_mbs * avcSeqParams.picture_height_in_mbs); avcSeqParams.level_idc = level; @@ -913,7 +900,6 @@ Encode_Status VideoEncoderAVC::renderSliceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; uint32_t sliceNum = 0; - uint32_t sliceHeight = 0; uint32_t sliceIndex = 0; uint32_t sliceHeightInMB = 0; uint32_t maxSliceNum = 0; diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index ae29aef..91f9331 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -657,7 +657,6 @@ CLEAN_UP: Encode_Status VideoEncoderBase::prepareForOutput( VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer) { - Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; VACodedBufferSegment *vaCodedSeg = NULL; uint32_t status = 0; @@ -741,7 +740,6 @@ Encode_Status VideoEncoderBase::prepareForOutput( Encode_Status VideoEncoderBase::cleanupForOutput() { VAStatus vaStatus = VA_STATUS_SUCCESS; - Encode_Status ret = ENCODE_SUCCESS; //mCurSegment is NULL means all data has been copied out if (mCurSegment == NULL && mCodedBufferMapped) { @@ -1741,25 +1739,23 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { int32_t *extravalues = NULL; unsigned int extravalues_count = 0; - IntelMetadataBuffer *imb = new IntelMetadataBuffer; + IntelMetadataBuffer imb; SurfaceMap *map = NULL; if (mStoreMetaDataInBuffers.isEnabled) { //metadatabuffer mode LOG_I("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); - if (imb->SetBytes(inBuffer->data, inBuffer->size) != IMB_SUCCESS) { + if (imb.UnSerialize(inBuffer->data, inBuffer->size) != IMB_SUCCESS) { //fail to parse buffer - delete imb; return ENCODE_NO_REQUEST_DATA; } - imb->GetType(type); - imb->GetValue(value); + imb.GetType(type); + imb.GetValue(value); } else { //raw mode LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); if (! 
inBuffer->data || inBuffer->size == 0) { - delete imb; return ENCODE_NULL_PTR; } @@ -1775,7 +1771,6 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { LOG_I("direct find surface %d from value %x\n", map->surface, value); mCurSurface = map->surface; - delete imb; return ret; } @@ -1797,8 +1792,8 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { vinfo.s3dformat = 0xFFFFFFFF; } else { //get all info mapping needs - imb->GetValueInfo(pvinfo); - imb->GetExtraValues(extravalues, extravalues_count); + imb.GetValueInfo(pvinfo); + imb.GetExtraValues(extravalues, extravalues_count); } } else { @@ -1833,7 +1828,6 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); } else { delete map; - delete imb; LOG_E("surface mapping failed, wrong info or meet serious error\n"); return ret; } @@ -1842,7 +1836,6 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { } else { //can't map due to no info - delete imb; LOG_E("surface mapping failed, missing information\n"); return ENCODE_NO_REQUEST_DATA; } @@ -1869,8 +1862,6 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { } } - delete imb; - return ret; } diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp index 68f8741..cbe1e09 100644 --- a/videoencoder/VideoEncoderH263.cpp +++ b/videoencoder/VideoEncoderH263.cpp @@ -120,7 +120,6 @@ Encode_Status VideoEncoderH263::renderPictureParams() { Encode_Status VideoEncoderH263::renderSliceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; - uint32_t sliceNum; uint32_t sliceHeight; uint32_t sliceHeightInMB; diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp index 51068f4..d93d774 100644 --- a/videoencoder/VideoEncoderMP4.cpp +++ b/videoencoder/VideoEncoderMP4.cpp @@ -22,9 +22,7 @@ VideoEncoderMP4::VideoEncoderMP4() Encode_Status VideoEncoderMP4::getHeaderPos( uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize) { - uint8_t *buf = inBuffer; uint32_t bytesLeft = bufSize; - Encode_Status ret = ENCODE_SUCCESS; *headerSize = 0; CHECK_NULL_RETURN_IFFAIL(inBuffer); -- cgit v1.2.3 From ace5745d718e994c7de5c87709446459c771c955 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Mon, 20 Aug 2012 16:35:05 +0800 Subject: Fix video editor crash issue BZ: 52577 When the SD card is short of storage, the video editor fails to write the encoded data to the SD card. When this error happens, the current error handling code forgets to decrease the encoded buffer's reference count. This makes the buffer's refcount check fail, which leads to a video editor crash.
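The fix reorders the output path so the buffer reference is returned to the puller before the error check, keeping the reference count balanced even when the write fails; in reduced form, the reordering from the diff below:

    // processOutputBuffer may fail when the SD card is full; put the buffer
    // back before VIDEOEDITOR_CHECK so its reference count is decreased on
    // the error path as well.
    err = VideoEditorVideoEncoder_processOutputBuffer(pEncoderContext, outputBuffer);
    pEncoderContext->mPuller->putBuffer(outputBuffer);  // always release the reference
    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);          // only then branch on the error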
Change-Id: I175790640a5fbbe226d0e17f3d6a752337e6b124 Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/62380 Reviewed-by: Gu, Wangyi Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h | 2 +- frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h index 81acf17..4e1c929 100644 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h @@ -44,7 +44,7 @@ #define VIDEOEDITOR_CHECK(test, errCode) \ { \ if( !(test) ) { \ - ALOGV("!!! %s (L%d) check failed : " #test ", yields error 0x%.8x", \ + ALOGW("!!! %s (L%d) check failed : " #test ", yields error 0x%.8x", \ __FILE__, __LINE__, errCode); \ err = (errCode); \ goto cleanUp; \ diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp index e22075e..54bfa7d 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp @@ -980,9 +980,9 @@ M4OSA_ERR VideoEditorVideoEncoder_encode(M4ENCODER_Context pContext, // Provide the encoded AU to the writer err = VideoEditorVideoEncoder_processOutputBuffer(pEncoderContext, outputBuffer); + pEncoderContext->mPuller->putBuffer(outputBuffer); VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - pEncoderContext->mPuller->putBuffer(outputBuffer); } } @@ -1059,9 +1059,9 @@ M4OSA_ERR VideoEditorVideoEncoder_stop(M4ENCODER_Context pContext) { err = VideoEditorVideoEncoder_processOutputBuffer( pEncoderContext, outputBuffer); + pEncoderContext->mPuller->putBuffer(outputBuffer); VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - pEncoderContext->mPuller->putBuffer(outputBuffer); } pEncoderContext->mState = STARTED; -- cgit v1.2.3 From 5b3a9608ea1727ad31c0ce61c8608413c794ebdc Mon Sep 17 00:00:00 2001 From: "Gu, Wangyi" Date: Mon, 20 Aug 2012 10:29:52 +0800 Subject: Movie studio: fix abnormal playback of the exported video BZ: 46307 Skip stale video access units (AUs) when writing AUs to the file.
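The guard added by the diff below drops any access unit whose composition timestamp is older than either the last written CTS or the pending access unit's CTS; in reduced form:

    // A stale AU would push timestamps backwards in the output file;
    // skip it instead of writing it.
    if (Cts < pEncoderContext->mLastCTS || Cts < pEncoderContext->mAccessUnit->CTS) {
        goto cleanUp;   // drop the old access unit
    }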
Change-Id: I8f9b1ea36778d0f4b7fc4b7db2b5bd394d28307a Signed-off-by: Gu, Wangyi Reviewed-on: http://android.intel.com:8080/62799 Reviewed-by: Feng, Wei Reviewed-by: Tang, Richard Reviewed-by: Wang, Lili A Reviewed-by: Wang, Elaine Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp index 54bfa7d..6ad5122 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp @@ -841,9 +841,10 @@ M4OSA_ERR VideoEditorVideoEncoder_processOutputBuffer( LOGV("[TS_CHECK] VI/ENC WRITE frame %d @ %lld -> %d (last %d)", pEncoderContext->mNbOutputFrames, i64Tmp, Cts, pEncoderContext->mLastCTS); - if ( Cts < pEncoderContext->mLastCTS ) { + if ( Cts < pEncoderContext->mLastCTS || Cts < pEncoderContext->mAccessUnit->CTS ) { LOGV("VideoEncoder_processOutputBuffer WARNING : Cts is going " - "backwards %d < %d", Cts, pEncoderContext->mLastCTS); + "backwards %d < %d(or %lld)", Cts, pEncoderContext->mLastCTS, + pEncoderContext->mAccessUnit->CTS); goto cleanUp; } LOGV("VideoEditorVideoEncoder_processOutputBuffer : %d %d", -- cgit v1.2.3 From 7ca1755ab4c9ef5d85192e8375270b752c6aa1e8 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Mon, 27 Aug 2012 14:18:47 +0800 Subject: disable NULL checking in setUpstreamBuffer since upstream buffers may be surfaces; all information is stored in the surface. BZ: 46678 The libmix encoder shall ignore bufAttrib if upstream buffers are surfaces. Change-Id: Ib0be7dbc1a1c951fa9ca72f0f72d9209fe875fcb Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/63523 Reviewed-by: Tang, Richard Reviewed-by: Wang, Elaine Reviewed-by: Tao, Tao Q Reviewed-by: Feng, Wei Reviewed-by: Wang, Lili A Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 91f9331..5af1aaa 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1428,11 +1428,6 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS return ENCODE_FAIL; } - if (upStreamBuffer->bufAttrib == NULL) { - LOG_E ("Buffer Attrib doesn't set by client, return error"); - return ENCODE_INVALID_PARAMS; - } - for(unsigned int i=0; i < upStreamBuffer->bufCnt; i++) { if (findSurfaceMapByValue(mSrcSurfaceMapList, upStreamBuffer->bufList[i]) != NULL) //already mapped continue; @@ -1445,11 +1440,13 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS map->vinfo.mode = (MemMode)upStreamBuffer->bufferMode; map->vinfo.handle = (uint32_t)upStreamBuffer->display; map->vinfo.size = 0; - map->vinfo.width = upStreamBuffer->bufAttrib->realWidth; - map->vinfo.height = upStreamBuffer->bufAttrib->realHeight; - map->vinfo.lumaStride = upStreamBuffer->bufAttrib->lumaStride; - map->vinfo.chromStride = upStreamBuffer->bufAttrib->chromStride; - map->vinfo.format = upStreamBuffer->bufAttrib->format; + if (upStreamBuffer->bufAttrib) { + map->vinfo.width = upStreamBuffer->bufAttrib->realWidth; + map->vinfo.height = upStreamBuffer->bufAttrib->realHeight; +
map->vinfo.lumaStride = upStreamBuffer->bufAttrib->lumaStride; + map->vinfo.chromStride = upStreamBuffer->bufAttrib->chromStride; + map->vinfo.format = upStreamBuffer->bufAttrib->format; + } map->vinfo.s3dformat = 0xFFFFFFFF; map->added = false; map->next = NULL; -- cgit v1.2.3 From b4a1d96e786acbbdaf3b0a7b1e8786f23c98e4fd Mon Sep 17 00:00:00 2001 From: Liu Bolun Date: Tue, 28 Aug 2012 09:24:12 +0800 Subject: Add error handling and protection for vaInitialize failure. BZ: 53697 vaInitialize may rarely return a failure value, which can cause the mediaserver to die, so we need to add some error handling and protection for vaInitialize failure. Add a variable to flag whether vaInitialize succeeded; if not, it is marked as false. Before starting encoding, check this variable; if it is false, return an error code to the OMX component and stop encoding. Change-Id: I335d21e36c77b5978fa2db978699b6068c8401ed Signed-off-by: Liu Bolun Reviewed-on: http://android.intel.com:8080/63634 Reviewed-by: Wang, Elaine Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 9 ++++++++- videoencoder/VideoEncoderBase.h | 2 ++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 5af1aaa..d102165 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -73,7 +73,8 @@ VideoEncoderBase::VideoEncoderBase() ,mSliceSizeOverflow(false) ,mCodedBufferMapped(false) ,mDataCopiedOut(false) - ,mKeyFrame(true) { + ,mKeyFrame(true) + ,mInitCheck(true) { VAStatus vaStatus = VA_STATUS_SUCCESS; // here the display can be any value, use following one @@ -96,6 +97,7 @@ VideoEncoderBase::VideoEncoderBase() LOG_V("vaInitialize \n"); if (vaStatus != VA_STATUS_SUCCESS) { LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus); + mInitCheck = false; } #ifdef VIDEO_ENC_STATISTICS_ENABLE @@ -135,6 +137,11 @@ Encode_Status VideoEncoderBase::start() { return ENCODE_ALREADY_INIT; } + if (!mInitCheck) { + LOGE("Encoder Initialize fail can not start"); + return ENCODE_DRIVER_FAIL; + } + vaAttrib[0].type = VAConfigAttribRTFormat; vaAttrib[1].type = VAConfigAttribRateControl; vaAttrib[0].value = VA_RT_FORMAT_YUV420; diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index cf65085..9ab7bc6 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -155,6 +155,8 @@ protected: bool mDataCopiedOut; bool mKeyFrame; + int32_t mInitCheck; + #ifdef VIDEO_ENC_STATISTICS_ENABLE VideoStatistics mVideoStat; #endif -- cgit v1.2.3 From 3ec2faeaf4251857f5aed028ca82c538477bc5c9 Mon Sep 17 00:00:00 2001 From: "Gu, Wangyi" Date: Wed, 29 Aug 2012 14:51:05 +0800 Subject: Enable the memory copy saving optimization for Jellybean BZ: 54443 Android original design: there is a local queue in movie studio used to maintain the decoded data from the decoder (this needs a memory copy from the decoder to the queue); movie studio then renders the data in the queue (this needs another memory copy from the queue to the output buffer). Optimization design: keep the decoded data (the whole MediaBuffer struct, using the reference method) in the local queue instead of copying only MediaBuffer->data() into the queue, saving one memory copy. In the render process, copy the data from MediaBuffer->data() to the output buffer.
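A reduced sketch of the reference-keeping scheme implemented by the diff below: a pool slot stores the decoder's MediaBuffer pointer and takes a reference on it, instead of holding a private copy of its payload.

    // Queueing: take a reference instead of copying the payload.
    pDecoderBuffer->add_ref();                // the pool now co-owns the MediaBuffer
    tmpDecBuffer->mBuffer = pDecoderBuffer;   // no memcpy at this point

    // Recycling a slot: drop the reference and reset the bookkeeping.
    tmpDecBuffer->mBuffer->release();
    tmpDecBuffer->mBuffer  = M4OSA_NULL;
    tmpDecBuffer->size     = 0;
    tmpDecBuffer->buffCTS  = -1;

The single remaining copy happens at render time, from MediaBuffer->data() into the output planes.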
Change-Id: Ibd06d96cc118c72f86780767b3c344b865d8252a Signed-off-by: Gu, Wangyi Reviewed-on: http://android.intel.com:8080/63807 Reviewed-by: Tang, Richard Reviewed-by: Feng, Wei Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- frameworks/videoedit/stagefrightshells/Android.mk | 2 +- .../stagefrightshells/VideoEditorBuffer.c | 266 ------------------ .../stagefrightshells/VideoEditorBuffer.cpp | 310 +++++++++++++++++++++ .../stagefrightshells/VideoEditorBuffer.h | 162 +++++++++++ .../stagefrightshells/VideoEditorVideoDecoder.cpp | 88 +++--- 5 files changed, 516 insertions(+), 312 deletions(-) delete mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorBuffer.c create mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorBuffer.cpp create mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorBuffer.h diff --git a/frameworks/videoedit/stagefrightshells/Android.mk b/frameworks/videoedit/stagefrightshells/Android.mk index 2e7c5ef..4c377fa 100644 --- a/frameworks/videoedit/stagefrightshells/Android.mk +++ b/frameworks/videoedit/stagefrightshells/Android.mk @@ -23,7 +23,7 @@ LOCAL_SRC_FILES:= \ VideoEditorAudioDecoder.cpp \ VideoEditorMp3Reader.cpp \ VideoEditor3gpReader.cpp \ - VideoEditorBuffer.c \ + VideoEditorBuffer.cpp \ VideoEditorVideoEncoder.cpp \ VideoEditorAudioEncoder.cpp \ IntelVideoEditorUtils.cpp \ diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.c b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.c deleted file mode 100644 index 4bed81f..0000000 --- a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.c +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** -************************************************************************* -* @file VideoEditorBuffer.c -* @brief StageFright shell Buffer -************************************************************************* -*/ -#undef M4OSA_TRACE_LEVEL -#define M4OSA_TRACE_LEVEL 1 - -#include "VideoEditorBuffer.h" -#include "utils/Log.h" - -#define VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE 40 - -#define VIDEOEDITOR_SAFE_FREE(p) \ -{ \ - if(M4OSA_NULL != p) \ - { \ - free(p); \ - p = M4OSA_NULL; \ - } \ -} - -/** - ************************************************************************ - M4OSA_ERR VIDEOEDITOR_BUFFER_allocatePool(VIDEOEDITOR_BUFFER_Pool** ppool, - * M4OSA_UInt32 nbBuffers) - * @brief Allocate a pool of nbBuffers buffers - * - * @param ppool : IN The buffer pool to create - * @param nbBuffers : IN The number of buffers in the pool - * @param poolName : IN a name given to the pool - * @return Error code - ************************************************************************ -*/ -M4OSA_ERR VIDEOEDITOR_BUFFER_allocatePool(VIDEOEDITOR_BUFFER_Pool** ppool, - M4OSA_UInt32 nbBuffers, M4OSA_Char* poolName) -{ - M4OSA_ERR lerr = M4NO_ERROR; - VIDEOEDITOR_BUFFER_Pool* pool; - - ALOGV("VIDEOEDITOR_BUFFER_allocatePool : ppool = 0x%x nbBuffers = %d ", - ppool, nbBuffers); - - pool = M4OSA_NULL; - pool = (VIDEOEDITOR_BUFFER_Pool*)M4OSA_32bitAlignedMalloc( - sizeof(VIDEOEDITOR_BUFFER_Pool), VIDEOEDITOR_BUFFER_EXTERNAL, - (M4OSA_Char*)("VIDEOEDITOR_BUFFER_allocatePool: pool")); - if (M4OSA_NULL == pool) - { - lerr = M4ERR_ALLOC; - goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup; - } - - ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Allocating Pool buffers"); - pool->pNXPBuffer = M4OSA_NULL; - pool->pNXPBuffer = (VIDEOEDITOR_BUFFER_Buffer*)M4OSA_32bitAlignedMalloc( - sizeof(VIDEOEDITOR_BUFFER_Buffer)*nbBuffers, - VIDEOEDITOR_BUFFER_EXTERNAL, - (M4OSA_Char*)("BUFFER_allocatePool: pNXPBuffer")); - if(M4OSA_NULL == pool->pNXPBuffer) - { - lerr = M4ERR_ALLOC; - goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup; - } - - ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Allocating Pool name buffer"); - pool->poolName = M4OSA_NULL; - pool->poolName = (M4OSA_Char*)M4OSA_32bitAlignedMalloc( - VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE,VIDEOEDITOR_BUFFER_EXTERNAL, - (M4OSA_Char*)("VIDEOEDITOR_BUFFER_allocatePool: poolname")); - if(pool->poolName == M4OSA_NULL) - { - lerr = M4ERR_ALLOC; - goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup; - } - - ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Assigning Pool name buffer"); - - memset((void *)pool->poolName, 0,VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE); - if (strlen((const char *)poolName) < VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE) { - memcpy((void *)pool->poolName, (void *)poolName, - strlen((const char *)poolName)); - } - pool->NB = nbBuffers; - -VIDEOEDITOR_BUFFER_allocatePool_Cleanup: - if(M4NO_ERROR != lerr) - { - VIDEOEDITOR_SAFE_FREE(pool->pNXPBuffer); - VIDEOEDITOR_SAFE_FREE(pool->poolName); - VIDEOEDITOR_SAFE_FREE(pool); - } - *ppool = pool; - ALOGV("VIDEOEDITOR_BUFFER_allocatePool END"); - - return lerr; -} - -/** - ************************************************************************ - M4OSA_ERR VIDEOEDITOR_BUFFER_freePool(VIDEOEDITOR_BUFFER_Pool* ppool) - * @brief Deallocate a buffer pool - * - * @param ppool : IN The buffer pool to free - * @return Error code - ************************************************************************ -*/ -M4OSA_ERR VIDEOEDITOR_BUFFER_freePool(VIDEOEDITOR_BUFFER_Pool* ppool) -{ - M4OSA_ERR err; - M4OSA_UInt32 j = 0; - - ALOGV("VIDEOEDITOR_BUFFER_freePool : 
ppool = 0x%x", ppool); - - err = M4NO_ERROR; - - for (j = 0; j < ppool->NB; j++) - { - if(M4OSA_NULL != ppool->pNXPBuffer[j].pData) - { - free(ppool->pNXPBuffer[j].pData); - ppool->pNXPBuffer[j].pData = M4OSA_NULL; - } - } - - if(ppool != M4OSA_NULL) - { - SAFE_FREE(ppool->pNXPBuffer); - SAFE_FREE(ppool->poolName); - SAFE_FREE(ppool); - } - - return(err); -} - -/** - ************************************************************************ - M4OSA_ERR VIDEOEDITOR_BUFFER_getBuffer(VIDEOEDITOR_BUFFER_Pool* ppool, - * VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) - * @brief Returns a buffer in a given state - * - * @param ppool : IN The buffer pool - * @param desiredState : IN The buffer state - * @param pNXPBuffer : IN The selected buffer - * @return Error code - ************************************************************************ -*/ -M4OSA_ERR VIDEOEDITOR_BUFFER_getBuffer(VIDEOEDITOR_BUFFER_Pool* ppool, - VIDEOEDITOR_BUFFER_State desiredState, - VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) -{ - M4OSA_ERR err = M4NO_ERROR; - M4OSA_Bool bFound = M4OSA_FALSE; - M4OSA_UInt32 i, ibuf; - - ALOGV("VIDEOEDITOR_BUFFER_getBuffer from %s in state=%d", - ppool->poolName, desiredState); - - ibuf = 0; - - for (i=0; i < ppool->NB; i++) - { - bFound = (ppool->pNXPBuffer[i].state == desiredState); - if (bFound) - { - ibuf = i; - break; - } - } - - if(!bFound) - { - ALOGV("VIDEOEDITOR_BUFFER_getBuffer No buffer available in state %d", - desiredState); - *pNXPBuffer = M4OSA_NULL; - return M4ERR_NO_BUFFER_AVAILABLE; - } - - /* case where a buffer has been found */ - *pNXPBuffer = &(ppool->pNXPBuffer[ibuf]); - - ALOGV("VIDEOEDITOR_BUFFER_getBuffer: idx = %d", ibuf); - - return(err); -} - -M4OSA_ERR VIDEOEDITOR_BUFFER_initPoolBuffers(VIDEOEDITOR_BUFFER_Pool* pool, - M4OSA_UInt32 lSize) -{ - M4OSA_ERR err = M4NO_ERROR; - M4OSA_UInt32 index, j; - - /** - * Initialize all the buffers in the pool */ - for(index = 0; index < pool->NB; index++) - { - pool->pNXPBuffer[index].pData = M4OSA_NULL; - pool->pNXPBuffer[index].pData = (M4OSA_Void*)M4OSA_32bitAlignedMalloc( - lSize, VIDEOEDITOR_BUFFER_EXTERNAL, - (M4OSA_Char*)("BUFFER_initPoolBuffers: Buffer data")); - if(M4OSA_NULL == pool->pNXPBuffer[index].pData) - { - for (j = 0; j < index; j++) - { - if(M4OSA_NULL != pool->pNXPBuffer[j].pData) - { - free(pool->pNXPBuffer[j].pData); - pool->pNXPBuffer[j].pData = M4OSA_NULL; - } - } - err = M4ERR_ALLOC; - return err; - } - pool->pNXPBuffer[index].size = 0; - pool->pNXPBuffer[index].state = VIDEOEDITOR_BUFFER_kEmpty; - pool->pNXPBuffer[index].idx = index; - pool->pNXPBuffer[index].buffCTS = -1; - } - return err; -} - -M4OSA_ERR VIDEOEDITOR_BUFFER_getOldestBuffer(VIDEOEDITOR_BUFFER_Pool *pool, - VIDEOEDITOR_BUFFER_State desiredState, - VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) -{ - M4OSA_ERR err = M4NO_ERROR; - M4OSA_UInt32 index, j; - M4_MediaTime candidateTimeStamp = (M4_MediaTime)0x7ffffff; - M4OSA_Bool bFound = M4OSA_FALSE; - - *pNXPBuffer = M4OSA_NULL; - for(index = 0; index< pool->NB; index++) - { - if(pool->pNXPBuffer[index].state == desiredState) - { - if(pool->pNXPBuffer[index].buffCTS <= candidateTimeStamp) - { - bFound = M4OSA_TRUE; - candidateTimeStamp = pool->pNXPBuffer[index].buffCTS; - *pNXPBuffer = &(pool->pNXPBuffer[index]); - } - } - } - if(M4OSA_FALSE == bFound) - { - ALOGV("VIDEOEDITOR_BUFFER_getOldestBuffer WARNING no buffer available"); - err = M4ERR_NO_BUFFER_AVAILABLE; - } - return err; -} diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.cpp 
b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.cpp new file mode 100644 index 0000000..07d158a --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.cpp @@ -0,0 +1,310 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** +************************************************************************* +* @file VideoEditorBuffer.cpp +* @brief StageFright shell Buffer +************************************************************************* +*/ +#undef M4OSA_TRACE_LEVEL +#define M4OSA_TRACE_LEVEL 1 + +#include "VideoEditorBuffer.h" +#include "utils/Log.h" + +#define VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE 40 + +#define VIDEOEDITOR_SAFE_FREE(p) \ +{ \ + if(M4OSA_NULL != p) \ + { \ + free(p); \ + p = M4OSA_NULL; \ + } \ +} + +/** + ************************************************************************ + M4OSA_ERR VIDEOEDITOR_BUFFER_allocatePool(VIDEOEDITOR_BUFFER_Pool** ppool, + * M4OSA_UInt32 nbBuffers) + * @brief Allocate a pool of nbBuffers buffers + * + * @param ppool : IN The buffer pool to create + * @param nbBuffers : IN The number of buffers in the pool + * @param poolName : IN a name given to the pool + * @return Error code + ************************************************************************ +*/ +M4OSA_ERR VIDEOEDITOR_BUFFER_allocatePool(VIDEOEDITOR_BUFFER_Pool** ppool, + M4OSA_UInt32 nbBuffers, M4OSA_Char* poolName) +{ + M4OSA_ERR lerr = M4NO_ERROR; + VIDEOEDITOR_BUFFER_Pool* pool; + + ALOGV("VIDEOEDITOR_BUFFER_allocatePool : ppool = 0x%x nbBuffers = %d ", + ppool, nbBuffers); + + pool = M4OSA_NULL; + pool = (VIDEOEDITOR_BUFFER_Pool*)M4OSA_32bitAlignedMalloc( + sizeof(VIDEOEDITOR_BUFFER_Pool), VIDEOEDITOR_BUFFER_EXTERNAL, + (M4OSA_Char*)("VIDEOEDITOR_BUFFER_allocatePool: pool")); + if (M4OSA_NULL == pool) + { + lerr = M4ERR_ALLOC; + goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup; + } + + ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Allocating Pool buffers"); + pool->pNXPBuffer = M4OSA_NULL; + pool->pNXPBuffer = (VIDEOEDITOR_BUFFER_Buffer*)M4OSA_32bitAlignedMalloc( + sizeof(VIDEOEDITOR_BUFFER_Buffer)*nbBuffers, + VIDEOEDITOR_BUFFER_EXTERNAL, + (M4OSA_Char*)("BUFFER_allocatePool: pNXPBuffer")); + if(M4OSA_NULL == pool->pNXPBuffer) + { + lerr = M4ERR_ALLOC; + goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup; + } + + ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Allocating Pool name buffer"); + pool->poolName = M4OSA_NULL; + pool->poolName = (M4OSA_Char*)M4OSA_32bitAlignedMalloc( + VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE,VIDEOEDITOR_BUFFER_EXTERNAL, + (M4OSA_Char*)("VIDEOEDITOR_BUFFER_allocatePool: poolname")); + if(pool->poolName == M4OSA_NULL) + { + lerr = M4ERR_ALLOC; + goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup; + } + + ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Assigning Pool name buffer"); + + memset((void *)pool->poolName, 0,VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE); + if (strlen((const char *)poolName) < VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE) { + memcpy((void *)pool->poolName, (void *)poolName, + 
strlen((const char *)poolName)); + } + pool->NB = nbBuffers; + +VIDEOEDITOR_BUFFER_allocatePool_Cleanup: + if(M4NO_ERROR != lerr) + { + VIDEOEDITOR_SAFE_FREE(pool->pNXPBuffer); + VIDEOEDITOR_SAFE_FREE(pool->poolName); + VIDEOEDITOR_SAFE_FREE(pool); + } + *ppool = pool; + ALOGV("VIDEOEDITOR_BUFFER_allocatePool END"); + + return lerr; +} + +/** + ************************************************************************ + M4OSA_ERR VIDEOEDITOR_BUFFER_freePool_Ext(VIDEOEDITOR_BUFFER_Pool* ppool) + * @brief Deallocate a buffer pool + * + * @param ppool : IN The buffer pool to free + * @return Error code + ************************************************************************ +*/ +M4OSA_ERR VIDEOEDITOR_BUFFER_freePool_Ext(VIDEOEDITOR_BUFFER_Pool* ppool) +{ + M4OSA_ERR err; + M4OSA_UInt32 j = 0; + + ALOGV("VIDEOEDITOR_BUFFER_freePool_Ext : ppool = 0x%x", ppool); + + err = M4NO_ERROR; + + for (j = 0; j < ppool->NB; j++) + { + if(M4OSA_NULL != ppool->pNXPBuffer[j].mBuffer) + { + ppool->pNXPBuffer[j].mBuffer->release(); + ppool->pNXPBuffer[j].state = VIDEOEDITOR_BUFFER_kEmpty; + ppool->pNXPBuffer[j].mBuffer = M4OSA_NULL; + ppool->pNXPBuffer[j].size = 0; + ppool->pNXPBuffer[j].buffCTS = -1; + } + } + + if(ppool != M4OSA_NULL) + { + SAFE_FREE(ppool->pNXPBuffer); + SAFE_FREE(ppool->poolName); + SAFE_FREE(ppool); + } + + return(err); +} + +/** + ************************************************************************ + M4OSA_ERR VIDEOEDITOR_BUFFER_getBuffer(VIDEOEDITOR_BUFFER_Pool* ppool, + * VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) + * @brief Returns a buffer in a given state + * + * @param ppool : IN The buffer pool + * @param desiredState : IN The buffer state + * @param pNXPBuffer : IN The selected buffer + * @return Error code + ************************************************************************ +*/ +M4OSA_ERR VIDEOEDITOR_BUFFER_getBuffer(VIDEOEDITOR_BUFFER_Pool* ppool, + VIDEOEDITOR_BUFFER_State desiredState, + VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) +{ + M4OSA_ERR err = M4NO_ERROR; + M4OSA_Bool bFound = M4OSA_FALSE; + M4OSA_UInt32 i, ibuf; + + ALOGV("VIDEOEDITOR_BUFFER_getBuffer from %s in state=%d", + ppool->poolName, desiredState); + + ibuf = 0; + + for (i=0; i < ppool->NB; i++) + { + bFound = (ppool->pNXPBuffer[i].state == desiredState); + if (bFound) + { + ibuf = i; + break; + } + } + + if(!bFound) + { + ALOGV("VIDEOEDITOR_BUFFER_getBuffer No buffer available in state %d", + desiredState); + *pNXPBuffer = M4OSA_NULL; + return M4ERR_NO_BUFFER_AVAILABLE; + } + + /* case where a buffer has been found */ + *pNXPBuffer = &(ppool->pNXPBuffer[ibuf]); + + ALOGV("VIDEOEDITOR_BUFFER_getBuffer: idx = %d", ibuf); + + return(err); +} + +/** + ************************************************************************ + void VIDEOEDITOR_BUFFER_getBufferForDecoder(VIDEOEDITOR_BUFFER_Pool* ppool, + * VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) + * @brief Returns a buffer in a given state + * + * @param ppool : IN The buffer pool + * @param desiredState : IN The buffer state + * @param pNXPBuffer : IN The selected buffer + * @return Error code + ************************************************************************ +*/ +void VIDEOEDITOR_BUFFER_getBufferForDecoder(VIDEOEDITOR_BUFFER_Pool* ppool) +{ + M4OSA_Bool bFound = M4OSA_FALSE; + M4OSA_UInt32 i, ibuf; + M4_MediaTime candidateTimeStamp = (M4_MediaTime)0x7ffffff; + ibuf = 0; + + for (i=0; i < ppool->NB; i++) + { + bFound = (ppool->pNXPBuffer[i].state == VIDEOEDITOR_BUFFER_kEmpty); + if (bFound) + { + break; + } + } + + if(!bFound) + { + for(i = 0; i< 
ppool->NB; i++) + { + if(ppool->pNXPBuffer[i].state == VIDEOEDITOR_BUFFER_kFilled) + { + if(ppool->pNXPBuffer[i].buffCTS <= candidateTimeStamp) + { + bFound = M4OSA_TRUE; + candidateTimeStamp = ppool->pNXPBuffer[i].buffCTS; + ibuf = i; + } + } + } + + if(M4OSA_TRUE == bFound) + { + if(M4OSA_NULL != ppool->pNXPBuffer[ibuf].mBuffer) { + ppool->pNXPBuffer[ibuf].mBuffer->release(); + ppool->pNXPBuffer[ibuf].state = VIDEOEDITOR_BUFFER_kEmpty; + ppool->pNXPBuffer[ibuf].mBuffer = M4OSA_NULL; + ppool->pNXPBuffer[ibuf].size = 0; + ppool->pNXPBuffer[ibuf].buffCTS = -1; + } + } + + } + +} +M4OSA_ERR VIDEOEDITOR_BUFFER_initPoolBuffers_Ext(VIDEOEDITOR_BUFFER_Pool* pool, + M4OSA_UInt32 lSize) +{ + M4OSA_ERR err = M4NO_ERROR; + M4OSA_UInt32 index, i, j; + + /** + * Initialize all the buffers in the pool */ + for(index = 0; index < pool->NB; index++) + { + pool->pNXPBuffer[index].mBuffer = M4OSA_NULL; + pool->pNXPBuffer[index].size = 0; + pool->pNXPBuffer[index].state = VIDEOEDITOR_BUFFER_kEmpty; + pool->pNXPBuffer[index].idx = index; + pool->pNXPBuffer[index].buffCTS = -1; + } + return err; +} + +M4OSA_ERR VIDEOEDITOR_BUFFER_getOldestBuffer(VIDEOEDITOR_BUFFER_Pool *pool, + VIDEOEDITOR_BUFFER_State desiredState, + VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) +{ + M4OSA_ERR err = M4NO_ERROR; + M4OSA_UInt32 index, j; + M4_MediaTime candidateTimeStamp = (M4_MediaTime)0x7ffffff; + M4OSA_Bool bFound = M4OSA_FALSE; + + *pNXPBuffer = M4OSA_NULL; + for(index = 0; index< pool->NB; index++) + { + if(pool->pNXPBuffer[index].state == desiredState) + { + if(pool->pNXPBuffer[index].buffCTS <= candidateTimeStamp) + { + bFound = M4OSA_TRUE; + candidateTimeStamp = pool->pNXPBuffer[index].buffCTS; + *pNXPBuffer = &(pool->pNXPBuffer[index]); + } + } + } + if(M4OSA_FALSE == bFound) + { + ALOGV("VIDEOEDITOR_BUFFER_getOldestBuffer WARNING no buffer available"); + err = M4ERR_NO_BUFFER_AVAILABLE; + } + return err; +} diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.h b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.h new file mode 100644 index 0000000..a180942 --- /dev/null +++ b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.h @@ -0,0 +1,162 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +/** +************************************************************************* +* @file VideoEditorBuffer.h +* @brief StageFright shell Buffer +************************************************************************* +*/ +#ifndef VIDEOEDITOR_BUFFER_H +#define VIDEOEDITOR_BUFFER_H + +#include "M4OSA_Types.h" +#include "M4OSA_Debug.h" +#include "M4OSA_Memory.h" +#include "M4OSA_CharStar.h" +#include "M4_Utils.h" + +#include "LV_Macros.h" + +#include +using namespace android; + +/*--- Core id for VIDEOEDITOR Buffer allocations ---*/ +#define VIDEOEDITOR_BUFFER_EXTERNAL 0x012F + +/* ----- errors -----*/ +#define M4ERR_NO_BUFFER_AVAILABLE \ + M4OSA_ERR_CREATE(M4_ERR,VIDEOEDITOR_BUFFER_EXTERNAL,0x000001) +#define M4ERR_NO_BUFFER_MATCH \ + M4OSA_ERR_CREATE(M4_ERR,VIDEOEDITOR_BUFFER_EXTERNAL,0x000002) + +typedef enum { + VIDEOEDITOR_BUFFER_kEmpty = 0, + VIDEOEDITOR_BUFFER_kFilled, +} VIDEOEDITOR_BUFFER_State; + +/** + ************************************************************************ + * Structure LVOMX_BUFFER_Buffer + * @brief One OMX Buffer and data related to it + ************************************************************************ +*/ +typedef struct { + MediaBuffer* mBuffer; /**< Pointer to the data for intel platform*/ + M4OSA_UInt32 size; + VIDEOEDITOR_BUFFER_State state; /**< Buffer state */ + M4OSA_UInt32 idx; /**< Index of the buffer inside the pool */ + M4_MediaTime buffCTS; /**< Time stamp of the buffer */ +} VIDEOEDITOR_BUFFER_Buffer; + +/** + ************************************************************************ + * Structure LVOMX_BUFFER_Pool + * @brief Structure to manage buffers + ************************************************************************ +*/ +typedef struct { + VIDEOEDITOR_BUFFER_Buffer* pNXPBuffer; + M4OSA_UInt32 NB; + M4OSA_Char* poolName; +} VIDEOEDITOR_BUFFER_Pool; + +#ifdef __cplusplus +extern "C" +{ +#endif //__cplusplus + +/** + ************************************************************************ + M4OSA_ERR VIDEOEDITOR_BUFFER_allocatePool(VIDEOEDITOR_BUFFER_Pool** ppool, + * M4OSA_UInt32 nbBuffers) + * @brief Allocate a pool of nbBuffers buffers + * + * @param ppool : IN The buffer pool to create + * @param nbBuffers : IN The number of buffers in the pool + * @param poolName : IN a name given to the pool + * @return Error code + ************************************************************************ +*/ +M4OSA_ERR VIDEOEDITOR_BUFFER_allocatePool(VIDEOEDITOR_BUFFER_Pool** ppool, + M4OSA_UInt32 nbBuffers, M4OSA_Char* poolName); + +/** + ************************************************************************ + M4OSA_ERR VIDEOEDITOR_BUFFER_freePool(LVOMX_BUFFER_Pool* ppool) + * @brief Deallocate a buffer pool + * + * @param ppool : IN The buffer pool to free + * @return Error code + ************************************************************************ +*/ +M4OSA_ERR VIDEOEDITOR_BUFFER_freePool(VIDEOEDITOR_BUFFER_Pool* ppool); + +/** + ************************************************************************ +M4OSA_ERR VIDEOEDITOR_BUFFER_freePool_Ext(LVOMX_BUFFER_Pool* ppool) + * @brief Deallocate a buffer pool + * + * @param ppool : IN The buffer pool to free + * @return Error code + ************************************************************************ +*/ +M4OSA_ERR VIDEOEDITOR_BUFFER_freePool_Ext(VIDEOEDITOR_BUFFER_Pool* ppool); + +/** + ************************************************************************ + M4OSA_ERR VIDEOEDITOR_BUFFER_getBuffer(VIDEOEDITOR_BUFFER_Pool* ppool, + * VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) + * 
@brief Returns a buffer in a given state + * + * @param ppool : IN The buffer pool + * @param desiredState : IN The buffer state + * @param pNXPBuffer : IN The selected buffer + * @return Error code + ************************************************************************ +*/ +M4OSA_ERR VIDEOEDITOR_BUFFER_getBuffer(VIDEOEDITOR_BUFFER_Pool* ppool, + VIDEOEDITOR_BUFFER_State desiredState, + VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer); + +/** + ************************************************************************ + void VIDEOEDITOR_BUFFER_getBufferForDecoder(VIDEOEDITOR_BUFFER_Pool* ppool, + * VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) + * @brief Make sure there are buffers for decoder + * + * @param ppool : IN The buffer pool + * @param desiredState : IN The buffer state + * @return Error code + ************************************************************************ +*/ +void VIDEOEDITOR_BUFFER_getBufferForDecoder(VIDEOEDITOR_BUFFER_Pool* ppool); + +M4OSA_ERR VIDEOEDITOR_BUFFER_initPoolBuffers(VIDEOEDITOR_BUFFER_Pool* ppool, + M4OSA_UInt32 lSize); + +M4OSA_ERR VIDEOEDITOR_BUFFER_initPoolBuffers_Ext(VIDEOEDITOR_BUFFER_Pool* ppool, + M4OSA_UInt32 lSize); + + +M4OSA_ERR VIDEOEDITOR_BUFFER_getOldestBuffer(VIDEOEDITOR_BUFFER_Pool *pool, + VIDEOEDITOR_BUFFER_State desiredState, + VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer); + +#ifdef __cplusplus +} +#endif //__cplusplus +#endif /*VIDEOEDITOR_BUFFER_H*/ + diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp index 9e7bafd..7f5edcd 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp @@ -25,6 +25,7 @@ * HEADERS * *******************/ +#include "VideoEditorBuffer.h" #include "VideoEditorVideoDecoder_internal.h" #include "VideoEditorUtils.h" #include "M4VD_Tools.h" @@ -35,7 +36,7 @@ /******************** * DEFINITIONS * ********************/ -#define MAX_DEC_BUFFERS 10 +#define MAX_DEC_BUFFERS 4 /******************** * SOURCE CLASS * @@ -853,13 +854,13 @@ M4OSA_ERR VideoEditorVideoDecoder_configureFromMetadata(M4OSA_Context pContext, // Configure the buffer pool if( M4OSA_NULL != pDecShellContext->m_pDecBufferPool ) { ALOGV("VideoDecoder_configureFromMetadata : reset the buffer pool"); - VIDEOEDITOR_BUFFER_freePool(pDecShellContext->m_pDecBufferPool); + VIDEOEDITOR_BUFFER_freePool_Ext(pDecShellContext->m_pDecBufferPool); pDecShellContext->m_pDecBufferPool = M4OSA_NULL; } err = VIDEOEDITOR_BUFFER_allocatePool(&pDecShellContext->m_pDecBufferPool, MAX_DEC_BUFFERS, (M4OSA_Char*)"VIDEOEDITOR_DecodedBufferPool"); VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - err = VIDEOEDITOR_BUFFER_initPoolBuffers(pDecShellContext->m_pDecBufferPool, + err = VIDEOEDITOR_BUFFER_initPoolBuffers_Ext(pDecShellContext->m_pDecBufferPool, frameSize + pDecShellContext->mGivenWidth * 2); VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); @@ -870,7 +871,7 @@ cleanUp: } else { if( (M4OSA_NULL != pDecShellContext) && \ (M4OSA_NULL != pDecShellContext->m_pDecBufferPool) ) { - VIDEOEDITOR_BUFFER_freePool(pDecShellContext->m_pDecBufferPool); + VIDEOEDITOR_BUFFER_freePool_Ext(pDecShellContext->m_pDecBufferPool); pDecShellContext->m_pDecBufferPool = M4OSA_NULL; } ALOGV("VideoEditorVideoDecoder_configureFromMetadata ERROR 0x%X", err); @@ -891,20 +892,22 @@ M4OSA_ERR VideoEditorVideoDecoder_destroy(M4OSA_Context pContext) { // Release the color converter delete pDecShellContext->mI420ColorConverter; + // Release 
memory + if( pDecShellContext->m_pDecBufferPool != M4OSA_NULL ) { + VIDEOEDITOR_BUFFER_freePool_Ext(pDecShellContext->m_pDecBufferPool); + pDecShellContext->m_pDecBufferPool = M4OSA_NULL; + } + // Destroy the graph if( pDecShellContext->mVideoDecoder != NULL ) { ALOGV("### VideoEditorVideoDecoder_destroy : releasing decoder"); pDecShellContext->mVideoDecoder->stop(); pDecShellContext->mVideoDecoder.clear(); } + pDecShellContext->mClient.disconnect(); pDecShellContext->mReaderSource.clear(); - // Release memory - if( pDecShellContext->m_pDecBufferPool != M4OSA_NULL ) { - VIDEOEDITOR_BUFFER_freePool(pDecShellContext->m_pDecBufferPool); - pDecShellContext->m_pDecBufferPool = M4OSA_NULL; - } SAFE_FREE(pDecShellContext); pContext = NULL; @@ -1339,6 +1342,7 @@ M4OSA_ERR VideoEditorVideoDecoder_decode(M4OSA_Context context, MediaBuffer* pNextBuffer = NULL; status_t errStatus; bool needSeek = bJump; + bool needSave = M4OSA_TRUE; ALOGV("VideoEditorVideoDecoder_decode begin"); @@ -1368,7 +1372,11 @@ M4OSA_ERR VideoEditorVideoDecoder_decode(M4OSA_Context context, while (pDecoderBuffer == NULL || pDecShellContext->m_lastDecodedCTS + tolerance < *pTime) { ALOGV("VideoEditorVideoDecoder_decode, frameCTS = %lf, DecodeUpTo = %lf", pDecShellContext->m_lastDecodedCTS, *pTime); - + if (M4OSA_TRUE == needSave) { + VIDEOEDITOR_BUFFER_getBufferForDecoder(pDecShellContext->m_pDecBufferPool); + } else { + needSave = M4OSA_TRUE; + } // Read the buffer from the stagefright decoder if (needSeek) { MediaSource::ReadOptions options; @@ -1388,6 +1396,7 @@ M4OSA_ERR VideoEditorVideoDecoder_decode(M4OSA_Context context, // If we decoded a buffer before EOS, we still need to put it // into the queue. if (pDecoderBuffer && bJump) { + pDecoderBuffer->add_ref(); copyBufferToQueue(pDecShellContext, pDecoderBuffer); } goto VIDEOEDITOR_VideoDecode_cleanUP; @@ -1413,14 +1422,11 @@ M4OSA_ERR VideoEditorVideoDecoder_decode(M4OSA_Context context, // and drop the 0-length buffers. if (pNextBuffer->range_length() == 0) { pNextBuffer->release(); + pNextBuffer = NULL; + needSave = M4OSA_FALSE; continue; } - // Now we have a good next buffer, release the previous one. 
- if (pDecoderBuffer != NULL) { - pDecoderBuffer->release(); - pDecoderBuffer = NULL; - } pDecoderBuffer = pNextBuffer; // Record the timestamp of last decoded buffer @@ -1452,6 +1458,12 @@ M4OSA_ERR VideoEditorVideoDecoder_decode(M4OSA_Context context, if (lerr != M4NO_ERROR) { goto VIDEOEDITOR_VideoDecode_cleanUP; } + } else { + if (pDecoderBuffer != NULL) { + pDecoderBuffer->release(); + pDecoderBuffer = NULL; + } + needSave = M4OSA_FALSE; } } @@ -1463,10 +1475,6 @@ M4OSA_ERR VideoEditorVideoDecoder_decode(M4OSA_Context context, VIDEOEDITOR_VideoDecode_cleanUP: *pTime = pDecShellContext->m_lastDecodedCTS; - if (pDecoderBuffer != NULL) { - pDecoderBuffer->release(); - pDecoderBuffer = NULL; - } ALOGV("VideoEditorVideoDecoder_decode: end with 0x%x", lerr); return lerr; @@ -1486,7 +1494,11 @@ static M4OSA_ERR copyBufferToQueue( lerr = VIDEOEDITOR_BUFFER_getOldestBuffer( pDecShellContext->m_pDecBufferPool, VIDEOEDITOR_BUFFER_kFilled, &tmpDecBuffer); + tmpDecBuffer->mBuffer->release(); tmpDecBuffer->state = VIDEOEDITOR_BUFFER_kEmpty; + tmpDecBuffer->mBuffer = NULL; + tmpDecBuffer->size = 0; + tmpDecBuffer->buffCTS = -1; lerr = M4NO_ERROR; } @@ -1494,15 +1506,7 @@ static M4OSA_ERR copyBufferToQueue( // Color convert or copy from the given MediaBuffer to our buffer if (pDecShellContext->mI420ColorConverter) { - if (pDecShellContext->mI420ColorConverter->convertDecoderOutputToI420( - (uint8_t *)pDecoderBuffer->data(),// ?? + pDecoderBuffer->range_offset(), // decoderBits - pDecShellContext->mGivenWidth, // decoderWidth - pDecShellContext->mGivenHeight, // decoderHeight - pDecShellContext->mCropRect, // decoderRect - tmpDecBuffer->pData /* dstBits */) < 0) { - ALOGE("convertDecoderOutputToI420 failed"); - lerr = M4ERR_NOT_IMPLEMENTED; - } + tmpDecBuffer->mBuffer = pDecoderBuffer; } else if (pDecShellContext->decOuputColorFormat == OMX_COLOR_FormatYUV420Planar) { int32_t width = pDecShellContext->m_pVideoStreamhandler->m_videoWidth; int32_t height = pDecShellContext->m_pVideoStreamhandler->m_videoHeight; @@ -1515,20 +1519,20 @@ static M4OSA_ERR copyBufferToQueue( { M4OSA_MemAddr8 pTmpBuff = (M4OSA_MemAddr8)pDecoderBuffer->data() + pDecoderBuffer->range_offset(); - memcpy((void *)tmpDecBuffer->pData, (void *)pTmpBuff, yPlaneSize); + memcpy((void *)((M4OSA_MemAddr8)tmpDecBuffer->mBuffer->data() + tmpDecBuffer->mBuffer->range_offset()), (void *)pTmpBuff, yPlaneSize); offsetSrc += pDecShellContext->mGivenWidth * pDecShellContext->mGivenHeight; - memcpy((void *)((M4OSA_MemAddr8)tmpDecBuffer->pData + yPlaneSize), + memcpy((void *)((M4OSA_MemAddr8)tmpDecBuffer->mBuffer->data() + tmpDecBuffer->mBuffer->range_offset() + yPlaneSize), (void *)(pTmpBuff + offsetSrc), uvPlaneSize); offsetSrc += (pDecShellContext->mGivenWidth >> 1) * (pDecShellContext->mGivenHeight >> 1); - memcpy((void *)((M4OSA_MemAddr8)tmpDecBuffer->pData + yPlaneSize + uvPlaneSize), + memcpy((void *)((M4OSA_MemAddr8)tmpDecBuffer->mBuffer->data() + tmpDecBuffer->mBuffer->range_offset() + yPlaneSize + uvPlaneSize), (void *)(pTmpBuff + offsetSrc), uvPlaneSize); } else { M4OSA_MemAddr8 pTmpBuff = (M4OSA_MemAddr8)pDecoderBuffer->data() + pDecoderBuffer->range_offset(); - M4OSA_MemAddr8 pTmpBuffDst = (M4OSA_MemAddr8)tmpDecBuffer->pData; + M4OSA_MemAddr8 pTmpBuffDst = (M4OSA_MemAddr8)(tmpDecBuffer->mBuffer->data() + tmpDecBuffer->mBuffer->range_offset()); int32_t index; for ( index = 0; index < height; index++) @@ -1562,7 +1566,7 @@ static M4OSA_ERR copyBufferToQueue( tmpDecBuffer->buffCTS = pDecShellContext->m_lastDecodedCTS; tmpDecBuffer->state 
= VIDEOEDITOR_BUFFER_kFilled; - tmpDecBuffer->size = pDecoderBuffer->size(); + tmpDecBuffer->size = pDecoderBuffer->range_length(); return lerr; } @@ -1573,7 +1577,7 @@ M4OSA_ERR VideoEditorVideoDecoder_render(M4OSA_Context context, M4OSA_ERR err = M4NO_ERROR; VideoEditorVideoDecoder_Context* pDecShellContext = (VideoEditorVideoDecoder_Context*) context; - M4OSA_UInt32 lindex, i; + M4OSA_UInt32 i; M4OSA_UInt8* p_buf_src, *p_buf_dest; M4VIFI_ImagePlane tmpPlaneIn, tmpPlaneOut; VIDEOEDITOR_BUFFER_Buffer* pTmpVIDEOEDITORBuffer, *pRenderVIDEOEDITORBuffer @@ -1603,12 +1607,6 @@ M4OSA_ERR VideoEditorVideoDecoder_render(M4OSA_Context context, pTmpVIDEOEDITORBuffer = &pDecShellContext->m_pDecBufferPool\ ->pNXPBuffer[i]; if (pTmpVIDEOEDITORBuffer->state == VIDEOEDITOR_BUFFER_kFilled) { - /** Free all those buffers older than last rendered frame. */ - if (pTmpVIDEOEDITORBuffer->buffCTS < pDecShellContext->\ - m_lastRenderCts) { - pTmpVIDEOEDITORBuffer->state = VIDEOEDITOR_BUFFER_kEmpty; - } - /** Get the buffer with appropriate timestamp */ if ( (pTmpVIDEOEDITORBuffer->buffCTS >= pDecShellContext->\ m_lastRenderCts) && @@ -1635,7 +1633,7 @@ M4OSA_ERR VideoEditorVideoDecoder_render(M4OSA_Context context, if( M4OSA_NULL != pDecShellContext->m_pFilter ) { // Filtering was requested - M4VIFI_ImagePlane tmpPlane[3]; + M4VIFI_ImagePlane tmpPlane[2]; // Prepare the output image for conversion tmpPlane[0].u_width = pDecShellContext->m_pVideoStreamhandler->m_videoWidth; @@ -1643,9 +1641,9 @@ M4OSA_ERR VideoEditorVideoDecoder_render(M4OSA_Context context, pDecShellContext->m_pVideoStreamhandler->m_videoHeight; tmpPlane[0].u_topleft = 0; tmpPlane[0].u_stride = tmpPlane[0].u_width; - tmpPlane[0].pac_data = (M4VIFI_UInt8*)pRenderVIDEOEDITORBuffer->pData; + tmpPlane[0].pac_data = (M4VIFI_UInt8*)(pRenderVIDEOEDITORBuffer->mBuffer->data() + pRenderVIDEOEDITORBuffer->mBuffer->range_offset()); tmpPlane[1].u_width = tmpPlane[0].u_width; - tmpPlane[1].u_height = tmpPlane[0].u_height/2; + tmpPlane[1].u_height = tmpPlane[0].u_height>>1; tmpPlane[1].u_topleft = 0; tmpPlane[1].u_stride = tmpPlane[0].u_stride; tmpPlane[1].pac_data = tmpPlane[0].pac_data + @@ -1656,7 +1654,7 @@ M4OSA_ERR VideoEditorVideoDecoder_render(M4OSA_Context context, } else { // Just copy the YUV420P buffer M4OSA_MemAddr8 tempBuffPtr = - (M4OSA_MemAddr8)pRenderVIDEOEDITORBuffer->pData; + (M4OSA_MemAddr8)(pRenderVIDEOEDITORBuffer->mBuffer->data() + pRenderVIDEOEDITORBuffer->mBuffer->range_offset()); M4OSA_UInt32 tempWidth = pDecShellContext->m_pVideoStreamhandler->m_videoWidth; M4OSA_UInt32 tempHeight = @@ -1666,7 +1664,7 @@ M4OSA_ERR VideoEditorVideoDecoder_render(M4OSA_Context context, tempWidth * tempHeight); tempBuffPtr += (tempWidth * tempHeight); memcpy((void *) pOutputPlane[1].pac_data, (void *)tempBuffPtr, - tempWidth * tempHeight/2); + tempWidth * (tempHeight>>1)); } pDecShellContext->mNbRenderedFrames++; -- cgit v1.2.3 From 9dfc976cddb2d401f4446a324f2f8f4dbdd5eb17 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Tue, 4 Sep 2012 11:35:21 +0800 Subject: change to use width/height in metadatabuffer to map surface instead of encoder width/height BZ: 55305 It is to avoid 1088 src picture wrong mapping when encoder width/height is configured as 1080 Change-Id: I6c3769157d1aa598151500effc1efdf9b2bd19d0 Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/64349 Reviewed-by: Zheng, Hongjiang A Reviewed-by: Wang, Elaine Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: Jiang, Fei Reviewed-by: buildbot Tested-by: buildbot --- 
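The core of the change, in reduced form: each vaCreateSurfacesWithAttribute call in the diff below takes its geometry from the per-buffer ValueInfo instead of the configured encode resolution, so a 1088-aligned source picture maps correctly even when the encoder is configured for 1080.

    // Map with the source picture size carried in the metadata buffer,
    // not mComParams.resolution, which may be the smaller encode size.
    vaStatus = vaCreateSurfacesWithAttribute(
            mVADisplay, map->vinfo.width, map->vinfo.height,
            VA_RT_FORMAT_YUV420, 1, &surface, &vaSurfaceAttrib);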
test/mix_encoder.cpp | 38 ++++++++++++++++++++++++++++---------- videoencoder/VideoEncoderBase.cpp | 20 ++++++++++---------- 2 files changed, 38 insertions(+), 20 deletions(-) diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index d4b88c0..5ec64b5 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -45,6 +45,8 @@ static uint32_t gWidth = 1280; static uint32_t gHeight = 720; static uint32_t gStride = 1280; static uint32_t gFrameRate = 30; +static uint32_t gEncodeWidth = 0; +static uint32_t gEncodeHeight = 0; static char* gFile = (char*)"out.264"; @@ -54,6 +56,7 @@ static const char* gRCModeString[4] ={"NO_RC", "CBR", "VBR", "VCM"}; //for uploading src pictures, also for Camera malloc, WiDi clone, raw mode usrptr storage static uint8_t* gUsrptr[gSrcFrames]; +static uint8_t* gMallocPtr[gSrcFrames]; //for metadatabuffer transfer static IntelMetadataBuffer* gIMB[gSrcFrames] = {NULL}; @@ -177,8 +180,8 @@ Encode_Status SetVideoEncoderParam() { ret = gVideoEncoder->getParameters(&gEncoderParams); CHECK_ENCODE_STATUS("getParameters"); - gEncoderParams.resolution.height = gHeight; - gEncoderParams.resolution.width = gWidth; + gEncoderParams.resolution.height = gEncodeHeight; + gEncoderParams.resolution.width = gEncodeWidth; gEncoderParams.frameRate.frameRateDenom = 1; gEncoderParams.frameRate.frameRateNum = gFrameRate; gEncoderParams.rcMode = gRC; @@ -294,7 +297,8 @@ void MallocExternalMemory() for(int i = 0; i < gSrcFrames; i ++) { - gUsrptr[i] = (uint8_t*)malloc(size); + gMallocPtr[i] = (uint8_t*)malloc(size + 4095); + gUsrptr[i] = (uint8_t*)((((int )gMallocPtr[i] + 4095) / 4096 ) * 4096); gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); @@ -556,9 +560,15 @@ int CheckArgs(int argc, char* argv[]) case 's': gSyncEncMode = atoi(optarg); break; + case 'k': + gEncodeWidth = atoi(optarg); + break; + case 'g': + gEncodeHeight = atoi(optarg); + break; case '?': default: - printf("\n./mix_encode -c -b -r -w -h -n -m -s -f \n"); + printf("\n./mix_encode -c -b -r -w -h -k -g -n -m -s -f \n"); printf("\nCodec:\n"); printf("0: H264 (default)\n1: MPEG4\n2: H263\n"); printf("\nRate control:\n"); @@ -569,6 +579,17 @@ int CheckArgs(int argc, char* argv[]) } } + if (gMode == 5 || gMode == 6) + { + gWidth = ((gWidth + 15 ) / 16 ) * 16; + gHeight = ((gHeight + 15 ) / 16 ) * 16; + } + + if (gEncodeWidth == 0 || gEncodeHeight == 0) + { + gEncodeWidth = gWidth; + gEncodeHeight = gHeight; + } return 0; } @@ -619,9 +640,8 @@ int main(int argc, char* argv[]) } printf("\nStart %s Encoding ....\n", codec); - printf("Mode is %s, RC mode is %s, Width=%d, Height=%d, Bitrate=%dbps, EncodeFrames=%d, SyncMode=%d, out file is %s\n\n", gModeString[gMode], gRCModeString[gRCMode], gWidth, gHeight, gBitrate, gEncFrames, gSyncEncMode, gFile); - -//sleep(10); + printf("Mode is %s, RC mode is %s, Src Width=%d, Height=%d, Encode Width=%d, Height=%d \n", gModeString[gMode], gRCModeString[gRCMode], gWidth, gHeight, gEncodeWidth, gEncodeHeight); + printf("Bitrate=%dbps, EncodeFrames=%d, SyncMode=%d, out file is %s\n\n", gBitrate, gEncFrames, gSyncEncMode, gFile); for(int i=0; i<1; i++) { @@ -659,8 +679,6 @@ for(int i=0; i<1; i++) break; } -//sleep(10); - //upload src data for(int i=0; ivinfo.width * map->vinfo.height * 3 / 2; vaSurfaceAttrib.luma_stride = lumaStride; vaSurfaceAttrib.chroma_u_stride = chromaUStride; vaSurfaceAttrib.chroma_v_stride = chromaVStride; @@ -1528,7 +1528,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurface(SurfaceMap *map) { vaSurfaceAttrib.type = 
VAExternalMemoryKernelDRMBufffer; vaStatus = vaCreateSurfacesWithAttribute( - mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, + mVADisplay, map->vinfo.width, map->vinfo.height, VA_RT_FORMAT_YUV420, 1, &surface, &vaSurfaceAttrib); CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf"); @@ -1571,8 +1571,8 @@ Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle(SurfaceMap *map) { vaStatus = vaCreateSurfacesWithAttribute( mVADisplay, - mComParams.resolution.width, - mComParams.resolution.height, + map->vinfo.width, + map->vinfo.height, VA_RT_FORMAT_YUV420, 1, &surface, @@ -1607,7 +1607,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle(SurfaceMap *map) { vaSurfaceAttrib.buffers = &buf; vaSurfaceAttrib.count = 1; - vaSurfaceAttrib.size = map->vinfo.lumaStride * mComParams.resolution.height * 3 / 2; + vaSurfaceAttrib.size = map->vinfo.lumaStride * map->vinfo.height * 3 / 2; vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride; vaSurfaceAttrib.chroma_u_stride = map->vinfo.chromStride; vaSurfaceAttrib.chroma_v_stride = map->vinfo.chromStride; @@ -1619,7 +1619,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle(SurfaceMap *map) { vaSurfaceAttrib.type = VAExternalMemoryKernelDRMBufffer; vaStatus = vaCreateSurfacesWithAttribute( - mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, + mVADisplay, map->vinfo.width, map->vinfo.height, VA_RT_FORMAT_YUV420, 1, &surface, &vaSurfaceAttrib); CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf"); @@ -1650,8 +1650,8 @@ Encode_Status VideoEncoderBase::surfaceMappingForCI(SurfaceMap *map) { vaSurfaceAttrib.buffers[0] = (uint32_t)map->value; vaStatus = vaCreateSurfacesWithAttribute( mVADisplay, - mComParams.resolution.width, - mComParams.resolution.height, + map->vinfo.width, + map->vinfo.height, VA_RT_FORMAT_YUV420, 1, &surface, @@ -1686,7 +1686,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { vaSurfaceAttrib.type = VAExternalMemoryUserPointer; vaStatus = vaCreateSurfacesWithAttribute( - mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420, + mVADisplay, map->vinfo.width, map->vinfo.height, VA_RT_FORMAT_YUV420, 1, &surface, &vaSurfaceAttrib); CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromMalloc"); @@ -1705,7 +1705,7 @@ Encode_Status VideoEncoderBase::surfaceMapping(SurfaceMap *map) { Encode_Status status; -LOG_I("surfaceMapping mode=%d, format=%d, lumaStride=%d, width=%d, heith=%d, value=%x\n", map->vinfo.mode, map->vinfo.format, map->vinfo.lumaStride, map->vinfo.width, map->vinfo.height, map->value); +LOG_I("surfaceMapping mode=%d, format=%d, lumaStride=%d, width=%d, height=%d, value=%x\n", map->vinfo.mode, map->vinfo.format, map->vinfo.lumaStride, map->vinfo.width, map->vinfo.height, map->value); switch (map->vinfo.mode) { case MEM_MODE_CI: status = surfaceMappingForCI(map); -- cgit v1.2.3 From d4cfd51dc964fe9f48933f1b5a15dbcd9e5c7647 Mon Sep 17 00:00:00 2001 From: Manjunath Date: Mon, 10 Sep 2012 12:56:26 +0530 Subject: [JB] Video with WMA9-Pro audio can't be played. BZ: 56385 This change gives files with WMA Pro audio content the mime type "audio/unknown-type" rather than "MIME_TYPE_AUDIO_WMA", since WMA Pro audio decoding isn't supported by the current WMA Decoder.
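To make the MIME dispatch above concrete, here is a self-contained C++ sketch of the resulting CodecID2MIME logic, condensed to the cases visible in this hunk (the WAVE_FORMAT_* values are the standard Microsoft registry tags and the string literals stand in for the MEDIA_MIMETYPE_* constants; both are assumptions for illustration, not copied from AsfExtractor.cpp):

    #include <cstdint>
    #include <cstdio>

    // Standard Microsoft WAVE format tags (public registry values).
    enum : uint32_t {
        WAVE_FORMAT_MSAUDIO1         = 0x0160,  // WMA version 1
        WAVE_FORMAT_WMAUDIO2         = 0x0161,  // WMA version 2 (7, 8, 9 series)
        WAVE_FORMAT_WMAUDIO3X        = 0x0162,  // WMA 9/10 Professional
        WAVE_FORMAT_WMAUDIO_LOSSLESS = 0x0163,  // WMA 9 Lossless
    };

    static const char* CodecID2MIME(uint32_t codecID) {
        switch (codecID) {
        case WAVE_FORMAT_MSAUDIO1:
        case WAVE_FORMAT_WMAUDIO2:
            return "audio/x-ms-wma";
        // After this patch, WAVE_FORMAT_WMAUDIO3X (WMA Pro) deliberately falls
        // through to the default: the platform decoder cannot handle it, so
        // advertising the WMA MIME type would only make playback fail later.
        default:
            return "audio/unknown-type";
        }
    }

    int main() {
        std::printf("WMA2:    %s\n", CodecID2MIME(WAVE_FORMAT_WMAUDIO2));
        std::printf("WMA Pro: %s\n", CodecID2MIME(WAVE_FORMAT_WMAUDIO3X));
        return 0;
    }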
Change-Id: Ic57f5c4d2733df69f3135db20e186d15fd5ce329 Signed-off-by: Manjunath Reviewed-on: http://android.intel.com:8080/65406 Reviewed-by: Kandasamy, Muthukumar Reviewed-by: Sameullah, MazharX Reviewed-by: Sikkandar D, Madar Tested-by: Gupta, ArvindX K Reviewed-by: buildbot Tested-by: buildbot --- frameworks/asf_extractor/AsfExtractor.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/frameworks/asf_extractor/AsfExtractor.cpp b/frameworks/asf_extractor/AsfExtractor.cpp index 1bdf0a0..dc8643f 100644 --- a/frameworks/asf_extractor/AsfExtractor.cpp +++ b/frameworks/asf_extractor/AsfExtractor.cpp @@ -374,8 +374,6 @@ static const char* CodecID2MIME(uint32_t codecID) { case WAVE_FORMAT_MSAUDIO1: // WMA version 2 (7, 8, 9 series) case WAVE_FORMAT_WMAUDIO2: - // WMA 9/10 profressional (WMA version 3) - case WAVE_FORMAT_WMAUDIO3X: return MEDIA_MIMETYPE_AUDIO_WMA; // WMA 9 lossless case WAVE_FORMAT_WMAUDIO_LOSSLESS: -- cgit v1.2.3 From a9d8bc36b0307dc44f275281e312f8c8a70c89eb Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Wed, 19 Sep 2012 11:47:38 +0800 Subject: remove obsolete functions from IntelMetadataBuffer library BZ: 58010 Remove GetBytes and SetBytes since all stakeholders have changed to use the new functions Serialize and UnSerialize. Change-Id: I3103dc36c2bf8ff8e9de1e7992d46a842f0b6851 Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/66983 Reviewed-by: Feng, Wei Reviewed-by: Jiang, Fei Reviewed-by: Tang, Richard Reviewed-by: Tao, Tao Q Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/IntelMetadataBuffer.cpp | 10 ---------- videoencoder/IntelMetadataBuffer.h | 5 ----- 2 files changed, 15 deletions(-) diff --git a/videoencoder/IntelMetadataBuffer.cpp b/videoencoder/IntelMetadataBuffer.cpp index eb9fe43..4a2b42f 100644 --- a/videoencoder/IntelMetadataBuffer.cpp +++ b/videoencoder/IntelMetadataBuffer.cpp @@ -263,11 +263,6 @@ IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size) return IMB_SUCCESS; } -IMB_Result IntelMetadataBuffer::SetBytes(uint8_t* data, uint32_t size) -{ - return UnSerialize(data, size); -} - IMB_Result IntelMetadataBuffer::Serialize(uint8_t* &data, uint32_t& size) { if (mBytes == NULL) @@ -307,11 +302,6 @@ IMB_Result IntelMetadataBuffer::Serialize(uint8_t* &data, uint32_t& size) return IMB_SUCCESS; } -IMB_Result IntelMetadataBuffer::GetBytes(uint8_t* &data, uint32_t& size) -{ - return Serialize(data, size); -} - uint32_t IntelMetadataBuffer::GetMaxBufferSize() { return 256; } diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h index 2eacc5f..e133c57 100644 --- a/videoencoder/IntelMetadataBuffer.h +++ b/videoencoder/IntelMetadataBuffer.h @@ -84,11 +84,6 @@ public: IMB_Result GetExtraValues(int32_t* &values, uint32_t &num); IMB_Result SetExtraValues(int32_t *values, uint32_t num); - //for bytes input, also for parser, will be obsoleted - IMB_Result SetBytes(uint8_t* data, uint32_t size); - //for bytes output, also for generator, will be obsoleted - IMB_Result GetBytes(uint8_t* &data, uint32_t& size); - //New API for bytes input/ouput, UnSerialize=SetBytes, Serialize=GetBytes IMB_Result UnSerialize(uint8_t* data, uint32_t size); IMB_Result Serialize(uint8_t* &data, uint32_t& size); -- cgit v1.2.3 From 799e2cd4a66db912c5605fd5b160b94e897ebe7a Mon Sep 17 00:00:00 2001 From: "Gu, Wangyi" Date: Tue, 18 Sep 2012 14:51:03 +0800 Subject: [Movie Studio] add error handling mechanism for MP4_MPEG4 video clips BZ: 56425 MP4_MPEG4 video clips are not supported by the movie
studio (as defined by default in AOSP), but the original Android design can't handle this well, so add a proper error handling mechanism for when a user imports such a clip. Change-Id: I056caf6dcec537eb8f71b81a60963924df95aed1 Signed-off-by: Gu, Wangyi Reviewed-on: http://android.intel.com:8080/66855 Reviewed-by: Feng, Wei Reviewed-by: Tang, Richard Reviewed-by: Jiang, Fei Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- .../videoedit/stagefrightshells/VideoEditor3gpReader.cpp | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp b/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp index 5026073..6cf71e1 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp @@ -509,6 +509,20 @@ M4OSA_ERR VideoEditor3gpReader_open(M4OSA_Context pContext, return M4ERR_UNSUPPORTED_MEDIA_TYPE; } + M4OSA_UInt8 temp, trackCount; + const char *mime; + temp = 0; + trackCount = pC->mExtractor->countTracks(); + while (temp < trackCount) { + meta = pC->mExtractor->getTrackMetaData(temp); + CHECK(meta->findCString(kKeyMIMEType, &mime)); + if (!strcasecmp(mime,MEDIA_MIMETYPE_AUDIO_MPEG)) { + ALOGV("VideoEditorMp3Reader_open error - audio/mpeg is not supported"); + return M4ERR_READER_UNKNOWN_STREAM_TYPE; + } + temp++; + } + ALOGV("VideoEditor3gpReader_open end "); return err; } -- cgit v1.2.3 From bc7c05694deaf42aff10dcfb4a0587342e651055 Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Mon, 17 Sep 2012 11:58:33 +0800 Subject: Setup libva correctly in gralloc buffer mode. BZ: 34659 Gralloc buffers in NV12 format have their stride aligned to 512 bytes; update the libva parameters accordingly. Change-Id: Ia88f54af430acb60ebb2bdee6789cbe535619f95 Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/66723 Reviewed-by: Yuan, Shengquan Reviewed-by: Tang, Richard Reviewed-by: Wang, Elaine Reviewed-by: Feng, Wei Reviewed-by: Jiang, Fei Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index aaa230e..d511ec1 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1562,7 +1562,8 @@ Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle(SurfaceMap *map) { LOG_I("gfxhandle = %d\n", map->value); vaSurfaceAttrib.count = 1; - vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride; + // OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar + vaSurfaceAttrib.luma_stride = (mComParams.resolution.width + 0x1ff) & (~0x1ff); vaSurfaceAttrib.pixel_format = map->vinfo.format; vaSurfaceAttrib.width = mComParams.resolution.width; vaSurfaceAttrib.height = mComParams.resolution.height; -- cgit v1.2.3 From 1d6cb74e026b584ee7a48711d08be53148da5aa7 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Wed, 19 Sep 2012 22:19:31 +0800 Subject: Exit sharedbuffer mode when failing to create HW encoder BZ: 58022 On MFLD and CTP there is a hardware limitation: only one video stream can be encoded at a time. When the encoder HW is occupied by another app, such as the camera, the video editor can't create the HW encoder successfully. In this situation, we must exit the sharedbuffer mode.
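The recovery path added in the diff below relies on a two-sided handshake in libsharedbuffer: sharing mode only ends once both the encoder side and the source side have requested it, which is why the encoder also issues the source-side request on the source's behalf. A toy C++ model of that handshake, under the assumption that the registry simply tracks both opt-outs (all names here are illustrative, not the real libsharedbuffer API):

    #include <cstdio>

    // Toy registry: sharing mode ends only after both parties opt out.
    class ToyShareRegistry {
        bool encoderOptedOut = false;
        bool sourceOptedOut  = false;
    public:
        void encoderRequestToDisableSharingMode() { encoderOptedOut = true; }
        void sourceRequestToDisableSharingMode()  { sourceOptedOut  = true; }
        bool sharingDisabled() const { return encoderOptedOut && sourceOptedOut; }
    };

    int main() {
        ToyShareRegistry r;
        // Encoder start() failed because the HW encoder is busy: the encoder
        // opts out first...
        r.encoderRequestToDisableSharingMode();
        // ...then opts out for the source as well, avoiding the extra
        // synchronization the patch comments describe.
        r.sourceRequestToDisableSharingMode();
        std::printf("sharing disabled: %s\n", r.sharingDisabled() ? "yes" : "no");
        return 0;
    }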
Change-Id: I4916621c2bc1fc55d40c1fcac75e443127d7d0af Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/66989 Reviewed-by: Tang, Richard Reviewed-by: Zhao, Leo Reviewed-by: Liang, Dan Reviewed-by: Jiang, Fei Reviewed-by: Wang, Yi A Reviewed-by: Gu, Wangyi Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- .../IntelVideoEditorAVCEncoder.cpp | 24 ++++++++++++++++++++ .../IntelVideoEditorH263Encoder.cpp | 26 ++++++++++++++++++++++ 2 files changed, 50 insertions(+) diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp index 9681bba..d7e0529 100644 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp @@ -229,6 +229,30 @@ status_t IntelVideoEditorAVCEncoder::start(MetaData *params) { err = mVAEncoder->start(); if (err!= ENCODE_SUCCESS) { LOGE("Failed to initialize the encoder: %d", err); + + /* We should exit the sharedbuffer mode, when failing to + create the HW video encoder. + */ + + androidCreateThread(SBShutdownFunc,this); + LOGI("Successfull create thread to exit shared buffer mode!"); + + mSource->stop(); + + sp<BufferShareRegistry> r = BufferShareRegistry::getInstance(); + err = r->encoderRequestToDisableSharingMode(); + LOGV("encoderRequestToDisableSharingMode returned %d\n", err); + + /* libsharedbuffer wants the source to call this after the encoder calls + * encoderRequestToDisableSharingMode. Instead of doing complicated + * synchronization, let's just call this ourselves on the source's + * behalf. */ + err = r->sourceRequestToDisableSharingMode(); + LOGV("sourceRequestToDisableSharingMode returned %d\n", err); + + releaseVideoEncoder(mVAEncoder); + mVAEncoder = NULL; + return UNKNOWN_ERROR; } diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.cpp index 02f91f3..7f576b2 100644 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.cpp +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.cpp @@ -173,6 +173,32 @@ status_t IntelVideoEditorH263Encoder::start(MetaData *params) { err = mVAEncoder->start(); if (err!= ENCODE_SUCCESS) { LOGE("Failed to initialize the encoder: %d", err); + + /* We should exit the sharedbuffer mode, when failing to + create the HW video encoder. + */ + + androidCreateThread(SBShutdownFunc,this); + LOGI("Successfull create thread to exit shared buffer mode!"); + + mSource->stop(); + + sp<BufferShareRegistry> r = BufferShareRegistry::getInstance(); + err = r->encoderRequestToDisableSharingMode(); + LOGV("encoderRequestToDisableSharingMode returned %d\n", err); + + /* libsharedbuffer wants the source to call this after the encoder calls + * encoderRequestToDisableSharingMode. Instead of doing complicated + * synchronization, let's just call this ourselves on the source's + * behalf. + */ + + err = r->sourceRequestToDisableSharingMode(); + LOGV("sourceRequestToDisableSharingMode returned %d\n", err); + + releaseVideoEncoder(mVAEncoder); + mVAEncoder = NULL; + return UNKNOWN_ERROR; } -- cgit v1.2.3 From ecc50ef31841747dcb001a1434ee0177f8ef421e Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Fri, 21 Sep 2012 18:53:27 +0800 Subject: Remove unused code and config files from the video middleware stack BZ: 58358 Remove unused code and config files from the video middleware stack.
Change-Id: I9a3d9f4400a0cd6835d659c2cfdd35409094210f Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/67297 Reviewed-by: Xiao, FengX Reviewed-by: Liu, BolunX Reviewed-by: Zhao, Leo Reviewed-by: Wang, Yi A Reviewed-by: Guo, Nana N Reviewed-by: Tang, Richard Reviewed-by: Qiu, Junhai Reviewed-by: Chen, Tianmi Reviewed-by: Gu, Wangyi Reviewed-by: Sun, Mingruo Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- Android.mk | 5 +- mix_common/AUTHORS | 1 - mix_common/COPYING | 26 - mix_common/ChangeLog | 32 - mix_common/INSTALL | 291 -- mix_common/NEWS | 1 - mix_common/README | 1 - mix_common/m4/as-mix-version.m4 | 35 - mix_common/mixcommon.pc.in | 11 - mix_common/src/Android.mk | 37 - mix_common/src/j_hashtable.cpp | 321 --- mix_common/src/j_hashtable.h | 52 - mix_common/src/j_queue.cpp | 131 - mix_common/src/j_queue.h | 33 - mix_common/src/j_slist.cpp | 214 -- mix_common/src/j_slist.h | 41 - mix_common/src/mixdrmparams.cpp | 45 - mix_common/src/mixdrmparams.h | 69 - mix_common/src/mixlog.cpp | 270 -- mix_common/src/mixlog.h | 71 - mix_common/src/mixparams.cpp | 126 - mix_common/src/mixparams.h | 161 -- mix_common/src/mixresult.h | 94 - mix_common/src/mixtypes.h | 49 - mix_video/AUTHORS | 1 - mix_video/COPYING | 26 - mix_video/ChangeLog | 105 - mix_video/INSTALL | 4 - mix_video/NEWS | 2 - mix_video/README | 2 - mix_video/docs/readme | 17 - .../reference/MixVideo/BackupMixVideo-docs.sgml | 53 - .../docs/reference/MixVideo/MixVideo-sections.txt | 452 --- mix_video/docs/reference/MixVideo/MixVideo.types | 22 - .../docs/reference/MixVideo/html/MixBuffer.html | 227 -- .../docs/reference/MixVideo/html/MixDisplay.html | 591 ---- .../reference/MixVideo/html/MixDisplayX11.html | 271 -- .../docs/reference/MixVideo/html/MixDrmParams.html | 137 - .../MixVideo/html/MixVideo-mixvideodef.html | 221 -- .../docs/reference/MixVideo/html/MixVideo.devhelp | 244 -- .../docs/reference/MixVideo/html/MixVideo.devhelp2 | 244 -- .../docs/reference/MixVideo/html/MixVideo.html | 958 ------- .../MixVideo/html/MixVideoConfigParams.html | 162 -- .../MixVideo/html/MixVideoConfigParamsDec.html | 797 ------ .../MixVideo/html/MixVideoConfigParamsDecH264.html | 130 - .../MixVideo/html/MixVideoConfigParamsDecMP42.html | 240 -- .../MixVideo/html/MixVideoConfigParamsDecVC1.html | 137 - .../MixVideo/html/MixVideoConfigParamsEnc.html | 1245 -------- .../MixVideo/html/MixVideoConfigParamsEncH264.html | 398 --- .../html/MixVideoConfigParamsEncMPEG4.html | 319 --- .../MixVideo/html/MixVideoDecodeParams.html | 281 -- .../MixVideo/html/MixVideoEncodeParams.html | 155 - .../reference/MixVideo/html/MixVideoFrame.html | 423 --- .../MixVideo/html/MixVideoInitParams.html | 214 -- .../MixVideo/html/MixVideoRenderParams.html | 418 --- .../reference/MixVideo/html/api-index-full.html | 466 --- mix_video/docs/reference/MixVideo/html/ch01.html | 92 - mix_video/docs/reference/MixVideo/html/index.html | 96 - mix_video/docs/reference/MixVideo/html/index.sgml | 307 -- .../docs/reference/MixVideo/html/object-tree.html | 55 - mix_video/docs/reference/MixVideo/html/style.css | 167 -- mix_video/m4/as-mix-version.m4 | 35 - mix_video/mixvideo.pc.in | 12 - mix_video/mixvideoint.pc.in | 12 - mix_video/src/Android.mk | 119 - mix_video/src/mixbuffer.cpp | 144 - mix_video/src/mixbuffer.h | 108 - mix_video/src/mixbuffer_private.h | 34 - mix_video/src/mixbufferpool.cpp | 377 --- mix_video/src/mixbufferpool.h | 104 - mix_video/src/mixdisplay.cpp | 130 - mix_video/src/mixdisplay.h | 113 - 
mix_video/src/mixdisplayandroid.cpp | 158 -- mix_video/src/mixdisplayandroid.h | 133 - mix_video/src/mixdisplayx11.cpp | 210 -- mix_video/src/mixdisplayx11.h | 180 -- mix_video/src/mixframemanager.cpp | 686 ----- mix_video/src/mixframemanager.h | 153 - mix_video/src/mixsurfacepool.cpp | 564 ---- mix_video/src/mixsurfacepool.h | 99 - mix_video/src/mixvideo.cpp | 2246 --------------- mix_video/src/mixvideo.h | 574 ---- mix_video/src/mixvideo_private.h | 53 - mix_video/src/mixvideocaps.cpp | 164 -- mix_video/src/mixvideocaps.h | 95 - mix_video/src/mixvideoconfigparams.cpp | 86 - mix_video/src/mixvideoconfigparams.h | 76 - mix_video/src/mixvideoconfigparamsdec.cpp | 576 ---- mix_video/src/mixvideoconfigparamsdec.h | 517 ---- mix_video/src/mixvideoconfigparamsdec_h264.cpp | 105 - mix_video/src/mixvideoconfigparamsdec_h264.h | 121 - mix_video/src/mixvideoconfigparamsdec_mp42.cpp | 112 - mix_video/src/mixvideoconfigparamsdec_mp42.h | 148 - mix_video/src/mixvideoconfigparamsdec_vc1.cpp | 71 - mix_video/src/mixvideoconfigparamsdec_vc1.h | 95 - mix_video/src/mixvideoconfigparamsenc.cpp | 883 ------ mix_video/src/mixvideoconfigparamsenc.h | 765 ----- mix_video/src/mixvideoconfigparamsenc_h263.cpp | 133 - mix_video/src/mixvideoconfigparamsenc_h263.h | 146 - mix_video/src/mixvideoconfigparamsenc_h264.cpp | 256 -- mix_video/src/mixvideoconfigparamsenc_h264.h | 300 -- mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp | 141 - mix_video/src/mixvideoconfigparamsenc_mpeg4.h | 173 -- mix_video/src/mixvideoconfigparamsenc_preview.cpp | 47 - mix_video/src/mixvideoconfigparamsenc_preview.h | 73 - mix_video/src/mixvideodecodeparams.cpp | 141 - mix_video/src/mixvideodecodeparams.h | 195 -- mix_video/src/mixvideodef.h | 259 -- mix_video/src/mixvideoencodeparams.cpp | 138 - mix_video/src/mixvideoencodeparams.h | 103 - mix_video/src/mixvideoformat.cpp | 285 -- mix_video/src/mixvideoformat.h | 177 -- mix_video/src/mixvideoformat_h264.cpp | 1598 ----------- mix_video/src/mixvideoformat_h264.h | 111 - mix_video/src/mixvideoformat_mp42.cpp | 1090 ------- mix_video/src/mixvideoformat_mp42.h | 95 - mix_video/src/mixvideoformat_vc1.cpp | 1133 -------- mix_video/src/mixvideoformat_vc1.h | 95 - mix_video/src/mixvideoformatenc.cpp | 790 ------ mix_video/src/mixvideoformatenc.h | 218 -- mix_video/src/mixvideoformatenc_h263.cpp | 2175 -------------- mix_video/src/mixvideoformatenc_h263.h | 124 - mix_video/src/mixvideoformatenc_h264.cpp | 3002 -------------------- mix_video/src/mixvideoformatenc_h264.h | 142 - mix_video/src/mixvideoformatenc_mpeg4.cpp | 1664 ----------- mix_video/src/mixvideoformatenc_mpeg4.h | 119 - mix_video/src/mixvideoformatenc_preview.cpp | 1120 -------- mix_video/src/mixvideoformatenc_preview.h | 110 - mix_video/src/mixvideoformatqueue.h | 23 - mix_video/src/mixvideoframe.cpp | 364 --- mix_video/src/mixvideoframe.h | 257 -- mix_video/src/mixvideoframe_private.h | 83 - mix_video/src/mixvideoinitparams.cpp | 126 - mix_video/src/mixvideoinitparams.h | 113 - mix_video/src/mixvideolog.h | 27 - mix_video/src/mixvideorenderparams.cpp | 301 -- mix_video/src/mixvideorenderparams.h | 226 -- mix_video/src/mixvideorenderparams_internal.h | 22 - mix_video/src/mixvideothread.cpp | 50 - mix_video/src/mixvideothread.h | 45 - mix_video/src/test.cpp | 87 - mix_video/test/src/test_framemanager.cpp | 200 -- 142 files changed, 1 insertion(+), 39930 deletions(-) delete mode 100644 mix_common/AUTHORS delete mode 100644 mix_common/COPYING delete mode 100644 mix_common/ChangeLog delete mode 100644 mix_common/INSTALL delete mode 
100644 mix_common/NEWS delete mode 100644 mix_common/README delete mode 100644 mix_common/m4/as-mix-version.m4 delete mode 100644 mix_common/mixcommon.pc.in delete mode 100644 mix_common/src/Android.mk delete mode 100644 mix_common/src/j_hashtable.cpp delete mode 100644 mix_common/src/j_hashtable.h delete mode 100644 mix_common/src/j_queue.cpp delete mode 100644 mix_common/src/j_queue.h delete mode 100644 mix_common/src/j_slist.cpp delete mode 100644 mix_common/src/j_slist.h delete mode 100644 mix_common/src/mixdrmparams.cpp delete mode 100644 mix_common/src/mixdrmparams.h delete mode 100644 mix_common/src/mixlog.cpp delete mode 100644 mix_common/src/mixlog.h delete mode 100644 mix_common/src/mixparams.cpp delete mode 100644 mix_common/src/mixparams.h delete mode 100644 mix_common/src/mixresult.h delete mode 100644 mix_common/src/mixtypes.h delete mode 100644 mix_video/AUTHORS delete mode 100644 mix_video/COPYING delete mode 100644 mix_video/ChangeLog delete mode 100644 mix_video/INSTALL delete mode 100644 mix_video/NEWS delete mode 100644 mix_video/README delete mode 100644 mix_video/docs/readme delete mode 100644 mix_video/docs/reference/MixVideo/BackupMixVideo-docs.sgml delete mode 100644 mix_video/docs/reference/MixVideo/MixVideo-sections.txt delete mode 100644 mix_video/docs/reference/MixVideo/MixVideo.types delete mode 100644 mix_video/docs/reference/MixVideo/html/MixBuffer.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixDisplay.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixDisplayX11.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixDrmParams.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideo-mixvideodef.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideo.devhelp delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideo.devhelp2 delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideo.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParams.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDec.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecH264.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecMP42.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecVC1.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEnc.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncH264.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncMPEG4.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoDecodeParams.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoEncodeParams.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoFrame.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoInitParams.html delete mode 100644 mix_video/docs/reference/MixVideo/html/MixVideoRenderParams.html delete mode 100644 mix_video/docs/reference/MixVideo/html/api-index-full.html delete mode 100644 mix_video/docs/reference/MixVideo/html/ch01.html delete mode 100644 mix_video/docs/reference/MixVideo/html/index.html delete mode 100644 mix_video/docs/reference/MixVideo/html/index.sgml delete mode 100644 mix_video/docs/reference/MixVideo/html/object-tree.html delete mode 100644 mix_video/docs/reference/MixVideo/html/style.css delete mode 100644 mix_video/m4/as-mix-version.m4 delete mode 
100644 mix_video/mixvideo.pc.in delete mode 100644 mix_video/mixvideoint.pc.in delete mode 100644 mix_video/src/Android.mk delete mode 100644 mix_video/src/mixbuffer.cpp delete mode 100644 mix_video/src/mixbuffer.h delete mode 100644 mix_video/src/mixbuffer_private.h delete mode 100644 mix_video/src/mixbufferpool.cpp delete mode 100644 mix_video/src/mixbufferpool.h delete mode 100644 mix_video/src/mixdisplay.cpp delete mode 100644 mix_video/src/mixdisplay.h delete mode 100644 mix_video/src/mixdisplayandroid.cpp delete mode 100644 mix_video/src/mixdisplayandroid.h delete mode 100644 mix_video/src/mixdisplayx11.cpp delete mode 100644 mix_video/src/mixdisplayx11.h delete mode 100644 mix_video/src/mixframemanager.cpp delete mode 100644 mix_video/src/mixframemanager.h delete mode 100644 mix_video/src/mixsurfacepool.cpp delete mode 100644 mix_video/src/mixsurfacepool.h delete mode 100644 mix_video/src/mixvideo.cpp delete mode 100644 mix_video/src/mixvideo.h delete mode 100644 mix_video/src/mixvideo_private.h delete mode 100644 mix_video/src/mixvideocaps.cpp delete mode 100644 mix_video/src/mixvideocaps.h delete mode 100644 mix_video/src/mixvideoconfigparams.cpp delete mode 100644 mix_video/src/mixvideoconfigparams.h delete mode 100644 mix_video/src/mixvideoconfigparamsdec.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsdec.h delete mode 100644 mix_video/src/mixvideoconfigparamsdec_h264.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsdec_h264.h delete mode 100644 mix_video/src/mixvideoconfigparamsdec_mp42.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsdec_mp42.h delete mode 100644 mix_video/src/mixvideoconfigparamsdec_vc1.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsdec_vc1.h delete mode 100644 mix_video/src/mixvideoconfigparamsenc.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsenc.h delete mode 100644 mix_video/src/mixvideoconfigparamsenc_h263.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsenc_h263.h delete mode 100644 mix_video/src/mixvideoconfigparamsenc_h264.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsenc_h264.h delete mode 100644 mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsenc_mpeg4.h delete mode 100644 mix_video/src/mixvideoconfigparamsenc_preview.cpp delete mode 100644 mix_video/src/mixvideoconfigparamsenc_preview.h delete mode 100644 mix_video/src/mixvideodecodeparams.cpp delete mode 100644 mix_video/src/mixvideodecodeparams.h delete mode 100644 mix_video/src/mixvideodef.h delete mode 100644 mix_video/src/mixvideoencodeparams.cpp delete mode 100644 mix_video/src/mixvideoencodeparams.h delete mode 100644 mix_video/src/mixvideoformat.cpp delete mode 100644 mix_video/src/mixvideoformat.h delete mode 100755 mix_video/src/mixvideoformat_h264.cpp delete mode 100755 mix_video/src/mixvideoformat_h264.h delete mode 100644 mix_video/src/mixvideoformat_mp42.cpp delete mode 100644 mix_video/src/mixvideoformat_mp42.h delete mode 100644 mix_video/src/mixvideoformat_vc1.cpp delete mode 100644 mix_video/src/mixvideoformat_vc1.h delete mode 100644 mix_video/src/mixvideoformatenc.cpp delete mode 100644 mix_video/src/mixvideoformatenc.h delete mode 100644 mix_video/src/mixvideoformatenc_h263.cpp delete mode 100644 mix_video/src/mixvideoformatenc_h263.h delete mode 100644 mix_video/src/mixvideoformatenc_h264.cpp delete mode 100644 mix_video/src/mixvideoformatenc_h264.h delete mode 100644 mix_video/src/mixvideoformatenc_mpeg4.cpp delete mode 100644 
mix_video/src/mixvideoformatenc_mpeg4.h delete mode 100644 mix_video/src/mixvideoformatenc_preview.cpp delete mode 100644 mix_video/src/mixvideoformatenc_preview.h delete mode 100644 mix_video/src/mixvideoformatqueue.h delete mode 100644 mix_video/src/mixvideoframe.cpp delete mode 100644 mix_video/src/mixvideoframe.h delete mode 100644 mix_video/src/mixvideoframe_private.h delete mode 100644 mix_video/src/mixvideoinitparams.cpp delete mode 100644 mix_video/src/mixvideoinitparams.h delete mode 100644 mix_video/src/mixvideolog.h delete mode 100644 mix_video/src/mixvideorenderparams.cpp delete mode 100644 mix_video/src/mixvideorenderparams.h delete mode 100644 mix_video/src/mixvideorenderparams_internal.h delete mode 100644 mix_video/src/mixvideothread.cpp delete mode 100644 mix_video/src/mixvideothread.h delete mode 100644 mix_video/src/test.cpp delete mode 100644 mix_video/test/src/test_framemanager.cpp diff --git a/Android.mk b/Android.mk index 825caaa..20651e9 100644 --- a/Android.mk +++ b/Android.mk @@ -4,10 +4,6 @@ ifeq ($(INTEL_VA),true) include $(CLEAR_VARS) VENDORS_INTEL_MRST_LIBMIX_ROOT := $(LOCAL_PATH) - -include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_common/src/Android.mk -#include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_audio/src/Android.mk -include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_video/src/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_vbp/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk @@ -16,4 +12,5 @@ include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/asf_extractor/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/vavideodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/libI420colorconvert/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/videoedit/Android.mk + endif diff --git a/mix_common/AUTHORS b/mix_common/AUTHORS deleted file mode 100644 index 2175750..0000000 --- a/mix_common/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Khanh Nguyen diff --git a/mix_common/COPYING b/mix_common/COPYING deleted file mode 100644 index a4f852c..0000000 --- a/mix_common/COPYING +++ /dev/null @@ -1,26 +0,0 @@ -INTEL SOFTWARE LICENSE AGREEMENT (Alpha, Beta, Prototype Site License) - -IMPORTANT - READ BEFORE COPYING, INSTALLING OR USING. -Do not use or load this software and any associated materials (collectively, the “Software”) until you have carefully read the following terms and conditions. By loading or using the Software, you agree to the terms of this Agreement. If you do not wish to so agree, do not install or use the Software. - - -LICENSE. Intel hereby grants you a limited, nontransferable, non-sublicenseable, nonexclusive, royalty-free, fully-paid license under Intel’s copyrights to use the Software on your organization’s computers solely for your organization’s internal evaluation and testing in connection with Intel products designed for the Software, and you may make a reasonable number of  copies of the Software for internal use only, subject to these conditions: -1. You may not copy, modify, rent, sell, distribute, externally display, externally perform or transfer any part of the Software except as provided in this Agreement, and you agree to prevent unauthorized copying of the Software. -2. You may not reverse engineer, decompile, or disassemble the Software. -3. You may not sublicense the Software. -4. 
The Software may include portions offered on terms in addition to those set out here, as set out in a license accompanying those portions. -5. You may not subject the Software, in whole or in part, to any license obligations of Open Source Software including without limitation combining or distributing the Software with Open Source Software in a manner that subjects the Software or any portion of the Software provided by Intel hereunder to any license obligations of such Open Source Software. "Open Source Software" means any software that requires as a condition of use, modification and/or distribution of such software that such software or other software incorporated into, derived from or distributed with such software (a) be disclosed or distributed in source code form; or (b) be licensed by the user to third parties for the purpose of making and/or distributing derivative works; or (c) be redistributable at no charge. Open Source Software includes, without limitation, software licensed or distributed under any of the following licenses or distribution models, or licenses or distribution models substantially similar to any of the following: (a) GNU’s General Public License (GPL) or Lesser/Library GPL (LGPL), (b) the Artistic License (e.g., PERL), (c) the Mozilla Public License, (d) the Netscape Public License, (e) the Sun Community Source License (SCSL), (f) the Sun Industry Source License (SISL), (g) the Apache Software license and (h) the Common Public License (CPL). -OWNERSHIP OF SOFTWARE AND COPYRIGHTS. Title to all copies of the Software remains with Intel or its suppliers. The Software is copyrighted and protected by the laws of the United States and other countries, and international treaty provisions. You may not remove any copyright notices from the Software. Intel may make changes to the Software, or to items referenced therein, at any time without notice, but is not obligated to support, update, upgrade or provide training for the Software. Except as otherwise expressly provided, Intel grants no express or implied right under Intel patents, copyrights, trademarks, or other intellectual property rights. -EXCLUSION OF WARRANTIES . THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY EXPRESS OR IMPLIED WARRANTY OF ANY KIND INCLUDING WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, OR FITNESS FOR A PARTICULAR PURPOSE. Intel does not warrant or assume responsibility for the accuracy or completeness of any information, text, graphics, links or other items contained within the Software. -LIMITATION OF LIABILITY. IN NO EVENT SHALL INTEL OR ITS SUPPLIERS BE LIABLE FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, LOST PROFITS, BUSINESS INTERRUPTION OR LOST INFORMATION) ARISING OUT OF THE USE OF OR INABILITY TO USE THE SOFTWARE, EVEN IF INTEL HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. SOME JURISDICTIONS PROHIBIT EXCLUSION OR LIMITATION OF LIABILITY FOR IMPLIED WARRANTIES OR CONSEQUENTIAL OR INCIDENTAL DAMAGES, SO THE ABOVE LIMITATION MAY NOT APPLY TO YOU. YOU MAY ALSO HAVE OTHER LEGAL RIGHTS THAT VARY FROM JURISDICTION TO JURISDICTION. THE SOFTWARE LICENSED HEREUNDER IS NOT DESIGNED OR INTENDED FOR USE IN ANY APPLICATION IN WHICH THE FAILURE OF THE SOFTWARE COULD LEAD TO PERSONAL INJURY OR DEATH. 
YOU SHALL INDEMNIFY AND HOLD INTEL AND THE INTEL PARTIES HARMLESS AGAINST ALL CLAIMS, COSTS, DAMAGES, AND EXPENSES, AND REASONABLE ATTORNEY FEES ARISING OUT OF, DIRECTLY OR INDIRECTLY, THE UNINTENDED USE OF THE SOFTWARE AND ANY CLAIM OF PRODUCT LIABILITY, PERSONAL INJURY OR DEATH ASSOCIATED WITH ANY UNINTENDED USE, EVEN IF SUCH CLAIM ALLEGES THAT AN INTEL PARTY WAS NEGLIGENT REGARDING THE DESIGN OR MANUFACTURE OF THE SOFTWARE. THE LIMITED REMEDIES, WARRANTY DISCLAIMER AND LIMITED LIABILITY ARE FUNDAMENTAL ELEMENTS OF THE BASIS OF THE BARGAIN BETWEEN INTEL AND YOU. INTEL WOULD NOT BE ABLE TO PROVIDE THE SOFTWARE WITHOUT SUCH LIMITATIONS. -TERMINATION OF THIS AGREEMENT. Intel may terminate this Agreement at any time if you violate its terms. Upon termination, you will immediately destroy the Software or return all copies of the Software to Intel (including providing certification of such destruction back to Intel). In the event of termination of this Agreement, all licenses granted to you hereunder shall immediately terminate. -APPLICABLE LAWS. Claims arising under this Agreement shall be governed by the laws of Delaware, excluding its principles of conflict of laws and the United Nations Convention on Contracts for the Sale of Goods. You may not export the Software in violation of applicable export laws and regulations. -GOVERNMENT RESTRICTED RIGHTS. The Software is provided with "RESTRICTED RIGHTS." Use, duplication or disclosure by the government is subject to restrictions as set forth in FAR52.227-14 and DFAR252.227-7013 et seq. or their successors. Use of the Software by the government constitutes acknowledgment of Intel's proprietary rights therein. Contractor or Manufacturer is Intel Corporation, 2200 Mission College Blvd., Santa Clara, CA 95052. -CONFIDENTIALITY. You shall not disclose the terms or existence of this Agreement or use Intel's name in any publications, advertisements, or other announcements without Intel's prior written consent. You do not have any rights to use any Intel trademarks or logos. -ASSIGNMENT. You may not delegate, assign or transfer this Agreement, the license(s) granted or any of your rights or duties hereunder, expressly, by implication, by operation of law, by way of merger (regardless of whether you are the surviving entity) or acquisition, or otherwise and any attempt to do so, without Intel’s express prior written consent, shall be null and void. Intel may assign this Agreement, and its rights and obligations hereunder, in its sole discretion. -ENTIRE AGREEMENT. The terms and conditions of this Agreement constitutes the entire agreement between the parties with respect to the subject matter hereof, and merges and supersedes all prior, contemporaneous agreements, understandings, negotiations and discussions. Neither of the parties hereto shall be bound by any conditions, definitions, warranties, understandings or representations with respect to the subject matter hereof other than as expressly provided for herein. Intel is not obligated under any other agreements unless they are in writing and signed by an authorized representative of Intel. -NO AGENCY Nothing contained herein shall be construed as creating any agency, employment relationship, partnership, principal-agent or other form of joint enterprise between the parties. 
-SEVERABILITY In the event that any provision of this Agreement shall be unenforceable or invalid under any applicable law or be so held by an applicable court decision, such unenforceability or invalidity shall not render this Agreement unenforceable or invalid as a whole, and, in such event, such provision shall be changed and interpreted so as to best accomplish the objectives of such unenforceable or invalid provision within the limits of applicable law or applicable court decisions. -WAIVER The failure of either party to require performance by the other party of any provision hereof shall not affect the full right to require such performance at any time thereafter; nor shall the waiver by either party of a breach of any provision hereof be taken or held to be a waiver of the provision itself. -CONTROLLING LANGUAGE. Translated versions of this Agreement may be provided in local languages for informational purposes only, provided however, that the English language version of this Agreement shall be controlling in all jurisdictions. - diff --git a/mix_common/ChangeLog b/mix_common/ChangeLog deleted file mode 100644 index d5dceab..0000000 --- a/mix_common/ChangeLog +++ /dev/null @@ -1,32 +0,0 @@ -2010-01-25 Echo Choi - - * Updated version to 0.1.8 - -2010-01-11 Echo Choi - - * Updated version to 0.1.6 - * Added NEED_RETRY and ERRNO error code to support retry and errno. - -2009-11-18 Echo Choi - - * Fixed inclusion of m4 directory. - * Added return code. - -2009-11-13 Echo Choi - - * Updated version to 0.1.5 - * Added additional error codes. - -2009-10-16 Echo Choi - - * Included mixdrmparams.* - -2009-10-14 Echo Choi - - * Updated version to 0.1.4. - -2009-10-08 Echo Choi - - * Updated version to 0.1.3 and packaged for build. - -Initial version diff --git a/mix_common/INSTALL b/mix_common/INSTALL deleted file mode 100644 index 8b82ade..0000000 --- a/mix_common/INSTALL +++ /dev/null @@ -1,291 +0,0 @@ -Installation Instructions -************************* - -Copyright (C) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005, -2006, 2007, 2008 Free Software Foundation, Inc. - - This file is free documentation; the Free Software Foundation gives -unlimited permission to copy, distribute and modify it. - -Basic Installation -================== - - Briefly, the shell commands `./configure; make; make install' should -configure, build, and install this package. The following -more-detailed instructions are generic; see the `README' file for -instructions specific to this package. - - The `configure' shell script attempts to guess correct values for -various system-dependent variables used during compilation. It uses -those values to create a `Makefile' in each directory of the package. -It may also create one or more `.h' files containing system-dependent -definitions. Finally, it creates a shell script `config.status' that -you can run in the future to recreate the current configuration, and a -file `config.log' containing compiler output (useful mainly for -debugging `configure'). - - It can also use an optional file (typically called `config.cache' -and enabled with `--cache-file=config.cache' or simply `-C') that saves -the results of its tests to speed up reconfiguring. Caching is -disabled by default to prevent problems with accidental use of stale -cache files. 
- - If you need to do unusual things to compile the package, please try -to figure out how `configure' could check whether to do them, and mail -diffs or instructions to the address given in the `README' so they can -be considered for the next release. If you are using the cache, and at -some point `config.cache' contains results you don't want to keep, you -may remove or edit it. - - The file `configure.ac' (or `configure.in') is used to create -`configure' by a program called `autoconf'. You need `configure.ac' if -you want to change it or regenerate `configure' using a newer version -of `autoconf'. - -The simplest way to compile this package is: - - 1. `cd' to the directory containing the package's source code and type - `./configure' to configure the package for your system. - - Running `configure' might take a while. While running, it prints - some messages telling which features it is checking for. - - 2. Type `make' to compile the package. - - 3. Optionally, type `make check' to run any self-tests that come with - the package. - - 4. Type `make install' to install the programs and any data files and - documentation. - - 5. You can remove the program binaries and object files from the - source code directory by typing `make clean'. To also remove the - files that `configure' created (so you can compile the package for - a different kind of computer), type `make distclean'. There is - also a `make maintainer-clean' target, but that is intended mainly - for the package's developers. If you use it, you may have to get - all sorts of other programs in order to regenerate files that came - with the distribution. - - 6. Often, you can also type `make uninstall' to remove the installed - files again. - -Compilers and Options -===================== - - Some systems require unusual options for compilation or linking that -the `configure' script does not know about. Run `./configure --help' -for details on some of the pertinent environment variables. - - You can give `configure' initial values for configuration parameters -by setting variables in the command line or in the environment. Here -is an example: - - ./configure CC=c99 CFLAGS=-g LIBS=-lposix - - *Note Defining Variables::, for more details. - -Compiling For Multiple Architectures -==================================== - - You can compile the package for more than one kind of computer at the -same time, by placing the object files for each architecture in their -own directory. To do this, you can use GNU `make'. `cd' to the -directory where you want the object files and executables to go and run -the `configure' script. `configure' automatically checks for the -source code in the directory that `configure' is in and in `..'. - - With a non-GNU `make', it is safer to compile the package for one -architecture at a time in the source code directory. After you have -installed the package for one architecture, use `make distclean' before -reconfiguring for another architecture. - - On MacOS X 10.5 and later systems, you can create libraries and -executables that work on multiple system types--known as "fat" or -"universal" binaries--by specifying multiple `-arch' options to the -compiler but only a single `-arch' option to the preprocessor. 
Like -this: - - ./configure CC="gcc -arch i386 -arch x86_64 -arch ppc -arch ppc64" \ - CXX="g++ -arch i386 -arch x86_64 -arch ppc -arch ppc64" \ - CPP="gcc -E" CXXCPP="g++ -E" - - This is not guaranteed to produce working output in all cases, you -may have to build one architecture at a time and combine the results -using the `lipo' tool if you have problems. - -Installation Names -================== - - By default, `make install' installs the package's commands under -`/usr/local/bin', include files under `/usr/local/include', etc. You -can specify an installation prefix other than `/usr/local' by giving -`configure' the option `--prefix=PREFIX'. - - You can specify separate installation prefixes for -architecture-specific files and architecture-independent files. If you -pass the option `--exec-prefix=PREFIX' to `configure', the package uses -PREFIX as the prefix for installing programs and libraries. -Documentation and other data files still use the regular prefix. - - In addition, if you use an unusual directory layout you can give -options like `--bindir=DIR' to specify different values for particular -kinds of files. Run `configure --help' for a list of the directories -you can set and what kinds of files go in them. - - If the package supports it, you can cause programs to be installed -with an extra prefix or suffix on their names by giving `configure' the -option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'. - -Optional Features -================= - - Some packages pay attention to `--enable-FEATURE' options to -`configure', where FEATURE indicates an optional part of the package. -They may also pay attention to `--with-PACKAGE' options, where PACKAGE -is something like `gnu-as' or `x' (for the X Window System). The -`README' should mention any `--enable-' and `--with-' options that the -package recognizes. - - For packages that use the X Window System, `configure' can usually -find the X include and library files automatically, but if it doesn't, -you can use the `configure' options `--x-includes=DIR' and -`--x-libraries=DIR' to specify their locations. - -Particular systems -================== - - On HP-UX, the default C compiler is not ANSI C compatible. If GNU -CC is not installed, it is recommended to use the following options in -order to use an ANSI C compiler: - - ./configure CC="cc -Ae" - -and if that doesn't work, install pre-built binaries of GCC for HP-UX. - - On OSF/1 a.k.a. Tru64, some versions of the default C compiler cannot -parse its `' header file. The option `-nodtk' can be used as -a workaround. If GNU CC is not installed, it is therefore recommended -to try - - ./configure CC="cc" - -and if that doesn't work, try - - ./configure CC="cc -nodtk" - -Specifying the System Type -========================== - - There may be some features `configure' cannot figure out -automatically, but needs to determine by the type of machine the package -will run on. Usually, assuming the package is built to be run on the -_same_ architectures, `configure' can figure that out, but if it prints -a message saying it cannot guess the machine type, give it the -`--build=TYPE' option. TYPE can either be a short name for the system -type, such as `sun4', or a canonical name which has the form: - - CPU-COMPANY-SYSTEM - -where SYSTEM can have one of these forms: - - OS KERNEL-OS - - See the file `config.sub' for the possible values of each field. If -`config.sub' isn't included in this package, then this package doesn't -need to know the machine type. 
- - If you are _building_ compiler tools for cross-compiling, you should -use the option `--target=TYPE' to select the type of system they will -produce code for. - - If you want to _use_ a cross compiler, that generates code for a -platform different from the build platform, you should specify the -"host" platform (i.e., that on which the generated programs will -eventually be run) with `--host=TYPE'. - -Sharing Defaults -================ - - If you want to set default values for `configure' scripts to share, -you can create a site shell script called `config.site' that gives -default values for variables like `CC', `cache_file', and `prefix'. -`configure' looks for `PREFIX/share/config.site' if it exists, then -`PREFIX/etc/config.site' if it exists. Or, you can set the -`CONFIG_SITE' environment variable to the location of the site script. -A warning: not all `configure' scripts look for a site script. - -Defining Variables -================== - - Variables not defined in a site shell script can be set in the -environment passed to `configure'. However, some packages may run -configure again during the build, and the customized values of these -variables may be lost. In order to avoid this problem, you should set -them in the `configure' command line, using `VAR=value'. For example: - - ./configure CC=/usr/local2/bin/gcc - -causes the specified `gcc' to be used as the C compiler (unless it is -overridden in the site shell script). - -Unfortunately, this technique does not work for `CONFIG_SHELL' due to -an Autoconf bug. Until the bug is fixed you can use this workaround: - - CONFIG_SHELL=/bin/bash /bin/bash ./configure CONFIG_SHELL=/bin/bash - -`configure' Invocation -====================== - - `configure' recognizes the following options to control how it -operates. - -`--help' -`-h' - Print a summary of all of the options to `configure', and exit. - -`--help=short' -`--help=recursive' - Print a summary of the options unique to this package's - `configure', and exit. The `short' variant lists options used - only in the top level, while the `recursive' variant lists options - also present in any nested packages. - -`--version' -`-V' - Print the version of Autoconf used to generate the `configure' - script, and exit. - -`--cache-file=FILE' - Enable the cache: use and save the results of the tests in FILE, - traditionally `config.cache'. FILE defaults to `/dev/null' to - disable caching. - -`--config-cache' -`-C' - Alias for `--cache-file=config.cache'. - -`--quiet' -`--silent' -`-q' - Do not print messages saying which checks are being made. To - suppress all normal output, redirect it to `/dev/null' (any error - messages will still be shown). - -`--srcdir=DIR' - Look for the package's source code in directory DIR. Usually - `configure' can determine that directory automatically. - -`--prefix=DIR' - Use DIR as the installation prefix. *Note Installation Names:: - for more details, including other options available for fine-tuning - the installation locations. - -`--no-create' -`-n' - Run the configure checks, but stop before creating any output - files. - -`configure' also accepts some other, not widely useful, options. Run -`configure --help' for more details. 
- diff --git a/mix_common/NEWS b/mix_common/NEWS deleted file mode 100644 index 4327969..0000000 --- a/mix_common/NEWS +++ /dev/null @@ -1 +0,0 @@ -news diff --git a/mix_common/README b/mix_common/README deleted file mode 100644 index 8178c76..0000000 --- a/mix_common/README +++ /dev/null @@ -1 +0,0 @@ -readme diff --git a/mix_common/m4/as-mix-version.m4 b/mix_common/m4/as-mix-version.m4 deleted file mode 100644 index 8b09d7c..0000000 --- a/mix_common/m4/as-mix-version.m4 +++ /dev/null @@ -1,35 +0,0 @@ -dnl as-mix-version.m4 - -dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE) - -dnl example -dnl AS_MIX_VERSION(mixaudio,MIXAUDIO, 0, 3, 2,) -dnl for a 0.3.2 release version - -dnl this macro -dnl - defines [$PREFIX]_MAJOR, MINOR and REVISION, CURRENT, AGE -dnl - defines [$PREFIX], VERSION -dnl - AC_SUBST's all defined vars - -AC_DEFUN([AS_MIX_VERSION], -[ - PACKAGE=[$1] - [$2]_MAJOR=[$3] - [$2]_MINOR=[$4] - [$2]_REVISION=[$5] - [$2]_CURRENT=m4_eval([$3] + [$4]) - [$2]_AGE=[$4] - VERSION=[$3].[$4].[$5] - - AC_SUBST([$2]_MAJOR) - AC_SUBST([$2]_MINOR) - AC_SUBST([$2]_REVISION) - AC_SUBST([$2]_CURRENT) - AC_SUBST([$2]_AGE) - - AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Define the package name]) - AC_SUBST(PACKAGE) - AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Define the version]) - AC_SUBST(VERSION) - -]) diff --git a/mix_common/mixcommon.pc.in b/mix_common/mixcommon.pc.in deleted file mode 100644 index 05ef285..0000000 --- a/mix_common/mixcommon.pc.in +++ /dev/null @@ -1,11 +0,0 @@ -prefix=@prefix@ -exec_prefix=@exec_prefix@ -libdir=@libdir@ -includedir=@includedir@ - -Name: MI-X Library - Common -Description: Common library for MI-X -Requires: -Version: @VERSION@ -Libs: -L${libdir} -l@PACKAGE@ -Cflags: -I${includedir} diff --git a/mix_common/src/Android.mk b/mix_common/src/Android.mk deleted file mode 100644 index 3dffe11..0000000 --- a/mix_common/src/Android.mk +++ /dev/null @@ -1,37 +0,0 @@ -LOCAL_PATH := $(call my-dir) -include $(CLEAR_VARS) - -LOCAL_SRC_FILES := \ - mixlog.cpp \ - mixparams.cpp \ - mixdrmparams.cpp \ - j_slist.cpp \ - j_queue.cpp \ - j_hashtable.cpp - - -LOCAL_C_INCLUDES := \ - $(LOCAL_PATH) - -LOCAL_SHARED_LIBRARIES := \ - libcutils - -LOCAL_CFLAGS := -DANDROID - - -LOCAL_COPY_HEADERS_TO := libmixcommon - -LOCAL_COPY_HEADERS := \ - mixtypes.h \ - j_slist.h \ - j_queue.h \ - j_hashtable.h \ - mixlog.h \ - mixresult.h \ - mixparams.h \ - mixdrmparams.h - -LOCAL_MODULE_TAGS := optional -LOCAL_MODULE := libmixcommon - -include $(BUILD_SHARED_LIBRARY) diff --git a/mix_common/src/j_hashtable.cpp b/mix_common/src/j_hashtable.cpp deleted file mode 100644 index e086c54..0000000 --- a/mix_common/src/j_hashtable.cpp +++ /dev/null @@ -1,321 +0,0 @@ -#include -#include - -#include - - -/* - * Notice: this is not thread-safe but re-entrable API. 
- */ -JHashTable* j_hash_table_new_full(JHashFunc hash_func, - JEqualFunc key_equal_func, JDestroyNotify key_destroy_func, - JDestroyNotify value_destroy_func) -{ - JHashTable *pTable = (JHashTable*)malloc(sizeof (JHashTable)); - assert(pTable != NULL); - pTable->hash_func = hash_func; - pTable->key_equal_func = key_equal_func; - pTable->key_destroy_func = key_destroy_func; - pTable->value_destroy_func = value_destroy_func; - - pTable->table_size = INIT_TABLE_SIZE; - pTable->hash_table = (JHashItem**) malloc(sizeof (JHashItem*) * pTable->table_size); - - memset(pTable->hash_table, 0, sizeof(JHashItem*) * pTable->table_size); - - assert(pTable->hash_table != NULL); - pTable->ref_count = 1; - return pTable; -} - -void j_hash_table_unref(JHashTable* pTable) { - assert(pTable != NULL); - assert(pTable->hash_table != NULL); - - pTable->ref_count --; - if (pTable->ref_count == 0) { - j_hash_table_remove_all(pTable); - free(pTable->hash_table); - free(pTable); - } -} - -void j_hash_table_remove_all(JHashTable *pTable) { - int i; - - JHashItem *pItem = NULL; - JHashItem *next = NULL; - - assert(pTable != NULL); - - for (i = 0; i < pTable->table_size; i ++) { - pItem = pTable->hash_table[i]; - while (pItem != NULL) { - next = pItem->next; - if (pTable->key_destroy_func != NULL) pTable->key_destroy_func(pItem->key); - if (pTable->value_destroy_func != NULL) pTable->value_destroy_func(pItem->data); - free(pItem); - pItem = next; - } - pTable->hash_table[i] = NULL; - } -} - -void * j_hash_table_lookup(JHashTable *pTable, void * key) -{ - int i; - int hash_key; - int index; - - assert(pTable != NULL); - assert(pTable->hash_table != NULL); - - JHashItem *pItem = NULL; - JHashItem *next = NULL; - - if (pTable->hash_func != NULL) { - hash_key = pTable->hash_func(key); - } else { - hash_key = (int)key; - } - - index = hash_key % pTable->table_size; - - pItem = pTable->hash_table[index]; - - while (pItem != NULL) { - if (key == pItem->key) break; - pItem = pItem->next; - } - - if (pItem == NULL) return NULL; - - return pItem->data; -} - -void j_hash_table_insert(JHashTable *pTable, void * key, void * data) { - JHashItem *pItem = (JHashItem*) malloc (sizeof (JHashItem)); - JHashItem *pExistItem = NULL; - - int hash_key; - unsigned int index; - - assert (pItem != NULL); - - pItem->key = key; - pItem->data = data; - - if (pTable->hash_func != NULL) { - hash_key = pTable->hash_func(key); - } else { - hash_key = (int)key; - } - - index = hash_key % pTable->table_size; - - pExistItem = pTable->hash_table[index]; - - pItem->next = pExistItem; - - pTable->hash_table[index] = pItem; -} - -int j_hash_table_remove(JHashTable *pTable, void *key) -{ - JHashItem *pItem = NULL; - JHashItem *pPrevItem = NULL; - - int hash_key; - int index; - - assert(pTable != NULL); - - if (pTable->hash_func != NULL) { - hash_key = pTable->hash_func(key); - } else { - hash_key = (int)key; - } - - index = hash_key % pTable->table_size; - - pPrevItem = pItem = pTable->hash_table[index]; - - while (pItem != NULL) { - if (pItem->key == key) break; - pPrevItem = pItem; - pItem = pItem->next; - } - - if (pItem == NULL) { - // not found - return 0; - } - - if (pItem == pTable->hash_table[index]) { - pTable->hash_table[index] = pItem->next; - } else { - pPrevItem->next = pItem->next; - } - - if (pTable->key_destroy_func) { - pTable->key_destroy_func(pItem->key); - } - - if (pTable->value_destroy_func) { - pTable->value_destroy_func(pItem->data); - } - - free(pItem); - return 1; -} - -int j_hash_table_lookup_extended(JHashTable *pTable, - void* 
key, void *orig_key, void *value) -{/* - int i; - int hash_key; - int index; - int j=0; - - assert(pTable != NULL); - assert(pTable->hash_table != NULL); - - JHashItem *pItem = NULL; - JHashItem *next = NULL; - - if (pTable->hash_func != NULL) { - hash_key = pTable->hash_func(key); - } else { - hash_key = key; - } - - index = hash_key % pTable->table_size; - - pItem = pTable->hash_table[index]; - - while (pItem != NULL) { - if (key == pItem->key) break; - pItem = pItem->next; - } - - - if (pItem) - { - if (orig_key) - *orig_key = (void *)pItem->key; - if (value) - *value = (void *)pItem->data; - j = 1; - } - else - j = 0; - */ // Priya: We don't need this implementation for now as we can replace with _lookup instead. - return 0; - -} - -unsigned int j_hash_table_foreach_remove(JHashTable *pTable, JHRFunc func, void *user_data) -{ - JHashItem *pItem = NULL; - JHashItem *pPrevItem = NULL; - - int hash_key; - int i; - unsigned int num_item_removed = 0; - - assert(pTable != NULL); - assert(func != NULL); - - for (i = 0; i < pTable->table_size; i ++ ) { - pPrevItem = pItem = pTable->hash_table[i]; - while (pItem != NULL) { - if (func(pItem->key, pItem->data, user_data)) { - //prev item is same - if (pItem == pTable->hash_table[i]) { - pTable->hash_table[i] = pItem->next; - pPrevItem = NULL; - } else { - pPrevItem->next = pItem->next; - } - - if (pTable->key_destroy_func) { - pTable->key_destroy_func(pItem->key); - } - - if (pTable->value_destroy_func) { - pTable->value_destroy_func(pItem->data); - } - - free(pItem); - num_item_removed ++; - } else { - pPrevItem = pItem; - } - - if (pPrevItem != NULL) { - pItem = pPrevItem->next; - } else { - pItem = pPrevItem = pTable->hash_table[i]; - } - - } - } - - return num_item_removed; -} - - -#ifdef _J_HASH_TABLE_UT_ -#include - -void DestroyKey(void* data) -{ - printf("%d is destroied\n", (int) data); -} - -void DestroyData(void* data) -{ - printf("0x%x(%d) is destroied\n", data, *(int*)data); - free(data); -} - -int testKeynData(void* key, void* data, void* user_data) -{ - return (0 == (((int)*(int*)data) % (unsigned int)user_data)); -} - -int main() { - JHashTable *pTable = j_hash_table_new_full(NULL, - NULL, DestroyKey, DestroyData); - int i; - void *data; - int *p; -#define KEY_TABLE_SIZE (INIT_TABLE_SIZE * 2 - 1) - void* key_table[KEY_TABLE_SIZE]; - for (i = 0; i < KEY_TABLE_SIZE; i ++) { - p = malloc(sizeof(int)); - *p = i; - j_hash_table_insert(pTable, p, p); - key_table[i] = p; - } - - for (i = 0; i < KEY_TABLE_SIZE; i ++) { - data = j_hash_table_lookup(pTable, key_table[i]); - printf("found 0x%x(%d)\n", data, *(int*)data); - } - - int num_elem = 0; - num_elem = j_hash_table_foreach_remove(pTable, testKeynData, 10); - printf("%d elements are removed\n", num_elem); - - int ret; - for (i = 0; i < 10; i ++) { - ret = j_hash_table_remove(pTable, key_table[i]); - printf("key[%d]:0x%x is removed(%d)\n", i, data, ret); - } - - j_hash_table_remove_all(pTable); - j_hash_table_unref(pTable); - return 0; -} -#endif diff --git a/mix_common/src/j_hashtable.h b/mix_common/src/j_hashtable.h deleted file mode 100644 index eb08b1a..0000000 --- a/mix_common/src/j_hashtable.h +++ /dev/null @@ -1,52 +0,0 @@ -#ifndef __J_HASH_TABLE__ -#define __J_HASH_TABLE__ -#ifdef __cplusplus -extern "C" { -#endif - - typedef unsigned int (*JHashFunc)(void *key); - typedef unsigned int (*JEqualFunc) (void *a, void *b); - typedef void (*JDestroyNotify) (void *data); - - typedef int (*JHRFunc) (void *key, void *value, void *user_data); - - typedef struct JHashItem_s { - struct 
JHashItem_s *next; - void* data; - void* key; - } JHashItem; - -#define INIT_TABLE_SIZE 256 - typedef struct JHashTable_s { - int ref_count; - int table_size; - - JHashFunc hash_func; - JEqualFunc key_equal_func; - JDestroyNotify key_destroy_func; - JDestroyNotify value_destroy_func; - JHashItem **hash_table; - } JHashTable; - - JHashTable* j_hash_table_new_full(JHashFunc hash_func, - JEqualFunc key_equal_func, JDestroyNotify key_destroy_func, - JDestroyNotify value_destroy_func); - - void j_hash_table_unref(JHashTable* pTable); - void j_hash_table_remove_all(JHashTable *pTable); - - void * j_hash_table_lookup(JHashTable *pTable, void * key); - - void j_hash_table_insert(JHashTable *pTable, void * key, void * data); - int j_hash_table_remove(JHashTable *pTable, void *key); - - int j_hash_table_lookup_extended(JHashTable *pTable, - void* lookup_key, void* orig_key, void* value); - - unsigned int j_hash_table_foreach_remove(JHashTable *pTable, - JHRFunc func, void *user_data); - -#ifdef __cplusplus -} -#endif -#endif diff --git a/mix_common/src/j_queue.cpp b/mix_common/src/j_queue.cpp deleted file mode 100644 index 301815a..0000000 --- a/mix_common/src/j_queue.cpp +++ /dev/null @@ -1,131 +0,0 @@ -#include -#include -#include - -int j_queue_is_empty(JQueue* queue) -{ - assert (queue); - assert((((queue->tail + queue->room_size) - queue->head) % queue->room_size) - == queue->element_count); - - return (queue->element_count == 0); -} - -void* j_queue_pop_head(JQueue* queue) -{ - void *ret; - assert (queue); - assert((((queue->tail + queue->room_size) - queue->head) % queue->room_size) - == queue->element_count); - - if (queue->element_count == 0) return NULL; - - ret = queue->rooms[queue->head]; - - queue->head = (queue->head + 1) % queue->room_size; - queue->element_count --; - - if (queue->element_count == 0) { - queue->head = queue->tail = 0; - } - return ret; -} - -void *j_queue_peek_head(JQueue* queue) -{ - void *ret; - assert (queue); - assert((((queue->tail + queue->room_size) - queue->head) % queue->room_size) - == queue->element_count); - - if (queue->element_count == 0) return NULL; - - ret = queue->rooms[queue->head]; - return ret; -} - -void j_queue_free(JQueue* queue) -{ - assert (queue); - assert (queue->rooms); - assert((((queue->tail + queue->room_size) - queue->head) % queue->room_size) - == queue->element_count); - free(queue->rooms); - queue->rooms = NULL; - free(queue); -} - -JQueue* j_queue_new() -{ - JQueue *queue = (JQueue*) malloc(sizeof(JQueue)); - assert (queue != NULL); - queue->room_size = INIT_ROOM_SIZE; - - queue->rooms = (void**) malloc(sizeof(void*) * queue->room_size); - assert (queue->rooms); - queue->head = queue->tail = 0; - queue->element_count = 0; - return queue; -} - -void j_queue_push_tail(JQueue *queue, void *data) -{ - assert((((queue->tail + queue->room_size) - queue->head) % queue->room_size) - == queue->element_count); - - if (queue->element_count == (queue->room_size -1)) { - queue->rooms = (void**) realloc(queue->rooms, sizeof(void*) * queue->room_size * 2); - if (queue->head > queue->tail) { - memcpy(&queue->rooms[0], &queue->rooms[queue->element_count], queue->tail); - queue->tail = queue->head + queue->element_count; - } - - queue->room_size = queue->room_size * 2; - assert(queue->rooms); - } - - queue->rooms[queue->tail] = data; - - queue->element_count ++; - queue->tail = (queue->tail + 1 + queue->room_size) % queue->room_size; -} - -#ifdef _J_QUEUE_UT_ -#include - -int main() { - JQueue *pQueue = j_queue_new(); - int i; - void *data; - int 
*p; -#define ELEM_TABLE_SIZE (INIT_ROOM_SIZE * 2 - 1) - - for (i = 0; i < ELEM_TABLE_SIZE; i ++) { - j_queue_push_tail(pQueue, i); - } - - printf("queue is empty(%d)\n", j_queue_is_empty(pQueue)); - - for (i = 0; i < ELEM_TABLE_SIZE; i ++) { - data = j_queue_pop_head(pQueue); - printf("elements(%d) poped %d\n", i, (int)data); - } - - printf("queue is empty(%d)\n", j_queue_is_empty(pQueue)); - - int j; - for (j = 0; j < ELEM_TABLE_SIZE; j ++) { - for (i = 0; i < 5; i ++) { - j_queue_push_tail(pQueue, i); - } - - for (i = 0; i < 4; i ++) { - data = j_queue_pop_head(pQueue); - printf("elements(%d) poped %d\n", i, (int)data); - } - } - - j_queue_free(pQueue); - return 0; -} -#endif diff --git a/mix_common/src/j_queue.h b/mix_common/src/j_queue.h deleted file mode 100644 index cebf21e..0000000 --- a/mix_common/src/j_queue.h +++ /dev/null @@ -1,33 +0,0 @@ -#ifndef __J_QUEUE__ -#define __J_QUEUE__ -#ifdef __cplusplus -extern "C" { -#endif - -#define INIT_ROOM_SIZE 64 - typedef struct JQueue_s { - unsigned int room_size; - void **rooms; - - //point to position for fetch - unsigned int head; - - //point to position for fill - unsigned int tail; - - //to double check the "element number" - unsigned int element_count; - } JQueue; - - int j_queue_is_empty(JQueue* queue); - void* j_queue_pop_head(JQueue* queue); - void *j_queue_peek_head(JQueue* queue); - void j_queue_free(JQueue* queue); - JQueue* j_queue_new(); - void j_queue_push_tail(JQueue *queue, void *data); - -#ifdef __cplusplus -} -#endif -#endif - diff --git a/mix_common/src/j_slist.cpp b/mix_common/src/j_slist.cpp deleted file mode 100644 index a794211..0000000 --- a/mix_common/src/j_slist.cpp +++ /dev/null @@ -1,214 +0,0 @@ -#include -#include -#include - -JSList* j_slist_append (JSList* list, void* data) -{ - JSList *item = (JSList*) malloc(sizeof(JSList)); - item->data = data; - item->next = NULL; - - if (list == NULL) { - return item; - } - - JSList *traverse_item = list; - JSList *tail = NULL; - - while (traverse_item != NULL) { - tail = traverse_item; - traverse_item = traverse_item->next; - } - tail->next = item; - - return list; -} - -JSList* j_slist_find (JSList *list, void* data) -{ - JSList *traverse_item = list; - while (traverse_item != NULL) { - if (traverse_item->data == data) break; - traverse_item = traverse_item->next; - } - - return traverse_item; -} - -JSList* j_slist_remove(JSList *list, void* data) -{ - JSList *traverse_item = list; - JSList *prev_item = NULL; - - if (list->data == data) { - list = list->next; - free(traverse_item); - return list; - } - - while (traverse_item != NULL) { - if (traverse_item->data == data) break; - prev_item = traverse_item; - traverse_item = traverse_item->next; - } - - if (traverse_item != NULL) { - if (prev_item != NULL) { - assert(prev_item != NULL); // as 1st element is processed @ beginning - prev_item->next = traverse_item->next; - traverse_item->next = NULL; - free(traverse_item); - } - - } - - return list; -} - - -JSList* j_slist_remove_link(JSList *list, JSList* link) -{ - JSList *traverse_item = list; - JSList *tmp; - if (list == link) { - tmp = list->next; - link->next = NULL; -// TED return link->next; - return tmp; - } - - while (traverse_item != NULL) { - if (traverse_item->next == link) break; - traverse_item = traverse_item->next; - } - - if (traverse_item != NULL) { - traverse_item->next = link->next; - } - - link->next = NULL; - return list; -} - -JSList *j_slist_delete_link(JSList *list, JSList *link) -{ - list = j_slist_remove_link(list, link); - free(link); - return 
list; -} - -JSList *j_slist_concat(JSList* list1, JSList *list2) -{ - JSList *traverse_item = list1; - if (list1 == NULL) { - return list2; - } - - while (traverse_item->next != NULL) { - traverse_item = traverse_item->next; - } - - traverse_item->next = list2; - - return list1; -} - -unsigned int j_slist_length (JSList *list) -{ - unsigned int list_length = 0; - JSList *traverse_item = list; - while (traverse_item != NULL) { - list_length ++; - traverse_item = traverse_item->next; - } - return list_length; -} - -void *j_slist_nth_data(JSList *list, unsigned int n) -{ - unsigned int count = n; - JSList *traverse_item = list; - while (traverse_item != NULL) { - if (count == 0) break; - traverse_item = traverse_item->next; - count --; - } - return traverse_item? traverse_item->data : NULL; -} - -JSList* j_slist_find_custom(JSList *list, void* data, JCompareFunc func) -{ - JSList *traverse_item = list; - while (traverse_item != NULL) { - if (func(traverse_item->data, data) == 0) break; - traverse_item = traverse_item->next; - } - - return traverse_item; -} - -void j_slist_foreach(JSList *list, JFunc func, void* userdata) -{ - JSList *traverse_item = list; - while (traverse_item != NULL) { - func(traverse_item->data, userdata); - traverse_item = traverse_item->next; - } -} - -#ifdef _J_SLIST_UT_ -#include - -void testData(void* data, void* user_data) -{ - printf("test (%d)\n", (int) data); -} - -int main() { - JSList *pList = NULL; - JSList *pList2 = NULL; - int i; - -#define KEY_TABLE_SIZE 20 - for (i = 0; i < KEY_TABLE_SIZE; i ++) { - pList = j_slist_append(pList, i); - } - - assert(KEY_TABLE_SIZE == j_slist_length(pList)); - pList2 = NULL; - for (i = 0; i < KEY_TABLE_SIZE; i ++) { - pList2 = j_slist_find(pList, i); - - if (pList2) { - printf("Found data(%d)\n", i); - } - } - - pList2 = NULL; - for (i = 0; i < KEY_TABLE_SIZE; i ++) { - pList2 = j_slist_nth_data(pList, i); - if (pList2) { - printf("Found data(%d) @ %d\n", pList2->data, i); - } - } - - pList2 = NULL; - for (i = KEY_TABLE_SIZE; i > 0; i --) { - pList2 = j_slist_append(pList2, i); - } - - j_slist_foreach(pList, testData, 0); - printf("*************************************************************\n"); - pList = j_slit_concat(pList, pList2); - - j_slist_foreach(pList, testData, 0); - printf("*************************************************************\n"); - for (i = KEY_TABLE_SIZE; i > 0; i --) { - pList = j_slist_remove(pList, i); - } - - j_slist_foreach(pList, testData, 0); - - return 0; -} -#endif diff --git a/mix_common/src/j_slist.h b/mix_common/src/j_slist.h deleted file mode 100644 index d2f866b..0000000 --- a/mix_common/src/j_slist.h +++ /dev/null @@ -1,41 +0,0 @@ -#ifndef __J_SLIST_H__ -#define __J_SLIST_H__ -#ifdef __cplusplus -extern "C" { -#endif - - typedef struct JSList_s { - struct JSList_s *next; - void* data; - } JSList; - - typedef int (*JCompareFunc)(void* data1, void* data2); - typedef void (*JFunc)(void* data, void* userdata); - - JSList* j_slist_append (JSList* list, void* data); - - JSList* j_slist_find (JSList *list, void* data); - - JSList* j_slist_remove(JSList *list, void* data); - - JSList* j_slist_remove_link(JSList *list, JSList* link); - - JSList *j_slist_delete_link(JSList *list, JSList *link); - - JSList *j_slist_concat(JSList* list1, JSList *list2); - - unsigned int j_slist_length (JSList *list); - - void *j_slist_nth_data(JSList *list, unsigned int n); - - JSList* j_slist_find_custom(JSList *list, void* data, JCompareFunc func); - - void j_slist_foreach(JSList *list, JFunc func, void* 
userdata); - -#ifdef __cplusplus -} -#endif - -#endif - - diff --git a/mix_common/src/mixdrmparams.cpp b/mix_common/src/mixdrmparams.cpp deleted file mode 100644 index ae0ec80..0000000 --- a/mix_common/src/mixdrmparams.cpp +++ /dev/null @@ -1,45 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** - * SECTION:mixdrmparams - * @short_description: Drm parameters - * - * A data object which stores drm specific parameters. - */ - -#include "mixdrmparams.h" - - -MixDrmParams::MixDrmParams() { -} - -MixDrmParams::~MixDrmParams() { -} - -MixDrmParams *mix_drmparams_new(void) { - return new MixDrmParams(); -} - -MixDrmParams *mix_drmparams_ref(MixDrmParams *mix) { - return (MixDrmParams*)mix_params_ref(MIX_PARAMS(mix)); -} - -MixParams * MixDrmParams::dup () const { - MixParams* dup = new MixDrmParams(); - if (NULL != dup) { - if (FALSE == copy(dup)) { - dup->Unref(); - dup = NULL; - } - } - return dup; -} - - - diff --git a/mix_common/src/mixdrmparams.h b/mix_common/src/mixdrmparams.h deleted file mode 100644 index 9bbb2d0..0000000 --- a/mix_common/src/mixdrmparams.h +++ /dev/null @@ -1,69 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_DRMPARAMS_H__ -#define __MIX_DRMPARAMS_H__ - - -#include "mixparams.h" - -/** - * MIX_DRMPARAMS: - * @obj: object to be type-casted. - */ -#define MIX_DRMPARAMS(obj) (reinterpret_cast(obj)) - -/** - * MIX_IS_DRMPARAMS: - * @obj: an object. 
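 * (A note for clarity: because the cast above is a plain
 * reinterpret_cast, this check only catches NULL pointers; it cannot
 * detect an object of the wrong type.)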
- * - * Checks if the given object is an instance of #MixParams - */ -#define MIX_IS_DRMPARAMS(obj) (NULL != MIX_DRMPARAMS(obj)) - -/** - * MixDrmParams: - * - * MI-X Drm Parameter object - */ -class MixDrmParams : public MixParams { -public: - MixDrmParams(); - virtual ~MixDrmParams(); - virtual MixParams * dup () const; -}; - -/** - * mix_drmparams_new: - * @returns: A newly allocated instance of #MixDrmParams - * - * Use this method to create new instance of #MixDrmParams - */ -MixDrmParams *mix_drmparams_new(void); - -/** - * mix_drmparams_ref: - * @mix: object to add reference - * @returns: the MixDrmParams instance where reference count has been increased. - * - * Add reference count. - */ -MixDrmParams *mix_drmparams_ref(MixDrmParams *mix); - -/** - * mix_drmparams_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -#define mix_drmparams_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/* Class Methods */ - - -#endif /* __MIX_DRMPARAMS_H__ */ diff --git a/mix_common/src/mixlog.cpp b/mix_common/src/mixlog.cpp deleted file mode 100644 index cad5c5a..0000000 --- a/mix_common/src/mixlog.cpp +++ /dev/null @@ -1,270 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
- */ - -#include -#include "mixlog.h" - -#ifndef ANDROID -#ifdef MIX_LOG_USE_HT -#include "j_hashtable.h" -#endif -#endif - -#define MIX_DELOG_COMPS "MIX_DELOG_COMPS" -#define MIX_DELOG_FILES "MIX_DELOG_FILES" -#define MIX_DELOG_FUNCS "MIX_DELOG_FUNCS" -#define MIX_LOG_ENABLE "MIX_LOG_ENABLE" -#define MIX_DELOG_DELIMITERS " ,;" - -#define MIX_LOG_LEVEL "MIX_LOG_LEVEL" - -#ifndef ANDROID - -static GStaticMutex g_mutex = G_STATIC_MUTEX_INIT; - -#ifdef MIX_LOG_USE_HT -static JHashTable *g_defile_ht = NULL, *g_defunc_ht = NULL, *g_decom_ht = NULL; -static int g_mix_log_level = MIX_LOG_LEVEL_VERBOSE; -static int g_refcount = 0; - -#define mix_log_destroy_ht(ht) if(ht) { \ - if (ht == NULL || ht->ref_count <= 0) return; \ - j_hash_table_remove_all (ht); \ - j_hash_table_unref (ht); \ - ht = NULL; } - -void mix_log_get_ht(JHashTable **ht, const char *var) { - - const char *delog_list = NULL; - char *item = NULL; - if (!ht || !var) { - return; - } - - delog_list = g_getenv(var); - if (!delog_list) { - return; - } - - if (*ht == NULL) { - *ht = j_hash_table_new_full(g_str_hash, g_str_equal, NULL, NULL); - if (*ht == NULL) { - return; - } - } - - item = strtok((char *) delog_list, MIX_DELOG_DELIMITERS); - while (item != NULL) { - j_hash_table_insert(*ht, item, "true"); - item = strtok(NULL, MIX_DELOG_DELIMITERS); - } -} - -void mix_log_initialize_func() { - - const char *mix_log_level = NULL; - g_static_mutex_lock(&g_mutex); - - if (g_refcount == 0) { - - mix_log_level = g_getenv(MIX_LOG_LEVEL); - if (mix_log_level) { - g_mix_log_level = atoi(mix_log_level); - } - - mix_log_get_ht(&g_decom_ht, MIX_DELOG_COMPS); - mix_log_get_ht(&g_defile_ht, MIX_DELOG_FILES); - mix_log_get_ht(&g_defunc_ht, MIX_DELOG_FUNCS); - } - - g_refcount++; - - g_static_mutex_unlock(&g_mutex); -} - -void mix_log_finalize_func() { - - g_static_mutex_lock(&g_mutex); - - g_refcount--; - - if (g_refcount == 0) { - mix_log_destroy_ht(g_decom_ht); - mix_log_destroy_ht(g_defile_ht); - mix_log_destroy_ht(g_defunc_ht); - - g_mix_log_level = MIX_LOG_LEVEL_VERBOSE; - } - - if (g_refcount < 0) { - g_refcount = 0; - } - - g_static_mutex_unlock(&g_mutex); -} - -void mix_log_func(const char* comp, int level, const char *file, - const char *func, int line, const char *format, ...) 
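/* Filters each message against the env-configured threshold
   (MIX_LOG_LEVEL) and the MIX_DELOG_* hash tables built in
   mix_log_initialize_func(), then prints "LEVEL : file : func : "
   followed by the formatted message. */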
{ - - va_list args; - static char* loglevel[4] = {"**ERROR", "*WARNING", "INFO", "VERBOSE"}; - - if (!format) { - return; - } - - g_static_mutex_lock(&g_mutex); - - if (level > g_mix_log_level) { - goto exit; - } - - if (g_decom_ht) { - if (j_hash_table_lookup(g_decom_ht, comp)) { - goto exit; - } - } - - if (g_defile_ht) { - if (j_hash_table_lookup(g_defile_ht, file)) { - goto exit; - } - } - - if (g_defunc_ht) { - if (j_hash_table_lookup(g_defunc_ht, func)) { - goto exit; - } - } - - if (level > MIX_LOG_LEVEL_VERBOSE) { - level = MIX_LOG_LEVEL_VERBOSE; - } - if (level < MIX_LOG_LEVEL_ERROR) { - level = MIX_LOG_LEVEL_ERROR; - } - - g_print("%s : %s : %s : ", loglevel[level - 1], file, func); - - va_start(args, format); - g_vprintf(format, args); - va_end(args); - -exit: - g_static_mutex_unlock(&g_mutex); -} - -#else /* MIX_LOG_USE_HT */ - -bool mix_shall_delog(const char *name, const char *var) { - - const char *delog_list = NULL; - char *item = NULL; - bool delog = FALSE; - - if (!name || !var) { - return delog; - } - - delog_list = g_getenv(var); - if (!delog_list) { - return delog; - } - - item = strtok((char *) delog_list, MIX_DELOG_DELIMITERS); - while (item != NULL) { - if (strcmp(item, name) == 0) { - delog = TRUE; - break; - } - item = strtok(NULL, MIX_DELOG_DELIMITERS); - } - - return delog; -} - -bool mix_log_enabled() { - - const char *value = NULL; - value = g_getenv(MIX_LOG_ENABLE); - if (!value) { - return FALSE; - } - - if (value[0] == '0') { - return FALSE; - } - return TRUE; -} - -void mix_log_func(const char* comp, int level, const char *file, - const char *func, int line, const char *format, ...) { - - va_list args; - static char* loglevel[4] = { "**ERROR", "*WARNING", "INFO", "VERBOSE" }; - - const char *env_mix_log_level = NULL; - int mix_log_level_threhold = MIX_LOG_LEVEL_VERBOSE; - - if (!mix_log_enabled()) { - return; - } - - if (!format) { - return; - } - - g_static_mutex_lock(&g_mutex); - - /* log level */ - env_mix_log_level = g_getenv(MIX_LOG_LEVEL); - if (env_mix_log_level) { - mix_log_level_threhold = atoi(env_mix_log_level); - } - - if (level > mix_log_level_threhold) { - goto exit; - } - - /* component */ - if (mix_shall_delog(comp, MIX_DELOG_COMPS)) { - goto exit; - } - - /* files */ - if (mix_shall_delog(file, MIX_DELOG_FILES)) { - goto exit; - } - - /* functions */ - if (mix_shall_delog(func, MIX_DELOG_FUNCS)) { - goto exit; - } - - if (level > MIX_LOG_LEVEL_VERBOSE) { - level = MIX_LOG_LEVEL_VERBOSE; - } - if (level < MIX_LOG_LEVEL_ERROR) { - level = MIX_LOG_LEVEL_ERROR; - } - - g_print("%s : %s : %s : ", loglevel[level - 1], file, func); - - va_start(args, format); - g_vprintf(format, args); - va_end(args); - -exit: - g_static_mutex_unlock(&g_mutex); -} - - -#endif /* MIX_LOG_USE_HT */ - -#endif /* !ANDROID */ - diff --git a/mix_common/src/mixlog.h b/mix_common/src/mixlog.h deleted file mode 100644 index bf298dc..0000000 --- a/mix_common/src/mixlog.h +++ /dev/null @@ -1,71 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#include "mixtypes.h" - -#ifndef __MIX_LOG_H__ -#define __MIX_LOG_H__ - -/* Warning: don't call these functions */ -void mix_log_func( - const char* comp, int level, const char *file, - const char *func, int line, const char *format, ...); - -/* Components */ -#define MIX_VIDEO_COMP "mixvideo" -#define GST_MIX_VIDEO_DEC_COMP "gstmixvideodec" -#define GST_MIX_VIDEO_SINK_COMP "gstmixvideosink" -#define GST_MIX_VIDEO_ENC_COMP "gstmixvideoenc" - -#define MIX_AUDIO_COMP "mixaudio" -#define GST_MIX_AUDIO_DEC_COMP "gstmixaudiodec" -#define GST_MIX_AUDIO_SINK_COMP "gstmixaudiosink" - -/* log level */ -#define MIX_LOG_LEVEL_ERROR 1 -#define MIX_LOG_LEVEL_WARNING 2 -#define MIX_LOG_LEVEL_INFO 3 -#define MIX_LOG_LEVEL_VERBOSE 4 - - -/* MACROS for mixlog */ -#ifdef MIX_LOG_ENABLE - -#ifdef ANDROID - -#include - - -#undef MIX_LOG_LEVEL_ERROR -#undef MIX_LOG_LEVEL_WARNING -#undef MIX_LOG_LEVEL_INFO -#undef MIX_LOG_LEVEL_VERBOSE - -#define MIX_LOG_LEVEL_ERROR ANDROID_LOG_ERROR -#define MIX_LOG_LEVEL_WARNING ANDROID_LOG_WARN -#define MIX_LOG_LEVEL_INFO ANDROID_LOG_INFO -#define MIX_LOG_LEVEL_VERBOSE ANDROID_LOG_VERBOSE - -#define mix_log(comp, level, format, ...) \ - __android_log_print(level, comp, "%s():%d: "format, \ - __FUNCTION__, __LINE__, ##__VA_ARGS__) - -#else - -#define mix_log(comp, level, format, ...) \ - mix_log_func(comp, level, __FILE__, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) - -#endif /* ANDROID */ - -#else - -#define mix_log(comp, level, format, ...) - -#endif - -#endif diff --git a/mix_common/src/mixparams.cpp b/mix_common/src/mixparams.cpp deleted file mode 100644 index 3fff640..0000000 --- a/mix_common/src/mixparams.cpp +++ /dev/null @@ -1,126 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
-*/ - -/** - * SECTION:mixparams - * @short_description: Lightweight base class for the MIX media params - * - */ -#ifdef HAVE_CONFIG_H -#include "config.h" -#endif - -#include "mixparams.h" -#include - - -#define DEBUG_REFCOUNT - -MixParams::MixParams() - :ref_count(1) - ,_reserved(NULL) { -} - -MixParams::~MixParams() { - finalize(); -} - -MixParams* MixParams::Ref() { - this->ref_count++; - return this; -} - -void MixParams::Unref() { - this->ref_count--; - if (0 == this->ref_count) { - delete this; - } -} - -MixParams* MixParams::dup() const { - MixParams *ret = new MixParams(); - if (FALSE != copy(ret)) { - return ret; - } - return NULL; -} - -bool MixParams::copy(MixParams* target) const { - bool ret = FALSE; - if ( NULL != target) { - return TRUE; - } - return ret; -} - -void MixParams::finalize() { -} - -bool MixParams::equal(MixParams *obj) const { - bool ret = FALSE; - if ( NULL != obj) { - return TRUE; - } - return ret; -} - -MixParams* mix_params_new () { - /* we don't support dynamic types because they really aren't useful,*/ - /* and could cause ref_count problems */ - return new MixParams(); -} - -bool mix_params_copy (MixParams *target, const MixParams *src) { - if ( NULL != target && NULL != src) { - return src->copy(target); - } else - return FALSE; -} - -MixParams* mix_params_ref (MixParams *obj) { - if (NULL == obj) - return NULL; - return obj->Ref(); -} - -void mix_params_unref(MixParams *obj) { - if (NULL != obj) - obj->Unref(); -} - -void mix_params_replace (MixParams **olddata, MixParams *newdata) { - if (NULL == olddata) - return; - MixParams *olddata_val = *olddata; - if (olddata_val == newdata) - return; - if (NULL != newdata) - newdata->Ref(); - while (!android_atomic_cmpxchg ( - (int32_t)olddata_val,(int32_t)newdata, (int32_t *) (*olddata))) { - olddata_val = *olddata; - } - if (NULL != olddata_val) - olddata_val->Unref(); -} - -MixParams * mix_params_dup(const MixParams *obj) { - if (NULL != obj) { - return obj->dup(); - } else { - return NULL; - } -} - -bool mix_params_equal (MixParams *first, MixParams *second) { - if (NULL != first && NULL != second) - return first->equal(second); - else - return FALSE; -} - diff --git a/mix_common/src/mixparams.h b/mix_common/src/mixparams.h deleted file mode 100644 index f3395f4..0000000 --- a/mix_common/src/mixparams.h +++ /dev/null @@ -1,161 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
-*/ - -#ifndef __MIX_PARAMS_H__ -#define __MIX_PARAMS_H__ - -#include "mixtypes.h" - -#define MIX_PARAMS(obj) (reinterpret_cast ((obj))) -#define MIX_PARAMS_CAST(obj) ((MixParams*)(obj)) - -/** - * MIX_PARAMS_REFCOUNT: - * @obj: a #MixParams - * - * Get access to the reference count field of the object. - */ -#define MIX_PARAMS_REFCOUNT(obj) ((MIX_PARAMS_CAST(obj))->ref_count) - -/** - * MixParams: - * @instance: type instance - * @refcount: atomic refcount - * - * Base class for a refcounted parameter objects. - */ -class MixParams { - -public: - MixParams(); - virtual ~MixParams(); - MixParams* Ref(); - void Unref(); - int GetRefCount() { - return ref_count; - } - -public: - /** - * MixParamsDupFunction: - * @obj: Params to duplicate - * @returns: reference to cloned instance. - * - * Virtual function prototype for methods to create duplicate of instance. - * - */ - virtual MixParams * dup () const; - - /** - * MixParamsCopyFunction: - * @target: target of the copy - * @src: source of the copy - * @returns: intean indicates if copy is successful. - * - * Virtual function prototype for methods to create copies of instance. - * - */ - virtual bool copy(MixParams* target) const; - - /** - * MixParamsFinalizeFunction: - * @obj: Params to finalize - * - * Virtual function prototype for methods to free ressources used by - * object. - */ - virtual void finalize (); - - /** - * MixParamsEqualsFunction: - * @first: first object in the comparison - * @second: second object in the comparison - * - * Virtual function prototype for methods to compare 2 objects and check if they are equal. - */ - virtual bool equal (MixParams *obj) const; - -public: - /*< public >*/ - int ref_count; - - /*< private >*/ - void* _reserved; - -}; - - - -/** - * mix_params_new: - * @returns: return a newly allocated object. - * - * Create new instance of the object. - */ -MixParams* mix_params_new(); - -/** - * mix_params_copy: - * @target: copy to target - * @src: copy from source - * @returns: intean indicating if copy is successful. - * - * Copy data from one instance to the other. This method internally invoked the #MixParams::copy method such that derived object will be copied correctly. - */ -bool mix_params_copy(MixParams *target, const MixParams *src); - - -/** - * mix_params_ref: - * @obj: a #MixParams object. - * @returns: the object with reference count incremented. - * - * Increment reference count. - */ -MixParams* mix_params_ref(MixParams *obj); - - -/** - * mix_params_unref: - * @obj: a #MixParams object. - * - * Decrement reference count. - */ -void mix_params_unref (MixParams *obj); - -/** - * mix_params_replace: - * @olddata: pointer to a pointer to a object to be replaced - * @newdata: pointer to new object - * - * Modifies a pointer to point to a new object. The modification - * is done atomically, and the reference counts are updated correctly. - * Either @newdata and the value pointed to by @olddata may be NULL. - */ -void mix_params_replace(MixParams **olddata, MixParams *newdata); - -/** - * mix_params_dup: - * @obj: #MixParams object to duplicate. - * @returns: A newly allocated duplicate of the object, or NULL if failed. - * - * Duplicate the given #MixParams and allocate a new instance. This method is chained up properly and derive object will be dupped properly. - */ -MixParams *mix_params_dup(const MixParams *obj); - -/** - * mix_params_equal: - * @first: first object to compare - * @second: second object to compare - * @returns: intean indicates if the 2 object contains same data. 
- * - * Note that the parameter comparison compares the values that are hold inside the object, not for checking if the 2 pointers are of the same instance. - */ -bool mix_params_equal(MixParams *first, MixParams *second); -#endif - diff --git a/mix_common/src/mixresult.h b/mix_common/src/mixresult.h deleted file mode 100644 index e8325f7..0000000 --- a/mix_common/src/mixresult.h +++ /dev/null @@ -1,94 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2008-2009 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel’s prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ - -#ifndef MIX_RESULT_H -#define MIX_RESULT_H - -#include "mixtypes.h" - -typedef int32 MIX_RESULT; - -#define MIX_SUCCEEDED(result_code) ((((MIX_RESULT)(result_code)) & 0x80000000) == 0) - -typedef enum { - /** General success */ - MIX_RESULT_SUCCESS = (MIX_RESULT) 0x00000000, - MIX_RESULT_SUCCESS_CHG = (MIX_RESULT)0x00000001, - - /** Module specific success starting number */ - - /** Starting success number for Audio */ - MIX_RESULT_SUCCESS_AUDIO_START = (MIX_RESULT) 0x00010000, - /** Starting success number for Video */ - MIX_RESULT_SUCCESS_VIDEO_START = (MIX_RESULT) 0x00020000, - /** Starting success number for DRM */ - MIX_RESULT_SUCCESS_DRM_START = (MIX_RESULT) 0x00030000 -} MIX_SUCCESS_COMMON; - -typedef enum { - /** General failure */ - MIX_RESULT_FAIL = (MIX_RESULT) 0x80000000, - MIX_RESULT_NULL_PTR = (MIX_RESULT) 0x80000001, - MIX_RESULT_LPE_NOTAVAIL = (MIX_RESULT) 0X80000002, - MIX_RESULT_DIRECT_NOTAVAIL = (MIX_RESULT) 0x80000003, - MIX_RESULT_NOT_SUPPORTED = (MIX_RESULT) 0x80000004, - MIX_RESULT_CONF_MISMATCH = (MIX_RESULT) 0x80000005, - MIX_RESULT_RESUME_NEEDED = (MIX_RESULT) 0x80000007, - MIX_RESULT_WRONGMODE = (MIX_RESULT) 0x80000008, - MIX_RESULT_RESOURCES_NOTAVAIL = (MIX_RESULT)0x80000009, - MIX_RESULT_INVALID_PARAM = (MIX_RESULT)0x8000000a, - MIX_RESULT_ALREADY_INIT = (MIX_RESULT)0x8000000b, - MIX_RESULT_WRONG_STATE = (MIX_RESULT)0x8000000c, - MIX_RESULT_NOT_INIT = (MIX_RESULT)0x8000000d, - MIX_RESULT_NOT_CONFIGURED = (MIX_RESULT)0x8000000e, - MIX_RESULT_STREAM_NOTAVAIL = (MIX_RESULT)0x8000000f, - MIX_RESULT_CODEC_NOTAVAIL = (MIX_RESULT)0x80000010, - MIX_RESULT_CODEC_NOTSUPPORTED = (MIX_RESULT)0x80000011, - MIX_RESULT_INVALID_COUNT = (MIX_RESULT)0x80000012, - MIX_RESULT_NOT_ACP = (MIX_RESULT)0x80000013, - MIX_RESULT_INVALID_DECODE_MODE = (MIX_RESULT)0x80000014, 
- MIX_RESULT_INVALID_STREAM_NAME = (MIX_RESULT)0x80000015, - MIX_RESULT_NO_MEMORY = (MIX_RESULT)0x80000016, - MIX_RESULT_NEED_RETRY = (MIX_RESULT)0x80000017, - MIX_RESULT_SYSTEM_ERRNO = (MIX_RESULT)0x80000018, - MIX_RESULT_AM_REGISTER_FAIL = (MIX_RESULT)0x80000019, - MIX_RESULT_AM_UNREGISTER_FAIL = (MIX_RESULT)0x80000020, - MIX_RESULT_AM_NOTIFY_PAUSE_FAIL = (MIX_RESULT)0x80000021, - MIX_RESULT_AM_NOTIFY_RESUME_FAIL = (MIX_RESULT)0x80000022, - - /** Module specific errors starting number */ - - /** Starting error number for Audio */ - MIX_RESULT_ERROR_AUDIO_START = (MIX_RESULT) 0x80010000, - /** Starting error number for Video */ - MIX_RESULT_ERROR_VIDEO_START = (MIX_RESULT) 0x80020000, - /** Starting error number for DRM */ - MIX_RESULT_ERROR_DRM_START = (MIX_RESULT) 0x80030000 -} MIX_ERROR_COMMON; - -/* New success code should be added just above this line */ -// MIX_RESULT_IAM_DISABLED, /* 0x80000008 */ -// MIX_RESULT_IAM_NOTAVAIL, /* 0x80000009 */ -// MIX_RESULT_IAM_REG_FAILED, /* 0x8000000f */ - - - -#endif // MIX_RESULT_H diff --git a/mix_common/src/mixtypes.h b/mix_common/src/mixtypes.h deleted file mode 100644 index 330c623..0000000 --- a/mix_common/src/mixtypes.h +++ /dev/null @@ -1,49 +0,0 @@ -#ifndef __MIX_TYPES_H__ -#define __MIX_TYPES_H__ - -/* Provide type definitions for commonly used types. - * These are useful because a "int8" can be adjusted - * to be 1 byte (8 bits) on all platforms. Similarly and - * more importantly, "int32" can be adjusted to be - * 4 bytes (32 bits) on all platforms. - */ -typedef unsigned char uchar; -typedef unsigned short ushort; -typedef unsigned long ulong; -typedef unsigned int uint; - -typedef signed char int8; -typedef unsigned char uint8; -typedef signed short int16; -typedef unsigned short uint16; - -typedef signed int int32; -typedef unsigned int uint32; - -typedef signed long long int64; -typedef unsigned long long uint64; - -#define TRUE true -#define FALSE false - -#define return_if_fail(expr) do{ (void)0; }while(0) -#define return_val_if_fail(expr,val) do{ (void)0; }while(0) - -#define INT64_CONSTANT(val) (val##LL) - -#define INT64_FORMAT "lli" -#define UINT64_FORMAT "llu" - -#undef CLAMP -#define CLAMP(x, low, high) (((x) > (high)) ? (high) : (((x) < (low)) ? (low) : (x))) - -#ifndef NULL -#ifdef __cplusplus -#define NULL (0L) -#else /* !__cplusplus */ -#define NULL ((void*) 0) -#endif /* !__cplusplus */ -#endif - - -#endif /* __MIX_TYPES_H__ */ diff --git a/mix_video/AUTHORS b/mix_video/AUTHORS deleted file mode 100644 index db8081b..0000000 --- a/mix_video/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -linda.s.cline@intel.com diff --git a/mix_video/COPYING b/mix_video/COPYING deleted file mode 100644 index a4f852c..0000000 --- a/mix_video/COPYING +++ /dev/null @@ -1,26 +0,0 @@ -INTEL SOFTWARE LICENSE AGREEMENT (Alpha, Beta, Prototype Site License) - -IMPORTANT - READ BEFORE COPYING, INSTALLING OR USING. -Do not use or load this software and any associated materials (collectively, the “Software”) until you have carefully read the following terms and conditions. By loading or using the Software, you agree to the terms of this Agreement. If you do not wish to so agree, do not install or use the Software. - - -LICENSE. 
Intel hereby grants you a limited, nontransferable, non-sublicenseable, nonexclusive, royalty-free, fully-paid license under Intel’s copyrights to use the Software on your organization’s computers solely for your organization’s internal evaluation and testing in connection with Intel products designed for the Software, and you may make a reasonable number of  copies of the Software for internal use only, subject to these conditions: -1. You may not copy, modify, rent, sell, distribute, externally display, externally perform or transfer any part of the Software except as provided in this Agreement, and you agree to prevent unauthorized copying of the Software. -2. You may not reverse engineer, decompile, or disassemble the Software. -3. You may not sublicense the Software. -4. The Software may include portions offered on terms in addition to those set out here, as set out in a license accompanying those portions. -5. You may not subject the Software, in whole or in part, to any license obligations of Open Source Software including without limitation combining or distributing the Software with Open Source Software in a manner that subjects the Software or any portion of the Software provided by Intel hereunder to any license obligations of such Open Source Software. "Open Source Software" means any software that requires as a condition of use, modification and/or distribution of such software that such software or other software incorporated into, derived from or distributed with such software (a) be disclosed or distributed in source code form; or (b) be licensed by the user to third parties for the purpose of making and/or distributing derivative works; or (c) be redistributable at no charge. Open Source Software includes, without limitation, software licensed or distributed under any of the following licenses or distribution models, or licenses or distribution models substantially similar to any of the following: (a) GNU’s General Public License (GPL) or Lesser/Library GPL (LGPL), (b) the Artistic License (e.g., PERL), (c) the Mozilla Public License, (d) the Netscape Public License, (e) the Sun Community Source License (SCSL), (f) the Sun Industry Source License (SISL), (g) the Apache Software license and (h) the Common Public License (CPL). -OWNERSHIP OF SOFTWARE AND COPYRIGHTS. Title to all copies of the Software remains with Intel or its suppliers. The Software is copyrighted and protected by the laws of the United States and other countries, and international treaty provisions. You may not remove any copyright notices from the Software. Intel may make changes to the Software, or to items referenced therein, at any time without notice, but is not obligated to support, update, upgrade or provide training for the Software. Except as otherwise expressly provided, Intel grants no express or implied right under Intel patents, copyrights, trademarks, or other intellectual property rights. -EXCLUSION OF WARRANTIES . THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY EXPRESS OR IMPLIED WARRANTY OF ANY KIND INCLUDING WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, OR FITNESS FOR A PARTICULAR PURPOSE. Intel does not warrant or assume responsibility for the accuracy or completeness of any information, text, graphics, links or other items contained within the Software. -LIMITATION OF LIABILITY. 
IN NO EVENT SHALL INTEL OR ITS SUPPLIERS BE LIABLE FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, LOST PROFITS, BUSINESS INTERRUPTION OR LOST INFORMATION) ARISING OUT OF THE USE OF OR INABILITY TO USE THE SOFTWARE, EVEN IF INTEL HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. SOME JURISDICTIONS PROHIBIT EXCLUSION OR LIMITATION OF LIABILITY FOR IMPLIED WARRANTIES OR CONSEQUENTIAL OR INCIDENTAL DAMAGES, SO THE ABOVE LIMITATION MAY NOT APPLY TO YOU. YOU MAY ALSO HAVE OTHER LEGAL RIGHTS THAT VARY FROM JURISDICTION TO JURISDICTION. THE SOFTWARE LICENSED HEREUNDER IS NOT DESIGNED OR INTENDED FOR USE IN ANY APPLICATION IN WHICH THE FAILURE OF THE SOFTWARE COULD LEAD TO PERSONAL INJURY OR DEATH. YOU SHALL INDEMNIFY AND HOLD INTEL AND THE INTEL PARTIES HARMLESS AGAINST ALL CLAIMS, COSTS, DAMAGES, AND EXPENSES, AND REASONABLE ATTORNEY FEES ARISING OUT OF, DIRECTLY OR INDIRECTLY, THE UNINTENDED USE OF THE SOFTWARE AND ANY CLAIM OF PRODUCT LIABILITY, PERSONAL INJURY OR DEATH ASSOCIATED WITH ANY UNINTENDED USE, EVEN IF SUCH CLAIM ALLEGES THAT AN INTEL PARTY WAS NEGLIGENT REGARDING THE DESIGN OR MANUFACTURE OF THE SOFTWARE. THE LIMITED REMEDIES, WARRANTY DISCLAIMER AND LIMITED LIABILITY ARE FUNDAMENTAL ELEMENTS OF THE BASIS OF THE BARGAIN BETWEEN INTEL AND YOU. INTEL WOULD NOT BE ABLE TO PROVIDE THE SOFTWARE WITHOUT SUCH LIMITATIONS. -TERMINATION OF THIS AGREEMENT. Intel may terminate this Agreement at any time if you violate its terms. Upon termination, you will immediately destroy the Software or return all copies of the Software to Intel (including providing certification of such destruction back to Intel). In the event of termination of this Agreement, all licenses granted to you hereunder shall immediately terminate. -APPLICABLE LAWS. Claims arising under this Agreement shall be governed by the laws of Delaware, excluding its principles of conflict of laws and the United Nations Convention on Contracts for the Sale of Goods. You may not export the Software in violation of applicable export laws and regulations. -GOVERNMENT RESTRICTED RIGHTS. The Software is provided with "RESTRICTED RIGHTS." Use, duplication or disclosure by the government is subject to restrictions as set forth in FAR52.227-14 and DFAR252.227-7013 et seq. or their successors. Use of the Software by the government constitutes acknowledgment of Intel's proprietary rights therein. Contractor or Manufacturer is Intel Corporation, 2200 Mission College Blvd., Santa Clara, CA 95052. -CONFIDENTIALITY. You shall not disclose the terms or existence of this Agreement or use Intel's name in any publications, advertisements, or other announcements without Intel's prior written consent. You do not have any rights to use any Intel trademarks or logos. -ASSIGNMENT. You may not delegate, assign or transfer this Agreement, the license(s) granted or any of your rights or duties hereunder, expressly, by implication, by operation of law, by way of merger (regardless of whether you are the surviving entity) or acquisition, or otherwise and any attempt to do so, without Intel’s express prior written consent, shall be null and void. Intel may assign this Agreement, and its rights and obligations hereunder, in its sole discretion. -ENTIRE AGREEMENT. The terms and conditions of this Agreement constitutes the entire agreement between the parties with respect to the subject matter hereof, and merges and supersedes all prior, contemporaneous agreements, understandings, negotiations and discussions. 
Neither of the parties hereto shall be bound by any conditions, definitions, warranties, understandings or representations with respect to the subject matter hereof other than as expressly provided for herein. Intel is not obligated under any other agreements unless they are in writing and signed by an authorized representative of Intel. -NO AGENCY Nothing contained herein shall be construed as creating any agency, employment relationship, partnership, principal-agent or other form of joint enterprise between the parties. -SEVERABILITY In the event that any provision of this Agreement shall be unenforceable or invalid under any applicable law or be so held by an applicable court decision, such unenforceability or invalidity shall not render this Agreement unenforceable or invalid as a whole, and, in such event, such provision shall be changed and interpreted so as to best accomplish the objectives of such unenforceable or invalid provision within the limits of applicable law or applicable court decisions. -WAIVER The failure of either party to require performance by the other party of any provision hereof shall not affect the full right to require such performance at any time thereafter; nor shall the waiver by either party of a breach of any provision hereof be taken or held to be a waiver of the provision itself. -CONTROLLING LANGUAGE. Translated versions of this Agreement may be provided in local languages for informational purposes only, provided however, that the English language version of this Agreement shall be controlling in all jurisdictions. - diff --git a/mix_video/ChangeLog b/mix_video/ChangeLog deleted file mode 100644 index 6ac8f39..0000000 --- a/mix_video/ChangeLog +++ /dev/null @@ -1,105 +0,0 @@ -2010-11-18 Andy Qiu - * Changed version number to 0.1.24 - -2010-11-17 Weian Chen - * Support dynamic frame rate change in MI-X - -2010-11-16 Weian Chen - * Change to use one API to set AIR parameters - * Support to set AIP dynamically - -2010-11-15 Weian Chen - * Support to set slice number for I and P frame seperately - -2010-11-12 Tao Tao - * Added new error codes to MI-X video and changed some return values in mixvideoformat_h264.c - -2010-11-12 Andy Qiu - * Changed version number to 0.1.23 - -2010-11-11 Weian Chen - * Add new feature (VCM, MTU, etc) - -2010-11-10 Andy Qiu - * Support color matrix, video range, aspect ratio. - * Support h264 dynamic stream detection - * Support H264 RTSP streaming - * Changed version number to 0.1.22 - -2010-10-25 Andy Qiu - * H.264 error robust improvement - * MPEG-4/H.263 partial frame support - * MPEG-4/H.263 RTSP stream support - * Changed version number to 0.1.21 - -2010-09-29 Andy Qiu - * Supported FLV playback - * Re-factored frame manager - * Changed version number to 0.1.20 - -2010-09-15 Tao Tao - - * Merged the changes for Android - -2010-09-02 Andy Qiu - * Refactored h.264 decoder to support partial frame and byte stream handling and delayed decoder configuration. - * Supported MPEG-4 video decoding with "video/x-xvid" mime type. - * Rolled version number to 0.1.19 - -2010-07-29 Andy Qiu - * use high profile for baseline H.264 contents - * Rolled version number to 0.1.18 - -2010-06-30 Tao Tao - - * Added G_BEGIN_DECLS and G_END_DECLS to all header files - -2010-06-09 Weian Chen - - * convert DOS line end format to Unix - -2010-06-04 Tao Tao - - * Rolled version number to 0.1.17 - -2010-06-01 Tao Tao - - * Added va_display to MixVideoFrame - * Made changes in several MI-X video source files to reflect the MixVideoFrame change. 
- -2010-05-28 Weian Chen - - * Make changes to H.264/H.263/MPEG-4 encode to align with the data structure change of coded buffer in LibVA - * Link encode to libva-tpi - -2010-05-07 Tao Tao - - * Added gtk-doc support to the project - -2010-05-03 Tao Tao - - * Removed mixdrmparams.h/c files and made related changes. - -2010-04-21 Chris Pearson - - * Added null-pointer check to mixvideoformat_h264 to fix shutdown crash. - -2010-03-25 Tao Tao - - * Changed mixvideo and mixframemanger to handle last frame correctly. - -2010-03-22 Tao Tao - - * Rolled version number to 0.1.16 - - -2010-03-09 Weian Chen - - * lots of defects fix for mixvideo encode - - -2010-03-09 Weian Chen - - * Change to use Cleanup convention for mixvideoenc (all mixvideformatenc derived Object, H.264/MPEG-4:2/preview) - * remove some unnecessary comments - diff --git a/mix_video/INSTALL b/mix_video/INSTALL deleted file mode 100644 index 50e1648..0000000 --- a/mix_video/INSTALL +++ /dev/null @@ -1,4 +0,0 @@ -run the following to build and install: -./autogen.sh -./configure -make diff --git a/mix_video/NEWS b/mix_video/NEWS deleted file mode 100644 index 139597f..0000000 --- a/mix_video/NEWS +++ /dev/null @@ -1,2 +0,0 @@ - - diff --git a/mix_video/README b/mix_video/README deleted file mode 100644 index 2bcf017..0000000 --- a/mix_video/README +++ /dev/null @@ -1,2 +0,0 @@ -MIX Video is an user library interface for various hardware video codecs available on the platform. - diff --git a/mix_video/docs/readme b/mix_video/docs/readme deleted file mode 100644 index 56b1785..0000000 --- a/mix_video/docs/readme +++ /dev/null @@ -1,17 +0,0 @@ -How to generate gtk-doc? - - -By default, gtk-doc generation is disabled in mix_video. To generate gtk-docs - - -01. in mix_video/autogen.sh file, uncomment line #gtkdocize ... -02. in mix_video/Makefile.am, uncomment line #SUBDIRS += docs -03. in mix_video/configure.ac, uncomment #GTK_DOC_CHECK(1.9) and the commented block AC_CONFIG_FILES ... of gtkdoc -04. sh autogen.sh -05. ./configure --enable-gtk-doc -06. make -07. go to directory mix_video/docs/reference/MixVideo, type command -08. cp BackupMixVideo-docs.sgml MixVideo-docs.sgml -09. make -10. use browser to open mix_video/docs/reference/MixVideo/html/index.html - diff --git a/mix_video/docs/reference/MixVideo/BackupMixVideo-docs.sgml b/mix_video/docs/reference/MixVideo/BackupMixVideo-docs.sgml deleted file mode 100644 index 2f23d4e..0000000 --- a/mix_video/docs/reference/MixVideo/BackupMixVideo-docs.sgml +++ /dev/null @@ -1,53 +0,0 @@ - - -]> - - - Mi-X Video Reference Manual - - for MI-X Video 0.1 - - - - - - MI-X Video API - - - - - - - - - - - - - - - - - - - - - - - Object Hierarchy - - - - API Index - - - - diff --git a/mix_video/docs/reference/MixVideo/MixVideo-sections.txt b/mix_video/docs/reference/MixVideo/MixVideo-sections.txt deleted file mode 100644 index dc55540..0000000 --- a/mix_video/docs/reference/MixVideo/MixVideo-sections.txt +++ /dev/null @@ -1,452 +0,0 @@ -
-mixvideo -MixVideo -MixVideo -mix_video_new -mix_video_ref -mix_video_unref -mix_video_get_version -mix_video_initialize -mix_video_deinitialize -mix_video_configure -mix_video_get_config -mix_video_decode -mix_video_get_frame -mix_video_release_frame -mix_video_render -mix_video_encode -mix_video_flush -mix_video_eos -mix_video_get_state -mix_video_get_mixbuffer -mix_video_release_mixbuffer -mix_video_get_max_coded_buffer_size - -MIX_VIDEO -MIX_IS_VIDEO -MIX_TYPE_VIDEO -mix_video_get_type -MIX_VIDEO_CLASS -MIX_IS_VIDEO_CLASS -MIX_VIDEO_GET_CLASS -
- -
-mixvideoinitparams -MixVideoInitParams -MixVideoInitParams -mix_videoinitparams_new -mix_videoinitparams_ref -mix_videoinitparams_unref -mix_videoinitparams_set_display -mix_videoinitparams_get_display - -MIX_VIDEOINITPARAMS -MIX_IS_VIDEOINITPARAMS -MIX_TYPE_VIDEOINITPARAMS -mix_videoinitparams_get_type -MIX_VIDEOINITPARAMS_CLASS -MIX_IS_VIDEOINITPARAMS_CLASS -MIX_VIDEOINITPARAMS_GET_CLASS -
- -
-mixdrmparams -MixDrmParams -MixDrmParams -mix_drmparams_new -mix_drmparams_ref -mix_drmparams_unref - -MIX_DRMPARAMS -MIX_IS_DRMPARAMS -MIX_TYPE_DRMPARAMS -mix_drmparams_get_type -MIX_DRMPARAMS_CLASS -MIX_IS_DRMPARAMS_CLASS -MIX_DRMPARAMS_GET_CLASS -
- -
-mixdisplay -MIX_DISPLAY_CLASS -MIX_DISPLAY_CAST -MixDisplayDupFunction -MixDisplayCopyFunction -MixDisplayFinalizeFunction -MixDisplayEqualFunction -MIX_VALUE_HOLDS_DISPLAY -MIX_DISPLAY_REFCOUNT -MIX_DISPLAY_REFCOUNT_VALUE -MixDisplay -MixDisplay -mix_display_new -mix_display_copy -mix_display_ref -mix_display_unref -mix_display_replace -mix_display_dup -mix_display_equal -MIX_TYPE_PARAM_DISPLAY -MIX_IS_PARAM_SPEC_DISPLAY -MIX_PARAM_SPEC_DISPLAY -MixParamSpecDisplay -mix_param_spec_display_get_type -mix_param_spec_display -mix_value_set_display -mix_value_take_display -mix_value_get_display -mix_value_dup_display - -MIX_DISPLAY -MIX_IS_DISPLAY -MIX_TYPE_DISPLAY -mix_display_get_type -MIX_IS_DISPLAY_CLASS -MIX_DISPLAY_GET_CLASS -
- -
-mixdisplayx11 -MixDisplayX11 -MixDisplayX11 -mix_displayx11_new -mix_displayx11_ref -mix_displayx11_unref -mix_displayx11_set_display -mix_displayx11_get_display -mix_displayx11_set_drawable -mix_displayx11_get_drawable - -MIX_DISPLAYX11 -MIX_IS_DISPLAYX11 -MIX_TYPE_DISPLAYX11 -mix_displayx11_get_type -MIX_DISPLAYX11_CLASS -MIX_IS_DISPLAYX11_CLASS -MIX_DISPLAYX11_GET_CLASS -
- -
-mixbuffer -MixBufferCallback -MixBuffer -MixBuffer -mix_buffer_new -mix_buffer_ref -mix_buffer_unref -mix_buffer_set_data - -MIX_BUFFER -MIX_IS_BUFFER -MIX_TYPE_BUFFER -mix_buffer_get_type -MIX_BUFFER_CLASS -MIX_IS_BUFFER_CLASS -MIX_BUFFER_GET_CLASS -
- -
-mixvideoframe -MixVideoFrame -MixVideoFrame -mix_videoframe_new -mix_videoframe_ref -mix_videoframe_unref -mix_videoframe_set_frame_id -mix_videoframe_get_frame_id -mix_videoframe_set_ci_frame_idx -mix_videoframe_get_ci_frame_idx -mix_videoframe_set_timestamp -mix_videoframe_get_timestamp -mix_videoframe_set_discontinuity -mix_videoframe_get_discontinuity - -MIX_VIDEOFRAME -MIX_IS_VIDEOFRAME -MIX_TYPE_VIDEOFRAME -mix_videoframe_get_type -MIX_VIDEOFRAME_CLASS -MIX_IS_VIDEOFRAME_CLASS -MIX_VIDEOFRAME_GET_CLASS -
- -
-mixvideoconfigparams -MixVideoConfigParams -MixVideoConfigParams -mix_videoconfigparams_new -mix_videoconfigparams_ref -mix_videoconfigparams_unref - -MIX_VIDEOCONFIGPARAMS -MIX_IS_VIDEOCONFIGPARAMS -MIX_TYPE_VIDEOCONFIGPARAMS -mix_videoconfigparams_get_type -MIX_VIDEOCONFIGPARAMS_CLASS -MIX_IS_VIDEOCONFIGPARAMS_CLASS -MIX_VIDEOCONFIGPARAMS_GET_CLASS -
- -
-mixvideoconfigparamsdec -MixVideoConfigParamsDec -MixVideoConfigParamsDec -mix_videoconfigparamsdec_new -mix_videoconfigparamsdec_ref -mix_videoconfigparamsdec_unref -mix_videoconfigparamsdec_set_frame_order_mode -mix_videoconfigparamsdec_get_frame_order_mode -mix_videoconfigparamsdec_set_header -mix_videoconfigparamsdec_get_header -mix_videoconfigparamsdec_set_mime_type -mix_videoconfigparamsdec_get_mime_type -mix_videoconfigparamsdec_set_frame_rate -mix_videoconfigparamsdec_get_frame_rate -mix_videoconfigparamsdec_set_picture_res -mix_videoconfigparamsdec_get_picture_res -mix_videoconfigparamsdec_set_raw_format -mix_videoconfigparamsdec_get_raw_format -mix_videoconfigparamsdec_set_rate_control -mix_videoconfigparamsdec_get_rate_control -mix_videoconfigparamsdec_set_buffer_pool_size -mix_videoconfigparamsdec_get_buffer_pool_size -mix_videoconfigparamsdec_set_extra_surface_allocation -mix_videoconfigparamsdec_get_extra_surface_allocation - -MIX_VIDEOCONFIGPARAMSDEC -MIX_IS_VIDEOCONFIGPARAMSDEC -MIX_TYPE_VIDEOCONFIGPARAMSDEC -mix_videoconfigparamsdec_get_type -MIX_VIDEOCONFIGPARAMSDEC_CLASS -MIX_IS_VIDEOCONFIGPARAMSDEC_CLASS -MIX_VIDEOCONFIGPARAMSDEC_GET_CLASS -
- -
-mixvideoconfigparamsdec_vc1 -MixVideoConfigParamsDecVC1 -MixVideoConfigParamsDecVC1 -mix_videoconfigparamsdec_vc1_new -mix_videoconfigparamsdec_vc1_unref - -MIX_VIDEOCONFIGPARAMSDEC_VC1 -MIX_IS_VIDEOCONFIGPARAMSDEC_VC1 -MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1 -mix_videoconfigparamsdec_vc1_get_type -MIX_VIDEOCONFIGPARAMSDEC_VC1_CLASS -MIX_IS_VIDEOCONFIGPARAMSDEC_VC1_CLASS -MIX_VIDEOCONFIGPARAMSDEC_VC1_GET_CLASS -
- -
-mixvideoconfigparamsdec_h264 -MixVideoConfigParamsDecH264 -MixVideoConfigParamsDecH264 -mix_videoconfigparamsdec_h264_new -mix_videoconfigparamsdec_h264_unref - -MIX_VIDEOCONFIGPARAMSDEC_H264 -MIX_IS_VIDEOCONFIGPARAMSDEC_H264 -MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264 -mix_videoconfigparamsdec_h264_get_type -MIX_VIDEOCONFIGPARAMSDEC_H264_CLASS -MIX_IS_VIDEOCONFIGPARAMSDEC_H264_CLASS -MIX_VIDEOCONFIGPARAMSDEC_H264_GET_CLASS -
- -
-mixvideoconfigparamsdec_mp42 -MixVideoConfigParamsDecMP42 -MixVideoConfigParamsDecMP42 -mix_videoconfigparamsdec_mp42_new -mix_videoconfigparamsdec_mp42_unref -mix_videoconfigparamsdec_mp42_set_mpegversion -mix_videoconfigparamsdec_mp42_get_mpegversion -mix_videoconfigparamsdec_mp42_set_divxversion -mix_videoconfigparamsdec_mp42_get_divxversion - -MIX_VIDEOCONFIGPARAMSDEC_MP42 -MIX_IS_VIDEOCONFIGPARAMSDEC_MP42 -MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42 -mix_videoconfigparamsdec_mp42_get_type -MIX_VIDEOCONFIGPARAMSDEC_MP42_CLASS -MIX_IS_VIDEOCONFIGPARAMSDEC_MP42_CLASS -MIX_VIDEOCONFIGPARAMSDEC_MP42_GET_CLASS -
- - -
-mixvideoconfigparamsenc -MixVideoConfigParamsEnc -MixVideoConfigParamsEnc -mix_videoconfigparamsenc_new -mix_videoconfigparamsenc_ref -mix_videoconfigparamsenc_unref -mix_videoconfigparamsenc_set_mime_type -mix_videoconfigparamsenc_get_mime_type -mix_videoconfigparamsenc_set_frame_rate -mix_videoconfigparamsenc_get_frame_rate -mix_videoconfigparamsenc_set_picture_res -mix_videoconfigparamsenc_get_picture_res -mix_videoconfigparamsenc_set_encode_format -mix_videoconfigparamsenc_get_encode_format -mix_videoconfigparamsenc_set_bit_rate -mix_videoconfigparamsenc_get_bit_rate -mix_videoconfigparamsenc_set_init_qp -mix_videoconfigparamsenc_get_init_qp -mix_videoconfigparamsenc_set_min_qp -mix_videoconfigparamsenc_get_min_qp -mix_videoconfigparamsenc_set_intra_period -mix_videoconfigparamsenc_get_intra_period -mix_videoconfigparamsenc_set_buffer_pool_size -mix_videoconfigparamsenc_get_buffer_pool_size -mix_videoconfigparamsenc_set_share_buf_mode -mix_videoconfigparamsenc_get_share_buf_mode -mix_videoconfigparamsenc_set_ci_frame_info -mix_videoconfigparamsenc_get_ci_frame_info -mix_videoconfigparamsenc_set_drawable -mix_videoconfigparamsenc_get_drawable -mix_videoconfigparamsenc_set_need_display -mix_videoconfigparamsenc_get_need_display -mix_videoconfigparamsenc_set_rate_control -mix_videoconfigparamsenc_get_rate_control -mix_videoconfigparamsenc_set_raw_format -mix_videoconfigparamsenc_get_raw_format -mix_videoconfigparamsenc_set_profile -mix_videoconfigparamsenc_get_profile - -MIX_VIDEOCONFIGPARAMSENC -MIX_IS_VIDEOCONFIGPARAMSENC -MIX_TYPE_VIDEOCONFIGPARAMSENC -mix_videoconfigparamsenc_get_type -MIX_VIDEOCONFIGPARAMSENC_CLASS -MIX_IS_VIDEOCONFIGPARAMSENC_CLASS -MIX_VIDEOCONFIGPARAMSENC_GET_CLASS -
- - -
-mixvideoconfigparamsenc_h264 -MixVideoConfigParamsEncH264 -MixVideoConfigParamsEncH264 -mix_videoconfigparamsenc_h264_new -mix_videoconfigparamsenc_h264_unref -mix_videoconfigparamsenc_h264_set_bus -mix_videoconfigparamsenc_h264_get_bus -mix_videoconfigparamsenc_h264_set_dlk -mix_videoconfigparamsenc_h264_get_dlk -mix_videoconfigparamsenc_h264_set_slice_num -mix_videoconfigparamsenc_h264_get_slice_num -mix_videoconfigparamsenc_h264_set_delimiter_type -mix_videoconfigparamsenc_h264_get_delimiter_type - -MIX_VIDEOCONFIGPARAMSENC_H264 -MIX_IS_VIDEOCONFIGPARAMSENC_H264 -MIX_TYPE_VIDEOCONFIGPARAMSENC_H264 -mix_videoconfigparamsenc_h264_get_type -MIX_VIDEOCONFIGPARAMSENC_H264_CLASS -MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS -MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS -
- -
-mixvideoconfigparamsenc_mpeg4 -MixVideoConfigParamsEncMPEG4 -MixVideoConfigParamsEncMPEG4 -mix_videoconfigparamsenc_mpeg4_new -mix_videoconfigparamsenc_mpeg4_unref -mix_videoconfigparamsenc_mpeg4_set_dlk -mix_videoconfigparamsenc_mpeg4_get_dlk -mix_videoconfigparamsenc_mpeg4_set_profile_level -mix_videoconfigparamsenc_mpeg4_get_profile_level -mix_videoconfigparamsenc_mpeg4_set_fixed_vti -mix_videoconfigparamsenc_mpeg4_get_fixed_vti - -MIX_VIDEOCONFIGPARAMSENC_MPEG4 -MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 -MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4 -mix_videoconfigparamsenc_mpeg4_get_type -MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS -MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS -MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS -
- -
-mixvideodecodeparams -MixVideoDecodeParams -MixVideoDecodeParams -mix_videodecodeparams_new -mix_videodecodeparams_ref -mix_videodecodeparams_unref -mix_videodecodeparams_set_timestamp -mix_videodecodeparams_get_timestamp -mix_videodecodeparams_set_discontinuity -mix_videodecodeparams_get_discontinuity - -MIX_VIDEODECODEPARAMS -MIX_IS_VIDEODECODEPARAMS -MIX_TYPE_VIDEODECODEPARAMS -mix_videodecodeparams_get_type -MIX_VIDEODECODEPARAMS_CLASS -MIX_IS_VIDEODECODEPARAMS_CLASS -MIX_VIDEODECODEPARAMS_GET_CLASS -
- -
-mixvideoencodeparams -MixVideoEncodeParams -MixVideoEncodeParams -mix_videoencodeparams_new -mix_videoencodeparams_ref -mix_videoencodeparams_unref - -MIX_VIDEOENCODEPARAMS -MIX_IS_VIDEOENCODEPARAMS -MIX_TYPE_VIDEOENCODEPARAMS -mix_videoencodeparams_get_type -MIX_VIDEOENCODEPARAMS_CLASS -MIX_IS_VIDEOENCODEPARAMS_CLASS -MIX_VIDEOENCODEPARAMS_GET_CLASS -
- -
-mixvideorenderparams -MixVideoRenderParams -MixVideoRenderParams -mix_videorenderparams_new -mix_videorenderparams_ref -mix_videorenderparams_unref -mix_videorenderparams_set_display -mix_videorenderparams_get_display -mix_videorenderparams_set_src_rect -mix_videorenderparams_get_src_rect -mix_videorenderparams_set_dest_rect -mix_videorenderparams_get_dest_rect -mix_videorenderparams_set_clipping_rects -mix_videorenderparams_get_clipping_rects - -MIX_VIDEORENDERPARAMS -MIX_IS_VIDEORENDERPARAMS -MIX_TYPE_VIDEORENDERPARAMS -mix_videorenderparams_get_type -MIX_VIDEORENDERPARAMS_CLASS -MIX_IS_VIDEORENDERPARAMS_CLASS -MIX_VIDEORENDERPARAMS_GET_CLASS -
- -
-mixvideodef -MIX_VIDEO_ERROR_CODE -MixCodecMode -MixFrameOrderMode -MixIOVec -MixRect -MixState -MixRawTargetFormat -MixEncodeTargetFormat -MixRateControl -MixProfile -MixDelimiterType -
- diff --git a/mix_video/docs/reference/MixVideo/MixVideo.types b/mix_video/docs/reference/MixVideo/MixVideo.types deleted file mode 100644 index da3937b..0000000 --- a/mix_video/docs/reference/MixVideo/MixVideo.types +++ /dev/null @@ -1,22 +0,0 @@ -#include -#include -mix_video_get_type -mix_videoconfigparamsenc_mpeg4_get_type -mix_videoconfigparams_get_type -mix_videoconfigparamsenc_h264_get_type -mix_videoconfigparamsdec_get_type -mix_videoconfigparamsenc_get_type -mix_videorenderparams_get_type -mix_videoframe_get_type -mix_videodecodeparams_get_type -mix_videoconfigparamsdec_mp42_get_type -mix_buffer_get_type -mix_displayx11_get_type -mix_videoencodeparams_get_type -mix_videoconfigparamsdec_vc1_get_type -mix_videoinitparams_get_type -mix_drmparams_get_type -mix_display_get_type -mix_param_spec_display_get_type -mix_videoconfigparamsdec_h264_get_type -mix_params_get_type diff --git a/mix_video/docs/reference/MixVideo/html/MixBuffer.html b/mix_video/docs/reference/MixVideo/html/MixBuffer.html deleted file mode 100644 index 8f50f9e..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixBuffer.html +++ /dev/null @@ -1,227 +0,0 @@ - - - - -MixBuffer - - - - - - - - - - - - -

MixBuffer

-

MixBuffer — MI-X Video Buffer Parameters

-
-
-

Synopsis

-
-void                (*MixBufferCallback)                (ulong token,
-                                                         uchar *data);
-                    MixBuffer;
-MixBuffer *         mix_buffer_new                      (void);
-MixBuffer *         mix_buffer_ref                      (MixBuffer *mix);
-void                mix_buffer_unref                    (MixBuffer *mix);
-MIX_RESULT          mix_buffer_set_data                 (MixBuffer *obj,
-                                                         uchar *data,
-                                                         uint size,
-                                                         ulong token,
-                                                         MixBufferCallback callback);
-
-
-
-

Object Hierarchy

-
-  MixParams
-   +----MixBuffer
-
-
-
-

Description

-

-

-

-MixBuffer objects are used to wrap input data buffers in a reference counted object as -described in the buffer model section. Data buffers themselves are allocated by the -App/MMF. MixBuffer objects are allocated by MixVideo in a pool and retrieved by the -application using mix_video_get_mixbuffer(). The application will wrap a data buffer -in a MixBuffer object and pass it to mix_video_decode() or mix_video_encode(). -

-

-

-

-The MixBuffer objects will be released by MixVideo when they are no longer needed -for the decode or encode operation. The App/MMF will also release the MixBuffer -object after use. When the MixBuffer is completely released, the callback function -registered in the MixBuffer will be called (allowing the App/MMF to release -data buffers as necessary). -
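For illustration, a minimal callback sketch follows; it assumes the application allocated the coded data buffer with malloc() and uses the token only for its own bookkeeping (both are application policy, not part of the MixVideo API).

#include <stdlib.h>
#include <mixvideo.h>   /* assumed umbrella header pulling in the MixBuffer types */

/* Matches the MixBufferCallback signature documented below. Called once
 * both the App/MMF and MixVideo have released the wrapping MixBuffer. */
static void my_buffer_released(ulong token, uchar *data)
{
    (void)token;   /* application-defined bookkeeping value */
    free(data);    /* assumes the app malloc'd this coded data buffer */
}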

-
-
-

Details

-
-

MixBufferCallback ()

-
void                (*MixBufferCallback)                (ulong token,
-                                                         uchar *data);
-
-
-
-

MixBuffer

-
typedef struct {
-	MixParams parent;
-
-	
-	/* Pointer to coded data buffer */
-	uchar *data;
-	
-	/* Size of coded data buffer */
-	uint size;
-	
-	/* Token that will be passed to 
-	 * the callback function. Can be 
-	 * used by the application for 
-	 * any information to be associated 
-	 * with this coded data buffer, 
-	 * such as a pointer to a structure 
-	 * belonging to the application. */
-	ulong token;
-	
-	/* callback function pointer */
-	MixBufferCallback callback;
-} MixBuffer;
-
-

-MI-X Buffer Parameter object

-
-
-
-

mix_buffer_new ()

-
MixBuffer *         mix_buffer_new                      (void);
-

-Use this method to create a new instance of MixBuffer.

-
-- - - - -

returns :

A newly allocated instance of MixBuffer -
-
-
-
-

mix_buffer_ref ()

-
MixBuffer *         mix_buffer_ref                      (MixBuffer *mix);
-

-Increment the reference count of the object.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixBuffer instance where reference count has been increased. -
-
-
-
-

mix_buffer_unref ()

-
void                mix_buffer_unref                    (MixBuffer *mix);
-

-Decrement reference count of the object.

-
-- - - - -

mix :

object to unref. -
-
-
-
-

mix_buffer_set_data ()

-
MIX_RESULT          mix_buffer_set_data                 (MixBuffer *obj,
-                                                         uchar *data,
-                                                         uint size,
-                                                         ulong token,
-                                                         MixBufferCallback callback);
-

-Set the data buffer, size, token, and callback function.

-
-- - - - - - - - - - - - - - - - - - - - - - - - - - -

obj :

MixBuffer object -

data :

data buffer -

size :

data buffer size -

token :

token -

callback :

callback function pointer -

returns :

Common Video Error Return Codes -
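A usage sketch under the same assumptions as the callback above (my_buffer_released() as defined in the Description section, an initialized and configured MixVideo instance, and application-owned coded input data):

static MIX_RESULT submit_coded_data(MixVideo *mix, uchar *data, uint size,
                                    MixVideoDecodeParams *decode_params)
{
    MixBuffer *bufin[1] = { NULL };
    MIX_RESULT ret = mix_video_get_mixbuffer(mix, &bufin[0]);
    if (ret != MIX_RESULT_SUCCESS)
        return ret;

    /* Wrap the app-owned buffer; the callback fires when fully released. */
    mix_buffer_set_data(bufin[0], data, size, (ulong)0, my_buffer_released);

    ret = mix_video_decode(mix, bufin, 1, decode_params);

    /* App/MMF-side release; MixVideo keeps its own reference as needed. */
    mix_video_release_mixbuffer(mix, bufin[0]);
    return ret;
}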
-
-
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixDisplay.html b/mix_video/docs/reference/MixVideo/html/MixDisplay.html deleted file mode 100644 index cfce197..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixDisplay.html +++ /dev/null @@ -1,591 +0,0 @@ - - - - -MixDisplay - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixDisplay

-

MixDisplay — Lightweight Base Object for MI-X Video Display

-
-
-

Synopsis

-
-#define             MIX_DISPLAY_CLASS                   (klass)
-#define             MIX_DISPLAY_CAST                    (obj)
-MixDisplay *        (*MixDisplayDupFunction)            (const MixDisplay *obj);
-bool            (*MixDisplayCopyFunction)           (MixDisplay *target,
-                                                         const MixDisplay *src);
-void                (*MixDisplayFinalizeFunction)       (MixDisplay *obj);
-bool            (*MixDisplayEqualFunction)          (MixDisplay *first,
-                                                         MixDisplay *second);
-#define             MIX_VALUE_HOLDS_DISPLAY             (value)
-#define             MIX_DISPLAY_REFCOUNT                (obj)
-#define             MIX_DISPLAY_REFCOUNT_VALUE          (obj)
-                    MixDisplay;
-MixDisplay *        mix_display_new                     ();
-bool            mix_display_copy                    (MixDisplay *target,
-                                                         const MixDisplay *src);
-MixDisplay *        mix_display_ref                     (MixDisplay *obj);
-void                mix_display_unref                   (MixDisplay *obj);
-void                mix_display_replace                 (MixDisplay **olddata,
-                                                         MixDisplay *newdata);
-MixDisplay *        mix_display_dup                     (const MixDisplay *obj);
-bool            mix_display_equal                   (MixDisplay *first,
-                                                         MixDisplay *second);
-#define             MIX_TYPE_PARAM_DISPLAY
-#define             MIX_IS_PARAM_SPEC_DISPLAY           (pspec)
-#define             MIX_PARAM_SPEC_DISPLAY              (pspec)
-                    MixParamSpecDisplay;
-GType               mix_param_spec_display_get_type     (void);
-GParamSpec *        mix_param_spec_display              (const char *name,
-                                                         const char *nick,
-                                                         const char *blurb,
-                                                         GType object_type,
-                                                         GParamFlags flags);
-void                mix_value_set_display               (GValue *value,
-                                                         MixDisplay *obj);
-void                mix_value_take_display              (GValue *value,
-                                                         MixDisplay *obj);
-MixDisplay *        mix_value_get_display               (const GValue *value);
-MixDisplay *        mix_value_dup_display               (const GValue *value);
-
-
-
-

Object Hierarchy

-
-  MixDisplay
-   +----MixDisplayX11
-
-
-
-

Description

-

-

-
-
-

Details

-
-

MIX_DISPLAY_CLASS()

-
#define MIX_DISPLAY_CLASS(klass)  (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DISPLAY, MixDisplayClass))
-
-
-
-
-

MIX_DISPLAY_CAST()

-
#define MIX_DISPLAY_CAST(obj)     ((MixDisplay*)(obj))
-
-
-
-
-

MixDisplayDupFunction ()

-
MixDisplay *        (*MixDisplayDupFunction)            (const MixDisplay *obj);
-

-Virtual function prototype for methods that create a duplicate of an instance.

-
-- - - - - - - - - - -

obj :

Display to duplicate -

returns :

reference to cloned instance. -
-
-
-
-

MixDisplayCopyFunction ()

-
bool            (*MixDisplayCopyFunction)           (MixDisplay *target,
-                                                         const MixDisplay *src);
-

-Virtual function prototype for methods that copy one instance to another.

-
-- - - - - - - - - - - - - - -

target :

target of the copy -

src :

source of the copy -

returns :

boolean indicating whether the copy was successful. -
-
-
-
-

MixDisplayFinalizeFunction ()

-
void                (*MixDisplayFinalizeFunction)       (MixDisplay *obj);
-

-Virtual function prototype for methods that free resources used by the -object.

-
-- - - - -

obj :

Display to finalize -
-
-
-
-

MixDisplayEqualFunction ()

-
bool            (*MixDisplayEqualFunction)          (MixDisplay *first,
-                                                         MixDisplay *second);
-
-
-
-

MIX_VALUE_HOLDS_DISPLAY()

-
#define MIX_VALUE_HOLDS_DISPLAY(value)  (G_VALUE_HOLDS(value, MIX_TYPE_DISPLAY))
-
-

-Checks if the given GValue contains a MIX_TYPE_DISPLAY value.

-
-- - - - -

value :

the GValue to check -
-
-
-
-

MIX_DISPLAY_REFCOUNT()

-
#define MIX_DISPLAY_REFCOUNT(obj)           ((MIX_DISPLAY_CAST(obj))->refcount)
-
-

-Get access to the reference count field of the object.

-
-- - - - -

obj :

a MixDisplay -
-
-
-
-

MIX_DISPLAY_REFCOUNT_VALUE()

-
#define MIX_DISPLAY_REFCOUNT_VALUE(obj)     (g_atomic_int_get (&(MIX_DISPLAY_CAST(obj))->refcount))
-
-

-Get the reference count value of the object

-
-- - - - -

obj :

a MixDisplay -
-
-
-
-

MixDisplay

-
typedef struct {
-  int refcount;
-} MixDisplay;
-
-

-Base class for refcounted MI-X display objects.

-
-- - - - -

int refcount;

atomic refcount -
-
-
-
-

mix_display_new ()

-
MixDisplay *        mix_display_new                     ();
-

-Create a new instance of the object.

-
-- - - - -

returns :

return a newly allocated object. -
-
-
-
-

mix_display_copy ()

-
bool            mix_display_copy                    (MixDisplay *target,
-                                                         const MixDisplay *src);
-

-Copy data from one instance to the other. This method internally invokes the "copy" virtual method so that derived objects are copied correctly.

-
-- - - - - - - - - - - - - - -

target :

copy to target -

src :

copy from source -

returns :

boolean indicating whether the copy was successful. -
-
-
-
-

mix_display_ref ()

-
MixDisplay *        mix_display_ref                     (MixDisplay *obj);
-

-Increment reference count.

-
-- - - - - - - - - - -

obj :

a MixDisplay object. -

returns :

the object with reference count incremented. -
-
-
-
-

mix_display_unref ()

-
void                mix_display_unref                   (MixDisplay *obj);
-

-Decrement reference count.

-
-- - - - -

obj :

a MixDisplay object. -
-
-
-
-

mix_display_replace ()

-
void                mix_display_replace                 (MixDisplay **olddata,
-                                                         MixDisplay *newdata);
-

-Replace the object pointer in olddata with newdata.

-
-- - - - - - - - - - -

olddata :

old data -

newdata :

new data -
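For example, an application might hold one owned MixDisplay reference in its own context structure and swap it with this function (a sketch; AppCtx is hypothetical, and the assumption that mix_display_replace() adjusts the old and new reference counts itself is consistent with the ref/unref API above but not spelled out here):

typedef struct {
    MixDisplay *display;   /* owned reference, may be NULL */
} AppCtx;                  /* hypothetical application context */

static void app_ctx_set_display(AppCtx *ctx, MixDisplay *newdisp)
{
    /* Replaces ctx->display with newdisp, updating reference counts. */
    mix_display_replace(&ctx->display, newdisp);
}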
-
-
-
-

mix_display_dup ()

-
MixDisplay *        mix_display_dup                     (const MixDisplay *obj);
-

-Duplicate the given MixDisplay into a newly allocated instance. This method chains up properly, so derived objects are duplicated correctly.

-
-- - - - - - - - - - -

obj :

MixDisplay object to duplicate. -

returns :

A newly allocated duplicate of the object, or NULL if failed. -
-
-
-
-

mix_display_equal ()

-
bool            mix_display_equal                   (MixDisplay *first,
-                                                         MixDisplay *second);
-

-Note that the comparison is of the values held inside the objects, not a check of whether the two pointers refer to the same instance.

-
-- - - - - - - - - - - - - - -

first :

first object to compare -

second :

second object to compare -

returns :

boolean indicating whether the two objects contain the same data. -
-
-
-
-

MIX_TYPE_PARAM_DISPLAY

-
#define MIX_TYPE_PARAM_DISPLAY (mix_param_spec_display_get_type())
-
-
-
-
-

MIX_IS_PARAM_SPEC_DISPLAY()

-
#define MIX_IS_PARAM_SPEC_DISPLAY(pspec) (G_TYPE_CHECK_INSTANCE_TYPE ((pspec), MIX_TYPE_PARAM_DISPLAY))
-
-
-
-
-

MIX_PARAM_SPEC_DISPLAY()

-
#define MIX_PARAM_SPEC_DISPLAY(pspec) (G_TYPE_CHECK_INSTANCE_CAST ((pspec), MIX_TYPE_PARAM_DISPLAY, MixParamSpecDisplay))
-
-
-
-
-

MixParamSpecDisplay

-
typedef struct {
-  GParamSpec parent;
-} MixParamSpecDisplay;
-
-

-A GParamSpec-derived structure that contains the metadata -for MixDisplay properties.

-
-- - - - -

GParamSpec parent;

GParamSpec portion -
-
-
-
-

mix_param_spec_display_get_type ()

-
GType               mix_param_spec_display_get_type     (void);
-
-
-
-

mix_param_spec_display ()

-
GParamSpec *        mix_param_spec_display              (const char *name,
-                                                         const char *nick,
-                                                         const char *blurb,
-                                                         GType object_type,
-                                                         GParamFlags flags);
-

-Creates a new GParamSpec instance that holds MixDisplay references.

-
-- - - - - - - - - - - - - - - - - - - - - - - - - - -

name :

the canonical name of the property -

nick :

the nickname of the property -

blurb :

a short description of the property -

object_type :

the MixDisplayType for the property -

flags :

a combination of GParamFlags -

returns :

a newly allocated GParamSpec instance -
-
-
-
-

mix_value_set_display ()

-
void                mix_value_set_display               (GValue *value,
-                                                         MixDisplay *obj);
-

-Set the contents of a MIX_TYPE_DISPLAY derived GValue to -obj. -The caller retains ownership of the reference.

-
-- - - - - - - - - - -

value :

a valid GValue of MIX_TYPE_DISPLAY derived type -

obj :

object value to set -
-
-
-
-

mix_value_take_display ()

-
void                mix_value_take_display              (GValue *value,
-                                                         MixDisplay *obj);
-

-Set the contents of a MIX_TYPE_DISPLAY derived GValue to -obj. -Takes over the ownership of the caller's reference to obj; -the caller doesn't have to unref it any more.

-
-- - - - - - - - - - -

value :

a valid GValue of MIX_TYPE_DISPLAY derived type -

obj :

object value to take -
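The ownership difference between mix_value_set_display() and mix_value_take_display() can be sketched as follows (assuming MIX_TYPE_DISPLAY can be passed to g_value_init(), as the "valid GValue of MIX_TYPE_DISPLAY derived type" wording suggests):

static void display_value_ownership_demo(void)
{
    GValue v = { 0, };
    MixDisplay *disp = mix_display_new();

    g_value_init(&v, MIX_TYPE_DISPLAY);
    mix_value_set_display(&v, disp);   /* caller retains its reference...  */
    mix_display_unref(disp);           /* ...so it must still drop it      */

    disp = mix_display_new();
    mix_value_take_display(&v, disp);  /* the GValue takes the reference;  */
                                       /* do not unref disp after this     */
    g_value_unset(&v);
}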
-
-
-
-

mix_value_get_display ()

-
MixDisplay *        mix_value_get_display               (const GValue *value);
-

-Get the MixDisplay held in the GValue; the refcount of the MixDisplay is not increased.

-
-- - - - - - - - - - -

value :

a valid GValue of MIX_TYPE_DISPLAY derived type -

returns :

object contents of value -
-
-
-
-

mix_value_dup_display ()

-
MixDisplay *        mix_value_dup_display               (const GValue *value);
-

-Get the MixDisplay held in the GValue; the refcount of the MixDisplay is increased.

-
-- - - - - - - - - - -

value :

a valid GValue of MIX_TYPE_DISPLAY derived type -

returns :

object contents of value -
-
-
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixDisplayX11.html b/mix_video/docs/reference/MixVideo/html/MixDisplayX11.html deleted file mode 100644 index c858e79..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixDisplayX11.html +++ /dev/null @@ -1,271 +0,0 @@ - - - - -MixDisplayX11 - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixDisplayX11

-

MixDisplayX11 — MI-X Video X11 Display

-
-
-

Synopsis

-
-                    MixDisplayX11;
-MixDisplayX11 *     mix_displayx11_new                  (void);
-MixDisplayX11 *     mix_displayx11_ref                  (MixDisplayX11 *mix);
-#define             mix_displayx11_unref                (obj)
-MIX_RESULT          mix_displayx11_set_display          (MixDisplayX11 *obj,
-                                                         Display *display);
-MIX_RESULT          mix_displayx11_get_display          (MixDisplayX11 *obj,
-                                                         Display **display);
-MIX_RESULT          mix_displayx11_set_drawable         (MixDisplayX11 *obj,
-                                                         Drawable drawable);
-MIX_RESULT          mix_displayx11_get_drawable         (MixDisplayX11 *obj,
-                                                         Drawable *drawable);
-
-
-
-

Object Hierarchy

-
-  MixDisplay
-   +----MixDisplayX11
-
-
-
-

Description

-

-A data object which stores X11-specific parameters. -

-

-

-
-

Data Structures Used in MixDisplayX11 Fields:

-See X11/Xlib.h for Display and Drawable definitions. -
-
-
-

Details

-
-

MixDisplayX11

-
typedef struct {
-  MixDisplay parent;
-
-
-  /* Pointer to a X Window Display structure */
-  Display *display;
-  
-  /* An X Window Drawable that is either a Window 
-   * or a Pixmap. This field is not used in 
-   * mix_video_initialize(). 
-   * See X11/Xlib.h for Display and Drawable definitions.*/
-  Drawable drawable;
-} MixDisplayX11;
-
-

-MI-X Video X11 Display object

-
-
-
-

mix_displayx11_new ()

-
MixDisplayX11 *     mix_displayx11_new                  (void);
-

-Use this method to create a new instance of MixDisplayX11.

-
-- - - - -

returns :

A newly allocated instance of MixDisplayX11 -
-
-
-
-

mix_displayx11_ref ()

-
MixDisplayX11 *     mix_displayx11_ref                  (MixDisplayX11 *mix);
-

-Increment the reference count of the object.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixDisplayX11 instance where reference count has been increased. -
-
-
-
-

mix_displayx11_unref()

-
#define mix_displayx11_unref(obj) mix_display_unref(MIX_DISPLAY(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

mix_displayx11_set_display ()

-
MIX_RESULT          mix_displayx11_set_display          (MixDisplayX11 *obj,
-                                                         Display *display);
-

-Set Display

-
-- - - - - - - - - - - - - - -

obj :

MixDisplayX11 object -

display :

Pointer to a X Window Display structure -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_displayx11_get_display ()

-
MIX_RESULT          mix_displayx11_get_display          (MixDisplayX11 *obj,
-                                                         Display **display);
-

-Get Display

-
-- - - - - - - - - - - - - - -

obj :

MixDisplayX11 object -

display :

Pointer to pointer of X Window Display structure -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_displayx11_set_drawable ()

-
MIX_RESULT          mix_displayx11_set_drawable         (MixDisplayX11 *obj,
-                                                         Drawable drawable);
-

-Set drawable

-
-- - - - - - - - - - - - - - -

obj :

MixDisplayX11 object -

drawable :

An X Window Drawable that is either a Window or a Pixmap. -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_displayx11_get_drawable ()

-
MIX_RESULT          mix_displayx11_get_drawable         (MixDisplayX11 *obj,
-                                                         Drawable *drawable);
-

-Get drawable

-
-- - - - - - - - - - - - - - -

obj :

MixDisplayX11 object -

drawable :

An X Window Drawable that is either a Window or a Pixmap to be returned. -

returns :

Common Video Error Return Codes -
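A typical setup sketch using Xlib directly; the use of mix_videoinitparams_set_display() and the MIX_DISPLAY() cast is an assumption based on the API lists in this manual, since their exact signatures are not shown on this page:

#include <X11/Xlib.h>

static MixVideoInitParams *make_x11_init_params(Drawable drawable)
{
    Display *xdpy = XOpenDisplay(NULL);          /* app-owned X connection */
    MixDisplayX11 *x11 = mix_displayx11_new();
    MixVideoInitParams *init = mix_videoinitparams_new();

    mix_displayx11_set_display(x11, xdpy);
    mix_displayx11_set_drawable(x11, drawable);  /* used at render time,
                                                    not by initialize */
    mix_videoinitparams_set_display(init, MIX_DISPLAY(x11)); /* assumed */
    return init;
}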
-
-
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixDrmParams.html b/mix_video/docs/reference/MixVideo/html/MixDrmParams.html deleted file mode 100644 index c5f77f9..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixDrmParams.html +++ /dev/null @@ -1,137 +0,0 @@ - - - - -MixDrmParams - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixDrmParams

-

MixDrmParams — DRM Parameters Base Object

-
- -
-

Object Hierarchy

-
-  MixParams
-   +----MixDrmParams
-
-
-
-

Description

-

-A data object which stores DRM-specific parameters.

-
-
-

Details

-
-

MixDrmParams

-
typedef struct {
-  MixParams parent;
-
-
-  /* TODO: Add properties */
-} MixDrmParams;
-
-

-MI-X DRM Parameter object

-
-
-
-

mix_drmparams_new ()

-
MixDrmParams *      mix_drmparams_new                   (void);
-

-Use this method to create a new instance of MixDrmParams.

-
-- - - - -

returns :

A newly allocated instance of MixDrmParams -
-
-
-
-

mix_drmparams_ref ()

-
MixDrmParams *      mix_drmparams_ref                   (MixDrmParams *mix);
-

-Increment the reference count of the object.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixDrmParams instance where reference count has been increased. -
-
-
-
-

mix_drmparams_unref()

-
#define mix_drmparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideo-mixvideodef.html b/mix_video/docs/reference/MixVideo/html/MixVideo-mixvideodef.html deleted file mode 100644 index 6f6ea67..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideo-mixvideodef.html +++ /dev/null @@ -1,221 +0,0 @@ - - - - -MI-X Video Data Definitons And Common Error Code - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MI-X Video Data Definitions And Common Error Codes

-

MI-X Video Data Definitions And Common Error Codes — MI-X Video data definitions and common error codes

-
-
-

Synopsis

-
-
-#include <mixvideodef.h>
-
-enum                MIX_VIDEO_ERROR_CODE;
-enum                MixCodecMode;
-enum                MixFrameOrderMode;
-                    MixIOVec;
-                    MixRect;
-enum                MixState;
-enum                MixRawTargetFormat;
-enum                MixEncodeTargetFormat;
-enum                MixRateControl;
-enum                MixProfile;
-enum                MixDelimiterType;
-
-
-
-

Description

-

-This section includes the definitions of the enums and structs used by MI-X video, as well as the following: -

-
-

Common Video Error Return Codes of MI-X video functions

-
    -
  • -MIX_RESULT_SUCCESS, The operation completed successfully.
  • -
  • MIX_RESULT_NULL_PTR, The pointer passed to the function was null.
  • -
  • MIX_RESULT_NO_MEMORY, Memory needed for the operation could not be allocated.
  • -
  • MIX_RESULT_INVALID_PARAM, An argument passed to the function was invalid.
  • -
  • MIX_RESULT_NOT_INIT, MixVideo object has not been initialized yet.
  • -
  • MIX_RESULT_NOT_CONFIGURED, MixVideo object has not been configured yet.
  • -
  • MIX_RESULT_FAIL, For any failure.
  • -
-
-
-
-

Details

-
-

enum MIX_VIDEO_ERROR_CODE

-
typedef enum {
-	MIX_RESULT_FRAME_NOTAVAIL = MIX_RESULT_ERROR_VIDEO_START + 1,
-	MIX_RESULT_EOS,
-	MIX_RESULT_POOLEMPTY,
-	MIX_RESULT_OUTOFSURFACES,
-	MIX_RESULT_DROPFRAME,
-	MIX_RESULT_NOTIMPL,
-	MIX_RESULT_VIDEO_LAST
-} MIX_VIDEO_ERROR_CODE;
-
-
-
-
-

enum MixCodecMode

-
typedef enum {
-	MIX_CODEC_MODE_ENCODE = 0,
-	MIX_CODEC_MODE_DECODE,
-	MIX_CODEC_MODE_LAST
-} MixCodecMode;
-
-
-
-
-

enum MixFrameOrderMode

-
typedef enum {
-	MIX_FRAMEORDER_MODE_DISPLAYORDER = 0,
-	MIX_FRAMEORDER_MODE_DECODEORDER,
-	MIX_FRAMEORDER_MODE_LAST
-} MixFrameOrderMode;
-
-
-
-
-

MixIOVec

-
typedef struct {
-	uchar *data;
-	int buffer_size;
-    int data_size;
-} MixIOVec;
-
-
-
-
-

MixRect

-
typedef struct {
-	short x;
-	short y;
-	ushort width;
-	ushort height;
-} MixRect;
-
-
-
-
-

enum MixState

-
typedef enum {
-	MIX_STATE_UNINITIALIZED = 0,
-	MIX_STATE_INITIALIZED,
-	MIX_STATE_CONFIGURED,
-	MIX_STATE_LAST
-} MixState;
-
-
-
-
-

enum MixRawTargetFormat

-
typedef enum
-{
-    MIX_RAW_TARGET_FORMAT_NONE = 0,
-    MIX_RAW_TARGET_FORMAT_YUV420 = 1,
-    MIX_RAW_TARGET_FORMAT_YUV422 = 2,
-    MIX_RAW_TARGET_FORMAT_YUV444 = 4,
-    MIX_RAW_TARGET_FORMAT_PROTECTED = 0x80000000,    
-    MIX_RAW_TARGET_FORMAT_LAST
-} MixRawTargetFormat;
-
-
-
-
-

enum MixEncodeTargetFormat

-
typedef enum
-{
-    MIX_ENCODE_TARGET_FORMAT_MPEG4 = 0,
-    MIX_ENCODE_TARGET_FORMAT_H263 = 2,
-    MIX_ENCODE_TARGET_FORMAT_H264 = 4,
-    MIX_ENCODE_TARGET_FORMAT_PREVIEW = 8,
-    MIX_ENCODE_TARGET_FORMAT_LAST
-} MixEncodeTargetFormat;
-
-
-
-
-

enum MixRateControl

-
typedef enum
-{
-    MIX_RATE_CONTROL_NONE = 1,
-    MIX_RATE_CONTROL_CBR = 2,
-    MIX_RATE_CONTROL_VBR = 4,
-    MIX_RATE_CONTROL_LAST
-} MixRateControl;
-
-
-
-
-

enum MixProfile

-
typedef enum
-{
-    MIX_PROFILE_MPEG2SIMPLE = 0,
-    MIX_PROFILE_MPEG2MAIN,
-    MIX_PROFILE_MPEG4SIMPLE,
-    MIX_PROFILE_MPEG4ADVANCEDSIMPLE,
-    MIX_PROFILE_MPEG4MAIN,
-    MIX_PROFILE_H264BASELINE,
-    MIX_PROFILE_H264MAIN,
-    MIX_PROFILE_H264HIGH,
-    MIX_PROFILE_VC1SIMPLE,
-    MIX_PROFILE_VC1MAIN,
-    MIX_PROFILE_VC1ADVANCED,
-    MIX_PROFILE_H263BASELINE
-} MixProfile;
-
-
-
-
-

enum MixDelimiterType

-
typedef enum
-{
-    MIX_DELIMITER_LENGTHPREFIX = 0,
-    MIX_DELIMITER_ANNEXB
-} MixDelimiterType;
-
-
-
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideo.devhelp b/mix_video/docs/reference/MixVideo/html/MixVideo.devhelp deleted file mode 100644 index 977309b..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideo.devhelp +++ /dev/null @@ -1,244 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideo.devhelp2 b/mix_video/docs/reference/MixVideo/html/MixVideo.devhelp2 deleted file mode 100644 index 5655e98..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideo.devhelp2 +++ /dev/null @@ -1,244 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideo.html b/mix_video/docs/reference/MixVideo/html/MixVideo.html deleted file mode 100644 index aa16589..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideo.html +++ /dev/null @@ -1,958 +0,0 @@ - - - - -MixVideo - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideo

-

MixVideo — Object supporting decoding or encoding of a single stream using a hardware-accelerated decoder/encoder.

-
-
-

Synopsis

-
-
-#include <mixvideo.h>
-
-                    MixVideo;
-MixVideo *          mix_video_new                       (void);
-MixVideo *          mix_video_ref                       (MixVideo *mix);
-#define             mix_video_unref                     (obj)
-MIX_RESULT          mix_video_get_version               (MixVideo *mix,
-                                                         uint *major,
-                                                         uint *minor);
-MIX_RESULT          mix_video_initialize                (MixVideo *mix,
-                                                         MixCodecMode mode,
-                                                         MixVideoInitParams *init_params,
-                                                         MixDrmParams *drm_init_params);
-MIX_RESULT          mix_video_deinitialize              (MixVideo *mix);
-MIX_RESULT          mix_video_configure                 (MixVideo *mix,
-                                                         MixVideoConfigParams *config_params,
-                                                         MixDrmParams *drm_config_params);
-MIX_RESULT          mix_video_get_config                (MixVideo *mix,
-                                                         MixVideoConfigParams **config_params);
-MIX_RESULT          mix_video_decode                    (MixVideo *mix,
-                                                         MixBuffer *bufin[],
-                                                         int bufincnt,
-                                                         MixVideoDecodeParams *decode_params);
-MIX_RESULT          mix_video_get_frame                 (MixVideo *mix,
-                                                         MixVideoFrame **frame);
-MIX_RESULT          mix_video_release_frame             (MixVideo *mix,
-                                                         MixVideoFrame *frame);
-MIX_RESULT          mix_video_render                    (MixVideo *mix,
-                                                         MixVideoRenderParams *render_params,
-                                                         MixVideoFrame *frame);
-MIX_RESULT          mix_video_encode                    (MixVideo *mix,
-                                                         MixBuffer *bufin[],
-                                                         int bufincnt,
-                                                         MixIOVec *iovout[],
-                                                         int iovoutcnt,
-                                                         MixVideoEncodeParams *encode_params);
-MIX_RESULT          mix_video_flush                     (MixVideo *mix);
-MIX_RESULT          mix_video_eos                       (MixVideo *mix);
-MIX_RESULT          mix_video_get_state                 (MixVideo *mix,
-                                                         MixState *state);
-MIX_RESULT          mix_video_get_mixbuffer             (MixVideo *mix,
-                                                         MixBuffer **buf);
-MIX_RESULT          mix_video_release_mixbuffer         (MixVideo *mix,
-                                                         MixBuffer *buf);
-MIX_RESULT          mix_video_get_max_coded_buffer_size (MixVideo *mix,
-                                                         uint *bufsize);
-
-
-
-

Object Hierarchy

-
-  GObject
-   +----MixVideo
-
-
-
-

Description

-

-MixVideo objects are created by the MMF/App and provide the main MI-X API functionality for video. -

-

-The MixVideo object handles any of the video formats internally. -The App/MMF will pass a MixVideoConfigParamsDecH264/MixVideoConfigParamsDecVC1/ -MixVideoConfigParamsEncH264/etc object to MixVideo in the mix_video_configure() -call. MixVideoInitParams, MixVideoDecodeParams, MixVideoEncodeParams, and -MixVideoRenderParams objects will be passed in the mix_video_initialize(), -mix_video_decode(), mix_video_encode() and mix_video_render() calls respectively. -

-

-The application can take the following steps to decode video, as illustrated in the sketch below: -

-
-

-

-

-For encoding, the application can take a similar sequence of steps; see the example under mix_video_encode(). -

-
-
-
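A minimal end-to-end decode sketch, assembled only from the functions documented on this page (parameter-object setup and error handling are abbreviated, and the NULL DRM arguments assume unprotected content):

static void decode_stream(MixVideoInitParams *init,
                          MixVideoConfigParams *cfg,
                          MixVideoRenderParams *render)
{
    MixVideo *mix = mix_video_new();
    MixVideoFrame *frame = NULL;

    mix_video_initialize(mix, MIX_CODEC_MODE_DECODE, init, NULL);
    mix_video_configure(mix, cfg, NULL);

    /* ...repeatedly wrap coded data in MixBuffers and call
     * mix_video_decode(), as described under mix_video_decode()... */

    mix_video_eos(mix);   /* signal end of stream, then drain frames */
    while (mix_video_get_frame(mix, &frame) == MIX_RESULT_SUCCESS) {
        mix_video_render(mix, render, frame);
        mix_video_release_frame(mix, frame);
        frame = NULL;
    }                     /* loop exits on MIX_RESULT_EOS */

    mix_video_deinitialize(mix);
    mix_video_unref(mix);
}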
-

Details

-
-

MixVideo

-
typedef struct {
-	GObject parent;
-} MixVideo;
-
-

-MI-X Video object

-
-- - - - -

GObject parent;

Parent object. -
-
-
-
-

mix_video_new ()

-
MixVideo *          mix_video_new                       (void);
-

-Use this method to create a new instance of MixVideo.

-
-- - - - -

returns :

A newly allocated instance of MixVideo -
-
-
-
-

mix_video_ref ()

-
MixVideo *          mix_video_ref                       (MixVideo *mix);
-

-Increment the reference count of the object.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixVideo instance where reference count has been increased. -
-
-
-
-

mix_video_unref()

-
#define mix_video_unref(obj) g_object_unref (G_OBJECT(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

mix_video_get_version ()

-
MIX_RESULT          mix_video_get_version               (MixVideo *mix,
-                                                         uint *major,
-                                                         uint *minor);
-

-This function will return the major and minor version numbers of the library.

-
-- - - - - - - - - - - - - - - - - - -

mix :

MixVideo object. -

major :

Pointer to an unsigned integer indicating the major version number of this MI-X Video library -

minor :

Pointer to an unsigned integer indicating the minor version number of this MI-X Video library -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_video_initialize ()

-
MIX_RESULT          mix_video_initialize                (MixVideo *mix,
-                                                         MixCodecMode mode,
-                                                         MixVideoInitParams *init_params,
-                                                         MixDrmParams *drm_init_params);
-

-This function will initialize a session with this MI-X instance for the given encode or decode mode. It must be called before mix_video_configure().

-
-- - - - - - - - - - - - - - - - - - - - - - -

mix :

MixVideo object. -

mode :

Enum value to indicate encode or decode mode -

init_params :

MixVideoInitParams object which includes display type and pointer to display, encode or decode mode -

drm_init_params :

MixDrmParams defined in Moorestown MI-X DRM API. - This can be null if content is not protected. -

returns :

In addition to the Common Video Error Return Codes, - the following error codes may be returned. -
-
-
-
-
-

mix_video_deinitialize ()

-
MIX_RESULT          mix_video_deinitialize              (MixVideo *mix);
-

-This function will un-initialize a session with this MI-X instance. During this call, the -LibVA session is closed and all resources, including surface buffers, MixBuffers and -MixVideoFrame objects, are freed. The application calls this function after -mix_video_initialize() has been called, before exiting.

-
-- - - - - - - - - - -

mix :

MixVideo object. -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_video_configure ()

-
MIX_RESULT          mix_video_configure                 (MixVideo *mix,
-                                                         MixVideoConfigParams *config_params,
-                                                         MixDrmParams *drm_config_params);
-

-This function can be used to configure a stream for the current session. - The caller can use this function to do the following: -

-
    -
  • Choose frame ordering mode (display order or decode order)
  • -
  • Choose encode or decode mode
  • -
  • Choose whether display frames are enqueued for encode mode
  • -
  • Provide stream parameters
  • -
-

-

-

-This function can only be called after mix_video_initialize() has been called

-
-- - - - - - - - - - - - - - - - - - -

mix :

MixVideo object. -

config_params :

Pointer to MixVideoConfigParams object (either MixVideoConfigParamsDec or - MixVideoConfigParamsEnc for specific media type) -

drm_config_params :

Pointer to MixDrmParams defined in Moorestown MI-X DRM API. - This can be null if content is not protected. -

returns :

In addition to the Common Video Error Return Codes, - the following error codes may be returned. -
    -
  • MIX_RESULT_RESOURCES_NOTAVAIL, HW accelerated decoding is not available.
  • -
  • MIX_RESULT_NOTSUPPORTED, A requested parameter is not supported or not available.
  • -
-
-
-
-
-

mix_video_get_config ()

-
MIX_RESULT          mix_video_get_config                (MixVideo *mix,
-                                                         MixVideoConfigParams **config_params);
-

-This function can be used to get the current configuration of a stream for the current session. -A MixVideoConfigParams object will be returned, which can be used to get the current -value of each parameter. The caller will need to release this object when it is no -longer needed. -

-

-This function can only be called once mix_video_configure() has been called. -

-

-

-
-

Note

See description of mix_video_configure() for MixVideoConfigParams object details. -For mix_video_get_config(), all input parameter fields become OUT parameters. -
-
-- - - - - - - - - - - - - - -

mix :

MixVideo object. -

config_params :

Pointer to pointer to MixVideoConfigParams object defined in - description of mix_video_configure() -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_video_decode ()

-
MIX_RESULT          mix_video_decode                    (MixVideo *mix,
-                                                         MixBuffer *bufin[],
-                                                         int bufincnt,
-                                                         MixVideoDecodeParams *decode_params);
-

-

-

-This function is used to initiate HW accelerated decoding of encoded data buffers. It -decodes into a surface buffer, which can then be rendered using -mix_video_render(). -Video data input buffers are provided in a scatter/gather list of reference counted -MixBuffers. The input MixBuffers are retained until a full frame of coded data is -accumulated, at which point it is decoded and the input buffers released. The -decoded data will be stored in a surface buffer until it is rendered. The caller must -provide the presentation timestamp and any stream discontinuity for the video frame -in the MixVideoDecodeParams object. These will be preserved -and provided for the MixVideoFrame object that contains the decoded data for this -frame. -

-

-

-

-

-

-As only one timestamp is passed in for the buffer, there should be no more than one -video frame included in the encoded data buffer provided in a single call to -mix_video_decode(). If partial frame data is passed in over multiple calls to -mix_video_decode(), the same timestamp should be provided with each call having -data associated with the same frame. -

-

-

-

-

-

-The application should request a MixBuffer object using mix_video_get_mixbuffer(), -initialize the MixBuffer with the data pointer to the coded input data, along with the -size of the input data buffer, and optionally can provide a token value and a callback -function pointer. When the MixBuffer is released by both the application and MixVideo, -the callback will be called and passed the token value and the input data buffer -pointer for any buffer management processing that the application needs or wants to -perform (such as releasing the actual coded data buffer that was assigned to that -MixBuffer). MixBuffers are allocated in a pool, and the application determines the size -of this pool, which is passed to mix_video_configure() in the MixVideoConfigParams object. -

-
-- - - - - - - - - - - - - - - - - - - - - - -

mix :

MixVideo object. -

bufin :

Array of pointers to MixBuffer objects, described in mix_video_get_mixbuffer() * -

bufincnt :

Number of MixBuffer objects -

decode_params :

MixVideoDecodeParams object -

returns :

In addition to the Common Video Error Return Codes, - the following error codes may be returned. -
  • - MIX_RESULT_OUTOFSURFACES, No surfaces available for decoding. Nothing will be done. - Caller can try again with the same MixBuffers later when surfaces may have been freed. -
-
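For example, the MIX_RESULT_OUTOFSURFACES case can be handled by resubmitting the same MixBuffers, as the table above allows (a sketch; the retry interval is an arbitrary application choice):

#include <unistd.h>

static MIX_RESULT decode_with_retry(MixVideo *mix, MixBuffer *bufin[],
                                    int bufincnt,
                                    MixVideoDecodeParams *decode_params)
{
    MIX_RESULT ret;
    do {
        ret = mix_video_decode(mix, bufin, bufincnt, decode_params);
        if (ret == MIX_RESULT_OUTOFSURFACES)
            usleep(5000);   /* arbitrary wait for surfaces to be freed */
    } while (ret == MIX_RESULT_OUTOFSURFACES);
    return ret;
}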
-
-
-
-

mix_video_get_frame ()

-
MIX_RESULT          mix_video_get_frame                 (MixVideo *mix,
-                                                         MixVideoFrame **frame);
-

-

-

-This function returns a frame object that represents the next frame ID and includes -timestamp and discontinuity information. If display frame ordering has been -configured, it is the next frame displayed. If decode order frame ordering has been -configured, it is the next frame decoded. In both cases the timestamp reflects the -presentation timestamp. For encode mode the frame order is always display order. -

-

-

-

-

-

-The frame object is a reference counted object that represents the frame. The -application can retain this frame object as long as needed to display the frame and -redisplay as needed. At presentation time, the application can call mix_video_render() -with this frame object to display the frame immediately. When the application no -longer needs to display this frame, it should release the object by calling -mix_video_release_frame(). The application should not modify the reference count or -delete this object directly. -

-
-- - - - - - - - - - - - - - -

mix :

MixVideo object. -

frame :

A pointer to a pointer to a MixVideoFrame object -

returns :

In addition to the Common Video Error Return Codes, - the following error codes may be returned. -
    -
  • - MIX_RESULT_FRAME_NOTAVAIL, No decoded frames are available. -
  • -
  • - MIX_RESULT_EOS, No more decoded frames are available, - since end of stream has been encountered. -
  • -
-
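A sketch of one poll cycle distinguishing the two non-success results listed above:

static void present_next_frame(MixVideo *mix,
                               MixVideoRenderParams *render_params)
{
    MixVideoFrame *frame = NULL;
    MIX_RESULT ret = mix_video_get_frame(mix, &frame);

    if (ret == MIX_RESULT_SUCCESS) {
        mix_video_render(mix, render_params, frame);  /* at presentation time */
        mix_video_release_frame(mix, frame);
    } else if (ret == MIX_RESULT_FRAME_NOTAVAIL) {
        /* nothing decoded yet: feed more input via mix_video_decode() */
    } else if (ret == MIX_RESULT_EOS) {
        /* end of stream: no further frames will be produced */
    }
}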
-
-
-
-

mix_video_release_frame ()

-
MIX_RESULT          mix_video_release_frame             (MixVideo *mix,
-                                                         MixVideoFrame *frame);
-

-This function releases a frame object that was acquired from mix_video_get_frame().

-
-- - - - - - - - - - - - - - -

mix :

MixVideo object. -

frame :

A pointer to a MixVideoFrame object, described in mix_video_get_frame() -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_video_render ()

-
MIX_RESULT          mix_video_render                    (MixVideo *mix,
-                                                         MixVideoRenderParams *render_params,
-                                                         MixVideoFrame *frame);
-

-This function renders a video frame associated with a MixVideoFrame object to the display. -The display is either an X11 Pixmap or an X11 Window using the overlay.

-
-- - - - - - - - - - - - - - - - - - -

mix :

MixVideo object. -

render_params :

MixVideoRenderParams object defined below, - which includes the display window and type, - src and dest image sizes, deinterlace info, clipping rectangles, - some post processing parameters, and so forth. -

frame :

Pointer to a MixVideoFrame object returned from mix_video_get_frame(). -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_video_encode ()

-
MIX_RESULT          mix_video_encode                    (MixVideo *mix,
-                                                         MixBuffer *bufin[],
-                                                         int bufincnt,
-                                                         MixIOVec *iovout[],
-                                                         int iovoutcnt,
-                                                         MixVideoEncodeParams *encode_params);
-

-

-

-This function is used to initiate HW accelerated encoding of uncompressed video input -buffers. The input buffers may either be uncompressed video in user space buffers, or -CI frame indexes from libCI captured frames. In order to use CI frame indexes, the -shared buffer mode should be indicated in the MixVideoConfigParamsEnc object -provided to mix_video_configure(). -

-

-

-

-

-

-Video uncompressed data input buffers are provided in a scatter/gather list of -reference counted MixBuffers. The input MixBuffers are considered a complete frame -of data, and are used for encoding before the input buffers are released. LibCI frame -indices may also be provided in MixBuffers. -

-

-

-

-

-

-The encoded data will be copied to the output buffers provided in the array of -MixIOVec structures, also in a scatter/gather list. These output buffers are allocated -by the application. The application can query for the proper size of buffer to allocate -for this, using mix_video_get_max_coded_buffer_size(). It is suggested that the -application create a pool of these buffers to pass in, for efficiency. The application will -also set the buffer_size field in the MixIOVec structures to the allocated buffer size. -When the buffers are filled with encoded data by MixVideo, the data_size will be set to -the encoded data size placed in the buffer. For any buffer not used for encoded data, -the data_size will be set to zero. -

-

-

-

-

-

-Alternatively, if the application does not allocate the output buffers, the data pointers -in the MixIOVec structures (still provided by the application) can be set to NULL, -whereupon MixVideo will allocate a data buffer for each frame and set the data, -buffer_size and data_size pointers in the MixIOVec structures accordingly. -

-

-

-

-

-
-

Note

-This is not an efficient method to handle these buffers and it is preferred that -the application provide pre-allocated buffers. -
-

-

-

-

-

-The application should request a MixBuffer object using mix_video_get_mixbuffer(), -initialize the MixBuffer with the data pointer to the uncompressed input data or a LibCI -frame index, along with the size of the input data buffer, and optionally can provide a -token value and a callback function pointer. When the MixBuffer is released by both -the application and MixVideo, the callback will be called and passed the token value -and the input data buffer pointer for any buffer management processing that the -application needs or wants to perform (such as releasing the actual data buffer that -was assigned to that MixBuffer). MixBuffers are allocated in a pool, and the application -determines the size of this pool, which is passed to mix_video_configure() in the -MixVideoConfigParams object. -

-

-

-

-

-

-The application can choose to enable or disable display of the uncompressed video -frames using the need_display field of the MixVideoConfigParamsEnc object in -mix_video_configure(). If display is enabled, MixVideoFrames are enqueued by -MixVideo, to be requested by the application with mix_video_get_frame() and -provided to mix_video_render() for rendering before being released with -mix_video_release_frame(). If display is disabled, no MixVideoFrames will be -enqueued. -

-
-- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

mix :

MixVideo object. -

bufin :

Array of pointers to MixBuffer objects, structure defined in mix_video_decode() -

bufincnt :

Number of MixBuffer objects -

iovout :

Array of MixIOVec structures, pointing to buffers allocated by the application -

iovoutcnt :

Number of items in iovout array -

encode_params :

MixVideoEncodeParams object -

returns :

Common Video Error Return Codes -
-
-
-
-
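The fragment below sketches the output-buffer handling described above. It assumes a MixVideo instance "video" already configured via mix_video_configure(), an input MixBuffer "inbuf" holding one uncompressed frame (see mix_video_get_mixbuffer() below), and the names MIX_RESULT_SUCCESS and mix_videoencodeparams_new(), none of which are defined in this entry.

    /* Sketch: encode one frame into an application-allocated output buffer. */
    uint max_size = 0;
    MIX_RESULT ret = mix_video_get_max_coded_buffer_size(video, &max_size);
    if (ret != MIX_RESULT_SUCCESS)          /* MIX_RESULT_SUCCESS assumed */
        return ret;

    MixIOVec iov;
    iov.data = malloc(max_size);   /* application-allocated output buffer */
    iov.buffer_size = max_size;    /* tell MixVideo how big the buffer is */
    iov.data_size = 0;             /* MixVideo fills in the encoded size */

    MixBuffer *bufin[1]  = { inbuf };
    MixIOVec  *iovout[1] = { &iov };
    MixVideoEncodeParams *params = mix_videoencodeparams_new(); /* assumed ctor */

    ret = mix_video_encode(video, bufin, 1, iovout, 1, params);
    /* On success, iov.data_size holds the number of encoded bytes placed in
     * iov.data; a data_size of zero means this buffer was not used. */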

mix_video_flush ()

-
MIX_RESULT          mix_video_flush                     (MixVideo *mix);
-

-This function will flush all encoded and decoded buffers that are currently enqueued or -in the process of decoding. After this call, decoding can commence again, but would -need to start at the beginning of a sequence (for example, with no dependencies on -previously decoded reference frames).

-
-- - - - - - - - - - -

mix :

MixVideo object. -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_video_eos ()

-
MIX_RESULT          mix_video_eos                       (MixVideo *mix);
-

-This function will signal end of stream to MixVideo. This can be used to finalize -decoding of the last frame and other end of stream processing. MixVideo will complete -the decoding of all buffers received, and will continue to provide the decoded frame -objects by means of mix_video_get_frame() until all frames have been provided, -at which point mix_video_get_frame() will return MIX_RESULT_EOS.

-
-- - - - - - - - - - -

mix :

MixVideo object. -

returns :

Common Video Error Return Codes -
-
-
-
-
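A sketch of the resulting drain sequence follows. The two-argument mix_video_get_frame() and mix_video_release_frame() signatures are assumed from their usage elsewhere in this reference, and error handling is omitted.

    /* Signal end of stream, then drain the remaining decoded frames. */
    MixVideoFrame *frame = NULL;
    mix_video_eos(mix);
    while (mix_video_get_frame(mix, &frame) != MIX_RESULT_EOS) {
        /* render and/or inspect the frame, then release it */
        mix_video_release_frame(mix, frame);
    }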

mix_video_get_state ()

-
MIX_RESULT          mix_video_get_state                 (MixVideo *mix,
-                                                         MixState *state);
-

-This function returns the current state of the MI-X session.

-
-- - - - - - - - - - - - - - -

mix :

MixVideo object. -

state :

Current state of MI-X session. -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_video_get_mixbuffer ()

-
MIX_RESULT          mix_video_get_mixbuffer             (MixVideo *mix,
-                                                         MixBuffer **buf);
-

-

-

-This function returns a MixBuffer object from the pool of MixBuffers allocated by -MixVideo. The size of this pool is determined by the application and passed to -mix_video_configure() in the MixVideoConfigParams object. -

-

-The MixBuffer is a reference counted object. The application initializes it with the -data pointer to the uncompressed input data or a LibCI frame index, along with the -size of the input data buffer, and optionally a token value and a callback function -pointer, and then provides it to mix_video_decode() or mix_video_encode(). When the -MixBuffer has been released by both the application (using -mix_video_release_mixbuffer()) and MixVideo, the callback will be called with the -token value and the data buffer pointer for any buffer management processing the -application needs to perform. -

-
-- - - - - - - - - - - - - - -

mix :

MixVideo object. -

buf :

A pointer to a pointer to a MixBuffer object -

returns :

Common Video Error Return Codes -
-
-
-
-
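A sketch of this pool usage follows. The initializer name mix_buffer_set_data() and its callback signature are assumptions, as are raw_data, raw_size and token (supplied by the application); this reference only states that the MixBuffer is initialized with a data pointer, size, token value and callback function pointer.

    /* Called once both the application and MixVideo have released the
     * MixBuffer; signature assumed. */
    static void input_done(ulong token, uchar *data)
    {
        free(data);   /* reclaim the application's raw input buffer */
    }

    MixBuffer *buf = NULL;
    if (mix_video_get_mixbuffer(mix, &buf) == MIX_RESULT_SUCCESS) {
        mix_buffer_set_data(buf, raw_data, raw_size, token, input_done); /* assumed */
        /* ... pass buf to mix_video_decode() or mix_video_encode() ... */
        mix_video_release_mixbuffer(mix, buf);
    }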

mix_video_release_mixbuffer ()

-
MIX_RESULT          mix_video_release_mixbuffer         (MixVideo *mix,
-                                                         MixBuffer *buf);
-

-This function releases a MixBuffer object that was acquired from mix_video_get_mixbuffer().

-
-- - - - - - - - - - - - - - -

mix :

MixVideo object. -

buf :

A pointer to a MixBuffer object, described in mix_video_get_mixbuffer(). -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_video_get_max_coded_buffer_size ()

-
MIX_RESULT          mix_video_get_max_coded_buffer_size (MixVideo *mix,
-                                                         uint *bufsize);
-

-

-

-This function can be used to get the maximum size of encoded data buffer needed for -the mix_video_encode() call. -

-

-

-

-This function can only be called once mix_video_configure() has been called. -

-
-- - - - - - - - - - - - - - -

mix :

MixVideo object. -

bufsize :

Pointer to uint in which the maximum coded data buffer size will be returned. -

returns :

Common Video Error Return Codes -
-
-
-
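A sketch of the required ordering follows; the mix_video_configure() argument list shown is an assumption, as that function is documented elsewhere.

    /* Configuration first, then query the maximum coded buffer size. */
    uint bufsize = 0;
    if (mix_video_configure(mix, config_params, drm_params) == MIX_RESULT_SUCCESS)
        mix_video_get_max_coded_buffer_size(mix, &bufsize);
    /* bufsize can now be used to allocate MixIOVec output buffers for
     * mix_video_encode(). */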
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParams.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParams.html deleted file mode 100644 index c8c9a02..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParams.html +++ /dev/null @@ -1,162 +0,0 @@ - - - - -MixVideoConfigParams - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideoConfigParams

-

MixVideoConfigParams — MI-X Video Configuration Parameter Base Object

-
- -
-

Object Hierarchy

-
-  MixParams
-   +----MixVideoConfigParams
-         +----MixVideoConfigParamsEnc
-         +----MixVideoConfigParamsDec
-
-
-
-

Description

-

-

-

-The base object for MI-X video configuration parameter objects. -

-

-

-

-The derived MixVideoConfigParams object is created by the MMF/App -and provided in the MixVideo mix_video_configure() function. The get and set -methods for the properties will be available for the caller to set and get information at -configuration time. It will also be created by MixVideo and returned from the -mix_video_get_config() function, whereupon the MMF/App can use the get methods to -obtain current configuration information. -

-

-

-

-There are decode mode objects (for example, MixVideoConfigParamsDec) and encode -mode objects (for example, MixVideoConfigParamsEnc). Each of these types is refined -further with media specific objects. The application should create the correct type of -object to match the media format of the stream to be handled, e.g. if the media -format of the stream to be decoded is H.264, the application would create a -MixVideoConfigParamsDecH264 object for the mix_video_configure() call. -

-
-
-
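A sketch of this pattern for an H.264 decode session follows; the MIX_VIDEOCONFIGPARAMS() cast macro and the mix_video_configure() argument list are assumed names.

    /* Create the derived object that matches the stream's media format. */
    MixVideoConfigParamsDecH264 *config = mix_videoconfigparamsdec_h264_new();
    /* ... populate via the mix_videoconfigparamsdec_set_*() accessors ... */
    mix_video_configure(mix, MIX_VIDEOCONFIGPARAMS(config), drm_params);
    mix_videoconfigparamsdec_h264_unref(config);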

Details

-
-

MixVideoConfigParams

-
typedef struct {
-	MixParams parent;
-} MixVideoConfigParams;
-
-

-MI-X VideoConfig Parameter object

-
-
-
-

mix_videoconfigparams_new ()

-
MixVideoConfigParams * mix_videoconfigparams_new        (void);
-

-Use this method to create a new instance of MixVideoConfigParams

-
-- - - - -

returns :

A newly allocated instance of MixVideoConfigParams -
-
-
-
-

mix_videoconfigparams_ref ()

-
MixVideoConfigParams * mix_videoconfigparams_ref        (MixVideoConfigParams *mix);
-

-Add reference count.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixVideoConfigParams instance where reference count has been increased. -
-
-
-
-

mix_videoconfigparams_unref()

-
#define mix_videoconfigparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDec.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDec.html deleted file mode 100644 index 592d5c2..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDec.html +++ /dev/null @@ -1,797 +0,0 @@ - - - - -MixVideoConfigParamsDec - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideoConfigParamsDec

-

MixVideoConfigParamsDec — MI-X Video Decode Configuration Parameter Base Object

-
-
-

Synopsis

-
-                    MixVideoConfigParamsDec;
-MixVideoConfigParamsDec * mix_videoconfigparamsdec_new  (void);
-MixVideoConfigParamsDec * mix_videoconfigparamsdec_ref  (MixVideoConfigParamsDec *mix);
-#define             mix_videoconfigparamsdec_unref      (obj)
-MIX_RESULT          mix_videoconfigparamsdec_set_frame_order_mode
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         MixFrameOrderMode frame_order_mode);
-MIX_RESULT          mix_videoconfigparamsdec_get_frame_order_mode
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         MixFrameOrderMode *frame_order_mode);
-MIX_RESULT          mix_videoconfigparamsdec_set_header (MixVideoConfigParamsDec *obj,
-                                                         MixIOVec *header);
-MIX_RESULT          mix_videoconfigparamsdec_get_header (MixVideoConfigParamsDec *obj,
-                                                         MixIOVec **header);
-MIX_RESULT          mix_videoconfigparamsdec_set_mime_type
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         const char *mime_type);
-MIX_RESULT          mix_videoconfigparamsdec_get_mime_type
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         char **mime_type);
-MIX_RESULT          mix_videoconfigparamsdec_set_frame_rate
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint frame_rate_num,
-                                                         uint frame_rate_denom);
-MIX_RESULT          mix_videoconfigparamsdec_get_frame_rate
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint *frame_rate_num,
-                                                         uint *frame_rate_denom);
-MIX_RESULT          mix_videoconfigparamsdec_set_picture_res
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint picture_width,
-                                                         uint picture_height);
-MIX_RESULT          mix_videoconfigparamsdec_get_picture_res
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint *picture_width,
-                                                         uint *picture_height);
-MIX_RESULT          mix_videoconfigparamsdec_set_raw_format
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint raw_format);
-MIX_RESULT          mix_videoconfigparamsdec_get_raw_format
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint *raw_format);
-MIX_RESULT          mix_videoconfigparamsdec_set_rate_control
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint rate_control);
-MIX_RESULT          mix_videoconfigparamsdec_get_rate_control
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint *rate_control);
-MIX_RESULT          mix_videoconfigparamsdec_set_buffer_pool_size
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint bufpoolsize);
-MIX_RESULT          mix_videoconfigparamsdec_get_buffer_pool_size
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint *bufpoolsize);
-MIX_RESULT          mix_videoconfigparamsdec_set_extra_surface_allocation
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint extra_surface_allocation);
-MIX_RESULT          mix_videoconfigparamsdec_get_extra_surface_allocation
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint *extra_surface_allocation);
-
-
-
-

Object Hierarchy

-
-  MixParams
-   +----MixVideoConfigParams
-         +----MixVideoConfigParamsDec
-               +----MixVideoConfigParamsDecMP42
-               +----MixVideoConfigParamsDecVC1
-               +----MixVideoConfigParamsDecH264
-
-
-
-

Description

-

-The base object for MI-X video decode configuration parameter objects.

-
-
-

Details

-
-

MixVideoConfigParamsDec

-
typedef struct {
-	MixVideoConfigParams parent;
-
-	
-	/* Frame re-ordering mode */
-	MixFrameOrderMode frame_order_mode;
-	
-	/* Stream header information, such as 
-	 * codec_data in GStreamer pipelines */ 
-	MixIOVec header;
-
-	/* Mime type */
-	GString * mime_type;
-	
-	/* Frame rate numerator value */
-	uint frame_rate_num;
-	
-	/* Frame rate denominator value */	
-	uint frame_rate_denom;
-	
-	/* Picture width */
-	ulong picture_width;
-	
-	/* Picture height */
-	ulong picture_height;
-	
-	/* Render target format */
-	uint raw_format;
-	
-	/* Rate control: CBR, VBR, none. Only valid for encoding.
-	 * This should be set to none for decoding. */ 
-	uint rate_control;
-
-	/* Size of pool of MixBuffers to allocate */
-	uint mixbuffer_pool_size;
-	
-	/* Extra surfaces for MixVideoFrame objects to be allocated */
-	uint extra_surface_allocation;
-	
-	/* Reserved for future use */
-	void *reserved1;
-	
-	/* Reserved for future use */
-	void *reserved2;
-	
-	/* Reserved for future use */
-	void *reserved3;
-	
-	/* Reserved for future use */
-	void *reserved4;
-} MixVideoConfigParamsDec;
-
-

-MI-X VideoConfig Parameter object

-
-
-
-
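A sketch of how these fields are populated through the accessors documented below; all values are illustrative, and a real application would create a media-specific derived object (for example MixVideoConfigParamsDecH264) rather than the base type.

    MixVideoConfigParamsDec *dec = mix_videoconfigparamsdec_new();
    mix_videoconfigparamsdec_set_mime_type(dec, "video/x-h264"); /* string illustrative */
    mix_videoconfigparamsdec_set_frame_rate(dec, 30, 1);         /* 30/1 fps */
    mix_videoconfigparamsdec_set_picture_res(dec, 1280, 720);
    mix_videoconfigparamsdec_set_buffer_pool_size(dec, 8);       /* value illustrative */
    mix_videoconfigparamsdec_set_extra_surface_allocation(dec, 4);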

mix_videoconfigparamsdec_new ()

-
MixVideoConfigParamsDec * mix_videoconfigparamsdec_new  (void);
-

-Use this method to create a new instance of MixVideoConfigParamsDec

-
-- - - - -

returns :

A newly allocated instance of MixVideoConfigParamsDec -
-
-
-
-

mix_videoconfigparamsdec_ref ()

-
MixVideoConfigParamsDec * mix_videoconfigparamsdec_ref  (MixVideoConfigParamsDec *mix);
-

-Add reference count.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixVideoConfigParamsDec instance where reference count has been increased. -
-
-
-
-

mix_videoconfigparamsdec_unref()

-
#define mix_videoconfigparamsdec_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

mix_videoconfigparamsdec_set_frame_order_mode ()

-
MIX_RESULT          mix_videoconfigparamsdec_set_frame_order_mode
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         MixFrameOrderMode frame_order_mode);
-

-Set frame order mode.

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

frame_order_mode :

Frame re-ordering mode -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_get_frame_order_mode ()

-
MIX_RESULT          mix_videoconfigparamsdec_get_frame_order_mode
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         MixFrameOrderMode *frame_order_mode);
-

-Get frame order mode.

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

frame_order_mode :

pointer to frame re-ordering mode -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_set_header ()

-
MIX_RESULT          mix_videoconfigparamsdec_set_header (MixVideoConfigParamsDec *obj,
-                                                         MixIOVec *header);
-

-Set stream header information.

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

header :

Stream header information, such as codec_data in GStreamer pipelines -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_get_header ()

-
MIX_RESULT          mix_videoconfigparamsdec_get_header (MixVideoConfigParamsDec *obj,
-                                                         MixIOVec **header);
-

-Get stream header information. -

-
-

Note

-Caller is responsible to free (*header)->data field and *header -
-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

header :

Pointer to pointer of Stream header information -

returns :

Common Video Error Return Codes -
-
-
-
-
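A sketch of this ownership rule follows; free() is assumed to be the matching deallocator.

    /* The caller frees both the data field and the MixIOVec itself. */
    MixIOVec *header = NULL;
    if (mix_videoconfigparamsdec_get_header(obj, &header) == MIX_RESULT_SUCCESS) {
        /* ... use header->data ... */
        free(header->data);   /* deallocator assumed */
        free(header);
    }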

mix_videoconfigparamsdec_set_mime_type ()

-
MIX_RESULT          mix_videoconfigparamsdec_set_mime_type
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         const char *mime_type);
-

-Set stream mime type

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

mime_type :

mime type -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_get_mime_type ()

-
MIX_RESULT          mix_videoconfigparamsdec_get_mime_type
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         char **mime_type);
-

-Get mime type -

-
-

Note

-Caller is responsible to free *mime_type -
-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

mime_type :

Pointer to pointer of type char -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_set_frame_rate ()

-
MIX_RESULT          mix_videoconfigparamsdec_set_frame_rate
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint frame_rate_num,
-                                                         uint frame_rate_denom);
-

-Set frame rate

-
-- - - - - - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

frame_rate_num :

Frame rate numerator value -

frame_rate_denom :

Frame rate denominator value -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_get_frame_rate ()

-
MIX_RESULT          mix_videoconfigparamsdec_get_frame_rate
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint *frame_rate_num,
-                                                         uint *frame_rate_denom);
-

-Get frame rate

-
-- - - - - - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

frame_rate_num :

Frame rate numerator value to be returned -

frame_rate_denom :

Frame rate denominator value to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_set_picture_res ()

-
MIX_RESULT          mix_videoconfigparamsdec_set_picture_res
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint picture_width,
-                                                         uint picture_height);
-

-Set video resolution

-
-- - - - - - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

picture_width :

Picture width -

picture_height :

Picture height -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_get_picture_res ()

-
MIX_RESULT          mix_videoconfigparamsdec_get_picture_res
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint *picture_width,
-                                                         uint *picture_height);
-

-Get video resolution

-
-- - - - - - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

picture_width :

Picture width to be returned -

picture_height :

Picture height to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_set_raw_format ()

-
MIX_RESULT          mix_videoconfigparamsdec_set_raw_format
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint raw_format);
-

-Set Render target format

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

raw_format :

Render target format -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_get_raw_format ()

-
MIX_RESULT          mix_videoconfigparamsdec_get_raw_format
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint *raw_format);
-

-Get Render target format

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

raw_format :

Render target format to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_set_rate_control ()

-
MIX_RESULT          mix_videoconfigparamsdec_set_rate_control
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint rate_control);
-

-Set rate control

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

rate_control :

Rate control: CBR, VBR, none. Only valid for encoding. - This should be set to none for decoding. -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_get_rate_control ()

-
MIX_RESULT          mix_videoconfigparamsdec_get_rate_control
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint *rate_control);
-

-Get rate control

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

rate_control :

Rate control to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_set_buffer_pool_size ()

-
MIX_RESULT          mix_videoconfigparamsdec_set_buffer_pool_size
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint bufpoolsize);
-

-Set buffer pool size

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

bufpoolsize :

Size of pool of MixBuffers to allocate -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_get_buffer_pool_size ()

-
MIX_RESULT          mix_videoconfigparamsdec_get_buffer_pool_size
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint *bufpoolsize);
-

-Get buffer pool size

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

bufpoolsize :

Size of pool of MixBuffers to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_set_extra_surface_allocation ()

-
MIX_RESULT          mix_videoconfigparamsdec_set_extra_surface_allocation
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint extra_surface_allocation);
-

-Set extra surface allocation

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

extra_surface_allocation :

Extra surfaces for MixVideoFrame objects to be allocated -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_get_extra_surface_allocation ()

-
MIX_RESULT          mix_videoconfigparamsdec_get_extra_surface_allocation
-                                                        (MixVideoConfigParamsDec *obj,
-                                                         uint *extra_surface_allocation);
-

-Get extra surface allocation

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDec object -

extra_surface_allocation :

Extra surfaces for MixVideoFrame objects to be returned -

returns :

Common Video Error Return Codes -
-
-
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecH264.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecH264.html deleted file mode 100644 index eb76a0e..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecH264.html +++ /dev/null @@ -1,130 +0,0 @@ - - - - -MixVideoConfigParamsDecH264 - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideoConfigParamsDecH264

-

MixVideoConfigParamsDecH264 — MI-X Video H.264 Decode Configuration Parameter

-
- -
-

Object Hierarchy

-
-  MixParams
-   +----MixVideoConfigParams
-         +----MixVideoConfigParamsDec
-               +----MixVideoConfigParamsDecH264
-
-
-
-

Description

-

-MI-X video H.264 decode configuration parameter objects.

-
-
-

Details

-
-

MixVideoConfigParamsDecH264

-
typedef struct {
-  MixVideoConfigParamsDec parent;
-
-
-  /* TODO: Add H.264 configuration parameters */
-  
-  /* Reserved for future use */
-  void *reserved1;
-  
-  /* Reserved for future use */  
-  void *reserved2;
-  
-  /* Reserved for future use */  
-  void *reserved3;
-  
-  /* Reserved for future use */  
-  void *reserved4;
-} MixVideoConfigParamsDecH264;
-
-

-MI-X VideoConfig Parameter object

-
-
-
-

mix_videoconfigparamsdec_h264_new ()

-
MixVideoConfigParamsDecH264 * mix_videoconfigparamsdec_h264_new
-                                                        (void);
-

-Use this method to create a new instance of MixVideoConfigParamsDecH264

-
-- - - - -

returns :

A newly allocated instance of MixVideoConfigParamsDecH264 -
-
-
-
-

mix_videoconfigparamsdec_h264_unref()

-
#define mix_videoconfigparamsdec_h264_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecMP42.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecMP42.html deleted file mode 100644 index 07bf4e6..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecMP42.html +++ /dev/null @@ -1,240 +0,0 @@ - - - - -MixVideoConfigParamsDecMP42 - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideoConfigParamsDecMP42

-

MixVideoConfigParamsDecMP42 — MI-X Video MPEG-4 Part 2 Decode Configuration Parameter

-
- -
-

Object Hierarchy

-
-  MixParams
-   +----MixVideoConfigParams
-         +----MixVideoConfigParamsDec
-               +----MixVideoConfigParamsDecMP42
-
-
-
-

Description

-

-MI-X video MPEG-4 Part 2 decode configuration parameter objects.

-
-
-

Details

-
-

MixVideoConfigParamsDecMP42

-
typedef struct {
-	MixVideoConfigParamsDec parent;
-
-
-	/* MPEG version */
-	uint mpegversion;
-	
-	/* DivX version */
-	uint divxversion;
-
-	/* Reserved for future use */
-	void *reserved1;
-	
-	/* Reserved for future use */
-	void *reserved2;
-	
-	/* Reserved for future use */	
-	void *reserved3;
-	
-	/* Reserved for future use */	
-	void *reserved4;
-} MixVideoConfigParamsDecMP42;
-
-

-MI-X VideoConfig Parameter object

-
-
-
-

mix_videoconfigparamsdec_mp42_new ()

-
MixVideoConfigParamsDecMP42 * mix_videoconfigparamsdec_mp42_new
-                                                        (void);
-

-Use this method to create a new instance of MixVideoConfigParamsDecMP42

-
-- - - - -

returns :

A newly allocated instance of MixVideoConfigParamsDecMP42 -
-
-
-
-

mix_videoconfigparamsdec_mp42_unref()

-
#define mix_videoconfigparamsdec_mp42_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

mix_videoconfigparamsdec_mp42_set_mpegversion ()

-
MIX_RESULT          mix_videoconfigparamsdec_mp42_set_mpegversion
-                                                        (MixVideoConfigParamsDecMP42 *obj,
-                                                         uint version);
-

-Set MPEG version

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDecMP42 object -

version :

MPEG version -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_mp42_get_mpegversion ()

-
MIX_RESULT          mix_videoconfigparamsdec_mp42_get_mpegversion
-                                                        (MixVideoConfigParamsDecMP42 *obj,
-                                                         uint *version);
-

-Get MPEG version

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDecMP42 object -

version :

MPEG version to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsdec_mp42_set_divxversion ()

-
MIX_RESULT          mix_videoconfigparamsdec_mp42_set_divxversion
-                                                        (MixVideoConfigParamsDecMP42 *obj,
-                                                         uint version);
-

-Set DivX version

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsDecMP42 object -

version :

DivX version -

returns :

Common Video Error Return Codes -
-
-
-
-
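A sketch using the two version setters documented above; the version values are illustrative only.

    MixVideoConfigParamsDecMP42 *mp42 = mix_videoconfigparamsdec_mp42_new();
    mix_videoconfigparamsdec_mp42_set_mpegversion(mp42, 4);
    mix_videoconfigparamsdec_mp42_set_divxversion(mp42, 5);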

mix_videoconfigparamsdec_mp42_get_divxversion ()

-
MIX_RESULT          mix_videoconfigparamsdec_mp42_get_divxversion
-                                                        (MixVideoConfigParamsDecMP42 *obj,
-                                                         uint *version);
-
-

-Get DivX version
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecVC1.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecVC1.html deleted file mode 100644 index aec7d28..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsDecVC1.html +++ /dev/null @@ -1,137 +0,0 @@ - - - - -MixVideoConfigParamsDecVC1 - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideoConfigParamsDecVC1

-

MixVideoConfigParamsDecVC1 — MI-X Video VC-1 Decode Configuration Parameter

-
- -
-

Object Hierarchy

-
-  MixParams
-   +----MixVideoConfigParams
-         +----MixVideoConfigParamsDec
-               +----MixVideoConfigParamsDecVC1
-
-
-
-

Description

-

-MI-X video VC-1 decode configuration parameter objects.

-
-
-

Details

-
-

MixVideoConfigParamsDecVC1

-
typedef struct {
-  MixVideoConfigParamsDec parent;
-
-
-  /* TODO: Add VC1 configuration parameters */
-  /* TODO: wmv_version and fourcc type might be changed later */
-  
-  /* WMV version */
-  uint wmv_version;
-  
-  /* FourCC code */
-  uint fourcc;
-
-  /* Reserved for future use */
-  void *reserved1;
-  
-  /* Reserved for future use */  
-  void *reserved2;
-  
-  /* Reserved for future use */  
-  void *reserved3;
-  
-  /* Reserved for future use */  
-  void *reserved4;
-} MixVideoConfigParamsDecVC1;
-
-

-MI-X VideoConfig Parameter object

-
-
-
-

mix_videoconfigparamsdec_vc1_new ()

-
MixVideoConfigParamsDecVC1 * mix_videoconfigparamsdec_vc1_new
-                                                        (void);
-

-Use this method to create a new instance of MixVideoConfigParamsDecVC1

-
-- - - - -

returns :

A newly allocated instance of MixVideoConfigParamsDecVC1 -
-
-
-
-

mix_videoconfigparamsdec_vc1_unref()

-
#define mix_videoconfigparamsdec_vc1_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEnc.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEnc.html deleted file mode 100644 index b3c9de6..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEnc.html +++ /dev/null @@ -1,1245 +0,0 @@ - - - - -MixVideoConfigParamsEnc - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideoConfigParamsEnc

-

MixVideoConfigParamsEnc — MI-X Video Encode Configuration Parameter Base Object

-
-
-

Synopsis

-
-                    MixVideoConfigParamsEnc;
-MixVideoConfigParamsEnc * mix_videoconfigparamsenc_new  (void);
-MixVideoConfigParamsEnc * mix_videoconfigparamsenc_ref  (MixVideoConfigParamsEnc *mix);
-#define             mix_videoconfigparamsenc_unref      (obj)
-MIX_RESULT          mix_videoconfigparamsenc_set_mime_type
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         const char *mime_type);
-MIX_RESULT          mix_videoconfigparamsenc_get_mime_type
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         char **mime_type);
-MIX_RESULT          mix_videoconfigparamsenc_set_frame_rate
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint frame_rate_num,
-                                                         uint frame_rate_denom);
-MIX_RESULT          mix_videoconfigparamsenc_get_frame_rate
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint *frame_rate_num,
-                                                         uint *frame_rate_denom);
-MIX_RESULT          mix_videoconfigparamsenc_set_picture_res
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint picture_width,
-                                                         uint picture_height);
-MIX_RESULT          mix_videoconfigparamsenc_get_picture_res
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint *picture_width,
-                                                         uint *picture_height);
-MIX_RESULT          mix_videoconfigparamsenc_set_encode_format
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixEncodeTargetFormat encode_format);
-MIX_RESULT          mix_videoconfigparamsenc_get_encode_format
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixEncodeTargetFormat *encode_format);
-MIX_RESULT          mix_videoconfigparamsenc_set_bit_rate
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint bps);
-MIX_RESULT          mix_videoconfigparamsenc_get_bit_rate
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint *bps);
-MIX_RESULT          mix_videoconfigparamsenc_set_init_qp
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint initial_qp);
-MIX_RESULT          mix_videoconfigparamsenc_get_init_qp
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint *initial_qp);
-MIX_RESULT          mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc *obj,
-                                                         uint min_qp);
-MIX_RESULT          mix_videoconfigparamsenc_get_min_qp (MixVideoConfigParamsEnc *obj,
-                                                         uint *min_qp);
-MIX_RESULT          mix_videoconfigparamsenc_set_intra_period
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint intra_period);
-MIX_RESULT          mix_videoconfigparamsenc_get_intra_period
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint *intra_period);
-MIX_RESULT          mix_videoconfigparamsenc_set_buffer_pool_size
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint bufpoolsize);
-MIX_RESULT          mix_videoconfigparamsenc_get_buffer_pool_size
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint *bufpoolsize);
-MIX_RESULT          mix_videoconfigparamsenc_set_share_buf_mode
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         bool share_buf_mod);
-MIX_RESULT          mix_videoconfigparamsenc_get_share_buf_mode
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         bool *share_buf_mod);
-MIX_RESULT          mix_videoconfigparamsenc_set_ci_frame_info
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         ulong *ci_frame_id,
-                                                         uint ci_frame_num);
-MIX_RESULT          mix_videoconfigparamsenc_get_ci_frame_info
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         ulong * *ci_frame_id,
-                                                         uint *ci_frame_num);
-MIX_RESULT          mix_videoconfigparamsenc_set_drawable
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         ulong draw);
-MIX_RESULT          mix_videoconfigparamsenc_get_drawable
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         ulong *draw);
-MIX_RESULT          mix_videoconfigparamsenc_set_need_display
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         bool need_display);
-MIX_RESULT          mix_videoconfigparamsenc_get_need_display
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         bool *need_display);
-MIX_RESULT          mix_videoconfigparamsenc_set_rate_control
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixRateControl rcmode);
-MIX_RESULT          mix_videoconfigparamsenc_get_rate_control
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixRateControl *rcmode);
-MIX_RESULT          mix_videoconfigparamsenc_set_raw_format
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixRawTargetFormat raw_format);
-MIX_RESULT          mix_videoconfigparamsenc_get_raw_format
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixRawTargetFormat *raw_format);
-MIX_RESULT          mix_videoconfigparamsenc_set_profile
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixProfile profile);
-MIX_RESULT          mix_videoconfigparamsenc_get_profile
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixProfile *profile);
-
-
-
-

Object Hierarchy

-
-  MixParams
-   +----MixVideoConfigParams
-         +----MixVideoConfigParamsEnc
-               +----MixVideoConfigParamsEncMPEG4
-               +----MixVideoConfigParamsEncH264
-
-
-
-

Description

-

-The base object for MI-X video encode configuration parameter objects.

-
-
-

Details

-
-

MixVideoConfigParamsEnc

-
typedef struct {
-	MixVideoConfigParams parent;
-
-	//MixIOVec header;
-
-	/* the type of the following members will be changed after MIX API doc is ready */
-
-	/* Encoding profile */
-	MixProfile profile;
-
-	/* Raw format to be encoded */
-	MixRawTargetFormat raw_format;
-
-	/* Rate control mode */
-	MixRateControl rate_control;  	
-
-	/* Bitrate when rate control is used */
-	uint bitrate;
-	
-	/* Numerator of frame rate */
-	uint frame_rate_num;
-	
-	/* Denominator of frame rate */
-	uint frame_rate_denom;
-	
-	/* The initial QP value */
-	uint initial_qp;
-	
-	/* The minimum QP value */
-	uint min_qp;
-	
-	/* Number of frames between key frames (GOP size) */
-	uint intra_period;
-	
-	/* Width of video frame */
-	uint16 picture_width;
-	
-	/* Height of the video frame */
-	uint16 picture_height;	
-
-	/* Mime type, reserved */
-	GString * mime_type;
-	
-	/* Encode target format */
-	MixEncodeTargetFormat encode_format;
-
-	/* Size of the pool of MixBuffer objects */
-	uint mixbuffer_pool_size;
-
-	/* Are buffers shared between capture and encoding drivers */
-	bool share_buf_mode;	
-
-	/* Array of frame IDs created by capture library */
-	ulong *	ci_frame_id;
-	
-	/* Size of the array ci_frame_id */
-	uint	ci_frame_num;
-	
-	
-	/* Indicates whether MixVideoFrames suitable for displaying 
-	 * need to be enqueued for retrieval using mix_video_get_frame() */
-	bool need_display;
-	
-	/* Reserved for future use */
-	void *reserved1;
-	
-	/* Reserved for future use */	
-	void *reserved2;
-	
-	/* Reserved for future use */	
-	void *reserved3;
-	
-	/* Reserved for future use */	
-	void *reserved4;
-} MixVideoConfigParamsEnc;
-
-

-MI-X VideoConfig Parameter object

-
-
-
-
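A sketch of a minimal encode configuration using the accessors documented below; all numeric values are illustrative, and the enum member names (MIX_PROFILE_H264BASELINE, MIX_ENCODE_TARGET_FORMAT_H264, MIX_RAW_TARGET_FORMAT_NV12, MIX_RATE_CONTROL_CBR) and the FALSE constant are assumptions not defined in this entry.

    MixVideoConfigParamsEnc *enc = mix_videoconfigparamsenc_new();
    mix_videoconfigparamsenc_set_profile(enc, MIX_PROFILE_H264BASELINE);
    mix_videoconfigparamsenc_set_encode_format(enc, MIX_ENCODE_TARGET_FORMAT_H264);
    mix_videoconfigparamsenc_set_raw_format(enc, MIX_RAW_TARGET_FORMAT_NV12);
    mix_videoconfigparamsenc_set_rate_control(enc, MIX_RATE_CONTROL_CBR);
    mix_videoconfigparamsenc_set_bit_rate(enc, 2000000);   /* 2 Mbps */
    mix_videoconfigparamsenc_set_frame_rate(enc, 30, 1);
    mix_videoconfigparamsenc_set_picture_res(enc, 1280, 720);
    mix_videoconfigparamsenc_set_intra_period(enc, 30);    /* one key frame per second */
    mix_videoconfigparamsenc_set_need_display(enc, FALSE); /* no preview frames enqueued */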

mix_videoconfigparamsenc_new ()

-
MixVideoConfigParamsEnc * mix_videoconfigparamsenc_new  (void);
-

-Use this method to create a new instance of MixVideoConfigParamsEnc

-
-- - - - -

returns :

A newly allocated instance of MixVideoConfigParamsEnc -
-
-
-
-

mix_videoconfigparamsenc_ref ()

-
MixVideoConfigParamsEnc * mix_videoconfigparamsenc_ref  (MixVideoConfigParamsEnc *mix);
-

-Add reference count.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixVideoConfigParamsEnc instance where reference count has been increased. -
-
-
-
-

mix_videoconfigparamsenc_unref()

-
#define mix_videoconfigparamsenc_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

mix_videoconfigparamsenc_set_mime_type ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_mime_type
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         const char *mime_type);
-

-Set mime type

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

mime_type :

Mime type -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_mime_type ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_mime_type
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         char **mime_type);
-

-Get mime type -

-

-

-
-

Note

-Caller is responsible to free *mime_type -
-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

mime_type :

Mime type to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_set_frame_rate ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_frame_rate
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint frame_rate_num,
-                                                         uint frame_rate_denom);
-

-Set frame rate

-
-- - - - - - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

frame_rate_num :

Numerator of frame rate -

frame_rate_denom :

Denominator of frame rate -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_frame_rate ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_frame_rate
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint *frame_rate_num,
-                                                         uint *frame_rate_denom);
-

-Get frame rate

-
-- - - - - - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

frame_rate_num :

Numerator of frame rate to be returned -

frame_rate_denom :

Denominator of frame rate to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_set_picture_res ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_picture_res
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint picture_width,
-                                                         uint picture_height);
-

-Set width and height of video frame

-
-- - - - - - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

picture_width :

Width of video frame -

picture_height :

Height of the video frame -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_picture_res ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_picture_res
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint *picture_width,
-                                                         uint *picture_height);
-

-Get width and height of video frame

-
-- - - - - - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

picture_width :

Width of video frame to be returned -

picture_height :

Height of the video frame to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_set_encode_format ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_encode_format
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixEncodeTargetFormat encode_format);
-

-Set Encode target format

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

encode_format :

Encode target format -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_encode_format ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_encode_format
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixEncodeTargetFormat *encode_format);
-

-Get Encode target format

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

encode_format :

Encode target format to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_set_bit_rate ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_bit_rate
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint bps);
-

-Set bitrate

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

bps :

bitrate -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_bit_rate ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_bit_rate
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint *bps);
-

-Get bitrate

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

bps :

bitrate to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_set_init_qp ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_init_qp
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint initial_qp);
-

-Set The initial QP value

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

initial_qp :

The initial QP value -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_init_qp ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_init_qp
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint *initial_qp);
-

-Get The initial QP value

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

initial_qp :

The initial QP value to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_set_min_qp ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc *obj,
-                                                         uint min_qp);
-

-Set The minimum QP value

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

min_qp :

The minimum QP value -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_min_qp ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_min_qp (MixVideoConfigParamsEnc *obj,
-                                                         uint *min_qp);
-

-Get The minimum QP value

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

min_qp :

The minimum QP value to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_set_intra_period ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_intra_period
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint intra_period);
-

-Set Number of frames between key frames (GOP size)

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

intra_period :

Number of frames between key frames (GOP size) -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_intra_period ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_intra_period
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint *intra_period);
-

-Get Number of frames between key frames (GOP size)

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

intra_period :

Number of frames between key frames (GOP size) to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_set_buffer_pool_size ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_buffer_pool_size
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint bufpoolsize);
-

-Set Size of the pool of MixBuffer objects

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

bufpoolsize :

Size of the pool of MixBuffer objects -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_buffer_pool_size ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_buffer_pool_size
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         uint *bufpoolsize);
-
-

-Get Size of the pool of MixBuffer objects
-
-

mix_videoconfigparamsenc_set_share_buf_mode ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_share_buf_mode
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         bool share_buf_mod);
-

-Set the flag that indicates whether buffers are shared between capture and encoding drivers or not

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

share_buf_mod :

A flag to indicate whether buffers are shared - between capture and encoding drivers or not -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_share_buf_mode ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_share_buf_mode
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         bool *share_buf_mod);
-

-Get the flag that indicates whether buffers are shared between capture and encoding drivers or not

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

share_buf_mod :

the flag to be returned that indicates whether buffers - are shared between capture and encoding drivers or not -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_set_ci_frame_info ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_ci_frame_info
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         ulong *ci_frame_id,
-                                                         uint ci_frame_num);
-

-Set CI frame information

-
-- - - - - - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

ci_frame_id :

Array of frame IDs created by capture library -

ci_frame_num :

Size of the array ci_frame_id -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_ci_frame_info ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_ci_frame_info
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         ulong * *ci_frame_id,
-                                                         uint *ci_frame_num);
-

-Get CI frame information -

-
-

Note

-Caller is responsible to free *ci_frame_id -
-
-- - - - - - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

ci_frame_id :

Array of frame IDs created by capture library to be returned -

ci_frame_num :

Size of the array ci_frame_id to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-
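A sketch of the shared-buffer setup required before passing libCI frame indexes to mix_video_encode(), as noted in that function's description; ci_ids and ci_count would come from the capture library and are assumed here, as is the TRUE constant.

    /* Enable shared buffers between the capture and encoding drivers and
     * register the CI frame IDs obtained from the capture library. */
    mix_videoconfigparamsenc_set_share_buf_mode(enc, TRUE);
    mix_videoconfigparamsenc_set_ci_frame_info(enc, ci_ids, ci_count);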

mix_videoconfigparamsenc_set_drawable ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_drawable
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         ulong draw);
-

-Set drawable

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

draw :

drawable -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_drawable ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_drawable
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         ulong *draw);
-

-Get drawable

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

draw :

drawable to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_set_need_display ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_need_display
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         bool need_display);
-

-Set the flag used to indicate whether MixVideoFrames suitable for displaying -need to be enqueued for retrieval using mix_video_get_frame()

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

need_display :

Flag to indicate whether MixVideoFrames suitable for displaying - need to be enqueued for retrieval using mix_video_get_frame() -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_need_display ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_need_display
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         bool *need_display);
-

-Get the flag used to indicate whether MixVideoFrames suitable for displaying -need to be enqueued for retrieval using mix_video_get_frame()

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

need_display :

A flag to be returned that indicates whether MixVideoFrames suitable for displaying - need to be enqueued for retrieval using mix_video_get_frame() -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_set_rate_control ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_rate_control
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixRateControl rcmode);
-

-Set Rate control mode

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

rcmode :

Rate control mode to be set -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_rate_control ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_rate_control
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixRateControl *rcmode);
-
-

-Get Rate control mode

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

rcmode :

Rate control mode to be returned -

returns :

Common Video Error Return Codes -
-
-

mix_videoconfigparamsenc_set_raw_format ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_raw_format
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixRawTargetFormat raw_format);
-

-Set Raw format to be encoded

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

raw_format :

Raw format to be encoded -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_raw_format ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_raw_format
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixRawTargetFormat *raw_format);
-

-Get Raw format

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

raw_format :

Raw format to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_set_profile ()

-
MIX_RESULT          mix_videoconfigparamsenc_set_profile
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixProfile profile);
-

-Set Encoding profile

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

profile :

Encoding profile -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_get_profile ()

-
MIX_RESULT          mix_videoconfigparamsenc_get_profile
-                                                        (MixVideoConfigParamsEnc *obj,
-                                                         MixProfile *profile);
-

-Get Encoding profile

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEnc object -

profile :

Encoding profile to be returned -

returns :

Common Video Error Return Codes -
-
-
-
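
Putting the setters on this page together, a helper might look like the sketch below. The MixRateControl, MixRawTargetFormat and MixProfile values are taken as arguments because their enum members are defined in the library headers, not reproduced on this page.

  /* Sketch: fill the common encode configuration fields. */
  MIX_RESULT configure_encoder_params(MixVideoConfigParamsEnc *obj,
                                      MixRateControl rcmode,
                                      MixRawTargetFormat raw_format,
                                      MixProfile profile)
  {
      MIX_RESULT ret;

      ret = mix_videoconfigparamsenc_set_rate_control(obj, rcmode);
      if (ret != MIX_RESULT_SUCCESS)
          return ret;

      ret = mix_videoconfigparamsenc_set_raw_format(obj, raw_format);
      if (ret != MIX_RESULT_SUCCESS)
          return ret;

      return mix_videoconfigparamsenc_set_profile(obj, profile);
  }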
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncH264.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncH264.html deleted file mode 100644 index fd6d7d7..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncH264.html +++ /dev/null @@ -1,398 +0,0 @@ - - - - -MixVideoConfigParamsEncH264 - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideoConfigParamsEncH264

-

MixVideoConfigParamsEncH264 — MI-X Video H.264 Encode Configuration Parameter

-
- -
-

Object Hierarchy

-
-  MixParams
-   +----MixVideoConfigParams
-         +----MixVideoConfigParamsEnc
-               +----MixVideoConfigParamsEncH264
-
-
-
-

Description

-

-MI-X video H.264 encode configuration parameter objects.

-
-
-

Details

-
-

MixVideoConfigParamsEncH264

-
typedef struct {
-  MixVideoConfigParamsEnc parent;
-
-
-  /* TODO: Add H.264 configuration parameters */
-  
-  /* The basic unit size used by rate control */  
-  uint basic_unit_size;
-  
-  /* Number of slices in one frame */
-  uint slice_num;
-  
-  /* enable/disable deblocking */
-  uint8 disable_deblocking_filter_idc;	
-
-  /* delimiter_type */
-  MixDelimiterType delimiter_type;
-  
-  /* Reserved for future use */  
-  void *reserved1;
-  
-  /* Reserved for future use */  
-  void *reserved2;
-  
-  /* Reserved for future use */  
-  void *reserved3;
-  
-  /* Reserved for future use */  
-  void *reserved4;
-} MixVideoConfigParamsEncH264;
-
-

-MI-X VideoConfig Parameter object

-
-
-
-

mix_videoconfigparamsenc_h264_new ()

-
MixVideoConfigParamsEncH264 * mix_videoconfigparamsenc_h264_new
-                                                        (void);
-

-Use this method to create a new instance of MixVideoConfigParamsEncH264

-
-- - - - -

returns :

A newly allocated instance of MixVideoConfigParamsEncH264 -
-
-
-
-

mix_videoconfigparamsenc_h264_unref()

-
#define mix_videoconfigparamsenc_h264_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

mix_videoconfigparamsenc_h264_set_bus ()

-
MIX_RESULT          mix_videoconfigparamsenc_h264_set_bus
-                                                        (MixVideoConfigParamsEncH264 *obj,
-                                                         uint basic_unit_size);
-

-Set the basic unit size used by rate control

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncH264 object -

basic_unit_size :

The basic unit size used by rate control -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_h264_get_bus ()

-
MIX_RESULT          mix_videoconfigparamsenc_h264_get_bus
-                                                        (MixVideoConfigParamsEncH264 *obj,
-                                                         uint *basic_unit_size);
-

-Get the basic unit size used by rate control

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncH264 object -

basic_unit_size :

The basic unit size to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_h264_set_dlk ()

-
MIX_RESULT          mix_videoconfigparamsenc_h264_set_dlk
-                                                        (MixVideoConfigParamsEncH264 *obj,
-                                                         uint disable_deblocking_filter_idc);
-

-Set the flag to enable/disable deblocking

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncH264 object -

disable_deblocking_filter_idc :

The flag to enable/disable deblocking -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_h264_get_dlk ()

-
MIX_RESULT          mix_videoconfigparamsenc_h264_get_dlk
-                                                        (MixVideoConfigParamsEncH264 *obj,
-                                                         uint *disable_deblocking_filter_idc);
-

-Get the flag to enable/disable deblocking

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncH264 object -

disable_deblocking_filter_idc :

deblocking flag to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_h264_set_slice_num ()

-
MIX_RESULT          mix_videoconfigparamsenc_h264_set_slice_num
-                                                        (MixVideoConfigParamsEncH264 *obj,
-                                                         uint slice_num);
-

-Set the number of slices in one frame

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncH264 object -

slice_num :

Number of slices in one frame -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_h264_get_slice_num ()

-
MIX_RESULT          mix_videoconfigparamsenc_h264_get_slice_num
-                                                        (MixVideoConfigParamsEncH264 *obj,
-                                                         uint *slice_num);
-

-Get the number of slices in one frame

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncH264 object -

slice_num :

Number of slices in one frame to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_h264_set_delimiter_type ()

-
MIX_RESULT          mix_videoconfigparamsenc_h264_set_delimiter_type
-                                                        (MixVideoConfigParamsEncH264 *obj,
-                                                         MixDelimiterType delimiter_type);
-

-Set Delimiter type

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncH264 object -

delimiter_type :

Delimiter type -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_h264_get_delimiter_type ()

-
MIX_RESULT          mix_videoconfigparamsenc_h264_get_delimiter_type
-                                                        (MixVideoConfigParamsEncH264 *obj,
-                                                         MixDelimiterType *delimiter_type);
-

-Get Delimiter type

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncH264 object -

delimiter_type :

Delimiter type to be returned -

returns :

Common Video Error Return Codes -
-
-
-
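
A minimal sketch of creating and populating the H.264-specific parameters; the literal values are illustrative only (a disable_deblocking_filter_idc of 0 keeps deblocking enabled), and the delimiter type is left as a comment because the MixDelimiterType members live in the library headers, not on this page.

  MixVideoConfigParamsEncH264 *params = mix_videoconfigparamsenc_h264_new();
  if (params != NULL) {
      mix_videoconfigparamsenc_h264_set_bus(params, 0);        /* basic unit size for rate control */
      mix_videoconfigparamsenc_h264_set_slice_num(params, 1);  /* one slice per frame */
      mix_videoconfigparamsenc_h264_set_dlk(params, 0);        /* keep deblocking enabled */
      /* mix_videoconfigparamsenc_h264_set_delimiter_type(params, ...); */
      /* ... pass params to configuration, then drop the local reference: */
      mix_videoconfigparamsenc_h264_unref(params);
  }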
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncMPEG4.html b/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncMPEG4.html deleted file mode 100644 index 706a209..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideoConfigParamsEncMPEG4.html +++ /dev/null @@ -1,319 +0,0 @@ - - - - -MixVideoConfigParamsEncMPEG4 - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideoConfigParamsEncMPEG4

-

MixVideoConfigParamsEncMPEG4 — MI-X Video MPEG 4:2 Encode Configuration Parameter

-
-
-

Synopsis

-
-                    MixVideoConfigParamsEncMPEG4;
-MixVideoConfigParamsEncMPEG4 * mix_videoconfigparamsenc_mpeg4_new
-                                                        (void);
-#define             mix_videoconfigparamsenc_mpeg4_unref(obj)
-MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_dlk
-                                                        (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         uint disable_deblocking_filter_idc);
-MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_dlk
-                                                        (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         uint *disable_deblocking_filter_idc);
-MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_profile_level
-                                                        (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         uchar profile_and_level_indication);
-MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_profile_level
-                                                        (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         uchar *profile_and_level_indication);
-MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_fixed_vti
-                                                        (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         uint fixed_vop_time_increment);
-MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_fixed_vti
-                                                        (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         uint *fixed_vop_time_increment);
-
-
-
-

Object Hierarchy

-
-  MixParams
-   +----MixVideoConfigParams
-         +----MixVideoConfigParamsEnc
-               +----MixVideoConfigParamsEncMPEG4
-
-
-
-

Description

-

-MI-X video MPEG 4:2 encode configuration parameter objects.

-
-
-

Details

-
-

MixVideoConfigParamsEncMPEG4

-
typedef struct {
-  MixVideoConfigParamsEnc parent;
-
-
-  /* TODO: Add MPEG-4 configuration parameters */
-  
-  /* Indicate profile and level. 
-   * Default value is 3. 
-   * Can be ignored (refer to encoding 
-   * specification for more info). */
-  uchar  profile_and_level_indication;
-  
-  /* Number of ticks between two successive VOPs 
-   * in display order. Default value is 3. 
-   * Can be ignored (refer to encoding specification 
-   * for more info) */
-  uint fixed_vop_time_increment;
-  
-  /* enable/disable deblocking */
-  uint disable_deblocking_filter_idc;
-  
-  /* Reserved for future use */
-  void *reserved1;
-  
-  /* Reserved for future use */  
-  void *reserved2;
-  
-  /* Reserved for future use */  
-  void *reserved3;
-  
-  /* Reserved for future use */  
-  void *reserved4;
-} MixVideoConfigParamsEncMPEG4;
-
-

-MI-X VideoConfig Parameter object

-
-
-
-

mix_videoconfigparamsenc_mpeg4_new ()

-
MixVideoConfigParamsEncMPEG4 * mix_videoconfigparamsenc_mpeg4_new
-                                                        (void);
-

-Use this method to create a new instance of MixVideoConfigParamsEncMPEG4

-
-- - - - -

returns :

A newly allocated instance of MixVideoConfigParamsEncMPEG4 -
-
-
-
-

mix_videoconfigparamsenc_mpeg4_unref()

-
#define mix_videoconfigparamsenc_mpeg4_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

mix_videoconfigparamsenc_mpeg4_set_dlk ()

-
MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_dlk
-                                                        (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         uint disable_deblocking_filter_idc);
-

-Set the flag to enable/disable deblocking

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncMPEG4 object -

disable_deblocking_filter_idc :

The flag to enable/disable deblocking -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_mpeg4_get_dlk ()

-
MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_dlk
-                                                        (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         uint *disable_deblocking_filter_idc);
-

-Get the flag to enable/disable deblocking

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncMPEG4 object -

disable_deblocking_filter_idc :

deblocking flag to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_mpeg4_set_profile_level ()

-
MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_profile_level
-                                                        (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         uchar profile_and_level_indication);
-

-Set profile_and_level_indication

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncMPEG4 object -

profile_and_level_indication :

Indicate profile and level. Default value is 3. - Can be ignored (refer to encoding specification - for more info). -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_mpeg4_get_profile_level ()

-
MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_profile_level
-                                                        (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         uchar *profile_and_level_indication);
-

-Get profile_and_level_indication

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncMPEG4 object -

profile_and_level_indication :

Indicated profile and level to be returned. Default value is 3. Can be ignored (refer to encoding specification for more info). -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoconfigparamsenc_mpeg4_set_fixed_vti ()

-
MIX_RESULT          mix_videoconfigparamsenc_mpeg4_set_fixed_vti
-                                                        (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         uint fixed_vop_time_increment);
-
-

-Set fixed_vop_time_increment

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncMPEG4 object -

fixed_vop_time_increment :

Number of ticks between two successive VOPs in display order. Default value is 3. Can be ignored (refer to encoding specification for more info) -

returns :

Common Video Error Return Codes -
-
-

mix_videoconfigparamsenc_mpeg4_get_fixed_vti ()

-
MIX_RESULT          mix_videoconfigparamsenc_mpeg4_get_fixed_vti
-                                                        (MixVideoConfigParamsEncMPEG4 *obj,
-                                                         uint *fixed_vop_time_increment);
-

-Get fixed_vop_time_increment

-
-- - - - - - - - - - - - - - -

obj :

MixVideoConfigParamsEncMPEG4 object -

fixed_vop_time_increment :

fixed_vop_time_increment to be returned -

returns :

Common Video Error Return Codes -
-
-
-
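
A matching sketch for the MPEG-4 parameters, using the defaults documented above (3 for both the profile/level indication and fixed_vop_time_increment); as before, the values are illustrative.

  MixVideoConfigParamsEncMPEG4 *params = mix_videoconfigparamsenc_mpeg4_new();
  if (params != NULL) {
      mix_videoconfigparamsenc_mpeg4_set_profile_level(params, 3);
      mix_videoconfigparamsenc_mpeg4_set_fixed_vti(params, 3);
      mix_videoconfigparamsenc_mpeg4_set_dlk(params, 0);   /* keep deblocking enabled */
      /* ... pass params to configuration, then drop the local reference: */
      mix_videoconfigparamsenc_mpeg4_unref(params);
  }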
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoDecodeParams.html b/mix_video/docs/reference/MixVideo/html/MixVideoDecodeParams.html deleted file mode 100644 index b86c19f..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideoDecodeParams.html +++ /dev/null @@ -1,281 +0,0 @@ - - - - -MixVideoDecodeParams - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideoDecodeParams

-

MixVideoDecodeParams — MI-X Video Decode Parameters

-
- -
-

Object Hierarchy

-
-  MixParams
-   +----MixVideoDecodeParams
-
-
-
-

Description

-

-The MixVideoDecodeParams object will be created by the MMF/App -and provided to MixVideo in the MixVideo mix_video_decode() function.

-
-
-

Details

-
-

MixVideoDecodeParams

-
typedef struct {
-	MixParams parent;
-
-
-	/* TODO: Add properties */
-	
-	/* Presentation timestamp for the video 
-	 * frame data, in milliseconds */
-	uint64 timestamp;
-	
-	/* Indicates a discontinuity in the stream */
-	bool discontinuity;
-
-	/* Reserved for future use */	
-	void *reserved1;
-	
-	/* Reserved for future use */	
-	void *reserved2;
-	
-	/* Reserved for future use */	
-	void *reserved3;
-	
-	/* Reserved for future use */	
-	void *reserved4;
-} MixVideoDecodeParams;
-
-

-MI-X VideoDecode Parameter object

-
-
-
-

mix_videodecodeparams_new ()

-
MixVideoDecodeParams * mix_videodecodeparams_new        (void);
-

-Use this method to create a new instance of MixVideoDecodeParams

-
-- - - - -

returns :

A newly allocated instance of MixVideoDecodeParams -
-
-
-
-

mix_videodecodeparams_ref ()

-
MixVideoDecodeParams * mix_videodecodeparams_ref        (MixVideoDecodeParams *mix);
-

-Increment the reference count of the object.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixVideoDecodeParams instance where reference count has been increased. -
-
-
-
-

mix_videodecodeparams_unref()

-
#define mix_videodecodeparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

mix_videodecodeparams_set_timestamp ()

-
MIX_RESULT          mix_videodecodeparams_set_timestamp (MixVideoDecodeParams *obj,
-                                                         uint64 timestamp);
-

-Set Presentation timestamp

-
-- - - - - - - - - - - - - - -

obj :

MixVideoDecodeParams object -

timestamp :

Presentation timestamp for the video frame data, in milliseconds -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videodecodeparams_get_timestamp ()

-
MIX_RESULT          mix_videodecodeparams_get_timestamp (MixVideoDecodeParams *obj,
-                                                         uint64 *timestamp);
-

-Get Presentation timestamp

-
-- - - - - - - - - - - - - - -

obj :

MixVideoDecodeParams object -

timestamp :

Presentation timestamp for the video frame data, in milliseconds, to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videodecodeparams_set_discontinuity ()

-
MIX_RESULT          mix_videodecodeparams_set_discontinuity
-                                                        (MixVideoDecodeParams *obj,
-                                                         bool discontinuity);
-

-Set discontinuity flag

-
-- - - - - - - - - - - - - - -

obj :

MixVideoDecodeParams object -

discontinuity :

Flag that indicates a discontinuity in the stream. -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videodecodeparams_get_discontinuity ()

-
MIX_RESULT          mix_videodecodeparams_get_discontinuity
-                                                        (MixVideoDecodeParams *obj,
-                                                         bool *discontinuity);
-

-Get discontinuity flag

-
-- - - - - - - - - - - - - - -

obj :

MixVideoDecodeParams object -

discontinuity :

Discontinuity flag to be returned -

returns :

Common Video Error Return Codes -
-
-
-
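
A minimal sketch of preparing the decode parameters before a mix_video_decode() call; the mix_video_decode() signature itself is not reproduced on this page, so only the parameter handling is shown, and the timestamp value is illustrative.

  MixVideoDecodeParams *params = mix_videodecodeparams_new();
  if (params != NULL) {
      mix_videodecodeparams_set_timestamp(params, 33);       /* ms, illustrative */
      mix_videodecodeparams_set_discontinuity(params, FALSE);
      /* ... pass params (with the input buffers) to mix_video_decode() ... */
      mix_videodecodeparams_unref(params);
  }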
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoEncodeParams.html b/mix_video/docs/reference/MixVideo/html/MixVideoEncodeParams.html deleted file mode 100644 index e84b412..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideoEncodeParams.html +++ /dev/null @@ -1,155 +0,0 @@ - - - - -MixVideoEncodeParams - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideoEncodeParams

-

MixVideoEncodeParams — MI-X Video Encode Parameters

-
- -
-

Object Hierarchy

-
-  MixParams
-   +----MixVideoEncodeParams
-
-
-
-

Description

-

-The MixVideoEncodeParams object will be created by -the MMF/App and provided to MixVideo in the MixVideo -mix_video_encode() function. Get methods for the -properties will be available for the caller to -retrieve configuration information. Currently this -object is reserved for future use.

-
-
-

Details

-
-

MixVideoEncodeParams

-
typedef struct {
-	MixParams parent;
-
-
-	/* TODO: Add properties */
-	
-	
-	/* Reserved for future use */ 
-	void *reserved1;
-	
-	/* Reserved for future use */	
-	void *reserved2;
-	
-	/* Reserved for future use */	
-	void *reserved3;
-	
-	/* Reserved for future use */	
-	void *reserved4;
-} MixVideoEncodeParams;
-
-

-MI-X VideoEncode Parameter object

-
-
-
-

mix_videoencodeparams_new ()

-
MixVideoEncodeParams * mix_videoencodeparams_new        (void);
-

-Use this method to create a new instance of MixVideoEncodeParams

-
-- - - - -

returns :

A newly allocated instance of MixVideoEncodeParams -
-
-
-
-

mix_videoencodeparams_ref ()

-
MixVideoEncodeParams * mix_videoencodeparams_ref        (MixVideoEncodeParams *mix);
-

-Increment the reference count of the object.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixVideoEncodeParams instance where reference count has been increased. -
-
-
-
-

mix_videoencodeparams_unref()

-
#define mix_videoencodeparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
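
Since the object is currently reserved, the only behavior worth sketching is the reference-counting contract shared by these parameter objects; the release-on-last-unref behavior is inferred from the ref/unref naming rather than stated on this page.

  MixVideoEncodeParams *params = mix_videoencodeparams_new();   /* one reference */
  MixVideoEncodeParams *alias  = mix_videoencodeparams_ref(params);
  /* both handles refer to the same object here */
  mix_videoencodeparams_unref(alias);
  mix_videoencodeparams_unref(params);   /* last reference dropped */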
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoFrame.html b/mix_video/docs/reference/MixVideo/html/MixVideoFrame.html deleted file mode 100644 index 767321d..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideoFrame.html +++ /dev/null @@ -1,423 +0,0 @@ - - - - -MixVideoFrame - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideoFrame

-

MixVideoFrame — MI-X Video Frame Object

-
-
-

Synopsis

-
-                    MixVideoFrame;
-MixVideoFrame *     mix_videoframe_new                  (void);
-MixVideoFrame *     mix_videoframe_ref                  (MixVideoFrame *obj);
-void                mix_videoframe_unref                (MixVideoFrame *obj);
-MIX_RESULT          mix_videoframe_set_frame_id         (MixVideoFrame *obj,
-                                                         ulong frame_id);
-MIX_RESULT          mix_videoframe_get_frame_id         (MixVideoFrame *obj,
-                                                         ulong *frame_id);
-MIX_RESULT          mix_videoframe_set_ci_frame_idx     (MixVideoFrame *obj,
-                                                         uint ci_frame_idx);
-MIX_RESULT          mix_videoframe_get_ci_frame_idx     (MixVideoFrame *obj,
-                                                         uint *ci_frame_idx);
-MIX_RESULT          mix_videoframe_set_timestamp        (MixVideoFrame *obj,
-                                                         uint64 timestamp);
-MIX_RESULT          mix_videoframe_get_timestamp        (MixVideoFrame *obj,
-                                                         uint64 *timestamp);
-MIX_RESULT          mix_videoframe_set_discontinuity    (MixVideoFrame *obj,
-                                                         bool discontinuity);
-MIX_RESULT          mix_videoframe_get_discontinuity    (MixVideoFrame *obj,
-                                                         bool *discontinuity);
-
-
-
-

Object Hierarchy

-
-  MixParams
-   +----MixVideoFrame
-
-
-
-

Description

-

-

-

-The MixVideoFrame object will be created by -MixVideo and provided to the MMF/App in the -MixVideo mix_video_get_frame() function. -

-

-

-

-mix_video_release_frame() must be used -to release frame object returned from -mix_video_get_frame(). Caller must not -use mix_videoframe_ref() or mix_videoframe_unref() -or adjust the reference count directly in any way. -This object can be supplied in the mix_video_render() -function to render the associated video frame. -The MMF/App can release this object when it no longer -needs to display/re-display this frame. -

-
-
-
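
A sketch of reading frame metadata with the accessors below, assuming frame was obtained from mix_video_get_frame(); the frame is handed back through mix_video_release_frame(), shown only as a comment because that signature is not reproduced on this page.

  uint64 ts = 0;
  bool disc = FALSE;

  mix_videoframe_get_timestamp(frame, &ts);
  mix_videoframe_get_discontinuity(frame, &disc);
  /* ... render/display the frame, then hand it back with
   * mix_video_release_frame(); never call mix_videoframe_unref()
   * on a frame obtained from mix_video_get_frame(). */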

Details

-
-

MixVideoFrame

-
typedef struct {
-	MixParams parent;
-
-	
-	/* ID associated with the decoded frame */
-	ulong frame_id;
-	
-	/* ID associated with the CI frame 
-	 * (used for encode only) */	
-	uint ci_frame_idx;	
-	
-	/* 64 bit timestamp. For decode, 
-	 * this is preserved from the corresponding 
-	 * MixVideoDecodeParams field. For encode, 
-	 * this is created during encoding. */
-	uint64 timestamp;
-	
-	/* Flag indicating whether there 
-	 * is a discontinuity. For decode, 
-	 * this is preserved from the corresponding 
-	 * MixVideoDecodeParams field. */
-	bool discontinuity;
-
-	/* Reserved for future use */ 
-	void *reserved1;
-	
-	/* Reserved for future use */ 
-	void *reserved2;
-	
-	/* Reserved for future use */ 
-	void *reserved3;
-	
-	/* Reserved for future use */ 
-	void *reserved4;
-} MixVideoFrame;
-
-

-MI-X VideoFrame object

-
-
-
-

mix_videoframe_new ()

-
MixVideoFrame *     mix_videoframe_new                  (void);
-

-Use this method to create a new instance of MixVideoFrame

-
-- - - - -

returns :

A newly allocated instance of MixVideoFrame -
-
-
-
-

mix_videoframe_ref ()

-
MixVideoFrame *     mix_videoframe_ref                  (MixVideoFrame *obj);
-

-Increment the reference count of the object.

-
-- - - - - - - - - - -

obj :

object to add reference -

returns :

the MixVideoFrame instance where reference count has been increased. -
-
-
-
-

mix_videoframe_unref ()

-
void                mix_videoframe_unref                (MixVideoFrame *obj);
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

mix_videoframe_set_frame_id ()

-
MIX_RESULT          mix_videoframe_set_frame_id         (MixVideoFrame *obj,
-                                                         ulong frame_id);
-

-Set Frame ID

-
-- - - - - - - - - - - - - - -

obj :

MixVideoFrame object -

frame_id :

ID associated with the decoded frame -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoframe_get_frame_id ()

-
MIX_RESULT          mix_videoframe_get_frame_id         (MixVideoFrame *obj,
-                                                         ulong *frame_id);
-

-Get Frame ID

-
-- - - - - - - - - - - - - - -

obj :

MixVideoFrame object -

frame_id :

frame ID to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoframe_set_ci_frame_idx ()

-
MIX_RESULT          mix_videoframe_set_ci_frame_idx     (MixVideoFrame *obj,
-                                                         uint ci_frame_idx);
-

-Set CI Frame ID

-
-- - - - - - - - - - - - - - -

obj :

MixVideoFrame object -

ci_frame_idx :

ID associated with the CI frame (used for encode only) -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoframe_get_ci_frame_idx ()

-
MIX_RESULT          mix_videoframe_get_ci_frame_idx     (MixVideoFrame *obj,
-                                                         uint *ci_frame_idx);
-

-Get CI Frame ID

-
-- - - - - - - - - - - - - - -

obj :

MixVideoFrame object -

ci_frame_idx :

CI Frame ID to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoframe_set_timestamp ()

-
MIX_RESULT          mix_videoframe_set_timestamp        (MixVideoFrame *obj,
-                                                         uint64 timestamp);
-

-Set Frame timestamp

-
-- - - - - - - - - - - - - - -

obj :

MixVideoFrame object -

timestamp :

Frame timestamp -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoframe_get_timestamp ()

-
MIX_RESULT          mix_videoframe_get_timestamp        (MixVideoFrame *obj,
-                                                         uint64 *timestamp);
-

-Get Frame timestamp

-
-- - - - - - - - - - - - - - -

obj :

MixVideoFrame object -

timestamp :

Frame timestamp to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoframe_set_discontinuity ()

-
MIX_RESULT          mix_videoframe_set_discontinuity    (MixVideoFrame *obj,
-                                                         bool discontinuity);
-

-Set discontinuity flag

-
-- - - - - - - - - - - - - - -

obj :

MixVideoFrame object -

discontinuity :

Discontinuity flag -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoframe_get_discontinuity ()

-
MIX_RESULT          mix_videoframe_get_discontinuity    (MixVideoFrame *obj,
-                                                         bool *discontinuity);
-

-Get discontinuity flag

-
-- - - - - - - - - - - - - - -

obj :

MixVideoFrame object -

discontinuity :

Discontinuity flag to be returned -

returns :

Common Video Error Return Codes -
-
-
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoInitParams.html b/mix_video/docs/reference/MixVideo/html/MixVideoInitParams.html deleted file mode 100644 index 79cb486..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideoInitParams.html +++ /dev/null @@ -1,214 +0,0 @@ - - - - -MixVideoInitParams - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideoInitParams

-

MixVideoInitParams — MI-X Video Initialization Parameters

-
- -
-

Object Hierarchy

-
-  MixParams
-   +----MixVideoInitParams
-
-
-
-

Description

-

-The MixVideoInitParams object will be created by the MMF/App -and provided in the mix_video_initialize() function. -The get and set methods for the properties will be available for -the caller to set and get information used at initialization time.

-
-
-

Details

-
-

MixVideoInitParams

-
typedef struct {
-  MixParams parent;
-
-
-  /* Pointer to a MixDisplay object 
-   * such as MixDisplayX11 */
-  MixDisplay *display;
-  
-  /* Reserved for future use */
-  void *reserved1;
-  
-  /* Reserved for future use */  
-  void *reserved2;
-  
-  /* Reserved for future use */  
-  void *reserved3;
-  
-  /* Reserved for future use */  
-  void *reserved4;
-} MixVideoInitParams;
-
-

-MI-X VideoInit Parameter object

-
-
-
-

mix_videoinitparams_new ()

-
MixVideoInitParams * mix_videoinitparams_new            (void);
-

-Use this method to create a new instance of MixVideoInitParams

-
-- - - - -

returns :

A newly allocated instance of MixVideoInitParams -
-
-
-
-

mix_videoinitparams_ref ()

-
MixVideoInitParams * mix_videoinitparams_ref            (MixVideoInitParams *mix);
-

-Increment the reference count of the object.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixVideoInitParams instance where reference count has been increased. -
-
-
-
-

mix_videoinitparams_unref()

-
#define mix_videoinitparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

mix_videoinitparams_set_display ()

-
MIX_RESULT          mix_videoinitparams_set_display     (MixVideoInitParams *obj,
-                                                         MixDisplay *display);
-

-Set MixDisplay object

-
-- - - - - - - - - - - - - - -

obj :

MixVideoInitParams object -

display :

Pointer to a MixDisplay object such as MixDisplayX11 -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videoinitparams_get_display ()

-
MIX_RESULT          mix_videoinitparams_get_display     (MixVideoInitParams *obj,
-                                                         MixDisplay **display);
-

-Get MixDisplay object

-
-- - - - - - - - - - - - - - -

obj :

MixVideoInitParams object -

display :

Pointer to a pointer to a MixDisplay object such as MixDisplayX11, to be returned -

returns :

Common Video Error Return Codes -
-
-
-
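
A minimal sketch of the initialization-parameter setup, assuming display is a MixDisplay (for example a MixDisplayX11) created elsewhere; the mix_video_initialize() call is indicated only as a comment because its signature is not reproduced on this page.

  MixVideoInitParams *params = mix_videoinitparams_new();
  if (params != NULL) {
      mix_videoinitparams_set_display(params, display);
      /* ... hand params to mix_video_initialize() ... */
      mix_videoinitparams_unref(params);   /* drop the local reference */
  }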
- - - diff --git a/mix_video/docs/reference/MixVideo/html/MixVideoRenderParams.html b/mix_video/docs/reference/MixVideo/html/MixVideoRenderParams.html deleted file mode 100644 index ff03b01..0000000 --- a/mix_video/docs/reference/MixVideo/html/MixVideoRenderParams.html +++ /dev/null @@ -1,418 +0,0 @@ - - - - -MixVideoRenderParams - - - - - - - - - - - - - - - - - - - - - - -
-
-
- - -
-

MixVideoRenderParams

-

MixVideoRenderParams — MI-X Video Render Parameters

-
- -
-

Object Hierarchy

-
-  MixParams
-   +----MixVideoRenderParams
-
-
-
-

Description

-

-The MixVideoRenderParams object will be created by the MMF/App -and provided to MixVideo in the MixVideo mix_video_render() function.

-
-
-

Details

-
-

MixVideoRenderParams

-
typedef struct {
-	MixParams parent;
-
-	
-	/* Pointer to a MixDisplay object 
-	 * such as MixDisplayX11 */
-	MixDisplay *display;
-
-	/* MixRect object to define offset, 
-	 * height and width of source image */
-	MixRect src_rect;
-	
-	/* MixRect object to define offset, 
-	 * height and width of the display 
-	 * destination */
-	MixRect dst_rect;
-
-	/* Array of clipping rectangles 
-	 * to be applied */
-	MixRect *clipping_rects;
-	
-	/* Number of clipping rectangles 
-	 * in clipping_rects */
-	uint number_of_clipping_rects;
-
-	/* Post processing parameters */
-	uint post_proc;
-
-	/* Reserved */
-	void* reserved;
-	
-	/* Reserved for future use */
-	void* reserved1;
-	
-	/* Reserved for future use */	
-	void* reserved2;
-	
-	/* Reserved for future use */	
-	void* reserved3;
-	
-	/* Reserved for future use */	
-	void* reserved4;
-} MixVideoRenderParams;
-
-

-MI-X VideoRender Parameter object

-
-
-
-

mix_videorenderparams_new ()

-
MixVideoRenderParams * mix_videorenderparams_new        (void);
-

-Use this method to create a new instance of MixVideoRenderParams

-
-- - - - -

returns :

A newly allocated instance of MixVideoRenderParams -
-
-
-
-

mix_videorenderparams_ref ()

-
MixVideoRenderParams * mix_videorenderparams_ref        (MixVideoRenderParams *mix);
-

-Increment the reference count of the object.

-
-- - - - - - - - - - -

mix :

object to add reference -

returns :

the MixVideoRenderParams instance where reference count has been increased. -
-
-
-
-

mix_videorenderparams_unref()

-
#define mix_videorenderparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-

-Decrement reference count of the object.

-
-- - - - -

obj :

object to unref. -
-
-
-
-

mix_videorenderparams_set_display ()

-
MIX_RESULT          mix_videorenderparams_set_display   (MixVideoRenderParams *obj,
-                                                         MixDisplay *display);
-

-Set MixDisplay Object

-
-- - - - - - - - - - - - - - -

obj :

MixVideoRenderParams object -

display :

MixDisplay object -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videorenderparams_get_display ()

-
MIX_RESULT          mix_videorenderparams_get_display   (MixVideoRenderParams *obj,
-                                                         MixDisplay **display);
-

-Get MixDisplay Object

-
-- - - - - - - - - - - - - - -

obj :

MixVideoRenderParams object -

display :

pointer to MixDisplay object -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videorenderparams_set_src_rect ()

-
MIX_RESULT          mix_videorenderparams_set_src_rect  (MixVideoRenderParams *obj,
-                                                         MixRect src_rect);
-

-Set source rectangle

-
-- - - - - - - - - - - - - - -

obj :

MixVideoRenderParams object -

src_rect :

MixRect object to define offset, height and width of source image -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videorenderparams_get_src_rect ()

-
MIX_RESULT          mix_videorenderparams_get_src_rect  (MixVideoRenderParams *obj,
-                                                         MixRect *src_rect);
-

-Get source rectangle

-
-- - - - - - - - - - - - - - -

obj :

MixVideoRenderParams object -

src_rect :

Source rectangle to be returned -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videorenderparams_set_dest_rect ()

-
MIX_RESULT          mix_videorenderparams_set_dest_rect (MixVideoRenderParams *obj,
-                                                         MixRect dst_rect);
-

-Set destination rectangle

-
-- - - - - - - - - - - - - - -

obj :

MixVideoRenderParams object -

dst_rect :

MixRect object to define offset, height and width of the display destination -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videorenderparams_get_dest_rect ()

-
MIX_RESULT          mix_videorenderparams_get_dest_rect (MixVideoRenderParams *obj,
-                                                         MixRect *dst_rect);
-
-

-Get destination rectangle

-
-- - - - - - - - - - - - - - -

obj :

MixVideoRenderParams object -

dst_rect :

Destination rectangle to be returned -

returns :

Common Video Error Return Codes -
-
-

mix_videorenderparams_set_clipping_rects ()

-
MIX_RESULT          mix_videorenderparams_set_clipping_rects
-                                                        (MixVideoRenderParams *obj,
-                                                         MixRect *clipping_rects,
-                                                         uint number_of_clipping_rects);
-

-Set clipping rectangles

-
-- - - - - - - - - - - - - - - - - - -

obj :

MixVideoRenderParams object -

clipping_rects :

Array of clipping rectangles to be applied -

number_of_clipping_rects :

Number of clipping rectangles in clipping_rects -

returns :

Common Video Error Return Codes -
-
-
-
-

mix_videorenderparams_get_clipping_rects ()

-
MIX_RESULT          mix_videorenderparams_get_clipping_rects
-                                                        (MixVideoRenderParams *obj,
-                                                         MixRect **clipping_rects,
-                                                         uint *number_of_clipping_rects);
-

-Get clipping rectangles -

-

-

-
-

Note

-DO NOT free clipping_rects! -
-
-- - - - - - - - - - - - - - - - - - -

obj :

MixVideoRenderParams object -

clipping_rects :

Array of clipping rectangles returned -

number_of_clipping_rects :

Number of clipping rectangles in clipping_rects returned -

returns :

Common Video Error Return Codes -
-
-
-
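
To tie the rectangle setters together, a sketch of a helper that attaches the render geometry; the MixRect values are taken as inputs because the structure layout is defined in the library headers. Note, per the warning above, that an array obtained from mix_videorenderparams_get_clipping_rects() must not be freed by the caller.

  MIX_RESULT set_render_geometry(MixVideoRenderParams *obj,
                                 MixRect src_rect, MixRect dst_rect,
                                 MixRect *clipping_rects,
                                 uint number_of_clipping_rects)
  {
      MIX_RESULT ret;

      ret = mix_videorenderparams_set_src_rect(obj, src_rect);
      if (ret != MIX_RESULT_SUCCESS)
          return ret;

      ret = mix_videorenderparams_set_dest_rect(obj, dst_rect);
      if (ret != MIX_RESULT_SUCCESS)
          return ret;

      return mix_videorenderparams_set_clipping_rects(obj, clipping_rects,
                                                      number_of_clipping_rects);
  }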
- - - diff --git a/mix_video/docs/reference/MixVideo/html/api-index-full.html b/mix_video/docs/reference/MixVideo/html/api-index-full.html deleted file mode 100644 index 29a702d..0000000 --- a/mix_video/docs/reference/MixVideo/html/api-index-full.html +++ /dev/null @@ -1,466 +0,0 @@ - - - - -API Index - - - - - - - - - - - - - - - - - - -
-

-API Index

-
-
-

M

-
-
MixBuffer
-
-
MixBufferCallback
-
-
MixCodecMode
-
-
MixDelimiterType
-
-
MixDisplay
-
-
MixDisplayCopyFunction
-
-
MixDisplayDupFunction
-
-
MixDisplayEqualFunction
-
-
MixDisplayFinalizeFunction
-
-
MixDisplayX11
-
-
MixDrmParams
-
-
MixEncodeTargetFormat
-
-
MixFrameOrderMode
-
-
MixIOVec
-
-
MixParamSpecDisplay
-
-
MixProfile
-
-
MixRateControl
-
-
MixRawTargetFormat
-
-
MixRect
-
-
MixState
-
-
MixVideo
-
-
MixVideoConfigParams
-
-
MixVideoConfigParamsDec
-
-
MixVideoConfigParamsDecH264
-
-
MixVideoConfigParamsDecMP42
-
-
MixVideoConfigParamsDecVC1
-
-
MixVideoConfigParamsEnc
-
-
MixVideoConfigParamsEncH264
-
-
MixVideoConfigParamsEncMPEG4
-
-
MixVideoDecodeParams
-
-
MixVideoEncodeParams
-
-
MixVideoFrame
-
-
MixVideoInitParams
-
-
MixVideoRenderParams
-
-
mix_buffer_new
-
-
mix_buffer_ref
-
-
mix_buffer_set_data
-
-
mix_buffer_unref
-
-
mix_displayx11_get_display
-
-
mix_displayx11_get_drawable
-
-
mix_displayx11_new
-
-
mix_displayx11_ref
-
-
mix_displayx11_set_display
-
-
mix_displayx11_set_drawable
-
-
mix_displayx11_unref
-
-
MIX_DISPLAY_CAST
-
-
MIX_DISPLAY_CLASS
-
-
mix_display_copy
-
-
mix_display_dup
-
-
mix_display_equal
-
-
mix_display_new
-
-
mix_display_ref
-
-
MIX_DISPLAY_REFCOUNT
-
-
MIX_DISPLAY_REFCOUNT_VALUE
-
-
mix_display_replace
-
-
mix_display_unref
-
-
mix_drmparams_new
-
-
mix_drmparams_ref
-
-
mix_drmparams_unref
-
-
MIX_IS_PARAM_SPEC_DISPLAY
-
-
MIX_PARAM_SPEC_DISPLAY
-
-
mix_param_spec_display
-
-
mix_param_spec_display_get_type
-
-
MIX_TYPE_PARAM_DISPLAY
-
-
mix_value_dup_display
-
-
mix_value_get_display
-
-
MIX_VALUE_HOLDS_DISPLAY
-
-
mix_value_set_display
-
-
mix_value_take_display
-
-
mix_videoconfigparamsdec_get_buffer_pool_size
-
-
mix_videoconfigparamsdec_get_extra_surface_allocation
-
-
mix_videoconfigparamsdec_get_frame_order_mode
-
-
mix_videoconfigparamsdec_get_frame_rate
-
-
mix_videoconfigparamsdec_get_header
-
-
mix_videoconfigparamsdec_get_mime_type
-
-
mix_videoconfigparamsdec_get_picture_res
-
-
mix_videoconfigparamsdec_get_rate_control
-
-
mix_videoconfigparamsdec_get_raw_format
-
-
mix_videoconfigparamsdec_h264_new
-
-
mix_videoconfigparamsdec_h264_unref
-
-
mix_videoconfigparamsdec_mp42_get_divxversion
-
-
mix_videoconfigparamsdec_mp42_get_mpegversion
-
-
mix_videoconfigparamsdec_mp42_new
-
-
mix_videoconfigparamsdec_mp42_set_divxversion
-
-
mix_videoconfigparamsdec_mp42_set_mpegversion
-
-
mix_videoconfigparamsdec_mp42_unref
-
-
mix_videoconfigparamsdec_new
-
-
mix_videoconfigparamsdec_ref
-
-
mix_videoconfigparamsdec_set_buffer_pool_size
-
-
mix_videoconfigparamsdec_set_extra_surface_allocation
-
-
mix_videoconfigparamsdec_set_frame_order_mode
-
-
mix_videoconfigparamsdec_set_frame_rate
-
-
mix_videoconfigparamsdec_set_header
-
-
mix_videoconfigparamsdec_set_mime_type
-
-
mix_videoconfigparamsdec_set_picture_res
-
-
mix_videoconfigparamsdec_set_rate_control
-
-
mix_videoconfigparamsdec_set_raw_format
-
-
mix_videoconfigparamsdec_unref
-
-
mix_videoconfigparamsdec_vc1_new
-
-
mix_videoconfigparamsdec_vc1_unref
-
-
mix_videoconfigparamsenc_get_bit_rate
-
-
mix_videoconfigparamsenc_get_buffer_pool_size
-
-
mix_videoconfigparamsenc_get_ci_frame_info
-
-
mix_videoconfigparamsenc_get_drawable
-
-
mix_videoconfigparamsenc_get_encode_format
-
-
mix_videoconfigparamsenc_get_frame_rate
-
-
mix_videoconfigparamsenc_get_init_qp
-
-
mix_videoconfigparamsenc_get_intra_period
-
-
mix_videoconfigparamsenc_get_mime_type
-
-
mix_videoconfigparamsenc_get_min_qp
-
-
mix_videoconfigparamsenc_get_need_display
-
-
mix_videoconfigparamsenc_get_picture_res
-
-
mix_videoconfigparamsenc_get_profile
-
-
mix_videoconfigparamsenc_get_rate_control
-
-
mix_videoconfigparamsenc_get_raw_format
-
-
mix_videoconfigparamsenc_get_share_buf_mode
-
-
mix_videoconfigparamsenc_h264_get_bus
-
-
mix_videoconfigparamsenc_h264_get_delimiter_type
-
-
mix_videoconfigparamsenc_h264_get_dlk
-
-
mix_videoconfigparamsenc_h264_get_slice_num
-
-
mix_videoconfigparamsenc_h264_new
-
-
mix_videoconfigparamsenc_h264_set_bus
-
-
mix_videoconfigparamsenc_h264_set_delimiter_type
-
-
mix_videoconfigparamsenc_h264_set_dlk
-
-
mix_videoconfigparamsenc_h264_set_slice_num
-
-
mix_videoconfigparamsenc_h264_unref
-
-
mix_videoconfigparamsenc_mpeg4_get_dlk
-
-
mix_videoconfigparamsenc_mpeg4_get_fixed_vti
-
-
mix_videoconfigparamsenc_mpeg4_get_profile_level
-
-
mix_videoconfigparamsenc_mpeg4_new
-
-
mix_videoconfigparamsenc_mpeg4_set_dlk
-
-
mix_videoconfigparamsenc_mpeg4_set_fixed_vti
-
-
mix_videoconfigparamsenc_mpeg4_set_profile_level
-
-
mix_videoconfigparamsenc_mpeg4_unref
-
-
mix_videoconfigparamsenc_new
-
-
mix_videoconfigparamsenc_ref
-
-
mix_videoconfigparamsenc_set_bit_rate
-
-
mix_videoconfigparamsenc_set_buffer_pool_size
-
-
mix_videoconfigparamsenc_set_ci_frame_info
-
-
mix_videoconfigparamsenc_set_drawable
-
-
mix_videoconfigparamsenc_set_encode_format
-
-
mix_videoconfigparamsenc_set_frame_rate
-
-
mix_videoconfigparamsenc_set_init_qp
-
-
mix_videoconfigparamsenc_set_intra_period
-
-
mix_videoconfigparamsenc_set_mime_type
-
-
mix_videoconfigparamsenc_set_min_qp
-
-
mix_videoconfigparamsenc_set_need_display
-
-
mix_videoconfigparamsenc_set_picture_res
-
-
mix_videoconfigparamsenc_set_profile
-
-
mix_videoconfigparamsenc_set_rate_control
-
-
mix_videoconfigparamsenc_set_raw_format
-
-
mix_videoconfigparamsenc_set_share_buf_mode
-
-
mix_videoconfigparamsenc_unref
-
-
mix_videoconfigparams_new
-
-
mix_videoconfigparams_ref
-
-
mix_videoconfigparams_unref
-
-
mix_videodecodeparams_get_discontinuity
-
-
mix_videodecodeparams_get_timestamp
-
-
mix_videodecodeparams_new
-
-
mix_videodecodeparams_ref
-
-
mix_videodecodeparams_set_discontinuity
-
-
mix_videodecodeparams_set_timestamp
-
-
mix_videodecodeparams_unref
-
-
mix_videoencodeparams_new
-
-
mix_videoencodeparams_ref
-
-
mix_videoencodeparams_unref
-
-
mix_videoframe_get_ci_frame_idx
-
-
mix_videoframe_get_discontinuity
-
-
mix_videoframe_get_frame_id
-
-
mix_videoframe_get_timestamp
-
-
mix_videoframe_new
-
-
mix_videoframe_ref
-
-
mix_videoframe_set_ci_frame_idx
-
-
mix_videoframe_set_discontinuity
-
-
mix_videoframe_set_frame_id
-
-
mix_videoframe_set_timestamp
-
-
mix_videoframe_unref
-
-
mix_videoinitparams_get_display
-
-
mix_videoinitparams_new
-
-
mix_videoinitparams_ref
-
-
mix_videoinitparams_set_display
-
-
mix_videoinitparams_unref
-
-
mix_videorenderparams_get_clipping_rects
-
-
mix_videorenderparams_get_dest_rect
-
-
mix_videorenderparams_get_display
-
-
mix_videorenderparams_get_src_rect
-
-
mix_videorenderparams_new
-
-
mix_videorenderparams_ref
-
-
mix_videorenderparams_set_clipping_rects
-
-
mix_videorenderparams_set_dest_rect
-
-
mix_videorenderparams_set_display
-
-
mix_videorenderparams_set_src_rect
-
-
mix_videorenderparams_unref
-
-
mix_video_configure
-
-
mix_video_decode
-
-
mix_video_deinitialize
-
-
mix_video_encode
-
-
mix_video_eos
-
-
MIX_VIDEO_ERROR_CODE
-
-
mix_video_flush
-
-
mix_video_get_config
-
-
mix_video_get_frame
-
-
mix_video_get_max_coded_buffer_size
-
-
mix_video_get_mixbuffer
-
-
mix_video_get_state
-
-
mix_video_get_version
-
-
mix_video_initialize
-
-
mix_video_new
-
-
mix_video_ref
-
-
mix_video_release_frame
-
-
mix_video_release_mixbuffer
-
-
mix_video_render
-
-
mix_video_unref
-
-
-
-
-
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/ch01.html b/mix_video/docs/reference/MixVideo/html/ch01.html deleted file mode 100644 index d3b325c..0000000 --- a/mix_video/docs/reference/MixVideo/html/ch01.html +++ /dev/null @@ -1,92 +0,0 @@ - - - - -MI-X Video API - - - - - - - - - - - - - - - - - - - -
-

-MI-X Video API

-
-
-MixVideo — Object supporting decoding or encoding of a single stream using a hardware-accelerated decoder/encoder. -
-
-MixVideoInitParams — MI-X Video Initialization Parameters -
-
-MixDrmParams — Drm Parameters Base Object -
-
-MixDisplay — Lightweight Base Object for MI-X Video Display -
-
-MixDisplayX11 — MI-X Video X11 Display -
-
-MixBuffer — MI-X Video Buffer Parameters -
-
-MixVideoFrame — MI-X Video Frame Object -
-
-MixVideoConfigParams — MI-X Video Configuration Parameter Base Object -
-
-MixVideoConfigParamsDec — MI-X Video Decode Configuration Parameter Base Object -
-
-MixVideoConfigParamsDecVC1 — MI-X Video VC-1 Decode Configuration Parameter -
-
-MixVideoConfigParamsDecH264 — MI-X Video H.264 Decode Configuration Parameter -
-
-MixVideoConfigParamsDecMP42 — MI-X Video MPEG 4:2 Decode Configuration Parameter -
-
-MixVideoConfigParamsEnc — MI-X Video Encode Configuration Parameter Base Object -
-
-MixVideoConfigParamsEncH264 — MI-X Video H.264 Encode Configuration Parameter -
-
-MixVideoConfigParamsEncMPEG4 — MI-X Video MPEG 4:2 Encode Configuration Parameter -
-
-MixVideoDecodeParams — MI-X Video Decode Parameters -
-
-MixVideoEncodeParams — MI-X Video Encode Parameters -
-
-MixVideoRenderParams — MI-X Video Render Parameters -
-
-MI-X Video Data Definitions And Common Error Code — MI-X Video data definitions and common error code -
-
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/index.html b/mix_video/docs/reference/MixVideo/html/index.html deleted file mode 100644 index 4924a06..0000000 --- a/mix_video/docs/reference/MixVideo/html/index.html +++ /dev/null @@ -1,96 +0,0 @@ - - - - -Mi-X Video Reference Manual - - - - - - - - - - -
-
-
-
-

- for MI-X Video 0.1 - -

-
-
-
-
-
MI-X Video API
-
-
-MixVideo — Object supporting decoding or encoding of a single stream using a hardware-accelerated decoder/encoder. -
-
-MixVideoInitParams — MI-X Video Initialization Parameters -
-
-MixDrmParams — Drm Parameters Base Object -
-
-MixDisplay — Lightweight Base Object for MI-X Video Display -
-
-MixDisplayX11 — MI-X Video X11 Display -
-
-MixBuffer — MI-X Video Buffer Parameters -
-
-MixVideoFrame — MI-X Video Frame Object -
-
-MixVideoConfigParams — MI-X Video Configuration Parameter Base Object -
-
-MixVideoConfigParamsDec — MI-X Video Decode Configuration Parameter Base Object -
-
-MixVideoConfigParamsDecVC1 — MI-X Video VC-1 Decode Configuration Parameter -
-
-MixVideoConfigParamsDecH264 — MI-X Video H.264 Decode Configuration Parameter -
-
-MixVideoConfigParamsDecMP42 — MI-X Video MPEG 4:2 Decode Configuration Parameter -
-
-MixVideoConfigParamsEnc — MI-X Video Encode Configuration Parameter Base Object -
-
-MixVideoConfigParamsEncH264 — MI-X Video H.264 Encode Configuration Parameter -
-
-MixVideoConfigParamsEncMPEG4 — MI-X Video MPEG 4:2 Encode Configuration Parameter -
-
-MixVideoDecodeParams — MI-X Video Decode Parameters -
-
-MixVideoEncodeParams — MI-X Video Encode Parameters -
-
-MixVideoRenderParams — MI-X Video Render Parameters -
-
-MI-X Video Data Definitions And Common Error Code — MI-X Video data definitions and common error code -
-
-
Object Hierarchy
-
API Index
-
-
- - - diff --git a/mix_video/docs/reference/MixVideo/html/index.sgml b/mix_video/docs/reference/MixVideo/html/index.sgml deleted file mode 100644 index 7a6764e..0000000 --- a/mix_video/docs/reference/MixVideo/html/index.sgml +++ /dev/null @@ -1,307 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/mix_video/docs/reference/MixVideo/html/object-tree.html b/mix_video/docs/reference/MixVideo/html/object-tree.html deleted file mode 100644 index 31a1ca9..0000000 --- a/mix_video/docs/reference/MixVideo/html/object-tree.html +++ /dev/null @@ -1,55 +0,0 @@ - - - - -Object Hierarchy - - - - - - - - - - - - - - - - - - - - - - - diff --git a/mix_video/docs/reference/MixVideo/html/style.css b/mix_video/docs/reference/MixVideo/html/style.css deleted file mode 100644 index bb44c28..0000000 --- a/mix_video/docs/reference/MixVideo/html/style.css +++ /dev/null @@ -1,167 +0,0 @@ -.synopsis, .classsynopsis -{ - background: #eeeeee; - border: solid 1px #aaaaaa; - padding: 0.5em; -} -.programlisting -{ - background: #eeeeff; - border: solid 1px #aaaaff; - padding: 0.5em; -} -.variablelist -{ - padding: 4px; - margin-left: 3em; -} -.variablelist td:first-child -{ - vertical-align: top; -} - -/* this is needed so that the local anchors are displayed below the naviagtion */ -@media screen { - sup a.footnote - { - position: relative; - top: 0em ! 
important; - } - div.refnamediv a[name], div.refsect1 a[name] - { - position: relative; - top: -4.5em; - } - table.navigation#top - { - background: #ffeeee; - border: solid 1px #ffaaaa; - margin-top: 0; - margin-bottom: 0; - position: fixed; - top: 0; - left: 0; - height: 2em; - z-index: 1; - } - .navigation a - { - color: #770000; - } - .navigation a:visited - { - color: #550000; - } - td.shortcuts - { - color: #770000; - font-size: 80%; - white-space: nowrap; - } - div.refentry, div.chapter, div.reference, div.part, div.book, div.glossary, div.sect1, div.appendix, div.preface - { - position: relative; - top: 3em; - z-index: 0; - } - div.glossary, div.index - { - position: relative; - top: 2em; - z-index: 0; - } - div.refnamediv - { - margin-top: 2em; - } - body - { - padding-bottom: 20em; - } -} -@media print { - table.navigation { - visibility: collapse; - display: none; - } - div.titlepage table.navigation { - visibility: visible; - display: table; - background: #ffeeee; - border: solid 1px #ffaaaa; - margin-top: 0; - margin-bottom: 0; - top: 0; - left: 0; - height: 2em; - } -} - -.navigation .title -{ - font-size: 200%; -} - - -div.gallery-float -{ - float: left; - padding: 10px; -} -div.gallery-float img -{ - border-style: none; -} -div.gallery-spacer -{ - clear: both; -} -a -{ - text-decoration: none; -} -a:hover -{ - text-decoration: underline; - color: #FF0000; -} - -div.table table -{ - border-collapse: collapse; - border-spacing: 0px; - border-style: solid; - border-color: #777777; - border-width: 1px; -} - -div.table table td, div.table table th -{ - border-style: solid; - border-color: #777777; - border-width: 1px; - padding: 3px; - vertical-align: top; -} - -div.table table th -{ - background-color: #eeeeee; -} - -hr -{ - color: #777777; - background: #777777; - border: 0; - height: 1px; - clear: both; -} - -.footer -{ - padding-top: 3.5em; - color: #777777; - text-align: center; - font-size: 80%; -} diff --git a/mix_video/m4/as-mix-version.m4 b/mix_video/m4/as-mix-version.m4 deleted file mode 100644 index 82f6c95..0000000 --- a/mix_video/m4/as-mix-version.m4 +++ /dev/null @@ -1,35 +0,0 @@ -dnl as-mix-version.m4 - -dnl UMG_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE) - -dnl example -dnl UMG_MIX_VERSION(mixvideo,MIXVIDEO, 0, 3, 2,) -dnl for a 0.3.2 release version - -dnl this macro -dnl - defines [$PREFIX]_MAJOR, MINOR and REVISION, CURRENT, AGE -dnl - defines [$PREFIX], VERSION -dnl - AC_SUBST's all defined vars - -AC_DEFUN([UMG_MIX_VERSION], -[ - PACKAGE=[$1] - [$2]_MAJOR=[$3] - [$2]_MINOR=[$4] - [$2]_REVISION=[$5] - [$2]_CURRENT=m4_eval([$3] + [$4]) - [$2]_AGE=[$4] - VERSION=[$3].[$4].[$5] - - AC_SUBST([$2]_MAJOR) - AC_SUBST([$2]_MINOR) - AC_SUBST([$2]_REVISION) - AC_SUBST([$2]_CURRENT) - AC_SUBST([$2]_AGE) - - AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Define the package name]) - AC_SUBST(PACKAGE) - AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Define the version]) - AC_SUBST(VERSION) - -]) diff --git a/mix_video/mixvideo.pc.in b/mix_video/mixvideo.pc.in deleted file mode 100644 index 8666d24..0000000 --- a/mix_video/mixvideo.pc.in +++ /dev/null @@ -1,12 +0,0 @@ -prefix=@prefix@ -exec_prefix=@exec_prefix@ -libdir=@libdir@ -includedir=@includedir@ - -Name: MI-X Video -Description: MI-X Video Library -Requires: libva >= 0.30 -Version: @VERSION@ -Libs: -L${libdir} -l@PACKAGE@ -Cflags: -I${includedir}/mix - diff --git a/mix_video/mixvideoint.pc.in b/mix_video/mixvideoint.pc.in deleted file mode 100644 index f1ff2d1..0000000 --- a/mix_video/mixvideoint.pc.in +++ /dev/null @@ 
-1,12 +0,0 @@ -prefix=@prefix@ -exec_prefix=@exec_prefix@ -libdir=@libdir@ -includedir=@includedir@ - -Name: MI-X Video Internal -Description: MI-X Video Library for internal development -Requires: libva >= 0.30 -Version: @VERSION@ -Libs: -L${libdir} -l@PACKAGE@ -Cflags: -I${includedir}/mixvideoint - diff --git a/mix_video/src/Android.mk b/mix_video/src/Android.mk deleted file mode 100644 index 74e9849..0000000 --- a/mix_video/src/Android.mk +++ /dev/null @@ -1,119 +0,0 @@ -LOCAL_PATH := $(call my-dir) -include $(CLEAR_VARS) - -MIXVIDEO_LOG_ENABLE := false - -LOCAL_SRC_FILES := \ - mixvideothread.cpp \ - mixbuffer.cpp \ - mixbufferpool.cpp \ - mixdisplay.cpp \ - mixdisplayandroid.cpp \ - mixframemanager.cpp \ - mixsurfacepool.cpp \ - mixvideocaps.cpp \ - mixvideoconfigparams.cpp \ - mixvideoconfigparamsdec.cpp \ - mixvideoconfigparamsdec_h264.cpp \ - mixvideoconfigparamsdec_mp42.cpp \ - mixvideoconfigparamsdec_vc1.cpp \ - mixvideoconfigparamsenc.cpp \ - mixvideoconfigparamsenc_mpeg4.cpp \ - mixvideoconfigparamsenc_h264.cpp \ - mixvideoconfigparamsenc_h263.cpp \ - mixvideoconfigparamsenc_preview.cpp \ - mixvideodecodeparams.cpp \ - mixvideoencodeparams.cpp \ - mixvideoformat.cpp \ - mixvideoformat_h264.cpp \ - mixvideoformat_mp42.cpp \ - mixvideoformat_vc1.cpp \ - mixvideoformatenc.cpp \ - mixvideoformatenc_h264.cpp \ - mixvideoformatenc_h263.cpp \ - mixvideoformatenc_mpeg4.cpp \ - mixvideoformatenc_preview.cpp \ - mixvideoframe.cpp \ - mixvideoinitparams.cpp \ - mixvideorenderparams.cpp \ - mixvideo.cpp - -LOCAL_CFLAGS := \ - -DMIXVIDEO_AGE=1 \ - -DMIXVIDEO_CURRENT=1 \ - -DMIXVIDEO_MAJOR=0 \ - -DMIXVIDEO_MINOR=1 \ - -DMIXVIDEO_REVISION=8 - -LOCAL_C_INCLUDES := \ - $(LOCAL_PATH) \ - $(TARGET_OUT_HEADERS)/libmixcommon \ - $(TARGET_OUT_HEADERS)/libmixvbp \ - $(TARGET_OUT_HEADERS)/libva - -LOCAL_LDLIBS += -lpthread - -LOCAL_SHARED_LIBRARIES := \ - libcutils \ - libmixcommon \ - libmixvbp \ - libva \ - libva-android \ - libva-tpi - - -LOCAL_CFLAGS += -DANDROID \ - -DMIXVIDEO_ENCODE_ENABLE=0 - -ifeq ($(strip $(MIXVIDEO_LOG_ENABLE)),true) -LOCAL_CFLAGS += -DMIX_LOG_ENABLE -LOCAL_SHARED_LIBRARIES += liblog -endif - -LOCAL_COPY_HEADERS_TO := libmixvideo - -LOCAL_COPY_HEADERS := \ - mixvideothread.h \ - mixbuffer.h \ - mixbuffer_private.h \ - mixbufferpool.h \ - mixdisplay.h \ - mixdisplayandroid.h \ - mixframemanager.h \ - mixsurfacepool.h \ - mixvideo.h \ - mixvideodef.h \ - mixvideo_private.h \ - mixvideocaps.h \ - mixvideoconfigparams.h \ - mixvideoconfigparamsdec.h \ - mixvideoconfigparamsdec_h264.h \ - mixvideoconfigparamsdec_mp42.h \ - mixvideoconfigparamsdec_vc1.h \ - mixvideoconfigparamsenc.h \ - mixvideoconfigparamsenc_h264.h \ - mixvideoconfigparamsenc_h263.h \ - mixvideoconfigparamsenc_mpeg4.h \ - mixvideoconfigparamsenc_preview.h \ - mixvideodecodeparams.h \ - mixvideoencodeparams.h \ - mixvideoformat.h \ - mixvideoformat_h264.h \ - mixvideoformat_mp42.h \ - mixvideoformat_vc1.h \ - mixvideoformatenc.h \ - mixvideoformatenc_h264.h \ - mixvideoformatenc_h263.h \ - mixvideoformatenc_mpeg4.h \ - mixvideoformatenc_preview.h \ - mixvideoformatqueue.h \ - mixvideoframe.h \ - mixvideoframe_private.h \ - mixvideoinitparams.h \ - mixvideorenderparams.h \ - mixvideorenderparams_internal.h - -LOCAL_MODULE_TAGS := optional -LOCAL_MODULE := libmixvideo - -include $(BUILD_SHARED_LIBRARY) diff --git a/mix_video/src/mixbuffer.cpp b/mix_video/src/mixbuffer.cpp deleted file mode 100644 index 90fdf02..0000000 --- a/mix_video/src/mixbuffer.cpp +++ /dev/null @@ -1,144 +0,0 @@ -/* - INTEL CONFIDENTIAL - 
Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixbuffer - * @short_description: MI-X Video Buffer Parameters - * - * - * #MixBuffer objects are used to wrap input data buffers in a reference counted object as - * described in the buffer model section. Data buffers themselves are allocated by the - * App/MMF. #MixBuffer objects are allocated by #MixVideo in a pool and retrieved by the - * application using mix_video_get_mixbuffer(). The application will wrap a data buffer - * in a #MixBuffer object and pass it to mix_video_decode() or mix_video_encode(). - * - * - * The #MixBuffer objects will be released by #MixVideo when they are no longer needed - * for the decode or encode operation. The App/MMF will also release the #MixBuffer - * object after use. When the #MixBuffer is completely released, the callback function - * registered in the #MixBuffer will be called (allowing the App/MMF to release - * data buffers as necessary). - * - */ - -#include - -#include "mixvideolog.h" -#include "mixbufferpool.h" -#include "mixbuffer.h" -#include "mixbuffer_private.h" - -#define SAFE_FREE(p) if(p) { free(p); p = NULL; } - -MixBuffer::MixBuffer() - :data(NULL) - ,size(0) - ,token(0) - ,callback(NULL) - ,pool(NULL) { -} - -MixBuffer::~MixBuffer() { -} - -/** - * mix_buffer_dup: - * @obj: a #MixBuffer object - * @returns: a newly allocated duplicate of the object. - * - * Creates a duplicate of the object. - */ -MixParams * MixBuffer::dup() const { - MixParams *ret = new MixBuffer(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; -} - -/** - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target.
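To make the buffer model described in the section comment concrete, here is a minimal usage sketch of the App/MMF side. mix_buffer_set_data() and mix_buffer_unref() are taken from this file; mix_video_get_mixbuffer() is only referenced by the comment above, so its signature here is an assumption, and release_coded_data() is a hypothetical application helper.

// Hypothetical App/MMF release hook: fires once the wrapping MixBuffer
// is completely released by both MixVideo and the application.
static void release_coded_data(ulong token, uchar *data) {
    delete[] data;  // the coded data buffer is owned by the App/MMF
}

static void submit_coded_data(MixVideo *video, uchar *data, uint size) {
    MixBuffer *buf = NULL;
    // Assumed signature, based on the section comment above.
    if (mix_video_get_mixbuffer(video, &buf) != MIX_RESULT_SUCCESS)
        return;
    mix_buffer_set_data(buf, data, size, 0 /* token */, release_coded_data);
    // ... pass buf to mix_video_decode() or mix_video_encode() here ...
    mix_buffer_unref(buf);  // app-side release; the pool reclaims the wrapper
}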
- */ -bool MixBuffer::copy(MixParams * target) const { - bool ret = FALSE; - MixBuffer * this_target = MIX_BUFFER(target); - if (NULL != this_target) { - this_target->data = data; - this_target->size = size; - this_target->token = token; - this_target->callback = callback; - ret = MixParams::copy(target); - } - return ret; -} - -bool MixBuffer::equal(MixParams * obj) const { - bool ret = FALSE; - MixBuffer * this_obj = MIX_BUFFER(obj); - if (NULL != this_obj) { - if (this_obj->data == data && - this_obj->size == size && - this_obj->token == token && - this_obj->callback == callback) { - ret = MixParams::equal(this_obj); - } - } - return ret; -} - -MixBuffer * mix_buffer_new(void) { - return new MixBuffer(); -} - -MixBuffer * mix_buffer_ref(MixBuffer * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} - - - -MIX_RESULT mix_buffer_set_data( - MixBuffer * obj, uchar *data, uint size, - ulong token, MixBufferCallback callback) { - obj->data = data; - obj->size = size; - obj->token = token; - obj->callback = callback; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_buffer_set_pool(MixBuffer *obj, MixBufferPool *pool) { - obj->pool = pool; - return MIX_RESULT_SUCCESS; -} - -void mix_buffer_unref(MixBuffer * obj) { - - if (NULL != obj) { - int newRefcount = obj->GetRefCount() - 1; - LOG_I( "after unref, refcount = %d\n", newRefcount); - // Unref through base class - obj->Unref(); - if (1 == newRefcount) { - return_if_fail(obj->pool != NULL); - if (obj->callback) { - obj->callback(obj->token, obj->data); - } - mix_bufferpool_put(obj->pool, obj); - } - } -} - diff --git a/mix_video/src/mixbuffer.h b/mix_video/src/mixbuffer.h deleted file mode 100644 index af23aaf..0000000 --- a/mix_video/src/mixbuffer.h +++ /dev/null @@ -1,108 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_BUFFER_H__ -#define __MIX_BUFFER_H__ - -#include -#include "mixvideodef.h" - -/** - * MIX_BUFFER: - * @obj: object to be type-casted. - */ -#define MIX_BUFFER(obj) (reinterpret_cast(obj)) - -/** - * MIX_IS_BUFFER: - * @obj: an object. 
- * - * Checks if the given object is an instance of #MixParams - */ -#define MIX_IS_BUFFER(obj) (NULL != MIX_BUFFER(obj)) - -typedef void (*MixBufferCallback)(ulong token, uchar *data); - -class MixBufferPool; - -/** - * MixBuffer: - * - * MI-X Buffer Parameter object - */ -class MixBuffer : public MixParams { -public: - MixBuffer(); - virtual ~MixBuffer(); - virtual bool copy(MixParams* target) const; - virtual MixParams* dup() const; - virtual bool equal(MixParams* obj) const; -public: - /* Pointer to coded data buffer */ - uchar *data; - - /* Size of coded data buffer */ - uint size; - - /* Token that will be passed to - * the callback function. Can be - * used by the application for - * any information to be associated - * with this coded data buffer, - * such as a pointer to a structure - * belonging to the application. */ - ulong token; - - /* callback function pointer */ - MixBufferCallback callback; - - /* < private > */ - MixBufferPool *pool; -}; - -/** - * mix_buffer_new: - * @returns: A newly allocated instance of #MixBuffer - * - * Use this method to create new instance of #MixBuffer - */ -MixBuffer *mix_buffer_new(void); -/** - * mix_buffer_ref: - * @mix: object to add reference - * @returns: the #MixBuffer instance where reference count has been increased. - * - * Add reference count. - */ -MixBuffer *mix_buffer_ref(MixBuffer * mix); - -/** - * mix_buffer_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -void mix_buffer_unref(MixBuffer * mix); - -/* Class Methods */ - -/** - * mix_buffer_set_data: - * @obj: #MixBuffer object - * @data: data buffer - * @size: data buffer size - * @token: token - * @callback: callback function pointer - * @returns: Common Video Error Return Codes - * - * Set data buffer, size, token and callback function - */ -MIX_RESULT mix_buffer_set_data(MixBuffer * obj, uchar *data, uint size, - ulong token, MixBufferCallback callback); - -#endif /* __MIX_BUFFER_H__ */ diff --git a/mix_video/src/mixbuffer_private.h b/mix_video/src/mixbuffer_private.h deleted file mode 100644 index 6ade94d..0000000 --- a/mix_video/src/mixbuffer_private.h +++ /dev/null @@ -1,34 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
-*/ - -#ifndef __MIX_BUFFER_PRIVATE_H__ -#define __MIX_BUFFER_PRIVATE_H__ - -#include "mixbuffer.h" -#include "mixbufferpool.h" - - -class MixBuffer; -class MixBufferPool; - -#if 0 -typedef struct _MixBufferPrivate MixBufferPrivate; - -struct _MixBufferPrivate -{ - /*< private > */ - MixBufferPool *pool; - -}; -#endif - -/* Private functions */ -MIX_RESULT -mix_buffer_set_pool (MixBuffer *obj, MixBufferPool *pool); - -#endif /* __MIX_BUFFER_PRIVATE_H__ */ diff --git a/mix_video/src/mixbufferpool.cpp b/mix_video/src/mixbufferpool.cpp deleted file mode 100644 index 47afa55..0000000 --- a/mix_video/src/mixbufferpool.cpp +++ /dev/null @@ -1,377 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixbufferpool - * @short_description: MI-X Input Buffer Pool - * - * A data object which stores and manipulates a pool of compressed video buffers. - */ - -#include "mixvideolog.h" -#include "mixbufferpool.h" -#include "mixbuffer_private.h" - -#define SAFE_FREE(p) if(p) { free(p); p = NULL; } - -MixBufferPool::MixBufferPool() - :free_list(NULL) - ,in_use_list(NULL) - ,free_list_max_size(0) - ,high_water_mark(0) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) - ,mLock() { -} - -MixBufferPool::~MixBufferPool() { -} - -MixBufferPool * mix_bufferpool_new(void) { - return new MixBufferPool(); -} - -MixBufferPool * mix_bufferpool_ref(MixBufferPool * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} - -MixParams * MixBufferPool::dup() const { - MixBufferPool * ret = new MixBufferPool(); - MixBufferPool * this_obj = const_cast(this); - if (NULL != ret) { - this_obj->Lock(); - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - this_obj->Unlock(); - } - return ret; -} - - -/** - * mix_bufferpool_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. 
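This dup()/copy() pair follows the idiom used by every MixParams subclass in this tree: dup() allocates a fresh instance and delegates to copy(), which copies the subclass fields and then chains up to the base class. A condensed sketch of that contract, using a hypothetical FooParams subclass:

class FooParams : public MixParams {
public:
    int bar;  // example subclass field (hypothetical)

    virtual MixParams* dup() const {
        MixParams *ret = new FooParams();
        if (NULL != ret && FALSE == copy(ret)) {
            ret->Unref();   // copy failed: drop the fresh instance
            ret = NULL;
        }
        return ret;
    }

    virtual bool copy(MixParams *target) const {
        FooParams *t = reinterpret_cast<FooParams*>(target);
        if (NULL == t) return FALSE;
        t->bar = bar;                    // subclass fields first...
        return MixParams::copy(target);  // ...then chain up to the base
    }
};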
- */ - -bool MixBufferPool::copy(MixParams * target) const { - bool ret = FALSE; - MixBufferPool * this_target = MIX_BUFFERPOOL(target); - MixBufferPool * this_obj = const_cast(this); - if (NULL != this_target) { - this_obj->Lock(); - this_target->Lock(); - this_target->free_list = free_list; - this_target->in_use_list = in_use_list; - this_target->free_list_max_size = free_list_max_size; - this_target->high_water_mark = high_water_mark; - ret = MixParams::copy(target); - this_target->Unlock(); - this_obj->Unlock(); - } - return ret; -} - -bool MixBufferPool::equal(MixParams * obj) const { - bool ret = FALSE; - MixBufferPool * this_obj = MIX_BUFFERPOOL(obj); - MixBufferPool * unconst_this = const_cast(this); - if (NULL != this_obj) { - unconst_this->Lock(); - this_obj->Lock(); - if (free_list == this_obj->free_list && - in_use_list == this_obj->in_use_list && - free_list_max_size == this_obj->free_list_max_size && - high_water_mark == this_obj->high_water_mark) { - ret = MixParams::equal(this_obj); - } - this_obj->Unlock(); - unconst_this->Unlock(); - } - return ret; -} - -/** - * mix_bufferpool_initialize: - * @returns: MIX_RESULT_SUCCESS if successful in creating the buffer pool - * - * Use this method to create a new buffer pool, consisting of a GSList of - * buffer objects that represents a pool of buffers. - */ -MIX_RESULT mix_bufferpool_initialize( - MixBufferPool * obj, uint num_buffers) { - LOG_V( "Begin\n"); - - if (obj == NULL) - return MIX_RESULT_NULL_PTR; - - obj->Lock(); - - if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) { - //buffer pool is in use; return error; need proper cleanup - //TODO need cleanup here? - - obj->Unlock(); - - return MIX_RESULT_ALREADY_INIT; - } - - if (num_buffers == 0) { - obj->free_list = NULL; - - obj->in_use_list = NULL; - - obj->free_list_max_size = num_buffers; - - obj->high_water_mark = 0; - - obj->Unlock(); - - return MIX_RESULT_SUCCESS; - } - - // Initialize the free pool with MixBuffer objects - - uint i = 0; - MixBuffer *buffer = NULL; - - for (; i < num_buffers; i++) { - - buffer = mix_buffer_new(); - - if (buffer == NULL) { - //TODO need to log an error here and do cleanup - - obj->Unlock(); - - return MIX_RESULT_NO_MEMORY; - } - - // Set the pool reference in the private data of the MixBuffer object - mix_buffer_set_pool(buffer, obj); - - //Add each MixBuffer object to the pool list - obj->free_list = j_slist_append(obj->free_list, buffer); - - } - - obj->in_use_list = NULL; - - obj->free_list_max_size = num_buffers; - - obj->high_water_mark = 0; - - obj->Unlock(); - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - -/** - * mix_bufferpool_put: - * @returns: SUCCESS or FAILURE - * - * Use this method to return a buffer to the free pool - */ -MIX_RESULT mix_bufferpool_put(MixBufferPool * obj, MixBuffer * buffer) { - - if (obj == NULL || buffer == NULL) - return MIX_RESULT_NULL_PTR; - - obj->Lock(); - - if (obj->in_use_list == NULL) { - //in use list cannot be empty if a buffer is in use - //TODO need better error code for this - - obj->Unlock(); - - return MIX_RESULT_FAIL; - } - - JSList *element = j_slist_find(obj->in_use_list, buffer); - if (element == NULL) { - //Integrity error; buffer not found in in use list - //TODO need better error code and handling for this - - obj->Unlock(); - - return MIX_RESULT_FAIL; - } else { - //Remove this element from the in_use_list - obj->in_use_list = j_slist_remove_link(obj->in_use_list, element); - - //Concat the element to the free_list - obj->free_list = 
j_slist_concat(obj->free_list, element); - } - - //Note that we do nothing with the ref count for this. We want it to - //stay at 1, which is what triggered it to be added back to the free list. - - obj->Unlock(); - - return MIX_RESULT_SUCCESS; -} - -/** - * mix_bufferpool_get: - * @returns: SUCCESS or FAILURE - * - * Use this method to get a buffer from the free pool - */ -MIX_RESULT mix_bufferpool_get(MixBufferPool * obj, MixBuffer ** buffer) { - - if (obj == NULL || buffer == NULL) - return MIX_RESULT_NULL_PTR; - - obj->Lock(); - - if (obj->free_list == NULL) { - //We are out of buffers - //TODO need to log this as well - - obj->Unlock(); - - return MIX_RESULT_POOLEMPTY; - } - - //Remove a buffer from the free pool - - //We just remove the one at the head, since it's convenient - JSList *element = obj->free_list; - obj->free_list = j_slist_remove_link(obj->free_list, element); - if (element == NULL) { - //Unexpected behavior - //TODO need better error code and handling for this - - obj->Unlock(); - - return MIX_RESULT_FAIL; - } else { - //Concat the element to the in_use_list - obj->in_use_list = j_slist_concat(obj->in_use_list, element); - - //TODO replace with proper logging - - LOG_I( "buffer refcount%d\n", - MIX_PARAMS(element->data)->ref_count); - - //Set the out buffer pointer - *buffer = (MixBuffer *) element->data; - - //Check the high water mark for buffer use - uint size = j_slist_length(obj->in_use_list); - if (size > obj->high_water_mark) - obj->high_water_mark = size; - //TODO Log this high water mark - } - - //Increment the reference count for the buffer - mix_buffer_ref(*buffer); - - obj->Unlock(); - - return MIX_RESULT_SUCCESS; -} - -/** - * mix_bufferpool_deinitialize: - * @returns: SUCCESS or FAILURE - * - * Use this method to teardown a buffer pool - */ -MIX_RESULT mix_bufferpool_deinitialize(MixBufferPool * obj) { - if (obj == NULL) - return MIX_RESULT_NULL_PTR; - - obj->Lock(); - - if ((obj->in_use_list != NULL) || (j_slist_length(obj->free_list) - != obj->free_list_max_size)) { - //TODO better error code - //We have outstanding buffer objects in use and they need to be - //freed before we can deinitialize. 
- - obj->Unlock(); - - return MIX_RESULT_FAIL; - } - - //Now remove buffer objects from the list - - MixBuffer *buffer = NULL; - - while (obj->free_list != NULL) { - //Get the buffer object from the head of the list - buffer = reinterpret_cast(obj->free_list->data); - //buffer = g_slist_nth_data(obj->free_list, 0); - - //Release it - mix_buffer_unref(buffer); - - //Delete the head node of the list and store the new head - obj->free_list = j_slist_delete_link(obj->free_list, obj->free_list); - - //Repeat until empty - } - - obj->free_list_max_size = 0; - - //May want to log this information for tuning - obj->high_water_mark = 0; - - obj->Unlock(); - - return MIX_RESULT_SUCCESS; -} - -#define MIX_BUFFERPOOL_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_BUFFERPOOL(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_BUFFERPOOL_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_BUFFERPOOL(obj)) return MIX_RESULT_FAIL; \ - - -MIX_RESULT -mix_bufferpool_dumpbuffer(MixBuffer *buffer) { - LOG_I( "\tBuffer %x, ptr %x, refcount %d\n", (uint)buffer, - (uint)buffer->data, MIX_PARAMS(buffer)->ref_count); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT -mix_bufferpool_dumpprint (MixBufferPool * obj) { - //TODO replace this with proper logging later - LOG_I( "BUFFER POOL DUMP:\n"); - LOG_I( "Free list size is %d\n", j_slist_length(obj->free_list)); - LOG_I( "In use list size is %d\n", j_slist_length(obj->in_use_list)); - LOG_I( "High water mark is %lu\n", obj->high_water_mark); - - //Walk the free list and report the contents - LOG_I( "Free list contents:\n"); - j_slist_foreach(obj->free_list, (JFunc) mix_bufferpool_dumpbuffer, NULL); - - //Walk the in_use list and report the contents - LOG_I( "In Use list contents:\n"); - j_slist_foreach(obj->in_use_list, (JFunc) mix_bufferpool_dumpbuffer, NULL); - - return MIX_RESULT_SUCCESS; -} - diff --git a/mix_video/src/mixbufferpool.h b/mix_video/src/mixbufferpool.h deleted file mode 100644 index 655fba4..0000000 --- a/mix_video/src/mixbufferpool.h +++ /dev/null @@ -1,104 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_BUFFERPOOL_H__ -#define __MIX_BUFFERPOOL_H__ - -#include -#include "mixvideodef.h" -#include "mixbuffer.h" -#include "mixvideothread.h" -#include -#include - - -class MixBuffer; - -/** -* MIX_BUFFERPOOL: -* @obj: object to be type-casted. 
-*/ -#define MIX_BUFFERPOOL(obj) (reinterpret_cast(obj)) - -/** -* MIX_IS_BUFFERPOOL: -* @obj: an object. -* -* Checks if the given object is an instance of #MixBufferPool -*/ -#define MIX_IS_BUFFERPOOL(obj) (NULL != MIX_BUFFERPOOL(obj)) - -/** -* MixBufferPool: -* -* MI-X Video Buffer Pool object -*/ -class MixBufferPool : public MixParams -{ -public: - MixBufferPool(); - virtual ~MixBufferPool(); - virtual bool copy(MixParams* target) const; - virtual MixParams* dup() const; - virtual bool equal(MixParams* obj) const; - - void Lock() { - mLock.lock(); - } - void Unlock() { - mLock.unlock(); - } -public: - /*< public > */ - JSList *free_list; /* list of free buffers */ - JSList *in_use_list; /* list of buffers in use */ - ulong free_list_max_size; /* initial size of the free list */ - ulong high_water_mark; /* most buffers in use at one time */ - - void *reserved1; - void *reserved2; - void *reserved3; - void *reserved4; - - /*< private > */ - MixVideoMutex mLock; -}; - - - -/** -* mix_bufferpool_new: -* @returns: A newly allocated instance of #MixBufferPool -* -* Use this method to create new instance of #MixBufferPool -*/ -MixBufferPool *mix_bufferpool_new (void); -/** -* mix_bufferpool_ref: -* @mix: object to add reference -* @returns: the MixBufferPool instance where reference count has been increased. -* -* Add reference count. -*/ -MixBufferPool *mix_bufferpool_ref (MixBufferPool * mix); - -/** -* mix_bufferpool_unref: -* @obj: object to unref. -* -* Decrement reference count of the object. -*/ -#define mix_bufferpool_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/* Class Methods */ -MIX_RESULT mix_bufferpool_initialize (MixBufferPool * obj, uint num_buffers); -MIX_RESULT mix_bufferpool_put (MixBufferPool * obj, MixBuffer * buffer); -MIX_RESULT mix_bufferpool_get (MixBufferPool * obj, MixBuffer ** buffer); -MIX_RESULT mix_bufferpool_deinitialize (MixBufferPool * obj); - -#endif /* __MIX_BUFFERPOOL_H__ */ diff --git a/mix_video/src/mixdisplay.cpp b/mix_video/src/mixdisplay.cpp deleted file mode 100644 index 5ab7c0b..0000000 --- a/mix_video/src/mixdisplay.cpp +++ /dev/null @@ -1,130 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
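Putting the pool API together: the header above prescribes a strict borrow/return discipline. Initialize once with a fixed buffer count, balance every get with the final unref that puts the buffer back on the free list, and deinitialize only when the free list is full again. A minimal sketch using only functions declared above (demo code, error paths abbreviated):

MIX_RESULT run_pool_demo(void) {
    MixBufferPool *pool = mix_bufferpool_new();
    if (pool == NULL)
        return MIX_RESULT_NO_MEMORY;

    MIX_RESULT ret = mix_bufferpool_initialize(pool, 4); // preallocate 4 MixBuffers
    if (ret != MIX_RESULT_SUCCESS)
        return ret;

    MixBuffer *buf = NULL;
    ret = mix_bufferpool_get(pool, &buf);   // borrow: moves buf to in_use_list, refs it
    if (ret == MIX_RESULT_SUCCESS)
        mix_buffer_unref(buf);              // dropping the caller's ref returns it to the pool

    ret = mix_bufferpool_deinitialize(pool); // fails unless every buffer is back
    mix_bufferpool_unref(pool);
    return ret;
}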
-*/ - -/** -* SECTION:mixdisplay -* @short_description: Lightweight Base Object for MI-X Video Display -* -*/ -#ifdef HAVE_CONFIG_H -#include "config.h" -#endif - -#include "mixdisplay.h" - -#define DEBUG_REFCOUNT - -MixDisplay::MixDisplay() - :refcount(1) { -} -MixDisplay::~MixDisplay() { - Finalize(); -} - -MixDisplay* MixDisplay::Dup() const { - MixDisplay* dup = new MixDisplay(); - if (NULL != dup ) { - if (FALSE == Copy(dup)) { - dup->Unref(); - dup = NULL; - } - } - return dup; -} - -bool MixDisplay::Copy(MixDisplay* target) const { - if (NULL != target) - return TRUE; - else - return FALSE; -} - -void MixDisplay::Finalize() { -} - -bool MixDisplay::Equal(const MixDisplay* obj) const { - if (NULL != obj) - return TRUE; - else - return FALSE; -} - -MixDisplay * MixDisplay::Ref() { - ++refcount; - return this; -} -void MixDisplay::Unref () { - if (0 == (--refcount)) { - delete this; - } -} - -bool mix_display_copy (MixDisplay * target, const MixDisplay * src) { - if (target == src) - return TRUE; - if (NULL == target || NULL == src) - return FALSE; - return src->Copy(target); -} - - -MixDisplay * mix_display_dup (const MixDisplay * obj) { - if (NULL == obj) - return NULL; - return obj->Dup(); -} - - - -MixDisplay * mix_display_new (void) { - return new MixDisplay(); -} - -MixDisplay * mix_display_ref (MixDisplay * obj) { - if (NULL != obj) - obj->Ref(); - return obj; -} - -void mix_display_unref (MixDisplay * obj) { - if (NULL != obj) - obj->Unref(); -} - - -/** -* mix_display_replace: -* @olddata: pointer to a pointer to a object to be replaced -* @newdata: pointer to new object -* -* Modifies a pointer to point to a new object. The modification -* is done atomically, and the reference counts are updated correctly. -* Either @newdata and the value pointed to by @olddata may be NULL. -*/ -void mix_display_replace (MixDisplay ** olddata, MixDisplay * newdata) { - if (NULL == olddata) - return; - if (*olddata == newdata) - return; - MixDisplay *olddata_val = *olddata; - if (NULL != newdata) - newdata->Ref(); - *olddata = newdata; - if (NULL != olddata_val) - olddata_val->Unref(); -} - -bool mix_display_equal (MixDisplay * first, MixDisplay * second) { - if (first == second) - return TRUE; - if (NULL == first || NULL == second) - return FALSE; - return first->Equal(second); -} - - diff --git a/mix_video/src/mixdisplay.h b/mix_video/src/mixdisplay.h deleted file mode 100644 index da7f074..0000000 --- a/mix_video/src/mixdisplay.h +++ /dev/null @@ -1,113 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. 
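One subtlety in mix_display_replace() above: despite the "done atomically" wording in its comment, the implementation is a plain ref-then-assign-then-unref sequence with no locking, so concurrent callers need external synchronization. The refcount choreography it does guarantee looks like this:

void demo_replace(void) {
    MixDisplay *slot = mix_display_new();   // refcount 1, held by 'slot'
    MixDisplay *other = mix_display_new();  // refcount 1, held by 'other'

    // After this call: slot points at 'other' (refcount 2), and the
    // object originally in 'slot' has been unreffed and freed.
    mix_display_replace(&slot, other);

    mix_display_unref(other); // drop our direct reference
    mix_display_unref(slot);  // drop the slot's reference; object freed
}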
Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_DISPLAY_H__ -#define __MIX_DISPLAY_H__ - -#include - -#define MIX_DISPLAY(obj) (reinterpret_cast(obj)) - -/** -* MixDisplay: -* @instance: type instance -* @refcount: atomic refcount -* -* Base class for a refcounted parameter objects. -*/ -class MixDisplay { -public: - virtual ~MixDisplay(); - - virtual MixDisplay* Dup() const; - virtual bool Copy(MixDisplay* target) const; - virtual void Finalize(); - virtual bool Equal(const MixDisplay* obj) const; - - MixDisplay * Ref(); - void Unref (); - - friend MixDisplay *mix_display_new (void); - -protected: - MixDisplay(); -public: - /*< public > */ - int refcount; - /*< private > */ - void* _reserved; -}; - - -/** -* mix_display_new: -* @returns: return a newly allocated object. -* -* Create new instance of the object. -*/ -MixDisplay *mix_display_new (void); - - - -/** -* mix_display_copy: -* @target: copy to target -* @src: copy from source -* @returns: boolean indicating if copy is successful. -* -* Copy data from one instance to the other. This method internally invoked the #MixDisplay::copy method such that derived object will be copied correctly. -*/ -bool mix_display_copy (MixDisplay * target, const MixDisplay * src); - -/** -* mix_display_ref: -* @obj: a #MixDisplay object. -* @returns: the object with reference count incremented. -* -* Increment reference count. -*/ -MixDisplay *mix_display_ref (MixDisplay * obj); - -/** -* mix_display_unref: -* @obj: a #MixDisplay object. -* -* Decrement reference count. -*/ -void mix_display_unref (MixDisplay * obj); - -/** -* mix_display_replace: -* @olddata: old data -* @newdata: new data -* -* Replace a pointer of the object with the new one. -*/ -void mix_display_replace (MixDisplay ** olddata, MixDisplay * newdata); - -/** -* mix_display_dup: -* @obj: #MixDisplay object to duplicate. -* @returns: A newly allocated duplicate of the object, or NULL if failed. -* -* Duplicate the given #MixDisplay and allocate a new instance. This method is chained up properly and derive object will be dupped properly. -*/ -MixDisplay *mix_display_dup (const MixDisplay * obj); - -/** -* mix_display_equal: -* @first: first object to compare -* @second: second object to compare -* @returns: boolean indicates if the 2 object contains same data. -* -* Note that the parameter comparison compares the values that are hold inside the object, not for checking if the 2 pointers are of the same instance. -*/ -bool mix_display_equal (MixDisplay * first, MixDisplay * second); - - -#endif diff --git a/mix_video/src/mixdisplayandroid.cpp b/mix_video/src/mixdisplayandroid.cpp deleted file mode 100644 index 1d00d24..0000000 --- a/mix_video/src/mixdisplayandroid.cpp +++ /dev/null @@ -1,158 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixdisplayandroid - * @short_description: MI-X Video Android Display - * - * A data object which stores Android specific parameters. - * - * - * Data Structures Used in MixDisplayAndroid Fields: - * - */ - -#ifdef ANDROID - -#include "mixdisplayandroid.h" - -#define SAFE_FREE(p) if(p) { free(p); p = NULL; } - -MixDisplayAndroid::MixDisplayAndroid() - :display(NULL) { -} - -MixDisplayAndroid::~MixDisplayAndroid() { - Finalize(); -} - -MixDisplay* MixDisplayAndroid::Dup() const { - MixDisplayAndroid* dup = new MixDisplayAndroid(); - if (NULL != dup) { - if (FALSE == Copy(dup)) { - dup->Unref(); - dup = NULL; - } - } - return dup; - -} - -bool MixDisplayAndroid::Copy(MixDisplay* target) const { - bool ret = FALSE; - MixDisplayAndroid* this_target = reinterpret_cast<MixDisplayAndroid*>(target); - if (NULL != this_target) { - this_target->display = this->display; - ret = MixDisplay::Copy(target); - } - return ret; -} - -void MixDisplayAndroid::Finalize() { - MixDisplay::Finalize(); -} - -bool MixDisplayAndroid::Equal(const MixDisplay* obj) const { - bool ret = FALSE; - const MixDisplayAndroid* this_obj = reinterpret_cast<const MixDisplayAndroid*>(obj); - if (NULL != this_obj) { - if (this_obj->display == this->display) - ret = MixDisplay::Equal(obj); - } - return ret; -} - - -MixDisplayAndroid * mix_displayandroid_new(void) { - return new MixDisplayAndroid(); -} - - - -MixDisplayAndroid * mix_displayandroid_ref(MixDisplayAndroid * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} - -/** - * mix_displayandroid_dup: - * @obj: a #MixDisplayAndroid object - * @returns: a newly allocated duplicate of the object. - * - * Creates a duplicate of the object. - */ -MixDisplay * mix_displayandroid_dup(const MixDisplay * obj) { - MixDisplay *ret = NULL; - if (NULL != obj) { - ret = obj->Dup(); - } - return ret; -} - -/** - * mix_displayandroid_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -bool mix_displayandroid_copy(MixDisplay * target, const MixDisplay * src) { - if (target == src) - return TRUE; - if (NULL == target || NULL == src) - return FALSE; - const MixDisplayAndroid *this_src = - reinterpret_cast<const MixDisplayAndroid*>(src); - MixDisplayAndroid *this_target = - reinterpret_cast<MixDisplayAndroid*>(target); - return this_src->Copy(this_target); -} - -/** - * mix_displayandroid_equal: - * @first: first object to compare - * @second: second object to compare - * @returns: boolean indicates if instances are equal. - * - * Compares instance data of @first and @second.
- */ -bool mix_displayandroid_equal(MixDisplay * first, MixDisplay * second) { - if (first == second) - return TRUE; - if (NULL == first || NULL == second) - return FALSE; - MixDisplayAndroid *this_first = - reinterpret_cast<MixDisplayAndroid*>(first); - MixDisplayAndroid *this_second = - reinterpret_cast<MixDisplayAndroid*>(second); - return this_first->Equal(this_second); -} - -#define MIX_DISPLAYANDROID_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; - -#define MIX_DISPLAYANDROID_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || prop == NULL) return MIX_RESULT_NULL_PTR; - -MIX_RESULT mix_displayandroid_set_display(MixDisplayAndroid * obj, void * display) { - MIX_DISPLAYANDROID_SETTER_CHECK_INPUT (obj); - // TODO: needs to decide to clone or just copy pointer - obj->display = display; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_displayandroid_get_display(MixDisplayAndroid * obj, void ** display) { - MIX_DISPLAYANDROID_GETTER_CHECK_INPUT (obj, display); - // TODO: needs to decide to clone or just copy pointer - *display = obj->display; - return MIX_RESULT_SUCCESS; -} - -#endif /* ANDROID */ diff --git a/mix_video/src/mixdisplayandroid.h b/mix_video/src/mixdisplayandroid.h deleted file mode 100644 index 1b47f42..0000000 --- a/mix_video/src/mixdisplayandroid.h +++ /dev/null @@ -1,133 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_DISPLAYANDROID_H__ -#define __MIX_DISPLAYANDROID_H__ - -#include "mixdisplay.h" -#include "mixvideodef.h" - -//#ifdef ANDROID -//#include -//using namespace android; -//#endif - -#ifdef __cplusplus -extern "C" { -#endif - -#ifdef ANDROID - - /** - * MIX_DISPLAYANDROID: - * @obj: object to be type-casted. - */ -#define MIX_DISPLAYANDROID(obj) (reinterpret_cast<MixDisplayAndroid*>(obj)) - - /** - * MIX_IS_DISPLAYANDROID: - * @obj: an object. - * - * Checks if the given object is an instance of #MixDisplayAndroid - */ -#define MIX_IS_DISPLAYANDROID(obj) (NULL != MIX_DISPLAYANDROID(obj)) - - - /** - * MixDisplayAndroid: - * - * MI-X Video Android Display object - */ - class MixDisplayAndroid : public MixDisplay { - - public: - ~MixDisplayAndroid(); - virtual MixDisplay* Dup() const; - virtual bool Copy(MixDisplay* target) const; - virtual void Finalize(); - virtual bool Equal(const MixDisplay* obj) const; - - - - friend MixDisplayAndroid *mix_displayandroid_new (void); - - protected: - MixDisplayAndroid(); - public: - /*< public > */ - - /* Pointer to an Android specific display */ - void *display; - - /* An Android drawable that is a smart pointer - * of ISurface.
This field is not used in - * mix_video_initialize(). - */ - // sp<ISurface> drawable; - }; - - /** - * mix_displayandroid_new: - * @returns: A newly allocated instance of #MixDisplayAndroid - * - * Use this method to create new instance of #MixDisplayAndroid - */ - MixDisplayAndroid *mix_displayandroid_new (void); - - - /** - * mix_displayandroid_ref: - * @mix: object to add reference - * @returns: the #MixDisplayAndroid instance where reference count has been increased. - * - * Add reference count. - */ - MixDisplayAndroid *mix_displayandroid_ref (MixDisplayAndroid * mix); - - /** - * mix_displayandroid_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -#define mix_displayandroid_unref(obj) mix_display_unref(MIX_DISPLAY(obj)) - - /* Class Methods */ - - - /** - * mix_displayandroid_set_display: - * @obj: #MixDisplayAndroid object - * @display: Pointer to Android specific display - * @returns: Common Video Error Return Codes - * - * Set Display - */ - MIX_RESULT mix_displayandroid_set_display ( - MixDisplayAndroid * obj, void * display); - - /** - * mix_displayandroid_get_display: - * @obj: #MixDisplayAndroid object - * @display: Pointer to pointer of Android specific display - * @returns: Common Video Error Return Codes - * - * Get Display - */ - MIX_RESULT mix_displayandroid_get_display ( - MixDisplayAndroid * obj, void ** display); - - -#endif /* ANDROID */ - -#ifdef __cplusplus -} -#endif - -#endif /* __MIX_DISPLAYANDROID_H__ */ - diff --git a/mix_video/src/mixdisplayx11.cpp b/mix_video/src/mixdisplayx11.cpp deleted file mode 100644 index 23fb8c0..0000000 --- a/mix_video/src/mixdisplayx11.cpp +++ /dev/null @@ -1,210 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixdisplayx11 - * @short_description: MI-X Video X11 Display - * - * A data object which stores X11 specific parameters. - * - * - * Data Structures Used in MixDisplayX11 Fields: - * See X11/Xlib.h for Display and Drawable definitions.
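A short sketch of the Android display path declared above. The display handle is deliberately an opaque void *, so whatever native window pointer the application holds is passed through unmodified; native_window below is a stand-in for that handle.

void demo_android_display(void *native_window) {
    MixDisplayAndroid *disp = mix_displayandroid_new();
    if (disp == NULL)
        return;
    mix_displayandroid_set_display(disp, native_window); // stores the raw pointer
    void *out = NULL;
    mix_displayandroid_get_display(disp, &out);          // out == native_window
    mix_displayandroid_unref(disp);                      // macro chaining to mix_display_unref
}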
- * - */ - -#include "mixdisplayx11.h" - -#define SAFE_FREE(p) if(p) { free(p); p = NULL; } - -static GType _mix_displayx11_type = 0; -static MixDisplayClass *parent_class = NULL; - -#define _do_init { _mix_displayx11_type = g_define_type_id; } - -bool mix_displayx11_copy(MixDisplay * target, const MixDisplay * src); -MixDisplay *mix_displayx11_dup(const MixDisplay * obj); -bool mix_displayx11_equal(MixDisplay * first, MixDisplay * second); -static void mix_displayx11_finalize(MixDisplay * obj); - -G_DEFINE_TYPE_WITH_CODE (MixDisplayX11, mix_displayx11, - MIX_TYPE_DISPLAY, _do_init); - -static void mix_displayx11_init(MixDisplayX11 * self) { - - /* Initialize member variables */ - self->display = NULL; - self->drawable = 0; -} - -static void mix_displayx11_class_init(MixDisplayX11Class * klass) { - MixDisplayClass *mixdisplay_class = MIX_DISPLAY_CLASS(klass); - - /* setup static parent class */ - parent_class = (MixDisplayClass *) g_type_class_peek_parent(klass); - - mixdisplay_class->finalize = mix_displayx11_finalize; - mixdisplay_class->copy = (MixDisplayCopyFunction) mix_displayx11_copy; - mixdisplay_class->dup = (MixDisplayDupFunction) mix_displayx11_dup; - mixdisplay_class->equal = (MixDisplayEqualFunction) mix_displayx11_equal; -} - -MixDisplayX11 * -mix_displayx11_new(void) { - MixDisplayX11 *ret = (MixDisplayX11 *) g_type_create_instance( - MIX_TYPE_DISPLAYX11); - - return ret; -} - -void mix_displayx11_finalize(MixDisplay * obj) { - /* clean up here. */ - /* MixDisplayX11 *self = MIX_DISPLAYX11 (obj); */ - - /* NOTE: we don't need to do anything - * with display and drawable */ - - /* Chain up parent */ - if (parent_class->finalize) - parent_class->finalize(obj); -} - -MixDisplayX11 * -mix_displayx11_ref(MixDisplayX11 * mix) { - return (MixDisplayX11 *) mix_display_ref(MIX_DISPLAY(mix)); -} - -/** - * mix_displayx11_dup: - * @obj: a #MixDisplayX11 object - * @returns: a newly allocated duplicate of the object. - * - * Creates a duplicate of the object. - */ -MixDisplay * -mix_displayx11_dup(const MixDisplay * obj) { - MixDisplay *ret = NULL; - - if (MIX_IS_DISPLAYX11(obj)) { - MixDisplayX11 *duplicate = mix_displayx11_new(); - if (mix_displayx11_copy(MIX_DISPLAY(duplicate), MIX_DISPLAY(obj))) { - ret = MIX_DISPLAY(duplicate); - } else { - mix_displayx11_unref(duplicate); - } - } - return ret; -} - -/** - * mix_displayx11_copy: - * @target: copy to target - * @src: copy from src - * @returns: boolean indicates if copy is successful. - * - * Copy instance data from @src to @target. - */ -bool mix_displayx11_copy(MixDisplay * target, const MixDisplay * src) { - MixDisplayX11 *this_target, *this_src; - - if (MIX_IS_DISPLAYX11(target) && MIX_IS_DISPLAYX11(src)) { - // Cast the base object to this child object - this_target = MIX_DISPLAYX11(target); - this_src = MIX_DISPLAYX11(src); - - // Copy properties from source to target. - - this_target->display = this_src->display; - this_target->drawable = this_src->drawable; - - // Now chainup base class - if (parent_class->copy) { - return parent_class->copy(MIX_DISPLAY_CAST(target), - MIX_DISPLAY_CAST(src)); - } else { - return TRUE; - } - } - return FALSE; -} - -/** - * mix_displayx11_equal: - * @first: first object to compare - * @second: second object to compare - * @returns: boolean indicates if instances are equal. - * - * Compares instance data of @first and @second.
- */ -bool mix_displayx11_equal(MixDisplay * first, MixDisplay * second) { - bool ret = FALSE; - - MixDisplayX11 *this_first, *this_second; - - this_first = MIX_DISPLAYX11(first); - this_second = MIX_DISPLAYX11(second); - - if (MIX_IS_DISPLAYX11(first) && MIX_IS_DISPLAYX11(second)) { - // Compare member variables - - // TODO: if in the copy method we just copy the pointer of display, the comparison - // below is enough. But we need to decide how to copy! - - if (this_first->display == this_second->display && this_first->drawable - == this_second->drawable) { - // members within this scope equal. chaining up. - MixDisplayClass *klass = MIX_DISPLAY_CLASS(parent_class); - if (klass->equal) - ret = parent_class->equal(first, second); - else - ret = TRUE; - } - } - return ret; -} - -#define MIX_DISPLAYX11_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_DISPLAYX11(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_DISPLAYX11_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_DISPLAYX11(obj)) return MIX_RESULT_FAIL; \ - -MIX_RESULT mix_displayx11_set_display(MixDisplayX11 * obj, Display * display) { - MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj); - - // TODO: needs to decide to clone or just copy pointer - obj->display = display; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_displayx11_get_display(MixDisplayX11 * obj, Display ** display) { - MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, display); - - // TODO: needs to decide to clone or just copy pointer - *display = obj->display; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_displayx11_set_drawable(MixDisplayX11 * obj, Drawable drawable) { - MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj); - - // TODO: needs to decide to clone or just copy pointer - obj->drawable = drawable; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_displayx11_get_drawable(MixDisplayX11 * obj, Drawable * drawable) { - MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, drawable); - - // TODO: needs to decide to clone or just copy pointer - *drawable = obj->drawable; - return MIX_RESULT_SUCCESS; -} diff --git a/mix_video/src/mixdisplayx11.h b/mix_video/src/mixdisplayx11.h deleted file mode 100644 index a3fe183..0000000 --- a/mix_video/src/mixdisplayx11.h +++ /dev/null @@ -1,180 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
-*/ - -#ifndef __MIX_DISPLAYX11_H__ -#define __MIX_DISPLAYX11_H__ - -#include "mixdisplay.h" -#include "mixvideodef.h" -#include <X11/Xlib.h> - -G_BEGIN_DECLS - -/** -* MIX_TYPE_DISPLAYX11: -* -* Get type of class. -*/ -#define MIX_TYPE_DISPLAYX11 (mix_displayx11_get_type ()) - -/** -* MIX_DISPLAYX11: -* @obj: object to be type-casted. -*/ -#define MIX_DISPLAYX11(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DISPLAYX11, MixDisplayX11)) - -/** -* MIX_IS_DISPLAYX11: -* @obj: an object. -* -* Checks if the given object is an instance of #MixDisplayX11 -*/ -#define MIX_IS_DISPLAYX11(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DISPLAYX11)) - -/** -* MIX_DISPLAYX11_CLASS: -* @klass: class to be type-casted. -*/ -#define MIX_DISPLAYX11_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DISPLAYX11, MixDisplayX11Class)) - -/** -* MIX_IS_DISPLAYX11_CLASS: -* @klass: a class. -* -* Checks if the given class is #MixDisplayX11Class -*/ -#define MIX_IS_DISPLAYX11_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DISPLAYX11)) - -/** -* MIX_DISPLAYX11_GET_CLASS: -* @obj: a #MixDisplay object. -* -* Get the class instance of the object. -*/ -#define MIX_DISPLAYX11_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DISPLAYX11, MixDisplayX11Class)) - -typedef struct _MixDisplayX11 MixDisplayX11; -typedef struct _MixDisplayX11Class MixDisplayX11Class; - -/** -* MixDisplayX11: -* -* MI-X Video X11 Display object -*/ -struct _MixDisplayX11 -{ - /*< public > */ - MixDisplay parent; - - /*< public > */ - - /* Pointer to an X Window Display structure */ - Display *display; - - /* An X Window Drawable that is either a Window - * or a Pixmap. This field is not used in - * mix_video_initialize(). - * See X11/Xlib.h for Display and Drawable definitions.*/ - Drawable drawable; -}; - -/** -* MixDisplayX11Class: -* -* MI-X Video X11 Display class -*/ -struct _MixDisplayX11Class -{ - /*< public > */ - MixDisplayClass parent_class; - - /* class members */ -}; - -/** -* mix_displayx11_get_type: -* @returns: type -* -* Get the type of object. -*/ -GType mix_displayx11_get_type (void); - -/** -* mix_displayx11_new: -* @returns: A newly allocated instance of #MixDisplayX11 -* -* Use this method to create new instance of #MixDisplayX11 -*/ -MixDisplayX11 *mix_displayx11_new (void); -/** -* mix_displayx11_ref: -* @mix: object to add reference -* @returns: the #MixDisplayX11 instance where reference count has been increased. -* -* Add reference count. -*/ -MixDisplayX11 *mix_displayx11_ref (MixDisplayX11 * mix); - -/** -* mix_displayx11_unref: -* @obj: object to unref. -* -* Decrement reference count of the object. -*/ -#define mix_displayx11_unref(obj) mix_display_unref(MIX_DISPLAY(obj)) - -/* Class Methods */ - - -/** - * mix_displayx11_set_display: - * @obj: #MixDisplayX11 object - * @display: Pointer to an X Window Display structure - * @returns: Common Video Error Return Codes - * - * Set Display - */ -MIX_RESULT mix_displayx11_set_display (MixDisplayX11 * obj, - Display * display); - -/** - * mix_displayx11_get_display: - * @obj: #MixDisplayX11 object - * @display: Pointer to pointer of X Window Display structure - * @returns: Common Video Error Return Codes - * - * Get Display - */ -MIX_RESULT mix_displayx11_get_display (MixDisplayX11 * obj, - Display ** display); - -/** - * mix_displayx11_set_drawable: - * @obj: #MixDisplayX11 object - * @drawable: An X Window Drawable that is either a Window or a Pixmap.
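The X11 variant mirrors the Android one but is typed against Xlib: a sketch wiring a real Display and Window through the setters declared above (XOpenDisplay/XCreateSimpleWindow are standard Xlib calls; error handling omitted).

#include <X11/Xlib.h>

void demo_x11_display(void) {
    Display *xdpy = XOpenDisplay(NULL);     // connect to $DISPLAY
    if (xdpy == NULL)
        return;
    Window win = XCreateSimpleWindow(xdpy, DefaultRootWindow(xdpy),
                                     0, 0, 320, 240, 0, 0, 0);
    MixDisplayX11 *disp = mix_displayx11_new();
    mix_displayx11_set_display(disp, xdpy);   // pointer is copied, not cloned
    mix_displayx11_set_drawable(disp, win);   // a Window is a Drawable
    mix_displayx11_unref(disp);
    XDestroyWindow(xdpy, win);
    XCloseDisplay(xdpy);
}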
- * @returns: Common Video Error Return Codes - * - * Set drawable - */ -MIX_RESULT mix_displayx11_set_drawable (MixDisplayX11 * obj, - Drawable drawable); - -/** - * mix_displayx11_get_drawable: - * @obj: #MixDisplayX11 object - * @drawable: An X Window Drawable that is either a Window or a Pixmap to be returned. - * @returns: Common Video Error Return Codes - * - * Get drawable - */ -MIX_RESULT mix_displayx11_get_drawable (MixDisplayX11 * obj, - Drawable * drawable); - -G_END_DECLS - -#endif /* __MIX_DISPLAYX11_H__ */ diff --git a/mix_video/src/mixframemanager.cpp b/mix_video/src/mixframemanager.cpp deleted file mode 100644 index cbd2439..0000000 --- a/mix_video/src/mixframemanager.cpp +++ /dev/null @@ -1,686 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - - -#include "mixvideolog.h" -#include "mixframemanager.h" -#include "mixvideoframe_private.h" - -#define INITIAL_FRAME_ARRAY_SIZE 16 - -// Assume only one backward reference is used. This will hold up to 2 frames before forcing -// the earliest frame out of queue. 
-#define MIX_MAX_ENQUEUE_SIZE 2 - -// RTP timestamp is 32-bit long and could be rollover in 13 hours (based on 90K Hz clock) -#define TS_ROLLOVER_THRESHOLD (0xFFFFFFFF/2) - - -#define MIX_SECOND (1000000 * INT64_CONSTANT (1000)) - -MixFrameManager::~MixFrameManager() { - /* cleanup here */ - mix_framemanager_deinitialize(this); -} - -MixFrameManager::MixFrameManager() - :initialized(FALSE) - ,flushing(FALSE) - ,eos(FALSE) - ,mLock() - ,frame_list(NULL) - ,framerate_numerator(30) - ,framerate_denominator(1) - ,frame_timestamp_delta(0) - ,mode(MIX_DISPLAY_ORDER_UNKNOWN) - ,is_first_frame(TRUE) - ,last_frame_timestamp(0) - ,next_frame_timestamp(0) - ,next_frame_picnumber(0) - ,max_enqueue_size(MIX_MAX_ENQUEUE_SIZE) - ,max_picture_number((uint32)-1) - ,dpb_size((uint32)-1) - ,ref_count(1) { -} - -MixFrameManager *mix_framemanager_new(void) { - return new MixFrameManager(); -} - - - -MixFrameManager *mix_framemanager_ref(MixFrameManager * fm) { - if (NULL != fm) - fm->ref_count++; - return fm; -} - -/* MixFrameManager class methods */ - -MIX_RESULT mix_framemanager_initialize( - MixFrameManager *fm, MixDisplayOrderMode mode, - int framerate_numerator, int framerate_denominator) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (fm->initialized) { - return MIX_RESULT_ALREADY_INIT; - } - - fm->frame_list = NULL; - fm->framerate_numerator = framerate_numerator; - fm->framerate_denominator = framerate_denominator; - fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND - / fm->framerate_numerator; - - fm->mode = mode; - - LOG_V("fm->mode = %d\n", fm->mode); - - fm->is_first_frame = TRUE; - fm->next_frame_timestamp = 0; - fm->last_frame_timestamp = 0; - fm->next_frame_picnumber = 0; - fm->initialized = TRUE; -cleanup: - - return ret; -} - -MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm) { - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } - - mix_framemanager_flush(fm); - - fm->mLock.lock(); - - fm->initialized = FALSE; - - fm->mLock.unlock(); - - return MIX_RESULT_SUCCESS; -} - -MixFrameManager* mix_framemanager_unref(MixFrameManager *fm) { - if (NULL != fm) { - fm->ref_count--; - if (0 == fm->ref_count) { - delete fm; - return NULL; - } - } - return fm; -} - -MIX_RESULT mix_framemanager_set_framerate( - MixFrameManager *fm, int framerate_numerator, int framerate_denominator) { - - if (framerate_numerator <= 0 || framerate_denominator <= 0) { - return MIX_RESULT_INVALID_PARAM; - } - fm->mLock.lock(); - fm->framerate_numerator = framerate_numerator; - fm->framerate_denominator = framerate_denominator; - fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND - / fm->framerate_numerator; - fm->mLock.unlock(); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_framemanager_get_framerate(MixFrameManager *fm, - int *framerate_numerator, int *framerate_denominator) { - if (!framerate_numerator || !framerate_denominator) { - return MIX_RESULT_INVALID_PARAM; - } - fm->mLock.lock(); - *framerate_numerator = fm->framerate_numerator; - *framerate_denominator = fm->framerate_denominator; - fm->mLock.unlock(); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_framemanager_get_display_order_mode( - MixFrameManager *fm, MixDisplayOrderMode *mode) { - if (!mode) { - return MIX_RESULT_INVALID_PARAM; - } - /* no need to use lock */ - *mode = fm->mode; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_framemanager_set_max_enqueue_size( - MixFrameManager *fm, int size) { - if (size <= 0) { - return MIX_RESULT_FAIL; - } - fm->mLock.lock(); - fm->max_enqueue_size = size; - 
LOG_V("max enqueue size is %d\n", size); - fm->mLock.unlock(); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_framemanager_set_max_picture_number( - MixFrameManager *fm, uint32 num) { - // NOTE: set maximum picture order number only if pic_order_cnt_type is 0 (see H.264 spec) - if (num < 16) { - // Refer to H.264 spec: log2_max_pic_order_cnt_lsb_minus4. Max pic order will never be less than 16. - return MIX_RESULT_INVALID_PARAM; - } - fm->mLock.lock(); - // max_picture_number is exclusie (range from 0 to num - 1). - // Note that this number may not be reliable if encoder does not conform to the spec, as of this, the - // implementaion will not automatically roll-over fm->next_frame_picnumber when it reaches - // fm->max_picture_number. - fm->max_picture_number = num; - LOG_V("max picture number is %d\n", num); - - fm->mLock.unlock(); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_framemanager_set_dpb_size( - MixFrameManager *fm, uint32 num) { - fm->mLock.lock(); - fm->dpb_size = num < MIX_MAX_ENQUEUE_SIZE ? num : MIX_MAX_ENQUEUE_SIZE; - LOG_V("dpb is %d\n", fm->dpb_size); - fm->mLock.unlock(); - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) { - MixVideoFrame *frame = NULL; - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } - - fm->mLock.lock(); - while (fm->frame_list) { - frame = (MixVideoFrame*) j_slist_nth_data(fm->frame_list, 0); - fm->frame_list = j_slist_remove(fm->frame_list, frame); - mix_videoframe_unref(frame); - LOG_V("one frame is flushed\n"); - }; - - fm->eos = FALSE; - fm->is_first_frame = TRUE; - fm->next_frame_timestamp = 0; - fm->last_frame_timestamp = 0; - fm->next_frame_picnumber = 0; - - fm->mLock.unlock(); - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V("Begin fm->mode = %d\n", fm->mode); - - if (!mvf) { - return MIX_RESULT_INVALID_PARAM; - } - - if (!fm->initialized) { - return MIX_RESULT_NOT_INIT; - } - - bool discontinuity = FALSE; - mix_videoframe_get_discontinuity(mvf, &discontinuity); - if (discontinuity) - { - LOG_V("current frame has discontinuity!\n"); - mix_framemanager_flush(fm); - } -#ifdef MIX_LOG_ENABLE - if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER) { - uint32 num; - mix_videoframe_get_displayorder(mvf, &num); - uint64 ts; - mix_videoframe_get_timestamp(mvf, &ts); - LOG_V("pic %d is enqueued, ts = %"INT64_FORMAT"\n", num, ts); - } - - else// if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP) { - { - uint64 ts; - mix_videoframe_get_timestamp(mvf, &ts); - LOG_V("ts %"UINT64_FORMAT" is enqueued.\n", ts); - } -#endif - - fm->mLock.lock(); - fm->frame_list = j_slist_append(fm->frame_list, (void*)mvf); - fm->mLock.unlock(); - LOG_V("End\n"); - return ret; -} - -void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) -{ - // this function finds the lowest time stamp in the list and assign it to the dequeued video frame, - // if that timestamp is smaller than the timestamp of dequeued video frame. - int i; - uint64 ts = 0, min_ts = 0; - MixVideoFrame *p = NULL, *min_p = NULL; - int len = j_slist_length(fm->frame_list); - if (len == 0) - { - // nothing to update - return; - } - - // find video frame with the smallest timestamp, take rollover into account when - // comparing timestamp. 
-    for (i = 0; i < len; i++)
-    {
-        p = (MixVideoFrame*)j_slist_nth_data(fm->frame_list, i);
-        mix_videoframe_get_timestamp(p, &ts);
-        if (i == 0 ||
-            (ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) ||
-            (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD))
-        {
-            min_ts = ts;
-            min_p = p;
-        }
-    }
-
-    mix_videoframe_get_timestamp(mvf, &ts);
-    if ((ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) ||
-        (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD))
-    {
-        // the frame to be updated already has the smaller timestamp
-    }
-    else
-    {
-        // timestamps need to be monotonically non-decreasing, so swap the timestamps.
-        mix_videoframe_set_timestamp(mvf, min_ts);
-        mix_videoframe_set_timestamp(min_p, ts);
-        LOG_V("timestamp for current frame is updated from %"UINT64_FORMAT" to %"UINT64_FORMAT"\n",
-              ts, min_ts);
-    }
-}
-
-
-MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf)
-{
-    int i, num_i_or_p;
-    MixVideoFrame *p, *first_i_or_p;
-    MixFrameType type;
-    int len = j_slist_length(fm->frame_list);
-
-    num_i_or_p = 0;
-    first_i_or_p = NULL;
-
-    for (i = 0; i < len; i++)
-    {
-        p = (MixVideoFrame*)j_slist_nth_data(fm->frame_list, i);
-        mix_videoframe_get_frame_type(p, &type);
-        if (type == TYPE_B)
-        {
-            // A B frame has higher display priority, as only one reference frame is kept in
-            // the list and it should be the backward reference frame for the B frame.
-            fm->frame_list = j_slist_remove(fm->frame_list, p);
-            mix_framemanager_update_timestamp(fm, p);
-            *mvf = p;
-            LOG_V("B frame is dequeued.\n");
-            return MIX_RESULT_SUCCESS;
-        }
-
-        if (type != TYPE_I && type != TYPE_P)
-        {
-            // this should never happen
-            LOG_E("Frame type is invalid!\n");
-            fm->frame_list = j_slist_remove(fm->frame_list, p);
-            mix_videoframe_unref(p);
-            return MIX_RESULT_FRAME_NOTAVAIL;
-        }
-        num_i_or_p++;
-        if (first_i_or_p == NULL)
-        {
-            first_i_or_p = p;
-        }
-
-        // Once there is more than one I or P frame in the queue,
-        // we shouldn't update the B frame's timestamp again.
-        if (num_i_or_p > 1)
-            break;
-    }
-
-    // if there is more than one reference frame in the list, the first one is dequeued.
-    if (num_i_or_p > 1 || fm->eos)
-    {
-        if (first_i_or_p == NULL)
-        {
-            // this should never happen!
-            LOG_E("first_i_or_p frame is NULL!\n");
-            return MIX_RESULT_FAIL;
-        }
-        fm->frame_list = j_slist_remove(fm->frame_list, first_i_or_p);
-        mix_framemanager_update_timestamp(fm, first_i_or_p);
-        *mvf = first_i_or_p;
-#ifdef MIX_LOG_ENABLE
-        mix_videoframe_get_frame_type(first_i_or_p, &type);
-        if (type == TYPE_I)
-        {
-            LOG_V("I frame is dequeued.\n");
-        }
-        else
-        {
-            LOG_V("P frame is dequeued.\n");
-        }
-#endif
-        return MIX_RESULT_SUCCESS;
-    }
-
-    return MIX_RESULT_FRAME_NOTAVAIL;
-}
-
-MIX_RESULT mix_framemanager_timestamp_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf)
-{
-    int i, len;
-    MixVideoFrame *p, *p_out_of_dated;
-    uint64 ts, ts_next_pending, ts_out_of_dated;
-    uint64 tolerance = fm->frame_timestamp_delta/4;
-
-retry:
-    // len may be changed during retry!
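-    // (The retry label is re-entered after next_frame_timestamp has been
-    // advanced past a detected gap, or after both timestamps are reset to
-    // zero on rollover; see the tail of this function.)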
-    len = j_slist_length(fm->frame_list);
-    ts_next_pending = (uint64)-1;
-    ts_out_of_dated = 0;
-    p_out_of_dated = NULL;
-
-
-    for (i = 0; i < len; i++)
-    {
-        p = (MixVideoFrame*)j_slist_nth_data(fm->frame_list, i);
-        mix_videoframe_get_timestamp(p, &ts);
-        if (ts >= fm->last_frame_timestamp &&
-            ts <= fm->next_frame_timestamp + tolerance)
-        {
-            fm->frame_list = j_slist_remove(fm->frame_list, p);
-            *mvf = p;
-            mix_videoframe_get_timestamp(p, &(fm->last_frame_timestamp));
-            fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta;
-            LOG_V("frame is dequeued, ts = %"UINT64_FORMAT".\n", ts);
-            return MIX_RESULT_SUCCESS;
-        }
-
-        if (ts > fm->next_frame_timestamp + tolerance &&
-            ts < ts_next_pending)
-        {
-            ts_next_pending = ts;
-        }
-        if (ts < fm->last_frame_timestamp &&
-            ts >= ts_out_of_dated)
-        {
-            // the most recently outdated video frame.
-            // this may happen in a variable frame rate scenario where two adjacent frames both meet
-            // the "next frame" criteria, and the one with the larger timestamp is dequeued first.
-            ts_out_of_dated = ts;
-            p_out_of_dated = p;
-        }
-    }
-
-    if (p_out_of_dated &&
-        fm->last_frame_timestamp - ts_out_of_dated < TS_ROLLOVER_THRESHOLD)
-    {
-        fm->frame_list = j_slist_remove(fm->frame_list, p_out_of_dated);
-        mix_videoframe_unref(p_out_of_dated);
-        LOG_W("video frame is outdated. ts = %"UINT64_FORMAT" compared to last ts = %"UINT64_FORMAT".\n",
-              ts_out_of_dated, fm->last_frame_timestamp);
-        return MIX_RESULT_FRAME_NOTAVAIL;
-    }
-
-    if (len <= fm->max_enqueue_size && fm->eos == FALSE)
-    {
-        LOG_V("no frame is dequeued, expected ts = %"UINT64_FORMAT", next pending ts = %"UINT64_FORMAT". (List size = %d)\n",
-              fm->next_frame_timestamp, ts_next_pending, len);
-        return MIX_RESULT_FRAME_NOTAVAIL;
-    }
-
-    // timestamp has a gap
-    if (ts_next_pending != (uint64)-1)
-    {
-        LOG_V("timestamp has a gap, jumping from %"UINT64_FORMAT" to %"UINT64_FORMAT".\n",
-              fm->next_frame_timestamp, ts_next_pending);
-
-        fm->next_frame_timestamp = ts_next_pending;
-        goto retry;
-    }
-
-    // timestamp rollover
-    LOG_V("timestamp has rolled over, resetting next frame timestamp from %"UINT64_FORMAT" to 0.\n",
-          fm->next_frame_timestamp);
-
-    fm->next_frame_timestamp = 0;
-    fm->last_frame_timestamp = 0;
-    goto retry;
-
-    // should never be reached
-    LOG_E("Error in timestamp-based dequeue implementation!\n");
-    return MIX_RESULT_FAIL;
-}
-
-MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf)
-{
-    int i, len;
-    MixVideoFrame* p;
-    MixVideoFrame* outp;
-    int outpicnum;
-    int prevpicnum;
-    uint32 picnum;
-    uint32 next_picnum_pending;
-
-    int least_poc_index;
-    uint32 least_poc;
-    uint32 maxframeinqueue;
-
-    len = j_slist_length(fm->frame_list);
-
-retry:
-    next_picnum_pending = (uint32)-1;
-    least_poc_index = -1;
-    least_poc = (uint32)-1;
-
-    if ((fm->dpb_size == -1) || (fm->dpb_size == len))
-    {
-        maxframeinqueue = len;
-        maxframeinqueue = (maxframeinqueue < (MIX_MAX_ENQUEUE_SIZE + 1)) ?
                          maxframeinqueue : (MIX_MAX_ENQUEUE_SIZE + 1);
-    }
-    else
-    {
-        maxframeinqueue = 0;
-    }
-
-    if (maxframeinqueue)
-    {
-        p = (MixVideoFrame*)j_slist_nth_data(fm->frame_list, 0);
-        mix_videoframe_get_displayorder(p, &picnum);
-        outpicnum = picnum;
-        prevpicnum = picnum;
-        outp = p;
-
-        for (i = 1; i < maxframeinqueue; i++)
-        {
-            p = (MixVideoFrame*)j_slist_nth_data(fm->frame_list, i);
-            mix_videoframe_get_displayorder(p, &picnum);
-
-            if (picnum == 0)
-            {
-                break;
-            }
-            else if (picnum < outpicnum)
-            {
-                outpicnum = picnum;
-                outp = p;
-                prevpicnum = picnum;
-            }
-            else // (picnum >= outpicnum)
-            {
-                prevpicnum = picnum;
-            }
-        }
-
-        fm->frame_list = j_slist_remove(fm->frame_list, (void *)outp);
-        mix_framemanager_update_timestamp(fm, outp);
-        *mvf = outp;
-
-
-        return MIX_RESULT_SUCCESS;
-    }
-    if (len <= fm->max_enqueue_size && fm->eos == FALSE)
-    {
-        LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. (List size = %d)\n",
-              fm->next_frame_picnumber, next_picnum_pending, len);
-        return MIX_RESULT_FRAME_NOTAVAIL;
-    }
-
-    // picture number has a gap
-    if (next_picnum_pending != (uint32)-1)
-    {
-        LOG_V("picture number has a gap, jumping from %d to %d.\n",
-              fm->next_frame_picnumber, next_picnum_pending);
-
-        fm->next_frame_picnumber = next_picnum_pending;
-        goto retry;
-    }
-
-    // picture number rollover
-    LOG_V("picture number has rolled over, resetting next picnum from %d to 0.\n",
-          fm->next_frame_picnumber);
-
-    fm->next_frame_picnumber = 0;
-    goto retry;
-
-    // should never be reached
-    LOG_E("Error in picnumber-based dequeue implementation!\n");
-    return MIX_RESULT_FAIL;
-}
-
-MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) {
-
-    MIX_RESULT ret = MIX_RESULT_SUCCESS;
-
-    LOG_V("Begin\n");
-
-    if (!mvf) {
-        return MIX_RESULT_INVALID_PARAM;
-    }
-
-    if (!fm->initialized) {
-        return MIX_RESULT_NOT_INIT;
-    }
-
-    fm->mLock.lock();
-
-    if (fm->frame_list == NULL)
-    {
-        if (fm->eos)
-        {
-            LOG_V("No frame is dequeued (eos)!\n");
-            ret = MIX_RESULT_EOS;
-        }
-        else
-        {
-            LOG_V("No frame is dequeued as the queue is empty!\n");
-            ret = MIX_RESULT_FRAME_NOTAVAIL;
-        }
-    }
-    else if (fm->is_first_frame)
-    {
-        // dequeue the first entry in the list. No need to update the timestamp, as
-        // the list should contain only one frame.
-#ifdef MIX_LOG_ENABLE
-        if (j_slist_length(fm->frame_list) != 1)
-        {
-            LOG_W("length of list is not equal to 1 for the first frame.\n");
-        }
-#endif
-        *mvf = (MixVideoFrame*) j_slist_nth_data(fm->frame_list, 0);
-        fm->frame_list = j_slist_remove(fm->frame_list, (*mvf));
-
-        if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP)
-        {
-            mix_videoframe_get_timestamp(*mvf, &(fm->last_frame_timestamp));
-            fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta;
-            LOG_V("The first frame is dequeued, ts = %"UINT64_FORMAT"\n", fm->last_frame_timestamp);
-        }
-        else if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER)
-        {
-            mix_videoframe_get_displayorder(*mvf, &(fm->next_frame_picnumber));
-            LOG_V("The first frame is dequeued, POC = %d\n", fm->next_frame_picnumber);
-            fm->next_frame_picnumber++;
-            //if (fm->next_frame_picnumber == fm->max_picture_number)
-            //    fm->next_frame_picnumber = 0;
-        }
-        else
-        {
-#ifdef MIX_LOG_ENABLE
-            MixFrameType type;
-            mix_videoframe_get_frame_type(*mvf, &type);
-            LOG_V("The first frame is dequeued, frame type is %d.\n", type);
-#endif
-        }
-        fm->is_first_frame = FALSE;
-
-        ret = MIX_RESULT_SUCCESS;
-    }
-    else
-    {
-        // not the first frame and the list is not empty
-        switch (fm->mode)
-        {
-        case MIX_DISPLAY_ORDER_TIMESTAMP:
-            ret = mix_framemanager_timestamp_based_dequeue(fm, mvf);
-            break;
-
-        case MIX_DISPLAY_ORDER_PICNUMBER:
-            ret = mix_framemanager_picnumber_based_dequeue(fm, mvf);
-            break;
-
-        case MIX_DISPLAY_ORDER_PICTYPE:
-            ret = mix_framemanager_pictype_based_dequeue(fm, mvf);
-            break;
-
-        case MIX_DISPLAY_ORDER_FIFO:
-            *mvf = (MixVideoFrame*) j_slist_nth_data(fm->frame_list, 0);
-            fm->frame_list = j_slist_remove(fm->frame_list, (*mvf));
-            ret = MIX_RESULT_SUCCESS;
-            LOG_V("One frame is dequeued.\n");
-            break;

-        default:
-            LOG_E("Invalid frame order mode\n");
-            ret = MIX_RESULT_FAIL;
-            break;
-        }
-    }
-
-    fm->mLock.unlock();
-    LOG_V("End\n");
-    return ret;
-}
-
-MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) {
-    MIX_RESULT ret = MIX_RESULT_SUCCESS;
-    if (!fm->initialized) {
-        return MIX_RESULT_NOT_INIT;
-    }
-    fm->mLock.lock();
-    fm->eos = TRUE;
-    LOG_V("EOS is received.\n");
-    fm->mLock.unlock();
-    return ret;
-}
-
diff --git a/mix_video/src/mixframemanager.h b/mix_video/src/mixframemanager.h
deleted file mode 100644
index ca49ef0..0000000
--- a/mix_video/src/mixframemanager.h
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-#ifndef __MIX_FRAMEMANAGER_H__
-#define __MIX_FRAMEMANAGER_H__
-
-#include
-#include "mixvideodef.h"
-#include "mixvideoframe.h"
-#include "mixvideothread.h"
-#include
-
-/*
-* MIX_FRAMEORDER_MODE_DECODEORDER is here interpreted as
-* MIX_DISPLAY_ORDER_FIFO, a special case of display order mode.
-*/
-typedef enum
-{
-    MIX_DISPLAY_ORDER_UNKNOWN,
-    MIX_DISPLAY_ORDER_FIFO,
-    MIX_DISPLAY_ORDER_TIMESTAMP,
-    MIX_DISPLAY_ORDER_PICNUMBER,
-    MIX_DISPLAY_ORDER_PICTYPE,
-    MIX_DISPLAY_ORDER_LAST
-} MixDisplayOrderMode;
-
-
-class MixFrameManager {
-public:
-    MixFrameManager();
-    ~MixFrameManager();
-
-public:
-    bool initialized;
-    bool flushing;
-    bool eos;
-    MixVideoMutex mLock;
-    JSList* frame_list;
-    int framerate_numerator;
-    int framerate_denominator;
-    uint64 frame_timestamp_delta;
-    MixDisplayOrderMode mode;
-    bool is_first_frame;
-    uint64 last_frame_timestamp;
-    uint64 next_frame_timestamp;
-    uint32 next_frame_picnumber;
-    int max_enqueue_size;
-    uint32 max_picture_number;
-    uint32 ref_count;
-    int dpb_size;
-};
-
-
-/**
- * mix_framemanager_new:
- * @returns: A newly allocated instance of #MixFrameManager
- *
- * Use this method to create a new instance of #MixFrameManager
- */
-MixFrameManager *mix_framemanager_new(void);
-
-/**
- * mix_framemanager_ref:
- * @mix: object to add a reference to
- * @returns: the MixFrameManager instance whose reference count has been increased.
- *
- * Increase the reference count.
- */
-MixFrameManager *mix_framemanager_ref(MixFrameManager * mix);
-
-/**
- * mix_framemanager_unref:
- * @obj: object to unref.
- *
- * Decrement the reference count of the object.
- */
-MixFrameManager* mix_framemanager_unref(MixFrameManager* fm);
-
-/* Class Methods */
-
-/*
- * Initialize FM
- */
-MIX_RESULT mix_framemanager_initialize(
-    MixFrameManager *fm, MixDisplayOrderMode mode,
-    int framerate_numerator, int framerate_denominator);
-/*
- * Deinitialize FM
- */
-MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm);
-
-/*
- * Set new framerate
- */
-MIX_RESULT mix_framemanager_set_framerate(
-    MixFrameManager *fm, int framerate_numerator, int framerate_denominator);
-
-/*
- * Get framerate
- */
-MIX_RESULT mix_framemanager_get_framerate(
-    MixFrameManager *fm, int *framerate_numerator, int *framerate_denominator);
-
-
-/*
- * Set maximum size of queue
- */
-MIX_RESULT mix_framemanager_set_max_enqueue_size(
-    MixFrameManager *fm, int size);
-
-
-/*
- * Set maximum picture number
- */
-MIX_RESULT mix_framemanager_set_max_picture_number(
-    MixFrameManager *fm, uint32 num);
-
-MIX_RESULT mix_framemanager_set_dpb_size(
-    MixFrameManager *fm, uint32 num);
-
-/*
- * Get Display Order Mode
- */
-MIX_RESULT mix_framemanager_get_display_order_mode(
-    MixFrameManager *fm, MixDisplayOrderMode *mode);
-
-/*
- * On discontinuity, reset the FM
- */
-MIX_RESULT mix_framemanager_flush(MixFrameManager *fm);
-
-/*
- * Enqueue MixVideoFrame
- */
-MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf);
-
-/*
- * Dequeue MixVideoFrame in the proper order, depending on the MixDisplayOrderMode
- * value set during initialization.
- */
-MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf);
-
-/*
- * End of stream.
- */ -MIX_RESULT mix_framemanager_eos(MixFrameManager *fm); - -#endif /* __MIX_FRAMEMANAGER_H__ */ diff --git a/mix_video/src/mixsurfacepool.cpp b/mix_video/src/mixsurfacepool.cpp deleted file mode 100644 index d5328bc..0000000 --- a/mix_video/src/mixsurfacepool.cpp +++ /dev/null @@ -1,564 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixsurfacepool - * @short_description: MI-X Video Surface Pool - * - * A data object which stores and manipulates a pool of video surfaces. - */ - -#include "mixvideolog.h" -#include "mixsurfacepool.h" -#include "mixvideoframe_private.h" - -#define MIX_LOCK(lock) g_mutex_lock(lock); -#define MIX_UNLOCK(lock) g_mutex_unlock(lock); - - -#define SAFE_FREE(p) if(p) { free(p); p = NULL; } - -MixSurfacePool::MixSurfacePool() -/* initialize properties here */ - :free_list(NULL) - ,in_use_list(NULL) - ,free_list_max_size(0) - ,free_list_cur_size(0) - ,high_water_mark(0) - ,initialized(FALSE) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) - ,mLock() { -} - -MixSurfacePool::~MixSurfacePool() { -} - -MixParams* MixSurfacePool::dup() const { - MixParams *ret = NULL; - mLock.lock(); - ret = new MixSurfacePool(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - mLock.unlock(); - return ret; -} - -bool MixSurfacePool::copy(MixParams* target) const { - if (NULL == target) return FALSE; - MixSurfacePool* this_target = MIX_SURFACEPOOL(target); - - mLock.lock(); - this_target->mLock.lock(); - // Free the existing properties - // Duplicate string - this_target->free_list = free_list; - this_target->in_use_list = in_use_list; - this_target->free_list_max_size = free_list_max_size; - this_target->free_list_cur_size = free_list_cur_size; - this_target->high_water_mark = high_water_mark; - - this_target->mLock.unlock(); - mLock.unlock(); - - MixParams::copy(target); - return TRUE; -} - -bool MixSurfacePool::equal(MixParams *first) const { - if (NULL == first) return FALSE; - bool ret = FALSE; - MixSurfacePool *this_first = MIX_SURFACEPOOL(first); - mLock.lock(); - this_first->mLock.lock(); - if (this_first->free_list == free_list - && this_first->in_use_list == in_use_list - && this_first->free_list_max_size - == free_list_max_size - && this_first->free_list_cur_size - == free_list_cur_size - && this_first->high_water_mark == high_water_mark) { - ret = MixParams::equal(first); - } - this_first->mLock.unlock(); - mLock.unlock(); - return ret; -} - 
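-/*
- * Minimal pool-lifecycle sketch (illustrative only: surface creation, the
- * decode step and error handling are elided, and the surface count of 4 is
- * hypothetical):
- *
- *   VASurfaceID surfaces[4];   // e.g. filled in by vaCreateSurfaces()
- *   MixSurfacePool *pool = mix_surfacepool_new();
- *   mix_surfacepool_initialize(pool, surfaces, 4, va_display);
- *
- *   MixVideoFrame *frame = NULL;
- *   if (mix_surfacepool_get(pool, &frame) == MIX_RESULT_SUCCESS) {
- *       // ... decode into the surface owned by frame ...
- *       mix_surfacepool_put(pool, frame);   // back to the free list
- *   }
- *
- *   mix_surfacepool_deinitialize(pool);
- *   mix_surfacepool_unref(pool);
- */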
-MixSurfacePool * -mix_surfacepool_new(void) { - return new MixSurfacePool(); -} - -MixSurfacePool * -mix_surfacepool_ref(MixSurfacePool * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} - -/* Class Methods */ - -/** - * mix_surfacepool_initialize: - * @returns: MIX_RESULT_SUCCESS if successful in creating the surface pool - * - * Use this method to create a new surface pool, consisting of a GSList of - * frame objects that represents a pool of surfaces. - */ -MIX_RESULT mix_surfacepool_initialize(MixSurfacePool * obj, - VASurfaceID *surfaces, uint num_surfaces, VADisplay va_display) { - - LOG_V( "Begin\n"); - - if (obj == NULL || surfaces == NULL) { - - LOG_E( - "Error NULL ptrs, obj %x, surfaces %x\n", (uint) obj, - (uint) surfaces); - - return MIX_RESULT_NULL_PTR; - } - - obj->mLock.lock(); - - if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) { - //surface pool is in use; return error; need proper cleanup - //TODO need cleanup here? - - obj->mLock.unlock(); - - return MIX_RESULT_ALREADY_INIT; - } - - if (num_surfaces == 0) { - obj->free_list = NULL; - - obj->in_use_list = NULL; - - obj->free_list_max_size = num_surfaces; - - obj->free_list_cur_size = num_surfaces; - - obj->high_water_mark = 0; - - /* assume it is initialized */ - obj->initialized = TRUE; - - obj->mLock.unlock(); - - return MIX_RESULT_SUCCESS; - } - - // Initialize the free pool with frame objects - - uint i = 0; - MixVideoFrame *frame = NULL; - - for (; i < num_surfaces; i++) { - - //Create a frame object for each surface ID - frame = mix_videoframe_new(); - - if (frame == NULL) { - //TODO need to log an error here and do cleanup - - obj->mLock.unlock(); - - return MIX_RESULT_NO_MEMORY; - } - - // Set the frame ID to the surface ID - mix_videoframe_set_frame_id(frame, surfaces[i]); - // Set the ci frame index to the surface ID - mix_videoframe_set_ci_frame_idx (frame, i); - // Leave timestamp for each frame object as zero - // Set the pool reference in the private data of the frame object - mix_videoframe_set_pool(frame, obj); - - mix_videoframe_set_vadisplay(frame, va_display); - - //Add each frame object to the pool list - obj->free_list = j_slist_append(obj->free_list, frame); - - } - - obj->in_use_list = NULL; - - obj->free_list_max_size = num_surfaces; - - obj->free_list_cur_size = num_surfaces; - - obj->high_water_mark = 0; - - obj->initialized = TRUE; - - obj->mLock.unlock(); - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - -/** - * mix_surfacepool_put: - * @returns: SUCCESS or FAILURE - * - * Use this method to return a surface to the free pool - */ -MIX_RESULT mix_surfacepool_put(MixSurfacePool * obj, MixVideoFrame * frame) { - - LOG_V( "Begin\n"); - if (obj == NULL || frame == NULL) - return MIX_RESULT_NULL_PTR; - - LOG_V( "Frame id: %d\n", frame->frame_id); - obj->mLock.lock(); - - if (obj->in_use_list == NULL) { - //in use list cannot be empty if a frame is in use - //TODO need better error code for this - - obj->mLock.unlock(); - - return MIX_RESULT_FAIL; - } - - JSList *element = j_slist_find(obj->in_use_list, frame); - if (element == NULL) { - //Integrity error; frame not found in in use list - //TODO need better error code and handling for this - - obj->mLock.unlock(); - - return MIX_RESULT_FAIL; - } else { - //Remove this element from the in_use_list - obj->in_use_list = j_slist_remove_link(obj->in_use_list, element); - - //Concat the element to the free_list and reset the timestamp of the frame - //Note that the surface ID stays valid - mix_videoframe_set_timestamp(frame, 
0); - obj->free_list = j_slist_concat(obj->free_list, element); - - //increment the free list count - obj->free_list_cur_size++; - } - - //Note that we do nothing with the ref count for this. We want it to - //stay at 1, which is what triggered it to be added back to the free list. - - obj->mLock.unlock(); - - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; -} - -/** - * mix_surfacepool_get: - * @returns: SUCCESS or FAILURE - * - * Use this method to get a surface from the free pool - */ -MIX_RESULT mix_surfacepool_get(MixSurfacePool * obj, MixVideoFrame ** frame) { - - LOG_V( "Begin\n"); - - if (obj == NULL || frame == NULL) - return MIX_RESULT_NULL_PTR; - - obj->mLock.lock(); - -#if 0 - if (obj->free_list == NULL) { -#else - if (obj->free_list_cur_size <= 1) { //Keep one surface free at all times for VBLANK bug -#endif - //We are out of surfaces - //TODO need to log this as well - - obj->mLock.unlock(); - - LOG_E( "out of surfaces\n"); - - return MIX_RESULT_OUTOFSURFACES; - } - - //Remove a frame from the free pool - - //We just remove the one at the head, since it's convenient - JSList *element = obj->free_list; - obj->free_list = j_slist_remove_link(obj->free_list, element); - if (element == NULL) { - //Unexpected behavior - //TODO need better error code and handling for this - - obj->mLock.unlock(); - - LOG_E( "Element is null\n"); - - return MIX_RESULT_FAIL; - } else { - //Concat the element to the in_use_list - obj->in_use_list = j_slist_concat(obj->in_use_list, element); - - //TODO replace with proper logging - - LOG_I( "frame refcount%d\n", - MIX_PARAMS(element->data)->ref_count); - - //Set the out frame pointer - *frame = (MixVideoFrame *) element->data; - - LOG_V( "Frame id: %d\n", (*frame)->frame_id); - - //decrement the free list count - obj->free_list_cur_size--; - - //Check the high water mark for surface use - uint size = j_slist_length(obj->in_use_list); - if (size > obj->high_water_mark) - obj->high_water_mark = size; - //TODO Log this high water mark - } - - //Increment the reference count for the frame - mix_videoframe_ref(*frame); - - obj->mLock.unlock(); - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - - -int mixframe_compare_index (MixVideoFrame * a, MixVideoFrame * b) -{ - if (a == NULL || b == NULL) - return -1; - if (a->ci_frame_idx == b->ci_frame_idx) - return 0; - else - return -1; -} - -/** - * mix_surfacepool_get: - * @returns: SUCCESS or FAILURE - * - * Use this method to get a surface from the free pool according to the CI frame idx - */ - -MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool * obj, MixVideoFrame ** frame, MixVideoFrame *in_frame) { - - LOG_V( "Begin\n"); - - if (obj == NULL || frame == NULL) - return MIX_RESULT_NULL_PTR; - - obj->mLock.lock(); - - if (obj->free_list == NULL) { - //We are out of surfaces - //TODO need to log this as well - - obj->mLock.unlock(); - - LOG_E( "out of surfaces\n"); - - return MIX_RESULT_OUTOFSURFACES; - } - - //Remove a frame from the free pool - - //We just remove the one at the head, since it's convenient - JSList *element = j_slist_find_custom (obj->free_list, in_frame, (JCompareFunc) mixframe_compare_index); - obj->free_list = j_slist_remove_link(obj->free_list, element); - if (element == NULL) { - //Unexpected behavior - //TODO need better error code and handling for this - - obj->mLock.unlock(); - - LOG_E( "Element associated with the given frame index is null\n"); - - return MIX_RESULT_DROPFRAME; - } else { - //Concat the element to the in_use_list - obj->in_use_list = 
j_slist_concat(obj->in_use_list, element); - - //TODO replace with proper logging - - LOG_I( "frame refcount%d\n", - MIX_PARAMS(element->data)->ref_count); - - //Set the out frame pointer - *frame = (MixVideoFrame *) element->data; - - //Check the high water mark for surface use - uint size = j_slist_length(obj->in_use_list); - if (size > obj->high_water_mark) - obj->high_water_mark = size; - //TODO Log this high water mark - } - - //Increment the reference count for the frame - mix_videoframe_ref(*frame); - - obj->mLock.unlock(); - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} -/** - * mix_surfacepool_check_available: - * @returns: SUCCESS or FAILURE - * - * Use this method to check availability of getting a surface from the free pool - */ -MIX_RESULT mix_surfacepool_check_available(MixSurfacePool * obj) { - - LOG_V( "Begin\n"); - - if (obj == NULL) - return MIX_RESULT_NULL_PTR; - - obj->mLock.lock(); - - if (obj->initialized == FALSE) - { - LOG_W("surface pool is not initialized, probably configuration data has not been received yet.\n"); - obj->mLock.unlock(); - return MIX_RESULT_NOT_INIT; - } - - -#if 0 - if (obj->free_list == NULL) { -#else - if (obj->free_list_cur_size <= 1) { //Keep one surface free at all times for VBLANK bug -#endif - //We are out of surfaces - - obj->mLock.unlock(); - - LOG_W( - "Returning MIX_RESULT_POOLEMPTY because out of surfaces\n"); - - return MIX_RESULT_POOLEMPTY; - } else { - //Pool is not empty - - obj->mLock.unlock(); - - LOG_I( - "Returning MIX_RESULT_SUCCESS because surfaces are available\n"); - - return MIX_RESULT_SUCCESS; - } - -} - -/** - * mix_surfacepool_deinitialize: - * @returns: SUCCESS or FAILURE - * - * Use this method to teardown a surface pool - */ -MIX_RESULT mix_surfacepool_deinitialize(MixSurfacePool * obj) { - if (obj == NULL) - return MIX_RESULT_NULL_PTR; - - obj->mLock.lock(); - - if ((obj->in_use_list != NULL) || (j_slist_length(obj->free_list) - != obj->free_list_max_size)) { - //TODO better error code - //We have outstanding frame objects in use and they need to be - //freed before we can deinitialize. 
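-        // In other words, every frame handed out with mix_surfacepool_get()
-        // must have been returned to the free list (e.g. via
-        // mix_surfacepool_put()) before teardown can succeed.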
- - obj->mLock.unlock(); - - return MIX_RESULT_FAIL; - } - - //Now remove frame objects from the list - - MixVideoFrame *frame = NULL; - - while (obj->free_list != NULL) { - //Get the frame object from the head of the list - frame = reinterpret_cast(obj->free_list->data); - //frame = g_slist_nth_data(obj->free_list, 0); - - //Release it - mix_videoframe_unref(frame); - - //Delete the head node of the list and store the new head - obj->free_list = j_slist_delete_link(obj->free_list, obj->free_list); - - //Repeat until empty - } - - obj->free_list_max_size = 0; - obj->free_list_cur_size = 0; - - //May want to log this information for tuning - obj->high_water_mark = 0; - - obj->mLock.unlock(); - - return MIX_RESULT_SUCCESS; -} - -#define MIX_SURFACEPOOL_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_SURFACEPOOL(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_SURFACEPOOL_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_SURFACEPOOL(obj)) return MIX_RESULT_FAIL; \ - - -MIX_RESULT -mix_surfacepool_dumpframe(MixVideoFrame *frame) -{ - LOG_I( "\tFrame %x, id %lu, refcount %d, ts %lu\n", (uint)frame, - frame->frame_id, MIX_PARAMS(frame)->ref_count, (ulong) frame->timestamp); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT -mix_surfacepool_dumpprint (MixSurfacePool * obj) -{ - //TODO replace this with proper logging later - - LOG_I( "SURFACE POOL DUMP:\n"); - LOG_I( "Free list size is %d\n", obj->free_list_cur_size); - LOG_I( "In use list size is %d\n", j_slist_length(obj->in_use_list)); - LOG_I( "High water mark is %lu\n", obj->high_water_mark); - - //Walk the free list and report the contents - LOG_I( "Free list contents:\n"); - j_slist_foreach(obj->free_list, (JFunc) mix_surfacepool_dumpframe, NULL); - - //Walk the in_use list and report the contents - LOG_I( "In Use list contents:\n"); - j_slist_foreach(obj->in_use_list, (JFunc) mix_surfacepool_dumpframe, NULL); - - return MIX_RESULT_SUCCESS; -} diff --git a/mix_video/src/mixsurfacepool.h b/mix_video/src/mixsurfacepool.h deleted file mode 100644 index e75e417..0000000 --- a/mix_video/src/mixsurfacepool.h +++ /dev/null @@ -1,99 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_SURFACEPOOL_H__ -#define __MIX_SURFACEPOOL_H__ - -#include -#include "mixvideodef.h" -#include "mixvideoframe.h" -#include "mixvideothread.h" -#include -#include - - -/** -* MIX_SURFACEPOOL: -* @obj: object to be type-casted. 
-*/ -#define MIX_SURFACEPOOL(obj) (reinterpret_cast(obj)) - -/** -* MixSurfacePool: -* -* MI-X Video Surface Pool object -*/ -class MixSurfacePool : public MixParams -{ -public: - /*< public > */ - JSList *free_list; /* list of free surfaces */ - JSList *in_use_list; /* list of surfaces in use */ - ulong free_list_max_size; /* initial size of the free list */ - ulong free_list_cur_size; /* current size of the free list */ - ulong high_water_mark; /* most surfaces in use at one time */ - bool initialized; -// uint64 timestamp; - - void *reserved1; - void *reserved2; - void *reserved3; - void *reserved4; - - /*< private > */ - mutable MixVideoMutex mLock; -public: - MixSurfacePool(); - virtual ~MixSurfacePool(); - virtual bool copy(MixParams *target) const; - virtual bool equal(MixParams* obj) const; - virtual MixParams* dup() const; -}; - -/** -* mix_surfacepool_new: -* @returns: A newly allocated instance of #MixSurfacePool -* -* Use this method to create new instance of #MixSurfacePool -*/ -MixSurfacePool *mix_surfacepool_new (void); -/** -* mix_surfacepool_ref: -* @mix: object to add reference -* @returns: the MixSurfacePool instance where reference count has been increased. -* -* Add reference count. -*/ -MixSurfacePool *mix_surfacepool_ref (MixSurfacePool * mix); - -/** -* mix_surfacepool_unref: -* @obj: object to unref. -* -* Decrement reference count of the object. -*/ -#define mix_surfacepool_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/* Class Methods */ - -MIX_RESULT mix_surfacepool_initialize (MixSurfacePool * obj, - VASurfaceID *surfaces, uint num_surfaces, VADisplay va_display); -MIX_RESULT mix_surfacepool_put (MixSurfacePool * obj, - MixVideoFrame * frame); - -MIX_RESULT mix_surfacepool_get (MixSurfacePool * obj, - MixVideoFrame ** frame); - -MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool * obj, - MixVideoFrame ** frame, MixVideoFrame *in_frame); - -MIX_RESULT mix_surfacepool_check_available (MixSurfacePool * obj); - -MIX_RESULT mix_surfacepool_deinitialize (MixSurfacePool * obj); - -#endif /* __MIX_SURFACEPOOL_H__ */ diff --git a/mix_video/src/mixvideo.cpp b/mix_video/src/mixvideo.cpp deleted file mode 100644 index d21c5c8..0000000 --- a/mix_video/src/mixvideo.cpp +++ /dev/null @@ -1,2246 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideo - * @short_description: Object to support a single stream decoding or encoding using hardware accelerated decoder/encoder. 
- * @include: mixvideo.h - * - * #MixVideo objects are created by the MMF/App and utilized for main MI-X API functionality for video. - * - * The MixVideo object handles any of the video formats internally. - * The App/MMF will pass a MixVideoConfigParamsDecH264/MixVideoConfigParamsDecVC1/ - * MixVideoConfigParamsEncH264/etc object to MixVideo in the mix_video_configure() - * call. MixVideoInitParams, MixVideoDecodeParams, MixVideoEncodeParams, and - * MixVideoRenderParams objects will be passed in the mix_video_initialize(), - * mix_video_decode(), mix_video_encode() and mix_video_render() calls respectively. - * - * The application can take the following steps to decode video: - * - * Create a mix_video object using mix_video_new() - * Initialize the object using mix_video_initialize() - * Configure the stream using mix_video_configure() - * Decode frames using mix_video_decode() - * Retrieve the decoded frames using mix_video_get_frame(). The decoded frames can be retrieved in decode order or display order. - * At the presentation time, using the timestamp provided with the decoded frame, render the frame to an X11 Window using mix_video_render(). The frame can be retained for redrawing until the next frame is retrieved. - * When the frame is no longer needed for redrawing, release the frame using mix_video_release_frame(). - * - * - * For encoding, the application can take the following steps to encode video: - * - * Create a mix_video object using mix_video_new() - * Initialize the object using mix_video_initialize() - * Configure the stream using mix_video_configure() - * Encode frames using mix_video_encode() - * Use the encoded data buffers as desired; for example, forward to a muxing component for saving to a file. - * Retrieve the uncompressed frames for display using mix_video_get_frame(). - * At the presentation time, using the timestamp provided with the decoded frame, render the frame to an X11 Window using mix_video_render(). For encode, the frame should not be retained for redrawing after the initial rendering, due to resource limitations. - * Release the frame using mix_video_release_frame(). 
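- *
- * A minimal decode-loop sketch of the steps above (illustrative only:
- * parameter setup, buffer handling and error checking are elided, and
- * have_input, bufin, bufincnt and the params objects are hypothetical):
- * |[
- * MixVideo *video = mix_video_new();
- * mix_video_initialize(video, MIX_CODEC_MODE_DECODE, init_params, NULL);
- * mix_video_configure(video, config_params, NULL);
- * while (have_input) {
- *     mix_video_decode(video, bufin, bufincnt, decode_params);
- *     if (mix_video_get_frame(video, &frame) == MIX_RESULT_SUCCESS) {
- *         mix_video_render(video, render_params, frame);
- *         mix_video_release_frame(video, frame);
- *     }
- * }
- * ]|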
- * - * - */ - -#include -#include /* libVA */ -#include -#include - -#ifndef ANDROID -#include -#include -#else -#define Display unsigned int -//#include "mix_vagetdisplay.h" - -#ifdef __cplusplus -extern "C" { -#endif - - VADisplay vaGetDisplay ( - void *android_dpy - ); - -#ifdef __cplusplus -} -#endif - - -#endif - -#include "mixvideolog.h" - -#ifndef ANDROID -#include "mixdisplayx11.h" -#else -#include "mixdisplayandroid.h" -#endif -#include "mixvideoframe.h" - -#include "mixframemanager.h" -#include "mixvideorenderparams.h" -#include "mixvideorenderparams_internal.h" - -#include "mixvideoformat.h" -#include "mixvideoformat_vc1.h" -#include "mixvideoformat_h264.h" -#include "mixvideoformat_mp42.h" - -#include "mixvideoconfigparamsdec_vc1.h" -#include "mixvideoconfigparamsdec_h264.h" -#include "mixvideoconfigparamsdec_mp42.h" - - -#include "mixvideoformatenc.h" -#include "mixvideoformatenc_h264.h" -#include "mixvideoformatenc_mpeg4.h" -#include "mixvideoformatenc_preview.h" -#include "mixvideoformatenc_h263.h" - -#include "mixvideoconfigparamsenc_h264.h" -#include "mixvideoconfigparamsenc_mpeg4.h" -#include "mixvideoconfigparamsenc_preview.h" -#include "mixvideoconfigparamsenc_h263.h" - - -#include "mixvideo.h" -#include "mixvideo_private.h" - -#ifdef ANDROID -#define mix_strcmp strcmp -#else -#define mix_strcmp g_strcmp0 -#endif - -#define USE_OPAQUE_POINTER - -#ifdef USE_OPAQUE_POINTER -#define MIX_VIDEO_PRIVATE(mix) (MixVideoPrivate *)(mix->context) -#else -#define MIX_VIDEO_PRIVATE(mix) MIX_VIDEO_GET_PRIVATE(mix) -#endif - -#define CHECK_INIT(mix, priv) \ - if (!mix) { \ - return MIX_RESULT_NULL_PTR; \ - } \ - priv = MIX_VIDEO_PRIVATE(mix); \ - if (!priv->initialized) { \ - LOG_E( "Not initialized\n"); \ - return MIX_RESULT_NOT_INIT; \ - } - -#define CHECK_INIT_CONFIG(mix, priv) \ - CHECK_INIT(mix, priv); \ - if (!priv->configured) { \ - LOG_E( "Not configured\n"); \ - return MIX_RESULT_NOT_CONFIGURED; \ - } - -/* - * default implementation of virtual methods - */ - -MIX_RESULT mix_video_get_version_default(MixVideo * mix, uint * major, - uint * minor); - -MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode, - MixVideoInitParams * init_params, MixDrmParams * drm_init_params); - -MIX_RESULT mix_video_deinitialize_default(MixVideo * mix); - -MIX_RESULT mix_video_configure_default(MixVideo * mix, - MixVideoConfigParams * config_params, MixDrmParams * drm_config_params); - -MIX_RESULT mix_video_get_config_default(MixVideo * mix, - MixVideoConfigParams ** config_params); - -MIX_RESULT mix_video_decode_default(MixVideo * mix, MixBuffer * bufin[], - int bufincnt, MixVideoDecodeParams * decode_params); - -MIX_RESULT mix_video_get_frame_default(MixVideo * mix, MixVideoFrame ** frame); - -MIX_RESULT mix_video_release_frame_default(MixVideo * mix, - MixVideoFrame * frame); - -MIX_RESULT mix_video_render_default(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame); - -MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], - int bufincnt, MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params); - -MIX_RESULT mix_video_flush_default(MixVideo * mix); - -MIX_RESULT mix_video_eos_default(MixVideo * mix); - -MIX_RESULT mix_video_get_state_default(MixVideo * mix, MixState * state); - -MIX_RESULT mix_video_get_mixbuffer_default(MixVideo * mix, MixBuffer ** buf); - -MIX_RESULT mix_video_release_mixbuffer_default(MixVideo * mix, MixBuffer * buf); - -MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, 
uint *max_size); - -MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, - MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); - -MIX_RESULT mix_video_get_new_userptr_for_surface_buffer_default (MixVideo * mix, uint width, uint height, uint format, - uint expected_size, uint *outsize, uint * stride, uint8 **usrptr); -static void mix_video_finalize(MixVideo * obj); -MIX_RESULT mix_video_configure_decode(MixVideo * mix, - MixVideoConfigParamsDec * config_params_dec, - MixDrmParams * drm_config_params); - - -MIX_RESULT mix_video_configure_encode(MixVideo * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixDrmParams * drm_config_params); - -static void mix_video_init(MixVideo * self); - -MixVideo::MixVideo() { - //context = malloc(sizeof(MixVideoPrivate)); - context = &mPriv; - get_version_func = mix_video_get_version_default; - initialize_func = mix_video_initialize_default; - deinitialize_func = mix_video_deinitialize_default; - configure_func = mix_video_configure_default; - get_config_func = mix_video_get_config_default; - decode_func = mix_video_decode_default; - get_frame_func = mix_video_get_frame_default; - release_frame_func = mix_video_release_frame_default; - render_func = mix_video_render_default; - - encode_func = mix_video_encode_default; - - flush_func = mix_video_flush_default; - eos_func = mix_video_eos_default; - get_state_func = mix_video_get_state_default; - get_mix_buffer_func = mix_video_get_mixbuffer_default; - release_mix_buffer_func = mix_video_release_mixbuffer_default; - - get_max_coded_buffer_size_func = mix_video_get_max_coded_buffer_size_default; - set_dynamic_enc_config_func = mix_video_set_dynamic_enc_config_default; - - get_new_usrptr_for_surface_buffer = mix_video_get_new_userptr_for_surface_buffer_default; - mix_video_init(this); - - ref_count = 1; - -} - -MixVideo::~MixVideo() { - mix_video_finalize(this); -} - -static void mix_video_init(MixVideo * self) { - - MixVideoPrivate *priv = MIX_VIDEO_GET_PRIVATE(self); - -#ifdef USE_OPAQUE_POINTER - self->context = priv; -#else - self->context = NULL; -#endif - - /* private structure initialization */ - mix_video_private_initialize(priv); -} - -MixVideo *mix_video_new(void) { - - MixVideo *ret = new MixVideo; - - return ret; -} - -void mix_video_finalize(MixVideo * mix) { - - /* clean up here. 
*/ - - mix_video_deinitialize(mix); -} - -MixVideo * -mix_video_ref(MixVideo * mix) { - if (NULL != mix) - mix->ref_count ++; - return mix; -} - -MixVideo * -mix_video_unref(MixVideo * mix) { - if (NULL != mix) { - mix->ref_count --; - if (mix->ref_count == 0) { - delete mix; - return NULL; - } - } - return mix; -} - -/* private methods */ -#define MIXUNREF(obj, unref) if(obj) { unref(obj); obj = NULL; } - -void mix_video_private_initialize(MixVideoPrivate* priv) { - priv->initialized = FALSE; - priv->configured = FALSE; - - /* libVA */ - priv->va_display = NULL; - priv->va_major_version = -1; - priv->va_major_version = -1; - - /* mix objects */ - priv->frame_manager = NULL; - priv->video_format = NULL; - - priv->video_format_enc = NULL; //for encoding - - priv->surface_pool = NULL; - priv->buffer_pool = NULL; - - priv->codec_mode = MIX_CODEC_MODE_DECODE; - priv->init_params = NULL; - priv->drm_params = NULL; - priv->config_params = NULL; - - /* - * usrptr shared buffer mode - */ - priv->requested_surface_info.surface_cnt = 0; - memset (priv->requested_surface_info.surface_allocated, 0 , sizeof (uint) * MAX_ENC_SURFACE_COUNT); - memset (priv->requested_surface_info.usrptr, 0 , sizeof (uint8 *) * MAX_ENC_SURFACE_COUNT); -} - -void mix_video_private_cleanup(MixVideoPrivate* priv) { - - VAStatus va_status; - - if (!priv) { - return; - } - - if (priv->video_format_enc) { - mix_videofmtenc_deinitialize(priv->video_format_enc); - } - - MIXUNREF(priv->frame_manager, mix_framemanager_unref) - MIXUNREF(priv->video_format, mix_videoformat_unref) - - MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref) - - //for encoding - MIXUNREF(priv->buffer_pool, mix_bufferpool_unref) - MIXUNREF(priv->surface_pool, mix_surfacepool_unref) - priv->requested_surface_info.surface_cnt = 0; - memset (priv->requested_surface_info.surface_allocated, 0 , sizeof (uint) * MAX_ENC_SURFACE_COUNT); - memset (priv->requested_surface_info.usrptr, 0 , sizeof (uint8 *) * MAX_ENC_SURFACE_COUNT); - /* MIXUNREF(priv->init_params, mix_videoinitparams_unref) */ - MIXUNREF(priv->drm_params, mix_drmparams_unref) - MIXUNREF(priv->config_params, mix_videoconfigparams_unref) - - /* terminate libVA */ - if (priv->va_display) { - va_status = vaTerminate(priv->va_display); - LOG_V( "vaTerminate\n"); - if (va_status != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaTerminate\n"); - } else { - priv->va_display = NULL; - } - } - - MIXUNREF(priv->init_params, mix_videoinitparams_unref) - - priv->va_major_version = -1; - priv->va_major_version = -1; - priv->codec_mode = MIX_CODEC_MODE_DECODE; - priv->initialized = FALSE; - priv->configured = FALSE; -} - -/* The following methods are defined in MI-X API */ - -MIX_RESULT mix_video_get_version_default(MixVideo * mix, uint * major, - uint * minor) { - if (!mix || !major || !minor) { - return MIX_RESULT_NULL_PTR; - } - - *major = MIXVIDEO_CURRENT - MIXVIDEO_AGE; - *minor = MIXVIDEO_AGE; - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode, - MixVideoInitParams * init_params, MixDrmParams * drm_init_params) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - MixDisplay *mix_display = NULL; - - LOG_V( "Begin\n"); - - if (!mix || !init_params) { - LOG_E( "!mix || !init_params\n"); - return MIX_RESULT_NULL_PTR; - } - - if (mode >= MIX_CODEC_MODE_LAST) { - LOG_E("mode >= MIX_CODEC_MODE_LAST\n"); - return MIX_RESULT_INVALID_PARAM; - } - -#if 0 //we have encoding support - /* TODO: We need to support encoding in the future */ - if 
(mode == MIX_CODEC_MODE_ENCODE) { - LOG_E("mode == MIX_CODEC_MODE_ENCODE\n"); - return MIX_RESULT_NOTIMPL; - } -#endif - - if (!MIX_IS_VIDEOINITPARAMS(init_params)) { - LOG_E("!MIX_IS_VIDEOINITPARAMS(init_params\n"); - return MIX_RESULT_INVALID_PARAM; - } - - priv = MIX_VIDEO_PRIVATE(mix); - - if (priv->initialized) { - LOG_W( "priv->initialized\n"); - return MIX_RESULT_ALREADY_INIT; - } - - /* clone mode */ - priv->codec_mode = mode; - - /* ref init_params */ - priv->init_params = (MixVideoInitParams *) mix_params_ref(MIX_PARAMS( - init_params)); - if (!priv->init_params) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "!priv->init_params\n"); - goto cleanup; - } - - /* NOTE: we don't do anything with drm_init_params */ - - /* libVA initialization */ - - { - VAStatus va_status; - Display *display = NULL; - ret = mix_videoinitparams_get_display(priv->init_params, &mix_display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get display 1\n"); - goto cleanup; - } -#ifndef ANDROID - if (MIX_IS_DISPLAYX11(mix_display)) { - MixDisplayX11 *mix_displayx11 = MIX_DISPLAYX11(mix_display); - ret = mix_displayx11_get_display(mix_displayx11, &display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get display 2\n"); - goto cleanup; - - } - } else { - /* TODO: add support to other MixDisplay type. For now, just return error!*/ - LOG_E("It is not display x11\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } -#else - if (MIX_IS_DISPLAYANDROID(mix_display)) { - MixDisplayAndroid *mix_displayandroid = MIX_DISPLAYANDROID(mix_display); - ret = mix_displayandroid_get_display(mix_displayandroid, (void**)&display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get display 2\n"); - goto cleanup; - - } - } else { - /* TODO: add support to other MixDisplay type. For now, just return error!*/ - LOG_E("It is not display android\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } -#endif - /* Now, we can initialize libVA */ - - LOG_V("Try to get vaDisplay : display = %x\n", display); - priv->va_display = vaGetDisplay(display); - - /* Oops! Fail to get VADisplay */ - if (!priv->va_display) { - ret = MIX_RESULT_FAIL; - LOG_E("Fail to get VADisplay\n"); - goto cleanup; - } - - /* Initialize libVA */ - va_status = vaInitialize(priv->va_display, &priv->va_major_version, - &priv->va_minor_version); - - /* Oops! 
Fail to initialize libVA */ - if (va_status != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Fail to initialize libVA\n"); - goto cleanup; - } - - /* TODO: check the version numbers of libVA */ - - priv->initialized = TRUE; - ret = MIX_RESULT_SUCCESS; - } - -cleanup: - - if (ret != MIX_RESULT_SUCCESS) { - mix_video_private_cleanup(priv); - } - - MIXUNREF(mix_display, mix_display_unref); - - LOG_V( "End\n"); - - return ret; -} - - -MIX_RESULT mix_video_deinitialize_default(MixVideo * mix) { - - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT(mix, priv); - - mix_video_private_cleanup(priv); - - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_video_configure_decode(MixVideo * mix, - MixVideoConfigParamsDec * config_params_dec, MixDrmParams * drm_config_params) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - MixVideoConfigParamsDec *priv_config_params_dec = NULL; - - char *mime_type = NULL; - uint fps_n, fps_d; - uint bufpoolsize = 0; - - MixFrameOrderMode frame_order_mode = MIX_FRAMEORDER_MODE_DISPLAYORDER; - MixDisplayOrderMode display_order_mode = MIX_DISPLAY_ORDER_UNKNOWN; - - LOG_V( "Begin\n"); - - CHECK_INIT(mix, priv); - - if (!config_params_dec) { - LOG_E( "!config_params_dec\n"); - return MIX_RESULT_NULL_PTR; - } - - if (!MIX_IS_VIDEOCONFIGPARAMSDEC(config_params_dec)) { - LOG_E("Not a MixVideoConfigParamsDec\n"); - return MIX_RESULT_INVALID_PARAM; - } - - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); - - /* - * MixVideo has already been configured, it should be - * re-configured. - * - * TODO: Allow MixVideo re-configuration - */ - if (priv->configured) { - ret = MIX_RESULT_SUCCESS; - LOG_W( "Already configured\n"); - goto cleanup; - } - - /* Make a copy of config_params */ - priv->config_params = (MixVideoConfigParams *) mix_params_dup(MIX_PARAMS( - config_params_dec)); - if (!priv->config_params) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Fail to duplicate config_params\n"); - goto cleanup; - } - - priv_config_params_dec = (MixVideoConfigParamsDec *)priv->config_params; - - /* Get fps, frame order mode and mime type from config_params */ - ret = mix_videoconfigparamsdec_get_mime_type(priv_config_params_dec, &mime_type); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get mime type\n"); - goto cleanup; - } - - LOG_I( "mime : %s\n", mime_type); - -#ifdef MIX_LOG_ENABLE - if (mix_strcmp(mime_type, "video/x-wmv") == 0) { - - LOG_I( "mime : video/x-wmv\n"); - if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { - LOG_I( "VC1 config_param\n"); - } else { - LOG_E("Not VC1 config_param\n"); - } - } -#endif - - ret = mix_videoconfigparamsdec_get_frame_order_mode(priv_config_params_dec, - &frame_order_mode); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to frame order mode\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsdec_get_frame_rate(priv_config_params_dec, &fps_n, - &fps_d); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get frame rate\n"); - goto cleanup; - } - - if (!fps_n) { - ret = MIX_RESULT_FAIL; - LOG_E( "fps_n is 0\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsdec_get_buffer_pool_size(priv_config_params_dec, - &bufpoolsize); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get buffer pool size\n"); - goto cleanup; - } - - /* create frame manager */ - priv->frame_manager = mix_framemanager_new(); - if (!priv->frame_manager) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create frame manager\n"); - goto cleanup; - } 
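-    /*
-     * Display order mode selection (a summary of the branches below):
-     * decode-order streams map to FIFO; mime types without H.264-style
-     * picture numbering (VC-1, MPEG, DivX, H.263, Xvid) are reordered by
-     * picture type; everything else falls back to picture-number (POC)
-     * ordering.
-     */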
- - if (frame_order_mode == MIX_FRAMEORDER_MODE_DECODEORDER) - { - display_order_mode = MIX_DISPLAY_ORDER_FIFO; - } - else if (mix_strcmp(mime_type, "video/x-wmv") == 0 || - mix_strcmp(mime_type, "video/mpeg") == 0 || - mix_strcmp(mime_type, "video/x-divx") == 0 || - mix_strcmp(mime_type, "video/x-h263") == 0 || - mix_strcmp(mime_type, "video/x-xvid") == 0 ) - { - display_order_mode = MIX_DISPLAY_ORDER_PICTYPE; - } - else - { - //display_order_mode = MIX_DISPLAY_ORDER_TIMESTAMP; - display_order_mode = MIX_DISPLAY_ORDER_PICNUMBER; - } - - /* initialize frame manager */ - ret = mix_framemanager_initialize(priv->frame_manager, - display_order_mode, fps_n, fps_d); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to initialize frame manager\n"); - goto cleanup; - } - - /* create buffer pool */ - priv->buffer_pool = mix_bufferpool_new(); - if (!priv->buffer_pool) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create buffer pool\n"); - goto cleanup; - } - - ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to initialize buffer pool\n"); - goto cleanup; - } - - /* Finally, we can create MixVideoFormat */ - /* What type of MixVideoFormat we need create? */ - - if (mix_strcmp(mime_type, "video/x-wmv") == 0 - && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) { - - MixVideoFormat_VC1 *video_format = mix_videoformat_vc1_new(); - if (!video_format) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create VC-1 video format\n"); - goto cleanup; - } - - /* TODO: work specific to VC-1 */ - - priv->video_format = MIX_VIDEOFORMAT(video_format); - - } else if (mix_strcmp(mime_type, "video/x-h264") == 0 - && MIX_IS_VIDEOCONFIGPARAMSDEC_H264(priv_config_params_dec)) { - - MixVideoFormat_H264 *video_format = mix_videoformat_h264_new(); - if (!video_format) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create H.264 video format\n"); - goto cleanup; - } - - /* TODO: work specific to H.264 */ - - priv->video_format = MIX_VIDEOFORMAT(video_format); - - } else if (mix_strcmp(mime_type, "video/mpeg") == 0 || - mix_strcmp(mime_type, "video/x-divx") == 0 || - mix_strcmp(mime_type, "video/x-h263") == 0 || - mix_strcmp(mime_type, "video/x-xvid") == 0 || - mix_strcmp(mime_type, "video/x-dx50") == 0) { - - uint version = 0; - - /* Is this mpeg4:2 ? */ - if (mix_strcmp(mime_type, "video/mpeg") == 0 || - mix_strcmp(mime_type, "video/x-h263") == 0 ) { - - /* - * we don't support mpeg other than mpeg verion 4 - */ - if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) { - ret = MIX_RESULT_NOT_SUPPORTED; - goto cleanup; - } - - /* what is the mpeg version ? */ - ret = mix_videoconfigparamsdec_mp42_get_mpegversion( - MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get mpeg version\n"); - goto cleanup; - } - - /* if it is not MPEG4 */ - if (version != 4) { - ret = MIX_RESULT_NOT_SUPPORTED; - goto cleanup; - } - - } else { - - /* config_param shall be MixVideoConfigParamsDecMP42 */ - if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) { - LOG_E("MIX_IS_VIDEOCONFIGPARAMSDEC_MP42 failed.\n"); - ret = MIX_RESULT_NOT_SUPPORTED; - goto cleanup; - } - - /* what is the divx version ? 
*/ - ret = mix_videoconfigparamsdec_mp42_get_divxversion( - MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get divx version\n"); - goto cleanup; - } - - /* if it is not divx 4 or 5 */ - if (version != 4 && version != 5) { - LOG_E("Invalid divx version.\n"); - ret = MIX_RESULT_NOT_SUPPORTED; - goto cleanup; - } - } - - MixVideoFormat_MP42 *video_format = mix_videoformat_mp42_new(); - if (!video_format) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create MPEG-4:2 video format\n"); - goto cleanup; - } - - /* TODO: work specific to MPEG-4:2 */ - priv->video_format = MIX_VIDEOFORMAT(video_format); - - } else { - - /* Oops! A format we don't know */ - - ret = MIX_RESULT_FAIL; - LOG_E("Unknown format, we can't handle it\n"); - goto cleanup; - } - - /* initialize MixVideoFormat */ - ret = mix_videofmt_initialize(priv->video_format, priv_config_params_dec, - priv->frame_manager, priv->buffer_pool, &priv->surface_pool, - priv->va_display); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed initialize video format\n"); - goto cleanup; - } - - mix_surfacepool_ref(priv->surface_pool); - - /* decide MixVideoFormat from mime_type*/ - - priv->configured = TRUE; - ret = MIX_RESULT_SUCCESS; - -cleanup: - - if (ret != MIX_RESULT_SUCCESS) { - MIXUNREF(priv->config_params, mix_videoconfigparams_unref); - MIXUNREF(priv->frame_manager, mix_framemanager_unref); - MIXUNREF(priv->buffer_pool, mix_bufferpool_unref); - MIXUNREF(priv->video_format, mix_videoformat_unref); - } - - if (mime_type) { - free(mime_type); - } - - priv->objlock.unlock(); - /* ---------------------- end lock --------------------- */ - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_video_configure_encode(MixVideo * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixDrmParams * drm_config_params) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - MixVideoConfigParamsEnc *priv_config_params_enc = NULL; - - - char *mime_type = NULL; - MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264; - uint bufpoolsize = 0; - - LOG_V( "Begin\n"); - - CHECK_INIT(mix, priv); - - if (!config_params_enc) { - LOG_E("!config_params_enc\n"); - return MIX_RESULT_NULL_PTR; - } - if (!MIX_IS_VIDEOCONFIGPARAMSENC(config_params_enc)) { - LOG_E("Not a MixVideoConfigParams\n"); - return MIX_RESULT_INVALID_PARAM; - } - - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); - - /* - * MixVideo has already been configured, it should be - * re-configured. 
- * - * TODO: Allow MixVideo re-configuration - */ - if (priv->configured) { - ret = MIX_RESULT_SUCCESS; - LOG_E( "Already configured\n"); - goto cleanup; - } - - /* Make a copy of config_params */ - priv->config_params = (MixVideoConfigParams *) mix_params_dup( - MIX_PARAMS(config_params_enc)); - if (!priv->config_params) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Fail to duplicate config_params\n"); - goto cleanup; - } - - priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params; - - /* Get fps, frame order mode and mime type from config_params */ - ret = mix_videoconfigparamsenc_get_mime_type(priv_config_params_enc, - &mime_type); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get mime type\n"); - goto cleanup; - } - - LOG_I( "mime : %s\n", mime_type); - - ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc, - &encode_format); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get target format\n"); - goto cleanup; - } - - LOG_I( "encode_format : %d\n", - encode_format); - - ret = mix_videoconfigparamsenc_get_buffer_pool_size( - priv_config_params_enc, &bufpoolsize); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get buffer pool size\n"); - goto cleanup; - } - - /* create frame manager */ - priv->frame_manager = mix_framemanager_new(); - if (!priv->frame_manager) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create frame manager\n"); - goto cleanup; - } - - /* initialize frame manager */ - /* frame rate can be any value for encoding. */ - ret = mix_framemanager_initialize(priv->frame_manager, MIX_DISPLAY_ORDER_FIFO, - 1, 1); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to initialize frame manager\n"); - goto cleanup; - } - - /* create buffer pool */ - priv->buffer_pool = mix_bufferpool_new(); - if (!priv->buffer_pool) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to create buffer pool\n"); - goto cleanup; - } - - ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to initialize buffer pool\n"); - goto cleanup; - } - - /* Finally, we can create MixVideoFormatEnc */ - /* What type of MixVideoFormatEnc we need create? 
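The encode path repeats the same shape, except that encode_format rather than the mime type steers the dispatch that follows, and the frame manager is pinned to FIFO order at 1/1 because frame rate is irrelevant for encoding. A hedged caller-side sketch, assuming the _set_ helpers mirror the getters used above:

    /* Illustrative sketch, not from the original source: configure an H.264
     * encode session. Constructor and setter names are assumed. */
    MixVideoConfigParamsEnc *cfg =
        MIX_VIDEOCONFIGPARAMSENC(mix_videoconfigparamsenc_h264_new()); /* assumed ctor */
    mix_videoconfigparamsenc_set_mime_type(cfg, "video/x-h264");       /* assumed setter */
    mix_videoconfigparamsenc_set_encode_format(cfg, MIX_ENCODE_TARGET_FORMAT_H264);
    mix_videoconfigparamsenc_set_buffer_pool_size(cfg, 8);

    MIX_RESULT res = mix_video_configure(mix, MIX_VIDEOCONFIGPARAMS(cfg), NULL);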
*/ - - if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264 - && MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) { - - MixVideoFormatEnc_H264 *video_format_enc = - mix_videoformatenc_h264_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n"); - goto cleanup; - } - - /* work specific to h264 encode */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_MPEG4 - && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) { - - MixVideoFormatEnc_MPEG4 *video_format_enc = mix_videoformatenc_mpeg4_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create mpeg-4:2 video format\n"); - goto cleanup; - } - - /* work specific to mpeg4 */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263 - && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) { - - MixVideoFormatEnc_H263 *video_format_enc = mix_videoformatenc_h263_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create h.263 video format\n"); - goto cleanup; - } - - /* work specific to h.263 */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW - && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) { - - MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "mix_video_configure_encode: Failed to create preview video format\n"); - goto cleanup; - } - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - else { - - /*unsupported format */ - ret = MIX_RESULT_NOT_SUPPORTED; - LOG_E("Unknown format, we can't handle it\n"); - goto cleanup; - } - - /* initialize MixVideoEncFormat */ - ret = mix_videofmtenc_initialize(priv->video_format_enc, - priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool, - &(priv->requested_surface_info), priv->va_display); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed initialize video format\n"); - goto cleanup; - } - - mix_surfacepool_ref(priv->surface_pool); - - priv->configured = TRUE; - ret = MIX_RESULT_SUCCESS; - -cleanup: - - if (ret != MIX_RESULT_SUCCESS) { - MIXUNREF(priv->frame_manager, mix_framemanager_unref); - MIXUNREF(priv->config_params, mix_videoconfigparams_unref); - MIXUNREF(priv->buffer_pool, mix_bufferpool_unref); - MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref); - } - - if (mime_type) { - free(mime_type); - } - - priv->objlock.unlock(); - /* ---------------------- end lock --------------------- */ - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_video_configure_default(MixVideo * mix, - MixVideoConfigParams * config_params, - MixDrmParams * drm_config_params) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT(mix, priv); - if (!config_params) { - LOG_E("!config_params\n"); - return MIX_RESULT_NULL_PTR; - } - - /*Decoder mode or Encoder mode*/ - if (priv->codec_mode == MIX_CODEC_MODE_DECODE && MIX_IS_VIDEOCONFIGPARAMSDEC(config_params)) { - ret = mix_video_configure_decode(mix, (MixVideoConfigParamsDec*)config_params, NULL); - } - else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE && MIX_IS_VIDEOCONFIGPARAMSENC(config_params)) { 
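        /* Encode path: the codec mode chosen at mix_video_initialize() time must
         * match the concrete MixVideoConfigParams subclass passed in; any other
         * combination falls through to the "Codec mode not supported" error below. */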
- ret = mix_video_configure_encode(mix, (MixVideoConfigParamsEnc*)config_params, NULL); - } - else { - LOG_E("Codec mode not supported\n"); - } - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT mix_video_get_config_default(MixVideo * mix, - MixVideoConfigParams ** config_params) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoPrivate *priv = NULL; - - CHECK_INIT_CONFIG(mix, priv); - - if (!config_params) { - LOG_E( "!config_params\n"); - return MIX_RESULT_NULL_PTR; - } - - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); - - *config_params = MIX_VIDEOCONFIGPARAMS(mix_params_dup(MIX_PARAMS(priv->config_params))); - if (!*config_params) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("Failed to duplicate MixVideoConfigParams\n"); - goto cleanup; - } - -cleanup: - - /* ---------------------- end lock --------------------- */ - priv->objlock.unlock(); - - LOG_V( "End\n"); - - return ret; - -} - -MIX_RESULT mix_video_decode_default(MixVideo * mix, MixBuffer * bufin[], - int bufincnt, MixVideoDecodeParams * decode_params) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - if (!bufin || !bufincnt || !decode_params) { - LOG_E( "!bufin || !bufincnt || !decode_params\n"); - return MIX_RESULT_NULL_PTR; - } - - // reset new sequence flag - decode_params->new_sequence = FALSE; - - //First check that we have surfaces available for decode - ret = mix_surfacepool_check_available(priv->surface_pool); - - if (ret == MIX_RESULT_POOLEMPTY) { - LOG_I( "Out of surface\n"); - return MIX_RESULT_OUTOFSURFACES; - } - - priv->objlock.lock(); - - ret = mix_videofmt_decode(priv->video_format, bufin, bufincnt, decode_params); - - priv->objlock.unlock(); - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_video_get_frame_default(MixVideo * mix, MixVideoFrame ** frame) { - - LOG_V( "Begin\n"); - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - CHECK_INIT_CONFIG(mix, priv); - - if (!frame) { - LOG_E( "!frame\n"); - return MIX_RESULT_NULL_PTR; - } - - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); - - LOG_V("Calling frame manager dequeue\n"); - - ret = mix_framemanager_dequeue(priv->frame_manager, frame); - - /* ---------------------- end lock --------------------- */ - priv->objlock.unlock(); - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_video_release_frame_default(MixVideo * mix, - MixVideoFrame * frame) { - - LOG_V( "Begin\n"); - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - CHECK_INIT_CONFIG(mix, priv); - - if (!frame) { - LOG_E( "!frame\n"); - return MIX_RESULT_NULL_PTR; - } - - /* - * We don't need lock here. MixVideoFrame has lock to - * protect itself. 
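Because each MixVideoFrame protects itself with its own lock and reference count, the caller-side pattern is a plain acquire/use/release cycle. A minimal sketch, using only names that appear in this file:

    /* Illustrative sketch: pull, render and release one decoded frame. */
    MixVideoFrame *frame = NULL;
    if (mix_video_get_frame(mix, &frame) == MIX_RESULT_SUCCESS) {
        mix_video_render(mix, render_params, frame); /* render_params prepared by the caller */
        mix_video_release_frame(mix, frame);         /* drops the caller's reference */
    }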
- */ -#if 0 - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); -#endif - - LOG_I("Releasing reference frame %x\n", (uint) frame); - mix_videoframe_unref(frame); - - ret = MIX_RESULT_SUCCESS; - -#if 0 - /* ---------------------- end lock --------------------- */ - priv->objlock.unlock(); -#endif - - LOG_V( "End\n"); - - return ret; - -} - -#ifdef ANDROID - -MIX_RESULT mix_video_render_default(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame) { - - return MIX_RESULT_NOTIMPL; -} - -#else -MIX_RESULT mix_video_render_default(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame) { - - LOG_V( "Begin\n"); - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - MixDisplay *mix_display = NULL; - MixDisplayX11 *mix_display_x11 = NULL; - - Display *display = NULL; - - Drawable drawable = 0; - MixRect src_rect, dst_rect; - - VARectangle *va_cliprects = NULL; - uint number_of_cliprects = 0; - - /* VASurfaceID va_surface_id; */ - ulong va_surface_id; - VAStatus va_status; - - bool sync_flag = FALSE; - - CHECK_INIT_CONFIG(mix, priv); - - if (!render_params || !frame) { - LOG_E( "!render_params || !frame\n"); - return MIX_RESULT_NULL_PTR; - } - - /* Is this render param valid? */ - if (!MIX_IS_VIDEORENDERPARAMS(render_params)) { - LOG_E("Not MixVideoRenderParams\n"); - return MIX_RESULT_INVALID_PARAM; - } - - /* - * We don't need lock here. priv->va_display may be the only variable - * seems need to be protected. But, priv->va_display is initialized - * when mixvideo object is initialized, and it keeps - * the same value thoughout the life of mixvideo. - */ -#if 0 - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); -#endif - - /* get MixDisplay prop from render param */ - ret = mix_videorenderparams_get_display(render_params, &mix_display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get mix_display\n"); - goto cleanup; - } - - /* Is this MixDisplayX11 ? 
*/ - /* TODO: we shall also support MixDisplay other than MixDisplayX11 */ - if (!MIX_IS_DISPLAYX11(mix_display)) { - ret = MIX_RESULT_INVALID_PARAM; - LOG_E( "Not MixDisplayX11\n"); - goto cleanup; - } - - /* cast MixDisplay to MixDisplayX11 */ - mix_display_x11 = MIX_DISPLAYX11(mix_display); - - /* Get Drawable */ - ret = mix_displayx11_get_drawable(mix_display_x11, &drawable); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Failed to get drawable\n"); - goto cleanup; - } - - /* Get Display */ - ret = mix_displayx11_get_display(mix_display_x11, &display); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Failed to get display\n"); - goto cleanup; - } - - /* get src_rect */ - ret = mix_videorenderparams_get_src_rect(render_params, &src_rect); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get SOURCE src_rect\n"); - goto cleanup; - } - - /* get dst_rect */ - ret = mix_videorenderparams_get_dest_rect(render_params, &dst_rect); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Failed to get dst_rect\n"); - goto cleanup; - } - - /* get va_cliprects */ - ret = mix_videorenderparams_get_cliprects_internal(render_params, - &va_cliprects, &number_of_cliprects); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get va_cliprects\n"); - goto cleanup; - } - - /* get surface id from frame */ - ret = mix_videoframe_get_frame_id(frame, &va_surface_id); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get va_surface_id\n"); - goto cleanup; - } - uint64 timestamp = 0; - mix_videoframe_get_timestamp(frame, ×tamp); - LOG_V( "Displaying surface ID %d, timestamp %"UINT64_FORMAT"\n", (int)va_surface_id, timestamp); - - uint32 frame_structure = 0; - mix_videoframe_get_frame_structure(frame, &frame_structure); - - ret = mix_videoframe_get_sync_flag(frame, &sync_flag); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get sync_flag\n"); - goto cleanup; - } - - if (!sync_flag) { - ret = mix_videoframe_set_sync_flag(frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - goto cleanup; - } - - va_status = vaSyncSurface(priv->va_display, va_surface_id); - if (va_status != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed vaSyncSurface() : va_status = 0x%x\n", va_status); - goto cleanup; - } - } - - - /* TODO: the last param of vaPutSurface is de-interlacing flags, - what is value shall be*/ - va_status = vaPutSurface(priv->va_display, (VASurfaceID) va_surface_id, - drawable, src_rect.x, src_rect.y, src_rect.width, src_rect.height, - dst_rect.x, dst_rect.y, dst_rect.width, dst_rect.height, - va_cliprects, number_of_cliprects, frame_structure); - - if (va_status != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("Failed vaPutSurface() : va_status = 0x%x\n", va_status); - goto cleanup; - } - - ret = MIX_RESULT_SUCCESS; - -cleanup: - - MIXUNREF(mix_display, mix_display_unref) - /* MIXUNREF(render_params, mix_videorenderparams_unref)*/ - -#if 0 - /* ---------------------- end lock --------------------- */ - priv->objlock.unlock(); -#endif - - LOG_V( "End\n"); - - return ret; - -} -#endif /* ANDROID */ - - -MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[], - int bufincnt, MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - if (!bufin || !bufincnt) { //we won't check encode_params here, it's just a placeholder - LOG_E( "!bufin || !bufincnt\n"); - return MIX_RESULT_NULL_PTR; - } - - //First 
check that we have surfaces available for decode - ret = mix_surfacepool_check_available(priv->surface_pool); - - if (ret == MIX_RESULT_POOLEMPTY) { - LOG_I( "Out of surface\n"); - return MIX_RESULT_OUTOFSURFACES; - } - - - priv->objlock.lock(); - - ret = mix_videofmtenc_encode(priv->video_format_enc, bufin, bufincnt, - iovout, iovoutcnt, encode_params); - - priv->objlock.unlock(); - - LOG_V( "End\n"); - return ret; -} - -MIX_RESULT mix_video_flush_default(MixVideo * mix) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); - - if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) { - ret = mix_videofmt_flush(priv->video_format); - - ret = mix_framemanager_flush(priv->frame_manager); - } - else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE - && priv->video_format_enc != NULL) { - /*No framemanager for encoder now*/ - ret = mix_videofmtenc_flush(priv->video_format_enc); - } - else { - priv->objlock.unlock(); - LOG_E("Invalid video_format/video_format_enc Pointer\n"); - return MIX_RESULT_NULL_PTR; - } - - /* ---------------------- end lock --------------------- */ - priv->objlock.unlock(); - - LOG_V( "End\n"); - - return ret; - -} - -MIX_RESULT mix_video_eos_default(MixVideo * mix) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); - - if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) { - ret = mix_videofmt_eos(priv->video_format); - - /* We should not call mix_framemanager_eos() here. - * MixVideoFormat* is responsible to call this function. - * Commnet the function call here! 
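Since mix_video_eos() only forwards the signal (the frame manager raises its own EOS flag), a decoding caller drains by polling mix_video_get_frame() until it reports end of stream, which per the API documentation further below is MIX_RESULT_EOS. A sketch of that drain loop:

    /* Illustrative sketch: signal end of stream, then drain decoded frames. */
    mix_video_eos(mix);
    for (;;) {
        MixVideoFrame *frame = NULL;
        MIX_RESULT res = mix_video_get_frame(mix, &frame);
        if (res == MIX_RESULT_EOS)
            break;                 /* all remaining frames have been delivered */
        if (res != MIX_RESULT_SUCCESS)
            continue;              /* e.g. MIX_RESULT_FRAME_NOTAVAIL; retry */
        mix_video_render(mix, render_params, frame);
        mix_video_release_frame(mix, frame);
    }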
- */ - /* frame manager will set EOS flag to be TRUE */ - /* ret = mix_framemanager_eos(priv->frame_manager); */ - } - else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE - && priv->video_format_enc != NULL) { - /*No framemanager now*/ - ret = mix_videofmtenc_eos(priv->video_format_enc); - } - else { - priv->objlock.unlock(); - LOG_E("Invalid video_format/video_format_enc Pointer\n"); - return MIX_RESULT_NULL_PTR; - } - - /* ---------------------- end lock --------------------- */ - priv->objlock.unlock(); - - LOG_V( "End\n"); - - return ret; -} - -MIX_RESULT mix_video_get_state_default(MixVideo * mix, MixState * state) { - - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - - if (!state) { - LOG_E( "!state\n"); - return MIX_RESULT_NULL_PTR; - } - - *state = MIX_STATE_CONFIGURED; - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_video_get_mixbuffer_default(MixVideo * mix, MixBuffer ** buf) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - - if (!buf) { - LOG_E( "!buf\n"); - return MIX_RESULT_INVALID_PARAM; - } - - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); - - ret = mix_bufferpool_get(priv->buffer_pool, buf); - - /* ---------------------- end lock --------------------- */ - priv->objlock.unlock(); - - LOG_V( "End ret = 0x%x\n", ret); - - return ret; - -} - -MIX_RESULT mix_video_release_mixbuffer_default(MixVideo * mix, MixBuffer * buf) { - - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - - if (!buf) { - LOG_E( "!buf\n"); - return MIX_RESULT_INVALID_PARAM; - } - - /* ---------------------- begin lock --------------------- */ - priv->objlock.lock(); - - mix_buffer_unref(buf); - - /* ---------------------- end lock --------------------- */ - priv->objlock.unlock(); - - LOG_V( "End\n"); - return ret; - -} - - -MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, uint *max_size) -{ - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - if (!mix || !max_size) /* TODO: add other parameter NULL checking */ - { - LOG_E( "!mix || !bufsize\n"); - return MIX_RESULT_NULL_PTR; - } - - CHECK_INIT_CONFIG(mix, priv); - - priv->objlock.lock(); - - ret = mix_videofmtenc_get_max_coded_buffer_size(priv->video_format_enc, max_size); - - priv->objlock.unlock(); - - LOG_V( "End\n"); - return ret; -} - - -MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix, - MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) -{ - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - - LOG_V( "Begin\n"); - - CHECK_INIT_CONFIG(mix, priv); - - if (dynamic_params == NULL) { - LOG_E( - "dynamic_params == NULL\n"); - return MIX_RESULT_FAIL; - } - - MixVideoConfigParamsEnc *priv_config_params_enc = NULL; - if (priv->config_params) { - /* - * FIXME: It would be better to use ref/unref - */ - priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params; - //priv_config_params_enc = mix_videoconfigparamsenc_ref (priv->config_params); - } - else { - LOG_E( - "priv->config_params is invalid\n"); - return MIX_RESULT_FAIL; - } - - priv->objlock.lock(); - - switch (params_type) { - case MIX_ENC_PARAMS_BITRATE: - { - ret = mix_videoconfigparamsenc_set_bit_rate (priv_config_params_enc, dynamic_params->bitrate); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed 
mix_videoconfigparamsenc_set_bit_rate\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_INIT_QP: - { - ret = mix_videoconfigparamsenc_set_init_qp (priv_config_params_enc, dynamic_params->init_QP); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_init_qp\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_MIN_QP: - { - ret = mix_videoconfigparamsenc_set_min_qp (priv_config_params_enc, dynamic_params->min_QP); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_min_qp\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_WINDOW_SIZE: - { - ret = mix_videoconfigparamsenc_set_window_size (priv_config_params_enc, dynamic_params->window_size); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_window_size\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_TARGET_PERCENTAGE: - { - ret = mix_videoconfigparamsenc_set_target_percentage (priv_config_params_enc, dynamic_params->target_percentage); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_target_percentage\n"); - goto cleanup; - } - } - break; - - - case MIX_ENC_PARAMS_MTU_SLICE_SIZE: - { - ret = mix_videoconfigparamsenc_set_max_slice_size(priv_config_params_enc, dynamic_params->max_slice_size); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_max_slice_size\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_SLICE_NUM: - { - /* - */ - MixVideoConfigParamsEncH264 * config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); - - ret = mix_videoconfigparamsenc_h264_set_slice_num (config_params_enc_h264, dynamic_params->slice_num); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_h264_set_slice_num\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_I_SLICE_NUM: - { - /* - */ - MixVideoConfigParamsEncH264 * config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); - - ret = mix_videoconfigparamsenc_h264_set_I_slice_num (config_params_enc_h264, dynamic_params->I_slice_num); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_h264_set_I_slice_num\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_P_SLICE_NUM: - { - /* - */ - MixVideoConfigParamsEncH264 * config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); - - ret = mix_videoconfigparamsenc_h264_set_P_slice_num (config_params_enc_h264, dynamic_params->P_slice_num); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_h264_set_P_slice_num\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_IDR_INTERVAL: - { - MixVideoConfigParamsEncH264 * config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params); - - ret = mix_videoconfigparamsenc_h264_set_IDR_interval(config_params_enc_h264, dynamic_params->idr_interval); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_h264_set_IDR_interval\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_RC_MODE: - case MIX_ENC_PARAMS_RESOLUTION: - { - /* - * Step 1: Release videofmtenc Object - */ - if (priv->video_format_enc) { - mix_videofmtenc_deinitialize(priv->video_format_enc); - } - - MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref) - - //priv->alloc_surface_cnt = 0; //Surfaces are also released, we need to set alloc_surface_cnt to 0 - - /* - * Please note there maybe issue here for usrptr shared buffer mode - */ - - /* - * Step 2: 
Change configuration parameters (frame size) - */ - - if (params_type == MIX_ENC_PARAMS_RESOLUTION) { - ret = mix_videoconfigparamsenc_set_picture_res (priv_config_params_enc, dynamic_params->width, dynamic_params->height); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_picture_res\n"); - goto cleanup; - } - } - else if (params_type == MIX_ENC_PARAMS_RC_MODE) { - LOG_E("set dynamic_params->rc_mode = %d", dynamic_params->rc_mode); - ret = mix_videoconfigparamsenc_set_rate_control(priv_config_params_enc, dynamic_params->rc_mode); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_rate_control\n"); - goto cleanup; - } - } - - - /* - * Step 3: Renew mixvideofmtenc object - */ - - MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264; - - ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc, - &encode_format); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to get target format\n"); - goto cleanup; - } - - if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264 - && MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) { - - MixVideoFormatEnc_H264 *video_format_enc = - mix_videoformatenc_h264_new(); - - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n"); - goto cleanup; - } - - /* work specific to h264 encode */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_MPEG4 - && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) { - - MixVideoFormatEnc_MPEG4 *video_format_enc = mix_videoformatenc_mpeg4_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create mpeg-4:2 video format\n"); - goto cleanup; - } - - /* work specific to mpeg4 */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263 - && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) { - - MixVideoFormatEnc_H263 *video_format_enc = mix_videoformatenc_h263_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E("mix_video_configure_encode: Failed to create h.263 video format\n"); - goto cleanup; - } - - /* work specific to h.263 */ - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - - else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW - && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) { - - MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new(); - if (!video_format_enc) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "mix_video_configure_encode: Failed to create preview video format\n"); - goto cleanup; - } - - priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc); - - } - else { - - /*unsupported format */ - ret = MIX_RESULT_NOT_SUPPORTED; - LOG_E("Unknown format, we can't handle it\n"); - goto cleanup; - } - - - /* - * Step 4: Re-initialize and start a new encode session, of course with new resolution value - */ - - /* - * Initialize MixVideoEncFormat - */ - - /* - * If we are using usrptr shared buffer mode, alloc_surfaces/usrptr/alloc_surface_cnt - * will be re-requested by v4l2camsrc, how to differetiate old surface pools and new one - * is a problem. 
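Outside of the RC_MODE/RESOLUTION branch, every case in this switch reduces to "update the stored config object, then push it to the encoder through mix_videofmtenc_set_dynamic_enc_config()". A caller-side sketch for the common bitrate case, using the field and enum names the switch above reads:

    /* Illustrative sketch: retune the encoder bitrate mid-session. */
    MixEncDynamicParams dynamic_params = {}; /* zero-initialize; only bitrate is read */
    dynamic_params.bitrate = 2000000;        /* 2 Mbit/s, consumed by MIX_ENC_PARAMS_BITRATE */
    MIX_RESULT res = mix_video_set_dynamic_enc_config(mix,
            MIX_ENC_PARAMS_BITRATE, &dynamic_params);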
- */ - - /* - * priv->alloc_surface_cnt already been reset to 0 after calling mix_videofmtenc_initialize - * For dynamic frame size change, upstream element need to re-call buffer allocation method - * and priv->alloc_surface_cnt will get a new value. - */ - //priv->alloc_surface_cnt = 5; - ret = mix_videofmtenc_initialize(priv->video_format_enc, - priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool, - &(priv->requested_surface_info), priv->va_display); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed initialize video format\n"); - goto cleanup; - } - - mix_surfacepool_ref(priv->surface_pool); - - - } - break; - case MIX_ENC_PARAMS_GOP_SIZE: - { - ret = mix_videoconfigparamsenc_set_intra_period (priv_config_params_enc, dynamic_params->intra_period); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_intra_period\n"); - goto cleanup; - } - - } - break; - case MIX_ENC_PARAMS_FRAME_RATE: - { - ret = mix_videoconfigparamsenc_set_frame_rate (priv_config_params_enc, dynamic_params->frame_rate_num, dynamic_params->frame_rate_denom); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_frame_rate\n"); - goto cleanup; - } - } - break; - case MIX_ENC_PARAMS_FORCE_KEY_FRAME: - { - /* - * nothing to be done now. - */ - } - break; - - case MIX_ENC_PARAMS_REFRESH_TYPE: - { - ret = mix_videoconfigparamsenc_set_refresh_type(priv_config_params_enc, dynamic_params->refresh_type); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_refresh_type\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_AIR: - { - ret = mix_videoconfigparamsenc_set_AIR_params(priv_config_params_enc, dynamic_params->air_params); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_AIR_params\n"); - goto cleanup; - } - } - break; - - case MIX_ENC_PARAMS_CIR_FRAME_CNT: - { - ret = mix_videoconfigparamsenc_set_CIR_frame_cnt (priv_config_params_enc, dynamic_params->CIR_frame_cnt); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_set_CIR_frame_cnt\n"); - goto cleanup; - } - - } - break; - - default: - break; - } - - ret = mix_videofmtenc_set_dynamic_enc_config (priv->video_format_enc, priv_config_params_enc, params_type); - -cleanup: - - priv->objlock.unlock(); - - LOG_V( "End ret = 0x%x\n", ret); - - return ret; -} - - -MIX_RESULT mix_video_get_new_userptr_for_surface_buffer_default (MixVideo * mix, uint width, uint height, uint format, - uint expected_size, uint *outsize, uint * stride, uint8 **usrptr) -{ - MIX_RESULT ret = MIX_RESULT_FAIL; - MixVideoPrivate *priv = NULL; - VAStatus va_status = VA_STATUS_SUCCESS; - - VASurfaceID surface = VA_INVALID_SURFACE; - VAImage image; - int index = 0; - - LOG_V( "Begin\n"); - CHECK_INIT(mix, priv); - - /* - * If mixvideo has been configured, we can not request surface creation anymore - */ - if (priv->configured) { - LOG_E( "Already configured, can not request VA surface anymore\n"); - return MIX_RESULT_WRONG_STATE; - } - - - if (width<=0 || height<=0 ||outsize == NULL ||stride == NULL || usrptr == NULL) { - LOG_E("width<=0 || height<=0 || outsize == NULL || stride == NULL ||usrptr == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - /* - * Current only NV12 is supported in VA API - * Through format we can get known the number of planes - */ - if (format != MIX_STRING_TO_FOURCC("NV12")) { - LOG_W ("Format is not supported\n"); - return MIX_RESULT_NOT_SUPPORTED; - } - - priv->objlock.lock(); - - - if 
(priv->requested_surface_info.surface_cnt >= MAX_ENC_SURFACE_COUNT) { - LOG_E("Usr created Surface count is exceed max number!\n"); - goto cleanup; - } -#if 0 //jgl - va_status = vaCreateSurfacesForUserPtr ( - priv->va_display, width, height, VA_RT_FORMAT_YUV420, 1, - &surface, expected_size, VA_FOURCC_NV12, width, width, width, - 0, width * height, width * height); -#endif - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed vaCreateSurfaces\n"); - goto cleanup; - } - - va_status = vaDeriveImage(priv->va_display, surface, &image); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaDeriveImage\n"); - goto cleanup; - } - - LOG_V( "vaDeriveImage Done\n"); - - //priv->src_image [priv->alloc_surface_cnt] = image; - - va_status = vaMapBuffer (priv->va_display, image.buf, (void **) usrptr); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed to vaMapBuffer\n"); - goto cleanup; - } - -#if 1 - for (index = 0; index < image.data_size; index = index + 4096) { - unsigned char tmp = *(*usrptr + index); - if (tmp == 0) - *(*usrptr + index) = 0; - } -#endif - - *outsize = image.data_size; - *stride = image.pitches[0]; - - priv->requested_surface_info.surface_allocated[priv->requested_surface_info.surface_cnt] = surface; - priv->requested_surface_info.usrptr[priv->requested_surface_info.surface_cnt] = *usrptr; - - LOG_I( "surface = 0x%08x\n",(uint)surface); - LOG_I("image->pitches[0] = %d\n", image.pitches[0]); - LOG_I("image->pitches[1] = %d\n", image.pitches[1]); - LOG_I("image->offsets[0] = %d\n", image.offsets[0]); - LOG_I("image->offsets[1] = %d\n", image.offsets[1]); - LOG_I("image->num_planes = %d\n", image.num_planes); - LOG_I("image->width = %d\n", image.width); - LOG_I("image->height = %d\n", image.height); - LOG_I("data_size = %d\n", image.data_size); - LOG_I("usrptr = 0x%08x\n", *usrptr); - LOG_I("surface_cnt = %d\n", priv->requested_surface_info.surface_cnt); - LOG_I ("priv->usrptr[%d] = 0x%08x\n ", - priv->requested_surface_info.surface_cnt, - priv->requested_surface_info.usrptr[priv->requested_surface_info.surface_cnt]); - - va_status = vaUnmapBuffer(priv->va_display, image.buf); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaUnmapBuffer\n"); - goto cleanup; - } - - va_status = vaDestroyImage(priv->va_display, image.image_id); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaDestroyImage\n"); - goto cleanup; - } - - if (*outsize < expected_size) { - LOG_E ("Allocated buffer size is small than the expected size, destroy the surface"); - LOG_I ("Allocated size is %d, expected size is %d\n", *outsize, expected_size); - va_status = vaDestroySurfaces(priv->va_display, &surface, 1); - goto cleanup; - } - - priv->requested_surface_info.surface_cnt ++; - - ret = MIX_RESULT_SUCCESS; - -cleanup: - - priv->objlock.unlock(); - - LOG_V( "End\n"); - return ret; - -} -/* - * API functions - */ - -#define CHECK_AND_GET_MIX_CLASS(mix, klass) \ - if (!mix) { \ - return MIX_RESULT_NULL_PTR; \ - } \ - if (!MIX_IS_VIDEO(mix)) { \ - LOG_E( "Not MixVideo\n"); \ - return MIX_RESULT_INVALID_PARAM; \ - } - - -MIX_RESULT mix_video_get_version(MixVideo * mix, uint * major, uint * minor) { - - return mix->get_version_func(mix, major, minor); - -} - -MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode, - MixVideoInitParams * init_params, MixDrmParams * drm_init_params) { - - return mix->initialize_func(mix, mode, init_params, drm_init_params); -} - -MIX_RESULT mix_video_deinitialize(MixVideo * mix) { - - return mix->deinitialize_func(mix); -} - -MIX_RESULT 
mix_video_configure(MixVideo * mix, - MixVideoConfigParams * config_params, - MixDrmParams * drm_config_params) { - - return mix->configure_func(mix, config_params, drm_config_params); -} - -MIX_RESULT mix_video_get_config(MixVideo * mix, - MixVideoConfigParams ** config_params_dec) { - - return mix->get_config_func(mix, config_params_dec); - -} - -MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], int bufincnt, - MixVideoDecodeParams * decode_params) { - - return mix->decode_func(mix, bufin, bufincnt, - decode_params); - -} - -MIX_RESULT mix_video_get_frame(MixVideo * mix, MixVideoFrame ** frame) { - - return mix->get_frame_func(mix, frame); - -} - -MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame) { - - return mix->release_frame_func(mix, frame); -} - -MIX_RESULT mix_video_render(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame) { - - return mix->render_func(mix, render_params, frame); -} - -MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], int bufincnt, - MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params) { - - return mix->encode_func(mix, bufin, bufincnt, iovout, iovoutcnt, - encode_params); -} - -MIX_RESULT mix_video_flush(MixVideo * mix) { - - return mix->flush_func(mix); -} - -MIX_RESULT mix_video_eos(MixVideo * mix) { - - return mix->eos_func(mix); -} - -MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state) { - return mix->get_state_func(mix, state); -} - -MIX_RESULT mix_video_get_mixbuffer(MixVideo * mix, MixBuffer ** buf) { - - return mix->get_mix_buffer_func(mix, buf); -} - -MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf) { - - return mix->release_mix_buffer_func(mix, buf); -} - -MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, uint *bufsize) { - return mix->get_max_coded_buffer_size_func(mix, bufsize); -} - -MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, - MixEncParamsType params_type, MixEncDynamicParams * dynamic_params) { - return mix->set_dynamic_enc_config_func(mix, params_type, dynamic_params); -} -MIX_RESULT mix_video_get_new_userptr_for_surface_buffer (MixVideo * mix, uint width, uint height, uint format, - uint expected_size, uint *outsize, uint * stride, uint8 **usrptr) { - return mix->get_new_usrptr_for_surface_buffer(mix, width, height, format, expected_size, outsize, stride, usrptr); - -} diff --git a/mix_video/src/mixvideo.h b/mix_video/src/mixvideo.h deleted file mode 100644 index bf63b0c..0000000 --- a/mix_video/src/mixvideo.h +++ /dev/null @@ -1,574 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
- - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEO_H__ -#define __MIX_VIDEO_H__ - - - -#include -#include "mixvideoinitparams.h" -#include "mixvideoconfigparamsdec.h" -#include "mixvideodecodeparams.h" -#include "mixvideoconfigparamsenc.h" -#include "mixvideoencodeparams.h" -#include "mixvideorenderparams.h" -#include "mixvideocaps.h" -#include "mixbuffer.h" -#include "mixvideo_private.h" - - -class MixVideo; -/* - * Virtual methods typedef - */ - -typedef MIX_RESULT (*MixVideoGetVersionFunc)(MixVideo * mix, uint * major, - uint * minor); - -typedef MIX_RESULT (*MixVideoInitializeFunc)(MixVideo * mix, MixCodecMode mode, - MixVideoInitParams * init_params, MixDrmParams * drm_init_params); - -typedef MIX_RESULT (*MixVideoDeinitializeFunc)(MixVideo * mix); - -typedef MIX_RESULT (*MixVideoConfigureFunc)(MixVideo * mix, - MixVideoConfigParams * config_params, - MixDrmParams * drm_config_params); - -typedef MIX_RESULT (*MixVideoGetConfigFunc)(MixVideo * mix, - MixVideoConfigParams ** config_params); - -typedef MIX_RESULT (*MixVideoDecodeFunc)(MixVideo * mix, MixBuffer * bufin[], - int bufincnt, MixVideoDecodeParams * decode_params); - -typedef MIX_RESULT (*MixVideoGetFrameFunc)(MixVideo * mix, - MixVideoFrame ** frame); - -typedef MIX_RESULT (*MixVideoReleaseFrameFunc)(MixVideo * mix, - MixVideoFrame * frame); - -typedef MIX_RESULT (*MixVideoRenderFunc)(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame); - - -typedef MIX_RESULT (*MixVideoEncodeFunc)(MixVideo * mix, MixBuffer * bufin[], - int bufincnt, MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params); - - -typedef MIX_RESULT (*MixVideoFlushFunc)(MixVideo * mix); - -typedef MIX_RESULT (*MixVideoEOSFunc)(MixVideo * mix); - -typedef MIX_RESULT (*MixVideoGetStateFunc)(MixVideo * mix, MixState * state); - -typedef MIX_RESULT (*MixVideoGetMixBufferFunc)(MixVideo * mix, MixBuffer ** buf); - -typedef MIX_RESULT (*MixVideoReleaseMixBufferFunc)(MixVideo * mix, - MixBuffer * buf); - -typedef MIX_RESULT (*MixVideoGetMaxCodedBufferSizeFunc) (MixVideo * mix, - uint *max_size); - - -typedef MIX_RESULT (*MixVideoSetDynamicEncConfigFunc) (MixVideo * mix, - MixEncParamsType params_type, MixEncDynamicParams * dynamic_params); - -typedef MIX_RESULT (*MixVideoGetNewUsrptrForSurfaceBufferFunc) (MixVideo * mix, - uint width, uint height, uint format, uint expected_size, - uint *outsize, uint * stride, uint8 **usrptr); - -/** - * MixVideo: - * @parent: Parent object. 
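The typedef block above defines one function-pointer type per public entry point; the MixVideo class below stores one member of each, and the mix_video_*() wrappers at the end of mixvideo.cpp forward through them. In effect this is a hand-rolled vtable, so a single entry point can be swapped without touching the others. A sketch, not part of the original source:

    /* Illustrative sketch: override one dispatch slot on a MixVideo instance. */
    static MIX_RESULT my_render(MixVideo *mix, MixVideoRenderParams *render_params,
                                MixVideoFrame *frame) {
        /* custom rendering goes here */
        return MIX_RESULT_SUCCESS;
    }

    /* after mix_video_new(): */
    mix->render_func = my_render;  /* mix_video_render() now calls my_render() */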
- * - * MI-X Video object - */ -class MixVideo { -public: - MixVideo(); - ~MixVideo(); - -public: - /*< private > */ - void* context; - uint ref_count; - MixVideoPrivate mPriv; - -public: - /*< virtual public >*/ - MixVideoGetVersionFunc get_version_func; - MixVideoInitializeFunc initialize_func; - MixVideoDeinitializeFunc deinitialize_func; - MixVideoConfigureFunc configure_func; - MixVideoGetConfigFunc get_config_func; - MixVideoDecodeFunc decode_func; - MixVideoGetFrameFunc get_frame_func; - MixVideoReleaseFrameFunc release_frame_func; - MixVideoRenderFunc render_func; - MixVideoEncodeFunc encode_func; - MixVideoFlushFunc flush_func; - MixVideoEOSFunc eos_func; - MixVideoGetStateFunc get_state_func; - MixVideoGetMixBufferFunc get_mix_buffer_func; - MixVideoReleaseMixBufferFunc release_mix_buffer_func; - MixVideoGetMaxCodedBufferSizeFunc get_max_coded_buffer_size_func; - MixVideoSetDynamicEncConfigFunc set_dynamic_enc_config_func; - MixVideoGetNewUsrptrForSurfaceBufferFunc get_new_usrptr_for_surface_buffer; -}; - -/** - * mix_video_new: - * @returns: A newly allocated instance of #MixVideo - * - * Use this method to create new instance of #MixVideo - */ -MixVideo *mix_video_new(void); - -/** - * mix_video_ref: - * @mix: object to add reference - * @returns: the MixVideo instance where reference count has been increased. - * - * Add reference count. - */ -MixVideo *mix_video_ref(MixVideo * mix); - -/** - * mix_video_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -MixVideo * -mix_video_unref(MixVideo * mix) ; - -/* Class Methods */ - -/** - * mix_video_get_version: - * @mix: #MixVideo object. - * @major: Pointer to an unsigned integer indicating the major version number of this MI-X Video library - * @minor: Pointer to an unsigned integer indicating the minor version number of this MI-X Video library - * @returns: Common Video Error Return Codes - * - * This function will return the major and minor version numbers of the library. - */ -MIX_RESULT mix_video_get_version(MixVideo * mix, uint * major, uint * minor); - - - -/** - * mix_video_initialize: - * @mix: #MixVideo object. - * @mode: Enum value to indicate encode or decode mode - * @init_params: MixVideoInitParams object which includes display type and pointer to display, encode or decode mode - * @drm_init_params: MixDrmParams defined in Moorestown MI-X DRM API. - * This can be null if content is not protected. - * @returns: In addition to the Common Video Error Return Codes, - * the following error codes may be returned. - * - * MIX_RESULT_ALREADY_INIT, mix_video_initialize() has already been called. - * - * - * This function will return the major and minor version numbers of the library. - */ -MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode, - MixVideoInitParams * init_params, MixDrmParams * drm_init_params); - -/** - * mix_video_deinitialize: - * @mix: #MixVideo object. - * @returns: Common Video Error Return Codes - * - * This function will un-initialize a session with this MI-X instance. During this call, the - * LibVA session is closed and all resources including surface buffers, #MixBuffers and - * #MixVideoFrame objects are freed. This function is called by the application once - * mix_video_initialize() is called, before exiting. - */ -MIX_RESULT mix_video_deinitialize(MixVideo * mix); - - -/** - * mix_video_configure: - * @mix: #MixVideo object. 
- * @config_params: Pointer to #MixVideoConfigParams object (either #MixVideoConfigParamsDec or - * #MixVideoConfigParamsEnc for specific media type) - * @drm_config_params: Pointer to #MixDrmParams defined in Moorestown MI-X DRM API. - * This can be null if content is not protected. - * @returns: In addition to the Common Video Error Return Codes, - * the following error codes may be returned. - * - * MIX_RESULT_RESOURCES_NOTAVAIL, HW accelerated decoding is not available. - * MIX_RESULT_NOTSUPPORTED, A requested parameter is not supported or not available. - * - * - * This function can be used to configure a stream for the current session. - * The caller can use this function to do the following: - * - * Choose frame ordering mode (display order or decode order) - * Choose encode or decode mode - * Choose whether display frames are enqueued for encode mode - * Provide stream parameters - * - - * This function can only be called after mix_video_initialize() has been called - */ -MIX_RESULT mix_video_configure(MixVideo * mix, - MixVideoConfigParams * config_params, - MixDrmParams * drm_config_params); - - -/** - * mix_video_get_config: - * @mix: #MixVideo object. - * @config_params: Pointer to pointer to #MixVideoConfigParams object defined in - * description of mix_video_configure() - * @returns: Common Video Error Return Codes - * - * This function can be used to get the current configuration of a stream for the current session. - * A #MixVideoConfigParams object will be returned, which can be used to get each of the - * parameter current values. The caller will need to release this object when it is no - * longer needed. - * - * This function can only be called once mix_video_configure() has been called. - * - * See description of mix_video_configure() for #MixVideoConfigParams object details. - * For mix_video_get_config(), all input parameter fields become OUT parameters. - * - */ -MIX_RESULT mix_video_get_config(MixVideo * mix, - MixVideoConfigParams ** config_params); - -/** - * mix_video_decode: - * @mix: #MixVideo object. - * @bufin: Array of pointers to #MixBuffer objects, described in mix_video_get_mixbuffer() * - * @bufincnt: Number of #MixBuffer objects - * @decode_params: #MixVideoDecodeParams object - * @returns: In addition to the Common Video Error Return Codes, - * the following error codes may be returned. - * - * - * MIX_RESULT_OUTOFSURFACES, No surfaces available for decoding. Nothing will be done. - * Caller can try again with the same MixBuffers later when surfaces may have been freed. - * - * - * - * - * This function is used to initiate HW accelerated decoding of encoded data buffers. This - * function is used to decode to a surface buffer, which can then be rendered using - * mix_video_render(). - * Video data input buffers are provided in a scatter/gather list of reference counted - * #MixBuffers. The input #MixBuffers are retained until a full frame of coded data is - * accumulated, at which point it will be decoded and the input buffers released. The - * decoded data will be stored in a surface buffer until it is rendered. The caller must - * provide the presentation timestamp and any stream discontinuity for the video frame - * for the encoded data, in the #MixVideoDecodeParams object. These will be preserved - * and provided for the #MixVideoFrame object that contains the decoded data for this - * frame data. 
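The ownership rules described above translate into a fairly mechanical submission sequence. The sketch below is illustrative only: mix_buffer_set() and the MixVideoDecodeParams constructor and timestamp setter are assumed names, since this header does not declare them.

    /* Illustrative sketch: submit one coded frame for decode.
     * mix_buffer_set(), mix_videodecodeparams_new() and
     * mix_videodecodeparams_set_timestamp() are assumed names. */
    MixBuffer *buf = NULL;
    mix_video_get_mixbuffer(mix, &buf);
    mix_buffer_set(buf, coded_data, coded_size, token, callback);    /* assumed */

    MixVideoDecodeParams *dp = mix_videodecodeparams_new();          /* assumed */
    mix_videodecodeparams_set_timestamp(dp, pts);                    /* assumed */

    MixBuffer *bufin[1] = { buf };
    MIX_RESULT res = mix_video_decode(mix, bufin, 1, dp);
    if (res == MIX_RESULT_OUTOFSURFACES) {
        /* no surfaces free: retry later with the same MixBuffers */
    }
    mix_video_release_mixbuffer(mix, buf); /* MixVideo keeps its own reference
                                              until the frame is decoded */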
- * - * - * - * As only one timestamp is passed in for the buffer, there should be no more than one - * video frame included in the encoded data buffer provided in a single call to - * mix_video_decode(). If partial frame data is passed in over multiple calls to - * mix_video_decode(), the same timestamp should be provided with each call having - * data associated with the same frame. - * - * - * - * The application should request a #MixBuffer object using mix_video_get_mixbuffer(), - * initialize the #MixBuffer with the data pointer to the coded input data, along with the - * size of the input data buffer, and optionally can provide a token value and a callback - * function pointer. When the MixBuffer is released by both the application and #MixVideo, - * the callback will be called and passed the token value and the input data buffer - * pointer for any buffer management processing that the application needs or wants to - * perform (such as releasing the actual coded data buffer that was assigned to that - * #MixBuffer). MixBuffers are allocated in a pool, and the application determines the size - * of this pool, which is passed to mix_video_configure() in #the MixVideoConfigParams object. - * - */ -MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], int bufincnt, - MixVideoDecodeParams * decode_params); - - -/** - * mix_video_get_frame: - * @mix: #MixVideo object. - * @frame: A pointer to a pointer to a #MixVideoFrame object - * @returns: In addition to the Common Video Error Return Codes, - * the following error codes may be returned. - * - * - * MIX_RESULT_FRAME_NOTAVAIL, No decoded frames are available. - * - * - * MIX_RESULT_EOS, No more decoded frames are available, - * since end of stream has been encountered. - * - * - * - * - * This function returns a frame object that represents the next frame ID and includes - * timestamp and discontinuity information. If display frame ordering has been - * configured, it is the next frame displayed. If decode order frame ordering has been - * configured, it is the next frame decoded. In both cases the timestamp reflects the - * presentation timestamp. For encode mode the frame order is always display order. - * - * - * - * The frame object is a reference counted object that represents the frame. The - * application can retain this frame object as long as needed to display the frame and - * redisplay as needed. At presentation time, the application can call mix_video_render() - * with this frame object to display the frame immediately. When the application no - * longer needs to display this frame, it should release the object by calling - * mix_video_release_frame(). The application should not modify the reference count or - * delete this object directly. - * - */ -MIX_RESULT mix_video_get_frame(MixVideo * mix, MixVideoFrame ** frame); - - - -/** - * mix_video_release_frame: - * @mix: #MixVideo object. - * @frame: A pointer to a #MixVideoFrame object, described in mix_video_get_frame() - * @returns: Common Video Error Return Codes - * - * This function releases a frame object that was acquired from mix_video_get_frame(). - */ -MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame); - - -/** - * mix_video_render: - * @mix: #MixVideo object. - * @render_params: #MixVideoRenderParams object defined below, - * which includes the display window and type, - * src and dest image sizes, deinterlace info, clipping rectangles, - * some post processing parameters, and so forth. 
- * @frame: Pointer to a #MixVideoFrame object returned from mix_video_get_frame(). - * @returns: Common Video Error Return Codes - * - * This function renders a video frame associated with a MixVideoFrame object to the display. - * The display is either an X11 Pixmap or an X11 Window using the overlay. - */ -MIX_RESULT mix_video_render(MixVideo * mix, - MixVideoRenderParams * render_params, MixVideoFrame *frame); - - -/** - * mix_video_encode: - * @mix: #MixVideo object. - * @bufin: Array of pointers to #MixBuffer objects, structure defined in mix_video_decode() - * @bufincnt: Number of #MixBuffer objects - * @iovout: Array of #MixIOVec structures, pointing to buffers allocated by the application - * @iovoutcnt: Number of items in iovout array - * @encode_params: #MixVideoEncodeParams object - * @returns: Common Video Error Return Codes - * - * - * This function is used to initiate HW accelerated encoding of uncompressed video input - * buffers. The input buffers may either be uncompressed video in user space buffers, or - * CI frame indexes from libCI captured frames. In order to use CI frame indexes, the - * shared buffer mode should be indicated in the #MixVideoConfigParamsEnc object - * provided to mix_video_configure(). - * - * - * - * Video uncompressed data input buffers are provided in a scatter/gather list of - * reference counted MixBuffers. The input #MixBuffers are considered a complete frame - * of data, and are used for encoding before the input buffers are released. LibCI frame - * indices may also be provided in MixBuffers. - * - * - * - * The encoded data will be copied to the output buffers provided in the array of - * #MixIOVec structures, also in a scatter/gather list. These output buffers are allocated - * by the application. The application can query for the proper size of buffer to allocate - * for this, using mix_video_get_max_coded_buffer_size(). It is suggested that the - * application create a pool of these buffers to pass in, for efficiency. The application will - * also set the buffer_size field in the #MixIOVec structures to the allocated buffer size. - * When the buffers are filled with encoded data by #MixVideo, the data_size will be set to - * the encoded data size placed in the buffer. For any buffer not used for encoded data, - * the data_size will be set to zero. - * - * - * - * Alternatively, if the application does not allocate the output buffers, the data pointers - * in the #MixIOVec structures (still provided by the application) can be set to NULL, - * whereupon #MixVideo will allocate a data buffer for each frame and set the data, - * buffer_size and data_size pointers in the #MixIOVec structures accordingly. - * - * - * - * This is not an efficient method to handle these buffers and it is preferred that - * the application provide pre-allocated buffers. - * - * - * - * The application should request a #MixBuffer object using mix_video_get_mixbuffer(), - * initialize the #MixBuffer with the data pointer to the uncompressed input data or a LibCI - * frame index, along with the size of the input data buffer, and optionally can provide a - * token value and a callback function pointer. When the #MixBuffer is released by both - * the application and #MixVideo, the callback will be called and passed the token value - * and the input data buffer pointer for any buffer management processing that the - * application needs or wants to perform (such as releasing the actual data buffer that - * was assigned to that #MixBuffer). 
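Concretely, the preferred pre-allocated output path described above looks roughly as follows. This is a sketch: the buffer_size and data_size member names come from the paragraphs above, while the data-pointer member is assumed to be called data.

    /* Illustrative sketch: encode one frame into a caller-allocated buffer.
     * bufin/bufincnt and encode_params are prepared as described above. */
    uint max_size = 0;
    mix_video_get_max_coded_buffer_size(mix, &max_size);

    MixIOVec iov;
    iov.data        = (uint8 *) malloc(max_size); /* member name assumed */
    iov.buffer_size = max_size;                   /* set by the application */
    iov.data_size   = 0;                          /* filled in by MixVideo */

    MixIOVec *iovout[1] = { &iov };
    MIX_RESULT res = mix_video_encode(mix, bufin, bufincnt, iovout, 1, encode_params);
    /* on success, iov.data_size is the encoded byte count (zero if the buffer was unused) */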
#MixBuffers are allocated in a pool, and the application - * determines the size of this pool, which is passed to mix_video_configure() in the - * #MixVideoConfigParams object. - * - * - * - * The application can choose to enable or disable display of the uncompressed video - * frames using the need_display of the #MixVideoConfigParamsEnc object in - * mix_video_configure(). If display is enabled, #MixVideoFrames are enqueued by - * #MixVideo, to be requested by the application with mix_video_get_frame() and used to - * provide to mix_video_render() for rendering before releasing with - * mix_video_release_frame(). If display is disabled, no #MixVideoFrames will be - * enqueued. - * - * - */ -MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], int bufincnt, - MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params); - -/** - * mix_video_flush: - * @mix: #MixVideo object. - * @returns: Common Video Error Return Codes - * - * This function will flush all encoded and decoded buffers that are currently enqueued or - * in the process of decoding. After this call, decoding can commence again, but would - * need to start at the beginning of a sequence (for example, with no dependencies on - * previously decoded reference frames). - */ -MIX_RESULT mix_video_flush(MixVideo * mix); - -/** - * mix_video_eos: - * @mix: #MixVideo object. - * @returns: Common Video Error Return Codes - * - * This function will signal end of stream to #MixVideo. This can be used to finalize - * decoding of the last frame and other end of stream processing. #MixVideo will complete - * the decoding of all buffers received, and will continue to provide the decoded frame - * objects by means of the mix_video_get_frame() until all frames have been provided, - * at which point mix_video_get_frame() will return MIX_RESULT_EOS. - */ -MIX_RESULT mix_video_eos(MixVideo * mix); - - -/** - * mix_video_get_state: - * @mix: #MixVideo object. - * @state: Current state of MI-X session. - * @returns: Common Video Error Return Codes - * - * This function returns the current state of the MI-X session. - */ -MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state); - -/** - * mix_video_get_mixbuffer: - * @mix: #MixVideo object. - * @buf: A pointer to a pointer to a #MixBuffer object - * @returns: Common Video Error Return Codes - * - * - * This function returns a frame object that represents the next frame ID and includes - * timestamp and discontinuity information. If display frame ordering has been - * configured, it is the next frame displayed. If decode order frame ordering has been - * configured, it is the next frame decoded. In both cases the timestamp reflects the - * presentation timestamp. - * - * - * - * The frame object is a reference counted object that represents the frame. The - * application can retain this frame object as long as needed to display the frame and - * redisplay as needed. At presentation time, the application can call mix_video_render() - * with this frame object to display the frame immediately. When the application no - * longer needs to display this frame, it should release the object by calling - * mix_video_release_frame(). The application should not modify the reference count or - * delete this object directly. - * - * - */ -MIX_RESULT mix_video_get_mixbuffer(MixVideo * mix, MixBuffer ** buf); - - -/** - * mix_video_release_mixbuffer: - * @mix: #MixVideo object. - * @buf: A pointer to a #MixBuffer object, described in mix_video_get_mixbuffer(). 
- * @returns: Common Video Error Return Codes
- *
- * This function releases a #MixBuffer object that was acquired from mix_video_get_mixbuffer().
- */
-MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf);
-
-
-/**
- * mix_video_get_max_coded_buffer_size:
- * @mix: #MixVideo object.
- * @bufsize: Pointer to uint.
- * @returns: Common Video Error Return Codes
- *
- *
- * This function can be used to get the maximum size of encoded data buffer needed for
- * the mix_video_encode() call.
- *
- *
- * This function can only be called once mix_video_configure() has been called.
- *
- */
-MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, uint *bufsize);
-
-
-/**
- * mix_video_set_dynamic_enc_config:
- * @mix: #MixVideo object.
- * @params_type: Dynamic encoder configuration type
- * @dynamic_params: Pointer to the dynamic control data structure which includes the new value to be changed to
- * @returns: Common Video Error Return Codes
- *
- *
- * This function can be used to change the encoder parameters at run-time.
- *
- *
- * Usually this function is called after the encoding session has started.
- *
- */
-
-MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix,
-        MixEncParamsType params_type, MixEncDynamicParams * dynamic_params);
-
-
-
-/**
- * mix_video_get_new_userptr_for_surface_buffer:
- * @mix: #MixVideo object.
- * @width: Width of new surface to be created
- * @height: Height of new surface to be created
- * @format: Format of new surface to be created
- * @expected_size: Expected size of the surface buffer
- * @outsize: Actual size of the created surface buffer, returned to the caller
- * @stride: Stride of the created surface, returned to the caller
- * @usrptr: User space pointer mapped from the newly created VA surface
- * @returns: Common Video Error Return Codes
- *
- *
- * This function can be used to create a new VA surface and map the physical address to user space.
- *
- *
- * Usually this function is called before the encoding session is started.
- *
- */
-MIX_RESULT mix_video_get_new_userptr_for_surface_buffer (MixVideo * mix, uint width, uint height, uint format,
-        uint expected_size, uint *outsize, uint * stride, uint8 **usrptr);
-#endif /* __MIX_VIDEO_H__ */
diff --git a/mix_video/src/mixvideo_private.h b/mix_video/src/mixvideo_private.h
deleted file mode 100644
index 24bafe1..0000000
--- a/mix_video/src/mixvideo_private.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-#ifndef __MIX_VIDEO_PRIVATE_H__
-#define __MIX_VIDEO_PRIVATE_H__
-
-#include "mixvideothread.h"
-#include "mixvideoformatenc.h"
-
-class MixFrameManager;
-class MixVideoFormat;
-typedef struct _MixVideoPrivate MixVideoPrivate;
-
-struct _MixVideoPrivate {
-    /*< private > */
-
-    MixVideoMutex objlock;
-    bool initialized;
-    bool configured;
-
-    VADisplay va_display;
-
-    int va_major_version;
-    int va_minor_version;
-
-    MixCodecMode codec_mode;
-
-    MixVideoInitParams *init_params;
-    MixDrmParams *drm_params;
-
-    MixVideoConfigParams *config_params;
-
-    MixFrameManager *frame_manager;
-    MixVideoFormat *video_format;
-    MixVideoFormatEnc *video_format_enc;
-
-    MixSurfacePool *surface_pool;
-    MixBufferPool *buffer_pool;
-    MixUsrReqSurfacesInfo requested_surface_info;
-
-};
-
-/* Private functions */
-void mix_video_private_initialize(MixVideoPrivate* priv);
-void mix_video_private_cleanup(MixVideoPrivate* priv);
-
-#define MIX_VIDEO_GET_PRIVATE(mix) (MixVideoPrivate*)(mix->context)
-#endif /* __MIX_VIDEO_PRIVATE_H__ */
diff --git a/mix_video/src/mixvideocaps.cpp b/mix_video/src/mixvideocaps.cpp
deleted file mode 100644
index c4e0d7a..0000000
--- a/mix_video/src/mixvideocaps.cpp
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
-INTEL CONFIDENTIAL
-Copyright 2009 Intel Corporation All Rights Reserved.
-The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
-No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
-*/
-
-/**
-* SECTION:mixvideocaps
-* @short_description: MI-X video capability object
-*
-* A data object which stores video capability specific parameters.
-*/
-
-#include <glib.h>
-#include "mixvideocaps.h"
-#include <string.h>
-
-
-#ifdef ANDROID
-#define mix_strcmp strcmp
-#else
-#define mix_strcmp g_strcmp0
-#endif
-
-#define MIX_VIDEOCAPS_SETTER_CHECK_INPUT(obj) \
-    if(!obj) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCAPS(obj)) return MIX_RESULT_FAIL; \
-
-#define MIX_VIDEOCAPS_GETTER_CHECK_INPUT(obj, prop) \
-    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCAPS(obj)) return MIX_RESULT_FAIL; \
-
-
-#define SAFE_FREE(p) if(p) {free(p); p = NULL; }
-
-MixVideoCaps::MixVideoCaps()
-    :mix_caps(NULL)
-    ,video_hw_caps(NULL)
-    ,reserved1(NULL)
-    ,reserved2(NULL)
-    ,reserved3(NULL)
-    ,reserved4(NULL) {
-}
-
-MixVideoCaps::~MixVideoCaps() {
-    SAFE_FREE (this->mix_caps);
-    SAFE_FREE (this->video_hw_caps);
-}
-
-/**
-* mix_videocaps_dup:
-* @obj: a #MixVideoCaps object
-* @returns: a newly allocated duplicate of the object.
-*
-* Copy duplicate of the object.
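-*
-* Illustrative use only (release semantics assumed from the reference
-* counting shown in the implementation below; caps is an assumed variable):
-* |[
-* MixParams *copy = caps->dup();
-* if (NULL != copy) {
-*   copy->Unref();
-* }
-* ]|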
-*/
-MixParams* MixVideoCaps::dup() const {
-    MixParams *ret = new MixVideoCaps();
-    if (NULL != ret) {
-        if (FALSE == copy(ret)) {
-            ret->Unref();
-            ret = NULL;
-        }
-    }
-    return ret;
-}
-
-/**
-* mix_videocaps_copy:
-* @target: copy to target
-* @src: copy from src
-* @returns: boolean indicates if copy is successful.
-*
-* Copy instance data from @src to @target.
-*/
-bool MixVideoCaps::copy (MixParams * target) const {
-    bool ret = FALSE;
-    MixVideoCaps * this_target = MIX_VIDEOCAPS(target);
-    if (NULL != this_target) {
-        // Free the existing properties
-        SAFE_FREE (this_target->mix_caps);
-        SAFE_FREE (this_target->video_hw_caps);
-        // Duplicate strings, guarding against NULL sources
-        this_target->mix_caps =
-            (NULL != this->mix_caps) ? strdup (this->mix_caps) : NULL;
-        this_target->video_hw_caps =
-            (NULL != this->video_hw_caps) ? strdup (this->video_hw_caps) : NULL;
-
-        // chain up base class
-        ret = MixParams::copy(target);
-    }
-    return ret;
-}
-
-
-bool MixVideoCaps::equal(MixParams* obj) const {
-    bool ret = FALSE;
-    MixVideoCaps * this_obj = MIX_VIDEOCAPS(obj);
-    if (NULL != this_obj) {
-        if ((mix_strcmp (this->mix_caps, this_obj->mix_caps) == 0) &&
-            (mix_strcmp (this->video_hw_caps, this_obj->video_hw_caps) == 0)) {
-            ret = MixParams::equal(this_obj);
-        }
-    }
-    return ret;
-}
-
-
-
-MixVideoCaps *
-mix_videocaps_new (void) {
-    return new MixVideoCaps();
-}
-
-MixVideoCaps *
-mix_videocaps_ref (MixVideoCaps * mix) {
-    if (NULL != mix)
-        mix->Ref();
-    return mix;
-}
-
-
-/* TODO: Add getters and setters for other properties. The following is just an example, not implemented yet. */
-MIX_RESULT mix_videocaps_set_mix_caps (
-    MixVideoCaps * obj, char * mix_caps) {
-    MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj);
-    SAFE_FREE (obj->mix_caps);
-    obj->mix_caps = (NULL != mix_caps) ? strdup (mix_caps) : NULL;
-    if (NULL == obj->mix_caps && NULL != mix_caps) {
-        return MIX_RESULT_NO_MEMORY;
-    }
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videocaps_get_mix_caps (
-    MixVideoCaps * obj, char ** mix_caps) {
-    MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, mix_caps);
-    *mix_caps = (NULL != obj->mix_caps) ? strdup (obj->mix_caps) : NULL;
-    if (NULL == *mix_caps && NULL != obj->mix_caps) {
-        return MIX_RESULT_NO_MEMORY;
-    }
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videocaps_set_video_hw_caps (
-    MixVideoCaps * obj, char * video_hw_caps) {
-    MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj);
-    SAFE_FREE (obj->video_hw_caps);
-    obj->video_hw_caps = (NULL != video_hw_caps) ? strdup (video_hw_caps) : NULL;
-    if (NULL != video_hw_caps && NULL == obj->video_hw_caps) {
-        return MIX_RESULT_NO_MEMORY;
-    }
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videocaps_get_video_hw_caps (
-    MixVideoCaps * obj, char ** video_hw_caps) {
-    MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, video_hw_caps);
-    *video_hw_caps = (NULL != obj->video_hw_caps) ? strdup (obj->video_hw_caps) : NULL;
-    if (NULL == *video_hw_caps && NULL != obj->video_hw_caps) {
-        return MIX_RESULT_NO_MEMORY;
-    }
-    return MIX_RESULT_SUCCESS;
-}
diff --git a/mix_video/src/mixvideocaps.h b/mix_video/src/mixvideocaps.h
deleted file mode 100644
index e760787..0000000
--- a/mix_video/src/mixvideocaps.h
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
-INTEL CONFIDENTIAL
-Copyright 2009 Intel Corporation All Rights Reserved.
-The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
-No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
-*/
-
-#ifndef __MIX_VIDEOCAPS_H__
-#define __MIX_VIDEOCAPS_H__
-
-#include <mixparams.h>
-#include "mixvideodef.h"
-
-/**
-* MIX_VIDEOCAPS:
-* @obj: object to be type-casted.
-*/
-#define MIX_VIDEOCAPS(obj) (reinterpret_cast<MixVideoCaps*>(obj))
-
-/**
-* MIX_IS_VIDEOCAPS:
-* @obj: an object.
-*
-* Checks if the given object is an instance of #MixVideoCaps
-*/
-#define MIX_IS_VIDEOCAPS(obj) ((NULL != MIX_VIDEOCAPS(obj)) ? TRUE : FALSE)
-
-
-/**
-* MixVideoCaps:
-*
-* MI-X video capability object
-*/
-class MixVideoCaps : public MixParams
-{
-public:
-    MixVideoCaps();
-    virtual ~MixVideoCaps();
-
-    virtual bool copy(MixParams* target) const;
-    virtual MixParams *dup() const;
-    virtual bool equal(MixParams* obj) const;
-
-public:
-    /*< public > */
-    //MixParams parent;
-
-    /*< public > */
-    char *mix_caps;
-    char *video_hw_caps;
-
-    void *reserved1;
-    void *reserved2;
-    void *reserved3;
-    void *reserved4;
-};
-
-/**
-* mix_videocaps_new:
-* @returns: A newly allocated instance of #MixVideoCaps
-*
-* Use this method to create new instance of #MixVideoCaps
-*/
-MixVideoCaps *mix_videocaps_new (void);
-/**
-* mix_videocaps_ref:
-* @mix: object to add reference
-* @returns: the #MixVideoCaps instance where reference count has been increased.
-*
-* Add reference count.
-*/
-MixVideoCaps *mix_videocaps_ref (MixVideoCaps * mix);
-
-/**
-* mix_videocaps_unref:
-* @obj: object to unref.
-*
-* Decrement reference count of the object.
-*/
-#define mix_videocaps_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-/* Class Methods */
-
-MIX_RESULT mix_videocaps_set_mix_caps (MixVideoCaps * obj, char * mix_caps);
-MIX_RESULT mix_videocaps_get_mix_caps (MixVideoCaps * obj,
-        char ** mix_caps);
-
-MIX_RESULT mix_videocaps_set_video_hw_caps (MixVideoCaps * obj,
-        char * video_hw_caps);
-MIX_RESULT mix_videocaps_get_video_hw_caps (MixVideoCaps * obj,
-        char ** video_hw_caps);
-
-
-#endif /* __MIX_VIDEOCAPS_H__ */
diff --git a/mix_video/src/mixvideoconfigparams.cpp b/mix_video/src/mixvideoconfigparams.cpp
deleted file mode 100644
index 25c88dd..0000000
--- a/mix_video/src/mixvideoconfigparams.cpp
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-/**
- * SECTION:mixvideoconfigparams
- * @short_description: MI-X Video Configuration Parameter Base Object
- * @include: mixvideoconfigparams.h
- *
- *
- * A base object of MI-X video configuration parameter objects.
- *
- *
- * The derived MixVideoConfigParams object is created by the MMF/App
- * and provided in the MixVideo mix_video_configure() function. The get and set
- * methods for the properties will be available for the caller to set and get information at
- * configuration time. It will also be created by MixVideo and returned from the
- * mix_video_get_config() function, whereupon the MMF/App can use the get methods to
- * obtain current configuration information.
- *
- *
- * There are decode mode objects (for example, MixVideoConfigParamsDec) and encode
- * mode objects (for example, MixVideoConfigParamsEnc). Each of these types is refined
- * further with media specific objects. The application should create the correct type of
- * object to match the media format of the stream to be handled, e.g. if the media
- * format of the stream to be decoded is H.264, the application would create a
- * MixVideoConfigParamsDecH264 object for the mix_video_configure() call.
- *
- */
-
-#include <string.h>
-#include "mixvideolog.h"
-#include "mixvideoconfigparams.h"
-
-MixVideoConfigParams::MixVideoConfigParams()
-    :reserved1(NULL)
-    ,reserved2(NULL)
-    ,reserved3(NULL)
-    ,reserved4(NULL) {
-}
-MixVideoConfigParams::~MixVideoConfigParams() {
-}
-bool MixVideoConfigParams::copy(MixParams *target) const {
-    bool ret = FALSE;
-    MixVideoConfigParams * this_target = MIX_VIDEOCONFIGPARAMS(target);
-    if (NULL != this_target)
-        ret = MixParams::copy(target);
-    return ret;
-}
-
-bool MixVideoConfigParams::equal(MixParams* obj) const {
-    bool ret = FALSE;
-    MixVideoConfigParams * this_obj = MIX_VIDEOCONFIGPARAMS(obj);
-    if (NULL != this_obj)
-        ret = MixParams::equal(this_obj);
-    return ret;
-}
-
-MixParams* MixVideoConfigParams::dup() const {
-    MixParams *ret = new MixVideoConfigParams();
-    if (NULL != ret) {
-        if (FALSE == copy(ret)) {
-            ret->Unref();
-            ret = NULL;
-        }
-    }
-    return ret;
-}
-
-
-MixVideoConfigParams *
-mix_videoconfigparams_new(void) {
-    return new MixVideoConfigParams();
-}
-MixVideoConfigParams *
-mix_videoconfigparams_ref(MixVideoConfigParams * mix) {
-    if (NULL != mix)
-        mix->Ref();
-    return mix;
-}
-
-
diff --git a/mix_video/src/mixvideoconfigparams.h b/mix_video/src/mixvideoconfigparams.h
deleted file mode 100644
index d0afa72..0000000
--- a/mix_video/src/mixvideoconfigparams.h
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-#ifndef __MIX_VIDEOCONFIGPARAMS_H__
-#define __MIX_VIDEOCONFIGPARAMS_H__
-
-#include <mixparams.h>
-#include "mixvideodef.h"
-
-/**
- * MIX_VIDEOCONFIGPARAMS:
- * @obj: object to be type-casted.
- */
-#define MIX_VIDEOCONFIGPARAMS(obj) (reinterpret_cast<MixVideoConfigParams*>(obj))
-
-
-/**
- * MixVideoConfigParams:
- *
- * MI-X VideoConfig Parameter object
- */
-class MixVideoConfigParams : public MixParams {
-public:
-    MixVideoConfigParams();
-    virtual ~MixVideoConfigParams();
-    virtual bool copy(MixParams *target) const;
-    virtual bool equal(MixParams* obj) const;
-    virtual MixParams* dup() const;
-    /*< public > */
-    //MixParams parent;
-
-    /*< private > */
-protected:
-    void *reserved1;
-    void *reserved2;
-    void *reserved3;
-    void *reserved4;
-};
-
-
-/**
- * mix_videoconfigparams_new:
- * @returns: A newly allocated instance of #MixVideoConfigParams
- *
- * Use this method to create new instance of #MixVideoConfigParams
- */
-MixVideoConfigParams *mix_videoconfigparams_new(void);
-
-/**
- * mix_videoconfigparams_ref:
- * @mix: object to add reference
- * @returns: the #MixVideoConfigParams instance where reference count has been increased.
- *
- * Add reference count.
- */
-MixVideoConfigParams *mix_videoconfigparams_ref(MixVideoConfigParams * mix);
-
-/**
- * mix_videoconfigparams_unref:
- * @obj: object to unref.
- *
- * Decrement reference count of the object.
- */
-#define mix_videoconfigparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-/* Class Methods */
-
-/* TODO: Add getters and setters for other properties */
-
-
-#endif /* __MIX_VIDEOCONFIGPARAMS_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsdec.cpp b/mix_video/src/mixvideoconfigparamsdec.cpp
deleted file mode 100644
index 0d6e72f..0000000
--- a/mix_video/src/mixvideoconfigparamsdec.cpp
+++ /dev/null
@@ -1,576 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-/**
- * SECTION:mixvideoconfigparamsdec
- * @short_description: MI-X Video Decode Configuration Parameter Base Object
- *
- * A base object of MI-X video decode configuration parameter objects.
- */
-
-#include <string.h>
-#include "mixvideolog.h"
-#include "mixvideoconfigparamsdec.h"
-#include <stdlib.h>
-
-#define MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT(obj) \
-    if(!obj) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \
-
-#define MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT(obj, prop) \
-    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \
-
-#define MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR(obj, prop, prop2) \
-    if(!obj || !prop || !prop2 ) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \
-
-
-MixVideoConfigParamsDec::MixVideoConfigParamsDec()
-    :frame_order_mode(MIX_FRAMEORDER_MODE_DISPLAYORDER)
-    ,mime_type(NULL)
-    ,frame_rate_num(1)
-    ,frame_rate_denom(1)
-    ,picture_width(0)
-    ,picture_height(0)
-    ,raw_format(0)
-    ,rate_control(0)
-    ,mixbuffer_pool_size(0)
-    ,extra_surface_allocation(0)
-    ,video_range(0)
-    ,color_matrix(0)
-    ,bit_rate(0)
-    ,par_num(0)
-    ,par_denom(0)
-    ,crop_left(0)
-    ,crop_right(0)
-    ,crop_top(0)
-    ,crop_bottom(0)
-    ,error_concealment(TRUE)
-    ,reserved1(NULL)
-    ,reserved2(NULL)
-    ,reserved3(NULL)
-    ,reserved4(NULL)
-
-{
-    memset(&this->header, 0, sizeof(header));
-}
-
-MixVideoConfigParamsDec::~MixVideoConfigParamsDec() {
-    /* free header */
-    if (NULL != this->header.data) {
-        free(this->header.data);
-        memset(&this->header, 0, sizeof(this->header));
-    }
-
-    /* free mime_type */
-    if (this->mime_type) {
-        free(this->mime_type);
-        this->mime_type = NULL;
-    }
-}
-
-bool MixVideoConfigParamsDec::copy(MixParams *target) const {
-    MIX_RESULT mix_result = MIX_RESULT_FAIL;
-    MixVideoConfigParamsDec *this_target = MIX_VIDEOCONFIGPARAMSDEC(target);
-    LOG_V( "Begin\n");
-
-    if (NULL != this_target) {
-        /* copy properties of primitive type */
-        this_target->frame_order_mode = this->frame_order_mode;
-        this_target->frame_rate_num = this->frame_rate_num;
-        this_target->frame_rate_denom = this->frame_rate_denom;
-        this_target->picture_width = this->picture_width;
-        this_target->picture_height = this->picture_height;
-        this_target->raw_format = this->raw_format;
-        this_target->rate_control = this->rate_control;
-        this_target->mixbuffer_pool_size = this->mixbuffer_pool_size;
-        this_target->extra_surface_allocation = this->extra_surface_allocation;
-        this_target->video_range = this->video_range;
-        this_target->color_matrix = this->color_matrix;
-        this_target->bit_rate = this->bit_rate;
-        this_target->par_num = this->par_num;
-        this_target->par_denom = this->par_denom;
-        this_target->crop_left = this->crop_left;
-        this_target->crop_right = this->crop_right;
-        this_target->crop_top = this->crop_top;
-        this_target->crop_bottom = this->crop_bottom;
-        this_target->error_concealment = this->error_concealment;
-
-        /* copy properties of non-primitive type */
-
-        /* copy header */
-        mix_result = mix_videoconfigparamsdec_set_header(this_target,
-                const_cast<MixIOVec *>(&this->header));
-
-        if (MIX_RESULT_SUCCESS != mix_result) {
-            LOG_E( "set_header failed: mix_result = 0x%x\n", mix_result);
-            return FALSE;
-        }
-
-        /* copy mime_type */
-        if (NULL != mime_type) {
-            mix_result = mix_videoconfigparamsdec_set_mime_type(this_target,
-                    this->mime_type);
-        } else {
-            mix_result = mix_videoconfigparamsdec_set_mime_type(this_target, NULL);
-        }
-
-        if (MIX_RESULT_SUCCESS != mix_result) {
-            LOG_E( "set_mime_type failed: mix_result = 0x%x\n", mix_result);
-            return FALSE;
-        }
-
-        /* TODO: copy other properties if there's any */
-
-        /* Now chain up base class */
-        return MixVideoConfigParams::copy(target);
-    }
-
-    LOG_V( "End\n");
-
-    return FALSE;
-}
-
-bool MixVideoConfigParamsDec::equal(MixParams* obj) const {
-    bool ret = FALSE;
-    MixVideoConfigParamsDec *this_obj = MIX_VIDEOCONFIGPARAMSDEC(obj);
-
-    if (NULL != this_obj) {
-        // Deep compare
-
-        /* check the equality of the primitive type properties */
-        if (this->frame_order_mode != this_obj->frame_order_mode) {
-            goto not_equal;
-        }
-
-        if ((this->frame_rate_num != this_obj->frame_rate_num) ||
-            (this->frame_rate_denom != this_obj->frame_rate_denom)) {
-            goto not_equal;
-        }
-
-        if ((this->picture_width != this_obj->picture_width) ||
-            (this->picture_height != this_obj->picture_height)) {
-            goto not_equal;
-        }
-
-        if (this->raw_format != this_obj->raw_format) {
-            goto not_equal;
-        }
-
-        if (this->rate_control != this_obj->rate_control) {
-            goto not_equal;
-        }
-
-        if (this->mixbuffer_pool_size != this_obj->mixbuffer_pool_size) {
-            goto not_equal;
-        }
-
-        if (this->extra_surface_allocation != this_obj->extra_surface_allocation) {
-            goto not_equal;
-        }
-
-        /* check the equality of the non-primitive type properties */
-
-        /* MixIOVec header */
-
-        if (this->header.data_size != this_obj->header.data_size) {
-            goto not_equal;
-        }
-
-        if (this->header.buffer_size != this_obj->header.buffer_size) {
-            goto not_equal;
-        }
-
-        if (this->header.data && this_obj->header.data) {
-            if (memcmp(this->header.data, this_obj->header.data,
-                    this_obj->header.data_size) != 0) {
-                goto not_equal;
-            }
-        } else if (!(!this->header.data && !this_obj->header.data)) {
-            goto not_equal;
-        }
-
-        /* compare mime_type */
-        if (this->mime_type && this_obj->mime_type) {
-            if (strcmp(this->mime_type, this_obj->mime_type) != 0) {
-                goto not_equal;
-            }
-        } else if (!(!this->mime_type && !this_obj->mime_type)) {
-            goto not_equal;
-        }
-
-        if (this->video_range != this_obj->video_range) {
-            goto not_equal;
-        }
-
-        if (this->color_matrix != this_obj->color_matrix) {
-            goto not_equal;
-        }
-
-        if (this->bit_rate != this_obj->bit_rate) {
-            goto not_equal;
-        }
-
-        if (this->par_num != this_obj->par_num) {
-            goto not_equal;
-        }
-
-        if (this->par_denom != this_obj->par_denom) {
-            goto not_equal;
-        }
-
-        if (this->crop_left != this_obj->crop_left) {
-            goto not_equal;
-        }
-        if (this->crop_right != this_obj->crop_right) {
-            goto not_equal;
-        }
-        if (this->crop_top != this_obj->crop_top) {
-            goto not_equal;
-        }
-        if (this->crop_bottom != this_obj->crop_bottom) {
-            goto not_equal;
-        }
-        if (this->error_concealment != this_obj->error_concealment) {
-            goto not_equal;
-        }
-        ret = TRUE;
-
-not_equal:
-
-        if (TRUE != ret) {
-            return ret;
-        }
-
-        /* chaining up. */
-        ret = MixVideoConfigParams::equal(obj);
-    }
-
-    return ret;
-}
-
-MixParams* MixVideoConfigParamsDec::dup() const {
-    MixParams *ret = NULL;
-    MixVideoConfigParamsDec *duplicate = new MixVideoConfigParamsDec();
-    if (FALSE != copy(duplicate)) {
-        ret = duplicate;
-    } else {
-        mix_videoconfigparamsdec_unref(duplicate);
-    }
-    return ret;
-}
-
-MixVideoConfigParamsDec *
-mix_videoconfigparamsdec_new(void) {
-    return new MixVideoConfigParamsDec();
-}
-
-MixVideoConfigParamsDec *
-mix_videoconfigparamsdec_ref(MixVideoConfigParamsDec * mix) {
-    return (MixVideoConfigParamsDec *) mix_params_ref(MIX_PARAMS(mix));
-}
-
-
-/* TODO: Add getters and setters for other properties.
The following is incomplete */ - -MIX_RESULT mix_videoconfigparamsdec_set_frame_order_mode( - MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->frame_order_mode = frame_order_mode; - LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_frame_order_mode( - MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, frame_order_mode); - *frame_order_mode = obj->frame_order_mode; - LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_header( - MixVideoConfigParamsDec * obj, MixIOVec * header) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - - if (!header) { - return MIX_RESULT_NULL_PTR; - } - - if (header->data && header->buffer_size) { - obj->header.data = (uchar*)malloc(header->buffer_size); -// obj->header.data = (uchar*)memdup(header->data, header->buffer_size); - if (!obj->header.data) { - return MIX_RESULT_NO_MEMORY; - } - memcpy(obj->header.data,header->data,header->buffer_size); - obj->header.buffer_size = header->buffer_size; - obj->header.data_size = header->data_size; - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_header( - MixVideoConfigParamsDec * obj, MixIOVec ** header) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, header); - - if (obj->header.data && obj->header.buffer_size) { - *header = (MixIOVec*)malloc(sizeof(MixIOVec)); - if (*header == NULL) { - return MIX_RESULT_NO_MEMORY; - } - (*header)->data = (uchar*)malloc(obj->header.buffer_size); - if ((*header)->data == NULL) { - free(*header); - *header = NULL; - return MIX_RESULT_NO_MEMORY; - } - memcpy((*header)->data,obj->header.data, obj->header.buffer_size); -// (*header)->data = (uchar*)memdup(obj->header.data, obj->header.buffer_size); - (*header)->buffer_size = obj->header.buffer_size; - (*header)->data_size = obj->header.data_size; - } else { - *header = NULL; - } - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT mix_videoconfigparamsdec_set_mime_type( - MixVideoConfigParamsDec * obj, const char * mime_type) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - if (!mime_type) { - return MIX_RESULT_NULL_PTR; - } - if (obj->mime_type) { - free(obj->mime_type); - obj->mime_type = NULL; - } - obj->mime_type = strdup(mime_type); - if (!obj->mime_type) { - return MIX_RESULT_NO_MEMORY; - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_mime_type( - MixVideoConfigParamsDec * obj, char ** mime_type) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, mime_type); - if (!obj->mime_type) { - *mime_type = NULL; - return MIX_RESULT_SUCCESS; - } - *mime_type = strdup(obj->mime_type); - if (!*mime_type) { - return MIX_RESULT_NO_MEMORY; - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_frame_rate( - MixVideoConfigParamsDec * obj, uint frame_rate_num, - uint frame_rate_denom) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->frame_rate_num = frame_rate_num; - obj->frame_rate_denom = frame_rate_denom; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_frame_rate( - MixVideoConfigParamsDec * obj, uint * frame_rate_num, - uint * frame_rate_denom) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, frame_rate_num, frame_rate_denom); - *frame_rate_num = obj->frame_rate_num; - 
*frame_rate_denom = obj->frame_rate_denom; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_picture_res( - MixVideoConfigParamsDec * obj, uint picture_width, - uint picture_height) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->picture_width = picture_width; - obj->picture_height = picture_height; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_picture_res( - MixVideoConfigParamsDec * obj, uint * picture_width, - uint * picture_height) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, picture_width, picture_height); - *picture_width = obj->picture_width; - *picture_height = obj->picture_height; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_raw_format( - MixVideoConfigParamsDec * obj, uint raw_format) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - - /* TODO: check if the value of raw_format is valid */ - obj->raw_format = raw_format; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_raw_format( - MixVideoConfigParamsDec * obj, uint *raw_format) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, raw_format); - *raw_format = obj->raw_format; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_rate_control( - MixVideoConfigParamsDec * obj, uint rate_control) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - - /* TODO: check if the value of rate_control is valid */ - obj->rate_control = rate_control; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_rate_control( - MixVideoConfigParamsDec * obj, uint *rate_control) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, rate_control); - *rate_control = obj->rate_control; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_buffer_pool_size( - MixVideoConfigParamsDec * obj, uint bufpoolsize) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->mixbuffer_pool_size = bufpoolsize; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_buffer_pool_size( - MixVideoConfigParamsDec * obj, uint *bufpoolsize) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, bufpoolsize); - *bufpoolsize = obj->mixbuffer_pool_size; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation( - MixVideoConfigParamsDec * obj, uint extra_surface_allocation) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->extra_surface_allocation = extra_surface_allocation; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation( - MixVideoConfigParamsDec * obj, uint *extra_surface_allocation) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, extra_surface_allocation); - *extra_surface_allocation = obj->extra_surface_allocation; - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videoconfigparamsdec_set_video_range( - MixVideoConfigParamsDec * obj, uint8 video_range) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->video_range = video_range; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_video_range( - MixVideoConfigParamsDec * obj, uint8 *video_range) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, video_range); - *video_range = obj->video_range; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_color_matrix( - MixVideoConfigParamsDec * obj, uint8 color_matrix) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->color_matrix = color_matrix; - return 
MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_color_matrix( - MixVideoConfigParamsDec * obj, uint8 *color_matrix) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, color_matrix); - *color_matrix = obj->color_matrix; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_bit_rate( - MixVideoConfigParamsDec * obj, uint bit_rate) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->bit_rate = bit_rate; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_bit_rate( - MixVideoConfigParamsDec * obj, uint *bit_rate) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, bit_rate); - *bit_rate = obj->bit_rate; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_pixel_aspect_ratio( - MixVideoConfigParamsDec * obj, uint par_num, uint par_denom) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->par_num = par_num; - obj->par_denom = par_denom; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_pixel_aspect_ratio( - MixVideoConfigParamsDec * obj, uint * par_num, uint * par_denom) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, par_num, par_denom); - *par_num = obj->par_num; - *par_denom = obj->par_denom; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_cropping_info(MixVideoConfigParamsDec * obj, - uint crop_left, uint crop_right, uint crop_top, uint crop_bottom) -{ - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - obj->crop_left = crop_left; - obj->crop_right = crop_right; - obj->crop_top = crop_top; - obj->crop_bottom = crop_bottom; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_cropping_info(MixVideoConfigParamsDec * obj, - uint *crop_left, uint *crop_right, uint *crop_top, uint *crop_bottom) -{ - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, crop_left, crop_right); - if(!crop_top || !crop_bottom ) { - return MIX_RESULT_NULL_PTR; - } - *crop_left = obj->crop_left; - *crop_right = obj->crop_right; - *crop_top = obj->crop_top; - *crop_bottom = obj->crop_bottom; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_set_error_concealment ( - MixVideoConfigParamsDec * obj, bool error_concealment) { - MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj); - - obj->error_concealment = error_concealment; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsdec_get_error_concealment( - MixVideoConfigParamsDec * obj, bool *error_concealment) { - MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, error_concealment); - - *error_concealment = obj->error_concealment; - return MIX_RESULT_SUCCESS; -} - diff --git a/mix_video/src/mixvideoconfigparamsdec.h b/mix_video/src/mixvideoconfigparamsdec.h deleted file mode 100644 index a3778ee..0000000 --- a/mix_video/src/mixvideoconfigparamsdec.h +++ /dev/null @@ -1,517 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-#ifndef __MIX_VIDEOCONFIGPARAMSDEC_H__
-#define __MIX_VIDEOCONFIGPARAMSDEC_H__
-
-#include "mixvideoconfigparams.h"
-#include "mixvideodef.h"
-
-
-/**
- * MIX_VIDEOCONFIGPARAMSDEC:
- * @obj: object to be type-casted.
- */
-#define MIX_VIDEOCONFIGPARAMSDEC(obj) (reinterpret_cast<MixVideoConfigParamsDec*>(obj))
-
-/**
- * MIX_IS_VIDEOCONFIGPARAMSDEC:
- * @obj: an object.
- *
- * Checks if the given object is an instance of #MixVideoConfigParamsDec
- */
-#define MIX_IS_VIDEOCONFIGPARAMSDEC(obj) ((NULL != MIX_VIDEOCONFIGPARAMSDEC(obj)) ? TRUE : FALSE)
-
-/**
- * MixVideoConfigParamsDec:
- *
- * MI-X VideoConfig Parameter object
- */
-class MixVideoConfigParamsDec : public MixVideoConfigParams {
-public:
-    MixVideoConfigParamsDec();
-    ~MixVideoConfigParamsDec();
-    virtual bool copy(MixParams *target) const;
-    virtual bool equal(MixParams* obj) const;
-    virtual MixParams* dup() const;
-public:
-    /*< public > */
-    //MixVideoConfigParams parent;
-
-    /*< public > */
-
-    /* Frame re-ordering mode */
-    MixFrameOrderMode frame_order_mode;
-
-    /* Stream header information, such as
-     * codec_data in GStreamer pipelines */
-    MixIOVec header;
-
-    /* Mime type */
-    char * mime_type;
-
-    /* Frame rate numerator value */
-    uint frame_rate_num;
-
-    /* Frame rate denominator value */
-    uint frame_rate_denom;
-
-    /* Picture width */
-    ulong picture_width;
-
-    /* Picture height */
-    ulong picture_height;
-
-    /* Render target format */
-    uint raw_format;
-
-    /* Rate control: CBR, VBR, none. Only valid for encoding.
-     * This should be set to none for decoding. */
-    uint rate_control;
-
-    /* Size of pool of MixBuffers to allocate */
-    uint mixbuffer_pool_size;
-
-    /* Extra surfaces for MixVideoFrame objects to be allocated */
-    uint extra_surface_allocation;
-
-    /* video range, 0 for short range and 1 for full range, output only */
-    uint8 video_range;
-
-    /*
-       color matrix, output only. Possible values defined in va.h
-       #define VA_SRC_BT601 0x00000010
-       #define VA_SRC_BT709 0x00000020
-       #define VA_SRC_SMPTE_240 0x00000040
-    */
-    uint8 color_matrix;
-
-    /* bit rate in bps, output only */
-    uint bit_rate;
-
-    /* Pixel aspect ratio numerator value */
-    uint par_num;
-
-    /* Pixel aspect ratio denominator value */
-    uint par_denom;
-
-    uint crop_left;
-    uint crop_right;
-    uint crop_top;
-    uint crop_bottom;
-
-    /* Error concealment enabled/disabled */
-    bool error_concealment;
-
-    /* Reserved for future use */
-    void *reserved1;
-
-    /* Reserved for future use */
-    void *reserved2;
-
-    /* Reserved for future use */
-    void *reserved3;
-
-    /* Reserved for future use */
-    void *reserved4;
-};
-
-/**
- * mix_videoconfigparamsdec_get_type:
- * @returns: type
- *
- * Get the type of object.
- */ -//GType mix_videoconfigparamsdec_get_type(void); - -/** - * mix_videoconfigparamsdec_new: - * @returns: A newly allocated instance of #MixVideoConfigParamsDec - * - * Use this method to create new instance of #MixVideoConfigParamsDec - */ -MixVideoConfigParamsDec *mix_videoconfigparamsdec_new(void); -/** - * mix_videoconfigparamsdec_ref: - * @mix: object to add reference - * @returns: the #MixVideoConfigParamsDec instance where reference count has been increased. - * - * Add reference count. - */ -MixVideoConfigParamsDec *mix_videoconfigparamsdec_ref(MixVideoConfigParamsDec * mix); - -/** - * mix_videoconfigparamsdec_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -#define mix_videoconfigparamsdec_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/* Class Methods */ - - -/** - * mix_videoconfigparamsdec_set_frame_order_mode: - * @obj: #MixVideoConfigParamsDec object - * @frame_order_mode: Frame re-ordering mode - * @returns: Common Video Error Return Codes - * - * Set frame order mode. - */ -MIX_RESULT mix_videoconfigparamsdec_set_frame_order_mode( - MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode); - -/** - * mix_videoconfigparamsdec_get_frame_order_mode: - * @obj: #MixVideoConfigParamsDec object - * @frame_order_mode: pointer to frame re-ordering mode - * @returns: Common Video Error Return Codes - * - * Get frame order mode. - */ -MIX_RESULT mix_videoconfigparamsdec_get_frame_order_mode( - MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode); - -/** - * mix_videoconfigparamsdec_set_header: - * @obj: #MixVideoConfigParamsDec object - * @header: Stream header information, such as codec_data in GStreamer pipelines - * @returns: Common Video Error Return Codes - * - * Set stream header information. - */ -MIX_RESULT mix_videoconfigparamsdec_set_header(MixVideoConfigParamsDec * obj, - MixIOVec *header); - -/** - * mix_videoconfigparamsdec_get_header: - * @obj: #MixVideoConfigParamsDec object - * @header: Pointer to pointer of Stream header information - * @returns: Common Video Error Return Codes - * - * Get stream header information. 
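- *
- * An illustrative retrieval sketch (the malloc/free ownership shown follows
- * the note below; obj is an assumed variable and parse_codec_data() is a
- * hypothetical consumer):
- * |[
- * MixIOVec *hdr = NULL;
- * if (MIX_RESULT_SUCCESS == mix_videoconfigparamsdec_get_header(obj, &hdr)
- *     && NULL != hdr) {
- *   parse_codec_data(hdr->data, hdr->data_size);
- *   free(hdr->data);
- *   free(hdr);
- * }
- * ]|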
- *
- * The caller is responsible for freeing (*header)->data and *header with free()
- *
- */
-MIX_RESULT mix_videoconfigparamsdec_get_header(MixVideoConfigParamsDec * obj,
-        MixIOVec ** header);
-
-/**
- * mix_videoconfigparamsdec_set_mime_type:
- * @obj: #MixVideoConfigParamsDec object
- * @mime_type: mime type
- * @returns: Common Video Error Return Codes
- *
- * Set stream mime type
- */
-MIX_RESULT mix_videoconfigparamsdec_set_mime_type(MixVideoConfigParamsDec * obj,
-        const char * mime_type);
-
-/**
- * mix_videoconfigparamsdec_get_mime_type:
- * @obj: #MixVideoConfigParamsDec object
- * @mime_type: Pointer to pointer of type char
- * @returns: Common Video Error Return Codes
- *
- * Get mime type
- *
- * The caller is responsible for freeing *mime_type with free()
- *
- */
-MIX_RESULT mix_videoconfigparamsdec_get_mime_type(MixVideoConfigParamsDec * obj,
-        char ** mime_type);
-
-/**
- * mix_videoconfigparamsdec_set_frame_rate:
- * @obj: #MixVideoConfigParamsDec object
- * @frame_rate_num: Frame rate numerator value
- * @frame_rate_denom: Frame rate denominator value
- * @returns: Common Video Error Return Codes
- *
- * Set frame rate
- */
-MIX_RESULT mix_videoconfigparamsdec_set_frame_rate(MixVideoConfigParamsDec * obj,
-        uint frame_rate_num, uint frame_rate_denom);
-
-/**
- * mix_videoconfigparamsdec_get_frame_rate:
- * @obj: #MixVideoConfigParamsDec object
- * @frame_rate_num: Frame rate numerator value to be returned
- * @frame_rate_denom: Frame rate denominator value to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get frame rate
- */
-MIX_RESULT mix_videoconfigparamsdec_get_frame_rate(MixVideoConfigParamsDec * obj,
-        uint * frame_rate_num, uint * frame_rate_denom);
-
-/**
- * mix_videoconfigparamsdec_set_picture_res:
- * @obj: #MixVideoConfigParamsDec object
- * @picture_width: Picture width
- * @picture_height: Picture height
- * @returns: Common Video Error Return Codes
- *
- * Set video resolution
- */
-MIX_RESULT mix_videoconfigparamsdec_set_picture_res(MixVideoConfigParamsDec * obj,
-        uint picture_width, uint picture_height);
-
-/**
- * mix_videoconfigparamsdec_get_picture_res:
- * @obj: #MixVideoConfigParamsDec object
- * @picture_width: Picture width to be returned
- * @picture_height: Picture height to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get video resolution
- */
-MIX_RESULT mix_videoconfigparamsdec_get_picture_res(MixVideoConfigParamsDec * obj,
-        uint * picture_width, uint * picture_height);
-
-/**
- * mix_videoconfigparamsdec_set_raw_format:
- * @obj: #MixVideoConfigParamsDec object
- * @raw_format: Render target format
- * @returns: Common Video Error Return Codes
- *
- * Set Render target format
- */
-MIX_RESULT mix_videoconfigparamsdec_set_raw_format(MixVideoConfigParamsDec * obj,
-        uint raw_format);
-
-/**
- * mix_videoconfigparamsdec_get_raw_format:
- * @obj: #MixVideoConfigParamsDec object
- * @raw_format: Render target format to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get Render target format
- */
-MIX_RESULT mix_videoconfigparamsdec_get_raw_format(MixVideoConfigParamsDec * obj,
-        uint *raw_format);
-
-/**
- * mix_videoconfigparamsdec_set_rate_control:
- * @obj: #MixVideoConfigParamsDec object
- * @rate_control: Rate control: CBR, VBR, none. Only valid for encoding.
- *        This should be set to none for decoding.
- * @returns: Common Video Error Return Codes
- *
- * Set rate control
- */
-MIX_RESULT mix_videoconfigparamsdec_set_rate_control(MixVideoConfigParamsDec * obj,
-        uint rate_control);
-
-/**
- * mix_videoconfigparamsdec_get_rate_control:
- * @obj: #MixVideoConfigParamsDec object
- * @rate_control: Rate control to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get rate control
- */
-MIX_RESULT mix_videoconfigparamsdec_get_rate_control(MixVideoConfigParamsDec * obj,
-        uint *rate_control);
-
-/**
- * mix_videoconfigparamsdec_set_buffer_pool_size:
- * @obj: #MixVideoConfigParamsDec object
- * @bufpoolsize: Size of pool of #MixBuffers to allocate
- * @returns: Common Video Error Return Codes
- *
- * Set buffer pool size
- */
-MIX_RESULT mix_videoconfigparamsdec_set_buffer_pool_size(MixVideoConfigParamsDec * obj,
-        uint bufpoolsize);
-
-/**
- * mix_videoconfigparamsdec_get_buffer_pool_size:
- * @obj: #MixVideoConfigParamsDec object
- * @bufpoolsize: Size of pool of #MixBuffers to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get buffer pool size
- */
-MIX_RESULT mix_videoconfigparamsdec_get_buffer_pool_size(MixVideoConfigParamsDec * obj,
-        uint *bufpoolsize);
-
-/**
- * mix_videoconfigparamsdec_set_extra_surface_allocation:
- * @obj: #MixVideoConfigParamsDec object
- * @extra_surface_allocation: Extra surfaces for #MixVideoFrame objects to be allocated
- * @returns: Common Video Error Return Codes
- *
- * Set extra surface allocation
- */
-MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation(MixVideoConfigParamsDec * obj,
-        uint extra_surface_allocation);
-
-/**
- * mix_videoconfigparamsdec_get_extra_surface_allocation:
- * @obj: #MixVideoConfigParamsDec object
- * @extra_surface_allocation: Extra surfaces for #MixVideoFrame objects to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get extra surface allocation
- */
-MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation(MixVideoConfigParamsDec * obj,
-        uint *extra_surface_allocation);
-
-
-/**
- * mix_videoconfigparamsdec_set_video_range:
- * @obj: #MixVideoConfigParamsDec object
- * @video_range: 1 for full video range, 0 for short video range.
- * @returns: Common Video Error Return Codes
- *
- * Set video range
- */
-MIX_RESULT mix_videoconfigparamsdec_set_video_range(MixVideoConfigParamsDec * obj,
-        uint8 video_range);
-
-/**
- * mix_videoconfigparamsdec_get_video_range:
- * @obj: #MixVideoConfigParamsDec object
- * @video_range: video range to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get video range
- */
-MIX_RESULT mix_videoconfigparamsdec_get_video_range(MixVideoConfigParamsDec * obj,
-        uint8 *video_range);
-
-
-/**
- * mix_videoconfigparamsdec_set_color_matrix:
- * @obj: #MixVideoConfigParamsDec object
- * @color_matrix: BT601 or BT709, defined in va.h. 0 for any other including unspecified color matrix.
- * @returns: Common Video Error Return Codes
- *
- * Set color matrix
- */
-MIX_RESULT mix_videoconfigparamsdec_set_color_matrix(MixVideoConfigParamsDec * obj,
-        uint8 color_matrix);
-
-/**
- * mix_videoconfigparamsdec_get_color_matrix:
- * @obj: #MixVideoConfigParamsDec object
- * @color_matrix: color matrix to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get color matrix
- */
-MIX_RESULT mix_videoconfigparamsdec_get_color_matrix(MixVideoConfigParamsDec * obj,
-        uint8 *color_matrix);
-
-
-/**
- * mix_videoconfigparamsdec_set_bit_rate:
- * @obj: #MixVideoConfigParamsDec object
- * @bit_rate: bit rate in bits per second.
- * @returns: Common Video Error Return Codes
- *
- * Set bit rate
- */
-MIX_RESULT mix_videoconfigparamsdec_set_bit_rate(MixVideoConfigParamsDec * obj,
-        uint bit_rate);
-
-/**
- * mix_videoconfigparamsdec_get_bit_rate:
- * @obj: #MixVideoConfigParamsDec object
- * @bit_rate: bit rate to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get bit rate
- */
-MIX_RESULT mix_videoconfigparamsdec_get_bit_rate(MixVideoConfigParamsDec * obj,
-        uint *bit_rate);
-
-
-
-/**
- * mix_videoconfigparamsdec_set_pixel_aspect_ratio:
- * @obj: #MixVideoConfigParamsDec object
- * @par_num: Pixel aspect ratio numerator value
- * @par_denom: Pixel aspect ratio denominator value
- * @returns: Common Video Error Return Codes
- *
- * Set pixel aspect ratio
- */
-MIX_RESULT mix_videoconfigparamsdec_set_pixel_aspect_ratio(MixVideoConfigParamsDec * obj,
-        uint par_num, uint par_denom);
-
-/**
- * mix_videoconfigparamsdec_get_pixel_aspect_ratio:
- * @obj: #MixVideoConfigParamsDec object
- * @par_num: Pixel aspect ratio numerator value to be returned
- * @par_denom: Pixel aspect ratio denominator value to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get pixel aspect ratio
- */
-MIX_RESULT mix_videoconfigparamsdec_get_pixel_aspect_ratio(MixVideoConfigParamsDec * obj,
-        uint * par_num, uint * par_denom);
-
-/**
- * mix_videoconfigparamsdec_set_cropping_info:
- * @obj: #MixVideoConfigParamsDec object
- * @crop_left: left cropping value
- * @crop_right: right cropping value
- * @crop_top: top cropping value
- * @crop_bottom: bottom cropping value
- * @returns: Common Video Error Return Codes
- *
- * Set cropping information
- */
-MIX_RESULT mix_videoconfigparamsdec_set_cropping_info(MixVideoConfigParamsDec * obj,
-        uint crop_left, uint crop_right, uint crop_top, uint crop_bottom);
-
-/**
- * mix_videoconfigparamsdec_get_cropping_info:
- * @obj: #MixVideoConfigParamsDec object
- * @crop_left: left cropping value
- * @crop_right: right cropping value
- * @crop_top: top cropping value
- * @crop_bottom: bottom cropping value
- * @returns: Common Video Error Return Codes
- *
- * Get cropping information
- */
-MIX_RESULT mix_videoconfigparamsdec_get_cropping_info(MixVideoConfigParamsDec * obj,
-        uint *crop_left, uint *crop_right, uint *crop_top, uint *crop_bottom);
-
-
-/**
- * mix_videoconfigparamsdec_set_error_concealment:
- * @obj: #MixVideoConfigParamsDec object
- * @error_concealment: A flag to indicate whether error concealment is enabled for decoder
- * @returns: Common Video Error Return Codes
- *
- * Set the flag that indicates whether error concealment is enabled
- */
-MIX_RESULT mix_videoconfigparamsdec_set_error_concealment (MixVideoConfigParamsDec * obj,
-        bool error_concealment);
-
-/**
- * mix_videoconfigparamsdec_get_error_concealment:
- * @obj: #MixVideoConfigParamsDec object
- * @error_concealment: the flag to be returned that
indicates error concealment is enabled for decoder - * @returns: Common Video Error Return Codes - * - * Get the flag that indicates whether error concealment is enabled - */ -MIX_RESULT mix_videoconfigparamsdec_get_error_concealment(MixVideoConfigParamsDec * obj, - bool *error_concealment); - - -/* TODO: Add getters and setters for other properties */ - -#endif /* __MIX_VIDEOCONFIGPARAMSDEC_H__ */ diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.cpp b/mix_video/src/mixvideoconfigparamsdec_h264.cpp deleted file mode 100644 index bfbcacc..0000000 --- a/mix_video/src/mixvideoconfigparamsdec_h264.cpp +++ /dev/null @@ -1,105 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -/** - * SECTION:mixvideoconfigparamsdec_h264 - * @short_description: MI-X Video H.264 Decode Configuration Parameter - * - * MI-X video H.264 decode configuration parameter objects. 
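- *
- * A minimal configuration sketch (illustrative only; the mime type string and
- * the resolution and frame rate values are placeholders, and error handling
- * is elided before the object is passed to mix_video_configure()):
- * |[
- * MixVideoConfigParamsDecH264 *cfg = mix_videoconfigparamsdec_h264_new();
- * MixVideoConfigParamsDec *base = MIX_VIDEOCONFIGPARAMSDEC(cfg);
- *
- * mix_videoconfigparamsdec_set_mime_type(base, "video/x-h264");
- * mix_videoconfigparamsdec_set_picture_res(base, 1280, 720);
- * mix_videoconfigparamsdec_set_frame_rate(base, 30, 1);
- * mix_videoconfigparamsdec_h264_set_va_setup_flag(cfg, FALSE);
- *
- * mix_videoconfigparamsdec_h264_unref(cfg);
- * ]|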
- */
-
-#include "mixvideoconfigparamsdec_h264.h"
-
-MixVideoConfigParamsDecH264::MixVideoConfigParamsDecH264()
-    :va_setup_flag(FALSE)
-    ,reserved1(NULL)
-    ,reserved2(NULL)
-    ,reserved3(NULL)
-    ,reserved4(NULL) {
-}
-MixVideoConfigParamsDecH264::~MixVideoConfigParamsDecH264() {
-}
-
-bool MixVideoConfigParamsDecH264::copy(MixParams *target) const {
-    bool ret = FALSE;
-    MixVideoConfigParamsDecH264 * this_target = MIX_VIDEOCONFIGPARAMSDEC_H264(target);
-
-    /* only dereference the target after the NULL check */
-    if (NULL != this_target) {
-        this_target->va_setup_flag = this->va_setup_flag;
-        ret = MixVideoConfigParamsDec::copy(target);
-    }
-    return ret;
-}
-
-bool MixVideoConfigParamsDecH264::equal(MixParams* obj) const {
-    bool ret = FALSE;
-    MixVideoConfigParamsDecH264 * this_obj = MIX_VIDEOCONFIGPARAMSDEC_H264(obj);
-
-    /* only dereference the object after the NULL check */
-    if (NULL == this_obj) {
-        return ret;
-    }
-
-    if (this->va_setup_flag != this_obj->va_setup_flag) {
-        goto not_equal;
-    }
-
-    ret = TRUE;
-
-not_equal:
-
-    if (ret != TRUE) {
-        return ret;
-    }
-    ret = MixVideoConfigParamsDec::equal(this_obj);
-    return ret;
-}
-
-MixParams* MixVideoConfigParamsDecH264::dup() const {
-    MixParams *ret = new MixVideoConfigParamsDecH264();
-    if (NULL != ret) {
-        if (FALSE == copy(ret)) {
-            ret->Unref();
-            ret = NULL;
-        }
-    }
-    return ret;
-}
-
-MixVideoConfigParamsDecH264 *
-mix_videoconfigparamsdec_h264_new (void) {
-    return new MixVideoConfigParamsDecH264();
-}
-
-MixVideoConfigParamsDecH264*
-mix_videoconfigparamsdec_h264_ref (MixVideoConfigParamsDecH264 * mix) {
-    if (NULL != mix)
-        mix->Ref();
-    return mix;
-}
-
-#define MIX_VIDEOCONFIGPARAMSDEC_H264_SETTER_CHECK_INPUT(obj) \
-    if(!obj) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSDEC_H264(obj)) return MIX_RESULT_FAIL; \
-
-#define MIX_VIDEOCONFIGPARAMSDEC_H264_GETTER_CHECK_INPUT(obj, prop) \
-    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSDEC_H264(obj)) return MIX_RESULT_FAIL; \
-
-
-MIX_RESULT mix_videoconfigparamsdec_h264_set_va_setup_flag (MixVideoConfigParamsDecH264 * obj,
-        bool va_setup_flag) {
-
-    MIX_VIDEOCONFIGPARAMSDEC_H264_SETTER_CHECK_INPUT (obj);
-    obj->va_setup_flag = va_setup_flag;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_h264_get_va_setup_flag (MixVideoConfigParamsDecH264 * obj,
-        bool *va_setup_flag) {
-
-    MIX_VIDEOCONFIGPARAMSDEC_H264_GETTER_CHECK_INPUT (obj, va_setup_flag);
-    *va_setup_flag = obj->va_setup_flag;
-    return MIX_RESULT_SUCCESS;
-}
-
diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.h b/mix_video/src/mixvideoconfigparamsdec_h264.h
deleted file mode 100644
index e461765..0000000
--- a/mix_video/src/mixvideoconfigparamsdec_h264.h
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
-INTEL CONFIDENTIAL
-Copyright 2009 Intel Corporation All Rights Reserved.
-The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
-No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
-*/
-
-#ifndef __MIX_VIDEOCONFIGPARAMSDEC_H264_H__
-#define __MIX_VIDEOCONFIGPARAMSDEC_H264_H__
-
-#include "mixvideoconfigparamsdec.h"
-#include "mixvideodef.h"
-
-/**
-* MIX_VIDEOCONFIGPARAMSDEC_H264:
-* @obj: object to be type-casted.
-*/
-#define MIX_VIDEOCONFIGPARAMSDEC_H264(obj) (reinterpret_cast<MixVideoConfigParamsDecH264*>(obj))
-
-/**
-* MIX_IS_VIDEOCONFIGPARAMSDEC_H264:
-* @obj: an object.
-*
-* Checks if the given object is an instance of #MixVideoConfigParamsDecH264
-*/
-#define MIX_IS_VIDEOCONFIGPARAMSDEC_H264(obj) ((NULL != MIX_VIDEOCONFIGPARAMSDEC_H264(obj)) ? TRUE : FALSE)
-
-
-/**
-* MixVideoConfigParamsDecH264:
-*
-* MI-X VideoConfig Parameter object
-*/
-class MixVideoConfigParamsDecH264 : public MixVideoConfigParamsDec
-{
-public:
-    MixVideoConfigParamsDecH264();
-    ~MixVideoConfigParamsDecH264();
-    virtual bool copy(MixParams *target) const;
-    virtual bool equal(MixParams* obj) const;
-    virtual MixParams* dup() const;
-public:
-    /*< public > */
-
-    /* TODO: Add H.264 configuration parameters */
-    bool va_setup_flag;
-
-    /* Reserved for future use */
-    void *reserved1;
-
-    /* Reserved for future use */
-    void *reserved2;
-
-    /* Reserved for future use */
-    void *reserved3;
-
-    /* Reserved for future use */
-    void *reserved4;
-};
-
-
-
-/**
-* mix_videoconfigparamsdec_h264_get_type:
-* @returns: type
-*
-* Get the type of object.
-*/
-//GType mix_videoconfigparamsdec_h264_get_type (void);
-
-/**
-* mix_videoconfigparamsdec_h264_new:
-* @returns: A newly allocated instance of #MixVideoConfigParamsDecH264
-*
-* Use this method to create a new instance of #MixVideoConfigParamsDecH264
-*/
-MixVideoConfigParamsDecH264 *mix_videoconfigparamsdec_h264_new (void);
-/**
-* mix_videoconfigparamsdec_h264_ref:
-* @mix: object to add reference
-* @returns: the #MixVideoConfigParamsDecH264 instance where reference count has been increased.
-*
-* Add reference count.
-*/
-MixVideoConfigParamsDecH264
-* mix_videoconfigparamsdec_h264_ref (MixVideoConfigParamsDecH264 * mix);
-
-/**
-* mix_videoconfigparamsdec_h264_unref:
-* @obj: object to unref.
-*
-* Decrement reference count of the object.
-*/
-#define mix_videoconfigparamsdec_h264_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-/* Class Methods */
-
-/**
- * mix_videoconfigparamsdec_h264_set_va_setup_flag:
- * @obj: #MixVideoConfigParamsDecH264 object
- * @va_setup_flag: The flag that enables/disables setting up VA directly
- * @returns: Common Video Error Return Codes
- *
- * Set the flag that enables/disables setting up VA directly
- */
-MIX_RESULT mix_videoconfigparamsdec_h264_set_va_setup_flag (MixVideoConfigParamsDecH264 * obj,
-        bool va_setup_flag);
-
-/**
- * mix_videoconfigparamsdec_h264_get_va_setup_flag:
- * @obj: #MixVideoConfigParamsDecH264 object
- * @va_setup_flag: The flag that enables/disables setting up VA directly, to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get the flag that enables/disables setting up VA directly
- */
-MIX_RESULT mix_videoconfigparamsdec_h264_get_va_setup_flag (MixVideoConfigParamsDecH264 * obj,
-        bool *va_setup_flag);
-
-#endif /* __MIX_VIDEOCONFIGPARAMSDEC_H264_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.cpp b/mix_video/src/mixvideoconfigparamsdec_mp42.cpp
deleted file mode 100644
index 98c564d..0000000
--- a/mix_video/src/mixvideoconfigparamsdec_mp42.cpp
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-/**
- * SECTION:mixvideoconfigparamsdec_mp42
- * @short_description: MI-X Video MPEG-4 Part 2 Decode Configuration Parameter
- *
- * MI-X video MPEG-4 Part 2 decode configuration parameter objects.
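- *
- * A minimal usage sketch (illustrative only, not part of the original
- * sources; the version numbers shown are placeholders):
- * |[
- * MixVideoConfigParamsDecMP42 *params = mix_videoconfigparamsdec_mp42_new();
- * if (NULL != params) {
- *     mix_videoconfigparamsdec_mp42_set_mpegversion(params, 4);
- *     mix_videoconfigparamsdec_mp42_set_divxversion(params, 5);
- *     mix_videoconfigparamsdec_mp42_unref(params);
- * }
- * ]|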
- */
-
-
-#include "mixvideolog.h"
-#include "mixvideoconfigparamsdec_mp42.h"
-
-#define MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT(obj) \
-    if(!obj) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) return MIX_RESULT_FAIL; \
-
-#define MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT(obj, prop) \
-    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) return MIX_RESULT_FAIL; \
-
-MixVideoConfigParamsDecMP42::MixVideoConfigParamsDecMP42()
-    :mpegversion(0)
-    ,divxversion(0)
-    ,reserved1(NULL)
-    ,reserved2(NULL)
-    ,reserved3(NULL)
-    ,reserved4(NULL) {
-}
-
-MixVideoConfigParamsDecMP42::~MixVideoConfigParamsDecMP42() {
-}
-
-bool MixVideoConfigParamsDecMP42::copy(MixParams *target) const {
-    bool ret = FALSE;
-    MixVideoConfigParamsDecMP42 * this_target = MIX_VIDEOCONFIGPARAMSDEC_MP42(target);
-    if (NULL != this_target) {
-        this_target->mpegversion = this->mpegversion;
-        this_target->divxversion = this->divxversion;
-        ret = MixVideoConfigParamsDec::copy(target);
-    }
-    return ret;
-}
-
-bool MixVideoConfigParamsDecMP42::equal(MixParams* obj) const {
-    bool ret = FALSE;
-    MixVideoConfigParamsDecMP42 * this_obj = MIX_VIDEOCONFIGPARAMSDEC_MP42(obj);
-    /* Compare the MP42-specific members before chaining up to the base class. */
-    if (NULL != this_obj &&
-            mpegversion == this_obj->mpegversion &&
-            divxversion == this_obj->divxversion)
-        ret = MixVideoConfigParamsDec::equal(this_obj);
-    return ret;
-}
-
-MixParams* MixVideoConfigParamsDecMP42::dup() const {
-    MixParams *ret = new MixVideoConfigParamsDecMP42();
-    if (NULL != ret) {
-        if (FALSE == copy(ret)) {
-            ret->Unref();
-            ret = NULL;
-        }
-    }
-    return ret;
-}
-
-
-MixVideoConfigParamsDecMP42 *
-mix_videoconfigparamsdec_mp42_new(void) {
-    return new MixVideoConfigParamsDecMP42();
-}
-
-MixVideoConfigParamsDecMP42 *
-mix_videoconfigparamsdec_mp42_ref(MixVideoConfigParamsDecMP42 * mix) {
-    if (NULL != mix)
-        mix->Ref();
-    return mix;
-}
-
-/* TODO: Add getters and setters for properties if any */
-MIX_RESULT mix_videoconfigparamsdec_mp42_set_mpegversion(
-        MixVideoConfigParamsDecMP42 *obj, uint version) {
-    MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT (obj);
-    obj->mpegversion = version;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsdec_mp42_get_mpegversion(
-        MixVideoConfigParamsDecMP42 *obj, uint *version) {
-    MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT (obj, version);
-    *version = obj->mpegversion;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion(
-        MixVideoConfigParamsDecMP42 *obj, uint version) {
-    MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT (obj);
-    obj->divxversion = version;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion(
-        MixVideoConfigParamsDecMP42 *obj, uint *version) {
-    MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT (obj, version);
-    *version = obj->divxversion;
-    return MIX_RESULT_SUCCESS;
-}
-
diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.h b/mix_video/src/mixvideoconfigparamsdec_mp42.h
deleted file mode 100644
index 5969079..0000000
--- a/mix_video/src/mixvideoconfigparamsdec_mp42.h
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-#ifndef __MIX_VIDEOCONFIGPARAMSDEC_MP42_H__
-#define __MIX_VIDEOCONFIGPARAMSDEC_MP42_H__
-
-#include "mixvideoconfigparamsdec.h"
-#include "mixvideodef.h"
-
-/**
- * MIX_VIDEOCONFIGPARAMSDEC_MP42:
- * @obj: object to be type-casted.
- */
-#define MIX_VIDEOCONFIGPARAMSDEC_MP42(obj) (reinterpret_cast<MixVideoConfigParamsDecMP42*>(obj))
-
-/**
- * MIX_IS_VIDEOCONFIGPARAMSDEC_MP42:
- * @obj: an object.
- *
- * Checks if the given object is an instance of #MixVideoConfigParamsDecMP42
- */
-#define MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj) ((NULL != MIX_VIDEOCONFIGPARAMSDEC_MP42(obj)) ? TRUE : FALSE)
-
-
-/**
- * MixVideoConfigParamsDecMP42:
- *
- * MI-X VideoConfig Parameter object
- */
-class MixVideoConfigParamsDecMP42 : public MixVideoConfigParamsDec {
-public:
-    MixVideoConfigParamsDecMP42();
-    ~MixVideoConfigParamsDecMP42();
-    virtual bool copy(MixParams *target) const;
-    virtual bool equal(MixParams* obj) const;
-    virtual MixParams* dup() const;
-public:
-    /*< public > */
-
-    /* MPEG version */
-    uint mpegversion;
-
-    /* DivX version */
-    uint divxversion;
-
-    /* Reserved for future use */
-    void *reserved1;
-
-    /* Reserved for future use */
-    void *reserved2;
-
-    /* Reserved for future use */
-    void *reserved3;
-
-    /* Reserved for future use */
-    void *reserved4;
-};
-
-/**
- * mix_videoconfigparamsdec_mp42_get_type:
- * @returns: type
- *
- * Get the type of object.
- */
-//GType mix_videoconfigparamsdec_mp42_get_type(void);
-
-/**
- * mix_videoconfigparamsdec_mp42_new:
- * @returns: A newly allocated instance of #MixVideoConfigParamsDecMP42
- *
- * Use this method to create a new instance of #MixVideoConfigParamsDecMP42
- */
-MixVideoConfigParamsDecMP42 *mix_videoconfigparamsdec_mp42_new(void);
-/**
- * mix_videoconfigparamsdec_mp42_ref:
- * @mix: object to add reference
- * @returns: the #MixVideoConfigParamsDecMP42 instance where reference count has been increased.
- *
- * Add reference count.
- */
-MixVideoConfigParamsDecMP42
-* mix_videoconfigparamsdec_mp42_ref(MixVideoConfigParamsDecMP42 * mix);
-
-/**
- * mix_videoconfigparamsdec_mp42_unref:
- * @obj: object to unref.
- *
- * Decrement reference count of the object.
- */
-#define mix_videoconfigparamsdec_mp42_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-/* Class Methods */
-
-/* TODO: Add getters and setters for other properties */
-
-
-/**
- * mix_videoconfigparamsdec_mp42_set_mpegversion:
- * @obj: #MixVideoConfigParamsDecMP42 object
- * @version: MPEG version
- * @returns: Common Video Error Return Codes
- *
- * Set MPEG version
- */
-MIX_RESULT mix_videoconfigparamsdec_mp42_set_mpegversion(
-        MixVideoConfigParamsDecMP42 *obj, uint version);
-
-/**
- * mix_videoconfigparamsdec_mp42_get_mpegversion:
- * @obj: #MixVideoConfigParamsDecMP42 object
- * @version: MPEG version to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get MPEG version
- */
-MIX_RESULT mix_videoconfigparamsdec_mp42_get_mpegversion(
-        MixVideoConfigParamsDecMP42 *obj, uint *version);
-
-/**
- * mix_videoconfigparamsdec_mp42_set_divxversion:
- * @obj: #MixVideoConfigParamsDecMP42 object
- * @version: DivX version
- * @returns: Common Video Error Return Codes
- *
- * Set DivX version
- */
-MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion(
-        MixVideoConfigParamsDecMP42 *obj, uint version);
-
-/**
- * mix_videoconfigparamsdec_mp42_get_divxversion:
- * @obj: #MixVideoConfigParamsDecMP42 object
- * @version: DivX version to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get DivX version
- */
-MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion(
-        MixVideoConfigParamsDecMP42 *obj, uint *version);
-
-
-
-#endif /* __MIX_VIDEOCONFIGPARAMSDEC_MP42_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.cpp b/mix_video/src/mixvideoconfigparamsdec_vc1.cpp
deleted file mode 100644
index fd2e1c5..0000000
--- a/mix_video/src/mixvideoconfigparamsdec_vc1.cpp
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-/**
- * SECTION:mixvideoconfigparamsdec_vc1
- * @short_description: MI-X Video VC-1 Decode Configuration Parameter
- *
- * MI-X video VC-1 decode configuration parameter objects.
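- *
- * A minimal lifetime sketch (illustrative only, not part of the original
- * sources), showing the reference-counting convention shared by the MI-X
- * parameter objects:
- * |[
- * MixVideoConfigParamsDecVC1 *params = mix_videoconfigparamsdec_vc1_new();
- * MixVideoConfigParamsDecVC1 *extra = mix_videoconfigparamsdec_vc1_ref(params);
- * mix_videoconfigparamsdec_vc1_unref(extra);   // drops the extra reference
- * mix_videoconfigparamsdec_vc1_unref(params);  // releases the last reference
- * ]|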
- */
-
-
-#include "mixvideoconfigparamsdec_vc1.h"
-
-MixVideoConfigParamsDecVC1::MixVideoConfigParamsDecVC1()
-    :reserved1(NULL)
-    ,reserved2(NULL)
-    ,reserved3(NULL)
-    ,reserved4(NULL) {
-}
-
-MixVideoConfigParamsDecVC1::~MixVideoConfigParamsDecVC1() {
-}
-
-bool MixVideoConfigParamsDecVC1::copy(MixParams *target) const {
-    bool ret = FALSE;
-    MixVideoConfigParamsDecVC1 * this_target = MIX_VIDEOCONFIGPARAMSDEC_VC1(target);
-    if (NULL != this_target) {
-        ret = MixVideoConfigParamsDec::copy(target);
-    }
-    return ret;
-}
-
-bool MixVideoConfigParamsDecVC1::equal(MixParams* obj) const {
-    bool ret = FALSE;
-    MixVideoConfigParamsDecVC1 * this_obj = MIX_VIDEOCONFIGPARAMSDEC_VC1(obj);
-    if (NULL != this_obj)
-        ret = MixVideoConfigParamsDec::equal(this_obj);
-    return ret;
-}
-
-MixParams* MixVideoConfigParamsDecVC1::dup() const {
-    MixParams *ret = new MixVideoConfigParamsDecVC1();
-    if (NULL != ret) {
-        if (FALSE == copy(ret)) {
-            ret->Unref();
-            ret = NULL;
-        }
-    }
-    return ret;
-}
-
-MixVideoConfigParamsDecVC1 *
-mix_videoconfigparamsdec_vc1_new(void) {
-    return new MixVideoConfigParamsDecVC1();
-}
-
-MixVideoConfigParamsDecVC1 *
-mix_videoconfigparamsdec_vc1_ref(
-    MixVideoConfigParamsDecVC1 * mix) {
-    if (NULL != mix)
-        mix->Ref();
-    return mix;
-}
-
-
-/* TODO: Add getters and setters for properties if any */
diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.h b/mix_video/src/mixvideoconfigparamsdec_vc1.h
deleted file mode 100644
index 1397424..0000000
--- a/mix_video/src/mixvideoconfigparamsdec_vc1.h
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
-INTEL CONFIDENTIAL
-Copyright 2009 Intel Corporation All Rights Reserved.
-The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
-No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
-*/
-
-#ifndef __MIX_VIDEOCONFIGPARAMSDEC_VC1_H__
-#define __MIX_VIDEOCONFIGPARAMSDEC_VC1_H__
-
-#include "mixvideoconfigparamsdec.h"
-#include "mixvideodef.h"
-
-/**
-* MIX_VIDEOCONFIGPARAMSDEC_VC1:
-* @obj: object to be type-casted.
-*/
-#define MIX_VIDEOCONFIGPARAMSDEC_VC1(obj) (reinterpret_cast<MixVideoConfigParamsDecVC1*>(obj))
-
-/**
-* MIX_IS_VIDEOCONFIGPARAMSDEC_VC1:
-* @obj: an object.
-*
-* Checks if the given object is an instance of #MixVideoConfigParamsDecVC1
-*/
-#define MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(obj) ((NULL != MIX_VIDEOCONFIGPARAMSDEC_VC1(obj)) ? TRUE : FALSE)
-
-/**
-* MixVideoConfigParamsDecVC1:
-*
-* MI-X VideoConfig Parameter object
-*/
-class MixVideoConfigParamsDecVC1 : public MixVideoConfigParamsDec
-{
-public:
-    MixVideoConfigParamsDecVC1();
-    ~MixVideoConfigParamsDecVC1();
-    virtual bool copy(MixParams *target) const;
-    virtual bool equal(MixParams* obj) const;
-    virtual MixParams* dup() const;
-public:
-    /*< public > */
-
-    /* TODO: Add VC1 configuration parameters */
-    /* TODO: wmv_version and fourcc type might be changed later */
-
-    /* WMV version */
-    uint wmv_version;
-
-    /* FourCC code */
-    uint fourcc;
-
-    /* Reserved for future use */
-    void *reserved1;
-
-    /* Reserved for future use */
-    void *reserved2;
-
-    /* Reserved for future use */
-    void *reserved3;
-
-    /* Reserved for future use */
-    void *reserved4;
-};
-
-/**
-* mix_videoconfigparamsdec_vc1_new:
-* @returns: A newly allocated instance of #MixVideoConfigParamsDecVC1
-*
-* Use this method to create a new instance of #MixVideoConfigParamsDecVC1
-*/
-MixVideoConfigParamsDecVC1 *mix_videoconfigparamsdec_vc1_new (void);
-/**
-* mix_videoconfigparamsdec_vc1_ref:
-* @mix: object to add reference
-* @returns: the #MixVideoConfigParamsDecVC1 instance where reference count has been increased.
-*
-* Add reference count.
-*/
-MixVideoConfigParamsDecVC1
-* mix_videoconfigparamsdec_vc1_ref (MixVideoConfigParamsDecVC1 * mix);
-
-/**
-* mix_videoconfigparamsdec_vc1_unref:
-* @obj: object to unref.
-*
-* Decrement reference count of the object.
-*/
-#define mix_videoconfigparamsdec_vc1_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-/* Class Methods */
-
-/* TODO: Add getters and setters for other properties */
-#endif /* __MIX_VIDEOCONFIGPARAMSDEC_VC1_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsenc.cpp b/mix_video/src/mixvideoconfigparamsenc.cpp
deleted file mode 100644
index 67a0c5f..0000000
--- a/mix_video/src/mixvideoconfigparamsenc.cpp
+++ /dev/null
@@ -1,883 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-/**
- * SECTION:mixvideoconfigparamsenc
- * @short_description: MI-X Video Encode Configuration Parameter Base Object
- *
- * A base object of MI-X video encode configuration parameter objects.
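- *
- * A minimal configuration sketch (illustrative only, not part of the
- * original sources; the values shown are placeholders, and the MIX_RESULT
- * return codes should be checked in real code):
- * |[
- * MixVideoConfigParamsEnc *params = mix_videoconfigparamsenc_new();
- * if (NULL != params) {
- *     mix_videoconfigparamsenc_set_picture_res(params, 1280, 720);
- *     mix_videoconfigparamsenc_set_frame_rate(params, 30, 1);   // 30 fps
- *     mix_videoconfigparamsenc_set_bit_rate(params, 4000000);   // 4 Mbps
- *     mix_videoconfigparamsenc_set_intra_period(params, 30);    // GOP size
- *     mix_videoconfigparamsenc_unref(params);
- * }
- * ]|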
- */
-
-
-#include <string.h>   /* strcmp, strdup, memcpy */
-#include "mixvideolog.h"
-#include "mixvideoconfigparamsenc.h"
-#include <stdlib.h>   /* free */
-
-#define MDEBUG
-
-MixVideoConfigParamsEnc::MixVideoConfigParamsEnc()
-    :profile(MIX_PROFILE_H264BASELINE)
-    ,level(40)
-    ,raw_format(MIX_RAW_TARGET_FORMAT_YUV420)
-    ,rate_control(MIX_RATE_CONTROL_NONE)
-    ,bitrate(0)
-    ,frame_rate_num(30)
-    ,frame_rate_denom(1)
-    ,initial_qp(15)
-    ,min_qp(0)
-    ,target_percentage(95)
-    ,window_size(500)
-    ,intra_period(30)
-    ,picture_width(0)
-    ,picture_height(0)
-    ,mime_type(NULL)
-    ,encode_format(MIX_ENCODE_TARGET_FORMAT_MPEG4)
-    ,mixbuffer_pool_size(0)
-    ,share_buf_mode(FALSE)
-    ,ci_frame_id(NULL)
-    ,ci_frame_num(0)
-    ,CIR_frame_cnt(15)
-    ,max_slice_size(0) /* 0 disables the maximum slice size limit */
-    ,refresh_type(MIX_VIDEO_NONIR)
-    ,buffer_mode(MIX_BUFFER_SELF_ALLOC_SURFACE)
-    ,buf_info(NULL)
-    ,need_display(TRUE)
-    ,reserved1(NULL)
-    ,reserved2(NULL)
-    ,reserved3(NULL)
-    ,reserved4(NULL) {
-    air_params.air_MBs = 0;
-    air_params.air_threshold = 0;
-    air_params.air_auto = 0;
-}
-
-MixVideoConfigParamsEnc::~MixVideoConfigParamsEnc() {
-    /* free mime_type */
-    if (mime_type)
-        free(mime_type);
-
-    if (ci_frame_id)
-        delete[] ci_frame_id;
-
-    if (buffer_mode == MIX_BUFFER_UPSTREAM_ALLOC_CI) {
-        MixCISharedBufferInfo * ci_tmp = NULL;
-        if (buf_info) {
-            ci_tmp = (MixCISharedBufferInfo *) buf_info;
-            if (ci_tmp->ci_frame_id) {
-                delete [] ci_tmp->ci_frame_id;
-                ci_tmp->ci_frame_id = NULL;
-            }
-
-            delete ci_tmp;
-            ci_tmp = NULL;
-            buf_info = NULL;
-        }
-    }
-}
-
-bool MixVideoConfigParamsEnc::copy(MixParams *target) const {
-    bool ret = FALSE;
-    MixVideoConfigParamsEnc *this_target = MIX_VIDEOCONFIGPARAMSENC(target);
-    MIX_RESULT mix_result = MIX_RESULT_FAIL;
-
-    LOG_V( "Begin\n");
-
-    if (NULL != this_target) {
-        /* copy properties of primitive type */
-        this_target->bitrate = bitrate;
-        this_target->frame_rate_num = frame_rate_num;
-        this_target->frame_rate_denom = frame_rate_denom;
-        this_target->initial_qp = initial_qp;
-        this_target->min_qp = min_qp;
-        this_target->target_percentage = target_percentage;
-        this_target->window_size = window_size;
-        this_target->max_slice_size = max_slice_size;
-        this_target->intra_period = intra_period;
-        this_target->picture_width = picture_width;
-        this_target->picture_height = picture_height;
-        this_target->mixbuffer_pool_size = mixbuffer_pool_size;
-        this_target->share_buf_mode = share_buf_mode;
-        this_target->encode_format = encode_format;
-        this_target->ci_frame_num = ci_frame_num;
-        this_target->draw = draw;
-        this_target->need_display = need_display;
-        this_target->rate_control = rate_control;
-        this_target->raw_format = raw_format;
-        this_target->profile = profile;
-        this_target->level = level;
-        this_target->CIR_frame_cnt = CIR_frame_cnt;
-        this_target->refresh_type = refresh_type;
-        this_target->air_params.air_MBs = air_params.air_MBs;
-        this_target->air_params.air_threshold = air_params.air_threshold;
-        this_target->air_params.air_auto = air_params.air_auto;
-        this_target->buffer_mode = buffer_mode;
-
-        /* copy properties of non-primitive type */
-        /* copy mime_type */
-        if (mime_type) {
-#ifdef MDEBUG
-            LOG_I( "mime_type = %s %x\n", mime_type, (unsigned int)mime_type);
-#endif
-            mix_result = mix_videoconfigparamsenc_set_mime_type(
-                this_target, mime_type);
-        } else {
-            LOG_I( "mime_type = NULL\n");
-            mix_result = mix_videoconfigparamsenc_set_mime_type(this_target, NULL);
-        }
-
-        if (mix_result != MIX_RESULT_SUCCESS) {
-            LOG_E( "Failed to mix_videoconfigparamsenc_set_mime_type\n");
-            return FALSE;
-        }
-
-        mix_result = mix_videoconfigparamsenc_set_ci_frame_info (
-            this_target, ci_frame_id, ci_frame_num);
-        mix_result = mix_videoconfigparamsenc_set_upstream_buffer_info (
-            this_target, this_target->buffer_mode, buf_info);
-
-        /* TODO: copy other properties if there's any */
-        /* Now chainup base class */
-        ret = MixVideoConfigParams::copy(target);
-    }
-
-    return ret;
-}
-
-bool MixVideoConfigParamsEnc::equal(MixParams* obj) const {
-    /* Start from FALSE so a type mismatch compares unequal. */
-    bool ret = FALSE;
-    MixVideoConfigParamsEnc *this_obj = MIX_VIDEOCONFIGPARAMSENC(obj);
-    if (NULL != this_obj) {
-        /* check the equality of the primitive type properties */
-        if (bitrate != this_obj->bitrate) {
-            goto not_equal;
-        }
-
-        if (frame_rate_num != this_obj->frame_rate_num) {
-            goto not_equal;
-        }
-
-        if (frame_rate_denom != this_obj->frame_rate_denom) {
-            goto not_equal;
-        }
-
-        if (initial_qp != this_obj->initial_qp) {
-            goto not_equal;
-        }
-
-        if (min_qp != this_obj->min_qp) {
-            goto not_equal;
-        }
-
-        if (target_percentage != this_obj->target_percentage) {
-            goto not_equal;
-        }
-
-        if (window_size != this_obj->window_size) {
-            goto not_equal;
-        }
-
-        if (max_slice_size != this_obj->max_slice_size) {
-            goto not_equal;
-        }
-
-        if (intra_period != this_obj->intra_period) {
-            goto not_equal;
-        }
-
-        /* either dimension differing makes the objects unequal */
-        if (picture_width != this_obj->picture_width ||
-                picture_height != this_obj->picture_height) {
-            goto not_equal;
-        }
-
-        if (encode_format != this_obj->encode_format) {
-            goto not_equal;
-        }
-
-        if (mixbuffer_pool_size != this_obj->mixbuffer_pool_size) {
-            goto not_equal;
-        }
-
-        if (share_buf_mode != this_obj->share_buf_mode) {
-            goto not_equal;
-        }
-
-        if (ci_frame_id != this_obj->ci_frame_id) {
-            goto not_equal;
-        }
-
-        if (ci_frame_num != this_obj->ci_frame_num) {
-            goto not_equal;
-        }
-
-        if (draw != this_obj->draw) {
-            goto not_equal;
-        }
-
-        if (need_display != this_obj->need_display) {
-            goto not_equal;
-        }
-
-        if (rate_control != this_obj->rate_control) {
-            goto not_equal;
-        }
-
-        if (raw_format != this_obj->raw_format) {
-            goto not_equal;
-        }
-
-        if (profile != this_obj->profile) {
-            goto not_equal;
-        }
-
-        if (level != this_obj->level) {
-            goto not_equal;
-        }
-
-        if (CIR_frame_cnt != this_obj->CIR_frame_cnt) {
-            goto not_equal;
-        }
-
-        if (refresh_type != this_obj->refresh_type) {
-            goto not_equal;
-        }
-
-        if (air_params.air_MBs != this_obj->air_params.air_MBs) {
-            goto not_equal;
-        }
-
-        if (air_params.air_threshold != this_obj->air_params.air_threshold) {
-            goto not_equal;
-        }
-
-        if (air_params.air_auto != this_obj->air_params.air_auto) {
-            goto not_equal;
-        }
-
-        if (buffer_mode != this_obj->buffer_mode) {
-            goto not_equal;
-        }
-
-        /* check the equality of the non-primitive type properties */
-
-        /* compare mime_type */
-        if (mime_type && this_obj->mime_type) {
-            if (strcmp(mime_type, this_obj->mime_type) != 0) {
-                goto not_equal;
-            }
-        } else if (!(!mime_type && !this_obj->mime_type)) {
-            goto not_equal;
-        }
-
-        /*
-         * TODO: Check the data inside data info
-         */
-        ret = TRUE;
-
-not_equal:
-
-        if (ret != TRUE) {
-            return ret;
-        }
-
-        /* chaining up. */
-        ret = MixVideoConfigParams::equal(obj);
-
-    }
-    return ret;
-
-}
-
-MixParams* MixVideoConfigParamsEnc::dup() const {
-    MixParams *ret = new MixVideoConfigParamsEnc();
-    if (NULL != ret) {
-        if (FALSE == copy(ret)) {
-            ret->Unref();
-            ret = NULL;
-        }
-    }
-    return ret;
-}
-
-
-MixVideoConfigParamsEnc *
-mix_videoconfigparamsenc_new(void) {
-    return new MixVideoConfigParamsEnc();
-}
-
-MixVideoConfigParamsEnc *
-mix_videoconfigparamsenc_ref(MixVideoConfigParamsEnc * mix) {
-    if (NULL != mix)
-        mix->Ref();
-    return mix;
-}
-
-
-#define MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT(obj) \
-    if(!obj) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \
-
-#define MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT(obj, prop) \
-    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \
-
-#define MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR(obj, prop, prop2) \
-    if(!obj || !prop || !prop2 ) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \
-
-/* TODO: Add getters and setters for other properties. The following is incomplete */
-
-
-MIX_RESULT mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj,
-        const char * mime_type) {
-
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-
-    if (!mime_type) {
-        return MIX_RESULT_NULL_PTR;
-    }
-
-    LOG_I( "mime_type = %s %x\n",
-           mime_type, (unsigned int)mime_type);
-
-    if (obj->mime_type) {
-        free(obj->mime_type);
-        obj->mime_type = NULL;
-    }
-
-    obj->mime_type = strdup(mime_type);
-    if (!obj->mime_type) {
-        return MIX_RESULT_NO_MEMORY;
-    }
-
-    LOG_I( "mime_type = %s obj->mime_type = %s\n",
-           mime_type, obj->mime_type);
-
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_mime_type(MixVideoConfigParamsEnc * obj,
-        char ** mime_type) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, mime_type);
-
-    if (!obj->mime_type) {
-        *mime_type = NULL;
-        return MIX_RESULT_SUCCESS;
-    }
-    *mime_type = strdup(obj->mime_type);
-    if (!*mime_type) {
-        return MIX_RESULT_NO_MEMORY;
-    }
-
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_frame_rate(MixVideoConfigParamsEnc * obj,
-        uint frame_rate_num, uint frame_rate_denom) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->frame_rate_num = frame_rate_num;
-    obj->frame_rate_denom = frame_rate_denom;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_frame_rate(MixVideoConfigParamsEnc * obj,
-        uint * frame_rate_num, uint * frame_rate_denom) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, frame_rate_num, frame_rate_denom);
-    *frame_rate_num = obj->frame_rate_num;
-    *frame_rate_denom = obj->frame_rate_denom;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_picture_res(MixVideoConfigParamsEnc * obj,
-        uint picture_width, uint picture_height) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->picture_width = picture_width;
-    obj->picture_height = picture_height;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_picture_res(MixVideoConfigParamsEnc * obj,
-        uint * picture_width, uint * picture_height) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, picture_width, picture_height);
-    *picture_width = obj->picture_width;
-    *picture_height = obj->picture_height;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_encode_format(MixVideoConfigParamsEnc * obj,
-        MixEncodeTargetFormat encode_format) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->encode_format = encode_format;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_encode_format (MixVideoConfigParamsEnc * obj,
-        MixEncodeTargetFormat* encode_format) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, encode_format);
-    *encode_format = obj->encode_format;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc * obj,
-        uint bitrate) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->bitrate = bitrate;
-    return MIX_RESULT_SUCCESS;
-
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc * obj,
-        uint *bitrate) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, bitrate);
-    *bitrate = obj->bitrate;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj,
-        uint initial_qp) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->initial_qp = initial_qp;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc * obj,
-        uint *initial_qp) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, initial_qp);
-    *initial_qp = obj->initial_qp;
-    return MIX_RESULT_SUCCESS;
-
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj,
-        uint min_qp) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->min_qp = min_qp;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj,
-        uint *min_qp) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, min_qp);
-    *min_qp = obj->min_qp;
-
-    return MIX_RESULT_SUCCESS;
-}
-
-
-MIX_RESULT mix_videoconfigparamsenc_set_target_percentage (MixVideoConfigParamsEnc * obj,
-        uint target_percentage) {
-
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->target_percentage = target_percentage;
-    return MIX_RESULT_SUCCESS;
-}
-
-
-MIX_RESULT mix_videoconfigparamsenc_get_target_percentage(MixVideoConfigParamsEnc * obj,
-        uint *target_percentage) {
-
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, target_percentage);
-    *target_percentage = obj->target_percentage;
-
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_window_size (MixVideoConfigParamsEnc * obj,
-        uint window_size) {
-
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->window_size = window_size;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_window_size (MixVideoConfigParamsEnc * obj,
-        uint *window_size) {
-
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, window_size);
-    *window_size = obj->window_size;
-
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc * obj,
-        uint intra_period) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->intra_period = intra_period;
-
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_intra_period (MixVideoConfigParamsEnc * obj,
-        uint *intra_period) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, intra_period);
-    *intra_period = obj->intra_period;
-
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size(
-        MixVideoConfigParamsEnc * obj, uint bufpoolsize) {
-
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-
-    obj->mixbuffer_pool_size = bufpoolsize;
-    return MIX_RESULT_SUCCESS;
-
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size(
-        MixVideoConfigParamsEnc * obj, uint *bufpoolsize) {
-
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, bufpoolsize);
-    *bufpoolsize = obj->mixbuffer_pool_size;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode (
-        MixVideoConfigParamsEnc * obj, bool share_buf_mod) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-
-    obj->share_buf_mode = share_buf_mod;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode(MixVideoConfigParamsEnc * obj,
-        bool *share_buf_mod) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, share_buf_mod);
-
-    *share_buf_mod = obj->share_buf_mode;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(
-        MixVideoConfigParamsEnc * obj, ulong * ci_frame_id, uint ci_frame_num) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-
-    /* Release any previously stored array so no path leaks it. */
-    if (obj->ci_frame_id) {
-        delete [] obj->ci_frame_id;
-        obj->ci_frame_id = NULL;
-    }
-
-    if (!ci_frame_id || !ci_frame_num) {
-        obj->ci_frame_num = 0;
-        return MIX_RESULT_SUCCESS;
-    }
-
-    uint size = ci_frame_num * sizeof (ulong);
-    obj->ci_frame_num = ci_frame_num;
-
-    obj->ci_frame_id = new ulong[ci_frame_num];
-    if (!(obj->ci_frame_id)) {
-        return MIX_RESULT_NO_MEMORY;
-    }
-
-    memcpy (obj->ci_frame_id, ci_frame_id, size);
-
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * obj,
-        ulong * *ci_frame_id, uint *ci_frame_num) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, ci_frame_id, ci_frame_num);
-
-    *ci_frame_num = obj->ci_frame_num;
-
-    if (!obj->ci_frame_id) {
-        *ci_frame_id = NULL;
-        return MIX_RESULT_SUCCESS;
-    }
-
-    if (obj->ci_frame_num) {
-        *ci_frame_id = new ulong[obj->ci_frame_num];
-
-        if (!*ci_frame_id) {
-            return MIX_RESULT_NO_MEMORY;
-        }
-
-        memcpy (*ci_frame_id, obj->ci_frame_id, obj->ci_frame_num * sizeof (ulong));
-
-    } else {
-        *ci_frame_id = NULL;
-    }
-
-    return MIX_RESULT_SUCCESS;
-}
-
-
-MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj,
-        ulong draw) {
-
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->draw = draw;
-    return MIX_RESULT_SUCCESS;
-
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_drawable (MixVideoConfigParamsEnc * obj,
-        ulong *draw) {
-
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, draw);
-    *draw = obj->draw;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_need_display (
-        MixVideoConfigParamsEnc * obj, bool need_display) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-
-    obj->need_display = need_display;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * obj,
-        bool *need_display) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, need_display);
-
-    *need_display = obj->need_display;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj,
-        MixRateControl rate_control) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->rate_control = rate_control;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * obj,
-        MixRateControl * rate_control) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, rate_control);
-    *rate_control = obj->rate_control;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj,
-        MixRawTargetFormat raw_format) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->raw_format = raw_format;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * obj,
-        MixRawTargetFormat * raw_format) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, raw_format);
-    *raw_format = obj->raw_format;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj,
-        MixProfile profile) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->profile = profile;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj,
-        MixProfile * profile) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, profile);
-    *profile = obj->profile;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj,
-        uint8 level) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->level = level;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj,
-        uint8 * level) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, level);
-    *level = obj->level;
-    return MIX_RESULT_SUCCESS;
-}
-
-
-MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj,
-        uint CIR_frame_cnt) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->CIR_frame_cnt = CIR_frame_cnt;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj,
-        uint * CIR_frame_cnt) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, CIR_frame_cnt);
-    *CIR_frame_cnt = obj->CIR_frame_cnt;
-    return MIX_RESULT_SUCCESS;
-}
-
-
-MIX_RESULT mix_videoconfigparamsenc_set_max_slice_size (MixVideoConfigParamsEnc * obj,
-        uint max_slice_size) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->max_slice_size = max_slice_size;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_max_slice_size (MixVideoConfigParamsEnc * obj,
-        uint * max_slice_size) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, max_slice_size);
-    *max_slice_size = obj->max_slice_size;
-    return MIX_RESULT_SUCCESS;
-}
-
-
-MIX_RESULT mix_videoconfigparamsenc_set_refresh_type(MixVideoConfigParamsEnc * obj,
-        MixVideoIntraRefreshType refresh_type) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->refresh_type = refresh_type;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_refresh_type (MixVideoConfigParamsEnc * obj,
-        MixVideoIntraRefreshType * refresh_type) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, refresh_type);
-    *refresh_type = obj->refresh_type;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_AIR_params (MixVideoConfigParamsEnc * obj,
-        MixAIRParams air_params) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->air_params.air_MBs = air_params.air_MBs;
-    obj->air_params.air_threshold = air_params.air_threshold;
-    obj->air_params.air_auto = air_params.air_auto;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_AIR_params (MixVideoConfigParamsEnc * obj,
-        MixAIRParams * air_params) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, air_params);
-    air_params->air_MBs = obj->air_params.air_MBs;
-    air_params->air_threshold = obj->air_params.air_threshold;
-    air_params->air_auto = obj->air_params.air_auto;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_set_buffer_mode (MixVideoConfigParamsEnc * obj,
-        MixBufferAllocationMode buffer_mode) {
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-    obj->buffer_mode = buffer_mode;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_buffer_mode (MixVideoConfigParamsEnc * obj,
-        MixBufferAllocationMode * buffer_mode) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, buffer_mode);
-    *buffer_mode = obj->buffer_mode;
-    return MIX_RESULT_SUCCESS;
-}
-
-/*
-* Currently we use void* for buf_info; this will change to use the union
-* that has been defined in mixvideodef.h.
-*/
-MIX_RESULT mix_videoconfigparamsenc_set_upstream_buffer_info (MixVideoConfigParamsEnc * obj,
-        MixBufferAllocationMode buffer_mode, void * buf_info) {
-
-    MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-
-    if (!buf_info) {
-        return MIX_RESULT_NULL_PTR;
-    }
-
-    switch (buffer_mode) {
-    case MIX_BUFFER_UPSTREAM_ALLOC_CI:
-    {
-        MixCISharedBufferInfo * ci_tmp = NULL;
-        MixCISharedBufferInfo * ci_info_in = (MixCISharedBufferInfo *) buf_info;
-
-        if (obj->buf_info) {
-            ci_tmp = (MixCISharedBufferInfo *) obj->buf_info;
-            if (ci_tmp->ci_frame_id) {
-                delete [] ci_tmp->ci_frame_id;
-                ci_tmp->ci_frame_id = NULL;
-            }
-
-            delete ci_tmp;
-            ci_tmp = NULL;
-            obj->buf_info = NULL;
-        }
-
-        ci_tmp = new MixCISharedBufferInfo;
-        if (!ci_tmp) {
-            return MIX_RESULT_NO_MEMORY;
-        }
-
-        ci_tmp->ci_frame_cnt = ci_info_in->ci_frame_cnt;
-        ci_tmp->ci_frame_id = NULL;
-
-        ci_tmp->ci_frame_id = new ulong[ci_tmp->ci_frame_cnt];
-        if (!ci_tmp->ci_frame_id) {
-            delete ci_tmp; /* do not leak the partially built info block */
-            return MIX_RESULT_NO_MEMORY;
-        }
-
-        memcpy (ci_tmp->ci_frame_id, ci_info_in->ci_frame_id, ci_tmp->ci_frame_cnt * sizeof (ulong));
-        obj->buf_info = (void *) ci_tmp;
-    }
-    break;
-    case MIX_BUFFER_UPSTREAM_ALLOC_V4L2:
-        break;
-    case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE:
-        break;
-    default:
-        return MIX_RESULT_FAIL; //FIXME
-    }
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_get_upstream_buffer_info (MixVideoConfigParamsEnc * obj,
-        MixBufferAllocationMode buffer_mode, void ** buf_info) {
-    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, buf_info);
-
-    switch (buffer_mode) {
-    case MIX_BUFFER_UPSTREAM_ALLOC_CI:
-    {
-        MixCISharedBufferInfo * ci_tmp = (MixCISharedBufferInfo *) (obj->buf_info);
-        MixCISharedBufferInfo * ci_info_out = NULL;
-
-        if (!ci_tmp) {
-            return MIX_RESULT_NULL_PTR;
-        }
-
-        if (!(ci_tmp->ci_frame_id) || !(ci_tmp->ci_frame_cnt)) {
-            return MIX_RESULT_NULL_PTR;
-        }
-
-        ci_info_out = new MixCISharedBufferInfo;
-        if (!ci_info_out) {
-            return MIX_RESULT_NO_MEMORY;
-        }
-
-        ci_info_out->ci_frame_cnt = ci_tmp->ci_frame_cnt;
-        ci_info_out->ci_frame_id = NULL;
-
-        ci_info_out->ci_frame_id = new ulong[ci_info_out->ci_frame_cnt];
-        if (!ci_info_out->ci_frame_id) {
-            delete ci_info_out; /* do not leak the partially built info block */
-            return MIX_RESULT_NO_MEMORY;
-        }
-
-        memcpy (ci_info_out->ci_frame_id, ci_tmp->ci_frame_id, ci_info_out->ci_frame_cnt * sizeof (ulong));
-        *buf_info = (MixCISharedBufferInfo *) ci_info_out;
-    }
-    break;
-    case MIX_BUFFER_UPSTREAM_ALLOC_V4L2:
-        break;
-    case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE:
-        break;
-    default:
-        return MIX_RESULT_FAIL; //FIXME
-    }
-    return MIX_RESULT_SUCCESS;
-}
-
diff --git a/mix_video/src/mixvideoconfigparamsenc.h b/mix_video/src/mixvideoconfigparamsenc.h
deleted file mode 100644
index 00d270d..0000000
--- a/mix_video/src/mixvideoconfigparamsenc.h
+++ /dev/null
@@ -1,765 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-#ifndef __MIX_VIDEOCONFIGPARAMSENC_H__
-#define __MIX_VIDEOCONFIGPARAMSENC_H__
-
-#include "mixvideoconfigparams.h"
-#include "mixvideodef.h"
-
-/**
- * MIX_VIDEOCONFIGPARAMSENC:
- * @obj: object to be type-casted.
- */
-#define MIX_VIDEOCONFIGPARAMSENC(obj) (reinterpret_cast<MixVideoConfigParamsEnc*>(obj))
-
-/**
- * MIX_IS_VIDEOCONFIGPARAMSENC:
- * @obj: an object.
- *
- * Checks if the given object is an instance of #MixParams
- */
-#define MIX_IS_VIDEOCONFIGPARAMSENC(obj) (NULL != MIX_VIDEOCONFIGPARAMSENC(obj))
-
-
-
-/**
- * MixVideoConfigParamsEnc:
- *
- * MI-X VideoConfig Parameter object
- */
-class MixVideoConfigParamsEnc : public MixVideoConfigParams {
-public:
-    MixVideoConfigParamsEnc();
-    virtual ~MixVideoConfigParamsEnc();
-    virtual bool copy(MixParams *target) const;
-    virtual bool equal(MixParams* obj) const;
-    virtual MixParams* dup() const;
-public:
-    /*< public > */
-    //MixIOVec header;
-
-    /* the type of the following members will be changed after MIX API doc is ready */
-
-    /* Encoding profile */
-    MixProfile profile;
-
-    uint8 level;
-
-    /* Raw format to be encoded */
-    MixRawTargetFormat raw_format;
-
-    /* Rate control mode */
-    MixRateControl rate_control;
-
-    /* Bitrate when rate control is used */
-    uint bitrate;
-
-    /* Numerator of frame rate */
-    uint frame_rate_num;
-
-    /* Denominator of frame rate */
-    uint frame_rate_denom;
-
-    /* The initial QP value */
-    uint initial_qp;
-
-    /* The minimum QP value */
-    uint min_qp;
-
-    /* This is the bit-rate the rate control is targeting, as a percentage of the maximum bit-rate.
-     * For example, if target_percentage is 95, the rate control will target a bit-rate that is
-     * 95% of the maximum bit-rate.
-     */
-    uint target_percentage;
-
-    /* Window size in milliseconds. For example, if this is set to 500, the rate control
-     * will keep the bit-rate within the target over each 500 ms window. */
-    uint window_size;
-
-    /* Number of frames between key frames (GOP size) */
-    uint intra_period;
-
-    /* Width of video frame */
-    uint16 picture_width;
-
-    /* Height of the video frame */
-    uint16 picture_height;
-
-    /* Mime type, reserved */
-    char * mime_type;
-
-    /* Encode target format */
-    MixEncodeTargetFormat encode_format;
-
-    /* Size of the pool of MixBuffer objects */
-    uint mixbuffer_pool_size;
-
-    /* Whether buffers are shared between the capture and encoding drivers */
-    bool share_buf_mode;
-
-    /* Array of frame IDs created by capture library */
-    ulong * ci_frame_id;
-
-    /* Size of the array ci_frame_id */
-    uint ci_frame_num;
-
-    uint CIR_frame_cnt;
-
-    /* The maximum slice size to be set to the video driver (in bits).
-     * The encoder hardware will try to make sure a single slice does not exceed this size;
-     * if it cannot, mix_video_encode() will report a specific error.
-     */
-    uint max_slice_size;
-
-    MixVideoIntraRefreshType refresh_type;
-
-    MixAIRParams air_params;
-
-    MixBufferAllocationMode buffer_mode;
-    void * buf_info;
-
-    /* < private > */
-    ulong draw;
-
-    /*< public > */
-
-    /* Indicates whether MixVideoFrames suitable for displaying
-     * need to be enqueued for retrieval using mix_video_get_frame() */
-    bool need_display;
-
-    /* Reserved for future use */
-    void *reserved1;
-
-    /* Reserved for future use */
-    void *reserved2;
-
-    /* Reserved for future use */
-    void *reserved3;
-
-    /* Reserved for future use */
-    void *reserved4;
-};
-
-
-/**
- * mix_videoconfigparamsenc_new:
- * @returns: A newly allocated instance of #MixVideoConfigParamsEnc
- *
- * Use this method to create a new instance of #MixVideoConfigParamsEnc
- */
-MixVideoConfigParamsEnc *mix_videoconfigparamsenc_new(void);
-
-/**
- * mix_videoconfigparamsenc_ref:
- * @mix: object to add reference
- * @returns: the #MixVideoConfigParamsEnc instance where reference count has been increased.
- *
- * Add reference count.
- */
-MixVideoConfigParamsEnc *mix_videoconfigparamsenc_ref(MixVideoConfigParamsEnc * mix);
-
-/**
- * mix_videoconfigparamsenc_unref:
- * @obj: object to unref.
- *
- * Decrement reference count of the object.
- */
-#define mix_videoconfigparamsenc_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-/* Class Methods */
-
-/**
- * mix_videoconfigparamsenc_set_mime_type:
- * @obj: #MixVideoConfigParamsEnc object
- * @mime_type: Mime type
- * @returns: Common Video Error Return Codes
- *
- * Set mime type
- */
-MIX_RESULT mix_videoconfigparamsenc_set_mime_type(
-    MixVideoConfigParamsEnc * obj, const char * mime_type);
-
-/**
- * mix_videoconfigparamsenc_get_mime_type:
- * @obj: #MixVideoConfigParamsEnc object
- * @mime_type: Mime type to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get mime type
- *
- *
- * Caller is responsible for freeing *mime_type with free()
- *
- */
-MIX_RESULT mix_videoconfigparamsenc_get_mime_type(
-    MixVideoConfigParamsEnc * obj, char ** mime_type);
-
-
-/**
- * mix_videoconfigparamsenc_set_frame_rate:
- * @obj: #MixVideoConfigParamsEnc object
- * @frame_rate_num: Numerator of frame rate
- * @frame_rate_denom: Denominator of frame rate
- * @returns: Common Video Error Return Codes
- *
- * Set frame rate
- */
-MIX_RESULT mix_videoconfigparamsenc_set_frame_rate(
-    MixVideoConfigParamsEnc * obj, uint frame_rate_num, uint frame_rate_denom);
-
-/**
- * mix_videoconfigparamsenc_get_frame_rate:
- * @obj: #MixVideoConfigParamsEnc object
- * @frame_rate_num: Numerator of frame rate to be returned
- * @frame_rate_denom: Denominator of frame rate to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get frame rate
- */
-MIX_RESULT mix_videoconfigparamsenc_get_frame_rate(
-    MixVideoConfigParamsEnc * obj, uint * frame_rate_num, uint * frame_rate_denom);
-
-/**
- * mix_videoconfigparamsenc_set_picture_res:
- * @obj: #MixVideoConfigParamsEnc object
- * @picture_width: Width of video frame
- * @picture_height: Height of the video frame
- * @returns: Common Video Error Return Codes
- *
- * Set width and height of video frame
- */
-MIX_RESULT mix_videoconfigparamsenc_set_picture_res(
-    MixVideoConfigParamsEnc * obj, uint picture_width, uint picture_height);
-
-/**
- * mix_videoconfigparamsenc_get_picture_res:
- * @obj: #MixVideoConfigParamsEnc object
- * @picture_width: Width of video frame to be returned
- * @picture_height: Height of the video frame to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get width and height of video frame
- */
-MIX_RESULT mix_videoconfigparamsenc_get_picture_res(
-    MixVideoConfigParamsEnc * obj, uint * picture_width, uint * picture_height);
-
-/**
- * mix_videoconfigparamsenc_set_encode_format:
- * @obj: #MixVideoConfigParamsEnc object
- * @encode_format: Encode target format
- * @returns: Common Video Error Return Codes
- *
- * Set Encode target format
- */
-MIX_RESULT mix_videoconfigparamsenc_set_encode_format (
-    MixVideoConfigParamsEnc * obj, MixEncodeTargetFormat encode_format);
-
-/**
- * mix_videoconfigparamsenc_get_encode_format:
- * @obj: #MixVideoConfigParamsEnc object
- * @encode_format: Encode target format to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get Encode target format
- */
-MIX_RESULT mix_videoconfigparamsenc_get_encode_format (
-    MixVideoConfigParamsEnc * obj, MixEncodeTargetFormat * encode_format);
-
-/**
- * mix_videoconfigparamsenc_set_bit_rate:
- * @obj: #MixVideoConfigParamsEnc object
- * @bps: bitrate
- * @returns: Common Video Error Return Codes
- *
- * Set bitrate
- */
-MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (
-    MixVideoConfigParamsEnc * obj, uint bps);
-
-/**
- * mix_videoconfigparamsenc_get_bit_rate:
- * @obj: #MixVideoConfigParamsEnc object
- * @bps: bitrate to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get bitrate
- */
-MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (
-    MixVideoConfigParamsEnc * obj, uint *bps);
-
-/**
- * mix_videoconfigparamsenc_set_init_qp:
- * @obj: #MixVideoConfigParamsEnc object
- * @initial_qp: The initial QP value
- * @returns: Common Video Error Return Codes
- *
- * Set the initial QP value
- */
-MIX_RESULT mix_videoconfigparamsenc_set_init_qp (
-    MixVideoConfigParamsEnc * obj, uint initial_qp);
-
-/**
- * mix_videoconfigparamsenc_get_init_qp:
- * @obj: #MixVideoConfigParamsEnc object
- * @initial_qp: The initial QP value to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get the initial QP value
- */
-MIX_RESULT mix_videoconfigparamsenc_get_init_qp (
-    MixVideoConfigParamsEnc * obj, uint *initial_qp);
-
-/**
- * mix_videoconfigparamsenc_set_min_qp:
- * @obj: #MixVideoConfigParamsEnc object
- * @min_qp: The minimum QP value
- * @returns: Common Video Error Return Codes
- *
- * Set the minimum QP value
- */
-MIX_RESULT mix_videoconfigparamsenc_set_min_qp (
-    MixVideoConfigParamsEnc * obj, uint min_qp);
-
-/**
- * mix_videoconfigparamsenc_get_min_qp:
- * @obj: #MixVideoConfigParamsEnc object
- * @min_qp: The minimum QP value to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get the minimum QP value
- */
-MIX_RESULT mix_videoconfigparamsenc_get_min_qp(
-    MixVideoConfigParamsEnc * obj, uint *min_qp);
-
-
-/**
- * mix_videoconfigparamsenc_set_target_percentage:
- * @obj: #MixVideoConfigParamsEnc object
- * @target_percentage: The target percentage value
- * @returns: Common Video Error Return Codes
- *
- * Set the target percentage value
- */
-MIX_RESULT mix_videoconfigparamsenc_set_target_percentage (
-    MixVideoConfigParamsEnc * obj, uint target_percentage);
-
-/**
- * mix_videoconfigparamsenc_get_target_percentage:
- * @obj: #MixVideoConfigParamsEnc object
- * @target_percentage: The target percentage value to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get the target percentage value
- */
-MIX_RESULT mix_videoconfigparamsenc_get_target_percentage(
-    MixVideoConfigParamsEnc * obj, uint *target_percentage);
-
-/**
- * mix_videoconfigparamsenc_set_window_size:
- * @obj: #MixVideoConfigParamsEnc object
- * @window_size: The window size for rate control
- * @returns: Common Video Error Return Codes
- *
- * Set the window size value
- */
-MIX_RESULT mix_videoconfigparamsenc_set_window_size (
-    MixVideoConfigParamsEnc * obj, uint window_size);
-
-/**
- * mix_videoconfigparamsenc_get_window_size:
- * @obj: #MixVideoConfigParamsEnc object
- * @window_size: The window size for rate control to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get the window size value
- */
-MIX_RESULT mix_videoconfigparamsenc_get_window_size (
-    MixVideoConfigParamsEnc * obj, uint *window_size);
-
-/**
- * mix_videoconfigparamsenc_set_intra_period:
- * @obj: #MixVideoConfigParamsEnc object
- * @intra_period: Number of frames between key frames (GOP size)
- * @returns: Common Video Error Return Codes
- *
- * Set Number of frames between key frames (GOP size)
- */
-MIX_RESULT mix_videoconfigparamsenc_set_intra_period (
-    MixVideoConfigParamsEnc * obj, uint intra_period);
-
-/**
- * mix_videoconfigparamsenc_get_intra_period:
- * @obj: #MixVideoConfigParamsEnc object
- * @intra_period: Number of frames between key frames (GOP size) to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get Number of frames between key frames (GOP size)
- */
-MIX_RESULT mix_videoconfigparamsenc_get_intra_period (
-    MixVideoConfigParamsEnc * obj, uint *intra_period);
-
-/**
- * mix_videoconfigparamsenc_set_buffer_pool_size:
- * @obj: #MixVideoConfigParamsEnc object
- * @bufpoolsize: Size of the pool of #MixBuffer objects
- * @returns: Common Video Error Return Codes
- *
- * Set Size of the pool of #MixBuffer objects
- */
-MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size(
-    MixVideoConfigParamsEnc * obj, uint bufpoolsize);
-
-/**
- * mix_videoconfigparamsenc_get_buffer_pool_size:
- * @obj: #MixVideoConfigParamsEnc object
- * @bufpoolsize: Size of the pool of #MixBuffer objects to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get Size of the pool of #MixBuffer objects
- */
-MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size(
-    MixVideoConfigParamsEnc * obj, uint *bufpoolsize);
-
-/**
- * mix_videoconfigparamsenc_set_share_buf_mode:
- * @obj: #MixVideoConfigParamsEnc object
- * @share_buf_mod: A flag to indicate whether buffers are shared
- * between capture and encoding drivers or not
- * @returns: Common Video Error Return Codes
- *
- * Set the flag that indicates whether buffers are shared between capture and encoding drivers or not
- */
-MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode (
-    MixVideoConfigParamsEnc * obj, bool share_buf_mod);
-
-/**
- * mix_videoconfigparamsenc_get_share_buf_mode:
- * @obj: #MixVideoConfigParamsEnc object
- * @share_buf_mod: the flag to be returned that indicates whether buffers
- * are shared between capture and encoding drivers or not
- * @returns: Common Video Error Return Codes
- *
- * Get the flag that indicates whether buffers are shared between capture and encoding drivers or not
- */
-MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode(
-    MixVideoConfigParamsEnc * obj, bool *share_buf_mod);
-
-/**
- * mix_videoconfigparamsenc_set_ci_frame_info:
- * @obj: #MixVideoConfigParamsEnc object
- * @ci_frame_id: Array of frame IDs created by capture library
- * @ci_frame_num: Size of the array ci_frame_id
- * @returns: Common Video Error Return Codes
- *
- * Set CI frame information
- */
-MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(
-    MixVideoConfigParamsEnc * obj, ulong * ci_frame_id, uint ci_frame_num);
-
-/**
- * mix_videoconfigparamsenc_get_ci_frame_info:
- * @obj: #MixVideoConfigParamsEnc object
- * @ci_frame_id: Array of frame IDs created by capture library to be returned
- * @ci_frame_num: Size of the array ci_frame_id to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get CI frame information
- *
- * The caller is responsible for releasing *ci_frame_id with g_free()
- *
- */
-MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (
-    MixVideoConfigParamsEnc * obj, ulong * *ci_frame_id, uint *ci_frame_num);
-
-
-/**
- * mix_videoconfigparamsenc_set_drawable:
- * @obj: #MixVideoConfigParamsEnc object
- * @draw: drawable
- * @returns: Common Video Error Return Codes
- *
- * Set drawable
- */
-MIX_RESULT mix_videoconfigparamsenc_set_drawable (
-    MixVideoConfigParamsEnc * obj, ulong draw);
-
-/**
- * mix_videoconfigparamsenc_get_drawable:
- * @obj: #MixVideoConfigParamsEnc object
- * @draw: drawable to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get drawable
- */
-MIX_RESULT mix_videoconfigparamsenc_get_drawable (
-    MixVideoConfigParamsEnc * obj, ulong *draw);
-
-/**
- * mix_videoconfigparamsenc_set_need_display:
- * @obj: #MixVideoConfigParamsEnc object
- * @need_display: Flag that indicates whether MixVideoFrames suitable for displaying
- *                need to be enqueued for retrieval using mix_video_get_frame()
- * @returns: Common Video Error Return Codes
- *
- * Set the flag used to indicate whether MixVideoFrames suitable for displaying
- * need to be enqueued for retrieval using mix_video_get_frame()
- */
-MIX_RESULT mix_videoconfigparamsenc_set_need_display (
-    MixVideoConfigParamsEnc * obj, bool need_display);
-
-
-/**
- * mix_videoconfigparamsenc_get_need_display:
- * @obj: #MixVideoConfigParamsEnc object
- * @need_display: A flag to be returned that indicates whether MixVideoFrames suitable for displaying
- *                need to be enqueued for retrieval using mix_video_get_frame()
- * @returns: Common Video Error Return Codes
- *
- * Get the flag used to indicate whether MixVideoFrames suitable for displaying
- * need to be enqueued for retrieval using mix_video_get_frame()
- */
-MIX_RESULT mix_videoconfigparamsenc_get_need_display(
-    MixVideoConfigParamsEnc * obj, bool *need_display);
-
-/**
- * mix_videoconfigparamsenc_set_rate_control:
- * @obj: #MixVideoConfigParamsEnc object
- * @rcmode: Rate control mode
- * @returns: Common Video Error Return Codes
- *
- * Set Rate control mode
- */
-MIX_RESULT mix_videoconfigparamsenc_set_rate_control(
-    MixVideoConfigParamsEnc * obj, MixRateControl rcmode);
-
-/**
- * mix_videoconfigparamsenc_get_rate_control:
- * @obj: #MixVideoConfigParamsEnc object
- * @rcmode: Rate control mode to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get Rate control mode
- */
-MIX_RESULT mix_videoconfigparamsenc_get_rate_control(
-    MixVideoConfigParamsEnc * obj, MixRateControl * rcmode);
-
-/**
- * mix_videoconfigparamsenc_set_raw_format:
- * @obj: #MixVideoConfigParamsEnc object
- * @raw_format: Raw format to be encoded
- * @returns: Common Video Error Return Codes
- *
- * Set Raw format to be encoded
- */
-MIX_RESULT mix_videoconfigparamsenc_set_raw_format (
-    MixVideoConfigParamsEnc * obj, MixRawTargetFormat raw_format);
-
-/**
- * mix_videoconfigparamsenc_get_raw_format:
- * @obj: #MixVideoConfigParamsEnc object
- * @raw_format: Raw format to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get Raw format
- */
-MIX_RESULT mix_videoconfigparamsenc_get_raw_format (
-    MixVideoConfigParamsEnc * obj, MixRawTargetFormat * raw_format);
-
-/**
- * mix_videoconfigparamsenc_set_profile:
- * @obj: #MixVideoConfigParamsEnc object
- * @profile: Encoding profile
- * @returns: Common Video Error Return Codes
- *
- * Set Encoding profile
- */
-MIX_RESULT mix_videoconfigparamsenc_set_profile (
-    MixVideoConfigParamsEnc * obj, MixProfile profile);
-
-/**
- * mix_videoconfigparamsenc_get_profile:
- * @obj: #MixVideoConfigParamsEnc object
- * @profile: Encoding profile to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get Encoding profile
- */
-MIX_RESULT mix_videoconfigparamsenc_get_profile (
-    MixVideoConfigParamsEnc * obj, MixProfile * profile);
-
-
-/**
- * mix_videoconfigparamsenc_set_level:
- * @obj: #MixVideoConfigParamsEnc object
- * @level: Encoding level
- * @returns: Common Video Error Return Codes
- *
- * Set Encoding level
- */
-MIX_RESULT mix_videoconfigparamsenc_set_level (
-    MixVideoConfigParamsEnc * obj, uint8 level);
-
-
-/**
- * mix_videoconfigparamsenc_get_level:
- * @obj: #MixVideoConfigParamsEnc object
- * @level: Encoding level to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get Encoding level
- */
-
-MIX_RESULT mix_videoconfigparamsenc_get_level (
-    MixVideoConfigParamsEnc * obj, uint8 * level);
-
-
-/**
- * mix_videoconfigparamsenc_set_CIR_frame_cnt:
- * @obj: #MixVideoConfigParamsEnc object
- * @CIR_frame_cnt: Encoding CIR frame count
- * @returns: Common Video Error Return Codes
- *
- * Set Encoding CIR frame count
- */
-MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (
-    MixVideoConfigParamsEnc * obj, uint CIR_frame_cnt);
-
-/**
- * mix_videoconfigparamsenc_get_CIR_frame_cnt:
- * @obj: #MixVideoConfigParamsEnc object
- * @CIR_frame_cnt: Encoding CIR frame count to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get Encoding CIR frame count
- */
-
-MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (
-    MixVideoConfigParamsEnc * obj, uint * CIR_frame_cnt);
-
-
-/**
- * mix_videoconfigparamsenc_set_max_slice_size:
- * @obj: #MixVideoConfigParamsEnc object
- * @max_slice_size: Maximum encoded slice size
- * @returns: Common Video Error Return Codes
- *
- * Set Maximum encoded slice size
- */
-MIX_RESULT mix_videoconfigparamsenc_set_max_slice_size (
-    MixVideoConfigParamsEnc * obj, uint max_slice_size);
-
-/**
- * mix_videoconfigparamsenc_get_max_slice_size:
- * @obj: #MixVideoConfigParamsEnc object
- * @max_slice_size: Maximum encoded slice size to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get Maximum encoded slice size
- */
-
-MIX_RESULT mix_videoconfigparamsenc_get_max_slice_size (
-    MixVideoConfigParamsEnc * obj, uint * max_slice_size);
-
-
-/**
- * mix_videoconfigparamsenc_set_refresh_type:
- * @obj: #MixVideoConfigParamsEnc object
- * @refresh_type: The intra refresh type (CIR, AIR, etc.)
- * @returns: Common Video Error Return Codes
- *
- * Set Intra Refresh Type
- */
-MIX_RESULT mix_videoconfigparamsenc_set_refresh_type (
-    MixVideoConfigParamsEnc * obj, MixVideoIntraRefreshType refresh_type);
-
-/**
- * mix_videoconfigparamsenc_get_refresh_type:
- * @obj: #MixVideoConfigParamsEnc object
- * @refresh_type: The intra refresh type (CIR, AIR, etc.) to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get Intra Refresh Type
- */
-
-MIX_RESULT mix_videoconfigparamsenc_get_refresh_type (
-    MixVideoConfigParamsEnc * obj, MixVideoIntraRefreshType * refresh_type);
-
-/**
- *
mix_videoconfigparamsenc_set_AIR_params: - * @obj: #MixVideoConfigParamsEnc object - * @air_params: AIR Parameters, including air_MBs, air_threshold and air_auto - * @returns: Common Video Error Return Codes - * - * Set AIR parameters - */ -MIX_RESULT mix_videoconfigparamsenc_set_AIR_params ( - MixVideoConfigParamsEnc * obj, MixAIRParams air_params); - -/** - * mix_videoconfigparamsenc_get_AIR_params: - * @obj: #MixVideoConfigParamsEnc object - * @air_params: AIR Parameters, including air_MBs, air_threshold and air_auto - * @returns: Common Video Error Return Codes - * - * Get AIR parameters - */ - -MIX_RESULT mix_videoconfigparamsenc_get_AIR_params ( - MixVideoConfigParamsEnc * obj, MixAIRParams * air_params); - -/** - * mix_videoconfigparamsenc_set_buffer_mode: - * @obj: #MixVideoConfigParamsEnc object - * @buffer_mode: Buffer allocation mode - * @returns: Common Video Error Return Codes - * - * Set buffer allocation mode - */ -MIX_RESULT mix_videoconfigparamsenc_set_buffer_mode ( - MixVideoConfigParamsEnc * obj, MixBufferAllocationMode buffer_mode); - -/** - * mix_videoconfigparamsenc_get_buffer_mode: - * @obj: #MixVideoConfigParamsEnc object - * @buffer_mode: Buffer allocation mode - * @returns: Common Video Error Return Codes - * - * Get buffer allocation mode - */ -MIX_RESULT mix_videoconfigparamsenc_get_buffer_mode ( - MixVideoConfigParamsEnc * obj, MixBufferAllocationMode * buffer_mode); - - -/** - * mix_videoconfigparamsenc_set_upstream_buffer_info: - * @obj: #MixVideoConfigParamsEnc object - * @buffer_mode: Buffer allocation mode - * @buf_info: Buffer information - * @returns: Common Video Error Return Codes - * - * Set buffer information according to the buffer mode - */ - -MIX_RESULT mix_videoconfigparamsenc_set_upstream_buffer_info ( - MixVideoConfigParamsEnc * obj, MixBufferAllocationMode buffer_mode, void * buf_info); - -/** - * mix_videoconfigparamsenc_get_upstream_buffer_info: - * @obj: #MixVideoConfigParamsEnc object - * @buffer_mode: Buffer allocation mode - * @buf_info: Buffer information - * @returns: Common Video Error Return Codes - * - * Get buffer information according to the buffer mode - */ -MIX_RESULT mix_videoconfigparamsenc_get_upstream_buffer_info ( - MixVideoConfigParamsEnc * obj, MixBufferAllocationMode buffer_mode, void ** buf_info); - -/* TODO: Add getters and setters for other properties */ -#endif /* __MIX_VIDEOCONFIGPARAMSENC_H__ */ - diff --git a/mix_video/src/mixvideoconfigparamsenc_h263.cpp b/mix_video/src/mixvideoconfigparamsenc_h263.cpp deleted file mode 100644 index f31a3e8..0000000 --- a/mix_video/src/mixvideoconfigparamsenc_h263.cpp +++ /dev/null @@ -1,133 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
-
-No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
-*/
-
-/**
- * SECTION:mixvideoconfigparamsenc_h263
- * @short_description: MI-X Video H.263 Encode Configuration Parameter
- *
- * MI-X video H.263 encode configuration parameter objects.
- */
-
-
-#include "mixvideolog.h"
-#include "mixvideoconfigparamsenc_h263.h"
-
-#define MDEBUG
-
-MixVideoConfigParamsEncH263::MixVideoConfigParamsEncH263()
-    :slice_num(1)
-    ,disable_deblocking_filter_idc(0)
-    ,reserved1(NULL)
-    ,reserved2(NULL)
-    ,reserved3(NULL)
-    ,reserved4(NULL) {
-}
-MixVideoConfigParamsEncH263::~MixVideoConfigParamsEncH263() {
-}
-
-bool MixVideoConfigParamsEncH263::copy(MixParams *target) const {
-    bool ret = FALSE;
-    MixVideoConfigParamsEncH263 * this_target = MIX_VIDEOCONFIGPARAMSENC_H263(target);
-    if (NULL != this_target) {
-        // copy the H.263-specific properties
-        this_target->slice_num = slice_num;
-        this_target->disable_deblocking_filter_idc = disable_deblocking_filter_idc;
-        // chain up to the base class
-        ret = MixVideoConfigParamsEnc::copy(target);
-    }
-    return ret;
-}
-
-bool MixVideoConfigParamsEncH263::equal(MixParams* obj) const {
-    bool ret = FALSE;
-    MixVideoConfigParamsEncH263 * this_first = MIX_VIDEOCONFIGPARAMSENC_H263(obj);
-
-    // guard against NULL before dereferencing (the H.264 variant of this
-    // method performs the same check)
-    if (NULL == this_first) {
-        return ret;
-    }
-
-    if (this_first->slice_num != slice_num) {
-        goto not_equal;
-    }
-
-    if (this_first->disable_deblocking_filter_idc != disable_deblocking_filter_idc) {
-        goto not_equal;
-    }
-
-    ret = TRUE;
-
-not_equal:
-
-    if (ret != TRUE) {
-        return ret;
-    }
-    ret = MixVideoConfigParamsEnc::equal(obj);
-    return ret;
-}
-
-
-MixParams*
-MixVideoConfigParamsEncH263::dup() const {
-    MixParams *ret = new MixVideoConfigParamsEncH263();
-    if (NULL != ret) {
-        if (FALSE == copy(ret)) {
-            ret->Unref();
-            ret = NULL;
-        }
-    }
-    return ret;
-}
-
-MixVideoConfigParamsEncH263 *
-mix_videoconfigparamsenc_h263_new (void) {
-    return new MixVideoConfigParamsEncH263();
-}
-
-
-MixVideoConfigParamsEncH263*
-mix_videoconfigparamsenc_h263_ref (MixVideoConfigParamsEncH263 * mix) {
-    if (NULL != mix)
-        mix->Ref();
-    return mix;
-}
-
-
-/* TODO: Add getters and setters for properties if any */
-
-#define MIX_VIDEOCONFIGPARAMSENC_H263_SETTER_CHECK_INPUT(obj) \
-    if(!obj) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj)) return MIX_RESULT_FAIL; \
-
-#define MIX_VIDEOCONFIGPARAMSENC_H263_GETTER_CHECK_INPUT(obj, prop) \
-    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj)) return MIX_RESULT_FAIL; \
-
-
-MIX_RESULT mix_videoconfigparamsenc_h263_set_slice_num (
-    MixVideoConfigParamsEncH263 * obj, uint slice_num) {
-    MIX_VIDEOCONFIGPARAMSENC_H263_SETTER_CHECK_INPUT (obj);
-    obj->slice_num = slice_num;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_h263_get_slice_num (
-    MixVideoConfigParamsEncH263 * obj, uint * slice_num) {
-    MIX_VIDEOCONFIGPARAMSENC_H263_GETTER_CHECK_INPUT (obj, slice_num);
-    *slice_num = obj->slice_num;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_h263_set_dlk (
-    MixVideoConfigParamsEncH263 * obj, uint disable_deblocking_filter_idc) {
-    MIX_VIDEOCONFIGPARAMSENC_H263_SETTER_CHECK_INPUT (obj);
-    obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoconfigparamsenc_h263_get_dlk ( -
MixVideoConfigParamsEncH263 * obj, uint * disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_H263_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); - *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; -} - diff --git a/mix_video/src/mixvideoconfigparamsenc_h263.h b/mix_video/src/mixvideoconfigparamsenc_h263.h deleted file mode 100644 index 4e0d994..0000000 --- a/mix_video/src/mixvideoconfigparamsenc_h263.h +++ /dev/null @@ -1,146 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_VIDEOCONFIGPARAMSENC_H263_H__ -#define __MIX_VIDEOCONFIGPARAMSENC_H263_H__ - -#include "mixvideoconfigparamsenc.h" -#include "mixvideodef.h" - - -/** -* MIX_VIDEOCONFIGPARAMSENC_H263: -* @obj: object to be type-casted. -*/ -#define MIX_VIDEOCONFIGPARAMSENC_H263(obj) (reinterpret_cast(obj)) - -/** -* MIX_IS_VIDEOCONFIGPARAMSENC_H263: -* @obj: an object. -* -* Checks if the given object is an instance of #MixVideoConfigParamsEncH263 -*/ -#define MIX_IS_VIDEOCONFIGPARAMSENC_H263(obj) ((NULL != MIX_VIDEOCONFIGPARAMSENC_H263(obj)) ? TRUE : FALSE) - - -/** -* MixVideoConfigParamsEncH263: -* -* MI-X VideoConfig Parameter object -*/ -class MixVideoConfigParamsEncH263 : public MixVideoConfigParamsEnc { -public: - MixVideoConfigParamsEncH263(); - virtual ~MixVideoConfigParamsEncH263(); - virtual bool copy(MixParams *target) const; - virtual bool equal(MixParams* obj) const; - virtual MixParams* dup() const; - -public: - /*< public > */ - - /* TODO: Add H.263 configuration paramters */ - - /* slice number in one picture */ - uint slice_num; - - /* enable/disable deblocking */ - uint disable_deblocking_filter_idc; - - /* Reserved for future use */ - void *reserved1; - - /* Reserved for future use */ - void *reserved2; - - /* Reserved for future use */ - void *reserved3; - - /* Reserved for future use */ - void *reserved4; -}; - - - - -/** -* mix_videoconfigparamsenc_h263_new: -* @returns: A newly allocated instance of #MixVideoConfigParamsEncH263 -* -* Use this method to create new instance of #MixVideoConfigParamsEncH263 -*/ -MixVideoConfigParamsEncH263 *mix_videoconfigparamsenc_h263_new (void); -/** -* mix_videoconfigparamsenc_h263_ref: -* @mix: object to add reference -* @returns: the #MixVideoConfigParamsEncH263 instance where reference count has been increased. -* -* Add reference count. 
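
Taken together, the constructor defaults and the accessors above give H.263 a very small configuration surface; a sketch of building a parameter object (illustrative only: return-code handling is compressed, and the values simply restate the constructor defaults):

    #include "mixvideoconfigparamsenc_h263.h"

    static MixVideoConfigParamsEncH263 *make_h263_params(void) {
        MixVideoConfigParamsEncH263 *p = mix_videoconfigparamsenc_h263_new();
        if (NULL == p)
            return NULL;
        /* one slice per picture; idc == 0 leaves deblocking enabled */
        if (mix_videoconfigparamsenc_h263_set_slice_num(p, 1) != MIX_RESULT_SUCCESS ||
            mix_videoconfigparamsenc_h263_set_dlk(p, 0) != MIX_RESULT_SUCCESS) {
            mix_videoconfigparamsenc_h263_unref(p);
            return NULL;
        }
        return p;
    }
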
-*/
-MixVideoConfigParamsEncH263*
-mix_videoconfigparamsenc_h263_ref (MixVideoConfigParamsEncH263 * mix);
-
-/**
-* mix_videoconfigparamsenc_h263_unref:
-* @obj: object to unref.
-*
-* Decrement reference count of the object.
-*/
-#define mix_videoconfigparamsenc_h263_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-/* Class Methods */
-
-/* TODO: Add getters and setters for other properties */
-
-/**
- * mix_videoconfigparamsenc_h263_set_dlk:
- * @obj: #MixVideoConfigParamsEncH263 object
- * @disable_deblocking_filter_idc: The flag to enable/disable deblocking
- * @returns: Common Video Error Return Codes
- *
- * Set the flag to enable/disable deblocking
- */
-MIX_RESULT mix_videoconfigparamsenc_h263_set_dlk (
-    MixVideoConfigParamsEncH263 * obj, uint disable_deblocking_filter_idc);
-
-/**
- * mix_videoconfigparamsenc_h263_get_dlk:
- * @obj: #MixVideoConfigParamsEncH263 object
- * @disable_deblocking_filter_idc: deblocking flag to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get the flag to enable/disable deblocking
- */
-MIX_RESULT mix_videoconfigparamsenc_h263_get_dlk (
-    MixVideoConfigParamsEncH263 * obj, uint * disable_deblocking_filter_idc);
-
-/**
- * mix_videoconfigparamsenc_h263_set_slice_num:
- * @obj: #MixVideoConfigParamsEncH263 object
- * @slice_num: Number of slices in one encoded picture.
- * @returns: Common Video Error Return Codes
- *
- * Set slice_num
- */
-MIX_RESULT mix_videoconfigparamsenc_h263_set_slice_num (
-    MixVideoConfigParamsEncH263 * obj, uint slice_num);
-
-/**
- * mix_videoconfigparamsenc_h263_get_slice_num:
- * @obj: #MixVideoConfigParamsEncH263 object
- * @slice_num: Number of slices in one encoded picture.
- * @returns: Common Video Error Return Codes
- *
- * Get slice_num
- */
-MIX_RESULT mix_videoconfigparamsenc_h263_get_slice_num (
-    MixVideoConfigParamsEncH263 * obj, uint * slice_num);
-
-
-
-#endif /* __MIX_VIDEOCONFIGPARAMSENC_H263_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.cpp b/mix_video/src/mixvideoconfigparamsenc_h264.cpp
deleted file mode 100644
index 9114fdf..0000000
--- a/mix_video/src/mixvideoconfigparamsenc_h264.cpp
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
-INTEL CONFIDENTIAL
-Copyright 2009 Intel Corporation All Rights Reserved.
-The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
-No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
-*/
-
-/**
- * SECTION:mixvideoconfigparamsenc_h264
- * @short_description: MI-X Video H.264 Encode Configuration Parameter
- *
- * MI-X video H.264 encode configuration parameter objects.
- */ - - -#include "mixvideolog.h" -#include "mixvideoconfigparamsenc_h264.h" - -#define MDEBUG - -MixVideoConfigParamsEncH264::MixVideoConfigParamsEncH264() - :basic_unit_size(0) - ,slice_num(1) - ,I_slice_num(1) - ,P_slice_num(1) - ,disable_deblocking_filter_idc(0) - ,vui_flag(0) - ,delimiter_type(MIX_DELIMITER_LENGTHPREFIX) - ,idr_interval(2) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) { -} - -MixVideoConfigParamsEncH264::~MixVideoConfigParamsEncH264() { -} - -bool MixVideoConfigParamsEncH264::copy(MixParams *target) const { - bool ret = FALSE; - MixVideoConfigParamsEncH264 * this_target = MIX_VIDEOCONFIGPARAMSENC_H264(target); - if (NULL != this_target) { - this_target->basic_unit_size = basic_unit_size; - this_target->slice_num = slice_num; - this_target->I_slice_num = I_slice_num; - this_target->P_slice_num = P_slice_num; - this_target->disable_deblocking_filter_idc = disable_deblocking_filter_idc; - this_target->vui_flag = vui_flag; - this_target->delimiter_type = delimiter_type; - this_target->idr_interval = idr_interval; - ret = MixVideoConfigParamsEnc::copy(target); - } - return ret; -} - -bool MixVideoConfigParamsEncH264::equal(MixParams* obj) const { - bool ret = FALSE; - MixVideoConfigParamsEncH264 * this_obj = MIX_VIDEOCONFIGPARAMSENC_H264(obj); - if (NULL == this_obj) - return ret; - - if (this_obj->basic_unit_size != basic_unit_size) { - goto not_equal; - } - - if (this_obj->slice_num != slice_num) { - goto not_equal; - } - - if (this_obj->I_slice_num != I_slice_num) { - goto not_equal; - } - - if (this_obj->P_slice_num !=P_slice_num) { - goto not_equal; - } - - if (this_obj->disable_deblocking_filter_idc != disable_deblocking_filter_idc) { - goto not_equal; - } - - if (this_obj->vui_flag !=vui_flag) { - goto not_equal; - } - - if (this_obj->delimiter_type != delimiter_type) { - goto not_equal; - } - - if (this_obj->idr_interval != idr_interval) { - goto not_equal; - } - - ret = TRUE; -not_equal: - - if (ret != TRUE) { - return ret; - } - - ret = MixVideoConfigParamsEnc::equal(this_obj); - return ret; -} - -MixParams* MixVideoConfigParamsEncH264::dup() const { - MixParams *ret = new MixVideoConfigParamsEncH264(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; -} - -MixVideoConfigParamsEncH264 * -mix_videoconfigparamsenc_h264_new (void) { - return new MixVideoConfigParamsEncH264(); -} - - - -MixVideoConfigParamsEncH264* -mix_videoconfigparamsenc_h264_ref (MixVideoConfigParamsEncH264 * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} - - -/* TODO: Add getters and setters for properties if any */ - -#define MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj)) return MIX_RESULT_FAIL; \ - - -MIX_RESULT mix_videoconfigparamsenc_h264_set_bus ( - MixVideoConfigParamsEncH264 * obj, uint basic_unit_size) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->basic_unit_size = basic_unit_size; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_bus ( - MixVideoConfigParamsEncH264 * obj, uint * basic_unit_size) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, basic_unit_size); - *basic_unit_size = obj->basic_unit_size; - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT 
mix_videoconfigparamsenc_h264_set_dlk ( - MixVideoConfigParamsEncH264 * obj, uint disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk ( - MixVideoConfigParamsEncH264 * obj, uint * disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); - *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_set_vui_flag ( - MixVideoConfigParamsEncH264 * obj, uint8 vui_flag) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->vui_flag = vui_flag; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_vui_flag ( - MixVideoConfigParamsEncH264 * obj, uint8 * vui_flag) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, vui_flag); - *vui_flag = obj->vui_flag; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num( - MixVideoConfigParamsEncH264 * obj, uint slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->slice_num = slice_num; - obj->I_slice_num = slice_num; - obj->P_slice_num = slice_num; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num( - MixVideoConfigParamsEncH264 * obj, uint * slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, slice_num); - *slice_num = obj->slice_num; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_set_I_slice_num( - MixVideoConfigParamsEncH264 * obj, uint I_slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->I_slice_num = I_slice_num; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_I_slice_num( - MixVideoConfigParamsEncH264 * obj, uint * I_slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, I_slice_num); - *I_slice_num = obj->I_slice_num; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_set_P_slice_num( - MixVideoConfigParamsEncH264 * obj, uint P_slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->P_slice_num = P_slice_num; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_P_slice_num( - MixVideoConfigParamsEncH264 * obj, uint * P_slice_num) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, P_slice_num); - *P_slice_num = obj->P_slice_num; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type ( - MixVideoConfigParamsEncH264 * obj, MixDelimiterType delimiter_type) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->delimiter_type = delimiter_type; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type ( - MixVideoConfigParamsEncH264 * obj, MixDelimiterType * delimiter_type) { - MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, delimiter_type); - *delimiter_type = obj->delimiter_type; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_set_IDR_interval ( - MixVideoConfigParamsEncH264 * obj, uint idr_interval) { - MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj); - obj->idr_interval = idr_interval; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_h264_get_IDR_interval ( - MixVideoConfigParamsEncH264 * obj, uint * idr_interval) { - 
MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, idr_interval); - *idr_interval = obj->idr_interval; - return MIX_RESULT_SUCCESS; -} diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.h b/mix_video/src/mixvideoconfigparamsenc_h264.h deleted file mode 100644 index 09afb9a..0000000 --- a/mix_video/src/mixvideoconfigparamsenc_h264.h +++ /dev/null @@ -1,300 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_VIDEOCONFIGPARAMSENC_H264_H__ -#define __MIX_VIDEOCONFIGPARAMSENC_H264_H__ - -#include "mixvideoconfigparamsenc.h" -#include "mixvideodef.h" - - -/** -* MIX_TYPE_VIDEOCONFIGPARAMSENC_H264: -* -* Get type of class. -*/ -#define MIX_VIDEOCONFIGPARAMSENC_H264(obj) (reinterpret_cast(obj)) - - -/** -* MIX_IS_VIDEOCONFIGPARAMSENC_H264: -* @obj: an object. -* -* Checks if the given object is an instance of #MixVideoConfigParamsEncH264 -*/ -#define MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj) ((NULL != MIX_VIDEOCONFIGPARAMSENC_H264(obj)) ? 
TRUE : FALSE) - - -/** -* MixVideoConfigParamsEncH264: -* -* MI-X VideoConfig Parameter object -*/ -class MixVideoConfigParamsEncH264 : public MixVideoConfigParamsEnc { -public: - MixVideoConfigParamsEncH264(); - virtual ~MixVideoConfigParamsEncH264(); - virtual bool copy(MixParams *target) const; - virtual bool equal(MixParams* obj) const; - virtual MixParams* dup() const; - -public: - - /* TODO: Add H.264 configuration paramters */ - - /* The basic unit size used by rate control */ - uint basic_unit_size; - - /* Number of slices in one frame */ - uint slice_num; - - /* Number of slices in one I frame */ - uint I_slice_num; - - /* Number of slices in one P frame */ - uint P_slice_num; - - /* enable/disable deblocking */ - uint8 disable_deblocking_filter_idc; - - /* enable/disable vui */ - uint8 vui_flag; - - /* delimiter_type */ - MixDelimiterType delimiter_type; - - uint idr_interval; - - /* Reserved for future use */ - void *reserved1; - - /* Reserved for future use */ - void *reserved2; - - /* Reserved for future use */ - void *reserved3; - - /* Reserved for future use */ - void *reserved4; -}; - - -/** -* mix_videoconfigparamsenc_h264_new: -* @returns: A newly allocated instance of #MixVideoConfigParamsEncH264 -* -* Use this method to create new instance of #MixVideoConfigParamsEncH264 -*/ -MixVideoConfigParamsEncH264 *mix_videoconfigparamsenc_h264_new (void); -/** -* mix_videoconfigparamsenc_h264_ref: -* @mix: object to add reference -* @returns: the #MixVideoConfigParamsEncH264 instance where reference count has been increased. -* -* Add reference count. -*/ -MixVideoConfigParamsEncH264* -mix_videoconfigparamsenc_h264_ref (MixVideoConfigParamsEncH264 * mix); - -/** -* mix_videoconfigparamsenc_h264_unref: -* @obj: object to unref. -* -* Decrement reference count of the object. 
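
One interaction in the implementation above deserves a note: mix_videoconfigparamsenc_h264_set_slice_num() writes slice_num, I_slice_num, and P_slice_num in one call, so an asymmetric I/P slice split has to be applied afterwards. A sketch (values illustrative only):

    #include "mixvideoconfigparamsenc_h264.h"

    static MixVideoConfigParamsEncH264 *make_h264_params(void) {
        MixVideoConfigParamsEncH264 *h = mix_videoconfigparamsenc_h264_new();
        if (NULL == h)
            return NULL;
        mix_videoconfigparamsenc_h264_set_slice_num(h, 4);   /* sets slice_num = I = P = 4 */
        mix_videoconfigparamsenc_h264_set_I_slice_num(h, 2); /* then narrow I frames to 2 */
        return h;
    }
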
-*/
-#define mix_videoconfigparamsenc_h264_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-/* Class Methods */
-
-/* TODO: Add getters and setters for other properties */
-
-
-/**
- * mix_videoconfigparamsenc_h264_set_bus:
- * @obj: #MixVideoConfigParamsEncH264 object
- * @basic_unit_size: The basic unit size used by rate control
- * @returns: Common Video Error Return Codes
- *
- * Set The basic unit size used by rate control
- */
-MIX_RESULT mix_videoconfigparamsenc_h264_set_bus (
-    MixVideoConfigParamsEncH264 * obj, uint basic_unit_size);
-
-/**
- * mix_videoconfigparamsenc_h264_get_bus:
- * @obj: #MixVideoConfigParamsEncH264 object
- * @basic_unit_size: The basic unit size to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get The basic unit size used by rate control
- */
-MIX_RESULT mix_videoconfigparamsenc_h264_get_bus (
-    MixVideoConfigParamsEncH264 * obj, uint * basic_unit_size);
-
-/**
- * mix_videoconfigparamsenc_h264_set_dlk:
- * @obj: #MixVideoConfigParamsEncH264 object
- * @disable_deblocking_filter_idc: The flag to enable/disable deblocking
- * @returns: Common Video Error Return Codes
- *
- * Set the flag to enable/disable deblocking
- */
-MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (
-    MixVideoConfigParamsEncH264 * obj, uint disable_deblocking_filter_idc);
-
-/**
- * mix_videoconfigparamsenc_h264_get_dlk:
- * @obj: #MixVideoConfigParamsEncH264 object
- * @disable_deblocking_filter_idc: deblocking flag to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get the flag to enable/disable deblocking
- */
-MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk (
-    MixVideoConfigParamsEncH264 * obj, uint * disable_deblocking_filter_idc);
-
-/**
- * mix_videoconfigparamsenc_h264_set_vui_flag:
- * @obj: #MixVideoConfigParamsEncH264 object
- * @vui_flag: The flag to enable/disable vui
- * @returns: Common Video Error Return Codes
- *
- * Set the flag to enable/disable vui
- */
-MIX_RESULT mix_videoconfigparamsenc_h264_set_vui_flag (
-    MixVideoConfigParamsEncH264 * obj, uint8 vui_flag);
-
-/**
- * mix_videoconfigparamsenc_h264_get_vui_flag:
- * @obj: #MixVideoConfigParamsEncH264 object
- * @vui_flag: vui_flag to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get the flag to enable/disable vui
- */
-MIX_RESULT mix_videoconfigparamsenc_h264_get_vui_flag (
-    MixVideoConfigParamsEncH264 * obj, uint8 * vui_flag);
-
-
-/**
- * mix_videoconfigparamsenc_h264_set_slice_num:
- * @obj: #MixVideoConfigParamsEncH264 object
- * @slice_num: Number of slices in one frame
- * @returns: Common Video Error Return Codes
- *
- * Set the Number of slices in one frame (note: this also resets
- * I_slice_num and P_slice_num to the same value)
- */
-MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(
-    MixVideoConfigParamsEncH264 * obj, uint slice_num);
-
-/**
- * mix_videoconfigparamsenc_h264_get_slice_num:
- * @obj: #MixVideoConfigParamsEncH264 object
- * @slice_num: Number of slices in one frame to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get the Number of slices in one frame
- */
-MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num(
-    MixVideoConfigParamsEncH264 * obj, uint * slice_num);
-
-
-/**
- * mix_videoconfigparamsenc_h264_set_I_slice_num:
- * @obj: #MixVideoConfigParamsEncH264 object
- * @I_slice_num: Number of slices in one I frame
- * @returns: Common Video Error Return Codes
- *
- * Set the Number of slices in one I frame
- */
-MIX_RESULT mix_videoconfigparamsenc_h264_set_I_slice_num(
-    MixVideoConfigParamsEncH264 * obj, uint I_slice_num);
-
-/**
- *
mix_videoconfigparamsenc_h264_get_I_slice_num: - * @obj: #MixVideoConfigParamsEncH264 object - * @I_slice_num: Number of slices in one I frame to be returned - * @returns: Common Video Error Return Codes - * - * Get the Number of slices in one I frame - */ -MIX_RESULT mix_videoconfigparamsenc_h264_get_I_slice_num( - MixVideoConfigParamsEncH264 * obj, uint * I_slice_num); - -/** - * mix_videoconfigparamsenc_h264_set_P_slice_num: - * @obj: #MixVideoConfigParamsEncH264 object - * @P_slice_num: Number of slices in one P frame - * @returns: Common Video Error Return Codes - * - * Set the Number of slices in one P frame - */ -MIX_RESULT mix_videoconfigparamsenc_h264_set_P_slice_num( - MixVideoConfigParamsEncH264 * obj, uint P_slice_num); - -/** - * mix_videoconfigparamsenc_h264_get_P_slice_num: - * @obj: #MixVideoConfigParamsEncH264 object - * @P_slice_num: Number of slices in one P frame to be returned - * @returns: Common Video Error Return Codes - * - * Get the Number of slices in one P frame - */ -MIX_RESULT mix_videoconfigparamsenc_h264_get_P_slice_num( - MixVideoConfigParamsEncH264 * obj, uint * P_slice_num); - -/** - * mix_videoconfigparamsenc_h264_set_delimiter_type: - * @obj: #MixVideoConfigParamsEncH264 object - * @delimiter_type: Delimiter type - * @returns: Common Video Error Return Codes - * - * Set Delimiter type - */ -MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type ( - MixVideoConfigParamsEncH264 * obj, MixDelimiterType delimiter_type); - -/** - * mix_videoconfigparamsenc_h264_get_delimiter_type: - * @obj: #MixVideoConfigParamsEncH264 object - * @delimiter_type: Delimiter type to be returned - * @returns: Common Video Error Return Codes - * - * Get Delimiter type - */ -MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type ( - MixVideoConfigParamsEncH264 * obj, MixDelimiterType * delimiter_type); - - -/** - * mix_videoconfigparamsenc_h264_set_IDR_interval: - * @obj: #MixVideoConfigParamsEncH264 object - * @idr_interval: IDR interval - * @returns: Common Video Error Return Codes - * - * Set IDR interval - */ -MIX_RESULT mix_videoconfigparamsenc_h264_set_IDR_interval ( - MixVideoConfigParamsEncH264 * obj, uint idr_interval); - - -/** - * mix_videoconfigparamsenc_h264_get_IDR_interval: - * @obj: #MixVideoConfigParamsEncH264 object - * @idr_interval: IDR interval to be returned - * @returns: Common Video Error Return Codes - * - * Get IDR interval - */ -MIX_RESULT mix_videoconfigparamsenc_h264_get_IDR_interval ( - MixVideoConfigParamsEncH264 * obj, uint * idr_interval); - - - -#endif /* __MIX_VIDEOCONFIGPARAMSENC_H264_H__ */ - diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp b/mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp deleted file mode 100644 index 9ce152c..0000000 --- a/mix_video/src/mixvideoconfigparamsenc_mpeg4.cpp +++ /dev/null @@ -1,141 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
-No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
-*/
-
-/**
- * SECTION:mixvideoconfigparamsenc_mpeg4
- * @short_description: MI-X Video MPEG 4:2 Encode Configuration Parameter
- *
- * MI-X video MPEG 4:2 encode configuration parameter objects.
- */
-
-
-#include "mixvideolog.h"
-#include "mixvideoconfigparamsenc_mpeg4.h"
-
-#define MDEBUG
-
-MixVideoConfigParamsEncMPEG4::MixVideoConfigParamsEncMPEG4()
-    :profile_and_level_indication(3)
-    ,fixed_vop_time_increment(3)
-    ,disable_deblocking_filter_idc(0)
-    ,reserved1(NULL)
-    ,reserved2(NULL)
-    ,reserved3(NULL)
-    ,reserved4(NULL) {
-}
-
-MixVideoConfigParamsEncMPEG4::~MixVideoConfigParamsEncMPEG4() {
-}
-
-
-bool MixVideoConfigParamsEncMPEG4::copy(MixParams *target) const {
-    bool ret = FALSE;
-    MixVideoConfigParamsEncMPEG4 *this_target = MIX_VIDEOCONFIGPARAMSENC_MPEG4(target);
-    if (NULL != this_target) {
-        // copy the MPEG-4-specific properties
-        this_target->profile_and_level_indication = profile_and_level_indication;
-        this_target->fixed_vop_time_increment = fixed_vop_time_increment;
-        this_target->disable_deblocking_filter_idc = disable_deblocking_filter_idc;
-
-        // Now chain up to the base class
-        ret = MixVideoConfigParamsEnc::copy(target);
-    }
-    return ret;
-}
-
-bool MixVideoConfigParamsEncMPEG4::equal(MixParams* obj) const {
-    bool ret = FALSE;
-    MixVideoConfigParamsEncMPEG4 *this_obj = MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj);
-    if ((NULL != this_obj) &&
-        (profile_and_level_indication == this_obj->profile_and_level_indication) &&
-        (fixed_vop_time_increment == this_obj->fixed_vop_time_increment) &&
-        (disable_deblocking_filter_idc == this_obj->disable_deblocking_filter_idc)) {
-        ret = MixVideoConfigParamsEnc::equal(obj);
-    }
-    return ret;
-}
-
-MixParams* MixVideoConfigParamsEncMPEG4::dup() const {
-    MixParams *ret = NULL;
-    MixVideoConfigParamsEncMPEG4 *duplicate = new MixVideoConfigParamsEncMPEG4();
-    if (TRUE == copy(duplicate)) {
-        ret = duplicate;
-    } else {
-        if (NULL != duplicate)
-            duplicate->Unref();
-    }
-    return ret;
-}
-
-
-MixVideoConfigParamsEncMPEG4 *
-mix_videoconfigparamsenc_mpeg4_new (void) {
-    return new MixVideoConfigParamsEncMPEG4();
-}
-
-
-MixVideoConfigParamsEncMPEG4 *
-mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix) {
-    if (NULL != mix)
-        mix->Ref();
-    return mix;
-}
-
-/* TODO: Add getters and setters for properties if any */
-
-#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT(obj) \
-    if(!obj) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \
-
-#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT(obj, prop) \
-    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \
-
-
-MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (
-    MixVideoConfigParamsEncMPEG4 * obj, uchar profile_and_level_indication) {
-    MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj);
-    obj->profile_and_level_indication = profile_and_level_indication;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT
mix_videoconfigparamsenc_mpeg4_get_profile_level ( - MixVideoConfigParamsEncMPEG4 * obj, uchar * profile_and_level_indication) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, profile_and_level_indication); - *profile_and_level_indication = obj->profile_and_level_indication; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti ( - MixVideoConfigParamsEncMPEG4 * obj, uint fixed_vop_time_increment) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); - obj->fixed_vop_time_increment = fixed_vop_time_increment; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti ( - MixVideoConfigParamsEncMPEG4 * obj, uint * fixed_vop_time_increment) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, fixed_vop_time_increment); - *fixed_vop_time_increment = obj->fixed_vop_time_increment; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk ( - MixVideoConfigParamsEncMPEG4 * obj, uint disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj); - obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk ( - MixVideoConfigParamsEncMPEG4 * obj, uint * disable_deblocking_filter_idc) { - MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc); - *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc; - return MIX_RESULT_SUCCESS; -} - diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h deleted file mode 100644 index 5efc6ea..0000000 --- a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h +++ /dev/null @@ -1,173 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ -#define __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ - -#include "mixvideoconfigparamsenc.h" -#include "mixvideodef.h" - - -/** -* MIX_VIDEOCONFIGPARAMSENC_MPEG4: -* @obj: object to be type-casted. -*/ -#define MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj) (reinterpret_cast(obj)) - -/** -* MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4: -* @obj: an object. 
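
The two MPEG-4-specific knobs both default to 3 in the constructor above; setting them explicitly is a one-liner each (illustrative only: the values simply restate those defaults):

    #include "mixvideoconfigparamsenc_mpeg4.h"

    static MIX_RESULT apply_mpeg4_defaults(MixVideoConfigParamsEncMPEG4 *m) {
        MIX_RESULT r = mix_videoconfigparamsenc_mpeg4_set_profile_level(m, 3);
        if (MIX_RESULT_SUCCESS != r)
            return r;
        /* ticks between two successive VOPs in display order */
        return mix_videoconfigparamsenc_mpeg4_set_fixed_vti(m, 3);
    }
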
-*
-* Checks if the given object is an instance of #MixVideoConfigParamsEncMPEG4
-*/
-#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj) (NULL != MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj))
-
-
-/**
-* MixVideoConfigParamsEncMPEG4:
-*
-* MI-X VideoConfig Parameter object
-*/
-class MixVideoConfigParamsEncMPEG4 : public MixVideoConfigParamsEnc {
-public:
-    MixVideoConfigParamsEncMPEG4();
-    virtual ~MixVideoConfigParamsEncMPEG4();
-    virtual bool copy(MixParams *target) const;
-    virtual bool equal(MixParams* obj) const;
-    virtual MixParams* dup() const;
-
-public:
-    /* TODO: Add MPEG-4 configuration parameters */
-    /* Indicate profile and level.
-     * Default value is 3.
-     * Can be ignored (refer to encoding
-     * specification for more info). */
-    uchar profile_and_level_indication;
-
-    /* Number of ticks between two successive VOPs
-     * in display order. Default value is 3.
-     * Can be ignored (refer to encoding specification
-     * for more info) */
-    uint fixed_vop_time_increment;
-
-    /* enable/disable deblocking */
-    uint disable_deblocking_filter_idc;
-
-    /* Reserved for future use */
-    void *reserved1;
-
-    /* Reserved for future use */
-    void *reserved2;
-
-    /* Reserved for future use */
-    void *reserved3;
-
-    /* Reserved for future use */
-    void *reserved4;
-};
-
-/**
-* mix_videoconfigparamsenc_mpeg4_new:
-* @returns: A newly allocated instance of #MixVideoConfigParamsEncMPEG4
-*
-* Use this method to create new instance of #MixVideoConfigParamsEncMPEG4
-*/
-MixVideoConfigParamsEncMPEG4 *mix_videoconfigparamsenc_mpeg4_new (void);
-/**
-* mix_videoconfigparamsenc_mpeg4_ref:
-* @mix: object to add reference
-* @returns: the #MixVideoConfigParamsEncMPEG4 instance where reference count has been increased.
-*
-* Add reference count.
-*/
-MixVideoConfigParamsEncMPEG4* mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix);
-
-/**
-* mix_videoconfigparamsenc_mpeg4_unref:
-* @obj: object to unref.
-*
-* Decrement reference count of the object.
-*/
-#define mix_videoconfigparamsenc_mpeg4_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-/* Class Methods */
-
-/* TODO: Add getters and setters for other properties */
-
-/**
- * mix_videoconfigparamsenc_mpeg4_set_dlk:
- * @obj: #MixVideoConfigParamsEncMPEG4 object
- * @disable_deblocking_filter_idc: The flag to enable/disable deblocking
- * @returns: Common Video Error Return Codes
- *
- * Set the flag to enable/disable deblocking
- */
-MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (
-    MixVideoConfigParamsEncMPEG4 * obj, uint disable_deblocking_filter_idc);
-
-/**
- * mix_videoconfigparamsenc_mpeg4_get_dlk:
- * @obj: #MixVideoConfigParamsEncMPEG4 object
- * @disable_deblocking_filter_idc: deblocking flag to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get the flag to enable/disable deblocking
- */
-MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (
-    MixVideoConfigParamsEncMPEG4 * obj, uint * disable_deblocking_filter_idc);
-
-/**
- * mix_videoconfigparamsenc_mpeg4_set_profile_level:
- * @obj: #MixVideoConfigParamsEncMPEG4 object
- * @profile_and_level_indication: Indicate profile and level. Default value is 3.
- *                                Can be ignored (refer to encoding specification
- *                                for more info).
- * @returns: Common Video Error Return Codes
- *
- * Set profile_and_level_indication
- */
-MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (
-    MixVideoConfigParamsEncMPEG4 * obj, uchar profile_and_level_indication);
-
-/**
- * mix_videoconfigparamsenc_mpeg4_get_profile_level:
- * @obj: #MixVideoConfigParamsEncMPEG4 object
- * @profile_and_level_indication: profile_and_level_indication to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get profile_and_level_indication
- */
-MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (
-    MixVideoConfigParamsEncMPEG4 * obj, uchar * profile_and_level_indication);
-
-/**
- * mix_videoconfigparamsenc_mpeg4_set_fixed_vti:
- * @obj: #MixVideoConfigParamsEncMPEG4 object
- * @fixed_vop_time_increment: Number of ticks between two successive VOPs in display order.
- *                            Default value is 3. Can be ignored (refer to encoding specification
- *                            for more info)
- * @returns: Common Video Error Return Codes
- *
- * Set fixed_vop_time_increment
- */
-MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (
-    MixVideoConfigParamsEncMPEG4 * obj, uint fixed_vop_time_increment);
-
-/**
- * mix_videoconfigparamsenc_mpeg4_get_fixed_vti:
- * @obj: #MixVideoConfigParamsEncMPEG4 object
- * @fixed_vop_time_increment: fixed_vop_time_increment to be returned
- * @returns: Common Video Error Return Codes
- *
- * Get fixed_vop_time_increment
- */
-MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (
-    MixVideoConfigParamsEncMPEG4 * obj, uint * fixed_vop_time_increment);
-
-
-#endif /* __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.cpp b/mix_video/src/mixvideoconfigparamsenc_preview.cpp
deleted file mode 100644
index ecf73d2..0000000
--- a/mix_video/src/mixvideoconfigparamsenc_preview.cpp
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
-INTEL CONFIDENTIAL
-Copyright 2009 Intel Corporation All Rights Reserved.
-The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
-No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
-*/
-
-/**
-* SECTION:mixvideoconfigparamsenc_preview
-* @short_description: VideoConfig parameters
-*
-* A data object which stores videoconfig specific parameters.
-*/ - -#include "mixvideolog.h" -#include "mixvideoconfigparamsenc_preview.h" - -#define MDEBUG - -MixVideoConfigParamsEncPreview::MixVideoConfigParamsEncPreview() { -} - -MixParams* MixVideoConfigParamsEncPreview::dup() const { - MixParams *ret = NULL; - MixVideoConfigParamsEncPreview *duplicate = new MixVideoConfigParamsEncPreview(); - if (TRUE == copy(duplicate)) { - ret = duplicate; - } else { - if (NULL != duplicate) - duplicate->Unref(); - } - return ret; -} - -MixVideoConfigParamsEncPreview * -mix_videoconfigparamsenc_preview_new (void) { - return new MixVideoConfigParamsEncPreview(); -} - -MixVideoConfigParamsEncPreview* -mix_videoconfigparamsenc_preview_ref (MixVideoConfigParamsEncPreview * mix) { - if (NULL != mix) - mix->Ref(); - return mix; - -} diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.h b/mix_video/src/mixvideoconfigparamsenc_preview.h deleted file mode 100644 index 9ea2a67..0000000 --- a/mix_video/src/mixvideoconfigparamsenc_preview.h +++ /dev/null @@ -1,73 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_VIDEOCONFIGPARAMSENC_PREVIEW_H__ -#define __MIX_VIDEOCONFIGPARAMSENC_PREVIEW_H__ - -#include "mixvideoconfigparamsenc.h" -#include "mixvideodef.h" - - -/** -* MIX_VIDEOCONFIGPARAMSENC_PREVIEW: -* @obj: object to be type-casted. -*/ -#define MIX_VIDEOCONFIGPARAMSENC_PREVIEW(obj) (reinterpret_cast(obj)) - -/** -* MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW: -* @obj: an object. -* -* Checks if the given object is an instance of #MixVideoConfigParamsEncPreview -*/ -#define MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(obj) (NULL != MIX_VIDEOCONFIGPARAMSENC_PREVIEW(obj)) - - - -/** -* MixVideoConfigParamsEncPreview: -* -* MI-X VideoConfig Parameter object -*/ -class MixVideoConfigParamsEncPreview : public MixVideoConfigParamsEnc { -public: - MixVideoConfigParamsEncPreview(); - virtual MixParams* dup() const; -}; - - - -/** -* mix_videoconfigparamsenc_preview_new: -* @returns: A newly allocated instance of #MixVideoConfigParamsEncPreview -* -* Use this method to create new instance of #MixVideoConfigParamsEncPreview -*/ -MixVideoConfigParamsEncPreview* mix_videoconfigparamsenc_preview_new (void); -/** -* mix_videoconfigparamsenc_preview_ref: -* @mix: object to add reference -* @returns: the MixVideoConfigParamsEncPreview instance where reference count has been increased. -* -* Add reference count. 
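
MixVideoConfigParamsEncPreview adds no fields of its own, so its dup() above is the clone idiom in its purest form: allocate the subclass, deep-copy through the virtual copy(), and drop the half-built object on failure. The same shape works for any MixParams subclass (a sketch, not part of the patch):

    static MixParams *clone_params(const MixParams *src) {
        MixParams *copy = (NULL != src) ? src->dup() : NULL; /* virtual, deep copy */
        return copy;                                         /* caller releases with Unref() */
    }
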
-*/
-MixVideoConfigParamsEncPreview* mix_videoconfigparamsenc_preview_ref (
-    MixVideoConfigParamsEncPreview * mix);
-
-/**
-* mix_videoconfigparamsenc_preview_unref:
-* @obj: object to unref.
-*
-* Decrement reference count of the object.
-*/
-#define mix_videoconfigparamsenc_preview_unref(obj) mix_params_unref(MIX_PARAMS(obj))
-
-
-
-#endif /* __MIX_VIDEOCONFIGPARAMSENC_PREVIEW_H__ */
-
diff --git a/mix_video/src/mixvideodecodeparams.cpp b/mix_video/src/mixvideodecodeparams.cpp
deleted file mode 100644
index a6979f8..0000000
--- a/mix_video/src/mixvideodecodeparams.cpp
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-/**
- * SECTION:mixvideodecodeparams
- * @short_description: MI-X Video Decode Parameters
- *
- * The #MixVideoDecodeParams object will be created by the MMF/App
- * and provided to MixVideo in the mix_video_decode() function.
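
A typical caller fills one of these in per compressed buffer before handing it to mix_video_decode() (illustrative only: mix_video_decode() itself is declared in mixvideo.h, outside this hunk):

    #include "mixvideodecodeparams.h"

    static void prepare_decode_params(MixVideoDecodeParams *dp,
                                      uint64 pts_ms, bool discont) {
        mix_videodecodeparams_set_timestamp(dp, pts_ms);      /* PTS in milliseconds */
        mix_videodecodeparams_set_discontinuity(dp, discont); /* stream gap marker */
        mix_videodecodeparams_set_complete_frame(dp, TRUE);   /* buffer holds a whole frame */
    }
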
- */ - -#include "mixvideodecodeparams.h" - -#define MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEODECODEPARAMS(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEODECODEPARAMS(obj)) return MIX_RESULT_FAIL; \ - - -MixVideoDecodeParams::MixVideoDecodeParams() - :timestamp(0) - ,discontinuity(FALSE) - ,new_sequence(FALSE) - ,complete_frame(FALSE) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) { -} - -MixVideoDecodeParams::~MixVideoDecodeParams() { -} - -bool MixVideoDecodeParams::copy(MixParams *target) const { - bool ret = FALSE; - MixVideoDecodeParams * this_target = MIX_VIDEODECODEPARAMS(target); - if (NULL != this_target) { - // chain up base class - ret = MixParams::copy(target); - } - return ret; -} - -bool MixVideoDecodeParams::equal(MixParams* obj) const { - bool ret = FALSE; - MixVideoDecodeParams * this_obj = MIX_VIDEODECODEPARAMS(obj); - if (NULL != this_obj) - ret = MixParams::equal(this_obj); - return ret; -} - -MixParams* MixVideoDecodeParams::dup() const { - MixParams *ret = new MixVideoDecodeParams(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; -} - -MixVideoDecodeParams * mix_videodecodeparams_new(void) { - return new MixVideoDecodeParams(); -} - -MixVideoDecodeParams * -mix_videodecodeparams_ref(MixVideoDecodeParams * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} - - -/* TODO: Add getters and setters for properties. */ - -MIX_RESULT mix_videodecodeparams_set_timestamp( - MixVideoDecodeParams * obj, uint64 timestamp) { - MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->timestamp = timestamp; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videodecodeparams_get_timestamp( - MixVideoDecodeParams * obj, uint64 * timestamp) { - MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, timestamp); - *timestamp = obj->timestamp; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videodecodeparams_set_discontinuity( - MixVideoDecodeParams * obj, bool discontinuity) { - MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->discontinuity = discontinuity; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videodecodeparams_get_discontinuity( - MixVideoDecodeParams * obj, bool *discontinuity) { - MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity); - *discontinuity = obj->discontinuity; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videodecodeparams_set_new_sequence( - MixVideoDecodeParams * obj, bool new_sequence) { - MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->new_sequence = new_sequence; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videodecodeparams_get_new_sequence( - MixVideoDecodeParams * obj, bool *new_sequence) { - MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, new_sequence); - *new_sequence = obj->new_sequence; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videodecodeparams_set_complete_frame(MixVideoDecodeParams * obj, - bool complete_frame) { - MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj); - obj->complete_frame = complete_frame; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videodecodeparams_get_complete_frame(MixVideoDecodeParams * obj, - bool *complete_frame) { - MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, complete_frame); - *complete_frame = obj->complete_frame; - return MIX_RESULT_SUCCESS; -} - - diff --git a/mix_video/src/mixvideodecodeparams.h 
b/mix_video/src/mixvideodecodeparams.h deleted file mode 100644 index aa5e799..0000000 --- a/mix_video/src/mixvideodecodeparams.h +++ /dev/null @@ -1,195 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEODECODEPARAMS_H__ -#define __MIX_VIDEODECODEPARAMS_H__ - -#include -#include "mixvideodef.h" - -/** - * MIX_VIDEODECODEPARAMS: - * @obj: object to be type-casted. - */ -#define MIX_VIDEODECODEPARAMS(obj) (reinterpret_cast(obj)) - -/** - * MIX_IS_VIDEODECODEPARAMS: - * @obj: an object. - * - * Checks if the given object is an instance of #MixParams - */ -#define MIX_IS_VIDEODECODEPARAMS(obj) ((NULL != MIX_VIDEODECODEPARAMS(obj)) ? TRUE : FALSE) - - -/** - * MixVideoDecodeParams: - * - * MI-X VideoDecode Parameter object - */ -class MixVideoDecodeParams : public MixParams { -public: - MixVideoDecodeParams(); - ~MixVideoDecodeParams(); - virtual bool copy(MixParams *target) const; - virtual bool equal(MixParams* obj) const; - virtual MixParams* dup() const; -public: - /*< public > */ - /* TODO: Add properties */ - - /* Presentation timestamp for the video - * frame data, in milliseconds */ - uint64 timestamp; - - /* Indicates a discontinuity in the stream */ - bool discontinuity; - - /* output only, indicate if stream contains a new sequence */ - bool new_sequence; - - /* Indicates a complete frame */ - bool complete_frame; - - /* Reserved for future use */ - void *reserved1; - - /* Reserved for future use */ - void *reserved2; - - /* Reserved for future use */ - void *reserved3; - - /* Reserved for future use */ - void *reserved4; -}; - -/** - * mix_videodecodeparams_new: - * @returns: A newly allocated instance of #MixVideoDecodeParams - * - * Use this method to create new instance of #MixVideoDecodeParams - */ -MixVideoDecodeParams *mix_videodecodeparams_new(void); -/** - * mix_videodecodeparams_ref: - * @mix: object to add reference - * @returns: the #MixVideoDecodeParams instance where reference count has been increased. - * - * Add reference count. - */ -MixVideoDecodeParams *mix_videodecodeparams_ref(MixVideoDecodeParams * mix); - -/** - * mix_videodecodeparams_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. 
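Beyond reference management, callers typically read the output-only fields back after a decode call; a hedged sketch using the getters declared below, where `dp` is assumed to be the #MixVideoDecodeParams instance that was passed to mix_video_decode():

    // Sketch: check whether the decoder reported a new sequence.
    bool new_seq = FALSE;
    if (MIX_RESULT_SUCCESS == mix_videodecodeparams_get_new_sequence(dp, &new_seq)
            && new_seq) {
        // Stream parameters such as picture resolution may have changed
        // and can be re-queried from the configuration objects.
    }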
- */ -#define mix_videodecodeparams_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/* Class Methods */ - -/* TODO: Add getters and setters for properties */ - - -/** - * mix_videodecodeparams_set_timestamp: - * @obj: #MixVideoDecodeParams object - * @timestamp: Presentation timestamp for the video frame data, in milliseconds - * @returns: Common Video Error Return Codes - * - * Set Presentation timestamp - */ -MIX_RESULT mix_videodecodeparams_set_timestamp(MixVideoDecodeParams * obj, - uint64 timestamp); - -/** - * mix_videodecodeparams_get_timestamp: - * @obj: #MixVideoDecodeParams object - * @timestamp: Presentation timestamp for the video frame data, in milliseconds to be returned. - * @returns: Common Video Error Return Codes - * - * Get Presentation timestamp - */ -MIX_RESULT mix_videodecodeparams_get_timestamp(MixVideoDecodeParams * obj, - uint64 * timestamp); - -/** - * mix_videodecodeparams_set_discontinuity: - * @obj: #MixVideoDecodeParams object - * @discontinuity: Flag to indicate a discontinuity in the stream. - * @returns: Common Video Error Return Codes - * - * Set discontinuity flag - */ -MIX_RESULT mix_videodecodeparams_set_discontinuity(MixVideoDecodeParams * obj, - bool discontinuity); - - -/** - * mix_videodecodeparams_get_discontinuity: - * @obj: #MixVideoDecodeParams object - * @discontinuity: Discontinuity flag to be returned - * @returns: Common Video Error Return Codes - * - * Get discontinuity flag - */ -MIX_RESULT mix_videodecodeparams_get_discontinuity(MixVideoDecodeParams * obj, - bool *discontinuity); - - -/** - * mix_videodecodeparams_set_new_sequence: - * @obj: #MixVideoDecodeParams object - * @new_sequence: Flag to indicate if stream contains a new sequence. - * @returns: Common Video Error Return Codes - * - * Set new_sequence flag - */ -MIX_RESULT mix_videodecodeparams_set_new_sequence(MixVideoDecodeParams * obj, - bool new_sequence); - - -/** - * mix_videodecodeparams_get_new_sequence: - * @obj: #MixVideoDecodeParams object - * @new_sequence: new_sequence flag to be returned - * @returns: Common Video Error Return Codes - * - * Get new_sequence flag - */ -MIX_RESULT mix_videodecodeparams_get_new_sequence(MixVideoDecodeParams * obj, - bool *new_sequence); - -/** - * mix_videodecodeparams_get_complete_frame: - * @obj: #MixVideoDecodeParams object - * @complete_frame: complete_frame flag to be returned - * @returns: Common Video Error Return Codes - * - * Get complete_frame flag - */ -MIX_RESULT mix_videodecodeparams_get_complete_frame(MixVideoDecodeParams * obj, - bool *complete_frame); - - -/** - * mix_videodecodeparams_set_complete_frame: - * @obj: #MixVideoDecodeParams object - * @complete_frame: Flag to indicate if frame is a complete frame or not - * @returns: Common Video Error Return Codes - * - * Set new_sequence flag - */ -MIX_RESULT mix_videodecodeparams_set_complete_frame(MixVideoDecodeParams * obj, - bool complete_frame); - - - -#endif /* __MIX_VIDEODECODEPARAMS_H__ */ diff --git a/mix_video/src/mixvideodef.h b/mix_video/src/mixvideodef.h deleted file mode 100644 index 464bc14..0000000 --- a/mix_video/src/mixvideodef.h +++ /dev/null @@ -1,259 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. 
The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideodef - * @title: MI-X Video Data Definitons And Common Error Code - * @short_description: MI-X Video data definitons and common error code - * @include: mixvideodef.h - * - * The section includes the definition of enum and struct as well as - * - * Common Video Error Return Codes of MI-X video functions - * - * #MIX_RESULT_SUCCESS, Successfully resumed - * MIX_RESULT_NULL_PTR, The pointer passed to the function was null. - * MIX_RESULT_NO_MEMORY, Memory needed for the operation could not be allocated. - * MIX_RESULT_INVALID_PARAM, An argument passed to the function was invalid. - * MIX_RESULT_NOT_INIT, MixVideo object has not been initialized yet. - * MIX_RESULT_NOT_CONFIGURED, MixVideo object has not been configured yet. - * MIX_RESULT_FAIL, For any failure. - * - * - */ - -#ifndef __MIX_VIDEO_DEF_H__ -#define __MIX_VIDEO_DEF_H__ - -#include - - -#define MAX_ENC_SURFACE_COUNT 20 -#define MIX_STRING_TO_FOURCC(format) ((uint32)(((format)[0])|((format)[1]<<8)|((format)[2]<<16)|((format)[3]<<24))) - -/* - * MI-X video error code - */ -typedef enum { - MIX_RESULT_FRAME_NOTAVAIL = MIX_RESULT_ERROR_VIDEO_START + 1, - MIX_RESULT_EOS, - MIX_RESULT_POOLEMPTY, - MIX_RESULT_OUTOFSURFACES, - MIX_RESULT_DROPFRAME, - MIX_RESULT_NOTIMPL, - MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW, - MIX_RESULT_NOT_PERMITTED, - MIX_RESULT_ERROR_PROCESS_STREAM, - MIX_RESULT_MISSING_CONFIG, - MIX_RESULT_MISSING_IDR, - MIX_RESULT_VIDEO_LAST -} MIX_VIDEO_ERROR_CODE; - -/* - MixCodecMode - */ -typedef enum { - MIX_CODEC_MODE_ENCODE = 0, - MIX_CODEC_MODE_DECODE, - MIX_CODEC_MODE_LAST -} MixCodecMode; - -typedef enum { - MIX_FRAMEORDER_MODE_DISPLAYORDER = 0, - MIX_FRAMEORDER_MODE_DECODEORDER, - MIX_FRAMEORDER_MODE_LAST -} MixFrameOrderMode; - -typedef struct _MixIOVec { - uchar *data; - int buffer_size; - int data_size; - bool is_key_frame; -} MixIOVec; - -typedef struct _MixRect { - short x; - short y; - ushort width; - ushort height; -} MixRect; - -typedef enum { - MIX_STATE_UNINITIALIZED = 0, - MIX_STATE_INITIALIZED, - MIX_STATE_CONFIGURED, - MIX_STATE_LAST -} MixState; - - -typedef enum -{ - MIX_RAW_TARGET_FORMAT_NONE = 0, - MIX_RAW_TARGET_FORMAT_YUV420 = 1, - MIX_RAW_TARGET_FORMAT_YUV422 = 2, - MIX_RAW_TARGET_FORMAT_YUV444 = 4, - MIX_RAW_TARGET_FORMAT_NV12 = 8, - MIX_RAW_TARGET_FORMAT_PROTECTED = 0x80000000, - MIX_RAW_TARGET_FORMAT_LAST -} MixRawTargetFormat; - - -typedef enum -{ - MIX_ENCODE_TARGET_FORMAT_MPEG4 = 0, - MIX_ENCODE_TARGET_FORMAT_H263 = 2, - MIX_ENCODE_TARGET_FORMAT_H264 = 4, - MIX_ENCODE_TARGET_FORMAT_PREVIEW = 8, - MIX_ENCODE_TARGET_FORMAT_LAST -} MixEncodeTargetFormat; - - -typedef enum -{ - MIX_RATE_CONTROL_NONE = 1, - MIX_RATE_CONTROL_CBR = 2, - MIX_RATE_CONTROL_VBR = 4, - MIX_RATE_CONTROL_VCM = 8, - MIX_RATE_CONTROL_LAST -} 
MixRateControl; - -typedef enum -{ - MIX_PROFILE_MPEG2SIMPLE = 0, - MIX_PROFILE_MPEG2MAIN, - MIX_PROFILE_MPEG4SIMPLE, - MIX_PROFILE_MPEG4ADVANCEDSIMPLE, - MIX_PROFILE_MPEG4MAIN, - MIX_PROFILE_H264BASELINE, - MIX_PROFILE_H264MAIN, - MIX_PROFILE_H264HIGH, - MIX_PROFILE_VC1SIMPLE, - MIX_PROFILE_VC1MAIN, - MIX_PROFILE_VC1ADVANCED, - MIX_PROFILE_H263BASELINE -} MixProfile; - -typedef enum -{ - MIX_DELIMITER_LENGTHPREFIX = 0, - MIX_DELIMITER_ANNEXB -} MixDelimiterType; - -typedef enum { - MIX_VIDEO_NONIR, - MIX_VIDEO_CIR, /*Cyclic intra refresh*/ - MIX_VIDEO_AIR, /*Adaptive intra refresh*/ - MIX_VIDEO_BOTH, - MIX_VIDEO_LAST -} MixVideoIntraRefreshType; - -typedef struct _MixAIRParams -{ - uint air_MBs; - uint air_threshold; - uint air_auto; -} MixAIRParams; - -typedef enum { - MIX_ENC_PARAMS_START_UNUSED = 0x01000000, - MIX_ENC_PARAMS_BITRATE, - MIX_ENC_PARAMS_INIT_QP, - MIX_ENC_PARAMS_MIN_QP, - MIX_ENC_PARAMS_WINDOW_SIZE, - MIX_ENC_PARAMS_TARGET_PERCENTAGE, - MIX_ENC_PARAMS_SLICE_NUM, - MIX_ENC_PARAMS_I_SLICE_NUM, - MIX_ENC_PARAMS_P_SLICE_NUM, - MIX_ENC_PARAMS_RESOLUTION, - MIX_ENC_PARAMS_GOP_SIZE, - MIX_ENC_PARAMS_FRAME_RATE, - MIX_ENC_PARAMS_FORCE_KEY_FRAME, - MIX_ENC_PARAMS_IDR_INTERVAL, - MIX_ENC_PARAMS_RC_MODE, - MIX_ENC_PARAMS_MTU_SLICE_SIZE, - MIX_ENC_PARAMS_REFRESH_TYPE, - MIX_ENC_PARAMS_AIR, - MIX_ENC_PARAMS_CIR_FRAME_CNT, - MIX_ENC_PARAMS_LAST -} MixEncParamsType; - -typedef struct _MixEncDynamicParams { - uint bitrate; - uint init_QP; - uint min_QP; - uint window_size; - uint target_percentage; - uint slice_num; - uint I_slice_num; - uint P_slice_num; - uint width; - uint height; - uint frame_rate_num; - uint frame_rate_denom; - uint intra_period; - uint idr_interval; - uint CIR_frame_cnt; - uint max_slice_size; - bool force_idr; - MixRateControl rc_mode; - MixVideoIntraRefreshType refresh_type; - MixAIRParams air_params; -} MixEncDynamicParams; - - - -typedef enum -{ - MIX_BUFFER_ALLOC_NORMAL = 1, //Means non shared buffer mode - MIX_BUFFER_UPSTREAM_ALLOC_CI = 2, - MIX_BUFFER_UPSTREAM_ALLOC_V4L2 = 4, - MIX_BUFFER_UPSTREAM_ALLOC_SURFACE = 8, - MIX_BUFFER_SELF_ALLOC_SURFACE = 16, - MIX_BUFFER_LAST -} MixBufferAllocationMode; - -typedef enum -{ - MIX_OUTPUT_BUFFER_NORMAL = 0, //Output whatever driver generates - MIX_OUTPUT_BUFFER_SEPARATED_HEADER = 1, //Applications need to pass at least two buffers, one for header and the other for data. 
And once this output mode is used, we will generate a new header - MIX_OUTPUT_BUFFER_LAST -} MixOutputEncBufferMode; - -typedef struct _MixCISharedBufferInfo { - uint ci_frame_cnt; - ulong * ci_frame_id; -} MixCISharedBufferInfo ; - -typedef struct _MixV4l2SharedBufferInfo { - int v4l2_fd; - uint v4l2_buf_cnt; - void * v4l2_fmt; - void ** v4l2_buf; -} MixV4l2SharedBufferInfo; - -typedef struct _MixSurfaceSharedBufferInfo { - uint surface_cnt; - ulong *surface; -} MixSurfaceSharedBufferInfo; - -/* - * This union is defined for upstreamer buffer info Set/Get - */ -typedef union _MixSharedBufferInfo { - MixCISharedBufferInfo ci_buffer_info; - MixV4l2SharedBufferInfo v4l2_buffer_info; - MixSurfaceSharedBufferInfo surface_buffer_info; -} MixSharedBufferInfo; - - -typedef struct _MixUserReqSurfacesInfo { - uint surface_allocated[MAX_ENC_SURFACE_COUNT]; - uint8 * usrptr[MAX_ENC_SURFACE_COUNT]; - uint surface_cnt; -} MixUsrReqSurfacesInfo; -#endif /* __MIX_VIDEO_DEF_H__ */ diff --git a/mix_video/src/mixvideoencodeparams.cpp b/mix_video/src/mixvideoencodeparams.cpp deleted file mode 100644 index 6c8a32a..0000000 --- a/mix_video/src/mixvideoencodeparams.cpp +++ /dev/null @@ -1,138 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideoencodeparams - * @short_description: MI-X Video Encode Parameters - * - * The #MixVideoEncodeParams object will be created by - * the MMF/App and provided to #MixVideo in the #MixVideo - * mix_video_encode() function. Get methods for the - * properties will be available for the caller to - * retrieve configuration information. Currently this - * object is reserved for future use. 
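Although the object is reserved for future use, the timestamp and discontinuity accessors are functional today; a minimal sketch (the mix_video_encode() call shape and the session variables are assumptions, as they are declared elsewhere):

    // Sketch: tag an encode call with a presentation timestamp.
    MixVideoEncodeParams *ep = mix_videoencodeparams_new();
    if (NULL != ep) {
        uint64 pts_ms = 0; // presentation time of the raw frame, in milliseconds
        mix_videoencodeparams_set_timestamp(ep, pts_ms);
        mix_videoencodeparams_set_discontinuity(ep, FALSE);
        MIX_RESULT res = mix_video_encode(video, bufin, bufcnt, iovout, iovoutcnt, ep);
        if (MIX_RESULT_SUCCESS != res) {
            // handle encode failure
        }
        mix_videoencodeparams_unref(ep);
    }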
- */
-
-#include "mixvideoencodeparams.h"
-
-#define MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT(obj) \
-    if(!obj) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \
-
-#define MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT(obj, prop) \
-    if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
-    if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \
-
-
-MixVideoEncodeParams::MixVideoEncodeParams()
-    :timestamp(0)
-    ,discontinuity(FALSE)
-    ,reserved1(NULL)
-    ,reserved2(NULL)
-    ,reserved3(NULL)
-    ,reserved4(NULL) {
-}
-
-MixVideoEncodeParams::~MixVideoEncodeParams() {
-}
-
-
-MixVideoEncodeParams *
-mix_videoencodeparams_new(void) {
-    return new MixVideoEncodeParams();
-}
-
-
-MixVideoEncodeParams *
-mix_videoencodeparams_ref(MixVideoEncodeParams * mix) {
-    if (NULL != mix)
-        mix->Ref();
-    return mix;
-}
-
-/**
- * dup:
- * @returns: a newly allocated duplicate of the object.
- *
- * Creates a duplicate of the object.
- */
-MixParams *MixVideoEncodeParams::dup() const {
-    MixParams *ret = new MixVideoEncodeParams();
-    if (NULL != ret) {
-        if (FALSE == copy(ret)) {
-            ret->Unref();
-            ret = NULL;
-        }
-    }
-    return ret;
-}
-
-/**
- * copy:
- * @target: copy to target
- * @returns: boolean indicating whether the copy succeeded.
- *
- * Copy instance data from this object to @target.
- */
-bool MixVideoEncodeParams::copy(MixParams * target) const {
-    bool ret = FALSE;
-    MixVideoEncodeParams *this_target = MIX_VIDEOENCODEPARAMS(target);
-    if (NULL != this_target) {
-        // chain up base class
-        ret = MixParams::copy(this_target);
-    }
-    return ret;
-}
-
-/**
- * equal:
- * @obj: the object to compare
- * @returns: boolean indicating whether the instances are equal.
- *
- * Compare @obj with this instance for equality.
- */
-bool MixVideoEncodeParams::equal(MixParams * obj) const {
-    bool ret = FALSE;
-    MixVideoEncodeParams * this_obj = MIX_VIDEOENCODEPARAMS(obj);
-    if (NULL != this_obj)
-        ret = MixParams::equal(this_obj);
-    return ret;
-}
-
-
-
-/* TODO: Add getters and setters for properties. */
-
-MIX_RESULT mix_videoencodeparams_set_timestamp(
-    MixVideoEncodeParams * obj, uint64 timestamp) {
-    MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj);
-    obj->timestamp = timestamp;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoencodeparams_get_timestamp(
-    MixVideoEncodeParams * obj, uint64 * timestamp) {
-    MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, timestamp);
-    *timestamp = obj->timestamp;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoencodeparams_set_discontinuity(
-    MixVideoEncodeParams * obj, bool discontinuity) {
-    MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj);
-    obj->discontinuity = discontinuity;
-    return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT mix_videoencodeparams_get_discontinuity(
-    MixVideoEncodeParams * obj, bool *discontinuity) {
-    MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity);
-    *discontinuity = obj->discontinuity;
-    return MIX_RESULT_SUCCESS;
-}
-
diff --git a/mix_video/src/mixvideoencodeparams.h b/mix_video/src/mixvideoencodeparams.h
deleted file mode 100644
index f2de76c..0000000
--- a/mix_video/src/mixvideoencodeparams.h
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors.
The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEOENCODEPARAMS_H__ -#define __MIX_VIDEOENCODEPARAMS_H__ - -#include -#include "mixvideodef.h" - - -/** - * MIX_VIDEOENCODEPARAMS: - * @obj: object to be type-casted. - */ -#define MIX_VIDEOENCODEPARAMS(obj) (reinterpret_cast(obj)) - -/** - * MIX_IS_VIDEOENCODEPARAMS: - * @obj: an object. - * - * Checks if the given object is an instance of #MixParams - */ -#define MIX_IS_VIDEOENCODEPARAMS(obj) ((NULL !=MIX_VIDEOENCODEPARAMS(obj)) ? TRUE : FALSE) - - -/** - * MixVideoEncodeParams: - * - * MI-X VideoDecode Parameter object - */ -class MixVideoEncodeParams :public MixParams { -public: - MixVideoEncodeParams(); - virtual ~MixVideoEncodeParams(); - virtual bool copy(MixParams * target) const; - virtual MixParams *dup() const; - virtual bool equal(MixParams * obj) const; - -public: - /* TODO: Add properties */ - /* < private > */ - uint64 timestamp; - bool discontinuity; - - /* < public > */ - - /* Reserved for future use */ - void *reserved1; - - /* Reserved for future use */ - void *reserved2; - - /* Reserved for future use */ - void *reserved3; - - /* Reserved for future use */ - void *reserved4; -}; - - -/** - * mix_videoencodeparams_new: - * @returns: A newly allocated instance of #MixVideoEncodeParams - * - * Use this method to create new instance of #MixVideoEncodeParams - */ -MixVideoEncodeParams *mix_videoencodeparams_new(void); -/** - * mix_videoencodeparams_ref: - * @mix: object to add reference - * @returns: the MixVideoEncodeParams instance where reference count has been increased. - * - * Add reference count. - */ -MixVideoEncodeParams *mix_videoencodeparams_ref(MixVideoEncodeParams * mix); - -/** - * mix_videoencodeparams_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -#define mix_videoencodeparams_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/* Class Methods */ - -/* TODO: Add getters and setters for properties */ -MIX_RESULT mix_videoencodeparams_set_timestamp( - MixVideoEncodeParams * obj, uint64 timestamp); -MIX_RESULT mix_videoencodeparams_get_timestamp( - MixVideoEncodeParams * obj, uint64 * timestamp); -MIX_RESULT mix_videoencodeparams_set_discontinuity( - MixVideoEncodeParams * obj, bool discontinuity); -MIX_RESULT mix_videoencodeparams_get_discontinuity( - MixVideoEncodeParams * obj, bool *discontinuity); - -#endif /* __MIX_VIDEOENCODEPARAMS_H__ */ - diff --git a/mix_video/src/mixvideoformat.cpp b/mix_video/src/mixvideoformat.cpp deleted file mode 100644 index a566694..0000000 --- a/mix_video/src/mixvideoformat.cpp +++ /dev/null @@ -1,285 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. 
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#include "mixvideolog.h" - -#include "mixvideoformat.h" -#include -#include - -#define MIXUNREF(obj, unref) if(obj) { unref(obj); obj = NULL; } - -MixVideoFormat::MixVideoFormat() - :mLock() - ,initialized(FALSE) - ,va_initialized(FALSE) - ,framemgr(NULL) - ,surfacepool(NULL) - ,inputbufpool(NULL) - ,inputbufqueue(NULL) - ,va_display(NULL) - ,va_context(VA_INVALID_ID) - ,va_config(VA_INVALID_ID) - ,va_surfaces(NULL) - ,va_num_surfaces(0) - ,mime_type(NULL) - ,frame_rate_num(0) - ,frame_rate_denom(0) - ,picture_width(0) - ,picture_height(0) - ,parse_in_progress(FALSE) - ,current_timestamp((uint64)-1) - ,end_picture_pending(FALSE) - ,video_frame(NULL) - ,extra_surfaces(0) - ,config_params(NULL) - ,error_concealment(TRUE) - ,ref_count(1) -{ -} - -MixVideoFormat::~MixVideoFormat() { - /* clean up here. 
*/ - VAStatus va_status; - MixInputBufferEntry *buf_entry = NULL; - - if (this->mime_type) { - free(this->mime_type); - } - - //MiVideo object calls the _deinitialize() for frame manager - MIXUNREF(this->framemgr, mix_framemanager_unref); - - if (this->surfacepool) { - mix_surfacepool_deinitialize(this->surfacepool); - MIXUNREF(this->surfacepool, mix_surfacepool_unref); - } - - if (this->config_params) { - mix_videoconfigparams_unref(this->config_params); - this->config_params = NULL; - } - - //libVA cleanup (vaTerminate is called from MixVideo object) - if (this->va_display) { - if (this->va_context != VA_INVALID_ID) { - va_status = vaDestroyContext(this->va_display, this->va_context); - if (va_status != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaDestroyContext\n"); - } - this->va_context = VA_INVALID_ID; - } - if (this->va_config != VA_INVALID_ID) { - va_status = vaDestroyConfig(this->va_display, this->va_config); - if (va_status != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaDestroyConfig\n"); - } - this->va_config = VA_INVALID_ID; - } - if (this->va_surfaces) { - va_status = vaDestroySurfaces(this->va_display, this->va_surfaces, this->va_num_surfaces); - if (va_status != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaDestroySurfaces\n"); - } - free(this->va_surfaces); - this->va_surfaces = NULL; - this->va_num_surfaces = 0; - } - } - - if (this->video_frame) { - mix_videoframe_unref(this->video_frame); - this->video_frame = NULL; - } - - //Deinit input buffer queue - while (!j_queue_is_empty(this->inputbufqueue)) { - buf_entry = reinterpret_cast(j_queue_pop_head(this->inputbufqueue)); - mix_buffer_unref(buf_entry->buf); - free(buf_entry); - } - - j_queue_free(this->inputbufqueue); - - //MixBuffer pool is deallocated in MixVideo object - this->inputbufpool = NULL; -} - -MIX_RESULT MixVideoFormat::GetCaps(char *msg) { - LOG_V("mix_videofmt_getcaps_default\n"); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoFormat::Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - - LOG_V( "Begin\n"); - MIX_RESULT res = MIX_RESULT_SUCCESS; - MixInputBufferEntry *buf_entry = NULL; - - if (!config_params || !frame_mgr || !input_buf_pool || !surface_pool || !va_display) { - LOG_E( "NUll pointer passed in\n"); - return (MIX_RESULT_NULL_PTR); - } - - Lock(); - - //Clean up any previous framemgr - MIXUNREF(this->framemgr, mix_framemanager_unref); - this->framemgr = frame_mgr; - mix_framemanager_ref(this->framemgr); - if (this->config_params) { - mix_videoconfigparams_unref(this->config_params); - } - this->config_params = config_params; - mix_videoconfigparams_ref(reinterpret_cast(this->config_params)); - - this->va_display = va_display; - - //Clean up any previous mime_type - if (this->mime_type) { - free(this->mime_type); - this->mime_type = NULL; - } - - res = mix_videoconfigparamsdec_get_mime_type(config_params, &this->mime_type); - if (NULL == this->mime_type) { - res = MIX_RESULT_NO_MEMORY; - LOG_E( "Could not duplicate mime_type\n"); - goto cleanup; - }//else there is no mime_type; leave as NULL - - res = mix_videoconfigparamsdec_get_frame_rate(config_params, &(this->frame_rate_num), &(this->frame_rate_denom)); - if (res != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting frame_rate\n"); - goto cleanup; - } - res = mix_videoconfigparamsdec_get_picture_res(config_params, &(this->picture_width), &(this->picture_height)); - if (res != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting 
picture_res\n"); - goto cleanup; - } - - if (this->inputbufqueue) { - //Deinit previous input buffer queue - while (!j_queue_is_empty(this->inputbufqueue)) { - buf_entry = reinterpret_cast(j_queue_pop_head(this->inputbufqueue)); - mix_buffer_unref(buf_entry->buf); - free(buf_entry); - } - j_queue_free(this->inputbufqueue); - } - - //MixBuffer pool is cleaned up in MixVideo object - this->inputbufpool = NULL; - - this->inputbufpool = input_buf_pool; - this->inputbufqueue = j_queue_new(); - if (NULL == this->inputbufqueue) {//New failed - res = MIX_RESULT_NO_MEMORY; - LOG_E( "Could not duplicate mime_type\n"); - goto cleanup; - } - - // surface pool, VA context/config and parser handle are initialized by - // derived classes - - -cleanup: - if (res != MIX_RESULT_SUCCESS) { - MIXUNREF(this->framemgr, mix_framemanager_unref); - if (this->mime_type) { - free(this->mime_type); - this->mime_type = NULL; - } - Unlock(); - this->frame_rate_num = 0; - this->frame_rate_denom = 1; - this->picture_width = 0; - this->picture_height = 0; - } else {//Normal unlock - Unlock(); - } - - LOG_V( "End\n"); - - return res; - -} - -MIX_RESULT MixVideoFormat::Decode( - MixBuffer * bufin[], int bufincnt, - MixVideoDecodeParams * decode_params) { - return MIX_RESULT_SUCCESS; -} -MIX_RESULT MixVideoFormat::Flush() { - return MIX_RESULT_SUCCESS; -} -MIX_RESULT MixVideoFormat::EndOfStream() { - return MIX_RESULT_SUCCESS; -} -MIX_RESULT MixVideoFormat::Deinitialize() { - return MIX_RESULT_SUCCESS; -} - - -MixVideoFormat* mix_videoformat_unref(MixVideoFormat* mix) { - if (NULL != mix) - return mix->Unref(); - else - return NULL; -} - -MixVideoFormat * mix_videoformat_new(void) { - return new MixVideoFormat(); -} - -MixVideoFormat * mix_videoformat_ref(MixVideoFormat * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} - - -/* mixvideoformat class methods implementation */ -MIX_RESULT mix_videofmt_getcaps(MixVideoFormat *mix, char *msg) { - return mix->GetCaps(msg); -} - -MIX_RESULT mix_videofmt_initialize( - MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - return mix->Initialize(config_params, frame_mgr, - input_buf_pool, surface_pool, va_display); -} - -MIX_RESULT mix_videofmt_decode( - MixVideoFormat *mix, MixBuffer * bufin[], - int bufincnt, MixVideoDecodeParams * decode_params) { - return mix->Decode(bufin, bufincnt, decode_params); -} - -MIX_RESULT mix_videofmt_flush(MixVideoFormat *mix) { - return mix->Flush(); -} - -MIX_RESULT mix_videofmt_eos(MixVideoFormat *mix) { - return mix->EndOfStream(); -} - -MIX_RESULT mix_videofmt_deinitialize(MixVideoFormat *mix) { - return mix->Deinitialize(); -} diff --git a/mix_video/src/mixvideoformat.h b/mix_video/src/mixvideoformat.h deleted file mode 100644 index bfaa440..0000000 --- a/mix_video/src/mixvideoformat.h +++ /dev/null @@ -1,177 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEOFORMAT_H__ -#define __MIX_VIDEOFORMAT_H__ - -#include - - -extern "C" { -#include "vbp_loader.h" -}; - -#include "mixvideodef.h" -#include -#include "mixvideoconfigparamsdec.h" -#include "mixvideodecodeparams.h" -#include "mixvideoframe.h" -#include "mixframemanager.h" -#include "mixsurfacepool.h" -#include "mixbuffer.h" -#include "mixbufferpool.h" -#include "mixvideoformatqueue.h" -#include "mixvideothread.h" -#include - - -// Redefine the Handle defined in vbp_loader.h -#define VBPhandle Handle - -class MixVideoFormat; - -#define MIX_VIDEOFORMAT(obj) (dynamic_cast(obj)) -/* vmethods typedef */ - -typedef MIX_RESULT (*MixVideoFmtGetCapsFunc)(MixVideoFormat *mix, char *msg); -typedef MIX_RESULT (*MixVideoFmtInitializeFunc)(MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); -typedef MIX_RESULT (*MixVideoFmtDecodeFunc)(MixVideoFormat *mix, - MixBuffer * bufin[], int bufincnt, - MixVideoDecodeParams * decode_params); -typedef MIX_RESULT (*MixVideoFmtFlushFunc)(MixVideoFormat *mix); -typedef MIX_RESULT (*MixVideoFmtEndOfStreamFunc)(MixVideoFormat *mix); -typedef MIX_RESULT (*MixVideoFmtDeinitializeFunc)(MixVideoFormat *mix); - -class MixVideoFormat { - /*< public > */ -public: - MixVideoFormat(); - virtual ~MixVideoFormat(); - - - virtual MIX_RESULT GetCaps(char *msg); - virtual MIX_RESULT Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); - virtual MIX_RESULT Decode( - MixBuffer * bufin[], int bufincnt, - MixVideoDecodeParams * decode_params); - virtual MIX_RESULT Flush(); - virtual MIX_RESULT EndOfStream(); - virtual MIX_RESULT Deinitialize(); - - - void Lock() { - mLock.lock(); - } - - void Unlock() { - mLock.unlock(); - } - - MixVideoFormat* Ref() { - ++ref_count; - return this; - } - MixVideoFormat* Unref() { - if (0 == (--ref_count)) { - delete this; - return NULL; - } else { - return this; - } - } - -public: - /*< private > */ - MixVideoMutex mLock; - bool initialized; - MixFrameManager *framemgr; - MixSurfacePool *surfacepool; - VADisplay va_display; - VAContextID va_context; - VAConfigID va_config; - VASurfaceID *va_surfaces; - uint va_num_surfaces; - VBPhandle parser_handle; - char *mime_type; - uint frame_rate_num; - uint frame_rate_denom; - uint picture_width; - uint picture_height; - bool parse_in_progress; - bool discontinuity_frame_in_progress; - uint64 current_timestamp; - MixBufferPool *inputbufpool; - JQueue *inputbufqueue; - bool va_initialized; - bool end_picture_pending; - MixVideoFrame* video_frame; - uint extra_surfaces; - bool error_concealment; - MixVideoConfigParamsDec * config_params; - int ref_count; -}; - - -/** - * mix_videoformat_new: - * @returns: A newly allocated instance of #MixVideoFormat - * - * Use this method to create new instance of 
#MixVideoFormat - */ -MixVideoFormat *mix_videoformat_new(void); - -/** - * mix_videoformat_ref: - * @mix: object to add reference - * @returns: the MixVideoFormat instance where reference count has been increased. - * - * Add reference count. - */ -MixVideoFormat *mix_videoformat_ref(MixVideoFormat * mix); - -/** - * mix_videoformat_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -MixVideoFormat* mix_videoformat_unref(MixVideoFormat* mix); - -/* Class Methods */ - -MIX_RESULT mix_videofmt_getcaps(MixVideoFormat *mix, char *msg); - -MIX_RESULT mix_videofmt_initialize( - MixVideoFormat *mix, - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); - -MIX_RESULT mix_videofmt_decode( - MixVideoFormat *mix, MixBuffer * bufin[], - int bufincnt, MixVideoDecodeParams * decode_params); - -MIX_RESULT mix_videofmt_flush(MixVideoFormat *mix); - -MIX_RESULT mix_videofmt_eos(MixVideoFormat *mix); - -MIX_RESULT mix_videofmt_deinitialize(MixVideoFormat *mix); - -#endif /* __MIX_VIDEOFORMAT_H__ */ diff --git a/mix_video/src/mixvideoformat_h264.cpp b/mix_video/src/mixvideoformat_h264.cpp deleted file mode 100755 index 8aa87fe..0000000 --- a/mix_video/src/mixvideoformat_h264.cpp +++ /dev/null @@ -1,1598 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#include -#ifndef ANDROID -#include -#endif - -#include "mixvideolog.h" -#include "mixvideoformat_h264.h" -#include "mixvideoconfigparamsdec_h264.h" - -#ifdef MIX_LOG_ENABLE -static int mix_video_h264_counter = 0; -#endif /* MIX_LOG_ENABLE */ - -#define HACK_DPB -#ifdef HACK_DPB -static inline MIX_RESULT mix_videofmt_h264_hack_dpb( MixVideoFormat *mix, vbp_picture_data_h264* pic_data); -#endif - - -// Local Help Funcs - - -/* The parent class. The pointer will be saved - * in this class's initialization. The pointer - * can be used for chaining method call if needed. - */ -MixVideoFormat_H264::MixVideoFormat_H264() - :dpb_surface_table(NULL) -#ifdef DECODER_ROBUSTNESS - ,last_decoded_frame(NULL) -#endif -{} - -MixVideoFormat_H264::~MixVideoFormat_H264() { - int32 pret = VBP_OK; - /* clean up here. 
*/ - //surfacepool is deallocated by parent - //inputbufqueue is deallocated by parent - //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces - - if (this->dpb_surface_table) { - //Free the DPB surface table - //First remove all the entries (frames will be unrefed) - j_hash_table_remove_all(this->dpb_surface_table); - //Then unref the table - j_hash_table_unref(this->dpb_surface_table); - this->dpb_surface_table = NULL; - } - - Lock(); - this->initialized = TRUE; - this->parse_in_progress = FALSE; - - //Close the parser - pret = vbp_close(this->parser_handle); - this->parser_handle = NULL; - if (pret != VBP_OK) { - LOG_E( "Error closing parser\n"); - } - Unlock(); -} - -MIX_RESULT MixVideoFormat_H264::Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - - uint32 pret = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - enum _vbp_parser_type ptype = VBP_H264; - vbp_data_h264 *data = NULL; - MixIOVec *header = NULL; - missing_idr = true; - - MixVideoConfigParamsDecH264 *config_params_h264 = NULL; - bool va_setup_flag = FALSE; - - if (config_params == NULL || frame_mgr == NULL || - input_buf_pool == NULL || va_display == NULL) { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - // chain up parent method - MixVideoFormat::Initialize(config_params, frame_mgr, input_buf_pool, - surface_pool, va_display); - - /* Chainup parent method. */ - LOG_V( "Locking\n"); - //From now on, we exit this function through cleanup: - Lock(); - - this->surfacepool = mix_surfacepool_new(); - *surface_pool = this->surfacepool; - - if (NULL == this->surfacepool) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "parent->surfacepool == NULL.\n"); - goto CLEAN_UP; - } - - //Create our table of Decoded Picture Buffer "in use" surfaces - this->dpb_surface_table = j_hash_table_new_full(NULL, NULL, - mix_videofmt_h264_destroy_DPB_key, mix_videofmt_h264_destroy_DPB_value); - - if (NULL == this->dpb_surface_table) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating dbp surface table\n"); - goto CLEAN_UP; - } - - ret = mix_videoconfigparamsdec_get_extra_surface_allocation( - config_params, &(this->extra_surfaces)); - - if (ret != MIX_RESULT_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot get extra surface allocation setting\n"); - goto CLEAN_UP; - } - - ret = mix_videoconfigparamsdec_get_error_concealment( - config_params, - &this->error_concealment); - - if (ret != MIX_RESULT_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot get error_concealment flag\n"); - goto CLEAN_UP; - } - - config_params_h264 = MIX_VIDEOCONFIGPARAMSDEC_H264(config_params); - ret = mix_videoconfigparamsenc_h264_get_va_setup_flag(config_params_h264, &va_setup_flag); - if (ret != MIX_RESULT_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Failed to get va_setup_flag\n"); - return ret; - } - - LOG_V("va_setup_flag = %d\n", va_setup_flag); - - LOG_V( "Before vbp_open\n"); - //Load the bitstream parser - pret = vbp_open(ptype, &(this->parser_handle)); - LOG_V( "After vbp_open\n"); - - if (VBP_OK != pret) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error opening parser\n"); - goto CLEAN_UP; - } - LOG_V( "Opened parser\n"); - - if(va_setup_flag) { - LOG_V("calling to mix_videofmt_h264_initialize_va(mix, NULL)\n"); - ret = _initialize_va( NULL); - LOG_V("ret = 0x%x\n", ret); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( "Error initializing va. 
\n"); - } - goto CLEAN_UP; - } - - ret = mix_videoconfigparamsdec_get_header(config_params, &header); - - if ((MIX_RESULT_SUCCESS != ret) || (NULL == header)) { - // Delay initializing VA if codec configuration data is not ready, but don't return an error. - ret = MIX_RESULT_SUCCESS; - LOG_W( "Codec data is not available in the configuration parameter.\n"); - goto CLEAN_UP; - } - - LOG_V( "Calling parse on header data, handle %d\n", (int)(this->parser_handle)); - - pret = vbp_parse(this->parser_handle, header->data, header->data_size, TRUE); - - if (VBP_OK != pret) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error parsing header data\n"); - goto CLEAN_UP; - } - - LOG_V( "Parsed header\n"); - - //Get the header data and save - pret = vbp_query(this->parser_handle, (void **)&data); - - if (VBP_OK != pret) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error reading parsed header data\n"); - goto CLEAN_UP; - } - - LOG_V( "Queried parser for header data\n"); - - _update_config_params(data); - - ret = _initialize_va(data); - if (MIX_RESULT_SUCCESS != ret) { - LOG_E( "Error initializing va. \n"); - goto CLEAN_UP; - } - -CLEAN_UP: - if (MIX_RESULT_SUCCESS != ret) { - pret = vbp_close(this->parser_handle); - this->parser_handle = NULL; - this->initialized = FALSE; - } else { - this->initialized = TRUE; - } - if (NULL != header) { - if (NULL != header->data) - free(header->data); - free(header); - header = NULL; - } - LOG_V( "Unlocking\n"); - Unlock(); - return ret; - -} - -MIX_RESULT MixVideoFormat_H264::Decode( - MixBuffer * bufin[], int bufincnt, - MixVideoDecodeParams * decode_params) { - - int i = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - uint64 ts = 0; - bool discontinuity = FALSE; - - LOG_V( "Begin\n"); - - if (NULL == bufin || NULL == decode_params || 0 == bufincnt) { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - /* Chainup parent method. - We are not chaining up to parent method for now. - */ -#if 0 - MixVideoFormat::Decode(bufin, bufincnt, decode_params); -#endif - - ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); - if (ret != MIX_RESULT_SUCCESS) { - // never happen - return MIX_RESULT_FAIL; - } - - ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); - if (ret != MIX_RESULT_SUCCESS) { - // never happen - return MIX_RESULT_FAIL; - } - - decode_params->new_sequence = FALSE; - - //From now on, we exit this function through cleanup: - LOG_V( "Locking\n"); - Lock(); - - LOG_V( "Starting current frame %d, timestamp %"UINT64_FORMAT"\n", mix_video_h264_counter++, ts); - - for (i = 0; i < bufincnt; i++) { - LOG_V( "Decoding a buf %x, size %d\n", (uint)bufin[i]->data, bufin[i]->size); - // decode a buffer at a time - ret = _decode_a_buffer( - bufin[i], - ts, - discontinuity, - decode_params, - (i == bufincnt-1 ? decode_params->complete_frame : 0)); - - if (MIX_RESULT_SUCCESS != ret) { - LOG_E("mix_videofmt_h264_decode_a_buffer failed.\n"); - goto CLEAN_UP; - } - } - -CLEAN_UP: - LOG_V( "Unlocking\n"); - Unlock(); - LOG_V( "End\n"); - return ret; -} - -MIX_RESULT MixVideoFormat_H264::Flush() { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V( "Begin\n"); - uint32 pret = 0; - /* Chainup parent method. - We are not chaining up to parent method for now. 
- */ -#if 0 - MixVideoFormat::Flush(); -#endif - Lock(); - // drop any decode-pending picture, and ignore return value - _decode_end(TRUE); - - //Clear parse_in_progress flag and current timestamp - this->parse_in_progress = FALSE; - this->discontinuity_frame_in_progress = FALSE; - this->current_timestamp = (uint64)-1; - - //Clear the DPB surface table - j_hash_table_remove_all(this->dpb_surface_table); - - //Call parser flush - pret = vbp_flush(this->parser_handle); - if (VBP_OK != pret) - ret = MIX_RESULT_FAIL; - - Unlock(); - LOG_V( "End\n"); - return ret; - -} - -MIX_RESULT MixVideoFormat_H264::EndOfStream() { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V( "Begin\n"); - /* Chainup parent method. - We are not chaining up to parent method for now. - */ -#if 0 - if (parent_class->eos) { - return parent_class->eos(mix, msg); - } -#endif - Lock(); - // finished decoding the pending frame - _decode_end(FALSE); - Unlock(); - //Call Frame Manager with _eos() - ret = mix_framemanager_eos(this->framemgr); - mix_framemanager_set_dpb_size(this->framemgr, -1); - LOG_V( "End\n"); - return ret; -} - - -MixVideoFormat_H264 * mix_videoformat_h264_new(void) { - return new MixVideoFormat_H264(); -} - - -MixVideoFormat_H264 *mix_videoformat_h264_ref(MixVideoFormat_H264 * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} - -MixVideoFormat_H264 *mix_videoformat_h264_unref(MixVideoFormat_H264 *mix) { - if (NULL != mix) - return MIX_VIDEOFORMAT_H264(mix->Unref()); - else - return mix; -} - -MIX_RESULT MixVideoFormat_H264::_update_config_params(vbp_data_h264 *data) { - if (0 == this->picture_width || 0 == this->picture_height || data->new_sps) { - this->picture_width = - (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16; - this->picture_height = - (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16; - - mix_videoconfigparamsdec_set_picture_res( - this->config_params, this->picture_width, this->picture_height); - } - - // video_range has default value of 0. - mix_videoconfigparamsdec_set_video_range(this->config_params, - data->codec_data->video_full_range_flag); - - uint8 color_matrix; - switch (data->codec_data->matrix_coefficients) { - case 1: - color_matrix = VA_SRC_BT709; - break; - // ITU-R Recommendation BT.470-6 System B, G (MP4), same as - // SMPTE 170M/BT601 - case 5: - case 6: - color_matrix = VA_SRC_BT601; - break; - default: - // unknown color matrix, set to 0 so color space flag will not be set. 
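    // Assumption, for context: matrix_coefficients carries the H.264 VUI
    // colour description. Value 1 is the BT.709 matrix used for HD content;
    // values 5 and 6 (BT.470 System B/G and SMPTE 170M) are both the BT.601
    // matrix typical of SD content, hence the VA_SRC_BT709/VA_SRC_BT601
    // mapping in the cases above.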
- color_matrix = 0; - break; - } - mix_videoconfigparamsdec_set_color_matrix(this->config_params, color_matrix); - mix_videoconfigparamsdec_set_pixel_aspect_ratio( - this->config_params, - data->codec_data->sar_width, - data->codec_data->sar_height); - mix_videoconfigparamsdec_set_bit_rate( - this->config_params, data->codec_data->bit_rate); - - LOG_V("crop_left = %d crop_right = %d crop_top = %d crop_bottom = %d\n", - data->codec_data->crop_left, data->codec_data->crop_right, - data->codec_data->crop_top, data->codec_data->crop_bottom); - - mix_videoconfigparamsdec_set_cropping_info( - this->config_params, - data->codec_data->crop_left, - data->codec_data->crop_right, - data->codec_data->crop_top, - data->codec_data->crop_bottom); - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT MixVideoFormat_H264::_initialize_va(vbp_data_h264 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - VAConfigAttrib attrib; - if (this->va_initialized) { - LOG_W("va already initialized.\n"); - return MIX_RESULT_SUCCESS; - } - LOG_V( "Begin\n"); - //We are requesting RT attributes - attrib.type = VAConfigAttribRTFormat; - attrib.value = VA_RT_FORMAT_YUV420; - - //Initialize and save the VA config ID -#ifdef ANDROID - if((this->error_concealment == TRUE) && (data == NULL || (data != NULL && ((data->codec_data->profile_idc == 66) || (data->codec_data->constraint_set0_flag == 1)) && - (data->codec_data->constraint_set1_flag == 1)))) - { - //it is constrained baseline profile according to subclause A.2.1.1 in H.264 Spec v200903 - vret = vaCreateConfig( - this->va_display, - VAProfileH264ConstrainedBaseline, - VAEntrypointVLD, - &attrib, - 1, - &(this->va_config)); - } - else - { -#endif - //We use high profile for all kinds of H.264 profiles (baseline, main and high) except for constrained baseline - vret = vaCreateConfig( - this->va_display, - VAProfileH264High, - VAEntrypointVLD, - &attrib, - 1, - &(this->va_config)); -#ifdef ANDROID - } -#endif - if (VA_STATUS_SUCCESS != vret) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E("vaCreateConfig failed\n"); - return ret; - } - -#ifdef MIX_LOG_ENABLE - if(data) { - LOG_V( "Codec data says num_ref_frames is %d\n", data->codec_data->num_ref_frames); - } -#endif - - // handle both frame and field coding for interlaced content - int num_ref_pictures = 0; - if(data) { - num_ref_pictures = data->codec_data->num_ref_frames; - } - - int pic_size; - int size = 3; - if (data) - { - pic_size = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * (data->codec_data->frame_mbs_only_flag?1:2) * 384; - - switch (data->codec_data->level_idc) - { - case 9: - size = 152064; - break; - case 10: - size = 152064; - break; - case 11: - size = 345600; - break; - case 12: - size = 912384; - break; - case 13: - size = 912384; - break; - case 20: - size = 912384; - break; - case 21: - size = 1824768; - break; - case 22: - size = 3110400; - break; - case 30: - size = 3110400; - break; - case 31: - size = 6912000; - break; - case 32: - size = 7864320; - break; - case 40: - size = 12582912; - break; - case 41: - size = 12582912; - break; - case 42: - size = 13369344; - break; - case 50: - size = 42393600; - break; - case 51: - size = 70778880; - break; - default: - //error ("undefined level", 500); - break; - } - - if (pic_size) - { - size /= pic_size; - if (size == 0) - { - size = 3; - } - else if (size > 16) - { - size = 15; - } - } - else - { - size = 3; - } - } - - 
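    // Assumption, for context: the per-level byte values in the switch above
    // follow the H.264 Annex A MaxDPB table (e.g. 152064 bytes for levels
    // 1b/1.0, 70778880 bytes for level 5.1), and pic_size is the 4:2:0 frame
    // size in bytes: 384 bytes per 16x16 macroblock, doubled when the stream
    // is not frame_mbs_only (field coding). MaxDPB divided by the frame size
    // gives the DPB depth in frames, floored at 3 and capped at 15 here
    // before it is handed to the frame manager.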
mix_framemanager_set_dpb_size(this->framemgr, size); - - //Adding 1 to work around VBLANK issue, and another 1 to compensate cached frame that - // will not start decoding until a new frame is received. - this->va_num_surfaces = 1 + 1 + this->extra_surfaces + (((size + 3) < - MIX_VIDEO_H264_SURFACE_NUM) ? - (size + 3) - : MIX_VIDEO_H264_SURFACE_NUM); - this->va_num_surfaces = this->va_num_surfaces > 24 ? 24 : this->va_num_surfaces; - - this->va_surfaces = - reinterpret_cast(malloc(sizeof(VASurfaceID)*this->va_num_surfaces)); - if (NULL == this->va_surfaces) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "parent->va_surfaces == NULL. \n"); - return ret; - } - -#ifdef MIX_LOG_ENABLE - if(data) { - LOG_V( "Codec data says picture size is %d x %d\n", - (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, - (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16); - LOG_V( "getcaps says picture size is %d x %d\n", this->picture_width, this->picture_height); - } -#endif - - vret = vaCreateSurfaces( - this->va_display, - VA_RT_FORMAT_YUV420, - this->picture_width, - this->picture_height, - this->va_surfaces , - this->va_num_surfaces, NULL, 0); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Error allocating surfaces\n"); - return ret; - } - - LOG_V( "Created %d libva surfaces\n", this->va_num_surfaces); - - //Initialize the surface pool - ret = mix_surfacepool_initialize( - this->surfacepool, - this->va_surfaces, - this->va_num_surfaces, - this->va_display); - - switch (ret) { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing. - default: - ret = MIX_RESULT_ALREADY_INIT; - LOG_E( "Error init surface pool\n"); - return ret; - break; - } -#if 0 // NOTE: We don't use the value in frame manager, comment out the following lines - - if (data->codec_data->pic_order_cnt_type == 0) { - int max = (int)pow(2, data->codec_data->log2_max_pic_order_cnt_lsb_minus4 + 4); - mix_framemanager_set_max_picture_number(this->framemgr, max); - } -#endif - - //Initialize and save the VA context ID - //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 - vret = vaCreateContext( - this->va_display, - this->va_config, - this->picture_width, - this->picture_height, - 0, // no flag set - this->va_surfaces, - this->va_num_surfaces, - &(this->va_context)); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Error initializing video driver\n"); - return ret; - } - - this->va_initialized = TRUE; - - return ret; -} - - -MIX_RESULT MixVideoFormat_H264::_handle_new_sequence(vbp_data_h264 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V("new sequence is received.\n"); - - // original picture resolution - uint32 width = this->picture_width; - uint32 height = this->picture_height; - - _update_config_params(data); - - if (width != this->picture_width || height != this->picture_height) { - // flush frame manager only if resolution is changed. 
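    // Presumed rationale (not stated in the source): a resolution change
    // invalidates the dimensions of the allocated VA surfaces, so frames
    // still queued in the frame manager must be drained before the surface
    // pool can be rebuilt; other sequence parameter updates can take effect
    // without discarding queued output.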
- ret = mix_framemanager_flush(this->framemgr); - } - // TO DO: re-initialize VA - return ret; -} - - -MIX_RESULT MixVideoFormat_H264::_update_ref_pic_list( - VAPictureParameterBufferH264* picture_params, - VASliceParameterBufferH264* slice_params) { - //Do slice parameters - //First patch up the List0 and List1 surface IDs - uint32 j = 0; - uint poc = 0; - void* video_frame = NULL; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - for (; j <= slice_params->num_ref_idx_l0_active_minus1; j++) { - if (!(slice_params->RefPicList0[j].flags & VA_PICTURE_H264_INVALID)) { - poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList0[j])); - video_frame = j_hash_table_lookup(this->dpb_surface_table, (void*)poc); - if (video_frame == NULL) { - LOG_E("unable to find surface of picture %d (current picture %d).", - poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic))); - ret = MIX_RESULT_DROPFRAME; //return non-fatal error - return ret; - } else { - slice_params->RefPicList0[j].picture_id = - ((MixVideoFrame *)video_frame)->frame_id; - } - } - } - - if ((slice_params->slice_type == 1) || (slice_params->slice_type == 6)) { - for (j = 0; j <= slice_params->num_ref_idx_l1_active_minus1; j++) { - if (!(slice_params->RefPicList1[j].flags & VA_PICTURE_H264_INVALID)) { - poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList1[j])); - video_frame = j_hash_table_lookup(this->dpb_surface_table, (void*)poc); - if (video_frame == NULL) { - LOG_E("unable to find surface of picture %d (current picture %d).", - poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic))); - ret = MIX_RESULT_DROPFRAME; //return non-fatal error - return ret; - } else { - slice_params->RefPicList1[j].picture_id = - ((MixVideoFrame *)video_frame)->frame_id; - } - } - } - } - return ret; -} - -MIX_RESULT MixVideoFormat_H264::_decode_a_slice( - vbp_data_h264 *data, int picture_index, int slice_index) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - VADisplay vadisplay = NULL; - VAContextID vacontext; - uint buffer_id_cnt = 0; - - // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data - VABufferID buffer_ids[4]; - - LOG_V( "Begin\n"); - //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); - vbp_picture_data_h264* pic_data = &(data->pic_data[picture_index]); - vbp_slice_data_h264* slice_data = &(pic_data->slc_data[slice_index]); - VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms; - VASliceParameterBufferH264* slice_params = &(slice_data->slc_parms); - vadisplay = this->va_display; - vacontext = this->va_context; - -#ifdef DECODER_ROBUSTNESS - if ((slice_params->first_mb_in_slice == 0) || (!this->end_picture_pending)) -#else - if (slice_params->first_mb_in_slice == 0) -#endif - { - // this is the first slice of the picture - if (this->end_picture_pending) { - // interlace content, decoding the first field - vret = vaEndPicture(vadisplay, vacontext); - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("vaEndPicture failed.\n"); - LOG_V( "End\n"); - return ret; - } - // for interlace content, top field may be valid only after the second field is parsed - mix_videoframe_set_displayorder(this->video_frame, pic_params->CurrPic.TopFieldOrderCnt); - } - - ulong surface = 0; - LOG_V("mix->video_frame = 0x%x\n", (unsigned)this->video_frame); - - //Get our surface ID from the frame object - ret = mix_videoframe_get_frame_id(this->video_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting surface ID from frame object\n"); 
- LOG_V( "End\n"); - return ret; - } - -#ifdef DECODER_ROBUSTNESS - LOG_V( "Updating DPB for libva\n"); - //Now handle the reference frames and surface IDs for DPB and current frame - _handle_ref_frames(pic_params, this->video_frame); -#ifdef HACK_DPB - //We have to provide a hacked DPB rather than complete DPB for libva as workaround - ret = mix_videofmt_h264_hack_dpb(this, pic_data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error reference frame not found\n"); - //Need to remove the frame we inserted in _handle_ref_frames above, since we are not going to decode it - _cleanup_ref_frame(pic_params, this->video_frame); - LOG_V( "End\n"); - return ret; - } -#endif - LOG_V( "Calling vaBeginPicture\n"); - //Now we can begin the picture - vret = vaBeginPicture(vadisplay, vacontext, surface); - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaBeginPicture\n"); - LOG_V( "End\n"); - return ret; - } - // vaBeginPicture needs a matching vaEndPicture - this->end_picture_pending = TRUE; -#else - LOG_V( "Calling vaBeginPicture\n"); - - //Now we can begin the picture - vret = vaBeginPicture(vadisplay, vacontext, surface); - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaBeginPicture\n"); - LOG_V( "End\n"); - return ret; - } - // vaBeginPicture needs a matching vaEndPicture - this->end_picture_pending = TRUE; - LOG_V( "Updating DPB for libva\n"); - //Now handle the reference frames and surface IDs for DPB and current frame - _handle_ref_frames(pic_params, this->video_frame); - -#ifdef HACK_DPB - //We have to provide a hacked DPB rather than complete DPB for libva as workaround - ret = mix_videofmt_h264_hack_dpb(this, pic_data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error reference frame not found\n"); - LOG_V( "End\n"); - return ret; - } -#endif - -#endif - //Libva buffer set up - LOG_V( "Creating libva picture parameter buffer\n"); - LOG_V( "picture parameter buffer shows num_ref_frames is %d\n", pic_params->num_ref_frames); - //First the picture parameter buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAPictureParameterBufferType, - sizeof(VAPictureParameterBufferH264), - 1, - pic_params, - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - LOG_V( "End\n"); - return ret; - } - - buffer_id_cnt++; - LOG_V( "Creating libva IQMatrix buffer\n"); - - //Then the IQ matrix buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAIQMatrixBufferType, - sizeof(VAIQMatrixBufferH264), - 1, - data->IQ_matrix_buf, - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - LOG_V( "End\n"); - return ret; - } - buffer_id_cnt++; - } - -#ifndef DECODER_ROBUSTNESS - if (!this->end_picture_pending) { - LOG_E("first slice is lost??????????\n"); - ret = MIX_RESULT_DROPFRAME; - LOG_V( "End\n"); - return ret; - - } -#endif - - //Now for slices - - ret = _update_ref_pic_list(pic_params, slice_params); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videofmt_h264_update_ref_pic_list failed.\n"); - LOG_V( "End\n"); - return ret; - } - - LOG_V( "Creating libva slice parameter buffer\n"); - - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceParameterBufferType, - sizeof(VASliceParameterBufferH264), - 1, - slice_params, - 
&buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - LOG_V( "End\n"); - return ret; - } - - buffer_id_cnt++; - - //Do slice data - - //slice data buffer pointer - //Note that this is the original data buffer ptr; - // offset to the actual slice data is provided in - // slice_data_offset in VASliceParameterBufferH264 - - LOG_V( "Creating libva slice data buffer, using slice address %x, with offset %d and size %u\n", - (uint)slice_data->buffer_addr, slice_params->slice_data_offset, slice_data->slice_size); - - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceDataBufferType, - slice_data->slice_size, //size - 1, //num_elements - slice_data->buffer_addr + slice_data->slice_offset, - &buffer_ids[buffer_id_cnt]); - - buffer_id_cnt++; - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - LOG_V( "End\n"); - return ret; - - } - - - LOG_V( "Calling vaRenderPicture\n"); - - //Render the picture - vret = vaRenderPicture( - vadisplay, - vacontext, - buffer_ids, - buffer_id_cnt); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaRenderPicture\n"); - LOG_V( "End\n"); - return ret; - } - - LOG_V( "End\n"); - return ret; - -} - -MIX_RESULT MixVideoFormat_H264::_decode_end(bool drop_picture) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - -#ifdef DECODER_ROBUSTNESS - //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); -#else - //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); -#endif - - LOG_V("Begin\n"); - if (!this->end_picture_pending) { - if (this->video_frame) { - ret = MIX_RESULT_FAIL; - LOG_E("Unexpected: video_frame is not unreferenced.\n"); - } - goto CLEAN_UP; - } - - if (this->video_frame == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E("Unexpected: video_frame has been unreferenced.\n"); - goto CLEAN_UP; - } - - LOG_V( "Calling vaEndPicture\n"); - vret = vaEndPicture(this->va_display, this->va_context); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaEndPicture\n"); - goto CLEAN_UP; - } - -#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ - - LOG_V( "Calling vaSyncSurface\n"); - - //Decode the picture - vret = vaSyncSurface(parent->va_display, parent->video_frame->frame_id); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaSyncSurface\n"); - CLEAN_UP; - } -#endif - - if (drop_picture) { - // we are asked to drop this decoded picture - mix_videoframe_unref(this->video_frame); - this->video_frame = NULL; - goto CLEAN_UP; - } - - LOG_V( "Enqueueing the frame with frame manager, timestamp %"UINT64_FORMAT"\n", - this->video_frame->timestamp); - -#ifdef DECODER_ROBUSTNESS - if (this->last_decoded_frame) - mix_videoframe_unref(this->last_decoded_frame); - this->last_decoded_frame = this->video_frame; - mix_videoframe_ref(this->last_decoded_frame); -#endif - - //Enqueue the decoded frame using frame manager - ret = mix_framemanager_enqueue(this->framemgr, this->video_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error enqueuing frame object\n"); - goto CLEAN_UP; - } else { - // video frame is passed to frame manager - this->video_frame = NULL; - LOG_V("video_frame is assigned to be NULL !\n"); - } - -CLEAN_UP: - if 
(this->video_frame) { - /* this always indicates an error */ - mix_videoframe_unref(this->video_frame); - this->video_frame = NULL; - } - this->end_picture_pending = FALSE; - LOG_V("End\n"); - return ret; -} - -MIX_RESULT MixVideoFormat_H264::_decode_continue(vbp_data_h264 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - uint32 i, j; - vbp_picture_data_h264* pic_data = NULL; - LOG_V("Begin\n"); - for (i = 0; i < data->num_pictures; i++) { - pic_data = &(data->pic_data[i]); - if (pic_data->pic_parms == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E("pic_data->pic_parms is NULL.\n"); - LOG_V("End\n"); - return ret; - } - - if (pic_data->slc_data == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E("pic_data->slc_data is NULL.\n"); - LOG_V("End\n"); - return ret; - } - - if (pic_data->num_slices == 0) { - ret = MIX_RESULT_FAIL; - LOG_E("pic_data->num_slices == 0.\n"); - LOG_V("End\n"); - return ret; - } - - LOG_V( "num_slices is %d\n", pic_data->num_slices); - for (j = 0; j < pic_data->num_slices; j++) { - LOG_V( "Decoding slice %d\n", j); - ret = _decode_a_slice(data, i, j); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "mix_videofmt_h264_decode_a_slice failed, error = %#X.", ret); - LOG_V("End\n"); - return ret; - } - } - } - - LOG_V("End\n"); - return ret; -} - - - -MIX_RESULT MixVideoFormat_H264::_set_frame_type(vbp_data_h264 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - //Set the picture type (I, B or P frame) - //For H.264 we use the first encountered slice type for this (check - may need to change later to search all slices for B type) - MixFrameType frame_type = TYPE_INVALID; - switch (data->pic_data[0].slc_data[0].slc_parms.slice_type) { - case 0: - case 3: - case 5: - case 8: - frame_type = TYPE_P; - break; - case 1: - case 6: - frame_type = TYPE_B; - break; - case 2: - case 4: - case 7: - case 9: - frame_type = TYPE_I; - break; - default: - break; - } - - //Do not have to check for B frames after a seek - //Note: Demux should seek to IDR (instantaneous decoding refresh) frame, otherwise - // DPB will not be correct and frames may come in with invalid references - // This will be detected when DPB is checked for valid mapped surfaces and - // error returned from there. 
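- // For reference: slice_type in the switch above follows the H.264
- // slice_type code, 0/5 = P, 1/6 = B, 2/7 = I, 3/8 = SP, 4/9 = SI
- // (values 5..9 signal that all slices of the picture share one type);
- // SP maps to TYPE_P and SI to TYPE_I here.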
- - LOG_V( "frame type is %d\n", frame_type); - - //Set the frame type for the frame object (used in reordering by frame manager) - ret = mix_videoframe_set_frame_type(this->video_frame, frame_type); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error setting frame type on frame\n"); - } - return ret; -} - - -MIX_RESULT MixVideoFormat_H264::_set_frame_structure( - vbp_data_h264 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - if (data->pic_data[0].pic_parms->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD) { - mix_videoframe_set_frame_structure(this->video_frame, VA_BOTTOM_FIELD | VA_TOP_FIELD); - } else { - mix_videoframe_set_frame_structure(this->video_frame, VA_FRAME_PICTURE); - } - return ret; -} - - -MIX_RESULT MixVideoFormat_H264::_decode_begin(vbp_data_h264 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - //Get a frame from the surface pool - LOG_V("Begin\n"); - ret = mix_surfacepool_get(this->surfacepool, &(this->video_frame)); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting frame from surfacepool\n"); - return ret; - } - - /* the following calls will always succeed */ - // set frame type - ret = _set_frame_type(data); - // set frame structure - ret = _set_frame_structure(data); - //Set the discontinuity flag - mix_videoframe_set_discontinuity(this->video_frame, this->discontinuity_frame_in_progress); - //Set the timestamp - mix_videoframe_set_timestamp(this->video_frame, this->current_timestamp); - // Set displayorder - ret = mix_videoframe_set_displayorder(this->video_frame, - data->pic_data[0].pic_parms->CurrPic.TopFieldOrderCnt); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Error setting displayorder\n"); - return ret; - } - ret = _decode_continue(data); - LOG_V("End\n"); - return ret; -} - -MIX_RESULT MixVideoFormat_H264::_decode_a_buffer( - MixBuffer * bufin, - uint64 ts, - bool discontinuity, - MixVideoDecodeParams * decode_params, - bool complete_frame) { - uint32 pret = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - vbp_data_h264 *data = NULL; - - LOG_V( "Begin\n"); - LOG_V( "Calling parse for current frame, parse handle %d\n", (int)this->parser_handle); - pret = vbp_parse(this->parser_handle, - bufin->data, - bufin->size, - FALSE); - - LOG_V( "Called parse for current frame\n"); - if (pret != VBP_OK) { - ret = MIX_RESULT_ERROR_PROCESS_STREAM; // MIX_RESULT_DROPFRAME; - LOG_E( "vbp_parse failed.\n"); - LOG_V("End\n"); - return ret; - } - - //query for data - pret = vbp_query(this->parser_handle, (void**)&data); - - if ((pret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "vbp_query failed.\n"); - LOG_V("End\n"); - return ret; - - } - LOG_V( "Called query for current frame\n"); - - if (data->has_sps == 0 || data->has_pps == 0) { - ret = MIX_RESULT_MISSING_CONFIG; // MIX_RESULT_SUCCESS; - LOG_V("SPS or PPS is not available.\n"); - LOG_V("End\n"); - return ret; - - } - - if (data->new_sps) { - decode_params->new_sequence = data->new_sps; - missing_idr = true; - - ret = _handle_new_sequence(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_video_h264_handle_new_sequence failed.\n"); - LOG_V("End\n"); - return ret; - - } - } - - if (this->va_initialized == FALSE) { - _update_config_params(data); - - LOG_V("try initializing VA...\n"); - ret = _initialize_va(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_videofmt_h264_initialize_va failed.\n"); - LOG_V("End\n"); - return ret; - } - } - - // first pic_data always exists, check if any slice is parsed - if (data->pic_data[0].num_slices == 0) { - ret = MIX_RESULT_SUCCESS; - LOG_V("slice is not 
available.\n"); - LOG_V("End\n"); - return ret; - } - - // Check if first slice is IDR (5) - if (data->pic_data->slc_data->nal_unit_type == 5) { - missing_idr = false; - LOG_V("Received IDR.\n"); - } - else if (missing_idr) { - LOG_V("Missing IDR.\n"); - LOG_V("End\n"); - return MIX_RESULT_MISSING_IDR; - } - - uint64 last_ts = this->current_timestamp; - this->current_timestamp = ts; - this->discontinuity_frame_in_progress = discontinuity; - - LOG_V("ts = %lli last_ts = %lli\n", ts, last_ts); - - if (last_ts != ts) { - // finish decoding the last frame - ret = _decode_end(FALSE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_videofmt_h264_decode_end failed.\n"); - LOG_V("End\n"); - return ret; - } - - // start decoding a new frame - ret = _decode_begin(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_videofmt_h264_decode_begin failed.\n"); - LOG_V("End\n"); - return ret; - } - } else { - // parital frame - LOG_V("partial frame handling...\n"); - ret = _decode_continue(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_videofmt_h264_decode_continue failed.\n"); - LOG_V("End\n"); - return ret; - } - } - if (complete_frame) - { - // finish decoding current frame - ret = _decode_end(FALSE); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_V("mix_videofmt_h264_decode_end failed.\n"); - return ret; - } - } - - LOG_V("End\n"); - return ret; -} - - - -#ifdef HACK_DPB -static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, - vbp_picture_data_h264* pic_data - ) -{ - - bool found = FALSE; - uint tflags = 0; - VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms; - VAPictureH264 *pRefList = NULL; - uint32 i = 0, j = 0, k = 0, list = 0; - - MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix); - - //Set the surface ID for everything in the parser DPB to INVALID - for (i = 0; i < 16; i++) - { - pic_params->ReferenceFrames[i].picture_id = VA_INVALID_SURFACE; - pic_params->ReferenceFrames[i].frame_idx = -1; - pic_params->ReferenceFrames[i].TopFieldOrderCnt = -1; - pic_params->ReferenceFrames[i].BottomFieldOrderCnt = -1; - pic_params->ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID; //assuming we don't need to OR with existing flags - } - - pic_params->num_ref_frames = 0; - - for (i = 0; i < pic_data->num_slices; i++) - { - - //Copy from the List0 and List1 surface IDs - pRefList = pic_data->slc_data[i].slc_parms.RefPicList0; - for (list = 0; list < 2; list++) - { - for (j = 0; j < 32; j++) - { - if (pRefList[j].flags & VA_PICTURE_H264_INVALID) - { - break; //no more valid reference frames in this list - } - found = FALSE; - for (k = 0; k < pic_params->num_ref_frames; k++) - { - if (pic_params->ReferenceFrames[k].TopFieldOrderCnt == pRefList[j].TopFieldOrderCnt) - { - ///check for complementary field - tflags = pic_params->ReferenceFrames[k].flags | pRefList[j].flags; - //If both TOP and BOTTOM are set, we'll clear those flags - if ((tflags & VA_PICTURE_H264_TOP_FIELD) && - (tflags & VA_PICTURE_H264_TOP_FIELD)) - pic_params->ReferenceFrames[k].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; - found = TRUE; //already in the DPB; will not add this one - break; - } - } - if (!found) - { - uint poc = mix_videofmt_h264_get_poc(&(pRefList[j])); - void* video_frame = j_hash_table_lookup(self->dpb_surface_table, (void*)poc); - -#ifdef DECODER_ROBUSTNESS - if (!video_frame) - { - if (!self->last_decoded_frame) - { - //No saved reference frame, can't recover this one - return MIX_RESULT_DROPFRAME; - } - - pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = - 
((MixVideoFrame *)self->last_decoded_frame)->frame_id; - LOG_V( "Reference frame not found, substituting %d\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); - - } - else - { - pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = - ((MixVideoFrame *)video_frame)->frame_id; - } -#else - if (!video_frame) return MIX_RESULT_DROPFRAME; //return non-fatal error - - pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = - ((MixVideoFrame *)video_frame)->frame_id; -#endif - - LOG_V( "Inserting frame id %d into DPB\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id); - - pic_params->ReferenceFrames[pic_params->num_ref_frames].flags = - pRefList[j].flags; - pic_params->ReferenceFrames[pic_params->num_ref_frames].frame_idx = - pRefList[j].frame_idx; - pic_params->ReferenceFrames[pic_params->num_ref_frames].TopFieldOrderCnt = - pRefList[j].TopFieldOrderCnt; - pic_params->ReferenceFrames[pic_params->num_ref_frames++].BottomFieldOrderCnt = - pRefList[j].BottomFieldOrderCnt; - } - - } - pRefList = pic_data->slc_data[i].slc_parms.RefPicList1; - } - - } - return MIX_RESULT_SUCCESS; -} -#endif - -MIX_RESULT MixVideoFormat_H264::_handle_ref_frames( - VAPictureParameterBufferH264* pic_params, - MixVideoFrame * current_frame) { - - uint poc = 0; - LOG_V( "Begin\n"); - - if (current_frame == NULL || pic_params == NULL) { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d. Surface ID is %d\n", - pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, - pic_params->CurrPic.BottomFieldOrderCnt, (int) current_frame->frame_id); - -#ifdef MIX_LOG_ENABLE - if (pic_params->CurrPic.flags & VA_PICTURE_H264_INVALID) - LOG_V( "Flags show VA_PICTURE_H264_INVALID\n"); - - if (pic_params->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD) - LOG_V( "Flags show VA_PICTURE_H264_TOP_FIELD\n"); - - if (pic_params->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD) - LOG_V( "Flags show VA_PICTURE_H264_BOTTOM_FIELD\n"); - - if (pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) - LOG_V( "Flags show VA_PICTURE_H264_SHORT_TERM_REFERENCE\n"); - - if (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE) - LOG_V( "Flags show VA_PICTURE_H264_LONG_TERM_REFERENCE\n"); -#endif - - //First we need to check the parser DBP against our DPB table - //So for each item in our DBP table, we look to see if it is in the parser DPB - //If it is not, it gets unrefed and removed -#ifdef MIX_LOG_ENABLE - uint num_removed = -#endif - j_hash_table_foreach_remove(this->dpb_surface_table, mix_videofmt_h264_check_in_DPB, pic_params); - - LOG_V( "%d entries removed from DPB surface table at this frame\n", num_removed); - - - MixVideoFrame *mvf = NULL; - //bool found = FALSE; - //Set the surface ID for everything in the parser DPB - int i = 0; - for (; i < 16; i++) { - if (!(pic_params->ReferenceFrames[i].flags & VA_PICTURE_H264_INVALID)) { - poc = mix_videofmt_h264_get_poc(&(pic_params->ReferenceFrames[i])); - LOG_V( "Looking up poc %d in dpb table\n", poc); - //found = j_hash_table_lookup_extended(this->dpb_surface_table, (void*)poc, NULL, (void**)&mvf); - mvf = (MixVideoFrame*)j_hash_table_lookup( this->dpb_surface_table, (void *)poc); - if (NULL != mvf) { - pic_params->ReferenceFrames[i].picture_id = mvf->frame_id; - LOG_V( "Looked up poc %d in dpb table found frame ID %d\n", poc, (int)mvf->frame_id); - } else { - LOG_V( "Looking up poc %d in dpb table did not find 
value\n", poc); - } - LOG_V( "For poc %d, set surface id for DPB index %d to %d\n", - poc, i, (int)pic_params->ReferenceFrames[i].picture_id); - } - } - //Set picture_id for current picture - pic_params->CurrPic.picture_id = current_frame->frame_id; - - //Check to see if current frame is a reference frame - if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || - (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { - //Get current frame's POC - poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); - //Increment the reference count for this frame - mix_videoframe_ref(current_frame); - LOG_V( "Inserting poc %d, surfaceID %d\n", poc, (int)current_frame->frame_id); - //Add this frame to the DPB surface table - j_hash_table_insert(this->dpb_surface_table, (void*)poc, current_frame); - } - - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoFormat_H264::_cleanup_ref_frame( - VAPictureParameterBufferH264* pic_params, MixVideoFrame * current_frame) { - - uint poc = 0; - LOG_V( "Begin\n"); - - if (current_frame == NULL || pic_params == NULL) { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d. Surface ID is %d\n", - pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, - pic_params->CurrPic.BottomFieldOrderCnt, (int) current_frame->frame_id); - - //Check to see if current frame is a reference frame - if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || - (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { - //Get current frame's POC - poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic)); - //We don't need to decrement the ref count for the video frame here; it's done elsewhere - LOG_V( "Removing poc %d, surfaceID %d\n", poc, (int)current_frame->frame_id); - //Remove this frame from the DPB surface table - j_hash_table_remove(this->dpb_surface_table, (void*)poc); - } - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; -} - -uint mix_videofmt_h264_get_poc(VAPictureH264 *pic) { - if (pic == NULL) - return 0; - - if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) - return pic->BottomFieldOrderCnt; - - if (pic->flags & VA_PICTURE_H264_TOP_FIELD) - return pic->TopFieldOrderCnt; - - return pic->TopFieldOrderCnt; - -} - - -int mix_videofmt_h264_check_in_DPB( - void* key, void* value, void* user_data) { - int ret = (!0); - if ((value == NULL) || (user_data == NULL)) //Note that 0 is valid value for key - return 0; - - VAPictureH264* vaPic = NULL; - int i = 0; - for (; i < 16; i++) - { - vaPic = &(((VAPictureParameterBufferH264*)user_data)->ReferenceFrames[i]); - if (vaPic->flags & VA_PICTURE_H264_INVALID) - continue; - - if ((uint)key == vaPic->TopFieldOrderCnt || - (uint)key == vaPic->BottomFieldOrderCnt) - { - ret = 0; - break; - } - } - return ret; -} - - -void mix_videofmt_h264_destroy_DPB_key(void* data) -{ - //TODO remove this method and don't register it with the hash table foreach call; it is no longer needed - LOG_V( "Begin, poc of %d\n", (uint)data); - LOG_V( "End\n"); - return; -} - -void mix_videofmt_h264_destroy_DPB_value(void* data) -{ - LOG_V( "Begin\n"); - if (data != NULL) { - mix_videoframe_unref((MixVideoFrame *)data); - } - LOG_V( "End\n"); - return; -} - diff --git a/mix_video/src/mixvideoformat_h264.h b/mix_video/src/mixvideoformat_h264.h deleted file mode 100755 index a7d6479..0000000 --- a/mix_video/src/mixvideoformat_h264.h +++ /dev/null @@ -1,111 +0,0 @@ -/* - INTEL CONFIDENTIAL - 
Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-#ifndef __MIX_VIDEOFORMAT_H264_H__
-#define __MIX_VIDEOFORMAT_H264_H__
-
-#include "mixvideoformat.h"
-#include "mixvideoframe_private.h"
-#include
-
-#define DECODER_ROBUSTNESS
-
-
-#define MIX_VIDEO_H264_SURFACE_NUM 20
-
-
-#define MIX_VIDEOFORMAT_H264(obj) (reinterpret_cast<MixVideoFormat_H264 *>(obj))
-#define MIX_IS_VIDEOFORMAT_H264(obj) (NULL != MIX_VIDEOFORMAT_H264(obj))
-
-
-
-
-class MixVideoFormat_H264 : public MixVideoFormat {
-public:
- MixVideoFormat_H264();
- virtual ~MixVideoFormat_H264();
-
- virtual MIX_RESULT Initialize(
- MixVideoConfigParamsDec * config_params,
- MixFrameManager * frame_mgr,
- MixBufferPool * input_buf_pool,
- MixSurfacePool ** surface_pool,
- VADisplay va_display);
- virtual MIX_RESULT Decode(
- MixBuffer * bufin[], int bufincnt,
- MixVideoDecodeParams * decode_params);
- virtual MIX_RESULT Flush();
- virtual MIX_RESULT EndOfStream();
-
-private:
- // Local helper functions
- MIX_RESULT _update_config_params(vbp_data_h264 *data);
- MIX_RESULT _initialize_va(vbp_data_h264 *data);
- MIX_RESULT _decode_a_buffer(MixBuffer * bufin, uint64 ts,
- bool discontinuity, MixVideoDecodeParams * decode_params, bool complete_frame);
- MIX_RESULT _decode_end(bool drop_picture);
- MIX_RESULT _handle_new_sequence(vbp_data_h264 *data);
- MIX_RESULT _decode_begin(vbp_data_h264 *data);
- MIX_RESULT _decode_continue(vbp_data_h264 *data);
- MIX_RESULT _set_frame_type(vbp_data_h264 *data);
- MIX_RESULT _set_frame_structure(vbp_data_h264 *data);
- MIX_RESULT _update_ref_pic_list(VAPictureParameterBufferH264* picture_params,
- VASliceParameterBufferH264* slice_params);
- MIX_RESULT _decode_a_slice(vbp_data_h264 *data,
- int picture_index, int slice_index);
- MIX_RESULT _cleanup_ref_frame(
- VAPictureParameterBufferH264* pic_params, MixVideoFrame * current_frame);
- MIX_RESULT _handle_ref_frames(
- VAPictureParameterBufferH264* pic_params,
- MixVideoFrame * current_frame);
-
-public:
- /*< public > */
- /*< private > */
- JHashTable *dpb_surface_table;
-#ifdef DECODER_ROBUSTNESS
- //Can improve which frame is used for this at a later time
- MixVideoFrame *last_decoded_frame; //last surface decoded, to be used as reference frame when reference frames are missing
-#endif
- bool missing_idr;
-};
-
-
-/**
- * mix_videoformat_h264_new:
- * @returns: A newly allocated instance of #MixVideoFormat_H264
- *
- * Use this method to create a new instance of #MixVideoFormat_H264
- */
-MixVideoFormat_H264 *mix_videoformat_h264_new(void);
-
-/**
- * mix_videoformat_h264_ref:
- * @mix: object to add reference
- * @returns: the MixVideoFormat_H264 instance where reference count has been increased.
- *
- * Add reference count.
- */
-MixVideoFormat_H264 *mix_videoformat_h264_ref(MixVideoFormat_H264 * mix);
-
-/**
- * mix_videoformat_h264_unref:
- * @obj: object to unref.
- *
- * Decrement reference count of the object.
- */
-MixVideoFormat_H264* mix_videoformat_h264_unref(MixVideoFormat_H264 *mix);
-
-
-/* Helper functions to manage the DPB table */
-int mix_videofmt_h264_check_in_DPB(void* key, void* value, void* user_data);
-void mix_videofmt_h264_destroy_DPB_key(void* data);
-void mix_videofmt_h264_destroy_DPB_value(void* data);
-uint mix_videofmt_h264_get_poc(VAPictureH264 *pic);
-
-#endif /* __MIX_VIDEOFORMAT_H264_H__ */
diff --git a/mix_video/src/mixvideoformat_mp42.cpp b/mix_video/src/mixvideoformat_mp42.cpp
deleted file mode 100644
index 8a90e30..0000000
--- a/mix_video/src/mixvideoformat_mp42.cpp
+++ /dev/null
@@ -1,1090 +0,0 @@
-/*
- INTEL CONFIDENTIAL
- Copyright 2009 Intel Corporation All Rights Reserved.
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
-
- No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
- */
-
-#include
-#include "mixvideolog.h"
-#include "mixvideoformat_mp42.h"
-
-// Value of VOP type defined here follows MP4 spec, and has the same value as the corresponding frame type
-// defined in enumeration MixFrameType (except sprite (S))
-enum {
- MP4_VOP_TYPE_I = 0,
- MP4_VOP_TYPE_P = 1,
- MP4_VOP_TYPE_B = 2,
- MP4_VOP_TYPE_S = 3,
-};
-
-MixVideoFormat_MP42::MixVideoFormat_MP42()
- :last_frame(NULL)
- ,last_vop_coding_type(-1)
- ,last_vop_time_increment(0)
- ,next_nvop_for_PB_frame(FALSE)
- ,iq_matrix_buf_sent(FALSE) {
- this->reference_frames[0] = NULL;
- this->reference_frames[1] = NULL;
-}
-
-MixVideoFormat_MP42::~MixVideoFormat_MP42() {
- /* clean up here.
*/ - int32 vbp_ret = VBP_OK; - LOG_V("Begin\n"); - //surfacepool is deallocated by parent - //inputbufqueue is deallocated by parent - //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces - - Lock(); - - /* unref reference frames */ - for (uint32 idx = 0; idx < 2; idx++) { - if (this->reference_frames[idx] != NULL) { - mix_videoframe_unref(this->reference_frames[idx]); - this->reference_frames[idx] = NULL; - } - } - if (this->last_frame) { - mix_videoframe_unref(this->last_frame); - this->last_frame = NULL; - } - this->next_nvop_for_PB_frame = FALSE; - this->iq_matrix_buf_sent = FALSE; - - /* Reset state */ - this->initialized = TRUE; - this->end_picture_pending = FALSE; - this->discontinuity_frame_in_progress = FALSE; - this->current_timestamp = (uint64)-1; - - /* Close the parser */ - if (this->parser_handle) { - vbp_ret = vbp_close(this->parser_handle); - this->parser_handle = NULL; - } - - Unlock(); - LOG_V("End\n"); -} - - -MixVideoFormat_MP42 *mix_videoformat_mp42_new(void) { - return new MixVideoFormat_MP42; -} - -MixVideoFormat_MP42 * mix_videoformat_mp42_ref(MixVideoFormat_MP42 * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} - -MixVideoFormat_MP42 *mix_videoformat_mp42_unref(MixVideoFormat_MP42 * mix) { - if (NULL != mix) - return MIX_VIDEOFORMAT_MP42(mix->Unref()); - else - return mix; -} - -MIX_RESULT MixVideoFormat_MP42::_update_config_params( - vbp_data_mp42 *data) { - if (this->picture_width == 0 || - this->picture_height == 0 || - this->picture_width < data->codec_data.video_object_layer_width || - this->picture_height < data->codec_data.video_object_layer_height) { - this->picture_width = data->codec_data.video_object_layer_width; - this->picture_height = data->codec_data.video_object_layer_height; - mix_videoconfigparamsdec_set_picture_res( - this->config_params, this->picture_width, this->picture_height); - } - // video_range has default value of 0. Y ranges from 16 to 235. - mix_videoconfigparamsdec_set_video_range(this->config_params, data->codec_data.video_range); - uint8 color_matrix; - switch (data->codec_data.matrix_coefficients) { - case 1: - color_matrix = VA_SRC_BT709; - break; - // ITU-R Recommendation BT.470-6 System B, G (MP4), same as - // SMPTE 170M/BT601 - case 5: - case 6: - color_matrix = VA_SRC_BT601; - break; - default: - // unknown color matrix, set to 0 so color space flag will not be set. 
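- // (these matrix_coefficients code points are shared with the H.264/VUI
- // colour description table, which is why the same VA_SRC_* flags used by
- // the H.264 path can be reused here)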
- color_matrix = 0;
- break;
- }
- mix_videoconfigparamsdec_set_color_matrix(this->config_params, color_matrix);
-
- mix_videoconfigparamsdec_set_pixel_aspect_ratio(
- this->config_params, data->codec_data.par_width, data->codec_data.par_height);
-
- mix_videoconfigparamsdec_set_bit_rate(
- this->config_params,
- data->codec_data.bit_rate);
-
- return MIX_RESULT_SUCCESS;
-}
-
-MIX_RESULT MixVideoFormat_MP42::_initialize_va(vbp_data_mp42 *data) {
- MIX_RESULT ret = MIX_RESULT_SUCCESS;
- VAStatus vret = VA_STATUS_SUCCESS;
- VAConfigAttrib attrib;
- VAProfile va_profile;
- LOG_V( "Begin\n");
- if (this->va_initialized) {
- LOG_W("va already initialized.\n");
- return MIX_RESULT_SUCCESS;
- }
-
- //We are requesting RT attributes
- attrib.type = VAConfigAttribRTFormat;
- attrib.value = VA_RT_FORMAT_YUV420;
-
- //Initialize and save the VA config ID
- if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) {
- va_profile = VAProfileMPEG4AdvancedSimple;
- } else {
- va_profile = VAProfileMPEG4Simple;
- }
- vret = vaCreateConfig(
- this->va_display,
- va_profile,
- VAEntrypointVLD,
- &attrib,
- 1,
- &(this->va_config));
-
- if (vret != VA_STATUS_SUCCESS) {
- ret = MIX_RESULT_FAIL;
- LOG_E("vaCreateConfig failed\n");
- goto CLEAN_UP;
- }
-
- // add 1 more surface for packed frame (PB frame), and another one
- // for partial frame handling
- this->va_num_surfaces = this->extra_surfaces + 4 + 1 + 1;
- //if (parent->va_num_surfaces > MIX_VIDEO_MP42_SURFACE_NUM)
- // parent->va_num_surfaces = MIX_VIDEO_MP42_SURFACE_NUM;
-
- this->va_surfaces = reinterpret_cast<VASurfaceID *>(malloc(sizeof(VASurfaceID)*this->va_num_surfaces));
- if (this->va_surfaces == NULL) {
- ret = MIX_RESULT_FAIL;
- LOG_E( "va_surfaces == NULL. \n");
- goto CLEAN_UP;
- }
-
- vret = vaCreateSurfaces(
- this->va_display,
- VA_RT_FORMAT_YUV420,
- this->picture_width,
- this->picture_height,
- this->va_surfaces ,
- this->va_num_surfaces,
- NULL, 0);
-
- if (vret != VA_STATUS_SUCCESS) {
- ret = MIX_RESULT_FAIL;
- LOG_E( "Error allocating surfaces\n");
- goto CLEAN_UP;
- }
-
- LOG_V( "Created %d libva surfaces\n", this->va_num_surfaces);
-
- //Initialize the surface pool
- ret = mix_surfacepool_initialize(
- this->surfacepool,
- this->va_surfaces,
- this->va_num_surfaces,
- this->va_display);
-
- switch (ret)
- {
- case MIX_RESULT_SUCCESS:
- break;
- case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing.
- default: - ret = MIX_RESULT_ALREADY_INIT; - LOG_E( "Error init surface pool\n"); - goto CLEAN_UP; - break; - } - - //Initialize and save the VA context ID - //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 - vret = vaCreateContext( - this->va_display, - this->va_config, - this->picture_width, - this->picture_height, - 0, - this->va_surfaces, - this->va_num_surfaces, - &(this->va_context)); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing video driver\n"); - goto CLEAN_UP; - } - this->va_initialized = TRUE; - -CLEAN_UP: - return ret; -} - -MIX_RESULT MixVideoFormat_MP42::_decode_a_slice( - vbp_data_mp42* data, vbp_picture_data_mp42* pic_data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - VADisplay vadisplay = NULL; - VAContextID vacontext; - uint buffer_id_cnt = 0; - int frame_type = -1; - // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data - VABufferID buffer_ids[4]; - VAPictureParameterBufferMPEG4* pic_params = &(pic_data->picture_param); - vbp_slice_data_mp42* slice_data = &(pic_data->slice_data); - VASliceParameterBufferMPEG4* slice_params = &(slice_data->slice_param); - - LOG_V( "Begin\n"); - - vadisplay = this->va_display; - vacontext = this->va_context; - - if (!this->end_picture_pending) { - LOG_E("picture decoder is not started!\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - // update reference pictures - frame_type = pic_params->vop_fields.bits.vop_coding_type; - - switch (frame_type) { - case MP4_VOP_TYPE_I: - pic_params->forward_reference_picture = VA_INVALID_SURFACE; - pic_params->backward_reference_picture = VA_INVALID_SURFACE; - break; - - case MP4_VOP_TYPE_P: - pic_params-> forward_reference_picture - = this->reference_frames[0]->frame_id; - pic_params-> backward_reference_picture = VA_INVALID_SURFACE; - break; - - case MP4_VOP_TYPE_B: - pic_params->vop_fields.bits.backward_reference_vop_coding_type - = this->last_vop_coding_type; - pic_params->forward_reference_picture - = this->reference_frames[1]->frame_id; - pic_params->backward_reference_picture - = this->reference_frames[0]->frame_id; - break; - - case MP4_VOP_TYPE_S: - pic_params-> forward_reference_picture - = this->reference_frames[0]->frame_id; - pic_params-> backward_reference_picture = VA_INVALID_SURFACE; - break; - - default: - LOG_W("default, Will never reach here\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - break; - } - - //Now for slices - - LOG_V( "Creating libva picture parameter buffer\n"); - - //First the picture parameter buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAPictureParameterBufferType, - sizeof(VAPictureParameterBufferMPEG4), - 1, - pic_params, - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - - buffer_id_cnt++; - - if (pic_params->vol_fields.bits.quant_type && - this->iq_matrix_buf_sent == FALSE) { - LOG_V( "Creating libva IQMatrix buffer\n"); - // only send IQ matrix for the first slice in the picture - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAIQMatrixBufferType, - sizeof(VAIQMatrixBufferMPEG4), - 1, - &(data->iq_matrix_buffer), - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - this->iq_matrix_buf_sent = TRUE; - buffer_id_cnt++; - } - - vret = vaCreateBuffer( - 
vadisplay, - vacontext, - VASliceParameterBufferType, - sizeof(VASliceParameterBufferMPEG4), - 1, - slice_params, - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - - buffer_id_cnt++; - - - //Do slice data - //slice data buffer pointer - //Note that this is the original data buffer ptr; - // offset to the actual slice data is provided in - // slice_data_offset in VASliceParameterBufferMP42 - - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceDataBufferType, - slice_data->slice_size, //size - 1, //num_elements - slice_data->buffer_addr + slice_data->slice_offset, - &buffer_ids[buffer_id_cnt]); - - buffer_id_cnt++; - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - - LOG_V( "Calling vaRenderPicture\n"); - - //Render the picture - vret = vaRenderPicture( - vadisplay, - vacontext, - buffer_ids, - buffer_id_cnt); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaRenderPicture\n"); - goto CLEAN_UP; - } - -CLEAN_UP: - LOG_V( "End\n"); - return ret; -} - -MIX_RESULT MixVideoFormat_MP42::_decode_end(bool drop_picture) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - - if (!this->end_picture_pending) { - if (this->video_frame) { - ret = MIX_RESULT_FAIL; - LOG_E("Unexpected: video_frame is not unreferenced.\n"); - } - goto CLEAN_UP; - } - - if (this->video_frame == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E("Unexpected: video_frame has been unreferenced.\n"); - goto CLEAN_UP; - } - - vret = vaEndPicture(this->va_display, this->va_context); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_DROPFRAME; - LOG_E( "Video driver returned error from vaEndPicture\n"); - goto CLEAN_UP; - } - -#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ - - LOG_V( "Calling vaSyncSurface\n"); - - //Decode the picture - vret = vaSyncSurface(vadisplay, surface); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaSyncSurface\n"); - CLEAN_UP; - } -#endif - - if (drop_picture) { - // we are asked to drop this decoded picture - mix_videoframe_unref(this->video_frame); - this->video_frame = NULL; - goto CLEAN_UP; - } - - //Enqueue the decoded frame using frame manager - ret = mix_framemanager_enqueue(this->framemgr, this->video_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error enqueuing frame object\n"); - goto CLEAN_UP; - } else { - // video frame is passed to frame manager - this->video_frame = NULL; - } - -CLEAN_UP: - if (this->video_frame) { - /* this always indicates an error */ - mix_videoframe_unref(this->video_frame); - this->video_frame = NULL; - } - this->end_picture_pending = FALSE; - return ret; -} - -MIX_RESULT MixVideoFormat_MP42::_decode_continue(vbp_data_mp42 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - uint32 i; - int frame_type = -1; - vbp_picture_data_mp42* pic_data = NULL; - VAPictureParameterBufferMPEG4* pic_params = NULL; - /* - Packed Frame Assumption: - - 1. In one packed frame, there's only one P or I frame and only one B frame. - 2. In packed frame, there's no skipped frame (vop_coded = 0) - 3. For one packed frame, there will be one N-VOP frame to follow the packed frame (may not immediately). - 4. 
N-VOP frame is the frame with vop_coded = 0.
- 5. The timestamp of the N-VOP frame will be used for the P or I frame in the packed frame
-
-
- I, P, {P, B}, B, N, P, N, I, ...
- I, P, {P, B}, N, P, N, I, ...
-
- The first N is a placeholder for the P frame in the packed frame
- The second N is a skipped frame
- */
-
- pic_data = data->picture_data;
- for (i = 0; i < data->number_picture_data; i++, pic_data = pic_data->next_picture_data) {
- pic_params = &(pic_data->picture_param);
- frame_type = pic_params->vop_fields.bits.vop_coding_type;
- if (frame_type == MP4_VOP_TYPE_S &&
- pic_params->no_of_sprite_warping_points > 1) {
- // hardware only supports up to one warping point (stationary or translation)
- LOG_E("sprite with %d warping points is not supported by HW.\n",
- pic_params->no_of_sprite_warping_points);
- return MIX_RESULT_DROPFRAME;
- }
-
- if (pic_data->vop_coded == 0) {
- // this should never happen
- LOG_E("VOP is not coded.\n");
- return MIX_RESULT_DROPFRAME;
- }
-
- if (pic_data->new_picture_flag == 1 ||
- this->end_picture_pending == FALSE) {
- if (pic_data->new_picture_flag == 0) {
- LOG_W("First slice of picture is lost!\n");
- }
-
- ulong surface = 0;
- if (this->end_picture_pending)
- {
- // this indicates the start of a new frame in the packed frame
- LOG_V("packed frame is found.\n");
-
- // Update timestamp for packed frame as timestamp is for the B frame!
- if (this->video_frame && pic_params->vop_time_increment_resolution) {
- uint64 ts, ts_inc;
- mix_videoframe_get_timestamp(this->video_frame, &ts);
- ts_inc= this->last_vop_time_increment - pic_data->vop_time_increment +
- pic_params->vop_time_increment_resolution;
- ts_inc = ts_inc % pic_params->vop_time_increment_resolution;
- LOG_V("timestamp is incremented by %"UINT64_FORMAT" at %d resolution.\n",
- ts_inc, pic_params->vop_time_increment_resolution);
- // convert to microseconds; the timestamp uses microseconds as its basic unit.
- ts_inc = ts_inc * 1e6 / pic_params->vop_time_increment_resolution;
- LOG_V("timestamp of P frame in packed frame is updated from %"UINT64_FORMAT" to %"UINT64_FORMAT".\n",
- ts, ts + ts_inc);
- ts += ts_inc;
- mix_videoframe_set_timestamp(this->video_frame, ts);
- }
-
- _decode_end(FALSE);
- this->next_nvop_for_PB_frame = TRUE;
- }
-
- if (this->next_nvop_for_PB_frame == TRUE &&
- frame_type != MP4_VOP_TYPE_B) {
- LOG_E("The second frame in the packed frame is not a B frame.\n");
- this->next_nvop_for_PB_frame = FALSE;
- return MIX_RESULT_DROPFRAME;
- }
-
- //Get a frame from the surface pool
- ret = mix_surfacepool_get(this->surfacepool, &(this->video_frame));
- if (ret != MIX_RESULT_SUCCESS) {
- LOG_E( "Error getting frame from surfacepool\n");
- return MIX_RESULT_FAIL;
- }
-
- /* the following calls will always succeed */
-
- // set frame type
- if (frame_type == MP4_VOP_TYPE_S) {
- // sprite is treated as P frame in the display order
- mix_videoframe_set_frame_type(this->video_frame, (MixFrameType)MP4_VOP_TYPE_P);
- } else {
- mix_videoframe_set_frame_type(this->video_frame, (MixFrameType)frame_type);
- }
-
- // set frame structure
- if (pic_data->picture_param.vol_fields.bits.interlaced) {
- // only MPEG-4 studio profile can have field coding. All other profiles
- use frame coding only, i.e., no field VOP.
(see vop_structure in MP4 spec) - mix_videoframe_set_frame_structure( - this->video_frame, - VA_BOTTOM_FIELD | VA_TOP_FIELD); - LOG_W("Interlaced content, set frame structure to 3 (TOP | BOTTOM field) !\n"); - } else { - mix_videoframe_set_frame_structure(this->video_frame, VA_FRAME_PICTURE); - } - - //Set the discontinuity flag - mix_videoframe_set_discontinuity( - this->video_frame, - this->discontinuity_frame_in_progress); - - //Set the timestamp - mix_videoframe_set_timestamp(this->video_frame, this->current_timestamp); - - //Get our surface ID from the frame object - ret = mix_videoframe_get_frame_id(this->video_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting surface ID from frame object\n"); - goto CLEAN_UP; - } - - /* If I or P frame, update the reference array */ - if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) { - LOG_V("Updating forward/backward references for libva\n"); - this->last_vop_coding_type = frame_type; - this->last_vop_time_increment = pic_data->vop_time_increment; - _handle_ref_frames((_picture_type)frame_type, this->video_frame); - if (this->last_frame != NULL) { - mix_videoframe_unref(this->last_frame); - } - this->last_frame = this->video_frame; - mix_videoframe_ref(this->last_frame); - } - - //Now we can begin the picture - vret = vaBeginPicture(this->va_display, this->va_context, surface); - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaBeginPicture\n"); - goto CLEAN_UP; - } - - // vaBeginPicture needs a matching vaEndPicture - this->end_picture_pending = TRUE; - this->iq_matrix_buf_sent = FALSE; - } - - - ret = _decode_a_slice(data, pic_data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "_decode_a_slice failed, error = %#X.", ret); - goto CLEAN_UP; - } - } - -CLEAN_UP: - return ret; -} - - - -MIX_RESULT MixVideoFormat_MP42::_decode_begin(vbp_data_mp42* data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - int frame_type = -1; - VAPictureParameterBufferMPEG4* pic_params = NULL; - vbp_picture_data_mp42 *pic_data = NULL; - pic_data = data->picture_data; - pic_params = &(pic_data->picture_param); - frame_type = pic_params->vop_fields.bits.vop_coding_type; - - if (this->next_nvop_for_PB_frame) { - // if we are waiting for n-vop for packed frame, and the new frame is coded, the coding type - // of this frame must be B. - // for example: {PB} B N P B B P... - if (pic_data->vop_coded == 1 && frame_type != MP4_VOP_TYPE_B) { - LOG_E("Invalid coding type while waiting for n-vop for packed frame.\n"); - // timestamp of P frame in the queue is not correct - mix_framemanager_flush(this->framemgr); - this->next_nvop_for_PB_frame = FALSE; - } - } - - if (pic_data->vop_coded == 0) { - if (this->last_frame == NULL) { - LOG_E("The forward reference frame is NULL, couldn't reconstruct skipped frame.\n"); - mix_framemanager_flush(this->framemgr); - this->next_nvop_for_PB_frame = FALSE; - return MIX_RESULT_DROPFRAME; - } - - if (this->next_nvop_for_PB_frame) { - // P frame is already in queue, just need to update time stamp. 
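- // (the timestamp carried by this N-VOP is the one that really belongs to
- // the P frame of the packed {P,B} pair; the P frame's queued timestamp
- // was only estimated from vop_time_increment when the pair was split)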
- mix_videoframe_set_timestamp(this->last_frame, this->current_timestamp); - this->next_nvop_for_PB_frame = FALSE; - } else { - // handle skipped frame - MixVideoFrame *skip_frame = NULL; - ulong frame_id = VA_INVALID_SURFACE; - - skip_frame = mix_videoframe_new(); - ret = mix_videoframe_set_is_skipped(skip_frame, TRUE); - ret = mix_videoframe_get_frame_id(this->last_frame, &frame_id); - ret = mix_videoframe_set_frame_id(skip_frame, frame_id); - ret = mix_videoframe_set_frame_type(skip_frame, (MixFrameType)MP4_VOP_TYPE_P); - ret = mix_videoframe_set_real_frame(skip_frame, this->last_frame); - // add a reference as skip_frame holds the last_frame. - mix_videoframe_ref(this->last_frame); - ret = mix_videoframe_set_timestamp(skip_frame, this->current_timestamp); - ret = mix_videoframe_set_discontinuity(skip_frame, FALSE); - - LOG_V("Processing skipped frame %x, frame_id set to %d, ts %"UINT64_FORMAT"\n", - (uint)skip_frame, (uint)frame_id, this->current_timestamp); - - /* Enqueue the skipped frame using frame manager */ - ret = mix_framemanager_enqueue(this->framemgr, skip_frame); - } - - if (data->number_picture_data > 1) { - LOG_E("Unexpected to have more picture data following a not-coded VOP.\n"); - //picture data is thrown away. No issue if picture data is for N-VOP. if picture data is for - // coded picture, a frame is lost. - } - return MIX_RESULT_SUCCESS; - } else { - /* - * Check for B frames after a seek - * We need to have both reference frames in hand before we can decode a B frame - * If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME - */ - if (frame_type == MP4_VOP_TYPE_B) { - if (this->reference_frames[0] == NULL || - this->reference_frames[1] == NULL) { - LOG_W("Insufficient reference frames for B frame\n"); - return MIX_RESULT_DROPFRAME; - } - } else if (frame_type == MP4_VOP_TYPE_P || frame_type == MP4_VOP_TYPE_S) { -#if 0 - /* - * For special clips using P frame (special P frame with all MB intra coded) as key frame - * Need to skip the reference check to enable the seek - */ - if (this->reference_frames[0] == NULL) { - LOG_W("Reference frames for P/S frame is missing\n"); - return MIX_RESULT_DROPFRAME; - } -#endif - } - // all sanity check passes, continue decoding through mix_videofmt_mp42_decode_continue - ret = _decode_continue(data); - } - return ret; -} - -MIX_RESULT MixVideoFormat_MP42::_decode_a_buffer( - MixBuffer * bufin, uint64 ts, bool discontinuity,bool complete_frame) { - uint32 pret = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - vbp_data_mp42 *data = NULL; - uint64 last_ts = 0; - - LOG_V( "Begin\n"); - pret = vbp_parse(this->parser_handle, - bufin->data, - bufin->size, - FALSE); - - if (pret != VBP_OK) { - ret = MIX_RESULT_DROPFRAME; - LOG_E( "vbp_parse failed.\n"); - goto CLEAN_UP; - } - else { - LOG_V("vbp_parse succeeded.\n"); - } - //query for data - pret = vbp_query(this->parser_handle, (void **) &data); - - if ((pret != VBP_OK) || (data == NULL)) { - // never happen! 
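- // (a parse that returned VBP_OK is expected to leave query data behind,
- // so a NULL result here is treated as a fatal parser fault rather than
- // a droppable frame)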
- ret = MIX_RESULT_FAIL;
- LOG_E( "vbp_query failed.\n");
- goto CLEAN_UP;
- } else {
- LOG_V("vbp_query succeeded.\n");
- }
-
- if (this->va_initialized == FALSE) {
- _update_config_params(data);
- LOG_V("try initializing VA...\n");
- ret = _initialize_va(data);
- if (ret != MIX_RESULT_SUCCESS) {
- LOG_V("mix_videofmt_mp42_initialize_va failed.\n");
- goto CLEAN_UP;
- }
- }
-
- // check if any slice is parsed, we may just receive configuration data
- if (data->number_picture_data == 0) {
- ret = MIX_RESULT_SUCCESS;
- LOG_V("slice is not available.\n");
- goto CLEAN_UP;
- }
-
- last_ts = this->current_timestamp;
- this->current_timestamp = ts;
- this->discontinuity_frame_in_progress = discontinuity;
-
- if (last_ts != ts) {
- // finish decoding the last frame
- ret = _decode_end(FALSE);
- if (ret != MIX_RESULT_SUCCESS) {
- LOG_V("mix_videofmt_mp42_decode_end failed.\n");
- goto CLEAN_UP;
- }
-
- // start decoding a new frame
- ret = _decode_begin(data);
- if (ret != MIX_RESULT_SUCCESS) {
- LOG_V("mix_videofmt_mp42_decode_begin failed.\n");
- goto CLEAN_UP;
- }
- } else {
- ret = _decode_continue(data);
- if (ret != MIX_RESULT_SUCCESS) {
- LOG_V("mix_videofmt_mp42_decode_continue failed.\n");
- goto CLEAN_UP;
- }
- }
- if (complete_frame)
- {
- // finish decoding current frame
- ret = _decode_end(FALSE);
- if (ret != MIX_RESULT_SUCCESS)
- {
- LOG_V("mix_videofmt_mp42_decode_end failed.\n");
- goto CLEAN_UP;
- }
- }
-
-CLEAN_UP:
- LOG_V( "End\n");
- return ret;
-}
-
-
-MIX_RESULT MixVideoFormat_MP42::Initialize(
- MixVideoConfigParamsDec * config_params,
- MixFrameManager * frame_mgr,
- MixBufferPool * input_buf_pool,
- MixSurfacePool ** surface_pool,
- VADisplay va_display ) {
-
- uint32 pret = 0;
- MIX_RESULT ret = MIX_RESULT_SUCCESS;
- enum _vbp_parser_type ptype = VBP_MPEG4;
- vbp_data_mp42 *data = NULL;
- MixIOVec *header = NULL;
-
- if (config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL) {
- LOG_E( "Null pointer passed in\n");
- return MIX_RESULT_NULL_PTR;
- }
-
- LOG_V( "Begin\n");
-
- // chain up parent method
- ret = MixVideoFormat::Initialize(config_params, frame_mgr, input_buf_pool,
- surface_pool, va_display);
-
- if (ret != MIX_RESULT_SUCCESS) {
- LOG_E( "Error initializing\n");
- return ret;
- }
-
- LOG_V( "Locking\n");
- //From now on, we exit this function through cleanup:
- Lock();
-
- this->surfacepool = mix_surfacepool_new();
- *surface_pool = this->surfacepool;
-
- if (this->surfacepool == NULL) {
- ret = MIX_RESULT_NO_MEMORY;
- LOG_E( "surfacepool == NULL.\n");
- goto CLEAN_UP;
- }
-
- ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params,
- &this->extra_surfaces);
-
- if (ret != MIX_RESULT_SUCCESS) {
- ret = MIX_RESULT_FAIL;
- LOG_E( "Cannot get extra surface allocation setting\n");
- goto CLEAN_UP;
- }
-
- //Load the bitstream parser
- pret = vbp_open(ptype, &(this->parser_handle));
-
- if (pret != VBP_OK) {
- ret = MIX_RESULT_FAIL;
- LOG_E( "Error opening parser\n");
- goto CLEAN_UP;
- }
- LOG_V( "Opened parser\n");
-
-
- ret = mix_videoconfigparamsdec_get_header(config_params, &header);
- if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) {
- // Delay initializing VA if codec configuration data is not ready, but don't return an error.
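- // (initialization then happens lazily: _decode_a_buffer() checks
- // va_initialized on every buffer and calls _initialize_va() as soon as
- // parsed configuration data becomes available)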
- ret = MIX_RESULT_SUCCESS; - LOG_W( "Codec data is not available in the configuration parameter.\n"); - goto CLEAN_UP; - } - - LOG_V( "Calling parse on header data, handle %d\n", (int)this->parser_handle); - - pret = vbp_parse(this->parser_handle, header->data, - header->data_size, TRUE); - - if (pret != VBP_OK) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error parsing header data\n"); - goto CLEAN_UP; - } - - LOG_V( "Parsed header\n"); - - //Get the header data and save - pret = vbp_query(this->parser_handle, (void **)&data); - - if ((pret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error reading parsed header data\n"); - goto CLEAN_UP; - } - - LOG_V( "Queried parser for header data\n"); - - _update_config_params(data); - - ret = _initialize_va(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error initializing va. \n"); - goto CLEAN_UP; - } - -CLEAN_UP: - if (ret != MIX_RESULT_SUCCESS) { - if (this->parser_handle) { - pret = vbp_close(this->parser_handle); - this->parser_handle = NULL; - } - this->initialized = FALSE; - } else { - this->initialized = TRUE; - } - if (header != NULL) { - if (header->data != NULL) - free(header->data); - free(header); - header = NULL; - } - LOG_V( "Unlocking\n"); - Unlock(); - return ret; -} - -MIX_RESULT MixVideoFormat_MP42::Decode( - MixBuffer * bufin[], int bufincnt, MixVideoDecodeParams * decode_params) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - uint64 ts = 0; - bool discontinuity = FALSE; - - LOG_V( "Begin\n"); - - if (bufin == NULL || decode_params == NULL ) { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - /* Chainup parent method. - We are not chaining up to parent method for now. - */ -#if 0 - if (parent_class->decode) { - return parent_class->decode(mix, bufin, bufincnt, decode_params); - } -#endif - ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); - if (ret != MIX_RESULT_SUCCESS) { - // never happen - return MIX_RESULT_FAIL; - } - - ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); - if (ret != MIX_RESULT_SUCCESS) { - // never happen - return MIX_RESULT_FAIL; - } - - //From now on, we exit this function through cleanup: - LOG_V( "Locking\n"); - Lock(); - - LOG_I("ts after mix_videodecodeparams_get_timestamp() = %"UINT64_FORMAT"\n", ts); - - for (int i = 0; i < bufincnt; i++) { - LOG_V("decode buffer %d in total %d \n", i, bufincnt); - // decode a buffer at a time - ret = _decode_a_buffer( - bufin[i], - ts, - discontinuity, - ((i == bufincnt - 1) ? 
decode_params->complete_frame : 0)); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videofmt_mp42_decode_a_buffer failed.\n"); - break; - } - } - - LOG_V( "Unlocking\n"); - Unlock(); - LOG_V( "End\n"); - return ret; -} - - -MIX_RESULT MixVideoFormat_MP42::Flush() { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V("Begin\n"); - - Lock(); - // drop any decode-pending picture, and ignore return value - _decode_end(TRUE); - - /* - * Clear parse_in_progress flag and current timestamp - */ - this->parse_in_progress = FALSE; - this->discontinuity_frame_in_progress = FALSE; - this->current_timestamp = (uint64)-1; - this->next_nvop_for_PB_frame = FALSE; - - for (int idx = 0; idx < 2; idx++) { - if (this->reference_frames[idx] != NULL) { - mix_videoframe_unref(this->reference_frames[idx]); - this->reference_frames[idx] = NULL; - } - } - if (this->last_frame) { - mix_videoframe_unref(this->last_frame); - this->last_frame = NULL; - } - - /* Call parser flush */ - vbp_flush(this->parser_handle); - Unlock(); - LOG_V("End\n"); - return ret; -} - -MIX_RESULT MixVideoFormat_MP42::EndOfStream() { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V("Begin\n"); - Lock(); - _decode_end(FALSE); - ret = mix_framemanager_eos(this->framemgr); - Unlock(); - LOG_V("End\n"); - return ret; -} - - -MIX_RESULT MixVideoFormat_MP42::_handle_ref_frames( - enum _picture_type frame_type, MixVideoFrame * current_frame) { - LOG_V("Begin\n"); - if (current_frame == NULL) { - return MIX_RESULT_NULL_PTR; - } - switch (frame_type) { - case MP4_VOP_TYPE_I: - case MP4_VOP_TYPE_P: - LOG_V("Refing reference frame %x\n", (uint) current_frame); - mix_videoframe_ref(current_frame); - - /* should only happen on first frame */ - if (this->reference_frames[0] == NULL) { - this->reference_frames[0] = current_frame; - /* should only happen on second frame */ - } else if (this->reference_frames[1] == NULL) { - this->reference_frames[1] = current_frame; - } else { - LOG_V("Releasing reference frame %x\n", - (uint) this->reference_frames[0]); - mix_videoframe_unref(this->reference_frames[0]); - this->reference_frames[0] = this->reference_frames[1]; - this->reference_frames[1] = current_frame; - } - break; - case MP4_VOP_TYPE_B: - case MP4_VOP_TYPE_S: - default: - break; - - } - - LOG_V("End\n"); - - return MIX_RESULT_SUCCESS; -} - diff --git a/mix_video/src/mixvideoformat_mp42.h b/mix_video/src/mixvideoformat_mp42.h deleted file mode 100644 index 9d00d1a..0000000 --- a/mix_video/src/mixvideoformat_mp42.h +++ /dev/null @@ -1,95 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. 
Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEOFORMAT_MP42_H__ -#define __MIX_VIDEOFORMAT_MP42_H__ - -#include "mixvideoformat.h" -#include "mixvideoframe_private.h" - -//Note: this is only a max limit. Real number of surfaces allocated is calculated in mix_videoformat_mp42_initialize() -#define MIX_VIDEO_MP42_SURFACE_NUM 8 - -/* - * Type macros. - */ -#define MIX_VIDEOFORMAT_MP42(obj) (reinterpret_cast(obj)) -#define MIX_IS_VIDEOFORMAT_MP42(obj) (NULL != MIX_VIDEOFORMAT_MP42(obj)) - -class MixVideoFormat_MP42 : public MixVideoFormat { -public: - MixVideoFormat_MP42(); - virtual ~MixVideoFormat_MP42(); - - virtual MIX_RESULT Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); - virtual MIX_RESULT Decode( - MixBuffer * bufin[], int bufincnt, - MixVideoDecodeParams * decode_params); - virtual MIX_RESULT Flush(); - virtual MIX_RESULT EndOfStream(); - -private: - MIX_RESULT _handle_ref_frames( - enum _picture_type frame_type, MixVideoFrame * current_frame); - MIX_RESULT _release_input_buffers(uint64 timestamp); - MIX_RESULT _update_config_params(vbp_data_mp42 *data); - MIX_RESULT _initialize_va(vbp_data_mp42 *data); - MIX_RESULT _decode_a_slice( - vbp_data_mp42* data, vbp_picture_data_mp42* pic_data); - MIX_RESULT _decode_end(bool drop_picture); - MIX_RESULT _decode_continue(vbp_data_mp42 *data); - MIX_RESULT _decode_begin(vbp_data_mp42* data); - MIX_RESULT _decode_a_buffer( - MixBuffer * bufin, uint64 ts, bool discontinuity,bool complete_frame); - -public: - /*< public > */ - MixVideoFrame * reference_frames[2]; - MixVideoFrame * last_frame; - int last_vop_coding_type; - uint last_vop_time_increment; - - /* indicate if future n-vop is a placeholder of a packed frame */ - bool next_nvop_for_PB_frame; - - /* indicate if iq_matrix_buffer is sent to driver */ - bool iq_matrix_buf_sent; -}; - - -/** - * mix_videoformat_mp42_new: - * @returns: A newly allocated instance of #MixVideoFormat_MP42 - * - * Use this method to create new instance of #MixVideoFormat_MP42 - */ -MixVideoFormat_MP42 *mix_videoformat_mp42_new(void); - -/** - * mix_videoformat_mp42_ref: - * @mix: object to add reference - * @returns: the MixVideoFormat_MP42 instance where reference count has been increased. - * - * Add reference count. - */ -MixVideoFormat_MP42 *mix_videoformat_mp42_ref(MixVideoFormat_MP42 * mix); - -/** - * mix_videoformat_mp42_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -MixVideoFormat_MP42 *mix_videoformat_mp42_unref(MixVideoFormat_MP42 * mix); - -#endif /* __MIX_VIDEOFORMAT_MP42_H__ */ diff --git a/mix_video/src/mixvideoformat_vc1.cpp b/mix_video/src/mixvideoformat_vc1.cpp deleted file mode 100644 index eeb5bf9..0000000 --- a/mix_video/src/mixvideoformat_vc1.cpp +++ /dev/null @@ -1,1133 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#include "mixvideolog.h" - -#include "mixvideoformat_vc1.h" -#ifndef ANDROID -#include -#endif - - -#include - - -#ifdef MIX_LOG_ENABLE -static int mix_video_vc1_counter = 0; -#endif - - -MixVideoFormat_VC1::MixVideoFormat_VC1() { - this->reference_frames[0] = NULL; - this->reference_frames[1] = NULL; -} - -MixVideoFormat_VC1::~MixVideoFormat_VC1() { - /* clean up here. */ - Lock(); - //surfacepool is deallocated by parent - //inputbufqueue is deallocated by parent - //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces - //Unref our reference frames; - for (int i = 0; i < 2; i++) { - if (this->reference_frames[i] != NULL) - { - mix_videoframe_unref(this->reference_frames[i]); - this->reference_frames[i] = NULL; - } - } - - //Reset state - this->initialized = TRUE; - this->parse_in_progress = FALSE; - this->discontinuity_frame_in_progress = FALSE; - this->current_timestamp = (uint64)-1; - - //Close the parser - if (this->parser_handle) - { - vbp_close(this->parser_handle); - this->parser_handle = NULL; - } - - Unlock(); -} - - -MixVideoFormat_VC1 * mix_videoformat_vc1_new(void) { - return new MixVideoFormat_VC1(); -} - -MixVideoFormat_VC1 * mix_videoformat_vc1_ref(MixVideoFormat_VC1 * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} -MixVideoFormat_VC1 *mix_videoformat_vc1_unref(MixVideoFormat_VC1 * mix) { - if (NULL != mix) - return MIX_VIDEOFORMAT_VC1(mix->Unref()); - else - return mix; -} - -MIX_RESULT MixVideoFormat_VC1::_update_seq_header( - MixVideoConfigParamsDec* config_params, - MixIOVec *header) { - uint width = 0; - uint height = 0; - - int i = 0; - uchar* p = NULL; - MIX_RESULT res = MIX_RESULT_SUCCESS; - - if (!config_params || !header) { - LOG_E( "NUll pointer passed in\n"); - return (MIX_RESULT_NULL_PTR); - } - - p = header->data; - - res = mix_videoconfigparamsdec_get_picture_res( - config_params, &width, &height); - - if (MIX_RESULT_SUCCESS != res) { - return res; - } - - /* Check for start codes. If one exist, then this is VC-1 and not WMV. */ - while (i < header->data_size - 2) { - if ((p[i] == 0) && (p[i + 1] == 0) && (p[i + 2] == 1)) { - return MIX_RESULT_SUCCESS; - } - i++; - } - -// p = reinterpret_cast(g_malloc0(header->data_size + 9)); - p = reinterpret_cast(malloc(header->data_size + 9)); - - if (!p) { - LOG_E( "Cannot allocate memory\n"); - return MIX_RESULT_NO_MEMORY; - } - memset(p, 0, header->data_size + 9); - - /* If we get here we have 4+ bytes of codec data that must be formatted */ - /* to pass through as an RCV sequence header. */ - p[0] = 0; - p[1] = 0; - p[2] = 1; - p[3] = 0x0f; /* Start code. 
*/ - - p[4] = (width >> 8) & 0x0ff; - p[5] = width & 0x0ff; - p[6] = (height >> 8) & 0x0ff; - p[7] = height & 0x0ff; - - memcpy(p + 8, header->data, header->data_size); - *(p + header->data_size + 8) = 0x80; - - free(header->data); - header->data = p; - header->data_size = header->data_size + 9; - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT MixVideoFormat_VC1::_update_config_params(vbp_data_vc1 *data) { - if (this->picture_width == 0 || - this->picture_height == 0) { - this->picture_width = data->se_data->CODED_WIDTH; - this->picture_height = data->se_data->CODED_HEIGHT; - mix_videoconfigparamsdec_set_picture_res( - this->config_params, - this->picture_width, - this->picture_height); - } - - // scaling has been performed on the decoded image. - mix_videoconfigparamsdec_set_video_range(this->config_params, 1); - uint8 color_matrix; - switch (data->se_data->MATRIX_COEF) { - case 1: - color_matrix = VA_SRC_BT709; - break; - // ITU-R BT.1700, ITU-R BT.601-5, and SMPTE 293M-1996. - case 6: - color_matrix = VA_SRC_BT601; - break; - default: - // unknown color matrix, set to 0 so color space flag will not be set. - color_matrix = 0; - break; - } - mix_videoconfigparamsdec_set_color_matrix(this->config_params, color_matrix); - mix_videoconfigparamsdec_set_pixel_aspect_ratio( - this->config_params, - data->se_data->ASPECT_HORIZ_SIZE, - data->se_data->ASPECT_VERT_SIZE); - - mix_videoconfigparamsdec_set_bit_rate( - this->config_params, - data->se_data->bit_rate); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoFormat_VC1::_initialize_va(vbp_data_vc1 *data) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - VAConfigAttrib attrib; - VAProfile va_profile; - - LOG_V( "Begin\n"); - if (this->va_initialized) { - LOG_W("va already initialized.\n"); - return MIX_RESULT_SUCCESS; - } - - //We are requesting RT attributes - attrib.type = VAConfigAttribRTFormat; - attrib.value = VA_RT_FORMAT_YUV420; - - //Initialize and save the VA config ID - switch (data->se_data->PROFILE) { - case 0: - va_profile = VAProfileVC1Simple; - break; - case 1: - va_profile = VAProfileVC1Main; - break; - - default: - va_profile = VAProfileVC1Advanced; - break; - } - - vret = vaCreateConfig( - this->va_display, - va_profile, - VAEntrypointVLD, - &attrib, - 1, - &(this->va_config)); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E("vaCreateConfig failed\n"); - goto cleanup; - } - - - //Check for loop filtering - if (data->se_data->LOOPFILTER == 1) - this->loopFilter = TRUE; - else - this->loopFilter = FALSE; - - LOG_V( "loop filter is %d, TFCNTRFLAG is %d\n", data->se_data->LOOPFILTER, data->se_data->TFCNTRFLAG); - - if ((data->se_data->MAXBFRAMES > 0) || (data->se_data->PROFILE == 3) || (data->se_data->PROFILE == 1)) { - //If Advanced profile, have to assume B frames may be present, since MAXBFRAMES is not valid for this prof - this->haveBframes = TRUE; - } - else { - this->haveBframes = FALSE; - } - - //Calculate VC1 numSurfaces based on max number of B frames or - // MIX_VIDEO_VC1_SURFACE_NUM, whichever is less - - //Adding 1 to work around VBLANK issue - this->va_num_surfaces = 1 + this->extra_surfaces + ((3 + (this->haveBframes ? 1 : 0) < - MIX_VIDEO_VC1_SURFACE_NUM) ? - (3 + (this->haveBframes ? 1 : 0)) - : MIX_VIDEO_VC1_SURFACE_NUM); - - this->va_surfaces = new VASurfaceID[this->va_num_surfaces]; - if (this->va_surfaces == NULL) { - ret = MIX_RESULT_FAIL; - LOG_E( "parent->va_surfaces == NULL. 
\n"); - goto cleanup; - } - - vret = vaCreateSurfaces( - this->va_display, - VA_RT_FORMAT_YUV420, - this->picture_width, - this->picture_height, - this->va_surfaces , - this->va_num_surfaces, NULL , 0); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error allocating surfaces\n"); - goto cleanup; - } - - LOG_V( "Created %d libva surfaces\n", this->va_num_surfaces); - - //Initialize the surface pool - ret = mix_surfacepool_initialize( - this->surfacepool, - this->va_surfaces, - this->va_num_surfaces, - this->va_display); - - switch (ret) { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing. - default: - ret = MIX_RESULT_ALREADY_INIT; - LOG_E( "Error init surface pool\n"); - goto cleanup; - break; - } - - //Initialize and save the VA context ID - //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2 - vret = vaCreateContext( - this->va_display, - this->va_config, - this->picture_width, - this->picture_height, - 0, - this->va_surfaces, - this->va_num_surfaces, - &(this->va_context)); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error initializing video driver\n"); - goto cleanup; - } - - LOG_V( "mix_video vinfo: Content type %s\n", (data->se_data->INTERLACE) ? "interlaced" : "progressive"); - LOG_V( "mix_video vinfo: Content width %d, height %d\n", this->picture_width, this->picture_height); - LOG_V( "mix_video vinfo: MAXBFRAMES %d (note that for Advanced profile, MAXBFRAMES can be zero and there still can be B frames in the content)\n", data->se_data->MAXBFRAMES); - LOG_V( "mix_video vinfo: PROFILE %d, LEVEL %d\n", data->se_data->PROFILE, data->se_data->LEVEL); - - this->va_initialized = TRUE; -cleanup: - /* nothing to clean up */ - - return ret; - -} - -MIX_RESULT MixVideoFormat_VC1::Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display) { - - uint32 pret = 0; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - enum _vbp_parser_type ptype = VBP_VC1; - vbp_data_vc1 *data = NULL; - MixIOVec *header = NULL; - - //TODO Partition this method into smaller methods - if (config_params == NULL || frame_mgr == NULL || - !input_buf_pool || !surface_pool || !va_display) { - LOG_E( "NUll pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - LOG_V( "Begin\n"); - - // chain up parent method - MixVideoFormat::Initialize(config_params, frame_mgr, input_buf_pool, - surface_pool, va_display); - - if (ret != MIX_RESULT_SUCCESS) { - return ret; - } - LOG_V( "Locking\n"); - //From now on, we exit this function through cleanup: - Lock(); - - this->surfacepool = mix_surfacepool_new(); - *surface_pool = this->surfacepool; - - if (this->surfacepool == NULL) - { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "parent->surfacepool == NULL.\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params, - &this->extra_surfaces); - - if (ret != MIX_RESULT_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Cannot get extra surface allocation setting\n"); - goto cleanup; - } - - - //Load the bitstream parser - pret = vbp_open(ptype, &(this->parser_handle)); - - if (!(pret == VBP_OK)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error opening parser\n"); - goto cleanup; - } - - LOG_V( "Opened parser\n"); - - ret = mix_videoconfigparamsdec_get_header(config_params, - &header); - 
- if ((ret != MIX_RESULT_SUCCESS) || (header == NULL)) {
- ret = MIX_RESULT_SUCCESS;
- LOG_W( "Codec data is not available in the configuration parameter.\n");
-
- goto cleanup;
- }
-
- LOG_V( "Calling parse on header data, handle %d\n", (int)this->parser_handle);
- // data is not populated until vbp_query below; interlace is logged in _initialize_va()
- LOG_V( "mix_video vinfo: Content type %s\n", (header->data_size > 8) ? "VC-1" : "WMV");
-
- ret = _update_seq_header(config_params, header);
- if (ret != MIX_RESULT_SUCCESS) {
- ret = MIX_RESULT_FAIL;
- LOG_E( "Error updating sequence header\n");
- goto cleanup;
- }
-
- pret = vbp_parse(this->parser_handle, header->data,
- header->data_size, TRUE);
-
- if (pret != VBP_OK) {
- ret = MIX_RESULT_FAIL;
- LOG_E( "Error parsing header data, size %d\n", header->data_size);
- goto cleanup;
- }
-
-
- LOG_V( "Parsed header\n");
- //Get the header data and save
- pret = vbp_query(this->parser_handle, (void **)&data);
-
- if ((pret != VBP_OK) || (data == NULL)) {
- ret = MIX_RESULT_FAIL;
- LOG_E( "Error reading parsed header data\n");
- goto cleanup;
- }
- LOG_V( "Queried parser for header data\n");
-
- _update_config_params(data);
-
- ret = _initialize_va(data);
- if (ret != MIX_RESULT_SUCCESS) {
- LOG_E( "Error initializing VA.\n");
- goto cleanup;
- }
-
-cleanup:
- if (ret != MIX_RESULT_SUCCESS) {
- pret = vbp_close(this->parser_handle);
- this->parser_handle = NULL;
- this->initialized = FALSE;
- } else {
- this->initialized = TRUE;
- }
-
- if (header != NULL) {
- if (header->data != NULL)
- delete[](header->data);
- delete(header);
- header = NULL;
- }
-
- this->lastFrame = NULL;
- LOG_V( "Unlocking\n");
- Unlock();
- LOG_V( "End\n");
- return ret;
-}
-
-MIX_RESULT MixVideoFormat_VC1::Decode(
- MixBuffer * bufin[], int bufincnt,
- MixVideoDecodeParams * decode_params) {
-
- uint32 pret = 0;
- int i = 0;
- MIX_RESULT ret = MIX_RESULT_SUCCESS;
- uint64 ts = 0;
- vbp_data_vc1 *data = NULL;
- bool discontinuity = FALSE;
- if (bufin == NULL || decode_params == NULL) {
- LOG_E( "Null pointer passed in\n");
- return MIX_RESULT_NULL_PTR;
- }
- //TODO remove iovout and iovoutcnt; they are not used (need to remove from MixVideo/MI-X API too)
- LOG_V( "Begin\n");
- /* Chain up to the parent method.
- We are not chaining up to the parent method for now.
- */ -#if 0 - if (parent_class->decode) { - return parent_class->decode(mix, bufin, bufincnt, - decode_params); - } -#endif - - ret = mix_videodecodeparams_get_timestamp(decode_params, &ts); - if (ret != MIX_RESULT_SUCCESS) { - return MIX_RESULT_FAIL; - } - - ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity); - if (ret != MIX_RESULT_SUCCESS) { - return MIX_RESULT_FAIL; - } - - //From now on, we exit this function through cleanup: - LOG_V( "Locking\n"); - Lock(); - - this->current_timestamp = ts; - this->discontinuity_frame_in_progress = discontinuity; - LOG_V( "Starting current frame %d, timestamp %"UINT64_FORMAT"\n", mix_video_vc1_counter++, ts); - - for (i = 0; i < bufincnt; i++) { - LOG_V( "Calling parse for current frame, parse handle %d, buf %x, size %d\n", - (int)this->parser_handle, (uint)bufin[i]->data, bufin[i]->size); - pret = vbp_parse(this->parser_handle, bufin[i]->data, bufin[i]->size, FALSE); - LOG_V( "Called parse for current frame\n"); - if (pret != VBP_OK) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error parsing data\n"); - goto CLEAN_UP; - } - //query for data - pret = vbp_query(this->parser_handle, (void **) &data); - if ((pret != VBP_OK) || (data == NULL)) { - ret = MIX_RESULT_FAIL; - LOG_E( "Error getting parser data\n"); - goto CLEAN_UP; - } - if (this->va_initialized == FALSE) { - _update_config_params(data); - - LOG_V("try initializing VA...\n"); - ret = _initialize_va(data); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V("mix_videofmt_vc1_initialize_va failed.\n"); - goto CLEAN_UP; - } - } - - LOG_V( "Called query for current frame\n"); - - //process and decode data - ret = _process_decode(data, ts, discontinuity); - if (ret != MIX_RESULT_SUCCESS) - { - //We log this but continue since we need to complete our processing of input buffers - LOG_E( "Process_decode failed.\n"); - goto CLEAN_UP; - } - - } - -CLEAN_UP: - LOG_V( "Unlocking\n"); - Unlock(); - LOG_V( "End\n"); - return ret; -} - - -MIX_RESULT MixVideoFormat_VC1::_decode_a_picture( - vbp_data_vc1 *data, int pic_index, MixVideoFrame *frame) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus vret = VA_STATUS_SUCCESS; - VADisplay vadisplay = NULL; - VAContextID vacontext; - uint buffer_id_cnt = 0; - VABufferID *buffer_ids = NULL; - vbp_picture_data_vc1* pic_data = &(data->pic_data[pic_index]); - VAPictureParameterBufferVC1 *pic_params = pic_data->pic_parms; - enum _picture_type frame_type = VC1_PTYPE_I; - ulong surface = 0; - - if (pic_params == NULL) { - ret = MIX_RESULT_NULL_PTR; - LOG_E( "Error reading parser data\n"); - goto CLEAN_UP; - } - - LOG_V( "num_slices is %d, allocating %d buffer_ids\n", pic_data->num_slices, (pic_data->num_slices * 2) + 2); - - //Set up reference frames for the picture parameter buffer - //Set the picture type (I, B or P frame) - frame_type = (_picture_type)pic_params->picture_fields.bits.picture_type; - - //Check for B frames after a seek - //We need to have both reference frames in hand before we can decode a B frame - //If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME - //Note: demuxer should do the right thing and only seek to I frame, so we should - // not get P frame first, but may get B frames after the first I frame - if (frame_type == VC1_PTYPE_B) { - if (this->reference_frames[1] == NULL) { - LOG_E( "Insufficient reference frames for B frame\n"); - ret = MIX_RESULT_DROPFRAME; - goto CLEAN_UP; - } - } - - buffer_ids = reinterpret_cast(malloc(sizeof(VABufferID) * ((pic_data->num_slices * 2) + 2))); - if (buffer_ids == NULL) { - 
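/* Sizing note for the allocation above: two buffer IDs are needed per slice
   (one VASliceParameterBufferVC1 plus one slice-data buffer), plus two more
   for the picture parameter buffer and the optional bitplane buffer; e.g.
   num_slices == 4 requires 4 * 2 + 2 = 10 VABufferIDs. */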
LOG_E( "Cannot allocate buffer IDs\n"); - ret = MIX_RESULT_NO_MEMORY; - goto CLEAN_UP; - } - - LOG_V( "Getting a new surface\n"); - LOG_V( "frame type is %d\n", frame_type); - - //Get our surface ID from the frame object - ret = mix_videoframe_get_frame_id(frame, &surface); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting surface ID from frame object\n"); - goto CLEAN_UP; - } - - //Get a frame from the surface pool - if (0 == pic_index) { - //Set the frame type for the frame object (used in reordering by frame manager) - switch (frame_type) { - case VC1_PTYPE_I: // I frame type - case VC1_PTYPE_P: // P frame type - case VC1_PTYPE_B: // B frame type - ret = mix_videoframe_set_frame_type(frame, (MixFrameType)frame_type); - break; - case VC1_PTYPE_BI: // BI frame type - ret = mix_videoframe_set_frame_type(frame, TYPE_B); - break; - //Not indicated here - case VC1_PTYPE_SKIPPED: - default: - break; - } - } - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error setting frame type on frame\n"); - goto CLEAN_UP; - } - - LOG_V( "Setting reference frames in picparams, frame_type = %d\n", frame_type); - //TODO Check if we need to add more handling of B or P frames when reference frames are not set up (such as after flush/seek) - - switch (frame_type) { - case VC1_PTYPE_I: // I frame type - /* forward and backward reference pictures are not used but just set to current - surface to be in consistence with test suite - */ - pic_params->forward_reference_picture = surface; - pic_params->backward_reference_picture = surface; - LOG_V( "I frame, surface ID %u\n", (uint)frame->frame_id); - LOG_V( "mix_video vinfo: Frame type is I\n"); - break; - case VC1_PTYPE_P: // P frame type - // check REFDIST in the picture parameter buffer - if (0 != pic_params->reference_fields.bits.reference_distance_flag && - 0 != pic_params->reference_fields.bits.reference_distance) { - /* The previous decoded frame (distance is up to 16 but not 0) is used - for reference, as we don't allocate that many surfaces so the reference picture - could have been overwritten and hence not avaiable for reference. - */ - LOG_E( "reference distance is not 0!"); - ret = MIX_RESULT_DROPFRAME; - goto CLEAN_UP; - } - if (1 == pic_index) { - // handle interlace field coding case - if (1 == pic_params->reference_fields.bits.num_reference_pictures || - 1 == pic_params->reference_fields.bits.reference_field_pic_indicator) { - /* two reference fields or the second closest I/P field is used for - prediction. Set forward reference picture to INVALID so it will be - updated to a valid previous reconstructed reference frame later. - */ - pic_params->forward_reference_picture = VA_INVALID_SURFACE; - } else { - /* the closest I/P is used for reference so it must be the - complementary field in the same surface. 
- */ - pic_params->forward_reference_picture = surface; - } - } - if (VA_INVALID_SURFACE == pic_params->forward_reference_picture) { - if (this->reference_frames[1]) { - pic_params->forward_reference_picture = this->reference_frames[1]->frame_id; - } else if (this->reference_frames[0]) { - pic_params->forward_reference_picture = this->reference_frames[0]->frame_id; - } else { - ret = MIX_RESULT_DROPFRAME; - LOG_E( "Error could not find reference frames for P frame\n"); - goto CLEAN_UP; - } - } - pic_params->backward_reference_picture = VA_INVALID_SURFACE; - -#ifdef MIX_LOG_ENABLE /* this is to fix a crash when MIX_LOG_ENABLE is set */ - if (this->reference_frames[0] && frame) { - LOG_V( "P frame, surface ID %u, forw ref frame is %u\n", - (uint)frame->frame_id, (uint)this->reference_frames[0]->frame_id); - } -#endif - LOG_V( "mix_video vinfo: Frame type is P\n"); - break; - - case VC1_PTYPE_B: // B frame type - LOG_V( "B frame, forw ref %d, back ref %d\n", - (uint)this->reference_frames[0]->frame_id, - (uint)this->reference_frames[1]->frame_id); - - if (!this->haveBframes) {//We don't expect B frames and have not allocated a surface - // for the extra ref frame so this is an error - ret = MIX_RESULT_DROPFRAME; - LOG_E( "Unexpected B frame, cannot process\n"); - goto CLEAN_UP; - } - - pic_params->forward_reference_picture = this->reference_frames[0]->frame_id; - pic_params->backward_reference_picture = this->reference_frames[1]->frame_id; - - LOG_V( "B frame, surface ID %u, forw ref %d, back ref %d\n", - (uint)frame->frame_id, (uint)this->reference_frames[0]->frame_id, - (uint)this->reference_frames[1]->frame_id); - LOG_V( "mix_video vinfo: Frame type is B\n"); - break; - case VC1_PTYPE_BI: - pic_params->forward_reference_picture = VA_INVALID_SURFACE; - pic_params->backward_reference_picture = VA_INVALID_SURFACE; - LOG_V( "BI frame\n"); - LOG_V( "mix_video vinfo: Frame type is BI\n"); - break; - case VC1_PTYPE_SKIPPED: - //Will never happen here - break; - default: - LOG_V( "Hit default\n"); - break; - } - - //Loop filter handling - if (this->loopFilter) { - LOG_V( "Setting in loop decoded picture to current frame\n"); - LOG_V( "Double checking picparams inloop filter is %d\n", - pic_params->entrypoint_fields.bits.loopfilter); - pic_params->inloop_decoded_picture = frame->frame_id; - } else { - LOG_V( "Setting in loop decoded picture to invalid\n"); - pic_params->inloop_decoded_picture = VA_INVALID_SURFACE; - } - //Libva buffer set up - vadisplay = this->va_display; - vacontext = this->va_context; - LOG_V( "Calling vaBeginPicture\n"); - - //Now we can begin the picture - vret = vaBeginPicture(vadisplay, vacontext, surface); - - if (vret != VA_STATUS_SUCCESS) - { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaBeginPicture\n"); - goto CLEAN_UP; - } - - LOG_V( "Creating libva picture parameter buffer\n"); - - //First the picture parameter buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VAPictureParameterBufferType, - sizeof(VAPictureParameterBufferVC1), - 1, - pic_params, - &buffer_ids[buffer_id_cnt]); - - buffer_id_cnt++; - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - LOG_V( "Creating libva bitplane buffer\n"); - - if (pic_params->bitplane_present.value) { - //Then the bitplane buffer - vret = vaCreateBuffer( - vadisplay, - vacontext, - VABitPlaneBufferType, - pic_data->size_bitplanes, - 1, - pic_data->packed_bitplanes, - &buffer_ids[buffer_id_cnt]); - 
buffer_id_cnt++; - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - } - - //Now for slices - for (uint32 i = 0; i < pic_data->num_slices; i++) { - LOG_V( "Creating libva slice parameter buffer, for slice %d\n", i); - - //Do slice parameters - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceParameterBufferType, - sizeof(VASliceParameterBufferVC1), - 1, - &(pic_data->slc_data[i].slc_parms), - &buffer_ids[buffer_id_cnt]); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - - buffer_id_cnt++; - - LOG_V( "Creating libva slice data buffer for slice %d, using slice address %x, with offset %d and size %u\n", i, (uint)pic_data->slc_data[i].buffer_addr, pic_data->slc_data[i].slc_parms.slice_data_offset, pic_data->slc_data[i].slice_size); - - - //Do slice data - vret = vaCreateBuffer( - vadisplay, - vacontext, - VASliceDataBufferType, - //size - pic_data->slc_data[i].slice_size, - //num_elements - 1, - //slice data buffer pointer - //Note that this is the original data buffer ptr; - // offset to the actual slice data is provided in - // slice_data_offset in VASliceParameterBufferVC1 - pic_data->slc_data[i].buffer_addr + pic_data->slc_data[i].slice_offset, - &buffer_ids[buffer_id_cnt]); - - buffer_id_cnt++; - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaCreateBuffer\n"); - goto CLEAN_UP; - } - } - - LOG_V( "Calling vaRenderPicture\n"); - - //Render the picture - vret = vaRenderPicture( - vadisplay, - vacontext, - buffer_ids, - buffer_id_cnt); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaRenderPicture\n"); - goto CLEAN_UP; - } - - LOG_V( "Calling vaEndPicture\n"); - - //End picture - vret = vaEndPicture(vadisplay, vacontext); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaEndPicture\n"); - goto CLEAN_UP; - } - -#if 0 /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */ - LOG_V( "Calling vaSyncSurface\n"); - - //Decode the picture - vret = vaSyncSurface(vadisplay, surface); - - if (vret != VA_STATUS_SUCCESS) { - ret = MIX_RESULT_FAIL; - LOG_E( "Video driver returned error from vaSyncSurface\n"); - goto CLEAN_UP; - } -#endif - -CLEAN_UP: - if (NULL != buffer_ids) - free(buffer_ids); - return ret; -} - - -MIX_RESULT MixVideoFormat_VC1::Flush() { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V( "Begin\n"); - uint32 pret = 0; - /* Chainup parent method. - We are not chaining up to parent method for now. - */ -#if 0 - if (parent_class->flush) - { - return parent_class->flush(mix, msg); - } -#endif - Lock(); - - //Clear the contents of inputbufqueue - - this->discontinuity_frame_in_progress = FALSE; - this->current_timestamp = (uint64)-1; - - int i = 0; - for (; i < 2; i++) { - if (this->reference_frames[i] != NULL) { - mix_videoframe_unref(this->reference_frames[i]); - this->reference_frames[i] = NULL; - } - } - - //Call parser flush - pret = vbp_flush(this->parser_handle); - if (pret != VBP_OK) - ret = MIX_RESULT_FAIL; - - Unlock(); - LOG_V( "End\n"); - return ret; -} - -MIX_RESULT MixVideoFormat_VC1::EndOfStream() { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - /* Chainup parent method. - We are not chaining up to parent method for now. 
- */ -#if 0 - if (parent_class->eos) - { - return parent_class->eos(mix, msg); - } -#endif - - //Call Frame Manager with _eos() - ret = mix_framemanager_eos(this->framemgr); - LOG_V( "End\n"); - return ret; -} - - -MIX_RESULT MixVideoFormat_VC1::_handle_ref_frames( - enum _picture_type frame_type, MixVideoFrame * current_frame) { - LOG_V( "Begin\n"); - if (NULL == current_frame) { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - switch (frame_type) { - case VC1_PTYPE_I: // I frame type - case VC1_PTYPE_P: // P frame type - LOG_V( "Refing reference frame %x\n", (uint) current_frame); - mix_videoframe_ref(current_frame); - - //If we have B frames, we need to keep forward and backward reference frames - if (this->haveBframes) { - if (this->reference_frames[0] == NULL) { //should only happen on first frame - this->reference_frames[0] = current_frame; - //this->reference_frames[1] = NULL; - } else if (this->reference_frames[1] == NULL) {//should only happen on second frame - this->reference_frames[1] = current_frame; - } else { - LOG_V( "Releasing reference frame %x\n", (uint) this->reference_frames[0]); - mix_videoframe_unref(this->reference_frames[0]); - this->reference_frames[0] = this->reference_frames[1]; - this->reference_frames[1] = current_frame; - } - } else {//No B frames in this content, only need to keep the forward reference frame - LOG_V( "Releasing reference frame %x\n", (uint) this->reference_frames[0]); - if (this->reference_frames[0] != NULL) - mix_videoframe_unref(this->reference_frames[0]); - this->reference_frames[0] = current_frame; - } - break; - case VC1_PTYPE_B: // B or BI frame type (should not happen) - case VC1_PTYPE_BI: - default: - LOG_E( "Wrong frame type for handling reference frames\n"); - return MIX_RESULT_FAIL; - break; - - } - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoFormat_VC1::_process_decode( - vbp_data_vc1 *data, uint64 timestamp, bool discontinuity) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - bool unrefVideoFrame = FALSE; - MixVideoFrame *frame = NULL; - int num_pictures = 0; - enum _picture_type frame_type = VC1_PTYPE_I; - - //TODO Partition this method into smaller methods - LOG_V( "Begin\n"); - if (NULL == data) { - LOG_E( "Null pointer passed in\n"); - return MIX_RESULT_NULL_PTR; - } - - if (0 == data->num_pictures || NULL == data->pic_data) { - return MIX_RESULT_INVALID_PARAM; - } - - //Check for skipped frame - //For skipped frames, we will reuse the last P or I frame surface and treat as P frame - if (data->pic_data[0].picture_is_skipped == VC1_PTYPE_SKIPPED) { - LOG_V( "mix_video vinfo: Frame type is SKIPPED\n"); - if (this->lastFrame == NULL) { - //we shouldn't get a skipped frame before we are able to get a real frame - LOG_E( "Error for skipped frame, prev frame is NULL\n"); - ret = MIX_RESULT_DROPFRAME; - goto CLEAN_UP; - } - - //We don't worry about this memory allocation because SKIPPED is not a common case - //Doing the allocation on the fly is a more efficient choice than trying to manage yet another pool - MixVideoFrame *skip_frame = mix_videoframe_new(); - if (skip_frame == NULL) { - ret = MIX_RESULT_NO_MEMORY; - LOG_E( "Error allocating new video frame object for skipped frame\n"); - goto CLEAN_UP; - } - - mix_videoframe_set_is_skipped(skip_frame, TRUE); - //mix_videoframe_ref(skip_frame); - mix_videoframe_ref(this->lastFrame); - ulong frameid = VA_INVALID_SURFACE; - mix_videoframe_get_frame_id(this->lastFrame, &frameid); - mix_videoframe_set_frame_id(skip_frame, frameid); - 
mix_videoframe_set_frame_type(skip_frame, (MixFrameType)VC1_PTYPE_P); - mix_videoframe_set_real_frame(skip_frame, this->lastFrame); - mix_videoframe_set_timestamp(skip_frame, timestamp); - mix_videoframe_set_discontinuity(skip_frame, FALSE); - LOG_V( "Processing skipped frame %x, frame_id set to %d, ts %"UINT64_FORMAT"\n", - (uint)skip_frame, (uint)frameid, timestamp); - //Process reference frames - LOG_V( "Updating skipped frame forward/backward references for libva\n"); - _handle_ref_frames(VC1_PTYPE_P, skip_frame); - //Enqueue the skipped frame using frame manager - ret = mix_framemanager_enqueue(this->framemgr, skip_frame); - goto CLEAN_UP; - } - - ret = mix_surfacepool_get(this->surfacepool, &frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error getting frame from surfacepool\n"); - goto CLEAN_UP; - } - unrefVideoFrame = TRUE; - - // TO DO: handle multiple frames parsed from a sample buffer - num_pictures = (data->num_pictures > 1) ? 2 : 1; - for (int index = 0; index < num_pictures; index++) { - ret = _decode_a_picture(data, index, frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Failed to decode a picture.\n"); - goto CLEAN_UP; - } - } - - //Set the discontinuity flag - mix_videoframe_set_discontinuity(frame, discontinuity); - - //Set the timestamp - mix_videoframe_set_timestamp(frame, timestamp); - - // setup frame structure - if (data->num_pictures > 1) { - if (data->pic_data[0].pic_parms->picture_fields.bits.is_first_field) - mix_videoframe_set_frame_structure(frame, VA_TOP_FIELD); - else - mix_videoframe_set_frame_structure(frame, VA_BOTTOM_FIELD); - } else { - mix_videoframe_set_frame_structure(frame, VA_FRAME_PICTURE); - } - - frame_type = (_picture_type)data->pic_data[0].pic_parms->picture_fields.bits.picture_type; - - //For I or P frames - //Save this frame off for skipped frame handling - if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P)) { - if (this->lastFrame != NULL) { - mix_videoframe_unref(this->lastFrame); - } - this->lastFrame = frame; - mix_videoframe_ref(frame); - } - - //Update the references frames for the current frame - if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P)) {//If I or P frame, update the reference array - LOG_V( "Updating forward/backward references for libva\n"); - _handle_ref_frames(frame_type, frame); - } - - - LOG_V( "Enqueueing the frame with frame manager, timestamp %"UINT64_FORMAT"\n", timestamp); - - //Enqueue the decoded frame using frame manager - ret = mix_framemanager_enqueue(this->framemgr, frame); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( "Error enqueuing frame object\n"); - goto CLEAN_UP; - } - unrefVideoFrame = FALSE; - -CLEAN_UP: - - if (unrefVideoFrame) - mix_videoframe_unref(frame); - LOG_V( "End\n"); - return ret; -} - -MIX_RESULT MixVideoFormat_VC1::_release_input_buffers(uint64 timestamp) { - - LOG_V( "Begin\n"); - - // Nothing to release. Deprecated. - - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; -} - - diff --git a/mix_video/src/mixvideoformat_vc1.h b/mix_video/src/mixvideoformat_vc1.h deleted file mode 100644 index 80659f3..0000000 --- a/mix_video/src/mixvideoformat_vc1.h +++ /dev/null @@ -1,95 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. 
The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEOFORMAT_VC1_H__ -#define __MIX_VIDEOFORMAT_VC1_H__ - -#include "mixvideoformat.h" -#include "mixvideoframe_private.h" - - -//Note: this is only a max limit. Actual number of surfaces allocated is calculated in mix_videoformat_vc1_initialize() -#define MIX_VIDEO_VC1_SURFACE_NUM 8 - -/* - * Type macros. - */ -#define MIX_VIDEOFORMAT_VC1(obj) (reinterpret_cast(obj)) -#define MIX_IS_VIDEOFORMAT_VC1(obj) (NULL != MIX_VIDEOFORMAT_VC1(obj)) - -class MixVideoFormat_VC1 : public MixVideoFormat { -public: - MixVideoFormat_VC1(); - virtual ~MixVideoFormat_VC1(); - - virtual MIX_RESULT Initialize( - MixVideoConfigParamsDec * config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - VADisplay va_display); - virtual MIX_RESULT Decode( - MixBuffer * bufin[], int bufincnt, - MixVideoDecodeParams * decode_params); - virtual MIX_RESULT Flush(); - virtual MIX_RESULT EndOfStream(); - -private: - MIX_RESULT _handle_ref_frames( - enum _picture_type frame_type, MixVideoFrame * current_frame); - MIX_RESULT _process_decode( - vbp_data_vc1 *data, uint64 timestamp, bool discontinuity); - MIX_RESULT _release_input_buffers(uint64 timestamp); - MIX_RESULT _update_seq_header( - MixVideoConfigParamsDec* config_params, MixIOVec *header); - MIX_RESULT _update_config_params(vbp_data_vc1 *data); - MIX_RESULT _decode_a_picture( - vbp_data_vc1 *data, int pic_index, MixVideoFrame *frame); - MIX_RESULT _initialize_va(vbp_data_vc1 *data); -#ifdef YUVDUMP - MIX_RESULT _get_Img_from_surface (MixVideoFrame * frame); -#endif - - -public: - /*< public > */ - - /*< private > */ - MixVideoFrame * reference_frames[2]; - bool haveBframes; - bool loopFilter; - MixVideoFrame * lastFrame; -}; - -/** - * mix_videoformat_vc1_new: - * @returns: A newly allocated instance of #MixVideoFormat_VC1 - * - * Use this method to create new instance of #MixVideoFormat_VC1 - */ -MixVideoFormat_VC1 *mix_videoformat_vc1_new(void); - -/** - * mix_videoformat_vc1_ref: - * @mix: object to add reference - * @returns: the MixVideoFormat_VC1 instance where reference count has been increased. - * - * Add reference count. - */ -MixVideoFormat_VC1 *mix_videoformat_vc1_ref(MixVideoFormat_VC1 * mix); - -/** - * mix_videoformat_vc1_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -MixVideoFormat_VC1 *mix_videoformat_vc1_unref(MixVideoFormat_VC1 * mix); - - -#endif /* __MIX_VIDEOFORMAT_VC1_H__ */ diff --git a/mix_video/src/mixvideoformatenc.cpp b/mix_video/src/mixvideoformatenc.cpp deleted file mode 100644 index b8e1e30..0000000 --- a/mix_video/src/mixvideoformatenc.cpp +++ /dev/null @@ -1,790 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. 
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ -#include -#include "mixvideolog.h" -#include "mixvideoformatenc.h" - -//#define MDEBUG - -MixVideoFormatEnc::MixVideoFormatEnc() - :mLock() - ,initialized(FALSE) - ,framemgr(NULL) - ,surfacepool(NULL) - ,va_display(NULL) - ,va_context(0) - ,va_config(0) - ,mime_type(NULL) - ,frame_rate_num(0) - ,frame_rate_denom(1) - ,picture_width(0) - ,picture_height(0) - ,intra_period(0) - ,initial_qp(0) - ,min_qp(0) - ,bitrate(0) - ,target_percentage(95) - ,window_size(500) - ,share_buf_mode(FALSE) - ,ci_frame_id(NULL) - ,ci_frame_num(0) - ,force_key_frame(FALSE) - ,new_header_required(FALSE) - ,refresh_type(MIX_VIDEO_NONIR) - ,CIR_frame_cnt(15) - ,max_slice_size(0) - ,render_mss_required(FALSE) - ,render_QP_required (FALSE) - ,render_AIR_required(FALSE) - ,render_framerate_required(FALSE) - ,render_bitrate_required(FALSE) - ,drawable(0X0) - ,need_display(TRUE) - ,va_profile(VAProfileH264Baseline) - ,va_entrypoint(VAEntrypointEncSlice) - ,va_format(VA_RT_FORMAT_YUV420) - ,va_rcmode(VA_RC_NONE) - ,level(40) - ,buffer_mode(MIX_BUFFER_ALLOC_NORMAL) - ,buf_info(NULL) - ,inputbufpool(NULL) - ,inputbufqueue(NULL) - ,ref_count(1) { - air_params.air_MBs = 0; - air_params.air_threshold = 0; - air_params.air_auto = 0; -} - -MixVideoFormatEnc::~MixVideoFormatEnc() { - LOG_V( "\n"); - //MiVideo object calls the _deinitialize() for frame manager - if (this->framemgr) { - mix_framemanager_unref(this->framemgr); - this->framemgr = NULL; - } - - if (this->mime_type) { - free(this->mime_type); - } - - if (this->ci_frame_id) - free (this->ci_frame_id); - - if (this->surfacepool) { - mix_surfacepool_deinitialize(this->surfacepool); - mix_surfacepool_unref(this->surfacepool); - this->surfacepool = NULL; - } - - if (this->buffer_mode == MIX_BUFFER_UPSTREAM_ALLOC_CI) { - MixCISharedBufferInfo * ci_tmp = NULL; - if (this->buf_info) { - ci_tmp = reinterpret_cast (this->buf_info); - if (ci_tmp->ci_frame_id) { - free (ci_tmp->ci_frame_id); - ci_tmp->ci_frame_id = NULL; - } - free (ci_tmp); - ci_tmp = NULL; - this->buf_info = NULL; - } - } -} - - -MixVideoFormatEnc * -mix_videoformatenc_new(void) { - return new MixVideoFormatEnc() ; -} - - -MixVideoFormatEnc * -mix_videoformatenc_ref(MixVideoFormatEnc * mix) { - if (NULL != mix) - return mix->Ref(); - else - return NULL; -} - -MixVideoFormatEnc * -mix_videoformatenc_unref(MixVideoFormatEnc * mix) { - if (NULL!=mix) - return mix->Unref(); - else - return NULL; -} - -MIX_RESULT -MixVideoFormatEnc::GetCaps(char *msg) { - LOG_V( "Begin\n"); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT 
-MixVideoFormatEnc::Initialize( - MixVideoConfigParamsEnc* config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - MixUsrReqSurfacesInfo * requested_surface_info, - VADisplay va_display) { - - LOG_V( "Begin\n"); - - if (config_params_enc == NULL) { - LOG_E("config_params_enc == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - //TODO check return values of getter fns for config_params - - this->Lock(); - this->framemgr = frame_mgr; - mix_framemanager_ref(this->framemgr); - - this->va_display = va_display; - - LOG_V("Start to get properities from parent params\n"); - - /* get properties from param (parent) Object*/ - ret = mix_videoconfigparamsenc_get_bit_rate ( - config_params_enc, &(this->bitrate)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_bps\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_frame_rate ( - config_params_enc, &(this->frame_rate_num), &(this->frame_rate_denom)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_frame_rate\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_init_qp ( - config_params_enc, &(this->initial_qp)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_init_qp\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_min_qp ( - config_params_enc, &(this->min_qp)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_min_qp\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_target_percentage( - config_params_enc, &(this->target_percentage)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_target_percentage\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_window_size ( - config_params_enc, &(this->window_size)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_window_size\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_intra_period ( - config_params_enc, &(this->intra_period)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_intra_period\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_picture_res ( - config_params_enc, &(this->picture_width), &(this->picture_height)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_picture_res\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_share_buf_mode ( - config_params_enc, &(this->share_buf_mode)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_share_buf_mode\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - - ret = mix_videoconfigparamsenc_get_ci_frame_info ( - config_params_enc, &(this->ci_frame_id), &(this->ci_frame_num)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_ci_frame_info\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - - /* - * temporarily code here for compatibility with old CI shared buffer solution - */ - - if 
(this->share_buf_mode) { - ret = mix_videoconfigparamsenc_set_buffer_mode (config_params_enc, MIX_BUFFER_UPSTREAM_ALLOC_CI); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E("Failed to mix_videoconfigparamsenc_set_buffer_mode\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - } - - if (this->share_buf_mode && this->ci_frame_id && this->ci_frame_num) { - - MixCISharedBufferInfo * ci_tmp = NULL; - //ci_tmp = (MixCISharedBufferInfo *) g_malloc (sizeof (MixCISharedBufferInfo)); - ci_tmp = (MixCISharedBufferInfo *) new MixCISharedBufferInfo; - if (!ci_tmp) { - return MIX_RESULT_NO_MEMORY; - } - ci_tmp->ci_frame_cnt = this->ci_frame_num; - //ci_tmp->ci_frame_id = g_malloc (ci_tmp->ci_frame_cnt * sizeof (gulong)); - ci_tmp->ci_frame_id = new ulong [ci_tmp->ci_frame_cnt]; - if (!ci_tmp->ci_frame_id) { - return MIX_RESULT_NO_MEMORY; - } - - memcpy (ci_tmp->ci_frame_id, this->ci_frame_id, ci_tmp->ci_frame_cnt * sizeof (ulong)); - ret = mix_videoconfigparamsenc_set_upstream_buffer_info (config_params_enc, MIX_BUFFER_UPSTREAM_ALLOC_CI, (void*)ci_tmp); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_set_upstream_buffer_info\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - free (ci_tmp->ci_frame_id); - ci_tmp->ci_frame_id = NULL; - free (ci_tmp); - ci_tmp = NULL; - - } - - /* - * temporarily code done - */ - - ret = mix_videoconfigparamsenc_get_drawable ( - config_params_enc, &(this->drawable)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_drawable\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_need_display ( - config_params_enc, &(this->need_display)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_drawable\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_rate_control ( - config_params_enc,(MixRateControl*)&(this->va_rcmode)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_rc_mode\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_raw_format ( - config_params_enc, &(this->raw_format)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_format\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_profile ( - config_params_enc, (MixProfile *) &(this->va_profile)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_profile\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_level ( - config_params_enc, &(this->level)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_level\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_CIR_frame_cnt( - config_params_enc, &(this->CIR_frame_cnt)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - - ret = mix_videoconfigparamsenc_get_max_slice_size( - config_params_enc, &(this->max_slice_size)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_max_slice_size\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_refresh_type( - 
config_params_enc, &(this->refresh_type)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_refresh_type\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - ret = mix_videoconfigparamsenc_get_AIR_params( - config_params_enc, &(this->air_params)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_AIR_params\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - - ret = mix_videoconfigparamsenc_get_buffer_mode( - config_params_enc, &(this->buffer_mode)); - - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_buffer_mode\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - if (this->buffer_mode == MIX_BUFFER_UPSTREAM_ALLOC_CI) { - ret = mix_videoconfigparamsenc_get_upstream_buffer_info ( - config_params_enc, this->buffer_mode, &(this->buf_info)); - if (ret != MIX_RESULT_SUCCESS) { - LOG_V ("ret = %d\n", ret); - LOG_E("Failed to mix_videoconfigparamsenc_get_upstream_buffer_info\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - } - - LOG_V("======Video Encode Parent Object properities======:\n"); - LOG_I( "mix->bitrate = %d\n", this->bitrate); - LOG_I( "mix->frame_rate = %d\n", this->frame_rate_denom / this->frame_rate_denom); - LOG_I( "mix->initial_qp = %d\n", this->initial_qp); - LOG_I( "mix->min_qp = %d\n", this->min_qp); - LOG_I( "mix->intra_period = %d\n", this->intra_period); - LOG_I( "mix->picture_width = %d\n", this->picture_width); - LOG_I( "mix->picture_height = %d\n", this->picture_height); - LOG_I( "mix->share_buf_mode = %d\n", this->share_buf_mode); - LOG_I( "mix->ci_frame_id = 0x%08x\n", this->ci_frame_id); - LOG_I( "mix->ci_frame_num = %d\n", this->ci_frame_num); - LOG_I( "mix->drawable = 0x%08x\n", this->drawable); - LOG_I( "mix->need_display = %d\n", this->need_display); - LOG_I( "mix->va_format = %d\n", this->va_format); - LOG_I( "mix->va_profile = %d\n", this->va_profile); - LOG_I( "mix->va_rcmode = %d\n\n", this->va_rcmode); - LOG_I( "mix->CIR_frame_cnt = %d\n\n", this->CIR_frame_cnt); - LOG_I( "mix->max_slice_size = %d\n\n", this->max_slice_size); - - //g_mutex_unlock(mix->objectlock); - this->Unlock(); - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT -MixVideoFormatEnc:: Encode( - MixBuffer * bufin[], int bufincnt, MixIOVec * iovout[], - int iovoutcnt, MixVideoEncodeParams * encode_params) { - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT -MixVideoFormatEnc::Flush() { - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT -MixVideoFormatEnc::EndOfStream() { - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoFormatEnc::Deinitialize() { - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoFormatEnc::GetMaxEncodedBufSize (uint *max_size) { - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT MixVideoFormatEnc::SetDynamicEncConfig ( - MixVideoConfigParamsEnc * config_params_enc, - MixEncParamsType params_type) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - if (config_params_enc == NULL) { - LOG_E(" config_params_enc == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - this->Lock(); - this->new_header_required = FALSE; - - switch (params_type) { - case MIX_ENC_PARAMS_BITRATE: - { - ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc, &(this->bitrate)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - LOG_E("Failed to mix_videoconfigparamsenc_get_bit_rate\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - this->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_INIT_QP: 
- { - ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, &(this->initial_qp)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E("Failed to mix_videoconfigparamsenc_get_init_qp\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - this->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_MIN_QP: - { - ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc, &(this->min_qp)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E("Failed to mix_videoconfigparamsenc_get_min_qp\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - this->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_WINDOW_SIZE: - { - ret = mix_videoconfigparamsenc_get_window_size (config_params_enc, &(this->window_size)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E("Failed to MIX_ENC_PARAMS_WINDOW_SIZE\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - this->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_TARGET_PERCENTAGE: - { - ret = mix_videoconfigparamsenc_get_target_percentage (config_params_enc, &(this->target_percentage)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E("Failed to MIX_ENC_PARAMS_TARGET_PERCENTAGE\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - this->render_bitrate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_MTU_SLICE_SIZE: - { - ret = mix_videoconfigparamsenc_get_max_slice_size(config_params_enc, &(this->max_slice_size)); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videoconfigparamsenc_get_max_slice_size\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - this->render_mss_required = TRUE; - - } - - case MIX_ENC_PARAMS_SLICE_NUM: - { - /* - * This type of dynamic control will be handled in H.264 override method - */ - } - break; - - case MIX_ENC_PARAMS_RC_MODE: - { - ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc, (MixRateControl*)&(this->va_rcmode)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E("Failed to mix_videoconfigparamsenc_get_rate_control\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - /* - * We only can change the RC mode to re-start encoding session - */ - - } - break; - - case MIX_ENC_PARAMS_RESOLUTION: - { - - ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc, &(this->picture_width), &(this->picture_height)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E("Failed to mix_videoconfigparamsenc_get_picture_res\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - this->new_header_required = TRUE; - } - break; - case MIX_ENC_PARAMS_GOP_SIZE: - { - - ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc, &(this->intra_period)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E("Failed to mix_videoconfigparamsenc_get_intra_period\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - this->new_header_required = TRUE; - - } - break; - case MIX_ENC_PARAMS_FRAME_RATE: - { - ret = mix_videoconfigparamsenc_get_frame_rate (config_params_enc, &(this->frame_rate_num), &(this->frame_rate_denom)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E("Failed to mix_videoconfigparamsenc_get_frame_rate\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - this->render_framerate_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_FORCE_KEY_FRAME: - { - this->new_header_required = TRUE; - - } - break; - - case MIX_ENC_PARAMS_REFRESH_TYPE: - { - ret = mix_videoconfigparamsenc_get_refresh_type(config_params_enc, 
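/*
 * Note: the MIX_ENC_PARAMS_MTU_SLICE_SIZE case above has no break and
 * falls through into MIX_ENC_PARAMS_SLICE_NUM. That case is empty (the
 * H.264 override handles it), so the fall-through is harmless today,
 * but it reads as unintentional and would bite if SLICE_NUM ever grew
 * a body.
 */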
&(this->refresh_type)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E("Failed to mix_videoconfigparamsenc_get_refresh_type\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - } - break; - - case MIX_ENC_PARAMS_AIR: - { - ret = mix_videoconfigparamsenc_get_AIR_params(config_params_enc, &(this->air_params)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E("Failed to mix_videoconfigparamsenc_get_AIR_params\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - - this->render_AIR_required = TRUE; - } - break; - - case MIX_ENC_PARAMS_CIR_FRAME_CNT: - { - ret = mix_videoconfigparamsenc_get_CIR_frame_cnt (config_params_enc, &(this->CIR_frame_cnt)); - if (ret != MIX_RESULT_SUCCESS) { - //TODO cleanup - - LOG_E("Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n"); - this->Unlock(); - return MIX_RESULT_FAIL; - } - } - break; - - default: - break; - } - this->Unlock(); - return MIX_RESULT_SUCCESS; -} - -/* mixvideoformatenc class methods implementation */ - -MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, char *msg) { - LOG_V( "Begin\n"); - if (NULL != mix) - return mix->GetCaps(msg); - else - return MIX_RESULT_NOTIMPL; -} - -MIX_RESULT mix_videofmtenc_initialize( - MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - MixUsrReqSurfacesInfo * requested_surface_info, - VADisplay va_display) { - - if (NULL != mix) - return mix->Initialize( - config_params_enc, - frame_mgr, - input_buf_pool, - surface_pool, - requested_surface_info, - va_display); - else - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmtenc_encode( - MixVideoFormatEnc *mix, MixBuffer * bufin[], - int bufincnt, MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params) { - if (NULL != mix) - return mix->Encode(bufin, bufincnt, iovout, iovoutcnt, encode_params); - else - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmtenc_flush(MixVideoFormatEnc *mix) { - if (NULL != mix) - return mix->Flush(); - else - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix) { - if (NULL != mix) - return mix->EndOfStream(); - else - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix) { - if (NULL != mix) - return mix->Deinitialize(); - else - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size( - MixVideoFormatEnc *mix, uint * max_size) { - if (NULL != mix) - return mix->GetMaxEncodedBufSize(max_size); - else - return MIX_RESULT_FAIL; -} - -MIX_RESULT mix_videofmtenc_set_dynamic_enc_config ( - MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params_enc, - MixEncParamsType params_type) { - if (NULL != mix) - return mix->SetDynamicEncConfig(config_params_enc, params_type); - else - return MIX_RESULT_FAIL; -} diff --git a/mix_video/src/mixvideoformatenc.h b/mix_video/src/mixvideoformatenc.h deleted file mode 100644 index b08bf70..0000000 --- a/mix_video/src/mixvideoformatenc.h +++ /dev/null @@ -1,218 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. 
The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEOFORMATENC_H__ -#define __MIX_VIDEOFORMATENC_H__ - -#include -#include "mixvideodef.h" -#include -#include "mixvideoconfigparamsenc.h" -#include "mixvideoframe.h" -#include "mixframemanager.h" -#include "mixsurfacepool.h" -#include "mixbuffer.h" -#include "mixbufferpool.h" -#include "mixvideoformatqueue.h" -#include "mixvideoencodeparams.h" -#include -class MixVideoFormatEnc; - - - -#define MIX_VIDEOFORMATENC(obj) (reinterpret_cast(obj)) -#define MIX_IS_VIDEOFORMATENC(obj) (NULL != MIX_VIDEOFORMATENC(obj)) - -/* vmethods typedef */ - -/* TODO: change return type and method parameters */ -typedef MIX_RESULT (*MixVideoFmtEncGetCapsFunc)(MixVideoFormatEnc *mix, char *msg); -typedef MIX_RESULT (*MixVideoFmtEncInitializeFunc)(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc* config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - MixUsrReqSurfacesInfo * requested_surface_info, - VADisplay va_display); -typedef MIX_RESULT (*MixVideoFmtEncodeFunc)(MixVideoFormatEnc *mix, MixBuffer * bufin[], - int bufincnt, MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params); -typedef MIX_RESULT (*MixVideoFmtEncFlushFunc)(MixVideoFormatEnc *mix); -typedef MIX_RESULT (*MixVideoFmtEncEndOfStreamFunc)(MixVideoFormatEnc *mix); -typedef MIX_RESULT (*MixVideoFmtEncDeinitializeFunc)(MixVideoFormatEnc *mix); -typedef MIX_RESULT (*MixVideoFmtEncGetMaxEncodedBufSizeFunc) (MixVideoFormatEnc *mix, uint *max_size); -typedef MIX_RESULT (*MixVideoFmtEncSetDynamicEncConfigFunc) (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params, - MixEncParamsType params_type); - -class MixVideoFormatEnc { -public: - MixVideoFormatEnc(); - virtual ~MixVideoFormatEnc(); - - virtual MIX_RESULT GetCaps(char *msg); - virtual MIX_RESULT Initialize( - MixVideoConfigParamsEnc* config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - MixUsrReqSurfacesInfo * requested_surface_info, - VADisplay va_display); - virtual MIX_RESULT Encode( MixBuffer * bufin[], - int bufincnt, MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params); - virtual MIX_RESULT Flush(); - virtual MIX_RESULT EndOfStream(); - virtual MIX_RESULT Deinitialize(); - virtual MIX_RESULT GetMaxEncodedBufSize (uint *max_size); - virtual MIX_RESULT SetDynamicEncConfig ( - MixVideoConfigParamsEnc * config_params, MixEncParamsType params_type); - - void Lock() { - mLock.lock(); - } - void Unlock() { - mLock.unlock(); - } - - MixVideoFormatEnc* Ref() { - ++ref_count; - return this; - } - - MixVideoFormatEnc* Unref() { - if (0 == (--ref_count)) { - delete this; - return NULL; - } else { - return this; - } - } - -public: - - MixVideoMutex mLock; - bool initialized; - MixFrameManager *framemgr; - MixSurfacePool *surfacepool; - VADisplay va_display; - VAContextID 
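/*
 * Ref()/Unref() above implement intrusive reference counting on a
 * plain uint: the increment and decrement are not atomic, so an object
 * shared across threads needs external synchronization (mLock guards
 * the encoder state, not the count). Typical ownership flow, assuming
 * the constructor starts ref_count at 1 as Unref()'s delete-at-zero
 * logic implies:
 *
 *   MixVideoFormatEnc *enc   = mix_videoformatenc_new(); // count == 1
 *   MixVideoFormatEnc *alias = enc->Ref();               // count == 2
 *   alias->Unref();                                      // count == 1
 *   enc->Unref();                                        // deletes enc
 */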
va_context; - VAConfigID va_config; - char *mime_type; - MixRawTargetFormat raw_format; - uint frame_rate_num; - uint frame_rate_denom; - uint picture_width; - uint picture_height; - uint intra_period; - /* - * Following is for bitrate control - */ - uint initial_qp; - uint min_qp; - uint bitrate; - uint target_percentage; - uint window_size; - - bool share_buf_mode; - ulong * ci_frame_id; - uint ci_frame_num; - - bool force_key_frame; - bool new_header_required; - - MixVideoIntraRefreshType refresh_type; - - uint CIR_frame_cnt; - - MixAIRParams air_params; - - uint max_slice_size; - bool render_mss_required; - bool render_QP_required; - bool render_AIR_required; - bool render_framerate_required; - bool render_bitrate_required; - - ulong drawable; - bool need_display; - - VAProfile va_profile; - VAEntrypoint va_entrypoint; - uint va_format; - uint va_rcmode; - uint8 level; - - MixBufferAllocationMode buffer_mode; - void * buf_info; - - MixBufferPool *inputbufpool; - JQueue *inputbufqueue; - uint ref_count ; -}; - - -/** - * mix_videoformatenc_new: - * @returns: A newly allocated instance of #MixVideoFormatEnc - * - * Use this method to create new instance of #MixVideoFormatEnc - */ -MixVideoFormatEnc *mix_videoformatenc_new(void); - -/** - * mix_videoformatenc_ref: - * @mix: object to add reference - * @returns: the MixVideoFormatEnc instance where reference count has been increased. - * - * Add reference count. - */ -MixVideoFormatEnc *mix_videoformatenc_ref(MixVideoFormatEnc * mix); - -/** - * mix_videoformatenc_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -MixVideoFormatEnc *mix_videoformatenc_unref(MixVideoFormatEnc * mix); - - -/* Class Methods */ - -/* TODO: change method parameter list */ -MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, char *msg); - -MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix, - MixVideoConfigParamsEnc * enc_config_params, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - MixUsrReqSurfacesInfo * requested_surface_info, - VADisplay va_display); - -MIX_RESULT mix_videofmtenc_encode( - MixVideoFormatEnc *mix, MixBuffer * bufin[], - int bufincnt, MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params); - -MIX_RESULT mix_videofmtenc_flush(MixVideoFormatEnc *mix); - -MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix); - -MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix); - -MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, - uint *max_size); - -MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, - MixVideoConfigParamsEnc * config_params, - MixEncParamsType params_type); - - -#endif /* __MIX_VIDEOFORMATENC_H__ */ diff --git a/mix_video/src/mixvideoformatenc_h263.cpp b/mix_video/src/mixvideoformatenc_h263.cpp deleted file mode 100644 index 7ba2c05..0000000 --- a/mix_video/src/mixvideoformatenc_h263.cpp +++ /dev/null @@ -1,2175 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. 
The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#include -#include - -#include "mixvideolog.h" - -#include "mixvideoformatenc_h263.h" -#include "mixvideoconfigparamsenc_h263.h" -#include - -#undef SHOW_SRC - -#ifdef SHOW_SRC -Window win = 0; -#endif /* SHOW_SRC */ - -MixVideoFormatEnc_H263::MixVideoFormatEnc_H263() - :encoded_frames(0) - ,pic_skipped(FALSE) - ,is_intra(TRUE) - ,cur_frame(NULL) - ,ref_frame(NULL) - ,rec_frame(NULL) - ,lookup_frame(NULL) - ,last_mix_buffer(NULL) - ,shared_surfaces(NULL) - ,surfaces(NULL) - ,surface_num(0) - ,shared_surfaces_cnt(0) - ,precreated_surfaces_cnt(0) - ,usrptr(NULL) - ,coded_buf_index(0) - ,coded_buf_size(0) { -} - -MixVideoFormatEnc_H263::~MixVideoFormatEnc_H263() { -} - - -MixVideoFormatEnc_H263 * -mix_videoformatenc_h263_new(void) { - return new MixVideoFormatEnc_H263(); -} - - -MixVideoFormatEnc_H263 * -mix_videoformatenc_h263_ref(MixVideoFormatEnc_H263 * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} - -MixVideoFormatEnc_H263 * -mix_videoformatenc_h263_unref(MixVideoFormatEnc_H263 * mix) { - if (NULL != mix) - return MIX_VIDEOFORMATENC_H263(mix->Unref()); - else - return mix; -} - -MIX_RESULT MixVideoFormatEnc_H263::Initialize( - MixVideoConfigParamsEnc* config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - MixUsrReqSurfacesInfo * requested_surface_info, - VADisplay va_display) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoConfigParamsEncH263 * config_params_enc_h263; - - VAStatus va_status = VA_STATUS_SUCCESS; - VASurfaceID * surfaces = NULL; - - int va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - int va_num_profiles, va_num_entrypoints; - - VAProfile *va_profiles = NULL; - VAEntrypoint *va_entrypoints = NULL; - VAConfigAttrib va_attrib[2]; - uint index; - uint max_size = 0; - - /* - * Different MIX buffer mode will have different surface handling approach - */ - - - uint normal_surfaces_cnt = 2; - - /* - * shared_surfaces_cnt is for upstream buffer allocation case - */ - uint shared_surfaces_cnt = 0; - - /* - * precreated_surfaces_cnt is for self buffer allocation case - */ - uint precreated_surfaces_cnt = 0; - - MixCISharedBufferInfo * ci_info = NULL; - - - /*frame_mgr and input_buf_pool is reservered for future use*/ - - if ( config_params_enc == NULL || va_display == NULL || requested_surface_info == NULL) { - LOG_E( - " config_params_enc == NULL || va_display == NULL || requested_surface_info == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - /* - * Check more for requested_surface_info - */ - if (requested_surface_info->surface_cnt != 0 && - (requested_surface_info->surface_allocated == NULL || requested_surface_info->usrptr == NULL)) { - LOG_E( - "surface_cnt != 0 && (surface_allocated == NULL || usrptr == NULL)\n"); - return MIX_RESULT_NULL_PTR; - } - - if (requested_surface_info->surface_cnt > MAX_ENC_SURFACE_COUNT) { - LOG_E ("Something wrong, we 
have to quite now!\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "begin\n"); - - /* Chainup parent method. */ - ret = MixVideoFormatEnc::Initialize(config_params_enc, frame_mgr, input_buf_pool, surface_pool, requested_surface_info, va_display); - - - if (ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H263 (config_params_enc)) { - config_params_enc_h263 = - MIX_VIDEOCONFIGPARAMSENC_H263 (config_params_enc); - } else { - LOG_V( - "mix_videofmtenc_h263_initialize: no h263 config params found\n"); - return MIX_RESULT_FAIL; - } - -// g_mutex_lock(parent->objectlock); - Lock(); - - LOG_V( - "Start to get properities from H263 params\n"); - - /* get properties from H263 params Object, which is special to H263 format*/ - - ret = mix_videoconfigparamsenc_h263_get_slice_num (config_params_enc_h263, - &this->slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h263_get_slice_num\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsenc_h263_get_dlk (config_params_enc_h263, - &(this->disable_deblocking_filter_idc)); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h263_get_dlk\n"); - goto cleanup; - } - - - LOG_V( - "======H263 Encode Object properities======:\n"); - - LOG_I( "this->slice_num = %d\n", - this->slice_num); - LOG_I( "this->disabled_deblocking_filter_idc = %d\n\n", - this->disable_deblocking_filter_idc); - - LOG_V( - "Get properities from params done\n"); - - this->va_display = va_display; - - LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", - (uint)va_display); - -#if 0 - /* query the vender information, can ignore*/ - va_vendor = vaQueryVendorString (va_display); - LOG_I( "Vendor = %s\n", - va_vendor); -#endif - - /*get the max number for profiles/entrypoints/attribs*/ - va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); - - va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); - - va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - -// va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); - va_profiles = new VAProfile[va_max_num_profiles]; -// va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); - va_entrypoints = new VAEntrypoint[va_max_num_entrypoints]; - - if (va_profiles == NULL || va_entrypoints ==NULL) - { - LOG_E( - "!va_profiles || !va_entrypoints\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_I( - "va_profiles = 0x%08x\n", (uint)va_profiles); - - LOG_V( "vaQueryConfigProfiles\n"); - - - va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigProfiles\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaQueryConfigProfiles Done\n"); - - - - /*check whether profile is supported*/ - for (index= 0; index < va_num_profiles; index++) { - if (this->va_profile == va_profiles[index]) - break; - } - - if (index == va_num_profiles) - { - LOG_E( "Profile not supported\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaQueryConfigEntrypoints\n"); - - - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints(va_display, - this->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); - ret 
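/*
 * The VA capability probe in Initialize() boils down to: enumerate,
 * then test membership. Condensed sketch (error handling elided; all
 * names are the ones used in the surrounding code):
 *
 *   vaQueryConfigProfiles(va_display, va_profiles, &va_num_profiles);
 *   // require this->va_profile among va_profiles[0..va_num_profiles)
 *   vaQueryConfigEntrypoints(va_display, this->va_profile,
 *                            va_entrypoints, &va_num_entrypoints);
 *   // require VAEntrypointEncSlice in the entrypoint list, then
 *   // vaGetConfigAttributes() must advertise this->va_format and
 *   // this->va_rcmode before vaCreateConfig() is attempted.
 */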
= MIX_RESULT_FAIL; - goto cleanup; - } - - for (index = 0; index < va_num_entrypoints; index ++) { - if (va_entrypoints[index] == VAEntrypointEncSlice) { - break; - } - } - - if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_attrib[0].type = VAConfigAttribRTFormat; - va_attrib[1].type = VAConfigAttribRateControl; - - LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes(va_display, this->va_profile, - this->va_entrypoint, - &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaGetConfigAttributes\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if ((va_attrib[0].value & this->va_format) == 0) { - LOG_E( "Matched format not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - if ((va_attrib[1].value & this->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_attrib[0].value = this->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = this->va_rcmode; - - LOG_V( "======VA Configuration======\n"); - - LOG_I( "profile = %d\n", - this->va_profile); - LOG_I( "va_entrypoint = %d\n", - this->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", - va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", - va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", - va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) = %d\n", - va_attrib[1].value); - - LOG_V( "vaCreateConfig\n"); - - va_status = vaCreateConfig(va_display, this->va_profile, - this->va_entrypoint, - &va_attrib[0], 2, &(this->va_config)); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaCreateConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if (this->va_rcmode == VA_RC_VCM) { - - /* - * Following three features are only enabled in VCM mode - */ - this->render_mss_required = TRUE; - this->render_AIR_required = TRUE; - this->render_bitrate_required = TRUE; - } - - /* - * For upstream allocates buffer, it is mandatory to set buffer mode - * and for other stuff, it is optional - */ - - - - - LOG_I ("alloc_surface_cnt = %d\n", requested_surface_info->surface_cnt); - - if (requested_surface_info->surface_cnt == 0) { - switch (this->buffer_mode) { - case MIX_BUFFER_UPSTREAM_ALLOC_CI: - ci_info = (MixCISharedBufferInfo *) (this->buf_info); - shared_surfaces_cnt = ci_info->ci_frame_cnt; - normal_surfaces_cnt = 2; - break; - case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: - /* - * To be develped - */ - break; - case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: - /* - * To be develped - */ - break; - default: - this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL; - normal_surfaces_cnt = 8; - break; - } - } - else if (requested_surface_info->surface_cnt == 1) { - /* - * Un-normal case, TBD - */ - this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL; - normal_surfaces_cnt = 8; - } - else { - this->buffer_mode = MIX_BUFFER_SELF_ALLOC_SURFACE; - precreated_surfaces_cnt = requested_surface_info->surface_cnt; - this->alloc_surface_cnt = requested_surface_info->surface_cnt; - -// self->usrptr = g_malloc (requested_surface_info->surface_cnt * sizeof (uint8 *)); - this->usrptr = new uint8 *[requested_surface_info->surface_cnt] ; - if (this->usrptr == NULL) { - LOG_E("Failed allocate memory\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - memcpy (this->usrptr, requested_surface_info->usrptr, requested_surface_info->surface_cnt * sizeof (uint8 *)); - - } - - LOG_I ("buffer_mode = %d\n", this->buffer_mode); - - this->shared_surfaces_cnt = 
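/*
 * Surface accounting per buffer mode, as chosen in the switch above
 * (the counts are the values the code hard-codes, not tunables):
 *
 *   MIX_BUFFER_UPSTREAM_ALLOC_CI  : shared = ci_frame_cnt, normal = 2
 *   MIX_BUFFER_ALLOC_NORMAL       : normal = 8
 *   MIX_BUFFER_SELF_ALLOC_SURFACE : precreated = surface_cnt, normal = 2
 *
 * and surface_num = normal + shared + precreated, computed below.
 */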
shared_surfaces_cnt; - this->precreated_surfaces_cnt = precreated_surfaces_cnt; - -#if 0 - - int ii = 0; - for (ii=0; ii < alloc_surface_cnt; ii++) { - - g_print ("self->usrptr[%d] = 0x%08x\n", ii, self->usrptr[ii]); - g_print ("usrptr[%d] = 0x%08x\n", ii, usrptr[ii]); - - - } - - /*TODO: compute the surface number*/ - int numSurfaces; - - if (parent->share_buf_mode) { - numSurfaces = 2; - } - else { - numSurfaces = 2; - parent->ci_frame_num = 0; - } - - //self->surface_num = numSurfaces + parent->ci_frame_num; -#endif - - this->surface_num = normal_surfaces_cnt + shared_surfaces_cnt + precreated_surfaces_cnt; - - //surfaces = g_malloc(sizeof(VASurfaceID)*normal_surfaces_cnt); - surfaces = new VASurfaceID[normal_surfaces_cnt] ; - if (surfaces == NULL) - { - LOG_E( - "Failed allocate surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - //self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); - this->surfaces = new VASurfaceID[this->surface_num] ; - if (this->surfaces == NULL) - { - LOG_E( - "Failed allocate private surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_V( "vaCreateSurfaces\n"); - - va_status = vaCreateSurfaces(va_display, this->va_format, - this->picture_width, this->picture_height, - surfaces, normal_surfaces_cnt, NULL, 0); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaCreateSurfaces\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - if (shared_surfaces_cnt != 0) { -// self->shared_surfaces =g_malloc(sizeof(VASurfaceID) * shared_surfaces_cnt); - this->shared_surfaces =new VASurfaceID[shared_surfaces_cnt] ; - if (this->shared_surfaces == NULL) - { - LOG_E( - "Failed allocate shared surface\n"); - - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - - switch (this->buffer_mode) { - case MIX_BUFFER_UPSTREAM_ALLOC_CI: - { - #if 0 - for (index = 0; index < this->shared_surfaces_cnt; index++) { - - va_status = vaCreateSurfaceFromCIFrame(va_display, - (ulong) (ci_info->ci_frame_id[index]), - &this->shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E("Failed to vaCreateSurfaceFromCIFrame\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - this->surfaces[index] = this->shared_surfaces[index]; - } - #endif - } - break; - case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: - /*To be develped*/ - break; - case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: - /*To be develped*/ - break; - case MIX_BUFFER_ALLOC_NORMAL: - break; - case MIX_BUFFER_SELF_ALLOC_SURFACE: - { - for (index = 0; index < requested_surface_info->surface_cnt; index ++) { - this->surfaces[index] = requested_surface_info->surface_allocated[index]; - } - } - break; - default: - break; - } - - for (index = 0; index < normal_surfaces_cnt; index++) { - this->surfaces[precreated_surfaces_cnt + shared_surfaces_cnt + index] = surfaces[index]; - } - - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", this->surface_num); - - -#if 0 //current put this in gst - images = g_malloc(sizeof(VAImage)*numSurfaces); - if (images == NULL) - { - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < numSurfaces; index++) { - //Derive an VAImage from an existing surface. 
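/*
 * Resulting layout of this->surfaces after the assignments above:
 * indices [0 .. shared+precreated) hold the upstream- or
 * self-allocated surfaces, and the tail [shared+precreated ..
 * surface_num) holds the freshly created "normal" ones.
 * _process_encode() later reserves the two highest indices
 * (surface_num - 1 and surface_num - 2) for the reference and
 * reconstructed frames.
 */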
- //The image buffer can then be mapped/unmapped for CPU access - va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); - } -#endif - - LOG_V( "mix_surfacepool_new\n"); - - this->surfacepool = mix_surfacepool_new(); - if (surface_pool) - *surface_pool = this->surfacepool; - //which is useful to check before encode - - if (this->surfacepool == NULL) - { - LOG_E( - "Failed to mix_surfacepool_new\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "mix_surfacepool_initialize\n"); - - ret = mix_surfacepool_initialize(this->surfacepool, - this->surfaces, this->surface_num, va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - - LOG_E("Error init failure\n"); - - ret = MIX_RESULT_ALREADY_INIT; - goto cleanup; - default: - break; - } - - - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext(va_display, this->va_config, - this->picture_width, this->picture_height, - VA_PROGRESSIVE, this->surfaces, this->surface_num, - &(this->va_context)); - - LOG_I( - "Created libva context width %d, height %d\n", - this->picture_width, this->picture_height); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", - (uint)va_status); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - ret = GetMaxEncodedBufSize(&max_size); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videofmtenc_h263_get_max_encoded_buf_size\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, this->va_context, - VAEncCodedBufferType, - this->coded_buf_size, // - 1, NULL, - &this->coded_buf[0]); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, this->va_context, - VAEncCodedBufferType, - this->coded_buf_size, // - 1, NULL, - &(this->coded_buf[1])); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -#ifdef SHOW_SRC - Display * display = XOpenDisplay (NULL); - - LOG_I( "display = 0x%08x\n", - (uint) display); - win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, - parent->picture_width, parent->picture_height, 0, 0, - WhitePixel(display, 0)); - XMapWindow(display, win); - XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - - XSync(display, False); - LOG_I( "va_display = 0x%08x\n", - (uint) va_display); - -#endif /* SHOW_SRC */ - -cleanup: - - if (ret == MIX_RESULT_SUCCESS) { - this->initialized = TRUE; - } - - /*free profiles and entrypoints*/ - if (va_profiles) -// g_free(va_profiles); - delete[]va_profiles; - if (va_entrypoints) -// g_free(va_entrypoints); - delete[]va_entrypoints; - if (surfaces) -// g_free(surfaces); - delete[]surfaces; - -// g_mutex_unlock(parent->objectlock); - Unlock(); - - LOG_V( "end\n"); - - return ret; - -} - -MIX_RESULT MixVideoFormatEnc_H263::Encode( - MixBuffer * bufin[], int bufincnt, - MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - /*currenly only support one input and output buffer*/ - - if (bufincnt != 1 || iovoutcnt != 1) { - LOG_E( - "buffer count not equel to 1\n"); - LOG_E( - "maybe some exception occurs\n"); - } - - if 
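/*
 * Initialize() creates two VAEncCodedBufferType buffers below; the
 * encoder ping-pongs between them (coded_buf_index alternates 0/1 in
 * _process_encode), so the bitstream for frame n can be mapped and
 * copied out while frame n+1 is being submitted.
 */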
(bufin[0] == NULL || iovout[0] == NULL) { - LOG_E( - "!bufin[0] ||!iovout[0]\n"); - return MIX_RESULT_NULL_PTR; - } - -#if 0 - if (parent_class->encode) { - return parent_class->encode(mix, bufin, bufincnt, iovout, - iovoutcnt, encode_params); - } -#endif - - - LOG_V( "Locking\n"); -// g_mutex_lock(parent->objectlock); - Lock(); - - - //TODO: also we could move some encode Preparation work to here - - LOG_V( - "mix_videofmtenc_h263_process_encode\n"); - - ret = _process_encode(bufin[0], iovout[0]); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_videofmtenc_h263_process_encode\n"); - goto cleanup; - } - -cleanup: - - LOG_V( "UnLocking\n"); - -// g_mutex_unlock(parent->objectlock); - Unlock(); - - LOG_V( "end\n"); - - return ret; - -} - -MIX_RESULT MixVideoFormatEnc_H263::Flush() { - //MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - /*not chain to parent flush func*/ -#if 0 - if (parent_class->flush) { - return parent_class->flush(mix, msg); - } -#endif - -// g_mutex_lock(mix->objectlock); - Lock(); - - /*unref the current source surface*/ - if (this->cur_frame != NULL) - { - mix_videoframe_unref (this->cur_frame); - this->cur_frame = NULL; - } - - /*unref the reconstructed surface*/ - if (this->rec_frame != NULL) - { - mix_videoframe_unref (this->rec_frame); - this->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (this->ref_frame != NULL) - { - mix_videoframe_unref (this->ref_frame); - this->ref_frame = NULL; - } - - if (this->last_mix_buffer) { - mix_buffer_unref(this->last_mix_buffer); - this->last_mix_buffer = NULL; - } - - /*reset the properities*/ - this->encoded_frames = 0; - this->pic_skipped = FALSE; - this->is_intra = TRUE; - -// g_mutex_unlock(mix->objectlock); - Unlock(); - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoFormatEnc_H263::Deinitialize() { - VAStatus va_status; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - MixVideoFormatEnc::Deinitialize(); - - if (ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - - LOG_V( "Release frames\n"); - -// g_mutex_lock(parent->objectlock); - Lock(); - -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } -#endif - - /*unref the reconstructed surface*/ - if (this->rec_frame != NULL) - { - mix_videoframe_unref (this->rec_frame); - this->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (this->ref_frame != NULL) - { - mix_videoframe_unref (this->ref_frame); - this->ref_frame = NULL; - } - - if (this->lookup_frame != NULL) - { - mix_videoframe_unref (this->lookup_frame); - this->lookup_frame = NULL; - } - - if (this->last_mix_buffer) { - mix_buffer_unref(this->last_mix_buffer); - this->last_mix_buffer = NULL; - } - - LOG_V( "Release surfaces\n"); - - if (this->shared_surfaces) - { -// g_free (self->shared_surfaces); - delete[]this->shared_surfaces; - this->shared_surfaces = NULL; - } - - if (this->surfaces) - { -// g_free (self->surfaces); - delete[]this->surfaces; - this->surfaces = NULL; - } - - if (this->usrptr) { -// g_free (self->usrptr); - delete[]this->usrptr; - this->usrptr = NULL; - } - - LOG_V( "vaDestroyContext\n"); - - va_status = vaDestroyContext (this->va_display, this->va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyContext\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaDestroyConfig\n"); - - va_status = vaDestroyConfig (this->va_display, this->va_config); - if (va_status != 
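/*
 * Two apparent slips in Deinitialize() here: the parent-class call
 * discards its result (ret is never assigned from
 * MixVideoFormatEnc::Deinitialize(), so the error check that follows
 * it can never fire), and the cleanup label below sets
 * initialized = TRUE even on the failure path, where FALSE looks
 * intended.
 */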
VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -cleanup: - - this->initialized = TRUE; - -// g_mutex_unlock(parent->objectlock); - Unlock(); - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT MixVideoFormatEnc_H263::GetMaxEncodedBufSize (uint *max_size) { - - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOFORMATENC_H263(this)) { - - if (this->coded_buf_size > 0) { - *max_size = this->coded_buf_size; - LOG_V ("Already calculate the max encoded size, get the value directly"); - return MIX_RESULT_SUCCESS; - } - - /*base on the rate control mode to calculate the defaule encoded buffer size*/ - if (this->va_rcmode_h263 == VA_RC_NONE) { - this->coded_buf_size = - (this->picture_width* this->picture_height * 830) / (16 * 16); - // set to value according to QP - } - else { - this->coded_buf_size = this->bitrate/ 4; - } - - this->coded_buf_size = - max (this->coded_buf_size , - (this->picture_width* this->picture_height * 830) / (16 * 16)); - - /*in case got a very large user input bit rate value*/ - this->coded_buf_size = - max(this->coded_buf_size, - (this->picture_width * this->picture_height * 1.5 * 8)); - this->coded_buf_size = (this->coded_buf_size + 15) &(~15); - } - else - { - LOG_E( - "not H263 video encode Object\n"); - return MIX_RESULT_INVALID_PARAM; - } - - *max_size = this->coded_buf_size; - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT MixVideoFormatEnc_H263::_process_encode (MixBuffer * bufin, - MixIOVec * iovout) { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus va_status = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; - VAContextID va_context; - ulong surface = 0; - uint16 width, height; - - MixVideoFrame * tmp_frame; - uint8 *buf; - VACodedBufferSegment *coded_seg = NULL; - int num_seg = 0; - uint total_size = 0; - uint size = 0; - - if ((bufin == NULL) || (iovout == NULL)) { - LOG_E( - "bufin == NULL || iovout == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - - va_display = this->va_display; - va_context = this->va_context; - width = this->picture_width; - height = this->picture_height; - - - LOG_I( "encoded_frames = %d\n", - this->encoded_frames); - LOG_I( "is_intra = %d\n", - this->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", - (uint) this->ci_frame_id); - - /* determine the picture type*/ - if ((this->encoded_frames % this->intra_period) == 0) { - this->is_intra = TRUE; - } else { - this->is_intra = FALSE; - } - - LOG_I( "is_intra_picture = %d\n", - this->is_intra); - - LOG_V( - "Get Surface from the pool\n"); - - /*current we use one surface for source data, - * one for reference and one for reconstructed*/ - /*TODO, could be refine here*/ - - - - switch (this->buffer_mode) { - case MIX_BUFFER_UPSTREAM_ALLOC_CI: - { - - - //MixVideoFrame * frame = mix_videoframe_new(); - if (this->lookup_frame == NULL) - { - this->lookup_frame = mix_videoframe_new (); - if (this->lookup_frame == NULL) - { - LOG_E("mix_videoframe_new() failed!\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - if (this->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->ref_frame, this->lookup_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get reference surface from pool failed\n"); - goto cleanup; - } - } - - if 
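/*
 * GetMaxEncodedBufSize() above computes, in effect (pseudocode of the
 * same arithmetic, nothing new):
 *
 *   base = width * height * 830 / (16 * 16);    // ~830 bytes per macroblock
 *   size = (rc mode == VA_RC_NONE) ? base : bitrate / 4;
 *   size = max(size, base);
 *   size = max(size, width * height * 1.5 * 8); // extra floor kept from the code
 *   coded_buf_size = (size + 15) & ~15;         // round up to 16 bytes
 */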
(this->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->rec_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get recontructed surface from pool failed\n"); - goto cleanup; - } - } - - //mix_videoframe_unref (mix->cur_frame); - - if (this->need_display) { - this->cur_frame = NULL; - } - - if (this->cur_frame == NULL) - { - uint ci_idx; -#ifndef ANDROID - memcpy (&ci_idx, bufin->data, bufin->size); -#else - memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); -#endif - - LOG_I( - "surface_num = %d\n", this->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > this->surface_num - 2) { - LOG_E( - "the CI frame idx is too bigger than CI frame number\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - - } - - - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, ci_idx); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->cur_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - goto cleanup; - - } - } - - ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); - - } - - /* - * end of CI buffer allocation mode - */ - - break; - case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: - break; - - case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: - - break; - - case MIX_BUFFER_SELF_ALLOC_SURFACE: - { - if (this->lookup_frame == NULL) - { - this->lookup_frame = mix_videoframe_new (); - if (this->lookup_frame == NULL) - { - LOG_E("mix_videoframe_new() failed!\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - uint surface_idx = (uint) -1; //fixme, temp use a big value - uint idx = 0; - - LOG_I ("bufin->data = 0x%08x\n", bufin->data); - - for (idx = 0; idx < this->alloc_surface_cnt; idx++) { - - LOG_I ("this->usrptr[%d] = 0x%08x\n", idx, this->usrptr[idx]); - - if (bufin->data == this->usrptr[idx]) - surface_idx = idx; - } - - LOG_I( - "surface_num = %d\n", this->surface_num); - LOG_I( - "surface_idx = %d\n", surface_idx); - - if (surface_idx > this->surface_num - 2) { - LOG_W( - "the Surface idx is too big, most likely the buffer passed in is not allocated by us\n"); - ret = MIX_RESULT_FAIL; - goto no_share_mode; - - } - - if (this->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->ref_frame, this->lookup_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get reference surface from pool failed\n"); - goto cleanup; - } - } - - if (this->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->rec_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get recontructed surface from pool failed\n"); - goto cleanup; - } - } - - //mix_videoframe_unref (mix->cur_frame); - - if 
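/*
 * The "> surface_num - 2" guards above exist because the two highest
 * pool indices are reserved: surface_num - 1 backs ref_frame and
 * surface_num - 2 backs rec_frame, so an upstream index may only map
 * to [0 .. surface_num - 3]. In the self-alloc path the linear usrptr
 * scan resolves bufin->data back to its surface index; a miss drops to
 * the no_share_mode copy path rather than failing hard.
 */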
(this->need_display) { - this->cur_frame = NULL; - } - - if (this->cur_frame == NULL) - { - - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, surface_idx); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->cur_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - goto cleanup; - - } - } - - ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); - - } - - break; - /* - * end of Self buffer allocation mode - */ - - case MIX_BUFFER_ALLOC_NORMAL: - { - -no_share_mode: - - LOG_V( - "We are NOT in share buffer mode\n"); - - if (this->ref_frame == NULL) - { - ret = mix_surfacepool_get(this->surfacepool, &this->ref_frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (this->rec_frame == NULL) - { - ret = mix_surfacepool_get(this->surfacepool, &this->rec_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (this->need_display) { - this->cur_frame = NULL; - } - - if (this->cur_frame == NULL) - { - ret = mix_surfacepool_get(this->surfacepool, &this->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - LOG_V( "Get Surface Done\n"); - - - VAImage src_image; - uint8 *pvbuf; - uint8 *dst_y; - uint8 *dst_uv; - int i,j; - - LOG_V( - "map source data to surface\n"); - - ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); - goto cleanup; - } - - - LOG_I( - "surface id = 0x%08x\n", (uint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); - //need to destroy - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDeriveImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); - - - va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed to vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "vaImage information\n"); - LOG_I( - "image->pitches[0] = %d\n", image->pitches[0]); - LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); - LOG_I( - "image->offsets[0] = %d\n", image->offsets[0]); - LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); - LOG_I( - "image->num_planes = %d\n", image->num_planes); - LOG_I( - "image->width = %d\n", image->width); - LOG_I( - "image->height = %d\n", image->height); - - LOG_I( - "input buf size = %d\n", bufin->size); - - uint8 *inbuf = bufin->data; - -#ifdef ANDROID -#define USE_SRC_FMT_NV12 -#endif - int offset_uv = width * height; - uint8 *inbuf_uv = inbuf + offset_uv; - int height_uv = height / 2; - int width_uv = width; - -#ifdef ANDROID - //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 - - dst_y = pvbuf + image->offsets[0]; - for (i = 0; i < height; i++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - -#ifdef USE_SRC_FMT_NV12 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i++) { - memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); - dst_uv += image->pitches[1]; - } -#else //USE_SRC_FMT_NV21 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; 
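/*
 * Layout notes for the copy loops here: the source frame is assumed
 * packed with Y at offset 0 (width x height); for I420 input, U sits
 * at width*height and V at width*height*5/4 (each width/2 x height/2),
 * while NV12/NV21 input carries one interleaved UV (or VU) plane at
 * width*height. Rows are copied one at a time because the VAImage
 * pitches typically exceed width.
 */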
i ++) { - for (j = 0; j < width_uv; j += 2) { - dst_uv[j] = inbuf_uv[j+1]; //u - dst_uv[j+1] = inbuf_uv[j]; //v - } - dst_uv += image->pitches[1]; - inbuf_uv += width_uv; - } -#endif - -#else - - if (this->raw_format == MIX_RAW_TARGET_FORMAT_YUV420) { - dst_y = pvbuf +image->offsets[0]; - - for (i = 0; i < height; i ++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = - inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; - } - dst_uv += image->pitches[1]; - } - } - - else if (this->raw_format == MIX_RAW_TARGET_FORMAT_NV12) { - - dst_y = pvbuf + image->offsets[0]; - for (i = 0; i < height; i++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i++) { - memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); - dst_uv += image->pitches[1]; - } - } - else { - LOG_E("Raw format not supoort\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -#endif //USE_SRC_FMT_YUV420 - - va_status = vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDestroyImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( - "Map source data to surface done\n"); - } - break; - default: - break; - - } - - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(uint)va_context); - LOG_I( "surface = 0x%08x\n",(uint)surface); - LOG_I( "va_display = 0x%08x\n",(uint)va_display); - - - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - ret = _send_encode_command (); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - goto cleanup; - } - - - if ((this->va_rcmode == VA_RC_NONE) || this->encoded_frames == 0) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - } - - if (this->encoded_frames == 0) { - this->encoded_frames ++; - this->last_coded_buf = this->coded_buf[this->coded_buf_index]; - this->coded_buf_index ++; - this->coded_buf_index %=2; - - this->last_frame = this->cur_frame; - - - /* determine the picture type*/ - if ((this->encoded_frames % this->intra_period) == 0) { - this->is_intra = TRUE; - } else { - this->is_intra = FALSE; - } - - tmp_frame = this->rec_frame; - this->rec_frame= this->ref_frame; - this->ref_frame = tmp_frame; - - - } - - LOG_V( "vaSyncSurface\n"); - - va_status = vaSyncSurface(va_display, this->last_frame->frame_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaSyncSurface\n"); - //return MIX_RESULT_FAIL; - } - - - LOG_V( - "Start to get encoded data\n"); - - /*get encoded data from the VA buffer*/ - va_status = vaMapBuffer (va_display, this->last_coded_buf, (void **)&buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - - coded_seg = (VACodedBufferSegment *)buf; - num_seg = 1; - - while (1) { - total_size += 
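/*
 * The VACodedBufferSegment chain below is walked twice: a first pass
 * totals the sizes so iovout->data can be allocated, a second copies
 * the payload out. A sketch of one such pass (same fields the code
 * uses):
 *
 *   for (VACodedBufferSegment *s = (VACodedBufferSegment *)buf;
 *        s != NULL; s = (VACodedBufferSegment *)s->next) {
 *       total_size += s->size;   // pass 1; pass 2 memcpy()s s->buf
 *   }
 */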
coded_seg->size; - - if (coded_seg->next == NULL) - break; - - coded_seg = (VACodedBufferSegment *)coded_seg->next; - num_seg ++; - } - - -#if 0 - // first 4 bytes is the size of the buffer - memcpy (&(iovout->data_size), (void*)buf, 4); - //size = (uint*) buf; -#endif - - iovout->data_size = total_size; - - if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. - - //iovout->data = g_malloc (iovout->data_size); - iovout->data = new uchar[iovout->data_size]; - if (iovout->data == NULL) { - LOG_E( "iovout->data == NULL\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - //memcpy (iovout->data, buf + 16, iovout->data_size); - - coded_seg = (VACodedBufferSegment *)buf; - total_size = 0; - - while (1) { - - memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - - coded_seg = (VACodedBufferSegment *)coded_seg->next; - } - - iovout->buffer_size = iovout->data_size; - - LOG_I( - "out size is = %d\n", iovout->data_size); - - va_status = vaUnmapBuffer (va_display, this->last_coded_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "get encoded data done\n"); - - if (!((this->va_rcmode == VA_RC_NONE) || this->encoded_frames == 1)) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - return MIX_RESULT_FAIL; - } - } - - if (this->encoded_frames == 1) { - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - ret = _send_encode_command (); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed mix_videofmtenc_h264_send_encode_command\n"); - goto cleanup; - } - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - } - - VASurfaceStatus status; - - /*query the status of current surface*/ - va_status = vaQuerySurfaceStatus(va_display, surface, &status); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaQuerySurfaceStatus\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - this->pic_skipped = status & VASurfaceSkipped; - - //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame); - - if (this->need_display) { - ret = mix_videoframe_set_sync_flag(this->cur_frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - goto cleanup; - } - - ret = mix_framemanager_enqueue(this->framemgr, this->cur_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_framemanager_enqueue\n"); - goto cleanup; - } - } - - /*update the reference surface and reconstructed surface */ - if (!this->pic_skipped) { - tmp_frame = this->rec_frame; - this->rec_frame= this->ref_frame; - this->ref_frame = tmp_frame; - } - - -#if 0 - if (this->ref_frame != NULL) - mix_videoframe_unref (this->ref_frame); - this->ref_frame = this->rec_frame; - - mix_videoframe_unref (this->cur_frame); -#endif - - this->encoded_frames ++; - this->last_coded_buf = this->coded_buf[this->coded_buf_index]; - this->coded_buf_index ++; - this->coded_buf_index %=2; - this->last_frame = this->cur_frame; - - if (this->last_mix_buffer) { - LOG_V("calls to mix_buffer_unref \n"); - LOG_V("refcount = %d\n", 
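/*
 * Note on the cleanup path just below: it frees iovout->data when ret
 * indicates failure, but the function then ends with
 * "return MIX_RESULT_SUCCESS;" unconditionally, so callers never see
 * the error. "return ret;" looks intended.
 */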
MIX_PARAMS(this->last_mix_buffer)->GetRefCount()); - mix_buffer_unref(this->last_mix_buffer); - } - - LOG_V("ref the current bufin\n"); - this->last_mix_buffer = mix_buffer_ref(bufin); - - if (!(this->need_display)) { - mix_videoframe_unref (this->cur_frame); - this->cur_frame = NULL; - } - -cleanup: - - if (ret != MIX_RESULT_SUCCESS) { - if (iovout->data) { - //g_free(iovout->data); - delete[]iovout->data; - iovout->data = NULL; - } - } - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoFormatEnc_H263::_send_encode_command () { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOFORMATENC_H263(this)) - { - if (this->encoded_frames == 0) { - ret = _send_seq_params (); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed SendSeqParams\n"); - return MIX_RESULT_FAIL; - } - } - - ret = _send_picture_parameter (); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed SendPictureParameter\n"); - return MIX_RESULT_FAIL; - } - - ret = _send_slice_parameter (); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed SendSliceParameter\n"); - return MIX_RESULT_FAIL; - } - - } - else - { - LOG_E( - "not H263 video encode Object\n"); - return MIX_RESULT_INVALID_PARAM; - } - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT MixVideoFormatEnc_H263::_send_seq_params () { - VAStatus va_status; - VAEncSequenceParameterBufferH263 h263_seq_param; - VABufferID seq_para_buf_id; - - - LOG_V( "Begin\n\n"); - - - /*set up the sequence params for HW*/ - h263_seq_param.bits_per_second= this->bitrate; - h263_seq_param.frame_rate = 30; //hard-coded, driver need; - //(unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom; - h263_seq_param.initial_qp = this->initial_qp; - h263_seq_param.min_qp = this->min_qp; - h263_seq_param.intra_period = this->intra_period; - - //h263_seq_param.fixed_vop_rate = 30; - - LOG_V( - "===h263 sequence params===\n"); - - LOG_I( "bitrate = %d\n", - h263_seq_param.bits_per_second); - LOG_I( "frame_rate = %d\n", - h263_seq_param.frame_rate); - LOG_I( "initial_qp = %d\n", - h263_seq_param.initial_qp); - LOG_I( "min_qp = %d\n", - h263_seq_param.min_qp); - LOG_I( "intra_period = %d\n\n", - h263_seq_param.intra_period); - - va_status = vaCreateBuffer(this->va_display, this->va_context, - VAEncSequenceParameterBufferType, - sizeof(h263_seq_param), - 1, &h263_seq_param, - &seq_para_buf_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(this->va_display, this->va_context, - &seq_para_buf_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoFormatEnc_H263::_send_picture_parameter () { - - VAStatus va_status; - VAEncPictureParameterBufferH263 h263_pic_param; - - LOG_V( "Begin\n\n"); - -#if 0 //not needed currently - MixVideoConfigParamsEncH263 * params_h263 - = MIX_VIDEOCONFIGPARAMSENC_H263 (config_params_enc); -#endif - - - /*set picture params for HW*/ - h263_pic_param.reference_picture = this->ref_frame->frame_id; - h263_pic_param.reconstructed_picture = this->rec_frame->frame_id; - h263_pic_param.coded_buf = this->coded_buf[this->coded_buf_index]; - h263_pic_param.picture_width = this->picture_width; - h263_pic_param.picture_height = this->picture_height; - h263_pic_param.picture_type = 
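/*
 * _send_seq_params() above hard-codes frame_rate = 30 ("driver need");
 * the commented-out expression next to it is the rounded integer rate
 * that _send_dynamic_framerate() uses:
 *
 *   rate = (frame_rate_num + frame_rate_denom / 2) / frame_rate_denom;
 *   // e.g. 30000/1001: (30000 + 500) / 1001 == 30
 */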
this->is_intra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; - - - - LOG_V( - "======h263 picture params======\n"); - LOG_I( "reference_picture = 0x%08x\n", - h263_pic_param.reference_picture); - LOG_I( "reconstructed_picture = 0x%08x\n", - h263_pic_param.reconstructed_picture); - LOG_I( "coded_buf = 0x%08x\n", - h263_pic_param.coded_buf); - LOG_I( "coded_buf_index = %d\n", - this->coded_buf_index); - LOG_I( "picture_width = %d\n", - h263_pic_param.picture_width); - LOG_I( "picture_height = %d\n", - h263_pic_param.picture_height); - LOG_I( "picture_type = %d\n\n", - h263_pic_param.picture_type); - - va_status = vaCreateBuffer(this->va_display, this->va_context, - VAEncPictureParameterBufferType, - sizeof(h263_pic_param), - 1,&h263_pic_param, - &this->pic_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - - va_status = vaRenderPicture(this->va_display, this->va_context, - &this->pic_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT MixVideoFormatEnc_H263::_send_slice_parameter () { - VAStatus va_status; - - uint slice_num; - uint slice_height; - uint slice_index; - uint slice_height_in_mb; - - - - LOG_V("Begin\n\n"); - - - //slice_num = mix->slice_num; - slice_num = 1; // one slice per picture; - slice_height = this->picture_height / slice_num; - - slice_height += 15; - slice_height &= (~15); - - va_status = vaCreateBuffer (this->va_display, this->va_context, - VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), - slice_num, NULL, - &this->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E("Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - VAEncSliceParameterBuffer *slice_param, *current_slice; - - va_status = vaMapBuffer(this->va_display, - this->slice_param_buf, - (void **)&slice_param); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E("Failed to vaMapBuffer\n"); - return MIX_RESULT_FAIL; - } - - current_slice = slice_param; - - for (slice_index = 0; slice_index < slice_num; slice_index++) { - current_slice = slice_param + slice_index; - slice_height_in_mb = - min (slice_height, this->picture_height - - slice_index * slice_height) / 16; - - // starting MB row number for this slice - current_slice->start_row_number = slice_index * slice_height / 16; - // slice height measured in MB - current_slice->slice_height = slice_height_in_mb; - current_slice->slice_flags.bits.is_intra = this->is_intra; - current_slice->slice_flags.bits.disable_deblocking_filter_idc - = this->disable_deblocking_filter_idc; - - LOG_V("======h263 slice params======\n"); - - LOG_I("slice_index = %d\n", - (int) slice_index); - LOG_I("start_row_number = %d\n", - (int) current_slice->start_row_number); - LOG_I("slice_height_in_mb = %d\n", - (int) current_slice->slice_height); - LOG_I("slice.is_intra = %d\n", - (int) current_slice->slice_flags.bits.is_intra); - LOG_I("disable_deblocking_filter_idc = %d\n\n", - (int) this->disable_deblocking_filter_idc); - - } - - va_status = vaUnmapBuffer(this->va_display, this->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E("Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(this->va_display, this->va_context, - &this->slice_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E("Failed to 
vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - LOG_V("end\n"); - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT MixVideoFormatEnc_H263::_send_dynamic_bitrate () { - VAStatus va_status; - - - LOG_V( "Begin\n\n"); - - - if (this->va_rcmode != MIX_RATE_CONTROL_VCM) { - - LOG_W ("Not in VCM mode, but call SendDynamicBitrate\n"); - return VA_STATUS_SUCCESS; - } - - VAEncMiscParameterBuffer * misc_enc_param_buf; - VAEncMiscParameterRateControl * bitrate_control_param; - VABufferID misc_param_buffer_id; - - va_status = vaCreateBuffer(this->va_display, this->va_context, - VAEncMiscParameterBufferType, - sizeof(VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl), - 1, NULL, - &misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaMapBuffer (this->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - misc_enc_param_buf->type = VAEncMiscParameterTypeRateControl; - bitrate_control_param = (VAEncMiscParameterRateControl *)misc_enc_param_buf->data; - - bitrate_control_param->bits_per_second = this->bitrate; - bitrate_control_param->initial_qp = this->initial_qp; - bitrate_control_param->min_qp = this->min_qp; - bitrate_control_param->target_percentage = this->target_percentage; - bitrate_control_param->window_size = this->window_size; - - va_status = vaUnmapBuffer(this->va_display, misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - - va_status = vaRenderPicture(this->va_display, this->va_context, - &misc_param_buffer_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - return MIX_RESULT_SUCCESS; - -} - - -MIX_RESULT MixVideoFormatEnc_H263::_send_dynamic_framerate() { - VAStatus va_status; - - LOG_V( "Begin\n\n"); - - if (this->va_rcmode != MIX_RATE_CONTROL_VCM) { - - LOG_W ("Not in VCM mode, but call SendDynamicFramerate\n"); - return VA_STATUS_SUCCESS; - } - - VAEncMiscParameterBuffer * misc_enc_param_buf; - VAEncMiscParameterFrameRate * framerate_param; - VABufferID misc_param_buffer_id; - - va_status = vaCreateBuffer(this->va_display, this->va_context, - VAEncMiscParameterBufferType, - sizeof(misc_enc_param_buf) + sizeof(VAEncMiscParameterFrameRate), - 1, NULL, - &misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaMapBuffer (this->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - misc_enc_param_buf->type = VAEncMiscParameterTypeFrameRate; - framerate_param = (VAEncMiscParameterFrameRate *)misc_enc_param_buf->data; - framerate_param->framerate = - (unsigned int) (this->frame_rate_num + this->frame_rate_denom /2 ) / this->frame_rate_denom; - - va_status = vaUnmapBuffer(this->va_display, misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(this->va_display, this->va_context, - &misc_param_buffer_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - - LOG_I( "frame rate = %d\n", 
- framerate_param->framerate); - - return MIX_RESULT_SUCCESS; - -} - - diff --git a/mix_video/src/mixvideoformatenc_h263.h b/mix_video/src/mixvideoformatenc_h263.h deleted file mode 100644 index 90ef29d..0000000 --- a/mix_video/src/mixvideoformatenc_h263.h +++ /dev/null @@ -1,124 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEOFORMATENC_H263_H__ -#define __MIX_VIDEOFORMATENC_H263_H__ - -#include "mixvideoformatenc.h" -#include "mixvideoframe_private.h" - - -#define MIX_VIDEO_ENC_H263_SURFACE_NUM 20 - -#define min(X,Y) (((X) < (Y)) ? (X) : (Y)) -#define max(X,Y) (((X) > (Y)) ? (X) : (Y)) - -/* - * Type macros. - */ -#define MIX_VIDEOFORMATENC_H263(obj) (reinterpret_cast<MixVideoFormatEnc_H263*>(obj)) -#define MIX_IS_VIDEOFORMATENC_H263(obj) ((NULL != MIX_VIDEOFORMATENC_H263(obj)) ?
TRUE : FALSE) - -class MixVideoFormatEnc_H263 : public MixVideoFormatEnc { -public: - MixVideoFormatEnc_H263(); - virtual ~MixVideoFormatEnc_H263(); - - virtual MIX_RESULT Initialize( - MixVideoConfigParamsEnc* config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - MixUsrReqSurfacesInfo * requested_surface_info, - VADisplay va_display); - - virtual MIX_RESULT Encode( MixBuffer * bufin[], - int bufincnt, MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params); - - virtual MIX_RESULT Flush(); - - virtual MIX_RESULT Deinitialize(); - - virtual MIX_RESULT GetMaxEncodedBufSize (uint *max_size); - - /* Local Methods */ -private: - MIX_RESULT _process_encode (MixBuffer * bufin, MixIOVec * iovout); - MIX_RESULT _send_encode_command(); - MIX_RESULT _send_seq_params(); - MIX_RESULT _send_picture_parameter(); - MIX_RESULT _send_slice_parameter(); - MIX_RESULT _send_dynamic_bitrate(); - MIX_RESULT _send_dynamic_framerate(); - -public: - VABufferID coded_buf[2]; - VABufferID last_coded_buf; - VABufferID seq_param_buf; - VABufferID pic_param_buf; - VABufferID slice_param_buf; - VASurfaceID * shared_surfaces; - VASurfaceID * surfaces; - uint surface_num; - uint shared_surfaces_cnt; - uint precreated_surfaces_cnt; - - MixVideoFrame *cur_frame; //current input frame to be encoded; - MixVideoFrame *ref_frame; //reference frame - MixVideoFrame *rec_frame; //reconstructed frame; - MixVideoFrame *last_frame; //last frame; - MixVideoFrame *lookup_frame; - MixBuffer *last_mix_buffer; - - uint disable_deblocking_filter_idc; - uint slice_num; - uint va_rcmode_h263; - - uint encoded_frames; - bool pic_skipped; - - bool is_intra; - - uint coded_buf_size; - uint coded_buf_index; - - uint8 ** usrptr; - uint alloc_surface_cnt; -}; - - - -/** - * mix_videoformatenc_h263_new: - * @returns: A newly allocated instance of #MixVideoFormatEnc_H263 - * - * Use this method to create new instance of #MixVideoFormatEnc_H263 - */ -MixVideoFormatEnc_H263 *mix_videoformatenc_h263_new(void); - -/** - * mix_videoformatenc_h263_ref: - * @mix: object to add reference - * @returns: the MixVideoFormatEnc_H263 instance where reference count has been increased. - * - * Add reference count. - */ -MixVideoFormatEnc_H263 *mix_videoformatenc_h263_ref(MixVideoFormatEnc_H263 * mix); - -/** - * mix_videoformatenc_h263_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -//#define mix_videoformatenc_h263_unref(obj) g_object_unref (G_OBJECT(obj)) -MixVideoFormatEnc_H263 *mix_videoformatenc_h263_unref(MixVideoFormatEnc_H263 * mix); - -#endif /* __MIX_VIDEOFORMATENC_H263_H__ */ - diff --git a/mix_video/src/mixvideoformatenc_h264.cpp b/mix_video/src/mixvideoformatenc_h264.cpp deleted file mode 100644 index 34b30ba..0000000 --- a/mix_video/src/mixvideoformatenc_h264.cpp +++ /dev/null @@ -1,3002 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. 
No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#include -#include - -#include "mixvideolog.h" - -#include "mixvideoformatenc_h264.h" -#include "mixvideoconfigparamsenc_h264.h" -#include -#include - -#undef SHOW_SRC - -#ifdef SHOW_SRC -Window win = 0; -#endif /* SHOW_SRC */ - -MixVideoFormatEnc_H264::MixVideoFormatEnc_H264() - :MixVideoFormatEnc() - ,encoded_frames(0) - ,frame_num(0) - ,pic_skipped(FALSE) - ,is_intra(TRUE) - ,cur_frame(NULL) - ,ref_frame(NULL) - ,rec_frame(NULL) - ,lookup_frame(NULL) -#if 1 - ,last_mix_buffer ( NULL) -#endif - ,shared_surfaces(NULL) - ,surfaces(NULL) - ,surface_num(0) - ,shared_surfaces_cnt(0) - ,precreated_surfaces_cnt(0) - ,usrptr(NULL) - ,coded_buf_index(0) - ,coded_buf_size(0) { -} - - -MixVideoFormatEnc_H264::~MixVideoFormatEnc_H264() { -} - - -MixVideoFormatEnc_H264 * -mix_videoformatenc_h264_new(void) { - return new MixVideoFormatEnc_H264(); -} - -MixVideoFormatEnc_H264 * -mix_videoformatenc_h264_ref(MixVideoFormatEnc_H264 * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} - -MixVideoFormatEnc_H264 * -mix_videoformatenc_h264_unref(MixVideoFormatEnc_H264 * mix) { - if (NULL != mix) - return MIX_VIDEOFORMATENC_H264(mix->Unref()); - else - return mix; -} - -MIX_RESULT -MixVideoFormatEnc_H264::Initialize( - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - MixUsrReqSurfacesInfo * requested_surface_info, - VADisplay va_display ) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; -// MixVideoFormatEnc *parent = NULL; - MixVideoConfigParamsEncH264 * config_params_enc_h264; - - VAStatus va_status = VA_STATUS_SUCCESS; - VASurfaceID * surfaces = NULL; - - int va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - int va_num_profiles, va_num_entrypoints; - - VAProfile *va_profiles = NULL; - VAEntrypoint *va_entrypoints = NULL; - VAConfigAttrib va_attrib[2]; - uint index; - uint max_size = 0; - /* - * For upstream-allocated buffers, setting the buffer mode is mandatory; - * for everything else it is optional - */ - - - /* - * Different MIX buffer modes use different surface handling approaches - */ - - - uint normal_surfaces_cnt = 2; - - /* - * shared_surfaces_cnt is for upstream buffer allocation case - */ - uint shared_surfaces_cnt = 0; - - /* - * precreated_surfaces_cnt is for self buffer allocation case - */ - uint precreated_surfaces_cnt = 0; - - MixCISharedBufferInfo * ci_info = NULL; - - /*frame_mgr and input_buf_pool are reserved for future use*/ - - if ( config_params_enc == NULL || va_display == NULL || requested_surface_info == NULL) { - LOG_E( - " config_params_enc == NULL || va_display == NULL || requested_surface_info == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - /* - * Check more for requested_surface_info - */ - if (requested_surface_info->surface_cnt != 0 && - (requested_surface_info->surface_allocated == NULL || requested_surface_info->usrptr == NULL)) { - LOG_E( - "surface_cnt != 0 && (surface_allocated == NULL || usrptr == NULL)\n"); - 
return MIX_RESULT_NULL_PTR; - } - - if (requested_surface_info->surface_cnt > MAX_ENC_SURFACE_COUNT) { - LOG_E ("Something is wrong, we have to quit now!\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "begin\n"); - - ret = MixVideoFormatEnc::Initialize(config_params_enc,frame_mgr,input_buf_pool,surface_pool,requested_surface_info,va_display); - - if (ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - - -// parent = MIX_VIDEOFORMATENC(this); -// MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix); - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) { - config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc); - } else { - LOG_V( - "mix_videofmtenc_h264_initialize: no h264 config params found\n"); - return MIX_RESULT_FAIL; - } - -// g_mutex_lock(parent->objectlock); - Lock(); - - LOG_V( - "Start to get properties from h.264 params\n"); - - /* get properties from H264 params Object, which is special to H264 format*/ - ret = mix_videoconfigparamsenc_h264_get_bus (config_params_enc_h264, - &this->basic_unit_size); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_bus\n"); - goto CLEAN_UP; - } - - - ret = mix_videoconfigparamsenc_h264_get_dlk (config_params_enc_h264, - &this->disable_deblocking_filter_idc); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_dlk\n"); - goto CLEAN_UP; - } - - ret = mix_videoconfigparamsenc_h264_get_vui_flag (config_params_enc_h264, - &this->vui_flag); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_vui_flag\n"); - goto CLEAN_UP; - } - - - ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, - &this->slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); - goto CLEAN_UP; - } - - ret = mix_videoconfigparamsenc_h264_get_I_slice_num (config_params_enc_h264, - &this->I_slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n"); - goto CLEAN_UP; - } - - ret = mix_videoconfigparamsenc_h264_get_P_slice_num (config_params_enc_h264, - &this->P_slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n"); - goto CLEAN_UP; - } - - ret = mix_videoconfigparamsenc_h264_get_delimiter_type (config_params_enc_h264, - &this->delimiter_type); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E ( - "Failed to mix_videoconfigparamsenc_h264_get_delimiter_type\n"); - goto CLEAN_UP; - } - - ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264, - &this->idr_interval); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E ( - "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); - goto CLEAN_UP; - } - - LOG_V( - "======H264 Encode Object properties======:\n"); - - LOG_I( "this->basic_unit_size = %d\n", - this->basic_unit_size); - LOG_I( "this->disable_deblocking_filter_idc = %d\n", - this->disable_deblocking_filter_idc); - LOG_I( "this->slice_num = %d\n", - this->slice_num); - LOG_I( "this->I_slice_num = %d\n", - this->I_slice_num); - LOG_I( "this->P_slice_num = %d\n", - this->P_slice_num); - LOG_I ("this->delimiter_type = %d\n", - this->delimiter_type); - LOG_I ("this->idr_interval = %d\n", - this->idr_interval); - - LOG_V( - "Get properties from params done\n"); - - this->va_display = va_display; - - LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", - (uint)va_display); - - -#if 0 - /* query the 
vender information, can ignore*/ - va_vendor = vaQueryVendorString (va_display); - LOG_I( "Vendor = %s\n", - va_vendor); -#endif - - /*get the max number for profiles/entrypoints/attribs*/ - va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", - va_max_num_profiles); - - va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", - va_max_num_entrypoints); - - va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", - va_max_num_attribs); - -// va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles); - va_profiles = new VAProfile[va_max_num_profiles]; -// va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); - va_entrypoints = new VAEntrypoint[va_max_num_entrypoints]; - if (va_profiles == NULL || va_entrypoints ==NULL) - { - LOG_E( - "!va_profiles || !va_entrypoints\n"); - ret = MIX_RESULT_NO_MEMORY; - goto CLEAN_UP; - } - - LOG_I( - "va_profiles = 0x%08x\n", (uint)va_profiles); - - LOG_V( "vaQueryConfigProfiles\n"); - - - va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigProfiles\n"); - - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - LOG_V( "vaQueryConfigProfiles Done\n"); - - - - /*check whether profile is supported*/ - for (index= 0; index < va_num_profiles; index++) { - if (this->va_profile == va_profiles[index]) - break; - } - - if (index == va_num_profiles) - { - LOG_E( "Profile not supported\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - LOG_V( "vaQueryConfigEntrypoints\n"); - - - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints(va_display, - this->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaQueryConfigEntrypoints\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - for (index = 0; index < va_num_entrypoints; index ++) { - if (va_entrypoints[index] == VAEntrypointEncSlice) { - break; - } - } - - if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - va_attrib[0].type = VAConfigAttribRTFormat; - va_attrib[1].type = VAConfigAttribRateControl; - - LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes(va_display, this->va_profile, - this->va_entrypoint, - &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to call vaGetConfigAttributes\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - if ((va_attrib[0].value & this->va_format) == 0) { - LOG_E( "Matched format not found\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - LOG_E( "RC mode va_attrib[1].value=%d, this->va_rcmode=%d",va_attrib[1].value, this->va_rcmode); - if ((va_attrib[1].value & this->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - va_attrib[0].value = this->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = this->va_rcmode; - - LOG_V( "======VA Configuration======\n"); - - LOG_I( "profile = %d\n", - this->va_profile); - LOG_I( "va_entrypoint = %d\n", - this->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", - va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", - va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", - va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) = %d\n", - va_attrib[1].value); - - LOG_V( "vaCreateConfig\n"); - - va_status = 
vaCreateConfig(va_display, this->va_profile, - this->va_entrypoint, - &va_attrib[0], 2, &(this->va_config)); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaCreateConfig\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - - if (this->va_rcmode == VA_RC_VCM) { - - /* - * Following three features are only enabled in VCM mode - */ - this->render_mss_required = TRUE; - this->render_AIR_required = TRUE; - this->render_bitrate_required = TRUE; - this->slice_num = (this->picture_height + 15) / 16; //if we are in VCM, we will set slice num to max value - } - - - - LOG_I ("alloc_surface_cnt = %d\n", requested_surface_info->surface_cnt); - - if (requested_surface_info->surface_cnt == 0) { - switch (this->buffer_mode) { - case MIX_BUFFER_UPSTREAM_ALLOC_CI: - ci_info = (MixCISharedBufferInfo *) (this->buf_info); - shared_surfaces_cnt = ci_info->ci_frame_cnt; - normal_surfaces_cnt = 2; - break; - case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: - /* - * To be developed - */ - break; - case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: - /* - * To be developed - */ - break; - default: - this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL; - normal_surfaces_cnt = 8; - break; - } - } - else if (requested_surface_info->surface_cnt == 1) { - /* - * Abnormal case, TBD - */ - this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL; - normal_surfaces_cnt = 8; - } - else { - this->buffer_mode = MIX_BUFFER_SELF_ALLOC_SURFACE; - precreated_surfaces_cnt = requested_surface_info->surface_cnt; - this->alloc_surface_cnt = requested_surface_info->surface_cnt; - - //this->usrptr = g_malloc (requested_surface_info->surface_cnt * sizeof (uint8 *)); - this->usrptr = new uint8 *[requested_surface_info->surface_cnt]; - if (this->usrptr == NULL) { - LOG_E("Failed to allocate memory\n"); - ret = MIX_RESULT_NO_MEMORY; - goto CLEAN_UP; - } - - memcpy (this->usrptr, requested_surface_info->usrptr, requested_surface_info->surface_cnt * sizeof (uint8 *)); - - } - - LOG_I ("buffer_mode = %d\n", this->buffer_mode); - - this->shared_surfaces_cnt = shared_surfaces_cnt; - this->precreated_surfaces_cnt = precreated_surfaces_cnt; - -#if 0 - - int ii = 0; - for (ii=0; ii < alloc_surface_cnt; ii++) { - - g_print ("self->usrptr[%d] = 0x%08x\n", ii, self->usrptr[ii]); - g_print ("usrptr[%d] = 0x%08x\n", ii, usrptr[ii]); - - - } - - /*TODO: compute the surface number*/ - int numSurfaces; - - if (parent->share_buf_mode) { - numSurfaces = 2; - } - else { - numSurfaces = 2; - parent->ci_frame_num = 0; - } - - //self->surface_num = numSurfaces + parent->ci_frame_num; -#endif - - this->surface_num = normal_surfaces_cnt + shared_surfaces_cnt + precreated_surfaces_cnt; - - // surfaces = g_malloc(sizeof(VASurfaceID)*normal_surfaces_cnt); - surfaces = new VASurfaceID[normal_surfaces_cnt]; - if (surfaces == NULL) - { - LOG_E( - "Failed to allocate surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto CLEAN_UP; - } - - //this->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num); - this->surfaces = new VASurfaceID[this->surface_num] ; - if (this->surfaces == NULL) - { - LOG_E( - "Failed to allocate private surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto CLEAN_UP; - } - - LOG_V( "vaCreateSurfaces\n"); - - va_status = vaCreateSurfaces(va_display, this->va_format, - this->picture_width, this->picture_height, - surfaces,normal_surfaces_cnt, NULL, 0); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaCreateSurfaces\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - - if (shared_surfaces_cnt != 0) { -// this->shared_surfaces = -// g_malloc(sizeof(VASurfaceID) * 
shared_surfaces_cnt); - this->shared_surfaces = - new VASurfaceID[shared_surfaces_cnt]; - - if (this->shared_surfaces == NULL) - { - LOG_E( - "Failed to allocate shared surface\n"); - - ret = MIX_RESULT_NO_MEMORY; - goto CLEAN_UP; - } - } - - - switch (this->buffer_mode) { - case MIX_BUFFER_UPSTREAM_ALLOC_CI: - { -#if 0 - for (index = 0; index < this->shared_surfaces_cnt; index++) { - va_status = vaCreateSurfaceFromCIFrame(va_display, - (ulong) (ci_info->ci_frame_id[index]), - &this->shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E("Failed to vaCreateSurfaceFromCIFrame\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - this->surfaces[index] = this->shared_surfaces[index]; - } -#endif - } - - break; - case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: - /*To be developed*/ - break; - case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: - /*To be developed*/ - break; - case MIX_BUFFER_ALLOC_NORMAL: - break; - case MIX_BUFFER_SELF_ALLOC_SURFACE: - { - for (index = 0; index < requested_surface_info->surface_cnt; index ++) { - this->surfaces[index] = requested_surface_info->surface_allocated[index]; - } - } - break; - default: - break; - } - - for (index = 0; index < normal_surfaces_cnt; index++) { - this->surfaces[precreated_surfaces_cnt + shared_surfaces_cnt + index] = surfaces[index]; - } - - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", this->surface_num); - -#if 0 //currently put this in gst - images = g_malloc(sizeof(VAImage)*numSurfaces); - if (images == NULL) - { - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < numSurfaces; index++) { - //Derive a VAImage from an existing surface. - //The image buffer can then be mapped/unmapped for CPU access - va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); - } -#endif - - LOG_V( "mix_surfacepool_new\n"); - - this->surfacepool = mix_surfacepool_new(); - if (surface_pool) - *surface_pool = this->surfacepool; - //which is useful to check before encode - - if (this->surfacepool == NULL) - { - LOG_E( - "Failed to mix_surfacepool_new\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - LOG_V( - "mix_surfacepool_initialize\n"); - - ret = mix_surfacepool_initialize(this->surfacepool, - this->surfaces, this->surface_num, va_display); - - switch (ret) - { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - LOG_E( "Error init failure\n"); - ret = MIX_RESULT_ALREADY_INIT; - goto CLEAN_UP; - default: - break; - } - - - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext(va_display, this->va_config, - this->picture_width, this->picture_height, - 0, this->surfaces, this->surface_num, - &(this->va_context)); - - LOG_I( - "Created libva context width %d, height %d\n", - this->picture_width, this->picture_height); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", - (uint)va_status); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - - ret = GetMaxEncodedBufSize(&max_size); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videofmtenc_h264_get_max_encoded_buf_size\n"); - goto CLEAN_UP; - - } - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, this->va_context, - VAEncCodedBufferType, - this->coded_buf_size, // - 1, NULL, - &(this->coded_buf[0])); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - 
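// The two coded buffers created here and just below form a ping-pong
// pair: while frame N is being encoded into one buffer, the finished
// output of frame N-1 is mapped and drained from the other. A minimal
// sketch of the rotation performed later in _process_encode(), using
// the member names of this class:
//   last_coded_buf  = coded_buf[coded_buf_index];
//   coded_buf_index = (coded_buf_index + 1) % 2;   // toggle 0 <-> 1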
- /*Create coded buffer for output*/ - va_status = vaCreateBuffer (va_display, this->va_context, - VAEncCodedBufferType, - this->coded_buf_size, // - 1, NULL, - &(this->coded_buf[1])); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - -#ifdef SHOW_SRC - Display * display = XOpenDisplay (NULL); - LOG_I( "display = 0x%08x\n", - (uint) display); - win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0, - this->picture_width, this->picture_height, 0, 0, - WhitePixel(display, 0)); - XMapWindow(display, win); - XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - - XSync(display, False); - LOG_I( "va_display = 0x%08x\n", - (uint) va_display); - -#endif /* SHOW_SRC */ - -CLEAN_UP: - - - if (ret == MIX_RESULT_SUCCESS) { - initialized = TRUE; - } - - /*free profiles and entrypoints*/ - if (va_profiles) - delete [] va_profiles; - - if (va_entrypoints) - delete [] va_entrypoints; - - if (surfaces) - delete []surfaces; - -// g_mutex_unlock(parent->objectlock); - Unlock(); - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT -MixVideoFormatEnc_H264::Encode( - MixBuffer * bufin[], int bufincnt, - MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - /*currently only support one input and output buffer*/ - - if (bufincnt != 1 || iovoutcnt != 1) { - LOG_E( - "buffer count not equal to 1\n"); - LOG_E( - "maybe some exception occurred\n"); - } - - if (bufin[0] == NULL || iovout[0] == NULL) { - LOG_E( - " !bufin[0] ||!iovout[0]\n"); - return MIX_RESULT_NULL_PTR; - } - -#if 0 - if (parent_class->encode) { - return parent_class->encode(mix, bufin, bufincnt, iovout, - iovoutcnt, encode_params); - } -#endif - - - LOG_V( "Locking\n"); -// g_mutex_lock(parent->objectlock); - Lock(); - - - //TODO: also we could move some encode Preparation work to here - - LOG_V( - "ProcessEncode\n"); - - ret = _process_encode ( - bufin[0], iovout[0]); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed ProcessEncode\n"); - goto CLEAN_UP; - } - -CLEAN_UP: - - LOG_V( "UnLocking\n"); - -// g_mutex_unlock(parent->objectlock); - Unlock(); - - LOG_V( "end\n"); - - return ret; -} - -MIX_RESULT MixVideoFormatEnc_H264::Flush() { - - //MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - - /*not chain to parent flush func*/ -#if 0 - if (parent_class->flush) { - return parent_class->flush(mix, msg); - } -#endif - -// g_mutex_lock(mix->objectlock); - Lock(); - -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } -#endif - - /*unref the reconstructed surface*/ - if (this->rec_frame != NULL) - { - mix_videoframe_unref (this->rec_frame); - this->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (this->ref_frame != NULL) - { - mix_videoframe_unref (this->ref_frame); - this->ref_frame = NULL; - } - -//#ifdef ANDROID -#if 1 - if (this->last_mix_buffer) { - mix_buffer_unref(this->last_mix_buffer); - this->last_mix_buffer = NULL; - } -#endif - /*reset the properties*/ - this->encoded_frames = 0; - this->frame_num = 0; - this->pic_skipped = FALSE; - this->is_intra = TRUE; - -// g_mutex_unlock(mix->objectlock); - Unlock(); - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - - - -MIX_RESULT -MixVideoFormatEnc_H264::Deinitialize() { - - VAStatus va_status; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - - LOG_V( 
"Begin\n"); - - - ret = MixVideoFormatEnc::Deinitialize(); - - if (ret != MIX_RESULT_SUCCESS) - { - return ret; - } - - - LOG_V( "Release frames\n"); - -// g_mutex_lock(parent->objectlock); - Lock(); - -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } -#endif - - /*unref the reconstructed surface*/ - if (this->rec_frame != NULL) - { - mix_videoframe_unref (this->rec_frame); - this->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (this->ref_frame != NULL) - { - mix_videoframe_unref (this->ref_frame); - this->ref_frame = NULL; - } - - if (this->lookup_frame != NULL) - { - mix_videoframe_unref (this->lookup_frame); - this->lookup_frame = NULL; - } - - if (this->last_mix_buffer) { - mix_buffer_unref(this->last_mix_buffer); - this->last_mix_buffer = NULL; - } - - LOG_V( "Release surfaces\n"); - - if (this->shared_surfaces) - { -// g_free (self->shared_surfaces); - delete []this->shared_surfaces; - this->shared_surfaces = NULL; - } - - if (this->surfaces) - { -// g_free (self->surfaces); - delete[] this->surfaces; - this->surfaces = NULL; - } - - if (this->usrptr) { -// g_free (self->usrptr); - delete[]this->usrptr; - this->usrptr = NULL; - } - - LOG_V( "vaDestroyContext\n"); - - va_status = vaDestroyContext (this->va_display, this->va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyContext\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - LOG_V( "vaDestroyConfig\n"); - - va_status = vaDestroyConfig (this->va_display, this->va_config); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaDestroyConfig\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - -CLEAN_UP: - this->initialized = FALSE; - - //g_mutex_unlock(parent->objectlock); - Unlock(); - - LOG_V( "end\n"); - - return ret; -} - - -MIX_RESULT -MixVideoFormatEnc_H264::GetMaxEncodedBufSize (uint *max_size) { - - if (max_size == NULL) - { - LOG_E( - "max_size == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - - LOG_V( "Begin\n"); - - if (MIX_IS_VIDEOFORMATENC_H264(this)) { - - if (this->coded_buf_size > 0) { - *max_size = this->coded_buf_size; - LOG_V ("Already calculate the max encoded size, get the value directly"); - return MIX_RESULT_SUCCESS; - } - - /*base on the rate control mode to calculate the defaule encoded buffer size*/ - if (this->va_rcmode_h264 == VA_RC_NONE) { - this->coded_buf_size = - (this->picture_width* this->picture_height * 400) / (16 * 16); - // set to value according to QP - } - else { - this->coded_buf_size = this->bitrate/ 4; - } - - this->coded_buf_size = - max (this->coded_buf_size , - (this->picture_width* this->picture_height * 400) / (16 * 16)); - - /*in case got a very large user input bit rate value*/ - this->coded_buf_size = - min(this->coded_buf_size, - (this->picture_width * this->picture_height * 1.5 * 8)); - this->coded_buf_size = (this->coded_buf_size + 15) &(~15); - } - else - { - LOG_E( - "not H264 video encode Object\n"); - return MIX_RESULT_INVALID_PARAM; - } - - *max_size = this->coded_buf_size; - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT MixVideoFormatEnc_H264::SetDynamicEncConfig(MixVideoConfigParamsEnc * config_params, - MixEncParamsType params_type) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoConfigParamsEncH264 * config_params_enc_h264; - - LOG_V( "Begin\n"); - - - if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params)) { - config_params_enc_h264 = - MIX_VIDEOCONFIGPARAMSENC_H264 (config_params); - } else { - LOG_V( - 
"mix_videofmtenc_h264_initialize: no h264 config params found\n"); - return MIX_RESULT_FAIL; - } - - /* - * For case params_type == MIX_ENC_PARAMS_SLICE_NUM - * we don't need to chain up to parent method, as we will handle - * dynamic slice height change inside this method, and other dynamic - * controls will be handled in parent method. - */ - if (params_type == MIX_ENC_PARAMS_SLICE_NUM) { - -// g_mutex_lock(parent->objectlock); - Lock(); - - ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264, - &this->slice_num); - - this->I_slice_num = this->P_slice_num = this->slice_num; - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n"); - -// g_mutex_unlock(parent->objectlock); - Unlock(); - return ret; - } - - //g_mutex_unlock(parent->objectlock); - Unlock(); - - } - else if (params_type == MIX_ENC_PARAMS_I_SLICE_NUM) { - - //g_mutex_lock(parent->objectlock); - Lock(); - - ret = mix_videoconfigparamsenc_h264_get_I_slice_num (config_params_enc_h264, - &this->I_slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n"); - - //g_mutex_unlock(parent->objectlock); - Unlock(); - - return ret; - } - - //g_mutex_unlock(parent->objectlock); - Unlock(); - - } - else if (params_type == MIX_ENC_PARAMS_P_SLICE_NUM) { - - //g_mutex_lock(parent->objectlock); - Lock(); - - ret = mix_videoconfigparamsenc_h264_get_P_slice_num (config_params_enc_h264, - &this->P_slice_num); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n"); - - //g_mutex_unlock(parent->objectlock); - Unlock(); - - return ret; - } - - //g_mutex_unlock(parent->objectlock); - Unlock(); - - } else if (params_type == MIX_ENC_PARAMS_IDR_INTERVAL) { - - //g_mutex_lock(parent->objectlock); - Lock(); - - ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264, - &this->idr_interval); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E( - "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n"); - - //g_mutex_unlock(parent->objectlock); - Unlock(); - - return ret; - } - - this->new_header_required = TRUE; - - //g_mutex_unlock(parent->objectlock); - Unlock(); - - } else { - - /* Chainup parent method. 
*/ - ret = MixVideoFormatEnc::SetDynamicEncConfig(config_params,params_type); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_V( - "chainup parent method (set_dynamic_config) failed \n"); - return ret; - } - } - - - LOG_V( "End\n"); - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoFormatEnc_H264::_process_encode( MixBuffer * bufin, - MixIOVec * iovout) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus va_status = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; - VAContextID va_context; - ulong surface = 0; - uint16 width, height; - bool usingMixDataBuffer = FALSE; - - MixVideoFrame * tmp_frame; - uint8 *buf; - - VACodedBufferSegment *coded_seg = NULL; - int num_seg = 0; - uint total_size = 0; - uint size = 0; - uint status = 0; - bool slice_size_overflow = FALSE; - - if ((bufin == NULL) || (iovout == NULL)) { - LOG_E( - "bufin == NULL || iovout == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - va_display = this->va_display; - va_context = this->va_context; - width = this->picture_width; - height = this->picture_height; - - - LOG_I( "encoded_frames = %d\n", - this->encoded_frames); - LOG_I( "frame_num = %d\n", - this->frame_num); - LOG_I( "is_intra = %d\n", - this->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", - (uint) this->ci_frame_id); - - if (this->new_header_required) { - this->frame_num = 0; - } - - /* determine the picture type*/ - //if ((mix->encoded_frames % parent->intra_period) == 0) { - if (this->intra_period == 0) { - if (this->frame_num == 0) - this->is_intra = TRUE; - else - this->is_intra = FALSE; - } - else if ((this->frame_num % this->intra_period) == 0) { - this->is_intra = TRUE; - } else { - this->is_intra = FALSE; - } - - LOG_I( "is_intra_picture = %d\n", - this->is_intra); - - LOG_V( - "Get Surface from the pool\n"); - - /*currently we use one surface for source data, - * one for reference and one for reconstructed*/ - /*TODO, could be refined here*/ - - - - switch (this->buffer_mode) { - case MIX_BUFFER_UPSTREAM_ALLOC_CI: - { - //MixVideoFrame * frame = mix_videoframe_new(); - if (this->lookup_frame == NULL) - { - this->lookup_frame = mix_videoframe_new (); - if (this->lookup_frame == NULL) - { - LOG_E("mix_videoframe_new() failed!\n"); - ret = MIX_RESULT_NO_MEMORY; - goto CLEAN_UP; - } - } - - if (this->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto CLEAN_UP; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->ref_frame, this->lookup_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get reference surface from pool failed\n"); - goto CLEAN_UP; - } - } - - if (this->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto CLEAN_UP; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->rec_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get reconstructed surface from pool failed\n"); - goto CLEAN_UP; - } - } - - //mix_videoframe_unref (mix->cur_frame); - - if (this->need_display) { - this->cur_frame = NULL; - } - - if (this->cur_frame == NULL) - { - uint ci_idx; -#ifndef ANDROID - memcpy (&ci_idx, bufin->data, bufin->size); -#else - memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); -#endif - - LOG_I( - "surface_num = %d\n", 
this->surface_num); - LOG_I( - "ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > this->surface_num - 2) { - LOG_E( - "the CI frame idx is bigger than the CI frame number\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - - } - - - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, ci_idx); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto CLEAN_UP; - } - - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->cur_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - goto CLEAN_UP; - - } - } - - ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); - } - - /* - * end of CI buffer allocation mode - */ - - break; - case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: - break; - - case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: - - break; - - case MIX_BUFFER_SELF_ALLOC_SURFACE: - { - if (this->lookup_frame == NULL) - { - this->lookup_frame = mix_videoframe_new (); - if (this->lookup_frame == NULL) - { - LOG_E("mix_videoframe_new() failed!\n"); - ret = MIX_RESULT_NO_MEMORY; - goto CLEAN_UP; - } - } - - uint surface_idx = (uint) -1; //fixme, temp use a big value - uint idx = 0; - - LOG_I ("bufin->data = 0x%08x\n", bufin->data); - - for (idx = 0; idx < this->alloc_surface_cnt; idx++) { - LOG_I ("this->usrptr[%d] = 0x%08x\n", idx, this->usrptr[idx]); - - if (bufin->data == this->usrptr[idx]) - surface_idx = idx; - } - - LOG_I( - "surface_num = %d\n", this->surface_num); - LOG_I( - "surface_idx = %d\n", surface_idx); - - if (surface_idx > this->surface_num - 2) { - LOG_W( - "the Surface idx is too big, most likely the buffer passed in is not allocated by us\n"); - ret = MIX_RESULT_FAIL; - goto no_share_mode; - - } - - if (this->ref_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto CLEAN_UP; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->ref_frame, this->lookup_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get reference surface from pool failed\n"); - goto CLEAN_UP; - } - } - - if (this->rec_frame == NULL) - { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto CLEAN_UP; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->rec_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get reconstructed surface from pool failed\n"); - goto CLEAN_UP; - } - } - - //mix_videoframe_unref (mix->cur_frame); - - if (this->need_display) { - this->cur_frame = NULL; - } - - if (this->cur_frame == NULL) - { - - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, surface_idx); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "mix_videoframe_set_ci_frame_idx failed\n"); - goto CLEAN_UP; - } - - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->cur_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "get current working surface from pool failed\n"); - goto CLEAN_UP; - - } - } - - ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); - } - - break; - /* - * end of Self buffer allocation mode - */ - - case MIX_BUFFER_ALLOC_NORMAL: - { - -no_share_mode: - - LOG_V( - "We are NOT in share buffer mode\n"); - - if (this->ref_frame 
== NULL) - { - ret = mix_surfacepool_get(this->surfacepool, &this->ref_frame); - if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto CLEAN_UP; - } - } - - if (this->rec_frame == NULL) - { - ret = mix_surfacepool_get(this->surfacepool, &this->rec_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto CLEAN_UP; - } - } - - if (this->need_display) { - this->cur_frame = NULL; - } - - if (this->cur_frame == NULL) - { - ret = mix_surfacepool_get(this->surfacepool, &this->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_surfacepool_get\n"); - goto CLEAN_UP; - } - } - - LOG_V( "Get Surface Done\n"); - - - VAImage src_image; - uint8 *pvbuf; - uint8 *dst_y; - uint8 *dst_uv; - int i,j; - - LOG_V( - "map source data to surface\n"); - - ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed to mix_videoframe_get_frame_id\n"); - goto CLEAN_UP; - } - - - LOG_I( - "surface id = 0x%08x\n", (uint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); - //need to destroy - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDeriveImage\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); - - - va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed to vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - LOG_V( - "vaImage information\n"); - LOG_I( - "image->pitches[0] = %d\n", image->pitches[0]); - LOG_I( - "image->pitches[1] = %d\n", image->pitches[1]); - LOG_I( - "image->offsets[0] = %d\n", image->offsets[0]); - LOG_I( - "image->offsets[1] = %d\n", image->offsets[1]); - LOG_I( - "image->num_planes = %d\n", image->num_planes); - LOG_I( - "image->width = %d\n", image->width); - LOG_I( - "image->height = %d\n", image->height); - - LOG_I( - "input buf size = %d\n", bufin->size); - - uint8 *inbuf = bufin->data; - -#ifdef ANDROID -#define USE_SRC_FMT_NV12 -#endif - int offset_uv = width * height; - uint8 *inbuf_uv = inbuf + offset_uv; - int height_uv = height / 2; - int width_uv = width; - -#ifdef ANDROID - //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 - - dst_y = pvbuf + image->offsets[0]; - for (i = 0; i < height; i++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - -#ifdef USE_SRC_FMT_NV12 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i++) { - memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); - dst_uv += image->pitches[1]; - } -#else //USE_SRC_FMT_NV21 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i ++) { - for (j = 0; j < width_uv; j += 2) { - dst_uv[j] = inbuf_uv[j+1]; //u - dst_uv[j+1] = inbuf_uv[j]; //v - } - dst_uv += image->pitches[1]; - inbuf_uv += width_uv; - } -#endif - -#else - - if (this->raw_format == MIX_RAW_TARGET_FORMAT_YUV420) { - dst_y = pvbuf +image->offsets[0]; - - for (i = 0; i < height; i ++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = - inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; - } - dst_uv += image->pitches[1]; - } - } - - else if (this->raw_format == MIX_RAW_TARGET_FORMAT_NV12) { - - dst_y = pvbuf 
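// In the NV21 path above, the interleaved chroma plane is V-first
// (V0 U0 V1 U1 ...), so each byte pair is swapped while copying to
// produce NV12's U-first order (U0 V0 U1 V1 ...); the Y plane copies
// straight through in both layouts.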
+ image->offsets[0]; - for (i = 0; i < height; i++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i++) { - memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); - dst_uv += image->pitches[1]; - } - } - else { - LOG_E("Raw format not supported\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - -#endif //USE_SRC_FMT_YUV420 - - va_status = vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaDestroyImage\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - LOG_V( - "Map source data to surface done\n"); - } - break; - default: - break; - - } - - /* - * Start encoding process - **/ - - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(uint)va_context); - LOG_I( "surface = 0x%08x\n",(uint)surface); - LOG_I( "va_display = 0x%08x\n",(uint)va_display); - - - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - ret = _send_encode_command (); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed SendEncodeCommand\n"); - goto CLEAN_UP; - } - - - if ((this->va_rcmode == VA_RC_NONE) || this->encoded_frames == 0) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - } - - LOG_V( "vaEndPicture\n"); - - if (this->encoded_frames == 0) { - this->encoded_frames ++; - this->frame_num ++; - this->last_coded_buf = this->coded_buf[this->coded_buf_index]; - this->coded_buf_index ++; - this->coded_buf_index %=2; - - this->last_frame = this->cur_frame; - - - /* determine the picture type*/ - //if ((mix->encoded_frames % parent->intra_period) == 0) { - if (this->intra_period == 0) { - this->is_intra = FALSE; //Here mix->frame_num is bigger than 0 - } - else if ((this->frame_num % this->intra_period) == 0) { - this->is_intra = TRUE; - } else { - this->is_intra = FALSE; - } - - tmp_frame = this->rec_frame; - this->rec_frame= this->ref_frame; - this->ref_frame = tmp_frame; - - - } - - - LOG_V( "vaSyncSurface\n"); - - va_status = vaSyncSurface(va_display, this->last_frame->frame_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaSyncSurface\n"); - - //return MIX_RESULT_FAIL; - } - - LOG_V( - "Start to get encoded data\n"); - - /*get encoded data from the VA buffer*/ - va_status = vaMapBuffer (va_display, this->last_coded_buf, (void **)&buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - - - - coded_seg = (VACodedBufferSegment *)buf; - num_seg = 1; - - while (1) { - total_size += coded_seg->size; - - status = coded_seg->status; - - if (!slice_size_overflow) { - - slice_size_overflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK; - } - - if (coded_seg->next == NULL) - break; - - coded_seg = (VACodedBufferSegment *)coded_seg->next; - num_seg ++; - } - - LOG_I ("segment number = %d\n", num_seg); - -#if 0 - // first 4 bytes is the size of the buffer - memcpy (&(iovout->data_size), (void*)buf, 4); - //size = (uint*) buf; - - uint size = iovout->data_size + 100; -#endif - - iovout->data_size = total_size; - size = 
total_size + 100; - - iovout->buffer_size = size; - - //We will support two buffer mode, one is application allocates the buffer and passes to encode, - //the other is encode allocate memory - - - if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. - usingMixDataBuffer = TRUE; - //iovout->data = g_malloc (size); // In case we have lots of 0x000001 start code, and we replace them with 4 bytes length prefixed - iovout->data = new uchar[size]; - if (iovout->data == NULL) { - LOG_E( "iovout->data == NULL\n"); - ret = MIX_RESULT_NO_MEMORY; - goto CLEAN_UP; - } - } - - coded_seg = (VACodedBufferSegment *)buf; - total_size = 0; - - if (this->delimiter_type == MIX_DELIMITER_ANNEXB) { - - while (1) { - - memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - - coded_seg = (VACodedBufferSegment *)coded_seg->next; - } - - //memcpy (iovout->data, buf + 16, iovout->data_size); //parload is started from 17th byte - //size = iovout->data_size; - - } else { - - uint pos = 0; - uint zero_byte_count = 0; - uint prefix_length = 0; - uint8 nal_unit_type = 0; - //uint8 * payload = buf + 16; - uint8 * payload = ( uint8 *)coded_seg->buf; - - while ((payload[pos++] == 0x00)) { - zero_byte_count ++; - if (pos >= coded_seg->size) //to make sure the buffer to be accessed is valid - break; - } - - nal_unit_type = (uint8)(payload[pos] & 0x1f); - prefix_length = zero_byte_count + 1; - - /*prefix_length won't bigger than the total size, don't need to check here*/ - - LOG_I ("nal_unit_type = %d\n", nal_unit_type); - LOG_I ("zero_byte_count = %d\n", zero_byte_count); - LOG_I ("data_size = %d\n", iovout->data_size); - - size = iovout->data_size; - - if ((payload [pos - 1] & 0x01) && this->slice_num == 1 && nal_unit_type == 1 && num_seg == 1) { - iovout->data[0] = ((size - prefix_length) >> 24) & 0xff; - iovout->data[1] = ((size - prefix_length) >> 16) & 0xff; - iovout->data[2] = ((size - prefix_length) >> 8) & 0xff; - iovout->data[3] = (size - prefix_length) & 0xff; - // use 4 bytes to indicate the NALU length - //memcpy (iovout->data + 4, buf + 16 + prefix_length, size - prefix_length); - memcpy (iovout->data + 4, coded_seg->buf + prefix_length, size - prefix_length); - LOG_V ("We only have one start code, copy directly\n"); - - iovout->data_size = size - prefix_length + 4; - } - else { - - if (num_seg == 1) { - ret = _AnnexB_to_length_prefixed ( (uint8*)coded_seg->buf, coded_seg->size, iovout->data, &size); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed AnnexBtoLengthPrefixed\n"); - goto CLEAN_UP; - } - - } else { - - uint8 * tem_buf = NULL; - //tem_buf = g_malloc (size); - tem_buf = new uint8[size]; - if (tem_buf == NULL) { - LOG_E( "tem_buf == NULL\n"); - ret = MIX_RESULT_NO_MEMORY; - goto CLEAN_UP; - } - - while (1) { - - memcpy (tem_buf + total_size, coded_seg->buf, coded_seg->size); - total_size += coded_seg->size; - - if (coded_seg->next == NULL) - break; - - coded_seg = (VACodedBufferSegment *)coded_seg->next; - } - - ret = _AnnexB_to_length_prefixed (tem_buf, iovout->data_size, iovout->data, &size); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ("Failed AnnexBtoLengthPrefixed\n"); - //g_free (tem_buf); - delete[] tem_buf; - goto CLEAN_UP; - } - - //g_free (tem_buf); - delete[] tem_buf; - } - iovout->data_size = size; - } - } - - LOG_I( - "out size is = %d\n", iovout->data_size); - - va_status = vaUnmapBuffer (va_display, this->last_coded_buf); - if (va_status != 
VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - LOG_V( "get encoded data done\n"); - - if (!((this->va_rcmode == VA_RC_NONE) || this->encoded_frames == 1)) { - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - } - - if (this->encoded_frames == 1) { - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - ret = _send_encode_command (); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E ( - "Failed SendEncodeCommand\n"); - goto CLEAN_UP; - } - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - - } - - VASurfaceStatus va_surface_status; - - /*query the status of current surface*/ - va_status = vaQuerySurfaceStatus(va_display, surface, &va_surface_status); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed vaQuerySurfaceStatus\n"); - ret = MIX_RESULT_FAIL; - goto CLEAN_UP; - } - this->pic_skipped = va_surface_status & VASurfaceSkipped; - - if (this->need_display) { - ret = mix_videoframe_set_sync_flag(this->cur_frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - goto CLEAN_UP; - } - - ret = mix_framemanager_enqueue(this->framemgr, this->cur_frame); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed mix_framemanager_enqueue\n"); - goto CLEAN_UP; - } - } - - - /*update the reference surface and reconstructed surface */ - if (!this->pic_skipped) { - tmp_frame = this->rec_frame; - this->rec_frame= this->ref_frame; - this->ref_frame = tmp_frame; - } - -#if 0 - if (mix->ref_frame != NULL) - mix_videoframe_unref (mix->ref_frame); - mix->ref_frame = mix->rec_frame; - - mix_videoframe_unref (mix->cur_frame); -#endif - - this->encoded_frames ++; - this->frame_num ++; - this->last_coded_buf = this->coded_buf[this->coded_buf_index]; - this->coded_buf_index ++; - this->coded_buf_index %=2; - this->last_frame = this->cur_frame; - -//#ifdef ANDROID -#if 1 - if (this->last_mix_buffer) { - LOG_V("calls to mix_buffer_unref \n"); - LOG_V("refcount = %d\n", MIX_PARAMS(this->last_mix_buffer)->GetRefCount()); - mix_buffer_unref(this->last_mix_buffer); - } - - LOG_V("ref the current bufin\n"); - - this->last_mix_buffer = mix_buffer_ref(bufin); -#endif - - if (!(this->need_display)) { - mix_videoframe_unref (this->cur_frame); - this->cur_frame = NULL; - } - -CLEAN_UP: - - if (ret != MIX_RESULT_SUCCESS) { - if (iovout->data && (usingMixDataBuffer == TRUE)) { - //g_free (iovout->data); - delete[] iovout->data; - iovout->data = NULL; - usingMixDataBuffer = FALSE; - } - } - - LOG_V( "end\n"); - - /* - * The error level of MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW - * is lower than other errors, so if any other errors happen, we won't - * return slice size overflow - */ - if (ret == MIX_RESULT_SUCCESS && slice_size_overflow) - ret = MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW; - - return ret; - -} - -MIX_RESULT -MixVideoFormatEnc_H264::_AnnexB_to_length_prefixed ( - uint8 * bufin, uint bufin_len, uint8* bufout, uint *bufout_len) { - - - uint pos = 0; - uint last_pos = 0; - - uint zero_byte_count = 0; - uint nal_size = 0; - uint prefix_length = 0; - uint size_copied = 0; - uint leading_zero_count = 0; - - if (bufin == NULL || bufout == NULL 
|| bufout_len == NULL) { - - LOG_E( - "bufin == NULL || bufout == NULL || bufout_len = NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - if (bufin_len <= 0 || *bufout_len <= 0) { - LOG_E( - "bufin_len <= 0 || *bufout_len <= 0\n"); - return MIX_RESULT_FAIL; - } - - LOG_V ("Begin\n"); - - while ((bufin[pos++] == 0x00)) { - zero_byte_count ++; - if (pos >= bufin_len) //to make sure the buffer to be accessed is valid - break; - } - - if (bufin[pos - 1] != 0x01 || zero_byte_count < 2) - { - LOG_E("The stream is not AnnexB format \n"); - return MIX_RESULT_FAIL; ; //not AnnexB, we won't process it - } - - zero_byte_count = 0; - last_pos = pos; - - while (pos < bufin_len) { - - while (bufin[pos++] == 0) { - zero_byte_count ++; - if (pos >= bufin_len) //to make sure the buffer to be accessed is valid - break; - } - - if (bufin[pos - 1] == 0x01 && zero_byte_count >= 2) { - if (zero_byte_count == 2) { - prefix_length = 3; - } - else { - prefix_length = 4; - leading_zero_count = zero_byte_count - 3; - } - - LOG_I("leading_zero_count = %d\n", leading_zero_count); - - nal_size = pos - last_pos - prefix_length - leading_zero_count; - if (nal_size < 0) { - LOG_E ("something wrong in the stream\n"); - return MIX_RESULT_FAIL; //not AnnexB, we won't process it - } - - if (*bufout_len < (size_copied + nal_size + 4)) { - LOG_E ("The length of destination buffer is too small\n"); - return MIX_RESULT_FAIL; - } - - LOG_I ("nal_size = %d\n", nal_size); - - /*We use 4 bytes length prefix*/ - bufout [size_copied] = nal_size >> 24 & 0xff; - bufout [size_copied + 1] = nal_size >> 16 & 0xff; - bufout [size_copied + 2] = nal_size >> 8 & 0xff; - bufout [size_copied + 3] = nal_size & 0xff; - - size_copied += 4; //4 bytes length prefix - memcpy (bufout + size_copied, bufin + last_pos, nal_size); - size_copied += nal_size; - - LOG_I ("size_copied = %d\n", size_copied); - - zero_byte_count = 0; - leading_zero_count = 0; - last_pos = pos; - } - - else if (pos == bufin_len) { - - LOG_V ("Last NALU in this frame\n"); - - nal_size = pos - last_pos; - - if (*bufout_len < (size_copied + nal_size + 4)) { - LOG_E ("The length of destination buffer is too small\n"); - return MIX_RESULT_FAIL; - } - - /*We use 4 bytes length prefix*/ - bufout [size_copied] = nal_size >> 24 & 0xff; - bufout [size_copied + 1] = nal_size >> 16 & 0xff; - bufout [size_copied + 2] = nal_size >> 8 & 0xff; - bufout [size_copied + 3] = nal_size & 0xff; - - size_copied += 4; //4 bytes length prefix - memcpy (bufout + size_copied, bufin + last_pos, nal_size); - size_copied += nal_size; - - LOG_I ("size_copied = %d\n", size_copied); - } - - else { - zero_byte_count = 0; - leading_zero_count = 0; - } - - } - - if (size_copied != *bufout_len) { - *bufout_len = size_copied; - } - - LOG_V ("End\n"); - - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT -MixVideoFormatEnc_H264::_send_encode_command () { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - - LOG_V( "Begin\n"); - - - //if (mix->encoded_frames == 0 || parent->new_header_required) { - if (this->frame_num == 0 || this->new_header_required) { - ret = _send_seq_params (); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed SendSeqParams\n"); - return MIX_RESULT_FAIL; - } - - this->new_header_required = FALSE; //Set to require new header filed to FALSE - } - - if (this->render_mss_required && this->max_slice_size != 0) { - ret = _send_max_slice_size(); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed SendMaxSliceSize\n"); - return MIX_RESULT_FAIL; - } - - this->render_mss_required = FALSE; - } - - if 
(this->render_bitrate_required) { - ret = _send_dynamic_bitrate(); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed SendDynamicBitrate\n"); - return MIX_RESULT_FAIL; - } - - this->render_bitrate_required = FALSE; - } - - if (this->render_AIR_required && - (this->refresh_type == MIX_VIDEO_AIR || this->refresh_type == MIX_VIDEO_BOTH)) - { - - ret = _send_AIR (); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed SendAIR\n"); - return MIX_RESULT_FAIL; - } - - this->render_AIR_required = FALSE; - } - - if (this->render_framerate_required) { - - ret = _send_dynamic_framerate (); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed SendDynamicFramerate\n"); - return MIX_RESULT_FAIL; - } - - this->render_framerate_required = FALSE; - } - - ret = _send_picture_parameter (); - - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed SendPictureParameter\n"); - return MIX_RESULT_FAIL; - } - - ret = _send_slice_parameter (); - if (ret != MIX_RESULT_SUCCESS) - { - LOG_E( - "Failed SendSliceParameter\n"); - return MIX_RESULT_FAIL; - } - - - LOG_V( "End\n"); - - - return MIX_RESULT_SUCCESS; - -} - - -MIX_RESULT -MixVideoFormatEnc_H264::_send_dynamic_bitrate () { - VAStatus va_status; - - LOG_V( "Begin\n\n"); - - if (this->va_rcmode != MIX_RATE_CONTROL_VCM) { - - LOG_W ("Not in VCM mode, but call SendDynamicBitrate\n"); - return VA_STATUS_SUCCESS; - } - - VAEncMiscParameterBuffer * misc_enc_param_buf; - VAEncMiscParameterRateControl * bitrate_control_param; - VABufferID misc_param_buffer_id; - - va_status = vaCreateBuffer(this->va_display, this->va_context, - VAEncMiscParameterBufferType, - sizeof(VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl), - 1, NULL, - &misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaMapBuffer (this->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - misc_enc_param_buf->type = VAEncMiscParameterTypeRateControl; - bitrate_control_param = (VAEncMiscParameterRateControl *)misc_enc_param_buf->data; - - bitrate_control_param->bits_per_second = this->bitrate; - bitrate_control_param->initial_qp = this->initial_qp; - bitrate_control_param->min_qp = this->min_qp; - bitrate_control_param->target_percentage = this->target_percentage; - bitrate_control_param->window_size = this->window_size; - - va_status = vaUnmapBuffer(this->va_display, misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - - va_status = vaRenderPicture(this->va_display, this->va_context, - &misc_param_buffer_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT -MixVideoFormatEnc_H264::_send_max_slice_size () { - VAStatus va_status; - - LOG_V( "Begin\n\n"); - - - if (this->va_rcmode != MIX_RATE_CONTROL_VCM) { - - LOG_W ("Not in VCM mode, but call send_max_slice_size\n"); - return VA_STATUS_SUCCESS; - } - - VAEncMiscParameterBuffer * misc_enc_param_buf; - VAEncMiscParameterMaxSliceSize * max_slice_size_param; - VABufferID misc_param_buffer_id; - - va_status = vaCreateBuffer(this->va_display, this->va_context, - VAEncMiscParameterBufferType, - sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterMaxSliceSize), - 1, NULL, - &misc_param_buffer_id); - if 
(va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - - va_status = vaMapBuffer (this->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - misc_enc_param_buf->type = VAEncMiscParameterTypeMaxSliceSize; - max_slice_size_param = (VAEncMiscParameterMaxSliceSize *)misc_enc_param_buf->data; - - max_slice_size_param->max_slice_size = this->max_slice_size; - - va_status = vaUnmapBuffer(this->va_display, misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - LOG_I( "max slice size = %d\n", - max_slice_size_param->max_slice_size); - - va_status = vaRenderPicture(this->va_display, this->va_context, - &misc_param_buffer_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT -MixVideoFormatEnc_H264::_send_dynamic_framerate () { - - VAStatus va_status; - - - if (this->va_rcmode != MIX_RATE_CONTROL_VCM) { - - LOG_W ("Not in VCM mode, but call SendDynamicFramerate\n"); - return VA_STATUS_SUCCESS; - } - - VAEncMiscParameterBuffer * misc_enc_param_buf; - VAEncMiscParameterFrameRate * framerate_param; - VABufferID misc_param_buffer_id; - - va_status = vaCreateBuffer(this->va_display, this->va_context, - VAEncMiscParameterBufferType, - sizeof(misc_enc_param_buf) + sizeof(VAEncMiscParameterFrameRate), - 1, NULL, - &misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaMapBuffer (this->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - misc_enc_param_buf->type = VAEncMiscParameterTypeFrameRate; - framerate_param = (VAEncMiscParameterFrameRate *)misc_enc_param_buf->data; - framerate_param->framerate = - (unsigned int) (this->frame_rate_num + this->frame_rate_denom /2 ) / this->frame_rate_denom; - - va_status = vaUnmapBuffer(this->va_display, misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(this->va_display, this->va_context, - &misc_param_buffer_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - - LOG_I( "frame rate = %d\n", - framerate_param->framerate); - - return MIX_RESULT_SUCCESS; - -} - -MIX_RESULT -MixVideoFormatEnc_H264::_send_AIR () { - - VAStatus va_status; - - LOG_V( "Begin\n\n"); - - if (this->va_rcmode != MIX_RATE_CONTROL_VCM) { - - LOG_W ("Not in VCM mode, but call send_AIR\n"); - return VA_STATUS_SUCCESS; - } - - VAEncMiscParameterBuffer * misc_enc_param_buf; - VAEncMiscParameterAIR * air_param; - VABufferID misc_param_buffer_id; - - va_status = vaCreateBuffer(this->va_display, this->va_context, - VAEncMiscParameterBufferType, - sizeof(misc_enc_param_buf) + sizeof(VAEncMiscParameterAIR), - 1, NULL, - &misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaMapBuffer (this->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - 
"Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - misc_enc_param_buf->type = VAEncMiscParameterTypeAIR; - air_param = (VAEncMiscParameterAIR *)misc_enc_param_buf->data; - - air_param->air_auto = this->air_params.air_auto; - air_param->air_num_mbs = this->air_params.air_MBs; - air_param->air_threshold = this->air_params.air_threshold; - - va_status = vaUnmapBuffer(this->va_display, misc_param_buffer_id); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(this->va_display, this->va_context, - &misc_param_buffer_id, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - - LOG_I( "air_threshold = %d\n", - air_param->air_threshold); - - return MIX_RESULT_SUCCESS; -} - -int -MixVideoFormatEnc_H264::_calc_level(int nummbs) -{ - int level = 30; - - if (nummbs < 3600) - { - level = 30; - } - else if (nummbs < 5120) - { - level = 31; - } - else if (nummbs < 8192) - { - level = 32; - } - else if (nummbs < 8704) - { - level = 40; - } - else if (nummbs < 22080) - { - level = 42; - } - else if (nummbs < 36864) - { - level = 50; - } - else - { - level = 51; - } - return level; -} - -MIX_RESULT -MixVideoFormatEnc_H264::_send_seq_params () { - VAStatus va_status; - VAEncSequenceParameterBufferH264 h264_seq_param; - VAEncMiscParameterRateControl rc_misc_param; - VAEncMiscParameterFrameRate frame_rate_param; - int level; - - LOG_V( "Begin\n\n"); - - /*set up the sequence params for HW*/ -// h264_seq_param.level_idc = 30; //TODO, hard code now - h264_seq_param.intra_period = this->intra_period; - h264_seq_param.intra_idr_period = this->idr_interval; - h264_seq_param.picture_width_in_mbs = (this->picture_width + 15) / 16; - h264_seq_param.picture_height_in_mbs = (this->picture_height + 15) / 16; - - level = _calc_level( - h264_seq_param.picture_width_in_mbs * h264_seq_param.picture_height_in_mbs); - - h264_seq_param.level_idc = level; - - h264_seq_param.bits_per_second = this->bitrate; - frame_rate_param.framerate = - (unsigned int) (this->frame_rate_num + this->frame_rate_denom /2 ) / this->frame_rate_denom; - rc_misc_param.initial_qp = this->initial_qp; - rc_misc_param.min_qp = this->min_qp; - rc_misc_param.basic_unit_size = this->basic_unit_size; //for rate control usage - h264_seq_param.intra_period = this->intra_period; - h264_seq_param.vui_parameters_present_flag = this->vui_flag; - //h264_seq_param.vui_flag = 248; - //h264_seq_param.seq_parameter_set_id = 176; - - // This is a temporary fix suggested by Binglin for bad encoding quality issue - h264_seq_param.max_num_ref_frames = 1; // TODO: We need a long term design for this field - - LOG_V( - "===h264 sequence params===\n"); - - LOG_I( "seq_parameter_set_id = %d\n", - (uint)h264_seq_param.seq_parameter_set_id); - LOG_I( "level_idc = %d\n", - (uint)h264_seq_param.level_idc); - LOG_I( "intra_period = %d\n", - h264_seq_param.intra_period); - LOG_I( "idr_interval = %d\n", - h264_seq_param.intra_idr_period); - LOG_I( "picture_width_in_mbs = %d\n", - h264_seq_param.picture_width_in_mbs); - LOG_I( "picture_height_in_mbs = %d\n", - h264_seq_param.picture_height_in_mbs); - LOG_I( "bitrate = %d\n", - h264_seq_param.bits_per_second); - LOG_I( "frame_rate = %d\n", - frame_rate_param.frame_rate); - LOG_I( "initial_qp = %d\n", - rc_misc_param.initial_qp); - LOG_I( "min_qp = %d\n", - rc_misc_param.min_qp); - LOG_I( "basic_unit_size = %d\n", - rc_misc_param.basic_unit_size); - LOG_I( "vui_flag 
= %d\n\n", - h264_seq_param.vui_parameters_present_flag); - - va_status = vaCreateBuffer(this->va_display, this->va_context, - VAEncSequenceParameterBufferType, - sizeof(h264_seq_param), - 1, &h264_seq_param, - &this->seq_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaCreateBuffer(this->va_display, this->va_context, - VAEncMiscParameterBufferType, - sizeof(rc_misc_param), - 1, &h264_seq_param, - &this->rc_param_buf); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture(this->va_display, this->va_context, - &this->seq_param_buf, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - va_status = vaRenderPicture(this->va_display, this->va_context, - &this->rc_param_buf, 1); - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - return MIX_RESULT_SUCCESS; - -} - - -MIX_RESULT -MixVideoFormatEnc_H264::_send_picture_parameter () { - VAStatus va_status; - VAEncPictureParameterBufferH264 h264_pic_param; - - LOG_V( "Begin\n\n"); - - - /*set picture params for HW*/ - h264_pic_param.ReferenceFrames[0].picture_id= this->ref_frame->frame_id; - h264_pic_param.CurrPic.picture_id= this->rec_frame->frame_id; - h264_pic_param.coded_buf = this->coded_buf[this->coded_buf_index]; - //h264_pic_param.picture_width = this->picture_width; - //h264_pic_param.picture_height = this->picture_height; - h264_pic_param.last_picture = 0; - - - LOG_V( - "======h264 picture params======\n"); - LOG_I( "reference_picture = 0x%08x\n", - h264_pic_param.ReferenceFrames[0].picture_id); - LOG_I( "reconstructed_picture = 0x%08x\n", - h264_pic_param.CurrPic.picture_id); - LOG_I( "coded_buf_index = %d\n", - this->coded_buf_index); - LOG_I( "coded_buf = 0x%08x\n", - h264_pic_param.coded_buf); - /* - LOG_I( "picture_width = %d\n", - h264_pic_param.picture_width); - LOG_I( "picture_height = %d\n\n", - h264_pic_param.picture_height); - */ - va_status = vaCreateBuffer(this->va_display, this->va_context, - VAEncPictureParameterBufferType, - sizeof(h264_pic_param), - 1,&h264_pic_param, - &this->pic_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - - va_status = vaRenderPicture(this->va_display, this->va_context, - &this->pic_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; - -} - - -MIX_RESULT -MixVideoFormatEnc_H264::_send_slice_parameter () { - VAStatus va_status; - - uint slice_num; - uint slice_height; - uint slice_index; - uint slice_height_in_mb; - uint max_slice_num; - uint min_slice_num; - - int actual_slice_height_in_mb; - int start_row_in_mb; - int modulus; - - LOG_V( "Begin\n\n"); - - - max_slice_num = (this->picture_height + 15) / 16; - min_slice_num = 1; - - if (this->is_intra) { - slice_num = this->I_slice_num; - } - else { - slice_num = this->P_slice_num; - } - - if (slice_num < min_slice_num) { - LOG_W ("Slice Number is too small"); - slice_num = min_slice_num; - } - - if (slice_num > max_slice_num) { - LOG_W ("Slice Number is too big"); - slice_num = max_slice_num; - } - - this->slice_num = slice_num; - modulus = max_slice_num % slice_num; - slice_height_in_mb = (max_slice_num - modulus) / slice_num ; 
- -#if 1 - va_status = vaCreateBuffer (this->va_display, this->va_context, - VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), - slice_num, NULL, - &this->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - VAEncSliceParameterBuffer *slice_param, *current_slice; - - va_status = vaMapBuffer(this->va_display, - this->slice_param_buf, - (void **)&slice_param); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaMapBuffer\n"); - return MIX_RESULT_FAIL; - } - - current_slice = slice_param; - start_row_in_mb = 0; - for (slice_index = 0; slice_index < slice_num; slice_index++) { - current_slice = slice_param + slice_index; - - actual_slice_height_in_mb = slice_height_in_mb; - if (slice_index < modulus) { - actual_slice_height_in_mb ++; - } - - // starting MB row number for this slice - current_slice->start_row_number = start_row_in_mb; - // slice height measured in MB - current_slice->slice_height = actual_slice_height_in_mb; - current_slice->slice_flags.bits.is_intra = this->is_intra; - current_slice->slice_flags.bits.disable_deblocking_filter_idc - = this->disable_deblocking_filter_idc; - - // This is a temporary fix suggested by Binglin for bad encoding quality issue - current_slice->slice_flags.bits.uses_long_term_ref = 0; // TODO: We need a long term design for this field - current_slice->slice_flags.bits.is_long_term_ref = 0; // TODO: We need a long term design for this field - - LOG_V( - "======h264 slice params======\n"); - - LOG_I( "slice_index = %d\n", - (int) slice_index); - LOG_I( "start_row_number = %d\n", - (int) current_slice->start_row_number); - LOG_I( "slice_height_in_mb = %d\n", - (int) current_slice->slice_height); - LOG_I( "slice.is_intra = %d\n", - (int) current_slice->slice_flags.bits.is_intra); - LOG_I( - "disable_deblocking_filter_idc = %d\n\n", - (int) this->disable_deblocking_filter_idc); - - start_row_in_mb += actual_slice_height_in_mb; - } - - va_status = vaUnmapBuffer(this->va_display, this->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaUnmapBuffer\n"); - return MIX_RESULT_FAIL; - } -#endif - -#if 0 - VAEncSliceParameterBuffer slice_param; - slice_index = 0; - slice_height_in_mb = slice_height / 16; - slice_param.start_row_number = 0; - slice_param.slice_height = slice_height / 16; - slice_param.slice_flags.bits.is_intra = mix->is_intra; - slice_param.slice_flags.bits.disable_deblocking_filter_idc - = mix->disable_deblocking_filter_idc; - - va_status = vaCreateBuffer (parent->va_display, parent->va_context, - VAEncSliceParameterBufferType, - sizeof(slice_param), - slice_num, &slice_param, - &mix->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } -#endif - - va_status = vaRenderPicture(this->va_display, this->va_context, - &this->slice_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) - { - LOG_E( - "Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - - LOG_V( "end\n"); - - return MIX_RESULT_SUCCESS; -} - - - diff --git a/mix_video/src/mixvideoformatenc_h264.h b/mix_video/src/mixvideoformatenc_h264.h deleted file mode 100644 index c5e4412..0000000 --- a/mix_video/src/mixvideoformatenc_h264.h +++ /dev/null @@ -1,142 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. 
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEOFORMATENC_H264_H__ -#define __MIX_VIDEOFORMATENC_H264_H__ - -#include "mixvideoformatenc.h" -#include "mixvideoframe_private.h" - -#define MIX_VIDEO_ENC_H264_SURFACE_NUM 20 - -#define min(X,Y) (((X) < (Y)) ? (X) : (Y)) -#define max(X,Y) (((X) > (Y)) ? (X) : (Y)) - -/* - * Type macros. - */ -#define MIX_VIDEOFORMATENC_H264(obj) (reinterpret_cast(obj)) -#define MIX_IS_VIDEOFORMATENC_H264(obj) ((NULL !=MIX_VIDEOFORMATENC_H264(obj)) ? TRUE : FALSE) - -class MixVideoFormatEnc_H264 : public MixVideoFormatEnc { - -public: - MixVideoFormatEnc_H264(); - ~MixVideoFormatEnc_H264(); - - virtual MIX_RESULT Initialize( - MixVideoConfigParamsEnc* config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - MixUsrReqSurfacesInfo * requested_surface_info, - VADisplay va_display); - - virtual MIX_RESULT Encode( - MixBuffer * bufin[], int bufincnt, - MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params); - - virtual MIX_RESULT Flush(); - - virtual MIX_RESULT Deinitialize(); - - virtual MIX_RESULT GetMaxEncodedBufSize (uint *max_size); - - virtual MIX_RESULT SetDynamicEncConfig ( - MixVideoConfigParamsEnc * config_params, MixEncParamsType params_type); - -private: - /* Local Methods */ - MIX_RESULT _process_encode (MixBuffer * bufin, MixIOVec * iovout); - MIX_RESULT _AnnexB_to_length_prefixed ( - uint8 * bufin, uint bufin_len, - uint8* bufout, uint *bufout_len); - MIX_RESULT _send_encode_command (); - MIX_RESULT _send_dynamic_bitrate (); - MIX_RESULT _send_max_slice_size (); - MIX_RESULT _send_dynamic_framerate (); - MIX_RESULT _send_AIR (); - MIX_RESULT _send_seq_params (); - MIX_RESULT _send_picture_parameter (); - MIX_RESULT _send_slice_parameter (); - int _calc_level(int nummbs); - - -public: - VABufferID coded_buf[2]; - VABufferID last_coded_buf; - VABufferID seq_param_buf; - VABufferID rc_param_buf; - VABufferID pic_param_buf; - VABufferID slice_param_buf; - //VASurfaceID * ci_shared_surfaces; - VASurfaceID * shared_surfaces; - VASurfaceID * surfaces; - uint surface_num; - uint shared_surfaces_cnt; - uint precreated_surfaces_cnt; - - MixVideoFrame *cur_frame; //current input frame to be encoded; - MixVideoFrame *ref_frame; //reference frame - MixVideoFrame *rec_frame; //reconstructed frame; - MixVideoFrame *last_frame; //last frame; - MixVideoFrame *lookup_frame; - MixBuffer *last_mix_buffer; - - uint basic_unit_size; //for rate control - uint disable_deblocking_filter_idc; - uint8 vui_flag; - 
MixDelimiterType delimiter_type; - uint idr_interval; - uint slice_num; - uint I_slice_num; - uint P_slice_num; - uint va_rcmode_h264; - - uint encoded_frames; - uint frame_num; - bool pic_skipped; - - bool is_intra; - - uint coded_buf_size; - uint coded_buf_index; - uint8 ** usrptr; - uint alloc_surface_cnt; - -}; - - - -/** - * mix_videoformatenc_h264_new: - * @returns: A newly allocated instance of #MixVideoFormatEnc_H264 - * - * Use this method to create new instance of #MixVideoFormatEnc_H264 - */ -MixVideoFormatEnc_H264 *mix_videoformatenc_h264_new(void); - -/** - * mix_videoformatenc_h264_ref: - * @mix: object to add reference - * @returns: the MixVideoFormatEnc_H264 instance where reference count has been increased. - * - * Add reference count. - */ -MixVideoFormatEnc_H264 *mix_videoformatenc_h264_ref(MixVideoFormatEnc_H264 * mix); - -/** - * mix_videoformatenc_h264_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -MixVideoFormatEnc_H264 *mix_videoformatenc_h264_unref(MixVideoFormatEnc_H264 * mix); - -#endif /* __MIX_VIDEOFORMATENC_H264_H__ */ diff --git a/mix_video/src/mixvideoformatenc_mpeg4.cpp b/mix_video/src/mixvideoformatenc_mpeg4.cpp deleted file mode 100644 index a608ef5..0000000 --- a/mix_video/src/mixvideoformatenc_mpeg4.cpp +++ /dev/null @@ -1,1664 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
- */ - -#include -#include - -#include "mixvideolog.h" - -#include "mixvideoformatenc_mpeg4.h" -#include "mixvideoconfigparamsenc_mpeg4.h" -#include - -#undef SHOW_SRC - -#ifdef SHOW_SRC -Window win = 0; -#endif /* SHOW_SRC */ - -MixVideoFormatEnc_MPEG4::MixVideoFormatEnc_MPEG4() - :shared_surfaces(NULL) - ,surfaces(NULL) - ,surface_num(0) - ,shared_surfaces_cnt(0) - ,precreated_surfaces_cnt(0) - ,cur_frame(NULL) - ,ref_frame(NULL) - ,rec_frame(NULL) - ,lookup_frame(NULL) - ,last_mix_buffer(NULL) - ,encoded_frames(0) - ,pic_skipped(FALSE) - ,is_intra(TRUE) - ,coded_buf_size(0) - ,coded_buf_index(0) - ,usrptr(NULL) { -} - -MixVideoFormatEnc_MPEG4::~MixVideoFormatEnc_MPEG4() { -} - -MixVideoFormatEnc_MPEG4 * -mix_videoformatenc_mpeg4_new(void) { - return new MixVideoFormatEnc_MPEG4(); -} - -MixVideoFormatEnc_MPEG4 * -mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix) { - if (NULL != mix) { - mix->Ref(); - return mix; - } - else { - return NULL; - } -} - -MixVideoFormatEnc_MPEG4 * -mix_videoformatenc_mpeg4_unref(MixVideoFormatEnc_MPEG4 * mix) { - if (NULL!=mix) - if (NULL != mix->Unref()) - return mix; - else - return NULL; - else - return NULL; -} - - -/*MPEG-4:2 vmethods implementation */ -MIX_RESULT MixVideoFormatEnc_MPEG4::Initialize( - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - MixUsrReqSurfacesInfo * requested_surface_info, - VADisplay va_display ) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoConfigParamsEncMPEG4 * config_params_enc_mpeg4; - - VAStatus va_status = VA_STATUS_SUCCESS; - VASurfaceID * surfaces = NULL; - - int va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - int va_num_profiles, va_num_entrypoints; - - VAProfile *va_profiles = NULL; - VAEntrypoint *va_entrypoints = NULL; - VAConfigAttrib va_attrib[2]; - uint index; - uint max_size = 0; - - /* - * Different MIX buffer mode will have different surface handling approach - */ - uint normal_surfaces_cnt = 2; - - /* - * shared_surfaces_cnt is for upstream buffer allocation case - */ - uint shared_surfaces_cnt = 0; - - /* - * precreated_surfaces_cnt is for self buffer allocation case - */ - uint precreated_surfaces_cnt = 0; - - MixCISharedBufferInfo * ci_info = NULL; - - - /*frame_mgr and input_buf_pool is reservered for future use*/ - if (config_params_enc == NULL || va_display == NULL || requested_surface_info == NULL) { - LOG_E("config_params_enc == NULL || va_display == NULL || requested_surface_info == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - /* - * Check more for requested_surface_info - */ - if (requested_surface_info->surface_cnt != 0 && - (requested_surface_info->surface_allocated == NULL || requested_surface_info->usrptr == NULL)) { - LOG_E("surface_cnt != 0 && (surface_allocated == NULL || usrptr == NULL)\n"); - return MIX_RESULT_NULL_PTR; - } - - if (requested_surface_info->surface_cnt > MAX_ENC_SURFACE_COUNT) { - LOG_E ("Something wrong, we have to quite now!\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "begin\n"); - - /* Chainup parent method. 
*/ - ret = MixVideoFormatEnc::Initialize( - config_params_enc, - frame_mgr, - input_buf_pool, - surface_pool, - requested_surface_info, - va_display); - if (ret != MIX_RESULT_SUCCESS) { - return ret; - } - - if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc)) { - config_params_enc_mpeg4 = MIX_VIDEOCONFIGPARAMSENC_MPEG4(config_params_enc); - } else { - LOG_V("mix_videofmtenc_mpeg4_initialize: no mpeg4 config params found\n"); - return MIX_RESULT_FAIL; - } - - Lock(); - - LOG_V("Start to get properities from MPEG-4:2 params\n"); - - /* get properties from MPEG4 params Object, which is special to MPEG4 format*/ - ret = mix_videoconfigparamsenc_mpeg4_get_profile_level ( - config_params_enc_mpeg4, &this->profile_and_level_indication); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to mix_videoconfigparamsenc_mpeg4_get_profile_level\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsenc_mpeg4_get_fixed_vti ( - config_params_enc_mpeg4, &(this->fixed_vop_time_increment)); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to mix_videoconfigparamsenc_mpeg4_get_fixed_vti\n"); - goto cleanup; - } - - ret = mix_videoconfigparamsenc_mpeg4_get_dlk ( - config_params_enc_mpeg4, &(this->disable_deblocking_filter_idc)); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to config_params_enc_mpeg4\n"); - goto cleanup; - } - - LOG_V("======MPEG4 Encode Object properities======:\n"); - LOG_I("self->profile_and_level_indication = %d\n", this->profile_and_level_indication); - LOG_I( "self->fixed_vop_time_increment = %d\n\n", this->fixed_vop_time_increment); - LOG_V("Get properities from params done\n"); - - this->va_display = va_display; - - LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", (uint)va_display); -#if 0 - /* query the vender information, can ignore*/ - va_vendor = vaQueryVendorString (va_display); - LOG_I( "Vendor = %s\n", - va_vendor); -#endif - - /*get the max number for profiles/entrypoints/attribs*/ - va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", va_max_num_profiles); - - va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", va_max_num_entrypoints); - - va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", va_max_num_attribs); - - va_profiles = new VAProfile[va_max_num_profiles]; - va_entrypoints = new VAEntrypoint[va_max_num_entrypoints]; - - if (va_profiles == NULL || va_entrypoints ==NULL) { - LOG_E("!va_profiles || !va_entrypoints\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_I("va_profiles = 0x%08x\n", (uint)va_profiles); - LOG_V( "vaQueryConfigProfiles\n"); - - va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to call vaQueryConfigProfiles\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - LOG_V( "vaQueryConfigProfiles Done\n"); - - /*check whether profile is supported*/ - for (index= 0; index < va_num_profiles; index++) { - if (this->va_profile == va_profiles[index]) - break; - } - - if (index == va_num_profiles) { - LOG_E( "Profile not supported\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaQueryConfigEntrypoints\n"); - - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints( - va_display, - this->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to call vaQueryConfigEntrypoints\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - for (index = 
0; index < va_num_entrypoints; index ++) { - if (va_entrypoints[index] == VAEntrypointEncSlice) { - break; - } - } - - if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_attrib[0].type = VAConfigAttribRTFormat; - va_attrib[1].type = VAConfigAttribRateControl; - - LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes( - va_display, - this->va_profile, - this->va_entrypoint, - &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to call vaGetConfigAttributes\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if ((va_attrib[0].value & this->va_format) == 0) { - LOG_E( "Matched format not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if ((va_attrib[1].value & this->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_attrib[0].value = this->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = this->va_rcmode; - - LOG_V( "======VA Configuration======\n"); - LOG_I( "profile = %d\n", this->va_profile); - LOG_I( "va_entrypoint = %d\n", this->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) = %d\n", va_attrib[1].value); - LOG_V( "vaCreateConfig\n"); - - va_status = vaCreateConfig( - va_display, this->va_profile, this->va_entrypoint, - &va_attrib[0], 2, &(this->va_config)); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaCreateConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if (this->va_rcmode == VA_RC_VCM) { - /* - * Following three features are only enabled in VCM mode - */ - this->render_mss_required = TRUE; - this->render_AIR_required = TRUE; - this->render_bitrate_required = TRUE; - } - - /* - * For upstream allocates buffer, it is mandatory to set buffer mode - * and for other stuff, it is optional - */ - - LOG_I ("alloc_surface_cnt = %d\n", requested_surface_info->surface_cnt); - - if (requested_surface_info->surface_cnt == 0) { - switch (this->buffer_mode) { - case MIX_BUFFER_UPSTREAM_ALLOC_CI: - ci_info = (MixCISharedBufferInfo *) (this->buf_info); - shared_surfaces_cnt = ci_info->ci_frame_cnt; - normal_surfaces_cnt = 2; - break; - case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: - /* - * To be develped - */ - break; - case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: - /* - * To be develped - */ - break; - default: - this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL; - normal_surfaces_cnt = 8; - break; - } - } - else if (requested_surface_info->surface_cnt == 1) { - /* - * Un-normal case, TBD - */ - this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL; - normal_surfaces_cnt = 8; - } - else { - this->buffer_mode = MIX_BUFFER_SELF_ALLOC_SURFACE; - precreated_surfaces_cnt = requested_surface_info->surface_cnt; - this->alloc_surface_cnt = requested_surface_info->surface_cnt; - this->usrptr = new uint8*[requested_surface_info->surface_cnt]; - if (this->usrptr == NULL) { - LOG_E("Failed allocate memory\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - memcpy (this->usrptr, - requested_surface_info->usrptr, - requested_surface_info->surface_cnt * sizeof (uint8 *)); - } - - LOG_I ("buffer_mode = %d\n", this->buffer_mode); - - this->shared_surfaces_cnt = shared_surfaces_cnt; - this->precreated_surfaces_cnt = precreated_surfaces_cnt; - -#if 0 - - int ii = 0; - for (ii=0; ii < alloc_surface_cnt; ii++) { - - g_print ("self->usrptr[%d] 
= 0x%08x\n", ii, self->usrptr[ii]); - g_print ("usrptr[%d] = 0x%08x\n", ii, usrptr[ii]); - - - } - - /*TODO: compute the surface number*/ - int numSurfaces; - - if (parent->share_buf_mode) { - numSurfaces = 2; - } - else { - numSurfaces = 2; - parent->ci_frame_num = 0; - } - - //self->surface_num = numSurfaces + parent->ci_frame_num; -#endif - - this->surface_num = normal_surfaces_cnt + shared_surfaces_cnt + precreated_surfaces_cnt; - - surfaces = new VASurfaceID[normal_surfaces_cnt]; - - if (surfaces == NULL) { - LOG_E("Failed allocate surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - this->surfaces = new VASurfaceID[this->surface_num]; - - if (this->surfaces == NULL) { - LOG_E("Failed allocate private surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_V( "vaCreateSurfaces\n"); - - va_status = vaCreateSurfaces( - va_display, this->va_format, - this->picture_width, this->picture_height, - surfaces, normal_surfaces_cnt, NULL, 0); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed vaCreateSurfaces\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if (shared_surfaces_cnt != 0) { - this->shared_surfaces = new VASurfaceID[shared_surfaces_cnt]; - if (this->shared_surfaces == NULL) { - LOG_E("Failed allocate shared surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - switch (this->buffer_mode) { - case MIX_BUFFER_UPSTREAM_ALLOC_CI: - { - #if 0 - for (index = 0; index < this->shared_surfaces_cnt; index++) { - va_status = vaCreateSurfaceFromCIFrame( - va_display, - (ulong) (ci_info->ci_frame_id[index]), - &this->shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaCreateSurfaceFromCIFrame\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - this->surfaces[index] = this->shared_surfaces[index]; - } - #endif - } - break; - case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: - /*To be develped*/ - break; - case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: - /*To be develped*/ - break; - case MIX_BUFFER_ALLOC_NORMAL: - break; - case MIX_BUFFER_SELF_ALLOC_SURFACE: - { - for (index = 0; index < requested_surface_info->surface_cnt; index ++) { - this->surfaces[index] = requested_surface_info->surface_allocated[index]; - } - } - break; - default: - break; - } - - for (index = 0; index < normal_surfaces_cnt; index++) { - this->surfaces[precreated_surfaces_cnt + shared_surfaces_cnt + index] = surfaces[index]; - } - - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", this->surface_num); - -#if 0 //current put this in gst - images = g_malloc(sizeof(VAImage)*numSurfaces); - if (images == NULL) - { - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < numSurfaces; index++) { - //Derive an VAImage from an existing surface. 
- //The image buffer can then be mapped/unmapped for CPU access - va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); - } -#endif - - LOG_V( "mix_surfacepool_new\n"); - - this->surfacepool = mix_surfacepool_new(); - if (surface_pool) - *surface_pool = this->surfacepool; - //which is useful to check before encode - - if (this->surfacepool == NULL) { - LOG_E("Failed to mix_surfacepool_new\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V("mix_surfacepool_initialize\n"); - - ret = mix_surfacepool_initialize( - this->surfacepool, this->surfaces, - this->surface_num, va_display); - - switch (ret) { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - LOG_E( "Error init failure\n"); - ret = MIX_RESULT_ALREADY_INIT; - goto cleanup; - default: - break; - } - - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext( - va_display, this->va_config, - this->picture_width, this->picture_height, - VA_PROGRESSIVE, this->surfaces, this->surface_num, - &(this->va_context)); - - LOG_I("Created libva context width %d, height %d\n", - this->picture_width, this->picture_height); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", (uint)va_status); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - ret = GetMaxEncodedBufSize(&max_size); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to mix_videofmtenc_mpeg4_get_max_encoded_buf_size\n"); - goto cleanup; - } - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer ( - va_display, this->va_context, - VAEncCodedBufferType, - this->coded_buf_size, - 1, NULL, - &this->coded_buf[0]); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer ( - va_display, this->va_context, - VAEncCodedBufferType, - this->coded_buf_size, - 1, NULL, - &(this->coded_buf[1])); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -#ifdef SHOW_SRC - Display * display = XOpenDisplay (NULL); - LOG_I( "display = 0x%08x\n", (uint) display); - win = XCreateSimpleWindow( - display, RootWindow(display, 0), 0, 0, - this->picture_width, this->picture_height, 0, 0, - WhitePixel(display, 0)); - XMapWindow(display, win); - XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - XSync(display, False); - LOG_I( "va_display = 0x%08x\n", (uint) va_display); -#endif /* SHOW_SRC */ - -cleanup: - - if (ret == MIX_RESULT_SUCCESS) { - this->initialized = TRUE; - } - - /*free profiles and entrypoints*/ - if (va_profiles) - delete [] va_profiles; - - if (va_entrypoints) - delete [] va_entrypoints; - - if (surfaces) - delete [] surfaces; - - Unlock(); - LOG_V( "end\n"); - return ret; -} - -MIX_RESULT MixVideoFormatEnc_MPEG4::Encode( - MixBuffer * bufin[], int bufincnt, - MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V( "Begin\n"); - /*currenly only support one input and output buffer*/ - if (bufincnt != 1 || iovoutcnt != 1) { - LOG_E("buffer count not equel to 1\n"); - LOG_E("maybe some exception occurs\n"); - } - - if (bufin[0] == NULL || iovout[0] == NULL) { - LOG_E("!bufin[0] ||!iovout[0]\n"); - return MIX_RESULT_NULL_PTR; - } - -#if 0 - if (parent_class->encode) { - return parent_class->encode(mix, bufin, 
bufincnt, iovout, - iovoutcnt, encode_params); - } -#endif - - LOG_V( "Locking\n"); - Lock(); - - //TODO: also we could move some encode Preparation work to here - LOG_V("mix_videofmtenc_mpeg4_process_encode\n"); - ret = _process_encode(bufin[0], iovout[0]); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videofmtenc_mpeg4_process_encode\n"); - goto cleanup; - } - -cleanup: - - LOG_V( "UnLocking\n"); - Unlock(); - LOG_V( "end\n"); - return ret; -} - -MIX_RESULT MixVideoFormatEnc_MPEG4::Flush() { - LOG_V( "Begin\n"); - /*not chain to parent flush func*/ -#if 0 - if (parent_class->flush) { - return parent_class->flush(mix, msg); - } -#endif - - Lock(); - - /*unref the current source surface*/ - if (this->cur_frame != NULL) { - mix_videoframe_unref (this->cur_frame); - this->cur_frame = NULL; - } - - /*unref the reconstructed surface*/ - if (this->rec_frame != NULL) { - mix_videoframe_unref (this->rec_frame); - this->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (this->ref_frame != NULL) { - mix_videoframe_unref (this->ref_frame); - this->ref_frame = NULL; - } - - if (this->last_mix_buffer) { - mix_buffer_unref(this->last_mix_buffer); - this->last_mix_buffer = NULL; - } - - /*reset the properities*/ - this->encoded_frames = 0; - this->pic_skipped = FALSE; - this->is_intra = TRUE; - - Unlock(); - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT MixVideoFormatEnc_MPEG4::Deinitialize() { - VAStatus va_status; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V( "Begin\n"); - ret = MixVideoFormatEnc::Deinitialize(); - if (ret != MIX_RESULT_SUCCESS) { - return ret; - } - - LOG_V( "Release frames\n"); - Lock(); - -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } -#endif - - /*unref the reconstructed surface*/ - if (this->rec_frame != NULL) { - mix_videoframe_unref (this->rec_frame); - this->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (this->ref_frame != NULL) { - mix_videoframe_unref (this->ref_frame); - this->ref_frame = NULL; - } - - if (this->lookup_frame != NULL) { - mix_videoframe_unref (this->lookup_frame); - this->lookup_frame = NULL; - } - - if (this->last_mix_buffer) { - mix_buffer_unref(this->last_mix_buffer); - this->last_mix_buffer = NULL; - } - - LOG_V( "Release surfaces\n"); - - if (this->shared_surfaces) { - delete [] this->shared_surfaces; - this->shared_surfaces = NULL; - } - - if (this->surfaces) { - delete [] this->surfaces; - this->surfaces = NULL; - } - - if (this->usrptr) { - delete [] this->usrptr; - this->usrptr = NULL; - } - - LOG_V( "vaDestroyContext\n"); - - va_status = vaDestroyContext (this->va_display, this->va_context); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed vaDestroyContext\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaDestroyConfig\n"); - - va_status = vaDestroyConfig (this->va_display, this->va_config); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed vaDestroyConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -cleanup: - - this->initialized = TRUE; - Unlock(); - LOG_V( "end\n"); - return ret; -} - -MIX_RESULT MixVideoFormatEnc_MPEG4::_send_seq_params () { - VAStatus va_status; - VAEncSequenceParameterBufferMPEG4 mpeg4_seq_param; - VABufferID seq_para_buf_id; - - LOG_V( "Begin\n\n"); - - /*set up the sequence params for HW*/ - mpeg4_seq_param.profile_and_level_indication = this->profile_and_level_indication; //TODO, hard code now - 
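
Both encoders collapse the rational frame rate into an integer with the idiom (num + denom/2) / denom — integer division rounded to nearest — used earlier for the H.264 framerate misc parameter and again for vop_time_increment_resolution and frame_rate in the sequence fields just below. A standalone illustration, hypothetical helper only:

    #include <cstdio>

    static unsigned int rounded_fps(unsigned int num, unsigned int denom) {
        // adding denom/2 before dividing rounds to the nearest integer
        return (num + denom / 2) / denom;
    }

    int main() {
        std::printf("%u\n", rounded_fps(30000, 1001)); // NTSC 29.97 -> 30
        std::printf("%u\n", rounded_fps(24, 1));       // exact rates pass through
        return 0;
    }

In the original expression the (unsigned int) cast appears to bind only to the parenthesized numerator, not to the whole quotient; with unsigned fields that seems harmless, but it reads as if it were meant to cast the result.
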
mpeg4_seq_param.video_object_layer_width= this->picture_width; - mpeg4_seq_param.video_object_layer_height= this->picture_height; - mpeg4_seq_param.vop_time_increment_resolution = - (unsigned int) (this->frame_rate_num + this->frame_rate_denom /2 ) / this->frame_rate_denom; - mpeg4_seq_param.fixed_vop_time_increment= this->fixed_vop_time_increment; - mpeg4_seq_param.bits_per_second= this->bitrate; - mpeg4_seq_param.frame_rate = - (unsigned int) (this->frame_rate_num + this->frame_rate_denom /2 ) / this->frame_rate_denom; - mpeg4_seq_param.initial_qp = this->initial_qp; - mpeg4_seq_param.min_qp = this->min_qp; - mpeg4_seq_param.intra_period = this->intra_period; - //mpeg4_seq_param.fixed_vop_rate = 30; - - LOG_V("===mpeg4 sequence params===\n"); - LOG_I("profile_and_level_indication = %d\n", (uint)mpeg4_seq_param.profile_and_level_indication); - LOG_I("intra_period = %d\n", mpeg4_seq_param.intra_period); - LOG_I("video_object_layer_width = %d\n", mpeg4_seq_param.video_object_layer_width); - LOG_I("video_object_layer_height = %d\n", mpeg4_seq_param.video_object_layer_height); - LOG_I("vop_time_increment_resolution = %d\n", mpeg4_seq_param.vop_time_increment_resolution); - LOG_I("fixed_vop_rate = %d\n", mpeg4_seq_param.fixed_vop_rate); - LOG_I("fixed_vop_time_increment = %d\n", mpeg4_seq_param.fixed_vop_time_increment); - LOG_I("bitrate = %d\n", mpeg4_seq_param.bits_per_second); - LOG_I("frame_rate = %d\n", mpeg4_seq_param.frame_rate); - LOG_I("initial_qp = %d\n", mpeg4_seq_param.initial_qp); - LOG_I("min_qp = %d\n", mpeg4_seq_param.min_qp); - LOG_I("intra_period = %d\n\n", mpeg4_seq_param.intra_period); - - va_status = vaCreateBuffer( - this->va_display, this->va_context, - VAEncSequenceParameterBufferType, - sizeof(mpeg4_seq_param), - 1, &mpeg4_seq_param, - &seq_para_buf_id); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture( - this->va_display, this->va_context, &seq_para_buf_id, 1); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoFormatEnc_MPEG4::_send_picture_parameter() { - VAStatus va_status; - VAEncPictureParameterBufferMPEG4 mpeg4_pic_param; - LOG_V( "Begin\n\n"); - -#if 0 //not needed currently - MixVideoConfigParamsEncMPEG4 * params_mpeg4 - = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc); -#endif - /*set picture params for HW*/ - mpeg4_pic_param.reference_picture = this->ref_frame->frame_id; - mpeg4_pic_param.reconstructed_picture = this->rec_frame->frame_id; - mpeg4_pic_param.coded_buf = this->coded_buf[this->coded_buf_index]; - mpeg4_pic_param.picture_width = this->picture_width; - mpeg4_pic_param.picture_height = this->picture_height; - mpeg4_pic_param.vop_time_increment= this->encoded_frames; - mpeg4_pic_param.picture_type = this->is_intra ? 
VAEncPictureTypeIntra : VAEncPictureTypePredictive; - - LOG_V("======mpeg4 picture params======\n"); - LOG_I("reference_picture = 0x%08x\n", mpeg4_pic_param.reference_picture); - LOG_I("reconstructed_picture = 0x%08x\n", mpeg4_pic_param.reconstructed_picture); - LOG_I("coded_buf = 0x%08x\n", mpeg4_pic_param.coded_buf); - LOG_I("coded_buf_index = %d\n", this->coded_buf_index); - LOG_I("picture_width = %d\n", mpeg4_pic_param.picture_width); - LOG_I("picture_height = %d\n", mpeg4_pic_param.picture_height); - LOG_I("vop_time_increment = %d\n", mpeg4_pic_param.vop_time_increment); - LOG_I("picture_type = %d\n\n", mpeg4_pic_param.picture_type); - - va_status = vaCreateBuffer( - this->va_display, this->va_context, - VAEncPictureParameterBufferType, - sizeof(mpeg4_pic_param), - 1,&mpeg4_pic_param, - &this->pic_param_buf); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture( - this->va_display, this->va_context, - &this->pic_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaRenderPicture\n"); - LOG_I( "va_status = %d\n", va_status); - return MIX_RESULT_FAIL; - } - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT MixVideoFormatEnc_MPEG4::_send_slice_parameter() { - VAStatus va_status; - uint slice_height; - uint slice_index; - uint slice_height_in_mb; - - LOG_V( "Begin\n\n"); - - slice_height = this->picture_height; - slice_height += 15; - slice_height &= (~15); - VAEncSliceParameterBuffer slice_param; - slice_index = 0; - slice_height_in_mb = slice_height / 16; - slice_param.start_row_number = 0; - slice_param.slice_height = slice_height / 16; - slice_param.slice_flags.bits.is_intra = this->is_intra; - slice_param.slice_flags.bits.disable_deblocking_filter_idc - = this->disable_deblocking_filter_idc; - - LOG_V("======mpeg4 slice params======\n"); - LOG_I( "start_row_number = %d\n", (int) slice_param.start_row_number); - LOG_I( "slice_height_in_mb = %d\n", (int) slice_param.slice_height); - LOG_I( "slice.is_intra = %d\n", (int) slice_param.slice_flags.bits.is_intra); - LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) this->disable_deblocking_filter_idc); - - va_status = vaCreateBuffer ( - this->va_display, this->va_context, - VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), - 1, &slice_param, - &this->slice_param_buf); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaCreateBuffer\n"); - return MIX_RESULT_FAIL; - } - - va_status = vaRenderPicture( - this->va_display, this->va_context, - &this->slice_param_buf, 1); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaRenderPicture\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoFormatEnc_MPEG4::_process_encode ( - MixBuffer * bufin, MixIOVec * iovout) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus va_status = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; - VAContextID va_context; - ulong surface = 0; - uint16 width, height; - - MixVideoFrame * tmp_frame; - uint8 *buf; - - VACodedBufferSegment *coded_seg = NULL; - int num_seg = 0; - uint total_size = 0; - uint size = 0; - - if ((bufin == NULL) || (iovout == NULL)) { - LOG_E("mix == NUL) || bufin == NULL || iovout == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - - va_display = this->va_display; - va_context = this->va_context; - width = this->picture_width; - height = this->picture_height; - - LOG_I( "encoded_frames = %d\n", this->encoded_frames); 
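
The slice setup above first rounds the picture height up to the 16-pixel macroblock grid (slice_height += 15; slice_height &= ~15) before dividing by 16; the H.264 path uses the equivalent (x + 15) / 16 form. A standalone illustration of both idioms, hypothetical helpers only:

    #include <cassert>

    static unsigned int align16(unsigned int x) { return (x + 15u) & ~15u; }
    static unsigned int mb_rows(unsigned int x) { return (x + 15u) / 16u; }

    int main() {
        assert(align16(720)  == 720  && mb_rows(720)  == 45);
        assert(align16(1080) == 1088 && mb_rows(1080) == 68); // rounds up
        return 0;
    }
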
- LOG_I( "is_intra = %d\n", this->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", (uint) this->ci_frame_id); - - /* determine the picture type*/ - if ((this->encoded_frames % this->intra_period) == 0) { - this->is_intra = TRUE; - } else { - this->is_intra = FALSE; - } - - LOG_I( "is_intra_picture = %d\n", this->is_intra); - LOG_V("Get Surface from the pool\n"); - - /*current we use one surface for source data, - * one for reference and one for reconstructed*/ - /*TODO, could be refine here*/ - - switch (this->buffer_mode) { - case MIX_BUFFER_UPSTREAM_ALLOC_CI: - { - //MixVideoFrame * frame = mix_videoframe_new(); - if (this->lookup_frame == NULL) { - this->lookup_frame = mix_videoframe_new (); - if (this->lookup_frame == NULL) { - LOG_E("mix_videoframe_new() failed!\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - if (this->ref_frame == NULL) { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->ref_frame, this->lookup_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("get reference surface from pool failed\n"); - goto cleanup; - } - } - - if (this->rec_frame == NULL) { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->rec_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("get recontructed surface from pool failed\n"); - goto cleanup; - } - } - - //mix_videoframe_unref (mix->cur_frame); - - if (this->need_display) { - this->cur_frame = NULL; - } - - if (this->cur_frame == NULL) { - uint ci_idx; -#ifndef ANDROID - memcpy (&ci_idx, bufin->data, bufin->size); -#else - memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); -#endif - - LOG_I("surface_num = %d\n", this->surface_num); - LOG_I("ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > this->surface_num - 2) { - LOG_E("the CI frame idx is too bigger than CI frame number\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, ci_idx); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->cur_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("get current working surface from pool failed\n"); - goto cleanup; - } - } - - ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); - } - /* - * end of CI buffer allocation mode - */ - break; - case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: - break; - case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: - break; - case MIX_BUFFER_SELF_ALLOC_SURFACE: - { - if (this->lookup_frame == NULL) { - this->lookup_frame = mix_videoframe_new (); - if (this->lookup_frame == NULL) { - LOG_E("mix_videoframe_new() failed!\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - uint surface_idx = (uint) -1; //fixme, temp use a big value - uint idx = 0; - - LOG_I ("bufin->data = 0x%08x\n", bufin->data); - - for (idx = 0; idx < this->alloc_surface_cnt; idx++) { - LOG_I ("mix->usrptr[%d] = 0x%08x\n", idx, this->usrptr[idx]); - if (bufin->data == this->usrptr[idx]) - surface_idx = idx; - } - - LOG_I("surface_num = %d\n", this->surface_num); - 
LOG_I("surface_idx = %d\n", surface_idx); - if (surface_idx > this->surface_num - 2) { - LOG_W("the Surface idx is too big, most likely the buffer passed in is not allocated by us\n"); - ret = MIX_RESULT_FAIL; - goto no_share_mode; - } - - if (this->ref_frame == NULL) { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->ref_frame, this->lookup_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("get reference surface from pool failed\n"); - goto cleanup; - } - } - - if (this->rec_frame == NULL) { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->rec_frame, this->lookup_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("get recontructed surface from pool failed\n"); - goto cleanup; - } - } - - //mix_videoframe_unref (mix->cur_frame); - - if (this->need_display) { - this->cur_frame = NULL; - } - - if (this->cur_frame == NULL) { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, surface_idx); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->cur_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("get current working surface from pool failed\n"); - goto cleanup; - } - } - - ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); - } - - break; - /* - * end of Self buffer allocation mode - */ - - case MIX_BUFFER_ALLOC_NORMAL: - { - -no_share_mode: - - LOG_V("We are NOT in share buffer mode\n"); - - if (this->ref_frame == NULL) { - ret = mix_surfacepool_get(this->surfacepool, &this->ref_frame); - if (ret != MIX_RESULT_SUCCESS) {//#ifdef SLEEP_SURFACE not used - LOG_E("Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (this->rec_frame == NULL) { - ret = mix_surfacepool_get(this->surfacepool, &this->rec_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (this->need_display) { - this->cur_frame = NULL; - } - - if (this->cur_frame == NULL) { - ret = mix_surfacepool_get(this->surfacepool, &this->cur_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - LOG_V( "Get Surface Done\n"); - - VAImage src_image; - uint8 *pvbuf; - uint8 *dst_y; - uint8 *dst_uv; - int i,j; - - LOG_V("map source data to surface\n"); - - ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to mix_videoframe_get_frame_id\n"); - goto cleanup; - } - - LOG_I("surface id = 0x%08x\n", (uint) surface); - - va_status = vaDeriveImage(va_display, surface, &src_image); - //need to destroy - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaDeriveImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - VAImage *image = &src_image; - - LOG_V( "vaDeriveImage Done\n"); - - va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed to vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V("vaImage information\n"); - 
LOG_I("image->pitches[0] = %d\n", image->pitches[0]); - LOG_I("image->pitches[1] = %d\n", image->pitches[1]); - LOG_I("image->offsets[0] = %d\n", image->offsets[0]); - LOG_I("image->offsets[1] = %d\n", image->offsets[1]); - LOG_I("image->num_planes = %d\n", image->num_planes); - LOG_I("image->width = %d\n", image->width); - LOG_I("image->height = %d\n", image->height); - LOG_I("input buf size = %d\n", bufin->size); - - uint8 *inbuf = bufin->data; -#ifdef ANDROID -#define USE_SRC_FMT_NV12 -#endif - int offset_uv = width * height; - uint8 *inbuf_uv = inbuf + offset_uv; - int height_uv = height / 2; - int width_uv = width; - -#ifdef ANDROID - //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 - dst_y = pvbuf + image->offsets[0]; - for (i = 0; i < height; i++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - -#ifdef USE_SRC_FMT_NV12 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i++) { - memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); - dst_uv += image->pitches[1]; - } -#else //USE_SRC_FMT_NV21 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i ++) { - for (j = 0; j < width_uv; j += 2) { - dst_uv[j] = inbuf_uv[j+1]; //u - dst_uv[j+1] = inbuf_uv[j]; //v - } - dst_uv += image->pitches[1]; - inbuf_uv += width_uv; - } -#endif - -#else - - if (this->raw_format == MIX_RAW_TARGET_FORMAT_YUV420) { - dst_y = pvbuf +image->offsets[0]; - - for (i = 0; i < height; i ++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = - inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; - } - dst_uv += image->pitches[1]; - } - } - else if (this->raw_format == MIX_RAW_TARGET_FORMAT_NV12) { - - dst_y = pvbuf + image->offsets[0]; - for (i = 0; i < height; i++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i++) { - memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); - dst_uv += image->pitches[1]; - } - } - else { - LOG_E("Raw format not supoort\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -#endif //USE_SRC_FMT_YUV420 - - va_status = vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaDestroyImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V("Map source data to surface done\n"); - } - break; - default: - break; - - } - - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(uint)va_context); - LOG_I( "surface = 0x%08x\n",(uint)surface); - LOG_I( "va_display = 0x%08x\n",(uint)va_display); - - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - ret = _send_encode_command(); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E ("Failed mix_videofmtenc_h264_send_encode_command\n"); - goto cleanup; - } - - - if ((this->va_rcmode == VA_RC_NONE) || this->encoded_frames == 0) { - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - } - - if 
(this->encoded_frames == 0) { - this->encoded_frames ++; - this->last_coded_buf = this->coded_buf[this->coded_buf_index]; - this->coded_buf_index ++; - this->coded_buf_index %=2; - this->last_frame = this->cur_frame; - /* determine the picture type*/ - if ((this->encoded_frames % this->intra_period) == 0) { - this->is_intra = TRUE; - } else { - this->is_intra = FALSE; - } - tmp_frame = this->rec_frame; - this->rec_frame= this->ref_frame; - this->ref_frame = tmp_frame; - } - - LOG_V( "vaSyncSurface\n"); - - va_status = vaSyncSurface(va_display, this->last_frame->frame_id); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaSyncSurface\n"); - //return MIX_RESULT_FAIL; - } - - LOG_V("Start to get encoded data\n"); - - /*get encoded data from the VA buffer*/ - va_status = vaMapBuffer (va_display, this->last_coded_buf, (void **)&buf); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - coded_seg = (VACodedBufferSegment *)buf; - num_seg = 1; - - while (1) { - total_size += coded_seg->size; - if (coded_seg->next == NULL) - break; - coded_seg = (VACodedBufferSegment*)coded_seg->next; - num_seg ++; - } - - -#if 0 - // first 4 bytes is the size of the buffer - memcpy (&(iovout->data_size), (void*)buf, 4); - //size = (uint*) buf; -#endif - - iovout->data_size = total_size; - - if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it. - iovout->data = new uchar[iovout->data_size]; - if (iovout->data == NULL) { - LOG_E( "iovout->data == NULL\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - //memcpy (iovout->data, buf + 16, iovout->data_size); - - coded_seg = (VACodedBufferSegment *)buf; - total_size = 0; - - while (1) { - memcpy (iovout->data + total_size, coded_seg->buf, coded_seg->size); - total_size += coded_seg->size; - if (coded_seg->next == NULL) - break; - coded_seg = (VACodedBufferSegment *)coded_seg->next; - } - - iovout->buffer_size = iovout->data_size; - - LOG_I("out size is = %d\n", iovout->data_size); - - va_status = vaUnmapBuffer (va_display, this->last_coded_buf); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "get encoded data done\n"); - - if (!((this->va_rcmode == VA_RC_NONE) || this->encoded_frames == 1)) { - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - } - - if (this->encoded_frames == 1) { - va_status = vaBeginPicture(va_display, va_context, surface); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaBeginPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - ret = _send_encode_command (); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E ("Failed mix_videofmtenc_h264_send_encode_command\n"); - goto cleanup; - } - - va_status = vaEndPicture (va_display, va_context); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaEndPicture\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - } - - VASurfaceStatus status; - - /*query the status of current surface*/ - va_status = vaQuerySurfaceStatus(va_display, surface, &status); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed vaQuerySurfaceStatus\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - this->pic_skipped = status & VASurfaceSkipped; - - //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_frame); - - if (this->need_display) { - ret = 
mix_videoframe_set_sync_flag(this->cur_frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - goto cleanup; - } - - ret = mix_framemanager_enqueue(this->framemgr, this->cur_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_framemanager_enqueue\n"); - goto cleanup; - } - } - - /*update the reference surface and reconstructed surface */ - if (!this->pic_skipped) { - tmp_frame = this->rec_frame; - this->rec_frame= this->ref_frame; - this->ref_frame = tmp_frame; - } - -#if 0 - if (this->ref_frame != NULL) - mix_videoframe_unref (this->ref_frame); - this->ref_frame = this->rec_frame; - - mix_videoframe_unref (this->cur_frame); -#endif - - this->encoded_frames ++; - this->last_coded_buf = this->coded_buf[this->coded_buf_index]; - this->coded_buf_index ++; - this->coded_buf_index %=2; - this->last_frame = this->cur_frame; - if (this->last_mix_buffer) { - LOG_V("calls to mix_buffer_unref \n"); - LOG_V("refcount = %d\n", MIX_PARAMS(this->last_mix_buffer)->GetRefCount()); - mix_buffer_unref(this->last_mix_buffer); - } - - LOG_V("ref the current bufin\n"); - - this->last_mix_buffer = mix_buffer_ref(bufin); - if (!(this->need_display)) { - mix_videoframe_unref (this->cur_frame); - this->cur_frame = NULL; - } - -cleanup: - - if (ret != MIX_RESULT_SUCCESS) { - if (iovout->data) { - delete [] iovout->data; - iovout->data = NULL; - } - } - LOG_V( "end\n"); - return ret; -} - -MIX_RESULT MixVideoFormatEnc_MPEG4::GetMaxEncodedBufSize( - uint * max_size) { - LOG_V( "Begin\n"); - - if (this->coded_buf_size > 0) { - *max_size = this->coded_buf_size; - LOG_V ("Already calculated the max encoded size, returning the value directly"); - return MIX_RESULT_SUCCESS; - } - - /*based on the rate control mode, calculate the default encoded buffer size*/ - if (this->va_rcmode_mpeg4 == VA_RC_NONE) { - this->coded_buf_size = (this->picture_width* this->picture_height * 400) / (16 * 16); - // set to value according to QP - } - else { - this->coded_buf_size = this->bitrate / 4; - } - - this->coded_buf_size = max( - this->coded_buf_size , - (this->picture_width* this->picture_height * 400) / (16 * 16)); - - /*in case the user supplied a very large bit rate value*/ - this->coded_buf_size = max( - this->coded_buf_size, (this->picture_width * this->picture_height * 1.5 * 8)); - this->coded_buf_size = (this->coded_buf_size + 15) & (~15); - - *max_size = this->coded_buf_size; - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoFormatEnc_MPEG4::_send_encode_command () { - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V( "Begin\n"); - - if (this->encoded_frames == 0) { - ret = _send_seq_params (); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videofmtenc_mpeg4_send_seq_params\n"); - return MIX_RESULT_FAIL; - } - } - - ret = _send_picture_parameter (); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videofmtenc_mpeg4_send_picture_parameter\n"); - return MIX_RESULT_FAIL; - } - - ret = _send_slice_parameter (); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videofmtenc_mpeg4_send_slice_parameter\n"); - return MIX_RESULT_FAIL; - } - - LOG_V( "End\n"); - return MIX_RESULT_SUCCESS; -} diff --git a/mix_video/src/mixvideoformatenc_mpeg4.h b/mix_video/src/mixvideoformatenc_mpeg4.h deleted file mode 100644 index 6c5241d..0000000 --- a/mix_video/src/mixvideoformatenc_mpeg4.h +++ /dev/null @@ -1,119 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. 
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEOFORMATENC_MPEG4_H__ -#define __MIX_VIDEOFORMATENC_MPEG4_H__ - -#include "mixvideoformatenc.h" -#include "mixvideoframe_private.h" - - - -#define MIX_VIDEO_ENC_MPEG4_SURFACE_NUM 20 - -#define min(X,Y) (((X) < (Y)) ? (X) : (Y)) -#define max(X,Y) (((X) > (Y)) ? (X) : (Y)) - -/* - * Type macros. - */ -#define MIX_VIDEOFORMATENC_MPEG4(obj) (reinterpret_cast(obj)) -#define MIX_IS_VIDEOFORMATENC_MPEG4(obj) (NULL != MIX_VIDEOFORMATENC_MPEG4(obj)) - -class MixVideoFormatEnc_MPEG4 : public MixVideoFormatEnc { -public: - MixVideoFormatEnc_MPEG4(); - virtual ~MixVideoFormatEnc_MPEG4(); - - /* MPEG-4:2 vmethods */ - virtual MIX_RESULT Initialize( - MixVideoConfigParamsEnc* config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - MixUsrReqSurfacesInfo * requested_surface_info, - VADisplay va_display); - virtual MIX_RESULT Encode( MixBuffer * bufin[], - int bufincnt, MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params); - virtual MIX_RESULT Flush(); - virtual MIX_RESULT Deinitialize(); - virtual MIX_RESULT GetMaxEncodedBufSize (uint *max_size); - -protected: - /* Local Methods */ - MIX_RESULT _process_encode (MixBuffer * bufin, MixIOVec * iovout); - MIX_RESULT _send_encode_command (); - MIX_RESULT _send_seq_params (); - MIX_RESULT _send_picture_parameter(); - MIX_RESULT _send_slice_parameter(); - -public: - VABufferID coded_buf[2]; - VABufferID last_coded_buf; - VABufferID seq_param_buf; - VABufferID pic_param_buf; - VABufferID slice_param_buf; - VASurfaceID * shared_surfaces; - VASurfaceID * surfaces; - uint surface_num; - uint shared_surfaces_cnt; - uint precreated_surfaces_cnt; - - MixVideoFrame *cur_frame; //current input frame to be encoded; - MixVideoFrame *ref_frame; //reference frame - MixVideoFrame *rec_frame; //reconstructed frame; - MixVideoFrame *last_frame; //last frame; - MixVideoFrame *lookup_frame; - - MixBuffer *last_mix_buffer; - - uchar profile_and_level_indication; - uint fixed_vop_time_increment; - uint disable_deblocking_filter_idc; - - uint va_rcmode_mpeg4; - - uint encoded_frames; - bool pic_skipped; - - bool is_intra; - - uint coded_buf_size; - uint coded_buf_index; - - uint8 ** usrptr; - uint alloc_surface_cnt; -}; - -/** - * mix_videoformatenc_mpeg4_new: - * @returns: A newly allocated instance of #MixVideoFormatEnc_MPEG4 - * - * Use this method to create new instance of #MixVideoFormatEnc_MPEG4 - */ -MixVideoFormatEnc_MPEG4 *mix_videoformatenc_mpeg4_new(void); - -/** - * 
mix_videoformatenc_mpeg4_ref: - * @mix: object to add reference - * @returns: the MixVideoFormatEnc_MPEG4 instance where reference count has been increased. - * - * Add reference count. - */ -MixVideoFormatEnc_MPEG4 *mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix); - -/** - * mix_videoformatenc_mpeg4_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -MixVideoFormatEnc_MPEG4 *mix_videoformatenc_mpeg4_unref(MixVideoFormatEnc_MPEG4 * mix); - -#endif /* __MIX_VIDEOFORMATENC_MPEG4_H__ */ diff --git a/mix_video/src/mixvideoformatenc_preview.cpp b/mix_video/src/mixvideoformatenc_preview.cpp deleted file mode 100644 index e1b7be9..0000000 --- a/mix_video/src/mixvideoformatenc_preview.cpp +++ /dev/null @@ -1,1120 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
- */ - -#include -#include - -#include "mixvideolog.h" - -#include "mixvideoformatenc_preview.h" -#include "mixvideoconfigparamsenc_preview.h" -#include - -#undef SHOW_SRC - -#ifdef SHOW_SRC -Window win = 0; -#endif /* SHOW_SRC */ - -MixVideoFormatEnc_Preview::MixVideoFormatEnc_Preview() - :shared_surfaces(NULL) - ,surfaces(NULL) - ,surface_num(0) - ,shared_surfaces_cnt(0) - ,precreated_surfaces_cnt(0) - ,cur_frame(NULL) - ,ref_frame(NULL) - ,rec_frame(NULL) - ,lookup_frame(NULL) - ,encoded_frames(0) - ,pic_skipped(FALSE) - ,is_intra(TRUE) - ,usrptr(NULL) { -} - -MixVideoFormatEnc_Preview::~MixVideoFormatEnc_Preview() { -} - -MixVideoFormatEnc_Preview * -mix_videoformatenc_preview_new(void) { - return new MixVideoFormatEnc_Preview(); -} - - -MixVideoFormatEnc_Preview * -mix_videoformatenc_preview_ref(MixVideoFormatEnc_Preview * mix) { - if (NULL != mix) { - mix->Ref(); - return mix; - } - else { - return NULL; - } -} - -MixVideoFormatEnc_Preview * -mix_videoformatenc_preview_unref(MixVideoFormatEnc_Preview * mix) { - if (NULL!=mix) - if (NULL != mix->Unref()) - return mix; - else - return NULL; - else - return NULL; -} - - -MIX_RESULT MixVideoFormatEnc_Preview::Initialize( - MixVideoConfigParamsEnc * config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - MixUsrReqSurfacesInfo * requested_surface_info, - VADisplay va_display ) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - MixVideoConfigParamsEncPreview * config_params_enc_preview; - - VAStatus va_status = VA_STATUS_SUCCESS; - VASurfaceID * surfaces = NULL; - - int va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - int va_num_profiles, va_num_entrypoints; - - VAProfile *va_profiles = NULL; - VAEntrypoint *va_entrypoints = NULL; - VAConfigAttrib va_attrib[2]; - uint index; - - /* - * Different MIX buffer mode will have different surface handling approach - */ - uint normal_surfaces_cnt = 2; - - /* - * shared_surfaces_cnt is for upstream buffer allocation case - */ - uint shared_surfaces_cnt = 0; - - /* - * precreated_surfaces_cnt is for self buffer allocation case - */ - uint precreated_surfaces_cnt = 0; - - MixCISharedBufferInfo * ci_info = NULL; - - /*frame_mgr and input_buf_pool is reservered for future use*/ - if (config_params_enc == NULL || va_display == NULL) { - LOG_E("config_params_enc == NULL || va_display == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "begin\n"); - - /* Chainup parent method. 
*/ - ret = MixVideoFormatEnc::Initialize( - config_params_enc, frame_mgr, input_buf_pool, - surface_pool, requested_surface_info, va_display); - if (ret != MIX_RESULT_SUCCESS) { - return ret; - } - - if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(config_params_enc)) { - config_params_enc_preview = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc); - } else { - LOG_V("mix_videofmtenc_preview_initialize: no preview config params found\n"); - return MIX_RESULT_FAIL; - } - - Lock(); - - LOG_V("Get properities from params done\n"); - - this->va_display = va_display; - - LOG_V( "Get Display\n"); - LOG_I( "Display = 0x%08x\n", (uint)va_display); - - /*get the max number for profiles/entrypoints/attribs*/ - va_max_num_profiles = vaMaxNumProfiles(va_display); - LOG_I( "va_max_num_profiles = %d\n", va_max_num_profiles); - - va_max_num_entrypoints = vaMaxNumEntrypoints(va_display); - LOG_I( "va_max_num_entrypoints = %d\n", va_max_num_entrypoints); - - va_max_num_attribs = vaMaxNumConfigAttributes(va_display); - LOG_I( "va_max_num_attribs = %d\n", va_max_num_attribs); - - va_profiles = new VAProfile[va_max_num_profiles]; - va_entrypoints = new VAEntrypoint[va_max_num_entrypoints]; - - if (va_profiles == NULL || va_entrypoints ==NULL) { - LOG_E("!va_profiles || !va_entrypoints\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_I("va_profiles = 0x%08x\n", (uint)va_profiles); - LOG_V("vaQueryConfigProfiles\n"); - - va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to call vaQueryConfigProfiles\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - LOG_V( "vaQueryConfigProfiles Done\n"); - - /*check whether profile is supported*/ - for (index= 0; index < va_num_profiles; index++) { - if (this->va_profile == va_profiles[index]) - break; - } - - if (index == va_num_profiles) { - LOG_E( "Profile not supported\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaQueryConfigEntrypoints\n"); - - /*Check entry point*/ - va_status = vaQueryConfigEntrypoints( - va_display, - this->va_profile, - va_entrypoints, &va_num_entrypoints); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to call vaQueryConfigEntrypoints\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - for (index = 0; index < va_num_entrypoints; index ++) { - if (va_entrypoints[index] == VAEntrypointEncSlice) { - break; - } - } - - if (index == va_num_entrypoints) { - LOG_E( "Entrypoint not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_attrib[0].type = VAConfigAttribRTFormat; - va_attrib[1].type = VAConfigAttribRateControl; - - LOG_V( "vaGetConfigAttributes\n"); - - va_status = vaGetConfigAttributes( - va_display, this->va_profile, - this->va_entrypoint, &va_attrib[0], 2); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to call vaGetConfigAttributes\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if ((va_attrib[0].value & this->va_format) == 0) { - LOG_E( "Matched format not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - - if ((va_attrib[1].value & this->va_rcmode) == 0) { - LOG_E( "RC mode not found\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_attrib[0].value = this->va_format; //VA_RT_FORMAT_YUV420; - va_attrib[1].value = this->va_rcmode; - - LOG_V( "======VA Configuration======\n"); - LOG_I( "profile = %d\n", this->va_profile); - LOG_I( "va_entrypoint = %d\n", this->va_entrypoint); - LOG_I( "va_attrib[0].type = %d\n", va_attrib[0].type); - LOG_I( "va_attrib[1].type = %d\n", 
va_attrib[1].type); - LOG_I( "va_attrib[0].value (Format) = %d\n", va_attrib[0].value); - LOG_I( "va_attrib[1].value (RC mode) = %d\n", va_attrib[1].value); - LOG_V( "vaCreateConfig\n"); - - va_status = vaCreateConfig( - va_display, this->va_profile, - this->va_entrypoint, - &va_attrib[0], 2, &(this->va_config)); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed vaCreateConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - /* - * For upstream allocates buffer, it is mandatory to set buffer mode - * and for other stuff, it is optional - */ - - LOG_I ("alloc_surface_cnt = %d\n", requested_surface_info->surface_cnt); - - if (requested_surface_info->surface_cnt == 0) { - switch (this->buffer_mode) { - case MIX_BUFFER_UPSTREAM_ALLOC_CI: - ci_info = (MixCISharedBufferInfo *) (this->buf_info); - shared_surfaces_cnt = ci_info->ci_frame_cnt; - normal_surfaces_cnt = 2; - break; - case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: - /* - * To be develped - */ - break; - case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: - /* - * To be develped - */ - break; - default: - this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL; - normal_surfaces_cnt = 8; - break; - } - } - else if (requested_surface_info->surface_cnt == 1) { - /* - * Un-normal case, TBD - */ - this->buffer_mode = MIX_BUFFER_ALLOC_NORMAL; - normal_surfaces_cnt = 8; - } - else { - this->buffer_mode = MIX_BUFFER_SELF_ALLOC_SURFACE; - precreated_surfaces_cnt = requested_surface_info->surface_cnt; - this->alloc_surface_cnt = requested_surface_info->surface_cnt; - this->usrptr = new uint8*[requested_surface_info->surface_cnt]; - if (this->usrptr == NULL) { - LOG_E("Failed allocate memory\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - memcpy (this->usrptr, requested_surface_info->usrptr, requested_surface_info->surface_cnt * sizeof (uint8 *)); - - } - - LOG_I ("buffer_mode = %d\n", this->buffer_mode); - - this->shared_surfaces_cnt = shared_surfaces_cnt; - this->precreated_surfaces_cnt = precreated_surfaces_cnt; - -#if 0 - - int ii = 0; - for (ii=0; ii < alloc_surface_cnt; ii++) { - - g_print ("self->usrptr[%d] = 0x%08x\n", ii, self->usrptr[ii]); - g_print ("usrptr[%d] = 0x%08x\n", ii, usrptr[ii]); - - - } - - /*TODO: compute the surface number*/ - int numSurfaces; - - if (parent->share_buf_mode) { - numSurfaces = 2; - } - else { - numSurfaces = 2; - parent->ci_frame_num = 0; - } - - //self->surface_num = numSurfaces + parent->ci_frame_num; -#endif - - this->surface_num = normal_surfaces_cnt + shared_surfaces_cnt + precreated_surfaces_cnt; - surfaces = new VASurfaceID[normal_surfaces_cnt]; - - if (surfaces == NULL) { - LOG_E("Failed allocate surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - this->surfaces = new VASurfaceID[this->surface_num]; - - if (this->surfaces == NULL) { - LOG_E("Failed allocate private surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - LOG_V( "vaCreateSurfaces\n"); - - va_status = vaCreateSurfaces( - va_display, this->va_format, - this->picture_width, this->picture_height, - surfaces, normal_surfaces_cnt, NULL, 0 ); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed vaCreateSurfaces\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - if (shared_surfaces_cnt != 0) { - this->shared_surfaces = new VASurfaceID[shared_surfaces_cnt]; - if (this->shared_surfaces == NULL) { - LOG_E("Failed allocate shared surface\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - - switch (this->buffer_mode) { - case MIX_BUFFER_UPSTREAM_ALLOC_CI: - { - #if 0 - for (index = 0; index < 
this->shared_surfaces_cnt; index++) { - va_status = vaCreateSurfaceFromCIFrame( - va_display, (ulong) (ci_info->ci_frame_id[index]), - &this->shared_surfaces[index]); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaCreateSurfaceFromCIFrame\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - this->surfaces[index] = this->shared_surfaces[index]; - } - #endif - } - break; - case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: - /*To be develped*/ - break; - case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: - /*To be develped*/ - break; - case MIX_BUFFER_ALLOC_NORMAL: - break; - case MIX_BUFFER_SELF_ALLOC_SURFACE: - { - for (index = 0; index < requested_surface_info->surface_cnt; index ++) { - this->surfaces[index] = requested_surface_info->surface_allocated[index]; - } - } - break; - default: - break; - } - - - for (index = 0; index < normal_surfaces_cnt; index++) { - this->surfaces[precreated_surfaces_cnt + shared_surfaces_cnt + index] = surfaces[index]; - } - - LOG_V( "assign surface Done\n"); - LOG_I( "Created %d libva surfaces\n", this->surface_num); - -#if 0 //current put this in gst - images = g_malloc(sizeof(VAImage)*numSurfaces); - if (images == NULL) - { - g_mutex_unlock(parent->objectlock); - return MIX_RESULT_FAIL; - } - - for (index = 0; index < numSurfaces; index++) { - //Derive an VAImage from an existing surface. - //The image buffer can then be mapped/unmapped for CPU access - va_status = vaDeriveImage(va_display, surfaces[index], - &images[index]); - } -#endif - - LOG_V( "mix_surfacepool_new\n"); - - this->surfacepool = mix_surfacepool_new(); - if (surface_pool) - *surface_pool = this->surfacepool; - //which is useful to check before encode - - if (this->surfacepool == NULL) { - LOG_E("Failed to mix_surfacepool_new\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V("mix_surfacepool_initialize\n"); - - ret = mix_surfacepool_initialize( - this->surfacepool, this->surfaces, - this->surface_num, va_display); - - switch (ret) { - case MIX_RESULT_SUCCESS: - break; - case MIX_RESULT_ALREADY_INIT: - ret = MIX_RESULT_ALREADY_INIT; - goto cleanup; - default: - break; - } - - //Initialize and save the VA context ID - LOG_V( "vaCreateContext\n"); - - va_status = vaCreateContext( - va_display, this->va_config, - this->picture_width, this->picture_height, - 0, this->surfaces, this->surface_num, - &(this->va_context)); - - LOG_I("Created libva context width %d, height %d\n", - this->picture_width, this->picture_height); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaCreateContext\n"); - LOG_I( "va_status = %d\n", (uint)va_status); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - this->coded_buf_size = 4; - - /*Create coded buffer for output*/ - va_status = vaCreateBuffer ( - va_display, this->va_context, - VAEncCodedBufferType, - this->coded_buf_size, // - 1, NULL, - &this->coded_buf); - - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaCreateBuffer: VAEncCodedBufferType\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - -#ifdef SHOW_SRC - Display * display = XOpenDisplay (NULL); - LOG_I("display = 0x%08x\n", (uint) display); - win = XCreateSimpleWindow( - display, RootWindow(display, 0), 0, 0, - this->picture_width, this->picture_height, 0, 0, - WhitePixel(display, 0)); - XMapWindow(display, win); - XSelectInput(display, win, KeyPressMask | StructureNotifyMask); - XSync(display, False); - LOG_I( "va_display = 0x%08x\n", (uint) va_display); -#endif /* SHOW_SRC */ - - LOG_V("end\n"); - -cleanup: - - if (ret == MIX_RESULT_SUCCESS) { - this->initialized = TRUE; - } - - 
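
The Initialize() path above probes libva for the requested profile, entrypoint, and attributes before calling vaCreateConfig. A condensed sketch of that capability check, using the same libva entry points but with error handling trimmed (the helper name is illustrative; the profile and entrypoint would be whatever the encoder was configured with):

    // Return true if the display supports the given profile/entrypoint pair,
    // following the vaQueryConfigProfiles/vaQueryConfigEntrypoints sequence
    // used by Initialize() above.
    #include <va/va.h>

    static bool encoder_supported(VADisplay dpy, VAProfile profile,
                                  VAEntrypoint entry) {
        int num = 0;
        bool found = false;

        VAProfile *profiles = new VAProfile[vaMaxNumProfiles(dpy)];
        if (vaQueryConfigProfiles(dpy, profiles, &num) == VA_STATUS_SUCCESS)
            for (int i = 0; i < num && !found; i++)
                found = (profiles[i] == profile);
        delete [] profiles;
        if (!found)
            return false;

        found = false;
        VAEntrypoint *entries = new VAEntrypoint[vaMaxNumEntrypoints(dpy)];
        if (vaQueryConfigEntrypoints(dpy, profile, entries, &num) == VA_STATUS_SUCCESS)
            for (int i = 0; i < num && !found; i++)
                found = (entries[i] == entry);
        delete [] entries;
        return found;
    }
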
/*free profiles and entrypoints*/ - if (va_profiles) - delete [] va_profiles; - - if (va_entrypoints) - delete [] va_entrypoints; - - if (surfaces) - delete [] surfaces; - - Unlock(); - return ret; -} - -MIX_RESULT MixVideoFormatEnc_Preview::Encode( - MixBuffer * bufin[], int bufincnt, - MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V( "Begin\n"); - /*currently only supports one input and output buffer*/ - if (bufincnt != 1 || iovoutcnt != 1) { - LOG_E("buffer count not equal to 1\n"); - LOG_E("maybe some exception occurred\n"); - } - - if (bufin[0] == NULL || iovout[0] == NULL) { - LOG_E("!bufin[0] ||!iovout[0]\n"); - return MIX_RESULT_NULL_PTR; - } -#if 0 - if (parent_class->encode) { - return parent_class->encode(mix, bufin, bufincnt, iovout, - iovoutcnt, encode_params); - } -#endif - LOG_V( "Locking\n"); - Lock(); - - //TODO: we could also move some encode preparation work here - LOG_V("mix_videofmtenc_preview_process_encode\n"); - - ret = _process_encode (bufin[0], iovout[0]); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_videofmtenc_preview_process_encode\n"); - goto cleanup; - } - -cleanup: - - LOG_V( "Unlocking\n"); - Unlock(); - LOG_V( "end\n"); - return ret; -} - -MIX_RESULT MixVideoFormatEnc_Preview::Flush() { - - LOG_V( "Begin\n"); - /*do not chain to parent flush func*/ -#if 0 - if (parent_class->flush) { - return parent_class->flush(mix, msg); - } -#endif - Lock(); -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } -#endif - - /*unref the reconstructed surface*/ - if (this->rec_frame != NULL) { - mix_videoframe_unref (this->rec_frame); - this->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (this->ref_frame != NULL) { - mix_videoframe_unref (this->ref_frame); - this->ref_frame = NULL; - } - - /*reset the properties*/ - this->encoded_frames = 0; - this->pic_skipped = FALSE; - this->is_intra = TRUE; - - Unlock(); - LOG_V( "end\n"); - return MIX_RESULT_SUCCESS; -} - - - -MIX_RESULT MixVideoFormatEnc_Preview::Deinitialize() { - VAStatus va_status; - MIX_RESULT ret = MIX_RESULT_SUCCESS; - LOG_V( "Begin\n"); - ret = MixVideoFormatEnc::Deinitialize(); - if (ret != MIX_RESULT_SUCCESS) { - return ret; - } - - LOG_V( "Release frames\n"); - Lock(); - -#if 0 - /*unref the current source surface*/ - if (self->cur_frame != NULL) - { - mix_videoframe_unref (self->cur_frame); - self->cur_frame = NULL; - } -#endif - - /*unref the reconstructed surface*/ - if (this->rec_frame != NULL) { - mix_videoframe_unref (this->rec_frame); - this->rec_frame = NULL; - } - - /*unref the reference surface*/ - if (this->ref_frame != NULL) { - mix_videoframe_unref (this->ref_frame); - this->ref_frame = NULL; - } - - if (this->lookup_frame != NULL) { - mix_videoframe_unref (this->lookup_frame); - this->lookup_frame = NULL; - } - - LOG_V( "Release surfaces\n"); - - if (this->shared_surfaces) { - delete [] this->shared_surfaces; - this->shared_surfaces = NULL; - } - - if (this->surfaces) { - delete [] this->surfaces; - this->surfaces = NULL; - } - - if (this->usrptr) { - delete [] this->usrptr; - this->usrptr = NULL; - } - - LOG_V( "vaDestroyContext\n"); - - va_status = vaDestroyContext (this->va_display, this->va_context); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed vaDestroyContext\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V( "vaDestroyConfig\n"); - - va_status = vaDestroyConfig 
(this->va_display, this->va_config); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed vaDestroyConfig\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - -cleanup: - - this->initialized = FALSE; - Unlock(); - LOG_V( "end\n"); - return ret; -} - - -MIX_RESULT MixVideoFormatEnc_Preview::_process_encode ( - MixBuffer * bufin, MixIOVec * iovout) { - - MIX_RESULT ret = MIX_RESULT_SUCCESS; - VAStatus va_status = VA_STATUS_SUCCESS; - VADisplay va_display = NULL; - VAContextID va_context; - ulong surface = 0; - uint16 width, height; - - uint surface_idx = (uint) -1; //fixme, temp use a big value - uint idx = 0; - - VAImage src_image; - uint8 *pvbuf; - uint8 *dst_y; - uint8 *dst_uv; - int i,j; - - //MixVideoFrame * tmp_frame; - //uint8 *buf; - if ((bufin == NULL) || (iovout == NULL)) { - LOG_E("bufin == NULL || iovout == NULL\n"); - return MIX_RESULT_NULL_PTR; - } - - LOG_V( "Begin\n"); - va_display = this->va_display; - va_context = this->va_context; - width = this->picture_width; - height = this->picture_height; - - LOG_I( "encoded_frames = %d\n", this->encoded_frames); - LOG_I( "is_intra = %d\n", this->is_intra); - LOG_I( "ci_frame_id = 0x%08x\n", (uint) this->ci_frame_id); - LOG_V("Get Surface from the pool\n"); - - switch (this->buffer_mode) { - case MIX_BUFFER_UPSTREAM_ALLOC_CI: - { - //MixVideoFrame * frame = mix_videoframe_new(); - if (this->lookup_frame == NULL) { - this->lookup_frame = mix_videoframe_new (); - if (this->lookup_frame == NULL) { - LOG_E("mix_videoframe_new() failed!\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - if (this->ref_frame == NULL) { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->ref_frame, this->lookup_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("get reference surface from pool failed\n"); - goto cleanup; - } - } - - if (this->rec_frame == NULL) { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->rec_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("get recontructed surface from pool failed\n"); - goto cleanup; - } - } - - //mix_videoframe_unref (mix->cur_frame); - if (this->need_display) { - this->cur_frame = NULL; - } - - if (this->cur_frame == NULL) { - uint ci_idx; -#ifndef ANDROID - memcpy (&ci_idx, bufin->data, bufin->size); -#else - memcpy (&ci_idx, bufin->data, sizeof(unsigned int)); -#endif - - LOG_I("surface_num = %d\n", this->surface_num); - LOG_I("ci_frame_idx = %d\n", ci_idx); - - if (ci_idx > this->surface_num - 2) { - LOG_E("the CI frame idx is too bigger than CI frame number\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, ci_idx); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->cur_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("get current working surface from pool failed\n"); - goto cleanup; - } - } - - ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); - } - - /* - * end of CI buffer allocation mode - */ 
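
In the CI branch above, the upstream camera component passes its frame index in-band at the head of the input buffer, and indices that would collide with the reference/reconstructed slots at the top of the surface array are rejected. A minimal sketch of that handshake, assuming a native-endian unsigned int at the start of the buffer (as the memcpy above implies; the helper name is illustrative):

    // Read the upstream CI frame index from the input buffer and validate it;
    // the last two surfaces are reserved for the ref/rec frames.
    #include <cstdint>
    #include <cstring>

    static bool read_ci_index(const uint8_t *data, unsigned surface_num,
                              unsigned *ci_idx) {
        std::memcpy(ci_idx, data, sizeof(*ci_idx));  // index travels in-band
        return *ci_idx <= surface_num - 2;           // mirrors the bounds check above
    }
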
- break; - case MIX_BUFFER_UPSTREAM_ALLOC_V4L2: - break; - case MIX_BUFFER_UPSTREAM_ALLOC_SURFACE: - break; - case MIX_BUFFER_SELF_ALLOC_SURFACE: - { - if (this->lookup_frame == NULL) { - this->lookup_frame = mix_videoframe_new (); - if (this->lookup_frame == NULL) { - LOG_E("mix_videoframe_new() failed!\n"); - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - } - - LOG_I("bufin->data = 0x%08x\n", bufin->data); - for (idx = 0; idx < this->alloc_surface_cnt; idx++) { - LOG_I ("mix->usrptr[%d] = 0x%08x\n", idx, this->usrptr[idx]); - if (bufin->data == this->usrptr[idx]) - surface_idx = idx; - } - - LOG_I("surface_num = %d\n", this->surface_num); - LOG_I("surface_idx = %d\n", surface_idx); - - if (surface_idx > this->surface_num - 2) { - LOG_W("the Surface idx is too big, most likely the buffer passed in is not allocated by us\n"); - ret = MIX_RESULT_FAIL; - goto no_share_mode; - } - - if (this->ref_frame == NULL) { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 1); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->ref_frame, this->lookup_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("get reference surface from pool failed\n"); - goto cleanup; - } - } - - if (this->rec_frame == NULL) { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, this->surface_num - 2); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->rec_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("get recontructed surface from pool failed\n"); - goto cleanup; - } - } - - //mix_videoframe_unref (this->cur_frame); - - if (this->need_display) { - this->cur_frame = NULL; - } - - if (this->cur_frame == NULL) { - ret = mix_videoframe_set_ci_frame_idx (this->lookup_frame, surface_idx); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("mix_videoframe_set_ci_frame_idx failed\n"); - goto cleanup; - } - - ret = mix_surfacepool_get_frame_with_ci_frameidx - (this->surfacepool, &this->cur_frame, this->lookup_frame); - - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("get current working surface from pool failed\n"); - goto cleanup; - } - } - - ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); - } - - break; - /* - * end of Self buffer allocation mode - */ - case MIX_BUFFER_ALLOC_NORMAL: - { - -no_share_mode: - - LOG_V("We are NOT in share buffer mode\n"); - if (this->ref_frame == NULL) { - ret = mix_surfacepool_get(this->surfacepool, &this->ref_frame); - if (ret != MIX_RESULT_SUCCESS) {//#ifdef SLEEP_SURFACE not used - LOG_E("Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (this->rec_frame == NULL) { - ret = mix_surfacepool_get(this->surfacepool, &this->rec_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - if (this->need_display) { - this->cur_frame = NULL; - } - - if (this->cur_frame == NULL) { - ret = mix_surfacepool_get(this->surfacepool, &this->cur_frame); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to mix_surfacepool_get\n"); - goto cleanup; - } - } - - LOG_V( "Get Surface Done\n"); - LOG_V("map source data to surface\n"); - ret = mix_videoframe_get_frame_id(this->cur_frame, &surface); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to mix_videoframe_get_frame_id\n"); - goto cleanup; - } - - 
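
The normal-allocation branch that follows uploads the source pixels through a VAImage derived from the target surface; the derive, map, write, unmap, destroy order is what keeps the surface consistent. A sketch of that lifecycle, assuming the caller has sized the source buffer to fit the image (plane pitches are ignored here for brevity; the real code honors them):

    // One-shot upload into a VA surface via a derived image. The derived
    // image must be unmapped and destroyed even when the copy is skipped.
    #include <va/va.h>
    #include <cstdint>
    #include <cstring>

    static VAStatus upload_frame(VADisplay dpy, VASurfaceID surface,
                                 const uint8_t *pixels, size_t bytes) {
        VAImage image;
        VAStatus st = vaDeriveImage(dpy, surface, &image);
        if (st != VA_STATUS_SUCCESS)
            return st;

        uint8_t *mapped = NULL;
        st = vaMapBuffer(dpy, image.buf, (void **)&mapped);
        if (st == VA_STATUS_SUCCESS) {
            std::memcpy(mapped + image.offsets[0], pixels, bytes);  // packed copy
            vaUnmapBuffer(dpy, image.buf);
        }
        vaDestroyImage(dpy, image.image_id);  // release the derived image
        return st;
    }
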
LOG_I("surface id = 0x%08x\n", (uint) surface); - va_status = vaDeriveImage(va_display, surface, &src_image); - //need to destroy - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaDeriveImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - VAImage *image = &src_image; - LOG_V( "vaDeriveImage Done\n"); - va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E( "Failed to vaMapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - LOG_V("vaImage information\n"); - LOG_I("image->pitches[0] = %d\n", image->pitches[0]); - LOG_I("image->pitches[1] = %d\n", image->pitches[1]); - LOG_I("image->offsets[0] = %d\n", image->offsets[0]); - LOG_I("image->offsets[1] = %d\n", image->offsets[1]); - LOG_I("image->num_planes = %d\n", image->num_planes); - LOG_I("image->width = %d\n", image->width); - LOG_I("image->height = %d\n", image->height); - LOG_I("input buf size = %d\n", bufin->size); - uint8 *inbuf = bufin->data; - -#ifndef ANDROID -#define USE_SRC_FMT_YUV420 -#else -#define USE_SRC_FMT_NV21 -#endif - -#ifdef USE_SRC_FMT_YUV420 - /*need to convert YUV420 to NV12*/ - dst_y = pvbuf +image->offsets[0]; - - for (i = 0; i < height; i ++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - - dst_uv = pvbuf + image->offsets[1]; - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2]; - dst_uv [j + 1] = inbuf [width * height * 5 / 4 + i * width / 2 + j / 2]; - } - dst_uv += image->pitches[1]; - } -#else //USE_SRC_FMT_NV12 or USE_SRC_FMT_NV21 - int offset_uv = width * height; - uint8 *inbuf_uv = inbuf + offset_uv; - int height_uv = height / 2; - int width_uv = width; - - dst_y = pvbuf + image->offsets[0]; - for (i = 0; i < height; i++) { - memcpy (dst_y, inbuf + i * width, width); - dst_y += image->pitches[0]; - } - -#ifdef USE_SRC_FMT_NV12 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i++) { - memcpy(dst_uv, inbuf_uv + i * width_uv, width_uv); - dst_uv += image->pitches[1]; - } -#else //USE_SRC_FMT_NV21 - dst_uv = pvbuf + image->offsets[1]; - for (i = 0; i < height_uv; i ++) { - for (j = 0; j < width_uv; j += 2) { - dst_uv[j] = inbuf_uv[j+1]; //u - dst_uv[j+1] = inbuf_uv[j]; //v - } - dst_uv += image->pitches[1]; - inbuf_uv += width_uv; - } -#endif -#endif //USE_SRC_FMT_YUV420 - va_status = vaUnmapBuffer(va_display, image->buf); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaUnmapBuffer\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - va_status = vaDestroyImage(va_display, src_image.image_id); - if (va_status != VA_STATUS_SUCCESS) { - LOG_E("Failed to vaDestroyImage\n"); - ret = MIX_RESULT_FAIL; - goto cleanup; - } - - LOG_V("Map source data to surface done\n"); - } - break; - default: - break; - - } - - LOG_V( "vaBeginPicture\n"); - LOG_I( "va_context = 0x%08x\n",(uint)va_context); - LOG_I( "surface = 0x%08x\n",(uint)surface); - LOG_I( "va_display = 0x%08x\n",(uint)va_display); - - iovout->data_size = 4; - iovout->data = new uchar[iovout->data_size]; - if (iovout->data == NULL) { - ret = MIX_RESULT_NO_MEMORY; - goto cleanup; - } - - memset (iovout->data, 0, iovout->data_size); - iovout->buffer_size = iovout->data_size; - - if (this->need_display) { - ret = mix_videoframe_set_sync_flag(this->cur_frame, TRUE); - if (ret != MIX_RESULT_SUCCESS) { - LOG_E("Failed to set sync_flag\n"); - goto cleanup; - } - - ret = mix_framemanager_enqueue(this->framemgr, this->cur_frame); - if (ret != 
MIX_RESULT_SUCCESS) { - LOG_E("Failed mix_framemanager_enqueue\n"); - goto cleanup; - } - } - - if (!(this->need_display)) { - mix_videoframe_unref (this->cur_frame); - this->cur_frame = NULL; - } - this->encoded_frames ++; - -cleanup: - - if (ret != MIX_RESULT_SUCCESS) { - if (iovout->data) { - delete [] iovout->data; - iovout->data = NULL; - } - } - LOG_V( "end\n"); - return ret; -} diff --git a/mix_video/src/mixvideoformatenc_preview.h b/mix_video/src/mixvideoformatenc_preview.h deleted file mode 100644 index bb3db71..0000000 --- a/mix_video/src/mixvideoformatenc_preview.h +++ /dev/null @@ -1,110 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEOFORMATENC_PREVIEW_H__ -#define __MIX_VIDEOFORMATENC_PREVIEW_H__ - -#include "mixvideoformatenc.h" -#include "mixvideoframe_private.h" - -#define MIX_VIDEO_ENC_PREVIEW_SURFACE_NUM 20 - -#define min(X,Y) (((X) < (Y)) ? (X) : (Y)) -#define max(X,Y) (((X) > (Y)) ? (X) : (Y)) - -/* - * Type macros. 
- */ -#define MIX_VIDEOFORMATENC_PREVIEW(obj) (dynamic_cast(obj))) -#define MIX_IS_VIDEOFORMATENC_PREVIEW(obj) (NULL != MIX_VIDEOFORMATENC_PREVIEW(obj)) - -class MixVideoFormatEnc_Preview : public MixVideoFormatEnc { -public: - MixVideoFormatEnc_Preview(); - virtual ~MixVideoFormatEnc_Preview(); - - virtual MIX_RESULT Initialize( - MixVideoConfigParamsEnc* config_params_enc, - MixFrameManager * frame_mgr, - MixBufferPool * input_buf_pool, - MixSurfacePool ** surface_pool, - MixUsrReqSurfacesInfo * requested_surface_info, - VADisplay va_display); - virtual MIX_RESULT Encode( MixBuffer * bufin[], - int bufincnt, MixIOVec * iovout[], int iovoutcnt, - MixVideoEncodeParams * encode_params); - virtual MIX_RESULT Flush(); - virtual MIX_RESULT Deinitialize(); - -private: - /* Local Methods */ - MIX_RESULT _process_encode (MixBuffer * bufin, MixIOVec * iovout); - -public: - VABufferID coded_buf; - VABufferID seq_param_buf; - VABufferID pic_param_buf; - VABufferID slice_param_buf; - VASurfaceID * shared_surfaces; - VASurfaceID * surfaces; - uint surface_num; - uint shared_surfaces_cnt; - uint precreated_surfaces_cnt; - - MixVideoFrame *cur_frame; //current input frame to be encoded; - MixVideoFrame *ref_frame; //reference frame - MixVideoFrame *rec_frame; //reconstructed frame; - MixVideoFrame *lookup_frame; - - uint basic_unit_size; //for rate control - uint disable_deblocking_filter_idc; - uint slice_num; - uint va_rcmode_preview; - - - uint encoded_frames; - bool pic_skipped; - - bool is_intra; - - uint coded_buf_size; - - uint8 ** usrptr; - uint alloc_surface_cnt; - - /*< public > */ -}; - - - -/** - * mix_videoformatenc_preview_new: - * @returns: A newly allocated instance of #MixVideoFormatEnc_Preview - * - * Use this method to create new instance of #MixVideoFormatEnc_Preview - */ -MixVideoFormatEnc_Preview *mix_videoformatenc_preview_new(void); - -/** - * mix_videoformatenc_preview_ref: - * @mix: object to add reference - * @returns: the MixVideoFormatEnc_Preview instance where reference count has been increased. - * - * Add reference count. - */ -MixVideoFormatEnc_Preview *mix_videoformatenc_preview_ref(MixVideoFormatEnc_Preview * mix); - -/** - * mix_videoformatenc_preview_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -MixVideoFormatEnc_Preview * mix_videoformatenc_preview_unref(MixVideoFormatEnc_Preview * mix); - -#endif /* __MIX_VIDEOFORMATENC_PREVIEW_H__ */ diff --git a/mix_video/src/mixvideoformatqueue.h b/mix_video/src/mixvideoformatqueue.h deleted file mode 100644 index b917313..0000000 --- a/mix_video/src/mixvideoformatqueue.h +++ /dev/null @@ -1,23 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
- -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_VIDEOFORMATQUEUE_H__ -#define __MIX_VIDEOFORMATQUEUE_H__ - -#include "mixbuffer.h" - -typedef struct _MixInputBufferEntry MixInputBufferEntry; - -struct _MixInputBufferEntry -{ - /*< private > */ - MixBuffer *buf; - uint64 timestamp; -}; - -#endif /* __MIX_VIDEOFORMATQUEUE_H__ */ diff --git a/mix_video/src/mixvideoframe.cpp b/mix_video/src/mixvideoframe.cpp deleted file mode 100644 index a5be31a..0000000 --- a/mix_video/src/mixvideoframe.cpp +++ /dev/null @@ -1,364 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideoframe - * @short_description: MI-X Video Frame Object - * - * - * The MixVideoFrame object will be created by - * MixVideo and provided to the MMF/App in the - * MixVideo mix_video_get_frame() function. - * - * - * mix_video_release_frame() must be used - * to release frame object returned from - * mix_video_get_frame(). Caller must not - * use mix_videoframe_ref() or mix_videoframe_unref() - * or adjust the reference count directly in any way. - * This object can be supplied in the mix_video_render() - * function to render the associated video frame. - * The MMF/App can release this object when it no longer - * needs to display/re-display this frame. 
- * - */ - - -#include -#ifndef ANDROID -#include -#endif -#include "mixvideolog.h" -//#include "mixvideoframe_private.h" -#include "mixsurfacepool.h" -#include "mixvideoframe.h" - -#define SAFE_FREE(p) if(p) { free(p); p = NULL; } - -#define MIX_VIDEOFRAME_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOFRAME(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOFRAME(obj)) return MIX_RESULT_FAIL; \ - - -MixVideoFrame::MixVideoFrame() - :frame_id(VA_INVALID_SURFACE) - ,timestamp(0) - ,discontinuity(FALSE) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) - ,pool(NULL) - ,is_skipped(FALSE) - ,real_frame(NULL) - ,sync_flag(FALSE) - ,frame_structure(VA_FRAME_PICTURE) - ,va_display(NULL) { -// g_static_rec_mutex_init (&lock); -} - -MixVideoFrame::~MixVideoFrame() { -// g_static_rec_mutex_free (&lock); -} - -bool MixVideoFrame::copy(MixParams *target) const { - bool ret = FALSE; - MixVideoFrame * this_target = MIX_VIDEOFRAME(target); - if (NULL != this_target) { - this_target->frame_id = this->frame_id; - this_target->timestamp = this->timestamp; - this_target->discontinuity = this->discontinuity; - // chain up base class - ret = MixParams::copy(target); - } - return ret; -} - -bool MixVideoFrame::equal(MixParams* obj) const { - bool ret = FALSE; - MixVideoFrame * this_obj = MIX_VIDEOFRAME(obj); - if (NULL != this_obj) { - /* TODO: add comparison for other properties */ - if (this->frame_id == this_obj->frame_id && - this->timestamp == this_obj->timestamp && - this->discontinuity == this_obj->discontinuity) { - ret = MixParams::equal(this_obj); - } - } - return ret; -} - -MixParams* MixVideoFrame::dup() const { - MixParams *ret = new MixVideoFrame(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; -} - -void MixVideoFrame::Lock() { -// g_static_rec_mutex_lock(&lock); - mLock.lock(); -} -void MixVideoFrame::Unlock() { -// g_static_rec_mutex_unlock (&lock); - mLock.unlock(); -} - -MixVideoFrame * mix_videoframe_new(void) { - return new MixVideoFrame(); -} - - - -MixVideoFrame * mix_videoframe_ref(MixVideoFrame * obj) { - if (NULL != obj) { - obj->Lock(); - LOG_I("obj %x, new refcount is %d\n", (uint) obj, - obj->GetRefCount() + 1); - obj->Ref(); - obj->Unlock(); - } - return obj; -} - -void mix_videoframe_unref(MixVideoFrame * obj) { - if (NULL == obj) { - LOG_E("obj is NULL\n"); - return; - } - - obj->Lock(); - LOG_I("obj %x, frame id %d, new refcount is %d\n", (uint) obj, - (uint) obj->frame_id, obj->GetRefCount() - 1); - - // Check if we have reduced to 1, in which case we add ourselves to free pool - // but only do this for real frames, not skipped frames - if (((obj->GetRefCount() - 1) == 1) && (!(obj->is_skipped))) { - LOG_I("Adding obj %x, frame id %d back to pool\n", (uint) obj, - (uint) obj->frame_id); - MixSurfacePool *pool = obj->pool; - if (pool == NULL) { - LOG_E("pool is NULL\n"); - obj->Unlock(); - return; - } - mix_videoframe_reset(obj); - mix_surfacepool_put(pool, obj); - } - - //If this is a skipped frame that is being deleted, release the real frame - if (((obj->GetRefCount() - 1) == 0) && (obj->is_skipped)) { - LOG_I("skipped frame obj %x, releasing real frame %x \n", - (uint) obj, (uint) obj->real_frame); - mix_videoframe_unref(obj->real_frame); - } - - // Unref through base class - obj->Unref(); - obj->Unlock(); -} - - -/* TODO: Add getters and setters for 
other properties. The following is just an example, not implemented yet. */ -MIX_RESULT mix_videoframe_set_frame_id( - MixVideoFrame * obj, ulong frame_id) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->frame_id = frame_id; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_frame_id( - MixVideoFrame * obj, ulong * frame_id) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_id); - *frame_id = obj->frame_id; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_ci_frame_idx ( - MixVideoFrame * obj, uint ci_frame_idx) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->ci_frame_idx = ci_frame_idx; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_ci_frame_idx ( - MixVideoFrame * obj, uint * ci_frame_idx) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, ci_frame_idx); - *ci_frame_idx = obj->ci_frame_idx; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_timestamp( - MixVideoFrame * obj, uint64 timestamp) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->timestamp = timestamp; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_timestamp( - MixVideoFrame * obj, uint64 * timestamp) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, timestamp); - *timestamp = obj->timestamp; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_discontinuity( - MixVideoFrame * obj, bool discontinuity) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->discontinuity = discontinuity; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_discontinuity( - MixVideoFrame * obj, bool * discontinuity) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, discontinuity); - *discontinuity = obj->discontinuity; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_frame_structure( - MixVideoFrame * obj, uint32 frame_structure) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->frame_structure = frame_structure; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_frame_structure( - MixVideoFrame * obj, uint32* frame_structure) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_structure); - *frame_structure = obj->frame_structure; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_pool( - MixVideoFrame * obj, MixSurfacePool * pool) { - /* set pool pointer in private structure */ - obj->pool = pool; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_frame_type( - MixVideoFrame *obj, MixFrameType frame_type) { - obj->frame_type = frame_type; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_frame_type( - MixVideoFrame *obj, MixFrameType *frame_type) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, frame_type); - *frame_type = obj->frame_type; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_is_skipped( - MixVideoFrame *obj, bool is_skipped) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->is_skipped = is_skipped; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_is_skipped( - MixVideoFrame *obj, bool *is_skipped) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, is_skipped); - *is_skipped = obj->is_skipped; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_real_frame( - MixVideoFrame *obj, MixVideoFrame *real) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->real_frame = real; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_real_frame( - MixVideoFrame *obj, MixVideoFrame **real) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, real); - *real = obj->real_frame; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT
mix_videoframe_reset(MixVideoFrame *obj) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->timestamp = 0; - obj->discontinuity = FALSE; - obj->is_skipped = FALSE; - obj->real_frame = NULL; - obj->sync_flag = FALSE; - obj->frame_structure = VA_FRAME_PICTURE; - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videoframe_set_sync_flag( - MixVideoFrame *obj, bool sync_flag) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->sync_flag = sync_flag; - if (obj->real_frame && obj->real_frame != obj) { - mix_videoframe_set_sync_flag(obj->real_frame, sync_flag); - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_sync_flag( - MixVideoFrame *obj, bool *sync_flag) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, sync_flag); - if (obj->real_frame && obj->real_frame != obj) { - return mix_videoframe_get_sync_flag(obj->real_frame, sync_flag); - } else { - *sync_flag = obj -> sync_flag; - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_vadisplay( - MixVideoFrame * obj, void *va_display) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->va_display = va_display; - if (obj->real_frame && obj->real_frame != obj) { - mix_videoframe_set_vadisplay(obj->real_frame, va_display); - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_get_vadisplay( - MixVideoFrame * obj, void **va_display) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, va_display); - if (obj->real_frame && obj->real_frame != obj) { - return mix_videoframe_get_vadisplay(obj->real_frame, va_display); - } else { - *va_display = obj->va_display; - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videoframe_set_displayorder( - MixVideoFrame *obj, uint32 displayorder) { - MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj); - obj->displayorder = displayorder; - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT mix_videoframe_get_displayorder( - MixVideoFrame *obj, uint32 *displayorder) { - MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, displayorder); - *displayorder = obj->displayorder; - return MIX_RESULT_SUCCESS; -} - - - diff --git a/mix_video/src/mixvideoframe.h b/mix_video/src/mixvideoframe.h deleted file mode 100644 index fd58bbd..0000000 --- a/mix_video/src/mixvideoframe.h +++ /dev/null @@ -1,257 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEOFRAME_H__ -#define __MIX_VIDEOFRAME_H__ - -#include -#include "mixvideodef.h" -#include "mixvideothread.h" - -class MixSurfacePool; - -/** - * MIX_VIDEOFRAME: - * @obj: object to be type-casted. 
- */ -#define MIX_VIDEOFRAME(obj) (reinterpret_cast(obj)) - -/** - * MIX_IS_VIDEOFRAME: - * @obj: an object. - * - * Checks if the given object is an instance of #MixVideoFrame - */ -#define MIX_IS_VIDEOFRAME(obj) ((NULL != MIX_VIDEOFRAME(obj)) ? TRUE : FALSE) - - - -typedef enum _MixFrameType { - TYPE_I, - TYPE_P, - TYPE_B, - TYPE_INVALID -} MixFrameType; - -/** - * MixVideoFrame: - * - * MI-X VideoConfig Parameter object - */ -class MixVideoFrame : public MixParams { -public: - MixVideoFrame(); - ~MixVideoFrame(); - virtual bool copy(MixParams *target) const; - virtual bool equal(MixParams* obj) const; - virtual MixParams* dup() const; - void Lock(); - void Unlock(); -public: - /* ID associated with the decoded frame */ - ulong frame_id; - - /* ID associated with the CI frame - * (used for encode only) */ - uint ci_frame_idx; - - /* 64 bit timestamp. For decode, - * this is preserved from the corresponding - * MixVideoDecodeParams field. For encode, - * this is created during encoding. */ - uint64 timestamp; - - /* Flag indicating whether there - * is a discontinuity. For decode, - * this is preserved from the corresponding - * MixVideoDecodeParams field. */ - bool discontinuity; - - /* Reserved for future use */ - void *reserved1; - - /* Reserved for future use */ - void *reserved2; - - /* Reserved for future use */ - void *reserved3; - - /* Reserved for future use */ - void *reserved4; - -public: - // from structure MixVideoFramePrivate - MixSurfacePool *pool; - MixFrameType frame_type; - bool is_skipped; - MixVideoFrame *real_frame; -// GStaticRecMutex lock; - mutable MixVideoMutex mLock; - bool sync_flag; - uint32 frame_structure; // 0: frame, 1: top field, 2: bottom field - void *va_display; - uint32 displayorder; - -}; - -/** - * mix_videoframe_new: - * @returns: A newly allocated instance of #MixVideoFrame - * - * Use this method to create new instance of #MixVideoFrame - */ -MixVideoFrame *mix_videoframe_new(void); -/** - * mix_videoframe_ref: - * @mix: object to add reference - * @returns: the #MixVideoFrame instance where reference count has been increased. - * - * Add reference count. - */ -MixVideoFrame *mix_videoframe_ref(MixVideoFrame * obj); - -/** - * mix_videoframe_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. 
- */ -void mix_videoframe_unref(MixVideoFrame * obj); - -/* Class Methods */ - -/** - * mix_videoframe_set_frame_id: - * @obj: #MixVideoFrame object - * @frame_id: ID associated with the decoded frame - * @returns: Common Video Error Return Codes - * - * Set Frame ID - */ -MIX_RESULT mix_videoframe_set_frame_id(MixVideoFrame * obj, ulong frame_id); - -/** - * mix_videoframe_get_frame_id: - * @obj: #MixVideoFrame object - * @frame_id: frame ID to be returned - * @returns: Common Video Error Return Codes - * - * Get Frame ID - */ -MIX_RESULT mix_videoframe_get_frame_id(MixVideoFrame * obj, ulong * frame_id); - -/** - * mix_videoframe_set_ci_frame_idx: - * @obj: #MixVideoFrame object - * @ci_frame_idx: ID associated with the CI frame (used for encode only) - * @returns: Common Video Error Return Codes - * - * Set CI Frame ID - */ -MIX_RESULT mix_videoframe_set_ci_frame_idx(MixVideoFrame * obj, uint ci_frame_idx); - -/** - * mix_videoframe_get_ci_frame_idx: - * @obj: #MixVideoFrame object - * @ci_frame_idx: CI Frame ID to be returned - * @returns: Common Video Error Return Codes - * - * Get CI Frame ID - */ -MIX_RESULT mix_videoframe_get_ci_frame_idx(MixVideoFrame * obj, uint * ci_frame_idx); - -/** - * mix_videoframe_set_timestamp: - * @obj: #MixVideoFrame object - * @timestamp: Frame timestamp - * @returns: Common Video Error Return Codes - * - * Set Frame timestamp - */ -MIX_RESULT mix_videoframe_set_timestamp(MixVideoFrame * obj, uint64 timestamp); - -/** - * mix_videoframe_get_timestamp: - * @obj: #MixVideoFrame object - * @timestamp: Frame timestamp to be returned - * @returns: Common Video Error Return Codes - * - * Get Frame timestamp - */ -MIX_RESULT mix_videoframe_get_timestamp(MixVideoFrame * obj, uint64 * timestamp); - -/** - * mix_videoframe_set_discontinuity: - * @obj: #MixVideoFrame object - * @discontinuity: Discontinuity flag - * @returns: Common Video Error Return Codes - * - * Get discontinuity flag - */ -MIX_RESULT mix_videoframe_set_discontinuity(MixVideoFrame * obj, bool discontinuity); - -/** - * mix_videoframe_get_discontinuity: - * @obj: #MixVideoFrame object - * @discontinuity: Discontinuity flag to be returned - * @returns: Common Video Error Return Codes - * - * Get discontinuity flag - */ -MIX_RESULT mix_videoframe_get_discontinuity(MixVideoFrame * obj, bool * discontinuity); - -/** - * TODO: Add document the following 2 functions - * - */ -MIX_RESULT mix_videoframe_set_vadisplay(MixVideoFrame * obj, void *va_display); -MIX_RESULT mix_videoframe_get_vadisplay(MixVideoFrame * obj, void **va_display); -MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, uint32* frame_structure); - -// from private structure MixVideoFramePrivate -/* Private functions */ -MIX_RESULT -mix_videoframe_set_pool (MixVideoFrame *obj, MixSurfacePool *pool); - -MIX_RESULT -mix_videoframe_set_frame_type (MixVideoFrame *obj, MixFrameType frame_type); - -MIX_RESULT -mix_videoframe_get_frame_type (MixVideoFrame *obj, MixFrameType *frame_type); - -MIX_RESULT -mix_videoframe_set_is_skipped (MixVideoFrame *obj, bool is_skipped); - -MIX_RESULT -mix_videoframe_get_is_skipped (MixVideoFrame *obj, bool *is_skipped); - -MIX_RESULT -mix_videoframe_set_real_frame (MixVideoFrame *obj, MixVideoFrame *real); - -MIX_RESULT -mix_videoframe_get_real_frame (MixVideoFrame *obj, MixVideoFrame **real); - -MIX_RESULT -mix_videoframe_reset(MixVideoFrame *obj); - -MIX_RESULT -mix_videoframe_set_sync_flag(MixVideoFrame *obj, bool sync_flag); - -MIX_RESULT -mix_videoframe_get_sync_flag(MixVideoFrame *obj, 
bool *sync_flag); - -MIX_RESULT -mix_videoframe_set_frame_structure(MixVideoFrame * obj, uint32 frame_structure); - -MIX_RESULT -mix_videoframe_set_displayorder(MixVideoFrame *obj, uint32 displayorder); - -MIX_RESULT -mix_videoframe_get_displayorder(MixVideoFrame *obj, uint32 *displayorder); - -#endif /* __MIX_VIDEOFRAME_H__ */ diff --git a/mix_video/src/mixvideoframe_private.h b/mix_video/src/mixvideoframe_private.h deleted file mode 100644 index 96c22bd..0000000 --- a/mix_video/src/mixvideoframe_private.h +++ /dev/null @@ -1,83 +0,0 @@ -/* -INTEL CONFIDENTIAL -Copyright 2009 Intel Corporation All Rights Reserved. -The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - -No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_VIDEOFRAME_PRIVATE_H__ -#define __MIX_VIDEOFRAME_PRIVATE_H__ -#if 0 -#include "mixvideoframe.h" -#include "mixsurfacepool.h" - -typedef enum _MixFrameType -{ - TYPE_I, - TYPE_P, - TYPE_B, - TYPE_INVALID -} MixFrameType; - -class MixVideoFramePrivate -{ -public: - MixVideoFramePrivate() - :pool(NULL) - ,is_skipped(FALSE) - ,real_frame(NULL) - ,sync_flag(FALSE) - ,frame_structure(VA_FRAME_PICTURE) - ,va_display(NULL) - {} -public: - /*< private > */ - -}; - - - - -/* Private functions */ -MIX_RESULT -mix_videoframe_set_pool (MixVideoFrame *obj, MixSurfacePool *pool); - -MIX_RESULT -mix_videoframe_set_frame_type (MixVideoFrame *obj, MixFrameType frame_type); - -MIX_RESULT -mix_videoframe_get_frame_type (MixVideoFrame *obj, MixFrameType *frame_type); - -MIX_RESULT -mix_videoframe_set_is_skipped (MixVideoFrame *obj, bool is_skipped); - -MIX_RESULT -mix_videoframe_get_is_skipped (MixVideoFrame *obj, bool *is_skipped); - -MIX_RESULT -mix_videoframe_set_real_frame (MixVideoFrame *obj, MixVideoFrame *real); - -MIX_RESULT -mix_videoframe_get_real_frame (MixVideoFrame *obj, MixVideoFrame **real); - -MIX_RESULT -mix_videoframe_reset(MixVideoFrame *obj); - -MIX_RESULT -mix_videoframe_set_sync_flag(MixVideoFrame *obj, bool sync_flag); - -MIX_RESULT -mix_videoframe_get_sync_flag(MixVideoFrame *obj, bool *sync_flag); - -MIX_RESULT -mix_videoframe_set_frame_structure(MixVideoFrame * obj, uint32 frame_structure); - -MIX_RESULT -mix_videoframe_set_displayorder(MixVideoFrame *obj, uint32 displayorder); - -MIX_RESULT -mix_videoframe_get_displayorder(MixVideoFrame *obj, uint32 *displayorder); -#endif - -#endif /* __MIX_VIDEOFRAME_PRIVATE_H__ */ diff --git a/mix_video/src/mixvideoinitparams.cpp b/mix_video/src/mixvideoinitparams.cpp deleted file mode 100644 index cba548a..0000000 --- a/mix_video/src/mixvideoinitparams.cpp +++ /dev/null @@ -1,126 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 
2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideoinitparams - * @short_description: MI-X Video Initialization Parameters - * - * The MixVideoInitParams object will be created by the MMF/App - * and provided in the mix_video_initialize() function. - * The get and set methods for the properties will be available for - * the caller to set and get information used at initialization time. - */ - -#include "mixvideoinitparams.h" - -#define SAFE_FREE(p) if(p) { free(p); p = NULL; } - -#define MIX_VIDEOINITPARAMS_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOINITPARAMS(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEOINITPARAMS_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEOINITPARAMS(obj)) return MIX_RESULT_FAIL; \ - -MixVideoInitParams::MixVideoInitParams() - :display(NULL) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) { -} -MixVideoInitParams::~MixVideoInitParams() { - /* unref display */ - if (this->display) { - mix_display_unref(this->display); - this->display = NULL; - } -} - -bool MixVideoInitParams::copy(MixParams *target) const { - bool ret = FALSE; - MixVideoInitParams * this_target = MIX_VIDEOINITPARAMS(target); - if (NULL != this_target) { - /* duplicate display */ - this_target->display = mix_display_dup(this->display); - // chain up base class - ret = MixParams::copy(target); - } - return ret; -} - -bool MixVideoInitParams::equal(MixParams* obj) const { - bool ret = FALSE; - MixVideoInitParams * this_obj = MIX_VIDEOINITPARAMS(obj); - if (NULL != this_obj) { - /* TODO: add comparison for other properties */ - if ((NULL == this->display && NULL == this_obj->display) || - mix_display_equal(this->display, this_obj->display)) { - ret = MixParams::equal(this_obj); - } - } - return ret; -} - -MixParams* MixVideoInitParams::dup() const { - MixParams *ret = new MixVideoInitParams(); - if (NULL != ret) { - if (FALSE == copy(ret)) { - ret->Unref(); - ret = NULL; - } - } - return ret; -} - -MixVideoInitParams * mix_videoinitparams_new(void) { - return new MixVideoInitParams(); -} - -MixVideoInitParams * -mix_videoinitparams_ref(MixVideoInitParams * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} - -MIX_RESULT mix_videoinitparams_set_display( - MixVideoInitParams * obj, MixDisplay * display) { - MIX_VIDEOINITPARAMS_SETTER_CHECK_INPUT (obj); - if (obj->display) { - mix_display_unref(obj->display); - } - obj->display = NULL; - if (display) { - /* 
obj->display = mix_display_dup(display); - if(!obj->display) { - return MIX_RESULT_NO_MEMORY; - }*/ - - obj->display = mix_display_ref(display); - } - return MIX_RESULT_SUCCESS; -} - -/* - Caller is responsible to use g_free to free the memory - */ -MIX_RESULT mix_videoinitparams_get_display( - MixVideoInitParams * obj, MixDisplay ** display) { - MIX_VIDEOINITPARAMS_GETTER_CHECK_INPUT (obj, display); - *display = NULL; - if (obj->display) { - /* *display = mix_display_dup(obj->display); - if(!*display) { - return MIX_RESULT_NO_MEMORY; - }*/ - *display = mix_display_ref(obj->display); - } - return MIX_RESULT_SUCCESS; -} diff --git a/mix_video/src/mixvideoinitparams.h b/mix_video/src/mixvideoinitparams.h deleted file mode 100644 index 000257c..0000000 --- a/mix_video/src/mixvideoinitparams.h +++ /dev/null @@ -1,113 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. -*/ - -#ifndef __MIX_VIDEOINITPARAMS_H__ -#define __MIX_VIDEOINITPARAMS_H__ - -#include -#include "mixdisplay.h" -#include "mixvideodef.h" - -/** - * MIX_VIDEOINITPARAMS: - * @obj: object to be type-casted. - */ -#define MIX_VIDEOINITPARAMS(obj) (reinterpret_cast(obj)) - -/** - * MIX_IS_VIDEOINITPARAMS: - * @obj: an object. - * - * Checks if the given object is an instance of #MixParams - */ -#define MIX_IS_VIDEOINITPARAMS(obj) ((NULL != MIX_VIDEOINITPARAMS(obj)) ? TRUE : FALSE) - -/** - * MixVideoInitParams: - * - * MI-X VideoInit Parameter object - */ -class MixVideoInitParams : public MixParams { -public: - MixVideoInitParams(); - ~MixVideoInitParams(); - virtual bool copy(MixParams *target) const; - virtual bool equal(MixParams* obj) const; - virtual MixParams* dup() const; -public: - /*< public > */ - - /* Pointer to a MixDisplay object - * such as MixDisplayX11 */ - MixDisplay *display; - - /* Reserved for future use */ - void *reserved1; - - /* Reserved for future use */ - void *reserved2; - - /* Reserved for future use */ - void *reserved3; - - /* Reserved for future use */ - void *reserved4; -}; - -/** - * mix_videoinitparams_new: - * @returns: A newly allocated instance of #MixVideoInitParams - * - * Use this method to create new instance of #MixVideoInitParams - */ -MixVideoInitParams *mix_videoinitparams_new (void); -/** - * mix_videoinitparams_ref: - * @mix: object to add reference - * @returns: the #MixVideoInitParams instance where reference count has been increased. - * - * Add reference count. 
- */ -MixVideoInitParams *mix_videoinitparams_ref (MixVideoInitParams * mix); - -/** - * mix_videoinitparams_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. - */ -#define mix_videoinitparams_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/* Class Methods */ - - -/** - * mix_videoinitparams_set_display: - * @obj: #MixVideoInitParams object - * @display: Pointer to a MixDisplay object such as MixDisplayX11 - * @returns: Common Video Error Return Codes - * - * Set MixDisplay object - */ -MIX_RESULT mix_videoinitparams_set_display ( - MixVideoInitParams * obj, MixDisplay * display); - -/** - * mix_videoinitparams_get_display: - * @obj: #MixVideoInitParams object - * @dislay: Pointer to pointer of a MixDisplay object such as MixDisplayX11 - * @returns: Common Video Error Return Codes - * - * Get MixDisplay object - */ -MIX_RESULT mix_videoinitparams_get_display ( - MixVideoInitParams * obj, MixDisplay ** dislay); - - - -#endif /* __MIX_VIDEOINITPARAMS_H__ */ diff --git a/mix_video/src/mixvideolog.h b/mix_video/src/mixvideolog.h deleted file mode 100644 index 6f40943..0000000 --- a/mix_video/src/mixvideolog.h +++ /dev/null @@ -1,27 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEO_LOG_H__ -#define __MIX_VIDEO_LOG_H__ -#include - - -#ifdef MIX_LOG_ENABLE -#define LOG_V(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__) -#define LOG_I(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_INFO, format, ##__VA_ARGS__) -#define LOG_W(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_WARNING, format, ##__VA_ARGS__) -#define LOG_E(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, format, ##__VA_ARGS__) -#else -#define LOG_V(format, ...) -#define LOG_I(format, ...) -#define LOG_W(format, ...) -#define LOG_E(format, ...) -#endif - - -#endif /* __MIX_VIDEO_LOG_H__ */ diff --git a/mix_video/src/mixvideorenderparams.cpp b/mix_video/src/mixvideorenderparams.cpp deleted file mode 100644 index 063c32b..0000000 --- a/mix_video/src/mixvideorenderparams.cpp +++ /dev/null @@ -1,301 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. 
The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideorenderparams - * @short_description: MI-X Video Render Parameters - * - * The #MixVideoRenderParams object will be created by the MMF/App - * and provided to #MixVideo in the #MixVideo mix_video_render() function. - */ - -#include <va/va.h> /* libVA */ -#include "mixvideorenderparams.h" -#include "mixvideorenderparams_internal.h" - -#include <string.h> /* memset/memcpy */ - -#define MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT(obj) \ - if(!obj) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEORENDERPARAMS(obj)) return MIX_RESULT_FAIL; \ - -#define MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT(obj, prop) \ - if(!obj || !prop) return MIX_RESULT_NULL_PTR; \ - if(!MIX_IS_VIDEORENDERPARAMS(obj)) return MIX_RESULT_FAIL; \ - -bool mix_rect_equal(MixRect rc1, MixRect rc2); - -MixVideoRenderParams::MixVideoRenderParams() - :display(NULL) - ,clipping_rects(NULL) - ,number_of_clipping_rects(0) - ,reserved(NULL) - ,reserved1(NULL) - ,reserved2(NULL) - ,reserved3(NULL) - ,reserved4(NULL) - ,mVa_cliprects(NULL) { - /* initialize properties here */ - memset(&src_rect, 0, sizeof(MixRect)); - memset(&dst_rect, 0, sizeof(MixRect)); -} - -MixVideoRenderParams::~MixVideoRenderParams() { - if (NULL != clipping_rects) { - free(clipping_rects); - clipping_rects = NULL; - } - if (NULL != mVa_cliprects) { - free(mVa_cliprects); - mVa_cliprects = NULL; - } - number_of_clipping_rects = 0; - if (NULL != display) { - mix_display_unref(display); - display = NULL; - } -} - -bool MixVideoRenderParams::copy(MixParams *target) const { - if (NULL == target) return FALSE; - MixVideoRenderParams *this_target = MIX_VIDEORENDERPARAMS(target); - MIX_RESULT mix_result = MIX_RESULT_FAIL; - - if (this_target == this) { - return TRUE; - } - - if (NULL != this_target) { - mix_result = mix_videorenderparams_set_display(this_target, display); - if (MIX_RESULT_SUCCESS != mix_result) { - return FALSE; - } - - mix_result = mix_videorenderparams_set_clipping_rects(this_target, - clipping_rects, number_of_clipping_rects); - - if (MIX_RESULT_SUCCESS != mix_result) { - return FALSE; - } - - this_target->src_rect = src_rect; - this_target->dst_rect = dst_rect; - - } - return MixParams::copy(target); -} - -bool MixVideoRenderParams::equal(MixParams* obj) const { - bool ret = FALSE; - MixVideoRenderParams *this_obj = MIX_VIDEORENDERPARAMS(obj); - if (NULL != this_obj) { - // Deep compare - if (mix_display_equal(MIX_DISPLAY(display), MIX_DISPLAY( - this_obj->display)) && mix_rect_equal(src_rect, - this_obj->src_rect) && mix_rect_equal(dst_rect, - this_obj->dst_rect) && number_of_clipping_rects - == this_obj->number_of_clipping_rects && memcmp( - (uchar *) clipping_rects, - (uchar *) this_obj->clipping_rects, - number_of_clipping_rects * sizeof(MixRect)) == 0) { - // byte-compare the clipping rectangle arrays (counts compared above) - // members within this scope equal. chaining up.
- ret = MixParams::equal(obj); - } - } - return ret; -} - -MixParams* MixVideoRenderParams::dup() const { - MixParams *ret = NULL; - MixVideoRenderParams *duplicate = mix_videorenderparams_new(); - if (copy(duplicate)) { - ret = duplicate; - } else { - mix_videorenderparams_unref(duplicate); - } - return ret; -} - -MIX_RESULT MixVideoRenderParams::set_clipping_rects( - MixRect* clipping_rects, - uint number_of_clipping_rects) { - - if (this->clipping_rects) { - free(this->clipping_rects); - this->clipping_rects = NULL; - this->number_of_clipping_rects = 0; - } - - if (this->mVa_cliprects) { - free(this->mVa_cliprects); - this->mVa_cliprects = NULL; - } - - /* copy the caller's rect list only when a non-empty list is provided */ - if ((NULL != clipping_rects) && (0 != number_of_clipping_rects)) { -// this->clipping_rects = reinterpret_cast<MixRect*>(g_memdup(clipping_rects, number_of_clipping_rects * sizeof(MixRect))); - this->clipping_rects = (MixRect*)malloc(number_of_clipping_rects * sizeof(MixRect)); - if (NULL == this->clipping_rects) { - return MIX_RESULT_NO_MEMORY; - } - memcpy(this->clipping_rects, clipping_rects, number_of_clipping_rects * sizeof(MixRect)); - this->number_of_clipping_rects = number_of_clipping_rects; - - /* create VARectangle list */ - this->mVa_cliprects = reinterpret_cast<VARectangle*>(malloc(number_of_clipping_rects * sizeof(VARectangle))); - if (NULL == this->mVa_cliprects) { - return MIX_RESULT_NO_MEMORY; - } - - for (uint idx = 0; idx < number_of_clipping_rects; ++idx) { - this->mVa_cliprects[idx].x = clipping_rects[idx].x; - this->mVa_cliprects[idx].y = clipping_rects[idx].y; - this->mVa_cliprects[idx].width = clipping_rects[idx].width; - this->mVa_cliprects[idx].height = clipping_rects[idx].height; - } - } - - return MIX_RESULT_SUCCESS; -} - - -MIX_RESULT MixVideoRenderParams::get_clipping_rects(MixRect ** clipping_rects, - uint* number_of_clipping_rects) { - if (NULL == clipping_rects || NULL == number_of_clipping_rects) - return MIX_RESULT_NULL_PTR; - - *clipping_rects = NULL; - *number_of_clipping_rects = 0; - - if ((NULL != this->clipping_rects) && (0 != this->number_of_clipping_rects)) { -// *clipping_rects = reinterpret_cast<MixRect*>(g_memdup(this->clipping_rects, this->number_of_clipping_rects * sizeof(MixRect))); - *clipping_rects = (MixRect*)malloc(this->number_of_clipping_rects * sizeof(MixRect)); - if (NULL == *clipping_rects) { - return MIX_RESULT_NO_MEMORY; - } - memcpy(*clipping_rects, this->clipping_rects, this->number_of_clipping_rects * sizeof(MixRect)); - *number_of_clipping_rects = this->number_of_clipping_rects; - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT MixVideoRenderParams::get_va_cliprects(VARectangle ** va_cliprects, - uint* number_of_cliprects) { - if (NULL == va_cliprects || NULL == number_of_cliprects) - return MIX_RESULT_NULL_PTR; - - *va_cliprects = NULL; - *number_of_cliprects = 0; - - if ((NULL != mVa_cliprects) && (0 != number_of_clipping_rects)) { - *va_cliprects = mVa_cliprects; - *number_of_cliprects = number_of_clipping_rects; - } - return MIX_RESULT_SUCCESS; -} - -MixVideoRenderParams * -mix_videorenderparams_new(void) { - return new MixVideoRenderParams(); -} - -MixVideoRenderParams * -mix_videorenderparams_ref(MixVideoRenderParams * mix) { - if (NULL != mix) - mix->Ref(); - return mix; -} - -bool mix_rect_equal(MixRect rc1, MixRect rc2) { - if (rc1.x == rc2.x && rc1.y == rc2.y && rc1.width == rc2.width - && rc1.height == rc2.height) { - return TRUE; - } - return FALSE; -} - - -/* TODO: Add getters and setters for other properties. The following is just an example, not implemented yet.
*/ - -MIX_RESULT mix_videorenderparams_set_display( - MixVideoRenderParams * obj, MixDisplay * display) { - MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj); - if (obj->display) { - mix_display_unref(obj->display); - obj->display = NULL; - } - /* dup */ - if (display) { - obj->display = mix_display_ref(display); - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videorenderparams_get_display( - MixVideoRenderParams * obj, MixDisplay ** display) { - MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, display); - /* dup? */ - if (obj->display) { - *display = mix_display_ref(obj->display); - } - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videorenderparams_set_src_rect( - MixVideoRenderParams * obj, MixRect src_rect) { - MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj); - obj->src_rect = src_rect; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videorenderparams_get_src_rect( - MixVideoRenderParams * obj, MixRect * src_rect) { - MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, src_rect); - *src_rect = obj->src_rect; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videorenderparams_set_dest_rect( - MixVideoRenderParams * obj, MixRect dst_rect) { - MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj); - obj->dst_rect = dst_rect; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videorenderparams_get_dest_rect( - MixVideoRenderParams * obj, MixRect * dst_rect) { - MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, dst_rect); - *dst_rect = obj->dst_rect; - return MIX_RESULT_SUCCESS; -} - -MIX_RESULT mix_videorenderparams_set_clipping_rects( - MixVideoRenderParams * obj, MixRect* clipping_rects, - uint number_of_clipping_rects) { - if (NULL == obj) - return MIX_RESULT_NULL_PTR; - return obj->set_clipping_rects(clipping_rects, number_of_clipping_rects); -} - -MIX_RESULT mix_videorenderparams_get_clipping_rects( - MixVideoRenderParams * obj, MixRect ** clipping_rects, - uint* number_of_clipping_rects) { - if (NULL == obj) - return MIX_RESULT_NULL_PTR; - return obj->get_clipping_rects(clipping_rects, number_of_clipping_rects); -} - -/* The mixvideo internal method */ -MIX_RESULT mix_videorenderparams_get_cliprects_internal( - MixVideoRenderParams * obj, VARectangle ** va_cliprects, - uint* number_of_cliprects) { - if (NULL == obj) - return MIX_RESULT_NULL_PTR; - return obj->get_va_cliprects(va_cliprects, number_of_cliprects);; -} - -/* TODO: implement properties' setters and getters */ diff --git a/mix_video/src/mixvideorenderparams.h b/mix_video/src/mixvideorenderparams.h deleted file mode 100644 index 39d2683..0000000 --- a/mix_video/src/mixvideorenderparams.h +++ /dev/null @@ -1,226 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. 
- - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEORENDERPARAMS_H__ -#define __MIX_VIDEORENDERPARAMS_H__ - -#include -#include "mixvideodef.h" -#include "mixdisplay.h" -#include "mixvideoframe.h" -#include - -/** - * MIX_VIDEORENDERPARAMS: - * @obj: object to be type-casted. - */ -#define MIX_VIDEORENDERPARAMS(obj) (reinterpret_cast(obj)) - -/** - * MIX_IS_VIDEORENDERPARAMS: - * @obj: an object. - * - * Checks if the given object is an instance of #MixParams - */ -#define MIX_IS_VIDEORENDERPARAMS(obj) ((NULL != MIX_VIDEORENDERPARAMS(obj)) ? TRUE : FALSE) - - -/** - * MixVideoRenderParams: - * - * MI-X VideoRender Parameter object - */ -class MixVideoRenderParams : public MixParams { -public: - MixVideoRenderParams(); - virtual ~MixVideoRenderParams(); - virtual bool copy(MixParams *target) const; - virtual bool equal(MixParams*) const; - virtual MixParams* dup() const; - - MIX_RESULT set_clipping_rects(MixRect* clipping_rects, - uint number_of_clipping_rects); - MIX_RESULT get_clipping_rects(MixRect ** clipping_rects, - uint* number_of_clipping_rects); - MIX_RESULT get_va_cliprects(VARectangle ** va_cliprects, - uint* number_of_cliprects); - -public: - /*< public > */ - /* Pointer to a MixDisplay object - * such as MixDisplayX11 */ - MixDisplay *display; - - /* MixRect object to define offset, - * height and width of source image */ - MixRect src_rect; - - /* MixRect object to define offset, - * height and width of the display - * destination */ - MixRect dst_rect; - - /* Array of clipping rectangles - * to be applied */ - MixRect *clipping_rects; - - /* Number of clipping rectangles - * in clipping_rects */ - uint number_of_clipping_rects; - - /* Post processing parameters */ - uint post_proc; - - /* Reserved */ - void* reserved; - - /* Reserved for future use */ - void* reserved1; - - /* Reserved for future use */ - void* reserved2; - - /* Reserved for future use */ - void* reserved3; - - /* Reserved for future use */ - void* reserved4; - -private: - VARectangle *mVa_cliprects; - -}; - - -/** - * mix_videorenderparams_new: - * @returns: A newly allocated instance of #MixVideoRenderParams - * - * Use this method to create new instance of #MixVideoRenderParams - */ - -MixVideoRenderParams *mix_videorenderparams_new(void); -/** - * mix_videorenderparams_ref: - * @mix: object to add reference - * @returns: the MixVideoRenderParams instance where reference count has been increased. - * - * Add reference count. - */ - -MixVideoRenderParams *mix_videorenderparams_ref(MixVideoRenderParams * mix); - -/** - * mix_videorenderparams_unref: - * @obj: object to unref. - * - * Decrement reference count of the object. 
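 *
 * For illustration, a typical end-to-end setup (an editorial sketch only; the setters are declared below in this header, and x11 stands for a MixDisplayX11 created elsewhere, as in the test code later in this patch):
 *
 *   MixVideoRenderParams *rp = mix_videorenderparams_new();
 *   mix_videorenderparams_set_display(rp, MIX_DISPLAY(x11));
 *   MixRect src; src.x = 0; src.y = 0; src.width = 1920; src.height = 1080; // assumed source size
 *   MixRect dst; dst.x = 0; dst.y = 0; dst.width = 1280; dst.height = 720;  // assumed window size
 *   mix_videorenderparams_set_src_rect(rp, src);
 *   mix_videorenderparams_set_dest_rect(rp, dst);
 *   // ... mix_video_render(video, rp, frame) ...
 *   mix_videorenderparams_unref(rp);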
- */ -#define mix_videorenderparams_unref(obj) mix_params_unref(MIX_PARAMS(obj)) - -/* Class Methods */ - -/** - * mix_videorenderparams_set_display: - * @obj: #MixVideoRenderParams object - * @display: #MixDisplay object - * @returns: Common Video Error Return Codes - * - * Set MixDisplay Object - */ -MIX_RESULT mix_videorenderparams_set_display( - MixVideoRenderParams * obj, MixDisplay * display); - -/** - * mix_videorenderparams_get_display: - * @obj: #MixVideoRenderParams object - * @display: pointer to #MixDisplay object - * @returns: Common Video Error Return Codes - * - * Get MixDisplay Object - */ -MIX_RESULT mix_videorenderparams_get_display( - MixVideoRenderParams * obj, MixDisplay ** display); - -/** - * mix_videorenderparams_set_src_rect: - * @obj: #MixVideoRenderParams object - * @src_rect: MixRect object to define offset, height and width of source image - * @returns: Common Video Error Return Codes - * - * Set source rectangle - */ -MIX_RESULT mix_videorenderparams_set_src_rect( - MixVideoRenderParams * obj, MixRect src_rect); - -/** - * mix_videorenderparams_get_src_rect: - * @obj: #MixVideoRenderParams object - * @src_rect: Source rectangle to be returned - * @returns: Common Video Error Return Codes - * - * Get source rectangle - */ -MIX_RESULT mix_videorenderparams_get_src_rect( - MixVideoRenderParams * obj, MixRect * src_rect); - -/** - * mix_videorenderparams_set_dest_rect: - * @obj: #MixVideoRenderParams object - * @dst_rect: MixRect object to define offset, height and width of the display destination - * @returns: Common Video Error Return Codes - * - * Set destination rectangle - */ -MIX_RESULT mix_videorenderparams_set_dest_rect( - MixVideoRenderParams * obj, MixRect dst_rect); - -/** - * mix_videorenderparams_set_dest_rect: - * @obj: #MixVideoRenderParams object - * @dst_rect: MixRect object to define offset, height and width of the display destination - * @returns: Common Video Error Return Codes - * - * Get destination rectangle - */ -MIX_RESULT mix_videorenderparams_get_dest_rect( - MixVideoRenderParams * obj, MixRect * dst_rect); - -/** - * mix_videorenderparams_set_clipping_rects: - * @obj: #MixVideoRenderParams object - * @clipping_rects: Array of clipping rectangles to be applied - * @number_of_clipping_rects: Number of clipping rectangles in clipping_rects - * @returns: Common Video Error Return Codes - * - * Set clipping rectangles - */ -MIX_RESULT mix_videorenderparams_set_clipping_rects( - MixVideoRenderParams * obj, MixRect* clipping_rects, uint number_of_clipping_rects); - -/** - * mix_videorenderparams_get_clipping_rects: - * @obj: #MixVideoRenderParams object - * @clipping_rects: Array of clipping rectangles returned - * @number_of_clipping_rects: Number of clipping rectangles in clipping_rects returned - * @returns: Common Video Error Return Codes - * - * Get clipping rectangles - * - * - * DO NOT free clipping_rects! - * - */ -MIX_RESULT mix_videorenderparams_get_clipping_rects( - MixVideoRenderParams * obj, MixRect ** clipping_rects, uint* number_of_clipping_rects); - -/* TODO: Add getters and setters for other properties */ - - - -#endif /* __MIX_VIDEORENDERPARAMS_H__ */ diff --git a/mix_video/src/mixvideorenderparams_internal.h b/mix_video/src/mixvideorenderparams_internal.h deleted file mode 100644 index b1b3620..0000000 --- a/mix_video/src/mixvideorenderparams_internal.h +++ /dev/null @@ -1,22 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. 
- The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -#ifndef __MIX_VIDEORENDERPARAMS_PRIVATE_H__ -#define __MIX_VIDEORENDERPARAMS_PRIVATE_H__ - -#include <va/va.h> - -/* Internal function */ -MIX_RESULT mix_videorenderparams_get_cliprects_internal( - MixVideoRenderParams * obj, - VARectangle ** va_cliprects, - uint* number_of_cliprects); - - - -#endif /* __MIX_VIDEORENDERPARAMS_PRIVATE_H__ */ diff --git a/mix_video/src/mixvideothread.cpp b/mix_video/src/mixvideothread.cpp deleted file mode 100644 index 6ee1524..0000000 --- a/mix_video/src/mixvideothread.cpp +++ /dev/null @@ -1,50 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. - */ - -/** - * SECTION:mixvideothread - * @short_description: MI-X Video Thread Synchronization - * - * The MixVideoMutex class implemented in this file is a lightweight - * wrapper around a pthread mutex, used internally by MixVideo for - * thread synchronization.
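 *
 * A typical usage sketch (an editorial illustration only; the class itself is declared in mixvideothread.h below):
 *
 *   MixVideoMutex m; // or MixVideoMutex(MixVideoMutex::SHARED, "name") for a process-shared mutex
 *   m.lock();
 *   // ... critical section ...
 *   m.unlock();
 *   if (m.tryLock() == 0) { // tryLock() returns 0 on success
 *       m.unlock();
 *   }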
- */ - -#include "mixvideothread.h" - -MixVideoMutex::MixVideoMutex() { - pthread_mutex_init(&mMutex, NULL); -} -MixVideoMutex::MixVideoMutex(const char* name) { - pthread_mutex_init(&mMutex, NULL); -} -MixVideoMutex::MixVideoMutex(int type, const char* name) { - if (type == SHARED) { - pthread_mutexattr_t attr; - pthread_mutexattr_init(&attr); - pthread_mutexattr_setpshared(&attr, PTHREAD_PROCESS_SHARED); - pthread_mutex_init(&mMutex, &attr); - pthread_mutexattr_destroy(&attr); - } else { - pthread_mutex_init(&mMutex, NULL); - } -} -MixVideoMutex::~MixVideoMutex() { - pthread_mutex_destroy(&mMutex); -} -int MixVideoMutex::lock() { - return -pthread_mutex_lock(&mMutex); -} -void MixVideoMutex::unlock() { - pthread_mutex_unlock(&mMutex); -} -int MixVideoMutex::tryLock() { - return -pthread_mutex_trylock(&mMutex); -} - diff --git a/mix_video/src/mixvideothread.h b/mix_video/src/mixvideothread.h deleted file mode 100644 index feb9406..0000000 --- a/mix_video/src/mixvideothread.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - INTEL CONFIDENTIAL - Copyright 2009 Intel Corporation All Rights Reserved. - The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. - - No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. 
- */ - -#ifndef __MIX_VIDEO_THREAD_H__ -#define __MIX_VIDEO_THREAD_H__ - -#include - - -class MixVideoMutex { -public: - enum { - PRIVATE = 0, - SHARED = 1 - }; - - MixVideoMutex(); - MixVideoMutex(const char* name); - MixVideoMutex(int type, const char* name = NULL); - ~MixVideoMutex(); - - // lock or unlock the mutex - int lock(); - void unlock(); - // lock if possible; returns 0 on success, error otherwise - int tryLock(); -private: - // A mutex cannot be copied - MixVideoMutex(const MixVideoMutex&); - MixVideoMutex& operator = (const MixVideoMutex&); - -private: - pthread_mutex_t mMutex; -}; - - - - -#endif /* __MIX_VIDEO_THREAD_H__ */ - diff --git a/mix_video/src/test.cpp b/mix_video/src/test.cpp deleted file mode 100644 index 76bba31..0000000 --- a/mix_video/src/test.cpp +++ /dev/null @@ -1,87 +0,0 @@ -#include - -#include -#include "mixvideo.h" -#include "mixdisplayx11.h" - -int -main (int argc, char **argv) -{ - MIX_RESULT ret; - - g_type_init (); - - /* test MixDisplay */ - { - - MixDisplayX11 *x11_clone = NULL; - MixDisplayX11 *x11 = mix_displayx11_new (); - - MixDisplay *base = MIX_DISPLAY (x11); - - bool flag = MIX_IS_DISPLAYX11 (base); - - Drawable drawable = 1024; - - mix_displayx11_set_drawable (x11, drawable); - - /* clone x11 */ - - x11_clone = (MixDisplayX11 *) mix_display_dup (MIX_DISPLAY (x11)); - - base = MIX_DISPLAY (x11_clone); - - flag = MIX_IS_DISPLAYX11 (base); - - mix_displayx11_get_drawable (x11_clone, &drawable); - - /* TODO: add more test cases */ - - /* release */ - mix_display_unref (MIX_DISPLAY (x11)); - mix_display_unref (MIX_DISPLAY (x11_clone)); - g_print ("MixDisplayX11 test is done!\n"); - } - - /* test MixVideoInitParams */ - { - MixVideoInitParams *init_params = mix_videoinitparams_new (); - - MixDisplayX11 *x11 = mix_displayx11_new (); - mix_displayx11_set_drawable (x11, 1024); - - mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11)); - - /* release */ - mix_params_unref (MIX_PARAMS (init_params)); - mix_display_unref (MIX_DISPLAY (x11)); - - g_print ("MixVideoInitParams test is done!\n"); - } - - /* test MixVideo */ - - { - MixVideo *video = mix_video_new (); - MixVideoInitParams *init_params = mix_videoinitparams_new (); - MixDisplayX11 *x11 = mix_displayx11_new (); - MixDrmParams *drm = mix_drmparams_new (); - MixCodecMode mode = MIX_CODEC_MODE_DECODE; - - mix_displayx11_set_drawable (x11, 1024); - mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11)); - - mix_video_initialize (video, mode, init_params, drm); - - /* TODO: add more test cases */ - - /* unref the objects. 
*/ - - mix_params_unref (MIX_PARAMS (init_params)); - mix_params_unref (MIX_PARAMS (drm)); - mix_display_unref (MIX_DISPLAY (x11)); - g_object_unref (G_OBJECT (video)); - - g_print ("MixVideo test is done!\n"); - } -} diff --git a/mix_video/test/src/test_framemanager.cpp b/mix_video/test/src/test_framemanager.cpp deleted file mode 100644 index c930737..0000000 --- a/mix_video/test/src/test_framemanager.cpp +++ /dev/null @@ -1,200 +0,0 @@ -#include "../../src/mixframemanager.h" - -bool stop_thread = FALSE; -GCond* data_cond = NULL; -GMutex* data_mutex = NULL; - - -void *deque_function(void *data) { - - MixFrameManager *fm = (MixFrameManager *) data; - MIX_RESULT mixresult; - MixVideoFrame *mvf = NULL; - uint64 pts; - while (!stop_thread) { - - g_mutex_lock (data_mutex); - - mixresult = mix_framemanager_dequeue(fm, &mvf); - if (mixresult == MIX_RESULT_SUCCESS) { - mixresult = mix_videoframe_get_timestamp(mvf, &pts); - g_print("dequeued timestamp = %"UINT64_FORMAT"\n", pts); - /* mix_videoframe_unref(mvf); */ - } else if (mixresult == MIX_RESULT_FRAME_NOTAVAIL) { - g_print("mixresult == MIX_RESULT_FRAME_NOTAVAIL\n"); - g_cond_wait (data_cond, data_mutex); - } - - g_mutex_unlock (data_mutex); - - } - - return NULL; /* thread exit value (unused) */ -} - -void shuffle(GPtrArray *list) { - uint idx, jdx; - uint len = list->len; - for (idx = 0; idx < len - 1; idx++) { - jdx = rand() % len; - if (idx != jdx) { - void* tmp = g_ptr_array_index(list, jdx); - g_ptr_array_index(list, jdx) = g_ptr_array_index(list, idx); - g_ptr_array_index(list, idx) = tmp; - } - } -} - -int main() { - MIX_RESULT mixresult; - - int fps_n = 24000; - int fps_d = 1001; - - /* - int fps_n = 2500000; - int fps_d = 104297; - */ - GPtrArray *fa = NULL; - MixFrameManager *fm = NULL; - MixVideoFrame *mvf = NULL; - MixVideoFrame *mvf_1st = NULL; - - int idx = 0; - uint64 pts = 0; - - GThread *deque_thread = NULL; - GError *deque_thread_error = NULL; - - /* first things first */ - g_type_init(); - - /* create frame manager */ - fm = mix_framemanager_new(); - if (!fm) { - goto cleanup; - } - - /* initialize frame manager */ - mixresult = mix_framemanager_initialize(fm, - MIX_FRAMEORDER_MODE_DISPLAYORDER, fps_n, fps_d); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - /* create frame_array */ - fa = g_ptr_array_sized_new(64); - if (!fa) { - goto cleanup; - } - - for (idx = 0; idx < 16; idx++) { - /* generate MixVideoFrame */ - mvf = mix_videoframe_new(); - if (!mvf) { - goto cleanup; - } - - pts = idx * G_USEC_PER_SEC * INT64_CONSTANT(1000) * fps_d / fps_n; - mixresult = mix_videoframe_set_timestamp(mvf, pts); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - g_print("original timestamp = %"UINT64_FORMAT"\n", pts); - - if (idx == 0) { - mvf_1st = mvf; - } else { - g_ptr_array_add(fa, (void*) mvf); - } - } - - /* shuffle the array */ - shuffle(fa); - - data_mutex = g_mutex_new (); - if (!data_mutex) { - goto cleanup; - } - - data_cond = g_cond_new(); - if (!data_cond) { - goto cleanup; - } - - - /* create another thread to dequeue */ - deque_thread = g_thread_create((GThreadFunc) deque_function, (void *) fm, - TRUE, &deque_thread_error); - if (!deque_thread) { - goto cleanup; - } - - /* enqueue */ - mixresult = mix_framemanager_enqueue(fm, mvf_1st); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - mixresult = mix_videoframe_get_timestamp(mvf_1st, &pts); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } - g_print("shuffled timestamp = %"UINT64_FORMAT"\n", pts); - - for (idx = 0; idx < fa->len; idx++) { - - g_mutex_lock
(data_mutex); - - /* wait for 100ms to enqueue another frame */ - g_usleep(G_USEC_PER_SEC / 10 ); - - mvf = (MixVideoFrame *) g_ptr_array_index(fa, idx); - mixresult = mix_framemanager_enqueue(fm, mvf); - - /* wake up deque thread */ - g_cond_signal (data_cond); - - - g_mutex_unlock (data_mutex); - - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - mixresult = mix_videoframe_get_timestamp(mvf, &pts); - if (mixresult != MIX_RESULT_SUCCESS) { - goto cleanup; - } - - g_print("shuffled timestamp = %"UINT64_FORMAT"\n", pts); - } - - getchar(); - - stop_thread = TRUE; - - /* wake up deque thread */ - g_cond_signal (data_cond); - - g_thread_join(deque_thread); - -cleanup: - - if (data_mutex) { - g_mutex_free(data_mutex); - } - - if (data_cond) { - g_cond_free(data_cond); - } - - if (fm) { - mix_framemanager_unref(fm); - } - - if (fa) { - g_ptr_array_free(fa, TRUE); - } - - return 0; -} -- cgit v1.2.3 From aac27960a575b34f16e951311f46d0854fe1ca3b Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Mon, 8 Oct 2012 17:47:09 +0800 Subject: Disable frameskip for low bitrate exported movie BZ: 55820 When the exported movie has low bitrates such as 1Mbps, frame drop will be observed. In these situations, we disable the frame skip in video encoder to improve the export video quality. Change-Id: Idba9ff925374cefb402895fda6bf2616367cf8ce Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/69193 Reviewed-by: Gu, Wangyi Reviewed-by: Tang, Richard Reviewed-by: Wang, Elaine Reviewed-by: Jiang, Fei Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- .../videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp index d7e0529..a63dfd5 100644 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp @@ -71,7 +71,7 @@ status_t IntelVideoEditorAVCEncoder::initCheck(const sp& meta) { LOGV("initCheck"); Encode_Status encStatus; - + uint32_t disableFrameSkip = 0; sp sourceFormat = mSource->getFormat(); CHECK(sourceFormat->findInt32(kKeyWidth, &mVideoWidth)); @@ -104,6 +104,12 @@ status_t IntelVideoEditorAVCEncoder::initCheck(const sp& meta) { LOGI("mVideoWidth = %d, mVideoHeight = %d, mVideoFrameRate = %d, mVideoColorFormat = %d, mVideoBitRate = %d", mVideoWidth, mVideoHeight, mVideoFrameRate, mVideoColorFormat, mVideoBitRate); + // disable frame skip for low bitrate clips + if (mVideoBitRate < BITRATE_2M) { + LOGI("Frameskip is disabled for low bitrate clips"); + disableFrameSkip = 1; + } + if (mVideoColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) { LOGE("Color format %d is not supported", mVideoColorFormat); return BAD_VALUE; @@ -124,6 +130,7 @@ status_t IntelVideoEditorAVCEncoder::initCheck(const sp& meta) { mEncParamsCommon.frameRate.frameRateDenom = 1; mEncParamsCommon.rcMode = RATE_CONTROL_VBR; mEncParamsCommon.rcParams.bitRate = mVideoBitRate; + mEncParamsCommon.rcParams.disableFrameSkip = disableFrameSkip; mEncParamsCommon.rawFormat = RAW_FORMAT_NV12; mEncParamsCommon.rcParams.minQP = 0; -- cgit v1.2.3 From 94c0dd51127209ad43958182d91c492fccb7361c Mon Sep 17 00:00:00 2001 From: "Gu, Wangyi" Date: Tue, 9 Oct 2012 16:07:36 +0800 Subject: [Movie Studio] add error handle mechanism for audio codec aac-extended in video clip BZ: 60006 video clip with 
audio codec aac-extended is not supported by Movie Studio (per the default AOSP configuration), but the original Android design can't handle this case well, so add a proper error handling mechanism for when a user imports such a clip. Change-Id: Ic5d304fda819cf37584641f835d93a37e044071e Signed-off-by: Gu, Wangyi Reviewed-on: http://android.intel.com:8080/69364 Reviewed-by: Tang, Richard Reviewed-by: Feng, Wei Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp b/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp index 6cf71e1..cc62a8b 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp @@ -64,6 +64,7 @@ namespace android { * ENGINE INTERFACE */ +const char *MEDIA_MIMETYPE_AUDIO_AACEXTENDED = "audio/mp4a-aacextended"; /** ************************************************************************ * @brief Array of AMR NB/WB bitrates @@ -520,6 +521,10 @@ M4OSA_ERR VideoEditor3gpReader_open(M4OSA_Context pContext, ALOGV("VideoEditorMp3Reader_open error - audio/mpeg is not supported"); return M4ERR_READER_UNKNOWN_STREAM_TYPE; } + if (!strcasecmp(mime,MEDIA_MIMETYPE_AUDIO_AACEXTENDED)) { + ALOGV("VideoEditorMp3Reader_open error - audio/aacextended is not supported"); + return M4ERR_READER_UNKNOWN_STREAM_TYPE; + } temp++; } -- cgit v1.2.3 From 737718f02eacdc5abd9b5d5f581bf99a11977860 Mon Sep 17 00:00:00 2001 From: "Gu, Wangyi" Date: Fri, 28 Sep 2012 16:24:03 +0800 Subject: [Movie studio] make exported clip of 1080p as 1920*1080 (1) BZ: 56801 The original design for 1080p was 1920*1088 (the container size), which can't handle some special clips, such as the clip provided in this bug: that clip carries only a 1920*1080 valid bit stream. Besides, the 1920*1088 solution brings a workaround in OMX, plus one more memory copy for each frame if the imported clip needs transcoding when exporting it.
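The 16-pixel constraint that is special-cased here comes from AVC macroblock geometry: the codec works on 16x16 macroblocks, and 1080 is not a multiple of 16 (1080 = 67*16 + 8), so the old path padded the container height up to 1088, which is what forced the OMX workaround and the extra per-frame copy described above. A minimal sketch of that round-up (align16 is an illustrative helper, not a function in this tree):

    /* Round a dimension up to the next multiple of 16, the AVC
     * macroblock size: align16(1080) == 1088, align16(1920) == 1920. */
    static inline int align16(int v)
    {
        return (v + 15) & ~15;
    }

With the 1920*1080 exemption added to VideoEditorVideoEncoder_open below, such clips keep their native buffer size and the padded 1088-high container is no longer needed.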
Signed-off-by: Gu, Wangyi Change-Id: I9757768b508a7b6b0eb0144a86db24cc584bbb1a Reviewed-on: http://android.intel.com:8080/68301 Reviewed-by: Feng, Wei Reviewed-by: Tang, Richard Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- frameworks/videoedit/lvpp/VideoEditorToolsNV12.c | 4 ++-- .../videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp | 13 +++++++++---- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c b/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c index 1cc86ab..e77a908 100644 --- a/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c +++ b/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c @@ -1166,8 +1166,8 @@ M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toNV12(void *pUserData, pixel is replicated here */ if (u8Wflag) { - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value1; - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; + *pu8_data_out = (M4VIFI_UInt8)u32_temp_value1; + *(pu8_data_out+1) = (M4VIFI_UInt8)u32_temp_value; } pu8dum = (pu8_data_out - u32_width_out); diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp index 6ad5122..cafc615 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp @@ -59,6 +59,8 @@ #error "Cannot force DSI retrieval if codec type is not fixed" #endif +#define WIDTH_1080P_INTEL 1920 +#define HEIGHT_1080P_INTEL 1080 /******************** * SOURCE CLASS * ********************/ @@ -606,10 +608,13 @@ M4OSA_ERR VideoEditorVideoEncoder_open(M4ENCODER_Context pContext, VIDEOEDITOR_CHECK( pEncoderContext->mCodecParams->Format == pEncoderContext->mFormat, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameWidth % 16, - M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameHeight % 16, - M4ERR_PARAMETER); + if (pEncoderContext->mCodecParams->FrameWidth != WIDTH_1080P_INTEL && + pEncoderContext->mCodecParams->FrameHeight != HEIGHT_1080P_INTEL) { + VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameWidth % 16, + M4ERR_PARAMETER); + VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameHeight % 16, + M4ERR_PARAMETER); + } /** * StageFright graph building -- cgit v1.2.3 From 3d69046290c0fce32ec0c9ea30df39b71a1868a0 Mon Sep 17 00:00:00 2001 From: Liu Bolun Date: Mon, 15 Oct 2012 15:26:15 +0800 Subject: commit code coverage tests for video encoder libmix BZ: 62190 btest is developed specifically for code coverage testing of libintelmetadatabuffer. mix_encoder is developed specifically for code coverage testing of libmix. Commit them to the mainline branch to keep them. Adjust code style.
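The central path btest exercises is the Serialize/UnSerialize round trip; a minimal sketch, assuming the by-reference signatures used in btest.cpp below:

    IntelMetadataBuffer in, out;
    uint8_t *bytes = NULL;
    uint32_t size = 0;

    in.SetType(MetadataBufferTypeCameraSource);
    in.SetValue(0x1234);

    /* pack the buffer into a byte stream, then parse it back */
    if (in.Serialize(bytes, size) == IMB_SUCCESS &&
        out.UnSerialize(bytes, size) == IMB_SUCCESS) {
        /* out now reports the same type and value as in */
    }

mix_encoder drives the encoder library through the same public surface (setParameters/getParameters, setConfig/getConfig, getMaxOutSize, getOutput), so the parameter, config, and error-handling paths are each touched at least once.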
Signed-off-by: Liu Bolun Change-Id: Iecc22aec29ea5e84747f566a55fd2a7f3a1a1677 Reviewed-on: http://android.intel.com:8080/70007 Reviewed-by: Liu, BolunX Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- test/btest.cpp | 17 +++ test/mix_encoder.cpp | 287 +++++++++++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 293 insertions(+), 11 deletions(-) diff --git a/test/btest.cpp b/test/btest.cpp index 13b790b..32f34a8 100644 --- a/test/btest.cpp +++ b/test/btest.cpp @@ -33,10 +33,13 @@ int main(int argc, char* argv[]) mb1 = new IntelMetadataBuffer(); ret = mb1->SetType(t1); ret = mb1->SetValue(v1); + mb1->GetMaxBufferSize(); if (t1 != MetadataBufferTypeGrallocSource) { ret = mb1->SetValueInfo(&vi1); ret = mb1->SetExtraValues(ev1, 10); + ret = mb1->SetExtraValues(ev1, 10); } +// ret = mb1->GetBytes(bytes, size); ret = mb1->Serialize(bytes, size); printf("assembling IntelMetadataBuffer %s, ret = %d\n", (ret == IMB_SUCCESS)?"Success":"Fail", ret ); @@ -48,14 +51,28 @@ int main(int argc, char* argv[]) printf("\n"); mb2 = new IntelMetadataBuffer(); +// ret = mb2->SetBytes(bytes, size); ret = mb2->UnSerialize(bytes, size); printf("parsing IntelMetadataBuffer %s, ret = %d\n", (ret == IMB_SUCCESS)?"Success":"Fail", ret ); ret = mb2->GetType(t2); ret = mb2->GetValue(v2); ret = mb2->GetValueInfo(vi2); + ret = mb1->SetExtraValues(ev1, 10); ret = mb2->GetExtraValues(ev2, count); + IntelMetadataBuffer mb3;; +// mb3 = new IntelMetadataBuffer(); + ret = mb3.SetType(t1); + ret = mb3.SetValue(v1); + ret = mb3.SetExtraValues(ev1, 10); + ret = mb3.SetValueInfo(&vi1); + ret = mb3.UnSerialize(bytes, size); + + IntelMetadataBuffer *mb4 = new IntelMetadataBuffer(mb3); + IntelMetadataBuffer *mb5; + mb5 = mb4; + printf("t2=%d, v2=%d, vi2=%x, ev2=%x\n", t2, v2, vi2, ev2); if (v1 == v2 && t1 == t2 ) { if (vi2) { diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index 5ec64b5..464b759 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -22,11 +22,23 @@ return -1; \ } +#define CHECK_ENCODE_STATUS_RETURN(FUNC)\ + if (ret != ENCODE_SUCCESS) { \ + printf(FUNC"Failed. 
ret = 0x%08x\n", ret); \ + return -1; \ + } + static const char *AVC_MIME_TYPE = "video/h264"; static const char *MPEG4_MIME_TYPE = "video/mpeg4"; static const char *H263_MIME_TYPE = "video/h263"; +//add for video encode libmix code coverage test--start +//add two mine type define only for code coverage test +static const char *MPEG4_MIME_TYPE_SP= "video/mp4v-es"; +static const char *MEDIA_MIMETYPE_IMAGE_JPEG = "image/jpeg"; +//add for video encode libmix code coverage test--end static const int box_width = 128; +static VideoParamsAVC gVideoParamsAVC; static IVideoEncoder *gVideoEncoder = NULL; static VideoParamsCommon gEncoderParams; static VideoParamsStoreMetaDataInBuffers gStoreMetaDataInBuffers; @@ -51,9 +63,14 @@ static uint32_t gEncodeHeight = 0; static char* gFile = (char*)"out.264"; static uint32_t gMode = 0; //0:Camera malloc , 1: WiDi clone, 2: WiDi ext, 3: WiDi user, 4: Raw, 5: SurfaceMediaSource -static const char* gModeString[7] = {"Camera malloc", "WiDi clone", "WiDi ext", "WiDi user", "Raw", "GrallocSource(Composer)", "GrallocSource(Gralloc)"}; +static const char* gModeString[9] = {"Camera malloc", "WiDi clone", "WiDi ext", "WiDi user", "Raw", "GrallocSource(Composer)", "GrallocSource(Gralloc)","MappingSurfaceForCI","Camera malloc For Extra Value"}; static const char* gRCModeString[4] ={"NO_RC", "CBR", "VBR", "VCM"}; + +static uint32_t gOutPutFormat = 0; +static const char* gOutPutFormatString[6] = {"OUTPUT_EVERYTHING","OUTPUT_CODEC_DATA","OUTPUT_FRAME_DATA","OUTPUT_ONE_NAL","OUTPUT_ONE_NAL_WITHOUT_STARTCODE","OUTPUT_LENGTH_PREFIXED"}; + +static uint32_t gOutPutBufferSize = 1; //for uploading src pictures, also for Camera malloc, WiDi clone, raw mode usrptr storage static uint8_t* gUsrptr[gSrcFrames]; static uint8_t* gMallocPtr[gSrcFrames]; @@ -72,6 +89,16 @@ static uint32_t gkBufHandle[gSrcFrames]; static sp gGraphicBufferAlloc; static sp gGraphicBuffer[gSrcFrames]; +static int ev1[10]; + +struct VideoConfigTypeIDRReq: VideoParamConfigSet { + + VideoConfigTypeIDRReq() { + type = VideoConfigTypeIDRRequest; + size = sizeof(VideoConfigTypeIDRReq); + } +}; + extern "C" { VAStatus vaLockSurface(VADisplay dpy, VASurfaceID surface, @@ -95,6 +122,8 @@ static hw_module_t const *gModule; static gralloc_module_t const *gAllocMod; /* get by force hw_module_t */ static alloc_device_t *gAllocDev; /* get by gralloc_open */ + +static int gCodeCoverageTestErrorCase = 0; static void gfx_init() { int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &gModule); @@ -217,6 +246,111 @@ Encode_Status SetVideoEncoderParam() { ret = gVideoEncoder->setParameters(&gEncoderParams); CHECK_ENCODE_STATUS("setParameters VideoParamsCommon"); + gVideoEncoder->getParameters(&gVideoParamsAVC); + gVideoParamsAVC.crop.TopOffset = 1; + gVideoParamsAVC.VUIFlag = 1; + gVideoParamsAVC.SAR.SarWidth = 1; + gVideoEncoder->setParameters(&gVideoParamsAVC); + + VideoParamsStoreMetaDataInBuffers tmpStoreMetaDataInBuffers; + gVideoEncoder->getParameters(&tmpStoreMetaDataInBuffers); + memset(&tmpStoreMetaDataInBuffers,0x00,sizeof(VideoParamsStoreMetaDataInBuffers)); + gVideoEncoder->getParameters(&tmpStoreMetaDataInBuffers); + gVideoEncoder->setParameters(&tmpStoreMetaDataInBuffers); + + VideoParamsUpstreamBuffer tmpVideoParamsUpstreamBuffer; + tmpVideoParamsUpstreamBuffer.bufCnt = 0; + gVideoEncoder->setParameters(&tmpVideoParamsUpstreamBuffer); + + tmpVideoParamsUpstreamBuffer.bufCnt = gSrcFrames; + tmpVideoParamsUpstreamBuffer.bufAttrib = NULL; + gVideoEncoder->setParameters(&tmpVideoParamsUpstreamBuffer); +/* + 
ExternalBufferAttrib attrib; + tmpVideoParamsUpstreamBuffer.bufCnt = gSrcFrames; + tmpVideoParamsUpstreamBuffer.bufAttrib = &attrib; + tmpVideoParamsUpstreamBuffer.bufferMode = BUFFER_LAST; + gVideoEncoder->setParameters(&tmpVideoParamsUpstreamBuffer); +*/ + VideoParamsUsrptrBuffer tmpVideoParamsUsrptrBuffer; + tmpVideoParamsUsrptrBuffer.width = 0; + gVideoEncoder->getParameters(&tmpVideoParamsUsrptrBuffer); + + //---------------------add for libmix encode code coverage test + // VideoEncodeBase.cpp file setConfig && getConfig code coverage test + // only for VCM mode + if(gRC == RATE_CONTROL_VCM) + { + // for setConfig && getConfig default case + VideoConfigAIR configAIR1; + memset(&configAIR1,0x00,sizeof(VideoConfigAIR)); + gVideoEncoder->setConfig(&configAIR1); + gVideoEncoder->getConfig(&configAIR1); + + // VideoConfigTypeAIR setConfig and getConfig + VideoConfigAIR configAIR; + configAIR.airParams.airAuto = 0; + configAIR.airParams.airMBs = 0; + configAIR.airParams.airThreshold = 0; + gVideoEncoder->setConfig(&configAIR); + gVideoEncoder->getConfig(&configAIR); + + // VideoConfigTypeBitRate setConfig and getConfig + VideoConfigBitRate configBitRate; + configBitRate.rcParams.bitRate = gBitrate; + configBitRate.rcParams.initQP = 15; + configBitRate.rcParams.minQP = 1; + configBitRate.rcParams.windowSize = 50; + configBitRate.rcParams.targetPercentage = 95; + gVideoEncoder->setConfig(&configBitRate); + gVideoEncoder->getConfig(&configBitRate); + + // for VideoConfigTypeSliceNum derivedSetConfig && derivedGetConfig + VideoConfigSliceNum configSliceNum; + gVideoEncoder->getConfig(&configSliceNum); + gVideoEncoder->setConfig(&configSliceNum); + + VideoConfigIntraRefreshType configIntraRefreshType; + configIntraRefreshType.refreshType = VIDEO_ENC_AIR;//VIDEO_ENC_AIR + gVideoEncoder->setConfig(&configIntraRefreshType); + gVideoEncoder->getConfig(&configIntraRefreshType); + + // VideoConfigTypeFrameRate setConfig and getConfig + VideoConfigFrameRate configFrameRate; + configFrameRate.frameRate.frameRateDenom = 1; + configFrameRate.frameRate.frameRateNum = gFrameRate; + gVideoEncoder->setConfig(&configFrameRate); + gVideoEncoder->getConfig(&configFrameRate); + + // VideoEncodeAVC.cpp file derivedSetConfig && derivedGetConfig code coverage test + // for VideoConfigTypeNALSize derivedSetConfig && derivedGetConfig + VideoConfigNALSize configNalSize; + configNalSize.maxSliceSize = 8*gWidth*gHeight*1.5; + gVideoEncoder->setConfig(&configNalSize); + gVideoEncoder->getConfig(&configNalSize); + + VideoParamsHRD paramsHRD; + paramsHRD.bufferSize = (uint32_t)(gBitrate/gFrameRate) * 1024 * 8; + paramsHRD.initBufferFullness = (uint32_t)(gBitrate/gFrameRate); + gVideoEncoder->setParameters(¶msHRD); + gVideoEncoder->getParameters(¶msHRD); + } + else + { + // VideoConfigTypeCyclicFrameInterval setConfig and getConfig + VideoConfigCyclicFrameInterval configCyclicFrameInterval; + configCyclicFrameInterval.cyclicFrameInterval = 30; + gVideoEncoder->setConfig(&configCyclicFrameInterval); + gVideoEncoder->getConfig(&configCyclicFrameInterval); + + // for VideoConfigTypeAVCIntraPeriod derivedSetConfig && derivedGetConfig + VideoConfigAVCIntraPeriod configAVCIntraPeriod; + gVideoEncoder->getConfig(&configAVCIntraPeriod); + gVideoEncoder->setConfig(&configAVCIntraPeriod); + VideoConfigTypeIDRReq tmpVideoConfigTypeIDRReq; + gVideoEncoder->setConfig(&tmpVideoConfigTypeIDRReq); + } + if (gMode != 4) { gStoreMetaDataInBuffers.isEnabled = true; @@ -279,6 +413,34 @@ static int YUV_generator_planar(int width, int height, 
return 0; } +//malloc external memory, and not need to set into encoder before start() +void MallocExternalMemoryWithExtraValues() +{ + uint32_t size = gWidth * gHeight * 3 /2; + + ValueInfo* vinfo = new ValueInfo; + vinfo->mode = MEM_MODE_MALLOC; + vinfo->handle = 0; + vinfo->size = size; + vinfo->width = gWidth; + vinfo->height = gHeight; + vinfo->lumaStride = gStride; + vinfo->chromStride = gStride; + vinfo->format = STRING_TO_FOURCC("NV12"); + vinfo->s3dformat = 0xFFFFFFFF; + + for(int i = 0; i < gSrcFrames; i ++) + { + gUsrptr[i] = (uint8_t*)malloc(size); + + gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); + + gIMB[i]->SetValueInfo(vinfo); + } + delete vinfo; + + gIMB[0]->SetExtraValues(ev1, 10); +} //malloc external memory, and not need to set into encoder before start() void MallocExternalMemory() { @@ -294,8 +456,8 @@ void MallocExternalMemory() vinfo->chromStride = gStride; vinfo->format = STRING_TO_FOURCC("NV12"); vinfo->s3dformat = 0xFFFFFFFF; - - for(int i = 0; i < gSrcFrames; i ++) + + for(int i = 0; i < gSrcFrames; i ++) { gMallocPtr[i] = (uint8_t*)malloc(size + 4095); gUsrptr[i] = (uint8_t*)((((int )gMallocPtr[i] + 4095) / 4096 ) * 4096); @@ -459,6 +621,31 @@ void CreateUserSurfaces(int mode) } } +void CreateSurfaceMappingForCI() +{ + uint32_t size = gWidth * gHeight * 3 /2; + + ValueInfo* vinfo = new ValueInfo; + vinfo->mode = MEM_MODE_CI; + vinfo->handle = 0; + vinfo->size = size; + vinfo->width = gWidth; + vinfo->height = gHeight; + vinfo->lumaStride = gStride; + vinfo->chromStride = gStride; + vinfo->format = STRING_TO_FOURCC("NV12"); + vinfo->s3dformat = 0xFFFFFFFF; + + for(int i = 0; i < gSrcFrames; i ++) + { + gUsrptr[i] = (uint8_t*)malloc(size); + + gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); + + gIMB[i]->SetValueInfo(vinfo); + } + delete vinfo; +} void CreateGfxhandle() { sp composer(ComposerService::getComposerService()); @@ -530,7 +717,7 @@ int CheckArgs(int argc, char* argv[]) { char c; - while ((c =getopt(argc, argv,"b:c:r:w:h:m:f:n:s:?") ) != EOF) { + while ((c =getopt(argc, argv,"b:c:r:w:h:m:f:n:s:o:e:z:?") ) != EOF) { switch (c) { case 'w': gWidth = atoi(optarg); @@ -566,6 +753,15 @@ int CheckArgs(int argc, char* argv[]) case 'g': gEncodeHeight = atoi(optarg); break; + case 'o': + gOutPutFormat = atoi(optarg); + break; + case 'e': + gCodeCoverageTestErrorCase = atoi(optarg); + break; + case 'z': + gOutPutBufferSize = atoi(optarg); + break; case '?': default: printf("\n./mix_encode -c -b -r -w -h -k -g -n -m -s -f \n"); @@ -597,6 +793,7 @@ int main(int argc, char* argv[]) { Encode_Status ret; const char *codec; + VideoOutputFormat goutputformat = OUTPUT_EVERYTHING; CheckArgs(argc, argv); @@ -615,9 +812,20 @@ int main(int argc, char* argv[]) case 2: codec = H263_MIME_TYPE; break; +//add for video encode libmix code coverage test--start + case 3: + codec = MPEG4_MIME_TYPE_SP; + break; + case 4: + codec = MEDIA_MIMETYPE_IMAGE_JPEG; + break; + case 5: + codec = NULL; + break; default: printf("Not support this type codec\n"); return 1; +//add for video encode libmix code coverage test--end } switch(gRCMode) @@ -638,15 +846,60 @@ int main(int argc, char* argv[]) printf("Not support this rate control mode\n"); return 1; } - - printf("\nStart %s Encoding ....\n", codec); - printf("Mode is %s, RC mode is %s, Src Width=%d, Height=%d, Encode Width=%d, Height=%d \n", gModeString[gMode], gRCModeString[gRCMode], gWidth, gHeight, gEncodeWidth, gEncodeHeight); - printf("Bitrate=%dbps, 
EncodeFrames=%d, SyncMode=%d, out file is %s\n\n", gBitrate, gEncFrames, gSyncEncMode, gFile); + + switch(gOutPutFormat) + { + case 0: + goutputformat = OUTPUT_EVERYTHING; + break; + case 1: + goutputformat = OUTPUT_CODEC_DATA; + break; + case 2: + goutputformat = OUTPUT_FRAME_DATA; + break; + case 3: + goutputformat = OUTPUT_ONE_NAL; + break; + case 4: + goutputformat = OUTPUT_ONE_NAL_WITHOUT_STARTCODE; + break; + case 5: + goutputformat = OUTPUT_LENGTH_PREFIXED; + break; + case 6: + goutputformat = OUTPUT_BUFFER_LAST; + break; + default: + printf("Not support this Out Put Format\n"); + return 1; + } + +//add for video encode libmix code coverage test--start + if(codec != NULL) + printf("\nStart %s Encoding ....\n", codec); + else + printf("\nStart codec is null only for code coverage test ....\n"); +//add for video encode libmix code coverage test--end + printf("Mode is %s, RC mode is %s, Width=%d, Height=%d, Bitrate=%dbps, EncodeFrames=%d, SyncMode=%d, file is %s, outputnalformat is %s\n\n", gModeString[gMode], gRCModeString[gRCMode], gWidth, gHeight, gBitrate, gEncFrames, gSyncEncMode, gFile,gOutPutFormatString[gOutPutFormat]); + +//sleep(10); for(int i=0; i<1; i++) { gVideoEncoder = createVideoEncoder(codec); + if(gVideoEncoder == NULL) + { + printf("Finishing code coverage test ....\n"); + return 1; + } + + // Adding for code coverage test + // VideoEncoderBase.cpp uncalled function + // VideoEncoderBase::flush() + gVideoEncoder->flush(); + //set parameter SetVideoEncoderParam(); @@ -675,6 +928,12 @@ for(int i=0; i<1; i++) case 6: //Gralloc CreateGralloc(); break; + case 7: //SurfaceMappingForCI + CreateSurfaceMappingForCI(); + break; + case 8: //Camera malloc with extra values + MallocExternalMemoryWithExtraValues(); + break; default: break; } @@ -703,12 +962,17 @@ for(int i=0; i<1; i++) //output buffers VideoEncOutputBuffer OutBuf; uint32_t maxsize; + //for error hanlding + gVideoEncoder->getMaxOutSize(NULL); gVideoEncoder->getMaxOutSize(&maxsize); uint8_t out[maxsize]; - OutBuf.bufferSize = maxsize; + if(gOutPutBufferSize == 0) + OutBuf.bufferSize = 0; + else + OutBuf.bufferSize = maxsize; OutBuf.dataSize = 0; OutBuf.data = out; - OutBuf.format = OUTPUT_EVERYTHING; + OutBuf.format = goutputformat; printf("\n"); for(unsigned int i=0; igetOutput(&OutBuf); CHECK_ENCODE_STATUS("getOutput"); + CHECK_ENCODE_STATUS_RETURN("getOutput"); // printf("OutBuf.dataSize = %d .........\n", OutBuf.dataSize); fwrite(OutBuf.data, 1, OutBuf.dataSize, file); @@ -738,7 +1003,7 @@ for(int i=0; i<1; i++) fflush(stdout); } fclose(file); - + VideoStatistics stat; if (gVideoEncoder->getStatistics(&stat) == ENCODE_SUCCESS) { -- cgit v1.2.3 From bd944362a058954f5483b01bdd0aff6c26dabeb7 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Mon, 15 Oct 2012 17:39:14 +0800 Subject: Clean up the obsolete codes in libmix BZ: 62108 Clean up the obsolete codes in libmix for code coverage improvement and easier maintenance. Use the existing MARRO VBP (#ifndef VBP) to comment out the unused/uncalled functions or codes. 
Change-Id: Ibba20c00e80e85ca0f3c5443bf67a0088d7d7947 Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/69984 Reviewed-by: Chen, Tianmi Reviewed-by: Wang, Yi A Reviewed-by: Qiu, Junhai Reviewed-by: Jiang, Fei Reviewed-by: Guo, Nana N Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/Merge_readme.txt | 2 - .../fw/codecs/h264/parser/viddec_h264_parse.c | 11 +- .../fw/codecs/mp4/parser/viddec_fw_mp4_workload.c | 3 +- .../fw/codecs/mp4/parser/viddec_mp4_parse.c | 6 + .../fw/codecs/mp4/parser/viddec_parse_sc_mp4.c | 3 +- mix_vbp/viddec_fw/fw/parser/Android.mk | 5 - mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c | 224 -------- .../fw/parser/include/viddec_h264_parse.h | 4 +- .../fw/parser/include/viddec_pm_utils_list.h | 3 +- mix_vbp/viddec_fw/fw/parser/main.c | 608 --------------------- mix_vbp/viddec_fw/fw/parser/utils.c | 253 --------- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 6 +- mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 12 +- mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c | 4 + .../viddec_fw/fw/parser/viddec_pm_utils_bstream.c | 7 +- mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c | 2 + mix_video/docs/reference/MixVideo/html/home.png | Bin 654 -> 0 bytes mix_video/docs/reference/MixVideo/html/left.png | Bin 459 -> 0 bytes mix_video/docs/reference/MixVideo/html/right.png | Bin 472 -> 0 bytes mix_video/docs/reference/MixVideo/html/up.png | Bin 406 -> 0 bytes 20 files changed, 44 insertions(+), 1109 deletions(-) delete mode 100644 mix_vbp/Merge_readme.txt mode change 100755 => 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c delete mode 100644 mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c delete mode 100644 mix_vbp/viddec_fw/fw/parser/main.c delete mode 100644 mix_vbp/viddec_fw/fw/parser/utils.c delete mode 100644 mix_video/docs/reference/MixVideo/html/home.png delete mode 100644 mix_video/docs/reference/MixVideo/html/left.png delete mode 100644 mix_video/docs/reference/MixVideo/html/right.png delete mode 100644 mix_video/docs/reference/MixVideo/html/up.png diff --git a/mix_vbp/Merge_readme.txt b/mix_vbp/Merge_readme.txt deleted file mode 100644 index 90936bb..0000000 --- a/mix_vbp/Merge_readme.txt +++ /dev/null @@ -1,2 +0,0 @@ -DHG revision #218237 - diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c old mode 100755 new mode 100644 index 4cc58d3..2694f7d --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c @@ -490,7 +490,7 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ - +#ifndef VBP static uint32_t viddec_h264_is_frame_start(void *ctxt) { struct h264_viddec_parser* parser = ctxt; @@ -504,13 +504,11 @@ static uint32_t viddec_h264_is_frame_start(void *ctxt) return ret; } +#endif -#ifdef VBP +#ifndef VBP uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors) -#else -static uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors) -#endif { struct h264_viddec_parser* parser = ctxt; uint32_t ret = VIDDEC_PARSE_SUCESS; @@ -548,6 
+546,7 @@ static uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int nex return ret; } +#endif #ifdef VBP void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) @@ -563,6 +562,7 @@ static void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) + sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; } +#ifndef VBP void viddec_h264_get_ops(viddec_parser_ops_t *ops) { ops->init = viddec_h264_init; @@ -573,4 +573,5 @@ void viddec_h264_get_ops(viddec_parser_ops_t *ops) ops->is_frame_start = viddec_h264_is_frame_start; return; } +#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c index 7c7eaa8..c9ec2fb 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c @@ -1,3 +1,4 @@ +#ifndef VBP #include #include "viddec_fw_workload.h" @@ -373,4 +374,4 @@ uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt) return result; } // viddec_fw_mp4_emit_workload - +#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c index 72ad8b7..ef9b3f5 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c @@ -9,7 +9,9 @@ #include "viddec_mp4_videoobjectplane.h" #include "viddec_mp4_visualobject.h" +#ifndef VBP extern uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state); +#endif void viddec_mp4_get_context_size(viddec_parser_memory_sizes_t *size) { @@ -19,6 +21,7 @@ void viddec_mp4_get_context_size(viddec_parser_memory_sizes_t *size) return; } // viddec_mp4_get_context_size +#ifndef VBP uint32_t viddec_mp4_wkld_done(void *parent, void *ctxt, uint32_t next_sc, uint32_t *codec_specific_errors) { viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt; @@ -105,6 +108,7 @@ uint32_t viddec_mp4_wkld_done(void *parent, void *ctxt, uint32_t next_sc, uint32 return result; } // viddec_mp4_wkld_done +#endif void viddec_mp4_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) { @@ -282,6 +286,7 @@ uint32_t viddec_mp4_parse(void *parent, void *ctxt) return VIDDEC_PARSE_SUCESS; } // viddec_mp4_parse +#ifndef VBP uint32_t viddec_mp4_is_frame_start(void *ctxt) { viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *)ctxt; @@ -298,3 +303,4 @@ void viddec_mp4_get_ops(viddec_parser_ops_t *ops) ops->init = viddec_mp4_init; return; } // viddec_mp4_get_ops +#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c index ba296e7..70f7454 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c @@ -17,7 +17,7 @@ get complicated is resync marker in LVH can potentially be (00 00 8) which will cause false detect of SVH start code. 
*/ - +#ifndef VBP uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state) { uint8_t *ptr; @@ -149,3 +149,4 @@ uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state) /* Return SC found only if phase is 4, else always success */ return ret; } +#endif diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk index aa5330e..882b081 100644 --- a/mix_vbp/viddec_fw/fw/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -10,13 +10,8 @@ LOCAL_SRC_FILES := \ vbp_loader.c \ vbp_mp42_parser.c \ vbp_utils.c \ - viddec_emit.c \ viddec_parse_sc.c \ - viddec_parse_sc_stub.c \ - viddec_pm.c \ viddec_pm_parser_ops.c \ - viddec_pm_stubs.c \ - viddec_pm_tags.c \ viddec_pm_utils_bstream.c \ viddec_pm_utils_list.c diff --git a/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c b/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c deleted file mode 100644 index c815406..0000000 --- a/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c +++ /dev/null @@ -1,224 +0,0 @@ -/* - - This file is provided under a dual BSD/GPLv2 license. When using or - redistributing this file, you may do so under either license. - - GPL LICENSE SUMMARY - - Copyright(c) 2005-2008 Intel Corporation. All rights reserved. - - This program is free software; you can redistribute it and/or modify - it under the terms of version 2 of the GNU General Public License as - published by the Free Software Foundation. - - This program is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. - The full GNU General Public License is included in this distribution - in the file called LICENSE.GPL. - - Contact Information: - Intel Corporation - 2200 Mission College Blvd. - Santa Clara, CA 97052 - - BSD LICENSE - - Copyright(c) 2005-2008 Intel Corporation. All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the - distribution. - * Neither the name of Intel Corporation nor the names of its - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -*/ - -#ifndef SVEN_FW_H -#include "sven_fw.h" -#endif - -#define _OSAL_IO_MEMMAP_H /* to prevent errors when including sven_devh.h */ -#define _OSAL_ASSERT_H /* to prevent errors when including sven_devh.h */ -#include "sven_devh.h" - -#include "fw_pvt.h" - -static os_devhandle_t g_svenh; - -#define FW_SVEN_DEVH_DISABLE_SVEN_REGISTER_IO -//#define SVEN_DEVH_DISABLE_SVEN - -extern int sven_fw_is_tx_enabled( - struct SVENHandle *svenh ); - -#ifndef SVEN_DEVH_DISABLE_SVEN -static void sven_write_event( - struct SVENHandle *svenh, - struct SVENEvent *ev ) -{ - if ( NULL == svenh ) - svenh = &g_svenh.devh_svenh; - - if ( NULL != svenh->phot ) - sven_fw_write_event(svenh,ev); -} - -static void sven_fw_initialize_event_top( - struct SVENEvent *ev, - int module, - int unit, - int event_type, - int event_subtype ) -{ - ev->se_et.et_gencount = 0; - ev->se_et.et_module = module; - ev->se_et.et_unit = unit; - ev->se_et.et_type = event_type; - ev->se_et.et_subtype = event_subtype; -} -#endif - -uint32_t sven_get_timestamp() -{ - uint32_t value = 0; - - if ( NULL != g_svenh.devh_svenh.ptime ) - { - value = sven_fw_read_external_register( &g_svenh.devh_svenh, g_svenh.devh_svenh.ptime ); - } - - return(value); -} - -/* ---------------------------------------------------------------------- */ -/* ---------------------------------------------------------------------- */ - -void devh_SVEN_SetModuleUnit( - os_devhandle_t *devh, - int sven_module, - int sven_unit ) -{ -#ifndef SVEN_DEVH_DISABLE_SVEN - if ( NULL == devh ) - devh = &g_svenh; - devh->devh_sven_module = sven_module; - devh->devh_sven_unit = sven_unit; -#endif -} - -os_devhandle_t *devhandle_factory( const char *desc ) -{ - /* pointer to global vsparc local registers */ - g_svenh.devh_regs_ptr = (void *) 0x10000000; /* firmware address to Local (GV) registers */ - - return( &g_svenh ); -} - -int devhandle_connect_name( - os_devhandle_t *devh, - const char *devname ) -{ - return(1); -} - -/* ---------------------------------------------------------------------- */ -/* ---------------------------------------------------------------------- */ - -void devh_SVEN_WriteModuleEvent( - os_devhandle_t *devh, - int module_event_subtype, - unsigned int payload0, - unsigned int payload1, - unsigned int payload2, - unsigned int payload3, - unsigned int payload4, - unsigned int payload5 ) -{ -#ifndef SVEN_DEVH_DISABLE_SVEN - struct SVENEvent ev __attribute__ ((aligned(8))); - - devh = (NULL != devh) ? devh : &g_svenh; - - if ( ! 
sven_fw_is_tx_enabled( &devh->devh_svenh ) ) - return; - - sven_fw_initialize_event_top( &ev, - devh->devh_sven_module, - 1 /* devh->devh_sven_unit */, - SVEN_event_type_module_specific, - module_event_subtype ); - - ev.u.se_uint[0] = payload0; - ev.u.se_uint[1] = payload1; - ev.u.se_uint[2] = payload2; - ev.u.se_uint[3] = payload3; - ev.u.se_uint[4] = payload4; - ev.u.se_uint[5] = payload5; - - sven_write_event( &devh->devh_svenh, &ev ); -#endif -} - -/* ---------------------------------------------------------------------- */ -/* SVEN FW TX: Required custom routines to enable FW TX */ -/* ---------------------------------------------------------------------- */ -int sven_fw_set_globals( - struct SVEN_FW_Globals *fw_globals ) -{ - sven_fw_attach( &g_svenh.devh_svenh, fw_globals ); - devh_SVEN_SetModuleUnit( &g_svenh, SVEN_module_GEN4_GV, 1 ); - return(0); -} - -uint32_t cp_using_dma_phys(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap); - -unsigned int sven_fw_read_external_register( - struct SVENHandle *svenh, - volatile unsigned int *preg ) -{ - unsigned int reg __attribute__ ((aligned(8))); - - (void)svenh; // argument unused - - cp_using_dma_phys( (uint32_t) preg, (uint32_t) ®, 4, 0, 0 ); - - return( reg ); -} - -void sven_fw_copy_event_to_host_mem( - struct SVENHandle *svenh, - volatile struct SVENEvent *to, - const struct SVENEvent *from ) -{ - (void)svenh; // argument unused - - cp_using_dma_phys( (uint32_t) to, (uint32_t) from, sizeof(*to), 1, 0 ); -} -/* ---------------------------------------------------------------------- */ -/* ---------------------------------------------------------------------- */ diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h index 4712be7..130ce39 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h @@ -1,6 +1,6 @@ #ifndef VIDDEC_H264_PARSE_H #define VIDDEC_H264_PARSE_H - +#ifndef VBP void viddec_h264_get_ops(viddec_parser_ops_t *ops); - +#endif #endif diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h index 7f406fd..1ead7ec 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h @@ -34,7 +34,7 @@ typedef struct /* This function initialises the list to default values */ void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt); - +#ifndef VBP /* This function adds a new entry to list and will emit tags if needed */ uint32_t viddec_pm_utils_list_addbuf(viddec_pm_utils_list_t *list, viddec_input_buffer_t *es_buf); @@ -49,3 +49,4 @@ void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint /* this function returns 1 if the requested byte is not found. If found returns list and offset into list */ uint32_t viddec_pm_utils_list_getbyte_position(viddec_pm_utils_list_t *list, uint32_t byte, uint32_t *list_index, uint32_t *offset); #endif +#endif diff --git a/mix_vbp/viddec_fw/fw/parser/main.c b/mix_vbp/viddec_fw/fw/parser/main.c deleted file mode 100644 index ad921b4..0000000 --- a/mix_vbp/viddec_fw/fw/parser/main.c +++ /dev/null @@ -1,608 +0,0 @@ -#include "fw_pvt.h" -#include "viddec_fw_parser_ipclib_config.h" -#include "viddec_fw_common_defs.h" -#include "viddec_fw_parser.h" -#include "viddec_fw_debug.h" - -/* This define makes sure that the structure is stored in Local memory. 
- This is shared memory between host and FW.*/ -volatile dmem_t _dmem __attribute__ ((section (".exchange"))); -/* Debug index should be disbaled for Production FW */ -uint32_t dump_ptr=0; -uint32_t timer=0; - -/* Auto Api definitions */ -ismd_api_group viddec_fw_api_array[2]; - -extern void viddec_fw_parser_register_callbacks(void); - -/*------------------------------------------------------------------------------ - * Function: initialize firmware SVEN TX Output - *------------------------------------------------------------------------------ - */ -int SMDEXPORT viddec_fw_parser_sven_init(struct SVEN_FW_Globals *sven_fw_globals ) -{ - extern int sven_fw_set_globals(struct SVEN_FW_Globals *fw_globals ); - return(sven_fw_set_globals(sven_fw_globals)); -} - -/*------------------------------------------------------------------------------ - * Function: viddec_fw_check_watermark_boundary - * This function figures out if we crossesd watermark boundary on input data. - * before represents the ES Queue data when we started and current represents ES Queue data - * when we are ready to swap.Threshold is the amount of data specified by the driver to trigger an - * interrupt. - * We return true if threshold is between before and current. - *------------------------------------------------------------------------------ - */ -static inline uint32_t viddec_fw_check_watermark_boundary(uint32_t before, uint32_t current, uint32_t threshold) -{ - return ((before >= threshold) && (current < threshold)); -} - -/*------------------------------------------------------------------------------ - * Function: viddec_fw_get_total_input_Q_data - * This function figures out how much data is available in input queue of the FW - *------------------------------------------------------------------------------ - */ -static uint32_t viddec_fw_get_total_input_Q_data(uint32_t indx) -{ - FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem); - uint32_t ret; - int32_t pos=0; - FW_IPC_ReceiveQue *rcv_q; - - rcv_q = &fwipc->rcv_q[indx]; - /* count the cubby buffer which we already read if present */ - ret = (_dmem.stream_info[indx].buffered_data) ? CONFIG_IPC_MESSAGE_MAX_SIZE:0; - ret += ipc_mq_read_avail(&rcv_q->mq, (int32_t *)&pos); - return ret; -} - -/*------------------------------------------------------------------------------ - * Function: mfd_round_robin - * Params: - * [in] pri: Priority of the stream - * [in] indx: stream id number of the last stream that was scheduled. - * [out] qnum: Stream id of priority(pri) which has data. - * This function is responsible for figuring out which stream needs to be scheduled next. - * It starts after the last scheduled stream and walks through all streams until it finds - * a stream which is of required priority, in start state, has space on output and data in - * input. - * If no such stream is found qnum is not updated and return value is 0. - * If a stream is found then qnum is updated with that id and function returns 1. 
- *------------------------------------------------------------------------------ - */ - -uint32_t mfd_round_robin(uint32_t pri, int32_t *qnum, int32_t indx) -{ - FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem); - int32_t i = CONFIG_IPC_FW_MAX_RX_QUEUES; - uint32_t ret = 0; - /* Go through all queues until we find a valid queue of reqd priority */ - while (i>0) - { - indx++; - if (indx >= CONFIG_IPC_FW_MAX_RX_QUEUES) indx = 0; - - /* We should look only at queues which match priority and - in running state */ - if ( (_dmem.stream_info[indx].state == 1) - && (_dmem.stream_info[indx].priority == pri)) - { - uint32_t inpt_avail=0, output_avail=0, wklds_avail =0 , pos; - FW_IPC_ReceiveQue *rcv_q; - rcv_q = &fwipc->rcv_q[indx]; - inpt_avail = (_dmem.stream_info[indx].buffered_data > 0) || (ipc_mq_read_avail(&rcv_q->mq, (int32_t *)&pos) > 0); - /* we have to check for two workloads to protect against error cases where we might have to push both current and next workloads */ - output_avail = FwIPC_SpaceAvailForMessage(fwipc, &fwipc->snd_q[indx], CONFIG_IPC_MESSAGE_MAX_SIZE, &pos) >= 2; - pos = 0; - /* Need at least current and next to proceed */ - wklds_avail = (ipc_mq_read_avail(&fwipc->wkld_q[indx].mq, (int32_t *)&pos) >= (CONFIG_IPC_MESSAGE_MAX_SIZE << 1)); - if (inpt_avail && output_avail && wklds_avail) - {/* Success condition: we have some data on input and enough space on output queue */ - *qnum = indx; - ret =1; - break; - } - } - i--; - } - return ret; -} -static inline void mfd_setup_emitter(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, mfd_pk_strm_cxt *cxt) -{ - int32_t ret1=0,ret=0; - /* We don't check return values for the peek as round robin guarantee's that we have required free workloads */ - ret = FwIPC_PeekReadMessage(fwipc, rcv_q, (char *)&(cxt->wkld1), sizeof(ipc_msg_data), 0); - ret1 = FwIPC_PeekReadMessage(fwipc, rcv_q, (char *)&(cxt->wkld2), sizeof(ipc_msg_data), 1); - viddec_emit_update(&(cxt->pm.emitter), cxt->wkld1.phys, cxt->wkld2.phys, cxt->wkld1.len, cxt->wkld2.len); -} - -static inline void mfd_init_swap_memory(viddec_pm_cxt_t *pm, uint32_t codec_type, uint32_t start_addr, uint32_t clean) -{ - uint32_t *persist_mem; - persist_mem = (uint32_t *)(start_addr | GV_DDR_MEM_MASK); - viddec_pm_init_context(pm,codec_type, persist_mem, clean); - pm->sc_prefix_info.first_sc_detect = 1; - viddec_emit_init(&(pm->emitter)); -} - -void output_omar_wires( unsigned int value ) -{ -#ifdef RTL_SIMULATION - reg_write(CONFIG_IPC_ROFF_HOST_DOORBELL, value ); -#endif -} - -/*------------------------------------------------------------------------------ - * Function: viddec_fw_init_swap_memory - * This function is responsible for seeting the swap memory to a good state for current stream. - * The swap parameter tells us whether we need to dma the context to local memory. - * We call init on emitter and parser manager which inturn calls init of the codec we are opening the stream for. 
- *------------------------------------------------------------------------------ - */ - -void viddec_fw_init_swap_memory(unsigned int stream_id, unsigned int swap, unsigned int clean) -{ - mfd_pk_strm_cxt *cxt; - mfd_stream_info *cxt_swap; - cxt = (mfd_pk_strm_cxt *)&(_dmem.srm_cxt); - cxt_swap = (mfd_stream_info *)&(_dmem.stream_info[stream_id]); - - if (swap) - {/* Swap context into local memory */ - cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), false, false); - } - - { - mfd_init_swap_memory(&(cxt->pm), cxt_swap->strm_type, cxt_swap->ddr_cxt+cxt_swap->cxt_size, clean); - cxt_swap->wl_time = 0; - cxt_swap->es_time = 0; - } - if (swap) - {/* Swap context into DDR */ - cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), true, false); - } -} - -/*------------------------------------------------------------------------------ - * Function: viddec_fw_push_current_frame_to_output - * This is a helper function to read a workload from input queue and push to output queue. - * This is called when are done with a frame. - *------------------------------------------------------------------------------ - */ -static inline void viddec_fw_push_current_frame_to_output(FW_IPC_Handle *fwipc, uint32_t cur) -{ - ipc_msg_data wkld_to_push; - FwIPC_ReadMessage(fwipc, &fwipc->wkld_q[cur], (char *)&(wkld_to_push), sizeof(ipc_msg_data)); - FwIPC_SendMessage(fwipc, cur, (char *)&(wkld_to_push), sizeof(ipc_msg_data)); -} - -/*------------------------------------------------------------------------------ - * Function: viddec_fw_get_next_stream_to_schedule - * This is a helper function to figure out which active stream needs to be scheduled next. - * If none of the streams are active it returns -1. - *------------------------------------------------------------------------------ - */ -static inline int viddec_fw_get_next_stream_to_schedule(void) -{ - int32_t cur = -1; - - if (mfd_round_robin(viddec_stream_priority_REALTIME, &cur, _dmem.g_pk_data.high_id)) - { - /* On success store the stream id */ - _dmem.g_pk_data.high_id = cur; - } - else - { - /* Check Low priority Queues, Since we couldn't find a valid realtime stream */ - if (mfd_round_robin(viddec_stream_priority_BACKGROUND, &cur, _dmem.g_pk_data.low_id)) - { - _dmem.g_pk_data.low_id = cur; - } - } - - return cur; -} - -/*------------------------------------------------------------------------------ - * Function: viddec_fw_update_pending_interrupt_flag - * This is a helper function to figure out if we need to mark an interrupt pending for this stream. - * We update status value here if we find any of the interrupt conditions are true. - * If this stream has a interrupt pending which we could not send to host, we don't overwrite past status info. 
- *------------------------------------------------------------------------------ - */ -static inline void viddec_fw_update_pending_interrupt_flag(int32_t cur, mfd_stream_info *cxt_swap, uint8_t pushed_a_workload, - uint32_t es_Q_data_at_start) -{ - if (_dmem.int_status[cur].mask) - { - if (!cxt_swap->pending_interrupt) - { - uint32_t es_Q_data_now; - uint8_t wmark_boundary_reached=false; - es_Q_data_now = viddec_fw_get_total_input_Q_data((uint32_t)cur); - wmark_boundary_reached = viddec_fw_check_watermark_boundary(es_Q_data_at_start, es_Q_data_now, cxt_swap->low_watermark); - _dmem.int_status[cur].status = 0; - if (pushed_a_workload) - { - _dmem.int_status[cur].status |= VIDDEC_FW_WKLD_DATA_AVAIL; - } - if (wmark_boundary_reached) - { - _dmem.int_status[cur].status |= VIDDEC_FW_INPUT_WATERMARK_REACHED; - } - cxt_swap->pending_interrupt = ( _dmem.int_status[cur].status != 0); - } - } - else - { - cxt_swap->pending_interrupt = false; - } -} - -static inline void viddec_fw_handle_error_and_inband_messages(int32_t cur, uint32_t pm_ret) -{ - FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem); - - viddec_fw_push_current_frame_to_output(fwipc, cur); - switch (pm_ret) - { - case PM_EOS: - case PM_OVERFLOW: - { - viddec_fw_init_swap_memory(cur, false, true); - } - break; - case PM_DISCONTINUITY: - { - viddec_fw_init_swap_memory(cur, false, false); - } - break; - default: - break; - } -} - -void viddec_fw_debug_scheduled_stream_state(int32_t indx, int32_t start) -{ - FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem); - uint32_t inpt_avail=0, output_avail=0, wklds_avail =0 , pos; - FW_IPC_ReceiveQue *rcv_q; - uint32_t message; - - message = (start) ? SVEN_MODULE_EVENT_GV_FW_PK_SCHDL_STRM_START: SVEN_MODULE_EVENT_GV_FW_PK_SCHDL_STRM_END; - rcv_q = &fwipc->rcv_q[indx]; - inpt_avail = ipc_mq_read_avail(&rcv_q->mq, (int32_t *)&pos); - inpt_avail += ((_dmem.stream_info[indx].buffered_data > 0) ? CONFIG_IPC_MESSAGE_MAX_SIZE: 0); - inpt_avail = inpt_avail >> 4; - pos = 0; - output_avail = ipc_mq_read_avail(&fwipc->snd_q[indx].mq, (int32_t *)&pos); - output_avail = output_avail >> 4; - pos = 0; - wklds_avail = ipc_mq_read_avail(&fwipc->wkld_q[indx].mq, (int32_t *)&pos); - wklds_avail = wklds_avail >> 4; - WRITE_SVEN(message, (int)indx, (int)inpt_avail, (int)output_avail, - (int)wklds_avail, 0, 0); -} - -/*------------------------------------------------------------------------------ - * Function: viddec_fw_process_async_queues(A.K.A -> Parser Kernel) - * This function is responsible for handling the asynchronous queues. - * - * The first step is to figure out which stream to run. The current algorithm - * will go through all high priority queues for a valid stream, if not found we - * go through lower priority queues. - * - * If a valid stream is found we swap the required context from DDR to DMEM and do all necessary - * things to setup the stream. - * Once a stream is setup we call the parser manager and wait until a wrkld is created or no more input - * data left. - * Once we find a wkld we push it to host and save the current context to DDR. 
- *------------------------------------------------------------------------------ - */ - -static inline int32_t viddec_fw_process_async_queues() -{ - int32_t cur = -1; - - cur = viddec_fw_get_next_stream_to_schedule(); - - if (cur != -1) - { - FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem); - FW_IPC_ReceiveQue *rcv_q; - /* bits captured by OMAR */ - output_omar_wires( 0x0 ); - rcv_q = &fwipc->rcv_q[cur]; - { - mfd_pk_strm_cxt *cxt; - mfd_stream_info *cxt_swap; - cxt = (mfd_pk_strm_cxt *)&(_dmem.srm_cxt); - cxt_swap = (mfd_stream_info *)&(_dmem.stream_info[cur]); - - /* Step 1: Swap rodata to local memory. Not doing this currently as all the rodata fits in local memory. */ - {/* Step 2: Swap context into local memory */ - cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), false, false); - } - /* Step 3:setup emitter by reading input data and workloads and initialising it */ - mfd_setup_emitter(fwipc, &fwipc->wkld_q[cur], cxt); - viddec_fw_debug_scheduled_stream_state(cur, true); - /* Step 4: Call Parser Manager until workload done or No more ES buffers */ - { - ipc_msg_data *data = 0; - uint8_t stream_active = true, pushed_a_workload=false; - uint32_t pm_ret = PM_SUCCESS, es_Q_data_at_start; - uint32_t start_time, time=0; - - start_time = set_wdog(VIDDEC_WATCHDOG_COUNTER_MAX); - timer=0; - es_Q_data_at_start = viddec_fw_get_total_input_Q_data((uint32_t)cur); - do - { - output_omar_wires( 0x1 ); - { - uint32_t es_t0,es_t1; - get_wdog(&es_t0); - pm_ret = viddec_pm_parse_es_buffer(&(cxt->pm), cxt_swap->strm_type, data); - get_wdog(&es_t1); - cxt_swap->es_time += get_total_ticks(es_t0, es_t1); - } - switch (pm_ret) - { - case PM_EOS: - case PM_WKLD_DONE: - case PM_OVERFLOW: - case PM_DISCONTINUITY: - {/* Finished a frame worth of data or encountered fatal error*/ - stream_active = false; - } - break; - case PM_NO_DATA: - { - uint32_t next_ret=0; - if ( (NULL != data) && (0 != cxt_swap->es_time) ) - { - /* print performance info for this buffer */ - WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PK_ES_DONE, (int)cur, (int)cxt_swap->es_time, (int)cxt->input.phys, - (int)cxt->input.len, (int)cxt->input.id, (int)cxt->input.flags ); - cxt_swap->es_time = 0; - } - - next_ret = FwIPC_ReadMessage(fwipc, rcv_q, (char *)&(cxt->input), sizeof(ipc_msg_data)); - if (next_ret != 0) - { - data = &(cxt->input); - WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PK_ES_START, (int)cur, (int)cxt_swap->wl_time, - (int)cxt->input.phys, (int)cxt->input.len, (int)cxt->input.id, (int)cxt->input.flags ); - } - else - {/* No data on input queue */ - cxt_swap->buffered_data = 0; - stream_active = false; - } - } - break; - default: - {/* Not done with current buffer */ - data = NULL; - } - break; - } - } while (stream_active); - get_wdog(&time); - cxt_swap->wl_time += get_total_ticks(start_time, time); - /* Step 5: If workload done push workload out */ - switch (pm_ret) - { - case PM_EOS: - case PM_WKLD_DONE: - case PM_OVERFLOW: - case PM_DISCONTINUITY: - {/* Push current workload as we are done with the frame */ - cxt_swap->buffered_data = (PM_WKLD_DONE == pm_ret) ? 
true: false; - viddec_pm_update_time(&(cxt->pm), cxt_swap->wl_time); - - /* xmit performance info for this workload output */ - WRITE_SVEN( SVEN_MODULE_EVENT_GV_FW_PK_WL_DONE, (int)cur, (int)cxt_swap->wl_time, (int)cxt->wkld1.phys, - (int)cxt->wkld1.len, (int)cxt->wkld1.id, (int)cxt->wkld1.flags ); - cxt_swap->wl_time = 0; - - viddec_fw_push_current_frame_to_output(fwipc, cur); - if (pm_ret != PM_WKLD_DONE) - { - viddec_fw_handle_error_and_inband_messages(cur, pm_ret); - } - pushed_a_workload = true; - } - break; - default: - break; - } - /* Update information on whether we have active interrupt for this stream */ - viddec_fw_update_pending_interrupt_flag(cur, cxt_swap, pushed_a_workload, es_Q_data_at_start); - } - viddec_fw_debug_scheduled_stream_state(cur, false); - /* Step 6: swap context into DDR */ - { - cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), true, false); - } - } - - } - return cur; -} - - -/*------------------------------------------------------------------------------ - * Function: process_command - * This magic function figures out which function to excute based on autoapi. - *------------------------------------------------------------------------------ - */ - -static inline void process_command(uint32_t cmd_id, unsigned char *command) -{ - int32_t groupid = ((cmd_id >> 24) - 13) & 0xff; - int32_t funcid = cmd_id & 0xffffff; - /* writing func pointer to hsot doorbell */ - output_omar_wires( (int) viddec_fw_api_array[groupid].unmarshal[funcid] ); - WRITE_SVEN( SVEN_MODULE_EVENT_GV_FW_AUTOAPI_CMD,(int) cmd_id, (int) command, ((int *)command)[0], - ((int *)command)[1], ((int *)command)[2], ((int *)command)[3] ); - - viddec_fw_api_array[groupid].unmarshal[funcid](0, command); - -} - -/*------------------------------------------------------------------------------ - * Function: viddec_fw_process_sync_queues(A.K.A auto api) - * Params: - * [in] msg: common sync structure where all required parameters are present for autoapi. - * - * This function is responsible for handling synchronous messages. All synchronous messages - * are handled through auto api. - * what are synchronous messages? Anything releated to teardown or opening a stream Ex: open, close, flush etc. - * - * Only once synchronous message at a time. When a synchronous message its id is usually in cp doorbell. Once - * we are done handling synchronous message through auto api we release doorbell to let the host write next - * message. - *------------------------------------------------------------------------------ - */ - -static inline int32_t viddec_fw_process_sync_queues(unsigned char *msg) -{ - int32_t ret = -1; - - if (0 == reg_read(CONFIG_IPC_ROFF_RISC_DOORBELL_STATUS)) - { - uint32_t command1=0; - command1 = reg_read(CONFIG_IPC_ROFF_RISC_RX_DOORBELL); - process_command(command1, msg); - reg_write(CONFIG_IPC_ROFF_RISC_DOORBELL_STATUS, 0x2); /* Inform Host we are done with this message */ - ret = 0; - } - return ret; -} - -/*------------------------------------------------------------------------------ - * Function: viddec_fw_check_for_pending_int - * This function walks through all active streams to see if atleast one stream has a pending interrupt - * and returns true if it finds one. 
- *------------------------------------------------------------------------------ - */ -static inline uint32_t viddec_fw_check_for_pending_int(void) -{ - uint32_t i=0, ret=false; - /* start from 0 to max streams that fw can handle*/ - while (i < FW_SUPPORTED_STREAMS) - { - if (_dmem.stream_info[i].state == 1) - { - if ((_dmem.stream_info[i].pending_interrupt) && _dmem.int_status[i].mask) - { - ret = true; - } - else - {/* If this is not in INT state clear the status before sending it to host */ - _dmem.int_status[i].status = 0; - } - } - i++; - } - return ret; -} - -/*------------------------------------------------------------------------------ - * Function: viddec_fw_clear_processed_int - * This function walks through all active streams to clear pending interrupt state.This is - * called after a INT was issued. - *------------------------------------------------------------------------------ - */ -static inline void viddec_fw_clear_processed_int(void) -{ - uint32_t i=0; - /* start from 0 to max streams that fw can handle*/ - while (i < FW_SUPPORTED_STREAMS) - { - //if(_dmem.stream_info[i].state == 1) - _dmem.stream_info[i].pending_interrupt = false; - i++; - } - return; -} - -/*------------------------------------------------------------------------------ - * Function: viddec_fw_int_host - * This function interrupts host if data is available for host or any other status - * is valid which the host configures the FW to. - * There is only one interrupt line so this is a shared Int for all streams, Host should - * look at status of all streams when it receives a Int. - * The FW will interrupt the host only if host doorbell is free, in other words the host - * should always make the doorbell free at the End of its ISR. - *------------------------------------------------------------------------------ - */ - -static inline int32_t viddec_fw_int_host() -{ - /* We Interrupt the host only if host is ready to receive an interrupt */ - if ((reg_read(CONFIG_IPC_ROFF_HOST_DOORBELL_STATUS) & GV_DOORBELL_STATS) == GV_DOORBELL_STATS) - { - if (viddec_fw_check_for_pending_int()) - { - /* If a pending interrupt is found trigger INT */ - reg_write(CONFIG_IPC_ROFF_HOST_DOORBELL, VIDDEC_FW_PARSER_IPC_HOST_INT); - /* Clear all stream's pending Interrupt info since we use a global INT for all streams */ - viddec_fw_clear_processed_int(); - } - } - return 1; -} -volatile unsigned int stack_corrupted __attribute__ ((section (".stckovrflwchk"))); -/*------------------------------------------------------------------------------ - * Function: main - * This function is the main firmware function. Its a infinite loop where it polls - * for messages and processes them if they are available. Currently we ping pong between - * synchronous and asynchronous messages one at a time. If we have multiple aysnchronous - * queues we always process only one between synchronous messages. - * - * For multiple asynchronous queues we round robin through the high priorities first and pick - * the first one available. Next time when we come around for asynchronous message we start - * from the next stream onwards so this guarantees that we give equal time slices for same - * priority queues. If no high priority queues are active we go to low priority queues and repeat - * the same process. 
-/*------------------------------------------------------------------------------ - * Function: main - * This function is the main firmware function. It's an infinite loop where it polls - * for messages and processes them if they are available. Currently we ping-pong between - * synchronous and asynchronous messages one at a time. If we have multiple asynchronous - * queues we always process only one between synchronous messages. - * - * For multiple asynchronous queues we round robin through the high priorities first and pick - * the first one available. The next time we come around for an asynchronous message we start - * from the next stream onwards, so this guarantees that we give equal time slices to - * same-priority queues. If no high priority queues are active we go to the low priority queues and repeat - * the same process. - *------------------------------------------------------------------------------ - */ - -int main(void) -{ - unsigned char *msg = (uint8_t *)&(_dmem.buf.data[0]); - - /* We wait until the host reads the sync message */ - reg_write(CONFIG_IPC_ROFF_HOST_RX_DOORBELL, GV_FW_IPC_HOST_SYNC); - - while ( GV_DOORBELL_STATS != reg_read(CONFIG_IPC_ROFF_HOST_DOORBELL_STATUS) ) - { /* poll register until the done bit is set */ - /* Host re-writes Vsparc DRAM (BSS) in this loop and will hit the DONE bit when complete */ - } - enable_intr(); - /* Initialize state for the queues */ - viddec_fw_parser_register_callbacks(); - FwIPC_Initialize(GET_IPC_HANDLE(_dmem), (volatile char *)msg); - _dmem.g_pk_data.high_id = _dmem.g_pk_data.low_id = -1; - viddec_pm_init_ops(); - stack_corrupted = 0xDEADBEEF; - while (1) - { - viddec_fw_process_sync_queues(msg); - viddec_fw_process_async_queues(); - viddec_fw_int_host(); -#if 0 - if (stack_corrupted != 0xDEADBEEF) - { - WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_FATAL_STACK_CORRPON, 0, 0, 0, 0, 0, 0); - while (1); - } -#endif - } - return 1; -} diff --git a/mix_vbp/viddec_fw/fw/parser/utils.c b/mix_vbp/viddec_fw/fw/parser/utils.c deleted file mode 100644 index dd65bf5..0000000 --- a/mix_vbp/viddec_fw/fw/parser/utils.c +++ /dev/null @@ -1,253 +0,0 @@ -#include "fw_pvt.h" -#include "viddec_fw_parser_ipclib_config.h" - -extern uint32_t timer; - -/*------------------------------------------------------------------------------ - * Function: memcpy - * This is a memory-copy function. - *------------------------------------------------------------------------------ - */ -/* NOTE: we are inventing memcpy since we don't want to include string libs as part of the FW, due to size limitations */ -void *memcpy(void *dest, const void *src, uint32_t n) -{ - uint8_t *ptr8_frm, *ptr8_to; - uint32_t *ptr32_frm, *ptr32_to; - uint32_t bytes_left=n,trail = 0; - uint32_t align=0; - - ptr8_frm = (uint8_t *)src; - ptr8_to = (uint8_t *)dest; - - trail = ((uint32_t)ptr8_frm) & 0x3; - if ((trail == (((uint32_t)ptr8_to) & 0x3)) && (n > 4)) - { - /* copy the leading bytes needed to bring both pointers onto a word boundary */ - align = (4 - trail) & 0x3; - bytes_left -= align; - while (align > 0) { - *ptr8_to ++ = *ptr8_frm ++; - align--; - } - /* check to see if the rest of the bytes is a multiple of 4. */ - trail = bytes_left & 0x3; - bytes_left = (bytes_left >> 2) << 2; - ptr32_to = (uint32_t *)ptr8_to; - ptr32_frm = (uint32_t *)ptr8_frm; - /* copy word by word */ - while (bytes_left > 0) { - *ptr32_to ++ = *ptr32_frm ++; - bytes_left -= 4; - } - /* If there are any trailing bytes do a byte copy */ - ptr8_to = (uint8_t *)ptr32_to; - ptr8_frm = (uint8_t *)ptr32_frm; - while (trail > 0) { - *ptr8_to ++ = *ptr8_frm ++; - trail--; - } - } - else - {/* case when src and dest addr are not on the same alignment. - Just do a byte copy */ - while (bytes_left > 0) { - *ptr8_to ++ = *ptr8_frm ++; - bytes_left -= 1; - } - } - return dest; -}
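A compact restatement of the fast path's precondition may help: both pointers must be congruent modulo 4, and the loop first copies the bytes needed to reach a word boundary. The helper below is hypothetical and assumes 32-bit pointers, as this firmware does.

    #include <stdint.h>

    /* Hypothetical restatement of the leading-byte count copied before the
     * word-by-word loop takes over: 0 when p is already word aligned. */
    static uint32_t align_lead_bytes(const void *p)
    {
        uint32_t trail = ((uint32_t)p) & 0x3;
        return (4 - trail) & 0x3;
    }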
-/*------------------------------------------------------------------------------ - * Function: memset - * This is a function to fill a memory array with a specified value. - *------------------------------------------------------------------------------ - */ -/* NOTE: we are inventing memset since we don't want to include string libs as part of the FW, due to size limitations */ -void *memset(void *s, int32_t c, uint32_t n) -{ - uint8_t *ptr8 = (uint8_t *)s; - uint32_t *ptr32, data; - uint32_t mask = 0, bytes_left = n; - - mask = c & 0xFF; - mask |= (mask << 8); - mask |= (mask << 16); - if (n >= 4) - { - uint32_t trail=0; - trail = 4 - (((uint32_t)ptr8) & 0x3); - if (trail < 4) - { - ptr32 = (uint32_t *)(((uint32_t)ptr8) & ~0x3); - data = (*ptr32 >> (8*trail)) << (8*trail); - data |= (mask >> (32 - (8*trail))); - *ptr32 = data; - bytes_left -= trail; - ptr8 += trail; - } - ptr32 = (uint32_t *)((uint32_t)ptr8); - while (bytes_left >= 4) - { - *ptr32 = mask; - ptr32++; - bytes_left -=4; - } - if (bytes_left > 0) - { - data = (*ptr32 << (8*bytes_left)) >> (8*bytes_left); - data |= (mask << (32 - (8*bytes_left))); - *ptr32=data; - } - } - - return s; -} - -/*------------------------------------------------------------------------------ - * Function: cp_using_dma - * This is a function to copy data between local memory and system memory (DDR). - * Params: - * [in] ddr_addr : Word aligned ddr address. - * [in] local_addr: Word aligned local address. - * [in] size : Number of bytes to transfer. - * [in] to_ddr : Direction of copy, if true copy to ddr else copy to local memory. - * [in] swap : Enable or disable byte swap (endianness). - * [out] return : Actual number of bytes copied, which can be more than what was requested - * since we can only copy words at a time. - * Limitations: DMA can transfer words only; local addr & DDR addr should be word aligned. - *------------------------------------------------------------------------------ - */ -uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap) -{ - uint32_t val=0, wrote = size; - - while ((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_BUSY) != 0) - { - /* wait while DMA is busy with a transaction. Error condition?? */ - } - - reg_write(DMA_SYSTEM_ADDRESS, (ddr_addr & ~3) & ~GV_DDR_MEM_MASK); - reg_write(DMA_LOCAL_ADDRESS, (local_addr & 0xfffc)); - //wrote += (ddr_addr & 0x3); - wrote = (wrote+3)>>2;/* convert the byte count to a word count, rounding up */ - val=(wrote & 0xffff) << 2; - reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE); - val |= DMA_CTRL_STATUS_START; - /* for transfers longer than 64 words, use 128 byte bursts */ - if (wrote > 64) - val |= (1<<18); - if (swap) /* Endian swap if needed */ - val |= DMA_CTRL_STATUS_SWAP; - if (to_ddr) - val = val | DMA_CTRL_STATUS_DIRCN; - reg_write(DMA_CONTROL_STATUS, val); - while ((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_DONE) == 0) - { - /* wait till DMA is done */ - } - reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE); - - return (wrote << 2); -}
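Since the engine moves whole words, the value returned by cp_using_dma() can exceed the request. A hypothetical caller illustrates the rounding; the wrapper name is invented, and the prototype matches the function above.

    #include <stdint.h>

    extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr,
                                 uint32_t size, char to_ddr, char swap);

    /* Hypothetical wrapper: a 10-byte request is rounded up to 3 words,
     * so 12 bytes are reported as moved. */
    static uint32_t flush_ten_bytes_to_ddr(uint32_t ddr_addr, uint32_t local_addr)
    {
        return cp_using_dma(ddr_addr, local_addr, 10, 1 /* to_ddr */, 0 /* no swap */);
    }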
-/*------------------------------------------------------------------------------ - * Function: cp_using_dma_phys - * This is a function to copy data between local memory and system memory (DDR), given a physical DDR address. - * Params: - * [in] ddr_addr : Word aligned ddr address. - * [in] local_addr: Word aligned local address. - * [in] size : Number of bytes to transfer. - * [in] to_ddr : Direction of copy, if true copy to ddr else copy to local memory. - * [in] swap : Enable or disable byte swap (endianness). - * [out] return : Actual number of bytes copied, which can be more than what was requested - * since we can only copy words at a time. - * Limitations: DMA can transfer words only; local addr & DDR addr should be word aligned. - *------------------------------------------------------------------------------ - */ -uint32_t cp_using_dma_phys(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap) -{ - uint32_t val=0, wrote = size; - - while ((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_BUSY) != 0) - { - /* wait while DMA is busy with a transaction. Error condition?? */ - } - - reg_write(DMA_SYSTEM_ADDRESS, (ddr_addr & ~3)); - reg_write(DMA_LOCAL_ADDRESS, (local_addr & 0xfffc)); - //wrote += (ddr_addr & 0x3); - wrote = (wrote+3)>>2;/* convert the byte count to a word count, rounding up */ - val=(wrote & 0xffff) << 2; - reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE); - val |= DMA_CTRL_STATUS_START; - /* for transfers longer than 64 words, use 128 byte bursts */ - if (wrote > 64) - val |= (1<<18); - if (swap) /* Endian swap if needed */ - val |= DMA_CTRL_STATUS_SWAP; - if (to_ddr) - val = val | DMA_CTRL_STATUS_DIRCN; - reg_write(DMA_CONTROL_STATUS, val); - while ((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_DONE) == 0) - { - /* wait till DMA is done */ - } - reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE); - - return (wrote << 2); -} - -void update_ctrl_reg(uint8_t enable, uint32_t mask) -{ - uint32_t read_val = 0; - read_val = reg_read(CONFIG_CP_CONTROL_REG); - if (enable) - { - read_val = read_val | mask; - } - else - { - read_val = read_val & ~mask; - } - reg_write(CONFIG_CP_CONTROL_REG, read_val); - return; - -} - -extern uint32_t sven_get_timestamp(); - -uint32_t set_wdog(uint32_t offset) -{ -#ifdef B0_TIMER_FIX - update_ctrl_reg(0, WATCH_DOG_ENABLE); - reg_write(INT_REG, INT_WDOG_ENABLE); - reg_write(WATCH_DOG_COUNTER, offset & WATCH_DOG_MASK); - update_ctrl_reg(1, WATCH_DOG_ENABLE); - return offset & WATCH_DOG_MASK; -#else - return sven_get_timestamp(); -#endif -} - -void get_wdog(uint32_t *value) -{ -#ifdef B0_TIMER_FIX - *value = reg_read(WATCH_DOG_COUNTER) & WATCH_DOG_MASK; - reg_write(INT_REG, ~INT_WDOG_ENABLE); - update_ctrl_reg(0, WATCH_DOG_ENABLE); -#else - *value = sven_get_timestamp(); -#endif -} - -uint32_t get_total_ticks(uint32_t start, uint32_t end) -{ - uint32_t value; -#ifdef B0_TIMER_FIX - value = (start-end) + (start*timer); - timer=0; -#else - value = end-start;/* convert to 1 MHz clocks */ -#endif - return value; -} diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index 9ed4285..1bf8ee6 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -177,14 +177,16 @@ uint32 vbp_init_parser_entries_h264(vbp_context *pcontext) ETRACE ("Failed to set entry point." ); return VBP_LOAD; } - +#ifdef VBP + pcontext->parser_ops->is_wkld_done = NULL; +#else pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_h264_wkld_done"); if (NULL == pcontext->parser_ops->is_wkld_done) { ETRACE ("Failed to set entry point." ); return VBP_LOAD; } - +#endif /* entry point not needed */ pcontext->parser_ops->is_frame_start = NULL; return VBP_OK;
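The same #ifdef VBP shape repeats for every workload-related hook in these parser-entry hunks. A hypothetical macro capturing the pattern (for the stubbed hooks only; the other entry points are still resolved unconditionally) could look like this:

    /* Hypothetical macro for the workload hooks that VBP stubs out; under VBP
     * the hook is NULLed, otherwise it is resolved from the parser library. */
    #ifdef VBP
    #define RESOLVE_WKLD_OP(ctx, field, sym) ((ctx)->parser_ops->field = NULL)
    #else
    #define RESOLVE_WKLD_OP(ctx, field, sym) \
        do { \
            (ctx)->parser_ops->field = dlsym((ctx)->fd_parser, (sym)); \
            if ((ctx)->parser_ops->field == NULL) { \
                ETRACE("Failed to set entry point."); \
                return VBP_LOAD; \
            } \
        } while (0)
    #endif

With a macro like this, the is_wkld_done block in vbp_init_parser_entries_h264() above would reduce to RESOLVE_WKLD_OP(pcontext, is_wkld_done, "viddec_h264_wkld_done").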
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index 249a9f8..dfa536b 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -104,14 +104,16 @@ uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext) ETRACE ("Failed to set entry point." ); return VBP_LOAD; } - +#ifdef VBP + pcontext->parser_ops->parse_sc = NULL; +#else pcontext->parser_ops->parse_sc = dlsym(pcontext->fd_parser, "viddec_parse_sc_mp4"); if (pcontext->parser_ops->parse_sc == NULL) { ETRACE ("Failed to set entry point." ); return VBP_LOAD; } - +#endif pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_mp4_parse"); if (pcontext->parser_ops->parse_syntax == NULL) { @@ -125,14 +127,16 @@ uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext) ETRACE ("Failed to set entry point." ); return VBP_LOAD; } - +#ifdef VBP + pcontext->parser_ops->is_wkld_done = NULL; +#else pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_mp4_wkld_done"); if (pcontext->parser_ops->is_wkld_done == NULL) { ETRACE ("Failed to set entry point." ); return VBP_LOAD; } - +#endif return VBP_OK; } diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c index d23c758..f96cd53 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c @@ -73,6 +73,7 @@ int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, uint8_ } +#ifndef VBP static inline int32_t viddec_pm_append_restof_pixel_data(void *parent, uint32_t cur_wkld) { int32_t ret = 1; @@ -101,6 +102,7 @@ int32_t viddec_pm_append_pixeldata_next(void *parent) { return viddec_pm_append_restof_pixel_data(parent, 0); } +#endif viddec_workload_t* viddec_pm_get_header(void *parent) { @@ -140,6 +142,7 @@ uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte) return ret; } +#ifndef VBP int32_t viddec_pm_append_misc_tags(void *parent, uint32_t start, uint32_t end, viddec_workload_item_t *wi, uint32_t using_next) { int32_t ret = 1; @@ -152,6 +155,7 @@ int32_t viddec_pm_append_misc_tags(void *parent, uint32_t start, uint32_t end, v return ret; } +#endif void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error) { diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c index 4130234..853e52a 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c @@ -51,6 +51,7 @@ uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cx return ret; } +#ifndef VBP /* This function returns true if the cubby buffer has the last byte of an access unit. */ @@ -67,13 +68,14 @@ uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt) } return ret; } +#endif /* This function initializes the scratch buffer, which is used for staging already read data, due to DMA limitations */ static inline void viddec_pm_utils_bstream_scratch_init(viddec_pm_utils_bstream_scratch_cxt_t *cxt) { cxt->st = cxt->size = cxt->bitoff=0; } - +#ifndef VBP /* This function tells us how much more data is in the current es buffer from the current position. It's used to figure out if we need to go to the next es buffer */ @@ -137,6 +139,7 @@ static inline void viddec_pm_utils_bstream_scratch_copyfrom(viddec_pm_utils_bstr data++; } } +#endif /* This function populates the requested number of bytes into the data parameter, skipping emulation prevention bytes if needed */ static inline int32_t viddec_pm_utils_getbytes(viddec_pm_utils_bstream_buf_cxt_t *bstream, @@ -259,6 +262,7 @@ void viddec_pm_utils_skip_if_current_is_emulation(viddec_pm_utils_bstream_cxt_t } } +#ifndef VBP /* This function gets the physical address of the requested au offset (pos).
*/ @@ -344,6 +348,7 @@ void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt) } } } +#endif /* Init function called by the parser manager after an sc code is detected. diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c index 1641c6c..dc2e47c 100644 --- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c +++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c @@ -14,6 +14,7 @@ void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt) cxt->first_scprfx_length = 0; } +#ifndef VBP /* Add a new ES buffer to the list. If not successful, returns 0. */ @@ -223,3 +224,4 @@ void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint list->total_bytes = length; } } +#endif diff --git a/mix_video/docs/reference/MixVideo/html/home.png b/mix_video/docs/reference/MixVideo/html/home.png deleted file mode 100644 index 1700361..0000000 Binary files a/mix_video/docs/reference/MixVideo/html/home.png and /dev/null differ diff --git a/mix_video/docs/reference/MixVideo/html/left.png b/mix_video/docs/reference/MixVideo/html/left.png deleted file mode 100644 index 2d05b3d..0000000 Binary files a/mix_video/docs/reference/MixVideo/html/left.png and /dev/null differ diff --git a/mix_video/docs/reference/MixVideo/html/right.png b/mix_video/docs/reference/MixVideo/html/right.png deleted file mode 100644 index 92832e3..0000000 Binary files a/mix_video/docs/reference/MixVideo/html/right.png and /dev/null differ diff --git a/mix_video/docs/reference/MixVideo/html/up.png b/mix_video/docs/reference/MixVideo/html/up.png deleted file mode 100644 index 85b3e2a..0000000 Binary files a/mix_video/docs/reference/MixVideo/html/up.png and /dev/null differ -- cgit v1.2.3 From d6732f3c407bea972fe881656d0bfc6e82dd1805 Mon Sep 17 00:00:00 2001 From: "Gu, Wangyi" Date: Thu, 18 Oct 2012 09:01:08 +0800 Subject: [Movie Studio] make the transition complete when playing the exported video BZ: 61850 root cause: the async encode mode can't get the last encoded frame by default; we need a special method, that is, to push the last raw data to the encoder twice and so obtain the last encoded frame.
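A minimal sketch of the flush technique this message describes may help; every name below is invented for illustration (the project's real change is the C++ diff that follows). On end-of-stream, the last raw frame is submitted once more so the asynchronous encoder emits its final output.

    /* All types and helpers here are hypothetical, for illustration only. */
    typedef struct encoder encoder_t;
    typedef struct buffer buffer_t;
    #define ERR_EOS (-1)
    extern int source_read(encoder_t *enc, buffer_t **out); /* 0 on success, ERR_EOS at end */
    extern buffer_t *last_input(encoder_t *enc);
    extern void encode_submit(encoder_t *enc, buffer_t *in);
    extern int encode_get_output(encoder_t *enc, buffer_t **out);

    int read_encoded(encoder_t *enc, buffer_t **out)
    {
        buffer_t *in;
        int err = source_read(enc, &in);
        if (err == ERR_EOS) {
            if (last_input(enc) == NULL)
                return err;          /* nothing left to flush */
            in = last_input(enc);    /* re-submit the last raw frame */
        } else if (err != 0) {
            return err;
        }
        encode_submit(enc, in);
        encode_get_output(enc, out);
        return err;                  /* EOS is propagated only after the flush */
    }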
Change-Id: I93540d33aa315301bdaf3f23ed908c83fa9c2d93 Signed-off-by: Gu, Wangyi Reviewed-on: http://android.intel.com:8080/70522 Reviewed-by: Tang, Richard Reviewed-by: Zhao, Leo Reviewed-by: Jiang, Fei Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- .../stagefrightshells/IntelVideoEditorAVCEncoder.cpp | 16 +++++++++++----- .../videoedit/stagefrightshells/MediaBufferPuller.cpp | 4 +++- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp index a63dfd5..3e55e52 100644 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp @@ -345,7 +345,7 @@ sp<MetaData> IntelVideoEditorAVCEncoder::getFormat() { status_t IntelVideoEditorAVCEncoder::read(MediaBuffer **out, const ReadOptions *options) { - status_t err; + status_t err = OK; Encode_Status encRet; MediaBuffer *tmpIn; int64_t timestamp = 0; @@ -364,7 +364,11 @@ status_t IntelVideoEditorAVCEncoder::read(MediaBuffer **out, const ReadOptions * } while (err == INFO_FORMAT_CHANGED); if (err == ERROR_END_OF_STREAM) { - return err; + if (mLastInputBuffer != NULL) { + tmpIn = mLastInputBuffer; + } else { + return err; + } } else if (err != OK) { LOGE("Failed to read input video frame: %d", err); @@ -387,9 +391,12 @@ status_t IntelVideoEditorAVCEncoder::read(MediaBuffer **out, const ReadOptions * return UNKNOWN_ERROR; } - if (mLastInputBuffer != NULL) { + if (mLastInputBuffer != NULL && err != ERROR_END_OF_STREAM) { + mLastInputBuffer->meta_data()->findInt64(kKeyTime, &timestamp); mLastInputBuffer->release(); mLastInputBuffer = NULL; + } else { + timestamp = vaInBuf.timeStamp; } mLastInputBuffer = tmpIn; @@ -427,7 +434,6 @@ status_t IntelVideoEditorAVCEncoder::read(MediaBuffer **out, const ReadOptions * outputBuffer->meta_data()->setInt32(kKeyIsSyncFrame,true); } } - timestamp = vaInBuf.timeStamp; LOGV("Got it! data= %p, ts=%llu size =%d", vaOutBuf.data, timestamp, vaOutBuf.dataSize); @@ -436,7 +442,7 @@ status_t IntelVideoEditorAVCEncoder::read(MediaBuffer **out, const ReadOptions * *out = outputBuffer; LOGV("IntelVideoEditorAVCEncoder::read end"); - return OK; + return err; } status_t IntelVideoEditorAVCEncoder::getSharedBuffers() { diff --git a/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp b/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp index acc8268..6d826e9 100644 --- a/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp +++ b/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp @@ -134,7 +134,9 @@ void MediaBufferPuller::acquireThreadFunc() { status_t result = mSource->read(&pBuffer, NULL); mLock.lock(); mSourceError = result; - if (result != OK) { + if (result == ERROR_END_OF_STREAM && pBuffer != NULL) { + mAskToStop = true; + } else if (result != OK) { break; } mBuffers.push(pBuffer); -- cgit v1.2.3 From f61c79018134b9db1c82648da8abf76b217506fa Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Thu, 18 Oct 2012 21:31:53 +0800 Subject: Accelerate the thumbnail extraction when the sync-frame duration is long BZ: 58036 When the sync-frame duration is very long in the test clips, the thumbnail extraction will be very slow, because the program has to decode from the previous sync-frame all the way to the desired frame. If the end user presses the exit button before the thumbnail extraction finishes, sometimes an ANR will happen.
In this patch, if the desired frame's timestamp is more than 3 seconds later than the previous sync-frame's, we choose to use the sync-frame as the thumbnail. Change-Id: Ia6c2f31fce13bb0af35f4c72cd8600f3540d87c3 Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/70550 Reviewed-by: Guo, Nana N Reviewed-by: Tang, Richard Reviewed-by: Jiang, Fei Reviewed-by: Chen, Tianmi Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- .../stagefrightshells/VideoEditorVideoDecoder.cpp | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp index 7f5edcd..4363a97 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp @@ -38,6 +38,8 @@ ********************/ #define MAX_DEC_BUFFERS 4 +#define THUMBNAIL_THRES 3000 + /******************** * SOURCE CLASS * ********************/ @@ -951,6 +953,9 @@ M4OSA_ERR VideoEditorVideoDecoder_create(M4OSA_Context *pContext, pDecShellContext->m_pReader = pReaderDataInterface; pDecShellContext->m_lastDecodedCTS = -1; pDecShellContext->m_lastRenderCts = -1; + + pDecShellContext->mThumbnail = 0; + switch( pStreamHandler->m_streamType ) { case M4DA_StreamTypeVideoH263: pDecShellContext->mDecoderType = VIDEOEDITOR_kH263VideoDec; @@ -1323,6 +1328,9 @@ M4OSA_ERR VideoEditorVideoDecoder_setOption(M4OSA_Context context, break; case M4DECODER_kOptionID_DeblockingFilter: break; + case M4DECODER_kOptionID_VideoDecodersThumbnailMode: + pDecShellContext->mThumbnail = *((M4OSA_Int32 *)pValue); + break; default: lerr = M4ERR_BAD_CONTEXT; break; @@ -1453,6 +1461,19 @@ M4OSA_ERR VideoEditorVideoDecoder_decode(M4OSA_Context context, pDecShellContext->m_lastDecodedCTS + pDecShellContext->mFrameIntervalMs + tolerance; + + if (bJump && pDecShellContext->mThumbnail) { + ALOGV("pDecShellContext->mFrameIntervalMs = %lld, tolerance = %d", (int64_t)pDecShellContext->mFrameIntervalMs, tolerance); + ALOGI("mThumbnail mode: currTimeMs = %lld, targetTimeMs = %lld", targetTimeMs, (int64_t)*pTime); + if (targetTimeMs + THUMBNAIL_THRES < (int64_t)*pTime) { + lerr = copyBufferToQueue(pDecShellContext, pDecoderBuffer); + if (lerr != M4NO_ERROR) { + goto VIDEOEDITOR_VideoDecode_cleanUP; + } + break; + } + } + if (!bJump || targetTimeMs > *pTime) { lerr = copyBufferToQueue(pDecShellContext, pDecoderBuffer); if (lerr != M4NO_ERROR) { -- cgit v1.2.3 From 6619fdf0d1048302fa6c2b91f9c454f5c5116d02 Mon Sep 17 00:00:00 2001 From: jiguoliang Date: Fri, 19 Oct 2012 16:11:16 -0400 Subject: [libmix] refine the bits_per_second param assignment BZ: 63342 correct the misc param (bits_per_second) assignment Signed-off-by: jiguoliang Change-Id: I873456e51320b947401220a62c4b5c515ccd492b Reviewed-on: http://android.intel.com:8080/70807 Reviewed-by: Ji, Guoliang Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderAVC.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 738eefa..a6a0d4b 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -796,6 +796,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { rcMiscParam->initial_qp = mComParams.rcParams.initQP; rcMiscParam->min_qp = mComParams.rcParams.minQP; rcMiscParam->window_size =
mComParams.rcParams.windowSize; + rcMiscParam->bits_per_second = mComParams.rcParams.bitRate; //for rate control usage rcMiscParam->basic_unit_size = mVideoParamsAVC.basicUnitSize; //for rate control usage avcSeqParams.intra_period = mComParams.intraPeriod; //avcSeqParams.vui_flag = 248; @@ -829,7 +830,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { LOG_I( "idr_interval = %d\n", avcSeqParams.intra_idr_period); LOG_I( "picture_width_in_mbs = %d\n", avcSeqParams.picture_width_in_mbs); LOG_I( "picture_height_in_mbs = %d\n", avcSeqParams.picture_height_in_mbs); - LOG_I( "bitrate = %d\n", avcSeqParams.bits_per_second); + LOG_I( "bitrate = %d\n", rcMiscParam->bits_per_second); LOG_I( "frame_rate = %d\n", framerateParam->framerate); LOG_I( "initial_qp = %d\n", rcMiscParam->initial_qp); LOG_I( "min_qp = %d\n", rcMiscParam->min_qp); -- cgit v1.2.3 From fc31356cfface10abfc755c6b7b8896d0428d82f Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Tue, 23 Oct 2012 18:10:14 +0800 Subject: To solve the license problems in libmix BZ: 63609 Use dual licenses to resolve the license problems in libmix. Change-Id: I42d49360900731fce62d87093539d399ec47b22d Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/71052 Reviewed-by: Guo, Nana N Reviewed-by: Chen, Tianmi Reviewed-by: Tang, Richard Reviewed-by: Jiang, Fei Reviewed-by: Cheng, Yao Reviewed-by: Zeng, Li Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- frameworks/asf_extractor/AsfExtractor.cpp | 22 +++++++++++- frameworks/asf_extractor/AsfExtractor.h | 22 +++++++++++- frameworks/asf_extractor/MediaBufferPool.cpp | 22 +++++++++++- frameworks/asf_extractor/MediaBufferPool.h | 22 +++++++++++- frameworks/asf_extractor/MetaDataExt.h | 22 +++++++++++- frameworks/libI420colorconvert/ColorConvert.cpp | 20 +++++++++++ frameworks/vavideodecoder/VAVideoDecoder.cpp | 22 +++++++++++- frameworks/vavideodecoder/VAVideoDecoder.h | 22 +++++++++++- frameworks/videoedit/lvpp/VideoEditorToolsNV12.c | 20 +++++++++++ frameworks/videoedit/lvpp/VideoEditorToolsNV12.h | 20 +++++++++++ .../IntelVideoEditorEncoderSource.cpp | 39 +++++++++++----------- .../IntelVideoEditorEncoderSource.h | 39 +++++++++++----------- .../stagefrightshells/IntelVideoEditorUtils.cpp | 20 +++++++++++ .../stagefrightshells/IntelVideoEditorUtils.h | 20 +++++++++++ .../stagefrightshells/MediaBufferPuller.cpp | 20 +++++++++++ .../stagefrightshells/MediaBufferPuller.h | 20 +++++++++++ .../stagefrightshells/VideoEditor3gpReader.cpp | 20 +++++++++++ .../stagefrightshells/VideoEditorAudioDecoder.cpp | 20 +++++++++++ .../stagefrightshells/VideoEditorAudioEncoder.cpp | 22 ++++++++++++ .../stagefrightshells/VideoEditorBuffer.cpp | 21 ++++++++++++ .../stagefrightshells/VideoEditorBuffer.h | 20 +++++++++++ .../stagefrightshells/VideoEditorMp3Reader.cpp | 20 +++++++++++ .../stagefrightshells/VideoEditorVideoDecoder.cpp | 22 ++++++++++++ .../stagefrightshells/VideoEditorVideoEncoder.cpp | 21 ++++++++++++ frameworks/videoedit/vss/EditVideo_NV12.h | 20 +++++++++++ frameworks/videoedit/vss/M4AIR_API_NV12.c | 20 +++++++++++ frameworks/videoedit/vss/M4AIR_API_NV12.h | 20 +++++++++++ frameworks/videoedit/vss/M4MCS_NV12.h | 20 +++++++++++ .../videoedit/vss/M4MCS_VideoPreProcessing_NV12.c | 20 +++++++++++ .../videoedit/vss/M4VSS3GPP_EditVideo_NV12.c | 20 +++++++++++ frameworks/videoedit/vss/M4xVSS_NV12.h | 20 +++++++++++ frameworks/videoedit/vss/M4xVSS_internal_NV12.c | 21 ++++++++++++ 32 files changed, 654 insertions(+), 45 deletions(-) diff --git 
a/frameworks/asf_extractor/AsfExtractor.cpp b/frameworks/asf_extractor/AsfExtractor.cpp index dc8643f..d2543fe 100644 --- a/frameworks/asf_extractor/AsfExtractor.cpp +++ b/frameworks/asf_extractor/AsfExtractor.cpp @@ -1,5 +1,25 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* -* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/frameworks/asf_extractor/AsfExtractor.h b/frameworks/asf_extractor/AsfExtractor.h index 4e17083..52a3a5c 100644 --- a/frameworks/asf_extractor/AsfExtractor.h +++ b/frameworks/asf_extractor/AsfExtractor.h @@ -1,5 +1,25 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* -* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. 
+* Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/frameworks/asf_extractor/MediaBufferPool.cpp b/frameworks/asf_extractor/MediaBufferPool.cpp index 22b25e0..c5d4a37 100644 --- a/frameworks/asf_extractor/MediaBufferPool.cpp +++ b/frameworks/asf_extractor/MediaBufferPool.cpp @@ -1,5 +1,25 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* -* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/frameworks/asf_extractor/MediaBufferPool.h b/frameworks/asf_extractor/MediaBufferPool.h index 2e35e0a..bfacb1f 100644 --- a/frameworks/asf_extractor/MediaBufferPool.h +++ b/frameworks/asf_extractor/MediaBufferPool.h @@ -1,5 +1,25 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. 
+ ************************************************************************************/ /* -* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/frameworks/asf_extractor/MetaDataExt.h b/frameworks/asf_extractor/MetaDataExt.h index bee1431..ad03d1c 100644 --- a/frameworks/asf_extractor/MetaDataExt.h +++ b/frameworks/asf_extractor/MetaDataExt.h @@ -1,5 +1,25 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* -* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/frameworks/libI420colorconvert/ColorConvert.cpp b/frameworks/libI420colorconvert/ColorConvert.cpp index 4b5b343..825ec75 100644 --- a/frameworks/libI420colorconvert/ColorConvert.cpp +++ b/frameworks/libI420colorconvert/ColorConvert.cpp @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. 
+ * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/vavideodecoder/VAVideoDecoder.cpp b/frameworks/vavideodecoder/VAVideoDecoder.cpp index a860e86..96f37d8 100644 --- a/frameworks/vavideodecoder/VAVideoDecoder.cpp +++ b/frameworks/vavideodecoder/VAVideoDecoder.cpp @@ -1,5 +1,25 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* -* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/frameworks/vavideodecoder/VAVideoDecoder.h b/frameworks/vavideodecoder/VAVideoDecoder.h index 911b0dc..aa16940 100644 --- a/frameworks/vavideodecoder/VAVideoDecoder.h +++ b/frameworks/vavideodecoder/VAVideoDecoder.h @@ -1,5 +1,25 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. 
+ * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* -* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c b/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c index e77a908..cf3fbdc 100644 --- a/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c +++ b/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/lvpp/VideoEditorToolsNV12.h b/frameworks/videoedit/lvpp/VideoEditorToolsNV12.h index 96bbd5b..55b527a 100644 --- a/frameworks/videoedit/lvpp/VideoEditorToolsNV12.h +++ b/frameworks/videoedit/lvpp/VideoEditorToolsNV12.h @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. 
+ * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.cpp index e2c16ed..060dad0 100644 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.cpp +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.cpp @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2010 The Android Open Source Project * @@ -14,25 +34,6 @@ * limitations under the License. */ -/* - * INTEL CONFIDENTIAL - * Copyright 2010-2011 Intel Corporation All Rights Reserved. - - * The source code, information and material ("Material") contained herein is owned - * by Intel Corporation or its suppliers or licensors, and title to such Material - * remains with Intel Corporation or its suppliers or licensors. The Material contains - * proprietary information of Intel or its suppliers and licensors. The Material is - * protected by worldwide copyright laws and treaty provisions. No part of the Material - * may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, - * distributed or disclosed in any way without Intel's prior express written permission. - * No license under any patent, copyright or other intellectual property rights in the - * Material is granted to or conferred upon you, either expressly, by implication, inducement, - * estoppel or otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - - * Unless otherwise agreed by Intel in writing, you may not remove or alter this notice or any - * other notice embedded in Materials by Intel or Intel's suppliers or licensors in any way. 
- */ //#define LOG_NDEBUG 0 #define LOG_TAG "IntelVideoEditorEncoderSource" diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.h b/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.h index 2f76051..a8c0126 100644 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.h +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.h @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2010 The Android Open Source Project * @@ -14,25 +34,6 @@ * limitations under the License. */ -/* - * INTEL CONFIDENTIAL - * Copyright 2010-2011 Intel Corporation All Rights Reserved. - - * The source code, information and material ("Material") contained herein is owned - * by Intel Corporation or its suppliers or licensors, and title to such Material - * remains with Intel Corporation or its suppliers or licensors. The Material contains - * proprietary information of Intel or its suppliers and licensors. The Material is - * protected by worldwide copyright laws and treaty provisions. No part of the Material - * may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, - * distributed or disclosed in any way without Intel's prior express written permission. - * No license under any patent, copyright or other intellectual property rights in the - * Material is granted to or conferred upon you, either expressly, by implication, inducement, - * estoppel or otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - - * Unless otherwise agreed by Intel in writing, you may not remove or alter this notice or any - * other notice embedded in Materials by Intel or Intel's suppliers or licensors in any way. 
- */ #ifndef INTELVIDEOEDITORENCODERSOURCE_H #define INTELVIDEOEDITORENCODERSOURCE_H diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp index 299d82a..4febe20 100644 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h index 4e1c929..c4a9af5 100644 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. 
+ ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp b/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp index 6d826e9..d6a0eee 100644 --- a/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp +++ b/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2012 The Android Open Source Project * diff --git a/frameworks/videoedit/stagefrightshells/MediaBufferPuller.h b/frameworks/videoedit/stagefrightshells/MediaBufferPuller.h index ed72a53..4863003 100644 --- a/frameworks/videoedit/stagefrightshells/MediaBufferPuller.h +++ b/frameworks/videoedit/stagefrightshells/MediaBufferPuller.h @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. 
+ ************************************************************************************/ /* * Copyright (C) 2012 The Android Open Source Project * diff --git a/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp b/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp index cc62a8b..b5cce1a 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp index 9b35d07..2c5510d 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. 
+ ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorAudioEncoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorAudioEncoder.cpp index a91f3ee..70140d0 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorAudioEncoder.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorAudioEncoder.cpp @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * @@ -13,6 +33,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + + /** ************************************************************************* * @file VideoEditorAudioEncoder.cpp diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.cpp index 07d158a..2ddea80 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.cpp @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. 
+ * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * @@ -13,6 +33,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + /** ************************************************************************* * @file VideoEditorBuffer.cpp diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.h b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.h index a180942..b2b510f 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.h +++ b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.h @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorMp3Reader.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorMp3Reader.cpp index af53c54..815c242 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorMp3Reader.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorMp3Reader.cpp @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. 
No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp index 4363a97..260c091 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * @@ -13,6 +33,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + + /** ************************************************************************* * @file VideoEditorVideoDecoder.cpp diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp index cafc615..c63d251 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. 
The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * @@ -13,6 +33,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + /** ************************************************************************* * @file VideoEditorVideoEncoder.cpp diff --git a/frameworks/videoedit/vss/EditVideo_NV12.h b/frameworks/videoedit/vss/EditVideo_NV12.h index 1af74bd..43950f9 100644 --- a/frameworks/videoedit/vss/EditVideo_NV12.h +++ b/frameworks/videoedit/vss/EditVideo_NV12.h @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/vss/M4AIR_API_NV12.c b/frameworks/videoedit/vss/M4AIR_API_NV12.c index fd87c89..a2de772 100644 --- a/frameworks/videoedit/vss/M4AIR_API_NV12.c +++ b/frameworks/videoedit/vss/M4AIR_API_NV12.c @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors.
The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/vss/M4AIR_API_NV12.h b/frameworks/videoedit/vss/M4AIR_API_NV12.h index aa04efc..a34c561 100644 --- a/frameworks/videoedit/vss/M4AIR_API_NV12.h +++ b/frameworks/videoedit/vss/M4AIR_API_NV12.h @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/vss/M4MCS_NV12.h b/frameworks/videoedit/vss/M4MCS_NV12.h index 6ad804c..afa68ab 100644 --- a/frameworks/videoedit/vss/M4MCS_NV12.h +++ b/frameworks/videoedit/vss/M4MCS_NV12.h @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. 
No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/vss/M4MCS_VideoPreProcessing_NV12.c b/frameworks/videoedit/vss/M4MCS_VideoPreProcessing_NV12.c index b9d5cc1..b9ce04c 100644 --- a/frameworks/videoedit/vss/M4MCS_VideoPreProcessing_NV12.c +++ b/frameworks/videoedit/vss/M4MCS_VideoPreProcessing_NV12.c @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/vss/M4VSS3GPP_EditVideo_NV12.c b/frameworks/videoedit/vss/M4VSS3GPP_EditVideo_NV12.c index 581665c..0871f92 100644 --- a/frameworks/videoedit/vss/M4VSS3GPP_EditVideo_NV12.c +++ b/frameworks/videoedit/vss/M4VSS3GPP_EditVideo_NV12.c @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. 
No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/vss/M4xVSS_NV12.h b/frameworks/videoedit/vss/M4xVSS_NV12.h index 35f4c67..91f1527 100644 --- a/frameworks/videoedit/vss/M4xVSS_NV12.h +++ b/frameworks/videoedit/vss/M4xVSS_NV12.h @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * diff --git a/frameworks/videoedit/vss/M4xVSS_internal_NV12.c b/frameworks/videoedit/vss/M4xVSS_internal_NV12.c index 310f49e..d92cb39 100644 --- a/frameworks/videoedit/vss/M4xVSS_internal_NV12.c +++ b/frameworks/videoedit/vss/M4xVSS_internal_NV12.c @@ -1,3 +1,23 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. 
+ * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ /* * Copyright (C) 2011 The Android Open Source Project * @@ -13,6 +33,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + #include "M4OSA_Debug.h" #include "M4OSA_CharStar.h" -- cgit v1.2.3 From 772e3705f0be2545cf55554c9ade6c7f1dc421eb Mon Sep 17 00:00:00 2001 From: Elaine Wang Date: Thu, 25 Oct 2012 14:23:08 +0800 Subject: Initialize sequence/picture parameter as zero BZ: 64406 Some fields in the libva encoder sequence/picture buffers aren't initialized and hold wrong values. Initialize the sequence/picture structures to zero, even though some fields are not used now. Signed-off-by: Elaine Wang Change-Id: I3b7f5bc8bca5ddc89ce8e6e7e989c482d9f5e02a Reviewed-on: http://android.intel.com:8080/71566 Reviewed-by: Wang, Elaine Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderAVC.cpp | 7 ++++--- videoencoder/VideoEncoderH263.cpp | 4 ++-- videoencoder/VideoEncoderMP4.cpp | 4 ++-- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index a6a0d4b..c4bf805 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -748,7 +748,7 @@ int VideoEncoderAVC::calcLevel(int numMbs) { Encode_Status VideoEncoderAVC::renderSequenceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; - VAEncSequenceParameterBufferH264 avcSeqParams; + VAEncSequenceParameterBufferH264 avcSeqParams = {}; VAEncMiscParameterBuffer *miscEncRCParamBuf; VAEncMiscParameterBuffer *miscEncFrameRateParamBuf; VAEncMiscParameterRateControl *rcMiscParam; @@ -796,7 +796,8 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { rcMiscParam->initial_qp = mComParams.rcParams.initQP; rcMiscParam->min_qp = mComParams.rcParams.minQP; rcMiscParam->window_size = mComParams.rcParams.windowSize; - rcMiscParam->bits_per_second = mComParams.rcParams.bitRate; //for rate control usage + //target bitrate is sent to libva through Sequence Parameter Buffer + rcMiscParam->bits_per_second = 0; rcMiscParam->basic_unit_size = mVideoParamsAVC.basicUnitSize; //for rate control usage avcSeqParams.intra_period = mComParams.intraPeriod; //avcSeqParams.vui_flag = 248; @@ -861,7 +862,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { Encode_Status VideoEncoderAVC::renderPictureParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; - VAEncPictureParameterBufferH264 avcPicParams; + VAEncPictureParameterBufferH264 avcPicParams = {}; LOG_V( "Begin\n\n"); // set picture params for HW diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp index cbe1e09..7371d7a 100644 --- a/videoencoder/VideoEncoderH263.cpp +++ b/videoencoder/VideoEncoderH263.cpp @@ -40,7 +40,7 @@ Encode_Status VideoEncoderH263::sendEncodeCommand(void) { Encode_Status VideoEncoderH263::renderSequenceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; - VAEncSequenceParameterBufferH263 h263SequenceParam; + VAEncSequenceParameterBufferH263 h263SequenceParam = {}; uint32_t frameRateNum = mComParams.frameRate.frameRateNum;
uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom; @@ -81,7 +81,7 @@ Encode_Status VideoEncoderH263::renderSequenceParams() { Encode_Status VideoEncoderH263::renderPictureParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; - VAEncPictureParameterBufferH263 h263PictureParams; + VAEncPictureParameterBufferH263 h263PictureParams = {}; LOG_V( "Begin\n\n"); diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp index d93d774..6e0263b 100644 --- a/videoencoder/VideoEncoderMP4.cpp +++ b/videoencoder/VideoEncoderMP4.cpp @@ -151,7 +151,7 @@ CLEAN_UP: Encode_Status VideoEncoderMP4::renderSequenceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; - VAEncSequenceParameterBufferMPEG4 mp4SequenceParams; + VAEncSequenceParameterBufferMPEG4 mp4SequenceParams = {}; uint32_t frameRateNum = mComParams.frameRate.frameRateNum; uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom; @@ -205,7 +205,7 @@ Encode_Status VideoEncoderMP4::renderSequenceParams() { Encode_Status VideoEncoderMP4::renderPictureParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; - VAEncPictureParameterBufferMPEG4 mpeg4_pic_param; + VAEncPictureParameterBufferMPEG4 mpeg4_pic_param = {}; LOG_V( "Begin\n\n"); // set picture params for HW -- cgit v1.2.3 From 0f4bbe295a3d5d1d6a5092dcf1d8506861b540c7 Mon Sep 17 00:00:00 2001 From: psathisx Date: Thu, 25 Oct 2012 23:19:23 +0530 Subject: Audio-MW: For Long-Duration WMA (2 hours), Progress Bar is not in sync with playback after 1 hour of playback BZ: 59271 A data-type conflict between the "presentationTime(ms)" (uint32_t) set by the AsfExtractor and "mPositionTimeMediaUs(us)" (uint64_t) read from the input buffer in fillBuffer() of AudioPlayer is causing this issue. In AsfExtractor.cpp, the "presentationTime" (ms) is type-cast to uint64_t, which resolves this issue.
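For reference, the overflow point follows directly from the operand widths: with both operands 32-bit, presentationTime * 1000 is evaluated in 32-bit arithmetic and wraps at 2^32 us, which is about 4295 seconds, i.e. roughly 71.6 minutes, matching the reported failure after about an hour. A minimal standalone sketch of the wrap (the sample value below is illustrative, not taken from the patch):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t presentationTimeMs = 4295000u;  /* ~71.6 minutes of playback, in ms */
        /* Without the cast, the product is computed in 32 bits and wraps:
           4295000000 mod 2^32 = 32704 */
        uint32_t wrapped = presentationTimeMs * 1000u;
        /* With the cast, the multiplication is widened to 64 bits first: */
        uint64_t correct = (uint64_t)presentationTimeMs * 1000u;
        printf("wrapped=%u correct=%llu\n", wrapped, (unsigned long long)correct);
        return 0;
    }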
Change-Id: Ibf3ffebb129b62a0e2bbe0ac248bf1f6bcc45fae Signed-off-by: psathisx Reviewed-on: http://android.intel.com:8080/71650 Reviewed-by: Bhakte, GurudattaX Reviewed-by: Jayanti, Satya Charitardha Reviewed-by: M, Arulselvan Reviewed-by: Sameullah, MazharX Reviewed-by: Kandasamy, Muthukumar Reviewed-by: Sikkandar D, Madar Reviewed-by: P C, SreekanthX Tested-by: Gupta, ArvindX K Reviewed-by: buildbot Tested-by: buildbot --- frameworks/asf_extractor/AsfExtractor.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frameworks/asf_extractor/AsfExtractor.cpp b/frameworks/asf_extractor/AsfExtractor.cpp index d2543fe..0fa0e15 100644 --- a/frameworks/asf_extractor/AsfExtractor.cpp +++ b/frameworks/asf_extractor/AsfExtractor.cpp @@ -640,7 +640,7 @@ status_t AsfExtractor::readPacket() { buffer->set_range(0, payload->mediaObjectLength); // kKeyTime is in microsecond unit (usecs) // presentationTime is in millisecond unit (ms) - buffer->meta_data()->setInt64(kKeyTime, payload->presentationTime * 1000); + buffer->meta_data()->setInt64(kKeyTime,(uint64_t) payload->presentationTime * 1000); if (payload->keyframe) { buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); @@ -657,7 +657,7 @@ status_t AsfExtractor::readPacket() { Mutex::Autolock lockTrack(track->lock); MediaBuffer* copy = NULL; track->bufferPool->acquire_buffer(payload->payloadSize, &copy); - copy->meta_data()->setInt64(kKeyTime, payload->presentationTime * 1000); + copy->meta_data()->setInt64(kKeyTime,(uint64_t) payload->presentationTime * 1000); memcpy(copy->data(), payload->payloadData, payload->payloadSize); copy->set_range(0, payload->payloadSize); track->bufferQueue.push(copy); -- cgit v1.2.3 From e1efeb444cf48a251c013d072e19ef1043c24c95 Mon Sep 17 00:00:00 2001 From: Elaine Wang Date: Tue, 30 Oct 2012 10:48:44 +0800 Subject: video editor: refine frame skip disabling code BZ: 65223 Patch 69193 doesn't work well sometimes. So use another libmix interface to disable frame skip in low bitrate encoding. Also fix some uninitialized variables in the libmix encoder.
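A note on the uninitialized-variable fixes in this series: the libva parameter buffers are plain C structs, so any field the encoder never assigns is handed to the driver as indeterminate stack memory unless the struct is zeroed first. The pattern adopted by the earlier "Initialize sequence/picture parameter as zero" patch is C++ value-initialization at the declaration; a minimal sketch (the intra_period value is illustrative):

    #include <va/va.h>  // VAEncSequenceParameterBufferH264

    VAEncSequenceParameterBufferH264 makeSeqParams()
    {
        VAEncSequenceParameterBufferH264 seqParams = {};  // value-initialization: every field starts at zero
        seqParams.intra_period = 30;  // then set only the fields actually used; the rest
                                      // stay zero instead of holding stack garbage
        return seqParams;
    }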
Signed-off-by: Elaine Wang Change-Id: Ied588d0613d94ab6106309c6d2975fbf4cf71978 Reviewed-on: http://android.intel.com:8080/72104 Reviewed-by: Wang, Elaine Reviewed-by: Gu, Wangyi Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- .../stagefrightshells/IntelVideoEditorAVCEncoder.cpp | 12 ++++++++++++ videoencoder/VideoEncoderBase.cpp | 7 ++++--- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp index 3e55e52..9076bbd 100644 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp +++ b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp @@ -178,6 +178,18 @@ status_t IntelVideoEditorAVCEncoder::initCheck(const sp& meta) { CHECK(encStatus == ENCODE_SUCCESS); LOGV("new H264 encoder params set"); + if (disableFrameSkip) { + VideoConfigBitRate configBitrate; + encStatus = mVAEncoder->getConfig(&configBitrate); + CHECK(encStatus == ENCODE_SUCCESS); + LOGV("got encoder config set"); + + configBitrate.rcParams.disableFrameSkip = 1; + encStatus = mVAEncoder->setConfig(&configBitrate); + CHECK(encStatus == ENCODE_SUCCESS); + LOGV("got encoder frame skip/bits stuffing set"); + } + VideoParamsHRD hrdParam; encStatus = mVAEncoder->getParameters(&hrdParam); CHECK(encStatus == ENCODE_SUCCESS); diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index d511ec1..819adfa 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -825,10 +825,10 @@ void VideoEncoderBase::setDefaultParams() { mComParams.intraPeriod = 30; mComParams.rcMode = RATE_CONTROL_NONE; mComParams.rcParams.initQP = 15; - mComParams.rcParams.minQP = 1; + mComParams.rcParams.minQP = 0; mComParams.rcParams.bitRate = 640000; - mComParams.rcParams.targetPercentage= 95; - mComParams.rcParams.windowSize = 500; + mComParams.rcParams.targetPercentage= 0; + mComParams.rcParams.windowSize = 0; mComParams.rcParams.disableFrameSkip = 0; mComParams.rcParams.disableBitsStuffing = 1; mComParams.cyclicFrameInterval = 30; @@ -1960,6 +1960,7 @@ Encode_Status VideoEncoderBase::renderDynamicBitrate() { bitrateControlParam->window_size = mComParams.rcParams.windowSize; bitrateControlParam->rc_flags.bits.disable_frame_skip = mComParams.rcParams.disableFrameSkip; bitrateControlParam->rc_flags.bits.disable_bit_stuffing = mComParams.rcParams.disableBitsStuffing; + bitrateControlParam->basic_unit_size = 0; LOG_I("bits_per_second = %d\n", bitrateControlParam->bits_per_second); LOG_I("initial_qp = %d\n", bitrateControlParam->initial_qp); -- cgit v1.2.3 From 15961d9e14a7630f0ba35a1c1f486fbe45395a8d Mon Sep 17 00:00:00 2001 From: fxiao4X Date: Thu, 1 Nov 2012 17:11:11 +0800 Subject: VideoDecoderAVC: Return the frame info from a multiple frame buffer to decode more. BZ: 63127 The original mix will drop a buffer which contains more than one frame.
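For orientation, the contract this patch introduces: when the parser finds a second frame packed into the same input buffer, decode() returns DECODE_MULTIPLE_FRAME and attaches a VideoExtensionBuffer whose extData points to a PackedFrameData carrying the byte offset and timestamp of the next frame. A plausible caller-side sketch of consuming that contract (the data/size/timeStamp members of VideoDecodeBuffer are assumed names for illustration; the resubmission loop itself is not part of this patch):

    Decode_Status status = decoder->decode(&buffer);
    while (status == DECODE_MULTIPLE_FRAME && buffer.ext != NULL) {
        PackedFrameData* packed = (PackedFrameData*)buffer.ext->extData;
        buffer.data += packed->offSet;         // assumed member: skip past the frame just decoded
        buffer.size -= packed->offSet;         // assumed member: bytes left in this buffer
        buffer.timeStamp = packed->timestamp;  // carry the packed frame's own timestamp
        buffer.ext = NULL;
        status = decoder->decode(&buffer);     // decode the remaining frame(s)
    }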
Change-Id: I94d59e41a5874eeedae076353e66df479f1a3d48 Signed-off-by: fxiao4X Reviewed-on: http://android.intel.com:8080/72469 Reviewed-by: Wang, Yi A Reviewed-by: Guo, Nana N Reviewed-by: Qiu, Junhai Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: lab_cactus Tested-by: lab_cactus --- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 16 +++++++++------- mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 3 ++- mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 5 ++++- videodecoder/VideoDecoderAVC.cpp | 17 +++++++++++++++++ videodecoder/VideoDecoderAVC.h | 2 ++ 5 files changed, 34 insertions(+), 9 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index 1bf8ee6..8592e64 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -1192,7 +1192,6 @@ static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index) /* bit offset from NAL start code to the beginning of slice data */ slc_parms->slice_data_bit_offset = bit + byte * 8; - if (is_emul) { WTRACE("next byte is emulation prevention byte."); @@ -1571,21 +1570,18 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) &(cxt->sc_prefix_info)); if (ret == 1) { - cubby.phase = 0; - if (cxt->list.num_items == 0) { cxt->list.data[0].stpos = cubby.sc_end_pos; } else { - cxt->list.data[cxt->list.num_items - 1].edpos = - cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos; - cxt->list.data[cxt->list.num_items].stpos = - cxt->list.data[cxt->list.num_items - 1].edpos; + cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos; + cxt->list.data[cxt->list.num_items - 1].edpos = cxt->list.data[cxt->list.num_items].stpos - cubby.phase; /* offset before start code */ } + cubby.phase = 0; cubby.buf = cxt->parse_cubby.buf + cxt->list.data[cxt->list.num_items].stpos; @@ -1690,6 +1686,12 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type); break; } + + if (query_data->num_pictures == MAX_NUM_PICTURES && parser->info.img.field_pic_flag != 1) + { + WTRACE("more than one frame in the buffer is found(%d)", query_data->num_pictures); + return (error == VBP_OK ? 
VBP_MULTI : error); + } return error; } diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index 38e2a05..ffeb332 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -338,7 +338,8 @@ enum _vbp_parser_error VBP_DONE, VBP_MEM, VBP_PARM, - VBP_PARTIAL + VBP_PARTIAL, + VBP_MULTI }; enum _vbp_parser_type diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c index 237a02f..42f9c96 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c @@ -388,7 +388,10 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f /* process parsing result */ error = pcontext->func_process_parsing_result(pcontext, i); - if (0 != error) + if (VBP_MULTI == error) { + return VBP_OK; + } + else if (0 != error) { ETRACE("Failed to process parsing result."); return error; diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 2ef67c3..8863738 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -113,6 +113,12 @@ Decode_Status VideoDecoderAVC::decode(VideoDecodeBuffer *buffer) { } status = decodeFrame(buffer, data); + if (status == DECODE_MULTIPLE_FRAME) { + buffer->ext = &mExtensionBuffer; + mExtensionBuffer.extType = PACKED_FRAME_TYPE; + mExtensionBuffer.extSize = sizeof(mPackedFrame); + mExtensionBuffer.extData = (uint8_t*)&mPackedFrame; + } return status; } @@ -228,6 +234,17 @@ Decode_Status VideoDecoderAVC::continueDecodingFrame(vbp_data_h264 *data) { return DECODE_PARSER_FAIL; } + if (picIndex > 0 && + (picData->pic_parms->CurrPic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) == 0) { + // it is a packed frame buffer + vbp_picture_data_h264 *lastPic = &data->pic_data[picIndex - 1]; + vbp_slice_data_h264 *sliceData = &(lastPic->slc_data[lastPic->num_slices - 1]); + mPackedFrame.offSet = sliceData->slice_size + sliceData->slice_offset; + mPackedFrame.timestamp = mCurrentPTS; // use the current time stamp for the packed frame + ITRACE("slice data offset= %d, size = %d", sliceData->slice_offset, sliceData->slice_size); + return DECODE_MULTIPLE_FRAME; + } + for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) { status = decodeSlice(data, picIndex, sliceIndex); if (status != DECODE_SUCCESS) { diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h index 339e05a..799ae2e 100644 --- a/videodecoder/VideoDecoderAVC.h +++ b/videodecoder/VideoDecoderAVC.h @@ -77,6 +77,8 @@ private: uint8_t mToggleDPB; // 0 or 1 bool mErrorConcealment; uint32_t mLastPictureFlags; + VideoExtensionBuffer mExtensionBuffer; + PackedFrameData mPackedFrame; }; -- cgit v1.2.3 From f1e88cd4dd628dcf8ca6e165b5062e3dbe0516b6 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Tue, 20 Nov 2012 15:46:51 +0800 Subject: [JB MR1] Disable asf extractor and video editor in libmix temporarily for dependency reasons BZ: 69200 Change-Id: I997027a3416492d388f677ca22d99d47ee6047aa Signed-off-by: Dan Liang Reviewed-on: http://android.intel.com:8080/74805 Reviewed-by: Zhao, Leo Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- Android.mk | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Android.mk b/Android.mk index 20651e9..c0c1ebc 100644 --- a/Android.mk +++ b/Android.mk @@ -5,12 +5,12 @@ ifeq ($(INTEL_VA),true) include $(CLEAR_VARS) VENDORS_INTEL_MRST_LIBMIX_ROOT := 
$(LOCAL_PATH) include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_vbp/Android.mk -include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk +#include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk -include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/asf_extractor/Android.mk +#include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/asf_extractor/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/vavideodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/libI420colorconvert/Android.mk -include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/videoedit/Android.mk +#include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/videoedit/Android.mk endif -- cgit v1.2.3 From 9de91157bd0537094599b335ef71a442404237e8 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Fri, 23 Nov 2012 21:55:09 +0800 Subject: To enable video editor on JB MR1 BZ: 70001 To enable video editor on JB MR1 Change-Id: I5d93d4d2a0a6acd42df7b76ae2ec7d6fe54db190 Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/75662 Reviewed-by: lab_cactus Tested-by: Ding, Haitao Reviewed-by: Tong, BoX Tested-by: Tong, BoX --- Android.mk | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Android.mk b/Android.mk index c0c1ebc..a46ef00 100644 --- a/Android.mk +++ b/Android.mk @@ -11,6 +11,6 @@ include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk #include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/asf_extractor/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/vavideodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/libI420colorconvert/Android.mk -#include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/videoedit/Android.mk +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/videoedit/Android.mk endif -- cgit v1.2.3 From acad5b82097247e4e4b36fc41679a43b48658da1 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Sun, 25 Nov 2012 15:12:48 +0800 Subject: [JB MR1] Revert patch for disabling asf extractor and video editor in libmix BZ: 69200 Enable asf extractor Change-Id: Ie6ba75490b27d8ceeabb6569a4548762ede8396c Signed-off-by: Dan Liang Reviewed-on: http://android.intel.com:8080/75860 Tested-by: Tong, BoX Reviewed-by: Tong, BoX --- Android.mk | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Android.mk b/Android.mk index a46ef00..20651e9 100644 --- a/Android.mk +++ b/Android.mk @@ -5,10 +5,10 @@ ifeq ($(INTEL_VA),true) include $(CLEAR_VARS) VENDORS_INTEL_MRST_LIBMIX_ROOT := $(LOCAL_PATH) include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_vbp/Android.mk -#include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk -#include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/asf_extractor/Android.mk +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/asf_extractor/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/vavideodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/libI420colorconvert/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/videoedit/Android.mk -- cgit v1.2.3 From 75f3d10570be2f5bee007ceb641664d7837af135 Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Fri, 2 Nov 2012 17:24:26 +0800 Subject: [PORT FROM MAIN] Enable HW VP8 decoder for merrifield VP 
[MRFLD-VIDEO] BZ: 66073 Enable HW VP8 decoder for merrifield VP Change-Id: I91ce7f1eaa65b4014c38ee794f7a3ad22925b85e Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/76396 Tested-by: Tong, BoX Reviewed-by: Tong, BoX Reviewed-by: cactus Tested-by: cactus --- mix_vbp/Android.mk | 9 + .../viddec_fw/fw/codecs/vp8/include/bool_coder.h | 54 ++ mix_vbp/viddec_fw/fw/codecs/vp8/include/vp8.h | 356 ++++++++++++ .../viddec_fw/fw/codecs/vp8/include/vp8_tables.h | 538 ++++++++++++++++++ mix_vbp/viddec_fw/fw/codecs/vp8/include/vp8parse.h | 72 +++ mix_vbp/viddec_fw/fw/codecs/vp8/parser/Android.mk | 25 + .../viddec_fw/fw/codecs/vp8/parser/bool_coder.c | 95 ++++ .../fw/codecs/vp8/parser/viddec_vp8_parse.c | 121 +++++ mix_vbp/viddec_fw/fw/codecs/vp8/parser/vp8parse.c | 605 +++++++++++++++++++++ mix_vbp/viddec_fw/fw/parser/Android.mk | 10 + .../viddec_fw/fw/parser/include/viddec_vp8_parse.h | 6 + mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 61 ++- mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 22 +- mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c | 540 ++++++++++++++++++ mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.h | 67 +++ videodecoder/Android.mk | 11 +- videodecoder/VideoDecoderHost.cpp | 13 +- videodecoder/VideoDecoderVP8.cpp | 339 +++++++++++- videodecoder/VideoDecoderVP8.h | 49 +- 19 files changed, 2970 insertions(+), 23 deletions(-) create mode 100644 mix_vbp/viddec_fw/fw/codecs/vp8/include/bool_coder.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/vp8/include/vp8.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/vp8/include/vp8_tables.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/vp8/include/vp8parse.h create mode 100644 mix_vbp/viddec_fw/fw/codecs/vp8/parser/Android.mk create mode 100644 mix_vbp/viddec_fw/fw/codecs/vp8/parser/bool_coder.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vp8/parser/viddec_vp8_parse.c create mode 100644 mix_vbp/viddec_fw/fw/codecs/vp8/parser/vp8parse.c create mode 100644 mix_vbp/viddec_fw/fw/parser/include/viddec_vp8_parse.h create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.h diff --git a/mix_vbp/Android.mk b/mix_vbp/Android.mk index 42082ef..3899789 100644 --- a/mix_vbp/Android.mk +++ b/mix_vbp/Android.mk @@ -7,3 +7,12 @@ include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/Android.mk include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/h264/parser/Android.mk include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/parser/Android.mk include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vc1/parser/Android.mk + +# Add source codes for Merrifield +MERRIFIELD_PRODUCT := \ + mrfl_vp \ + mrfl_hvp \ + mrfl_sle +ifneq ($(filter $(TARGET_PRODUCT),$(MERRIFIELD_PRODUCT)),) +include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vp8/parser/Android.mk +endif diff --git a/mix_vbp/viddec_fw/fw/codecs/vp8/include/bool_coder.h b/mix_vbp/viddec_fw/fw/codecs/vp8/include/bool_coder.h new file mode 100644 index 0000000..57660b7 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vp8/include/bool_coder.h @@ -0,0 +1,54 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. 
The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef _BOOL_CODER_H_ +#define _BOOL_CODER_H_ + +#include +#include +#include +#include + +typedef struct _BOOL_CODER +{ + uint32_t range; // always identical to encoder's range + uint32_t value; // contains at least 24 significant bits + int32_t count; // # of bits shifted out of value, at most 7 + uint32_t pos; + uint8_t *buffer; // pointer to next compressed data byte to be read +} BOOL_CODER; + +typedef struct _BITREADER +{ + int32_t bitsinremainder; // # of bits still used in remainder + uint32_t remainder; // remaining bits from original long + const uint8_t *position; // character pointer position within data +} BITREADER; + +void vp8_start_decode(BOOL_CODER *br, uint8_t *source); +int32_t vp8_decode_bool(BOOL_CODER *br, int32_t probability); +uint32_t vp8_read_bits(BOOL_CODER *br, int32_t bits); + +#endif diff --git a/mix_vbp/viddec_fw/fw/codecs/vp8/include/vp8.h b/mix_vbp/viddec_fw/fw/codecs/vp8/include/vp8.h new file mode 100644 index 0000000..06a7e61 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vp8/include/vp8.h @@ -0,0 +1,356 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing.
+* +*/ + +#ifndef _VP8_H_ +#define _VP8_H_ +#include "bool_coder.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/* VP8 specifies only frame is supported */ +#define VP8_MAX_NUM_PICTURES 1 +/* VP8 has no definition of slice */ +#define VP8_MAX_NUM_SLICES 1 + +#define MAX_MB_SEGMENTS 4 +#define MB_FEATURE_TREE_PROBS 3 +#define MAX_REF_LF_DELTAS 4 +#define MAX_MODE_LF_DELTAS 4 +#define MAX_PARTITIONS 9 +#define BLOCK_TYPES 4 +#define COEF_BANDS 8 +#define PREV_COEF_CONTEXTS 3 +#define MAX_COEF_TOKENS 12 +#define MAX_ENTROPY_TOKENS 12 +#define SEGMENT_DELTADATA 0 +#define SEGMENT_ABSDATA 1 +#define MAX_LOOP_FILTER 63 +#define MAX_QINDEX 127 + + typedef uint8_t vp8_prob; + + typedef enum + { + /*!\brief Operation completed without error */ + VP8_NO_ERROR, + + /*!\brief Unspecified error */ + VP8_UNKNOWN_ERROR, + + /*!\brief Memory operation failed */ + VP8_MEMORY_ERROR, + + VP8_NO_INITIALIZATION, + + VP8_CORRUPT_FRAME, + + VP8_UNSUPPORTED_BITSTREAM, + + VP8_UNSUPPORTED_VERSION, + + VP8_INVALID_FRAME_SYNC_CODE, + + VP8_UNEXPECTED_END_OF_BITSTREAM, + + } vp8_Status; + + enum + { + VP8_MV_max = 1023, /* max absolute value of a MV component */ + VP8_MV_vals = (2 * VP8_MV_max) + 1, /* # possible values "" */ + + VP8_MV_long_width = 10, /* Large MVs have 9 bit magnitudes */ + VP8_MV_num_short = 8, /* magnitudes 0 through 7 */ + + /* probability offsets for coding each MV component */ + VP8_MV_pis_short = 0, /* short (<= 7) vs long (>= 8) */ + VP8_MV_Psign, /* sign for non-zero */ + VP8_MV_Pshort, /* 8 short values = 7-position tree */ + + VP8_MV_Pbits = VP8_MV_Pshort + VP8_MV_num_short - 1, /* mvlong_width long value bits */ + VP8_MV_Pcount = VP8_MV_Pbits + VP8_MV_long_width /* (with independent probabilities) */ + }; + + typedef enum + { + DC_PRED, // average of above and left pixels + V_PRED, // vertical prediction + H_PRED, // horizontal prediction + TM_PRED, // TrueMotion prediction + B_PRED, // block based prediction, each block has its own prediction mode + NEARESTMV, + NEARMV, + ZEROMV, + NEWMV, + SPLITMV, + MB_MODE_COUNT + } VP8_MB_PREDICTION_MODE; + +// Segment Feature Masks +#define VP8_SEGMENT_ALTQ 0x01 +#define VP8_SEGMENT_ALT_LF 0x02 + +#define VP8_YMODES (B_PRED + 1) +#define VP8_UV_MODES (TM_PRED + 1) + +#define VP8_MVREFS (1 + SPLITMV - NEARESTMV) + + typedef enum + { + B_DC_PRED, // average of above and left pixels + B_TM_PRED, + + B_VE_PRED, // vertical prediction + B_HE_PRED, // horizontal prediction + + B_LD_PRED, + B_RD_PRED, + + B_VR_PRED, + B_VL_PRED, + B_HD_PRED, + B_HU_PRED, + + LEFT4X4, + ABOVE4X4, + ZERO4X4, + NEW4X4, + + B_MODE_COUNT + } VP8_B_PREDICTION_MODE; + +#define VP8_BINTRAMODES (B_HU_PRED + 1) /* 10 */ +#define VP8_SUBMVREFS (1 + NEW4X4 - LEFT4X4) + +// frame type + typedef enum + { + KEY_FRAME = 0, + INTER_FRAME, + SKIPPED_FRAME + } FRAME_TYPE; + + +// Color Space + typedef enum + { + REG_YUV = 0, /* Regular yuv */ + INT_YUV = 1 /* The type of yuv that can be transferred to and from RGB through integer transform */ + } YUV_TYPE; + +// Clamp type + typedef enum + { + RECON_CLAMP_REQUIRED = 0, + RECON_CLAMP_NOTREQUIRED = 1 + } CLAMP_TYPE; + + /* Token partition */ + typedef enum + { + ONE_PARTITION = 0, + TWO_PARTITION = 1, + FOUR_PARTITION = 2, + EIGHT_PARTITION = 3 + } TOKEN_PARTITION; + +// Buffer copied + typedef enum + { + BufferCopied_NoneToGolden = 0, + BufferCopied_LastToGolden = 1, + BufferCopied_AltRefToGolden = 2 + } GoldenBufferCopiedType; + + typedef enum + { + BufferCopied_NoneToAltref = 0, + BufferCopied_LastToAltRef = 1, + BufferCopied_GoldenToAltRef
= 2 + } AltRefBufferCopiedType; + +// Macroblock level features + typedef enum + { + MB_LVL_ALT_Q = 0, /* Use alternate Quantizer .... */ + MB_LVL_ALT_LF = 1, /* Use alternate loop filter value... */ + MB_LVL_MAX = 2 /* Number of MB level features supported */ + } MB_LVL_FEATURES; + +// Loop filter Type + typedef enum + { + NORMAL_LOOPFILTER = 0, + SIMPLE_LOOPFILTER = 1 + } LoopFilterType; + +// Segmentation data + typedef struct + { + uint8_t Enabled; + uint8_t UpdateMap; + uint8_t UpdateData; + uint8_t AbsDelta; + int8_t FeatureData[MB_LVL_MAX][MAX_MB_SEGMENTS]; + vp8_prob TreeProbs[MB_FEATURE_TREE_PROBS]; + } SegmentationData; + +// Loop filter data + typedef struct + { + LoopFilterType Type; + uint8_t Level; + uint8_t Sharpness; + uint8_t DeltaEnabled; + uint8_t DeltaUpdate; + int8_t DeltasRef[MAX_REF_LF_DELTAS]; + int8_t DeltasMode[MAX_MODE_LF_DELTAS]; + } LoopFilterData; + +// Quantization data + typedef struct + { + int8_t Y1_AC; + int8_t Y1_DC_Delta; + int8_t Y2_DC_Delta; + int8_t Y2_AC_Delta; + int8_t UV_DC_Delta; + int8_t UV_AC_Delta; + } QuantizationData; + +// Frame context + typedef struct + { + vp8_prob B_Mode_Prob[VP8_BINTRAMODES][VP8_BINTRAMODES][VP8_BINTRAMODES-1]; + vp8_prob Y_Mode_Prob [VP8_YMODES-1]; /* interframe intra mode probs */ + vp8_prob UV_Mode_Prob [VP8_UV_MODES-1]; + vp8_prob DCT_Coefficients [BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [MAX_COEF_TOKENS-1]; + vp8_prob MVContext[2][VP8_MV_Pcount]; + vp8_prob Pre_MVContext[2][VP8_MV_Pcount]; //not to calculate the mvcost for the frame if mvc doesn't change. + } FrameContextData; + +// Extern to tables + extern const vp8_prob VP8_Coefficient_Default_Probabilites[BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [MAX_COEF_TOKENS-1]; + extern const vp8_prob VP8_Coefficient_Update_Probabilites[BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [MAX_COEF_TOKENS-1]; + extern const int VP8_MB_FeatureDataBits[MB_LVL_MAX]; + extern const vp8_prob VP8_BMode_Const[VP8_BINTRAMODES][VP8_BINTRAMODES][VP8_BINTRAMODES-1]; + extern const vp8_prob VP8_YMode_Const[VP8_YMODES-1]; + extern const vp8_prob VP8_UVMode_Const[VP8_UV_MODES-1]; + extern const vp8_prob VP8_MV_UpdateProbs[2][VP8_MV_Pcount], VP8_MV_DefaultMVContext[2][VP8_MV_Pcount]; + + typedef struct + { + FRAME_TYPE frame_type; + uint8_t version; + uint8_t show_frame; + uint32_t first_part_size; + } FrameTagHeader; + + typedef struct _vp8_Info + { + // Frame Tag Header + FrameTagHeader frame_tag; + + // Key Frame data + uint32_t width; + uint32_t height; + uint32_t horiz_scale; + uint32_t vert_scale; + YUV_TYPE clr_type; + CLAMP_TYPE clamp_type; + + vp8_prob prob_intra; + vp8_prob prob_lf; + vp8_prob prob_gf; + + uint8_t y_prob_valid; + uint8_t c_prob_valid; + + uint32_t header_bits; + uint32_t frame_data_offset; + + uint8_t *source; + uint32_t source_sz; + + // Decoded picture number + uint32_t decoded_frame_number; + + BOOL_CODER bool_coder; + + // Refresh flags + uint8_t refresh_lf; + + uint8_t refresh_gf; + uint8_t refresh_af; + uint8_t sign_bias_golden; + uint8_t sign_bias_alternate; + + GoldenBufferCopiedType golden_copied; + AltRefBufferCopiedType altref_copied; + + // Segmentation data + SegmentationData Segmentation; + + // Loop filter data + LoopFilterData LoopFilter; + + // Partitions + uint8_t partition_count; + uint8_t partition_number; + uint32_t partition_size[1<=0; bit--) + { + z |= (vp8_decode_bool(br, 128)<range = 255; + br->count = 8; + br->buffer = source; + br->pos = 0; + br->value =
(br->buffer[0]<<24)+(br->buffer[1]<<16)+(br->buffer[2]<<8)+(br->buffer[3]); + br->pos += 4; +} + +int32_t vp8_decode_bool(BOOL_CODER *br, int32_t probability) +{ + uint32_t bit=0; + uint32_t split; + uint32_t bigsplit; + uint32_t count = br->count; + uint32_t range = br->range; + uint32_t value = br->value; + + split = 1 + (((range-1) * probability) >> 8); + bigsplit = (split<<24); + + range = split; + if(value >= bigsplit) + { + range = br->range-split; + value = value-bigsplit; + bit = 1; + } + + if(range>=0x80) + { + br->value = value; + br->range = range; + return bit; + } + else + { + do + { + range +=range; + value +=value; + + if (!--count) + { + count = 8; + value |= br->buffer[br->pos]; + br->pos++; + } + } + while(range < 0x80 ); + } + br->count = count; + br->value = value; + br->range = range; + return bit; +} diff --git a/mix_vbp/viddec_fw/fw/codecs/vp8/parser/viddec_vp8_parse.c b/mix_vbp/viddec_fw/fw/codecs/vp8/parser/viddec_vp8_parse.c new file mode 100644 index 0000000..b5dfbe6 --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/vp8/parser/viddec_vp8_parse.c @@ -0,0 +1,121 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+*
+*/
+
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+
+#include
+#include "vp8.h"
+#include "vp8parse.h"
+#include "viddec_vp8_parse.h"
+
+/* Init function which can be called to initialize local context on open, flush and preserve */
+void viddec_vp8_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+{
+    vp8_viddec_parser* parser = ctxt;
+    vp8_Info *pi = &(parser->info);
+
+    /* Avoid compiler warning */
+    persist_mem = persist_mem;
+
+    if (!preserve)
+    {
+        /* Init frame header information */
+        vp8_init_Info(pi);
+    }
+    else
+    {
+        /* Preserve the frame context; reset only the per-stream state */
+        pi->decoded_frame_number = 0;
+        pi->refresh_entropy_lf = 1;
+    }
+
+    parser->got_start = 1;
+    return;
+}
+
+uint32_t viddec_vp8_parse(void *parent, void *ctxt)
+{
+    vp8_Status status = VP8_NO_ERROR;
+
+    vp8_viddec_parser *parser = (vp8_viddec_parser*)ctxt;
+    if (1 != parser->got_start) return VP8_NO_INITIALIZATION;
+
+    vp8_Info *pi = &(parser->info);
+    viddec_pm_cxt_t *pm_cxt = (viddec_pm_cxt_t *)parent;
+    pi->source = pm_cxt->parse_cubby.buf;
+    pi->source_sz = pm_cxt->parse_cubby.size;
+
+    if (pi->source_sz < 0)
+    {
+        return VP8_UNEXPECTED_END_OF_BITSTREAM;
+    }
+    else if (pi->source_sz == 0)
+    {
+        pi->frame_tag.frame_type = SKIPPED_FRAME;
+        status = VP8_NO_ERROR;
+    }
+    else if (pi->source_sz > 0)
+    {
+        status = vp8_parse_frame_header(parser);
+    }
+
+    return status;
+}
+
+uint32_t viddec_vp8_wkld_done(void *parent, void *ctxt, unsigned int next_sc,
+                              uint32_t *codec_specific_errors)
+{
+    return 0;
+}
+
+void viddec_vp8_get_context_size(viddec_parser_memory_sizes_t *size)
+{
+    /* Should return size of my structure */
+    size->context_size = sizeof(vp8_viddec_parser);
+    size->persist_size = 0;
+    return;
+}
+
+uint32_t viddec_vp8_is_frame_start(void *ctxt)
+{
+    vp8_viddec_parser* parser = ctxt;
+
+    return parser->got_start;
+}
+
+void viddec_vp8_get_ops(viddec_parser_ops_t *ops)
+{
+    ops->init = viddec_vp8_init;
+
+    ops->parse_syntax = viddec_vp8_parse;
+    ops->get_cxt_size = viddec_vp8_get_context_size;
+    ops->is_wkld_done = viddec_vp8_wkld_done;
+    ops->is_frame_start = viddec_vp8_is_frame_start;
+    return;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vp8/parser/vp8parse.c b/mix_vbp/viddec_fw/fw/codecs/vp8/parser/vp8parse.c
new file mode 100644
index 0000000..4f15736
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vp8/parser/vp8parse.c
@@ -0,0 +1,605 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation. All rights reserved.
+* Copyright (c) Imagination Technologies Limited, UK
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#include "vp8_tables.h"
+#include "vp8parse.h"
+
+static const uint8_t kVp8SyncCodeByte[] = {0x9d, 0x01, 0x2a};
+
+void vp8_init_Info(vp8_Info *pi)
+{
+    memset(pi, 0, sizeof(vp8_Info));
+
+    /* Initialise the parser */
+    pi->decoded_frame_number = 0;
+    pi->refresh_entropy_lf = 1;
+}
+
+int32_t vp8_parse_frame_tag(FrameTagHeader *frame_tag, uint8_t *data, uint32_t data_sz)
+{
+    if (data_sz < 3)
+    {
+        return VP8_CORRUPT_FRAME;
+    }
+
+    /* 1-bit frame type */
+    frame_tag->frame_type = (FRAME_TYPE)(data[0] & 1);
+
+    /* 3-bit version number */
+    frame_tag->version = (data[0] >> 1) & 7;
+    if (frame_tag->version > 3)
+    {
+        return VP8_UNSUPPORTED_VERSION;
+    }
+
+    /* 1-bit show frame flag */
+    frame_tag->show_frame = (data[0] >> 4) & 1;
+
+    /* 19-bit field containing the size of the first data partition in bytes */
+    frame_tag->first_part_size = (data[0] | (data[1] << 8) | (data[2] << 16)) >> 5;
+
+    return VP8_NO_ERROR;
+}
+
+void vp8_init_frame(vp8_Info *pi)
+{
+    pi->golden_copied = BufferCopied_NoneToGolden;
+    pi->altref_copied = BufferCopied_NoneToAltref;
+
+    if (pi->frame_tag.frame_type == KEY_FRAME)
+    {
+        /* Various keyframe initializations */
+        /* vp8_prob data initialization */
+        memcpy(pi->FrameContext.B_Mode_Prob, VP8_BMode_Const, sizeof(VP8_BMode_Const));
+        memcpy(pi->FrameContext.Y_Mode_Prob, VP8_YMode_Const, sizeof(VP8_YMode_Const));
+        memcpy(pi->FrameContext.UV_Mode_Prob, VP8_UVMode_Const, sizeof(VP8_UVMode_Const));
+        memcpy(pi->FrameContext.MVContext, VP8_MV_DefaultMVContext, sizeof(VP8_MV_DefaultMVContext));
+        memcpy(pi->FrameContext.DCT_Coefficients, VP8_Coefficient_Default_Probabilites, sizeof(VP8_Coefficient_Default_Probabilites));
+
+        /* reset the segment feature data to 0 with delta coding (Default state) */
+        memset(pi->Segmentation.FeatureData, 0, sizeof(pi->Segmentation.FeatureData));
+        pi->Segmentation.AbsDelta = SEGMENT_DELTADATA;
+
+        /* reset the mode ref deltas for loop filter */
+        memset(pi->LoopFilter.DeltasRef, 0, sizeof(pi->LoopFilter.DeltasRef));
+        memset(pi->LoopFilter.DeltasMode, 0, sizeof(pi->LoopFilter.DeltasMode));
+
+        /* All buffers are implicitly updated on key frames */
+        pi->refresh_gf = 1;
+        pi->refresh_af = 1;
+
+        pi->sign_bias_golden = 0;
+        pi->sign_bias_alternate = 0;
+    }
+    else if (pi->frame_tag.frame_type == INTER_FRAME)
+    {
+        pi->refresh_gf = 0;
+        pi->refresh_af = 0;
+    }
+}
+
+/* This function provides vp8_prob and value information for implementing
+ * segment adaptive adjustments to default decoder behaviors.
+ * The data parsed here applies to the entire frame. The adjustments can be
+ * quantization level or loop filter strength.
+ * */
+void vp8_parse_segmentation_adjustments_data(vp8_Info *pi)
+{
+    int i, j;
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    const int *const mb_feature_data_bits = VP8_MB_FeatureDataBits;
+
+    /* Is segmentation enabled */
+    pi->Segmentation.Enabled = (uint8_t)vp8_decode_bool(bc, 128); // chapter 9.2 - macroblock uses segments ? 1 : 0
+
+    if (pi->Segmentation.Enabled)
+    {
+        /* Signal whether or not the segmentation map is being explicitly updated this frame */
+        pi->Segmentation.UpdateMap = (uint8_t)vp8_decode_bool(bc, 128);
+        pi->Segmentation.UpdateData = (uint8_t)vp8_decode_bool(bc, 128);
+
+        if (pi->Segmentation.UpdateData)
+        {
+            pi->Segmentation.AbsDelta = (uint8_t)vp8_decode_bool(bc, 128);
+
+            memset(pi->Segmentation.FeatureData, 0, sizeof(pi->Segmentation.FeatureData));
+
+            /* For each segmentation feature (Quant and loop filter level) */
+            for (i = 0; i < MB_LVL_MAX; ++i)
+            {
+                for (j = 0; j < MAX_MB_SEGMENTS; ++j)
+                {
+                    /* Frame level data */
+                    if (vp8_decode_bool(bc, 128))
+                    {
+                        /* Parse magnitude */
+                        pi->Segmentation.FeatureData[i][j] = (int8_t)vp8_read_bits(bc, mb_feature_data_bits[i]);
+
+                        /* Parse sign data */
+                        if (vp8_decode_bool(bc, 128))
+                        {
+                            pi->Segmentation.FeatureData[i][j] = -pi->Segmentation.FeatureData[i][j];
+                        }
+                    }
+                    else
+                    {
+                        pi->Segmentation.FeatureData[i][j] = 0;
+                    }
+                }
+            }
+
+        }
+
+        if (pi->Segmentation.UpdateMap)
+        {
+            /* Which macro block level features are enabled */
+            memset(pi->Segmentation.TreeProbs, 255, sizeof(pi->Segmentation.TreeProbs));
+
+            /* Read the probs used to decode the segment id for each macro block */
+            for (i = 0; i < MB_FEATURE_TREE_PROBS; ++i)
+            {
+                /* If not explicitly set, the value is defaulted to 255 by the memset above */
+                if (vp8_decode_bool(bc, 128))
+                {
+                    pi->Segmentation.TreeProbs[i] = (uint8_t)vp8_read_bits(bc, 8);
+                }
+            }
+        }
+    }
+}
+
+/* VP8 supports two types of loop filter. The data parsed in the header
+ * supports the selection of the type, strength and sharpness behavior
+ * of the loop filter used for the current frame.
+ */
+void vp8_parse_loop_filter_type_level(vp8_Info *pi)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* Read the loop filter level and type */
+    pi->LoopFilter.Type = (LoopFilterType)vp8_decode_bool(bc, 128);
+    pi->LoopFilter.Level = (uint8_t)vp8_read_bits(bc, 6);
+    pi->LoopFilter.Sharpness = (uint8_t)vp8_read_bits(bc, 3);
+}
+
+/* This function provides flag and value information for implementing
+ * per-macroblock loop filter level adjustments to default decoder
+ * behaviors. Data parsed here applies to the entire frame.
+ */
+void vp8_parse_loop_filter_adjustments_data(vp8_Info *pi)
+{
+    int i;
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* Read in loop filter deltas applied at the MB level based on mode or ref frame */
+    pi->LoopFilter.DeltaUpdate = 0;
+    pi->LoopFilter.DeltaEnabled = (uint8_t)vp8_decode_bool(bc, 128);
+
+    if (pi->LoopFilter.DeltaEnabled)
+    {
+        /* Do the deltas need to be updated */
+        pi->LoopFilter.DeltaUpdate = (uint8_t)vp8_decode_bool(bc, 128);
+
+        if (pi->LoopFilter.DeltaUpdate)
+        {
+            /* Update based on reference */
+            for (i = 0; i < MAX_REF_LF_DELTAS; ++i)
+            {
+                if (vp8_decode_bool(bc, 128))
+                {
+                    pi->LoopFilter.DeltasRef[i] = (int8_t)vp8_read_bits(bc, 6);
+
+                    /* Parse sign */
+                    if (vp8_decode_bool(bc, 128))
+                    {
+                        pi->LoopFilter.DeltasRef[i] = -1 * pi->LoopFilter.DeltasRef[i];
+                    }
+                }
+            }
+
+            /* Update based on macroblock mode */
+            for (i = 0; i < MAX_MODE_LF_DELTAS; ++i)
+            {
+                if (vp8_decode_bool(bc, 128))
+                {
+                    pi->LoopFilter.DeltasMode[i] = (int8_t)vp8_read_bits(bc, 6);
+
+                    /* Parse sign */
+                    if (vp8_decode_bool(bc, 128))
+                    {
+                        pi->LoopFilter.DeltasMode[i] = -1 * pi->LoopFilter.DeltasMode[i];
+                    }
+                }
+            } /* End for (i = 0; i < MAX_MODE_LF_DELTAS; ++i) */
+        } /* End if (pi->LoopFilter.DeltaUpdate) */
+    }
+}
+
+/* Token partition and partition data offsets */
+void vp8_parse_token_partition_data(vp8_Info *pi, uint8_t *cx_size)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+    uint8_t *partition = NULL;
+    uint8_t *source_end = pi->source + pi->source_sz;
+    uint32_t partition_size = 0, i = 0;
+    uint8_t *partition_size_ptr = NULL;
+
+    /* Parse number of token partitions to use */
+    pi->partition_count = 1 << (uint8_t)vp8_read_bits(bc, 2);
+
+    /* Set up pointers to the first partition */
+    partition = cx_size;
+    if (pi->partition_count > 1)
+    {
+        /* Each partition offset is written in 3 bytes */
+        partition += 3 * (pi->partition_count - 1);
+    }
+
+    for (i = 0; i < pi->partition_count; i++)
+    {
+        partition_size_ptr = cx_size + i * 3;
+
+        if (i < pi->partition_count - 1)
+        {
+            pi->partition_size[i] = vp8_read_partition_size(partition_size_ptr);
+        }
+        else
+        {
+            /* Last offset can be calculated implicitly */
+            pi->partition_size[i] = source_end - partition;
+        }
+
+        partition += pi->partition_size[i];
+    }
+}
+
+int32_t vp8_read_partition_size(uint8_t *cx_size)
+{
+    uint32_t size = cx_size[0] + (cx_size[1] << 8) + (cx_size[2] << 16);
+
+    return size;
+}
+
+int read_q_delta(BOOL_CODER *bool_coder)
+{
+    int q_delta = 0;
+
+    /* presence flag */
+    if (vp8_decode_bool(bool_coder, 128))
+    {
+        /* magnitude */
+        q_delta = (uint8_t)vp8_read_bits(bool_coder, 4);
+
+        /* sign */
+        if (vp8_decode_bool(bool_coder, 128))
+        {
+            q_delta = -q_delta;
+        }
+    }
+
+    return q_delta;
+}
+
+/* Read the default quantizers */
+void vp8_parse_dequantization_indices(vp8_Info *pi)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* AC 1st order Q = default as a baseline for other 5 items */
+    pi->Quantization.Y1_AC = (int8_t)vp8_read_bits(bc, 7);
+    pi->Quantization.Y1_DC_Delta = (int8_t)read_q_delta(bc);
+    pi->Quantization.Y2_DC_Delta = (int8_t)read_q_delta(bc);
+    pi->Quantization.Y2_AC_Delta = (int8_t)read_q_delta(bc);
+    pi->Quantization.UV_DC_Delta = (int8_t)read_q_delta(bc);
+    pi->Quantization.UV_AC_Delta = (int8_t)read_q_delta(bc);
+}
+
+
+/* Determine if the golden frame or ARF buffer should be updated and how.
+ * For all non key frames the GF and ARF refresh flags and sign bias
+ * flags must be set explicitly.
+ */ +void vp8_parse_gf_af_refresh_flags(vp8_Info *pi) +{ + BOOL_CODER *bc = &(pi->bool_coder); + + /* Read Golden and AltRef frame refresh */ + pi->refresh_gf = (uint8_t)vp8_decode_bool(bc, 128); + pi->refresh_af = (uint8_t)vp8_decode_bool(bc, 128); + + /* If not refreshed using the current reconstructed frame */ + if (0 == pi->refresh_gf) + { + /* 2 bit indicating which buffer is copied to golden frame */ + pi->golden_copied = (GoldenBufferCopiedType)(int8_t)vp8_read_bits(bc, 2); + } + else + { + /* No buffer is copied */ + pi->golden_copied = (GoldenBufferCopiedType)0; + } + + if (0 == pi->refresh_af) + { + /* 2 bit indicating which buffer is copied to alternative frame */ + pi->altref_copied = (AltRefBufferCopiedType)vp8_read_bits(bc, 2); + } + else + { + pi->altref_copied = (AltRefBufferCopiedType)0; + } + + pi->sign_bias_golden = (uint8_t)vp8_decode_bool(bc, 128); + pi->sign_bias_alternate = (uint8_t)vp8_decode_bool(bc, 128); + +} + +void vp8_parse_coef_probs_tree(vp8_Info *pi) +{ + int i, j, k, l; + + BOOL_CODER *bc = &(pi->bool_coder); + + /* DCT coeffienct probability tree update */ + for (i = 0; i < BLOCK_TYPES; i++) + { + for (j = 0; j < COEF_BANDS; j++) + { + for (k = 0; k < PREV_COEF_CONTEXTS; k++) + { + for (l = 0; l < MAX_COEF_TOKENS - 1; l++) + { + if (vp8_decode_bool(bc, VP8_Coefficient_Update_Probabilites[i][j][k][l])) + { + pi->FrameContext.DCT_Coefficients[i][j][k][l] = (vp8_prob)vp8_read_bits(bc, 8); + } + } + } + } + } +} + +/* Parse remaining non-key-frame only data from frame header */ +void vp8_parse_mb_mv_info(vp8_Info *pi) +{ + // read_mvcontexts + int i = 0; + + BOOL_CODER *bc = &(pi->bool_coder); + + do + { + const vp8_prob *up = VP8_MV_UpdateProbs[i]; + vp8_prob *p = pi->FrameContext.MVContext[i]; + vp8_prob *const pstop = p + VP8_MV_Pcount; + + do + { + if (vp8_decode_bool(bc , *up++ )) + { + const vp8_prob x = (vp8_prob)vp8_read_bits(bc, 7); + + *p = x ? 
x << 1 : 1; + } + } + while (++p < pstop); + } + while (++i < 2); +} + +/* Parse remaining non-key-frame only data from frame header */ +void vp8_parse_yuv_probs_update(vp8_Info *pi) +{ + BOOL_CODER *bc = &(pi->bool_coder); + + /* Read probabilities */ + pi->prob_intra = (vp8_prob)vp8_read_bits(bc, 8); + pi->prob_lf = (vp8_prob)vp8_read_bits(bc, 8); + pi->prob_gf = (vp8_prob)vp8_read_bits(bc, 8); + + pi->y_prob_valid = (uint8_t)vp8_decode_bool(bc , 128); + if (1 == pi->y_prob_valid) + { + pi->FrameContext.Y_Mode_Prob[0] = (vp8_prob)vp8_read_bits(bc, 8); + pi->FrameContext.Y_Mode_Prob[1] = (vp8_prob)vp8_read_bits(bc, 8); + pi->FrameContext.Y_Mode_Prob[2] = (vp8_prob)vp8_read_bits(bc, 8); + pi->FrameContext.Y_Mode_Prob[3] = (vp8_prob)vp8_read_bits(bc, 8); + } + + pi->c_prob_valid = (uint8_t)vp8_decode_bool(bc , 128); + if (1 == pi->c_prob_valid) + { + pi->FrameContext.UV_Mode_Prob[0] = (vp8_prob)vp8_read_bits(bc, 8); + pi->FrameContext.UV_Mode_Prob[1] = (vp8_prob)vp8_read_bits(bc, 8); + pi->FrameContext.UV_Mode_Prob[2] = (vp8_prob)vp8_read_bits(bc, 8); + } +} + + +void vp8_parse_remaining_frame_header_data(vp8_Info *pi) +{ + BOOL_CODER *bc = &(pi->bool_coder); + + /* MB no coefficients skip */ + pi->mb_no_coeff_skip = (uint8_t)vp8_decode_bool(bc, 128); + + if (1 == pi->mb_no_coeff_skip) + { + pi->prob_skip_false = (vp8_prob)vp8_read_bits(bc, 8); + } + else + { + pi->mb_skip_coeff = 0; + } + + if (pi->frame_tag.frame_type == INTER_FRAME) + { + vp8_parse_yuv_probs_update(pi); + + /* Read motion vector info */ + vp8_parse_mb_mv_info(pi); + } + +} + +#if 0 +vp8_Status vp8_translate_parse_status(vp8_Status status) +{ + switch (status) + { + case VP8_UNSUPPORTED_VERSION: + LOGE("Parser returned VP8_UNSUPPORTED_VERSION"); + return VP8_UNSUPPORTED_VERSION; + case VP8_UNSUPPORTED_BITSTREAM: + LOGE("Parser returned VP8_UNSUPPORTED_BITSTREAM"); + return VP8_UNSUPPORTED_BITSTREAM; + case VP8_INVALID_FRAME_SYNC_CODE: + LOGE("Parser returned VP8_INVALID_FRAME_SYNC_CODE"); + return VP8_INVALID_FRAME_SYNC_CODE; + case VP8_UNEXPECTED_END_OF_BITSTREAM: + LOGE("Parser returned VP8_UNEXPECTED_END_OF_BITSTREAM"); + return VP8_UNEXPECTED_END_OF_BITSTREAM; + default: + LOGE("Parser returned VP8_UNKNOWN_ERROR"); + return VP8_UNKNOWN_ERROR; + } +} +#endif + +/* Parse VP8 frame header */ +int32_t vp8_parse_frame_header(vp8_viddec_parser *parser) +{ + vp8_Status ret = VP8_NO_ERROR; + + vp8_Info *pi = &(parser->info); + + uint8_t *data = pi->source; + uint32_t data_sz = pi->source_sz; + + if (0 == pi->refresh_entropy_lf) + { + memcpy(&(pi->FrameContext), &(pi->LastFrameContext), sizeof(FrameContextData)); + } + + /* Step 1 : parse frame tag containing 3 bytes*/ + ret = vp8_parse_frame_tag(&(pi->frame_tag), data, data_sz); + if (ret != VP8_NO_ERROR) + { + return ret; + } + + /* Pointer advances 3 bytes */ + data += 3; + + /* Start the frame data offset */ + pi->frame_data_offset = 3; + + /* Step 2 : parse key frame parameters*/ + if (pi->frame_tag.frame_type == KEY_FRAME) + { + /* Check sync code containg 3 bytes*/ + if ((data[0] != kVp8SyncCodeByte[0]) || (data[1] != kVp8SyncCodeByte[1]) || (data[2] != kVp8SyncCodeByte[2])) + { + return VP8_INVALID_FRAME_SYNC_CODE; + } + + pi->width = (data[3] | (data[4] << 8)) & 0x3fff; + pi->horiz_scale = data[4] >> 6; + pi->height = (data[5] | (data[6] << 8)) & 0x3fff; + pi->vert_scale = data[6] >> 6; + + /* Pointer advances 7 bytes in this case*/ + data += 7; + pi->frame_data_offset += 7; + } + + if (0 == pi->width || 0 == pi->height) + { + return VP8_UNSUPPORTED_BITSTREAM; + } + + 
/* Initialize frame parameters*/ + vp8_init_frame(pi); + + /* Initialize bool coder */ + BOOL_CODER *bc = &(pi->bool_coder); + vp8_start_decode(bc, (uint8_t*)data); + + /* Parse key frame parameters */ + if (pi->frame_tag.frame_type == KEY_FRAME) + { + pi->clr_type = (YUV_TYPE)vp8_decode_bool(bc, 128); + pi->clamp_type = (CLAMP_TYPE)vp8_decode_bool(bc, 128); + } + + /* Step 3 : parse macroblock-level segmentation flag */ + vp8_parse_segmentation_adjustments_data(pi); + + /* Step 4 : parse loop filter type and levels */ + vp8_parse_loop_filter_type_level(pi); + + /* Step 5 : parse macroblock-level loop filter adjustments */ + vp8_parse_loop_filter_adjustments_data(pi); + + /* Step 6: parse token partition and partition data offsets */ + vp8_parse_token_partition_data(pi, data + pi->frame_tag.first_part_size); + + /* Step 7: parse dequantization indices */ + vp8_parse_dequantization_indices(pi); + + /* For key frames, both golden frame and altref frame are refreshed/replaced by the current reconstructed frame, by default */ + if (pi->frame_tag.frame_type == INTER_FRAME) + { + /* Step 8: parse golden frame and altref frame refresh flags */ + vp8_parse_gf_af_refresh_flags(pi); + } + + /* Step 9: update proability to decode DCT coef */ + pi->refresh_entropy = (uint8_t)vp8_decode_bool(bc, 128); + if (pi->refresh_entropy == 0) + { + memcpy(&(pi->LastFrameContext), &(pi->FrameContext), sizeof(FrameContextData)); + } + + /* Step 10: refresh last frame buffer */ + pi->refresh_lf = (pi->frame_tag.frame_type == KEY_FRAME) || (uint8_t)(vp8_decode_bool(bc, 128)); + + /* Step 11: read coef vp8_prob tree */ + vp8_parse_coef_probs_tree(pi); + + /* Step 12: read remaining frame header data */ + vp8_parse_remaining_frame_header_data(pi); + + /* Hold the current offset in the bitstream */ + pi->frame_data_offset += pi->bool_coder.pos; + + /* Get the frame header bits */ + pi->header_bits = pi->frame_data_offset * 8 - 16 - pi->bool_coder.count; + + pi->refresh_entropy_lf = pi->refresh_entropy; + + return ret; +} diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk index 882b081..b578ec2 100644 --- a/mix_vbp/viddec_fw/fw/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -45,4 +45,14 @@ LOCAL_CFLAGS += -DVBP_TRACE LOCAL_SHARED_LIBRARIES += liblog endif +MERRIFIELD_PRODUCT := \ + mrfl_vp \ + mrfl_hvp \ + mrfl_sle +ifneq ($(filter $(TARGET_PRODUCT),$(MERRIFIELD_PRODUCT)),) +LOCAL_SRC_FILES += vbp_vp8_parser.c +LOCAL_C_INCLUDES += $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vp8/include +LOCAL_CFLAGS += -DUSE_HW_VP8 +endif + include $(BUILD_SHARED_LIBRARY) diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_vp8_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_vp8_parse.h new file mode 100644 index 0000000..6d3583f --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_vp8_parse.h @@ -0,0 +1,6 @@ +#ifndef VIDDEC_VP8_PARSE_H +#define VIDDEC_VP8_PARSE_H + +void viddec_vp8_get_ops(viddec_parser_ops_t *ops); + +#endif diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index ffeb332..53988ab 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -28,8 +28,9 @@ #include - - +#ifdef USE_HW_VP8 +#include +#endif #ifndef TRUE #define TRUE 1 @@ -319,6 +320,54 @@ typedef struct _vbp_data_vc1 vbp_picture_data_vc1* pic_data; } vbp_data_vc1; +#ifdef USE_HW_VP8 +typedef struct _vbp_codec_data_vp8 +{ + uint8 frame_type; + uint8 version_num; + int show_frame; 
+ + uint32 frame_width; + uint32 frame_height; + + int refresh_alt_frame; + int refresh_golden_frame; + int refresh_last_frame; + + int golden_copied; + int altref_copied; +} vbp_codec_data_vp8; + +typedef struct _vbp_slice_data_vp8 +{ + uint8 *buffer_addr; + uint32 slice_offset; + uint32 slice_size; + VASliceParameterBufferBase slc_parms; /* pointer to slice parms */ +} vbp_slice_data_vp8; + +typedef struct _vbp_picture_data_vp8 +{ + VAPictureParameterBufferVP8* pic_parms; /* current parsed picture header */ + + uint32 num_slices; /* number of slices. always one for VP8 */ + vbp_slice_data_vp8 *slc_data; /* pointer to array of slice data */ +} vbp_picture_data_vp8; + +typedef struct _vbp_data_vp8 +{ + uint32 buf_number; /* rolling counter of buffers sent by vbp_parse */ + vbp_codec_data_vp8 *codec_data; + + uint32 num_pictures; + + vbp_picture_data_vp8* pic_data; + + VAProbabilityDataBufferVP8* prob_data; + VAIQMatrixBufferVP8* IQ_matrix_buf; +} vbp_data_vp8; +#endif + enum _picture_type { VC1_PTYPE_I, @@ -347,9 +396,13 @@ enum _vbp_parser_type VBP_VC1, VBP_MPEG2, VBP_MPEG4, - VBP_H264 + VBP_H264, +#ifdef USE_HW_VP8 + VBP_VP8 +#endif }; + /* * open video bitstream parser to parse a specific media type. * @param parser_type: one of the types defined in #vbp_parser_type @@ -382,7 +435,7 @@ uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag * query parsing result. * @param hcontext: handle to VBP context. * @param data: pointer to hold a data blob that contains parsing result. - * Structure of data blob is determined by the media type. + * Structure of data blob is determined by the media type. * @return VBP_OK on success, anything else on failure. * */ diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c index 42f9c96..83a38ac 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c @@ -32,6 +32,9 @@ #include "vbp_vc1_parser.h" #include "vbp_h264_parser.h" #include "vbp_mp42_parser.h" +#ifdef USE_HW_VP8 +#include "vbp_vp8_parser.h" +#endif /* buffer counter */ @@ -115,7 +118,15 @@ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) parser_name = "libmixvbp_h264.so"; #endif break; - +#ifdef USE_HW_VP8 + case VBP_VP8: +#ifndef ANDROID + parser_name = "libmixvbp_vp8.so.0"; +#else + parser_name = "libmixvbp_vp8.so"; +#endif + break; +#endif default: WTRACE("Unsupported parser type!"); return VBP_TYPE; @@ -153,6 +164,9 @@ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) SET_FUNC_POINTER(VBP_VC1, vc1); SET_FUNC_POINTER(VBP_MPEG4, mp42); SET_FUNC_POINTER(VBP_H264, h264); +#ifdef USE_HW_VP8 + SET_FUNC_POINTER(VBP_VP8, vp8); +#endif } /* set entry points for parser operations: @@ -248,7 +262,11 @@ static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext) /* OK for VC-1, MPEG2 and MPEG4. */ if ((VBP_VC1 == pcontext->parser_type) || (VBP_MPEG2 == pcontext->parser_type) || - (VBP_MPEG4 == pcontext->parser_type)) + (VBP_MPEG4 == pcontext->parser_type) +#ifdef USE_HW_VP8 + || (VBP_VP8 == pcontext->parser_type) +#endif +) { pcontext->persist_mem = NULL; } diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c new file mode 100644 index 0000000..25aa85d --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c @@ -0,0 +1,540 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. 
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#include <dlfcn.h>
+
+#include "vp8.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vp8_parser.h"
+
+uint32 vbp_init_parser_entries_vp8(vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        return VBP_PARM;
+    }
+
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_vp8_init");
+    if (NULL == pcontext->parser_ops->init)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->parse_sc = NULL;
+
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_vp8_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_vp8_get_context_size");
+    if (NULL == pcontext->parser_ops->get_cxt_size)
+    {
+        ETRACE ("Failed to set entry point."
); + return VBP_LOAD; + } + + pcontext->parser_ops->is_wkld_done = NULL; + + /* entry point not needed */ + pcontext->parser_ops->is_frame_start = NULL; + + return VBP_OK; +} + +uint32 vbp_allocate_query_data_vp8(vbp_context *pcontext) +{ + if (NULL != pcontext->query_data) + { + return VBP_PARM; + } + + vbp_data_vp8 *query_data = vbp_malloc_set0(vbp_data_vp8, 1); + if (NULL == query_data) + { + goto cleanup; + } + + /* assign the pointer */ + pcontext->query_data = (void *)query_data; + + query_data->pic_data = vbp_malloc_set0(vbp_picture_data_vp8, VP8_MAX_NUM_PICTURES); + if (NULL == query_data->pic_data) + { + goto cleanup; + } + + int i = 0; + for (i = 0; i < VP8_MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferVP8, 1); + if (NULL == query_data->pic_data[i].pic_parms) + { + goto cleanup; + } + query_data->pic_data[i].num_slices = 0; + query_data->pic_data[i].slc_data = vbp_malloc_set0(vbp_slice_data_vp8, VP8_MAX_NUM_SLICES); + if (NULL == query_data->pic_data[i].slc_data) + { + goto cleanup; + } + } + + query_data->codec_data = vbp_malloc_set0(vbp_codec_data_vp8, 1); + if (NULL == query_data->codec_data) + { + goto cleanup; + } + + query_data->prob_data = vbp_malloc_set0(VAProbabilityDataBufferVP8, 1); + if (NULL == query_data->prob_data) + { + goto cleanup; + } + + query_data->IQ_matrix_buf = vbp_malloc_set0(VAIQMatrixBufferVP8, 1); + if (NULL == query_data->IQ_matrix_buf) + { + goto cleanup; + } + + pcontext->parser_private = NULL; + + return VBP_OK; + +cleanup: + vbp_free_query_data_vp8(pcontext); + + return VBP_MEM; +} + +uint32 vbp_free_query_data_vp8(vbp_context *pcontext) +{ + if (NULL == pcontext->query_data) + { + return VBP_OK; + } + + vbp_data_vp8 *query_data = (vbp_data_vp8 *)pcontext->query_data; + if (query_data->pic_data) + { + int i = 0; + for (i = 0; i < VP8_MAX_NUM_PICTURES; i++) + { + if (query_data->pic_data[i].pic_parms) + { + free(query_data->pic_data[i].pic_parms); + query_data->pic_data[i].pic_parms = NULL; + } + if (query_data->pic_data[i].slc_data) + { + free(query_data->pic_data[i].slc_data); + query_data->pic_data[i].slc_data = NULL; + } + } + free(query_data->pic_data); + query_data->pic_data = NULL; + } + + if (query_data->codec_data) + { + free(query_data->codec_data); + query_data->codec_data = NULL; + } + + if (query_data->prob_data) + { + free(query_data->prob_data); + query_data->prob_data = NULL; + } + + if (query_data->IQ_matrix_buf) + { + free(query_data->IQ_matrix_buf); + query_data->IQ_matrix_buf = NULL; + } + + free(query_data); + pcontext->query_data = NULL; + + return VBP_OK; +} + + +/** +* parse decoder configuration data +*/ +uint32 vbp_parse_init_data_vp8(vbp_context* pcontext) +{ + // could never be there + return VBP_OK; +} + +uint32 vbp_parse_start_code_vp8(vbp_context *pcontext) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint8 *buf = cxt->parse_cubby.buf; + uint32 length = cxt->parse_cubby.size; + if (length < 3) + { + return VBP_DATA; + } + + // check whether it is a key frame + if ((length >= 10) && !(buf[0] & 0x01)) + { + uint8 *c = buf + 3; + + // check start code + if ((c[0] != 0x9d) || (c[1] != 0x01) || (c[2] != 0x2a)) + { + return VBP_PARM; + } + } + + // ugly behavior + cxt->list.num_items = 1; + + vbp_data_vp8 *query_data = (vbp_data_vp8*)pcontext->query_data; + query_data->num_pictures = 0; + + return VBP_OK; +} + +/** +* +* process parsing result after a NAL unit is parsed +* +*/ +uint32 vbp_process_parsing_result_vp8( vbp_context *pcontext, int i) +{ + 
vp8_viddec_parser *parser = (vp8_viddec_parser *)pcontext->parser_cxt->codec_data; + switch (parser->info.frame_tag.frame_type) + { + case KEY_FRAME: + ITRACE("This is a key frame."); + parser->info.decoded_frame_number++; + break; + case INTER_FRAME: + ITRACE("This is an inter frame."); + parser->info.decoded_frame_number++; + break; + case SKIPPED_FRAME: + ITRACE("This is skipped frame. We have done nothing."); + break; + default: + WTRACE("Unknown frame type %d", parser->info.frame_tag.frame_type); + break; + } + + //ITRACE("Decoded frame ID = %d", parser->info.decoded_frame_number); + + return VBP_OK; +} + +static void vbp_add_quantization_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data) +{ + vp8_Info *pi = &(parser->info); + VAIQMatrixBufferVP8 *IQ_buf = query_data->IQ_matrix_buf; + + int i = 0; + if (pi->Segmentation.Enabled) + { + for (i = 0; i < MAX_MB_SEGMENTS; i++) + { + if (SEGMENT_ABSDATA == pi->Segmentation.AbsDelta) + { + IQ_buf->quantization_index[i][0] = pi->Segmentation.FeatureData[MB_LVL_ALT_Q][i]; + } + else + { + int temp = pi->Quantization.Y1_AC + pi->Segmentation.FeatureData[MB_LVL_ALT_Q][i]; + IQ_buf->quantization_index[i][0] = (temp >= 0) ? ((temp <= MAX_QINDEX) ? temp : MAX_QINDEX) : 0; + } + } + } + else + { + for (i = 0; i < MAX_MB_SEGMENTS; i++) + { + IQ_buf->quantization_index[i][0] = pi->Quantization.Y1_AC; + } + } + + for (i = 0; i < MAX_MB_SEGMENTS; i++) + { + IQ_buf->quantization_index[i][1] = IQ_buf->quantization_index[i][0] + pi->Quantization.Y1_DC_Delta; + IQ_buf->quantization_index[i][2] = IQ_buf->quantization_index[i][0] + pi->Quantization.Y2_DC_Delta; + IQ_buf->quantization_index[i][3] = IQ_buf->quantization_index[i][0] + pi->Quantization.Y2_AC_Delta; + IQ_buf->quantization_index[i][4] = IQ_buf->quantization_index[i][0] + pi->Quantization.UV_DC_Delta; + IQ_buf->quantization_index[i][5] = IQ_buf->quantization_index[i][0] + pi->Quantization.UV_AC_Delta; + } +} + +static void vbp_add_probs_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data) +{ + FrameContextData *fc = &(parser->info.FrameContext); + VAProbabilityDataBufferVP8 *prob_data = query_data->prob_data; + + /* DCT coefficients probability */ + int i, j, k, l; + for (i = 0; i < 4; i++) + { + for (j = 0; j < 8; j++) + { + for (k = 0; k < 3; k++) + { + for (l = 0; l < 11; l++) + { + prob_data->dct_coeff_probs[i][j][k][l] = fc->DCT_Coefficients[i][j][k][l]; + } + } + } + } +} + +static void vbp_set_codec_data_vp8(vp8_viddec_parser *parser, vbp_codec_data_vp8* codec_data) +{ + vp8_Info *pi = &(parser->info); + + codec_data->frame_type = pi->frame_tag.frame_type; + codec_data->version_num = pi->frame_tag.version; + codec_data->show_frame = pi->frame_tag.show_frame; + + //codec_data->frame_width = pi->width; + //codec_data->frame_height = pi->height; + codec_data->frame_width = ((pi->width + 15) / 16) * 16; + codec_data->frame_height = ((pi->height + 15) / 16) * 16; + + codec_data->refresh_alt_frame = pi->refresh_af; + codec_data->refresh_golden_frame = pi->refresh_gf; + codec_data->refresh_last_frame = pi->refresh_lf; + + codec_data->golden_copied = pi->golden_copied; + codec_data->altref_copied = pi->altref_copied; +} + +static uint32_t vbp_add_pic_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data) +{ + vp8_Info *pi = &(parser->info); + query_data->num_pictures++; + + if (query_data->num_pictures > 1) + { + ETRACE("Num of pictures (%d) per sample buffer exceeds the limit %d.", query_data->num_pictures, VP8_MAX_NUM_PICTURES); + return VBP_DATA; + } + + int i = 0; + int 
pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + ETRACE("MB address does not start from 0!"); + return VBP_DATA; + } + + vbp_picture_data_vp8 *pic_data = &(query_data->pic_data[pic_data_index]); + VAPictureParameterBufferVP8 *pic_parms = pic_data->pic_parms; + + pic_parms->frame_width = pi->width; + pic_parms->frame_height = pi->height; + + pic_parms->pic_fields.value = 0; + pic_parms->pic_fields.bits.key_frame = pi->frame_tag.frame_type; + pic_parms->pic_fields.bits.version = pi->frame_tag.version; + pic_parms->partition_size[0] = pi->frame_tag.first_part_size; + + /* Segmentation */ + pic_parms->pic_fields.bits.segmentation_enabled = pi->Segmentation.Enabled; + pic_parms->pic_fields.bits.update_mb_segmentation_map = pi->Segmentation.UpdateMap; + pic_parms->pic_fields.bits.update_segment_feature_data = pi->Segmentation.UpdateData; + memcpy(pic_parms->mb_segment_tree_probs, pi->Segmentation.TreeProbs, sizeof(unsigned char) * MB_FEATURE_TREE_PROBS); + + /* Loop filter data */ + pic_parms->pic_fields.bits.filter_type = pi->LoopFilter.Type; + pic_parms->pic_fields.bits.sharpness_level = pi->LoopFilter.Sharpness; + pic_parms->pic_fields.bits.loop_filter_adj_enable = pi->LoopFilter.DeltaEnabled; + pic_parms->pic_fields.bits.mode_ref_lf_delta_update = pi->LoopFilter.DeltaUpdate; + + int baseline_filter_level[MAX_MB_SEGMENTS]; + if (pi->Segmentation.Enabled) + { + for (i = 0; i < MAX_MB_SEGMENTS; i++) + { + if (SEGMENT_ABSDATA == pi->Segmentation.AbsDelta) + { + baseline_filter_level[i] = pi->Segmentation.FeatureData[MB_LVL_ALT_LF][i]; + } + else + { + baseline_filter_level[i] = pi->LoopFilter.Level + pi->Segmentation.FeatureData[MB_LVL_ALT_LF][i]; + baseline_filter_level[i] = (baseline_filter_level[i] >= 0) ? ((baseline_filter_level[i] <= MAX_LOOP_FILTER) ? baseline_filter_level[i] : MAX_LOOP_FILTER) : 0; /* Clamp to valid range */ + } + } + } + else + { + for (i = 0; i < MAX_MB_SEGMENTS; i++) + { + baseline_filter_level[i] = pi->LoopFilter.Level; + } + } + for (i = 0; i < MAX_MB_SEGMENTS; i++) + { + pic_parms->loop_filter_level[i] = baseline_filter_level[i]; + } + if ((pic_parms->pic_fields.bits.version == 0) || (pic_parms->pic_fields.bits.version == 1)) + { + pic_parms->pic_fields.bits.loop_filter_disable = pic_parms->loop_filter_level[0] > 0 ? 
true : false;
+    }
+    memcpy(pic_parms->loop_filter_deltas_ref_frame, pi->LoopFilter.DeltasRef, sizeof(char) * MAX_REF_LF_DELTAS);
+    memcpy(pic_parms->loop_filter_deltas_mode, pi->LoopFilter.DeltasMode, sizeof(char) * MAX_MODE_LF_DELTAS);
+
+    pic_parms->pic_fields.bits.sign_bias_golden = pi->sign_bias_golden;
+    pic_parms->pic_fields.bits.sign_bias_alternate = pi->sign_bias_alternate;
+
+    pic_parms->pic_fields.bits.mb_no_coeff_skip = pi->mb_no_coeff_skip;
+    pic_parms->pic_fields.bits.mb_skip_coeff = pi->mb_skip_coeff;
+
+
+    /* Token Partitions */
+    pic_parms->num_of_partitions = pi->partition_count;
+    for (i = 1; i < 9; i++)
+    {
+        pic_parms->partition_size[i] = pi->partition_size[i - 1];
+    }
+
+    pic_parms->prob_skip_false = pi->prob_skip_false;
+    pic_parms->prob_intra = pi->prob_intra;
+    pic_parms->prob_last = pi->prob_lf;
+    pic_parms->prob_gf = pi->prob_gf;
+
+    FrameContextData *fc = &(parser->info.FrameContext);
+    memcpy(pic_parms->y_mode_probs, fc->Y_Mode_Prob, sizeof(unsigned char) * 4);
+    memcpy(pic_parms->uv_mode_probs, fc->UV_Mode_Prob, sizeof(unsigned char) * 3);
+    /* Motion vector context */
+    for (i = 0; i < 2; i++)
+    {
+        memcpy(pic_parms->mv_probs[i], fc->MVContext[i], sizeof(unsigned char) * 19);
+    }
+
+    /* Bool coder */
+    pic_parms->bool_coder_ctx.range = pi->bool_coder.range;
+    pic_parms->bool_coder_ctx.value = pi->bool_coder.value;
+    pic_parms->bool_coder_ctx.count = pi->bool_coder.count;
+
+    //pic_parms->current_picture = VA_INVALID_SURFACE;
+    pic_parms->last_ref_frame = VA_INVALID_SURFACE;
+    pic_parms->golden_ref_frame = VA_INVALID_SURFACE;
+    pic_parms->alt_ref_frame = VA_INVALID_SURFACE;
+    pic_parms->out_of_loop_frame = VA_INVALID_SURFACE; // Reserved for future use
+
+    /* the offset to the first bit of MB from the first byte of slice data */
+    pic_parms->macroblock_offset = pi->header_bits;
+
+    /* specify the slice number */
+    pic_data->num_slices = 0;
+
+    return VBP_OK;
+}
+
+static uint32_t vbp_add_slice_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data)
+{
+    vp8_Info *pi = &(parser->info);
+    /* signed so the "no picture yet" case (num_pictures == 0) is detectable */
+    int32_t pic_index = query_data->num_pictures - 1;
+    if (pic_index < 0)
+    {
+        ETRACE("Invalid picture data index.");
+        return VBP_DATA;
+    }
+
+    vbp_picture_data_vp8 *pic_data = &(query_data->pic_data[pic_index]);
+    vbp_slice_data_vp8 *slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+
+    slc_data->buffer_addr = pi->source;
+    slc_data->slice_offset = 0;
+    slc_data->slice_size = pi->source_sz;
+
+    VASliceParameterBufferBase *slc_parms = &(slc_data->slc_parms);
+    /* number of bytes in the slice data buffer for this slice */
+    slc_parms->slice_data_size = slc_data->slice_size;
+
+    /* the offset to the first byte of slice data */
+    slc_parms->slice_data_offset = 0;
+
+    /* see VA_SLICE_DATA_FLAG_XXX definitions */
+    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+    pic_data->num_slices++;
+    if (pic_data->num_slices > VP8_MAX_NUM_SLICES) {
+        ETRACE("Number of slices (%d) per picture exceeds the limit (%d).", pic_data->num_slices, VP8_MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+    return VBP_OK;
+}
+
+/*
+*
+* fill query data structure after sample buffer is parsed
+*
+*/
+uint32 vbp_populate_query_data_vp8(vbp_context *pcontext)
+{
+    int32_t error = VBP_OK;
+
+    vbp_data_vp8 *query_data = NULL;
+    vp8_viddec_parser *parser = NULL;
+
+    parser = (vp8_viddec_parser *)pcontext->parser_cxt->codec_data;
+    query_data = (vbp_data_vp8 *)pcontext->query_data;
+
+    /* buffer number */
+    query_data->buf_number = buffer_counter;
+
+    /* Populate picture data */
+    error =
vbp_add_pic_data_vp8(parser, query_data); + + /* Populate slice data */ + if (error == VBP_OK) + { + error = vbp_add_slice_data_vp8(parser, query_data); + if (error != VBP_OK) + return error; + } + + /* Populate codec data */ + vbp_set_codec_data_vp8(parser, query_data->codec_data); + + /* Populate probability table */ + vbp_add_probs_data_vp8(parser, query_data); + + /* Populate quantization */ + vbp_add_quantization_data_vp8(parser, query_data); + + return VBP_OK; +} diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.h new file mode 100644 index 0000000..3b6407e --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.h @@ -0,0 +1,67 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + +#ifndef VBP_VP8_PARSER_H +#define VBP_VP8_PARSER_H + +/* + * setup parser's entry points + */ +uint32 vbp_init_parser_entries_vp8(vbp_context *pcontext); + +/* + * allocate query data + */ +uint32 vbp_allocate_query_data_vp8(vbp_context *pcontext); + +/* + * free query data + */ +uint32 vbp_free_query_data_vp8(vbp_context *pcontext); + +/* + * parse initialization data + */ +uint32 vbp_parse_init_data_vp8(vbp_context *pcontext); + +/* + * parse start code. Only support lenght prefixed mode. Start + * code prefixed is not supported. 
+ */ +uint32 vbp_parse_start_code_vp8(vbp_context *pcontext); + +/* + * process parsing result + */ +uint32 vbp_process_parsing_result_vp8(vbp_context *pcontext, int list_index); + +/* + * query parsing result + */ +uint32 vbp_populate_query_data_vp8(vbp_context *pcontext); + + + +#endif /*VBP_VP8_PARSER_H*/ diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 26fa709..5bde2ce 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -8,7 +8,6 @@ LOCAL_SRC_FILES := \ VideoDecoderWMV.cpp \ VideoDecoderMPEG4.cpp \ VideoDecoderAVC.cpp \ - VideoDecoderVP8.cpp \ VideoDecoderPAVC.cpp \ VideoDecoderAVCSecure.cpp \ VideoDecoderTrace.cpp @@ -45,4 +44,14 @@ LOCAL_COPY_HEADERS := \ LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libva_videodecoder +# Add source codes for Merrifield +MERRIFIELD_PRODUCT := \ + mrfl_vp \ + mrfl_hvp \ + mrfl_sle +ifneq ($(filter $(TARGET_PRODUCT),$(MERRIFIELD_PRODUCT)),) +LOCAL_SRC_FILES += VideoDecoderVP8.cpp +LOCAL_CFLAGS += -DUSE_HW_VP8 +endif + include $(BUILD_SHARED_LIBRARY) diff --git a/videodecoder/VideoDecoderHost.cpp b/videodecoder/VideoDecoderHost.cpp index 4e6b1b4..f990fa6 100644 --- a/videodecoder/VideoDecoderHost.cpp +++ b/videodecoder/VideoDecoderHost.cpp @@ -27,6 +27,9 @@ #include "VideoDecoderAVC.h" #include "VideoDecoderPAVC.h" #include "VideoDecoderAVCSecure.h" +#ifdef USE_HW_VP8 +#include "VideoDecoderVP8.h" +#endif #include "VideoDecoderHost.h" #include "VideoDecoderTrace.h" #include @@ -57,7 +60,15 @@ IVideoDecoder* createVideoDecoder(const char* mimeType) { } else if (strcasecmp(mimeType, "video/avc-secure") == 0) { VideoDecoderAVC *p = new VideoDecoderAVCSecure(mimeType); return (IVideoDecoder *)p; - } else { + } +#ifdef USE_HW_VP8 + else if (strcasecmp(mimeType, "video/vp8") == 0 || + strcasecmp(mimeType, "video/x-vnd.on2.vp8") == 0) { + VideoDecoderVP8 *p = new VideoDecoderVP8(mimeType); + return (IVideoDecoder *)p; + } +#endif + else { ETRACE("Unknown mime type: %s", mimeType); } return NULL; diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index 0329c2a..bc6274c 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -1,5 +1,5 @@ /* INTEL CONFIDENTIAL -* Copyright (c) 2009 Intel Corporation. All rights reserved. +* Copyright (c) 2012 Intel Corporation. All rights reserved. 
* * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -27,31 +27,80 @@ #include VideoDecoderVP8::VideoDecoderVP8(const char *mimeType) - : VideoDecoderBase(mimeType, (_vbp_parser_type)VBP_INVALID) { + : VideoDecoderBase(mimeType, VBP_VP8) { + invalidateReferenceFrames(0); + invalidateReferenceFrames(1); } VideoDecoderVP8::~VideoDecoderVP8() { stop(); } +void VideoDecoderVP8::invalidateReferenceFrames(int toggle) { + ReferenceFrameBuffer *p = mRFBs[toggle]; + for (int i = 0; i < VP8_REF_SIZE; i++) { + p->index = (uint32_t) -1; + p->surfaceBuffer = NULL; + p++; + } +} + +void VideoDecoderVP8::updateFormatInfo(vbp_data_vp8 *data) { + int32_t width = data->codec_data->frame_width; + int32_t height = data->codec_data->frame_height; + ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d", + mVideoFormatInfo.width, mVideoFormatInfo.height, width, height); + + if ((mVideoFormatInfo.width != width || + mVideoFormatInfo.height != height) && + width && height) { + mVideoFormatInfo.width = width; + mVideoFormatInfo.height = height; + mSizeChanged = true; + ITRACE("Video size is changed."); + } + + mVideoFormatInfo.valid = true; +} + +Decode_Status VideoDecoderVP8::startVA(vbp_data_vp8 *data) { + updateFormatInfo(data); + + VAProfile vaProfile = VAProfileVP8Version0_3; + if (data->codec_data->version_num > 3) { + return DECODE_PARSER_FAIL; + } + + return VideoDecoderBase::setupVA(VP8_SURFACE_NUMBER + VP8_REF_SIZE, vaProfile); +} + Decode_Status VideoDecoderVP8::start(VideoConfigBuffer *buffer) { Decode_Status status; status = VideoDecoderBase::start(buffer); CHECK_STATUS("VideoDecoderBase::start"); - // config VP8 software decoder if necessary - // TODO: update mVideoFormatInfo here + // We don't want base class to manage reference. + VideoDecoderBase::ManageReference(false); - status = VideoDecoderBase::setupVA( - VP8_SURFACE_NUMBER, - (VAProfile)VAProfileSoftwareDecoding); + if (buffer->data == NULL || buffer->size == 0) { + WTRACE("No config data to start VA."); + return DECODE_SUCCESS; + } + vbp_data_vp8 *data = NULL; + status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data); + CHECK_STATUS("VideoDecoderBase::parseBuffer"); + + status = startVA(data); return status; } void VideoDecoderVP8::stop(void) { VideoDecoderBase::stop(); + + invalidateReferenceFrames(0); + invalidateReferenceFrames(1); } void VideoDecoderVP8::flush(void) { @@ -60,21 +109,73 @@ void VideoDecoderVP8::flush(void) { Decode_Status VideoDecoderVP8::decode(VideoDecodeBuffer *buffer) { Decode_Status status; + vbp_data_vp8 *data = NULL; + if (buffer == NULL) { + ETRACE("VideoDecodeBuffer is NULL."); + return DECODE_INVALID_DATA; + } - status = acquireSurfaceBuffer(); - CHECK_STATUS("acquireSurfaceBuffer"); + status = VideoDecoderBase::parseBuffer( + buffer->data, + buffer->size, + false, + (void**)&data); + CHECK_STATUS("VideoDecoderBase::parseBuffer"); - // TODO: decode sample to mAcquiredBuffer->mappedAddr. - // make sure decoded output is in NV12 format. 
- // << add decoding codes here>> + if (!mVAStarted) { + status = startVA(data); + CHECK_STATUS("startVA"); + } + status = decodeFrame(buffer, data); + CHECK_STATUS("decodeFrame"); + if (mSizeChanged) { + mSizeChanged = false; + return DECODE_FORMAT_CHANGE; + } + return status; +} + +Decode_Status VideoDecoderVP8::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_vp8 *data) { + Decode_Status status; + mCurrentPTS = buffer->timeStamp; + if (0 == data->num_pictures || NULL == data->pic_data) { + WTRACE("Number of pictures is 0."); + return DECODE_SUCCESS; + } + + if (data->codec_data->frame_type == VP8_SKIPPED_FRAME) { + // Do nothing for skip frame as the last frame will be rendered agian by natively + return DECODE_SUCCESS; + } + + status = acquireSurfaceBuffer(); + CHECK_STATUS("acquireSurfaceBuffer"); // set referenceFrame to true if frame decoded is I/P frame, false otherwise. - mAcquiredBuffer->referenceFrame = true; + int frameType = data->codec_data->frame_type; + mAcquiredBuffer->referenceFrame = (frameType == VP8_KEY_FRAME || frameType == VP8_INTER_FRAME); // assume it is frame picture. mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE; mAcquiredBuffer->renderBuffer.timeStamp = buffer->timeStamp; mAcquiredBuffer->renderBuffer.flag = 0; + if (buffer->flag & WANT_DECODE_ONLY) { + mAcquiredBuffer->renderBuffer.flag |= WANT_DECODE_ONLY; + } + + + // Here data->num_pictures is always equal to 1 + for (int index = 0; index < data->num_pictures; index++) { + status = decodePicture(data, index); + if (status != DECODE_SUCCESS) { + endDecodingFrame(true); + return status; + } + } + + if (frameType != VP8_SKIPPED_FRAME) { + updateReferenceFrames(data); + } // if sample is successfully decoded, call outputSurfaceBuffer(); otherwise // call releaseSurfacebuffer(); @@ -82,4 +183,216 @@ Decode_Status VideoDecoderVP8::decode(VideoDecodeBuffer *buffer) { return status; } +Decode_Status VideoDecoderVP8::decodePicture(vbp_data_vp8 *data, int32_t picIndex) { + VAStatus vaStatus = VA_STATUS_SUCCESS; + Decode_Status status; + uint32_t bufferIDCount = 0; + VABufferID bufferIDs[5]; + + vbp_picture_data_vp8 *picData = &(data->pic_data[picIndex]); + VAPictureParameterBufferVP8 *picParams = picData->pic_parms; + + status = setReference(picParams, picIndex); + CHECK_STATUS("setReference"); + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); + // setting mDecodingFrame to true so vaEndPicture will be invoked to end the picture decoding. 
+ mDecodingFrame = true; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferVP8), + 1, + picParams, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAProbabilityBufferType, + sizeof(VAProbabilityDataBufferVP8), + 1, + data->prob_data, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateProbabilityBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferVP8), + 1, + data->IQ_matrix_buf, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); + bufferIDCount++; + + /* Here picData->num_slices is always equal to 1 */ + for (uint32_t i = 0; i < picData->num_slices; i++) { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferBase), + 1, + &(picData->slc_data[i].slc_parms), + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + picData->slc_data[i].slice_size, //size + 1, //num_elements + picData->slc_data[i].buffer_addr + picData->slc_data[i].slice_offset, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + bufferIDCount++; + } + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + bufferIDs, + bufferIDCount); + CHECK_VA_STATUS("vaRenderPicture"); + + vaStatus = vaEndPicture(mVADisplay, mVAContext); + mDecodingFrame = false; + CHECK_VA_STATUS("vaEndPicture"); + + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderVP8::setReference(VAPictureParameterBufferVP8 *picParam, int32_t picIndex) { + int frameType = picParam->pic_fields.bits.key_frame; + switch (frameType) { + case VP8_KEY_FRAME: + picParam->last_ref_frame = VA_INVALID_SURFACE; + picParam->alt_ref_frame = VA_INVALID_SURFACE; + picParam->golden_ref_frame = VA_INVALID_SURFACE; + break; + case VP8_INTER_FRAME: + if (mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer == NULL || + mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer == NULL || + mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer == NULL) { + return DECODE_NO_REFERENCE; + } + //mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer = mLastReference; + picParam->last_ref_frame = mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer->renderBuffer.surface; + picParam->alt_ref_frame = mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer->renderBuffer.surface; + picParam->golden_ref_frame = mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer->renderBuffer.surface; + break; + case VP8_SKIPPED_FRAME: + // will never happen here + break; + default: + return DECODE_PARSER_FAIL; + } + + return DECODE_SUCCESS; +} + +void VideoDecoderVP8::updateReferenceFrames(vbp_data_vp8 *data) { + /* Refresh last frame reference buffer using the currently reconstructed frame */ + refreshLastReference(data); + + /* Refresh golden frame reference buffer using the currently reconstructed frame */ + refreshGoldenReference(data); + + /* Refresh alternative frame reference buffer using the currently reconstructed frame */ + refreshAltReference(data); +} + +void VideoDecoderVP8::refreshLastReference(vbp_data_vp8 *data) { + /* Save previous last reference */ + mRFBs[1][VP8_LAST_REF_PIC].surfaceBuffer = mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer; + mRFBs[1][VP8_LAST_REF_PIC].index = mRFBs[0][VP8_LAST_REF_PIC].index; + + /* For key frame, this is always true */ + if 
(data->codec_data->refresh_last_frame) { + mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer = mAcquiredBuffer; + mRFBs[0][VP8_LAST_REF_PIC].index = mAcquiredBuffer->renderBuffer.surface; + + if (mRFBs[1][VP8_LAST_REF_PIC].surfaceBuffer) { + mRFBs[1][VP8_LAST_REF_PIC].surfaceBuffer->asReferernce = false; + } + } + + if (mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer) { + mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer->asReferernce = true; + } +} + +void VideoDecoderVP8::refreshGoldenReference(vbp_data_vp8 *data) { + /* Save previous golden reference */ + mRFBs[1][VP8_GOLDEN_REF_PIC].surfaceBuffer = mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer; + mRFBs[1][VP8_GOLDEN_REF_PIC].index = mRFBs[0][VP8_GOLDEN_REF_PIC].index; + + if (data->codec_data->golden_copied != BufferCopied_NoneToGolden) { + if (data->codec_data->golden_copied == BufferCopied_LastToGolden) { + /* LastFrame is copied to GoldenFrame */ + mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer = mRFBs[1][VP8_LAST_REF_PIC].surfaceBuffer; + mRFBs[0][VP8_GOLDEN_REF_PIC].index = mRFBs[1][VP8_LAST_REF_PIC].index; + } else if (data->codec_data->golden_copied == BufferCopied_AltRefToGolden) { + /* AltRefFrame is copied to GoldenFrame */ + mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer = mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer; + mRFBs[0][VP8_GOLDEN_REF_PIC].index = mRFBs[0][VP8_ALT_REF_PIC].index; + } + } + + /* For key frame, this is always true */ + if (data->codec_data->refresh_golden_frame) { + mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer = mAcquiredBuffer; + mRFBs[0][VP8_GOLDEN_REF_PIC].index = mAcquiredBuffer->renderBuffer.surface; + + if (mRFBs[1][VP8_GOLDEN_REF_PIC].surfaceBuffer) { + mRFBs[1][VP8_GOLDEN_REF_PIC].surfaceBuffer->asReferernce = false; + } + } + + if (mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer) { + mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer->asReferernce = true; + } +} + +void VideoDecoderVP8::refreshAltReference(vbp_data_vp8 *data) { + /* Save previous alternative reference */ + mRFBs[1][VP8_ALT_REF_PIC].surfaceBuffer = mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer; + mRFBs[1][VP8_ALT_REF_PIC].index = mRFBs[0][VP8_ALT_REF_PIC].index; + + if (data->codec_data->altref_copied != BufferCopied_NoneToAltRef) { + if (data->codec_data->altref_copied == BufferCopied_LastToAltRef) { + /* LastFrame is copied to AltRefFrame */ + mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer = mRFBs[1][VP8_LAST_REF_PIC].surfaceBuffer; + mRFBs[0][VP8_ALT_REF_PIC].index = mRFBs[1][VP8_LAST_REF_PIC].index; + } else if (data->codec_data->altref_copied == BufferCopied_GoldenToAltRef) { + /* GoldenFrame is copied to AltRefFrame */ + mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer = mRFBs[1][VP8_GOLDEN_REF_PIC].surfaceBuffer; + mRFBs[0][VP8_ALT_REF_PIC].index = mRFBs[1][VP8_GOLDEN_REF_PIC].index; + } + } + + /* For key frame, this is always true */ + if (data->codec_data->refresh_alt_frame) { + mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer = mAcquiredBuffer; + mRFBs[0][VP8_ALT_REF_PIC].index = mAcquiredBuffer->renderBuffer.surface; + + if (mRFBs[1][VP8_ALT_REF_PIC].surfaceBuffer) { + mRFBs[1][VP8_ALT_REF_PIC].surfaceBuffer->asReferernce = false; + } + } + + if (mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer) { + mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer->asReferernce = true; + } +} diff --git a/videodecoder/VideoDecoderVP8.h b/videodecoder/VideoDecoderVP8.h index 676bd1a..f3b64e9 100644 --- a/videodecoder/VideoDecoderVP8.h +++ b/videodecoder/VideoDecoderVP8.h @@ -1,5 +1,5 @@ /* INTEL CONFIDENTIAL -* Copyright (c) 2009 Intel Corporation. All rights reserved. +* Copyright (c) 2012 Intel Corporation. All rights reserved. 
*
* The source code contained or described herein and all documents
* related to the source code ("Material") are owned by Intel
@@ -38,11 +38,56 @@ public:
     virtual void flush(void);
     virtual Decode_Status decode(VideoDecodeBuffer *buffer);

+private:
+    Decode_Status decodeFrame(VideoDecodeBuffer* buffer, vbp_data_vp8 *data);
+    Decode_Status decodePicture(vbp_data_vp8 *data, int32_t picIndex);
+    Decode_Status setReference(VAPictureParameterBufferVP8 *picParam, int32_t picIndex);
+    Decode_Status startVA(vbp_data_vp8 *data);
+    void updateReferenceFrames(vbp_data_vp8 *data);
+    void refreshLastReference(vbp_data_vp8 *data);
+    void refreshGoldenReference(vbp_data_vp8 *data);
+    void refreshAltReference(vbp_data_vp8 *data);
+    void updateFormatInfo(vbp_data_vp8 *data);
+    void invalidateReferenceFrames(int toggle);

 private:
     enum {
-        VP8_SURFACE_NUMBER = 10,
+        VP8_SURFACE_NUMBER = 9,
+        VP8_REF_SIZE = 3,
+    };
+
+    enum {
+        VP8_KEY_FRAME = 0,
+        VP8_INTER_FRAME,
+        VP8_SKIPPED_FRAME,
+    };
+
+    enum {
+        VP8_LAST_REF_PIC = 0,
+        VP8_GOLDEN_REF_PIC,
+        VP8_ALT_REF_PIC,
+    };
+
+    enum {
+        BufferCopied_NoneToGolden = 0,
+        BufferCopied_LastToGolden = 1,
+        BufferCopied_AltRefToGolden = 2
+    };
+
+    enum {
+        BufferCopied_NoneToAltRef = 0,
+        BufferCopied_LastToAltRef = 1,
+        BufferCopied_GoldenToAltRef = 2
+    };
+
+    struct ReferenceFrameBuffer {
+        VideoSurfaceBuffer *surfaceBuffer;
+        int32_t index;
     };
+
+    // [2]: index 0 holds the current reference frames, index 1 the previous ones
+    // [VP8_REF_SIZE]: 0 for last ref pic, 1 for golden ref pic, 2 for alt ref pic
+    ReferenceFrameBuffer mRFBs[2][VP8_REF_SIZE];
 };

-- 
cgit v1.2.3 

From 7322c52f2b90f40fdb990e0e1b9956dd95240f30 Mon Sep 17 00:00:00 2001
From: Cheng Yao
Date: Wed, 7 Nov 2012 10:08:18 +0800
Subject: [PORT FROM MAIN] VideoDecoderAVC: Flush DPB in VideoDecoderBase::flush()

BZ: 61843

The original libmixvbp_h264 did not flush the DPB when
VideoDecoderBase::flush() was called. For open-GOP clips, which have
non-IDR keyframes, a B frame referring to both the keyframe and the
previous GOP may hit decode errors.
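The fix threads an optional flush hook through the parser ops table: only the
H.264 parser installs one, and the dispatcher must tolerate a NULL pointer.
A minimal sketch of that optional-op pattern, with illustrative names rather
than the real libmix declarations:

    /* Minimal sketch of the optional per-codec flush hook added by this
     * patch. Names are illustrative; the real declarations live in
     * viddec_parser_ops.h. */
    #include <cstddef>

    typedef void (*fn_flush_parser)(void *parent, void *ctxt);

    struct parser_ops_t {
        fn_flush_parser flush;  /* NULL for codecs without a flush entry point */
    };

    /* Mirrors vbp_utils_flush(): trivially succeed when the codec installed
     * no hook (MPEG-4, VC-1), delegate to it otherwise (H.264). */
    static void flush_if_supported(parser_ops_t *ops, void *cxt, void *codec_data) {
        if (ops->flush != NULL) {
            ops->flush(cxt, codec_data);
        }
    }

Loading the hook through dlsym, as the patch does, keeps the ops table
backward compatible: older codec parsers simply never populate the new slot.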
Change-Id: I5ee2f8b091ac74ccdbb276f26145322bc72415e0
Signed-off-by: Cheng Yao
Reviewed-on: http://android.intel.com:8080/76397
Tested-by: Tong, BoX
Reviewed-by: Tong, BoX
Reviewed-by: cactus
Tested-by: cactus
---
 .../fw/codecs/h264/parser/viddec_h264_parse.c | 32 ++++++++++++++++++++++
 .../fw/parser/include/viddec_parser_ops.h | 3 ++
 mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 10 ++++++-
 mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 6 +++-
 mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 9 ++++--
 mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c | 5 +++-
 videodecoder/VideoDecoderBase.cpp | 7 ++++-
 7 files changed, 66 insertions(+), 6 deletions(-)

diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c
index 2694f7d..473c64e 100644
--- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c
@@ -562,6 +562,37 @@ static void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size)
         + sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
 }

+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+#ifdef VBP
+void viddec_h264_flush(void *parent, void *ctxt)
+#else
+static void viddec_h264_flush(void *parent, void *ctxt)
+#endif
+{
+    int i;
+    struct h264_viddec_parser* parser = ctxt;
+    h264_Info * pInfo = &(parser->info);
+
+    /* flush the dpb and output all */
+    h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field, pInfo->active_SPS.num_ref_frames);
+
+    /* Reset the DPB to its initial state so the parser does not store
+       stale data into the DPB when parsing the next slice */
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    for (i = 0; i < NUM_DPB_FRAME_STORES; i++)
+    {
+        p_dpb->fs[i].fs_idc = MPD_DPB_FS_NULL_IDC;
+        p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+    }
+    p_dpb->used_size = 0;
+    p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+    p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC;
+
+    return;
+}
+
 #ifndef VBP
 void viddec_h264_get_ops(viddec_parser_ops_t *ops)
 {
@@ -571,6 +602,7 @@ void viddec_h264_get_ops(viddec_parser_ops_t *ops)
     ops->get_cxt_size = viddec_h264_get_context_size;
     ops->is_wkld_done = viddec_h264_wkld_done;
     ops->is_frame_start = viddec_h264_is_frame_start;
+    ops->flush = viddec_h264_flush;
     return;
 }
 #endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h
index 608c0e7..561b179 100644
--- a/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h
@@ -25,6 +25,8 @@ typedef uint32_t (*fn_is_wkld_done)(void *parent, void *ctxt, uint32_t next_s
 typedef uint32_t (*fn_is_frame_start)(void *ctxt);
 typedef uint32_t (*fn_gen_contrib_tags)(void *parent, uint32_t ignore_partial);
 typedef uint32_t (*fn_gen_assoc_tags)(void *parent);
+typedef void (*fn_flush_parser) (void *parent, void *ctxt);
+

 typedef struct
 {
@@ -36,6 +38,7 @@ typedef struct
     fn_is_frame_start is_frame_start;
     fn_gen_contrib_tags gen_contrib_tags;
     fn_gen_assoc_tags gen_assoc_tags;
+    fn_flush_parser flush;
 } viddec_parser_ops_t;

diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
index 8592e64..c1ccef7 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
@@ -1,5 +1,5 @@
 /* INTEL CONFIDENTIAL
-* Copyright (c) 2009 Intel Corporation. All rights reserved.
+* Copyright (c) 2009, 2012 Intel Corporation. All rights reserved.
*
* The source code contained or described herein and all documents
* related to the source code ("Material") are owned by Intel
@@ -187,6 +187,14 @@ uint32 vbp_init_parser_entries_h264(vbp_context *pcontext)
         return VBP_LOAD;
     }
 #endif
+
+    pcontext->parser_ops->flush = dlsym(pcontext->fd_parser, "viddec_h264_flush");
+    if (NULL == pcontext->parser_ops->flush)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
     /* entry point not needed */
     pcontext->parser_ops->is_frame_start = NULL;
     return VBP_OK;
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
index dfa536b..5e4c887 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
@@ -1,5 +1,5 @@
 /* INTEL CONFIDENTIAL
-* Copyright (c) 2009 Intel Corporation. All rights reserved.
+* Copyright (c) 2009, 2012 Intel Corporation. All rights reserved.
*
* The source code contained or described herein and all documents
* related to the source code ("Material") are owned by Intel
@@ -137,6 +137,10 @@ uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext)
         return VBP_LOAD;
     }
 #endif
+
+    /* entry point not needed */
+    pcontext->parser_ops->flush = NULL;
+
     return VBP_OK;
 }
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
index 83a38ac..3983387 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
@@ -1,5 +1,5 @@
 /* INTEL CONFIDENTIAL
-* Copyright (c) 2009 Intel Corporation. All rights reserved.
+* Copyright (c) 2009, 2012 Intel Corporation. All rights reserved.
*
* The source code contained or described herein and all documents
* related to the source code ("Material") are owned by Intel
@@ -571,11 +571,16 @@ uint32 vbp_utils_query(vbp_context *pcontext, void **data)

 /**
  *
- * flush parsing buffer. Currently it is no op.
+ * flush parsing buffer. Currently it always succeeds.
  *
  */
 uint32 vbp_utils_flush(vbp_context *pcontext)
 {
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    if (ops->flush != NULL) {
+        ops->flush((void *)cxt, (void *)&(cxt->codec_data[0]));
+    }
     return VBP_OK;
 }

diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
index 3ab3467..751227e 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
@@ -1,5 +1,5 @@
 /* INTEL CONFIDENTIAL
-* Copyright (c) 2009 Intel Corporation. All rights reserved.
+* Copyright (c) 2009, 2012 Intel Corporation. All rights reserved.
*
* The source code contained or described herein and all documents
* related to the source code ("Material") are owned by Intel
@@ -120,6 +120,9 @@ uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext)
         return VBP_LOAD;
     }

+    /* entry point not needed */
+    pcontext->parser_ops->flush = NULL;
+
     return VBP_OK;
 }

diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp
index c89b9b7..d5a47dd 100644
--- a/videodecoder/VideoDecoderBase.cpp
+++ b/videodecoder/VideoDecoderBase.cpp
@@ -1,5 +1,5 @@
 /* INTEL CONFIDENTIAL
-* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+* Copyright (c) 2009-2012 Intel Corporation. All rights reserved.
*
* The source code contained or described herein and all documents
* related to the source code ("Material") are owned by Intel
@@ -200,6 +200,11 @@ void VideoDecoderBase::flush(void) {
     mOutputTail = NULL;
     mDecodingFrame = false;

+    // flush vbp parser
+    if (mParserHandle && (vbp_flush(mParserHandle) != VBP_OK)) {
+        WTRACE("Failed to flush parser. Continue");
+    }
+
     // initialize surface buffer without resetting mapped/raw data
     initSurfaceBuffer(false);

-- 
cgit v1.2.3 

From 4d4733794251eeec6f80ebed3fff1dc62c6db7de Mon Sep 17 00:00:00 2001
From: ywan171
Date: Wed, 14 Nov 2012 22:01:29 +0800
Subject: [PORT FROM MAIN] VideoDecoderAVC: adjust stpos to not cover the next item's SC

BZ: 67980

Adjust stpos so that the previous item's end position lines up with the
next item's start code instead of relying on cubby.phase: phase is always
4 once parse_sc reports that a start code was found, which is wrong when
the start code is the 3-byte "0x00,0x00,0x01" form. Compute the end
position from the minimum start-code length of 3 instead.

Change-Id: Ia10b7eecd670fa975245df5edc4fcbc251d62c8f
Signed-off-by: ywan171
Reviewed-on: http://android.intel.com:8080/76398
Tested-by: Tong, BoX
Reviewed-by: Tong, BoX
Reviewed-by: cactus
Tested-by: cactus
---
 mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
index c1ccef7..3f6400d 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
@@ -1472,6 +1472,9 @@ static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p, int *NAL_length_size)
 * instead, it is comprised of size of NAL unit and payload
 * of NAL unit. See spec 15 (Sample format)
 */
+
+/* The start code prefix is 0x000001, which is 3 bytes. */
+#define H264_SC_SIZE 3
 uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
 {
     viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
@@ -1586,7 +1589,7 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
         {
             cxt->list.data[cxt->list.num_items].stpos = cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos;
-            cxt->list.data[cxt->list.num_items - 1].edpos = cxt->list.data[cxt->list.num_items].stpos - cubby.phase; /* offset before start code */
+            cxt->list.data[cxt->list.num_items - 1].edpos = cxt->list.data[cxt->list.num_items].stpos - H264_SC_SIZE;
         }
         cubby.phase = 0;

-- 
cgit v1.2.3 

From 3726445f96e1959c3efb1144a18e281d6de794e1 Mon Sep 17 00:00:00 2001
From: wfeng6
Date: Thu, 15 Nov 2012 19:01:35 +0800
Subject: [PORT FROM MAIN] Correct the build options for vavideodecoder

BZ: 65952

Correct the build option mistake in frameworks/vavideodecoder/Android.mk.
Change-Id: Icecda8d6a7b81b9bc85142bd61c7296a5d7f4105
Signed-off-by: wfeng6
Reviewed-on: http://android.intel.com:8080/76403
Tested-by: Tong, BoX
Reviewed-by: Tong, BoX
Reviewed-by: cactus
Tested-by: cactus
---
 frameworks/vavideodecoder/Android.mk | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/frameworks/vavideodecoder/Android.mk b/frameworks/vavideodecoder/Android.mk
index 1a9faaf..a352e69 100644
--- a/frameworks/vavideodecoder/Android.mk
+++ b/frameworks/vavideodecoder/Android.mk
@@ -1,4 +1,4 @@
-ifeq ($(strip $(USE_INTEL_ASF_EXTRACTOR)),true)
+ifeq ($(strip $(USE_INTEL_VA)),true)

 LOCAL_PATH := $(call my-dir)
 include $(CLEAR_VARS)
@@ -9,7 +9,6 @@ LOCAL_COPY_HEADERS := VAVideoDecoder.h

 LOCAL_SRC_FILES := \
     VAVideoDecoder.cpp \
-#LOCAL_SHARED_LIBRARIES += libasfparser

 LOCAL_C_INCLUDES := \
     $(TOP)/frameworks/av/media/libstagefright/include \

-- 
cgit v1.2.3 

From 307bdf6605a0f340a8b1b6fe38c421b6be3b9d6e Mon Sep 17 00:00:00 2001
From: wfeng6
Date: Mon, 19 Nov 2012 21:16:25 +0800
Subject: [PORT FROM MAIN] Avoid a mediaserver crash when the sample size in stsz is invalid

BZ: 68835

When a sample size in the stsz table is invalid (3GB, for example), it
can crash mediaserver. A threshold is used here to keep mediaserver from
allocating an excessively large buffer (like 3GB). The threshold is set
to the size limit of the Intel decoders' input port.

Change-Id: Ic3126c17775da385f2fd8aa483d1c208e7fae908
Signed-off-by: wfeng6
Reviewed-on: http://android.intel.com:8080/76404
Tested-by: Tong, BoX
Reviewed-by: Tong, BoX
Reviewed-by: cactus
Tested-by: cactus
---
 frameworks/vavideodecoder/VAVideoDecoder.cpp | 10 ++++++++++
 frameworks/vavideodecoder/VAVideoDecoder.h | 5 +++++
 2 files changed, 15 insertions(+)

diff --git a/frameworks/vavideodecoder/VAVideoDecoder.cpp b/frameworks/vavideodecoder/VAVideoDecoder.cpp
index 96f37d8..165b012 100644
--- a/frameworks/vavideodecoder/VAVideoDecoder.cpp
+++ b/frameworks/vavideodecoder/VAVideoDecoder.cpp
@@ -128,6 +128,16 @@ status_t VAVideoDecoder::start(MetaData *params) {
         LOGW("No configuration data found!");
     }

+    // A threshold is used here to keep mediaserver from allocating too
+    // large a buffer (like 3GB) and crashing in MPEG4Source::start. The
+    // threshold is set to the input port size limit of the Intel decoders.
+    int32_t max_size;
+    if (meta->findInt32(kKeyMaxInputSize, &max_size)) {
+        if (max_size > MAXINPUTSIZE || max_size < 0) {
+            LOGE("Invalid kKeyMaxInputSize!");
+            return ERROR_MALFORMED;
+        }
+    }
     configBuffer.flag |= WANT_RAW_OUTPUT;
     mFormat->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420SemiPlanar);
     mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
diff --git a/frameworks/vavideodecoder/VAVideoDecoder.h b/frameworks/vavideodecoder/VAVideoDecoder.h
index aa16940..5d66ec3 100644
--- a/frameworks/vavideodecoder/VAVideoDecoder.h
+++ b/frameworks/vavideodecoder/VAVideoDecoder.h
@@ -68,6 +68,11 @@ private:
     enum {
         NUM_OF_MEDIA_BUFFER = 20,
     };
+    // The maximum input size is set to the size of one 1080p 4:4:4 image,
+    // calculated as 1920x1080x3 = 6220800 bytes.
+    enum {
+        MAXINPUTSIZE = 6220800,
+    };
     sp<MediaSource> mSource;
     bool mStarted;
     bool mRawOutput;

-- 
cgit v1.2.3 

From 3463f0fb46a280b299f43eb53c80aa4fed6d5133 Mon Sep 17 00:00:00 2001
From: Elaine Wang
Date: Mon, 19 Nov 2012 15:50:37 +0800
Subject: [PORT FROM MAIN] Video encode: support non-cached user pointer (mainline)

BZ: 68817

Cached memory consumes more power during video recording. If libva
supports a non-cached user pointer, the camera can use ashmem to
allocate non-cached memory as the share buffer.
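For context, a hypothetical sketch of the client side: allocating a
shareable input buffer with ashmem and tagging it with the new memory mode.
ashmem_create_region() and mmap() are real APIs from cutils and libc; the
SharedFrame plumbing is invented for illustration:

    /* Hypothetical sketch: an ashmem-backed share buffer for the encoder path. */
    #include <cstddef>
    #include <cstdint>
    #include <sys/mman.h>
    #include <unistd.h>
    #include <cutils/ashmem.h>

    struct SharedFrame {
        int fd;         /* ashmem region, shareable across processes */
        uint8_t *data;  /* user pointer handed to the encoder */
        size_t size;
    };

    static bool allocateSharedFrame(SharedFrame *frame, size_t size) {
        frame->fd = ashmem_create_region("camera-input", size);
        if (frame->fd < 0) {
            return false;
        }
        void *mem = mmap(NULL, size, PROT_READ | PROT_WRITE,
                         MAP_SHARED, frame->fd, 0);
        if (mem == MAP_FAILED) {
            close(frame->fd);
            return false;
        }
        frame->data = static_cast<uint8_t *>(mem);
        frame->size = size;
        /* The pointer would then be advertised with mode
         * MEM_MODE_NONECACHE_USRPTR so that surfaceMappingForMalloc() picks
         * the VAExternalMemoryNoneCacheUserPointer attribute instead of the
         * cached user-pointer type. */
        return true;
    }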
Change-Id: I719edea3789d99a6bec843b07318231176fa1e23
Signed-off-by: Elaine Wang
Reviewed-on: http://android.intel.com:8080/76773
Tested-by: Tong, BoX
Reviewed-by: Tong, BoX
Reviewed-by: cactus
Tested-by: cactus
---
 videoencoder/IntelMetadataBuffer.h | 1 +
 videoencoder/VideoEncoderBase.cpp | 6 +++++-
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h
index e133c57..6d8f978 100644
--- a/videoencoder/IntelMetadataBuffer.h
+++ b/videoencoder/IntelMetadataBuffer.h
@@ -44,6 +44,7 @@ typedef enum {
     MEM_MODE_GFXHANDLE = 32,
     MEM_MODE_KBUFHANDLE = 64,
     MEM_MODE_ION = 128,
+    MEM_MODE_NONECACHE_USRPTR = 256,
 }MemMode;

 typedef struct {
diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp
index 819adfa..d0e6d66 100644
--- a/videoencoder/VideoEncoderBase.cpp
+++ b/videoencoder/VideoEncoderBase.cpp
@@ -1684,7 +1684,10 @@ Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) {
     vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride;
     vaSurfaceAttrib.buffers[0] = map->value;
     vaSurfaceAttrib.pixel_format = map->vinfo.format;
-    vaSurfaceAttrib.type = VAExternalMemoryUserPointer;
+    if (map->vinfo.mode == MEM_MODE_NONECACHE_USRPTR)
+        vaSurfaceAttrib.type = VAExternalMemoryNoneCacheUserPointer;
+    else
+        vaSurfaceAttrib.type = VAExternalMemoryUserPointer;

     vaStatus = vaCreateSurfacesWithAttribute(
         mVADisplay, map->vinfo.width, map->vinfo.height, VA_RT_FORMAT_YUV420,
@@ -1721,6 +1724,7 @@ LOG_I("surfaceMapping mode=%d, format=%d, lumaStride=%d, width=%d, height=%d, va
             status = surfaceMappingForKbufHandle(map);
             break;
         case MEM_MODE_MALLOC:
+        case MEM_MODE_NONECACHE_USRPTR:
             status = surfaceMappingForMalloc(map);
             break;
         case MEM_MODE_ION:

-- 
cgit v1.2.3 

From 6bb09695d9182a07fecee9a953ca19be413d1874 Mon Sep 17 00:00:00 2001
From: "Gu, Wangyi"
Date: Mon, 12 Nov 2012 15:35:39 +0800
Subject: [PORT FROM MAIN] Movie studio: enhance the HE-AAC profile support

BZ: 59776

This is an original Android defect: enhance the HE-AAC profile support
in Movie studio.
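The DSI walk added below follows the AudioSpecificConfig layout of ISO/IEC
14496-3: a 5-bit object type, a 4-bit frequency index with a 24-bit escape,
a 4-bit channel configuration, and, for HE-AAC, either an explicit AOT-5
extension block or an implicit 0x2b7 sync extension. A simplified standalone
reader of the leading fields (illustrative code, not the VideoEditor
implementation):

    /* Simplified sketch of the leading AudioSpecificConfig fields
     * (ISO/IEC 14496-3) that the patch below parses. */
    #include <cstddef>
    #include <cstdint>

    struct BitReader {
        const uint8_t *buf;
        size_t bitPos;
        uint32_t read(int n) {  /* MSB-first, n <= 24 in this sketch */
            uint32_t v = 0;
            while (n-- > 0) {
                v = (v << 1) | ((buf[bitPos >> 3] >> (7 - (bitPos & 7))) & 1u);
                ++bitPos;
            }
            return v;
        }
    };

    static const uint32_t kFreqTable[13] = {
        96000, 88200, 64000, 48000, 44100, 32000, 24000,
        22050, 16000, 12000, 11025, 8000, 7350
    };

    struct AacConfig {
        uint32_t audioObjectType;    /* 5 == SBR (HE-AAC), 2 == AAC-LC */
        uint32_t samplingFrequency;  /* 0 if the index was out of range */
        uint32_t channelConfig;
    };

    static AacConfig parseLeadingFields(BitReader &br) {
        AacConfig c = {};
        c.audioObjectType = br.read(5);         /* 31-escape not handled here */
        uint32_t freqIndex = br.read(4);
        if (freqIndex == 0x0f) {
            c.samplingFrequency = br.read(24);  /* escape: explicit frequency */
        } else if (freqIndex < 13) {
            c.samplingFrequency = kFreqTable[freqIndex];
        }
        c.channelConfig = br.read(4);
        /* For AOT 5 an extension frequency index (same 0x0f escape) and the
         * underlying object type follow; SBR may also be signalled
         * implicitly by a trailing 0x2b7 sync extension, which the patch
         * below handles. */
        return c;
    }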
Change-Id: I5ae9e51811ca4f41f20f34d13bad33e0bf9279f7 Signed-off-by: Gu, Wangyi Reviewed-on: http://android.intel.com:8080/76962 Tested-by: Tong, BoX Reviewed-by: Tong, BoX Reviewed-by: cactus Tested-by: cactus --- .../stagefrightshells/VideoEditorAudioDecoder.cpp | 76 ++++++++++++++++++++++ 1 file changed, 76 insertions(+) diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp index 2c5510d..2e8c5d1 100644 --- a/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp +++ b/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp @@ -349,6 +349,7 @@ M4OSA_ERR VideoEditorAudioDecoder_parse_AAC_DSI(M4OSA_Int8* pDSI, M4OSA_ERR err = M4NO_ERROR; M4OSA_UInt32 offset = 0; M4OSA_Int32 result = 0; + M4OSA_Int32 extensionAudioObjectType = 0; ALOGV("VideoEditorAudioDecoder_parse_AAC_DSI begin"); @@ -386,6 +387,11 @@ M4OSA_ERR VideoEditorAudioDecoder_parse_AAC_DSI(M4OSA_Int8* pDSI, // Get the frequency index err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 4, &result, &offset); VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + if (result == 0x0f) { + // Get the frequency index again + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 24, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + } VIDEOEDITOR_CHECK((0 <= result) && (FREQ_TABLE_SIZE > result), M4ERR_PARAMETER); pProperties->aSampFreq = AD_AAC_FREQ_TABLE[result]; @@ -396,6 +402,76 @@ M4OSA_ERR VideoEditorAudioDecoder_parse_AAC_DSI(M4OSA_Int8* pDSI, VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); pProperties->aNumChan = (M4OSA_UInt32)result; + if (pProperties->aAudioObjectType == 5) { + extensionAudioObjectType = pProperties->aAudioObjectType; + // Get extension sampling frequency index + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 4, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + if (result == 0x0f) { + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 24, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + } + VIDEOEDITOR_CHECK((0 <= result) && (FREQ_TABLE_SIZE > result), + M4ERR_PARAMETER); + pProperties->aExtensionSampFreq = AD_AAC_FREQ_TABLE[result]; + // Get the object type again + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 5, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + pProperties->aAudioObjectType = (M4OSA_Int32)result; + } + + // It's for implicit signal the presence of SBR data with AAC-LC audio object type(AOT = 2) + if (pProperties->aAudioObjectType == 2) { /* parseGASpecificConfig begin*/ + // Get frame length flag + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 1, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + // Get depends on core coder + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 1, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + if (result) { + // Get core coder delay + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 1, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + } + // Get extension flag + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 1, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + if (result) { + // Get extension flag3 + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 1, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + } + }/* parseGASpecificConfig end*/ + + if (extensionAudioObjectType != 5 && (dsiSize*8 - offset) >= 16) { + // get sync extension type + err = VideoEditorAudioDecoder_getBits(pDSI, 
dsiSize, 11, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + if (result == 0x2b7) { + ALOGV("found syncExtension"); + // Get extension Audio Object Type + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 5, &extensionAudioObjectType, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + // get SBR present flag + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 1, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + pProperties->aSBRPresent = result; + if (result == 1) { + // Get extension sampling frequency index + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 4, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + if (result == 0x0f) { + // Get extension sampling frequency index again + err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 24, &result, &offset); + VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); + } + VIDEOEDITOR_CHECK((0 <= result) && (FREQ_TABLE_SIZE > result), + M4ERR_PARAMETER); + pProperties->aExtensionSampFreq = AD_AAC_FREQ_TABLE[result]; + } + } + } + // Set the max PCM samples per channel pProperties->aMaxPCMSamplesPerCh = (pProperties->aSBRPresent) ? 2048 : 1024; -- cgit v1.2.3 From e0453f81327ca7f0ce0a9e1e298140c838d36876 Mon Sep 17 00:00:00 2001 From: Guilhem IMBERTON Date: Wed, 28 Nov 2012 17:00:22 +0100 Subject: BUILD: Allow customers to use their own product/device name BZ: 70731 Use dedicated variable instead of TARGET_PRODUCT/TARGET_DEVICE where needed. Remove product name from kernel/module build scripts. All out-of-tree kernel modules use same build script. Change-Id: I5c0604944663d7408bd95b1692dad29d9425ac73 Signed-off-by: Guilhem IMBERTON Reviewed-on: http://android.intel.com:8080/79978 --- mix_vbp/Android.mk | 4 ++-- mix_vbp/viddec_fw/fw/parser/Android.mk | 4 ++-- videodecoder/Android.mk | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/mix_vbp/Android.mk b/mix_vbp/Android.mk index 3899789..3df2e0a 100644 --- a/mix_vbp/Android.mk +++ b/mix_vbp/Android.mk @@ -9,10 +9,10 @@ include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/parser/Android include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vc1/parser/Android.mk # Add source codes for Merrifield -MERRIFIELD_PRODUCT := \ +MERRIFIELD_DEVICE := \ mrfl_vp \ mrfl_hvp \ mrfl_sle -ifneq ($(filter $(TARGET_PRODUCT),$(MERRIFIELD_PRODUCT)),) +ifneq ($(filter $(REF_DEVICE_NAME),$(MERRIFIELD_DEVICE)),) include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vp8/parser/Android.mk endif diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk index b578ec2..6e403da 100644 --- a/mix_vbp/viddec_fw/fw/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -45,11 +45,11 @@ LOCAL_CFLAGS += -DVBP_TRACE LOCAL_SHARED_LIBRARIES += liblog endif -MERRIFIELD_PRODUCT := \ +MERRIFIELD_DEVICE := \ mrfl_vp \ mrfl_hvp \ mrfl_sle -ifneq ($(filter $(TARGET_PRODUCT),$(MERRIFIELD_PRODUCT)),) +ifneq ($(filter $(REF_DEVICE_NAME),$(MERRIFIELD_DEVICE)),) LOCAL_SRC_FILES += vbp_vp8_parser.c LOCAL_C_INCLUDES += $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vp8/include LOCAL_CFLAGS += -DUSE_HW_VP8 diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 5bde2ce..9c335f3 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -45,11 +45,11 @@ LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libva_videodecoder # Add source codes for Merrifield -MERRIFIELD_PRODUCT := \ +MERRIFIELD_DEVICE := \ mrfl_vp \ mrfl_hvp \ mrfl_sle -ifneq ($(filter 
$(TARGET_PRODUCT),$(MERRIFIELD_PRODUCT)),) +ifneq ($(filter $(REF_DEVICE_NAME),$(MERRIFIELD_DEVICE)),) LOCAL_SRC_FILES += VideoDecoderVP8.cpp LOCAL_CFLAGS += -DUSE_HW_VP8 endif -- cgit v1.2.3 From 81b888ea6f59f99ef8628e222e75584ffa5f674f Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Thu, 6 Dec 2012 05:19:56 +0800 Subject: Move video framework enhancement codes to libmedia_utils BZ: 72915 Move video framework enhancement codes from libmix to libmedia_utils folder. Change-Id: I022033be9636b3ff2a0855309bd7337edd53f384 Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/79222 Reviewed-by: Tang, Richard Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: cactus Tested-by: cactus --- Android.mk | 4 - frameworks/asf_extractor/Android.mk | 31 - frameworks/asf_extractor/AsfExtractor.cpp | 759 ----- frameworks/asf_extractor/AsfExtractor.h | 142 - frameworks/asf_extractor/MediaBufferPool.cpp | 125 - frameworks/asf_extractor/MediaBufferPool.h | 75 - frameworks/asf_extractor/MetaDataExt.h | 73 - frameworks/libI420colorconvert/Android.mk | 23 - frameworks/libI420colorconvert/ColorConvert.cpp | 146 - frameworks/vavideodecoder/Android.mk | 26 - frameworks/vavideodecoder/VAVideoDecoder.cpp | 370 -- frameworks/vavideodecoder/VAVideoDecoder.h | 90 - frameworks/videoedit/Android.mk | 1 - frameworks/videoedit/lvpp/Android.mk | 55 - frameworks/videoedit/lvpp/VideoEditorToolsNV12.c | 1610 --------- frameworks/videoedit/lvpp/VideoEditorToolsNV12.h | 71 - frameworks/videoedit/stagefrightshells/Android.mk | 75 - .../IntelVideoEditorAVCEncoder.cpp | 517 --- .../stagefrightshells/IntelVideoEditorAVCEncoder.h | 85 - .../IntelVideoEditorEncoderSource.cpp | 279 -- .../IntelVideoEditorEncoderSource.h | 93 - .../IntelVideoEditorH263Encoder.cpp | 426 --- .../IntelVideoEditorH263Encoder.h | 84 - .../stagefrightshells/IntelVideoEditorUtils.cpp | 531 --- .../stagefrightshells/IntelVideoEditorUtils.h | 125 - .../stagefrightshells/MediaBufferPuller.cpp | 201 -- .../stagefrightshells/MediaBufferPuller.h | 110 - .../stagefrightshells/VideoEditor3gpReader.cpp | 2069 ------------ .../stagefrightshells/VideoEditorAudioDecoder.cpp | 1087 ------ .../stagefrightshells/VideoEditorAudioEncoder.cpp | 777 ----- .../stagefrightshells/VideoEditorBuffer.cpp | 331 -- .../stagefrightshells/VideoEditorBuffer.h | 182 - .../stagefrightshells/VideoEditorMp3Reader.cpp | 823 ----- .../stagefrightshells/VideoEditorVideoDecoder.cpp | 1810 ---------- .../stagefrightshells/VideoEditorVideoEncoder.cpp | 1284 ------- frameworks/videoedit/vss/Android.mk | 67 - frameworks/videoedit/vss/EditVideo_NV12.h | 59 - frameworks/videoedit/vss/M4AIR_API_NV12.c | 1201 ------- frameworks/videoedit/vss/M4AIR_API_NV12.h | 132 - frameworks/videoedit/vss/M4MCS_NV12.h | 42 - .../videoedit/vss/M4MCS_VideoPreProcessing_NV12.c | 416 --- .../videoedit/vss/M4VSS3GPP_EditVideo_NV12.c | 660 ---- frameworks/videoedit/vss/M4xVSS_NV12.h | 83 - frameworks/videoedit/vss/M4xVSS_internal_NV12.c | 3533 -------------------- 44 files changed, 20683 deletions(-) delete mode 100644 frameworks/asf_extractor/Android.mk delete mode 100644 frameworks/asf_extractor/AsfExtractor.cpp delete mode 100644 frameworks/asf_extractor/AsfExtractor.h delete mode 100644 frameworks/asf_extractor/MediaBufferPool.cpp delete mode 100644 frameworks/asf_extractor/MediaBufferPool.h delete mode 100644 frameworks/asf_extractor/MetaDataExt.h delete mode 100644 frameworks/libI420colorconvert/Android.mk delete mode 100644 frameworks/libI420colorconvert/ColorConvert.cpp delete mode 100644 
frameworks/vavideodecoder/Android.mk delete mode 100644 frameworks/vavideodecoder/VAVideoDecoder.cpp delete mode 100644 frameworks/vavideodecoder/VAVideoDecoder.h delete mode 100644 frameworks/videoedit/Android.mk delete mode 100644 frameworks/videoedit/lvpp/Android.mk delete mode 100644 frameworks/videoedit/lvpp/VideoEditorToolsNV12.c delete mode 100644 frameworks/videoedit/lvpp/VideoEditorToolsNV12.h delete mode 100644 frameworks/videoedit/stagefrightshells/Android.mk delete mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp delete mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.h delete mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.cpp delete mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.h delete mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.cpp delete mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.h delete mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp delete mode 100644 frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h delete mode 100644 frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp delete mode 100644 frameworks/videoedit/stagefrightshells/MediaBufferPuller.h delete mode 100644 frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp delete mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp delete mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorAudioEncoder.cpp delete mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorBuffer.cpp delete mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorBuffer.h delete mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorMp3Reader.cpp delete mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp delete mode 100644 frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp delete mode 100644 frameworks/videoedit/vss/Android.mk delete mode 100644 frameworks/videoedit/vss/EditVideo_NV12.h delete mode 100644 frameworks/videoedit/vss/M4AIR_API_NV12.c delete mode 100644 frameworks/videoedit/vss/M4AIR_API_NV12.h delete mode 100644 frameworks/videoedit/vss/M4MCS_NV12.h delete mode 100644 frameworks/videoedit/vss/M4MCS_VideoPreProcessing_NV12.c delete mode 100644 frameworks/videoedit/vss/M4VSS3GPP_EditVideo_NV12.c delete mode 100644 frameworks/videoedit/vss/M4xVSS_NV12.h delete mode 100644 frameworks/videoedit/vss/M4xVSS_internal_NV12.c diff --git a/Android.mk b/Android.mk index 20651e9..2948f35 100644 --- a/Android.mk +++ b/Android.mk @@ -8,9 +8,5 @@ include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_vbp/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk -include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/asf_extractor/Android.mk -include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/vavideodecoder/Android.mk -include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/libI420colorconvert/Android.mk -include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/frameworks/videoedit/Android.mk endif diff --git a/frameworks/asf_extractor/Android.mk b/frameworks/asf_extractor/Android.mk deleted file mode 100644 index 78afb17..0000000 --- a/frameworks/asf_extractor/Android.mk +++ /dev/null @@ -1,31 +0,0 @@ -ifeq ($(strip $(USE_INTEL_ASF_EXTRACTOR)),true) - -LOCAL_PATH := $(call 
my-dir) -include $(CLEAR_VARS) - - -LOCAL_SRC_FILES := \ - AsfExtractor.cpp \ - MediaBufferPool.cpp - -LOCAL_C_INCLUDES := \ - $(LOCAL_PATH) \ - $(TARGET_OUT_HEADERS)/libmix_asfparser \ - $(TOP)/frameworks/av/media/libstagefright/include \ - $(TOP)/frameworks/native/include/media/openmax - -LOCAL_COPY_HEADERS_TO := libmix_asf_extractor - -LOCAL_COPY_HEADERS := \ - AsfExtractor.h \ - MetaDataExt.h \ - MediaBufferPool.h - -LOCAL_CPPFLAGS += -DUSE_INTEL_ASF_EXTRACTOR -LOCAL_MODULE := libasfextractor -LOCAL_MODULE_TAGS := optional - -include $(BUILD_STATIC_LIBRARY) - - -endif diff --git a/frameworks/asf_extractor/AsfExtractor.cpp b/frameworks/asf_extractor/AsfExtractor.cpp deleted file mode 100644 index 0fa0e15..0000000 --- a/frameworks/asf_extractor/AsfExtractor.cpp +++ /dev/null @@ -1,759 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* -* Copyright (C) 2011 The Android Open Source Project -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. 
-*/ - - -//#define LOG_NDEBUG 0 -#define LOG_TAG "AsfExtractor" -#include - -#include - -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include - -#include "MetaDataExt.h" -#include "MediaBufferPool.h" -#include "AsfStreamParser.h" -#include "AsfExtractor.h" - - -namespace android { - -// The audio format tags that represent the input categories supported -// by the Windows Media Audio decoder, don't change it -enum WMAAudioFormats { - WAVE_FORMAT_MSAUDIO1 = 0x160, - WAVE_FORMAT_WMAUDIO2 = 0x161, - WAVE_FORMAT_WMAUDIO3X = 0x162, - WAVE_FORMAT_WMAUDIO_LOSSLESS = 0x163, - WAVE_FORMAT_WMAVOICE9 = 0x000A, - WAVE_FORMAT_WMAVOICE10 = 0x000B, -}; - -class ASFSource : public MediaSource { -public: - ASFSource(const sp &extractor, int trackIndex) - : mExtractor(extractor), - mTrackIndex(trackIndex) { - } - - virtual status_t start(MetaData *params = NULL) { - return OK; - } - - virtual status_t stop() { - return OK; - } - - virtual sp getFormat() { - return mExtractor->getTrackMetaData(mTrackIndex, 0); - } - - virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL) { - return mExtractor->read(mTrackIndex, buffer, options); - } - -protected: - virtual ~ASFSource() { - mExtractor = NULL; - } - -private: - sp mExtractor; - int mTrackIndex; - - ASFSource(const ASFSource &); - ASFSource &operator=(const ASFSource &); -}; - - -AsfExtractor::AsfExtractor(const sp &source) - : mDataSource(source), - mInitialized(false), - mHasIndexObject(false), - mFirstTrack(NULL), - mLastTrack(NULL), - mReadLock(), - mFileMetaData(new MetaData), - mParser(NULL), - mHeaderObjectSize(0), - mDataObjectSize(0), - mDataPacketBeginOffset(0), - mDataPacketEndOffset(0), - mDataPacketCurrentOffset(0), - mDataPacketSize(0), - mDataPacketData(NULL) { - mParser = new AsfStreamParser; -} - -AsfExtractor::~AsfExtractor() { - uninitialize(); - mDataSource = NULL; - mFileMetaData = NULL; - delete mParser; - mParser = NULL; -} - -sp AsfExtractor::getMetaData() { - status_t err = initialize(); - if (err != OK) { - return new MetaData; - } - - return mFileMetaData; -} - -size_t AsfExtractor::countTracks() { - status_t err = initialize(); - if (err != OK) { - return 0; - } - - size_t n = 0; - Track *track = mFirstTrack; - while (track) { - ++n; - track = track->next; - } - - ALOGV("track count is %d", n); - return n; -} - -sp AsfExtractor::getTrackMetaData(size_t index, uint32_t flags) { - status_t err = initialize(); - if (err != OK) { - return NULL; - } - - Track *track = getTrackByTrackIndex(index); - if (track == NULL) { - return NULL; - } - - // There is no thumbnail data so ignore flags: kIncludeExtensiveMetaData - return track->meta; -} - -sp AsfExtractor::getTrack(size_t index) { - status_t err; - if ((err = initialize()) != OK) { - return NULL; - } - - Track *track = getTrackByTrackIndex(index); - if (track == NULL) { - return NULL; - } - - // Assume this track is active - track->skipTrack = false; - return new ASFSource(this, index); -} - -status_t AsfExtractor::read( - int trackIndex, - MediaBuffer **buffer, - const MediaSource::ReadOptions *options) { - Track *track = getTrackByTrackIndex(trackIndex); - if (track == NULL) { - return BAD_VALUE; - } - - int64_t seekTimeUs; - MediaSource::ReadOptions::SeekMode mode; - if (!mParser->hasVideo() || (mParser->hasVideo() && mHasIndexObject)) { - if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) { - status_t err = seek_l(track, seekTimeUs, mode); - if (err != OK) { - return err; - } - } 
- } else { - ALOGW("No index object. Seek may not be supported!!!"); - } - - return read_l(track, buffer); -} - -status_t AsfExtractor::initialize() { - if (mInitialized) { - return OK; - } - - status_t status = OK; - // header object is the first mandatory object. The first 16 bytes - // is GUID of object, the following 8 bytes is size of object - if (mDataSource->readAt(16, &mHeaderObjectSize, 8) != 8) { - return ERROR_IO; - } - - uint8_t* headerObjectData = new uint8_t [mHeaderObjectSize]; - if (headerObjectData == NULL) { - return NO_MEMORY; - } - - if (mDataSource->readAt(0, headerObjectData, mHeaderObjectSize) != mHeaderObjectSize) { - return ERROR_IO; - } - status = mParser->parseHeaderObject(headerObjectData, mHeaderObjectSize); - if (status != ASF_PARSER_SUCCESS) { - ALOGE("Failed to parse header object."); - return ERROR_MALFORMED; - } - - delete [] headerObjectData; - headerObjectData = NULL; - - uint8_t dataObjectHeaderData[ASF_DATA_OBJECT_HEADER_SIZE]; - if (mDataSource->readAt(mHeaderObjectSize, dataObjectHeaderData, ASF_DATA_OBJECT_HEADER_SIZE) - != ASF_DATA_OBJECT_HEADER_SIZE) { - return ERROR_IO; - } - status = mParser->parseDataObjectHeader(dataObjectHeaderData, ASF_DATA_OBJECT_HEADER_SIZE); - if (status != ASF_PARSER_SUCCESS) { - ALOGE("Failed to parse data object header."); - return ERROR_MALFORMED; - } - - // first 16 bytes is GUID of data object - mDataObjectSize = *(uint64_t*)(dataObjectHeaderData + 16); - mDataPacketBeginOffset = mHeaderObjectSize + ASF_DATA_OBJECT_HEADER_SIZE; - mDataPacketEndOffset = mHeaderObjectSize + mDataObjectSize; - mDataPacketCurrentOffset = mDataPacketBeginOffset; - - // allocate memory for data packet - mDataPacketSize = mParser->getDataPacketSize(); - mDataPacketData = new uint8_t [mDataPacketSize]; - if (mDataPacketData == NULL) { - return NO_MEMORY; - } - - const AsfFileMediaInfo *fileMediaInfo = mParser->getFileInfo(); - if (fileMediaInfo && fileMediaInfo->seekable) { - uint64_t offset = mDataPacketEndOffset; - - // Find simple index object for time seeking. - // object header include 16 bytes of object GUID and 8 bytes of object size. - uint8_t objectHeader[24]; - int64_t objectSize; - for (;;) { - if (mDataSource->readAt(offset, objectHeader, 24) != 24) { - break; - } - - objectSize = *(int64_t *)(objectHeader + 16); - if (!AsfStreamParser::isSimpleIndexObject(objectHeader)) { - offset += objectSize; - continue; - } - mHasIndexObject = true; - uint8_t* indexObjectData = new uint8_t [objectSize]; - if (indexObjectData == NULL) { - // don't report as error, we just lose time seeking capability. 
- break; - } - if (mDataSource->readAt(offset, indexObjectData, objectSize) == objectSize) { - // Ignore return value - mParser->parseSimpleIndexObject(indexObjectData, objectSize); - } - delete [] indexObjectData; - break; - } - } - - if (mParser->hasVideo()) { - ALOGV("MEDIA_MIMETYPE_CONTAINER_ASF"); - mFileMetaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_ASF); - } else if (mParser->hasAudio() && mParser->getAudioInfo()->codecID >= WAVE_FORMAT_MSAUDIO1 && - mParser->getAudioInfo()->codecID <= WAVE_FORMAT_WMAUDIO_LOSSLESS) { - LOGV("MEDIA_MIMETYPE_AUDIO_WMA", mParser->getAudioInfo()->codecID); - mFileMetaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_WMA); - } else { - ALOGE("Content does not have neither audio nor video."); - return ERROR_UNSUPPORTED; - } - - // duration returned from parser is in 100-nanosecond unit, converted it to microseconds (us) - ALOGV("Duration is %.2f (sec)", mParser->getDuration()/1E7); - mFileMetaData->setInt64(kKeyDuration, mParser->getDuration() / SCALE_100_NANOSEC_TO_USEC); - - setupTracks(); - mInitialized = true; - return OK; -} - -void AsfExtractor::uninitialize() { - if (mDataPacketData) { - delete [] mDataPacketData; - mDataPacketData = NULL; - } - mDataPacketSize = 0; - - Track* track = mFirstTrack; - MediaBuffer* p; - while (track != NULL) { - track->meta = NULL; - if (track->bufferActive) { - track->bufferActive->release(); - track->bufferActive = NULL; - } - - int size = track->bufferQueue.size(); - for (int i = 0; i < size; i++) { - p = track->bufferQueue.editItemAt(i); - p->release(); - } - - track->bufferQueue.clear(); - delete track->bufferPool; - - track->meta = NULL; - mFirstTrack = track->next; - delete track; - track = mFirstTrack; - } - mFirstTrack = NULL; - mLastTrack = NULL; -} - -static const char* FourCC2MIME(uint32_t fourcc) { - // The first charater of FOURCC characters appears in the least-significant byte - // WVC1 => 0x31435657 - switch (fourcc) { - //case FOURCC('W', 'M', 'V', '1'): - //case FOURCC('W', 'M', 'V', '2'): - //case FOURCC('W', 'M', 'V', 'A'): - case FOURCC('1', 'V', 'M', 'W'): - ALOGW("WMV1 format is not supported."); - return "video/wmv1"; - case FOURCC('2', 'V', 'M', 'W'): - ALOGW("WMV2 format is not supported."); - return "video/wmv2"; - case FOURCC('A', 'V', 'M', 'W'): - ALOGW("WMV Advanced profile, assuming as WVC1 for now"); - return MEDIA_MIMETYPE_VIDEO_WMV; - //case FOURCC('W', 'M', 'V', '3'): - //case FOURCC('W', 'V', 'C', '1'): - case FOURCC('3', 'V', 'M', 'W'): - case FOURCC('1', 'C', 'V', 'W'): - return MEDIA_MIMETYPE_VIDEO_WMV; - default: - ALOGE("Unknown video format."); - return "video/unknown-type"; - } -} - -static const char* CodecID2MIME(uint32_t codecID) { - switch (codecID) { - // WMA version 1 - case WAVE_FORMAT_MSAUDIO1: - // WMA version 2 (7, 8, 9 series) - case WAVE_FORMAT_WMAUDIO2: - return MEDIA_MIMETYPE_AUDIO_WMA; - // WMA 9 lossless - case WAVE_FORMAT_WMAUDIO_LOSSLESS: - //return MEDIA_MIMETYPE_AUDIO_WMA_LOSSLESS; - return MEDIA_MIMETYPE_AUDIO_WMA; - // WMA voice 9 - case WAVE_FORMAT_WMAVOICE9: - // WMA voice 10 - case WAVE_FORMAT_WMAVOICE10: - ALOGW("WMA voice 9/10 is not supported."); - return "audio/wma-voice"; - default: - ALOGE("Unsupported Audio codec ID: %#x", codecID); - return "audio/unknown-type"; - } -} - - -status_t AsfExtractor::setupTracks() { - AsfAudioStreamInfo* audioInfo = mParser->getAudioInfo(); - AsfVideoStreamInfo* videoInfo = mParser->getVideoInfo(); - Track* track; - while (audioInfo || videoInfo) { - track = new Track; - if (mLastTrack == NULL) { - 
mFirstTrack = track; - mLastTrack = track; - } else { - mLastTrack->next = track; - mLastTrack = track; - } - - // this flag will be set to false within getTrack - track->skipTrack = true; - track->seekCompleted = false; - track->next = NULL; - track->meta = new MetaData; - track->bufferActive = NULL; - track->bufferPool = new MediaBufferPool; - - if (audioInfo) { - LOGV("streamNumber = %d\n, encryptedContentFlag= %d\n, timeOffset = %lld\n, - codecID = %d\n, numChannels=%d\n, sampleRate=%d\n, avgBitRate = %d\n, - blockAlignment =%d\n, bitsPerSample=%d\n, codecDataSize=%d\n", - audioInfo->streamNumber, audioInfo->encryptedContentFlag, - audioInfo->timeOffset, audioInfo->codecID, audioInfo->numChannels, - audioInfo->sampleRate, audioInfo->avgByteRate*8, audioInfo->blockAlignment, - audioInfo->bitsPerSample, audioInfo->codecDataSize); - - track->streamNumber = audioInfo->streamNumber; - track->encrypted = audioInfo->encryptedContentFlag; - track->meta->setInt32(kKeyChannelCount, audioInfo->numChannels); - track->meta->setInt32(kKeySampleRate, audioInfo->sampleRate); - track->meta->setInt32(kKeyWmaBlockAlign, audioInfo->blockAlignment); - track->meta->setInt32(kKeyBitPerSample, audioInfo->bitsPerSample); - track->meta->setInt32(kKeyBitRate, audioInfo->avgByteRate*8); - track->meta->setInt32(kKeyWmaFormatTag, audioInfo->codecID); - - if (audioInfo->codecDataSize) { - track->meta->setData( - kKeyConfigData, - kTypeConfigData, - audioInfo->codecData, - audioInfo->codecDataSize); - } - // duration returned is in 100-nanosecond unit - track->meta->setInt64(kKeyDuration, mParser->getDuration() / SCALE_100_NANOSEC_TO_USEC); - track->meta->setCString(kKeyMIMEType, CodecID2MIME(audioInfo->codecID)); - track->meta->setInt32(kKeySuggestedBufferSize, mParser->getDataPacketSize()); - audioInfo = audioInfo->next; - } else { - track->streamNumber = videoInfo->streamNumber; - track->encrypted = videoInfo->encryptedContentFlag; - track->meta->setInt32(kKeyWidth, videoInfo->width); - track->meta->setInt32(kKeyHeight, videoInfo->height); - if (videoInfo->codecDataSize) { - track->meta->setData( - kKeyConfigData, - kTypeConfigData, - videoInfo->codecData, - videoInfo->codecDataSize); - } - // duration returned is in 100-nanosecond unit - track->meta->setInt64(kKeyDuration, mParser->getDuration() / SCALE_100_NANOSEC_TO_USEC); - track->meta->setCString(kKeyMIMEType, FourCC2MIME(videoInfo->fourCC)); - int maxSize = mParser->getMaxObjectSize(); - if (maxSize == 0) { - // estimated maximum packet size. - maxSize = 10 * mParser->getDataPacketSize(); - } - track->meta->setInt32(kKeySuggestedBufferSize, maxSize); - if (mHasIndexObject) { - // set arbitary thumbnail time - track->meta->setInt64(kKeyThumbnailTime, mParser->getDuration() / (SCALE_100_NANOSEC_TO_USEC * 2)); - } else { - track->meta->setInt64(kKeyThumbnailTime, 0); - } - videoInfo = videoInfo->next; - } - } - - return OK; -} - -status_t AsfExtractor::seek_l(Track* track, int64_t seekTimeUs, MediaSource::ReadOptions::SeekMode mode) { - Mutex::Autolock lockSeek(mReadLock); - - // It is expected seeking will happen on all the tracks with the same seeking options. - // Only the first track receiving the seeking command will perform seeking and all other - // tracks just siliently ignore it. 
- - // TODO: potential problems in the following case: - // audio seek - // video read - // video seek - // video read - - if (track->seekCompleted) { - // seeking is completed through a different track - track->seekCompleted = false; - return OK; - } - - uint64_t targetSampleTimeUs = 0; - - // seek to next sync sample or previous sync sample - bool nextSync = false; - switch (mode) { - case MediaSource::ReadOptions::SEEK_NEXT_SYNC: - nextSync = true; - break; - // Always seek to the closest previous sync frame - case MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC: - case MediaSource::ReadOptions::SEEK_CLOSEST_SYNC: - - // Not supported, already seek to sync frame, so will not set kKeyTargetTime on bufferActive. - case MediaSource::ReadOptions::SEEK_CLOSEST: - default: - break; - } - - uint32_t packetNumber; - uint64_t targetTime; - // parser takes seek time in 100-nanosecond unit and returns target time in 100-nanosecond as well. - if (!mParser->seek(seekTimeUs * SCALE_100_NANOSEC_TO_USEC, nextSync, packetNumber, targetTime)) { - ALOGV("Seeking failed."); - return ERROR_END_OF_STREAM; - } - ALOGV("seek time = %.2f secs, actual time = %.2f secs", seekTimeUs/1E6, targetTime / 1E7); - - // convert to microseconds - targetSampleTimeUs = targetTime / SCALE_100_NANOSEC_TO_USEC; - mDataPacketCurrentOffset = mDataPacketBeginOffset + packetNumber * mDataPacketSize; - ALOGV("data packet offset = %lld", mDataPacketCurrentOffset); - - // flush all pending buffers on all the tracks - Track* temp = mFirstTrack; - while (temp != NULL) { - Mutex::Autolock lockTrack(temp->lock); - if (temp->bufferActive) { - temp->bufferActive->release(); - temp->bufferActive = NULL; - } - - int size = temp->bufferQueue.size(); - for (int i = 0; i < size; i++) { - MediaBuffer* buffer = temp->bufferQueue.editItemAt(i); - buffer->release(); - } - temp->bufferQueue.clear(); - - if (temp != track) { - // notify all other tracks seeking is completed. - // this flag is reset when seeking request is made on each track. - // don't set this flag on the driving track so a new seek can be made. - temp->seekCompleted = true; - } - temp = temp->next; - } - - return OK; -} - -status_t AsfExtractor::read_l(Track *track, MediaBuffer **buffer) { - status_t err = OK; - while (err == OK) { - Mutex::Autolock lock(track->lock); - if (track->bufferQueue.size() != 0) { - *buffer = track->bufferQueue[0]; - track->bufferQueue.removeAt(0); - return OK; - } - track->lock.unlock(); - - err = readPacket(); - } - ALOGE("read_l failed."); - return err; -} - -status_t AsfExtractor::readPacket() { - Mutex::Autolock lock(mReadLock); - if (mDataPacketCurrentOffset + mDataPacketSize > mDataPacketEndOffset) { - ALOGI("readPacket hits end of stream."); - return ERROR_END_OF_STREAM; - } - - if (mDataSource->readAt(mDataPacketCurrentOffset, mDataPacketData, mDataPacketSize) != - mDataPacketSize) { - return ERROR_END_OF_STREAM; - } - - // update next read position - mDataPacketCurrentOffset += mDataPacketSize; - AsfPayloadDataInfo *payloads = NULL; - int status = mParser->parseDataPacket(mDataPacketData, mDataPacketSize, &payloads); - if (status != ASF_PARSER_SUCCESS || payloads == NULL) { - ALOGE("Failed to parse data packet. 
status = %d", status); - return ERROR_END_OF_STREAM; - } - - AsfPayloadDataInfo* payload = payloads; - while (payload) { - Track* track = getTrackByStreamNumber(payload->streamNumber); - if (track == NULL || track->skipTrack) { - payload = payload->next; - continue; - } - if (payload->mediaObjectLength == payload->payloadSize || - payload->offsetIntoMediaObject == 0) { - // a comple object or the first payload of fragmented object - MediaBuffer *buffer = NULL; - status = track->bufferPool->acquire_buffer( - payload->mediaObjectLength, &buffer); - if (status != OK) { - ALOGE("Failed to acquire buffer."); - mParser->releasePayloadDataInfo(payloads); - return status; - } - memcpy(buffer->data(), - payload->payloadData, - payload->payloadSize); - - buffer->set_range(0, payload->mediaObjectLength); - // kKeyTime is in microsecond unit (usecs) - // presentationTime is in mililsecond unit (ms) - buffer->meta_data()->setInt64(kKeyTime,(uint64_t) payload->presentationTime * 1000); - - if (payload->keyframe) { - buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); - } - - if (payload->mediaObjectLength == payload->payloadSize) { - Mutex::Autolock lockTrack(track->lock); - // a complete object - track->bufferQueue.push(buffer); - } else { - // the first payload of a fragmented object - track->bufferActive = buffer; - if (track->encrypted) { - Mutex::Autolock lockTrack(track->lock); - MediaBuffer* copy = NULL; - track->bufferPool->acquire_buffer(payload->payloadSize, ©); - copy->meta_data()->setInt64(kKeyTime,(uint64_t) payload->presentationTime * 1000); - memcpy(copy->data(), payload->payloadData, payload->payloadSize); - copy->set_range(0, payload->payloadSize); - track->bufferQueue.push(copy); - } - } - } else { - if (track->bufferActive == NULL) { - ALOGE("Receiving corrupt or discontinuous data packet."); - payload = payload->next; - continue; - } - // TODO: check object number and buffer size!!!!!!!!!!!!!! - // the last payload or the middle payload of a fragmented object - memcpy( - (uint8_t*)track->bufferActive->data() + payload->offsetIntoMediaObject, - payload->payloadData, - payload->payloadSize); - - if (payload->offsetIntoMediaObject + payload->payloadSize == - payload->mediaObjectLength) { - // the last payload of a fragmented object - // for encrypted content, push a cloned media buffer to vector instead. 
- if (!track->encrypted) - { - Mutex::Autolock lockTrack(track->lock); - track->bufferQueue.push(track->bufferActive); - track->bufferActive = NULL; - } else { - Mutex::Autolock lockTrack(track->lock); - track->bufferActive->set_range(payload->offsetIntoMediaObject, payload->payloadSize); - track->bufferQueue.push(track->bufferActive); - track->bufferActive = NULL; - } - } else { - // middle payload of a fragmented object - if (track->encrypted) { - Mutex::Autolock lockTrack(track->lock); - MediaBuffer* copy = NULL; - int64_t keytime; - track->bufferPool->acquire_buffer(payload->payloadSize, ©); - track->bufferActive->meta_data()->findInt64(kKeyTime, &keytime); - copy->meta_data()->setInt64(kKeyTime, keytime); - memcpy(copy->data(), payload->payloadData, payload->payloadSize); - copy->set_range(0, payload->payloadSize); - track->bufferQueue.push(copy); - } - } - } - payload = payload->next; - }; - - mParser->releasePayloadDataInfo(payloads); - return OK; -} - -AsfExtractor::Track* AsfExtractor::getTrackByTrackIndex(int index) { - Track *track = mFirstTrack; - while (index > 0) { - if (track == NULL) { - return NULL; - } - - track = track->next; - --index; - } - return track; -} - -AsfExtractor::Track* AsfExtractor::getTrackByStreamNumber(int stream) { - Track *track = mFirstTrack; - while (track != NULL) { - if (track->streamNumber == stream) { - return track; - } - track = track->next; - } - return NULL; -} - -bool SniffAsf( - const sp &source, - String8 *mimeType, - float *confidence, - sp *) { - uint8_t guid[16]; - if (source->readAt(0, guid, 16) != 16) { - return false; - } - if (!AsfStreamParser::isHeaderObject(guid)) { - return false; - } - - *mimeType = MEDIA_MIMETYPE_CONTAINER_ASF; - *confidence = 0.4f; - return true; -} - -} // namespace android - diff --git a/frameworks/asf_extractor/AsfExtractor.h b/frameworks/asf_extractor/AsfExtractor.h deleted file mode 100644 index 52a3a5c..0000000 --- a/frameworks/asf_extractor/AsfExtractor.h +++ /dev/null @@ -1,142 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* -* Copyright (C) 2011 The Android Open Source Project -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. 
-* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ - - -#ifndef ASF_EXTRACTOR_H_ -#define ASF_EXTRACTOR_H_ - -#include -#include -#include -#include - - -namespace android { - -struct AMessage; -class DataSource; -//class String8; - - -class AsfExtractor : public MediaExtractor { -public: - // Extractor assumes ownership of "source". - AsfExtractor(const sp &source); - virtual ~AsfExtractor(); - - virtual size_t countTracks(); - virtual sp getTrack(size_t index); - virtual sp getTrackMetaData(size_t index, uint32_t flags); - virtual sp getMetaData(); - -private: - status_t read( - int trackIndex, - MediaBuffer **buffer, - const MediaSource::ReadOptions *options = NULL); - - friend class ASFSource; - -private: - struct Track { - Track *next; - sp meta; - bool skipTrack; - bool seekCompleted; - bool encrypted; - uint8_t streamNumber; - - // outgoing buffer queue (ready for decoding) - Vector bufferQueue; - - // buffer pool - class MediaBufferPool *bufferPool; - - // buffer currently being used to read payload data - MediaBuffer *bufferActive; - Mutex lock; - }; - - sp mDataSource; - bool mInitialized; - bool mHasIndexObject; - Track *mFirstTrack; - Track *mLastTrack; - - Mutex mReadLock; - sp mFileMetaData; - class AsfStreamParser *mParser; - - int64_t mHeaderObjectSize; - int64_t mDataObjectSize; - - int64_t mDataPacketBeginOffset; - int64_t mDataPacketEndOffset; - int64_t mDataPacketCurrentOffset; - - int64_t mDataPacketSize; - uint8_t *mDataPacketData; - - enum { - // 100 nano seconds to micro second - SCALE_100_NANOSEC_TO_USEC = 10, - }; - - AsfExtractor(const AsfExtractor &); - AsfExtractor &operator=(const AsfExtractor &); - -private: - struct Track; - status_t initialize(); - void uninitialize(); - status_t setupTracks(); - inline Track* getTrackByTrackIndex(int index); - inline Track* getTrackByStreamNumber(int stream); - status_t seek_l(Track* track, int64_t seekTimeUs, MediaSource::ReadOptions::SeekMode mode); - status_t read_l(Track *track, MediaBuffer **buffer); - status_t readPacket(); -}; - - -bool SniffAsf( - const sp &source, - String8 *mimeType, - float *confidence, - sp *); - -} // namespace android - -#endif // ASF_EXTRACTOR_H_ diff --git a/frameworks/asf_extractor/MediaBufferPool.cpp b/frameworks/asf_extractor/MediaBufferPool.cpp deleted file mode 100644 index c5d4a37..0000000 --- a/frameworks/asf_extractor/MediaBufferPool.cpp +++ /dev/null @@ -1,125 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. 
No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* -* Copyright (C) 2011 The Android Open Source Project -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ - - -#define LOG_TAG "MediaBufferPool" -#include - -#include -#include -#include "MediaBufferPool.h" - -#define DEFAULT_PAGE_SIZE 4096 - -namespace android { - -MediaBufferPool::MediaBufferPool() - : mMaxBufferSize(0), - mFirstBuffer(NULL), - mLastBuffer(NULL) { -} - -MediaBufferPool::~MediaBufferPool() { - MediaBuffer *next; - for (MediaBuffer *buffer = mFirstBuffer; buffer != NULL; - buffer = next) { - next = buffer->nextBuffer(); - - CHECK_EQ(buffer->refcount(), 0); - - buffer->setObserver(NULL); - buffer->release(); - } -} - -status_t MediaBufferPool::acquire_buffer(int size, MediaBuffer **out) { - Mutex::Autolock autoLock(mLock); - - MediaBuffer *next = NULL; - while (mFirstBuffer) { - if ((int)mFirstBuffer->size() >= size) { - next = mFirstBuffer->nextBuffer(); - - // pop first buffer out of list - *out = mFirstBuffer; - mFirstBuffer->add_ref(); - mFirstBuffer->reset(); - - mFirstBuffer = next; - if (mFirstBuffer == NULL) { - mLastBuffer = NULL; - } - return OK; - } else { - // delete the first buffer from the list - next = mFirstBuffer->nextBuffer(); - mFirstBuffer->setObserver(NULL); - mFirstBuffer->release(); - mFirstBuffer = next; - } - } - - // not a single buffer matches the requirement. Allocating a new buffer. - - mFirstBuffer = NULL; - mLastBuffer = NULL; - - size = ((size + DEFAULT_PAGE_SIZE - 1)/DEFAULT_PAGE_SIZE) * DEFAULT_PAGE_SIZE; - if (size < mMaxBufferSize) { - size = mMaxBufferSize; - } else { - mMaxBufferSize = size; - } - MediaBuffer *p = new MediaBuffer(size); - *out = p; - return (p != NULL) ? OK : NO_MEMORY; -} - -void MediaBufferPool::signalBufferReturned(MediaBuffer *buffer) { - Mutex::Autolock autoLock(mLock); - - buffer->setObserver(this); - - if (mLastBuffer) { - mLastBuffer->setNextBuffer(buffer); - } else { - mFirstBuffer = buffer; - } - - mLastBuffer = buffer; -} - -} // namespace android diff --git a/frameworks/asf_extractor/MediaBufferPool.h b/frameworks/asf_extractor/MediaBufferPool.h deleted file mode 100644 index bfacb1f..0000000 --- a/frameworks/asf_extractor/MediaBufferPool.h +++ /dev/null @@ -1,75 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. 
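The pool above recycles returned buffers first-fit and, on a miss, rounds the request up to a whole number of 4 KB pages and never drops below the largest size seen, so steady-state playback converges on a single reusable allocation size. A standalone restatement of just that sizing rule (chooseAllocSize is an illustrative name):

    #include <algorithm>
    #include <cstddef>

    static const size_t kPageSize = 4096;  // DEFAULT_PAGE_SIZE in the pool above

    // Round the request up to a page multiple, then clamp to the high-water
    // mark, mirroring acquire_buffer(): one buffer size eventually fits all.
    static size_t chooseAllocSize(size_t requested, size_t& maxSeen) {
        size_t size = ((requested + kPageSize - 1) / kPageSize) * kPageSize;
        size = std::max(size, maxSeen);
        maxSeen = size;
        return size;
    }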
- * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* -* Copyright (C) 2011 The Android Open Source Project -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ - - -#ifndef MEDIA_BUFFER_POOL_H_ - -#define MEDIA_BUFFER_POOL_H_ - -#include -#include -#include - -namespace android { - -class MediaBuffer; -class MetaData; - -class MediaBufferPool : public MediaBufferObserver { -public: - MediaBufferPool(); - ~MediaBufferPool(); - - status_t acquire_buffer(int size, MediaBuffer **buffer); - -protected: - virtual void signalBufferReturned(MediaBuffer *buffer); - -private: - friend class MediaBuffer; - - Mutex mLock; - int mMaxBufferSize; - - MediaBuffer *mFirstBuffer, *mLastBuffer; - - MediaBufferPool(const MediaBufferPool &); - MediaBufferPool &operator=(const MediaBufferPool &); -}; - -} // namespace android - -#endif // MEDIA_BUFFER_POOL_H_ diff --git a/frameworks/asf_extractor/MetaDataExt.h b/frameworks/asf_extractor/MetaDataExt.h deleted file mode 100644 index ad03d1c..0000000 --- a/frameworks/asf_extractor/MetaDataExt.h +++ /dev/null @@ -1,73 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. 
- * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* -* Copyright (C) 2011 The Android Open Source Project -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ - - -#ifndef META_DATA_EXT_H_ -#define META_DATA_EXT_H_ - -#include - -namespace android { - -#define MEDIA_MIMETYPE_AUDIO_WMA "audio/x-ms-wma" -#define MEDIA_MIMETYPE_AUDIO_AC3 "audio/ac3" -#define MEDIA_MIMETYPE_VIDEO_WMV "video/wmv" -#define MEDIA_MIMETYPE_CONTAINER_ASF "video/x-ms-asf" -#define MEDIA_MIMETYPE_VIDEO_VA "video/x-va" -#define MEDIA_MIMETYPE_AUDIO_WMA_VOICE "audio/wma-voice" - - -enum -{ - // value by default takes int32_t unless specified - kKeyConfigData = 'kcfg', // raw data - kKeyProtected = 'prot', // int32_t (bool) - kKeyCropLeft = 'clft', - kKeyCropRight = 'crit', - kKeyCropTop = 'ctop', - kKeyCropBottom = 'cbtm', - kKeySuggestedBufferSize = 'sgbz', - kKeyWantRawOutput = 'rawo' -}; - -enum -{ - kTypeConfigData = 'tcfg', -}; - -} // namespace android - -#endif // META_DATA_EXT_H_ diff --git a/frameworks/libI420colorconvert/Android.mk b/frameworks/libI420colorconvert/Android.mk deleted file mode 100644 index 5abf9bc..0000000 --- a/frameworks/libI420colorconvert/Android.mk +++ /dev/null @@ -1,23 +0,0 @@ -LOCAL_PATH := $(call my-dir) -include $(CLEAR_VARS) - -LOCAL_SRC_FILES := \ - ColorConvert.cpp - -LOCAL_C_INCLUDES:= \ - $(TOP)/frameworks/native/include/media/openmax \ - $(TOP)/frameworks/native/include/media/editor - -LOCAL_SHARED_LIBRARIES := \ - -LOCAL_MODULE_TAGS := optional - -LOCAL_MODULE := libI420colorconvert - -ifeq ($(USE_VIDEOEDITOR_INTEL_NV12_VERSION),true) -LOCAL_CFLAGS += -DVIDEOEDITOR_INTEL_NV12_VERSION -endif - -include $(BUILD_SHARED_LIBRARY) - - diff --git a/frameworks/libI420colorconvert/ColorConvert.cpp b/frameworks/libI420colorconvert/ColorConvert.cpp deleted file mode 100644 index 825ec75..0000000 --- a/frameworks/libI420colorconvert/ColorConvert.cpp +++ /dev/null @@ -1,146 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. 
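The metadata keys above follow the stagefright convention of FourCC codes: four ASCII characters packed into an int32_t, e.g. 'kcfg' for config data. Multi-character literals are implementation-defined in C++, so a sketch with an explicit pack (fourcc is a hypothetical helper) makes the assumed layout visible:

    #include <cstdint>

    // Packs four ASCII chars the way gcc/clang evaluate 'kcfg' (big-endian).
    constexpr int32_t fourcc(char a, char b, char c, char d) {
        return (int32_t(a) << 24) | (int32_t(b) << 16) |
               (int32_t(c) << 8)  |  int32_t(d);
    }

    static_assert(fourcc('k', 'c', 'f', 'g') == 'kcfg',
                  "multi-char literal layout is implementation-defined");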
No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include -#include -#include - -static int getDecoderOutputFormat() { - return OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar; -} - -static int convertDecoderOutputToI420( - void* srcBits, int srcWidth, int srcHeight, ARect srcRect, void* dstBits) { - - const uint8_t *pSrc_y = (const uint8_t *)srcBits + - srcWidth * srcRect.top + srcRect.left; - const uint8_t *pSrc_uv = (const uint8_t *)pSrc_y + - srcWidth * (srcHeight - srcRect.top / 2); - int dstWidth = srcRect.right - srcRect.left + 1; - int dstHeight = srcRect.bottom - srcRect.top + 1; - size_t dst_y_size = dstWidth * dstHeight; - -#ifndef VIDEOEDITOR_INTEL_NV12_VERSION - size_t dst_uv_stride = dstWidth / 2; - size_t dst_uv_size = dstWidth / 2 * dstHeight / 2; - uint8_t *pDst_y = (uint8_t *)dstBits; - uint8_t *pDst_u = pDst_y + dst_y_size; - uint8_t *pDst_v = pDst_u + dst_uv_size; - - for (int y = 0; y < dstHeight; ++y) { - memcpy(pDst_y, pSrc_y, dstWidth); - pSrc_y += srcWidth; - pDst_y += dstWidth; - } - - size_t tmp = (dstWidth + 1) / 2; - for (int y = 0; y < (dstHeight + 1) / 2; ++y) { - for (size_t x = 0; x < tmp; ++x) { - pDst_u[x] = pSrc_uv[2 * x]; - pDst_v[x] = pSrc_uv[2 * x + 1]; - } - pSrc_uv += srcWidth; - pDst_u += dst_uv_stride; - pDst_v += dst_uv_stride; - } -#else - uint8_t *pDst_y = (uint8_t *)dstBits; - memcpy(pDst_y,pSrc_y,dst_y_size*3/2); -#endif - - return 0; -} - -static int getEncoderInputFormat() { - return OMX_COLOR_FormatYUV420SemiPlanar; -} - -static int convertI420ToEncoderInput( - void* srcBits, int srcWidth, int srcHeight, - int dstWidth, int dstHeight, ARect dstRect, - void* dstBits) { - uint8_t *pSrc_y = (uint8_t*) srcBits; - uint8_t *pDst_y = (uint8_t*) dstBits; - -#ifndef VIDEOEDITOR_INTEL_NV12_VERSION - for(int i=0; i < srcHeight; i++) { - memcpy(pDst_y, pSrc_y, srcWidth); - pSrc_y += srcWidth; - pDst_y += dstWidth; - } - uint8_t* pSrc_u = (uint8_t*)srcBits + (srcWidth * srcHeight); - uint8_t* pSrc_v = (uint8_t*)pSrc_u + (srcWidth / 2) * (srcHeight / 2); - uint8_t* pDst_uv = (uint8_t*)dstBits + dstWidth * dstHeight; - - for(int i=0; i < srcHeight / 2; i++) { - for(int j=0, k=0; j < srcWidth / 2; j++, k+=2) { - pDst_uv[k] = pSrc_u[j]; - pDst_uv[k+1] = pSrc_v[j]; - } - pDst_uv += dstWidth; - pSrc_u += srcWidth / 2; - pSrc_v += srcWidth / 
2; - } -#else - memcpy(pDst_y,pSrc_y,dstWidth*dstHeight*3/2); -#endif - - return 0; -} - -static int getEncoderInputBufferInfo( - int actualWidth, int actualHeight, - int* encoderWidth, int* encoderHeight, - ARect* encoderRect, int* encoderBufferSize) { - - *encoderWidth = actualWidth; - *encoderHeight = actualHeight; - encoderRect->left = 0; - encoderRect->top = 0; - encoderRect->right = actualWidth - 1; - encoderRect->bottom = actualHeight - 1; - *encoderBufferSize = (actualWidth * actualHeight * 3 / 2); - - return 0; -} - -extern "C" void getI420ColorConverter(II420ColorConverter *converter) { - converter->getDecoderOutputFormat = getDecoderOutputFormat; - converter->convertDecoderOutputToI420 = convertDecoderOutputToI420; - converter->getEncoderInputFormat = getEncoderInputFormat; - converter->convertI420ToEncoderInput = convertI420ToEncoderInput; - converter->getEncoderInputBufferInfo = getEncoderInputBufferInfo; -} diff --git a/frameworks/vavideodecoder/Android.mk b/frameworks/vavideodecoder/Android.mk deleted file mode 100644 index a352e69..0000000 --- a/frameworks/vavideodecoder/Android.mk +++ /dev/null @@ -1,26 +0,0 @@ -ifeq ($(strip $(USE_INTEL_VA)),true) - -LOCAL_PATH := $(call my-dir) -include $(CLEAR_VARS) - -LOCAL_COPY_HEADERS_TO := libvavideodecoder -LOCAL_COPY_HEADERS := VAVideoDecoder.h - -LOCAL_SRC_FILES := \ - VAVideoDecoder.cpp \ - - -LOCAL_C_INCLUDES := \ - $(TOP)/frameworks/av/media/libstagefright/include \ - $(TOP)/frameworks/native/include/media/openmax \ - $(TARGET_OUT_HEADERS)/libmix_videodecoder \ - $(TARGET_OUT_HEADERS)/libmix_asf_extractor \ - $(TARGET_OUT_HEADERS)/libva/ - - -LOCAL_MODULE:= libvavideodecoder -LOCAL_MODULE_TAGS := optional - -include $(BUILD_STATIC_LIBRARY) - -endif diff --git a/frameworks/vavideodecoder/VAVideoDecoder.cpp b/frameworks/vavideodecoder/VAVideoDecoder.cpp deleted file mode 100644 index 165b012..0000000 --- a/frameworks/vavideodecoder/VAVideoDecoder.cpp +++ /dev/null @@ -1,370 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* -* Copyright (C) 2011 The Android Open Source Project -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. 
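Both converters above reduce to a luma copy plus a chroma (de)interleave: the decoder's semi-planar output stores one full Y plane followed by interleaved UV pairs, while I420 keeps U and V planar. A self-contained sketch of the NV12-to-I420 direction, assuming even dimensions and tightly packed rows (stride == width; nv12ToI420 is an illustrative helper):

    #include <cstdint>
    #include <cstring>

    static void nv12ToI420(const uint8_t* src, int width, int height, uint8_t* dst) {
        const size_t ySize = size_t(width) * height;
        memcpy(dst, src, ySize);              // Y plane is bit-identical
        const uint8_t* uv = src + ySize;      // U0 V0 U1 V1 ...
        uint8_t* u = dst + ySize;
        uint8_t* v = u + ySize / 4;           // quarter-size chroma planes
        for (size_t i = 0; i < ySize / 4; ++i) {
            *u++ = uv[2 * i];
            *v++ = uv[2 * i + 1];
        }
    }

The encoder-input direction in convertI420ToEncoderInput is the same loop run in reverse, re-interleaving U and V.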
-* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ - - - -//#define LOG_NDEBUG 0 -#define LOG_TAG "VAVideoDecoder" -#include - -#include -//#include -#include -#include -#include -#include -#include -#include "MetaDataExt.h" -#include "VAVideoDecoder.h" -#include "VideoDecoderInterface.h" -#include "VideoDecoderHost.h" -#include -namespace android { - -VAVideoDecoder::VAVideoDecoder(const sp &source) - : mSource(source), - mStarted(false), - mRawOutput(false), - mInputBuffer(NULL), - mTargetTimeUs(-1), - mFrameIndex(0), - mErrorCount(0), - mDecoder(NULL) { - - const char *mime; - CHECK(source->getFormat()->findCString(kKeyMIMEType, &mime)); - mDecoder = createVideoDecoder(mime); - if (mDecoder == NULL) { - LOGE("Failed to create video decoder for %s", mime); - } - - mFormat = new MetaData; - mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VA); - - int32_t width, height; - CHECK(mSource->getFormat()->findInt32(kKeyWidth, &width)); - CHECK(mSource->getFormat()->findInt32(kKeyHeight, &height)); - mFormat->setInt32(kKeyWidth, width); - mFormat->setInt32(kKeyHeight, height); - mFormat->setInt32(kKeyColorFormat, OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar); - mFormat->setCString(kKeyDecoderComponent, "VAVideoDecoder"); - - int64_t durationUs; - if (mSource->getFormat()->findInt64(kKeyDuration, &durationUs)) { - mFormat->setInt64(kKeyDuration, durationUs); - } - -} - -VAVideoDecoder::~VAVideoDecoder() { - if (mStarted) { - stop(); - } - releaseVideoDecoder(mDecoder); - mDecoder = NULL; -} - -status_t VAVideoDecoder::start(MetaData *params) { - CHECK(!mStarted); - - if (mDecoder == NULL) { - LOGE("Decoder is not created."); - return UNKNOWN_ERROR; - } - - int32_t ret; - char str[255]; - sprintf(str, "%d", gettid()); - ret = setenv("PSB_VIDEO_THUMBNAIL", str, 1); - if (ret) { - LOGW("Set environmnet'PSB_VIDEO_SURFACE_MMU' fail\n"); - } - uint32_t type; - const void *data; - size_t size; - sp meta = mSource->getFormat(); - VideoConfigBuffer configBuffer; - memset(&configBuffer, 0, sizeof(VideoConfigBuffer)); - - if (meta->findData(kKeyConfigData, &type, &data, &size) || - meta->findData(kKeyESDS, &type, &data, &size) || - meta->findData(kKeyAVCC, &type, &data, &size)) { - configBuffer.data = (uint8_t*)data; - configBuffer.size = size; - } else { - LOGW("No configuration data found!"); - } - - // A threshold is used here to avoid mediaserver allocate too big - // memory (like 3GB) and crash in MPEG4Source::start. The - // threshold is set to be input port size limit for Intel decoders. 
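The check that follows only rejects values outside what the hardware input port can accept; restated standalone (isSaneInputSize is an illustrative name, with the bound taken from MAXINPUTSIZE in VAVideoDecoder.h below):

    #include <cstdint>

    static const int32_t kMaxInputSize = 1920 * 1080 * 3;  // one 1080p 4:4:4 frame

    // Negative sizes or sizes beyond the port limit are treated as malformed.
    static bool isSaneInputSize(int32_t size) {
        return size >= 0 && size <= kMaxInputSize;
    }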
- int32_t max_size; - if (meta->findInt32(kKeyMaxInputSize, &max_size)) { - if (max_size > MAXINPUTSIZE || max_size < 0) { - LOGE("Invalid kKeyMaxInputSize!"); - return ERROR_MALFORMED; - } - } - configBuffer.flag |= WANT_RAW_OUTPUT; - mFormat->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420SemiPlanar); - mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); - mRawOutput = true; - LOGW("Decoder will output raw data."); - - mFormat->findInt32(kKeyWidth, &configBuffer.width); - mFormat->findInt32(kKeyHeight, &configBuffer.height); - - Decode_Status res = mDecoder->start(&configBuffer); - if (res != DECODE_SUCCESS) { - LOGE("Failed to start decoder. Error = %d", res); - return UNKNOWN_ERROR; - } - - // TODO: update format meta, including frame cropping information. - - // create MediaBuffer pool only when output is VASurface - if (mRawOutput == false) { - for (int32_t i = 0; i < NUM_OF_MEDIA_BUFFER; ++i) { - MediaBuffer *buffer = new MediaBuffer(sizeof(VideoRenderBuffer)); - buffer->setObserver(this); - // output is unreadable VASurface - buffer->meta_data()->setInt32(kKeyIsUnreadable, 1); - buffer->meta_data()->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VA); - buffer->meta_data()->setInt32(kKeyColorFormat,OMX_COLOR_FormatYUV420SemiPlanar); - mFrames.push(buffer); - } - } - - mSource->start(); - unsetenv("PSB_VIDEO_THUMBNAIL"); - - mFrameIndex = 0; - mErrorCount = 0; - mTargetTimeUs = -1; - mStarted = true; - return OK; -} - - -status_t VAVideoDecoder::stop() { - CHECK(mStarted); - - if (mInputBuffer) { - mInputBuffer->release(); - mInputBuffer = NULL; - } - - for (size_t i = 0; i < mFrames.size(); ++i) { - MediaBuffer *buffer = mFrames.editItemAt(i); - buffer->setObserver(NULL); - buffer->release(); - } - mFrames.clear(); - mSource->stop(); - mDecoder->stop(); - - mFrameIndex = 0; - mErrorCount = 0; - mRawOutput = false; - mStarted = false; - - return OK; -} - -sp VAVideoDecoder::getFormat() { - return mFormat; -} - -MediaBuffer *VAVideoDecoder::getOutputBuffer(bool bDraining) { - const VideoRenderBuffer* buffer = mDecoder->getOutput(bDraining); - if (buffer == NULL) { - return NULL; - } - // indicate buffer is rendered - buffer->renderDone = true; - - if (mTargetTimeUs >= 0) { - CHECK(buffer->timeStamp <= mTargetTimeUs); - if (buffer->timeStamp < mTargetTimeUs) { - // We're still waiting for the frame with the matching - // timestamp and we won't return the current one. 
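This is the precise-seek pre-roll pattern: after a seek the source tags the first buffer with kKeyTargetTime, and frames that decode to an earlier timestamp are produced but not delivered. The predicate, restated standalone (shouldDeliver is an illustrative name):

    #include <cstdint>

    // targetTimeUs < 0 means no pending seek target.
    static bool shouldDeliver(int64_t frameTimeUs, int64_t& targetTimeUs) {
        if (targetTimeUs < 0) return true;             // nothing pending
        if (frameTimeUs < targetTimeUs) return false;  // pre-roll frame: drop
        targetTimeUs = -1;                             // target reached: clear
        return true;
    }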
- LOGV("skipping frame at %lld us", buffer->timeStamp); - return NULL; - } else { - LOGV("found target frame at %lld us", buffer->timeStamp); - mTargetTimeUs = -1; - } - } - - MediaBuffer *mbuf = NULL; - if (mRawOutput == false) { - mbuf = mFrames.editItemAt(mFrameIndex); - mFrameIndex++; - if (mFrameIndex >= mFrames.size()) { - mFrameIndex = 0; - } - memcpy(mbuf->data(), buffer, sizeof(VideoRenderBuffer)); - mbuf->meta_data()->setInt64(kKeyTime, buffer->timeStamp); - mbuf->set_range(0, mbuf->size()); - mbuf->add_ref(); - } else { - mbuf = new MediaBuffer(buffer->rawData->data, buffer->rawData->size); - mbuf->meta_data()->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); - mbuf->meta_data()->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420SemiPlanar); - mbuf->meta_data()->setInt64(kKeyTime, buffer->timeStamp); - } - - return mbuf; -} - -status_t VAVideoDecoder::read(MediaBuffer **out, const ReadOptions *options) { - *out = NULL; - if (mDecoder == NULL) { - LOGE("Decoder is not created."); - return UNKNOWN_ERROR; - } - - int64_t seekTimeUs; - ReadOptions::SeekMode mode; - ReadOptions seekOptions; - bool seeking = false; - - if (options && options->getSeekTo(&seekTimeUs, &mode)) { - LOGV("seek requested to %lld us (%.2f secs)", seekTimeUs, seekTimeUs / 1E6); - - if (seekTimeUs < 0) { - LOGE("Invalid seek time : %ld", (long int32_t)seekTimeUs); - seekTimeUs = 0; - //return ERROR_END_OF_STREAM; - } - //CHECK(seekTimeUs >= 0); - - seekOptions.setSeekTo(seekTimeUs, mode); - mDecoder->flush(); - seeking = true; - } - - for (;;) { - status_t err = mSource->read(&mInputBuffer, &seekOptions); - seekOptions.clearSeekTo(); - - if (err != OK) { - LOGE("Failed to read buffer from source extractor."); - // drain the output buffer when end of stream - *out = getOutputBuffer(true); - return (*out == NULL) ? err : (status_t)OK; - } - - if (mInputBuffer->range_length() > 0) { - break; - } - - mInputBuffer->release(); - mInputBuffer = NULL; - } - - if (mInputBuffer == NULL) { - LOGE("Unexpected NULL input buffer."); - return ERROR_END_OF_STREAM; - } - - if (seeking) { - int64_t targetTimeUs; - if (mInputBuffer->meta_data()->findInt64(kKeyTargetTime, &targetTimeUs) && targetTimeUs >= 0) { - mTargetTimeUs = targetTimeUs; - } else { - mTargetTimeUs = -1; - } - } - - status_t err = UNKNOWN_ERROR; - - // prepare decoding buffer - VideoDecodeBuffer decodeBuffer; - memset(&decodeBuffer, 0, sizeof(decodeBuffer)); - decodeBuffer.data = (uint8_t*)mInputBuffer->data() + mInputBuffer->range_offset(); - decodeBuffer.size = mInputBuffer->range_length(); - decodeBuffer.flag = seeking ? HAS_DISCONTINUITY : 0; - mInputBuffer->meta_data()->findInt64(kKeyTime, &decodeBuffer.timeStamp); - Decode_Status res = mDecoder->decode(&decodeBuffer); - - mInputBuffer->release(); - mInputBuffer = NULL; - - if (res == DECODE_FORMAT_CHANGE) { - LOGW("Format changed."); - // drain all the frames. 
- MediaBuffer *mbuf = NULL; - while ((mbuf = getOutputBuffer(true)) != NULL) { - mbuf->release(); - } - const VideoFormatInfo *info = mDecoder->getFormatInfo(); - uint32_t cropWidth, cropHeight; - if (info != NULL) { - cropWidth = info->width - (info->cropLeft + info->cropRight); - cropHeight = info->height - (info->cropBottom + info->cropTop); - mFormat->setInt32(kKeyWidth, cropWidth); - mFormat->setInt32(kKeyHeight, cropHeight); - } - // TODO: handle format change - err = INFO_FORMAT_CHANGED; - } - else if (res == DECODE_SUCCESS) { - mErrorCount = 0; - err = OK; - MediaBuffer *mbuf = getOutputBuffer(); - if (mbuf == NULL) { - *out = new MediaBuffer(0); - } else { - *out = mbuf; - } - } else { - mErrorCount++; - LOGE("Failed to decode buffer (#%d). Error = %d", mErrorCount, res); - if (checkFatalDecoderError(res)) { - err = UNKNOWN_ERROR; - } else { - // For decoder errors that could be omitted, not throw error and continue to decode. - err = OK; - *out = new MediaBuffer(0); - } - } - - return err; -} - -}// namespace android - diff --git a/frameworks/vavideodecoder/VAVideoDecoder.h b/frameworks/vavideodecoder/VAVideoDecoder.h deleted file mode 100644 index 5d66ec3..0000000 --- a/frameworks/vavideodecoder/VAVideoDecoder.h +++ /dev/null @@ -1,90 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* -* Copyright (C) 2011 The Android Open Source Project -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. 
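On DECODE_FORMAT_CHANGE above, the advertised frame size is recomputed as the coded size minus the crop borders. Standalone, with a struct mirroring the VideoFormatInfo fields used (FormatInfo and displaySize are illustrative names):

    #include <cstdint>

    struct FormatInfo {
        int32_t width, height;                          // coded dimensions
        int32_t cropLeft, cropRight, cropTop, cropBottom;
    };

    static void displaySize(const FormatInfo& f, int32_t& w, int32_t& h) {
        w = f.width  - (f.cropLeft + f.cropRight);
        h = f.height - (f.cropTop  + f.cropBottom);
    }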
-*/ - - -#ifndef VA_VIDEO_DECODER_H_ -#define VA_VIDEO_DECODER_H_ - -#include -#include -#include - -class IVideoDecoder; - -namespace android { - -struct VAVideoDecoder : public MediaSource, - public MediaBufferObserver { - VAVideoDecoder(const sp &source); - - virtual status_t start(MetaData *params); - virtual status_t stop(); - virtual sp getFormat(); - virtual status_t read(MediaBuffer **buffer, const ReadOptions *options); - virtual void signalBufferReturned(MediaBuffer* buffer) {} - -protected: - virtual ~VAVideoDecoder(); - -private: - MediaBuffer *getOutputBuffer(bool bDraining = false); - VAVideoDecoder(const VAVideoDecoder &); - VAVideoDecoder &operator=(const VAVideoDecoder &); - -private: - enum { - NUM_OF_MEDIA_BUFFER = 20, - }; - // The maximum input size is set to be size of one 1080P 4:4:4 image. - // It is calculated as 1920x1080x3 = 6220800 bytes. - enum { - MAXINPUTSIZE = 6220800, - }; - sp mSource; - bool mStarted; - bool mRawOutput; - sp mFormat; - Vector mFrames; - MediaBuffer *mInputBuffer; - int64_t mTargetTimeUs; - uint32_t mFrameIndex; - uint32_t mErrorCount; - IVideoDecoder *mDecoder; -}; - -} // namespace android - -#endif // VA_VIDEO_DECODER_H_ diff --git a/frameworks/videoedit/Android.mk b/frameworks/videoedit/Android.mk deleted file mode 100644 index 5053e7d..0000000 --- a/frameworks/videoedit/Android.mk +++ /dev/null @@ -1 +0,0 @@ -include $(call all-subdir-makefiles) diff --git a/frameworks/videoedit/lvpp/Android.mk b/frameworks/videoedit/lvpp/Android.mk deleted file mode 100644 index 1a70db4..0000000 --- a/frameworks/videoedit/lvpp/Android.mk +++ /dev/null @@ -1,55 +0,0 @@ -# -# Copyright (C) 2011 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -LOCAL_PATH:= $(call my-dir) - -include $(CLEAR_VARS) - -LOCAL_MODULE:= liblvpp_intel - -LOCAL_COPY_HEADERS_TO := videoeditornv12 - -LOCAL_COPY_HEADERS := VideoEditorToolsNV12.h - -LOCAL_SRC_FILES:= \ - VideoEditorToolsNV12.c - -LOCAL_MODULE_TAGS := optional - - -LOCAL_SHARED_LIBRARIES := \ - libcutils \ - libutils \ - libvideoeditor_osal \ - - -LOCAL_C_INCLUDES += \ - $(TOP)/frameworks/av/libvideoeditor/osal/inc \ - $(TOP)/frameworks/av/libvideoeditor/vss/common/inc \ - $(TOP)/frameworks/av/libvideoeditor/vss/mcs/inc \ - $(TOP)/frameworks/av/libvideoeditor/vss/inc \ - $(TOP)/frameworks/av/libvideoeditor/vss/stagefrightshells/inc \ - $(TOP)/frameworks/av/libvideoeditor/lvpp \ - - -LOCAL_SHARED_LIBRARIES += libdl - -# All of the shared libraries we link against. -LOCAL_LDLIBS := \ - -lpthread -ldl - -include $(BUILD_STATIC_LIBRARY) - diff --git a/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c b/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c deleted file mode 100644 index cf3fbdc..0000000 --- a/frameworks/videoedit/lvpp/VideoEditorToolsNV12.c +++ /dev/null @@ -1,1610 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. 
- * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_NDEBUG 1 -#define LOG_TAG "VideoEditorToolsNV12" -#include - -#include "VideoEditorToolsNV12.h" -#define M4VIFI_ALLOC_FAILURE 10 - -static M4VIFI_UInt8 M4VIFI_SemiplanarYUV420toYUV420_X86(void *user_data, - M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut ) -{ - - M4VIFI_UInt32 i; - M4VIFI_UInt8 *p_buf_src, *p_buf_dest, *p_buf_src_u, *p_buf_src_v; - M4VIFI_UInt8 *p_buf_dest_u,*p_buf_dest_v,*p_buf_src_uv; - M4VIFI_UInt8 return_code = M4VIFI_OK; - - /* the filter is implemented with the assumption that the width is equal to stride */ - if(PlaneIn[0].u_width != PlaneIn[0].u_stride) - return M4VIFI_INVALID_PARAM; - - /* The input Y Plane is the same as the output Y Plane */ - p_buf_src = &(PlaneIn[0].pac_data[PlaneIn[0].u_topleft]); - p_buf_dest = &(PlaneOut[0].pac_data[PlaneOut[0].u_topleft]); - memcpy((void *)p_buf_dest,(void *)p_buf_src , - PlaneOut[0].u_width * PlaneOut[0].u_height); - - /* The U and V components are planar. The need to be made interleaved */ - p_buf_src_uv = &(PlaneIn[1].pac_data[PlaneIn[1].u_topleft]); - p_buf_dest_u = &(PlaneOut[1].pac_data[PlaneOut[1].u_topleft]); - p_buf_dest_v = &(PlaneOut[2].pac_data[PlaneOut[2].u_topleft]); - - for(i = 0; i < PlaneOut[1].u_width*PlaneOut[1].u_height; i++) - { - *p_buf_dest_u++ = *p_buf_src_uv++; - *p_buf_dest_v++ = *p_buf_src_uv++; - } - return return_code; -} - -/** - *********************************************************************************************** - * M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toYUV420_X86(void *pUserData, M4VIFI_ImagePlane *pPlaneIn, - * M4VIFI_ImagePlane *pPlaneOut) - * @author David Dana (PHILIPS Software) - * @brief Resizes YUV420 Planar plane. 
- * @note Basic structure of the function - * Loop on each row (step 2) - * Loop on each column (step 2) - * Get four Y samples and 1 U & V sample - * Resize the Y with corresponing U and V samples - * Place the YUV in the ouput plane - * end loop column - * end loop row - * For resizing bilinear interpolation linearly interpolates along - * each row, and then uses that result in a linear interpolation down each column. - * Each estimated pixel in the output image is a weighted - * combination of its four neighbours. The ratio of compression - * or dilatation is estimated using input and output sizes. - * @param pUserData: (IN) User Data - * @param pPlaneIn: (IN) Pointer to YUV420 (Planar) plane buffer - * @param pPlaneOut: (OUT) Pointer to YUV420 (Planar) plane - * @return M4VIFI_OK: there is no error - * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: Error in height - * @return M4VIFI_ILLEGAL_FRAME_WIDTH: Error in width - *********************************************************************************************** -*/ - -static M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toYUV420_X86(void *pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) -{ - M4VIFI_UInt8 *pu8_data_in, *pu8_data_out, *pu8dum; - M4VIFI_UInt32 u32_plane; - M4VIFI_UInt32 u32_width_in, u32_width_out, u32_height_in, u32_height_out; - M4VIFI_UInt32 u32_stride_in, u32_stride_out; - M4VIFI_UInt32 u32_x_inc, u32_y_inc; - M4VIFI_UInt32 u32_x_accum, u32_y_accum, u32_x_accum_start; - M4VIFI_UInt32 u32_width, u32_height; - M4VIFI_UInt32 u32_y_frac; - M4VIFI_UInt32 u32_x_frac; - M4VIFI_UInt32 u32_temp_value; - M4VIFI_UInt8 *pu8_src_top; - M4VIFI_UInt8 *pu8_src_bottom; - - M4VIFI_UInt8 u8Wflag = 0; - M4VIFI_UInt8 u8Hflag = 0; - M4VIFI_UInt32 loop = 0; - - /* - If input width is equal to output width and input height equal to - output height then M4VIFI_YUV420toYUV420 is called. 
- */ - if ((pPlaneIn[0].u_height == pPlaneOut[0].u_height) && - (pPlaneIn[0].u_width == pPlaneOut[0].u_width)) - { - return M4VIFI_YUV420toYUV420(pUserData, pPlaneIn, pPlaneOut); - } - - /* Check for the YUV width and height are even */ - if ((IS_EVEN(pPlaneIn[0].u_height) == FALSE) || - (IS_EVEN(pPlaneOut[0].u_height) == FALSE)) - { - return M4VIFI_ILLEGAL_FRAME_HEIGHT; - } - - if ((IS_EVEN(pPlaneIn[0].u_width) == FALSE) || - (IS_EVEN(pPlaneOut[0].u_width) == FALSE)) - { - return M4VIFI_ILLEGAL_FRAME_WIDTH; - } - - /* Loop on planes */ - for(u32_plane = 0;u32_plane < PLANES;u32_plane++) - { - /* Set the working pointers at the beginning of the input/output data field */ - pu8_data_in = pPlaneIn[u32_plane].pac_data + pPlaneIn[u32_plane].u_topleft; - pu8_data_out = pPlaneOut[u32_plane].pac_data + pPlaneOut[u32_plane].u_topleft; - - /* Get the memory jump corresponding to a row jump */ - u32_stride_in = pPlaneIn[u32_plane].u_stride; - u32_stride_out = pPlaneOut[u32_plane].u_stride; - - /* Set the bounds of the active image */ - u32_width_in = pPlaneIn[u32_plane].u_width; - u32_height_in = pPlaneIn[u32_plane].u_height; - - u32_width_out = pPlaneOut[u32_plane].u_width; - u32_height_out = pPlaneOut[u32_plane].u_height; - - /* - For the case , width_out = width_in , set the flag to avoid - accessing one column beyond the input width.In this case the last - column is replicated for processing - */ - if (u32_width_out == u32_width_in) { - u32_width_out = u32_width_out-1; - u8Wflag = 1; - } - - /* Compute horizontal ratio between src and destination width.*/ - if (u32_width_out >= u32_width_in) - { - u32_x_inc = ((u32_width_in-1) * MAX_SHORT) / (u32_width_out-1); - } - else - { - u32_x_inc = (u32_width_in * MAX_SHORT) / (u32_width_out); - } - - /* - For the case , height_out = height_in , set the flag to avoid - accessing one row beyond the input height.In this case the last - row is replicated for processing - */ - if (u32_height_out == u32_height_in) { - u32_height_out = u32_height_out-1; - u8Hflag = 1; - } - - /* Compute vertical ratio between src and destination height.*/ - if (u32_height_out >= u32_height_in) - { - u32_y_inc = ((u32_height_in - 1) * MAX_SHORT) / (u32_height_out-1); - } - else - { - u32_y_inc = (u32_height_in * MAX_SHORT) / (u32_height_out); - } - - /* - Calculate initial accumulator value : u32_y_accum_start. - u32_y_accum_start is coded on 15 bits, and represents a value - between 0 and 0.5 - */ - if (u32_y_inc >= MAX_SHORT) - { - /* - Keep the fractionnal part, assimung that integer part is coded - on the 16 high bits and the fractional on the 15 low bits - */ - u32_y_accum = u32_y_inc & 0xffff; - - if (!u32_y_accum) - { - u32_y_accum = MAX_SHORT; - } - - u32_y_accum >>= 1; - } - else - { - u32_y_accum = 0; - } - - /* - Calculate initial accumulator value : u32_x_accum_start. - u32_x_accum_start is coded on 15 bits, and represents a value - between 0 and 0.5 - */ - if (u32_x_inc >= MAX_SHORT) - { - u32_x_accum_start = u32_x_inc & 0xffff; - - if (!u32_x_accum_start) - { - u32_x_accum_start = MAX_SHORT; - } - - u32_x_accum_start >>= 1; - } - else - { - u32_x_accum_start = 0; - } - - u32_height = u32_height_out; - - /* - Bilinear interpolation linearly interpolates along each row, and - then uses that result in a linear interpolation donw each column. 
- Each estimated pixel in the output image is a weighted combination - of its four neighbours according to the formula: - F(p',q')=f(p,q)R(-a)R(b)+f(p,q-1)R(-a)R(b-1)+f(p+1,q)R(1-a)R(b)+ - f(p+&,q+1)R(1-a)R(b-1) with R(x) = / x+1 -1 =< x =< 0 \ 1-x - 0 =< x =< 1 and a (resp. b)weighting coefficient is the distance - from the nearest neighbor in the p (resp. q) direction - */ - - do { /* Scan all the row */ - - /* Vertical weight factor */ - u32_y_frac = (u32_y_accum>>12)&15; - - /* Reinit accumulator */ - u32_x_accum = u32_x_accum_start; - - u32_width = u32_width_out; - - do { /* Scan along each row */ - pu8_src_top = pu8_data_in + (u32_x_accum >> 16); - pu8_src_bottom = pu8_src_top + u32_stride_in; - u32_x_frac = (u32_x_accum >> 12)&15; /* Horizontal weight factor */ - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + - pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[0]*(16-u32_x_frac) + - pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update horizontal accumulator */ - u32_x_accum += u32_x_inc; - } while(--u32_width); - - /* - This u8Wflag flag gets in to effect if input and output - width is same, and height may be different. So previous - pixel is replicated here - */ - if (u8Wflag) { - *pu8_data_out = (M4VIFI_UInt8)u32_temp_value; - } - - pu8dum = (pu8_data_out-u32_width_out); - pu8_data_out = pu8_data_out + u32_stride_out - u32_width_out; - - /* Update vertical accumulator */ - u32_y_accum += u32_y_inc; - if (u32_y_accum>>16) { - pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * u32_stride_in; - u32_y_accum &= 0xffff; - } - } while(--u32_height); - - /* - This u8Hflag flag gets in to effect if input and output height - is same, and width may be different. So previous pixel row is - replicated here - */ - if (u8Hflag) { - for(loop =0; loop < (u32_width_out+u8Wflag); loop++) { - *pu8_data_out++ = (M4VIFI_UInt8)*pu8dum++; - } - } - } - - return M4VIFI_OK; -} - -/** - ********************************************************************************************* - * M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toBGR565_X86(void *pContext, M4VIFI_ImagePlane *pPlaneIn, - * M4VIFI_ImagePlane *pPlaneOut) - * @brief Resize YUV420 plane and converts to BGR565 with +90 rotation. - * @note Basic sturture of the function - * Loop on each row (step 2) - * Loop on each column (step 2) - * Get four Y samples and 1 u & V sample - * Resize the Y with corresponing U and V samples - * Compute the four corresponding R G B values - * Place the R G B in the ouput plane in rotated fashion - * end loop column - * end loop row - * For resizing bilinear interpolation linearly interpolates along - * each row, and then uses that result in a linear interpolation down each column. - * Each estimated pixel in the output image is a weighted - * combination of its four neighbours. The ratio of compression - * or dilatation is estimated using input and output sizes. 
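The formula quoted in the comments above is easier to read untangled: writing a and b for the horizontal and vertical fractions in sixteenths (and noting that "f(p+&,q+1)" should read "f(p+1,q+1)"), each output sample is the classic bilinear blend F = ((p00*(16-a) + p01*a)*(16-b) + (p10*(16-a) + p11*a)*b) >> 8, where >>8 divides by the 16*16 weight total. Exactly the inner-loop arithmetic, standalone (bilinear4 is an illustrative name):

    #include <cstdint>

    // a, b in [0,15]: x/y fractional position in 1/16ths of a pixel.
    static uint8_t bilinear4(uint8_t p00, uint8_t p01,
                             uint8_t p10, uint8_t p11,
                             uint32_t a, uint32_t b) {
        uint32_t top = p00 * (16 - a) + p01 * a;   // blend along the top row
        uint32_t bot = p10 * (16 - a) + p11 * a;   // blend along the bottom row
        return uint8_t((top * (16 - b) + bot * b) >> 8);
    }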
- * @param pPlaneIn: (IN) Pointer to YUV plane buffer - * @param pContext: (IN) Context Pointer - * @param pPlaneOut: (OUT) Pointer to BGR565 Plane - * @return M4VIFI_OK: there is no error - * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: YUV Plane height is ODD - * @return M4VIFI_ILLEGAL_FRAME_WIDTH: YUV Plane width is ODD - ********************************************************************************************* -*/ -static M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toBGR565_X86(void* pContext, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) -{ - M4VIFI_UInt8 *pu8_data_in[PLANES], *pu8_data_in1[PLANES],*pu8_data_out; - M4VIFI_UInt32 *pu32_rgb_data_current, *pu32_rgb_data_next, *pu32_rgb_data_start; - - M4VIFI_UInt32 u32_width_in[PLANES], u32_width_out, u32_height_in[PLANES], u32_height_out; - M4VIFI_UInt32 u32_stride_in[PLANES]; - M4VIFI_UInt32 u32_stride_out, u32_stride2_out, u32_width2_RGB, u32_height2_RGB; - M4VIFI_UInt32 u32_x_inc[PLANES], u32_y_inc[PLANES]; - M4VIFI_UInt32 u32_x_accum_Y, u32_x_accum_U, u32_x_accum_start; - M4VIFI_UInt32 u32_y_accum_Y, u32_y_accum_U; - M4VIFI_UInt32 u32_x_frac_Y, u32_x_frac_U, u32_y_frac_Y,u32_y_frac_U; - M4VIFI_Int32 U_32, V_32, Y_32, Yval_32; - M4VIFI_UInt8 u8_Red, u8_Green, u8_Blue; - M4VIFI_UInt32 u32_row, u32_col; - - M4VIFI_UInt32 u32_plane; - M4VIFI_UInt32 u32_rgb_temp1, u32_rgb_temp2; - M4VIFI_UInt32 u32_rgb_temp3,u32_rgb_temp4; - M4VIFI_UInt32 u32_check_size; - - M4VIFI_UInt8 *pu8_src_top_Y,*pu8_src_top_U,*pu8_src_top_V ; - M4VIFI_UInt8 *pu8_src_bottom_Y, *pu8_src_bottom_U, *pu8_src_bottom_V; - - /* Check for the YUV width and height are even */ - u32_check_size = IS_EVEN(pPlaneIn[0].u_height); - if( u32_check_size == FALSE ) - { - return M4VIFI_ILLEGAL_FRAME_HEIGHT; - } - u32_check_size = IS_EVEN(pPlaneIn[0].u_width); - if (u32_check_size == FALSE ) - { - return M4VIFI_ILLEGAL_FRAME_WIDTH; - - } - /* Make the ouput width and height as even */ - pPlaneOut->u_height = pPlaneOut->u_height & 0xFFFFFFFE; - pPlaneOut->u_width = pPlaneOut->u_width & 0xFFFFFFFE; - pPlaneOut->u_stride = pPlaneOut->u_stride & 0xFFFFFFFC; - - /* Assignment of output pointer */ - pu8_data_out = pPlaneOut->pac_data + pPlaneOut->u_topleft; - /* Assignment of output width(rotated) */ - u32_width_out = pPlaneOut->u_width; - /* Assignment of output height(rotated) */ - u32_height_out = pPlaneOut->u_height; - - u32_width2_RGB = pPlaneOut->u_width >> 1; - u32_height2_RGB = pPlaneOut->u_height >> 1; - - u32_stride_out = pPlaneOut->u_stride >> 1; - u32_stride2_out = pPlaneOut->u_stride >> 2; - - for(u32_plane = 0; u32_plane < PLANES; u32_plane++) - { - /* Set the working pointers at the beginning of the input/output data field */ - pu8_data_in[u32_plane] = pPlaneIn[u32_plane].pac_data + pPlaneIn[u32_plane].u_topleft; - - /* Get the memory jump corresponding to a row jump */ - u32_stride_in[u32_plane] = pPlaneIn[u32_plane].u_stride; - - /* Set the bounds of the active image */ - u32_width_in[u32_plane] = pPlaneIn[u32_plane].u_width; - u32_height_in[u32_plane] = pPlaneIn[u32_plane].u_height; - } - /* Compute horizontal ratio between src and destination width for Y Plane. 
*/ - if (u32_width_out >= u32_width_in[YPlane]) - { - u32_x_inc[YPlane] = ((u32_width_in[YPlane]-1) * MAX_SHORT) / (u32_width_out-1); - } - else - { - u32_x_inc[YPlane] = (u32_width_in[YPlane] * MAX_SHORT) / (u32_width_out); - } - - /* Compute vertical ratio between src and destination height for Y Plane.*/ - if (u32_height_out >= u32_height_in[YPlane]) - { - u32_y_inc[YPlane] = ((u32_height_in[YPlane]-1) * MAX_SHORT) / (u32_height_out-1); - } - else - { - u32_y_inc[YPlane] = (u32_height_in[YPlane] * MAX_SHORT) / (u32_height_out); - } - - /* Compute horizontal ratio between src and destination width for U and V Planes. */ - if (u32_width2_RGB >= u32_width_in[UPlane]) - { - u32_x_inc[UPlane] = ((u32_width_in[UPlane]-1) * MAX_SHORT) / (u32_width2_RGB-1); - } - else - { - u32_x_inc[UPlane] = (u32_width_in[UPlane] * MAX_SHORT) / (u32_width2_RGB); - } - - /* Compute vertical ratio between src and destination height for U and V Planes. */ - - if (u32_height2_RGB >= u32_height_in[UPlane]) - { - u32_y_inc[UPlane] = ((u32_height_in[UPlane]-1) * MAX_SHORT) / (u32_height2_RGB-1); - } - else - { - u32_y_inc[UPlane] = (u32_height_in[UPlane] * MAX_SHORT) / (u32_height2_RGB); - } - - u32_y_inc[VPlane] = u32_y_inc[UPlane]; - u32_x_inc[VPlane] = u32_x_inc[UPlane]; - - /* - Calculate initial accumulator value : u32_y_accum_start. - u32_y_accum_start is coded on 15 bits,and represents a value between 0 and 0.5 - */ - if (u32_y_inc[YPlane] > MAX_SHORT) - { - /* - Keep the fractionnal part, assimung that integer part is coded on the 16 high bits, - and the fractionnal on the 15 low bits - */ - u32_y_accum_Y = u32_y_inc[YPlane] & 0xffff; - u32_y_accum_U = u32_y_inc[UPlane] & 0xffff; - - if (!u32_y_accum_Y) - { - u32_y_accum_Y = MAX_SHORT; - u32_y_accum_U = MAX_SHORT; - } - u32_y_accum_Y >>= 1; - u32_y_accum_U >>= 1; - } - else - { - u32_y_accum_Y = 0; - u32_y_accum_U = 0; - - } - - /* - Calculate initial accumulator value : u32_x_accum_start. - u32_x_accum_start is coded on 15 bits, and represents a value between 0 and 0.5 - */ - if (u32_x_inc[YPlane] > MAX_SHORT) - { - u32_x_accum_start = u32_x_inc[YPlane] & 0xffff; - - if (!u32_x_accum_start) - { - u32_x_accum_start = MAX_SHORT; - } - - u32_x_accum_start >>= 1; - } - else - { - u32_x_accum_start = 0; - } - - pu32_rgb_data_start = (M4VIFI_UInt32*)pu8_data_out; - - /* - Bilinear interpolation linearly interpolates along each row, and then uses that - result in a linear interpolation donw each column. Each estimated pixel in the - output image is a weighted combination of its four neighbours according to the formula : - F(p',q')=f(p,q)R(-a)R(b)+f(p,q-1)R(-a)R(b-1)+f(p+1,q)R(1-a)R(b)+f(p+&,q+1)R(1-a)R(b-1) - with R(x) = / x+1 -1 =< x =< 0 \ 1-x 0 =< x =< 1 and a (resp. b) weighting coefficient - is the distance from the nearest neighbor in the p (resp. 
q) direction - */ - for (u32_row = u32_height_out; u32_row != 0; u32_row -= 2) - { - u32_x_accum_Y = u32_x_accum_start; - u32_x_accum_U = u32_x_accum_start; - - /* Vertical weight factor */ - u32_y_frac_Y = (u32_y_accum_Y >> 12) & 15; - u32_y_frac_U = (u32_y_accum_U >> 12) & 15; - - /* RGB current line position pointer */ - pu32_rgb_data_current = pu32_rgb_data_start ; - - /* RGB next line position pointer */ - pu32_rgb_data_next = pu32_rgb_data_current + (u32_stride2_out); - - /* Y Plane next row pointer */ - pu8_data_in1[YPlane] = pu8_data_in[YPlane]; - - u32_rgb_temp3 = u32_y_accum_Y + (u32_y_inc[YPlane]); - if (u32_rgb_temp3 >> 16) - { - pu8_data_in1[YPlane] = pu8_data_in[YPlane] + - (u32_rgb_temp3 >> 16) * (u32_stride_in[YPlane]); - u32_rgb_temp3 &= 0xffff; - } - u32_rgb_temp4 = (u32_rgb_temp3 >> 12) & 15; - - for (u32_col = u32_width_out; u32_col != 0; u32_col -= 2) - { - - /* Input Y plane elements */ - pu8_src_top_Y = pu8_data_in[YPlane] + (u32_x_accum_Y >> 16); - pu8_src_bottom_Y = pu8_src_top_Y + u32_stride_in[YPlane]; - - /* Input U Plane elements */ - pu8_src_top_U = pu8_data_in[UPlane] + (u32_x_accum_U >> 16); - pu8_src_bottom_U = pu8_src_top_U + u32_stride_in[UPlane]; - - pu8_src_top_V = pu8_data_in[VPlane] + (u32_x_accum_U >> 16); - pu8_src_bottom_V = pu8_src_top_V + u32_stride_in[VPlane]; - - /* Horizontal weight factor for Y plane */ - u32_x_frac_Y = (u32_x_accum_Y >> 12)&15; - /* Horizontal weight factor for U and V planes */ - u32_x_frac_U = (u32_x_accum_U >> 12)&15; - - /* Weighted combination */ - U_32 = (((pu8_src_top_U[0]*(16-u32_x_frac_U) + pu8_src_top_U[1]*u32_x_frac_U) - *(16-u32_y_frac_U) + (pu8_src_bottom_U[0]*(16-u32_x_frac_U) - + pu8_src_bottom_U[1]*u32_x_frac_U)*u32_y_frac_U ) >> 8); - - V_32 = (((pu8_src_top_V[0]*(16-u32_x_frac_U) + pu8_src_top_V[1]*u32_x_frac_U) - *(16-u32_y_frac_U)+ (pu8_src_bottom_V[0]*(16-u32_x_frac_U) - + pu8_src_bottom_V[1]*u32_x_frac_U)*u32_y_frac_U ) >> 8); - - Y_32 = (((pu8_src_top_Y[0]*(16-u32_x_frac_Y) + pu8_src_top_Y[1]*u32_x_frac_Y) - *(16-u32_y_frac_Y) + (pu8_src_bottom_Y[0]*(16-u32_x_frac_Y) - + pu8_src_bottom_Y[1]*u32_x_frac_Y)*u32_y_frac_Y ) >> 8); - - u32_x_accum_U += (u32_x_inc[UPlane]); - - /* YUV to RGB */ - #ifdef __RGB_V1__ - Yval_32 = Y_32*37; - #else /* __RGB_V1__v */ - Yval_32 = Y_32*0x2568; - #endif /* __RGB_V1__v */ - - DEMATRIX(u8_Red,u8_Green,u8_Blue,Yval_32,U_32,V_32); - - /* Pack 8 bit R,G,B to RGB565 */ - #ifdef LITTLE_ENDIAN - u32_rgb_temp1 = PACK_BGR565(0,u8_Red,u8_Green,u8_Blue); - #else /* LITTLE_ENDIAN */ - u32_rgb_temp1 = PACK_BGR565(16,u8_Red,u8_Green,u8_Blue); - #endif /* LITTLE_ENDIAN */ - - - pu8_src_top_Y = pu8_data_in1[YPlane]+(u32_x_accum_Y >> 16); - pu8_src_bottom_Y = pu8_src_top_Y + u32_stride_in[YPlane]; - - /* Weighted combination */ - Y_32 = (((pu8_src_top_Y[0]*(16-u32_x_frac_Y) + pu8_src_top_Y[1]*u32_x_frac_Y) - *(16-u32_rgb_temp4) + (pu8_src_bottom_Y[0]*(16-u32_x_frac_Y) - + pu8_src_bottom_Y[1]*u32_x_frac_Y)*u32_rgb_temp4 ) >> 8); - - u32_x_accum_Y += u32_x_inc[YPlane]; - - /* Horizontal weight factor */ - u32_x_frac_Y = (u32_x_accum_Y >> 12)&15; - - /* YUV to RGB */ - #ifdef __RGB_V1__ - Yval_32 = Y_32*37; - #else /* __RGB_V1__v */ - Yval_32 = Y_32*0x2568; - #endif /* __RGB_V1__v */ - - DEMATRIX(u8_Red,u8_Green,u8_Blue,Yval_32,U_32,V_32); - - /* Pack 8 bit R,G,B to RGB565 */ - #ifdef LITTLE_ENDIAN - u32_rgb_temp2 = PACK_BGR565(0,u8_Red,u8_Green,u8_Blue); - #else /* LITTLE_ENDIAN */ - u32_rgb_temp2 = PACK_BGR565(16,u8_Red,u8_Green,u8_Blue); - #endif /* LITTLE_ENDIAN */ - - - pu8_src_top_Y 
= pu8_data_in[YPlane] + (u32_x_accum_Y >> 16) ; - pu8_src_bottom_Y = pu8_src_top_Y + u32_stride_in[YPlane]; - - /* Weighted combination */ - Y_32 = (((pu8_src_top_Y[0]*(16-u32_x_frac_Y) + pu8_src_top_Y[1]*u32_x_frac_Y) - *(16-u32_y_frac_Y) + (pu8_src_bottom_Y[0]*(16-u32_x_frac_Y) - + pu8_src_bottom_Y[1]*u32_x_frac_Y)*u32_y_frac_Y ) >> 8); - - /* YUV to RGB */ - #ifdef __RGB_V1__ - Yval_32 = Y_32*37; - #else /* __RGB_V1__v */ - Yval_32 = Y_32*0x2568; - #endif /* __RGB_V1__v */ - - DEMATRIX(u8_Red,u8_Green,u8_Blue,Yval_32,U_32,V_32); - - /* Pack 8 bit R,G,B to RGB565 */ - #ifdef LITTLE_ENDIAN - *(pu32_rgb_data_current)++ = u32_rgb_temp1 | - PACK_BGR565(16,u8_Red,u8_Green,u8_Blue); - #else /* LITTLE_ENDIAN */ - *(pu32_rgb_data_current)++ = u32_rgb_temp1 | - PACK_BGR565(0,u8_Red,u8_Green,u8_Blue); - #endif /* LITTLE_ENDIAN */ - - - pu8_src_top_Y = pu8_data_in1[YPlane]+ (u32_x_accum_Y >> 16); - pu8_src_bottom_Y = pu8_src_top_Y + u32_stride_in[YPlane]; - - /* Weighted combination */ - Y_32 = (((pu8_src_top_Y[0]*(16-u32_x_frac_Y) + pu8_src_top_Y[1]*u32_x_frac_Y) - *(16-u32_rgb_temp4) + (pu8_src_bottom_Y[0]*(16-u32_x_frac_Y) - + pu8_src_bottom_Y[1]*u32_x_frac_Y)*u32_rgb_temp4 )>>8); - - u32_x_accum_Y += u32_x_inc[YPlane]; - /* YUV to RGB */ - #ifdef __RGB_V1__ - Yval_32=Y_32*37; - #else /* __RGB_V1__v */ - Yval_32=Y_32*0x2568; - #endif /* __RGB_V1__v */ - - DEMATRIX(u8_Red,u8_Green,u8_Blue,Yval_32,U_32,V_32); - - /* Pack 8 bit R,G,B to RGB565 */ - #ifdef LITTLE_ENDIAN - *(pu32_rgb_data_next)++ = u32_rgb_temp2 | - PACK_BGR565(16,u8_Red,u8_Green,u8_Blue); - #else /* LITTLE_ENDIAN */ - *(pu32_rgb_data_next)++ = u32_rgb_temp2 | - PACK_BGR565(0,u8_Red,u8_Green,u8_Blue); - #endif /* LITTLE_ENDIAN */ - - } /* End of horizontal scanning */ - - u32_y_accum_Y = u32_rgb_temp3 + (u32_y_inc[YPlane]); - u32_y_accum_U += (u32_y_inc[UPlane]); - - /* Y plane row update */ - if (u32_y_accum_Y >> 16) - { - pu8_data_in[YPlane] = pu8_data_in1[YPlane] + - ((u32_y_accum_Y >> 16) * (u32_stride_in[YPlane])); - u32_y_accum_Y &= 0xffff; - } - else - { - pu8_data_in[YPlane] = pu8_data_in1[YPlane]; - } - /* U and V planes row update */ - if (u32_y_accum_U >> 16) - { - pu8_data_in[UPlane] = pu8_data_in[UPlane] + - (u32_y_accum_U >> 16) * (u32_stride_in[UPlane]); - pu8_data_in[VPlane] = pu8_data_in[VPlane] + - (u32_y_accum_U >> 16) * (u32_stride_in[VPlane]); - u32_y_accum_U &= 0xffff; - } - /* BGR pointer Update */ - pu32_rgb_data_start += u32_stride_out; - - } /* End of vertical scanning */ - return M4VIFI_OK; -} - -/*************************************************************************** -Proto: -M4VIFI_UInt8 M4VIFI_RGB888toNV12(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[2]); -Author: Patrice Martinez / Philips Digital Networks - MP4Net -Purpose: filling of the NV12 plane from a BGR24 plane -Abstract: Loop on each row ( 2 rows by 2 rows ) - Loop on each column ( 2 col by 2 col ) - Get 4 BGR samples from input data and build 4 output Y samples and each single U & V data - end loop on col - end loop on row - -In: RGB24 plane -InOut: none -Out: array of 3 M4VIFI_ImagePlane structures -Modified: ML: RGB function modified to BGR. 
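M4VIFI_RGB888toNV12 below works on 2x2 blocks: four luma samples per block, with the four per-pixel U and V values averaged (with +2 rounding) into a single interleaved UV pair, which is what makes the output NV12 rather than planar YUV420. A per-block sketch using the common BT.601 video-range integer coefficients (the library's actual Y24/U24/V24 macros are defined elsewhere and may differ; rgbToYuv and blockToNv12 are illustrative names):

    #include <cstdint>

    static void rgbToYuv(int r, int g, int b, int& y, int& u, int& v) {
        y = ( 66 * r + 129 * g +  25 * b + 128) / 256 + 16;
        u = (-38 * r -  74 * g + 112 * b + 128) / 256 + 128;
        v = (112 * r -  94 * g -  18 * b + 128) / 256 + 128;
    }

    // One 2x2 block: rgb[i] = {R,G,B} of pixel i; emits 4 Y and one UV pair.
    static void blockToNv12(const int rgb[4][3], uint8_t yOut[4], uint8_t uvOut[2]) {
        int uSum = 0, vSum = 0;
        for (int i = 0; i < 4; ++i) {
            int y, u, v;
            rgbToYuv(rgb[i][0], rgb[i][1], rgb[i][2], y, u, v);
            yOut[i] = uint8_t(y);
            uSum += u;
            vSum += v;
        }
        uvOut[0] = uint8_t((uSum + 2) >> 2);  // averaged U first (NV12 ordering)
        uvOut[1] = uint8_t((vSum + 2) >> 2);  // then averaged V
    }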
-***************************************************************************/ -M4VIFI_UInt8 M4VIFI_RGB888toNV12(void *pUserData, - M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut) -{ - - M4VIFI_UInt32 u32_width, u32_height; - M4VIFI_UInt32 u32_stride_Y, u32_stride2_Y, u32_stride_UV, u32_stride_rgb, u32_stride_2rgb; - M4VIFI_UInt32 u32_col, u32_row; - - M4VIFI_Int32 i32_r00, i32_r01, i32_r10, i32_r11; - M4VIFI_Int32 i32_g00, i32_g01, i32_g10, i32_g11; - M4VIFI_Int32 i32_b00, i32_b01, i32_b10, i32_b11; - M4VIFI_Int32 i32_y00, i32_y01, i32_y10, i32_y11; - M4VIFI_Int32 i32_u00, i32_u01, i32_u10, i32_u11; - M4VIFI_Int32 i32_v00, i32_v01, i32_v10, i32_v11; - M4VIFI_UInt8 *pu8_yn, *pu8_ys, *pu8_u, *pu8_v; - M4VIFI_UInt8 *pu8_y_data, *pu8_u_data, *pu8_v_data; - M4VIFI_UInt8 *pu8_rgbn_data, *pu8_rgbn; - - /* check sizes */ - if( (PlaneIn->u_height != PlaneOut[0].u_height) || - (PlaneOut[0].u_height != (PlaneOut[1].u_height<<1))) - return M4VIFI_ILLEGAL_FRAME_HEIGHT; - - if( (PlaneIn->u_width != PlaneOut[0].u_width) || - (PlaneOut[0].u_width != PlaneOut[1].u_width)) - return M4VIFI_ILLEGAL_FRAME_WIDTH; - - - /* set the pointer to the beginning of the output data buffers */ - pu8_y_data = PlaneOut[0].pac_data + PlaneOut[0].u_topleft; - pu8_u_data = PlaneOut[1].pac_data + PlaneOut[1].u_topleft; - pu8_v_data = pu8_u_data + 1; - - /* idem for input buffer */ - pu8_rgbn_data = PlaneIn->pac_data + PlaneIn->u_topleft; - - /* get the size of the output image */ - u32_width = PlaneOut[0].u_width; - u32_height = PlaneOut[0].u_height; - - /* set the size of the memory jumps corresponding to row jump in each output plane */ - u32_stride_Y = PlaneOut[0].u_stride; - u32_stride2_Y= u32_stride_Y << 1; - u32_stride_UV = PlaneOut[1].u_stride; - - /* idem for input plane */ - u32_stride_rgb = PlaneIn->u_stride; - u32_stride_2rgb = u32_stride_rgb << 1; - - /* loop on each row of the output image, input coordinates are estimated from output ones */ - /* two YUV rows are computed at each pass */ - for (u32_row = u32_height ;u32_row != 0; u32_row -=2) - { - /* update working pointers */ - pu8_yn = pu8_y_data; - pu8_ys = pu8_yn + u32_stride_Y; - - pu8_u = pu8_u_data; - pu8_v = pu8_v_data; - - pu8_rgbn= pu8_rgbn_data; - - /* loop on each column of the output image*/ - for (u32_col = u32_width; u32_col != 0 ; u32_col -=2) - { - /* get RGB samples of 4 pixels */ - GET_RGB24(i32_r00, i32_g00, i32_b00, pu8_rgbn, 0); - GET_RGB24(i32_r10, i32_g10, i32_b10, pu8_rgbn, CST_RGB_24_SIZE); - GET_RGB24(i32_r01, i32_g01, i32_b01, pu8_rgbn, u32_stride_rgb); - GET_RGB24(i32_r11, i32_g11, i32_b11, pu8_rgbn, u32_stride_rgb + CST_RGB_24_SIZE); - - i32_u00 = U24(i32_r00, i32_g00, i32_b00); - i32_v00 = V24(i32_r00, i32_g00, i32_b00); - i32_y00 = Y24(i32_r00, i32_g00, i32_b00); /* matrix luminance */ - pu8_yn[0]= (M4VIFI_UInt8)i32_y00; - - i32_u10 = U24(i32_r10, i32_g10, i32_b10); - i32_v10 = V24(i32_r10, i32_g10, i32_b10); - i32_y10 = Y24(i32_r10, i32_g10, i32_b10); - pu8_yn[1]= (M4VIFI_UInt8)i32_y10; - - i32_u01 = U24(i32_r01, i32_g01, i32_b01); - i32_v01 = V24(i32_r01, i32_g01, i32_b01); - i32_y01 = Y24(i32_r01, i32_g01, i32_b01); - pu8_ys[0]= (M4VIFI_UInt8)i32_y01; - - i32_u11 = U24(i32_r11, i32_g11, i32_b11); - i32_v11 = V24(i32_r11, i32_g11, i32_b11); - i32_y11 = Y24(i32_r11, i32_g11, i32_b11); - pu8_ys[1] = (M4VIFI_UInt8)i32_y11; - - *pu8_u = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2); - *pu8_v = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2); - - pu8_rgbn += (CST_RGB_24_SIZE<<1); - pu8_yn 
+= 2; - pu8_ys += 2; - - pu8_u += 2; - pu8_v += 2; - } /* end of horizontal scanning */ - - pu8_y_data += u32_stride2_Y; - pu8_u_data += u32_stride_UV; - pu8_v_data += u32_stride_UV; - pu8_rgbn_data += u32_stride_2rgb; - - - } /* End of vertical scanning */ - - return M4VIFI_OK; -} - -/** NV12 to NV12 */ -/** - ******************************************************************************************* - * M4VIFI_UInt8 M4VIFI_NV12toNV12 (void *pUserData, - * M4VIFI_ImagePlane *pPlaneIn, - * M4VIFI_ImagePlane *pPlaneOut) - * @brief Transforms an NV12 image into an NV12 image (plane-by-plane copy). - * @param pUserData: (IN) User Specific Data (Unused - could be NULL) - * @param pPlaneIn: (IN) Pointer to NV12 plane buffer - * @param pPlaneOut: (OUT) Pointer to NV12 plane - * @return M4VIFI_OK: there is no error - * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: Error in plane height - * @return M4VIFI_ILLEGAL_FRAME_WIDTH: Error in plane width - ******************************************************************************************* - */ - -M4VIFI_UInt8 M4VIFI_NV12toNV12(void *user_data, - M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut) -{ - M4VIFI_Int32 plane_number; - M4VIFI_UInt32 i; - M4VIFI_UInt8 *p_buf_src, *p_buf_dest; - - for (plane_number = 0; plane_number < 2; plane_number++) - { - p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]); - p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]); - for (i = 0; i < PlaneOut[plane_number].u_height; i++) - { - memcpy((void *)p_buf_dest, (void *)p_buf_src, PlaneOut[plane_number].u_width); - p_buf_src += PlaneIn[plane_number].u_stride; - p_buf_dest += PlaneOut[plane_number].u_stride; - } - } - return M4VIFI_OK; -} - -/** - *********************************************************************************************** - * M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toNV12(void *pUserData, M4VIFI_ImagePlane *pPlaneIn, - * M4VIFI_ImagePlane *pPlaneOut) - * @author David Dana (PHILIPS Software) - * @brief Resizes NV12 (semi-planar) plane. - * @note Basic structure of the function - * Loop on each row (step 2) - * Loop on each column (step 2) - * Get four Y samples and 1 U & V sample - * Resize the Y with corresponding U and V samples - * Place the NV12 in the output plane - * end loop column - * end loop row - * For resizing, bilinear interpolation linearly interpolates along - * each row, and then uses that result in a linear interpolation down each column. - * Each estimated pixel in the output image is a weighted - * combination of its four neighbours. The ratio of compression - * or dilatation is estimated using input and output sizes.
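 * The loops below implement this in 16.16 fixed point: the per-step
 * increment is (in_size * MAX_SHORT) / out_size, the integer sample
 * offset is accum >> 16, and only the top 4 fractional bits,
 * (accum >> 12) & 15, are kept as the blend weight, so each tap is
 * weighted 0..16 and the four-tap sum is renormalised by >> 8
 * (16 * 16 = 256). A hypothetical worked example, not taken from a
 * real trace: scaling a 320-wide row down to 240 gives
 * u32_x_inc = 320 * 65536 / 240 = 0x15555; an accumulator value of
 * 0x2AAAA then selects source byte 2 (0x2AAAA >> 16) with weight
 * (0x2AAAA >> 12) & 15 = 10, i.e. the right-hand neighbour
 * contributes 10/16 of the result. This assumes MAX_SHORT is the
 * 2^16 scaling constant used consistently in this file.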
- * @param pUserData: (IN) User Data - * @param pPlaneIn: (IN) Pointer to NV12 (Planar) plane buffer - * @param pPlaneOut: (OUT) Pointer to NV12 (Planar) plane - * @return M4VIFI_OK: there is no error - * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: Error in height - * @return M4VIFI_ILLEGAL_FRAME_WIDTH: Error in width - *********************************************************************************************** -*/ -M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toNV12(void *pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) -{ - M4VIFI_UInt8 *pu8_data_in, *pu8_data_out, *pu8dum; - M4VIFI_UInt32 u32_plane; - M4VIFI_UInt32 u32_width_in, u32_width_out, u32_height_in, u32_height_out; - M4VIFI_UInt32 u32_stride_in, u32_stride_out; - M4VIFI_UInt32 u32_x_inc, u32_y_inc; - M4VIFI_UInt32 u32_x_accum, u32_y_accum, u32_x_accum_start; - M4VIFI_UInt32 u32_width, u32_height; - M4VIFI_UInt32 u32_y_frac; - M4VIFI_UInt32 u32_x_frac; - M4VIFI_UInt32 u32_temp_value,u32_temp_value1; - M4VIFI_UInt8 *pu8_src_top; - M4VIFI_UInt8 *pu8_src_bottom; - - M4VIFI_UInt8 u8Wflag = 0; - M4VIFI_UInt8 u8Hflag = 0; - M4VIFI_UInt32 loop = 0; - - LOGV("M4VIFI_ResizeBilinearNV12toNV12 begin"); - /* - If input width is equal to output width and input height equal to - output height then M4VIFI_NV12toNV12 is called. - */ - - LOGV("pPlaneIn[0].u_height = %d, pPlaneIn[0].u_width = %d,\ - pPlaneOut[0].u_height = %d, pPlaneOut[0].u_width = %d", - pPlaneIn[0].u_height, pPlaneIn[0].u_width, - pPlaneOut[0].u_height, pPlaneOut[0].u_width - ); - LOGV("pPlaneIn[1].u_height = %d, pPlaneIn[1].u_width = %d,\ - pPlaneOut[1].u_height = %d, pPlaneOut[1].u_width = %d", - pPlaneIn[1].u_height, pPlaneIn[1].u_width, - pPlaneOut[1].u_height, pPlaneOut[1].u_width - ); - if ((pPlaneIn[0].u_height == pPlaneOut[0].u_height) && - (pPlaneIn[0].u_width == pPlaneOut[0].u_width)) - { - return M4VIFI_NV12toNV12(pUserData, pPlaneIn, pPlaneOut); - } - - /* Check for the YUV width and height are even */ - if ((IS_EVEN(pPlaneIn[0].u_height) == FALSE) || - (IS_EVEN(pPlaneOut[0].u_height) == FALSE)) - { - return M4VIFI_ILLEGAL_FRAME_HEIGHT; - } - - if ((IS_EVEN(pPlaneIn[0].u_width) == FALSE) || - (IS_EVEN(pPlaneOut[0].u_width) == FALSE)) - { - return M4VIFI_ILLEGAL_FRAME_WIDTH; - } - - /* Loop on planes */ - for(u32_plane = 0;u32_plane < 2;u32_plane++) - { - /* Get the memory jump corresponding to a row jump */ - u32_stride_in = pPlaneIn[u32_plane].u_stride; - u32_stride_out = pPlaneOut[u32_plane].u_stride; - - /* Set the bounds of the active image */ - u32_width_in = pPlaneIn[u32_plane].u_width; - u32_height_in = pPlaneIn[u32_plane].u_height; - - u32_width_out = pPlaneOut[u32_plane].u_width; - u32_height_out = pPlaneOut[u32_plane].u_height; - - /* - For the case , width_out = width_in , set the flag to avoid - accessing one column beyond the input width.In this case the last - column is replicated for processing - */ - if (u32_width_out == u32_width_in) { - u32_width_out = u32_width_out - 1 - u32_plane; - u8Wflag = 1; - } - - /* Compute horizontal ratio between src and destination width.*/ - if (u32_width_out >= u32_width_in) - { - u32_x_inc = ((u32_width_in -1 -u32_plane) * MAX_SHORT)/(u32_width_out -1 -u32_plane); - } - else - { - u32_x_inc = (u32_width_in * MAX_SHORT) / (u32_width_out); - } - - /* - For the case , height_out = height_in , set the flag to avoid - accessing one row beyond the input height.In this case the last - row is replicated for processing - */ - if (u32_height_out == u32_height_in) { - u32_height_out = u32_height_out-1; - u8Hflag = 
1; - } - - /* Compute vertical ratio between src and destination height.*/ - if (u32_height_out >= u32_height_in) - { - u32_y_inc = ((u32_height_in - 1) * MAX_SHORT) / (u32_height_out - 1); - } - else - { - u32_y_inc = (u32_height_in * MAX_SHORT) / (u32_height_out); - } - - /* - Calculate initial accumulator value : u32_y_accum_start. - u32_y_accum_start is coded on 15 bits, and represents a value - between 0 and 0.5 - */ - if (u32_y_inc >= MAX_SHORT) - { - /* - Keep the fractional part, assuming that the integer part is coded - on the 16 high bits and the fractional part on the 16 low bits - */ - u32_y_accum = u32_y_inc & 0xffff; - - if (!u32_y_accum) - { - u32_y_accum = MAX_SHORT; - } - - u32_y_accum >>= 1; - } - else - { - u32_y_accum = 0; - } - - - /* - Calculate initial accumulator value : u32_x_accum_start. - u32_x_accum_start is coded on 15 bits, and represents a value - between 0 and 0.5 - */ - if (u32_x_inc >= MAX_SHORT) - { - u32_x_accum_start = u32_x_inc & 0xffff; - - if (!u32_x_accum_start) - { - u32_x_accum_start = MAX_SHORT; - } - - u32_x_accum_start >>= 1; - } - else - { - u32_x_accum_start = 0; - } - - u32_height = u32_height_out; - - /* - Bilinear interpolation linearly interpolates along each row, and - then uses that result in a linear interpolation down each column. - Each estimated pixel in the output image is a weighted combination - of its four neighbours according to the formula: - F(p',q') = f(p,q)R(-a)R(b) + f(p,q+1)R(-a)R(b-1) + - f(p+1,q)R(1-a)R(b) + f(p+1,q+1)R(1-a)R(b-1) - with R(x) = x+1 for -1 <= x <= 0 and R(x) = 1-x for 0 <= x <= 1, - where the weighting coefficient a (resp. b) is the distance - from the nearest neighbour in the p (resp. q) direction - */ - - if (u32_plane == 0) - { - /* Set the working pointers at the beginning of the input/output data field */ - pu8_data_in = pPlaneIn[u32_plane].pac_data + pPlaneIn[u32_plane].u_topleft; - pu8_data_out = pPlaneOut[u32_plane].pac_data + pPlaneOut[u32_plane].u_topleft; - - do { /* Scan all the rows */ - - /* Vertical weight factor */ - u32_y_frac = (u32_y_accum>>12)&15; - - /* Reinit accumulator */ - u32_x_accum = u32_x_accum_start; - - u32_width = u32_width_out; - - do { /* Scan along each row */ - pu8_src_top = pu8_data_in + (u32_x_accum >> 16); - pu8_src_bottom = pu8_src_top + u32_stride_in; - u32_x_frac = (u32_x_accum >> 12)&15; /* Horizontal weight factor */ - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + - pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[0]*(16-u32_x_frac) + - pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update horizontal accumulator */ - u32_x_accum += u32_x_inc; - } while(--u32_width); - - /* - The u8Wflag flag takes effect if the input and output - widths are the same while the heights may differ. So the previous - pixel is replicated here - */ - if (u8Wflag) { - *pu8_data_out = (M4VIFI_UInt8)u32_temp_value; - } - - pu8dum = (pu8_data_out-u32_width_out); - pu8_data_out = pu8_data_out + u32_stride_out - u32_width_out; - - /* Update vertical accumulator */ - u32_y_accum += u32_y_inc; - if (u32_y_accum>>16) { - pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * u32_stride_in; - u32_y_accum &= 0xffff; - } - } while(--u32_height); - - /* - The u8Hflag flag takes effect if the input and output heights - are the same while the widths may differ.
So previous pixel row is - replicated here - */ - if (u8Hflag) { - memcpy((void *)pu8_data_out,(void *)pu8dum,u32_width_out+u8Wflag); - } - } - else - { - /* Set the working pointers at the beginning of the input/output data field */ - pu8_data_in = pPlaneIn[u32_plane].pac_data + pPlaneIn[u32_plane].u_topleft; - pu8_data_out = pPlaneOut[u32_plane].pac_data + pPlaneOut[u32_plane].u_topleft; - - do { /* Scan all the row */ - - /* Vertical weight factor */ - u32_y_frac = (u32_y_accum>>12)&15; - - /* Reinit accumulator */ - u32_x_accum = u32_x_accum_start; - - u32_width = u32_width_out; - - do { /* Scan along each row */ - pu8_src_top = pu8_data_in + ((u32_x_accum >> 16) << 1); - pu8_src_bottom = pu8_src_top + u32_stride_in; - u32_x_frac = (u32_x_accum >> 12)&15; - - /* U planar weighted combination */ - u32_temp_value1 = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + - pu8_src_top[2]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[0]*(16-u32_x_frac) + - pu8_src_bottom[2]*u32_x_frac)*u32_y_frac )>>8); - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value1; - - pu8_src_top = pu8_src_top + 1; - pu8_src_bottom = pu8_src_bottom + 1; - - /* V planar weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + - pu8_src_top[2]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[0]*(16-u32_x_frac) + - pu8_src_bottom[2]*u32_x_frac)*u32_y_frac )>>8); - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update horizontal accumulator */ - u32_x_accum += u32_x_inc; - u32_width -= 2; - } while(u32_width); - - /* - This u8Wflag flag gets in to effect if input and output - width is same, and height may be different. So previous - pixel is replicated here - */ - if (u8Wflag) { - *pu8_data_out = (M4VIFI_UInt8)u32_temp_value1; - *(pu8_data_out+1) = (M4VIFI_UInt8)u32_temp_value; - } - - pu8dum = (pu8_data_out - u32_width_out); - pu8_data_out = pu8_data_out + u32_stride_out - u32_width_out; - - /* Update vertical accumulator */ - u32_y_accum += u32_y_inc; - if (u32_y_accum>>16) { - pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * u32_stride_in; - u32_y_accum &= 0xffff; - } - } while(--u32_height); - - /* - This u8Hflag flag gets in to effect if input and output height - is same, and width may be different. So previous pixel row is - replicated here - */ - if (u8Hflag) { - memcpy((void *)pu8_data_out,(void *)pu8dum,u32_width_out+u8Wflag+1); - } - } - } - LOGV("M4VIFI_ResizeBilinearNV12toNV12 end"); - return M4VIFI_OK; -} - -M4VIFI_UInt8 M4VIFI_Rotate90LeftNV12toNV12(void* pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) -{ - M4VIFI_Int32 plane_number; - M4VIFI_UInt32 i,j, u_stride; - M4VIFI_UInt8 *p_buf_src, *p_buf_dest; - - /**< Loop on Y,U and V planes */ - for (plane_number = 0; plane_number < 2; plane_number++) { - /**< Get adresses of first valid pixel in input and output buffer */ - /**< As we have a -90. 
rotation, first needed pixel is the upper-right one */ - if (plane_number == 0) { - p_buf_src = - &(pPlaneIn[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]) + - pPlaneOut[plane_number].u_height - 1 ; - p_buf_dest = - &(pPlaneOut[plane_number].pac_data[pPlaneOut[plane_number].u_topleft]); - u_stride = pPlaneIn[plane_number].u_stride; - /**< Loop on output rows */ - for (i = pPlaneOut[plane_number].u_height; i != 0; i--) { - /**< Loop on all output pixels in a row */ - for (j = pPlaneOut[plane_number].u_width; j != 0; j--) { - *p_buf_dest++= *p_buf_src; - p_buf_src += u_stride; /**< Go to the next row */ - } - - /**< Go on next row of the output frame */ - p_buf_dest += - pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; - /**< Go to next pixel in the last row of the input frame*/ - p_buf_src -= - pPlaneIn[plane_number].u_stride * pPlaneOut[plane_number].u_width + 1 ; - } - } else { - p_buf_src = - &(pPlaneIn[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]) + - pPlaneIn[plane_number].u_width - 2 ; - p_buf_dest = - &(pPlaneOut[plane_number].pac_data[pPlaneOut[plane_number].u_topleft]); - u_stride = pPlaneIn[plane_number].u_stride; - /**< Loop on output rows */ - for (i = pPlaneOut[plane_number].u_height; i != 0 ; i--) { - /**< Loop on all output pixels in a row */ - for (j = (pPlaneOut[plane_number].u_width >> 1); j != 0 ; j--) { - *p_buf_dest++= *p_buf_src++; - *p_buf_dest++= *p_buf_src--; - p_buf_src += u_stride; /**< Go to the next row */ - } - - /**< Go on next row of the output frame */ - p_buf_dest += - pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; - /**< Go to next pixel in the last row of the input frame*/ - p_buf_src -= - pPlaneIn[plane_number].u_stride * pPlaneIn[plane_number].u_height + 2 ; - } - } - } - - return M4VIFI_OK; -} - -M4VIFI_UInt8 M4VIFI_Rotate90RightNV12toNV12(void* pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) -{ - M4VIFI_Int32 plane_number; - M4VIFI_UInt32 i,j, u_stride; - M4VIFI_UInt8 *p_buf_src, *p_buf_dest; - - /**< Loop on Y,U and V planes */ - for (plane_number = 0; plane_number < 2; plane_number++) { - /**< Get adresses of first valid pixel in input and output buffer */ - /**< As we have a +90 rotation, first needed pixel is the left-down one */ - p_buf_src = - &(pPlaneIn[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]) + - (pPlaneIn[plane_number].u_stride * (pPlaneIn[plane_number].u_height - 1)); - p_buf_dest = - &(pPlaneOut[plane_number].pac_data[pPlaneOut[plane_number].u_topleft]); - u_stride = pPlaneIn[plane_number].u_stride; - if (plane_number == 0) { - /**< Loop on output rows */ - for (i = pPlaneOut[plane_number].u_height; i != 0 ; i--) { - /**< Loop on all output pixels in a row */ - for (j = pPlaneOut[plane_number].u_width; j != 0 ; j--) { - *p_buf_dest++= *p_buf_src; - p_buf_src -= u_stride; /**< Go to the previous row */ - } - - /**< Go on next row of the output frame */ - p_buf_dest += - pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; - /**< Go to next pixel in the last row of the input frame*/ - p_buf_src += - pPlaneIn[plane_number].u_stride * pPlaneOut[plane_number].u_width + 1 ; - } - } else { - /**< Loop on output rows */ - for (i = pPlaneOut[plane_number].u_height; i != 0 ; i--) { - /**< Loop on all output pixels in a row */ - for (j = (pPlaneOut[plane_number].u_width >> 1); j != 0 ; j--) { - *p_buf_dest++= *p_buf_src++; - *p_buf_dest++= *p_buf_src--; - p_buf_src -= u_stride; /**< Go to the previous row */ - } - - /**< Go on next 
row of the output frame */ - p_buf_dest += - pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; - /**< Go to next pixel in the last row of the input frame*/ - p_buf_src += - pPlaneIn[plane_number].u_stride * pPlaneIn[plane_number].u_height + 2 ; - } - } - } - - return M4VIFI_OK; -} - -M4VIFI_UInt8 M4VIFI_Rotate180NV12toNV12(void* pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) -{ - M4VIFI_Int32 plane_number; - M4VIFI_UInt32 i,j; - M4VIFI_UInt8 *p_buf_src, *p_buf_dest, temp_pix1; - M4VIFI_UInt16 *p16_buf_src, *p16_buf_dest, temp_pix2; - - /**< Loop on Y,U and V planes */ - for (plane_number = 0; plane_number < 2; plane_number++) { - /**< Get adresses of first valid pixel in input and output buffer */ - p_buf_src = - &(pPlaneIn[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]); - p_buf_dest = - &(pPlaneOut[plane_number].pac_data[pPlaneOut[plane_number].u_topleft]); - - if (plane_number == 0) { - /**< If pPlaneIn = pPlaneOut, the algorithm will be different */ - if (p_buf_src == p_buf_dest) { - /**< Get Address of last pixel in the last row of the frame */ - p_buf_dest += - pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height-1) + - pPlaneOut[plane_number].u_width - 1; - - /**< We loop (height/2) times on the rows. - * In case u_height is odd, the row at the middle of the frame - * has to be processed as must be mirrored */ - for (i = (pPlaneOut[plane_number].u_height>>1); i != 0; i--) { - for (j = pPlaneOut[plane_number].u_width; j != 0 ; j--) { - temp_pix1= *p_buf_dest; - *p_buf_dest--= *p_buf_src; - *p_buf_src++ = temp_pix1; - } - /**< Go on next row in top of frame */ - p_buf_src += - pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; - /**< Go to the last pixel in previous row in bottom of frame*/ - p_buf_dest -= - pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; - } - - /**< Mirror middle row in case height is odd */ - if ((pPlaneOut[plane_number].u_height%2)!= 0) { - p_buf_src = - &(pPlaneOut[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]); - p_buf_src += - pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height>>1); - p_buf_dest = - p_buf_src + pPlaneOut[plane_number].u_width; - - /**< We loop u_width/2 times on this row. 
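 * (The in-place path is, at bottom, this two-pointer swap applied from
 * both ends of the frame toward the middle; a minimal sketch for one
 * row, illustrative rather than lifted from this function:
 *   left = row; right = row + width - 1;
 *   while (left < right) { tmp = *right; *right-- = *left; *left++ = tmp; }
 * )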
- * In case u_width is odd, the pixel at the middle of this row - * remains unchanged */ - for (j = (pPlaneOut[plane_number].u_width>>1); j != 0 ; j--) { - temp_pix1= *p_buf_dest; - *p_buf_dest--= *p_buf_src; - *p_buf_src++ = temp_pix1; - } - } - } else { - /**< Get Address of last pixel in the last row of the output frame */ - p_buf_dest += - pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height-1) + - pPlaneIn[plane_number].u_width - 1; - - /**< Loop on rows */ - for (i = pPlaneOut[plane_number].u_height; i != 0 ; i--) { - for (j = pPlaneOut[plane_number].u_width; j != 0 ; j--) { - *p_buf_dest--= *p_buf_src++; - } - - /**< Go on next row in top of input frame */ - p_buf_src += - pPlaneIn[plane_number].u_stride - pPlaneOut[plane_number].u_width; - /**< Go to last pixel of previous row in bottom of input frame*/ - p_buf_dest -= - pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width; - } - } - } else { - /**< If pPlaneIn = pPlaneOut, the algorithm will be different */ - if (p_buf_src == p_buf_dest) { - p16_buf_src = (M4VIFI_UInt16 *)p_buf_src; - p16_buf_dest = (M4VIFI_UInt16 *)p_buf_dest; - /**< Get Address of last pixel in the last row of the frame */ - p16_buf_dest += - ((pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height-1) + - pPlaneOut[plane_number].u_width)>>1) - 1; - - /**< We loop (height/2) times on the rows. - * In case u_height is odd, the row at the middle of the frame - * has to be processed as must be mirrored */ - for (i = (pPlaneOut[plane_number].u_height >> 1); i != 0 ; i--) { - for (j = (pPlaneOut[plane_number].u_width >> 1); j != 0 ; j--) { - temp_pix2 = *p16_buf_dest; - *p16_buf_dest--= *p16_buf_src; - *p16_buf_src++ = temp_pix2; - } - /**< Go on next row in top of frame */ - p16_buf_src += - ((pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width)>>1); - /**< Go to the last pixel in previous row in bottom of frame*/ - p16_buf_dest -= - ((pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width)>>1); - } - - /**< Mirror middle row in case height is odd */ - if ((pPlaneOut[plane_number].u_height%2)!= 0) { - p_buf_src = - &(pPlaneOut[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]); - p_buf_src += - pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height>>1); - p16_buf_src = (M4VIFI_UInt16 *)p_buf_src; - p_buf_dest = - p_buf_src + pPlaneOut[plane_number].u_width - 1; - p16_buf_dest = (M4VIFI_UInt16 *)p_buf_dest; - - /**< We loop u_width/2 times on this row. 
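 * (Here the interleaved UV row is mirrored 16 bits at a time so each
 * (U,V) pair stays intact; sketched with invented names:
 *   p16l = (M4VIFI_UInt16 *)row; p16r = p16l + (row_bytes >> 1) - 1;
 *   while (p16l < p16r) { t = *p16r; *p16r-- = *p16l; *p16l++ = t; }
 * A byte-wise mirror would swap U and V within each pair and corrupt
 * the colours.)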
- * In case u_width is odd, the pixel at the middle of this row - * remains unchanged */ - for (j = (pPlaneOut[plane_number].u_width>>2); j != 0 ; j--) { - temp_pix2= *p16_buf_dest; - *p16_buf_dest--= *p16_buf_src; - *p16_buf_src++ = temp_pix2; - } - } - } else { - /**< Get Address of last pixel in the last row of the output frame */ - p_buf_dest += - pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height-1) + - pPlaneIn[plane_number].u_width - 2; - p16_buf_dest = (M4VIFI_UInt16 *)p_buf_dest; - p16_buf_src = (M4VIFI_UInt16 *)p_buf_src; - - /**< Loop on rows */ - for (i = pPlaneOut[plane_number].u_height; i != 0 ; i--) { - for (j = (pPlaneOut[plane_number].u_width >> 1); j != 0 ; j--) { - *p16_buf_dest--= *p16_buf_src++; - } - - /**< Go on next row in top of input frame */ - p16_buf_src += - ((pPlaneIn[plane_number].u_stride - pPlaneOut[plane_number].u_width)>>1); - /**< Go to last pixel of previous row in bottom of input frame*/ - p16_buf_dest -= - ((pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width)>>1); - } - } - } - } - - return M4VIFI_OK; -} - - -M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toYUV420(void *pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) -{ - - LOGV("M4VIFI_ResizeBilinearNV12toYUV420 begin"); - - M4VIFI_ImagePlane pPlaneTmp[3]; - M4OSA_UInt32 mVideoWidth, mVideoHeight; - M4OSA_UInt32 mFrameSize; - - mVideoWidth = pPlaneIn[0].u_width; - mVideoHeight = pPlaneIn[0].u_height; - mFrameSize = mVideoWidth * mVideoHeight * 3/2; - - M4OSA_UInt8 *pData = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc( - mFrameSize, - 12420, - (M4OSA_Char*)("M4VIFI_ResizeBilinearNV12toYUV420: tempBuffer") - ); - - if (NULL == pData) - { - LOGE("Error: Fail to allocate tempBuffer!"); - return M4VIFI_ALLOC_FAILURE; - } - - pPlaneTmp[0].pac_data = pData; - pPlaneTmp[0].u_height = pPlaneIn[0].u_height; - pPlaneTmp[0].u_width = pPlaneIn[0].u_width; - pPlaneTmp[0].u_stride = pPlaneIn[0].u_stride; - pPlaneTmp[0].u_topleft = pPlaneIn[0].u_topleft; - - pPlaneTmp[1].pac_data = (M4OSA_UInt8 *)(pData + mVideoWidth*mVideoHeight); - pPlaneTmp[1].u_height = pPlaneTmp[0].u_height/2; - pPlaneTmp[1].u_width = pPlaneTmp[0].u_width/2; - pPlaneTmp[1].u_stride = pPlaneTmp[0].u_stride/2; - pPlaneTmp[1].u_topleft = pPlaneTmp[0].u_topleft; - - pPlaneTmp[2].pac_data = (M4OSA_UInt8 *)(pData + mVideoWidth*mVideoHeight*5/4); - pPlaneTmp[2].u_height = pPlaneTmp[0].u_height/2; - pPlaneTmp[2].u_width = pPlaneTmp[0].u_width/2; - pPlaneTmp[2].u_stride = pPlaneTmp[0].u_stride/2; - pPlaneTmp[2].u_topleft = pPlaneTmp[0].u_topleft; - - M4VIFI_UInt8 err; - err = M4VIFI_SemiplanarYUV420toYUV420_X86(pUserData, pPlaneIn,&pPlaneTmp[0]); - - if(err != M4VIFI_OK) - { - LOGE("Error: M4VIFI_SemiplanarYUV420toYUV420 fails!"); - free(pData); - return err; - } - - err = M4VIFI_ResizeBilinearYUV420toYUV420_X86(pUserData,&pPlaneTmp[0],pPlaneOut); - - free(pData); - LOGV("M4VIFI_ResizeBilinearNV12toYUV420 end"); - return err; - -} - -M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toBGR565(void *pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) -{ - LOGV("M4VIFI_ResizeBilinearNV12toBGR565 begin"); - - M4VIFI_ImagePlane pPlaneTmp[3]; - M4OSA_UInt32 mVideoWidth, mVideoHeight; - M4OSA_UInt32 mFrameSize; - - mVideoWidth = pPlaneIn[0].u_width; - mVideoHeight = pPlaneIn[0].u_height; - mFrameSize = mVideoWidth * mVideoHeight * 3/2; - - M4OSA_UInt8 *pData = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc( - mFrameSize, - 12420, - (M4OSA_Char*)("M4VIFI_ResizeBilinearNV12toYUV420:tempBuffer") - ); - if (NULL == pData) 
- { - LOGE("Error: Fail to allocate tempBuffer!"); - return M4VIFI_ALLOC_FAILURE; - } - pPlaneTmp[0].pac_data = pData; - pPlaneTmp[0].u_height = pPlaneIn[0].u_height; - pPlaneTmp[0].u_width = pPlaneIn[0].u_width; - pPlaneTmp[0].u_stride = pPlaneIn[0].u_stride; - pPlaneTmp[0].u_topleft = pPlaneIn[0].u_topleft; - - pPlaneTmp[1].pac_data = (M4OSA_UInt8 *)(pData + mVideoWidth*mVideoHeight); - pPlaneTmp[1].u_height = pPlaneTmp[0].u_height/2; - pPlaneTmp[1].u_width = pPlaneTmp[0].u_width/2; - pPlaneTmp[1].u_stride = pPlaneTmp[0].u_stride/2; - pPlaneTmp[1].u_topleft = pPlaneTmp[0].u_topleft; - - pPlaneTmp[2].pac_data = (M4OSA_UInt8 *)(pData + mVideoWidth*mVideoHeight*5/4); - pPlaneTmp[2].u_height = pPlaneTmp[0].u_height/2; - pPlaneTmp[2].u_width = pPlaneTmp[0].u_width/2; - pPlaneTmp[2].u_stride = pPlaneTmp[0].u_stride/2; - pPlaneTmp[2].u_topleft = pPlaneTmp[0].u_topleft; - - M4VIFI_UInt8 err; - err = M4VIFI_SemiplanarYUV420toYUV420_X86(pUserData, pPlaneIn,&pPlaneTmp[0]); - - if(err != M4VIFI_OK) - { - LOGE("Error: M4VIFI_SemiplanarYUV420toYUV420 fails!"); - free(pData); - return err; - } - - err = M4VIFI_ResizeBilinearYUV420toBGR565_X86(pUserData,&pPlaneTmp[0],pPlaneOut); - free(pData); - - LOGV("M4VIFI_ResizeBilinearNV12toBGR565 end"); - return err; -} - diff --git a/frameworks/videoedit/lvpp/VideoEditorToolsNV12.h b/frameworks/videoedit/lvpp/VideoEditorToolsNV12.h deleted file mode 100644 index 55b527a..0000000 --- a/frameworks/videoedit/lvpp/VideoEditorToolsNV12.h +++ /dev/null @@ -1,71 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
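 * A hypothetical call site for the declarations below (buffer and size
 * names are invented; per the geometry checks in the .c file, plane 1
 * is the interleaved UV plane: full luma width in bytes, half luma
 * height):
 *
 *   M4VIFI_ImagePlane in[2], out[2];
 *   in[0].pac_data = nv12_buf;           in[1].pac_data = nv12_buf + w * h;
 *   in[0].u_width = w; in[0].u_height = h;     in[0].u_stride = w;
 *   in[1].u_width = w; in[1].u_height = h / 2; in[1].u_stride = w;
 *   in[0].u_topleft = in[1].u_topleft = 0;
 *   // ... fill out[] the same way for the destination geometry ...
 *   M4VIFI_UInt8 err = M4VIFI_ResizeBilinearNV12toNV12(NULL, in, out);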
- */ - -#ifndef VIDEO_EDITOR_TOOLS_NV12_H -#define VIDEO_EDITOR_TOOLS_NV12_H - -#include "M4OSA_Types.h" -#include "M4VIFI_FiltersAPI.h" -#include "M4VIFI_Clip.h" -#include "M4VIFI_Defines.h" -#include "M4OSA_Memory.h" - -M4VIFI_UInt8 M4VIFI_RGB888toNV12(void *pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); - -M4VIFI_UInt8 M4VIFI_NV12toNV12(void *user_data, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); - -M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toNV12(void *pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); - -M4VIFI_UInt8 M4VIFI_Rotate90LeftNV12toNV12(void* pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); - -M4VIFI_UInt8 M4VIFI_Rotate90RightNV12toNV12(void* pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); - -M4VIFI_UInt8 M4VIFI_Rotate180NV12toNV12(void* pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); - -M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toBGR565(void *pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); - -M4VIFI_UInt8 M4VIFI_ResizeBilinearNV12toYUV420(void *pUserData, - M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut); - -#endif - diff --git a/frameworks/videoedit/stagefrightshells/Android.mk b/frameworks/videoedit/stagefrightshells/Android.mk deleted file mode 100644 index 4c377fa..0000000 --- a/frameworks/videoedit/stagefrightshells/Android.mk +++ /dev/null @@ -1,75 +0,0 @@ -# -# Copyright (C) 2011 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -LOCAL_PATH:= $(call my-dir) -include $(CLEAR_VARS) - -LOCAL_SRC_FILES:= \ - MediaBufferPuller.cpp \ - VideoEditorVideoDecoder.cpp \ - VideoEditorAudioDecoder.cpp \ - VideoEditorMp3Reader.cpp \ - VideoEditor3gpReader.cpp \ - VideoEditorBuffer.cpp \ - VideoEditorVideoEncoder.cpp \ - VideoEditorAudioEncoder.cpp \ - IntelVideoEditorUtils.cpp \ - IntelVideoEditorEncoderSource.cpp \ - IntelVideoEditorAVCEncoder.cpp \ - IntelVideoEditorH263Encoder.cpp - -LOCAL_C_INCLUDES += \ - $(TOP)/frameworks/av/media/libmediaplayerservice \ - $(TOP)/frameworks/av/media/libstagefright \ - $(TOP)/frameworks/av/media/libstagefright/include \ - $(TOP)/frameworks/av/media/libstagefright/rtsp \ - $(call include-path-for, corecg graphics) \ - $(TOP)/frameworks/av/libvideoeditor/lvpp \ - $(TOP)/frameworks/av/libvideoeditor/osal/inc \ - $(TOP)/frameworks/av/libvideoeditor/vss/inc \ - $(TOP)/frameworks/av/libvideoeditor/vss/common/inc \ - $(TOP)/frameworks/av/libvideoeditor/vss/mcs/inc \ - $(TOP)/frameworks/av/libvideoeditor/vss/stagefrightshells/inc \ - $(TOP)/frameworks/native/include/media/editor \ - $(TOP)/frameworks/native/include/media/openmax \ - $(TARGET_OUT_HEADERS)/libsharedbuffer \ - $(TARGET_OUT_HEADERS)/libmix_videoencoder \ - $(TARGET_OUT_HEADERS)/libva - -LOCAL_SHARED_LIBRARIES := \ - libcutils \ - libutils \ - libmedia \ - libbinder \ - libstagefright \ - libstagefright_foundation \ - libstagefright_omx \ - libgui \ - libvideoeditor_osal \ - libvideoeditorplayer \ - libsharedbuffer - -LOCAL_CFLAGS += \ - -LOCAL_STATIC_LIBRARIES := \ - libstagefright_color_conversion - - -LOCAL_MODULE:= libvideoeditor_stagefrightshells_intel - -LOCAL_MODULE_TAGS := optional - -include $(BUILD_STATIC_LIBRARY) diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp deleted file mode 100644 index 9076bbd..0000000 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.cpp +++ /dev/null @@ -1,517 +0,0 @@ -/* - * INTEL CONFIDENTIAL - * Copyright 2010-2011 Intel Corporation All Rights Reserved. - - * The source code, information and material ("Material") contained herein is owned - * by Intel Corporation or its suppliers or licensors, and title to such Material - * remains with Intel Corporation or its suppliers or licensors. The Material contains - * proprietary information of Intel or its suppliers and licensors. The Material is - * protected by worldwide copyright laws and treaty provisions. No part of the Material - * may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, - * distributed or disclosed in any way without Intel's prior express written permission. - * No license under any patent, copyright or other intellectual property rights in the - * Material is granted to or conferred upon you, either expressly, by implication, inducement, - * estoppel or otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - - * Unless otherwise agreed by Intel in writing, you may not remove or alter this notice or any - * other notice embedded in Materials by Intel or Intel's suppliers or licensors in any way. 
- */ - -#define LOG_NDEBUG 1 -#define LOG_TAG "IntelVideoEditorAVCEncoder" -#include -#include "OMX_Video.h" -#include -#include -#include -#include -#include -#include "IntelVideoEditorAVCEncoder.h" -#include - -#define INIT_BUF_FULLNESS_RATIO 0.6 -#define MIN_INTRA_PERIOD 30 -#define SHORT_INTRA_PERIOD (mVideoFrameRate) -#define MEDIUM_INTRA_PERIOD (2*mVideoFrameRate) -#define LONG_INTRA_PERIOD (4*mVideoFrameRate) -#define LOW_QUALITY_BITRATE 2000000 -#define MEDIUM_QUALITY_BITRATE 5000000 -#define BITRATE_1M 1000000 -#define BITRATE_2M 2000000 -#define BITRATE_4M 4000000 -#define BITRATE_5M 5000000 - -namespace android { - -IntelVideoEditorAVCEncoder::IntelVideoEditorAVCEncoder( - const sp& source, - const sp& meta) - : mSource(source), - mMeta(meta), - mUseSyncMode(0), - mStarted(false), - mFirstFrame(true), - mFrameCount(0), - mVAEncoder(NULL), - mOutBufGroup(NULL), - mLastInputBuffer(NULL) { - - LOGV("Construct IntelVideoEditorAVCEncoder"); -} - -IntelVideoEditorAVCEncoder::~IntelVideoEditorAVCEncoder() { - LOGV("Destruct IntelVideoEditorAVCEncoder"); - if (mStarted) { - stop(); - } -} - -status_t IntelVideoEditorAVCEncoder::initCheck(const sp& meta) { - LOGV("initCheck"); - - Encode_Status encStatus; - uint32_t disableFrameSkip = 0; - sp sourceFormat = mSource->getFormat(); - - CHECK(sourceFormat->findInt32(kKeyWidth, &mVideoWidth)); - CHECK(sourceFormat->findInt32(kKeyHeight, &mVideoHeight)); - CHECK(sourceFormat->findInt32(kKeyFrameRate, &mVideoFrameRate)); - CHECK(sourceFormat->findInt32(kKeyColorFormat, &mVideoColorFormat)); - - CHECK(sourceFormat->findInt32(kKeyBitRate, &mVideoBitRate)); - - // Tune the output bitrates to improve the quality - if (mVideoBitRate < BITRATE_1M) { - mVideoBitRate = BITRATE_1M; - if (mVideoHeight > 720) { - mVideoBitRate = BITRATE_4M; - } - else if (mVideoHeight > 480) - { - mVideoBitRate = BITRATE_2M; - } - } - else if (mVideoBitRate < BITRATE_4M) { - if (mVideoHeight > 720) { - mVideoBitRate = BITRATE_5M; - } - else if (mVideoHeight > 480) { - mVideoBitRate = BITRATE_4M; - } - } - - LOGI("mVideoWidth = %d, mVideoHeight = %d, mVideoFrameRate = %d, mVideoColorFormat = %d, mVideoBitRate = %d", - mVideoWidth, mVideoHeight, mVideoFrameRate, mVideoColorFormat, mVideoBitRate); - - // disable frame skip for low bitrate clips - if (mVideoBitRate < BITRATE_2M) { - LOGI("Frameskip is disabled for low bitrate clips"); - disableFrameSkip = 1; - } - - if (mVideoColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) { - LOGE("Color format %d is not supported", mVideoColorFormat); - return BAD_VALUE; - } - - mFrameSize = mVideoHeight* mVideoWidth* 1.5; - - /* - * SET PARAMS FOR THE ENCODER BASED ON THE METADATA - * */ - encStatus = mVAEncoder->getParameters(&mEncParamsCommon); - CHECK(encStatus == ENCODE_SUCCESS); - LOGV("got encoder params"); - - mEncParamsCommon.resolution.width = mVideoWidth; - mEncParamsCommon.resolution.height= mVideoHeight; - mEncParamsCommon.frameRate.frameRateNum = mVideoFrameRate; - mEncParamsCommon.frameRate.frameRateDenom = 1; - mEncParamsCommon.rcMode = RATE_CONTROL_VBR; - mEncParamsCommon.rcParams.bitRate = mVideoBitRate; - mEncParamsCommon.rcParams.disableFrameSkip = disableFrameSkip; - mEncParamsCommon.rawFormat = RAW_FORMAT_NV12; - - mEncParamsCommon.rcParams.minQP = 0; - mEncParamsCommon.rcParams.initQP = 0; - - if (mVideoBitRate < LOW_QUALITY_BITRATE) { - mEncParamsCommon.intraPeriod = LONG_INTRA_PERIOD; - } - else if (mVideoBitRate < MEDIUM_QUALITY_BITRATE) { - mEncParamsCommon.intraPeriod = MEDIUM_INTRA_PERIOD; - } - else { - 
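// >= 5 Mbps: refresh every second (intraPeriod = frame rate). Net
// effect of this chain, from the macros at the top of the file:
// < 2 Mbps -> key frame every 4*fps frames (~4 s GOP), < 5 Mbps ->
// every 2*fps (~2 s), otherwise every fps frames (~1 s); the
// MIN_INTRA_PERIOD floor of 30 frames is applied just below.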
mEncParamsCommon.intraPeriod = SHORT_INTRA_PERIOD; - } - if (mEncParamsCommon.intraPeriod < MIN_INTRA_PERIOD) { - mEncParamsCommon.intraPeriod = MIN_INTRA_PERIOD; - } - - mEncParamsCommon.syncEncMode = mUseSyncMode; - mFrameCount = 0; - - // All luma and chroma block edges of the slice are filtered - mEncParamsCommon.disableDeblocking = 0; - - encStatus = mVAEncoder->setParameters(&mEncParamsCommon); - CHECK(encStatus == ENCODE_SUCCESS); - LOGV("new encoder params set"); - - encStatus = mVAEncoder->getParameters(&mEncParamsH264); - CHECK(encStatus == ENCODE_SUCCESS); - LOGV("got H264 encoder params "); - - mEncParamsH264.idrInterval = 1; - mEncParamsH264.sliceNum.iSliceNum = 2; - mEncParamsH264.sliceNum.pSliceNum = 2; - - // If the bitrate is low, we set the slice number to 1 in one frame to avoid visible boundary - if (mVideoBitRate < LOW_QUALITY_BITRATE) { - mEncParamsH264.sliceNum.iSliceNum = 1; - mEncParamsH264.sliceNum.pSliceNum = 1; - } - mEncParamsH264.VUIFlag = 0; - - encStatus = mVAEncoder->setParameters(&mEncParamsH264); - CHECK(encStatus == ENCODE_SUCCESS); - LOGV("new H264 encoder params set"); - - if (disableFrameSkip) { - VideoConfigBitRate configBitrate; - encStatus = mVAEncoder->getConfig(&configBitrate); - CHECK(encStatus == ENCODE_SUCCESS); - LOGV("got encoder config set"); - - configBitrate.rcParams.disableFrameSkip = 1; - encStatus = mVAEncoder->setConfig(&configBitrate); - CHECK(encStatus == ENCODE_SUCCESS); - LOGV("got encoder frame skip/bits stuffing set"); - } - - VideoParamsHRD hrdParam; - encStatus = mVAEncoder->getParameters(&hrdParam); - CHECK(encStatus == ENCODE_SUCCESS); - LOGV("got encoder hrd params "); - - hrdParam.bufferSize = mVideoBitRate; - hrdParam.initBufferFullness = hrdParam.bufferSize * INIT_BUF_FULLNESS_RATIO; - - encStatus = mVAEncoder->setParameters(&hrdParam); - CHECK(encStatus == ENCODE_SUCCESS); - LOGV("new encoder hard params set"); - - mOutBufGroup = new MediaBufferGroup(); - CHECK(mOutBufGroup != NULL); - - return OK; -} - -status_t IntelVideoEditorAVCEncoder::start(MetaData *params) { - LOGV("start"); - status_t ret = OK; - - if (mStarted) { - LOGW("Call start() when encoder already started"); - return OK; - } - - mSource->start(params); - - mVAEncoder = createVideoEncoder(MEDIA_MIMETYPE_VIDEO_AVC); - - if (mVAEncoder == NULL) { - LOGE("Fail to create video encoder"); - return NO_MEMORY; - } - mInitCheck = initCheck(mMeta); - - if (mInitCheck != OK) { - return mInitCheck; - } - - uint32_t maxSize; - mVAEncoder->getMaxOutSize(&maxSize); - - LOGV("allocating output buffers of size %d",maxSize); - for (int i = 0; i < OUTPUT_BUFFERS; i++ ) { - mOutBufGroup->add_buffer(new MediaBuffer(maxSize)); - } - - if (OK != getSharedBuffers()) { - LOGE("Failed to get the shared buffers from encoder "); - return UNKNOWN_ERROR; - } - - Encode_Status err; - err = mVAEncoder->start(); - if (err!= ENCODE_SUCCESS) { - LOGE("Failed to initialize the encoder: %d", err); - - /* We should exit the sharedbuffer mode, when failing to - create the HW video encoder. - */ - - androidCreateThread(SBShutdownFunc,this); - LOGI("Successfull create thread to exit shared buffer mode!"); - - mSource->stop(); - - sp r = BufferShareRegistry::getInstance(); - err = r->encoderRequestToDisableSharingMode(); - LOGV("encoderRequestToDisableSharingMode returned %d\n", err); - - /* libsharedbuffer wants the source to call this after the encoder calls - * encoderRequestToDisableSharingMode. 
Instead of doing complicated - * synchronization, let's just call this ourselves on the source's - * behalf. */ - err = r->sourceRequestToDisableSharingMode(); - LOGV("sourceRequestToDisableSharingMode returned %d\n", err); - - releaseVideoEncoder(mVAEncoder); - mVAEncoder = NULL; - - return UNKNOWN_ERROR; - } - - if (OK != setSharedBuffers()) { - LOGE("Failed to setup the shared buffers"); - return UNKNOWN_ERROR; - } - - mStarted = true; - LOGV("start- DONE"); - return OK; -} - -int IntelVideoEditorAVCEncoder::SBShutdownFunc(void* arg) -{ - LOGV("IntelVideoEditorAVCEncoder::SBShutdownFunc begin()"); - sp r = BufferShareRegistry::getInstance(); - int error = r->sourceExitSharingMode(); - LOGV("sourceExitSharingMode returns %d",error); - return 0; -} - -status_t IntelVideoEditorAVCEncoder::stop() { - LOGV("stop"); - if (!mStarted) { - LOGW("Call stop() when encoder has not started"); - return OK; - } - - if (mOutBufGroup) { - delete mOutBufGroup; - mOutBufGroup = NULL; - } - if (mLastInputBuffer!=NULL) { - mLastInputBuffer->release(); - } - mLastInputBuffer = NULL; - - /* call mSource->stop in a new thread, so the source - can do its end of shared buffer shutdown */ - - androidCreateThread(SBShutdownFunc,this); - LOGV("Successfull create thread!"); - - /* do encoder's buffer sharing shutdown */ - sp r = BufferShareRegistry::getInstance(); - int err = r->encoderExitSharingMode(); - LOGV("encoderExitSharingMode returned %d\n", err); - - mSource->stop(); - - err = r->encoderRequestToDisableSharingMode(); - LOGV("encoderRequestToDisableSharingMode returned %d\n", err); - - /* libsharedbuffer wants the source to call this after the encoder calls - * encoderRequestToDisableSharingMode. Instead of doing complicated - * synchronization, let's just call this ourselves on the source's - * behalf. 
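 * For reference, the complete teardown order in this stop() path is:
 * a helper thread runs sourceExitSharingMode() via SBShutdownFunc,
 * then encoderExitSharingMode(), mSource->stop(),
 * encoderRequestToDisableSharingMode(), the call just below on the
 * source's behalf, and finally releaseVideoEncoder().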
*/ - err = r->sourceRequestToDisableSharingMode(); - LOGV("sourceRequestToDisableSharingMode returned %d\n", err); - - releaseVideoEncoder(mVAEncoder); - mVAEncoder = NULL; - - mStarted = false; - LOGV("stop - DONE"); - - return OK; -} - -sp IntelVideoEditorAVCEncoder::getFormat() { - - sp format = new MetaData; - format->setInt32(kKeyWidth, mVideoWidth); - format->setInt32(kKeyHeight, mVideoHeight); - format->setInt32(kKeyBitRate, mVideoBitRate); - format->setInt32(kKeySampleRate, mVideoFrameRate); - format->setInt32(kKeyColorFormat, mVideoColorFormat); - format->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC); - format->setCString(kKeyDecoderComponent, "IntelVideoEditorAVCEncoder"); - return format; -} - -status_t IntelVideoEditorAVCEncoder::read(MediaBuffer **out, const ReadOptions *options) { - - status_t err = OK; - Encode_Status encRet; - MediaBuffer *tmpIn; - int64_t timestamp = 0; - CHECK(!options); - mReadOptions = options; - *out = NULL; - - LOGV("IntelVideoEditorAVCEncoder::read start"); - - do { - err = mSource->read(&tmpIn, NULL); - if (err == INFO_FORMAT_CHANGED) { - stop(); - start(NULL); - } - } while (err == INFO_FORMAT_CHANGED); - - if (err == ERROR_END_OF_STREAM) { - if (mLastInputBuffer != NULL) { - tmpIn = mLastInputBuffer; - } else { - return err; - } - } - else if (err != OK) { - LOGE("Failed to read input video frame: %d", err); - return err; - } - - - VideoEncRawBuffer vaInBuf; - - vaInBuf.data = (uint8_t *)tmpIn->data(); - vaInBuf.size = tmpIn->size(); - - tmpIn->meta_data()->findInt64(kKeyTime, (int64_t *)&(vaInBuf.timeStamp)); - LOGV("Encoding: buffer %p, size = %d, ts= %llu",vaInBuf.data, vaInBuf.size, vaInBuf.timeStamp); - - encRet = mVAEncoder->encode(&vaInBuf); - if (encRet != ENCODE_SUCCESS) { - LOGE("Failed to encode input video frame: %d", encRet); - tmpIn->release(); - return UNKNOWN_ERROR; - } - - if (mLastInputBuffer != NULL && err != ERROR_END_OF_STREAM) { - mLastInputBuffer->meta_data()->findInt64(kKeyTime, ×tamp); - mLastInputBuffer->release(); - mLastInputBuffer = NULL; - } else { - timestamp = vaInBuf.timeStamp; - } - mLastInputBuffer = tmpIn; - - LOGV("Encoding Done, getting output buffer "); - MediaBuffer *outputBuffer; - - CHECK(mOutBufGroup->acquire_buffer(&outputBuffer) == OK); - LOGV("Waiting for outputbuffer"); - VideoEncOutputBuffer vaOutBuf; - vaOutBuf.bufferSize = outputBuffer->size(); - vaOutBuf.dataSize = 0; - vaOutBuf.data = (uint8_t *) outputBuffer->data(); - vaOutBuf.format = OUTPUT_EVERYTHING; - - if (mFirstFrame) { - LOGV("mFirstFrame\n"); - encRet = mVAEncoder->getOutput(&vaOutBuf); - if (encRet != ENCODE_SUCCESS) { - LOGE("Failed to retrieve encoded video frame: %d", encRet); - outputBuffer->release(); - return UNKNOWN_ERROR; - } - outputBuffer->meta_data()->setInt32(kKeyIsCodecConfig,true); - outputBuffer->meta_data()->setInt32(kKeyIsSyncFrame,true); - mFirstFrame = false; - } else { - vaOutBuf.format = OUTPUT_EVERYTHING; - encRet = mVAEncoder->getOutput(&vaOutBuf); - if (encRet != ENCODE_SUCCESS) { - LOGE("Failed to retrieve encoded video frame: %d", encRet); - outputBuffer->release(); - return UNKNOWN_ERROR; - } - if (vaOutBuf.flag & ENCODE_BUFFERFLAG_SYNCFRAME) { - outputBuffer->meta_data()->setInt32(kKeyIsSyncFrame,true); - } - } - - LOGV("Got it! 
data= %p, ts=%llu size =%d", vaOutBuf.data, timestamp, vaOutBuf.dataSize); - - outputBuffer->set_range(0, vaOutBuf.dataSize); - outputBuffer->meta_data()->setInt64(kKeyTime,timestamp); - *out = outputBuffer; - - LOGV("IntelVideoEditorAVCEncoder::read end"); - return err; -} - -status_t IntelVideoEditorAVCEncoder::getSharedBuffers() { - - LOGV("getSharedBuffers begin"); - Encode_Status encRet; - status_t ret = OK; - - sp r = BufferShareRegistry::getInstance(); - - if (r->encoderRequestToEnableSharingMode() == BS_SUCCESS) { - LOGV("Shared buffer mode available\n"); - } - else { - LOGE("Request to enable sharing failed \n"); - return UNKNOWN_ERROR; - } - - for(int i = 0; i < INPUT_SHARED_BUFFERS; i++) { - VideoParamsUsrptrBuffer paramsUsrptrBuffer; - paramsUsrptrBuffer.type = VideoParamsTypeUsrptrBuffer; - paramsUsrptrBuffer.size = sizeof(VideoParamsUsrptrBuffer); - paramsUsrptrBuffer.expectedSize = mFrameSize; - paramsUsrptrBuffer.format = STRING_TO_FOURCC("NV12"); - paramsUsrptrBuffer.width = mVideoWidth; - paramsUsrptrBuffer.height = mVideoHeight; - LOGV("Share buffer request="); - encRet = mVAEncoder->getParameters(¶msUsrptrBuffer); - if (encRet != ENCODE_SUCCESS ) { - LOGE("could not allocate input surface from the encoder %d", encRet); - ret = NO_MEMORY; - break; - } - mSharedBufs[i].allocatedSize = paramsUsrptrBuffer.actualSize; - mSharedBufs[i].height = mVideoHeight; - mSharedBufs[i].width = mVideoWidth; - mSharedBufs[i].pointer = paramsUsrptrBuffer.usrPtr; - mSharedBufs[i].stride = paramsUsrptrBuffer.stride; - } - LOGV("getSharedBuffers end"); - return ret; -} - -status_t IntelVideoEditorAVCEncoder::setSharedBuffers() { - LOGV("setSharedBuffers"); - sp r = BufferShareRegistry::getInstance(); - - if (r->encoderSetSharedBuffer(mSharedBufs,INPUT_SHARED_BUFFERS) != BS_SUCCESS) { - LOGE("encoderSetSharedBuffer failed \n"); - return UNKNOWN_ERROR; - } - - if (r->encoderEnterSharingMode() != BS_SUCCESS) { - LOGE("sourceEnterSharingMode failed\n"); - return UNKNOWN_ERROR; - } - return OK; -} - -} diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.h b/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.h deleted file mode 100644 index 29fd1c7..0000000 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorAVCEncoder.h +++ /dev/null @@ -1,85 +0,0 @@ -/* - * INTEL CONFIDENTIAL - * Copyright 2010-2011 Intel Corporation All Rights Reserved. - - * The source code, information and material ("Material") contained herein is owned - * by Intel Corporation or its suppliers or licensors, and title to such Material - * remains with Intel Corporation or its suppliers or licensors. The Material contains - * proprietary information of Intel or its suppliers and licensors. The Material is - * protected by worldwide copyright laws and treaty provisions. No part of the Material - * may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, - * distributed or disclosed in any way without Intel's prior express written permission. - * No license under any patent, copyright or other intellectual property rights in the - * Material is granted to or conferred upon you, either expressly, by implication, inducement, - * estoppel or otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - - * Unless otherwise agreed by Intel in writing, you may not remove or alter this notice or any - * other notice embedded in Materials by Intel or Intel's suppliers or licensors in any way. 
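 * A hypothetical wiring of this class (yuvSource and the parameter
 * values are illustrative; the class is a MediaSource that pulls NV12
 * frames from another MediaSource and emits AVC access units):
 *
 *   sp<MetaData> meta = new MetaData;
 *   meta->setInt32(kKeyWidth, 1280);   meta->setInt32(kKeyHeight, 720);
 *   meta->setInt32(kKeyFrameRate, 30); meta->setInt32(kKeyBitRate, 4000000);
 *   meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420SemiPlanar);
 *   sp<MediaSource> enc = new IntelVideoEditorAVCEncoder(yuvSource, meta);
 *   enc->start(NULL);
 *   MediaBuffer *au = NULL;
 *   while (enc->read(&au, NULL) == OK) { // hand au to the muxer here
 *       au->release(); au = NULL;
 *   }
 *   enc->stop();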
- */ - -#ifndef INTELVIDEOEDITORAVCENCODER_H -#define INTELVIDEOEDITORAVCENCODER_H - -#include -#include -#include -#include -#include "va/va.h" -#include "VideoEncoderHost.h" -#include - -namespace android { -struct IntelVideoEditorAVCEncoder : public MediaSource { - IntelVideoEditorAVCEncoder(const sp &source, - const sp& meta); - - virtual status_t start(MetaData *params); - virtual status_t stop(); - - virtual sp getFormat(); - - virtual status_t read(MediaBuffer **buffer, const ReadOptions *options); - - -protected: - virtual ~IntelVideoEditorAVCEncoder(); - -private: - sp mSource; - sp mMeta; - - int32_t mVideoWidth; - int32_t mVideoHeight; - int32_t mFrameSize; - int32_t mVideoFrameRate; - int32_t mVideoBitRate; - int32_t mVideoColorFormat; - int32_t mUseSyncMode; - status_t mInitCheck; - bool mStarted; - bool mFirstFrame; - int32_t mFrameCount; - static const int OUTPUT_BUFFERS = 6; - static const int INPUT_SHARED_BUFFERS = 8; - IVideoEncoder *mVAEncoder; - VideoParamsCommon mEncParamsCommon; - VideoParamsAVC mEncParamsH264; - SharedBufferType mSharedBufs[INPUT_SHARED_BUFFERS]; - const ReadOptions *mReadOptions; - MediaBufferGroup *mOutBufGroup; /* group of output buffers*/ - MediaBuffer *mLastInputBuffer; - -private: - status_t initCheck(const sp& meta); - int32_t calcBitrate(int width, int height); - status_t getSharedBuffers(); - status_t setSharedBuffers(); - static int SBShutdownFunc(void* arg); - - IntelVideoEditorAVCEncoder(const IntelVideoEditorAVCEncoder &); - IntelVideoEditorAVCEncoder &operator=(const IntelVideoEditorAVCEncoder &); -}; -}; -#endif - diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.cpp deleted file mode 100644 index 060dad0..0000000 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.cpp +++ /dev/null @@ -1,279 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2010 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - -//#define LOG_NDEBUG 0 -#define LOG_TAG "IntelVideoEditorEncoderSource" -#include "utils/Log.h" - -#include "IntelVideoEditorEncoderSource.h" -#include "utils/Vector.h" -#include -#include -#include -#include -#include -#include - -namespace android { -sp IntelVideoEditorEncoderSource::Create( - const sp &format) { - - sp aSource = - new IntelVideoEditorEncoderSource(format); - return aSource; -} - -IntelVideoEditorEncoderSource::IntelVideoEditorEncoderSource( - const sp &format): - mGroup(NULL), - mUseSharedBuffers(false), - mFirstBufferLink(NULL), - mLastBufferLink(NULL), - mNbBuffer(0), - mIsEOS(false), - mState(CREATED), - mEncFormat(format) { - LOGV("IntelVideoEditorEncoderSource::IntelVideoEditorEncoderSource"); -} - -IntelVideoEditorEncoderSource::~IntelVideoEditorEncoderSource() { - - // Safety clean up - if( STARTED == mState ) { - stop(); - } -} - -status_t IntelVideoEditorEncoderSource::start(MetaData *meta) { - Mutex::Autolock autolock(mLock); - status_t err = OK; - - LOGV("IntelVideoEditorEncoderSource::start() begin"); - - if( CREATED != mState ) { - LOGV("IntelVideoEditorEncoderSource::start: invalid state %d", mState); - return UNKNOWN_ERROR; - } - mState = STARTED; - sp r = BufferShareRegistry::getInstance(); - if (r->sourceRequestToEnableSharingMode() == BS_SUCCESS) { - LOGI("Shared buffer mode available\n"); - mUseSharedBuffers = true; - mGroup = NULL; - } - else - { - LOGE("Shared buffer mode not available\n"); - return UNKNOWN_ERROR; - } - LOGV("IntelVideoEditorEncoderSource::start() END (0x%x)", err); - return err; -} - -status_t IntelVideoEditorEncoderSource::getSharedBuffers() -{ - Mutex::Autolock autolock(mLock); - - LOGV("IntelVideoEditorEncoderSource::getSharedBuffers begin"); - sp r = BufferShareRegistry::getInstance(); - SharedBufferType *bufs = NULL; - int buf_cnt = 0; - - if (r->sourceEnterSharingMode() != BS_SUCCESS) { - LOGE("sourceEnterSharingMode failed\n"); - return UNKNOWN_ERROR; - } - - if (r->sourceGetSharedBuffer(NULL, &buf_cnt) != BS_SUCCESS) { - LOGE("sourceGetSharedBuffer failed, unable to get buffer count\n"); - return UNKNOWN_ERROR; - } - - bufs = new SharedBufferType[buf_cnt]; - if (r->sourceGetSharedBuffer(bufs, &buf_cnt) != BS_SUCCESS) { - LOGE("sourceGetSharedBuffer failed, unable to retrieve buffers\n"); - delete [] bufs; - return UNKNOWN_ERROR; - } - - mGroup = new MediaBufferGroup(); - - for (int n = 0; n < buf_cnt; n++) - { - mGroup->add_buffer(new MediaBuffer(bufs[n].pointer, bufs[n].allocatedSize)); - } - - delete [] bufs; - - LOGV("IntelVideoEditorAVCEncoderSource::getSharedBuffers end"); - return OK; -} - - -status_t IntelVideoEditorEncoderSource::stop() { - - LOGV("IntelVideoEditorAVCEncoderSource::stop() begin"); - status_t err = OK; - - if( STARTED != mState ) { - LOGV("IntelVideoEditorAVCEncoderSource::stop: invalid state %d", mState); - return UNKNOWN_ERROR; - } - - if (mUseSharedBuffers) { - if (mGroup) { - delete mGroup; - mGroup = NULL; - } - mUseSharedBuffers = false; - } - - int32_t i = 0; - MediaBufferChain* tmpLink = NULL; - while( mFirstBufferLink ) { - i++; - tmpLink = mFirstBufferLink; - 
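// Detach the head link before freeing it so the walk never touches a
// deleted node; note that only the chain links are freed here, not any
// MediaBuffers they may still reference.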
-        mFirstBufferLink = mFirstBufferLink->nextLink;
-        delete tmpLink;
-    }
-    LOGV("IntelVideoEditorEncoderSource::stop: %d buffers remained", i);
-    mFirstBufferLink = NULL;
-    mLastBufferLink = NULL;
-    mState = CREATED;
-
-    LOGV("IntelVideoEditorEncoderSource::stop() END (0x%x)", err);
-    return err;
-}
-
-sp<MetaData> IntelVideoEditorEncoderSource::getFormat() {
-
-    LOGV("IntelVideoEditorEncoderSource::getFormat");
-    return mEncFormat;
-}
-
-status_t IntelVideoEditorEncoderSource::read(MediaBuffer **buffer,
-        const ReadOptions *options) {
-    Mutex::Autolock autolock(mLock);
-
-    LOGV("IntelVideoEditorEncoderSource::read() begin");
-
-    MediaSource::ReadOptions readOptions;
-    status_t err = OK;
-    MediaBufferChain* tmpLink = NULL;
-
-    if (STARTED != mState) {
-        LOGV("IntelVideoEditorEncoderSource::read: invalid state %d", mState);
-        return UNKNOWN_ERROR;
-    }
-
-    while (mFirstBufferLink == NULL && !mIsEOS) {
-        LOGV("Wait for buffer in IntelVideoEditorEncoderSource::read()");
-        mBufferCond.wait(mLock);
-    }
-
-    LOGV("Got a buffer in IntelVideoEditorEncoderSource::read()!");
-
-    // End of stream?
-    if (mFirstBufferLink == NULL) {
-        *buffer = NULL;
-        LOGV("IntelVideoEditorEncoderSource::read: EOS");
-        return ERROR_END_OF_STREAM;
-    }
-
-    // Get a buffer from the chain
-    *buffer = mFirstBufferLink->buffer;
-    tmpLink = mFirstBufferLink;
-    mFirstBufferLink = mFirstBufferLink->nextLink;
-
-    if (NULL == mFirstBufferLink) {
-        mLastBufferLink = NULL;
-    }
-    delete tmpLink;
-    mNbBuffer--;
-
-    LOGV("IntelVideoEditorEncoderSource::read() END (0x%x)", err);
-    return err;
-}
-
-int32_t IntelVideoEditorEncoderSource::storeBuffer(MediaBuffer *buffer) {
-    Mutex::Autolock autolock(mLock);
-
-    LOGV("IntelVideoEditorEncoderSource::storeBuffer() begin");
-
-    status_t err = OK;
-
-    if (NULL == buffer) {
-        LOGV("IntelVideoEditorEncoderSource::storeBuffer: reached EOS");
-        mIsEOS = true;
-    } else {
-        MediaBufferChain* newLink = new MediaBufferChain;
-        newLink->buffer = buffer;
-        newLink->nextLink = NULL;
-        if (NULL != mLastBufferLink) {
-            mLastBufferLink->nextLink = newLink;
-        } else {
-            mFirstBufferLink = newLink;
-        }
-        mLastBufferLink = newLink;
-        mNbBuffer++;
-    }
-    mBufferCond.signal();
-    LOGV("IntelVideoEditorEncoderSource::storeBuffer() end");
-    return mNbBuffer;
-}
-
-int32_t IntelVideoEditorEncoderSource::requestBuffer(MediaBuffer **buffer) {
-    status_t err = OK;
-    LOGV("IntelVideoEditorEncoderSource::requestBuffer() begin");
-    if (!mGroup && mUseSharedBuffers) {
-        err = getSharedBuffers();
-        if (err != OK) {
-            LOGE("shared buffer setup failed\n");
-            return err;
-        }
-    }
-
-    err = mGroup->acquire_buffer(buffer);
-    if (err != OK) {
-        LOGE("Failed to get shared buffers");
-        return UNKNOWN_ERROR;
-    }
-    LOGV("requestBuffer buffer addr = 0x%p", (uint8_t *)(*buffer)->data());
-    LOGV("IntelVideoEditorEncoderSource::requestBuffer() end");
-    return err;
-}
-}
diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.h b/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.h
deleted file mode 100644
index a8c0126..0000000
--- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorEncoderSource.h
+++ /dev/null
@@ -1,93 +0,0 @@
-/*************************************************************************************
- * INTEL CONFIDENTIAL
- * Copyright 2011 Intel Corporation All Rights Reserved.
- * The source code contained or described herein and all documents related
- * to the source code ("Material") are owned by Intel Corporation or its
- * suppliers or licensors.
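// [Editor's note] The .cpp above implements a one-lock, one-condition
// producer/consumer queue: storeBuffer() links a node onto the
// MediaBufferChain and signals; read() waits on the condition until a node or
// EOS (a NULL buffer) arrives. A minimal sketch of the same pattern using
// std::deque instead of the hand-rolled chain (hypothetical helper, assumes
// <deque> plus the android Mutex/Condition types already used in this file):
//
//     struct BufferQueue {
//         Mutex lock;
//         Condition cond;
//         std::deque<MediaBuffer*> q;
//         bool eos;
//         void put(MediaBuffer* b) {        // producer side: storeBuffer()
//             Mutex::Autolock l(lock);
//             if (b == NULL) eos = true; else q.push_back(b);
//             cond.signal();
//         }
//         MediaBuffer* take() {             // consumer side: read()
//             Mutex::Autolock l(lock);
//             while (q.empty() && !eos) cond.wait(lock);
//             if (q.empty()) return NULL;   // drained after EOS
//             MediaBuffer* b = q.front();
//             q.pop_front();
//             return b;
//         }
//     };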
Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2010 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - -#ifndef INTELVIDEOEDITORENCODERSOURCE_H -#define INTELVIDEOEDITORENCODERSOURCE_H - -#include -#include -#include -#include - -namespace android { -struct IntelVideoEditorEncoderSource : public MediaSource { - public: - static sp Create( - const sp &format); - virtual status_t start(MetaData *params = NULL); - virtual status_t stop(); - virtual sp getFormat(); - virtual status_t read(MediaBuffer **buffer, - const ReadOptions *options = NULL); - virtual int32_t storeBuffer(MediaBuffer *buffer); - virtual int32_t requestBuffer(MediaBuffer **buffer); - - protected: - virtual ~IntelVideoEditorEncoderSource(); - - private: - status_t getSharedBuffers(); - MediaBufferGroup* mGroup; - bool mUseSharedBuffers; - - struct MediaBufferChain { - MediaBuffer* buffer; - MediaBufferChain* nextLink; - }; - enum State { - CREATED, - STARTED, - ERROR - }; - IntelVideoEditorEncoderSource(const sp &format); - - // Don't call me - IntelVideoEditorEncoderSource(const IntelVideoEditorEncoderSource &); - IntelVideoEditorEncoderSource &operator=( - const IntelVideoEditorEncoderSource &); - - MediaBufferChain* mFirstBufferLink; - MediaBufferChain* mLastBufferLink; - int32_t mNbBuffer; - bool mIsEOS; - State mState; - sp mEncFormat; - Mutex mLock; - Condition mBufferCond; -}; -} -#endif diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.cpp deleted file mode 100644 index 7f576b2..0000000 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.cpp +++ /dev/null @@ -1,426 +0,0 @@ -/* - * INTEL CONFIDENTIAL - * Copyright 2010-2011 Intel Corporation All Rights Reserved. - - * The source code, information and material ("Material") contained herein is owned - * by Intel Corporation or its suppliers or licensors, and title to such Material - * remains with Intel Corporation or its suppliers or licensors. 
The Material contains
- * proprietary information of Intel or its suppliers and licensors. The Material is
- * protected by worldwide copyright laws and treaty provisions. No part of the Material
- * may be used, copied, reproduced, modified, published, uploaded, posted, transmitted,
- * distributed or disclosed in any way without Intel's prior express written permission.
- * No license under any patent, copyright or other intellectual property rights in the
- * Material is granted to or conferred upon you, either expressly, by implication, inducement,
- * estoppel or otherwise. Any license under such intellectual property rights must be express
- * and approved by Intel in writing.
-
- * Unless otherwise agreed by Intel in writing, you may not remove or alter this notice or any
- * other notice embedded in Materials by Intel or Intel's suppliers or licensors in any way.
- */
-
-#define LOG_NDEBUG 1
-#define LOG_TAG "IntelVideoEditorH263Encoder"
-#include
-#include "OMX_Video.h"
-#include
-#include
-#include
-#include
-#include
-#include
-#include "IntelVideoEditorH263Encoder.h"
-#define INIT_BUF_FULLNESS_RATIO 0.8125
-#define INITIAL_INTRA_PERIOD (mVideoFrameRate * 2 / 3)
-#define NORMAL_INTRA_PERIOD (mVideoFrameRate * 3)
-
-namespace android {
-
-IntelVideoEditorH263Encoder::IntelVideoEditorH263Encoder(
-        const sp<MediaSource>& source,
-        const sp<MetaData>& meta)
-    : mSource(source),
-      mMeta(meta),
-      mUseSyncMode(0),
-      mStarted(false),
-      mFirstFrame(true),
-      mFrameCount(0),
-      mVAEncoder(NULL),
-      mOutBufGroup(NULL),
-      mLastInputBuffer(NULL) {
-
-    LOGV("Construct IntelVideoEditorH263Encoder");
-}
-
-IntelVideoEditorH263Encoder::~IntelVideoEditorH263Encoder() {
-    LOGV("Destruct IntelVideoEditorH263Encoder");
-    if (mStarted) {
-        stop();
-    }
-}
-
-status_t IntelVideoEditorH263Encoder::initCheck(const sp<MetaData>& meta) {
-    LOGV("initCheck");
-
-    Encode_Status encStatus;
-
-    sp<MetaData> sourceFormat = mSource->getFormat();
-
-    CHECK(sourceFormat->findInt32(kKeyWidth, &mVideoWidth));
-    CHECK(sourceFormat->findInt32(kKeyHeight, &mVideoHeight));
-    CHECK(sourceFormat->findInt32(kKeyFrameRate, &mVideoFrameRate));
-    CHECK(sourceFormat->findInt32(kKeyColorFormat, &mVideoColorFormat));
-
-    CHECK(sourceFormat->findInt32(kKeyBitRate, &mVideoBitRate));
-    LOGV("mVideoWidth = %d, mVideoHeight = %d, mVideoFrameRate = %d, mVideoColorFormat = %d, mVideoBitRate = %d",
-        mVideoWidth, mVideoHeight, mVideoFrameRate, mVideoColorFormat, mVideoBitRate);
-    if (mVideoColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) {
-        LOGE("Color format %d is not supported", mVideoColorFormat);
-        return BAD_VALUE;
-    }
-    mFrameSize = mVideoHeight * mVideoWidth * 1.5;
-    /*
-     * SET PARAMS FOR THE ENCODER BASED ON THE METADATA
-     */
-
-    encStatus = mVAEncoder->getParameters(&mEncParamsCommon);
-    CHECK(encStatus == ENCODE_SUCCESS);
-    LOGV("got encoder params");
-
-    mEncParamsCommon.resolution.width = mVideoWidth;
-    mEncParamsCommon.resolution.height = mVideoHeight;
-    mEncParamsCommon.frameRate.frameRateNum = mVideoFrameRate;
-    mEncParamsCommon.frameRate.frameRateDenom = 1;
-    mEncParamsCommon.rcMode = RATE_CONTROL_VBR;
-    mEncParamsCommon.rcParams.bitRate = mVideoBitRate;
-    mEncParamsCommon.rawFormat = RAW_FORMAT_NV12;
-
-    // Set the intra period to a small value so that more IDRs are generated
-    // at the beginning of encoding. After a certain period of time, change the
-    // intra period to a bigger value, NORMAL_INTRA_PERIOD, for the rest of the
-    // encode. This works around the fact that it may take long to show video
-    // after clone / extended mode switching.
-    // During mode switching, the current Widi stack sends an RTSP command to
-    // set the adaptor jitter buffer size. The Widi adaptor may miss the first
-    // IDR while the jitter buffer size is being set. If the first IDR is
-    // missed, the Widi adaptor must wait for the next IDR to arrive so that
-    // decoding can start. If the intra period is long, it will take long to
-    // show video.
-    mEncParamsCommon.intraPeriod = INITIAL_INTRA_PERIOD;
-
-    mEncParamsCommon.rcParams.minQP = 1;
-    mEncParamsCommon.rcParams.initQP = 24;
-
-    mEncParamsCommon.syncEncMode = mUseSyncMode;
-    mFrameCount = 0;
-
-    encStatus = mVAEncoder->setParameters(&mEncParamsCommon);
-    CHECK(encStatus == ENCODE_SUCCESS);
-    LOGV("new encoder params set");
-
-    VideoParamsHRD hrdParam;
-    encStatus = mVAEncoder->getParameters(&hrdParam);
-    CHECK(encStatus == ENCODE_SUCCESS);
-    LOGV("got encoder hrd params");
-
-    hrdParam.bufferSize = mVideoBitRate;
-    hrdParam.initBufferFullness = hrdParam.bufferSize * INIT_BUF_FULLNESS_RATIO;
-
-    encStatus = mVAEncoder->setParameters(&hrdParam);
-    CHECK(encStatus == ENCODE_SUCCESS);
-    LOGV("new encoder hrd params set");
-
-    mOutBufGroup = new MediaBufferGroup();
-    CHECK(mOutBufGroup != NULL);
-
-    return OK;
-}
-
-status_t IntelVideoEditorH263Encoder::start(MetaData *params) {
-    LOGV("start");
-    status_t ret = OK;
-
-    if (mStarted) {
-        LOGW("Call start() when encoder already started");
-        return OK;
-    }
-
-    mSource->start(params);
-
-    mVAEncoder = createVideoEncoder("video/h263");
-
-    if (mVAEncoder == NULL) {
-        LOGE("Failed to create video encoder");
-        return NO_MEMORY;
-    }
-    mInitCheck = initCheck(mMeta);
-
-    if (mInitCheck != OK) {
-        return mInitCheck;
-    }
-
-    uint32_t maxSize;
-    mVAEncoder->getMaxOutSize(&maxSize);
-
-    LOGV("allocating output buffers of size %d", maxSize);
-    for (int i = 0; i < OUTPUT_BUFFERS; i++) {
-        mOutBufGroup->add_buffer(new MediaBuffer(maxSize));
-    }
-
-    if (OK != getSharedBuffers()) {
-        LOGE("Failed to get the shared buffers from encoder");
-        return UNKNOWN_ERROR;
-    }
-
-    Encode_Status err;
-    err = mVAEncoder->start();
-    if (err != ENCODE_SUCCESS) {
-        LOGE("Failed to initialize the encoder: %d", err);
-
-        /* We should exit shared buffer mode when we fail to
-           create the HW video encoder.
-         */
-
-        androidCreateThread(SBShutdownFunc, this);
-        LOGI("Successfully created thread to exit shared buffer mode!");
-
-        mSource->stop();
-
-        sp<BufferShareRegistry> r = BufferShareRegistry::getInstance();
-        err = r->encoderRequestToDisableSharingMode();
-        LOGV("encoderRequestToDisableSharingMode returned %d\n", err);
-
-        /* libsharedbuffer wants the source to call this after the encoder calls
-         * encoderRequestToDisableSharingMode. Instead of doing complicated
-         * synchronization, let's just call this ourselves on the source's
-         * behalf.
-         */
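// [Editor's note] This error path and stop() below share the same teardown
// choreography for buffer sharing, and the ordering matters. Summarized from
// the code (all names appear in this file):
//
//     androidCreateThread(SBShutdownFunc, this); // async: sourceExitSharingMode()
//     r->encoderExitSharingMode();               // stop() only: encoder leaves
//     mSource->stop();                           // source finishes its side
//     r->encoderRequestToDisableSharingMode();   // encoder asks to disable
//     r->sourceRequestToDisableSharingMode();    // called "on the source's
//                                                // behalf", per the comment above
//
// Running the source half on a separate thread keeps this thread free to tear
// down the encoder half without deadlocking on the libsharedbuffer handshake.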
-
-        err = r->sourceRequestToDisableSharingMode();
-        LOGV("sourceRequestToDisableSharingMode returned %d\n", err);
-
-        releaseVideoEncoder(mVAEncoder);
-        mVAEncoder = NULL;
-
-        return UNKNOWN_ERROR;
-    }
-
-    if (OK != setSharedBuffers()) {
-        LOGE("Failed to setup the shared buffers");
-        return UNKNOWN_ERROR;
-    }
-
-    mStarted = true;
-    LOGV("start - DONE");
-    return OK;
-}
-
-int IntelVideoEditorH263Encoder::SBShutdownFunc(void* arg)
-{
-    LOGV("IntelVideoEditorH263Encoder::SBShutdownFunc begin()");
-    sp<BufferShareRegistry> r = BufferShareRegistry::getInstance();
-    int error = r->sourceExitSharingMode();
-    LOGV("sourceExitSharingMode returns %d", error);
-    return 0;
-}
-
-status_t IntelVideoEditorH263Encoder::stop() {
-    LOGV("stop");
-    if (!mStarted) {
-        LOGW("Call stop() when encoder has not started");
-        return OK;
-    }
-
-    if (mOutBufGroup) {
-        delete mOutBufGroup;
-        mOutBufGroup = NULL;
-    }
-    if (mLastInputBuffer != NULL) {
-        mLastInputBuffer->release();
-    }
-    mLastInputBuffer = NULL;
-
-    /* call mSource->stop in a new thread, so the source
-       can do its end of the shared buffer shutdown */
-
-    androidCreateThread(SBShutdownFunc, this);
-    LOGV("Successfully created thread!");
-
-    /* do encoder's buffer sharing shutdown */
-    sp<BufferShareRegistry> r = BufferShareRegistry::getInstance();
-    int err = r->encoderExitSharingMode();
-    LOGV("encoderExitSharingMode returned %d\n", err);
-
-    mSource->stop();
-
-    err = r->encoderRequestToDisableSharingMode();
-    LOGV("encoderRequestToDisableSharingMode returned %d\n", err);
-
-    /* libsharedbuffer wants the source to call this after the encoder calls
-     * encoderRequestToDisableSharingMode. Instead of doing complicated
-     * synchronization, let's just call this ourselves on the source's
-     * behalf. */
-    err = r->sourceRequestToDisableSharingMode();
-    LOGV("sourceRequestToDisableSharingMode returned %d\n", err);
-
-    releaseVideoEncoder(mVAEncoder);
-    mVAEncoder = NULL;
-
-    mStarted = false;
-    LOGV("stop - DONE");
-
-    return OK;
-}
-
-sp<MetaData> IntelVideoEditorH263Encoder::getFormat() {
-    LOGV("getFormat");
-
-    sp<MetaData> format = new MetaData;
-    format->setInt32(kKeyWidth, mVideoWidth);
-    format->setInt32(kKeyHeight, mVideoHeight);
-    format->setInt32(kKeyBitRate, mVideoBitRate);
-    format->setInt32(kKeySampleRate, mVideoFrameRate);
-    format->setInt32(kKeyColorFormat, mVideoColorFormat);
-    format->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
-    format->setCString(kKeyDecoderComponent, "IntelVideoEditorH263Encoder");
-    return format;
-}
-
-status_t IntelVideoEditorH263Encoder::read(MediaBuffer **out, const ReadOptions *options) {
-
-    status_t err;
-    Encode_Status encRet;
-    MediaBuffer *tmpIn;
-    int64_t timestamp = 0;
-    CHECK(!options);
-    mReadOptions = options;
-    *out = NULL;
-
-    LOGV("IntelVideoEditorH263Encoder::read start");
-
-    do {
-        err = mSource->read(&tmpIn, NULL);
-        if (err == INFO_FORMAT_CHANGED) {
-            stop();
-            start(NULL);
-        }
-    } while (err == INFO_FORMAT_CHANGED);
-
-    if (err == ERROR_END_OF_STREAM) {
-        return err;
-    }
-    else if (err != OK) {
-        LOGE("Failed to read input video frame: %d", err);
-        return err;
-    }
-
-    VideoEncRawBuffer vaInBuf;
-
-    vaInBuf.data = (uint8_t *)tmpIn->data();
-    vaInBuf.size = tmpIn->size();
-
-    tmpIn->meta_data()->findInt64(kKeyTime, (int64_t *)&(vaInBuf.timeStamp));
-    LOGV("Encoding: buffer %p, size = %d, ts = %llu", vaInBuf.data, vaInBuf.size, vaInBuf.timeStamp);
-
-    encRet = mVAEncoder->encode(&vaInBuf);
-    if (encRet != ENCODE_SUCCESS) {
-        LOGE("Failed to encode input video frame: %d", encRet);
-        tmpIn->release();
-        return UNKNOWN_ERROR;
-    }
-
-    if (mLastInputBuffer != NULL)
-    {
-        mLastInputBuffer->release();
-        mLastInputBuffer = NULL;
-    }
-    mLastInputBuffer = tmpIn;
-
-    LOGV("Encoding Done, getting output buffer");
-    MediaBuffer *outputBuffer;
-
-    CHECK(mOutBufGroup->acquire_buffer(&outputBuffer) == OK);
-    LOGV("Waiting for outputbuffer");
-    VideoEncOutputBuffer vaOutBuf;
-    vaOutBuf.bufferSize = outputBuffer->size();
-    vaOutBuf.dataSize = 0;
-    vaOutBuf.data = (uint8_t *) outputBuffer->data();
-    vaOutBuf.format = OUTPUT_EVERYTHING;
-
-    encRet = mVAEncoder->getOutput(&vaOutBuf);
-    if (encRet != ENCODE_SUCCESS) {
-        LOGE("Failed to retrieve encoded video frame: %d", encRet);
-        outputBuffer->release();
-        return UNKNOWN_ERROR;
-    }
-    if (vaOutBuf.flag & ENCODE_BUFFERFLAG_SYNCFRAME) {
-        outputBuffer->meta_data()->setInt32(kKeyIsSyncFrame, true);
-    }
-
-    timestamp = vaInBuf.timeStamp;
-
-    LOGV("Got it! data = %p, ts = %llu, size = %d", vaOutBuf.data, timestamp, vaOutBuf.dataSize);
-
-    outputBuffer->set_range(0, vaOutBuf.dataSize);
-    outputBuffer->meta_data()->setInt64(kKeyTime, timestamp);
-    *out = outputBuffer;
-    LOGV("IntelVideoEditorH263Encoder::read end");
-    return OK;
-}
-
-status_t IntelVideoEditorH263Encoder::getSharedBuffers() {
-
-    LOGV("getSharedBuffers begin");
-    Encode_Status encRet;
-    status_t ret = OK;
-
-    sp<BufferShareRegistry> r = BufferShareRegistry::getInstance();
-
-    if (r->encoderRequestToEnableSharingMode() == BS_SUCCESS) {
-        LOGI("Shared buffer mode available\n");
-    }
-    else {
-        LOGE("Request to enable sharing failed\n");
-        return UNKNOWN_ERROR;
-    }
-
-    for (int i = 0; i < INPUT_SHARED_BUFFERS; i++) {
-        VideoParamsUsrptrBuffer paramsUsrptrBuffer;
-        paramsUsrptrBuffer.type = VideoParamsTypeUsrptrBuffer;
-        paramsUsrptrBuffer.size = sizeof(VideoParamsUsrptrBuffer);
-        paramsUsrptrBuffer.expectedSize = mFrameSize;
-        paramsUsrptrBuffer.format = STRING_TO_FOURCC("NV12");
-        paramsUsrptrBuffer.width = mVideoWidth;
-        paramsUsrptrBuffer.height = mVideoHeight;
-        LOGV("Share buffer request");
-        encRet = mVAEncoder->getParameters(&paramsUsrptrBuffer);
-        if (encRet != ENCODE_SUCCESS) {
-            LOGE("could not allocate input surface from the encoder %d", encRet);
-            ret = NO_MEMORY;
-            break;
-        }
-        mSharedBufs[i].allocatedSize = paramsUsrptrBuffer.actualSize;
-        mSharedBufs[i].height = mVideoHeight;
-        mSharedBufs[i].width = mVideoWidth;
-        mSharedBufs[i].pointer = paramsUsrptrBuffer.usrPtr;
-        mSharedBufs[i].stride = paramsUsrptrBuffer.stride;
-    }
-    LOGV("getSharedBuffers end");
-    return ret;
-}
-
-status_t IntelVideoEditorH263Encoder::setSharedBuffers() {
-
-    LOGV("setSharedBuffers");
-    sp<BufferShareRegistry> r = BufferShareRegistry::getInstance();
-
-    if (r->encoderSetSharedBuffer(mSharedBufs, INPUT_SHARED_BUFFERS) != BS_SUCCESS) {
-        LOGE("encoderSetSharedBuffer failed\n");
-        return UNKNOWN_ERROR;
-    }
-
-    if (r->encoderEnterSharingMode() != BS_SUCCESS) {
-        LOGE("encoderEnterSharingMode failed\n");
-        return UNKNOWN_ERROR;
-    }
-    return OK;
-}
-
-}
diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.h b/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.h
deleted file mode 100644
index 0c4bee6..0000000
--- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorH263Encoder.h
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * INTEL CONFIDENTIAL
- * Copyright 2010-2011 Intel Corporation All Rights Reserved.
-
- * The source code, information and material ("Material") contained herein is owned
- * by Intel Corporation or its suppliers or licensors, and title to such Material
- * remains with Intel Corporation or its suppliers or licensors.
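// [Editor's note] getSharedBuffers() above shows the libmix "usrptr" idiom:
// input surfaces are allocated inside the VA encoder, and the shell obtains
// CPU-visible pointers by calling getParameters() with a
// VideoParamsUsrptrBuffer request describing the desired frame
// (expectedSize/format/width/height in; usrPtr/stride/actualSize out). A
// minimal sketch of one such request, using only fields present in the code
// above:
//
//     VideoParamsUsrptrBuffer req;
//     req.type = VideoParamsTypeUsrptrBuffer;      // select the query type
//     req.size = sizeof(VideoParamsUsrptrBuffer);  // libmix param header
//     req.expectedSize = width * height * 3 / 2;   // NV12 frame size
//     req.format = STRING_TO_FOURCC("NV12");
//     req.width = width;
//     req.height = height;
//     if (encoder->getParameters(&req) == ENCODE_SUCCESS) {
//         // req.usrPtr now points at an encoder-owned surface the source
//         // can write raw NV12 into; req.stride/actualSize describe it.
//     }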
The Material contains - * proprietary information of Intel or its suppliers and licensors. The Material is - * protected by worldwide copyright laws and treaty provisions. No part of the Material - * may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, - * distributed or disclosed in any way without Intel's prior express written permission. - * No license under any patent, copyright or other intellectual property rights in the - * Material is granted to or conferred upon you, either expressly, by implication, inducement, - * estoppel or otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - - * Unless otherwise agreed by Intel in writing, you may not remove or alter this notice or any - * other notice embedded in Materials by Intel or Intel's suppliers or licensors in any way. - */ - -#ifndef INTELVIDEOEDITORH263ENCODER_H -#define INTELVIDEOEDITORH263ENCODER_H - -#include -#include -#include -#include -#include "va/va.h" -#include "VideoEncoderHost.h" -#include - -namespace android { -struct IntelVideoEditorH263Encoder : public MediaSource { - IntelVideoEditorH263Encoder(const sp &source, - const sp& meta); - - virtual status_t start(MetaData *params); - virtual status_t stop(); - - virtual sp getFormat(); - - virtual status_t read(MediaBuffer **buffer, const ReadOptions *options); - - -protected: - virtual ~IntelVideoEditorH263Encoder(); - -private: - sp mSource; - sp mMeta; - - int32_t mVideoWidth; - int32_t mVideoHeight; - int32_t mFrameSize; - int32_t mVideoFrameRate; - int32_t mVideoBitRate; - int32_t mVideoColorFormat; - int32_t mUseSyncMode; - status_t mInitCheck; - bool mStarted; - bool mFirstFrame; - int32_t mFrameCount; - static const int OUTPUT_BUFFERS = 6; - static const int INPUT_SHARED_BUFFERS = 8; - IVideoEncoder *mVAEncoder; - VideoParamsCommon mEncParamsCommon; - SharedBufferType mSharedBufs[INPUT_SHARED_BUFFERS]; - const ReadOptions *mReadOptions; - MediaBufferGroup *mOutBufGroup; /* group of output buffers*/ - MediaBuffer *mLastInputBuffer; - -private: - status_t initCheck(const sp& meta); - int32_t calcBitrate(int width, int height); - status_t getSharedBuffers(); - status_t setSharedBuffers(); - static int SBShutdownFunc(void* arg); - - IntelVideoEditorH263Encoder(const IntelVideoEditorH263Encoder &); - IntelVideoEditorH263Encoder &operator=(const IntelVideoEditorH263Encoder &); -}; -}; -#endif - diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp deleted file mode 100644 index 4febe20..0000000 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.cpp +++ /dev/null @@ -1,531 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. 
No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** -************************************************************************* -* @file VideoEditorUtils.cpp -* @brief StageFright shell Utilities -************************************************************************* -*/ -#define LOG_NDEBUG 1 -#define LOG_TAG "SF_utils" -#include "utils/Log.h" - -#include "IntelVideoEditorUtils.h" - -#include -#include -#include -#include -#include -#include - -/* Android includes*/ -#include -#include - -/*---------------------*/ -/* DEBUG LEVEL SETUP */ -/*---------------------*/ -#define LOG1 ALOGE /*ERRORS Logging*/ -#define LOG2 ALOGI /*WARNING Logging*/ -#define LOG3 //ALOGV /*COMMENTS Logging*/ - -namespace android { - -void displayMetaData(const sp meta) { - - const char* charData; - int32_t int32Data; - int64_t int64Data; - uint32_t type; - const void* data; - void* ptr; - size_t size; - - if (meta->findCString(kKeyMIMEType, &charData)) { - LOG1("displayMetaData kKeyMIMEType %s", charData); - } - if (meta->findInt32(kKeyWidth, &int32Data)) { - LOG1("displayMetaData kKeyWidth %d", int32Data); - } - if (meta->findInt32(kKeyHeight, &int32Data)) { - LOG1("displayMetaData kKeyHeight %d", int32Data); - } - if (meta->findInt32(kKeyIFramesInterval, &int32Data)) { - LOG1("displayMetaData kKeyIFramesInterval %d", int32Data); - } - if (meta->findInt32(kKeyStride, &int32Data)) { - LOG1("displayMetaData kKeyStride %d", int32Data); - } - if (meta->findInt32(kKeySliceHeight, &int32Data)) { - LOG1("displayMetaData kKeySliceHeight %d", int32Data); - } - if (meta->findInt32(kKeyChannelCount, &int32Data)) { - LOG1("displayMetaData kKeyChannelCount %d", int32Data); - } - if (meta->findInt32(kKeySampleRate, &int32Data)) { - LOG1("displayMetaData kKeySampleRate %d", int32Data); - } - if (meta->findInt32(kKeyBitRate, &int32Data)) { - LOG1("displayMetaData kKeyBitRate %d", int32Data); - } - if (meta->findData(kKeyESDS, &type, &data, &size)) { - LOG1("displayMetaData kKeyESDS type=%d size=%d", type, size); - } - if (meta->findData(kKeyAVCC, &type, &data, &size)) { - LOG1("displayMetaData kKeyAVCC data=0x%X type=%d size=%d", - *((unsigned int*)data), type, size); - } - if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) { - LOG1("displayMetaData kKeyVorbisInfo type=%d size=%d", type, size); - } 
- if (meta->findData(kKeyVorbisBooks, &type, &data, &size)) { - LOG1("displayMetaData kKeyVorbisBooks type=%d size=%d", type, size); - } - if (meta->findInt32(kKeyWantsNALFragments, &int32Data)) { - LOG1("displayMetaData kKeyWantsNALFragments %d", int32Data); - } - if (meta->findInt32(kKeyIsSyncFrame, &int32Data)) { - LOG1("displayMetaData kKeyIsSyncFrame %d", int32Data); - } - if (meta->findInt32(kKeyIsCodecConfig, &int32Data)) { - LOG1("displayMetaData kKeyIsCodecConfig %d", int32Data); - } - if (meta->findInt64(kKeyTime, &int64Data)) { - LOG1("displayMetaData kKeyTime %lld", int64Data); - } - if (meta->findInt32(kKeyDuration, &int32Data)) { - LOG1("displayMetaData kKeyDuration %d", int32Data); - } - if (meta->findInt32(kKeyColorFormat, &int32Data)) { - LOG1("displayMetaData kKeyColorFormat %d", int32Data); - } - if (meta->findPointer(kKeyPlatformPrivate, &ptr)) { - LOG1("displayMetaData kKeyPlatformPrivate pointer=0x%x", (int32_t) ptr); - } - if (meta->findCString(kKeyDecoderComponent, &charData)) { - LOG1("displayMetaData kKeyDecoderComponent %s", charData); - } - if (meta->findInt32(kKeyBufferID, &int32Data)) { - LOG1("displayMetaData kKeyBufferID %d", int32Data); - } - if (meta->findInt32(kKeyMaxInputSize, &int32Data)) { - LOG1("displayMetaData kKeyMaxInputSize %d", int32Data); - } - if (meta->findInt64(kKeyThumbnailTime, &int64Data)) { - LOG1("displayMetaData kKeyThumbnailTime %lld", int64Data); - } - if (meta->findCString(kKeyAlbum, &charData)) { - LOG1("displayMetaData kKeyAlbum %s", charData); - } - if (meta->findCString(kKeyArtist, &charData)) { - LOG1("displayMetaData kKeyArtist %s", charData); - } - if (meta->findCString(kKeyAlbumArtist, &charData)) { - LOG1("displayMetaData kKeyAlbumArtist %s", charData); - } - if (meta->findCString(kKeyComposer, &charData)) { - LOG1("displayMetaData kKeyComposer %s", charData); - } - if (meta->findCString(kKeyGenre, &charData)) { - LOG1("displayMetaData kKeyGenre %s", charData); - } - if (meta->findCString(kKeyTitle, &charData)) { - LOG1("displayMetaData kKeyTitle %s", charData); - } - if (meta->findCString(kKeyYear, &charData)) { - LOG1("displayMetaData kKeyYear %s", charData); - } - if (meta->findData(kKeyAlbumArt, &type, &data, &size)) { - LOG1("displayMetaData kKeyAlbumArt type=%d size=%d", type, size); - } - if (meta->findCString(kKeyAlbumArtMIME, &charData)) { - LOG1("displayMetaData kKeyAlbumArtMIME %s", charData); - } - if (meta->findCString(kKeyAuthor, &charData)) { - LOG1("displayMetaData kKeyAuthor %s", charData); - } - if (meta->findCString(kKeyCDTrackNumber, &charData)) { - LOG1("displayMetaData kKeyCDTrackNumber %s", charData); - } - if (meta->findCString(kKeyDiscNumber, &charData)) { - LOG1("displayMetaData kKeyDiscNumber %s", charData); - } - if (meta->findCString(kKeyDate, &charData)) { - LOG1("displayMetaData kKeyDate %s", charData); - } - if (meta->findCString(kKeyWriter, &charData)) { - LOG1("displayMetaData kKeyWriter %s", charData); - } - if (meta->findInt32(kKeyTimeScale, &int32Data)) { - LOG1("displayMetaData kKeyTimeScale %d", int32Data); - } - if (meta->findInt32(kKeyVideoProfile, &int32Data)) { - LOG1("displayMetaData kKeyVideoProfile %d", int32Data); - } - if (meta->findInt32(kKeyVideoLevel, &int32Data)) { - LOG1("displayMetaData kKeyVideoLevel %d", int32Data); - } - if (meta->findInt32(kKey64BitFileOffset, &int32Data)) { - LOG1("displayMetaData kKey64BitFileOffset %d", int32Data); - } - if (meta->findInt32(kKeyFileType, &int32Data)) { - LOG1("displayMetaData kKeyFileType %d", int32Data); - } - if 
 (meta->findInt64(kKeyTrackTimeStatus, &int64Data)) {
-        LOG1("displayMetaData kKeyTrackTimeStatus %lld", int64Data);
-    }
-    if (meta->findInt32(kKeyNotRealTime, &int32Data)) {
-        LOG1("displayMetaData kKeyNotRealTime %d", int32Data);
-    }
-}
-
-/**
- * This code was extracted from the StageFright MPEG4 writer.
- * It is used to parse and format the AVC codec specific info received
- * from StageFright encoders.
- */
-static const uint8_t kNalUnitTypeSeqParamSet = 0x07;
-static const uint8_t kNalUnitTypePicParamSet = 0x08;
-struct AVCParamSet {
-    AVCParamSet(uint16_t length, const uint8_t *data)
-        : mLength(length), mData(data) {}
-
-    uint16_t mLength;
-    const uint8_t *mData;
-};
-struct AVCCodecSpecificContext {
-    List<AVCParamSet> mSeqParamSets;
-    List<AVCParamSet> mPicParamSets;
-    uint8_t mProfileIdc;
-    uint8_t mProfileCompatible;
-    uint8_t mLevelIdc;
-};
-
-const uint8_t *parseParamSet(AVCCodecSpecificContext* pC,
-        const uint8_t *data, size_t length, int type, size_t *paramSetLen) {
-    CHECK(type == kNalUnitTypeSeqParamSet ||
-          type == kNalUnitTypePicParamSet);
-
-    size_t bytesLeft = length;
-    while (bytesLeft > 4 &&
-            memcmp("\x00\x00\x00\x01", &data[length - bytesLeft], 4)) {
-        --bytesLeft;
-    }
-    if (bytesLeft <= 4) {
-        bytesLeft = 0; // Last parameter set
-    }
-    const uint8_t *nextStartCode = &data[length - bytesLeft];
-    *paramSetLen = nextStartCode - data;
-    if (*paramSetLen == 0) {
-        ALOGE("Param set is malformed, since its length is 0");
-        return NULL;
-    }
-
-    AVCParamSet paramSet(*paramSetLen, data);
-    if (type == kNalUnitTypeSeqParamSet) {
-        if (*paramSetLen < 4) {
-            ALOGE("Seq parameter set malformed");
-            return NULL;
-        }
-        if (pC->mSeqParamSets.empty()) {
-            pC->mProfileIdc = data[1];
-            pC->mProfileCompatible = data[2];
-            pC->mLevelIdc = data[3];
-        } else {
-            if (pC->mProfileIdc != data[1] ||
-                pC->mProfileCompatible != data[2] ||
-                pC->mLevelIdc != data[3]) {
-                ALOGV("Inconsistent profile/level found in seq parameter sets");
-                return NULL;
-            }
-        }
-        pC->mSeqParamSets.push_back(paramSet);
-    } else {
-        pC->mPicParamSets.push_back(paramSet);
-    }
-    return nextStartCode;
-}
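// [Editor's note] parseParamSet() above finds the end of a parameter set by
// shrinking bytesLeft until the remaining suffix begins with the 4-byte
// Annex-B start code 00 00 00 01 (or until fewer than 4 bytes remain, which
// marks the last parameter set). An equivalent forward search, modulo edge
// cases at the very end of the buffer, is sketched below; this hypothetical
// helper is not part of the original file and assumes <cstring> for memcmp:
static inline const uint8_t *findNextStartCode(const uint8_t *data,
                                               size_t length) {
    // Scan forward for 00 00 00 01. A match at offset 0 means a zero-length
    // (malformed) parameter set; no match means this was the last one.
    for (size_t i = 0; i + 4 <= length; ++i) {
        if (!memcmp("\x00\x00\x00\x01", data + i, 4)) {
            return data + i;
        }
    }
    return data + length;
}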
-
-status_t buildAVCCodecSpecificData(uint8_t **pOutputData, size_t *pOutputSize,
-        const uint8_t *data, size_t size, MetaData *param)
-{
-    //ALOGV("buildAVCCodecSpecificData");
-
-    if ((pOutputData == NULL) || (pOutputSize == NULL)) {
-        ALOGE("output is invalid");
-        return ERROR_MALFORMED;
-    }
-
-    if (*pOutputData != NULL) {
-        ALOGE("Already have codec specific data");
-        return ERROR_MALFORMED;
-    }
-
-    if (size < 4) {
-        ALOGE("Codec specific data length too short: %d", size);
-        return ERROR_MALFORMED;
-    }
-
-    // Data is in the form of AVCCodecSpecificData
-    if (memcmp("\x00\x00\x00\x01", data, 4)) {
-        // 2 bytes for each of the parameter set length field
-        // plus the 7 bytes for the header
-        if (size < 4 + 7) {
-            ALOGE("Codec specific data length too short: %d", size);
-            return ERROR_MALFORMED;
-        }
-
-        *pOutputSize = size;
-        *pOutputData = (uint8_t*)malloc(size);
-        memcpy(*pOutputData, data, size);
-        return OK;
-    }
-
-    AVCCodecSpecificContext ctx;
-    uint8_t *outputData = NULL;
-    size_t outputSize = 0;
-
-    // Check if the data is valid
-    uint8_t type = kNalUnitTypeSeqParamSet;
-    bool gotSps = false;
-    bool gotPps = false;
-    const uint8_t *tmp = data;
-    const uint8_t *nextStartCode = data;
-    size_t bytesLeft = size;
-    size_t paramSetLen = 0;
-    outputSize = 0;
-    while (bytesLeft > 4 && !memcmp("\x00\x00\x00\x01", tmp, 4)) {
-        type = (*(tmp + 4)) & 0x1F;
-        if (type == kNalUnitTypeSeqParamSet) {
-            if (gotPps) {
-                ALOGE("SPS must come before PPS");
-                return ERROR_MALFORMED;
-            }
-            if (!gotSps) {
-                gotSps = true;
-            }
-            nextStartCode = parseParamSet(&ctx, tmp + 4, bytesLeft - 4, type,
-                    &paramSetLen);
-        } else if (type == kNalUnitTypePicParamSet) {
-            if (!gotSps) {
-                ALOGE("SPS must come before PPS");
-                return ERROR_MALFORMED;
-            }
-            if (!gotPps) {
-                gotPps = true;
-            }
-            nextStartCode = parseParamSet(&ctx, tmp + 4, bytesLeft - 4, type,
-                    &paramSetLen);
-        } else {
-            ALOGE("Only SPS and PPS NAL units are expected");
-            return ERROR_MALFORMED;
-        }
-
-        if (nextStartCode == NULL) {
-            return ERROR_MALFORMED;
-        }
-
-        // Move on to find the next parameter set
-        bytesLeft -= nextStartCode - tmp;
-        tmp = nextStartCode;
-        outputSize += (2 + paramSetLen);
-
-        if (gotSps && gotPps) {
-            break;
-        }
-    }
-
-    {
-        // Check on the number of seq parameter sets
-        size_t nSeqParamSets = ctx.mSeqParamSets.size();
-        if (nSeqParamSets == 0) {
-            ALOGE("Could not find sequence parameter set");
-            return ERROR_MALFORMED;
-        }
-
-        if (nSeqParamSets > 0x1F) {
-            ALOGE("Too many seq parameter sets (%d) found", nSeqParamSets);
-            return ERROR_MALFORMED;
-        }
-    }
-
-    {
-        // Check on the number of pic parameter sets
-        size_t nPicParamSets = ctx.mPicParamSets.size();
-        if (nPicParamSets == 0) {
-            ALOGE("Could not find picture parameter set");
-            return ERROR_MALFORMED;
-        }
-        if (nPicParamSets > 0xFF) {
-            ALOGE("Too many pic parameter sets (%d) found", nPicParamSets);
-            return ERROR_MALFORMED;
-        }
-    }
-
-    // ISO 14496-15: AVC file format
-    outputSize += 7; // 7 more bytes in the header
-    outputData = (uint8_t *)malloc(outputSize);
-    uint8_t *header = outputData;
-    header[0] = 1;                      // version
-    header[1] = ctx.mProfileIdc;        // profile indication
-    header[2] = ctx.mProfileCompatible; // profile compatibility
-    header[3] = ctx.mLevelIdc;
-
-    // 6-bit '111111' followed by 2-bit lengthSizeMinusOne
-    int32_t use2ByteNalLength = 0;
-    if (param &&
-        param->findInt32(kKey2ByteNalLength, &use2ByteNalLength) &&
-        use2ByteNalLength) {
-        header[4] = 0xfc | 1;  // length size == 2 bytes
-    } else {
-        header[4] = 0xfc | 3;  // length size == 4 bytes
-    }
-
-    // 3-bit '111' followed by 5-bit numSequenceParameterSets
-    int nSequenceParamSets = ctx.mSeqParamSets.size();
-    header[5] = 0xe0 | nSequenceParamSets;
-    header += 6;
-    for (List<AVCParamSet>::iterator it = ctx.mSeqParamSets.begin();
-         it != ctx.mSeqParamSets.end(); ++it) {
-        // 16-bit sequence parameter set length
-        uint16_t seqParamSetLength = it->mLength;
-        header[0] = seqParamSetLength >> 8;
-        header[1] = seqParamSetLength & 0xff;
-        //ALOGE("### SPS %d %d %d", seqParamSetLength, header[0], header[1]);
-
-        // SPS NAL unit (sequence parameter set length bytes)
-        memcpy(&header[2], it->mData, seqParamSetLength);
-        header += (2 + seqParamSetLength);
-    }
-
-    // 8-bit nPictureParameterSets
-    int nPictureParamSets = ctx.mPicParamSets.size();
-    header[0] = nPictureParamSets;
-    header += 1;
-    for (List<AVCParamSet>::iterator it = ctx.mPicParamSets.begin();
-         it != ctx.mPicParamSets.end(); ++it) {
-        // 16-bit picture parameter set length
-        uint16_t picParamSetLength = it->mLength;
-        header[0] = picParamSetLength >> 8;
-        header[1] = picParamSetLength & 0xff;
-        //ALOGE("### PPS %d %d %d", picParamSetLength, header[0], header[1]);
-
-        // PPS NAL unit (picture parameter set length bytes)
-        memcpy(&header[2], it->mData, picParamSetLength);
-        header += (2 + picParamSetLength);
-    }
-
-    *pOutputSize = outputSize;
-    *pOutputData = outputData;
-    return OK;
-}
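// [Editor's note] Worked example of the avcC box that
// buildAVCCodecSpecificData() emits (ISO/IEC 14496-15). For one SPS of
// 9 bytes and one PPS of 4 bytes, outputSize is 7 + (2 + 9) + (2 + 4) = 24
// bytes, where the fixed 7 covers bytes [0..4] plus the two per-list count
// fields:
//
//     [0]      0x01            configurationVersion
//     [1]      SPS byte 1      AVCProfileIndication
//     [2]      SPS byte 2      profile_compatibility
//     [3]      SPS byte 3      AVCLevelIndication
//     [4]      0xFC | 3        6 reserved bits + lengthSizeMinusOne (4-byte NALs)
//     [5]      0xE0 | 1        3 reserved bits + numOfSequenceParameterSets
//     [6..7]   0x00 0x09       sequenceParameterSetLength (16-bit, big-endian)
//     [8..16]  SPS payload
//     [17]     0x01            numOfPictureParameterSets
//     [18..19] 0x00 0x04       pictureParameterSetLength
//     [20..23] PPS payload
//
// i.e. the loop's running "outputSize += (2 + paramSetLen)" counts each
// length-prefixed set, and the final "outputSize += 7" adds the fixed header
// and count bytes.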
LOGV("removeAVCCodecSpecificData begin"); - LOGV("Inputdataaddr = %p,Inputsize = %d", data,size); - if ( (pOutputData == NULL) || (pOutputSize == NULL) ) { - LOGE("output is invalid"); - return ERROR_MALFORMED; - } - - if (size < 4) { - LOGE("Codec specific data length too short: %d", size); - return ERROR_MALFORMED; - } - - AVCCodecSpecificContext ctx; - uint8_t *outputData = NULL; - size_t outputSize = 0; - - // Check if the data is valid - uint8_t type = kNalUnitTypeSeqParamSet; - bool gotSps = false; - bool gotPps = false; - const uint8_t *tmp = data; - const uint8_t *nextStartCode = data; - size_t bytesLeft = size; - size_t paramSetLen = 0; - outputSize = 0; - - while (bytesLeft > 4 && !memcmp("\x00\x00\x00\x01", tmp, 4)) { - type = (*(tmp + 4)) & 0x1F; - if (type == kNalUnitTypeSeqParamSet) { - if (gotPps) { - LOGE("SPS must come before PPS"); - return ERROR_MALFORMED; - } - if (!gotSps) { - gotSps = true; - } - nextStartCode = parseParamSet(&ctx, tmp + 4, bytesLeft - 4, type, - ¶mSetLen); - } else if (type == kNalUnitTypePicParamSet) { - if (!gotSps) { - LOGE("SPS must come before PPS"); - return ERROR_MALFORMED; - } - if (!gotPps) { - gotPps = true; - } - nextStartCode = parseParamSet(&ctx, tmp + 4, bytesLeft - 4, type, - ¶mSetLen); - } else { - LOGE("Only SPS and PPS Nal units are expected"); - return ERROR_MALFORMED; - } - if (nextStartCode == NULL) { - return ERROR_MALFORMED; - } - bytesLeft -= nextStartCode - tmp; - tmp = nextStartCode; - outputSize += (4 + paramSetLen); - LOGV("DSI size is %d!",outputSize); - if(gotSps && gotPps) - { - break; - } - } - *pOutputData = (uint8_t *)(data + outputSize); - *pOutputSize = outputSize; - LOGV("Outputdataaddr = %p,Outputsize = %d", *pOutputData, *pOutputSize); - LOGV("removeAVCCodecSpecificData end"); - return OK; -} -}// namespace android diff --git a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h b/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h deleted file mode 100644 index c4a9af5..0000000 --- a/frameworks/videoedit/stagefrightshells/IntelVideoEditorUtils.h +++ /dev/null @@ -1,125 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. 
- ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** -************************************************************************* -* @file VideoEditorUtils.cpp -* @brief StageFright shell Utilities -************************************************************************* -*/ -#ifndef ANDROID_UTILS_H_ -#define ANDROID_UTILS_H_ - -/******************* - * HEADERS * - *******************/ - -#include "M4OSA_Debug.h" - -#include "utils/Log.h" -#include -#include -#include -#include - -/** - ************************************************************************* - * VIDEOEDITOR_CHECK(test, errCode) - * @note This macro displays an error message and goes to function cleanUp label - * if the test fails. - ************************************************************************* - */ -#define VIDEOEDITOR_CHECK(test, errCode) \ -{ \ - if( !(test) ) { \ - ALOGW("!!! %s (L%d) check failed : " #test ", yields error 0x%.8x", \ - __FILE__, __LINE__, errCode); \ - err = (errCode); \ - goto cleanUp; \ - } \ -} - -/** - ************************************************************************* - * SAFE_FREE(p) - * @note This macro calls free and makes sure the pointer is set to NULL. - ************************************************************************* - */ -#define SAFE_FREE(p) \ -{ \ - if(M4OSA_NULL != (p)) { \ - free((p)) ; \ - (p) = M4OSA_NULL ; \ - } \ -} - -/** - ************************************************************************* - * SAFE_MALLOC(p, type, count, comment) - * @note This macro allocates a buffer, checks for success and fills the buffer - * with 0. 
- ************************************************************************* - */ -#define SAFE_MALLOC(p, type, count, comment) \ -{ \ - (p) = (type*)M4OSA_32bitAlignedMalloc(sizeof(type)*(count), 0xFF,(M4OSA_Char*)comment);\ - VIDEOEDITOR_CHECK(M4OSA_NULL != (p), M4ERR_ALLOC); \ - memset((void *)(p), 0,sizeof(type)*(count)); \ -} - - - /******************** - * UTILITIES * - ********************/ - - -namespace android { - -/*--------------------------*/ -/* DISPLAY METADATA CONTENT */ -/*--------------------------*/ -void displayMetaData(const sp meta); - -// Build the AVC codec spcific info from the StageFright encoders output -status_t buildAVCCodecSpecificData(uint8_t **outputData, size_t *outputSize, - const uint8_t *data, size_t size, MetaData *param); - -// Remove the AVC codec specific info from the StageFright encoders output -status_t removeAVCCodecSpecificData(uint8_t **outputData, size_t *outputSize, - const uint8_t *data, size_t size, MetaData *param); -}//namespace android - - -#endif //ANDROID_UTILS_H_ diff --git a/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp b/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp deleted file mode 100644 index d6a0eee..0000000 --- a/frameworks/videoedit/stagefrightshells/MediaBufferPuller.cpp +++ /dev/null @@ -1,201 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "MediaBufferPuller" -#include - -#include -#include -#include -#include "MediaBufferPuller.h" - -namespace android { - - -MediaBufferPuller::MediaBufferPuller(const sp& source) - : mSource(source), - mAskToStart(false), - mAskToStop(false), - mAcquireStopped(false), - mReleaseStopped(false), - mSourceError(OK) { - - androidCreateThread(acquireThreadStart, this); - androidCreateThread(releaseThreadStart, this); -} - -MediaBufferPuller::~MediaBufferPuller() { - stop(); -} - -bool MediaBufferPuller::hasMediaSourceReturnedError() const { - Mutex::Autolock autolock(mLock); - return ((mSourceError != OK) ? true : false); -} -void MediaBufferPuller::start() { - Mutex::Autolock autolock(mLock); - mAskToStart = true; - mAcquireCond.signal(); - mReleaseCond.signal(); -} - -void MediaBufferPuller::stop() { - Mutex::Autolock autolock(mLock); - mAskToStop = true; - mAcquireCond.signal(); - mReleaseCond.signal(); - while (!mAcquireStopped || !mReleaseStopped) { - mUserCond.wait(mLock); - } - - // Release remaining buffers - for (size_t i = 0; i < mBuffers.size(); i++) { - mBuffers.itemAt(i)->release(); - } - - for (size_t i = 0; i < mReleaseBuffers.size(); i++) { - mReleaseBuffers.itemAt(i)->release(); - } - - mBuffers.clear(); - mReleaseBuffers.clear(); -} - -MediaBuffer* MediaBufferPuller::getBufferNonBlocking() { - Mutex::Autolock autolock(mLock); - if (mBuffers.empty()) { - return NULL; - } else { - MediaBuffer* b = mBuffers.itemAt(0); - mBuffers.removeAt(0); - return b; - } -} - -MediaBuffer* MediaBufferPuller::getBufferBlocking() { - Mutex::Autolock autolock(mLock); - while (mBuffers.empty() && !mAcquireStopped) { - mUserCond.wait(mLock); - } - - if (mBuffers.empty()) { - return NULL; - } else { - MediaBuffer* b = mBuffers.itemAt(0); - mBuffers.removeAt(0); - return b; - } -} - -void MediaBufferPuller::putBuffer(MediaBuffer* buffer) { - Mutex::Autolock autolock(mLock); - mReleaseBuffers.push(buffer); - mReleaseCond.signal(); -} - -int MediaBufferPuller::acquireThreadStart(void* arg) { - MediaBufferPuller* self = (MediaBufferPuller*)arg; - self->acquireThreadFunc(); - return 0; -} - -int MediaBufferPuller::releaseThreadStart(void* arg) { - MediaBufferPuller* self = (MediaBufferPuller*)arg; - self->releaseThreadFunc(); - return 0; -} - -void MediaBufferPuller::acquireThreadFunc() { - mLock.lock(); - - // Wait for the start signal - while (!mAskToStart && !mAskToStop) { - mAcquireCond.wait(mLock); - } - - // Loop until we are asked to stop, or there is nothing more to read - while (!mAskToStop) { - MediaBuffer* pBuffer; - mLock.unlock(); - status_t result = mSource->read(&pBuffer, NULL); - mLock.lock(); - mSourceError = result; - if (result == ERROR_END_OF_STREAM && pBuffer != NULL) { - mAskToStop = true; - } else if (result != OK) { - break; - } - mBuffers.push(pBuffer); - mUserCond.signal(); - } - - mAcquireStopped = true; - mUserCond.signal(); - mLock.unlock(); -} - -void MediaBufferPuller::releaseThreadFunc() { - mLock.lock(); - - // Wait for the start signal - while (!mAskToStart && !mAskToStop) { - mReleaseCond.wait(mLock); - } - - // Loop until we are asked to stop - while (1) { - if (mReleaseBuffers.empty()) { - if (mAskToStop) { - break; - } else { - mReleaseCond.wait(mLock); - continue; - } - } - MediaBuffer* pBuffer = mReleaseBuffers.itemAt(0); - mReleaseBuffers.removeAt(0); - mLock.unlock(); - pBuffer->release(); - mLock.lock(); - } - - mReleaseStopped = true; - mUserCond.signal(); - mLock.unlock(); -} - -}; // namespace android diff --git 
a/frameworks/videoedit/stagefrightshells/MediaBufferPuller.h b/frameworks/videoedit/stagefrightshells/MediaBufferPuller.h
deleted file mode 100644
index 4863003..0000000
--- a/frameworks/videoedit/stagefrightshells/MediaBufferPuller.h
+++ /dev/null
@@ -1,110 +0,0 @@
-/*************************************************************************************
- * INTEL CONFIDENTIAL
- * Copyright 2011 Intel Corporation All Rights Reserved.
- * The source code contained or described herein and all documents related
- * to the source code ("Material") are owned by Intel Corporation or its
- * suppliers or licensors. Title to the Material remains with Intel
- * Corporation or its suppliers and licensors. The Material contains trade
- * secrets and proprietary and confidential information of Intel or its
- * suppliers and licensors. The Material is protected by worldwide copyright
- * and trade secret laws and treaty provisions. No part of the Material may
- * be used, copied, reproduced, modified, published, uploaded, posted,
- * transmitted, distributed, or disclosed in any way without Intel's prior
- * express written permission.
- *
- * No license under any patent, copyright, trade secret or other intellectual
- * property right is granted to or conferred upon you by disclosure or delivery
- * of the Materials, either expressly, by implication, inducement, estoppel or
- * otherwise. Any license under such intellectual property rights must be express
- * and approved by Intel in writing.
- ************************************************************************************/
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _MEDIA_BUFFER_PULLER_H
-#define _MEDIA_BUFFER_PULLER_H
-
-#include
-#include
-
-
-namespace android {
-
-struct MediaSource;
-struct MediaBuffer;
-
-/*
- * An object of this class can pull a list of media buffers
- * from a MediaSource repeatedly. The user can then get the
- * buffers from that list.
- */
-struct MediaBufferPuller {
-public:
-    MediaBufferPuller(const sp<MediaSource>& source);
-    ~MediaBufferPuller();
-
-    // Start to build up the list of the buffers.
-    void start();
-
-    // Release the list of the available buffers, and stop
-    // pulling buffers from the MediaSource.
-    void stop();
-
-    // Get a buffer from the list. If there is no buffer available
-    // at the time this method is called, it blocks waiting for
-    // a buffer to become available or until stop() is called.
-    MediaBuffer* getBufferBlocking();
-
-    // Get a buffer from the list. If there is no buffer available
-    // at the time this method is called, NULL is returned.
-    MediaBuffer* getBufferNonBlocking();
-
-    // Add a buffer to the end of the list of available media buffers
-    void putBuffer(MediaBuffer* buffer);
-
-    // Check whether the source returned an error or not.
- bool hasMediaSourceReturnedError() const; - -private: - static int acquireThreadStart(void* arg); - void acquireThreadFunc(); - - static int releaseThreadStart(void* arg); - void releaseThreadFunc(); - - sp mSource; - Vector mBuffers; - Vector mReleaseBuffers; - - mutable Mutex mLock; - Condition mUserCond; // for the user of this class - Condition mAcquireCond; // for the acquire thread - Condition mReleaseCond; // for the release thread - - bool mAskToStart; // Asks the threads to start - bool mAskToStop; // Asks the threads to stop - bool mAcquireStopped; // The acquire thread has stopped - bool mReleaseStopped; // The release thread has stopped - status_t mSourceError; // Error returned by MediaSource read - - // Don't call me! - MediaBufferPuller(const MediaBufferPuller&); - MediaBufferPuller& operator=(const MediaBufferPuller&); -}; - -} // namespace android - -#endif // _MEDIA_BUFFER_PULLER_H diff --git a/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp b/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp deleted file mode 100644 index b5cce1a..0000000 --- a/frameworks/videoedit/stagefrightshells/VideoEditor3gpReader.cpp +++ /dev/null @@ -1,2069 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
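// [Editor's note] Typical use of the MediaBufferPuller API shown above, as
// implied by its declarations (names are from the header; the surrounding
// decoder loop is hypothetical):
//
//     MediaBufferPuller puller(decoderSource);
//     puller.start();                           // spawns acquire + release threads
//     for (;;) {
//         MediaBuffer* buf = puller.getBufferBlocking();
//         if (buf == NULL) break;               // EOS or stop()
//         /* ... consume buf ... */
//         puller.putBuffer(buf);                // released on the release thread
//         if (puller.hasMediaSourceReturnedError()) break;
//     }
//     puller.stop();                            // waits for both threads, drains lists
//
// The two-thread split lets a blocking MediaSource::read() overlap with
// buffer release, which may itself block inside the codec.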
-/**
-*************************************************************************
-* @file   VideoEditor3gpReader.cpp
-* @brief  StageFright shell 3GP Reader
-*************************************************************************
-*/
-
-#define LOG_NDEBUG 1
-#define LOG_TAG "VIDEOEDITOR_3GPREADER"
-
-/**
- * HEADERS
- *
- */
-#define VIDEOEDITOR_BITSTREAM_PARSER
-
-#include "M4OSA_Debug.h"
-#include "VideoEditor3gpReader.h"
-#include "M4SYS_AccessUnit.h"
-#include "VideoEditorUtils.h"
-#include "M4READER_3gpCom.h"
-#include "M4_Common.h"
-#include "M4OSA_FileWriter.h"
-
-#ifdef VIDEOEDITOR_BITSTREAM_PARSER
-#include "M4OSA_CoreID.h"
-#include "M4OSA_Error.h"
-#include "M4OSA_Memory.h"
-#include "M4_Utils.h"
-#endif
-
-#include "ESDS.h"
-#include "utils/Log.h"
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/MediaDebug.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/FileSource.h>
-
-/**
- * SOURCE CLASS
- */
-namespace android {
-/**
- * ENGINE INTERFACE
- */
-
-const char *MEDIA_MIMETYPE_AUDIO_AACEXTENDED = "audio/mp4a-aacextended";
-/**
- ************************************************************************
- * @brief   Array of AMR NB/WB bitrates
- * @note    Array to match the mode and the bit rate
- ************************************************************************
-*/
-const M4OSA_UInt32 VideoEditor3gpReader_AmrBitRate [2 /* 8kHz / 16kHz */]
-                                                   [9 /* the bitrate mode */] =
-{
-    {4750, 5150, 5900,  6700,  7400,  7950,  10200, 12200, 0},
-    {6600, 8850, 12650, 14250, 15850, 18250, 19850, 23050, 23850}
-};
-
-/**
- *******************************************************************************
- * structure VideoEditor3gpReader_Context
- * @brief  This structure defines the context of the StageFright 3GP shell Reader
- *******************************************************************************
-*/
-typedef struct {
-    sp<DataSource>       mDataSource;
-    sp<MediaExtractor>   mExtractor;
-    sp<MediaSource>      mAudioSource;
-    sp<MediaSource>      mVideoSource;
-    M4_StreamHandler*    mAudioStreamHandler;
-    M4_StreamHandler*    mVideoStreamHandler;
-    M4SYS_AccessUnit     mAudioAu;
-    M4SYS_AccessUnit     mVideoAu;
-    M4OSA_Time           mMaxDuration;
-    int64_t              mFileSize;
-    M4_StreamType        mStreamType;
-    M4OSA_UInt32         mStreamId;
-    int32_t              mTracks;
-    int32_t              mCurrTrack;
-    M4OSA_Bool           mAudioSeeking;
-    M4OSA_Time           mAudioSeekTime;
-    M4OSA_Bool           mVideoSeeking;
-    M4OSA_Time           mVideoSeekTime;
-
-} VideoEditor3gpReader_Context;
-
-#ifdef VIDEOEDITOR_BITSTREAM_PARSER
-/**
- ************************************************************************
- * structure VideoEditor3gpReader_BitStreamParserContext
- * @brief   Internal BitStreamParser context
- ************************************************************************
-*/
-typedef struct {
-    M4OSA_UInt32*   mPbitStream;   /**< bitstream pointer (32bits aligned) */
-    M4OSA_Int32     mSize;         /**< bitstream size in bytes */
-    M4OSA_Int32     mIndex;        /**< byte index */
-    M4OSA_Int32     mBitIndex;     /**< bit index */
-    M4OSA_Int32     mStructSize;   /**< size of structure */
-} VideoEditor3gpReader_BitStreamParserContext;
-
-/**
- *******************************************************************************
- * @brief   Allocates the context and initializes internal data.
- * @param   pContext    (OUT)  Pointer to the BitStreamParser context to create.
- * @param bitStream A pointer to the bitstream - * @param size The size of the bitstream in bytes - ******************************************************************************* -*/ -static void VideoEditor3gpReader_BitStreamParserInit(void** pContext, - void* pBitStream, M4OSA_Int32 size) { - VideoEditor3gpReader_BitStreamParserContext* pStreamContext; - - *pContext=M4OSA_NULL; - pStreamContext = (VideoEditor3gpReader_BitStreamParserContext*)M4OSA_32bitAlignedMalloc( - sizeof(VideoEditor3gpReader_BitStreamParserContext), M4READER_3GP, - (M4OSA_Char*)"3GP BitStreamParser Context"); - if (M4OSA_NULL == pStreamContext) { - return; - } - pStreamContext->mPbitStream=(M4OSA_UInt32*)pBitStream; - pStreamContext->mSize=size; - pStreamContext->mIndex=0; - pStreamContext->mBitIndex=0; - pStreamContext->mStructSize = - sizeof(VideoEditor3gpReader_BitStreamParserContext); - - *pContext=pStreamContext; -} -/** - ********************************************************************** - * @brief Clean up context - * @param pContext (IN/OUT) BitStreamParser context. - ********************************************************************** -*/ -static void VideoEditor3gpReader_BitStreamParserCleanUp(void* pContext) { - free((M4OSA_Int32*)pContext); -} -/** - ***************************************************************************** - * @brief Read the next bits in the bitstream. - * @note The function does not update the bitstream pointer. - * @param pContext (IN/OUT) BitStreamParser context. - * @param length (IN) The number of bits to extract from the bitstream - * @return the read bits - ***************************************************************************** -*/ -static M4OSA_UInt32 VideoEditor3gpReader_BitStreamParserShowBits(void* pContext, - M4OSA_Int32 length) { - VideoEditor3gpReader_BitStreamParserContext* pStreamContext = - (VideoEditor3gpReader_BitStreamParserContext*)pContext; - - M4OSA_UInt32 u_mask; - M4OSA_UInt32 retval; - M4OSA_Int32 i_ovf; - - M4OSA_DEBUG_IF1((M4OSA_NULL==pStreamContext), 0, - "VideoEditor3gpReader_BitStreamParserShowBits:invalid context pointer"); - - retval=(M4OSA_UInt32)GET_MEMORY32(pStreamContext->\ - mPbitStream[ pStreamContext->mIndex ]); - i_ovf = pStreamContext->mBitIndex + length - 32; - u_mask = (length >= 32) ? 0xffffffff: (1 << length) - 1; - - /* do we have enough bits availble in the current word(32bits)*/ - if (i_ovf <= 0) { - retval=(retval >> (- i_ovf)) & u_mask; - } else { - M4OSA_UInt32 u_nextword = (M4OSA_UInt32)GET_MEMORY32( - pStreamContext->mPbitStream[ pStreamContext->mIndex + 1 ]); - M4OSA_UInt32 u_msb_mask, u_msb_value, u_lsb_mask, u_lsb_value; - - u_msb_mask = ((1 << (32 - pStreamContext->mBitIndex)) - 1) << i_ovf; - u_msb_value = retval << i_ovf; - u_lsb_mask = (1 << i_ovf) - 1; - u_lsb_value = u_nextword >> (32 - i_ovf); - retval= (u_msb_value & u_msb_mask ) | (u_lsb_value & u_lsb_mask); - } - /* return the bits...*/ - return retval; -} -/** - ************************************************************************ - * @brief Increment the bitstream pointer of bits. - * @param pContext (IN/OUT) BitStreamParser context. 
- * @param length (IN) The number of bit to shift the bitstream - ************************************************************************ -*/ -static void VideoEditor3gpReader_BitStreamParserFlushBits(void* pContext, - M4OSA_Int32 length) { - VideoEditor3gpReader_BitStreamParserContext* pStreamContext=( - VideoEditor3gpReader_BitStreamParserContext*)pContext; - M4OSA_Int32 val; - - if (M4OSA_NULL == pStreamContext) { - return; - } - val=pStreamContext->mBitIndex + length; - /* update the bits...*/ - pStreamContext->mBitIndex += length; - - if (val - 32 >= 0) { - /* update the bits...*/ - pStreamContext->mBitIndex -= 32; - /* update the words*/ - pStreamContext->mIndex++; - } -} - -static M4OSA_UInt32 VideoEditor3gpReader_BitStreamParserGetBits( - void* pContext,M4OSA_Int32 bitPos, M4OSA_Int32 bitLength) { - VideoEditor3gpReader_BitStreamParserContext* pStreamContext = - (VideoEditor3gpReader_BitStreamParserContext*)pContext; - - M4OSA_Int32 bitLocation, bitIndex; - M4OSA_UInt32 retval=0; - - M4OSA_DEBUG_IF1((M4OSA_NULL==pStreamContext), 0, - "VideoEditor3gpReader_BitStreamParserGetBits: invalid context pointer"); - - /* computes the word location*/ - bitLocation=bitPos/32; - bitIndex=(bitPos) % 32; - - if (bitLocation < pStreamContext->mSize) { - M4OSA_UInt32 u_mask; - M4OSA_Int32 i_ovf = bitIndex + bitLength - 32; - retval=(M4OSA_UInt32)GET_MEMORY32( - pStreamContext->mPbitStream[ bitLocation ]); - - u_mask = (bitLength >= 32) ? 0xffffffff: (1 << bitLength) - 1; - - if (i_ovf <= 0) { - retval=(retval >> (- i_ovf)) & u_mask; - } else { - M4OSA_UInt32 u_nextword = (M4OSA_UInt32)GET_MEMORY32( - pStreamContext->mPbitStream[ bitLocation + 1 ]); - M4OSA_UInt32 u_msb_mask, u_msb_value, u_lsb_mask, u_lsb_value; - - u_msb_mask = ((1 << (32 - bitIndex)) - 1) << i_ovf; - u_msb_value = retval << i_ovf; - u_lsb_mask = (1 << i_ovf) - 1; - u_lsb_value = u_nextword >> (32 - i_ovf); - retval= (u_msb_value & u_msb_mask ) | (u_lsb_value & u_lsb_mask); - } - } - return retval; -} - -static void VideoEditor3gpReader_BitStreamParserRestart(void* pContext) { - VideoEditor3gpReader_BitStreamParserContext* pStreamContext = - (VideoEditor3gpReader_BitStreamParserContext*)pContext; - - if (M4OSA_NULL == pStreamContext) { - return; - } - /* resets the bitstream pointers*/ - pStreamContext->mIndex=0; - pStreamContext->mBitIndex=0; -} -/** - ******************************************************************************* - * @brief Get a pointer to the current byte pointed by the bitstream pointer. - * @note It should be used carefully as the pointer is in the bitstream itself - * and no copy is made. - * @param pContext (IN/OUT) BitStreamParser context. 
- * @return  Pointer to the current location in the bitstream
- *******************************************************************************
-*/
-static M4OSA_UInt8* VideoEditor3gpReader_GetCurrentbitStreamPointer(
-        void* pContext) {
-    VideoEditor3gpReader_BitStreamParserContext* pStreamContext =
-        (VideoEditor3gpReader_BitStreamParserContext*)pContext;
-    M4OSA_DEBUG_IF1((M4OSA_NULL==pStreamContext), 0, "invalid context pointer");
-
-    return (M4OSA_UInt8*)((M4OSA_UInt8*)pStreamContext->mPbitStream + \
-        pStreamContext->mIndex * sizeof(M4OSA_UInt32) + \
-        pStreamContext->mBitIndex/8) ;
-}
-
-static M4OSA_Int32 VideoEditor3gpReader_BitStreamParserGetSize(void* pContext) {
-    VideoEditor3gpReader_BitStreamParserContext* pStreamContext =
-        (VideoEditor3gpReader_BitStreamParserContext*)pContext;
-    M4OSA_DEBUG_IF1((M4OSA_NULL==pStreamContext), 0, "invalid context pointer");
-
-    return pStreamContext->mSize;
-}
-
-
-static void VideoEditor3gpReader_MPEG4BitStreamParserInit(void** pContext,
-        void* pBitStream, M4OSA_Int32 size) {
-    VideoEditor3gpReader_BitStreamParserInit(pContext, pBitStream, size);
-}
-static M4OSA_Int32 VideoEditor3gpReader_GetMpegLengthFromInteger(void* pContext,
-        M4OSA_UInt32 val) {
-    M4OSA_UInt32 length=0;
-    M4OSA_UInt32 numBytes=0;
-    M4OSA_UInt32 b=0;
-
-    M4OSA_DEBUG_IF1((M4OSA_NULL==pContext), 0, "invalid context pointer");
-
-    /* the length is encoded as a sequence of bytes. The highest bit is used
-    to indicate that the length continues on the next byte.
-
-    The length can be: 0x80 0x80 0x80 0x22
-    or just 0x22 (highest bit not set)
-
-    */
-
-    do {
-        b=(val & ((0xff)<< (8 * numBytes)))>> (8 * numBytes);
-        length=(length << 7) | (b & 0x7f);
-        numBytes++;
-    } while ((b & 0x80) && numBytes < 4);
-
-    return length;
-}
-
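A standalone worked example of the SDL size decoding above (illustrative; it mirrors the loop, including the four-byte cap, and checks both encodings named in the comment):

    // Each byte carries 7 payload bits; the MSB flags a continuation byte.
    #include <cstdint>
    #include <cstdio>

    static uint32_t decodeSdlLength(const uint8_t* bytes, int maxBytes) {
        uint32_t length = 0;
        int n = 0;
        uint8_t b;
        do {
            b = bytes[n++];
            length = (length << 7) | (b & 0x7F);
        } while ((b & 0x80) && n < maxBytes);
        return length;
    }

    int main() {
        const uint8_t longForm[]  = { 0x80, 0x80, 0x80, 0x22 };
        const uint8_t shortForm[] = { 0x22 };
        // Both forms decode to the same value, 0x22 = 34.
        printf("%u %u\n", decodeSdlLength(longForm, 4),
                          decodeSdlLength(shortForm, 1));
        return 0;
    }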
-/**
- *******************************************************************************
- * @brief   Decode an MPEG4 Systems descriptor size from an encoded SDL size data
- * @note    The value is read from the current bitstream location.
- * @param   pContext    (IN/OUT)  BitStreamParser context.
- * @return  Size in a human readable form
- *******************************************************************************
-*/
-static M4OSA_Int32 VideoEditor3gpReader_GetMpegLengthFromStream(void* pContext){
-    M4OSA_UInt32 length=0;
-    M4OSA_UInt32 numBytes=0;
-    M4OSA_UInt32 b=0;
-
-    M4OSA_DEBUG_IF1((M4OSA_NULL==pContext), 0, "invalid context pointer");
-
-    /* the length is encoded as a sequence of bytes. The highest bit is used
-    to indicate that the length continues on the next byte.
-
-    The length can be: 0x80 0x80 0x80 0x22
-    or just 0x22 (highest bit not set)
-    */
-
-    do {
-        b=VideoEditor3gpReader_BitStreamParserShowBits(pContext, 8);
-        VideoEditor3gpReader_BitStreamParserFlushBits(pContext, 8);
-        length=(length << 7) | (b & 0x7f);
-        numBytes++;
-    } while ((b & 0x80) && numBytes < 4);
-
-    return length;
-}
-#endif /* VIDEOEDITOR_BITSTREAM_PARSER */
-/**
-************************************************************************
-* @brief   create an instance of the 3gp reader
-* @note    allocates the context
-*
-* @param   pContext:        (OUT)  pointer on a reader context
-*
-* @return  M4NO_ERROR       there is no error
-* @return  M4ERR_ALLOC      a memory allocation has failed
-* @return  M4ERR_PARAMETER  at least one parameter is not valid
-************************************************************************
-*/
-
-M4OSA_ERR VideoEditor3gpReader_create(M4OSA_Context *pContext) {
-    VideoEditor3gpReader_Context* pC = NULL;
-    M4OSA_ERR err = M4NO_ERROR;
-    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext , M4ERR_PARAMETER);
-
-    ALOGV("VideoEditor3gpReader_create begin");
-
-    /* Context allocation & initialization */
-    SAFE_MALLOC(pC, VideoEditor3gpReader_Context, 1, "VideoEditor3gpReader");
-
-    memset(pC, 0, sizeof(VideoEditor3gpReader_Context));
-
-    pC->mAudioStreamHandler  = M4OSA_NULL;
-    pC->mAudioAu.dataAddress = M4OSA_NULL;
-    pC->mVideoStreamHandler  = M4OSA_NULL;
-    pC->mVideoAu.dataAddress = M4OSA_NULL;
-
-    pC->mAudioSeeking = M4OSA_FALSE;
-    pC->mAudioSeekTime = 0;
-
-    pC->mVideoSeeking = M4OSA_FALSE;
-    pC->mVideoSeekTime = 0;
-
-    pC->mMaxDuration = 0;
-
-    *pContext=pC;
-
-cleanUp:
-    if ( M4NO_ERROR == err ) {
-        ALOGV("VideoEditor3gpReader_create no error");
-    } else {
-        ALOGV("VideoEditor3gpReader_create ERROR 0x%X", err);
-    }
-    ALOGV("VideoEditor3gpReader_create end ");
-    return err;
-}
-
-/**
-**************************************************************************
-* @brief   destroy the instance of the 3gp reader
-* @note    after this call the context is invalid
-* @param   context:         (IN) Context of the reader
-* @return  M4NO_ERROR       there is no error
-* @return  M4ERR_PARAMETER  pContext parameter is not properly set
-**************************************************************************
-*/
-
-M4OSA_ERR VideoEditor3gpReader_destroy(M4OSA_Context pContext) {
-    M4OSA_ERR err = M4NO_ERROR;
-    VideoEditor3gpReader_Context* pC = M4OSA_NULL;
-
-    ALOGV("VideoEditor3gpReader_destroy begin");
-
-    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
-    pC = (VideoEditor3gpReader_Context*)pContext;
-
-    SAFE_FREE(pC->mAudioAu.dataAddress);
-    pC->mAudioAu.dataAddress = M4OSA_NULL;
-    SAFE_FREE(pC->mVideoAu.dataAddress);
-    pC->mVideoAu.dataAddress = M4OSA_NULL;
-    SAFE_FREE(pC);
-    pContext = M4OSA_NULL;
-
-cleanUp:
-    if( M4NO_ERROR == err ) {
-        ALOGV("VideoEditor3gpReader_destroy no error");
-    } else {
-        ALOGV("VideoEditor3gpReader_destroy ERROR 0x%X", err);
-    }
-
-    ALOGV("VideoEditor3gpReader_destroy end ");
-    return err;
-}
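Taken together, the entry points deleted in this file form the usual shell-reader lifecycle. A hedged sketch of the calling sequence (the file path and locals are placeholders; error handling is mostly elided, and a real caller must also bail out on hard errors instead of looping):

    M4OSA_Context context = M4OSA_NULL;
    VideoEditor3gpReader_create(&context);
    VideoEditor3gpReader_open(context, (M4OSA_Void*)"/sdcard/clip.3gp");

    M4READER_MediaFamily family;
    M4_StreamHandler* handler = M4OSA_NULL;
    M4OSA_ERR err;
    while ((err = VideoEditor3gpReader_getNextStreamHandler(context,
            &family, &handler)) != M4WAR_NO_MORE_STREAM) {
        if (err != M4NO_ERROR) continue;   // e.g. an unsupported track type
        M4_AccessUnit au;
        VideoEditor3gpReader_fillAuStruct(context, handler, &au);
        while (VideoEditor3gpReader_getNextAu(context, handler, &au)
                != M4WAR_NO_MORE_AU) {
            // consume au.m_dataAddress / au.m_size / au.m_CTS (in ms)
        }
    }

    VideoEditor3gpReader_close(context);
    VideoEditor3gpReader_destroy(context);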
-/**
-************************************************************************
-* @brief   open the reader and initializes its created instance
-* @note    this function opens the media file
-* @param   context:          (IN) Context of the reader
-* @param   pFileDescriptor:  (IN) Pointer to proprietary data identifying
-*                                 the media to open
-* @return  M4NO_ERROR        there is no error
-* @return  M4ERR_PARAMETER   the context is NULL
-* @return  M4ERR_UNSUPPORTED_MEDIA_TYPE
-*                            the media is DRM protected
-************************************************************************
-*/
-
-M4OSA_ERR VideoEditor3gpReader_open(M4OSA_Context pContext,
-        M4OSA_Void* pFileDescriptor) {
-    VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)pContext;
-    M4OSA_ERR err = M4NO_ERROR;
-
-    ALOGV("VideoEditor3gpReader_open start ");
-    M4OSA_DEBUG_IF1((M4OSA_NULL == pC),  M4ERR_PARAMETER,
-        "VideoEditor3gpReader_open: invalid context pointer");
-    M4OSA_DEBUG_IF1((M4OSA_NULL == pFileDescriptor), M4ERR_PARAMETER,
-        "VideoEditor3gpReader_open: invalid pointer pFileDescriptor");
-
-    ALOGV("VideoEditor3gpReader_open Datasource start %s",
-        (char*)pFileDescriptor);
-    //pC->mDataSource = DataSource::CreateFromURI((char*)pFileDescriptor);
-    pC->mDataSource = new FileSource ((char*)pFileDescriptor);
-
-    if (pC->mDataSource == NULL) {
-        ALOGV("VideoEditor3gpReader_open Datasource error");
-        return M4ERR_PARAMETER;
-    }
-
-    pC->mExtractor = MediaExtractor::Create(pC->mDataSource,
-        MEDIA_MIMETYPE_CONTAINER_MPEG4);
-
-    if (pC->mExtractor == NULL) {
-        ALOGV("VideoEditor3gpReader_open extractor error");
-        return M4ERR_PARAMETER;
-    }
-
-    int32_t isDRMProtected = 0;
-    sp<MetaData> meta = pC->mExtractor->getMetaData();
-    meta->findInt32(kKeyIsDRM, &isDRMProtected);
-    if (isDRMProtected) {
-        ALOGV("VideoEditor3gpReader_open error - DRM Protected");
-        return M4ERR_UNSUPPORTED_MEDIA_TYPE;
-    }
-
-    M4OSA_UInt8 temp, trackCount;
-    const char *mime;
-    temp = 0;
-    trackCount = pC->mExtractor->countTracks();
-    while (temp < trackCount) {
-        meta = pC->mExtractor->getTrackMetaData(temp);
-        CHECK(meta->findCString(kKeyMIMEType, &mime));
-        if (!strcasecmp(mime,MEDIA_MIMETYPE_AUDIO_MPEG)) {
-            ALOGV("VideoEditor3gpReader_open error - audio/mpeg is not supported");
-            return M4ERR_READER_UNKNOWN_STREAM_TYPE;
-        }
-        if (!strcasecmp(mime,MEDIA_MIMETYPE_AUDIO_AACEXTENDED)) {
-            ALOGV("VideoEditor3gpReader_open error - audio/aacextended is not supported");
-            return M4ERR_READER_UNKNOWN_STREAM_TYPE;
-        }
-        temp++;
-    }
-
-    ALOGV("VideoEditor3gpReader_open end ");
-    return err;
-}
-
-/**
-************************************************************************
-* @brief   close the reader
-* @note    close the 3GP file
-* @param   context:           (IN) Context of the reader
-* @return  M4NO_ERROR         there is no error
-* @return  M4ERR_PARAMETER    the context is NULL
-* @return  M4ERR_BAD_CONTEXT  provided context is not a valid one
-************************************************************************
-*/
-M4OSA_ERR VideoEditor3gpReader_close(M4OSA_Context context) {
-    VideoEditor3gpReader_Context *pC = (VideoEditor3gpReader_Context*)context;
-    M4READER_AudioSbrUserdata *pAudioSbrUserData;
-    M4_AccessUnit *pAU;
-    M4OSA_ERR err = M4NO_ERROR;
-
-    ALOGV("VideoEditor3gpReader_close begin");
-
-    M4OSA_DEBUG_IF1((M4OSA_NULL == pC), M4ERR_PARAMETER,
-        "VideoEditor3gpReader_close: invalid context pointer");
-
-    if (pC->mAudioStreamHandler) {
-        ALOGV("VideoEditor3gpReader_close Audio");
-
-        if (M4OSA_NULL != pC->mAudioStreamHandler->m_pDecoderSpecificInfo) {
-            free(pC->mAudioStreamHandler->\
-                m_pDecoderSpecificInfo);
-            pC->mAudioStreamHandler->m_decoderSpecificInfoSize = 0;
-            pC->mAudioStreamHandler->m_pDecoderSpecificInfo = M4OSA_NULL;
-        }
-
-        if ((M4DA_StreamTypeAudioAac == pC->mAudioStreamHandler->m_streamType)
-            && (M4OSA_NULL != pC->mAudioStreamHandler->m_pUserData)) {
-            pAudioSbrUserData = (M4READER_AudioSbrUserdata*)(\
-                pC->mAudioStreamHandler->m_pUserData);
-
-            pAU = (M4_AccessUnit*)pAudioSbrUserData->m_pFirstAU;
-            if (M4OSA_NULL != pAU) {
-                free(pAU);
-            }
-
-            if (M4OSA_NULL != pAudioSbrUserData->m_pAacDecoderUserConfig) {
-                free(pAudioSbrUserData->\
-
m_pAacDecoderUserConfig); - } - free(pAudioSbrUserData); - pC->mAudioStreamHandler->m_pUserData = M4OSA_NULL; - } - - if (pC->mAudioStreamHandler->m_pESDSInfo != M4OSA_NULL) { - free(pC->mAudioStreamHandler->m_pESDSInfo); - pC->mAudioStreamHandler->m_pESDSInfo = M4OSA_NULL; - pC->mAudioStreamHandler->m_ESDSInfoSize = 0; - } - /* Finally destroy the stream handler */ - free(pC->mAudioStreamHandler); - pC->mAudioStreamHandler = M4OSA_NULL; - - pC->mAudioSource->stop(); - pC->mAudioSource.clear(); - } - if (pC->mVideoStreamHandler) { - ALOGV("VideoEditor3gpReader_close Video "); - - if(M4OSA_NULL != pC->mVideoStreamHandler->m_pDecoderSpecificInfo) { - free(pC->mVideoStreamHandler->\ - m_pDecoderSpecificInfo); - pC->mVideoStreamHandler->m_decoderSpecificInfoSize = 0; - pC->mVideoStreamHandler->m_pDecoderSpecificInfo = M4OSA_NULL; - } - - if(M4OSA_NULL != pC->mVideoStreamHandler->m_pH264DecoderSpecificInfo) { - free(pC->mVideoStreamHandler->\ - m_pH264DecoderSpecificInfo); - pC->mVideoStreamHandler->m_H264decoderSpecificInfoSize = 0; - pC->mVideoStreamHandler->m_pH264DecoderSpecificInfo = M4OSA_NULL; - } - - if(pC->mVideoStreamHandler->m_pESDSInfo != M4OSA_NULL) { - free(pC->mVideoStreamHandler->m_pESDSInfo); - pC->mVideoStreamHandler->m_pESDSInfo = M4OSA_NULL; - pC->mVideoStreamHandler->m_ESDSInfoSize = 0; - } - - /* Finally destroy the stream handler */ - free(pC->mVideoStreamHandler); - pC->mVideoStreamHandler = M4OSA_NULL; - - pC->mVideoSource->stop(); - pC->mVideoSource.clear(); - } - pC->mExtractor.clear(); - pC->mDataSource.clear(); - - ALOGV("VideoEditor3gpReader_close end"); - return err; -} - -/** -************************************************************************ -* @brief get an option from the 3gp reader -* @note it allows the caller to retrieve a property value: -* -* @param context: (IN) Context of the reader -* @param optionId: (IN) indicates the option to get -* @param pValue: (OUT) pointer to structure or value (allocated -* by user) where option is stored -* -* @return M4NO_ERROR there is no error -* @return M4ERR_BAD_CONTEXT provided context is not a valid one -* @return M4ERR_PARAMETER at least one parameter is not properly set -* @return M4ERR_BAD_OPTION_ID when the option ID is not a valid one -* @return M4ERR_VIDEO_NOT_H263 No video stream H263 in file. 
-* @return M4ERR_NO_VIDEO_STREAM_RETRIEVED_YET -* Function 3gpReader_getNextStreamHandler must be called before -************************************************************************ -*/ -M4OSA_ERR VideoEditor3gpReader_getOption(M4OSA_Context context, - M4OSA_OptionID optionId, M4OSA_DataOption pValue) { - VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)context; - M4OSA_ERR err = M4NO_ERROR; - - ALOGV("VideoEditor3gpReader_getOption begin %d", optionId); - - M4OSA_DEBUG_IF1((M4OSA_NULL == pC), M4ERR_PARAMETER, - "invalid context pointer"); - M4OSA_DEBUG_IF1((M4OSA_NULL == pValue), M4ERR_PARAMETER, - "VideoEditor3gpReader_getOption: invalid pointer on value"); - - switch (optionId) { - case M4READER_kOptionID_Duration: - { - ALOGV("VideoEditor3gpReader_getOption duration %d",pC->mMaxDuration); - *(M4OSA_Time*)pValue = pC->mMaxDuration; - } - break; - case M4READER_kOptionID_Version: - /* not used */ - ALOGV("VideoEditor3gpReader_getOption: M4READER_kOptionID_Version"); - break; - - case M4READER_kOptionID_Copyright: - /* not used */ - ALOGV(">>>>>>> M4READER_kOptionID_Copyright"); - break; - - case M4READER_kOptionID_CreationTime: - /* not used */ - ALOGV("VideoEditor3gpReader_getOption M4READER_kOptionID_CreationTime"); - break; - - case M4READER_kOptionID_Bitrate: - { - M4OSA_UInt32* pBitrate = (M4OSA_UInt32*)pValue; - - if (pC->mMaxDuration != 0) { - M4OSA_UInt32 ui32Tmp = (M4OSA_UInt32)pC->mMaxDuration; - *pBitrate = (M4OSA_UInt32)(pC->mFileSize * 8000.0 / pC->mMaxDuration); - } - ALOGV("VideoEditor3gpReader_getOption bitrate %ld", *pBitrate); - } - break; - case M4READER_3GP_kOptionID_H263Properties: - { - if(M4OSA_NULL == pC->mVideoStreamHandler) { - ALOGV("VideoEditor3gpReader_getOption no videoStream retrieved"); - - err = M4ERR_NO_VIDEO_STREAM_RETRIEVED_YET; - break; - } - if((M4DA_StreamTypeVideoH263 != pC->mVideoStreamHandler->\ - m_streamType) || (pC->mVideoStreamHandler->\ - m_decoderSpecificInfoSize < 7)) { - ALOGV("VideoEditor3gpReader_getOption DSI Size %d", - pC->mVideoStreamHandler->m_decoderSpecificInfoSize); - - err = M4ERR_VIDEO_NOT_H263; - break; - } - - /* MAGICAL in the decoder confi H263: the 7th byte is the profile - * number, 6th byte is the level number */ - ((M4READER_3GP_H263Properties *)pValue)->uiProfile = - pC->mVideoStreamHandler->m_pDecoderSpecificInfo[6]; - ((M4READER_3GP_H263Properties *)pValue)->uiLevel = - pC->mVideoStreamHandler->m_pDecoderSpecificInfo[5]; - ALOGV("VideoEditor3gpReader_getOption M4READER_3GP_kOptionID_\ - H263Properties end"); - } - break; - case M4READER_3GP_kOptionID_PurpleLabsDrm: - ALOGV("VideoEditor3gpReaderOption M4READER_3GP_kOptionID_PurpleLabsDrm"); - /* not used */ - break; - - case M4READER_kOptionID_GetNumberOfAudioAu: - /* not used */ - ALOGV("VideoEditor3gpReadeOption M4READER_kOptionID_GetNumberOfAudioAu"); - break; - - case M4READER_kOptionID_GetNumberOfVideoAu: - /* not used */ - ALOGV("VideoEditor3gpReader_getOption :GetNumberOfVideoAu"); - break; - - case M4READER_kOptionID_GetMetadata: - /* not used */ - ALOGV("VideoEditor3gpReader_getOption M4READER_kOptionID_GetMetadata"); - break; - - case M4READER_kOptionID_3gpFtypBox: - /* used only for SEMC */ - ALOGV("VideoEditor3gpReader_getOption M4READER_kOptionID_3gpFtypBox"); - err = M4ERR_BAD_OPTION_ID; //check this - break; - -#ifdef OPTIONID_GET_NEXT_VIDEO_CTS - case M4READER_3GP_kOptionID_getNextVideoCTS: - /* not used */ - ALOGV("VideoEditor3gpReader_getOption: getNextVideoCTS"); - break; -#endif - default: - { - err = M4ERR_BAD_OPTION_ID; - 
ALOGV("VideoEditor3gpReader_getOption M4ERR_BAD_OPTION_ID"); - } - break; - } - ALOGV("VideoEditor3gpReader_getOption end: optionID: x%x", optionId); - return err; -} -/** -************************************************************************ -* @brief set an option on the 3gp reader -* @note No option can be set yet. -* @param context: (IN) Context of the reader -* @param optionId: (IN) indicates the option to set -* @param pValue: (IN) pointer to structure or value (allocated -* by user) where option is stored -* @return M4NO_ERROR there is no error -* @return M4ERR_BAD_CONTEXT provided context is not a valid one -* @return M4ERR_PARAMETER at least one parameter is not properly set -* @return M4ERR_BAD_OPTION_ID when the option ID is not a valid one -************************************************************************ -*/ -M4OSA_ERR VideoEditor3gpReader_setOption(M4OSA_Context context, - M4OSA_OptionID optionId, M4OSA_DataOption pValue) { - VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)context; - M4OSA_ERR err = M4NO_ERROR; - - /* Check function parameters */ - M4OSA_DEBUG_IF1((M4OSA_NULL == pC), M4ERR_PARAMETER, - "invalid context pointer"); - M4OSA_DEBUG_IF1((M4OSA_NULL == pValue), M4ERR_PARAMETER, - "invalid value pointer"); - - ALOGV("VideoEditor3gpReader_setOption begin %d",optionId); - - switch(optionId) { - case M4READER_kOptionID_SetOsaFileReaderFctsPtr: - break; - - case M4READER_3GP_kOptionID_AudioOnly: - break; - - case M4READER_3GP_kOptionID_VideoOnly: - break; - - case M4READER_3GP_kOptionID_FastOpenMode: - break; - - case M4READER_kOptionID_MaxMetadataSize: - break; - - default: - { - ALOGV("VideoEditor3gpReader_setOption: returns M4ERR_BAD_OPTION_ID"); - err = M4ERR_BAD_OPTION_ID; - } - break; - } - ALOGV("VideoEditor3gpReader_setOption end "); - return err; -} -/** - ************************************************************************ - * @brief fill the access unit structure with initialization values - * @param context: (IN) Context of the reader - * @param pStreamHandler: (IN) pointer to the stream handler to which - * the access unit will be associated - * @param pAccessUnit: (IN/OUT) pointer to the access unit (allocated - * by the caller) to initialize - * @return M4NO_ERROR there is no error - * @return M4ERR_PARAMETER at least one parameter is not properly set - ************************************************************************ -*/ -M4OSA_ERR VideoEditor3gpReader_fillAuStruct(M4OSA_Context context, - M4_StreamHandler *pStreamHandler, M4_AccessUnit *pAccessUnit) { - VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)context; - M4OSA_ERR err= M4NO_ERROR; - - M4OSA_DEBUG_IF1((pC == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_fillAuStruct: invalid context"); - M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_fillAuStruc invalid pointer to M4_StreamHandler"); - M4OSA_DEBUG_IF1((pAccessUnit == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_fillAuStruct: invalid pointer to M4_AccessUnit"); - - ALOGV("VideoEditor3gpReader_fillAuStruct begin"); - - /* Initialize pAccessUnit structure */ - pAccessUnit->m_size = 0; - pAccessUnit->m_CTS = 0; - pAccessUnit->m_DTS = 0; - pAccessUnit->m_attribute = 0; - pAccessUnit->m_dataAddress = M4OSA_NULL; - pAccessUnit->m_maxsize = pStreamHandler->m_maxAUSize; - pAccessUnit->m_streamID = pStreamHandler->m_streamId; - pAccessUnit->m_structSize = sizeof(M4_AccessUnit); - - ALOGV("VideoEditor3gpReader_fillAuStruct end"); - return M4NO_ERROR; -} - -/** 
-******************************************************************************** -* @brief jump into the stream at the specified time -* @note -* @param context: (IN) Context of the reader -* @param pStreamHandler (IN) the stream handler of the stream to make jump -* @param pTime (I/O)IN the time to jump to (in ms) -* OUT the time to which the stream really jumped -* @return M4NO_ERROR there is no error -* @return M4ERR_PARAMETER at least one parameter is not properly set -******************************************************************************** -*/ -M4OSA_ERR VideoEditor3gpReader_jump(M4OSA_Context context, - M4_StreamHandler *pStreamHandler, M4OSA_Int32* pTime) { - VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)context; - M4OSA_ERR err = M4NO_ERROR; - M4SYS_AccessUnit* pAu; - M4OSA_Time time64; - - M4OSA_DEBUG_IF1((pC == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_jump: invalid context"); - M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_jump: invalid pointer to M4_StreamHandler"); - M4OSA_DEBUG_IF1((pTime == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_jump: invalid time pointer"); - - ALOGV("VideoEditor3gpReader_jump begin"); - - if (*pTime == (pStreamHandler->m_duration)) { - *pTime -= 1; - } - time64 = (M4OSA_Time)*pTime; - - ALOGV("VideoEditor3gpReader_jump time us %ld ", time64); - - if ((pC->mAudioStreamHandler != M4OSA_NULL) && - (pStreamHandler->m_streamId == pC->mAudioStreamHandler->m_streamId)) - { - pAu = &pC->mAudioAu; - pAu->CTS = time64; - pAu->DTS = time64; - - time64 = time64 * 1000; /* Convert the time into micro sec */ - pC->mAudioSeeking = M4OSA_TRUE; - pC->mAudioSeekTime = time64; - ALOGV("VideoEditor3gpReader_jump AUDIO time us %ld ", time64); - } else if ((pC->mVideoStreamHandler != M4OSA_NULL) && - (pStreamHandler->m_streamId == pC->mVideoStreamHandler->m_streamId)) - { - pAu = &pC->mVideoAu; - pAu->CTS = time64; - pAu->DTS = time64; - - time64 = time64 * 1000; /* Convert the time into micro sec */ - pC->mVideoSeeking = M4OSA_TRUE; - pC->mVideoSeekTime = time64; - ALOGV("VideoEditor3gpReader_jump VIDEO time us %ld ", time64); - } else { - ALOGV("VideoEditor3gpReader_jump passed StreamHandler is not known\n"); - return M4ERR_PARAMETER; - } - time64 = time64 / 1000; /* Convert the time into milli sec */ - ALOGV("VideoEditor3gpReader_jump time ms before seekset %ld ", time64); - - *pTime = (M4OSA_Int32)time64; - - ALOGV("VideoEditor3gpReader_jump end"); - err = M4NO_ERROR; - return err; -} -/** -******************************************************************************** -* @brief reset the stream, that is seek it to beginning and make it ready -* @note -* @param context: (IN) Context of the reader -* @param pStreamHandler (IN) The stream handler of the stream to reset -* @return M4NO_ERROR there is no error -* @return M4ERR_PARAMETER at least one parameter is not properly set -******************************************************************************** -*/ -M4OSA_ERR VideoEditor3gpReader_reset(M4OSA_Context context, - M4_StreamHandler *pStreamHandler) { - VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)context; - M4OSA_ERR err = M4NO_ERROR; - M4SYS_StreamID streamIdArray[2]; - M4SYS_AccessUnit* pAu; - M4OSA_Time time64 = 0; - - M4OSA_DEBUG_IF1((pC == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_reset: invalid context"); - M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_reset: invalid pointer to M4_StreamHandler"); - - 
ALOGV("VideoEditor3gpReader_reset begin"); - - if (pStreamHandler == (M4_StreamHandler*)pC->mAudioStreamHandler) { - pAu = &pC->mAudioAu; - } else if (pStreamHandler == (M4_StreamHandler*)pC->mVideoStreamHandler) { - pAu = &pC->mVideoAu; - } else { - ALOGV("VideoEditor3gpReader_reset passed StreamHandler is not known\n"); - return M4ERR_PARAMETER; - } - - pAu->CTS = time64; - pAu->DTS = time64; - - ALOGV("VideoEditor3gpReader_reset end"); - return err; -} - -/** -******************************************************************************** -* @brief Gets an access unit (AU) from the stream handler source. -* @note An AU is the smallest possible amount of data to be decoded by decoder -* -* @param context: (IN) Context of the reader -* @param pStreamHandler (IN) The stream handler of the stream to make jump -* @param pAccessUnit (IO) Pointer to access unit to fill with read data -* @return M4NO_ERROR there is no error -* @return M4ERR_PARAMETER at least one parameter is not properly set -* @returns M4ERR_ALLOC memory allocation failed -* @returns M4WAR_NO_MORE_AU there are no more access unit in the stream -******************************************************************************** -*/ -M4OSA_ERR VideoEditor3gpReader_getNextAu(M4OSA_Context context, - M4_StreamHandler *pStreamHandler, M4_AccessUnit *pAccessUnit) { - VideoEditor3gpReader_Context* pC=(VideoEditor3gpReader_Context*)context; - M4OSA_ERR err = M4NO_ERROR; - M4SYS_AccessUnit* pAu; - int64_t tempTime64 = 0; - MediaBuffer *mMediaBuffer = NULL; - MediaSource::ReadOptions options; - M4OSA_Bool flag = M4OSA_FALSE; - status_t error; - int32_t i32Tmp = 0; - - M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_getNextAu: invalid context"); - M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_getNextAu: invalid pointer to M4_StreamHandler"); - M4OSA_DEBUG_IF1((pAccessUnit == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_getNextAu: invalid pointer to M4_AccessUnit"); - - ALOGV("VideoEditor3gpReader_getNextAu begin"); - - if (pStreamHandler == (M4_StreamHandler*)pC->mAudioStreamHandler) { - ALOGV("VideoEditor3gpReader_getNextAu audio stream"); - pAu = &pC->mAudioAu; - if (pC->mAudioSeeking == M4OSA_TRUE) { - ALOGV("VideoEditor3gpReader_getNextAu audio seek time: %ld", - pC->mAudioSeekTime); - options.setSeekTo(pC->mAudioSeekTime); - pC->mAudioSource->read(&mMediaBuffer, &options); - - mMediaBuffer->meta_data()->findInt64(kKeyTime, - (int64_t*)&tempTime64); - options.clearSeekTo(); - pC->mAudioSeeking = M4OSA_FALSE; - flag = M4OSA_TRUE; - } else { - ALOGV("VideoEditor3gpReader_getNextAu audio no seek:"); - pC->mAudioSource->read(&mMediaBuffer, &options); - if (mMediaBuffer != NULL) { - mMediaBuffer->meta_data()->findInt64(kKeyTime, - (int64_t*)&tempTime64); - } - } - } else if (pStreamHandler == (M4_StreamHandler*)pC->mVideoStreamHandler) { - ALOGV("VideoEditor3gpReader_getNextAu video steram "); - pAu = &pC->mVideoAu; - if(pC->mVideoSeeking == M4OSA_TRUE) { - flag = M4OSA_TRUE; - ALOGV("VideoEditor3gpReader_getNextAu seek: %ld",pC->mVideoSeekTime); - options.setSeekTo(pC->mVideoSeekTime, - MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC); - do - { - if (mMediaBuffer != NULL) { - ALOGV("VideoEditor3gpReader_getNextAu free the MediaBuffer"); - mMediaBuffer->release(); - } - error = pC->mVideoSource->read(&mMediaBuffer, &options); - ALOGV("VE3gpReader_getNextAu MediaBuffer %x , error %d", - mMediaBuffer, error); - if (mMediaBuffer != NULL) - { - if 
(mMediaBuffer->meta_data()->findInt32(kKeyIsSyncFrame, - &i32Tmp) && i32Tmp) { - ALOGV("SYNC FRAME FOUND--%d", i32Tmp); - pAu->attribute = AU_RAP; - } - else { - pAu->attribute = AU_P_Frame; - } - mMediaBuffer->meta_data()->findInt64(kKeyTime, - (int64_t*)&tempTime64); - } else { - break; - } - options.clearSeekTo(); - } while(tempTime64 < pC->mVideoSeekTime); - - ALOGV("VE3gpReader_getNextAu: video time with seek = %lld:", - tempTime64); - pC->mVideoSeeking = M4OSA_FALSE; - } else { - ALOGV("VideoEditor3gpReader_getNextAu video no seek:"); - pC->mVideoSource->read(&mMediaBuffer, &options); - - if(mMediaBuffer != NULL) { - if (mMediaBuffer->meta_data()->findInt32(kKeyIsSyncFrame, - &i32Tmp) && i32Tmp) { - ALOGV("SYNC FRAME FOUND--%d", i32Tmp); - pAu->attribute = AU_RAP; - } - else { - pAu->attribute = AU_P_Frame; - } - mMediaBuffer->meta_data()->findInt64(kKeyTime, - (int64_t*)&tempTime64); - ALOGV("VE3gpReader_getNextAu: video no seek time = %lld:", - tempTime64); - }else { - ALOGV("VE3gpReader_getNextAu:video no seek time buffer is NULL"); - } - } - } else { - ALOGV("VideoEditor3gpReader_getNextAu M4ERR_PARAMETER"); - return M4ERR_PARAMETER; - } - - if (mMediaBuffer != NULL) { - if( (pAu->dataAddress == NULL) || (pAu->size < \ - mMediaBuffer->range_length())) { - if(pAu->dataAddress != NULL) { - free((M4OSA_Int32*)pAu->dataAddress); - pAu->dataAddress = NULL; - } - ALOGV("Buffer lenght = %d ,%d",(mMediaBuffer->range_length() +\ - 3) & ~0x3,(mMediaBuffer->range_length())); - - pAu->dataAddress = (M4OSA_Int32*)M4OSA_32bitAlignedMalloc( - (mMediaBuffer->range_length() + 3) & ~0x3,M4READER_3GP, - (M4OSA_Char*)"pAccessUnit->m_dataAddress" ); - if(pAu->dataAddress == NULL) { - ALOGV("VideoEditor3gpReader_getNextAu malloc failed"); - return M4ERR_ALLOC; - } - } - pAu->size = mMediaBuffer->range_length(); - - memcpy((void *)pAu->dataAddress, - (void *)((const char *)mMediaBuffer->data() + mMediaBuffer->range_offset()), - mMediaBuffer->range_length()); - - if( (pStreamHandler == (M4_StreamHandler*)pC->mVideoStreamHandler) && - (pStreamHandler->m_streamType == M4DA_StreamTypeVideoMpeg4Avc) ) { - M4OSA_UInt32 size = mMediaBuffer->range_length(); - M4OSA_UInt8 *lbuffer; - - lbuffer = (M4OSA_UInt8 *) pAu->dataAddress; - ALOGV("pAccessUnit->m_dataAddress size = %x",size); - - lbuffer[0] = (size >> 24) & 0xFF; - lbuffer[1] = (size >> 16) & 0xFF; - lbuffer[2] = (size >> 8) & 0xFF; - lbuffer[3] = (size) & 0xFF; - } - - pAu->CTS = tempTime64; - - pAu->CTS = pAu->CTS / 1000; //converting the microsec to millisec - ALOGV("VideoEditor3gpReader_getNextAu CTS = %ld",pAu->CTS); - - pAu->DTS = pAu->CTS; - if (pStreamHandler == (M4_StreamHandler*)pC->mAudioStreamHandler) { - pAu->attribute = M4SYS_kFragAttrOk; - } - mMediaBuffer->release(); - - pAccessUnit->m_dataAddress = (M4OSA_Int8*) pAu->dataAddress; - pAccessUnit->m_size = pAu->size; - pAccessUnit->m_maxsize = pAu->size; - pAccessUnit->m_CTS = pAu->CTS; - pAccessUnit->m_DTS = pAu->DTS; - pAccessUnit->m_attribute = pAu->attribute; - - } else { - ALOGV("VideoEditor3gpReader_getNextAu: M4WAR_NO_MORE_AU (EOS) reached"); - pAccessUnit->m_size = 0; - err = M4WAR_NO_MORE_AU; - } - options.clearSeekTo(); - - pAu->nbFrag = 0; - mMediaBuffer = NULL; - ALOGV("VideoEditor3gpReader_getNextAu end "); - - return err; -} -/** - ******************************************************************************* - * @brief Split the AVC DSI in its different components and write it in - * ONE memory buffer - * @note - * @param pStreamHandler: (IN/OUT) The MPEG4-AVC stream - * 
- * @param   pDecoderConfigLocal:    (IN) The DSI buffer
- * @param   decoderConfigSizeLocal: (IN) The DSI buffer size
- * @return  M4NO_ERROR              there is no error
- * @return  ERR_FILE_SYNTAX_ERROR   pDecoderConfigLocal is NULL
- *******************************************************************************
-*/
-static M4OSA_ERR VideoEditor3gpReader_AnalyseAvcDsi(
-        M4_StreamHandler *pStreamHandler, M4OSA_Int32* pDecoderConfigLocal,
-        M4OSA_Int32 decoderConfigSizeLocal) {
-    struct _avcSpecificInfo *pAvcSpecInfo = M4OSA_NULL;
-    M4OSA_UInt32 uiSpecInfoSize;
-    M4OSA_Context pBitParserContext = M4OSA_NULL;
-    M4OSA_MemAddr8 pPos;
-
-    /**
-     * First parsing to get the total allocation size (we must not do
-     * multiple malloc, but only one instead) */
-    {
-        M4OSA_Int32 val;
-        M4OSA_UInt32 i,j;
-        M4OSA_UInt8 nalUnitLength;
-        M4OSA_UInt8 numOfSequenceParameterSets;
-        M4OSA_UInt32 uiTotalSizeOfSPS = 0;
-        M4OSA_UInt8 numOfPictureParameterSets;
-        M4OSA_UInt32 uiTotalSizeOfPPS = 0;
-        M4OSA_UInt32 uiSize;
-        struct _avcSpecificInfo avcSpIf;
-
-        avcSpIf.m_nalUnitLength = 0;
-
-        if (M4OSA_NULL == pDecoderConfigLocal) {
-            return M4ERR_READER3GP_DECODER_CONFIG_ERROR;
-        }
-
-        VideoEditor3gpReader_MPEG4BitStreamParserInit(&pBitParserContext,
-            pDecoderConfigLocal, decoderConfigSizeLocal);
-
-        if (M4OSA_NULL == pBitParserContext) {
-            return M4ERR_ALLOC;
-        }
-
-        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
-                       /* 8 bits -- configuration version */
-        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
-                       /* 8 bits -- avc profile indication*/
-        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
-                       /* 8 bits -- profile compatibility */
-        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
-                       /* 8 bits -- avc level indication*/
-        val=VideoEditor3gpReader_BitStreamParserShowBits(pBitParserContext, 8);
-                       /* 6 bits reserved 111111b 2 bits length Size minus one*/
-        VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
-                       /* m_nalUnitLength */
-
-        nalUnitLength = (M4OSA_UInt8)((val & 0x03) + 1);/*0b11111100*/
-        if (nalUnitLength > 4) {
-            pStreamHandler->m_decoderSpecificInfoSize = 0;
-            pStreamHandler->m_pDecoderSpecificInfo = M4OSA_NULL;
-            VideoEditor3gpReader_BitStreamParserCleanUp(pBitParserContext);
-        } else {
-            /**
-             * SPS table */
-            val=VideoEditor3gpReader_BitStreamParserShowBits(pBitParserContext,
-            8);/* 3 bits-reserved 111b-5 bits number of sequence parameter set*/
-            numOfSequenceParameterSets = val & 0x1F;
-            /*1F instead of E0*/ /*0b11100000*/ /*Number of seq parameter sets*/
-            VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
-            for (i=0; i < numOfSequenceParameterSets; i++) {
-                /**
-                 * Get the size of this element */
-                uiSize =
-                    (M4OSA_UInt32)VideoEditor3gpReader_BitStreamParserShowBits(
-                    pBitParserContext, 16);
-                uiTotalSizeOfSPS += uiSize;
-                VideoEditor3gpReader_BitStreamParserFlushBits(
-                    pBitParserContext, 16);
-                /**
-                 * Read the element (don't keep it, we only want size right now) */
-                for (j=0; j<uiSize; j++) {
-                    VideoEditor3gpReader_BitStreamParserFlushBits(
-                        pBitParserContext, 8);
-                }
-            }
-
-            /**
-             * PPS table */
-            numOfPictureParameterSets=(M4OSA_UInt8)
-                VideoEditor3gpReader_BitStreamParserShowBits(
-                    pBitParserContext, 8);
-            VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8);
-            for (i=0; i < numOfPictureParameterSets; i++) {
-                /**
-                 * Get the size of this element */
-                uiSize =
-                    (M4OSA_UInt32)VideoEditor3gpReader_BitStreamParserShowBits(
-                    pBitParserContext, 16);
-                uiTotalSizeOfPPS += uiSize;
-                VideoEditor3gpReader_BitStreamParserFlushBits(
-                    pBitParserContext, 16);
-                /**
-                 * Read the element (don't keep it, we only want size right now) */
-                for (j=0; j<uiSize; j++) {
-                    VideoEditor3gpReader_BitStreamParserFlushBits(
-                        pBitParserContext, 8);
-                }
-            }
-
-            /**
-             * Compute the size of the full buffer */
-            uiSpecInfoSize = sizeof(struct _avcSpecificInfo) +
-                numOfSequenceParameterSets * sizeof(struct _parameterSet)
-                + /**< size of the table of SPS elements */
-                numOfPictureParameterSets  * sizeof(struct _parameterSet)
-                + /**< size of the table of PPS elements */
-                uiTotalSizeOfSPS +
-                uiTotalSizeOfPPS;
-            /**
-             * Allocate the buffer */
-            pAvcSpecInfo =(struct _avcSpecificInfo*)M4OSA_32bitAlignedMalloc(
-                uiSpecInfoSize, M4READER_3GP,
-                (M4OSA_Char*)"MPEG-4 AVC DecoderSpecific");
-            if (M4OSA_NULL == pAvcSpecInfo) {
-                VideoEditor3gpReader_BitStreamParserCleanUp(pBitParserContext);
-                return M4ERR_ALLOC;
-            }
-
-            /**
-             * Set the pointers to the correct part of the buffer */
-            pAvcSpecInfo->m_nalUnitLength = nalUnitLength;
-            pAvcSpecInfo->m_numOfSequenceParameterSets =
-                numOfSequenceParameterSets;
-            pAvcSpecInfo->m_numOfPictureParameterSets =
-                numOfPictureParameterSets;
-
-            /* We place the SPS param sets table after m_pPictureParameterSet */
-            pAvcSpecInfo->m_pSequenceParameterSet= (struct _parameterSet*)(
-                (M4OSA_MemAddr8)(&pAvcSpecInfo->m_pPictureParameterSet) +
-                sizeof(pAvcSpecInfo->m_pPictureParameterSet));
-            /*We place the PPS param sets table after the SPS param sets table*/
pAvcSpecInfo->m_pPictureParameterSet = (struct _parameterSet*)( - (M4OSA_MemAddr8)(pAvcSpecInfo->m_pSequenceParameterSet) + - (numOfSequenceParameterSets * sizeof(struct _parameterSet))); - /**< The data will be placed after the PPS param sets table */ - pPos = (M4OSA_MemAddr8)pAvcSpecInfo->m_pPictureParameterSet + - (numOfPictureParameterSets * sizeof(struct _parameterSet)); - - /** - * reset the bit parser */ - VideoEditor3gpReader_BitStreamParserCleanUp(pBitParserContext); - } - } - - /** - * Second parsing to copy the data */ - if (M4OSA_NULL != pAvcSpecInfo) { - M4OSA_Int32 i,j; - - VideoEditor3gpReader_MPEG4BitStreamParserInit(&pBitParserContext, - pDecoderConfigLocal, decoderConfigSizeLocal); - - if (M4OSA_NULL == pBitParserContext) { - free(pAvcSpecInfo); - return M4ERR_ALLOC; - } - - VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8); - /* 8 bits -- configuration version */ - VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8); - /* 8 bits -- avc profile indication*/ - VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8); - /* 8 bits -- profile compatibility */ - VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8); - /* 8 bits -- avc level indication*/ - VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8); - /* m_nalUnitLength */ - VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8); - /* 3 bits -- reserved 111b -- 5 bits number of sequence parameter set*/ - - for (i=0; i < pAvcSpecInfo->m_numOfSequenceParameterSets; i++) { - pAvcSpecInfo->m_pSequenceParameterSet[i].m_length = - (M4OSA_UInt16)VideoEditor3gpReader_BitStreamParserShowBits( - pBitParserContext, 16); - VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext,16); - - pAvcSpecInfo->m_pSequenceParameterSet[i].m_pParameterSetUnit = - (M4OSA_UInt8*)pPos; /**< current position in the buffer */ - pPos += pAvcSpecInfo->m_pSequenceParameterSet[i].m_length; - /**< increment the position in the buffer */ - for (j=0; jm_pSequenceParameterSet[i].m_length;j++){ - pAvcSpecInfo->m_pSequenceParameterSet[i].m_pParameterSetUnit[j]= - (M4OSA_UInt8)VideoEditor3gpReader_BitStreamParserShowBits( - pBitParserContext, 8); - VideoEditor3gpReader_BitStreamParserFlushBits( - pBitParserContext, 8); - } - } - - VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext, 8); - /* number of pîcture parameter set*/ - - for (i=0; i < pAvcSpecInfo->m_numOfPictureParameterSets; i++) { - pAvcSpecInfo->m_pPictureParameterSet[i].m_length = - (M4OSA_UInt16)VideoEditor3gpReader_BitStreamParserShowBits( - pBitParserContext, 16); - VideoEditor3gpReader_BitStreamParserFlushBits(pBitParserContext,16); - - pAvcSpecInfo->m_pPictureParameterSet[i].m_pParameterSetUnit = - (M4OSA_UInt8*)pPos; /**< current position in the buffer */ - pPos += pAvcSpecInfo->m_pPictureParameterSet[i].m_length; - /**< increment the position in the buffer */ - for (j=0; jm_pPictureParameterSet[i].m_length; j++) { - pAvcSpecInfo->m_pPictureParameterSet[i].m_pParameterSetUnit[j] = - (M4OSA_UInt8)VideoEditor3gpReader_BitStreamParserShowBits( - pBitParserContext, 8); - VideoEditor3gpReader_BitStreamParserFlushBits( - pBitParserContext, 8); - } - } - VideoEditor3gpReader_BitStreamParserCleanUp(pBitParserContext); - pStreamHandler->m_decoderSpecificInfoSize = uiSpecInfoSize; - pStreamHandler->m_pDecoderSpecificInfo = (M4OSA_UInt8*)pAvcSpecInfo; - } - pStreamHandler->m_H264decoderSpecificInfoSize = decoderConfigSizeLocal; - pStreamHandler->m_pH264DecoderSpecificInfo = 
-/**
-********************************************************************************
-* @brief   Get the next stream found in the 3gp file
-* @note
-* @param   context:        (IN)  Context of the reader
-* @param   pMediaFamily:   (OUT) pointer to a user allocated
-*                                M4READER_MediaFamily that will be filled
-*                                with the media family of the found stream
-* @param   pStreamHandler: (OUT) pointer to StreamHandler that will be allocated
-*                                and filled with the found stream description
-* @return  M4NO_ERROR            there is no error
-* @return  M4ERR_BAD_CONTEXT     provided context is not a valid one
-* @return  M4ERR_PARAMETER       at least one parameter is not properly set
-* @return  M4WAR_NO_MORE_STREAM  no more available stream in the media
-********************************************************************************
-*/
-M4OSA_ERR VideoEditor3gpReader_getNextStreamHandler(M4OSA_Context context,
-        M4READER_MediaFamily *pMediaFamily,
-        M4_StreamHandler **pStreamHandler) {
-    VideoEditor3gpReader_Context* pC=(VideoEditor3gpReader_Context*)context;
-    M4OSA_ERR err = M4NO_ERROR;
-    M4SYS_StreamID streamIdArray[2];
-    M4SYS_StreamDescription streamDesc;
-    M4_AudioStreamHandler* pAudioStreamHandler;
-    M4_VideoStreamHandler* pVideoStreamHandler;
-    M4OSA_Int8 *DecoderSpecificInfo = M4OSA_NULL;
-    M4OSA_Int32 decoderSpecificInfoSize =0, maxAUSize = 0;
-
-    M4_StreamType streamType = M4DA_StreamTypeUnknown;
-    M4OSA_UInt8 temp, i, trackCount;
-    M4OSA_Bool haveAudio = M4OSA_FALSE;
-    M4OSA_Bool haveVideo = M4OSA_FALSE;
-    sp<MetaData> meta = NULL;
-    int64_t Duration = 0;
-    M4OSA_UInt8* DecoderSpecific = M4OSA_NULL;
-    uint32_t type;
-    const void *data;
-    size_t size;
-    const void *codec_specific_data;
-    size_t codec_specific_data_size;
-    M4OSA_Int32 ptempTime;
-    M4OSA_Int32 avgFPS=0;
-
-    ALOGV("VideoEditor3gpReader_getNextStreamHandler begin");
-
-    M4OSA_DEBUG_IF1((pC == 0), M4ERR_PARAMETER,
-        "VideoEditor3gpReader_getNextStreamHandler: invalid context");
-    M4OSA_DEBUG_IF1((pMediaFamily == 0), M4ERR_PARAMETER,
-        "getNextStreamHandler: invalid pointer to MediaFamily");
-    M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
-        "getNextStreamHandler: invalid pointer to StreamHandler");
-
-    trackCount = pC->mExtractor->countTracks();
-    temp = pC->mCurrTrack;
-
-    if(temp >= trackCount) {
-        ALOGV("VideoEditor3gpReader_getNextStreamHandler error = %d",
-            M4WAR_NO_MORE_STREAM);
-        return (M4WAR_NO_MORE_STREAM);
-    } else {
-        const char *mime;
-        meta = pC->mExtractor->getTrackMetaData(temp);
-        CHECK(meta->findCString(kKeyMIMEType, &mime));
-
-        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
-            pC->mVideoSource = pC->mExtractor->getTrack(temp);
-            pC->mVideoSource->start();
-
-            *pMediaFamily = M4READER_kMediaFamilyVideo;
-            haveVideo = true;
-            ALOGV("VideoEditor3gpReader_getNextStreamHandler getTrack called");
-            if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
-                streamType = M4DA_StreamTypeVideoMpeg4Avc;
-            } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
-                streamType = M4DA_StreamTypeVideoH263;
-            } else if (!strcasecmp(mime,
MEDIA_MIMETYPE_VIDEO_MPEG4)) { - streamType = M4DA_StreamTypeVideoMpeg4; - } else { - ALOGV("VideoEditor3gpReaderGetNextStreamHandler streamTypeNONE"); - } - ALOGV("VideoEditor3gpReader_getNextStreamHandler: stream type: %d ", - streamType); - - if(streamType != M4DA_StreamTypeUnknown) { - pC->mStreamType = streamType; - pC->mStreamId = pC->mCurrTrack; - - pVideoStreamHandler = (M4_VideoStreamHandler*)M4OSA_32bitAlignedMalloc - (sizeof(M4_VideoStreamHandler), M4READER_3GP, - (M4OSA_Char*)"M4_VideoStreamHandler"); - if (M4OSA_NULL == pVideoStreamHandler) { - return M4ERR_ALLOC; - } - pVideoStreamHandler->m_structSize=sizeof(M4_VideoStreamHandler); - - meta->findInt32(kKeyWidth, - (int32_t*)&(pVideoStreamHandler->m_videoWidth)); - meta->findInt32(kKeyHeight, - (int32_t*)&(pVideoStreamHandler->m_videoHeight)); - - (*pStreamHandler) = (M4_StreamHandler*)(pVideoStreamHandler); - meta->findInt64(kKeyDuration, - (int64_t*)&(Duration)); - ((*pStreamHandler)->m_duration) = - (int32_t)((Duration)/1000); // conversion to mS - pC->mMaxDuration = ((*pStreamHandler)->m_duration); - ALOGV("VideoEditor3gpReader_getNextStreamHandler m_duration %d", - (*pStreamHandler)->m_duration); - - off64_t fileSize = 0; - pC->mDataSource->getSize(&fileSize); - pC->mFileSize = fileSize; - - ALOGV("VideoEditor3gpReader_getNextStreamHandler m_fileSize %d", - pC->mFileSize); - - meta->findInt32(kKeyMaxInputSize, (int32_t*)&(maxAUSize)); - if(maxAUSize == 0) { - maxAUSize = 70000; - } - (*pStreamHandler)->m_maxAUSize = maxAUSize; - ALOGV("<<<<<<<<<< video: mMaxAUSize from MP4 extractor: %d", - (*pStreamHandler)->m_maxAUSize); - - ((M4_StreamHandler*)pVideoStreamHandler)->m_averageBitRate = - (pC->mFileSize * 8000)/pC->mMaxDuration; - ALOGV("VideoEditor3gpReader_getNextStreamHandler m_averageBitrate %d", - ((M4_StreamHandler*)pVideoStreamHandler)->m_averageBitRate); - - - meta->findInt32(kKeyFrameRate, - (int32_t*)&(avgFPS)); - ALOGV("<<<<<<<<<< video: Average FPS from MP4 extractor: %d", - avgFPS); - - pVideoStreamHandler->m_averageFrameRate =(M4OSA_Float) avgFPS; - ALOGV("<<<<<<<<<< video: Average FPS from MP4 extractor in FLOAT: %f", - pVideoStreamHandler->m_averageFrameRate); - - // Get the video rotation degree - int32_t rotationDegree; - if(!meta->findInt32(kKeyRotation, &rotationDegree)) { - rotationDegree = 0; - } - pVideoStreamHandler->videoRotationDegrees = rotationDegree; - - pC->mVideoStreamHandler = - (M4_StreamHandler*)(pVideoStreamHandler); - - /* Get the DSI info */ - if(M4DA_StreamTypeVideoH263 == streamType) { - if (meta->findData(kKeyD263, &type, &data, &size)) { - (*pStreamHandler)->m_decoderSpecificInfoSize = size; - if ((*pStreamHandler)->m_decoderSpecificInfoSize != 0) { - DecoderSpecific = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( - (*pStreamHandler)->m_decoderSpecificInfoSize, - M4READER_3GP,(M4OSA_Char*)"H263 DSI"); - if (M4OSA_NULL == DecoderSpecific) { - return M4ERR_ALLOC; - } - memcpy((void *)DecoderSpecific, - (void *)data, size); - (*pStreamHandler)->m_pDecoderSpecificInfo = - DecoderSpecific; - } - else { - (*pStreamHandler)->m_pDecoderSpecificInfo = - M4OSA_NULL; - (*pStreamHandler)->m_decoderSpecificInfoSize = 0; - } - (*pStreamHandler)->m_pESDSInfo = M4OSA_NULL; - (*pStreamHandler)->m_ESDSInfoSize = 0; - (*pStreamHandler)->m_pH264DecoderSpecificInfo = M4OSA_NULL; - (*pStreamHandler)->m_H264decoderSpecificInfoSize = 0; - } else { - ALOGV("VE_getNextStreamHandler: H263 dsi not found"); - (*pStreamHandler)->m_pDecoderSpecificInfo = M4OSA_NULL; - (*pStreamHandler)->m_decoderSpecificInfoSize = 0; 
- (*pStreamHandler)->m_H264decoderSpecificInfoSize = 0; - (*pStreamHandler)->m_pH264DecoderSpecificInfo = - M4OSA_NULL; - (*pStreamHandler)->m_pESDSInfo = M4OSA_NULL; - (*pStreamHandler)->m_ESDSInfoSize = 0; - } - } - else if(M4DA_StreamTypeVideoMpeg4Avc == streamType) { - if(meta->findData(kKeyAVCC, &type, &data, &size)) { - decoderSpecificInfoSize = size; - if (decoderSpecificInfoSize != 0) { - DecoderSpecificInfo = (M4OSA_Int8*)M4OSA_32bitAlignedMalloc( - decoderSpecificInfoSize, M4READER_3GP, - (M4OSA_Char*)"H264 DecoderSpecific" ); - if (M4OSA_NULL == DecoderSpecificInfo) { - ALOGV("VideoEditor3gp_getNextStream is NULL "); - return M4ERR_ALLOC; - } - memcpy((void *)DecoderSpecificInfo, - (void *)data, decoderSpecificInfoSize); - } else { - ALOGV("DSI Size %d", decoderSpecificInfoSize); - DecoderSpecificInfo = M4OSA_NULL; - } - } - (*pStreamHandler)->m_pESDSInfo = M4OSA_NULL; - (*pStreamHandler)->m_ESDSInfoSize = 0; - - err = VideoEditor3gpReader_AnalyseAvcDsi(*pStreamHandler, - (M4OSA_Int32*)DecoderSpecificInfo, decoderSpecificInfoSize); - - if (M4NO_ERROR != err) { - return err; - } - ALOGV("decsize %d, h264decsize %d: %d", (*pStreamHandler)->\ - m_decoderSpecificInfoSize, (*pStreamHandler)->\ - m_H264decoderSpecificInfoSize); - - if(M4OSA_NULL != DecoderSpecificInfo) { - free(DecoderSpecificInfo); - DecoderSpecificInfo = M4OSA_NULL; - } - } else if( (M4DA_StreamTypeVideoMpeg4 == streamType) ) { - if (meta->findData(kKeyESDS, &type, &data, &size)) { - ESDS esds((const char *)data, size); - CHECK_EQ(esds.InitCheck(), (status_t)OK); - - (*pStreamHandler)->m_ESDSInfoSize = size; - (*pStreamHandler)->m_pESDSInfo = (M4OSA_UInt8*)\ - M4OSA_32bitAlignedMalloc((*pStreamHandler)->m_ESDSInfoSize, - M4READER_3GP, (M4OSA_Char*)"M4V DecoderSpecific" ); - if (M4OSA_NULL == (*pStreamHandler)->m_pESDSInfo) { - return M4ERR_ALLOC; - } - memcpy((void *)(*pStreamHandler)->\ - m_pESDSInfo, (void *)data, size); - - esds.getCodecSpecificInfo(&codec_specific_data, - &codec_specific_data_size); - ALOGV("VE MP4 dsisize: %d, %x", codec_specific_data_size, - codec_specific_data); - - (*pStreamHandler)->m_decoderSpecificInfoSize = - codec_specific_data_size; - if ((*pStreamHandler)->m_decoderSpecificInfoSize != 0) { - DecoderSpecific = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( - (*pStreamHandler)->m_decoderSpecificInfoSize, - M4READER_3GP, (M4OSA_Char*)" DecoderSpecific" ); - if (M4OSA_NULL == DecoderSpecific) { - return M4ERR_ALLOC; - } - memcpy((void *)DecoderSpecific, - (void *)codec_specific_data, - codec_specific_data_size); - (*pStreamHandler)->m_pDecoderSpecificInfo = - DecoderSpecific; - } - else { - (*pStreamHandler)->m_pDecoderSpecificInfo = - M4OSA_NULL; - } - (*pStreamHandler)->m_pH264DecoderSpecificInfo = - M4OSA_NULL; - (*pStreamHandler)->m_H264decoderSpecificInfoSize = 0; - } - } else { - ALOGV("VideoEditor3gpReader_getNextStream NO video stream"); - return M4ERR_READER_UNKNOWN_STREAM_TYPE; - } - } - else { - ALOGV("VideoEditor3gpReader_getNextStream NO video stream"); - return M4ERR_READER_UNKNOWN_STREAM_TYPE; - } - - } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) { - ALOGV("VideoEditor3gpReader_getNextStream audio getTrack called"); - pC->mAudioSource = pC->mExtractor->getTrack(pC->mCurrTrack); - pC->mAudioSource->start(); - *pMediaFamily = M4READER_kMediaFamilyAudio; - - if(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { - streamType = M4DA_StreamTypeAudioAmrNarrowBand; - } else if(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { - streamType = M4DA_StreamTypeAudioAmrWideBand; - } 
- else if(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { - streamType = M4DA_StreamTypeAudioAac; - } else { - ALOGV("VideoEditor3gpReader_getNextStrea streamtype Unknown "); - } - if(streamType != M4DA_StreamTypeUnknown) { - pC->mStreamType = streamType; - pC->mStreamId = pC->mCurrTrack; - - ALOGV("VE streamtype %d ,id %d", streamType, pC->mCurrTrack); - - pAudioStreamHandler = (M4_AudioStreamHandler*)M4OSA_32bitAlignedMalloc - (sizeof(M4_AudioStreamHandler), M4READER_3GP, - (M4OSA_Char*)"M4_AudioStreamHandler"); - if (M4OSA_NULL == pAudioStreamHandler) { - return M4ERR_ALLOC; - } - pAudioStreamHandler->m_structSize=sizeof(M4_AudioStreamHandler); - pAudioStreamHandler->m_byteSampleSize = 0; - pAudioStreamHandler->m_nbChannels = 0; - pAudioStreamHandler->m_samplingFrequency= 0; - pAudioStreamHandler->m_byteFrameLength = 0; - - (*pStreamHandler) = (M4_StreamHandler*)(pAudioStreamHandler); - pC->mAudioStreamHandler = - (M4_StreamHandler*)(pAudioStreamHandler); - (*pStreamHandler)->m_averageBitRate = 0; - haveAudio = true; - pC->mAudioStreamHandler=(M4_StreamHandler*)pAudioStreamHandler; - pC->mAudioStreamHandler->m_pESDSInfo = M4OSA_NULL; - pC->mAudioStreamHandler->m_ESDSInfoSize = 0; - - meta->findInt32(kKeyMaxInputSize, (int32_t*)&(maxAUSize)); - if(maxAUSize == 0) { - maxAUSize = 70000; - } - (*pStreamHandler)->m_maxAUSize = maxAUSize; - ALOGV("VE Audio mMaxAUSize from MP4 extractor: %d", maxAUSize); - } - if((M4DA_StreamTypeAudioAmrNarrowBand == streamType) || - (M4DA_StreamTypeAudioAmrWideBand == streamType)) { - M4OSA_UInt32 freqIndex = 0; /**< AMR NB */ - M4OSA_UInt32 modeSet; - M4OSA_UInt32 i; - M4OSA_Context pBitParserContext = M4OSA_NULL; - - if(M4DA_StreamTypeAudioAmrWideBand == streamType) { - freqIndex = 1; /**< AMR WB */ - } - - if (meta->findData(kKeyESDS, &type, &data, &size)) { - ESDS esds((const char *)data, size); - CHECK_EQ(esds.InitCheck(), (status_t)OK); - - esds.getCodecSpecificInfo(&codec_specific_data, - &codec_specific_data_size); - (*pStreamHandler)->m_decoderSpecificInfoSize = - codec_specific_data_size; - - if ((*pStreamHandler)->m_decoderSpecificInfoSize != 0) { - DecoderSpecific = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( - (*pStreamHandler)->m_decoderSpecificInfoSize, - M4READER_3GP, (M4OSA_Char*)"AMR DecoderSpecific" ); - if (M4OSA_NULL == DecoderSpecific) { - return M4ERR_ALLOC; - } - memcpy((void *)DecoderSpecific, - (void *)codec_specific_data, - codec_specific_data_size); - (*pStreamHandler)->m_pDecoderSpecificInfo = - DecoderSpecific; - } else { - (*pStreamHandler)->m_pDecoderSpecificInfo = M4OSA_NULL; - } - } else { - M4OSA_UChar AmrDsi[] = - {'P','H','L','P',0x00, 0x00, 0x80, 0x00, 0x01,}; - (*pStreamHandler)->m_decoderSpecificInfoSize = 9; - DecoderSpecific = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( - (*pStreamHandler)->m_decoderSpecificInfoSize, - M4READER_3GP, (M4OSA_Char*)"PHLP DecoderSpecific" ); - if (M4OSA_NULL == DecoderSpecific) { - return M4ERR_ALLOC; - } - if(freqIndex ==0) { - AmrDsi[8] = 0x01; - } else { - AmrDsi[8] = 0x02; - } - for(i = 0; i< 9; i++) { - DecoderSpecific[i] = AmrDsi[i]; - } - (*pStreamHandler)->m_pDecoderSpecificInfo = DecoderSpecific; - } - (*pStreamHandler)->m_averageBitRate = - VideoEditor3gpReader_AmrBitRate[freqIndex][7]; - } else if((M4DA_StreamTypeAudioAac == streamType)) { - if (meta->findData(kKeyESDS, &type, &data, &size)) { - ESDS esds((const char *)data, size); - CHECK_EQ(esds.InitCheck(), (status_t)OK); - - (*pStreamHandler)->m_ESDSInfoSize = size; - (*pStreamHandler)->m_pESDSInfo = 
(M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( - (*pStreamHandler)->m_ESDSInfoSize, M4READER_3GP, - (M4OSA_Char*)"AAC DecoderSpecific" ); - if (M4OSA_NULL == (*pStreamHandler)->m_pESDSInfo) { - return M4ERR_ALLOC; - } - memcpy((void *)(*pStreamHandler)->m_pESDSInfo, - (void *)data, size); - esds.getCodecSpecificInfo(&codec_specific_data, - &codec_specific_data_size); - - ALOGV("VEdsi %d,%x",codec_specific_data_size, - codec_specific_data); - - (*pStreamHandler)->m_decoderSpecificInfoSize = - codec_specific_data_size; - if ((*pStreamHandler)->m_decoderSpecificInfoSize != 0) { - DecoderSpecific = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( - (*pStreamHandler)->m_decoderSpecificInfoSize, - M4READER_3GP, (M4OSA_Char*)"AAC DecoderSpecific" ); - if (M4OSA_NULL == DecoderSpecific) { - return M4ERR_ALLOC; - } - memcpy((void *)DecoderSpecific, - (void *)codec_specific_data, - codec_specific_data_size); - (*pStreamHandler)->m_pDecoderSpecificInfo = - DecoderSpecific; - } else { - (*pStreamHandler)->m_pDecoderSpecificInfo = M4OSA_NULL; - } - } - } else { - ALOGV("VideoEditor3gpReader_getNextStream mStreamType: none "); - return M4ERR_READER_UNKNOWN_STREAM_TYPE; - } - } else { - ALOGV("VE noaudio-video stream:pC->mCurrTrack = %d ",pC->mCurrTrack); - pC->mCurrTrack++; //Increment current track to get the next track - return M4ERR_READER_UNKNOWN_STREAM_TYPE; - } - ALOGV("VE StreamType: %d, stremhandler %x",streamType, *pStreamHandler ); - (*pStreamHandler)->m_streamType = streamType; - (*pStreamHandler)->m_streamId = pC->mStreamId; - (*pStreamHandler)->m_pUserData = M4OSA_NULL; - (*pStreamHandler)->m_structSize = sizeof(M4_StreamHandler); - (*pStreamHandler)->m_bStreamIsOK = M4OSA_TRUE; - - meta->findInt64(kKeyDuration, - (int64_t*)&(Duration)); - - (*pStreamHandler)->m_duration = (int32_t)(Duration / 1000); - - pC->mMaxDuration = ((*pStreamHandler)->m_duration); - ALOGV("VE str duration duration: %d ", (*pStreamHandler)->m_duration); - - /* In AAC case: Put the first AU in pAudioStreamHandler->m_pUserData - *since decoder has to know if stream contains SBR data(Implicit sig) */ - if(M4DA_StreamTypeAudioAac == (*pStreamHandler)->m_streamType) { - M4READER_AudioSbrUserdata* pAudioSbrUserdata; - - pAudioSbrUserdata = (M4READER_AudioSbrUserdata*)M4OSA_32bitAlignedMalloc( - sizeof(M4READER_AudioSbrUserdata),M4READER_3GP, - (M4OSA_Char*)"M4READER_AudioSbrUserdata"); - if (M4OSA_NULL == pAudioSbrUserdata) { - err = M4ERR_ALLOC; - goto Error; - } - (*pStreamHandler)->m_pUserData = pAudioSbrUserdata; - pAudioSbrUserdata->m_bIsSbrEnabled = M4OSA_FALSE; - - pAudioSbrUserdata->m_pFirstAU = (M4_AccessUnit*)M4OSA_32bitAlignedMalloc( - sizeof(M4_AccessUnit),M4READER_3GP, (M4OSA_Char*)"1st AAC AU"); - if (M4OSA_NULL == pAudioSbrUserdata->m_pFirstAU) { - pAudioSbrUserdata->m_pAacDecoderUserConfig = M4OSA_NULL; - err = M4ERR_ALLOC; - goto Error; - } - pAudioSbrUserdata->m_pAacDecoderUserConfig = (M4_AacDecoderConfig*)\ - M4OSA_32bitAlignedMalloc(sizeof(M4_AacDecoderConfig),M4READER_3GP, - (M4OSA_Char*)"m_pAacDecoderUserConfig"); - if (M4OSA_NULL == pAudioSbrUserdata->m_pAacDecoderUserConfig) { - err = M4ERR_ALLOC; - goto Error; - } - } - if(M4DA_StreamTypeAudioAac == (*pStreamHandler)->m_streamType) { - M4_AudioStreamHandler* pAudioStreamHandler = - (M4_AudioStreamHandler*)(*pStreamHandler); - M4READER_AudioSbrUserdata* pUserData = (M4READER_AudioSbrUserdata*)\ - (pAudioStreamHandler->m_basicProperties.m_pUserData); - - err = VideoEditor3gpReader_fillAuStruct(pC, (*pStreamHandler), - (M4_AccessUnit*)pUserData->m_pFirstAU); - if 
(M4NO_ERROR != err) { - goto Error; - } - err = VideoEditor3gpReader_getNextAu(pC, (*pStreamHandler), - (M4_AccessUnit*)pUserData->m_pFirstAU); - - /* - * 1. "M4WAR_NO_MORE_AU == err" indicates that there is no more - * access unit from the current track. In other words, there - * is only a single access unit from the current track, and - * the parsing of this track has reached EOS. The reason why - * the first access unit needs to be parsed here is because for - * some audio codec (like AAC), the very first access unit - * must be decoded before its configuration/encoding parameters - * (such as # of channels and sample rate) can be correctly - * determined. - * - * 2. "trackCount > pC->mCurrTrack" indicates that there are other - * tracks to be parsed, in addition to the current track. - * - * When both conditions 1 & 2 hold, other tracks should be - * parsed. Thus, we should not bail out. - */ - if (M4WAR_NO_MORE_AU == err && trackCount > pC->mCurrTrack) { - err = M4NO_ERROR; - } - - if (M4NO_ERROR != err) { - goto Error; - } - err = VideoEditor3gpReader_reset(pC, (*pStreamHandler)); - if (M4NO_ERROR != err) { - goto Error; - } - } - } - pC->mCurrTrack++; //Increment the current track to get next track - ALOGV("pC->mCurrTrack = %d",pC->mCurrTrack); - - if (!haveAudio && !haveVideo) { - *pMediaFamily=M4READER_kMediaFamilyUnknown; - return M4ERR_READER_UNKNOWN_STREAM_TYPE; - } -Error: - ALOGV("VideoEditor3gpReader_getNextStreamHandler end error = %d",err); - return err; -} - -M4OSA_ERR VideoEditor3gpReader_getPrevRapTime(M4OSA_Context context, - M4_StreamHandler *pStreamHandler, M4OSA_Int32* pTime) -{ - VideoEditor3gpReader_Context *pC = (VideoEditor3gpReader_Context*)context; - M4OSA_ERR err = M4NO_ERROR; - MediaBuffer *mMediaBuffer = M4OSA_NULL; - MediaSource::ReadOptions options; - M4OSA_Time time64; - int64_t tempTime64 = 0; - status_t error; - - ALOGV("VideoEditor3gpReader_getPrevRapTime begin"); - - M4OSA_DEBUG_IF1((pC == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_getPrevRapTime: invalid context"); - M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_getPrevRapTime invalid pointer to StreamHandler"); - M4OSA_DEBUG_IF1((pTime == 0), M4ERR_PARAMETER, - "VideoEditor3gpReader_getPrevRapTime: invalid time pointer"); - if (*pTime == (pStreamHandler->m_duration)) { - *pTime -= 1; - } - - time64 = (M4OSA_Time)*pTime * 1000; - - ALOGV("VideoEditor3gpReader_getPrevRapTime seek time: %ld",time64); - options.setSeekTo(time64, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC); - error = pC->mVideoSource->read(&mMediaBuffer, &options); - if (error != OK) { - //Can not get the previous Sync. - //Must be end of stream. 
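-        //A failed SEEK_PREVIOUS_SYNC read means no earlier sync sample
-        //exists, so report M4WAR_NO_MORE_AU instead of a fatal error.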
- return M4WAR_NO_MORE_AU; - } - - mMediaBuffer->meta_data()->findInt64(kKeyTime, (int64_t*)&tempTime64); - ALOGV("VideoEditor3gpReader_getPrevRapTime read time %ld, %x", tempTime64, - mMediaBuffer); - - *pTime = (M4OSA_Int32)(tempTime64 / 1000); - - if(mMediaBuffer != M4OSA_NULL) { - ALOGV(" mMediaBuffer size = %d length %d", mMediaBuffer->size(), - mMediaBuffer->range_length()); - mMediaBuffer->release(); - mMediaBuffer = M4OSA_NULL; - } - options.clearSeekTo(); - - if(error != OK) { - ALOGV("VideoEditor3gpReader_getPrevRapTime end \ - M4WAR_READER_INFORMATION_NOT_PRESENT"); - return M4WAR_READER_INFORMATION_NOT_PRESENT; - } else { - ALOGV("VideoEditor3gpReader_getPrevRapTime end: err %x", err); - err = M4NO_ERROR; - return err; - } -} - -extern "C" { -M4OSA_ERR VideoEditor3gpReader_getInterface(M4READER_MediaType *pMediaType, - M4READER_GlobalInterface **pRdrGlobalInterface, - M4READER_DataInterface **pRdrDataInterface) { - - M4OSA_ERR err = M4NO_ERROR; - - VIDEOEDITOR_CHECK(M4OSA_NULL != pMediaType, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pRdrGlobalInterface, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pRdrDataInterface, M4ERR_PARAMETER); - - ALOGV("VideoEditor3gpReader_getInterface begin"); - ALOGV("VideoEditor3gpReader_getInterface %d 0x%x 0x%x", *pMediaType, - *pRdrGlobalInterface,*pRdrDataInterface); - - SAFE_MALLOC(*pRdrGlobalInterface, M4READER_GlobalInterface, 1, - "VideoEditor3gpReader_getInterface"); - SAFE_MALLOC(*pRdrDataInterface, M4READER_DataInterface, 1, - "VideoEditor3gpReader_getInterface"); - - *pMediaType = M4READER_kMediaType3GPP; - - (*pRdrGlobalInterface)->m_pFctCreate = VideoEditor3gpReader_create; - (*pRdrGlobalInterface)->m_pFctDestroy = VideoEditor3gpReader_destroy; - (*pRdrGlobalInterface)->m_pFctOpen = VideoEditor3gpReader_open; - (*pRdrGlobalInterface)->m_pFctClose = VideoEditor3gpReader_close; - (*pRdrGlobalInterface)->m_pFctGetOption = VideoEditor3gpReader_getOption; - (*pRdrGlobalInterface)->m_pFctSetOption = VideoEditor3gpReader_setOption; - (*pRdrGlobalInterface)->m_pFctGetNextStream = - VideoEditor3gpReader_getNextStreamHandler; - (*pRdrGlobalInterface)->m_pFctFillAuStruct = - VideoEditor3gpReader_fillAuStruct; - (*pRdrGlobalInterface)->m_pFctStart = M4OSA_NULL; - (*pRdrGlobalInterface)->m_pFctStop = M4OSA_NULL; - (*pRdrGlobalInterface)->m_pFctJump = VideoEditor3gpReader_jump; - (*pRdrGlobalInterface)->m_pFctReset = VideoEditor3gpReader_reset; - (*pRdrGlobalInterface)->m_pFctGetPrevRapTime = - VideoEditor3gpReader_getPrevRapTime; - (*pRdrDataInterface)->m_pFctGetNextAu = VideoEditor3gpReader_getNextAu; - (*pRdrDataInterface)->m_readerContext = M4OSA_NULL; - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditor3gpReader_getInterface no error"); - } else { - SAFE_FREE(*pRdrGlobalInterface); - SAFE_FREE(*pRdrDataInterface); - - ALOGV("VideoEditor3gpReader_getInterface ERROR 0x%X", err); - } - ALOGV("VideoEditor3gpReader_getInterface end"); - return err; -} - -} /* extern "C" */ - -} /* namespace android */ - - diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp deleted file mode 100644 index 2e8c5d1..0000000 --- a/frameworks/videoedit/stagefrightshells/VideoEditorAudioDecoder.cpp +++ /dev/null @@ -1,1087 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. 
- * The source code contained or described herein and all documents related
- * to the source code ("Material") are owned by Intel Corporation or its
- * suppliers or licensors. Title to the Material remains with Intel
- * Corporation or its suppliers and licensors. The Material contains trade
- * secrets and proprietary and confidential information of Intel or its
- * suppliers and licensors. The Material is protected by worldwide copyright
- * and trade secret laws and treaty provisions. No part of the Material may
- * be used, copied, reproduced, modified, published, uploaded, posted,
- * transmitted, distributed, or disclosed in any way without Intel's prior
- * express written permission.
- *
- * No license under any patent, copyright, trade secret or other intellectual
- * property right is granted to or conferred upon you by disclosure or delivery
- * of the Materials, either expressly, by implication, inducement, estoppel or
- * otherwise. Any license under such intellectual property rights must be express
- * and approved by Intel in writing.
- ************************************************************************************/
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
-*************************************************************************
-* @file VideoEditorAudioDecoder.cpp
-* @brief StageFright shell Audio Decoder
-*************************************************************************
-*/
-
-#define LOG_NDEBUG 1
-#define LOG_TAG "VIDEOEDITOR_AUDIODECODER"
-
-#include "M4OSA_Debug.h"
-#include "VideoEditorAudioDecoder.h"
-#include "VideoEditorUtils.h"
-#include "M4MCS_InternalTypes.h"
-
-#include "utils/Log.h"
-#include "utils/Vector.h"
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/OMXClient.h>
-#include <media/stagefright/OMXCodec.h>
-
-/********************
- *   DEFINITIONS    *
- ********************/
-// Version
-#define VIDEOEDITOR_AUDIO_DECODER_VERSION_MAJOR 1
-#define VIDEOEDITOR_AUDIO_DECODER_VERSION_MINOR 0
-#define VIDEOEDITOR_AUDIO_DECODER_VERSION_REV 0
-
-// Force using software decoder as engine does not support prefetch
-#define VIDEOEDITOR_FORCECODEC kSoftwareCodecsOnly
-
-namespace android {
-
-struct VideoEditorAudioDecoderSource : public MediaSource {
-    public:
-        static sp<VideoEditorAudioDecoderSource> Create(
-                const sp<MetaData>& format, void *decoderShellContext);
-        virtual status_t start(MetaData *params = NULL);
-        virtual status_t stop();
-        virtual sp<MetaData> getFormat();
-        virtual status_t read(MediaBuffer **buffer,
-                const ReadOptions *options = NULL);
-        virtual void storeBuffer(MediaBuffer *buffer);
-
-    protected:
-        virtual ~VideoEditorAudioDecoderSource();
-
-    private:
-        enum State {
-            CREATED,
-            STARTED,
-            ERROR
-        };
-        VideoEditorAudioDecoderSource(const sp<MetaData>& format,
-                void *decoderShellContext);
-        sp<MetaData> mFormat;
-        Vector<MediaBuffer*> mBuffers;
-        Mutex mLock;  // protects mBuffers
-        bool mIsEOS;
-        State mState;
-        void* mDecShellContext;
-        // Don't call me.
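-        // (Copy constructor and assignment operator are declared private
-        // and never defined, so accidental copies fail at link time.)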
-        VideoEditorAudioDecoderSource(const VideoEditorAudioDecoderSource&);
-        VideoEditorAudioDecoderSource& operator=(
-                const VideoEditorAudioDecoderSource &);
-};
-
-/**
- ******************************************************************************
- * structure VideoEditorAudioDecoder_Context
- * @brief This structure defines the context of the StageFright audio decoder
- * shell
- ******************************************************************************
-*/
-
-typedef struct {
-    M4AD_Type mDecoderType;
-    M4_AudioStreamHandler* mAudioStreamHandler;
-    sp<VideoEditorAudioDecoderSource> mDecoderSource;
-    OMXClient mClient;
-    sp<MediaSource> mDecoder;
-    int32_t mNbOutputChannels;
-    uint32_t mNbInputFrames;
-    uint32_t mNbOutputFrames;
-    M4READER_DataInterface *m_pReader;
-    M4_AccessUnit* m_pNextAccessUnitToDecode;
-    M4OSA_ERR readerErrCode;
-    int32_t timeStampMs;
-
-} VideoEditorAudioDecoder_Context;
-
-sp<VideoEditorAudioDecoderSource> VideoEditorAudioDecoderSource::Create(
-        const sp<MetaData>& format, void *decoderShellContext) {
-
-    sp<VideoEditorAudioDecoderSource> aSource =
-        new VideoEditorAudioDecoderSource(format, decoderShellContext);
-
-    return aSource;
-}
-
-VideoEditorAudioDecoderSource::VideoEditorAudioDecoderSource(
-        const sp<MetaData>& format, void* decoderShellContext):
-    mFormat(format),
-    mIsEOS(false),
-    mState(CREATED),
-    mDecShellContext(decoderShellContext) {
-}
-
-VideoEditorAudioDecoderSource::~VideoEditorAudioDecoderSource() {
-
-    if( STARTED == mState ) {
-        stop();
-    }
-}
-
-status_t VideoEditorAudioDecoderSource::start(MetaData *meta) {
-    status_t err = OK;
-
-    if( CREATED != mState ) {
-        ALOGV("VideoEditorAudioDecoderSource::start: invalid state %d", mState);
-        return UNKNOWN_ERROR;
-    }
-
-    mState = STARTED;
-
-cleanUp:
-    ALOGV("VideoEditorAudioDecoderSource::start END (0x%x)", err);
-    return err;
-}
-
-status_t VideoEditorAudioDecoderSource::stop() {
-    Mutex::Autolock autolock(mLock);
-    status_t err = OK;
-
-    ALOGV("VideoEditorAudioDecoderSource::stop begin");
-
-    if( STARTED != mState ) {
-        ALOGV("VideoEditorAudioDecoderSource::stop: invalid state %d", mState);
-        return UNKNOWN_ERROR;
-    }
-
-    if (!mBuffers.empty()) {
-        int n = mBuffers.size();
-        for (int i = 0; i < n; i++) {
-            mBuffers.itemAt(i)->release();
-        }
-        ALOGW("VideoEditorAudioDecoderSource::stop : %d buffer remained", n);
-        mBuffers.clear();
-    }
-
-    mState = CREATED;
-
-    ALOGV("VideoEditorAudioDecoderSource::stop END (0x%x)", err);
-    return err;
-}
-
-sp<MetaData> VideoEditorAudioDecoderSource::getFormat() {
-
-    ALOGV("VideoEditorAudioDecoderSource::getFormat");
-    return mFormat;
-}
-
-static MediaBuffer* readBufferFromReader(
-        VideoEditorAudioDecoder_Context* pDecContext) {
-    M4OSA_ERR lerr = M4NO_ERROR;
-    M4_AccessUnit* pAccessUnit = pDecContext->m_pNextAccessUnitToDecode;
-
-    // Get next AU from reader.
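-    // The decoder shell is pull-based: each call here fetches exactly one
-    // access unit from the 3gp reader via the M4READER data interface, so
-    // nothing is prefetched on the decoder side.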
- lerr = pDecContext->m_pReader->m_pFctGetNextAu( - pDecContext->m_pReader->m_readerContext, - (M4_StreamHandler*)pDecContext->mAudioStreamHandler, - pAccessUnit); - - if (lerr == M4WAR_NO_MORE_AU) { - ALOGV("readBufferFromReader : EOS"); - return NULL; - } - - pDecContext->timeStampMs = pAccessUnit->m_CTS; - - MediaBuffer* newBuffer = new MediaBuffer((size_t)pAccessUnit->m_size); - memcpy((void *)((M4OSA_Int8*)newBuffer->data() + newBuffer->range_offset()), - (void *)pAccessUnit->m_dataAddress, pAccessUnit->m_size); - newBuffer->meta_data()->setInt64(kKeyTime, (pAccessUnit->m_CTS * 1000LL)); - return newBuffer; -} - -status_t VideoEditorAudioDecoderSource::read(MediaBuffer **buffer, - const ReadOptions *options) { - Mutex::Autolock autolock(mLock); - MediaSource::ReadOptions readOptions; - - VideoEditorAudioDecoder_Context* pDecContext = - (VideoEditorAudioDecoder_Context *)mDecShellContext; - - if ( STARTED != mState ) { - ALOGV("VideoEditorAudioDecoderSource::read invalid state %d", mState); - return UNKNOWN_ERROR; - } - - // Get a buffer from the reader if we don't have any - if(mBuffers.empty()) { - MediaBuffer* newBuffer = readBufferFromReader(pDecContext); - if (!newBuffer) { - *buffer = NULL; - pDecContext->readerErrCode = M4WAR_NO_MORE_AU; - return ERROR_END_OF_STREAM; - } - mBuffers.push(newBuffer); - } - *buffer = mBuffers.itemAt(0); - mBuffers.removeAt(0); - - return OK; -} - -void VideoEditorAudioDecoderSource::storeBuffer(MediaBuffer *buffer) { - Mutex::Autolock autolock(mLock); - VideoEditorAudioDecoder_Context* pDecContext = - (VideoEditorAudioDecoder_Context *)mDecShellContext; - - ALOGV("VideoEditorAudioDecoderSource::storeBuffer begin"); - - // If the user didn't give us a buffer, get it from the reader. - if(buffer == NULL) { - MediaBuffer* newBuffer = readBufferFromReader(pDecContext); - if (!newBuffer) { - pDecContext->readerErrCode = M4WAR_NO_MORE_AU; - return; - } - buffer = newBuffer; - } - - mBuffers.push(buffer); - ALOGV("VideoEditorAudioDecoderSource::storeBuffer END"); -} - -/******************** - * TOOLS * - ********************/ - -M4OSA_ERR VideoEditorAudioDecoder_getBits(M4OSA_Int8* pData, - M4OSA_UInt32 dataSize, M4OSA_UInt8 nbBits, M4OSA_Int32* pResult, - M4OSA_UInt32* pOffset) { - - M4OSA_ERR err = M4NO_ERROR; - M4OSA_UInt32 startByte = 0; - M4OSA_UInt32 startBit = 0; - M4OSA_UInt32 endByte = 0; - M4OSA_UInt32 endBit = 0; - M4OSA_UInt32 currentByte = 0; - M4OSA_UInt32 result = 0; - M4OSA_UInt32 ui32Tmp = 0; - M4OSA_UInt32 ui32Mask = 0; - - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pData, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pOffset, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(32 >= nbBits, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK((*pOffset + nbBits) <= 8*dataSize, M4ERR_PARAMETER); - - ALOGV("VideoEditorAudioDecoder_getBits begin"); - - startByte = (*pOffset) >> 3; - endByte = (*pOffset + nbBits) >> 3; - startBit = (*pOffset) % 8; - endBit = (*pOffset + nbBits) % 8; - currentByte = startByte; - - // Extract the requested nunber of bits from memory - while( currentByte <= endByte) { - ui32Mask = 0x000000FF; - if( currentByte == startByte ) { - ui32Mask >>= startBit; - } - ui32Tmp = ui32Mask & ((M4OSA_UInt32)pData[currentByte]); - if( currentByte == endByte ) { - ui32Tmp >>= (8-endBit); - result <<= endBit; - } else { - result <<= 8; - } - result |= ui32Tmp; - currentByte++; - } - - *pResult = result; - *pOffset += nbBits; - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioDecoder_getBits no error"); - } else { - 
ALOGV("VideoEditorAudioDecoder_getBits ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioDecoder_getBits end"); - return err; -} - - -#define FREQ_TABLE_SIZE 16 -const M4OSA_UInt32 AD_AAC_FREQ_TABLE[FREQ_TABLE_SIZE] = - {96000, 88200, 64000, 48000, 44100, - 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350, 0, 0, 0}; - - -M4OSA_ERR VideoEditorAudioDecoder_parse_AAC_DSI(M4OSA_Int8* pDSI, - M4OSA_UInt32 dsiSize, AAC_DEC_STREAM_PROPS* pProperties) { - - M4OSA_ERR err = M4NO_ERROR; - M4OSA_UInt32 offset = 0; - M4OSA_Int32 result = 0; - M4OSA_Int32 extensionAudioObjectType = 0; - - ALOGV("VideoEditorAudioDecoder_parse_AAC_DSI begin"); - - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pDSI, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pProperties, M4ERR_PARAMETER); - - // Get the object type - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 5, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - switch( result ) { - case 2: - /* Audio Object Type is 2 (AAC Low Complexity) */ - pProperties->aPSPresent = 0; - pProperties->aSBRPresent = 0; - break; - case 5: - /* Audio Object Type is 5 (Spectral Band Replication) */ - pProperties->aPSPresent = 0; - pProperties->aSBRPresent = 1; - break; - case 29: - /* Audio Object Type is 29 (Parametric Stereo) */ - pProperties->aPSPresent = 1; - pProperties->aSBRPresent = 1; - break; - default: - ALOGV("parse_AAC_DSI ERROR : object type %d is not supported", - result); - VIDEOEDITOR_CHECK(!"invalid AAC object type", M4ERR_BAD_OPTION_ID); - break; - } - pProperties->aAudioObjectType = (M4OSA_Int32)result; - - // Get the frequency index - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 4, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - if (result == 0x0f) { - // Get the frequency index again - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 24, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - } - VIDEOEDITOR_CHECK((0 <= result) && (FREQ_TABLE_SIZE > result), - M4ERR_PARAMETER); - pProperties->aSampFreq = AD_AAC_FREQ_TABLE[result]; - pProperties->aExtensionSampFreq = 0; - - // Get the number of channels - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 4, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - pProperties->aNumChan = (M4OSA_UInt32)result; - - if (pProperties->aAudioObjectType == 5) { - extensionAudioObjectType = pProperties->aAudioObjectType; - // Get extension sampling frequency index - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 4, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - if (result == 0x0f) { - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 24, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - } - VIDEOEDITOR_CHECK((0 <= result) && (FREQ_TABLE_SIZE > result), - M4ERR_PARAMETER); - pProperties->aExtensionSampFreq = AD_AAC_FREQ_TABLE[result]; - // Get the object type again - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 5, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - pProperties->aAudioObjectType = (M4OSA_Int32)result; - } - - // It's for implicit signal the presence of SBR data with AAC-LC audio object type(AOT = 2) - if (pProperties->aAudioObjectType == 2) { /* parseGASpecificConfig begin*/ - // Get frame length flag - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 1, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - // Get depends on core coder - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 1, &result, &offset); - 
VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - if (result) { - // Get core coder delay - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 1, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - } - // Get extension flag - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 1, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - if (result) { - // Get extension flag3 - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 1, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - } - }/* parseGASpecificConfig end*/ - - if (extensionAudioObjectType != 5 && (dsiSize*8 - offset) >= 16) { - // get sync extension type - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 11, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - if (result == 0x2b7) { - ALOGV("found syncExtension"); - // Get extension Audio Object Type - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 5, &extensionAudioObjectType, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - // get SBR present flag - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 1, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - pProperties->aSBRPresent = result; - if (result == 1) { - // Get extension sampling frequency index - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 4, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - if (result == 0x0f) { - // Get extension sampling frequency index again - err = VideoEditorAudioDecoder_getBits(pDSI, dsiSize, 24, &result, &offset); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - } - VIDEOEDITOR_CHECK((0 <= result) && (FREQ_TABLE_SIZE > result), - M4ERR_PARAMETER); - pProperties->aExtensionSampFreq = AD_AAC_FREQ_TABLE[result]; - } - } - } - - // Set the max PCM samples per channel - pProperties->aMaxPCMSamplesPerCh = (pProperties->aSBRPresent) ? 
2048 : 1024; - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioDecoder_parse_AAC_DSI no error"); - } else { - ALOGV("VideoEditorAudioDecoder_parse_AAC_DSI ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioDecoder_parse_AAC_DSI end"); - return err; -} - -/******************** - * ENGINE INTERFACE * - ********************/ - -M4OSA_ERR VideoEditorAudioDecoder_destroy(M4AD_Context pContext) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL; - - ALOGV("VideoEditorAudioDecoder_destroy begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - - pDecoderContext = (VideoEditorAudioDecoder_Context*)pContext; - - // Stop the graph - if( M4OSA_NULL != pDecoderContext->mDecoder.get() ) { - pDecoderContext->mDecoder->stop(); - } - - // Destroy the graph - pDecoderContext->mDecoderSource.clear(); - pDecoderContext->mDecoder.clear(); - pDecoderContext->mClient.disconnect(); - - SAFE_FREE(pDecoderContext); - pContext = M4OSA_NULL; - ALOGV("VideoEditorAudioDecoder_destroy : DONE"); - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioDecoder_destroy no error"); - } else { - ALOGV("VideoEditorAudioDecoder_destroy ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioDecoder_destroy : end"); - return err; -} - -M4OSA_ERR VideoEditorAudioDecoder_create(M4AD_Type decoderType, - M4AD_Context* pContext, M4_AudioStreamHandler* pStreamHandler, - void* pUserData) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL; - AAC_DEC_STREAM_PROPS aacProperties; - status_t result = OK; - sp decoderMetaData = NULL; - const char* mime = NULL; - uint32_t codecFlags = 0; - - ALOGV("VideoEditorAudioDecoder_create begin: decoderType %d", decoderType); - - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pStreamHandler, M4ERR_PARAMETER); - - // Context allocation & initialization - SAFE_MALLOC(pDecoderContext, VideoEditorAudioDecoder_Context, 1, - "AudioDecoder"); - pDecoderContext->mDecoderType = decoderType; - pDecoderContext->mAudioStreamHandler = pStreamHandler; - - pDecoderContext->mNbInputFrames = 0; - pDecoderContext->mNbOutputFrames = 0; - pDecoderContext->readerErrCode = M4NO_ERROR; - pDecoderContext->timeStampMs = -1; - - ALOGV("VideoEditorAudioDecoder_create : maxAUSize %d", - pDecoderContext->mAudioStreamHandler->m_basicProperties.m_maxAUSize); - - // Create the meta data for the decoder - decoderMetaData = new MetaData; - switch( pDecoderContext->mDecoderType ) { - case M4AD_kTypeAMRNB: - // StageFright parameters - mime = MEDIA_MIMETYPE_AUDIO_AMR_NB; - // Engine parameters - pDecoderContext->mAudioStreamHandler->m_byteFrameLength = 160; - // Number of bytes per sample - pDecoderContext->mAudioStreamHandler->m_byteSampleSize = 2; - pDecoderContext->mAudioStreamHandler->m_samplingFrequency = 8000; - pDecoderContext->mAudioStreamHandler->m_nbChannels = 1; - break; - - case M4AD_kTypeAMRWB: - // StageFright parameters - mime = MEDIA_MIMETYPE_AUDIO_AMR_WB; - - pDecoderContext->mAudioStreamHandler->m_byteFrameLength = 160; - // Number of bytes per sample - pDecoderContext->mAudioStreamHandler->m_byteSampleSize = 2; - pDecoderContext->mAudioStreamHandler->m_samplingFrequency = 16000; - pDecoderContext->mAudioStreamHandler->m_nbChannels = 1; - break; - - case M4AD_kTypeAAC: - // Reject ADTS & ADIF (or any incorrect type) - VIDEOEDITOR_CHECK(M4DA_StreamTypeAudioAac == - 
pDecoderContext->mAudioStreamHandler->\ - m_basicProperties.m_streamType,M4ERR_PARAMETER); - - // StageFright parameters - mime = MEDIA_MIMETYPE_AUDIO_AAC; - - decoderMetaData->setData(kKeyESDS, kTypeESDS, - pStreamHandler->m_basicProperties.m_pESDSInfo, - pStreamHandler->m_basicProperties.m_ESDSInfoSize); - - // Engine parameters - // Retrieve sampling frequency and number of channels from the DSI - err = VideoEditorAudioDecoder_parse_AAC_DSI( - (M4OSA_Int8*)pStreamHandler->m_basicProperties.\ - m_pDecoderSpecificInfo, - pStreamHandler->m_basicProperties.m_decoderSpecificInfoSize, - &aacProperties); - - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - pDecoderContext->mAudioStreamHandler->m_byteFrameLength = 1024; - // Number of bytes per sample - pDecoderContext->mAudioStreamHandler->m_byteSampleSize = 2; - pDecoderContext->mAudioStreamHandler->m_samplingFrequency = - aacProperties.aSampFreq; - pDecoderContext->mAudioStreamHandler->m_nbChannels = - aacProperties.aNumChan; - - // Copy the stream properties into userdata - if( M4OSA_NULL != pUserData ) { - memcpy((void *)pUserData, - (void *)&aacProperties, - sizeof(AAC_DEC_STREAM_PROPS)); - } - break; - - case M4AD_kTypeMP3: - // StageFright parameters - mime = MEDIA_MIMETYPE_AUDIO_MPEG; - break; - - default: - VIDEOEDITOR_CHECK(!"AudioDecoder_open : incorrect input format", - M4ERR_STATE); - break; - } - decoderMetaData->setCString(kKeyMIMEType, mime); - decoderMetaData->setInt32(kKeySampleRate, - (int32_t)pDecoderContext->mAudioStreamHandler->m_samplingFrequency); - decoderMetaData->setInt32(kKeyChannelCount, - pDecoderContext->mAudioStreamHandler->m_nbChannels); - decoderMetaData->setInt64(kKeyDuration, - (int64_t)pDecoderContext->mAudioStreamHandler->\ - m_basicProperties.m_duration); - - // Create the decoder source - pDecoderContext->mDecoderSource = VideoEditorAudioDecoderSource::Create( - decoderMetaData, (void *)pDecoderContext); - VIDEOEDITOR_CHECK(NULL != pDecoderContext->mDecoderSource.get(), - M4ERR_STATE); - - // Connect to the OMX client - result = pDecoderContext->mClient.connect(); - VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); - - // Create the OMX codec -#ifdef VIDEOEDITOR_FORCECODEC - codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC; -#endif /* VIDEOEDITOR_FORCECODEC */ - - pDecoderContext->mDecoder = OMXCodec::Create(pDecoderContext->\ - mClient.interface(), - decoderMetaData, false, pDecoderContext->mDecoderSource, NULL, - codecFlags); - VIDEOEDITOR_CHECK(NULL != pDecoderContext->mDecoder.get(), M4ERR_STATE); - - // Get the output channels, the decoder might overwrite the input metadata - pDecoderContext->mDecoder->getFormat()->findInt32(kKeyChannelCount, - &pDecoderContext->mNbOutputChannels); - ALOGV("VideoEditorAudioDecoder_create : output chan %d", - pDecoderContext->mNbOutputChannels); - - // Start the decoder - result = pDecoderContext->mDecoder->start(); - VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); - - *pContext = pDecoderContext; - ALOGV("VideoEditorAudioDecoder_create : DONE"); - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioDecoder_create no error"); - } else { - VideoEditorAudioDecoder_destroy(pDecoderContext); - *pContext = M4OSA_NULL; - ALOGV("VideoEditorAudioDecoder_create ERROR 0x%X", err); - } - return err; -} - -M4OSA_ERR VideoEditorAudioDecoder_create_AAC(M4AD_Context* pContext, - M4_AudioStreamHandler* pStreamHandler, void* pUserData) { - - return VideoEditorAudioDecoder_create( - M4AD_kTypeAAC, pContext, pStreamHandler,pUserData); -} - - -M4OSA_ERR 
VideoEditorAudioDecoder_create_AMRNB(M4AD_Context* pContext, - M4_AudioStreamHandler* pStreamHandler, void* pUserData) { - - return VideoEditorAudioDecoder_create( - M4AD_kTypeAMRNB, pContext, pStreamHandler, pUserData); -} - - -M4OSA_ERR VideoEditorAudioDecoder_create_AMRWB(M4AD_Context* pContext, - M4_AudioStreamHandler* pStreamHandler, void* pUserData) { - - return VideoEditorAudioDecoder_create( - M4AD_kTypeAMRWB, pContext, pStreamHandler, pUserData); -} - - -M4OSA_ERR VideoEditorAudioDecoder_create_MP3(M4AD_Context* pContext, - M4_AudioStreamHandler* pStreamHandler, void* pUserData) { - - return VideoEditorAudioDecoder_create( - M4AD_kTypeMP3, pContext, pStreamHandler, pUserData); -} - -M4OSA_ERR VideoEditorAudioDecoder_processInputBuffer( - M4AD_Context pContext, M4AD_Buffer* pInputBuffer) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL; - MediaBuffer* buffer = NULL; - - ALOGV("VideoEditorAudioDecoder_processInputBuffer begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - - - pDecoderContext = (VideoEditorAudioDecoder_Context*)pContext; - - if( M4OSA_NULL != pInputBuffer ) { - buffer = new MediaBuffer((size_t)pInputBuffer->m_bufferSize); - memcpy((void *)((M4OSA_Int8*)buffer->data() + buffer->range_offset()), - (void *)pInputBuffer->m_dataAddress, pInputBuffer->m_bufferSize); - buffer->meta_data()->setInt64(kKeyTime, pInputBuffer->m_timeStampUs); - } - pDecoderContext->mDecoderSource->storeBuffer(buffer); - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioDecoder_processInputBuffer no error"); - } else { - ALOGV("VideoEditorAudioDecoder_processInputBuffer ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioDecoder_processInputBuffer end"); - return err; -} - -M4OSA_ERR VideoEditorAudioDecoder_processOutputBuffer(M4AD_Context pContext, - MediaBuffer* buffer, M4AD_Buffer* pOuputBuffer) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL; - int32_t i32Tmp = 0; - int64_t i64Tmp = 0; - status_t result = OK; - - ALOGV("VideoEditorAudioDecoder_processOutputBuffer begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != buffer, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pOuputBuffer, M4ERR_PARAMETER); - - pDecoderContext = (VideoEditorAudioDecoder_Context*)pContext; - - // Process the returned data - if( 0 == buffer->range_length() ) { - // Decoder has no data yet, nothing unusual - goto cleanUp; - } - - pDecoderContext->mNbOutputFrames++; - - if( pDecoderContext->mAudioStreamHandler->m_nbChannels == - (M4OSA_UInt32)pDecoderContext->mNbOutputChannels ) { - // Just copy the PCMs - pOuputBuffer->m_bufferSize = (M4OSA_UInt32)buffer->range_length(); - memcpy((void *)pOuputBuffer->m_dataAddress, - (void *)(((M4OSA_MemAddr8)buffer->data())+buffer->range_offset()), - buffer->range_length()); - } else if( pDecoderContext->mAudioStreamHandler->m_nbChannels < - (M4OSA_UInt32)pDecoderContext->mNbOutputChannels ) { - // The decoder forces stereo output, downsample - pOuputBuffer->m_bufferSize = (M4OSA_UInt32)(buffer->range_length()/2); - M4OSA_Int16* pDataIn = ((M4OSA_Int16*)buffer->data()) + - buffer->range_offset(); - M4OSA_Int16* pDataOut = (M4OSA_Int16*)pOuputBuffer->m_dataAddress; - M4OSA_Int16* pDataEnd = pDataIn + \ - (buffer->range_length()/sizeof(M4OSA_Int16)); - while( pDataIn < pDataEnd ) { - *pDataOut = *pDataIn; - pDataIn+=2; - pDataOut++; - } - } else { - // 
The decoder forces mono output, not supported - VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); - } - -cleanUp: - // Release the buffer - buffer->release(); - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioDecoder_processOutputBuffer no error"); - } else { - pOuputBuffer->m_bufferSize = 0; - ALOGV("VideoEditorAudioDecoder_processOutputBuffer ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioDecoder_processOutputBuffer end"); - return err; -} - -M4OSA_ERR VideoEditorAudioDecoder_step(M4AD_Context pContext, - M4AD_Buffer* pInputBuffer, M4AD_Buffer* pOutputBuffer, - M4OSA_Bool bJump) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL; - status_t result = OK; - MediaBuffer* outputBuffer = NULL; - - ALOGV("VideoEditorAudioDecoder_step begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - - pDecoderContext = (VideoEditorAudioDecoder_Context*)pContext; - pDecoderContext->mNbInputFrames++; - - // Push the input buffer to the decoder source - err = VideoEditorAudioDecoder_processInputBuffer(pDecoderContext, - pInputBuffer); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - - // Read - result = pDecoderContext->mDecoder->read(&outputBuffer, NULL); - if (INFO_FORMAT_CHANGED == result) { - ALOGV("VideoEditorAudioDecoder_step: Audio decoder \ - returned INFO_FORMAT_CHANGED"); - CHECK(outputBuffer == NULL); - sp meta = pDecoderContext->mDecoder->getFormat(); - int32_t sampleRate, channelCount; - - CHECK(meta->findInt32(kKeySampleRate, &sampleRate)); - CHECK(meta->findInt32(kKeyChannelCount, &channelCount)); - ALOGV("VideoEditorAudioDecoder_step: samplingFreq = %d", sampleRate); - ALOGV("VideoEditorAudioDecoder_step: channelCnt = %d", channelCount); - pDecoderContext->mAudioStreamHandler->m_samplingFrequency = - (uint32_t)sampleRate; - pDecoderContext->mAudioStreamHandler->m_nbChannels = - (uint32_t)channelCount; - pDecoderContext->mNbOutputChannels = channelCount; - - return M4WAR_INFO_FORMAT_CHANGE; - } else if (ERROR_END_OF_STREAM == result) { - ALOGV("VideoEditorAudioDecoder_step: Audio decoder \ - returned ERROR_END_OF_STREAM"); - pDecoderContext->readerErrCode = M4WAR_NO_MORE_AU; - return M4WAR_NO_MORE_AU; - } else if (OK != result) { - return M4ERR_STATE; - } - - // Convert the PCM buffer - err = VideoEditorAudioDecoder_processOutputBuffer(pDecoderContext, - outputBuffer, pOutputBuffer); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioDecoder_step no error"); - } else { - ALOGV("VideoEditorAudioDecoder_step ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioDecoder_step end"); - return err; -} - -M4OSA_ERR VideoEditorAudioDecoder_getVersion(M4_VersionInfo* pVersionInfo) { - M4OSA_ERR err = M4NO_ERROR; - - ALOGV("VideoEditorAudioDecoder_getVersion begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pVersionInfo, M4ERR_PARAMETER); - - pVersionInfo->m_major = VIDEOEDITOR_AUDIO_DECODER_VERSION_MAJOR; - pVersionInfo->m_minor = VIDEOEDITOR_AUDIO_DECODER_VERSION_MINOR; - pVersionInfo->m_revision = VIDEOEDITOR_AUDIO_DECODER_VERSION_REV; - pVersionInfo->m_structSize = sizeof(M4_VersionInfo); - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioDecoder_getVersion no error"); - } else { - ALOGV("VideoEditorAudioDecoder_getVersion ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioDecoder_getVersion end"); - return err; -} - -M4OSA_ERR VideoEditorAudioDecoder_setOption(M4AD_Context pContext, - M4OSA_UInt32 optionID, M4OSA_DataOption 
optionValue) { - - M4OSA_ERR err = M4NO_ERROR; - VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL; - - ALOGV("VideoEditorAudioDecoder_setOption begin 0x%X", optionID); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - - pDecoderContext = (VideoEditorAudioDecoder_Context*)pContext; - - switch( optionID ) { - case M4AD_kOptionID_UserParam: - ALOGV("VideoEditorAudioDecodersetOption UserParam is not supported"); - err = M4ERR_NOT_IMPLEMENTED; - break; - - case M4AD_kOptionID_3gpReaderInterface: - ALOGV("VideoEditorAudioDecodersetOption 3gpReaderInterface"); - pDecoderContext->m_pReader = - (M4READER_DataInterface *)optionValue; - break; - - case M4AD_kOptionID_AudioAU: - ALOGV("VideoEditorAudioDecodersetOption AudioAU"); - pDecoderContext->m_pNextAccessUnitToDecode = - (M4_AccessUnit *)optionValue; - break; - - default: - ALOGV("VideoEditorAudioDecoder_setOption unsupported optionId 0x%X", - optionID); - VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); - break; - } - -cleanUp: - if( ((M4OSA_UInt32)M4NO_ERROR == err) || ((M4OSA_UInt32)M4ERR_NOT_IMPLEMENTED == err) ) { - ALOGV("VideoEditorAudioDecoder_setOption error 0x%X", err); - } else { - ALOGV("VideoEditorAudioDecoder_setOption ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioDecoder_setOption end"); - return err; -} - -M4OSA_ERR VideoEditorAudioDecoder_getOption(M4AD_Context pContext, - M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) { - - M4OSA_ERR err = M4NO_ERROR; - VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL; - - ALOGV("VideoEditorAudioDecoder_getOption begin: optionID 0x%X", optionID); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - - pDecoderContext = (VideoEditorAudioDecoder_Context*)pContext; - - switch( optionID ) { - - case M4AD_kOptionID_GetAudioAUErrCode: - *(uint32_t *)optionValue = pDecoderContext->readerErrCode; - break; - - case M4AD_kOptionID_AudioNbChannels: - *(uint32_t *)optionValue = - pDecoderContext->mAudioStreamHandler->m_nbChannels; - break; - - case M4AD_kOptionID_AudioSampFrequency: - *(uint32_t *)optionValue = - pDecoderContext->mAudioStreamHandler->m_samplingFrequency; - break; - - case M4AD_kOptionID_AuCTS: - *(uint32_t *)optionValue = pDecoderContext->timeStampMs; - break; - - default: - ALOGV("VideoEditorAudioDecoder_getOption unsupported optionId 0x%X", - optionID); - VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); - break; - } - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioDecoder_getOption no error"); - } else { - ALOGV("VideoEditorAudioDecoder_getOption ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioDecoder_getOption end"); - return err; -} - -M4OSA_ERR VideoEditorAudioDecoder_getInterface(M4AD_Type decoderType, - M4AD_Type* pDecoderType, M4AD_Interface** pDecoderInterface) { - - M4OSA_ERR err = M4NO_ERROR; - - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pDecoderType, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pDecoderInterface, M4ERR_PARAMETER); - - ALOGV("VideoEditorAudioDecoder_getInterface begin %d 0x%x 0x%x", - decoderType, pDecoderType, pDecoderInterface); - - SAFE_MALLOC(*pDecoderInterface, M4AD_Interface, 1, - "VideoEditorAudioDecoder"); - - *pDecoderType = decoderType; - - switch( decoderType ) { - case M4AD_kTypeAMRNB: - (*pDecoderInterface)->m_pFctCreateAudioDec = - VideoEditorAudioDecoder_create_AMRNB; - break; - case M4AD_kTypeAMRWB: - (*pDecoderInterface)->m_pFctCreateAudioDec = - VideoEditorAudioDecoder_create_AMRWB; - 
break; - case M4AD_kTypeAAC: - (*pDecoderInterface)->m_pFctCreateAudioDec = - VideoEditorAudioDecoder_create_AAC; - break; - case M4AD_kTypeMP3: - (*pDecoderInterface)->m_pFctCreateAudioDec = - VideoEditorAudioDecoder_create_MP3; - break; - default: - ALOGV("VEAD_getInterface ERROR: unsupported type %d", decoderType); - VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); - break; - } - (*pDecoderInterface)->m_pFctDestroyAudioDec = - VideoEditorAudioDecoder_destroy; - (*pDecoderInterface)->m_pFctResetAudioDec = M4OSA_NULL; - (*pDecoderInterface)->m_pFctStartAudioDec = M4OSA_NULL; - (*pDecoderInterface)->m_pFctStepAudioDec = - VideoEditorAudioDecoder_step; - (*pDecoderInterface)->m_pFctGetVersionAudioDec = - VideoEditorAudioDecoder_getVersion; - (*pDecoderInterface)->m_pFctSetOptionAudioDec = - VideoEditorAudioDecoder_setOption; - (*pDecoderInterface)->m_pFctGetOptionAudioDec = - VideoEditorAudioDecoder_getOption; - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioDecoder_getInterface no error"); - } else { - *pDecoderInterface = M4OSA_NULL; - ALOGV("VideoEditorAudioDecoder_getInterface ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioDecoder_getInterface end"); - return err; -} - - -extern "C" { - -M4OSA_ERR VideoEditorAudioDecoder_getInterface_AAC(M4AD_Type* pDecoderType, - M4AD_Interface** pDecoderInterface) { - ALOGV("TEST: AAC VideoEditorAudioDecoder_getInterface no error"); - return VideoEditorAudioDecoder_getInterface( - M4AD_kTypeAAC, pDecoderType, pDecoderInterface); -} - -M4OSA_ERR VideoEditorAudioDecoder_getInterface_AMRNB(M4AD_Type* pDecoderType, - M4AD_Interface** pDecoderInterface) { - ALOGV("TEST: AMR VideoEditorAudioDecoder_getInterface no error"); - return VideoEditorAudioDecoder_getInterface( - M4AD_kTypeAMRNB, pDecoderType, pDecoderInterface); -} - -M4OSA_ERR VideoEditorAudioDecoder_getInterface_AMRWB(M4AD_Type* pDecoderType, - M4AD_Interface** pDecoderInterface) { - - return VideoEditorAudioDecoder_getInterface( - M4AD_kTypeAMRWB, pDecoderType, pDecoderInterface); -} - -M4OSA_ERR VideoEditorAudioDecoder_getInterface_MP3(M4AD_Type* pDecoderType, - M4AD_Interface** pDecoderInterface) { - - return VideoEditorAudioDecoder_getInterface( - M4AD_kTypeMP3, pDecoderType, pDecoderInterface); -} - -} // extern "C" - -} // namespace android diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorAudioEncoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorAudioEncoder.cpp deleted file mode 100644 index 70140d0..0000000 --- a/frameworks/videoedit/stagefrightshells/VideoEditorAudioEncoder.cpp +++ /dev/null @@ -1,777 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. 
- *
- * No license under any patent, copyright, trade secret or other intellectual
- * property right is granted to or conferred upon you by disclosure or delivery
- * of the Materials, either expressly, by implication, inducement, estoppel or
- * otherwise. Any license under such intellectual property rights must be express
- * and approved by Intel in writing.
- ************************************************************************************/
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/**
-*************************************************************************
-* @file VideoEditorAudioEncoder.cpp
-* @brief StageFright shell Audio Encoder
-*************************************************************************
-*/
-
-#define LOG_NDEBUG 1
-#define LOG_TAG "VIDEOEDITOR_AUDIOENCODER"
-
-#include "M4OSA_Debug.h"
-#include "VideoEditorAudioEncoder.h"
-#include "VideoEditorUtils.h"
-
-#include "utils/Log.h"
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/OMXClient.h>
-#include <media/stagefright/OMXCodec.h>
-
-/*** DEFINITIONS ***/
-// Force using software encoder as engine does not support prefetch
-#define VIDEOEDITOR_FORCECODEC kSoftwareCodecsOnly
-
-namespace android {
-struct VideoEditorAudioEncoderSource : public MediaSource {
-    public:
-        static sp<VideoEditorAudioEncoderSource> Create(
-                const sp<MetaData> &format);
-        virtual status_t start(MetaData *params = NULL);
-        virtual status_t stop();
-        virtual sp<MetaData> getFormat();
-        virtual status_t read(MediaBuffer **buffer,
-                const ReadOptions *options = NULL);
-        virtual int32_t storeBuffer(MediaBuffer *buffer);
-
-    protected:
-        virtual ~VideoEditorAudioEncoderSource();
-
-    private:
-        struct MediaBufferChain {
-            MediaBuffer* buffer;
-            MediaBufferChain* nextLink;
-        };
-        enum State {
-            CREATED,
-            STARTED,
-            ERROR
-        };
-
-        MediaBufferChain* mFirstBufferLink;
-        MediaBufferChain* mLastBufferLink;
-        int32_t mNbBuffer;
-        State mState;
-        sp<MetaData> mEncFormat;
-
-        VideoEditorAudioEncoderSource(const sp<MetaData> &format);
-
-        // Don't call me.
-        VideoEditorAudioEncoderSource(const VideoEditorAudioEncoderSource&);
-        VideoEditorAudioEncoderSource& operator=(
-                const VideoEditorAudioEncoderSource&);
-};
-
-sp<VideoEditorAudioEncoderSource> VideoEditorAudioEncoderSource::Create(
-        const sp<MetaData> &format) {
-
-    ALOGV("VideoEditorAudioEncoderSource::Create");
-    sp<VideoEditorAudioEncoderSource> aSource =
-        new VideoEditorAudioEncoderSource(format);
-
-    return aSource;
-}
-
-VideoEditorAudioEncoderSource::VideoEditorAudioEncoderSource(
-        const sp<MetaData> &format):
-    mFirstBufferLink(NULL),
-    mLastBufferLink(NULL),
-    mNbBuffer(0),
-    mState(CREATED),
-    mEncFormat(format) {
-    ALOGV("VideoEditorAudioEncoderSource::VideoEditorAudioEncoderSource");
-}
-
-
-VideoEditorAudioEncoderSource::~VideoEditorAudioEncoderSource() {
-    ALOGV("VideoEditorAudioEncoderSource::~VideoEditorAudioEncoderSource");
-
-    if( STARTED == mState ) {
-        stop();
-    }
-}
-
-status_t VideoEditorAudioEncoderSource::start(MetaData *meta) {
-    status_t err = OK;
-
-    ALOGV("VideoEditorAudioEncoderSource::start");
-
-    if( CREATED != mState ) {
-        ALOGV("VideoEditorAudioEncoderSource::start ERROR : invalid state %d",
-            mState);
-        return UNKNOWN_ERROR;
-    }
-
-    mState = STARTED;
-
-cleanUp:
-    ALOGV("VideoEditorAudioEncoderSource::start END (0x%x)", err);
-    return err;
-}
-
-status_t VideoEditorAudioEncoderSource::stop() {
-    status_t err = OK;
-
-    ALOGV("VideoEditorAudioEncoderSource::stop");
-
-    if( STARTED != mState ) {
-        ALOGV("VideoEditorAudioEncoderSource::stop ERROR: invalid state %d",
-            mState);
-        return UNKNOWN_ERROR;
-    }
-
-    int32_t i = 0;
-    MediaBufferChain* tmpLink = NULL;
-    while( mFirstBufferLink ) {
-        i++;
-        tmpLink = mFirstBufferLink;
-        mFirstBufferLink = mFirstBufferLink->nextLink;
-        delete tmpLink;
-    }
-    ALOGV("VideoEditorAudioEncoderSource::stop : %d buffer remained", i);
-    mFirstBufferLink = NULL;
-    mLastBufferLink = NULL;
-
-    mState = CREATED;
-
-    ALOGV("VideoEditorAudioEncoderSource::stop END (0x%x)", err);
-    return err;
-}
-
-sp<MetaData> VideoEditorAudioEncoderSource::getFormat() {
-    ALOGV("VideoEditorAudioEncoderSource::getFormat");
-    return mEncFormat;
-}
-
-status_t VideoEditorAudioEncoderSource::read(MediaBuffer **buffer,
-        const ReadOptions *options) {
-    MediaSource::ReadOptions readOptions;
-    status_t err = OK;
-    MediaBufferChain* tmpLink = NULL;
-
-    ALOGV("VideoEditorAudioEncoderSource::read");
-
-    if ( STARTED != mState ) {
-        ALOGV("VideoEditorAudioEncoderSource::read ERROR : invalid state %d",
-            mState);
-        return UNKNOWN_ERROR;
-    }
-
-    if( NULL == mFirstBufferLink ) {
-        *buffer = NULL;
-        ALOGV("VideoEditorAudioEncoderSource::read : EOS");
-        return ERROR_END_OF_STREAM;
-    }
-    *buffer = mFirstBufferLink->buffer;
-
-    tmpLink = mFirstBufferLink;
-    mFirstBufferLink = mFirstBufferLink->nextLink;
-    if( NULL == mFirstBufferLink ) {
-        mLastBufferLink = NULL;
-    }
-    delete tmpLink;
-    mNbBuffer--;
-
-    ALOGV("VideoEditorAudioEncoderSource::read END (0x%x)", err);
-    return err;
-}
-
-int32_t VideoEditorAudioEncoderSource::storeBuffer(MediaBuffer *buffer) {
-    status_t err = OK;
-
-    ALOGV("VideoEditorAudioEncoderSource::storeBuffer");
-
-    MediaBufferChain* newLink = new MediaBufferChain;
-    newLink->buffer = buffer;
-    newLink->nextLink = NULL;
-    if( NULL != mLastBufferLink ) {
-        mLastBufferLink->nextLink = newLink;
-    } else {
-        mFirstBufferLink = newLink;
-    }
-    mLastBufferLink = newLink;
-    mNbBuffer++;
-
-    ALOGV("VideoEditorAudioEncoderSource::storeBuffer END");
-    return mNbBuffer;
-}
-
-/********************
- * ENGINE INTERFACE *
- ********************/
-/**
- ******************************************************************************
- * structure VideoEditorAudioEncoder_Context
- * @brief This structure defines the context of the StageFright audio
- * encoder shell
- ******************************************************************************
-*/
-typedef struct {
-    M4ENCODER_AudioFormat mFormat;
-    M4ENCODER_AudioParams* mCodecParams;
-    M4ENCODER_AudioDecSpecificInfo mDSI;
-    sp<VideoEditorAudioEncoderSource> mEncoderSource;
-    OMXClient mClient;
-    sp<MediaSource> mEncoder;
-    uint32_t mNbInputFrames;
-    uint32_t mNbOutputFrames;
-    int64_t mFirstOutputCts;
-    int64_t mLastOutputCts;
-} VideoEditorAudioEncoder_Context;
-
-M4OSA_ERR VideoEditorAudioEncoder_cleanup(M4OSA_Context pContext) {
-
-    M4OSA_ERR err = M4NO_ERROR;
-    VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL;
-
-    ALOGV("VideoEditorAudioEncoder_cleanup begin");
-    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
-    pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext;
-
-    SAFE_FREE(pEncoderContext->mDSI.pInfo);
-    SAFE_FREE(pEncoderContext);
-    pContext = M4OSA_NULL;
-
-cleanUp:
-    if( M4NO_ERROR == err ) {
-        ALOGV("VideoEditorAudioEncoder_cleanup no error");
-    } else {
-        ALOGV("VideoEditorAudioEncoder_cleanup ERROR 0x%X", err);
-    }
-    ALOGV("VideoEditorAudioEncoder_cleanup end");
-    return err;
-}
-
-M4OSA_ERR VideoEditorAudioEncoder_init(M4ENCODER_AudioFormat format,
-        M4OSA_Context* pContext, M4OSA_Void* pUserData) {
-
-    M4OSA_ERR err = M4NO_ERROR;
-    VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL;
-
-    ALOGV(" VideoEditorAudioEncoder_init begin: format %d", format);
-    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
-
-    SAFE_MALLOC(pEncoderContext, VideoEditorAudioEncoder_Context, 1,
-        "VideoEditorAudioEncoder");
-    pEncoderContext->mFormat = format;
-
-    *pContext = pEncoderContext;
-
-cleanUp:
-    if( M4NO_ERROR == err ) {
-        ALOGV("VideoEditorAudioEncoder_init no error");
-    } else {
-        VideoEditorAudioEncoder_cleanup(pEncoderContext);
-        *pContext = M4OSA_NULL;
-        ALOGV("VideoEditorAudioEncoder_init ERROR 0x%X", err);
-    }
-    ALOGV("VideoEditorAudioEncoder_init end");
-    return err;
-}
-
-M4OSA_ERR VideoEditorAudioEncoder_init_AAC(M4OSA_Context* pContext,
-        M4OSA_Void* pUserData) {
-    return VideoEditorAudioEncoder_init(M4ENCODER_kAAC, pContext, pUserData);
-}
-
-M4OSA_ERR VideoEditorAudioEncoder_init_AMRNB(M4OSA_Context* pContext,
-        M4OSA_Void* pUserData) {
-    return VideoEditorAudioEncoder_init(M4ENCODER_kAMRNB, pContext, pUserData);
-}
-
-M4OSA_ERR VideoEditorAudioEncoder_init_MP3(M4OSA_Context* pContext,
-        M4OSA_Void* pUserData) {
-    return VideoEditorAudioEncoder_init(M4ENCODER_kMP3, pContext, pUserData);
-}
-
-M4OSA_ERR VideoEditorAudioEncoder_close(M4OSA_Context pContext) {
-
-    M4OSA_ERR err = M4NO_ERROR;
-    VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL;
-
-    ALOGV("VideoEditorAudioEncoder_close begin");
-
-    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
-    pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext;
-
-    SAFE_FREE(pEncoderContext->mCodecParams);
-
-    pEncoderContext->mEncoder->stop();
-    pEncoderContext->mEncoder.clear();
-    pEncoderContext->mClient.disconnect();
-    pEncoderContext->mEncoderSource.clear();
-
-    ALOGV("AudioEncoder_close:IN %d frames,OUT %d frames from %lld to %lld",
-        pEncoderContext->mNbInputFrames,
-        pEncoderContext->mNbOutputFrames, pEncoderContext->mFirstOutputCts,
-        pEncoderContext->mLastOutputCts);
-
-    if( pEncoderContext->mNbInputFrames != pEncoderContext->mNbOutputFrames ) {
-        ALOGV("VideoEditorAudioEncoder_close: some frames were not encoded:"
-            " out %d, in %d", pEncoderContext->mNbOutputFrames,
pEncoderContext->mNbInputFrames); - } - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioEncoder_close no error"); - } else { - ALOGV("VideoEditorAudioEncoder_close ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioEncoder_close begin end"); - return err; -} - -M4OSA_ERR VideoEditorAudioEncoder_open(M4OSA_Context pContext, - M4ENCODER_AudioParams *pParams, M4ENCODER_AudioDecSpecificInfo *pDSI, - M4OSA_Context pGrabberContext) { - - M4OSA_ERR err = M4NO_ERROR; - VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL; - status_t result = OK; - sp encoderMetadata = NULL; - const char* mime = NULL; - int32_t iNbChannel = 0; - uint32_t codecFlags = 0; - - ALOGV("VideoEditorAudioEncoder_open begin"); - - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pParams, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pDSI, M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext; - pDSI->pInfo = M4OSA_NULL; - pDSI->infoSize = 0; - - pEncoderContext->mNbInputFrames = 0; - pEncoderContext->mNbOutputFrames = 0; - pEncoderContext->mFirstOutputCts = -1; - pEncoderContext->mLastOutputCts = -1; - - // Allocate & initialize the encoding parameters - ALOGV("VideoEditorAudioEncoder_open : params F=%d CN=%d BR=%d F=%d", - pParams->Frequency, pParams->ChannelNum, pParams->Bitrate, - pParams->Format); - SAFE_MALLOC(pEncoderContext->mCodecParams, M4ENCODER_AudioParams, 1, - "VIDEOEDITOR CodecParams"); - pEncoderContext->mCodecParams->Frequency = pParams->Frequency; - pEncoderContext->mCodecParams->ChannelNum = pParams->ChannelNum; - pEncoderContext->mCodecParams->Bitrate = pParams->Bitrate; - pEncoderContext->mCodecParams->Format = pParams->Format; - - // Check output format consistency - VIDEOEDITOR_CHECK(pEncoderContext->mCodecParams->Format == - pEncoderContext->mFormat, M4ERR_PARAMETER); - - /** - * StageFright graph building - */ - // Create the meta data for the encoder - encoderMetadata = new MetaData; - switch( pEncoderContext->mCodecParams->Format ) { - case M4ENCODER_kAAC: - { - mime = MEDIA_MIMETYPE_AUDIO_AAC; - break; - } - case M4ENCODER_kAMRNB: - { - mime = MEDIA_MIMETYPE_AUDIO_AMR_NB; - break; - } - default: - { - VIDEOEDITOR_CHECK(!"AudioEncoder_open : incorrect input format", - M4ERR_PARAMETER); - break; - } - } - encoderMetadata->setCString(kKeyMIMEType, mime); - encoderMetadata->setInt32(kKeySampleRate, - (int32_t)pEncoderContext->mCodecParams->Frequency); - encoderMetadata->setInt32(kKeyBitRate, - (int32_t)pEncoderContext->mCodecParams->Bitrate); - - switch( pEncoderContext->mCodecParams->ChannelNum ) { - case M4ENCODER_kMono: - { - iNbChannel = 1; - break; - } - case M4ENCODER_kStereo: - { - iNbChannel = 2; - break; - } - default: - { - VIDEOEDITOR_CHECK(!"AudioEncoder_open : incorrect channel number", - M4ERR_STATE); - break; - } - } - encoderMetadata->setInt32(kKeyChannelCount, iNbChannel); - - // Create the encoder source - pEncoderContext->mEncoderSource = VideoEditorAudioEncoderSource::Create( - encoderMetadata); - VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoderSource.get(), - M4ERR_STATE); - - // Connect to the OMX client - result = pEncoderContext->mClient.connect(); - VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); - - // Create the OMX codec -#ifdef VIDEOEDITOR_FORCECODEC - codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC; -#endif /* VIDEOEDITOR_FORCECODEC */ - // FIXME: - // We are moving away to use software AACEncoder and instead use OMX-based - // software AAC audio encoder. 
We want to use AACEncoder for now. After we - // fix the interface issue with the OMX-based AAC audio encoder, we should - // then set the component name back to NULL to allow the system to pick up - // the right AAC audio encoder. - pEncoderContext->mEncoder = OMXCodec::Create( - pEncoderContext->mClient.interface(), encoderMetadata, true, - pEncoderContext->mEncoderSource, "AACEncoder" /* component name */, - codecFlags); - VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoder.get(), M4ERR_STATE); - - // Start the graph - result = pEncoderContext->mEncoder->start(); - VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); - - // Get AAC DSI, this code can only work with software encoder - if( M4ENCODER_kAAC == pEncoderContext->mCodecParams->Format ) { - int32_t isCodecConfig = 0; - MediaBuffer* buffer = NULL; - - // Read once to get the DSI - result = pEncoderContext->mEncoder->read(&buffer, NULL); - VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); - VIDEOEDITOR_CHECK(buffer->meta_data()->findInt32(kKeyIsCodecConfig, - &isCodecConfig) && isCodecConfig, M4ERR_STATE); - - // Save the DSI - pEncoderContext->mDSI.infoSize = (M4OSA_UInt32)buffer->range_length(); - SAFE_MALLOC(pEncoderContext->mDSI.pInfo, M4OSA_Int8, - pEncoderContext->mDSI.infoSize, "Encoder header"); - - memcpy((void *)pEncoderContext->mDSI.pInfo, - (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->range_offset()), - pEncoderContext->mDSI.infoSize); - - buffer->release(); - *pDSI = pEncoderContext->mDSI; - } - ALOGV("VideoEditorAudioEncoder_open : DONE"); - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioEncoder_open no error"); - } else { - VideoEditorAudioEncoder_close(pEncoderContext); - ALOGV("VideoEditorAudioEncoder_open ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioEncoder_open end"); - return err; -} - -M4OSA_ERR VideoEditorAudioEncoder_processInputBuffer(M4OSA_Context pContext, - M4ENCODER_AudioBuffer* pInBuffer) { - - M4OSA_ERR err = M4NO_ERROR; - VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL; - M4OSA_Int8* pData = M4OSA_NULL; - MediaBuffer* buffer = NULL; - int32_t nbBuffer = 0; - - ALOGV("VideoEditorAudioEncoder_processInputBuffer begin"); - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext; - - switch( pEncoderContext->mCodecParams->ChannelNum ) { - case M4ENCODER_kMono: - case M4ENCODER_kStereo: - // Let the MediaBuffer own the data so we don't have to free it - buffer = new MediaBuffer((size_t)pInBuffer->pTableBufferSize[0]); - pData = (M4OSA_Int8*)buffer->data() + buffer->range_offset(); - memcpy((void *)pData, (void *)pInBuffer->pTableBuffer[0], - pInBuffer->pTableBufferSize[0]); - break; - default: - ALOGV("VEAE_processInputBuffer unsupported channel configuration %d", - pEncoderContext->mCodecParams->ChannelNum); - VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); - break; - } - - ALOGV("VideoEditorAudioEncoder_processInputBuffer : store %d bytes", - buffer->range_length()); - // Push the buffer to the source - nbBuffer = pEncoderContext->mEncoderSource->storeBuffer(buffer); - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioEncoder_processInputBuffer no error"); - } else { - if( NULL != buffer ) { - buffer->release(); - } - ALOGV("VideoEditorAudioEncoder_processInputBuffer ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioEncoder_processInputBuffer end"); - return err; -} - -M4OSA_ERR VideoEditorAudioEncoder_processOutputBuffer(M4OSA_Context pContext, - MediaBuffer* buffer, M4ENCODER_AudioBuffer* 
pOutBuffer) { - - M4OSA_ERR err = M4NO_ERROR; - VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL; - M4OSA_UInt32 Cts = 0; - int32_t i32Tmp = 0; - int64_t i64Tmp = 0; - status_t result = OK; - - ALOGV("VideoEditorAudioEncoder_processOutputBuffer begin"); - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != buffer, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pOutBuffer, M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext; - - // Process the returned AU - if( 0 == buffer->range_length() ) { - // Encoder has no data yet, nothing unusual - ALOGV("VideoEditorAudioEncoder_processOutputBuffer : buffer is empty"); - pOutBuffer->pTableBufferSize[0] = 0; - goto cleanUp; - } - if( buffer->meta_data()->findInt32(kKeyIsCodecConfig, &i32Tmp) && i32Tmp ) { - /* This should not happen with software encoder, - * DSI was retrieved beforehand */ - VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_STATE); - } else { - // Check the CTS - VIDEOEDITOR_CHECK(buffer->meta_data()->findInt64(kKeyTime, &i64Tmp), - M4ERR_STATE); - Cts = (M4OSA_Int32)(i64Tmp/1000); - - pEncoderContext->mNbOutputFrames++; - if( 0 > pEncoderContext->mFirstOutputCts ) { - pEncoderContext->mFirstOutputCts = i64Tmp; - } - pEncoderContext->mLastOutputCts = i64Tmp; - - // Format the AU - memcpy((void *)pOutBuffer->pTableBuffer[0], - (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->range_offset()), - buffer->range_length()); - pOutBuffer->pTableBufferSize[0] = (M4OSA_UInt32)buffer->range_length(); - } - -cleanUp: - // Release the buffer - buffer->release(); - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioEncoder_processOutputBuffer no error"); - } else { - ALOGV("VideoEditorAudioEncoder_processOutputBuffer ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioEncoder_processOutputBuffer end"); - return err; -} - -M4OSA_ERR VideoEditorAudioEncoder_step(M4OSA_Context pContext, - M4ENCODER_AudioBuffer* pInBuffer, M4ENCODER_AudioBuffer* pOutBuffer) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL; - status_t result = OK; - MediaBuffer* buffer = NULL; - - ALOGV("VideoEditorAudioEncoder_step begin"); - - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pInBuffer, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pOutBuffer, M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext; - pEncoderContext->mNbInputFrames++; - - // Push the input buffer to the encoder source - err = VideoEditorAudioEncoder_processInputBuffer(pEncoderContext,pInBuffer); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - - // Read - result = pEncoderContext->mEncoder->read(&buffer, NULL); - VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); - - // Provide the encoded AU to the writer - err = VideoEditorAudioEncoder_processOutputBuffer(pEncoderContext, buffer, - pOutBuffer); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioEncoder_step no error"); - } else { - ALOGV("VideoEditorAudioEncoder_step ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioEncoder_step end"); - return err; -} - -M4OSA_ERR VideoEditorAudioEncoder_getOption(M4OSA_Context pContext, - M4OSA_OptionID optionID, M4OSA_DataOption* optionValue) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL; - - ALOGV("VideoEditorAudioEncoder_getOption begin optionID 0x%X", optionID); - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, 
M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext; - - switch( optionID ) { - default: - ALOGV("VideoEditorAudioEncoder_getOption: unsupported optionId 0x%X", - optionID); - VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); - break; - } - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioEncoder_getOption no error"); - } else { - ALOGV("VideoEditorAudioEncoder_getOption ERROR 0x%X", err); - } - ALOGV("VideoEditorAudioEncoder_getOption end"); - return err; -} - -M4OSA_ERR VideoEditorAudioEncoder_getInterface( - M4ENCODER_AudioFormat format, M4ENCODER_AudioFormat* pFormat, - M4ENCODER_AudioGlobalInterface** pEncoderInterface) { - M4OSA_ERR err = M4NO_ERROR; - - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pFormat, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pEncoderInterface, M4ERR_PARAMETER); - - ALOGV("VideoEditorAudioEncoder_getInterface 0x%x 0x%x",pFormat, - pEncoderInterface); - SAFE_MALLOC(*pEncoderInterface, M4ENCODER_AudioGlobalInterface, 1, - "AudioEncoder"); - - *pFormat = format; - - switch( format ) { - case M4ENCODER_kAAC: - { - (*pEncoderInterface)->pFctInit = VideoEditorAudioEncoder_init_AAC; - break; - } - case M4ENCODER_kAMRNB: - { - (*pEncoderInterface)->pFctInit = VideoEditorAudioEncoder_init_AMRNB; - break; - } - case M4ENCODER_kMP3: - { - (*pEncoderInterface)->pFctInit = VideoEditorAudioEncoder_init_MP3; - break; - } - default: - { - ALOGV("VideoEditorAudioEncoder_getInterface: unsupported format %d", - format); - VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); - break; - } - } - (*pEncoderInterface)->pFctCleanUp = VideoEditorAudioEncoder_cleanup; - (*pEncoderInterface)->pFctOpen = VideoEditorAudioEncoder_open; - (*pEncoderInterface)->pFctClose = VideoEditorAudioEncoder_close; - (*pEncoderInterface)->pFctStep = VideoEditorAudioEncoder_step; - (*pEncoderInterface)->pFctGetOption = VideoEditorAudioEncoder_getOption; - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorAudioEncoder_getInterface no error"); - } else { - *pEncoderInterface = M4OSA_NULL; - ALOGV("VideoEditorAudioEncoder_getInterface ERROR 0x%X", err); - } - return err; -} -extern "C" { - -M4OSA_ERR VideoEditorAudioEncoder_getInterface_AAC( - M4ENCODER_AudioFormat* pFormat, - M4ENCODER_AudioGlobalInterface** pEncoderInterface) { - return VideoEditorAudioEncoder_getInterface( - M4ENCODER_kAAC, pFormat, pEncoderInterface); -} - -M4OSA_ERR VideoEditorAudioEncoder_getInterface_AMRNB( - M4ENCODER_AudioFormat* pFormat, - M4ENCODER_AudioGlobalInterface** pEncoderInterface) { - - return VideoEditorAudioEncoder_getInterface( - M4ENCODER_kAMRNB, pFormat, pEncoderInterface); -} - -M4OSA_ERR VideoEditorAudioEncoder_getInterface_MP3( - M4ENCODER_AudioFormat* pFormat, - M4ENCODER_AudioGlobalInterface** pEncoderInterface) { - ALOGV("VideoEditorAudioEncoder_getInterface_MP3 no error"); - - return VideoEditorAudioEncoder_getInterface( - M4ENCODER_kMP3, pFormat, pEncoderInterface); -} - -} // extern "C" - -} // namespace android diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.cpp deleted file mode 100644 index 2ddea80..0000000 --- a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.cpp +++ /dev/null @@ -1,331 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. 
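Everything above funnels into the function-pointer table built by VideoEditorAudioEncoder_getInterface(), which is the shell's only public surface. A minimal sketch of how an engine-side caller would drive it (the parameter values and the commented step call are illustrative, not taken from this patch):

    M4ENCODER_AudioFormat format;
    M4ENCODER_AudioGlobalInterface* pEncIf = M4OSA_NULL;
    M4OSA_Context encCtx = M4OSA_NULL;

    if (M4NO_ERROR == VideoEditorAudioEncoder_getInterface_AAC(&format, &pEncIf)) {
        M4ENCODER_AudioParams params;
        M4ENCODER_AudioDecSpecificInfo dsi;
        params.Frequency  = 44100;              // illustrative settings
        params.ChannelNum = M4ENCODER_kStereo;
        params.Bitrate    = 96000;
        params.Format     = M4ENCODER_kAAC;

        pEncIf->pFctInit(&encCtx, M4OSA_NULL);
        if (M4NO_ERROR == pEncIf->pFctOpen(encCtx, &params, &dsi, M4OSA_NULL)) {
            // One pFctStep() call feeds one PCM frame in and returns one
            // encoded AU, e.g.:
            // pEncIf->pFctStep(encCtx, &inBuffer, &outBuffer);
            pEncIf->pFctClose(encCtx);
        }
        pEncIf->pFctCleanUp(encCtx);
        free(pEncIf);   // the table itself is allocated by getInterface()
    }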
- * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */
-
-/**
-*************************************************************************
-* @file VideoEditorBuffer.cpp
-* @brief StageFright shell Buffer
-*************************************************************************
-*/
-#undef M4OSA_TRACE_LEVEL
-#define M4OSA_TRACE_LEVEL 1
-
-#include "VideoEditorBuffer.h"
-#include "utils/Log.h"
-
-#define VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE 40
-
-#define VIDEOEDITOR_SAFE_FREE(p) \
-{ \
-    if(M4OSA_NULL != p) \
-    { \
-        free(p); \
-        p = M4OSA_NULL; \
-    } \
-}
-
-/**
- ************************************************************************
- M4OSA_ERR VIDEOEDITOR_BUFFER_allocatePool(VIDEOEDITOR_BUFFER_Pool** ppool,
- *     M4OSA_UInt32 nbBuffers, M4OSA_Char* poolName)
- * @brief Allocate a pool of nbBuffers buffers
- *
- * @param ppool : IN The buffer pool to create
- * @param nbBuffers : IN The number of buffers in the pool
- * @param poolName : IN a name given to the pool
- * @return Error code
- ************************************************************************
-*/
-M4OSA_ERR VIDEOEDITOR_BUFFER_allocatePool(VIDEOEDITOR_BUFFER_Pool** ppool,
-        M4OSA_UInt32 nbBuffers, M4OSA_Char* poolName)
-{
-    M4OSA_ERR lerr = M4NO_ERROR;
-    VIDEOEDITOR_BUFFER_Pool* pool;
-
-    ALOGV("VIDEOEDITOR_BUFFER_allocatePool : ppool = 0x%x nbBuffers = %d ",
-        ppool, nbBuffers);
-
-    pool = M4OSA_NULL;
-    pool = (VIDEOEDITOR_BUFFER_Pool*)M4OSA_32bitAlignedMalloc(
-            sizeof(VIDEOEDITOR_BUFFER_Pool), VIDEOEDITOR_BUFFER_EXTERNAL,
-            (M4OSA_Char*)("VIDEOEDITOR_BUFFER_allocatePool: pool"));
-    if (M4OSA_NULL == pool)
-    {
-        lerr = M4ERR_ALLOC;
-        goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup;
-    }
-
-    ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Allocating Pool buffers");
-    pool->pNXPBuffer = M4OSA_NULL;
-    pool->pNXPBuffer = (VIDEOEDITOR_BUFFER_Buffer*)M4OSA_32bitAlignedMalloc(
-            sizeof(VIDEOEDITOR_BUFFER_Buffer)*nbBuffers,
-            VIDEOEDITOR_BUFFER_EXTERNAL,
-            (M4OSA_Char*)("BUFFER_allocatePool: pNXPBuffer"));
-    if(M4OSA_NULL == pool->pNXPBuffer)
-    {
-        lerr = M4ERR_ALLOC;
-        goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup;
-    }
-
-    ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Allocating Pool name buffer");
-    pool->poolName = M4OSA_NULL;
-    pool->poolName = (M4OSA_Char*)M4OSA_32bitAlignedMalloc(
-            VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE,VIDEOEDITOR_BUFFER_EXTERNAL,
-            (M4OSA_Char*)("VIDEOEDITOR_BUFFER_allocatePool: poolname"));
-    if(pool->poolName == M4OSA_NULL)
-    {
-        lerr = M4ERR_ALLOC;
-        goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup;
-    }
-
-    ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Assigning Pool name buffer");
-
-    memset((void *)pool->poolName, 0,VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE);
-    if (strlen((const char *)poolName) < VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE) {
-        memcpy((void *)pool->poolName, (void *)poolName,
-            strlen((const char *)poolName));
-    }
-    pool->NB = nbBuffers;
-
-VIDEOEDITOR_BUFFER_allocatePool_Cleanup:
-    if(M4NO_ERROR != lerr && M4OSA_NULL != pool)
-    {
-        VIDEOEDITOR_SAFE_FREE(pool->pNXPBuffer);
-        VIDEOEDITOR_SAFE_FREE(pool->poolName);
-        VIDEOEDITOR_SAFE_FREE(pool);
-    }
-    *ppool = pool;
-    ALOGV("VIDEOEDITOR_BUFFER_allocatePool END");
-
-    return lerr;
-}
-
-/**
- ************************************************************************
- M4OSA_ERR VIDEOEDITOR_BUFFER_freePool_Ext(VIDEOEDITOR_BUFFER_Pool* ppool)
- * @brief Deallocate a buffer pool
- *
- * @param ppool : IN The buffer pool to free
- * @return Error code
- ************************************************************************
-*/
-M4OSA_ERR VIDEOEDITOR_BUFFER_freePool_Ext(VIDEOEDITOR_BUFFER_Pool* ppool)
-{
-    M4OSA_ERR err;
-    M4OSA_UInt32 j = 0;
-
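    // A condensed sketch of the lifecycle these routines implement, as a
    // caller such as the decoder shell would run it (the buffer count and
    // pool name are illustrative; lSize is currently ignored by the _Ext
    // initializer, which only marks every slot kEmpty):
    //
    //     VIDEOEDITOR_BUFFER_Pool* pool = M4OSA_NULL;
    //     if (M4NO_ERROR == VIDEOEDITOR_BUFFER_allocatePool(&pool, 4,
    //             (M4OSA_Char*)"decOutputPool")) {
    //         VIDEOEDITOR_BUFFER_initPoolBuffers_Ext(pool, 0);
    //         // ... produce and consume through the getBuffer family ...
    //         VIDEOEDITOR_BUFFER_freePool_Ext(pool); // releases retained MediaBuffers
    //     }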
ALOGV("VIDEOEDITOR_BUFFER_freePool_Ext : ppool = 0x%x", ppool); - - err = M4NO_ERROR; - - for (j = 0; j < ppool->NB; j++) - { - if(M4OSA_NULL != ppool->pNXPBuffer[j].mBuffer) - { - ppool->pNXPBuffer[j].mBuffer->release(); - ppool->pNXPBuffer[j].state = VIDEOEDITOR_BUFFER_kEmpty; - ppool->pNXPBuffer[j].mBuffer = M4OSA_NULL; - ppool->pNXPBuffer[j].size = 0; - ppool->pNXPBuffer[j].buffCTS = -1; - } - } - - if(ppool != M4OSA_NULL) - { - SAFE_FREE(ppool->pNXPBuffer); - SAFE_FREE(ppool->poolName); - SAFE_FREE(ppool); - } - - return(err); -} - -/** - ************************************************************************ - M4OSA_ERR VIDEOEDITOR_BUFFER_getBuffer(VIDEOEDITOR_BUFFER_Pool* ppool, - * VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) - * @brief Returns a buffer in a given state - * - * @param ppool : IN The buffer pool - * @param desiredState : IN The buffer state - * @param pNXPBuffer : IN The selected buffer - * @return Error code - ************************************************************************ -*/ -M4OSA_ERR VIDEOEDITOR_BUFFER_getBuffer(VIDEOEDITOR_BUFFER_Pool* ppool, - VIDEOEDITOR_BUFFER_State desiredState, - VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) -{ - M4OSA_ERR err = M4NO_ERROR; - M4OSA_Bool bFound = M4OSA_FALSE; - M4OSA_UInt32 i, ibuf; - - ALOGV("VIDEOEDITOR_BUFFER_getBuffer from %s in state=%d", - ppool->poolName, desiredState); - - ibuf = 0; - - for (i=0; i < ppool->NB; i++) - { - bFound = (ppool->pNXPBuffer[i].state == desiredState); - if (bFound) - { - ibuf = i; - break; - } - } - - if(!bFound) - { - ALOGV("VIDEOEDITOR_BUFFER_getBuffer No buffer available in state %d", - desiredState); - *pNXPBuffer = M4OSA_NULL; - return M4ERR_NO_BUFFER_AVAILABLE; - } - - /* case where a buffer has been found */ - *pNXPBuffer = &(ppool->pNXPBuffer[ibuf]); - - ALOGV("VIDEOEDITOR_BUFFER_getBuffer: idx = %d", ibuf); - - return(err); -} - -/** - ************************************************************************ - void VIDEOEDITOR_BUFFER_getBufferForDecoder(VIDEOEDITOR_BUFFER_Pool* ppool, - * VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) - * @brief Returns a buffer in a given state - * - * @param ppool : IN The buffer pool - * @param desiredState : IN The buffer state - * @param pNXPBuffer : IN The selected buffer - * @return Error code - ************************************************************************ -*/ -void VIDEOEDITOR_BUFFER_getBufferForDecoder(VIDEOEDITOR_BUFFER_Pool* ppool) -{ - M4OSA_Bool bFound = M4OSA_FALSE; - M4OSA_UInt32 i, ibuf; - M4_MediaTime candidateTimeStamp = (M4_MediaTime)0x7ffffff; - ibuf = 0; - - for (i=0; i < ppool->NB; i++) - { - bFound = (ppool->pNXPBuffer[i].state == VIDEOEDITOR_BUFFER_kEmpty); - if (bFound) - { - break; - } - } - - if(!bFound) - { - for(i = 0; i< ppool->NB; i++) - { - if(ppool->pNXPBuffer[i].state == VIDEOEDITOR_BUFFER_kFilled) - { - if(ppool->pNXPBuffer[i].buffCTS <= candidateTimeStamp) - { - bFound = M4OSA_TRUE; - candidateTimeStamp = ppool->pNXPBuffer[i].buffCTS; - ibuf = i; - } - } - } - - if(M4OSA_TRUE == bFound) - { - if(M4OSA_NULL != ppool->pNXPBuffer[ibuf].mBuffer) { - ppool->pNXPBuffer[ibuf].mBuffer->release(); - ppool->pNXPBuffer[ibuf].state = VIDEOEDITOR_BUFFER_kEmpty; - ppool->pNXPBuffer[ibuf].mBuffer = M4OSA_NULL; - ppool->pNXPBuffer[ibuf].size = 0; - ppool->pNXPBuffer[ibuf].buffCTS = -1; - } - } - - } - -} -M4OSA_ERR VIDEOEDITOR_BUFFER_initPoolBuffers_Ext(VIDEOEDITOR_BUFFER_Pool* pool, - M4OSA_UInt32 lSize) -{ - M4OSA_ERR err = M4NO_ERROR; - M4OSA_UInt32 index, i, j; - - /** - * Initialize all the buffers in the pool */ - 
for(index = 0; index < pool->NB; index++) - { - pool->pNXPBuffer[index].mBuffer = M4OSA_NULL; - pool->pNXPBuffer[index].size = 0; - pool->pNXPBuffer[index].state = VIDEOEDITOR_BUFFER_kEmpty; - pool->pNXPBuffer[index].idx = index; - pool->pNXPBuffer[index].buffCTS = -1; - } - return err; -} - -M4OSA_ERR VIDEOEDITOR_BUFFER_getOldestBuffer(VIDEOEDITOR_BUFFER_Pool *pool, - VIDEOEDITOR_BUFFER_State desiredState, - VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) -{ - M4OSA_ERR err = M4NO_ERROR; - M4OSA_UInt32 index, j; - M4_MediaTime candidateTimeStamp = (M4_MediaTime)0x7ffffff; - M4OSA_Bool bFound = M4OSA_FALSE; - - *pNXPBuffer = M4OSA_NULL; - for(index = 0; index< pool->NB; index++) - { - if(pool->pNXPBuffer[index].state == desiredState) - { - if(pool->pNXPBuffer[index].buffCTS <= candidateTimeStamp) - { - bFound = M4OSA_TRUE; - candidateTimeStamp = pool->pNXPBuffer[index].buffCTS; - *pNXPBuffer = &(pool->pNXPBuffer[index]); - } - } - } - if(M4OSA_FALSE == bFound) - { - ALOGV("VIDEOEDITOR_BUFFER_getOldestBuffer WARNING no buffer available"); - err = M4ERR_NO_BUFFER_AVAILABLE; - } - return err; -} diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.h b/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.h deleted file mode 100644 index b2b510f..0000000 --- a/frameworks/videoedit/stagefrightshells/VideoEditorBuffer.h +++ /dev/null @@ -1,182 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
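Between allocation and teardown, the pool behaves as a small state machine: slots cycle kEmpty -> kFilled -> kEmpty as the decoder publishes frames and the consumer drains them in timestamp order. A condensed sketch of both directions, using only the calls defined above (pool, decodedBuffer and frameCts are illustrative placeholders; error handling trimmed):

    VIDEOEDITOR_BUFFER_Buffer* slot = M4OSA_NULL;

    // Producer: take an empty slot, attach the decoded MediaBuffer,
    // stamp it and publish it.
    if (M4NO_ERROR == VIDEOEDITOR_BUFFER_getBuffer(pool,
            VIDEOEDITOR_BUFFER_kEmpty, &slot)) {
        slot->mBuffer = decodedBuffer; // MediaBuffer* owned by the slot from now on
        slot->buffCTS = frameCts;
        slot->state   = VIDEOEDITOR_BUFFER_kFilled;
    }

    // Consumer: render in presentation order by always asking for the
    // oldest filled slot, then recycle it.
    if (M4NO_ERROR == VIDEOEDITOR_BUFFER_getOldestBuffer(pool,
            VIDEOEDITOR_BUFFER_kFilled, &slot)) {
        // ... render slot->mBuffer ...
        slot->mBuffer->release();
        slot->mBuffer = M4OSA_NULL;
        slot->buffCTS = -1;
        slot->state   = VIDEOEDITOR_BUFFER_kEmpty;
    }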
- */
-
-/**
-*************************************************************************
-* @file VideoEditorBuffer.h
-* @brief StageFright shell Buffer
-*************************************************************************
-*/
-#ifndef VIDEOEDITOR_BUFFER_H
-#define VIDEOEDITOR_BUFFER_H
-
-#include "M4OSA_Types.h"
-#include "M4OSA_Debug.h"
-#include "M4OSA_Memory.h"
-#include "M4OSA_CharStar.h"
-#include "M4_Utils.h"
-
-#include "LV_Macros.h"
-
-#include <media/stagefright/MediaBuffer.h>
-using namespace android;
-
-/*--- Core id for VIDEOEDITOR Buffer allocations ---*/
-#define VIDEOEDITOR_BUFFER_EXTERNAL 0x012F
-
-/* ----- errors -----*/
-#define M4ERR_NO_BUFFER_AVAILABLE \
-    M4OSA_ERR_CREATE(M4_ERR,VIDEOEDITOR_BUFFER_EXTERNAL,0x000001)
-#define M4ERR_NO_BUFFER_MATCH \
-    M4OSA_ERR_CREATE(M4_ERR,VIDEOEDITOR_BUFFER_EXTERNAL,0x000002)
-
-typedef enum {
-    VIDEOEDITOR_BUFFER_kEmpty = 0,
-    VIDEOEDITOR_BUFFER_kFilled,
-} VIDEOEDITOR_BUFFER_State;
-
-/**
- ************************************************************************
- * Structure VIDEOEDITOR_BUFFER_Buffer
- * @brief One OMX Buffer and data related to it
- ************************************************************************
-*/
-typedef struct {
-    MediaBuffer* mBuffer;           /**< Pointer to the data for intel platform*/
-    M4OSA_UInt32 size;
-    VIDEOEDITOR_BUFFER_State state; /**< Buffer state */
-    M4OSA_UInt32 idx;               /**< Index of the buffer inside the pool */
-    M4_MediaTime buffCTS;           /**< Time stamp of the buffer */
-} VIDEOEDITOR_BUFFER_Buffer;
-
-/**
- ************************************************************************
- * Structure VIDEOEDITOR_BUFFER_Pool
- * @brief Structure to manage buffers
- ************************************************************************
-*/
-typedef struct {
-    VIDEOEDITOR_BUFFER_Buffer* pNXPBuffer;
-    M4OSA_UInt32 NB;
-    M4OSA_Char* poolName;
-} VIDEOEDITOR_BUFFER_Pool;
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif //__cplusplus
-
-/**
- ************************************************************************
- M4OSA_ERR VIDEOEDITOR_BUFFER_allocatePool(VIDEOEDITOR_BUFFER_Pool** ppool,
- *     M4OSA_UInt32 nbBuffers, M4OSA_Char* poolName)
- * @brief Allocate a pool of nbBuffers buffers
- *
- * @param ppool : IN The buffer pool to create
- * @param nbBuffers : IN The number of buffers in the pool
- * @param poolName : IN a name given to the pool
- * @return Error code
- ************************************************************************
-*/
-M4OSA_ERR VIDEOEDITOR_BUFFER_allocatePool(VIDEOEDITOR_BUFFER_Pool** ppool,
-    M4OSA_UInt32 nbBuffers, M4OSA_Char* poolName);
-
-/**
- ************************************************************************
- M4OSA_ERR VIDEOEDITOR_BUFFER_freePool(VIDEOEDITOR_BUFFER_Pool* ppool)
- * @brief Deallocate a buffer pool
- *
- * @param ppool : IN The buffer pool to free
- * @return Error code
- ************************************************************************
-*/
-M4OSA_ERR VIDEOEDITOR_BUFFER_freePool(VIDEOEDITOR_BUFFER_Pool* ppool);
-
-/**
- ************************************************************************
-M4OSA_ERR VIDEOEDITOR_BUFFER_freePool_Ext(VIDEOEDITOR_BUFFER_Pool* ppool)
- * @brief Deallocate a buffer pool
- *
- * @param ppool : IN The buffer pool to free
- * @return Error code
- ************************************************************************
-*/
-M4OSA_ERR VIDEOEDITOR_BUFFER_freePool_Ext(VIDEOEDITOR_BUFFER_Pool* ppool);
-
-/**
- ************************************************************************
- M4OSA_ERR VIDEOEDITOR_BUFFER_getBuffer(VIDEOEDITOR_BUFFER_Pool* ppool,
- *     VIDEOEDITOR_BUFFER_State desiredState,
- *     VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer)
- *
@brief Returns a buffer in a given state - * - * @param ppool : IN The buffer pool - * @param desiredState : IN The buffer state - * @param pNXPBuffer : IN The selected buffer - * @return Error code - ************************************************************************ -*/ -M4OSA_ERR VIDEOEDITOR_BUFFER_getBuffer(VIDEOEDITOR_BUFFER_Pool* ppool, - VIDEOEDITOR_BUFFER_State desiredState, - VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer); - -/** - ************************************************************************ - void VIDEOEDITOR_BUFFER_getBufferForDecoder(VIDEOEDITOR_BUFFER_Pool* ppool, - * VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer) - * @brief Make sure there are buffers for decoder - * - * @param ppool : IN The buffer pool - * @param desiredState : IN The buffer state - * @return Error code - ************************************************************************ -*/ -void VIDEOEDITOR_BUFFER_getBufferForDecoder(VIDEOEDITOR_BUFFER_Pool* ppool); - -M4OSA_ERR VIDEOEDITOR_BUFFER_initPoolBuffers(VIDEOEDITOR_BUFFER_Pool* ppool, - M4OSA_UInt32 lSize); - -M4OSA_ERR VIDEOEDITOR_BUFFER_initPoolBuffers_Ext(VIDEOEDITOR_BUFFER_Pool* ppool, - M4OSA_UInt32 lSize); - - -M4OSA_ERR VIDEOEDITOR_BUFFER_getOldestBuffer(VIDEOEDITOR_BUFFER_Pool *pool, - VIDEOEDITOR_BUFFER_State desiredState, - VIDEOEDITOR_BUFFER_Buffer** pNXPBuffer); - -#ifdef __cplusplus -} -#endif //__cplusplus -#endif /*VIDEOEDITOR_BUFFER_H*/ - diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorMp3Reader.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorMp3Reader.cpp deleted file mode 100644 index 815c242..0000000 --- a/frameworks/videoedit/stagefrightshells/VideoEditorMp3Reader.cpp +++ /dev/null @@ -1,823 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
-*************************************************************************
-* @file VideoEditorMp3Reader.cpp
-* @brief StageFright shell MP3 Reader
-*************************************************************************
-*/
-#define LOG_NDEBUG 1
-#define LOG_TAG "VIDEOEDITOR_MP3READER"
-
-/**
- * HEADERS
- *
- */
-#include "M4OSA_Debug.h"
-#include "M4SYS_AccessUnit.h"
-#include "VideoEditorMp3Reader.h"
-#include "VideoEditorUtils.h"
-
-#include "utils/Log.h"
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/FileSource.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MetaData.h>
-
-/**
- * SOURCE CLASS
- */
-
-namespace android {
-/**
- * ENGINE INTERFACE
- */
-
-/**
- **************************************************************************
- * structure VideoEditorMp3Reader_Context
- * @brief This structure defines the context of the SF MP3 reader shell.
- **************************************************************************
- */
-typedef struct {
-    sp<DataSource> mDataSource;
-    sp<MediaExtractor> mExtractor;
-    sp<MediaSource> mMediaSource;
-    M4_AudioStreamHandler* mAudioStreamHandler;
-    M4SYS_AccessUnit mAudioAu;
-    M4OSA_Time mMaxDuration;
-    M4OSA_UInt8 mStreamNumber;
-    M4OSA_Bool mSeeking;
-    M4OSA_Time mSeekTime;
-    uint32_t mExtractorFlags;
-} VideoEditorMp3Reader_Context;
-
-/**
- ****************************************************************************
- * @brief create an instance of the MP3 reader
- * @note allocates the context
- *
- * @param pContext: (OUT) pointer on a reader context
- *
- * @return M4NO_ERROR there is no error
- * @return M4ERR_ALLOC a memory allocation has failed
- * @return M4ERR_PARAMETER at least one parameter is not valid
- ****************************************************************************
-*/
-M4OSA_ERR VideoEditorMp3Reader_create(M4OSA_Context *pContext) {
-    M4OSA_ERR err = M4NO_ERROR;
-    VideoEditorMp3Reader_Context *pReaderContext = M4OSA_NULL;
-
-    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
-
-    ALOGV("VideoEditorMp3Reader_create begin");
-
-    /* Context allocation & initialization */
-    SAFE_MALLOC(pReaderContext, VideoEditorMp3Reader_Context, 1,
-        "VideoEditorMp3Reader");
-
-    pReaderContext->mAudioStreamHandler = M4OSA_NULL;
-    pReaderContext->mAudioAu.dataAddress = M4OSA_NULL;
-    pReaderContext->mMaxDuration = 0;
-    *pContext = pReaderContext;
-
-cleanUp:
-    if (M4NO_ERROR == err) {
-        ALOGV("VideoEditorMp3Reader_create no error");
-    } else {
-        ALOGV("VideoEditorMp3Reader_create ERROR 0x%X", err);
-    }
-    ALOGV("VideoEditorMp3Reader_create end");
-    return err;
-}
-
-/**
- *******************************************************************************
- * @brief destroy the instance of the MP3 reader
- * @note after this call the context is invalid
- * @param context: (IN) Context of the reader
- * @return M4NO_ERROR there is no error
- * @return M4ERR_PARAMETER The input parameter is not properly set
- *******************************************************************************
-*/
-M4OSA_ERR VideoEditorMp3Reader_destroy(M4OSA_Context pContext) {
-    M4OSA_ERR err = M4NO_ERROR;
-    VideoEditorMp3Reader_Context *pReaderContext =
-        (VideoEditorMp3Reader_Context*)pContext;
-
-    VIDEOEDITOR_CHECK(M4OSA_NULL != pReaderContext, M4ERR_PARAMETER);
-    ALOGV("VideoEditorMp3Reader_destroy begin");
-
-    SAFE_FREE(pReaderContext);
-cleanUp:
-    if (M4NO_ERROR == err) {
-        ALOGV("VideoEditorMp3Reader_destroy no error");
-    } else {
-        ALOGV("VideoEditorMp3Reader_destroy ERROR 0x%X", err);
-    }
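    // The VIDEOEDITOR_CHECK()/cleanUp idiom used throughout these shells:
    // on a failed test the macro records the error code and jumps, giving
    // every function a single exit path. Presumed shape of the macro (it
    // lives in VideoEditorUtils.h, which these files include, not here):
    //
    //     #define VIDEOEDITOR_CHECK(test, errCode) \
    //     { \
    //         if (!(test)) { \
    //             err = (errCode); \
    //             goto cleanUp; \
    //         } \
    //     }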
ALOGV("VideoEditorMp3Reader_destroy end"); - return err; -} -/** - ****************************************************************************** - * @brief open the reader and initializes its created instance - * @note this function opens the MP3 file - * @param context: (IN) Context of the reader - * @param pFileDescriptor: (IN) Pointer to proprietary data identifying - * the media to open - - * @return M4NO_ERROR there is no error - * @return M4ERR_PARAMETER the context is NULL - * @return M4ERR_BAD_CONTEXT provided context is not a valid one - * @return M4ERR_UNSUPPORTED_MEDIA_TYPE the media is DRM protected - ****************************************************************************** -*/ -M4OSA_ERR VideoEditorMp3Reader_open(M4OSA_Context context, - M4OSA_Void* pFileDescriptor){ - VideoEditorMp3Reader_Context *pReaderContext = - (VideoEditorMp3Reader_Context*)context; - M4OSA_ERR err = M4NO_ERROR; - - ALOGV("VideoEditorMp3Reader_open begin"); - /* Check function parameters*/ - M4OSA_DEBUG_IF1((M4OSA_NULL == pReaderContext), M4ERR_PARAMETER, - "VideoEditorMp3Reader_open: invalid context pointer"); - M4OSA_DEBUG_IF1((M4OSA_NULL == pFileDescriptor), M4ERR_PARAMETER, - "VideoEditorMp3Reader_open: invalid pointer pFileDescriptor"); - - ALOGV("VideoEditorMp3Reader_open Datasource start %s", - (char*)pFileDescriptor); - pReaderContext->mDataSource = new FileSource ((char*)pFileDescriptor); - ALOGV("VideoEditorMp3Reader_open Datasource end"); - - if (pReaderContext->mDataSource == NULL) { - ALOGV("VideoEditorMp3Reader_open Datasource error"); - return UNKNOWN_ERROR; - } - - ALOGV("VideoEditorMp3Reader_open extractor start"); - pReaderContext->mExtractor = MediaExtractor::Create( - pReaderContext->mDataSource,MEDIA_MIMETYPE_AUDIO_MPEG); - ALOGV("VideoEditorMp3Reader_open extractor end"); - - if (pReaderContext->mExtractor == NULL) { - ALOGV("VideoEditorMp3Reader_open extractor error"); - return UNKNOWN_ERROR; - } - pReaderContext->mStreamNumber = 0; - - int32_t isDRMProtected = 0; - sp meta = pReaderContext->mExtractor->getMetaData(); - meta->findInt32(kKeyIsDRM, &isDRMProtected); - if (isDRMProtected) { - ALOGV("VideoEditorMp3Reader_open error - DRM Protected"); - return M4ERR_UNSUPPORTED_MEDIA_TYPE; - } - - ALOGV("VideoEditorMp3Reader_open end"); - return err; -} -/** - ************************************************************************** - * @brief close the reader - * @note this function closes the MP3 reader - * @param context: (IN) Context of the reader - * @return M4NO_ERROR there is no error - * @return M4ERR_PARAMETER the context is NULL - ************************************************************************** -*/ -M4OSA_ERR VideoEditorMp3Reader_close(M4OSA_Context context) { - VideoEditorMp3Reader_Context *pReaderContext = - (VideoEditorMp3Reader_Context*)context; - M4OSA_ERR err = M4NO_ERROR; - - ALOGV("VideoEditorMp3Reader_close begin"); - /* Check function parameters */ - M4OSA_DEBUG_IF1((M4OSA_NULL == pReaderContext), M4ERR_PARAMETER, - "VideoEditorMp3Reader_close: invalid context pointer"); - - if (pReaderContext->mAudioStreamHandler != NULL) { - if (M4OSA_NULL != pReaderContext->mAudioStreamHandler->\ - m_basicProperties.m_pDecoderSpecificInfo) { - free(pReaderContext->mAudioStreamHandler->\ - m_basicProperties.m_pDecoderSpecificInfo); - pReaderContext->mAudioStreamHandler->m_basicProperties.\ - m_decoderSpecificInfoSize = 0; - pReaderContext->mAudioStreamHandler->m_basicProperties.\ - m_pDecoderSpecificInfo = M4OSA_NULL; - } - - /* Finally destroy the stream handler 
-         */
-        free(pReaderContext->mAudioStreamHandler);
-        pReaderContext->mAudioStreamHandler = M4OSA_NULL;
-
-        if (pReaderContext->mAudioAu.dataAddress != NULL) {
-            free(pReaderContext->mAudioAu.dataAddress);
-            pReaderContext->mAudioAu.dataAddress = NULL;
-        }
-    }
-
-    pReaderContext->mMediaSource->stop();
-    pReaderContext->mMediaSource.clear();
-    pReaderContext->mExtractor.clear();
-    pReaderContext->mDataSource.clear();
-
-    ALOGV("VideoEditorMp3Reader_close end ");
-    return err;
-}
-/**
- ******************************************************************************
- * @brief get an option value from the reader
- * @note
- *          it allows the caller to retrieve a property value:
- *
- * @param context: (IN) Context of the reader
- * @param optionId: (IN) indicates the option to get
- * @param pValue: (OUT) pointer to structure or value (allocated
- *                by user) where option is stored
- *
- * @return M4NO_ERROR there is no error
- * @return M4ERR_PARAMETER at least one parameter is not properly set
- * @return M4ERR_BAD_OPTION_ID when the option ID is not a valid one
- ******************************************************************************
-*/
-M4OSA_ERR VideoEditorMp3Reader_getOption(M4OSA_Context context,
-        M4OSA_OptionID optionId, M4OSA_DataOption pValue) {
-    VideoEditorMp3Reader_Context *pReaderContext =
-        (VideoEditorMp3Reader_Context*)context;
-    M4OSA_ERR err = M4NO_ERROR;
-
-    ALOGV("VideoEditorMp3Reader_getOption begin: optionId= %d ",(int)optionId);
-
-    M4OSA_DEBUG_IF1((M4OSA_NULL == pReaderContext), M4ERR_PARAMETER,
-        "invalid context pointer");
-    M4OSA_DEBUG_IF1((M4OSA_NULL == pValue), M4ERR_PARAMETER,
-        "invalid value pointer");
-
-    switch(optionId) {
-    case M4READER_kOptionID_Duration:
-        {
-            ALOGV("Mp3Reader duration=%ld",pReaderContext->mMaxDuration);
-            *(M4OSA_Time*)pValue = pReaderContext->mMaxDuration;
-        }
-        break;
-
-    case M4READER_kOptionID_Bitrate:
-        {
-            M4OSA_UInt32* pBitrate = (M4OSA_UInt32*)pValue;
-            if (M4OSA_NULL != pReaderContext->mAudioStreamHandler) {
-                *pBitrate = pReaderContext->mAudioStreamHandler->\
-                    m_basicProperties.m_averageBitRate;
-            } else {
-                *pBitrate = 0;
-                err = M4ERR_PARAMETER;
-            }
-        }
-        break;
-
-    case M4READER_kOptionID_Mp3Id3v1Tag:
-        break;
-
-    case M4READER_kOptionID_Mp3Id3v2Tag:
-        break;
-
-    case M4READER_kOptionID_GetMetadata:
-        break;
-
-    default :
-        {
-            ALOGV("VideoEditorMp3Reader_getOption: M4ERR_BAD_OPTION_ID");
-            err = M4ERR_BAD_OPTION_ID;
-        }
-    }
-    ALOGV("VideoEditorMp3Reader_getOption end ");
-    return err;
-}
-/**
- ******************************************************************************
- * @brief set an option value of the reader
- * @note
- *          it allows the caller to set a property value:
- *
- * @param context: (IN) Context of the reader
- * @param optionId: (IN) Identifier indicating the option to set
- * @param pValue: (IN) Pointer to structure or value (allocated
- *                by user) where option is stored
- *
- * @return M4NO_ERROR There is no error
- * @return M4ERR_BAD_OPTION_ID The option ID is not a valid one
- * @return M4ERR_STATE State automaton is not applied
- * @return M4ERR_PARAMETER The option parameter is invalid
- ******************************************************************************
-*/
-M4OSA_ERR VideoEditorMp3Reader_setOption(M4OSA_Context context,
-        M4OSA_OptionID optionId, M4OSA_DataOption pValue) {
-    VideoEditorMp3Reader_Context *pReaderContext =
-        (VideoEditorMp3Reader_Context*)context;
-    M4OSA_ERR err = M4NO_ERROR;
-
-    ALOGV("VideoEditorMp3Reader_Context begin: optionId: %d Value: %d ",
-
(int)optionId,(int)pValue); - - M4OSA_DEBUG_IF1((M4OSA_NULL == pReaderContext), M4ERR_PARAMETER, - "invalid context pointer"); - M4OSA_DEBUG_IF1((M4OSA_NULL == pValue), M4ERR_PARAMETER, - "invalid value pointer"); - - switch(optionId) { - case M4READER_kOptionID_SetOsaFileReaderFctsPtr: - default : - { - err = M4NO_ERROR; - } - } - ALOGV("VideoEditorMp3Reader_Context end "); - return err; -} -/** - ****************************************************************************** - * @brief jump into the stream at the specified time - * @note - * @param context: (IN) Context of the reader - * @param pStreamHandler(IN) stream description of the stream to make jump - * @param pTime (I/O)IN:the time to jump to (in ms) - * OUT: the time to which the stream really jumped - * @return M4NO_ERROR there is no error - * @return M4ERR_PARAMETER at least one parameter is not properly set - ****************************************************************************** -*/ -M4OSA_ERR VideoEditorMp3Reader_jump(M4OSA_Context context, - M4_StreamHandler *pStreamHandler, M4OSA_Int32* pTime) { - VideoEditorMp3Reader_Context *pReaderContext = - (VideoEditorMp3Reader_Context*)context; - M4SYS_StreamID streamIdArray[2]; - M4OSA_ERR err = M4NO_ERROR; - M4SYS_AccessUnit* pAu; - M4OSA_Time time64 = (M4OSA_Time)*pTime; - - ALOGV("VideoEditorMp3Reader_jump begin"); - M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER, - "VideoEditorMp3Reader_jump: invalid context"); - M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER, - "VideoEditorMp3Reader_jump: invalid pointer to M4_StreamHandler"); - M4OSA_DEBUG_IF1((pTime == 0), M4ERR_PARAMETER, - "VideoEditorMp3Reader_jump: invalid time pointer"); - - if(pStreamHandler == (M4_StreamHandler*)pReaderContext->\ - mAudioStreamHandler){ - pAu = &pReaderContext->mAudioAu; - } else { - ALOGV("VideoEditorMp3Reader_jump: passed StreamHandler is not known"); - return M4ERR_PARAMETER; - } - - streamIdArray[0] = pStreamHandler->m_streamId; - streamIdArray[1] = 0; - - ALOGV("VideoEditorMp3Reader_jump time ms %ld ", time64); - - pAu->CTS = time64; - pAu->DTS = time64; - - time64 = time64 * 1000; /* Convert the time into micro sec */ - ALOGV("VideoEditorMp3Reader_jump time us %ld ", time64); - - pReaderContext->mSeeking = M4OSA_TRUE; - pReaderContext->mSeekTime = time64; - - time64 = time64 / 1000; /* Convert the time into milli sec */ - *pTime = (M4OSA_Int32)time64; - ALOGV("VideoEditorMp3Reader_jump end "); - return err; -} -/** - ******************************************************************************* - * @brief Get the next stream found in the media file - * - * @param context: (IN) Context of the reader - * @param pMediaFamily: (OUT) pointer to a user allocated - * M4READER_MediaFamily that will be filled with - * the media family of the found stream - * @param pStreamHandler: (OUT) pointer to a stream handler that will be - * allocated and filled with stream description - * - * @return M4NO_ERROR there is no error - * @return M4WAR_NO_MORE_STREAM no more available stream in the media - * @return M4ERR_PARAMETER at least one parameter is not properly set - ******************************************************************************* -*/ -M4OSA_ERR VideoEditorMp3Reader_getNextStream(M4OSA_Context context, - M4READER_MediaFamily *pMediaFamily, - M4_StreamHandler **pStreamHandlerParam) { - VideoEditorMp3Reader_Context *pReaderContext = - (VideoEditorMp3Reader_Context*)context; - M4OSA_ERR err = M4NO_ERROR; - M4SYS_StreamID streamIdArray[2]; - M4SYS_StreamDescription 
streamDesc;
-    M4_AudioStreamHandler* pAudioStreamHandler;
-    M4_StreamHandler* pStreamHandler;
-    M4OSA_UInt8 type, temp;
-    M4OSA_Bool haveAudio = M4OSA_FALSE;
-    sp<MetaData> meta = NULL;
-    int64_t Duration;
-
-    ALOGV("VideoEditorMp3Reader_getNextStream begin");
-    M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER,
-        "VideoEditorMp3Reader_getNextStream: invalid context");
-    M4OSA_DEBUG_IF1((pMediaFamily == 0), M4ERR_PARAMETER,
-        "VideoEditorMp3Reader_getNextStream: invalid pointer to MediaFamily");
-    M4OSA_DEBUG_IF1((pStreamHandlerParam == 0), M4ERR_PARAMETER,
-        "VideoEditorMp3Reader_getNextStream: invalid pointer to StreamHandler");
-
-    ALOGV("VideoEditorMp3Reader_getNextStream stream number = %d",
-        pReaderContext->mStreamNumber);
-    if (pReaderContext->mStreamNumber >= 1) {
-        ALOGV("VideoEditorMp3Reader_getNextStream max number of stream reached");
-        return M4WAR_NO_MORE_STREAM;
-    }
-    pReaderContext->mStreamNumber = pReaderContext->mStreamNumber + 1;
-    ALOGV("VideoEditorMp3Reader_getNextStream number of Tracks %d",
-        pReaderContext->mExtractor->countTracks());
-    for (temp = 0; temp < pReaderContext->mExtractor->countTracks(); temp++) {
-        meta = pReaderContext->mExtractor->getTrackMetaData(temp);
-        const char *mime;
-        CHECK(meta->findCString(kKeyMIMEType, &mime));
-
-        if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
-            pReaderContext->mMediaSource =
-                pReaderContext->mExtractor->getTrack(temp);
-            pReaderContext->mMediaSource->start();
-            haveAudio = true;
-        }
-
-        if (haveAudio) {
-            break;
-        }
-    }
-
-    if (!haveAudio) {
-        ALOGV("VideoEditorMp3Reader_getNextStream no more stream ");
-        pReaderContext->mDataSource.clear();
-        return M4WAR_NO_MORE_STREAM;
-    }
-
-    pReaderContext->mExtractorFlags = pReaderContext->mExtractor->flags();
-    *pMediaFamily = M4READER_kMediaFamilyAudio;
-
-    meta->findInt64(kKeyDuration, &Duration);
-    streamDesc.duration = (M4OSA_Time)Duration/1000;
-
-    meta->findInt32(kKeyBitRate, (int32_t*)&streamDesc.averageBitrate);
-    meta->findInt32(kKeySampleRate, (int32_t*)&streamDesc.timeScale);
-    ALOGV("Bitrate = %d, SampleRate = %d duration = %lld",
-        streamDesc.averageBitrate,streamDesc.timeScale,Duration/1000);
-
-    streamDesc.streamType = M4SYS_kMP3;
-    streamDesc.profileLevel = 0xFF ;
-    streamDesc.streamID = pReaderContext->mStreamNumber;
-    streamDesc.decoderSpecificInfo = M4OSA_NULL;
-    streamDesc.decoderSpecificInfoSize = 0;
-    streamDesc.maxBitrate = streamDesc.averageBitrate;
-
-    /* Allocate the audio stream handler and set its parameters */
-    pAudioStreamHandler = (M4_AudioStreamHandler*)M4OSA_32bitAlignedMalloc(
-        sizeof(M4_AudioStreamHandler), M4READER_MP3,
-        (M4OSA_Char*)"M4_AudioStreamHandler");
-
-    if (pAudioStreamHandler == M4OSA_NULL) {
-        ALOGV("VideoEditorMp3Reader_getNextStream malloc failed");
-        pReaderContext->mMediaSource->stop();
-        pReaderContext->mMediaSource.clear();
-        pReaderContext->mDataSource.clear();
-
-        return M4ERR_ALLOC;
-    }
-    pStreamHandler =(M4_StreamHandler*)(pAudioStreamHandler);
-    *pStreamHandlerParam = pStreamHandler;
-    pReaderContext->mAudioStreamHandler = pAudioStreamHandler;
-
-    pAudioStreamHandler->m_structSize = sizeof(M4_AudioStreamHandler);
-
-    if (meta == NULL) {
-        ALOGV("VideoEditorMp3Reader_getNextStream meta is NULL");
-    }
-
-    pAudioStreamHandler->m_samplingFrequency = streamDesc.timeScale;
-    pStreamHandler->m_pDecoderSpecificInfo =
-        (M4OSA_UInt8*)(streamDesc.decoderSpecificInfo);
-    pStreamHandler->m_decoderSpecificInfoSize =
-        streamDesc.decoderSpecificInfoSize;
-
-    meta->findInt32(kKeyChannelCount,
-
(int32_t*)&pAudioStreamHandler->m_nbChannels); - pAudioStreamHandler->m_byteFrameLength = 1152; - pAudioStreamHandler->m_byteSampleSize = 2; - - pStreamHandler->m_pUserData = NULL; - pStreamHandler->m_streamId = streamDesc.streamID; - pStreamHandler->m_duration = streamDesc.duration; - pReaderContext->mMaxDuration = streamDesc.duration; - pStreamHandler->m_averageBitRate = streamDesc.averageBitrate; - - pStreamHandler->m_maxAUSize = 0; - pStreamHandler->m_streamType = M4DA_StreamTypeAudioMp3; - - ALOGV("VideoEditorMp3Reader_getNextStream end "); - return err; -} - -/** - ******************************************************************************* - * @brief fill the access unit structure with initialization values - * @param context: (IN) Context of the reader - * @param pStreamHandler: (IN) pointer to the stream handler to which - * the access unit will be associated - * @param pAccessUnit: (IN/OUT) pointer to the access unit (allocated by - * the caller) to initialize - * @return M4NO_ERROR there is no error - * @return M4ERR_PARAMETER at least one parameter is not properly set - ******************************************************************************* -*/ -M4OSA_ERR VideoEditorMp3Reader_fillAuStruct(M4OSA_Context context, - M4_StreamHandler *pStreamHandler, M4_AccessUnit *pAccessUnit) { - VideoEditorMp3Reader_Context *pReaderContext = - (VideoEditorMp3Reader_Context*)context; - M4SYS_AccessUnit *pAu; - - M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER, - "VideoEditorMp3Reader_fillAuStruct: invalid context"); - M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER, - "VideoEditorMp3Reader_fillAuStruct invalid pointer to StreamHandler"); - M4OSA_DEBUG_IF1((pAccessUnit == 0), M4ERR_PARAMETER, - "VideoEditorMp3Reader_fillAuStruct: invalid pointer to M4_AccessUnit"); - - ALOGV("VideoEditorMp3Reader_fillAuStruct start "); - if(pStreamHandler == (M4_StreamHandler*)pReaderContext->\ - mAudioStreamHandler){ - pAu = &pReaderContext->mAudioAu; - } else { - ALOGV("VideoEditorMp3Reader_fillAuStruct StreamHandler is not known"); - return M4ERR_PARAMETER; - } - - /* Initialize pAu structure */ - pAu->dataAddress = M4OSA_NULL; - pAu->size = 0; - pAu->CTS = 0; - pAu->DTS = 0; - pAu->attribute = 0; - pAu->nbFrag = 0; - - /* Initialize pAccessUnit structure */ - pAccessUnit->m_size = 0; - pAccessUnit->m_CTS = 0; - pAccessUnit->m_DTS = 0; - pAccessUnit->m_attribute = 0; - pAccessUnit->m_dataAddress = M4OSA_NULL; - pAccessUnit->m_maxsize = pStreamHandler->m_maxAUSize; - pAccessUnit->m_streamID = pStreamHandler->m_streamId; - pAccessUnit->m_structSize = sizeof(M4_AccessUnit); - - ALOGV("VideoEditorMp3Reader_fillAuStruct end"); - return M4NO_ERROR; -} - -/** - ******************************************************************************* - * @brief reset the stream, i.e seek it to the beginning - * @note - * @param context: (IN) Context of the reader - * @param pStreamHandler (IN) The stream handler of the stream to reset - * @return M4NO_ERROR there is no error - * @return M4ERR_PARAMETER at least one parameter is not properly set - ******************************************************************************* -*/ -M4OSA_ERR VideoEditorMp3Reader_reset(M4OSA_Context context, - M4_StreamHandler *pStreamHandler) { - VideoEditorMp3Reader_Context *pReaderContext = - (VideoEditorMp3Reader_Context*)context; - - M4OSA_ERR err = M4NO_ERROR; - M4SYS_StreamID streamIdArray[2]; - M4SYS_AccessUnit* pAu; - M4OSA_Time time64 = 0; - - ALOGV("VideoEditorMp3Reader_reset start"); - 
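    // A condensed view of the deferred-seek contract shared by jump(),
    // reset() and getNextAu(): the first two only record the target
    // position, and the seek actually happens on the next read
    // (member names as in the context struct above; targetUs is
    // illustrative):
    //
    //     pReaderContext->mSeeking  = M4OSA_TRUE;   // jump()/reset()
    //     pReaderContext->mSeekTime = targetUs;     // microseconds
    //     ...
    //     MediaSource::ReadOptions options;         // getNextAu()
    //     if (pReaderContext->mSeeking) {
    //         options.setSeekTo(pReaderContext->mSeekTime);
    //     }
    //     pReaderContext->mMediaSource->read(&mAudioBuffer, &options);
    //     options.clearSeekTo();
    //     pReaderContext->mSeeking = M4OSA_FALSE;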
M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER,
-        "VideoEditorMp3Reader_reset: invalid context");
-    M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
-        "VideoEditorMp3Reader_reset: invalid pointer to M4_StreamHandler");
-
-    if (pStreamHandler == (M4_StreamHandler*)pReaderContext->\
-            mAudioStreamHandler) {
-        pAu = &pReaderContext->mAudioAu;
-    } else {
-        ALOGV("VideoEditorMp3Reader_reset StreamHandler is not known");
-        return M4ERR_PARAMETER;
-    }
-    streamIdArray[0] = pStreamHandler->m_streamId;
-    streamIdArray[1] = 0;
-    pAu->CTS = time64;
-    pAu->DTS = time64;
-
-    pReaderContext->mSeeking = M4OSA_TRUE;
-    pReaderContext->mSeekTime = time64;
-
-    ALOGV("VideoEditorMp3Reader_reset end");
-    return err;
-}
-/**
- *******************************************************************************
- * @brief Gets an access unit (AU) from the stream handler source.
- * @note AU is the smallest possible amount of data to be decoded by decoder
- *
- * @param context: (IN) Context of the reader
- * @param pStreamHandler (IN) The stream handler of the stream to make jump
- * @param pAccessUnit (I/O)Pointer to an access unit to fill with read data
- * @return M4NO_ERROR there is no error
- * @return M4ERR_PARAMETER at least one parameter is not properly set
- * @returns M4ERR_ALLOC memory allocation failed
- * @returns M4WAR_NO_MORE_AU there are no more access unit in the stream
- *******************************************************************************
-*/
-M4OSA_ERR VideoEditorMp3Reader_getNextAu(M4OSA_Context context,
-        M4_StreamHandler *pStreamHandler, M4_AccessUnit *pAccessUnit) {
-    VideoEditorMp3Reader_Context *pReaderContext =
-        (VideoEditorMp3Reader_Context*)context;
-    M4OSA_ERR err = M4NO_ERROR;
-    M4SYS_AccessUnit* pAu;
-    MediaBuffer *mAudioBuffer = NULL;
-    MediaSource::ReadOptions options;
-
-    ALOGV("VideoEditorMp3Reader_getNextAu start");
-    M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER,
-        "VideoEditorMp3Reader_getNextAu: invalid context");
-    M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
-        "VideoEditorMp3Reader_getNextAu: invalid pointer to M4_StreamHandler");
-    M4OSA_DEBUG_IF1((pAccessUnit == 0), M4ERR_PARAMETER,
-        "VideoEditorMp3Reader_getNextAu: invalid pointer to M4_AccessUnit");
-
-    if (pStreamHandler == (M4_StreamHandler*)pReaderContext->\
-            mAudioStreamHandler) {
-        pAu = &pReaderContext->mAudioAu;
-    } else {
-        ALOGV("VideoEditorMp3Reader_getNextAu: StreamHandler is not known\n");
-        return M4ERR_PARAMETER;
-    }
-
-    if (pReaderContext->mSeeking) {
-        options.setSeekTo(pReaderContext->mSeekTime);
-    }
-
-    pReaderContext->mMediaSource->read(&mAudioBuffer, &options);
-
-    if (mAudioBuffer != NULL) {
-        if ((pAu->dataAddress == NULL) ||
-            (pAu->size < mAudioBuffer->range_length())) {
-            if (pAu->dataAddress != NULL) {
-                free((M4OSA_Int32*)pAu->dataAddress);
-                pAu->dataAddress = NULL;
-            }
-            pAu->dataAddress = (M4OSA_Int32*)M4OSA_32bitAlignedMalloc(
-                (mAudioBuffer->range_length() + 3) & ~0x3,
-                M4READER_MP3, (M4OSA_Char*)"pAccessUnit->m_dataAddress" );
-
-            if (pAu->dataAddress == NULL) {
-                ALOGV("VideoEditorMp3Reader_getNextAu malloc failed");
-                pReaderContext->mMediaSource->stop();
-                pReaderContext->mMediaSource.clear();
-                pReaderContext->mDataSource.clear();
-
-                return M4ERR_ALLOC;
-            }
-        }
-        pAu->size = mAudioBuffer->range_length();
-        memcpy((M4OSA_MemAddr8)pAu->dataAddress,
-            (const char *)mAudioBuffer->data() + mAudioBuffer->range_offset(),
-            mAudioBuffer->range_length());
-
-        mAudioBuffer->meta_data()->findInt64(kKeyTime, (int64_t*)&pAu->CTS);
-
-
-        pAu->CTS = pAu->CTS
/ 1000; /*converting the microsec to millisec */ - pAu->DTS = pAu->CTS; - pAu->attribute = M4SYS_kFragAttrOk; - mAudioBuffer->release(); - - ALOGV("VideoEditorMp3Reader_getNextAu AU CTS = %ld",pAu->CTS); - - pAccessUnit->m_dataAddress = (M4OSA_Int8*) pAu->dataAddress; - pAccessUnit->m_size = pAu->size; - pAccessUnit->m_CTS = pAu->CTS; - pAccessUnit->m_DTS = pAu->DTS; - pAccessUnit->m_attribute = pAu->attribute; - } else { - ALOGV("VideoEditorMp3Reader_getNextAu EOS reached."); - pAccessUnit->m_size=0; - err = M4WAR_NO_MORE_AU; - } - pAu->nbFrag = 0; - - options.clearSeekTo(); - pReaderContext->mSeeking = M4OSA_FALSE; - mAudioBuffer = NULL; - ALOGV("VideoEditorMp3Reader_getNextAu end"); - - return err; -} - -extern "C" { - -M4OSA_ERR VideoEditorMp3Reader_getInterface( - M4READER_MediaType *pMediaType, - M4READER_GlobalInterface **pRdrGlobalInterface, - M4READER_DataInterface **pRdrDataInterface) { - M4OSA_ERR err = M4NO_ERROR; - - ALOGV("VideoEditorMp3Reader_getInterface: begin"); - /* Input parameters check */ - VIDEOEDITOR_CHECK(M4OSA_NULL != pMediaType, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pRdrGlobalInterface, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pRdrDataInterface, M4ERR_PARAMETER); - - SAFE_MALLOC(*pRdrGlobalInterface, M4READER_GlobalInterface, 1, - "VideoEditorMp3Reader_getInterface"); - SAFE_MALLOC(*pRdrDataInterface, M4READER_DataInterface, 1, - "VideoEditorMp3Reader_getInterface"); - - *pMediaType = M4READER_kMediaTypeMP3; - - (*pRdrGlobalInterface)->m_pFctCreate = VideoEditorMp3Reader_create; - (*pRdrGlobalInterface)->m_pFctDestroy = VideoEditorMp3Reader_destroy; - (*pRdrGlobalInterface)->m_pFctOpen = VideoEditorMp3Reader_open; - (*pRdrGlobalInterface)->m_pFctClose = VideoEditorMp3Reader_close; - (*pRdrGlobalInterface)->m_pFctGetOption = VideoEditorMp3Reader_getOption; - (*pRdrGlobalInterface)->m_pFctSetOption = VideoEditorMp3Reader_setOption; - (*pRdrGlobalInterface)->m_pFctGetNextStream = - VideoEditorMp3Reader_getNextStream; - (*pRdrGlobalInterface)->m_pFctFillAuStruct = - VideoEditorMp3Reader_fillAuStruct; - (*pRdrGlobalInterface)->m_pFctStart = M4OSA_NULL; - (*pRdrGlobalInterface)->m_pFctStop = M4OSA_NULL; - (*pRdrGlobalInterface)->m_pFctJump = VideoEditorMp3Reader_jump; - (*pRdrGlobalInterface)->m_pFctReset = VideoEditorMp3Reader_reset; - (*pRdrGlobalInterface)->m_pFctGetPrevRapTime = M4OSA_NULL; - - (*pRdrDataInterface)->m_pFctGetNextAu = VideoEditorMp3Reader_getNextAu; - (*pRdrDataInterface)->m_readerContext = M4OSA_NULL; - -cleanUp: - if( M4NO_ERROR == err ) - { - ALOGV("VideoEditorMp3Reader_getInterface no error"); - } - else - { - SAFE_FREE(*pRdrGlobalInterface); - SAFE_FREE(*pRdrDataInterface); - - ALOGV("VideoEditorMp3Reader_getInterface ERROR 0x%X", err); - } - ALOGV("VideoEditorMp3Reader_getInterface: end"); - return err; -} -} /* extern "C" */ -} /* namespace android */ diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp deleted file mode 100644 index 260c091..0000000 --- a/frameworks/videoedit/stagefrightshells/VideoEditorVideoDecoder.cpp +++ /dev/null @@ -1,1810 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. 
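Taken as a whole, the two tables returned by VideoEditorMp3Reader_getInterface() are the shell's entire contract. A minimal, hypothetical driver loop under that contract (the file path and the decode step are illustrative):

    M4READER_MediaType mediaType;
    M4READER_GlobalInterface* pGlobal = M4OSA_NULL;
    M4READER_DataInterface* pData = M4OSA_NULL;
    M4OSA_Context readerCtx = M4OSA_NULL;
    M4READER_MediaFamily family;
    M4_StreamHandler* pStream = M4OSA_NULL;
    M4_AccessUnit au;

    if (M4NO_ERROR == VideoEditorMp3Reader_getInterface(&mediaType,
            &pGlobal, &pData)) {
        pGlobal->m_pFctCreate(&readerCtx);
        pData->m_readerContext = readerCtx;
        if (M4NO_ERROR == pGlobal->m_pFctOpen(readerCtx,
                    (M4OSA_Void*)"/sdcard/clip.mp3") &&
            M4NO_ERROR == pGlobal->m_pFctGetNextStream(readerCtx,
                    &family, &pStream)) {
            pGlobal->m_pFctFillAuStruct(readerCtx, pStream, &au);
            // Pull AUs until the reader signals M4WAR_NO_MORE_AU.
            while (M4NO_ERROR == pData->m_pFctGetNextAu(readerCtx,
                    pStream, &au)) {
                // ... hand au.m_dataAddress / au.m_size to the decoder ...
            }
            pGlobal->m_pFctClose(readerCtx);
        }
        pGlobal->m_pFctDestroy(readerCtx);
        free(pGlobal);  // both tables are allocated by getInterface()
        free(pData);
    }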
Title to the Material remains with Intel
- * Corporation or its suppliers and licensors. The Material contains trade
- * secrets and proprietary and confidential information of Intel or its
- * suppliers and licensors. The Material is protected by worldwide copyright
- * and trade secret laws and treaty provisions. No part of the Material may
- * be used, copied, reproduced, modified, published, uploaded, posted,
- * transmitted, distributed, or disclosed in any way without Intel's prior
- * express written permission.
- *
- * No license under any patent, copyright, trade secret or other intellectual
- * property right is granted to or conferred upon you by disclosure or delivery
- * of the Materials, either expressly, by implication, inducement, estoppel or
- * otherwise. Any license under such intellectual property rights must be express
- * and approved by Intel in writing.
- ************************************************************************************/
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/**
-*************************************************************************
-* @file   VideoEditorVideoDecoder.cpp
-* @brief  StageFright shell video decoder
-*************************************************************************
-*/
-#define LOG_NDEBUG 1
-#define LOG_TAG "VIDEOEDITOR_VIDEODECODER"
-/*******************
- *     HEADERS     *
- *******************/
-
-#include "VideoEditorBuffer.h"
-#include "VideoEditorVideoDecoder_internal.h"
-#include "VideoEditorUtils.h"
-#include "M4VD_Tools.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MetaData.h>
-/********************
- *   DEFINITIONS    *
- ********************/
-#define MAX_DEC_BUFFERS 4
-
-#define THUMBNAIL_THRES 3000
-
-/********************
- *   SOURCE CLASS   *
- ********************/
-using namespace android;
-static M4OSA_ERR copyBufferToQueue(
-    VideoEditorVideoDecoder_Context* pDecShellContext,
-    MediaBuffer* pDecodedBuffer);
-
-class VideoEditorVideoDecoderSource : public MediaSource {
-    public:
-
-        VideoEditorVideoDecoderSource(
-            const sp<MetaData> &format,
-            VIDEOEDITOR_CodecType codecType,
-            void *decoderShellContext);
-
-        virtual status_t start(MetaData *params = NULL);
-        virtual status_t stop();
-        virtual sp<MetaData> getFormat();
-        virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
-
-    protected:
-        virtual ~VideoEditorVideoDecoderSource();
-
-    private:
-        sp<MetaData> mFormat;
-        MediaBuffer* mBuffer;
-        MediaBufferGroup* mGroup;
-        Mutex mLock;
-        VideoEditorVideoDecoder_Context* mpDecShellContext;
-        int32_t mMaxAUSize;
-        bool mStarted;
-        VIDEOEDITOR_CodecType mCodecType;
-
-        // Don't call me
-        VideoEditorVideoDecoderSource(const VideoEditorVideoDecoderSource &);
-        VideoEditorVideoDecoderSource &operator=(
-            const VideoEditorVideoDecoderSource &);
-};
-
-VideoEditorVideoDecoderSource::VideoEditorVideoDecoderSource(
-        const sp<MetaData> &format, VIDEOEDITOR_CodecType codecType,
-        void *decoderShellContext) :
-        mFormat(format),
-        mBuffer(NULL),
-        mGroup(NULL),
-        mStarted(false),
mCodecType(codecType) { - mpDecShellContext = (VideoEditorVideoDecoder_Context*) decoderShellContext; -} - -VideoEditorVideoDecoderSource::~VideoEditorVideoDecoderSource() { - if (mStarted == true) { - stop(); - } -} - -status_t VideoEditorVideoDecoderSource::start( - MetaData *params) { - - if (!mStarted) { - if (mFormat->findInt32(kKeyMaxInputSize, &mMaxAUSize) == false) { - ALOGE("Could not find kKeyMaxInputSize"); - return ERROR_MALFORMED; - } - - mGroup = new MediaBufferGroup; - if (mGroup == NULL) { - ALOGE("FATAL: memory limitation ! "); - return NO_MEMORY; - } - - mGroup->add_buffer(new MediaBuffer(mMaxAUSize)); - - mStarted = true; - } - return OK; -} - -status_t VideoEditorVideoDecoderSource::stop() { - if (mStarted) { - if (mBuffer != NULL) { - - // FIXME: - // Why do we need to check on the ref count? - int ref_count = mBuffer->refcount(); - ALOGV("MediaBuffer refcount is %d",ref_count); - for (int i = 0; i < ref_count; ++i) { - mBuffer->release(); - } - - mBuffer = NULL; - } - delete mGroup; - mGroup = NULL; - mStarted = false; - } - return OK; -} - -sp VideoEditorVideoDecoderSource::getFormat() { - Mutex::Autolock autolock(mLock); - - return mFormat; -} - -status_t VideoEditorVideoDecoderSource::read(MediaBuffer** buffer_out, - const ReadOptions *options) { - - Mutex::Autolock autolock(mLock); - if (options != NULL) { - int64_t time_us; - MediaSource::ReadOptions::SeekMode mode; - options->getSeekTo(&time_us, &mode); - if (mode != MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC) { - ALOGE("Unexpected read options"); - return BAD_VALUE; - } - - M4OSA_ERR err; - M4OSA_Int32 rapTime = time_us / 1000; - - /*--- Retrieve the previous RAP time ---*/ - err = mpDecShellContext->m_pReaderGlobal->m_pFctGetPrevRapTime( - mpDecShellContext->m_pReader->m_readerContext, - (M4_StreamHandler*)mpDecShellContext->m_pVideoStreamhandler, - &rapTime); - - if (err == M4WAR_READER_INFORMATION_NOT_PRESENT) { - /* No RAP table, jump backward and predecode */ - rapTime -= 40000; - if(rapTime < 0) rapTime = 0; - } else if (err != OK) { - ALOGE("get rap time error = 0x%x\n", (uint32_t)err); - return UNKNOWN_ERROR; - } - - err = mpDecShellContext->m_pReaderGlobal->m_pFctJump( - mpDecShellContext->m_pReader->m_readerContext, - (M4_StreamHandler*)mpDecShellContext->m_pVideoStreamhandler, - &rapTime); - - if (err != OK) { - ALOGE("jump err = 0x%x\n", (uint32_t)err); - return BAD_VALUE; - } - } - - *buffer_out = NULL; - - M4OSA_ERR lerr = mGroup->acquire_buffer(&mBuffer); - if (lerr != OK) { - return lerr; - } - mBuffer->meta_data()->clear(); // clear all the meta data - - if (mStarted) { - //getNext AU from reader. - M4_AccessUnit* pAccessUnit = mpDecShellContext->m_pNextAccessUnitToDecode; - lerr = mpDecShellContext->m_pReader->m_pFctGetNextAu( - mpDecShellContext->m_pReader->m_readerContext, - (M4_StreamHandler*)mpDecShellContext->m_pVideoStreamhandler, - pAccessUnit); - if (lerr == M4WAR_NO_DATA_YET || lerr == M4WAR_NO_MORE_AU) { - *buffer_out = NULL; - return ERROR_END_OF_STREAM; - } - - //copy the reader AU buffer to mBuffer - M4OSA_UInt32 lSize = (pAccessUnit->m_size > (M4OSA_UInt32)mMaxAUSize)\ - ? 
(M4OSA_UInt32)mMaxAUSize : pAccessUnit->m_size; - memcpy((void *)mBuffer->data(),(void *)pAccessUnit->m_dataAddress, - lSize); - - mBuffer->set_range(0, lSize); - int64_t frameTimeUs = (int64_t) (pAccessUnit->m_CTS * 1000); - mBuffer->meta_data()->setInt64(kKeyTime, frameTimeUs); - - // Replace the AU start code for H264 - if (VIDEOEDITOR_kH264VideoDec == mCodecType) { - uint8_t *data =(uint8_t *)mBuffer->data() + mBuffer->range_offset(); - data[0]=0; - data[1]=0; - data[2]=0; - data[3]=1; - } - mBuffer->meta_data()->setInt32(kKeyIsSyncFrame, - (pAccessUnit->m_attribute == 0x04)? 1 : 0); - *buffer_out = mBuffer; - } - return OK; -} - -static M4OSA_UInt32 VideoEditorVideoDecoder_GetBitsFromMemory( - VIDEOEDITOR_VIDEO_Bitstream_ctxt* parsingCtxt, M4OSA_UInt32 nb_bits) { - return (M4VD_Tools_GetBitsFromMemory((M4VS_Bitstream_ctxt*) parsingCtxt, - nb_bits)); -} - -M4OSA_ERR VideoEditorVideoDecoder_internalParseVideoDSI(M4OSA_UInt8* pVol, - M4OSA_Int32 aVolSize, M4DECODER_MPEG4_DecoderConfigInfo* pDci, - M4DECODER_VideoSize* pVideoSize) { - - VIDEOEDITOR_VIDEO_Bitstream_ctxt parsingCtxt; - M4OSA_UInt32 code, j; - M4OSA_MemAddr8 start; - M4OSA_UInt8 i; - M4OSA_UInt32 time_incr_length; - M4OSA_UInt8 vol_verid=0, b_hierarchy_type; - - /* Parsing variables */ - M4OSA_UInt8 video_object_layer_shape = 0; - M4OSA_UInt8 sprite_enable = 0; - M4OSA_UInt8 reduced_resolution_vop_enable = 0; - M4OSA_UInt8 scalability = 0; - M4OSA_UInt8 enhancement_type = 0; - M4OSA_UInt8 complexity_estimation_disable = 0; - M4OSA_UInt8 interlaced = 0; - M4OSA_UInt8 sprite_warping_points = 0; - M4OSA_UInt8 sprite_brightness_change = 0; - M4OSA_UInt8 quant_precision = 0; - - /* Fill the structure with default parameters */ - pVideoSize->m_uiWidth = 0; - pVideoSize->m_uiHeight = 0; - - pDci->uiTimeScale = 0; - pDci->uiProfile = 0; - pDci->uiUseOfResynchMarker = 0; - pDci->bDataPartition = M4OSA_FALSE; - pDci->bUseOfRVLC = M4OSA_FALSE; - - /* Reset the bitstream context */ - parsingCtxt.stream_byte = 0; - parsingCtxt.stream_index = 8; - parsingCtxt.in = (M4OSA_MemAddr8) pVol; - - start = (M4OSA_MemAddr8) pVol; - - /* Start parsing */ - while (parsingCtxt.in - start < aVolSize) { - code = VideoEditorVideoDecoder_GetBitsFromMemory(&parsingCtxt, 8); - if (code == 0) { - code = VideoEditorVideoDecoder_GetBitsFromMemory(&parsingCtxt, 8); - if (code == 0) { - code = VideoEditorVideoDecoder_GetBitsFromMemory(&parsingCtxt,8); - if (code == 1) { - /* start code found */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 8); - - /* ----- 0x20..0x2F : video_object_layer_start_code ----- */ - - if ((code > 0x1F) && (code < 0x30)) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1); - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 8); - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1); - if (code == 1) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 4); - vol_verid = (M4OSA_UInt8)code; - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 3); - } - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 4); - if (code == 15) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 16); - } - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1); - if (code == 1) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 3); - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1); - if (code == 1) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 
32); - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 31); - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 16); - } - } - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 2); - /* Need to save it for vop parsing */ - video_object_layer_shape = (M4OSA_UInt8)code; - - if (code != 0) { - return 0; /* only rectangular case supported */ - } - - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1); - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 16); - pDci->uiTimeScale = code; - - /* Computes time increment length */ - j = code - 1; - for (i = 0; (i < 32) && (j != 0); j >>=1) { - i++; - } - time_incr_length = (i == 0) ? 1 : i; - - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1); - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1); - if (code == 1) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, time_incr_length); - } - - if(video_object_layer_shape != 1) { /* 1 = Binary */ - if(video_object_layer_shape == 0) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* Marker bit */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 13);/* Width */ - pVideoSize->m_uiWidth = code; - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* Marker bit */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 13);/* Height */ - pVideoSize->m_uiHeight = code; - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* Marker bit */ - } - } - - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* interlaced */ - interlaced = (M4OSA_UInt8)code; - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* OBMC disable */ - - if(vol_verid == 1) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* sprite enable */ - sprite_enable = (M4OSA_UInt8)code; - } else { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 2);/* sprite enable */ - sprite_enable = (M4OSA_UInt8)code; - } - if ((sprite_enable == 1) || (sprite_enable == 2)) { - if (sprite_enable != 2) { - - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 13);/* sprite width */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* Marker bit */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 13);/* sprite height */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* Marker bit */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 13);/* sprite l coordinate */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* Marker bit */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 13);/* sprite top coordinate */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* Marker bit */ - } - - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 6);/* sprite warping points */ - sprite_warping_points = (M4OSA_UInt8)code; - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 2);/* sprite warping accuracy */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* sprite brightness change */ - sprite_brightness_change = (M4OSA_UInt8)code; - if (sprite_enable != 2) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1); - } - } - if ((vol_verid != 1) && (video_object_layer_shape != 0)){ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* sadct 
disable */ - } - - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1); /* not 8 bits */ - if (code) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 4);/* quant precision */ - quant_precision = (M4OSA_UInt8)code; - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 4);/* bits per pixel */ - } - - /* greyscale not supported */ - if(video_object_layer_shape == 3) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 3); - } - - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* quant type */ - if (code) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* load intra quant mat */ - if (code) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 8);/* */ - i = 1; - while (i < 64) { - code = - VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 8); - if (code == 0) { - break; - } - i++; - } - } - - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* load non intra quant mat */ - if (code) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 8);/* */ - i = 1; - while (i < 64) { - code = - VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 8); - if (code == 0) { - break; - } - i++; - } - } - } - - if (vol_verid != 1) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* quarter sample */ - } - - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* complexity estimation disable */ - complexity_estimation_disable = (M4OSA_UInt8)code; - if (!code) { - //return M4ERR_NOT_IMPLEMENTED; - } - - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* resync marker disable */ - pDci->uiUseOfResynchMarker = (code) ? 0 : 1; - - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* data partitionned */ - pDci->bDataPartition = (code) ? M4OSA_TRUE : M4OSA_FALSE; - if (code) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* reversible VLC */ - pDci->bUseOfRVLC = (code) ? 
M4OSA_TRUE : M4OSA_FALSE; - } - - if (vol_verid != 1) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* newpred */ - if (code) { - //return M4ERR_PARAMETER; - } - - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1); - reduced_resolution_vop_enable = (M4OSA_UInt8)code; - } - - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* scalability */ - scalability = (M4OSA_UInt8)code; - if (code) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* hierarchy type */ - b_hierarchy_type = (M4OSA_UInt8)code; - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 4);/* ref layer id */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* ref sampling direct */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 5);/* hor sampling factor N */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 5);/* hor sampling factor M */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 5);/* vert sampling factor N */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 5);/* vert sampling factor M */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* enhancement type */ - enhancement_type = (M4OSA_UInt8)code; - if ((!b_hierarchy_type) && - (video_object_layer_shape == 1)) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* use ref shape */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* use ref texture */ - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 5); - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 5); - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 5); - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 5); - } - } - break; - } - - /* ----- 0xB0 : visual_object_sequence_start_code ----- */ - - else if(code == 0xB0) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 8);/* profile_and_level_indication */ - pDci->uiProfile = (M4OSA_UInt8)code; - } - - /* ----- 0xB5 : visual_object_start_code ----- */ - - else if(code == 0xB5) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 1);/* is object layer identifier */ - if (code == 1) { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 4); /* visual object verid */ - vol_verid = (M4OSA_UInt8)code; - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 3); - } else { - code = VideoEditorVideoDecoder_GetBitsFromMemory( - &parsingCtxt, 7); /* Realign on byte */ - vol_verid = 1; - } - } - - /* ----- end ----- */ - } else { - if ((code >> 2) == 0x20) { - /* H263 ...-> wrong*/ - break; - } - } - } - } - } - return M4NO_ERROR; -} - -M4VIFI_UInt8 M4VIFI_SemiplanarYVU420toYUV420(void *user_data, - M4VIFI_UInt8 *inyuv, M4VIFI_ImagePlane *PlaneOut ) { - M4VIFI_UInt8 return_code = M4VIFI_OK; - M4VIFI_UInt8 *outyuv = - ((M4VIFI_UInt8*)&(PlaneOut[0].pac_data[PlaneOut[0].u_topleft])); - int32_t width = PlaneOut[0].u_width; - int32_t height = PlaneOut[0].u_height; - - int32_t outYsize = width * height; - uint32_t *outy = (uint32_t *) outyuv; - uint16_t *outcb = - (uint16_t *) &(PlaneOut[1].pac_data[PlaneOut[1].u_topleft]); - uint16_t *outcr = - (uint16_t *) &(PlaneOut[2].pac_data[PlaneOut[2].u_topleft]); - - /* Y copying */ - memcpy((void *)outy, (void *)inyuv, outYsize); - - /* U & V copying */ - uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize); - for (int32_t i = height >> 1; i > 0; 
--i) {
-        for (int32_t j = width >> 2; j > 0; --j) {
-            uint32_t temp = *inyuv_4++;
-            uint32_t tempU = temp & 0xFF;
-            tempU = tempU | ((temp >> 8) & 0xFF00);
-
-            uint32_t tempV = (temp >> 8) & 0xFF;
-            tempV = tempV | ((temp >> 16) & 0xFF00);
-
-            // Flip U and V
-            *outcb++ = tempV;
-            *outcr++ = tempU;
-        }
-    }
-    return return_code;
-}
-void logSupportDecodersAndCapabilities(M4DECODER_VideoDecoders* decoders) {
-    VideoDecoder *pDecoder;
-    VideoComponentCapabilities *pOmxComponents = NULL;
-    VideoProfileLevel *pProfileLevel = NULL;
-    pDecoder = decoders->decoder;
-    for (size_t i = 0; i < decoders->decoderNumber; i++) {
-        ALOGV("Supported Codec[%d] :%d", i, pDecoder->codec);
-        pOmxComponents = pDecoder->component;
-        for (size_t j = 0; j < pDecoder->componentNumber; j++) {
-            pProfileLevel = pOmxComponents->profileLevel;
-            ALOGV("-->component %d", j);
-            for (size_t k = 0; k < pOmxComponents->profileNumber; k++) {
-                ALOGV("-->profile:%ld maxLevel:%ld", pProfileLevel->mProfile,
-                    pProfileLevel->mLevel);
-                pProfileLevel++;
-            }
-            pOmxComponents++;
-        }
-        pDecoder++;
-    }
-}
-
-M4OSA_ERR queryVideoDecoderCapabilities
-    (M4DECODER_VideoDecoders** decoders) {
-    M4OSA_ERR err = M4NO_ERROR;
-    const char *kMimeTypes[] = {
-        MEDIA_MIMETYPE_VIDEO_AVC, MEDIA_MIMETYPE_VIDEO_MPEG4,
-        MEDIA_MIMETYPE_VIDEO_H263
-    };
-
-    int32_t supportFormats = sizeof(kMimeTypes) / sizeof(kMimeTypes[0]);
-    M4DECODER_VideoDecoders *pDecoders;
-    VideoDecoder *pDecoder;
-    VideoComponentCapabilities *pOmxComponents = NULL;
-    VideoProfileLevel *pProfileLevel = NULL;
-    OMXClient client;
-    status_t status = OK;
-    SAFE_MALLOC(pDecoders, M4DECODER_VideoDecoders, 1, "VideoDecoders");
-    SAFE_MALLOC(pDecoder, VideoDecoder, supportFormats,
-        "VideoDecoder");
-    pDecoders->decoder = pDecoder;
-
-    pDecoders->decoderNumber = supportFormats;
-    status = client.connect();
-    CHECK(status == OK);
-    for (size_t k = 0; k < sizeof(kMimeTypes) / sizeof(kMimeTypes[0]);
-         ++k) {
-        Vector<CodecCapabilities> results;
-        CHECK_EQ(QueryCodecs(client.interface(), kMimeTypes[k],
-                             true, // queryDecoders
-                             &results), (status_t)OK);
-
-        if (results.size()) {
-            SAFE_MALLOC(pOmxComponents, VideoComponentCapabilities,
-                results.size(), "VideoComponentCapabilities");
-            ALOGV("K=%d", k);
-            pDecoder->component = pOmxComponents;
-            pDecoder->componentNumber = results.size();
-        }
-
-        for (size_t i = 0; i < results.size(); ++i) {
-            ALOGV("  decoder '%s' supports ",
-                results[i].mComponentName.string());
-
-            if (results[i].mProfileLevels.size() == 0) {
-                ALOGV("NOTHING.\n");
-                continue;
-            }
-
-#if 0
-            // FIXME:
-            // We should ignore the software codecs and make IsSoftwareCodec()
-            // part of public API from OMXCodec.cpp
-            if (IsSoftwareCodec(results[i].mComponentName.string())) {
-                ALOGV("Ignore software codec %s", results[i].mComponentName.string());
-                continue;
-            }
-#endif
-
-            // Count the supported profiles
-            int32_t profileNumber = 0;
-            int32_t profile = -1;
-            for (size_t j = 0; j < results[i].mProfileLevels.size(); ++j) {
-                const CodecProfileLevel &profileLevel =
-                    results[i].mProfileLevels[j];
-                // FIXME: assume that the profiles are ordered
-                if (profileLevel.mProfile != profile) {
-                    profile = profileLevel.mProfile;
-                    profileNumber++;
-                }
-            }
-            SAFE_MALLOC(pProfileLevel, VideoProfileLevel,
-                profileNumber, "VideoProfileLevel");
-            pOmxComponents->profileLevel = pProfileLevel;
-            pOmxComponents->profileNumber = profileNumber;
-
-            // Get the max Level for each profile.
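// Note: the pass below collapses each component's ordered (profile, level)
// list into one entry per profile carrying the highest level seen. A minimal
// standalone sketch of the same reduction; the helper name and the use of
// plain int pairs instead of the OMX CodecProfileLevel type are illustrative
// assumptions, not part of this file:
//
//     #include <map>
//     #include <utility>
//     #include <vector>
//
//     // Map each profile to the maximum level reported for it.
//     std::map<int, int> maxLevelPerProfile(
//             const std::vector<std::pair<int, int> >& profileLevels) {
//         std::map<int, int> maxLevel;
//         for (size_t n = 0; n < profileLevels.size(); ++n) {
//             const std::pair<int, int>& pl = profileLevels[n];
//             std::map<int, int>::iterator it = maxLevel.find(pl.first);
//             if (it == maxLevel.end() || pl.second > it->second)
//                 maxLevel[pl.first] = pl.second;
//         }
//         return maxLevel;
//     }
//
// Unlike this sketch, the code below relies on the pairs being grouped by
// profile (see the FIXME above) and writes into the preallocated
// pProfileLevel array instead of a map.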
- int32_t maxLevel = -1; - profile = -1; - profileNumber = 0; - for (size_t j = 0; j < results[i].mProfileLevels.size(); ++j) { - const CodecProfileLevel &profileLevel = - results[i].mProfileLevels[j]; - if (profile == -1 && maxLevel == -1) { - profile = profileLevel.mProfile; - maxLevel = profileLevel.mLevel; - pProfileLevel->mProfile = profile; - pProfileLevel->mLevel = maxLevel; - ALOGV("%d profile: %ld, max level: %ld", - __LINE__, pProfileLevel->mProfile, pProfileLevel->mLevel); - } - if (profileLevel.mProfile != profile) { - profile = profileLevel.mProfile; - maxLevel = profileLevel.mLevel; - profileNumber++; - pProfileLevel++; - pProfileLevel->mProfile = profile; - pProfileLevel->mLevel = maxLevel; - ALOGV("%d profile: %ld, max level: %ld", - __LINE__, pProfileLevel->mProfile, pProfileLevel->mLevel); - } else if (profileLevel.mLevel > maxLevel) { - maxLevel = profileLevel.mLevel; - pProfileLevel->mLevel = maxLevel; - ALOGV("%d profile: %ld, max level: %ld", - __LINE__, pProfileLevel->mProfile, pProfileLevel->mLevel); - } - - } - pOmxComponents++; - } - if (!strcmp(MEDIA_MIMETYPE_VIDEO_AVC, kMimeTypes[k])) - pDecoder->codec = M4DA_StreamTypeVideoMpeg4Avc; - if (!strcmp(MEDIA_MIMETYPE_VIDEO_MPEG4, kMimeTypes[k])) - pDecoder->codec = M4DA_StreamTypeVideoMpeg4; - if (!strcmp(MEDIA_MIMETYPE_VIDEO_H263, kMimeTypes[k])) - pDecoder->codec = M4DA_StreamTypeVideoH263; - - pDecoder++; - } - - logSupportDecodersAndCapabilities(pDecoders); - *decoders = pDecoders; -cleanUp: - return err; -} -/******************** - * ENGINE INTERFACE * - ********************/ -M4OSA_ERR VideoEditorVideoDecoder_configureFromMetadata(M4OSA_Context pContext, - MetaData* meta) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoDecoder_Context* pDecShellContext = M4OSA_NULL; - bool success = OK; - int32_t width = 0; - int32_t height = 0; - int32_t frameSize = 0; - int32_t vWidth, vHeight; - int32_t cropLeft, cropTop, cropRight, cropBottom; - - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != meta, M4ERR_PARAMETER); - - ALOGV("VideoEditorVideoDecoder_configureFromMetadata begin"); - - pDecShellContext = (VideoEditorVideoDecoder_Context*)pContext; - - success = meta->findInt32(kKeyWidth, &vWidth); - VIDEOEDITOR_CHECK(TRUE == success, M4ERR_PARAMETER); - success = meta->findInt32(kKeyHeight, &vHeight); - VIDEOEDITOR_CHECK(TRUE == success, M4ERR_PARAMETER); - - ALOGV("vWidth = %d, vHeight = %d", vWidth, vHeight); - - pDecShellContext->mGivenWidth = vWidth; - pDecShellContext->mGivenHeight = vHeight; - - if (!meta->findRect( - kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) { - - cropLeft = cropTop = 0; - cropRight = vWidth - 1; - cropBottom = vHeight - 1; - - ALOGV("got dimensions only %d x %d", width, height); - } else { - ALOGV("got crop rect %d, %d, %d, %d", - cropLeft, cropTop, cropRight, cropBottom); - } - - pDecShellContext->mCropRect.left = cropLeft; - pDecShellContext->mCropRect.right = cropRight; - pDecShellContext->mCropRect.top = cropTop; - pDecShellContext->mCropRect.bottom = cropBottom; - - width = cropRight - cropLeft + 1; - height = cropBottom - cropTop + 1; - - ALOGV("VideoDecoder_configureFromMetadata : W=%d H=%d", width, height); - VIDEOEDITOR_CHECK((0 != width) && (0 != height), M4ERR_PARAMETER); - - if( (M4OSA_NULL != pDecShellContext->m_pDecBufferPool) && - (pDecShellContext->m_pVideoStreamhandler->m_videoWidth == \ - (uint32_t)width) && - (pDecShellContext->m_pVideoStreamhandler->m_videoHeight == \ - (uint32_t)height) ) { - // No need to 
reconfigure
-        goto cleanUp;
-    }
-    ALOGV("VideoDecoder_configureFromMetadata reset: W=%d H=%d", width, height);
-    // Update the stream handler parameters
-    pDecShellContext->m_pVideoStreamhandler->m_videoWidth = width;
-    pDecShellContext->m_pVideoStreamhandler->m_videoHeight = height;
-    frameSize = (width * height * 3) / 2;
-
-    // Configure the buffer pool
-    if( M4OSA_NULL != pDecShellContext->m_pDecBufferPool ) {
-        ALOGV("VideoDecoder_configureFromMetadata : reset the buffer pool");
-        VIDEOEDITOR_BUFFER_freePool_Ext(pDecShellContext->m_pDecBufferPool);
-        pDecShellContext->m_pDecBufferPool = M4OSA_NULL;
-    }
-    err = VIDEOEDITOR_BUFFER_allocatePool(&pDecShellContext->m_pDecBufferPool,
-        MAX_DEC_BUFFERS, (M4OSA_Char*)"VIDEOEDITOR_DecodedBufferPool");
-    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
-    err = VIDEOEDITOR_BUFFER_initPoolBuffers_Ext(pDecShellContext->m_pDecBufferPool,
-        frameSize + pDecShellContext->mGivenWidth * 2);
-
-    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
-
-cleanUp:
-    if( M4NO_ERROR == err ) {
-        ALOGV("VideoEditorVideoDecoder_configureFromMetadata no error");
-    } else {
-        if( (M4OSA_NULL != pDecShellContext) && \
-            (M4OSA_NULL != pDecShellContext->m_pDecBufferPool) ) {
-            VIDEOEDITOR_BUFFER_freePool_Ext(pDecShellContext->m_pDecBufferPool);
-            pDecShellContext->m_pDecBufferPool = M4OSA_NULL;
-        }
-        ALOGV("VideoEditorVideoDecoder_configureFromMetadata ERROR 0x%X", err);
-    }
-    ALOGV("VideoEditorVideoDecoder_configureFromMetadata end");
-    return err;
-}
-
-M4OSA_ERR VideoEditorVideoDecoder_destroy(M4OSA_Context pContext) {
-    M4OSA_ERR err = M4NO_ERROR;
-    VideoEditorVideoDecoder_Context* pDecShellContext =
-        (VideoEditorVideoDecoder_Context*)pContext;
-
-    // Input parameters check
-    ALOGV("VideoEditorVideoDecoder_destroy begin");
-    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
-
-    // Release the color converter
-    delete pDecShellContext->mI420ColorConverter;
-
-    // Release memory
-    if( pDecShellContext->m_pDecBufferPool != M4OSA_NULL ) {
-        VIDEOEDITOR_BUFFER_freePool_Ext(pDecShellContext->m_pDecBufferPool);
-        pDecShellContext->m_pDecBufferPool = M4OSA_NULL;
-    }
-
-    // Destroy the graph
-    if( pDecShellContext->mVideoDecoder != NULL ) {
-        ALOGV("### VideoEditorVideoDecoder_destroy : releasing decoder");
-        pDecShellContext->mVideoDecoder->stop();
-        pDecShellContext->mVideoDecoder.clear();
-    }
-
-    pDecShellContext->mClient.disconnect();
-    pDecShellContext->mReaderSource.clear();
-
-    SAFE_FREE(pDecShellContext);
-    pContext = NULL;
-
-cleanUp:
-    if( M4NO_ERROR == err ) {
-        ALOGV("VideoEditorVideoDecoder_destroy no error");
-    } else {
-        ALOGV("VideoEditorVideoDecoder_destroy ERROR 0x%X", err);
-    }
-    ALOGV("VideoEditorVideoDecoder_destroy end");
-    return err;
-}
-
-M4OSA_ERR VideoEditorVideoDecoder_create(M4OSA_Context *pContext,
-        M4_StreamHandler *pStreamHandler,
-        M4READER_GlobalInterface *pReaderGlobalInterface,
-        M4READER_DataInterface *pReaderDataInterface,
-        M4_AccessUnit *pAccessUnit, M4OSA_Void *pUserData) {
-    M4OSA_ERR err = M4NO_ERROR;
-    VideoEditorVideoDecoder_Context* pDecShellContext = M4OSA_NULL;
-    status_t status = OK;
-    bool success = TRUE;
-    int32_t colorFormat = 0;
-    M4OSA_UInt32 size = 0;
-    sp<MetaData> decoderMetadata = NULL;
-    int decoderOutput = OMX_COLOR_FormatYUV420Planar;
-
-    ALOGV("VideoEditorVideoDecoder_create begin");
-    // Input parameters check
-    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
-    VIDEOEDITOR_CHECK(M4OSA_NULL != pStreamHandler, M4ERR_PARAMETER);
-    VIDEOEDITOR_CHECK(M4OSA_NULL != pReaderDataInterface, M4ERR_PARAMETER);
-
-    // 
Context allocation & initialization - SAFE_MALLOC(pDecShellContext, VideoEditorVideoDecoder_Context, 1, - "VideoEditorVideoDecoder"); - pDecShellContext->m_pVideoStreamhandler = - (M4_VideoStreamHandler*)pStreamHandler; - pDecShellContext->m_pNextAccessUnitToDecode = pAccessUnit; - pDecShellContext->m_pReaderGlobal = pReaderGlobalInterface; - pDecShellContext->m_pReader = pReaderDataInterface; - pDecShellContext->m_lastDecodedCTS = -1; - pDecShellContext->m_lastRenderCts = -1; - - pDecShellContext->mThumbnail = 0; - - switch( pStreamHandler->m_streamType ) { - case M4DA_StreamTypeVideoH263: - pDecShellContext->mDecoderType = VIDEOEDITOR_kH263VideoDec; - break; - case M4DA_StreamTypeVideoMpeg4: - pDecShellContext->mDecoderType = VIDEOEDITOR_kMpeg4VideoDec; - // Parse the VOL header - err = VideoEditorVideoDecoder_internalParseVideoDSI( - (M4OSA_UInt8*)pDecShellContext->m_pVideoStreamhandler->\ - m_basicProperties.m_pDecoderSpecificInfo, - pDecShellContext->m_pVideoStreamhandler->\ - m_basicProperties.m_decoderSpecificInfoSize, - &pDecShellContext->m_Dci, &pDecShellContext->m_VideoSize); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - break; - case M4DA_StreamTypeVideoMpeg4Avc: - pDecShellContext->mDecoderType = VIDEOEDITOR_kH264VideoDec; - break; - default: - VIDEOEDITOR_CHECK(!"VideoDecoder_create : incorrect stream type", - M4ERR_PARAMETER); - break; - } - - pDecShellContext->mNbInputFrames = 0; - pDecShellContext->mFirstInputCts = -1.0; - pDecShellContext->mLastInputCts = -1.0; - pDecShellContext->mNbRenderedFrames = 0; - pDecShellContext->mFirstRenderedCts = -1.0; - pDecShellContext->mLastRenderedCts = -1.0; - pDecShellContext->mNbOutputFrames = 0; - pDecShellContext->mFirstOutputCts = -1; - pDecShellContext->mLastOutputCts = -1; - pDecShellContext->m_pDecBufferPool = M4OSA_NULL; - - // Calculate the interval between two video frames. 
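// Note: mFrameIntervalMs is the nominal frame duration derived from the
// reader's average frame rate; the decode loop later adds it (plus the
// caller's tolerance) to the last decoded CTS to decide whether it has
// decoded far enough. A one-line sketch of the computation, assuming a
// strictly positive rate as the CHECK below enforces; the helper name is
// illustrative:
//
//     inline double frameIntervalMs(float averageFrameRate) {
//         return 1000.0 / averageFrameRate;   // e.g. 30 fps -> ~33.3 ms
//     }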
- CHECK(pDecShellContext->m_pVideoStreamhandler->m_averageFrameRate > 0); - pDecShellContext->mFrameIntervalMs = - 1000.0 / pDecShellContext->m_pVideoStreamhandler->m_averageFrameRate; - - /** - * StageFright graph building - */ - decoderMetadata = new MetaData; - switch( pDecShellContext->mDecoderType ) { - case VIDEOEDITOR_kH263VideoDec: - decoderMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263); - break; - case VIDEOEDITOR_kMpeg4VideoDec: - decoderMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4); - decoderMetadata->setData(kKeyESDS, kTypeESDS, - pStreamHandler->m_pESDSInfo, - pStreamHandler->m_ESDSInfoSize); - break; - case VIDEOEDITOR_kH264VideoDec: - decoderMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC); - decoderMetadata->setData(kKeyAVCC, kTypeAVCC, - pStreamHandler->m_pH264DecoderSpecificInfo, - pStreamHandler->m_H264decoderSpecificInfoSize); - break; - default: - VIDEOEDITOR_CHECK(!"VideoDecoder_create : incorrect stream type", - M4ERR_PARAMETER); - break; - } - - decoderMetadata->setInt32(kKeyMaxInputSize, pStreamHandler->m_maxAUSize); - decoderMetadata->setInt32(kKeyWidth, - pDecShellContext->m_pVideoStreamhandler->m_videoWidth); - decoderMetadata->setInt32(kKeyHeight, - pDecShellContext->m_pVideoStreamhandler->m_videoHeight); - - // Create the decoder source - pDecShellContext->mReaderSource = new VideoEditorVideoDecoderSource( - decoderMetadata, pDecShellContext->mDecoderType, - (void *)pDecShellContext); - VIDEOEDITOR_CHECK(NULL != pDecShellContext->mReaderSource.get(), - M4ERR_SF_DECODER_RSRC_FAIL); - - // Connect to the OMX client - status = pDecShellContext->mClient.connect(); - VIDEOEDITOR_CHECK(OK == status, M4ERR_SF_DECODER_RSRC_FAIL); - - // Create the decoder - pDecShellContext->mVideoDecoder = OMXCodec::Create( - pDecShellContext->mClient.interface(), - decoderMetadata, false, pDecShellContext->mReaderSource); - VIDEOEDITOR_CHECK(NULL != pDecShellContext->mVideoDecoder.get(), - M4ERR_SF_DECODER_RSRC_FAIL); - - - // Get the output color format - success = pDecShellContext->mVideoDecoder->getFormat()->findInt32( - kKeyColorFormat, &colorFormat); - VIDEOEDITOR_CHECK(TRUE == success, M4ERR_PARAMETER); - pDecShellContext->decOuputColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; - - pDecShellContext->mVideoDecoder->getFormat()->setInt32(kKeyWidth, - pDecShellContext->m_pVideoStreamhandler->m_videoWidth); - pDecShellContext->mVideoDecoder->getFormat()->setInt32(kKeyHeight, - pDecShellContext->m_pVideoStreamhandler->m_videoHeight); - - // Get the color converter - pDecShellContext->mI420ColorConverter = new I420ColorConverter; - if (pDecShellContext->mI420ColorConverter->isLoaded()) { - decoderOutput = pDecShellContext->mI420ColorConverter->getDecoderOutputFormat(); - } - - if (decoderOutput == OMX_COLOR_FormatYUV420Planar) { - delete pDecShellContext->mI420ColorConverter; - pDecShellContext->mI420ColorConverter = NULL; - } - - ALOGI("decoder output format = 0x%X\n", decoderOutput); - - // Configure the buffer pool from the metadata - err = VideoEditorVideoDecoder_configureFromMetadata(pDecShellContext, - pDecShellContext->mVideoDecoder->getFormat().get()); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - - // Start the graph - status = pDecShellContext->mVideoDecoder->start(); - VIDEOEDITOR_CHECK(OK == status, M4ERR_SF_DECODER_RSRC_FAIL); - - *pContext = (M4OSA_Context)pDecShellContext; - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoDecoder_create no error"); - } else { - 
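// Note: error path. Calling _destroy() on a partially initialized context
// appears safe here because each teardown step in
// VideoEditorVideoDecoder_destroy() either checks its member for NULL
// (buffer pool, decoder) or tolerates NULL (deleting a NULL color
// converter is a no-op).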
VideoEditorVideoDecoder_destroy(pDecShellContext); - *pContext = M4OSA_NULL; - ALOGV("VideoEditorVideoDecoder_create ERROR 0x%X", err); - } - ALOGV("VideoEditorVideoDecoder_create : DONE"); - return err; -} - -M4OSA_ERR VideoEditorVideoSoftwareDecoder_create(M4OSA_Context *pContext, - M4_StreamHandler *pStreamHandler, - M4READER_GlobalInterface *pReaderGlobalInterface, - M4READER_DataInterface *pReaderDataInterface, - M4_AccessUnit *pAccessUnit, M4OSA_Void *pUserData) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoDecoder_Context* pDecShellContext = M4OSA_NULL; - status_t status = OK; - bool success = TRUE; - int32_t colorFormat = 0; - M4OSA_UInt32 size = 0; - sp decoderMetadata = NULL; - - ALOGV("VideoEditorVideoDecoder_create begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pStreamHandler, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pReaderDataInterface, M4ERR_PARAMETER); - - // Context allocation & initialization - SAFE_MALLOC(pDecShellContext, VideoEditorVideoDecoder_Context, 1, - "VideoEditorVideoDecoder"); - pDecShellContext->m_pVideoStreamhandler = - (M4_VideoStreamHandler*)pStreamHandler; - pDecShellContext->m_pNextAccessUnitToDecode = pAccessUnit; - pDecShellContext->m_pReaderGlobal = pReaderGlobalInterface; - pDecShellContext->m_pReader = pReaderDataInterface; - pDecShellContext->m_lastDecodedCTS = -1; - pDecShellContext->m_lastRenderCts = -1; - switch( pStreamHandler->m_streamType ) { - case M4DA_StreamTypeVideoH263: - pDecShellContext->mDecoderType = VIDEOEDITOR_kH263VideoDec; - break; - case M4DA_StreamTypeVideoMpeg4: - pDecShellContext->mDecoderType = VIDEOEDITOR_kMpeg4VideoDec; - // Parse the VOL header - err = VideoEditorVideoDecoder_internalParseVideoDSI( - (M4OSA_UInt8*)pDecShellContext->m_pVideoStreamhandler->\ - m_basicProperties.m_pDecoderSpecificInfo, - pDecShellContext->m_pVideoStreamhandler->\ - m_basicProperties.m_decoderSpecificInfoSize, - &pDecShellContext->m_Dci, &pDecShellContext->m_VideoSize); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - break; - case M4DA_StreamTypeVideoMpeg4Avc: - pDecShellContext->mDecoderType = VIDEOEDITOR_kH264VideoDec; - break; - default: - VIDEOEDITOR_CHECK(!"VideoDecoder_create : incorrect stream type", - M4ERR_PARAMETER); - break; - } - - pDecShellContext->mNbInputFrames = 0; - pDecShellContext->mFirstInputCts = -1.0; - pDecShellContext->mLastInputCts = -1.0; - pDecShellContext->mNbRenderedFrames = 0; - pDecShellContext->mFirstRenderedCts = -1.0; - pDecShellContext->mLastRenderedCts = -1.0; - pDecShellContext->mNbOutputFrames = 0; - pDecShellContext->mFirstOutputCts = -1; - pDecShellContext->mLastOutputCts = -1; - pDecShellContext->m_pDecBufferPool = M4OSA_NULL; - - /** - * StageFright graph building - */ - decoderMetadata = new MetaData; - switch( pDecShellContext->mDecoderType ) { - case VIDEOEDITOR_kH263VideoDec: - decoderMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263); - break; - case VIDEOEDITOR_kMpeg4VideoDec: - decoderMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4); - decoderMetadata->setData(kKeyESDS, kTypeESDS, - pStreamHandler->m_pESDSInfo, - pStreamHandler->m_ESDSInfoSize); - break; - case VIDEOEDITOR_kH264VideoDec: - decoderMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC); - decoderMetadata->setData(kKeyAVCC, kTypeAVCC, - pStreamHandler->m_pH264DecoderSpecificInfo, - pStreamHandler->m_H264decoderSpecificInfoSize); - break; - default: - VIDEOEDITOR_CHECK(!"VideoDecoder_create : incorrect 
stream type", - M4ERR_PARAMETER); - break; - } - - decoderMetadata->setInt32(kKeyMaxInputSize, pStreamHandler->m_maxAUSize); - decoderMetadata->setInt32(kKeyWidth, - pDecShellContext->m_pVideoStreamhandler->m_videoWidth); - decoderMetadata->setInt32(kKeyHeight, - pDecShellContext->m_pVideoStreamhandler->m_videoHeight); - - // Create the decoder source - pDecShellContext->mReaderSource = new VideoEditorVideoDecoderSource( - decoderMetadata, pDecShellContext->mDecoderType, - (void *)pDecShellContext); - VIDEOEDITOR_CHECK(NULL != pDecShellContext->mReaderSource.get(), - M4ERR_SF_DECODER_RSRC_FAIL); - - // Connect to the OMX client - status = pDecShellContext->mClient.connect(); - VIDEOEDITOR_CHECK(OK == status, M4ERR_SF_DECODER_RSRC_FAIL); - - ALOGI("Using software codecs only"); - // Create the decoder - pDecShellContext->mVideoDecoder = OMXCodec::Create( - pDecShellContext->mClient.interface(), - decoderMetadata, false, pDecShellContext->mReaderSource,NULL,OMXCodec::kSoftwareCodecsOnly); - VIDEOEDITOR_CHECK(NULL != pDecShellContext->mVideoDecoder.get(), - M4ERR_SF_DECODER_RSRC_FAIL); - - // Get the output color format - success = pDecShellContext->mVideoDecoder->getFormat()->findInt32( - kKeyColorFormat, &colorFormat); - VIDEOEDITOR_CHECK(TRUE == success, M4ERR_PARAMETER); - pDecShellContext->decOuputColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; - - pDecShellContext->mVideoDecoder->getFormat()->setInt32(kKeyWidth, - pDecShellContext->m_pVideoStreamhandler->m_videoWidth); - pDecShellContext->mVideoDecoder->getFormat()->setInt32(kKeyHeight, - pDecShellContext->m_pVideoStreamhandler->m_videoHeight); - - // Configure the buffer pool from the metadata - err = VideoEditorVideoDecoder_configureFromMetadata(pDecShellContext, - pDecShellContext->mVideoDecoder->getFormat().get()); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - - // Start the graph - status = pDecShellContext->mVideoDecoder->start(); - VIDEOEDITOR_CHECK(OK == status, M4ERR_SF_DECODER_RSRC_FAIL); - - *pContext = (M4OSA_Context)pDecShellContext; - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoDecoder_create no error"); - } else { - VideoEditorVideoDecoder_destroy(pDecShellContext); - *pContext = M4OSA_NULL; - ALOGV("VideoEditorVideoDecoder_create ERROR 0x%X", err); - } - ALOGV("VideoEditorVideoDecoder_create : DONE"); - return err; -} - - -M4OSA_ERR VideoEditorVideoDecoder_getOption(M4OSA_Context context, - M4OSA_OptionID optionId, M4OSA_DataOption pValue) { - M4OSA_ERR lerr = M4NO_ERROR; - VideoEditorVideoDecoder_Context* pDecShellContext = - (VideoEditorVideoDecoder_Context*) context; - M4_VersionInfo* pVersionInfo; - M4DECODER_VideoSize* pVideoSize; - M4OSA_UInt32* pNextFrameCts; - M4OSA_UInt32 *plastDecodedFrameCts; - M4DECODER_AVCProfileLevel* profile; - M4DECODER_MPEG4_DecoderConfigInfo* pDecConfInfo; - - ALOGV("VideoEditorVideoDecoder_getOption begin"); - - switch (optionId) { - case M4DECODER_kOptionID_AVCLastDecodedFrameCTS: - plastDecodedFrameCts = (M4OSA_UInt32 *) pValue; - *plastDecodedFrameCts = pDecShellContext->m_lastDecodedCTS; - break; - - case M4DECODER_kOptionID_Version: - pVersionInfo = (M4_VersionInfo*)pValue; - - pVersionInfo->m_major = VIDEOEDITOR_VIDEC_SHELL_VER_MAJOR; - pVersionInfo->m_minor= VIDEOEDITOR_VIDEC_SHELL_VER_MINOR; - pVersionInfo->m_revision = VIDEOEDITOR_VIDEC_SHELL_VER_REVISION; - pVersionInfo->m_structSize=sizeof(M4_VersionInfo); - break; - - case M4DECODER_kOptionID_VideoSize: - /** Only VPS uses this Option ID. 
*/ - pVideoSize = (M4DECODER_VideoSize*)pValue; - pDecShellContext->mVideoDecoder->getFormat()->findInt32(kKeyWidth, - (int32_t*)(&pVideoSize->m_uiWidth)); - pDecShellContext->mVideoDecoder->getFormat()->findInt32(kKeyHeight, - (int32_t*)(&pVideoSize->m_uiHeight)); - ALOGV("VideoEditorVideoDecoder_getOption : W=%d H=%d", - pVideoSize->m_uiWidth, pVideoSize->m_uiHeight); - break; - - case M4DECODER_kOptionID_NextRenderedFrameCTS: - /** How to get this information. SF decoder does not provide this. * - ** Let us provide last decoded frame CTS as of now. * - ** Only VPS uses this Option ID. */ - pNextFrameCts = (M4OSA_UInt32 *)pValue; - *pNextFrameCts = pDecShellContext->m_lastDecodedCTS; - break; - case M4DECODER_MPEG4_kOptionID_DecoderConfigInfo: - if(pDecShellContext->mDecoderType == VIDEOEDITOR_kMpeg4VideoDec) { - (*(M4DECODER_MPEG4_DecoderConfigInfo*)pValue) = - pDecShellContext->m_Dci; - } - break; - default: - lerr = M4ERR_BAD_OPTION_ID; - break; - - } - - ALOGV("VideoEditorVideoDecoder_getOption: end with err = 0x%x", lerr); - return lerr; -} - -M4OSA_ERR VideoEditorVideoDecoder_setOption(M4OSA_Context context, - M4OSA_OptionID optionId, M4OSA_DataOption pValue) { - M4OSA_ERR lerr = M4NO_ERROR; - VideoEditorVideoDecoder_Context *pDecShellContext = - (VideoEditorVideoDecoder_Context*) context; - - ALOGV("VideoEditorVideoDecoder_setOption begin"); - - switch (optionId) { - case M4DECODER_kOptionID_OutputFilter: { - M4DECODER_OutputFilter* pOutputFilter = - (M4DECODER_OutputFilter*) pValue; - pDecShellContext->m_pFilter = - (M4VIFI_PlanConverterFunctionType*)pOutputFilter->\ - m_pFilterFunction; - pDecShellContext->m_pFilterUserData = - pOutputFilter->m_pFilterUserData; - } - break; - case M4DECODER_kOptionID_DeblockingFilter: - break; - case M4DECODER_kOptionID_VideoDecodersThumbnailMode: - pDecShellContext->mThumbnail = *((M4OSA_Int32 *)pValue); - break; - default: - lerr = M4ERR_BAD_CONTEXT; - break; - } - - ALOGV("VideoEditorVideoDecoder_setOption: end with err = 0x%x", lerr); - return lerr; -} - -M4OSA_ERR VideoEditorVideoDecoder_decode(M4OSA_Context context, - M4_MediaTime* pTime, M4OSA_Bool bJump, M4OSA_UInt32 tolerance) { - M4OSA_ERR lerr = M4NO_ERROR; - VideoEditorVideoDecoder_Context* pDecShellContext = - (VideoEditorVideoDecoder_Context*) context; - int64_t lFrameTime; - MediaBuffer* pDecoderBuffer = NULL; - MediaBuffer* pNextBuffer = NULL; - status_t errStatus; - bool needSeek = bJump; - bool needSave = M4OSA_TRUE; - - ALOGV("VideoEditorVideoDecoder_decode begin"); - - if( M4OSA_TRUE == pDecShellContext->mReachedEOS ) { - // Do not call read(), it could lead to a freeze - ALOGV("VideoEditorVideoDecoder_decode : EOS already reached"); - lerr = M4WAR_NO_MORE_AU; - goto VIDEOEDITOR_VideoDecode_cleanUP; - } - if(pDecShellContext->m_lastDecodedCTS >= *pTime) { - ALOGV("VideoDecoder_decode: Already decoded up to this time CTS = %lf.", - pDecShellContext->m_lastDecodedCTS); - goto VIDEOEDITOR_VideoDecode_cleanUP; - } - if(M4OSA_TRUE == bJump) { - ALOGV("VideoEditorVideoDecoder_decode: Jump called"); - pDecShellContext->m_lastDecodedCTS = -1; - pDecShellContext->m_lastRenderCts = -1; - } - - pDecShellContext->mNbInputFrames++; - if (0 > pDecShellContext->mFirstInputCts){ - pDecShellContext->mFirstInputCts = *pTime; - } - pDecShellContext->mLastInputCts = *pTime; - - while (pDecoderBuffer == NULL || pDecShellContext->m_lastDecodedCTS + tolerance < *pTime) { - ALOGV("VideoEditorVideoDecoder_decode, frameCTS = %lf, DecodeUpTo = %lf", - pDecShellContext->m_lastDecodedCTS, *pTime); - if 
(M4OSA_TRUE == needSave) {
-            VIDEOEDITOR_BUFFER_getBufferForDecoder(pDecShellContext->m_pDecBufferPool);
-        } else {
-            needSave = M4OSA_TRUE;
-        }
-        // Read the buffer from the stagefright decoder
-        if (needSeek) {
-            MediaSource::ReadOptions options;
-            int64_t time_us = *pTime * 1000;
-            options.setSeekTo(time_us, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
-            errStatus = pDecShellContext->mVideoDecoder->read(&pNextBuffer, &options);
-            needSeek = false;
-        } else {
-            errStatus = pDecShellContext->mVideoDecoder->read(&pNextBuffer);
-        }
-
-        // Handle EOS and format change
-        if (errStatus == ERROR_END_OF_STREAM) {
-            ALOGV("End of stream reached, returning M4WAR_NO_MORE_AU ");
-            pDecShellContext->mReachedEOS = M4OSA_TRUE;
-            lerr = M4WAR_NO_MORE_AU;
-            // If we decoded a buffer before EOS, we still need to put it
-            // into the queue.
-            if (pDecoderBuffer && bJump) {
-                pDecoderBuffer->add_ref();
-                copyBufferToQueue(pDecShellContext, pDecoderBuffer);
-            }
-            goto VIDEOEDITOR_VideoDecode_cleanUP;
-        } else if (INFO_FORMAT_CHANGED == errStatus) {
-            ALOGV("VideoDecoder_decode : source returns INFO_FORMAT_CHANGED");
-            lerr = VideoEditorVideoDecoder_configureFromMetadata(
-                pDecShellContext,
-                pDecShellContext->mVideoDecoder->getFormat().get());
-            if( M4NO_ERROR != lerr ) {
-                ALOGV("!!! VideoEditorVideoDecoder_decode ERROR : "
-                    "VideoDecoder_configureFromMetadata returns 0x%X", lerr);
-                break;
-            }
-            continue;
-        } else if (errStatus != OK) {
-            ALOGE("VideoEditorVideoDecoder_decode ERROR:0x%x(%d)",
-                errStatus, errStatus);
-            lerr = errStatus;
-            goto VIDEOEDITOR_VideoDecode_cleanUP;
-        }
-
-        // The OMXCodec client should expect to receive 0-length buffers
-        // and drop the 0-length buffers.
-        if (pNextBuffer->range_length() == 0) {
-            pNextBuffer->release();
-            pNextBuffer = NULL;
-            needSave = M4OSA_FALSE;
-            continue;
-        }
-
-        pDecoderBuffer = pNextBuffer;
-
-        // Record the timestamp of the last decoded buffer
-        pDecoderBuffer->meta_data()->findInt64(kKeyTime, &lFrameTime);
-        pDecShellContext->m_lastDecodedCTS = (M4_MediaTime)(lFrameTime/1000);
-        ALOGV("VideoEditorVideoDecoder_decode,decoded frametime = %lf,size = %d",
-            (M4_MediaTime)lFrameTime, pDecoderBuffer->size() );
-
-        /*
-         * When bJump == false, we need to save each decoded buffer to a
-         * queue. These buffers have a timestamp >= the target time, *pTime
-         * (for instance, the transition between two videos, or a trimming
-         * position inside one video), since they are part of the transition
-         * clip or the trimmed video.
-         *
-         * If *pTime does not have the same value as any of the existing
-         * video frames, we would like to get the buffer right before *pTime;
-         * in the transcoding phase, this video frame will be encoded as a
-         * key frame and become the first video frame of the transition or
-         * the trimmed video to be generated. This buffer must also be queued.
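 *
 * Note (worked example, assuming ~33 ms frames, a small tolerance,
 * thumbnail mode off and *pTime = 50 ms): with bJump == M4OSA_FALSE the
 * frames decoded at CTS 0, 33 and 66 are all queued, and the loop stops
 * once m_lastDecodedCTS + tolerance >= *pTime. With bJump == M4OSA_TRUE
 * the frame at CTS 0 is released (its targetTimeMs of about 33 ms does
 * not exceed *pTime), while the frames at 33 and 66 are queued.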
- * - */ - int64_t targetTimeMs = - pDecShellContext->m_lastDecodedCTS + - pDecShellContext->mFrameIntervalMs + - tolerance; - - if (bJump && pDecShellContext->mThumbnail) { - ALOGV("pDecShellContext->mFrameIntervalMs = %lld, tolerance = %d", (int64_t)pDecShellContext->mFrameIntervalMs, tolerance); - ALOGI("mThumbnail mode: currTimeMs = %lld, targetTimeMs = %lld", targetTimeMs, (int64_t)*pTime); - if (targetTimeMs + THUMBNAIL_THRES < (int64_t)*pTime) { - lerr = copyBufferToQueue(pDecShellContext, pDecoderBuffer); - if (lerr != M4NO_ERROR) { - goto VIDEOEDITOR_VideoDecode_cleanUP; - } - break; - } - } - - if (!bJump || targetTimeMs > *pTime) { - lerr = copyBufferToQueue(pDecShellContext, pDecoderBuffer); - if (lerr != M4NO_ERROR) { - goto VIDEOEDITOR_VideoDecode_cleanUP; - } - } else { - if (pDecoderBuffer != NULL) { - pDecoderBuffer->release(); - pDecoderBuffer = NULL; - } - needSave = M4OSA_FALSE; - } - } - - pDecShellContext->mNbOutputFrames++; - if ( 0 > pDecShellContext->mFirstOutputCts ) { - pDecShellContext->mFirstOutputCts = *pTime; - } - pDecShellContext->mLastOutputCts = *pTime; - -VIDEOEDITOR_VideoDecode_cleanUP: - *pTime = pDecShellContext->m_lastDecodedCTS; - - ALOGV("VideoEditorVideoDecoder_decode: end with 0x%x", lerr); - return lerr; -} - -static M4OSA_ERR copyBufferToQueue( - VideoEditorVideoDecoder_Context* pDecShellContext, - MediaBuffer* pDecoderBuffer) { - - M4OSA_ERR lerr = M4NO_ERROR; - VIDEOEDITOR_BUFFER_Buffer* tmpDecBuffer; - - // Get a buffer from the queue - lerr = VIDEOEDITOR_BUFFER_getBuffer(pDecShellContext->m_pDecBufferPool, - VIDEOEDITOR_BUFFER_kEmpty, &tmpDecBuffer); - if (lerr == (M4OSA_UInt32)M4ERR_NO_BUFFER_AVAILABLE) { - lerr = VIDEOEDITOR_BUFFER_getOldestBuffer( - pDecShellContext->m_pDecBufferPool, - VIDEOEDITOR_BUFFER_kFilled, &tmpDecBuffer); - tmpDecBuffer->mBuffer->release(); - tmpDecBuffer->state = VIDEOEDITOR_BUFFER_kEmpty; - tmpDecBuffer->mBuffer = NULL; - tmpDecBuffer->size = 0; - tmpDecBuffer->buffCTS = -1; - lerr = M4NO_ERROR; - } - - if (lerr != M4NO_ERROR) return lerr; - - // Color convert or copy from the given MediaBuffer to our buffer - if (pDecShellContext->mI420ColorConverter) { - tmpDecBuffer->mBuffer = pDecoderBuffer; - } else if (pDecShellContext->decOuputColorFormat == OMX_COLOR_FormatYUV420Planar) { - int32_t width = pDecShellContext->m_pVideoStreamhandler->m_videoWidth; - int32_t height = pDecShellContext->m_pVideoStreamhandler->m_videoHeight; - int32_t yPlaneSize = width * height; - int32_t uvPlaneSize = width * height / 4; - int32_t offsetSrc = 0; - - if (( width == pDecShellContext->mGivenWidth ) && - ( height == pDecShellContext->mGivenHeight )) - { - M4OSA_MemAddr8 pTmpBuff = (M4OSA_MemAddr8)pDecoderBuffer->data() + pDecoderBuffer->range_offset(); - - memcpy((void *)((M4OSA_MemAddr8)tmpDecBuffer->mBuffer->data() + tmpDecBuffer->mBuffer->range_offset()), (void *)pTmpBuff, yPlaneSize); - - offsetSrc += pDecShellContext->mGivenWidth * pDecShellContext->mGivenHeight; - memcpy((void *)((M4OSA_MemAddr8)tmpDecBuffer->mBuffer->data() + tmpDecBuffer->mBuffer->range_offset() + yPlaneSize), - (void *)(pTmpBuff + offsetSrc), uvPlaneSize); - - offsetSrc += (pDecShellContext->mGivenWidth >> 1) * (pDecShellContext->mGivenHeight >> 1); - memcpy((void *)((M4OSA_MemAddr8)tmpDecBuffer->mBuffer->data() + tmpDecBuffer->mBuffer->range_offset() + yPlaneSize + uvPlaneSize), - (void *)(pTmpBuff + offsetSrc), uvPlaneSize); - } - else - { - M4OSA_MemAddr8 pTmpBuff = (M4OSA_MemAddr8)pDecoderBuffer->data() + pDecoderBuffer->range_offset(); - 
M4OSA_MemAddr8 pTmpBuffDst = (M4OSA_MemAddr8)(tmpDecBuffer->mBuffer->data() + tmpDecBuffer->mBuffer->range_offset()); - int32_t index; - - for ( index = 0; index < height; index++) - { - memcpy((void *)pTmpBuffDst, (void *)pTmpBuff, width); - pTmpBuffDst += width; - pTmpBuff += pDecShellContext->mGivenWidth; - } - - pTmpBuff += (pDecShellContext->mGivenWidth * ( pDecShellContext->mGivenHeight - height)); - for ( index = 0; index < height >> 1; index++) - { - memcpy((void *)pTmpBuffDst, (void *)pTmpBuff, width >> 1); - pTmpBuffDst += width >> 1; - pTmpBuff += pDecShellContext->mGivenWidth >> 1; - } - - pTmpBuff += ((pDecShellContext->mGivenWidth * (pDecShellContext->mGivenHeight - height)) / 4); - for ( index = 0; index < height >> 1; index++) - { - memcpy((void *)pTmpBuffDst, (void *)pTmpBuff, width >> 1); - pTmpBuffDst += width >> 1; - pTmpBuff += pDecShellContext->mGivenWidth >> 1; - } - } - } else { - ALOGE("VideoDecoder_decode: unexpected color format 0x%X", - pDecShellContext->decOuputColorFormat); - lerr = M4ERR_PARAMETER; - } - - tmpDecBuffer->buffCTS = pDecShellContext->m_lastDecodedCTS; - tmpDecBuffer->state = VIDEOEDITOR_BUFFER_kFilled; - tmpDecBuffer->size = pDecoderBuffer->range_length(); - - return lerr; -} - -M4OSA_ERR VideoEditorVideoDecoder_render(M4OSA_Context context, - M4_MediaTime* pTime, M4VIFI_ImagePlane* pOutputPlane, - M4OSA_Bool bForceRender) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoDecoder_Context* pDecShellContext = - (VideoEditorVideoDecoder_Context*) context; - M4OSA_UInt32 i; - M4OSA_UInt8* p_buf_src, *p_buf_dest; - M4VIFI_ImagePlane tmpPlaneIn, tmpPlaneOut; - VIDEOEDITOR_BUFFER_Buffer* pTmpVIDEOEDITORBuffer, *pRenderVIDEOEDITORBuffer - = M4OSA_NULL; - M4_MediaTime candidateTimeStamp = -1; - M4OSA_Bool bFound = M4OSA_FALSE; - - ALOGV("VideoEditorVideoDecoder_render begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != context, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pTime, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pOutputPlane, M4ERR_PARAMETER); - - // The output buffer is already allocated, just copy the data - if ( (*pTime <= pDecShellContext->m_lastRenderCts) && - (M4OSA_FALSE == bForceRender) ) { - ALOGV("VIDEOEDITOR_VIDEO_render Frame in the past"); - err = M4WAR_VIDEORENDERER_NO_NEW_FRAME; - goto cleanUp; - } - ALOGV("VideoDecoder_render: lastRendered time = %lf,requested render time = " - "%lf", pDecShellContext->m_lastRenderCts, *pTime); - - /** - * Find the buffer appropriate for rendering. 
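 *
 * Note: the scan below selects, among the filled buffers, the one with the
 * largest CTS that is both >= m_lastRenderCts and <= *pTime. For example,
 * with filled buffers at CTS 33, 66 and 100 ms, m_lastRenderCts = 33 and
 * *pTime = 80: the candidate advances from 33 to 66, the buffer at 100 is
 * rejected for lying past the requested time, and the buffer at 66 ms is
 * rendered.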
*/ - for (i=0; i < pDecShellContext->m_pDecBufferPool->NB; i++) { - pTmpVIDEOEDITORBuffer = &pDecShellContext->m_pDecBufferPool\ - ->pNXPBuffer[i]; - if (pTmpVIDEOEDITORBuffer->state == VIDEOEDITOR_BUFFER_kFilled) { - /** Get the buffer with appropriate timestamp */ - if ( (pTmpVIDEOEDITORBuffer->buffCTS >= pDecShellContext->\ - m_lastRenderCts) && - (pTmpVIDEOEDITORBuffer->buffCTS <= *pTime) && - (pTmpVIDEOEDITORBuffer->buffCTS > candidateTimeStamp)) { - bFound = M4OSA_TRUE; - pRenderVIDEOEDITORBuffer = pTmpVIDEOEDITORBuffer; - candidateTimeStamp = pTmpVIDEOEDITORBuffer->buffCTS; - ALOGV("VideoDecoder_render: found a buffer with timestamp = %lf", - candidateTimeStamp); - } - } - } - if (M4OSA_FALSE == bFound) { - err = M4WAR_VIDEORENDERER_NO_NEW_FRAME; - goto cleanUp; - } - - ALOGV("VideoEditorVideoDecoder_render 3 output %d %d %d %d", - pOutputPlane[0].u_width, pOutputPlane[0].u_height, - pOutputPlane[0].u_topleft, pOutputPlane[0].u_stride); - - pDecShellContext->m_lastRenderCts = candidateTimeStamp; - - if( M4OSA_NULL != pDecShellContext->m_pFilter ) { - // Filtering was requested - M4VIFI_ImagePlane tmpPlane[2]; - // Prepare the output image for conversion - tmpPlane[0].u_width = - pDecShellContext->m_pVideoStreamhandler->m_videoWidth; - tmpPlane[0].u_height = - pDecShellContext->m_pVideoStreamhandler->m_videoHeight; - tmpPlane[0].u_topleft = 0; - tmpPlane[0].u_stride = tmpPlane[0].u_width; - tmpPlane[0].pac_data = (M4VIFI_UInt8*)(pRenderVIDEOEDITORBuffer->mBuffer->data() + pRenderVIDEOEDITORBuffer->mBuffer->range_offset()); - tmpPlane[1].u_width = tmpPlane[0].u_width; - tmpPlane[1].u_height = tmpPlane[0].u_height>>1; - tmpPlane[1].u_topleft = 0; - tmpPlane[1].u_stride = tmpPlane[0].u_stride; - tmpPlane[1].pac_data = tmpPlane[0].pac_data + - (tmpPlane[0].u_stride * tmpPlane[0].u_height); - ALOGV("VideoEditorVideoDecoder_render W = %d H = %d", - tmpPlane[0].u_width,tmpPlane[0].u_height); - pDecShellContext->m_pFilter(M4OSA_NULL, &tmpPlane[0], pOutputPlane); - } else { - // Just copy the YUV420P buffer - M4OSA_MemAddr8 tempBuffPtr = - (M4OSA_MemAddr8)(pRenderVIDEOEDITORBuffer->mBuffer->data() + pRenderVIDEOEDITORBuffer->mBuffer->range_offset()); - M4OSA_UInt32 tempWidth = - pDecShellContext->m_pVideoStreamhandler->m_videoWidth; - M4OSA_UInt32 tempHeight = - pDecShellContext->m_pVideoStreamhandler->m_videoHeight; - - memcpy((void *) pOutputPlane[0].pac_data, (void *)tempBuffPtr, - tempWidth * tempHeight); - tempBuffPtr += (tempWidth * tempHeight); - memcpy((void *) pOutputPlane[1].pac_data, (void *)tempBuffPtr, - tempWidth * (tempHeight>>1)); - } - - pDecShellContext->mNbRenderedFrames++; - if ( 0 > pDecShellContext->mFirstRenderedCts ) { - pDecShellContext->mFirstRenderedCts = *pTime; - } - pDecShellContext->mLastRenderedCts = *pTime; - -cleanUp: - if( M4NO_ERROR == err ) { - *pTime = pDecShellContext->m_lastRenderCts; - ALOGV("VideoEditorVideoDecoder_render no error"); - } else { - ALOGV("VideoEditorVideoDecoder_render ERROR 0x%X", err); - } - ALOGV("VideoEditorVideoDecoder_render end"); - return err; -} - -M4OSA_ERR VideoEditorVideoDecoder_getInterface(M4DECODER_VideoType decoderType, - M4DECODER_VideoType *pDecoderType, M4OSA_Context *pDecInterface) { - M4DECODER_VideoInterface* pDecoderInterface = M4OSA_NULL; - - pDecoderInterface = (M4DECODER_VideoInterface*)M4OSA_32bitAlignedMalloc( - sizeof(M4DECODER_VideoInterface), M4DECODER_EXTERNAL, - (M4OSA_Char*)"VideoEditorVideoDecoder_getInterface" ); - if (M4OSA_NULL == pDecoderInterface) { - return M4ERR_ALLOC; - } - - *pDecoderType = 
decoderType; - - pDecoderInterface->m_pFctCreate = VideoEditorVideoDecoder_create; - pDecoderInterface->m_pFctDestroy = VideoEditorVideoDecoder_destroy; - pDecoderInterface->m_pFctGetOption = VideoEditorVideoDecoder_getOption; - pDecoderInterface->m_pFctSetOption = VideoEditorVideoDecoder_setOption; - pDecoderInterface->m_pFctDecode = VideoEditorVideoDecoder_decode; - pDecoderInterface->m_pFctRender = VideoEditorVideoDecoder_render; - - *pDecInterface = (M4OSA_Context)pDecoderInterface; - return M4NO_ERROR; -} - -M4OSA_ERR VideoEditorVideoDecoder_getSoftwareInterface(M4DECODER_VideoType decoderType, - M4DECODER_VideoType *pDecoderType, M4OSA_Context *pDecInterface) { - M4DECODER_VideoInterface* pDecoderInterface = M4OSA_NULL; - - pDecoderInterface = (M4DECODER_VideoInterface*)M4OSA_32bitAlignedMalloc( - sizeof(M4DECODER_VideoInterface), M4DECODER_EXTERNAL, - (M4OSA_Char*)"VideoEditorVideoDecoder_getInterface" ); - if (M4OSA_NULL == pDecoderInterface) { - return M4ERR_ALLOC; - } - - *pDecoderType = decoderType; - - pDecoderInterface->m_pFctCreate = VideoEditorVideoSoftwareDecoder_create; - pDecoderInterface->m_pFctDestroy = VideoEditorVideoDecoder_destroy; - pDecoderInterface->m_pFctGetOption = VideoEditorVideoDecoder_getOption; - pDecoderInterface->m_pFctSetOption = VideoEditorVideoDecoder_setOption; - pDecoderInterface->m_pFctDecode = VideoEditorVideoDecoder_decode; - pDecoderInterface->m_pFctRender = VideoEditorVideoDecoder_render; - - *pDecInterface = (M4OSA_Context)pDecoderInterface; - return M4NO_ERROR; -} -extern "C" { - -M4OSA_ERR VideoEditorVideoDecoder_getInterface_MPEG4( - M4DECODER_VideoType *pDecoderType, M4OSA_Context *pDecInterface) { - return VideoEditorVideoDecoder_getInterface(M4DECODER_kVideoTypeMPEG4, - pDecoderType, pDecInterface); -} - -M4OSA_ERR VideoEditorVideoDecoder_getInterface_H264( - M4DECODER_VideoType *pDecoderType, M4OSA_Context *pDecInterface) { - return VideoEditorVideoDecoder_getInterface(M4DECODER_kVideoTypeAVC, - pDecoderType, pDecInterface); - -} - -M4OSA_ERR VideoEditorVideoDecoder_getSoftwareInterface_MPEG4( - M4DECODER_VideoType *pDecoderType, M4OSA_Context *pDecInterface) { - return VideoEditorVideoDecoder_getSoftwareInterface(M4DECODER_kVideoTypeMPEG4, - pDecoderType, pDecInterface); -} - -M4OSA_ERR VideoEditorVideoDecoder_getSoftwareInterface_H264( - M4DECODER_VideoType *pDecoderType, M4OSA_Context *pDecInterface) { - return VideoEditorVideoDecoder_getSoftwareInterface(M4DECODER_kVideoTypeAVC, - pDecoderType, pDecInterface); - -} - -M4OSA_ERR VideoEditorVideoDecoder_getVideoDecodersAndCapabilities( - M4DECODER_VideoDecoders** decoders) { - return queryVideoDecoderCapabilities(decoders); -} - -} // extern "C" diff --git a/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp b/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp deleted file mode 100644 index c63d251..0000000 --- a/frameworks/videoedit/stagefrightshells/VideoEditorVideoEncoder.cpp +++ /dev/null @@ -1,1284 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. 
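The two getInterface variants above differ only in the m_pFctCreate slot (hardware create versus VideoEditorVideoSoftwareDecoder_create); every other entry point is shared. The pattern is a heap-allocated table of function pointers handed back as an opaque context. A reduced sketch with hypothetical types, not the shell's real interface:

    #include <cstdlib>

    typedef int (*CreateFn)(void** ctx);
    typedef int (*DecodeFn)(void* ctx, const void* au, unsigned len);

    struct DecoderInterface {
        CreateFn create;
        DecodeFn decode;
    };

    static int getInterface(DecoderInterface** out,
                            CreateFn create, DecodeFn decode) {
        DecoderInterface* itf =
            static_cast<DecoderInterface*>(std::malloc(sizeof(*itf)));
        if (!itf) return -1;      // mirrors the M4ERR_ALLOC path above
        itf->create = create;     // every slot must be populated; a NULL
        itf->decode = decode;     // entry would crash the engine later
        *out = itf;
        return 0;
    }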
The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** -************************************************************************* -* @file VideoEditorVideoEncoder.cpp -* @brief StageFright shell video encoder -************************************************************************* -*/ -#define LOG_NDEBUG 1 -#define LOG_TAG "VIDEOEDITOR_VIDEOENCODER" - -/******************* - * HEADERS * - *******************/ -#include "M4OSA_Debug.h" -#include "M4SYS_AccessUnit.h" -#include "VideoEditorVideoEncoder.h" -#include "MediaBufferPuller.h" -#include - -#include -#include "utils/Log.h" -#include "utils/Vector.h" -#include -#include -#include -#include -#include -#include -#include -#include "OMX_Video.h" - -#include "IntelVideoEditorEncoderSource.h" -#include "IntelVideoEditorAVCEncoder.h" -#include "IntelVideoEditorH263Encoder.h" -#include "IntelVideoEditorUtils.h" - -/******************** - * DEFINITIONS * - ********************/ - -// Force using hardware encoder -#define VIDEOEDITOR_FORCECODEC kHardwareCodecsOnly - -#if !defined(VIDEOEDITOR_FORCECODEC) - #error "Cannot force DSI retrieval if codec type is not fixed" -#endif - -#define WIDTH_1080P_INTEL 1920 -#define HEIGHT_1080P_INTEL 1080 -/******************** - * SOURCE CLASS * - ********************/ - -namespace android { - -struct VideoEditorVideoEncoderSource : public MediaSource { - public: - static sp Create( - const sp &format); - virtual status_t start(MetaData *params = NULL); - virtual status_t stop(); - virtual sp getFormat(); - virtual status_t read(MediaBuffer **buffer, - const ReadOptions *options = NULL); - virtual int32_t storeBuffer(MediaBuffer *buffer); - virtual int32_t getNumberOfBuffersInQueue(); - - protected: - virtual ~VideoEditorVideoEncoderSource(); - - private: - struct MediaBufferChain { - MediaBuffer* buffer; - MediaBufferChain* nextLink; - }; - enum State { - CREATED, - STARTED, - ERROR - }; - VideoEditorVideoEncoderSource(const sp &format); - - // Don't call me - VideoEditorVideoEncoderSource(const VideoEditorVideoEncoderSource &); - VideoEditorVideoEncoderSource 
&operator=( - const VideoEditorVideoEncoderSource &); - - MediaBufferChain* mFirstBufferLink; - MediaBufferChain* mLastBufferLink; - int32_t mNbBuffer; - bool mIsEOS; - State mState; - sp mEncFormat; - Mutex mLock; - Condition mBufferCond; -}; - -sp VideoEditorVideoEncoderSource::Create( - const sp &format) { - - sp aSource = - new VideoEditorVideoEncoderSource(format); - return aSource; -} - -VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource( - const sp &format): - mFirstBufferLink(NULL), - mLastBufferLink(NULL), - mNbBuffer(0), - mIsEOS(false), - mState(CREATED), - mEncFormat(format) { - ALOGV("VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource"); -} - -VideoEditorVideoEncoderSource::~VideoEditorVideoEncoderSource() { - - // Safety clean up - if( STARTED == mState ) { - stop(); - } -} - -status_t VideoEditorVideoEncoderSource::start(MetaData *meta) { - status_t err = OK; - - ALOGV("VideoEditorVideoEncoderSource::start() begin"); - - if( CREATED != mState ) { - ALOGV("VideoEditorVideoEncoderSource::start: invalid state %d", mState); - return UNKNOWN_ERROR; - } - mState = STARTED; - - ALOGV("VideoEditorVideoEncoderSource::start() END (0x%x)", err); - return err; -} - -status_t VideoEditorVideoEncoderSource::stop() { - status_t err = OK; - - ALOGV("VideoEditorVideoEncoderSource::stop() begin"); - - if( STARTED != mState ) { - ALOGV("VideoEditorVideoEncoderSource::stop: invalid state %d", mState); - return UNKNOWN_ERROR; - } - - // Release the buffer chain - int32_t i = 0; - MediaBufferChain* tmpLink = NULL; - while( mFirstBufferLink ) { - i++; - tmpLink = mFirstBufferLink; - mFirstBufferLink = mFirstBufferLink->nextLink; - delete tmpLink; - } - ALOGV("VideoEditorVideoEncoderSource::stop : %d buffer remained", i); - mFirstBufferLink = NULL; - mLastBufferLink = NULL; - - mState = CREATED; - - ALOGV("VideoEditorVideoEncoderSource::stop() END (0x%x)", err); - return err; -} - -sp VideoEditorVideoEncoderSource::getFormat() { - - ALOGV("VideoEditorVideoEncoderSource::getFormat"); - return mEncFormat; -} - -status_t VideoEditorVideoEncoderSource::read(MediaBuffer **buffer, - const ReadOptions *options) { - Mutex::Autolock autolock(mLock); - MediaSource::ReadOptions readOptions; - status_t err = OK; - MediaBufferChain* tmpLink = NULL; - - ALOGV("VideoEditorVideoEncoderSource::read() begin"); - - if ( STARTED != mState ) { - ALOGV("VideoEditorVideoEncoderSource::read: invalid state %d", mState); - return UNKNOWN_ERROR; - } - - while (mFirstBufferLink == NULL && !mIsEOS) { - mBufferCond.wait(mLock); - } - - // End of stream? 
- if (mFirstBufferLink == NULL) { - *buffer = NULL; - ALOGV("VideoEditorVideoEncoderSource::read : EOS"); - return ERROR_END_OF_STREAM; - } - - // Get a buffer from the chain - *buffer = mFirstBufferLink->buffer; - tmpLink = mFirstBufferLink; - mFirstBufferLink = mFirstBufferLink->nextLink; - - if ( NULL == mFirstBufferLink ) { - mLastBufferLink = NULL; - } - delete tmpLink; - mNbBuffer--; - - ALOGV("VideoEditorVideoEncoderSource::read() END (0x%x)", err); - return err; -} - -int32_t VideoEditorVideoEncoderSource::storeBuffer(MediaBuffer *buffer) { - Mutex::Autolock autolock(mLock); - status_t err = OK; - - ALOGV("VideoEditorVideoEncoderSource::storeBuffer() begin"); - - if( NULL == buffer ) { - ALOGV("VideoEditorVideoEncoderSource::storeBuffer : reached EOS"); - mIsEOS = true; - } else { - MediaBufferChain* newLink = new MediaBufferChain; - newLink->buffer = buffer; - newLink->nextLink = NULL; - if( NULL != mLastBufferLink ) { - mLastBufferLink->nextLink = newLink; - } else { - mFirstBufferLink = newLink; - } - mLastBufferLink = newLink; - mNbBuffer++; - } - mBufferCond.signal(); - ALOGV("VideoEditorVideoEncoderSource::storeBuffer() end"); - return mNbBuffer; -} - -int32_t VideoEditorVideoEncoderSource::getNumberOfBuffersInQueue() { - Mutex::Autolock autolock(mLock); - return mNbBuffer; -} - -/** - ****************************************************************************** - * structure VideoEditorVideoEncoder_Context - * @brief This structure defines the context of the StageFright video encoder - * shell - ****************************************************************************** -*/ -typedef enum { - CREATED = 0x1, - OPENED = 0x2, - STARTED = 0x4, - BUFFERING = 0x8, - READING = 0x10 -} VideoEditorVideoEncoder_State; - -typedef struct { - VideoEditorVideoEncoder_State mState; - M4ENCODER_Format mFormat; - M4WRITER_DataInterface* mWriterDataInterface; - M4VPP_apply_fct* mPreProcFunction; - M4VPP_Context mPreProcContext; - M4SYS_AccessUnit* mAccessUnit; - M4ENCODER_Params* mCodecParams; - M4ENCODER_Header mHeader; - H264MCS_ProcessEncodedNALU_fct* mH264NALUPostProcessFct; - M4OSA_Context mH264NALUPostProcessCtx; - M4OSA_UInt32 mLastCTS; - sp mEncoderSource; - OMXClient mClient; - sp mEncoder; - OMX_COLOR_FORMATTYPE mEncoderColorFormat; - MediaBufferPuller* mPuller; - I420ColorConverter* mI420ColorConverter; - - uint32_t mNbInputFrames; - double mFirstInputCts; - double mLastInputCts; - uint32_t mNbOutputFrames; - int64_t mFirstOutputCts; - int64_t mLastOutputCts; - - MediaProfiles *mVideoEditorProfile; - int32_t mMaxPrefetchFrames; -} VideoEditorVideoEncoder_Context; - -/******************** - * TOOLS * - ********************/ - -M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext, - sp metaData) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; - status_t result = OK; - int32_t nbBuffer = 0; - int32_t stride = 0; - int32_t height = 0; - int32_t framerate = 0; - int32_t isCodecConfig = 0; - size_t size = 0; - uint32_t codecFlags = 0; - MediaBuffer* inputBuffer = NULL; - MediaBuffer* outputBuffer = NULL; - sp encoderSource = NULL; - sp encoder = NULL;; - OMXClient client; - - ALOGV("VideoEditorVideoEncoder_getDSI begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; - VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); - - // 
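storeBuffer() and read() above form a classic mutex-plus-condition FIFO in which a NULL buffer doubles as the end-of-stream marker: read() blocks until a link arrives or EOS is flagged, and reports end of stream only once the chain has drained. An equivalent standalone sketch using standard C++ primitives instead of Android's Mutex/Condition:

    #include <condition_variable>
    #include <deque>
    #include <mutex>

    template <typename Buf>
    class BufferQueue {
    public:
        // Appends and signals; nullptr marks EOS. Returns the queue depth,
        // like storeBuffer() above.
        int store(Buf* b) {
            std::lock_guard<std::mutex> lock(mLock);
            if (b == nullptr) mEos = true; else mQueue.push_back(b);
            mCond.notify_one();
            return static_cast<int>(mQueue.size());
        }
        // Blocks; returns nullptr only once EOS is set and the queue is empty.
        Buf* read() {
            std::unique_lock<std::mutex> lock(mLock);
            mCond.wait(lock, [this] { return !mQueue.empty() || mEos; });
            if (mQueue.empty()) return nullptr;   // end of stream
            Buf* b = mQueue.front();
            mQueue.pop_front();
            return b;
        }
    private:
        std::mutex mLock;
        std::condition_variable mCond;
        std::deque<Buf*> mQueue;
        bool mEos = false;
    };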
Create the encoder source - encoderSource = IntelVideoEditorEncoderSource::Create(metaData); - VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE); - - // Create Hardware encoder - - encoder = new IntelVideoEditorAVCEncoder(encoderSource,metaData); - VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE); - - /** - * Send fake frames and retrieve the DSI - */ - // Send a fake frame to the source - metaData->findInt32(kKeyStride, &stride); - metaData->findInt32(kKeyHeight, &height); - metaData->findInt32(kKeySampleRate, &framerate); - - result = encoder->start(); - VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); - - encoderSource->requestBuffer(&inputBuffer); - - inputBuffer->meta_data()->setInt64(kKeyTime, 0); - nbBuffer = encoderSource->storeBuffer(inputBuffer); - encoderSource->storeBuffer(NULL); // Signal EOS - - // Call read once to get the DSI - result = encoder->read(&outputBuffer, NULL); - VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); - VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32( - kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE); - - VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE); - if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) { - // For H264, format the DSI - LOGV("outputBuffer->range_offset() = %d, outputBuffer->range_length() = %d", - outputBuffer->range_offset(), outputBuffer->range_length()); - result = buildAVCCodecSpecificData( - (uint8_t**)(&(pEncoderContext->mHeader.pBuf)), - (size_t*)(&(pEncoderContext->mHeader.Size)), - (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(), - outputBuffer->range_length(), encoder->getFormat().get()); - outputBuffer->release(); - VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); - } else { - // For MPEG4, just copy the DSI - pEncoderContext->mHeader.Size = - (M4OSA_UInt32)outputBuffer->range_length(); - SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8, - pEncoderContext->mHeader.Size, "Encoder header"); - memcpy((void *)pEncoderContext->mHeader.pBuf, - (void *)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()), - pEncoderContext->mHeader.Size); - outputBuffer->release(); - } - - result = encoder->stop(); - VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); - -cleanUp: - // Destroy the graph - if ( encoder != NULL ) { encoder.clear(); } - //client.disconnect(); - if ( encoderSource != NULL ) { encoderSource.clear(); } - if ( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoEncoder_getDSI no error"); - } else { - ALOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err); - } - ALOGV("VideoEditorVideoEncoder_getDSI end"); - return err; -} -/******************** - * ENGINE INTERFACE * - ********************/ - -M4OSA_ERR VideoEditorVideoEncoder_cleanup(M4ENCODER_Context pContext) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; - - ALOGV("VideoEditorVideoEncoder_cleanup begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; - VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); - - // Release memory - SAFE_FREE(pEncoderContext->mHeader.pBuf); - SAFE_FREE(pEncoderContext); - pContext = M4OSA_NULL; - -cleanUp: - if ( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoEncoder_cleanup no error"); - } else { - ALOGV("VideoEditorVideoEncoder_cleanup ERROR 0x%X", err); - } - ALOGV("VideoEditorVideoEncoder_cleanup end"); - return err; -} - -M4OSA_ERR 
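getDSI above spins up a throwaway encoder instance and feeds it a single fake frame purely to coax out the codec-config buffer (the DSI) before the real session is built, which is why the file carries the forced-codec #error guard earlier. The flow, as a hedged sketch around a stand-in Encoder type whose methods are assumptions, not the Intel encoder's API:

    struct Packet { bool isCodecConfig; /* payload omitted */ };

    template <typename Encoder>
    static bool fetchDsi(Encoder& enc, Packet& dsi) {
        enc.start();
        enc.submitDummyFrame();   // timestamp 0; pixel content is irrelevant
        enc.signalEos();          // force the encoder to flush
        Packet p;
        while (enc.read(&p)) {    // read until config data appears
            if (p.isCodecConfig) { dsi = p; enc.stop(); return true; }
        }
        enc.stop();
        return false;             // encoder never produced config data
    }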
VideoEditorVideoEncoder_init(M4ENCODER_Format format, - M4ENCODER_Context* pContext, - M4WRITER_DataInterface* pWriterDataInterface, - M4VPP_apply_fct* pVPPfct, M4VPP_Context pVPPctxt, - M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) { - - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; - int encoderInput = OMX_COLOR_FormatYUV420Planar; - - ALOGV("VideoEditorVideoEncoder_init begin: format %d", format); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pWriterDataInterface, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPfct, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPctxt, M4ERR_PARAMETER); - - // Context allocation & initialization - SAFE_MALLOC(pEncoderContext, VideoEditorVideoEncoder_Context, 1, - "VideoEditorVideoEncoder"); - pEncoderContext->mState = CREATED; - pEncoderContext->mFormat = format; - pEncoderContext->mWriterDataInterface = pWriterDataInterface; - pEncoderContext->mPreProcFunction = pVPPfct; - pEncoderContext->mPreProcContext = pVPPctxt; - pEncoderContext->mPuller = NULL; - - // Get color converter and determine encoder input format - pEncoderContext->mI420ColorConverter = new I420ColorConverter; - if (pEncoderContext->mI420ColorConverter->isLoaded()) { - encoderInput = pEncoderContext->mI420ColorConverter->getEncoderInputFormat(); - } - if (encoderInput == OMX_COLOR_FormatYUV420Planar) { - delete pEncoderContext->mI420ColorConverter; - pEncoderContext->mI420ColorConverter = NULL; - } - pEncoderContext->mEncoderColorFormat = (OMX_COLOR_FORMATTYPE)encoderInput; - ALOGI("encoder input format = 0x%X\n", encoderInput); - - *pContext = pEncoderContext; - -cleanUp: - if ( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoEncoder_init no error"); - } else { - VideoEditorVideoEncoder_cleanup(pEncoderContext); - *pContext = M4OSA_NULL; - ALOGV("VideoEditorVideoEncoder_init ERROR 0x%X", err); - } - ALOGV("VideoEditorVideoEncoder_init end"); - return err; -} - -M4OSA_ERR VideoEditorVideoEncoder_init_H263(M4ENCODER_Context* pContext, - M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct, - M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) - { - - return VideoEditorVideoEncoder_init(M4ENCODER_kH263, pContext, - pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData); -} - - -M4OSA_ERR VideoEditorVideoEncoder_init_MPEG4(M4ENCODER_Context* pContext, - M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct, - M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) - { - - return VideoEditorVideoEncoder_init(M4ENCODER_kMPEG4, pContext, - pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData); -} - - -M4OSA_ERR VideoEditorVideoEncoder_init_H264(M4ENCODER_Context* pContext, - M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct, - M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) - { - - return VideoEditorVideoEncoder_init(M4ENCODER_kH264, pContext, - pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData); -} - -M4OSA_ERR VideoEditorVideoEncoder_close(M4ENCODER_Context pContext) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; - - ALOGV("VideoEditorVideoEncoder_close begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; - VIDEOEDITOR_CHECK(OPENED == 
pEncoderContext->mState, M4ERR_STATE); - - // Release memory - SAFE_FREE(pEncoderContext->mCodecParams); - - // Destroy the graph - pEncoderContext->mEncoder.clear(); - // pEncoderContext->mClient.disconnect(); - pEncoderContext->mEncoderSource.clear(); - - delete pEncoderContext->mPuller; - pEncoderContext->mPuller = NULL; - - delete pEncoderContext->mI420ColorConverter; - pEncoderContext->mI420ColorConverter = NULL; - - // Set the new state - pEncoderContext->mState = CREATED; - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoEncoder_close no error"); - } else { - ALOGV("VideoEditorVideoEncoder_close ERROR 0x%X", err); - } - ALOGV("VideoEditorVideoEncoder_close end"); - return err; -} - - -M4OSA_ERR VideoEditorVideoEncoder_open(M4ENCODER_Context pContext, - M4SYS_AccessUnit* pAU, M4OSA_Void* pParams) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; - M4ENCODER_Params* pCodecParams = M4OSA_NULL; - status_t result = OK; - sp encoderMetadata = NULL; - const char* mime = NULL; - int32_t iProfile = 0; - int32_t iLevel = 0; - - int32_t iFrameRate = 0; - uint32_t codecFlags = 0; - - ALOGV(">>> VideoEditorVideoEncoder_open begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pAU, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pParams, M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; - pCodecParams = (M4ENCODER_Params*)pParams; - VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); - - // Context initialization - pEncoderContext->mAccessUnit = pAU; - pEncoderContext->mVideoEditorProfile = MediaProfiles::getInstance(); - pEncoderContext->mMaxPrefetchFrames = - pEncoderContext->mVideoEditorProfile->getVideoEditorCapParamByName( - "maxPrefetchYUVFrames"); - - // Allocate & initialize the encoding parameters - SAFE_MALLOC(pEncoderContext->mCodecParams, M4ENCODER_Params, 1, - "VideoEditorVideoEncoder"); - - - pEncoderContext->mCodecParams->InputFormat = pCodecParams->InputFormat; - pEncoderContext->mCodecParams->InputFrameWidth = - pCodecParams->InputFrameWidth; - pEncoderContext->mCodecParams->InputFrameHeight = - pCodecParams->InputFrameHeight; - pEncoderContext->mCodecParams->FrameWidth = pCodecParams->FrameWidth; - pEncoderContext->mCodecParams->FrameHeight = pCodecParams->FrameHeight; - pEncoderContext->mCodecParams->Bitrate = pCodecParams->Bitrate; - pEncoderContext->mCodecParams->FrameRate = pCodecParams->FrameRate; - pEncoderContext->mCodecParams->Format = pCodecParams->Format; - pEncoderContext->mCodecParams->videoProfile = pCodecParams->videoProfile; - pEncoderContext->mCodecParams->videoLevel= pCodecParams->videoLevel; - - // Check output format consistency and resolution - VIDEOEDITOR_CHECK( - pEncoderContext->mCodecParams->Format == pEncoderContext->mFormat, - M4ERR_PARAMETER); - if (pEncoderContext->mCodecParams->FrameWidth != WIDTH_1080P_INTEL && - pEncoderContext->mCodecParams->FrameHeight != HEIGHT_1080P_INTEL) { - VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameWidth % 16, - M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameHeight % 16, - M4ERR_PARAMETER); - } - - /** - * StageFright graph building - */ - - // Create the meta data for the encoder - encoderMetadata = new MetaData; - switch( pEncoderContext->mCodecParams->Format ) { - case M4ENCODER_kH263: - mime = MEDIA_MIMETYPE_VIDEO_H263; - break; - case M4ENCODER_kMPEG4: - mime = 
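The resolution check above enforces 16-pixel (macroblock) alignment except around the 1080p special case, since 1080 is not a multiple of 16. Note that the deleted code tests FrameWidth != 1920 && FrameHeight != 1080, which also exempts any clip that merely shares one 1080p dimension; that looks unintended. A sketch that exempts only the exact pair:

    static bool resolutionOk(int w, int h) {
        const int kW1080p = 1920, kH1080p = 1080;
        // 1920x1080 is allowed even though 1080 is not MB-aligned
        // (1080 / 16 = 67.5); the HW codec handles the cropped rows.
        if (w == kW1080p && h == kH1080p) return true;
        return (w % 16 == 0) && (h % 16 == 0);
    }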
MEDIA_MIMETYPE_VIDEO_MPEG4; - break; - case M4ENCODER_kH264: - mime = MEDIA_MIMETYPE_VIDEO_AVC; - break; - default: - VIDEOEDITOR_CHECK(!"VideoEncoder_open : incorrect input format", - M4ERR_PARAMETER); - break; - } - iProfile = pEncoderContext->mCodecParams->videoProfile; - iLevel = pEncoderContext->mCodecParams->videoLevel; - ALOGV("Encoder mime %s profile %d, level %d", - mime,iProfile, iLevel); - ALOGV("Encoder w %d, h %d, bitrate %d, fps %d", - pEncoderContext->mCodecParams->FrameWidth, - pEncoderContext->mCodecParams->FrameHeight, - pEncoderContext->mCodecParams->Bitrate, - pEncoderContext->mCodecParams->FrameRate); - CHECK(iProfile != 0x7fffffff); - CHECK(iLevel != 0x7fffffff); - - encoderMetadata->setCString(kKeyMIMEType, mime); - encoderMetadata->setInt32(kKeyVideoProfile, iProfile); - //FIXME: - // Temp: Do not set the level for Mpeg4 / H.263 Enc - // as OMX.Nvidia.mp4.encoder and OMX.Nvidia.h263.encoder - // return 0x80001019 - if (pEncoderContext->mCodecParams->Format == M4ENCODER_kH264) { - encoderMetadata->setInt32(kKeyVideoLevel, iLevel); - } - encoderMetadata->setInt32(kKeyWidth, - (int32_t)pEncoderContext->mCodecParams->FrameWidth); - encoderMetadata->setInt32(kKeyStride, - (int32_t)pEncoderContext->mCodecParams->FrameWidth); - encoderMetadata->setInt32(kKeyHeight, - (int32_t)pEncoderContext->mCodecParams->FrameHeight); - encoderMetadata->setInt32(kKeySliceHeight, - (int32_t)pEncoderContext->mCodecParams->FrameHeight); - - switch( pEncoderContext->mCodecParams->FrameRate ) { - case M4ENCODER_k5_FPS: iFrameRate = 5; break; - case M4ENCODER_k7_5_FPS: iFrameRate = 8; break; - case M4ENCODER_k10_FPS: iFrameRate = 10; break; - case M4ENCODER_k12_5_FPS: iFrameRate = 13; break; - case M4ENCODER_k15_FPS: iFrameRate = 15; break; - case M4ENCODER_k20_FPS: iFrameRate = 20; break; - case M4ENCODER_k25_FPS: iFrameRate = 25; break; - case M4ENCODER_k30_FPS: iFrameRate = 30; break; - case M4ENCODER_kVARIABLE_FPS: - iFrameRate = 30; - ALOGI("Frame rate set to M4ENCODER_kVARIABLE_FPS: set to 30"); - break; - case M4ENCODER_kUSE_TIMESCALE: - iFrameRate = 30; - ALOGI("Frame rate set to M4ENCODER_kUSE_TIMESCALE: set to 30"); - break; - - default: - VIDEOEDITOR_CHECK(!"VideoEncoder_open:incorrect framerate", - M4ERR_STATE); - break; - } - encoderMetadata->setInt32(kKeyFrameRate, iFrameRate); - encoderMetadata->setInt32(kKeyBitRate, - (int32_t)pEncoderContext->mCodecParams->Bitrate); - encoderMetadata->setInt32(kKeyIFramesInterval, 1); - - encoderMetadata->setInt32(kKeyColorFormat, - pEncoderContext->mEncoderColorFormat); - - if (pEncoderContext->mCodecParams->Format != M4ENCODER_kH263) { - // Get the encoder DSI - err = VideoEditorVideoEncoder_getDSI(pEncoderContext, encoderMetadata); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - } - - // Create the encoder source - pEncoderContext->mEncoderSource = IntelVideoEditorEncoderSource::Create( - encoderMetadata); - VIDEOEDITOR_CHECK( - NULL != pEncoderContext->mEncoderSource.get(), M4ERR_STATE); - - // Create the HW Encoder - if (pEncoderContext->mCodecParams->Format == M4ENCODER_kH264) { - pEncoderContext->mEncoder = new IntelVideoEditorAVCEncoder( - pEncoderContext->mEncoderSource, encoderMetadata); - } else if (pEncoderContext->mCodecParams->Format == M4ENCODER_kH263) { - pEncoderContext->mEncoder = new IntelVideoEditorH263Encoder( - pEncoderContext->mEncoderSource, encoderMetadata); - } - VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoder.get(), M4ERR_STATE); - ALOGV("VideoEditorVideoEncoder_open : DONE"); - pEncoderContext->mPuller = new 
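The switch above collapses the M4ENCODER fractional frame rates to integers for kKeyFrameRate, rounding 7.5 up to 8 and 12.5 up to 13, and pinning the variable and timescale modes at 30 fps. The same mapping as a standalone sketch, with enum names shortened:

    enum class Fps { k5, k7_5, k10, k12_5, k15, k20, k25, k30,
                     kVariable, kUseTimescale };

    static int toIntegerFps(Fps f) {
        switch (f) {
            case Fps::k5:    return 5;
            case Fps::k7_5:  return 8;    // rounded up: key takes an int
            case Fps::k10:   return 10;
            case Fps::k12_5: return 13;   // rounded up
            case Fps::k15:   return 15;
            case Fps::k20:   return 20;
            case Fps::k25:   return 25;
            case Fps::k30:   return 30;
            default:         return 30;   // kVariable / kUseTimescale fallback
        }
    }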
MediaBufferPuller( - pEncoderContext->mEncoder); - - // Set the new state - pEncoderContext->mState = OPENED; - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoEncoder_open no error"); - } else { - VideoEditorVideoEncoder_close(pEncoderContext); - ALOGV("VideoEditorVideoEncoder_open ERROR 0x%X", err); - } - ALOGV("VideoEditorVideoEncoder_open end"); - return err; -} - -M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer( - M4ENCODER_Context pContext, M4OSA_Double Cts, - M4OSA_Bool bReachedEOS) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; - M4VIFI_ImagePlane pOutPlane[2]; - MediaBuffer* buffer = NULL; - int32_t nbBuffer = 0; - - ALOGV("VideoEditorVideoEncoder_processInputBuffer begin: cts %f", Cts); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; - pOutPlane[0].pac_data = M4OSA_NULL; - pOutPlane[1].pac_data = M4OSA_NULL; - - if ( M4OSA_FALSE == bReachedEOS ) { - M4OSA_UInt32 sizeY = pEncoderContext->mCodecParams->FrameWidth * - pEncoderContext->mCodecParams->FrameHeight; - M4OSA_UInt32 sizeU = sizeY >> 2; - M4OSA_UInt32 size = sizeY + 2*sizeU; - M4OSA_UInt8* pData = M4OSA_NULL; - pEncoderContext->mEncoderSource->requestBuffer(&buffer); - pData = (M4OSA_UInt8*)buffer->data() + buffer->range_offset(); - - // Prepare the output image for pre-processing - pOutPlane[0].u_width = pEncoderContext->mCodecParams->FrameWidth; - pOutPlane[0].u_height = pEncoderContext->mCodecParams->FrameHeight; - pOutPlane[0].u_topleft = 0; - pOutPlane[0].u_stride = pOutPlane[0].u_width; - pOutPlane[1].u_width = pOutPlane[0].u_width; - pOutPlane[1].u_height = pOutPlane[0].u_height/2; - pOutPlane[1].u_topleft = 0; - pOutPlane[1].u_stride = pOutPlane[0].u_stride; - - pOutPlane[0].pac_data = pData; - pOutPlane[1].pac_data = pData + sizeY; - - // Apply pre-processing - err = pEncoderContext->mPreProcFunction( - pEncoderContext->mPreProcContext, M4OSA_NULL, pOutPlane); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - - // Set the metadata - buffer->meta_data()->setInt64(kKeyTime, (int64_t)(Cts*1000)); - } - - // Push the buffer to the source, a NULL buffer, notifies the source of EOS - nbBuffer = pEncoderContext->mEncoderSource->storeBuffer(buffer); - -cleanUp: - if ( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoEncoder_processInputBuffer error 0x%X", err); - } else { - if( NULL != buffer ) { - buffer->release(); - } - ALOGV("VideoEditorVideoEncoder_processInputBuffer ERROR 0x%X", err); - } - ALOGV("VideoEditorVideoEncoder_processInputBuffer end"); - return err; -} - -M4OSA_ERR VideoEditorVideoEncoder_processOutputBuffer( - M4ENCODER_Context pContext, MediaBuffer* buffer) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; - M4OSA_UInt32 Cts = 0; - uint8_t *data; - uint32_t length; - int32_t i32Tmp = 0; - int64_t i64Tmp = 0; - status_t result = OK; - LOGV("VideoEditorVideoEncoder_processOutputBuffer begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != buffer, M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; - - // Process the returned AU - if ( 0 == buffer->range_length() ) { - // Encoder has no data yet, nothing unusual - LOGV("VideoEditorVideoEncoder_processOutputBuffer : buffer is empty"); - goto cleanUp; - } - VIDEOEDITOR_CHECK(0 == ((M4OSA_UInt32)buffer->data())%4, M4ERR_PARAMETER); - 
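processInputBuffer above writes the pre-processed frame straight into the encoder source's buffer by describing one contiguous allocation as two image planes: full-size luma followed by a half-height block holding both chroma planes. A sketch of that layout arithmetic, with a hypothetical Plane type in place of M4VIFI_ImagePlane:

    #include <cstdint>

    struct Plane { uint32_t w, h, topleft, stride; uint8_t* data; };

    // Describe a w*h*3/2-byte YUV420 buffer as the two planes the
    // pre-processor expects.
    static void describeYuv420(uint8_t* buf, uint32_t w, uint32_t h,
                               Plane out[2]) {
        const uint32_t sizeY = w * h;
        out[0] = { w, h,     0, w, buf };          // Y, full resolution
        out[1] = { w, h / 2, 0, w, buf + sizeY };  // U and V, stacked after Y
    }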
VIDEOEDITOR_CHECK(buffer->meta_data().get(), M4ERR_PARAMETER); - if ( buffer->meta_data()->findInt32(kKeyIsCodecConfig, &i32Tmp) && i32Tmp ){ - LOGV("VideoEditorVideoEncoder_processOutputBuffer DSI %d",buffer->range_length()); - removeAVCCodecSpecificData(&data,&length,(const uint8_t*) buffer->data(),buffer->range_length(),NULL); - buffer->set_range(buffer->range_offset() + length, buffer->range_length() - length); - } - - // Check the CTS - VIDEOEDITOR_CHECK(buffer->meta_data()->findInt64(kKeyTime, &i64Tmp), - M4ERR_STATE); - - pEncoderContext->mNbOutputFrames++; - if ( 0 > pEncoderContext->mFirstOutputCts ) { - pEncoderContext->mFirstOutputCts = i64Tmp; - } - pEncoderContext->mLastOutputCts = i64Tmp; - - Cts = (M4OSA_Int32)(i64Tmp/1000); - LOGV("[TS_CHECK] VI/ENC WRITE frame %d @ %lld -> %d (last %d)", - pEncoderContext->mNbOutputFrames, i64Tmp, Cts, - pEncoderContext->mLastCTS); - if ( Cts < pEncoderContext->mLastCTS || Cts < pEncoderContext->mAccessUnit->CTS ) { - LOGV("VideoEncoder_processOutputBuffer WARNING : Cts is going " - "backwards %d < %d(or %lld)", Cts, pEncoderContext->mLastCTS, - pEncoderContext->mAccessUnit->CTS); - goto cleanUp; - } - LOGV("VideoEditorVideoEncoder_processOutputBuffer : %d %d", - Cts, pEncoderContext->mLastCTS); - - // Retrieve the AU container - err = pEncoderContext->mWriterDataInterface->pStartAU( - pEncoderContext->mWriterDataInterface->pWriterContext, - pEncoderContext->mAccessUnit->stream->streamID, - pEncoderContext->mAccessUnit); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - - // Format the AU - VIDEOEDITOR_CHECK( - buffer->range_length() <= pEncoderContext->mAccessUnit->size, - M4ERR_PARAMETER); - // Remove H264 AU start code - if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) { - if (!memcmp((const uint8_t *)buffer->data() + \ - buffer->range_offset(), "\x00\x00\x00\x01", 4) ) { - buffer->set_range(buffer->range_offset() + 4, - buffer->range_length() - 4); - } - } - - if ( (M4ENCODER_kH264 == pEncoderContext->mFormat) && - (M4OSA_NULL != pEncoderContext->mH264NALUPostProcessFct) ) { - // H264 trimming case, NALU post processing is needed - M4OSA_Int32 outputSize = pEncoderContext->mAccessUnit->size; - err = pEncoderContext->mH264NALUPostProcessFct( - pEncoderContext->mH264NALUPostProcessCtx, - (M4OSA_UInt8*)buffer->data()+buffer->range_offset(), - buffer->range_length(), - (M4OSA_UInt8*)pEncoderContext->mAccessUnit->dataAddress, - &outputSize); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - pEncoderContext->mAccessUnit->size = (M4OSA_UInt32)outputSize; - } else { - // The AU can just be copied - memcpy((void *)pEncoderContext->mAccessUnit->\ - dataAddress, (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->\ - range_offset()), buffer->range_length()); - pEncoderContext->mAccessUnit->size = - (M4OSA_UInt32)buffer->range_length(); - } - - if ( buffer->meta_data()->findInt32(kKeyIsSyncFrame,&i32Tmp) && i32Tmp){ - pEncoderContext->mAccessUnit->attribute = AU_RAP; - } else { - pEncoderContext->mAccessUnit->attribute = AU_P_Frame; - } - pEncoderContext->mLastCTS = Cts; - pEncoderContext->mAccessUnit->CTS = Cts; - pEncoderContext->mAccessUnit->DTS = Cts; - - LOGV("VideoEditorVideoEncoder_processOutputBuffer: AU @ 0x%X 0x%X %d %d", - pEncoderContext->mAccessUnit->dataAddress, - *pEncoderContext->mAccessUnit->dataAddress, - pEncoderContext->mAccessUnit->size, - pEncoderContext->mAccessUnit->CTS); - - // Write the AU - err = pEncoderContext->mWriterDataInterface->pProcessAU( - pEncoderContext->mWriterDataInterface->pWriterContext, - 
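Two fix-ups happen above before the access unit reaches the writer: config data flagged kKeyIsCodecConfig is skipped via set_range(), and for H.264 a leading Annex-B start code is stripped, because the 3GP writer stores length-prefixed NAL units rather than byte-stream format. The start-code strip as a standalone sketch:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Slices a 4-byte Annex-B start code (00 00 00 01) off the front of an
    // AU view, mirroring the set_range() adjustment above.
    static bool stripStartCode(const uint8_t*& data, size_t& len) {
        static const uint8_t kStartCode[4] = { 0x00, 0x00, 0x00, 0x01 };
        if (len >= 4 && std::memcmp(data, kStartCode, 4) == 0) {
            data += 4;
            len  -= 4;
            return true;
        }
        return false;  // already a raw NAL unit; leave untouched
    }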
pEncoderContext->mAccessUnit->stream->streamID, - pEncoderContext->mAccessUnit); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - -cleanUp: - if( M4NO_ERROR == err ) { - LOGV("VideoEditorVideoEncoder_processOutputBuffer no error"); - } else { - if (pEncoderContext != NULL) { - SAFE_FREE(pEncoderContext->mHeader.pBuf); - pEncoderContext->mHeader.Size = 0; - } - LOGV("VideoEditorVideoEncoder_processOutputBuffer ERROR 0x%X", err); - } - LOGV("VideoEditorVideoEncoder_processOutputBuffer end"); - return err; -} - -M4OSA_ERR VideoEditorVideoEncoder_encode(M4ENCODER_Context pContext, - M4VIFI_ImagePlane* pInPlane, M4OSA_Double Cts, - M4ENCODER_FrameMode FrameMode) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; - status_t result = OK; - MediaBuffer* outputBuffer = NULL; - - ALOGV("VideoEditorVideoEncoder_encode 0x%X %f %d", pInPlane, Cts, FrameMode); - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; - if ( STARTED == pEncoderContext->mState ) { - pEncoderContext->mState = BUFFERING; - } - VIDEOEDITOR_CHECK( - (BUFFERING | READING) & pEncoderContext->mState, M4ERR_STATE); - - pEncoderContext->mNbInputFrames++; - if ( 0 > pEncoderContext->mFirstInputCts ) { - pEncoderContext->mFirstInputCts = Cts; - } - pEncoderContext->mLastInputCts = Cts; - - ALOGV("VideoEditorVideoEncoder_encode 0x%X %d %f (%d)", pInPlane, FrameMode, - Cts, pEncoderContext->mLastCTS); - - // Push the input buffer to the encoder source - err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, Cts, - M4OSA_FALSE); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - - // Notify the source in case of EOS - if ( M4ENCODER_kLastFrame == FrameMode ) { - err = VideoEditorVideoEncoder_processInputBuffer( - pEncoderContext, 0, M4OSA_TRUE); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - } - - if ( BUFFERING == pEncoderContext->mState ) { - // Prefetch is complete, start reading - pEncoderContext->mState = READING; - } - // Read - while (1) { - MediaBuffer *outputBuffer = - pEncoderContext->mPuller->getBufferNonBlocking(); - - if (outputBuffer == NULL) { - break; - } else { - // Provide the encoded AU to the writer - err = VideoEditorVideoEncoder_processOutputBuffer(pEncoderContext, - outputBuffer); - pEncoderContext->mPuller->putBuffer(outputBuffer); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - - } - } - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoEncoder_encode no error"); - } else { - ALOGV("VideoEditorVideoEncoder_encode ERROR 0x%X", err); - } - ALOGV("VideoEditorVideoEncoder_encode end"); - return err; -} - -M4OSA_ERR VideoEditorVideoEncoder_start(M4ENCODER_Context pContext) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; - status_t result = OK; - - ALOGV("VideoEditorVideoEncoder_start begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; - VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE); - - pEncoderContext->mNbInputFrames = 0; - pEncoderContext->mFirstInputCts = -1.0; - pEncoderContext->mLastInputCts = -1.0; - pEncoderContext->mNbOutputFrames = 0; - pEncoderContext->mFirstOutputCts = -1; - pEncoderContext->mLastOutputCts = -1; - - result = pEncoderContext->mEncoder->start(); - VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); - - pEncoderContext->mPuller->start(); - - // Set the new state - pEncoderContext->mState 
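encode() above drains the puller non-blockingly so that feeding input frames never stalls on output, whereas stop() (below) switches to the blocking getter to flush everything the codec still holds. Both modes in one hedged sketch around a stand-in puller type:

    // Puller is assumed to expose getBufferBlocking()/getBufferNonBlocking()
    // returning nullptr when empty (non-blocking) or at EOS (blocking), as
    // the MediaBufferPuller above does; Sink stands in for the writer call.
    template <typename Puller, typename Sink>
    static void drain(Puller& p, Sink writeAu, bool blocking) {
        for (;;) {
            auto* buf = blocking ? p.getBufferBlocking()
                                 : p.getBufferNonBlocking();
            if (buf == nullptr) break;   // nothing ready, or stream finished
            writeAu(buf);                // hand the AU to the 3GP writer
            p.putBuffer(buf);            // return ownership to the puller
        }
    }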
= STARTED; - -cleanUp: - if ( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoEncoder_start no error"); - } else { - ALOGV("VideoEditorVideoEncoder_start ERROR 0x%X", err); - } - ALOGV("VideoEditorVideoEncoder_start end"); - return err; -} - -M4OSA_ERR VideoEditorVideoEncoder_stop(M4ENCODER_Context pContext) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; - MediaBuffer* outputBuffer = NULL; - status_t result = OK; - - ALOGV("VideoEditorVideoEncoder_stop begin"); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; - - // Send EOS again to make sure the source doesn't block. - err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, 0, - M4OSA_TRUE); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - - // Process the remaining buffers if necessary - if ( (BUFFERING | READING) & pEncoderContext->mState ) { - while (1) { - MediaBuffer *outputBuffer = - pEncoderContext->mPuller->getBufferBlocking(); - - if (outputBuffer == NULL) break; - - err = VideoEditorVideoEncoder_processOutputBuffer( - pEncoderContext, outputBuffer); - pEncoderContext->mPuller->putBuffer(outputBuffer); - VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); - - } - - pEncoderContext->mState = STARTED; - } - - // Stop the graph module if necessary - if ( STARTED == pEncoderContext->mState ) { - pEncoderContext->mPuller->stop(); - pEncoderContext->mEncoder->stop(); - pEncoderContext->mState = OPENED; - } - - if (pEncoderContext->mNbInputFrames != pEncoderContext->mNbOutputFrames) { - ALOGW("Some frames were not encoded: input(%d) != output(%d)", - pEncoderContext->mNbInputFrames, pEncoderContext->mNbOutputFrames); - } - -cleanUp: - if ( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoEncoder_stop no error"); - } else { - ALOGV("VideoEditorVideoEncoder_stop ERROR 0x%X", err); - } - ALOGV("VideoEditorVideoEncoder_stop end"); - return err; -} - -M4OSA_ERR VideoEditorVideoEncoder_regulBitRate(M4ENCODER_Context pContext) { - ALOGW("regulBitRate is not implemented"); - return M4NO_ERROR; -} - -M4OSA_ERR VideoEditorVideoEncoder_setOption(M4ENCODER_Context pContext, - M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; - - ALOGV("VideoEditorVideoEncoder_setOption start optionID 0x%X", optionID); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - - pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; - - switch( optionID ) { - case M4ENCODER_kOptionID_SetH264ProcessNALUfctsPtr: - pEncoderContext->mH264NALUPostProcessFct = - (H264MCS_ProcessEncodedNALU_fct*)optionValue; - break; - case M4ENCODER_kOptionID_H264ProcessNALUContext: - pEncoderContext->mH264NALUPostProcessCtx = - (M4OSA_Context)optionValue; - break; - default: - ALOGV("VideoEditorVideoEncoder_setOption: unsupported optionId 0x%X", - optionID); - VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); - break; - } - -cleanUp: - if ( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoEncoder_setOption no error"); - } else { - ALOGV("VideoEditorVideoEncoder_setOption ERROR 0x%X", err); - } - ALOGV("VideoEditorVideoEncoder_setOption end"); - return err; -} - -M4OSA_ERR VideoEditorVideoEncoder_getOption(M4ENCODER_Context pContext, - M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) { - M4OSA_ERR err = M4NO_ERROR; - VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; - - 
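The shell's states are deliberately one-hot (0x1 through 0x10) so that compound tests such as (BUFFERING | READING) & mState, used in encode() and stop() above, accept either state in a single branch. A sketch of the convention:

    enum State : unsigned {
        CREATED   = 0x1,
        OPENED    = 0x2,
        STARTED   = 0x4,
        BUFFERING = 0x8,   // frames queued, nothing read back yet
        READING   = 0x10,  // output drain has begun
    };

    // True once encode() has been called at least once.
    static bool canDrain(unsigned state) {
        return ((BUFFERING | READING) & state) != 0;
    }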
ALOGV("VideoEditorVideoEncoder_getOption begin optinId 0x%X", optionID); - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); - pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; - - switch( optionID ) { - case M4ENCODER_kOptionID_EncoderHeader: - VIDEOEDITOR_CHECK( - M4OSA_NULL != pEncoderContext->mHeader.pBuf, M4ERR_STATE); - *(M4ENCODER_Header**)optionValue = &(pEncoderContext->mHeader); - break; - default: - ALOGV("VideoEditorVideoEncoder_getOption: unsupported optionId 0x%X", - optionID); - VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); - break; - } - -cleanUp: - if ( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoEncoder_getOption no error"); - } else { - ALOGV("VideoEditorVideoEncoder_getOption ERROR 0x%X", err); - } - return err; -} - -M4OSA_ERR VideoEditorVideoEncoder_getInterface(M4ENCODER_Format format, - M4ENCODER_Format* pFormat, - M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ - M4OSA_ERR err = M4NO_ERROR; - - // Input parameters check - VIDEOEDITOR_CHECK(M4OSA_NULL != pFormat, M4ERR_PARAMETER); - VIDEOEDITOR_CHECK(M4OSA_NULL != pEncoderInterface, M4ERR_PARAMETER); - - ALOGV("VideoEditorVideoEncoder_getInterface begin 0x%x 0x%x %d", pFormat, - pEncoderInterface, mode); - - SAFE_MALLOC(*pEncoderInterface, M4ENCODER_GlobalInterface, 1, - "VideoEditorVideoEncoder"); - - *pFormat = format; - - switch( format ) { - case M4ENCODER_kH263: - { - (*pEncoderInterface)->pFctInit = - VideoEditorVideoEncoder_init_H263; - break; - } - case M4ENCODER_kMPEG4: - { - (*pEncoderInterface)->pFctInit = - VideoEditorVideoEncoder_init_MPEG4; - break; - } - case M4ENCODER_kH264: - { - (*pEncoderInterface)->pFctInit = - VideoEditorVideoEncoder_init_H264; - break; - } - default: - ALOGV("VideoEditorVideoEncoder_getInterface : unsupported format %d", - format); - VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); - break; - } - (*pEncoderInterface)->pFctOpen = VideoEditorVideoEncoder_open; - (*pEncoderInterface)->pFctStart = VideoEditorVideoEncoder_start; - (*pEncoderInterface)->pFctStop = VideoEditorVideoEncoder_stop; - (*pEncoderInterface)->pFctPause = M4OSA_NULL; - (*pEncoderInterface)->pFctResume = M4OSA_NULL; - (*pEncoderInterface)->pFctClose = VideoEditorVideoEncoder_close; - (*pEncoderInterface)->pFctCleanup = VideoEditorVideoEncoder_cleanup; - (*pEncoderInterface)->pFctRegulBitRate = - VideoEditorVideoEncoder_regulBitRate; - (*pEncoderInterface)->pFctEncode = VideoEditorVideoEncoder_encode; - (*pEncoderInterface)->pFctSetOption = VideoEditorVideoEncoder_setOption; - (*pEncoderInterface)->pFctGetOption = VideoEditorVideoEncoder_getOption; - -cleanUp: - if( M4NO_ERROR == err ) { - ALOGV("VideoEditorVideoEncoder_getInterface no error"); - } else { - *pEncoderInterface = M4OSA_NULL; - ALOGV("VideoEditorVideoEncoder_getInterface ERROR 0x%X", err); - } - return err; -} - -extern "C" { - -M4OSA_ERR VideoEditorVideoEncoder_getInterface_H263(M4ENCODER_Format* pFormat, - M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ - LOGI("VideoEditorVideoEncoder_getInterface_H263: Intel Version"); - return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH263, pFormat, - pEncoderInterface, mode); -} - -M4OSA_ERR VideoEditorVideoEncoder_getInterface_MPEG4(M4ENCODER_Format* pFormat, - M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ - LOGI("VideoEditorVideoEncoder_getInterface_MPEG4: Intel Version"); - return VideoEditorVideoEncoder_getInterface(M4ENCODER_kMPEG4, pFormat, - pEncoderInterface, 
mode); -} - -M4OSA_ERR VideoEditorVideoEncoder_getInterface_H264(M4ENCODER_Format* pFormat, - M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ - LOGI("VideoEditorVideoEncoder_getInterface_H264: Intel Version"); - return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH264, pFormat, - pEncoderInterface, mode); - -} - -} // extern "C" - -} // namespace android diff --git a/frameworks/videoedit/vss/Android.mk b/frameworks/videoedit/vss/Android.mk deleted file mode 100644 index cf3af92..0000000 --- a/frameworks/videoedit/vss/Android.mk +++ /dev/null @@ -1,67 +0,0 @@ -# -# Copyright (C) 2011 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -LOCAL_PATH:= $(call my-dir) - -include $(CLEAR_VARS) - -LOCAL_MODULE:= libvss_intel - -LOCAL_COPY_HEADERS_TO := videoeditornv12 - -LOCAL_COPY_HEADERS := M4xVSS_NV12.h \ - M4MCS_NV12.h \ - EditVideo_NV12.h - -LOCAL_SRC_FILES:= \ - M4AIR_API_NV12.c \ - M4MCS_VideoPreProcessing_NV12.c \ - M4VSS3GPP_EditVideo_NV12.c \ - M4xVSS_internal_NV12.c \ - -LOCAL_MODULE_TAGS := optional - - -LOCAL_SHARED_LIBRARIES := \ - libcutils \ - libutils \ - libvideoeditor_osal \ - -LOCAL_STATIC_LIBRARIES := \ - liblvpp_intel \ - - -LOCAL_C_INCLUDES += \ - $(TOP)/frameworks/av/libvideoeditor/osal/inc \ - $(TOP)/frameworks/av/libvideoeditor/vss/common/inc \ - $(TOP)/frameworks/av/libvideoeditor/vss/mcs/inc \ - $(TOP)/frameworks/av/libvideoeditor/vss/inc \ - $(TOP)/frameworks/av/libvideoeditor/vss/stagefrightshells/inc \ - $(TOP)/frameworks/av/libvideoeditor/lvpp \ - $(TOP)/frameworks/native/include/media/openmax \ - $(TARGET_OUT_HEADERS)/videoeditornv12 - -LOCAL_SHARED_LIBRARIES += libdl - -LOCAL_CFLAGS += -DDECODE_GIF_ON_SAVING \ - -DVIDEOEDITOR_INTEL_NV12_VERSION - -# All of the shared libraries we link against. -LOCAL_LDLIBS := \ - -lpthread -ldl - -include $(BUILD_STATIC_LIBRARY) - diff --git a/frameworks/videoedit/vss/EditVideo_NV12.h b/frameworks/videoedit/vss/EditVideo_NV12.h deleted file mode 100644 index 43950f9..0000000 --- a/frameworks/videoedit/vss/EditVideo_NV12.h +++ /dev/null @@ -1,59 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission.
- * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - -#ifndef EDITVIDEO_NV12_H -#define EDITVIDEO_NV12_H - -M4OSA_ERR M4VSS3GPP_intSetNv12PlaneFromARGB888( - M4VSS3GPP_InternalEditContext *pC, M4VSS3GPP_ClipContext* pClipCtxt); - -M4OSA_ERR M4VSS3GPP_intRotateVideo_NV12(M4VIFI_ImagePlane* pPlaneIn, - M4OSA_UInt32 rotationDegree); - -M4OSA_ERR M4VSS3GPP_intApplyRenderingMode_NV12(M4VSS3GPP_InternalEditContext *pC, - M4xVSS_MediaRendering renderingMode, M4VIFI_ImagePlane* pInplane, - M4VIFI_ImagePlane* pOutplane); - -unsigned char M4VFL_modifyLumaWithScale_NV12(M4ViComImagePlane *plane_in, - M4ViComImagePlane *plane_out, unsigned long lum_factor, - void *user_data); - -unsigned char M4VIFI_ImageBlendingonNV12 (void *pUserData, - M4ViComImagePlane *pPlaneIn1, M4ViComImagePlane *pPlaneIn2, - M4ViComImagePlane *pPlaneOut, UInt32 Progress); - -#endif diff --git a/frameworks/videoedit/vss/M4AIR_API_NV12.c b/frameworks/videoedit/vss/M4AIR_API_NV12.c deleted file mode 100644 index a2de772..0000000 --- a/frameworks/videoedit/vss/M4AIR_API_NV12.c +++ /dev/null @@ -1,1201 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. 
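Among the prototypes above, M4VFL_modifyLumaWithScale_NV12 is the fade-effect hook: the editor passes a fixed-point luminance factor, assumed Q8 here (256 = unity, 0 = black), that scales the Y plane. Purely as an illustrative sketch of the luma half, with all names assumed; the deleted NV12 implementation also has to pull the interleaved UV samples toward 128 for a clean fade to black:

    #include <cstdint>

    // Scale a luma plane by lumFactor/256 (Q8 fixed point).
    static void scaleLumaQ8(const uint8_t* in, uint8_t* out,
                            int w, int h, int strideIn, int strideOut,
                            unsigned long lumFactor) {
        for (int y = 0; y < h; ++y) {
            for (int x = 0; x < w; ++x)
                out[x] = static_cast<uint8_t>((in[x] * lumFactor) >> 8);
            in += strideIn;
            out += strideOut;
        }
    }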
- ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - ************************************************************************* - * @file M4AIR_API_NV12.c - * @brief Area of Interest Resizer API - ************************************************************************* - */ - -#define M4AIR_YUV420_FORMAT_SUPPORTED -#define M4AIR_YUV420A_FORMAT_SUPPORTED -#define M4AIR_NV12_FORMAT_SUPPORTED -#define M4AIR_NV12A_FORMAT_SUPPORTED - -/******************************* INCLUDES *******************************/ -#include "M4OSA_Types.h" -#include "M4OSA_Error.h" -#include "M4OSA_CoreID.h" -#include "M4OSA_Mutex.h" -#include "M4OSA_Memory.h" -#include "M4VIFI_FiltersAPI.h" -#include "M4AIR_API.h" -#include "M4OSA_Debug.h" -#include "M4AIR_API_NV12.h" - -/** - ****************************************************************************** - * M4OSA_ERR M4AIR_create_NV12(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat) - * @brief This function initialize an instance of the AIR. - * @param pContext: (IN/OUT) Address of the context to create - * @param inputFormat: (IN) input format type. - * @return M4NO_ERROR: there is no error - * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). Invalid formatType - * @return M4ERR_ALLOC: No more memory is available - ****************************************************************************** - */ - -/************************ M4AIR INTERNAL TYPES DEFINITIONS ***********************/ - -/** - ****************************************************************************** - * enum M4AIR_States - * @brief The following enumeration defines the internal states of the AIR. - ****************************************************************************** - */ -typedef enum -{ - M4AIR_kCreated, /**< State after M4AIR_create has been called */ - M4AIR_kConfigured /**< State after M4AIR_configure has been called */ -}M4AIR_States; - - -/** - ****************************************************************************** - * struct M4AIR_InternalContext - * @brief The following structure is the internal context of the AIR. - ****************************************************************************** - */ -typedef struct -{ - M4AIR_States m_state; /**< Internal state */ - M4AIR_InputFormatType m_inputFormat; /**< Input format like YUV420Planar, - RGB565, JPG, etc ... 
*/ - M4AIR_Params m_params; /**< Current input parameter of the processing */ - M4OSA_UInt32 u32_x_inc[4]; /**< ratio between input and output width for YUV */ - M4OSA_UInt32 u32_y_inc[4]; /**< ratio between input and output height for YUV */ - M4OSA_UInt32 u32_x_accum_start[4]; /**< horizontal initial accumulator value */ - M4OSA_UInt32 u32_y_accum_start[4]; /**< vertical initial accumulator value */ - M4OSA_UInt32 u32_x_accum[4]; /**< save of horizontal accumulator value */ - M4OSA_UInt32 u32_y_accum[4]; /**< save of vertical accumulator value */ - M4OSA_UInt8* pu8_data_in[4]; /**< Save of input plane pointers - in case of stripe mode */ - M4OSA_UInt32 m_procRows; /**< Number of processed rows, - used in stripe mode only */ - M4OSA_Bool m_bOnlyCopy; /**< Flag to know if we just perform a copy - or a bilinear interpolation */ - M4OSA_Bool m_bFlipX; /**< Depends on output orientation, used during - processing to revert processing order in X - coordinates */ - M4OSA_Bool m_bFlipY; /**< Depends on output orientation, used during - processing to revert processing order in Y - coordinates */ - M4OSA_Bool m_bRevertXY; /**< Depends on output orientation, used during - processing to revert X and Y processing order - (+-90° rotation) */ -}M4AIR_InternalContext; - -/********************************* MACROS *******************************/ -#define M4ERR_CHECK_NULL_RETURN_VALUE(retval, pointer)\ - if ((pointer) == M4OSA_NULL) return ((M4OSA_ERR)(retval)); - - -M4OSA_ERR M4AIR_create_NV12(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat) -{ - M4OSA_ERR err = M4NO_ERROR ; - M4AIR_InternalContext* pC = M4OSA_NULL ; - - /* Check that the address on the context is not NULL */ - M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ; - - *pContext = M4OSA_NULL ; - - /* Internal Context creation */ - pC = (M4AIR_InternalContext*)M4OSA_32bitAlignedMalloc(sizeof(M4AIR_InternalContext), - M4AIR,(M4OSA_Char *)"AIR internal context") ; - M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_ALLOC, pC) ; - - - /* Check if the input format is supported */ - switch(inputFormat) - { -#ifdef M4AIR_NV12_FORMAT_SUPPORTED - case M4AIR_kNV12P: - break ; -#endif -#ifdef M4AIR_NV12A_FORMAT_SUPPORTED - case M4AIR_kNV12AP: - break ; -#endif - -#ifdef M4AIR_YUV420_FORMAT_SUPPORTED - case M4AIR_kYUV420P: - break ; -#endif -#ifdef M4AIR_YUV420A_FORMAT_SUPPORTED - case M4AIR_kYUV420AP: - break ; -#endif - default: - err = M4ERR_AIR_FORMAT_NOT_SUPPORTED; - goto M4AIR_create_cleanup ; - } - - /**< Save input format and update state */ - pC->m_inputFormat = inputFormat; - pC->m_state = M4AIR_kCreated; - - /* Return the context to the caller */ - *pContext = pC ; - - return M4NO_ERROR ; - -M4AIR_create_cleanup: - /* Error management : we destroy the context if needed */ - if(M4OSA_NULL != pC) - { - free(pC) ; - } - - *pContext = M4OSA_NULL ; - - return err ; -} - - -/** - ****************************************************************************** - * M4OSA_ERR M4AIR_cleanUp_NV12(M4OSA_Context pContext) - * @brief This function destroys an instance of the AIR component - * @param pContext: (IN) Context identifying the instance to destroy - * @return M4NO_ERROR: there is no error - * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). - * @return M4ERR_STATE: Internal state is incompatible with this function call.
-/**
- ******************************************************************************
- * M4OSA_ERR M4AIR_cleanUp_NV12(M4OSA_Context pContext)
- * @brief   This function destroys an instance of the AIR component
- * @param   pContext:  (IN) Context identifying the instance to destroy
- * @return  M4NO_ERROR: there is no error
- * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).
- * @return  M4ERR_STATE: Internal state is incompatible with this function call.
- ******************************************************************************
- */
-M4OSA_ERR M4AIR_cleanUp_NV12(M4OSA_Context pContext)
-{
-    M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;
-
-    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;
-
-    /**< Check state */
-    if((M4AIR_kCreated != pC->m_state)&&(M4AIR_kConfigured != pC->m_state))
-    {
-        return M4ERR_STATE;
-    }
-    free(pC) ;
-
-    return M4NO_ERROR ;
-}
-
-/**
- ******************************************************************************
- * M4OSA_ERR M4AIR_configure_NV12(M4OSA_Context pContext, M4AIR_Params* pParams)
- * @brief   This function will configure the AIR.
- * @note    It will set the input and output coordinates and sizes,
- *          and indicates whether we will proceed in stripe mode or not.
- *          In case an M4AIR_get in stripe mode was ongoing, it will cancel this previous
- *          processing and reset the get process.
- * @param   pContext:                (IN) Context identifying the instance
- * @param   pParams->m_bOutputStripe:(IN) Stripe mode.
- * @param   pParams->m_inputCoord:   (IN) X,Y coordinates of the first valid pixel in input.
- * @param   pParams->m_inputSize:    (IN) input ROI size.
- * @param   pParams->m_outputSize:   (IN) output size.
- * @return  M4NO_ERROR: there is no error
- * @return  M4ERR_ALLOC: No more memory space to add a new effect.
- * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).
- * @return  M4ERR_AIR_FORMAT_NOT_SUPPORTED: the requested input format is not supported.
- ******************************************************************************
- */
-M4OSA_ERR M4AIR_configure_NV12(M4OSA_Context pContext, M4AIR_Params* pParams)
-{
-    M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;
-    M4OSA_UInt32    i,u32_width_in, u32_width_out, u32_height_in, u32_height_out;
-    M4OSA_UInt32    nb_planes;
-
-    M4OSA_TRACE1_0("M4AIR_configure_NV12 start");
-    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext);
-
-    if(M4AIR_kNV12P == pC->m_inputFormat)
-    {
-        nb_planes = 2;
-    }
-    else
-    {
-        nb_planes = 3;
-    }
-
-    /**< Check state */
-    if((M4AIR_kCreated != pC->m_state)&&(M4AIR_kConfigured != pC->m_state))
-    {
-        return M4ERR_STATE;
-    }
-
-    /** Save parameters */
-    pC->m_params = *pParams;
-
-    /* Check that the input & output width and height are even */
-    if(((pC->m_params.m_inputSize.m_height)&0x1)    ||
-       ((pC->m_params.m_outputSize.m_height)&0x1))
-    {
-        return M4ERR_AIR_ILLEGAL_FRAME_SIZE;
-    }
-
-    if(((pC->m_params.m_inputSize.m_width)&0x1)||
-       ((pC->m_params.m_outputSize.m_width)&0x1))
-    {
-        return M4ERR_AIR_ILLEGAL_FRAME_SIZE;
-    }
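The resize path set up just below is driven by 16.16 fixed-point accumulators: for each plane, one increment per axis is derived, whose integer part selects the left/top source pixel and whose top four fraction bits become the bilinear weight. Upscaling uses (in-1)/(out-1) so the first and last output samples land exactly on the first and last input samples (the interleaved UV plane, i == 1, uses (in-2)/(out-2) because samples advance in U/V byte pairs); downscaling uses in/out and seeds the accumulator with half a step so sampling starts near pixel centres. A standalone rendering of that arithmetic, with function and variable names of my own choosing:

    /* Sketch of the 16.16 scheme initialized by the loop below: for
     * destination index k on an axis resampled in_len -> out_len, return the
     * source index and 4-bit bilinear weight as M4AIR_get_NV12 derives them. */
    static void air_fixed_point_sample(M4OSA_UInt32 in_len, M4OSA_UInt32 out_len,
                                       M4OSA_UInt32 k, M4OSA_UInt32* pu32_src,
                                       M4OSA_UInt32* pu32_frac)
    {
        M4OSA_UInt32 u32_inc, u32_accum;

        if (out_len >= in_len)   /* upscale: endpoints coincide */
            u32_inc = ((in_len - 1) * 0x10000) / (out_len - 1);
        else                     /* downscale */
            u32_inc = (in_len * 0x10000) / out_len;

        /* Initial accumulator: 0 when upscaling, else half a step (0..0.5). */
        if (u32_inc >= 0x10000)
            u32_accum = ((u32_inc & 0xffff) ? (u32_inc & 0xffff) : 0x10000) >> 1;
        else
            u32_accum = 0;

        u32_accum += k * u32_inc;
        *pu32_src  = u32_accum >> 16;        /* integer part: source pixel */
        *pu32_frac = (u32_accum >> 12) & 15; /* fraction/16: blend weight */
    }

For example, upscaling 4 -> 8 gives u32_inc = 3*0x10000/7 (about 0.43), so output pixel 3 reads source pixel 1 with weight 4/16 toward pixel 2.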
-    if(((pC->m_params.m_inputSize.m_width) == (pC->m_params.m_outputSize.m_width))
-        &&((pC->m_params.m_inputSize.m_height) == (pC->m_params.m_outputSize.m_height)))
-    {
-        /**< No resize in this case, we will just copy input in output */
-        pC->m_bOnlyCopy = M4OSA_TRUE;
-    }
-    else
-    {
-        pC->m_bOnlyCopy = M4OSA_FALSE;
-
-        /**< Initialize internal variables used for resize filter */
-        for(i=0;i<nb_planes;i++)
-        {
-            u32_width_in = pC->m_params.m_inputSize.m_width;
-            u32_height_in = ((i==0)||(i==2))?pC->m_params.m_inputSize.m_height:\
-                (pC->m_params.m_inputSize.m_height+1)>>1;
-            u32_width_out = pC->m_params.m_outputSize.m_width;
-            u32_height_out = ((i==0)||(i==2))?pC->m_params.m_outputSize.m_height:\
-                (pC->m_params.m_outputSize.m_height+1)>>1;
-
-            M4OSA_TRACE1_4("u32_width_in = %d, u32_height_in = %d, u32_width_out = %d, u32_height_out = %d",\
-                u32_width_in, u32_height_in, u32_width_out, u32_height_out);
-
-            /* Compute horizontal ratio between src and destination width.*/
-            if (u32_width_out >= u32_width_in)
-            {
-                if (i == 1)
-                {
-                    pC->u32_x_inc[i] = ((u32_width_in-2) * 0x10000) / (u32_width_out-2);
-                }
-                else
-                {
-                    pC->u32_x_inc[i] = ((u32_width_in-1) * 0x10000) / (u32_width_out-1);
-                }
-            }
-            else
-            {
-                pC->u32_x_inc[i] = (u32_width_in * 0x10000) / (u32_width_out);
-            }
-
-            /* Compute vertical ratio between src and destination height.*/
-            if (u32_height_out >= u32_height_in)
-            {
-                pC->u32_y_inc[i] = ((u32_height_in - 1) * 0x10000) / (u32_height_out-1);
-            }
-            else
-            {
-                pC->u32_y_inc[i] = (u32_height_in * 0x10000) / (u32_height_out);
-            }
-
-            /*
-            Calculate initial accumulator value : u32_y_accum_start.
-            u32_y_accum_start is coded on 15 bits, and represents a value between 0 and 0.5
-            */
-            if (pC->u32_y_inc[i] >= 0x10000)
-            {
-                /*
-                Keep the fractional part, assuming that the integer part is coded
-                on the 16 high bits and the fractional part on the 15 low bits
-                */
-                pC->u32_y_accum_start[i] = pC->u32_y_inc[i] & 0xffff;
-
-                if (!pC->u32_y_accum_start[i])
-                {
-                    pC->u32_y_accum_start[i] = 0x10000;
-                }
-
-                pC->u32_y_accum_start[i] >>= 1;
-            }
-            else
-            {
-                pC->u32_y_accum_start[i] = 0;
-            }
-            /**< Take into account that the Y coordinate can be odd;
-                in this case we have to put a 0.5 offset
-                for the U and V planes as they are sub-sampled 2 times vs Y */
-            if((pC->m_params.m_inputCoord.m_y&0x1)&&(i==1))
-            {
-                pC->u32_y_accum_start[i] += 0x8000;
-            }
-
-            /*
-            Calculate initial accumulator value : u32_x_accum_start.
-            u32_x_accum_start is coded on 15 bits, and represents a value between
-            0 and 0.5
-            */
-            if (pC->u32_x_inc[i] >= 0x10000)
-            {
-                pC->u32_x_accum_start[i] = pC->u32_x_inc[i] & 0xffff;
-
-                if (!pC->u32_x_accum_start[i])
-                {
-                    pC->u32_x_accum_start[i] = 0x10000;
-                }
-
-                pC->u32_x_accum_start[i] >>= 1;
-            }
-            else
-            {
-                pC->u32_x_accum_start[i] = 0;
-            }
-            /**< Take into account that the X coordinate can be odd;
-                in this case we have to put a 0.5 offset
-                for the U and V planes as they are sub-sampled 2 times vs Y */
-            if((pC->m_params.m_inputCoord.m_x&0x1)&&(i==1))
-            {
-                pC->u32_x_accum_start[i] += 0x8000;
-            }
-
-            M4OSA_TRACE1_4("u32_x_inc = 0x%x, u32_y_inc = 0x%x, u32_x_accum_start = 0x%x, u32_y_accum_start = 0x%x",\
-                pC->u32_x_inc[i], pC->u32_y_inc[i], \
-                pC->u32_x_accum_start[i], pC->u32_y_accum_start[i]);
-        }
-    }
-
-    /**< Reset variable used for stripe mode */
-    pC->m_procRows = 0;
-
-    /**< Initialize variables for X/Y processing order according to orientation */
-    pC->m_bFlipX = M4OSA_FALSE;
-    pC->m_bFlipY = M4OSA_FALSE;
-    pC->m_bRevertXY = M4OSA_FALSE;
-    switch(pParams->m_outputOrientation)
-    {
-        case M4COMMON_kOrientationTopLeft:
-            break;
-        case M4COMMON_kOrientationTopRight:
-            pC->m_bFlipX = M4OSA_TRUE;
-            break;
-        case M4COMMON_kOrientationBottomRight:
-            pC->m_bFlipX = M4OSA_TRUE;
-            pC->m_bFlipY = M4OSA_TRUE;
-            break;
-        case M4COMMON_kOrientationBottomLeft:
-            pC->m_bFlipY = M4OSA_TRUE;
-            break;
-        case M4COMMON_kOrientationLeftTop:
-            pC->m_bRevertXY = M4OSA_TRUE;
-            break;
-        case M4COMMON_kOrientationRightTop:
-            pC->m_bRevertXY = M4OSA_TRUE;
-            pC->m_bFlipY = M4OSA_TRUE;
-            break;
-        case M4COMMON_kOrientationRightBottom:
-            pC->m_bRevertXY = M4OSA_TRUE;
-            pC->m_bFlipX = M4OSA_TRUE;
-            pC->m_bFlipY = M4OSA_TRUE;
-            break;
-        case M4COMMON_kOrientationLeftBottom:
-            pC->m_bRevertXY = M4OSA_TRUE;
-            pC->m_bFlipX = M4OSA_TRUE;
-            break;
-        default:
-            return M4ERR_PARAMETER;
-    }
-    /**< Update state */
-    pC->m_state = M4AIR_kConfigured;
-
-    return M4NO_ERROR ;
-}
-
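The eight orientations thus collapse into three booleans: m_bFlipX mirrors the horizontal scan, m_bFlipY the vertical scan, and m_bRevertXY exchanges the roles of rows and columns for the four 90-degree-rotated cases. Read together, they say where each output pixel is fetched from; the following compact rendering is my own illustration of that mapping, not library code:

    /* Illustration only: source coordinate read for output pixel (x,y),
     * given the flags derived above (u32_w/u32_h = upright input size). */
    static void air_src_coord(M4OSA_UInt32 x, M4OSA_UInt32 y,
                              M4OSA_UInt32 u32_w, M4OSA_UInt32 u32_h,
                              M4OSA_Bool bFlipX, M4OSA_Bool bFlipY,
                              M4OSA_Bool bRevertXY,
                              M4OSA_UInt32* pSrcX, M4OSA_UInt32* pSrcY)
    {
        M4OSA_UInt32 u = x, v = y;
        if (M4OSA_TRUE == bRevertXY)   /* +/-90°: walk columns instead of rows */
        {
            u = y;
            v = x;
        }
        *pSrcX = (M4OSA_TRUE == bFlipX) ? (u32_w - 1 - u) : u;
        *pSrcY = (M4OSA_TRUE == bFlipY) ? (u32_h - 1 - v) : v;
    }

For instance M4COMMON_kOrientationBottomRight (a 180-degree turn) sets both flips, so output (0,0) reads input (u32_w-1, u32_h-1), while M4COMMON_kOrientationRightTop additionally swaps the axes.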
-
-/**
- ******************************************************************************
- * M4OSA_ERR M4AIR_get_NV12(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut)
- *
- * @brief   This function will provide the requested resized area of interest according to
- *          the settings provided in M4AIR_configure.
- * @note    In case the input format type is JPEG, the input plane(s)
- *          in pIn are not used. In normal mode, the dimensions specified in the output plane(s)
- *          structure must be the same as the ones specified in M4AIR_configure. In stripe mode,
- *          only the width will be the same, the height will be taken as the stripe height
- *          (typically 16).
- *          In normal mode, this function is called once to get the full output picture.
- *          In stripe mode, it is called for each stripe till the whole picture has been
- *          retrieved, and the position of the output stripe in the output picture
- *          is internally incremented at each step.
- *          Any call to M4AIR_configure during the stripe process will reset it to the
- *          beginning of the output picture.
- * @param   pContext:   (IN) Context identifying the instance
- * @param   pIn:        (IN) Plane structure containing input Plane(s).
- * @param   pOut:       (IN/OUT) Plane structure containing output Plane(s).
- * @return  M4NO_ERROR: there is no error
- * @return  M4ERR_ALLOC: No more memory space to add a new effect.
- * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).
- ******************************************************************************
- */
-M4OSA_ERR M4AIR_get_NV12(M4OSA_Context pContext,
-    M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut)
-{
-    M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;
-    M4OSA_UInt32 i,j,k,u32_x_frac,u32_y_frac,u32_x_accum,u32_y_accum,u32_shift;
-    M4OSA_UInt8  *pu8_data_in, *pu8_data_in_org, *pu8_data_in_tmp, *pu8_data_out;
-    M4OSA_UInt8  *pu8_src_top;
-    M4OSA_UInt8  *pu8_src_bottom;
-    M4OSA_UInt32 u32_temp_value;
-    M4OSA_Int32  i32_tmp_offset;
-    M4OSA_UInt32 nb_planes;
-
-    M4OSA_TRACE1_0("M4AIR_get_NV12 start");
-
-    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;
-
-    /**< Check state */
-    if(M4AIR_kConfigured != pC->m_state)
-    {
-        return M4ERR_STATE;
-    }
-
-    if(M4AIR_kNV12P == pC->m_inputFormat)
-    {
-        nb_planes = 2;
-    }
-    else
-    {
-        nb_planes = 3;
-    }
-
-    /**< Loop on each Plane */
-    for(i=0;i<nb_planes;i++)
-    {
-        /* Set the working pointers at the beginning of the input/output data field */
-        u32_shift = ((i==0)||(i==2))?0:1;    /**< Depends on Luma or Chroma */
-
-        M4OSA_TRACE1_2("m_inputCoord.m_x = %d, m_inputCoord.m_y = %d",
-            pC->m_params.m_inputCoord.m_x,pC->m_params.m_inputCoord.m_y);
-
-        if((M4OSA_FALSE == pC->m_params.m_bOutputStripe)\
-            ||((M4OSA_TRUE == pC->m_params.m_bOutputStripe)&&(0 == pC->m_procRows)))
-        {
-            /**< For the input, take the ROI into account */
-            pu8_data_in = pIn[i].pac_data + ((pIn[i].u_topleft >> u32_shift) << u32_shift) +
-                ((pC->m_params.m_inputCoord.m_x >> u32_shift) << u32_shift)
-                + (pC->m_params.m_inputCoord.m_y >> u32_shift) * pIn[i].u_stride;
-
-            /** Go to the end of the line/column in case X/Y scanning is flipped */
-            if(M4OSA_TRUE == pC->m_bFlipX)
-            {
-                pu8_data_in += pC->m_params.m_inputSize.m_width -1 ;
-            }
-            if(M4OSA_TRUE == pC->m_bFlipY)
-            {
-                pu8_data_in += ((pC->m_params.m_inputSize.m_height>>u32_shift) -1)\
-                    * pIn[i].u_stride;
-            }
-
-            /**< Initialize the accumulators in case we are using them (bilinear interpolation) */
-            if( M4OSA_FALSE == pC->m_bOnlyCopy)
-            {
-                pC->u32_x_accum[i] = pC->u32_x_accum_start[i];
-                pC->u32_y_accum[i] = pC->u32_y_accum_start[i];
-            }
-        }
-        else
-        {
-            /**< In case of stripe mode for other than the first stripe, we need to recover the
-                input pointer from the internal context */
-            pu8_data_in = pC->pu8_data_in[i];
-        }
-
-        /**< In every mode, output data are at the beginning of the output plane */
-        pu8_data_out = pOut[i].pac_data + ((pOut[i].u_topleft >> u32_shift) << u32_shift);
-
-        M4OSA_TRACE1_2("pOut[%d].u_topleft = %d",i,pOut[i].u_topleft);
-
-        /**< Initialize the input offset applied after each pixel
*/ - if(M4OSA_FALSE == pC->m_bFlipY) - { - i32_tmp_offset = pIn[i].u_stride; - } - else - { - i32_tmp_offset = -pIn[i].u_stride; - } - - /**< In this case, no bilinear interpolation is needed as input and output dimensions - are the same */ - if( M4OSA_TRUE == pC->m_bOnlyCopy) - { - /**< No +-90?rotation */ - if(M4OSA_FALSE == pC->m_bRevertXY) - { - /**< No flip on X abscissa */ - if(M4OSA_FALSE == pC->m_bFlipX) - { - /**< Loop on each row */ - for(j=0;jm_bFlipY) - { - pu8_data_in += pIn[i].u_stride; - } - else - { - pu8_data_in -= pIn[i].u_stride; - } - } - } - else - { - /**< Loop on each row */ - for(j=0;jm_bFlipX) - { - pu8_data_in ++; - } - else - { - pu8_data_in --; - } - } - } - } - /**< Bilinear interpolation */ - else - { - if(0 == i) /**< Y plane */ - { - /**No +-90?rotation */ - if(M4OSA_FALSE == pC->m_bRevertXY) - { - /**< Loop on each row */ - for(j=0;ju32_y_accum[i]>>12)&15; - - /* Reinit horizontal weight factor */ - u32_x_accum = pC->u32_x_accum_start[i]; - - - if(M4OSA_TRUE == pC->m_bFlipX) - { - - /**< Loop on each output pixel in a row */ - for(k=0;k> 12)&15; /* Fraction of Horizontal - weight factor */ - - pu8_src_top = (pu8_data_in - (u32_x_accum >> 16)) -1 ; - - pu8_src_bottom = pu8_src_top + i32_tmp_offset; - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) + - pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[1]*(16-u32_x_frac) + - pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update horizontal accumulator */ - u32_x_accum += pC->u32_x_inc[i]; - } - } - else - { - /**< Loop on each output pixel in a row */ - for(k=0;k> 12)&15; /* Fraction of Horizontal - weight factor */ - - pu8_src_top = pu8_data_in + (u32_x_accum >> 16); - - pu8_src_bottom = pu8_src_top + i32_tmp_offset; - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + - pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[0]*(16-u32_x_frac) + - pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update horizontal accumulator */ - u32_x_accum += pC->u32_x_inc[i]; - } - } - - pu8_data_out += pOut[i].u_stride - pOut[i].u_width; - - /* Update vertical accumulator */ - pC->u32_y_accum[i] += pC->u32_y_inc[i]; - if (pC->u32_y_accum[i]>>16) - { - pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset; - pC->u32_y_accum[i] &= 0xffff; - } - } - } - /** +-90?rotation */ - else - { - pu8_data_in_org = pu8_data_in; - - /**< Loop on each output row */ - for(j=0;ju32_x_accum[i]>>12)&15; - - /* Reinit accumulator */ - u32_y_accum = pC->u32_y_accum_start[i]; - - if (M4OSA_TRUE == pC->m_bFlipX) - { - /**< Loop on each output pixel in a row */ - for(k=0;k> 12)&15; /* Vertical weight factor */ - - pu8_src_top = (pu8_data_in - (pC->u32_x_accum[i] >> 16)) - 1; - - pu8_src_bottom = pu8_src_top + i32_tmp_offset; - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) + - pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[1]*(16-u32_x_frac) + - pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update vertical accumulator */ - u32_y_accum += pC->u32_y_inc[i]; - if (u32_y_accum>>16) - { - pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset; - u32_y_accum &= 0xffff; - } - } - } - else - { - /**< Loop on each output pixel in a row */ - for(k=0;k> 12)&15; /* Vertical 
weight factor */ - - pu8_src_top = pu8_data_in + (pC->u32_x_accum[i] >> 16); - - pu8_src_bottom = pu8_src_top + i32_tmp_offset; - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + - pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[0]*(16-u32_x_frac) + - pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update vertical accumulator */ - u32_y_accum += pC->u32_y_inc[i]; - if (u32_y_accum>>16) - { - pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset; - u32_y_accum &= 0xffff; - } - } - } - pu8_data_out += pOut[i].u_stride - pOut[i].u_width; - - /* Update horizontal accumulator */ - pC->u32_x_accum[i] += pC->u32_x_inc[i]; - pu8_data_in = pu8_data_in_org; - } - } - } - else if(1 == i) /**< U&V plane */ - { - /**No +-90?rotation */ - if(M4OSA_FALSE == pC->m_bRevertXY) - { - /**< Loop on each row */ - for(j=0;ju32_y_accum[i]>>12)&15; - - /* Reinit horizontal weight factor */ - u32_x_accum = pC->u32_x_accum_start[i]; - - if(M4OSA_TRUE == pC->m_bFlipX) - { - - /**< Loop on each output pixel in a row */ - for(k=0;k> 12)&15; /* Fraction of Horizontal - weight factor */ - - pu8_src_top = (pu8_data_in - ((u32_x_accum >> 16) << 1)) -2 ; - - pu8_src_bottom = pu8_src_top + i32_tmp_offset; - - /* U plane weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[2]*(16-u32_x_frac) + - pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[2]*(16-u32_x_frac) + - pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - - // handle V plane - pu8_src_top = pu8_src_top + 1 ; - - pu8_src_bottom = pu8_src_bottom + 1; - - /* V plane weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[2]*(16-u32_x_frac) + - pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[2]*(16-u32_x_frac) + - pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update horizontal accumulator */ - u32_x_accum += pC->u32_x_inc[i]; - } - } - - else - { - /**< Loop on each output pixel in a row */ - for(k=0;k> 12)&15; /* Fraction of Horizontal - weight factor */ - - pu8_src_top = pu8_data_in + ((u32_x_accum >> 16) << 1); - - pu8_src_bottom = pu8_src_top + i32_tmp_offset; - - /* U plane weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + - pu8_src_top[2]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[0]*(16-u32_x_frac) + - pu8_src_bottom[2]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - // handle V plane - pu8_src_top = pu8_src_top + 1; - - pu8_src_bottom = pu8_src_bottom + 1; - - /* V plane weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + - pu8_src_top[2]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[0]*(16-u32_x_frac) + - pu8_src_bottom[2]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update horizontal accumulator */ - u32_x_accum += pC->u32_x_inc[i]; - } - } - - pu8_data_out += pOut[i].u_stride - pOut[i].u_width; - - /* Update vertical accumulator */ - pC->u32_y_accum[i] += pC->u32_y_inc[i]; - if (pC->u32_y_accum[i]>>16) - { - pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset; - pC->u32_y_accum[i] &= 0xffff; - } - } - } - /** +-90?rotation */ - else - { - pu8_data_in_org = pu8_data_in; - - /**< Loop on each output row */ - for(j=0;ju32_x_accum[i]>>12)&15; - - /* Reinit 
accumulator */ - u32_y_accum = pC->u32_y_accum_start[i]; - - if(M4OSA_TRUE == pC->m_bFlipX) - { - - /**< Loop on each output pixel in a row */ - for(k=0;k> 12)&15; /* Vertical weight factor */ - - - pu8_src_top = (pu8_data_in - ((pC->u32_x_accum[i] >> 16) << 1)) - 2; - - pu8_src_bottom = pu8_src_top + i32_tmp_offset; - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[2]*(16-u32_x_frac) + - pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[2]*(16-u32_x_frac) + - pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - // handle V plane - u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */ - - - pu8_src_top = pu8_src_top + 1; - - pu8_src_bottom = pu8_src_bottom + 1; - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[2]*(16-u32_x_frac) + - pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[2]*(16-u32_x_frac) + - pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update vertical accumulator */ - u32_y_accum += pC->u32_y_inc[i]; - if (u32_y_accum>>16) - { - pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset; - u32_y_accum &= 0xffff; - } - - } - } - else - { - /**< Loop on each output pixel in a row */ - for(k=0;k> 12)&15; /* Vertical weight factor */ - - pu8_src_top = pu8_data_in + ((pC->u32_x_accum[i] >> 16) << 1); - - pu8_src_bottom = pu8_src_top + i32_tmp_offset; - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + - pu8_src_top[2]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[0]*(16-u32_x_frac) + - pu8_src_bottom[2]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - // handle V plane - pu8_src_top = pu8_src_top + 1; - - pu8_src_bottom = pu8_src_bottom + 1; - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + - pu8_src_top[2]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[0]*(16-u32_x_frac) + - pu8_src_bottom[2]*u32_x_frac)*u32_y_frac )>>8); - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update vertical accumulator */ - u32_y_accum += pC->u32_y_inc[i]; - if (u32_y_accum>>16) - { - pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset; - u32_y_accum &= 0xffff; - } - } - } - pu8_data_out += pOut[i].u_stride - pOut[i].u_width; - - /* Update horizontal accumulator */ - pC->u32_x_accum[i] += pC->u32_x_inc[i]; - pu8_data_in = pu8_data_in_org; - } - } - } - else /**< alpha plane */ - { - /**No +-90?rotation */ - if(M4OSA_FALSE == pC->m_bRevertXY) - { - - /**< Loop on each row */ - for(j=0;ju32_y_accum[i]>>12)&15; - - /* Reinit horizontal weight factor */ - u32_x_accum = pC->u32_x_accum_start[i]; - - if(M4OSA_TRUE == pC->m_bFlipX) - { - - /**< Loop on each output pixel in a row */ - for(k=0;k> 12)&15; /* Fraction of Horizontal - weight factor */ - - pu8_src_top = (pu8_data_in - (u32_x_accum >> 16)) -1 ; - - pu8_src_bottom = pu8_src_top + i32_tmp_offset; - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) + - pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[1]*(16-u32_x_frac) + - pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); - - u32_temp_value= (u32_temp_value >> 7)*0xff; - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update horizontal accumulator */ - u32_x_accum += pC->u32_x_inc[i]; - } - } - - else - { - /**< Loop on each output pixel in a row */ - for(k=0;k> 
12)&15; /* Fraction of Horizontal - weight factor */ - - pu8_src_top = pu8_data_in + (u32_x_accum >> 16); - - pu8_src_bottom = pu8_src_top + i32_tmp_offset; - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + - pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[0]*(16-u32_x_frac) + - pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8); - - u32_temp_value= (u32_temp_value >> 7)*0xff; - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update horizontal accumulator */ - u32_x_accum += pC->u32_x_inc[i]; - } - - } - - pu8_data_out += pOut[i].u_stride - pOut[i].u_width; - - /* Update vertical accumulator */ - pC->u32_y_accum[i] += pC->u32_y_inc[i]; - if (pC->u32_y_accum[i]>>16) - { - pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset; - pC->u32_y_accum[i] &= 0xffff; - } - } - - } /**< M4OSA_FALSE == pC->m_bRevertXY */ - /** +-90?rotation */ - else - { - pu8_data_in_org = pu8_data_in; - - /**< Loop on each output row */ - for(j=0;ju32_x_accum[i]>>12)&15; - - /* Reinit accumulator */ - u32_y_accum = pC->u32_y_accum_start[i]; - - if(M4OSA_TRUE == pC->m_bFlipX) - { - - /**< Loop on each output pixel in a row */ - for(k=0;k> 12)&15; /* Vertical weight factor */ - - pu8_src_top = (pu8_data_in - (pC->u32_x_accum[i] >> 16)) - 1; - - pu8_src_bottom = pu8_src_top + i32_tmp_offset; - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) + - pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[1]*(16-u32_x_frac) + - pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8); - - u32_temp_value= (u32_temp_value >> 7)*0xff; - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update vertical accumulator */ - u32_y_accum += pC->u32_y_inc[i]; - if (u32_y_accum>>16) - { - pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset; - u32_y_accum &= 0xffff; - } - } - } - else - { - /**< Loop on each output pixel in a row */ - for(k=0;k> 12)&15; /* Vertical weight factor */ - - pu8_src_top = pu8_data_in + (pC->u32_x_accum[i] >> 16); - - pu8_src_bottom = pu8_src_top + i32_tmp_offset; - - /* Weighted combination */ - u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) + - pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) + - (pu8_src_bottom[0]*(16-u32_x_frac) + - pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8); - - u32_temp_value= (u32_temp_value >> 7)*0xff; - - *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value; - - /* Update vertical accumulator */ - u32_y_accum += pC->u32_y_inc[i]; - if (u32_y_accum>>16) - { - pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset; - u32_y_accum &= 0xffff; - } - } - } - pu8_data_out += pOut[i].u_stride - pOut[i].u_width; - - /* Update horizontal accumulator */ - pC->u32_x_accum[i] += pC->u32_x_inc[i]; - - pu8_data_in = pu8_data_in_org; - } - } /**< M4OSA_TRUE == pC->m_bRevertXY */ - }/** 2 == i */ - } - /**< In case of stripe mode, save current input pointer */ - if (M4OSA_TRUE == pC->m_params.m_bOutputStripe) - { - pC->pu8_data_in[i] = pu8_data_in; - } - } - - /**< Update number of processed rows, reset it if we have finished - with the whole processing */ - pC->m_procRows += pOut[0].u_height; - if(M4OSA_FALSE == pC->m_bRevertXY) - { - if(pC->m_params.m_outputSize.m_height <= pC->m_procRows) pC->m_procRows = 0; - } - else - { - if(pC->m_params.m_outputSize.m_width <= pC->m_procRows) pC->m_procRows = 0; - } - - return M4NO_ERROR ; - -} - - - diff --git a/frameworks/videoedit/vss/M4AIR_API_NV12.h 
b/frameworks/videoedit/vss/M4AIR_API_NV12.h deleted file mode 100644 index a34c561..0000000 --- a/frameworks/videoedit/vss/M4AIR_API_NV12.h +++ /dev/null @@ -1,132 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** -************************************************************************* - * @file M4AIR_API_NV12.h - * @brief Area of Interest Resizer API - * @note -************************************************************************* -*/ - -#ifndef M4AIR_API_NV12_H -#define M4AIR_API_NV12_H -/******************************* INCLUDES *******************************/ -#include "M4OSA_Types.h" -#include "M4OSA_Error.h" -#include "M4OSA_CoreID.h" -#include "M4OSA_Mutex.h" -#include "M4OSA_Memory.h" -#include "M4VIFI_FiltersAPI.h" -#include "M4Common_types.h" -#include "M4AIR_API.h" - - -/** - ****************************************************************************** - * M4OSA_ERR M4AIR_create_NV12(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat); - * @brief This function initialize an instance of the AIR. - * @param pContext: (IN/OUT) Address of the context to create - * @param inputFormat: (IN) input format type. - * @return M4NO_ERROR: there is no error - * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). 
Invalid formatType - * @return M4ERR_ALLOC: No more memory is available - ****************************************************************************** -*/ -M4OSA_ERR M4AIR_create_NV12(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat); - - -/** - ****************************************************************************** - * M4OSA_ERR M4AIR_cleanUp_NV12(M4OSA_Context pContext) - * @brief This function destroys an instance of the AIR component - * @param pContext: (IN) Context identifying the instance to destroy - * @return M4NO_ERROR: there is no error - * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). - * @return M4ERR_STATE: Internal state is incompatible with this function call. - ****************************************************************************** -*/ -M4OSA_ERR M4AIR_cleanUp_NV12(M4OSA_Context pContext); - - -/** - ****************************************************************************** - * M4OSA_ERR M4AIR_configure_NV12(M4OSA_Context pContext, M4AIR_Params* pParams) - * @brief This function will configure the AIR. - * @note It will set the input and output coordinates and sizes, - * and indicates if we will proceed in stripe or not. - * In case a M4AIR_get in stripe mode was on going, it will cancel this previous - * processing and reset the get process. - * @param pContext: (IN) Context identifying the instance - * @param pParams->m_bOutputStripe:(IN) Stripe mode. - * @param pParams->m_inputCoord: (IN) X,Y coordinates of the first valid pixel in input. - * @param pParams->m_inputSize: (IN) input ROI size. - * @param pParams->m_outputSize: (IN) output size. - * @return M4NO_ERROR: there is no error - * @return M4ERR_ALLOC: No more memory space to add a new effect. - * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). - * @return M4ERR_AIR_FORMAT_NOT_SUPPORTED: the requested input format is not supported. - ****************************************************************************** -*/ -M4OSA_ERR M4AIR_configure_NV12(M4OSA_Context pContext, M4AIR_Params* pParams); - - -/** - ****************************************************************************** - * M4OSA_ERR M4AIR_get_NV12(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut) - * @brief This function will provide the requested resized area of interest according to - * settings provided in M4AIR_configure. - * @note In case the input format type is JPEG, input plane(s) - * in pIn is not used. In normal mode, dimension specified in output plane(s) structure - * must be the same than the one specified in M4AIR_configure. In stripe mode, only - * the width will be the same, height will be taken as the stripe height (typically 16). - * In normal mode, this function is call once to get the full output picture. In stripe - * mode, it is called for each stripe till the whole picture has been retrieved,and - * the position of the output stripe in the output picture is internally incremented - * at each step. - * Any call to M4AIR_configure during stripe process will reset this one to the - * beginning of the output picture. - * @param pContext: (IN) Context identifying the instance - * @param pIn: (IN) Plane structure containing input Plane(s). - * @param pOut: (IN/OUT) Plane structure containing output Plane(s). - * @return M4NO_ERROR: there is no error - * @return M4ERR_ALLOC: No more memory space to add a new effect. - * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). 
- ****************************************************************************** -*/ -M4OSA_ERR M4AIR_get_NV12(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut); - -#endif - diff --git a/frameworks/videoedit/vss/M4MCS_NV12.h b/frameworks/videoedit/vss/M4MCS_NV12.h deleted file mode 100644 index afa68ab..0000000 --- a/frameworks/videoedit/vss/M4MCS_NV12.h +++ /dev/null @@ -1,42 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -#ifndef M4MCS_NV12_H -#define M4MCS_NV12_H - -M4OSA_ERR M4MCS_intApplyVPP_NV12(M4VPP_Context pContext, - M4VIFI_ImagePlane* pPlaneIn, M4VIFI_ImagePlane* pPlaneOut); - -#endif diff --git a/frameworks/videoedit/vss/M4MCS_VideoPreProcessing_NV12.c b/frameworks/videoedit/vss/M4MCS_VideoPreProcessing_NV12.c deleted file mode 100644 index b9ce04c..0000000 --- a/frameworks/videoedit/vss/M4MCS_VideoPreProcessing_NV12.c +++ /dev/null @@ -1,416 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. 
No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* - ****************************************************************************** - * M4OSA_ERR M4MCS_intApplyVPP_NV12(M4VPP_Context pContext, M4VIFI_ImagePlane* pPlaneIn, - * M4VIFI_ImagePlane* pPlaneOut) - * @brief Do the video rendering and the resize (if needed) - * @note It is called by the video encoder - * @param pContext (IN) VPP context, which actually is the MCS internal context in our case - * @param pPlaneIn (IN) Contains the image - * @param pPlaneOut (IN/OUT) Pointer to an array of 2 planes that will contain the output - * NV12 image - * @return M4NO_ERROR: No error - * @return M4MCS_ERR_VIDEO_DECODE_ERROR: the video decoding failed - * @return M4MCS_ERR_RESIZE_ERROR: the resizing failed - * @return Any error returned by an underlaying module - ****************************************************************************** - */ - - -/** - ******************************************************************** - * Includes - ******************************************************************** - */ -/* OSAL headers */ -#include "M4OSA_Memory.h" /* OSAL memory management */ -#include "M4OSA_Debug.h" /* OSAL debug management */ - - -/* Core headers */ -#include "M4MCS_InternalTypes.h" -#include "M4MCS_ErrorCodes.h" - -/** - * Video preprocessing interface definition */ -#include "M4VPP_API.h" - -/** - * Video filters */ -#include "M4VIFI_FiltersAPI.h" /**< for M4VIFI_ResizeBilinearYUV420toYUV420() */ -#include "M4AIR_API_NV12.h" -#include "VideoEditorToolsNV12.h" - -#define UV_PLANE_BORDER_VALUE 0x80 - -M4OSA_ERR M4MCS_intApplyVPP_NV12(M4VPP_Context pContext, - M4VIFI_ImagePlane* pPlaneIn, M4VIFI_ImagePlane* pPlaneOut) -{ - M4OSA_ERR err = M4NO_ERROR; - -/* This part is used only if video codecs are compiled*/ -#ifndef M4MCS_AUDIOONLY - /** - * The VPP context is actually the MCS context! */ - M4MCS_InternalContext *pC = (M4MCS_InternalContext*)(pContext); - - M4_MediaTime mtCts = pC->dViDecCurrentCts; - - /** - * When Closing after an error occured, it may happen that pReaderVideoAU->m_dataAddress has - * not been allocated yet. When closing in pause mode, the decoder can be null. - * We don't want an error to be returned because it would interrupt the close process and - * thus some resources would be locked. 
So we return M4NO_ERROR. - */ - /* Initialize to black plane the output plane if the media rendering - is black borders */ - if(pC->MediaRendering == M4MCS_kBlackBorders) - { - memset((void *)pPlaneOut[0].pac_data,Y_PLANE_BORDER_VALUE, - (pPlaneOut[0].u_height*pPlaneOut[0].u_stride)); - memset((void *)pPlaneOut[1].pac_data,UV_PLANE_BORDER_VALUE, - (pPlaneOut[1].u_height*pPlaneOut[1].u_stride)); - } - else if ((M4OSA_NULL == pC->ReaderVideoAU.m_dataAddress) || - (M4OSA_NULL == pC->pViDecCtxt)) - { - /** - * We must fill the input of the encoder with a dummy image, because - * encoding noise leads to a huge video AU, and thus a writer buffer overflow. */ - memset((void *)pPlaneOut[0].pac_data,0, - pPlaneOut[0].u_stride * pPlaneOut[0].u_height); - memset((void *)pPlaneOut[1].pac_data,0, - pPlaneOut[1].u_stride * pPlaneOut[1].u_height); - - M4OSA_TRACE1_0("M4MCS_intApplyVPP_NV12: pReaderVideoAU->m_dataAddress is M4OSA_NULL,\ - returning M4NO_ERROR"); - return M4NO_ERROR; - } - if(pC->isRenderDup == M4OSA_FALSE) - { - /** - * m_pPreResizeFrame different than M4OSA_NULL means that resizing is needed */ - if (M4OSA_NULL != pC->pPreResizeFrame) - { - /** FB 2008/10/20: - Used for cropping and black borders*/ - M4AIR_Params Params; - - M4OSA_TRACE3_0("M4MCS_intApplyVPP_NV12: Need to resize"); - err = pC->m_pVideoDecoder->m_pFctRender(pC->pViDecCtxt, &mtCts, - pC->pPreResizeFrame, M4OSA_TRUE); - if (M4NO_ERROR != err) - { - M4OSA_TRACE1_1("M4MCS_intApplyVPP_NV12: m_pFctRender returns 0x%x!", err); - return err; - } - - if(pC->MediaRendering == M4MCS_kResizing) - { - /* - * Call the resize filter. From the intermediate frame to the encoder - * image plane - */ - err = M4VIFI_ResizeBilinearNV12toNV12(M4OSA_NULL, - pC->pPreResizeFrame, pPlaneOut); - if (M4NO_ERROR != err) - { - M4OSA_TRACE1_1("M4MCS_intApplyVPP_NV12: M4ViFilResizeBilinearNV12toNV12\ - returns 0x%x!", err); - return err; - } - } - else - { - M4VIFI_ImagePlane pImagePlanesTemp[2]; - M4VIFI_ImagePlane* pPlaneTemp; - M4OSA_UInt8* pOutPlaneY = pPlaneOut[0].pac_data + - pPlaneOut[0].u_topleft; - M4OSA_UInt8* pOutPlaneUV = pPlaneOut[1].pac_data + - pPlaneOut[1].u_topleft; - M4OSA_UInt8* pInPlaneY = M4OSA_NULL; - M4OSA_UInt8* pInPlaneUV = M4OSA_NULL; - M4OSA_UInt32 i = 0; - - /*FB 2008/10/20: to keep media aspect ratio*/ - /*Initialize AIR Params*/ - Params.m_inputCoord.m_x = 0; - Params.m_inputCoord.m_y = 0; - Params.m_inputSize.m_height = pC->pPreResizeFrame->u_height; - Params.m_inputSize.m_width = pC->pPreResizeFrame->u_width; - Params.m_outputSize.m_width = pPlaneOut->u_width; - Params.m_outputSize.m_height = pPlaneOut->u_height; - Params.m_bOutputStripe = M4OSA_FALSE; - Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; - /** - Media rendering: Black borders*/ - if(pC->MediaRendering == M4MCS_kBlackBorders) - { - pImagePlanesTemp[0].u_width = pPlaneOut[0].u_width; - pImagePlanesTemp[0].u_height = pPlaneOut[0].u_height; - pImagePlanesTemp[0].u_stride = pPlaneOut[0].u_width; - pImagePlanesTemp[0].u_topleft = 0; - - pImagePlanesTemp[1].u_width = pPlaneOut[1].u_width; - pImagePlanesTemp[1].u_height = pPlaneOut[1].u_height; - pImagePlanesTemp[1].u_stride = pPlaneOut[1].u_width; - pImagePlanesTemp[1].u_topleft = 0; - - - /* Allocates plan in local image plane structure */ - pImagePlanesTemp[0].pac_data = - (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(pImagePlanesTemp[0]\ - .u_width * pImagePlanesTemp[0].u_height, M4VS, - (M4OSA_Char *)"M4xVSS_PictureCallbackFct: temporary plane bufferY") ; - if(pImagePlanesTemp[0].pac_data == M4OSA_NULL) - { 
- M4OSA_TRACE1_0("Error alloc in M4MCS_intApplyVPP_NV12"); - return M4ERR_ALLOC; - } - pImagePlanesTemp[1].pac_data = - (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(pImagePlanesTemp[1]\ - .u_width * pImagePlanesTemp[1].u_height, M4VS, - (M4OSA_Char *)"M4xVSS_PictureCallbackFct: temporary plane bufferU") ; - if(pImagePlanesTemp[1].pac_data == M4OSA_NULL) - { - M4OSA_TRACE1_0("Error alloc in M4MCS_intApplyVPP_NV12"); - return M4ERR_ALLOC; - } - - pInPlaneY = pImagePlanesTemp[0].pac_data ; - pInPlaneUV = pImagePlanesTemp[1].pac_data ; - - memset((void *)pImagePlanesTemp[0].pac_data,Y_PLANE_BORDER_VALUE, - (pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride)); - memset((void *)pImagePlanesTemp[1].pac_data,UV_PLANE_BORDER_VALUE, - (pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride)); - if((M4OSA_UInt32)((pC->pPreResizeFrame->u_height * pPlaneOut->u_width)\ - /pC->pPreResizeFrame->u_width) <= pPlaneOut->u_height) - //Params.m_inputSize.m_height < Params.m_inputSize.m_width) - { - /*it is height so black borders will be on the top and on the bottom side*/ - Params.m_outputSize.m_width = pPlaneOut->u_width; - Params.m_outputSize.m_height = - (M4OSA_UInt32) - ((pC->pPreResizeFrame->u_height * pPlaneOut->u_width)\ - /pC->pPreResizeFrame->u_width); - /*number of lines at the top*/ - pImagePlanesTemp[0].u_topleft = - (M4MCS_ABS((M4OSA_Int32) - (pImagePlanesTemp[0].u_height\ - -Params.m_outputSize.m_height)>>1)) * - pImagePlanesTemp[0].u_stride; - pImagePlanesTemp[0].u_height = Params.m_outputSize.m_height; - pImagePlanesTemp[1].u_topleft = - (M4MCS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height\ - -(Params.m_outputSize.m_height>>1)))>>1)\ - * pImagePlanesTemp[1].u_stride; - pImagePlanesTemp[1].u_height = Params.m_outputSize.m_height>>1; - - } - else - { - /*it is width so black borders will be on the left and right side*/ - Params.m_outputSize.m_height = pPlaneOut->u_height; - Params.m_outputSize.m_width = - (M4OSA_UInt32)((pC->pPreResizeFrame->u_width - * pPlaneOut->u_height)\ - /pC->pPreResizeFrame->u_height); - - pImagePlanesTemp[0].u_topleft = - (M4MCS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width-\ - Params.m_outputSize.m_width)>>1)); - pImagePlanesTemp[0].u_width = Params.m_outputSize.m_width; - pImagePlanesTemp[1].u_topleft = - (M4MCS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width-\ - Params.m_outputSize.m_width))>>1); - pImagePlanesTemp[1].u_width = Params.m_outputSize.m_width; - - } - - /*Width and height have to be even*/ - Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; - Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; - Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; - Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; - pImagePlanesTemp[0].u_width = (pImagePlanesTemp[0].u_width>>1)<<1; - pImagePlanesTemp[1].u_width = (pImagePlanesTemp[1].u_width>>1)<<1; - pImagePlanesTemp[0].u_height = (pImagePlanesTemp[0].u_height>>1)<<1; - pImagePlanesTemp[1].u_height = (pImagePlanesTemp[1].u_height>>1)<<1; - - - /*Check that values are coherent*/ - if(Params.m_inputSize.m_height == Params.m_outputSize.m_height) - { - Params.m_inputSize.m_width = Params.m_outputSize.m_width; - } - else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width) - { - Params.m_inputSize.m_height = Params.m_outputSize.m_height; - } - pPlaneTemp = pImagePlanesTemp; - } - /** - Media rendering: Cropping*/ - if(pC->MediaRendering == M4MCS_kCropping) - { - Params.m_outputSize.m_height = pPlaneOut->u_height; - Params.m_outputSize.m_width = 
pPlaneOut->u_width; - if((Params.m_outputSize.m_height * Params.m_inputSize.m_width)\ - /Params.m_outputSize.m_width>1)<<1; - Params.m_inputCoord.m_y = - (M4OSA_Int32)((M4OSA_Int32) - ((pC->pPreResizeFrame->u_height\ - - Params.m_inputSize.m_height))>>1); - } - else - { - /*width will be cropped*/ - Params.m_inputSize.m_width = - (M4OSA_UInt32)((Params.m_outputSize.m_width\ - * Params.m_inputSize.m_height) / - Params.m_outputSize.m_height); - Params.m_inputSize.m_width = - (Params.m_inputSize.m_width>>1)<<1; - Params.m_inputCoord.m_x = - (M4OSA_Int32)((M4OSA_Int32) - ((pC->pPreResizeFrame->u_width\ - - Params.m_inputSize.m_width))>>1); - } - pPlaneTemp = pPlaneOut; - } - /** - * Call AIR functions */ - if(M4OSA_NULL == pC->m_air_context) - { - err = M4AIR_create_NV12(&pC->m_air_context, M4AIR_kNV12P); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\ - Error when initializing AIR_NV12: 0x%x", err); - return err; - } - } - - err = M4AIR_configure_NV12(pC->m_air_context, &Params); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\ - Error when configuring AIR_NV12: 0x%x", err); - M4AIR_cleanUp_NV12(pC->m_air_context); - return err; - } - - err = M4AIR_get_NV12(pC->m_air_context, pC->pPreResizeFrame, - pPlaneTemp); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\ - Error when getting AIR_NV12 plane: 0x%x", err); - M4AIR_cleanUp_NV12(pC->m_air_context); - return err; - } - if(pC->MediaRendering == M4MCS_kBlackBorders) - { - for(i=0; im_pVideoDecoder->m_pFctRender(pC->pViDecCtxt, - &mtCts, pPlaneOut, - M4OSA_TRUE); - if (M4NO_ERROR != err) - { - M4OSA_TRACE1_1("M4MCS_intApplyVPP_NV12: m_pFctRender returns 0x%x!", err); - return err; - } - } - pC->lastDecodedPlane = pPlaneOut; - } - else - { - /* Copy last decoded plane to output plane */ - memcpy((void *)pPlaneOut[0].pac_data, - (void *)pC->lastDecodedPlane[0].pac_data, - (pPlaneOut[0].u_height * pPlaneOut[0].u_width)); - memcpy((void *)pPlaneOut[1].pac_data, - (void *)pC->lastDecodedPlane[1].pac_data, - (pPlaneOut[1].u_height * pPlaneOut[1].u_width)); - - pC->lastDecodedPlane = pPlaneOut; - } - - -#endif /*M4MCS_AUDIOONLY*/ - M4OSA_TRACE3_0("M4MCS_intApplyVPP_NV12: returning M4NO_ERROR"); - return M4NO_ERROR; -} - diff --git a/frameworks/videoedit/vss/M4VSS3GPP_EditVideo_NV12.c b/frameworks/videoedit/vss/M4VSS3GPP_EditVideo_NV12.c deleted file mode 100644 index 0871f92..0000000 --- a/frameworks/videoedit/vss/M4VSS3GPP_EditVideo_NV12.c +++ /dev/null @@ -1,660 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. 
- * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Our header */ -#include "M4VSS3GPP_API.h" -#include "M4VSS3GPP_InternalTypes.h" -#include "M4VSS3GPP_InternalFunctions.h" -#include "M4VSS3GPP_InternalConfig.h" -#include "M4VSS3GPP_ErrorCodes.h" - -// StageFright encoders require %16 resolution -#include "M4ENCODER_common.h" -/** - * OSAL headers */ -#include "M4OSA_Memory.h" /**< OSAL memory management */ -#include "M4OSA_Debug.h" /**< OSAL debug management */ - -#include "M4AIR_API_NV12.h" -#include "VideoEditorToolsNV12.h" - -#define M4xVSS_ABS(a) ( ( (a) < (0) ) ? (-(a)) : (a) ) -#define Y_PLANE_BORDER_VALUE 0x00 -#define UV_PLANE_BORDER_VALUE 0x80 - -/** -****************************************************************************** -* M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toNV12(M4OSA_Void* pFileIn, -* M4OSA_FileReadPointer* pFileReadPtr, -* M4VIFI_ImagePlane* pImagePlanes, -* M4OSA_UInt32 width, -* M4OSA_UInt32 height); -* @brief It Coverts and resizes a ARGB8888 image to NV12 -* @note -* @param pFileIn (IN) The ARGB888 input file -* @param pFileReadPtr (IN) Pointer on filesystem functions -* @param pImagePlanes (IN/OUT) Pointer on NV12 output planes allocated by the user. 
-* ARGB8888 image will be converted and resized to output -* NV12 plane size -* @param width (IN) width of the ARGB8888 -* @param height (IN) height of the ARGB8888 -* @return M4NO_ERROR: No error -* @return M4ERR_ALLOC: memory error -* @return M4ERR_PARAMETER: At least one of the function parameters is null -****************************************************************************** -*/ - -M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toNV12(M4OSA_Void* pFileIn, - M4OSA_FileReadPointer* pFileReadPtr, M4VIFI_ImagePlane* pImagePlanes, - M4OSA_UInt32 width, M4OSA_UInt32 height) -{ - M4OSA_Context pARGBIn; - M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2; - M4OSA_UInt32 frameSize_argb = width * height * 4; - M4OSA_UInt32 frameSize_rgb888 = width * height * 3; - M4OSA_UInt32 i = 0,j= 0; - M4OSA_ERR err = M4NO_ERROR; - - M4OSA_UInt8 *pArgbPlane = - (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, - M4VS, (M4OSA_Char*)"argb data"); - if (pArgbPlane == M4OSA_NULL) { - M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toNV12: \ - Failed to allocate memory for ARGB plane"); - return M4ERR_ALLOC; - } - - /* Get file size */ - err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead); - if (err != M4NO_ERROR) { - M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toNV12 : \ - Can not open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err); - free(pArgbPlane); - pArgbPlane = M4OSA_NULL; - goto cleanup; - } - - err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pArgbPlane, - &frameSize_argb); - if (err != M4NO_ERROR) { - M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toNV12 \ - Can not read ARGB8888 file %s, error: 0x%x\n",pFileIn, err); - pFileReadPtr->closeRead(pARGBIn); - free(pArgbPlane); - pArgbPlane = M4OSA_NULL; - goto cleanup; - } - - err = pFileReadPtr->closeRead(pARGBIn); - if(err != M4NO_ERROR) { - M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toNV12 \ - Can not close ARGB8888 file %s, error: 0x%x\n",pFileIn, err); - free(pArgbPlane); - pArgbPlane = M4OSA_NULL; - goto cleanup; - } - - rgbPlane1.pac_data = - (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888, - M4VS, (M4OSA_Char*)"RGB888 plane1"); - if(rgbPlane1.pac_data == M4OSA_NULL) { - M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toNV12 \ - Failed to allocate memory for rgb plane1"); - free(pArgbPlane); - return M4ERR_ALLOC; - } - rgbPlane1.u_height = height; - rgbPlane1.u_width = width; - rgbPlane1.u_stride = width*3; - rgbPlane1.u_topleft = 0; - - - /** Remove the alpha channel */ - for (i=0, j = 0; i < frameSize_argb; i++) { - if ((i % 4) == 0) continue; - rgbPlane1.pac_data[j] = pArgbPlane[i]; - j++; - } - free(pArgbPlane); - - /** - * Check if resizing is required with color conversion */ - if(width != pImagePlanes->u_width || height != pImagePlanes->u_height) { - - frameSize_rgb888 = pImagePlanes->u_width * pImagePlanes->u_height * 3; - rgbPlane2.pac_data = - (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888, M4VS, - (M4OSA_Char*)"rgb Plane2"); - if(rgbPlane2.pac_data == M4OSA_NULL) { - M4OSA_TRACE1_0("Failed to allocate memory for rgb plane2"); - free(rgbPlane1.pac_data); - return M4ERR_ALLOC; - } - rgbPlane2.u_height = pImagePlanes->u_height; - rgbPlane2.u_width = pImagePlanes->u_width; - rgbPlane2.u_stride = pImagePlanes->u_width*3; - rgbPlane2.u_topleft = 0; - - /* Resizing */ - err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, - &rgbPlane1, &rgbPlane2); - free(rgbPlane1.pac_data); - if(err != M4NO_ERROR) { - M4OSA_TRACE1_1("error resizing 
RGB888 to RGB888: 0x%x\n", err); - free(rgbPlane2.pac_data); - return err; - } - - /*Converting Resized RGB888 to NV12 */ - err = M4VIFI_RGB888toNV12(M4OSA_NULL, &rgbPlane2, pImagePlanes); - free(rgbPlane2.pac_data); - if(err != M4NO_ERROR) { - M4OSA_TRACE1_1("error converting from RGB888 to NV12: 0x%x\n", err); - return err; - } - } else { - err = M4VIFI_RGB888toNV12(M4OSA_NULL, &rgbPlane1, pImagePlanes); - if(err != M4NO_ERROR) { - M4OSA_TRACE1_1("error when converting from RGB to NV12: 0x%x\n", err); - } - free(rgbPlane1.pac_data); - } -cleanup: - M4OSA_TRACE3_0("M4VSS3GPP_internalConvertAndResizeARGB8888toNV12 exit"); - return err; -} - - -M4OSA_ERR M4VSS3GPP_intApplyRenderingMode_NV12(M4VSS3GPP_InternalEditContext *pC, - M4xVSS_MediaRendering renderingMode, M4VIFI_ImagePlane* pInplane, - M4VIFI_ImagePlane* pOutplane) -{ - - M4OSA_ERR err = M4NO_ERROR; - M4AIR_Params airParams; - M4VIFI_ImagePlane pImagePlanesTemp[2]; - M4OSA_UInt32 i = 0; - - M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode_NV12 begin"); - - if (renderingMode == M4xVSS_kBlackBorders) { - memset((void *)pOutplane[0].pac_data, Y_PLANE_BORDER_VALUE, - (pOutplane[0].u_height*pOutplane[0].u_stride)); - memset((void *)pOutplane[1].pac_data, UV_PLANE_BORDER_VALUE, - (pOutplane[1].u_height*pOutplane[1].u_stride)); - } - - if (renderingMode == M4xVSS_kResizing) { - /** - * Call the resize filter. - * From the intermediate frame to the encoder image plane */ - err = M4VIFI_ResizeBilinearNV12toNV12(M4OSA_NULL, - pInplane, pOutplane); - if (M4NO_ERROR != err) { - M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ - M4ViFilResizeBilinearNV12toNV12 returns 0x%x!", err); - return err; - } - } else { - M4VIFI_ImagePlane* pPlaneTemp = M4OSA_NULL; - M4OSA_UInt8* pOutPlaneY = - pOutplane[0].pac_data + pOutplane[0].u_topleft; - M4OSA_UInt8* pOutPlaneUV = - pOutplane[1].pac_data + pOutplane[1].u_topleft; - - M4OSA_UInt8* pInPlaneY = M4OSA_NULL; - M4OSA_UInt8* pInPlaneUV = M4OSA_NULL; - - /* To keep media aspect ratio*/ - /* Initialize AIR Params*/ - airParams.m_inputCoord.m_x = 0; - airParams.m_inputCoord.m_y = 0; - airParams.m_inputSize.m_height = pInplane->u_height; - airParams.m_inputSize.m_width = pInplane->u_width; - airParams.m_outputSize.m_width = pOutplane->u_width; - airParams.m_outputSize.m_height = pOutplane->u_height; - airParams.m_bOutputStripe = M4OSA_FALSE; - airParams.m_outputOrientation = M4COMMON_kOrientationTopLeft; - - /** - Media rendering: Black borders*/ - if (renderingMode == M4xVSS_kBlackBorders) { - pImagePlanesTemp[0].u_width = pOutplane[0].u_width; - pImagePlanesTemp[0].u_height = pOutplane[0].u_height; - pImagePlanesTemp[0].u_stride = pOutplane[0].u_width; - pImagePlanesTemp[0].u_topleft = 0; - - pImagePlanesTemp[1].u_width = pOutplane[1].u_width; - pImagePlanesTemp[1].u_height = pOutplane[1].u_height; - pImagePlanesTemp[1].u_stride = pOutplane[1].u_width; - pImagePlanesTemp[1].u_topleft = 0; - - - /** - * Allocates plan in local image plane structure */ - pImagePlanesTemp[0].pac_data = - (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( - pImagePlanesTemp[0].u_width * pImagePlanesTemp[0].u_height, - M4VS, (M4OSA_Char *)"pImagePlaneTemp Y") ; - if (pImagePlanesTemp[0].pac_data == M4OSA_NULL) { - M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode_NV12: Alloc Error"); - return M4ERR_ALLOC; - } - pImagePlanesTemp[1].pac_data = - (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( - pImagePlanesTemp[1].u_width * pImagePlanesTemp[1].u_height, - M4VS, (M4OSA_Char *)"pImagePlaneTemp UV") ; - if (pImagePlanesTemp[1].pac_data == M4OSA_NULL) { - 
M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode_NV12: Alloc Error"); - free(pImagePlanesTemp[0].pac_data); - return M4ERR_ALLOC; - } - - pInPlaneY = pImagePlanesTemp[0].pac_data ; - pInPlaneUV = pImagePlanesTemp[1].pac_data ; - - memset((void *)pImagePlanesTemp[0].pac_data, Y_PLANE_BORDER_VALUE, - (pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride)); - memset((void *)pImagePlanesTemp[1].pac_data, UV_PLANE_BORDER_VALUE, - (pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride)); - - M4OSA_UInt32 height = - (pInplane->u_height * pOutplane->u_width) /pInplane->u_width; - - if (height <= pOutplane->u_height) { - /** - * Black borders will be on the top and the bottom side */ - airParams.m_outputSize.m_width = pOutplane->u_width; - airParams.m_outputSize.m_height = height; - /** - * Number of lines at the top */ - pImagePlanesTemp[0].u_topleft = - (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_height - - airParams.m_outputSize.m_height)>>1)) * - pImagePlanesTemp[0].u_stride; - pImagePlanesTemp[0].u_height = airParams.m_outputSize.m_height; - pImagePlanesTemp[1].u_topleft = - (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height - - (airParams.m_outputSize.m_height>>1)))>>1) * - pImagePlanesTemp[1].u_stride; - pImagePlanesTemp[1].u_topleft = ((pImagePlanesTemp[1].u_topleft>>1)<<1); - pImagePlanesTemp[1].u_height = - airParams.m_outputSize.m_height>>1; - - } else { - /** - * Black borders will be on the left and right side */ - airParams.m_outputSize.m_height = pOutplane->u_height; - airParams.m_outputSize.m_width = - (M4OSA_UInt32)((pInplane->u_width * pOutplane->u_height)/pInplane->u_height); - - pImagePlanesTemp[0].u_topleft = - (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width - - airParams.m_outputSize.m_width)>>1)); - pImagePlanesTemp[0].u_width = airParams.m_outputSize.m_width; - pImagePlanesTemp[1].u_topleft = - (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width - - airParams.m_outputSize.m_width))>>1); - pImagePlanesTemp[1].u_topleft = ((pImagePlanesTemp[1].u_topleft>>1)<<1); - pImagePlanesTemp[1].u_width = airParams.m_outputSize.m_width; - } - - /** - * Width and height have to be even */ - airParams.m_outputSize.m_width = - (airParams.m_outputSize.m_width>>1)<<1; - airParams.m_outputSize.m_height = - (airParams.m_outputSize.m_height>>1)<<1; - airParams.m_inputSize.m_width = - (airParams.m_inputSize.m_width>>1)<<1; - airParams.m_inputSize.m_height = - (airParams.m_inputSize.m_height>>1)<<1; - pImagePlanesTemp[0].u_width = - (pImagePlanesTemp[0].u_width>>1)<<1; - pImagePlanesTemp[1].u_width = - (pImagePlanesTemp[1].u_width>>1)<<1; - pImagePlanesTemp[0].u_height = - (pImagePlanesTemp[0].u_height>>1)<<1; - pImagePlanesTemp[1].u_height = - (pImagePlanesTemp[1].u_height>>1)<<1; - - /** - * Check that values are coherent */ - if (airParams.m_inputSize.m_height == - airParams.m_outputSize.m_height) { - airParams.m_inputSize.m_width = - airParams.m_outputSize.m_width; - } else if (airParams.m_inputSize.m_width == - airParams.m_outputSize.m_width) { - airParams.m_inputSize.m_height = - airParams.m_outputSize.m_height; - } - pPlaneTemp = pImagePlanesTemp; - } - - /** - * Media rendering: Cropping*/ - if (renderingMode == M4xVSS_kCropping) { - airParams.m_outputSize.m_height = pOutplane->u_height; - airParams.m_outputSize.m_width = pOutplane->u_width; - if ((airParams.m_outputSize.m_height * - airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width < - airParams.m_inputSize.m_height) { - /* Height will be cropped */ - airParams.m_inputSize.m_height = - 
(M4OSA_UInt32)((airParams.m_outputSize.m_height * - airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width); - airParams.m_inputSize.m_height = - (airParams.m_inputSize.m_height>>1)<<1; - airParams.m_inputCoord.m_y = - (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_height - - airParams.m_inputSize.m_height))>>1); - } else { - /* Width will be cropped */ - airParams.m_inputSize.m_width = - (M4OSA_UInt32)((airParams.m_outputSize.m_width * - airParams.m_inputSize.m_height)/airParams.m_outputSize.m_height); - airParams.m_inputSize.m_width = - (airParams.m_inputSize.m_width>>1)<<1; - airParams.m_inputCoord.m_x = - (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_width - - airParams.m_inputSize.m_width))>>1); - } - pPlaneTemp = pOutplane; - } - /** - * Call AIR functions */ - if (M4OSA_NULL == pC->m_air_context) { - err = M4AIR_create_NV12(&pC->m_air_context, M4AIR_kNV12P); - if(err != M4NO_ERROR) { - M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode_NV12: \ - M4AIR_create returned error 0x%x", err); - goto cleanUp; - } - } - - err = M4AIR_configure_NV12(pC->m_air_context, &airParams); - if (err != M4NO_ERROR) { - M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode_NV12: \ - Error when configuring AIR: 0x%x", err); - M4AIR_cleanUp_NV12(pC->m_air_context); - goto cleanUp; - } - - err = M4AIR_get_NV12(pC->m_air_context, pInplane, pPlaneTemp); - if (err != M4NO_ERROR) { - M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode_NV12: \ - Error when getting AIR plane: 0x%x", err); - M4AIR_cleanUp_NV12(pC->m_air_context); - goto cleanUp; - } - - if (renderingMode == M4xVSS_kBlackBorders) { - for (i=0; ipPlaneYuv = - (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc( - 2*sizeof(M4VIFI_ImagePlane), M4VS, - (M4OSA_Char*)"pPlaneYuv"); - - if (pClipCtxt->pPlaneYuv == M4OSA_NULL) { - return M4ERR_ALLOC; - } - - pClipCtxt->pPlaneYuv[0].u_height = - pClipCtxt->pSettings->ClipProperties.uiStillPicHeight; - pClipCtxt->pPlaneYuv[0].u_width = - pClipCtxt->pSettings->ClipProperties.uiStillPicWidth; - pClipCtxt->pPlaneYuv[0].u_stride = pClipCtxt->pPlaneYuv[0].u_width; - pClipCtxt->pPlaneYuv[0].u_topleft = 0; - - pClipCtxt->pPlaneYuv[0].pac_data = - (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc( - pClipCtxt->pPlaneYuv[0].u_height* - pClipCtxt->pPlaneYuv[0].u_width * 1.5, - M4VS, (M4OSA_Char*)"imageClip YUV data"); - if (pClipCtxt->pPlaneYuv[0].pac_data == M4OSA_NULL) { - free(pClipCtxt->pPlaneYuv); - return M4ERR_ALLOC; - } - - pClipCtxt->pPlaneYuv[1].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1; - pClipCtxt->pPlaneYuv[1].u_width = pClipCtxt->pPlaneYuv[0].u_width; - pClipCtxt->pPlaneYuv[1].u_stride = pClipCtxt->pPlaneYuv[1].u_width; - pClipCtxt->pPlaneYuv[1].u_topleft = 0; - pClipCtxt->pPlaneYuv[1].pac_data = (M4VIFI_UInt8*)( - pClipCtxt->pPlaneYuv[0].pac_data + - pClipCtxt->pPlaneYuv[0].u_height * - pClipCtxt->pPlaneYuv[0].u_width); - - - err = M4VSS3GPP_internalConvertAndResizeARGB8888toNV12 ( - pClipCtxt->pSettings->pFile, - pC->pOsaFileReadPtr, - pClipCtxt->pPlaneYuv, - pClipCtxt->pSettings->ClipProperties.uiStillPicWidth, - pClipCtxt->pSettings->ClipProperties.uiStillPicHeight); - if (M4NO_ERROR != err) { - free(pClipCtxt->pPlaneYuv[0].pac_data); - free(pClipCtxt->pPlaneYuv); - return err; - } - - // Set the YUV data to the decoder using setoption - err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption ( - pClipCtxt->pViDecCtxt, - M4DECODER_kOptionID_DecYuvData, - (M4OSA_DataOption)pClipCtxt->pPlaneYuv); // FIXME: not sure when call this - if (M4NO_ERROR != err) { - free(pClipCtxt->pPlaneYuv[0].pac_data); - free(pClipCtxt->pPlaneYuv); 
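
For reference, the plane bookkeeping in this code follows one pattern used throughout the NV12 helpers in this patch: a single contiguous buffer holds the full-resolution Y plane followed by a half-height interleaved UV plane, which is why the error paths free pPlaneYuv[0].pac_data and the plane array but never a separate UV buffer. A minimal standalone sketch of that layout, assuming even width and height (NV12Plane and nv12_alloc_pair are illustrative stand-ins for M4VIFI_ImagePlane and the M4OSA allocator, not names from this patch):

    #include <stdint.h>
    #include <stdlib.h>

    typedef struct {                        /* stand-in for M4VIFI_ImagePlane */
        uint32_t u_width, u_height, u_stride, u_topleft;
        uint8_t *pac_data;
    } NV12Plane;

    /* Allocate a Y plane plus interleaved UV plane over one w*h*3/2 buffer. */
    static int nv12_alloc_pair(NV12Plane p[2], uint32_t w, uint32_t h)
    {
        uint8_t *buf = (uint8_t *)malloc((size_t)w * h * 3 / 2);
        if (buf == NULL)
            return -1;                          /* caller maps this to M4ERR_ALLOC */
        p[0].u_width  = w; p[0].u_height = h;      /* Y plane */
        p[0].u_stride = w; p[0].u_topleft = 0;
        p[0].pac_data = buf;
        p[1].u_width  = w; p[1].u_height = h >> 1; /* UV plane, half height */
        p[1].u_stride = w; p[1].u_topleft = 0;
        p[1].pac_data = buf + (size_t)w * h;       /* UV data follows Y data */
        return 0;
    }

Releasing such a pair is a single free of p[0].pac_data plus the plane array itself, matching the cleanup order in the surrounding code.
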
- return err; - } - - pClipCtxt->pSettings->ClipProperties.bSetImageData = M4OSA_TRUE; - - // Allocate Yuv plane with effect - pClipCtxt->pPlaneYuvWithEffect = - (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc( - 2*sizeof(M4VIFI_ImagePlane), M4VS, - (M4OSA_Char*)"pPlaneYuvWithEffect"); - if (pClipCtxt->pPlaneYuvWithEffect == M4OSA_NULL) { - free(pClipCtxt->pPlaneYuv[0].pac_data); - free(pClipCtxt->pPlaneYuv); - return M4ERR_ALLOC; - } - - pClipCtxt->pPlaneYuvWithEffect[0].u_height = pC->ewc.uiVideoHeight; - pClipCtxt->pPlaneYuvWithEffect[0].u_width = pC->ewc.uiVideoWidth; - pClipCtxt->pPlaneYuvWithEffect[0].u_stride = pC->ewc.uiVideoWidth; - pClipCtxt->pPlaneYuvWithEffect[0].u_topleft = 0; - - pClipCtxt->pPlaneYuvWithEffect[0].pac_data = - (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc( - pC->ewc.uiVideoHeight * pC->ewc.uiVideoWidth * 1.5, - M4VS, (M4OSA_Char*)"imageClip YUV data"); - if (pClipCtxt->pPlaneYuvWithEffect[0].pac_data == M4OSA_NULL) { - free(pClipCtxt->pPlaneYuv[0].pac_data); - free(pClipCtxt->pPlaneYuv); - free(pClipCtxt->pPlaneYuvWithEffect); - return M4ERR_ALLOC; - } - - pClipCtxt->pPlaneYuvWithEffect[1].u_height = - pClipCtxt->pPlaneYuvWithEffect[0].u_height >>1; - pClipCtxt->pPlaneYuvWithEffect[1].u_width = - pClipCtxt->pPlaneYuvWithEffect[0].u_width; - pClipCtxt->pPlaneYuvWithEffect[1].u_stride = - pClipCtxt->pPlaneYuvWithEffect[1].u_width; - pClipCtxt->pPlaneYuvWithEffect[1].u_topleft = 0; - pClipCtxt->pPlaneYuvWithEffect[1].pac_data = (M4VIFI_UInt8*)( - pClipCtxt->pPlaneYuvWithEffect[0].pac_data + - pClipCtxt->pPlaneYuvWithEffect[0].u_height * - pClipCtxt->pPlaneYuvWithEffect[0].u_width); - - err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption( - pClipCtxt->pViDecCtxt, M4DECODER_kOptionID_YuvWithEffectContiguous, - (M4OSA_DataOption)pClipCtxt->pPlaneYuvWithEffect); - if (M4NO_ERROR != err) { - free(pClipCtxt->pPlaneYuv[0].pac_data); - free(pClipCtxt->pPlaneYuv); - free(pClipCtxt->pPlaneYuvWithEffect); - return err; - } - - return M4NO_ERROR; -} - - -M4OSA_ERR M4VSS3GPP_intRotateVideo_NV12(M4VIFI_ImagePlane* pPlaneIn, - M4OSA_UInt32 rotationDegree) -{ - M4OSA_ERR err = M4NO_ERROR; - M4VIFI_ImagePlane outPlane[2]; - - if (rotationDegree != 180) { - // Swap width and height of in plane - outPlane[0].u_width = pPlaneIn[0].u_height; - outPlane[0].u_height = pPlaneIn[0].u_width; - outPlane[0].u_stride = outPlane[0].u_width; - outPlane[0].u_topleft = 0; - outPlane[0].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc( - (outPlane[0].u_stride*outPlane[0].u_height), M4VS, - (M4OSA_Char*)("out Y plane for rotation")); - if (outPlane[0].pac_data == M4OSA_NULL) { - return M4ERR_ALLOC; - } - - outPlane[1].u_width = outPlane[0].u_width; - outPlane[1].u_height = outPlane[0].u_height >> 1; - outPlane[1].u_stride = outPlane[1].u_width; - outPlane[1].u_topleft = 0; - outPlane[1].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc( - (outPlane[1].u_stride*outPlane[1].u_height), M4VS, - (M4OSA_Char*)("out U plane for rotation")); - if (outPlane[1].pac_data == M4OSA_NULL) { - free((void *)outPlane[0].pac_data); - return M4ERR_ALLOC; - } - } - - switch(rotationDegree) { - case 90: - M4VIFI_Rotate90RightNV12toNV12(M4OSA_NULL, pPlaneIn, outPlane); - break; - - case 180: - // In plane rotation, so planeOut = planeIn - M4VIFI_Rotate180NV12toNV12(M4OSA_NULL, pPlaneIn, pPlaneIn); - break; - - case 270: - M4VIFI_Rotate90LeftNV12toNV12(M4OSA_NULL, pPlaneIn, outPlane); - break; - - default: - M4OSA_TRACE1_1("invalid rotation param %d", (int)rotationDegree); - err = M4ERR_PARAMETER; - break; - } - - if 
(rotationDegree != 180) { - memset((void *)pPlaneIn[0].pac_data, 0, - (pPlaneIn[0].u_width*pPlaneIn[0].u_height)); - memset((void *)pPlaneIn[1].pac_data, 0, - (pPlaneIn[1].u_width*pPlaneIn[1].u_height)); - - // Copy Y, U and V planes - memcpy((void *)pPlaneIn[0].pac_data, (void *)outPlane[0].pac_data, - (pPlaneIn[0].u_width*pPlaneIn[0].u_height)); - memcpy((void *)pPlaneIn[1].pac_data, (void *)outPlane[1].pac_data, - (pPlaneIn[1].u_width*pPlaneIn[1].u_height)); - - free((void *)outPlane[0].pac_data); - free((void *)outPlane[1].pac_data); - - // Swap the width and height of the in plane - uint32_t temp = 0; - temp = pPlaneIn[0].u_width; - pPlaneIn[0].u_width = pPlaneIn[0].u_height; - pPlaneIn[0].u_height = temp; - pPlaneIn[0].u_stride = pPlaneIn[0].u_width; - - pPlaneIn[1].u_width = pPlaneIn[0].u_width; - pPlaneIn[1].u_height = pPlaneIn[0].u_height >> 1; - pPlaneIn[1].u_stride = pPlaneIn[1].u_width; - - } - - return err; -} - diff --git a/frameworks/videoedit/vss/M4xVSS_NV12.h b/frameworks/videoedit/vss/M4xVSS_NV12.h deleted file mode 100644 index 91f1527..0000000 --- a/frameworks/videoedit/vss/M4xVSS_NV12.h +++ /dev/null @@ -1,83 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -#ifndef M4XVSS_NV12_H -#define M4XVSS_NV12_H - -M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toNV12(M4OSA_Void* pFileIn, - M4OSA_FileReadPointer* pFileReadPtr, M4VIFI_ImagePlane* pImagePlanes, - M4OSA_UInt32 width, M4OSA_UInt32 height); - -M4OSA_ERR M4xVSS_PictureCallbackFct_NV12(M4OSA_Void* pPictureCtxt, - M4VIFI_ImagePlane* pImagePlanes, M4OSA_Double* pPictureDuration); - -M4OSA_ERR M4VSS3GPP_externalVideoEffectColor_NV12(M4OSA_Void *pFunctionContext, - M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut, - M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind); - -M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming_NV12(M4OSA_Void *userData, - M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut, - M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind); - -M4OSA_ERR M4xVSS_internalConvertRGBtoNV12(M4xVSS_FramingStruct* framingCtx); - -M4OSA_ERR M4xVSS_AlphaMagic_NV12(M4OSA_Void *userData, M4VIFI_ImagePlane *PlaneIn1, - M4VIFI_ImagePlane *PlaneIn2, M4VIFI_ImagePlane *PlaneOut, - M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiTransitionKind); - -M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toNV12(M4OSA_Void* pFileIn, - M4OSA_FileReadPointer* pFileReadPtr, M4VIFI_ImagePlane* pImagePlanes, - M4OSA_UInt32 width,M4OSA_UInt32 height); - -M4OSA_ERR M4xVSS_internalConvertARGB888toNV12_FrammingEffect(M4OSA_Context pContext, - M4VSS3GPP_EffectSettings* pEffect, M4xVSS_FramingStruct* framingCtx, - M4VIDEOEDITING_VideoFrameSize OutputVideoResolution); - -M4OSA_ERR M4xVSS_AlphaMagicBlending_NV12(M4OSA_Void *userData, - M4VIFI_ImagePlane *PlaneIn1, M4VIFI_ImagePlane *PlaneIn2, - M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress, - M4OSA_UInt32 uiTransitionKind); - -M4OSA_ERR M4xVSS_SlideTransition_NV12(M4OSA_Void *userData, - M4VIFI_ImagePlane *PlaneIn1, M4VIFI_ImagePlane *PlaneIn2, - M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress, - M4OSA_UInt32 uiTransitionKind); - -M4OSA_ERR M4xVSS_FadeBlackTransition_NV12(M4OSA_Void *userData, - M4VIFI_ImagePlane *PlaneIn1, M4VIFI_ImagePlane *PlaneIn2, - M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress, - M4OSA_UInt32 uiTransitionKind); - -#endif diff --git a/frameworks/videoedit/vss/M4xVSS_internal_NV12.c b/frameworks/videoedit/vss/M4xVSS_internal_NV12.c deleted file mode 100644 index d92cb39..0000000 --- a/frameworks/videoedit/vss/M4xVSS_internal_NV12.c +++ /dev/null @@ -1,3533 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. 
- * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include "M4OSA_Debug.h" -#include "M4OSA_CharStar.h" - -#include "NXPSW_CompilerSwitches.h" - -#include "M4VSS3GPP_API.h" -#include "M4VSS3GPP_ErrorCodes.h" - -#include "M4xVSS_API.h" -#include "M4xVSS_Internal.h" - - -/*for rgb16 color effect*/ -#include "M4VIFI_Defines.h" -#include "M4VIFI_Clip.h" - -/** - * component includes */ -#include "M4VFL_transition.h" /**< video effects */ - -/* Internal header file of VSS is included because of MMS use case */ -#include "M4VSS3GPP_InternalTypes.h" - -/*Exif header files to add image rendering support (cropping, black borders)*/ -#include "M4EXIFC_CommonAPI.h" -// StageFright encoders require %16 resolution -#include "M4ENCODER_common.h" - -#include "M4AIR_API_NV12.h" -#include "VideoEditorToolsNV12.h" - -#define TRANSPARENT_COLOR 0x7E0 -#define LUM_FACTOR_MAX 10 - -/** - ****************************************************************************** - * M4VIFI_UInt8 M4VIFI_RGB565toNV12 (void *pUserData, - * M4VIFI_ImagePlane *pPlaneIn, - * M4VIFI_ImagePlane *pPlaneOut) - * @brief transform RGB565 image to a NV12 image. 
- * @note Convert RGB565 to NV12, - * Loop on each row ( 2 rows by 2 rows ) - * Loop on each column ( 2 col by 2 col ) - * Get 4 RGB samples from input data and build 4 output Y samples - * and each single U & V data - * end loop on col - * end loop on row - * @param pUserData: (IN) User Specific Data - * @param pPlaneIn: (IN) Pointer to RGB565 Plane - * @param pPlaneOut: (OUT) Pointer to NV12 buffer Plane - * @return M4VIFI_OK: there is no error - * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: YUV Plane height is ODD - * @return M4VIFI_ILLEGAL_FRAME_WIDTH: YUV Plane width is ODD - ****************************************************************************** -*/ - -M4VIFI_UInt8 M4VIFI_RGB565toNV12(void *pUserData, M4VIFI_ImagePlane *pPlaneIn, - M4VIFI_ImagePlane *pPlaneOut) -{ - M4VIFI_UInt32 u32_width, u32_height; - M4VIFI_UInt32 u32_stride_Y, u32_stride2_Y, u32_stride_UV; - M4VIFI_UInt32 u32_stride_rgb, u32_stride_2rgb; - M4VIFI_UInt32 u32_col, u32_row; - - M4VIFI_Int32 i32_r00, i32_r01, i32_r10, i32_r11; - M4VIFI_Int32 i32_g00, i32_g01, i32_g10, i32_g11; - M4VIFI_Int32 i32_b00, i32_b01, i32_b10, i32_b11; - M4VIFI_Int32 i32_y00, i32_y01, i32_y10, i32_y11; - M4VIFI_Int32 i32_u00, i32_u01, i32_u10, i32_u11; - M4VIFI_Int32 i32_v00, i32_v01, i32_v10, i32_v11; - M4VIFI_UInt8 *pu8_yn, *pu8_ys, *pu8_u, *pu8_v; - M4VIFI_UInt8 *pu8_y_data, *pu8_u_data, *pu8_v_data; - M4VIFI_UInt8 *pu8_rgbn_data, *pu8_rgbn; - M4VIFI_UInt16 u16_pix1, u16_pix2, u16_pix3, u16_pix4; - - /* Check planes height are appropriate */ - if ((pPlaneIn->u_height != pPlaneOut[0].u_height) || - (pPlaneOut[0].u_height != (pPlaneOut[1].u_height<<1))) - { - return M4VIFI_ILLEGAL_FRAME_HEIGHT; - } - - /* Check planes width are appropriate */ - if ((pPlaneIn->u_width != pPlaneOut[0].u_width) || - (pPlaneOut[0].u_width != pPlaneOut[1].u_width)) - { - return M4VIFI_ILLEGAL_FRAME_WIDTH; - } - - /* Set the pointer to the beginning of the output data buffers */ - pu8_y_data = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft; - pu8_u_data = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft; - pu8_v_data = pu8_u_data + 1; - - /* Set the pointer to the beginning of the input data buffers */ - pu8_rgbn_data = pPlaneIn->pac_data + pPlaneIn->u_topleft; - - /* Get the size of the output image */ - u32_width = pPlaneOut[0].u_width; - u32_height = pPlaneOut[0].u_height; - - /* Set the size of the memory jumps corresponding to row jump in each output plane */ - u32_stride_Y = pPlaneOut[0].u_stride; - u32_stride2_Y = u32_stride_Y << 1; - u32_stride_UV = pPlaneOut[1].u_stride; - - /* Set the size of the memory jumps corresponding to row jump in input plane */ - u32_stride_rgb = pPlaneIn->u_stride; - u32_stride_2rgb = u32_stride_rgb << 1; - - - /* Loop on each row of the output image, input coordinates are estimated from output ones */ - /* Two YUV rows are computed at each pass */ - - for (u32_row = u32_height ;u32_row != 0; u32_row -=2) - { - /* Current Y plane row pointers */ - pu8_yn = pu8_y_data; - /* Next Y plane row pointers */ - pu8_ys = pu8_yn + u32_stride_Y; - /* Current U plane row pointer */ - pu8_u = pu8_u_data; - /* Current V plane row pointer */ - pu8_v = pu8_v_data; - - pu8_rgbn = pu8_rgbn_data; - - /* Loop on each column of the output image */ - for (u32_col = u32_width; u32_col != 0 ; u32_col -=2) - { - /* Get four RGB 565 samples from input data */ - u16_pix1 = *( (M4VIFI_UInt16 *) pu8_rgbn); - u16_pix2 = *( (M4VIFI_UInt16 *) (pu8_rgbn + CST_RGB_16_SIZE)); - u16_pix3 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb)); - u16_pix4 = *( 
(M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb + CST_RGB_16_SIZE)); - - /* Unpack RGB565 to 8bit R, G, B */ - /* (x,y) */ - GET_RGB565(i32_r00,i32_g00,i32_b00,u16_pix1); - /* (x+1,y) */ - GET_RGB565(i32_r10,i32_g10,i32_b10,u16_pix2); - /* (x,y+1) */ - GET_RGB565(i32_r01,i32_g01,i32_b01,u16_pix3); - /* (x+1,y+1) */ - GET_RGB565(i32_r11,i32_g11,i32_b11,u16_pix4); - - /* Convert RGB value to YUV */ - i32_u00 = U16(i32_r00, i32_g00, i32_b00); - i32_v00 = V16(i32_r00, i32_g00, i32_b00); - /* luminance value */ - i32_y00 = Y16(i32_r00, i32_g00, i32_b00); - - i32_u10 = U16(i32_r10, i32_g10, i32_b10); - i32_v10 = V16(i32_r10, i32_g10, i32_b10); - /* luminance value */ - i32_y10 = Y16(i32_r10, i32_g10, i32_b10); - - i32_u01 = U16(i32_r01, i32_g01, i32_b01); - i32_v01 = V16(i32_r01, i32_g01, i32_b01); - /* luminance value */ - i32_y01 = Y16(i32_r01, i32_g01, i32_b01); - - i32_u11 = U16(i32_r11, i32_g11, i32_b11); - i32_v11 = V16(i32_r11, i32_g11, i32_b11); - /* luminance value */ - i32_y11 = Y16(i32_r11, i32_g11, i32_b11); - - /* Store luminance data */ - pu8_yn[0] = (M4VIFI_UInt8)i32_y00; - pu8_yn[1] = (M4VIFI_UInt8)i32_y10; - pu8_ys[0] = (M4VIFI_UInt8)i32_y01; - pu8_ys[1] = (M4VIFI_UInt8)i32_y11; - - /* Store chroma data */ - *pu8_u = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2); - *pu8_v = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2); - - /* Prepare for next column */ - pu8_rgbn += (CST_RGB_16_SIZE<<1); - /* Update current Y plane line pointer*/ - pu8_yn += 2; - /* Update next Y plane line pointer*/ - pu8_ys += 2; - /* Update U plane line pointer*/ - pu8_u += 2; - /* Update V plane line pointer*/ - pu8_v += 2; - } /* End of horizontal scanning */ - - /* Prepare pointers for the next row */ - pu8_y_data += u32_stride2_Y; - pu8_u_data += u32_stride_UV; - pu8_v_data += u32_stride_UV; - pu8_rgbn_data += u32_stride_2rgb; - - - } /* End of vertical scanning */ - - return M4VIFI_OK; -} - - -unsigned char M4VFL_modifyLumaWithScale_NV12(M4ViComImagePlane *plane_in, - M4ViComImagePlane *plane_out, unsigned long lum_factor, - void *user_data) -{ - unsigned short *p_src, *p_dest, *p_src_line, *p_dest_line; - unsigned char *p_csrc, *p_cdest, *p_csrc_line, *p_cdest_line; - unsigned long pix_src; - unsigned long u_outpx, u_outpx2; - unsigned long u_width, u_stride, u_stride_out,u_height, pix; - long i, j; - - /* copy or filter chroma */ - u_width = plane_in[1].u_width; - u_height = plane_in[1].u_height; - u_stride = plane_in[1].u_stride; - u_stride_out = plane_out[1].u_stride; - p_cdest_line = (unsigned char *) &plane_out[1].pac_data[plane_out[1].u_topleft]; - p_csrc_line = (unsigned char *) &plane_in[1].pac_data[plane_in[1].u_topleft]; - - if (lum_factor > 256) - { - /* copy chroma */ - for (j = u_height; j != 0; j--) - { - memcpy((void *)p_cdest_line, (void *)p_csrc_line, u_width); - p_cdest_line += u_stride_out; - p_csrc_line += u_stride; - } - } - else - { - /* filter chroma */ - pix = (1024 - lum_factor) << 7; - for (j = u_height; j != 0; j--) - { - p_cdest = p_cdest_line; - p_csrc = p_csrc_line; - for (i = u_width; i != 0; i--) - { - *p_cdest++ = ((pix + (*p_csrc++ & 0xFF) * lum_factor) >> LUM_FACTOR_MAX); - } - p_cdest_line += u_stride_out; - p_csrc_line += u_stride; - } - } - - /* apply luma factor */ - u_width = plane_in[0].u_width; - u_height = plane_in[0].u_height; - u_stride = (plane_in[0].u_stride >> 1); - u_stride_out = (plane_out[0].u_stride >> 1); - p_dest = (unsigned short *) &plane_out[0].pac_data[plane_out[0].u_topleft]; - p_src = (unsigned short *) 
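
The fixed-point arithmetic in this fade is easier to read in isolation: lum_factor is a Q10 factor from 0 to 1024 (LUM_FACTOR_MAX is the shift of 10), luma is always scaled directly, and in the filtering branch chroma is blended toward the neutral value 128, since (1024 - lum_factor) << 7 equals (1024 - lum_factor) * 128; when lum_factor is above 256 the function simply copies chroma. A sketch of the per-sample math (fade_luma and fade_chroma are illustrative names, not from this file):

    #include <stdint.h>

    /* f is the Q10 fade factor, 0..1024; 1024 leaves the sample unchanged. */
    static uint8_t fade_luma(uint8_t y, uint32_t f)
    {
        return (uint8_t)(((uint32_t)y * f) >> 10);
    }

    /* Blend chroma toward neutral 128: ((1024 - f) * 128 + c * f) >> 10. */
    static uint8_t fade_chroma(uint8_t c, uint32_t f)
    {
        return (uint8_t)((((1024u - f) << 7) + (uint32_t)c * f) >> 10);
    }
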
&plane_in[0].pac_data[plane_in[0].u_topleft]; - p_dest_line = p_dest; - p_src_line = p_src; - - for (j = u_height; j != 0; j--) - { - p_dest = p_dest_line; - p_src = p_src_line; - for (i = (u_width >> 1); i != 0; i--) - { - pix_src = (unsigned long) *p_src++; - pix = pix_src & 0xFF; - u_outpx = ((pix * lum_factor) >> LUM_FACTOR_MAX); - pix = ((pix_src & 0xFF00) >> 8); - u_outpx2 = (((pix * lum_factor) >> LUM_FACTOR_MAX)<< 8) ; - *p_dest++ = (unsigned short) (u_outpx2 | u_outpx); - } - p_dest_line += u_stride_out; - p_src_line += u_stride; - } - return 0; -} - -unsigned char M4VIFI_ImageBlendingonNV12 (void *pUserData, - M4ViComImagePlane *pPlaneIn1, M4ViComImagePlane *pPlaneIn2, - M4ViComImagePlane *pPlaneOut, UInt32 Progress) -{ - UInt8 *pu8_data_Y_start1, *pu8_data_Y_start2, *pu8_data_Y_start3; - UInt8 *pu8_data_UV_start1, *pu8_data_UV_start2, *pu8_data_UV_start3; - UInt8 *pu8_data_Y_current1, *pu8_data_Y_next1; - UInt8 *pu8_data_Y_current2, *pu8_data_Y_next2; - UInt8 *pu8_data_Y_current3, *pu8_data_Y_next3; - UInt8 *pu8_data_UV1, *pu8_data_UV2, *pu8_data_UV3; - UInt32 u32_stride_Y1, u32_stride2_Y1, u32_stride_UV1; - UInt32 u32_stride_Y2, u32_stride2_Y2, u32_stride_UV2; - UInt32 u32_stride_Y3, u32_stride2_Y3, u32_stride_UV3; - UInt32 u32_height, u32_width; - UInt32 u32_blendfactor, u32_startA, u32_endA, u32_blend_inc, u32_x_accum; - UInt32 u32_col, u32_row, u32_rangeA, u32_progress; - UInt32 u32_U1,u32_V1,u32_U2,u32_V2, u32_Y1, u32_Y2; - - /* Check the Y plane height is EVEN and image plane heights are same */ - if( (IS_EVEN(pPlaneIn1[0].u_height) == FALSE) || - (IS_EVEN(pPlaneIn2[0].u_height) == FALSE) || - (IS_EVEN(pPlaneOut[0].u_height) == FALSE) || - (pPlaneIn1[0].u_height != pPlaneOut[0].u_height) || - (pPlaneIn2[0].u_height != pPlaneOut[0].u_height) ) - { - return M4VIFI_ILLEGAL_FRAME_HEIGHT; - } - - /* Check the Y plane width is EVEN and image plane widths are same */ - if( (IS_EVEN(pPlaneIn1[0].u_width) == FALSE) || - (IS_EVEN(pPlaneIn2[0].u_width) == FALSE) || - (IS_EVEN(pPlaneOut[0].u_width) == FALSE) || - (pPlaneIn1[0].u_width != pPlaneOut[0].u_width) || - (pPlaneIn2[0].u_width != pPlaneOut[0].u_width) ) - { - return M4VIFI_ILLEGAL_FRAME_WIDTH; - } - /* Set the pointer to the beginning of the input1 NV12 image planes */ - pu8_data_Y_start1 = pPlaneIn1[0].pac_data + pPlaneIn1[0].u_topleft; - pu8_data_UV_start1 = pPlaneIn1[1].pac_data + pPlaneIn1[1].u_topleft; - - /* Set the pointer to the beginning of the input2 NV12 image planes */ - pu8_data_Y_start2 = pPlaneIn2[0].pac_data + pPlaneIn2[0].u_topleft; - pu8_data_UV_start2 = pPlaneIn2[1].pac_data + pPlaneIn2[1].u_topleft; - - /* Set the pointer to the beginning of the output NV12 image planes */ - pu8_data_Y_start3 = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft; - pu8_data_UV_start3 = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft; - - /* Set the stride for the next row in each input1 NV12 plane */ - u32_stride_Y1 = pPlaneIn1[0].u_stride; - u32_stride_UV1 = pPlaneIn1[1].u_stride; - - /* Set the stride for the next row in each input2 NV12 plane */ - u32_stride_Y2 = pPlaneIn2[0].u_stride; - u32_stride_UV2 = pPlaneIn2[1].u_stride; - - /* Set the stride for the next row in each output NV12 plane */ - u32_stride_Y3 = pPlaneOut[0].u_stride; - u32_stride_UV3 = pPlaneOut[1].u_stride; - - u32_stride2_Y1 = u32_stride_Y1 << 1; - u32_stride2_Y2 = u32_stride_Y2 << 1; - u32_stride2_Y3 = u32_stride_Y3 << 1; - - /* Get the size of the output image */ - u32_height = pPlaneOut[0].u_height; - u32_width = pPlaneOut[0].u_width; - - /* User 
Specified Progress value */ - u32_progress = Progress; - - /* Map Progress value from (0 - 1000) to (0 - 1024) -> for optimisation */ - if(u32_progress < 1000) - u32_progress = ((u32_progress << 10) / 1000); - else - u32_progress = 1024; - - /* Set the range of blendingfactor */ - if(u32_progress <= 512) - { - u32_startA = 0; - u32_endA = (u32_progress << 1); - } - else /* u32_progress > 512 */ - { - u32_startA = (u32_progress - 512) << 1; - u32_endA = 1024; - } - u32_rangeA = u32_endA - u32_startA; - - /* Set the increment of blendingfactor for each element in the image row */ - if ((u32_width >= u32_rangeA) && (u32_rangeA > 0) ) - { - u32_blend_inc = ((u32_rangeA-1) * MAX_SHORT) / (u32_width - 1); - } - else /* (u32_width < u32_rangeA) || (u32_rangeA < 0) */ - { - u32_blend_inc = (u32_rangeA * MAX_SHORT) / (u32_width); - } - /* Two YUV420 rows are computed at each pass */ - for (u32_row = u32_height; u32_row != 0; u32_row -=2) - { - /* Set pointers to the beginning of the row for each input image1 plane */ - pu8_data_Y_current1 = pu8_data_Y_start1; - pu8_data_UV1 = pu8_data_UV_start1; - - /* Set pointers to the beginning of the row for each input image2 plane */ - pu8_data_Y_current2 = pu8_data_Y_start2; - pu8_data_UV2 = pu8_data_UV_start2; - - /* Set pointers to the beginning of the row for each output image plane */ - pu8_data_Y_current3 = pu8_data_Y_start3; - pu8_data_UV3 = pu8_data_UV_start3; - - /* Set pointers to the beginning of the next row for image luma plane */ - pu8_data_Y_next1 = pu8_data_Y_current1 + u32_stride_Y1; - pu8_data_Y_next2 = pu8_data_Y_current2 + u32_stride_Y2; - pu8_data_Y_next3 = pu8_data_Y_current3 + u32_stride_Y3; - - /* Initialise blendfactor */ - u32_blendfactor = u32_startA; - /* Blendfactor Increment accumulator */ - u32_x_accum = 0; - - /* Loop on each column of the output image */ - for (u32_col = u32_width; u32_col != 0 ; u32_col -=2) - { - /* Update the blending factor */ - u32_blendfactor = u32_startA + (u32_x_accum >> 16); - - /* Get Luma value (x,y) of input Image1 */ - u32_Y1 = *pu8_data_Y_current1++; - - /* Get chrominance2 value */ - u32_U1 = *pu8_data_UV1++; - u32_V1 = *pu8_data_UV1++; - - /* Get Luma value (x,y) of input Image2 */ - u32_Y2 = *pu8_data_Y_current2++; - - /* Get chrominance2 value */ - u32_U2 = *pu8_data_UV2++; - u32_V2 = *pu8_data_UV2++; - - /* Compute Luma value (x,y) of Output image */ - *pu8_data_Y_current3++ = (UInt8)((u32_blendfactor * u32_Y2 + - (1024 - u32_blendfactor)*u32_Y1) >> 10); - /* Compute chroma(U) value of Output image */ - *pu8_data_UV3++ = (UInt8)((u32_blendfactor * u32_U2 + - (1024 - u32_blendfactor)*u32_U1) >> 10); - /* Compute chroma(V) value of Output image */ - *pu8_data_UV3++ = (UInt8)((u32_blendfactor * u32_V2 + - (1024 - u32_blendfactor)*u32_V1) >> 10); - - /* Get Luma value (x,y+1) of input Image1 */ - u32_Y1 = *pu8_data_Y_next1++; - - /* Get Luma value (x,y+1) of input Image2 */ - u32_Y2 = *pu8_data_Y_next2++; - - /* Compute Luma value (x,y+1) of Output image*/ - *pu8_data_Y_next3++ = (UInt8)((u32_blendfactor * u32_Y2 + - (1024 - u32_blendfactor)*u32_Y1) >> 10); - /* Update accumulator */ - u32_x_accum += u32_blend_inc; - - /* Update the blending factor */ - u32_blendfactor = u32_startA + (u32_x_accum >> 16); - - /* Get Luma value (x+1,y) of input Image1 */ - u32_Y1 = *pu8_data_Y_current1++; - - /* Get Luma value (x+1,y) of input Image2 */ - u32_Y2 = *pu8_data_Y_current2++; - - /* Compute Luma value (x+1,y) of Output image*/ - *pu8_data_Y_current3++ = (UInt8)((u32_blendfactor * u32_Y2 + - (1024 - 
u32_blendfactor)*u32_Y1) >> 10); - - /* Get Luma value (x+1,y+1) of input Image1 */ - u32_Y1 = *pu8_data_Y_next1++; - - /* Get Luma value (x+1,y+1) of input Image2 */ - u32_Y2 = *pu8_data_Y_next2++; - - /* Compute Luma value (x+1,y+1) of Output image*/ - *pu8_data_Y_next3++ = (UInt8)((u32_blendfactor * u32_Y2 + - (1024 - u32_blendfactor)*u32_Y1) >> 10); - /* Update accumulator */ - u32_x_accum += u32_blend_inc; - - /* Working pointers are incremented just after each storage */ - - }/* End of row scanning */ - - /* Update working pointer of input image1 for next row */ - pu8_data_Y_start1 += u32_stride2_Y1; - pu8_data_UV_start1 += u32_stride_UV1; - - /* Update working pointer of input image2 for next row */ - pu8_data_Y_start2 += u32_stride2_Y2; - pu8_data_UV_start2 += u32_stride_UV2; - - /* Update working pointer of output image for next row */ - pu8_data_Y_start3 += u32_stride2_Y3; - pu8_data_UV_start3 += u32_stride_UV3; - - }/* End of column scanning */ - - return M4VIFI_OK; -} - - -/** - ****************************************************************************** - * M4VIFI_UInt8 M4VIFI_xVSS_RGB565toNV12 (void *pUserData, - * M4VIFI_ImagePlane *pPlaneIn, - * M4VIFI_ImagePlane *pPlaneOut) - * @brief transform RGB565 image to a NV12 image. - * @note Convert RGB565 to NV12, - * Loop on each row ( 2 rows by 2 rows ) - * Loop on each column ( 2 col by 2 col ) - * Get 4 RGB samples from input data and build 4 output Y samples - * and each single U & V data - * end loop on col - * end loop on row - * The transparent color (0, 63, 0) is mapped to white (31, 63, 31) - * @param pUserData: (IN) User Specific Data - * @param pPlaneIn: (IN) Pointer to RGB565 Plane - * @param pPlaneOut: (OUT) Pointer to NV12 buffer Plane - * @return M4VIFI_OK: there is no error - * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: YUV Plane height is ODD - * @return M4VIFI_ILLEGAL_FRAME_WIDTH: YUV Plane width is ODD - ****************************************************************************** -*/ -M4VIFI_UInt8 M4VIFI_xVSS_RGB565toNV12(void *pUserData, M4VIFI_ImagePlane *pPlaneIn, - M4VIFI_ImagePlane *pPlaneOut) -{ - M4VIFI_UInt32 u32_width, u32_height; - M4VIFI_UInt32 u32_stride_Y, u32_stride2_Y, u32_stride_UV; - M4VIFI_UInt32 u32_stride_rgb, u32_stride_2rgb; - M4VIFI_UInt32 u32_col, u32_row; - - M4VIFI_Int32 i32_r00, i32_r01, i32_r10, i32_r11; - M4VIFI_Int32 i32_g00, i32_g01, i32_g10, i32_g11; - M4VIFI_Int32 i32_b00, i32_b01, i32_b10, i32_b11; - M4VIFI_Int32 i32_y00, i32_y01, i32_y10, i32_y11; - M4VIFI_Int32 i32_u00, i32_u01, i32_u10, i32_u11; - M4VIFI_Int32 i32_v00, i32_v01, i32_v10, i32_v11; - M4VIFI_UInt8 *pu8_yn, *pu8_ys, *pu8_u, *pu8_v; - M4VIFI_UInt8 *pu8_y_data, *pu8_u_data, *pu8_v_data; - M4VIFI_UInt8 *pu8_rgbn_data, *pu8_rgbn; - M4VIFI_UInt16 u16_pix1, u16_pix2, u16_pix3, u16_pix4; - M4VIFI_UInt8 count_null=0; - - /* Check planes height are appropriate */ - if( (pPlaneIn->u_height != pPlaneOut[0].u_height) || - (pPlaneOut[0].u_height != (pPlaneOut[1].u_height<<1))) - { - return M4VIFI_ILLEGAL_FRAME_HEIGHT; - } - - /* Check planes width are appropriate */ - if( (pPlaneIn->u_width != pPlaneOut[0].u_width) || - (pPlaneOut[0].u_width != pPlaneOut[1].u_width)) - { - return M4VIFI_ILLEGAL_FRAME_WIDTH; - } - - /* Set the pointer to the beginning of the output data buffers */ - pu8_y_data = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft; - pu8_u_data = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft; - pu8_v_data = pu8_u_data + 1; - - /* Set the pointer to the beginning of the input data buffers */ - pu8_rgbn_data = pPlaneIn->pac_data + pPlaneIn->u_topleft; - - /* Get the size of the output image 
*/ - u32_width = pPlaneOut[0].u_width; - u32_height = pPlaneOut[0].u_height; - - /* Set the size of the memory jumps corresponding to row jump in each output plane */ - u32_stride_Y = pPlaneOut[0].u_stride; - u32_stride2_Y = u32_stride_Y << 1; - u32_stride_UV = pPlaneOut[1].u_stride; - - /* Set the size of the memory jumps corresponding to row jump in input plane */ - u32_stride_rgb = pPlaneIn->u_stride; - u32_stride_2rgb = u32_stride_rgb << 1; - - /* Loop on each row of the output image, input coordinates are estimated from output ones */ - /* Two YUV rows are computed at each pass */ - for (u32_row = u32_height ;u32_row != 0; u32_row -=2) - { - /* Current Y plane row pointers */ - pu8_yn = pu8_y_data; - /* Next Y plane row pointers */ - pu8_ys = pu8_yn + u32_stride_Y; - /* Current U plane row pointer */ - pu8_u = pu8_u_data; - /* Current V plane row pointer */ - pu8_v = pu8_v_data; - - pu8_rgbn = pu8_rgbn_data; - - /* Loop on each column of the output image */ - for (u32_col = u32_width; u32_col != 0 ; u32_col -=2) - { - /* Get four RGB 565 samples from input data */ - u16_pix1 = *( (M4VIFI_UInt16 *) pu8_rgbn); - u16_pix2 = *( (M4VIFI_UInt16 *) (pu8_rgbn + CST_RGB_16_SIZE)); - u16_pix3 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb)); - u16_pix4 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb + CST_RGB_16_SIZE)); - - /* Unpack RGB565 to 8bit R, G, B */ - /* (x,y) */ - GET_RGB565(i32_b00,i32_g00,i32_r00,u16_pix1); - /* (x+1,y) */ - GET_RGB565(i32_b10,i32_g10,i32_r10,u16_pix2); - /* (x,y+1) */ - GET_RGB565(i32_b01,i32_g01,i32_r01,u16_pix3); - /* (x+1,y+1) */ - GET_RGB565(i32_b11,i32_g11,i32_r11,u16_pix4); - /* If RGB is transparent color (0, 63, 0), we transform it to white (31,63,31) */ - if(i32_b00 == 0 && i32_g00 == 63 && i32_r00 == 0) - { - i32_b00 = 31; - i32_r00 = 31; - } - if(i32_b10 == 0 && i32_g10 == 63 && i32_r10 == 0) - { - i32_b10 = 31; - i32_r10 = 31; - } - if(i32_b01 == 0 && i32_g01 == 63 && i32_r01 == 0) - { - i32_b01 = 31; - i32_r01 = 31; - } - if(i32_b11 == 0 && i32_g11 == 63 && i32_r11 == 0) - { - i32_b11 = 31; - i32_r11 = 31; - } - /* Convert RGB value to YUV */ - i32_u00 = U16(i32_r00, i32_g00, i32_b00); - i32_v00 = V16(i32_r00, i32_g00, i32_b00); - /* luminance value */ - i32_y00 = Y16(i32_r00, i32_g00, i32_b00); - - i32_u10 = U16(i32_r10, i32_g10, i32_b10); - i32_v10 = V16(i32_r10, i32_g10, i32_b10); - /* luminance value */ - i32_y10 = Y16(i32_r10, i32_g10, i32_b10); - - i32_u01 = U16(i32_r01, i32_g01, i32_b01); - i32_v01 = V16(i32_r01, i32_g01, i32_b01); - /* luminance value */ - i32_y01 = Y16(i32_r01, i32_g01, i32_b01); - - i32_u11 = U16(i32_r11, i32_g11, i32_b11); - i32_v11 = V16(i32_r11, i32_g11, i32_b11); - /* luminance value */ - i32_y11 = Y16(i32_r11, i32_g11, i32_b11); - /* Store luminance data */ - pu8_yn[0] = (M4VIFI_UInt8)i32_y00; - pu8_yn[1] = (M4VIFI_UInt8)i32_y10; - pu8_ys[0] = (M4VIFI_UInt8)i32_y01; - pu8_ys[1] = (M4VIFI_UInt8)i32_y11; - *pu8_u = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2); - *pu8_v = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2); - /* Prepare for next column */ - pu8_rgbn += (CST_RGB_16_SIZE<<1); - /* Update current Y plane line pointer*/ - pu8_yn += 2; - /* Update next Y plane line pointer*/ - pu8_ys += 2; - /* Update U plane line pointer*/ - pu8_u += 2; - /* Update V plane line pointer*/ - pu8_v += 2; - } /* End of horizontal scanning */ - - /* Prepare pointers for the next row */ - pu8_y_data += u32_stride2_Y; - pu8_u_data += u32_stride_UV; - pu8_v_data += u32_stride_UV; - pu8_rgbn_data 
+= u32_stride_2rgb; - - } /* End of vertical scanning */ - - return M4VIFI_OK; -} - - -/** - ****************************************************************************** - * M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toNV12(M4OSA_Void* pFileIn, - * M4OSA_FileReadPointer* pFileReadPtr, - * M4VIFI_ImagePlane* pImagePlanes, - * M4OSA_UInt32 width, - * M4OSA_UInt32 height); - * @brief It converts and resizes an ARGB8888 image to NV12 - * @note - * @param pFileIn (IN) The Image input file - * @param pFileReadPtr (IN) Pointer on filesystem functions - * @param pImagePlanes (IN/OUT) Pointer on NV12 output planes allocated by the user - * ARGB8888 image will be converted and resized to output - * NV12 plane size - * @param width (IN) width of the ARGB8888 - * @param height (IN) height of the ARGB8888 - * @return M4NO_ERROR: No error - * @return M4ERR_ALLOC: memory error - * @return M4ERR_PARAMETER: At least one of the function parameters is null - ****************************************************************************** - */ - -M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toNV12(M4OSA_Void* pFileIn, - M4OSA_FileReadPointer* pFileReadPtr, M4VIFI_ImagePlane* pImagePlanes, - M4OSA_UInt32 width, M4OSA_UInt32 height) -{ - M4OSA_Context pARGBIn; - M4VIFI_ImagePlane rgbPlane1, rgbPlane2; - M4OSA_UInt32 frameSize_argb = (width * height * 4); - M4OSA_UInt32 frameSize = (width * height * 3); //Size of RGB888 data. - M4OSA_UInt32 i = 0, j = 0; - M4OSA_ERR err = M4NO_ERROR; - - - M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, - M4VS, (M4OSA_Char*)"Image argb data"); - M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toNV12 Entering :"); - if(pTmpData == M4OSA_NULL) { - M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toNV12 :\ - Failed to allocate memory for Image clip"); - return M4ERR_ALLOC; - } - - M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toNV12 :width and height %d %d", - width, height); - /* Get file size (mandatory for chunk decoding) */ - err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toNV12 :\ - Can't open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err); - free(pTmpData); - pTmpData = M4OSA_NULL; - goto cleanup; - } - - err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toNV12 Can't read ARGB8888\ - file %s, error: 0x%x\n",pFileIn, err); - pFileReadPtr->closeRead(pARGBIn); - free(pTmpData); - pTmpData = M4OSA_NULL; - goto cleanup; - } - - err = pFileReadPtr->closeRead(pARGBIn); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toNV12 Can't close ARGB8888 \ - file %s, error: 0x%x\n",pFileIn, err); - free(pTmpData); - pTmpData = M4OSA_NULL; - goto cleanup; - } - - rgbPlane1.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS, - (M4OSA_Char*)"Image clip RGB888 data"); - if(rgbPlane1.pac_data == M4OSA_NULL) - { - M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toNV12 \ - Failed to allocate memory for Image clip"); - free(pTmpData); - return M4ERR_ALLOC; - } - - rgbPlane1.u_height = height; - rgbPlane1.u_width = width; - rgbPlane1.u_stride = width*3; - rgbPlane1.u_topleft = 0; - - - /** Remove the alpha channel (byte 0 of each ARGB pixel) */ - for (i=0, j = 0; i < frameSize_argb; i++) { - if ((i % 4) == 0) continue; - rgbPlane1.pac_data[j] = pTmpData[i]; - j++; - } - free(pTmpData); - - /* 
To check if resizing is required with color conversion */ - if(width != pImagePlanes->u_width || height != pImagePlanes->u_height) - { - M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toNV12 Resizing :"); - frameSize = (pImagePlanes->u_width * pImagePlanes->u_height * 3); - rgbPlane2.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS, - (M4OSA_Char*)"Image clip RGB888 data"); - if(rgbPlane2.pac_data == M4OSA_NULL) - { - M4OSA_TRACE1_0("Failed to allocate memory for Image clip"); - free(rgbPlane1.pac_data); - return M4ERR_ALLOC; - } - - rgbPlane2.u_height = pImagePlanes->u_height; - rgbPlane2.u_width = pImagePlanes->u_width; - rgbPlane2.u_stride = pImagePlanes->u_width*3; - rgbPlane2.u_topleft = 0; - - /* Resizing RGB888 to RGB888 */ - err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane1, &rgbPlane2); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_1("error when resizing RGB888 to RGB888: 0x%x\n", err); - free(rgbPlane2.pac_data); - free(rgbPlane1.pac_data); - return err; - } - /*Converting Resized RGB888 to NV12 */ - err = M4VIFI_RGB888toNV12(M4OSA_NULL, &rgbPlane2, pImagePlanes); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_1("error when converting from RGB888 to NV12: 0x%x\n", err); - free(rgbPlane2.pac_data); - free(rgbPlane1.pac_data); - return err; - } - free(rgbPlane2.pac_data); - free(rgbPlane1.pac_data); - - M4OSA_TRACE1_0("RGB888 to NV12 done"); - - - } - else - { - M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toNV12 NO Resizing :"); - err = M4VIFI_RGB888toNV12(M4OSA_NULL, &rgbPlane1, pImagePlanes); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_1("error when converting from RGB888 to NV12: 0x%x\n", err); - } - free(rgbPlane1.pac_data); - - M4OSA_TRACE1_0("RGB888 to NV12 done"); - } -cleanup: - M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toNV12 leaving :"); - return err; -} - - -/** - ****************************************************************************** - * M4OSA_ERR M4xVSS_internalConvertARGB8888toNV12(M4OSA_Void* pFileIn, - * M4OSA_FileReadPointer* pFileReadPtr, - * M4VIFI_ImagePlane* pImagePlanes, - * M4OSA_UInt32 width, - * M4OSA_UInt32 height); - * @brief It converts an ARGB8888 image to NV12 - * @note - * @param pFileIn (IN) The Image input file - * @param pFileReadPtr (IN) Pointer on filesystem functions - * @param pImagePlanes (OUT) Pointer to the NV12 output planes, allocated by this - * function; the ARGB8888 image is converted to NV12 planes - * of the given width and height - * @param width (IN) width of the ARGB8888 - * @param height (IN) height of the ARGB8888 - * @return M4NO_ERROR: No error - * @return M4ERR_ALLOC: memory error - * @return M4ERR_PARAMETER: At least one of the function parameters is null - ****************************************************************************** - */ - -M4OSA_ERR M4xVSS_internalConvertARGB8888toNV12(M4OSA_Void* pFileIn, - M4OSA_FileReadPointer* pFileReadPtr, M4VIFI_ImagePlane** pImagePlanes, - M4OSA_UInt32 width, M4OSA_UInt32 height) -{ - M4OSA_ERR err = M4NO_ERROR; - M4VIFI_ImagePlane *yuvPlane = M4OSA_NULL; - - yuvPlane = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(2*sizeof(M4VIFI_ImagePlane), - M4VS, (M4OSA_Char*)"M4xVSS_internalConvertARGB8888toNV12: Output plane NV12"); - if(yuvPlane == M4OSA_NULL) { - M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toNV12:\ - Failed to allocate memory for Image clip"); - return M4ERR_ALLOC; - } - yuvPlane[0].u_height = height; - yuvPlane[0].u_width = width; - yuvPlane[0].u_stride = width; - yuvPlane[0].u_topleft = 0; - yuvPlane[0].pac_data = 
(M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(yuvPlane[0].u_height \ - * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data"); - - if(yuvPlane[0].pac_data == M4OSA_NULL) { - M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toNV12 \ - Failed to allocate memory for Image clip"); - free(yuvPlane); - return M4ERR_ALLOC; - } - - yuvPlane[1].u_height = yuvPlane[0].u_height >>1; - yuvPlane[1].u_width = yuvPlane[0].u_width; - yuvPlane[1].u_stride = yuvPlane[1].u_width; - yuvPlane[1].u_topleft = 0; - yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height \ - * yuvPlane[0].u_width); - - err = M4xVSS_internalConvertAndResizeARGB8888toNV12(pFileIn,pFileReadPtr, - yuvPlane, width, height); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_1("M4xVSS_internalConvertAndResizeARGB8888toNV12 return error: 0x%x\n", err); - free(yuvPlane); - return err; - } - - *pImagePlanes = yuvPlane; - - M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toNV12: Leaving"); - return err; - -} - -/** - ****************************************************************************** - * M4OSA_ERR M4xVSS_PictureCallbackFct_NV12 (M4OSA_Void* pPictureCtxt, - * M4VIFI_ImagePlane* pImagePlanes, - * M4OSA_UInt32* pPictureDuration); - * @brief It feeds the PTO3GPP with NV12 pictures. - * @note This function is given to the PTO3GPP in the M4PTO3GPP_Params structure - * @param pContext (IN) The integrator own context - * @param pImagePlanes(IN/OUT) Pointer to an array of three valid image planes - * @param pPictureDuration(OUT) Duration of the returned picture - * - * @return M4NO_ERROR: No error - * @return M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one - * @return M4ERR_PARAMETER: At least one of the function parameters is null - ****************************************************************************** - */ -M4OSA_ERR M4xVSS_PictureCallbackFct_NV12(M4OSA_Void* pPictureCtxt, - M4VIFI_ImagePlane* pImagePlanes, M4OSA_Double* pPictureDuration) -{ - M4OSA_ERR err = M4NO_ERROR; - M4OSA_UInt8 last_frame_flag = 0; - M4xVSS_PictureCallbackCtxt* pC = (M4xVSS_PictureCallbackCtxt*) (pPictureCtxt); - - /*Used for pan&zoom*/ - M4OSA_UInt8 tempPanzoomXa = 0; - M4OSA_UInt8 tempPanzoomXb = 0; - M4AIR_Params Params; - /**/ - - /*Used for cropping and black borders*/ - M4OSA_Context pPictureContext = M4OSA_NULL; - M4OSA_FilePosition pictureSize = 0 ; - M4OSA_UInt8* pictureBuffer = M4OSA_NULL; - //M4EXIFC_Context pExifContext = M4OSA_NULL; - M4EXIFC_BasicTags pBasicTags; - M4VIFI_ImagePlane pImagePlanes1 = pImagePlanes[0]; - M4VIFI_ImagePlane pImagePlanes2 = pImagePlanes[1]; - - /**/ - - /** - * Check input parameters */ - M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureCtxt), M4ERR_PARAMETER, - "M4xVSS_PictureCallbackFct_NV12: pPictureCtxt is M4OSA_NULL"); - M4OSA_DEBUG_IF2((M4OSA_NULL==pImagePlanes), M4ERR_PARAMETER, - "M4xVSS_PictureCallbackFct_NV12: pImagePlanes is M4OSA_NULL"); - M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureDuration), M4ERR_PARAMETER, - "M4xVSS_PictureCallbackFct_NV12: pPictureDuration is M4OSA_NULL"); - M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct :Entering"); - /*PR P4ME00003181 In case the image number is 0, pan&zoom can not be used*/ - if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom && pC->m_NbImage == 0) - { - pC->m_pPto3GPPparams->isPanZoom = M4OSA_FALSE; - } - - /*If no cropping/black borders or pan&zoom, just decode and resize the picture*/ - if(pC->m_mediaRendering == M4xVSS_kResizing && M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) - { - /** - * Convert and resize input ARGB8888 file to NV12 */ - 
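
Both ARGB8888 helpers above share the same first step: drop the alpha byte so the RGB888 routines can operate on the remainder. The (i % 4) == 0 test skips byte 0 of every 4-byte pixel, i.e. the A component of an A,R,G,B byte layout. A standalone sketch of that step (argb8888_to_rgb888 is an illustrative name, not part of this patch):

    #include <stddef.h>
    #include <stdint.h>

    /* Pack A,R,G,B source pixels into tight R,G,B triplets by skipping
     * the leading alpha byte -- the same effect as the (i % 4) == 0
     * skip loop in the converters above. */
    static void argb8888_to_rgb888(const uint8_t *argb, uint8_t *rgb,
                                   size_t pixel_count)
    {
        size_t i;
        for (i = 0; i < pixel_count; i++) {
            rgb[3 * i + 0] = argb[4 * i + 1]; /* R */
            rgb[3 * i + 1] = argb[4 * i + 2]; /* G */
            rgb[3 * i + 2] = argb[4 * i + 3]; /* B */
        }
    }
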
/*To support ARGB8888 : */ - M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct_NV12 1: width and height %d %d", - pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); - err = M4xVSS_internalConvertAndResizeARGB8888toNV12(pC->m_FileIn, - pC->m_pFileReadPtr, pImagePlanes,pC->m_pPto3GPPparams->width, - pC->m_pPto3GPPparams->height); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct_NV12: Error when decoding JPEG: 0x%x\n", err); - return err; - } - } - /*In case of cropping, black borders or pan&zoom, call the EXIF reader and the AIR*/ - else - { - /** - * Computes ratios */ - if(pC->m_pDecodedPlane == M4OSA_NULL) - { - /** - * Convert input ARGB8888 file to NV12 */ - M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct_NV12 2: width and height %d %d", - pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); - err = M4xVSS_internalConvertARGB8888toNV12(pC->m_FileIn, pC->m_pFileReadPtr, - &(pC->m_pDecodedPlane),pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct_NV12: Error when decoding JPEG: 0x%x\n", err); - if(pC->m_pDecodedPlane != M4OSA_NULL) - { - /* NV12 planar is returned but allocation is made only once - (contigous planes in memory) */ - if(pC->m_pDecodedPlane->pac_data != M4OSA_NULL) - { - free(pC->m_pDecodedPlane->pac_data); - } - free(pC->m_pDecodedPlane); - pC->m_pDecodedPlane = M4OSA_NULL; - } - return err; - } - } - - /*Initialize AIR Params*/ - Params.m_inputCoord.m_x = 0; - Params.m_inputCoord.m_y = 0; - Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; - Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; - Params.m_outputSize.m_width = pImagePlanes->u_width; - Params.m_outputSize.m_height = pImagePlanes->u_height; - Params.m_bOutputStripe = M4OSA_FALSE; - Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; - - /*Initialize Exif params structure*/ - pBasicTags.orientation = M4COMMON_kOrientationUnknown; - - /** - Pan&zoom params*/ - if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom) - { - /*Save ratio values, they can be reused if the new ratios are 0*/ - tempPanzoomXa = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXa; - tempPanzoomXb = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXb; - - /*Check that the ratio is not 0*/ - /*Check (a) parameters*/ - if(pC->m_pPto3GPPparams->PanZoomXa == 0) - { - M4OSA_UInt8 maxRatio = 0; - if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= - pC->m_pPto3GPPparams->PanZoomTopleftYa) - { - /*The ratio is 0, that means the area of the picture defined with (a) - parameters is bigger than the image size*/ - if(pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa > 1000) - { - /*The oversize is maxRatio*/ - maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa - 1000; - } - } - else - { - /*The ratio is 0, that means the area of the picture defined with (a) - parameters is bigger than the image size*/ - if(pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa > 1000) - { - /*The oversize is maxRatio*/ - maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa - 1000; - } - } - /*Modify the (a) parameters:*/ - if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= maxRatio) - { - /*The (a) topleft parameters can be moved to keep the same area size*/ - pC->m_pPto3GPPparams->PanZoomTopleftXa -= maxRatio; - } - else - { - /*Move the (a) topleft parameter to 0 but the ratio will be also further - modified to match the image size*/ - pC->m_pPto3GPPparams->PanZoomTopleftXa = 0; - } - if(pC->m_pPto3GPPparams->PanZoomTopleftYa >= maxRatio) - 
{ - /*The (a) topleft parameters can be moved to keep the same area size*/ - pC->m_pPto3GPPparams->PanZoomTopleftYa -= maxRatio; - } - else - { - /*Move the (a) topleft parameter to 0 but the ratio will be also further - modified to match the image size*/ - pC->m_pPto3GPPparams->PanZoomTopleftYa = 0; - } - /*The new ratio is the original one*/ - pC->m_pPto3GPPparams->PanZoomXa = tempPanzoomXa; - if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 1000) - { - /*Change the ratio if the area of the picture defined with (a) parameters is - bigger than the image size*/ - pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXa; - } - if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 1000) - { - /*Change the ratio if the area of the picture defined with (a) parameters is - bigger than the image size*/ - pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYa; - } - } - /*Check (b) parameters*/ - if(pC->m_pPto3GPPparams->PanZoomXb == 0) - { - M4OSA_UInt8 maxRatio = 0; - if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= - pC->m_pPto3GPPparams->PanZoomTopleftYb) - { - /*The ratio is 0, that means the area of the picture defined with (b) - parameters is bigger than the image size*/ - if(pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb > 1000) - { - /*The oversize is maxRatio*/ - maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb - 1000; - } - } - else - { - /*The ratio is 0, that means the area of the picture defined with (b) - parameters is bigger than the image size*/ - if(pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb > 1000) - { - /*The oversize is maxRatio*/ - maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb - 1000; - } - } - /*Modify the (b) parameters:*/ - if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= maxRatio) - { - /*The (b) topleft parameters can be moved to keep the same area size*/ - pC->m_pPto3GPPparams->PanZoomTopleftXb -= maxRatio; - } - else - { - /*Move the (b) topleft parameter to 0 but the ratio will be also further - modified to match the image size*/ - pC->m_pPto3GPPparams->PanZoomTopleftXb = 0; - } - if(pC->m_pPto3GPPparams->PanZoomTopleftYb >= maxRatio) - { - /*The (b) topleft parameters can be moved to keep the same area size*/ - pC->m_pPto3GPPparams->PanZoomTopleftYb -= maxRatio; - } - else - { - /*Move the (b) topleft parameter to 0 but the ratio will be also further - modified to match the image size*/ - pC->m_pPto3GPPparams->PanZoomTopleftYb = 0; - } - /*The new ratio is the original one*/ - pC->m_pPto3GPPparams->PanZoomXb = tempPanzoomXb; - if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 1000) - { - /*Change the ratio if the area of the picture defined with (b) parameters is - bigger than the image size*/ - pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXb; - } - if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 1000) - { - /*Change the ratio if the area of the picture defined with (b) parameters is - bigger than the image size*/ - pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYb; - } - } - - /** - * Computes AIR parameters */ -/* Params.m_inputCoord.m_x = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width * - (pC->m_pPto3GPPparams->PanZoomTopleftXa + - (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftXb \ - - pC->m_pPto3GPPparams->PanZoomTopleftXa) * - pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; - 
Params.m_inputCoord.m_y = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height * - (pC->m_pPto3GPPparams->PanZoomTopleftYa + - (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftYb\ - - pC->m_pPto3GPPparams->PanZoomTopleftYa) * - pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; - - Params.m_inputSize.m_width = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width * - (pC->m_pPto3GPPparams->PanZoomXa + - (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) * - pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; - - Params.m_inputSize.m_height = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height * - (pC->m_pPto3GPPparams->PanZoomXa + - (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) * - pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; - */ - // Instead of using pC->m_NbImage we have to use (pC->m_NbImage-1) as pC->m_ImageCounter - // will be x-1 max for x no. of frames - Params.m_inputCoord.m_x = (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width * - (pC->m_pPto3GPPparams->PanZoomTopleftXa + - (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftXb\ - - pC->m_pPto3GPPparams->PanZoomTopleftXa) * - pC->m_ImageCounter) / ((M4OSA_Double)pC->m_NbImage-1))) / 1000)); - Params.m_inputCoord.m_y = - (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height * - (pC->m_pPto3GPPparams->PanZoomTopleftYa + - (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftYb\ - - pC->m_pPto3GPPparams->PanZoomTopleftYa) * - pC->m_ImageCounter) / ((M4OSA_Double)pC->m_NbImage-1))) / 1000)); - - Params.m_inputSize.m_width = - (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width * - (pC->m_pPto3GPPparams->PanZoomXa + - (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb\ - - pC->m_pPto3GPPparams->PanZoomXa) * - pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000)); - - Params.m_inputSize.m_height = - (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height * - (pC->m_pPto3GPPparams->PanZoomXa + - (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb \ - - pC->m_pPto3GPPparams->PanZoomXa) * - pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000)); - - - if((Params.m_inputSize.m_width + Params.m_inputCoord.m_x)\ - > pC->m_pDecodedPlane->u_width) - { - Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width \ - - Params.m_inputCoord.m_x; - } - - if((Params.m_inputSize.m_height + Params.m_inputCoord.m_y)\ - > pC->m_pDecodedPlane->u_height) - { - Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height\ - - Params.m_inputCoord.m_y; - } - - Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; - Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; - } - - - - /** - Picture rendering: Black borders*/ - - if(pC->m_mediaRendering == M4xVSS_kBlackBorders) - { - memset((void *)pImagePlanes[0].pac_data,Y_PLANE_BORDER_VALUE, - (pImagePlanes[0].u_height*pImagePlanes[0].u_stride)); - memset((void *)pImagePlanes[1].pac_data,U_PLANE_BORDER_VALUE, - (pImagePlanes[1].u_height*pImagePlanes[1].u_stride)); - /** - First without pan&zoom*/ - M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: Black borders"); - if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) - { - M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: Black borders without panzoom"); - - switch(pBasicTags.orientation) - { - default: - case M4COMMON_kOrientationUnknown: - Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; - case M4COMMON_kOrientationTopLeft: - case M4COMMON_kOrientationTopRight: - case 
M4COMMON_kOrientationBottomRight: - case M4COMMON_kOrientationBottomLeft: - if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\ - /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height) - //Params.m_inputSize.m_height < Params.m_inputSize.m_width) - { - /*it is height so black borders will be on the top and on the bottom side*/ - Params.m_outputSize.m_width = pImagePlanes->u_width; - Params.m_outputSize.m_height = - (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height \ - * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width); - /*number of lines at the top*/ - pImagePlanes[0].u_topleft = - (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ - -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride; - pImagePlanes[0].u_height = Params.m_outputSize.m_height; - pImagePlanes[1].u_topleft = - (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ - -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[1].u_stride; - pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1; - - } - else - { - /*it is width so black borders will be on the left and right side*/ - Params.m_outputSize.m_height = pImagePlanes->u_height; - Params.m_outputSize.m_width = - (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ - * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height); - - pImagePlanes[0].u_topleft = - (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ - -Params.m_outputSize.m_width))>>1); - pImagePlanes[0].u_width = Params.m_outputSize.m_width; - pImagePlanes[1].u_topleft = - (M4xVSS_ABS((M4OSA_Int32)((pImagePlanes[1].u_width\ - -Params.m_outputSize.m_width)>>1))); - pImagePlanes[1].u_width = Params.m_outputSize.m_width; - - } - break; - case M4COMMON_kOrientationLeftTop: - case M4COMMON_kOrientationLeftBottom: - case M4COMMON_kOrientationRightTop: - case M4COMMON_kOrientationRightBottom: - if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ - /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height) - //Params.m_inputSize.m_height > Params.m_inputSize.m_width) - { - /*it is height so black borders will be on the top and on - the bottom side*/ - Params.m_outputSize.m_height = pImagePlanes->u_width; - Params.m_outputSize.m_width = - (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ - * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_height); - /*number of lines at the top*/ - pImagePlanes[0].u_topleft = - ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ - -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1; - pImagePlanes[0].u_height = Params.m_outputSize.m_width; - pImagePlanes[1].u_topleft = - ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ - -(Params.m_outputSize.m_width>>1)))>>1)\ - *pImagePlanes[1].u_stride)+1; - pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1; - - } - else - { - /*it is width so black borders will be on the left and right side*/ - Params.m_outputSize.m_width = pImagePlanes->u_height; - Params.m_outputSize.m_height = - (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\ - * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_width); - - pImagePlanes[0].u_topleft = - ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ - -Params.m_outputSize.m_height))>>1))+1; - pImagePlanes[0].u_width = Params.m_outputSize.m_height; - pImagePlanes[1].u_topleft = - ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ - -(Params.m_outputSize.m_height>>1)))))+1; - pImagePlanes[1].u_width = Params.m_outputSize.m_height; - - } - break; - } - } - - /** - Secondly with pan&zoom*/ - else - { - M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: Black 
borders with panzoom"); - switch(pBasicTags.orientation) - { - default: - case M4COMMON_kOrientationUnknown: - Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; - case M4COMMON_kOrientationTopLeft: - case M4COMMON_kOrientationTopRight: - case M4COMMON_kOrientationBottomRight: - case M4COMMON_kOrientationBottomLeft: - /*NO ROTATION*/ - if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\ - /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height) - //Params.m_inputSize.m_height < Params.m_inputSize.m_width) - { - /*Black borders will be on the top and bottom of the output video*/ - /*Maximum output height if the input image aspect ratio is kept and if - the output width is the screen width*/ - M4OSA_UInt32 tempOutputSizeHeight = - (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\ - * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width); - M4OSA_UInt32 tempInputSizeHeightMax = 0; - M4OSA_UInt32 tempFinalInputHeight = 0; - /*The output width is the screen width*/ - Params.m_outputSize.m_width = pImagePlanes->u_width; - tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1; - - /*Maximum input height according to the maximum output height - (proportional to the maximum output height)*/ - tempInputSizeHeightMax = (pImagePlanes->u_height\ - *Params.m_inputSize.m_height)/tempOutputSizeHeight; - tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1; - - /*Check if the maximum possible input height is contained into the - input image height*/ - if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_height) - { - /*The maximum possible input height is contained in the input - image height, - that means no black borders, the input pan zoom area will be extended - so that the input AIR height will be the maximum possible*/ - if(((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\ - <= Params.m_inputCoord.m_y - && ((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\ - <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y\ - + Params.m_inputSize.m_height)) - { - /*The input pan zoom area can be extended symmetrically on the - top and bottom side*/ - Params.m_inputCoord.m_y -= ((tempInputSizeHeightMax \ - - Params.m_inputSize.m_height)>>1); - } - else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\ - -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height)) - { - /*There is not enough place above the input pan zoom area to - extend it symmetrically, - so extend it to the maximum on the top*/ - Params.m_inputCoord.m_y = 0; - } - else - { - /*There is not enough place below the input pan zoom area to - extend it symmetrically, - so extend it to the maximum on the bottom*/ - Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height \ - - tempInputSizeHeightMax; - } - /*The input height of the AIR is the maximum possible height*/ - Params.m_inputSize.m_height = tempInputSizeHeightMax; - } - else - { - /*The maximum possible input height is greater than the input - image height, - that means black borders are necessary to keep aspect ratio - The input height of the AIR is all the input image height*/ - Params.m_outputSize.m_height = - (tempOutputSizeHeight*pC->m_pDecodedPlane->u_height)\ - /Params.m_inputSize.m_height; - Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; - Params.m_inputCoord.m_y = 0; - Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; - pImagePlanes[0].u_topleft = - (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ - -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride; - pImagePlanes[0].u_height 
= Params.m_outputSize.m_height; - pImagePlanes[1].u_topleft = - ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ - -(Params.m_outputSize.m_height>>1)))>>1)\ - *pImagePlanes[1].u_stride); - pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1; - - } - } - else - { - /*Black borders will be on the left and right side of the output video*/ - /*Maximum output width if the input image aspect ratio is kept and if the - output height is the screen height*/ - M4OSA_UInt32 tempOutputSizeWidth = - (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ - * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height); - M4OSA_UInt32 tempInputSizeWidthMax = 0; - M4OSA_UInt32 tempFinalInputWidth = 0; - /*The output height is the screen height*/ - Params.m_outputSize.m_height = pImagePlanes->u_height; - tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1; - - /*Maximum input width according to the maximum output width - (proportional to the maximum output width)*/ - tempInputSizeWidthMax = - (pImagePlanes->u_width*Params.m_inputSize.m_width)\ - /tempOutputSizeWidth; - tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1; - - /*Check if the maximum possible input width is contained into the input - image width*/ - if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_width) - { - /*The maximum possible input width is contained in the input - image width, - that means no black borders, the input pan zoom area will be extended - so that the input AIR width will be the maximum possible*/ - if(((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1) \ - <= Params.m_inputCoord.m_x - && ((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1)\ - <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \ - + Params.m_inputSize.m_width)) - { - /*The input pan zoom area can be extended symmetrically on the - right and left side*/ - Params.m_inputCoord.m_x -= ((tempInputSizeWidthMax\ - - Params.m_inputSize.m_width)>>1); - } - else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\ - -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width)) - { - /*There is not enough place above the input pan zoom area to - extend it symmetrically, - so extend it to the maximum on the left*/ - Params.m_inputCoord.m_x = 0; - } - else - { - /*There is not enough place below the input pan zoom area - to extend it symmetrically, - so extend it to the maximum on the right*/ - Params.m_inputCoord.m_x = pC->m_pDecodedPlane->u_width \ - - tempInputSizeWidthMax; - } - /*The input width of the AIR is the maximum possible width*/ - Params.m_inputSize.m_width = tempInputSizeWidthMax; - } - else - { - /*The maximum possible input width is greater than the input - image width, - that means black borders are necessary to keep aspect ratio - The input width of the AIR is all the input image width*/ - Params.m_outputSize.m_width =\ - (tempOutputSizeWidth*pC->m_pDecodedPlane->u_width)\ - /Params.m_inputSize.m_width; - Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; - Params.m_inputCoord.m_x = 0; - Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; - pImagePlanes[0].u_topleft = - (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ - -Params.m_outputSize.m_width))>>1); - pImagePlanes[0].u_width = Params.m_outputSize.m_width; - pImagePlanes[1].u_topleft = - (M4xVSS_ABS((M4OSA_Int32)((pImagePlanes[1].u_width\ - -Params.m_outputSize.m_width)>>1))); - pImagePlanes[1].u_width = Params.m_outputSize.m_width; - - } - } - break; - case M4COMMON_kOrientationLeftTop: - case M4COMMON_kOrientationLeftBottom: - case 
M4COMMON_kOrientationRightTop: - case M4COMMON_kOrientationRightBottom: - /*ROTATION*/ - if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ - /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height) - //Params.m_inputSize.m_height > Params.m_inputSize.m_width) - { - /*Black borders will be on the left and right side of the output video*/ - /*Maximum output height if the input image aspect ratio is kept and if - the output height is the screen width*/ - M4OSA_UInt32 tempOutputSizeHeight = - (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ - /pC->m_pDecodedPlane->u_height); - M4OSA_UInt32 tempInputSizeHeightMax = 0; - M4OSA_UInt32 tempFinalInputHeight = 0; - /*The output width is the screen height*/ - Params.m_outputSize.m_height = pImagePlanes->u_width; - Params.m_outputSize.m_width= pImagePlanes->u_height; - tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1; - - /*Maximum input height according to the maximum output height - (proportional to the maximum output height)*/ - tempInputSizeHeightMax = - (pImagePlanes->u_height*Params.m_inputSize.m_width)\ - /tempOutputSizeHeight; - tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1; - - /*Check if the maximum possible input height is contained into the - input image width (rotation included)*/ - if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_width) - { - /*The maximum possible input height is contained in the input - image width (rotation included), - that means no black borders, the input pan zoom area will be extended - so that the input AIR width will be the maximum possible*/ - if(((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1) \ - <= Params.m_inputCoord.m_x - && ((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1)\ - <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \ - + Params.m_inputSize.m_width)) - { - /*The input pan zoom area can be extended symmetrically on the - right and left side*/ - Params.m_inputCoord.m_x -= ((tempInputSizeHeightMax \ - - Params.m_inputSize.m_width)>>1); - } - else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\ - -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width)) - { - /*There is not enough place on the left of the input pan - zoom area to extend it symmetrically, - so extend it to the maximum on the left*/ - Params.m_inputCoord.m_x = 0; - } - else - { - /*There is not enough place on the right of the input pan zoom - area to extend it symmetrically, - so extend it to the maximum on the right*/ - Params.m_inputCoord.m_x = - pC->m_pDecodedPlane->u_width - tempInputSizeHeightMax; - } - /*The input width of the AIR is the maximum possible width*/ - Params.m_inputSize.m_width = tempInputSizeHeightMax; - } - else - { - /*The maximum possible input height is greater than the input - image width (rotation included), - that means black borders are necessary to keep aspect ratio - The input width of the AIR is all the input image width*/ - Params.m_outputSize.m_width = - (tempOutputSizeHeight*pC->m_pDecodedPlane->u_width)\ - /Params.m_inputSize.m_width; - Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; - Params.m_inputCoord.m_x = 0; - Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; - pImagePlanes[0].u_topleft = - ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ - -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1; - pImagePlanes[0].u_height = Params.m_outputSize.m_width; - pImagePlanes[1].u_topleft = - ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ - 
-(Params.m_outputSize.m_width>>1)))>>1)\ - *pImagePlanes[1].u_stride)+1; - pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1; - - } - } - else - { - /*Black borders will be on the top and bottom of the output video*/ - /*Maximum output width if the input image aspect ratio is kept and if - the output width is the screen height*/ - M4OSA_UInt32 tempOutputSizeWidth = - (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_height)\ - /pC->m_pDecodedPlane->u_width); - M4OSA_UInt32 tempInputSizeWidthMax = 0; - M4OSA_UInt32 tempFinalInputWidth = 0, tempFinalOutputWidth = 0; - /*The output height is the screen width*/ - Params.m_outputSize.m_width = pImagePlanes->u_height; - Params.m_outputSize.m_height= pImagePlanes->u_width; - tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1; - - /*Maximum input width according to the maximum output width - (proportional to the maximum output width)*/ - tempInputSizeWidthMax = - (pImagePlanes->u_width*Params.m_inputSize.m_height)/tempOutputSizeWidth; - tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1; - - /*Check if the maximum possible input width is contained into the input - image height (rotation included)*/ - if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_height) - { - /*The maximum possible input width is contained in the input - image height (rotation included), - that means no black borders, the input pan zoom area will be extended - so that the input AIR height will be the maximum possible*/ - if(((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1) \ - <= Params.m_inputCoord.m_y - && ((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1)\ - <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y \ - + Params.m_inputSize.m_height)) - { - /*The input pan zoom area can be extended symmetrically on - the right and left side*/ - Params.m_inputCoord.m_y -= ((tempInputSizeWidthMax \ - - Params.m_inputSize.m_height)>>1); - } - else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\ - -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height)) - { - /*There is not enough place on the top of the input pan zoom - area to extend it symmetrically, - so extend it to the maximum on the top*/ - Params.m_inputCoord.m_y = 0; - } - else - { - /*There is not enough place on the bottom of the input pan zoom - area to extend it symmetrically, - so extend it to the maximum on the bottom*/ - Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height\ - - tempInputSizeWidthMax; - } - /*The input height of the AIR is the maximum possible height*/ - Params.m_inputSize.m_height = tempInputSizeWidthMax; - } - else - { - /*The maximum possible input width is greater than the input\ - image height (rotation included), - that means black borders are necessary to keep aspect ratio - The input height of the AIR is all the input image height*/ - Params.m_outputSize.m_height = - (tempOutputSizeWidth*pC->m_pDecodedPlane->u_height)\ - /Params.m_inputSize.m_height; - Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; - Params.m_inputCoord.m_y = 0; - Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; - pImagePlanes[0].u_topleft = - ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ - -Params.m_outputSize.m_height))>>1))+1; - pImagePlanes[0].u_width = Params.m_outputSize.m_height; - pImagePlanes[1].u_topleft = - ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ - -(Params.m_outputSize.m_height>>1)))))+1; - pImagePlanes[1].u_width = Params.m_outputSize.m_height; - - } - } - break; - } - } - - /*Width and height have to 
be even*/
- Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
- Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
- Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
- Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
- pImagePlanes[0].u_width = (pImagePlanes[0].u_width>>1)<<1;
- pImagePlanes[1].u_width = (pImagePlanes[1].u_width>>1)<<1;
-
- pImagePlanes[0].u_height = (pImagePlanes[0].u_height>>1)<<1;
- pImagePlanes[1].u_height = (pImagePlanes[1].u_height>>1)<<1;
-
-
- /*Check that values are coherent*/
- if(Params.m_inputSize.m_height == Params.m_outputSize.m_height)
- {
- Params.m_inputSize.m_width = Params.m_outputSize.m_width;
- }
- else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width)
- {
- Params.m_inputSize.m_height = Params.m_outputSize.m_height;
- }
- }
-
- /**
- Picture rendering: Resizing and Cropping*/
- if(pC->m_mediaRendering != M4xVSS_kBlackBorders)
- {
- switch(pBasicTags.orientation)
- {
- default:
- case M4COMMON_kOrientationUnknown:
- Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
- case M4COMMON_kOrientationTopLeft:
- case M4COMMON_kOrientationTopRight:
- case M4COMMON_kOrientationBottomRight:
- case M4COMMON_kOrientationBottomLeft:
- Params.m_outputSize.m_height = pImagePlanes->u_height;
- Params.m_outputSize.m_width = pImagePlanes->u_width;
- break;
- case M4COMMON_kOrientationLeftTop:
- case M4COMMON_kOrientationLeftBottom:
- case M4COMMON_kOrientationRightTop:
- case M4COMMON_kOrientationRightBottom:
- Params.m_outputSize.m_height = pImagePlanes->u_width;
- Params.m_outputSize.m_width = pImagePlanes->u_height;
- break;
- }
- }
-
- /**
- Picture rendering: Cropping*/
- if(pC->m_mediaRendering == M4xVSS_kCropping)
- {
- M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: Cropping");
- if((Params.m_outputSize.m_height * Params.m_inputSize.m_width)\
- /Params.m_outputSize.m_width < Params.m_inputSize.m_height)
- {
- M4OSA_UInt32 tempHeight = Params.m_inputSize.m_height;
- /*height will be cropped*/
- Params.m_inputSize.m_height = (M4OSA_UInt32)((Params.m_outputSize.m_height \
- * Params.m_inputSize.m_width) /Params.m_outputSize.m_width);
- Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
- if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
- {
- Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)\
- ((pC->m_pDecodedPlane->u_height - Params.m_inputSize.m_height))>>1);
- }
- else
- {
- Params.m_inputCoord.m_y += (M4OSA_Int32)((M4OSA_Int32)\
- ((tempHeight - Params.m_inputSize.m_height))>>1);
- }
- }
- else
- {
- M4OSA_UInt32 tempWidth= Params.m_inputSize.m_width;
- /*width will be cropped*/
- Params.m_inputSize.m_width = (M4OSA_UInt32)((Params.m_outputSize.m_width \
- * Params.m_inputSize.m_height) /Params.m_outputSize.m_height);
- Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
- if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
- {
- Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)\
- ((pC->m_pDecodedPlane->u_width - Params.m_inputSize.m_width))>>1);
- }
- else
- {
- Params.m_inputCoord.m_x += (M4OSA_Int32)\
- (((M4OSA_Int32)(tempWidth - Params.m_inputSize.m_width))>>1);
- }
- }
- }
-
- M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: Before AIR functions");
-
- /**
- * Call AIR functions */
- if(M4OSA_NULL == pC->m_air_context)
- {
- err = M4AIR_create_NV12(&pC->m_air_context, M4AIR_kNV12P);
-
- M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: After M4AIR_create_NV12");
-
- if(err != M4NO_ERROR)
- {
- free(pC->m_pDecodedPlane[0].pac_data);
- free(pC->m_pDecodedPlane);
- pC->m_pDecodedPlane = M4OSA_NULL;
- M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct_NV12:\
- Error when initializing AIR: 0x%x", err);
- return err;
- }
- }
-
- err = M4AIR_configure_NV12(pC->m_air_context, &Params);
-
- M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: After M4AIR_configure_NV12");
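- /* Descriptive note: the AIR context is now configured with the window computed above,
- where m_inputCoord/m_inputSize select the source rectangle inside the decoded picture
- and m_outputSize is the NV12 destination size, so the single M4AIR_get_NV12() call
- below performs the crop and the resize in one pass. */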
-
- if(err != M4NO_ERROR)
- {
- M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct_NV12:\
- Error when configuring AIR: 0x%x", err);
- M4AIR_cleanUp_NV12(pC->m_air_context);
- free(pC->m_pDecodedPlane[0].pac_data);
- free(pC->m_pDecodedPlane);
- pC->m_pDecodedPlane = M4OSA_NULL;
- return err;
- }
-
- err = M4AIR_get_NV12(pC->m_air_context, pC->m_pDecodedPlane, pImagePlanes);
- if(err != M4NO_ERROR)
- {
- M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct_NV12: Error when getting AIR plane: 0x%x", err);
- M4AIR_cleanUp_NV12(pC->m_air_context);
- free(pC->m_pDecodedPlane[0].pac_data);
- free(pC->m_pDecodedPlane);
- pC->m_pDecodedPlane = M4OSA_NULL;
- return err;
- }
- pImagePlanes[0] = pImagePlanes1;
- pImagePlanes[1] = pImagePlanes2;
-
- }
-
-
- /**
- * Increment the image counter */
- pC->m_ImageCounter++;
-
- /**
- * Check end of sequence */
- last_frame_flag = (pC->m_ImageCounter >= pC->m_NbImage);
-
- /**
- * Keep the picture duration */
- *pPictureDuration = pC->m_timeDuration;
-
- if (1 == last_frame_flag)
- {
- if(M4OSA_NULL != pC->m_air_context)
- {
- err = M4AIR_cleanUp(pC->m_air_context);
- if(err != M4NO_ERROR)
- {
- M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct_NV12: Error when cleaning AIR: 0x%x", err);
- return err;
- }
- }
- if(M4OSA_NULL != pC->m_pDecodedPlane)
- {
- free(pC->m_pDecodedPlane[0].pac_data);
- free(pC->m_pDecodedPlane);
- pC->m_pDecodedPlane = M4OSA_NULL;
- }
- return M4PTO3GPP_WAR_LAST_PICTURE;
- }
-
- M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct_NV12: Leaving ");
- return M4NO_ERROR;
-}
-
-/**
- ******************************************************************************
- * prototype M4OSA_ERR M4xVSS_internalConvertRGBtoNV12(M4xVSS_FramingStruct* framingCtx)
- * @brief This function converts an RGB565 plane to NV12 planar
- * @note It is used only for the framing effect
- * It allocates the output YUV planes
- * @param framingCtx (IN) The framing struct containing the input RGB565 plane
- *
- * @return M4NO_ERROR: No error
- * @return M4ERR_PARAMETER: At least one of the function parameters is null
- * @return M4ERR_ALLOC: Allocation error (no more memory)
- ******************************************************************************
- */
-M4OSA_ERR M4xVSS_internalConvertRGBtoNV12(M4xVSS_FramingStruct* framingCtx)
-{
- M4OSA_ERR err;
-
- /**
- * Allocate output NV12 planes */
- framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(2*sizeof(M4VIFI_ImagePlane),
- M4VS, (M4OSA_Char *)"M4xVSS_internalConvertRGBtoNV12: Output plane NV12");
- if(framingCtx->FramingYuv == M4OSA_NULL)
- {
- M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoNV12");
- return M4ERR_ALLOC;
- }
- framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
- framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
- framingCtx->FramingYuv[0].u_topleft = 0;
- framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
- framingCtx->FramingYuv[0].pac_data =
- (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width\
- *framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char *)\
- "Alloc for the Conversion output YUV");
-
- if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
- {
- M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoNV12");
- return M4ERR_ALLOC;
- }
- framingCtx->FramingYuv[1].u_width = framingCtx->FramingRgb->u_width;
- framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
- framingCtx->FramingYuv[1].u_topleft = 0;
- framingCtx->FramingYuv[1].u_stride = framingCtx->FramingRgb->u_width;
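- /* Descriptive note: NV12 is a single contiguous buffer holding the full-resolution Y
- plane followed by one interleaved, half-height CbCr plane, i.e. width*height*3/2
- bytes in total (for example 176x144 gives 25344 Y bytes + 12672 UV bytes). The next
- statement therefore only points plane [1] into the plane [0] allocation, which is
- why a single free of pac_data[0] releases both planes elsewhere in this file. */
-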
framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data \ - + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height; - - /** - * Convert input RGB 565 to NV12 to be able to merge it with output video in framing - effect */ - err = M4VIFI_xVSS_RGB565toNV12(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoNV12:\ - error when converting from RGB to NV12: 0x%x\n", err); - } - - framingCtx->duration = 0; - framingCtx->previousClipTime = -1; - framingCtx->previewOffsetClipTime = -1; - - /** - * Only one element in the chained list (no animated image with RGB buffer...) */ - framingCtx->pCurrent = framingCtx; - framingCtx->pNext = framingCtx; - - return M4NO_ERROR; -} - - -static M4OSA_ERR M4xVSS_internalProbeFramingBoundaryNV12(M4xVSS_FramingStruct *framingCtx) -{ - M4OSA_ERR err = M4NO_ERROR; - M4OSA_UInt32 topleft_x, topleft_y, botright_x, botright_y; - M4OSA_UInt32 isTopLeftFound, isBottomRightFound; - M4OSA_UInt32 u32_width, u32_height; - M4OSA_UInt32 u32_stride_rgb, u32_stride_2rgb; - M4OSA_UInt32 u32_col, u32_row; - M4OSA_UInt8 *pu8_rgbn_data, *pu8_rgbn; - M4OSA_UInt16 u16_pix1, u16_pix2, u16_pix3, u16_pix4; - - M4OSA_TRACE1_0("M4xVSS_internalProbeFramingBoundary starts!"); - - if (!framingCtx->exportmode) { - M4OSA_TRACE1_0("Err: not in export mode!"); - return err; - } - topleft_x = 0; - topleft_y = 0; - botright_x = 0; - botright_y = 0; - isTopLeftFound = 0; - isBottomRightFound = 0; - framingCtx->framing_topleft_x = 0; - framingCtx->framing_topleft_y = 0; - framingCtx->framing_bottomright_x = 0; - framingCtx->framing_bottomright_y = 0; - - - /* Set the pointer to the beginning of the input data buffers */ - pu8_rgbn_data = framingCtx->FramingRgb->pac_data + framingCtx->FramingRgb->u_topleft; - - u32_width = framingCtx->FramingRgb->u_width; - u32_height = framingCtx->FramingRgb->u_height; - - /* Set the size of the memory jumps corresponding to row jump in input plane */ - u32_stride_rgb = framingCtx->FramingRgb->u_stride; - u32_stride_2rgb = u32_stride_rgb << 1; - - - /* Loop on each row of the output image, input coordinates are estimated from output ones */ - /* Two YUV rows are computed at each pass */ - for (u32_row = u32_height ;u32_row != 0; u32_row -=2) - { - pu8_rgbn = pu8_rgbn_data; - - /* Loop on each column of the output image */ - for (u32_col = u32_width; u32_col != 0 ; u32_col -=2) - { - /* Get four RGB 565 samples from input data */ - u16_pix1 = *( (M4VIFI_UInt16 *) pu8_rgbn); - u16_pix2 = *( (M4VIFI_UInt16 *) (pu8_rgbn + CST_RGB_16_SIZE)); - u16_pix3 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb)); - u16_pix4 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb + CST_RGB_16_SIZE)); - M4OSA_TRACE1_4("u16_pix1 = 0x%x, u16_pix2 = 0x%x, u16_pix3 = 0x%x, u16_pix4 = 0x%x", - u16_pix1, u16_pix2, u16_pix3, u16_pix4); - if (u16_pix1 != 0xE007 && u16_pix2 != 0xE007 && - u16_pix3 != 0xE007 && u16_pix4 != 0xE007 && !isTopLeftFound) - { - topleft_x = u32_width - (u32_col+1); - topleft_y = u32_height - (u32_row+1); - isTopLeftFound = 1; - } - if (u16_pix1 != 0xE007 && u16_pix2 != 0xE007 && - u16_pix3 != 0xE007 && u16_pix4 != 0xE007) - { - botright_x = u32_width - (u32_col+1); - botright_y = u32_height - (u32_row+1); - isBottomRightFound = 1; - } - - /* Prepare for next column */ - pu8_rgbn += (CST_RGB_16_SIZE<<1); - } /* End of horizontal scanning */ - - /* Prepare pointers for the next row */ - pu8_rgbn_data += u32_stride_2rgb; - - } - M4OSA_TRACE1_2("isTopLeftFound 
= %d, isBottomRightFound = %d", isTopLeftFound, isBottomRightFound);
- if (isTopLeftFound && isBottomRightFound)
- {
- if ((topleft_x < botright_x) && (topleft_y < botright_y))
- {
- framingCtx->framing_topleft_x = (((topleft_x + 1)>>1)<<1) + 2;
- framingCtx->framing_topleft_y = (((topleft_y + 1)>>1)<<1) + 2;
- framingCtx->framing_bottomright_x = (((botright_x- 1)>>1)<<1) - 1;
- framingCtx->framing_bottomright_y = (((botright_y- 1)>>1)<<1) - 1;
- M4OSA_TRACE1_2("framingCtx->framing_topleft_x = %d, framingCtx->framing_topleft_y = %d",
- framingCtx->framing_topleft_x, framingCtx->framing_topleft_y);
- M4OSA_TRACE1_2("framingCtx->framing_bottomright_x = %d, framingCtx->framing_bottomright_y = %d",
- framingCtx->framing_bottomright_x, framingCtx->framing_bottomright_y);
- }
- else
- {
- M4OSA_TRACE1_0("Err: invalid topleft and bottomright!");
- }
- }
- else
- {
- M4OSA_TRACE1_0("Err: failed to find the framing boundaries!");
- }
- return M4NO_ERROR;
-}
-
-
-/**
- ******************************************************************************
- * prototype M4OSA_ERR M4xVSS_internalConvertARGB888toNV12_FrammingEffect(M4OSA_Context pContext,
- * M4VSS3GPP_EffectSettings* pEffect,
- * M4xVSS_FramingStruct* framingCtx,
- M4VIDEOEDITING_VideoFrameSize OutputVideoResolution)
- *
- * @brief This function converts an ARGB8888 input file to NV12 when used for the framing effect
- * @note The input ARGB8888 file path is contained in the pEffect structure
- * If the ARGB8888 must be resized to fit the output video size, this function
- * will do it.
- * @param pContext (IN) The integrator own context
- * @param pEffect (IN) The effect structure containing all information on
- * the file to decode, resizing ...
- * @param framingCtx (IN/OUT) Structure in which the output RGB will be stored
- *
- * @return M4NO_ERROR: No error
- * @return M4ERR_PARAMETER: At least one of the function parameters is null
- * @return M4ERR_ALLOC: Allocation error (no more memory)
- * @return M4ERR_FILE_NOT_FOUND: File not found.
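- * @note The ARGB8888 file is read entirely into memory, stripped of its alpha
- * channel and converted to RGB565 before the final NV12 conversion.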
- ****************************************************************************** - */ - -M4OSA_ERR M4xVSS_internalConvertARGB888toNV12_FrammingEffect(M4OSA_Context pContext, - M4VSS3GPP_EffectSettings* pEffect, M4xVSS_FramingStruct* framingCtx, - M4VIDEOEDITING_VideoFrameSize OutputVideoResolution) -{ - M4OSA_ERR err = M4NO_ERROR; - M4OSA_Context pARGBIn; - M4OSA_UInt32 file_size; - M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; - M4OSA_UInt32 width, height, width_out, height_out; - M4OSA_Void* pFile = pEffect->xVSS.pFramingFilePath; - M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8); - M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR; - /*UTF conversion support*/ - M4OSA_Char* pDecodedPath = M4OSA_NULL; - M4OSA_UInt32 i = 0,j = 0; - M4VIFI_ImagePlane rgbPlane; - M4OSA_UInt32 frameSize_argb=(framingCtx->width * framingCtx->height * 4); - M4OSA_UInt32 frameSize; - M4OSA_UInt32 tempAlphaPercent = 0; - M4VIFI_UInt8* TempPacData = M4OSA_NULL; - M4OSA_UInt16 *ptr = M4OSA_NULL; - M4OSA_UInt32 z = 0; - - M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect: Entering "); - - M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toNV12_FrammingEffect width and height %d %d ", - framingCtx->width,framingCtx->height); - - M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, M4VS, (M4OSA_Char*)\ - "Image argb data"); - if(pTmpData == M4OSA_NULL) { - M4OSA_TRACE1_0("Failed to allocate memory for Image clip"); - return M4ERR_ALLOC; - } - /** - * UTF conversion: convert the file path into the customer format*/ - pDecodedPath = pFile; - - if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL - && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL) - { - M4OSA_UInt32 length = 0; - err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) pFile, - (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_1("M4xVSS_internalDecodePNG:\ - M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err); - free(pTmpData); - pTmpData = M4OSA_NULL; - return err; - } - pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer; - } - - /** - * End of the conversion, now use the decoded path*/ - - /* Open input ARGB8888 file and store it into memory */ - err = xVSS_context->pFileReadPtr->openRead(&pARGBIn, pDecodedPath, M4OSA_kFileRead); - - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_2("Can't open input ARGB8888 file %s, error: 0x%x\n",pFile, err); - free(pTmpData); - pTmpData = M4OSA_NULL; - return err; - } - - err = xVSS_context->pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb); - if(err != M4NO_ERROR) - { - xVSS_context->pFileReadPtr->closeRead(pARGBIn); - free(pTmpData); - pTmpData = M4OSA_NULL; - return err; - } - - - err = xVSS_context->pFileReadPtr->closeRead(pARGBIn); - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_2("Can't close input png file %s, error: 0x%x\n",pFile, err); - free(pTmpData); - pTmpData = M4OSA_NULL; - return err; - } - - - rgbPlane.u_height = framingCtx->height; - rgbPlane.u_width = framingCtx->width; - rgbPlane.u_stride = rgbPlane.u_width*3; - rgbPlane.u_topleft = 0; - - frameSize = (rgbPlane.u_width * rgbPlane.u_height * 3); //Size of RGB888 data - rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(((frameSize)+ (2 * framingCtx->width)), - M4VS, (M4OSA_Char*)"Image clip RGB888 data"); - if(rgbPlane.pac_data == M4OSA_NULL) - { - M4OSA_TRACE1_0("Failed to allocate 
memory for Image clip");
- free(pTmpData);
- return M4ERR_ALLOC;
- }
-
- M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect:\
- Remove the alpha channel ");
-
- /* premultiplied alpha % on RGB */
- for (i=0, j = 0; i < frameSize_argb; i += 4) {
- /* this is alpha value */
- if ((i % 4) == 0)
- {
- tempAlphaPercent = pTmpData[i];
- }
-
- /* R */
- rgbPlane.pac_data[j] = pTmpData[i+1];
- j++;
-
- /* G */
- if (tempAlphaPercent > 0) {
- rgbPlane.pac_data[j] = pTmpData[i+2];
- j++;
- } else {/* In case of alpha value 0, make GREEN to 255 */
- rgbPlane.pac_data[j] = 255; //pTmpData[i+2];
- j++;
- }
-
- /* B */
- rgbPlane.pac_data[j] = pTmpData[i+3];
- j++;
- }
-
- free(pTmpData);
- pTmpData = M4OSA_NULL;
-
- /* convert RGB888 to RGB565 */
-
- /* allocate temp RGB 565 buffer */
- TempPacData = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize +
- (4 * (framingCtx->width + framingCtx->height + 1)),
- M4VS, (M4OSA_Char*)"Image clip RGB565 data");
- if (TempPacData == M4OSA_NULL) {
- M4OSA_TRACE1_0("Failed to allocate memory for Image clip RGB565 data");
- free(rgbPlane.pac_data);
- return M4ERR_ALLOC;
- }
-
- ptr = (M4OSA_UInt16 *)TempPacData;
- z = 0;
-
- for (i = 0; i < j ; i += 3)
- {
- ptr[z++] = PACK_RGB565(0, rgbPlane.pac_data[i],
- rgbPlane.pac_data[i+1],
- rgbPlane.pac_data[i+2]);
- }
-
- /* free the RGB888 and assign RGB565 */
- free(rgbPlane.pac_data);
- rgbPlane.pac_data = TempPacData;
-
- /**
- * Check if output sizes are odd */
- if(rgbPlane.u_height % 2 != 0)
- {
- M4VIFI_UInt8* output_pac_data = rgbPlane.pac_data;
- M4OSA_UInt32 i;
- M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect:\
- output height is odd ");
- output_pac_data +=rgbPlane.u_width * rgbPlane.u_height*2;
-
- for(i=0; i<rgbPlane.u_width; i++)
- {
- *output_pac_data++ = transparent1;
- *output_pac_data++ = transparent2;
- }
-
- /* The added bottom line is set to the transparent color */
- rgbPlane.u_height++;
- }
- if(rgbPlane.u_width % 2 != 0)
- {
- M4VIFI_UInt8* newRGBpac_data;
- M4VIFI_UInt8* output_pac_data, *input_pac_data;
- M4OSA_UInt32 i;
- M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect:\
- output width is odd ");
- rgbPlane.u_width++;
-
- newRGBpac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(rgbPlane.u_width\
- *rgbPlane.u_height*2, M4VS, (M4OSA_Char *)"New Framing Output pac_data RGB");
- if(newRGBpac_data == M4OSA_NULL)
- {
- M4OSA_TRACE1_0("Allocation error in \
- M4xVSS_internalConvertARGB888toNV12_FrammingEffect");
- free(rgbPlane.pac_data);
- return M4ERR_ALLOC;
- }
-
- output_pac_data = newRGBpac_data;
- input_pac_data = rgbPlane.pac_data;
-
- for(i=0; i<rgbPlane.u_height; i++)
- {
- memcpy((void *)output_pac_data, (void *)input_pac_data,
- (rgbPlane.u_width-1)*2);
- output_pac_data += ((rgbPlane.u_width-1)*2);
- /* The added last pixel of each row is set to the transparent color */
- *output_pac_data++ = transparent1;
- *output_pac_data++ = transparent2;
- input_pac_data += ((rgbPlane.u_width-1)*2);
- }
- free(rgbPlane.pac_data);
- rgbPlane.pac_data = newRGBpac_data;
- }
-
- /* reset the stride to the possibly padded width */
- rgbPlane.u_stride = rgbPlane.u_width*2;
-
- /**
- * Initialize chained list parameters */
- framingCtx->duration = 0;
- framingCtx->previousClipTime = -1;
- framingCtx->previewOffsetClipTime = -1;
-
- /**
- * Only one element in the chained list (no animated image ...)
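- * (pCurrent and pNext below simply point back to the framing structure itself)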
*/ - framingCtx->pCurrent = framingCtx; - framingCtx->pNext = framingCtx; - - /** - * Get output width/height */ - switch(OutputVideoResolution) - //switch(xVSS_context->pSettings->xVSS.outputVideoSize) - { - case M4VIDEOEDITING_kSQCIF: - width_out = 128; - height_out = 96; - break; - case M4VIDEOEDITING_kQQVGA: - width_out = 160; - height_out = 120; - break; - case M4VIDEOEDITING_kQCIF: - width_out = 176; - height_out = 144; - break; - case M4VIDEOEDITING_kQVGA: - width_out = 320; - height_out = 240; - break; - case M4VIDEOEDITING_kCIF: - width_out = 352; - height_out = 288; - break; - case M4VIDEOEDITING_kVGA: - width_out = 640; - height_out = 480; - break; - case M4VIDEOEDITING_kWVGA: - width_out = 800; - height_out = 480; - break; - case M4VIDEOEDITING_kNTSC: - width_out = 720; - height_out = 480; - break; - case M4VIDEOEDITING_k640_360: - width_out = 640; - height_out = 360; - break; - case M4VIDEOEDITING_k854_480: - // StageFright encoders require %16 resolution - width_out = M4ENCODER_854_480_Width; - height_out = 480; - break; - case M4VIDEOEDITING_k1280_720: - width_out = 1280; - height_out = 720; - break; - case M4VIDEOEDITING_k1080_720: - // StageFright encoders require %16 resolution - width_out = M4ENCODER_1080_720_Width; - height_out = 720; - break; - case M4VIDEOEDITING_k960_720: - width_out = 960; - height_out = 720; - break; - case M4VIDEOEDITING_k1920_1080: - width_out = 1920; - height_out = M4ENCODER_1920_1080_Height; - break; - /** - * If output video size is not given, we take QCIF size, - * should not happen, because already done in M4xVSS_sendCommand */ - default: - width_out = 176; - height_out = 144; - break; - } - - /** - * Allocate output planes structures */ - framingCtx->FramingRgb = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(sizeof(M4VIFI_ImagePlane), M4VS, - (M4OSA_Char *)"Framing Output plane RGB"); - if(framingCtx->FramingRgb == M4OSA_NULL) - { - M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toNV12_FrammingEffect"); - return M4ERR_ALLOC; - } - /** - * Resize RGB if needed */ - if((pEffect->xVSS.bResize) && - (rgbPlane.u_width != width_out || rgbPlane.u_height != height_out)) - { - width = width_out; - height = height_out; - - M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toNV12_FrammingEffect: \ - New Width and height %d %d ",width,height); - - framingCtx->FramingRgb->u_height = height_out; - framingCtx->FramingRgb->u_width = width_out; - framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2; - framingCtx->FramingRgb->u_topleft = 0; - - framingCtx->FramingRgb->pac_data = - (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(framingCtx->FramingRgb->u_height*framingCtx->\ - FramingRgb->u_width*2*sizeof(M4VIFI_UInt8), M4VS, - (M4OSA_Char *)"Framing Output pac_data RGB"); - - if(framingCtx->FramingRgb->pac_data == M4OSA_NULL) - { - M4OSA_TRACE1_0("Allocation error in \ - M4xVSS_internalConvertARGB888toNV12_FrammingEffect"); - free(framingCtx->FramingRgb); - free(rgbPlane.pac_data); - return M4ERR_ALLOC; - } - - M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect: Resizing Needed "); - M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toNV12_FrammingEffect:\ - rgbPlane.u_height & rgbPlane.u_width %d %d",rgbPlane.u_height,rgbPlane.u_width); - - err = M4VIFI_ResizeBilinearRGB565toRGB565(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb); - - if(err != M4NO_ERROR) - { - M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toNV12_FrammingEffect :\ - when resizing RGB plane: 0x%x\n", err); - return err; - } - - if(rgbPlane.pac_data != M4OSA_NULL) 
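- /* the intermediate RGB565 plane is no longer needed once its content has been
- resized into FramingRgb, so it can be released here */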
- { - free(rgbPlane.pac_data); - rgbPlane.pac_data = M4OSA_NULL; - } - } - else - { - - M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect:\ - Resizing Not Needed "); - - width = rgbPlane.u_width; - height = rgbPlane.u_height; - framingCtx->FramingRgb->u_height = height; - framingCtx->FramingRgb->u_width = width; - framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2; - framingCtx->FramingRgb->u_topleft = 0; - framingCtx->FramingRgb->pac_data = rgbPlane.pac_data; - } - - - if(pEffect->xVSS.bResize) - { - /** - * Force topleft to 0 for pure framing effect */ - framingCtx->topleft_x = 0; - framingCtx->topleft_y = 0; - } - - - /** - * Convert RGB output to NV12 to be able to merge it with output video in framing - effect */ - framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(2*sizeof(M4VIFI_ImagePlane), M4VS, - (M4OSA_Char *)"Framing Output plane NV12"); - if(framingCtx->FramingYuv == M4OSA_NULL) - { - M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toNV12_FrammingEffect"); - free(framingCtx->FramingRgb->pac_data); - return M4ERR_ALLOC; - } - - // Alloc for Y, U and V planes - framingCtx->FramingYuv[0].u_width = ((width+1)>>1)<<1; - framingCtx->FramingYuv[0].u_height = ((height+1)>>1)<<1; - framingCtx->FramingYuv[0].u_topleft = 0; - framingCtx->FramingYuv[0].u_stride = ((width+1)>>1)<<1; - framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc - ((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height), M4VS, - (M4OSA_Char *)"Alloc for the output Y"); - if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL) - { - M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toNV12_FrammingEffect"); - free(framingCtx->FramingYuv); - free(framingCtx->FramingRgb->pac_data); - return M4ERR_ALLOC; - } - framingCtx->FramingYuv[1].u_width = framingCtx->FramingYuv[0].u_width; - framingCtx->FramingYuv[1].u_height = (framingCtx->FramingYuv[0].u_height)>>1; - framingCtx->FramingYuv[1].u_topleft = 0; - framingCtx->FramingYuv[1].u_stride = framingCtx->FramingYuv[1].u_width; - - framingCtx->FramingYuv[1].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc( - framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height, M4VS, - (M4OSA_Char *)"Alloc for the output U&V"); - if (framingCtx->FramingYuv[1].pac_data == M4OSA_NULL) { - free(framingCtx->FramingYuv[0].pac_data); - free(framingCtx->FramingYuv); - free(framingCtx->FramingRgb->pac_data); - return M4ERR_ALLOC; - } - - M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect:\ - convert RGB to YUV "); - - err = M4VIFI_RGB565toNV12(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv); - - if (framingCtx->exportmode) { - M4xVSS_internalProbeFramingBoundaryNV12(framingCtx); - } - - if (err != M4NO_ERROR) - { - M4OSA_TRACE1_1("SPS png: error when converting from RGB to NV12: 0x%x\n", err); - } - M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toNV12_FrammingEffect: Leaving "); - return err; -} - -/** - ****************************************************************************** - * prototype M4VSS3GPP_externalVideoEffectColor_NV12(M4OSA_Void *pFunctionContext, - * M4VIFI_ImagePlane *PlaneIn, - * M4VIFI_ImagePlane *PlaneOut, - * M4VSS3GPP_ExternalProgress *pProgress, - * M4OSA_UInt32 uiEffectKind) - * - * @brief This function apply a color effect on an input NV12 planar frame - * @note - * @param pFunctionContext(IN) Contains which color to apply (not very clean ...) 
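- * (for the ColorRGB16 and Gradient effects the same context also carries the
- * rgb16ColorData value whose chrominance replaces the input U/V samples)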
- * @param PlaneIn (IN) Input NV12 planar - * @param PlaneOut (IN/OUT) Output NV12 planar - * @param pProgress (IN/OUT) Progress indication (0-100) - * @param uiEffectKind (IN) Unused - * - * @return M4VIFI_OK: No error - ****************************************************************************** - */ -M4OSA_ERR M4VSS3GPP_externalVideoEffectColor_NV12(M4OSA_Void *pFunctionContext, - M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut, - M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind) -{ - M4VIFI_Int32 plane_number; - M4VIFI_UInt32 i,j,wStep; - M4VIFI_UInt8 *p_buf_src, *p_buf_dest,*p_buf_dest_uv; - M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext; - M4VIFI_UInt32 uvTmp,uvTmp1,uvTmp2,u_wTmp,u_wTmp1; - M4VIFI_UInt32 *p_buf_dest_uv32; - uvTmp = uvTmp1 = uvTmp2 = 0; - - - for (plane_number = 0; plane_number < 2; plane_number++) - { - p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]); - p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]); - - for (i = 0; i < PlaneOut[plane_number].u_height; i++) - { - /** - * Chrominance */ - if(plane_number==1) - { - p_buf_dest_uv32 = (M4VIFI_UInt32*)p_buf_dest; - //switch ((M4OSA_UInt32)pFunctionContext) - // commented because a structure for the effects context exist - switch (ColorContext->colorEffectType) - { - case M4xVSS_kVideoEffectType_BlackAndWhite: - memset((void *)p_buf_dest,128, - PlaneIn[plane_number].u_width); - break; - case M4xVSS_kVideoEffectType_Pink: - memset((void *)p_buf_dest,255, - PlaneIn[plane_number].u_width); - break; - case M4xVSS_kVideoEffectType_Green: - memset((void *)p_buf_dest,0, - PlaneIn[plane_number].u_width); - break; - case M4xVSS_kVideoEffectType_Sepia: - { - uvTmp1 = 139; - uvTmp2 = 117 | (uvTmp1 << 8); - uvTmp = uvTmp2 | (uvTmp2 << 16); - - u_wTmp = PlaneIn[plane_number].u_width; - - u_wTmp1 = u_wTmp >> 4; - for(wStep = 0; wStep < u_wTmp1; wStep++) - { - *p_buf_dest_uv32++ = uvTmp; - *p_buf_dest_uv32++ = uvTmp; - *p_buf_dest_uv32++ = uvTmp; - *p_buf_dest_uv32++ = uvTmp; - } - u_wTmp1 = u_wTmp - ((u_wTmp>>4)<<4); // equal to u_wTmp % 16 - p_buf_dest_uv = (M4VIFI_UInt8*)p_buf_dest_uv32; - for(j=0; j< u_wTmp1; j++) - { - if (j%2 == 0) - { - *p_buf_dest_uv++ = 117; - } - else - { - *p_buf_dest_uv++ = 139; - } - } - break; - } - case M4xVSS_kVideoEffectType_Negative: - memcpy((void *)p_buf_dest, - (void *)p_buf_src ,PlaneOut[plane_number].u_width); - break; - - case M4xVSS_kVideoEffectType_ColorRGB16: - { - M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0; - - /*first get the r, g, b*/ - b = (ColorContext->rgb16ColorData & 0x001f); - g = (ColorContext->rgb16ColorData & 0x07e0)>>5; - r = (ColorContext->rgb16ColorData & 0xf800)>>11; - - /*keep y, but replace u and v*/ - u = U16(r, g, b); - v = V16(r, g, b); - uvTmp1 = (M4OSA_UInt8)v; - uvTmp2 = ((M4OSA_UInt8)u) | (uvTmp1 << 8); - uvTmp = uvTmp2 | (uvTmp2 << 16); - - u_wTmp = PlaneIn[plane_number].u_width; - - u_wTmp1 = u_wTmp >> 2; - for(wStep = 0; wStep < u_wTmp1; wStep++) - { - *p_buf_dest_uv32++ = uvTmp; - } - u_wTmp1 = u_wTmp - ((u_wTmp>>2)<<2); // equal to u_wTmp % 4 - p_buf_dest_uv = (M4VIFI_UInt8*)p_buf_dest_uv32; - if(u_wTmp1 == 0) { - break; - } else if(u_wTmp1 == 1) { - *p_buf_dest_uv = (M4OSA_UInt8)u; - } else if(u_wTmp1 == 2) { - *p_buf_dest_uv++ = (M4OSA_UInt8)u; - *p_buf_dest_uv = (M4OSA_UInt8)v; - } else { - *p_buf_dest_uv++ = (M4OSA_UInt8)u; - *p_buf_dest_uv++ = (M4OSA_UInt8)v; - *p_buf_dest_uv = (M4OSA_UInt8)u; - } - break; - } - case 
M4xVSS_kVideoEffectType_Gradient:
- {
- M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
-
- /*first get the r, g, b*/
- b = (ColorContext->rgb16ColorData & 0x001f);
- g = (ColorContext->rgb16ColorData & 0x07e0)>>5;
- r = (ColorContext->rgb16ColorData & 0xf800)>>11;
-
- /*for color gradation*/
- b = (M4OSA_UInt16)( b - ((b*i)/PlaneIn[plane_number].u_height));
- g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height));
- r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height));
-
- /*keep y, but replace u and v*/
- u = U16(r, g, b);
- v = V16(r, g, b);
- uvTmp1 = (M4OSA_UInt8)v;
- uvTmp2 = ((M4OSA_UInt8)u) | (uvTmp1 << 8);
- uvTmp = uvTmp2 | (uvTmp2 << 16);
-
- u_wTmp = PlaneIn[plane_number].u_width;
-
- u_wTmp1 = u_wTmp >> 2;
- for(wStep = 0; wStep < u_wTmp1; wStep++)
- {
- *p_buf_dest_uv32++ = uvTmp;
- }
- u_wTmp1 = u_wTmp - ((u_wTmp>>2)<<2); // equal to u_wTmp % 4
- p_buf_dest_uv = (M4VIFI_UInt8*)p_buf_dest_uv32;
- if(u_wTmp1 == 0) {
- break;
- } else if(u_wTmp1 == 1) {
- *p_buf_dest_uv = (M4OSA_UInt8)u;
- } else if(u_wTmp1 == 2) {
- *p_buf_dest_uv++ = (M4OSA_UInt8)u;
- *p_buf_dest_uv = (M4OSA_UInt8)v;
- } else {
- *p_buf_dest_uv++ = (M4OSA_UInt8)u;
- *p_buf_dest_uv++ = (M4OSA_UInt8)v;
- *p_buf_dest_uv = (M4OSA_UInt8)u;
- }
- break;
- }
- default:
- break;
- }
- }
- /**
- * Luminance */
- else
- {
- //switch ((M4OSA_UInt32)pFunctionContext)
- // commented because a structure for the effects context exists
- switch (ColorContext->colorEffectType)
- {
- case M4xVSS_kVideoEffectType_Negative:
- for(j=0; j<PlaneOut[plane_number].u_width; j++)
- {
- p_buf_dest[j] = 255 - p_buf_src[j];
- }
- break;
- default:
- memcpy((void *)p_buf_dest,
- (void *)p_buf_src ,PlaneOut[plane_number].u_width);
- break;
- }
- }
- p_buf_src += PlaneIn[plane_number].u_stride;
- p_buf_dest += PlaneOut[plane_number].u_stride;
- }
- }
-
- return M4VIFI_OK;
-}
-
-/**
- ******************************************************************************
- * prototype M4VSS3GPP_externalVideoEffectFraming_NV12(M4OSA_Void *userData,
- * M4VIFI_ImagePlane PlaneIn[2],
- * M4VIFI_ImagePlane *PlaneOut,
- * M4VSS3GPP_ExternalProgress *pProgress,
- * M4OSA_UInt32 uiEffectKind)
- *
- * @brief This function adds a fixed or animated image on an input NV12 planar frame
- * @note
- * @param userData (IN) Contains a pointer to a settings structure
- * @param PlaneIn (IN) Input NV12 planar
- * @param PlaneOut (IN/OUT) Output NV12 planar
- * @param pProgress (IN/OUT) Progress indication (0-100)
- * @param uiEffectKind (IN) Unused
- *
- * @return M4VIFI_OK: No error
- ******************************************************************************
- */
-M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming_NV12( M4OSA_Void *userData,
- M4VIFI_ImagePlane PlaneIn[2], M4VIFI_ImagePlane *PlaneOut,
- M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind )
-{
- M4VIFI_UInt32 x,y;
-
- M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;
- M4VIFI_UInt8 *p_in_UV = PlaneIn[1].pac_data;
-
- M4xVSS_FramingStruct* Framing = M4OSA_NULL;
- M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;
- M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;
-
- M4VIFI_UInt8 *p_out0;
- M4VIFI_UInt8 *p_out1;
-
- M4VIFI_UInt32 topleft[2];
-
- M4OSA_UInt32 topleft_x, topleft_y, botright_x, botright_y;
-
- M4VIFI_UInt8 transparent1 = (M4VIFI_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
- M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
-
-#ifndef DECODE_GIF_ON_SAVING
- Framing = (M4xVSS_FramingStruct *)userData;
- currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;
- FramingRGB = Framing->FramingRgb->pac_data;
-#endif /*DECODE_GIF_ON_SAVING*/
-
- /*FB*/
-#ifdef DECODE_GIF_ON_SAVING
- M4OSA_ERR err;
- Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
- currentFraming = (M4xVSS_FramingStruct *)Framing;
- FramingRGB = Framing->FramingRgb->pac_data;
-#endif /*DECODE_GIF_ON_SAVING*/
- /*end FB*/
-
- /**
- * Initialize input / output plane pointers */
- p_in_Y += PlaneIn[0].u_topleft;
- p_in_UV += PlaneIn[1].u_topleft;
-
- p_out0 = PlaneOut[0].pac_data;
- p_out1 = PlaneOut[1].pac_data;
-
- /**
- * Depending on time, initialize Framing frame to use */
- if(Framing->previousClipTime == -1)
- {
- Framing->previousClipTime = pProgress->uiOutputTime;
- }
-
- /**
- * If the current clip time has reached the duration of one frame of the framing picture
- * we need to step to the next framing picture */
-
- Framing->previousClipTime = pProgress->uiOutputTime;
- FramingRGB = currentFraming->FramingRgb->pac_data;
- topleft[0] = currentFraming->topleft_x;
- topleft[1] = currentFraming->topleft_y;
-
- M4OSA_TRACE1_2("currentFraming->topleft_x = %d, currentFraming->topleft_y = %d",
- currentFraming->topleft_x,currentFraming->topleft_y);
-
- topleft_x = currentFraming->framing_topleft_x;
- topleft_y = currentFraming->framing_topleft_y;
- botright_x = currentFraming->framing_bottomright_x;
- botright_y = currentFraming->framing_bottomright_y;
-
- M4OSA_TRACE1_4("topleft_x = %d, topleft_y = %d, botright_x = %d, botright_y = %d",
- topleft_x,topleft_y, botright_x, botright_y);
-
- /*Alpha blending support*/
- M4OSA_Float alphaBlending = 1;
-
- M4xVSS_internalEffectsAlphaBlending* alphaBlendingStruct =\
- (M4xVSS_internalEffectsAlphaBlending*)\
- ((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;
-
- if(alphaBlendingStruct != M4OSA_NULL)
- {
- if(pProgress->uiProgress \
- < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
- {
- if(alphaBlendingStruct->m_fadeInTime == 0) {
- alphaBlending = alphaBlendingStruct->m_start / 100;
- } else {
- alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle\
- - alphaBlendingStruct->m_start)\
- *pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
- alphaBlending += alphaBlendingStruct->m_start;
- alphaBlending /= 100;
- }
- }
- else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->\
- m_fadeInTime*10) && pProgress->uiProgress < 1000\
- - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
- {
- alphaBlending = (M4OSA_Float)\
- ((M4OSA_Float)alphaBlendingStruct->m_middle/100);
- }
- else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)\
- (alphaBlendingStruct->m_fadeOutTime*10))
- {
- if(alphaBlendingStruct->m_fadeOutTime == 0) {
- alphaBlending = alphaBlendingStruct->m_end / 100;
- } else {
- alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle \
- - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)\
- /(alphaBlendingStruct->m_fadeOutTime*10);
- alphaBlending += alphaBlendingStruct->m_end;
- alphaBlending /= 100;
- }
- }
- }
-
- M4VIFI_UInt8 alphaBlending_int8 = (M4VIFI_UInt8)(alphaBlending * 255);
-
- for( x=0 ; x < PlaneIn[0].u_height ; x++)
- {
- if((x < topleft[1] + topleft_y) || (x > topleft[1] + botright_y))
- {
- if(x&0x01)
- {
- memcpy(p_out0+x*PlaneOut[0].u_stride,
- p_in_Y+x*PlaneIn[0].u_stride,
- PlaneOut[0].u_width);
- }
- else
- {
- memcpy(p_out0+x*PlaneOut[0].u_stride,
- p_in_Y+x*PlaneIn[0].u_stride,
- PlaneOut[0].u_width);
- memcpy(p_out1+(x>>1)*PlaneOut[1].u_stride,
- p_in_UV+(x>>1)*PlaneIn[1].u_stride,
- PlaneOut[1].u_width);
- }
- }
- else
- {
- if(x&0x01)
- {
- for(y=0; y < PlaneIn[0].u_width ; y++)
- {
- if((y>=topleft[0]+topleft_x) && (y<=topleft[0]+botright_x))
- {
- *( p_out0+y+x*PlaneOut[0].u_stride)=
- (M4VIFI_UInt8)(((*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])\
- +(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending_int8
- + (*(p_in_Y+y+x*PlaneIn[0].u_stride))*(255-alphaBlending_int8))>>8);
- }
- else
- {
- *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
- }
- }
- }
- else
- {
- for(y=0 ; y < PlaneIn[0].u_width ; y++)
- {
- if((y>=topleft[0]+topleft_x) && (y<=topleft[0]+botright_x))
- {
- *( p_out0+y+x*PlaneOut[0].u_stride)=
- (M4VIFI_UInt8)(((*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])\
- +(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending_int8
- +(*(p_in_Y+y+x*PlaneIn[0].u_stride))*(255-alphaBlending_int8))>>8);
-
- *( p_out1+y+(x>>1)*PlaneOut[1].u_stride)=
- (M4VIFI_UInt8)(((*(currentFraming->FramingYuv[1].pac_data+(y-topleft[0])\
- +((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))\
- *alphaBlending_int8 +
- *(p_in_UV+y+(x>>1)*PlaneIn[1].u_stride)*(255-alphaBlending_int8))>>8);
- }
- else
- {
- *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
- *( p_out1+y+(x>>1)*PlaneOut[1].u_stride)= *(p_in_UV+y+(x>>1)*PlaneIn[1].u_stride);
- }
- }
- }
- }
- }
- return M4VIFI_OK;
-}
-
-/**
- ******************************************************************************
- * prototype M4xVSS_AlphaMagic_NV12( M4OSA_Void *userData,
- * M4VIFI_ImagePlane PlaneIn1[2],
- * M4VIFI_ImagePlane PlaneIn2[2],
- * M4VIFI_ImagePlane *PlaneOut,
- * M4VSS3GPP_ExternalProgress *pProgress,
- * M4OSA_UInt32 uiTransitionKind)
- *
- * @brief This function applies an alpha magic transition between two input NV12 planar frames
- * @note
- * @param userData (IN) Contains a pointer to a settings structure
Contains a pointer on a settings structure - * @param PlaneIn1 (IN) Input NV12 planar from video 1 - * @param PlaneIn2 (IN) Input NV12 planar from video 2 - * @param PlaneOut (IN/OUT) Output NV12 planar - * @param pProgress (IN/OUT) Progress indication (0-100) - * @param uiTransitionKind(IN) Unused - * - * @return M4VIFI_OK: No error - ****************************************************************************** - */ -M4OSA_ERR M4xVSS_AlphaMagic_NV12( M4OSA_Void *userData, M4VIFI_ImagePlane *PlaneIn1, - M4VIFI_ImagePlane *PlaneIn2, M4VIFI_ImagePlane *PlaneOut, - M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiTransitionKind) -{ - M4OSA_ERR err; - M4xVSS_internal_AlphaMagicSettings* alphaContext; - M4VIFI_Int32 alphaProgressLevel; - - M4VIFI_ImagePlane* planeswap; - M4VIFI_UInt32 x,y; - - M4VIFI_UInt8 *p_out0; - M4VIFI_UInt8 *p_out1; - - M4VIFI_UInt8 *alphaMask; - /* "Old image" */ - M4VIFI_UInt8 *p_in1_Y; - M4VIFI_UInt8 *p_in1_UV; - /* "New image" */ - M4VIFI_UInt8 *p_in2_Y; - M4VIFI_UInt8 *p_in2_UV; - - M4OSA_TRACE1_0("M4xVSS_AlphaMagic_NV12 begin"); - - err = M4NO_ERROR; - - alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData; - - alphaProgressLevel = (pProgress->uiProgress * 128)/1000; - - if( alphaContext->isreverse != M4OSA_FALSE) - { - alphaProgressLevel = 128 - alphaProgressLevel; - planeswap = PlaneIn1; - PlaneIn1 = PlaneIn2; - PlaneIn2 = planeswap; - } - - p_out0 = PlaneOut[0].pac_data; - p_out1 = PlaneOut[1].pac_data; - - alphaMask = alphaContext->pPlane->pac_data; - - /* "Old image" */ - p_in1_Y = PlaneIn1[0].pac_data; - p_in1_UV = PlaneIn1[1].pac_data; - - /* "New image" */ - p_in2_Y = PlaneIn2[0].pac_data; - p_in2_UV = PlaneIn2[1].pac_data; - - /** - * For each column ... */ - for( y=0; yu_height; y++ ) - { - /** - * ... 
and each row of the alpha mask */
-        for( x=0; x<PlaneOut->u_width; x++ )
-        {
-            /**
-             * If the value of the current pixel of the alpha mask is > the current time
-             * ( current time is normalized on [0-255] ) */
-            if( alphaProgressLevel < alphaMask[x+y*PlaneOut->u_width] )
-            {
-                /* We keep "old image" in output plane */
-                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
-                *( p_out1+x+(y>>1)*PlaneOut[1].u_stride)=
-                    *(p_in1_UV+x+(y>>1)*PlaneIn1[1].u_stride);
-            }
-            else
-            {
-                /* We take "new image" in output plane */
-                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
-                *( p_out1+x+(y>>1)*PlaneOut[1].u_stride)=
-                    *(p_in2_UV+x+(y>>1)*PlaneIn2[1].u_stride);
-            }
-        }
-    }
-
-    M4OSA_TRACE1_0("M4xVSS_AlphaMagic_NV12 end");
-
-    return(err);
-}
-
-/**
- ******************************************************************************
- * prototype    M4xVSS_AlphaMagicBlending_NV12( M4OSA_Void *userData,
- *                                    M4VIFI_ImagePlane PlaneIn1[2],
- *                                    M4VIFI_ImagePlane PlaneIn2[2],
- *                                    M4VIFI_ImagePlane *PlaneOut,
- *                                    M4VSS3GPP_ExternalProgress *pProgress,
- *                                    M4OSA_UInt32 uiTransitionKind)
- *
- * @brief    This function applies an alpha magic transition with blending between
- *           two input NV12 planar frames
- * @note
- * @param    userData         (IN) Contains a pointer to a settings structure
- * @param    PlaneIn1         (IN) Input NV12 planar from video 1
- * @param    PlaneIn2         (IN) Input NV12 planar from video 2
- * @param    PlaneOut         (IN/OUT) Output NV12 planar
- * @param    pProgress        (IN/OUT) Progress indication (0-100)
- * @param    uiTransitionKind (IN) Unused
- *
- * @return   M4VIFI_OK: No error
- ******************************************************************************
- */
-M4OSA_ERR M4xVSS_AlphaMagicBlending_NV12(M4OSA_Void *userData,
-    M4VIFI_ImagePlane *PlaneIn1, M4VIFI_ImagePlane *PlaneIn2,
-    M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress,
-    M4OSA_UInt32 uiTransitionKind)
-{
-    M4OSA_ERR err;
-
-    M4xVSS_internal_AlphaMagicSettings* alphaContext;
-    M4VIFI_Int32 alphaProgressLevel;
-    M4VIFI_Int32 alphaBlendLevelMin;
-    M4VIFI_Int32 alphaBlendLevelMax;
-    M4VIFI_Int32 alphaBlendRange;
-
-    M4VIFI_ImagePlane* planeswap;
-    M4VIFI_UInt32 x,y;
-    M4VIFI_Int32 alphaMaskValue;
-
-    M4VIFI_UInt8 *p_out0;
-    M4VIFI_UInt8 *p_out1;
-    M4VIFI_UInt8 *alphaMask;
-
-    /* "Old image" */
-    M4VIFI_UInt8 *p_in1_Y;
-    M4VIFI_UInt8 *p_in1_UV;
-
-    /* "New image" */
-    M4VIFI_UInt8 *p_in2_Y;
-    M4VIFI_UInt8 *p_in2_UV;
-
-    M4OSA_TRACE1_0("M4xVSS_AlphaMagicBlending_NV12 begin");
-
-    err = M4NO_ERROR;
-
-    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;
-
-    alphaProgressLevel = (pProgress->uiProgress * 128)/1000;
-
-    if( alphaContext->isreverse != M4OSA_FALSE)
-    {
-        alphaProgressLevel = 128 - alphaProgressLevel;
-        planeswap = PlaneIn1;
-        PlaneIn1 = PlaneIn2;
-        PlaneIn2 = planeswap;
-    }
-
-    alphaBlendLevelMin = alphaProgressLevel-alphaContext->blendingthreshold;
-
-    alphaBlendLevelMax = alphaProgressLevel+alphaContext->blendingthreshold;
-
-    alphaBlendRange = (alphaContext->blendingthreshold)*2;
-
-    p_out0 = PlaneOut[0].pac_data;
-    p_out1 = PlaneOut[1].pac_data;
-
-    alphaMask = alphaContext->pPlane->pac_data;
-
-    /* "Old image" */
-    p_in1_Y = PlaneIn1[0].pac_data;
-    p_in1_UV = PlaneIn1[1].pac_data;
-
-    /* "New image" */
-    p_in2_Y = PlaneIn2[0].pac_data;
-    p_in2_UV = PlaneIn2[1].pac_data;
-
-    /* apply Alpha Magic on each pixel */
-    for( y=0; y<PlaneOut->u_height; y++ )
-    {
-        if (y%2 == 0)
-        {
-            for( x=0; x<PlaneOut->u_width; x++ )
-            {
-                alphaMaskValue = alphaMask[x+y*PlaneOut->u_width];
-                if( alphaBlendLevelMax < alphaMaskValue )
-                {
-                    /* We keep "old image"
in output plane */ - *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride); - *( p_out1+x+(y>>1)*PlaneOut[1].u_stride)= - *(p_in1_UV+x+(y>>1)*PlaneIn1[1].u_stride); - } - else if( (alphaBlendLevelMin < alphaMaskValue)&& - (alphaMaskValue <= alphaBlendLevelMax ) ) - { - /* We blend "old and new image" in output plane */ - *( p_out0+x+y*PlaneOut[0].u_stride)=(M4VIFI_UInt8) - (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_Y+x+y*PlaneIn1[0].u_stride)) - +(alphaBlendLevelMax-alphaMaskValue)\ - *( *(p_in2_Y+x+y*PlaneIn2[0].u_stride)) )/alphaBlendRange ); - - *( p_out1+x+(y>>1)*PlaneOut[1].u_stride)=(M4VIFI_UInt8)\ - (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_UV+x+(y>>1)\ - *PlaneIn1[1].u_stride)) - +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_UV+x+(y>>1)\ - *PlaneIn2[1].u_stride)) )/alphaBlendRange ); - } - else - { - /* We take "new image" in output plane */ - *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride); - *( p_out1+x+(y>>1)*PlaneOut[1].u_stride)= - *(p_in2_UV+x+(y>>1)*PlaneIn2[1].u_stride); - - } - } - } - else - { - for( x=0; xu_width; x++ ) - { - alphaMaskValue = alphaMask[x+y*PlaneOut->u_width]; - if( alphaBlendLevelMax < alphaMaskValue ) - { - /* We keep "old image" in output plane */ - *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride); - } - else if( (alphaBlendLevelMin < alphaMaskValue)&& - (alphaMaskValue <= alphaBlendLevelMax ) ) - { - /* We blend "old and new image" in output plane */ - *( p_out0+x+y*PlaneOut[0].u_stride)=(M4VIFI_UInt8) - (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_Y+x+y*PlaneIn1[0].u_stride)) - +(alphaBlendLevelMax-alphaMaskValue)\ - *( *(p_in2_Y+x+y*PlaneIn2[0].u_stride)) )/alphaBlendRange ); - } - else - { - /* We take "new image" in output plane */ - *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride); - } - } - } - } - - M4OSA_TRACE1_0("M4xVSS_AlphaMagicBlending_NV12 end"); - - return(err); -} - - -#define M4XXX_SampleAddress_X86(plane, x, y) ( (plane).pac_data + (plane).u_topleft + (y)\ - * (plane).u_stride + (x) ) - -static void M4XXX_CopyPlane_X86(M4VIFI_ImagePlane* dest, M4VIFI_ImagePlane* source) -{ - M4OSA_UInt32 height, width, sourceStride, destStride, y; - M4OSA_MemAddr8 sourceWalk, destWalk; - - /* cache the vars used in the loop so as to avoid them being repeatedly fetched and - recomputed from memory. */ - height = dest->u_height; - width = dest->u_width; - - sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress_X86(*source, 0, 0); - sourceStride = source->u_stride; - - destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress_X86(*dest, 0, 0); - destStride = dest->u_stride; - - for (y=0; ydirection) - || (M4xVSS_SlideTransition_LeftOutRightIn == settings->direction) ) - { - /* horizontal slide */ - shiftUV = ((PlaneOut[1]).u_width/2 * pProgress->uiProgress)/1000; - M4OSA_TRACE1_2("M4xVSS_SlideTransition_NV12 upper: shiftUV = %d,progress = %d", - shiftUV,pProgress->uiProgress ); - if (M4xVSS_SlideTransition_RightOutLeftIn == settings->direction) - { - /* Put the previous clip frame right, the next clip frame left, and reverse shiftUV - (since it's a shift from the left frame) so that we start out on the right - (i.e. not left) frame, it - being from the previous clip. 
*/ - return M4xVSS_HorizontalSlideTransition_NV12(PlaneIn2, PlaneIn1, PlaneOut, - (PlaneOut[1]).u_width/2 - shiftUV); - } - else /* Left out, right in*/ - { - return M4xVSS_HorizontalSlideTransition_NV12(PlaneIn1, PlaneIn2, PlaneOut, shiftUV); - } - } - else - { - /* vertical slide */ - shiftUV = ((PlaneOut[1]).u_height * pProgress->uiProgress)/1000; - M4OSA_TRACE1_2("M4xVSS_SlideTransition_NV12 bottom: shiftUV = %d,progress = %d",shiftUV, - pProgress->uiProgress ); - if (M4xVSS_SlideTransition_TopOutBottomIn == settings->direction) - { - /* Put the previous clip frame top, the next clip frame bottom. */ - return M4xVSS_VerticalSlideTransition_NV12(PlaneIn1, PlaneIn2, PlaneOut, shiftUV); - } - else /* Bottom out, top in */ - { - return M4xVSS_VerticalSlideTransition_NV12(PlaneIn2, PlaneIn1, PlaneOut, - (PlaneOut[1]).u_height - shiftUV); - } - } -} - -/** - ****************************************************************************** - * prototype M4xVSS_FadeBlackTransition_NV12(M4OSA_Void *pFunctionContext, - * M4VIFI_ImagePlane *PlaneIn, - * M4VIFI_ImagePlane *PlaneOut, - * M4VSS3GPP_ExternalProgress *pProgress, - * M4OSA_UInt32 uiEffectKind) - * - * @brief This function apply a fade to black and then a fade from black - * @note - * @param pFunctionContext(IN) Contains which color to apply (not very clean ...) - * @param PlaneIn (IN) Input NV12 planar - * @param PlaneOut (IN/OUT) Output NV12 planar - * @param pProgress (IN/OUT) Progress indication (0-100) - * @param uiEffectKind (IN) Unused - * - * @return M4VIFI_OK: No error - ****************************************************************************** - */ -M4OSA_ERR M4xVSS_FadeBlackTransition_NV12(M4OSA_Void *userData, - M4VIFI_ImagePlane *PlaneIn1, M4VIFI_ImagePlane *PlaneIn2, - M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress, - M4OSA_UInt32 uiTransitionKind) -{ - M4OSA_Int32 tmp = 0; - M4OSA_ERR err = M4NO_ERROR; - - if((pProgress->uiProgress) < 500) - { - /** - * Compute where we are in the effect (scale is 0->1024) */ - tmp = (M4OSA_Int32)((1.0 - ((M4OSA_Float)(pProgress->uiProgress*2)/1000)) * 1024 ); - - /** - * Apply the darkening effect */ - err = M4VFL_modifyLumaWithScale_NV12((M4ViComImagePlane*)PlaneIn1, - (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL); - if (M4NO_ERROR != err) - { - M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition_NV12: M4VFL_modifyLumaWithScale_NV12 returns\ - error 0x%x, returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err); - return M4VSS3GPP_ERR_LUMA_FILTER_ERROR; - } - } - else - { - /** - * Compute where we are in the effect (scale is 0->1024). 
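- *            (In this second half the ramp is tmp = ((uiProgress-500)*2/1000) * 1024,
- *            i.e. 0 at the transition midpoint and 1024 at the end: the mirror
- *            image of the fade-to-black ramp in the first half.)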
*/ - tmp = (M4OSA_Int32)((((M4OSA_Float)(((pProgress->uiProgress-500)*2))/1000)) * 1024); - - /** - * Apply the darkening effect */ - err = M4VFL_modifyLumaWithScale_NV12((M4ViComImagePlane*)PlaneIn2, - (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL); - if (M4NO_ERROR != err) - { - M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition_NV12:\ - M4VFL_modifyLumaWithScale_NV12 returns error 0x%x,\ - returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err); - return M4VSS3GPP_ERR_LUMA_FILTER_ERROR; - } - } - - return M4VIFI_OK; -} - -- cgit v1.2.3 From e8279114a789236b157f83a455533546b2b32779 Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Sun, 25 Nov 2012 18:52:42 -0500 Subject: [PORT FROM MAIN] libmix: fix the crash issue when playing corrupted clips BZ: 69519 Enhance the error handling to fix the crash issue when playing corrupted clips Signed-off-by: Weian Chen Change-Id: Ic8e5510d6154cd282b125d03697104fe80d6b232 Reviewed-on: http://android.intel.com:8080/80840 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: cactus Tested-by: cactus --- .../viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c | 9 ++++++++- mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 3 ++- videodecoder/VideoDecoderMPEG4.cpp | 3 +++ 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c index f5784c3..93aed35 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c @@ -442,6 +442,11 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse int32_t getbits=0; //DEB("entering mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret); + + // Trying to parse more header data as it is more important than frame data + if (parser->bitstream_error > MP4_HDR_ERROR_MASK) + return ret; + do { vidObjLay->VideoObjectPlane.sprite_transmit_mode = MP4_SPRITE_TRANSMIT_MODE_PIECE; @@ -579,8 +584,10 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse } while (0); mp4_set_hdr_bitstream_error(parser, true, ret); - if (ret != MP4_STATUS_OK) + if (ret != MP4_STATUS_OK) { parser->bitstream_error |= MP4_BS_ERROR_HDR_NONDEC; + return ret; + } //DEB("before wkld mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret); // POPULATE WORKLOAD ITEM diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index 5e4c887..abe9f82 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -199,7 +199,8 @@ uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index) { // this should never happen!!!! WTRACE ("Short video header is parsed.\n"); - vbp_on_vop_svh_mp42(pcontext, list_index); + // vbp_on_vop_svh_mp42(pcontext, list_index); + return VBP_TYPE; } } break; diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index 2ec6123..5843b47 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -495,6 +495,9 @@ Decode_Status VideoDecoderMPEG4::setReference(VAPictureParameterBufferMPEG4 *pic case MP4_VOP_TYPE_B: picParam->vop_fields.bits.backward_reference_vop_coding_type = mLastVOPCodingType; // WEIRD, CHECK AGAIN !!!!!!! 
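+            // With a corrupted stream a B-VOP can arrive before both reference
+            // frames exist; dereferencing renderBuffer.surface through a NULL
+            // reference was the reported crash.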
+            if (mLastReference == NULL || mForwardReference == NULL) {
+                return DECODE_NO_REFERENCE;
+            }
             picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
             picParam->backward_reference_picture = mForwardReference->renderBuffer.surface;
             break;
--
cgit v1.2.3

From 1323024998d0527773b21f7aea6eef4fbf0a8442 Mon Sep 17 00:00:00 2001
From: wfeng6
Date: Thu, 13 Dec 2012 21:08:46 +0800
Subject: Support xvid clips with more than 1 warp point (1/2)

BZ: 74615

The patch is an enhancement to support xvid clips with more than one warp
point. In the mix parser we simply keep parsing the VOPs and do not report
errors to the OMX-IL layer. The patch also handles [S|B] packed-frame
situations.
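As a rough sketch (helper name hypothetical; the real change is in
mp4_Parse_VOL_sprite below, using that file's DEB trace macro), the policy
this patch moves to is:

    static void check_sprite_warping_points(uint32_t n_points)
    {
        /* More than one GMC warping point exceeds what the hardware can
         * decode, but it is no longer a parse error: parsing continues and
         * the decoder layer decides what to do with such frames. */
        if (n_points > 1)
            DEB("Warning: %d sprite warping points, parsing continues\n",
                n_points);
    }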
Change-Id: I8533e6be0acac3cd56eb4d10cd1db889800274e9
Signed-off-by: wfeng6
Reviewed-on: http://android.intel.com:8080/81198
Reviewed-by: Guo, Nana N
Reviewed-by: Tang, Richard
Reviewed-by: Chen, Tianmi
Reviewed-by: Jiang, Fei
Reviewed-by: Ding, Haitao
Tested-by: Ding, Haitao
Reviewed-by: Wang, Elaine
Reviewed-by: cactus
Tested-by: cactus
---
 mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c | 4 +---
 videodecoder/VideoDecoderMPEG4.cpp                                   | 4 +---
 2 files changed, 2 insertions(+), 6 deletions(-)

diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c
index 93aed35..988883a 100644
--- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c
@@ -228,10 +228,8 @@ mp4_Parse_VOL_sprite(void *parent, viddec_mp4_parser_t *parser)
         cxt->no_of_sprite_warping_points = code >> 3;
         if (cxt->no_of_sprite_warping_points > 1)
         {
-            DEB("Error: mp4_Parse_VideoObject:bad no_of_sprite_warping_points %d\n",
+            DEB("Warning: mp4_Parse_VideoObject:bad no_of_sprite_warping_points %d\n",
                 cxt->no_of_sprite_warping_points);
-            ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
-            break;
         }

         if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && (cxt->sprite_brightness_change))
diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp
index 5843b47..fcfcb2d 100644
--- a/videodecoder/VideoDecoderMPEG4.cpp
+++ b/videodecoder/VideoDecoderMPEG4.cpp
@@ -278,8 +278,6 @@ Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) {
         int codingType = picParam->vop_fields.bits.vop_coding_type;
         if (codingType == MP4_VOP_TYPE_S && picParam->no_of_sprite_warping_points > 1) {
             WTRACE("Hardware only supports up to one warping point (stationary or translation)");
-            // TODO: we actually can't decode this frame
-            return DECODE_FAIL;
         }

         if (picData->vop_coded == 0) {
@@ -371,7 +369,7 @@ Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) {
         mAcquiredBuffer->renderBuffer.flag = 0;
         mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;

-        if (codingType == MP4_VOP_TYPE_I || codingType == MP4_VOP_TYPE_P) {
+        if (codingType != MP4_VOP_TYPE_B) {
             mLastVOPCodingType = codingType;
             mLastVOPTimeIncrement = picData->vop_time_increment;
         }
--
cgit v1.2.3

From 5280be67d5b9cd9dcef25d7b78c5907fd0122c02 Mon Sep 17 00:00:00 2001
From: Kun Jiang
Date: Fri, 7 Dec 2012 13:17:53 +0800
Subject: [PORT FROM MAIN] framework: MediaScan stops working after adb pushing
 a special file to the phone

BZ: 72174

Testing shows the hang happens in AsfExtractor.cpp: initialize() spins in
its for(;;) loop forever. When a malformed object reports objectSize == 0,
the offset never advances, so readAt() keeps returning 24 for the same
position and the loop never terminates. Normally offset + objectSize lands
on the next object header, but this (possibly hand-crafted) file leads to
a bogus header instead. For the name of the offending file, see patches
80715 and 79686 (Patch Set 9).
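In sketch form (hypothetical helper names; the real loop lives in
AsfExtractor::initialize()), the hardened walk over top-level ASF objects
becomes:

    for (;;) {
        if (read_at(offset, header, 24) != 24)
            break;                  /* end of file */
        object_size = *(int64_t *)(header + 16);
        if (is_simple_index_object(header))
            break;                  /* found the seek index */
        offset += object_size;
        if (object_size == 0) {
            /* A zero-sized object would otherwise pin offset in place
             * forever; skip at least the 24-byte object header so the
             * scan always advances. */
            offset += 24;
        }
    }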
Change-Id: Idcc458e23c7f31275559883e2fff42fd54e95fd1 Signed-off-by: Kun Jiang Reviewed-on: http://android.intel.com:8080/82326 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: cactus Tested-by: cactus --- frameworks/asf_extractor/AsfExtractor.cpp | 763 ++++++++++++++++++++++++++++++ 1 file changed, 763 insertions(+) create mode 100644 frameworks/asf_extractor/AsfExtractor.cpp diff --git a/frameworks/asf_extractor/AsfExtractor.cpp b/frameworks/asf_extractor/AsfExtractor.cpp new file mode 100644 index 0000000..9c1aff4 --- /dev/null +++ b/frameworks/asf_extractor/AsfExtractor.cpp @@ -0,0 +1,763 @@ +/************************************************************************************* + * INTEL CONFIDENTIAL + * Copyright 2011 Intel Corporation All Rights Reserved. + * The source code contained or described herein and all documents related + * to the source code ("Material") are owned by Intel Corporation or its + * suppliers or licensors. Title to the Material remains with Intel + * Corporation or its suppliers and licensors. The Material contains trade + * secrets and proprietary and confidential information of Intel or its + * suppliers and licensors. The Material is protected by worldwide copyright + * and trade secret laws and treaty provisions. No part of the Material may + * be used, copied, reproduced, modified, published, uploaded, posted, + * transmitted, distributed, or disclosed in any way without Intel's prior + * express written permission. + * + * No license under any patent, copyright, trade secret or other intellectual + * property right is granted to or conferred upon you by disclosure or delivery + * of the Materials, either expressly, by implication, inducement, estoppel or + * otherwise. Any license under such intellectual property rights must be express + * and approved by Intel in writing. + ************************************************************************************/ +/* +* Copyright (C) 2011 The Android Open Source Project +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + + +//#define LOG_NDEBUG 0 +#define LOG_TAG "AsfExtractor" +#include + +#include + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "MetaDataExt.h" +#include "MediaBufferPool.h" +#include "AsfStreamParser.h" +#include "AsfExtractor.h" + + +namespace android { + +// The audio format tags that represent the input categories supported +// by the Windows Media Audio decoder, don't change it +enum WMAAudioFormats { + WAVE_FORMAT_MSAUDIO1 = 0x160, + WAVE_FORMAT_WMAUDIO2 = 0x161, + WAVE_FORMAT_WMAUDIO3X = 0x162, + WAVE_FORMAT_WMAUDIO_LOSSLESS = 0x163, + WAVE_FORMAT_WMAVOICE9 = 0x000A, + WAVE_FORMAT_WMAVOICE10 = 0x000B, +}; + +class ASFSource : public MediaSource { +public: + ASFSource(const sp &extractor, int trackIndex) + : mExtractor(extractor), + mTrackIndex(trackIndex) { + } + + virtual status_t start(MetaData *params = NULL) { + return OK; + } + + virtual status_t stop() { + return OK; + } + + virtual sp getFormat() { + return mExtractor->getTrackMetaData(mTrackIndex, 0); + } + + virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL) { + return mExtractor->read(mTrackIndex, buffer, options); + } + +protected: + virtual ~ASFSource() { + mExtractor = NULL; + } + +private: + sp mExtractor; + int mTrackIndex; + + ASFSource(const ASFSource &); + ASFSource &operator=(const ASFSource &); +}; + + +AsfExtractor::AsfExtractor(const sp &source) + : mDataSource(source), + mInitialized(false), + mHasIndexObject(false), + mFirstTrack(NULL), + mLastTrack(NULL), + mReadLock(), + mFileMetaData(new MetaData), + mParser(NULL), + mHeaderObjectSize(0), + mDataObjectSize(0), + mDataPacketBeginOffset(0), + mDataPacketEndOffset(0), + mDataPacketCurrentOffset(0), + mDataPacketSize(0), + mDataPacketData(NULL) { + mParser = new AsfStreamParser; +} + +AsfExtractor::~AsfExtractor() { + uninitialize(); + mDataSource = NULL; + mFileMetaData = NULL; + delete mParser; + mParser = NULL; +} + +sp AsfExtractor::getMetaData() { + status_t err = initialize(); + if (err != OK) { + return new MetaData; + } + + return mFileMetaData; +} + +size_t AsfExtractor::countTracks() { + status_t err = initialize(); + if (err != OK) { + return 0; + } + + size_t n = 0; + Track *track = mFirstTrack; + while (track) { + ++n; + track = track->next; + } + + ALOGV("track count is %d", n); + return n; +} + +sp AsfExtractor::getTrackMetaData(size_t index, uint32_t flags) { + status_t err = initialize(); + if (err != OK) { + return NULL; + } + + Track *track = getTrackByTrackIndex(index); + if (track == NULL) { + return NULL; + } + + // There is no thumbnail data so ignore flags: kIncludeExtensiveMetaData + return track->meta; +} + +sp AsfExtractor::getTrack(size_t index) { + status_t err; + if ((err = initialize()) != OK) { + return NULL; + } + + Track *track = getTrackByTrackIndex(index); + if (track == NULL) { + return NULL; + } + + // Assume this track is active + track->skipTrack = false; + return new ASFSource(this, index); +} + +status_t AsfExtractor::read( + int trackIndex, + MediaBuffer **buffer, + const MediaSource::ReadOptions *options) { + Track *track = getTrackByTrackIndex(trackIndex); + if (track == NULL) { + return BAD_VALUE; + } + + int64_t seekTimeUs; + MediaSource::ReadOptions::SeekMode mode; + if (!mParser->hasVideo() || (mParser->hasVideo() && mHasIndexObject)) { + if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) { + status_t err = seek_l(track, seekTimeUs, mode); + if (err != OK) { + return err; + } + } 
+ } else { + ALOGW("No index object. Seek may not be supported!!!"); + } + + return read_l(track, buffer); +} + +status_t AsfExtractor::initialize() { + if (mInitialized) { + return OK; + } + + status_t status = OK; + // header object is the first mandatory object. The first 16 bytes + // is GUID of object, the following 8 bytes is size of object + if (mDataSource->readAt(16, &mHeaderObjectSize, 8) != 8) { + return ERROR_IO; + } + + uint8_t* headerObjectData = new uint8_t [mHeaderObjectSize]; + if (headerObjectData == NULL) { + return NO_MEMORY; + } + + if (mDataSource->readAt(0, headerObjectData, mHeaderObjectSize) != mHeaderObjectSize) { + return ERROR_IO; + } + status = mParser->parseHeaderObject(headerObjectData, mHeaderObjectSize); + if (status != ASF_PARSER_SUCCESS) { + ALOGE("Failed to parse header object."); + return ERROR_MALFORMED; + } + + delete [] headerObjectData; + headerObjectData = NULL; + + uint8_t dataObjectHeaderData[ASF_DATA_OBJECT_HEADER_SIZE]; + if (mDataSource->readAt(mHeaderObjectSize, dataObjectHeaderData, ASF_DATA_OBJECT_HEADER_SIZE) + != ASF_DATA_OBJECT_HEADER_SIZE) { + return ERROR_IO; + } + status = mParser->parseDataObjectHeader(dataObjectHeaderData, ASF_DATA_OBJECT_HEADER_SIZE); + if (status != ASF_PARSER_SUCCESS) { + ALOGE("Failed to parse data object header."); + return ERROR_MALFORMED; + } + + // first 16 bytes is GUID of data object + mDataObjectSize = *(uint64_t*)(dataObjectHeaderData + 16); + mDataPacketBeginOffset = mHeaderObjectSize + ASF_DATA_OBJECT_HEADER_SIZE; + mDataPacketEndOffset = mHeaderObjectSize + mDataObjectSize; + mDataPacketCurrentOffset = mDataPacketBeginOffset; + + // allocate memory for data packet + mDataPacketSize = mParser->getDataPacketSize(); + mDataPacketData = new uint8_t [mDataPacketSize]; + if (mDataPacketData == NULL) { + return NO_MEMORY; + } + + const AsfFileMediaInfo *fileMediaInfo = mParser->getFileInfo(); + if (fileMediaInfo && fileMediaInfo->seekable) { + uint64_t offset = mDataPacketEndOffset; + + // Find simple index object for time seeking. + // object header include 16 bytes of object GUID and 8 bytes of object size. + uint8_t objectHeader[24]; + int64_t objectSize; + for (;;) { + if (mDataSource->readAt(offset, objectHeader, 24) != 24) { + break; + } + + objectSize = *(int64_t *)(objectHeader + 16); + if (!AsfStreamParser::isSimpleIndexObject(objectHeader)) { + offset += objectSize; + if (objectSize == 0) { + ALOGW("WARN: The file's objectSize is zero,ingore this header."); + offset += 24; + } + continue; + } + mHasIndexObject = true; + uint8_t* indexObjectData = new uint8_t [objectSize]; + if (indexObjectData == NULL) { + // don't report as error, we just lose time seeking capability. 
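+                    // Playback still works without the index object: read()
+                    // bypasses the seek path and only time-based seeking is lost.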
+ break; + } + if (mDataSource->readAt(offset, indexObjectData, objectSize) == objectSize) { + // Ignore return value + mParser->parseSimpleIndexObject(indexObjectData, objectSize); + } + delete [] indexObjectData; + break; + } + } + + if (mParser->hasVideo()) { + ALOGV("MEDIA_MIMETYPE_CONTAINER_ASF"); + mFileMetaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_ASF); + } else if (mParser->hasAudio() && mParser->getAudioInfo()->codecID >= WAVE_FORMAT_MSAUDIO1 && + mParser->getAudioInfo()->codecID <= WAVE_FORMAT_WMAUDIO_LOSSLESS) { + LOGV("MEDIA_MIMETYPE_AUDIO_WMA", mParser->getAudioInfo()->codecID); + mFileMetaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_WMA); + } else { + ALOGE("Content does not have neither audio nor video."); + return ERROR_UNSUPPORTED; + } + + // duration returned from parser is in 100-nanosecond unit, converted it to microseconds (us) + ALOGV("Duration is %.2f (sec)", mParser->getDuration()/1E7); + mFileMetaData->setInt64(kKeyDuration, mParser->getDuration() / SCALE_100_NANOSEC_TO_USEC); + + setupTracks(); + mInitialized = true; + return OK; +} + +void AsfExtractor::uninitialize() { + if (mDataPacketData) { + delete [] mDataPacketData; + mDataPacketData = NULL; + } + mDataPacketSize = 0; + + Track* track = mFirstTrack; + MediaBuffer* p; + while (track != NULL) { + track->meta = NULL; + if (track->bufferActive) { + track->bufferActive->release(); + track->bufferActive = NULL; + } + + int size = track->bufferQueue.size(); + for (int i = 0; i < size; i++) { + p = track->bufferQueue.editItemAt(i); + p->release(); + } + + track->bufferQueue.clear(); + delete track->bufferPool; + + track->meta = NULL; + mFirstTrack = track->next; + delete track; + track = mFirstTrack; + } + mFirstTrack = NULL; + mLastTrack = NULL; +} + +static const char* FourCC2MIME(uint32_t fourcc) { + // The first charater of FOURCC characters appears in the least-significant byte + // WVC1 => 0x31435657 + switch (fourcc) { + //case FOURCC('W', 'M', 'V', '1'): + //case FOURCC('W', 'M', 'V', '2'): + //case FOURCC('W', 'M', 'V', 'A'): + case FOURCC('1', 'V', 'M', 'W'): + ALOGW("WMV1 format is not supported."); + return "video/wmv1"; + case FOURCC('2', 'V', 'M', 'W'): + ALOGW("WMV2 format is not supported."); + return "video/wmv2"; + case FOURCC('A', 'V', 'M', 'W'): + ALOGW("WMV Advanced profile, assuming as WVC1 for now"); + return MEDIA_MIMETYPE_VIDEO_WMV; + //case FOURCC('W', 'M', 'V', '3'): + //case FOURCC('W', 'V', 'C', '1'): + case FOURCC('3', 'V', 'M', 'W'): + case FOURCC('1', 'C', 'V', 'W'): + return MEDIA_MIMETYPE_VIDEO_WMV; + default: + ALOGE("Unknown video format."); + return "video/unknown-type"; + } +} + +static const char* CodecID2MIME(uint32_t codecID) { + switch (codecID) { + // WMA version 1 + case WAVE_FORMAT_MSAUDIO1: + // WMA version 2 (7, 8, 9 series) + case WAVE_FORMAT_WMAUDIO2: + return MEDIA_MIMETYPE_AUDIO_WMA; + // WMA 9 lossless + case WAVE_FORMAT_WMAUDIO_LOSSLESS: + //return MEDIA_MIMETYPE_AUDIO_WMA_LOSSLESS; + return MEDIA_MIMETYPE_AUDIO_WMA; + // WMA voice 9 + case WAVE_FORMAT_WMAVOICE9: + // WMA voice 10 + case WAVE_FORMAT_WMAVOICE10: + ALOGW("WMA voice 9/10 is not supported."); + return "audio/wma-voice"; + default: + ALOGE("Unsupported Audio codec ID: %#x", codecID); + return "audio/unknown-type"; + } +} + + +status_t AsfExtractor::setupTracks() { + AsfAudioStreamInfo* audioInfo = mParser->getAudioInfo(); + AsfVideoStreamInfo* videoInfo = mParser->getVideoInfo(); + Track* track; + while (audioInfo || videoInfo) { + track = new Track; + if (mLastTrack == NULL) { + 
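+            // First track seen: it becomes both head and tail of the singly
+            // linked track list walked by countTracks() and the index lookups.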
mFirstTrack = track; + mLastTrack = track; + } else { + mLastTrack->next = track; + mLastTrack = track; + } + + // this flag will be set to false within getTrack + track->skipTrack = true; + track->seekCompleted = false; + track->next = NULL; + track->meta = new MetaData; + track->bufferActive = NULL; + track->bufferPool = new MediaBufferPool; + + if (audioInfo) { + LOGV("streamNumber = %d\n, encryptedContentFlag= %d\n, timeOffset = %lld\n, + codecID = %d\n, numChannels=%d\n, sampleRate=%d\n, avgBitRate = %d\n, + blockAlignment =%d\n, bitsPerSample=%d\n, codecDataSize=%d\n", + audioInfo->streamNumber, audioInfo->encryptedContentFlag, + audioInfo->timeOffset, audioInfo->codecID, audioInfo->numChannels, + audioInfo->sampleRate, audioInfo->avgByteRate*8, audioInfo->blockAlignment, + audioInfo->bitsPerSample, audioInfo->codecDataSize); + + track->streamNumber = audioInfo->streamNumber; + track->encrypted = audioInfo->encryptedContentFlag; + track->meta->setInt32(kKeyChannelCount, audioInfo->numChannels); + track->meta->setInt32(kKeySampleRate, audioInfo->sampleRate); + track->meta->setInt32(kKeyWmaBlockAlign, audioInfo->blockAlignment); + track->meta->setInt32(kKeyBitPerSample, audioInfo->bitsPerSample); + track->meta->setInt32(kKeyBitRate, audioInfo->avgByteRate*8); + track->meta->setInt32(kKeyWmaFormatTag, audioInfo->codecID); + + if (audioInfo->codecDataSize) { + track->meta->setData( + kKeyConfigData, + kTypeConfigData, + audioInfo->codecData, + audioInfo->codecDataSize); + } + // duration returned is in 100-nanosecond unit + track->meta->setInt64(kKeyDuration, mParser->getDuration() / SCALE_100_NANOSEC_TO_USEC); + track->meta->setCString(kKeyMIMEType, CodecID2MIME(audioInfo->codecID)); + track->meta->setInt32(kKeySuggestedBufferSize, mParser->getDataPacketSize()); + audioInfo = audioInfo->next; + } else { + track->streamNumber = videoInfo->streamNumber; + track->encrypted = videoInfo->encryptedContentFlag; + track->meta->setInt32(kKeyWidth, videoInfo->width); + track->meta->setInt32(kKeyHeight, videoInfo->height); + if (videoInfo->codecDataSize) { + track->meta->setData( + kKeyConfigData, + kTypeConfigData, + videoInfo->codecData, + videoInfo->codecDataSize); + } + // duration returned is in 100-nanosecond unit + track->meta->setInt64(kKeyDuration, mParser->getDuration() / SCALE_100_NANOSEC_TO_USEC); + track->meta->setCString(kKeyMIMEType, FourCC2MIME(videoInfo->fourCC)); + int maxSize = mParser->getMaxObjectSize(); + if (maxSize == 0) { + // estimated maximum packet size. + maxSize = 10 * mParser->getDataPacketSize(); + } + track->meta->setInt32(kKeySuggestedBufferSize, maxSize); + if (mHasIndexObject) { + // set arbitary thumbnail time + track->meta->setInt64(kKeyThumbnailTime, mParser->getDuration() / (SCALE_100_NANOSEC_TO_USEC * 2)); + } else { + track->meta->setInt64(kKeyThumbnailTime, 0); + } + videoInfo = videoInfo->next; + } + } + + return OK; +} + +status_t AsfExtractor::seek_l(Track* track, int64_t seekTimeUs, MediaSource::ReadOptions::SeekMode mode) { + Mutex::Autolock lockSeek(mReadLock); + + // It is expected seeking will happen on all the tracks with the same seeking options. + // Only the first track receiving the seeking command will perform seeking and all other + // tracks just siliently ignore it. 
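+    // The handshake assumes all tracks are read by one player with the same
+    // seek options: the driving track calls mParser->seek() and then marks
+    // seekCompleted on every other track (see the flush loop at the bottom
+    // of seek_l()).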
+ + // TODO: potential problems in the following case: + // audio seek + // video read + // video seek + // video read + + if (track->seekCompleted) { + // seeking is completed through a different track + track->seekCompleted = false; + return OK; + } + + uint64_t targetSampleTimeUs = 0; + + // seek to next sync sample or previous sync sample + bool nextSync = false; + switch (mode) { + case MediaSource::ReadOptions::SEEK_NEXT_SYNC: + nextSync = true; + break; + // Always seek to the closest previous sync frame + case MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC: + case MediaSource::ReadOptions::SEEK_CLOSEST_SYNC: + + // Not supported, already seek to sync frame, so will not set kKeyTargetTime on bufferActive. + case MediaSource::ReadOptions::SEEK_CLOSEST: + default: + break; + } + + uint32_t packetNumber; + uint64_t targetTime; + // parser takes seek time in 100-nanosecond unit and returns target time in 100-nanosecond as well. + if (!mParser->seek(seekTimeUs * SCALE_100_NANOSEC_TO_USEC, nextSync, packetNumber, targetTime)) { + ALOGV("Seeking failed."); + return ERROR_END_OF_STREAM; + } + ALOGV("seek time = %.2f secs, actual time = %.2f secs", seekTimeUs/1E6, targetTime / 1E7); + + // convert to microseconds + targetSampleTimeUs = targetTime / SCALE_100_NANOSEC_TO_USEC; + mDataPacketCurrentOffset = mDataPacketBeginOffset + packetNumber * mDataPacketSize; + ALOGV("data packet offset = %lld", mDataPacketCurrentOffset); + + // flush all pending buffers on all the tracks + Track* temp = mFirstTrack; + while (temp != NULL) { + Mutex::Autolock lockTrack(temp->lock); + if (temp->bufferActive) { + temp->bufferActive->release(); + temp->bufferActive = NULL; + } + + int size = temp->bufferQueue.size(); + for (int i = 0; i < size; i++) { + MediaBuffer* buffer = temp->bufferQueue.editItemAt(i); + buffer->release(); + } + temp->bufferQueue.clear(); + + if (temp != track) { + // notify all other tracks seeking is completed. + // this flag is reset when seeking request is made on each track. + // don't set this flag on the driving track so a new seek can be made. + temp->seekCompleted = true; + } + temp = temp->next; + } + + return OK; +} + +status_t AsfExtractor::read_l(Track *track, MediaBuffer **buffer) { + status_t err = OK; + while (err == OK) { + Mutex::Autolock lock(track->lock); + if (track->bufferQueue.size() != 0) { + *buffer = track->bufferQueue[0]; + track->bufferQueue.removeAt(0); + return OK; + } + track->lock.unlock(); + + err = readPacket(); + } + ALOGE("read_l failed."); + return err; +} + +status_t AsfExtractor::readPacket() { + Mutex::Autolock lock(mReadLock); + if (mDataPacketCurrentOffset + mDataPacketSize > mDataPacketEndOffset) { + ALOGI("readPacket hits end of stream."); + return ERROR_END_OF_STREAM; + } + + if (mDataSource->readAt(mDataPacketCurrentOffset, mDataPacketData, mDataPacketSize) != + mDataPacketSize) { + return ERROR_END_OF_STREAM; + } + + // update next read position + mDataPacketCurrentOffset += mDataPacketSize; + AsfPayloadDataInfo *payloads = NULL; + int status = mParser->parseDataPacket(mDataPacketData, mDataPacketSize, &payloads); + if (status != ASF_PARSER_SUCCESS || payloads == NULL) { + ALOGE("Failed to parse data packet. 
status = %d", status); + return ERROR_END_OF_STREAM; + } + + AsfPayloadDataInfo* payload = payloads; + while (payload) { + Track* track = getTrackByStreamNumber(payload->streamNumber); + if (track == NULL || track->skipTrack) { + payload = payload->next; + continue; + } + if (payload->mediaObjectLength == payload->payloadSize || + payload->offsetIntoMediaObject == 0) { + // a comple object or the first payload of fragmented object + MediaBuffer *buffer = NULL; + status = track->bufferPool->acquire_buffer( + payload->mediaObjectLength, &buffer); + if (status != OK) { + ALOGE("Failed to acquire buffer."); + mParser->releasePayloadDataInfo(payloads); + return status; + } + memcpy(buffer->data(), + payload->payloadData, + payload->payloadSize); + + buffer->set_range(0, payload->mediaObjectLength); + // kKeyTime is in microsecond unit (usecs) + // presentationTime is in mililsecond unit (ms) + buffer->meta_data()->setInt64(kKeyTime,(uint64_t) payload->presentationTime * 1000); + + if (payload->keyframe) { + buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); + } + + if (payload->mediaObjectLength == payload->payloadSize) { + Mutex::Autolock lockTrack(track->lock); + // a complete object + track->bufferQueue.push(buffer); + } else { + // the first payload of a fragmented object + track->bufferActive = buffer; + if (track->encrypted) { + Mutex::Autolock lockTrack(track->lock); + MediaBuffer* copy = NULL; + track->bufferPool->acquire_buffer(payload->payloadSize, ©); + copy->meta_data()->setInt64(kKeyTime,(uint64_t) payload->presentationTime * 1000); + memcpy(copy->data(), payload->payloadData, payload->payloadSize); + copy->set_range(0, payload->payloadSize); + track->bufferQueue.push(copy); + } + } + } else { + if (track->bufferActive == NULL) { + ALOGE("Receiving corrupt or discontinuous data packet."); + payload = payload->next; + continue; + } + // TODO: check object number and buffer size!!!!!!!!!!!!!! + // the last payload or the middle payload of a fragmented object + memcpy( + (uint8_t*)track->bufferActive->data() + payload->offsetIntoMediaObject, + payload->payloadData, + payload->payloadSize); + + if (payload->offsetIntoMediaObject + payload->payloadSize == + payload->mediaObjectLength) { + // the last payload of a fragmented object + // for encrypted content, push a cloned media buffer to vector instead. 
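+                // Either way the queued buffer is bufferActive, which has
+                // accumulated every fragment of this media object; the
+                // encrypted branch first narrows the range to the final
+                // payload before queueing it.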
+ if (!track->encrypted) + { + Mutex::Autolock lockTrack(track->lock); + track->bufferQueue.push(track->bufferActive); + track->bufferActive = NULL; + } else { + Mutex::Autolock lockTrack(track->lock); + track->bufferActive->set_range(payload->offsetIntoMediaObject, payload->payloadSize); + track->bufferQueue.push(track->bufferActive); + track->bufferActive = NULL; + } + } else { + // middle payload of a fragmented object + if (track->encrypted) { + Mutex::Autolock lockTrack(track->lock); + MediaBuffer* copy = NULL; + int64_t keytime; + track->bufferPool->acquire_buffer(payload->payloadSize, ©); + track->bufferActive->meta_data()->findInt64(kKeyTime, &keytime); + copy->meta_data()->setInt64(kKeyTime, keytime); + memcpy(copy->data(), payload->payloadData, payload->payloadSize); + copy->set_range(0, payload->payloadSize); + track->bufferQueue.push(copy); + } + } + } + payload = payload->next; + }; + + mParser->releasePayloadDataInfo(payloads); + return OK; +} + +AsfExtractor::Track* AsfExtractor::getTrackByTrackIndex(int index) { + Track *track = mFirstTrack; + while (index > 0) { + if (track == NULL) { + return NULL; + } + + track = track->next; + --index; + } + return track; +} + +AsfExtractor::Track* AsfExtractor::getTrackByStreamNumber(int stream) { + Track *track = mFirstTrack; + while (track != NULL) { + if (track->streamNumber == stream) { + return track; + } + track = track->next; + } + return NULL; +} + +bool SniffAsf( + const sp &source, + String8 *mimeType, + float *confidence, + sp *) { + uint8_t guid[16]; + if (source->readAt(0, guid, 16) != 16) { + return false; + } + if (!AsfStreamParser::isHeaderObject(guid)) { + return false; + } + + *mimeType = MEDIA_MIMETYPE_CONTAINER_ASF; + *confidence = 0.4f; + return true; +} + +} // namespace android + -- cgit v1.2.3 From 045e38ce8d0628c6d9881feba2dd066ee6a8f7a9 Mon Sep 17 00:00:00 2001 From: nguo Date: Wed, 5 Dec 2012 10:08:59 +0800 Subject: [PORT FROM MAIN] HW JPEG Decoder libva implement on MRFLD BZ: 69051 libVA API part in libmix/imagedecoder Change-Id: I20957c9ab31733f703ee2a730ede921c02b7f132 Signed-off-by: nguo Signed-off-by: Dan Liang Reviewed-on: http://android.intel.com:8080/81202 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: cactus Tested-by: cactus --- Android.mk | 2 +- imagedecoder/Android.mk | 37 ++ imagedecoder/ImageDecoderTrace.c | 45 +++ imagedecoder/ImageDecoderTrace.h | 78 ++++ imagedecoder/JPEGDecoder.c | 746 +++++++++++++++++++++++++++++++++++++++ imagedecoder/JPEGDecoder.h | 103 ++++++ imagedecoder/JPEGParser.c | 118 +++++++ imagedecoder/JPEGParser.h | 108 ++++++ 8 files changed, 1236 insertions(+), 1 deletion(-) create mode 100644 imagedecoder/Android.mk create mode 100644 imagedecoder/ImageDecoderTrace.c create mode 100644 imagedecoder/ImageDecoderTrace.h create mode 100644 imagedecoder/JPEGDecoder.c create mode 100644 imagedecoder/JPEGDecoder.h create mode 100644 imagedecoder/JPEGParser.c create mode 100644 imagedecoder/JPEGParser.h diff --git a/Android.mk b/Android.mk index 2948f35..17d0811 100644 --- a/Android.mk +++ b/Android.mk @@ -8,5 +8,5 @@ include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_vbp/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk - +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/imagedecoder/Android.mk endif diff --git a/imagedecoder/Android.mk b/imagedecoder/Android.mk new file mode 100644 index 0000000..3c5f87c --- /dev/null +++ 
b/imagedecoder/Android.mk @@ -0,0 +1,37 @@ +#ifeq ($(strip $(USE_INTEL_JPEGDEC)),true) + +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES += \ + JPEGDecoder.c \ + JPEGParser.c \ + ImageDecoderTrace.c + +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH) \ + $(TARGET_OUT_HEADERS)/libva + +LOCAL_COPY_HEADERS_TO := libjpeg_hw + +LOCAL_COPY_HEADERS := \ + JPEGDecoder.h \ + JPEGParser.h \ + ImageDecoderTrace.h + +LOCAL_SHARED_LIBRARIES += \ + libcutils \ + libva-android \ + libva \ + libva-tpi \ + +LOCAL_LDLIBS += -lpthread +LOCAL_CFLAGS += -Wno-multichar +LOCAL_CFLAGS += -DUSE_INTEL_JPEGDEC +LOCAL_MODULE:= libjpeg_hw +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) + +#endif + diff --git a/imagedecoder/ImageDecoderTrace.c b/imagedecoder/ImageDecoderTrace.c new file mode 100644 index 0000000..3336b85 --- /dev/null +++ b/imagedecoder/ImageDecoderTrace.c @@ -0,0 +1,45 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + + +#include "ImageDecoderTrace.h" + +#ifdef ENABLE_IMAGE_DECODER_TRACE + +void TraceImageDecoder(const char* cat, const char* fun, int line, const char* format, ...) +{ + if (NULL == cat || NULL == fun || NULL == format) + return; + + printf("%s %s(#%d): ", cat, fun, line); + va_list args; + va_start(args, format); + vprintf(format, args); + va_end(args); + printf("\n"); +} + +#endif + diff --git a/imagedecoder/ImageDecoderTrace.h b/imagedecoder/ImageDecoderTrace.h new file mode 100644 index 0000000..1f67415 --- /dev/null +++ b/imagedecoder/ImageDecoderTrace.h @@ -0,0 +1,78 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. 
+* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + +#ifndef IMAGE_DECODER_TRACE_H_ +#define IMAGE_DECODER_TRACE_H_ + + +#define ENABLE_IMAGE_DECODER_TRACE +//#define ANDROID + + +#ifdef ENABLE_IMAGE_DECODER_TRACE + +#ifndef ANDROID + +#include +#include + +extern void TraceImageDecoder(const char* cat, const char* fun, int line, const char* format, ...); +#define IMAGE_DECODER_TRACE(cat, format, ...) \ +TraceImageDecoder(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) + +#define ETRACE(format, ...) IMAGE_DECODER_TRACE("ERROR: ", format, ##__VA_ARGS__) +#define WTRACE(format, ...) IMAGE_DECODER_TRACE("WARNING: ", format, ##__VA_ARGS__) +#define ITRACE(format, ...) IMAGE_DECODER_TRACE("INFO: ", format, ##__VA_ARGS__) +#define VTRACE(format, ...) IMAGE_DECODER_TRACE("VERBOSE: ", format, ##__VA_ARGS__) + +#else +// for Android OS + +//#define LOG_NDEBUG 0 + +#define LOG_TAG "ImageDecoder" + +#include +#define ETRACE(...) ALOGE(__VA_ARGS__) +#define WTRACE(...) ALOGW(__VA_ARGS__) +#define ITRACE(...) ALOGI(__VA_ARGS__) +#define VTRACE(...) ALOGV(__VA_ARGS__) + +#endif + + +#else + +#define ETRACE(format, ...) +#define WTRACE(format, ...) +#define ITRACE(format, ...) +#define VTRACE(format, ...) + + +#endif /* ENABLE_VIDEO_DECODER_TRACE*/ + +#endif /*IMAGE_DECODER_TRACE_H_*/ + + diff --git a/imagedecoder/JPEGDecoder.c b/imagedecoder/JPEGDecoder.c new file mode 100644 index 0000000..b1ecb49 --- /dev/null +++ b/imagedecoder/JPEGDecoder.c @@ -0,0 +1,746 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +* Authors: +* Nana Guo +* +*/ + +#include "va/va_tpi.h" +#include "JPEGDecoder.h" +#include "ImageDecoderTrace.h" +#include "JPEGParser.h" +#include + +#define JPEG_MAX_SETS_HUFFMAN_TABLES 2 + +#define TABLE_CLASS_DC 0 +#define TABLE_CLASS_AC 1 +#define TABLE_CLASS_NUM 2 + +/* + * Initialize VA API related stuff + * + * We will check the return value of jva_initialize + * to determine which path will be use (SW or HW) + * + */ +Decode_Status jdva_initialize (jd_libva_struct * jd_libva_ptr) { + /* + * Please note that we won't check the input parameters to follow the + * convention of libjpeg duo to we need these parameters to do error handling, + * and if these parameters are invalid, means the whole stack is crashed, so check + * them here and return false is meaningless, same situation for all internal methods + * related to VA API + */ + int va_major_version; + int va_minor_version; +#if 0 + int va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; + int va_num_profiles, va_num_entrypoints; + + VAProfile *va_profiles = NULL; + VAEntrypoint *va_entrypoints = NULL; +#endif + VAStatus va_status = VA_STATUS_SUCCESS; + Decode_Status status = DECODE_SUCCESS; + int index; + + if (jd_libva_ptr->initialized) + return DECODE_NOT_STARTED; + + jd_libva_ptr->android_display = (Display*)malloc(sizeof(Display)); + if (jd_libva_ptr->android_display == NULL) { + return DECODE_MEMORY_FAIL; + } + jd_libva_ptr->va_display = vaGetDisplay (jd_libva_ptr->android_display); + + if (jd_libva_ptr->va_display == NULL) { + ETRACE("vaGetDisplay failed."); + free (jd_libva_ptr->android_display); + return DECODE_DRIVER_FAIL; + } + va_status = vaInitialize(jd_libva_ptr->va_display, &va_major_version, &va_minor_version); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaInitialize failed. va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + goto cleanup; + } +#if 0 + /*get the max number for profiles/entrypoints/attribs*/ + va_max_num_profiles = vaMaxNumProfiles(jd_libva_ptr->va_display); + va_max_num_entrypoints = vaMaxNumEntrypoints(jd_libva_ptr->va_display); + va_max_num_attribs = vaMaxNumConfigAttributes(jd_libva_ptr->va_display); + + va_profiles = malloc (sizeof(VAProfile)*va_max_num_profiles); + va_entrypoints = malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); + + if (va_profiles == NULL || va_entrypoints ==NULL) { + jd_libva_ptr->initialized = TRUE; // make sure we can call into jva_deinitialize() + jdva_deinitialize (jd_libva_ptr); + return DECODE_MEMORY_FAIL; + } + + va_status = vaQueryConfigProfiles (jd_libva_ptr->va_display, va_profiles, &va_num_profiles); + + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaQueryConfigProfiles failed. va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + goto cleanup; + } + + /*check whether profile is supported*/ + for(index= 0; index < va_num_profiles; index++) { + if(VAProfileJPEGBaseline == va_profiles[index]) + break; + } + + if(index == va_num_profiles) { + WTRACE("Profile not surportted\n"); + status = DECODE_FAIL; + goto cleanup; + + } + + va_status = vaQueryConfigEntrypoints(jd_libva_ptr->va_display, VAProfileJPEGBaseline, va_entrypoints, &va_num_entrypoints); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaQueryConfigProfiles failed. 
va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + goto cleanup; + } + /* traverse entrypoints arrary to see whether VLD is there */ + for (index = 0; index < va_num_entrypoints; index ++) { + if (va_entrypoints[index] == VAEntrypointVLD) + break; + } +#endif + VAConfigAttrib attrib; + attrib.type = VAConfigAttribRTFormat; + va_status = vaGetConfigAttributes(jd_libva_ptr->va_display, VAProfileJPEGBaseline, VAEntrypointVLD, &attrib, 1); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaGetConfigAttributes failed. va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + goto cleanup; + } + if ((VA_RT_FORMAT_YUV444 & attrib.value) == 0) { + WTRACE("Format not surportted\n"); + status = DECODE_FAIL; + goto cleanup; + } + + va_status = vaCreateConfig(jd_libva_ptr->va_display, VAProfileJPEGBaseline, VAEntrypointVLD, &attrib, 1, &(jd_libva_ptr->va_config)); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateConfig failed. va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + goto cleanup; + } + jd_libva_ptr->initialized = TRUE; + status = DECODE_SUCCESS; + +cleanup: +#if 0 + /*free profiles and entrypoints*/ + if (va_profiles) + free(va_profiles); + + if (va_entrypoints) + free (va_entrypoints); +#endif + if (!status) { + jd_libva_ptr->initialized = TRUE; // make sure we can call into jva_deinitialize() + jdva_deinitialize (jd_libva_ptr); + return status; + } + + return status; +} + +void jdva_deinitialize (jd_libva_struct * jd_libva_ptr) { + if (!(jd_libva_ptr->initialized)) { + return; + } + + if (jd_libva_ptr->JPEGParser) { + free(jd_libva_ptr->JPEGParser); + jd_libva_ptr->JPEGParser = NULL; + } + + if (jd_libva_ptr->va_display) { + vaTerminate(jd_libva_ptr->va_display); + jd_libva_ptr->va_display = NULL; + } + + if (jd_libva_ptr->android_display) { + free(jd_libva_ptr->android_display); + jd_libva_ptr->android_display = NULL; + } + + jd_libva_ptr->initialized = FALSE; + return; +} + +Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr) { + VAStatus va_status = VA_STATUS_SUCCESS; + Decode_Status status; + jd_libva_ptr->image_width = jd_libva_ptr->picture_param_buf.picture_width; + jd_libva_ptr->image_height = jd_libva_ptr->picture_param_buf.picture_height; + jd_libva_ptr->surface_count = 1; + jd_libva_ptr->va_surfaces = (VASurfaceID *) malloc(sizeof(VASurfaceID)*jd_libva_ptr->surface_count); + if (jd_libva_ptr->va_surfaces == NULL) { + return DECODE_MEMORY_FAIL; + } + va_status = vaCreateSurfaces(jd_libva_ptr->va_display, VA_RT_FORMAT_YUV444, + (((jd_libva_ptr->image_width + 7) & (~7)) + 15) & (~15), + (((jd_libva_ptr->image_width + 7) & (~7)) + 15) & (~15), + jd_libva_ptr->va_surfaces, + jd_libva_ptr->surface_count, NULL, 0); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateSurfaces failed. va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + goto cleanup; + } + va_status = vaCreateContext(jd_libva_ptr->va_display, jd_libva_ptr->va_config, + (( ( jd_libva_ptr->image_width + 7 ) & ( ~7 )) + 15 ) & ( ~15 ), + ((( jd_libva_ptr->image_height + 7 ) & ( ~7 )) + 15 ) & ( ~15 ), + 0, //VA_PROGRESSIVE + jd_libva_ptr->va_surfaces, + jd_libva_ptr->surface_count, &(jd_libva_ptr->va_context)); + + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateContext failed. 
va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + goto cleanup; + + } + jd_libva_ptr->resource_allocated = TRUE; + return status; +cleanup: + + if (jd_libva_ptr->va_surfaces) { + free (jd_libva_ptr->va_surfaces); + jd_libva_ptr->va_surfaces = NULL; + } + jdva_deinitialize (jd_libva_ptr); + + return status; +} + +Decode_Status jdva_release_resource (jd_libva_struct * jd_libva_ptr) { + Decode_Status status = DECODE_SUCCESS; + VAStatus va_status = VA_STATUS_SUCCESS; + + if (!(jd_libva_ptr->resource_allocated)) { + return status; + } + + if (!(jd_libva_ptr->va_display)) { + return status; //most likely the resource are already released and HW jpeg is deinitialize, return directly + } + + /* + * It is safe to destroy Surface/Config/Context severl times + * and it is also safe even their value is NULL + */ + + va_status = vaDestroySurfaces(jd_libva_ptr->va_display, jd_libva_ptr->va_surfaces, jd_libva_ptr->surface_count); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaDestroySurfaces failed. va_status = 0x%x", va_status); + return DECODE_DRIVER_FAIL; + } + + if (jd_libva_ptr->va_surfaces) { + free (jd_libva_ptr->va_surfaces); + jd_libva_ptr->va_surfaces = NULL; + } + + va_status = vaDestroyConfig(jd_libva_ptr->va_display, jd_libva_ptr->va_config); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaDestroyConfig failed. va_status = 0x%x", va_status); + return DECODE_DRIVER_FAIL; + } + + jd_libva_ptr->va_config = NULL; + + va_status = vaDestroyContext(jd_libva_ptr->va_display, jd_libva_ptr->va_context); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaDestroyContext failed. va_status = 0x%x", va_status); + return DECODE_DRIVER_FAIL; + } + + jd_libva_ptr->va_context = NULL; + + jd_libva_ptr->resource_allocated = FALSE; + + return va_status; +} + +Decode_Status jdva_decode (jd_libva_struct * jd_libva_ptr, uint8_t* buf) { + Decode_Status status = DECODE_SUCCESS; + VAStatus va_status = VA_STATUS_SUCCESS; + VABufferID desc_buf[5]; + uint32_t bitstream_buffer_size; + uint32_t scan_idx = 0; + uint32_t buf_idx = 0; + uint32_t chopping = VA_SLICE_DATA_FLAG_ALL; + uint32_t bytes_remaining = jd_libva_ptr->file_size; + uint32_t src_offset = 0; + bitstream_buffer_size = 1024*512*5; + + va_status = vaBeginPicture(jd_libva_ptr->va_display, jd_libva_ptr->va_context, jd_libva_ptr->va_surfaces[0]); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaBeginPicture failed. va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + return status; + } + va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAPictureParameterBufferType, sizeof(VAPictureParameterBufferJPEG), 1, &jd_libva_ptr->picture_param_buf, &desc_buf[buf_idx]); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateBuffer failed. va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + return status; + } + buf_idx++; + va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAIQMatrixBufferType, sizeof(VAIQMatrixParameterBufferJPEG), 1, &jd_libva_ptr->qmatrix_buf, &desc_buf[buf_idx]); + + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateBuffer failed. va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + return status; + } + buf_idx++; + va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAHuffmanTableBufferType, sizeof(VAHuffmanTableParameterBufferJPEG), 1, &jd_libva_ptr->hufman_table_buf, &desc_buf[buf_idx]); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateBuffer failed. 
va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + return status; + } + buf_idx++; + do { + /* Get Bitstream Buffer */ + uint32_t bytes = ( bytes_remaining < bitstream_buffer_size ) ? bytes_remaining : bitstream_buffer_size; + bytes_remaining -= bytes; + /* Get Slice Control Buffer */ + VASliceParameterBufferJPEG dest_scan_ctrl[JPEG_MAX_COMPONENTS]; + uint32_t src_idx; + uint32_t dest_idx = 0; + memset(dest_scan_ctrl, 0, sizeof(dest_scan_ctrl)); + for (src_idx = scan_idx; src_idx < jd_libva_ptr->scan_ctrl_count ; src_idx++) { + if (jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset) { + /* new scan, reset state machine */ + chopping = VA_SLICE_DATA_FLAG_ALL; + fprintf(stderr,"Scan:%i FileOffset:%x Bytes:%x \n", src_idx, + jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset, + jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_size ); + /* does the slice end in the buffer */ + if (jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset + jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_size > bytes + src_offset) { + chopping = VA_SLICE_DATA_FLAG_BEGIN; + } + } else { + if (jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_size > bytes) { + chopping = VA_SLICE_DATA_FLAG_MIDDLE; + } else { + if ((chopping == VA_SLICE_DATA_FLAG_BEGIN) || (chopping == VA_SLICE_DATA_FLAG_MIDDLE)) { + chopping = VA_SLICE_DATA_FLAG_END; + } + } + } + dest_scan_ctrl[dest_idx].slice_data_flag = chopping; + dest_scan_ctrl[dest_idx].slice_data_offset = ((chopping == VA_SLICE_DATA_FLAG_ALL) || (chopping == VA_SLICE_DATA_FLAG_BEGIN) )? +jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset - src_offset : 0; + + const int32_t bytes_in_seg = bytes - dest_scan_ctrl[dest_idx].slice_data_offset; + const uint32_t scan_data = (bytes_in_seg < jd_libva_ptr->slice_param_buf[src_idx].slice_data_size) ? bytes_in_seg : jd_libva_ptr->slice_param_buf[src_idx].slice_data_size ; + jd_libva_ptr->slice_param_buf[src_idx].slice_data_offset = 0; + jd_libva_ptr->slice_param_buf[src_idx].slice_data_size -= scan_data; + dest_scan_ctrl[dest_idx].slice_data_size = scan_data; + dest_scan_ctrl[dest_idx].num_components = jd_libva_ptr->slice_param_buf[src_idx].num_components; + dest_scan_ctrl[dest_idx].restart_interval = jd_libva_ptr->slice_param_buf[src_idx].restart_interval; + memcpy(&dest_scan_ctrl[dest_idx].components, & jd_libva_ptr->slice_param_buf[ src_idx ].components, + sizeof(jd_libva_ptr->slice_param_buf[ src_idx ].components) ); + dest_idx++; + if ((chopping == VA_SLICE_DATA_FLAG_ALL) || (chopping == VA_SLICE_DATA_FLAG_END)) { /* all good good */ + } else { + break; + } + } + scan_idx = src_idx; + /* Get Slice Control Buffer */ + va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VASliceParameterBufferType, sizeof(VASliceParameterBufferJPEG) * dest_idx, 1, dest_scan_ctrl, &desc_buf[buf_idx]); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateBuffer failed. va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + return status; + } + buf_idx++; + va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VASliceDataBufferType, bytes, 1, &jd_libva_ptr->bitstream_buf[ src_offset ], &desc_buf[buf_idx]); + buf_idx++; + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateBuffer failed. 
va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + return status; + } + va_status = vaRenderPicture( jd_libva_ptr->va_display, jd_libva_ptr->va_context, desc_buf, buf_idx); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaRenderPicture failed. va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + return status; + } + buf_idx = 0; + + src_offset += bytes; + } while (bytes_remaining); + + va_status = vaEndPicture(jd_libva_ptr->va_display, jd_libva_ptr->va_context); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaRenderPicture failed. va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + return status; + } + + va_status = vaSyncSurface(jd_libva_ptr->va_display, jd_libva_ptr->va_surfaces[0]); + if (va_status != VA_STATUS_SUCCESS) { + WTRACE("vaSyncSurface failed. va_status = 0x%x", va_status); + } +#if 0 + uint8_t* rgb_buf; + int32_t data_len = 0; + uint32_t surface_width, surface_height; + surface_width = (( ( jd_libva_ptr->image_width + 7 ) & ( ~7 )) + 15 ) & ( ~15 ); + surface_height = (( ( jd_libva_ptr->image_height + 7 ) & ( ~7 )) + 15 ) & ( ~15 ); + + rgb_buf = (uint8_t*) malloc((surface_width * surface_height) << 2); + if(rgb_buf == NULL){ + return DECODE_MEMORY_FAIL; + } + va_status = vaPutSurfaceBuf(jd_libva_ptr->va_display, jd_libva_ptr->va_surfaces[0], rgb_buf, &data_len, 0, 0, surface_width, surface_height, 0, 0, surface_width, surface_height, NULL, 0, 0); + + buf = rgb_buf; +// dump RGB data + { + FILE *pf_tmp = fopen("img_out.rgb", "wb"); + if(pf_tmp == NULL) + ETRACE("Open file error"); + fwrite(rgb_buf, 1, surface_width * surface_height * 4, pf_tmp); + fclose(pf_tmp); + } +#endif +#if 0 + va_status = vaDeriveImage(jd_libva_ptr->va_display, jd_libva_ptr->va_surfaces[0], &(jd_libva_ptr->surface_image)); + if (va_status != VA_STATUS_SUCCESS) { + ERREXIT1 (cinfo, JERR_VA_DRIVEIMAGE, va_status); + } + + va_status = vaMapBuffer(jd_libva_ptr->va_display, jd_libva_ptr->surface_image.buf, (void **)& (jd_libva_ptr->image_buf)); + if (va_status != VA_STATUS_SUCCESS) { + ERREXIT1 (cinfo, JERR_VA_MAPBUFFER, va_status); + } + + va_status = vaUnmapBuffer(jd_libva_ptr->va_display, jd_libva_ptr->surface_image.buf); + if (va_status != VA_STATUS_SUCCESS) { + ERREXIT1(cinfo, JERR_VA_MAPBUFFER, va_status); + } + + va_status = vaDestroyImage(jd_libva_ptr->va_display, jd_libva_ptr->surface_image.image_id); + + if (va_status != VA_STATUS_SUCCESS) { + ERREXIT1 (cinfo, JERR_VA_MAPBUFFER, va_status); + } +#endif + return status; +} + +Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { + uint32_t component_order = 0 ; + uint32_t dqt_ind = 0; + uint32_t dht_ind = 0; + uint32_t scan_ind = 0; + boolean frame_marker_found = FALSE; + + uint8_t marker = jd_libva_ptr->JPEGParser->getNextMarker(jd_libva_ptr->JPEGParser); + + while (marker != CODE_EOI) { + switch (marker) { + // If the marker is an APP marker skip over the data + case CODE_APP0: + case CODE_APP1: + case CODE_APP2: + case CODE_APP3: + case CODE_APP4: + case CODE_APP5: + case CODE_APP6: + case CODE_APP7: + case CODE_APP8: + case CODE_APP9: + case CODE_APP10: + case CODE_APP11: + case CODE_APP12: + case CODE_APP13: + case CODE_APP14: + case CODE_APP15: { + + uint32_t bytes_to_burn = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2) - 2; + jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, bytes_to_burn); + break; + } + // Store offset to DQT data to avoid parsing bitstream in user mode + case CODE_DQT: { + jd_libva_ptr->dqt_byte_offset[dqt_ind] = 
+ // Reject all SOF markers other than SOF0 (only baseline JPEG is supported) + case CODE_SOF1: + case CODE_SOF2: + case CODE_SOF3: + case CODE_SOF5: + case CODE_SOF6: + case CODE_SOF7: + case CODE_SOF8: + case CODE_SOF9: + case CODE_SOF10: + case CODE_SOF11: + case CODE_SOF13: + case CODE_SOF14: + case CODE_SOF15: { + fprintf(stderr, "ERROR: unsupported SOF\n"); + return DECODE_PARSER_FAIL; + } + // Parse component information in SOF marker + case CODE_SOF_BASELINE: { + frame_marker_found = TRUE; + + jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, 2); // Throw away frame header length + jd_libva_ptr->picture_param_buf.sample_precision = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); + if (jd_libva_ptr->picture_param_buf.sample_precision != 8) { + return DECODE_PARSER_FAIL; + } + // Extract pic width and height + jd_libva_ptr->picture_param_buf.picture_height = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2); + jd_libva_ptr->picture_param_buf.picture_width = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2); + jd_libva_ptr->picture_param_buf.num_components = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); + + if (jd_libva_ptr->picture_param_buf.num_components > JPEG_MAX_COMPONENTS) { + return DECODE_PARSER_FAIL; + } + uint8_t comp_ind; + for (comp_ind = 0; comp_ind < jd_libva_ptr->picture_param_buf.num_components; comp_ind++) { + jd_libva_ptr->picture_param_buf.components[comp_ind].component_id = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); + + uint8_t hv_sampling = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); + jd_libva_ptr->picture_param_buf.components[comp_ind].h_sampling_factor = hv_sampling >> 4; + jd_libva_ptr->picture_param_buf.components[comp_ind].v_sampling_factor = hv_sampling & 0xf; + jd_libva_ptr->picture_param_buf.components[comp_ind].quantiser_table_selector = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); + } + + + break; + } + // Store offset to DHT data to avoid parsing bitstream in user mode + case CODE_DHT: { + jd_libva_ptr->dht_byte_offset[dht_ind] = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser); + dht_ind++; + uint32_t bytes_to_burn = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2) - 2; + jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, bytes_to_burn ); + break; + } + // Parse component information in SOS marker + case CODE_SOS: { + jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, 2); + uint32_t component_in_scan = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); + uint8_t comp_ind; + + for (comp_ind = 0; comp_ind < component_in_scan; comp_ind++) { + uint8_t comp_id = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); + uint8_t comp_data_ind; + for (comp_data_ind = 0; comp_data_ind < jd_libva_ptr->picture_param_buf.num_components; comp_data_ind++) { + if (comp_id == jd_libva_ptr->picture_param_buf.components[comp_data_ind].component_id) { + jd_libva_ptr->slice_param_buf[scan_ind].components[comp_ind].component_selector = comp_data_ind; + break; + } + } + uint8_t huffman_tables = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); + jd_libva_ptr->slice_param_buf[scan_ind].components[comp_ind].dc_table_selector = huffman_tables >> 4; + jd_libva_ptr->slice_param_buf[scan_ind].components[comp_ind].ac_table_selector = huffman_tables & 0xf; + } + uint32_t curr_byte = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); // Ss + if (curr_byte != 0) { + return DECODE_PARSER_FAIL; + } + curr_byte = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); // Se + if (curr_byte != 0x3f) { + return DECODE_PARSER_FAIL; + } + curr_byte = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); // Ah, Al + if (curr_byte != 0) { + return DECODE_PARSER_FAIL; + } + // Set slice control variables needed + jd_libva_ptr->slice_param_buf[scan_ind].slice_data_offset = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser); + jd_libva_ptr->slice_param_buf[scan_ind].num_components = component_in_scan; + if (scan_ind) { + /* If there is more than one scan, the slice for all but the final scan should only run up to the beginning of the next scan */ + jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_size = + (jd_libva_ptr->slice_param_buf[scan_ind].slice_data_offset - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_offset ); + } + scan_ind++; + jd_libva_ptr->scan_ctrl_count++; // gsDXVA2Globals.uiScanCtrlCount + break; + } + case CODE_DRI: { + uint32_t size = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2); + jd_libva_ptr->slice_param_buf[scan_ind].restart_interval = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2); + jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, (size - 4)); + break; + } + default: + break; + } + + marker = jd_libva_ptr->JPEGParser->getNextMarker(jd_libva_ptr->JPEGParser); + } + + jd_libva_ptr->quant_tables_num = dqt_ind; + jd_libva_ptr->huffman_tables_num = dht_ind; + + /* The slice for the last scan should run up to the end of the picture */ + jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_size = (jd_libva_ptr->file_size - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_offset); + + // fail if SOF0 was never found in the bitstream + if (!frame_marker_found) { + ETRACE("ERROR: Reached end of bitstream while trying to parse headers\n"); + return DECODE_PARSER_FAIL; + } + + return parseTableData(jd_libva_ptr); + +}
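For reference, an illustrative sketch (ours, not from the source) of the marker-segment layout this loop walks in a typical baseline JPEG:

    /*   FFD8 SOI    FFE0 APP0 (JFIF)    FFDB DQT (x2)    FFC0 SOF0
     *   FFC4 DHT (x4)    FFDA SOS    <entropy-coded scan data>    FFD9 EOI
     *
     * getNextMarker() skips to the next 0xFF byte and returns the code byte
     * after it; length-prefixed segments are then consumed by the per-case
     * readBytes()/burnBytes() calls above. */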
+Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr) { + CJPEGParse* parser = (CJPEGParse*)malloc(sizeof(CJPEGParse)); + if (parser == NULL) + return DECODE_MEMORY_FAIL; + + parserInitialize(parser, jd_libva_ptr->bitstream_buf, jd_libva_ptr->file_size); + + // Parse Quant tables + memset(&jd_libva_ptr->qmatrix_buf, 0, sizeof(jd_libva_ptr->qmatrix_buf)); + uint32_t dqt_ind; + for (dqt_ind = 0; dqt_ind < jd_libva_ptr->quant_tables_num; dqt_ind++) { + if (parser->setByteOffset(parser, jd_libva_ptr->dqt_byte_offset[dqt_ind])) { + uint32_t table_bytes = parser->readBytes( parser, 2 ) - 2; + do { + uint32_t table_info = parser->readNextByte(parser); + table_bytes--; + uint32_t table_length = table_bytes > 64 ? 64 : table_bytes; + uint32_t table_precision = table_info >> 4; + if (table_precision != 0) { + free(parser); + return DECODE_PARSER_FAIL; + } + uint32_t table_id = table_info & 0xf; + if (table_id >= JPEG_MAX_QUANT_TABLES) { + free(parser); + return DECODE_PARSER_FAIL; + } + jd_libva_ptr->qmatrix_buf.load_quantiser_table[dqt_ind] = table_id; + + // Pull Quant table data from bitstream + uint32_t byte_ind; + for (byte_ind = 0; byte_ind < table_length; byte_ind++) { + jd_libva_ptr->qmatrix_buf.quantiser_table[table_id][byte_ind] = parser->readNextByte(parser); + } + table_bytes -= table_length; + } while (table_bytes); + } + } + + // Parse Huffman tables + memset(&jd_libva_ptr->hufman_table_buf, 0, sizeof(jd_libva_ptr->hufman_table_buf)); + uint32_t dht_ind; + for (dht_ind = 0; dht_ind < jd_libva_ptr->huffman_tables_num; dht_ind++) { + if (parser->setByteOffset(parser, jd_libva_ptr->dht_byte_offset[dht_ind])) { + uint32_t table_bytes = parser->readBytes( parser, 2 ) - 2; + do { + uint32_t table_info = parser->readNextByte(parser); + table_bytes--; + uint32_t table_class = table_info >> 4; // Identifies whether the table is for AC or DC + if (table_class >= TABLE_CLASS_NUM) { + free(parser); + return DECODE_PARSER_FAIL; + } + uint32_t table_id = table_info & 0xf; + if (table_id >= JPEG_MAX_SETS_HUFFMAN_TABLES) { + free(parser); + return DECODE_PARSER_FAIL; + } + if (table_class == 0) { + uint8_t* bits = parser->getCurrentIndex(parser); + // Find out the number of entries in the table + uint32_t table_entries = 0; + uint32_t bit_ind; + for (bit_ind = 0; bit_ind < 16; bit_ind++) { + jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind] = bits[bit_ind]; + table_entries += jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind]; + } + + // Create table of code values + parser->burnBytes(parser, 16); + table_bytes -= 16; + uint32_t tbl_ind; + for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { + jd_libva_ptr->hufman_table_buf.huffman_table[table_id].dc_values[tbl_ind] = parser->readNextByte(parser); + table_bytes--; + } + + } else { // for AC class + uint8_t* bits = parser->getCurrentIndex(parser); + // Find out the number of entries in the table + uint32_t table_entries = 0; + uint32_t bit_ind; + for (bit_ind = 0; bit_ind < 16; bit_ind++) { + jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind] = bits[bit_ind]; + table_entries += jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind]; + } + + // Create table of code values + parser->burnBytes(parser, 16); + table_bytes -= 16; + uint32_t tbl_ind; + for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { + jd_libva_ptr->hufman_table_buf.huffman_table[table_id].ac_values[tbl_ind] = parser->readNextByte(parser); + table_bytes--; + } + }//end of else + + } while (table_bytes); + } + } + + if (parser) { + free(parser); + parser = NULL; + } + return DECODE_SUCCESS; +}
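To make the 16-counts-plus-values reads concrete, an illustrative DHT payload (ours; the byte values are the standard luminance DC table from the JPEG spec, Annex K):

    /*   FFC4 001F    marker + segment length (2 + 1 + 16 + 12 = 31 = 0x1F)
     *   00           table_info: class 0 (DC), table id 0
     *   00 01 05 01 01 01 01 01 01 00 00 00 00 00 00 00
     *                num_dc_codes[0..15], code-length counts, sum = 12
     *   00 01 02 03 04 05 06 07 08 09 0A 0B
     *                dc_values[0..11], the 12 symbol values            */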
diff --git a/imagedecoder/JPEGDecoder.h b/imagedecoder/JPEGDecoder.h new file mode 100644 index 0000000..efc8091 --- /dev/null +++ b/imagedecoder/JPEGDecoder.h @@ -0,0 +1,103 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. 
The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +* Authors: +* Nana Guo +* +*/ + +#ifndef JDLIBVA_H +#define JDLIBVA_H + +#include "JPEGParser.h" +#include +#include +//#include +#include "va/va_dec_jpeg.h" + +#define Display unsigned int +#define BOOL int + +#define JPEG_MAX_COMPONENTS 4 +#define JPEG_MAX_QUANT_TABLES 4 + +typedef struct { + Display * android_display; + uint32_t surface_count; + VADisplay va_display; + VAContextID va_context; + VASurfaceID* va_surfaces; + VAConfigID va_config; + + VAPictureParameterBufferJPEG picture_param_buf; + VASliceParameterBufferJPEG slice_param_buf[JPEG_MAX_COMPONENTS]; + VAIQMatrixParameterBufferJPEG qmatrix_buf; + VAHuffmanTableParameterBufferJPEG hufman_table_buf; + + uint32_t dht_byte_offset[4]; + uint32_t dqt_byte_offset[4]; + uint32_t huffman_tables_num; + uint32_t quant_tables_num; + + uint8_t* bitstream_buf; + uint32_t image_width; + uint32_t image_height; + uint32_t scan_ctrl_count; + + uint8_t * image_buf; + VAImage surface_image; + boolean hw_state_ready; + boolean hw_caps_ready; + boolean hw_path; + boolean initialized; + boolean resource_allocated; + + uint32_t file_size; + uint32_t rotation; + CJPEGParse* JPEGParser; + +} jd_libva_struct; + +typedef enum { + DECODE_NOT_STARTED = -6, + DECODE_INVALID_DATA = -5, + DECODE_DRIVER_FAIL = -4, + DECODE_PARSER_FAIL = -3, + DECODE_MEMORY_FAIL = -2, + DECODE_FAIL = -1, + DECODE_SUCCESS = 0, + +} IMAGE_DECODE_STATUS; + +typedef int32_t Decode_Status; + +extern jd_libva_struct jd_libva; + +Decode_Status jdva_initialize (jd_libva_struct * jd_libva_ptr); +void jdva_deinitialize (jd_libva_struct * jd_libva_ptr); +Decode_Status jdva_decode (jd_libva_struct * jd_libva_ptr, uint8_t* buf); +Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr); +Decode_Status jdva_release_resource (jd_libva_struct * jd_libva_ptr); +Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr); +Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr); +#endif diff --git a/imagedecoder/JPEGParser.c b/imagedecoder/JPEGParser.c new file mode 100644 index 0000000..d724008 --- /dev/null +++ b/imagedecoder/JPEGParser.c @@ -0,0 +1,118 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. 
No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +* Authors: +* Nana Guo +* +*/ + +#include "JPEGParser.h" + +#include +#include +#include +#include + +uint8_t readNextByte(CJPEGParse* parser) { + uint8_t byte = 0; + + if (parser->parse_index < parser->buff_size) { + byte = *( parser->stream_buff + parser->parse_index ); + parser->parse_index++; + } + + if (parser->parse_index == parser->buff_size) { + parser->end_of_buff = TRUE; + } + + return byte; +} + +uint32_t readBytes( CJPEGParse* parser, uint32_t bytes_to_read ) { + uint32_t bytes = 0; + + while (bytes_to_read-- && !endOfBuffer(parser)) { + bytes |= ( (uint32_t)readNextByte(parser) << ( bytes_to_read * 8 ) ); + } + + return bytes; +} + +void burnBytes( CJPEGParse* parser, uint32_t bytes_to_burn ) { + parser->parse_index += bytes_to_burn; + + if (parser->parse_index >= parser->buff_size) { + parser->parse_index = parser->buff_size - 1; + parser->end_of_buff = TRUE; + } +} + +uint8_t getNextMarker(CJPEGParse* parser) { + while (!endOfBuffer(parser)) { + if (readNextByte(parser) == 0xff) { + break; + } + } + + return readNextByte(parser); +} + +boolean setByteOffset(CJPEGParse* parser, uint32_t byte_offset) +{ + boolean offset_found = FALSE; + + if (byte_offset < parser->buff_size) { + parser->parse_index = byte_offset; + offset_found = TRUE; +// end_of_buff = FALSE; + } + + return offset_found; +} + +uint32_t getByteOffset(CJPEGParse* parser) { + return parser->parse_index; +} + +boolean endOfBuffer(CJPEGParse* parser) { + return parser->end_of_buff; +} + +uint8_t* getCurrentIndex(CJPEGParse* parser) { + return parser->stream_buff + parser->parse_index; +} + +void parserInitialize(CJPEGParse* parser, uint8_t* stream_buff, uint32_t buff_size) { + parser->parse_index = 0; + parser->buff_size = buff_size; + parser->stream_buff = stream_buff; + parser->end_of_buff = FALSE; + parser->readNextByte = readNextByte; + parser->readBytes = readBytes; + parser->burnBytes = burnBytes; + parser->getNextMarker = getNextMarker; + parser->getByteOffset = getByteOffset; + parser->endOfBuffer = endOfBuffer; + parser->getCurrentIndex = getCurrentIndex; + parser->setByteOffset= setByteOffset; +} diff --git a/imagedecoder/JPEGParser.h b/imagedecoder/JPEGParser.h new file mode 100644 index 0000000..9e8ebd1 --- /dev/null +++ b/imagedecoder/JPEGParser.h @@ -0,0 +1,108 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. 
No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +* Authors: +* Nana Guo +* +*/ + +#ifndef _JPEG_PARSE_H_ +#define _JPEG_PARSE_H_ + +#include + +#ifndef boolean +#define boolean int +#endif + +#ifndef TRUE +#define TRUE 1 +#endif +#ifndef FALSE +#define FALSE 0 +#endif + +// Marker Codes +#define CODE_SOF_BASELINE 0xC0 +#define CODE_SOF1 0xC1 +#define CODE_SOF2 0xC2 +#define CODE_SOF3 0xC3 +#define CODE_SOF5 0xC5 +#define CODE_SOF6 0xC6 +#define CODE_SOF7 0xC7 +#define CODE_SOF8 0xC8 +#define CODE_SOF9 0xC9 +#define CODE_SOF10 0xCA +#define CODE_SOF11 0xCB +#define CODE_SOF13 0xCD +#define CODE_SOF14 0xCE +#define CODE_SOF15 0xCF +#define CODE_DHT 0xC4 +#define CODE_RST0 0xD0 +#define CODE_RST1 0xD1 +#define CODE_RST2 0xD2 +#define CODE_RST3 0xD3 +#define CODE_RST4 0xD4 +#define CODE_RST5 0xD5 +#define CODE_RST6 0xD6 +#define CODE_RST7 0xD7 +#define CODE_SOI 0xD8 +#define CODE_EOI 0xD9 +#define CODE_SOS 0xDA +#define CODE_DQT 0xDB +#define CODE_DRI 0xDD +#define CODE_APP0 0xE0 +#define CODE_APP1 0xE1 +#define CODE_APP2 0xE2 +#define CODE_APP3 0xE3 +#define CODE_APP4 0xE4 +#define CODE_APP5 0xE5 +#define CODE_APP6 0xE6 +#define CODE_APP7 0xE7 +#define CODE_APP8 0xE8 +#define CODE_APP9 0xE9 +#define CODE_APP10 0xEA +#define CODE_APP11 0xEB +#define CODE_APP12 0xEC +#define CODE_APP13 0xED +#define CODE_APP14 0xEE +#define CODE_APP15 0xEF +typedef struct _CJPEGParse CJPEGParse; +struct _CJPEGParse { + uint8_t* stream_buff; + uint32_t parse_index; + uint32_t buff_size; + boolean end_of_buff; + uint8_t (*readNextByte)(CJPEGParse* parser); + uint32_t (*readBytes)( CJPEGParse* parser, uint32_t bytes_to_read ); + void (*burnBytes)( CJPEGParse* parser, uint32_t bytes_to_burn ); + uint8_t (*getNextMarker)(CJPEGParse* parser); + uint32_t (*getByteOffset)(CJPEGParse* parser); + boolean (*endOfBuffer)(CJPEGParse* parser); + uint8_t* (*getCurrentIndex)(CJPEGParse* parser); + boolean (*setByteOffset)( CJPEGParse* parser, uint32_t byte_offset ); +}; + +void parserInitialize(CJPEGParse* parser, uint8_t* stream_buff, uint32_t buff_size); +#endif // _JPEG_PARSE_H_ + -- cgit v1.2.3 From 99747da2d39eb410a10c8846fce01c323043d3d8 Mon Sep 17 00:00:00 2001 From: hding3 Date: Mon, 24 Dec 2012 14:34:35 +0800 Subject: [PORT FROM R4.1] fix the messy issue when playing the exported video again mainline BZ: 72393 BZ: 72393 Root cause: in the mix parser, the frame store (fs) array in struct h264_DecodedPictureBuffer has only 18 entries, but could receive an index of 31 when rewriting an entry value. Unfortunately, this out-of-bounds write clobbers the first_mb_in_slice member of h264_Slice_Header_t, because h264_DecodedPictureBuffer and h264_Slice_Header_t are members of the same struct, h264_Info. The patch adds a parameter check to fix the issue. Change-Id: I6fba249a38673cf9c7af9ea9dc71063f470e01f7 Orig-Change-Id: I784fb6c76693b11a8c14d561755f7f1a92efb946 Signed-off-by: Gu, Wangyi Signed-off-by: hding3 Reviewed-on: http://android.intel.com:8080/83610 Reviewed-by: Feng, Wei Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c index e7dd6a7..2efa5c3 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c @@ -675,9 +675,12 @@ void h264_update_frame_type(h264_Info * pInfo ) { if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) { - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_IDR << FRAME_TYPE_FRAME_OFFSET); + if (pInfo->dpb.fs_dec_idc < NUM_DPB_FRAME_STORES) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_IDR << FRAME_TYPE_FRAME_OFFSET); //pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = 0xff; //pInfo->dpb.fs[0].pic_type = pInfo->dpb.fs_dec_idc; + } } else -- cgit v1.2.3 
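The guard, isolated for clarity (a sketch only; set_pic_type_checked is our name, while h264_Info and NUM_DPB_FRAME_STORES come from the h264 parser headers):

    // Never index the DPB frame-store array with an unchecked stream-derived index.
    static void set_pic_type_checked(h264_Info *pInfo, uint32_t idc, uint32_t type)
    {
        if (idc < NUM_DPB_FRAME_STORES)          // fs[] holds NUM_DPB_FRAME_STORES entries
            pInfo->dpb.fs[idc].pic_type = type;  // an out-of-range idc (e.g. 31) is dropped
    }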
From 58eb6faacdaf51b8c5844e92091693e94b41233f Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Mon, 24 Dec 2012 15:03:00 +0800 Subject: VideoEncode: set graphic buffer stride accordingly. BZ: 76032 A graphic buffer's width and its stride have the following relation: (0, 512] ==> 512, (512, 1024] ==> 1024, (1024, 1280] ==> 1280, (1280, +inf) ==> 2048. Set the stride accordingly. Change-Id: I537a412166bd15f934fd130a5081d0901e6155b1 Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/83615 Reviewed-by: Yuan, Shengquan Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videoencoder/VideoEncoderBase.cpp | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index d0e6d66..55012d7 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1563,7 +1563,14 @@ Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle(SurfaceMap *map) { vaSurfaceAttrib.count = 1; // OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar - vaSurfaceAttrib.luma_stride = (mComParams.resolution.width + 0x1ff) & (~0x1ff); + if (mComParams.resolution.width <= 512) + vaSurfaceAttrib.luma_stride = 512; + else if (mComParams.resolution.width > 512 && mComParams.resolution.width <= 1024) + vaSurfaceAttrib.luma_stride = 1024; + else if (mComParams.resolution.width > 1024 && mComParams.resolution.width <= 1280) + vaSurfaceAttrib.luma_stride = 1280; + else + vaSurfaceAttrib.luma_stride = 2048; vaSurfaceAttrib.pixel_format = map->vinfo.format; vaSurfaceAttrib.width = mComParams.resolution.width; vaSurfaceAttrib.height = mComParams.resolution.height; -- cgit v1.2.3 
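The mapping the patch hard-codes, restated compactly (a sketch; chooseLumaStride is our name, not part of VideoEncoderBase):

    #include <stdint.h>

    static uint32_t chooseLumaStride(uint32_t width) {
        if (width <= 512)  return 512;
        if (width <= 1024) return 1024;
        if (width <= 1280) return 1280;
        return 2048;
    }
    // e.g. chooseLumaStride(720) == 1024, chooseLumaStride(1920) == 2048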
From 0fc5da81c8afac3286df6e97195edf9ca2fcaf69 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Wed, 24 Oct 2012 15:05:10 +0800 Subject: initial version for MRFLD HiP support BZ: 76823 1. Support thread-safe Queue operations 2. Support getOutput for different codec types 3. Support multiple getOutput calls for some output formats 4. Support non-blocking / timeout mode 5. Support EOS 6. Support B frames without reconstructed frame output; on MRFLD this is done in the driver, on MFLD in libMIX 7. Support baseline/high-profile selection 8. Support HiP parameters 9. Support CodedBuffer count setting 10. Support automatic frame type detection for both HiP and baseline 11. Add the high profile parameter and change the type of timestamp 12. Refine the encode/getOutput block/non-block mode with a List container 13. Support automatic reconstructed and reference frame management in the driver; remove the MFLD logic (done) 14. Support the new libva VAEncSliceParameterBufferH264 structure, substituting VAEncSliceParameterBuffer (BZ 75766) 15. Refine the naming style and the frame type detection; B frames may or may not affect the frame num of the GOP 16. Refine the slice_type assignment 17. Support frame skip on MFLD Change-Id: I8aeb006ec09d10379707c412df21b5a1d2426d17 Signed-off-by: Zhao Liang Signed-off-by: jiguoliang Reviewed-on: http://android.intel.com:8080/71392 Reviewed-by: Yuan, Shengquan Reviewed-by: Ding, Haitao Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- test/Android.mk | 4 + test/mix_encoder.cpp | 25 +- videoencoder/Android.mk | 3 +- videoencoder/VideoEncoderAVC.cpp | 173 ++++--- videoencoder/VideoEncoderAVC.h | 12 +- videoencoder/VideoEncoderBase.cpp | 968 +++++++++++++++-------------------- videoencoder/VideoEncoderBase.h | 107 ++-- videoencoder/VideoEncoderDef.h | 72 ++- videoencoder/VideoEncoderH263.cpp | 26 +- videoencoder/VideoEncoderH263.h | 12 +- videoencoder/VideoEncoderInterface.h | 5 +- videoencoder/VideoEncoderLog.h | 3 +- videoencoder/VideoEncoderMP4.cpp | 61 +-- videoencoder/VideoEncoderMP4.h | 14 +- 14 files changed, 683 insertions(+), 802 deletions(-) diff --git a/test/Android.mk b/test/Android.mk index 2f4d6a8..abded5d 100644 --- a/test/Android.mk +++ b/test/Android.mk @@ -45,6 +45,10 @@ LOCAL_SHARED_LIBRARIES := \ libva-android \ libva-tpi \ libgui \ + libui \ + libutils \ + libcutils \ + libhardware \ libbinder LOCAL_MODULE_TAGS := optional diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index 464b759..4662947 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -257,7 +257,7 @@ Encode_Status SetVideoEncoderParam() { memset(&tmpStoreMetaDataInBuffers,0x00,sizeof(VideoParamsStoreMetaDataInBuffers)); gVideoEncoder->getParameters(&tmpStoreMetaDataInBuffers); gVideoEncoder->setParameters(&tmpStoreMetaDataInBuffers); - +#if 0 VideoParamsUpstreamBuffer tmpVideoParamsUpstreamBuffer; tmpVideoParamsUpstreamBuffer.bufCnt = 0; gVideoEncoder->setParameters(&tmpVideoParamsUpstreamBuffer); @@ -275,7 +275,7 @@ Encode_Status SetVideoEncoderParam() { VideoParamsUsrptrBuffer tmpVideoParamsUsrptrBuffer; tmpVideoParamsUsrptrBuffer.width = 0; gVideoEncoder->getParameters(&tmpVideoParamsUsrptrBuffer); - +#endif //---------------------add for libmix encode code coverage test // VideoEncodeBase.cpp file setConfig && getConfig code coverage test // only for VCM mode @@ -346,6 +346,9 @@ Encode_Status SetVideoEncoderParam() { // for VideoConfigTypeAVCIntraPeriod derivedSetConfig && derivedGetConfig VideoConfigAVCIntraPeriod configAVCIntraPeriod; gVideoEncoder->getConfig(&configAVCIntraPeriod); + configAVCIntraPeriod.ipPeriod = 1; + configAVCIntraPeriod.intraPeriod = 30; + configAVCIntraPeriod.idrInterval = 1; gVideoEncoder->setConfig(&configAVCIntraPeriod); VideoConfigTypeIDRReq tmpVideoConfigTypeIDRReq; gVideoEncoder->setConfig(&tmpVideoConfigTypeIDRReq); @@ -989,30 +992,24 @@ for(int i=0; i<1; i++) InBuf.data = data; InBuf.size = size; InBuf.bufAvailable = true; + InBuf.type = FTYPE_UNKNOWN; + InBuf.flag = 0; ret = gVideoEncoder->encode(&InBuf); CHECK_ENCODE_STATUS("encode"); + if (i > 0) { ret = gVideoEncoder->getOutput(&OutBuf); CHECK_ENCODE_STATUS("getOutput"); - CHECK_ENCODE_STATUS_RETURN("getOutput"); - // printf("OutBuf.dataSize = %d .........\n", OutBuf.dataSize); +// printf("OutBuf.dataSize = %d, flag=0x%08x .........\n", OutBuf.dataSize, OutBuf.flag); fwrite(OutBuf.data, 1, OutBuf.dataSize, file); - + } printf("Encoding %d Frames \r", i+1); fflush(stdout); } + ret = gVideoEncoder->getOutput(&OutBuf); fclose(file); - VideoStatistics stat; - if (gVideoEncoder->getStatistics(&stat) == ENCODE_SUCCESS) - { - printf("\nVideoStatistics\n"); - printf("Encoded %d frames, Skip %d frames, encode time: average( %d us), max( %d us/Frame %d), min( %d us/Frame %d)\n", \ - stat.total_frames, stat.skipped_frames, stat.average_encode_time, stat.max_encode_time, stat.max_encode_frame, \ - stat.min_encode_time, stat.min_encode_frame ); - } - gVideoEncoder->stop(); releaseVideoEncoder(gVideoEncoder); gVideoEncoder = NULL; diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk index 7c8314a..0b13e7e 100644 --- a/videoencoder/Android.mk +++ b/videoencoder/Android.mk @@ -2,7 +2,6 @@ LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) #VIDEO_ENC_LOG_ENABLE := true -#VIDEO_ENC_STATISTICS_ENABLE := true LOCAL_SRC_FILES := \ VideoEncoderBase.cpp \ @@ -16,11 +15,13 @@ LOCAL_SRC_FILES := \ LOCAL_C_INCLUDES := \ $(LOCAL_PATH) \ $(TARGET_OUT_HEADERS)/libva \ + $(TOPDIR)/frameworks/native/include \ #LOCAL_LDLIBS += -lpthread LOCAL_SHARED_LIBRARIES := \ libcutils \ + libutils \ libva \ libva-android \ libva-tpi \ 
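A sketch of the calling pattern the asynchronous redesign implies, reusing the types from the test code above (the IVideoEncoder interface name and the drainOneFrame wrapper are assumptions, and OutBuf's buffer setup is omitted just as in the test):

    #include <cstdio>

    static void drainOneFrame(IVideoEncoder *enc, uint8_t *data, uint32_t size, FILE *out) {
        VideoEncRawBuffer in;
        VideoEncOutputBuffer outBuf;
        in.data = data; in.size = size;
        in.type = FTYPE_UNKNOWN;   // let libMIX detect the frame type (item 10 above)
        in.flag = 0;
        if (enc->encode(&in) != ENCODE_SUCCESS) return;
        // with B frames getOutput() can trail encode() by a frame (hence the i > 0 guard above)
        if (enc->getOutput(&outBuf) == ENCODE_SUCCESS)
            fwrite(outBuf.data, 1, outBuf.dataSize, out);
    }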
CHECK_ENCODE_STATUS("getOutput"); - CHECK_ENCODE_STATUS_RETURN("getOutput"); - // printf("OutBuf.dataSize = %d .........\n", OutBuf.dataSize); +// printf("OutBuf.dataSize = %d, flag=0x%08x .........\n", OutBuf.dataSize, OutBuf.flag); fwrite(OutBuf.data, 1, OutBuf.dataSize, file); - + } printf("Encoding %d Frames \r", i+1); fflush(stdout); } + ret = gVideoEncoder->getOutput(&OutBuf); fclose(file); - VideoStatistics stat; - if (gVideoEncoder->getStatistics(&stat) == ENCODE_SUCCESS) - { - printf("\nVideoStatistics\n"); - printf("Encoded %d frames, Skip %d frames, encode time: average( %d us), max( %d us/Frame %d), min( %d us/Frame %d)\n", \ - stat.total_frames, stat.skipped_frames, stat.average_encode_time, stat.max_encode_time, stat.max_encode_frame, \ - stat.min_encode_time, stat.min_encode_frame ); - } - gVideoEncoder->stop(); releaseVideoEncoder(gVideoEncoder); gVideoEncoder = NULL; diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk index 7c8314a..0b13e7e 100644 --- a/videoencoder/Android.mk +++ b/videoencoder/Android.mk @@ -2,7 +2,6 @@ LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) #VIDEO_ENC_LOG_ENABLE := true -#VIDEO_ENC_STATISTICS_ENABLE := true LOCAL_SRC_FILES := \ VideoEncoderBase.cpp \ @@ -16,11 +15,13 @@ LOCAL_SRC_FILES := \ LOCAL_C_INCLUDES := \ $(LOCAL_PATH) \ $(TARGET_OUT_HEADERS)/libva \ + $(TOPDIR)/frameworks/native/include \ #LOCAL_LDLIBS += -lpthread LOCAL_SHARED_LIBRARIES := \ libcutils \ + libutils \ libva \ libva-android \ libva-tpi \ diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index c4bf805..bf88da7 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -20,6 +20,7 @@ VideoEncoderAVC::VideoEncoderAVC() mVideoParamsAVC.sliceNum.iSliceNum = 2; mVideoParamsAVC.sliceNum.pSliceNum = 2; mVideoParamsAVC.idrInterval = 2; + mVideoParamsAVC.ipPeriod = 1; mVideoParamsAVC.maxSliceSize = 0; mVideoParamsAVC.delimiterType = AVC_DELIMITER_ANNEXB; mSliceNum = 2; @@ -94,6 +95,7 @@ Encode_Status VideoEncoderAVC::derivedSetConfig(VideoParamConfigSet *videoEncCon } mVideoParamsAVC.idrInterval = configAVCIntraPeriod->idrInterval; + mVideoParamsAVC.ipPeriod = configAVCIntraPeriod->ipPeriod; mComParams.intraPeriod = configAVCIntraPeriod->intraPeriod; mNewHeader = true; break; @@ -154,6 +156,7 @@ Encode_Status VideoEncoderAVC:: derivedGetConfig( configAVCIntraPeriod->idrInterval = mVideoParamsAVC.idrInterval; configAVCIntraPeriod->intraPeriod = mComParams.intraPeriod; + configAVCIntraPeriod->ipPeriod = mVideoParamsAVC.ipPeriod; break; } @@ -192,30 +195,68 @@ Encode_Status VideoEncoderAVC:: derivedGetConfig( return ENCODE_SUCCESS; } -Encode_Status VideoEncoderAVC::getOutput(VideoEncOutputBuffer *outBuffer) { +Encode_Status VideoEncoderAVC::updateFrameInfo(EncodeTask* task) { + uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval; + FrameType frametype; + uint32_t frame_num = mFrameNum; + + if (mVideoParamsAVC.idrInterval != 0) { + if(mVideoParamsAVC.ipPeriod > 1) + frame_num = frame_num % (idrPeroid + 1); + else if(mComParams.intraPeriod != 0) + frame_num = frame_num % idrPeroid ; + } + + if(frame_num ==0){ + frametype = FTYPE_IDR; + }else if(mComParams.intraPeriod ==0) + // only I frame need intraPeriod=idrInterval=ipPeriod=0 + frametype = FTYPE_I; + else if(mVideoParamsAVC.ipPeriod == 1){ // no B frame + if(mComParams.intraPeriod != 0 && (frame_num > 1) &&((frame_num -1)%mComParams.intraPeriod == 0)) + frametype = FTYPE_I; + else + frametype = FTYPE_P; + } else { + 
if(mComParams.intraPeriod != 0 &&((frame_num-1)%mComParams.intraPeriod == 0)&&(frame_num >mComParams.intraPeriod)) + frametype = FTYPE_I; + else{ + frame_num = frame_num%mComParams.intraPeriod; + if(frame_num == 0) + frametype = FTYPE_B; + else if((frame_num-1)%mVideoParamsAVC.ipPeriod == 0) + frametype = FTYPE_P; + else + frametype = FTYPE_B; + } + } + + if (frametype == FTYPE_IDR || frametype == FTYPE_I) + task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; + + if (frametype != task->type) { + const char* FrameTypeStr[10] = {"UNKNOWN", "I", "P", "B", "SI", "SP", "EI", "EP", "S", "IDR"}; + if ((uint32_t) task->type < 9) + LOG_V("libMIX thinks it is %s Frame, the input is %s Frame", FrameTypeStr[frametype], FrameTypeStr[task->type]); + else + LOG_V("Wrong Frame type %d, type may not be initialized ?\n", task->type); + } + +//temparily comment out to avoid uninitialize error +// if (task->type == FTYPE_UNKNOWN || (uint32_t) task->type > 9) + task->type = frametype; + + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderAVC::getExtFormatOutput(VideoEncOutputBuffer *outBuffer) { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; - bool useLocalBuffer = false; - uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval; LOG_V("Begin\n"); - CHECK_NULL_RETURN_IFFAIL(outBuffer); - - setKeyFrame(idrPeroid); - - // prepare for output, map the coded buffer - ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer); - CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); switch (outBuffer->format) { - case OUTPUT_EVERYTHING: - case OUTPUT_FRAME_DATA: { - // Output whatever we have - ret = VideoEncoderBase::outputAllData(outBuffer); - CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); - break; - } case OUTPUT_CODEC_DATA: { // Output the codec data ret = outputCodecData(outBuffer); @@ -251,26 +292,10 @@ Encode_Status VideoEncoderAVC::getOutput(VideoEncOutputBuffer *outBuffer) { LOG_I("out size is = %d\n", outBuffer->dataSize); - // cleanup, unmap the coded buffer if all - // data has been copied out - ret = VideoEncoderBase::cleanupForOutput(); CLEAN_UP: - if (ret < ENCODE_SUCCESS) { - if (outBuffer->data && (useLocalBuffer == true)) { - delete[] outBuffer->data; - outBuffer->data = NULL; - useLocalBuffer = false; - } - // error happens, unmap the buffer - if (mCodedBufferMapped) { - vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - mCodedBufferMapped = false; - mCurSegment = NULL; - } - } LOG_V("End\n"); return ret; } @@ -481,7 +506,6 @@ Encode_Status VideoEncoderAVC::outputOneNALU( mOffsetInSeg += (nalSize + nalOffset); outBuffer->dataSize = sizeToBeCopied; outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME; - if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; outBuffer->remainingSize = 0; } else { // if nothing to be copied out, set flag to invalid @@ -500,7 +524,6 @@ Encode_Status VideoEncoderAVC::outputOneNALU( } else { LOG_V("End of stream\n"); outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; - if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; mCurSegment = NULL; } } @@ -554,7 +577,6 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf // so the remainingSize size may larger than the remaining data size outBuffer->remainingSize = mTotalSize - mTotalSizeCopied + 100; outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME; - if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; LOG_E("Buffer size too small\n"); return ENCODE_BUFFER_TOO_SMALL; } @@ -569,7 +591,6 @@ Encode_Status 
VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf outBuffer->dataSize = sizeCopiedHere; outBuffer->remainingSize = 0; outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; - if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; mCurSegment = NULL; break; } @@ -579,7 +600,7 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf return ENCODE_SUCCESS; } -Encode_Status VideoEncoderAVC::sendEncodeCommand(void) { +Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) { Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n"); @@ -592,7 +613,7 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(void) { CHECK_ENCODE_STATUS_RETURN("renderHrd"); } - ret = renderSequenceParams(); + ret = renderSequenceParams(task); CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); mNewHeader = false; //Set to require new header filed to false } @@ -628,10 +649,10 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(void) { mRenderFrameRate = false; } - ret = renderPictureParams(); + ret = renderPictureParams(task); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); - ret = renderSliceParams(); + ret = renderSliceParams(task); CHECK_ENCODE_STATUS_RETURN("renderSliceParams"); LOG_V( "End\n"); @@ -745,7 +766,7 @@ int VideoEncoderAVC::calcLevel(int numMbs) { return level; } -Encode_Status VideoEncoderAVC::renderSequenceParams() { +Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncSequenceParameterBufferH264 avcSeqParams = {}; @@ -767,7 +788,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { CHECK_VA_STATUS_RETURN("vaCreateBuffer"); vaStatus = vaMapBuffer(mVADisplay, mRcParamBuf, (void **)&miscEncRCParamBuf); CHECK_VA_STATUS_RETURN("vaMapBuffer"); - + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, VAEncMiscParameterBufferType, sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterFrameRate), @@ -776,7 +797,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { CHECK_VA_STATUS_RETURN("vaCreateBuffer"); vaStatus = vaMapBuffer(mVADisplay, mFrameRateParamBuf, (void **)&miscEncFrameRateParamBuf); CHECK_VA_STATUS_RETURN("vaMapBuffer"); - + miscEncRCParamBuf->type = VAEncMiscParameterTypeRateControl; rcMiscParam = (VAEncMiscParameterRateControl *)miscEncRCParamBuf->data; miscEncFrameRateParamBuf->type = VAEncMiscParameterTypeFrameRate; @@ -785,6 +806,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { // avcSeqParams.level_idc = mLevel; avcSeqParams.intra_period = mComParams.intraPeriod; avcSeqParams.intra_idr_period = mVideoParamsAVC.idrInterval; + avcSeqParams.ip_period = mVideoParamsAVC.ipPeriod; avcSeqParams.picture_width_in_mbs = (mComParams.resolution.width + 15) / 16; avcSeqParams.picture_height_in_mbs = (mComParams.resolution.height + 15) / 16; @@ -822,7 +844,9 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { } // This is a temporary fix suggested by Binglin for bad encoding quality issue - avcSeqParams.max_num_ref_frames = 1; // TODO: We need a long term design for this field + avcSeqParams.max_num_ref_frames = 1; + if(avcSeqParams.ip_period > 1) + avcSeqParams.max_num_ref_frames = 2; LOG_V("===h264 sequence params===\n"); LOG_I( "seq_parameter_set_id = %d\n", (uint32_t)avcSeqParams.seq_parameter_set_id); @@ -847,28 +871,27 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { sizeof(avcSeqParams), 1, &avcSeqParams, &mSeqParamBuf); CHECK_VA_STATUS_RETURN("vaCreateBuffer"); - - vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mRcParamBuf, 1); 
- CHECK_VA_STATUS_RETURN("vaRenderPicture"); vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mFrameRateParamBuf, 1); CHECK_VA_STATUS_RETURN("vaRenderPicture"); vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1); CHECK_VA_STATUS_RETURN("vaRenderPicture"); + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mRcParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); return ENCODE_SUCCESS; } -Encode_Status VideoEncoderAVC::renderPictureParams() { +Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncPictureParameterBufferH264 avcPicParams = {}; LOG_V( "Begin\n\n"); // set picture params for HW - avcPicParams.ReferenceFrames[0].picture_id= mRefSurface; - avcPicParams.CurrPic.picture_id= mRecSurface; - avcPicParams.coded_buf = mVACodedBuffer [mCodedBufIndex]; + avcPicParams.ReferenceFrames[0].picture_id= task->ref_surface[0]; + avcPicParams.CurrPic.picture_id= task->rec_surface; + avcPicParams.coded_buf = task->coded_buffer; //avcPicParams.picture_width = mComParams.resolution.width; //avcPicParams.picture_height = mComParams.resolution.height; avcPicParams.last_picture = 0; @@ -876,7 +899,7 @@ Encode_Status VideoEncoderAVC::renderPictureParams() { LOG_V("======h264 picture params======\n"); LOG_I( "reference_picture = 0x%08x\n", avcPicParams.ReferenceFrames[0].picture_id); LOG_I( "reconstructed_picture = 0x%08x\n", avcPicParams.CurrPic.picture_id); - LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); +// LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); LOG_I( "coded_buf = 0x%08x\n", avcPicParams.coded_buf); //LOG_I( "picture_width = %d\n", avcPicParams.picture_width); //LOG_I( "picture_height = %d\n\n", avcPicParams.picture_height); @@ -897,7 +920,7 @@ Encode_Status VideoEncoderAVC::renderPictureParams() { } -Encode_Status VideoEncoderAVC::renderSliceParams() { +Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; @@ -906,8 +929,8 @@ Encode_Status VideoEncoderAVC::renderSliceParams() { uint32_t sliceHeightInMB = 0; uint32_t maxSliceNum = 0; uint32_t minSliceNum = 0; - int actualSliceHeightInMB = 0; - int startRowInMB = 0; + uint32_t actualSliceHeightInMB = 0; + uint32_t startRowInMB = 0; uint32_t modulus = 0; LOG_V( "Begin\n\n"); @@ -915,7 +938,7 @@ Encode_Status VideoEncoderAVC::renderSliceParams() { maxSliceNum = (mComParams.resolution.height + 15) / 16; minSliceNum = 1; - if (mIsIntra) { + if (task->type == FTYPE_I || task->type == FTYPE_IDR) { sliceNum = mVideoParamsAVC.sliceNum.iSliceNum; } else { sliceNum = mVideoParamsAVC.sliceNum.pSliceNum; @@ -938,14 +961,18 @@ Encode_Status VideoEncoderAVC::renderSliceParams() { vaStatus = vaCreateBuffer( mVADisplay, mVAContext, VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), + sizeof(VAEncSliceParameterBufferH264), sliceNum, NULL, &mSliceParamBuf); CHECK_VA_STATUS_RETURN("vaCreateBuffer"); - VAEncSliceParameterBuffer *sliceParams, *currentSlice; + VAEncSliceParameterBufferH264 *sliceParams, *currentSlice; + vaStatus = vaMapBuffer(mVADisplay, mSliceParamBuf, (void **)&sliceParams); CHECK_VA_STATUS_RETURN("vaMapBuffer"); + memset(sliceParams, 0 , sizeof(VAEncSliceParameterBufferH264)); + if(!sliceParams) + return ENCODE_NULL_PTR; currentSlice = sliceParams; startRowInMB = 0; @@ -956,25 +983,29 @@ Encode_Status VideoEncoderAVC::renderSliceParams() { actualSliceHeightInMB ++; } - // starting MB row number for this slice - currentSlice->start_row_number = startRowInMB; + // starting MB row 
number for this slice, suppose macroblock 16x16 + currentSlice->macroblock_address = startRowInMB * mComParams.resolution.width /16; // slice height measured in MB - currentSlice->slice_height = actualSliceHeightInMB; - currentSlice->slice_flags.bits.is_intra = mIsIntra; - currentSlice->slice_flags.bits.disable_deblocking_filter_idc - = mComParams.disableDeblocking; + currentSlice->num_macroblocks = actualSliceHeightInMB * mComParams.resolution.width /16; + if(task->type == FTYPE_I||task->type == FTYPE_IDR) + currentSlice->slice_type = 2; + else if(task->type == FTYPE_P) + currentSlice->slice_type = 0; + else if(task->type == FTYPE_B) + currentSlice->slice_type = 1; + currentSlice->disable_deblocking_filter_idc = mComParams.disableDeblocking; // This is a temporary fix suggested by Binglin for bad encoding quality issue // TODO: We need a long term design for this field - currentSlice->slice_flags.bits.uses_long_term_ref = 0; - currentSlice->slice_flags.bits.is_long_term_ref = 0; + //currentSlice->slice_flags.bits.uses_long_term_ref = 0; + //currentSlice->slice_flags.bits.is_long_term_ref = 0; LOG_V("======AVC slice params======\n"); LOG_I( "slice_index = %d\n", (int) sliceIndex); - LOG_I( "start_row_number = %d\n", (int) currentSlice->start_row_number); - LOG_I( "slice_height_in_mb = %d\n", (int) currentSlice->slice_height); - LOG_I( "slice.is_intra = %d\n", (int) currentSlice->slice_flags.bits.is_intra); - LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) currentSlice->slice_flags.bits.disable_deblocking_filter_idc); + LOG_I( "macroblock_address = %d\n", (int) currentSlice->macroblock_address); + LOG_I( "slice_height_in_mb = %d\n", (int) currentSlice->num_macroblocks); + LOG_I( "slice.type = %d\n", (int) currentSlice->slice_type); + LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) currentSlice->disable_deblocking_filter_idc); startRowInMB += actualSliceHeightInMB; } diff --git a/videoencoder/VideoEncoderAVC.h b/videoencoder/VideoEncoderAVC.h index b57ef67..1248a3e 100644 --- a/videoencoder/VideoEncoderAVC.h +++ b/videoencoder/VideoEncoderAVC.h @@ -18,7 +18,6 @@ public: ~VideoEncoderAVC() {}; virtual Encode_Status start(); - virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams); virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams); @@ -27,8 +26,9 @@ public: protected: - virtual Encode_Status sendEncodeCommand(void); - + virtual Encode_Status sendEncodeCommand(EncodeTask *task); + virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer); + virtual Encode_Status updateFrameInfo(EncodeTask* task); private: // Local Methods @@ -40,9 +40,9 @@ private: Encode_Status renderMaxSliceSize(); Encode_Status renderAIR(); - Encode_Status renderSequenceParams(); - Encode_Status renderPictureParams(); - Encode_Status renderSliceParams(); + Encode_Status renderSequenceParams(EncodeTask *task); + Encode_Status renderPictureParams(EncodeTask *task); + Encode_Status renderSliceParams(EncodeTask *task); int calcLevel(int numMbs); public: diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 55012d7..b52cbd1 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -12,7 +12,6 @@ #include #include -#undef DUMP_SRC_DATA // To dump source data // API declaration extern "C" { VAStatus vaLockSurface(VADisplay dpy, @@ -33,48 +32,36 @@ VAStatus vaUnlockSurface(VADisplay dpy, ); } VideoEncoderBase::VideoEncoderBase() 
- :mInitialized(false) + :mInitialized(true) + ,mStarted(false) ,mVADisplay(NULL) - ,mVAContext(0) - ,mVAConfig(0) + ,mVAContext(VA_INVALID_ID) + ,mVAConfig(VA_INVALID_ID) ,mVAEntrypoint(VAEntrypointEncSlice) - ,mCurSegment(NULL) - ,mOffsetInSeg(0) - ,mTotalSize(0) - ,mTotalSizeCopied(0) - ,mForceKeyFrame(false) + ,mCodedBufSize(0) ,mNewHeader(false) - ,mFirstFrame (true) + //,mAutoReference(17 /*VAConfigAttribEncAutoReference*/) ,mRenderMaxSliceSize(false) ,mRenderQP (false) ,mRenderAIR(false) ,mRenderFrameRate(false) ,mRenderBitRate(false) ,mRenderHrd(false) - ,mLastCodedBuffer(0) - ,mOutCodedBuffer(0) ,mSeqParamBuf(0) ,mPicParamBuf(0) ,mSliceParamBuf(0) - ,mSurfaces(NULL) - ,mSurfaceCnt(0) - ,mSrcSurfaceMapList(NULL) - ,mCurSurface(VA_INVALID_SURFACE) ,mRefSurface(VA_INVALID_SURFACE) ,mRecSurface(VA_INVALID_SURFACE) - ,mLastSurface(VA_INVALID_SURFACE) - ,mLastInputRawBuffer(NULL) - ,mEncodedFrames(0) ,mFrameNum(0) - ,mCodedBufSize(0) - ,mCodedBufIndex(0) - ,mPicSkipped(false) - ,mIsIntra(true) ,mSliceSizeOverflow(false) + ,mCurOutputTask(NULL) + ,mOutCodedBuffer(0) ,mCodedBufferMapped(false) - ,mDataCopiedOut(false) - ,mKeyFrame(true) - ,mInitCheck(true) { + ,mCurSegment(NULL) + ,mOffsetInSeg(0) + ,mTotalSize(0) + ,mTotalSizeCopied(0) + ,mFrameSkipped(false){ VAStatus vaStatus = VA_STATUS_SUCCESS; // here the display can be any value, use following one @@ -84,8 +71,6 @@ VideoEncoderBase::VideoEncoderBase() int minorVersion = -1; setDefaultParams(); - mVACodedBuffer [0] = 0; - mVACodedBuffer [1] = 0; LOG_V("vaGetDisplay \n"); mVADisplay = vaGetDisplay(&display); @@ -97,19 +82,17 @@ VideoEncoderBase::VideoEncoderBase() LOG_V("vaInitialize \n"); if (vaStatus != VA_STATUS_SUCCESS) { LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus); - mInitCheck = false; + mInitialized = false; } -#ifdef VIDEO_ENC_STATISTICS_ENABLE - memset(&mVideoStat, 0, sizeof(VideoStatistics)); - mVideoStat.min_encode_time = 0xFFFFFFFF; -#endif - } VideoEncoderBase::~VideoEncoderBase() { VAStatus vaStatus = VA_STATUS_SUCCESS; + + stop(); + vaStatus = vaTerminate(mVADisplay); LOG_V( "vaTerminate\n"); if (vaStatus != VA_STATUS_SUCCESS) { @@ -123,32 +106,24 @@ Encode_Status VideoEncoderBase::start() { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; - VASurfaceID surfaces[2]; - int32_t index = -1; - SurfaceMap *map = mSrcSurfaceMapList; - uint32_t stride_aligned = 0; - uint32_t height_aligned = 0; - VAConfigAttrib vaAttrib[2]; - uint32_t maxSize = 0; + if (!mInitialized) { + LOGE("Encoder Initialize fail can not start"); + return ENCODE_DRIVER_FAIL; + } - if (mInitialized) { + if (mStarted) { LOG_V("Encoder has been started\n"); return ENCODE_ALREADY_INIT; } - if (!mInitCheck) { - LOGE("Encoder Initialize fail can not start"); - return ENCODE_DRIVER_FAIL; - } - + VAConfigAttrib vaAttrib[2]; vaAttrib[0].type = VAConfigAttribRTFormat; vaAttrib[1].type = VAConfigAttribRateControl; vaAttrib[0].value = VA_RT_FORMAT_YUV420; vaAttrib[1].value = mComParams.rcMode; LOG_V( "======VA Configuration======\n"); - LOG_I( "profile = %d\n", mComParams.profile); LOG_I( "mVAEntrypoint = %d\n", mVAEntrypoint); LOG_I( "vaAttrib[0].type = %d\n", vaAttrib[0].type); @@ -161,10 +136,9 @@ Encode_Status VideoEncoderBase::start() { vaStatus = vaCreateConfig( mVADisplay, mComParams.profile, mVAEntrypoint, &vaAttrib[0], 2, &(mVAConfig)); - CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateConfig"); + CHECK_VA_STATUS_RETURN("vaCreateConfig"); if (mComParams.rcMode == VA_RC_VCM) { - // Following three features are only enabled in VCM 
mode mRenderMaxSliceSize = true; mRenderAIR = true; @@ -173,10 +147,10 @@ Encode_Status VideoEncoderBase::start() { LOG_V( "======VA Create Surfaces for Rec/Ref frames ======\n"); + VASurfaceID surfaces[2]; VASurfaceAttributeTPI attribute_tpi; - - stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; - height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; + uint32_t stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; + uint32_t height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; attribute_tpi.size = stride_aligned * height_aligned * 3 / 2; attribute_tpi.luma_stride = stride_aligned; @@ -188,415 +162,357 @@ Encode_Status VideoEncoderBase::start() { attribute_tpi.pixel_format = VA_FOURCC_NV12; attribute_tpi.type = VAExternalMemoryNULL; - vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned, - VA_RT_FORMAT_YUV420, 2, surfaces, &attribute_tpi); - CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); +#ifndef AUTO_REFERENCE + vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned, + VA_RT_FORMAT_YUV420, 2, surfaces, &attribute_tpi); + CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); + mRefSurface = surfaces[0]; + mRecSurface = surfaces[1]; +#endif - mRefSurface = surfaces[0]; - mRecSurface = surfaces[1]; + //Prepare all Surfaces to be added into Context + uint32_t contextSurfaceCnt; +#ifndef AUTO_REFERENCE + contextSurfaceCnt = 2 + mSrcSurfaceMapList.size(); +#else + contextSurfaceCnt = mSrcSurfaceMapList.size(); +#endif - //count total surface id already allocated - mSurfaceCnt = 2; - - while(map) { - mSurfaceCnt ++; - map = map->next; - } + VASurfaceID *contextSurfaces = new VASurfaceID[contextSurfaceCnt]; + int32_t index = -1; + SurfaceMap *map = NULL; + android::List::iterator map_node; - mSurfaces = new VASurfaceID[mSurfaceCnt]; - map = mSrcSurfaceMapList; - while(map) { - mSurfaces[++index] = map->surface; - map->added = true; - map = map->next; + for(map_node = mSrcSurfaceMapList.begin(); map_node != mSrcSurfaceMapList.end(); map_node++) + { + contextSurfaces[++index] = (*map_node)->surface; + (*map_node)->added = true; } - mSurfaces[++index] = mRefSurface; - mSurfaces[++index] = mRecSurface; + +#ifndef AUTO_REFERENCE + contextSurfaces[++index] = mRefSurface; + contextSurfaces[++index] = mRecSurface; +#endif //Initialize and save the VA context ID LOG_V( "vaCreateContext\n"); - vaStatus = vaCreateContext(mVADisplay, mVAConfig, mComParams.resolution.width, mComParams.resolution.height, - 0, mSurfaces, mSurfaceCnt, + 0, contextSurfaces, contextSurfaceCnt, &(mVAContext)); - CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateContext"); - LOG_I("Created libva context width %d, height %d\n", - mComParams.resolution.width, mComParams.resolution.height); - - ret = getMaxOutSize(&maxSize); - CHECK_ENCODE_STATUS_CLEANUP("getMaxOutSize"); - - // Create coded buffer for output - vaStatus = vaCreateBuffer(mVADisplay, mVAContext, - VAEncCodedBufferType, - mCodedBufSize, - 1, NULL, - &(mVACodedBuffer[0])); + delete [] contextSurfaces; - CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateBuffer::VAEncCodedBufferType"); + CHECK_VA_STATUS_RETURN("vaCreateContext"); - // Create coded buffer for output - vaStatus = vaCreateBuffer(mVADisplay, mVAContext, - VAEncCodedBufferType, - mCodedBufSize, - 1, NULL, - &(mVACodedBuffer[1])); + LOG_I("Success to create libva context width %d, height %d\n", + mComParams.resolution.width, mComParams.resolution.height); - CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateBuffer::VAEncCodedBufferType"); + 
uint32_t maxSize = 0; + ret = getMaxOutSize(&maxSize); + CHECK_ENCODE_STATUS_RETURN("getMaxOutSize"); - mFirstFrame = true; + // Create CodedBuffer for output + VABufferID VACodedBuffer; -CLEAN_UP: + for(uint32_t i = 0; i mVideoStat.max_encode_time) { - mVideoStat.max_encode_time = encode_time; - mVideoStat.max_encode_frame = mFrameNum; + //Prepare CodedBuffer + mCodedBuffer_Lock.lock(); + if(mVACodedBufferList.empty()){ + if(timeout == FUNC_BLOCK) + mCodedBuffer_Cond.wait(mCodedBuffer_Lock); + else if (timeout > 0) + if(NO_ERROR != mEncodeTask_Cond.waitRelative(mCodedBuffer_Lock, 1000000*timeout)){ + mCodedBuffer_Lock.unlock(); + LOG_E("Time out wait for Coded buffer.\n"); + return ENCODE_DEVICE_BUSY; + } + else {//Nonblock + mCodedBuffer_Lock.unlock(); + LOG_E("Coded buffer is not ready now.\n"); + return ENCODE_DEVICE_BUSY; + } } - if (encode_time < mVideoStat.min_encode_time) { - mVideoStat.min_encode_time = encode_time; - mVideoStat.min_encode_frame = mFrameNum; - } + if(mVACodedBufferList.empty()){ + mCodedBuffer_Lock.unlock(); + return ENCODE_DEVICE_BUSY; + } + VABufferID coded_buf = (VABufferID) *(mVACodedBufferList.begin()); + mVACodedBufferList.erase(mVACodedBufferList.begin()); + mCodedBuffer_Lock.unlock(); + + LOG_V("CodedBuffer ID 0x%08x\n", coded_buf); + + //All resources are ready, start to assemble EncodeTask + EncodeTask* task = new EncodeTask(); + + task->completed = false; + task->enc_surface = sid; + task->coded_buffer = coded_buf; + task->timestamp = inBuffer->timeStamp; + task->in_data = inBuffer->data; + + //Setup frame info, like flag ( SYNCFRAME), frame number, type etc + task->type = inBuffer->type; + task->flag = inBuffer->flag; + PrepareFrameInfo(task); + +#ifndef AUTO_REFERENCE + //Setup ref /rec frames + //TODO: B frame support, temporary use same logic + switch (inBuffer->type) { + case FTYPE_UNKNOWN: + case FTYPE_IDR: + case FTYPE_I: + case FTYPE_P: + { + if(!mFrameSkipped) { + VASurfaceID tmpSurface = mRecSurface; + mRecSurface = mRefSurface; + mRefSurface = tmpSurface; + } + + task->ref_surface[0] = mRefSurface; + task->ref_surface[1] = VA_INVALID_SURFACE; + task->rec_surface = mRecSurface; - mVideoStat.average_encode_time += encode_time; + break; + } + case FTYPE_B: + default: + LOG_V("Something wrong, B frame may not be supported in this mode\n"); + ret = ENCODE_NOT_SUPPORTED; + goto CLEAN_UP; + } +#else + task->ref_surface[0] = VA_INVALID_SURFACE; + task->ref_surface[1] = VA_INVALID_SURFACE; + task->rec_surface = VA_INVALID_SURFACE; #endif - return status; -} - -Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { - - Encode_Status ret = ENCODE_SUCCESS; - VAStatus vaStatus = VA_STATUS_SUCCESS; - uint8_t *buf = NULL; - - inBuffer->bufAvailable = false; - if (mNewHeader) mFrameNum = 0; - - // current we use one surface for source data, - // one for reference and one for reconstructed - decideFrameType(); - ret = manageSrcSurface(inBuffer); - CHECK_ENCODE_STATUS_RETURN("manageSrcSurface"); - - // Start encoding process - LOG_V( "vaBeginPicture\n"); - LOG_I( "mVAContext = 0x%08x\n",(uint32_t) mVAContext); - LOG_I( "Surface = 0x%08x\n",(uint32_t) mCurSurface); - LOG_I( "mVADisplay = 0x%08x\n",(uint32_t)mVADisplay); - -#ifdef DUMP_SRC_DATA - - if (mBufferMode == BUFFER_SHARING_SURFACE && mFirstFrame){ - - FILE *fp = fopen("/data/data/dump_encoder.yuv", "wb"); - VAImage image; - uint8_t *usrptr = NULL; - uint32_t stride = 0; - uint32_t frameSize = 0; - - vaStatus = vaDeriveImage(mVADisplay, mCurSurface, &image); - 
CHECK_VA_STATUS_RETURN("vaDeriveImage"); - - LOG_V( "vaDeriveImage Done\n"); - - frameSize = image.data_size; - stride = image.pitches[0]; - - LOG_I("Source Surface/Image information --- start ---- :"); - LOG_I("surface = 0x%08x\n",(uint32_t)mCurFrame->surface); - LOG_I("image->pitches[0] = %d\n", image.pitches[0]); - LOG_I("image->pitches[1] = %d\n", image.pitches[1]); - LOG_I("image->offsets[0] = %d\n", image.offsets[0]); - LOG_I("image->offsets[1] = %d\n", image.offsets[1]); - LOG_I("image->num_planes = %d\n", image.num_planes); - LOG_I("image->width = %d\n", image.width); - LOG_I("image->height = %d\n", image.height); - LOG_I ("frameSize= %d\n", image.data_size); - LOG_I("Source Surface/Image information ----end ----"); - - vaStatus = vaMapBuffer(mVADisplay, image.buf, (void **) &usrptr); - CHECK_VA_STATUS_RETURN("vaMapBuffer"); - - fwrite(usrptr, frameSize, 1, fp); - fflush(fp); - fclose(fp); - - vaStatus = vaUnmapBuffer(mVADisplay, image.buf); - CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); - - vaStatus = vaDestroyImage(mVADisplay, image.image_id); - CHECK_VA_STATUS_RETURN("vaDestroyImage"); - } -#endif + //======Start Encoding, add task to list====== + LOG_V("Start Encoding vaSurface=0x%08x\n", task->enc_surface); - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface); - CHECK_VA_STATUS_RETURN("vaBeginPicture"); + vaStatus = vaBeginPicture(mVADisplay, mVAContext, task->enc_surface); + CHECK_VA_STATUS_GOTO_CLEANUP("vaBeginPicture"); - ret = sendEncodeCommand(); - CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand"); + ret = sendEncodeCommand(task); + CHECK_ENCODE_STATUS_CLEANUP("sendEncodeCommand"); vaStatus = vaEndPicture(mVADisplay, mVAContext); - CHECK_VA_STATUS_RETURN("vaEndPicture"); + CHECK_VA_STATUS_GOTO_CLEANUP("vaEndPicture"); - LOG_V( "vaEndPicture\n"); + LOG_V("Add Task %p into Encode Task list\n", task); + mEncodeTask_Lock.lock(); + mEncodeTaskList.push_back(task); + mEncodeTask_Cond.signal(); + mEncodeTask_Lock.unlock(); - if (mFirstFrame) { - updateProperities(); - decideFrameType(); - } - - LOG_I ("vaSyncSurface ID = 0x%08x\n", mLastSurface); - vaStatus = vaSyncSurface(mVADisplay, mLastSurface); - if (vaStatus != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaSyncSurface\n"); - } - - mOutCodedBuffer = mLastCodedBuffer; - - // Need map buffer before calling query surface below to get - // the right skip frame flag for current frame - // It is a requirement of video driver - vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); - vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - - if (mFirstFrame) { - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface); - CHECK_VA_STATUS_RETURN("vaBeginPicture"); - - ret = sendEncodeCommand(); - CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand"); - - vaStatus = vaEndPicture(mVADisplay, mVAContext); - CHECK_VA_STATUS_RETURN("vaEndPicture"); - - mKeyFrame = true; - } - - // Query the status of last surface to check if its next frame is skipped - VASurfaceStatus vaSurfaceStatus; - vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastSurface, &vaSurfaceStatus); - CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); - - mPicSkipped = vaSurfaceStatus & VASurfaceSkipped; + mFrameNum ++; -#ifdef VIDEO_ENC_STATISTICS_ENABLE - if (mPicSkipped) - mVideoStat.skipped_frames ++; -#endif + LOG_V("encode return Success\n"); - mLastSurface = VA_INVALID_SURFACE; - updateProperities(); - mCurSurface = VA_INVALID_SURFACE; + return ENCODE_SUCCESS; - if (mLastInputRawBuffer) mLastInputRawBuffer->bufAvailable = true; +CLEAN_UP: - LOG_V("ref the 
current inBuffer\n"); + delete task; + mCodedBuffer_Lock.lock(); + mVACodedBufferList.push_back(coded_buf); //push to CodedBuffer pool again since it is not used + mCodedBuffer_Cond.signal(); + mCodedBuffer_Lock.unlock(); - mLastInputRawBuffer = inBuffer; - mFirstFrame = false; + LOG_V("encode return error=%x\n", ret); - return ENCODE_SUCCESS; + return ret; } -Encode_Status VideoEncoderBase::syncEncode(VideoEncRawBuffer *inBuffer) { +/* + 1. First, check whether one task is already outputting data; if so, continue with it, otherwise try to take one from the list. + 2. Three calling modes are supported: block, non-block and block-with-timeout. If the task is not completed yet, sync the surface, + then start to output data. + 3. The member mCurOutputTask records the task getOutput() is working on, so the task is not pushed back again when a call fails + in non-block or block-with-timeout mode. + 4. Once all output data has been copied out, mCurOutputTask should be reset to NULL. +*/ +Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout) { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; - uint8_t *buf = NULL; - VASurfaceID tmpSurface = VA_INVALID_SURFACE; - - inBuffer->bufAvailable = false; - if (mNewHeader) mFrameNum = 0; - - // current we use one surface for source data, - // one for reference and one for reconstructed - decideFrameType(); - ret = manageSrcSurface(inBuffer); - CHECK_ENCODE_STATUS_RETURN("manageSrcSurface"); - - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface); - CHECK_VA_STATUS_RETURN("vaBeginPicture"); + bool useLocalBuffer = false; - ret = sendEncodeCommand(); - CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand"); + CHECK_NULL_RETURN_IFFAIL(outBuffer); - vaStatus = vaEndPicture(mVADisplay, mVAContext); - CHECK_VA_STATUS_RETURN("vaEndPicture"); + if (mCurOutputTask == NULL) { + mEncodeTask_Lock.lock(); + if(mEncodeTaskList.empty()) { + LOG_V("getOutput CurrentTask is NULL\n"); + if(timeout == FUNC_BLOCK) { + LOG_V("waiting for task....\n"); + mEncodeTask_Cond.wait(mEncodeTask_Lock); + } else if (timeout > 0) { + LOG_V("waiting for task in %d ms....\n", timeout); + if(NO_ERROR != mEncodeTask_Cond.waitRelative(mEncodeTask_Lock, 1000000*timeout)) { + mEncodeTask_Lock.unlock(); + LOG_E("Timed out waiting for encode task.\n"); + return ENCODE_DATA_NOT_READY; + } + } else {//Nonblock + mEncodeTask_Lock.unlock(); + return ENCODE_DATA_NOT_READY; + } + } - LOG_I ("vaSyncSurface ID = 0x%08x\n", mCurSurface); - vaStatus = vaSyncSurface(mVADisplay, mCurSurface); - if (vaStatus != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaSyncSurface\n"); + if(mEncodeTaskList.empty()){ + mEncodeTask_Lock.unlock(); + return ENCODE_DATA_NOT_READY; + } + mCurOutputTask = *(mEncodeTaskList.begin()); + mEncodeTaskList.erase(mEncodeTaskList.begin()); + mEncodeTask_Lock.unlock(); } - mOutCodedBuffer = mVACodedBuffer[mCodedBufIndex]; - - // Need map buffer before calling query surface below to get - // the right skip frame flag for current frame - // It is a requirement of video driver - vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); - vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - - mPicSkipped = false; - if (!mFirstFrame) { - // Query the status of last surface to check if its next frame is skipped + //sync/query/wait task if not completed + if (mCurOutputTask->completed == false) { + uint8_t *buf = NULL; VASurfaceStatus vaSurfaceStatus; - vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastSurface, &vaSurfaceStatus); - CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); - mPicSkipped = 
vaSurfaceStatus & VASurfaceSkipped; - } - mLastSurface = mCurSurface; - mCurSurface = VA_INVALID_SURFACE; + if (timeout == FUNC_BLOCK) { + //block mode, direct sync surface to output data - mEncodedFrames ++; - mFrameNum ++; + LOG_I ("block mode, vaSyncSurface ID = 0x%08x\n", mCurOutputTask->enc_surface); + vaStatus = vaSyncSurface(mVADisplay, mCurOutputTask->enc_surface); + CHECK_VA_STATUS_GOTO_CLEANUP("vaSyncSurface"); - if (!mPicSkipped) { - tmpSurface = mRecSurface; - mRecSurface = mRefSurface; - mRefSurface = tmpSurface; - } + mOutCodedBuffer = mCurOutputTask->coded_buffer; -#ifdef VIDEO_ENC_STATISTICS_ENABLE - if (mPicSkipped) - mVideoStat.skipped_frames ++; -#endif + // Check frame skip + // Need map buffer before calling query surface below to get the right skip frame flag for current frame + // It is a requirement of video driver + vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - inBuffer->bufAvailable = true; - return ENCODE_SUCCESS; -} + vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus); + CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); + mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped; -void VideoEncoderBase::setKeyFrame(int32_t keyFramePeriod) { + mCurOutputTask->completed = true; - // For first getOutput async mode, the mFrameNum already increased to 2, and of course is key frame - // frame 0 is already encoded and will be outputed here - // frame 1 is encoding now, frame 2 will be sent to encoder for next encode() call - if (!mComParams.syncEncMode) { - if (mFrameNum > 2) { - if (keyFramePeriod != 0 && - (((mFrameNum - 2) % keyFramePeriod) == 0)) { - mKeyFrame = true; - } else { - mKeyFrame = false; - } - } else if (mFrameNum == 2) { - mKeyFrame = true; - } - } else { - if (mFrameNum > 1) { - if (keyFramePeriod != 0 && - (((mFrameNum - 1) % keyFramePeriod) == 0)) { - mKeyFrame = true; - } else { - mKeyFrame = false; + } else { + //For both block with timeout and non-block mode, query surface, if ready, output data + LOG_I ("non-block mode, vaQuerySurfaceStatus ID = 0x%08x\n", mCurOutputTask->enc_surface); + + vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus); + if (vaSurfaceStatus & VASurfaceReady) { + mOutCodedBuffer = mCurOutputTask->coded_buffer; + mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped; + mCurOutputTask->completed = true; + //if need to call SyncSurface again ? 
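// A possible answer to the question above (an editorial sketch, not part of the
// original patch): once vaQuerySurfaceStatus() reports VASurfaceReady, encoding of
// this surface has finished, so a second sync should be redundant. If a driver were
// found to require it, a defensive variant could sync before mapping the coded
// buffer, reusing the calls already used in the block-mode path of this function:
//
//     vaStatus = vaSyncSurface(mVADisplay, mCurOutputTask->enc_surface);
//     CHECK_VA_STATUS_GOTO_CLEANUP("vaSyncSurface");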
+ + } else {//not ready yet + ret = ENCODE_DATA_NOT_READY; + goto CLEAN_UP; } - } else if (mFrameNum == 1) { - mKeyFrame = true; - } - } -} -Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer) { - - Encode_Status ret = ENCODE_SUCCESS; - VAStatus vaStatus = VA_STATUS_SUCCESS; - bool useLocalBuffer = false; - - CHECK_NULL_RETURN_IFFAIL(outBuffer); - - LOG_V("Begin\n"); + } - if (outBuffer->format != OUTPUT_EVERYTHING && outBuffer->format != OUTPUT_FRAME_DATA) { - LOG_E("Output buffer mode not supported\n"); - goto CLEAN_UP; } - setKeyFrame(mComParams.intraPeriod); - + //start to output data ret = prepareForOutput(outBuffer, &useLocalBuffer); CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); - ret = outputAllData(outBuffer); - CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); + //copy all flags to outBuffer + outBuffer->flag = mCurOutputTask->flag; + outBuffer->type = mCurOutputTask->type; + outBuffer->timeStamp = mCurOutputTask->timestamp; + + if (outBuffer->format == OUTPUT_EVERYTHING || outBuffer->format == OUTPUT_FRAME_DATA) { + ret = outputAllData(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); + }else { + ret = getExtFormatOutput(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("getExtFormatOutput"); + } LOG_I("out size for this getOutput call = %d\n", outBuffer->dataSize); ret = cleanupForOutput(); CHECK_ENCODE_STATUS_CLEANUP("cleanupForOutput"); + LOG_V("getOutput return Success, Frame skip is %d\n", mFrameSkipped); + + return ENCODE_SUCCESS; + CLEAN_UP: - if (ret < ENCODE_SUCCESS) { - if (outBuffer->data && (useLocalBuffer == true)) { - delete[] outBuffer->data; - outBuffer->data = NULL; - useLocalBuffer = false; - } + if (outBuffer->data && (useLocalBuffer == true)) { + delete[] outBuffer->data; + outBuffer->data = NULL; + useLocalBuffer = false; + } - if (mCodedBufferMapped) { - vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - mCodedBufferMapped = false; - mCurSegment = NULL; - } + if (mCodedBufferMapped) { + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + mCodedBufferMapped = false; + mCurSegment = NULL; } - LOG_V("End\n"); + delete mCurOutputTask; + mCurOutputTask = NULL; + mCodedBuffer_Lock.lock(); + mVACodedBufferList.push_back(mOutCodedBuffer); + mCodedBuffer_Cond.signal(); + mCodedBuffer_Lock.unlock(); + + LOG_V("getOutput return error=%x\n", ret); return ret; } - void VideoEncoderBase::flush() { LOG_V( "Begin\n"); // reset the properities - mEncodedFrames = 0; mFrameNum = 0; - mPicSkipped = false; - mIsIntra = true; LOG_V( "end\n"); } @@ -606,57 +522,69 @@ Encode_Status VideoEncoderBase::stop() { VAStatus vaStatus = VA_STATUS_SUCCESS; Encode_Status ret = ENCODE_SUCCESS; SurfaceMap *map = NULL; + EncodeTask *task = NULL; LOG_V( "Begin\n"); - if (mSurfaces) { - delete [] mSurfaces; - mSurfaces = NULL; - } - // It is possible that above pointers have been allocated - // before we set mInitialized to true - if (!mInitialized) { + // before we set mStarted to true + if (!mStarted) { LOG_V("Encoder has been stopped\n"); return ENCODE_SUCCESS; } - LOG_V( "vaDestroyContext\n"); - vaStatus = vaDestroyContext(mVADisplay, mVAContext); - CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext"); + mCodedBuffer_Lock.lock(); + mVACodedBufferList.clear(); + mCodedBuffer_Lock.unlock(); + mCodedBuffer_Cond.broadcast(); - LOG_V( "vaDestroyConfig\n"); - vaStatus = vaDestroyConfig(mVADisplay, mVAConfig); - CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig"); + //Delete all uncompleted tasks + mEncodeTask_Lock.lock(); + while(! 
mEncodeTaskList.empty()) + { + delete *mEncodeTaskList.begin(); + mEncodeTaskList.erase(mEncodeTaskList.begin()); + } + mEncodeTask_Lock.unlock(); + mEncodeTask_Cond.broadcast(); - // Release Src Surface Buffer Map + //Release Src Surface Buffer Map, destroy surface manually since it is not added into context LOG_V( "Rlease Src Surface Map\n"); - - map = mSrcSurfaceMapList; - while(map) { - if (! map->added) { - //destroy surface by itself - LOG_V( "Rlease Src Surface Buffer not added into vaContext\n"); - vaDestroySurfaces(mVADisplay, &map->surface, 1); + while(! mSrcSurfaceMapList.empty()) + { + if (! (*mSrcSurfaceMapList.begin())->added) { + LOG_V( "Rlease the Src Surface Buffer not added into vaContext\n"); + vaDestroySurfaces(mVADisplay, &((*mSrcSurfaceMapList.begin())->surface), 1); } - SurfaceMap *tmp = map; - map = map->next; - delete tmp; + delete (*mSrcSurfaceMapList.begin()); + mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin()); + } + + LOG_V( "vaDestroyContext\n"); + if (mVAContext != VA_INVALID_ID) { + vaStatus = vaDestroyContext(mVADisplay, mVAContext); + CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext"); + } + + LOG_V( "vaDestroyConfig\n"); + if (mVAConfig != VA_INVALID_ID) { + vaStatus = vaDestroyConfig(mVADisplay, mVAConfig); + CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig"); } CLEAN_UP: - mInitialized = false; -#ifdef VIDEO_ENC_STATISTICS_ENABLE - LOG_V("Encoder Statistics:\n"); - LOG_V(" %d frames Encoded, %d frames Skipped\n", mEncodedFrames, mVideoStat.skipped_frames); - LOG_V(" Encode time: Average(%d us), Max(%d us @Frame No.%d), Min(%d us @Frame No.%d)\n", \ - mVideoStat.average_encode_time / mEncodedFrames, mVideoStat.max_encode_time, \ - mVideoStat.max_encode_frame, mVideoStat.min_encode_time, mVideoStat.min_encode_frame); + mStarted = false; + mSliceSizeOverflow = false; + mCurOutputTask= NULL; + mOutCodedBuffer = 0; + mCodedBufferMapped = false; + mCurSegment = NULL; + mOffsetInSeg =0; + mTotalSize = 0; + mTotalSizeCopied = 0; + mFrameSkipped = false; - memset(&mVideoStat, 0, sizeof(VideoStatistics)); - mVideoStat.min_encode_time = 0xFFFFFFFF; -#endif LOG_V( "end\n"); return ret; } @@ -753,13 +681,23 @@ Encode_Status VideoEncoderBase::cleanupForOutput() { vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); mCodedBufferMapped = false; + mTotalSize = 0; + mOffsetInSeg = 0; + mTotalSizeCopied = 0; + + delete mCurOutputTask; + mCurOutputTask = NULL; + mCodedBuffer_Lock.lock(); + mVACodedBufferList.push_back(mOutCodedBuffer); + mCodedBuffer_Cond.signal(); + mCodedBuffer_Lock.unlock(); + + LOG_V("All data has been outputted, return CodedBuffer 0x%08x to pool\n", mOutCodedBuffer); } return ENCODE_SUCCESS; } - -Encode_Status VideoEncoderBase::outputAllData( - VideoEncOutputBuffer *outBuffer) { +Encode_Status VideoEncoderBase::outputAllData(VideoEncOutputBuffer *outBuffer) { // Data size been copied for every single call uint32_t sizeCopiedHere = 0; @@ -794,7 +732,6 @@ Encode_Status VideoEncoderBase::outputAllData( outBuffer->dataSize = outBuffer->bufferSize; outBuffer->remainingSize = mTotalSize - mTotalSizeCopied; outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME; - if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; return ENCODE_BUFFER_TOO_SMALL; } @@ -802,7 +739,6 @@ Encode_Status VideoEncoderBase::outputAllData( outBuffer->dataSize = sizeCopiedHere; outBuffer->remainingSize = 0; outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; - if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; mCurSegment = NULL; 
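// Note on the two SYNCFRAME lines removed just above (grounded in this same patch):
// with the task-based design the flag is attached once per frame and then copied to
// the output buffer, so outputAllData() no longer consults mKeyFrame. The
// propagation path, as it appears elsewhere in this diff:
//
//     if (task->type == FTYPE_I)
//         task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;   // updateFrameInfo(), at encode() time
//     ...
//     outBuffer->flag = mCurOutputTask->flag;          // getOutput(), before outputting data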
return ENCODE_SUCCESS; } @@ -838,6 +774,7 @@ void VideoEncoderBase::setDefaultParams() { mComParams.airParams.airAuto = 1; mComParams.disableDeblocking = 2; mComParams.syncEncMode = false; + mComParams.codedBufNum = 2; mHrdParam.bufferSize = 0; mHrdParam.initBufferFullness = 0; @@ -852,7 +789,7 @@ Encode_Status VideoEncoderBase::setParameters( CHECK_NULL_RETURN_IFFAIL(videoEncParams); LOG_I("Config type = %d\n", (int)videoEncParams->type); - if (mInitialized) { + if (mStarted) { LOG_E("Encoder has been initialized, should use setConfig to change configurations\n"); return ENCODE_ALREADY_INIT; } @@ -862,10 +799,11 @@ Encode_Status VideoEncoderBase::setParameters( VideoParamsCommon *paramsCommon = reinterpret_cast (videoEncParams); - if (paramsCommon->size != sizeof (VideoParamsCommon)) { return ENCODE_INVALID_PARAMS; } + if(paramsCommon->codedBufNum < 2) + paramsCommon->codedBufNum =2; mComParams = *paramsCommon; break; } @@ -1029,7 +967,7 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { // workaround #if 0 - if (!mInitialized) { + if (!mStarted) { LOG_E("Encoder has not initialized yet, can't call setConfig\n"); return ENCODE_NOT_INIT; } @@ -1200,51 +1138,29 @@ Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) { return ret; } -void VideoEncoderBase:: decideFrameType () { - - LOG_I( "mEncodedFrames = %d\n", mEncodedFrames); - LOG_I( "mFrameNum = %d\n", mFrameNum); - LOG_I( "mIsIntra = %d\n", mIsIntra); - - // determine the picture type - if (mComParams.intraPeriod == 0) { - if (mFrameNum == 0) - mIsIntra = true; - else - mIsIntra = false; - } else if ((mFrameNum % mComParams.intraPeriod) == 0) { - mIsIntra = true; - } else { - mIsIntra = false; - } +void VideoEncoderBase:: PrepareFrameInfo (EncodeTask* task) { + if (mNewHeader) mFrameNum = 0; + LOG_I( "mFrameNum = %d ", mFrameNum); - LOG_I( "mIsIntra = %d\n",mIsIntra); + updateFrameInfo(task) ; } +Encode_Status VideoEncoderBase:: updateFrameInfo (EncodeTask* task) { -void VideoEncoderBase:: updateProperities () { - - VASurfaceID tmp = VA_INVALID_SURFACE; - LOG_V( "Begin\n"); - - mEncodedFrames ++; - mFrameNum ++; - mLastCodedBuffer = mVACodedBuffer[mCodedBufIndex]; - mCodedBufIndex ++; - mCodedBufIndex %=2; + task->type = FTYPE_P; - mLastSurface = mCurSurface; + // determine the picture type + if (mFrameNum == 0) + task->type = FTYPE_I; + if (mComParams.intraPeriod != 0 && ((mFrameNum % mComParams.intraPeriod) == 0)) + task->type = FTYPE_I; - if (!mPicSkipped) { - tmp = mRecSurface; - mRecSurface = mRefSurface; - mRefSurface = tmp; - } + if (task->type == FTYPE_I) + task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; - LOG_V( "End\n"); + return ENCODE_SUCCESS; } - Encode_Status VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) { uint32_t size = mComParams.resolution.width * mComParams.resolution.height; @@ -1282,25 +1198,6 @@ Encode_Status VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderBase::getStatistics (VideoStatistics *videoStat) { - -#ifdef VIDEO_ENC_STATISTICS_ENABLE - if (videoStat != NULL) { - videoStat->total_frames = mEncodedFrames; - videoStat->skipped_frames = mVideoStat.skipped_frames; - videoStat->average_encode_time = mVideoStat.average_encode_time / mEncodedFrames; - videoStat->max_encode_time = mVideoStat.max_encode_time; - videoStat->max_encode_frame = mVideoStat.max_encode_frame; - videoStat->min_encode_time = mVideoStat.min_encode_time; - videoStat->min_encode_frame = mVideoStat.min_encode_frame; - } - - 
return ENCODE_SUCCESS; -#else - return ENCODE_NOT_SUPPORTED; -#endif -} - Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( uint32_t width, uint32_t height, uint32_t format, uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr) { @@ -1317,7 +1214,7 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( LOG_V( "Begin\n"); // If encode session has been configured, we can not request surface creation anymore - if (mInitialized) { + if (mStarted) { LOG_E( "Already Initialized, can not request VA surface anymore\n"); return ENCODE_WRONG_STATE; } @@ -1387,9 +1284,8 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( map->vinfo.format = VA_FOURCC_NV12; map->vinfo.s3dformat = 0xffffffff; map->added = false; - map->next = NULL; - mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); + mSrcSurfaceMapList.push_back(map); LOG_I( "surface = 0x%08x\n",(uint32_t)surface); LOG_I("image->pitches[0] = %d\n", image.pitches[0]); @@ -1436,7 +1332,7 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS } for(unsigned int i=0; i < upStreamBuffer->bufCnt; i++) { - if (findSurfaceMapByValue(mSrcSurfaceMapList, upStreamBuffer->bufList[i]) != NULL) //already mapped + if (findSurfaceMapByValue(upStreamBuffer->bufList[i]) != NULL) //already mapped continue; //wrap upstream buffer into vaSurface @@ -1456,18 +1352,12 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS } map->vinfo.s3dformat = 0xFFFFFFFF; map->added = false; - map->next = NULL; status = surfaceMapping(map); if (status == ENCODE_SUCCESS) - mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); + mSrcSurfaceMapList.push_back(map); else delete map; - - if (mSrcSurfaceMapList == NULL) { - LOG_E ("mSrcSurfaceMapList should not be NULL now, maybe meet mapping error\n"); - return ENCODE_NO_MEMORY; - } } return status; @@ -1493,7 +1383,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurface(SurfaceMap *map) { VASurfaceAttributeTPI vaSurfaceAttrib; uint32_t buf; - + vaSurfaceAttrib.buffers = &buf; vaStatus = vaLockSurface( @@ -1536,7 +1426,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurface(SurfaceMap *map) { LOG_I("Surface ID created from Kbuf = 0x%08x", surface); map->surface = surface; - + return ret; } @@ -1608,12 +1498,12 @@ Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle(SurfaceMap *map) { uint32_t lumaOffset = 0; uint32_t chromaUOffset = map->vinfo.height * map->vinfo.lumaStride; uint32_t chromaVOffset = chromaUOffset + 1; - + VASurfaceAttributeTPI vaSurfaceAttrib; uint32_t buf; vaSurfaceAttrib.buffers = &buf; - + vaSurfaceAttrib.count = 1; vaSurfaceAttrib.size = map->vinfo.lumaStride * map->vinfo.height * 3 / 2; vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride; @@ -1635,7 +1525,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle(SurfaceMap *map) { LOG_I("Surface ID created from Kbuf = 0x%08x", map->value); map->surface = surface; - + return ret; } @@ -1667,7 +1557,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForCI(SurfaceMap *map) { CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); map->surface = surface; - + return ret; } @@ -1745,9 +1635,9 @@ LOG_I("surfaceMapping mode=%d, format=%d, lumaStride=%d, width=%d, height=%d, va return status; } -Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { +Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid) { - Encode_Status ret = ENCODE_SUCCESS; + Encode_Status ret = 
ENCODE_SUCCESS; MetadataBufferType type; int32_t value; ValueInfo vinfo; @@ -1757,13 +1647,13 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { IntelMetadataBuffer imb; SurfaceMap *map = NULL; - - if (mStoreMetaDataInBuffers.isEnabled) { + + if (mStoreMetaDataInBuffers.isEnabled) { //metadatabuffer mode LOG_I("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); if (imb.UnSerialize(inBuffer->data, inBuffer->size) != IMB_SUCCESS) { //fail to parse buffer - return ENCODE_NO_REQUEST_DATA; + return ENCODE_NO_REQUEST_DATA; } imb.GetType(type); @@ -1772,20 +1662,21 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { //raw mode LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); if (! inBuffer->data || inBuffer->size == 0) { - return ENCODE_NULL_PTR; + return ENCODE_NULL_PTR; } type = MetadataBufferTypeUser; value = (int32_t)inBuffer->data; } - + + //find if mapped - map = findSurfaceMapByValue(mSrcSurfaceMapList, value); + map = (SurfaceMap*) findSurfaceMapByValue(value); - if (map) { + if (map) { //has mapped, get surfaceID directly LOG_I("direct find surface %d from value %x\n", map->surface, value); - mCurSurface = map->surface; + *sid = map->surface; return ret; } @@ -1793,8 +1684,8 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { //if no found from list, then try to map value with parameters LOG_I("not find surface from cache with value %x, start mapping if enough information\n", value); - if (mStoreMetaDataInBuffers.isEnabled) { - + if (mStoreMetaDataInBuffers.isEnabled) { + //if type is MetadataBufferTypeGrallocSource, use default parameters if (type == MetadataBufferTypeGrallocSource) { vinfo.mode = MEM_MODE_GFXHANDLE; @@ -1806,15 +1697,15 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { vinfo.chromStride = mComParams.resolution.width; vinfo.format = VA_FOURCC_NV12; vinfo.s3dformat = 0xFFFFFFFF; - } else { + } else { //get all info mapping needs imb.GetValueInfo(pvinfo); imb.GetExtraValues(extravalues, extravalues_count); } - + } else { - //raw mode + //raw mode vinfo.mode = MEM_MODE_MALLOC; vinfo.handle = 0; vinfo.size = inBuffer->size; @@ -1836,26 +1727,25 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { map->value = value; memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo)); map->added = false; - map->next = NULL; ret = surfaceMapping(map); if (ret == ENCODE_SUCCESS) { LOG_I("surface mapping success, map value %x into surface %d\n", value, map->surface); - mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); + mSrcSurfaceMapList.push_back(map); } else { delete map; LOG_E("surface mapping failed, wrong info or meet serious error\n"); return ret; - } + } - mCurSurface = map->surface; + *sid = map->surface; } else { //can't map due to no info LOG_E("surface mapping failed, missing information\n"); return ENCODE_NO_REQUEST_DATA; } - + if (extravalues) { //map more using same ValueInfo for(unsigned int i=0; ivalue = extravalues[i]; memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo)); map->added = false; - map->next = NULL; ret = surfaceMapping(map); if (ret == ENCODE_SUCCESS) { LOG_I("surface mapping extravalue success, map value %x into surface %d\n", extravalues[i], map->surface); - mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); + mSrcSurfaceMapList.push_back(map); } else { delete map; map = NULL; @@ -1877,67 +1766,8 @@ Encode_Status 
VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { } } } - - return ret; -} - -SurfaceMap *VideoEncoderBase::appendSurfaceMap( - SurfaceMap *head, SurfaceMap *map) { - - if (head == NULL) { - return map; - } - - SurfaceMap *node = head; - SurfaceMap *tail = NULL; - - while (node != NULL) { - tail = node; - node = node->next; - } - tail->next = map; - - return head; -} - -SurfaceMap *VideoEncoderBase::removeSurfaceMap( - SurfaceMap *head, SurfaceMap *map) { - - SurfaceMap *node = head; - SurfaceMap *tmpNode = NULL; - - if (head == map) { - tmpNode = head->next; - map->next = NULL; - return tmpNode; - } - - while (node != NULL) { - if (node->next == map) - break; - node = node->next; - } - if (node != NULL) { - node->next = map->next; - } - - map->next = NULL; - return head; -} - -SurfaceMap *VideoEncoderBase::findSurfaceMapByValue( - SurfaceMap *head, int32_t value) { - - SurfaceMap *node = head; - - while (node != NULL) { - if (node->value == value) - break; - node = node->next; - } - - return node; + return ret; } Encode_Status VideoEncoderBase::renderDynamicBitrate() { @@ -2063,3 +1893,17 @@ Encode_Status VideoEncoderBase::renderHrd() { return ENCODE_SUCCESS; } + +SurfaceMap *VideoEncoderBase::findSurfaceMapByValue(int32_t value) { + android::List<SurfaceMap *>::iterator node; + + for(node = mSrcSurfaceMapList.begin(); node != mSrcSurfaceMapList.end(); node++) + { + if ((*node)->value == value) + return *node; + else + continue; + } + + return NULL; +} diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 9ab7bc6..924c4da 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -14,15 +14,30 @@ #include "VideoEncoderDef.h" #include "VideoEncoderInterface.h" #include "IntelMetadataBuffer.h" +#include <utils/List.h> +#include <utils/threads.h> +//#define AUTO_REFERENCE struct SurfaceMap { VASurfaceID surface; MetadataBufferType type; int32_t value; ValueInfo vinfo; - uint32_t index; bool added; - SurfaceMap *next; +}; + +struct EncodeTask { + VASurfaceID enc_surface; + VASurfaceID ref_surface[2]; + VASurfaceID rec_surface; + VABufferID coded_buffer; + + FrameType type; + int flag; + int64_t timestamp; //corresponding input frame timestamp + uint8_t *in_data; //input buffer data + + bool completed; //if encode task has been completed by HW }; class VideoEncoderBase : IVideoEncoder { @@ -34,7 +49,7 @@ public: virtual Encode_Status start(void); virtual void flush(void); virtual Encode_Status stop(void); - virtual Encode_Status encode(VideoEncRawBuffer *inBuffer); + virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout); /* * getOutput can be called several times for one frame (such as codec data the first time and the remaining data afterwards) * If the buffer passed to getOutput is not big enough, this API call will return ENCODE_BUFFER_TOO_SMALL * and the caller should provide a bigger buffer and call again */ - virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); + virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout); virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams); virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams); virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig); virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig); - virtual Encode_Status getMaxOutSize(uint32_t *maxSize); - virtual Encode_Status getStatistics(VideoStatistics *videoStat); protected: - 
virtual Encode_Status sendEncodeCommand(EncodeTask* task) = 0; virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) = 0; virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams) = 0; virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig) = 0; virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) = 0; + virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer) = 0; + virtual Encode_Status updateFrameInfo(EncodeTask* task) ; - Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer); - Encode_Status cleanupForOutput(); - Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer); Encode_Status renderDynamicFrameRate(); Encode_Status renderDynamicBitrate(); Encode_Status renderHrd(); - void setKeyFrame(int32_t keyFramePeriod); private: void setDefaultParams(void); @@ -78,41 +89,29 @@ private: Encode_Status surfaceMappingForKbufHandle(SurfaceMap *map); Encode_Status surfaceMappingForMalloc(SurfaceMap *map); Encode_Status surfaceMapping(SurfaceMap *map); + SurfaceMap *findSurfaceMapByValue(int32_t value); + Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid); + void PrepareFrameInfo(EncodeTask* task); - SurfaceMap *appendSurfaceMap( - SurfaceMap *head, SurfaceMap *map); - SurfaceMap *removeSurfaceMap( - SurfaceMap *head, SurfaceMap *map); - SurfaceMap *findSurfaceMapByValue( - SurfaceMap *head, int32_t value); - - Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer); - void updateProperities(void); - void decideFrameType(void); -// Encode_Status uploadDataToSurface(VideoEncRawBuffer *inBuffer); - Encode_Status syncEncode(VideoEncRawBuffer *inBuffer); - Encode_Status asyncEncode(VideoEncRawBuffer *inBuffer); + Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer); + Encode_Status cleanupForOutput(); + Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer); protected: bool mInitialized; + bool mStarted; VADisplay mVADisplay; VAContextID mVAContext; VAConfigID mVAConfig; VAEntrypoint mVAEntrypoint; - VACodedBufferSegment *mCurSegment; - uint32_t mOffsetInSeg; - uint32_t mTotalSize; - uint32_t mTotalSizeCopied; VideoParamsCommon mComParams; VideoParamsHRD mHrdParam; VideoParamsStoreMetaDataInBuffers mStoreMetaDataInBuffers; - bool mForceKeyFrame; bool mNewHeader; - bool mFirstFrame; bool mRenderMaxSliceSize; //Max Slice Size bool mRenderQP; @@ -121,50 +120,36 @@ protected: bool mRenderBitRate; bool mRenderHrd; - VABufferID mVACodedBuffer[2]; - VABufferID mLastCodedBuffer; - VABufferID mOutCodedBuffer; VABufferID mSeqParamBuf; VABufferID mRcParamBuf; VABufferID mFrameRateParamBuf; VABufferID mPicParamBuf; VABufferID mSliceParamBuf; - VASurfaceID *mSurfaces; - uint32_t mSurfaceCnt; - - SurfaceMap *mSrcSurfaceMapList; - - //for new design - VASurfaceID mCurSurface; //current input surface to be encoded - VASurfaceID mRefSurface; //reference surface - VASurfaceID mRecSurface; //reconstructed surface - VASurfaceID mLastSurface; //last surface + android::List mSrcSurfaceMapList; //all mapped surface info list from input buffer + android::List mEncodeTaskList; //all encode tasks list + android::List mVACodedBufferList; //all available codedbuffer list - VideoEncRawBuffer *mLastInputRawBuffer; - - uint32_t mEncodedFrames; + VASurfaceID mRefSurface; //reference surface, only used in base + VASurfaceID mRecSurface; //reconstructed surface, only used in base uint32_t mFrameNum; uint32_t mCodedBufSize; - 
uint32_t mCodedBufIndex; - bool mPicSkipped; - bool mIsIntra; bool mSliceSizeOverflow; - bool mCodedBufferMapped; - bool mDataCopiedOut; - bool mKeyFrame; - int32_t mInitCheck; + //Current Outputting task + EncodeTask *mCurOutputTask; -#ifdef VIDEO_ENC_STATISTICS_ENABLE - VideoStatistics mVideoStat; -#endif + //Current outputting CodedBuffer status + VABufferID mOutCodedBuffer; + bool mCodedBufferMapped; + VACodedBufferSegment *mCurSegment; + uint32_t mOffsetInSeg; + uint32_t mTotalSize; + uint32_t mTotalSizeCopied; + android::Mutex mCodedBuffer_Lock, mEncodeTask_Lock; + android::Condition mCodedBuffer_Cond, mEncodeTask_Cond; - // Constants - static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE = 2; - static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE = 8; + bool mFrameSkipped; }; - - #endif /* __VIDEO_ENCODER_BASE_H__ */ diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index b9feca2..f5174aa 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -32,7 +32,9 @@ enum { ENCODE_SUCCESS = 0, ENCODE_ALREADY_INIT = 1, ENCODE_SLICESIZE_OVERFLOW = 2, - ENCODE_BUFFER_TOO_SMALL = 3 // The buffer passed to encode is too small to contain encoded data + ENCODE_BUFFER_TOO_SMALL = 3, // The buffer passed to encode is too small to contain encoded data + ENCODE_DEVICE_BUSY = 4, + ENCODE_DATA_NOT_READY = 5, }; typedef enum { @@ -42,6 +44,7 @@ typedef enum { OUTPUT_ONE_NAL = 4, OUTPUT_ONE_NAL_WITHOUT_STARTCODE = 8, OUTPUT_LENGTH_PREFIXED = 16, + OUTPUT_CODEDBUFFER = 32, OUTPUT_BUFFER_LAST } VideoOutputFormat; @@ -102,6 +105,23 @@ enum VideoBufferSharingMode { BUFFER_LAST }; +typedef enum { + FTYPE_UNKNOWN = 0, // Unknown + FTYPE_I = 1, // General I-frame type + FTYPE_P = 2, // General P-frame type + FTYPE_B = 3, // General B-frame type + FTYPE_SI = 4, // H.263 SI-frame type + FTYPE_SP = 5, // H.263 SP-frame type + FTYPE_EI = 6, // H.264 EI-frame type + FTYPE_EP = 7, // H.264 EP-frame type + FTYPE_S = 8, // MPEG-4 S-frame type + FTYPE_IDR = 9, // IDR-frame type +}FrameType; + +//function call mode +#define FUNC_BLOCK 0xFFFFFFFF +#define FUNC_NONBLOCK 0 + // Output buffer flag #define ENCODE_BUFFERFLAG_ENDOFFRAME 0x00000001 #define ENCODE_BUFFERFLAG_PARTIALFRAME 0x00000002 @@ -110,6 +130,8 @@ enum VideoBufferSharingMode { #define ENCODE_BUFFERFLAG_DATACORRUPT 0x00000010 #define ENCODE_BUFFERFLAG_DATAINVALID 0x00000020 #define ENCODE_BUFFERFLAG_SLICEOVERFOLOW 0x00000040 +#define ENCODE_BUFFERFLAG_ENDOFSTREAM 0x00000080 +#define ENCODE_BUFFERFLAG_NSTOPFRAME 0x00000100 typedef struct { uint8_t *data; @@ -118,14 +140,18 @@ typedef struct { uint32_t remainingSize; int flag; //Key frame, Codec Data etc VideoOutputFormat format; //output format - uint64_t timeStamp; //reserved + int64_t timeStamp; //reserved + FrameType type; + uint8_t *in_data; //indicate corresponding input data } VideoEncOutputBuffer; typedef struct { uint8_t *data; uint32_t size; bool bufAvailable; //To indicate whether this buffer can be reused - uint64_t timeStamp; //reserved + int64_t timeStamp; //reserved + FrameType type; //frame type expected to be encoded + int flag; // flag to indicate buffer property } VideoEncRawBuffer; struct VideoEncSurfaceBuffer { @@ -304,6 +330,8 @@ struct VideoParamsCommon : VideoParamConfigSet { AirParams airParams; uint32_t disableDeblocking; bool syncEncMode; + //CodedBuffer properties + uint32_t codedBufNum; VideoParamsCommon() { type = VideoParamsTypeCommon; @@ -327,6 +355,7 @@ struct VideoParamsCommon : VideoParamConfigSet 
{ this->airParams = other.airParams; this->disableDeblocking = other.disableDeblocking; this->syncEncMode = other.syncEncMode; + this->codedBufNum = other.codedBufNum; return *this; } }; @@ -336,10 +365,23 @@ struct VideoParamsAVC : VideoParamConfigSet { uint8_t VUIFlag; int32_t maxSliceSize; uint32_t idrInterval; + uint32_t ipPeriod; + uint32_t refFrames; SliceNum sliceNum; AVCDelimiterType delimiterType; Cropping crop; SamplingAspectRatio SAR; + uint32_t refIdx10ActiveMinus1; + uint32_t refIdx11ActiveMinus1; + bool bFrameMBsOnly; + bool bMBAFF; + bool bEntropyCodingCABAC; + bool bWeightedPPrediction; + uint32_t weightedBipredicitonMode; + bool bConstIpred ; + bool bDirect8x8Inference; + bool bDirectSpatialTemporal; + uint32_t cabacInitIdc; VideoParamsAVC() { type = VideoParamsTypeAVC; @@ -354,6 +396,8 @@ struct VideoParamsAVC : VideoParamConfigSet { this->VUIFlag = other.VUIFlag; this->maxSliceSize = other.maxSliceSize; this->idrInterval = other.idrInterval; + this->ipPeriod = other.ipPeriod; + this->refFrames = other.refFrames; this->sliceNum = other.sliceNum; this->delimiterType = other.delimiterType; this->crop.LeftOffset = other.crop.LeftOffset; @@ -363,6 +407,17 @@ struct VideoParamsAVC : VideoParamConfigSet { this->SAR.SarWidth = other.SAR.SarWidth; this->SAR.SarHeight = other.SAR.SarHeight; + this->refIdx10ActiveMinus1 = other.refIdx10ActiveMinus1; + this->refIdx11ActiveMinus1 = other.refIdx11ActiveMinus1; + this->bFrameMBsOnly = other.bFrameMBsOnly; + this->bMBAFF = other.bMBAFF; + this->bEntropyCodingCABAC = other.bEntropyCodingCABAC; + this->bWeightedPPrediction = other.bWeightedPPrediction; + this->weightedBipredicitonMode = other.weightedBipredicitonMode; + this->bConstIpred = other.bConstIpred; + this->bDirect8x8Inference = other.bDirect8x8Inference; + this->bDirectSpatialTemporal = other.bDirectSpatialTemporal; + this->cabacInitIdc = other.cabacInitIdc; return *this; } }; @@ -450,6 +505,7 @@ struct VideoConfigAVCIntraPeriod : VideoParamConfigSet { uint32_t idrInterval; //How many Intra frame will have a IDR frame uint32_t intraPeriod; + uint32_t ipPeriod; }; struct VideoConfigNALSize : VideoParamConfigSet { @@ -512,14 +568,4 @@ struct VideoConfigSliceNum : VideoParamConfigSet { SliceNum sliceNum; }; -typedef struct { - uint32_t total_frames; - uint32_t skipped_frames; - uint32_t average_encode_time; - uint32_t max_encode_time; - uint32_t max_encode_frame; - uint32_t min_encode_time; - uint32_t min_encode_frame; -}VideoStatistics; - #endif /* __VIDEO_ENCODER_DEF_H__ */ diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp index 7371d7a..b9b9c99 100644 --- a/videoencoder/VideoEncoderH263.cpp +++ b/videoencoder/VideoEncoderH263.cpp @@ -16,20 +16,20 @@ VideoEncoderH263::VideoEncoderH263() { mComParams.profile = (VAProfile)PROFILE_H263BASELINE; } -Encode_Status VideoEncoderH263::sendEncodeCommand(void) { +Encode_Status VideoEncoderH263::sendEncodeCommand(EncodeTask *task) { Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n"); if (mFrameNum == 0) { - ret = renderSequenceParams(); + ret = renderSequenceParams(task); CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); } - ret = renderPictureParams(); + ret = renderPictureParams(task); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); - ret = renderSliceParams(); + ret = renderSliceParams(task); CHECK_ENCODE_STATUS_RETURN("renderSliceParams"); LOG_V( "End\n"); @@ -37,7 +37,7 @@ Encode_Status VideoEncoderH263::sendEncodeCommand(void) { } -Encode_Status VideoEncoderH263::renderSequenceParams() { 
+Encode_Status VideoEncoderH263::renderSequenceParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncSequenceParameterBufferH263 h263SequenceParam = {}; @@ -78,7 +78,7 @@ Encode_Status VideoEncoderH263::renderSequenceParams() { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderH263::renderPictureParams() { +Encode_Status VideoEncoderH263::renderPictureParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncPictureParameterBufferH263 h263PictureParams = {}; @@ -86,18 +86,18 @@ Encode_Status VideoEncoderH263::renderPictureParams() { LOG_V( "Begin\n\n"); // set picture params for HW - h263PictureParams.reference_picture = mRefSurface; - h263PictureParams.reconstructed_picture = mRecSurface; - h263PictureParams.coded_buf = mVACodedBuffer [mCodedBufIndex]; + h263PictureParams.reference_picture = task->ref_surface[0]; + h263PictureParams.reconstructed_picture = task->rec_surface; + h263PictureParams.coded_buf = task->coded_buffer; h263PictureParams.picture_width = mComParams.resolution.width; h263PictureParams.picture_height = mComParams.resolution.height; - h263PictureParams.picture_type = mIsIntra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; + h263PictureParams.picture_type = (task->type == FTYPE_I) ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; LOG_V("======h263 picture params======\n"); LOG_I( "reference_picture = 0x%08x\n", h263PictureParams.reference_picture); LOG_I( "reconstructed_picture = 0x%08x\n", h263PictureParams.reconstructed_picture); LOG_I( "coded_buf = 0x%08x\n", h263PictureParams.coded_buf); - LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); +// LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); LOG_I( "picture_width = %d\n", h263PictureParams.picture_width); LOG_I( "picture_height = %d\n",h263PictureParams.picture_height); LOG_I( "picture_type = %d\n\n",h263PictureParams.picture_type); @@ -117,7 +117,7 @@ Encode_Status VideoEncoderH263::renderPictureParams() { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderH263::renderSliceParams() { +Encode_Status VideoEncoderH263::renderSliceParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; uint32_t sliceHeight; @@ -145,7 +145,7 @@ Encode_Status VideoEncoderH263::renderSliceParams() { sliceParams->start_row_number = 0; // slice height measured in MB sliceParams->slice_height = sliceHeightInMB; - sliceParams->slice_flags.bits.is_intra = mIsIntra; + sliceParams->slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0; sliceParams->slice_flags.bits.disable_deblocking_filter_idc = 0; LOG_V("======h263 slice params======\n"); diff --git a/videoencoder/VideoEncoderH263.h b/videoencoder/VideoEncoderH263.h index 2113e2f..a8578dd 100644 --- a/videoencoder/VideoEncoderH263.h +++ b/videoencoder/VideoEncoderH263.h @@ -20,7 +20,7 @@ public: virtual ~VideoEncoderH263() {}; protected: - virtual Encode_Status sendEncodeCommand(void); + virtual Encode_Status sendEncodeCommand(EncodeTask *task); virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) { return ENCODE_SUCCESS; } @@ -33,12 +33,16 @@ protected: virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) { return ENCODE_SUCCESS; } + virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer) { + return ENCODE_NOT_SUPPORTED; + } + //virtual Encode_Status updateFrameInfo(EncodeTask* task); // Local Methods private: - Encode_Status renderSequenceParams(); - Encode_Status renderPictureParams(); - Encode_Status renderSliceParams(); + Encode_Status renderSequenceParams(EncodeTask 
*task); + Encode_Status renderPictureParams(EncodeTask *task); + Encode_Status renderSliceParams(EncodeTask *task); }; #endif /* __VIDEO_ENCODER_H263_H__ */ diff --git a/videoencoder/VideoEncoderInterface.h b/videoencoder/VideoEncoderInterface.h index 243e4a1..da1c6ec 100644 --- a/videoencoder/VideoEncoderInterface.h +++ b/videoencoder/VideoEncoderInterface.h @@ -17,14 +17,13 @@ public: virtual Encode_Status start(void) = 0; virtual Encode_Status stop(void) = 0; virtual void flush(void) = 0; - virtual Encode_Status encode(VideoEncRawBuffer *inBuffer) = 0; - virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer) = 0; + virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout = FUNC_BLOCK) = 0; + virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout = FUNC_BLOCK) = 0; virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams) = 0; virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams) = 0; virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig) = 0; virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig) = 0; virtual Encode_Status getMaxOutSize(uint32_t *maxSize) = 0; - virtual Encode_Status getStatistics(VideoStatistics *videoStat) = 0; }; #endif /* VIDEO_ENCODER_INTERFACE_H_ */ diff --git a/videoencoder/VideoEncoderLog.h b/videoencoder/VideoEncoderLog.h index 49c34df..0f0eeae 100644 --- a/videoencoder/VideoEncoderLog.h +++ b/videoencoder/VideoEncoderLog.h @@ -23,7 +23,8 @@ __android_log_print(level, comp, "%s():%d: "format, \ __FUNCTION__, __LINE__, ##__VA_ARGS__) -#if 1 +#define VIDEO_ENC_LOG_ENABLE +#if 1 #ifdef VIDEO_ENC_LOG_ENABLE #define LOG_V(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__) #define LOG_I(format, ...) 
mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_INFO, format, ##__VA_ARGS__) diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp index 6e0263b..1bedc63 100644 --- a/videoencoder/VideoEncoderMP4.cpp +++ b/videoencoder/VideoEncoderMP4.cpp @@ -85,30 +85,15 @@ Encode_Status VideoEncoderMP4::outputConfigData( return ret; } - -Encode_Status VideoEncoderMP4::getOutput(VideoEncOutputBuffer *outBuffer) { +Encode_Status VideoEncoderMP4::getExtFormatOutput(VideoEncOutputBuffer *outBuffer) { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; - bool useLocalBuffer = false; LOG_V("Begin\n"); CHECK_NULL_RETURN_IFFAIL(outBuffer); - setKeyFrame(mComParams.intraPeriod); - - // prepare for output, map the coded buffer - ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer); - CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); - switch (outBuffer->format) { - case OUTPUT_EVERYTHING: - case OUTPUT_FRAME_DATA: { - // Output whatever we have - ret = VideoEncoderBase::outputAllData(outBuffer); - CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); - break; - } case OUTPUT_CODEC_DATA: { // Output the codec config data ret = outputConfigData(outBuffer); @@ -123,32 +108,14 @@ Encode_Status VideoEncoderMP4::getOutput(VideoEncOutputBuffer *outBuffer) { LOG_I("out size is = %d\n", outBuffer->dataSize); - // cleanup, unmap the coded buffer if all - // data has been copied out - ret = VideoEncoderBase::cleanupForOutput(); CLEAN_UP: - if (ret < ENCODE_SUCCESS) { - if (outBuffer->data && (useLocalBuffer == true)) { - delete[] outBuffer->data; - outBuffer->data = NULL; - useLocalBuffer = false; - } - - // error happens, unmap the buffer - if (mCodedBufferMapped) { - vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - mCodedBufferMapped = false; - mCurSegment = NULL; - } - } LOG_V("End\n"); return ret; } - -Encode_Status VideoEncoderMP4::renderSequenceParams() { +Encode_Status VideoEncoderMP4::renderSequenceParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncSequenceParameterBufferMPEG4 mp4SequenceParams = {}; @@ -202,26 +169,26 @@ Encode_Status VideoEncoderMP4::renderSequenceParams() { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderMP4::renderPictureParams() { +Encode_Status VideoEncoderMP4::renderPictureParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncPictureParameterBufferMPEG4 mpeg4_pic_param = {}; LOG_V( "Begin\n\n"); // set picture params for HW - mpeg4_pic_param.reference_picture = mRefSurface; - mpeg4_pic_param.reconstructed_picture = mRecSurface; - mpeg4_pic_param.coded_buf = mVACodedBuffer[mCodedBufIndex]; + mpeg4_pic_param.reference_picture = task->ref_surface[0]; + mpeg4_pic_param.reconstructed_picture = task->rec_surface; + mpeg4_pic_param.coded_buf = task->coded_buffer; mpeg4_pic_param.picture_width = mComParams.resolution.width; mpeg4_pic_param.picture_height = mComParams.resolution.height; mpeg4_pic_param.vop_time_increment= mFrameNum; - mpeg4_pic_param.picture_type = mIsIntra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; + mpeg4_pic_param.picture_type = (task->type == FTYPE_I) ? 
VAEncPictureTypeIntra : VAEncPictureTypePredictive; LOG_V("======mpeg4 picture params======\n"); LOG_I("reference_picture = 0x%08x\n", mpeg4_pic_param.reference_picture); LOG_I("reconstructed_picture = 0x%08x\n", mpeg4_pic_param.reconstructed_picture); LOG_I("coded_buf = 0x%08x\n", mpeg4_pic_param.coded_buf); - LOG_I("coded_buf_index = %d\n", mCodedBufIndex); +// LOG_I("coded_buf_index = %d\n", mCodedBufIndex); LOG_I("picture_width = %d\n", mpeg4_pic_param.picture_width); LOG_I("picture_height = %d\n", mpeg4_pic_param.picture_height); LOG_I("vop_time_increment = %d\n", mpeg4_pic_param.vop_time_increment); @@ -242,7 +209,7 @@ Encode_Status VideoEncoderMP4::renderPictureParams() { } -Encode_Status VideoEncoderMP4::renderSliceParams() { +Encode_Status VideoEncoderMP4::renderSliceParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; uint32_t sliceHeight; @@ -259,7 +226,7 @@ Encode_Status VideoEncoderMP4::renderSliceParams() { sliceParams.start_row_number = 0; sliceParams.slice_height = sliceHeightInMB; - sliceParams.slice_flags.bits.is_intra = mIsIntra; + sliceParams.slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0; sliceParams.slice_flags.bits.disable_deblocking_filter_idc = 0; LOG_V("======mpeg4 slice params======\n"); @@ -282,19 +249,19 @@ Encode_Status VideoEncoderMP4::renderSliceParams() { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderMP4::sendEncodeCommand(void) { +Encode_Status VideoEncoderMP4::sendEncodeCommand(EncodeTask *task) { Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n"); if (mFrameNum == 0) { - ret = renderSequenceParams(); + ret = renderSequenceParams(task); CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); } - ret = renderPictureParams(); + ret = renderPictureParams(task); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); - ret = renderSliceParams(); + ret = renderSliceParams(task); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); LOG_V( "End\n"); diff --git a/videoencoder/VideoEncoderMP4.h b/videoencoder/VideoEncoderMP4.h index b453023..7e579c0 100644 --- a/videoencoder/VideoEncoderMP4.h +++ b/videoencoder/VideoEncoderMP4.h @@ -19,11 +19,10 @@ public: VideoEncoderMP4(); virtual ~VideoEncoderMP4() {}; - Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); +// Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); protected: - virtual Encode_Status sendEncodeCommand(void); - + virtual Encode_Status sendEncodeCommand(EncodeTask *task); virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) { return ENCODE_SUCCESS; } @@ -36,13 +35,16 @@ protected: virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) { return ENCODE_SUCCESS; } + virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer); + //virtual Encode_Status updateFrameInfo(EncodeTask* task); + // Local Methods private: Encode_Status getHeaderPos(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize); Encode_Status outputConfigData(VideoEncOutputBuffer *outBuffer); - Encode_Status renderSequenceParams(); - Encode_Status renderPictureParams(); - Encode_Status renderSliceParams(); + Encode_Status renderSequenceParams(EncodeTask *task); + Encode_Status renderPictureParams(EncodeTask *task); + Encode_Status renderSliceParams(EncodeTask *task); unsigned char mProfileLevelIndication; uint32_t mFixedVOPTimeIncrement; -- cgit v1.2.3 From 5cf0a576cc3261bec49008983ad6137cfc28644e Mon Sep 17 00:00:00 2001 From: Juan Antonio Gozalvez Herrero Date: Tue, 15 Jan 2013 11:26:27 +0100 Subject: Revert "initial version for MRFLD HiP 
support" BZ: 76823 This reverts commit fc61070b3ace2bfeed85908672a41b0258fda363. This reverts patch http://android.intel.com:8080/#/c/71392/ Change-Id: I113db91ee644d1ee8b0b4cb21f9c0ded74a9037f Orig-Change-Id: I8aeb006ec09d10379707c412df21b5a1d2426d17 Reviewed-on: http://android.intel.com:8080/86747 Reviewed-by: Gozalvez Herrero, Juan AntonioX Tested-by: Gozalvez Herrero, Juan AntonioX Reviewed-by: cactus Tested-by: cactus --- test/Android.mk | 4 - test/mix_encoder.cpp | 25 +- videoencoder/Android.mk | 3 +- videoencoder/VideoEncoderAVC.cpp | 173 +++---- videoencoder/VideoEncoderAVC.h | 12 +- videoencoder/VideoEncoderBase.cpp | 968 ++++++++++++++++++++--------------- videoencoder/VideoEncoderBase.h | 107 ++-- videoencoder/VideoEncoderDef.h | 72 +-- videoencoder/VideoEncoderH263.cpp | 26 +- videoencoder/VideoEncoderH263.h | 12 +- videoencoder/VideoEncoderInterface.h | 5 +- videoencoder/VideoEncoderLog.h | 3 +- videoencoder/VideoEncoderMP4.cpp | 61 ++- videoencoder/VideoEncoderMP4.h | 14 +- 14 files changed, 802 insertions(+), 683 deletions(-) diff --git a/test/Android.mk b/test/Android.mk index abded5d..2f4d6a8 100644 --- a/test/Android.mk +++ b/test/Android.mk @@ -45,10 +45,6 @@ LOCAL_SHARED_LIBRARIES := \ libva-android \ libva-tpi \ libgui \ - libui \ - libutils \ - libcutils \ - libhardware \ libbinder LOCAL_MODULE_TAGS := optional diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index 4662947..464b759 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -257,7 +257,7 @@ Encode_Status SetVideoEncoderParam() { memset(&tmpStoreMetaDataInBuffers,0x00,sizeof(VideoParamsStoreMetaDataInBuffers)); gVideoEncoder->getParameters(&tmpStoreMetaDataInBuffers); gVideoEncoder->setParameters(&tmpStoreMetaDataInBuffers); -#if 0 + VideoParamsUpstreamBuffer tmpVideoParamsUpstreamBuffer; tmpVideoParamsUpstreamBuffer.bufCnt = 0; gVideoEncoder->setParameters(&tmpVideoParamsUpstreamBuffer); @@ -275,7 +275,7 @@ Encode_Status SetVideoEncoderParam() { VideoParamsUsrptrBuffer tmpVideoParamsUsrptrBuffer; tmpVideoParamsUsrptrBuffer.width = 0; gVideoEncoder->getParameters(&tmpVideoParamsUsrptrBuffer); -#endif + //---------------------add for libmix encode code coverage test // VideoEncodeBase.cpp file setConfig && getConfig code coverage test // only for VCM mode @@ -346,9 +346,6 @@ Encode_Status SetVideoEncoderParam() { // for VideoConfigTypeAVCIntraPeriod derivedSetConfig && derivedGetConfig VideoConfigAVCIntraPeriod configAVCIntraPeriod; gVideoEncoder->getConfig(&configAVCIntraPeriod); - configAVCIntraPeriod.ipPeriod = 1; - configAVCIntraPeriod.intraPeriod = 30; - configAVCIntraPeriod.idrInterval = 1; gVideoEncoder->setConfig(&configAVCIntraPeriod); VideoConfigTypeIDRReq tmpVideoConfigTypeIDRReq; gVideoEncoder->setConfig(&tmpVideoConfigTypeIDRReq); @@ -992,24 +989,30 @@ for(int i=0; i<1; i++) InBuf.data = data; InBuf.size = size; InBuf.bufAvailable = true; - InBuf.type = FTYPE_UNKNOWN; - InBuf.flag = 0; ret = gVideoEncoder->encode(&InBuf); CHECK_ENCODE_STATUS("encode"); - if (i > 0) { ret = gVideoEncoder->getOutput(&OutBuf); CHECK_ENCODE_STATUS("getOutput"); -// printf("OutBuf.dataSize = %d, flag=0x%08x .........\n", OutBuf.dataSize, OutBuf.flag); + CHECK_ENCODE_STATUS_RETURN("getOutput"); + // printf("OutBuf.dataSize = %d .........\n", OutBuf.dataSize); fwrite(OutBuf.data, 1, OutBuf.dataSize, file); - } + printf("Encoding %d Frames \r", i+1); fflush(stdout); } - ret = gVideoEncoder->getOutput(&OutBuf); fclose(file); + VideoStatistics stat; + if (gVideoEncoder->getStatistics(&stat) == 
ENCODE_SUCCESS) + { + printf("\nVideoStatistics\n"); + printf("Encoded %d frames, Skip %d frames, encode time: average( %d us), max( %d us/Frame %d), min( %d us/Frame %d)\n", \ + stat.total_frames, stat.skipped_frames, stat.average_encode_time, stat.max_encode_time, stat.max_encode_frame, \ + stat.min_encode_time, stat.min_encode_frame ); + } + gVideoEncoder->stop(); releaseVideoEncoder(gVideoEncoder); gVideoEncoder = NULL; diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk index 0b13e7e..7c8314a 100644 --- a/videoencoder/Android.mk +++ b/videoencoder/Android.mk @@ -2,6 +2,7 @@ LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) #VIDEO_ENC_LOG_ENABLE := true +#VIDEO_ENC_STATISTICS_ENABLE := true LOCAL_SRC_FILES := \ VideoEncoderBase.cpp \ @@ -15,13 +16,11 @@ LOCAL_SRC_FILES := \ LOCAL_C_INCLUDES := \ $(LOCAL_PATH) \ $(TARGET_OUT_HEADERS)/libva \ - $(TOPDIR)/frameworks/native/include \ #LOCAL_LDLIBS += -lpthread LOCAL_SHARED_LIBRARIES := \ libcutils \ - libutils \ libva \ libva-android \ libva-tpi \ diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index bf88da7..c4bf805 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -20,7 +20,6 @@ VideoEncoderAVC::VideoEncoderAVC() mVideoParamsAVC.sliceNum.iSliceNum = 2; mVideoParamsAVC.sliceNum.pSliceNum = 2; mVideoParamsAVC.idrInterval = 2; - mVideoParamsAVC.ipPeriod = 1; mVideoParamsAVC.maxSliceSize = 0; mVideoParamsAVC.delimiterType = AVC_DELIMITER_ANNEXB; mSliceNum = 2; @@ -95,7 +94,6 @@ Encode_Status VideoEncoderAVC::derivedSetConfig(VideoParamConfigSet *videoEncCon } mVideoParamsAVC.idrInterval = configAVCIntraPeriod->idrInterval; - mVideoParamsAVC.ipPeriod = configAVCIntraPeriod->ipPeriod; mComParams.intraPeriod = configAVCIntraPeriod->intraPeriod; mNewHeader = true; break; @@ -156,7 +154,6 @@ Encode_Status VideoEncoderAVC:: derivedGetConfig( configAVCIntraPeriod->idrInterval = mVideoParamsAVC.idrInterval; configAVCIntraPeriod->intraPeriod = mComParams.intraPeriod; - configAVCIntraPeriod->ipPeriod = mVideoParamsAVC.ipPeriod; break; } @@ -195,68 +192,30 @@ Encode_Status VideoEncoderAVC:: derivedGetConfig( return ENCODE_SUCCESS; } -Encode_Status VideoEncoderAVC::updateFrameInfo(EncodeTask* task) { - uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval; - FrameType frametype; - uint32_t frame_num = mFrameNum; - - if (mVideoParamsAVC.idrInterval != 0) { - if(mVideoParamsAVC.ipPeriod > 1) - frame_num = frame_num % (idrPeroid + 1); - else if(mComParams.intraPeriod != 0) - frame_num = frame_num % idrPeroid ; - } - - if(frame_num ==0){ - frametype = FTYPE_IDR; - }else if(mComParams.intraPeriod ==0) - // only I frame need intraPeriod=idrInterval=ipPeriod=0 - frametype = FTYPE_I; - else if(mVideoParamsAVC.ipPeriod == 1){ // no B frame - if(mComParams.intraPeriod != 0 && (frame_num > 1) &&((frame_num -1)%mComParams.intraPeriod == 0)) - frametype = FTYPE_I; - else - frametype = FTYPE_P; - } else { - if(mComParams.intraPeriod != 0 &&((frame_num-1)%mComParams.intraPeriod == 0)&&(frame_num >mComParams.intraPeriod)) - frametype = FTYPE_I; - else{ - frame_num = frame_num%mComParams.intraPeriod; - if(frame_num == 0) - frametype = FTYPE_B; - else if((frame_num-1)%mVideoParamsAVC.ipPeriod == 0) - frametype = FTYPE_P; - else - frametype = FTYPE_B; - } - } - - if (frametype == FTYPE_IDR || frametype == FTYPE_I) - task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; - - if (frametype != task->type) { - const char* FrameTypeStr[10] = {"UNKNOWN", "I", "P", "B", "SI", "SP", "EI", 
"EP", "S", "IDR"}; - if ((uint32_t) task->type < 9) - LOG_V("libMIX thinks it is %s Frame, the input is %s Frame", FrameTypeStr[frametype], FrameTypeStr[task->type]); - else - LOG_V("Wrong Frame type %d, type may not be initialized ?\n", task->type); - } - -//temparily comment out to avoid uninitialize error -// if (task->type == FTYPE_UNKNOWN || (uint32_t) task->type > 9) - task->type = frametype; - - return ENCODE_SUCCESS; -} - -Encode_Status VideoEncoderAVC::getExtFormatOutput(VideoEncOutputBuffer *outBuffer) { +Encode_Status VideoEncoderAVC::getOutput(VideoEncOutputBuffer *outBuffer) { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; + bool useLocalBuffer = false; + uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval; LOG_V("Begin\n"); + CHECK_NULL_RETURN_IFFAIL(outBuffer); + + setKeyFrame(idrPeroid); + + // prepare for output, map the coded buffer + ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer); + CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); switch (outBuffer->format) { + case OUTPUT_EVERYTHING: + case OUTPUT_FRAME_DATA: { + // Output whatever we have + ret = VideoEncoderBase::outputAllData(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); + break; + } case OUTPUT_CODEC_DATA: { // Output the codec data ret = outputCodecData(outBuffer); @@ -292,10 +251,26 @@ Encode_Status VideoEncoderAVC::getExtFormatOutput(VideoEncOutputBuffer *outBuffe LOG_I("out size is = %d\n", outBuffer->dataSize); + // cleanup, unmap the coded buffer if all + // data has been copied out + ret = VideoEncoderBase::cleanupForOutput(); CLEAN_UP: + if (ret < ENCODE_SUCCESS) { + if (outBuffer->data && (useLocalBuffer == true)) { + delete[] outBuffer->data; + outBuffer->data = NULL; + useLocalBuffer = false; + } + // error happens, unmap the buffer + if (mCodedBufferMapped) { + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + mCodedBufferMapped = false; + mCurSegment = NULL; + } + } LOG_V("End\n"); return ret; } @@ -506,6 +481,7 @@ Encode_Status VideoEncoderAVC::outputOneNALU( mOffsetInSeg += (nalSize + nalOffset); outBuffer->dataSize = sizeToBeCopied; outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME; + if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; outBuffer->remainingSize = 0; } else { // if nothing to be copied out, set flag to invalid @@ -524,6 +500,7 @@ Encode_Status VideoEncoderAVC::outputOneNALU( } else { LOG_V("End of stream\n"); outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; + if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; mCurSegment = NULL; } } @@ -577,6 +554,7 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf // so the remainingSize size may larger than the remaining data size outBuffer->remainingSize = mTotalSize - mTotalSizeCopied + 100; outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME; + if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; LOG_E("Buffer size too small\n"); return ENCODE_BUFFER_TOO_SMALL; } @@ -591,6 +569,7 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf outBuffer->dataSize = sizeCopiedHere; outBuffer->remainingSize = 0; outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; + if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; mCurSegment = NULL; break; } @@ -600,7 +579,7 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf return ENCODE_SUCCESS; } -Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) { +Encode_Status 
-Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) {
+Encode_Status VideoEncoderAVC::sendEncodeCommand(void) {
     Encode_Status ret = ENCODE_SUCCESS;
     LOG_V( "Begin\n");
@@ -613,7 +592,7 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) {
             CHECK_ENCODE_STATUS_RETURN("renderHrd");
         }
 
-        ret = renderSequenceParams(task);
+        ret = renderSequenceParams();
         CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
         mNewHeader = false; //Set to require new header filed to false
     }
@@ -649,10 +628,10 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) {
         mRenderFrameRate = false;
     }
 
-    ret = renderPictureParams(task);
+    ret = renderPictureParams();
     CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
 
-    ret = renderSliceParams(task);
+    ret = renderSliceParams();
     CHECK_ENCODE_STATUS_RETURN("renderSliceParams");
 
     LOG_V( "End\n");
@@ -766,7 +745,7 @@ int VideoEncoderAVC::calcLevel(int numMbs) {
     return level;
 }
 
-Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) {
+Encode_Status VideoEncoderAVC::renderSequenceParams() {
 
     VAStatus vaStatus = VA_STATUS_SUCCESS;
     VAEncSequenceParameterBufferH264 avcSeqParams = {};
@@ -788,7 +767,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) {
     CHECK_VA_STATUS_RETURN("vaCreateBuffer");
     vaStatus = vaMapBuffer(mVADisplay, mRcParamBuf, (void **)&miscEncRCParamBuf);
     CHECK_VA_STATUS_RETURN("vaMapBuffer");
-
+
     vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
             VAEncMiscParameterBufferType,
             sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterFrameRate),
@@ -797,7 +776,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) {
     CHECK_VA_STATUS_RETURN("vaCreateBuffer");
     vaStatus = vaMapBuffer(mVADisplay, mFrameRateParamBuf, (void **)&miscEncFrameRateParamBuf);
     CHECK_VA_STATUS_RETURN("vaMapBuffer");
-
+
     miscEncRCParamBuf->type = VAEncMiscParameterTypeRateControl;
     rcMiscParam = (VAEncMiscParameterRateControl *)miscEncRCParamBuf->data;
     miscEncFrameRateParamBuf->type = VAEncMiscParameterTypeFrameRate;
@@ -806,7 +785,6 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) {
     // avcSeqParams.level_idc = mLevel;
     avcSeqParams.intra_period = mComParams.intraPeriod;
     avcSeqParams.intra_idr_period = mVideoParamsAVC.idrInterval;
-    avcSeqParams.ip_period = mVideoParamsAVC.ipPeriod;
 
     avcSeqParams.picture_width_in_mbs = (mComParams.resolution.width + 15) / 16;
     avcSeqParams.picture_height_in_mbs = (mComParams.resolution.height + 15) / 16;
@@ -844,9 +822,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) {
     }
 
     // This is a temporary fix suggested by Binglin for bad encoding quality issue
-    avcSeqParams.max_num_ref_frames = 1;
-    if(avcSeqParams.ip_period > 1)
-        avcSeqParams.max_num_ref_frames = 2;
+    avcSeqParams.max_num_ref_frames = 1; // TODO: We need a long term design for this field
 
     LOG_V("===h264 sequence params===\n");
     LOG_I( "seq_parameter_set_id = %d\n", (uint32_t)avcSeqParams.seq_parameter_set_id);
@@ -871,27 +847,28 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) {
             sizeof(avcSeqParams), 1, &avcSeqParams, &mSeqParamBuf);
     CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mRcParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
 
     vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mFrameRateParamBuf, 1);
     CHECK_VA_STATUS_RETURN("vaRenderPicture");
 
     vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
     CHECK_VA_STATUS_RETURN("vaRenderPicture");
 
-    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mRcParamBuf, 1);
-
CHECK_VA_STATUS_RETURN("vaRenderPicture"); return ENCODE_SUCCESS; } -Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) { +Encode_Status VideoEncoderAVC::renderPictureParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncPictureParameterBufferH264 avcPicParams = {}; LOG_V( "Begin\n\n"); // set picture params for HW - avcPicParams.ReferenceFrames[0].picture_id= task->ref_surface[0]; - avcPicParams.CurrPic.picture_id= task->rec_surface; - avcPicParams.coded_buf = task->coded_buffer; + avcPicParams.ReferenceFrames[0].picture_id= mRefSurface; + avcPicParams.CurrPic.picture_id= mRecSurface; + avcPicParams.coded_buf = mVACodedBuffer [mCodedBufIndex]; //avcPicParams.picture_width = mComParams.resolution.width; //avcPicParams.picture_height = mComParams.resolution.height; avcPicParams.last_picture = 0; @@ -899,7 +876,7 @@ Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) { LOG_V("======h264 picture params======\n"); LOG_I( "reference_picture = 0x%08x\n", avcPicParams.ReferenceFrames[0].picture_id); LOG_I( "reconstructed_picture = 0x%08x\n", avcPicParams.CurrPic.picture_id); -// LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); + LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); LOG_I( "coded_buf = 0x%08x\n", avcPicParams.coded_buf); //LOG_I( "picture_width = %d\n", avcPicParams.picture_width); //LOG_I( "picture_height = %d\n\n", avcPicParams.picture_height); @@ -920,7 +897,7 @@ Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) { } -Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) { +Encode_Status VideoEncoderAVC::renderSliceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; @@ -929,8 +906,8 @@ Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) { uint32_t sliceHeightInMB = 0; uint32_t maxSliceNum = 0; uint32_t minSliceNum = 0; - uint32_t actualSliceHeightInMB = 0; - uint32_t startRowInMB = 0; + int actualSliceHeightInMB = 0; + int startRowInMB = 0; uint32_t modulus = 0; LOG_V( "Begin\n\n"); @@ -938,7 +915,7 @@ Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) { maxSliceNum = (mComParams.resolution.height + 15) / 16; minSliceNum = 1; - if (task->type == FTYPE_I || task->type == FTYPE_IDR) { + if (mIsIntra) { sliceNum = mVideoParamsAVC.sliceNum.iSliceNum; } else { sliceNum = mVideoParamsAVC.sliceNum.pSliceNum; @@ -961,18 +938,14 @@ Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) { vaStatus = vaCreateBuffer( mVADisplay, mVAContext, VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBufferH264), + sizeof(VAEncSliceParameterBuffer), sliceNum, NULL, &mSliceParamBuf); CHECK_VA_STATUS_RETURN("vaCreateBuffer"); - VAEncSliceParameterBufferH264 *sliceParams, *currentSlice; - + VAEncSliceParameterBuffer *sliceParams, *currentSlice; vaStatus = vaMapBuffer(mVADisplay, mSliceParamBuf, (void **)&sliceParams); CHECK_VA_STATUS_RETURN("vaMapBuffer"); - memset(sliceParams, 0 , sizeof(VAEncSliceParameterBufferH264)); - if(!sliceParams) - return ENCODE_NULL_PTR; currentSlice = sliceParams; startRowInMB = 0; @@ -983,29 +956,25 @@ Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) { actualSliceHeightInMB ++; } - // starting MB row number for this slice, suppose macroblock 16x16 - currentSlice->macroblock_address = startRowInMB * mComParams.resolution.width /16; + // starting MB row number for this slice + currentSlice->start_row_number = startRowInMB; // slice height measured in MB - currentSlice->num_macroblocks = actualSliceHeightInMB * mComParams.resolution.width /16; - 
if(task->type == FTYPE_I||task->type == FTYPE_IDR) - currentSlice->slice_type = 2; - else if(task->type == FTYPE_P) - currentSlice->slice_type = 0; - else if(task->type == FTYPE_B) - currentSlice->slice_type = 1; - currentSlice->disable_deblocking_filter_idc = mComParams.disableDeblocking; + currentSlice->slice_height = actualSliceHeightInMB; + currentSlice->slice_flags.bits.is_intra = mIsIntra; + currentSlice->slice_flags.bits.disable_deblocking_filter_idc + = mComParams.disableDeblocking; // This is a temporary fix suggested by Binglin for bad encoding quality issue // TODO: We need a long term design for this field - //currentSlice->slice_flags.bits.uses_long_term_ref = 0; - //currentSlice->slice_flags.bits.is_long_term_ref = 0; + currentSlice->slice_flags.bits.uses_long_term_ref = 0; + currentSlice->slice_flags.bits.is_long_term_ref = 0; LOG_V("======AVC slice params======\n"); LOG_I( "slice_index = %d\n", (int) sliceIndex); - LOG_I( "macroblock_address = %d\n", (int) currentSlice->macroblock_address); - LOG_I( "slice_height_in_mb = %d\n", (int) currentSlice->num_macroblocks); - LOG_I( "slice.type = %d\n", (int) currentSlice->slice_type); - LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) currentSlice->disable_deblocking_filter_idc); + LOG_I( "start_row_number = %d\n", (int) currentSlice->start_row_number); + LOG_I( "slice_height_in_mb = %d\n", (int) currentSlice->slice_height); + LOG_I( "slice.is_intra = %d\n", (int) currentSlice->slice_flags.bits.is_intra); + LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) currentSlice->slice_flags.bits.disable_deblocking_filter_idc); startRowInMB += actualSliceHeightInMB; } diff --git a/videoencoder/VideoEncoderAVC.h b/videoencoder/VideoEncoderAVC.h index 1248a3e..b57ef67 100644 --- a/videoencoder/VideoEncoderAVC.h +++ b/videoencoder/VideoEncoderAVC.h @@ -18,6 +18,7 @@ public: ~VideoEncoderAVC() {}; virtual Encode_Status start(); + virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams); virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams); @@ -26,9 +27,8 @@ public: protected: - virtual Encode_Status sendEncodeCommand(EncodeTask *task); - virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer); - virtual Encode_Status updateFrameInfo(EncodeTask* task); + virtual Encode_Status sendEncodeCommand(void); + private: // Local Methods @@ -40,9 +40,9 @@ private: Encode_Status renderMaxSliceSize(); Encode_Status renderAIR(); - Encode_Status renderSequenceParams(EncodeTask *task); - Encode_Status renderPictureParams(EncodeTask *task); - Encode_Status renderSliceParams(EncodeTask *task); + Encode_Status renderSequenceParams(); + Encode_Status renderPictureParams(); + Encode_Status renderSliceParams(); int calcLevel(int numMbs); public: diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index b52cbd1..55012d7 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -12,6 +12,7 @@ #include #include +#undef DUMP_SRC_DATA // To dump source data // API declaration extern "C" { VAStatus vaLockSurface(VADisplay dpy, @@ -32,36 +33,48 @@ VAStatus vaUnlockSurface(VADisplay dpy, ); } VideoEncoderBase::VideoEncoderBase() - :mInitialized(true) - ,mStarted(false) + :mInitialized(false) ,mVADisplay(NULL) - ,mVAContext(VA_INVALID_ID) - ,mVAConfig(VA_INVALID_ID) + ,mVAContext(0) + ,mVAConfig(0) ,mVAEntrypoint(VAEntrypointEncSlice) - ,mCodedBufSize(0) + ,mCurSegment(NULL) + 
,mOffsetInSeg(0) + ,mTotalSize(0) + ,mTotalSizeCopied(0) + ,mForceKeyFrame(false) ,mNewHeader(false) - //,mAutoReference(17 /*VAConfigAttribEncAutoReference*/) + ,mFirstFrame (true) ,mRenderMaxSliceSize(false) ,mRenderQP (false) ,mRenderAIR(false) ,mRenderFrameRate(false) ,mRenderBitRate(false) ,mRenderHrd(false) + ,mLastCodedBuffer(0) + ,mOutCodedBuffer(0) ,mSeqParamBuf(0) ,mPicParamBuf(0) ,mSliceParamBuf(0) + ,mSurfaces(NULL) + ,mSurfaceCnt(0) + ,mSrcSurfaceMapList(NULL) + ,mCurSurface(VA_INVALID_SURFACE) ,mRefSurface(VA_INVALID_SURFACE) ,mRecSurface(VA_INVALID_SURFACE) + ,mLastSurface(VA_INVALID_SURFACE) + ,mLastInputRawBuffer(NULL) + ,mEncodedFrames(0) ,mFrameNum(0) + ,mCodedBufSize(0) + ,mCodedBufIndex(0) + ,mPicSkipped(false) + ,mIsIntra(true) ,mSliceSizeOverflow(false) - ,mCurOutputTask(NULL) - ,mOutCodedBuffer(0) ,mCodedBufferMapped(false) - ,mCurSegment(NULL) - ,mOffsetInSeg(0) - ,mTotalSize(0) - ,mTotalSizeCopied(0) - ,mFrameSkipped(false){ + ,mDataCopiedOut(false) + ,mKeyFrame(true) + ,mInitCheck(true) { VAStatus vaStatus = VA_STATUS_SUCCESS; // here the display can be any value, use following one @@ -71,6 +84,8 @@ VideoEncoderBase::VideoEncoderBase() int minorVersion = -1; setDefaultParams(); + mVACodedBuffer [0] = 0; + mVACodedBuffer [1] = 0; LOG_V("vaGetDisplay \n"); mVADisplay = vaGetDisplay(&display); @@ -82,17 +97,19 @@ VideoEncoderBase::VideoEncoderBase() LOG_V("vaInitialize \n"); if (vaStatus != VA_STATUS_SUCCESS) { LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus); - mInitialized = false; + mInitCheck = false; } +#ifdef VIDEO_ENC_STATISTICS_ENABLE + memset(&mVideoStat, 0, sizeof(VideoStatistics)); + mVideoStat.min_encode_time = 0xFFFFFFFF; +#endif + } VideoEncoderBase::~VideoEncoderBase() { VAStatus vaStatus = VA_STATUS_SUCCESS; - - stop(); - vaStatus = vaTerminate(mVADisplay); LOG_V( "vaTerminate\n"); if (vaStatus != VA_STATUS_SUCCESS) { @@ -106,24 +123,32 @@ Encode_Status VideoEncoderBase::start() { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; + VASurfaceID surfaces[2]; + int32_t index = -1; + SurfaceMap *map = mSrcSurfaceMapList; + uint32_t stride_aligned = 0; + uint32_t height_aligned = 0; - if (!mInitialized) { - LOGE("Encoder Initialize fail can not start"); - return ENCODE_DRIVER_FAIL; - } + VAConfigAttrib vaAttrib[2]; + uint32_t maxSize = 0; - if (mStarted) { + if (mInitialized) { LOG_V("Encoder has been started\n"); return ENCODE_ALREADY_INIT; } - VAConfigAttrib vaAttrib[2]; + if (!mInitCheck) { + LOGE("Encoder Initialize fail can not start"); + return ENCODE_DRIVER_FAIL; + } + vaAttrib[0].type = VAConfigAttribRTFormat; vaAttrib[1].type = VAConfigAttribRateControl; vaAttrib[0].value = VA_RT_FORMAT_YUV420; vaAttrib[1].value = mComParams.rcMode; LOG_V( "======VA Configuration======\n"); + LOG_I( "profile = %d\n", mComParams.profile); LOG_I( "mVAEntrypoint = %d\n", mVAEntrypoint); LOG_I( "vaAttrib[0].type = %d\n", vaAttrib[0].type); @@ -136,9 +161,10 @@ Encode_Status VideoEncoderBase::start() { vaStatus = vaCreateConfig( mVADisplay, mComParams.profile, mVAEntrypoint, &vaAttrib[0], 2, &(mVAConfig)); - CHECK_VA_STATUS_RETURN("vaCreateConfig"); + CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateConfig"); if (mComParams.rcMode == VA_RC_VCM) { + // Following three features are only enabled in VCM mode mRenderMaxSliceSize = true; mRenderAIR = true; @@ -147,10 +173,10 @@ Encode_Status VideoEncoderBase::start() { LOG_V( "======VA Create Surfaces for Rec/Ref frames ======\n"); - VASurfaceID surfaces[2]; VASurfaceAttributeTPI attribute_tpi; - 
uint32_t stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; - uint32_t height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; + + stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; + height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; attribute_tpi.size = stride_aligned * height_aligned * 3 / 2; attribute_tpi.luma_stride = stride_aligned; @@ -162,357 +188,415 @@ Encode_Status VideoEncoderBase::start() { attribute_tpi.pixel_format = VA_FOURCC_NV12; attribute_tpi.type = VAExternalMemoryNULL; -#ifndef AUTO_REFERENCE - vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned, - VA_RT_FORMAT_YUV420, 2, surfaces, &attribute_tpi); - CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); - mRefSurface = surfaces[0]; - mRecSurface = surfaces[1]; -#endif - - //Prepare all Surfaces to be added into Context - uint32_t contextSurfaceCnt; -#ifndef AUTO_REFERENCE - contextSurfaceCnt = 2 + mSrcSurfaceMapList.size(); -#else - contextSurfaceCnt = mSrcSurfaceMapList.size(); -#endif + vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned, + VA_RT_FORMAT_YUV420, 2, surfaces, &attribute_tpi); + CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); - VASurfaceID *contextSurfaces = new VASurfaceID[contextSurfaceCnt]; - int32_t index = -1; - SurfaceMap *map = NULL; - android::List::iterator map_node; + mRefSurface = surfaces[0]; + mRecSurface = surfaces[1]; - for(map_node = mSrcSurfaceMapList.begin(); map_node != mSrcSurfaceMapList.end(); map_node++) - { - contextSurfaces[++index] = (*map_node)->surface; - (*map_node)->added = true; + //count total surface id already allocated + mSurfaceCnt = 2; + + while(map) { + mSurfaceCnt ++; + map = map->next; } -#ifndef AUTO_REFERENCE - contextSurfaces[++index] = mRefSurface; - contextSurfaces[++index] = mRecSurface; -#endif + mSurfaces = new VASurfaceID[mSurfaceCnt]; + map = mSrcSurfaceMapList; + while(map) { + mSurfaces[++index] = map->surface; + map->added = true; + map = map->next; + } + mSurfaces[++index] = mRefSurface; + mSurfaces[++index] = mRecSurface; //Initialize and save the VA context ID LOG_V( "vaCreateContext\n"); + vaStatus = vaCreateContext(mVADisplay, mVAConfig, mComParams.resolution.width, mComParams.resolution.height, - 0, contextSurfaces, contextSurfaceCnt, + 0, mSurfaces, mSurfaceCnt, &(mVAContext)); + CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateContext"); - delete [] contextSurfaces; - - CHECK_VA_STATUS_RETURN("vaCreateContext"); - - LOG_I("Success to create libva context width %d, height %d\n", + LOG_I("Created libva context width %d, height %d\n", mComParams.resolution.width, mComParams.resolution.height); - uint32_t maxSize = 0; ret = getMaxOutSize(&maxSize); - CHECK_ENCODE_STATUS_RETURN("getMaxOutSize"); + CHECK_ENCODE_STATUS_CLEANUP("getMaxOutSize"); - // Create CodedBuffer for output - VABufferID VACodedBuffer; + // Create coded buffer for output + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncCodedBufferType, + mCodedBufSize, + 1, NULL, + &(mVACodedBuffer[0])); - for(uint32_t i = 0; i 0) - if(NO_ERROR != mEncodeTask_Cond.waitRelative(mCodedBuffer_Lock, 1000000*timeout)){ - mCodedBuffer_Lock.unlock(); - LOG_E("Time out wait for Coded buffer.\n"); - return ENCODE_DEVICE_BUSY; - } - else {//Nonblock - mCodedBuffer_Lock.unlock(); - LOG_E("Coded buffer is not ready now.\n"); - return ENCODE_DEVICE_BUSY; - } + Encode_Status status; + + if (mComParams.syncEncMode) { + LOG_I("Sync Enocde Mode, no optimization, no one frame delay\n"); + status = 
syncEncode(inBuffer); + } else { + LOG_I("Async Enocde Mode, HW/SW works in parallel, introduce one frame delay\n"); + status = asyncEncode(inBuffer); } - if(mVACodedBufferList.empty()){ - mCodedBuffer_Lock.unlock(); - return ENCODE_DEVICE_BUSY; - } - VABufferID coded_buf = (VABufferID) *(mVACodedBufferList.begin()); - mVACodedBufferList.erase(mVACodedBufferList.begin()); - mCodedBuffer_Lock.unlock(); - - LOG_V("CodedBuffer ID 0x%08x\n", coded_buf); - - //All resources are ready, start to assemble EncodeTask - EncodeTask* task = new EncodeTask(); - - task->completed = false; - task->enc_surface = sid; - task->coded_buffer = coded_buf; - task->timestamp = inBuffer->timeStamp; - task->in_data = inBuffer->data; - - //Setup frame info, like flag ( SYNCFRAME), frame number, type etc - task->type = inBuffer->type; - task->flag = inBuffer->flag; - PrepareFrameInfo(task); - -#ifndef AUTO_REFERENCE - //Setup ref /rec frames - //TODO: B frame support, temporary use same logic - switch (inBuffer->type) { - case FTYPE_UNKNOWN: - case FTYPE_IDR: - case FTYPE_I: - case FTYPE_P: - { - if(!mFrameSkipped) { - VASurfaceID tmpSurface = mRecSurface; - mRecSurface = mRefSurface; - mRefSurface = tmpSurface; - } - - task->ref_surface[0] = mRefSurface; - task->ref_surface[1] = VA_INVALID_SURFACE; - task->rec_surface = mRecSurface; +#ifdef VIDEO_ENC_STATISTICS_ENABLE + struct timespec ts2; + clock_gettime(CLOCK_MONOTONIC, &ts2); - break; - } - case FTYPE_B: - default: - LOG_V("Something wrong, B frame may not be supported in this mode\n"); - ret = ENCODE_NOT_SUPPORTED; - goto CLEAN_UP; - } -#else - task->ref_surface[0] = VA_INVALID_SURFACE; - task->ref_surface[1] = VA_INVALID_SURFACE; - task->rec_surface = VA_INVALID_SURFACE; + uint32_t encode_time = (ts2.tv_sec - ts1.tv_sec) * 1000000 + (ts2.tv_nsec - ts1.tv_nsec) / 1000; + if (encode_time > mVideoStat.max_encode_time) { + mVideoStat.max_encode_time = encode_time; + mVideoStat.max_encode_frame = mFrameNum; + } + + if (encode_time < mVideoStat.min_encode_time) { + mVideoStat.min_encode_time = encode_time; + mVideoStat.min_encode_frame = mFrameNum; + } + + mVideoStat.average_encode_time += encode_time; #endif - //======Start Encoding, add task to list====== - LOG_V("Start Encoding vaSurface=0x%08x\n", task->enc_surface); + return status; +} + +Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; + uint8_t *buf = NULL; + + inBuffer->bufAvailable = false; + if (mNewHeader) mFrameNum = 0; + + // current we use one surface for source data, + // one for reference and one for reconstructed + decideFrameType(); + ret = manageSrcSurface(inBuffer); + CHECK_ENCODE_STATUS_RETURN("manageSrcSurface"); + + // Start encoding process + LOG_V( "vaBeginPicture\n"); + LOG_I( "mVAContext = 0x%08x\n",(uint32_t) mVAContext); + LOG_I( "Surface = 0x%08x\n",(uint32_t) mCurSurface); + LOG_I( "mVADisplay = 0x%08x\n",(uint32_t)mVADisplay); + +#ifdef DUMP_SRC_DATA + + if (mBufferMode == BUFFER_SHARING_SURFACE && mFirstFrame){ + + FILE *fp = fopen("/data/data/dump_encoder.yuv", "wb"); + VAImage image; + uint8_t *usrptr = NULL; + uint32_t stride = 0; + uint32_t frameSize = 0; + + vaStatus = vaDeriveImage(mVADisplay, mCurSurface, &image); + CHECK_VA_STATUS_RETURN("vaDeriveImage"); + + LOG_V( "vaDeriveImage Done\n"); + + frameSize = image.data_size; + stride = image.pitches[0]; + + LOG_I("Source Surface/Image information --- start ---- :"); + LOG_I("surface = 
0x%08x\n",(uint32_t)mCurFrame->surface); + LOG_I("image->pitches[0] = %d\n", image.pitches[0]); + LOG_I("image->pitches[1] = %d\n", image.pitches[1]); + LOG_I("image->offsets[0] = %d\n", image.offsets[0]); + LOG_I("image->offsets[1] = %d\n", image.offsets[1]); + LOG_I("image->num_planes = %d\n", image.num_planes); + LOG_I("image->width = %d\n", image.width); + LOG_I("image->height = %d\n", image.height); + LOG_I ("frameSize= %d\n", image.data_size); + LOG_I("Source Surface/Image information ----end ----"); + + vaStatus = vaMapBuffer(mVADisplay, image.buf, (void **) &usrptr); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + fwrite(usrptr, frameSize, 1, fp); + fflush(fp); + fclose(fp); + + vaStatus = vaUnmapBuffer(mVADisplay, image.buf); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + + vaStatus = vaDestroyImage(mVADisplay, image.image_id); + CHECK_VA_STATUS_RETURN("vaDestroyImage"); + } +#endif - vaStatus = vaBeginPicture(mVADisplay, mVAContext, task->enc_surface); - CHECK_VA_STATUS_GOTO_CLEANUP("vaBeginPicture"); + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface); + CHECK_VA_STATUS_RETURN("vaBeginPicture"); - ret = sendEncodeCommand(task); - CHECK_ENCODE_STATUS_CLEANUP("sendEncodeCommand"); + ret = sendEncodeCommand(); + CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand"); vaStatus = vaEndPicture(mVADisplay, mVAContext); - CHECK_VA_STATUS_GOTO_CLEANUP("vaEndPicture"); + CHECK_VA_STATUS_RETURN("vaEndPicture"); - LOG_V("Add Task %p into Encode Task list\n", task); - mEncodeTask_Lock.lock(); - mEncodeTaskList.push_back(task); - mEncodeTask_Cond.signal(); - mEncodeTask_Lock.unlock(); + LOG_V( "vaEndPicture\n"); - mFrameNum ++; + if (mFirstFrame) { + updateProperities(); + decideFrameType(); + } - LOG_V("encode return Success\n"); + LOG_I ("vaSyncSurface ID = 0x%08x\n", mLastSurface); + vaStatus = vaSyncSurface(mVADisplay, mLastSurface); + if (vaStatus != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaSyncSurface\n"); + } - return ENCODE_SUCCESS; + mOutCodedBuffer = mLastCodedBuffer; -CLEAN_UP: + // Need map buffer before calling query surface below to get + // the right skip frame flag for current frame + // It is a requirement of video driver + vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - delete task; - mCodedBuffer_Lock.lock(); - mVACodedBufferList.push_back(coded_buf); //push to CodedBuffer pool again since it is not used - mCodedBuffer_Cond.signal(); - mCodedBuffer_Lock.unlock(); + if (mFirstFrame) { + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface); + CHECK_VA_STATUS_RETURN("vaBeginPicture"); - LOG_V("encode return error=%x\n", ret); + ret = sendEncodeCommand(); + CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand"); - return ret; + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS_RETURN("vaEndPicture"); + + mKeyFrame = true; + } + + // Query the status of last surface to check if its next frame is skipped + VASurfaceStatus vaSurfaceStatus; + vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastSurface, &vaSurfaceStatus); + CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); + + mPicSkipped = vaSurfaceStatus & VASurfaceSkipped; + +#ifdef VIDEO_ENC_STATISTICS_ENABLE + if (mPicSkipped) + mVideoStat.skipped_frames ++; +#endif + + mLastSurface = VA_INVALID_SURFACE; + updateProperities(); + mCurSurface = VA_INVALID_SURFACE; + + if (mLastInputRawBuffer) mLastInputRawBuffer->bufAvailable = true; + + LOG_V("ref the current inBuffer\n"); + + mLastInputRawBuffer = inBuffer; + mFirstFrame = false; + + 
return ENCODE_SUCCESS; } -/* - 1. Firstly check if one task is outputting data, if yes, continue outputting, if not try to get one from list. - 2. Due to block/non-block/block with timeout 3 modes, if task is not completed, then sync surface, if yes, - start output data - 3. Use variable curoutputtask to record task which is getOutput() working on to avoid push again when get failure - on non-block/block with timeout modes. - 4. if complete all output data, curoutputtask should be set NULL -*/ -Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout) { +Encode_Status VideoEncoderBase::syncEncode(VideoEncRawBuffer *inBuffer) { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; - bool useLocalBuffer = false; + uint8_t *buf = NULL; + VASurfaceID tmpSurface = VA_INVALID_SURFACE; - CHECK_NULL_RETURN_IFFAIL(outBuffer); + inBuffer->bufAvailable = false; + if (mNewHeader) mFrameNum = 0; - if (mCurOutputTask == NULL) { - mEncodeTask_Lock.lock(); - if(mEncodeTaskList.empty()) { - LOG_V("getOutput CurrentTask is NULL\n"); - if(timeout == FUNC_BLOCK) { - LOG_V("waiting for task....\n"); - mEncodeTask_Cond.wait(mEncodeTask_Lock); - } else if (timeout > 0) { - LOG_V("waiting for task in % ms....\n", timeout); - if(NO_ERROR != mEncodeTask_Cond.waitRelative(mEncodeTask_Lock, 1000000*timeout)) { - mEncodeTask_Lock.unlock(); - LOG_E("Time out wait for encode task.\n"); - return ENCODE_DATA_NOT_READY; - } - } else {//Nonblock - mEncodeTask_Lock.unlock(); - return ENCODE_DATA_NOT_READY; - } - } + // current we use one surface for source data, + // one for reference and one for reconstructed + decideFrameType(); + ret = manageSrcSurface(inBuffer); + CHECK_ENCODE_STATUS_RETURN("manageSrcSurface"); - if(mEncodeTaskList.empty()){ - mEncodeTask_Lock.unlock(); - return ENCODE_DATA_NOT_READY; - } - mCurOutputTask = *(mEncodeTaskList.begin()); - mEncodeTaskList.erase(mEncodeTaskList.begin()); - mEncodeTask_Lock.unlock(); + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface); + CHECK_VA_STATUS_RETURN("vaBeginPicture"); + + ret = sendEncodeCommand(); + CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand"); + + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS_RETURN("vaEndPicture"); + + LOG_I ("vaSyncSurface ID = 0x%08x\n", mCurSurface); + vaStatus = vaSyncSurface(mVADisplay, mCurSurface); + if (vaStatus != VA_STATUS_SUCCESS) { + LOG_W( "Failed vaSyncSurface\n"); } - //sync/query/wait task if not completed - if (mCurOutputTask->completed == false) { - uint8_t *buf = NULL; + mOutCodedBuffer = mVACodedBuffer[mCodedBufIndex]; + + // Need map buffer before calling query surface below to get + // the right skip frame flag for current frame + // It is a requirement of video driver + vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + + mPicSkipped = false; + if (!mFirstFrame) { + // Query the status of last surface to check if its next frame is skipped VASurfaceStatus vaSurfaceStatus; + vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastSurface, &vaSurfaceStatus); + CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); + mPicSkipped = vaSurfaceStatus & VASurfaceSkipped; + } - if (timeout == FUNC_BLOCK) { - //block mode, direct sync surface to output data + mLastSurface = mCurSurface; + mCurSurface = VA_INVALID_SURFACE; - LOG_I ("block mode, vaSyncSurface ID = 0x%08x\n", mCurOutputTask->enc_surface); - vaStatus = vaSyncSurface(mVADisplay, mCurOutputTask->enc_surface); - 
CHECK_VA_STATUS_GOTO_CLEANUP("vaSyncSurface"); + mEncodedFrames ++; + mFrameNum ++; - mOutCodedBuffer = mCurOutputTask->coded_buffer; + if (!mPicSkipped) { + tmpSurface = mRecSurface; + mRecSurface = mRefSurface; + mRefSurface = tmpSurface; + } - // Check frame skip - // Need map buffer before calling query surface below to get the right skip frame flag for current frame - // It is a requirement of video driver - vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); - vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); +#ifdef VIDEO_ENC_STATISTICS_ENABLE + if (mPicSkipped) + mVideoStat.skipped_frames ++; +#endif - vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus); - CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); - mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped; + inBuffer->bufAvailable = true; + return ENCODE_SUCCESS; +} - mCurOutputTask->completed = true; +void VideoEncoderBase::setKeyFrame(int32_t keyFramePeriod) { - } else { - //For both block with timeout and non-block mode, query surface, if ready, output data - LOG_I ("non-block mode, vaQuerySurfaceStatus ID = 0x%08x\n", mCurOutputTask->enc_surface); - - vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus); - if (vaSurfaceStatus & VASurfaceReady) { - mOutCodedBuffer = mCurOutputTask->coded_buffer; - mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped; - mCurOutputTask->completed = true; - //if need to call SyncSurface again ? - - } else {//not ready yet - ret = ENCODE_DATA_NOT_READY; - goto CLEAN_UP; + // For first getOutput async mode, the mFrameNum already increased to 2, and of course is key frame + // frame 0 is already encoded and will be outputed here + // frame 1 is encoding now, frame 2 will be sent to encoder for next encode() call + if (!mComParams.syncEncMode) { + if (mFrameNum > 2) { + if (keyFramePeriod != 0 && + (((mFrameNum - 2) % keyFramePeriod) == 0)) { + mKeyFrame = true; + } else { + mKeyFrame = false; } - + } else if (mFrameNum == 2) { + mKeyFrame = true; + } + } else { + if (mFrameNum > 1) { + if (keyFramePeriod != 0 && + (((mFrameNum - 1) % keyFramePeriod) == 0)) { + mKeyFrame = true; + } else { + mKeyFrame = false; + } + } else if (mFrameNum == 1) { + mKeyFrame = true; } + } +} +Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; + bool useLocalBuffer = false; + + CHECK_NULL_RETURN_IFFAIL(outBuffer); + + LOG_V("Begin\n"); + + if (outBuffer->format != OUTPUT_EVERYTHING && outBuffer->format != OUTPUT_FRAME_DATA) { + LOG_E("Output buffer mode not supported\n"); + goto CLEAN_UP; } - //start to output data + setKeyFrame(mComParams.intraPeriod); + ret = prepareForOutput(outBuffer, &useLocalBuffer); CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); - //copy all flags to outBuffer - outBuffer->flag = mCurOutputTask->flag; - outBuffer->type = mCurOutputTask->type; - outBuffer->timeStamp = mCurOutputTask->timestamp; - - if (outBuffer->format == OUTPUT_EVERYTHING || outBuffer->format == OUTPUT_FRAME_DATA) { - ret = outputAllData(outBuffer); - CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); - }else { - ret = getExtFormatOutput(outBuffer); - CHECK_ENCODE_STATUS_CLEANUP("getExtFormatOutput"); - } + ret = outputAllData(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); LOG_I("out size for this getOutput call = %d\n", outBuffer->dataSize); ret = cleanupForOutput(); CHECK_ENCODE_STATUS_CLEANUP("cleanupForOutput"); - 
LOG_V("getOutput return Success, Frame skip is %d\n", mFrameSkipped); - - return ENCODE_SUCCESS; - CLEAN_UP: - if (outBuffer->data && (useLocalBuffer == true)) { - delete[] outBuffer->data; - outBuffer->data = NULL; - useLocalBuffer = false; - } + if (ret < ENCODE_SUCCESS) { + if (outBuffer->data && (useLocalBuffer == true)) { + delete[] outBuffer->data; + outBuffer->data = NULL; + useLocalBuffer = false; + } - if (mCodedBufferMapped) { - vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - mCodedBufferMapped = false; - mCurSegment = NULL; + if (mCodedBufferMapped) { + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + mCodedBufferMapped = false; + mCurSegment = NULL; + } } - delete mCurOutputTask; - mCurOutputTask = NULL; - mCodedBuffer_Lock.lock(); - mVACodedBufferList.push_back(mOutCodedBuffer); - mCodedBuffer_Cond.signal(); - mCodedBuffer_Lock.unlock(); - - LOG_V("getOutput return error=%x\n", ret); + LOG_V("End\n"); return ret; } + void VideoEncoderBase::flush() { LOG_V( "Begin\n"); // reset the properities + mEncodedFrames = 0; mFrameNum = 0; + mPicSkipped = false; + mIsIntra = true; LOG_V( "end\n"); } @@ -522,69 +606,57 @@ Encode_Status VideoEncoderBase::stop() { VAStatus vaStatus = VA_STATUS_SUCCESS; Encode_Status ret = ENCODE_SUCCESS; SurfaceMap *map = NULL; - EncodeTask *task = NULL; LOG_V( "Begin\n"); + if (mSurfaces) { + delete [] mSurfaces; + mSurfaces = NULL; + } + // It is possible that above pointers have been allocated - // before we set mStarted to true - if (!mStarted) { + // before we set mInitialized to true + if (!mInitialized) { LOG_V("Encoder has been stopped\n"); return ENCODE_SUCCESS; } - mCodedBuffer_Lock.lock(); - mVACodedBufferList.clear(); - mCodedBuffer_Lock.unlock(); - mCodedBuffer_Cond.broadcast(); + LOG_V( "vaDestroyContext\n"); + vaStatus = vaDestroyContext(mVADisplay, mVAContext); + CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext"); - //Delete all uncompleted tasks - mEncodeTask_Lock.lock(); - while(! mEncodeTaskList.empty()) - { - delete *mEncodeTaskList.begin(); - mEncodeTaskList.erase(mEncodeTaskList.begin()); - } - mEncodeTask_Lock.unlock(); - mEncodeTask_Cond.broadcast(); + LOG_V( "vaDestroyConfig\n"); + vaStatus = vaDestroyConfig(mVADisplay, mVAConfig); + CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig"); - //Release Src Surface Buffer Map, destroy surface manually since it is not added into context + // Release Src Surface Buffer Map LOG_V( "Rlease Src Surface Map\n"); - while(! mSrcSurfaceMapList.empty()) - { - if (! (*mSrcSurfaceMapList.begin())->added) { - LOG_V( "Rlease the Src Surface Buffer not added into vaContext\n"); - vaDestroySurfaces(mVADisplay, &((*mSrcSurfaceMapList.begin())->surface), 1); - } - delete (*mSrcSurfaceMapList.begin()); - mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin()); - } - - LOG_V( "vaDestroyContext\n"); - if (mVAContext != VA_INVALID_ID) { - vaStatus = vaDestroyContext(mVADisplay, mVAContext); - CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext"); - } - LOG_V( "vaDestroyConfig\n"); - if (mVAConfig != VA_INVALID_ID) { - vaStatus = vaDestroyConfig(mVADisplay, mVAConfig); - CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig"); + map = mSrcSurfaceMapList; + while(map) { + if (! 
map->added) { + //destroy surface by itself + LOG_V( "Rlease Src Surface Buffer not added into vaContext\n"); + vaDestroySurfaces(mVADisplay, &map->surface, 1); + } + SurfaceMap *tmp = map; + map = map->next; + delete tmp; } CLEAN_UP: + mInitialized = false; - mStarted = false; - mSliceSizeOverflow = false; - mCurOutputTask= NULL; - mOutCodedBuffer = 0; - mCodedBufferMapped = false; - mCurSegment = NULL; - mOffsetInSeg =0; - mTotalSize = 0; - mTotalSizeCopied = 0; - mFrameSkipped = false; +#ifdef VIDEO_ENC_STATISTICS_ENABLE + LOG_V("Encoder Statistics:\n"); + LOG_V(" %d frames Encoded, %d frames Skipped\n", mEncodedFrames, mVideoStat.skipped_frames); + LOG_V(" Encode time: Average(%d us), Max(%d us @Frame No.%d), Min(%d us @Frame No.%d)\n", \ + mVideoStat.average_encode_time / mEncodedFrames, mVideoStat.max_encode_time, \ + mVideoStat.max_encode_frame, mVideoStat.min_encode_time, mVideoStat.min_encode_frame); + memset(&mVideoStat, 0, sizeof(VideoStatistics)); + mVideoStat.min_encode_time = 0xFFFFFFFF; +#endif LOG_V( "end\n"); return ret; } @@ -681,23 +753,13 @@ Encode_Status VideoEncoderBase::cleanupForOutput() { vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); mCodedBufferMapped = false; - mTotalSize = 0; - mOffsetInSeg = 0; - mTotalSizeCopied = 0; - - delete mCurOutputTask; - mCurOutputTask = NULL; - mCodedBuffer_Lock.lock(); - mVACodedBufferList.push_back(mOutCodedBuffer); - mCodedBuffer_Cond.signal(); - mCodedBuffer_Lock.unlock(); - - LOG_V("All data has been outputted, return CodedBuffer 0x%08x to pool\n", mOutCodedBuffer); } return ENCODE_SUCCESS; } -Encode_Status VideoEncoderBase::outputAllData(VideoEncOutputBuffer *outBuffer) { + +Encode_Status VideoEncoderBase::outputAllData( + VideoEncOutputBuffer *outBuffer) { // Data size been copied for every single call uint32_t sizeCopiedHere = 0; @@ -732,6 +794,7 @@ Encode_Status VideoEncoderBase::outputAllData(VideoEncOutputBuffer *outBuffer) { outBuffer->dataSize = outBuffer->bufferSize; outBuffer->remainingSize = mTotalSize - mTotalSizeCopied; outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME; + if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; return ENCODE_BUFFER_TOO_SMALL; } @@ -739,6 +802,7 @@ Encode_Status VideoEncoderBase::outputAllData(VideoEncOutputBuffer *outBuffer) { outBuffer->dataSize = sizeCopiedHere; outBuffer->remainingSize = 0; outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; + if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; mCurSegment = NULL; return ENCODE_SUCCESS; } @@ -774,7 +838,6 @@ void VideoEncoderBase::setDefaultParams() { mComParams.airParams.airAuto = 1; mComParams.disableDeblocking = 2; mComParams.syncEncMode = false; - mComParams.codedBufNum = 2; mHrdParam.bufferSize = 0; mHrdParam.initBufferFullness = 0; @@ -789,7 +852,7 @@ Encode_Status VideoEncoderBase::setParameters( CHECK_NULL_RETURN_IFFAIL(videoEncParams); LOG_I("Config type = %d\n", (int)videoEncParams->type); - if (mStarted) { + if (mInitialized) { LOG_E("Encoder has been initialized, should use setConfig to change configurations\n"); return ENCODE_ALREADY_INIT; } @@ -799,11 +862,10 @@ Encode_Status VideoEncoderBase::setParameters( VideoParamsCommon *paramsCommon = reinterpret_cast (videoEncParams); + if (paramsCommon->size != sizeof (VideoParamsCommon)) { return ENCODE_INVALID_PARAMS; } - if(paramsCommon->codedBufNum < 2) - paramsCommon->codedBufNum =2; mComParams = *paramsCommon; break; } @@ -967,7 +1029,7 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet 
*videoEncConfig) { // workaround #if 0 - if (!mStarted) { + if (!mInitialized) { LOG_E("Encoder has not initialized yet, can't call setConfig\n"); return ENCODE_NOT_INIT; } @@ -1138,29 +1200,51 @@ Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) { return ret; } -void VideoEncoderBase:: PrepareFrameInfo (EncodeTask* task) { - if (mNewHeader) mFrameNum = 0; - LOG_I( "mFrameNum = %d ", mFrameNum); +void VideoEncoderBase:: decideFrameType () { + + LOG_I( "mEncodedFrames = %d\n", mEncodedFrames); + LOG_I( "mFrameNum = %d\n", mFrameNum); + LOG_I( "mIsIntra = %d\n", mIsIntra); + + // determine the picture type + if (mComParams.intraPeriod == 0) { + if (mFrameNum == 0) + mIsIntra = true; + else + mIsIntra = false; + } else if ((mFrameNum % mComParams.intraPeriod) == 0) { + mIsIntra = true; + } else { + mIsIntra = false; + } - updateFrameInfo(task) ; + LOG_I( "mIsIntra = %d\n",mIsIntra); } -Encode_Status VideoEncoderBase:: updateFrameInfo (EncodeTask* task) { - task->type = FTYPE_P; +void VideoEncoderBase:: updateProperities () { - // determine the picture type - if (mFrameNum == 0) - task->type = FTYPE_I; - if (mComParams.intraPeriod != 0 && ((mFrameNum % mComParams.intraPeriod) == 0)) - task->type = FTYPE_I; + VASurfaceID tmp = VA_INVALID_SURFACE; + LOG_V( "Begin\n"); - if (task->type == FTYPE_I) - task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; + mEncodedFrames ++; + mFrameNum ++; + mLastCodedBuffer = mVACodedBuffer[mCodedBufIndex]; + mCodedBufIndex ++; + mCodedBufIndex %=2; - return ENCODE_SUCCESS; + mLastSurface = mCurSurface; + + if (!mPicSkipped) { + tmp = mRecSurface; + mRecSurface = mRefSurface; + mRefSurface = tmp; + } + + LOG_V( "End\n"); } + Encode_Status VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) { uint32_t size = mComParams.resolution.width * mComParams.resolution.height; @@ -1198,6 +1282,25 @@ Encode_Status VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) { return ENCODE_SUCCESS; } +Encode_Status VideoEncoderBase::getStatistics (VideoStatistics *videoStat) { + +#ifdef VIDEO_ENC_STATISTICS_ENABLE + if (videoStat != NULL) { + videoStat->total_frames = mEncodedFrames; + videoStat->skipped_frames = mVideoStat.skipped_frames; + videoStat->average_encode_time = mVideoStat.average_encode_time / mEncodedFrames; + videoStat->max_encode_time = mVideoStat.max_encode_time; + videoStat->max_encode_frame = mVideoStat.max_encode_frame; + videoStat->min_encode_time = mVideoStat.min_encode_time; + videoStat->min_encode_frame = mVideoStat.min_encode_frame; + } + + return ENCODE_SUCCESS; +#else + return ENCODE_NOT_SUPPORTED; +#endif +} + Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( uint32_t width, uint32_t height, uint32_t format, uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr) { @@ -1214,7 +1317,7 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( LOG_V( "Begin\n"); // If encode session has been configured, we can not request surface creation anymore - if (mStarted) { + if (mInitialized) { LOG_E( "Already Initialized, can not request VA surface anymore\n"); return ENCODE_WRONG_STATE; } @@ -1284,8 +1387,9 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( map->vinfo.format = VA_FOURCC_NV12; map->vinfo.s3dformat = 0xffffffff; map->added = false; + map->next = NULL; - mSrcSurfaceMapList.push_back(map); + mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); LOG_I( "surface = 0x%08x\n",(uint32_t)surface); LOG_I("image->pitches[0] = %d\n", image.pitches[0]); @@ -1332,7 +1436,7 @@ Encode_Status 
VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS } for(unsigned int i=0; i < upStreamBuffer->bufCnt; i++) { - if (findSurfaceMapByValue(upStreamBuffer->bufList[i]) != NULL) //already mapped + if (findSurfaceMapByValue(mSrcSurfaceMapList, upStreamBuffer->bufList[i]) != NULL) //already mapped continue; //wrap upstream buffer into vaSurface @@ -1352,12 +1456,18 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS } map->vinfo.s3dformat = 0xFFFFFFFF; map->added = false; + map->next = NULL; status = surfaceMapping(map); if (status == ENCODE_SUCCESS) - mSrcSurfaceMapList.push_back(map); + mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); else delete map; + + if (mSrcSurfaceMapList == NULL) { + LOG_E ("mSrcSurfaceMapList should not be NULL now, maybe meet mapping error\n"); + return ENCODE_NO_MEMORY; + } } return status; @@ -1383,7 +1493,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurface(SurfaceMap *map) { VASurfaceAttributeTPI vaSurfaceAttrib; uint32_t buf; - + vaSurfaceAttrib.buffers = &buf; vaStatus = vaLockSurface( @@ -1426,7 +1536,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurface(SurfaceMap *map) { LOG_I("Surface ID created from Kbuf = 0x%08x", surface); map->surface = surface; - + return ret; } @@ -1498,12 +1608,12 @@ Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle(SurfaceMap *map) { uint32_t lumaOffset = 0; uint32_t chromaUOffset = map->vinfo.height * map->vinfo.lumaStride; uint32_t chromaVOffset = chromaUOffset + 1; - + VASurfaceAttributeTPI vaSurfaceAttrib; uint32_t buf; vaSurfaceAttrib.buffers = &buf; - + vaSurfaceAttrib.count = 1; vaSurfaceAttrib.size = map->vinfo.lumaStride * map->vinfo.height * 3 / 2; vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride; @@ -1525,7 +1635,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle(SurfaceMap *map) { LOG_I("Surface ID created from Kbuf = 0x%08x", map->value); map->surface = surface; - + return ret; } @@ -1557,7 +1667,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForCI(SurfaceMap *map) { CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); map->surface = surface; - + return ret; } @@ -1635,9 +1745,9 @@ LOG_I("surfaceMapping mode=%d, format=%d, lumaStride=%d, width=%d, height=%d, va return status; } -Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid) { +Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { - Encode_Status ret = ENCODE_SUCCESS; + Encode_Status ret = ENCODE_SUCCESS; MetadataBufferType type; int32_t value; ValueInfo vinfo; @@ -1647,13 +1757,13 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA IntelMetadataBuffer imb; SurfaceMap *map = NULL; - - if (mStoreMetaDataInBuffers.isEnabled) { + + if (mStoreMetaDataInBuffers.isEnabled) { //metadatabuffer mode LOG_I("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); if (imb.UnSerialize(inBuffer->data, inBuffer->size) != IMB_SUCCESS) { //fail to parse buffer - return ENCODE_NO_REQUEST_DATA; + return ENCODE_NO_REQUEST_DATA; } imb.GetType(type); @@ -1662,21 +1772,20 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA //raw mode LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); if (! 
inBuffer->data || inBuffer->size == 0) { - return ENCODE_NULL_PTR; + return ENCODE_NULL_PTR; } type = MetadataBufferTypeUser; value = (int32_t)inBuffer->data; } - - + //find if mapped - map = (SurfaceMap*) findSurfaceMapByValue(value); + map = findSurfaceMapByValue(mSrcSurfaceMapList, value); - if (map) { + if (map) { //has mapped, get surfaceID directly LOG_I("direct find surface %d from value %x\n", map->surface, value); - *sid = map->surface; + mCurSurface = map->surface; return ret; } @@ -1684,8 +1793,8 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA //if no found from list, then try to map value with parameters LOG_I("not find surface from cache with value %x, start mapping if enough information\n", value); - if (mStoreMetaDataInBuffers.isEnabled) { - + if (mStoreMetaDataInBuffers.isEnabled) { + //if type is MetadataBufferTypeGrallocSource, use default parameters if (type == MetadataBufferTypeGrallocSource) { vinfo.mode = MEM_MODE_GFXHANDLE; @@ -1697,15 +1806,15 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA vinfo.chromStride = mComParams.resolution.width; vinfo.format = VA_FOURCC_NV12; vinfo.s3dformat = 0xFFFFFFFF; - } else { + } else { //get all info mapping needs imb.GetValueInfo(pvinfo); imb.GetExtraValues(extravalues, extravalues_count); } - + } else { - //raw mode + //raw mode vinfo.mode = MEM_MODE_MALLOC; vinfo.handle = 0; vinfo.size = inBuffer->size; @@ -1727,25 +1836,26 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA map->value = value; memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo)); map->added = false; + map->next = NULL; ret = surfaceMapping(map); if (ret == ENCODE_SUCCESS) { LOG_I("surface mapping success, map value %x into surface %d\n", value, map->surface); - mSrcSurfaceMapList.push_back(map); + mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); } else { delete map; LOG_E("surface mapping failed, wrong info or meet serious error\n"); return ret; - } + } - *sid = map->surface; + mCurSurface = map->surface; } else { //can't map due to no info LOG_E("surface mapping failed, missing information\n"); return ENCODE_NO_REQUEST_DATA; } - + if (extravalues) { //map more using same ValueInfo for(unsigned int i=0; ivalue = extravalues[i]; memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo)); map->added = false; + map->next = NULL; ret = surfaceMapping(map); if (ret == ENCODE_SUCCESS) { LOG_I("surface mapping extravalue success, map value %x into surface %d\n", extravalues[i], map->surface); - mSrcSurfaceMapList.push_back(map); + mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); } else { delete map; map = NULL; @@ -1766,10 +1877,69 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA } } } - + return ret; } +SurfaceMap *VideoEncoderBase::appendSurfaceMap( + SurfaceMap *head, SurfaceMap *map) { + + if (head == NULL) { + return map; + } + + SurfaceMap *node = head; + SurfaceMap *tail = NULL; + + while (node != NULL) { + tail = node; + node = node->next; + } + tail->next = map; + + return head; +} + +SurfaceMap *VideoEncoderBase::removeSurfaceMap( + SurfaceMap *head, SurfaceMap *map) { + + SurfaceMap *node = head; + SurfaceMap *tmpNode = NULL; + + if (head == map) { + tmpNode = head->next; + map->next = NULL; + return tmpNode; + } + + while (node != NULL) { + if (node->next == map) + break; + node = node->next; + } + + if (node != NULL) { + node->next = map->next; + } + + map->next = NULL; + return head; +} + +SurfaceMap 
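// Usage sketch for these hand-rolled list helpers (illustrative only, not
// part of the patch): appendSurfaceMap()/removeSurfaceMap() above and
// findSurfaceMapByValue() below implement an intrusive singly linked list
// keyed by SurfaceMap::value, replacing the former android::List<SurfaceMap*>
// container. Append and remove both return the (possibly new) head, so the
// result must always be assigned back to mSrcSurfaceMapList:
//
//   SurfaceMap *m = new SurfaceMap;
//   m->value = value;                 // key used for later lookups
//   m->next = NULL;                   // every new node must be terminated
//   mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, m);
//   SurfaceMap *hit = findSurfaceMapByValue(mSrcSurfaceMapList, value);
//   mSrcSurfaceMapList = removeSurfaceMap(mSrcSurfaceMapList, m);
//   delete m;                         // unlink first, then free the node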
*VideoEncoderBase::findSurfaceMapByValue( + SurfaceMap *head, int32_t value) { + + SurfaceMap *node = head; + + while (node != NULL) { + if (node->value == value) + break; + node = node->next; + } + + return node; +} + Encode_Status VideoEncoderBase::renderDynamicBitrate() { VAStatus vaStatus = VA_STATUS_SUCCESS; @@ -1893,17 +2063,3 @@ Encode_Status VideoEncoderBase::renderHrd() { return ENCODE_SUCCESS; } - -SurfaceMap *VideoEncoderBase::findSurfaceMapByValue(int32_t value) { - android::List::iterator node; - - for(node = mSrcSurfaceMapList.begin(); node != mSrcSurfaceMapList.end(); node++) - { - if ((*node)->value == value) - return *node; - else - continue; - } - - return NULL; -} diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 924c4da..9ab7bc6 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -14,30 +14,15 @@ #include "VideoEncoderDef.h" #include "VideoEncoderInterface.h" #include "IntelMetadataBuffer.h" -#include -#include -//#define AUTO_REFERENCE struct SurfaceMap { VASurfaceID surface; MetadataBufferType type; int32_t value; ValueInfo vinfo; + uint32_t index; bool added; -}; - -struct EncodeTask { - VASurfaceID enc_surface; - VASurfaceID ref_surface[2]; - VASurfaceID rec_surface; - VABufferID coded_buffer; - - FrameType type; - int flag; - int64_t timestamp; //corresponding input frame timestamp - uint8_t *in_data; //input buffer data - - bool completed; //if encode task is done complet by HW + SurfaceMap *next; }; class VideoEncoderBase : IVideoEncoder { @@ -49,7 +34,7 @@ public: virtual Encode_Status start(void); virtual void flush(void); virtual Encode_Status stop(void); - virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout); + virtual Encode_Status encode(VideoEncRawBuffer *inBuffer); /* * getOutput can be called several time for a frame (such as first time codec data, and second time others) @@ -57,26 +42,30 @@ public: * If the buffer passed to encoded is not big enough, this API call will return ENCODE_BUFFER_TOO_SMALL * and caller should provide a big enough buffer and call again */ - virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout); + virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams); virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams); virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig); virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig); + virtual Encode_Status getMaxOutSize(uint32_t *maxSize); + virtual Encode_Status getStatistics(VideoStatistics *videoStat); protected: - virtual Encode_Status sendEncodeCommand(EncodeTask* task) = 0; + virtual Encode_Status sendEncodeCommand(void) = 0; virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) = 0; virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams) = 0; virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig) = 0; virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) = 0; - virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer) = 0; - virtual Encode_Status updateFrameInfo(EncodeTask* task) ; + Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer); + Encode_Status cleanupForOutput(); + Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer); Encode_Status renderDynamicFrameRate(); Encode_Status 
renderDynamicBitrate(); Encode_Status renderHrd(); + void setKeyFrame(int32_t keyFramePeriod); private: void setDefaultParams(void); @@ -89,29 +78,41 @@ private: Encode_Status surfaceMappingForKbufHandle(SurfaceMap *map); Encode_Status surfaceMappingForMalloc(SurfaceMap *map); Encode_Status surfaceMapping(SurfaceMap *map); - SurfaceMap *findSurfaceMapByValue(int32_t value); - Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid); - void PrepareFrameInfo(EncodeTask* task); - Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer); - Encode_Status cleanupForOutput(); - Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer); + SurfaceMap *appendSurfaceMap( + SurfaceMap *head, SurfaceMap *map); + SurfaceMap *removeSurfaceMap( + SurfaceMap *head, SurfaceMap *map); + SurfaceMap *findSurfaceMapByValue( + SurfaceMap *head, int32_t value); + + Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer); + void updateProperities(void); + void decideFrameType(void); +// Encode_Status uploadDataToSurface(VideoEncRawBuffer *inBuffer); + Encode_Status syncEncode(VideoEncRawBuffer *inBuffer); + Encode_Status asyncEncode(VideoEncRawBuffer *inBuffer); protected: bool mInitialized; - bool mStarted; VADisplay mVADisplay; VAContextID mVAContext; VAConfigID mVAConfig; VAEntrypoint mVAEntrypoint; + VACodedBufferSegment *mCurSegment; + uint32_t mOffsetInSeg; + uint32_t mTotalSize; + uint32_t mTotalSizeCopied; VideoParamsCommon mComParams; VideoParamsHRD mHrdParam; VideoParamsStoreMetaDataInBuffers mStoreMetaDataInBuffers; + bool mForceKeyFrame; bool mNewHeader; + bool mFirstFrame; bool mRenderMaxSliceSize; //Max Slice Size bool mRenderQP; @@ -120,36 +121,50 @@ protected: bool mRenderBitRate; bool mRenderHrd; + VABufferID mVACodedBuffer[2]; + VABufferID mLastCodedBuffer; + VABufferID mOutCodedBuffer; VABufferID mSeqParamBuf; VABufferID mRcParamBuf; VABufferID mFrameRateParamBuf; VABufferID mPicParamBuf; VABufferID mSliceParamBuf; - android::List mSrcSurfaceMapList; //all mapped surface info list from input buffer - android::List mEncodeTaskList; //all encode tasks list - android::List mVACodedBufferList; //all available codedbuffer list + VASurfaceID *mSurfaces; + uint32_t mSurfaceCnt; + + SurfaceMap *mSrcSurfaceMapList; + + //for new design + VASurfaceID mCurSurface; //current input surface to be encoded + VASurfaceID mRefSurface; //reference surface + VASurfaceID mRecSurface; //reconstructed surface + VASurfaceID mLastSurface; //last surface - VASurfaceID mRefSurface; //reference surface, only used in base - VASurfaceID mRecSurface; //reconstructed surface, only used in base + VideoEncRawBuffer *mLastInputRawBuffer; + + uint32_t mEncodedFrames; uint32_t mFrameNum; uint32_t mCodedBufSize; + uint32_t mCodedBufIndex; + bool mPicSkipped; + bool mIsIntra; bool mSliceSizeOverflow; + bool mCodedBufferMapped; + bool mDataCopiedOut; + bool mKeyFrame; - //Current Outputting task - EncodeTask *mCurOutputTask; + int32_t mInitCheck; - //Current outputting CodedBuffer status - VABufferID mOutCodedBuffer; - bool mCodedBufferMapped; - VACodedBufferSegment *mCurSegment; - uint32_t mOffsetInSeg; - uint32_t mTotalSize; - uint32_t mTotalSizeCopied; - android::Mutex mCodedBuffer_Lock, mEncodeTask_Lock; - android::Condition mCodedBuffer_Cond, mEncodeTask_Cond; +#ifdef VIDEO_ENC_STATISTICS_ENABLE + VideoStatistics mVideoStat; +#endif - bool mFrameSkipped; + // Constants + static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE = 2; + static const uint32_t 
VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE = 8; }; + + #endif /* __VIDEO_ENCODER_BASE_H__ */ diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index f5174aa..b9feca2 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -32,9 +32,7 @@ enum { ENCODE_SUCCESS = 0, ENCODE_ALREADY_INIT = 1, ENCODE_SLICESIZE_OVERFLOW = 2, - ENCODE_BUFFER_TOO_SMALL = 3, // The buffer passed to encode is too small to contain encoded data - ENCODE_DEVICE_BUSY = 4, - ENCODE_DATA_NOT_READY = 5, + ENCODE_BUFFER_TOO_SMALL = 3 // The buffer passed to encode is too small to contain encoded data }; typedef enum { @@ -44,7 +42,6 @@ typedef enum { OUTPUT_ONE_NAL = 4, OUTPUT_ONE_NAL_WITHOUT_STARTCODE = 8, OUTPUT_LENGTH_PREFIXED = 16, - OUTPUT_CODEDBUFFER = 32, OUTPUT_BUFFER_LAST } VideoOutputFormat; @@ -105,23 +102,6 @@ enum VideoBufferSharingMode { BUFFER_LAST }; -typedef enum { - FTYPE_UNKNOWN = 0, // Unknown - FTYPE_I = 1, // General I-frame type - FTYPE_P = 2, // General P-frame type - FTYPE_B = 3, // General B-frame type - FTYPE_SI = 4, // H.263 SI-frame type - FTYPE_SP = 5, // H.263 SP-frame type - FTYPE_EI = 6, // H.264 EI-frame type - FTYPE_EP = 7, // H.264 EP-frame type - FTYPE_S = 8, // MPEG-4 S-frame type - FTYPE_IDR = 9, // IDR-frame type -}FrameType; - -//function call mode -#define FUNC_BLOCK 0xFFFFFFFF -#define FUNC_NONBLOCK 0 - // Output buffer flag #define ENCODE_BUFFERFLAG_ENDOFFRAME 0x00000001 #define ENCODE_BUFFERFLAG_PARTIALFRAME 0x00000002 @@ -130,8 +110,6 @@ typedef enum { #define ENCODE_BUFFERFLAG_DATACORRUPT 0x00000010 #define ENCODE_BUFFERFLAG_DATAINVALID 0x00000020 #define ENCODE_BUFFERFLAG_SLICEOVERFOLOW 0x00000040 -#define ENCODE_BUFFERFLAG_ENDOFSTREAM 0x00000080 -#define ENCODE_BUFFERFLAG_NSTOPFRAME 0x00000100 typedef struct { uint8_t *data; @@ -140,18 +118,14 @@ typedef struct { uint32_t remainingSize; int flag; //Key frame, Codec Data etc VideoOutputFormat format; //output format - int64_t timeStamp; //reserved - FrameType type; - uint8_t *in_data; //indicate corresponding input data + uint64_t timeStamp; //reserved } VideoEncOutputBuffer; typedef struct { uint8_t *data; uint32_t size; bool bufAvailable; //To indicate whether this buffer can be reused - int64_t timeStamp; //reserved - FrameType type; //frame type expected to be encoded - int flag; // flag to indicate buffer property + uint64_t timeStamp; //reserved } VideoEncRawBuffer; struct VideoEncSurfaceBuffer { @@ -330,8 +304,6 @@ struct VideoParamsCommon : VideoParamConfigSet { AirParams airParams; uint32_t disableDeblocking; bool syncEncMode; - //CodedBuffer properties - uint32_t codedBufNum; VideoParamsCommon() { type = VideoParamsTypeCommon; @@ -355,7 +327,6 @@ struct VideoParamsCommon : VideoParamConfigSet { this->airParams = other.airParams; this->disableDeblocking = other.disableDeblocking; this->syncEncMode = other.syncEncMode; - this->codedBufNum = other.codedBufNum; return *this; } }; @@ -365,23 +336,10 @@ struct VideoParamsAVC : VideoParamConfigSet { uint8_t VUIFlag; int32_t maxSliceSize; uint32_t idrInterval; - uint32_t ipPeriod; - uint32_t refFrames; SliceNum sliceNum; AVCDelimiterType delimiterType; Cropping crop; SamplingAspectRatio SAR; - uint32_t refIdx10ActiveMinus1; - uint32_t refIdx11ActiveMinus1; - bool bFrameMBsOnly; - bool bMBAFF; - bool bEntropyCodingCABAC; - bool bWeightedPPrediction; - uint32_t weightedBipredicitonMode; - bool bConstIpred ; - bool bDirect8x8Inference; - bool bDirectSpatialTemporal; - uint32_t cabacInitIdc; VideoParamsAVC() { type = 
VideoParamsTypeAVC; @@ -396,8 +354,6 @@ struct VideoParamsAVC : VideoParamConfigSet { this->VUIFlag = other.VUIFlag; this->maxSliceSize = other.maxSliceSize; this->idrInterval = other.idrInterval; - this->ipPeriod = other.ipPeriod; - this->refFrames = other.refFrames; this->sliceNum = other.sliceNum; this->delimiterType = other.delimiterType; this->crop.LeftOffset = other.crop.LeftOffset; @@ -407,17 +363,6 @@ struct VideoParamsAVC : VideoParamConfigSet { this->SAR.SarWidth = other.SAR.SarWidth; this->SAR.SarHeight = other.SAR.SarHeight; - this->refIdx10ActiveMinus1 = other.refIdx10ActiveMinus1; - this->refIdx11ActiveMinus1 = other.refIdx11ActiveMinus1; - this->bFrameMBsOnly = other.bFrameMBsOnly; - this->bMBAFF = other.bMBAFF; - this->bEntropyCodingCABAC = other.bEntropyCodingCABAC; - this->bWeightedPPrediction = other.bWeightedPPrediction; - this->weightedBipredicitonMode = other.weightedBipredicitonMode; - this->bConstIpred = other.bConstIpred; - this->bDirect8x8Inference = other.bDirect8x8Inference; - this->bDirectSpatialTemporal = other.bDirectSpatialTemporal; - this->cabacInitIdc = other.cabacInitIdc; return *this; } }; @@ -505,7 +450,6 @@ struct VideoConfigAVCIntraPeriod : VideoParamConfigSet { uint32_t idrInterval; //How many Intra frame will have a IDR frame uint32_t intraPeriod; - uint32_t ipPeriod; }; struct VideoConfigNALSize : VideoParamConfigSet { @@ -568,4 +512,14 @@ struct VideoConfigSliceNum : VideoParamConfigSet { SliceNum sliceNum; }; +typedef struct { + uint32_t total_frames; + uint32_t skipped_frames; + uint32_t average_encode_time; + uint32_t max_encode_time; + uint32_t max_encode_frame; + uint32_t min_encode_time; + uint32_t min_encode_frame; +}VideoStatistics; + #endif /* __VIDEO_ENCODER_DEF_H__ */ diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp index b9b9c99..7371d7a 100644 --- a/videoencoder/VideoEncoderH263.cpp +++ b/videoencoder/VideoEncoderH263.cpp @@ -16,20 +16,20 @@ VideoEncoderH263::VideoEncoderH263() { mComParams.profile = (VAProfile)PROFILE_H263BASELINE; } -Encode_Status VideoEncoderH263::sendEncodeCommand(EncodeTask *task) { +Encode_Status VideoEncoderH263::sendEncodeCommand(void) { Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n"); if (mFrameNum == 0) { - ret = renderSequenceParams(task); + ret = renderSequenceParams(); CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); } - ret = renderPictureParams(task); + ret = renderPictureParams(); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); - ret = renderSliceParams(task); + ret = renderSliceParams(); CHECK_ENCODE_STATUS_RETURN("renderSliceParams"); LOG_V( "End\n"); @@ -37,7 +37,7 @@ Encode_Status VideoEncoderH263::sendEncodeCommand(EncodeTask *task) { } -Encode_Status VideoEncoderH263::renderSequenceParams(EncodeTask *task) { +Encode_Status VideoEncoderH263::renderSequenceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncSequenceParameterBufferH263 h263SequenceParam = {}; @@ -78,7 +78,7 @@ Encode_Status VideoEncoderH263::renderSequenceParams(EncodeTask *task) { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderH263::renderPictureParams(EncodeTask *task) { +Encode_Status VideoEncoderH263::renderPictureParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncPictureParameterBufferH263 h263PictureParams = {}; @@ -86,18 +86,18 @@ Encode_Status VideoEncoderH263::renderPictureParams(EncodeTask *task) { LOG_V( "Begin\n\n"); // set picture params for HW - h263PictureParams.reference_picture = task->ref_surface[0]; - h263PictureParams.reconstructed_picture = 
task->rec_surface; - h263PictureParams.coded_buf = task->coded_buffer; + h263PictureParams.reference_picture = mRefSurface; + h263PictureParams.reconstructed_picture = mRecSurface; + h263PictureParams.coded_buf = mVACodedBuffer [mCodedBufIndex]; h263PictureParams.picture_width = mComParams.resolution.width; h263PictureParams.picture_height = mComParams.resolution.height; - h263PictureParams.picture_type = (task->type == FTYPE_I) ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; + h263PictureParams.picture_type = mIsIntra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; LOG_V("======h263 picture params======\n"); LOG_I( "reference_picture = 0x%08x\n", h263PictureParams.reference_picture); LOG_I( "reconstructed_picture = 0x%08x\n", h263PictureParams.reconstructed_picture); LOG_I( "coded_buf = 0x%08x\n", h263PictureParams.coded_buf); -// LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); + LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); LOG_I( "picture_width = %d\n", h263PictureParams.picture_width); LOG_I( "picture_height = %d\n",h263PictureParams.picture_height); LOG_I( "picture_type = %d\n\n",h263PictureParams.picture_type); @@ -117,7 +117,7 @@ Encode_Status VideoEncoderH263::renderPictureParams(EncodeTask *task) { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderH263::renderSliceParams(EncodeTask *task) { +Encode_Status VideoEncoderH263::renderSliceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; uint32_t sliceHeight; @@ -145,7 +145,7 @@ Encode_Status VideoEncoderH263::renderSliceParams(EncodeTask *task) { sliceParams->start_row_number = 0; // slice height measured in MB sliceParams->slice_height = sliceHeightInMB; - sliceParams->slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0; + sliceParams->slice_flags.bits.is_intra = mIsIntra; sliceParams->slice_flags.bits.disable_deblocking_filter_idc = 0; LOG_V("======h263 slice params======\n"); diff --git a/videoencoder/VideoEncoderH263.h b/videoencoder/VideoEncoderH263.h index a8578dd..2113e2f 100644 --- a/videoencoder/VideoEncoderH263.h +++ b/videoencoder/VideoEncoderH263.h @@ -20,7 +20,7 @@ public: virtual ~VideoEncoderH263() {}; protected: - virtual Encode_Status sendEncodeCommand(EncodeTask *task); + virtual Encode_Status sendEncodeCommand(void); virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) { return ENCODE_SUCCESS; } @@ -33,16 +33,12 @@ protected: virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) { return ENCODE_SUCCESS; } - virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer) { - return ENCODE_NOT_SUPPORTED; - } - //virtual Encode_Status updateFrameInfo(EncodeTask* task); // Local Methods private: - Encode_Status renderSequenceParams(EncodeTask *task); - Encode_Status renderPictureParams(EncodeTask *task); - Encode_Status renderSliceParams(EncodeTask *task); + Encode_Status renderSequenceParams(); + Encode_Status renderPictureParams(); + Encode_Status renderSliceParams(); }; #endif /* __VIDEO_ENCODER_H263_H__ */ diff --git a/videoencoder/VideoEncoderInterface.h b/videoencoder/VideoEncoderInterface.h index da1c6ec..243e4a1 100644 --- a/videoencoder/VideoEncoderInterface.h +++ b/videoencoder/VideoEncoderInterface.h @@ -17,13 +17,14 @@ public: virtual Encode_Status start(void) = 0; virtual Encode_Status stop(void) = 0; virtual void flush(void) = 0; - virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout = FUNC_BLOCK) = 0; - virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout = FUNC_BLOCK) = 0; + 
virtual Encode_Status encode(VideoEncRawBuffer *inBuffer) = 0; + virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer) = 0; virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams) = 0; virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams) = 0; virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig) = 0; virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig) = 0; virtual Encode_Status getMaxOutSize(uint32_t *maxSize) = 0; + virtual Encode_Status getStatistics(VideoStatistics *videoStat) = 0; }; #endif /* VIDEO_ENCODER_INTERFACE_H_ */ diff --git a/videoencoder/VideoEncoderLog.h b/videoencoder/VideoEncoderLog.h index 0f0eeae..49c34df 100644 --- a/videoencoder/VideoEncoderLog.h +++ b/videoencoder/VideoEncoderLog.h @@ -23,8 +23,7 @@ __android_log_print(level, comp, "%s():%d: "format, \ __FUNCTION__, __LINE__, ##__VA_ARGS__) -#define VIDEO_ENC_LOG_ENABLE -#if 1 +#if 1 #ifdef VIDEO_ENC_LOG_ENABLE #define LOG_V(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__) #define LOG_I(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_INFO, format, ##__VA_ARGS__) diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp index 1bedc63..6e0263b 100644 --- a/videoencoder/VideoEncoderMP4.cpp +++ b/videoencoder/VideoEncoderMP4.cpp @@ -85,15 +85,30 @@ Encode_Status VideoEncoderMP4::outputConfigData( return ret; } -Encode_Status VideoEncoderMP4::getExtFormatOutput(VideoEncOutputBuffer *outBuffer) { + +Encode_Status VideoEncoderMP4::getOutput(VideoEncOutputBuffer *outBuffer) { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; + bool useLocalBuffer = false; LOG_V("Begin\n"); CHECK_NULL_RETURN_IFFAIL(outBuffer); + setKeyFrame(mComParams.intraPeriod); + + // prepare for output, map the coded buffer + ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer); + CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); + switch (outBuffer->format) { + case OUTPUT_EVERYTHING: + case OUTPUT_FRAME_DATA: { + // Output whatever we have + ret = VideoEncoderBase::outputAllData(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); + break; + } case OUTPUT_CODEC_DATA: { // Output the codec config data ret = outputConfigData(outBuffer); @@ -108,14 +123,32 @@ Encode_Status VideoEncoderMP4::getExtFormatOutput(VideoEncOutputBuffer *outBuffe LOG_I("out size is = %d\n", outBuffer->dataSize); + // cleanup, unmap the coded buffer if all + // data has been copied out + ret = VideoEncoderBase::cleanupForOutput(); CLEAN_UP: + if (ret < ENCODE_SUCCESS) { + if (outBuffer->data && (useLocalBuffer == true)) { + delete[] outBuffer->data; + outBuffer->data = NULL; + useLocalBuffer = false; + } + + // error happens, unmap the buffer + if (mCodedBufferMapped) { + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + mCodedBufferMapped = false; + mCurSegment = NULL; + } + } LOG_V("End\n"); return ret; } -Encode_Status VideoEncoderMP4::renderSequenceParams(EncodeTask *task) { + +Encode_Status VideoEncoderMP4::renderSequenceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncSequenceParameterBufferMPEG4 mp4SequenceParams = {}; @@ -169,26 +202,26 @@ Encode_Status VideoEncoderMP4::renderSequenceParams(EncodeTask *task) { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderMP4::renderPictureParams(EncodeTask *task) { +Encode_Status VideoEncoderMP4::renderPictureParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncPictureParameterBufferMPEG4 mpeg4_pic_param = {}; LOG_V( 
"Begin\n\n"); // set picture params for HW - mpeg4_pic_param.reference_picture = task->ref_surface[0]; - mpeg4_pic_param.reconstructed_picture = task->rec_surface; - mpeg4_pic_param.coded_buf = task->coded_buffer; + mpeg4_pic_param.reference_picture = mRefSurface; + mpeg4_pic_param.reconstructed_picture = mRecSurface; + mpeg4_pic_param.coded_buf = mVACodedBuffer[mCodedBufIndex]; mpeg4_pic_param.picture_width = mComParams.resolution.width; mpeg4_pic_param.picture_height = mComParams.resolution.height; mpeg4_pic_param.vop_time_increment= mFrameNum; - mpeg4_pic_param.picture_type = (task->type == FTYPE_I) ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; + mpeg4_pic_param.picture_type = mIsIntra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; LOG_V("======mpeg4 picture params======\n"); LOG_I("reference_picture = 0x%08x\n", mpeg4_pic_param.reference_picture); LOG_I("reconstructed_picture = 0x%08x\n", mpeg4_pic_param.reconstructed_picture); LOG_I("coded_buf = 0x%08x\n", mpeg4_pic_param.coded_buf); -// LOG_I("coded_buf_index = %d\n", mCodedBufIndex); + LOG_I("coded_buf_index = %d\n", mCodedBufIndex); LOG_I("picture_width = %d\n", mpeg4_pic_param.picture_width); LOG_I("picture_height = %d\n", mpeg4_pic_param.picture_height); LOG_I("vop_time_increment = %d\n", mpeg4_pic_param.vop_time_increment); @@ -209,7 +242,7 @@ Encode_Status VideoEncoderMP4::renderPictureParams(EncodeTask *task) { } -Encode_Status VideoEncoderMP4::renderSliceParams(EncodeTask *task) { +Encode_Status VideoEncoderMP4::renderSliceParams() { VAStatus vaStatus = VA_STATUS_SUCCESS; uint32_t sliceHeight; @@ -226,7 +259,7 @@ Encode_Status VideoEncoderMP4::renderSliceParams(EncodeTask *task) { sliceParams.start_row_number = 0; sliceParams.slice_height = sliceHeightInMB; - sliceParams.slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0; + sliceParams.slice_flags.bits.is_intra = mIsIntra; sliceParams.slice_flags.bits.disable_deblocking_filter_idc = 0; LOG_V("======mpeg4 slice params======\n"); @@ -249,19 +282,19 @@ Encode_Status VideoEncoderMP4::renderSliceParams(EncodeTask *task) { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderMP4::sendEncodeCommand(EncodeTask *task) { +Encode_Status VideoEncoderMP4::sendEncodeCommand(void) { Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n"); if (mFrameNum == 0) { - ret = renderSequenceParams(task); + ret = renderSequenceParams(); CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); } - ret = renderPictureParams(task); + ret = renderPictureParams(); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); - ret = renderSliceParams(task); + ret = renderSliceParams(); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); LOG_V( "End\n"); diff --git a/videoencoder/VideoEncoderMP4.h b/videoencoder/VideoEncoderMP4.h index 7e579c0..b453023 100644 --- a/videoencoder/VideoEncoderMP4.h +++ b/videoencoder/VideoEncoderMP4.h @@ -19,10 +19,11 @@ public: VideoEncoderMP4(); virtual ~VideoEncoderMP4() {}; -// Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); + Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); protected: - virtual Encode_Status sendEncodeCommand(EncodeTask *task); + virtual Encode_Status sendEncodeCommand(void); + virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) { return ENCODE_SUCCESS; } @@ -35,16 +36,13 @@ protected: virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) { return ENCODE_SUCCESS; } - virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer); - //virtual Encode_Status 
updateFrameInfo(EncodeTask* task); - // Local Methods private: Encode_Status getHeaderPos(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize); Encode_Status outputConfigData(VideoEncOutputBuffer *outBuffer); - Encode_Status renderSequenceParams(EncodeTask *task); - Encode_Status renderPictureParams(EncodeTask *task); - Encode_Status renderSliceParams(EncodeTask *task); + Encode_Status renderSequenceParams(); + Encode_Status renderPictureParams(); + Encode_Status renderSliceParams(); unsigned char mProfileLevelIndication; uint32_t mFixedVOPTimeIncrement; -- cgit v1.2.3 From e283b41d35bddb5ec4fb6c8a9a328c9f492d1dfd Mon Sep 17 00:00:00 2001 From: ywan171 Date: Tue, 8 Jan 2013 16:25:26 +0800 Subject: libmix: optimize video decoder MW to avoid unnecessary delay when playback starts BZ: 78808 optimization: (1) if AVC ES is baseline profile, low delay mode is automatically enabled (2) if DPB size of AVC ES is smaller than OUTPUT_WINDOW_SIZE, DPB will be used as output window size Change-Id: Ic14d535c294f4fa043848f0a9e295368248a6fc1 Signed-off-by: ywan171 Reviewed-on: http://android.intel.com:8080/85536 Reviewed-by: Qiu, Junhai Reviewed-by: Feng, Wei Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: Liang, Dan Reviewed-by: cactus Tested-by: cactus --- videodecoder/VideoDecoderAVC.cpp | 9 ++++++++- videodecoder/VideoDecoderBase.cpp | 8 +------- videodecoder/VideoDecoderBase.h | 8 +++++++- 3 files changed, 16 insertions(+), 9 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 8863738..15328bd 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -605,13 +605,20 @@ Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) { vaProfile = VAProfileH264ConstrainedBaseline; } } + + VideoDecoderBase::setOutputWindowSize(DPBSize); + // for 1080p, limit the total surface to 19, according the hardware limitation // change the max surface number from 19->10 to workaround memory shortage // remove the workaround if(mVideoFormatInfo.height == 1088 && DPBSize + AVC_EXTRA_SURFACE_NUMBER > 19) { DPBSize = 19 - AVC_EXTRA_SURFACE_NUMBER; } - VideoDecoderBase::setOutputWindowSize(DPBSize); + + // for baseline profile, enable low delay mode automatically + if (data->codec_data->profile_idc == 66) { + enableLowDelayMode(true); + } return VideoDecoderBase::setupVA(DPBSize + AVC_EXTRA_SURFACE_NUMBER, vaProfile); } diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index d5a47dd..0dd07ae 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -33,12 +33,6 @@ #define MINIMUM_POC 0x80000000 #define ANDROID_DISPLAY_HANDLE 0x18C34078 -// TODO: check what is the best number. Must be at least 2 to support one backward reference frame. -// Currently set to 8 to support 7 backward reference frames. This value is used for AVC frame reordering only. -// e.g: -// POC: 4P, 8P, 10P, 6B and mNextOutputPOC = 5 -#define OUTPUT_WINDOW_SIZE 8 - VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type) : mDisplay(NULL), mVADisplay(NULL), @@ -442,7 +436,7 @@ VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) { output = p; outputleastpoc = p; } - if (poc == mNextOutputPOC || count == OUTPUT_WINDOW_SIZE) { + if (poc == mNextOutputPOC || count == mOutputWindowSize) { if (output != NULL) { // this indicates two cases: // 1) the next output POC is found. 
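Taken together, the VideoDecoderAVC/VideoDecoderBase hunks above and the header change below cut start-up latency in two ways, condensed here into one illustrative fragment (a paraphrase of the patch, not code that appears verbatim in the tree):

    // In VideoDecoderAVC::startVA(), once the SPS yields the DPB size:
    VideoDecoderBase::setOutputWindowSize(DPBSize);   // the setter now clamps to
                                                      // min(DPBSize, OUTPUT_WINDOW_SIZE)
    if (data->codec_data->profile_idc == 66) {        // baseline profile: no B frames,
        enableLowDelayMode(true);                     // so frames can be output in
    }                                                 // decode order without reordering

With a small DPB the reorder window shrinks to match, and baseline streams bypass reordering entirely, so the first decoded frames reach the display sooner.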
diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 8ebd067..048da3c 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -40,6 +40,11 @@ extern "C" { typedef unsigned int Display; #endif +// TODO: check what is the best number. Must be at least 2 to support one backward reference frame. +// Currently set to 8 to support 7 backward reference frames. This value is used for AVC frame reordering only. +// e.g: +// POC: 4P, 8P, 10P, 6B and mNextOutputPOC = 5 +#define OUTPUT_WINDOW_SIZE 8 class VideoDecoderBase : public IVideoDecoder { public: @@ -144,8 +149,9 @@ private: protected: void ManageReference(bool enable) {mManageReference = enable;} void setOutputMethod(OUTPUT_METHOD method) {mOutputMethod = method;} - void setOutputWindowSize(int32_t size) {mOutputWindowSize = size;} + void setOutputWindowSize(int32_t size) {mOutputWindowSize = (size < OUTPUT_WINDOW_SIZE) ? size : OUTPUT_WINDOW_SIZE;} void querySurfaceRenderStatus(VideoSurfaceBuffer* surface); + void enableLowDelayMode(bool enable) {mLowDelay = enable;} }; -- cgit v1.2.3 From 66a836b0bf8fac0923e800ff5b0d68db2e6800d0 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Mon, 14 Jan 2013 19:24:40 +0800 Subject: libmix: fix klocwork issue in asfparse BZ: 73428 fix klocwork issue in asfparse, initialize the status in parseHeaderExtensionObject Signed-off-by: ywan171 Change-Id: Iebe68b317c4a1a81e375158076247505942bfd02 Reviewed-on: http://android.intel.com:8080/86536 Reviewed-by: Feng, Wei Reviewed-by: Wang, Yi A Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- asfparser/AsfHeaderParser.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asfparser/AsfHeaderParser.cpp b/asfparser/AsfHeaderParser.cpp index 4665d76..795e7ea 100644 --- a/asfparser/AsfHeaderParser.cpp +++ b/asfparser/AsfHeaderParser.cpp @@ -334,7 +334,7 @@ int AsfHeaderParser::onExtendedStreamPropertiesObject(uint8_t *buffer, uint32_t int AsfHeaderParser::parseHeaderExtensionObject(uint8_t* buffer, uint32_t size) { // No empty space, padding, leading, or trailing bytes are allowed in the extention data - int status; + int status = ASF_PARSER_SUCCESS; do { if (size < sizeof(AsfObject)) { return ASF_PARSER_BAD_DATA; -- cgit v1.2.3 From 2cdae0703ee6594e7b18e7f6d2b259cd447f1aa7 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Mon, 14 Jan 2013 15:58:51 +0800 Subject: mix-vbp: refine the parser of h263 to support PLUSPTYPE BZ: 79135 Change-Id: I15e578c2bf6019feaa0762340831cf8a35d1ebdc Signed-off-by: Dan Liang Reviewed-on: http://android.intel.com:8080/86484 Reviewed-by: Feng, Wei Reviewed-by: Chen, Tianmi Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- .../fw/codecs/mp4/parser/viddec_mp4_shortheader.c | 156 +++++++++++---------- 1 file changed, 85 insertions(+), 71 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c index 4125a6c..513c0f1 100644 --- a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c +++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c @@ -81,7 +81,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p getbits = viddec_pm_get_bits(parent, &data, 5); BREAK_GETBITS_REQD_MISSING(getbits, ret); svh->vop_quant = (data & 0x1f); - //zero bit + //cpm getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); if ( 0 != (data &
0x1)) @@ -96,48 +96,94 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p getbits = viddec_pm_get_bits(parent, &data, 3); BREAK_GETBITS_REQD_MISSING(getbits, ret); svh->ufep = (data & 0x7); //ufep - if (svh->ufep == 0x0) - { - DEB("Info: don't support to handle the 0x000 case of Update Full Extended PTYPE\n"); - ret = MP4_STATUS_NOTSUPPORT; - break; - } - else if (svh->ufep == 0x1) + if (svh->ufep == 1 || svh->ufep == 0) { - //source format + //OPPTYPE + if (svh->ufep == 1) + { + //source format + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->source_format = (data & 0x7); + if (svh->source_format < 1 || svh->source_format > 6) + { + DEB("Error: bad value of source_format\n"); + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //optional indicators + getbits = viddec_pm_get_bits(parent, &data, 8); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + optional_indicators_8bits = data; + //reserved zero bits + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if ( 0 != (data & 0x7)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //marker bit + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if ( 1 != (data & 0x1)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //reserved zero bits + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if ( 0 != (data & 0x7)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + } + + //MPPTYPE + //picture coding type getbits = viddec_pm_get_bits(parent, &data, 3); BREAK_GETBITS_REQD_MISSING(getbits, ret); - svh->source_format = (data & 0x7); - if (svh->source_format < 1 || svh->source_format > 6) + svh->picture_coding_type = (data & 0x7); + if (svh->picture_coding_type > 1) { - DEB("Error: bad value of source_format\n"); - ret = MP4_STATUS_PARSE_ERROR; + DEB("Info: only support I and P frames\n"); + ret = MP4_STATUS_NOTSUPPORT; break; } - //optional indicators - getbits = viddec_pm_get_bits(parent, &data, 8); - BREAK_GETBITS_REQD_MISSING(getbits, ret); - optional_indicators_8bits = data; - //reserved zero bits - getbits = viddec_pm_get_bits(parent, &data, 3); + //optional RPR mode + getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if ( 0 != (data & 0x7)) + if ( 0 != (data & 0x1)) { ret = MP4_STATUS_PARSE_ERROR; break; } - //marker bit + //optional PRU mode getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if ( 1 != (data & 0x1)) + if ( 0 != (data & 0x1)) { ret = MP4_STATUS_PARSE_ERROR; break; } + //vop rounding type + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->vop_rounding_type = (data & 0x1); //reserved zero bits - getbits = viddec_pm_get_bits(parent, &data, 3); + getbits = viddec_pm_get_bits(parent, &data, 2); BREAK_GETBITS_REQD_MISSING(getbits, ret); - if ( 0 != (data & 0x7)) + if ( 0 != (data & 0x3)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //marker bit + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if ( 1 != (data & 0x1)) { ret = MP4_STATUS_PARSE_ERROR; break; @@ -149,62 +195,23 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p ret = MP4_STATUS_NOTSUPPORT; break; } - //MPPTYPE - //picture coding type - getbits = viddec_pm_get_bits(parent, &data, 3); - BREAK_GETBITS_REQD_MISSING(getbits, ret); - svh->picture_coding_type 
= (data & 0x7); - if (svh->picture_coding_type > 1) - { - DEB("Info: only support I and P frames\n"); - ret = MP4_STATUS_NOTSUPPORT; - break; - } - //optional RPR mode - getbits = viddec_pm_get_bits(parent, &data, 1); - BREAK_GETBITS_REQD_MISSING(getbits, ret); - if ( 0 != (data & 0x1)) - { - ret = MP4_STATUS_PARSE_ERROR; - break; - } - //optional PRU mode + + //cpm getbits = viddec_pm_get_bits(parent, &data, 1); BREAK_GETBITS_REQD_MISSING(getbits, ret); if ( 0 != (data & 0x1)) { - ret = MP4_STATUS_PARSE_ERROR; - break; - } - //vop rounding type - getbits = viddec_pm_get_bits(parent, &data, 1); - BREAK_GETBITS_REQD_MISSING(getbits, ret); - svh->vop_rounding_type = (data & 0x1); - //reserved zero bits - getbits = viddec_pm_get_bits(parent, &data, 2); - BREAK_GETBITS_REQD_MISSING(getbits, ret); - if ( 0 != (data & 0x3)) - { - ret = MP4_STATUS_PARSE_ERROR; - break; - } - //marker bit - getbits = viddec_pm_get_bits(parent, &data, 1); - BREAK_GETBITS_REQD_MISSING(getbits, ret); - if ( 1 != (data & 0x1)) - { - ret = MP4_STATUS_PARSE_ERROR; + ret = MP4_STATUS_NOTSUPPORT; break; } - //cpm - getbits = viddec_pm_get_bits(parent, &data, 1); - BREAK_GETBITS_REQD_MISSING(getbits, ret); + + //CPFMT if (svh->ufep == 1 && svh->source_format == 6) - { //CPFMT + { //Pixel Aspect Ratio getbits = viddec_pm_get_bits(parent, &data, 4); BREAK_GETBITS_REQD_MISSING(getbits, ret); svh->pixel_aspect_ratio_code = (data & 0xf); - // + //Picture Width Indication getbits = viddec_pm_get_bits(parent, &data, 9); BREAK_GETBITS_REQD_MISSING(getbits, ret); svh->picture_width_indication = (data & 0x1ff); @@ -216,12 +223,19 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p ret = MP4_STATUS_PARSE_ERROR; break; } - // + //Picture Height Indication getbits = viddec_pm_get_bits(parent, &data, 9); BREAK_GETBITS_REQD_MISSING(getbits, ret); svh->picture_height_indication = (data & 0x1ff); + + if (svh->pixel_aspect_ratio_code == 0xf) + { + //EPAR + viddec_pm_get_bits(parent, &data, 16); + } } + //custom PCF if (optional_indicators_8bits & 0x80) { viddec_pm_get_bits(parent, &data, 8); viddec_pm_get_bits(parent, &data, 2); -- cgit v1.2.3 From 63d57cf99d5fe61a013fa09a857873edf118ceb5 Mon Sep 17 00:00:00 2001 From: jiguoliang Date: Wed, 16 Jan 2013 16:17:59 -0500 Subject: initial version for HiP encoding support BZ: 76823 1. Support thread-safe queue operations 2. Support getOutput() for the different codec types 3. Support calling getOutput() multiple times for some output formats 4. Support non-blocking / timeout mode 5. Support EOS 6. Support B frames without reconstructed-frame output (handled in the driver on MRFLD, in libMIX on MFLD) 7. Support baseline/high profile selection 8. Support HiP parameters 9. Support setting the number of CodedBuffers 10. Support automatic frame type detection for both HiP and baseline 11. Add the high profile parameters and change the type of the timestamp 12. Refine the encode/getOutput block/non-block modes with a List container (a usage sketch follows this list) 13. Support automatic reconstructed and reference frame management in the driver; remove the MFLD logic (done) 14. Use the new libva VAEncSliceParameterBufferH264 structure in place of VAEncSliceParameterBuffer (BZ 75766) 15. Refine the naming style and the frame type detection; B frames may or may not affect the frame number within a GOP 16. Refine the slice_type assignment 17. Support frame skip on MFLD 18. Fix the Klocwork issue
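The reworked submit/retrieve flow is easiest to see in the mix_encoder changes below: the test queues the current frame with encode(), drains the previous frame with getOutput(), and makes one extra getOutput() call after the loop to collect the final frame. A condensed sketch of that one-frame pipeline (names taken from the diffs in this patch; loadFrame() and frameCount are hypothetical stand-ins for the test's buffer setup):

    VideoEncRawBuffer in;            // in.data / in.size filled by the caller
    VideoEncOutputBuffer out;        // out.data / out.bufferSize point at a caller buffer
    in.type = FTYPE_UNKNOWN;         // let the encoder's cadence logic pick the type
    in.flag = 0;
    for (int i = 0; i < frameCount; i++) {
        loadFrame(&in, i);                   // hypothetical helper: next raw frame
        gVideoEncoder->encode(&in);          // queues frame i as an EncodeTask
        if (i > 0)
            gVideoEncoder->getOutput(&out);  // drains coded data for frame i-1
    }
    gVideoEncoder->getOutput(&out);          // one more call drains the last frame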
Change-Id: Ifbc230d8d0985e4411ac5b79f04d29a6edcf501d Signed-off-by: jiguoliang Reviewed-on: http://android.intel.com:8080/87040 Reviewed-by: Yuan, Shengquan Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- test/Android.mk | 4 + test/mix_encoder.cpp | 25 +- videoencoder/Android.mk | 3 +- videoencoder/VideoEncoderAVC.cpp | 177 ++++--- videoencoder/VideoEncoderAVC.h | 12 +- videoencoder/VideoEncoderBase.cpp | 970 +++++++++++++++-------------------- videoencoder/VideoEncoderBase.h | 107 ++-- videoencoder/VideoEncoderDef.h | 72 ++- videoencoder/VideoEncoderH263.cpp | 26 +- videoencoder/VideoEncoderH263.h | 12 +- videoencoder/VideoEncoderInterface.h | 5 +- videoencoder/VideoEncoderLog.h | 3 +- videoencoder/VideoEncoderMP4.cpp | 62 +- videoencoder/VideoEncoderMP4.h | 14 +- 14 files changed, 686 insertions(+), 806 deletions(-) diff --git a/test/Android.mk b/test/Android.mk index 2f4d6a8..abded5d 100644 --- a/test/Android.mk +++ b/test/Android.mk @@ -45,6 +45,10 @@ LOCAL_SHARED_LIBRARIES := \ libva-android \ libva-tpi \ libgui \ + libui \ + libutils \ + libcutils \ + libhardware \ libbinder LOCAL_MODULE_TAGS := optional diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index 464b759..4662947 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -257,7 +257,7 @@ Encode_Status SetVideoEncoderParam() { memset(&tmpStoreMetaDataInBuffers,0x00,sizeof(VideoParamsStoreMetaDataInBuffers)); gVideoEncoder->getParameters(&tmpStoreMetaDataInBuffers); gVideoEncoder->setParameters(&tmpStoreMetaDataInBuffers); - +#if 0 VideoParamsUpstreamBuffer tmpVideoParamsUpstreamBuffer; tmpVideoParamsUpstreamBuffer.bufCnt = 0; gVideoEncoder->setParameters(&tmpVideoParamsUpstreamBuffer); @@ -275,7 +275,7 @@ Encode_Status SetVideoEncoderParam() { VideoParamsUsrptrBuffer tmpVideoParamsUsrptrBuffer; tmpVideoParamsUsrptrBuffer.width = 0; gVideoEncoder->getParameters(&tmpVideoParamsUsrptrBuffer); - +#endif //---------------------add for libmix encode code coverage test // VideoEncodeBase.cpp file setConfig && getConfig code coverage test // only for VCM mode @@ -346,6 +346,9 @@ Encode_Status SetVideoEncoderParam() { // for VideoConfigTypeAVCIntraPeriod derivedSetConfig && derivedGetConfig VideoConfigAVCIntraPeriod configAVCIntraPeriod; gVideoEncoder->getConfig(&configAVCIntraPeriod); + configAVCIntraPeriod.ipPeriod = 1; + configAVCIntraPeriod.intraPeriod = 30; + configAVCIntraPeriod.idrInterval = 1; gVideoEncoder->setConfig(&configAVCIntraPeriod); VideoConfigTypeIDRReq tmpVideoConfigTypeIDRReq; gVideoEncoder->setConfig(&tmpVideoConfigTypeIDRReq); @@ -989,30 +992,24 @@ for(int i=0; i<1; i++) InBuf.data = data; InBuf.size = size; InBuf.bufAvailable = true; + InBuf.type = FTYPE_UNKNOWN; + InBuf.flag = 0; ret = gVideoEncoder->encode(&InBuf); CHECK_ENCODE_STATUS("encode"); + if (i > 0) { ret = gVideoEncoder->getOutput(&OutBuf); CHECK_ENCODE_STATUS("getOutput"); - CHECK_ENCODE_STATUS_RETURN("getOutput"); - // printf("OutBuf.dataSize = %d .........\n", OutBuf.dataSize); +// printf("OutBuf.dataSize = %d, flag=0x%08x .........\n", OutBuf.dataSize, OutBuf.flag); fwrite(OutBuf.data, 1, OutBuf.dataSize, file); - + } printf("Encoding %d Frames \r", i+1); fflush(stdout); } + ret = gVideoEncoder->getOutput(&OutBuf); fclose(file); - VideoStatistics stat; - if (gVideoEncoder->getStatistics(&stat) == ENCODE_SUCCESS) - { - printf("\nVideoStatistics\n"); - printf("Encoded %d frames, Skip %d frames, encode time: average( %d us), max( %d us/Frame %d), min( %d us/Frame
%d)\n", \ - stat.total_frames, stat.skipped_frames, stat.average_encode_time, stat.max_encode_time, stat.max_encode_frame, \ - stat.min_encode_time, stat.min_encode_frame ); - } - gVideoEncoder->stop(); releaseVideoEncoder(gVideoEncoder); gVideoEncoder = NULL; diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk index 7c8314a..0b13e7e 100644 --- a/videoencoder/Android.mk +++ b/videoencoder/Android.mk @@ -2,7 +2,6 @@ LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) #VIDEO_ENC_LOG_ENABLE := true -#VIDEO_ENC_STATISTICS_ENABLE := true LOCAL_SRC_FILES := \ VideoEncoderBase.cpp \ @@ -16,11 +15,13 @@ LOCAL_SRC_FILES := \ LOCAL_C_INCLUDES := \ $(LOCAL_PATH) \ $(TARGET_OUT_HEADERS)/libva \ + $(TOPDIR)/frameworks/native/include \ #LOCAL_LDLIBS += -lpthread LOCAL_SHARED_LIBRARIES := \ libcutils \ + libutils \ libva \ libva-android \ libva-tpi \ diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index c4bf805..4c2661a 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -20,6 +20,7 @@ VideoEncoderAVC::VideoEncoderAVC() mVideoParamsAVC.sliceNum.iSliceNum = 2; mVideoParamsAVC.sliceNum.pSliceNum = 2; mVideoParamsAVC.idrInterval = 2; + mVideoParamsAVC.ipPeriod = 1; mVideoParamsAVC.maxSliceSize = 0; mVideoParamsAVC.delimiterType = AVC_DELIMITER_ANNEXB; mSliceNum = 2; @@ -94,6 +95,7 @@ Encode_Status VideoEncoderAVC::derivedSetConfig(VideoParamConfigSet *videoEncCon } mVideoParamsAVC.idrInterval = configAVCIntraPeriod->idrInterval; + mVideoParamsAVC.ipPeriod = configAVCIntraPeriod->ipPeriod; mComParams.intraPeriod = configAVCIntraPeriod->intraPeriod; mNewHeader = true; break; @@ -154,6 +156,7 @@ Encode_Status VideoEncoderAVC:: derivedGetConfig( configAVCIntraPeriod->idrInterval = mVideoParamsAVC.idrInterval; configAVCIntraPeriod->intraPeriod = mComParams.intraPeriod; + configAVCIntraPeriod->ipPeriod = mVideoParamsAVC.ipPeriod; break; } @@ -192,30 +195,67 @@ Encode_Status VideoEncoderAVC:: derivedGetConfig( return ENCODE_SUCCESS; } -Encode_Status VideoEncoderAVC::getOutput(VideoEncOutputBuffer *outBuffer) { - - Encode_Status ret = ENCODE_SUCCESS; - VAStatus vaStatus = VA_STATUS_SUCCESS; - bool useLocalBuffer = false; +Encode_Status VideoEncoderAVC::updateFrameInfo(EncodeTask* task) { uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval; + FrameType frametype; + uint32_t frame_num = mFrameNum; + + if (mVideoParamsAVC.idrInterval != 0) { + if(mVideoParamsAVC.ipPeriod > 1) + frame_num = frame_num % (idrPeroid + 1); + else if(mComParams.intraPeriod != 0) + frame_num = frame_num % idrPeroid ; + } + + if(frame_num ==0){ + frametype = FTYPE_IDR; + }else if(mComParams.intraPeriod ==0) + // only I frame need intraPeriod=idrInterval=ipPeriod=0 + frametype = FTYPE_I; + else if(mVideoParamsAVC.ipPeriod == 1){ // no B frame + if(mComParams.intraPeriod != 0 && (frame_num > 1) &&((frame_num -1)%mComParams.intraPeriod == 0)) + frametype = FTYPE_I; + else + frametype = FTYPE_P; + } else { + if(mComParams.intraPeriod != 0 &&((frame_num-1)%mComParams.intraPeriod == 0)&&(frame_num >mComParams.intraPeriod)) + frametype = FTYPE_I; + else{ + frame_num = frame_num%mComParams.intraPeriod; + if(frame_num == 0) + frametype = FTYPE_B; + else if((frame_num-1)%mVideoParamsAVC.ipPeriod == 0) + frametype = FTYPE_P; + else + frametype = FTYPE_B; + } + } - LOG_V("Begin\n"); - CHECK_NULL_RETURN_IFFAIL(outBuffer); + if (frametype == FTYPE_IDR || frametype == FTYPE_I) + task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; + + if (frametype != task->type) { + 
const char* FrameTypeStr[10] = {"UNKNOWN", "I", "P", "B", "SI", "SP", "EI", "EP", "S", "IDR"}; + if ((uint32_t) task->type < 9) + LOG_V("libMIX thinks it is %s Frame, the input is %s Frame", FrameTypeStr[frametype], FrameTypeStr[task->type]); + else + LOG_V("Wrong Frame type %d, type may not be initialized ?\n", task->type); + } + +//temparily comment out to avoid uninitialize error +// if (task->type == FTYPE_UNKNOWN || (uint32_t) task->type > 9) + task->type = frametype; - setKeyFrame(idrPeroid); + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderAVC::getExtFormatOutput(VideoEncOutputBuffer *outBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; - // prepare for output, map the coded buffer - ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer); - CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); + LOG_V("Begin\n"); switch (outBuffer->format) { - case OUTPUT_EVERYTHING: - case OUTPUT_FRAME_DATA: { - // Output whatever we have - ret = VideoEncoderBase::outputAllData(outBuffer); - CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); - break; - } case OUTPUT_CODEC_DATA: { // Output the codec data ret = outputCodecData(outBuffer); @@ -251,26 +291,10 @@ Encode_Status VideoEncoderAVC::getOutput(VideoEncOutputBuffer *outBuffer) { LOG_I("out size is = %d\n", outBuffer->dataSize); - // cleanup, unmap the coded buffer if all - // data has been copied out - ret = VideoEncoderBase::cleanupForOutput(); CLEAN_UP: - if (ret < ENCODE_SUCCESS) { - if (outBuffer->data && (useLocalBuffer == true)) { - delete[] outBuffer->data; - outBuffer->data = NULL; - useLocalBuffer = false; - } - // error happens, unmap the buffer - if (mCodedBufferMapped) { - vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - mCodedBufferMapped = false; - mCurSegment = NULL; - } - } LOG_V("End\n"); return ret; } @@ -481,7 +505,6 @@ Encode_Status VideoEncoderAVC::outputOneNALU( mOffsetInSeg += (nalSize + nalOffset); outBuffer->dataSize = sizeToBeCopied; outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME; - if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; outBuffer->remainingSize = 0; } else { // if nothing to be copied out, set flag to invalid @@ -500,7 +523,6 @@ Encode_Status VideoEncoderAVC::outputOneNALU( } else { LOG_V("End of stream\n"); outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; - if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; mCurSegment = NULL; } } @@ -554,7 +576,6 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf // so the remainingSize size may larger than the remaining data size outBuffer->remainingSize = mTotalSize - mTotalSizeCopied + 100; outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME; - if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; LOG_E("Buffer size too small\n"); return ENCODE_BUFFER_TOO_SMALL; } @@ -569,7 +590,6 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf outBuffer->dataSize = sizeCopiedHere; outBuffer->remainingSize = 0; outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; - if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; mCurSegment = NULL; break; } @@ -579,7 +599,7 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf return ENCODE_SUCCESS; } -Encode_Status VideoEncoderAVC::sendEncodeCommand(void) { +Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) { Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n"); @@ -592,7 +612,7 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(void) { 
CHECK_ENCODE_STATUS_RETURN("renderHrd"); } - ret = renderSequenceParams(); + ret = renderSequenceParams(task); CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); mNewHeader = false; //Set to require new header filed to false } @@ -628,10 +648,10 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(void) { mRenderFrameRate = false; } - ret = renderPictureParams(); + ret = renderPictureParams(task); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); - ret = renderSliceParams(); + ret = renderSliceParams(task); CHECK_ENCODE_STATUS_RETURN("renderSliceParams"); LOG_V( "End\n"); @@ -745,7 +765,7 @@ int VideoEncoderAVC::calcLevel(int numMbs) { return level; } -Encode_Status VideoEncoderAVC::renderSequenceParams() { +Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncSequenceParameterBufferH264 avcSeqParams = {}; @@ -756,7 +776,6 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { int level; uint32_t frameRateNum = mComParams.frameRate.frameRateNum; uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom; - const char* device_info; LOG_V( "Begin\n\n"); vaStatus = vaCreateBuffer(mVADisplay, mVAContext, @@ -767,7 +786,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { CHECK_VA_STATUS_RETURN("vaCreateBuffer"); vaStatus = vaMapBuffer(mVADisplay, mRcParamBuf, (void **)&miscEncRCParamBuf); CHECK_VA_STATUS_RETURN("vaMapBuffer"); - + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, VAEncMiscParameterBufferType, sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterFrameRate), @@ -776,7 +795,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { CHECK_VA_STATUS_RETURN("vaCreateBuffer"); vaStatus = vaMapBuffer(mVADisplay, mFrameRateParamBuf, (void **)&miscEncFrameRateParamBuf); CHECK_VA_STATUS_RETURN("vaMapBuffer"); - + miscEncRCParamBuf->type = VAEncMiscParameterTypeRateControl; rcMiscParam = (VAEncMiscParameterRateControl *)miscEncRCParamBuf->data; miscEncFrameRateParamBuf->type = VAEncMiscParameterTypeFrameRate; @@ -785,6 +804,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { // avcSeqParams.level_idc = mLevel; avcSeqParams.intra_period = mComParams.intraPeriod; avcSeqParams.intra_idr_period = mVideoParamsAVC.idrInterval; + avcSeqParams.ip_period = mVideoParamsAVC.ipPeriod; avcSeqParams.picture_width_in_mbs = (mComParams.resolution.width + 15) / 16; avcSeqParams.picture_height_in_mbs = (mComParams.resolution.height + 15) / 16; @@ -822,7 +842,9 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { } // This is a temporary fix suggested by Binglin for bad encoding quality issue - avcSeqParams.max_num_ref_frames = 1; // TODO: We need a long term design for this field + avcSeqParams.max_num_ref_frames = 1; + if(avcSeqParams.ip_period > 1) + avcSeqParams.max_num_ref_frames = 2; LOG_V("===h264 sequence params===\n"); LOG_I( "seq_parameter_set_id = %d\n", (uint32_t)avcSeqParams.seq_parameter_set_id); @@ -847,28 +869,27 @@ Encode_Status VideoEncoderAVC::renderSequenceParams() { sizeof(avcSeqParams), 1, &avcSeqParams, &mSeqParamBuf); CHECK_VA_STATUS_RETURN("vaCreateBuffer"); - - vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mRcParamBuf, 1); - CHECK_VA_STATUS_RETURN("vaRenderPicture"); vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mFrameRateParamBuf, 1); CHECK_VA_STATUS_RETURN("vaRenderPicture"); vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1); CHECK_VA_STATUS_RETURN("vaRenderPicture"); + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mRcParamBuf, 1); + 
CHECK_VA_STATUS_RETURN("vaRenderPicture"); return ENCODE_SUCCESS; } -Encode_Status VideoEncoderAVC::renderPictureParams() { +Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncPictureParameterBufferH264 avcPicParams = {}; LOG_V( "Begin\n\n"); // set picture params for HW - avcPicParams.ReferenceFrames[0].picture_id= mRefSurface; - avcPicParams.CurrPic.picture_id= mRecSurface; - avcPicParams.coded_buf = mVACodedBuffer [mCodedBufIndex]; + avcPicParams.ReferenceFrames[0].picture_id= task->ref_surface[0]; + avcPicParams.CurrPic.picture_id= task->rec_surface; + avcPicParams.coded_buf = task->coded_buffer; //avcPicParams.picture_width = mComParams.resolution.width; //avcPicParams.picture_height = mComParams.resolution.height; avcPicParams.last_picture = 0; @@ -876,7 +897,7 @@ Encode_Status VideoEncoderAVC::renderPictureParams() { LOG_V("======h264 picture params======\n"); LOG_I( "reference_picture = 0x%08x\n", avcPicParams.ReferenceFrames[0].picture_id); LOG_I( "reconstructed_picture = 0x%08x\n", avcPicParams.CurrPic.picture_id); - LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); +// LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); LOG_I( "coded_buf = 0x%08x\n", avcPicParams.coded_buf); //LOG_I( "picture_width = %d\n", avcPicParams.picture_width); //LOG_I( "picture_height = %d\n\n", avcPicParams.picture_height); @@ -897,7 +918,7 @@ Encode_Status VideoEncoderAVC::renderPictureParams() { } -Encode_Status VideoEncoderAVC::renderSliceParams() { +Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; @@ -906,8 +927,8 @@ Encode_Status VideoEncoderAVC::renderSliceParams() { uint32_t sliceHeightInMB = 0; uint32_t maxSliceNum = 0; uint32_t minSliceNum = 0; - int actualSliceHeightInMB = 0; - int startRowInMB = 0; + uint32_t actualSliceHeightInMB = 0; + uint32_t startRowInMB = 0; uint32_t modulus = 0; LOG_V( "Begin\n\n"); @@ -915,7 +936,7 @@ Encode_Status VideoEncoderAVC::renderSliceParams() { maxSliceNum = (mComParams.resolution.height + 15) / 16; minSliceNum = 1; - if (mIsIntra) { + if (task->type == FTYPE_I || task->type == FTYPE_IDR) { sliceNum = mVideoParamsAVC.sliceNum.iSliceNum; } else { sliceNum = mVideoParamsAVC.sliceNum.pSliceNum; @@ -938,14 +959,20 @@ Encode_Status VideoEncoderAVC::renderSliceParams() { vaStatus = vaCreateBuffer( mVADisplay, mVAContext, VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), + sizeof(VAEncSliceParameterBufferH264), sliceNum, NULL, &mSliceParamBuf); CHECK_VA_STATUS_RETURN("vaCreateBuffer"); - VAEncSliceParameterBuffer *sliceParams, *currentSlice; + VAEncSliceParameterBufferH264 *sliceParams, *currentSlice; + vaStatus = vaMapBuffer(mVADisplay, mSliceParamBuf, (void **)&sliceParams); CHECK_VA_STATUS_RETURN("vaMapBuffer"); + if(!sliceParams) + return ENCODE_NULL_PTR; + memset(sliceParams, 0 , sizeof(VAEncSliceParameterBufferH264)); + if(!sliceParams) + return ENCODE_NULL_PTR; currentSlice = sliceParams; startRowInMB = 0; @@ -956,25 +983,29 @@ Encode_Status VideoEncoderAVC::renderSliceParams() { actualSliceHeightInMB ++; } - // starting MB row number for this slice - currentSlice->start_row_number = startRowInMB; + // starting MB row number for this slice, suppose macroblock 16x16 + currentSlice->macroblock_address = startRowInMB * mComParams.resolution.width /16; // slice height measured in MB - currentSlice->slice_height = actualSliceHeightInMB; - currentSlice->slice_flags.bits.is_intra = mIsIntra; - 
currentSlice->slice_flags.bits.disable_deblocking_filter_idc - = mComParams.disableDeblocking; + currentSlice->num_macroblocks = actualSliceHeightInMB * mComParams.resolution.width /16; + if(task->type == FTYPE_I||task->type == FTYPE_IDR) + currentSlice->slice_type = 2; + else if(task->type == FTYPE_P) + currentSlice->slice_type = 0; + else if(task->type == FTYPE_B) + currentSlice->slice_type = 1; + currentSlice->disable_deblocking_filter_idc = mComParams.disableDeblocking; // This is a temporary fix suggested by Binglin for bad encoding quality issue // TODO: We need a long term design for this field - currentSlice->slice_flags.bits.uses_long_term_ref = 0; - currentSlice->slice_flags.bits.is_long_term_ref = 0; + //currentSlice->slice_flags.bits.uses_long_term_ref = 0; + //currentSlice->slice_flags.bits.is_long_term_ref = 0; LOG_V("======AVC slice params======\n"); LOG_I( "slice_index = %d\n", (int) sliceIndex); - LOG_I( "start_row_number = %d\n", (int) currentSlice->start_row_number); - LOG_I( "slice_height_in_mb = %d\n", (int) currentSlice->slice_height); - LOG_I( "slice.is_intra = %d\n", (int) currentSlice->slice_flags.bits.is_intra); - LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) currentSlice->slice_flags.bits.disable_deblocking_filter_idc); + LOG_I( "macroblock_address = %d\n", (int) currentSlice->macroblock_address); + LOG_I( "slice_height_in_mb = %d\n", (int) currentSlice->num_macroblocks); + LOG_I( "slice.type = %d\n", (int) currentSlice->slice_type); + LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) currentSlice->disable_deblocking_filter_idc); startRowInMB += actualSliceHeightInMB; } diff --git a/videoencoder/VideoEncoderAVC.h b/videoencoder/VideoEncoderAVC.h index b57ef67..1248a3e 100644 --- a/videoencoder/VideoEncoderAVC.h +++ b/videoencoder/VideoEncoderAVC.h @@ -18,7 +18,6 @@ public: ~VideoEncoderAVC() {}; virtual Encode_Status start(); - virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams); virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams); @@ -27,8 +26,9 @@ public: protected: - virtual Encode_Status sendEncodeCommand(void); - + virtual Encode_Status sendEncodeCommand(EncodeTask *task); + virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer); + virtual Encode_Status updateFrameInfo(EncodeTask* task); private: // Local Methods @@ -40,9 +40,9 @@ private: Encode_Status renderMaxSliceSize(); Encode_Status renderAIR(); - Encode_Status renderSequenceParams(); - Encode_Status renderPictureParams(); - Encode_Status renderSliceParams(); + Encode_Status renderSequenceParams(EncodeTask *task); + Encode_Status renderPictureParams(EncodeTask *task); + Encode_Status renderSliceParams(EncodeTask *task); int calcLevel(int numMbs); public: diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 55012d7..83126c6 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -12,7 +12,6 @@ #include #include -#undef DUMP_SRC_DATA // To dump source data // API declaration extern "C" { VAStatus vaLockSurface(VADisplay dpy, @@ -33,48 +32,36 @@ VAStatus vaUnlockSurface(VADisplay dpy, ); } VideoEncoderBase::VideoEncoderBase() - :mInitialized(false) + :mInitialized(true) + ,mStarted(false) ,mVADisplay(NULL) - ,mVAContext(0) - ,mVAConfig(0) + ,mVAContext(VA_INVALID_ID) + ,mVAConfig(VA_INVALID_ID) ,mVAEntrypoint(VAEntrypointEncSlice) - ,mCurSegment(NULL) - ,mOffsetInSeg(0) - ,mTotalSize(0) - 
,mTotalSizeCopied(0)
- ,mForceKeyFrame(false)
+ ,mCodedBufSize(0)
 ,mNewHeader(false)
- ,mFirstFrame (true)
+ //,mAutoReference(17 /*VAConfigAttribEncAutoReference*/)
 ,mRenderMaxSliceSize(false)
 ,mRenderQP (false)
 ,mRenderAIR(false)
 ,mRenderFrameRate(false)
 ,mRenderBitRate(false)
 ,mRenderHrd(false)
- ,mLastCodedBuffer(0)
- ,mOutCodedBuffer(0)
 ,mSeqParamBuf(0)
 ,mPicParamBuf(0)
 ,mSliceParamBuf(0)
- ,mSurfaces(NULL)
- ,mSurfaceCnt(0)
- ,mSrcSurfaceMapList(NULL)
- ,mCurSurface(VA_INVALID_SURFACE)
 ,mRefSurface(VA_INVALID_SURFACE)
 ,mRecSurface(VA_INVALID_SURFACE)
- ,mLastSurface(VA_INVALID_SURFACE)
- ,mLastInputRawBuffer(NULL)
- ,mEncodedFrames(0)
 ,mFrameNum(0)
- ,mCodedBufSize(0)
- ,mCodedBufIndex(0)
- ,mPicSkipped(false)
- ,mIsIntra(true)
 ,mSliceSizeOverflow(false)
+ ,mCurOutputTask(NULL)
+ ,mOutCodedBuffer(0)
 ,mCodedBufferMapped(false)
- ,mDataCopiedOut(false)
- ,mKeyFrame(true)
- ,mInitCheck(true) {
+ ,mCurSegment(NULL)
+ ,mOffsetInSeg(0)
+ ,mTotalSize(0)
+ ,mTotalSizeCopied(0)
+ ,mFrameSkipped(false) {
 VAStatus vaStatus = VA_STATUS_SUCCESS;
 // here the display can be any value, use following one
@@ -84,8 +71,6 @@ VideoEncoderBase::VideoEncoderBase()
 int minorVersion = -1;
 setDefaultParams();
- mVACodedBuffer [0] = 0;
- mVACodedBuffer [1] = 0;
 LOG_V("vaGetDisplay \n");
 mVADisplay = vaGetDisplay(&display);
@@ -97,19 +82,17 @@ VideoEncoderBase::VideoEncoderBase()
 LOG_V("vaInitialize \n");
 if (vaStatus != VA_STATUS_SUCCESS) {
 LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus);
- mInitCheck = false;
+ mInitialized = false;
 }
-#ifdef VIDEO_ENC_STATISTICS_ENABLE
- memset(&mVideoStat, 0, sizeof(VideoStatistics));
- mVideoStat.min_encode_time = 0xFFFFFFFF;
-#endif
-
 }
 VideoEncoderBase::~VideoEncoderBase() {
 VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ stop();
+
 vaStatus = vaTerminate(mVADisplay);
 LOG_V( "vaTerminate\n");
 if (vaStatus != VA_STATUS_SUCCESS) {
@@ -123,32 +106,24 @@ Encode_Status VideoEncoderBase::start() {
 Encode_Status ret = ENCODE_SUCCESS;
 VAStatus vaStatus = VA_STATUS_SUCCESS;
- VASurfaceID surfaces[2];
- int32_t index = -1;
- SurfaceMap *map = mSrcSurfaceMapList;
- uint32_t stride_aligned = 0;
- uint32_t height_aligned = 0;
- VAConfigAttrib vaAttrib[2];
- uint32_t maxSize = 0;
+ if (!mInitialized) {
+ LOGE("Encoder initialization failed, cannot start");
+ return ENCODE_DRIVER_FAIL;
+ }
- if (mInitialized) {
+ if (mStarted) {
 LOG_V("Encoder has been started\n");
 return ENCODE_ALREADY_INIT;
 }
- if (!mInitCheck) {
- LOGE("Encoder Initialize fail can not start");
- return ENCODE_DRIVER_FAIL;
- }
-
+ VAConfigAttrib vaAttrib[2];
 vaAttrib[0].type = VAConfigAttribRTFormat;
 vaAttrib[1].type = VAConfigAttribRateControl;
 vaAttrib[0].value = VA_RT_FORMAT_YUV420;
 vaAttrib[1].value = mComParams.rcMode;
 LOG_V( "======VA Configuration======\n");
- LOG_I( "profile = %d\n", mComParams.profile);
 LOG_I( "mVAEntrypoint = %d\n", mVAEntrypoint);
 LOG_I( "vaAttrib[0].type = %d\n", vaAttrib[0].type);
@@ -161,10 +136,9 @@ Encode_Status VideoEncoderBase::start() {
 vaStatus = vaCreateConfig(
 mVADisplay, mComParams.profile,
 mVAEntrypoint,
 &vaAttrib[0], 2, &(mVAConfig));
- CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateConfig");
+ CHECK_VA_STATUS_RETURN("vaCreateConfig");
 if (mComParams.rcMode == VA_RC_VCM) {
-
 // Following three features are only enabled in VCM mode
 mRenderMaxSliceSize = true;
 mRenderAIR = true;
@@ -173,10 +147,10 @@ Encode_Status VideoEncoderBase::start() {
 LOG_V( "======VA Create Surfaces for Rec/Ref frames ======\n");
+ VASurfaceID surfaces[2];
 VASurfaceAttributeTPI attribute_tpi;
-
- stride_aligned = 
((mComParams.resolution.width + 15) / 16 ) * 16;
- height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16;
+ uint32_t stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16;
+ uint32_t height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16;
 attribute_tpi.size = stride_aligned * height_aligned * 3 / 2;
 attribute_tpi.luma_stride = stride_aligned;
@@ -188,415 +162,356 @@ Encode_Status VideoEncoderBase::start() {
 attribute_tpi.pixel_format = VA_FOURCC_NV12;
 attribute_tpi.type = VAExternalMemoryNULL;
- vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned,
- VA_RT_FORMAT_YUV420, 2, surfaces, &attribute_tpi);
- CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute");
+#ifndef AUTO_REFERENCE
+ vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned,
+ VA_RT_FORMAT_YUV420, 2, surfaces, &attribute_tpi);
+ CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute");
+ mRefSurface = surfaces[0];
+ mRecSurface = surfaces[1];
+#endif
- mRefSurface = surfaces[0];
- mRecSurface = surfaces[1];
+ //Prepare all Surfaces to be added into Context
+ uint32_t contextSurfaceCnt;
+#ifndef AUTO_REFERENCE
+ contextSurfaceCnt = 2 + mSrcSurfaceMapList.size();
+#else
+ contextSurfaceCnt = mSrcSurfaceMapList.size();
+#endif
- //count total surface id already allocated
- mSurfaceCnt = 2;
-
- while(map) {
- mSurfaceCnt ++;
- map = map->next;
- }
+ VASurfaceID *contextSurfaces = new VASurfaceID[contextSurfaceCnt];
+ int32_t index = -1;
+ android::List<SurfaceMap *>::iterator map_node;
- mSurfaces = new VASurfaceID[mSurfaceCnt];
- map = mSrcSurfaceMapList;
- while(map) {
- mSurfaces[++index] = map->surface;
- map->added = true;
- map = map->next;
+ for(map_node = mSrcSurfaceMapList.begin(); map_node != mSrcSurfaceMapList.end(); map_node++)
+ {
+ contextSurfaces[++index] = (*map_node)->surface;
+ (*map_node)->added = true;
 }
- mSurfaces[++index] = mRefSurface;
- mSurfaces[++index] = mRecSurface;
+
+#ifndef AUTO_REFERENCE
+ contextSurfaces[++index] = mRefSurface;
+ contextSurfaces[++index] = mRecSurface;
+#endif
 //Initialize and save the VA context ID
 LOG_V( "vaCreateContext\n");
-
 vaStatus = vaCreateContext(mVADisplay, mVAConfig,
 mComParams.resolution.width,
 mComParams.resolution.height,
- 0, mSurfaces, mSurfaceCnt,
+ 0, contextSurfaces, contextSurfaceCnt,
 &(mVAContext));
- CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateContext");
- LOG_I("Created libva context width %d, height %d\n",
- mComParams.resolution.width, mComParams.resolution.height);
+ delete [] contextSurfaces;
- ret = getMaxOutSize(&maxSize);
- CHECK_ENCODE_STATUS_CLEANUP("getMaxOutSize");
+ CHECK_VA_STATUS_RETURN("vaCreateContext");
- // Create coded buffer for output
- vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
- VAEncCodedBufferType,
- mCodedBufSize,
- 1, NULL,
- &(mVACodedBuffer[0]));
-
- CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateBuffer::VAEncCodedBufferType");
-
- // Create coded buffer for output
- vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
- VAEncCodedBufferType,
- mCodedBufSize,
- 1, NULL,
- &(mVACodedBuffer[1]));
+ LOG_I("Successfully created libva context, width %d, height %d\n",
+ mComParams.resolution.width, mComParams.resolution.height);
- CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateBuffer::VAEncCodedBufferType");
+ uint32_t maxSize = 0;
+ ret = getMaxOutSize(&maxSize);
+ CHECK_ENCODE_STATUS_RETURN("getMaxOutSize");
- mFirstFrame = true;
+ // Create CodedBuffer for output
+ VABufferID VACodedBuffer;
-CLEAN_UP:
+ for(uint32_t i = 0; i 0)
+ if(NO_ERROR != mEncodeTask_Cond.waitRelative(mCodedBuffer_Lock, 
1000000*timeout)){
+ mCodedBuffer_Lock.unlock();
+ LOG_E("Timed out waiting for a coded buffer.\n");
+ return ENCODE_DEVICE_BUSY;
+ }
+ else {//Nonblock
+ mCodedBuffer_Lock.unlock();
+ LOG_E("Coded buffer is not ready now.\n");
+ return ENCODE_DEVICE_BUSY;
+ }
 }
-#ifdef VIDEO_ENC_STATISTICS_ENABLE
- struct timespec ts2;
- clock_gettime(CLOCK_MONOTONIC, &ts2);
-
- uint32_t encode_time = (ts2.tv_sec - ts1.tv_sec) * 1000000 + (ts2.tv_nsec - ts1.tv_nsec) / 1000;
- if (encode_time > mVideoStat.max_encode_time) {
- mVideoStat.max_encode_time = encode_time;
- mVideoStat.max_encode_frame = mFrameNum;
- }
+ if(mVACodedBufferList.empty()){
+ mCodedBuffer_Lock.unlock();
+ return ENCODE_DEVICE_BUSY;
+ }
+ VABufferID coded_buf = (VABufferID) *(mVACodedBufferList.begin());
+ mVACodedBufferList.erase(mVACodedBufferList.begin());
+ mCodedBuffer_Lock.unlock();
+
+ LOG_V("CodedBuffer ID 0x%08x\n", coded_buf);
+
+ //All resources are ready, start to assemble EncodeTask
+ EncodeTask* task = new EncodeTask();
+
+ task->completed = false;
+ task->enc_surface = sid;
+ task->coded_buffer = coded_buf;
+ task->timestamp = inBuffer->timeStamp;
+ task->in_data = inBuffer->data;
+
+ //Setup frame info, like flag (SYNCFRAME), frame number, type etc.
+ task->type = inBuffer->type;
+ task->flag = inBuffer->flag;
+ PrepareFrameInfo(task);
+
+#ifndef AUTO_REFERENCE
+ //Setup ref/rec frames
+ //TODO: B frame support; temporarily reuse the P frame logic
+ switch (inBuffer->type) {
+ case FTYPE_UNKNOWN:
+ case FTYPE_IDR:
+ case FTYPE_I:
+ case FTYPE_P:
+ {
+ if(!mFrameSkipped) {
+ VASurfaceID tmpSurface = mRecSurface;
+ mRecSurface = mRefSurface;
+ mRefSurface = tmpSurface;
+ }
+
+ task->ref_surface[0] = mRefSurface;
+ task->ref_surface[1] = VA_INVALID_SURFACE;
+ task->rec_surface = mRecSurface;
- if (encode_time < mVideoStat.min_encode_time) {
- mVideoStat.min_encode_time = encode_time;
- mVideoStat.min_encode_frame = mFrameNum;
- }
-
- mVideoStat.average_encode_time += encode_time;
+ break;
+ }
+ case FTYPE_B:
+ default:
+ LOG_V("Unexpected frame type, B frames are not supported in this mode\n");
+ ret = ENCODE_NOT_SUPPORTED;
+ goto CLEAN_UP;
+ }
+#else
+ task->ref_surface[0] = VA_INVALID_SURFACE;
+ task->ref_surface[1] = VA_INVALID_SURFACE;
+ task->rec_surface = VA_INVALID_SURFACE;
#endif
- return status;
-}
-
-Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) {
-
- Encode_Status ret = ENCODE_SUCCESS;
- VAStatus vaStatus = VA_STATUS_SUCCESS;
- uint8_t *buf = NULL;
-
- inBuffer->bufAvailable = false;
- if (mNewHeader) mFrameNum = 0;
-
- // current we use one surface for source data,
- // one for reference and one for reconstructed
- decideFrameType();
- ret = manageSrcSurface(inBuffer);
- CHECK_ENCODE_STATUS_RETURN("manageSrcSurface");
-
- // Start encoding process
- LOG_V( "vaBeginPicture\n");
- LOG_I( "mVAContext = 0x%08x\n",(uint32_t) mVAContext);
- LOG_I( "Surface = 0x%08x\n",(uint32_t) mCurSurface);
- LOG_I( "mVADisplay = 0x%08x\n",(uint32_t)mVADisplay);
+ //======Start Encoding, add task to list======
+ LOG_V("Start Encoding vaSurface=0x%08x\n", task->enc_surface);
-#ifdef DUMP_SRC_DATA
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, task->enc_surface);
+ CHECK_VA_STATUS_GOTO_CLEANUP("vaBeginPicture");
- if (mBufferMode == BUFFER_SHARING_SURFACE && mFirstFrame){
-
- FILE *fp = fopen("/data/data/dump_encoder.yuv", "wb");
- VAImage image;
- uint8_t *usrptr = NULL;
- uint32_t stride = 0;
- uint32_t frameSize = 0;
-
- vaStatus = vaDeriveImage(mVADisplay, mCurSurface, &image);
- 
CHECK_VA_STATUS_RETURN("vaDeriveImage"); - - LOG_V( "vaDeriveImage Done\n"); - - frameSize = image.data_size; - stride = image.pitches[0]; - - LOG_I("Source Surface/Image information --- start ---- :"); - LOG_I("surface = 0x%08x\n",(uint32_t)mCurFrame->surface); - LOG_I("image->pitches[0] = %d\n", image.pitches[0]); - LOG_I("image->pitches[1] = %d\n", image.pitches[1]); - LOG_I("image->offsets[0] = %d\n", image.offsets[0]); - LOG_I("image->offsets[1] = %d\n", image.offsets[1]); - LOG_I("image->num_planes = %d\n", image.num_planes); - LOG_I("image->width = %d\n", image.width); - LOG_I("image->height = %d\n", image.height); - LOG_I ("frameSize= %d\n", image.data_size); - LOG_I("Source Surface/Image information ----end ----"); - - vaStatus = vaMapBuffer(mVADisplay, image.buf, (void **) &usrptr); - CHECK_VA_STATUS_RETURN("vaMapBuffer"); - - fwrite(usrptr, frameSize, 1, fp); - fflush(fp); - fclose(fp); - - vaStatus = vaUnmapBuffer(mVADisplay, image.buf); - CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); - - vaStatus = vaDestroyImage(mVADisplay, image.image_id); - CHECK_VA_STATUS_RETURN("vaDestroyImage"); - } -#endif - - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface); - CHECK_VA_STATUS_RETURN("vaBeginPicture"); - - ret = sendEncodeCommand(); - CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand"); + ret = sendEncodeCommand(task); + CHECK_ENCODE_STATUS_CLEANUP("sendEncodeCommand"); vaStatus = vaEndPicture(mVADisplay, mVAContext); - CHECK_VA_STATUS_RETURN("vaEndPicture"); - - LOG_V( "vaEndPicture\n"); - - if (mFirstFrame) { - updateProperities(); - decideFrameType(); - } - - LOG_I ("vaSyncSurface ID = 0x%08x\n", mLastSurface); - vaStatus = vaSyncSurface(mVADisplay, mLastSurface); - if (vaStatus != VA_STATUS_SUCCESS) { - LOG_W( "Failed vaSyncSurface\n"); - } - - mOutCodedBuffer = mLastCodedBuffer; + CHECK_VA_STATUS_GOTO_CLEANUP("vaEndPicture"); - // Need map buffer before calling query surface below to get - // the right skip frame flag for current frame - // It is a requirement of video driver - vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); - vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + LOG_V("Add Task %p into Encode Task list\n", task); + mEncodeTask_Lock.lock(); + mEncodeTaskList.push_back(task); + mEncodeTask_Cond.signal(); + mEncodeTask_Lock.unlock(); - if (mFirstFrame) { - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface); - CHECK_VA_STATUS_RETURN("vaBeginPicture"); - - ret = sendEncodeCommand(); - CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand"); - - vaStatus = vaEndPicture(mVADisplay, mVAContext); - CHECK_VA_STATUS_RETURN("vaEndPicture"); - - mKeyFrame = true; - } - - // Query the status of last surface to check if its next frame is skipped - VASurfaceStatus vaSurfaceStatus; - vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastSurface, &vaSurfaceStatus); - CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); - - mPicSkipped = vaSurfaceStatus & VASurfaceSkipped; + mFrameNum ++; -#ifdef VIDEO_ENC_STATISTICS_ENABLE - if (mPicSkipped) - mVideoStat.skipped_frames ++; -#endif + LOG_V("encode return Success\n"); - mLastSurface = VA_INVALID_SURFACE; - updateProperities(); - mCurSurface = VA_INVALID_SURFACE; + return ENCODE_SUCCESS; - if (mLastInputRawBuffer) mLastInputRawBuffer->bufAvailable = true; +CLEAN_UP: - LOG_V("ref the current inBuffer\n"); + delete task; + mCodedBuffer_Lock.lock(); + mVACodedBufferList.push_back(coded_buf); //push to CodedBuffer pool again since it is not used + mCodedBuffer_Cond.signal(); + mCodedBuffer_Lock.unlock(); - 
mLastInputRawBuffer = inBuffer;
- mFirstFrame = false;
+ LOG_V("encode return error=%x\n", ret);
- return ENCODE_SUCCESS;
+ return ret;
 }
-Encode_Status VideoEncoderBase::syncEncode(VideoEncRawBuffer *inBuffer) {
+/*
+ 1. First check whether one task is already outputting data; if so, continue with it, otherwise try to get one from the list.
+ 2. getOutput() supports three modes: block, non-block, and block with timeout. If the task is not completed yet,
+ sync the surface first, then start outputting data.
+ 3. mCurOutputTask records the task getOutput() is working on, so that a later call resumes the same task instead of
+ popping a new one after a failure in non-block or block-with-timeout mode.
+ 4. Once all output data has been copied out, mCurOutputTask is reset to NULL.
+*/
+Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout) {
 Encode_Status ret = ENCODE_SUCCESS;
 VAStatus vaStatus = VA_STATUS_SUCCESS;
- uint8_t *buf = NULL;
- VASurfaceID tmpSurface = VA_INVALID_SURFACE;
-
- inBuffer->bufAvailable = false;
- if (mNewHeader) mFrameNum = 0;
-
- // current we use one surface for source data,
- // one for reference and one for reconstructed
- decideFrameType();
- ret = manageSrcSurface(inBuffer);
- CHECK_ENCODE_STATUS_RETURN("manageSrcSurface");
-
- vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface);
- CHECK_VA_STATUS_RETURN("vaBeginPicture");
+ bool useLocalBuffer = false;
- ret = sendEncodeCommand();
- CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand");
+ CHECK_NULL_RETURN_IFFAIL(outBuffer);
- vaStatus = vaEndPicture(mVADisplay, mVAContext);
- CHECK_VA_STATUS_RETURN("vaEndPicture");
+ if (mCurOutputTask == NULL) {
+ mEncodeTask_Lock.lock();
+ if(mEncodeTaskList.empty()) {
+ LOG_V("getOutput CurrentTask is NULL\n");
+ if(timeout == FUNC_BLOCK) {
+ LOG_V("waiting for task....\n");
+ mEncodeTask_Cond.wait(mEncodeTask_Lock);
+ } else if (timeout > 0) {
+ LOG_V("waiting for task in %u ms....\n", timeout);
+ if(NO_ERROR != mEncodeTask_Cond.waitRelative(mEncodeTask_Lock, 1000000*timeout)) {
+ mEncodeTask_Lock.unlock();
+ LOG_E("Timed out waiting for an encode task.\n");
+ return ENCODE_DATA_NOT_READY;
+ }
+ } else {//Nonblock
+ mEncodeTask_Lock.unlock();
+ return ENCODE_DATA_NOT_READY;
+ }
+ }
- LOG_I ("vaSyncSurface ID = 0x%08x\n", mCurSurface);
- vaStatus = vaSyncSurface(mVADisplay, mCurSurface);
- if (vaStatus != VA_STATUS_SUCCESS) {
- LOG_W( "Failed vaSyncSurface\n");
+ if(mEncodeTaskList.empty()){
+ mEncodeTask_Lock.unlock();
+ return ENCODE_DATA_NOT_READY;
+ }
+ mCurOutputTask = *(mEncodeTaskList.begin());
+ mEncodeTaskList.erase(mEncodeTaskList.begin());
+ mEncodeTask_Lock.unlock();
 }
- mOutCodedBuffer = mVACodedBuffer[mCodedBufIndex];
-
- // Need map buffer before calling query surface below to get
- // the right skip frame flag for current frame
- // It is a requirement of video driver
- vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf);
- vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
-
- mPicSkipped = false;
- if (!mFirstFrame) {
- // Query the status of last surface to check if its next frame is skipped
+ //sync/query/wait task if not completed
+ if (mCurOutputTask->completed == false) {
+ uint8_t *buf = NULL;
 VASurfaceStatus vaSurfaceStatus;
- vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastSurface, &vaSurfaceStatus);
- CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus");
- mPicSkipped = vaSurfaceStatus & VASurfaceSkipped;
- }
- mLastSurface = mCurSurface;
- mCurSurface = VA_INVALID_SURFACE;
+ if (timeout == FUNC_BLOCK) {
+ //block mode: sync the surface directly, then output data
- mEncodedFrames ++;
- mFrameNum ++;
+ LOG_I 
("block mode, vaSyncSurface ID = 0x%08x\n", mCurOutputTask->enc_surface); + vaStatus = vaSyncSurface(mVADisplay, mCurOutputTask->enc_surface); + CHECK_VA_STATUS_GOTO_CLEANUP("vaSyncSurface"); - if (!mPicSkipped) { - tmpSurface = mRecSurface; - mRecSurface = mRefSurface; - mRefSurface = tmpSurface; - } + mOutCodedBuffer = mCurOutputTask->coded_buffer; -#ifdef VIDEO_ENC_STATISTICS_ENABLE - if (mPicSkipped) - mVideoStat.skipped_frames ++; -#endif + // Check frame skip + // Need map buffer before calling query surface below to get the right skip frame flag for current frame + // It is a requirement of video driver + vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - inBuffer->bufAvailable = true; - return ENCODE_SUCCESS; -} + vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus); + CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); + mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped; -void VideoEncoderBase::setKeyFrame(int32_t keyFramePeriod) { + mCurOutputTask->completed = true; - // For first getOutput async mode, the mFrameNum already increased to 2, and of course is key frame - // frame 0 is already encoded and will be outputed here - // frame 1 is encoding now, frame 2 will be sent to encoder for next encode() call - if (!mComParams.syncEncMode) { - if (mFrameNum > 2) { - if (keyFramePeriod != 0 && - (((mFrameNum - 2) % keyFramePeriod) == 0)) { - mKeyFrame = true; - } else { - mKeyFrame = false; - } - } else if (mFrameNum == 2) { - mKeyFrame = true; - } - } else { - if (mFrameNum > 1) { - if (keyFramePeriod != 0 && - (((mFrameNum - 1) % keyFramePeriod) == 0)) { - mKeyFrame = true; - } else { - mKeyFrame = false; + } else { + //For both block with timeout and non-block mode, query surface, if ready, output data + LOG_I ("non-block mode, vaQuerySurfaceStatus ID = 0x%08x\n", mCurOutputTask->enc_surface); + + vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus); + if (vaSurfaceStatus & VASurfaceReady) { + mOutCodedBuffer = mCurOutputTask->coded_buffer; + mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped; + mCurOutputTask->completed = true; + //if need to call SyncSurface again ? 
+ + } else {//not ready yet + ret = ENCODE_DATA_NOT_READY; + goto CLEAN_UP; } - } else if (mFrameNum == 1) { - mKeyFrame = true; - } - } -} - -Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer) { - Encode_Status ret = ENCODE_SUCCESS; - VAStatus vaStatus = VA_STATUS_SUCCESS; - bool useLocalBuffer = false; - - CHECK_NULL_RETURN_IFFAIL(outBuffer); - - LOG_V("Begin\n"); + } - if (outBuffer->format != OUTPUT_EVERYTHING && outBuffer->format != OUTPUT_FRAME_DATA) { - LOG_E("Output buffer mode not supported\n"); - goto CLEAN_UP; } - setKeyFrame(mComParams.intraPeriod); - + //start to output data ret = prepareForOutput(outBuffer, &useLocalBuffer); CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); - ret = outputAllData(outBuffer); - CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); + //copy all flags to outBuffer + outBuffer->flag = mCurOutputTask->flag; + outBuffer->type = mCurOutputTask->type; + outBuffer->timeStamp = mCurOutputTask->timestamp; + + if (outBuffer->format == OUTPUT_EVERYTHING || outBuffer->format == OUTPUT_FRAME_DATA) { + ret = outputAllData(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); + }else { + ret = getExtFormatOutput(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("getExtFormatOutput"); + } LOG_I("out size for this getOutput call = %d\n", outBuffer->dataSize); ret = cleanupForOutput(); CHECK_ENCODE_STATUS_CLEANUP("cleanupForOutput"); + LOG_V("getOutput return Success, Frame skip is %d\n", mFrameSkipped); + + return ENCODE_SUCCESS; + CLEAN_UP: - if (ret < ENCODE_SUCCESS) { - if (outBuffer->data && (useLocalBuffer == true)) { - delete[] outBuffer->data; - outBuffer->data = NULL; - useLocalBuffer = false; - } + if (outBuffer->data && (useLocalBuffer == true)) { + delete[] outBuffer->data; + outBuffer->data = NULL; + useLocalBuffer = false; + } - if (mCodedBufferMapped) { - vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - mCodedBufferMapped = false; - mCurSegment = NULL; - } + if (mCodedBufferMapped) { + vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + mCodedBufferMapped = false; + mCurSegment = NULL; } - LOG_V("End\n"); + delete mCurOutputTask; + mCurOutputTask = NULL; + mCodedBuffer_Lock.lock(); + mVACodedBufferList.push_back(mOutCodedBuffer); + mCodedBuffer_Cond.signal(); + mCodedBuffer_Lock.unlock(); + + LOG_V("getOutput return error=%x\n", ret); return ret; } - void VideoEncoderBase::flush() { LOG_V( "Begin\n"); // reset the properities - mEncodedFrames = 0; mFrameNum = 0; - mPicSkipped = false; - mIsIntra = true; LOG_V( "end\n"); } @@ -605,58 +520,68 @@ Encode_Status VideoEncoderBase::stop() { VAStatus vaStatus = VA_STATUS_SUCCESS; Encode_Status ret = ENCODE_SUCCESS; - SurfaceMap *map = NULL; LOG_V( "Begin\n"); - if (mSurfaces) { - delete [] mSurfaces; - mSurfaces = NULL; - } - // It is possible that above pointers have been allocated - // before we set mInitialized to true - if (!mInitialized) { + // before we set mStarted to true + if (!mStarted) { LOG_V("Encoder has been stopped\n"); return ENCODE_SUCCESS; } - LOG_V( "vaDestroyContext\n"); - vaStatus = vaDestroyContext(mVADisplay, mVAContext); - CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext"); + mCodedBuffer_Lock.lock(); + mVACodedBufferList.clear(); + mCodedBuffer_Lock.unlock(); + mCodedBuffer_Cond.broadcast(); - LOG_V( "vaDestroyConfig\n"); - vaStatus = vaDestroyConfig(mVADisplay, mVAConfig); - CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig"); + //Delete all uncompleted tasks + mEncodeTask_Lock.lock(); + while(! 
mEncodeTaskList.empty())
+ {
+ delete *mEncodeTaskList.begin();
+ mEncodeTaskList.erase(mEncodeTaskList.begin());
+ }
+ mEncodeTask_Lock.unlock();
+ mEncodeTask_Cond.broadcast();
- // Release Src Surface Buffer Map
+ //Release Src Surface Buffer Map, destroy surface manually since it is not added into context
 LOG_V( "Rlease Src Surface Map\n");
-
- map = mSrcSurfaceMapList;
- while(map) {
- if (! map->added) {
- //destroy surface by itself
- LOG_V( "Rlease Src Surface Buffer not added into vaContext\n");
- vaDestroySurfaces(mVADisplay, &map->surface, 1);
+ while(! mSrcSurfaceMapList.empty())
+ {
+ if (! (*mSrcSurfaceMapList.begin())->added) {
+ LOG_V( "Release the Src Surface Buffer not added into vaContext\n");
+ vaDestroySurfaces(mVADisplay, &((*mSrcSurfaceMapList.begin())->surface), 1);
 }
- SurfaceMap *tmp = map;
- map = map->next;
- delete tmp;
+ delete (*mSrcSurfaceMapList.begin());
+ mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin());
+ }
+
+ LOG_V( "vaDestroyContext\n");
+ if (mVAContext != VA_INVALID_ID) {
+ vaStatus = vaDestroyContext(mVADisplay, mVAContext);
+ CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext");
+ }
+
+ LOG_V( "vaDestroyConfig\n");
+ if (mVAConfig != VA_INVALID_ID) {
+ vaStatus = vaDestroyConfig(mVADisplay, mVAConfig);
+ CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig");
 }
CLEAN_UP:
- mInitialized = false;
-#ifdef VIDEO_ENC_STATISTICS_ENABLE
- LOG_V("Encoder Statistics:\n");
- LOG_V(" %d frames Encoded, %d frames Skipped\n", mEncodedFrames, mVideoStat.skipped_frames);
- LOG_V(" Encode time: Average(%d us), Max(%d us @Frame No.%d), Min(%d us @Frame No.%d)\n", \
- mVideoStat.average_encode_time / mEncodedFrames, mVideoStat.max_encode_time, \
- mVideoStat.max_encode_frame, mVideoStat.min_encode_time, mVideoStat.min_encode_frame);
+ mStarted = false;
+ mSliceSizeOverflow = false;
+ mCurOutputTask = NULL;
+ mOutCodedBuffer = 0;
+ mCodedBufferMapped = false;
+ mCurSegment = NULL;
+ mOffsetInSeg = 0;
+ mTotalSize = 0;
+ mTotalSizeCopied = 0;
+ mFrameSkipped = false;
- memset(&mVideoStat, 0, sizeof(VideoStatistics));
- mVideoStat.min_encode_time = 0xFFFFFFFF;
-#endif
 LOG_V( "end\n");
 return ret;
}
@@ -721,6 +646,9 @@ Encode_Status VideoEncoderBase::prepareForOutput(
 outBuffer->flag = 0;
 if (mSliceSizeOverflow) outBuffer->flag |= ENCODE_BUFFERFLAG_SLICEOVERFOLOW;
+ if (!mCurSegment)
+ return ENCODE_FAIL;
+
 if (mCurSegment->size < mOffsetInSeg) {
 LOG_E("mCurSegment->size < mOffsetInSeg\n");
 return ENCODE_FAIL;
@@ -753,13 +681,23 @@ Encode_Status VideoEncoderBase::cleanupForOutput() {
 vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
 CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
 mCodedBufferMapped = false;
+ mTotalSize = 0;
+ mOffsetInSeg = 0;
+ mTotalSizeCopied = 0;
+
+ delete mCurOutputTask;
+ mCurOutputTask = NULL;
+ mCodedBuffer_Lock.lock();
+ mVACodedBufferList.push_back(mOutCodedBuffer);
+ mCodedBuffer_Cond.signal();
+ mCodedBuffer_Lock.unlock();
+
+ LOG_V("All data has been output, returning CodedBuffer 0x%08x to pool\n", mOutCodedBuffer);
 }
 return ENCODE_SUCCESS;
}
-
-Encode_Status VideoEncoderBase::outputAllData(
- VideoEncOutputBuffer *outBuffer) {
+Encode_Status VideoEncoderBase::outputAllData(VideoEncOutputBuffer *outBuffer) {
 // Data size been copied for every single call
 uint32_t sizeCopiedHere = 0;
@@ -794,7 +732,6 @@ Encode_Status VideoEncoderBase::outputAllData(
 outBuffer->dataSize = outBuffer->bufferSize;
 outBuffer->remainingSize = mTotalSize - mTotalSizeCopied;
 outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
- if (mKeyFrame) outBuffer->flag |= 
ENCODE_BUFFERFLAG_SYNCFRAME; return ENCODE_BUFFER_TOO_SMALL; } @@ -802,7 +739,6 @@ Encode_Status VideoEncoderBase::outputAllData( outBuffer->dataSize = sizeCopiedHere; outBuffer->remainingSize = 0; outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; - if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; mCurSegment = NULL; return ENCODE_SUCCESS; } @@ -838,6 +774,7 @@ void VideoEncoderBase::setDefaultParams() { mComParams.airParams.airAuto = 1; mComParams.disableDeblocking = 2; mComParams.syncEncMode = false; + mComParams.codedBufNum = 2; mHrdParam.bufferSize = 0; mHrdParam.initBufferFullness = 0; @@ -852,7 +789,7 @@ Encode_Status VideoEncoderBase::setParameters( CHECK_NULL_RETURN_IFFAIL(videoEncParams); LOG_I("Config type = %d\n", (int)videoEncParams->type); - if (mInitialized) { + if (mStarted) { LOG_E("Encoder has been initialized, should use setConfig to change configurations\n"); return ENCODE_ALREADY_INIT; } @@ -862,10 +799,11 @@ Encode_Status VideoEncoderBase::setParameters( VideoParamsCommon *paramsCommon = reinterpret_cast (videoEncParams); - if (paramsCommon->size != sizeof (VideoParamsCommon)) { return ENCODE_INVALID_PARAMS; } + if(paramsCommon->codedBufNum < 2) + paramsCommon->codedBufNum =2; mComParams = *paramsCommon; break; } @@ -1029,7 +967,7 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { // workaround #if 0 - if (!mInitialized) { + if (!mStarted) { LOG_E("Encoder has not initialized yet, can't call setConfig\n"); return ENCODE_NOT_INIT; } @@ -1200,51 +1138,29 @@ Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) { return ret; } -void VideoEncoderBase:: decideFrameType () { - - LOG_I( "mEncodedFrames = %d\n", mEncodedFrames); - LOG_I( "mFrameNum = %d\n", mFrameNum); - LOG_I( "mIsIntra = %d\n", mIsIntra); - - // determine the picture type - if (mComParams.intraPeriod == 0) { - if (mFrameNum == 0) - mIsIntra = true; - else - mIsIntra = false; - } else if ((mFrameNum % mComParams.intraPeriod) == 0) { - mIsIntra = true; - } else { - mIsIntra = false; - } +void VideoEncoderBase:: PrepareFrameInfo (EncodeTask* task) { + if (mNewHeader) mFrameNum = 0; + LOG_I( "mFrameNum = %d ", mFrameNum); - LOG_I( "mIsIntra = %d\n",mIsIntra); + updateFrameInfo(task) ; } +Encode_Status VideoEncoderBase:: updateFrameInfo (EncodeTask* task) { -void VideoEncoderBase:: updateProperities () { - - VASurfaceID tmp = VA_INVALID_SURFACE; - LOG_V( "Begin\n"); - - mEncodedFrames ++; - mFrameNum ++; - mLastCodedBuffer = mVACodedBuffer[mCodedBufIndex]; - mCodedBufIndex ++; - mCodedBufIndex %=2; + task->type = FTYPE_P; - mLastSurface = mCurSurface; + // determine the picture type + if (mFrameNum == 0) + task->type = FTYPE_I; + if (mComParams.intraPeriod != 0 && ((mFrameNum % mComParams.intraPeriod) == 0)) + task->type = FTYPE_I; - if (!mPicSkipped) { - tmp = mRecSurface; - mRecSurface = mRefSurface; - mRefSurface = tmp; - } + if (task->type == FTYPE_I) + task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; - LOG_V( "End\n"); + return ENCODE_SUCCESS; } - Encode_Status VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) { uint32_t size = mComParams.resolution.width * mComParams.resolution.height; @@ -1282,25 +1198,6 @@ Encode_Status VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderBase::getStatistics (VideoStatistics *videoStat) { - -#ifdef VIDEO_ENC_STATISTICS_ENABLE - if (videoStat != NULL) { - videoStat->total_frames = mEncodedFrames; - videoStat->skipped_frames = mVideoStat.skipped_frames; - 
videoStat->average_encode_time = mVideoStat.average_encode_time / mEncodedFrames; - videoStat->max_encode_time = mVideoStat.max_encode_time; - videoStat->max_encode_frame = mVideoStat.max_encode_frame; - videoStat->min_encode_time = mVideoStat.min_encode_time; - videoStat->min_encode_frame = mVideoStat.min_encode_frame; - } - - return ENCODE_SUCCESS; -#else - return ENCODE_NOT_SUPPORTED; -#endif -} - Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( uint32_t width, uint32_t height, uint32_t format, uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr) { @@ -1317,7 +1214,7 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( LOG_V( "Begin\n"); // If encode session has been configured, we can not request surface creation anymore - if (mInitialized) { + if (mStarted) { LOG_E( "Already Initialized, can not request VA surface anymore\n"); return ENCODE_WRONG_STATE; } @@ -1387,9 +1284,8 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( map->vinfo.format = VA_FOURCC_NV12; map->vinfo.s3dformat = 0xffffffff; map->added = false; - map->next = NULL; - mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); + mSrcSurfaceMapList.push_back(map); LOG_I( "surface = 0x%08x\n",(uint32_t)surface); LOG_I("image->pitches[0] = %d\n", image.pitches[0]); @@ -1436,7 +1332,7 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS } for(unsigned int i=0; i < upStreamBuffer->bufCnt; i++) { - if (findSurfaceMapByValue(mSrcSurfaceMapList, upStreamBuffer->bufList[i]) != NULL) //already mapped + if (findSurfaceMapByValue(upStreamBuffer->bufList[i]) != NULL) //already mapped continue; //wrap upstream buffer into vaSurface @@ -1456,18 +1352,12 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS } map->vinfo.s3dformat = 0xFFFFFFFF; map->added = false; - map->next = NULL; status = surfaceMapping(map); if (status == ENCODE_SUCCESS) - mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); + mSrcSurfaceMapList.push_back(map); else delete map; - - if (mSrcSurfaceMapList == NULL) { - LOG_E ("mSrcSurfaceMapList should not be NULL now, maybe meet mapping error\n"); - return ENCODE_NO_MEMORY; - } } return status; @@ -1493,7 +1383,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurface(SurfaceMap *map) { VASurfaceAttributeTPI vaSurfaceAttrib; uint32_t buf; - + vaSurfaceAttrib.buffers = &buf; vaStatus = vaLockSurface( @@ -1536,7 +1426,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurface(SurfaceMap *map) { LOG_I("Surface ID created from Kbuf = 0x%08x", surface); map->surface = surface; - + return ret; } @@ -1608,12 +1498,12 @@ Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle(SurfaceMap *map) { uint32_t lumaOffset = 0; uint32_t chromaUOffset = map->vinfo.height * map->vinfo.lumaStride; uint32_t chromaVOffset = chromaUOffset + 1; - + VASurfaceAttributeTPI vaSurfaceAttrib; uint32_t buf; vaSurfaceAttrib.buffers = &buf; - + vaSurfaceAttrib.count = 1; vaSurfaceAttrib.size = map->vinfo.lumaStride * map->vinfo.height * 3 / 2; vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride; @@ -1635,7 +1525,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle(SurfaceMap *map) { LOG_I("Surface ID created from Kbuf = 0x%08x", map->value); map->surface = surface; - + return ret; } @@ -1667,7 +1557,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForCI(SurfaceMap *map) { CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); map->surface = surface; - + return ret; } @@ -1745,9 +1635,9 @@ 
LOG_I("surfaceMapping mode=%d, format=%d, lumaStride=%d, width=%d, height=%d, va return status; } -Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { +Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid) { - Encode_Status ret = ENCODE_SUCCESS; + Encode_Status ret = ENCODE_SUCCESS; MetadataBufferType type; int32_t value; ValueInfo vinfo; @@ -1757,13 +1647,13 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { IntelMetadataBuffer imb; SurfaceMap *map = NULL; - - if (mStoreMetaDataInBuffers.isEnabled) { + + if (mStoreMetaDataInBuffers.isEnabled) { //metadatabuffer mode LOG_I("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); if (imb.UnSerialize(inBuffer->data, inBuffer->size) != IMB_SUCCESS) { //fail to parse buffer - return ENCODE_NO_REQUEST_DATA; + return ENCODE_NO_REQUEST_DATA; } imb.GetType(type); @@ -1772,20 +1662,21 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { //raw mode LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); if (! inBuffer->data || inBuffer->size == 0) { - return ENCODE_NULL_PTR; + return ENCODE_NULL_PTR; } type = MetadataBufferTypeUser; value = (int32_t)inBuffer->data; } - + + //find if mapped - map = findSurfaceMapByValue(mSrcSurfaceMapList, value); + map = (SurfaceMap*) findSurfaceMapByValue(value); - if (map) { + if (map) { //has mapped, get surfaceID directly LOG_I("direct find surface %d from value %x\n", map->surface, value); - mCurSurface = map->surface; + *sid = map->surface; return ret; } @@ -1793,8 +1684,8 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { //if no found from list, then try to map value with parameters LOG_I("not find surface from cache with value %x, start mapping if enough information\n", value); - if (mStoreMetaDataInBuffers.isEnabled) { - + if (mStoreMetaDataInBuffers.isEnabled) { + //if type is MetadataBufferTypeGrallocSource, use default parameters if (type == MetadataBufferTypeGrallocSource) { vinfo.mode = MEM_MODE_GFXHANDLE; @@ -1806,15 +1697,15 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { vinfo.chromStride = mComParams.resolution.width; vinfo.format = VA_FOURCC_NV12; vinfo.s3dformat = 0xFFFFFFFF; - } else { + } else { //get all info mapping needs imb.GetValueInfo(pvinfo); imb.GetExtraValues(extravalues, extravalues_count); } - + } else { - //raw mode + //raw mode vinfo.mode = MEM_MODE_MALLOC; vinfo.handle = 0; vinfo.size = inBuffer->size; @@ -1836,26 +1727,25 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) { map->value = value; memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo)); map->added = false; - map->next = NULL; ret = surfaceMapping(map); if (ret == ENCODE_SUCCESS) { LOG_I("surface mapping success, map value %x into surface %d\n", value, map->surface); - mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map); + mSrcSurfaceMapList.push_back(map); } else { delete map; LOG_E("surface mapping failed, wrong info or meet serious error\n"); return ret; - } + } - mCurSurface = map->surface; + *sid = map->surface; } else { //can't map due to no info LOG_E("surface mapping failed, missing information\n"); return ENCODE_NO_REQUEST_DATA; } - + if (extravalues) { //map more using same ValueInfo for(unsigned int i=0; ivalue = extravalues[i]; memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo)); map->added = false; - map->next = NULL; ret = surfaceMapping(map); 
if (ret == ENCODE_SUCCESS) {
 LOG_I("surface mapping extravalue success, map value %x into surface %d\n", extravalues[i], map->surface);
- mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map);
+ mSrcSurfaceMapList.push_back(map);
 } else {
 delete map;
 map = NULL;
@@ -1877,67 +1766,8 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) {
 }
 }
 }
-
- return ret;
-}
-
-SurfaceMap *VideoEncoderBase::appendSurfaceMap(
- SurfaceMap *head, SurfaceMap *map) {
-
- if (head == NULL) {
- return map;
- }
-
- SurfaceMap *node = head;
- SurfaceMap *tail = NULL;
-
- while (node != NULL) {
- tail = node;
- node = node->next;
- }
- tail->next = map;
-
- return head;
-}
-
-SurfaceMap *VideoEncoderBase::removeSurfaceMap(
- SurfaceMap *head, SurfaceMap *map) {
-
- SurfaceMap *node = head;
- SurfaceMap *tmpNode = NULL;
-
- if (head == map) {
- tmpNode = head->next;
- map->next = NULL;
- return tmpNode;
- }
-
- while (node != NULL) {
- if (node->next == map)
- break;
- node = node->next;
- }
- if (node != NULL) {
- node->next = map->next;
- }
-
- map->next = NULL;
- return head;
-}
-
-SurfaceMap *VideoEncoderBase::findSurfaceMapByValue(
- SurfaceMap *head, int32_t value) {
-
- SurfaceMap *node = head;
-
- while (node != NULL) {
- if (node->value == value)
- break;
- node = node->next;
- }
-
- return node;
+ return ret;
}
Encode_Status VideoEncoderBase::renderDynamicBitrate() {
@@ -2063,3 +1893,17 @@ Encode_Status VideoEncoderBase::renderHrd() {
 return ENCODE_SUCCESS;
}
+
+SurfaceMap *VideoEncoderBase::findSurfaceMapByValue(int32_t value) {
+ android::List<SurfaceMap *>::iterator node;
+
+ for(node = mSrcSurfaceMapList.begin(); node != mSrcSurfaceMapList.end(); node++)
+ {
+ if ((*node)->value == value)
+ return *node;
+ else
+ continue;
+ }
+
+ return NULL;
+}
diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h
index 9ab7bc6..924c4da 100644
--- a/videoencoder/VideoEncoderBase.h
+++ b/videoencoder/VideoEncoderBase.h
@@ -14,15 +14,30 @@
 #include "VideoEncoderDef.h"
 #include "VideoEncoderInterface.h"
 #include "IntelMetadataBuffer.h"
+#include <utils/List.h>
+#include <utils/threads.h>
+//#define AUTO_REFERENCE
 struct SurfaceMap {
 VASurfaceID surface;
 MetadataBufferType type;
 int32_t value;
 ValueInfo vinfo;
- uint32_t index;
 bool added;
- SurfaceMap *next;
+};
+
+struct EncodeTask {
+ VASurfaceID enc_surface;
+ VASurfaceID ref_surface[2];
+ VASurfaceID rec_surface;
+ VABufferID coded_buffer;
+
+ FrameType type;
+ int flag;
+ int64_t timestamp; //corresponding input frame timestamp
+ uint8_t *in_data; //input buffer data
+
+ bool completed; //true once the encode task has been completed by HW
};
class VideoEncoderBase : IVideoEncoder {
@@ -34,7 +49,7 @@ public:
 virtual Encode_Status start(void);
 virtual void flush(void);
 virtual Encode_Status stop(void);
- virtual Encode_Status encode(VideoEncRawBuffer *inBuffer);
+ virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout);
 /*
 * getOutput can be called several time for a frame (such as first time codec data, and second time others)
 * If the buffer passed to encoded is not big enough, this API call will return ENCODE_BUFFER_TOO_SMALL
 * and caller should provide a big enough buffer and call again
 */
- virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer);
+ virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout);
 virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams);
 virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams);
 virtual Encode_Status 
setConfig(VideoParamConfigSet *videoEncConfig);
 virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig);
-
 virtual Encode_Status getMaxOutSize(uint32_t *maxSize);
- virtual Encode_Status getStatistics(VideoStatistics *videoStat);
protected:
- virtual Encode_Status sendEncodeCommand(void) = 0;
+ virtual Encode_Status sendEncodeCommand(EncodeTask* task) = 0;
 virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) = 0;
 virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams) = 0;
 virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig) = 0;
 virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) = 0;
+ virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer) = 0;
+ virtual Encode_Status updateFrameInfo(EncodeTask* task);
- Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer);
- Encode_Status cleanupForOutput();
- Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer);
 Encode_Status renderDynamicFrameRate();
 Encode_Status renderDynamicBitrate();
 Encode_Status renderHrd();
- void setKeyFrame(int32_t keyFramePeriod);
private:
 void setDefaultParams(void);
@@ -78,41 +89,29 @@ private:
 Encode_Status surfaceMappingForKbufHandle(SurfaceMap *map);
 Encode_Status surfaceMappingForMalloc(SurfaceMap *map);
 Encode_Status surfaceMapping(SurfaceMap *map);
+ SurfaceMap *findSurfaceMapByValue(int32_t value);
+ Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid);
+ void PrepareFrameInfo(EncodeTask* task);
- SurfaceMap *appendSurfaceMap(
- SurfaceMap *head, SurfaceMap *map);
- SurfaceMap *removeSurfaceMap(
- SurfaceMap *head, SurfaceMap *map);
- SurfaceMap *findSurfaceMapByValue(
- SurfaceMap *head, int32_t value);
-
- Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer);
- void updateProperities(void);
- void decideFrameType(void);
-// Encode_Status uploadDataToSurface(VideoEncRawBuffer *inBuffer);
- Encode_Status syncEncode(VideoEncRawBuffer *inBuffer);
- Encode_Status asyncEncode(VideoEncRawBuffer *inBuffer);
+ Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer);
+ Encode_Status cleanupForOutput();
+ Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer);
protected:
 bool mInitialized;
+ bool mStarted;
 VADisplay mVADisplay;
 VAContextID mVAContext;
 VAConfigID mVAConfig;
 VAEntrypoint mVAEntrypoint;
- VACodedBufferSegment *mCurSegment;
- uint32_t mOffsetInSeg;
- uint32_t mTotalSize;
- uint32_t mTotalSizeCopied;
 VideoParamsCommon mComParams;
 VideoParamsHRD mHrdParam;
 VideoParamsStoreMetaDataInBuffers mStoreMetaDataInBuffers;
- bool mForceKeyFrame;
 bool mNewHeader;
- bool mFirstFrame;
 bool mRenderMaxSliceSize; //Max Slice Size
 bool mRenderQP;
@@ -121,50 +120,36 @@ protected:
 bool mRenderBitRate;
 bool mRenderHrd;
- VABufferID mVACodedBuffer[2];
- VABufferID mLastCodedBuffer;
- VABufferID mOutCodedBuffer;
 VABufferID mSeqParamBuf;
 VABufferID mRcParamBuf;
 VABufferID mFrameRateParamBuf;
 VABufferID mPicParamBuf;
 VABufferID mSliceParamBuf;
- VASurfaceID *mSurfaces;
- uint32_t mSurfaceCnt;
-
- SurfaceMap *mSrcSurfaceMapList;
-
- //for new design
- VASurfaceID mCurSurface; //current input surface to be encoded
- VASurfaceID mRefSurface; //reference surface
- VASurfaceID mRecSurface; //reconstructed surface
- VASurfaceID mLastSurface; //last surface
+ android::List<SurfaceMap *> mSrcSurfaceMapList; //all mapped surface info list from input buffer
+ android::List<EncodeTask *> mEncodeTaskList; //all encode tasks list
+ android::List<VABufferID> 
mVACodedBufferList; //all available codedbuffer list
- VideoEncRawBuffer *mLastInputRawBuffer;
-
- uint32_t mEncodedFrames;
+ VASurfaceID mRefSurface; //reference surface, only used in base
+ VASurfaceID mRecSurface; //reconstructed surface, only used in base
 uint32_t mFrameNum;
 uint32_t mCodedBufSize;
- uint32_t mCodedBufIndex;
- bool mPicSkipped;
- bool mIsIntra;
 bool mSliceSizeOverflow;
- bool mCodedBufferMapped;
- bool mDataCopiedOut;
- bool mKeyFrame;
- int32_t mInitCheck;
+ //Current Outputting task
+ EncodeTask *mCurOutputTask;
-#ifdef VIDEO_ENC_STATISTICS_ENABLE
- VideoStatistics mVideoStat;
-#endif
+ //Current outputting CodedBuffer status
+ VABufferID mOutCodedBuffer;
+ bool mCodedBufferMapped;
+ VACodedBufferSegment *mCurSegment;
+ uint32_t mOffsetInSeg;
+ uint32_t mTotalSize;
+ uint32_t mTotalSizeCopied;
+ android::Mutex mCodedBuffer_Lock, mEncodeTask_Lock;
+ android::Condition mCodedBuffer_Cond, mEncodeTask_Cond;
- // Constants
- static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE = 2;
- static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE = 8;
+ bool mFrameSkipped;
};
-
-
#endif /* __VIDEO_ENCODER_BASE_H__ */
diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h
index b9feca2..f5174aa 100644
--- a/videoencoder/VideoEncoderDef.h
+++ b/videoencoder/VideoEncoderDef.h
@@ -32,7 +32,9 @@ enum {
 ENCODE_SUCCESS = 0,
 ENCODE_ALREADY_INIT = 1,
 ENCODE_SLICESIZE_OVERFLOW = 2,
- ENCODE_BUFFER_TOO_SMALL = 3 // The buffer passed to encode is too small to contain encoded data
+ ENCODE_BUFFER_TOO_SMALL = 3, // The buffer passed to encode is too small to contain encoded data
+ ENCODE_DEVICE_BUSY = 4,
+ ENCODE_DATA_NOT_READY = 5,
};
typedef enum {
@@ -42,6 +44,7 @@ typedef enum {
 OUTPUT_ONE_NAL = 4,
 OUTPUT_ONE_NAL_WITHOUT_STARTCODE = 8,
 OUTPUT_LENGTH_PREFIXED = 16,
+ OUTPUT_CODEDBUFFER = 32,
 OUTPUT_BUFFER_LAST
} VideoOutputFormat;
@@ -102,6 +105,23 @@ enum VideoBufferSharingMode {
 BUFFER_LAST
};
+typedef enum {
+ FTYPE_UNKNOWN = 0, // Unknown
+ FTYPE_I = 1, // General I-frame type
+ FTYPE_P = 2, // General P-frame type
+ FTYPE_B = 3, // General B-frame type
+ FTYPE_SI = 4, // H.264 SI-frame type
+ FTYPE_SP = 5, // H.264 SP-frame type
+ FTYPE_EI = 6, // H.263 EI-frame type
+ FTYPE_EP = 7, // H.263 EP-frame type
+ FTYPE_S = 8, // MPEG-4 S-frame type
+ FTYPE_IDR = 9, // IDR-frame type
+} FrameType;
+
+//function call mode
+#define FUNC_BLOCK 0xFFFFFFFF
+#define FUNC_NONBLOCK 0
+
// Output buffer flag
#define ENCODE_BUFFERFLAG_ENDOFFRAME 0x00000001
#define ENCODE_BUFFERFLAG_PARTIALFRAME 0x00000002
@@ -110,6 +130,8 @@ enum VideoBufferSharingMode {
#define ENCODE_BUFFERFLAG_DATACORRUPT 0x00000010
#define ENCODE_BUFFERFLAG_DATAINVALID 0x00000020
#define ENCODE_BUFFERFLAG_SLICEOVERFOLOW 0x00000040
+#define ENCODE_BUFFERFLAG_ENDOFSTREAM 0x00000080
+#define ENCODE_BUFFERFLAG_NSTOPFRAME 0x00000100
typedef struct {
 uint8_t *data;
@@ -118,14 +140,18 @@ typedef struct {
 uint32_t remainingSize;
 int flag; //Key frame, Codec Data etc
 VideoOutputFormat format; //output format
- uint64_t timeStamp; //reserved
+ int64_t timeStamp; //reserved
+ FrameType type;
+ uint8_t *in_data; //indicate corresponding input data
} VideoEncOutputBuffer;
typedef struct {
 uint8_t *data;
 uint32_t size;
 bool bufAvailable; //To indicate whether this buffer can be reused
- uint64_t timeStamp; //reserved
+ int64_t timeStamp; //reserved
+ FrameType type; //frame type expected to be encoded
+ int flag; // flag to indicate buffer property
} VideoEncRawBuffer;
struct VideoEncSurfaceBuffer {
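/* Illustration only (not part of the original patch): intended use of the timeout
   convention introduced above. waitRelative() is handed 1000000*timeout nanoseconds,
   so the timeout argument is in milliseconds; "encoder", "inBuf" and "outBuf" are
   hypothetical caller-side names.

       Encode_Status s = encoder->encode(&inBuf, 40);    // wait up to 40 ms
       if (s == ENCODE_DEVICE_BUSY) {
           // no coded buffer is free yet: drain one pending output, then retry
           s = encoder->getOutput(&outBuf, FUNC_BLOCK);  // FUNC_NONBLOCK would return at once
       }
*/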
@@ -304,6 +330,8 @@ struct VideoParamsCommon : VideoParamConfigSet { AirParams airParams; uint32_t disableDeblocking; bool syncEncMode; + //CodedBuffer properties + uint32_t codedBufNum; VideoParamsCommon() { type = VideoParamsTypeCommon; @@ -327,6 +355,7 @@ struct VideoParamsCommon : VideoParamConfigSet { this->airParams = other.airParams; this->disableDeblocking = other.disableDeblocking; this->syncEncMode = other.syncEncMode; + this->codedBufNum = other.codedBufNum; return *this; } }; @@ -336,10 +365,23 @@ struct VideoParamsAVC : VideoParamConfigSet { uint8_t VUIFlag; int32_t maxSliceSize; uint32_t idrInterval; + uint32_t ipPeriod; + uint32_t refFrames; SliceNum sliceNum; AVCDelimiterType delimiterType; Cropping crop; SamplingAspectRatio SAR; + uint32_t refIdx10ActiveMinus1; + uint32_t refIdx11ActiveMinus1; + bool bFrameMBsOnly; + bool bMBAFF; + bool bEntropyCodingCABAC; + bool bWeightedPPrediction; + uint32_t weightedBipredicitonMode; + bool bConstIpred ; + bool bDirect8x8Inference; + bool bDirectSpatialTemporal; + uint32_t cabacInitIdc; VideoParamsAVC() { type = VideoParamsTypeAVC; @@ -354,6 +396,8 @@ struct VideoParamsAVC : VideoParamConfigSet { this->VUIFlag = other.VUIFlag; this->maxSliceSize = other.maxSliceSize; this->idrInterval = other.idrInterval; + this->ipPeriod = other.ipPeriod; + this->refFrames = other.refFrames; this->sliceNum = other.sliceNum; this->delimiterType = other.delimiterType; this->crop.LeftOffset = other.crop.LeftOffset; @@ -363,6 +407,17 @@ struct VideoParamsAVC : VideoParamConfigSet { this->SAR.SarWidth = other.SAR.SarWidth; this->SAR.SarHeight = other.SAR.SarHeight; + this->refIdx10ActiveMinus1 = other.refIdx10ActiveMinus1; + this->refIdx11ActiveMinus1 = other.refIdx11ActiveMinus1; + this->bFrameMBsOnly = other.bFrameMBsOnly; + this->bMBAFF = other.bMBAFF; + this->bEntropyCodingCABAC = other.bEntropyCodingCABAC; + this->bWeightedPPrediction = other.bWeightedPPrediction; + this->weightedBipredicitonMode = other.weightedBipredicitonMode; + this->bConstIpred = other.bConstIpred; + this->bDirect8x8Inference = other.bDirect8x8Inference; + this->bDirectSpatialTemporal = other.bDirectSpatialTemporal; + this->cabacInitIdc = other.cabacInitIdc; return *this; } }; @@ -450,6 +505,7 @@ struct VideoConfigAVCIntraPeriod : VideoParamConfigSet { uint32_t idrInterval; //How many Intra frame will have a IDR frame uint32_t intraPeriod; + uint32_t ipPeriod; }; struct VideoConfigNALSize : VideoParamConfigSet { @@ -512,14 +568,4 @@ struct VideoConfigSliceNum : VideoParamConfigSet { SliceNum sliceNum; }; -typedef struct { - uint32_t total_frames; - uint32_t skipped_frames; - uint32_t average_encode_time; - uint32_t max_encode_time; - uint32_t max_encode_frame; - uint32_t min_encode_time; - uint32_t min_encode_frame; -}VideoStatistics; - #endif /* __VIDEO_ENCODER_DEF_H__ */ diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp index 7371d7a..b9b9c99 100644 --- a/videoencoder/VideoEncoderH263.cpp +++ b/videoencoder/VideoEncoderH263.cpp @@ -16,20 +16,20 @@ VideoEncoderH263::VideoEncoderH263() { mComParams.profile = (VAProfile)PROFILE_H263BASELINE; } -Encode_Status VideoEncoderH263::sendEncodeCommand(void) { +Encode_Status VideoEncoderH263::sendEncodeCommand(EncodeTask *task) { Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n"); if (mFrameNum == 0) { - ret = renderSequenceParams(); + ret = renderSequenceParams(task); CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); } - ret = renderPictureParams(); + ret = renderPictureParams(task); 
CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); - ret = renderSliceParams(); + ret = renderSliceParams(task); CHECK_ENCODE_STATUS_RETURN("renderSliceParams"); LOG_V( "End\n"); @@ -37,7 +37,7 @@ Encode_Status VideoEncoderH263::sendEncodeCommand(void) { } -Encode_Status VideoEncoderH263::renderSequenceParams() { +Encode_Status VideoEncoderH263::renderSequenceParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncSequenceParameterBufferH263 h263SequenceParam = {}; @@ -78,7 +78,7 @@ Encode_Status VideoEncoderH263::renderSequenceParams() { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderH263::renderPictureParams() { +Encode_Status VideoEncoderH263::renderPictureParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncPictureParameterBufferH263 h263PictureParams = {}; @@ -86,18 +86,18 @@ Encode_Status VideoEncoderH263::renderPictureParams() { LOG_V( "Begin\n\n"); // set picture params for HW - h263PictureParams.reference_picture = mRefSurface; - h263PictureParams.reconstructed_picture = mRecSurface; - h263PictureParams.coded_buf = mVACodedBuffer [mCodedBufIndex]; + h263PictureParams.reference_picture = task->ref_surface[0]; + h263PictureParams.reconstructed_picture = task->rec_surface; + h263PictureParams.coded_buf = task->coded_buffer; h263PictureParams.picture_width = mComParams.resolution.width; h263PictureParams.picture_height = mComParams.resolution.height; - h263PictureParams.picture_type = mIsIntra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; + h263PictureParams.picture_type = (task->type == FTYPE_I) ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; LOG_V("======h263 picture params======\n"); LOG_I( "reference_picture = 0x%08x\n", h263PictureParams.reference_picture); LOG_I( "reconstructed_picture = 0x%08x\n", h263PictureParams.reconstructed_picture); LOG_I( "coded_buf = 0x%08x\n", h263PictureParams.coded_buf); - LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); +// LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); LOG_I( "picture_width = %d\n", h263PictureParams.picture_width); LOG_I( "picture_height = %d\n",h263PictureParams.picture_height); LOG_I( "picture_type = %d\n\n",h263PictureParams.picture_type); @@ -117,7 +117,7 @@ Encode_Status VideoEncoderH263::renderPictureParams() { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderH263::renderSliceParams() { +Encode_Status VideoEncoderH263::renderSliceParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; uint32_t sliceHeight; @@ -145,7 +145,7 @@ Encode_Status VideoEncoderH263::renderSliceParams() { sliceParams->start_row_number = 0; // slice height measured in MB sliceParams->slice_height = sliceHeightInMB; - sliceParams->slice_flags.bits.is_intra = mIsIntra; + sliceParams->slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0; sliceParams->slice_flags.bits.disable_deblocking_filter_idc = 0; LOG_V("======h263 slice params======\n"); diff --git a/videoencoder/VideoEncoderH263.h b/videoencoder/VideoEncoderH263.h index 2113e2f..a8578dd 100644 --- a/videoencoder/VideoEncoderH263.h +++ b/videoencoder/VideoEncoderH263.h @@ -20,7 +20,7 @@ public: virtual ~VideoEncoderH263() {}; protected: - virtual Encode_Status sendEncodeCommand(void); + virtual Encode_Status sendEncodeCommand(EncodeTask *task); virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) { return ENCODE_SUCCESS; } @@ -33,12 +33,16 @@ protected: virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) { return ENCODE_SUCCESS; } + virtual Encode_Status 
getExtFormatOutput(VideoEncOutputBuffer *outBuffer) { + return ENCODE_NOT_SUPPORTED; + } + //virtual Encode_Status updateFrameInfo(EncodeTask* task); // Local Methods private: - Encode_Status renderSequenceParams(); - Encode_Status renderPictureParams(); - Encode_Status renderSliceParams(); + Encode_Status renderSequenceParams(EncodeTask *task); + Encode_Status renderPictureParams(EncodeTask *task); + Encode_Status renderSliceParams(EncodeTask *task); }; #endif /* __VIDEO_ENCODER_H263_H__ */ diff --git a/videoencoder/VideoEncoderInterface.h b/videoencoder/VideoEncoderInterface.h index 243e4a1..da1c6ec 100644 --- a/videoencoder/VideoEncoderInterface.h +++ b/videoencoder/VideoEncoderInterface.h @@ -17,14 +17,13 @@ public: virtual Encode_Status start(void) = 0; virtual Encode_Status stop(void) = 0; virtual void flush(void) = 0; - virtual Encode_Status encode(VideoEncRawBuffer *inBuffer) = 0; - virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer) = 0; + virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout = FUNC_BLOCK) = 0; + virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout = FUNC_BLOCK) = 0; virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams) = 0; virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams) = 0; virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig) = 0; virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig) = 0; virtual Encode_Status getMaxOutSize(uint32_t *maxSize) = 0; - virtual Encode_Status getStatistics(VideoStatistics *videoStat) = 0; }; #endif /* VIDEO_ENCODER_INTERFACE_H_ */ diff --git a/videoencoder/VideoEncoderLog.h b/videoencoder/VideoEncoderLog.h index 49c34df..3b8910d 100644 --- a/videoencoder/VideoEncoderLog.h +++ b/videoencoder/VideoEncoderLog.h @@ -23,7 +23,8 @@ __android_log_print(level, comp, "%s():%d: "format, \ __FUNCTION__, __LINE__, ##__VA_ARGS__) -#if 1 +//#define VIDEO_ENC_LOG_ENABLE +#if 1 #ifdef VIDEO_ENC_LOG_ENABLE #define LOG_V(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__) #define LOG_I(format, ...) 
mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_INFO, format, ##__VA_ARGS__) diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp index 6e0263b..8afb215 100644 --- a/videoencoder/VideoEncoderMP4.cpp +++ b/videoencoder/VideoEncoderMP4.cpp @@ -85,30 +85,14 @@ Encode_Status VideoEncoderMP4::outputConfigData( return ret; } - -Encode_Status VideoEncoderMP4::getOutput(VideoEncOutputBuffer *outBuffer) { +Encode_Status VideoEncoderMP4::getExtFormatOutput(VideoEncOutputBuffer *outBuffer) { Encode_Status ret = ENCODE_SUCCESS; - VAStatus vaStatus = VA_STATUS_SUCCESS; - bool useLocalBuffer = false; LOG_V("Begin\n"); CHECK_NULL_RETURN_IFFAIL(outBuffer); - setKeyFrame(mComParams.intraPeriod); - - // prepare for output, map the coded buffer - ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer); - CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); - switch (outBuffer->format) { - case OUTPUT_EVERYTHING: - case OUTPUT_FRAME_DATA: { - // Output whatever we have - ret = VideoEncoderBase::outputAllData(outBuffer); - CHECK_ENCODE_STATUS_CLEANUP("outputAllData"); - break; - } case OUTPUT_CODEC_DATA: { // Output the codec config data ret = outputConfigData(outBuffer); @@ -123,32 +107,14 @@ Encode_Status VideoEncoderMP4::getOutput(VideoEncOutputBuffer *outBuffer) { LOG_I("out size is = %d\n", outBuffer->dataSize); - // cleanup, unmap the coded buffer if all - // data has been copied out - ret = VideoEncoderBase::cleanupForOutput(); CLEAN_UP: - if (ret < ENCODE_SUCCESS) { - if (outBuffer->data && (useLocalBuffer == true)) { - delete[] outBuffer->data; - outBuffer->data = NULL; - useLocalBuffer = false; - } - - // error happens, unmap the buffer - if (mCodedBufferMapped) { - vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - mCodedBufferMapped = false; - mCurSegment = NULL; - } - } LOG_V("End\n"); return ret; } - -Encode_Status VideoEncoderMP4::renderSequenceParams() { +Encode_Status VideoEncoderMP4::renderSequenceParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncSequenceParameterBufferMPEG4 mp4SequenceParams = {}; @@ -202,26 +168,26 @@ Encode_Status VideoEncoderMP4::renderSequenceParams() { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderMP4::renderPictureParams() { +Encode_Status VideoEncoderMP4::renderPictureParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncPictureParameterBufferMPEG4 mpeg4_pic_param = {}; LOG_V( "Begin\n\n"); // set picture params for HW - mpeg4_pic_param.reference_picture = mRefSurface; - mpeg4_pic_param.reconstructed_picture = mRecSurface; - mpeg4_pic_param.coded_buf = mVACodedBuffer[mCodedBufIndex]; + mpeg4_pic_param.reference_picture = task->ref_surface[0]; + mpeg4_pic_param.reconstructed_picture = task->rec_surface; + mpeg4_pic_param.coded_buf = task->coded_buffer; mpeg4_pic_param.picture_width = mComParams.resolution.width; mpeg4_pic_param.picture_height = mComParams.resolution.height; mpeg4_pic_param.vop_time_increment= mFrameNum; - mpeg4_pic_param.picture_type = mIsIntra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive; + mpeg4_pic_param.picture_type = (task->type == FTYPE_I) ? 
VAEncPictureTypeIntra : VAEncPictureTypePredictive; LOG_V("======mpeg4 picture params======\n"); LOG_I("reference_picture = 0x%08x\n", mpeg4_pic_param.reference_picture); LOG_I("reconstructed_picture = 0x%08x\n", mpeg4_pic_param.reconstructed_picture); LOG_I("coded_buf = 0x%08x\n", mpeg4_pic_param.coded_buf); - LOG_I("coded_buf_index = %d\n", mCodedBufIndex); +// LOG_I("coded_buf_index = %d\n", mCodedBufIndex); LOG_I("picture_width = %d\n", mpeg4_pic_param.picture_width); LOG_I("picture_height = %d\n", mpeg4_pic_param.picture_height); LOG_I("vop_time_increment = %d\n", mpeg4_pic_param.vop_time_increment); @@ -242,7 +208,7 @@ Encode_Status VideoEncoderMP4::renderPictureParams() { } -Encode_Status VideoEncoderMP4::renderSliceParams() { +Encode_Status VideoEncoderMP4::renderSliceParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; uint32_t sliceHeight; @@ -259,7 +225,7 @@ Encode_Status VideoEncoderMP4::renderSliceParams() { sliceParams.start_row_number = 0; sliceParams.slice_height = sliceHeightInMB; - sliceParams.slice_flags.bits.is_intra = mIsIntra; + sliceParams.slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0; sliceParams.slice_flags.bits.disable_deblocking_filter_idc = 0; LOG_V("======mpeg4 slice params======\n"); @@ -282,19 +248,19 @@ Encode_Status VideoEncoderMP4::renderSliceParams() { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderMP4::sendEncodeCommand(void) { +Encode_Status VideoEncoderMP4::sendEncodeCommand(EncodeTask *task) { Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n"); if (mFrameNum == 0) { - ret = renderSequenceParams(); + ret = renderSequenceParams(task); CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); } - ret = renderPictureParams(); + ret = renderPictureParams(task); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); - ret = renderSliceParams(); + ret = renderSliceParams(task); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); LOG_V( "End\n"); diff --git a/videoencoder/VideoEncoderMP4.h b/videoencoder/VideoEncoderMP4.h index b453023..7e579c0 100644 --- a/videoencoder/VideoEncoderMP4.h +++ b/videoencoder/VideoEncoderMP4.h @@ -19,11 +19,10 @@ public: VideoEncoderMP4(); virtual ~VideoEncoderMP4() {}; - Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); +// Encode_Status getOutput(VideoEncOutputBuffer *outBuffer); protected: - virtual Encode_Status sendEncodeCommand(void); - + virtual Encode_Status sendEncodeCommand(EncodeTask *task); virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) { return ENCODE_SUCCESS; } @@ -36,13 +35,16 @@ protected: virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) { return ENCODE_SUCCESS; } + virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer); + //virtual Encode_Status updateFrameInfo(EncodeTask* task); + // Local Methods private: Encode_Status getHeaderPos(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize); Encode_Status outputConfigData(VideoEncOutputBuffer *outBuffer); - Encode_Status renderSequenceParams(); - Encode_Status renderPictureParams(); - Encode_Status renderSliceParams(); + Encode_Status renderSequenceParams(EncodeTask *task); + Encode_Status renderPictureParams(EncodeTask *task); + Encode_Status renderSliceParams(EncodeTask *task); unsigned char mProfileLevelIndication; uint32_t mFixedVOPTimeIncrement; -- cgit v1.2.3 From 3d51be5e6edadd2a4973c94136f8c1ddacb7be12 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Fri, 18 Jan 2013 06:04:02 +0800 Subject: To fix one xvid clip "can't play" problem BZ: 80655 The clip itself 
has some problems. There is no N-VOP following a {P|B} packed frame. When libmix encounters such a situation, it will flush all the surfaces. As a result, the new frame cannot obtain a free surface. With this patch, libmix tries to decode the new I or P frame instead of flushing the surfaces. Change-Id: If809b8b85503c84b6f84b1fb7f4293d15e023c12 Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/87334 Reviewed-by: Wang, Yi A Reviewed-by: Chen, Tianmi Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videodecoder/VideoDecoderMPEG4.cpp | 3 --- 1 file changed, 3 deletions(-) diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index fcfcb2d..e975415 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -181,9 +181,6 @@ Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) { // for example: {PB} B N P B B P... if (picData->vop_coded == 1 && codingType != MP4_VOP_TYPE_B) { WTRACE("Invalid coding type while waiting for n-vop for packed frame."); - // timestamp of P frame in the queue is not correct. - // TODO: handle timestamp - flush(); mExpectingNVOP = false; } } -- cgit v1.2.3 From eb88da2a0dd32b2b2fbe739b021ad923bc4c3486 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Mon, 14 Jan 2013 21:13:08 +0800 Subject: libmix: fix klocwork issue in videoDecoderMPEG4 BZ: 73428 fix klocwork issue in VideoDecoderMPEG4, move check to decodeFrame. Signed-off-by: ywan171 Change-Id: I927bd2931291e719c6bde19ac21c872a1a4e5393 Reviewed-on: http://android.intel.com:8080/86544 Reviewed-by: Wang, Yi A Reviewed-by: Feng, Wei Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videodecoder/VideoDecoderMPEG4.cpp | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index e975415..7bd9f22 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -86,14 +86,6 @@ Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) { (void**)&data); CHECK_STATUS("VideoDecoderBase::parseBuffer"); - // When the MPEG4 parser gets the invalid parameters, add the check - // and return error to OMX to avoid mediaserver crash. - if (data && data->picture_data && - (data->picture_data->picture_param.vop_width == 0 - || data->picture_data->picture_param.vop_height == 0)) { - return DECODE_FAIL; - } - if (!mVAStarted) { status = startVA(data); CHECK_STATUS("startVA"); @@ -126,6 +118,13 @@ Decode_Status VideoDecoderMPEG4::decodeFrame(VideoDecodeBuffer *buffer, vbp_data return DECODE_SUCCESS; } + // When the MPEG4 parser gets the invalid parameters, add the check + // and return error to OMX to avoid mediaserver crash. 
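// Sketch, not part of the patch: the zero-dimension check moves from decode()
// into decodeFrame(), which runs only after parsing has produced a usable
// `data` pointer; that is why the relocated condition below can drop the
// leading `data &&` test that Klocwork flagged. Condensed form of the guard:
//
//     if (data->picture_data &&
//         (data->picture_data->picture_param.vop_width == 0 ||
//          data->picture_data->picture_param.vop_height == 0))
//         return DECODE_FAIL;   // reject zero-sized VOPs before decoding starts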
+ if (data->picture_data && (data->picture_data->picture_param.vop_width == 0 + || data->picture_data->picture_param.vop_height == 0)) { + return DECODE_FAIL; + } + uint64_t lastPTS = mCurrentPTS; mCurrentPTS = buffer->timeStamp; -- cgit v1.2.3 From 4cd032a92bb7e921325f09b657056eae71a3d6bb Mon Sep 17 00:00:00 2001 From: ywan171 Date: Tue, 15 Jan 2013 14:13:31 +0800 Subject: libmix: fix Klocwork issue in parser of h264,vc1,mpeg4 BZ: 73428 fix Klocwork issue in parser Change-Id: Ib144a599b06b1389c99c3b11267c3fb4b4c47a50 Signed-off-by: ywan171 Reviewed-on: http://android.intel.com:8080/86674 Reviewed-by: Feng, Wei Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c | 8 +++++--- mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c | 2 ++ mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 7 +++++++ 3 files changed, 14 insertions(+), 3 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c index cb81d27..010e77b 100755 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c @@ -2426,7 +2426,7 @@ void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) int32_t temp_frame_num = 0; int32_t idx, prev_idc; int32_t prev_frame_num_plus1_wrap; - uint32_t temp; + uint32_t temp = 0; int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); seq_param_set_used_ptr active_sps = &pInfo->active_SPS; h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; @@ -2467,9 +2467,11 @@ void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) */ else if (pInfo->img.frame_num != pInfo->img.PreviousFrameNum) { - if (MaxFrameNum) + if (MaxFrameNum) { ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp); - + } else { + temp = (uint32_t)pInfo->img.PreviousFrameNum + 1; + } prev_frame_num_plus1_wrap = temp; if (pInfo->img.frame_num != prev_frame_num_plus1_wrap) { diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c index b39f4ad..4996e28 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c +++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c @@ -454,6 +454,8 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, VC1_GET_BITS(1, tempValue); bpp->invert = (uint8_t) tempValue; + bpp->imode = -1; + if ((status = vc1_DecodeHuffmanOne(ctxt, &bpp->imode,VC1_BITPLANE_IMODE_TBL)) != VC1_STATUS_OK) { return status; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index abe9f82..859e817 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -476,6 +476,10 @@ void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) { picture_data = vbp_malloc_set0(vbp_picture_data_mp42, 1); query_data->picture_data = picture_data; + if (picture_data == NULL) { + query_data->number_picture_data = 0; + return; + } } query_data->number_picture_data = 1; } @@ -491,6 +495,9 @@ void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) if (picture_data->next_picture_data == NULL) { picture_data->next_picture_data = vbp_malloc_set0(vbp_picture_data_mp42, 1); + if (picture_data->next_picture_data == NULL) { + return; + } } query_data->number_picture_data++; -- cgit v1.2.3 From 
c5c62f0eb5153dc559441278d793109af5588557 Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Mon, 21 Jan 2013 15:53:19 +0800 Subject: mix-vbp: A fix to parse zig-zag-scanned scaling table from H.264 bitstream BZ: 80567 The root cause is that scaling tables written into H264 bitstream follow zigzag scanning while mix-vbp doesn't correct scaling table index while parsing it. Change-Id: I7b78b4244f110bc8e588f70d3af4b07098f76f89 Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/87694 Reviewed-by: cactus Reviewed-by: Liang, Dan Reviewed-by: Feng, Wei Reviewed-by: Cheng, Yao Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c index 2efa5c3..9e36b7c 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c @@ -27,7 +27,6 @@ h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOf int32_t j, scanj; int32_t delta_scale, lastScale, nextScale; -#if 0 const uint8_t ZZ_SCAN[16] = { 0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15 }; @@ -38,7 +37,6 @@ h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOf 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51, 58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63 }; -#endif lastScale = 8; nextScale = 8; @@ -46,7 +44,7 @@ h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOf for (j=0; j Date: Tue, 22 Jan 2013 11:21:05 +0800 Subject: libmix: A fix to enable VP8 HW 1080P playback on Merrifield VV BZ: 81807 A fix to enable VP8 HW playback on Merrifield VV Change-Id: I77c60e99214bc3838bb663259911dd67ed45acda Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/87857 Reviewed-by: Wang, Yi A Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/Android.mk | 3 +- mix_vbp/viddec_fw/fw/parser/Android.mk | 3 +- mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 6 +++ mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c | 24 ++++------- videodecoder/Android.mk | 3 +- videodecoder/VideoDecoderVP8.cpp | 59 ++++++++++++++++++---------- videodecoder/VideoDecoderVP8.h | 1 + 7 files changed, 59 insertions(+), 40 deletions(-) diff --git a/mix_vbp/Android.mk b/mix_vbp/Android.mk index 3df2e0a..819ded1 100644 --- a/mix_vbp/Android.mk +++ b/mix_vbp/Android.mk @@ -12,7 +12,8 @@ include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vc1/parser/Android MERRIFIELD_DEVICE := \ mrfl_vp \ mrfl_hvp \ - mrfl_sle + mrfl_sle \ + merr_vv ifneq ($(filter $(REF_DEVICE_NAME),$(MERRIFIELD_DEVICE)),) include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vp8/parser/Android.mk endif diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk index 6e403da..40410a0 100644 --- a/mix_vbp/viddec_fw/fw/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -48,7 +48,8 @@ endif MERRIFIELD_DEVICE := \ mrfl_vp \ mrfl_hvp \ - mrfl_sle + mrfl_sle \ + merr_vv ifneq ($(filter $(REF_DEVICE_NAME),$(MERRIFIELD_DEVICE)),) LOCAL_SRC_FILES += vbp_vp8_parser.c LOCAL_C_INCLUDES += $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vp8/include diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index 53988ab..1e888a9 100644 
--- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -334,6 +334,12 @@ typedef struct _vbp_codec_data_vp8 int refresh_golden_frame; int refresh_last_frame; + /* cropping information */ + int crop_top; + int crop_bottom; + int crop_left; + int crop_right; + int golden_copied; int altref_copied; } vbp_codec_data_vp8; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c index 25aa85d..89ef068 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c @@ -64,6 +64,8 @@ uint32 vbp_init_parser_entries_vp8(vbp_context *pcontext) /* entry point not needed */ pcontext->parser_ops->is_frame_start = NULL; + pcontext->parser_ops->flush = NULL; + return VBP_OK; } @@ -302,20 +304,7 @@ static void vbp_add_probs_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *quer VAProbabilityDataBufferVP8 *prob_data = query_data->prob_data; /* DCT coefficients probability */ - int i, j, k, l; - for (i = 0; i < 4; i++) - { - for (j = 0; j < 8; j++) - { - for (k = 0; k < 3; k++) - { - for (l = 0; l < 11; l++) - { - prob_data->dct_coeff_probs[i][j][k][l] = fc->DCT_Coefficients[i][j][k][l]; - } - } - } - } + memcpy(prob_data->dct_coeff_probs, fc->DCT_Coefficients, 4*8*3*11*sizeof(uint8_t)); } static void vbp_set_codec_data_vp8(vp8_viddec_parser *parser, vbp_codec_data_vp8* codec_data) @@ -326,11 +315,14 @@ static void vbp_set_codec_data_vp8(vp8_viddec_parser *parser, vbp_codec_data_vp8 codec_data->version_num = pi->frame_tag.version; codec_data->show_frame = pi->frame_tag.show_frame; - //codec_data->frame_width = pi->width; - //codec_data->frame_height = pi->height; codec_data->frame_width = ((pi->width + 15) / 16) * 16; codec_data->frame_height = ((pi->height + 15) / 16) * 16; + codec_data->crop_top = 0; + codec_data->crop_bottom = codec_data->frame_height - pi->height; + codec_data->crop_left = 0; + codec_data->crop_right = codec_data->frame_width - pi->width; + codec_data->refresh_alt_frame = pi->refresh_af; codec_data->refresh_golden_frame = pi->refresh_gf; codec_data->refresh_last_frame = pi->refresh_lf; diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 9c335f3..2998d4e 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -48,7 +48,8 @@ LOCAL_MODULE := libva_videodecoder MERRIFIELD_DEVICE := \ mrfl_vp \ mrfl_hvp \ - mrfl_sle + mrfl_sle \ + merr_vv ifneq ($(filter $(REF_DEVICE_NAME),$(MERRIFIELD_DEVICE)),) LOCAL_SRC_FILES += VideoDecoderVP8.cpp LOCAL_CFLAGS += -DUSE_HW_VP8 diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index bc6274c..c08eb6a 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -45,6 +45,13 @@ void VideoDecoderVP8::invalidateReferenceFrames(int toggle) { } } +void VideoDecoderVP8::clearAsReference(int toggle, int ref_type) { + ReferenceFrameBuffer ref = mRFBs[toggle][ref_type]; + if (ref.surfaceBuffer) { + ref.surfaceBuffer->asReferernce = false; + } +} + void VideoDecoderVP8::updateFormatInfo(vbp_data_vp8 *data) { int32_t width = data->codec_data->frame_width; int32_t height = data->codec_data->frame_height; @@ -60,6 +67,12 @@ void VideoDecoderVP8::updateFormatInfo(vbp_data_vp8 *data) { ITRACE("Video size is changed."); } + mVideoFormatInfo.cropLeft = data->codec_data->crop_left; + mVideoFormatInfo.cropRight = data->codec_data->crop_right; + mVideoFormatInfo.cropTop = data->codec_data->crop_top; + mVideoFormatInfo.cropBottom = 
data->codec_data->crop_bottom; + ITRACE("Cropping: left = %d, top = %d, right = %d, bottom = %d", data->codec_data->crop_left, data->codec_data->crop_top, data->codec_data->crop_right, data->codec_data->crop_bottom); + mVideoFormatInfo.valid = true; } @@ -105,6 +118,9 @@ void VideoDecoderVP8::stop(void) { void VideoDecoderVP8::flush(void) { VideoDecoderBase::flush(); + + invalidateReferenceFrames(0); + invalidateReferenceFrames(1); } Decode_Status VideoDecoderVP8::decode(VideoDecodeBuffer *buffer) { @@ -144,6 +160,14 @@ Decode_Status VideoDecoderVP8::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_v return DECODE_SUCCESS; } + if (VP8_KEY_FRAME == data->codec_data->frame_type) { + updateFormatInfo(data); + if (mSizeChanged == true) { + mSizeChanged = false; + return DECODE_FORMAT_CHANGE; + } + } + if (data->codec_data->frame_type == VP8_SKIPPED_FRAME) { // Do nothing for skip frame as the last frame will be rendered again natively return DECODE_SUCCESS; @@ -310,6 +334,20 @@ void VideoDecoderVP8::updateReferenceFrames(vbp_data_vp8 *data) { /* Refresh alternative frame reference buffer using the currently reconstructed frame */ refreshAltReference(data); + + /* Update reference frames */ + for (int i = 0; i < VP8_REF_SIZE; i++) { + VideoSurfaceBuffer *p = mRFBs[1][i].surfaceBuffer; + int j; + for (j = 0; j < VP8_REF_SIZE; j++) { + if (p == mRFBs[0][j].surfaceBuffer) { + break; + } + if (j == VP8_REF_SIZE) { + clearAsReference(1, i); + } + } + } } void VideoDecoderVP8::refreshLastReference(vbp_data_vp8 *data) { @@ -321,13 +359,6 @@ if (data->codec_data->refresh_last_frame) { mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer = mAcquiredBuffer; mRFBs[0][VP8_LAST_REF_PIC].index = mAcquiredBuffer->renderBuffer.surface; - - if (mRFBs[1][VP8_LAST_REF_PIC].surfaceBuffer) { - mRFBs[1][VP8_LAST_REF_PIC].surfaceBuffer->asReferernce = false; - } - } - - if (mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer) { mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer->asReferernce = true; } } @@ -353,13 +384,6 @@ void VideoDecoderVP8::refreshGoldenReference(vbp_data_vp8 *data) { if (data->codec_data->refresh_golden_frame) { mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer = mAcquiredBuffer; mRFBs[0][VP8_GOLDEN_REF_PIC].index = mAcquiredBuffer->renderBuffer.surface; - - if (mRFBs[1][VP8_GOLDEN_REF_PIC].surfaceBuffer) { - mRFBs[1][VP8_GOLDEN_REF_PIC].surfaceBuffer->asReferernce = false; - } - } - - if (mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer) { mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer->asReferernce = true; } } @@ -385,13 +409,6 @@ void VideoDecoderVP8::refreshAltReference(vbp_data_vp8 *data) { if (data->codec_data->refresh_alt_frame) { mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer = mAcquiredBuffer; mRFBs[0][VP8_ALT_REF_PIC].index = mAcquiredBuffer->renderBuffer.surface; - - if (mRFBs[1][VP8_ALT_REF_PIC].surfaceBuffer) { - mRFBs[1][VP8_ALT_REF_PIC].surfaceBuffer->asReferernce = false; - } - } - - if (mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer) { mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer->asReferernce = true; } } diff --git a/videodecoder/VideoDecoderVP8.h b/videodecoder/VideoDecoderVP8.h index f3b64e9..61db40d 100644 --- a/videodecoder/VideoDecoderVP8.h +++ b/videodecoder/VideoDecoderVP8.h @@ -49,6 +49,7 @@ private: void refreshAltReference(vbp_data_vp8 *data); void updateFormatInfo(vbp_data_vp8 *data); void invalidateReferenceFrames(int toggle); + void clearAsReference(int toggle, int ref_type); private: enum { -- cgit v1.2.3 From 0ec905337c498fa6533b06ae11623a629561a72d Mon Sep 17 00:00:00 2001 From: wanglili 
Date: Wed, 16 Jan 2013 01:38:02 +0800 Subject: Extend MAX_GRAPHIC_BUFFER_NUM to 64 to support VPP.[Video-MRFLD] BZ: 83081 In the current design, VPP (Video Post-Processing) requests extra graphic buffers from the native window, the same buffer queue the decoder uses, so we need to extend the previous cap of 28 (16 max DPB + 1 + 11 AVC extra) to 64 for now to enable the VPP use case. After evaluating VPP performance, we can shrink it to the actual number needed. Change-Id: I8492024c36656e05199a01abf5a403313f93549e Reviewed-on: http://android.intel.com:8080/87676 Reviewed-by: Wang, Lili A Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderDefs.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 54ce618..b527577 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -113,7 +113,8 @@ struct VideoDecodeBuffer { }; -#define MAX_GRAPHIC_BUFFER_NUM (16 + 1 + 11) // max DPB + 1 + AVC_EXTRA_NUM +//#define MAX_GRAPHIC_BUFFER_NUM (16 + 1 + 11) // max DPB + 1 + AVC_EXTRA_NUM +#define MAX_GRAPHIC_BUFFER_NUM 64 // extended for VPP struct VideoConfigBuffer { uint8_t *data; -- cgit v1.2.3 From b9129f02973c9b47cc7345425890391a88c1e17b Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Wed, 6 Feb 2013 09:22:42 +0800 Subject: libmix: A fix to refine VP8 reference frame management BZ: 85153 A fix to refine VP8 reference frame management Change-Id: I0bc1a3ce93855ade4b07bc26446db59634e0bc8f Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/90385 Reviewed-by: cactus Reviewed-by: Wang, Yi A Reviewed-by: Jiang, Fei Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: Gozalvez Herrero, Juan AntonioX Tested-by: Gozalvez Herrero, Juan AntonioX --- videodecoder/VideoDecoderVP8.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index c08eb6a..3a0c2ac 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -343,9 +343,9 @@ void VideoDecoderVP8::updateReferenceFrames(vbp_data_vp8 *data) { if (p == mRFBs[0][j].surfaceBuffer) { break; } - if (j == VP8_REF_SIZE) { - clearAsReference(1, i); - } + } + if (j == VP8_REF_SIZE) { + clearAsReference(1, i); } } } -- cgit v1.2.3 From 688c0e88d6c0dba961f2f3cfe34c7cc0b3ad2599 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Tue, 29 Jan 2013 13:03:09 +0800 Subject: change to support new gralloc buffer for Camera v2 BZ: 84445 Change-Id: Ied6a3eb81cbf73feab511668037574a5bd2b8e8f Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/89062 Reviewed-by: cactus Reviewed-by: Yuan, Shengquan Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- test/Android.mk | 1 + test/mix_encoder.cpp | 270 +++++++++++++++++++------------------- videoencoder/Android.mk | 1 + videoencoder/VideoEncoderBase.cpp | 25 ++-- 4 files changed, 146 insertions(+), 151 deletions(-) diff --git a/test/Android.mk b/test/Android.mk index abded5d..e734b7b 100644 --- a/test/Android.mk +++ b/test/Android.mk @@ -35,6 +35,7 @@ LOCAL_SRC_FILES := \ LOCAL_C_INCLUDES := \ $(TARGET_OUT_HEADERS)/libva \ $(TARGET_OUT_HEADERS)/libmix_videoencoder \ + $(TARGET_OUT_HEADERS)/pvr \ $(TOP)/frameworks/base/include/display \ $(LOCAL_PATH) diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index 4662947..20ebca8 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -15,6 +15,7 @@ #include #include +#include #define 
CHECK_ENCODE_STATUS(FUNC)\ if (ret < ENCODE_SUCCESS) { \ @@ -53,9 +54,9 @@ static uint32_t gEncFrames = 15; static const int gSrcFrames = 15; static uint32_t gAllocatedSize; -static uint32_t gWidth = 1280; -static uint32_t gHeight = 720; -static uint32_t gStride = 1280; +static uint32_t gSrcWidth = 1280; +static uint32_t gSrcHeight = 720; +static uint32_t gSrcStride = 1280; static uint32_t gFrameRate = 30; static uint32_t gEncodeWidth = 0; static uint32_t gEncodeHeight = 0; @@ -63,7 +64,7 @@ static uint32_t gEncodeHeight = 0; static char* gFile = (char*)"out.264"; static uint32_t gMode = 0; //0:Camera malloc , 1: WiDi clone, 2: WiDi ext, 3: WiDi user, 4: Raw, 5: SurfaceMediaSource -static const char* gModeString[9] = {"Camera malloc", "WiDi clone", "WiDi ext", "WiDi user", "Raw", "GrallocSource(Composer)", "GrallocSource(Gralloc)","MappingSurfaceForCI","Camera malloc For Extra Value"}; +static const char* gModeString[10] = {"Camera malloc", "WiDi clone", "WiDi ext", "WiDi user", "Raw", "GrallocSource(Composer)", "GrallocSource(Gralloc)", "GrallocSource(Camerav2)", "MappingSurfaceForCI", "Camera malloc For Extra Value"}; static const char* gRCModeString[4] ={"NO_RC", "CBR", "VBR", "VCM"}; @@ -131,15 +132,15 @@ static void gfx_init() printf("FATAL: can't find the %s module", GRALLOC_HARDWARE_MODULE_ID); exit(-1); } - + gAllocMod = (gralloc_module_t const *)gModule; err = gralloc_open(gModule, &gAllocDev); if (err) { printf("FATAL: gralloc open failed\n"); exit(-1); - } - + } + } static int gfx_alloc(uint32_t w, uint32_t h, int format, @@ -153,7 +154,7 @@ static int gfx_alloc(uint32_t w, uint32_t h, int format, w, h, format, usage, err, strerror(-err)); exit(-1); } - + return err; } @@ -166,7 +167,7 @@ static int gfx_free(buffer_handle_t handle) printf("free(...) failed %d (%s)\n", err, strerror(-err)); exit(-1); } - + return err; } @@ -184,7 +185,7 @@ static int gfx_lock(buffer_handle_t handle, printf("lock(...) failed %d (%s)", err, strerror(-err)); exit(-1); } - + return err; } @@ -195,20 +196,21 @@ static int gfx_unlock(buffer_handle_t handle) err = gAllocMod->unlock(gAllocMod, handle); if (err) { - printf("unlock(...) failed %d (%s)", err, strerror(-err)); + printf("unlock(...) 
failed %d (%s)\n", err, strerror(-err)); exit(-1); } - + return err; } - + Encode_Status SetVideoEncoderParam() { Encode_Status ret = ENCODE_SUCCESS; ret = gVideoEncoder->getParameters(&gEncoderParams); CHECK_ENCODE_STATUS("getParameters"); - + + printf("Set Encoding Width=%d, Height=%d\n", gEncodeWidth, gEncodeHeight); gEncoderParams.resolution.height = gEncodeHeight; gEncoderParams.resolution.width = gEncodeWidth; gEncoderParams.frameRate.frameRateDenom = 1; @@ -240,7 +242,7 @@ Encode_Status SetVideoEncoderParam() { gEncoderParams->rcParams.targetPercentage = 0; gEncoderParams->rcParams.bitRate = 10000; gEncoderParams->rcMode = RATE_CONTROL_CBR; - gEncoderParams->refreshType = VIDEO_ENC_NONIR; + gEncoderParams->refreshType = VIDEO_ENC_NONIR; #endif ret = gVideoEncoder->setParameters(&gEncoderParams); @@ -325,7 +327,7 @@ Encode_Status SetVideoEncoderParam() { // VideoEncodeAVC.cpp file derivedSetConfig && derivedGetConfig code coverage test // for VideoConfigTypeNALSize derivedSetConfig && derivedGetConfig VideoConfigNALSize configNalSize; - configNalSize.maxSliceSize = 8*gWidth*gHeight*1.5; + configNalSize.maxSliceSize = 8*gEncodeWidth*gEncodeHeight*1.5; gVideoEncoder->setConfig(&configNalSize); gVideoEncoder->getConfig(&configNalSize); @@ -360,8 +362,8 @@ Encode_Status SetVideoEncoderParam() { ret = gVideoEncoder->setParameters(&gStoreMetaDataInBuffers); CHECK_ENCODE_STATUS("setParameters StoreMetaDataInBuffers"); - } - + } + return ret; } @@ -380,31 +382,31 @@ static int YUV_generator_planar(int width, int height, int jj, xpos, ypos; ypos = (row / box_width) & 0x1; - + for (jj=0; jjmode = MEM_MODE_MALLOC; vinfo->handle = 0; vinfo->size = size; - vinfo->width = gWidth; - vinfo->height = gHeight; - vinfo->lumaStride = gStride; - vinfo->chromStride = gStride; + vinfo->width = gSrcWidth; + vinfo->height = gSrcHeight; + vinfo->lumaStride = gSrcStride; + vinfo->chromStride = gSrcStride; vinfo->format = STRING_TO_FOURCC("NV12"); vinfo->s3dformat = 0xFFFFFFFF; @@ -447,23 +449,23 @@ void MallocExternalMemoryWithExtraValues() //malloc external memory, and not need to set into encoder before start() void MallocExternalMemory() { - uint32_t size = gWidth * gHeight * 3 /2; + uint32_t size = gSrcStride * gSrcHeight * 3 /2; ValueInfo* vinfo = new ValueInfo; vinfo->mode = MEM_MODE_MALLOC; vinfo->handle = 0; vinfo->size = size; - vinfo->width = gWidth; - vinfo->height = gHeight; - vinfo->lumaStride = gStride; - vinfo->chromStride = gStride; + vinfo->width = gSrcWidth; + vinfo->height = gSrcHeight; + vinfo->lumaStride = gSrcStride; + vinfo->chromStride = gSrcStride; vinfo->format = STRING_TO_FOURCC("NV12"); vinfo->s3dformat = 0xFFFFFFFF; for(int i = 0; i < gSrcFrames; i ++) { gMallocPtr[i] = (uint8_t*)malloc(size + 4095); - gUsrptr[i] = (uint8_t*)((((int )gMallocPtr[i] + 4095) / 4096 ) * 4096); + gUsrptr[i] = (uint8_t*)((((uint32_t )gMallocPtr[i] + 4095) / 4096 ) * 4096); gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); @@ -480,12 +482,12 @@ void GetAllUsrptr() paramsUsrptrBuffer.type = VideoParamsTypeUsrptrBuffer; paramsUsrptrBuffer.size = sizeof(VideoParamsUsrptrBuffer); - paramsUsrptrBuffer.expectedSize = gWidth * gHeight * 3 / 2; + paramsUsrptrBuffer.expectedSize = gSrcWidth * gSrcHeight * 3 / 2; paramsUsrptrBuffer.format = STRING_TO_FOURCC("NV12"); - paramsUsrptrBuffer.width = gWidth; - paramsUsrptrBuffer.height = gHeight; + paramsUsrptrBuffer.width = gSrcWidth; + paramsUsrptrBuffer.height = gSrcHeight; - for(int i = 0; i < gSrcFrames; i ++) + for(int i = 0; i 
< gSrcFrames; i ++) { ret = gVideoEncoder->getParameters(¶msUsrptrBuffer); if(ret != ENCODE_SUCCESS ) { @@ -495,11 +497,11 @@ void GetAllUsrptr() } gAllocatedSize = paramsUsrptrBuffer.actualSize; gUsrptr[i] = paramsUsrptrBuffer.usrPtr; - gStride = paramsUsrptrBuffer.stride; + gSrcStride = paramsUsrptrBuffer.stride; gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeEncoder, (int32_t)gUsrptr[i]); } - + } void CreateUserSurfaces(int mode) @@ -508,9 +510,9 @@ void CreateUserSurfaces(int mode) int majorVersion = -1; int minorVersion = -1; VAStatus vaStatus; - + gVADisplay = vaGetDisplay(&display); - + if (gVADisplay == NULL) { printf("vaGetDisplay failed."); } @@ -522,17 +524,17 @@ void CreateUserSurfaces(int mode) VASurfaceAttributeTPI attribute_tpi; - attribute_tpi.size = gWidth * gHeight * 3 /2; - attribute_tpi.luma_stride = gWidth; - attribute_tpi.chroma_u_stride = gWidth; - attribute_tpi.chroma_v_stride = gWidth; + attribute_tpi.size = gSrcWidth * gSrcHeight * 3 /2; + attribute_tpi.luma_stride = gSrcWidth; + attribute_tpi.chroma_u_stride = gSrcWidth; + attribute_tpi.chroma_v_stride = gSrcWidth; attribute_tpi.luma_offset = 0; - attribute_tpi.chroma_u_offset = gWidth * gHeight; - attribute_tpi.chroma_v_offset = gWidth * gHeight; + attribute_tpi.chroma_u_offset = gSrcWidth * gSrcHeight; + attribute_tpi.chroma_v_offset = gSrcWidth * gSrcHeight; attribute_tpi.pixel_format = VA_FOURCC_NV12; attribute_tpi.type = VAExternalMemoryNULL; - vaStatus = vaCreateSurfacesWithAttribute(gVADisplay, gWidth, gHeight, VA_RT_FORMAT_YUV420, + vaStatus = vaCreateSurfacesWithAttribute(gVADisplay, gSrcWidth, gSrcHeight, VA_RT_FORMAT_YUV420, gSrcFrames, gSurface, &attribute_tpi); if (vaStatus != VA_STATUS_SUCCESS) { @@ -546,10 +548,10 @@ void CreateUserSurfaces(int mode) upstreamParam.bufferMode = BUFFER_SHARING_KBUFHANDLE; ExternalBufferAttrib attrib; - attrib.realWidth = gWidth; - attrib.realHeight = gHeight; - attrib.lumaStride = gStride; - attrib.chromStride = gStride; + attrib.realWidth = gSrcWidth; + attrib.realHeight = gSrcHeight; + attrib.lumaStride = gSrcStride; + attrib.chromStride = gSrcStride; attrib.format = VA_FOURCC_NV12; upstreamParam.bufAttrib = &attrib; @@ -562,7 +564,7 @@ void CreateUserSurfaces(int mode) uint32_t lumaOffset = 0; uint32_t chromaUOffset = 0; uint32_t chromaVOffset = 0; - + for(int i = 0; i < gSrcFrames; i++) { vaStatus = vaLockSurface( gVADisplay, (VASurfaceID)gSurface[i], @@ -587,7 +589,7 @@ void CreateUserSurfaces(int mode) }else{ - for (int i = 0; i < gSrcFrames; i++) + for (int i = 0; i < gSrcFrames; i++) list[i] = gSurface[i]; } @@ -600,8 +602,8 @@ void CreateUserSurfaces(int mode) printf("Failed setParameters, Status = %d\n", ret); } delete list; - - //get usrptr for uploading src pictures + + //get usrptr for uploading src pictures VAImage surface_image; for (int i=0; imode = MEM_MODE_CI; vinfo->handle = 0; vinfo->size = size; - vinfo->width = gWidth; - vinfo->height = gHeight; - vinfo->lumaStride = gStride; - vinfo->chromStride = gStride; + vinfo->width = gSrcWidth; + vinfo->height = gSrcHeight; + vinfo->lumaStride = gSrcStride; + vinfo->chromStride = gSrcStride; vinfo->format = STRING_TO_FOURCC("NV12"); vinfo->s3dformat = 0xFFFFFFFF; @@ -649,85 +651,69 @@ void CreateSurfaceMappingForCI() } delete vinfo; } -void CreateGfxhandle() +void CreateGfxhandle(int color) { sp composer(ComposerService::getComposerService()); gGraphicBufferAlloc = composer->createGraphicBufferAlloc(); - - uint32_t usage = GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_WRITE_OFTEN | 
GraphicBuffer::USAGE_SW_READ_OFTEN; // | GraphicBuffer::USAGE_HW_COMPOSER; -// int format = HAL_PIXEL_FORMAT_YV12; - int format = 0x7FA00E00; // = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar in OMX_IVCommon.h - int32_t error; -/* - int adjusted_width, adjusted_height; - if (0) { - ; - } else if (512 >= gWidth) { - adjusted_width = 512; - } else if (1024 >= gWidth) { - adjusted_width = 1024; - } else if (1280 >= gWidth) { - adjusted_width = 1280; - } else if (2048 >= gWidth) { - adjusted_width = 2048; - } else if (4096 >= gWidth) { - adjusted_width = 4096; - } else { - adjusted_width = (gWidth + 0x1f) & ~0x1f; - } - adjusted_height = (gHeight + 0x1f) & ~0x1f; + uint32_t usage = GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_HW_COMPOSER; + int format = HAL_PIXEL_FORMAT_NV12; + if (color == 1) + format = 0x7FA00E00; // = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar in OMX_IVCommon.h + + int32_t error; -printf("adjust width=%d, height=%d\n", adjusted_width, adjusted_height); -*/ for(int i = 0; i < gSrcFrames; i ++) { sp graphicBuffer( gGraphicBufferAlloc->createGraphicBuffer( - gWidth, gHeight, format, usage, &error)); -// adjusted_width, adjusted_height, format, usage, &error)); + gSrcWidth, gSrcHeight, format, usage, &error)); gGraphicBuffer[i] = graphicBuffer; - graphicBuffer->lock(GraphicBuffer::USAGE_SW_WRITE_OFTEN, (void**)(&gUsrptr[i])); + graphicBuffer->lock(usage | GraphicBuffer::USAGE_SW_WRITE_OFTEN, (void**)(&gUsrptr[i])); gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)gGraphicBuffer[i]->handle); graphicBuffer->unlock(); + + IMG_native_handle_t* h = (IMG_native_handle_t*) gGraphicBuffer[i]->handle; + gSrcStride = h->iWidth; + gSrcHeight = h->iHeight; } } -void CreateGralloc() +void CreateGralloc(int color) { - int usage = GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_HW_TEXTURE; -// int format = HAL_PIXEL_FORMAT_YV12; - int format = 0x7FA00E00; // = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar in OMX_IVCommon.h + int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER; + int format = HAL_PIXEL_FORMAT_NV12; + if (color == 1) + format = 0x7FA00E00; // = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar in OMX_IVCommon.h gfx_init(); - void* vaddr; buffer_handle_t handle; - + for(int i = 0; i < gSrcFrames; i ++) { - gfx_alloc(gWidth, gHeight, format, usage, &handle, (int32_t*)&gStride); - gfx_lock(handle, GRALLOC_USAGE_SW_WRITE_OFTEN, 0, 0, gWidth, gHeight, &vaddr); - printf("vaddr= %p\n", vaddr); - gUsrptr[i] = (uint8_t*)vaddr; + gfx_alloc(gSrcWidth, gSrcHeight, format, usage, &handle, (int32_t*)&gSrcStride); + gfx_lock(handle, usage | GRALLOC_USAGE_SW_WRITE_OFTEN, 0, 0, gSrcWidth, gSrcHeight, (void**)(&gUsrptr[i])); gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)handle); gfx_unlock(handle); + IMG_native_handle_t* h = (IMG_native_handle_t*) handle; + gSrcHeight = h->iHeight; } } int CheckArgs(int argc, char* argv[]) { char c; - - while ((c =getopt(argc, argv,"b:c:r:w:h:m:f:n:s:o:e:z:?") ) != EOF) { + + while ((c =getopt(argc, argv,"b:c:r:w:h:k:g:m:f:n:s:o:e:z:?") ) != EOF) { switch (c) { case 'w': - gWidth = atoi(optarg); - gStride = gWidth; + gSrcWidth = atoi(optarg); + gSrcStride = gSrcWidth; break; case 'h': - gHeight = atoi(optarg); + gSrcHeight = atoi(optarg); break; case 'n': gEncFrames = atoi(optarg); @@ -767,28 +753,34 @@ int CheckArgs(int argc, char* argv[]) break; case '?': default: - printf("\n./mix_encode -c -b -r -w -h -k -g -n -m -s -f \n"); + printf("\n./mix_encode -c -b 
-r -w -h -k -g -n -m -s -f \n"); printf("\nCodec:\n"); printf("0: H264 (default)\n1: MPEG4\n2: H263\n"); printf("\nRate control:\n"); printf("0: NO_RC \n1: CBR (default)\n2: VBR\n3: VCM\n"); printf("\nMode:\n"); - printf("0: Camera malloc (default)\n1: WiDi clone\n2: WiDi ext\n3: WiDi user\n4: Raw\n5: GrallocSource(Composer)\n6: GrallocSource(Gralloc)\n"); - exit(0); + printf("0: Camera malloc (default)\n1: WiDi clone\n2: WiDi ext\n3: WiDi user\n4: Raw\n5: GrallocSource(Composer)\n6: GrallocSource(Gralloc)\n7: GrallocSource(Camera)\n"); + exit(0); } } - if (gMode == 5 || gMode == 6) + if (gEncodeWidth == 0 || gEncodeHeight == 0) { - gWidth = ((gWidth + 15 ) / 16 ) * 16; - gHeight = ((gHeight + 15 ) / 16 ) * 16; + gEncodeWidth = gSrcWidth; + gEncodeHeight = gSrcHeight; } - if (gEncodeWidth == 0 || gEncodeHeight == 0) + gSrcWidth = ((gSrcWidth + 15 ) / 16 ) * 16; + gSrcHeight = ((gSrcHeight + 15 ) / 16 ) * 16; + gSrcStride = gSrcWidth; + + if (gMode == 4) { - gEncodeWidth = gWidth; - gEncodeHeight = gHeight; + gEncodeWidth = gSrcWidth; + gEncodeHeight = gSrcHeight; + gSrcStride = gSrcWidth; } + return 0; } @@ -844,7 +836,7 @@ int main(int argc, char* argv[]) break; case 3: gRC = RATE_CONTROL_VCM; - break; + break; default: printf("Not support this rate control mode\n"); return 1; @@ -884,7 +876,7 @@ int main(int argc, char* argv[]) else printf("\nStart codec is null only for code coverage test ....\n"); //add for video encode libmix code coverage test--end - printf("Mode is %s, RC mode is %s, Width=%d, Height=%d, Bitrate=%dbps, EncodeFrames=%d, SyncMode=%d, file is %s, outputnalformat is %s\n\n", gModeString[gMode], gRCModeString[gRCMode], gWidth, gHeight, gBitrate, gEncFrames, gSyncEncMode, gFile,gOutPutFormatString[gOutPutFormat]); + printf("Mode is %s, RC mode is %s, Width=%d, Height=%d, Bitrate=%dbps, EncodeFrames=%d, SyncMode=%d, file is %s, outputnalformat is %s\n\n", gModeString[gMode], gRCModeString[gRCMode], gSrcWidth, gSrcHeight, gBitrate, gEncFrames, gSyncEncMode, gFile,gOutPutFormatString[gOutPutFormat]); //sleep(10); @@ -926,15 +918,18 @@ for(int i=0; i<1; i++) MallocExternalMemory(); break; case 5: //SurfaceMediaSource - CreateGfxhandle(); + CreateGfxhandle(1); break; case 6: //Gralloc - CreateGralloc(); + CreateGralloc(1); + break; + case 7: //Gralloc nv12 format + CreateGfxhandle(0); break; - case 7: //SurfaceMappingForCI + case 8: //SurfaceMappingForCI CreateSurfaceMappingForCI(); break; - case 8: //Camera malloc with extra values + case 9: //Camera malloc with extra values MallocExternalMemoryWithExtraValues(); break; default: @@ -942,9 +937,10 @@ for(int i=0; i<1; i++) } //upload src data + printf("Fill src picture width=%d, Height=%d\n", gSrcStride, gSrcHeight); for(int i=0; istart(); CHECK_ENCODE_STATUS("start"); @@ -958,7 +954,7 @@ for(int i=0; i<1; i++) } //input buffers - VideoEncRawBuffer InBuf; + VideoEncRawBuffer InBuf; uint8_t *data; uint32_t size; @@ -977,7 +973,7 @@ for(int i=0; i<1; i++) OutBuf.data = out; OutBuf.format = goutputformat; - printf("\n"); + printf("\n"); for(unsigned int i=0; igetOutput(&OutBuf); fclose(file); - + gVideoEncoder->stop(); releaseVideoEncoder(gVideoEncoder); gVideoEncoder = NULL; - + switch(gMode) { case 0: //camera malloc @@ -1040,14 +1036,14 @@ for(int i=0; i<1; i++) case 6: //Gralloc buffer_handle_t handle; for(int i=0; iGetValue((int32_t&)handle); gfx_free(handle); } - } - break; + } + break; } for(int i=0; i #include +#include // API declaration extern "C" { @@ -1452,25 +1453,21 @@ Encode_Status 
VideoEncoderBase::surfaceMappingForGfxHandle(SurfaceMap *map) { LOG_I("gfxhandle = %d\n", map->value); vaSurfaceAttrib.count = 1; - // OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar - if (mComParams.resolution.width <= 512) - vaSurfaceAttrib.luma_stride = 512; - else if (mComParams.resolution.width > 512 && mComParams.resolution.width <= 1024) - vaSurfaceAttrib.luma_stride = 1024; - else if (mComParams.resolution.width > 1024 && mComParams.resolution.width <= 1280) - vaSurfaceAttrib.luma_stride = 1280; - else - vaSurfaceAttrib.luma_stride = 2048; - vaSurfaceAttrib.pixel_format = map->vinfo.format; - vaSurfaceAttrib.width = mComParams.resolution.width; - vaSurfaceAttrib.height = mComParams.resolution.height; + // color fmrat may be OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar or HAL_PIXEL_FORMAT_NV12 + IMG_native_handle_t* h = (IMG_native_handle_t*) map->value; + LOG_I("IMG_native_handle_t h->iWidth=%d, h->iHeight=%d, h->iFormat=%x\n", h->iWidth, h->iHeight, h->iFormat); + + vaSurfaceAttrib.luma_stride = h->iWidth; + vaSurfaceAttrib.pixel_format = h->iFormat; + vaSurfaceAttrib.width = h->iWidth; + vaSurfaceAttrib.height = h->iHeight; vaSurfaceAttrib.type = VAExternalMemoryAndroidGrallocBuffer; vaSurfaceAttrib.buffers[0] = (uint32_t) map->value; vaStatus = vaCreateSurfacesWithAttribute( mVADisplay, - map->vinfo.width, - map->vinfo.height, + h->iWidth, + h->iHeight, VA_RT_FORMAT_YUV420, 1, &surface, -- cgit v1.2.3 From 94f6386e728295a4bc7c4d33f3b3f386c15c5f43 Mon Sep 17 00:00:00 2001 From: jiguoliang Date: Sat, 5 Jan 2013 16:42:34 -0500 Subject: update new rule for IpPeriod/IntraPeriod/IdrInterval and enable auto reference feature BZ: 87331 1. refine frame_type detection including 11 cases{I(IDR), I(IDR)+P, I(IDR)+P+B}, 2. query profile configure, 3. query auto reference configure, enhance it 4. change the profile level default value, and change the ipPeroid default value. 
Change-Id: Ia3dc3d376c557219c2382c1590c4624f4856a7e8 Signed-off-by: jiguoliang Reviewed-on: http://android.intel.com:8080/85120 Reviewed-by: cactus Reviewed-by: Yuan, Shengquan Reviewed-by: Zhao, Leo Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderAVC.cpp | 50 +++++++++++---- videoencoder/VideoEncoderBase.cpp | 126 +++++++++++++++++++++++++++++++------- videoencoder/VideoEncoderBase.h | 10 ++- videoencoder/VideoEncoderDef.h | 16 ++++- videoencoder/VideoEncoderH263.cpp | 11 +++- videoencoder/VideoEncoderMP4.cpp | 11 +++- 6 files changed, 182 insertions(+), 42 deletions(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 4c2661a..ae0293b 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -15,6 +15,13 @@ VideoEncoderAVC::VideoEncoderAVC() :VideoEncoderBase() { + if(VideoEncoderBase::queryProfileLevelConfig(mVADisplay, VAProfileH264High) == ENCODE_SUCCESS){ + mComParams.profile = VAProfileH264High; + mComParams.level = 42; + }else if(VideoEncoderBase::queryProfileLevelConfig(mVADisplay, VAProfileH264Main) == ENCODE_SUCCESS){ + mComParams.profile = VAProfileH264Main; + mComParams.level = 41; + } mVideoParamsAVC.basicUnitSize = 0; mVideoParamsAVC.VUIFlag = 0; mVideoParamsAVC.sliceNum.iSliceNum = 2; @@ -30,6 +37,7 @@ VideoEncoderAVC::VideoEncoderAVC() mVideoParamsAVC.crop.BottomOffset = 0; mVideoParamsAVC.SAR.SarWidth = 0; mVideoParamsAVC.SAR.SarHeight = 0; + mAutoReferenceSurfaceNum = 4; } Encode_Status VideoEncoderAVC::start() { @@ -60,6 +68,12 @@ Encode_Status VideoEncoderAVC::derivedSetParams(VideoParamConfigSet *videoEncPar return ENCODE_INVALID_PARAMS; } + if(encParamsAVC->ipPeriod == 0 || encParamsAVC->ipPeriod >4) + return ENCODE_INVALID_PARAMS; + + if((mComParams.intraPeriod >1)&&(mComParams.intraPeriod % encParamsAVC->ipPeriod !=0)) + return ENCODE_INVALID_PARAMS; + mVideoParamsAVC = *encParamsAVC; return ENCODE_SUCCESS; } @@ -94,6 +108,11 @@ Encode_Status VideoEncoderAVC::derivedSetConfig(VideoParamConfigSet *videoEncCon return ENCODE_INVALID_PARAMS; } + if(configAVCIntraPeriod->ipPeriod == 0 || configAVCIntraPeriod->ipPeriod >4) + return ENCODE_INVALID_PARAMS; + if((configAVCIntraPeriod->intraPeriod >1)&&(configAVCIntraPeriod->intraPeriod % configAVCIntraPeriod->ipPeriod !=0)) + return ENCODE_INVALID_PARAMS; + mVideoParamsAVC.idrInterval = configAVCIntraPeriod->idrInterval; mVideoParamsAVC.ipPeriod = configAVCIntraPeriod->ipPeriod; mComParams.intraPeriod = configAVCIntraPeriod->intraPeriod; @@ -199,29 +218,34 @@ Encode_Status VideoEncoderAVC::updateFrameInfo(EncodeTask* task) { uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval; FrameType frametype; uint32_t frame_num = mFrameNum; + uint32_t intraPeriod = mComParams.intraPeriod; - if (mVideoParamsAVC.idrInterval != 0) { + if (idrPeroid != 0) { if(mVideoParamsAVC.ipPeriod > 1) frame_num = frame_num % (idrPeroid + 1); - else if(mComParams.intraPeriod != 0) + else frame_num = frame_num % idrPeroid ; + }else{ + if (mComParams.intraPeriod == 0) + intraPeriod = 0xFFFFFFFF; } + if(frame_num ==0){ frametype = FTYPE_IDR; - }else if(mComParams.intraPeriod ==0) + }else if(intraPeriod ==1) // only I frame need intraPeriod=idrInterval=ipPeriod=0 frametype = FTYPE_I; else if(mVideoParamsAVC.ipPeriod == 1){ // no B frame - if(mComParams.intraPeriod != 0 && (frame_num > 1) &&((frame_num -1)%mComParams.intraPeriod == 0)) + if((frame_num > 1) &&((frame_num -1)%intraPeriod == 0)) frametype 
= FTYPE_I; else frametype = FTYPE_P; - } else { - if(mComParams.intraPeriod != 0 &&((frame_num-1)%mComParams.intraPeriod == 0)&&(frame_num >mComParams.intraPeriod)) + } else { + if(((frame_num-1)%intraPeriod == 0)&&(frame_num >intraPeriod)) frametype = FTYPE_I; else{ - frame_num = frame_num%mComParams.intraPeriod; + frame_num = frame_num%intraPeriod; if(frame_num == 0) frametype = FTYPE_B; else if((frame_num-1)%mVideoParamsAVC.ipPeriod == 0) @@ -887,17 +911,19 @@ Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) { LOG_V( "Begin\n\n"); // set picture params for HW - avcPicParams.ReferenceFrames[0].picture_id= task->ref_surface[0]; - avcPicParams.CurrPic.picture_id= task->rec_surface; + if(mAutoReference == false){ + avcPicParams.ReferenceFrames[0].picture_id= task->ref_surface; + avcPicParams.CurrPic.picture_id= task->rec_surface; + }else { + for(int i =0; i< mAutoReferenceSurfaceNum; i++) + avcPicParams.ReferenceFrames[i].picture_id = mAutoRefSurfaces[i]; + } avcPicParams.coded_buf = task->coded_buffer; - //avcPicParams.picture_width = mComParams.resolution.width; - //avcPicParams.picture_height = mComParams.resolution.height; avcPicParams.last_picture = 0; LOG_V("======h264 picture params======\n"); LOG_I( "reference_picture = 0x%08x\n", avcPicParams.ReferenceFrames[0].picture_id); LOG_I( "reconstructed_picture = 0x%08x\n", avcPicParams.CurrPic.picture_id); -// LOG_I( "coded_buf_index = %d\n", mCodedBufIndex); LOG_I( "coded_buf = 0x%08x\n", avcPicParams.coded_buf); //LOG_I( "picture_width = %d\n", avcPicParams.picture_width); //LOG_I( "picture_height = %d\n\n", avcPicParams.picture_height); diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 0ea91c9..d0a4d89 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -41,7 +41,6 @@ VideoEncoderBase::VideoEncoderBase() ,mVAEntrypoint(VAEntrypointEncSlice) ,mCodedBufSize(0) ,mNewHeader(false) - //,mAutoReference(17 /*VAConfigAttribEncAutoReference*/) ,mRenderMaxSliceSize(false) ,mRenderQP (false) ,mRenderAIR(false) @@ -53,7 +52,10 @@ VideoEncoderBase::VideoEncoderBase() ,mSliceParamBuf(0) ,mRefSurface(VA_INVALID_SURFACE) ,mRecSurface(VA_INVALID_SURFACE) + ,mAutoRefSurfaces(NULL) ,mFrameNum(0) + ,mAutoReference(false) + ,mAutoReferenceSurfaceNum(4) ,mSliceSizeOverflow(false) ,mCurOutputTask(NULL) ,mOutCodedBuffer(0) @@ -85,7 +87,6 @@ VideoEncoderBase::VideoEncoderBase() LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus); mInitialized = false; } - } VideoEncoderBase::~VideoEncoderBase() { @@ -118,25 +119,32 @@ Encode_Status VideoEncoderBase::start() { return ENCODE_ALREADY_INIT; } - VAConfigAttrib vaAttrib[2]; + queryAutoReferenceConfig(mComParams.profile); + + VAConfigAttrib vaAttrib[3]; vaAttrib[0].type = VAConfigAttribRTFormat; vaAttrib[1].type = VAConfigAttribRateControl; + vaAttrib[2].type = VAConfigAttribEncAutoReference; vaAttrib[0].value = VA_RT_FORMAT_YUV420; vaAttrib[1].value = mComParams.rcMode; + vaAttrib[2].value = mAutoReference ? 
1 : VA_ATTRIB_NOT_SUPPORTED; LOG_V( "======VA Configuration======\n"); LOG_I( "profile = %d\n", mComParams.profile); LOG_I( "mVAEntrypoint = %d\n", mVAEntrypoint); LOG_I( "vaAttrib[0].type = %d\n", vaAttrib[0].type); LOG_I( "vaAttrib[1].type = %d\n", vaAttrib[1].type); + LOG_I( "vaAttrib[2].type = %d\n", vaAttrib[2].type); LOG_I( "vaAttrib[0].value (Format) = %d\n", vaAttrib[0].value); LOG_I( "vaAttrib[1].value (RC mode) = %d\n", vaAttrib[1].value); + LOG_I( "vaAttrib[2].value (AutoReference) = %d\n", vaAttrib[2].value); LOG_V( "vaCreateConfig\n"); vaStatus = vaCreateConfig( mVADisplay, mComParams.profile, mVAEntrypoint, &vaAttrib[0], 2, &(mVAConfig)); +// &vaAttrib[0], 3, &(mVAConfig)); //uncomment this after psb_video supports CHECK_VA_STATUS_RETURN("vaCreateConfig"); if (mComParams.rcMode == VA_RC_VCM) { @@ -150,7 +158,12 @@ Encode_Status VideoEncoderBase::start() { VASurfaceID surfaces[2]; VASurfaceAttributeTPI attribute_tpi; - uint32_t stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; + uint32_t stride_aligned; + if(mAutoReference == false) + stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; + else + stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; //on Merr, stride must be 64 aligned. + uint32_t height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; attribute_tpi.size = stride_aligned * height_aligned * 3 / 2; @@ -163,21 +176,25 @@ Encode_Status VideoEncoderBase::start() { attribute_tpi.pixel_format = VA_FOURCC_NV12; attribute_tpi.type = VAExternalMemoryNULL; -#ifndef AUTO_REFERENCE + if(mAutoReference == false){ vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned, VA_RT_FORMAT_YUV420, 2, surfaces, &attribute_tpi); CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); mRefSurface = surfaces[0]; mRecSurface = surfaces[1]; -#endif + }else { + mAutoRefSurfaces = new VASurfaceID [mAutoReferenceSurfaceNum]; + vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned, + VA_RT_FORMAT_YUV420, mAutoReferenceSurfaceNum, mAutoRefSurfaces, &attribute_tpi); + CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); + } //Prepare all Surfaces to be added into Context uint32_t contextSurfaceCnt; -#ifndef AUTO_REFERENCE + if(mAutoReference == false ) contextSurfaceCnt = 2 + mSrcSurfaceMapList.size(); -#else - contextSurfaceCnt = mSrcSurfaceMapList.size(); -#endif + else + contextSurfaceCnt = mAutoReferenceSurfaceNum + mSrcSurfaceMapList.size(); VASurfaceID *contextSurfaces = new VASurfaceID[contextSurfaceCnt]; int32_t index = -1; @@ -189,10 +206,13 @@ Encode_Status VideoEncoderBase::start() { (*map_node)->added = true; } -#ifndef AUTO_REFERENCE + if(mAutoReference == false){ contextSurfaces[++index] = mRefSurface; contextSurfaces[++index] = mRecSurface; -#endif + } else { + for (int i=0; i < mAutoReferenceSurfaceNum; i++) + contextSurfaces[++index] = mAutoRefSurfaces[i]; + } //Initialize and save the VA context ID LOG_V( "vaCreateContext\n"); @@ -288,14 +308,14 @@ Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer, uint32_t tim task->enc_surface = sid; task->coded_buffer = coded_buf; task->timestamp = inBuffer->timeStamp; - task->in_data = inBuffer->data; + task->priv = inBuffer->priv; //Setup frame info, like flag ( SYNCFRAME), frame number, type etc task->type = inBuffer->type; task->flag = inBuffer->flag; PrepareFrameInfo(task); -#ifndef AUTO_REFERENCE + if(mAutoReference == false){ //Setup ref /rec frames //TODO: B frame support, temporary use same logic switch (inBuffer->type) { 
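A hedged sketch of the sizing rules VideoEncoderBase::start() applies after this commit; the names mirror the hunk above, and the 64-byte stride is the Merrifield constraint noted in its comment:

    uint32_t align          = mAutoReference ? 64 : 16;
    uint32_t stride_aligned = ((mComParams.resolution.width + align - 1) / align) * align;
    uint32_t height_aligned = ((mComParams.resolution.height + 15) / 16) * 16;
    uint32_t nv12_size      = stride_aligned * height_aligned * 3 / 2;  // Y plane + interleaved UV
    // e.g. width 1300 -> stride 1312 when 16-aligned, 1344 when 64-aligned

    // The VA context then holds either the two explicit ref/rec surfaces or
    // the driver-managed auto-reference pool, plus every mapped source surface:
    uint32_t contextSurfaceCnt = (mAutoReference ? mAutoReferenceSurfaceNum : 2)
                               + mSrcSurfaceMapList.size();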
@@ -310,8 +330,7 @@ Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer, uint32_t tim mRefSurface = tmpSurface; } - task->ref_surface[0] = mRefSurface; - task->ref_surface[1] = VA_INVALID_SURFACE; + task->ref_surface = mRefSurface; task->rec_surface = mRecSurface; break; @@ -322,12 +341,10 @@ Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer, uint32_t tim ret = ENCODE_NOT_SUPPORTED; goto CLEAN_UP; } -#else - task->ref_surface[0] = VA_INVALID_SURFACE; - task->ref_surface[1] = VA_INVALID_SURFACE; + }else { + task->ref_surface = VA_INVALID_SURFACE; task->rec_surface = VA_INVALID_SURFACE; -#endif - + } //======Start Encoding, add task to list====== LOG_V("Start Encoding vaSurface=0x%08x\n", task->enc_surface); @@ -464,6 +481,7 @@ Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint3 outBuffer->flag = mCurOutputTask->flag; outBuffer->type = mCurOutputTask->type; outBuffer->timeStamp = mCurOutputTask->timestamp; + outBuffer->priv = mCurOutputTask->priv; if (outBuffer->format == OUTPUT_EVERYTHING || outBuffer->format == OUTPUT_FRAME_DATA) { ret = outputAllData(outBuffer); @@ -530,6 +548,10 @@ Encode_Status VideoEncoderBase::stop() { LOG_V("Encoder has been stopped\n"); return ENCODE_SUCCESS; } + if (mAutoRefSurfaces) { + delete[] mAutoRefSurfaces; + mAutoRefSurfaces = NULL; + } mCodedBuffer_Lock.lock(); mVACodedBufferList.clear(); @@ -698,6 +720,42 @@ Encode_Status VideoEncoderBase::cleanupForOutput() { return ENCODE_SUCCESS; } +Encode_Status VideoEncoderBase::queryProfileLevelConfig(VADisplay dpy, VAProfile profile) { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEntrypoint entryPtr[8]; + int i, entryPtrNum; + + if(profile == VAProfileH264Main) //need to be fixed + return ENCODE_NOT_SUPPORTED; + + vaStatus = vaQueryConfigEntrypoints(dpy, profile, entryPtr, &entryPtrNum); + CHECK_VA_STATUS_RETURN("vaQueryConfigEntrypoints"); + + for(i=0; i (videoEncParams); + + if (profilelevel->size != sizeof (VideoParamsProfileLevel)) { + return ENCODE_INVALID_PARAMS; + } + + profilelevel->level = 0; + if(queryProfileLevelConfig(mVADisplay, profilelevel->profile) == ENCODE_SUCCESS){ + profilelevel->isSupported = true; + if(profilelevel->profile == VAProfileH264High) + profilelevel->level = 42; + else if(profilelevel->profile == VAProfileH264Main) + profilelevel->level = 42; + else if(profilelevel->profile == VAProfileH264Baseline) + profilelevel->level = 41; + else{ + profilelevel->level = 0; + profilelevel->isSupported = false; + } + } + } + case VideoParamsTypeAVC: case VideoParamsTypeH263: case VideoParamsTypeMP4: diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 924c4da..554173e 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -17,7 +17,6 @@ #include #include -//#define AUTO_REFERENCE struct SurfaceMap { VASurfaceID surface; MetadataBufferType type; @@ -28,14 +27,14 @@ struct SurfaceMap { struct EncodeTask { VASurfaceID enc_surface; - VASurfaceID ref_surface[2]; + VASurfaceID ref_surface; VASurfaceID rec_surface; VABufferID coded_buffer; FrameType type; int flag; int64_t timestamp; //corresponding input frame timestamp - uint8_t *in_data; //input buffer data + void *priv; //input buffer data bool completed; //if encode task is done complet by HW }; @@ -77,6 +76,7 @@ protected: Encode_Status renderDynamicFrameRate(); Encode_Status renderDynamicBitrate(); Encode_Status renderHrd(); + Encode_Status queryProfileLevelConfig(VADisplay dpy, VAProfile profile); private: void 
setDefaultParams(void); @@ -96,6 +96,7 @@ private: Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer); Encode_Status cleanupForOutput(); Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer); + Encode_Status queryAutoReferenceConfig(VAProfile profile); protected: @@ -125,6 +126,7 @@ protected: VABufferID mFrameRateParamBuf; VABufferID mPicParamBuf; VABufferID mSliceParamBuf; + VASurfaceID* mAutoRefSurfaces; android::List mSrcSurfaceMapList; //all mapped surface info list from input buffer android::List mEncodeTaskList; //all encode tasks list @@ -134,6 +136,8 @@ protected: VASurfaceID mRecSurface; //reconstructed surface, only used in base uint32_t mFrameNum; uint32_t mCodedBufSize; + bool mAutoReference; + uint32_t mAutoReferenceSurfaceNum; bool mSliceSizeOverflow; diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index f5174aa..b1674be 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -142,7 +142,7 @@ typedef struct { VideoOutputFormat format; //output format int64_t timeStamp; //reserved FrameType type; - uint8_t *in_data; //indicate corresponding input data + void *priv; //indicate corresponding input data } VideoEncOutputBuffer; typedef struct { @@ -152,6 +152,7 @@ typedef struct { int64_t timeStamp; //reserved FrameType type; //frame type expected to be encoded int flag; // flag to indicate buffer property + void *priv; //indicate corresponding input data } VideoEncRawBuffer; struct VideoEncSurfaceBuffer { @@ -288,6 +289,7 @@ enum VideoParamConfigType { VideoParamsTypeUsrptrBuffer, VideoParamsTypeHRD, VideoParamsTypeStoreMetaDataInBuffers, + VideoParamsTypeProfileLevel, VideoConfigTypeFrameRate, VideoConfigTypeBitRate, @@ -476,6 +478,18 @@ struct VideoParamsStoreMetaDataInBuffers : VideoParamConfigSet { bool isEnabled; }; +struct VideoParamsProfileLevel : VideoParamConfigSet { + + VideoParamsProfileLevel() { + type = VideoParamsTypeProfileLevel; + size = sizeof(VideoParamsProfileLevel); + } + + VAProfile profile; + uint32_t level; + bool isSupported; +}; + struct VideoConfigFrameRate : VideoParamConfigSet { VideoConfigFrameRate() { diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp index b9b9c99..8891c06 100644 --- a/videoencoder/VideoEncoderH263.cpp +++ b/videoencoder/VideoEncoderH263.cpp @@ -14,6 +14,7 @@ VideoEncoderH263::VideoEncoderH263() { mComParams.profile = (VAProfile)PROFILE_H263BASELINE; + mAutoReferenceSurfaceNum = 2; } Encode_Status VideoEncoderH263::sendEncodeCommand(EncodeTask *task) { @@ -86,8 +87,14 @@ Encode_Status VideoEncoderH263::renderPictureParams(EncodeTask *task) { LOG_V( "Begin\n\n"); // set picture params for HW - h263PictureParams.reference_picture = task->ref_surface[0]; - h263PictureParams.reconstructed_picture = task->rec_surface; + if(mAutoReference == false){ + h263PictureParams.reference_picture = task->ref_surface; + h263PictureParams.reconstructed_picture = task->rec_surface; + }else { + h263PictureParams.reference_picture = mAutoRefSurfaces[0]; + h263PictureParams.reconstructed_picture = mAutoRefSurfaces[1]; + } + h263PictureParams.coded_buf = task->coded_buffer; h263PictureParams.picture_width = mComParams.resolution.width; h263PictureParams.picture_height = mComParams.resolution.height; diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp index 8afb215..50abc68 100644 --- a/videoencoder/VideoEncoderMP4.cpp +++ b/videoencoder/VideoEncoderMP4.cpp @@ -17,6 +17,7 @@ 
VideoEncoderMP4::VideoEncoderMP4() :mProfileLevelIndication(3) ,mFixedVOPTimeIncrement(0) { mComParams.profile = (VAProfile)PROFILE_MPEG4SIMPLE; + mAutoReferenceSurfaceNum = 2; } Encode_Status VideoEncoderMP4::getHeaderPos( @@ -175,8 +176,14 @@ Encode_Status VideoEncoderMP4::renderPictureParams(EncodeTask *task) { LOG_V( "Begin\n\n"); // set picture params for HW - mpeg4_pic_param.reference_picture = task->ref_surface[0]; - mpeg4_pic_param.reconstructed_picture = task->rec_surface; + if(mAutoReference == false){ + mpeg4_pic_param.reference_picture = task->ref_surface; + mpeg4_pic_param.reconstructed_picture = task->rec_surface; + }else { + mpeg4_pic_param.reference_picture = mAutoRefSurfaces[0]; + mpeg4_pic_param.reconstructed_picture = mAutoRefSurfaces[1]; + } + mpeg4_pic_param.coded_buf = task->coded_buffer; mpeg4_pic_param.picture_width = mComParams.resolution.width; mpeg4_pic_param.picture_height = mComParams.resolution.height; -- cgit v1.2.3 From 6173b1e3dc70776b6e4d0f4372f020b19633077f Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Wed, 20 Feb 2013 14:14:43 +0800 Subject: Fix Merrifield stride 64 alignment issue, copy data into alternative buffer BZ: 87174 on Merrifield Camera video capture, if find input frame stride is not 64 aligned, allocate alternative surface internally, and copy data line by line from camera buffer. Currently it is only enabled for Camera Malloc buffer mode. Change-Id: I67ceb87d799664e7bc79e9c74293e156a7795c4f Reviewed-on: http://android.intel.com:8080/92706 Reviewed-by: Zhao, Leo Reviewed-by: cactus Reviewed-by: Yuan, Shengquan Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 153 +++++++++++++++++++++++++++++++++++++- videoencoder/VideoEncoderBase.h | 2 + 2 files changed, 152 insertions(+), 3 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index d0a4d89..b934a5e 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -575,6 +575,8 @@ Encode_Status VideoEncoderBase::stop() { if (! 
(*mSrcSurfaceMapList.begin())->added) { LOG_V( "Rlease the Src Surface Buffer not added into vaContext\n"); vaDestroySurfaces(mVADisplay, &((*mSrcSurfaceMapList.begin())->surface), 1); + if ((*mSrcSurfaceMapList.begin())->surface_backup != VA_INVALID_SURFACE) + vaDestroySurfaces(mVADisplay, &((*mSrcSurfaceMapList.begin())->surface_backup), 1); } delete (*mSrcSurfaceMapList.begin()); mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin()); @@ -1355,6 +1357,7 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( } map->surface = surface; + map->surface_backup = VA_INVALID_SURFACE; map->type = MetadataBufferTypeEncoder; map->value = (int32_t)*usrptr; map->vinfo.mode = (MemMode)MEM_MODE_USRPTR; @@ -1421,6 +1424,7 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS //wrap upstream buffer into vaSurface SurfaceMap *map = new SurfaceMap; + map->surface_backup = VA_INVALID_SURFACE; map->type = MetadataBufferTypeUser; map->value = upStreamBuffer->bufList[i]; map->vinfo.mode = (MemMode)upStreamBuffer->bufferMode; @@ -1671,9 +1675,35 @@ Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromMalloc"); - LOG_I("Surface ID created from Malloc = 0x%08x", map->value); + LOG_I("Surface ID created from Malloc = 0x%08x\n", map->value); + + //Merrifield limitation, should use mAutoReference to check if on Merr + if ( (mAutoReference == false) || (map->vinfo.lumaStride % 64 == 0) ) + map->surface = surface; + else { + map->surface_backup = surface; + + VASurfaceID surfaceId; + VASurfaceAttributeTPI attribute_tpi; + uint32_t stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; + + attribute_tpi.size = stride_aligned * mComParams.resolution.height * 3 / 2; + attribute_tpi.luma_stride = stride_aligned; + attribute_tpi.chroma_u_stride = stride_aligned; + attribute_tpi.chroma_v_stride = stride_aligned; + attribute_tpi.luma_offset = 0; + attribute_tpi.chroma_u_offset = stride_aligned * mComParams.resolution.height; + attribute_tpi.chroma_v_offset = stride_aligned * mComParams.resolution.height; + attribute_tpi.pixel_format = VA_FOURCC_NV12; + attribute_tpi.type = VAExternalMemoryNULL; + + vaCreateSurfacesWithAttribute(mVADisplay, mComParams.resolution.width, mComParams.resolution.height, + VA_RT_FORMAT_YUV420, 1, &surfaceId, &attribute_tpi); + CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); - map->surface = surface; + map->surface = surfaceId; + LOG_E("Due to 64 alignment, an alternative Surface ID 0x%08x created\n", surfaceId); + } return ret; } @@ -1756,7 +1786,11 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA //has mapped, get surfaceID directly LOG_I("direct find surface %d from value %x\n", map->surface, value); *sid = map->surface; - + if (map->surface_backup != VA_INVALID_SURFACE) { + //need to copy data + LOG_I("Need copy surfaces from %x to %x\n", map->surface_backup, *sid); + ret = copySurfaces(map->surface_backup, *sid); + } return ret; } @@ -1802,6 +1836,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA if (pvinfo){ //map according info, and add to surfacemap list map = new SurfaceMap; + map->surface_backup = VA_INVALID_SURFACE; map->type = type; map->value = value; memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo)); @@ -1818,6 +1853,11 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA } *sid = map->surface; + if (map->surface_backup != VA_INVALID_SURFACE) { + //need to copy data + 
LOG_I("Need copy surfaces from %x to %x\n", map->surface_backup, *sid); + ret = copySurfaces(map->surface_backup, *sid); + } } else { //can't map due to no info @@ -1829,6 +1869,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA //map more using same ValueInfo for(unsigned int i=0; isurface_backup = VA_INVALID_SURFACE; map->type = type; map->value = extravalues[i]; memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo)); @@ -1986,3 +2027,109 @@ SurfaceMap *VideoEncoderBase::findSurfaceMapByValue(int32_t value) { return NULL; } + +Encode_Status VideoEncoderBase::copySurfaces(VASurfaceID srcId, VASurfaceID destId) { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + + uint32_t width = mComParams.resolution.width; + uint32_t height = mComParams.resolution.height; + + uint32_t i, j; + + VAImage srcImage, destImage; + uint8_t *pSrcBuffer, *pDestBuffer; + + uint8_t *srcY, *dstY; + uint8_t *srcU, *srcV; + uint8_t *srcUV, *dstUV; + + LOG_I("src Surface ID = 0x%08x, dest Surface ID = 0x%08x\n", (uint32_t) srcId, (uint32_t) destId); + + vaStatus = vaDeriveImage(mVADisplay, srcId, &srcImage); + CHECK_VA_STATUS_RETURN("vaDeriveImage"); + vaStatus = vaMapBuffer(mVADisplay, srcImage.buf, (void **)&pSrcBuffer); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + LOG_V("Src Image information\n"); + LOG_I("srcImage.pitches[0] = %d\n", srcImage.pitches[0]); + LOG_I("srcImage.pitches[1] = %d\n", srcImage.pitches[1]); + LOG_I("srcImage.offsets[0] = %d\n", srcImage.offsets[0]); + LOG_I("srcImage.offsets[1] = %d\n", srcImage.offsets[1]); + LOG_I("srcImage.num_planes = %d\n", srcImage.num_planes); + LOG_I("srcImage.width = %d\n", srcImage.width); + LOG_I("srcImage.height = %d\n", srcImage.height); + + vaStatus = vaDeriveImage(mVADisplay, destId, &destImage); + CHECK_VA_STATUS_RETURN("vaDeriveImage"); + vaStatus = vaMapBuffer(mVADisplay, destImage.buf, (void **)&pDestBuffer); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + LOG_V("Dest Image information\n"); + LOG_I("destImage.pitches[0] = %d\n", destImage.pitches[0]); + LOG_I("destImage.pitches[1] = %d\n", destImage.pitches[1]); + LOG_I("destImage.offsets[0] = %d\n", destImage.offsets[0]); + LOG_I("destImage.offsets[1] = %d\n", destImage.offsets[1]); + LOG_I("destImage.num_planes = %d\n", destImage.num_planes); + LOG_I("destImage.width = %d\n", destImage.width); + LOG_I("destImage.height = %d\n", destImage.height); + + if (mComParams.rawFormat == RAW_FORMAT_YUV420) { + + srcY = pSrcBuffer +srcImage.offsets[0]; + srcU = pSrcBuffer + srcImage.offsets[1]; + srcV = pSrcBuffer + srcImage.offsets[2]; + dstY = pDestBuffer + destImage.offsets[0]; + dstUV = pDestBuffer + destImage.offsets[1]; + + for (i = 0; i < height; i ++) { + memcpy(dstY, srcY, width); + srcY += srcImage.pitches[0]; + dstY += destImage.pitches[0]; + } + + for (i = 0; i < height / 2; i ++) { + for (j = 0; j < width; j+=2) { + dstUV [j] = srcU [j / 2]; + dstUV [j + 1] = srcV [j / 2]; + } + srcU += srcImage.pitches[1]; + srcV += srcImage.pitches[2]; + dstUV += destImage.pitches[1]; + } + }else if (mComParams.rawFormat == RAW_FORMAT_NV12) { + + srcY = pSrcBuffer + srcImage.offsets[0]; + dstY = pDestBuffer + destImage.offsets[0]; + srcUV = pSrcBuffer + srcImage.offsets[1]; + dstUV = pDestBuffer + destImage.offsets[1]; + + for (i = 0; i < height; i++) { + memcpy(dstY, srcY, width); + srcY += srcImage.pitches[0]; + dstY += destImage.pitches[0]; + } + + for (i = 0; i < height / 2; i++) { + memcpy(dstUV, srcUV, width); + srcUV += srcImage.pitches[1]; + dstUV += destImage.pitches[1]; + } + } else { 
+ LOG_E("Raw format not supoort\n"); + return ENCODE_FAIL; + } + + vaStatus = vaUnmapBuffer(mVADisplay, srcImage.buf); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + vaStatus = vaDestroyImage(mVADisplay, srcImage.image_id); + CHECK_VA_STATUS_RETURN("vaDestroyImage"); + + vaStatus = vaUnmapBuffer(mVADisplay, destImage.buf); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + vaStatus = vaDestroyImage(mVADisplay, destImage.image_id); + CHECK_VA_STATUS_RETURN("vaDestroyImage"); + + return ENCODE_SUCCESS; +} + diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 554173e..764ccce 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -19,6 +19,7 @@ struct SurfaceMap { VASurfaceID surface; + VASurfaceID surface_backup; MetadataBufferType type; int32_t value; ValueInfo vinfo; @@ -97,6 +98,7 @@ private: Encode_Status cleanupForOutput(); Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer); Encode_Status queryAutoReferenceConfig(VAProfile profile); + Encode_Status copySurfaces(VASurfaceID srcId, VASurfaceID destId); protected: -- cgit v1.2.3 From 69bbd387843900a1f52749eb56dd8df3fb0b10b1 Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Wed, 20 Feb 2013 16:13:38 +0800 Subject: libmix: remove target names in makefiles for VP8 BZ: 83713 Remove target names in makefiles. Change-Id: I20a2a64dae6de3ce0f91c24ce7b22577b9482a32 Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/92718 Reviewed-by: cactus Reviewed-by: Feng, Wei Reviewed-by: Fert, Laurent Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/Android.mk | 7 +------ mix_vbp/viddec_fw/fw/parser/Android.mk | 7 +------ videodecoder/Android.mk | 7 +------ 3 files changed, 3 insertions(+), 18 deletions(-) diff --git a/mix_vbp/Android.mk b/mix_vbp/Android.mk index 819ded1..edf358d 100644 --- a/mix_vbp/Android.mk +++ b/mix_vbp/Android.mk @@ -9,11 +9,6 @@ include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/parser/Android include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vc1/parser/Android.mk # Add source codes for Merrifield -MERRIFIELD_DEVICE := \ - mrfl_vp \ - mrfl_hvp \ - mrfl_sle \ - merr_vv -ifneq ($(filter $(REF_DEVICE_NAME),$(MERRIFIELD_DEVICE)),) +ifeq ($(TARGET_BOARD_PLATFORM),merrifield) include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vp8/parser/Android.mk endif diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk index 40410a0..7fba3b2 100644 --- a/mix_vbp/viddec_fw/fw/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -45,12 +45,7 @@ LOCAL_CFLAGS += -DVBP_TRACE LOCAL_SHARED_LIBRARIES += liblog endif -MERRIFIELD_DEVICE := \ - mrfl_vp \ - mrfl_hvp \ - mrfl_sle \ - merr_vv -ifneq ($(filter $(REF_DEVICE_NAME),$(MERRIFIELD_DEVICE)),) +ifeq ($(TARGET_BOARD_PLATFORM),merrifield) LOCAL_SRC_FILES += vbp_vp8_parser.c LOCAL_C_INCLUDES += $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vp8/include LOCAL_CFLAGS += -DUSE_HW_VP8 diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 2998d4e..26a869e 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -45,12 +45,7 @@ LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libva_videodecoder # Add source codes for Merrifield -MERRIFIELD_DEVICE := \ - mrfl_vp \ - mrfl_hvp \ - mrfl_sle \ - merr_vv -ifneq ($(filter $(REF_DEVICE_NAME),$(MERRIFIELD_DEVICE)),) +ifeq ($(TARGET_BOARD_PLATFORM),merrifield) LOCAL_SRC_FILES += VideoDecoderVP8.cpp LOCAL_CFLAGS += -DUSE_HW_VP8 
endif -- cgit v1.2.3 From 07ab17cb9d99cc963d1367409b0eb4f43a4194de Mon Sep 17 00:00:00 2001 From: ywan171 Date: Thu, 21 Feb 2013 15:54:47 +0800 Subject: libmixvbp: optimize the resync marker search for MPEG4 BZ: 87125 optimize the resync marker search for MPEG4 elementary stream to accelerate the parser for multi-slice MPEG4 frame Change-Id: I5ac82f7dbbf0c727ca978b68369c5f9cb4c2652a Signed-off-by: ywan171 Reviewed-on: http://android.intel.com:8080/92695 Reviewed-by: Liang, Dan Reviewed-by: cactus Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: Qiu, Junhai Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c | 35 +++++++++++++++++++++++++-- 1 file changed, 33 insertions(+), 2 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c index 859e817..beb21b7 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c @@ -1021,7 +1021,7 @@ uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index) return ret; } - +#define SEARCH_SYNC_OPT uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) { vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; @@ -1089,6 +1089,7 @@ uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) while (1) { +#ifndef SEARCH_SYNC_OPT getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length); // return VBP_OK as resync_marker may not be present @@ -1100,7 +1101,37 @@ uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) BREAK_GETBITS_FAIL(getbits, ret); continue; } +#else + // read 3 bytes since resync_marker_length is between 17 bits and 23 bits + if (parent->getbits.bstrm_buf.buf_index + 3 > parent->getbits.bstrm_buf.buf_end) + { + break; + } + + code = parent->getbits.bstrm_buf.buf[parent->getbits.bstrm_buf.buf_index] << 16 | + parent->getbits.bstrm_buf.buf[parent->getbits.bstrm_buf.buf_index+1] << 8 | + parent->getbits.bstrm_buf.buf[parent->getbits.bstrm_buf.buf_index+2]; + + if (code >> (24-resync_marker_length) != 1) + { + int byte0 = code & 0xff; + int byte1 = (code >> 8) & 0xff; + if (byte0 != 0) + { + parent->getbits.bstrm_buf.buf_index += 3; + } + else if (byte1 != 0) + { + parent->getbits.bstrm_buf.buf_index += 2; + } + else + { + parent->getbits.bstrm_buf.buf_index += 1; + } + continue; + } +#endif // We found resync_marker viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); @@ -1152,7 +1183,7 @@ uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) if (bit_offset) { // byte-align parsing position - getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset); + getbits = viddec_pm_skip_bits(parent, 8 - bit_offset); if (getbits == -1) { ETRACE("Failed to align parser to byte position.\n"); -- cgit v1.2.3 From b24a0492d1c20e8efdbd94ec377321a360e3ab34 Mon Sep 17 00:00:00 2001 From: jiguoliang Date: Mon, 25 Feb 2013 15:39:23 -0500 Subject: Create one script for the middleware auto test BZ: 89042 The script makes sense at testing whether the new feature was enabled correctly. 
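The resync-marker scan added in the vbp_mp42_parser.c hunk above exploits the fact that an MPEG-4 resync marker opens with a run of at least 16 zero bits: whenever the trailing bytes of the current 24-bit window are non-zero, the search can step forward by more than one byte at a time instead of re-peeking bit by bit. A minimal standalone sketch of the same idea, assuming a plain byte buffer rather than the parser's real viddec_pm bitstream context:

    #include <stddef.h>
    #include <stdint.h>

    // Hedged sketch of the byte-stepped resync-marker search.
    // marker_len_bits is 17..23 per MPEG-4; the buffer interface here
    // is illustrative only, not the parser's actual API.
    static int find_resync_marker(const uint8_t *buf, size_t len, int marker_len_bits)
    {
        size_t i = 0;
        while (i + 3 <= len) {
            uint32_t code = (uint32_t)(buf[i] << 16 | buf[i + 1] << 8 | buf[i + 2]);
            if (code >> (24 - marker_len_bits) == 1)
                return (int)i;              // marker starts at byte i
            // buf[i+2] non-zero: a marker, which opens with two zero
            // bytes, cannot start at i+1 or i+2 either.
            if (code & 0xff)        i += 3;
            else if (code & 0xff00) i += 2;
            else                    i += 1;
        }
        return -1;                          // no marker before end of buffer
    }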
Change-Id: Ifa8c54985fabaa7d49d85814682815a3e39b86a2 Signed-off-by: jiguoliang Reviewed-on: http://android.intel.com:8080/93532 Reviewed-by: cactus Reviewed-by: Yuan, Shengquan Reviewed-by: Zhao, Leo Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- test/encoder.sh | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100755 test/encoder.sh diff --git a/test/encoder.sh b/test/encoder.sh new file mode 100755 index 0000000..7e88a3b --- /dev/null +++ b/test/encoder.sh @@ -0,0 +1,25 @@ +#!/bin/sh +#simulate Video capture use cases, default codec:H264, default rate control: CBR +adb shell mix_encoder -w 1920 -h 1080 -f /data/record_1080P.264 +adb shell mix_encoder -w 1280 -h 720 -f /data/record_720P.264 +adb shell mix_encoder -w 720 -h 480 -f /data/record_480P.264 + +#simulate Video edit use cases +adb shell mix_encoder -w 1920 -h 1080 -m 1 -f /data/videoedit_1080P.264 +adb shell mix_encoder -w 1280 -h 720 -m 1 -f /data/videoedit_720P.264 +adb shell mix_encoder -w 720 -h 480 -m 1 -f /data/videoedit_480P.264 + +#simulate WIDI use cases +adb shell mix_encoder -w 1920 -h 1080 -m 2 -f /data/widi_1080P.264 +adb shell mix_encoder -w 1280 -h 720 -m 2 -f /data/widi_720P.264 +adb shell mix_encoder -w 720 -h 480 -m 2 -f /data/widi_480P.264 + +for i in record videoedit widi +do + for j in 1080P 720P 480P + do + TEMP_NAME=${i}_$j.264 + echo $TEMP_NAME + adb pull /data/$TEMP_NAME . + done +done -- cgit v1.2.3 From fcbcf4aa42477d857147d7c6b969317478016f17 Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Wed, 6 Mar 2013 15:52:25 +0800 Subject: VideoEncoder: Modify VP8 encoder to cope with vaapi upgrade BZ: 90494 Modify VP8 encoder to cope with vaapi upgrade Change-Id: I9c4246873f0f39f995ed52950d12a0539c7c49e9 Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/95267 Reviewed-by: cactus Reviewed-by: Jiang, Fei Reviewed-by: Yuan, Shengquan Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/Android.mk | 1 + videoencoder/VideoEncoderBase.cpp | 12 +- videoencoder/VideoEncoderDef.h | 38 +++++++ videoencoder/VideoEncoderHost.cpp | 4 + videoencoder/VideoEncoderVP8.cpp | 229 ++++++++++++++++++++++++++++++++++++++ videoencoder/VideoEncoderVP8.h | 53 +++++++++ 6 files changed, 333 insertions(+), 4 deletions(-) create mode 100644 videoencoder/VideoEncoderVP8.cpp create mode 100644 videoencoder/VideoEncoderVP8.h diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk index 98b10e0..bf29710 100644 --- a/videoencoder/Android.mk +++ b/videoencoder/Android.mk @@ -8,6 +8,7 @@ LOCAL_SRC_FILES := \ VideoEncoderAVC.cpp \ VideoEncoderH263.cpp \ VideoEncoderMP4.cpp \ + VideoEncoderVP8.cpp \ VideoEncoderHost.cpp # LOCAL_CFLAGS := diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index b934a5e..bfc4da7 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -920,7 +920,8 @@ Encode_Status VideoEncoderBase::setParameters( case VideoParamsTypeAVC: case VideoParamsTypeH263: case VideoParamsTypeMP4: - case VideoParamsTypeVC1: { + case VideoParamsTypeVC1: + case VideoParamsTypeVP8: { ret = derivedSetParams(videoEncParams); break; } @@ -1030,7 +1031,8 @@ Encode_Status VideoEncoderBase::getParameters( case VideoParamsTypeAVC: case VideoParamsTypeH263: case VideoParamsTypeMP4: - case VideoParamsTypeVC1: { + case VideoParamsTypeVC1: + case VideoParamsTypeVP8: { derivedGetParams(videoEncParams); break; } @@ -1125,7 +1127,8 @@ Encode_Status 
VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { case VideoConfigTypeAVCIntraPeriod: case VideoConfigTypeNALSize: case VideoConfigTypeIDRRequest: - case VideoConfigTypeSliceNum: { + case VideoConfigTypeSliceNum: + case VideoConfigTypeVP8: { ret = derivedSetConfig(videoEncConfig); break; @@ -1210,7 +1213,8 @@ Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) { case VideoConfigTypeAVCIntraPeriod: case VideoConfigTypeNALSize: case VideoConfigTypeIDRRequest: - case VideoConfigTypeSliceNum: { + case VideoConfigTypeSliceNum: + case VideoConfigTypeVP8: { ret = derivedGetConfig(videoEncConfig); break; diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index b1674be..6c47e34 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -290,6 +290,7 @@ enum VideoParamConfigType { VideoParamsTypeHRD, VideoParamsTypeStoreMetaDataInBuffers, VideoParamsTypeProfileLevel, + VideoParamsTypeVP8, VideoConfigTypeFrameRate, VideoConfigTypeBitRate, @@ -301,6 +302,7 @@ enum VideoParamConfigType { VideoConfigTypeNALSize, VideoConfigTypeIDRRequest, VideoConfigTypeSliceNum, + VideoConfigTypeVP8, VideoParamsConfigExtension }; @@ -582,4 +584,40 @@ struct VideoConfigSliceNum : VideoParamConfigSet { SliceNum sliceNum; }; +struct VideoParamsVP8 : VideoParamConfigSet { + + uint32_t profile; + uint32_t error_resilient; + uint32_t num_token_partitions; + uint32_t kf_auto; + uint32_t kf_min_dist; + uint32_t kf_max_dist; + uint32_t quality_setting; + uint32_t min_qp; + uint32_t max_qp; + uint32_t rc_undershoot; + uint32_t rc_overshoot; + uint32_t hrd_buf_size; + uint32_t hrd_buf_initial_fullness; + uint32_t hrd_buf_optimal_fullness; + + VideoParamsVP8() { + type = VideoParamsTypeVP8; + size = sizeof(VideoParamsVP8); + } +}; + +struct VideoConfigVP8 : VideoParamConfigSet { + + uint32_t force_kf; + uint32_t no_ref_last; + uint32_t no_ref_gf; + uint32_t no_ref_arf; + + VideoConfigVP8 () { + type = VideoConfigTypeVP8; + size = sizeof(VideoConfigVP8); + } +}; + #endif /* __VIDEO_ENCODER_DEF_H__ */ diff --git a/videoencoder/VideoEncoderHost.cpp b/videoencoder/VideoEncoderHost.cpp index aed2bb9..bd9cec7 100644 --- a/videoencoder/VideoEncoderHost.cpp +++ b/videoencoder/VideoEncoderHost.cpp @@ -9,6 +9,7 @@ #include "VideoEncoderMP4.h" #include "VideoEncoderH263.h" #include "VideoEncoderAVC.h" +#include "VideoEncoderVP8.h" #include "VideoEncoderHost.h" #include "VideoEncoderLog.h" #include @@ -31,6 +32,9 @@ IVideoEncoder *createVideoEncoder(const char *mimeType) { strcasecmp(mimeType, "video/mp4v-es") == 0) { VideoEncoderMP4 *p = new VideoEncoderMP4(); return (IVideoEncoder *)p; + } else if (strcasecmp(mimeType, "video/x-webm") == 0) { + VideoEncoderVP8 *p = new VideoEncoderVP8(); + return (IVideoEncoder *)p; } else { LOG_E ("Unknown mime type: %s", mimeType); } diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp new file mode 100644 index 0000000..eb1b294 --- /dev/null +++ b/videoencoder/VideoEncoderVP8.cpp @@ -0,0 +1,229 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. 
The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#include +#include +#include "VideoEncoderLog.h" +#include "VideoEncoderVP8.h" +#include +#include + +VideoEncoderVP8::VideoEncoderVP8() + :VideoEncoderBase() { + + mVideoParamsVP8.profile = 0; + mVideoParamsVP8.error_resilient = 0; + mVideoParamsVP8.num_token_partitions = 4; + mVideoParamsVP8.kf_auto = 0; + mVideoParamsVP8.kf_min_dist = 0; + mVideoParamsVP8.kf_max_dist = 0; + mVideoParamsVP8.quality_setting = 0; + mVideoParamsVP8.min_qp = 0; + mVideoParamsVP8.max_qp = 0; + mVideoParamsVP8.rc_undershoot = 100; + mVideoParamsVP8.rc_overshoot = 100; + mVideoParamsVP8.hrd_buf_size = 500; + mVideoParamsVP8.hrd_buf_initial_fullness = 200; + mVideoParamsVP8.hrd_buf_optimal_fullness = 200; + + mVideoConfigVP8.force_kf = 0; + mVideoConfigVP8.no_ref_last = 0; + mVideoConfigVP8.no_ref_gf = 0; + mVideoConfigVP8.no_ref_arf = 0; + + mComParams.profile = VAProfileVP8Version0_3; +} + +VideoEncoderVP8::~VideoEncoderVP8() { +} + +Encode_Status VideoEncoderVP8::renderSequenceParams() { + Encode_Status ret = ENCODE_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncSequenceParameterBufferVP8 vp8SeqParam; + uint32_t frameRateNum = mComParams.frameRate.frameRateNum; + uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom; + LOG_V( "Begin\n"); + + vp8SeqParam.frame_width = mComParams.resolution.width; + vp8SeqParam.frame_height = mComParams.resolution.height; + vp8SeqParam.frame_rate = (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom; + vp8SeqParam.error_resilient = mVideoParamsVP8.error_resilient; + vp8SeqParam.kf_auto = mVideoParamsVP8.kf_auto; + vp8SeqParam.kf_min_dist = mVideoParamsVP8.kf_min_dist; + vp8SeqParam.kf_max_dist = mVideoParamsVP8.kf_max_dist; + vp8SeqParam.bits_per_second = mComParams.rcParams.bitRate; + vp8SeqParam.min_qp = mVideoParamsVP8.min_qp; + vp8SeqParam.max_qp = mVideoParamsVP8.max_qp; + vp8SeqParam.rc_undershoot = mVideoParamsVP8.rc_undershoot; + vp8SeqParam.rc_overshoot = mVideoParamsVP8.rc_overshoot; + vp8SeqParam.hrd_buf_size = mVideoParamsVP8.hrd_buf_size; + vp8SeqParam.hrd_buf_initial_fullness = mVideoParamsVP8.hrd_buf_initial_fullness; + vp8SeqParam.hrd_buf_optimal_fullness = mVideoParamsVP8.hrd_buf_optimal_fullness; + memcpy(vp8SeqParam.reference_frames, mVP8InternalFrames, sizeof(mVP8InternalFrames)); + + + vaStatus = vaCreateBuffer( + mVADisplay, mVAContext, + VAEncSequenceParameterBufferType, + sizeof(vp8SeqParam), + 1, &vp8SeqParam, + &mSeqParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + LOG_V( "End\n"); + return ret; +} + +Encode_Status VideoEncoderVP8::renderPictureParams(EncodeTask *task) { + Encode_Status ret = ENCODE_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncPictureParameterBufferVP8 vp8PicParam; + LOG_V( "Begin\n"); + + vp8PicParam.coded_buf = task->coded_buffer; + 
vp8PicParam.pic_flags.bits.force_kf = mVideoConfigVP8.force_kf; + vp8PicParam.pic_flags.bits.no_ref_last = mVideoConfigVP8.no_ref_last; + vp8PicParam.pic_flags.bits.no_ref_gf = mVideoConfigVP8.no_ref_gf; + vp8PicParam.pic_flags.bits.no_ref_arf = mVideoConfigVP8.no_ref_arf; + + vaStatus = vaCreateBuffer( + mVADisplay, mVAContext, + VAEncPictureParameterBufferType, + sizeof(vp8PicParam), + 1, &vp8PicParam, + &mPicParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + LOG_V( "End\n"); + return ret; +} + +Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) { + + Encode_Status ret = ENCODE_SUCCESS; + LOG_V( "Begin\n"); + + if (mFrameNum == 0) { + ret = renderSequenceParams(); + CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); + } + + ret = renderPictureParams(task); + CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); + + LOG_V( "End\n"); + return ret; +} + +Encode_Status VideoEncoderVP8::start() { + Encode_Status ret = ENCODE_SUCCESS; + LOG_V( "Begin\n"); + + ret = VideoEncoderBase::start (); + CHECK_ENCODE_STATUS_RETURN("VideoEncoderBase::start"); + + uint32_t stride_aligned = 0; + uint32_t height_aligned = 0; + + VASurfaceAttributeTPI attribute_tpi; + VAStatus vaStatus = VA_STATUS_SUCCESS; + + stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; + height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; + + attribute_tpi.size = stride_aligned * height_aligned * 3 / 2; + attribute_tpi.luma_stride = stride_aligned; + attribute_tpi.chroma_u_stride = stride_aligned; + attribute_tpi.chroma_v_stride = stride_aligned; + attribute_tpi.luma_offset = 0; + attribute_tpi.chroma_u_offset = stride_aligned * height_aligned; + attribute_tpi.chroma_v_offset = stride_aligned * height_aligned; + attribute_tpi.pixel_format = VA_FOURCC_NV12; + attribute_tpi.type = VAExternalMemoryNULL; + + vaStatus = vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned, + VA_RT_FORMAT_YUV420, VP8_INTERNAL_FRAME_LAST, mVP8InternalFrames, &attribute_tpi); + CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); + LOG_V( "end\n"); + return ret; +} + +Encode_Status VideoEncoderVP8::stop() { + Encode_Status ret = ENCODE_SUCCESS; + LOG_V( "Begin\n"); + + VAStatus vaStatus = VA_STATUS_SUCCESS; + vaStatus = vaDestroySurfaces(mVADisplay, mVP8InternalFrames, VP8_INTERNAL_FRAME_LAST); + CHECK_VA_STATUS_RETURN("vaDestroySurfaces"); + + ret = VideoEncoderBase::stop (); + CHECK_ENCODE_STATUS_RETURN("VideoEncoderBase::stop"); + + LOG_V( "end\n"); + + return ret; +} + +Encode_Status VideoEncoderVP8::derivedSetParams(VideoParamConfigSet *videoEncParams) { + + CHECK_NULL_RETURN_IFFAIL(videoEncParams); + VideoParamsVP8 *encParamsVP8 = reinterpret_cast (videoEncParams); + + if (encParamsVP8->size != sizeof(VideoParamsVP8)) { + return ENCODE_INVALID_PARAMS; + } + + mVideoParamsVP8 = *encParamsVP8; + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderVP8::derivedGetParams(VideoParamConfigSet *videoEncParams) { + + CHECK_NULL_RETURN_IFFAIL(videoEncParams); + VideoParamsVP8 *encParamsVP8 = reinterpret_cast (videoEncParams); + + if (encParamsVP8->size != sizeof(VideoParamsVP8)) { + return ENCODE_INVALID_PARAMS; + } + + *encParamsVP8 = mVideoParamsVP8; + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderVP8::derivedGetConfig(VideoParamConfigSet *videoEncConfig) { + + CHECK_NULL_RETURN_IFFAIL(videoEncConfig); + VideoConfigVP8 *encConfigVP8 = reinterpret_cast 
(videoEncConfig); + + if (encConfigVP8->size != sizeof(VideoConfigVP8)) { + return ENCODE_INVALID_PARAMS; + } + + *encConfigVP8 = mVideoConfigVP8; + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderVP8::derivedSetConfig(VideoParamConfigSet *videoEncConfig) { + + CHECK_NULL_RETURN_IFFAIL(videoEncConfig); + VideoConfigVP8 *encConfigVP8 = reinterpret_cast (videoEncConfig); + + if (encConfigVP8->size != sizeof(VideoConfigVP8)) { + return ENCODE_INVALID_PARAMS; + } + + mVideoConfigVP8 = *encConfigVP8; + return ENCODE_SUCCESS; +} diff --git a/videoencoder/VideoEncoderVP8.h b/videoencoder/VideoEncoderVP8.h new file mode 100644 index 0000000..671b279 --- /dev/null +++ b/videoencoder/VideoEncoderVP8.h @@ -0,0 +1,53 @@ +/* + INTEL CONFIDENTIAL + Copyright 2011 Intel Corporation All Rights Reserved. + The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission. + + No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing. + */ + +#ifndef __VIDEO_ENCODER_VP8_H__ +#define __VIDEO_ENCODER_VP8_H__ + +#include "VideoEncoderBase.h" + +/** + * VP8 Encoder class, derived from VideoEncoderBase + */ +class VideoEncoderVP8: public VideoEncoderBase { +public: + VideoEncoderVP8(); + virtual ~VideoEncoderVP8(); + + virtual Encode_Status start(void); + virtual Encode_Status stop(void); + +protected: + virtual Encode_Status sendEncodeCommand(EncodeTask *task); + virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams); + virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams); + virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig); + virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig); + virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer) { + return ENCODE_NOT_SUPPORTED; + } + + // Local Methods +private: + enum { + LRF = 0, + GRF = 1, + ARF = 2, + RCF = 3, + VP8_INTERNAL_FRAME_LAST + }; + + Encode_Status renderSequenceParams(); + Encode_Status renderPictureParams(EncodeTask *task); + + VideoConfigVP8 mVideoConfigVP8; + VideoParamsVP8 mVideoParamsVP8; + VASurfaceID mVP8InternalFrames[VP8_INTERNAL_FRAME_LAST]; +}; + +#endif /* __VIDEO_ENCODER_VP8_H__ */ -- cgit v1.2.3 From d6e5ed0cf9d0a242c25efe4961bc540f1861e96d Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Thu, 21 Mar 2013 17:47:09 +0800 Subject: VE: resolve compile issue on baytrail BZ: 94688 Remove dependance on IMG legacy data structure. 
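With the VideoEncoderVP8 class above wired into the host factory, a client drives it through the same generic parameter interface as the existing AVC/H263/MP4 encoders. A minimal usage sketch with purely illustrative field values (error handling omitted; the mime string matches the VideoEncoderHost.cpp hunk above):

    #include "VideoEncoderHost.h"

    IVideoEncoder *enc = createVideoEncoder("video/x-webm");

    VideoParamsVP8 vp8Params;            // type/size filled in by its constructor
    enc->getParameters(&vp8Params);
    vp8Params.kf_max_dist = 30;          // assumed keyframe spacing, illustration only
    vp8Params.num_token_partitions = 4;
    enc->setParameters(&vp8Params);

    enc->start();                        // also allocates the internal LRF/GRF/ARF/RCF surfaces
    // ... encode()/getOutput() loop, same as for the other codecs ...
    enc->stop();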
Change-Id: Id09e9dd4317f3da1938e7ade6605113a35c40d75 Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/97699 Reviewed-by: cactus Reviewed-by: Feng, Wei Reviewed-by: Gummadi, Latha C Tested-by: Gummadi, Latha C Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index bfc4da7..04250ab 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -11,7 +11,6 @@ #include "IntelMetadataBuffer.h" #include #include -#include // API declaration extern "C" { @@ -1544,20 +1543,20 @@ Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle(SurfaceMap *map) { vaSurfaceAttrib.count = 1; // color fmrat may be OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar or HAL_PIXEL_FORMAT_NV12 - IMG_native_handle_t* h = (IMG_native_handle_t*) map->value; - LOG_I("IMG_native_handle_t h->iWidth=%d, h->iHeight=%d, h->iFormat=%x\n", h->iWidth, h->iHeight, h->iFormat); + //IMG_native_handle_t* h = (IMG_native_handle_t*) map->value; + //LOG_I("IMG_native_handle_t h->iWidth=%d, h->iHeight=%d, h->iFormat=%x\n", h->iWidth, h->iHeight, h->iFormat); - vaSurfaceAttrib.luma_stride = h->iWidth; - vaSurfaceAttrib.pixel_format = h->iFormat; - vaSurfaceAttrib.width = h->iWidth; - vaSurfaceAttrib.height = h->iHeight; + vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride; + vaSurfaceAttrib.pixel_format = map->vinfo.format; + vaSurfaceAttrib.width = map->vinfo.width; + vaSurfaceAttrib.height = map->vinfo.height; vaSurfaceAttrib.type = VAExternalMemoryAndroidGrallocBuffer; vaSurfaceAttrib.buffers[0] = (uint32_t) map->value; vaStatus = vaCreateSurfacesWithAttribute( mVADisplay, - h->iWidth, - h->iHeight, + map->vinfo.width, + map->vinfo.height, VA_RT_FORMAT_YUV420, 1, &surface, -- cgit v1.2.3 From ccb98248572151727bda7e3ccf27281c4ff76625 Mon Sep 17 00:00:00 2001 From: jiguoliang Date: Fri, 22 Mar 2013 10:53:31 -0400 Subject: Fix the 720P corruption issue BZ: 93083 The height of the ref/recon surface should be 32 alignment on the merrifield platform. Signed-off-by: jiguoliang Change-Id: I03e9a01b1cb02a9dbbd3f0499ff65381f6fa1f9e Reviewed-on: http://android.intel.com:8080/97842 Reviewed-by: cactus Reviewed-by: Yuan, Shengquan Reviewed-by: Ji, Guoliang Reviewed-by: Zhao, Leo Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 04250ab..3442958 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -157,13 +157,15 @@ Encode_Status VideoEncoderBase::start() { VASurfaceID surfaces[2]; VASurfaceAttributeTPI attribute_tpi; - uint32_t stride_aligned; - if(mAutoReference == false) + uint32_t stride_aligned, height_aligned; + if(mAutoReference == false){ stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; - else + height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; + }else{ stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; //on Merr, stride must be 64 aligned. 
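// Note: the round-up arithmetic above generalizes to any alignment N:
//   aligned = ((x + N - 1) / N) * N;      // works for any N > 0
//   aligned = (x + N - 1) & ~(N - 1);     // same result when N is a power of two
// e.g. a 1281-pixel width becomes 1344 under the 64-pixel Merrifield rule,
// while an already-aligned 1280 stays 1280.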
+ height_aligned = ((mComParams.resolution.height + 31) / 32 ) * 32; + } - uint32_t height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; attribute_tpi.size = stride_aligned * height_aligned * 3 / 2; attribute_tpi.luma_stride = stride_aligned; -- cgit v1.2.3 From 27cb27f68fbc44b55383b2a9fffaf3691cb82637 Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Wed, 27 Mar 2013 14:11:45 +0800 Subject: libmix: Update slice parameter buffer for VP8 BZ: 93296 Update slice parameter buffer for VP8 Change-Id: Ib02e058e383068069fb526cfec191ba56903f0d9 Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/98564 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 2 +- mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c | 37 +++++++++++++++------------- videodecoder/VideoDecoderVP8.cpp | 2 +- 3 files changed, 22 insertions(+), 19 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index 1e888a9..0655e07 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -349,7 +349,7 @@ typedef struct _vbp_slice_data_vp8 uint8 *buffer_addr; uint32 slice_offset; uint32 slice_size; - VASliceParameterBufferBase slc_parms; /* pointer to slice parms */ + VASliceParameterBufferVP8 slc_parms; /* pointer to slice parms */ } vbp_slice_data_vp8; typedef struct _vbp_picture_data_vp8 diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c index 89ef068..7a41f43 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c @@ -239,18 +239,18 @@ uint32 vbp_process_parsing_result_vp8( vbp_context *pcontext, int i) switch (parser->info.frame_tag.frame_type) { case KEY_FRAME: - ITRACE("This is a key frame."); + //ITRACE("This is a key frame."); parser->info.decoded_frame_number++; break; case INTER_FRAME: - ITRACE("This is an inter frame."); + //ITRACE("This is an inter frame."); parser->info.decoded_frame_number++; break; case SKIPPED_FRAME: - ITRACE("This is skipped frame. We have done nothing."); + WTRACE("This is skipped frame. 
We have done nothing."); break; default: - WTRACE("Unknown frame type %d", parser->info.frame_tag.frame_type); + ETRACE("Unknown frame type %d", parser->info.frame_tag.frame_type); break; } @@ -359,7 +359,6 @@ static uint32_t vbp_add_pic_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *qu pic_parms->pic_fields.value = 0; pic_parms->pic_fields.bits.key_frame = pi->frame_tag.frame_type; pic_parms->pic_fields.bits.version = pi->frame_tag.version; - pic_parms->partition_size[0] = pi->frame_tag.first_part_size; /* Segmentation */ pic_parms->pic_fields.bits.segmentation_enabled = pi->Segmentation.Enabled; @@ -413,14 +412,6 @@ static uint32_t vbp_add_pic_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *qu pic_parms->pic_fields.bits.mb_no_coeff_skip = pi->mb_no_coeff_skip; pic_parms->pic_fields.bits.mb_skip_coeff = pi->mb_skip_coeff; - - /* Token Partitions */ - pic_parms->num_of_partitions = pi->partition_count; - for (i = 1; i < 9; i++) - { - pic_parms->partition_size[i] = pi->partition_size[i - 1]; - } - pic_parms->prob_skip_false = pi->prob_skip_false; pic_parms->prob_intra = pi->prob_intra; pic_parms->prob_last = pi->prob_lf; @@ -446,9 +437,6 @@ static uint32_t vbp_add_pic_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *qu pic_parms->alt_ref_frame = VA_INVALID_SURFACE; pic_parms->out_of_loop_frame = VA_INVALID_SURFACE; //Reserved for future use - /* the offset to the first bit of MB from the first byte of slice data */ - pic_parms->macroblock_offset = pi->header_bits; - /* specify the slice number */ pic_data->num_slices = 0; @@ -459,6 +447,7 @@ static uint32_t vbp_add_slice_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 * { vp8_Info *pi = &(parser->info); uint32_t pic_index = query_data->num_pictures - 1; + uint32_t part_index = 0; if (pic_index < 0) { ETRACE("Invalid picture data index."); @@ -472,7 +461,7 @@ static uint32_t vbp_add_slice_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 * slc_data->slice_offset = 0; slc_data->slice_size = pi->source_sz; - VASliceParameterBufferBase *slc_parms = &(slc_data->slc_parms); + VASliceParameterBufferVP8 *slc_parms = &(slc_data->slc_parms); /* number of bytes in the slice data buffer for this slice */ slc_parms->slice_data_size = slc_data->slice_size; @@ -482,6 +471,20 @@ static uint32_t vbp_add_slice_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 * /* see VA_SLICE_DATA_FLAG_XXX definitions */ slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + /* the offset to the first bit of MB from the first byte of slice data */ + slc_parms->macroblock_offset = pi->header_bits; + + /* Token Partitions */ + slc_parms->num_of_partitions = pi->partition_count; + slc_parms->partition_size[0] = pi->frame_tag.first_part_size; + for (part_index = 1; part_index < 9; part_index++) + { + slc_parms->partition_size[part_index] = pi->partition_size[part_index - 1]; + } + + /* This field specifies the offset to the first byte of partition data */ + slc_parms->partition_data_offset = slc_parms->slice_data_offset; + pic_data->num_slices++; if (pic_data->num_slices > VP8_MAX_NUM_SLICES) { ETRACE("Number of slices (%d) per picture exceeds the limit (%d).", pic_data->num_slices, VP8_MAX_NUM_SLICES); diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index 3a0c2ac..ea9270e 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -263,7 +263,7 @@ Decode_Status VideoDecoderVP8::decodePicture(vbp_data_vp8 *data, int32_t picInde mVADisplay, mVAContext, VASliceParameterBufferType, - 
sizeof(VASliceParameterBufferBase), + sizeof(VASliceParameterBufferVP8), 1, &(picData->slc_data[i].slc_parms), &bufferIDs[bufferIDCount]); -- cgit v1.2.3 From 3738c1d41d89a78d064ba2324684faed55b9a0b2 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Tue, 26 Mar 2013 16:47:11 +0800 Subject: Calculate crop in MW according to setting width / height BZ: 95362 uniform CTP/MERR to set crop info in MW Change-Id: Ia7dcc233d587018661d8629069b76d6d3e063355 Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/98558 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderAVC.cpp | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index ae0293b..0f4d84b 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -856,8 +856,22 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) { avcSeqParams.frame_crop_right_offset = mVideoParamsAVC.crop.RightOffset; avcSeqParams.frame_crop_top_offset = mVideoParamsAVC.crop.TopOffset; avcSeqParams.frame_crop_bottom_offset = mVideoParamsAVC.crop.BottomOffset; - } else + } else { avcSeqParams.frame_cropping_flag = false; + + if (mComParams.resolution.width & 0xf) { + avcSeqParams.frame_cropping_flag = true; + uint32_t AWidth = (mComParams.resolution.width + 0xf) & (~0xf); + avcSeqParams.frame_crop_right_offset = ( AWidth - mComParams.resolution.width ) / 2; + } + + if (mComParams.resolution.height & 0xf) { + avcSeqParams.frame_cropping_flag = true; + uint32_t AHeight = (mComParams.resolution.height + 0xf) & (~0xf); + avcSeqParams.frame_crop_bottom_offset = ( AHeight - mComParams.resolution.height ) / 2; + } + } + if(avcSeqParams.vui_parameters_present_flag && (mVideoParamsAVC.SAR.SarWidth || mVideoParamsAVC.SAR.SarHeight)) { avcSeqParams.vui_fields.bits.aspect_ratio_info_present_flag = true; avcSeqParams.aspect_ratio_idc = 0xff /* Extended_SAR */; -- cgit v1.2.3 From 17a995f0758a9ecc9400bada90f0090204d17c72 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Tue, 26 Mar 2013 01:27:39 +0800 Subject: Pass VC1 profile info to the vpg driver BZ: 95448 The VPG driver requires libmix to pass VC1 profile to it. Change-Id: I9aa70b27c1073d094975b272b0658d0b78ec5cfe Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/98086 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c | 1 + 1 file changed, 1 insertion(+) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c index 751227e..4a8d6d3 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c @@ -1013,6 +1013,7 @@ static void vbp_pack_picture_params_vc1( (picLayerHeader->TRANSACFRM2 > 0) ? picLayerHeader->TRANSACFRM2 - 1 : 0; pic_parms->transform_fields.bits.intra_transform_dc_table = picLayerHeader->TRANSDCTAB; + pic_parms->sequence_fields.bits.profile = seqLayerHeader->PROFILE; } -- cgit v1.2.3 From 71331036994bddb94e9f8e4a9df05c4df68c21a9 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Thu, 28 Mar 2013 02:16:44 +0800 Subject: Add new types for VC1 thumbnail generation BZ: 95811 The type passed from VAVideoDecoder is "x-ms-wmv". Add support for this type. Otherwise, you will get "Unknown mime type" error. 
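The cropping math added to VideoEncoderAVC.cpp above follows from H.264's 16-pixel macroblock granularity: when the requested width or height is not a multiple of 16, the encoder codes the rounded-up dimensions and signals the surplus through SPS crop offsets, expressed in 2-pixel units for 4:2:0 content (hence the division by 2). A standalone sketch of the same computation, with illustrative names:

    #include <stdint.h>

    // Hedged sketch of the SPS frame-cropping calculation for 4:2:0.
    struct CropInfo { bool enabled; uint32_t right; uint32_t bottom; };

    static CropInfo computeCrop(uint32_t width, uint32_t height)
    {
        CropInfo c = { false, 0, 0 };
        if (width & 0xf) {
            c.enabled = true;
            c.right = (((width + 0xf) & ~0xfu) - width) / 2;
        }
        if (height & 0xf) {
            c.enabled = true;
            c.bottom = (((height + 0xf) & ~0xfu) - height) / 2;
        }
        return c;   // e.g. 1920x1080 yields bottom offset 4 ((1088 - 1080) / 2)
    }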
Change-Id: I06b070e8d25cde02cc149173e3193b1bb5b811b6 Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/98650 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderHost.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderHost.cpp b/videodecoder/VideoDecoderHost.cpp index f990fa6..ba04797 100644 --- a/videodecoder/VideoDecoderHost.cpp +++ b/videodecoder/VideoDecoderHost.cpp @@ -41,7 +41,8 @@ IVideoDecoder* createVideoDecoder(const char* mimeType) { } if (strcasecmp(mimeType, "video/wmv") == 0 || - strcasecmp(mimeType, "video/vc1") == 0) { + strcasecmp(mimeType, "video/vc1") == 0 || + strcasecmp(mimeType, "video/x-ms-wmv") == 0) { VideoDecoderWMV *p = new VideoDecoderWMV(mimeType); return (IVideoDecoder *)p; } else if (strcasecmp(mimeType, "video/avc") == 0 || -- cgit v1.2.3 From 6db342d75ce221ac0ff8597976d5c5ffb96cbf01 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Fri, 29 Mar 2013 00:46:50 +0800 Subject: Build VP8 libraries for baytrail BZ: 96392 Modify the makefile to build VP8 libraries for baytrail. Signed-off-by: wfeng6 Change-Id: Ia9f8e68bcc8591d75eb8cd683de012687e81f0a2 Reviewed-on: http://android.intel.com:8080/98865 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/Android.mk | 6 ++++-- mix_vbp/viddec_fw/fw/parser/Android.mk | 6 +++++- videodecoder/Android.mk | 7 +++++-- 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/mix_vbp/Android.mk b/mix_vbp/Android.mk index edf358d..93e9bd3 100644 --- a/mix_vbp/Android.mk +++ b/mix_vbp/Android.mk @@ -8,7 +8,9 @@ include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/h264/parser/Androi include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/parser/Android.mk include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vc1/parser/Android.mk -# Add source codes for Merrifield -ifeq ($(TARGET_BOARD_PLATFORM),merrifield) +PLATFORM_SUPPORT_VP8 := \ + merrifield \ + baytrail +ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_VP8)),) include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vp8/parser/Android.mk endif diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk index 7fba3b2..4bc8b9b 100644 --- a/mix_vbp/viddec_fw/fw/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -45,7 +45,11 @@ LOCAL_CFLAGS += -DVBP_TRACE LOCAL_SHARED_LIBRARIES += liblog endif -ifeq ($(TARGET_BOARD_PLATFORM),merrifield) +PLATFORM_SUPPORT_VP8 := \ + merrifield \ + baytrail + +ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_VP8)),) LOCAL_SRC_FILES += vbp_vp8_parser.c LOCAL_C_INCLUDES += $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vp8/include LOCAL_CFLAGS += -DUSE_HW_VP8 diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 26a869e..dc86bec 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -44,8 +44,11 @@ LOCAL_COPY_HEADERS := \ LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libva_videodecoder -# Add source codes for Merrifield -ifeq ($(TARGET_BOARD_PLATFORM),merrifield) +PLATFORM_SUPPORT_VP8 := \ + merrifield \ + baytrail + +ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_VP8)),) LOCAL_SRC_FILES += VideoDecoderVP8.cpp LOCAL_CFLAGS += -DUSE_HW_VP8 endif -- cgit v1.2.3 From 5f889c024362ba0cd0ab1fabe786facd7fcfeb93 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Tue, 2 Apr 2013 21:13:00 +0800 Subject: Add support for VP8 playback on 
Baytrail BZ: 96392 This patch is a work around. REVERT ME later. Change-Id: I8f51c3bd5dfb411d555a787d09d73326beb0fd01 Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/99378 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/Android.mk | 4 ++++ videodecoder/VideoDecoderBase.cpp | 16 ++++++++++++++++ 2 files changed, 20 insertions(+) mode change 100644 => 100755 videodecoder/VideoDecoderBase.cpp diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index dc86bec..4f82d66 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -21,6 +21,10 @@ LOCAL_C_INCLUDES := \ #LOCAL_LDLIBS += -lpthread +ifeq ($(TARGET_BOARD_PLATFORM),baytrail) +LOCAL_CFLAGS += -DLOAD_PVR_DRIVER +endif + LOCAL_SHARED_LIBRARIES := \ libcutils \ libmixvbp \ diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp old mode 100644 new mode 100755 index 0dd07ae..e5285ca --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -279,6 +279,13 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { } //VTRACE("Output POC %d for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6); vaStatus = vaSetTimestampForSurface(mVADisplay, output->renderBuffer.surface, output->renderBuffer.timeStamp); + +#ifdef LOAD_PVR_DRIVER + if (useGraphicBuffer ) { + vaSyncSurface(mVADisplay, output->renderBuffer.surface); + } +#endif + return &(output->renderBuffer); } @@ -735,10 +742,19 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { return DECODE_DRIVER_FAIL; } +#ifdef LOAD_PVR_DRIVER + ITRACE("load pvr driver.\n"); + setenv("LIBVA_DRIVER_NAME", "pvr", 1); +#endif + int majorVersion, minorVersion; vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion); CHECK_VA_STATUS("vaInitialize"); +#ifdef LOAD_PVR_DRIVER + unsetenv("LIBVA_DRIVER_NAME"); +#endif + if (mConfigBuffer.frameRate > 45 && mVideoFormatInfo.height >= 1080) { // ugly workaround here // for fps > 45 and height > 1080, we will force to -- cgit v1.2.3 From b095739ff29d8ab2c57d3d6790c8bc0ef8f1ef87 Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Fri, 1 Mar 2013 17:51:17 +0800 Subject: VE: Enhance MIX to support VAEncPackedHeader. BZ: 94688 Major modified areas are: 1. Command sequence, add manually built non-VCL header 2. 
Sequence/Picture/Slice parameters setting Change-Id: I17d7df288570db1ffabf35ea16c6f5293e776166 Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/94561 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: cactus Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/Android.mk | 4 + videoencoder/VideoEncoderAVC.cpp | 168 ++++++++++++++++- videoencoder/VideoEncoderAVC.h | 8 + videoencoder/VideoEncoderBase.cpp | 21 ++- videoencoder/VideoEncoderBase.h | 2 + videoencoder/bitstream.h | 386 ++++++++++++++++++++++++++++++++++++++ 6 files changed, 578 insertions(+), 11 deletions(-) create mode 100644 videoencoder/bitstream.h diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk index bf29710..af21f9f 100644 --- a/videoencoder/Android.mk +++ b/videoencoder/Android.mk @@ -46,6 +46,10 @@ ifeq ($(VIDEO_ENC_STATISTICS_ENABLE),true) LOCAL_CPPFLAGS += -DVIDEO_ENC_STATISTICS_ENABLE endif +ifeq ($(REF_PRODUCT_NAME),baylake) + LOCAL_C_FLAGS += -DBAYLAKE +endif + LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libva_videoencoder diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 0f4d84b..ea1d3f9 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -12,6 +12,7 @@ #include "VideoEncoderAVC.h" #include #include +#include VideoEncoderAVC::VideoEncoderAVC() :VideoEncoderBase() { @@ -38,6 +39,13 @@ VideoEncoderAVC::VideoEncoderAVC() mVideoParamsAVC.SAR.SarWidth = 0; mVideoParamsAVC.SAR.SarHeight = 0; mAutoReferenceSurfaceNum = 4; + + packed_seq_header_param_buf_id = VA_INVALID_ID; + packed_seq_buf_id = VA_INVALID_ID; + packed_pic_header_param_buf_id = VA_INVALID_ID; + packed_pic_buf_id = VA_INVALID_ID; + packed_sei_header_param_buf_id = VA_INVALID_ID; /* the SEI buffer */ + packed_sei_buf_id = VA_INVALID_ID; } Encode_Status VideoEncoderAVC::start() { @@ -627,7 +635,6 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) { Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n"); - if (mFrameNum == 0 || mNewHeader) { if (mRenderHrd) { @@ -675,6 +682,14 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) { ret = renderPictureParams(task); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); + if (mFrameNum == 0 && (mEncPackedHeaders != VA_ATTRIB_NOT_SUPPORTED)) { + ret = renderPackedSequenceParams(task); + CHECK_ENCODE_STATUS_RETURN("renderPackedSequenceParams"); + + ret = renderPackedPictureParams(task); + CHECK_ENCODE_STATUS_RETURN("renderPackedPictureParams"); + } + ret = renderSliceParams(task); CHECK_ENCODE_STATUS_RETURN("renderSliceParams"); @@ -846,7 +861,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) { avcSeqParams.intra_period = mComParams.intraPeriod; //avcSeqParams.vui_flag = 248; avcSeqParams.vui_parameters_present_flag = mVideoParamsAVC.VUIFlag; - avcSeqParams.seq_parameter_set_id = 8; + avcSeqParams.seq_parameter_set_id = 0; if (mVideoParamsAVC.crop.LeftOffset || mVideoParamsAVC.crop.RightOffset || mVideoParamsAVC.crop.TopOffset || @@ -880,7 +895,9 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) { } // This is a temporary fix suggested by Binglin for bad encoding quality issue - avcSeqParams.max_num_ref_frames = 1; + avcSeqParams.max_num_ref_frames = (mEncMaxRefFrames != VA_ATTRIB_NOT_SUPPORTED) ? 
+ mEncMaxRefFrames : 1; + if(avcSeqParams.ip_period > 1) avcSeqParams.max_num_ref_frames = 2; @@ -897,6 +914,17 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) { LOG_I( "min_qp = %d\n", rcMiscParam->min_qp); LOG_I( "basic_unit_size = %d\n", rcMiscParam->basic_unit_size); + // Not sure whether these settings work for all drivers + avcSeqParams.seq_fields.bits.frame_mbs_only_flag = 1; + avcSeqParams.seq_fields.bits.pic_order_cnt_type = 0; + avcSeqParams.seq_fields.bits.direct_8x8_inference_flag = 0; + + avcSeqParams.seq_fields.bits.log2_max_frame_num_minus4 = 0; + avcSeqParams.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = 2; + avcSeqParams.time_scale = 900; + avcSeqParams.num_units_in_tick = 15; /* Tc = num_units_in_tick / time_sacle */ + // Not sure whether these settings work for all drivers + vaStatus = vaUnmapBuffer(mVADisplay, mRcParamBuf); CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); vaStatus = vaUnmapBuffer(mVADisplay, mFrameRateParamBuf); @@ -917,21 +945,93 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) { return ENCODE_SUCCESS; } +Encode_Status VideoEncoderAVC::renderPackedSequenceParams(EncodeTask *task) { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncSequenceParameterBufferH264 *avcSeqParams; + VAEncPackedHeaderParameterBuffer packed_header_param_buffer; + unsigned char *packed_seq_buffer = NULL; + unsigned int length_in_bits, offset_in_bytes; + + vaStatus = vaMapBuffer(mVADisplay, mSeqParamBuf, (void **)&avcSeqParams); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + length_in_bits = build_packed_seq_buffer(&packed_seq_buffer, mComParams.profile, avcSeqParams); + packed_header_param_buffer.type = VAEncPackedHeaderSequence; + packed_header_param_buffer.bit_length = length_in_bits; + packed_header_param_buffer.has_emulation_bytes = 0; + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncPackedHeaderParameterBufferType, + sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer, + &packed_seq_header_param_buf_id); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncPackedHeaderDataBufferType, + (length_in_bits + 7) / 8, 1, packed_seq_buffer, + &packed_seq_buf_id); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &packed_seq_header_param_buf_id, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &packed_seq_buf_id, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + vaStatus = vaUnmapBuffer(mVADisplay, mSeqParamBuf); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + + free(packed_seq_buffer); + return vaStatus; +} Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncPictureParameterBufferH264 avcPicParams = {}; + uint32_t RefFrmIdx; LOG_V( "Begin\n\n"); // set picture params for HW - if(mAutoReference == false){ + if (mAutoReference == false) { + for (RefFrmIdx = 0; RefFrmIdx < 16; RefFrmIdx++) { + avcPicParams.ReferenceFrames[RefFrmIdx].picture_id = VA_INVALID_ID; + avcPicParams.ReferenceFrames[RefFrmIdx].flags = VA_PICTURE_H264_INVALID; + } avcPicParams.ReferenceFrames[0].picture_id= task->ref_surface; + avcPicParams.ReferenceFrames[0].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; avcPicParams.CurrPic.picture_id= task->rec_surface; + // Not sure whether these settings work for all drivers + avcPicParams.CurrPic.TopFieldOrderCnt = mFrameNum * 2; + + avcPicParams.pic_fields.bits.transform_8x8_mode_flag = 0; 
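+        // Note: these hard-coded ids and flags must stay consistent with the
+        // packed SPS/PPS emitted by sps_rbsp()/pps_rbsp() in bitstream.h,
+        // which read the very same parameter buffers back when building the
+        // non-VCL headers.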
+ avcPicParams.seq_parameter_set_id = 0; + avcPicParams.pic_parameter_set_id = 0; + + avcPicParams.last_picture = 0; + avcPicParams.frame_num = 0; + + avcPicParams.pic_init_qp = 26; + avcPicParams.num_ref_idx_l0_active_minus1 = 0; + avcPicParams.num_ref_idx_l1_active_minus1 = 0; + + avcPicParams.pic_fields.bits.idr_pic_flag = 0; + avcPicParams.pic_fields.bits.reference_pic_flag = 0; + avcPicParams.pic_fields.bits.entropy_coding_mode_flag = 1; + avcPicParams.pic_fields.bits.weighted_pred_flag = 0; + avcPicParams.pic_fields.bits.weighted_bipred_idc = 0; + avcPicParams.pic_fields.bits.transform_8x8_mode_flag = 0; + avcPicParams.pic_fields.bits.deblocking_filter_control_present_flag = 1; + + avcPicParams.frame_num = mFrameNum; + avcPicParams.pic_fields.bits.idr_pic_flag = (mFrameNum == 0); + avcPicParams.pic_fields.bits.reference_pic_flag = 1; + // Not sure whether these settings work for all drivers }else { for(int i =0; i< mAutoReferenceSurfaceNum; i++) avcPicParams.ReferenceFrames[i].picture_id = mAutoRefSurfaces[i]; } + avcPicParams.coded_buf = task->coded_buffer; avcPicParams.last_picture = 0; @@ -957,6 +1057,45 @@ Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) { return ENCODE_SUCCESS; } +Encode_Status VideoEncoderAVC::renderPackedPictureParams(EncodeTask *task) { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncPictureParameterBufferH264 *avcPicParams; + VAEncPackedHeaderParameterBuffer packed_header_param_buffer; + unsigned char *packed_pic_buffer = NULL; + unsigned int length_in_bits, offset_in_bytes; + + vaStatus = vaMapBuffer(mVADisplay, mPicParamBuf, (void **)&avcPicParams); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + length_in_bits = build_packed_pic_buffer(&packed_pic_buffer, avcPicParams); + packed_header_param_buffer.type = VAEncPackedHeaderPicture; + packed_header_param_buffer.bit_length = length_in_bits; + packed_header_param_buffer.has_emulation_bytes = 0; + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncPackedHeaderParameterBufferType, + sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer, + &packed_pic_header_param_buf_id); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncPackedHeaderDataBufferType, + (length_in_bits + 7) / 8, 1, packed_pic_buffer, + &packed_pic_buf_id); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &packed_pic_header_param_buf_id, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &packed_pic_buf_id, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + vaStatus = vaUnmapBuffer(mVADisplay, mSeqParamBuf); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + + free(packed_pic_buffer); + return vaStatus; +} Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) { @@ -970,6 +1109,7 @@ Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) { uint32_t actualSliceHeightInMB = 0; uint32_t startRowInMB = 0; uint32_t modulus = 0; + uint32_t RefFrmIdx; LOG_V( "Begin\n\n"); @@ -1047,6 +1187,26 @@ Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) { LOG_I( "slice.type = %d\n", (int) currentSlice->slice_type); LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) currentSlice->disable_deblocking_filter_idc); + // Not sure whether these settings work for all drivers + currentSlice->pic_parameter_set_id = 0; + currentSlice->pic_order_cnt_lsb = mFrameNum * 2; + currentSlice->direct_spatial_mv_pred_flag = 0; + 
currentSlice->num_ref_idx_l0_active_minus1 = 0; /* FIXME: ??? */ + currentSlice->num_ref_idx_l1_active_minus1 = 0; + currentSlice->cabac_init_idc = 0; + currentSlice->slice_qp_delta = 0; + currentSlice->disable_deblocking_filter_idc = 0; + currentSlice->slice_alpha_c0_offset_div2 = 2; + currentSlice->slice_beta_offset_div2 = 2; + currentSlice->idr_pic_id = 0; + for (RefFrmIdx = 0; RefFrmIdx < 32; RefFrmIdx++) { + currentSlice->RefPicList0[RefFrmIdx].picture_id = VA_INVALID_ID; + currentSlice->RefPicList0[RefFrmIdx].flags = VA_PICTURE_H264_INVALID; + } + currentSlice->RefPicList0[0].picture_id = task->ref_surface; + currentSlice->RefPicList0[0].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; + // Not sure whether these settings work for all drivers + startRowInMB += actualSliceHeightInMB; } diff --git a/videoencoder/VideoEncoderAVC.h b/videoencoder/VideoEncoderAVC.h index 1248a3e..f33755b 100644 --- a/videoencoder/VideoEncoderAVC.h +++ b/videoencoder/VideoEncoderAVC.h @@ -44,11 +44,19 @@ private: Encode_Status renderPictureParams(EncodeTask *task); Encode_Status renderSliceParams(EncodeTask *task); int calcLevel(int numMbs); + Encode_Status renderPackedSequenceParams(EncodeTask *task); + Encode_Status renderPackedPictureParams(EncodeTask *task); public: VideoParamsAVC mVideoParamsAVC; uint32_t mSliceNum; + VABufferID packed_seq_header_param_buf_id; + VABufferID packed_seq_buf_id; + VABufferID packed_pic_header_param_buf_id; + VABufferID packed_pic_buf_id; + VABufferID packed_sei_header_param_buf_id; /* the SEI buffer */ + VABufferID packed_sei_buf_id; }; diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 3442958..64f5a02 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -120,10 +120,20 @@ Encode_Status VideoEncoderBase::start() { queryAutoReferenceConfig(mComParams.profile); - VAConfigAttrib vaAttrib[3]; + VAConfigAttrib vaAttrib[5]; vaAttrib[0].type = VAConfigAttribRTFormat; vaAttrib[1].type = VAConfigAttribRateControl; vaAttrib[2].type = VAConfigAttribEncAutoReference; + vaAttrib[3].type = VAConfigAttribEncPackedHeaders; + vaAttrib[4].type = VAConfigAttribEncMaxRefFrames; + + vaStatus = vaGetConfigAttributes(mVADisplay, mComParams.profile, + VAEntrypointEncSlice, &vaAttrib[0], 5); + CHECK_VA_STATUS_RETURN("vaGetConfigAttributes"); + + mEncPackedHeaders = vaAttrib[3].value; + mEncMaxRefFrames = vaAttrib[4].value; + vaAttrib[0].value = VA_RT_FORMAT_YUV420; vaAttrib[1].value = mComParams.rcMode; vaAttrib[2].value = mAutoReference ? 
1 : VA_ATTRIB_NOT_SUPPORTED; @@ -220,13 +230,12 @@ Encode_Status VideoEncoderBase::start() { vaStatus = vaCreateContext(mVADisplay, mVAConfig, mComParams.resolution.width, mComParams.resolution.height, - 0, contextSurfaces, contextSurfaceCnt, + VA_PROGRESSIVE, contextSurfaces, contextSurfaceCnt, &(mVAContext)); + CHECK_VA_STATUS_RETURN("vaCreateContext"); delete [] contextSurfaces; - CHECK_VA_STATUS_RETURN("vaCreateContext"); - LOG_I("Success to create libva context width %d, height %d\n", mComParams.resolution.width, mComParams.resolution.height); @@ -1302,13 +1311,11 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( SurfaceMap *map = NULL; LOG_V( "Begin\n"); - // If encode session has been configured, we can not request surface creation anymore if (mStarted) { LOG_E( "Already Initialized, can not request VA surface anymore\n"); return ENCODE_WRONG_STATE; } - if (width<=0 || height<=0 ||outsize == NULL ||stride == NULL || usrptr == NULL) { LOG_E("width<=0 || height<=0 || outsize == NULL || stride == NULL ||usrptr == NULL\n"); return ENCODE_NULL_PTR; @@ -1333,7 +1340,7 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( attribute_tpi.pixel_format = VA_FOURCC_NV12; attribute_tpi.type = VAExternalMemoryNULL; - vaCreateSurfacesWithAttribute(mVADisplay, width, height, VA_RT_FORMAT_YUV420, + vaStatus = vaCreateSurfacesWithAttribute(mVADisplay, width, height, VA_RT_FORMAT_YUV420, 1, &surface, &attribute_tpi); CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 764ccce..53b078b 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -140,6 +140,8 @@ protected: uint32_t mCodedBufSize; bool mAutoReference; uint32_t mAutoReferenceSurfaceNum; + uint32_t mEncPackedHeaders; + uint32_t mEncMaxRefFrames; bool mSliceSizeOverflow; diff --git a/videoencoder/bitstream.h b/videoencoder/bitstream.h new file mode 100644 index 0000000..2fe98fd --- /dev/null +++ b/videoencoder/bitstream.h @@ -0,0 +1,386 @@ +#ifndef __BITSTREAM_H__ +#define __BITSTREAM_H__ + +#include +#include + +struct bitstream { + unsigned int *buffer; + int bit_offset; + int max_size_in_dword; +}; + +#define BITSTREAM_ALLOCATE_STEPPING 4096 + +static unsigned int va_swap32(unsigned int val) +{ + unsigned char *pval = (unsigned char *)&val; + + return ((pval[0] << 24) | + (pval[1] << 16) | + (pval[2] << 8) | + (pval[3] << 0)); +} + +static void bitstream_start(bitstream *bs) +{ + bs->max_size_in_dword = BITSTREAM_ALLOCATE_STEPPING; + bs->buffer = (unsigned int*)calloc(bs->max_size_in_dword * sizeof(int), 1); + bs->bit_offset = 0; +} + +static void bitstream_end(bitstream *bs) +{ + int pos = (bs->bit_offset >> 5); + int bit_offset = (bs->bit_offset & 0x1f); + int bit_left = 32 - bit_offset; + + if (bit_offset) { + bs->buffer[pos] = va_swap32((bs->buffer[pos] << bit_left)); + } +} + +static void bitstream_put_ui(bitstream *bs, unsigned int val, int size_in_bits) +{ + int pos = (bs->bit_offset >> 5); + int bit_offset = (bs->bit_offset & 0x1f); + int bit_left = 32 - bit_offset; + + if (!size_in_bits) + return; + + bs->bit_offset += size_in_bits; + + if (bit_left > size_in_bits) { + bs->buffer[pos] = (bs->buffer[pos] << size_in_bits | val); + } else { + size_in_bits -= bit_left; + bs->buffer[pos] = (bs->buffer[pos] << bit_left) | (val >> size_in_bits); + bs->buffer[pos] = va_swap32(bs->buffer[pos]); + + if (pos + 1 == bs->max_size_in_dword) { + bs->max_size_in_dword += BITSTREAM_ALLOCATE_STEPPING; + bs->buffer = 
(unsigned int*)realloc(bs->buffer, bs->max_size_in_dword * sizeof(unsigned int)); + } + + bs->buffer[pos + 1] = val; + } +} + +static void bitstream_put_ue(bitstream *bs, unsigned int val) +{ + int size_in_bits = 0; + int tmp_val = ++val; + + while (tmp_val) { + tmp_val >>= 1; + size_in_bits++; + } + + bitstream_put_ui(bs, 0, size_in_bits - 1); // leading zero + bitstream_put_ui(bs, val, size_in_bits); +} + +static void bitstream_put_se(bitstream *bs, int val) +{ + unsigned int new_val; + + if (val <= 0) + new_val = -2 * val; + else + new_val = 2 * val - 1; + + bitstream_put_ue(bs, new_val); +} + +static void bitstream_byte_aligning(bitstream *bs, int bit) +{ + int bit_offset = (bs->bit_offset & 0x7); + int bit_left = 8 - bit_offset; + int new_val; + + if (!bit_offset) + return; + + assert(bit == 0 || bit == 1); + + if (bit) + new_val = (1 << bit_left) - 1; + else + new_val = 0; + + bitstream_put_ui(bs, new_val, bit_left); +} + +static void rbsp_trailing_bits(bitstream *bs) +{ + bitstream_put_ui(bs, 1, 1); + bitstream_byte_aligning(bs, 0); +} + +static void nal_start_code_prefix(bitstream *bs) +{ + bitstream_put_ui(bs, 0x00000001, 32); +} + +static void nal_header(bitstream *bs, int nal_ref_idc, int nal_unit_type) +{ + bitstream_put_ui(bs, 0, 1); /* forbidden_zero_bit: 0 */ + bitstream_put_ui(bs, nal_ref_idc, 2); + bitstream_put_ui(bs, nal_unit_type, 5); +} + +#define NAL_REF_IDC_NONE 0 +#define NAL_REF_IDC_LOW 1 +#define NAL_REF_IDC_MEDIUM 2 +#define NAL_REF_IDC_HIGH 3 + +#define NAL_NON_IDR 1 +#define NAL_IDR 5 +#define NAL_SPS 7 +#define NAL_PPS 8 +#define NAL_SEI 6 + +#define SLICE_TYPE_P 0 +#define SLICE_TYPE_B 1 +#define SLICE_TYPE_I 2 + +#define ENTROPY_MODE_CAVLC 0 +#define ENTROPY_MODE_CABAC 1 + +#define PROFILE_IDC_BASELINE 66 +#define PROFILE_IDC_MAIN 77 +#define PROFILE_IDC_HIGH 100 + +static void sps_rbsp(bitstream *bs, VAProfile profile, int frame_bit_rate, VAEncSequenceParameterBufferH264 *seq_param) +{ + int profile_idc; + int constraint_set_flag; + + if (profile == VAProfileH264High) { + profile_idc = PROFILE_IDC_HIGH; + constraint_set_flag |= (1 << 3); /* Annex A.2.4 */ + } + else if (profile == VAProfileH264Main) { + profile_idc = PROFILE_IDC_MAIN; + constraint_set_flag |= (1 << 1); /* Annex A.2.2 */ + } else { + profile_idc = PROFILE_IDC_BASELINE; + constraint_set_flag |= (1 << 0); /* Annex A.2.1 */ + } + + bitstream_put_ui(bs, profile_idc, 8); /* profile_idc */ + bitstream_put_ui(bs, !!(constraint_set_flag & 1), 1); /* constraint_set0_flag */ + bitstream_put_ui(bs, !!(constraint_set_flag & 2), 1); /* constraint_set1_flag */ + bitstream_put_ui(bs, !!(constraint_set_flag & 4), 1); /* constraint_set2_flag */ + bitstream_put_ui(bs, !!(constraint_set_flag & 8), 1); /* constraint_set3_flag */ + bitstream_put_ui(bs, 0, 4); /* reserved_zero_4bits */ + bitstream_put_ui(bs, seq_param->level_idc, 8); /* level_idc */ + bitstream_put_ue(bs, seq_param->seq_parameter_set_id); /* seq_parameter_set_id */ + + if ( profile_idc == PROFILE_IDC_HIGH) { + bitstream_put_ue(bs, 1); /* chroma_format_idc = 1, 4:2:0 */ + bitstream_put_ue(bs, 0); /* bit_depth_luma_minus8 */ + bitstream_put_ue(bs, 0); /* bit_depth_chroma_minus8 */ + bitstream_put_ui(bs, 0, 1); /* qpprime_y_zero_transform_bypass_flag */ + bitstream_put_ui(bs, 0, 1); /* seq_scaling_matrix_present_flag */ + } + + bitstream_put_ue(bs, seq_param->seq_fields.bits.log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */ + bitstream_put_ue(bs, seq_param->seq_fields.bits.pic_order_cnt_type); /* pic_order_cnt_type */ + + if 
(seq_param->seq_fields.bits.pic_order_cnt_type == 0) + bitstream_put_ue(bs, seq_param->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4); /* log2_max_pic_order_cnt_lsb_minus4 */ + else { + assert(0); + } + + bitstream_put_ue(bs, seq_param->max_num_ref_frames); /* num_ref_frames */ + bitstream_put_ui(bs, 0, 1); /* gaps_in_frame_num_value_allowed_flag */ + + bitstream_put_ue(bs, seq_param->picture_width_in_mbs - 1); /* pic_width_in_mbs_minus1 */ + bitstream_put_ue(bs, seq_param->picture_height_in_mbs - 1); /* pic_height_in_map_units_minus1 */ + bitstream_put_ui(bs, seq_param->seq_fields.bits.frame_mbs_only_flag, 1); /* frame_mbs_only_flag */ + + if (!seq_param->seq_fields.bits.frame_mbs_only_flag) { + assert(0); + } + + bitstream_put_ui(bs, seq_param->seq_fields.bits.direct_8x8_inference_flag, 1); /* direct_8x8_inference_flag */ + bitstream_put_ui(bs, seq_param->frame_cropping_flag, 1); /* frame_cropping_flag */ + + if (seq_param->frame_cropping_flag) { + bitstream_put_ue(bs, seq_param->frame_crop_left_offset); /* frame_crop_left_offset */ + bitstream_put_ue(bs, seq_param->frame_crop_right_offset); /* frame_crop_right_offset */ + bitstream_put_ue(bs, seq_param->frame_crop_top_offset); /* frame_crop_top_offset */ + bitstream_put_ue(bs, seq_param->frame_crop_bottom_offset); /* frame_crop_bottom_offset */ + } + + if ( frame_bit_rate < 0 ) { + bitstream_put_ui(bs, 0, 1); /* vui_parameters_present_flag */ + } else { + bitstream_put_ui(bs, 1, 1); /* vui_parameters_present_flag */ + bitstream_put_ui(bs, 0, 1); /* aspect_ratio_info_present_flag */ + bitstream_put_ui(bs, 0, 1); /* overscan_info_present_flag */ + bitstream_put_ui(bs, 0, 1); /* video_signal_type_present_flag */ + bitstream_put_ui(bs, 0, 1); /* chroma_loc_info_present_flag */ + bitstream_put_ui(bs, 1, 1); /* timing_info_present_flag */ + { + bitstream_put_ui(bs, 15, 32); + bitstream_put_ui(bs, 900, 32); + bitstream_put_ui(bs, 1, 1); + } + bitstream_put_ui(bs, 1, 1); /* nal_hrd_parameters_present_flag */ + { + // hrd_parameters + bitstream_put_ue(bs, 0); /* cpb_cnt_minus1 */ + bitstream_put_ui(bs, 4, 4); /* bit_rate_scale */ + bitstream_put_ui(bs, 6, 4); /* cpb_size_scale */ + + bitstream_put_ue(bs, frame_bit_rate - 1); /* bit_rate_value_minus1[0] */ + bitstream_put_ue(bs, frame_bit_rate*8 - 1); /* cpb_size_value_minus1[0] */ + bitstream_put_ui(bs, 1, 1); /* cbr_flag[0] */ + + bitstream_put_ui(bs, 23, 5); /* initial_cpb_removal_delay_length_minus1 */ + bitstream_put_ui(bs, 23, 5); /* cpb_removal_delay_length_minus1 */ + bitstream_put_ui(bs, 23, 5); /* dpb_output_delay_length_minus1 */ + bitstream_put_ui(bs, 23, 5); /* time_offset_length */ + } + bitstream_put_ui(bs, 0, 1); /* vcl_hrd_parameters_present_flag */ + bitstream_put_ui(bs, 0, 1); /* low_delay_hrd_flag */ + + bitstream_put_ui(bs, 0, 1); /* pic_struct_present_flag */ + bitstream_put_ui(bs, 0, 1); /* bitstream_restriction_flag */ + } + + rbsp_trailing_bits(bs); /* rbsp_trailing_bits */ +} + +static void pps_rbsp(bitstream *bs, VAEncPictureParameterBufferH264 *pic_param) +{ + + bitstream_put_ue(bs, pic_param->pic_parameter_set_id); /* pic_parameter_set_id */ + bitstream_put_ue(bs, pic_param->seq_parameter_set_id); /* seq_parameter_set_id */ + + bitstream_put_ui(bs, pic_param->pic_fields.bits.entropy_coding_mode_flag, 1); /* entropy_coding_mode_flag */ + + bitstream_put_ui(bs, 0, 1); /* pic_order_present_flag: 0 */ + + bitstream_put_ue(bs, 0); /* num_slice_groups_minus1 */ + + bitstream_put_ue(bs, pic_param->num_ref_idx_l0_active_minus1); /* num_ref_idx_l0_active_minus1 */ + 
bitstream_put_ue(bs, pic_param->num_ref_idx_l1_active_minus1); /* num_ref_idx_l1_active_minus1 1 */ + + bitstream_put_ui(bs, pic_param->pic_fields.bits.weighted_pred_flag, 1); /* weighted_pred_flag: 0 */ + bitstream_put_ui(bs, pic_param->pic_fields.bits.weighted_bipred_idc, 2); /* weighted_bipred_idc: 0 */ + + bitstream_put_se(bs, pic_param->pic_init_qp - 26); /* pic_init_qp_minus26 */ + bitstream_put_se(bs, 0); /* pic_init_qs_minus26 */ + bitstream_put_se(bs, 0); /* chroma_qp_index_offset */ + + bitstream_put_ui(bs, pic_param->pic_fields.bits.deblocking_filter_control_present_flag, 1); /* deblocking_filter_control_present_flag */ + bitstream_put_ui(bs, 0, 1); /* constrained_intra_pred_flag */ + bitstream_put_ui(bs, 0, 1); /* redundant_pic_cnt_present_flag */ + + /* more_rbsp_data */ + bitstream_put_ui(bs, pic_param->pic_fields.bits.transform_8x8_mode_flag, 1); /*transform_8x8_mode_flag */ + bitstream_put_ui(bs, 0, 1); /* pic_scaling_matrix_present_flag */ + bitstream_put_se(bs, pic_param->second_chroma_qp_index_offset ); /*second_chroma_qp_index_offset */ + + rbsp_trailing_bits(bs); +} + +int build_packed_seq_buffer(unsigned char **header_buffer, VAProfile profile, VAEncSequenceParameterBufferH264 *seq_param) +{ + bitstream bs; + + bitstream_start(&bs); + nal_start_code_prefix(&bs); + nal_header(&bs, NAL_REF_IDC_HIGH, NAL_SPS); + sps_rbsp(&bs, profile, seq_param->bits_per_second, seq_param); + bitstream_end(&bs); + + *header_buffer = (unsigned char *)bs.buffer; + return bs.bit_offset; +} + +int build_packed_pic_buffer(unsigned char **header_buffer, VAEncPictureParameterBufferH264 *pic_param) +{ + bitstream bs; + + bitstream_start(&bs); + nal_start_code_prefix(&bs); + nal_header(&bs, NAL_REF_IDC_HIGH, NAL_PPS); + pps_rbsp(&bs, pic_param); + bitstream_end(&bs); + + *header_buffer = (unsigned char *)bs.buffer; + return bs.bit_offset; +} + +int build_packed_sei_buffer_timing(unsigned int init_cpb_removal_length, + unsigned int init_cpb_removal_delay, + unsigned int init_cpb_removal_delay_offset, + unsigned int cpb_removal_length, + unsigned int cpb_removal_delay, + unsigned int dpb_output_length, + unsigned int dpb_output_delay, + unsigned char **sei_buffer) +{ + unsigned char *byte_buf; + int bp_byte_size, i, pic_byte_size; + + bitstream nal_bs; + bitstream sei_bp_bs, sei_pic_bs; + + bitstream_start(&sei_bp_bs); + bitstream_put_ue(&sei_bp_bs, 0); /*seq_parameter_set_id*/ + bitstream_put_ui(&sei_bp_bs, init_cpb_removal_delay, cpb_removal_length); + bitstream_put_ui(&sei_bp_bs, init_cpb_removal_delay_offset, cpb_removal_length); + if ( sei_bp_bs.bit_offset & 0x7) { + bitstream_put_ui(&sei_bp_bs, 1, 1); + } + bitstream_end(&sei_bp_bs); + bp_byte_size = (sei_bp_bs.bit_offset + 7) / 8; + + bitstream_start(&sei_pic_bs); + bitstream_put_ui(&sei_pic_bs, cpb_removal_delay, cpb_removal_length); + bitstream_put_ui(&sei_pic_bs, dpb_output_delay, dpb_output_length); + if ( sei_pic_bs.bit_offset & 0x7) { + bitstream_put_ui(&sei_pic_bs, 1, 1); + } + bitstream_end(&sei_pic_bs); + pic_byte_size = (sei_pic_bs.bit_offset + 7) / 8; + + bitstream_start(&nal_bs); + nal_start_code_prefix(&nal_bs); + nal_header(&nal_bs, NAL_REF_IDC_NONE, NAL_SEI); + + /* Write the SEI buffer period data */ + bitstream_put_ui(&nal_bs, 0, 8); + bitstream_put_ui(&nal_bs, bp_byte_size, 8); + + byte_buf = (unsigned char *)sei_bp_bs.buffer; + for(i = 0; i < bp_byte_size; i++) { + bitstream_put_ui(&nal_bs, byte_buf[i], 8); + } + free(byte_buf); + /* write the SEI timing data */ + bitstream_put_ui(&nal_bs, 0x01, 8); + 
bitstream_put_ui(&nal_bs, pic_byte_size, 8); + + byte_buf = (unsigned char *)sei_pic_bs.buffer; + for(i = 0; i < pic_byte_size; i++) { + bitstream_put_ui(&nal_bs, byte_buf[i], 8); + } + free(byte_buf); + + rbsp_trailing_bits(&nal_bs); + bitstream_end(&nal_bs); + + *sei_buffer = (unsigned char *)nal_bs.buffer; + + return nal_bs.bit_offset; +} + +#endif -- cgit v1.2.3 From 7e0d428c4da4197faff8a63430fa3a1c3d72bdda Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Wed, 27 Mar 2013 11:17:38 +0800 Subject: VE: create surfaces for gralloc buffer according to its dimension BZ: 95915 Use gralloc buffer's resolution to create surfaces. Change-Id: I8b2991d04ce0a01bbb852bb1f618045ac2766eed Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/98533 Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: cactus Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/Android.mk | 4 ++-- videoencoder/VideoEncoderBase.cpp | 21 ++++++++++++++++++--- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk index af21f9f..bf3f8b6 100644 --- a/videoencoder/Android.mk +++ b/videoencoder/Android.mk @@ -46,8 +46,8 @@ ifeq ($(VIDEO_ENC_STATISTICS_ENABLE),true) LOCAL_CPPFLAGS += -DVIDEO_ENC_STATISTICS_ENABLE endif -ifeq ($(REF_PRODUCT_NAME),baylake) - LOCAL_C_FLAGS += -DBAYLAKE +ifeq ($(ENABLE_IMG_GRAPHICS),true) + LOCAL_CFLAGS += -DIMG_GFX endif LOCAL_MODULE_TAGS := optional diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 64f5a02..2150f6a 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -11,6 +11,9 @@ #include "IntelMetadataBuffer.h" #include #include +#ifdef IMG_GFX +#include +#endif // API declaration extern "C" { @@ -1551,21 +1554,33 @@ Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle(SurfaceMap *map) { LOG_I("gfxhandle = %d\n", map->value); vaSurfaceAttrib.count = 1; +#ifdef IMG_GFX // color fmrat may be OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar or HAL_PIXEL_FORMAT_NV12 - //IMG_native_handle_t* h = (IMG_native_handle_t*) map->value; - //LOG_I("IMG_native_handle_t h->iWidth=%d, h->iHeight=%d, h->iFormat=%x\n", h->iWidth, h->iHeight, h->iFormat); - + IMG_native_handle_t* h = (IMG_native_handle_t*) map->value; + LOG_I("IMG_native_handle_t h->iWidth=%d, h->iHeight=%d, h->iFormat=%x\n", h->iWidth, h->iHeight, h->iFormat); + vaSurfaceAttrib.luma_stride = h->iWidth; + vaSurfaceAttrib.pixel_format = h->iFormat; + vaSurfaceAttrib.width = h->iWidth; + vaSurfaceAttrib.height = h->iHeight; + +#else vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride; vaSurfaceAttrib.pixel_format = map->vinfo.format; vaSurfaceAttrib.width = map->vinfo.width; vaSurfaceAttrib.height = map->vinfo.height; +#endif vaSurfaceAttrib.type = VAExternalMemoryAndroidGrallocBuffer; vaSurfaceAttrib.buffers[0] = (uint32_t) map->value; vaStatus = vaCreateSurfacesWithAttribute( mVADisplay, +#ifdef IMG_GFX + h->iWidth, + h->iHeight, +#else map->vinfo.width, map->vinfo.height, +#endif VA_RT_FORMAT_YUV420, 1, &surface, -- cgit v1.2.3 From 72bde7101fc8774cb47bc484d31fafcfb8266375 Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Thu, 28 Mar 2013 19:20:15 +0800 Subject: VE: enable video record on Baytrail. BZ: 94688 Temporary solution. Use memory copy to convey frames from camera to video. 
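The copy path added below derives a VAImage from the destination surface and copies the NV12 planes row by row, honoring the pitches the driver reports. A minimal sketch of that pattern (the helper name is illustrative, and it assumes a tightly packed NV12 source; the real code takes the source strides from the SurfaceMap's vinfo instead):

#include <string.h>
#include <va/va.h>

/* Sketch: copy one tightly packed NV12 frame into a VA surface.
 * Destination offsets/pitches always come from the derived VAImage. */
static VAStatus copy_nv12_to_surface(VADisplay dpy, VASurfaceID surface,
                                     const unsigned char *src,
                                     int width, int height)
{
    VAImage image;
    unsigned char *dst = NULL;

    VAStatus status = vaDeriveImage(dpy, surface, &image);
    if (status != VA_STATUS_SUCCESS)
        return status;
    status = vaMapBuffer(dpy, image.buf, (void **)&dst);
    if (status != VA_STATUS_SUCCESS) {
        vaDestroyImage(dpy, image.image_id);
        return status;
    }

    const unsigned char *y_src = src;
    const unsigned char *uv_src = src + width * height;  /* NV12: UV follows Y */
    unsigned char *y_dst = dst + image.offsets[0];
    unsigned char *uv_dst = dst + image.offsets[1];

    for (int row = 0; row < height; row++)               /* luma plane */
        memcpy(y_dst + row * image.pitches[0], y_src + row * width, width);
    for (int row = 0; row < height / 2; row++)           /* interleaved chroma */
        memcpy(uv_dst + row * image.pitches[1], uv_src + row * width, width);

    vaUnmapBuffer(dpy, image.buf);
    vaDestroyImage(dpy, image.image_id);
    return VA_STATUS_SUCCESS;
}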
Change-Id: I1c3112f977b51a8a545838eeaf23efcbd97c9ae5 Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/98900 Reviewed-by: cactus Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/Android.mk | 5 +++ videoencoder/VideoEncoderAVC.cpp | 12 +++++- videoencoder/VideoEncoderBase.cpp | 77 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 93 insertions(+), 1 deletion(-) diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk index bf3f8b6..69dc636 100644 --- a/videoencoder/Android.mk +++ b/videoencoder/Android.mk @@ -2,6 +2,7 @@ LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) #VIDEO_ENC_LOG_ENABLE := true +#NO_BUFFER_SHARE := true LOCAL_SRC_FILES := \ VideoEncoderBase.cpp \ @@ -42,6 +43,10 @@ ifeq ($(VIDEO_ENC_LOG_ENABLE),true) LOCAL_CPPFLAGS += -DVIDEO_ENC_LOG_ENABLE endif +ifeq ($(NO_BUFFER_SHARE),true) +LOCAL_CPPFLAGS += -DNO_BUFFER_SHARE +endif + ifeq ($(VIDEO_ENC_STATISTICS_ENABLE),true) LOCAL_CPPFLAGS += -DVIDEO_ENC_STATISTICS_ENABLE endif diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index ea1d3f9..f84d11c 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -953,6 +953,8 @@ Encode_Status VideoEncoderAVC::renderPackedSequenceParams(EncodeTask *task) { unsigned char *packed_seq_buffer = NULL; unsigned int length_in_bits, offset_in_bytes; + LOG_V("Begin\n"); + vaStatus = vaMapBuffer(mVADisplay, mSeqParamBuf, (void **)&avcSeqParams); CHECK_VA_STATUS_RETURN("vaMapBuffer"); @@ -982,6 +984,9 @@ Encode_Status VideoEncoderAVC::renderPackedSequenceParams(EncodeTask *task) { CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); free(packed_seq_buffer); + + LOG_V("End\n"); + return vaStatus; } @@ -1017,7 +1022,7 @@ Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) { avcPicParams.pic_fields.bits.idr_pic_flag = 0; avcPicParams.pic_fields.bits.reference_pic_flag = 0; - avcPicParams.pic_fields.bits.entropy_coding_mode_flag = 1; + avcPicParams.pic_fields.bits.entropy_coding_mode_flag = 0; avcPicParams.pic_fields.bits.weighted_pred_flag = 0; avcPicParams.pic_fields.bits.weighted_bipred_idc = 0; avcPicParams.pic_fields.bits.transform_8x8_mode_flag = 0; @@ -1065,6 +1070,8 @@ Encode_Status VideoEncoderAVC::renderPackedPictureParams(EncodeTask *task) { unsigned char *packed_pic_buffer = NULL; unsigned int length_in_bits, offset_in_bytes; + LOG_V("Begin\n"); + vaStatus = vaMapBuffer(mVADisplay, mPicParamBuf, (void **)&avcPicParams); CHECK_VA_STATUS_RETURN("vaMapBuffer"); @@ -1094,6 +1101,9 @@ Encode_Status VideoEncoderAVC::renderPackedPictureParams(EncodeTask *task) { CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); free(packed_pic_buffer); + + LOG_V("End\n"); + return vaStatus; } diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 2150f6a..a3f5109 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -231,8 +231,13 @@ Encode_Status VideoEncoderBase::start() { //Initialize and save the VA context ID LOG_V( "vaCreateContext\n"); vaStatus = vaCreateContext(mVADisplay, mVAConfig, +#ifdef IMG_GFX mComParams.resolution.width, mComParams.resolution.height, +#else + stride_aligned, + height_aligned, +#endif VA_PROGRESSIVE, contextSurfaces, contextSurfaceCnt, &(mVAContext)); CHECK_VA_STATUS_RETURN("vaCreateContext"); @@ -652,6 +657,21 @@ Encode_Status VideoEncoderBase::prepareForOutput( mTotalSize += vaCodedSeg->size; status = vaCodedSeg->status; +#ifndef IMG_GFX + uint8_t *pTemp; + 
uint32_t ii; + pTemp = (uint8_t*)vaCodedSeg->buf; + for(ii = 0; ii < 16;){ + if (*(pTemp + ii) == 0xFF) + ii++; + else + break; + } + if (ii > 0) { + mOffsetInSeg = ii; + } +#endif + if (!mSliceSizeOverflow) { mSliceSizeOverflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK; @@ -1671,6 +1691,51 @@ Encode_Status VideoEncoderBase::surfaceMappingForCI(SurfaceMap *map) { return ret; } +#if NO_BUFFER_SHARE +static VAStatus upload_yuv_to_surface(VADisplay va_dpy, + SurfaceMap *map, VASurfaceID surface_id, int picture_width, + int picture_height) +{ + VAImage surface_image; + VAStatus vaStatus; + unsigned char *surface_p = NULL; + unsigned char *y_src, *uv_src; + unsigned char *y_dst, *uv_dst; + int y_size = map->vinfo.height * map->vinfo.lumaStride; + int row, col; + + vaStatus = vaDeriveImage(va_dpy, surface_id, &surface_image); + CHECK_VA_STATUS_RETURN("vaDeriveImage"); + + vaStatus = vaMapBuffer(va_dpy, surface_image.buf, (void**)&surface_p); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + y_src = (unsigned char*)map->value; + uv_src = (unsigned char*)map->value + y_size; /* UV offset for NV12 */ + + y_dst = surface_p + surface_image.offsets[0]; + uv_dst = surface_p + surface_image.offsets[1]; /* UV offset for NV12 */ + + /* Y plane */ + for (row = 0; row < picture_height; row++) { + memcpy(y_dst, y_src, picture_width); + y_dst += surface_image.pitches[0]; + y_src += map->vinfo.lumaStride; + } + + for (row = 0; row < (picture_height / 2); row++) { + memcpy(uv_dst, uv_src, picture_width); + uv_dst += surface_image.pitches[1]; + uv_src += map->vinfo.chromStride; + } + + vaUnmapBuffer(va_dpy, surface_image.buf); + vaDestroyImage(va_dpy, surface_image.image_id); + + return vaStatus; +} +#endif + Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { if (!map) @@ -1679,7 +1744,18 @@ Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { VAStatus vaStatus = VA_STATUS_SUCCESS; Encode_Status ret = ENCODE_SUCCESS; VASurfaceID surface; +#if NO_BUFFER_SHARE + vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, + map->vinfo.width, map->vinfo.height, &surface, 1, + NULL, 0); + CHECK_VA_STATUS_RETURN("vaCreateSurfaces"); + map->surface = surface; + vaStatus = upload_yuv_to_surface(mVADisplay, map, surface, + mComParams.resolution.width, mComParams.resolution.height); + CHECK_ENCODE_STATUS_RETURN("upload_yuv_to_surface"); + +#else VASurfaceAttributeTPI vaSurfaceAttrib; uint32_t buf; @@ -1731,6 +1807,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { map->surface = surfaceId; LOG_E("Due to 64 alignment, an alternative Surface ID 0x%08x created\n", surfaceId); } +#endif return ret; } -- cgit v1.2.3 From 87adb152c2b8219f7c731f052da67244f756eacc Mon Sep 17 00:00:00 2001 From: ywan171 Date: Mon, 8 Apr 2013 08:59:11 +0800 Subject: libmix: wait for the decode buffer ready if the surface is not composed by surfaceFlinger BZ: 92956 if the surfaceTexture is not composed by surfaceFlinger, for example, video edit preview mode and HTML5 inline mode. the waiting for decode buffer ready should be done in decode thread to avoid tearing or color line in such use case, because the app will not pass the NV12 buffer to HWC, and the NV12 buffer will be disposed by openGL and convert to RGB in application. 
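Concretely, getOutput() gains a guard that blocks on the surface before handing it out when no native window is attached, so the hardware is guaranteed to have finished writing before the application reads the buffer. A sketch of the guard, using the member names from this file:

// Without a native window, the consumer (GL in the application) reads the
// NV12 buffer directly instead of passing it to HWC, so we must not return
// the buffer while the decode hardware may still be writing it.
if (mConfigBuffer.nativeWindow == NULL && useGraphicBuffer) {
    vaSyncSurface(mVADisplay, output->renderBuffer.surface);
}
return &(output->renderBuffer);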
Change-Id: I0a7f60b93f5ca1cbc2cf5cdcaf34f6044712a548 Signed-off-by: ywan171 Reviewed-on: http://android.intel.com:8080/100387 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index e5285ca..bd9ffd4 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -233,6 +233,9 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { mOutputTail = NULL; } vaStatus = vaSetTimestampForSurface(mVADisplay, outputByPos->renderBuffer.surface, outputByPos->renderBuffer.timeStamp); + if (mConfigBuffer.nativeWindow == NULL && useGraphicBuffer) { + vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface); + } return &(outputByPos->renderBuffer); } @@ -286,6 +289,10 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { } #endif + if (mConfigBuffer.nativeWindow == NULL && useGraphicBuffer) { + vaSyncSurface(mVADisplay, output->renderBuffer.surface); + } + return &(output->renderBuffer); } -- cgit v1.2.3 From 57e0c100dbf91f45d094ee984098056720718aa2 Mon Sep 17 00:00:00 2001 From: Nana GUo Date: Fri, 12 Apr 2013 11:45:08 -0400 Subject: libmix: refine HW JPEG decoder following DDK_3_3_20 BZ: 99010 refine HW JPEG decoder in parse part following DDK_3_3_20 and memory leak issue Change-Id: I7c05ce09405082c22de6c201ae2c53c97bd45d16 Signed-off-by: Nana GUo Reviewed-on: http://android.intel.com:8080/101682 Reviewed-by: cactus Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- imagedecoder/JPEGDecoder.c | 242 ++++++++++++++++++++------------------------- imagedecoder/JPEGDecoder.h | 2 + 2 files changed, 111 insertions(+), 133 deletions(-) diff --git a/imagedecoder/JPEGDecoder.c b/imagedecoder/JPEGDecoder.c index b1ecb49..f692ede 100644 --- a/imagedecoder/JPEGDecoder.c +++ b/imagedecoder/JPEGDecoder.c @@ -53,18 +53,11 @@ Decode_Status jdva_initialize (jd_libva_struct * jd_libva_ptr) { * them here and return false is meaningless, same situation for all internal methods * related to VA API */ - int va_major_version; - int va_minor_version; -#if 0 - int va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs; - int va_num_profiles, va_num_entrypoints; - - VAProfile *va_profiles = NULL; - VAEntrypoint *va_entrypoints = NULL; -#endif + uint32_t va_major_version = 0; + uint32_t va_minor_version = 0; VAStatus va_status = VA_STATUS_SUCCESS; Decode_Status status = DECODE_SUCCESS; - int index; + uint32_t index; if (jd_libva_ptr->initialized) return DECODE_NOT_STARTED; @@ -86,54 +79,7 @@ Decode_Status jdva_initialize (jd_libva_struct * jd_libva_ptr) { status = DECODE_DRIVER_FAIL; goto cleanup; } -#if 0 - /*get the max number for profiles/entrypoints/attribs*/ - va_max_num_profiles = vaMaxNumProfiles(jd_libva_ptr->va_display); - va_max_num_entrypoints = vaMaxNumEntrypoints(jd_libva_ptr->va_display); - va_max_num_attribs = vaMaxNumConfigAttributes(jd_libva_ptr->va_display); - - va_profiles = malloc (sizeof(VAProfile)*va_max_num_profiles); - va_entrypoints = malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints); - if (va_profiles == NULL || va_entrypoints ==NULL) { - jd_libva_ptr->initialized = TRUE; // make sure we can call into jva_deinitialize() - jdva_deinitialize (jd_libva_ptr); - return DECODE_MEMORY_FAIL; - } - - va_status = vaQueryConfigProfiles (jd_libva_ptr->va_display, va_profiles, 
&va_num_profiles); - - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaQueryConfigProfiles failed. va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - goto cleanup; - } - - /*check whether profile is supported*/ - for(index= 0; index < va_num_profiles; index++) { - if(VAProfileJPEGBaseline == va_profiles[index]) - break; - } - - if(index == va_num_profiles) { - WTRACE("Profile not surportted\n"); - status = DECODE_FAIL; - goto cleanup; - - } - - va_status = vaQueryConfigEntrypoints(jd_libva_ptr->va_display, VAProfileJPEGBaseline, va_entrypoints, &va_num_entrypoints); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaQueryConfigProfiles failed. va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - goto cleanup; - } - /* traverse entrypoints arrary to see whether VLD is there */ - for (index = 0; index < va_num_entrypoints; index ++) { - if (va_entrypoints[index] == VAEntrypointVLD) - break; - } -#endif VAConfigAttrib attrib; attrib.type = VAConfigAttribRTFormat; va_status = vaGetConfigAttributes(jd_libva_ptr->va_display, VAProfileJPEGBaseline, VAEntrypointVLD, &attrib, 1); @@ -166,7 +112,7 @@ cleanup: if (va_entrypoints) free (va_entrypoints); #endif - if (!status) { + if (status) { jd_libva_ptr->initialized = TRUE; // make sure we can call into jva_deinitialize() jdva_deinitialize (jd_libva_ptr); return status; @@ -201,7 +147,7 @@ void jdva_deinitialize (jd_libva_struct * jd_libva_ptr) { Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr) { VAStatus va_status = VA_STATUS_SUCCESS; - Decode_Status status; + Decode_Status status = DECODE_SUCCESS; jd_libva_ptr->image_width = jd_libva_ptr->picture_param_buf.picture_width; jd_libva_ptr->image_height = jd_libva_ptr->picture_param_buf.picture_height; jd_libva_ptr->surface_count = 1; @@ -210,8 +156,8 @@ Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr) { return DECODE_MEMORY_FAIL; } va_status = vaCreateSurfaces(jd_libva_ptr->va_display, VA_RT_FORMAT_YUV444, - (((jd_libva_ptr->image_width + 7) & (~7)) + 15) & (~15), - (((jd_libva_ptr->image_width + 7) & (~7)) + 15) & (~15), + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, jd_libva_ptr->va_surfaces, jd_libva_ptr->surface_count, NULL, 0); if (va_status != VA_STATUS_SUCCESS) { @@ -220,8 +166,8 @@ Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr) { goto cleanup; } va_status = vaCreateContext(jd_libva_ptr->va_display, jd_libva_ptr->va_config, - (( ( jd_libva_ptr->image_width + 7 ) & ( ~7 )) + 15 ) & ( ~15 ), - ((( jd_libva_ptr->image_height + 7 ) & ( ~7 )) + 15 ) & ( ~15 ), + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, 0, //VA_PROGRESSIVE jd_libva_ptr->va_surfaces, jd_libva_ptr->surface_count, &(jd_libva_ptr->va_context)); @@ -298,13 +244,13 @@ Decode_Status jdva_decode (jd_libva_struct * jd_libva_ptr, uint8_t* buf) { Decode_Status status = DECODE_SUCCESS; VAStatus va_status = VA_STATUS_SUCCESS; VABufferID desc_buf[5]; - uint32_t bitstream_buffer_size; + uint32_t bitstream_buffer_size = 0; uint32_t scan_idx = 0; uint32_t buf_idx = 0; uint32_t chopping = VA_SLICE_DATA_FLAG_ALL; - uint32_t bytes_remaining = jd_libva_ptr->file_size; - uint32_t src_offset = 0; - bitstream_buffer_size = 1024*512*5; + uint32_t bytes_remaining = jd_libva_ptr->eoi_offset - jd_libva_ptr->soi_offset; + uint32_t src_offset = jd_libva_ptr->soi_offset; + bitstream_buffer_size = 1024*1024*5; va_status = vaBeginPicture(jd_libva_ptr->va_display, jd_libva_ptr->va_context, jd_libva_ptr->va_surfaces[0]); if (va_status != VA_STATUS_SUCCESS) 
{ @@ -340,7 +286,7 @@ Decode_Status jdva_decode (jd_libva_struct * jd_libva_ptr, uint8_t* buf) { bytes_remaining -= bytes; /* Get Slice Control Buffer */ VASliceParameterBufferJPEG dest_scan_ctrl[JPEG_MAX_COMPONENTS]; - uint32_t src_idx; + uint32_t src_idx = 0; uint32_t dest_idx = 0; memset(dest_scan_ctrl, 0, sizeof(dest_scan_ctrl)); for (src_idx = scan_idx; src_idx < jd_libva_ptr->scan_ctrl_count ; src_idx++) { @@ -365,7 +311,7 @@ Decode_Status jdva_decode (jd_libva_struct * jd_libva_ptr, uint8_t* buf) { } dest_scan_ctrl[dest_idx].slice_data_flag = chopping; dest_scan_ctrl[dest_idx].slice_data_offset = ((chopping == VA_SLICE_DATA_FLAG_ALL) || (chopping == VA_SLICE_DATA_FLAG_BEGIN) )? -jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset - src_offset : 0; +jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset : 0; const int32_t bytes_in_seg = bytes - dest_scan_ctrl[dest_idx].slice_data_offset; const uint32_t scan_data = (bytes_in_seg < jd_libva_ptr->slice_param_buf[src_idx].slice_data_size) ? bytes_in_seg : jd_libva_ptr->slice_param_buf[src_idx].slice_data_size ; @@ -477,8 +423,12 @@ Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { uint8_t marker = jd_libva_ptr->JPEGParser->getNextMarker(jd_libva_ptr->JPEGParser); - while (marker != CODE_EOI) { + while (marker != CODE_EOI &&( !jd_libva_ptr->JPEGParser->endOfBuffer(jd_libva_ptr->JPEGParser))) { switch (marker) { + case CODE_SOI: { + jd_libva_ptr->soi_offset = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser) - 2; + break; + } // If the marker is an APP marker skip over the data case CODE_APP0: case CODE_APP1: @@ -503,10 +453,15 @@ Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { } // Store offset to DQT data to avoid parsing bitstream in user mode case CODE_DQT: { - jd_libva_ptr->dqt_byte_offset[dqt_ind] = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser); - dqt_ind++; - uint32_t bytes_to_burn = jd_libva_ptr->JPEGParser->readBytes( jd_libva_ptr->JPEGParser, 2 ) - 2; - jd_libva_ptr->JPEGParser->burnBytes( jd_libva_ptr->JPEGParser, bytes_to_burn ); + if (dqt_ind < 4) { + jd_libva_ptr->dqt_byte_offset[dqt_ind] = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser) - jd_libva_ptr->soi_offset; + dqt_ind++; + uint32_t bytes_to_burn = jd_libva_ptr->JPEGParser->readBytes( jd_libva_ptr->JPEGParser, 2 ) - 2; + jd_libva_ptr->JPEGParser->burnBytes( jd_libva_ptr->JPEGParser, bytes_to_burn ); + } else { + ETRACE("ERROR: Decoder does not support more than 4 Quant Tables\n"); + return DECODE_PARSER_FAIL; + } break; } // Throw exception for all SOF marker other than SOF0 @@ -523,7 +478,7 @@ Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { case CODE_SOF13: case CODE_SOF14: case CODE_SOF15: { - fprintf(stderr, "ERROR: unsupport SOF\n"); + ETRACE("ERROR: unsupport SOF\n"); break; } // Parse component information in SOF marker @@ -531,8 +486,9 @@ Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { frame_marker_found = TRUE; jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, 2); // Throw away frame header length - jd_libva_ptr->picture_param_buf.sample_precision = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); - if (jd_libva_ptr->picture_param_buf.sample_precision != 8) { + uint8_t sample_precision = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); + if (sample_precision != 8) { + ETRACE("sample_precision is not supported\n"); return DECODE_PARSER_FAIL; } // Extract pic width and height @@ -543,7 +499,7 @@ 
Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { if (jd_libva_ptr->picture_param_buf.num_components > JPEG_MAX_COMPONENTS) { return DECODE_PARSER_FAIL; } - uint8_t comp_ind; + uint8_t comp_ind = 0; for (comp_ind = 0; comp_ind < jd_libva_ptr->picture_param_buf.num_components; comp_ind++) { jd_libva_ptr->picture_param_buf.components[comp_ind].component_id = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); @@ -558,17 +514,22 @@ Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { } // Store offset to DHT data to avoid parsing bitstream in user mode case CODE_DHT: { - jd_libva_ptr->dht_byte_offset[dht_ind] = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser); - dht_ind++; - uint32_t bytes_to_burn = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2) - 2; - jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, bytes_to_burn ); + if (dht_ind < 4) { + jd_libva_ptr->dht_byte_offset[dht_ind] = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser) - jd_libva_ptr->soi_offset; + dht_ind++; + uint32_t bytes_to_burn = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2) - 2; + jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, bytes_to_burn ); + } else { + ETRACE("ERROR: Decoder does not support more than 4 Huff Tables\n"); + return DECODE_PARSER_FAIL; + } break; } // Parse component information in SOS marker case CODE_SOS: { jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, 2); uint32_t component_in_scan = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); - uint8_t comp_ind; + uint8_t comp_ind = 0; for (comp_ind = 0; comp_ind < component_in_scan; comp_ind++) { uint8_t comp_id = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); @@ -596,7 +557,7 @@ Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { return DECODE_PARSER_FAIL; } // Set slice control variables needed - jd_libva_ptr->slice_param_buf[scan_ind].slice_data_offset = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser); + jd_libva_ptr->slice_param_buf[scan_ind].slice_data_offset = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser) - jd_libva_ptr->soi_offset; jd_libva_ptr->slice_param_buf[scan_ind].num_components = component_in_scan; if (scan_ind) { /* If there is more than one scan, the slice for all but the final scan should only run up to the beginning of the next scan */ @@ -618,13 +579,18 @@ Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { } marker = jd_libva_ptr->JPEGParser->getNextMarker(jd_libva_ptr->JPEGParser); + // If the EOI code is found, store the byte offset before the parsing finishes + if( marker == CODE_EOI ) { + jd_libva_ptr->eoi_offset = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser); + } + } jd_libva_ptr->quant_tables_num = dqt_ind; jd_libva_ptr->huffman_tables_num = dht_ind; /* The slice for the last scan should run up to the end of the picture */ - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_size = (jd_libva_ptr->file_size - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_offset); + jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_size = (jd_libva_ptr->eoi_offset - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_offset); // throw AppException if SOF0 isn't found if (!frame_marker_found) { @@ -639,7 +605,6 @@ Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { } Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr) { - Decode_Status status; CJPEGParse* 
parser = (CJPEGParse*)malloc(sizeof(CJPEGParse)); if (parser == NULL) return DECODE_MEMORY_FAIL; @@ -648,7 +613,7 @@ Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr) { // Parse Quant tables memset(&jd_libva_ptr->qmatrix_buf, 0, sizeof(jd_libva_ptr->qmatrix_buf)); - uint32_t dqt_ind; + uint32_t dqt_ind = 0; for (dqt_ind = 0; dqt_ind < jd_libva_ptr->quant_tables_num; dqt_ind++) { if (parser->setByteOffset(parser, jd_libva_ptr->dqt_byte_offset[dqt_ind])) { // uint32_t uiTableBytes = parser->readBytes( 2 ) - 2; @@ -662,15 +627,18 @@ Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr) { return DECODE_PARSER_FAIL; } uint32_t table_id = table_info & 0xf; - if (table_id >= JPEG_MAX_QUANT_TABLES) { - return DECODE_PARSER_FAIL; - } + jd_libva_ptr->qmatrix_buf.load_quantiser_table[dqt_ind] = table_id; - // Pull Quant table data from bitstream - uint32_t byte_ind; - for (byte_ind = 0; byte_ind < table_length; byte_ind++) { - jd_libva_ptr->qmatrix_buf.quantiser_table[table_id][byte_ind] = parser->readNextByte(parser); + if (table_id < JPEG_MAX_QUANT_TABLES) { + // Pull Quant table data from bitstream + uint32_t byte_ind; + for (byte_ind = 0; byte_ind < table_length; byte_ind++) { + jd_libva_ptr->qmatrix_buf.quantiser_table[table_id][byte_ind] = parser->readNextByte(parser); + } + } else { + ETRACE("DQT table ID is not supported"); + parser->burnBytes(parser, table_length); } table_bytes -= table_length; } while (table_bytes); @@ -679,7 +647,7 @@ Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr) { // Parse Huffman tables memset(&jd_libva_ptr->hufman_table_buf, 0, sizeof(jd_libva_ptr->hufman_table_buf)); - uint32_t dht_ind; + uint32_t dht_ind = 0; for (dht_ind = 0; dht_ind < jd_libva_ptr->huffman_tables_num; dht_ind++) { if (parser->setByteOffset(parser, jd_libva_ptr->dht_byte_offset[dht_ind])) { uint32_t table_bytes = parser->readBytes( parser, 2 ) - 2; @@ -687,51 +655,59 @@ Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr) { uint32_t table_info = parser->readNextByte(parser); table_bytes--; uint32_t table_class = table_info >> 4; // Identifies whether the table is for AC or DC - if (table_class >= TABLE_CLASS_NUM) { - return DECODE_PARSER_FAIL; - } uint32_t table_id = table_info & 0xf; - if (table_id >= JPEG_MAX_SETS_HUFFMAN_TABLES) { - return DECODE_PARSER_FAIL; - } - if (table_class == 0) { - uint8_t* bits = parser->getCurrentIndex(parser); - // Find out the number of entries in the table - uint32_t table_entries = 0; - uint32_t bit_ind; - for (bit_ind = 0; bit_ind < 16; bit_ind++) { - jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind] = bits[bit_ind]; - table_entries += jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind]; - } - // Create table of code values - parser->burnBytes(parser, 16); - table_bytes -= 16; - uint32_t tbl_ind; - for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { - jd_libva_ptr->hufman_table_buf.huffman_table[table_id].dc_values[tbl_ind] = parser->readNextByte(parser); - table_bytes--; - } + if ((table_class < TABLE_CLASS_NUM) && (table_id < JPEG_MAX_SETS_HUFFMAN_TABLES)) { + if (table_class == 0) { + uint8_t* bits = parser->getCurrentIndex(parser); + // Find out the number of entries in the table + uint32_t table_entries = 0; + uint32_t bit_ind; + for (bit_ind = 0; bit_ind < 16; bit_ind++) { + jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind] = bits[bit_ind]; + table_entries += jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind]; 
+ } - } else { // for AC class - uint8_t* bits = parser->getCurrentIndex(parser); + // Create table of code values + parser->burnBytes(parser, 16); + table_bytes -= 16; + uint32_t tbl_ind; + for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { + jd_libva_ptr->hufman_table_buf.huffman_table[table_id].dc_values[tbl_ind] = parser->readNextByte(parser); + table_bytes--; + } + + } else { // for AC class + uint8_t* bits = parser->getCurrentIndex(parser); + // Find out the number of entries in the table + uint32_t table_entries = 0; + uint32_t bit_ind = 0; + for (bit_ind = 0; bit_ind < 16; bit_ind++) { + jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind] = bits[bit_ind]; + table_entries += jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind]; + } + + // Create table of code values + parser->burnBytes(parser, 16); + table_bytes -= 16; + uint32_t tbl_ind = 0; + for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { + jd_libva_ptr->hufman_table_buf.huffman_table[table_id].ac_values[tbl_ind] = parser->readNextByte(parser); + table_bytes--; + } + }//end of else + } else { // Find out the number of entries in the table + ETRACE("DHT table ID is not supported"); uint32_t table_entries = 0; - uint32_t bit_ind; - for (bit_ind = 0; bit_ind < 16; bit_ind++) { - jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind] = bits[bit_ind]; - table_entries += jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind]; - } - - // Create table of code values - parser->burnBytes(parser, 16); - table_bytes -= 16; - uint32_t tbl_ind; - for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { - jd_libva_ptr->hufman_table_buf.huffman_table[table_id].ac_values[tbl_ind] = parser->readNextByte(parser); + uint32_t bit_ind = 0; + for(bit_ind = 0; bit_ind < 16; bit_ind++) { + table_entries += parser->readNextByte(parser); table_bytes--; } - }//end of else + parser->burnBytes(parser, table_entries); + table_bytes -= table_entries; + } } while (table_bytes); } diff --git a/imagedecoder/JPEGDecoder.h b/imagedecoder/JPEGDecoder.h index efc8091..b1e84b0 100644 --- a/imagedecoder/JPEGDecoder.h +++ b/imagedecoder/JPEGDecoder.h @@ -58,6 +58,8 @@ typedef struct { uint32_t dqt_byte_offset[4]; uint32_t huffman_tables_num; uint32_t quant_tables_num; + uint32_t soi_offset; + uint32_t eoi_offset; uint8_t* bitstream_buf; uint32_t image_width; -- cgit v1.2.3 From d0101a261d44ae3f4cad1f02410ff1a80ddb9112 Mon Sep 17 00:00:00 2001 From: Nana GUo Date: Fri, 12 Apr 2013 13:42:35 -0400 Subject: libmix: refine code with libva rebase in HW JPEG BZ: 99978 refine code with libva rebase in HW JPEG for data structure rename Change-Id: I660a2123d562159847dd51750358d3b08af7c28a Signed-off-by: Nana GUo Reviewed-on: http://android.intel.com:8080/101683 Reviewed-by: cactus Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- imagedecoder/JPEGDecoder.c | 10 +++++----- imagedecoder/JPEGDecoder.h | 8 ++++---- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/imagedecoder/JPEGDecoder.c b/imagedecoder/JPEGDecoder.c index f692ede..ccbea4e 100644 --- a/imagedecoder/JPEGDecoder.c +++ b/imagedecoder/JPEGDecoder.c @@ -258,14 +258,14 @@ Decode_Status jdva_decode (jd_libva_struct * jd_libva_ptr, uint8_t* buf) { status = DECODE_DRIVER_FAIL; return status; } - va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAPictureParameterBufferType, sizeof(VAPictureParameterBufferJPEG), 1, 
&jd_libva_ptr->picture_param_buf, &desc_buf[buf_idx]); + va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAPictureParameterBufferType, sizeof(VAPictureParameterBufferJPEGBaseline), 1, &jd_libva_ptr->picture_param_buf, &desc_buf[buf_idx]); if (va_status != VA_STATUS_SUCCESS) { ETRACE("vaCreateBuffer failed. va_status = 0x%x", va_status); status = DECODE_DRIVER_FAIL; return status; } buf_idx++; - va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAIQMatrixBufferType, sizeof(VAIQMatrixParameterBufferJPEG), 1, &jd_libva_ptr->qmatrix_buf, &desc_buf[buf_idx]); + va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAIQMatrixBufferType, sizeof(VAIQMatrixBufferJPEGBaseline), 1, &jd_libva_ptr->qmatrix_buf, &desc_buf[buf_idx]); if (va_status != VA_STATUS_SUCCESS) { ETRACE("vaCreateBuffer failed. va_status = 0x%x", va_status); @@ -273,7 +273,7 @@ Decode_Status jdva_decode (jd_libva_struct * jd_libva_ptr, uint8_t* buf) { return status; } buf_idx++; - va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAHuffmanTableBufferType, sizeof(VAHuffmanTableParameterBufferJPEG), 1, &jd_libva_ptr->hufman_table_buf, &desc_buf[buf_idx]); + va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAHuffmanTableBufferType, sizeof(VAHuffmanTableBufferJPEGBaseline), 1, &jd_libva_ptr->hufman_table_buf, &desc_buf[buf_idx]); if (va_status != VA_STATUS_SUCCESS) { ETRACE("vaCreateBuffer failed. va_status = 0x%x", va_status); status = DECODE_DRIVER_FAIL; @@ -285,7 +285,7 @@ Decode_Status jdva_decode (jd_libva_struct * jd_libva_ptr, uint8_t* buf) { uint32_t bytes = ( bytes_remaining < bitstream_buffer_size ) ? bytes_remaining : bitstream_buffer_size; bytes_remaining -= bytes; /* Get Slice Control Buffer */ - VASliceParameterBufferJPEG dest_scan_ctrl[JPEG_MAX_COMPONENTS]; + VASliceParameterBufferJPEGBaseline dest_scan_ctrl[JPEG_MAX_COMPONENTS]; uint32_t src_idx = 0; uint32_t dest_idx = 0; memset(dest_scan_ctrl, 0, sizeof(dest_scan_ctrl)); @@ -330,7 +330,7 @@ jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset : 0; } scan_idx = src_idx; /* Get Slice Control Buffer */ - va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VASliceParameterBufferType, sizeof(VASliceParameterBufferJPEG) * dest_idx, 1, dest_scan_ctrl, &desc_buf[buf_idx]); + va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VASliceParameterBufferType, sizeof(VASliceParameterBufferJPEGBaseline) * dest_idx, 1, dest_scan_ctrl, &desc_buf[buf_idx]); if (va_status != VA_STATUS_SUCCESS) { ETRACE("vaCreateBuffer failed. 
va_status = 0x%x", va_status); status = DECODE_DRIVER_FAIL; diff --git a/imagedecoder/JPEGDecoder.h b/imagedecoder/JPEGDecoder.h index b1e84b0..8e4ee17 100644 --- a/imagedecoder/JPEGDecoder.h +++ b/imagedecoder/JPEGDecoder.h @@ -49,10 +49,10 @@ typedef struct { VASurfaceID* va_surfaces; VAConfigID va_config; - VAPictureParameterBufferJPEG picture_param_buf; - VASliceParameterBufferJPEG slice_param_buf[JPEG_MAX_COMPONENTS]; - VAIQMatrixParameterBufferJPEG qmatrix_buf; - VAHuffmanTableParameterBufferJPEG hufman_table_buf; + VAPictureParameterBufferJPEGBaseline picture_param_buf; + VASliceParameterBufferJPEGBaseline slice_param_buf[JPEG_MAX_COMPONENTS]; + VAIQMatrixBufferJPEGBaseline qmatrix_buf; + VAHuffmanTableBufferJPEGBaseline hufman_table_buf; uint32_t dht_byte_offset[4]; uint32_t dqt_byte_offset[4]; -- cgit v1.2.3 From 83bb27286e32a809f4cead6d4e81fd5305d426f1 Mon Sep 17 00:00:00 2001 From: Fei Jiang Date: Wed, 17 Apr 2013 02:15:31 +0800 Subject: Load different video drivers according to video codecs BZ: 98522 On the baytrail platform, there are two video decoder hardwares. The Intel GEN will handle H264 and VC1 decode and VXD will handle MPEG4, H263 and VP8 decode. GEN uses i965 driver and VxD uses pvr driver. We have to load different drivers according to the codec type of the video. Use setenv before vaInitialize and unsetenv after it can work. But there is one problem for thumbnail generation case, multiple threads will call setenv/unsetenv. One thread just call set env to pvr, but another thread maybe call set env to i915, which will lead to previous thread didn't load correct library. The solution here is to pass driver name through vaDisplay in function vaGetDisplay, change vaDisplay from type unsigned int to char * and set driver name to vaDisplay. Take pvr_drv_video for example, need set vaDisplay= "libva_driver_name=pvr". 
Change-Id: I671fb8817e2f76eb8f4baaf8d63cdbc75ba90f77 Signed-off-by: Fei Jiang Reviewed-on: http://android.intel.com:8080/102308 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 24 +++++++++++++----------- videodecoder/VideoDecoderBase.h | 4 ++++ 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index bd9ffd4..b1e9541 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -739,29 +739,29 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { } // Display is defined as "unsigned int" - +#ifndef LOAD_PVR_DRIVER mDisplay = new Display; *mDisplay = ANDROID_DISPLAY_HANDLE; - +#else + if (profile >= VAProfileH264Baseline && profile <= VAProfileVC1Advanced) { + ITRACE("Using GEN driver"); + mDisplay = "libva_driver_name=i965"; + } + else { + ITRACE("Using PVR driver"); + mDisplay = "libva_driver_name=pvr"; + } +#endif mVADisplay = vaGetDisplay(mDisplay); if (mVADisplay == NULL) { ETRACE("vaGetDisplay failed."); return DECODE_DRIVER_FAIL; } -#ifdef LOAD_PVR_DRIVER - ITRACE("load pvr driver.\n"); - setenv("LIBVA_DRIVER_NAME", "pvr", 1); -#endif - int majorVersion, minorVersion; vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion); CHECK_VA_STATUS("vaInitialize"); -#ifdef LOAD_PVR_DRIVER - unsetenv("LIBVA_DRIVER_NAME"); -#endif - if (mConfigBuffer.frameRate > 45 && mVideoFormatInfo.height >= 1080) { // ugly workaround here // for fps > 45 and height > 1080, we will force to @@ -943,7 +943,9 @@ Decode_Status VideoDecoderBase::terminateVA(void) { } if (mDisplay) { +#ifndef LOAD_PVR_DRIVER delete mDisplay; +#endif mDisplay = NULL; } diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 048da3c..a1ad265 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -37,8 +37,12 @@ extern "C" { } #ifndef Display +#ifdef LOAD_PVR_DRIVER +typedef char Display; +#else typedef unsigned int Display; #endif +#endif // TODO: check what is the best number. Must be at least 2 to support one backward reference frame. // Currently set to 8 to support 7 backward reference frames. This value is used for AVC frame reordering only. -- cgit v1.2.3 From c9817c6c40268cfac965f13988bd85a90ff74cd7 Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Mon, 8 Apr 2013 17:14:31 +0800 Subject: VE: Fix KW issue in bitstream.h BZ: 98594 Fix KW issue in bitstream.h Change-Id: I7b6731b69840b84149307a472a49b106910832e7 Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/103950 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/bitstream.h | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/videoencoder/bitstream.h b/videoencoder/bitstream.h index 2fe98fd..6a87a87 100644 --- a/videoencoder/bitstream.h +++ b/videoencoder/bitstream.h @@ -1,3 +1,26 @@ +/* + * Copyright (c) 2012 Intel Corporation. All Rights Reserved. 
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the
+ * "Software"), to deal in the Software without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sub license, and/or sell copies of the Software, and to
+ * permit persons to whom the Software is furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice and this permission notice (including the
+ * next paragraph) shall be included in all copies or substantial portions
+ * of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
+ * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
+ * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
 #ifndef __BITSTREAM_H__
 #define __BITSTREAM_H__
@@ -61,6 +84,8 @@ static void bitstream_put_ui(bitstream *bs, unsigned int val, int size_in_bits)
     if (pos + 1 == bs->max_size_in_dword) {
         bs->max_size_in_dword += BITSTREAM_ALLOCATE_STEPPING;
         bs->buffer = (unsigned int*)realloc(bs->buffer, bs->max_size_in_dword * sizeof(unsigned int));
+        if (bs->buffer == NULL)
+            abort();
     }
     bs->buffer[pos + 1] = val;
@@ -154,8 +179,8 @@ static void nal_header(bitstream *bs, int nal_ref_idc, int nal_unit_type)
 static void sps_rbsp(bitstream *bs, VAProfile profile, int frame_bit_rate, VAEncSequenceParameterBufferH264 *seq_param)
 {
-    int profile_idc;
-    int constraint_set_flag;
+    int profile_idc = 0;
+    int constraint_set_flag = 0;

     if (profile == VAProfileH264High) {
         profile_idc = PROFILE_IDC_HIGH;
-- cgit v1.2.3

From d2da409a23a9dbf5e2653d97fe8da662a18212fd Mon Sep 17 00:00:00 2001
From: Cheng Yao
Date: Tue, 16 Apr 2013 16:22:32 +0800
Subject: libmix: Make VA parameters consistent with the open-source driver

BZ: 99544

Parse load_quantiser_table/load_huffman_table as 0/1 flags; make
component_selector start from 1 instead of 0.
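For illustration only (a sketch, not part of the diff that follows; the
field names come from jd_libva_struct in this tree), the two conventions
this change adopts are:

    /* load_quantiser_table / load_huffman_table are per-table-id 0/1
     * flags, indexed by the table id parsed from the DQT/DHT segment: */
    jd_libva_ptr->qmatrix_buf.load_quantiser_table[table_id] = 1;
    jd_libva_ptr->hufman_table_buf.load_huffman_table[table_id] = 1;

    /* component_selector is 1-based, matching what the open-source i965
     * driver expects: */
    jd_libva_ptr->slice_param_buf[scan_ind].components[comp_ind].component_selector = comp_data_ind + 1;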
Change-Id: Ic2a0fd7f06a6e6c60faf78ea1960e6d3a70ed2ec Signed-off-by: Cheng Yao Reviewed-on: http://android.intel.com:8080/103728 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- imagedecoder/Android.mk | 6 ++ imagedecoder/JPEGDecoder.c | 188 ++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 192 insertions(+), 2 deletions(-) diff --git a/imagedecoder/Android.mk b/imagedecoder/Android.mk index 3c5f87c..cb81ed5 100644 --- a/imagedecoder/Android.mk +++ b/imagedecoder/Android.mk @@ -28,6 +28,12 @@ LOCAL_SHARED_LIBRARIES += \ LOCAL_LDLIBS += -lpthread LOCAL_CFLAGS += -Wno-multichar LOCAL_CFLAGS += -DUSE_INTEL_JPEGDEC + +BAYLAKE_PRODUCT := baylake +ifneq ($(filter $(TARGET_PRODUCT),$(BAYLAKE_PRODUCT)),) +LOCAL_CFLAGS += -D__BAYLAKE__ +endif + LOCAL_MODULE:= libjpeg_hw LOCAL_MODULE_TAGS := optional diff --git a/imagedecoder/JPEGDecoder.c b/imagedecoder/JPEGDecoder.c index ccbea4e..baf54e4 100644 --- a/imagedecoder/JPEGDecoder.c +++ b/imagedecoder/JPEGDecoder.c @@ -38,6 +38,101 @@ #define TABLE_CLASS_AC 1 #define TABLE_CLASS_NUM 2 +static int appendFile(unsigned char* _fileName, void* _buf, int _bufLen) +{ + static int firstOpen = 1; + FILE * fp = NULL; + if( NULL == _buf || _bufLen <= 0 ) return (-1); + + if(firstOpen) + { + fp = fopen(_fileName, "wb"); + firstOpen = 0; + } + else + { + fp = fopen(_fileName, "ab"); + } + + if( NULL == fp ) + { + return (-1); + } + + fwrite(_buf, _bufLen, 1, fp); + + fclose(fp); + fp = NULL; + + return 0; +} +#define DUMPYUVFILE "/data/mcgdump.yuv" +int dump_yuv_image(VAImage va_image, unsigned char *pImage_Src, + float CbCr_h_sampling_factor, float CbCr_v_sampling_factor, int actW, int actH) +{ + int num_bytes, nWidth, nHeight, nAWidth, nAHeight; + unsigned char *pSrc_Y, *pSrc_UV, *pDst, *pDst_Y, *pDst_U, *pDst_V, *pSrcTmp, *pSrc_U, *pSrc_V; + int i, j; + + ITRACE("Image width = %d, Height = %d\n", va_image.width, va_image.height); + + pSrc_Y = pImage_Src; + pSrc_U = pSrc_Y + va_image.offsets[1]; + pSrc_V = pSrc_U + va_image.offsets[2]; + ITRACE("offset = %p, %p, %p\n", pSrc_Y, pSrc_U, pSrc_V); + ITRACE("offset = %d, %d, %d\n", va_image.offsets[0], va_image.offsets[1], va_image.offsets[2]); + ITRACE("pitch = %d, %d, %d\n", va_image.pitches[0], va_image.pitches[1], va_image.pitches[2]); + +// Y + nWidth = va_image.pitches[0]; + nHeight = va_image.height; + num_bytes = nWidth * nHeight; + if (NULL == (pDst_Y = (unsigned char*) malloc(num_bytes))) { + return 0; + } + for (i = 0; i < nHeight; i++) + { + memcpy(pDst_Y + i * nWidth, pSrc_Y + i * va_image.pitches[0], nWidth); + } + ITRACE(" Y (WxH) %d x %d, bytes = %d\n", nWidth, nHeight, num_bytes); + appendFile(DUMPYUVFILE, pDst_Y, nWidth*nHeight); + +//U + nWidth = va_image.pitches[0] * CbCr_h_sampling_factor; + nHeight = va_image.height * CbCr_v_sampling_factor; + num_bytes = nWidth * nHeight; + if (NULL == (pDst_U = (unsigned char*) malloc(num_bytes))) { + return 0; + } + for (i = 0; i < nHeight; i++) + { + memcpy(pDst_U + i * nWidth, pSrc_U + i * va_image.pitches[1], nWidth); + } + ITRACE(" U (WxH) %d x %d, bytes = %d\n", nWidth, nHeight, num_bytes); + appendFile(DUMPYUVFILE, pDst_U, nWidth*nHeight); + + pSrc_V = pSrc_U + nHeight * va_image.pitches[1]; + +//V + nWidth = va_image.pitches[0] * CbCr_h_sampling_factor; + nHeight = va_image.height * CbCr_v_sampling_factor; + num_bytes = nWidth * nHeight; + if (NULL == (pDst_V = (unsigned char*) malloc(num_bytes))) { + return 0; + } + for (i = 0; i < nHeight; i++) + { + memcpy(pDst_V + i * nWidth, 
pSrc_V + i * va_image.pitches[2], nWidth); + } + ITRACE(" V (WxH) %d x %d, bytes = %d\n", nWidth, nHeight, num_bytes); + appendFile(DUMPYUVFILE, pDst_V, nWidth*nHeight); + + if(pDst != NULL) + free(pDst); + + return 0; +} + /* * Initialize VA API related stuff * @@ -145,6 +240,62 @@ void jdva_deinitialize (jd_libva_struct * jd_libva_ptr) { return; } +unsigned int jdva_get_surface_format(jd_libva_struct * jd_libva_ptr, VASurfaceAttrib * fourcc) { + int h1, h2, h3, v1, v2, v3; + h1 = jd_libva_ptr->picture_param_buf.components[0].h_sampling_factor; + h2 = jd_libva_ptr->picture_param_buf.components[1].h_sampling_factor; + h3 = jd_libva_ptr->picture_param_buf.components[2].h_sampling_factor; + v1 = jd_libva_ptr->picture_param_buf.components[0].v_sampling_factor; + v2 = jd_libva_ptr->picture_param_buf.components[1].v_sampling_factor; + v3 = jd_libva_ptr->picture_param_buf.components[2].v_sampling_factor; + + fourcc->type = VASurfaceAttribPixelFormat; + fourcc->flags = VA_SURFACE_ATTRIB_SETTABLE; + fourcc->value.type = VAGenericValueTypeInteger; + + if (h1 == 2 && h2 == 1 && h3 == 1 && + v1 == 2 && v2 == 1 && v3 == 1) { + fourcc->value.value.i = VA_FOURCC_IMC3; + return VA_RT_FORMAT_YUV420; + } + else if (h1 == 2 && h2 == 1 && h3 == 1 && + v1 == 1 && v2 == 1 && v3 == 1) { + fourcc->value.value.i = VA_FOURCC_422H; + return VA_RT_FORMAT_YUV422; + } + else if (h1 == 1 && h2 == 1 && h3 == 1 && + v1 == 1 && v2 == 1 && v3 == 1) { + fourcc->value.value.i = VA_FOURCC_444P; + return VA_RT_FORMAT_YUV444; + } + else if (h1 == 4 && h2 == 1 && h3 == 1 && + v1 == 1 && v2 == 1 && v3 == 1) { + fourcc->value.value.i = VA_FOURCC_411P; + return VA_RT_FORMAT_YUV411; + } + else if (h1 == 1 && h2 == 1 && h3 == 1 && + v1 == 2 && v2 == 1 && v3 == 1) { + fourcc->value.value.i = VA_FOURCC_422V; + return VA_RT_FORMAT_YUV422; + } + else if (h1 == 2 && h2 == 1 && h3 == 1 && + v1 == 2 && v2 == 2 && v3 == 2) { + fourcc->value.value.i = VA_FOURCC_422H; + return VA_RT_FORMAT_YUV422; + } + else if (h2 == 2 && h2 == 2 && h3 == 2 && + v1 == 2 && v2 == 1 && v3 == 1) { + fourcc->value.value.i = VA_FOURCC_422V; + return VA_RT_FORMAT_YUV422; + } + else + { + fourcc->value.value.i = VA_FOURCC('4','0','0','P'); + return VA_RT_FORMAT_YUV400; + } + +} + Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr) { VAStatus va_status = VA_STATUS_SUCCESS; Decode_Status status = DECODE_SUCCESS; @@ -155,11 +306,22 @@ Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr) { if (jd_libva_ptr->va_surfaces == NULL) { return DECODE_MEMORY_FAIL; } + + VASurfaceAttrib fourcc; + unsigned int surface_format = jdva_get_surface_format(jd_libva_ptr, &fourcc); +#ifdef __BAYLAKE__ + va_status = vaCreateSurfaces(jd_libva_ptr->va_display, surface_format, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + jd_libva_ptr->va_surfaces, + jd_libva_ptr->surface_count, &fourcc, 1); +#else va_status = vaCreateSurfaces(jd_libva_ptr->va_display, VA_RT_FORMAT_YUV444, jd_libva_ptr->image_width, jd_libva_ptr->image_height, jd_libva_ptr->va_surfaces, jd_libva_ptr->surface_count, NULL, 0); +#endif if (va_status != VA_STATUS_SUCCESS) { ETRACE("vaCreateSurfaces failed. 
va_status = 0x%x", va_status); status = DECODE_DRIVER_FAIL; @@ -400,6 +562,27 @@ jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset : 0; ERREXIT1 (cinfo, JERR_VA_MAPBUFFER, va_status); } + char fcc[5]; + fcc[0] = jd_libva_ptr->surface_image.format.fourcc & 0xff; + fcc[1] = (jd_libva_ptr->surface_image.format.fourcc >> 8 )& 0xff; + fcc[2] = (jd_libva_ptr->surface_image.format.fourcc >> 16) & 0xff; + fcc[3] = (jd_libva_ptr->surface_image.format.fourcc >> 24)& 0xff; + fcc[4] = '\0'; + ITRACE("Derived image:"); + ITRACE("\t%u bytes", jd_libva_ptr->surface_image.data_size); + ITRACE("\tfourcc='%s'", fcc); + ITRACE("\tpitches=[%u %u %u]", jd_libva_ptr->surface_image.pitches[0], jd_libva_ptr->surface_image.pitches[1], jd_libva_ptr->surface_image.pitches[2]); + ITRACE("\toffsets=[%u %u %u]", jd_libva_ptr->surface_image.offsets[0], jd_libva_ptr->surface_image.offsets[1], jd_libva_ptr->surface_image.offsets[2]); + +#ifdef __BAYLAKE__ + float CbCr_h = ((float)jd_libva_ptr->picture_param_buf.components[1].h_sampling_factor) / jd_libva_ptr->picture_param_buf.components[0].h_sampling_factor; + float CbCr_v = ((float)jd_libva_ptr->picture_param_buf.components[1].v_sampling_factor) / jd_libva_ptr->picture_param_buf.components[0].v_sampling_factor; + dump_yuv_image(jd_libva_ptr->surface_image, jd_libva_ptr->image_buf, + CbCr_h, CbCr_v, jd_libva_ptr->image_width, jd_libva_ptr->image_height); +#else + dump_yuv_image(jd_libva_ptr->surface_image, jd_libva_ptr->image_buf, + 1, 1, jd_libva_ptr->image_width, jd_libva_ptr->image_height); +#endif va_status = vaUnmapBuffer(jd_libva_ptr->va_display, jd_libva_ptr->surface_image.buf); if (va_status != VA_STATUS_SUCCESS) { ERREXIT1(cinfo, JERR_VA_MAPBUFFER, va_status); @@ -536,7 +719,7 @@ Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { uint8_t comp_data_ind; for (comp_data_ind = 0; comp_data_ind < jd_libva_ptr->picture_param_buf.num_components; comp_data_ind++) { if (comp_id == jd_libva_ptr->picture_param_buf.components[comp_data_ind].component_id) { - jd_libva_ptr->slice_param_buf[scan_ind].components[comp_ind].component_selector = comp_data_ind; + jd_libva_ptr->slice_param_buf[scan_ind].components[comp_ind].component_selector = comp_data_ind + 1; break; } } @@ -628,7 +811,7 @@ Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr) { } uint32_t table_id = table_info & 0xf; - jd_libva_ptr->qmatrix_buf.load_quantiser_table[dqt_ind] = table_id; + jd_libva_ptr->qmatrix_buf.load_quantiser_table[table_id] = 1; if (table_id < JPEG_MAX_QUANT_TABLES) { // Pull Quant table data from bitstream @@ -656,6 +839,7 @@ Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr) { table_bytes--; uint32_t table_class = table_info >> 4; // Identifies whether the table is for AC or DC uint32_t table_id = table_info & 0xf; + jd_libva_ptr->hufman_table_buf.load_huffman_table[table_id] = 1; if ((table_class < TABLE_CLASS_NUM) && (table_id < JPEG_MAX_SETS_HUFFMAN_TABLES)) { if (table_class == 0) { -- cgit v1.2.3 From 8fe140df71094afb09a6c136d340ce5ccbed0715 Mon Sep 17 00:00:00 2001 From: Suneel Kandru Date: Fri, 26 Apr 2013 14:53:03 -0700 Subject: Enable WIDEVINE for Merrifield platform. BZ: 103732 Created New VideoDecoderAVCSecure source and header file for Merrifield platform. Moved clovertrail files to ctp folder. 
Change-Id: Id513416356c3868e88a880c8fccda43bfc720566 Signed-off-by: Suneel Kandru Signed-off-by: Andy Qiu Reviewed-on: http://android.intel.com:8080/105181 Reviewed-by: cactus Reviewed-by: Poornachandran, Rajesh Tested-by: Post, DavidX J Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/Android.mk | 23 +- videodecoder/VideoDecoderAVCSecure.cpp | 515 -------------------- videodecoder/VideoDecoderAVCSecure.h | 83 ---- .../securevideo/baytrail/VideoDecoderAVCSecure.cpp | 515 ++++++++++++++++++++ .../securevideo/baytrail/VideoDecoderAVCSecure.h | 83 ++++ .../securevideo/ctp/VideoDecoderAVCSecure.cpp | 515 ++++++++++++++++++++ .../securevideo/ctp/VideoDecoderAVCSecure.h | 83 ++++ .../merrifield/VideoDecoderAVCSecure.cpp | 519 +++++++++++++++++++++ .../securevideo/merrifield/VideoDecoderAVCSecure.h | 83 ++++ 9 files changed, 1816 insertions(+), 603 deletions(-) delete mode 100644 videodecoder/VideoDecoderAVCSecure.cpp delete mode 100644 videodecoder/VideoDecoderAVCSecure.h create mode 100644 videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp create mode 100644 videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h create mode 100644 videodecoder/securevideo/ctp/VideoDecoderAVCSecure.cpp create mode 100644 videodecoder/securevideo/ctp/VideoDecoderAVCSecure.h create mode 100644 videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp create mode 100644 videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 4f82d66..8ba8163 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -1,7 +1,6 @@ LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) - LOCAL_SRC_FILES := \ VideoDecoderHost.cpp \ VideoDecoderBase.cpp \ @@ -9,8 +8,7 @@ LOCAL_SRC_FILES := \ VideoDecoderMPEG4.cpp \ VideoDecoderAVC.cpp \ VideoDecoderPAVC.cpp \ - VideoDecoderAVCSecure.cpp \ - VideoDecoderTrace.cpp + VideoDecoderTrace.cpp \ # LOCAL_CFLAGS := @@ -19,12 +17,28 @@ LOCAL_C_INCLUDES := \ $(TARGET_OUT_HEADERS)/libva \ $(TARGET_OUT_HEADERS)/libmixvbp -#LOCAL_LDLIBS += -lpthread +ifeq ($(TARGET_BOARD_PLATFORM),clovertrail) +LOCAL_SRC_FILES += securevideo/ctp/VideoDecoderAVCSecure.cpp + +LOCAL_C_INCLUDES += $(LOCAL_PATH)/securevideo/ctp +endif + +ifeq ($(TARGET_BOARD_PLATFORM),merrifield) +LOCAL_SRC_FILES += securevideo/merrifield/VideoDecoderAVCSecure.cpp + +LOCAL_C_INCLUDES += $(LOCAL_PATH)/securevideo/merrifield +endif ifeq ($(TARGET_BOARD_PLATFORM),baytrail) +LOCAL_SRC_FILES += securevideo/baytrail/VideoDecoderAVCSecure.cpp + +LOCAL_C_INCLUDES += $(LOCAL_PATH)/securevideo/baytrail + LOCAL_CFLAGS += -DLOAD_PVR_DRIVER endif +#LOCAL_LDLIBS += -lpthread + LOCAL_SHARED_LIBRARIES := \ libcutils \ libmixvbp \ @@ -35,7 +49,6 @@ LOCAL_SHARED_LIBRARIES := \ #LOCAL_CFLAGS += -DANDROID - #LOCAL_SHARED_LIBRARIES += liblog LOCAL_COPY_HEADERS_TO := libmix_videodecoder diff --git a/videodecoder/VideoDecoderAVCSecure.cpp b/videodecoder/VideoDecoderAVCSecure.cpp deleted file mode 100644 index 3bcfd70..0000000 --- a/videodecoder/VideoDecoderAVCSecure.cpp +++ /dev/null @@ -1,515 +0,0 @@ -/* INTEL CONFIDENTIAL -* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. -* -* The source code contained or described herein and all documents -* related to the source code ("Material") are owned by Intel -* Corporation or its suppliers or licensors. Title to the -* Material remains with Intel Corporation or its suppliers and -* licensors. 
The Material contains trade secrets and proprietary -* and confidential information of Intel or its suppliers and -* licensors. The Material is protected by worldwide copyright and -* trade secret laws and treaty provisions. No part of the Material -* may be used, copied, reproduced, modified, published, uploaded, -* posted, transmitted, distributed, or disclosed in any way without -* Intel's prior express written permission. -* -* No license under any patent, copyright, trade secret or other -* intellectual property right is granted to or conferred upon you -* by disclosure or delivery of the Materials, either expressly, by -* implication, inducement, estoppel or otherwise. Any license -* under such intellectual property rights must be express and -* approved by Intel in writing. -* -*/ - -#include "VideoDecoderAVCSecure.h" -#include "VideoDecoderTrace.h" -#include - - -#define STARTCODE_00 0x00 -#define STARTCODE_01 0x01 -#define STARTCODE_PREFIX_LEN 3 -#define NALU_TYPE_MASK 0x1F - - -// mask for little endian, to mast the second and fourth bytes in the byte stream -#define STARTCODE_MASK0 0xFF000000 //0x00FF0000 -#define STARTCODE_MASK1 0x0000FF00 //0x000000FF - - -typedef enum { - NAL_UNIT_TYPE_unspecified0 = 0, - NAL_UNIT_TYPE_SLICE, - NAL_UNIT_TYPE_DPA, - NAL_UNIT_TYPE_DPB, - NAL_UNIT_TYPE_DPC, - NAL_UNIT_TYPE_IDR, - NAL_UNIT_TYPE_SEI, - NAL_UNIT_TYPE_SPS, - NAL_UNIT_TYPE_PPS, - NAL_UNIT_TYPE_Acc_unit_delimiter, - NAL_UNIT_TYPE_EOSeq, - NAL_UNIT_TYPE_EOstream, - NAL_UNIT_TYPE_filler_data, - NAL_UNIT_TYPE_SPS_extension, - NAL_UNIT_TYPE_Reserved14, - NAL_UNIT_TYPE_Reserved15, - NAL_UNIT_TYPE_Reserved16, - NAL_UNIT_TYPE_Reserved17, - NAL_UNIT_TYPE_Reserved18, - NAL_UNIT_TYPE_ACP, - NAL_UNIT_TYPE_Reserved20, - NAL_UNIT_TYPE_Reserved21, - NAL_UNIT_TYPE_Reserved22, - NAL_UNIT_TYPE_Reserved23, - NAL_UNIT_TYPE_unspecified24, -} NAL_UNIT_TYPE; - -#ifndef min -#define min(X, Y) ((X) <(Y) ? 
(X) : (Y)) -#endif - - -static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01}; - - -VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) - : VideoDecoderAVC(mimeType), - mNaluHeaderBuffer(NULL), - mInputBuffer(NULL) { - - memset(&mMetadata, 0, sizeof(NaluMetadata)); - memset(&mByteStream, 0, sizeof(NaluByteStream)); -} - -VideoDecoderAVCSecure::~VideoDecoderAVCSecure() { -} - -Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { - Decode_Status status = VideoDecoderAVC::start(buffer); - if (status != DECODE_SUCCESS) { - return status; - } - - mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER]; - mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER]; - mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; - - if (mMetadata.naluInfo == NULL || - mByteStream.byteStream == NULL || - mNaluHeaderBuffer == NULL) { - ETRACE("Failed to allocate memory."); - // TODO: release all allocated memory - return DECODE_MEMORY_FAIL; - } - return status; -} - -void VideoDecoderAVCSecure::stop(void) { - VideoDecoderAVC::stop(); - - if (mMetadata.naluInfo) { - delete [] mMetadata.naluInfo; - mMetadata.naluInfo = NULL; - } - - if (mByteStream.byteStream) { - delete [] mByteStream.byteStream; - mByteStream.byteStream = NULL; - } - - if (mNaluHeaderBuffer) { - delete [] mNaluHeaderBuffer; - mNaluHeaderBuffer = NULL; - } -} - -Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { - Decode_Status status; - int32_t sizeAccumulated = 0; - int32_t sizeLeft = 0; - uint8_t *pByteStream = NULL; - NaluInfo *pNaluInfo = mMetadata.naluInfo; - - if (buffer->flag & IS_SECURE_DATA) { - pByteStream = buffer->data; - sizeLeft = buffer->size; - mInputBuffer = NULL; - } else { - status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream); - CHECK_STATUS("parseAnnexBStream"); - pByteStream = mByteStream.byteStream; - sizeLeft = mByteStream.streamPos; - mInputBuffer = buffer->data; - } - if (sizeLeft < 4) { - ETRACE("Not enough data to read number of NALU."); - return DECODE_INVALID_DATA; - } - - // read number of NALU - memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t)); - pByteStream += 4; - sizeLeft -= 4; - - if (mMetadata.naluNumber == 0) { - WTRACE("Number of NALU is ZERO!"); - return DECODE_SUCCESS; - } - - for (int32_t i = 0; i < mMetadata.naluNumber; i++) { - if (sizeLeft < 12) { - ETRACE("Not enough data to parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft); - return DECODE_INVALID_DATA; - } - sizeLeft -= 12; - // read NALU offset - memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t)); - pByteStream += 4; - - // read NALU size - memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t)); - pByteStream += 4; - - // read NALU header length - memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t)); - pByteStream += 4; - - if (sizeLeft < pNaluInfo->naluHeaderLen) { - ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen); - return DECODE_INVALID_DATA; - } - - sizeLeft -= pNaluInfo->naluHeaderLen; - - if (pNaluInfo->naluHeaderLen) { - // copy start code prefix to buffer - memcpy(mNaluHeaderBuffer + sizeAccumulated, - startcodePrefix, - STARTCODE_PREFIX_LEN); - sizeAccumulated += STARTCODE_PREFIX_LEN; - - // copy NALU header - memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen); - pByteStream += pNaluInfo->naluHeaderLen; - - sizeAccumulated += pNaluInfo->naluHeaderLen; - } else { - 
WTRACE("header len is zero for NALU %d", i); - } - - // for next NALU - pNaluInfo++; - } - - buffer->data = mNaluHeaderBuffer; - buffer->size = sizeAccumulated; - - return VideoDecoderAVC::decode(buffer); -} - - -Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { - - Decode_Status status; - VAStatus vaStatus; - uint32_t bufferIDCount = 0; - // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data - VABufferID bufferIDs[4]; - - vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); - vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); - VAPictureParameterBufferH264 *picParam = picData->pic_parms; - VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); - - if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { - // either condition indicates start of a new frame - if (sliceParam->first_mb_in_slice != 0) { - WTRACE("The first slice is lost."); - // TODO: handle the first slice lost - } - if (mDecodingFrame) { - // interlace content, complete decoding the first field - vaStatus = vaEndPicture(mVADisplay, mVAContext); - CHECK_VA_STATUS("vaEndPicture"); - - // for interlace content, top field may be valid only after the second field is parsed - mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; - } - - // Check there is no reference frame loss before decoding a frame - - // Update the reference frames and surface IDs for DPB and current frame - status = updateDPB(picParam); - CHECK_STATUS("updateDPB"); - - //We have to provide a hacked DPB rather than complete DPB for libva as workaround - status = updateReferenceFrames(picData); - CHECK_STATUS("updateReferenceFrames"); - - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); - CHECK_VA_STATUS("vaBeginPicture"); - - // start decoding a frame - mDecodingFrame = true; - - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VAPictureParameterBufferType, - sizeof(VAPictureParameterBufferH264), - 1, - picParam, - &bufferIDs[bufferIDCount]); - CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); - bufferIDCount++; - - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VAIQMatrixBufferType, - sizeof(VAIQMatrixBufferH264), - 1, - data->IQ_matrix_buf, - &bufferIDs[bufferIDCount]); - CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); - bufferIDCount++; - } - - status = setReference(sliceParam); - CHECK_STATUS("setReference"); - - // find which naluinfo is correlated to current slice - int naluIndex = 0; - uint32_t accumulatedHeaderLen = 0; - uint32_t headerLen = 0; - for (; naluIndex < mMetadata.naluNumber; naluIndex++) { - headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen; - if (headerLen == 0) { - WTRACE("lenght of current NAL unit is 0."); - continue; - } - accumulatedHeaderLen += STARTCODE_PREFIX_LEN; - if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) { - break; - } - accumulatedHeaderLen += headerLen; - } - - if (sliceData->slice_offset != accumulatedHeaderLen) { - WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen); - } - - sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen; - sliceData->slice_size = sliceParam->slice_data_size; - - // no need to update: - // sliceParam->slice_data_offset - 0 always - // sliceParam->slice_data_bit_offset - relative to sliceData->slice_offset - - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - 
VASliceParameterBufferType, - sizeof(VASliceParameterBufferH264), - 1, - sliceParam, - &bufferIDs[bufferIDCount]); - CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); - bufferIDCount++; - - // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to start codes of current NAL unit - // offset points to first byte of NAL unit - uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset; - if (mInputBuffer != NULL) { - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VASliceDataBufferType, - sliceData->slice_size, //size - 1, //num_elements - mInputBuffer + sliceOffset, - &bufferIDs[bufferIDCount]); - } else { - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VAProtectedSliceDataBufferType, - sliceData->slice_size, //size - 1, //num_elements - (uint8_t*)sliceOffset, // IMR offset - &bufferIDs[bufferIDCount]); - } - CHECK_VA_STATUS("vaCreateSliceDataBuffer"); - bufferIDCount++; - - vaStatus = vaRenderPicture( - mVADisplay, - mVAContext, - bufferIDs, - bufferIDCount); - CHECK_VA_STATUS("vaRenderPicture"); - - return DECODE_SUCCESS; -} - - -// Parse byte string pattern "0x000001" (3 bytes) in the current buffer. -// Returns offset of position following the pattern in the buffer if pattern is found or -1 if not found. -int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) { - uint8_t *ptr; - uint32_t left = 0, data = 0, phase = 0; - uint8_t mask1 = 0, mask2 = 0; - - /* Meaning of phase: - 0: initial status, "0x000001" bytes are not found so far; - 1: one "0x00" byte is found; - 2: two or more consecutive "0x00" bytes" are found; - 3: "0x000001" patten is found ; - 4: if there is one more byte after "0x000001"; - */ - - left = length; - ptr = (uint8_t *) (stream + offset); - phase = 0; - - // parse until there is more data and start code not found - while ((left > 0) && (phase < 3)) { - // Check if the address is 32-bit aligned & phase=0, if thats the case we can check 4 bytes instead of one byte at a time. - if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) { - while (left > 3) { - data = *((uint32_t *)ptr); - mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0)); - mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1)); - // If second byte and fourth byte are not zero's then we cannot have a start code here, - // as we need two consecutive zero bytes for a start code pattern. 
- if (mask1 && mask2) { - // skip 4 bytes and start over - ptr += 4; - left -=4; - continue; - } else { - break; - } - } - } - - // At this point either data is not on a 32-bit boundary or phase > 0 so we look at one byte at a time - if (left > 0) { - if (*ptr == STARTCODE_00) { - phase++; - if (phase > 2) { - // more than 2 consecutive '0x00' bytes is found - phase = 2; - } - } else if ((*ptr == STARTCODE_01) && (phase == 2)) { - // start code is found - phase = 3; - } else { - // reset lookup - phase = 0; - } - ptr++; - left--; - } - } - - if ((left > 0) && (phase == 3)) { - phase = 4; - // return offset of position following the pattern in the buffer which matches "0x000001" byte string - return (int32_t)(ptr - stream); - } - return -1; -} - - -Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) { - uint8_t naluType; - int32_t naluHeaderLen; - - naluType = *(uint8_t *)(stream + naluStream->naluOffset); - naluType &= NALU_TYPE_MASK; - // first update nalu header length based on nalu type - if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) { - // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes - naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE); - } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) { - //sps, pps, sei, etc, return the entire NAL unit in clear - naluHeaderLen = naluStream->naluLen; - } else { - return DECODE_FRAME_DROPPED; - } - - memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t)); - naluStream->streamPos += 4; - - memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t)); - naluStream->streamPos += 4; - - memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t)); - naluStream->streamPos += 4; - - if (naluHeaderLen) { - memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen); - naluStream->streamPos += naluHeaderLen; - } - return DECODE_SUCCESS; -} - - -// parse start-code prefixed stream, also knowns as Annex B byte stream, commonly used in AVI, ES, MPEG2 TS container -Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) { - int32_t naluOffset, offset, left; - NaluInfo *info; - uint32_t ret = DECODE_SUCCESS; - - naluOffset = 0; - offset = 0; - left = length; - - // leave 4 bytes to copy nalu count - naluStream->streamPos = 4; - naluStream->naluCount = 0; - memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER); - - for (; ;) { - naluOffset = findNalUnitOffset(stream, offset, left); - if (naluOffset == -1) { - break; - } - - if (naluStream->naluCount == 0) { - naluStream->naluOffset = naluOffset; - } else { - naluStream->naluLen = naluOffset - naluStream->naluOffset - STARTCODE_PREFIX_LEN; - ret = copyNaluHeader(stream, naluStream); - if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) { - LOGW("copyNaluHeader returned %d", ret); - return ret; - } - // starting position for next NALU - naluStream->naluOffset = naluOffset; - } - - if (ret == DECODE_SUCCESS) { - naluStream->naluCount++; - } - - // update next lookup position and length - offset = naluOffset + 1; // skip one byte of NAL unit type - left = length - offset; - } - - if (naluStream->naluCount > 0) { - naluStream->naluLen = length - naluStream->naluOffset; - memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t)); - // ignore return value, 
either DECODE_SUCCESS or DECODE_FRAME_DROPPED - copyNaluHeader(stream, naluStream); - return DECODE_SUCCESS; - } - - LOGW("number of valid NALU is 0!"); - return DECODE_SUCCESS; -} - diff --git a/videodecoder/VideoDecoderAVCSecure.h b/videodecoder/VideoDecoderAVCSecure.h deleted file mode 100644 index af5ae44..0000000 --- a/videodecoder/VideoDecoderAVCSecure.h +++ /dev/null @@ -1,83 +0,0 @@ -/* INTEL CONFIDENTIAL -* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. -* -* The source code contained or described herein and all documents -* related to the source code ("Material") are owned by Intel -* Corporation or its suppliers or licensors. Title to the -* Material remains with Intel Corporation or its suppliers and -* licensors. The Material contains trade secrets and proprietary -* and confidential information of Intel or its suppliers and -* licensors. The Material is protected by worldwide copyright and -* trade secret laws and treaty provisions. No part of the Material -* may be used, copied, reproduced, modified, published, uploaded, -* posted, transmitted, distributed, or disclosed in any way without -* Intel's prior express written permission. -* -* No license under any patent, copyright, trade secret or other -* intellectual property right is granted to or conferred upon you -* by disclosure or delivery of the Materials, either expressly, by -* implication, inducement, estoppel or otherwise. Any license -* under such intellectual property rights must be express and -* approved by Intel in writing. -* -*/ - -#ifndef VIDEO_DECODER_AVC_SECURE_H_ -#define VIDEO_DECODER_AVC_SECURE_H_ - -#include "VideoDecoderAVC.h" - - -class VideoDecoderAVCSecure : public VideoDecoderAVC { -public: - VideoDecoderAVCSecure(const char *mimeType); - virtual ~VideoDecoderAVCSecure(); - - virtual Decode_Status start(VideoConfigBuffer *buffer); - virtual void stop(void); - - // data in the decoded buffer is all encrypted. 
- virtual Decode_Status decode(VideoDecodeBuffer *buffer); - -private: - enum { - MAX_SLICE_HEADER_SIZE = 30, - MAX_NALU_HEADER_BUFFER = 8192, - MAX_NALU_NUMBER = 400, // > 4096/12 - }; - - // Information of Network Abstraction Layer Unit - struct NaluInfo { - int32_t naluOffset; // offset of NAL unit in the firewalled buffer - int32_t naluLen; // length of NAL unit - int32_t naluHeaderLen; // length of NAL unit header - }; - - struct NaluMetadata { - NaluInfo *naluInfo; - int32_t naluNumber; // number of NAL units - }; - - struct NaluByteStream { - int32_t naluOffset; - int32_t naluLen; - int32_t streamPos; - uint8_t *byteStream; // 4 bytes of naluCount, 4 bytes of naluOffset, 4 bytes of naulLen, 4 bytes of naluHeaderLen, followed by naluHeaderData - int32_t naluCount; - }; - - virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); - int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length); - Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream); - Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream); - -private: - NaluMetadata mMetadata; - NaluByteStream mByteStream; - uint8_t *mNaluHeaderBuffer; - uint8_t *mInputBuffer; -}; - - - -#endif /* VIDEO_DECODER_AVC_SECURE_H_ */ diff --git a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp new file mode 100644 index 0000000..3bcfd70 --- /dev/null +++ b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp @@ -0,0 +1,515 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +*/ + +#include "VideoDecoderAVCSecure.h" +#include "VideoDecoderTrace.h" +#include + + +#define STARTCODE_00 0x00 +#define STARTCODE_01 0x01 +#define STARTCODE_PREFIX_LEN 3 +#define NALU_TYPE_MASK 0x1F + + +// mask for little endian, to mast the second and fourth bytes in the byte stream +#define STARTCODE_MASK0 0xFF000000 //0x00FF0000 +#define STARTCODE_MASK1 0x0000FF00 //0x000000FF + + +typedef enum { + NAL_UNIT_TYPE_unspecified0 = 0, + NAL_UNIT_TYPE_SLICE, + NAL_UNIT_TYPE_DPA, + NAL_UNIT_TYPE_DPB, + NAL_UNIT_TYPE_DPC, + NAL_UNIT_TYPE_IDR, + NAL_UNIT_TYPE_SEI, + NAL_UNIT_TYPE_SPS, + NAL_UNIT_TYPE_PPS, + NAL_UNIT_TYPE_Acc_unit_delimiter, + NAL_UNIT_TYPE_EOSeq, + NAL_UNIT_TYPE_EOstream, + NAL_UNIT_TYPE_filler_data, + NAL_UNIT_TYPE_SPS_extension, + NAL_UNIT_TYPE_Reserved14, + NAL_UNIT_TYPE_Reserved15, + NAL_UNIT_TYPE_Reserved16, + NAL_UNIT_TYPE_Reserved17, + NAL_UNIT_TYPE_Reserved18, + NAL_UNIT_TYPE_ACP, + NAL_UNIT_TYPE_Reserved20, + NAL_UNIT_TYPE_Reserved21, + NAL_UNIT_TYPE_Reserved22, + NAL_UNIT_TYPE_Reserved23, + NAL_UNIT_TYPE_unspecified24, +} NAL_UNIT_TYPE; + +#ifndef min +#define min(X, Y) ((X) <(Y) ? (X) : (Y)) +#endif + + +static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01}; + + +VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) + : VideoDecoderAVC(mimeType), + mNaluHeaderBuffer(NULL), + mInputBuffer(NULL) { + + memset(&mMetadata, 0, sizeof(NaluMetadata)); + memset(&mByteStream, 0, sizeof(NaluByteStream)); +} + +VideoDecoderAVCSecure::~VideoDecoderAVCSecure() { +} + +Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { + Decode_Status status = VideoDecoderAVC::start(buffer); + if (status != DECODE_SUCCESS) { + return status; + } + + mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER]; + mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER]; + mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; + + if (mMetadata.naluInfo == NULL || + mByteStream.byteStream == NULL || + mNaluHeaderBuffer == NULL) { + ETRACE("Failed to allocate memory."); + // TODO: release all allocated memory + return DECODE_MEMORY_FAIL; + } + return status; +} + +void VideoDecoderAVCSecure::stop(void) { + VideoDecoderAVC::stop(); + + if (mMetadata.naluInfo) { + delete [] mMetadata.naluInfo; + mMetadata.naluInfo = NULL; + } + + if (mByteStream.byteStream) { + delete [] mByteStream.byteStream; + mByteStream.byteStream = NULL; + } + + if (mNaluHeaderBuffer) { + delete [] mNaluHeaderBuffer; + mNaluHeaderBuffer = NULL; + } +} + +Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { + Decode_Status status; + int32_t sizeAccumulated = 0; + int32_t sizeLeft = 0; + uint8_t *pByteStream = NULL; + NaluInfo *pNaluInfo = mMetadata.naluInfo; + + if (buffer->flag & IS_SECURE_DATA) { + pByteStream = buffer->data; + sizeLeft = buffer->size; + mInputBuffer = NULL; + } else { + status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream); + CHECK_STATUS("parseAnnexBStream"); + pByteStream = mByteStream.byteStream; + sizeLeft = mByteStream.streamPos; + mInputBuffer = buffer->data; + } + if (sizeLeft < 4) { + ETRACE("Not enough data to read number of NALU."); + return DECODE_INVALID_DATA; + } + + // read number of NALU + memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t)); + pByteStream += 4; + sizeLeft -= 4; + + if (mMetadata.naluNumber == 0) { + WTRACE("Number of NALU is ZERO!"); + return DECODE_SUCCESS; + } + + for (int32_t i = 0; i < mMetadata.naluNumber; i++) { + if (sizeLeft < 12) { + ETRACE("Not enough data to 
parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft); + return DECODE_INVALID_DATA; + } + sizeLeft -= 12; + // read NALU offset + memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU size + memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU header length + memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + if (sizeLeft < pNaluInfo->naluHeaderLen) { + ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen); + return DECODE_INVALID_DATA; + } + + sizeLeft -= pNaluInfo->naluHeaderLen; + + if (pNaluInfo->naluHeaderLen) { + // copy start code prefix to buffer + memcpy(mNaluHeaderBuffer + sizeAccumulated, + startcodePrefix, + STARTCODE_PREFIX_LEN); + sizeAccumulated += STARTCODE_PREFIX_LEN; + + // copy NALU header + memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen); + pByteStream += pNaluInfo->naluHeaderLen; + + sizeAccumulated += pNaluInfo->naluHeaderLen; + } else { + WTRACE("header len is zero for NALU %d", i); + } + + // for next NALU + pNaluInfo++; + } + + buffer->data = mNaluHeaderBuffer; + buffer->size = sizeAccumulated; + + return VideoDecoderAVC::decode(buffer); +} + + +Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { + + Decode_Status status; + VAStatus vaStatus; + uint32_t bufferIDCount = 0; + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID bufferIDs[4]; + + vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); + vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); + VAPictureParameterBufferH264 *picParam = picData->pic_parms; + VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); + + if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { + // either condition indicates start of a new frame + if (sliceParam->first_mb_in_slice != 0) { + WTRACE("The first slice is lost."); + // TODO: handle the first slice lost + } + if (mDecodingFrame) { + // interlace content, complete decoding the first field + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS("vaEndPicture"); + + // for interlace content, top field may be valid only after the second field is parsed + mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; + } + + // Check there is no reference frame loss before decoding a frame + + // Update the reference frames and surface IDs for DPB and current frame + status = updateDPB(picParam); + CHECK_STATUS("updateDPB"); + + //We have to provide a hacked DPB rather than complete DPB for libva as workaround + status = updateReferenceFrames(picData); + CHECK_STATUS("updateReferenceFrames"); + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); + + // start decoding a frame + mDecodingFrame = true; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferH264), + 1, + picParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferH264), + 1, + data->IQ_matrix_buf, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); + 
bufferIDCount++; + } + + status = setReference(sliceParam); + CHECK_STATUS("setReference"); + + // find which naluinfo is correlated to current slice + int naluIndex = 0; + uint32_t accumulatedHeaderLen = 0; + uint32_t headerLen = 0; + for (; naluIndex < mMetadata.naluNumber; naluIndex++) { + headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen; + if (headerLen == 0) { + WTRACE("lenght of current NAL unit is 0."); + continue; + } + accumulatedHeaderLen += STARTCODE_PREFIX_LEN; + if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) { + break; + } + accumulatedHeaderLen += headerLen; + } + + if (sliceData->slice_offset != accumulatedHeaderLen) { + WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen); + } + + sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen; + sliceData->slice_size = sliceParam->slice_data_size; + + // no need to update: + // sliceParam->slice_data_offset - 0 always + // sliceParam->slice_data_bit_offset - relative to sliceData->slice_offset + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264), + 1, + sliceParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); + bufferIDCount++; + + // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to start codes of current NAL unit + // offset points to first byte of NAL unit + uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset; + if (mInputBuffer != NULL) { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + sliceData->slice_size, //size + 1, //num_elements + mInputBuffer + sliceOffset, + &bufferIDs[bufferIDCount]); + } else { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAProtectedSliceDataBufferType, + sliceData->slice_size, //size + 1, //num_elements + (uint8_t*)sliceOffset, // IMR offset + &bufferIDs[bufferIDCount]); + } + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + bufferIDCount++; + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + bufferIDs, + bufferIDCount); + CHECK_VA_STATUS("vaRenderPicture"); + + return DECODE_SUCCESS; +} + + +// Parse byte string pattern "0x000001" (3 bytes) in the current buffer. +// Returns offset of position following the pattern in the buffer if pattern is found or -1 if not found. +int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) { + uint8_t *ptr; + uint32_t left = 0, data = 0, phase = 0; + uint8_t mask1 = 0, mask2 = 0; + + /* Meaning of phase: + 0: initial status, "0x000001" bytes are not found so far; + 1: one "0x00" byte is found; + 2: two or more consecutive "0x00" bytes" are found; + 3: "0x000001" patten is found ; + 4: if there is one more byte after "0x000001"; + */ + + left = length; + ptr = (uint8_t *) (stream + offset); + phase = 0; + + // parse until there is more data and start code not found + while ((left > 0) && (phase < 3)) { + // Check if the address is 32-bit aligned & phase=0, if thats the case we can check 4 bytes instead of one byte at a time. + if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) { + while (left > 3) { + data = *((uint32_t *)ptr); + mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0)); + mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1)); + // If second byte and fourth byte are not zero's then we cannot have a start code here, + // as we need two consecutive zero bytes for a start code pattern. 
+ if (mask1 && mask2) { + // skip 4 bytes and start over + ptr += 4; + left -=4; + continue; + } else { + break; + } + } + } + + // At this point either data is not on a 32-bit boundary or phase > 0 so we look at one byte at a time + if (left > 0) { + if (*ptr == STARTCODE_00) { + phase++; + if (phase > 2) { + // more than 2 consecutive '0x00' bytes is found + phase = 2; + } + } else if ((*ptr == STARTCODE_01) && (phase == 2)) { + // start code is found + phase = 3; + } else { + // reset lookup + phase = 0; + } + ptr++; + left--; + } + } + + if ((left > 0) && (phase == 3)) { + phase = 4; + // return offset of position following the pattern in the buffer which matches "0x000001" byte string + return (int32_t)(ptr - stream); + } + return -1; +} + + +Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) { + uint8_t naluType; + int32_t naluHeaderLen; + + naluType = *(uint8_t *)(stream + naluStream->naluOffset); + naluType &= NALU_TYPE_MASK; + // first update nalu header length based on nalu type + if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) { + // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes + naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE); + } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) { + //sps, pps, sei, etc, return the entire NAL unit in clear + naluHeaderLen = naluStream->naluLen; + } else { + return DECODE_FRAME_DROPPED; + } + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t)); + naluStream->streamPos += 4; + + if (naluHeaderLen) { + memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen); + naluStream->streamPos += naluHeaderLen; + } + return DECODE_SUCCESS; +} + + +// parse start-code prefixed stream, also knowns as Annex B byte stream, commonly used in AVI, ES, MPEG2 TS container +Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) { + int32_t naluOffset, offset, left; + NaluInfo *info; + uint32_t ret = DECODE_SUCCESS; + + naluOffset = 0; + offset = 0; + left = length; + + // leave 4 bytes to copy nalu count + naluStream->streamPos = 4; + naluStream->naluCount = 0; + memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER); + + for (; ;) { + naluOffset = findNalUnitOffset(stream, offset, left); + if (naluOffset == -1) { + break; + } + + if (naluStream->naluCount == 0) { + naluStream->naluOffset = naluOffset; + } else { + naluStream->naluLen = naluOffset - naluStream->naluOffset - STARTCODE_PREFIX_LEN; + ret = copyNaluHeader(stream, naluStream); + if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) { + LOGW("copyNaluHeader returned %d", ret); + return ret; + } + // starting position for next NALU + naluStream->naluOffset = naluOffset; + } + + if (ret == DECODE_SUCCESS) { + naluStream->naluCount++; + } + + // update next lookup position and length + offset = naluOffset + 1; // skip one byte of NAL unit type + left = length - offset; + } + + if (naluStream->naluCount > 0) { + naluStream->naluLen = length - naluStream->naluOffset; + memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t)); + // ignore return value, 
either DECODE_SUCCESS or DECODE_FRAME_DROPPED + copyNaluHeader(stream, naluStream); + return DECODE_SUCCESS; + } + + LOGW("number of valid NALU is 0!"); + return DECODE_SUCCESS; +} + diff --git a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h new file mode 100644 index 0000000..af5ae44 --- /dev/null +++ b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h @@ -0,0 +1,83 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef VIDEO_DECODER_AVC_SECURE_H_ +#define VIDEO_DECODER_AVC_SECURE_H_ + +#include "VideoDecoderAVC.h" + + +class VideoDecoderAVCSecure : public VideoDecoderAVC { +public: + VideoDecoderAVCSecure(const char *mimeType); + virtual ~VideoDecoderAVCSecure(); + + virtual Decode_Status start(VideoConfigBuffer *buffer); + virtual void stop(void); + + // data in the decoded buffer is all encrypted. 
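+ // In addition to the encrypted payload, clear NALU metadata (a NALU count followed by per-NALU offset/length/header records) is passed in; see the NaluByteStream layout below.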
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer); + +private: + enum { + MAX_SLICE_HEADER_SIZE = 30, + MAX_NALU_HEADER_BUFFER = 8192, + MAX_NALU_NUMBER = 400, // > 4096/12 + }; + + // Information about a Network Abstraction Layer Unit + struct NaluInfo { + int32_t naluOffset; // offset of NAL unit in the firewalled buffer + int32_t naluLen; // length of NAL unit + int32_t naluHeaderLen; // length of NAL unit header + }; + + struct NaluMetadata { + NaluInfo *naluInfo; + int32_t naluNumber; // number of NAL units + }; + + struct NaluByteStream { + int32_t naluOffset; + int32_t naluLen; + int32_t streamPos; + uint8_t *byteStream; // 4 bytes of naluCount, then per NALU: 4 bytes of naluOffset, 4 bytes of naluLen, 4 bytes of naluHeaderLen, followed by naluHeaderData + int32_t naluCount; + }; + + virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); + int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length); + Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream); + Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream); + +private: + NaluMetadata mMetadata; + NaluByteStream mByteStream; + uint8_t *mNaluHeaderBuffer; + uint8_t *mInputBuffer; +}; + + + +#endif /* VIDEO_DECODER_AVC_SECURE_H_ */ diff --git a/videodecoder/securevideo/ctp/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/ctp/VideoDecoderAVCSecure.cpp new file mode 100644 index 0000000..3bcfd70 --- /dev/null +++ b/videodecoder/securevideo/ctp/VideoDecoderAVCSecure.cpp @@ -0,0 +1,515 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing.
+* +*/ + +#include "VideoDecoderAVCSecure.h" +#include "VideoDecoderTrace.h" +#include <string.h> + + +#define STARTCODE_00 0x00 +#define STARTCODE_01 0x01 +#define STARTCODE_PREFIX_LEN 3 +#define NALU_TYPE_MASK 0x1F + + +// masks for little endian, to mask the second and fourth bytes in the byte stream +#define STARTCODE_MASK0 0xFF000000 //0x00FF0000 +#define STARTCODE_MASK1 0x0000FF00 //0x000000FF + + +typedef enum { + NAL_UNIT_TYPE_unspecified0 = 0, + NAL_UNIT_TYPE_SLICE, + NAL_UNIT_TYPE_DPA, + NAL_UNIT_TYPE_DPB, + NAL_UNIT_TYPE_DPC, + NAL_UNIT_TYPE_IDR, + NAL_UNIT_TYPE_SEI, + NAL_UNIT_TYPE_SPS, + NAL_UNIT_TYPE_PPS, + NAL_UNIT_TYPE_Acc_unit_delimiter, + NAL_UNIT_TYPE_EOSeq, + NAL_UNIT_TYPE_EOstream, + NAL_UNIT_TYPE_filler_data, + NAL_UNIT_TYPE_SPS_extension, + NAL_UNIT_TYPE_Reserved14, + NAL_UNIT_TYPE_Reserved15, + NAL_UNIT_TYPE_Reserved16, + NAL_UNIT_TYPE_Reserved17, + NAL_UNIT_TYPE_Reserved18, + NAL_UNIT_TYPE_ACP, + NAL_UNIT_TYPE_Reserved20, + NAL_UNIT_TYPE_Reserved21, + NAL_UNIT_TYPE_Reserved22, + NAL_UNIT_TYPE_Reserved23, + NAL_UNIT_TYPE_unspecified24, +} NAL_UNIT_TYPE; + +#ifndef min +#define min(X, Y) ((X) < (Y) ? (X) : (Y)) +#endif + + +static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01}; + + +VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) + : VideoDecoderAVC(mimeType), + mNaluHeaderBuffer(NULL), + mInputBuffer(NULL) { + + memset(&mMetadata, 0, sizeof(NaluMetadata)); + memset(&mByteStream, 0, sizeof(NaluByteStream)); +} + +VideoDecoderAVCSecure::~VideoDecoderAVCSecure() { +} + +Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { + Decode_Status status = VideoDecoderAVC::start(buffer); + if (status != DECODE_SUCCESS) { + return status; + } + + mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER]; + mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER]; + mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; + + if (mMetadata.naluInfo == NULL || + mByteStream.byteStream == NULL || + mNaluHeaderBuffer == NULL) { + ETRACE("Failed to allocate memory."); + // TODO: release all allocated memory + return DECODE_MEMORY_FAIL; + } + return status; +} + +void VideoDecoderAVCSecure::stop(void) { + VideoDecoderAVC::stop(); + + if (mMetadata.naluInfo) { + delete [] mMetadata.naluInfo; + mMetadata.naluInfo = NULL; + } + + if (mByteStream.byteStream) { + delete [] mByteStream.byteStream; + mByteStream.byteStream = NULL; + } + + if (mNaluHeaderBuffer) { + delete [] mNaluHeaderBuffer; + mNaluHeaderBuffer = NULL; + } +} + +Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { + Decode_Status status; + int32_t sizeAccumulated = 0; + int32_t sizeLeft = 0; + uint8_t *pByteStream = NULL; + NaluInfo *pNaluInfo = mMetadata.naluInfo; + + if (buffer->flag & IS_SECURE_DATA) { + pByteStream = buffer->data; + sizeLeft = buffer->size; + mInputBuffer = NULL; + } else { + status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream); + CHECK_STATUS("parseAnnexBStream"); + pByteStream = mByteStream.byteStream; + sizeLeft = mByteStream.streamPos; + mInputBuffer = buffer->data; + } + if (sizeLeft < 4) { + ETRACE("Not enough data to read number of NALU."); + return DECODE_INVALID_DATA; + } + + // read number of NALU + memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t)); + pByteStream += 4; + sizeLeft -= 4; + + if (mMetadata.naluNumber == 0) { + WTRACE("Number of NALU is ZERO!"); + return DECODE_SUCCESS; + } + + for (int32_t i = 0; i < mMetadata.naluNumber; i++) { + if (sizeLeft < 12) { + ETRACE("Not enough data to
parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft); + return DECODE_INVALID_DATA; + } + sizeLeft -= 12; + // read NALU offset + memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU size + memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU header length + memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + if (sizeLeft < pNaluInfo->naluHeaderLen) { + ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen); + return DECODE_INVALID_DATA; + } + + sizeLeft -= pNaluInfo->naluHeaderLen; + + if (pNaluInfo->naluHeaderLen) { + // copy start code prefix to buffer + memcpy(mNaluHeaderBuffer + sizeAccumulated, + startcodePrefix, + STARTCODE_PREFIX_LEN); + sizeAccumulated += STARTCODE_PREFIX_LEN; + + // copy NALU header + memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen); + pByteStream += pNaluInfo->naluHeaderLen; + + sizeAccumulated += pNaluInfo->naluHeaderLen; + } else { + WTRACE("header len is zero for NALU %d", i); + } + + // for next NALU + pNaluInfo++; + } + + buffer->data = mNaluHeaderBuffer; + buffer->size = sizeAccumulated; + + return VideoDecoderAVC::decode(buffer); +} + + +Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { + + Decode_Status status; + VAStatus vaStatus; + uint32_t bufferIDCount = 0; + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID bufferIDs[4]; + + vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); + vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); + VAPictureParameterBufferH264 *picParam = picData->pic_parms; + VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); + + if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { + // either condition indicates start of a new frame + if (sliceParam->first_mb_in_slice != 0) { + WTRACE("The first slice is lost."); + // TODO: handle the first slice lost + } + if (mDecodingFrame) { + // interlace content, complete decoding the first field + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS("vaEndPicture"); + + // for interlace content, top field may be valid only after the second field is parsed + mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; + } + + // Check there is no reference frame loss before decoding a frame + + // Update the reference frames and surface IDs for DPB and current frame + status = updateDPB(picParam); + CHECK_STATUS("updateDPB"); + + //We have to provide a hacked DPB rather than complete DPB for libva as workaround + status = updateReferenceFrames(picData); + CHECK_STATUS("updateReferenceFrames"); + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); + + // start decoding a frame + mDecodingFrame = true; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferH264), + 1, + picParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferH264), + 1, + data->IQ_matrix_buf, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); + 
bufferIDCount++; + } + + status = setReference(sliceParam); + CHECK_STATUS("setReference"); + + // find which NaluInfo corresponds to the current slice + int naluIndex = 0; + uint32_t accumulatedHeaderLen = 0; + uint32_t headerLen = 0; + for (; naluIndex < mMetadata.naluNumber; naluIndex++) { + headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen; + if (headerLen == 0) { + WTRACE("length of current NAL unit is 0."); + continue; + } + accumulatedHeaderLen += STARTCODE_PREFIX_LEN; + if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) { + break; + } + accumulatedHeaderLen += headerLen; + } + + if (sliceData->slice_offset != accumulatedHeaderLen) { + WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen); + } + + sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen; + sliceData->slice_size = sliceParam->slice_data_size; + + // no need to update: + // sliceParam->slice_data_offset - 0 always + // sliceParam->slice_data_bit_offset - relative to sliceData->slice_offset + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264), + 1, + sliceParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); + bufferIDCount++; + + // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to the start code of the current NAL unit + // offset points to the first byte of the NAL unit + uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset; + if (mInputBuffer != NULL) { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + sliceData->slice_size, //size + 1, //num_elements + mInputBuffer + sliceOffset, + &bufferIDs[bufferIDCount]); + } else { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAProtectedSliceDataBufferType, + sliceData->slice_size, //size + 1, //num_elements + (uint8_t*)sliceOffset, // IMR offset + &bufferIDs[bufferIDCount]); + } + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + bufferIDCount++; + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + bufferIDs, + bufferIDCount); + CHECK_VA_STATUS("vaRenderPicture"); + + return DECODE_SUCCESS; +} + + +// Scan the current buffer for the 3-byte start-code pattern "0x000001". +// Returns the offset of the position following the pattern if it is found, or -1 if it is not found. +int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) { + uint8_t *ptr; + uint32_t left = 0, data = 0, phase = 0; + uint8_t mask1 = 0, mask2 = 0; + + /* Meaning of phase: + 0: initial status, "0x000001" bytes are not found so far; + 1: one "0x00" byte is found; + 2: two or more consecutive "0x00" bytes are found; + 3: "0x000001" pattern is found; + 4: there is at least one more byte after "0x000001"; + */ + + left = length; + ptr = (uint8_t *) (stream + offset); + phase = 0; + + // parse while there is more data and the start code has not been found + while ((left > 0) && (phase < 3)) { + // Check if the address is 32-bit aligned & phase=0; if that's the case we can check 4 bytes instead of one byte at a time. + if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) { + while (left > 3) { + data = *((uint32_t *)ptr); + mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0)); + mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1)); + // If the second byte and fourth byte are not zeros, then we cannot have a start code here, + // as we need two consecutive zero bytes for a start code pattern.
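+ // Note: STARTCODE_MASK1 (0x0000FF00) selects the second byte and STARTCODE_MASK0 (0xFF000000) the fourth byte of the little-endian word; any pair of consecutive zero bytes inside or straddling these four bytes must include one of them.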
+ if (mask1 && mask2) { + // skip 4 bytes and start over + ptr += 4; + left -= 4; + continue; + } else { + break; + } + } + } + + // At this point either the data is not on a 32-bit boundary or phase > 0, so we look at one byte at a time + if (left > 0) { + if (*ptr == STARTCODE_00) { + phase++; + if (phase > 2) { + // more than 2 consecutive '0x00' bytes are found + phase = 2; + } + } else if ((*ptr == STARTCODE_01) && (phase == 2)) { + // start code is found + phase = 3; + } else { + // reset lookup + phase = 0; + } + ptr++; + left--; + } + } + + if ((left > 0) && (phase == 3)) { + phase = 4; + // return the offset of the position following the "0x000001" byte string in the buffer + return (int32_t)(ptr - stream); + } + return -1; +} + + +Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) { + uint8_t naluType; + int32_t naluHeaderLen; + + naluType = *(uint8_t *)(stream + naluStream->naluOffset); + naluType &= NALU_TYPE_MASK; + // first update the NALU header length based on the NALU type + if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) { + // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes + naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE); + } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) { + // sps, pps, sei, etc.: return the entire NAL unit in the clear + naluHeaderLen = naluStream->naluLen; + } else { + return DECODE_FRAME_DROPPED; + } + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t)); + naluStream->streamPos += 4; + + if (naluHeaderLen) { + memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen); + naluStream->streamPos += naluHeaderLen; + } + return DECODE_SUCCESS; +} + + +// parse a start-code prefixed stream, also known as an Annex B byte stream, commonly used in AVI, ES, and MPEG2 TS containers +Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) { + int32_t naluOffset, offset, left; + NaluInfo *info; + uint32_t ret = DECODE_SUCCESS; + + naluOffset = 0; + offset = 0; + left = length; + + // leave 4 bytes to copy the NALU count + naluStream->streamPos = 4; + naluStream->naluCount = 0; + memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER); + + for (; ;) { + naluOffset = findNalUnitOffset(stream, offset, left); + if (naluOffset == -1) { + break; + } + + if (naluStream->naluCount == 0) { + naluStream->naluOffset = naluOffset; + } else { + naluStream->naluLen = naluOffset - naluStream->naluOffset - STARTCODE_PREFIX_LEN; + ret = copyNaluHeader(stream, naluStream); + if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) { + LOGW("copyNaluHeader returned %d", ret); + return ret; + } + // starting position for the next NALU + naluStream->naluOffset = naluOffset; + } + + if (ret == DECODE_SUCCESS) { + naluStream->naluCount++; + } + + // update the next lookup position and length + offset = naluOffset + 1; // skip one byte of NAL unit type + left = length - offset; + } + + if (naluStream->naluCount > 0) { + naluStream->naluLen = length - naluStream->naluOffset; + memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t)); + // ignore return value,
either DECODE_SUCCESS or DECODE_FRAME_DROPPED + copyNaluHeader(stream, naluStream); + return DECODE_SUCCESS; + } + + LOGW("number of valid NALU is 0!"); + return DECODE_SUCCESS; +} + diff --git a/videodecoder/securevideo/ctp/VideoDecoderAVCSecure.h b/videodecoder/securevideo/ctp/VideoDecoderAVCSecure.h new file mode 100644 index 0000000..af5ae44 --- /dev/null +++ b/videodecoder/securevideo/ctp/VideoDecoderAVCSecure.h @@ -0,0 +1,83 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef VIDEO_DECODER_AVC_SECURE_H_ +#define VIDEO_DECODER_AVC_SECURE_H_ + +#include "VideoDecoderAVC.h" + + +class VideoDecoderAVCSecure : public VideoDecoderAVC { +public: + VideoDecoderAVCSecure(const char *mimeType); + virtual ~VideoDecoderAVCSecure(); + + virtual Decode_Status start(VideoConfigBuffer *buffer); + virtual void stop(void); + + // data in the decoded buffer is all encrypted. 
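+ // In addition to the encrypted payload, clear NALU metadata (a NALU count followed by per-NALU offset/length/header records) is passed in; see the NaluByteStream layout below.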
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer); + +private: + enum { + MAX_SLICE_HEADER_SIZE = 30, + MAX_NALU_HEADER_BUFFER = 8192, + MAX_NALU_NUMBER = 400, // > 4096/12 + }; + + // Information about a Network Abstraction Layer Unit + struct NaluInfo { + int32_t naluOffset; // offset of NAL unit in the firewalled buffer + int32_t naluLen; // length of NAL unit + int32_t naluHeaderLen; // length of NAL unit header + }; + + struct NaluMetadata { + NaluInfo *naluInfo; + int32_t naluNumber; // number of NAL units + }; + + struct NaluByteStream { + int32_t naluOffset; + int32_t naluLen; + int32_t streamPos; + uint8_t *byteStream; // 4 bytes of naluCount, then per NALU: 4 bytes of naluOffset, 4 bytes of naluLen, 4 bytes of naluHeaderLen, followed by naluHeaderData + int32_t naluCount; + }; + + virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); + int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length); + Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream); + Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream); + +private: + NaluMetadata mMetadata; + NaluByteStream mByteStream; + uint8_t *mNaluHeaderBuffer; + uint8_t *mInputBuffer; +}; + + + +#endif /* VIDEO_DECODER_AVC_SECURE_H_ */ diff --git a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp new file mode 100644 index 0000000..ac9309d --- /dev/null +++ b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp @@ -0,0 +1,519 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing.
+* +*/ + +#include "VideoDecoderAVCSecure.h" +#include "VideoDecoderTrace.h" +#include <string.h> + + +#define STARTCODE_00 0x00 +#define STARTCODE_01 0x01 +#define STARTCODE_PREFIX_LEN 3 +#define NALU_TYPE_MASK 0x1F + + +// masks for little endian, to mask the second and fourth bytes in the byte stream +#define STARTCODE_MASK0 0xFF000000 //0x00FF0000 +#define STARTCODE_MASK1 0x0000FF00 //0x000000FF + + +typedef enum { + NAL_UNIT_TYPE_unspecified0 = 0, + NAL_UNIT_TYPE_SLICE, + NAL_UNIT_TYPE_DPA, + NAL_UNIT_TYPE_DPB, + NAL_UNIT_TYPE_DPC, + NAL_UNIT_TYPE_IDR, + NAL_UNIT_TYPE_SEI, + NAL_UNIT_TYPE_SPS, + NAL_UNIT_TYPE_PPS, + NAL_UNIT_TYPE_Acc_unit_delimiter, + NAL_UNIT_TYPE_EOSeq, + NAL_UNIT_TYPE_EOstream, + NAL_UNIT_TYPE_filler_data, + NAL_UNIT_TYPE_SPS_extension, + NAL_UNIT_TYPE_Reserved14, + NAL_UNIT_TYPE_Reserved15, + NAL_UNIT_TYPE_Reserved16, + NAL_UNIT_TYPE_Reserved17, + NAL_UNIT_TYPE_Reserved18, + NAL_UNIT_TYPE_ACP, + NAL_UNIT_TYPE_Reserved20, + NAL_UNIT_TYPE_Reserved21, + NAL_UNIT_TYPE_Reserved22, + NAL_UNIT_TYPE_Reserved23, + NAL_UNIT_TYPE_unspecified24, +} NAL_UNIT_TYPE; + +#ifndef min +#define min(X, Y) ((X) < (Y) ? (X) : (Y)) +#endif + + +static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01}; + + +VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) + : VideoDecoderAVC(mimeType), + mNaluHeaderBuffer(NULL), + mInputBuffer(NULL) { + + memset(&mMetadata, 0, sizeof(NaluMetadata)); + memset(&mByteStream, 0, sizeof(NaluByteStream)); +} + +VideoDecoderAVCSecure::~VideoDecoderAVCSecure() { +} + +Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { + Decode_Status status = VideoDecoderAVC::start(buffer); + if (status != DECODE_SUCCESS) { + return status; + } + + mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER]; + mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER]; + mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; + + if (mMetadata.naluInfo == NULL || + mByteStream.byteStream == NULL || + mNaluHeaderBuffer == NULL) { + ETRACE("Failed to allocate memory."); + // TODO: release all allocated memory + return DECODE_MEMORY_FAIL; + } + return status; +} + +void VideoDecoderAVCSecure::stop(void) { + VideoDecoderAVC::stop(); + + if (mMetadata.naluInfo) { + delete [] mMetadata.naluInfo; + mMetadata.naluInfo = NULL; + } + + if (mByteStream.byteStream) { + delete [] mByteStream.byteStream; + mByteStream.byteStream = NULL; + } + + if (mNaluHeaderBuffer) { + delete [] mNaluHeaderBuffer; + mNaluHeaderBuffer = NULL; + } +} + +Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { + Decode_Status status; + int32_t sizeAccumulated = 0; + int32_t sizeLeft = 0; + uint8_t *pByteStream = NULL; + NaluInfo *pNaluInfo = mMetadata.naluInfo; + + if (buffer->flag & IS_SECURE_DATA) { + // NALU headers are appended to the encrypted video bitstream: + // |...encrypted video bitstream (16 bytes aligned)...| 4 bytes of header size |...NALU headers...| + pByteStream = buffer->data + buffer->size + 4; + sizeLeft = *(int32_t *)(buffer->data + buffer->size); + ITRACE("%s sizeLeft: %d buffer->size: %#x", __func__, sizeLeft, buffer->size); + mInputBuffer = buffer->data; + } else { + status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream); + CHECK_STATUS("parseAnnexBStream"); + pByteStream = mByteStream.byteStream; + sizeLeft = mByteStream.streamPos; + mInputBuffer = buffer->data; + } + if (sizeLeft < 4) { + ETRACE("Not enough data to read number of NALU."); + return DECODE_INVALID_DATA; + } + + // read number of NALU +
memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t)); + pByteStream += 4; + sizeLeft -= 4; + + if (mMetadata.naluNumber == 0) { + WTRACE("Number of NALU is ZERO!"); + return DECODE_SUCCESS; + } + + for (int32_t i = 0; i < mMetadata.naluNumber; i++) { + if (sizeLeft < 12) { + ETRACE("Not enough data to parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft); + return DECODE_INVALID_DATA; + } + sizeLeft -= 12; + // read NALU offset + memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU size + memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU header length + memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + + if (sizeLeft < pNaluInfo->naluHeaderLen) { + ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen); + return DECODE_INVALID_DATA; + } + + sizeLeft -= pNaluInfo->naluHeaderLen; + + if (pNaluInfo->naluHeaderLen) { + // copy start code prefix to buffer + memcpy(mNaluHeaderBuffer + sizeAccumulated, + startcodePrefix, + STARTCODE_PREFIX_LEN); + sizeAccumulated += STARTCODE_PREFIX_LEN; + + // copy NALU header + memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen); + pByteStream += pNaluInfo->naluHeaderLen; + + sizeAccumulated += pNaluInfo->naluHeaderLen; + } else { + WTRACE("header len is zero for NALU %d", i); + } + + // for next NALU + pNaluInfo++; + } + + buffer->data = mNaluHeaderBuffer; + buffer->size = sizeAccumulated; + + return VideoDecoderAVC::decode(buffer); +} + + +Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { + + Decode_Status status; + VAStatus vaStatus; + uint32_t bufferIDCount = 0; + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID bufferIDs[4]; + + vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); + vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); + VAPictureParameterBufferH264 *picParam = picData->pic_parms; + VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); + + if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { + // either condition indicates start of a new frame + if (sliceParam->first_mb_in_slice != 0) { + WTRACE("The first slice is lost."); + // TODO: handle the first slice lost + } + if (mDecodingFrame) { + // interlace content, complete decoding the first field + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS("vaEndPicture"); + + // for interlace content, top field may be valid only after the second field is parsed + mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; + } + + // Check there is no reference frame loss before decoding a frame + + // Update the reference frames and surface IDs for DPB and current frame + status = updateDPB(picParam); + CHECK_STATUS("updateDPB"); + + //We have to provide a hacked DPB rather than complete DPB for libva as workaround + status = updateReferenceFrames(picData); + CHECK_STATUS("updateReferenceFrames"); + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); + + // start decoding a frame + mDecodingFrame = true; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferH264), + 1, + picParam, + 
&bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferH264), + 1, + data->IQ_matrix_buf, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); + bufferIDCount++; + } + + status = setReference(sliceParam); + CHECK_STATUS("setReference"); + + // find which NaluInfo corresponds to the current slice + int naluIndex = 0; + uint32_t accumulatedHeaderLen = 0; + uint32_t headerLen = 0; + for (; naluIndex < mMetadata.naluNumber; naluIndex++) { + headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen; + if (headerLen == 0) { + WTRACE("length of current NAL unit is 0."); + continue; + } + accumulatedHeaderLen += STARTCODE_PREFIX_LEN; + if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) { + break; + } + accumulatedHeaderLen += headerLen; + } + + if (sliceData->slice_offset != accumulatedHeaderLen) { + WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen); + } + + sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen; + uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset; + uint32_t slice_offset_shift = sliceOffset % 16; + sliceParam->slice_data_offset += slice_offset_shift; + sliceParam->slice_data_size += slice_offset_shift; + sliceData->slice_size = (sliceParam->slice_data_size + 0xF) & ~0xF; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264), + 1, + sliceParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); + bufferIDCount++; + + // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to the start code of the current NAL unit + // offset points to the first byte of the NAL unit + + if (mInputBuffer != NULL) { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + sliceData->slice_size, //Slice size + 1, // num_elements + mInputBuffer + sliceOffset - slice_offset_shift, + &bufferIDs[bufferIDCount]); + } else { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAProtectedSliceDataBufferType, + sliceData->slice_size, //size + 1, //num_elements + (uint8_t*)sliceOffset, // IMR offset + &bufferIDs[bufferIDCount]); + } + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + bufferIDCount++; + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + bufferIDs, + bufferIDCount); + CHECK_VA_STATUS("vaRenderPicture"); + + return DECODE_SUCCESS; +} + + +// Scan the current buffer for the 3-byte start-code pattern "0x000001". +// Returns the offset of the position following the pattern if it is found, or -1 if it is not found. +int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) { + uint8_t *ptr; + uint32_t left = 0, data = 0, phase = 0; + uint8_t mask1 = 0, mask2 = 0; + + /* Meaning of phase: + 0: initial status, "0x000001" bytes are not found so far; + 1: one "0x00" byte is found; + 2: two or more consecutive "0x00" bytes are found; + 3: "0x000001" pattern is found; + 4: there is at least one more byte after "0x000001"; + */ + + left = length; + ptr = (uint8_t *) (stream + offset); + phase = 0; + + // parse while there is more data and the start code has not been found + while ((left > 0) && (phase < 3)) { + // Check if the address is 32-bit aligned & phase=0; if that's the case we can check 4 bytes instead of one byte at a time.
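+ // On a 4-byte-aligned address the loop below loads a whole uint32_t and rules out four bytes at once by testing the second and fourth bytes against the masks defined above.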
+ if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) { + while (left > 3) { + data = *((uint32_t *)ptr); + mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0)); + mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1)); + // If the second byte and fourth byte are not zeros, then we cannot have a start code here, + // as we need two consecutive zero bytes for a start code pattern. + if (mask1 && mask2) { + // skip 4 bytes and start over + ptr += 4; + left -= 4; + continue; + } else { + break; + } + } + } + + // At this point either the data is not on a 32-bit boundary or phase > 0, so we look at one byte at a time + if (left > 0) { + if (*ptr == STARTCODE_00) { + phase++; + if (phase > 2) { + // more than 2 consecutive '0x00' bytes are found + phase = 2; + } + } else if ((*ptr == STARTCODE_01) && (phase == 2)) { + // start code is found + phase = 3; + } else { + // reset lookup + phase = 0; + } + ptr++; + left--; + } + } + + if ((left > 0) && (phase == 3)) { + phase = 4; + // return the offset of the position following the "0x000001" byte string in the buffer + return (int32_t)(ptr - stream); + } + return -1; +} + + +Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) { + uint8_t naluType; + int32_t naluHeaderLen; + + naluType = *(uint8_t *)(stream + naluStream->naluOffset); + naluType &= NALU_TYPE_MASK; + // first update the NALU header length based on the NALU type + if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) { + // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes + naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE); + } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) { + // sps, pps, sei, etc.: return the entire NAL unit in the clear + naluHeaderLen = naluStream->naluLen; + } else { + return DECODE_FRAME_DROPPED; + } + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t)); + naluStream->streamPos += 4; + + if (naluHeaderLen) { + memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen); + naluStream->streamPos += naluHeaderLen; + } + return DECODE_SUCCESS; +} + + +// parse a start-code prefixed stream, also known as an Annex B byte stream, commonly used in AVI, ES, and MPEG2 TS containers +Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) { + int32_t naluOffset, offset, left; + NaluInfo *info; + uint32_t ret = DECODE_SUCCESS; + + naluOffset = 0; + offset = 0; + left = length; + + // leave 4 bytes to copy the NALU count + naluStream->streamPos = 4; + naluStream->naluCount = 0; + memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER); + + for (; ;) { + naluOffset = findNalUnitOffset(stream, offset, left); + if (naluOffset == -1) { + break; + } + + if (naluStream->naluCount == 0) { + naluStream->naluOffset = naluOffset; + } else { + naluStream->naluLen = naluOffset - naluStream->naluOffset - STARTCODE_PREFIX_LEN; + ret = copyNaluHeader(stream, naluStream); + if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) { + LOGW("copyNaluHeader returned %d", ret); + return ret; + } + // starting position for the next NALU + naluStream->naluOffset = naluOffset; + } + + if (ret ==
DECODE_SUCCESS) { + naluStream->naluCount++; + } + + // update next lookup position and length + offset = naluOffset + 1; // skip one byte of NAL unit type + left = length - offset; + } + + if (naluStream->naluCount > 0) { + naluStream->naluLen = length - naluStream->naluOffset; + memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t)); + // ignore return value, either DECODE_SUCCESS or DECODE_FRAME_DROPPED + copyNaluHeader(stream, naluStream); + return DECODE_SUCCESS; + } + + LOGW("number of valid NALU is 0!"); + return DECODE_SUCCESS; +} + diff --git a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h new file mode 100644 index 0000000..af5ae44 --- /dev/null +++ b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h @@ -0,0 +1,83 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef VIDEO_DECODER_AVC_SECURE_H_ +#define VIDEO_DECODER_AVC_SECURE_H_ + +#include "VideoDecoderAVC.h" + + +class VideoDecoderAVCSecure : public VideoDecoderAVC { +public: + VideoDecoderAVCSecure(const char *mimeType); + virtual ~VideoDecoderAVCSecure(); + + virtual Decode_Status start(VideoConfigBuffer *buffer); + virtual void stop(void); + + // data in the decoded buffer is all encrypted. 
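+ // In addition to the encrypted payload, clear NALU metadata (a NALU count followed by per-NALU offset/length/header records) is passed in; see the NaluByteStream layout below.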
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer); + +private: + enum { + MAX_SLICE_HEADER_SIZE = 30, + MAX_NALU_HEADER_BUFFER = 8192, + MAX_NALU_NUMBER = 400, // > 4096/12 + }; + + // Information about a Network Abstraction Layer Unit + struct NaluInfo { + int32_t naluOffset; // offset of NAL unit in the firewalled buffer + int32_t naluLen; // length of NAL unit + int32_t naluHeaderLen; // length of NAL unit header + }; + + struct NaluMetadata { + NaluInfo *naluInfo; + int32_t naluNumber; // number of NAL units + }; + + struct NaluByteStream { + int32_t naluOffset; + int32_t naluLen; + int32_t streamPos; + uint8_t *byteStream; // 4 bytes of naluCount, then per NALU: 4 bytes of naluOffset, 4 bytes of naluLen, 4 bytes of naluHeaderLen, followed by naluHeaderData + int32_t naluCount; + }; + + virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); + int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length); + Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream); + Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream); + +private: + NaluMetadata mMetadata; + NaluByteStream mByteStream; + uint8_t *mNaluHeaderBuffer; + uint8_t *mInputBuffer; +}; + + + +#endif /* VIDEO_DECODER_AVC_SECURE_H_ */ -- cgit v1.2.3 From 8404a0c1abd82f0919644e6cb7c3377c2e0c9263 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Wed, 24 Apr 2013 14:27:50 +0800 Subject: Make width aligned with 16 when calculating macroblocks BZ: 97703 For some special width/height encode settings, the width isn't aligned with 16, but when calculating macroblocks we need to use a 16-aligned macroblock address Change-Id: I50fe4bfcf66a1963480764404304780a596841a4 Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/104295 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderAVC.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index f84d11c..8d4b8aa 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -1174,9 +1174,9 @@ Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) { } // starting MB row number for this slice, suppose macroblock 16x16 - currentSlice->macroblock_address = startRowInMB * mComParams.resolution.width /16; + currentSlice->macroblock_address = startRowInMB * ((mComParams.resolution.width + 0xf) & ~0xf) / 16; // slice height measured in MB - currentSlice->num_macroblocks = actualSliceHeightInMB * mComParams.resolution.width /16; + currentSlice->num_macroblocks = actualSliceHeightInMB * ((mComParams.resolution.width + 0xf) & ~0xf) / 16; if(task->type == FTYPE_I||task->type == FTYPE_IDR) currentSlice->slice_type = 2; else if(task->type == FTYPE_P) -- cgit v1.2.3 From 5d926fcfa2c14bb90d311a81add63035c5af34ad Mon Sep 17 00:00:00 2001 From: gji2 Date: Wed, 17 Apr 2013 10:38:49 -0400 Subject: enable the high profile features such as CABAC BZ:101039 Pass the CABAC param to the user space driver. Initialize the VideoParamsAVC structure in the H264 encoder constructor. Refine the AVC baseline profile encoding feature setting.
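For illustration, a client would opt in to CABAC roughly as follows (a sketch only: it assumes the getParameters()/setParameters() entry points take a VideoParamsAVC as they do elsewhere in this tree, and the encoder pointer is hypothetical):

    // Hypothetical caller-side sketch: request CABAC entropy coding.
    VideoParamsAVC avcParams;
    encoder->getParameters(&avcParams);    // fetch the current defaults
    avcParams.bEntropyCodingCABAC = 1;     // 0 = CAVLC, 1 = CABAC
    encoder->setParameters(&avcParams);    // derivedSetParams() forces this back
                                           // to 0 when the profile is baseline

The flag then reaches the driver through avcPicParams.pic_fields.bits.entropy_coding_mode_flag, as the renderPictureParams() hunk below shows.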
Change-Id: Ifd8720ed5f5eb9540a12693b509b53f55164eed7 Signed-off-by: gji2 Reviewed-on: http://android.intel.com:8080/103951 Reviewed-by: cactus Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- test/mix_encoder.cpp | 1 + videoencoder/VideoEncoderAVC.cpp | 10 ++++++++++ 2 files changed, 11 insertions(+) diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index 20ebca8..6a1af76 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -221,6 +221,7 @@ Encode_Status SetVideoEncoderParam() { switch(gCodec) { case 0: + gEncoderParams.profile = (VAProfile)VAProfileH264Baseline; break; case 1: gEncoderParams.profile = (VAProfile)VAProfileMPEG4Simple; diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 8d4b8aa..457b421 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -38,6 +38,10 @@ VideoEncoderAVC::VideoEncoderAVC() mVideoParamsAVC.crop.BottomOffset = 0; mVideoParamsAVC.SAR.SarWidth = 0; mVideoParamsAVC.SAR.SarHeight = 0; + mVideoParamsAVC.bEntropyCodingCABAC = 0; + mVideoParamsAVC.bWeightedPPrediction = 0; + mVideoParamsAVC.bDirect8x8Inference = 0; + mVideoParamsAVC.bConstIpred = 0; mAutoReferenceSurfaceNum = 4; packed_seq_header_param_buf_id = VA_INVALID_ID; @@ -83,6 +87,11 @@ Encode_Status VideoEncoderAVC::derivedSetParams(VideoParamConfigSet *videoEncPar return ENCODE_INVALID_PARAMS; mVideoParamsAVC = *encParamsAVC; + if(mComParams.profile == VAProfileH264Baseline){ + mVideoParamsAVC.bEntropyCodingCABAC = 0; + mVideoParamsAVC.bDirect8x8Inference = 0; + mVideoParamsAVC.bWeightedPPrediction = 0; + } return ENCODE_SUCCESS; } @@ -1037,6 +1046,7 @@ Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) { avcPicParams.ReferenceFrames[i].picture_id = mAutoRefSurfaces[i]; } + avcPicParams.pic_fields.bits.entropy_coding_mode_flag = mVideoParamsAVC.bEntropyCodingCABAC; avcPicParams.coded_buf = task->coded_buffer; avcPicParams.last_picture = 0; -- cgit v1.2.3 From ec41650967e32c7b720302696ad1d13f08550c7a Mon Sep 17 00:00:00 2001 From: pingshix Date: Wed, 8 May 2013 10:12:02 +0800 Subject: Enhance the libmix test tool. BZ: 94345 Add picture encoding to the libmix test tool and refine the reconstructed-frame setting. Change-Id: I8dc564943cd1251fcef34f71fb8d5530b3ddd56a Signed-off-by: jiguoliang Signed-off-by: pingshix Reviewed-on: http://android.intel.com:8080/106712 Reviewed-by: cactus Reviewed-by: buildbot Tested-by: buildbot --- test/loadsurface.h | 290 + test/loadsurface_yuv.h | 28836 +++++++++++++++++++++++++++++++++++++ test/mix_encoder.cpp | 21 +- videoencoder/VideoEncoderAVC.cpp | 1 + 4 files changed, 29135 insertions(+), 13 deletions(-) create mode 100644 test/loadsurface.h create mode 100644 test/loadsurface_yuv.h diff --git a/test/loadsurface.h b/test/loadsurface.h new file mode 100644 index 0000000..a13932b --- /dev/null +++ b/test/loadsurface.h @@ -0,0 +1,290 @@ +/* + * Copyright (c) 2008-2009 Intel Corporation. All Rights Reserved.
+ * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sub license, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice (including the + * next paragraph) shall be included in all copies or substantial portions + * of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. + * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR + * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ +#include "loadsurface_yuv.h" +#define CHECK_ENCODE_STATUS(FUNC)\ + if (ret < ENCODE_SUCCESS) { \ + printf(FUNC" Failed. ret = 0x%08x\n", ret); \ + return -1; \ + } + +#define CHECK_ENCODE_STATUS_RETURN(FUNC)\ + if (ret != ENCODE_SUCCESS) { \ + printf(FUNC"Failed. ret = 0x%08x\n", ret); \ + return -1; \ + } + + +static int scale_2dimage(unsigned char *src_img, int src_imgw, int src_imgh, + unsigned char *dst_img, int dst_imgw, int dst_imgh) +{ + int row=0, col=0; + + for (row=0; row #include #include - -#define CHECK_ENCODE_STATUS(FUNC)\ - if (ret < ENCODE_SUCCESS) { \ - printf(FUNC" Failed. ret = 0x%08x\n", ret); \ - return -1; \ - } - -#define CHECK_ENCODE_STATUS_RETURN(FUNC)\ - if (ret != ENCODE_SUCCESS) { \ - printf(FUNC"Failed. ret = 0x%08x\n", ret); \ - return -1; \ - } +#include "loadsurface.h" static const char *AVC_MIME_TYPE = "video/h264"; static const char *MPEG4_MIME_TYPE = "video/mpeg4"; @@ -221,7 +210,6 @@ Encode_Status SetVideoEncoderParam() { switch(gCodec) { case 0: - gEncoderParams.profile = (VAProfile)VAProfileH264Baseline; break; case 1: gEncoderParams.profile = (VAProfile)VAProfileMPEG4Simple; @@ -416,6 +404,13 @@ static int YUV_generator_planar(int width, int height, row_shift += 2; if (row_shift==box_width) row_shift = 0; + YUV_blend_with_pic(width,height, + Y_start, Y_pitch, + U_start, U_pitch, + V_start, V_pitch, + 1, 70); + + return 0; } diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 457b421..a21063f 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -1042,6 +1042,7 @@ Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) { avcPicParams.pic_fields.bits.reference_pic_flag = 1; // Not sure whether these settings work for all drivers }else { + avcPicParams.CurrPic.picture_id= VA_INVALID_SURFACE; for(int i =0; i< mAutoReferenceSurfaceNum; i++) avcPicParams.ReferenceFrames[i].picture_id = mAutoRefSurfaces[i]; } -- cgit v1.2.3 From 8ac9fd811314ad77069a2731326c5e3f1bb64146 Mon Sep 17 00:00:00 2001 From: Liu Bolun Date: Wed, 27 Mar 2013 00:26:18 -0400 Subject: remove code coverage test related code from libmix test tool BZ: 94345 remove code coverage test related code from mix_encoder.cpp; Will commit a new tool for code coverage test to local git server. 
Change-Id: I05b7aefee271330b7274c9eb7f9d0dc8e4dd183f Signed-off-by: Liu Bolun Reviewed-on: http://android.intel.com:8080/106713 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- test/mix_encoder.cpp | 88 ++-------------------------------------------------- 1 file changed, 3 insertions(+), 85 deletions(-) diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index 80d4b7c..83bc76b 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -21,11 +21,7 @@ static const char *AVC_MIME_TYPE = "video/h264"; static const char *MPEG4_MIME_TYPE = "video/mpeg4"; static const char *H263_MIME_TYPE = "video/h263"; -//add for video encode libmix code coverage test--start -//add two mine type define only for code coverage test -static const char *MPEG4_MIME_TYPE_SP= "video/mp4v-es"; -static const char *MEDIA_MIMETYPE_IMAGE_JPEG = "image/jpeg"; -//add for video encode libmix code coverage test--end + static const int box_width = 128; static VideoParamsAVC gVideoParamsAVC; @@ -267,67 +263,6 @@ Encode_Status SetVideoEncoderParam() { tmpVideoParamsUsrptrBuffer.width = 0; gVideoEncoder->getParameters(&tmpVideoParamsUsrptrBuffer); #endif - //---------------------add for libmix encode code coverage test - // VideoEncodeBase.cpp file setConfig && getConfig code coverage test - // only for VCM mode - if(gRC == RATE_CONTROL_VCM) - { - // for setConfig && getConfig default case - VideoConfigAIR configAIR1; - memset(&configAIR1,0x00,sizeof(VideoConfigAIR)); - gVideoEncoder->setConfig(&configAIR1); - gVideoEncoder->getConfig(&configAIR1); - - // VideoConfigTypeAIR setConfig and getConfig - VideoConfigAIR configAIR; - configAIR.airParams.airAuto = 0; - configAIR.airParams.airMBs = 0; - configAIR.airParams.airThreshold = 0; - gVideoEncoder->setConfig(&configAIR); - gVideoEncoder->getConfig(&configAIR); - - // VideoConfigTypeBitRate setConfig and getConfig - VideoConfigBitRate configBitRate; - configBitRate.rcParams.bitRate = gBitrate; - configBitRate.rcParams.initQP = 15; - configBitRate.rcParams.minQP = 1; - configBitRate.rcParams.windowSize = 50; - configBitRate.rcParams.targetPercentage = 95; - gVideoEncoder->setConfig(&configBitRate); - gVideoEncoder->getConfig(&configBitRate); - - // for VideoConfigTypeSliceNum derivedSetConfig && derivedGetConfig - VideoConfigSliceNum configSliceNum; - gVideoEncoder->getConfig(&configSliceNum); - gVideoEncoder->setConfig(&configSliceNum); - - VideoConfigIntraRefreshType configIntraRefreshType; - configIntraRefreshType.refreshType = VIDEO_ENC_AIR;//VIDEO_ENC_AIR - gVideoEncoder->setConfig(&configIntraRefreshType); - gVideoEncoder->getConfig(&configIntraRefreshType); - - // VideoConfigTypeFrameRate setConfig and getConfig - VideoConfigFrameRate configFrameRate; - configFrameRate.frameRate.frameRateDenom = 1; - configFrameRate.frameRate.frameRateNum = gFrameRate; - gVideoEncoder->setConfig(&configFrameRate); - gVideoEncoder->getConfig(&configFrameRate); - - // VideoEncodeAVC.cpp file derivedSetConfig && derivedGetConfig code coverage test - // for VideoConfigTypeNALSize derivedSetConfig && derivedGetConfig - VideoConfigNALSize configNalSize; - configNalSize.maxSliceSize = 8*gEncodeWidth*gEncodeHeight*1.5; - gVideoEncoder->setConfig(&configNalSize); - gVideoEncoder->getConfig(&configNalSize); - - VideoParamsHRD paramsHRD; - paramsHRD.bufferSize = (uint32_t)(gBitrate/gFrameRate) * 1024 * 8; - paramsHRD.initBufferFullness = (uint32_t)(gBitrate/gFrameRate); - gVideoEncoder->setParameters(¶msHRD); - 
gVideoEncoder->getParameters(¶msHRD); - } - else - { // VideoConfigTypeCyclicFrameInterval setConfig and getConfig VideoConfigCyclicFrameInterval configCyclicFrameInterval; configCyclicFrameInterval.cyclicFrameInterval = 30; @@ -343,7 +278,7 @@ Encode_Status SetVideoEncoderParam() { gVideoEncoder->setConfig(&configAVCIntraPeriod); VideoConfigTypeIDRReq tmpVideoConfigTypeIDRReq; gVideoEncoder->setConfig(&tmpVideoConfigTypeIDRReq); - } + if (gMode != 4) { @@ -803,20 +738,9 @@ int main(int argc, char* argv[]) case 2: codec = H263_MIME_TYPE; break; -//add for video encode libmix code coverage test--start - case 3: - codec = MPEG4_MIME_TYPE_SP; - break; - case 4: - codec = MEDIA_MIMETYPE_IMAGE_JPEG; - break; - case 5: - codec = NULL; - break; default: printf("Not support this type codec\n"); return 1; -//add for video encode libmix code coverage test--end } switch(gRCMode) @@ -866,12 +790,11 @@ int main(int argc, char* argv[]) return 1; } -//add for video encode libmix code coverage test--start if(codec != NULL) printf("\nStart %s Encoding ....\n", codec); else printf("\nStart codec is null only for code coverage test ....\n"); -//add for video encode libmix code coverage test--end + printf("Mode is %s, RC mode is %s, Width=%d, Height=%d, Bitrate=%dbps, EncodeFrames=%d, SyncMode=%d, file is %s, outputnalformat is %s\n\n", gModeString[gMode], gRCModeString[gRCMode], gSrcWidth, gSrcHeight, gBitrate, gEncFrames, gSyncEncMode, gFile,gOutPutFormatString[gOutPutFormat]); //sleep(10); @@ -886,11 +809,6 @@ for(int i=0; i<1; i++) return 1; } - // Adding for code coverage test - // VideoEncoderBase.cpp uncalled function - // VideoEncoderBase::flush() - gVideoEncoder->flush(); - //set parameter SetVideoEncoderParam(); -- cgit v1.2.3 From ea956fec9766f3f15639bf4a27d19d515067129e Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Mon, 13 May 2013 11:38:29 +0800 Subject: libmix: Enable VP8 hardware thumbnail generation on Merrifield BZ: 107110 Enable VP8 hardware thumbnail generation on Merrifield Change-Id: I541d850dbf1d032cb7c4f87d1fa9af20a69bcb44 Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/107236 Reviewed-by: Ding, Haitao Reviewed-by: cactus Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderVP8.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index ea9270e..f5362f4 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -160,7 +160,7 @@ Decode_Status VideoDecoderVP8::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_v return DECODE_SUCCESS; } - if (VP8_KEY_FRAME == data->codec_data->frame_type) { + if (VP8_KEY_FRAME == data->codec_data->frame_type && !mSizeChanged) { updateFormatInfo(data); if (mSizeChanged == true) { mSizeChanged = false; -- cgit v1.2.3 From 4ce77ae6246861fd3ad25fa9afafb7eff9f93f1f Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Mon, 13 May 2013 13:48:27 +0800 Subject: libmix: remove partition_data_offset from VASliceParameterBufferVP8 BZ: 104446 Remove partition_data_offset from VASliceParameterBufferVP8 in libmix. 
Change-Id: I324a80779f9609ebdaf574bcdd834dbb9db49fe2 Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/107237 Reviewed-by: Ding, Haitao Reviewed-by: cactus Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c | 3 --- 1 file changed, 3 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c index 7a41f43..5ac5402 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c @@ -482,9 +482,6 @@ static uint32_t vbp_add_slice_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 * slc_parms->partition_size[part_index] = pi->partition_size[part_index - 1]; } - /* This field specifies the offset to the first byte of partition data */ - slc_parms->partition_data_offset = slc_parms->slice_data_offset; - pic_data->num_slices++; if (pic_data->num_slices > VP8_MAX_NUM_SLICES) { ETRACE("Number of slices (%d) per picture exceeds the limit (%d).", pic_data->num_slices, VP8_MAX_NUM_SLICES); -- cgit v1.2.3 From 8e3293531cf3e6ab91fb94cdcb5a0c15253b4031 Mon Sep 17 00:00:00 2001 From: Cheng Yao Date: Thu, 23 May 2013 13:52:25 +0800 Subject: libmix: Add libMIX environment variables to BoardConfig.mk BZ: 107291 Move environment variables from libmix makefile to BoardConfig.mk Change-Id: I430948fe5fd7ae212bb8473c74988a0026a518da Signed-off-by: Cheng Yao Reviewed-on: http://android.intel.com:8080/107838 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Reviewed-by: buildbot Tested-by: buildbot --- imagedecoder/Android.mk | 5 +- imagedecoder/JPEGDecoder.c | 4 +- mix_vbp/Android.mk | 5 +- mix_vbp/viddec_fw/fw/parser/Android.mk | 6 +- videodecoder/Android.mk | 23 +- videodecoder/VideoDecoderHost.cpp | 7 +- .../clovertrail/VideoDecoderAVCSecure.cpp | 515 +++++++++++++++++++++ .../clovertrail/VideoDecoderAVCSecure.h | 83 ++++ .../securevideo/ctp/VideoDecoderAVCSecure.cpp | 515 --------------------- .../securevideo/ctp/VideoDecoderAVCSecure.h | 83 ---- 10 files changed, 614 insertions(+), 632 deletions(-) create mode 100644 videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.cpp create mode 100644 videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.h delete mode 100644 videodecoder/securevideo/ctp/VideoDecoderAVCSecure.cpp delete mode 100644 videodecoder/securevideo/ctp/VideoDecoderAVCSecure.h diff --git a/imagedecoder/Android.mk b/imagedecoder/Android.mk index cb81ed5..9bf4336 100644 --- a/imagedecoder/Android.mk +++ b/imagedecoder/Android.mk @@ -29,9 +29,8 @@ LOCAL_LDLIBS += -lpthread LOCAL_CFLAGS += -Wno-multichar LOCAL_CFLAGS += -DUSE_INTEL_JPEGDEC -BAYLAKE_PRODUCT := baylake -ifneq ($(filter $(TARGET_PRODUCT),$(BAYLAKE_PRODUCT)),) -LOCAL_CFLAGS += -D__BAYLAKE__ +ifeq ($(JPEGDEC_USES_GEN),true) +LOCAL_CFLAGS += -DJPEGDEC_USES_GEN endif LOCAL_MODULE:= libjpeg_hw diff --git a/imagedecoder/JPEGDecoder.c b/imagedecoder/JPEGDecoder.c index baf54e4..10d4d1c 100644 --- a/imagedecoder/JPEGDecoder.c +++ b/imagedecoder/JPEGDecoder.c @@ -309,7 +309,7 @@ Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr) { VASurfaceAttrib fourcc; unsigned int surface_format = jdva_get_surface_format(jd_libva_ptr, &fourcc); -#ifdef __BAYLAKE__ +#ifdef JPEGDEC_USES_GEN va_status = vaCreateSurfaces(jd_libva_ptr->va_display, surface_format, jd_libva_ptr->image_width, jd_libva_ptr->image_height, @@ -574,7 +574,7 @@ jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset : 0; 
ITRACE("\tpitches=[%u %u %u]", jd_libva_ptr->surface_image.pitches[0], jd_libva_ptr->surface_image.pitches[1], jd_libva_ptr->surface_image.pitches[2]); ITRACE("\toffsets=[%u %u %u]", jd_libva_ptr->surface_image.offsets[0], jd_libva_ptr->surface_image.offsets[1], jd_libva_ptr->surface_image.offsets[2]); -#ifdef __BAYLAKE__ +#ifdef JPEGDEC_USES_GEN float CbCr_h = ((float)jd_libva_ptr->picture_param_buf.components[1].h_sampling_factor) / jd_libva_ptr->picture_param_buf.components[0].h_sampling_factor; float CbCr_v = ((float)jd_libva_ptr->picture_param_buf.components[1].v_sampling_factor) / jd_libva_ptr->picture_param_buf.components[0].v_sampling_factor; dump_yuv_image(jd_libva_ptr->surface_image, jd_libva_ptr->image_buf, diff --git a/mix_vbp/Android.mk b/mix_vbp/Android.mk index 93e9bd3..b9e887e 100644 --- a/mix_vbp/Android.mk +++ b/mix_vbp/Android.mk @@ -8,9 +8,6 @@ include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/h264/parser/Androi include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/mp4/parser/Android.mk include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vc1/parser/Android.mk -PLATFORM_SUPPORT_VP8 := \ - merrifield \ - baytrail -ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_VP8)),) +ifeq ($(USE_HW_VP8),true) include $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vp8/parser/Android.mk endif diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk index 4bc8b9b..659b473 100644 --- a/mix_vbp/viddec_fw/fw/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -45,11 +45,7 @@ LOCAL_CFLAGS += -DVBP_TRACE LOCAL_SHARED_LIBRARIES += liblog endif -PLATFORM_SUPPORT_VP8 := \ - merrifield \ - baytrail - -ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_VP8)),) +ifeq ($(USE_HW_VP8),true) LOCAL_SRC_FILES += vbp_vp8_parser.c LOCAL_C_INCLUDES += $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vp8/include LOCAL_CFLAGS += -DUSE_HW_VP8 diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 8ba8163..1c47729 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -17,23 +17,12 @@ LOCAL_C_INCLUDES := \ $(TARGET_OUT_HEADERS)/libva \ $(TARGET_OUT_HEADERS)/libmixvbp -ifeq ($(TARGET_BOARD_PLATFORM),clovertrail) -LOCAL_SRC_FILES += securevideo/ctp/VideoDecoderAVCSecure.cpp - -LOCAL_C_INCLUDES += $(LOCAL_PATH)/securevideo/ctp -endif - -ifeq ($(TARGET_BOARD_PLATFORM),merrifield) -LOCAL_SRC_FILES += securevideo/merrifield/VideoDecoderAVCSecure.cpp - -LOCAL_C_INCLUDES += $(LOCAL_PATH)/securevideo/merrifield +ifeq ($(USE_INTEL_SECURE_AVC),true) +LOCAL_SRC_FILES += securevideo/$(TARGET_BOARD_PLATFORM)/VideoDecoderAVCSecure.cpp +LOCAL_C_INCLUDES += $(LOCAL_PATH)/securevideo/$(TARGET_BOARD_PLATFORM) endif ifeq ($(TARGET_BOARD_PLATFORM),baytrail) -LOCAL_SRC_FILES += securevideo/baytrail/VideoDecoderAVCSecure.cpp - -LOCAL_C_INCLUDES += $(LOCAL_PATH)/securevideo/baytrail - LOCAL_CFLAGS += -DLOAD_PVR_DRIVER endif @@ -61,11 +50,7 @@ LOCAL_COPY_HEADERS := \ LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libva_videodecoder -PLATFORM_SUPPORT_VP8 := \ - merrifield \ - baytrail - -ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_VP8)),) +ifeq ($(USE_HW_VP8),true) LOCAL_SRC_FILES += VideoDecoderVP8.cpp LOCAL_CFLAGS += -DUSE_HW_VP8 endif diff --git a/videodecoder/VideoDecoderHost.cpp b/videodecoder/VideoDecoderHost.cpp index ba04797..0181343 100644 --- a/videodecoder/VideoDecoderHost.cpp +++ b/videodecoder/VideoDecoderHost.cpp @@ -26,7 +26,9 @@ #include "VideoDecoderMPEG4.h" #include 
"VideoDecoderAVC.h" #include "VideoDecoderPAVC.h" +#ifdef USE_INTEL_SECURE_AVC #include "VideoDecoderAVCSecure.h" +#endif #ifdef USE_HW_VP8 #include "VideoDecoderVP8.h" #endif @@ -58,10 +60,13 @@ IVideoDecoder* createVideoDecoder(const char* mimeType) { } else if (strcasecmp(mimeType, "video/pavc") == 0) { VideoDecoderAVC *p = new VideoDecoderPAVC(mimeType); return (IVideoDecoder *)p; - } else if (strcasecmp(mimeType, "video/avc-secure") == 0) { + } +#ifdef USE_INTEL_SECURE_AVC + else if (strcasecmp(mimeType, "video/avc-secure") == 0) { VideoDecoderAVC *p = new VideoDecoderAVCSecure(mimeType); return (IVideoDecoder *)p; } +#endif #ifdef USE_HW_VP8 else if (strcasecmp(mimeType, "video/vp8") == 0 || strcasecmp(mimeType, "video/x-vnd.on2.vp8") == 0) { diff --git a/videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.cpp new file mode 100644 index 0000000..3bcfd70 --- /dev/null +++ b/videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.cpp @@ -0,0 +1,515 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#include "VideoDecoderAVCSecure.h" +#include "VideoDecoderTrace.h" +#include + + +#define STARTCODE_00 0x00 +#define STARTCODE_01 0x01 +#define STARTCODE_PREFIX_LEN 3 +#define NALU_TYPE_MASK 0x1F + + +// mask for little endian, to mast the second and fourth bytes in the byte stream +#define STARTCODE_MASK0 0xFF000000 //0x00FF0000 +#define STARTCODE_MASK1 0x0000FF00 //0x000000FF + + +typedef enum { + NAL_UNIT_TYPE_unspecified0 = 0, + NAL_UNIT_TYPE_SLICE, + NAL_UNIT_TYPE_DPA, + NAL_UNIT_TYPE_DPB, + NAL_UNIT_TYPE_DPC, + NAL_UNIT_TYPE_IDR, + NAL_UNIT_TYPE_SEI, + NAL_UNIT_TYPE_SPS, + NAL_UNIT_TYPE_PPS, + NAL_UNIT_TYPE_Acc_unit_delimiter, + NAL_UNIT_TYPE_EOSeq, + NAL_UNIT_TYPE_EOstream, + NAL_UNIT_TYPE_filler_data, + NAL_UNIT_TYPE_SPS_extension, + NAL_UNIT_TYPE_Reserved14, + NAL_UNIT_TYPE_Reserved15, + NAL_UNIT_TYPE_Reserved16, + NAL_UNIT_TYPE_Reserved17, + NAL_UNIT_TYPE_Reserved18, + NAL_UNIT_TYPE_ACP, + NAL_UNIT_TYPE_Reserved20, + NAL_UNIT_TYPE_Reserved21, + NAL_UNIT_TYPE_Reserved22, + NAL_UNIT_TYPE_Reserved23, + NAL_UNIT_TYPE_unspecified24, +} NAL_UNIT_TYPE; + +#ifndef min +#define min(X, Y) ((X) <(Y) ? 
(X) : (Y)) +#endif + + +static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01}; + + +VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) + : VideoDecoderAVC(mimeType), + mNaluHeaderBuffer(NULL), + mInputBuffer(NULL) { + + memset(&mMetadata, 0, sizeof(NaluMetadata)); + memset(&mByteStream, 0, sizeof(NaluByteStream)); +} + +VideoDecoderAVCSecure::~VideoDecoderAVCSecure() { +} + +Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { + Decode_Status status = VideoDecoderAVC::start(buffer); + if (status != DECODE_SUCCESS) { + return status; + } + + mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER]; + mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER]; + mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; + + if (mMetadata.naluInfo == NULL || + mByteStream.byteStream == NULL || + mNaluHeaderBuffer == NULL) { + ETRACE("Failed to allocate memory."); + // TODO: release all allocated memory + return DECODE_MEMORY_FAIL; + } + return status; +} + +void VideoDecoderAVCSecure::stop(void) { + VideoDecoderAVC::stop(); + + if (mMetadata.naluInfo) { + delete [] mMetadata.naluInfo; + mMetadata.naluInfo = NULL; + } + + if (mByteStream.byteStream) { + delete [] mByteStream.byteStream; + mByteStream.byteStream = NULL; + } + + if (mNaluHeaderBuffer) { + delete [] mNaluHeaderBuffer; + mNaluHeaderBuffer = NULL; + } +} + +Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { + Decode_Status status; + int32_t sizeAccumulated = 0; + int32_t sizeLeft = 0; + uint8_t *pByteStream = NULL; + NaluInfo *pNaluInfo = mMetadata.naluInfo; + + if (buffer->flag & IS_SECURE_DATA) { + pByteStream = buffer->data; + sizeLeft = buffer->size; + mInputBuffer = NULL; + } else { + status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream); + CHECK_STATUS("parseAnnexBStream"); + pByteStream = mByteStream.byteStream; + sizeLeft = mByteStream.streamPos; + mInputBuffer = buffer->data; + } + if (sizeLeft < 4) { + ETRACE("Not enough data to read number of NALU."); + return DECODE_INVALID_DATA; + } + + // read number of NALU + memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t)); + pByteStream += 4; + sizeLeft -= 4; + + if (mMetadata.naluNumber == 0) { + WTRACE("Number of NALU is ZERO!"); + return DECODE_SUCCESS; + } + + for (int32_t i = 0; i < mMetadata.naluNumber; i++) { + if (sizeLeft < 12) { + ETRACE("Not enough data to parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft); + return DECODE_INVALID_DATA; + } + sizeLeft -= 12; + // read NALU offset + memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU size + memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU header length + memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + if (sizeLeft < pNaluInfo->naluHeaderLen) { + ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen); + return DECODE_INVALID_DATA; + } + + sizeLeft -= pNaluInfo->naluHeaderLen; + + if (pNaluInfo->naluHeaderLen) { + // copy start code prefix to buffer + memcpy(mNaluHeaderBuffer + sizeAccumulated, + startcodePrefix, + STARTCODE_PREFIX_LEN); + sizeAccumulated += STARTCODE_PREFIX_LEN; + + // copy NALU header + memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen); + pByteStream += pNaluInfo->naluHeaderLen; + + sizeAccumulated += pNaluInfo->naluHeaderLen; + } else { + 
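/* Illustrative note, not part of the patch: the loop above consumes the
 * metadata stream as a sequence of little-endian int32 fields,
 *
 *     [naluNumber]
 *     then per NALU: [naluOffset][naluLen][naluHeaderLen][header bytes...]
 *
 * which is exactly the layout that copyNaluHeader() below writes into
 * NaluByteStream::byteStream on the non-secure path. */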
WTRACE("header len is zero for NALU %d", i); + } + + // for next NALU + pNaluInfo++; + } + + buffer->data = mNaluHeaderBuffer; + buffer->size = sizeAccumulated; + + return VideoDecoderAVC::decode(buffer); +} + + +Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { + + Decode_Status status; + VAStatus vaStatus; + uint32_t bufferIDCount = 0; + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID bufferIDs[4]; + + vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); + vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); + VAPictureParameterBufferH264 *picParam = picData->pic_parms; + VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); + + if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { + // either condition indicates start of a new frame + if (sliceParam->first_mb_in_slice != 0) { + WTRACE("The first slice is lost."); + // TODO: handle the first slice lost + } + if (mDecodingFrame) { + // interlace content, complete decoding the first field + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS("vaEndPicture"); + + // for interlace content, top field may be valid only after the second field is parsed + mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; + } + + // Check there is no reference frame loss before decoding a frame + + // Update the reference frames and surface IDs for DPB and current frame + status = updateDPB(picParam); + CHECK_STATUS("updateDPB"); + + //We have to provide a hacked DPB rather than complete DPB for libva as workaround + status = updateReferenceFrames(picData); + CHECK_STATUS("updateReferenceFrames"); + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); + + // start decoding a frame + mDecodingFrame = true; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferH264), + 1, + picParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferH264), + 1, + data->IQ_matrix_buf, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); + bufferIDCount++; + } + + status = setReference(sliceParam); + CHECK_STATUS("setReference"); + + // find which naluinfo is correlated to current slice + int naluIndex = 0; + uint32_t accumulatedHeaderLen = 0; + uint32_t headerLen = 0; + for (; naluIndex < mMetadata.naluNumber; naluIndex++) { + headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen; + if (headerLen == 0) { + WTRACE("lenght of current NAL unit is 0."); + continue; + } + accumulatedHeaderLen += STARTCODE_PREFIX_LEN; + if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) { + break; + } + accumulatedHeaderLen += headerLen; + } + + if (sliceData->slice_offset != accumulatedHeaderLen) { + WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen); + } + + sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen; + sliceData->slice_size = sliceParam->slice_data_size; + + // no need to update: + // sliceParam->slice_data_offset - 0 always + // sliceParam->slice_data_bit_offset - relative to sliceData->slice_offset + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + 
VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264), + 1, + sliceParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); + bufferIDCount++; + + // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to start codes of current NAL unit + // offset points to first byte of NAL unit + uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset; + if (mInputBuffer != NULL) { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + sliceData->slice_size, //size + 1, //num_elements + mInputBuffer + sliceOffset, + &bufferIDs[bufferIDCount]); + } else { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAProtectedSliceDataBufferType, + sliceData->slice_size, //size + 1, //num_elements + (uint8_t*)sliceOffset, // IMR offset + &bufferIDs[bufferIDCount]); + } + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + bufferIDCount++; + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + bufferIDs, + bufferIDCount); + CHECK_VA_STATUS("vaRenderPicture"); + + return DECODE_SUCCESS; +} + + +// Parse byte string pattern "0x000001" (3 bytes) in the current buffer. +// Returns offset of position following the pattern in the buffer if pattern is found or -1 if not found. +int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) { + uint8_t *ptr; + uint32_t left = 0, data = 0, phase = 0; + uint8_t mask1 = 0, mask2 = 0; + + /* Meaning of phase: + 0: initial status, "0x000001" bytes are not found so far; + 1: one "0x00" byte is found; + 2: two or more consecutive "0x00" bytes are found; + 3: "0x000001" pattern is found; + 4: if there is one more byte after "0x000001"; + */ + + left = length; + ptr = (uint8_t *) (stream + offset); + phase = 0; + + // parse while there is more data and the start code has not been found + while ((left > 0) && (phase < 3)) { + // Check if the address is 32-bit aligned & phase=0; if that is the case we can check 4 bytes instead of one byte at a time. + if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) { + while (left > 3) { + data = *((uint32_t *)ptr); + mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0)); + mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1)); + // If second byte and fourth byte are not zeros then we cannot have a start code here, + // as we need two consecutive zero bytes for a start code pattern.
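/* Worked example, not part of the patch: the little-endian load above maps
 * bytes b0 b1 b2 b3 to the word 0xb3b2b1b0, so
 *     data & STARTCODE_MASK0 (0xFF000000) tests b3 (the fourth byte), and
 *     data & STARTCODE_MASK1 (0x0000FF00) tests b1 (the second byte).
 * Any pair of consecutive bytes that could carry the "00 00" of a start
 * code includes b1 or b3, so when both are non-zero the whole word is safe
 * to skip; e.g. bytes 25 F3 61 9A give b1 = F3 and b3 = 9A, skip 4. */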
+ if (mask1 && mask2) { + // skip 4 bytes and start over + ptr += 4; + left -=4; + continue; + } else { + break; + } + } + } + + // At this point either data is not on a 32-bit boundary or phase > 0 so we look at one byte at a time + if (left > 0) { + if (*ptr == STARTCODE_00) { + phase++; + if (phase > 2) { + // more than 2 consecutive '0x00' bytes are found + phase = 2; + } + } else if ((*ptr == STARTCODE_01) && (phase == 2)) { + // start code is found + phase = 3; + } else { + // reset lookup + phase = 0; + } + ptr++; + left--; + } + } + + if ((left > 0) && (phase == 3)) { + phase = 4; + // return offset of position following the pattern in the buffer which matches "0x000001" byte string + return (int32_t)(ptr - stream); + } + return -1; +} + + +Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) { + uint8_t naluType; + int32_t naluHeaderLen; + + naluType = *(uint8_t *)(stream + naluStream->naluOffset); + naluType &= NALU_TYPE_MASK; + // first update nalu header length based on nalu type + if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) { + // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes + naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE); + } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) { + //sps, pps, sei, etc, return the entire NAL unit in clear + naluHeaderLen = naluStream->naluLen; + } else { + return DECODE_FRAME_DROPPED; + } + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t)); + naluStream->streamPos += 4; + + if (naluHeaderLen) { + memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen); + naluStream->streamPos += naluHeaderLen; + } + return DECODE_SUCCESS; +} + + +// parse start-code prefixed stream, also known as Annex B byte stream, commonly used in AVI, ES, MPEG2 TS containers +Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) { + int32_t naluOffset, offset, left; + NaluInfo *info; + uint32_t ret = DECODE_SUCCESS; + + naluOffset = 0; + offset = 0; + left = length; + + // leave 4 bytes to copy nalu count + naluStream->streamPos = 4; + naluStream->naluCount = 0; + memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER); + + for (; ;) { + naluOffset = findNalUnitOffset(stream, offset, left); + if (naluOffset == -1) { + break; + } + + if (naluStream->naluCount == 0) { + naluStream->naluOffset = naluOffset; + } else { + naluStream->naluLen = naluOffset - naluStream->naluOffset - STARTCODE_PREFIX_LEN; + ret = copyNaluHeader(stream, naluStream); + if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) { + LOGW("copyNaluHeader returned %d", ret); + return ret; + } + // starting position for next NALU + naluStream->naluOffset = naluOffset; + } + + if (ret == DECODE_SUCCESS) { + naluStream->naluCount++; + } + + // update next lookup position and length + offset = naluOffset + 1; // skip one byte of NAL unit type + left = length - offset; + } + + if (naluStream->naluCount > 0) { + naluStream->naluLen = length - naluStream->naluOffset; + memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t)); + // ignore return value,
either DECODE_SUCCESS or DECODE_FRAME_DROPPED + copyNaluHeader(stream, naluStream); + return DECODE_SUCCESS; + } + + LOGW("number of valid NALU is 0!"); + return DECODE_SUCCESS; +} + diff --git a/videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.h b/videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.h new file mode 100644 index 0000000..af5ae44 --- /dev/null +++ b/videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.h @@ -0,0 +1,83 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef VIDEO_DECODER_AVC_SECURE_H_ +#define VIDEO_DECODER_AVC_SECURE_H_ + +#include "VideoDecoderAVC.h" + + +class VideoDecoderAVCSecure : public VideoDecoderAVC { +public: + VideoDecoderAVCSecure(const char *mimeType); + virtual ~VideoDecoderAVCSecure(); + + virtual Decode_Status start(VideoConfigBuffer *buffer); + virtual void stop(void); + + // data in the decoded buffer is all encrypted. 
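// Note (illustration, not part of the patch): each record in the metadata
// stream costs 12 bytes of fixed header (naluOffset, naluLen, naluHeaderLen,
// i.e. 3 x int32_t), so a 4 KB metadata page carries at most
// 4096 / 12 = 341 records; that is the "> 4096/12" behind the
// MAX_NALU_NUMBER = 400 bound in the enum that follows.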
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer); + +private: + enum { + MAX_SLICE_HEADER_SIZE = 30, + MAX_NALU_HEADER_BUFFER = 8192, + MAX_NALU_NUMBER = 400, // > 4096/12 + }; + + // Information of Network Abstraction Layer Unit + struct NaluInfo { + int32_t naluOffset; // offset of NAL unit in the firewalled buffer + int32_t naluLen; // length of NAL unit + int32_t naluHeaderLen; // length of NAL unit header + }; + + struct NaluMetadata { + NaluInfo *naluInfo; + int32_t naluNumber; // number of NAL units + }; + + struct NaluByteStream { + int32_t naluOffset; + int32_t naluLen; + int32_t streamPos; + uint8_t *byteStream; // 4 bytes of naluCount, 4 bytes of naluOffset, 4 bytes of naulLen, 4 bytes of naluHeaderLen, followed by naluHeaderData + int32_t naluCount; + }; + + virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); + int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length); + Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream); + Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream); + +private: + NaluMetadata mMetadata; + NaluByteStream mByteStream; + uint8_t *mNaluHeaderBuffer; + uint8_t *mInputBuffer; +}; + + + +#endif /* VIDEO_DECODER_AVC_SECURE_H_ */ diff --git a/videodecoder/securevideo/ctp/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/ctp/VideoDecoderAVCSecure.cpp deleted file mode 100644 index 3bcfd70..0000000 --- a/videodecoder/securevideo/ctp/VideoDecoderAVCSecure.cpp +++ /dev/null @@ -1,515 +0,0 @@ -/* INTEL CONFIDENTIAL -* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. -* -* The source code contained or described herein and all documents -* related to the source code ("Material") are owned by Intel -* Corporation or its suppliers or licensors. Title to the -* Material remains with Intel Corporation or its suppliers and -* licensors. The Material contains trade secrets and proprietary -* and confidential information of Intel or its suppliers and -* licensors. The Material is protected by worldwide copyright and -* trade secret laws and treaty provisions. No part of the Material -* may be used, copied, reproduced, modified, published, uploaded, -* posted, transmitted, distributed, or disclosed in any way without -* Intel's prior express written permission. -* -* No license under any patent, copyright, trade secret or other -* intellectual property right is granted to or conferred upon you -* by disclosure or delivery of the Materials, either expressly, by -* implication, inducement, estoppel or otherwise. Any license -* under such intellectual property rights must be express and -* approved by Intel in writing. 
-* -*/ - -#include "VideoDecoderAVCSecure.h" -#include "VideoDecoderTrace.h" -#include - - -#define STARTCODE_00 0x00 -#define STARTCODE_01 0x01 -#define STARTCODE_PREFIX_LEN 3 -#define NALU_TYPE_MASK 0x1F - - -// mask for little endian, to mast the second and fourth bytes in the byte stream -#define STARTCODE_MASK0 0xFF000000 //0x00FF0000 -#define STARTCODE_MASK1 0x0000FF00 //0x000000FF - - -typedef enum { - NAL_UNIT_TYPE_unspecified0 = 0, - NAL_UNIT_TYPE_SLICE, - NAL_UNIT_TYPE_DPA, - NAL_UNIT_TYPE_DPB, - NAL_UNIT_TYPE_DPC, - NAL_UNIT_TYPE_IDR, - NAL_UNIT_TYPE_SEI, - NAL_UNIT_TYPE_SPS, - NAL_UNIT_TYPE_PPS, - NAL_UNIT_TYPE_Acc_unit_delimiter, - NAL_UNIT_TYPE_EOSeq, - NAL_UNIT_TYPE_EOstream, - NAL_UNIT_TYPE_filler_data, - NAL_UNIT_TYPE_SPS_extension, - NAL_UNIT_TYPE_Reserved14, - NAL_UNIT_TYPE_Reserved15, - NAL_UNIT_TYPE_Reserved16, - NAL_UNIT_TYPE_Reserved17, - NAL_UNIT_TYPE_Reserved18, - NAL_UNIT_TYPE_ACP, - NAL_UNIT_TYPE_Reserved20, - NAL_UNIT_TYPE_Reserved21, - NAL_UNIT_TYPE_Reserved22, - NAL_UNIT_TYPE_Reserved23, - NAL_UNIT_TYPE_unspecified24, -} NAL_UNIT_TYPE; - -#ifndef min -#define min(X, Y) ((X) <(Y) ? (X) : (Y)) -#endif - - -static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01}; - - -VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) - : VideoDecoderAVC(mimeType), - mNaluHeaderBuffer(NULL), - mInputBuffer(NULL) { - - memset(&mMetadata, 0, sizeof(NaluMetadata)); - memset(&mByteStream, 0, sizeof(NaluByteStream)); -} - -VideoDecoderAVCSecure::~VideoDecoderAVCSecure() { -} - -Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { - Decode_Status status = VideoDecoderAVC::start(buffer); - if (status != DECODE_SUCCESS) { - return status; - } - - mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER]; - mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER]; - mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; - - if (mMetadata.naluInfo == NULL || - mByteStream.byteStream == NULL || - mNaluHeaderBuffer == NULL) { - ETRACE("Failed to allocate memory."); - // TODO: release all allocated memory - return DECODE_MEMORY_FAIL; - } - return status; -} - -void VideoDecoderAVCSecure::stop(void) { - VideoDecoderAVC::stop(); - - if (mMetadata.naluInfo) { - delete [] mMetadata.naluInfo; - mMetadata.naluInfo = NULL; - } - - if (mByteStream.byteStream) { - delete [] mByteStream.byteStream; - mByteStream.byteStream = NULL; - } - - if (mNaluHeaderBuffer) { - delete [] mNaluHeaderBuffer; - mNaluHeaderBuffer = NULL; - } -} - -Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { - Decode_Status status; - int32_t sizeAccumulated = 0; - int32_t sizeLeft = 0; - uint8_t *pByteStream = NULL; - NaluInfo *pNaluInfo = mMetadata.naluInfo; - - if (buffer->flag & IS_SECURE_DATA) { - pByteStream = buffer->data; - sizeLeft = buffer->size; - mInputBuffer = NULL; - } else { - status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream); - CHECK_STATUS("parseAnnexBStream"); - pByteStream = mByteStream.byteStream; - sizeLeft = mByteStream.streamPos; - mInputBuffer = buffer->data; - } - if (sizeLeft < 4) { - ETRACE("Not enough data to read number of NALU."); - return DECODE_INVALID_DATA; - } - - // read number of NALU - memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t)); - pByteStream += 4; - sizeLeft -= 4; - - if (mMetadata.naluNumber == 0) { - WTRACE("Number of NALU is ZERO!"); - return DECODE_SUCCESS; - } - - for (int32_t i = 0; i < mMetadata.naluNumber; i++) { - if (sizeLeft < 12) { - ETRACE("Not enough data to 
parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft); - return DECODE_INVALID_DATA; - } - sizeLeft -= 12; - // read NALU offset - memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t)); - pByteStream += 4; - - // read NALU size - memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t)); - pByteStream += 4; - - // read NALU header length - memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t)); - pByteStream += 4; - - if (sizeLeft < pNaluInfo->naluHeaderLen) { - ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen); - return DECODE_INVALID_DATA; - } - - sizeLeft -= pNaluInfo->naluHeaderLen; - - if (pNaluInfo->naluHeaderLen) { - // copy start code prefix to buffer - memcpy(mNaluHeaderBuffer + sizeAccumulated, - startcodePrefix, - STARTCODE_PREFIX_LEN); - sizeAccumulated += STARTCODE_PREFIX_LEN; - - // copy NALU header - memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen); - pByteStream += pNaluInfo->naluHeaderLen; - - sizeAccumulated += pNaluInfo->naluHeaderLen; - } else { - WTRACE("header len is zero for NALU %d", i); - } - - // for next NALU - pNaluInfo++; - } - - buffer->data = mNaluHeaderBuffer; - buffer->size = sizeAccumulated; - - return VideoDecoderAVC::decode(buffer); -} - - -Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { - - Decode_Status status; - VAStatus vaStatus; - uint32_t bufferIDCount = 0; - // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data - VABufferID bufferIDs[4]; - - vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); - vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); - VAPictureParameterBufferH264 *picParam = picData->pic_parms; - VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); - - if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { - // either condition indicates start of a new frame - if (sliceParam->first_mb_in_slice != 0) { - WTRACE("The first slice is lost."); - // TODO: handle the first slice lost - } - if (mDecodingFrame) { - // interlace content, complete decoding the first field - vaStatus = vaEndPicture(mVADisplay, mVAContext); - CHECK_VA_STATUS("vaEndPicture"); - - // for interlace content, top field may be valid only after the second field is parsed - mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; - } - - // Check there is no reference frame loss before decoding a frame - - // Update the reference frames and surface IDs for DPB and current frame - status = updateDPB(picParam); - CHECK_STATUS("updateDPB"); - - //We have to provide a hacked DPB rather than complete DPB for libva as workaround - status = updateReferenceFrames(picData); - CHECK_STATUS("updateReferenceFrames"); - - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); - CHECK_VA_STATUS("vaBeginPicture"); - - // start decoding a frame - mDecodingFrame = true; - - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VAPictureParameterBufferType, - sizeof(VAPictureParameterBufferH264), - 1, - picParam, - &bufferIDs[bufferIDCount]); - CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); - bufferIDCount++; - - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VAIQMatrixBufferType, - sizeof(VAIQMatrixBufferH264), - 1, - data->IQ_matrix_buf, - &bufferIDs[bufferIDCount]); - CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); - 
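/* Illustrative summary, not part of the patch (this deleted ctp copy is
 * byte-identical to the clovertrail copy added above). The submission
 * order implemented here is:
 *     vaBeginPicture(surface)                           once per frame
 *     vaCreateBuffer(VAPictureParameterBufferType)      once per frame
 *     vaCreateBuffer(VAIQMatrixBufferType)              once per frame
 *     per slice:
 *         vaCreateBuffer(VASliceParameterBufferType)
 *         vaCreateBuffer(VASliceDataBufferType)           clear data, or
 *         vaCreateBuffer(VAProtectedSliceDataBufferType)  IMR offset
 *         vaRenderPicture(buffer IDs gathered for that slice)
 * and vaEndPicture() is issued when the next frame or second field
 * begins. */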
bufferIDCount++; - } - - status = setReference(sliceParam); - CHECK_STATUS("setReference"); - - // find which naluinfo is correlated to current slice - int naluIndex = 0; - uint32_t accumulatedHeaderLen = 0; - uint32_t headerLen = 0; - for (; naluIndex < mMetadata.naluNumber; naluIndex++) { - headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen; - if (headerLen == 0) { - WTRACE("lenght of current NAL unit is 0."); - continue; - } - accumulatedHeaderLen += STARTCODE_PREFIX_LEN; - if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) { - break; - } - accumulatedHeaderLen += headerLen; - } - - if (sliceData->slice_offset != accumulatedHeaderLen) { - WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen); - } - - sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen; - sliceData->slice_size = sliceParam->slice_data_size; - - // no need to update: - // sliceParam->slice_data_offset - 0 always - // sliceParam->slice_data_bit_offset - relative to sliceData->slice_offset - - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VASliceParameterBufferType, - sizeof(VASliceParameterBufferH264), - 1, - sliceParam, - &bufferIDs[bufferIDCount]); - CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); - bufferIDCount++; - - // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to start codes of current NAL unit - // offset points to first byte of NAL unit - uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset; - if (mInputBuffer != NULL) { - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VASliceDataBufferType, - sliceData->slice_size, //size - 1, //num_elements - mInputBuffer + sliceOffset, - &bufferIDs[bufferIDCount]); - } else { - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VAProtectedSliceDataBufferType, - sliceData->slice_size, //size - 1, //num_elements - (uint8_t*)sliceOffset, // IMR offset - &bufferIDs[bufferIDCount]); - } - CHECK_VA_STATUS("vaCreateSliceDataBuffer"); - bufferIDCount++; - - vaStatus = vaRenderPicture( - mVADisplay, - mVAContext, - bufferIDs, - bufferIDCount); - CHECK_VA_STATUS("vaRenderPicture"); - - return DECODE_SUCCESS; -} - - -// Parse byte string pattern "0x000001" (3 bytes) in the current buffer. -// Returns offset of position following the pattern in the buffer if pattern is found or -1 if not found. -int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) { - uint8_t *ptr; - uint32_t left = 0, data = 0, phase = 0; - uint8_t mask1 = 0, mask2 = 0; - - /* Meaning of phase: - 0: initial status, "0x000001" bytes are not found so far; - 1: one "0x00" byte is found; - 2: two or more consecutive "0x00" bytes" are found; - 3: "0x000001" patten is found ; - 4: if there is one more byte after "0x000001"; - */ - - left = length; - ptr = (uint8_t *) (stream + offset); - phase = 0; - - // parse until there is more data and start code not found - while ((left > 0) && (phase < 3)) { - // Check if the address is 32-bit aligned & phase=0, if thats the case we can check 4 bytes instead of one byte at a time. - if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) { - while (left > 3) { - data = *((uint32_t *)ptr); - mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0)); - mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1)); - // If second byte and fourth byte are not zero's then we cannot have a start code here, - // as we need two consecutive zero bytes for a start code pattern. 
- if (mask1 && mask2) { - // skip 4 bytes and start over - ptr += 4; - left -=4; - continue; - } else { - break; - } - } - } - - // At this point either data is not on a 32-bit boundary or phase > 0 so we look at one byte at a time - if (left > 0) { - if (*ptr == STARTCODE_00) { - phase++; - if (phase > 2) { - // more than 2 consecutive '0x00' bytes is found - phase = 2; - } - } else if ((*ptr == STARTCODE_01) && (phase == 2)) { - // start code is found - phase = 3; - } else { - // reset lookup - phase = 0; - } - ptr++; - left--; - } - } - - if ((left > 0) && (phase == 3)) { - phase = 4; - // return offset of position following the pattern in the buffer which matches "0x000001" byte string - return (int32_t)(ptr - stream); - } - return -1; -} - - -Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) { - uint8_t naluType; - int32_t naluHeaderLen; - - naluType = *(uint8_t *)(stream + naluStream->naluOffset); - naluType &= NALU_TYPE_MASK; - // first update nalu header length based on nalu type - if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) { - // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes - naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE); - } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) { - //sps, pps, sei, etc, return the entire NAL unit in clear - naluHeaderLen = naluStream->naluLen; - } else { - return DECODE_FRAME_DROPPED; - } - - memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t)); - naluStream->streamPos += 4; - - memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t)); - naluStream->streamPos += 4; - - memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t)); - naluStream->streamPos += 4; - - if (naluHeaderLen) { - memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen); - naluStream->streamPos += naluHeaderLen; - } - return DECODE_SUCCESS; -} - - -// parse start-code prefixed stream, also knowns as Annex B byte stream, commonly used in AVI, ES, MPEG2 TS container -Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) { - int32_t naluOffset, offset, left; - NaluInfo *info; - uint32_t ret = DECODE_SUCCESS; - - naluOffset = 0; - offset = 0; - left = length; - - // leave 4 bytes to copy nalu count - naluStream->streamPos = 4; - naluStream->naluCount = 0; - memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER); - - for (; ;) { - naluOffset = findNalUnitOffset(stream, offset, left); - if (naluOffset == -1) { - break; - } - - if (naluStream->naluCount == 0) { - naluStream->naluOffset = naluOffset; - } else { - naluStream->naluLen = naluOffset - naluStream->naluOffset - STARTCODE_PREFIX_LEN; - ret = copyNaluHeader(stream, naluStream); - if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) { - LOGW("copyNaluHeader returned %d", ret); - return ret; - } - // starting position for next NALU - naluStream->naluOffset = naluOffset; - } - - if (ret == DECODE_SUCCESS) { - naluStream->naluCount++; - } - - // update next lookup position and length - offset = naluOffset + 1; // skip one byte of NAL unit type - left = length - offset; - } - - if (naluStream->naluCount > 0) { - naluStream->naluLen = length - naluStream->naluOffset; - memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t)); - // ignore return value, 
either DECODE_SUCCESS or DECODE_FRAME_DROPPED - copyNaluHeader(stream, naluStream); - return DECODE_SUCCESS; - } - - LOGW("number of valid NALU is 0!"); - return DECODE_SUCCESS; -} - diff --git a/videodecoder/securevideo/ctp/VideoDecoderAVCSecure.h b/videodecoder/securevideo/ctp/VideoDecoderAVCSecure.h deleted file mode 100644 index af5ae44..0000000 --- a/videodecoder/securevideo/ctp/VideoDecoderAVCSecure.h +++ /dev/null @@ -1,83 +0,0 @@ -/* INTEL CONFIDENTIAL -* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. -* -* The source code contained or described herein and all documents -* related to the source code ("Material") are owned by Intel -* Corporation or its suppliers or licensors. Title to the -* Material remains with Intel Corporation or its suppliers and -* licensors. The Material contains trade secrets and proprietary -* and confidential information of Intel or its suppliers and -* licensors. The Material is protected by worldwide copyright and -* trade secret laws and treaty provisions. No part of the Material -* may be used, copied, reproduced, modified, published, uploaded, -* posted, transmitted, distributed, or disclosed in any way without -* Intel's prior express written permission. -* -* No license under any patent, copyright, trade secret or other -* intellectual property right is granted to or conferred upon you -* by disclosure or delivery of the Materials, either expressly, by -* implication, inducement, estoppel or otherwise. Any license -* under such intellectual property rights must be express and -* approved by Intel in writing. -* -*/ - -#ifndef VIDEO_DECODER_AVC_SECURE_H_ -#define VIDEO_DECODER_AVC_SECURE_H_ - -#include "VideoDecoderAVC.h" - - -class VideoDecoderAVCSecure : public VideoDecoderAVC { -public: - VideoDecoderAVCSecure(const char *mimeType); - virtual ~VideoDecoderAVCSecure(); - - virtual Decode_Status start(VideoConfigBuffer *buffer); - virtual void stop(void); - - // data in the decoded buffer is all encrypted. 
virtual Decode_Status decode(VideoDecodeBuffer *buffer); - -private: - enum { - MAX_SLICE_HEADER_SIZE = 30, - MAX_NALU_HEADER_BUFFER = 8192, - MAX_NALU_NUMBER = 400, // > 4096/12 - }; - - // Information of Network Abstraction Layer Unit - struct NaluInfo { - int32_t naluOffset; // offset of NAL unit in the firewalled buffer - int32_t naluLen; // length of NAL unit - int32_t naluHeaderLen; // length of NAL unit header - }; - - struct NaluMetadata { - NaluInfo *naluInfo; - int32_t naluNumber; // number of NAL units - }; - - struct NaluByteStream { - int32_t naluOffset; - int32_t naluLen; - int32_t streamPos; - uint8_t *byteStream; // 4 bytes of naluCount, 4 bytes of naluOffset, 4 bytes of naulLen, 4 bytes of naluHeaderLen, followed by naluHeaderData - int32_t naluCount; - }; - - virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); - int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length); - Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream); - Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream); - -private: - NaluMetadata mMetadata; - NaluByteStream mByteStream; - uint8_t *mNaluHeaderBuffer; - uint8_t *mInputBuffer; -}; - - - -#endif /* VIDEO_DECODER_AVC_SECURE_H_ */ -- cgit v1.2.3 From e35e22a8327975cf753ef93f4607ebb26ddc2bc9 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Tue, 14 May 2013 17:32:37 +0800 Subject: set max reference frame to 1 BZ: 107416 In psb_video, if the max reference frame count is 4, the level will be re-calculated with a new rule, so set it to 1, since only one frame is referenced during encoding. Change-Id: I40d670af67b8f2620b9f0194c7c633edadbd990b Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/107839 Reviewed-by: cactus Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderAVC.cpp | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index a21063f..57ff5d5 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -903,9 +903,7 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) { avcSeqParams.sar_height = mVideoParamsAVC.SAR.SarHeight; } - // This is a temporary fix suggested by Binglin for bad encoding quality issue - avcSeqParams.max_num_ref_frames = (mEncMaxRefFrames != VA_ATTRIB_NOT_SUPPORTED) ? - mEncMaxRefFrames : 1; + avcSeqParams.max_num_ref_frames = 1; if(avcSeqParams.ip_period > 1) avcSeqParams.max_num_ref_frames = 2; -- cgit v1.2.3 From 456ceeb4a799e2331a5f1f45bad0bf1c1b2011a2 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Wed, 22 May 2013 14:11:51 +0800 Subject: do not move offset when outputting codec data, and keep SPS/PPS integrity in each sequence BZ: 102624 On Merrifield, the PPS changes for each sequence, so keep the SPS/PPS in each sequence header to avoid a seeking issue. After IMG updates its firmware the PPS will be the same for the whole session;
see IMG ticket #25280 Change-Id: Id86f276d28bba88b6967f79d2595c5ae99789fca Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/109164 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderAVC.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 57ff5d5..b4f0222 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -494,8 +494,8 @@ Encode_Status VideoEncoderAVC::outputCodecData( if (headerSize <= outBuffer->bufferSize) { memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg, headerSize); - mTotalSizeCopied += headerSize; - mOffsetInSeg += headerSize; +// mTotalSizeCopied += headerSize; +// mOffsetInSeg += headerSize; outBuffer->dataSize = headerSize; outBuffer->remainingSize = 0; outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; -- cgit v1.2.3 From eb380708ecb7cc1c9e4b76ee2d09079d3be3643a Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Thu, 23 May 2013 16:36:46 +0800 Subject: libmix: Add handling for VP8 frames not for display BZ: 108816 Add handling for VP8 frames not for display Change-Id: I834c397e13dd8cf20a9410d31f3da22b78ab7e71 Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/109575 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 21 ++++++++++++++------- videodecoder/VideoDecoderBase.h | 1 + videodecoder/VideoDecoderVP8.cpp | 2 ++ 3 files changed, 17 insertions(+), 7 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index b1e9541..1eb6b3d 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -47,6 +47,7 @@ VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type) mForwardReference(NULL), mDecodingFrame(false), mSizeChanged(false), + mShowFrame(true), // private member variables mLowDelay(false), @@ -593,7 +594,11 @@ Decode_Status VideoDecoderBase::outputSurfaceBuffer(void) { CHECK_STATUS(); } // frame is successfly decoded to the current surface, it is ready for output - mAcquiredBuffer->renderBuffer.renderDone = false; + if (mShowFrame) { + mAcquiredBuffer->renderBuffer.renderDone = false; + } else { + mAcquiredBuffer->renderBuffer.renderDone = true; + } // decoder must set "asReference and referenceFrame" flags properly @@ -615,13 +620,15 @@ Decode_Status VideoDecoderBase::outputSurfaceBuffer(void) { mLastReference = mAcquiredBuffer; } // add to the output list - if (mOutputHead == NULL) { - mOutputHead = mAcquiredBuffer; - } else { - mOutputTail->next = mAcquiredBuffer; + if (mShowFrame) { + if (mOutputHead == NULL) { + mOutputHead = mAcquiredBuffer; + } else { + mOutputTail->next = mAcquiredBuffer; + } + mOutputTail = mAcquiredBuffer; + mOutputTail->next = NULL; } - mOutputTail = mAcquiredBuffer; - mOutputTail->next = NULL; //VTRACE("Pushing POC %d to queue (pts = %.2f)", mAcquiredBuffer->pictureOrder, mAcquiredBuffer->renderBuffer.timeStamp/1E6); diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index a1ad265..2f45fec 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -112,6 +112,7 @@ protected: VideoConfigBuffer mConfigBuffer; // only store configure meta data. 
bool mDecodingFrame; // indicate whether a frame is being decoded bool mSizeChanged; // indicate whether video size is changed. + bool mShowFrame; // indicate whether the decoded frame is for display enum { // TODO: move this to vbp_loader.h diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index f5362f4..c1a3545 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -138,6 +138,8 @@ Decode_Status VideoDecoderVP8::decode(VideoDecodeBuffer *buffer) { (void**)&data); CHECK_STATUS("VideoDecoderBase::parseBuffer"); + mShowFrame = data->codec_data->show_frame; + if (!mVAStarted) { status = startVA(data); CHECK_STATUS("startVA"); -- cgit v1.2.3 From 978071c1dfd7f32349c0f1a9f65afe1e3ae2146e Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Fri, 17 May 2013 17:25:55 +0800 Subject: VE: wrap sw h.263 encoder in MIX BZ: 104172 We use proprietary Metadata mode and we need to resolve it in our proprietary library. Change-Id: Id4415eadfd8eb9bf39629c0221ff31b46fec7a73 Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/109864 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/Android.mk | 22 ++ videoencoder/PVSoftMPEG4Encoder.cpp | 470 ++++++++++++++++++++++++++++++++++++ videoencoder/PVSoftMPEG4Encoder.h | 68 ++++++ videoencoder/VideoEncoderHost.cpp | 11 + 4 files changed, 571 insertions(+) create mode 100644 videoencoder/PVSoftMPEG4Encoder.cpp create mode 100644 videoencoder/PVSoftMPEG4Encoder.h diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk index 69dc636..e156ec2 100644 --- a/videoencoder/Android.mk +++ b/videoencoder/Android.mk @@ -4,6 +4,14 @@ include $(CLEAR_VARS) #VIDEO_ENC_LOG_ENABLE := true #NO_BUFFER_SHARE := true +ifeq ($(ENABLE_IMG_GRAPHICS),) +LOCAL_CFLAGS += -DBX_RC \ + -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF= + +LOCAL_STATIC_LIBRARIES := \ + libstagefright_m4vh263enc +endif + LOCAL_SRC_FILES := \ VideoEncoderBase.cpp \ VideoEncoderAVC.cpp \ @@ -12,6 +20,10 @@ LOCAL_SRC_FILES := \ VideoEncoderVP8.cpp \ VideoEncoderHost.cpp +ifeq ($(ENABLE_IMG_GRAPHICS),) + LOCAL_SRC_FILES += PVSoftMPEG4Encoder.cpp +endif + # LOCAL_CFLAGS := LOCAL_C_INCLUDES := \ @@ -20,6 +32,16 @@ LOCAL_C_INCLUDES := \ $(TOPDIR)/frameworks/native/include \ $(TARGET_OUT_HEADERS)/pvr +ifeq ($(ENABLE_IMG_GRAPHICS),) +LOCAL_C_INCLUDES += \ + frameworks/av/media/libstagefright/codecs/m4v_h263/enc/include \ + frameworks/av/media/libstagefright/codecs/m4v_h263/enc/src \ + frameworks/av/media/libstagefright/codecs/common/include \ + frameworks/native/include/media/openmax \ + frameworks/native/include/media/hardware \ + frameworks/av/media/libstagefright/include +endif + #LOCAL_LDLIBS += -lpthread LOCAL_SHARED_LIBRARIES := \ diff --git a/videoencoder/PVSoftMPEG4Encoder.cpp b/videoencoder/PVSoftMPEG4Encoder.cpp new file mode 100644 index 0000000..084db78 --- /dev/null +++ b/videoencoder/PVSoftMPEG4Encoder.cpp @@ -0,0 +1,470 @@ +//#define LOG_NDEBUG 0 +#define LOG_TAG "PVSoftMPEG4Encoder" +#include + +#include "mp4enc_api.h" +#include "OMX_Video.h" + +#include +#include +#include +#include +#include + +#include "PVSoftMPEG4Encoder.h" +#include "VideoEncoderLog.h" + +inline static void ConvertYUV420SemiPlanarToYUV420Planar( + uint8_t *inyuv, uint8_t* outyuv, + int32_t width, int32_t height) { + + int32_t outYsize = width * height; + uint32_t *outy = (uint32_t *) outyuv; + uint16_t *outcb = (uint16_t *) (outyuv + outYsize); + uint16_t *outcr = (uint16_t 
*) (outyuv + outYsize + (outYsize >> 2)); + + /* Y copying */ + memcpy(outy, inyuv, outYsize); + + /* U & V copying */ + uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize); + for (int32_t i = height >> 1; i > 0; --i) { + for (int32_t j = width >> 2; j > 0; --j) { + uint32_t temp = *inyuv_4++; + uint32_t tempU = temp & 0xFF; + tempU = tempU | ((temp >> 8) & 0xFF00); + + uint32_t tempV = (temp >> 8) & 0xFF; + tempV = tempV | ((temp >> 16) & 0xFF00); + + // Flip U and V + *outcb++ = tempU; + *outcr++ = tempV; + } + } +} + +inline static void trimBuffer(uint8_t *dataIn, uint8_t *dataOut, + int32_t width, int32_t height, + int32_t stride) { + int32_t h; + uint8_t *y_start, *uv_start, *_y_start, *_uv_start; + y_start = dataOut; + uv_start = dataOut + width * height; + _y_start = dataIn; + _uv_start = dataIn + stride * height; + + for (h = 0; h < height; h++) + memcpy(y_start + h * width, _y_start + h * stride, width); + for (h = 0; h < height / 2; h++) + memcpy(uv_start + h * width, + _uv_start + h * stride, width); +} + +PVSoftMPEG4Encoder::PVSoftMPEG4Encoder(const char *name) + : mEncodeMode(COMBINE_MODE_WITH_ERR_RES), + mVideoWidth(176), + mVideoHeight(144), + mVideoFrameRate(30), + mVideoBitRate(192000), + mVideoColorFormat(OMX_COLOR_FormatYUV420SemiPlanar), + mStoreMetaDataInBuffers(true), + mIDRFrameRefreshIntervalInSec(1), + mNumInputFrames(-1), + mStarted(false), + mSawInputEOS(false), + mSignalledError(false), + mHandle(new tagvideoEncControls), + mEncParams(new tagvideoEncOptions), + mInputFrameData(NULL) +{ + + if (!strcmp(name, "OMX.google.h263.encoder")) { + mEncodeMode = H263_MODE; + } else { + CHECK(!strcmp(name, "OMX.google.mpeg4.encoder")); + } + + setDefaultParams(); + + LOG_I("Construct PVSoftMPEG4Encoder"); +} + +PVSoftMPEG4Encoder::~PVSoftMPEG4Encoder() { + LOG_I("Destruct PVSoftMPEG4Encoder"); + releaseEncoder(); + +} + +void PVSoftMPEG4Encoder::setDefaultParams() { + + // Set default value for input parameters + mComParams.profile = VAProfileH264Baseline; + mComParams.level = 41; + mComParams.rawFormat = RAW_FORMAT_NV12; + mComParams.frameRate.frameRateNum = 30; + mComParams.frameRate.frameRateDenom = 1; + mComParams.resolution.width = 0; + mComParams.resolution.height = 0; + mComParams.intraPeriod = 30; + mComParams.rcMode = RATE_CONTROL_NONE; + mComParams.rcParams.initQP = 15; + mComParams.rcParams.minQP = 0; + mComParams.rcParams.bitRate = 640000; + mComParams.rcParams.targetPercentage= 0; + mComParams.rcParams.windowSize = 0; + mComParams.rcParams.disableFrameSkip = 0; + mComParams.rcParams.disableBitsStuffing = 1; + mComParams.cyclicFrameInterval = 30; + mComParams.refreshType = VIDEO_ENC_NONIR; + mComParams.airParams.airMBs = 0; + mComParams.airParams.airThreshold = 0; + mComParams.airParams.airAuto = 1; + mComParams.disableDeblocking = 2; + mComParams.syncEncMode = false; + mComParams.codedBufNum = 2; + +} + +Encode_Status PVSoftMPEG4Encoder::initEncParams() { + CHECK(mHandle != NULL); + memset(mHandle, 0, sizeof(tagvideoEncControls)); + + CHECK(mEncParams != NULL); + memset(mEncParams, 0, sizeof(tagvideoEncOptions)); + if (!PVGetDefaultEncOption(mEncParams, 0)) { + LOG_E("Failed to get default encoding parameters"); + return ENCODE_FAIL; + } + mEncParams->encMode = mEncodeMode; + mEncParams->encWidth[0] = mVideoWidth; + mEncParams->encHeight[0] = mVideoHeight; + mEncParams->encFrameRate[0] = mVideoFrameRate; + mEncParams->rcType = VBR_1; + mEncParams->vbvDelay = 5.0f; + + // FIXME: + // Add more profile and level support for MPEG4 encoder + 
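/* Illustrative summary, not part of the patch: the input path of this
 * wrapper uses the two helpers defined at the top of this file:
 *     1. trimBuffer() copies a lumaStride-padded NV12 surface into a tight
 *        width x height buffer (e.g. stride 192, width 176: copy 176 bytes
 *        per row and drop 16 bytes of padding);
 *     2. ConvertYUV420SemiPlanarToYUV420Planar() splits the interleaved UV
 *        plane into separate U and V planes, four pixels per 32-bit load,
 *        yielding the I420 layout that PVEncodeVideoFrame() consumes via
 *        vin.yChan/uChan/vChan. */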
mEncParams->profile_level = CORE_PROFILE_LEVEL2; + mEncParams->packetSize = 32; + mEncParams->rvlcEnable = PV_OFF; + mEncParams->numLayers = 1; + mEncParams->timeIncRes = 1000; + mEncParams->tickPerSrc = mEncParams->timeIncRes / mVideoFrameRate; + + mEncParams->bitRate[0] = mVideoBitRate; + mEncParams->iQuant[0] = 15; + mEncParams->pQuant[0] = 12; + mEncParams->quantType[0] = 0; + mEncParams->noFrameSkipped = PV_OFF; + + mTrimedInputData = + (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1); + CHECK(mTrimedInputData != NULL); + + if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { + // Color conversion is needed. + CHECK(mInputFrameData == NULL); + mInputFrameData = + (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1); + CHECK(mInputFrameData != NULL); + } + + // PV's MPEG4 encoder requires the video dimension of multiple + if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) { + LOG_E("Video frame size %dx%d must be a multiple of 16", + mVideoWidth, mVideoHeight); + return ENCODE_INVALID_PARAMS; + } + + // Set IDR frame refresh interval + if (mIDRFrameRefreshIntervalInSec < 0) { + mEncParams->intraPeriod = -1; + } else if (mIDRFrameRefreshIntervalInSec == 0) { + mEncParams->intraPeriod = 1; // All I frames + } else { + mEncParams->intraPeriod = + (mIDRFrameRefreshIntervalInSec * mVideoFrameRate); + } + + mEncParams->numIntraMB = 0; + mEncParams->sceneDetect = PV_ON; + mEncParams->searchRange = 16; + mEncParams->mv8x8Enable = PV_OFF; + mEncParams->gobHeaderInterval = 0; + mEncParams->useACPred = PV_ON; + mEncParams->intraDCVlcTh = 0; + + return ENCODE_SUCCESS; +} + +Encode_Status PVSoftMPEG4Encoder::initEncoder() { + LOG_V("Begin\n"); + + CHECK(!mStarted); + + Encode_Status ret = ENCODE_SUCCESS; + if (ENCODE_SUCCESS != (ret = initEncParams())) { + LOG_E("Failed to initialized encoder params"); + mSignalledError = true; + return ret; + } + + if (!PVInitVideoEncoder(mHandle, mEncParams)) { + LOG_E("Failed to initialize the encoder"); + mSignalledError = true; + return ENCODE_FAIL; + } + + mNumInputFrames = -1; // 1st buffer for codec specific data + mStarted = true; + mCurTimestampUs = 0; + mLastTimestampUs = 0; + mVolHeaderLength = 256; + + LOG_V("End\n"); + + return ENCODE_SUCCESS; +} + +Encode_Status PVSoftMPEG4Encoder::releaseEncoder() { + LOG_V("Begin\n"); + + if (!mStarted) { + return ENCODE_SUCCESS; + } + + PVCleanUpVideoEncoder(mHandle); + + delete mTrimedInputData; + mTrimedInputData = NULL; + + delete mInputFrameData; + mInputFrameData = NULL; + + delete mEncParams; + mEncParams = NULL; + + delete mHandle; + mHandle = NULL; + + mStarted = false; + + LOG_V("End\n"); + + return ENCODE_SUCCESS; +} + +Encode_Status PVSoftMPEG4Encoder::setParameters( + VideoParamConfigSet *videoEncParams) +{ + + Encode_Status ret = ENCODE_SUCCESS; + CHECK_NULL_RETURN_IFFAIL(videoEncParams); + LOG_I("Config type = %d\n", (int)videoEncParams->type); + + if (mStarted) { + LOG_E("Encoder has been initialized, should use setConfig to change configurations\n"); + return ENCODE_ALREADY_INIT; + } + + switch (videoEncParams->type) { + case VideoParamsTypeCommon: { + + VideoParamsCommon *paramsCommon = + reinterpret_cast (videoEncParams); + if (paramsCommon->size != sizeof (VideoParamsCommon)) { + return ENCODE_INVALID_PARAMS; + } + if(paramsCommon->codedBufNum < 2) + paramsCommon->codedBufNum =2; + mComParams = *paramsCommon; + + mVideoWidth = mComParams.resolution.width; + mVideoHeight = mComParams.resolution.height; + mVideoFrameRate = mComParams.frameRate.frameRateNum / \ + 
mComParams.frameRate.frameRateDenom; + mVideoBitRate = mComParams.rcParams.bitRate; + mVideoColorFormat = OMX_COLOR_FormatYUV420SemiPlanar; + break; + } + + case VideoParamsTypeStoreMetaDataInBuffers: { + VideoParamsStoreMetaDataInBuffers *metadata = + reinterpret_cast (videoEncParams); + + if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) { + return ENCODE_INVALID_PARAMS; + } + + mStoreMetaDataInBuffers = metadata->isEnabled; + + break; + } + + default: { + LOG_I ("Wrong ParamType here\n"); + break; + } + } + + return ret; +} + +Encode_Status PVSoftMPEG4Encoder::getParameters( + VideoParamConfigSet *videoEncParams) { + + Encode_Status ret = ENCODE_SUCCESS; + CHECK_NULL_RETURN_IFFAIL(videoEncParams); + LOG_I("Config type = %d\n", (int)videoEncParams->type); + + switch (videoEncParams->type) { + case VideoParamsTypeCommon: { + + VideoParamsCommon *paramsCommon = + reinterpret_cast (videoEncParams); + + if (paramsCommon->size != sizeof (VideoParamsCommon)) { + return ENCODE_INVALID_PARAMS; + } + *paramsCommon = mComParams; + break; + } + + case VideoParamsTypeStoreMetaDataInBuffers: { + VideoParamsStoreMetaDataInBuffers *metadata = + reinterpret_cast (videoEncParams); + + if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) { + return ENCODE_INVALID_PARAMS; + } + + metadata->isEnabled = mStoreMetaDataInBuffers; + + break; + } + + default: { + LOG_I ("Wrong ParamType here\n"); + break; + } + + } + return ret; +} + +Encode_Status PVSoftMPEG4Encoder::encode(VideoEncRawBuffer *inBuffer, uint32_t timeout) +{ + LOG_V("Begin\n"); + + Encode_Status ret = ENCODE_SUCCESS; + + if (mCurTimestampUs <= inBuffer->timeStamp) { + mLastTimestampUs = mCurTimestampUs; + mCurTimestampUs = inBuffer->timeStamp; + } + + if (mNumInputFrames < 0) { + if (!PVGetVolHeader(mHandle, mVolHeader, &mVolHeaderLength, 0)) { + LOG_E("Failed to get VOL header"); + mSignalledError = true; + return ENCODE_FAIL; + } + LOG_I("Output VOL header: %d bytes", mVolHeaderLength); + mNumInputFrames++; + //return ENCODE_SUCCESS; + } + + if (mStoreMetaDataInBuffers) { + IntelMetadataBuffer imb; + int32_t type; + int32_t value; + ValueInfo vinfo; + ValueInfo *pvinfo = &vinfo; + CHECK(IMB_SUCCESS == imb.UnSerialize(inBuffer->data, inBuffer->size)); + imb.GetType((::MetadataBufferType&)type); + imb.GetValue(value); + imb.GetValueInfo(pvinfo); + if (mNumInputFrames == 0) + LOG_I("%d %d %d\n", pvinfo->width, + pvinfo->height, pvinfo->lumaStride); + trimBuffer((uint8_t*)value, mTrimedInputData, pvinfo->width, pvinfo->height, pvinfo->lumaStride); + } else { + memcpy(mTrimedInputData, inBuffer->data, + (mVideoWidth * mVideoHeight * 3 ) >> 1); + } + + if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) { + ConvertYUV420SemiPlanarToYUV420Planar( + mTrimedInputData, mInputFrameData, mVideoWidth, mVideoHeight); + } else { + memcpy(mTrimedInputData, mInputFrameData, + (mVideoWidth * mVideoHeight * 3 ) >> 1); + } + + LOG_V("End\n"); + + return ret; +} + +Encode_Status PVSoftMPEG4Encoder::getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout) +{ + LOG_V("Begin\n"); + + Encode_Status ret = ENCODE_SUCCESS; + uint8_t *outPtr = outBuffer->data; + int32_t dataLength = outBuffer->bufferSize; + outBuffer->flag = 0; + + if ((mEncodeMode == COMBINE_MODE_WITH_ERR_RES) && + (outBuffer->format == OUTPUT_CODEC_DATA)) { + memcpy(outPtr, mVolHeader, mVolHeaderLength); + ++mNumInputFrames; + outBuffer->flag |= ENCODE_BUFFERFLAG_CODECCONFIG; + outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; + outBuffer->flag |= 
ENCODE_BUFFERFLAG_SYNCFRAME; + outBuffer->dataSize = mVolHeaderLength; + outBuffer->remainingSize = 0; + return ENCODE_SUCCESS; + } + + outBuffer->timeStamp = mLastTimestampUs; + LOG_I("info.mTimeUs %lld\n", outBuffer->timeStamp); + + VideoEncFrameIO vin, vout; + memset(&vin, 0, sizeof(vin)); + memset(&vout, 0, sizeof(vout)); + vin.height = ((mVideoHeight + 15) >> 4) << 4; + vin.pitch = ((mVideoWidth + 15) >> 4) << 4; + vin.timestamp = (outBuffer->timeStamp + 500) / 1000; // in ms + vin.yChan = mInputFrameData; + vin.uChan = vin.yChan + vin.height * vin.pitch; + vin.vChan = vin.uChan + ((vin.height * vin.pitch) >> 2); + + unsigned long modTimeMs = 0; + int32_t nLayer = 0; + MP4HintTrack hintTrack; + if (!PVEncodeVideoFrame(mHandle, &vin, &vout, + &modTimeMs, outPtr, &dataLength, &nLayer) || + !PVGetHintTrack(mHandle, &hintTrack)) { + LOG_E("Failed to encode frame or get hink track at frame %lld", + mNumInputFrames); + mSignalledError = true; + ret = ENCODE_FAIL; + } + LOG_I("dataLength %d\n", dataLength); + CHECK(NULL == PVGetOverrunBuffer(mHandle)); + if (hintTrack.CodeType == 0) { // I-frame serves as sync frame + outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME; + } + + ++mNumInputFrames; + + outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; + outBuffer->dataSize = dataLength; + + LOG_V("End\n"); + + return ret; +} + diff --git a/videoencoder/PVSoftMPEG4Encoder.h b/videoencoder/PVSoftMPEG4Encoder.h new file mode 100644 index 0000000..e7c5c30 --- /dev/null +++ b/videoencoder/PVSoftMPEG4Encoder.h @@ -0,0 +1,68 @@ +#ifndef __PV_SOFT_MPEG4_ENCODER__ +#define __PV_SOFT_MPEG4_ENCODER__ + +#include +#include +#include "VideoEncoderDef.h" +#include "VideoEncoderInterface.h" +#include "IntelMetadataBuffer.h" + +#include +#include +#include "SimpleSoftOMXComponent.h" +#include "mp4enc_api.h" + +class PVSoftMPEG4Encoder : IVideoEncoder { + +public: + PVSoftMPEG4Encoder(const char *name); + virtual ~PVSoftMPEG4Encoder(); + + virtual Encode_Status start(void) {return initEncoder();} + virtual void flush(void) { } + virtual Encode_Status stop(void) {return releaseEncoder();} + virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout); + + virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout); + + virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams); + virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams); + virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig) {return ENCODE_SUCCESS;} + virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig) {return ENCODE_SUCCESS;} + virtual Encode_Status getMaxOutSize(uint32_t *maxSize) {return ENCODE_SUCCESS;} + +private: + void setDefaultParams(void); + VideoParamsCommon mComParams; + + MP4EncodingMode mEncodeMode; + int32_t mVideoWidth; + int32_t mVideoHeight; + int32_t mVideoFrameRate; + int32_t mVideoBitRate; + int32_t mVideoColorFormat; + bool mStoreMetaDataInBuffers; + int32_t mIDRFrameRefreshIntervalInSec; + + int64_t mNumInputFrames; + bool mStarted; + bool mSawInputEOS; + bool mSignalledError; + int64_t mCurTimestampUs; + int64_t mLastTimestampUs; + + tagvideoEncControls *mHandle; + tagvideoEncOptions *mEncParams; + uint8_t *mInputFrameData; + uint8_t *mTrimedInputData; + uint8_t mVolHeader[256]; + int32_t mVolHeaderLength; + + Encode_Status initEncParams(); + Encode_Status initEncoder(); + Encode_Status releaseEncoder(); + + DISALLOW_EVIL_CONSTRUCTORS(PVSoftMPEG4Encoder); +}; + +#endif diff --git a/videoencoder/VideoEncoderHost.cpp 
b/videoencoder/VideoEncoderHost.cpp index bd9cec7..f636d9c 100644 --- a/videoencoder/VideoEncoderHost.cpp +++ b/videoencoder/VideoEncoderHost.cpp @@ -10,6 +10,9 @@ #include "VideoEncoderH263.h" #include "VideoEncoderAVC.h" #include "VideoEncoderVP8.h" +#ifndef IMG_GFX +#include "PVSoftMPEG4Encoder.h" +#endif #include "VideoEncoderHost.h" #include "VideoEncoderLog.h" #include @@ -26,11 +29,19 @@ IVideoEncoder *createVideoEncoder(const char *mimeType) { VideoEncoderAVC *p = new VideoEncoderAVC(); return (IVideoEncoder *)p; } else if (strcasecmp(mimeType, "video/h263") == 0) { +#ifdef IMG_GFX VideoEncoderH263 *p = new VideoEncoderH263(); +#else + PVSoftMPEG4Encoder *p = new PVSoftMPEG4Encoder("OMX.google.h263.encoder"); +#endif return (IVideoEncoder *)p; } else if (strcasecmp(mimeType, "video/mpeg4") == 0 || strcasecmp(mimeType, "video/mp4v-es") == 0) { +#ifdef IMG_GFX VideoEncoderMP4 *p = new VideoEncoderMP4(); +#else + PVSoftMPEG4Encoder *p = new PVSoftMPEG4Encoder("OMX.google.mpeg4.encoder"); +#endif return (IVideoEncoder *)p; } else if (strcasecmp(mimeType, "video/x-webm") == 0) { VideoEncoderVP8 *p = new VideoEncoderVP8(); -- cgit v1.2.3 From fdae5caa9bbd36953aa6ba95e7748cff3a491c76 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Mon, 6 May 2013 15:53:18 +0800 Subject: libmix: optimize the logic of checking available buffer BZ: 105774 The old logic called querySurfaceRenderStatus for every allocated buffer on the output port, which is very time-consuming. Change-Id: I5ce86e2db888720a457bf08ecd64fb0a492a4a37 Signed-off-by: Dan Liang Reviewed-on: http://android.intel.com:8080/106697 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 1eb6b3d..7f1b9f8 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -501,11 +501,11 @@ bool VideoDecoderBase::checkBufferAvail(void) { for (int32_t i = 0; i < mNumSurfaces; i++) { buffer = mSurfaceBuffers + i; - querySurfaceRenderStatus(buffer); if (buffer->asReferernce == false && - buffer->renderBuffer.renderDone == true && - buffer->renderBuffer.driverRenderDone == true) { - return true; + buffer->renderBuffer.renderDone == true) { + querySurfaceRenderStatus(buffer); + if (buffer->renderBuffer.driverRenderDone == true) + return true; } } return false; -- cgit v1.2.3 From 881a19522487b77ca1b2f9fa75a45f36fd81d0ab Mon Sep 17 00:00:00 2001 From: Cheng Yao Date: Fri, 31 May 2013 16:10:15 +0800 Subject: libmix: Add missing USE_INTEL_SECURE_AVC in Makefile BZ: 112331 This compiler option is used to enable Secure AVC compilation Change-Id: I384efa703c57d0a76b561a06d60ed67adab17cd9 Signed-off-by: Cheng Yao Reviewed-on: http://android.intel.com:8080/110978 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/Android.mk | 1 + 1 file changed, 1 insertion(+) diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 1c47729..d032d70 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -20,6 +20,7 @@ LOCAL_C_INCLUDES := \ ifeq ($(USE_INTEL_SECURE_AVC),true) LOCAL_SRC_FILES += securevideo/$(TARGET_BOARD_PLATFORM)/VideoDecoderAVCSecure.cpp LOCAL_C_INCLUDES += $(LOCAL_PATH)/securevideo/$(TARGET_BOARD_PLATFORM) +LOCAL_CFLAGS += -DUSE_INTEL_SECURE_AVC endif ifeq ($(TARGET_BOARD_PLATFORM),baytrail) -- cgit
v1.2.3 From 671ff3db3b69e443088452c650c9596d2d62a268 Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Mon, 3 Jun 2013 14:51:04 +0800 Subject: VE: fix KW issue in h263 encoder BZ: 98594 Fix KW issue in PVSoftMPEG4Encoder Change-Id: I980dddc82b50fbca55bc88ad6d89866f4e3d06b7 Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/111318 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/PVSoftMPEG4Encoder.cpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/videoencoder/PVSoftMPEG4Encoder.cpp b/videoencoder/PVSoftMPEG4Encoder.cpp index 084db78..fa8ec0d 100644 --- a/videoencoder/PVSoftMPEG4Encoder.cpp +++ b/videoencoder/PVSoftMPEG4Encoder.cpp @@ -388,7 +388,10 @@ Encode_Status PVSoftMPEG4Encoder::encode(VideoEncRawBuffer *inBuffer, uint32_t t if (mNumInputFrames == 0) LOG_I("%d %d %d\n", pvinfo->width, pvinfo->height, pvinfo->lumaStride); - trimBuffer((uint8_t*)value, mTrimedInputData, pvinfo->width, pvinfo->height, pvinfo->lumaStride); + if (pvinfo != NULL) + trimBuffer((uint8_t*)value, mTrimedInputData, pvinfo->width, pvinfo->height, pvinfo->lumaStride); + else + LOG_E("failed to parse metadata info"); } else { memcpy(mTrimedInputData, inBuffer->data, (mVideoWidth * mVideoHeight * 3 ) >> 1); @@ -450,6 +453,7 @@ Encode_Status PVSoftMPEG4Encoder::getOutput(VideoEncOutputBuffer *outBuffer, uin LOG_E("Failed to encode frame or get hink track at frame %lld", mNumInputFrames); mSignalledError = true; + hintTrack.CodeType = 0; ret = ENCODE_FAIL; } LOG_I("dataLength %d\n", dataLength); -- cgit v1.2.3 From ed093d0a178a949b34fdc48ad0a7f9fc8fdb80c0 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Wed, 5 Jun 2013 16:41:17 +0800 Subject: Fix one VC1 clip playback issue BZ: 113609 Sometimes, the offset calculated in AsfSimpleIndexParser::seek is larger than the predefined threshold. In this situation, the targetTime will be 0. This is the root cause of the issue where seeking to the end leaves the progress bar at the beginning. In this fix, we do not simply return an error code. Instead, the offset is clamped to its maximum valid value. Also in this patch, the unused frameworks folder is removed.
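For clarity, the clamping behaves like the sketch below (illustrative only, not part of the patch; clampIndexOffset is a hypothetical helper, and the parameter names follow the fields used in AsfIndexParser.cpp):

    /* Clamp an out-of-range index offset to the last usable entry instead of
     * failing, so the computed targetTime stays near the requested seek point. */
    static uint32_t clampIndexOffset(uint32_t offset, uint32_t indexEntriesCount,
                                     uint32_t indexSize, uint32_t entrySize) {
        if (offset >= indexEntriesCount)
            offset = indexEntriesCount;         /* mirrors the hunk below */
        if (entrySize * offset > indexSize - entrySize)
            offset = indexSize / entrySize - 1; /* last entry that fits */
        return offset;
    }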
Change-Id: I89ba770fc5ac0767b319845a9d9ef23fe74c3a6c Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/113487 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- asfparser/AsfIndexParser.cpp | 4 +- frameworks/asf_extractor/AsfExtractor.cpp | 763 ------------------------------ 2 files changed, 2 insertions(+), 765 deletions(-) delete mode 100644 frameworks/asf_extractor/AsfExtractor.cpp diff --git a/asfparser/AsfIndexParser.cpp b/asfparser/AsfIndexParser.cpp index e80f529..0dc8130 100644 --- a/asfparser/AsfIndexParser.cpp +++ b/asfparser/AsfIndexParser.cpp @@ -99,11 +99,11 @@ int AsfSimpleIndexParser::seek( } if (offset >= mIndexInfo->indexEntriesCount) { - return ASF_PARSER_BAD_VALUE; + offset = mIndexInfo->indexEntriesCount; } if (INDEX_ENTRY_SIZE * offset > mIndexInfo->indexSize - INDEX_ENTRY_SIZE) { - return ASF_PARSER_BAD_VALUE; + offset = mIndexInfo->indexSize/INDEX_ENTRY_SIZE - 1; } targetTime = offset * mIndexInfo->indexEntryTimeInterval; diff --git a/frameworks/asf_extractor/AsfExtractor.cpp b/frameworks/asf_extractor/AsfExtractor.cpp deleted file mode 100644 index 9c1aff4..0000000 --- a/frameworks/asf_extractor/AsfExtractor.cpp +++ /dev/null @@ -1,763 +0,0 @@ -/************************************************************************************* - * INTEL CONFIDENTIAL - * Copyright 2011 Intel Corporation All Rights Reserved. - * The source code contained or described herein and all documents related - * to the source code ("Material") are owned by Intel Corporation or its - * suppliers or licensors. Title to the Material remains with Intel - * Corporation or its suppliers and licensors. The Material contains trade - * secrets and proprietary and confidential information of Intel or its - * suppliers and licensors. The Material is protected by worldwide copyright - * and trade secret laws and treaty provisions. No part of the Material may - * be used, copied, reproduced, modified, published, uploaded, posted, - * transmitted, distributed, or disclosed in any way without Intel's prior - * express written permission. - * - * No license under any patent, copyright, trade secret or other intellectual - * property right is granted to or conferred upon you by disclosure or delivery - * of the Materials, either expressly, by implication, inducement, estoppel or - * otherwise. Any license under such intellectual property rights must be express - * and approved by Intel in writing. - ************************************************************************************/ -/* -* Copyright (C) 2011 The Android Open Source Project -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. 
-*/ - - -//#define LOG_NDEBUG 0 -#define LOG_TAG "AsfExtractor" -#include - -#include - -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include - -#include "MetaDataExt.h" -#include "MediaBufferPool.h" -#include "AsfStreamParser.h" -#include "AsfExtractor.h" - - -namespace android { - -// The audio format tags that represent the input categories supported -// by the Windows Media Audio decoder, don't change it -enum WMAAudioFormats { - WAVE_FORMAT_MSAUDIO1 = 0x160, - WAVE_FORMAT_WMAUDIO2 = 0x161, - WAVE_FORMAT_WMAUDIO3X = 0x162, - WAVE_FORMAT_WMAUDIO_LOSSLESS = 0x163, - WAVE_FORMAT_WMAVOICE9 = 0x000A, - WAVE_FORMAT_WMAVOICE10 = 0x000B, -}; - -class ASFSource : public MediaSource { -public: - ASFSource(const sp &extractor, int trackIndex) - : mExtractor(extractor), - mTrackIndex(trackIndex) { - } - - virtual status_t start(MetaData *params = NULL) { - return OK; - } - - virtual status_t stop() { - return OK; - } - - virtual sp getFormat() { - return mExtractor->getTrackMetaData(mTrackIndex, 0); - } - - virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL) { - return mExtractor->read(mTrackIndex, buffer, options); - } - -protected: - virtual ~ASFSource() { - mExtractor = NULL; - } - -private: - sp mExtractor; - int mTrackIndex; - - ASFSource(const ASFSource &); - ASFSource &operator=(const ASFSource &); -}; - - -AsfExtractor::AsfExtractor(const sp &source) - : mDataSource(source), - mInitialized(false), - mHasIndexObject(false), - mFirstTrack(NULL), - mLastTrack(NULL), - mReadLock(), - mFileMetaData(new MetaData), - mParser(NULL), - mHeaderObjectSize(0), - mDataObjectSize(0), - mDataPacketBeginOffset(0), - mDataPacketEndOffset(0), - mDataPacketCurrentOffset(0), - mDataPacketSize(0), - mDataPacketData(NULL) { - mParser = new AsfStreamParser; -} - -AsfExtractor::~AsfExtractor() { - uninitialize(); - mDataSource = NULL; - mFileMetaData = NULL; - delete mParser; - mParser = NULL; -} - -sp AsfExtractor::getMetaData() { - status_t err = initialize(); - if (err != OK) { - return new MetaData; - } - - return mFileMetaData; -} - -size_t AsfExtractor::countTracks() { - status_t err = initialize(); - if (err != OK) { - return 0; - } - - size_t n = 0; - Track *track = mFirstTrack; - while (track) { - ++n; - track = track->next; - } - - ALOGV("track count is %d", n); - return n; -} - -sp AsfExtractor::getTrackMetaData(size_t index, uint32_t flags) { - status_t err = initialize(); - if (err != OK) { - return NULL; - } - - Track *track = getTrackByTrackIndex(index); - if (track == NULL) { - return NULL; - } - - // There is no thumbnail data so ignore flags: kIncludeExtensiveMetaData - return track->meta; -} - -sp AsfExtractor::getTrack(size_t index) { - status_t err; - if ((err = initialize()) != OK) { - return NULL; - } - - Track *track = getTrackByTrackIndex(index); - if (track == NULL) { - return NULL; - } - - // Assume this track is active - track->skipTrack = false; - return new ASFSource(this, index); -} - -status_t AsfExtractor::read( - int trackIndex, - MediaBuffer **buffer, - const MediaSource::ReadOptions *options) { - Track *track = getTrackByTrackIndex(trackIndex); - if (track == NULL) { - return BAD_VALUE; - } - - int64_t seekTimeUs; - MediaSource::ReadOptions::SeekMode mode; - if (!mParser->hasVideo() || (mParser->hasVideo() && mHasIndexObject)) { - if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) { - status_t err = seek_l(track, seekTimeUs, mode); - if (err != OK) { - return err; - } - } 
- } else { - ALOGW("No index object. Seek may not be supported!!!"); - } - - return read_l(track, buffer); -} - -status_t AsfExtractor::initialize() { - if (mInitialized) { - return OK; - } - - status_t status = OK; - // header object is the first mandatory object. The first 16 bytes - // is GUID of object, the following 8 bytes is size of object - if (mDataSource->readAt(16, &mHeaderObjectSize, 8) != 8) { - return ERROR_IO; - } - - uint8_t* headerObjectData = new uint8_t [mHeaderObjectSize]; - if (headerObjectData == NULL) { - return NO_MEMORY; - } - - if (mDataSource->readAt(0, headerObjectData, mHeaderObjectSize) != mHeaderObjectSize) { - return ERROR_IO; - } - status = mParser->parseHeaderObject(headerObjectData, mHeaderObjectSize); - if (status != ASF_PARSER_SUCCESS) { - ALOGE("Failed to parse header object."); - return ERROR_MALFORMED; - } - - delete [] headerObjectData; - headerObjectData = NULL; - - uint8_t dataObjectHeaderData[ASF_DATA_OBJECT_HEADER_SIZE]; - if (mDataSource->readAt(mHeaderObjectSize, dataObjectHeaderData, ASF_DATA_OBJECT_HEADER_SIZE) - != ASF_DATA_OBJECT_HEADER_SIZE) { - return ERROR_IO; - } - status = mParser->parseDataObjectHeader(dataObjectHeaderData, ASF_DATA_OBJECT_HEADER_SIZE); - if (status != ASF_PARSER_SUCCESS) { - ALOGE("Failed to parse data object header."); - return ERROR_MALFORMED; - } - - // first 16 bytes is GUID of data object - mDataObjectSize = *(uint64_t*)(dataObjectHeaderData + 16); - mDataPacketBeginOffset = mHeaderObjectSize + ASF_DATA_OBJECT_HEADER_SIZE; - mDataPacketEndOffset = mHeaderObjectSize + mDataObjectSize; - mDataPacketCurrentOffset = mDataPacketBeginOffset; - - // allocate memory for data packet - mDataPacketSize = mParser->getDataPacketSize(); - mDataPacketData = new uint8_t [mDataPacketSize]; - if (mDataPacketData == NULL) { - return NO_MEMORY; - } - - const AsfFileMediaInfo *fileMediaInfo = mParser->getFileInfo(); - if (fileMediaInfo && fileMediaInfo->seekable) { - uint64_t offset = mDataPacketEndOffset; - - // Find simple index object for time seeking. - // object header include 16 bytes of object GUID and 8 bytes of object size. - uint8_t objectHeader[24]; - int64_t objectSize; - for (;;) { - if (mDataSource->readAt(offset, objectHeader, 24) != 24) { - break; - } - - objectSize = *(int64_t *)(objectHeader + 16); - if (!AsfStreamParser::isSimpleIndexObject(objectHeader)) { - offset += objectSize; - if (objectSize == 0) { - ALOGW("WARN: The file's objectSize is zero,ingore this header."); - offset += 24; - } - continue; - } - mHasIndexObject = true; - uint8_t* indexObjectData = new uint8_t [objectSize]; - if (indexObjectData == NULL) { - // don't report as error, we just lose time seeking capability. 
- break; - } - if (mDataSource->readAt(offset, indexObjectData, objectSize) == objectSize) { - // Ignore return value - mParser->parseSimpleIndexObject(indexObjectData, objectSize); - } - delete [] indexObjectData; - break; - } - } - - if (mParser->hasVideo()) { - ALOGV("MEDIA_MIMETYPE_CONTAINER_ASF"); - mFileMetaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_ASF); - } else if (mParser->hasAudio() && mParser->getAudioInfo()->codecID >= WAVE_FORMAT_MSAUDIO1 && - mParser->getAudioInfo()->codecID <= WAVE_FORMAT_WMAUDIO_LOSSLESS) { - LOGV("MEDIA_MIMETYPE_AUDIO_WMA", mParser->getAudioInfo()->codecID); - mFileMetaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_WMA); - } else { - ALOGE("Content does not have neither audio nor video."); - return ERROR_UNSUPPORTED; - } - - // duration returned from parser is in 100-nanosecond unit, converted it to microseconds (us) - ALOGV("Duration is %.2f (sec)", mParser->getDuration()/1E7); - mFileMetaData->setInt64(kKeyDuration, mParser->getDuration() / SCALE_100_NANOSEC_TO_USEC); - - setupTracks(); - mInitialized = true; - return OK; -} - -void AsfExtractor::uninitialize() { - if (mDataPacketData) { - delete [] mDataPacketData; - mDataPacketData = NULL; - } - mDataPacketSize = 0; - - Track* track = mFirstTrack; - MediaBuffer* p; - while (track != NULL) { - track->meta = NULL; - if (track->bufferActive) { - track->bufferActive->release(); - track->bufferActive = NULL; - } - - int size = track->bufferQueue.size(); - for (int i = 0; i < size; i++) { - p = track->bufferQueue.editItemAt(i); - p->release(); - } - - track->bufferQueue.clear(); - delete track->bufferPool; - - track->meta = NULL; - mFirstTrack = track->next; - delete track; - track = mFirstTrack; - } - mFirstTrack = NULL; - mLastTrack = NULL; -} - -static const char* FourCC2MIME(uint32_t fourcc) { - // The first charater of FOURCC characters appears in the least-significant byte - // WVC1 => 0x31435657 - switch (fourcc) { - //case FOURCC('W', 'M', 'V', '1'): - //case FOURCC('W', 'M', 'V', '2'): - //case FOURCC('W', 'M', 'V', 'A'): - case FOURCC('1', 'V', 'M', 'W'): - ALOGW("WMV1 format is not supported."); - return "video/wmv1"; - case FOURCC('2', 'V', 'M', 'W'): - ALOGW("WMV2 format is not supported."); - return "video/wmv2"; - case FOURCC('A', 'V', 'M', 'W'): - ALOGW("WMV Advanced profile, assuming as WVC1 for now"); - return MEDIA_MIMETYPE_VIDEO_WMV; - //case FOURCC('W', 'M', 'V', '3'): - //case FOURCC('W', 'V', 'C', '1'): - case FOURCC('3', 'V', 'M', 'W'): - case FOURCC('1', 'C', 'V', 'W'): - return MEDIA_MIMETYPE_VIDEO_WMV; - default: - ALOGE("Unknown video format."); - return "video/unknown-type"; - } -} - -static const char* CodecID2MIME(uint32_t codecID) { - switch (codecID) { - // WMA version 1 - case WAVE_FORMAT_MSAUDIO1: - // WMA version 2 (7, 8, 9 series) - case WAVE_FORMAT_WMAUDIO2: - return MEDIA_MIMETYPE_AUDIO_WMA; - // WMA 9 lossless - case WAVE_FORMAT_WMAUDIO_LOSSLESS: - //return MEDIA_MIMETYPE_AUDIO_WMA_LOSSLESS; - return MEDIA_MIMETYPE_AUDIO_WMA; - // WMA voice 9 - case WAVE_FORMAT_WMAVOICE9: - // WMA voice 10 - case WAVE_FORMAT_WMAVOICE10: - ALOGW("WMA voice 9/10 is not supported."); - return "audio/wma-voice"; - default: - ALOGE("Unsupported Audio codec ID: %#x", codecID); - return "audio/unknown-type"; - } -} - - -status_t AsfExtractor::setupTracks() { - AsfAudioStreamInfo* audioInfo = mParser->getAudioInfo(); - AsfVideoStreamInfo* videoInfo = mParser->getVideoInfo(); - Track* track; - while (audioInfo || videoInfo) { - track = new Track; - if (mLastTrack == NULL) { - 
mFirstTrack = track; - mLastTrack = track; - } else { - mLastTrack->next = track; - mLastTrack = track; - } - - // this flag will be set to false within getTrack - track->skipTrack = true; - track->seekCompleted = false; - track->next = NULL; - track->meta = new MetaData; - track->bufferActive = NULL; - track->bufferPool = new MediaBufferPool; - - if (audioInfo) { - LOGV("streamNumber = %d\n, encryptedContentFlag= %d\n, timeOffset = %lld\n, - codecID = %d\n, numChannels=%d\n, sampleRate=%d\n, avgBitRate = %d\n, - blockAlignment =%d\n, bitsPerSample=%d\n, codecDataSize=%d\n", - audioInfo->streamNumber, audioInfo->encryptedContentFlag, - audioInfo->timeOffset, audioInfo->codecID, audioInfo->numChannels, - audioInfo->sampleRate, audioInfo->avgByteRate*8, audioInfo->blockAlignment, - audioInfo->bitsPerSample, audioInfo->codecDataSize); - - track->streamNumber = audioInfo->streamNumber; - track->encrypted = audioInfo->encryptedContentFlag; - track->meta->setInt32(kKeyChannelCount, audioInfo->numChannels); - track->meta->setInt32(kKeySampleRate, audioInfo->sampleRate); - track->meta->setInt32(kKeyWmaBlockAlign, audioInfo->blockAlignment); - track->meta->setInt32(kKeyBitPerSample, audioInfo->bitsPerSample); - track->meta->setInt32(kKeyBitRate, audioInfo->avgByteRate*8); - track->meta->setInt32(kKeyWmaFormatTag, audioInfo->codecID); - - if (audioInfo->codecDataSize) { - track->meta->setData( - kKeyConfigData, - kTypeConfigData, - audioInfo->codecData, - audioInfo->codecDataSize); - } - // duration returned is in 100-nanosecond unit - track->meta->setInt64(kKeyDuration, mParser->getDuration() / SCALE_100_NANOSEC_TO_USEC); - track->meta->setCString(kKeyMIMEType, CodecID2MIME(audioInfo->codecID)); - track->meta->setInt32(kKeySuggestedBufferSize, mParser->getDataPacketSize()); - audioInfo = audioInfo->next; - } else { - track->streamNumber = videoInfo->streamNumber; - track->encrypted = videoInfo->encryptedContentFlag; - track->meta->setInt32(kKeyWidth, videoInfo->width); - track->meta->setInt32(kKeyHeight, videoInfo->height); - if (videoInfo->codecDataSize) { - track->meta->setData( - kKeyConfigData, - kTypeConfigData, - videoInfo->codecData, - videoInfo->codecDataSize); - } - // duration returned is in 100-nanosecond unit - track->meta->setInt64(kKeyDuration, mParser->getDuration() / SCALE_100_NANOSEC_TO_USEC); - track->meta->setCString(kKeyMIMEType, FourCC2MIME(videoInfo->fourCC)); - int maxSize = mParser->getMaxObjectSize(); - if (maxSize == 0) { - // estimated maximum packet size. - maxSize = 10 * mParser->getDataPacketSize(); - } - track->meta->setInt32(kKeySuggestedBufferSize, maxSize); - if (mHasIndexObject) { - // set arbitary thumbnail time - track->meta->setInt64(kKeyThumbnailTime, mParser->getDuration() / (SCALE_100_NANOSEC_TO_USEC * 2)); - } else { - track->meta->setInt64(kKeyThumbnailTime, 0); - } - videoInfo = videoInfo->next; - } - } - - return OK; -} - -status_t AsfExtractor::seek_l(Track* track, int64_t seekTimeUs, MediaSource::ReadOptions::SeekMode mode) { - Mutex::Autolock lockSeek(mReadLock); - - // It is expected seeking will happen on all the tracks with the same seeking options. - // Only the first track receiving the seeking command will perform seeking and all other - // tracks just siliently ignore it. 
- - // TODO: potential problems in the following case: - // audio seek - // video read - // video seek - // video read - - if (track->seekCompleted) { - // seeking is completed through a different track - track->seekCompleted = false; - return OK; - } - - uint64_t targetSampleTimeUs = 0; - - // seek to next sync sample or previous sync sample - bool nextSync = false; - switch (mode) { - case MediaSource::ReadOptions::SEEK_NEXT_SYNC: - nextSync = true; - break; - // Always seek to the closest previous sync frame - case MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC: - case MediaSource::ReadOptions::SEEK_CLOSEST_SYNC: - - // Not supported, already seek to sync frame, so will not set kKeyTargetTime on bufferActive. - case MediaSource::ReadOptions::SEEK_CLOSEST: - default: - break; - } - - uint32_t packetNumber; - uint64_t targetTime; - // parser takes seek time in 100-nanosecond unit and returns target time in 100-nanosecond as well. - if (!mParser->seek(seekTimeUs * SCALE_100_NANOSEC_TO_USEC, nextSync, packetNumber, targetTime)) { - ALOGV("Seeking failed."); - return ERROR_END_OF_STREAM; - } - ALOGV("seek time = %.2f secs, actual time = %.2f secs", seekTimeUs/1E6, targetTime / 1E7); - - // convert to microseconds - targetSampleTimeUs = targetTime / SCALE_100_NANOSEC_TO_USEC; - mDataPacketCurrentOffset = mDataPacketBeginOffset + packetNumber * mDataPacketSize; - ALOGV("data packet offset = %lld", mDataPacketCurrentOffset); - - // flush all pending buffers on all the tracks - Track* temp = mFirstTrack; - while (temp != NULL) { - Mutex::Autolock lockTrack(temp->lock); - if (temp->bufferActive) { - temp->bufferActive->release(); - temp->bufferActive = NULL; - } - - int size = temp->bufferQueue.size(); - for (int i = 0; i < size; i++) { - MediaBuffer* buffer = temp->bufferQueue.editItemAt(i); - buffer->release(); - } - temp->bufferQueue.clear(); - - if (temp != track) { - // notify all other tracks seeking is completed. - // this flag is reset when seeking request is made on each track. - // don't set this flag on the driving track so a new seek can be made. - temp->seekCompleted = true; - } - temp = temp->next; - } - - return OK; -} - -status_t AsfExtractor::read_l(Track *track, MediaBuffer **buffer) { - status_t err = OK; - while (err == OK) { - Mutex::Autolock lock(track->lock); - if (track->bufferQueue.size() != 0) { - *buffer = track->bufferQueue[0]; - track->bufferQueue.removeAt(0); - return OK; - } - track->lock.unlock(); - - err = readPacket(); - } - ALOGE("read_l failed."); - return err; -} - -status_t AsfExtractor::readPacket() { - Mutex::Autolock lock(mReadLock); - if (mDataPacketCurrentOffset + mDataPacketSize > mDataPacketEndOffset) { - ALOGI("readPacket hits end of stream."); - return ERROR_END_OF_STREAM; - } - - if (mDataSource->readAt(mDataPacketCurrentOffset, mDataPacketData, mDataPacketSize) != - mDataPacketSize) { - return ERROR_END_OF_STREAM; - } - - // update next read position - mDataPacketCurrentOffset += mDataPacketSize; - AsfPayloadDataInfo *payloads = NULL; - int status = mParser->parseDataPacket(mDataPacketData, mDataPacketSize, &payloads); - if (status != ASF_PARSER_SUCCESS || payloads == NULL) { - ALOGE("Failed to parse data packet. 
status = %d", status); - return ERROR_END_OF_STREAM; - } - - AsfPayloadDataInfo* payload = payloads; - while (payload) { - Track* track = getTrackByStreamNumber(payload->streamNumber); - if (track == NULL || track->skipTrack) { - payload = payload->next; - continue; - } - if (payload->mediaObjectLength == payload->payloadSize || - payload->offsetIntoMediaObject == 0) { - // a comple object or the first payload of fragmented object - MediaBuffer *buffer = NULL; - status = track->bufferPool->acquire_buffer( - payload->mediaObjectLength, &buffer); - if (status != OK) { - ALOGE("Failed to acquire buffer."); - mParser->releasePayloadDataInfo(payloads); - return status; - } - memcpy(buffer->data(), - payload->payloadData, - payload->payloadSize); - - buffer->set_range(0, payload->mediaObjectLength); - // kKeyTime is in microsecond unit (usecs) - // presentationTime is in mililsecond unit (ms) - buffer->meta_data()->setInt64(kKeyTime,(uint64_t) payload->presentationTime * 1000); - - if (payload->keyframe) { - buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); - } - - if (payload->mediaObjectLength == payload->payloadSize) { - Mutex::Autolock lockTrack(track->lock); - // a complete object - track->bufferQueue.push(buffer); - } else { - // the first payload of a fragmented object - track->bufferActive = buffer; - if (track->encrypted) { - Mutex::Autolock lockTrack(track->lock); - MediaBuffer* copy = NULL; - track->bufferPool->acquire_buffer(payload->payloadSize, ©); - copy->meta_data()->setInt64(kKeyTime,(uint64_t) payload->presentationTime * 1000); - memcpy(copy->data(), payload->payloadData, payload->payloadSize); - copy->set_range(0, payload->payloadSize); - track->bufferQueue.push(copy); - } - } - } else { - if (track->bufferActive == NULL) { - ALOGE("Receiving corrupt or discontinuous data packet."); - payload = payload->next; - continue; - } - // TODO: check object number and buffer size!!!!!!!!!!!!!! - // the last payload or the middle payload of a fragmented object - memcpy( - (uint8_t*)track->bufferActive->data() + payload->offsetIntoMediaObject, - payload->payloadData, - payload->payloadSize); - - if (payload->offsetIntoMediaObject + payload->payloadSize == - payload->mediaObjectLength) { - // the last payload of a fragmented object - // for encrypted content, push a cloned media buffer to vector instead. 
- if (!track->encrypted) - { - Mutex::Autolock lockTrack(track->lock); - track->bufferQueue.push(track->bufferActive); - track->bufferActive = NULL; - } else { - Mutex::Autolock lockTrack(track->lock); - track->bufferActive->set_range(payload->offsetIntoMediaObject, payload->payloadSize); - track->bufferQueue.push(track->bufferActive); - track->bufferActive = NULL; - } - } else { - // middle payload of a fragmented object - if (track->encrypted) { - Mutex::Autolock lockTrack(track->lock); - MediaBuffer* copy = NULL; - int64_t keytime; - track->bufferPool->acquire_buffer(payload->payloadSize, ©); - track->bufferActive->meta_data()->findInt64(kKeyTime, &keytime); - copy->meta_data()->setInt64(kKeyTime, keytime); - memcpy(copy->data(), payload->payloadData, payload->payloadSize); - copy->set_range(0, payload->payloadSize); - track->bufferQueue.push(copy); - } - } - } - payload = payload->next; - }; - - mParser->releasePayloadDataInfo(payloads); - return OK; -} - -AsfExtractor::Track* AsfExtractor::getTrackByTrackIndex(int index) { - Track *track = mFirstTrack; - while (index > 0) { - if (track == NULL) { - return NULL; - } - - track = track->next; - --index; - } - return track; -} - -AsfExtractor::Track* AsfExtractor::getTrackByStreamNumber(int stream) { - Track *track = mFirstTrack; - while (track != NULL) { - if (track->streamNumber == stream) { - return track; - } - track = track->next; - } - return NULL; -} - -bool SniffAsf( - const sp &source, - String8 *mimeType, - float *confidence, - sp *) { - uint8_t guid[16]; - if (source->readAt(0, guid, 16) != 16) { - return false; - } - if (!AsfStreamParser::isHeaderObject(guid)) { - return false; - } - - *mimeType = MEDIA_MIMETYPE_CONTAINER_ASF; - *confidence = 0.4f; - return true; -} - -} // namespace android - -- cgit v1.2.3 From 3c7244f4d1659b7c9669bc09fa2a7065cc932c79 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Thu, 20 Jun 2013 10:32:33 +0800 Subject: libmix: add vaSyncSurface to ensure video buffer is ready before rendering. BZ: 112346 Reverted patch 100387 since vaSyncSurface will be called in libmix in any circumstances. 
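For reference, the resulting output path is roughly the following sketch (not part of the patch; the names come from VideoDecoderBase.cpp):

    /* vaSyncSurface blocks until the driver has finished all pending
     * operations on the surface, so the buffer is guaranteed to be ready
     * before it is handed back for rendering. */
    if (useGraphicBuffer)
        vaSyncSurface(mVADisplay, output->renderBuffer.surface);
    return &(output->renderBuffer);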
Change-Id: Ib908227db4a392b607d9ac250f2fd3626edee6a7 Signed-off-by: Dan Liang Reviewed-on: http://android.intel.com:8080/114528 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 7f1b9f8..ea3ce25 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -234,9 +234,8 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { mOutputTail = NULL; } vaStatus = vaSetTimestampForSurface(mVADisplay, outputByPos->renderBuffer.surface, outputByPos->renderBuffer.timeStamp); - if (mConfigBuffer.nativeWindow == NULL && useGraphicBuffer) { - vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface); - } + if (useGraphicBuffer) + vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface); return &(outputByPos->renderBuffer); } @@ -284,15 +283,8 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { //VTRACE("Output POC %d for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6); vaStatus = vaSetTimestampForSurface(mVADisplay, output->renderBuffer.surface, output->renderBuffer.timeStamp); -#ifdef LOAD_PVR_DRIVER - if (useGraphicBuffer ) { - vaSyncSurface(mVADisplay, output->renderBuffer.surface); - } -#endif - - if (mConfigBuffer.nativeWindow == NULL && useGraphicBuffer) { + if (useGraphicBuffer) vaSyncSurface(mVADisplay, output->renderBuffer.surface); - } return &(output->renderBuffer); } -- cgit v1.2.3 From 388bd385d3d9cb7bb3882ea18ad1150628a5230c Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Tue, 18 Jun 2013 16:41:22 +0800 Subject: Configure SW h263 encoder works in raw mode by default. BZ: 115782 Configure SW h263 encoder works in raw mode by default. Change-Id: I987acd8e4f95b693acad5459adbc512b4f8f090d Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/114212 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/PVSoftMPEG4Encoder.cpp | 46 +++++++++++++++++++++++++++++++++++-- videoencoder/PVSoftMPEG4Encoder.h | 39 +++++++++++++++++++++++++++++++ 2 files changed, 83 insertions(+), 2 deletions(-) diff --git a/videoencoder/PVSoftMPEG4Encoder.cpp b/videoencoder/PVSoftMPEG4Encoder.cpp index fa8ec0d..5e5bfee 100644 --- a/videoencoder/PVSoftMPEG4Encoder.cpp +++ b/videoencoder/PVSoftMPEG4Encoder.cpp @@ -1,3 +1,42 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/* INTEL CONFIDENTIAL + * Copyright (c) 2009 Intel Corporation. All rights reserved. + * + * The source code contained or described herein and all documents + * related to the source code ("Material") are owned by Intel + * Corporation or its suppliers or licensors. 
Title to the + * Material remains with Intel Corporation or its suppliers and + * licensors. The Material contains trade secrets and proprietary + * and confidential information of Intel or its suppliers and + * licensors. The Material is protected by worldwide copyright and + * trade secret laws and treaty provisions. No part of the Material + * may be used, copied, reproduced, modified, published, uploaded, + * posted, transmitted, distributed, or disclosed in any way without + * Intel's prior express written permission. + * + * No license under any patent, copyright, trade secret or other + * intellectual property right is granted to or conferred upon you + * by disclosure or delivery of the Materials, either expressly, by + * implication, inducement, estoppel or otherwise. Any license + * under such intellectual property rights must be express and + * approved by Intel in writing. + * + */ + //#define LOG_NDEBUG 0 #define LOG_TAG "PVSoftMPEG4Encoder" #include @@ -68,7 +107,7 @@ PVSoftMPEG4Encoder::PVSoftMPEG4Encoder(const char *name) mVideoFrameRate(30), mVideoBitRate(192000), mVideoColorFormat(OMX_COLOR_FormatYUV420SemiPlanar), - mStoreMetaDataInBuffers(true), + mStoreMetaDataInBuffers(false), mIDRFrameRefreshIntervalInSec(1), mNumInputFrames(-1), mStarted(false), @@ -81,13 +120,16 @@ PVSoftMPEG4Encoder::PVSoftMPEG4Encoder(const char *name) if (!strcmp(name, "OMX.google.h263.encoder")) { mEncodeMode = H263_MODE; + LOG_I("construct h263 encoder"); } else { CHECK(!strcmp(name, "OMX.google.mpeg4.encoder")); + LOG_I("construct mpeg4 encoder"); } setDefaultParams(); LOG_I("Construct PVSoftMPEG4Encoder"); + } PVSoftMPEG4Encoder::~PVSoftMPEG4Encoder() { @@ -431,7 +473,7 @@ Encode_Status PVSoftMPEG4Encoder::getOutput(VideoEncOutputBuffer *outBuffer, uin return ENCODE_SUCCESS; } - outBuffer->timeStamp = mLastTimestampUs; + outBuffer->timeStamp = mCurTimestampUs; LOG_I("info.mTimeUs %lld\n", outBuffer->timeStamp); VideoEncFrameIO vin, vout; diff --git a/videoencoder/PVSoftMPEG4Encoder.h b/videoencoder/PVSoftMPEG4Encoder.h index e7c5c30..5eecc05 100644 --- a/videoencoder/PVSoftMPEG4Encoder.h +++ b/videoencoder/PVSoftMPEG4Encoder.h @@ -1,3 +1,42 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/* INTEL CONFIDENTIAL + * Copyright (c) 2009 Intel Corporation. All rights reserved. + * + * The source code contained or described herein and all documents + * related to the source code ("Material") are owned by Intel + * Corporation or its suppliers or licensors. Title to the + * Material remains with Intel Corporation or its suppliers and + * licensors. The Material contains trade secrets and proprietary + * and confidential information of Intel or its suppliers and + * licensors. The Material is protected by worldwide copyright and + * trade secret laws and treaty provisions. 
No part of the Material + * may be used, copied, reproduced, modified, published, uploaded, + * posted, transmitted, distributed, or disclosed in any way without + * Intel's prior express written permission. + * + * No license under any patent, copyright, trade secret or other + * intellectual property right is granted to or conferred upon you + * by disclosure or delivery of the Materials, either expressly, by + * implication, inducement, estoppel or otherwise. Any license + * under such intellectual property rights must be express and + * approved by Intel in writing. + * + */ + #ifndef __PV_SOFT_MPEG4_ENCODER__ #define __PV_SOFT_MPEG4_ENCODER__ -- cgit v1.2.3 From a9f738e22a2f664eaf7821e1f299d94c1216f5f1 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Tue, 18 Jun 2013 11:37:10 +0800 Subject: libmix: Allocate video decoder's output buffers based on the real needs BZ: 116761 Change-Id: I4124a337c2f0fb9153728e8a51be5a6412e9b5a8 Signed-off-by: Dan Liang Reviewed-on: http://android.intel.com:8080/114245 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderAVC.cpp | 13 ++++++++++++- videodecoder/VideoDecoderBase.cpp | 3 +++ videodecoder/VideoDecoderBase.h | 3 ++- videodecoder/VideoDecoderDefs.h | 1 + 4 files changed, 18 insertions(+), 2 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 15328bd..5730795 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -593,7 +593,6 @@ void VideoDecoderAVC::clearAsReference(int toggle) { Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) { int32_t DPBSize = getDPBSize(data); - updateFormatInfo(data); //Use high profile for all kinds of H.264 profiles (baseline, main and high) except for constrained baseline VAProfile vaProfile = VAProfileH264High; @@ -607,6 +606,7 @@ Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) { } VideoDecoderBase::setOutputWindowSize(DPBSize); + updateFormatInfo(data); // for 1080p, limit the total surface to 19, according the hardware limitation // change the max surface number from 19->10 to workaround memory shortage @@ -677,6 +677,17 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { data->codec_data->crop_right, data->codec_data->crop_bottom); + // The number of actual buffer needed is + // outputQueue + nativewindow_owned + (diff > 0 ? diff : 1) + widi_need_max + 1(available buffer) + // while outputQueue = DPB < 8? DPB :8 + // and diff = Reference + 1 - ouputQueue + int diff = data->codec_data->num_ref_frames + 1 - mOutputWindowSize; + mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + 4 /* Owned by native window */ + + (diff > 0 ? 
diff : 1) + + 6 /* WiDi maximum needs */ + + 1; + ITRACE("actualBufferNeeded =%d", mVideoFormatInfo.actualBufferNeeded); + mVideoFormatInfo.valid = true; } diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index ea3ce25..2379eaa 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -708,6 +708,9 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { } if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){ + if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber) + return DECODE_FORMAT_CHANGE; + numSurface = mConfigBuffer.surfaceNumber; // if format has been changed in USE_NATIVE_GRAPHIC_BUFFER mode, // we can not setupVA here when the graphic buffer resolution is smaller than the resolution decoder really needs diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 2f45fec..f0c60cf 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -114,6 +114,8 @@ protected: bool mSizeChanged; // indicate whether video size is changed. bool mShowFrame; // indicate whether the decoded frame is for display + int32_t mOutputWindowSize; // indicate limit of number of outstanding frames for output + enum { // TODO: move this to vbp_loader.h VBP_INVALID = 0xFF, @@ -135,7 +137,6 @@ private: bool mRawOutput; // whether to output NV12 raw data bool mManageReference; // this should stay true for VC1/MP4 decoder, and stay false for AVC decoder. AVC handles reference frame using DPB OUTPUT_METHOD mOutputMethod; - int32_t mOutputWindowSize; // indicate limit of number of outstanding frames for output int32_t mNumSurfaces; VideoSurfaceBuffer *mSurfaceBuffers; diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index b527577..672020c 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -179,6 +179,7 @@ struct VideoFormatInfo { int32_t bitrate; int32_t framerateNom; int32_t framerateDenom; + int32_t actualBufferNeeded; VideoExtensionBuffer *ext; }; -- cgit v1.2.3 From 3d19dea0ebdf28ea67ff5f4ead62408267c67ef3 Mon Sep 17 00:00:00 2001 From: "Liu, Bolun" Date: Tue, 21 May 2013 15:59:24 +0800 Subject: Refine VP8 encode middleware BZ: 103440 Add VP8 encoder option in mix_encoder test tool; Add render slice parameter for vp8 encode; Change ref frame management for vp8 encode; Clean up unused code for vp8 encode. 
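The new slice submission follows the usual libva create-and-render pattern, roughly as in this sketch (not part of the patch; display, context, height, and isIntra stand in for the encoder's members):

    /* One slice covers the whole frame; slice_height is in macroblock rows,
     * with the frame height first rounded up to a multiple of 16. */
    VAEncSliceParameterBuffer sliceParams;
    memset(&sliceParams, 0, sizeof(sliceParams));
    sliceParams.start_row_number = 0;
    sliceParams.slice_height = ((height + 15) & ~15) / 16;
    sliceParams.slice_flags.bits.is_intra = isIntra ? 1 : 0;
    sliceParams.slice_flags.bits.disable_deblocking_filter_idc = 0;

    VABufferID sliceBuf;
    vaCreateBuffer(display, context, VAEncSliceParameterBufferType,
                   sizeof(sliceParams), 1, &sliceParams, &sliceBuf);
    vaRenderPicture(display, context, &sliceBuf, 1);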
Change-Id: If57158788baa18a71fdd57ccbcef93e5ac552f39 Signed-off-by: Liu, Bolun Reviewed-on: http://android.intel.com:8080/109160 Reviewed-by: cactus Reviewed-by: Ding, Haitao Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- test/mix_encoder.cpp | 11 +++-- videoencoder/VideoEncoderVP8.cpp | 92 +++++++++++++++++++--------------------- videoencoder/VideoEncoderVP8.h | 13 +----- 3 files changed, 54 insertions(+), 62 deletions(-) diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index 83bc76b..a9c3ebb 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -21,6 +21,7 @@ static const char *AVC_MIME_TYPE = "video/h264"; static const char *MPEG4_MIME_TYPE = "video/mpeg4"; static const char *H263_MIME_TYPE = "video/h263"; +static const char *VPX_MIME_TYPE = "video/x-webm"; static const int box_width = 128; @@ -30,7 +31,7 @@ static VideoParamsCommon gEncoderParams; static VideoParamsStoreMetaDataInBuffers gStoreMetaDataInBuffers; static VideoRateControl gRC = RATE_CONTROL_CBR; -static int gCodec = 0; //0: H264, 1: MPEG4, 2: H263 +static int gCodec = 0; //0: H264, 1: MPEG4, 2: H263 3: VPX static int gRCMode = 1; //0: NO_RC, 1: CBR, 2: VBR, 3: VCM static int gBitrate = 1280000; @@ -686,8 +687,8 @@ int CheckArgs(int argc, char* argv[]) default: printf("\n./mix_encode -c -b -r -w -h -k -g -n -m -s -f \n"); printf("\nCodec:\n"); - printf("0: H264 (default)\n1: MPEG4\n2: H263\n"); - printf("\nRate control:\n"); + printf("0: H264 (default)\n1: MPEG4\n2: H263\n3: VPX\n"); + printf("\nRate control:\n"); printf("0: NO_RC \n1: CBR (default)\n2: VBR\n3: VCM\n"); printf("\nMode:\n"); printf("0: Camera malloc (default)\n1: WiDi clone\n2: WiDi ext\n3: WiDi user\n4: Raw\n5: GrallocSource(Composer)\n6: GrallocSource(Gralloc)\n7: GrallocSource(Camera)\n"); @@ -738,6 +739,10 @@ int main(int argc, char* argv[]) case 2: codec = H263_MIME_TYPE; break; + case 3: + codec = VPX_MIME_TYPE; + break; + default: printf("Not support this type codec\n"); return 1; diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index eb1b294..06c23b2 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -65,8 +65,8 @@ Encode_Status VideoEncoderVP8::renderSequenceParams() { vp8SeqParam.hrd_buf_size = mVideoParamsVP8.hrd_buf_size; vp8SeqParam.hrd_buf_initial_fullness = mVideoParamsVP8.hrd_buf_initial_fullness; vp8SeqParam.hrd_buf_optimal_fullness = mVideoParamsVP8.hrd_buf_optimal_fullness; - memcpy(vp8SeqParam.reference_frames, mVP8InternalFrames, sizeof(mVP8InternalFrames)); - +// memcpy(vp8SeqParam.reference_frames, mVP8InternalFrames, sizeof(mVP8InternalFrames)); + memcpy(vp8SeqParam.reference_frames, mAutoRefSurfaces, sizeof(mAutoRefSurfaces) * mAutoReferenceSurfaceNum); vaStatus = vaCreateBuffer( mVADisplay, mVAContext, @@ -110,72 +110,68 @@ Encode_Status VideoEncoderVP8::renderPictureParams(EncodeTask *task) { return ret; } -Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) { +Encode_Status VideoEncoderVP8::renderSliceParams(EncodeTask *task) { - Encode_Status ret = ENCODE_SUCCESS; - LOG_V( "Begin\n"); + VAStatus vaStatus = VA_STATUS_SUCCESS; + uint32_t sliceHeight; + uint32_t sliceHeightInMB; - if (mFrameNum == 0) { - ret = renderSequenceParams(); - CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); - } + VAEncSliceParameterBuffer sliceParams; - ret = renderPictureParams(task); - CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); + LOG_V( "Begin\n\n"); - LOG_V( "End\n"); - return ret; -} + 
sliceHeight = mComParams.resolution.height; + sliceHeight += 15; + sliceHeight &= (~15); + sliceHeightInMB = sliceHeight / 16; -Encode_Status VideoEncoderVP8::start() { - Encode_Status ret = ENCODE_SUCCESS; - LOG_V( "Begin\n"); + sliceParams.start_row_number = 0; + sliceParams.slice_height = sliceHeightInMB; + sliceParams.slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0; + sliceParams.slice_flags.bits.disable_deblocking_filter_idc = 0; - ret = VideoEncoderBase::start (); - CHECK_ENCODE_STATUS_RETURN("VideoEncoderBase::start"); + LOG_V("======VP8 slice params======\n"); + LOG_I( "start_row_number = %d\n", (int) sliceParams.start_row_number); + LOG_I( "sliceHeightInMB = %d\n", (int) sliceParams.slice_height); + LOG_I( "is_intra = %d\n", (int) sliceParams.slice_flags.bits.is_intra); - uint32_t stride_aligned = 0; - uint32_t height_aligned = 0; + vaStatus = vaCreateBuffer( + mVADisplay, mVAContext, + VAEncSliceParameterBufferType, + sizeof(VAEncSliceParameterBuffer), + 1, &sliceParams, + &mSliceParamBuf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); - VASurfaceAttributeTPI attribute_tpi; - VAStatus vaStatus = VA_STATUS_SUCCESS; + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); - stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; - height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; - - attribute_tpi.size = stride_aligned * height_aligned * 3 / 2; - attribute_tpi.luma_stride = stride_aligned; - attribute_tpi.chroma_u_stride = stride_aligned; - attribute_tpi.chroma_v_stride = stride_aligned; - attribute_tpi.luma_offset = 0; - attribute_tpi.chroma_u_offset = stride_aligned * height_aligned; - attribute_tpi.chroma_v_offset = stride_aligned * height_aligned; - attribute_tpi.pixel_format = VA_FOURCC_NV12; - attribute_tpi.type = VAExternalMemoryNULL; - - vaStatus = vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned, - VA_RT_FORMAT_YUV420, VP8_INTERNAL_FRAME_LAST, mVP8InternalFrames, &attribute_tpi); - CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); LOG_V( "end\n"); - return ret; + return ENCODE_SUCCESS; } -Encode_Status VideoEncoderVP8::stop() { + +Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) { + Encode_Status ret = ENCODE_SUCCESS; - LOG_V( "Begin\n"); + LOG_V( "Begin\n"); - VAStatus vaStatus = VA_STATUS_SUCCESS; - vaStatus = vaDestroySurfaces(mVADisplay, mVP8InternalFrames, VP8_INTERNAL_FRAME_LAST); - CHECK_VA_STATUS_RETURN("vaDestroySurfaces"); + if (mFrameNum == 0) { + ret = renderSequenceParams(); + CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); + } - ret = VideoEncoderBase::stop (); - CHECK_ENCODE_STATUS_RETURN("VideoEncoderBase::stop"); + ret = renderPictureParams(task); + CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); - LOG_V( "end\n"); + ret = renderSliceParams(task); + CHECK_ENCODE_STATUS_RETURN("renderSliceParams"); + LOG_V( "End\n"); return ret; } + Encode_Status VideoEncoderVP8::derivedSetParams(VideoParamConfigSet *videoEncParams) { CHECK_NULL_RETURN_IFFAIL(videoEncParams); diff --git a/videoencoder/VideoEncoderVP8.h b/videoencoder/VideoEncoderVP8.h index 671b279..ebf2df9 100644 --- a/videoencoder/VideoEncoderVP8.h +++ b/videoencoder/VideoEncoderVP8.h @@ -19,8 +19,7 @@ public: VideoEncoderVP8(); virtual ~VideoEncoderVP8(); - virtual Encode_Status start(void); - virtual Encode_Status stop(void); + protected: virtual Encode_Status sendEncodeCommand(EncodeTask *task); @@ -34,20 +33,12 @@ protected: // Local Methods private: - 
enum { - LRF = 0, - GRF = 1, - ARF = 2, - RCF = 3, - VP8_INTERNAL_FRAME_LAST - }; - Encode_Status renderSequenceParams(); Encode_Status renderPictureParams(EncodeTask *task); + Encode_Status renderSliceParams(EncodeTask *task); VideoConfigVP8 mVideoConfigVP8; VideoParamsVP8 mVideoParamsVP8; - VASurfaceID mVP8InternalFrames[VP8_INTERNAL_FRAME_LAST]; }; #endif /* __VIDEO_ENCODER_VP8_H__ */ -- cgit v1.2.3 From ae0180bc80199df2b8947ac635e34029a6cf8d82 Mon Sep 17 00:00:00 2001 From: "liu, Bolun" Date: Mon, 27 May 2013 14:48:37 +0800 Subject: Update VP8 encode middleware BZ: 103440 Due to VIED FW parameter sensitivity, configure reasonable SPS/PPS values for VP8 encode to avoid VSP timeouts and system hangs. Remove the unused variable. Change-Id: I4835ca3fa0d96f6e0ae3218e45f10f4e272d1e92 Signed-off-by: liu, Bolun Reviewed-on: http://android.intel.com:8080/110893 Reviewed-by: cactus Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderDef.h | 1 - videoencoder/VideoEncoderVP8.cpp | 16 ++++++++-------- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 6c47e34..4f36303 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -592,7 +592,6 @@ struct VideoParamsVP8 : VideoParamConfigSet { uint32_t kf_auto; uint32_t kf_min_dist; uint32_t kf_max_dist; - uint32_t quality_setting; uint32_t min_qp; uint32_t max_qp; uint32_t rc_undershoot; diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index 06c23b2..d8d05b0 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -19,17 +19,16 @@ VideoEncoderVP8::VideoEncoderVP8() mVideoParamsVP8.profile = 0; mVideoParamsVP8.error_resilient = 0; mVideoParamsVP8.num_token_partitions = 4; - mVideoParamsVP8.kf_auto = 0; + mVideoParamsVP8.kf_auto = 1; mVideoParamsVP8.kf_min_dist = 0; - mVideoParamsVP8.kf_max_dist = 0; - mVideoParamsVP8.quality_setting = 0; - mVideoParamsVP8.min_qp = 0; - mVideoParamsVP8.max_qp = 0; + mVideoParamsVP8.kf_max_dist = 30; + mVideoParamsVP8.min_qp = 4; + mVideoParamsVP8.max_qp = 63; mVideoParamsVP8.rc_undershoot = 100; mVideoParamsVP8.rc_overshoot = 100; - mVideoParamsVP8.hrd_buf_size = 500; - mVideoParamsVP8.hrd_buf_initial_fullness = 200; - mVideoParamsVP8.hrd_buf_optimal_fullness = 200; + mVideoParamsVP8.hrd_buf_size = 6000; + mVideoParamsVP8.hrd_buf_initial_fullness = 4000; + mVideoParamsVP8.hrd_buf_optimal_fullness = 5000; mVideoConfigVP8.force_kf = 0; mVideoConfigVP8.no_ref_last = 0; @@ -94,6 +93,7 @@ Encode_Status VideoEncoderVP8::renderPictureParams(EncodeTask *task) { vp8PicParam.pic_flags.bits.no_ref_last = mVideoConfigVP8.no_ref_last; vp8PicParam.pic_flags.bits.no_ref_gf = mVideoConfigVP8.no_ref_gf; vp8PicParam.pic_flags.bits.no_ref_arf = mVideoConfigVP8.no_ref_arf; + vp8PicParam.pic_flags.value = 0; vaStatus = vaCreateBuffer( mVADisplay, mVAContext, -- cgit v1.2.3 From 1c2b1045da5ac5b72ffe7c2eb3e5c5e514ef2b45 Mon Sep 17 00:00:00 2001 From: liubolun Date: Sat, 8 Jun 2013 14:24:10 +0800 Subject: Update VP8 encode middleware in libmix layer BZ: 114083 The VP8 encode FW has special requirements on reference-frame stride alignment and buffer size: width and height should be rounded up to the nearest multiple of 64, and the buffer pointer should be 32-byte aligned.
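To make the arithmetic concrete, the requirement amounts to the following sketch (illustrative only, not part of the patch; roundUp is a hypothetical helper, and width/height are the raw frame dimensions):

    /* Round x up to the nearest multiple of align (any positive align). */
    static inline uint32_t roundUp(uint32_t x, uint32_t align) {
        return ((x + align - 1) / align) * align;
    }

    /* VP8 reference frames: pad width/height by 64, then round up to a
     * multiple of 64; this is equivalent to ((w + 64 + 63) / 64) * 64 in
     * the VideoEncoderBase.cpp hunk below. */
    uint32_t stride_aligned = roundUp(width + 64, 64);
    uint32_t height_aligned = roundUp(height + 64, 64);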
Change-Id: Ied0613f5867b6e3a4b2d05100650e09388bdeba5 Signed-off-by: liubolun Reviewed-on: http://android.intel.com:8080/112508 Reviewed-by: cactus Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index a3f5109..b88be1b 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -175,12 +175,23 @@ Encode_Status VideoEncoderBase::start() { stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; }else{ - stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; //on Merr, stride must be 64 aligned. - height_aligned = ((mComParams.resolution.height + 31) / 32 ) * 32; + if(mComParams.profile == VAProfileVP8Version0_3) + { + stride_aligned = ((mComParams.resolution.width + 64 + 63) / 64 ) * 64; //for vsp stride + height_aligned = ((mComParams.resolution.height + 64 + 63) / 64 ) * 64; + } + else + { + stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; //on Merr, stride must be 64 aligned. + height_aligned = ((mComParams.resolution.height + 31) / 32 ) * 32; + } } + if(mComParams.profile == VAProfileVP8Version0_3) + attribute_tpi.size = stride_aligned * height_aligned + stride_aligned * ((((mComParams.resolution.height + 1) / 2 + 32)+63)/64) *64;// FW need w*h + w*chrom_height + else + attribute_tpi.size = stride_aligned * height_aligned * 3 / 2; - attribute_tpi.size = stride_aligned * height_aligned * 3 / 2; attribute_tpi.luma_stride = stride_aligned; attribute_tpi.chroma_u_stride = stride_aligned; attribute_tpi.chroma_v_stride = stride_aligned; -- cgit v1.2.3 From a5de4067574816567cf7101df472e22da9b41d3d Mon Sep 17 00:00:00 2001 From: liubolun Date: Wed, 19 Jun 2013 10:46:59 +0800 Subject: Refine VP8 encode middleware. BZ: 114083 The VSP FW requires the source surface width to be aligned to 32; otherwise it may cause an FW timeout.
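To illustrate, a sketch of the stride rule this change enforces (the function names are illustrative; the widths in the comments are examples):

    #include <stdint.h>

    /* VP8 (VSP) path: the source surface stride must be 32-aligned. */
    static uint32_t vp8_src_stride(uint32_t width)
    {
        return ((width + 31) / 32) * 32;   /* e.g. 720 -> 736 */
    }

    /* Other profiles keep the Merrifield rule: 64-aligned stride. */
    static uint32_t merr_src_stride(uint32_t width)
    {
        return ((width + 63) / 64) * 64;   /* e.g. 720 -> 768 */
    }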
Change-Id: If62aeb335bc1b116c9702dcccb6d81876d0d7014 Signed-off-by: liubolun Reviewed-on: http://android.intel.com:8080/114205 Reviewed-by: cactus Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderBase.cpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index b88be1b..935db86 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1799,7 +1799,11 @@ Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { VASurfaceID surfaceId; VASurfaceAttributeTPI attribute_tpi; - uint32_t stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; + unsigned int stride_aligned; + if(mComParams.profile == VAProfileVP8Version0_3) + stride_aligned = ((mComParams.resolution.width + 31) / 32 ) * 32; + else + stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; attribute_tpi.size = stride_aligned * mComParams.resolution.height * 3 / 2; attribute_tpi.luma_stride = stride_aligned; -- cgit v1.2.3 From 27e15fb4f917ef5ebe20c5b6b31e6edce01d530a Mon Sep 17 00:00:00 2001 From: Cheng Yao Date: Mon, 17 Jun 2013 16:23:58 +0800 Subject: HW JPEG: enable HW CSC for BYT BZ: 116496 Implemented HW CSC after HW decode for BYT Change-Id: I58da183fee2e4316f526ffdf73b5db3863d169e0 Signed-off-by: Cheng Yao Reviewed-on: http://android.intel.com:8080/115240 Reviewed-by: Shi, PingX Reviewed-by: cactus Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- imagedecoder/Android.mk | 3 + imagedecoder/JPEGDecoder.c | 734 +++++++++++++++++++++++++++++++++------------ imagedecoder/JPEGDecoder.h | 14 +- 3 files changed, 553 insertions(+), 198 deletions(-) diff --git a/imagedecoder/Android.mk b/imagedecoder/Android.mk index 9bf4336..ccc6f0f 100644 --- a/imagedecoder/Android.mk +++ b/imagedecoder/Android.mk @@ -10,6 +10,7 @@ LOCAL_SRC_FILES += \ LOCAL_C_INCLUDES += \ $(LOCAL_PATH) \ + $(TOP)/external/jpeg \ $(TARGET_OUT_HEADERS)/libva LOCAL_COPY_HEADERS_TO := libjpeg_hw @@ -24,12 +25,14 @@ LOCAL_SHARED_LIBRARIES += \ libva-android \ libva \ libva-tpi \ + libhardware LOCAL_LDLIBS += -lpthread LOCAL_CFLAGS += -Wno-multichar LOCAL_CFLAGS += -DUSE_INTEL_JPEGDEC ifeq ($(JPEGDEC_USES_GEN),true) +LOCAL_C_INCLUDES += $(TARGET_OUT_HEADERS) LOCAL_CFLAGS += -DJPEGDEC_USES_GEN endif diff --git a/imagedecoder/JPEGDecoder.c b/imagedecoder/JPEGDecoder.c index 10d4d1c..6a1337b 100644 --- a/imagedecoder/JPEGDecoder.c +++ b/imagedecoder/JPEGDecoder.c @@ -23,14 +23,23 @@ * * Authors: * Nana Guo +* Yao Cheng * */ #include "va/va_tpi.h" +#include "va/va_vpp.h" +#include "va/va_drmcommon.h" #include "JPEGDecoder.h" #include "ImageDecoderTrace.h" #include "JPEGParser.h" #include +#include "jerror.h" +#include +#ifdef JPEGDEC_USES_GEN +#include "ufo/graphics.h" +#define INTEL_VPG_GRALLOC_MODULE_PERFORM_GET_BO_NAME 4 +#endif #define JPEG_MAX_SETS_HUFFMAN_TABLES 2 @@ -38,99 +47,140 @@ #define TABLE_CLASS_AC 1 #define TABLE_CLASS_NUM 2 -static int appendFile(unsigned char* _fileName, void* _buf, int _bufLen) +// for config +#define HW_DECODE_MIN_WIDTH 100 // for JPEG smaller than this, use SW decode +#define HW_DECODE_MIN_HEIGHT 100 // for JPEG smaller than this, use SW decode + +// for debug +#define DECODE_DUMP_FILE "" // no dump by default +#define YUY2_DUMP_FILE "" // no dump by default +#define RGBA_DUMP_FILE "" // no dump by default + +#define JD_CHECK(err, label) \ + if (err) { \ + ETRACE("%s::%d: failed: %d", 
__PRETTY_FUNCTION__, __LINE__, err); \ + goto label; \ + } + +#define JD_CHECK_RET(err, label, retcode) \ + if (err) { \ + status = retcode; \ + ETRACE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ + goto label; \ + } + +const char * fourcc2str(uint32_t fourcc) { - static int firstOpen = 1; - FILE * fp = NULL; - if( NULL == _buf || _bufLen <= 0 ) return (-1); - - if(firstOpen) - { - fp = fopen(_fileName, "wb"); - firstOpen = 0; - } - else - { - fp = fopen(_fileName, "ab"); - } - - if( NULL == fp ) - { - return (-1); - } - - fwrite(_buf, _bufLen, 1, fp); - - fclose(fp); - fp = NULL; - - return 0; + static char str[5]; + memset(str, 0, sizeof str); + str[0] = fourcc & 0xff; + str[1] = (fourcc >> 8 )& 0xff; + str[2] = (fourcc >> 16) & 0xff; + str[3] = (fourcc >> 24)& 0xff; + str[4] = '\0'; + return str; } -#define DUMPYUVFILE "/data/mcgdump.yuv" -int dump_yuv_image(VAImage va_image, unsigned char *pImage_Src, - float CbCr_h_sampling_factor, float CbCr_v_sampling_factor, int actW, int actH) + +// VPG supports only YUY2->RGBA, YUY2->NV12_TILED now +// needs to convert IMC3/YV16/444P to YUY2 before HW CSC +static void write_to_YUY2(uint8_t *pDst, + uint32_t dst_stride, + VAImage *pImg, + uint8_t *pSrc) { - int num_bytes, nWidth, nHeight, nAWidth, nAHeight; - unsigned char *pSrc_Y, *pSrc_UV, *pDst, *pDst_Y, *pDst_U, *pDst_V, *pSrcTmp, *pSrc_U, *pSrc_V; - int i, j; - - ITRACE("Image width = %d, Height = %d\n", va_image.width, va_image.height); - - pSrc_Y = pImage_Src; - pSrc_U = pSrc_Y + va_image.offsets[1]; - pSrc_V = pSrc_U + va_image.offsets[2]; - ITRACE("offset = %p, %p, %p\n", pSrc_Y, pSrc_U, pSrc_V); - ITRACE("offset = %d, %d, %d\n", va_image.offsets[0], va_image.offsets[1], va_image.offsets[2]); - ITRACE("pitch = %d, %d, %d\n", va_image.pitches[0], va_image.pitches[1], va_image.pitches[2]); - -// Y - nWidth = va_image.pitches[0]; - nHeight = va_image.height; - num_bytes = nWidth * nHeight; - if (NULL == (pDst_Y = (unsigned char*) malloc(num_bytes))) { - return 0; - } - for (i = 0; i < nHeight; i++) - { - memcpy(pDst_Y + i * nWidth, pSrc_Y + i * va_image.pitches[0], nWidth); - } - ITRACE(" Y (WxH) %d x %d, bytes = %d\n", nWidth, nHeight, num_bytes); - appendFile(DUMPYUVFILE, pDst_Y, nWidth*nHeight); - -//U - nWidth = va_image.pitches[0] * CbCr_h_sampling_factor; - nHeight = va_image.height * CbCr_v_sampling_factor; - num_bytes = nWidth * nHeight; - if (NULL == (pDst_U = (unsigned char*) malloc(num_bytes))) { - return 0; - } - for (i = 0; i < nHeight; i++) - { - memcpy(pDst_U + i * nWidth, pSrc_U + i * va_image.pitches[1], nWidth); - } - ITRACE(" U (WxH) %d x %d, bytes = %d\n", nWidth, nHeight, num_bytes); - appendFile(DUMPYUVFILE, pDst_U, nWidth*nHeight); - - pSrc_V = pSrc_U + nHeight * va_image.pitches[1]; - -//V - nWidth = va_image.pitches[0] * CbCr_h_sampling_factor; - nHeight = va_image.height * CbCr_v_sampling_factor; - num_bytes = nWidth * nHeight; - if (NULL == (pDst_V = (unsigned char*) malloc(num_bytes))) { - return 0; - } - for (i = 0; i < nHeight; i++) - { - memcpy(pDst_V + i * nWidth, pSrc_V + i * va_image.pitches[2], nWidth); - } - ITRACE(" V (WxH) %d x %d, bytes = %d\n", nWidth, nHeight, num_bytes); - appendFile(DUMPYUVFILE, pDst_V, nWidth*nHeight); - - if(pDst != NULL) - free(pDst); - - return 0; + uint8_t *pY, *pU, *pV; + float h_samp_factor, v_samp_factor; + int row, col; + switch (pImg->format.fourcc) { + case VA_FOURCC_IMC3: + h_samp_factor = 0.5; + v_samp_factor = 0.5; + break; + case VA_FOURCC_422H: + h_samp_factor = 0.5; + v_samp_factor = 1; + break; + 
case VA_FOURCC_444P: + h_samp_factor = 1; + v_samp_factor = 1; + break; + default: + // non-supported + ETRACE("%s to YUY2: Not-supported input YUV format", fourcc2str(pImg->format.fourcc)); + return; + } + pY = pSrc + pImg->offsets[0]; + pU = pSrc + pImg->offsets[1]; + pV = pSrc + pImg->offsets[2]; + for (row = 0; row < pImg->height; ++row) { + for (col = 0; col < pImg->width; ++col) { + // Y + *(pDst + 2 * col) = *(pY + col); + uint32_t actual_col = h_samp_factor * col; + if (col % 2 == 1) { + // U + *(pDst + 2 * col + 1) = *(pU + actual_col); + } + else { + // V + *(pDst + 2 * col + 1) = *(pV + actual_col); + } + } + pDst += dst_stride * 2; + pY += pImg->pitches[0]; + uint32_t actual_row = row * v_samp_factor; + pU = pSrc + pImg->offsets[1] + actual_row * pImg->pitches[1]; + pV = pSrc + pImg->offsets[2] + actual_row * pImg->pitches[2]; + } +} + +static void write_to_file(char *file, VAImage *pImg, uint8_t *pSrc) +{ + FILE *fp = fopen(file, "wb"); + if (!fp) { + return; + } + uint8_t *pY, *pU, *pV; + float h_samp_factor, v_samp_factor; + int row, col; + ITRACE("Dumping decoded YUV to %s", file); + switch (pImg->format.fourcc) { + case VA_FOURCC_IMC3: + h_samp_factor = 0.5; + v_samp_factor = 0.5; + break; + case VA_FOURCC_422H: + h_samp_factor = 0.5; + v_samp_factor = 1; + break; + case VA_FOURCC_444P: + h_samp_factor = 1; + v_samp_factor = 1; + break; + default: + // non-supported + ETRACE("%s to YUY2: Not-supported input YUV format", fourcc2str(pImg->format.fourcc)); + return; + } + pY = pSrc + pImg->offsets[0]; + pU = pSrc + pImg->offsets[1]; + pV = pSrc + pImg->offsets[2]; + // Y + for (row = 0; row < pImg->height; ++row) { + fwrite(pY, 1, pImg->width, fp); + pY += pImg->pitches[0]; + } + // U + for (row = 0; row < pImg->height * v_samp_factor; ++row) { + fwrite(pU, 1, pImg->width * h_samp_factor, fp); + pU += pImg->pitches[1]; + } + // V + for (row = 0; row < pImg->height * v_samp_factor; ++row) { + fwrite(pV, 1, pImg->width * h_samp_factor, fp); + pV += pImg->pitches[2]; + } + fclose(fp); } /* @@ -237,10 +287,333 @@ void jdva_deinitialize (jd_libva_struct * jd_libva_ptr) { } jd_libva_ptr->initialized = FALSE; + ITRACE("jdva_deinitialize finished"); return; } -unsigned int jdva_get_surface_format(jd_libva_struct * jd_libva_ptr, VASurfaceAttrib * fourcc) { +static Decode_Status doColorConversion(jd_libva_struct *jd_libva_ptr, VASurfaceID surface, char ** buf, uint32_t rows) +{ +#if 0 // dump RGB to file + uint8_t* rgb_buf; + int32_t data_len = 0; + uint32_t surface_width, surface_height; + surface_width = (( ( jd_libva_ptr->image_width + 7 ) & ( ~7 )) + 15 ) & ( ~15 ); + surface_height = (( ( jd_libva_ptr->image_height + 7 ) & ( ~7 )) + 15 ) & ( ~15 ); + + rgb_buf = (uint8_t*) malloc((surface_width * surface_height) << 2); + if(rgb_buf == NULL){ + return DECODE_MEMORY_FAIL; + } + va_status = vaPutSurfaceBuf(jd_libva_ptr->va_display, jd_libva_ptr->va_surfaces[0], rgb_buf, &data_len, 0, 0, surface_width, surface_height, 0, 0, surface_width, surface_height, NULL, 0, 0); + + buf = rgb_buf; + // dump RGB data + { + FILE *pf_tmp = fopen("img_out.rgb", "wb"); + if(pf_tmp == NULL) + ETRACE("Open file error"); + fwrite(rgb_buf, 1, surface_width * surface_height * 4, pf_tmp); + fclose(pf_tmp); + } +#endif + +#ifdef JPEGDEC_USES_GEN + VAImage decoded_img; + uint8_t *decoded_buf = NULL; + int row, col; + VAStatus vpp_status; + uint8_t *pSrc, *pDst; + VADisplay display = NULL; + VAContextID context = VA_INVALID_ID; + VASurfaceID dst_surface = VA_INVALID_ID, src_surface = VA_INVALID_ID; + 
VAConfigID config = VA_INVALID_ID; + VAConfigAttrib vpp_attrib; + VASurfaceAttrib dst_surf_attrib, src_surf_attrib; + buffer_handle_t dst_handle, src_handle; + int32_t dst_stride, src_stride; + uint32_t dst_buf_name, src_buf_name; + VAProcPipelineParameterBuffer vpp_param; + VABufferID vpp_pipeline_buf = VA_INVALID_ID; + int major_version, minor_version; + uint8_t *src_gralloc_buf, *dst_gralloc_buf; + hw_module_t const* module = NULL; + alloc_device_t *alloc_dev = NULL; + struct gralloc_module_t *gralloc_module = NULL; + VAProcPipelineCaps vpp_pipeline_cap ; + VARectangle src_rect, dst_rect; + int err; + Display vppdpy; + FILE *fp; + Decode_Status status = DECODE_SUCCESS; + VASurfaceAttribExternalBuffers vaSurfaceExternBufIn, vaSurfaceExternBufOut; + + decoded_img.image_id = VA_INVALID_ID; + vpp_status = vaDeriveImage(jd_libva_ptr->va_display, surface, &decoded_img); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + vpp_status = vaMapBuffer(jd_libva_ptr->va_display, decoded_img.buf, (void **)&decoded_buf); + if (vpp_status) { + vaDestroyImage(jd_libva_ptr->va_display, decoded_img.image_id); + } + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + write_to_file(DECODE_DUMP_FILE, &decoded_img, decoded_buf); + + ITRACE("Start HW CSC: color %s=>RGBA8888", fourcc2str(jd_libva_ptr->fourcc)); + err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + gralloc_module = (struct gralloc_module_t *)module; + err = gralloc_open(module, &alloc_dev); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + err = alloc_dev->alloc(alloc_dev, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + HAL_PIXEL_FORMAT_YCbCr_422_I, // YUY2 + GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_WRITE_MASK | GRALLOC_USAGE_SW_READ_MASK, + (buffer_handle_t *)&src_handle, + &src_stride); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + ITRACE("src_gralloc_buf: handle=%u, stride=%u", src_handle, src_stride); + err = alloc_dev->alloc(alloc_dev, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + HAL_PIXEL_FORMAT_RGBA_8888, + GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_WRITE_MASK | GRALLOC_USAGE_SW_READ_MASK, + (buffer_handle_t *)&dst_handle, + &dst_stride); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + ITRACE("dst_gralloc_buf: handle=%u, stride=%u", dst_handle, dst_stride); + + err = gralloc_module->perform(gralloc_module, INTEL_VPG_GRALLOC_MODULE_PERFORM_GET_BO_NAME, src_handle, &src_buf_name); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + err = gralloc_module->perform(gralloc_module, INTEL_VPG_GRALLOC_MODULE_PERFORM_GET_BO_NAME, dst_handle, &dst_buf_name); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + // copy decoded buf into the gralloc buf in YUY2 format + err = gralloc_module->lock(gralloc_module, src_handle, + GRALLOC_USAGE_SW_WRITE_MASK, + 0, 0, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + &src_gralloc_buf); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + ITRACE("Convert %s buf into YUY2:", fourcc2str(jd_libva_ptr->fourcc)); + + write_to_YUY2(src_gralloc_buf, + src_stride, + &decoded_img, + decoded_buf); + err = gralloc_module->unlock(gralloc_module, src_handle); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + fp = fopen(YUY2_DUMP_FILE, "wb"); + if (fp) { + ITRACE("DUMP YUY2 to " YUY2_DUMP_FILE); + err = gralloc_module->lock(gralloc_module, src_handle, + GRALLOC_USAGE_SW_READ_MASK, 
+ 0, 0, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + &src_gralloc_buf); + unsigned char *pYUV = src_gralloc_buf; + int loop; + for(loop=0;loopimage_height;loop++) + { + fwrite(pYUV, 2, jd_libva_ptr->image_width, fp); + pYUV += 2 * src_stride; + } + gralloc_module->unlock(gralloc_module, src_handle); + fclose(fp); + } + + vaUnmapBuffer(jd_libva_ptr->va_display, decoded_img.buf); + vaDestroyImage(jd_libva_ptr->va_display, decoded_img.image_id); + decoded_buf= NULL; + + display = vaGetDisplay (&vppdpy); + vpp_status = vaInitialize(display, &major_version, &minor_version); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + vpp_attrib.type = VAConfigAttribRTFormat; + vpp_attrib.value = VA_RT_FORMAT_YUV420; + vpp_status = vaCreateConfig(display, + VAProfileNone, + VAEntrypointVideoProc, + &vpp_attrib, + 1, + &config); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + vpp_status = vaCreateContext(display, + config, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + 0, + NULL, + 0, + &context); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + vaSurfaceExternBufIn.pixel_format = VA_FOURCC_YUY2; + vaSurfaceExternBufIn.width = jd_libva_ptr->image_width; + vaSurfaceExternBufIn.height = jd_libva_ptr->image_height; + vaSurfaceExternBufIn.pitches[0] = src_stride * 2; // YUY2 is 16bit + vaSurfaceExternBufIn.buffers = &src_buf_name; + vaSurfaceExternBufIn.num_buffers = 1; + vaSurfaceExternBufIn.flags = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM; + src_surf_attrib.type = VASurfaceAttribExternalBufferDescriptor; + src_surf_attrib.flags = VA_SURFACE_ATTRIB_SETTABLE; + src_surf_attrib.value.type = VAGenericValueTypePointer; + src_surf_attrib.value.value.p = (void *)&vaSurfaceExternBufIn; + vpp_status = vaCreateSurfaces(display, + VA_RT_FORMAT_YUV422, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + &src_surface, + 1, + &src_surf_attrib, + 1); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + vaSurfaceExternBufOut.pixel_format = VA_FOURCC_ARGB; + vaSurfaceExternBufOut.width = jd_libva_ptr->image_width; + vaSurfaceExternBufOut.height = jd_libva_ptr->image_height; + vaSurfaceExternBufOut.pitches[0] = dst_stride * 4; // RGBA is 32bit + vaSurfaceExternBufOut.buffers = &dst_buf_name; + vaSurfaceExternBufOut.num_buffers = 1; + vaSurfaceExternBufOut.flags = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM; + dst_surf_attrib.type = VASurfaceAttribExternalBufferDescriptor; + dst_surf_attrib.flags = VA_SURFACE_ATTRIB_SETTABLE; + dst_surf_attrib.value.type = VAGenericValueTypePointer; + dst_surf_attrib.value.value.p = (void *)&vaSurfaceExternBufOut; + vpp_status = vaCreateSurfaces(display, + VA_RT_FORMAT_RGB32, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + &dst_surface, + 1, + &dst_surf_attrib, + 1); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + ITRACE("vaCreateSurfaces got surface %u=>%u", src_surface, dst_surface); + //query caps for pipeline + vpp_status = vaQueryVideoProcPipelineCaps(display, + context, + NULL, + 0, + &vpp_pipeline_cap); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + src_rect.x = dst_rect.x = 0; + src_rect.y = dst_rect.y = 0; + src_rect.width = dst_rect.width = jd_libva_ptr->image_width; + src_rect.height = dst_rect.height = jd_libva_ptr->image_height; + ITRACE("from (%d, %d, %u, %u) to (%d, %d, %u, %u)", + src_rect.x, src_rect.y, src_rect.width, src_rect.height, + dst_rect.x, dst_rect.y, dst_rect.width, dst_rect.height); + vpp_param.surface = src_surface; + vpp_param.output_region = &dst_rect; 
+ vpp_param.surface_region = &src_rect; + vpp_param.surface_color_standard = VAProcColorStandardBT601; //csc + vpp_param.output_background_color = 0x8000; //colorfill + vpp_param.output_color_standard = VAProcColorStandardNone; + vpp_param.filter_flags = VA_FRAME_PICTURE; + vpp_param.filters = NULL; + //vpp_param.pipeline_flags = 1084929476; + vpp_param.num_filters = 0; + vpp_param.forward_references = 0; + vpp_param.num_forward_references = 0; + vpp_param.backward_references = 0; + vpp_param.num_backward_references = 0; + vpp_param.blend_state = NULL; + vpp_param.rotation_state = VA_ROTATION_NONE; + vpp_status = vaCreateBuffer(display, + context, + VAProcPipelineParameterBufferType, + sizeof(VAProcPipelineParameterBuffer), + 1, + &vpp_param, + &vpp_pipeline_buf); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + vpp_status = vaBeginPicture(display, + context, + dst_surface); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + //Render the picture + vpp_status = vaRenderPicture(display, + context, + &vpp_pipeline_buf, + 1); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + vpp_status = vaEndPicture(display, context); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + vpp_status = vaSyncSurface(display, dst_surface); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + ITRACE("Finished HW CSC YUY2=>RGBA8888"); + + // gralloc lock + copy + err = gralloc_module->lock(gralloc_module, dst_handle, + GRALLOC_USAGE_SW_READ_MASK, + 0, 0, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + &dst_gralloc_buf); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + ITRACE("Copy RGBA8888 buffer (%ux%u) to skia buffer (%ux%u)", + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + buf[1] - buf[0], + rows); + pSrc = dst_gralloc_buf; + fp = fopen(RGBA_DUMP_FILE, "wb"); + if (fp) + ITRACE("dumping RGBA8888 to " RGBA_DUMP_FILE); + // FIXME: is it RGBA? or BGRA? or ARGB? 
+ for (row = 0; row < rows; ++ row) { + memcpy(buf[row], pSrc, 4 * jd_libva_ptr->image_width); + if (fp) + fwrite(pSrc, 4, jd_libva_ptr->image_width, fp); + pSrc += dst_stride * 4; + } + if (fp) + fclose(fp); + gralloc_module->unlock(gralloc_module, dst_handle); + +cleanup: + if (vpp_pipeline_buf != VA_INVALID_ID) + vaDestroyBuffer(display, vpp_pipeline_buf); + if (dst_surface != VA_INVALID_ID) + vaDestroySurfaces(display, &dst_surface, 1); + if (src_surface != VA_INVALID_ID) + vaDestroySurfaces(display, &src_surface, 1); + if (context != VA_INVALID_ID) + vaDestroyContext(display, context); + if (config != VA_INVALID_ID) + vaDestroyConfig(display, config); + if (display) + vaTerminate(display); + if (alloc_dev) { + alloc_dev->free(alloc_dev, dst_handle); + alloc_dev->free(alloc_dev, src_handle); + gralloc_close(alloc_dev); + } + return status; +#else + // TODO: CSC with Gralloc On PVR platform +#endif + + + +} + +static unsigned int getSurfaceFormat(jd_libva_struct * jd_libva_ptr, VASurfaceAttrib * fourcc) { int h1, h2, h3, v1, v2, v3; h1 = jd_libva_ptr->picture_param_buf.components[0].h_sampling_factor; h2 = jd_libva_ptr->picture_param_buf.components[1].h_sampling_factor; @@ -271,6 +644,7 @@ unsigned int jdva_get_surface_format(jd_libva_struct * jd_libva_ptr, VASurfaceAt else if (h1 == 4 && h2 == 1 && h3 == 1 && v1 == 1 && v2 == 1 && v3 == 1) { fourcc->value.value.i = VA_FOURCC_411P; + ITRACE("SurfaceFormat: 411P"); return VA_RT_FORMAT_YUV411; } else if (h1 == 1 && h2 == 1 && h3 == 1 && @@ -308,7 +682,8 @@ Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr) { } VASurfaceAttrib fourcc; - unsigned int surface_format = jdva_get_surface_format(jd_libva_ptr, &fourcc); + unsigned int surface_format = getSurfaceFormat(jd_libva_ptr, &fourcc); + jd_libva_ptr->fourcc = fourcc.value.value.i; #ifdef JPEGDEC_USES_GEN va_status = vaCreateSurfaces(jd_libva_ptr->va_display, surface_format, jd_libva_ptr->image_width, @@ -322,27 +697,22 @@ Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr) { jd_libva_ptr->va_surfaces, jd_libva_ptr->surface_count, NULL, 0); #endif - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateSurfaces failed. va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - goto cleanup; - } + JD_CHECK(va_status, cleanup); va_status = vaCreateContext(jd_libva_ptr->va_display, jd_libva_ptr->va_config, jd_libva_ptr->image_width, jd_libva_ptr->image_height, 0, //VA_PROGRESSIVE jd_libva_ptr->va_surfaces, jd_libva_ptr->surface_count, &(jd_libva_ptr->va_context)); - if (va_status != VA_STATUS_SUCCESS) { ETRACE("vaCreateContext failed. 
va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - goto cleanup; - + return DECODE_DRIVER_FAIL; } + jd_libva_ptr->resource_allocated = TRUE; return status; cleanup: + jd_libva_ptr->resource_allocated = FALSE; if (jd_libva_ptr->va_surfaces) { free (jd_libva_ptr->va_surfaces); @@ -350,7 +720,7 @@ cleanup: } jdva_deinitialize (jd_libva_ptr); - return status; + return DECODE_DRIVER_FAIL; } Decode_Status jdva_release_resource (jd_libva_struct * jd_libva_ptr) { @@ -369,12 +739,14 @@ Decode_Status jdva_release_resource (jd_libva_struct * jd_libva_ptr) { * It is safe to destroy Surface/Config/Context severl times * and it is also safe even their value is NULL */ - va_status = vaDestroySurfaces(jd_libva_ptr->va_display, jd_libva_ptr->va_surfaces, jd_libva_ptr->surface_count); + + va_status = vaDestroyContext(jd_libva_ptr->va_display, jd_libva_ptr->va_context); if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaDestroySurfaces failed. va_status = 0x%x", va_status); - return DECODE_DRIVER_FAIL; + ETRACE("vaDestroyContext failed. va_status = 0x%x", va_status); + return DECODE_DRIVER_FAIL; } + jd_libva_ptr->va_context = NULL; if (jd_libva_ptr->va_surfaces) { free (jd_libva_ptr->va_surfaces); @@ -387,32 +759,33 @@ Decode_Status jdva_release_resource (jd_libva_struct * jd_libva_ptr) { return DECODE_DRIVER_FAIL; } +cleanup: jd_libva_ptr->va_config = NULL; - va_status = vaDestroyContext(jd_libva_ptr->va_display, jd_libva_ptr->va_context); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaDestroyContext failed. va_status = 0x%x", va_status); - return DECODE_DRIVER_FAIL; - } - - jd_libva_ptr->va_context = NULL; - jd_libva_ptr->resource_allocated = FALSE; return va_status; } -Decode_Status jdva_decode (jd_libva_struct * jd_libva_ptr, uint8_t* buf) { +Decode_Status jdva_decode (j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr) { Decode_Status status = DECODE_SUCCESS; VAStatus va_status = VA_STATUS_SUCCESS; VABufferID desc_buf[5]; uint32_t bitstream_buffer_size = 0; uint32_t scan_idx = 0; uint32_t buf_idx = 0; + char **buf = jd_libva_ptr->output_image; + uint32_t lines = jd_libva_ptr->output_lines; uint32_t chopping = VA_SLICE_DATA_FLAG_ALL; - uint32_t bytes_remaining = jd_libva_ptr->eoi_offset - jd_libva_ptr->soi_offset; + uint32_t bytes_remaining; + + if (jd_libva_ptr->eoi_offset) + bytes_remaining = jd_libva_ptr->eoi_offset - jd_libva_ptr->soi_offset; + else + bytes_remaining = jd_libva_ptr->file_size - jd_libva_ptr->soi_offset; uint32_t src_offset = jd_libva_ptr->soi_offset; - bitstream_buffer_size = 1024*1024*5; + uint32_t cpy_row; + bitstream_buffer_size = cinfo->src->bytes_in_buffer;//1024*1024*5; va_status = vaBeginPicture(jd_libva_ptr->va_display, jd_libva_ptr->va_context, jd_libva_ptr->va_surfaces[0]); if (va_status != VA_STATUS_SUCCESS) { @@ -422,7 +795,7 @@ Decode_Status jdva_decode (jd_libva_struct * jd_libva_ptr, uint8_t* buf) { } va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAPictureParameterBufferType, sizeof(VAPictureParameterBufferJPEGBaseline), 1, &jd_libva_ptr->picture_param_buf, &desc_buf[buf_idx]); if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateBuffer failed. va_status = 0x%x", va_status); + ETRACE("vaCreateBuffer VAPictureParameterBufferType failed. 
va_status = 0x%x", va_status); status = DECODE_DRIVER_FAIL; return status; } @@ -430,14 +803,14 @@ Decode_Status jdva_decode (jd_libva_struct * jd_libva_ptr, uint8_t* buf) { va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAIQMatrixBufferType, sizeof(VAIQMatrixBufferJPEGBaseline), 1, &jd_libva_ptr->qmatrix_buf, &desc_buf[buf_idx]); if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateBuffer failed. va_status = 0x%x", va_status); + ETRACE("vaCreateBuffer VAIQMatrixBufferType failed. va_status = 0x%x", va_status); status = DECODE_DRIVER_FAIL; return status; } buf_idx++; va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAHuffmanTableBufferType, sizeof(VAHuffmanTableBufferJPEGBaseline), 1, &jd_libva_ptr->hufman_table_buf, &desc_buf[buf_idx]); if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateBuffer failed. va_status = 0x%x", va_status); + ETRACE("vaCreateBuffer VAHuffmanTableBufferType failed. va_status = 0x%x", va_status); status = DECODE_DRIVER_FAIL; return status; } @@ -494,7 +867,7 @@ jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset : 0; /* Get Slice Control Buffer */ va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VASliceParameterBufferType, sizeof(VASliceParameterBufferJPEGBaseline) * dest_idx, 1, dest_scan_ctrl, &desc_buf[buf_idx]); if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateBuffer failed. va_status = 0x%x", va_status); + ETRACE("vaCreateBuffer VASliceParameterBufferType failed. va_status = 0x%x", va_status); status = DECODE_DRIVER_FAIL; return status; } @@ -502,7 +875,6 @@ jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset : 0; va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VASliceDataBufferType, bytes, 1, &jd_libva_ptr->bitstream_buf[ src_offset ], &desc_buf[buf_idx]); buf_idx++; if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateBuffer failed. va_status = 0x%x", va_status); status = DECODE_DRIVER_FAIL; return status; } @@ -528,81 +900,24 @@ jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset : 0; if (va_status != VA_STATUS_SUCCESS) { WTRACE("vaSyncSurface failed. 
va_status = 0x%x", va_status); } -#if 0 - uint8_t* rgb_buf; - int32_t data_len = 0; - uint32_t surface_width, surface_height; - surface_width = (( ( jd_libva_ptr->image_width + 7 ) & ( ~7 )) + 15 ) & ( ~15 ); - surface_height = (( ( jd_libva_ptr->image_height + 7 ) & ( ~7 )) + 15 ) & ( ~15 ); - - rgb_buf = (uint8_t*) malloc((surface_width * surface_height) << 2); - if(rgb_buf == NULL){ - return DECODE_MEMORY_FAIL; - } - va_status = vaPutSurfaceBuf(jd_libva_ptr->va_display, jd_libva_ptr->va_surfaces[0], rgb_buf, &data_len, 0, 0, surface_width, surface_height, 0, 0, surface_width, surface_height, NULL, 0, 0); - - buf = rgb_buf; -// dump RGB data - { - FILE *pf_tmp = fopen("img_out.rgb", "wb"); - if(pf_tmp == NULL) - ETRACE("Open file error"); - fwrite(rgb_buf, 1, surface_width * surface_height * 4, pf_tmp); - fclose(pf_tmp); - } -#endif -#if 0 - va_status = vaDeriveImage(jd_libva_ptr->va_display, jd_libva_ptr->va_surfaces[0], &(jd_libva_ptr->surface_image)); - if (va_status != VA_STATUS_SUCCESS) { - ERREXIT1 (cinfo, JERR_VA_DRIVEIMAGE, va_status); - } - - va_status = vaMapBuffer(jd_libva_ptr->va_display, jd_libva_ptr->surface_image.buf, (void **)& (jd_libva_ptr->image_buf)); - if (va_status != VA_STATUS_SUCCESS) { - ERREXIT1 (cinfo, JERR_VA_MAPBUFFER, va_status); - } - - char fcc[5]; - fcc[0] = jd_libva_ptr->surface_image.format.fourcc & 0xff; - fcc[1] = (jd_libva_ptr->surface_image.format.fourcc >> 8 )& 0xff; - fcc[2] = (jd_libva_ptr->surface_image.format.fourcc >> 16) & 0xff; - fcc[3] = (jd_libva_ptr->surface_image.format.fourcc >> 24)& 0xff; - fcc[4] = '\0'; - ITRACE("Derived image:"); - ITRACE("\t%u bytes", jd_libva_ptr->surface_image.data_size); - ITRACE("\tfourcc='%s'", fcc); - ITRACE("\tpitches=[%u %u %u]", jd_libva_ptr->surface_image.pitches[0], jd_libva_ptr->surface_image.pitches[1], jd_libva_ptr->surface_image.pitches[2]); - ITRACE("\toffsets=[%u %u %u]", jd_libva_ptr->surface_image.offsets[0], jd_libva_ptr->surface_image.offsets[1], jd_libva_ptr->surface_image.offsets[2]); - -#ifdef JPEGDEC_USES_GEN - float CbCr_h = ((float)jd_libva_ptr->picture_param_buf.components[1].h_sampling_factor) / jd_libva_ptr->picture_param_buf.components[0].h_sampling_factor; - float CbCr_v = ((float)jd_libva_ptr->picture_param_buf.components[1].v_sampling_factor) / jd_libva_ptr->picture_param_buf.components[0].v_sampling_factor; - dump_yuv_image(jd_libva_ptr->surface_image, jd_libva_ptr->image_buf, - CbCr_h, CbCr_v, jd_libva_ptr->image_width, jd_libva_ptr->image_height); -#else - dump_yuv_image(jd_libva_ptr->surface_image, jd_libva_ptr->image_buf, - 1, 1, jd_libva_ptr->image_width, jd_libva_ptr->image_height); -#endif - va_status = vaUnmapBuffer(jd_libva_ptr->va_display, jd_libva_ptr->surface_image.buf); - if (va_status != VA_STATUS_SUCCESS) { - ERREXIT1(cinfo, JERR_VA_MAPBUFFER, va_status); - } - va_status = vaDestroyImage(jd_libva_ptr->va_display, jd_libva_ptr->surface_image.image_id); + status = doColorConversion(jd_libva_ptr, + jd_libva_ptr->va_surfaces[0], + buf, lines); - if (va_status != VA_STATUS_SUCCESS) { - ERREXIT1 (cinfo, JERR_VA_MAPBUFFER, va_status); - } -#endif + ITRACE("Successfully decoded picture"); return status; +cleanup: + return DECODE_DRIVER_FAIL; } -Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { +Decode_Status parseBitstream(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr) { uint32_t component_order = 0 ; uint32_t dqt_ind = 0; uint32_t dht_ind = 0; uint32_t scan_ind = 0; boolean frame_marker_found = FALSE; + int i; uint8_t marker = 
jd_libva_ptr->JPEGParser->getNextMarker(jd_libva_ptr->JPEGParser); @@ -680,6 +995,14 @@ Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { jd_libva_ptr->picture_param_buf.num_components = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); if (jd_libva_ptr->picture_param_buf.num_components > JPEG_MAX_COMPONENTS) { + ETRACE("ERROR: reached max components\n"); + return DECODE_PARSER_FAIL; + } + if (jd_libva_ptr->picture_param_buf.picture_height < HW_DECODE_MIN_HEIGHT + || jd_libva_ptr->picture_param_buf.picture_width < HW_DECODE_MIN_WIDTH) { + ITRACE("PERFORMANCE: %ux%u JPEG will decode faster with SW\n", + jd_libva_ptr->picture_param_buf.picture_width, + jd_libva_ptr->picture_param_buf.picture_height); return DECODE_PARSER_FAIL; } uint8_t comp_ind = 0; @@ -729,14 +1052,17 @@ Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { } uint32_t curr_byte = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); // Ss if (curr_byte != 0) { + ETRACE("ERROR: curr_byte 0x%08x != 0\n", curr_byte); return DECODE_PARSER_FAIL; } curr_byte = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); // Se if (curr_byte != 0x3f) { + ETRACE("ERROR: curr_byte 0x%08x != 0x3f\n", curr_byte); return DECODE_PARSER_FAIL; } curr_byte = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); // Ah, Al if (curr_byte != 0) { + ETRACE("ERROR: curr_byte 0x%08x != 0\n", curr_byte); return DECODE_PARSER_FAIL; } // Set slice control variables needed @@ -773,24 +1099,41 @@ Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr) { jd_libva_ptr->huffman_tables_num = dht_ind; /* The slice for the last scan should run up to the end of the picture */ - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_size = (jd_libva_ptr->eoi_offset - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_offset); - + if (jd_libva_ptr->eoi_offset) { + jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_size = (jd_libva_ptr->eoi_offset - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_offset); + } + else { + jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_size = (jd_libva_ptr->file_size - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_offset); + } // throw AppException if SOF0 isn't found if (!frame_marker_found) { ETRACE("EEORR: Reached end of bitstream while trying to parse headers\n"); return DECODE_PARSER_FAIL; } - parseTableData(jd_libva_ptr); - - return DECODE_SUCCESS; + Decode_Status status = parseTableData(cinfo, jd_libva_ptr); + if (status != DECODE_SUCCESS) { + ETRACE("ERROR: Parsing table data returns %d", status); + } + cinfo->original_image_width = jd_libva_ptr->picture_param_buf.picture_width; /* nominal image width (from SOF marker) */ + cinfo->image_width = jd_libva_ptr->picture_param_buf.picture_width; /* nominal image width (from SOF marker) */ + cinfo->image_height = jd_libva_ptr->picture_param_buf.picture_height; /* nominal image height */ + cinfo->num_components = jd_libva_ptr->picture_param_buf.num_components; /* # of color components in JPEG image */ + cinfo->jpeg_color_space = JCS_YCbCr; /* colorspace of JPEG image */ + cinfo->out_color_space = JCS_RGB; /* colorspace for output */ + cinfo->src->bytes_in_buffer = jd_libva_ptr->file_size; + + ITRACE("Successfully parsed table"); + return status; } -Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr) { +Decode_Status parseTableData(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr) { CJPEGParse* parser = (CJPEGParse*)malloc(sizeof(CJPEGParse)); - if (parser == 
NULL) + if (parser == NULL) { + ETRACE("%s ERROR: Parsing table data returns %d", __FUNCTION__, DECODE_MEMORY_FAIL); return DECODE_MEMORY_FAIL; + } parserInitialize(parser, jd_libva_ptr->bitstream_buf, jd_libva_ptr->file_size); @@ -807,6 +1150,7 @@ Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr) { uint32_t table_length = table_bytes > 64 ? 64 : table_bytes; uint32_t table_precision = table_info >> 4; if (table_precision != 0) { + ETRACE("%s ERROR: Parsing table data returns %d", __FUNCTION__, DECODE_PARSER_FAIL); return DECODE_PARSER_FAIL; } uint32_t table_id = table_info & 0xf; @@ -820,7 +1164,7 @@ Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr) { jd_libva_ptr->qmatrix_buf.quantiser_table[table_id][byte_ind] = parser->readNextByte(parser); } } else { - ETRACE("DQT table ID is not supported"); + ETRACE("%s DQT table ID is not supported", __FUNCTION__); parser->burnBytes(parser, table_length); } table_bytes -= table_length; @@ -882,7 +1226,7 @@ Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr) { }//end of else } else { // Find out the number of entries in the table - ETRACE("DHT table ID is not supported"); + ETRACE("%s DHT table ID is not supported", __FUNCTION__); uint32_t table_entries = 0; uint32_t bit_ind = 0; for(bit_ind = 0; bit_ind < 16; bit_ind++) { diff --git a/imagedecoder/JPEGDecoder.h b/imagedecoder/JPEGDecoder.h index 8e4ee17..a77db7a 100644 --- a/imagedecoder/JPEGDecoder.h +++ b/imagedecoder/JPEGDecoder.h @@ -34,6 +34,10 @@ #include //#include #include "va/va_dec_jpeg.h" +#include +#define HAVE_BOOLEAN +#include "jpeglib.h" +#include #define Display unsigned int #define BOOL int @@ -78,6 +82,9 @@ typedef struct { uint32_t rotation; CJPEGParse* JPEGParser; + char ** output_image; + uint32_t output_lines; + uint32_t fourcc; } jd_libva_struct; typedef enum { @@ -97,9 +104,10 @@ extern jd_libva_struct jd_libva; Decode_Status jdva_initialize (jd_libva_struct * jd_libva_ptr); void jdva_deinitialize (jd_libva_struct * jd_libva_ptr); -Decode_Status jdva_decode (jd_libva_struct * jd_libva_ptr, uint8_t* buf); +Decode_Status jdva_decode (j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr); Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr); Decode_Status jdva_release_resource (jd_libva_struct * jd_libva_ptr); -Decode_Status parseBitstream(jd_libva_struct * jd_libva_ptr); -Decode_Status parseTableData(jd_libva_struct * jd_libva_ptr); +Decode_Status parseBitstream(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr); +Decode_Status parseTableData(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr); + #endif -- cgit v1.2.3 From 1f4776105e4aa48c5a474d37305f64e1deb580ac Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Tue, 2 Jul 2013 11:10:43 +0800 Subject: libmix: change the type of fields in VABoolCoderContextVPX BZ: 120268 Change the type of fields in VABoolCoderContextVPX to "unsigned char". 
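For reference, a sketch of the narrowing this implies (the helper name is illustrative): the parser keeps a 32-bit bool-coder value window, while the VA-side field is now a single byte, so only the most significant byte is handed down:

    #include <stdint.h>

    /* Extract the most significant byte of the parser's 32-bit bool-coder
     * value, matching the (value >> 24) & 0xFF narrowing in the hunk below. */
    static unsigned char bool_coder_value_msb(uint32_t value)
    {
        return (unsigned char)((value >> 24) & 0xFF);
    }
    /* e.g. bool_coder_value_msb(0x8A123456) == 0x8A */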
Change-Id: I007352e4226249b982bd079d2a418f939cd3aaba Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/116512 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c index 5ac5402..72dcfa9 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vp8_parser.c @@ -428,7 +428,7 @@ static uint32_t vbp_add_pic_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *qu /* Bool coder */ pic_parms->bool_coder_ctx.range = pi->bool_coder.range; - pic_parms->bool_coder_ctx.value = pi->bool_coder.value; + pic_parms->bool_coder_ctx.value = (pi->bool_coder.value >> 24) & 0xFF; pic_parms->bool_coder_ctx.count = pi->bool_coder.count; //pic_parms->current_picture = VA_INVALID_SURFACE; -- cgit v1.2.3 From 3511cc1ffb75ef2a5db6c8e6137f83da6a6a5386 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Sun, 28 Apr 2013 11:18:18 +0800 Subject: libmix: remove fps related code as it won't be used anymore BZ: 104348 Revert patch#40781: takes fps info to set render mode Change-Id: I8c93c1f267146d5c34c2e9e0d8601ed63aae8254 Signed-off-by: Dan Liang Reviewed-on: http://android.intel.com:8080/106696 Reviewed-by: cactus Reviewed-by: Feng, Wei Reviewed-by: Shi, PingX Tested-by: Ding, Haitao Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderBase.cpp | 11 ----------- videodecoder/VideoDecoderDefs.h | 1 - 2 files changed, 12 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 2379eaa..8f95ac2 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -764,17 +764,6 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion); CHECK_VA_STATUS("vaInitialize"); - if (mConfigBuffer.frameRate > 45 && mVideoFormatInfo.height >= 1080) { - // ugly workaround here - // for fps > 45 and height > 1080, we will force to - // use surfaceTexture render mode duo to performance issue - VADisplayAttribute renderMode; - renderMode.type = VADisplayAttribRenderMode; - renderMode.value = VA_RENDER_MODE_EXTERNAL_GPU; - vaStatus = vaSetDisplayAttributes(mVADisplay, &renderMode, 1); - CHECK_VA_STATUS("vaSetDisplayAttributes"); - } - if ((int32_t)profile != VAProfileSoftwareDecoding) { //We are requesting RT attributes attrib.type = VAConfigAttribRTFormat; diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 672020c..4f44adf 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -121,7 +121,6 @@ struct VideoConfigBuffer { int32_t size; int32_t width; int32_t height; - int32_t frameRate; int32_t surfaceNumber; VAProfile profile; uint32_t flag; -- cgit v1.2.3 From cc52810d7e48f7fd6a4a1bcb84a9bf52fce117d4 Mon Sep 17 00:00:00 2001 From: jimlai Date: Mon, 17 Jun 2013 16:23:23 +0800 Subject: Fix missing NextMarker in JPEGDecoder BZ: 112609 Change-Id: I3576f8116607b94a2119a865525597aa8b1c261f Signed-off-by: Lai, Jim Reviewed-on: http://android.intel.com:8080/113751 Reviewed-by: cactus Reviewed-by: Guo, Nana N Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- imagedecoder/JPEGParser.c | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/imagedecoder/JPEGParser.c 
b/imagedecoder/JPEGParser.c index d724008..4ad13b8 100644 --- a/imagedecoder/JPEGParser.c +++ b/imagedecoder/JPEGParser.c @@ -73,8 +73,12 @@ uint8_t getNextMarker(CJPEGParse* parser) { break; } } - - return readNextByte(parser); + /* check the next byte to make sure we don't miss the real marker*/ + uint8_t tempNextByte = readNextByte(parser); + if (tempNextByte == 0xff) + return readNextByte(parser); + else + return tempNextByte; } boolean setByteOffset(CJPEGParse* parser, uint32_t byte_offset) -- cgit v1.2.3 From b210aae3846ca2dcece746b58e8d5b7bb83d265b Mon Sep 17 00:00:00 2001 From: Liu Bolun Date: Tue, 9 Jul 2013 15:24:51 +0800 Subject: Extend VideoConfigVP8 Structure to enable VP8 encode BZ: 122118 Set the required picture parameters for VP8 encode; otherwise the FW will hang. Change-Id: Ie2991c2b33a109d35943d1c2192e5fca96108210 Signed-off-by: Liu Bolun Reviewed-on: http://android.intel.com:8080/117864 Reviewed-by: Ding, Haitao Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videoencoder/VideoEncoderDef.h | 6 ++++++ videoencoder/VideoEncoderVP8.cpp | 18 +++++++++++++++--- 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 4f36303..08f9e7a 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -612,6 +612,12 @@ struct VideoConfigVP8 : VideoParamConfigSet { uint32_t no_ref_last; uint32_t no_ref_gf; uint32_t no_ref_arf; + uint32_t refresh_last; + uint32_t refresh_golden_frame; + uint32_t refresh_alternate_frame; + uint32_t refresh_entropy_probs; + uint32_t value; + unsigned char sharpness_level; VideoConfigVP8 () { type = VideoConfigTypeVP8; diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index d8d05b0..84a0746 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -32,8 +32,14 @@ VideoEncoderVP8::VideoEncoderVP8() mVideoConfigVP8.force_kf = 0; mVideoConfigVP8.no_ref_last = 0; - mVideoConfigVP8.no_ref_gf = 0; - mVideoConfigVP8.no_ref_arf = 0; + mVideoConfigVP8.no_ref_gf = 1; + mVideoConfigVP8.no_ref_arf = 1; + mVideoConfigVP8.refresh_last = 1; + mVideoConfigVP8.refresh_golden_frame = 1; + mVideoConfigVP8.refresh_alternate_frame = 1; + mVideoConfigVP8.refresh_entropy_probs = 0; + mVideoConfigVP8.value = 0; + mVideoConfigVP8.sharpness_level = 0; mComParams.profile = VAProfileVP8Version0_3; } @@ -93,7 +99,13 @@ Encode_Status VideoEncoderVP8::renderPictureParams(EncodeTask *task) { vp8PicParam.pic_flags.bits.no_ref_last = mVideoConfigVP8.no_ref_last; vp8PicParam.pic_flags.bits.no_ref_gf = mVideoConfigVP8.no_ref_gf; vp8PicParam.pic_flags.bits.no_ref_arf = mVideoConfigVP8.no_ref_arf; - vp8PicParam.pic_flags.value = 0; + vp8PicParam.pic_flags.bits.refresh_last = mVideoConfigVP8.refresh_last; + vp8PicParam.pic_flags.bits.refresh_golden_frame = mVideoConfigVP8.refresh_golden_frame; + vp8PicParam.pic_flags.bits.refresh_alternate_frame = mVideoConfigVP8.refresh_alternate_frame; + vp8PicParam.pic_flags.bits.refresh_entropy_probs = mVideoConfigVP8.refresh_entropy_probs; + vp8PicParam.pic_flags.bits.num_token_partitions = 2; +// vp8PicParam.pic_flags.value = mVideoConfigVP8.value; + vp8PicParam.sharpness_level = mVideoConfigVP8.sharpness_level; vaStatus = vaCreateBuffer( mVADisplay, mVAContext, -- cgit v1.2.3 From 89cb9693d8aa1dfeae592d08a327aa0996b82674 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Thu, 30 May 2013 17:35:32 +0800 Subject: new middleware level video encoder test tool BZ: 123149
1. Support MIX/OMXCODEC 2. Support MALLOC/VASURFACE/GFX/GRALLOC source 3. Support MP4Writer/RAWWriter 4. Support MetadataMode / RAW Mode 5. Support VASURFACE & OMXCodec cross process 6. Support MPEG4/H263/AVC Change-Id: I84127eb643c69fc673b6d91d718e2a75a5c6bdd4 Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/118587 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- test/Android.mk | 39 ++ test/mix_encoder2.cpp | 1492 +++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 1531 insertions(+) create mode 100755 test/mix_encoder2.cpp diff --git a/test/Android.mk b/test/Android.mk index e734b7b..f443a06 100644 --- a/test/Android.mk +++ b/test/Android.mk @@ -56,3 +56,42 @@ LOCAL_MODULE_TAGS := optional LOCAL_MODULE := mix_encoder include $(BUILD_EXECUTABLE) +# For mix_encoder2 +# ===================================================== + +include $(CLEAR_VARS) + +#VIDEO_ENC_LOG_ENABLE := true + +LOCAL_SRC_FILES := \ + mix_encoder2.cpp + +LOCAL_C_INCLUDES := \ + $(TARGET_OUT_HEADERS)/libva \ + $(TARGET_OUT_HEADERS)/libmix_videoencoder \ + $(TARGET_OUT_HEADERS)/pvr \ + $(TOP)/frameworks/base/include/display \ + $(TOP)/frameworks/av/media/libstagefright \ + $(TOP)/frameworks/native/include/media/openmax \ + $(LOCAL_PATH) + +LOCAL_SHARED_LIBRARIES := \ + libintelmetadatabuffer \ + libva_videoencoder \ + libva \ + libva-android \ + libva-tpi \ + libgui \ + libui \ + libutils \ + libcutils \ + libhardware \ + libbinder \ + libstagefright \ + liblog \ + libstagefright_foundation + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := mix_encoder2 + +include $(BUILD_EXECUTABLE) diff --git a/test/mix_encoder2.cpp b/test/mix_encoder2.cpp new file mode 100755 index 0000000..2fd2b1c --- /dev/null +++ b/test/mix_encoder2.cpp @@ -0,0 +1,1492 @@ +//framework +#include +#include +#include +#include +#include +#include +#include +#include +//#include + +//libmix +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include +#include +#include +#include "loadsurface.h" + +#include +#include +#include +#include + +using namespace android; + +#define ENABLE_LOG true + +#if ENABLE_LOG +//#define LOG(x, ...) printf("%s:"x, __FUNCTION__, ##__VA_ARGS__) +#define LOG printf +#else +#define LOG(...) +#endif + +enum { + kYUV420SP = 0, + kYUV420P = 1, +}; + +static const int BOX_WIDTH = 64; +static const int PRELOAD_FRAME_NUM = 16; +uint32_t gNumFramesOutput = 0; + +#define CHECK_ENC_STATUS(FUNC)\ + if (ret < ENCODE_SUCCESS) { \ + printf(FUNC" Failed. ret = 0x%08x\n", ret); \ + return UNKNOWN_ERROR; \ + } + +#define CHECK_STATUS(err)\ + if (err != OK) { \ + printf("%s:%d: Failed. 
ret = 0x%08x\n", __FUNCTION__, __LINE__, err); \ + return err; \ + } + +class DummySource : public MediaSource { + +public: + DummySource(int width, int height, int stride, int nFrames, int fps, + bool metadata, const char* yuv) + : mWidth(width), + mHeight(height), + mStride(stride), + mMaxNumFrames(nFrames), + mFrameRate(fps), + mMetadata(metadata), + mYuvfile(yuv), + mYuvhandle(NULL){ + + if (mMetadata) + mSize = 128; + else + mSize = mStride * mHeight * 3 /2; + + for(int i=0; i getFormat() { + sp meta = new MetaData; + meta->setInt32(kKeyWidth, mWidth); + meta->setInt32(kKeyHeight, mHeight); + meta->setInt32(kKeyStride, mStride); + meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420SemiPlanar); + meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); + + return meta; + } + + virtual status_t start(MetaData *params) { +// LOG("begin\n"); + gNumFramesOutput = 0; + createResource (); + + if (mYuvfile == NULL) { + //upload src data + LOG("Fill src picture width=%d, Height=%d\n", mStride, mHeight); + for(int i=0; i 0) { + ret = fread(mUsrptr[gNumFramesOutput % PRELOAD_FRAME_NUM] + mStride*mHeight*3/2 - readsize, 1, readsize, mYuvhandle); + + if (ret <= 0) { + (*buffer)->release(); + if (feof(mYuvhandle)) { + printf("Read from YUV file EOS"); + return ERROR_END_OF_STREAM; + }else + return ferror(mYuvhandle); + } + readsize -= ret; + + // LOG("loading from file, ret=%d, readsize=%d\n", ret, readsize); + } + } + + uint8_t * data; + uint32_t size; + + if (mMetadata) { + mIMB[gNumFramesOutput % PRELOAD_FRAME_NUM]->Serialize(data, size); + //LOG("use metadata mode\n"); + }else { + data = mUsrptr[gNumFramesOutput % PRELOAD_FRAME_NUM]; + size = mSize; + } + + memcpy ((*buffer)->data(), data, size); + (*buffer)->set_range(0, size); + (*buffer)->meta_data()->clear(); + (*buffer)->meta_data()->setInt64( + kKeyTime, (gNumFramesOutput * 1000000) / mFrameRate); + + ++gNumFramesOutput; + if (gNumFramesOutput % 10 ==0) + fprintf(stderr, "."); + return OK; + } + + virtual status_t createResource() { + return OK; + } + +protected: + virtual ~DummySource() { + for(int i = 0; i < PRELOAD_FRAME_NUM; i ++) + delete mIMB[i]; + } + +private: + + int YUV_generator_planar(int width, int height, + unsigned char *Y_start, int Y_pitch, + unsigned char *U_start, int U_pitch, + unsigned char *V_start, int V_pitch, + int UV_interleave) + { + static int row_shift = 0; + int row; + + /* copy Y plane */ + for (row=0;rowSetValueInfo(&vinfo); +// LOG("Malloc address=%x\n", mUsrptr[i]); + } + + return OK; + } + +private: + uint8_t* mMallocPtr[PRELOAD_FRAME_NUM]; + +}; + + +class MemHeapSource : public DummySource { +public: + + MemHeapSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv) : + DummySource (width, height, stride, nFrames, fps, mdata, yuv) { + } + + ~MemHeapSource() { +// for(int i = 0; i < PRELOAD_FRAME_NUM; i ++) + // delete mMallocPtr[i]; + } + + //malloc external memory, and not need to set into encoder before start() + status_t createResource() + { + uint32_t size = mStride * mHeight * 3 /2; + + ValueInfo vinfo; + vinfo.mode = MEM_MODE_MALLOC; + vinfo.handle = 0; + vinfo.size = size; + vinfo.width = mWidth; + vinfo.height = mHeight; + vinfo.lumaStride = mStride; + vinfo.chromStride = mStride; + vinfo.format = STRING_TO_FOURCC("NV12"); + vinfo.s3dformat = 0xFFFFFFFF; + + mHeap = new MemoryHeapBase(PRELOAD_FRAME_NUM * size); + + for(int i = 0; i < PRELOAD_FRAME_NUM; i ++) + { + mBuffers[i] = new MemoryBase(mHeap, i * size, size); + + mUsrptr[i] = (uint8_t*) 
mBuffers[i]->pointer(); + + mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t) mUsrptr[i]); + mIMB[i]->SetValueInfo(&vinfo); + LOG("MemHeap address=%x\n", mUsrptr[i]); + } + + return OK; + } + +private: + sp mHeap; + sp mBuffers[PRELOAD_FRAME_NUM]; + +}; + +extern "C" { + VAStatus vaLockSurface(VADisplay dpy, + VASurfaceID surface, + unsigned int *fourcc, + unsigned int *luma_stride, + unsigned int *chroma_u_stride, + unsigned int *chroma_v_stride, + unsigned int *luma_offset, + unsigned int *chroma_u_offset, + unsigned int *chroma_v_offset, + unsigned int *buffer_name, + void **buffer + ); + + VAStatus vaUnlockSurface(VADisplay dpy, + VASurfaceID surface + ); +} + +class VASurfaceSource : public DummySource { +public: + VASurfaceSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv) : + DummySource (width, height, stride, nFrames, fps, mdata, yuv) { + mMode = 1; + } + + virtual ~VASurfaceSource() { + vaDestroySurfaces(mVADisplay, mSurfaces, PRELOAD_FRAME_NUM); + } + + status_t createResource() + { + unsigned int display = 0; + int majorVersion = -1; + int minorVersion = -1; + VAStatus vaStatus; + + mVADisplay = vaGetDisplay(&display); + + if (mVADisplay == NULL) { + LOG("vaGetDisplay failed."); + } + + vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion); + if (vaStatus != VA_STATUS_SUCCESS) { + LOG( "Failed vaInitialize, vaStatus = %d\n", vaStatus); + } + + VASurfaceAttributeTPI attribute_tpi; + + attribute_tpi.size = mWidth * mHeight * 3 /2; + attribute_tpi.luma_stride = mWidth; + attribute_tpi.chroma_u_stride = mWidth; + attribute_tpi.chroma_v_stride = mWidth; + attribute_tpi.luma_offset = 0; + attribute_tpi.chroma_u_offset = mWidth * mHeight; + attribute_tpi.chroma_v_offset = mWidth * mHeight; + attribute_tpi.pixel_format = VA_FOURCC_NV12; + attribute_tpi.type = VAExternalMemoryNULL; + + vaStatus = vaCreateSurfacesWithAttribute(mVADisplay, mWidth, mHeight, VA_RT_FORMAT_YUV420, + PRELOAD_FRAME_NUM, mSurfaces, &attribute_tpi); + + if (vaStatus != VA_STATUS_SUCCESS) { + LOG( "Failed vaCreateSurfaces, vaStatus = %d\n", vaStatus); + } + + if (mMode == 1){ + uint32_t fourCC = 0; + uint32_t lumaStride = 0; + uint32_t chromaUStride = 0; + uint32_t chromaVStride = 0; + uint32_t lumaOffset = 0; + uint32_t chromaUOffset = 0; + uint32_t chromaVOffset = 0; + + for(int i = 0; i < PRELOAD_FRAME_NUM; i++) { + vaStatus = vaLockSurface( + mVADisplay, (VASurfaceID)mSurfaces[i], + &fourCC, &lumaStride, &chromaUStride, &chromaVStride, + &lumaOffset, &chromaUOffset, &chromaVOffset, &mKBufHandle[i], NULL); + if (vaStatus != VA_STATUS_SUCCESS) { + LOG( "Failed vaLockSurface, vaStatus = %d\n", vaStatus); + } +#if 0 + LOG("lumaStride = %d", lumaStride); + LOG("chromaUStride = %d\n", chromaUStride); + LOG("chromaVStride = %d\n", chromaVStride); + LOG("lumaOffset = %d\n", lumaOffset); + LOG("chromaUOffset = %d\n", chromaUOffset); + LOG("chromaVOffset = %d\n", chromaVOffset); + LOG("kBufHandle = 0x%08x\n", mKBufHandle[i]); + LOG("fourCC = %d\n", fourCC); +#endif + vaStatus = vaUnlockSurface(mVADisplay, (VASurfaceID)mSurfaces[i]); + + } + } + + //get usrptr for uploading src pictures + VAImage surface_image; + ValueInfo vinfo; + memset(&vinfo, 0, sizeof(ValueInfo)); + vinfo.mode = MEM_MODE_SURFACE; + vinfo.handle = (uint32_t) mVADisplay; + vinfo.size = 0; + vinfo.width = mWidth; + vinfo.height = mHeight; + vinfo.lumaStride = mStride; + vinfo.chromStride = mStride; + vinfo.format = STRING_TO_FOURCC("NV12"); + vinfo.s3dformat = 0xFFFFFFFF; + 
+ for (int i = 0; i < PRELOAD_FRAME_NUM; i++) { + vaStatus = vaDeriveImage(mVADisplay, mSurfaces[i], &surface_image); + if (vaStatus != VA_STATUS_SUCCESS) { + LOG("Failed vaDeriveImage, vaStatus = %d\n", vaStatus); + } + + vaMapBuffer(mVADisplay, surface_image.buf, (void**)&mUsrptr[i]); + if (vaStatus != VA_STATUS_SUCCESS) { + LOG("Failed vaMapBuffer, vaStatus = %d\n", vaStatus); + } + + vaUnmapBuffer(mVADisplay, surface_image.buf); + vaDestroyImage(mVADisplay, surface_image.image_id); + + + + if (mMode == 0) + mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeUser, mSurfaces[i]); + else { + mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeUser, mKBufHandle[i]); + vinfo.mode = MEM_MODE_KBUFHANDLE; + vinfo.handle = 0; + } + + mIMB[i]->SetValueInfo(&vinfo); + } + + return OK; + } + +private: + //for WiDi user mode + VADisplay mVADisplay; + VASurfaceID mSurfaces[PRELOAD_FRAME_NUM]; + + //for WiDi ext mode + uint32_t mKBufHandle[PRELOAD_FRAME_NUM]; + int mMode; +}; + + +class GfxSource : public DummySource { + +public: + GfxSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv) : + DummySource (width, height, stride, nFrames, fps, mdata, yuv) { + mColor = 0; + mWidth = ((mWidth + 15 ) / 16 ) * 16; + mHeight = ((mHeight + 15 ) / 16 ) * 16; + mStride = mWidth; + } + + virtual ~GfxSource() { + // for(int i=0; i composer(ComposerService::getComposerService()); + mGraphicBufferAlloc = composer->createGraphicBufferAlloc(); + + uint32_t usage = GraphicBuffer::USAGE_HW_TEXTURE;// | GraphicBuffer::USAGE_HW_COMPOSER; + int format = HAL_PIXEL_FORMAT_NV12; + if (mColor == 1) + format = 0x7FA00E00; // = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar in OMX_IVCommon.h + + int32_t error; + + for(int i = 0; i < PRELOAD_FRAME_NUM; i ++) + { + sp graphicBuffer( + mGraphicBufferAlloc->createGraphicBuffer( + mWidth, mHeight, format, usage, &error)); + + if (graphicBuffer.get() == NULL) { + printf("GFX createGraphicBuffer failed\n"); + return UNKNOWN_ERROR; + } + mGraphicBuffer[i] = graphicBuffer; + graphicBuffer->lock(usage | GraphicBuffer::USAGE_SW_WRITE_OFTEN |GraphicBuffer::USAGE_SW_READ_OFTEN, (void**)(&mUsrptr[i])); + + mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)mGraphicBuffer[i]->handle); + graphicBuffer->unlock(); + + IMG_native_handle_t* h = (IMG_native_handle_t*) mGraphicBuffer[i]->handle; + mStride = h->iWidth; + mHeight = h->iHeight; + +//printf("mStride=%d, height=%d, format=%x", mStride, mHeight, h->iFormat); + } + + return OK; + } + +private: + //for gfxhandle + sp mGraphicBufferAlloc; + sp mGraphicBuffer[PRELOAD_FRAME_NUM]; + + int mColor; +}; + +class GrallocSource : public DummySource { + +public: + GrallocSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv) : + DummySource (width, height, stride, nFrames, fps, mdata, yuv) { + } + + virtual ~GrallocSource () { + for(int i = 0; i < PRELOAD_FRAME_NUM; i ++) + gfx_free(mHandle[i]); + } + + status_t createResource() + { + int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER; + int format = HAL_PIXEL_FORMAT_NV12; + if (mColor == 1) + format = 0x7FA00E00; // = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar in OMX_IVCommon.h + + gfx_init(); + + for(int i = 0; i < PRELOAD_FRAME_NUM; i ++) + { + if (gfx_alloc(mWidth, mHeight, format, usage, &mHandle[i], (int32_t*)&mStride) != 0) + return UNKNOWN_ERROR; + if (gfx_lock(mHandle[i], usage | GRALLOC_USAGE_SW_WRITE_OFTEN, 0, 0, mWidth, mHeight, (void**)(&mUsrptr[i])) != 0) + return 
UNKNOWN_ERROR; + mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)mHandle[i]); + gfx_unlock(mHandle[i]); + IMG_native_handle_t* h = (IMG_native_handle_t*) mHandle[i]; + mHeight = h->iHeight; + } + + return OK; + } + +private: + void gfx_init() + { + int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &mModule); + if (err) { + LOG("FATAL: can't find the %s module", GRALLOC_HARDWARE_MODULE_ID); + return; + } + + mAllocMod = (gralloc_module_t const *)mModule; + + err = gralloc_open(mModule, &mAllocDev); + if (err) { + LOG("FATAL: gralloc open failed\n"); + } + + } + + int gfx_alloc(uint32_t w, uint32_t h, int format, + int usage, buffer_handle_t* handle, int32_t* stride) + { + int err; + + err = mAllocDev->alloc(mAllocDev, w, h, format, usage, handle, stride); + if (err) { + LOG("alloc(%u, %u, %d, %08x, ...) failed %d (%s)\n", + w, h, format, usage, err, strerror(-err)); + } + + return err; + } + + int gfx_free(buffer_handle_t handle) + { + int err; + + err = mAllocDev->free(mAllocDev, handle); + if (err) { + LOG("free(...) failed %d (%s)\n", err, strerror(-err)); + } + + return err; + } + + int gfx_lock(buffer_handle_t handle, + int usage, int left, int top, int width, int height, + void** vaddr) + { + int err; + + err = mAllocMod->lock(mAllocMod, handle, usage, + left, top, width, height, vaddr); + + if (err){ + LOG("lock(...) failed %d (%s)", err, strerror(-err)); + } + + return err; + } + + + int gfx_unlock(buffer_handle_t handle) + { + int err; + + err = mAllocMod->unlock(mAllocMod, handle); + if (err) { + LOG("unlock(...) failed %d (%s)\n", err, strerror(-err)); + } + + return err; + } + +private: + hw_module_t const *mModule; + gralloc_module_t const *mAllocMod; /* get by force hw_module_t */ + alloc_device_t *mAllocDev; /* get by gralloc_open */ + + buffer_handle_t mHandle[PRELOAD_FRAME_NUM]; + int mColor; +}; + +static const char *AVC_MIME_TYPE = "video/h264"; +static const char *MPEG4_MIME_TYPE = "video/mpeg4"; +static const char *H263_MIME_TYPE = "video/h263"; + +class MixEncoder : public MediaSource { + +public: + MixEncoder(const sp &source, const sp &meta, int rcmode, uint32_t flag) { + mFirstFrame = false; + mSrcEOS = false; + mEncoderFlag = flag; + mEncodeFrameCount = 0; + mSource = source; + + const char *mime; + bool success = meta->findCString(kKeyMIMEType, &mime); + CHECK(success); + + mCodec = mime; + if (strcmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4) == 0) { + mMixCodec = (char*) MPEG4_MIME_TYPE; + } else if (strcmp(mime, MEDIA_MIMETYPE_VIDEO_H263) == 0) { + mMixCodec = (char*) H263_MIME_TYPE; + } else { + mMixCodec = (char*) AVC_MIME_TYPE; + } + + success = meta->findInt32(kKeyWidth, &mWidth); + CHECK(success); + + success = meta->findInt32(kKeyHeight, &mHeight); + CHECK(success); + + success = meta->findInt32(kKeyFrameRate, &mFPS); + CHECK(success); + + success = meta->findInt32(kKeyBitRate, &mBitrate); + CHECK(success); + + success = meta->findInt32('itqp', &mInitQP); + CHECK(success); + + success = meta->findInt32('mnqp', &mMinQP); + CHECK(success); + + success = meta->findInt32('iapd', &mIntraPeriod); + CHECK(success); + + success = meta->findInt32('wsiz', &mWinSize); + CHECK(success); + + success = meta->findInt32('idri', &mIdrInt); + CHECK(success); + + success = meta->findInt32('difs', &mDisableFrameSkip); + CHECK(success); + +// const char *RCMODE[] = {"VBR", "CBR", "VCM", "NO_RC", NULL}; + VideoRateControl RC_MODES[] = {RATE_CONTROL_VBR, + RATE_CONTROL_CBR, + RATE_CONTROL_VCM, + RATE_CONTROL_NONE}; + + mRCMode = RC_MODES[rcmode]; + } + + 
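+    /* Editor's note -- illustrative comment, not part of the original
+     * patch: the multi-character literals read above ('itqp', 'mnqp',
+     * 'iapd', 'wsiz', 'idri', 'difs') are ad-hoc private MetaData keys;
+     * each findInt32() here pairs with an enc_meta->setInt32() call in
+     * main(). Adding another MIX-only knob only needs a new unique key
+     * on both sides, e.g. (hypothetical key and field names):
+     *
+     *   enc_meta->setInt32('mxqp', MaxQP);   // producer side, in main()
+     *   meta->findInt32('mxqp', &mMaxQP);    // consumer side, here
+     */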
virtual sp getFormat() { + sp meta = new MetaData; + meta->setInt32(kKeyWidth, mWidth); + meta->setInt32(kKeyHeight, mHeight); +// meta->setInt32(kKeyColorFormat, mColorFormat); + meta->setCString(kKeyMIMEType, mCodec); + return meta; + } + + virtual status_t start(MetaData *params) { + Encode_Status ret; + status_t err; + + //create video encoder + mVideoEncoder = createVideoEncoder(mMixCodec); + if (!mVideoEncoder) { + printf("MIX::createVideoEncoder failed\n"); + return UNKNOWN_ERROR; + } + + //set parameter + err = SetVideoEncoderParam(); + CHECK_STATUS(err); + + //start + ret = mVideoEncoder->start(); + CHECK_ENC_STATUS("MIX::Start"); + + uint32_t maxsize; + mVideoEncoder->getMaxOutSize(&maxsize); + mGroup.add_buffer(new MediaBuffer(maxsize)); + mGroup.add_buffer(new MediaBuffer(maxsize)); + mGroup.add_buffer(new MediaBuffer(maxsize)); + + return mSource->start(); + } + + virtual status_t stop() { + Encode_Status ret; + + ret = mVideoEncoder->stop(); + CHECK_ENC_STATUS("MIX::stop"); + + return OK; + } + + status_t encode(MediaBuffer* in) { + status_t err = OK; + Encode_Status ret; + + VideoEncRawBuffer InBuf; + InBuf.data = (uint8_t *) in->data() + in->range_offset(); + InBuf.size = in->range_length(); + InBuf.bufAvailable = true; + InBuf.type = FTYPE_UNKNOWN; + InBuf.flag = 0; + in->meta_data()->findInt64(kKeyTime, &InBuf.timeStamp); + InBuf.priv = (void*)in; + +#if 0 + if (mEncodeFrameCount > 1 && mEncodeFrameCount % 60 == 0){ + VideoParamConfigSet configIDRRequest; + configIDRRequest.type = VideoConfigTypeIDRRequest; + mVideoEncoder->setConfig(&configIDRRequest); + printf("MIX::encode request IDR\n"); + } +#endif + ret = mVideoEncoder->encode(&InBuf); + if (ret < ENCODE_SUCCESS) { + printf("MIX::encode failed, ret=%d\n", ret); + in->release(); + return UNKNOWN_ERROR; + } + + mEncodeFrameCount ++; + return err; + } + + status_t getoutput(MediaBuffer* out, VideoOutputFormat format) { + Encode_Status ret; + + VideoEncOutputBuffer OutBuf; + OutBuf.bufferSize = out->size() ; + OutBuf.dataSize = 0; + OutBuf.data = (uint8_t *) out->data() + out->range_offset(); + OutBuf.format = format; + OutBuf.flag = 0; + OutBuf.timeStamp = 0; + + ret = mVideoEncoder->getOutput(&OutBuf); + if (ret < ENCODE_SUCCESS) { + if ((ret == ENCODE_NO_REQUEST_DATA) && (strcmp(mMixCodec, H263_MIME_TYPE) == 0)) { + printf("H263 FrameSkip happens at Frame #%d\n", mEncodeFrameCount); + OutBuf.dataSize = 0; + } else { + printf("MIX::getOutput failed, ret=%d\n", ret); + out->release(); + return UNKNOWN_ERROR; + } + } + + out->set_range(0, OutBuf.dataSize); + out->meta_data()->clear(); + out->meta_data()->setInt64(kKeyTime, OutBuf.timeStamp); + out->meta_data()->setInt64(kKeyDecodingTime, OutBuf.timeStamp); + + bool isSync = (OutBuf.flag & ENCODE_BUFFERFLAG_SYNCFRAME); + bool isCsd = (OutBuf.flag & ENCODE_BUFFERFLAG_CODECCONFIG); + out->meta_data()->setInt32(kKeyIsSyncFrame, isSync); + out->meta_data()->setInt32(kKeyIsCodecConfig, isCsd); + + //for h263 frame skip case, ENCODE_NO_REQUEST_DATA is returned, but priv is not set. 
+ //to handle properly, need to change libmix to set priv in outbuf even in wrong case + if (format != OUTPUT_CODEC_DATA && (ret != ENCODE_NO_REQUEST_DATA)) { + MediaBuffer* in = (MediaBuffer*) OutBuf.priv; + in->release(); + } + + return OK; + } + + virtual status_t read(MediaBuffer **buffer, const MediaSource::ReadOptions *options) { + + status_t err; + Encode_Status ret; + + if (mSrcEOS) + return ERROR_END_OF_STREAM; + + //write rest data of first frame after outputting csd, only for H264/MPEG4 + if (mFirstFrame) { + err = mGroup.acquire_buffer(buffer); + CHECK_STATUS(err); + + err = getoutput(*buffer, OUTPUT_EVERYTHING); + CHECK_STATUS(err); + + mFirstFrame = false; + return OK; + } + + //input buffers + int loop=1; + if (mEncodeFrameCount == 0) + loop = 2; + + for(int i=0; iread (&src); + if (err == ERROR_END_OF_STREAM) { + LOG ("\nReach Resource EOS, still need to get final frame encoded data\n"); + mSrcEOS = true; + }else { + CHECK_STATUS(err); + + err = encode(src); + CHECK_STATUS(err); + } + } + + //output buffers + err = mGroup.acquire_buffer(buffer); + CHECK_STATUS(err); + + VideoOutputFormat format; + if (mEncodeFrameCount == 2 && (strcasecmp(mMixCodec, H263_MIME_TYPE) != 0)) { + format = OUTPUT_CODEC_DATA; + mFirstFrame = true; + }else + format = OUTPUT_EVERYTHING; + + err = getoutput(*buffer, format); + CHECK_STATUS(err); + + return OK; + } + + virtual ~MixEncoder() { + releaseVideoEncoder(mVideoEncoder); + } + +private: + + MixEncoder(const MixEncoder &); + MixEncoder &operator=(const MixEncoder &); + + status_t SetVideoEncoderParam() { + + Encode_Status ret = ENCODE_SUCCESS; + + ret = mVideoEncoder->getParameters(&mEncoderParams); + CHECK_ENC_STATUS("MIX::getParameters"); + + LOG("Set Encoding Width=%d, Height=%d\n", mWidth, mHeight); + mEncoderParams.resolution.height = mHeight; + mEncoderParams.resolution.width = mWidth; + mEncoderParams.frameRate.frameRateDenom = 1; + mEncoderParams.frameRate.frameRateNum = mFPS; + mEncoderParams.rcMode = mRCMode; + + if (strcmp(mMixCodec, MPEG4_MIME_TYPE) == 0) { + mEncoderParams.profile = (VAProfile)VAProfileMPEG4Simple; + } else if (strcmp(mMixCodec, H263_MIME_TYPE) == 0) { + mEncoderParams.profile = (VAProfile)VAProfileH263Baseline; + } else { + mEncoderParams.profile = (VAProfile)VAProfileH264Baseline; + } + + mEncoderParams.rcParams.bitRate = mBitrate; + mEncoderParams.rcParams.initQP = mInitQP; + mEncoderParams.rcParams.minQP = mMinQP; + mEncoderParams.rcParams.windowSize = mWinSize; + mEncoderParams.intraPeriod = mIntraPeriod; + + ret = mVideoEncoder->setParameters(&mEncoderParams); + CHECK_ENC_STATUS("MIX::setParameters VideoParamsCommon"); + + mStoreMetaDataInBuffers.isEnabled = (mEncoderFlag & OMXCodec::kStoreMetaDataInVideoBuffers); + + ret = mVideoEncoder->setParameters(&mStoreMetaDataInBuffers); + CHECK_ENC_STATUS("MIX::setParameters StoreMetaDataInBuffers"); + + if (strcmp(mMixCodec, MPEG4_MIME_TYPE) == 0) { + VideoParamsAVC AVCParam; + mVideoEncoder->getParameters(&AVCParam); + AVCParam.idrInterval = mIdrInt; + mVideoEncoder->setParameters(&AVCParam); + } + +#if 1 + VideoConfigBitRate configBitrate; + mVideoEncoder->getConfig(&configBitrate); + configBitrate.rcParams.disableBitsStuffing = 0; + configBitrate.rcParams.disableFrameSkip = mDisableFrameSkip; + mVideoEncoder->setConfig(&configBitrate); +#endif + return OK; + } + +private: + + const char* mMixCodec; + const char* mCodec; + int mBitrate; + int mWidth; + int mHeight; + int mFPS; + int mIntraPeriod; + int mEncodeFrameCount; + uint32_t mEncoderFlag; + int mInitQP; + 
int mMinQP; + int mWinSize; + int mIdrInt; + int mDisableFrameSkip; + +// int mSyncMode; + bool mFirstFrame; + bool mSrcEOS; + + IVideoEncoder *mVideoEncoder; + VideoParamsCommon mEncoderParams; + VideoParamsAVC mVideoParamsAVC; + VideoParamsStoreMetaDataInBuffers mStoreMetaDataInBuffers; + VideoRateControl mRCMode; + + sp mSource; + MediaBufferGroup mGroup; + +}; + +class RawWriter : public MediaWriter { +public: + RawWriter(char* file) { + mFile = file; + mRunning = false; + } + + status_t addSource(const sp &source) { + mSource = source; + return OK; + } + + bool reachedEOS() { + return mEOS; + } + + status_t start(MetaData *params = NULL) { + + mSource->start(); + + mRunning = true; + mEOS = false; + + mFilehandle = fopen(mFile, "w+"); + if (mFilehandle == NULL) + return errno; + + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + + pthread_create(&mThread, &attr, RawWriter::ThreadFunc, this); + pthread_attr_destroy(&attr); + + return OK; + } + + status_t stop() { + mRunning = false; + void *dummy; + pthread_join(mThread, &dummy); + fclose(mFilehandle); + return OK; + } + + status_t pause() { + return OK; + } + +private: + static void *ThreadFunc(void *me) { + RawWriter *writer = static_cast(me); + + status_t err = OK; + + while (writer->mRunning) { + MediaBuffer* buffer; + err = writer->mSource->read(&buffer, NULL); + + if (err == OK) { + fwrite(buffer->data()+buffer->range_offset(), 1, buffer->range_length(), writer->mFilehandle); +// LOG("RawWriter::threadfunc fwrite size=%d\n", buffer->range_length()); + buffer->release(); + continue; + }else { + if (err != ERROR_END_OF_STREAM) + LOG("RawWriter::threadfunc err=%d\n", err); + writer->mEOS = true; + writer->mRunning = false; + fflush(writer->mFilehandle); + return NULL; + } + } + + return NULL; + } + +public: + + const char* mFile; + FILE* mFilehandle; + sp mSource; + pthread_t mThread; + bool mRunning; + bool mEOS; +}; + + +void usage() { + printf("2nd generation Mix_encoder\n"); + printf("Usage: mix_encoder2 [options]\n\n"); + printf(" -a/--initQP set initQP, default 0\n"); + printf(" -b/--bitrate set bitrate bps, default 10M\n"); + printf(" -c/--codec select codec, like H264(default), MPEG4, H263\n"); + printf(" -d/--intraPeriod set IntraPeriod, default 30\n"); + printf(" -e/--encoder select encoder, like MIX(default), OMXCODEC\n"); + printf(" -f set output file name\n"); + printf(" -i/--yuv select yuv generate method, AUTO(default) or from yuv file\n"); + printf(" -j/--winSize set window size, default 1000\n"); + printf(" -k/--encWidth -g/--encHeight set encoder width/height, default 1280*720\n"); + printf(" -l/--idrInterval set IdrInterval, default 1\n"); + printf(" -m/--disableMetadata disable Metadata Mode(default enabled)\n"); + printf(" -n/--count set source frame number, default 30\n"); + printf(" -o/--outputformat set output file format, like MP4(default), RAW\n"); + printf(" -p/--fps set frame rate, default 30\n"); + printf(" -q/--minQP set minQP, default 0\n"); + printf(" -r/--rcMode set rc mode, like VBR(default), CBR, VCM, NO_RC\n"); + printf(" -s/--src select source, like MALLOC(default), VASURFACE, GFX, GRALLOC, CAMERASOURCE, MEMHEAP\n"); + printf(" -w -h set source width /height, default 1280*720\n"); + printf(" -t/--disableFrameSkip disable frame skip, default is false\n"); + printf("\n"); + +} + +int main(int argc, char* argv[]) +{ + int SrcType = 0; + int SrcWidth = 1280; + int SrcHeight = 720; + int SrcStride = 1280; + int SrcFrameNum = 30; + bool 
MetadataMode = true; + bool SoftCodec = false; + int SrcFps = 30; + int EncType = 0; + int EncCodec = 0; + int EncRCMode = 0; + int EncBitrate = 10000000; + int EncWidth = 1280; + int EncHeight = 720; + int InitQP = 0; + int MinQP = 0; + int WinSize = 1000; + int IdrInt = 1; + int IntraPeriod = 30; + int DisableFrameSkip = 0; + int OutFormat = 0; +// bool SyncMode = false; + char* OutFileName = "out.264"; + const char* Yuvfile = NULL; + + android::ProcessState::self()->startThreadPool(); + + const char *short_opts = "a:b:c:d:e:f:g:h:i:j:k:l:m:n:o:p:q:r:s:t:q:u:v:w:x:y:z:?"; + const struct option long_opts[] = { + {"help", no_argument, NULL, '?'}, + {"file", required_argument, NULL, 'f'}, + {"src", required_argument, NULL, 's'}, + {"yuv", required_argument, NULL, 'i'}, + {"srcWidth", required_argument, NULL, 'w'}, + {"srcHeight", required_argument, NULL, 'h'}, + {"disableMetadata", no_argument, NULL, 'm'}, + {"count", required_argument, NULL, 'n'}, + {"encoder", required_argument, NULL, 'e'}, + {"codec", required_argument, NULL, 'c'}, + {"bitrate", required_argument, NULL, 'b'}, + {"output", required_argument, NULL, 'o'}, + {"fps", required_argument, NULL, 'p'}, + {"rcMode", required_argument, NULL, 'r'}, + {"encWidth", required_argument, NULL, 'k'}, + {"encHeight", required_argument, NULL, 'g'}, + {"initQP", required_argument, NULL, 'a'}, + {"minQP", required_argument, NULL, 'q'}, + {"intraPeriod", required_argument, NULL, 'd'}, + {"winSize", required_argument, NULL, 'j'}, + {"idrInt", required_argument, NULL, 'l'}, + {"disableFrameSkip", no_argument, NULL, 't'}, + {0, 0, 0, 0} + }; + + char c; + + const char *SRCTYPE[] = {"MALLOC", "VASURFACE", "GFX", "GRALLOC", + "CAMERASOURCE", "SURFACEMEDIASOURCE", "MEMHEAP", NULL}; + const char *ENCTYPE[] = {"MIX", "OMXCODEC", NULL}; + const char *CODEC[] = {"H264", "MPEG4", "H263", NULL}; + const char *RCMODE[] = {"VBR", "CBR", "VCM", "NO_RC", NULL}; + const char *OUTFORMAT[] = {"MP4", "RAW", NULL}; + + while ((c = getopt_long(argc, argv, short_opts, long_opts, NULL) ) != EOF) { + switch (c) { + case 'a': + InitQP = atoi(optarg); + break; + + case 'b': + EncBitrate = atoi(optarg); + break; + + case 'c': + for (int i = 0; CODEC[i] != NULL; i++) { + if (strcasecmp (optarg, CODEC[i]) == 0) { + EncCodec = i; + break; + }else + continue; + } + + break; + + case 'd': + IntraPeriod = atoi(optarg); + break; + + case 'e': + for (int i = 0; ENCTYPE[i] != NULL; i++) { + if (strcasecmp (optarg, ENCTYPE[i]) == 0) { + EncType = i; + break; + }else + continue; + } + + break; + + case 'f': + OutFileName = optarg; + break; + + case 'g': + EncHeight = atoi(optarg); + break; + + case 'h': + SrcHeight = atoi(optarg); + EncHeight = SrcHeight; + break; + + case 'i': + if (strcasecmp (optarg, "AUTO") == 0) + Yuvfile = NULL; + else + Yuvfile = optarg; + + break; + + case 'j': + WinSize = atoi(optarg); + break; + + case 'k': + EncWidth = atoi(optarg); + break; + + case 'l': + IdrInt = atoi(optarg); + break; + + case 'm': + MetadataMode = false; + break; + + case 'n': + SrcFrameNum = atoi(optarg); + break; + + case 'o': + for (int i = 0; OUTFORMAT[i] != NULL; i++) { + if (strcasecmp (optarg, OUTFORMAT[i]) == 0) { + OutFormat = i; + break; + }else + continue; + } + + break; + + case 'p': + SrcFps = atoi(optarg); + break; + + case 'q': + MinQP = atoi(optarg); + break; + + case 'r': + for (int i = 0; RCMODE[i] != NULL; i++) { + if (strcasecmp (optarg, RCMODE[i]) == 0) { + EncRCMode = i; + break; + }else + continue; + } + + break; + + case 's': + for (int i = 0; SRCTYPE[i] != 
NULL; i++) { + if (strcasecmp (optarg, SRCTYPE[i]) == 0) { + SrcType = i; + break; + }else + continue; + } + + break; + + case 't': + DisableFrameSkip = 1; + break; + + case 'w': + SrcWidth = atoi(optarg); + SrcStride = SrcWidth; + EncWidth = SrcWidth; + break; + + case '?': + default: + usage(); + exit(0); + } + } + + //export encoding parameters summary + printf("=========================================\n"); + printf("Source:\n"); + printf("Type: %s, Width: %d, Height: %d, Stride: %d\n", SRCTYPE[SrcType], SrcWidth, SrcHeight, SrcStride); + printf("FPS: %d, YUV: %s, Metadata: %d\n", SrcFps, Yuvfile, MetadataMode); + + printf("\nEncoder:\n"); + printf("Type: %s, Codec: %s, Width: %d, Height: %d\n", ENCTYPE[EncType], CODEC[EncCodec], EncWidth, EncHeight); + printf("RC: %s, Bitrate: %d bps, initQP: %d, minQP: %d\n", RCMODE[EncRCMode], EncBitrate, InitQP, MinQP); + printf("winSize: %d, IdrInterval: %d, IntraPeriod: %d, FPS: %d \n", WinSize, IdrInt, IntraPeriod, SrcFps); + printf("Frameskip: %d\n", !DisableFrameSkip); + + printf("\nOut:\n"); + printf("Type: %s, File: %s\n", OUTFORMAT[OutFormat], OutFileName); + printf("=========================================\n"); + + sp source; + sp encoder; + sp writer; + + //setup source + if (SrcType == 0) { + source = new MallocSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, + SrcFps, MetadataMode, Yuvfile); + } else if (SrcType == 1) { + source = new VASurfaceSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, + SrcFps, MetadataMode, Yuvfile); + } else if (SrcType == 2) { + source = new GfxSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, + SrcFps, MetadataMode, Yuvfile); + } else if (SrcType == 3) { + source = new GrallocSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, + SrcFps, MetadataMode, Yuvfile); + } else if (SrcType == 4) { +// source = new CameraSource(); + } else if (SrcType == 5) { + // source = new SurfaceMediaSource(); + } else if (SrcType == 6) { + source = new MemHeapSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, + SrcFps, MetadataMode, Yuvfile); + } + + printf("Setup Encoder\n"); + //setup encoder + sp enc_meta = new MetaData; + switch (EncCodec) { + case 1: + enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4); + break; + case 2: + enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263); + break; + default: + enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC); + break; + } + + enc_meta->setInt32(kKeyWidth, EncWidth); + enc_meta->setInt32(kKeyHeight, EncHeight); + enc_meta->setInt32(kKeyFrameRate, SrcFps); + enc_meta->setInt32(kKeyBitRate, EncBitrate); + enc_meta->setInt32(kKeyStride, EncWidth); + enc_meta->setInt32(kKeySliceHeight, EncHeight); + enc_meta->setInt32(kKeyIFramesInterval, 1); + enc_meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420SemiPlanar); + + //only valid for MIX encoder + enc_meta->setInt32('itqp', InitQP); + enc_meta->setInt32('mnqp', MinQP); + enc_meta->setInt32('iapd', IntraPeriod); + enc_meta->setInt32('wsiz', WinSize); + enc_meta->setInt32('idri', WinSize); + enc_meta->setInt32('difs', DisableFrameSkip); + + uint32_t encoder_flags = 0; + if (MetadataMode) + encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers; + if (SoftCodec) + encoder_flags |= OMXCodec::kPreferSoftwareCodecs; + + OMXClient client; + + if (EncType == 1) { + CHECK_EQ(client.connect(), (status_t)OK); + + encoder = OMXCodec::Create( + client.interface(), enc_meta, true /* createEncoder */, source, + 0, encoder_flags); + } else { + encoder = new MixEncoder(source, enc_meta, EncRCMode, encoder_flags); 
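+        // Editor's note (not part of the original patch): both branches
+        // receive the same encoder_flags, so kStoreMetaDataInVideoBuffers
+        // reaches either OMXCodec or MixEncoder; MixEncoder forwards it
+        // to libmix through VideoParamsStoreMetaDataInBuffers in
+        // SetVideoEncoderParam(). In metadata mode the sources above hand
+        // the encoder IntelMetadataBuffer handles rather than raw pixels.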
+    }
+
+    //setup output
+    printf("Setup Writer\n");
+
+    if (OutFormat == 0)
+        writer = new MPEG4Writer(OutFileName);
+    else
+        writer = new RawWriter(OutFileName);
+
+    writer->addSource(encoder);
+
+    printf("Start encoding\n");
+
+    int64_t start = systemTime();
+    CHECK_EQ((status_t)OK, writer->start());
+    while (!writer->reachedEOS()) {
+        usleep(100000);
+    }
+    status_t err = writer->stop();
+    int64_t end = systemTime();
+
+    if (EncType == 1) {
+        client.disconnect();
+    }
+
+    printf("Stop Encoding\n");
+
+    if (err != OK && err != ERROR_END_OF_STREAM) {
+        LOG("record failed: %d\n", err);
+        return 1;
+    }
+
+    enc_meta.clear();
+
+    printf("encoding %d frames in %lld us\n", gNumFramesOutput, (end-start)/1000);
+    printf("encoding speed is: %.2f fps\n", (gNumFramesOutput * 1E9) / (end-start));
+
+    return 1;
+}
+
-- cgit v1.2.3


From 443528ee649ee9ed9b327fd424dc8e3d1ecfb93e Mon Sep 17 00:00:00 2001
From: Zhao Liang
Date: Mon, 15 Jul 2013 14:22:36 +0800
Subject: Support setting num_unit_in_tick and time_scale in libmix

BZ: 123457

Support setting num_units_in_tick and time_scale in the H264 SPS VUI
header. Note that VUIFlag must be set in VideoParamsAVC for these
values to take effect.

Change-Id: I3a1e86b08eda6a6f44ae272b7faea6f5f7e8f156
Signed-off-by: Zhao Liang
Reviewed-on: http://android.intel.com:8080/118896
Reviewed-by: Shi, PingX
Tested-by: Shi, PingX
Reviewed-by: buildbot
Tested-by: buildbot
---
 videoencoder/VideoEncoderAVC.cpp | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp
index b4f0222..665cc7e 100644
--- a/videoencoder/VideoEncoderAVC.cpp
+++ b/videoencoder/VideoEncoderAVC.cpp
@@ -870,6 +870,8 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) {
     avcSeqParams.intra_period = mComParams.intraPeriod;
     //avcSeqParams.vui_flag = 248;
     avcSeqParams.vui_parameters_present_flag = mVideoParamsAVC.VUIFlag;
+    avcSeqParams.num_units_in_tick = frameRateDenom;
+    avcSeqParams.time_scale = 2 * frameRateNum;
     avcSeqParams.seq_parameter_set_id = 0;
 
     if (mVideoParamsAVC.crop.LeftOffset || mVideoParamsAVC.crop.RightOffset ||
@@ -928,8 +930,8 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) {
     avcSeqParams.seq_fields.bits.log2_max_frame_num_minus4 = 0;
     avcSeqParams.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = 2;
 
-    avcSeqParams.time_scale = 900;
-    avcSeqParams.num_units_in_tick = 15; /* Tc = num_units_in_tick / time_sacle */
+//    avcSeqParams.time_scale = 900;
+//    avcSeqParams.num_units_in_tick = 15; /* Tc = num_units_in_tick / time_sacle */
     // Not sure whether these settings work for all drivers
 
     vaStatus = vaUnmapBuffer(mVADisplay, mRcParamBuf);
-- cgit v1.2.3


From 8e74eb381912eb2ae3f8495d77feba5d9e433c76 Mon Sep 17 00:00:00 2001
From: Paul Zurcher
Date: Tue, 16 Jul 2013 09:51:23 -0700
Subject: DRM-WV: fix video corruption with protected content playback

BZ: 89857

Fix for video corruption in the bottom right corner on Widevine
protected content on Merrifield.
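[Editor's note -- illustrative arithmetic, not part of the original
message: the old code padded sliceParam->slice_data_size itself by the
offset shift before rounding; the fix below leaves slice_data_size
untouched and folds the shift only into the 16-byte-aligned
sliceData->slice_size. For example, with slice_data_size = 0x1234 and
sliceOffset % 16 == 9:

    slice_size = (0x1234 + 9 + 0xF) & ~0xF = 0x1240

i.e. 4669 bytes rounded up to the next 16-byte multiple, 4672.]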
Change-Id: Ia16dd6e32d34128ab68892e13ed5a10fbfe9b1af Signed-off-by: Paul Zurcher Reviewed-on: http://android.intel.com:8080/119088 Reviewed-by: Poornachandran, Rajesh Reviewed-by: Kandru, Suneel Reviewed-by: Qiu, Junhai Tested-by: Post, DavidX J Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp index ac9309d..cd8b2ca 100644 --- a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp @@ -309,8 +309,7 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset; uint32_t slice_offset_shift = sliceOffset % 16; sliceParam->slice_data_offset += slice_offset_shift; - sliceParam->slice_data_size += slice_offset_shift; - sliceData->slice_size = (sliceParam->slice_data_size + 0xF) & ~0xF; + sliceData->slice_size = (sliceParam->slice_data_size + slice_offset_shift + 0xF) & ~0xF; vaStatus = vaCreateBuffer( mVADisplay, -- cgit v1.2.3 From 22e94a8ef3f6f39a36b852dc200509ec6beab359 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Tue, 16 Jul 2013 14:16:46 +0800 Subject: libmix: fix the wrong reference frame issue when the POC of successive frames are same BZ: 118207 122198 Fix this wrong reference frame issue by looking up latest ref frame in the DPB with specified POC in case successive frames have same POC Orig-Change-Id: I09f80dab92e720314db7f48a0eeaebcad1f73d7d Orig-Change-Id: I30f10a8f2662e3aaa001388096c843a6846fcde4 Change-Id: Ifaea1ab43c6a8f7452e6e9a84c6c6f6aa98ed9de Signed-off-by: ywan171 Reviewed-on: http://android.intel.com:8080/119118 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- videodecoder/VideoDecoderAVC.cpp | 21 ++++++++++++++++++++- videodecoder/VideoDecoderAVC.h | 1 + 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 5730795..530d8c9 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -414,7 +414,8 @@ Decode_Status VideoDecoderAVC::updateDPB(VAPictureParameterBufferH264 *picParam) continue; } dpb->poc = getPOC(ref); - dpb->surfaceBuffer = findSurfaceBuffer(ref); + // looking for the latest ref frame in the DPB with specified POC, in case frames have same POC + dpb->surfaceBuffer = findRefSurfaceBuffer(ref); if (dpb->surfaceBuffer == NULL) { ETRACE("Reference frame %d is missing for current frame %d", dpb->poc, getPOC(&(picParam->CurrPic))); if (dpb->poc == getPOC(&(picParam->CurrPic))) { @@ -568,6 +569,24 @@ VideoSurfaceBuffer* VideoDecoderAVC::findSurfaceBuffer(VAPictureH264 *pic) { return dpb->surfaceBuffer; } } + // ETRACE("Unable to find surface for poc %d", getPOC(pic)); + return NULL; +} + +VideoSurfaceBuffer* VideoDecoderAVC::findRefSurfaceBuffer(VAPictureH264 *pic) { + DecodedPictureBuffer *dpb = mDPBs[mToggleDPB]; + // always looking for the latest one in the DPB, in case ref frames have same POC + dpb += (DPB_SIZE - 1); + for (int32_t i = DPB_SIZE; i > 0; i--, dpb--) { + if (dpb->poc == pic->BottomFieldOrderCnt || + dpb->poc == pic->TopFieldOrderCnt) { + // TODO: remove these debugging codes + if (dpb->surfaceBuffer == NULL) { + ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic)); + } + 
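+            // Editor's note (not part of the original patch): because the
+            // scan starts at dpb + (DPB_SIZE - 1) and walks backwards, the
+            // first match found here is the most recently stored entry, so
+            // when two reference frames carry the same POC the latest one
+            // wins -- the behaviour the commit message asks for.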
return dpb->surfaceBuffer; + } + } ETRACE("Unable to find surface for poc %d", getPOC(pic)); return NULL; } diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h index 799ae2e..9c23e6b 100644 --- a/videodecoder/VideoDecoderAVC.h +++ b/videodecoder/VideoDecoderAVC.h @@ -50,6 +50,7 @@ protected: inline uint32_t getPOC(VAPictureH264 *pic); // Picture Order Count inline VASurfaceID findSurface(VAPictureH264 *pic); inline VideoSurfaceBuffer* findSurfaceBuffer(VAPictureH264 *pic); + inline VideoSurfaceBuffer* findRefSurfaceBuffer(VAPictureH264 *pic); inline void invalidateDPB(int toggle); inline void clearAsReference(int toggle); Decode_Status startVA(vbp_data_h264 *data); -- cgit v1.2.3 From 54feca7c850a0e1121ac94c07c4f8a75c158b3a8 Mon Sep 17 00:00:00 2001 From: Cheng Yao Date: Mon, 15 Jul 2013 10:57:13 +0800 Subject: JPEG: use normal VASurface for HW CSC instead of gralloc buffer BZ:116496 Change-Id: I90bdf5d1cf19027ef2a508433a1dab8eadaae928 Signed-off-by: Cheng Yao Reviewed-on: http://android.intel.com:8080/119822 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: buildbot Tested-by: buildbot --- imagedecoder/Android.mk | 3 +- imagedecoder/JPEGDecoder.c | 433 ++++++++++++++++++--------------------------- 2 files changed, 176 insertions(+), 260 deletions(-) diff --git a/imagedecoder/Android.mk b/imagedecoder/Android.mk index ccc6f0f..6795a06 100644 --- a/imagedecoder/Android.mk +++ b/imagedecoder/Android.mk @@ -24,8 +24,7 @@ LOCAL_SHARED_LIBRARIES += \ libcutils \ libva-android \ libva \ - libva-tpi \ - libhardware + libva-tpi LOCAL_LDLIBS += -lpthread LOCAL_CFLAGS += -Wno-multichar diff --git a/imagedecoder/JPEGDecoder.c b/imagedecoder/JPEGDecoder.c index 6a1337b..9dfe0a1 100644 --- a/imagedecoder/JPEGDecoder.c +++ b/imagedecoder/JPEGDecoder.c @@ -35,11 +35,6 @@ #include "JPEGParser.h" #include #include "jerror.h" -#include -#ifdef JPEGDEC_USES_GEN -#include "ufo/graphics.h" -#define INTEL_VPG_GRALLOC_MODULE_PERFORM_GET_BO_NAME 4 -#endif #define JPEG_MAX_SETS_HUFFMAN_TABLES 2 @@ -126,7 +121,7 @@ static void write_to_YUY2(uint8_t *pDst, *(pDst + 2 * col + 1) = *(pV + actual_col); } } - pDst += dst_stride * 2; + pDst += dst_stride; pY += pImg->pitches[0]; uint32_t actual_row = row * v_samp_factor; pU = pSrc + pImg->offsets[1] + actual_row * pImg->pitches[1]; @@ -225,26 +220,12 @@ Decode_Status jdva_initialize (jd_libva_struct * jd_libva_ptr) { goto cleanup; } - VAConfigAttrib attrib; - attrib.type = VAConfigAttribRTFormat; - va_status = vaGetConfigAttributes(jd_libva_ptr->va_display, VAProfileJPEGBaseline, VAEntrypointVLD, &attrib, 1); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaGetConfigAttributes failed. va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - goto cleanup; - } - if ((VA_RT_FORMAT_YUV444 & attrib.value) == 0) { + /*if ((VA_RT_FORMAT_YUV444 & attrib.value) == 0) { WTRACE("Format not surportted\n"); status = DECODE_FAIL; goto cleanup; - } + }*/ - va_status = vaCreateConfig(jd_libva_ptr->va_display, VAProfileJPEGBaseline, VAEntrypointVLD, &attrib, 1, &(jd_libva_ptr->va_config)); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateConfig failed. 
va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - goto cleanup; - } jd_libva_ptr->initialized = TRUE; status = DECODE_SUCCESS; @@ -293,149 +274,46 @@ void jdva_deinitialize (jd_libva_struct * jd_libva_ptr) { static Decode_Status doColorConversion(jd_libva_struct *jd_libva_ptr, VASurfaceID surface, char ** buf, uint32_t rows) { -#if 0 // dump RGB to file - uint8_t* rgb_buf; - int32_t data_len = 0; - uint32_t surface_width, surface_height; - surface_width = (( ( jd_libva_ptr->image_width + 7 ) & ( ~7 )) + 15 ) & ( ~15 ); - surface_height = (( ( jd_libva_ptr->image_height + 7 ) & ( ~7 )) + 15 ) & ( ~15 ); - - rgb_buf = (uint8_t*) malloc((surface_width * surface_height) << 2); - if(rgb_buf == NULL){ - return DECODE_MEMORY_FAIL; - } - va_status = vaPutSurfaceBuf(jd_libva_ptr->va_display, jd_libva_ptr->va_surfaces[0], rgb_buf, &data_len, 0, 0, surface_width, surface_height, 0, 0, surface_width, surface_height, NULL, 0, 0); - - buf = rgb_buf; - // dump RGB data - { - FILE *pf_tmp = fopen("img_out.rgb", "wb"); - if(pf_tmp == NULL) - ETRACE("Open file error"); - fwrite(rgb_buf, 1, surface_width * surface_height * 4, pf_tmp); - fclose(pf_tmp); - } -#endif - #ifdef JPEGDEC_USES_GEN VAImage decoded_img; uint8_t *decoded_buf = NULL; + VAImage yuy2_img; + uint8_t *yuy2_buf = NULL; + VAImage rgba_img; + uint8_t *rgba_buf = NULL; int row, col; VAStatus vpp_status; uint8_t *pSrc, *pDst; VADisplay display = NULL; VAContextID context = VA_INVALID_ID; - VASurfaceID dst_surface = VA_INVALID_ID, src_surface = VA_INVALID_ID; VAConfigID config = VA_INVALID_ID; VAConfigAttrib vpp_attrib; - VASurfaceAttrib dst_surf_attrib, src_surf_attrib; - buffer_handle_t dst_handle, src_handle; - int32_t dst_stride, src_stride; - uint32_t dst_buf_name, src_buf_name; VAProcPipelineParameterBuffer vpp_param; VABufferID vpp_pipeline_buf = VA_INVALID_ID; int major_version, minor_version; - uint8_t *src_gralloc_buf, *dst_gralloc_buf; - hw_module_t const* module = NULL; - alloc_device_t *alloc_dev = NULL; - struct gralloc_module_t *gralloc_module = NULL; VAProcPipelineCaps vpp_pipeline_cap ; VARectangle src_rect, dst_rect; int err; Display vppdpy; FILE *fp; + VASurfaceAttrib in_fourcc, out_fourcc; + VASurfaceID in_surf, out_surf; Decode_Status status = DECODE_SUCCESS; VASurfaceAttribExternalBuffers vaSurfaceExternBufIn, vaSurfaceExternBufOut; - decoded_img.image_id = VA_INVALID_ID; - vpp_status = vaDeriveImage(jd_libva_ptr->va_display, surface, &decoded_img); + yuy2_img.image_id = VA_INVALID_ID; + rgba_img.image_id = VA_INVALID_ID; + display = jd_libva_ptr->va_display; + + vpp_status = vaDeriveImage(display, surface, &decoded_img); JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - vpp_status = vaMapBuffer(jd_libva_ptr->va_display, decoded_img.buf, (void **)&decoded_buf); - if (vpp_status) { - vaDestroyImage(jd_libva_ptr->va_display, decoded_img.image_id); - } + vpp_status = vaMapBuffer(display, decoded_img.buf, (void **)&decoded_buf); JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); write_to_file(DECODE_DUMP_FILE, &decoded_img, decoded_buf); ITRACE("Start HW CSC: color %s=>RGBA8888", fourcc2str(jd_libva_ptr->fourcc)); - err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - gralloc_module = (struct gralloc_module_t *)module; - err = gralloc_open(module, &alloc_dev); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - err = alloc_dev->alloc(alloc_dev, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - 
HAL_PIXEL_FORMAT_YCbCr_422_I, // YUY2 - GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_WRITE_MASK | GRALLOC_USAGE_SW_READ_MASK, - (buffer_handle_t *)&src_handle, - &src_stride); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - ITRACE("src_gralloc_buf: handle=%u, stride=%u", src_handle, src_stride); - err = alloc_dev->alloc(alloc_dev, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - HAL_PIXEL_FORMAT_RGBA_8888, - GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_WRITE_MASK | GRALLOC_USAGE_SW_READ_MASK, - (buffer_handle_t *)&dst_handle, - &dst_stride); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - ITRACE("dst_gralloc_buf: handle=%u, stride=%u", dst_handle, dst_stride); - - err = gralloc_module->perform(gralloc_module, INTEL_VPG_GRALLOC_MODULE_PERFORM_GET_BO_NAME, src_handle, &src_buf_name); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - err = gralloc_module->perform(gralloc_module, INTEL_VPG_GRALLOC_MODULE_PERFORM_GET_BO_NAME, dst_handle, &dst_buf_name); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - // copy decoded buf into the gralloc buf in YUY2 format - err = gralloc_module->lock(gralloc_module, src_handle, - GRALLOC_USAGE_SW_WRITE_MASK, - 0, 0, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - &src_gralloc_buf); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - ITRACE("Convert %s buf into YUY2:", fourcc2str(jd_libva_ptr->fourcc)); - - write_to_YUY2(src_gralloc_buf, - src_stride, - &decoded_img, - decoded_buf); - err = gralloc_module->unlock(gralloc_module, src_handle); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - fp = fopen(YUY2_DUMP_FILE, "wb"); - if (fp) { - ITRACE("DUMP YUY2 to " YUY2_DUMP_FILE); - err = gralloc_module->lock(gralloc_module, src_handle, - GRALLOC_USAGE_SW_READ_MASK, - 0, 0, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - &src_gralloc_buf); - unsigned char *pYUV = src_gralloc_buf; - int loop; - for(loop=0;loopimage_height;loop++) - { - fwrite(pYUV, 2, jd_libva_ptr->image_width, fp); - pYUV += 2 * src_stride; - } - gralloc_module->unlock(gralloc_module, src_handle); - fclose(fp); - } - - vaUnmapBuffer(jd_libva_ptr->va_display, decoded_img.buf); - vaDestroyImage(jd_libva_ptr->va_display, decoded_img.image_id); - decoded_buf= NULL; - - display = vaGetDisplay (&vppdpy); - vpp_status = vaInitialize(display, &major_version, &minor_version); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); vpp_attrib.type = VAConfigAttribRTFormat; vpp_attrib.value = VA_RT_FORMAT_YUV420; @@ -457,48 +335,67 @@ static Decode_Status doColorConversion(jd_libva_struct *jd_libva_ptr, VASurfaceI &context); JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - vaSurfaceExternBufIn.pixel_format = VA_FOURCC_YUY2; - vaSurfaceExternBufIn.width = jd_libva_ptr->image_width; - vaSurfaceExternBufIn.height = jd_libva_ptr->image_height; - vaSurfaceExternBufIn.pitches[0] = src_stride * 2; // YUY2 is 16bit - vaSurfaceExternBufIn.buffers = &src_buf_name; - vaSurfaceExternBufIn.num_buffers = 1; - vaSurfaceExternBufIn.flags = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM; - src_surf_attrib.type = VASurfaceAttribExternalBufferDescriptor; - src_surf_attrib.flags = VA_SURFACE_ATTRIB_SETTABLE; - src_surf_attrib.value.type = VAGenericValueTypePointer; - src_surf_attrib.value.value.p = (void *)&vaSurfaceExternBufIn; + in_surf = out_surf = VA_INVALID_ID; + in_fourcc.type = VASurfaceAttribPixelFormat; + in_fourcc.flags = VA_SURFACE_ATTRIB_SETTABLE; + 
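+    /* Editor's note (not part of the original patch): VASurfaceAttrib
+     * carries its payload in a VAGenericValue union, so the type tag set
+     * on the next line must match the union member written after it --
+     * here an integer FourCC, rather than the pointer to a
+     * VASurfaceAttribExternalBuffers descriptor used by the removed
+     * gralloc-based path above. */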
in_fourcc.value.type = VAGenericValueTypeInteger; + in_fourcc.value.value.i = VA_FOURCC_YUY2; vpp_status = vaCreateSurfaces(display, - VA_RT_FORMAT_YUV422, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - &src_surface, - 1, - &src_surf_attrib, - 1); + VA_RT_FORMAT_YUV422, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + &in_surf, + 1, + &in_fourcc, + 1); + vpp_status = vaDeriveImage(display, in_surf, &yuy2_img); JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - vaSurfaceExternBufOut.pixel_format = VA_FOURCC_ARGB; - vaSurfaceExternBufOut.width = jd_libva_ptr->image_width; - vaSurfaceExternBufOut.height = jd_libva_ptr->image_height; - vaSurfaceExternBufOut.pitches[0] = dst_stride * 4; // RGBA is 32bit - vaSurfaceExternBufOut.buffers = &dst_buf_name; - vaSurfaceExternBufOut.num_buffers = 1; - vaSurfaceExternBufOut.flags = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM; - dst_surf_attrib.type = VASurfaceAttribExternalBufferDescriptor; - dst_surf_attrib.flags = VA_SURFACE_ATTRIB_SETTABLE; - dst_surf_attrib.value.type = VAGenericValueTypePointer; - dst_surf_attrib.value.value.p = (void *)&vaSurfaceExternBufOut; + vpp_status = vaMapBuffer(display, yuy2_img.buf, (void **)&yuy2_buf); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + + write_to_YUY2(yuy2_buf, yuy2_img.pitches[0], &decoded_img, decoded_buf); + fp = fopen(YUY2_DUMP_FILE, "wb"); + if (fp) { + ITRACE("DUMP YUY2 to " YUY2_DUMP_FILE); + unsigned char *pYUV = yuy2_buf; + uint32_t loop; + for(loop=0;loopimage_height;loop++) + { + fwrite(pYUV, 2, jd_libva_ptr->image_width, fp); + pYUV += yuy2_img.pitches[0]; + } + fclose(fp); + } + vaUnmapBuffer(display, yuy2_img.buf); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + yuy2_buf = NULL; + vaDestroyImage(display, yuy2_img.image_id); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + yuy2_img.image_id = VA_INVALID_ID; + vaUnmapBuffer(display, decoded_img.buf); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + decoded_buf = NULL; + vaDestroyImage(display, decoded_img.image_id); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + decoded_img.image_id = VA_INVALID_ID; + + out_fourcc.type = VASurfaceAttribPixelFormat; + out_fourcc.flags = VA_SURFACE_ATTRIB_SETTABLE; + out_fourcc.value.type = VAGenericValueTypeInteger; + out_fourcc.value.value.i = VA_FOURCC_RGBA; vpp_status = vaCreateSurfaces(display, - VA_RT_FORMAT_RGB32, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - &dst_surface, - 1, - &dst_surf_attrib, - 1); + VA_RT_FORMAT_RGB32, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + &out_surf, + 1, + &out_fourcc, + 1); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - ITRACE("vaCreateSurfaces got surface %u=>%u", src_surface, dst_surface); + + ITRACE("vaCreateSurfaces got surface %u=>%u", in_surf, out_surf); //query caps for pipeline vpp_status = vaQueryVideoProcPipelineCaps(display, context, @@ -514,7 +411,7 @@ static Decode_Status doColorConversion(jd_libva_struct *jd_libva_ptr, VASurfaceI ITRACE("from (%d, %d, %u, %u) to (%d, %d, %u, %u)", src_rect.x, src_rect.y, src_rect.width, src_rect.height, dst_rect.x, dst_rect.y, dst_rect.width, dst_rect.height); - vpp_param.surface = src_surface; + vpp_param.surface = in_surf; vpp_param.output_region = &dst_rect; vpp_param.surface_region = &src_rect; vpp_param.surface_color_standard = VAProcColorStandardBT601; //csc @@ -522,7 +419,6 @@ static Decode_Status doColorConversion(jd_libva_struct *jd_libva_ptr, VASurfaceI vpp_param.output_color_standard = 
VAProcColorStandardNone; vpp_param.filter_flags = VA_FRAME_PICTURE; vpp_param.filters = NULL; - //vpp_param.pipeline_flags = 1084929476; vpp_param.num_filters = 0; vpp_param.forward_references = 0; vpp_param.num_forward_references = 0; @@ -541,7 +437,7 @@ static Decode_Status doColorConversion(jd_libva_struct *jd_libva_ptr, VASurfaceI vpp_status = vaBeginPicture(display, context, - dst_surface); + out_surf); JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); //Render the picture @@ -554,63 +450,68 @@ static Decode_Status doColorConversion(jd_libva_struct *jd_libva_ptr, VASurfaceI vpp_status = vaEndPicture(display, context); JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - vpp_status = vaSyncSurface(display, dst_surface); + vpp_status = vaSyncSurface(display, out_surf); JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); ITRACE("Finished HW CSC YUY2=>RGBA8888"); - // gralloc lock + copy - err = gralloc_module->lock(gralloc_module, dst_handle, - GRALLOC_USAGE_SW_READ_MASK, - 0, 0, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - &dst_gralloc_buf); JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); ITRACE("Copy RGBA8888 buffer (%ux%u) to skia buffer (%ux%u)", jd_libva_ptr->image_width, jd_libva_ptr->image_height, buf[1] - buf[0], rows); - pSrc = dst_gralloc_buf; + + vpp_status = vaDeriveImage(display, out_surf, &rgba_img); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + vpp_status = vaMapBuffer(display, rgba_img.buf, (void **)&rgba_buf); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); fp = fopen(RGBA_DUMP_FILE, "wb"); if (fp) - ITRACE("dumping RGBA8888 to " RGBA_DUMP_FILE); - // FIXME: is it RGBA? or BGRA? or ARGB? - for (row = 0; row < rows; ++ row) { - memcpy(buf[row], pSrc, 4 * jd_libva_ptr->image_width); + ITRACE("DUMP RGBA to " RGBA_DUMP_FILE); + unsigned char *prgba = rgba_buf; + uint32_t loop; + for(loop=0;loopimage_height && loop < rows;loop++) + { + memcpy(buf[loop], prgba, 4 * jd_libva_ptr->image_width); if (fp) - fwrite(pSrc, 4, jd_libva_ptr->image_width, fp); - pSrc += dst_stride * 4; - } + fwrite(prgba, 4, jd_libva_ptr->image_width, fp); + prgba += rgba_img.pitches[0]; + } if (fp) - fclose(fp); - gralloc_module->unlock(gralloc_module, dst_handle); + fclose(fp); + vaUnmapBuffer(display, rgba_img.buf); + JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); + rgba_buf = NULL; + vaDestroyImage(display, rgba_img.image_id); + rgba_img.image_id = VA_INVALID_ID; cleanup: if (vpp_pipeline_buf != VA_INVALID_ID) vaDestroyBuffer(display, vpp_pipeline_buf); - if (dst_surface != VA_INVALID_ID) - vaDestroySurfaces(display, &dst_surface, 1); - if (src_surface != VA_INVALID_ID) - vaDestroySurfaces(display, &src_surface, 1); + if (in_surf != VA_INVALID_ID) + vaDestroySurfaces(display, &in_surf, 1); + if (out_surf != VA_INVALID_ID) + vaDestroySurfaces(display, &out_surf, 1); + if (rgba_buf) + vaUnmapBuffer(display, rgba_img.buf); + if (rgba_img.image_id != VA_INVALID_ID) + vaDestroyImage(display, rgba_img.image_id); + if (yuy2_buf) + vaUnmapBuffer(display, yuy2_img.buf); + if (yuy2_img.image_id != VA_INVALID_ID) + vaDestroyImage(display, yuy2_img.image_id); + if (decoded_buf) + vaUnmapBuffer(display, decoded_img.buf); + if (decoded_img.image_id != VA_INVALID_ID) + vaDestroyImage(display, decoded_img.image_id); if (context != VA_INVALID_ID) vaDestroyContext(display, context); if (config != VA_INVALID_ID) vaDestroyConfig(display, config); - if (display) - vaTerminate(display); - if (alloc_dev) { - alloc_dev->free(alloc_dev, dst_handle); - 
alloc_dev->free(alloc_dev, src_handle); - gralloc_close(alloc_dev); - } return status; #else - // TODO: CSC with Gralloc On PVR platform + return DECODE_SUCCESS; #endif - - - } static unsigned int getSurfaceFormat(jd_libva_struct * jd_libva_ptr, VASurfaceAttrib * fourcc) { @@ -681,33 +582,6 @@ Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr) { return DECODE_MEMORY_FAIL; } - VASurfaceAttrib fourcc; - unsigned int surface_format = getSurfaceFormat(jd_libva_ptr, &fourcc); - jd_libva_ptr->fourcc = fourcc.value.value.i; -#ifdef JPEGDEC_USES_GEN - va_status = vaCreateSurfaces(jd_libva_ptr->va_display, surface_format, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - jd_libva_ptr->va_surfaces, - jd_libva_ptr->surface_count, &fourcc, 1); -#else - va_status = vaCreateSurfaces(jd_libva_ptr->va_display, VA_RT_FORMAT_YUV444, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - jd_libva_ptr->va_surfaces, - jd_libva_ptr->surface_count, NULL, 0); -#endif - JD_CHECK(va_status, cleanup); - va_status = vaCreateContext(jd_libva_ptr->va_display, jd_libva_ptr->va_config, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - 0, //VA_PROGRESSIVE - jd_libva_ptr->va_surfaces, - jd_libva_ptr->surface_count, &(jd_libva_ptr->va_context)); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateContext failed. va_status = 0x%x", va_status); - return DECODE_DRIVER_FAIL; - } jd_libva_ptr->resource_allocated = TRUE; return status; @@ -739,28 +613,9 @@ Decode_Status jdva_release_resource (jd_libva_struct * jd_libva_ptr) { * It is safe to destroy Surface/Config/Context severl times * and it is also safe even their value is NULL */ - va_status = vaDestroySurfaces(jd_libva_ptr->va_display, jd_libva_ptr->va_surfaces, jd_libva_ptr->surface_count); - - va_status = vaDestroyContext(jd_libva_ptr->va_display, jd_libva_ptr->va_context); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaDestroyContext failed. va_status = 0x%x", va_status); - return DECODE_DRIVER_FAIL; - } - jd_libva_ptr->va_context = NULL; - - if (jd_libva_ptr->va_surfaces) { - free (jd_libva_ptr->va_surfaces); - jd_libva_ptr->va_surfaces = NULL; - } - - va_status = vaDestroyConfig(jd_libva_ptr->va_display, jd_libva_ptr->va_config); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaDestroyConfig failed. va_status = 0x%x", va_status); - return DECODE_DRIVER_FAIL; - } cleanup: - jd_libva_ptr->va_config = NULL; + jd_libva_ptr->va_config = VA_INVALID_ID; jd_libva_ptr->resource_allocated = FALSE; @@ -778,6 +633,47 @@ Decode_Status jdva_decode (j_decompress_ptr cinfo, jd_libva_struct * jd_libva_pt uint32_t lines = jd_libva_ptr->output_lines; uint32_t chopping = VA_SLICE_DATA_FLAG_ALL; uint32_t bytes_remaining; + VAConfigAttrib attrib; + attrib.type = VAConfigAttribRTFormat; + va_status = vaGetConfigAttributes(jd_libva_ptr->va_display, VAProfileJPEGBaseline, VAEntrypointVLD, &attrib, 1); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaGetConfigAttributes failed. va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + goto cleanup; + } + va_status = vaCreateConfig(jd_libva_ptr->va_display, VAProfileJPEGBaseline, VAEntrypointVLD, &attrib, 1, &(jd_libva_ptr->va_config)); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateConfig failed. 
va_status = 0x%x", va_status); + status = DECODE_DRIVER_FAIL; + goto cleanup; + } + VASurfaceAttrib fourcc; + unsigned int surface_format = getSurfaceFormat(jd_libva_ptr, &fourcc); + jd_libva_ptr->fourcc = fourcc.value.value.i; +#ifdef JPEGDEC_USES_GEN + va_status = vaCreateSurfaces(jd_libva_ptr->va_display, surface_format, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + jd_libva_ptr->va_surfaces, + jd_libva_ptr->surface_count, &fourcc, 1); +#else + va_status = vaCreateSurfaces(jd_libva_ptr->va_display, VA_RT_FORMAT_YUV444, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + jd_libva_ptr->va_surfaces, + jd_libva_ptr->surface_count, NULL, 0); +#endif + JD_CHECK(va_status, cleanup); + va_status = vaCreateContext(jd_libva_ptr->va_display, jd_libva_ptr->va_config, + jd_libva_ptr->image_width, + jd_libva_ptr->image_height, + 0, //VA_PROGRESSIVE + jd_libva_ptr->va_surfaces, + jd_libva_ptr->surface_count, &(jd_libva_ptr->va_context)); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateContext failed. va_status = 0x%x", va_status); + return DECODE_DRIVER_FAIL; + } if (jd_libva_ptr->eoi_offset) bytes_remaining = jd_libva_ptr->eoi_offset - jd_libva_ptr->soi_offset; @@ -901,11 +797,32 @@ jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset : 0; WTRACE("vaSyncSurface failed. va_status = 0x%x", va_status); } + va_status = vaDestroyContext(jd_libva_ptr->va_display, jd_libva_ptr->va_context); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaDestroyContext failed. va_status = 0x%x", va_status); + return DECODE_DRIVER_FAIL; + } + jd_libva_ptr->va_context = VA_INVALID_ID; + + + + va_status = vaDestroyConfig(jd_libva_ptr->va_display, jd_libva_ptr->va_config); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaDestroyConfig failed. va_status = 0x%x", va_status); + return DECODE_DRIVER_FAIL; + } status = doColorConversion(jd_libva_ptr, jd_libva_ptr->va_surfaces[0], buf, lines); - + va_status = vaDestroySurfaces(jd_libva_ptr->va_display, jd_libva_ptr->va_surfaces, jd_libva_ptr->surface_count); ITRACE("Successfully decoded picture"); + + if (jd_libva_ptr->va_surfaces) { + free (jd_libva_ptr->va_surfaces); + jd_libva_ptr->va_surfaces = NULL; + } + + return status; cleanup: return DECODE_DRIVER_FAIL; -- cgit v1.2.3 From 3de9a8e67f9fb7013b65cfb870c63df4046ce49e Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Tue, 25 Jun 2013 17:57:56 +0800 Subject: Support AVC short format for protected video playback BZ: 97375 Support AVC short format for protected video playback on BYT platform. 
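[Editor's note -- illustrative sketch, not part of the original message:
"short format" here means the secure parser receives pre-parsed slice
headers instead of full slice NAL units. The vbp_h264_sliceheader bundle
added to h264.h below pairs a slice_header_t with its
dec_ref_pic_marking_t; filling one in for an IDR slice might look like:

    vbp_h264_sliceheader sh;
    memset(&sh, 0, sizeof(sh));
    sh.slice_header.nal_unit_type = 5;   /* h264_NAL_UNIT_TYPE_IDR */
    sh.slice_header.first_mb_in_slice = 0;
    sh.ref_pic_marking.idr_pic_flag = 1;
    sh.ref_pic_marking.no_output_of_prior_pics_flag = 0;

Field names are taken from the struct definitions in the diff; the
usage itself is hypothetical.]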
Change-Id: I83f677be0be60f0cd1e194ca5d5c0df7205f8d7f Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/100296 Reviewed-by: Shi, PingX Reviewed-by: Akula, VarshaX A Reviewed-by: Zurcher, Paul Reviewed-by: Poornachandran, Rajesh Tested-by: Sun, Hang L Reviewed-by: buildbot Tested-by: buildbot --- mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h | 64 + mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk | 33 + .../fw/codecs/h264/parser/h264parse_dpb.c | 9 +- .../codecs/h264/parser/viddec_h264secure_parse.c | 804 +++++++++ mix_vbp/viddec_fw/fw/parser/Android.mk | 7 + .../fw/parser/include/viddec_parser_ops.h | 6 + .../viddec_fw/fw/parser/vbp_h264secure_parser.c | 1830 ++++++++++++++++++++ .../viddec_fw/fw/parser/vbp_h264secure_parser.h | 70 + mix_vbp/viddec_fw/fw/parser/vbp_loader.c | 30 + mix_vbp/viddec_fw/fw/parser/vbp_loader.h | 17 +- mix_vbp/viddec_fw/fw/parser/vbp_utils.c | 43 + mix_vbp/viddec_fw/fw/parser/vbp_utils.h | 9 +- videodecoder/Android.mk | 9 +- videodecoder/VideoDecoderAVC.cpp | 13 +- videodecoder/VideoDecoderBase.cpp | 67 +- videodecoder/VideoDecoderBase.h | 5 + videodecoder/VideoDecoderHost.cpp | 4 +- .../securevideo/baytrail/VideoDecoderAVCSecure.cpp | 488 ++---- .../securevideo/baytrail/VideoDecoderAVCSecure.h | 46 +- videodecoder/securevideo/baytrail/secvideoparser.h | 157 ++ videodecoder/securevideo/baytrail/va_private.h | 77 + 21 files changed, 3415 insertions(+), 373 deletions(-) create mode 100644 mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264secure_parse.c create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_h264secure_parser.c create mode 100644 mix_vbp/viddec_fw/fw/parser/vbp_h264secure_parser.h create mode 100644 videodecoder/securevideo/baytrail/secvideoparser.h create mode 100644 videodecoder/securevideo/baytrail/va_private.h diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h index d6261d2..eac5541 100755 --- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h +++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h @@ -1045,8 +1045,72 @@ extern "C" { } #endif +#ifdef USE_AVC_SHORT_FORMAT +#define MAX_OP 16 + +typedef struct _dec_ref_pic_marking_t { + union { + uint8_t flags; + struct { + uint8_t idr_pic_flag:1; + uint8_t no_output_of_prior_pics_flag:1; + uint8_t long_term_reference_flag:1; + uint8_t adaptive_ref_pic_marking_mode_flag:1; + }; + }; + struct { + uint8_t memory_management_control_operation; + union { + struct { + uint8_t difference_of_pic_nums_minus1; + } op1; + struct { + uint8_t long_term_pic_num; + } op2; + struct { + uint8_t difference_of_pic_nums_minus1; + uint8_t long_term_frame_idx; + } op3; + struct { + uint8_t max_long_term_frame_idx_plus1; + } op4; + struct { + uint8_t long_term_frame_idx; + } op6; + }; + } op[MAX_OP]; +} dec_ref_pic_marking_t; + + +typedef struct _slice_header_t { + uint8_t nal_unit_type; + uint8_t pps_id; + uint8_t padding; + union { + uint8_t flags; + struct { + uint8_t field_pic_flag:1; + uint8_t bottom_field_flag:1; + }; + }; + uint32_t first_mb_in_slice; + uint32_t frame_num; + uint16_t idr_pic_id; + uint16_t pic_order_cnt_lsb; + int32_t delta_pic_order_cnt[2]; + int32_t delta_pic_order_cnt_bottom; +} slice_header_t; + + +typedef struct _vbp_h264_sliceheader { + slice_header_t slice_header; + dec_ref_pic_marking_t ref_pic_marking; +} vbp_h264_sliceheader; + +#endif + #endif //_H264_H_ diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk b/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk index d3e4910..62fe53d 100644 --- 
a/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/Android.mk @@ -30,3 +30,36 @@ LOCAL_SHARED_LIBRARIES := \ libmixvbp include $(BUILD_SHARED_LIBRARY) + +include $(CLEAR_VARS) +PLATFORM_SUPPORT_AVC_SHORT_FORMAT := baytrail + +ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),) +LOCAL_SRC_FILES := \ + h264parse.c \ + h264parse_bsd.c \ + h264parse_math.c \ + h264parse_mem.c \ + h264parse_sei.c \ + h264parse_pps.c \ + h264parse_sps.c \ + h264parse_dpb.c \ + h264parse_sh.c \ + viddec_h264secure_parse.c \ + mix_vbp_h264_stubs.c + +LOCAL_CFLAGS := -DVBP -DHOST_ONLY -DUSE_AVC_SHORT_FORMAT + +LOCAL_C_INCLUDES := \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/parser/include \ + $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/h264/include + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libmixvbp_h264secure +LOCAL_SHARED_LIBRARIES := libmixvbp + +include $(BUILD_SHARED_LIBRARY) + +endif diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c index 010e77b..4415d54 100755 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c @@ -2731,7 +2731,7 @@ void h264_dpb_idr_memory_management (h264_Info * pInfo,seq_param_set_used_ptr ac h264_dpb_unmark_for_reference(p_dpb, p_dpb->active_fs->fs_idc); h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc); //h264_send_new_display_frame(0x01); //send ignore_frame signal to Host - +#ifndef USE_AVC_SHORT_FORMAT /// Add into drop-out list for all frms in dpb without display if (!(viddec_h264_get_is_non_existent(p_dpb->active_fs))) { if ( viddec_h264_get_is_output(&(p_dpb->fs[p_dpb->fs_dpb_idc[idx]])) ) { //// This frame has been displayed but not released @@ -2742,6 +2742,7 @@ void h264_dpb_idr_memory_management (h264_Info * pInfo,seq_param_set_used_ptr ac p_dpb->frame_numbers_need_to_be_dropped ++; } } +#endif } } @@ -2951,13 +2952,14 @@ void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t id h264_dpb_set_active_fs(p_dpb, fs_idc); viddec_h264_set_is_frame_used(p_dpb->active_fs, 0); +#ifndef USE_AVC_SHORT_FORMAT //add to support frame relocation interface to host if (!(viddec_h264_get_is_non_existent(p_dpb->active_fs))) { p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs[fs_idc].fs_idc; p_dpb->frame_numbers_need_to_be_removed ++; } - +#endif ///////////////////////////////////////// Reset FS p_dpb->fs[fs_idc].fs_idc = MPD_DPB_FS_NULL_IDC; @@ -3305,9 +3307,10 @@ void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int if (viddec_h264_get_is_non_existent(p_dpb->active_fs) == 0) { *existing = 1; +#ifndef USE_AVC_SHORT_FORMAT p_dpb->frame_id_need_to_be_displayed[p_dpb->frame_numbers_need_to_be_displayed]=p_dpb->active_fs->fs_idc; p_dpb->frame_numbers_need_to_be_displayed++; - +#endif //if(direct) //h264_dpb_remove_frame_from_dpb(p_dpb, p_dpb->active_fs->fs_idc); // Remove dpb.fs_dpb_idc[pos] } diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264secure_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264secure_parse.c new file mode 100644 index 0000000..55225ed --- /dev/null +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264secure_parse.c @@ -0,0 +1,804 @@ +#include "viddec_fw_debug.h" +#include "viddec_parser_ops.h" + 
+#include "viddec_fw_workload.h" +#include "viddec_pm.h" + +#include "h264.h" +#include "h264parse.h" + +#include "viddec_h264_parse.h" +#include "h264parse_dpb.h" + +/* Init function which can be called to intialized local context on open and flush and preserve*/ +void viddec_h264secure_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) +{ + struct h264_viddec_parser* parser = ctxt; + h264_Info * pInfo = &(parser->info); + + if (!preserve) + { + /* we don't initialize this data if we want to preserve + sequence and gop information */ + h264_init_sps_pps(parser,persist_mem); + } + /* picture level info which will always be initialized */ + h264_init_Info_under_sps_pps_level(pInfo); +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 0; +#endif + return; +} + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +uint32_t viddec_h264secure_parse(void *parent, void *ctxt) +{ + struct h264_viddec_parser* parser = ctxt; + + h264_Info * pInfo = &(parser->info); + + h264_Status status = H264_STATUS_ERROR; + + + uint8_t nal_ref_idc = 0; + + ///// Parse NAL Unit header + pInfo->img.g_new_frame = 0; + pInfo->push_to_cur = 1; + pInfo->is_current_workload_done =0; + pInfo->nal_unit_type = 0; + + h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); + + ///// Check frame bounday for non-vcl elimitter + h264_check_previous_frame_end(pInfo); + + //////// Parse valid NAL unit + switch ( pInfo->nal_unit_type ) + { + case h264_NAL_UNIT_TYPE_IDR: + if (pInfo->got_start) { + pInfo->img.recovery_point_found |= 1; + } + + pInfo->sei_rp_received = 0; + + case h264_NAL_UNIT_TYPE_SLICE: + //////////////////////////////////////////////////////////////////////////// + // Step 1: Check start point + //////////////////////////////////////////////////////////////////////////// + // + /// Slice parsing must start from the valid start point( SPS, PPS, IDR or recovery point or primary_I) + /// 1) No start point reached, append current ES buffer to workload and release it + /// 2) else, start parsing + // + //if(pInfo->got_start && ((pInfo->sei_information.recovery_point) || (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR))) + //{ + //pInfo->img.recovery_point_found = 1; + //} + { + + h264_Slice_Header_t next_SliceHeader; + + /// Reset next slice header + h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t)); + next_SliceHeader.nal_ref_idc = nal_ref_idc; + + if ( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start)) + { + pInfo->img.recovery_point_found |=4; + } + pInfo->primary_pic_type_plus_one = 0; + + + +#ifndef VBP + if (pInfo->img.recovery_point_found == 0) { + pInfo->img.structure = FRAME; + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + break; + } +#endif + + //////////////////////////////////////////////////////////////////////////// + // Step 2: Parsing slice header + //////////////////////////////////////////////////////////////////////////// + /// PWT + pInfo->h264_pwt_start_byte_offset=0; + pInfo->h264_pwt_start_bit_offset=0; + pInfo->h264_pwt_end_byte_offset=0; + pInfo->h264_pwt_end_bit_offset=0; + pInfo->h264_pwt_enabled =0; + /// IDR flag + next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); + + + /// Pass slice header + status = 
h264_Parse_Slice_Layer_Without_Partitioning_RBSP(parent, pInfo, &next_SliceHeader); + + pInfo->sei_information.recovery_point = 0; + + if (next_SliceHeader.sh_error & 3) { + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + + // Error type definition, refer to viddec_fw_common_defs.h + // if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17) + // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18) + // if this is frame based, both 2 bits should be set + pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + + break; + } + pInfo->img.current_slice_num++; + + +#ifdef DUMP_HEADER_INFO + dump_slice_header(pInfo, &next_SliceHeader); +////h264_print_decoder_values(pInfo); +#endif + + + //////////////////////////////////////////////////////////////////////////// + // Step 3: Processing if new picture coming + // 1) if it's the second field + // 2) if it's a new frame + //////////////////////////////////////////////////////////////////////////// + //AssignQuantParam(pInfo); + if (h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader)) + { + // + ///----------------- New Picture.boundary detected-------------------- + // + pInfo->img.g_new_pic++; + + // + // Complete previous picture + h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old + //h264_hdr_post_poc(0, 0, use_old); + + // + // Update slice structures: + h264_update_old_slice(pInfo, next_SliceHeader); //cur->old; next->cur; + + // + // 1) if resolution change: reset dpb + // 2) else: init frame store + h264_update_img_info(pInfo); //img, dpb + + // + ///----------------- New frame.boundary detected-------------------- + // + pInfo->img.second_field = h264_is_second_field(pInfo); + if (pInfo->img.second_field == 0) + { + pInfo->img.g_new_frame = 1; + h264_dpb_update_queue_dangling_field(pInfo); + + // + /// DPB management + /// 1) check the gaps + /// 2) assign fs for non-exist frames + /// 3) fill the gaps + /// 4) store frame into DPB if ... 
+ // + //if(pInfo->SliceHeader.redundant_pic_cnt) + { + h264_dpb_gaps_in_frame_num_mem_management(pInfo); + } + +#ifdef DUMP_HEADER_INFO + dump_new_picture_attr(pInfo, pInfo->SliceHeader.frame_num); +#endif + } + // + /// Decoding POC + h264_hdr_decoding_poc (pInfo, 0, 0); + + // + /// Init Frame Store for next frame + h264_dpb_init_frame_store (pInfo); + pInfo->img.current_slice_num = 1; + + if (pInfo->SliceHeader.first_mb_in_slice != 0) + { + ////Come here means we have slice lost at the beginning, since no FMO support + pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17); + } + + // + /// Emit out the New Frame + if (pInfo->img.g_new_frame) + { + h264_parse_emit_start_new_frame(parent, pInfo); + } + + h264_parse_emit_current_pic(parent, pInfo); + } + else ///////////////////////////////////////////////////// If Not a picture start + { + // + /// Update slice structures: cur->old; next->cur; + h264_update_old_slice(pInfo, next_SliceHeader); + + // + /// 1) if resolution change: reset dpb + /// 2) else: update img info + h264_update_img_info(pInfo); + } + + + ////////////////////////////////////////////////////////////// + // Step 4: DPB reference list init and reordering + ////////////////////////////////////////////////////////////// + + //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field + h264_update_frame_type(pInfo); + + + h264_dpb_update_ref_lists( pInfo); + +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if ((pInfo->dpb.ltref_frames_in_buffer + pInfo->dpb.ref_frames_in_buffer ) > pInfo->active_SPS.num_ref_frames) + { + pInfo->sw_bail = 1; + } +#endif +#endif +#ifdef DUMP_HEADER_INFO + dump_ref_list(pInfo); +#endif + /// Emit out the current "good" slice + h264_parse_emit_current_slice(parent, pInfo); + + } + break; + + ///// * Main profile doesn't support Data Partition, skipped.... *//// + case h264_NAL_UNIT_TYPE_DPA: + case h264_NAL_UNIT_TYPE_DPB: + case h264_NAL_UNIT_TYPE_DPC: + //OS_INFO("***********************DP feature, not supported currently*******************\n"); + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + status = H264_STATUS_NOTSUPPORT; + break; + + //// * Parsing SEI info *//// + case h264_NAL_UNIT_TYPE_SEI: + status = H264_STATUS_OK; + + //OS_INFO("*****************************SEI**************************************\n"); + if (pInfo->sps_valid) { + //h264_user_data_t user_data; /// Replace with tmp buffer while porting to FW + pInfo->number_of_first_au_info_nal_before_first_slice++; + /// parsing the SEI info + status = h264_Parse_Supplemental_Enhancement_Information_Message(parent, pInfo); + } + + //h264_rbsp_trailing_bits(pInfo); + break; + case h264_NAL_UNIT_TYPE_SPS: + { + //OS_INFO("*****************************SPS**************************************\n"); + /// + /// Can not define local SPS since the Current local stack size limitation! 
+    /// Could be changed after the limitation gone
+    ///
+        uint8_t old_sps_id=0;
+        vui_seq_parameters_t_not_used vui_seq_not_used;
+
+        old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+        h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+
+
+        status = h264_Parse_SeqParameterSet(parent, pInfo, &(pInfo->active_SPS), &vui_seq_not_used, (int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL);
+        if (status == H264_STATUS_OK) {
+            h264_Parse_Copy_Sps_To_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_SPS.seq_parameter_set_id);
+            pInfo->sps_valid = 1;
+
+            if (1==pInfo->active_SPS.pic_order_cnt_type) {
+                h264_Parse_Copy_Offset_Ref_Frames_To_DDR(pInfo,(int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL,pInfo->active_SPS.seq_parameter_set_id);
+            }
+
+#ifdef DUMP_HEADER_INFO
+            dump_sps(&(pInfo->active_SPS));
+#endif
+
+        }
+        ///// Restore the active SPS if new arrival's id changed
+        if (old_sps_id>=MAX_NUM_SPS) {
+            h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+            pInfo->active_SPS.seq_parameter_set_id = 0xff;
+        }
+        else {
+            if (old_sps_id!=pInfo->active_SPS.seq_parameter_set_id) {
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            }
+            else {
+                //h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set));
+                pInfo->active_SPS.seq_parameter_set_id = 0xff;
+            }
+        }
+
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+    }
+    break;
+    case h264_NAL_UNIT_TYPE_PPS:
+    {
+        //OS_INFO("*****************************PPS**************************************\n");
+
+        uint32_t old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+        uint32_t old_pps_id = pInfo->active_PPS.pic_parameter_set_id;
+
+        h264_memset(&pInfo->active_PPS, 0x0, sizeof(pic_param_set));
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+
+        if (h264_Parse_PicParameterSet(parent, pInfo, &pInfo->active_PPS)== H264_STATUS_OK)
+        {
+            h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_PPS.seq_parameter_set_id);
+            if (old_sps_id != pInfo->active_SPS.seq_parameter_set_id)
+            {
+                pInfo->Is_SPS_updated = 1;
+            }
+            if (pInfo->active_SPS.seq_parameter_set_id != 0xff) {
+                h264_Parse_Copy_Pps_To_DDR(pInfo, &pInfo->active_PPS, pInfo->active_PPS.pic_parameter_set_id);
+                pInfo->got_start = 1;
+                if (pInfo->sei_information.recovery_point)
+                {
+                    pInfo->img.recovery_point_found |= 2;
+
+                    //// Enable the RP recovery if no IDR ---Cisco
+                    if ((pInfo->img.recovery_point_found & 1)==0)
+                        pInfo->sei_rp_received = 1;
+                }
+            }
+            else
+            {
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            }
+#ifdef DUMP_HEADER_INFO
+            dump_pps(&(pInfo->active_PPS));
+#endif
+        } else {
+            if (old_sps_id < MAX_NUM_SPS)
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            if (old_pps_id < MAX_NUM_PPS)
+                h264_Parse_Copy_Pps_From_DDR(pInfo, &(pInfo->active_PPS), old_pps_id);
+        }
+
+    } //// End of PPS parsing
+    break;
+
+
+    case h264_NAL_UNIT_TYPE_EOSeq:
+    case h264_NAL_UNIT_TYPE_EOstream:
+
+        h264_parse_emit_eos(parent, pInfo);
+        h264_init_dpb(&(pInfo->dpb));
+
+        pInfo->is_current_workload_done=1;
+
+        /* picture level info which will always be initialized */
+        //h264_init_Info_under_sps_pps_level(pInfo);
+
+        ////reset the pInfo here
+        //viddec_h264_init(ctxt, (uint32_t *)parser->sps_pps_ddr_paddr, false);
+
+
+        status = H264_STATUS_OK;
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+        break;
+
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+#if 1
+        ///// primary_pic_type
+    {
+        uint32_t code = 0xff;
+        int32_t ret = 0;
+        ret = viddec_pm_get_bits(parent, (uint32_t *)&(code), 3);
+
+        if (ret != -1) {
+            //if(pInfo->got_start && (code == 0))
+            //{
+            //pInfo->img.recovery_point_found |= 4;
+            //}
+
pInfo->primary_pic_type_plus_one = (uint8_t)(code)+1; + status = H264_STATUS_OK; + } + pInfo->number_of_first_au_info_nal_before_first_slice++; + break; + } +#endif + + case h264_NAL_UNIT_TYPE_Reserved1: + case h264_NAL_UNIT_TYPE_Reserved2: + case h264_NAL_UNIT_TYPE_Reserved3: + case h264_NAL_UNIT_TYPE_Reserved4: + case h264_NAL_UNIT_TYPE_Reserved5: + status = H264_STATUS_OK; + pInfo->number_of_first_au_info_nal_before_first_slice++; + break; + + case h264_NAL_UNIT_TYPE_filler_data: + status = H264_STATUS_OK; + break; + case h264_NAL_UNIT_TYPE_ACP: + break; + case h264_NAL_UNIT_TYPE_SPS_extension: + case h264_NAL_UNIT_TYPE_unspecified: + case h264_NAL_UNIT_TYPE_unspecified2: + status = H264_STATUS_OK; + //nothing + break; + default: + status = H264_STATUS_OK; + break; + } + + //pInfo->old_nal_unit_type = pInfo->nal_unit_type; + switch ( pInfo->nal_unit_type ) + { + case h264_NAL_UNIT_TYPE_IDR: + case h264_NAL_UNIT_TYPE_SLICE: + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + case h264_NAL_UNIT_TYPE_SPS: + case h264_NAL_UNIT_TYPE_PPS: + case h264_NAL_UNIT_TYPE_SEI: + case h264_NAL_UNIT_TYPE_EOSeq: + case h264_NAL_UNIT_TYPE_EOstream: + case h264_NAL_UNIT_TYPE_Reserved1: + case h264_NAL_UNIT_TYPE_Reserved2: + case h264_NAL_UNIT_TYPE_Reserved3: + case h264_NAL_UNIT_TYPE_Reserved4: + case h264_NAL_UNIT_TYPE_Reserved5: + { + pInfo->old_nal_unit_type = pInfo->nal_unit_type; + break; + } + default: + break; + } + + return status; +} + +void viddec_h264secure_get_context_size(viddec_parser_memory_sizes_t *size) +{ + /* Should return size of my structure */ + size->context_size = sizeof(struct h264_viddec_parser); + size->persist_size = MAX_NUM_SPS * sizeof(seq_param_set_all) + + MAX_NUM_PPS * sizeof(pic_param_set) + + MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE + + sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +void viddec_h264secure_flush(void *parent, void *ctxt) +{ + int i; + struct h264_viddec_parser* parser = ctxt; + h264_Info * pInfo = &(parser->info); + + /* just flush dpb and disable output */ + h264_dpb_flush_dpb(pInfo, 0, pInfo->img.second_field, pInfo->active_SPS.num_ref_frames); + + /* reset the dpb to the initial state, avoid parser store + wrong data to dpb in next slice parsing */ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + for (i = 0; i < NUM_DPB_FRAME_STORES; i++) + { + p_dpb->fs[i].fs_idc = MPD_DPB_FS_NULL_IDC; + p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC; + } + p_dpb->used_size = 0; + p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC; + p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC; + + return; +} + +h264_Status h264secure_Parse_Dec_Ref_Pic_Marking(h264_Info* pInfo, void *newdata, h264_Slice_Header_t*SliceHeader) +{ + vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) newdata; + + uint8_t i = 0; + uint32_t code; + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + SliceHeader->sh_dec_refpic.no_output_of_prior_pics_flag = (uint8_t)sliceheader_p->ref_pic_marking.no_output_of_prior_pics_flag; + SliceHeader->sh_dec_refpic.long_term_reference_flag = (uint8_t)sliceheader_p->ref_pic_marking.long_term_reference_flag; + pInfo->img.long_term_reference_flag = SliceHeader->sh_dec_refpic.long_term_reference_flag; + } + else + { + 
SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag = sliceheader_p->ref_pic_marking.adaptive_ref_pic_marking_mode_flag;
+
+        ///////////////////////////////////////////////////////////////////////////////////////
+        // adaptive_ref_pic_marking_mode_flag selects the reference picture marking mode:
+        //   Sliding window reference picture marking mode: A marking mode
+        //     providing a first-in first-out mechanism for short-term reference pictures.
+        //   Adaptive reference picture marking mode: A reference picture
+        //     marking mode providing syntax elements to specify marking of
+        //     reference pictures as "unused for reference" and to assign long-term
+        //     frame indices.
+        ///////////////////////////////////////////////////////////////////////////////////////
+
+        if (SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag)
+        {
+            do
+            {
+                if (i < NUM_MMCO_OPERATIONS)
+                {
+                    code = sliceheader_p->ref_pic_marking.op[i].memory_management_control_operation;
+                    SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = code;
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1)
+                    {
+                        SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = sliceheader_p->ref_pic_marking.op[i].op1.difference_of_pic_nums_minus1;
+                    }
+
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2)
+                    {
+                        SliceHeader->sh_dec_refpic.long_term_pic_num[i] = sliceheader_p->ref_pic_marking.op[i].op2.long_term_pic_num;
+                    }
+
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6)
+                    {
+                        SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = sliceheader_p->ref_pic_marking.op[i].op6.long_term_frame_idx;
+                    }
+
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) {
+                        SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = sliceheader_p->ref_pic_marking.op[i].op3.difference_of_pic_nums_minus1;
+                        SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = sliceheader_p->ref_pic_marking.op[i].op3.long_term_frame_idx;
+                    }
+
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4)
+                    {
+                        SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = sliceheader_p->ref_pic_marking.op[i].op4.max_long_term_frame_idx_plus1;
+                    }
+
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5)
+                    {
+                        pInfo->img.curr_has_mmco_5 = 1;
+                    }
+                }
+
+                if (i >= NUM_MMCO_OPERATIONS) {
+                    return H264_STATUS_ERROR;
+                }
+            } while (SliceHeader->sh_dec_refpic.memory_management_control_operation[i++] != 0);
+        }
+    }
+
+    SliceHeader->sh_dec_refpic.dec_ref_pic_marking_count = i;
+
+    return H264_STATUS_OK;
+}
+
+uint32_t h264secure_Update_Slice_Header(h264_Info* pInfo, void *newdata, h264_Slice_Header_t *SliceHeader)
+{
+    h264_Status retStatus = H264_STATUS_OK;
+    uint8_t data;
+    vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) newdata;
+    ///// first_mb_in_slice
+    SliceHeader->first_mb_in_slice = sliceheader_p->slice_header.first_mb_in_slice;
+
+    SliceHeader->pic_parameter_id = (uint8_t)sliceheader_p->slice_header.pps_id;
+    retStatus = h264_active_par_set(pInfo, SliceHeader);
+
+    switch (pInfo->active_SPS.profile_idc)
+    {
+    case h264_ProfileBaseline:
+    case h264_ProfileMain:
+    case h264_ProfileExtended:
+        pInfo->active_PPS.transform_8x8_mode_flag=0;
+        pInfo->active_PPS.pic_scaling_matrix_present_flag =0;
+        pInfo->active_PPS.second_chroma_qp_index_offset = pInfo->active_PPS.chroma_qp_index_offset;
+    default:
+        break;
+    }
+
+    uint32_t code;
+    int32_t max_mb_num=0;
+
+    SliceHeader->frame_num =
(int32_t)sliceheader_p->slice_header.frame_num; + + /// Picture structure + SliceHeader->structure = FRAME; + SliceHeader->field_pic_flag = 0; + SliceHeader->bottom_field_flag = 0; + + if (!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag)) + { + /// field_pic_flag + SliceHeader->field_pic_flag = (uint8_t)sliceheader_p->slice_header.field_pic_flag; + + if (SliceHeader->field_pic_flag) + { + SliceHeader->bottom_field_flag = (uint8_t)sliceheader_p->slice_header.bottom_field_flag; + SliceHeader->structure = SliceHeader->bottom_field_flag? BOTTOM_FIELD: TOP_FIELD; + } + } + + ////// Check valid or not of first_mb_in_slice + if (SliceHeader->structure == FRAME) { + max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs; + } else { + max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs/2; + } + + + if (pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) { + SliceHeader->first_mb_in_slice <<=1; + } + + if (SliceHeader->first_mb_in_slice >= max_mb_num) { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + + + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + SliceHeader->idr_pic_id = sliceheader_p->slice_header.idr_pic_id; + } + + if (pInfo->active_SPS.pic_order_cnt_type == 0) + { + SliceHeader->pic_order_cnt_lsb = (uint32_t)sliceheader_p->slice_header.pic_order_cnt_lsb; + + + if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt_bottom = sliceheader_p->slice_header.delta_pic_order_cnt_bottom; + } + else + { + SliceHeader->delta_pic_order_cnt_bottom = 0; + } + } + + if ((pInfo->active_SPS.pic_order_cnt_type == 1) && !(pInfo->active_SPS.delta_pic_order_always_zero_flag)) + { + SliceHeader->delta_pic_order_cnt[0] = sliceheader_p->slice_header.delta_pic_order_cnt[0]; + if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt[1] = sliceheader_p->slice_header.delta_pic_order_cnt[1]; + } + } +/* + if (pInfo->active_PPS.redundant_pic_cnt_present_flag) + { + SliceHeader->redundant_pic_cnt = sliceheader_p->slice_header.redundant_pic_cnt; + if (SliceHeader->redundant_pic_cnt > 127) { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + } else { + SliceHeader->redundant_pic_cnt = 0; + } +*/ + //// + //// Parse Ref_pic marking if there + //// + if (SliceHeader->nal_ref_idc != 0) + { + if (h264secure_Parse_Dec_Ref_Pic_Marking(pInfo, newdata, SliceHeader) != H264_STATUS_OK) + { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + } + retStatus = H264_STATUS_OK; + return retStatus; +} +uint32_t viddec_h264secure_update(void *parent, void *data, uint32_t size) +{ + viddec_pm_cxt_t * parser_cxt = (viddec_pm_cxt_t *)parent; + struct h264_viddec_parser* parser = (struct h264_viddec_parser*) &parser_cxt->codec_data[0]; + h264_Info * pInfo = &(parser->info); + + h264_Status status = H264_STATUS_ERROR; + vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) data; + + pInfo->img.g_new_frame = 0; + pInfo->push_to_cur = 1; + pInfo->is_current_workload_done =0; + pInfo->nal_unit_type = 0; + pInfo->nal_unit_type = sliceheader_p->slice_header.nal_unit_type & 0x1F; + + h264_Slice_Header_t next_SliceHeader; + + /// Reset next slice header + h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t)); + next_SliceHeader.nal_ref_idc = (sliceheader_p->slice_header.nal_unit_type & 0x60) >> 5; + + if ( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start)) + { + 
pInfo->img.recovery_point_found |=4; + } + pInfo->primary_pic_type_plus_one = 0; + + //////////////////////////////////////////////////////////////////////////// + // Step 2: Parsing slice header + //////////////////////////////////////////////////////////////////////////// + /// PWT + pInfo->h264_pwt_start_byte_offset=0; + pInfo->h264_pwt_start_bit_offset=0; + pInfo->h264_pwt_end_byte_offset=0; + pInfo->h264_pwt_end_bit_offset=0; + pInfo->h264_pwt_enabled =0; + /// IDR flag + next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); + + /// Pass slice header + status = h264secure_Update_Slice_Header(pInfo, sliceheader_p, &next_SliceHeader); + + pInfo->sei_information.recovery_point = 0; + pInfo->img.current_slice_num++; + + + //////////////////////////////////////////////////////////////////////////// + // Step 3: Processing if new picture coming + // 1) if it's the second field + // 2) if it's a new frame + //////////////////////////////////////////////////////////////////////////// + //AssignQuantParam(pInfo); + if (h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader)) + { + // + ///----------------- New Picture.boundary detected-------------------- + // + pInfo->img.g_new_pic++; + + // + // Complete previous picture + h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old + + // + // Update slice structures: + h264_update_old_slice(pInfo, next_SliceHeader); //cur->old; next->cur; + + // + // 1) if resolution change: reset dpb + // 2) else: init frame store + h264_update_img_info(pInfo); //img, dpb + + // + ///----------------- New frame.boundary detected-------------------- + // + pInfo->img.second_field = h264_is_second_field(pInfo); + if (pInfo->img.second_field == 0) + { + pInfo->img.g_new_frame = 1; + h264_dpb_update_queue_dangling_field(pInfo); + h264_dpb_gaps_in_frame_num_mem_management(pInfo); + } + /// Decoding POC + h264_hdr_decoding_poc (pInfo, 0, 0); + // + /// Init Frame Store for next frame + h264_dpb_init_frame_store (pInfo); + pInfo->img.current_slice_num = 1; + + if (pInfo->SliceHeader.first_mb_in_slice != 0) + { + ////Come here means we have slice lost at the beginning, since no FMO support + pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17); + } + } + else ///////////////////////////////////////////////////// If Not a picture start + { + /// Update slice structures: cur->old; next->cur; + h264_update_old_slice(pInfo, next_SliceHeader); + /// 1) if resolution change: reset dpb + /// 2) else: update img info + h264_update_img_info(pInfo); + } + return status; +} diff --git a/mix_vbp/viddec_fw/fw/parser/Android.mk b/mix_vbp/viddec_fw/fw/parser/Android.mk index 659b473..646e910 100644 --- a/mix_vbp/viddec_fw/fw/parser/Android.mk +++ b/mix_vbp/viddec_fw/fw/parser/Android.mk @@ -51,4 +51,11 @@ LOCAL_C_INCLUDES += $(VENDORS_INTEL_MRST_MIXVBP_ROOT)/viddec_fw/fw/codecs/vp8/in LOCAL_CFLAGS += -DUSE_HW_VP8 endif +PLATFORM_SUPPORT_AVC_SHORT_FORMAT := \ + baytrail + +ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),) +LOCAL_CFLAGS += -DUSE_AVC_SHORT_FORMAT +LOCAL_SRC_FILES += vbp_h264secure_parser.c +endif include $(BUILD_SHARED_LIBRARY) diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h index 561b179..66812f5 100644 --- a/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h +++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h @@ -26,6 +26,9 @@ typedef uint32_t (*fn_is_frame_start)(void *ctxt); 
 typedef uint32_t (*fn_gen_contrib_tags)(void *parent, uint32_t ignore_partial);
 typedef uint32_t (*fn_gen_assoc_tags)(void *parent);
 typedef void (*fn_flush_parser) (void *parent, void *ctxt);
+#ifdef USE_AVC_SHORT_FORMAT
+typedef uint32_t (*fn_update_data)(void *parent, void *data, uint32_t size);
+#endif
 
 typedef struct
@@ -39,6 +42,9 @@ typedef struct
     fn_gen_contrib_tags gen_contrib_tags;
     fn_gen_assoc_tags gen_assoc_tags;
     fn_flush_parser flush;
+#ifdef USE_AVC_SHORT_FORMAT
+    fn_update_data update_data;
+#endif
 } viddec_parser_ops_t;
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264secure_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264secure_parser.c
new file mode 100644
index 0000000..498cbc4
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264secure_parser.c
@@ -0,0 +1,1830 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#include <dlfcn.h>
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_h264secure_parser.h"
+
+#define TERMINATE_KEY 0xFFFFFFFF
+
+typedef struct vbp_h264secure_parser_private vbp_h264secure_parser_private;
+
+typedef enum
+{
+    H264_BS_LENGTH_PREFIXED,
+    H264_BS_SC_PREFIXED,
+    H264_BS_SINGLE_NAL
+} H264_BS_PATTERN;
+
+struct vbp_h264secure_parser_private
+{
+    /* number of bytes used to encode length of NAL payload.  If parser does not receive configuration data
+       and NAL_length_size is equal to zero when bitstream parsing begins, we assume bitstream is in AnnexB
+       byte stream format.
*/ + int NAL_length_size; + + /* indicate if stream is length prefixed */ + int length_prefix_verified; + + H264_BS_PATTERN bitstream_pattern; + + uint8_t* start; + int32_t offset; + int32_t size; +}; + +/* default scaling list table */ +static unsigned char Default_4x4_Intra[16] = +{ + 6,13,20,28, + 13,20,28,32, + 20,28,32,37, + 28,32,37,42 +}; + +static unsigned char Default_4x4_Inter[16] = +{ + 10,14,20,24, + 14,20,24,27, + 20,24,27,30, + 24,27,30,34 +}; + +static unsigned char Default_8x8_Intra[64] = +{ + 6,10,13,16,18,23,25,27, + 10,11,16,18,23,25,27,29, + 13,16,18,23,25,27,29,31, + 16,18,23,25,27,29,31,33, + 18,23,25,27,29,31,33,36, + 23,25,27,29,31,33,36,38, + 25,27,29,31,33,36,38,40, + 27,29,31,33,36,38,40,42 +}; + +static unsigned char Default_8x8_Inter[64] = +{ + 9,13,15,17,19,21,22,24, + 13,13,17,19,21,22,24,25, + 15,17,19,21,22,24,25,27, + 17,19,21,22,24,25,27,28, + 19,21,22,24,25,27,28,30, + 21,22,24,25,27,28,30,32, + 22,24,25,27,28,30,32,33, + 24,25,27,28,30,32,33,35 +}; + +static unsigned char quant_flat[16] = +{ + 16,16,16,16, + 16,16,16,16, + 16,16,16,16, + 16,16,16,16 +}; + +static unsigned char quant8_flat[64] = +{ + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16 +}; + +static unsigned char* UseDefaultList[8] = +{ + Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Intra, + Default_4x4_Inter, Default_4x4_Inter, Default_4x4_Inter, + Default_8x8_Intra, + Default_8x8_Inter +}; + +static uint8 h264_aspect_ratio_table[][2] = +{ + {0, 0}, + {1, 1}, + {12, 11}, + {10, 11}, + {16, 11}, + {40, 33}, + {24, 11}, + {20, 11}, + {32, 11}, + {80, 33}, + {18, 11}, + {15, 11}, + {64, 33}, + {160, 99}, + {4, 3}, + {3, 2}, + {2, 1}, + // reserved + {0, 0} +}; + + + +/** + * + */ +uint32 vbp_init_parser_entries_h264secure(vbp_context *pcontext) +{ + if (NULL == pcontext->parser_ops) + { + return VBP_PARM; + } + pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_h264secure_init"); + if (NULL == pcontext->parser_ops->init) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->parse_sc = viddec_parse_sc; + + pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264secure_parse"); + if (NULL == pcontext->parser_ops->parse_syntax) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264secure_get_context_size"); + if (NULL == pcontext->parser_ops->get_cxt_size) + { + ETRACE ("Failed to set entry point." 
); + return VBP_LOAD; + } + + pcontext->parser_ops->update_data = dlsym(pcontext->fd_parser, "viddec_h264secure_update"); + if (NULL == pcontext->parser_ops->update_data) + { + ETRACE ("Failed to set entry point."); + return VBP_LOAD; + } + + /* entry point not needed */ + pcontext->parser_ops->is_wkld_done = NULL; + pcontext->parser_ops->flush = NULL; + pcontext->parser_ops->is_frame_start = NULL; + return VBP_OK; +} + + +/** + * + */ +uint32 vbp_allocate_query_data_h264secure(vbp_context *pcontext) +{ + if (NULL != pcontext->query_data) + { + return VBP_PARM; + } + + pcontext->query_data = NULL; + vbp_data_h264 *query_data = NULL; + + query_data = vbp_malloc_set0(vbp_data_h264, 1); + if (NULL == query_data) + { + goto cleanup; + } + + /* assign the pointer */ + pcontext->query_data = (void *)query_data; + + query_data->pic_data = vbp_malloc_set0(vbp_picture_data_h264, MAX_NUM_PICTURES); + if (NULL == query_data->pic_data) + { + goto cleanup; + } + + int i; + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferH264, 1); + if (NULL == query_data->pic_data[i].pic_parms) + { + goto cleanup; + } + query_data->pic_data[i].num_slices = 0; + query_data->pic_data[i].slc_data = vbp_malloc_set0(vbp_slice_data_h264, MAX_NUM_SLICES); + if (NULL == query_data->pic_data[i].slc_data) + { + goto cleanup; + } + } + + + query_data->IQ_matrix_buf = vbp_malloc_set0(VAIQMatrixBufferH264, 1); + if (NULL == query_data->IQ_matrix_buf) + { + goto cleanup; + } + + query_data->codec_data = vbp_malloc_set0(vbp_codec_data_h264, 1); + if (NULL == query_data->codec_data) + { + goto cleanup; + } + + pcontext->parser_private = NULL; + vbp_h264secure_parser_private *parser_private = NULL; + + parser_private = vbp_malloc_set0(vbp_h264secure_parser_private, 1); + if (NULL == parser_private) + { + goto cleanup; + } + + /* assign the pointer */ + pcontext->parser_private = (void *)parser_private; + + /* init the pointer */ + parser_private->start = 0; + parser_private->offset = 0; + parser_private->size = 0; + parser_private->NAL_length_size = 0; + parser_private->length_prefix_verified = 0; + parser_private->bitstream_pattern = H264_BS_SC_PREFIXED; + + return VBP_OK; + +cleanup: + vbp_free_query_data_h264secure(pcontext); + + return VBP_MEM; +} + +uint32 vbp_free_query_data_h264secure(vbp_context *pcontext) +{ + if (NULL != pcontext->parser_private) + { + free(pcontext->parser_private); + pcontext->parser_private = NULL; + } + + if (NULL == pcontext->query_data) + { + return VBP_OK; + } + + int i; + vbp_data_h264 *query_data; + query_data = (vbp_data_h264 *)pcontext->query_data; + + if (query_data->pic_data) + { + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + free(query_data->pic_data[i].slc_data); + free(query_data->pic_data[i].pic_parms); + } + free(query_data->pic_data); + } + + free(query_data->IQ_matrix_buf); + free(query_data->codec_data); + free(query_data); + + pcontext->query_data = NULL; + + return VBP_OK; +} + + +static inline uint16_t vbp_utils_ntohs(uint8_t* p) +{ + uint16_t i = ((*p) << 8) + ((*(p+1))); + return i; +} + +static inline uint32_t vbp_utils_ntohl(uint8_t* p) +{ + uint32_t i = ((*p) << 24) + ((*(p+1)) << 16) + ((*(p+2)) << 8) + ((*(p+3))); + return i; +} + + +static inline void vbp_set_VAPicture_h264secure( + int curr_picture_structure, + int bottom_field, + frame_store* store, + VAPictureH264* pic) +{ + if (FRAME == curr_picture_structure) + { + if (FRAME != viddec_h264_get_dec_structure(store)) + { + WTRACE("Reference picture 
structure is not frame for current frame picture!");
+        }
+        pic->flags = 0;
+        pic->TopFieldOrderCnt = store->top_field.poc;
+        pic->BottomFieldOrderCnt = store->bottom_field.poc;
+    }
+    else
+    {
+        if (FRAME == viddec_h264_get_dec_structure(store))
+        {
+            WTRACE("reference picture structure is frame for current field picture!");
+        }
+        if (bottom_field)
+        {
+            pic->flags = VA_PICTURE_H264_BOTTOM_FIELD;
+            pic->TopFieldOrderCnt = store->top_field.poc;
+            pic->BottomFieldOrderCnt = store->bottom_field.poc;
+        }
+        else
+        {
+            pic->flags = VA_PICTURE_H264_TOP_FIELD;
+            pic->TopFieldOrderCnt = store->top_field.poc;
+            pic->BottomFieldOrderCnt = store->bottom_field.poc;
+        }
+    }
+}
+
+static inline void vbp_set_slice_ref_list_h264secure(
+    struct h264_viddec_parser* h264_parser,
+    VASliceParameterBufferH264 *slc_parms)
+{
+    int i, j;
+    int num_ref_idx_active = 0;
+    h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
+    uint8_t* p_list = NULL;
+    VAPictureH264* refPicListX = NULL;
+    frame_store* fs = NULL;
+
+    /* initialize ref picture list, set picture id and flags to invalid. */
+
+    for (i = 0; i < 2; i++)
+    {
+        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);
+        for (j = 0; j < 32; j++)
+        {
+            refPicListX->picture_id = VA_INVALID_SURFACE;
+            refPicListX->frame_idx = 0;
+            refPicListX->flags = VA_PICTURE_H264_INVALID;
+            refPicListX->TopFieldOrderCnt = 0;
+            refPicListX->BottomFieldOrderCnt = 0;
+            refPicListX++;
+        }
+    }
+
+    for (i = 0; i < 2; i++)
+    {
+        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);
+
+        if ((i == 0) &&
+            ((h264_PtypeB == slice_header->slice_type) ||
+             (h264_PtypeP == slice_header->slice_type)))
+        {
+            num_ref_idx_active = slice_header->num_ref_idx_l0_active;
+            if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag)
+            {
+                p_list = h264_parser->info.slice_ref_list0;
+            }
+            else
+            {
+                p_list = h264_parser->info.dpb.listX_0;
+            }
+        }
+        else if ((i == 1) && (h264_PtypeB == slice_header->slice_type))
+        {
+            num_ref_idx_active = slice_header->num_ref_idx_l1_active;
+            if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag)
+            {
+                p_list = h264_parser->info.slice_ref_list1;
+            }
+            else
+            {
+                p_list = h264_parser->info.dpb.listX_1;
+            }
+        }
+        else
+        {
+            num_ref_idx_active = 0;
+            p_list = NULL;
+        }
+
+
+        for (j = 0; j < num_ref_idx_active; j++)
+        {
+            fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]);
+
+            /* bit 5 indicates if reference picture is bottom field */
+            vbp_set_VAPicture_h264secure(
+                h264_parser->info.img.structure,
+                (p_list[j] & 0x20) >> 5,
+                fs,
+                refPicListX);
+
+            refPicListX->frame_idx = fs->frame_num;
+            refPicListX->flags |= viddec_h264_get_is_long_term(fs) ?
VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE; + refPicListX++; + } + } +} + +static inline void vbp_set_pre_weight_table_h264secure( + struct h264_viddec_parser* h264_parser, + VASliceParameterBufferH264 *slc_parms) +{ + h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader); + int i, j; + + if ((((h264_PtypeP == slice_header->slice_type) || + (h264_PtypeB == slice_header->slice_type)) && + h264_parser->info.active_PPS.weighted_pred_flag) || + ((h264_PtypeB == slice_header->slice_type) && + (1 == h264_parser->info.active_PPS.weighted_bipred_idc))) + { + slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom; + slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom; + slc_parms->luma_weight_l0_flag = slice_header->sh_predwttbl.luma_weight_l0_flag; + slc_parms->chroma_weight_l0_flag = slice_header->sh_predwttbl.chroma_weight_l0_flag; + slc_parms->luma_weight_l1_flag = slice_header->sh_predwttbl.luma_weight_l1_flag; + slc_parms->chroma_weight_l1_flag = slice_header->sh_predwttbl.chroma_weight_l1_flag; + + for (i = 0; i < 32; i++) + { + slc_parms->luma_weight_l0[i] = slice_header->sh_predwttbl.luma_weight_l0[i]; + slc_parms->luma_offset_l0[i] = slice_header->sh_predwttbl.luma_offset_l0[i]; + slc_parms->luma_weight_l1[i] = slice_header->sh_predwttbl.luma_weight_l1[i]; + slc_parms->luma_offset_l1[i] = slice_header->sh_predwttbl.luma_offset_l1[i]; + + for (j = 0; j < 2; j++) + { + slc_parms->chroma_weight_l0[i][j] = slice_header->sh_predwttbl.chroma_weight_l0[i][j]; + slc_parms->chroma_offset_l0[i][j] = slice_header->sh_predwttbl.chroma_offset_l0[i][j]; + slc_parms->chroma_weight_l1[i][j] = slice_header->sh_predwttbl.chroma_weight_l1[i][j]; + slc_parms->chroma_offset_l1[i][j] = slice_header->sh_predwttbl.chroma_offset_l1[i][j]; + } + } + } + else + { + /* default weight table */ + slc_parms->luma_log2_weight_denom = 5; + slc_parms->chroma_log2_weight_denom = 5; + slc_parms->luma_weight_l0_flag = 0; + slc_parms->luma_weight_l1_flag = 0; + slc_parms->chroma_weight_l0_flag = 0; + slc_parms->chroma_weight_l1_flag = 0; + for (i = 0; i < 32; i++) + { + slc_parms->luma_weight_l0[i] = 0; + slc_parms->luma_offset_l0[i] = 0; + slc_parms->luma_weight_l1[i] = 0; + slc_parms->luma_offset_l1[i] = 0; + + for (j = 0; j < 2; j++) + { + slc_parms->chroma_weight_l0[i][j] = 0; + slc_parms->chroma_offset_l0[i][j] = 0; + slc_parms->chroma_weight_l1[i][j] = 0; + slc_parms->chroma_offset_l1[i][j] = 0; + } + } + } +} + + +static inline void vbp_set_reference_frames_h264secure( + struct h264_viddec_parser *parser, + VAPictureParameterBufferH264* pic_parms) +{ + int buffer_idx; + int frame_idx; + frame_store* store = NULL; + h264_DecodedPictureBuffer* dpb = &(parser->info.dpb); + /* initialize reference frames */ + for (frame_idx = 0; frame_idx < 16; frame_idx++) + { + pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE; + pic_parms->ReferenceFrames[frame_idx].frame_idx = 0; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID; + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0; + } + pic_parms->num_ref_frames = 0; + + frame_idx = 0; + + /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer); */ + /* set short term reference frames */ + for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++) + { + if (frame_idx >= 16 || buffer_idx >= 16) + { + WTRACE("Frame index is out of 
bound."); + break; + } + + store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]]; + /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */ + if (viddec_h264_get_is_used(store)) + { + pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; + if (FRAME == parser->info.img.structure) + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + } + else + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) + { + /* if both fields are used for reference, just set flag to be frame (0) */ + } + else + { + if (store->top_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; + if (store->bottom_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; + } + } + } + frame_idx++; + } + + /* set long term reference frames */ + for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++) + { + if (frame_idx >= 16 || buffer_idx >= 16) + { + WTRACE("Frame index is out of bound."); + break; + } + store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]]; + if (!viddec_h264_get_is_long_term(store)) + { + WTRACE("long term frame is not marked as long term."); + } + /*if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */ + if (viddec_h264_get_is_used(store)) + { + pic_parms->ReferenceFrames[frame_idx].frame_idx = store->long_term_frame_idx; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE; + if (FRAME == parser->info.img.structure) + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc; + } + else + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) + { + /* if both fields are used for reference, just set flag to be frame (0)*/ + } + else + { + if (store->top_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; + if (store->bottom_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; + } + } + } + frame_idx++; + } + + pic_parms->num_ref_frames = parser->info.active_SPS.num_ref_frames; + + if (frame_idx > parser->info.active_SPS.num_ref_frames) + { + WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).", + frame_idx, parser->info.active_SPS.num_ref_frames); + } +} + + +static inline void vbp_set_scaling_list_h264secure( + struct h264_viddec_parser *parser, + VAIQMatrixBufferH264* IQ_matrix_buf) +{ + int i; + int lists_to_set = 6 + 2 * (parser->info.active_PPS.transform_8x8_mode_flag ? 
1 : 0); + + if (parser->info.active_PPS.pic_scaling_matrix_present_flag) + { + for (i = 0; i < lists_to_set; i++) + { + if (parser->info.active_PPS.pic_scaling_list_present_flag[i]) + { + if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) || + ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6])) + { + /* use default scaling list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + } + } + else + { + /* use PPS list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64); + } + } + } + else /* pic_scaling_list not present */ + { + if (parser->info.active_SPS.seq_scaling_matrix_present_flag) + { + /* SPS matrix present - use fallback rule B */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i], + 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], + parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i], + 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + //g_warning("invalid scaling list index."); + break; + } + } + else /* seq_scaling_matrix not present */ + { + /* SPS matrix not present - use fallback rule A */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + WTRACE("invalid scaling list index."); + break; + } + } /* end of seq_scaling_matrix not present */ + } /* end of pic_scaling_list not present */ + } /* for loop for each index from 0 to 7 */ + } /* end of pic_scaling_matrix present */ + else + { + /* PPS matrix not present, use SPS information */ + if (parser->info.active_SPS.seq_scaling_matrix_present_flag) + { + for (i = 0; i < lists_to_set; i++) + { + if (parser->info.active_SPS.seq_scaling_list_present_flag[i]) + { + if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) || + ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6])) + { + /* use default scaling list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + } + } + else + { + /* use SPS list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64); + } + } + } + else + { + /* SPS list not present - use fallback rule A */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + break; + + case 1: + case 2: + case 4: + case 5: + 
memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + WTRACE("invalid scaling list index."); + break; + } + } + } + } + else + { + /* SPS matrix not present - use flat lists */ + for (i = 0; i < 6; i++) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16); + } + for (i = 0; i < 2; i++) + { + memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64); + } + } + } + + if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) && + (parser->info.active_PPS.pic_scaling_matrix_present_flag || + parser->info.active_SPS.seq_scaling_matrix_present_flag)) + { + for (i = 0; i < 2; i++) + { + memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64); + } + } +} + +static void vbp_set_codec_data_h264secure( + struct h264_viddec_parser *parser, + vbp_data_h264 *query_data) +{ + vbp_codec_data_h264* codec_data = query_data->codec_data; + + /* The following variables are used to detect if there is new SPS or PPS */ + uint8 seq_parameter_set_id = codec_data->seq_parameter_set_id; + uint8 pic_parameter_set_id = codec_data->pic_parameter_set_id; + int frame_width = codec_data->frame_width; + int frame_height = codec_data->frame_height; + + /* parameter id */ + codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id; + codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id; + + /* profile and level */ + codec_data->profile_idc = parser->info.active_SPS.profile_idc; + codec_data->level_idc = parser->info.active_SPS.level_idc; + + + /*constraint flag sets (h.264 Spec v2009)*/ + codec_data->constraint_set0_flag = (parser->info.active_SPS.constraint_set_flags & 0x10) >> 4; + codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x8) >> 3; + codec_data->constraint_set2_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2; + codec_data->constraint_set3_flag = (parser->info.active_SPS.constraint_set_flags & 0x2) >> 1; + codec_data->constraint_set4_flag = parser->info.active_SPS.constraint_set_flags & 0x1; + + /* reference frames */ + codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames; + + if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag && + !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag) + { + /* no longer necessary: two fields share the same interlaced surface */ + /* codec_data->num_ref_frames *= 2; */ + } + + codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; + + /* frame coding */ + codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; + + /* frame dimension */ + codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1 ) * 16; + + codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16; + + /* cropping information */ + codec_data->crop_left = 0; + codec_data->crop_right = 0; + codec_data->crop_top = 0; + codec_data->crop_bottom = 0; + if(parser->info.active_SPS.sps_disp.frame_cropping_flag) { + int CropUnitX = 0, CropUnitY = 0, SubWidthC = 0, SubHeightC = 0; + int ChromaArrayType = 0; + if(parser->info.active_SPS.sps_disp.separate_colour_plane_flag == 0) { + if(parser->info.active_SPS.sps_disp.chroma_format_idc == 1) { + SubWidthC = 2; + SubHeightC = 2; + } else if( 
parser->info.active_SPS.sps_disp.chroma_format_idc == 2) {
+            SubWidthC = 2;
+            SubHeightC = 1;
+        } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 3) {
+            SubWidthC = 1;
+            SubHeightC = 1;
+        }
+        ChromaArrayType = parser->info.active_SPS.sps_disp.chroma_format_idc;
+    }
+
+    if(ChromaArrayType == 0) {
+        CropUnitX = 1;
+        CropUnitY = 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
+    } else {
+        CropUnitX = SubWidthC;
+        CropUnitY = SubHeightC * ( 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag);
+    }
+
+    codec_data->crop_left = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset;
+    codec_data->crop_right = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset; // + 1;
+    codec_data->crop_top = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset;
+    codec_data->crop_bottom = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset; // + 1;
+    }
+
+    /* aspect ratio */
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag)
+    {
+        codec_data->aspect_ratio_idc =
+            parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc;
+
+        if (codec_data->aspect_ratio_idc < 17)
+        {
+            codec_data->sar_width = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][0];
+            codec_data->sar_height = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][1];
+        }
+        else if (codec_data->aspect_ratio_idc == 255)
+        {
+            codec_data->sar_width =
+                parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width;
+
+            codec_data->sar_height =
+                parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height;
+        }
+        else
+        {
+            codec_data->sar_width = 0;
+            codec_data->sar_height = 0;
+        }
+    }
+    else
+    {
+        // unspecified
+        codec_data->aspect_ratio_idc = 0;
+        codec_data->sar_width = 0;
+        codec_data->sar_height = 0;
+    }
+
+    /* video format */
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag)
+    {
+        codec_data->video_format =
+            parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format;
+    }
+    else
+    {
+        // Unspecified video format
+        codec_data->video_format = 5;
+    }
+
+    codec_data->video_full_range_flag =
+        parser->info.active_SPS.sps_disp.vui_seq_parameters.video_full_range_flag;
+
+
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag)
+    {
+        codec_data->matrix_coefficients =
+            parser->info.active_SPS.sps_disp.vui_seq_parameters.matrix_coefficients;
+    }
+    else
+    {
+        // Unspecified
+        codec_data->matrix_coefficients = 2;
+    }
+
+    codec_data->bit_rate = parser->info.active_SPS.sps_disp.vui_seq_parameters.bit_rate_value;
+
+    /* picture order type and count */
+    codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
+    codec_data->pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;
+
+
+    /* update sps and pps status */
+    query_data->new_sps = (seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0;
+    query_data->new_pps = (pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ?
1 : 0; + query_data->has_sps = parser->info.active_SPS.seq_parameter_set_id != 0xff; + query_data->has_pps = parser->info.active_PPS.seq_parameter_set_id != 0xff; + if ( frame_width != codec_data->frame_width || frame_height != codec_data->frame_height) + { + query_data->new_sps = 1; + query_data->new_pps = 1; + } +} + + +static uint32_t vbp_add_pic_data_h264secure(vbp_context *pcontext, int list_index) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + + vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; + struct h264_viddec_parser* parser = NULL; + vbp_picture_data_h264* pic_data = NULL; + VAPictureParameterBufferH264* pic_parms = NULL; + + parser = (struct h264_viddec_parser *)cxt->codec_data; + + if (0 == parser->info.SliceHeader.first_mb_in_slice) + { + /* a new picture is parsed */ + query_data->num_pictures++; + } + + if (query_data->num_pictures == 0) + { + /* partial frame */ + query_data->num_pictures = 1; + } + + if (query_data->num_pictures > MAX_NUM_PICTURES) + { + ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES); + return VBP_DATA; + } + + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + WTRACE("MB address does not start from 0!"); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + pic_parms = pic_data->pic_parms; + + // relax this condition to support partial frame parsing + + //if (parser->info.SliceHeader.first_mb_in_slice == 0) + { + /** + * picture parameter only needs to be set once, + * even multiple slices may be encoded + */ + + /* VAPictureParameterBufferH264 */ + pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE; + pic_parms->CurrPic.frame_idx = 0; + if (parser->info.img.field_pic_flag == 1) + { + if (parser->info.img.bottom_field_flag) + { + pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD; + } + else + { + /* also OK set to 0 (from test suite) */ + pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD; + } + } + else + { + pic_parms->CurrPic.flags = 0; /* frame picture */ + } + pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc; + pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc; + pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num; + /* don't care if current frame is used as long term reference */ + if (parser->info.SliceHeader.nal_ref_idc != 0) + { + pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE; + } + + pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1; + + /* frame height in MBS */ + pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1; + + pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8; + pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8; + + + pic_parms->seq_fields.value = 0; + pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc; + pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag; + pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; + pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag; + + /* new fields in 
/* new fields in libva 0.31 */ + pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; + pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4; + pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type; + pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; + pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag = parser->info.active_SPS.delta_pic_order_always_zero_flag; + + + /* referenced from UMG_Moorstown_TestSuites */ + pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 1 : 0; + + pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1; + pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type; + pic_parms->slice_group_change_rate_minus1 = 0; + pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26; + pic_parms->pic_init_qs_minus26 = 0; + pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset; + pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset; + + pic_parms->pic_fields.value = 0; + pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag; + pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag; + pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc; + pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag; + + /* new LibVA fields in v0.31 */ + pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag; + pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag; + pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag; + pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0; + + /* all slices in the picture have the same field_pic_flag */ + pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag; + pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag; + + pic_parms->frame_num = parser->info.SliceHeader.frame_num; + + pic_parms->num_ref_idx_l0_default_active_minus1 = parser->info.active_PPS.num_ref_idx_l0_active-1; + pic_parms->num_ref_idx_l1_default_active_minus1 = parser->info.active_PPS.num_ref_idx_l1_active-1; + } + + + /* set reference frames, and num_ref_frames */ + vbp_set_reference_frames_h264secure(parser, pic_parms); + if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + int frame_idx; + for (frame_idx = 0; frame_idx < 16; frame_idx++) + { + pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE; + pic_parms->ReferenceFrames[frame_idx].frame_idx = 0; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID; + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0; + } + } + + return VBP_OK; +} + +static uint32_t vbp_add_slice_data_h264secure(vbp_context *pcontext, int index) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint32 bit, byte; + uint8 is_emul; + + vbp_data_h264 *query_data = (vbp_data_h264
*)pcontext->query_data; + VASliceParameterBufferH264 *slc_parms = NULL; + vbp_slice_data_h264 *slc_data = NULL; + struct h264_viddec_parser* h264_parser = NULL; + h264_Slice_Header_t* slice_header = NULL; + vbp_picture_data_h264* pic_data = NULL; + + + h264_parser = (struct h264_viddec_parser *)cxt->codec_data; + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + ETRACE("invalid picture data index."); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + + slc_data = &(pic_data->slc_data[pic_data->num_slices]); + slc_data->buffer_addr = cxt->parse_cubby.buf; + slc_parms = &(slc_data->slc_parms); + + /* byte: how many bytes have been parsed */ + /* bit: bits parsed within the current parsing position */ + viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul); + + slc_data->nal_unit_type = h264_parser->info.nal_unit_type; + + slc_parms->slice_data_size = slc_data->slice_size = + pcontext->parser_cxt->list.data[index].edpos - + pcontext->parser_cxt->list.data[index].stpos; + + slc_parms->slice_data_offset = 0; + + /* whole slice is in this buffer */ + slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + + /* the offset to the NAL start code for this slice */ + slc_data->slice_offset = cxt->list.data[index].stpos; + + slice_header = &(h264_parser->info.SliceHeader); + slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice; + + if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag & + (!(h264_parser->info.SliceHeader.field_pic_flag))) + { + slc_parms->first_mb_in_slice /= 2; + } + + pic_data->num_slices++; + + //vbp_update_reference_frames_h264_methodB(pic_data); + if (pic_data->num_slices > MAX_NUM_SLICES) + { + ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES); + return VBP_DATA; + } + + return VBP_OK; +} + + +static uint32_t vbp_update_slice_data_h264secure(vbp_context *pcontext, int index) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint32 bit, byte; + uint8 is_emul; + vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *) pcontext->parser_private; + vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; + VASliceParameterBufferH264 *slc_parms = NULL; + vbp_slice_data_h264 *slc_data = NULL; + struct h264_viddec_parser* h264_parser = NULL; + h264_Slice_Header_t* slice_header = NULL; + vbp_picture_data_h264* pic_data = NULL; + + h264_parser = (struct h264_viddec_parser *)cxt->codec_data; + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + ETRACE("invalid picture data index."); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + slc_data = &(pic_data->slc_data[pic_data->num_slices]); + slc_parms = &(slc_data->slc_parms); + + slc_parms->slice_data_size = parser_private->size; + slc_parms->slice_data_offset = parser_private->offset; + + /* whole slice is in this buffer */ + slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + + /* the offset to the NAL start code for this slice */ + slc_data->slice_offset = 0; + slc_data->buffer_addr = parser_private->start; + slc_data->slice_size = parser_private->size + parser_private->offset; + + slice_header = &(h264_parser->info.SliceHeader); + slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice; + + if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag & + (!(h264_parser->info.SliceHeader.field_pic_flag))) + { + slc_parms->first_mb_in_slice /= 2; + } + + pic_data->num_slices++; + + if (pic_data->num_slices > 
MAX_NUM_SLICES) + { + ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES); + return VBP_DATA; + } + + return VBP_OK; +} + + + +/** +* parse decoder configuration data +*/ +uint32 vbp_parse_init_data_h264secure(vbp_context* pcontext) +{ + /* parsing AVCDecoderConfigurationRecord structure (see MPEG-4 part 15 spec) */ + + uint8 configuration_version = 0; + uint8 AVC_profile_indication = 0; + uint8 profile_compatibility = 0; + uint8 AVC_level_indication = 0; + uint8 length_size_minus_one = 0; + uint8 num_of_sequence_parameter_sets = 0; + uint8 num_of_picture_parameter_sets = 0; + uint16 sequence_parameter_set_length = 0; + uint16 picture_parameter_set_length = 0; + + int i = 0; + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + + vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *)pcontext->parser_private; + // Enable emulation prevention + cxt->getbits.is_emul_reqd = 1; + + /* check if configuration data is start-code prefixed */ + viddec_sc_parse_cubby_cxt_t cubby = cxt->parse_cubby; + viddec_parser_ops_t *ops = pcontext->parser_ops; + int ret = ops->parse_sc((void *)&cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + if (ret == 1) + { + WTRACE("configuration data is start-code prefixed.\n"); + parser_private->bitstream_pattern = H264_BS_SC_PREFIXED; + return vbp_parse_start_code_h264secure(pcontext); + } + + + uint8* cur_data = cxt->parse_cubby.buf; + + + if (cxt->parse_cubby.size < 6) + { + /* need at least 6 bytes to start parsing the structure, see MPEG-4 part 15 */ + return VBP_DATA; + } + + configuration_version = *cur_data++; + AVC_profile_indication = *cur_data++; + + /*ITRACE("Profile indication: %d", AVC_profile_indication); */ + + profile_compatibility = *cur_data++; + AVC_level_indication = *cur_data++; + + /* ITRACE("Level indication: %d", AVC_level_indication);*/ + /* 2 bits of length_size_minus_one, 6 bits of reserved (111111) */ + length_size_minus_one = (*cur_data) & 0x3; + + if (length_size_minus_one != 3) + { + WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1); + } + + parser_private->NAL_length_size = length_size_minus_one + 1; + + cur_data++; + + /* 3 bits of reserved (111) and 5 bits of num_of_sequence_parameter_sets */ + num_of_sequence_parameter_sets = (*cur_data) & 0x1f; + if (num_of_sequence_parameter_sets > 1) + { + WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets); + } + if (num_of_sequence_parameter_sets > MAX_NUM_SPS) + { + /* this would never happen as MAX_NUM_SPS = 32 */ + WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS); + } + cur_data++; + + cxt->list.num_items = 0;
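Before the SPS/PPS loops that follow, it may help to see the record layout being walked. A sketch of the fixed avcC header consumed above, per the AVCDecoderConfigurationRecord definition in MPEG-4 part 15 (buf and the helper name are hypothetical, standing in for parse_cubby.buf):

    #include <stdint.h>

    /* avcC layout, as parsed above:
     *   buf[0]  configurationVersion (always 1)
     *   buf[1]  AVCProfileIndication
     *   buf[2]  profile_compatibility
     *   buf[3]  AVCLevelIndication
     *   buf[4]  6 reserved bits (111111) | 2-bit lengthSizeMinusOne
     *   buf[5]  3 reserved bits (111)    | 5-bit numOfSequenceParameterSets
     * then, per SPS: 16-bit big-endian length + SPS NAL unit bytes,
     * one byte of numOfPictureParameterSets,
     * then, per PPS: 16-bit big-endian length + PPS NAL unit bytes. */
    static void peek_avcc_header(const uint8_t *buf,
                                 int *nal_length_size, int *num_sps)
    {
        *nal_length_size = (buf[4] & 0x3) + 1;
        *num_sps = buf[5] & 0x1f;
    }
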
+ for (i = 0; i < num_of_sequence_parameter_sets; i++) + { + if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size) + { + /* need at least 2 bytes to parse sequence_parameter_set_length */ + ETRACE("Not enough data to parse SPS length."); + return VBP_DATA; + } + + /* 16 bits */ + sequence_parameter_set_length = vbp_utils_ntohs(cur_data); + + + cur_data += 2; + + if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size) + { + /* need at least sequence_parameter_set_length bytes for SPS */ + ETRACE("Not enough data to parse SPS."); + return VBP_DATA; + } + + cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf; + + /* end pos is exclusive */ + cxt->list.data[cxt->list.num_items].edpos = + cxt->list.data[cxt->list.num_items].stpos + sequence_parameter_set_length; + + cxt->list.num_items++; + + cur_data += sequence_parameter_set_length; + } + + if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size) + { + /* need at least one more byte to parse num_of_picture_parameter_sets */ + ETRACE("Not enough data to parse number of PPS."); + return VBP_DATA; + } + + num_of_picture_parameter_sets = *cur_data++; + if (num_of_picture_parameter_sets > 1) + { + /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */ + } + + for (i = 0; i < num_of_picture_parameter_sets; i++) + { + if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size) + { + /* need at least 2 bytes to parse picture_parameter_set_length */ + ETRACE("Not enough data to parse PPS length."); + return VBP_DATA; + } + + /* 16 bits */ + picture_parameter_set_length = vbp_utils_ntohs(cur_data); + + cur_data += 2; + + if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size) + { + /* need at least picture_parameter_set_length bytes for PPS */ + ETRACE("Not enough data to parse PPS."); + return VBP_DATA; + } + + cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf; + + /* end pos is exclusive */ + cxt->list.data[cxt->list.num_items].edpos = + cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length; + + cxt->list.num_items++; + + cur_data += picture_parameter_set_length; + } + + if ((cur_data - cxt->parse_cubby.buf) != cxt->parse_cubby.size) + { + WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.", + cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf)); + } + + parser_private->bitstream_pattern = H264_BS_LENGTH_PREFIXED; + return VBP_OK; +} + +static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p, int *NAL_length_size) +{ + switch (*NAL_length_size) + { + case 4: + return vbp_utils_ntohl(p); + + case 3: + { + uint32_t i = ((*p) << 16) + ((*(p+1)) << 8) + ((*(p+2))); + return i; + } + + case 2: + return vbp_utils_ntohs(p); + + case 1: + return *p; + + default: + WTRACE("invalid NAL_length_size: %d.", *NAL_length_size); + /* default to 4 bytes for length */ + *NAL_length_size = 4; + return vbp_utils_ntohl(p); + } +} + +/** +* An H.264 elementary stream in this format does not contain start codes. +* Instead, it is comprised of the size of each NAL unit followed by the +* payload of that NAL unit. See MPEG-4 part 15 (sample format). +*/ + +/* Start code prefix is 001 which is 3 bytes. */
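vbp_utils_ntohs/ntohl are not shown in this hunk; the switch in vbp_get_NAL_length_h264 above only works if they are plain big-endian loads, which a minimal equivalent sketch makes explicit (the names here are illustrative, not the library's):

    #include <stdint.h>

    /* Sketch: big-endian loads matching what vbp_get_NAL_length_h264 expects. */
    static uint32_t load_be16(const uint8_t *p)
    {
        return ((uint32_t)p[0] << 8) | p[1];
    }

    static uint32_t load_be32(const uint8_t *p)
    {
        return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
               ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
    }
    /* e.g. a 4-byte prefix 00 00 02 9A announces a 0x29A = 666-byte NAL unit */
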
+#define H264_SC_SIZE 3 +uint32 vbp_parse_start_code_h264secure(vbp_context *pcontext) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *)pcontext->parser_private; + + /* reset query data for the new sample buffer */ + vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data; + int i; + + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].num_slices = 0; + } + query_data->num_pictures = 0; + + cxt->list.num_items = 0; + + /* reset start position of first item to 0 in case there is only one item */ + cxt->list.data[0].stpos = 0; + + /* start code emulation prevention byte is present in NAL */ + cxt->getbits.is_emul_reqd = 1; + + if (parser_private->bitstream_pattern == H264_BS_LENGTH_PREFIXED) + { + viddec_sc_parse_cubby_cxt_t* cubby = NULL; + int32_t size_left = 0; + int32_t size_parsed = 0; + int32_t NAL_length = 0; + + cubby = &(cxt->parse_cubby); + + size_left = cubby->size; + + while (size_left >= parser_private->NAL_length_size) + { + NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed, &parser_private->NAL_length_size); + if (NAL_length <= 0 || NAL_length > size_left - parser_private->NAL_length_size) + { + ETRACE("Invalid NAL_length parsed."); + break; + } + + size_parsed += parser_private->NAL_length_size; + cxt->list.data[cxt->list.num_items].stpos = size_parsed; + size_parsed += NAL_length; /* skip NAL bytes */ + /* end position is exclusive */ + cxt->list.data[cxt->list.num_items].edpos = size_parsed; + cxt->list.num_items++; + if (cxt->list.num_items >= MAX_IBUFS_PER_SC) + { + ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC); + break; + } + + size_left = cubby->size - size_parsed; + } + + if (size_left != 0 && parser_private->length_prefix_verified == 0) + { + WTRACE("Elementary stream is not aligned (%d).", size_left); + + /* attempt to correct length prefix to start-code prefix only once; if it succeeds, we will + * always treat the bit stream as start-code prefixed; otherwise, treat the bit stream as length prefixed + */ + parser_private->length_prefix_verified = 1; + viddec_sc_parse_cubby_cxt_t temp_cubby = cxt->parse_cubby; + + viddec_parser_ops_t *ops = pcontext->parser_ops; + int ret = ops->parse_sc((void *)&temp_cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + + /* found start code */ + if (ret == 1) + { + WTRACE("Stream was supposed to be length prefixed, but actually is start-code prefixed."); + parser_private->NAL_length_size = 0; + parser_private->bitstream_pattern = H264_BS_SC_PREFIXED; + /* reset parsing data */ + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].num_slices = 0; + } + query_data->num_pictures = 0; + cxt->list.num_items = 0; + } + } + } + + + if (parser_private->bitstream_pattern == H264_BS_SC_PREFIXED) + { + viddec_sc_parse_cubby_cxt_t cubby; + /* memory copy without updating cxt->parse_cubby */ + cubby = cxt->parse_cubby; + viddec_parser_ops_t *ops = pcontext->parser_ops; + int ret = 0; + + while (1) + { + ret = ops->parse_sc((void *)&cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + if (ret == 1) + { + if (cxt->list.num_items == 0) + { + cxt->list.data[0].stpos = cubby.sc_end_pos; + } + else + { + cxt->list.data[cxt->list.num_items].stpos = + cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos; + cxt->list.data[cxt->list.num_items - 1].edpos = cxt->list.data[cxt->list.num_items].stpos - H264_SC_SIZE; + } + + cubby.phase = 0; + cubby.buf =
cxt->parse_cubby.buf + + cxt->list.data[cxt->list.num_items].stpos; + + cubby.size = cxt->parse_cubby.size - + cxt->list.data[cxt->list.num_items].stpos; + + cxt->list.num_items++; + if (cxt->list.num_items >= MAX_IBUFS_PER_SC) + { + WTRACE("Num items exceeds the limit!"); + /* not fatal, just stop parsing */ + break; + } + } + else + { + if (cxt->list.num_items == 0) + { + cxt->list.num_items = 1; + parser_private->bitstream_pattern = H264_BS_SINGLE_NAL; + WTRACE("Stream was supposed to be SC prefixed, but actually contains a single NAL."); + } + cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size; + break; + } + } + + } + + if (parser_private->bitstream_pattern == H264_BS_SINGLE_NAL) + { + cxt->list.num_items = 1; + cxt->list.data[0].stpos = 0; + cxt->list.data[0].edpos = cxt->parse_cubby.size; + } + + return VBP_OK; +} + +/** +* +* process parsing result after a NAL unit is parsed +* +*/ +uint32 vbp_process_parsing_result_h264secure(vbp_context *pcontext, int i) +{ + if (i >= MAX_NUM_SLICES) + { + return VBP_PARM; + } + + uint32 error = VBP_OK; + + struct h264_viddec_parser* parser = NULL; + parser = (struct h264_viddec_parser *)&(pcontext->parser_cxt->codec_data[0]); + vbp_data_h264* query_data = (vbp_data_h264 *)pcontext->query_data; + switch (parser->info.nal_unit_type) + { + case h264_NAL_UNIT_TYPE_SLICE: + VTRACE("slice header is parsed."); + error = vbp_add_pic_data_h264secure(pcontext, i); + if (VBP_OK == error) + { + error = vbp_add_slice_data_h264secure(pcontext, i); + } + break; + + case h264_NAL_UNIT_TYPE_IDR: + VTRACE("IDR header is parsed."); + error = vbp_add_pic_data_h264secure(pcontext, i); + if (VBP_OK == error) + { + error = vbp_add_slice_data_h264secure(pcontext, i); + } + break; + case h264_NAL_UNIT_TYPE_SEI: + //ITRACE("SEI header is parsed."); + break; + + case h264_NAL_UNIT_TYPE_SPS: + VTRACE("SPS header is parsed."); + break; + + case h264_NAL_UNIT_TYPE_PPS: + VTRACE("PPS header is parsed."); + break; + + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + VTRACE("Access unit delimiter is parsed."); + break; + + case h264_NAL_UNIT_TYPE_EOSeq: + ITRACE("EOSeq is parsed."); + break; + + case h264_NAL_UNIT_TYPE_EOstream: + ITRACE("EOStream is parsed."); + break; + + default: + WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type); + break; + } + + if (query_data->num_pictures == MAX_NUM_PICTURES && parser->info.img.field_pic_flag != 1) + { + WTRACE("more than one frame in the buffer is found (%d)", query_data->num_pictures); + return (error == VBP_OK ? VBP_MULTI : error); + } + return error; +} + +/* +* +* fill query data structure after sample buffer is parsed +* +*/ +uint32 vbp_populate_query_data_h264secure(vbp_context *pcontext) +{ + vbp_data_h264 *query_data = NULL; + struct h264_viddec_parser *parser = NULL; + struct vbp_h264_parser_private_t* private = NULL; + + parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data; + query_data = (vbp_data_h264 *)pcontext->query_data; + private = (struct vbp_h264_parser_private_t *)pcontext->parser_private; + + vbp_set_codec_data_h264secure(parser, query_data); + + /* buffer number */ + query_data->buf_number = buffer_counter; + + /* VAIQMatrixBufferH264 */ + vbp_set_scaling_list_h264secure(parser, query_data->IQ_matrix_buf); + + if (query_data->num_pictures > 0) + { + /* + * picture parameter buffer and slice parameter buffer have been populated + */ + } + else + { + /** + * add a dummy picture that contains picture parameters parsed + * from SPS and PPS.
+ */ + vbp_add_pic_data_h264secure(pcontext, 0); + } + + return VBP_OK; +} + +uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 size) +{ + uint32 error = VBP_OK; + uint32 offset = 0; + uint32 key = 0; + uint32 i,j; + + vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data; + + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].num_slices = 0; + } + query_data->num_pictures = 0; + + vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *) pcontext->parser_private; + + int32_t sliceheadersize; + uint32_t slice_num = 0; + while (offset < size) { + memcpy(&key, (uint8_t *)newdata+offset, sizeof(uint32_t)); + if (key == TERMINATE_KEY) { + break; + } + slice_num++; + offset += sizeof(uint32_t); + + memcpy(&parser_private->start, (uint8_t *)newdata+offset, 4); + offset += 4; + + memcpy(&parser_private->offset, (uint8_t *)newdata+offset, sizeof(int32_t)); + offset += 4; + + memcpy(&parser_private->size, (uint8_t *)newdata+offset, sizeof(int32_t)); + offset += 4; + + sliceheadersize = sizeof(slice_header_t) + sizeof(dec_ref_pic_marking_t); + error = pcontext->parser_ops->update_data(pcontext->parser_cxt, + newdata+offset, sliceheadersize); + offset += sliceheadersize; + if (error != VBP_OK) + { + ETRACE("update_data error = 0x%x", error); + return error; + } + + error = vbp_add_pic_data_h264secure(pcontext, slice_num); + if (error != VBP_OK) + { + ETRACE("vbp_add_pic_data_h264secure error = 0x%x", error); + return error; + } + + error = vbp_update_slice_data_h264secure(pcontext, slice_num); + if (error != VBP_OK) + { + ETRACE("vbp_update_slice_data_h264secure error = 0x%x", error); + return error; + } + } + if (key != TERMINATE_KEY) + { + ETRACE("Did not find the termination key!"); + return VBP_DATA; + } else { + if (slice_num < 1) { + ETRACE("Did not find a valid slice header!"); + return VBP_DATA; + } + } + error = vbp_populate_query_data_h264secure(pcontext); + + if (error != VBP_OK) + { + ETRACE("vbp_populate_query_data_h264secure error = 0x%x", error); + return error; + } + return error; +} diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264secure_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_h264secure_parser.h new file mode 100644 index 0000000..a55c07c --- /dev/null +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264secure_parser.h @@ -0,0 +1,70 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/
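Stepping back to vbp_update_data_h264secure above: its while loop defines a small wire format for the blob handed in through vbp_update(). A sketch of one record, reconstructed from the reads above and from the matching producer in VideoDecoderAVCSecure::decode() later in this patch; note that both sides copy the data pointer as 4 bytes, so a 32-bit userspace is assumed:

    /* One slice record inside the update blob, as consumed above:
     *   uint32_t  slice index (reading TERMINATE_KEY here ends the blob;
     *             the producer writes the terminator as four 0xFF bytes)
     *   uint8_t*  base address of the frame data          (4 bytes)
     *   int32_t   offset of this slice within that data   (4 bytes)
     *   int32_t   size of this slice in bytes             (4 bytes)
     *   slice_header_t         pre-parsed slice header
     *   dec_ref_pic_marking_t  ref-pic marking (zeroed for non-reference NALs)
     * Records repeat back-to-back until the terminator key. */
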
+ + +#ifndef VBP_H264SECURE_PARSER_H +#define VBP_H264SECURE_PARSER_H + +/* + * setup parser's entry points + */ +uint32 vbp_init_parser_entries_h264secure(vbp_context *pcontext); + +/* + * allocate query data + */ +uint32 vbp_allocate_query_data_h264secure(vbp_context *pcontext); + +/* + * free query data + */ +uint32 vbp_free_query_data_h264secure(vbp_context *pcontext); + +/* + * parse initialization data + */ +uint32 vbp_parse_init_data_h264secure(vbp_context *pcontext); + +/* + * parse start code. Only length-prefixed mode is supported; start-code + * prefixed input is not. + */ +uint32 vbp_parse_start_code_h264secure(vbp_context *pcontext); + +/* + * process parsing result + */ +uint32 vbp_process_parsing_result_h264secure(vbp_context *pcontext, int list_index); + +/* + * query parsing result + */ +uint32 vbp_populate_query_data_h264secure(vbp_context *pcontext); + +/* + * update the parsing result with extra data + */ +uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 size); + +#endif /*VBP_H264SECURE_PARSER_H*/ diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c index 7797a78..972ab2d 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c @@ -173,3 +173,33 @@ uint32 vbp_flush(Handle hcontext) return error; } + +#ifdef USE_AVC_SHORT_FORMAT +uint32 vbp_update(Handle hcontext, void *newdata, uint32 size, void **data) +{ + vbp_context *pcontext; + uint32 error = VBP_OK; + + if ((NULL == hcontext) || (NULL == newdata) || (0 == size) || (NULL == data)) + { + ETRACE("Invalid input parameters."); + return VBP_PARM; + } + + pcontext = (vbp_context *)hcontext; + + if (MAGIC_NUMBER != pcontext->identifier) + { + ETRACE("context is not initialized"); + return VBP_INIT; + } + + error = vbp_utils_update(pcontext, newdata, size, data); + + if (VBP_OK != error) + { + ETRACE("Failed to update parsing result: %d.", error); + } + return error; +} +#endif diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h index 0655e07..ad4b106 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h @@ -404,7 +404,10 @@ enum _vbp_parser_type VBP_MPEG4, VBP_H264, #ifdef USE_HW_VP8 - VBP_VP8 + VBP_VP8, +#endif +#ifdef USE_AVC_SHORT_FORMAT + VBP_H264SECURE, #endif }; @@ -457,5 +460,17 @@ uint32 vbp_query(Handle hcontext, void **data); uint32 vbp_flush(Handle hcontent); +#ifdef USE_AVC_SHORT_FORMAT +/* + * update the vbp context using the new data + * @param hcontext: handle to VBP context. + * @param newdata: pointer to the new data buffer. + * @param size: size of new data buffer. + * @param data: pointer to hold a data blob that contains parsing result. + * @returns VBP_OK on success, anything else on failure. + * +*/ +uint32 vbp_update(Handle hcontext, void *newdata, uint32 size, void **data); +#endif #endif /* VBP_LOADER_H */
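A hedged usage sketch of the new vbp_update() entry point declared above (hSecureCtx, blob and blob_size are hypothetical names; the context is assumed to have been opened for VBP_H264SECURE, and error handling is elided):

    /* Sketch: hand pre-parsed slice records to the secure parser. */
    void *result = NULL;
    uint32 err = vbp_update(hSecureCtx, blob, blob_size, &result);
    if (err == VBP_OK)
    {
        /* same query data layout that vbp_query() returns */
        vbp_data_h264 *h264_data = (vbp_data_h264 *)result;
        /* h264_data->num_pictures and h264_data->pic_data[] are now populated */
    }
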
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c index 3983387..527fc6a 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c @@ -35,6 +35,9 @@ #ifdef USE_HW_VP8 #include "vbp_vp8_parser.h" #endif +#ifdef USE_AVC_SHORT_FORMAT +#include "vbp_h264secure_parser.h" +#endif /* buffer counter */ @@ -127,6 +130,13 @@ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) #endif break; #endif + +#ifdef USE_AVC_SHORT_FORMAT + case VBP_H264SECURE: + parser_name = "libmixvbp_h264secure.so"; + break; +#endif + default: WTRACE("Unsupported parser type!"); return VBP_TYPE; @@ -166,8 +176,16 @@ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) SET_FUNC_POINTER(VBP_H264, h264); #ifdef USE_HW_VP8 SET_FUNC_POINTER(VBP_VP8, vp8); +#endif +#ifdef USE_AVC_SHORT_FORMAT + SET_FUNC_POINTER(VBP_H264SECURE, h264secure); #endif } +#ifdef USE_AVC_SHORT_FORMAT + if (pcontext->parser_type == VBP_H264SECURE) { + pcontext->func_update_data = vbp_update_data_h264secure; + } +#endif /* set entry points for parser operations: init @@ -584,3 +602,28 @@ uint32 vbp_utils_flush(vbp_context *pcontext) return VBP_OK; } + +#ifdef USE_AVC_SHORT_FORMAT +/** + * + * update the parser with new data and provide query data back to the consumer + * + */ +uint32 vbp_utils_update(vbp_context *pcontext, void *newdata, uint32 size, void **data) +{ + /* entry point, no need to validate input parameters. */ + uint32 error = VBP_OK; + + error = pcontext->func_update_data(pcontext, newdata, size); + + if (VBP_OK == error) + { + *data = pcontext->query_data; + } + else + { + *data = NULL; + } + return error; +} +#endif diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h index 073c2c2..7761c26 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h @@ -64,8 +64,9 @@ typedef uint32 (*function_parse_init_data)(vbp_context* cxt); typedef uint32 (*function_parse_start_code)(vbp_context* cxt); typedef uint32 (*function_process_parsing_result)(vbp_context* cxt, int i); typedef uint32 (*function_populate_query_data)(vbp_context* cxt); - - +#ifdef USE_AVC_SHORT_FORMAT +typedef uint32 (*function_update_data)(vbp_context* cxt, void *newdata, uint32 size); +#endif struct vbp_context_t { @@ -103,7 +104,9 @@ struct vbp_context_t function_parse_start_code func_parse_start_code; function_process_parsing_result func_process_parsing_result; function_populate_query_data func_populate_query_data; - +#ifdef USE_AVC_SHORT_FORMAT + function_update_data func_update_data; +#endif }; diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index d032d70..7f3add5 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -8,7 +8,7 @@ LOCAL_SRC_FILES := \ VideoDecoderMPEG4.cpp \ VideoDecoderAVC.cpp \ VideoDecoderPAVC.cpp \ - VideoDecoderTrace.cpp \ + VideoDecoderTrace.cpp # LOCAL_CFLAGS := @@ -18,11 +18,18 @@ LOCAL_C_INCLUDES := \ $(TARGET_OUT_HEADERS)/libmixvbp ifeq ($(USE_INTEL_SECURE_AVC),true) +LOCAL_CFLAGS += -DUSE_INTEL_SECURE_AVC LOCAL_SRC_FILES += securevideo/$(TARGET_BOARD_PLATFORM)/VideoDecoderAVCSecure.cpp LOCAL_C_INCLUDES += $(LOCAL_PATH)/securevideo/$(TARGET_BOARD_PLATFORM) LOCAL_CFLAGS += -DUSE_INTEL_SECURE_AVC endif +PLATFORM_SUPPORT_AVC_SHORT_FORMAT := \ + baytrail +ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),) + LOCAL_CFLAGS
+= -DUSE_AVC_SHORT_FORMAT +endif + ifeq ($(TARGET_BOARD_PLATFORM),baytrail) LOCAL_CFLAGS += -DLOAD_PVR_DRIVER endif diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 530d8c9..e3d67dd 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -413,6 +413,9 @@ Decode_Status VideoDecoderAVC::updateDPB(VAPictureParameterBufferH264 *picParam) if (ref->flags & VA_PICTURE_H264_INVALID) { continue; } +#ifdef USE_AVC_SHORT_FORMAT + ref->picture_id = findSurface(ref); +#endif dpb->poc = getPOC(ref); // looking for the latest ref frame in the DPB with specified POC, in case frames have same POC dpb->surfaceBuffer = findRefSurfaceBuffer(ref); @@ -696,15 +699,23 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { data->codec_data->crop_right, data->codec_data->crop_bottom); + int diff = data->codec_data->num_ref_frames + 1 - mOutputWindowSize; + +#ifndef USE_AVC_SHORT_FORMAT // The number of actual buffer needed is // outputQueue + nativewindow_owned + (diff > 0 ? diff : 1) + widi_need_max + 1(available buffer) // while outputQueue = DPB < 8? DPB :8 // and diff = Reference + 1 - ouputQueue - int diff = data->codec_data->num_ref_frames + 1 - mOutputWindowSize; mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + 4 /* Owned by native window */ + (diff > 0 ? diff : 1) + 6 /* WiDi maximum needs */ + 1; +#else + // This is for protected video playback on Baytrail + mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + 2 /* Owned by native window */ + + (diff > 0 ? diff : 1) + + 1; +#endif ITRACE("actualBufferNeeded =%d", mVideoFormatInfo.actualBufferNeeded); mVideoFormatInfo.valid = true; diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 8f95ac2..c12ea5d 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -765,6 +765,10 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { CHECK_VA_STATUS("vaInitialize"); if ((int32_t)profile != VAProfileSoftwareDecoding) { +#ifdef USE_AVC_SHORT_FORMAT + status = getCodecSpecificConfigs(profile, &mVAConfig); + CHECK_STATUS("getCodecSpecificAttributes"); +#else //We are requesting RT attributes attrib.type = VAConfigAttribRTFormat; attrib.value = VA_RT_FORMAT_YUV420; @@ -777,6 +781,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { 1, &mVAConfig); CHECK_VA_STATUS("vaCreateConfig"); +#endif } mNumSurfaces = numSurface; @@ -787,8 +792,10 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { int32_t format = VA_RT_FORMAT_YUV420; if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) { +#ifndef USE_AVC_SHORT_FORMAT format |= VA_RT_FORMAT_PROTECTED; WTRACE("Surface is protected."); +#endif } if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) { mVASurfaceAttrib = new VASurfaceAttributeTPI; @@ -807,7 +814,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { mVASurfaceAttrib->height = mVideoFormatInfo.height; mVASurfaceAttrib->type = VAExternalMemoryAndroidGrallocBuffer; mVASurfaceAttrib->reserved[0] = (unsigned int)mConfigBuffer.nativeWindow; - + for (int i = 0; i < mNumSurfaces; i++) { mVASurfaceAttrib->buffers[i] = (unsigned int )mConfigBuffer.graphicBufferHandler[i]; } @@ -1029,6 +1036,7 @@ Decode_Status VideoDecoderBase::getRawDataFromSurface(void) { imageFormat.fourcc = VA_FOURCC_NV12; imageFormat.byte_order = VA_LSB_FIRST; imageFormat.bits_per_pixel = 16; + vaStatus = vaCreateImage( 
mVADisplay, &imageFormat, @@ -1220,3 +1228,60 @@ void VideoDecoderBase::querySurfaceRenderStatus(VideoSurfaceBuffer* surface) { } +// This function should be called before start() to load different type of parsers +#ifdef USE_AVC_SHORT_FORMAT +Decode_Status VideoDecoderBase::setParserType(_vbp_parser_type type) { + if ((int32_t)type != VBP_INVALID) { + ITRACE("Parser Type = %d", (int32_t)type); + mParserType = type; + return DECODE_SUCCESS; + } + else { + ETRACE("Invalid parser type = %d", (int32_t)type); + return DECODE_NO_PARSER; + } +} + +Decode_Status VideoDecoderBase::updateBuffer(uint8_t *buffer, int32_t size, void** vbpData) { + if (mParserHandle == NULL) { + return DECODE_NO_PARSER; + } + + uint32_t vbpStatus; + if (buffer == NULL || size <= 0) { + return DECODE_INVALID_DATA; + } + + vbpStatus = vbp_update(mParserHandle, buffer, size, vbpData); + CHECK_VBP_STATUS("vbp_update"); + + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderBase::getCodecSpecificConfigs( + VAProfile profile, VAConfigID *config) +{ + VAStatus vaStatus; + VAConfigAttrib attrib; + attrib.type = VAConfigAttribRTFormat; + attrib.value = VA_RT_FORMAT_YUV420; + + if (config == NULL) { + ETRACE("Invalid parameter!"); + return DECODE_FAIL; + } + + vaStatus = vaCreateConfig( + mVADisplay, + profile, + VAEntrypointVLD, + &attrib, + 1, + config); + + CHECK_VA_STATUS("vaCreateConfig"); + + return DECODE_SUCCESS; +} +#endif + diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index f0c60cf..cb88622 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -87,6 +87,11 @@ protected: return ((a + 15) & (~15)); } +#ifdef USE_AVC_SHORT_FORMAT + Decode_Status updateBuffer(uint8_t *buffer, int32_t size, void** vbpData); + Decode_Status setParserType(_vbp_parser_type type); + virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID *config); +#endif private: Decode_Status mapSurface(void); Decode_Status getRawDataFromSurface(void); diff --git a/videodecoder/VideoDecoderHost.cpp b/videodecoder/VideoDecoderHost.cpp index 0181343..93e86c1 100644 --- a/videodecoder/VideoDecoderHost.cpp +++ b/videodecoder/VideoDecoderHost.cpp @@ -57,9 +57,6 @@ IVideoDecoder* createVideoDecoder(const char* mimeType) { strcasecmp(mimeType, "video/3gpp") == 0) { VideoDecoderMPEG4 *p = new VideoDecoderMPEG4(mimeType); return (IVideoDecoder *)p; - } else if (strcasecmp(mimeType, "video/pavc") == 0) { - VideoDecoderAVC *p = new VideoDecoderPAVC(mimeType); - return (IVideoDecoder *)p; } #ifdef USE_INTEL_SECURE_AVC else if (strcasecmp(mimeType, "video/avc-secure") == 0) { @@ -67,6 +64,7 @@ IVideoDecoder* createVideoDecoder(const char* mimeType) { return (IVideoDecoder *)p; } #endif + #ifdef USE_HW_VP8 else if (strcasecmp(mimeType, "video/vp8") == 0 || strcasecmp(mimeType, "video/x-vnd.on2.vp8") == 0) { diff --git a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp index 3bcfd70..4ded53f 100644 --- a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp @@ -22,65 +22,21 @@ * */ +#include "va_private.h" #include "VideoDecoderAVCSecure.h" #include "VideoDecoderTrace.h" #include - -#define STARTCODE_00 0x00 -#define STARTCODE_01 0x01 #define STARTCODE_PREFIX_LEN 3 #define NALU_TYPE_MASK 0x1F - - -// mask for little endian, to mast the second and fourth bytes in the byte stream -#define STARTCODE_MASK0 0xFF000000 //0x00FF0000 -#define 
STARTCODE_MASK1 0x0000FF00 //0x000000FF - - -typedef enum { - NAL_UNIT_TYPE_unspecified0 = 0, - NAL_UNIT_TYPE_SLICE, - NAL_UNIT_TYPE_DPA, - NAL_UNIT_TYPE_DPB, - NAL_UNIT_TYPE_DPC, - NAL_UNIT_TYPE_IDR, - NAL_UNIT_TYPE_SEI, - NAL_UNIT_TYPE_SPS, - NAL_UNIT_TYPE_PPS, - NAL_UNIT_TYPE_Acc_unit_delimiter, - NAL_UNIT_TYPE_EOSeq, - NAL_UNIT_TYPE_EOstream, - NAL_UNIT_TYPE_filler_data, - NAL_UNIT_TYPE_SPS_extension, - NAL_UNIT_TYPE_Reserved14, - NAL_UNIT_TYPE_Reserved15, - NAL_UNIT_TYPE_Reserved16, - NAL_UNIT_TYPE_Reserved17, - NAL_UNIT_TYPE_Reserved18, - NAL_UNIT_TYPE_ACP, - NAL_UNIT_TYPE_Reserved20, - NAL_UNIT_TYPE_Reserved21, - NAL_UNIT_TYPE_Reserved22, - NAL_UNIT_TYPE_Reserved23, - NAL_UNIT_TYPE_unspecified24, -} NAL_UNIT_TYPE; - -#ifndef min -#define min(X, Y) ((X) <(Y) ? (X) : (Y)) -#endif - - +#define MAX_NALU_HEADER_BUFFER 8192 static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01}; - VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) : VideoDecoderAVC(mimeType), mNaluHeaderBuffer(NULL), - mInputBuffer(NULL) { - - memset(&mMetadata, 0, sizeof(NaluMetadata)); - memset(&mByteStream, 0, sizeof(NaluByteStream)); + mSliceHeaderBuffer(NULL) { + setParserType(VBP_H264SECURE); } VideoDecoderAVCSecure::~VideoDecoderAVCSecure() { @@ -92,136 +48,161 @@ Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { return status; } - mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER]; - mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER]; mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; - if (mMetadata.naluInfo == NULL || - mByteStream.byteStream == NULL || - mNaluHeaderBuffer == NULL) { - ETRACE("Failed to allocate memory."); - // TODO: release all allocated memory + if (mNaluHeaderBuffer == NULL) { + ETRACE("Failed to allocate memory for mNaluHeaderBuffer"); + return DECODE_MEMORY_FAIL; + } + + mSliceHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; + if (mSliceHeaderBuffer == NULL) { + ETRACE("Failed to allocate memory for mSliceHeaderBuffer"); + if (mNaluHeaderBuffer) { + delete [] mNaluHeaderBuffer; + mNaluHeaderBuffer = NULL; + } return DECODE_MEMORY_FAIL; } + return status; } void VideoDecoderAVCSecure::stop(void) { VideoDecoderAVC::stop(); - if (mMetadata.naluInfo) { - delete [] mMetadata.naluInfo; - mMetadata.naluInfo = NULL; - } - - if (mByteStream.byteStream) { - delete [] mByteStream.byteStream; - mByteStream.byteStream = NULL; - } - if (mNaluHeaderBuffer) { delete [] mNaluHeaderBuffer; mNaluHeaderBuffer = NULL; } + + if (mSliceHeaderBuffer) { + delete [] mSliceHeaderBuffer; + mSliceHeaderBuffer = NULL; + } + } Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { Decode_Status status; int32_t sizeAccumulated = 0; + int32_t sliceHeaderSize = 0; int32_t sizeLeft = 0; - uint8_t *pByteStream = NULL; - NaluInfo *pNaluInfo = mMetadata.naluInfo; + int32_t sliceIdx = 0; + uint8_t naluType; + frame_info_t* pFrameInfo; if (buffer->flag & IS_SECURE_DATA) { - pByteStream = buffer->data; - sizeLeft = buffer->size; - mInputBuffer = NULL; + VTRACE("Decoding protected video ..."); + mIsEncryptData = 1; } else { - status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream); - CHECK_STATUS("parseAnnexBStream"); - pByteStream = mByteStream.byteStream; - sizeLeft = mByteStream.streamPos; - mInputBuffer = buffer->data; - } - if (sizeLeft < 4) { - ETRACE("Not enough data to read number of NALU."); - return DECODE_INVALID_DATA; + VTRACE("Decoding clear video ..."); + mIsEncryptData = 0; + return 
VideoDecoderAVC::decode(buffer); } - // read number of NALU - memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t)); - pByteStream += 4; - sizeLeft -= 4; - - if (mMetadata.naluNumber == 0) { - WTRACE("Number of NALU is ZERO!"); - return DECODE_SUCCESS; + if (buffer->size != sizeof(frame_info_t)) { + ETRACE("Not enough data to read frame_info_t!"); + return DECODE_INVALID_DATA; } - - for (int32_t i = 0; i < mMetadata.naluNumber; i++) { - if (sizeLeft < 12) { - ETRACE("Not enough data to parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft); - return DECODE_INVALID_DATA; - } - sizeLeft -= 12; - // read NALU offset - memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t)); - pByteStream += 4; - - // read NALU size - memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t)); - pByteStream += 4; - - // read NALU header length - memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t)); - pByteStream += 4; - - if (sizeLeft < pNaluInfo->naluHeaderLen) { - ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen); - return DECODE_INVALID_DATA; - } - - sizeLeft -= pNaluInfo->naluHeaderLen; - - if (pNaluInfo->naluHeaderLen) { - // copy start code prefix to buffer + pFrameInfo = (frame_info_t*) buffer->data; + + memcpy(&mEncParam, pFrameInfo->pavp, sizeof(pavp_info_t)); + for (int32_t i = 0; i < pFrameInfo->num_nalus; i++) { + naluType = pFrameInfo->nalus[i].type & NALU_TYPE_MASK; + if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) { + memcpy(mSliceHeaderBuffer + sliceHeaderSize, + &sliceIdx, + sizeof(int32_t)); + sliceHeaderSize += 4; + + memcpy(mSliceHeaderBuffer + sliceHeaderSize, + &pFrameInfo->data, + sizeof(uint8_t*)); + sliceHeaderSize += sizeof(uint8_t*); + + memcpy(mSliceHeaderBuffer + sliceHeaderSize, + &pFrameInfo->nalus[i].offset, + sizeof(uint32_t)); + sliceHeaderSize += sizeof(uint32_t); + + memcpy(mSliceHeaderBuffer + sliceHeaderSize, + &pFrameInfo->nalus[i].length, + sizeof(uint32_t)); + sliceHeaderSize += sizeof(uint32_t); + + memcpy(mSliceHeaderBuffer + sliceHeaderSize, + pFrameInfo->nalus[i].slice_header, + sizeof(slice_header_t)); + sliceHeaderSize += sizeof(slice_header_t); + if (pFrameInfo->nalus[i].type & 0x60) { + memcpy(mSliceHeaderBuffer+sliceHeaderSize, pFrameInfo->dec_ref_pic_marking, sizeof(dec_ref_pic_marking_t)); + } else { + memset(mSliceHeaderBuffer+sliceHeaderSize, 0, sizeof(dec_ref_pic_marking_t)); + } + sliceHeaderSize += sizeof(dec_ref_pic_marking_t); + sliceIdx++; + } else if (naluType >= h264_NAL_UNIT_TYPE_SEI && naluType <= h264_NAL_UNIT_TYPE_PPS) { memcpy(mNaluHeaderBuffer + sizeAccumulated, startcodePrefix, STARTCODE_PREFIX_LEN); sizeAccumulated += STARTCODE_PREFIX_LEN; - - // copy NALU header - memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen); - pByteStream += pNaluInfo->naluHeaderLen; - - sizeAccumulated += pNaluInfo->naluHeaderLen; + memcpy(mNaluHeaderBuffer + sizeAccumulated, + pFrameInfo->nalus[i].data, + pFrameInfo->nalus[i].length); + sizeAccumulated += pFrameInfo->nalus[i].length; } else { - WTRACE("header len is zero for NALU %d", i); + WTRACE("Failure: DECODE_FRAME_DROPPED"); + return DECODE_FRAME_DROPPED; } + } + + vbp_data_h264 *data = NULL; - // for next NALU - pNaluInfo++; + if (sizeAccumulated > 0) { + status = VideoDecoderBase::parseBuffer( + mNaluHeaderBuffer, + sizeAccumulated, + false, + (void**)&data); + CHECK_STATUS("VideoDecoderBase::parseBuffer"); } - buffer->data = 
mNaluHeaderBuffer; - buffer->size = sizeAccumulated; + if (sliceHeaderSize > 0) { + memset(mSliceHeaderBuffer + sliceHeaderSize, 0xFF, 4); + sliceHeaderSize += 4; + status = VideoDecoderBase::updateBuffer( + mSliceHeaderBuffer, + sliceHeaderSize, + (void**)&data); + CHECK_STATUS("VideoDecoderBase::updateBuffer"); + } - return VideoDecoderAVC::decode(buffer); + if (!mVAStarted) { + if (data->has_sps && data->has_pps) { + status = startVA(data); + CHECK_STATUS("startVA"); + } else { + WTRACE("Can't start VA as either SPS or PPS is still not available."); + return DECODE_SUCCESS; + } + } + status = decodeFrame(buffer, data); + return status; } - Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { - Decode_Status status; VAStatus vaStatus; uint32_t bufferIDCount = 0; // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data - VABufferID bufferIDs[4]; + VABufferID bufferIDs[5]; vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); VAPictureParameterBufferH264 *picParam = picData->pic_parms; VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); + VAEncryptionParameterBuffer encryptParam; if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { // either condition indicates start of a new frame @@ -238,16 +219,10 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; } - // Check there is no reference frame loss before decoding a frame - // Update the reference frames and surface IDs for DPB and current frame status = updateDPB(picParam); CHECK_STATUS("updateDPB"); - //We have to provide a hacked DPB rather than complete DPB for libva as workaround - status = updateReferenceFrames(picData); - CHECK_STATUS("updateReferenceFrames"); - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); CHECK_VA_STATUS("vaBeginPicture"); @@ -275,70 +250,59 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p &bufferIDs[bufferIDCount]); CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); bufferIDCount++; - } - status = setReference(sliceParam); - CHECK_STATUS("setReference"); - - // find which naluinfo is correlated to current slice - int naluIndex = 0; - uint32_t accumulatedHeaderLen = 0; - uint32_t headerLen = 0; - for (; naluIndex < mMetadata.naluNumber; naluIndex++) { - headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen; - if (headerLen == 0) { - WTRACE("lenght of current NAL unit is 0."); - continue; - } - accumulatedHeaderLen += STARTCODE_PREFIX_LEN; - if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) { - break; + if (mIsEncryptData) { + memset(&encryptParam, 0, sizeof(VAEncryptionParameterBuffer)); + encryptParam.pavpCounterMode = 4; + encryptParam.pavpEncryptionType = 2; + encryptParam.hostEncryptMode = 2; + encryptParam.pavpHasBeenEnabled = 1; + encryptParam.app_id = 0; + memcpy(encryptParam.pavpAesCounter, mEncParam.iv, 16); + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + (VABufferType)VAEncryptionParameterBufferType, + sizeof(VAEncryptionParameterBuffer), + 1, + &encryptParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateEncryptionParameterBuffer"); + bufferIDCount++; } - accumulatedHeaderLen += headerLen; - } - if (sliceData->slice_offset != accumulatedHeaderLen) { - WTRACE("unexpected slice offset %d, 
accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen); } - - sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen; - sliceData->slice_size = sliceParam->slice_data_size; - - // no need to update: - // sliceParam->slice_data_offset - 0 always - // sliceParam->slice_data_bit_offset - relative to sliceData->slice_offset - vaStatus = vaCreateBuffer( mVADisplay, mVAContext, VASliceParameterBufferType, - sizeof(VASliceParameterBufferH264), + sizeof(VASliceParameterBufferH264Base), 1, sliceParam, &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); bufferIDCount++; - // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to start codes of current NAL unit - // offset points to first byte of NAL unit - uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset; - if (mInputBuffer != NULL) { + if (mIsEncryptData) { vaStatus = vaCreateBuffer( mVADisplay, mVAContext, VASliceDataBufferType, sliceData->slice_size, //size 1, //num_elements - mInputBuffer + sliceOffset, + sliceData->buffer_addr + sliceData->slice_offset, &bufferIDs[bufferIDCount]); } else { + // This is for clear video playback vaStatus = vaCreateBuffer( mVADisplay, mVAContext, - VAProtectedSliceDataBufferType, + VASliceDataBufferType, sliceData->slice_size, //size 1, //num_elements - (uint8_t*)sliceOffset, // IMR offset + sliceData->buffer_addr + sliceData->slice_offset, &bufferIDs[bufferIDCount]); } CHECK_VA_STATUS("vaCreateSliceDataBuffer"); @@ -354,162 +318,44 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p return DECODE_SUCCESS; } +Decode_Status VideoDecoderAVCSecure::getCodecSpecificConfigs( + VAProfile profile, VAConfigID *config) +{ + VAStatus vaStatus; + VAConfigAttrib attrib[2]; -// Parse byte string pattern "0x000001" (3 bytes) in the current buffer. -// Returns offset of position following the pattern in the buffer if pattern is found or -1 if not found. -int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) { - uint8_t *ptr; - uint32_t left = 0, data = 0, phase = 0; - uint8_t mask1 = 0, mask2 = 0; - - /* Meaning of phase: - 0: initial status, "0x000001" bytes are not found so far; - 1: one "0x00" byte is found; - 2: two or more consecutive "0x00" bytes" are found; - 3: "0x000001" patten is found ; - 4: if there is one more byte after "0x000001"; - */ - - left = length; - ptr = (uint8_t *) (stream + offset); - phase = 0; - - // parse until there is more data and start code not found - while ((left > 0) && (phase < 3)) { - // Check if the address is 32-bit aligned & phase=0, if thats the case we can check 4 bytes instead of one byte at a time. - if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) { - while (left > 3) { - data = *((uint32_t *)ptr); - mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0)); - mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1)); - // If second byte and fourth byte are not zero's then we cannot have a start code here, - // as we need two consecutive zero bytes for a start code pattern. 
- if (mask1 && mask2) { - // skip 4 bytes and start over - ptr += 4; - left -=4; - continue; - } else { - break; - } - } - } - - // At this point either data is not on a 32-bit boundary or phase > 0 so we look at one byte at a time - if (left > 0) { - if (*ptr == STARTCODE_00) { - phase++; - if (phase > 2) { - // more than 2 consecutive '0x00' bytes is found - phase = 2; - } - } else if ((*ptr == STARTCODE_01) && (phase == 2)) { - // start code is found - phase = 3; - } else { - // reset lookup - phase = 0; - } - ptr++; - left--; - } + if (config == NULL) { + ETRACE("Invalid parameter!"); + return DECODE_FAIL; } - if ((left > 0) && (phase == 3)) { - phase = 4; - // return offset of position following the pattern in the buffer which matches "0x000001" byte string - return (int32_t)(ptr - stream); - } - return -1; -} + attrib[0].type = VAConfigAttribRTFormat; + attrib[0].value = VA_RT_FORMAT_YUV420; + attrib[1].type = VAConfigAttribDecSliceMode; + attrib[1].value = VA_DEC_SLICE_MODE_NORMAL; + vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointVLD, &attrib[1], 1); -Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) { - uint8_t naluType; - int32_t naluHeaderLen; - - naluType = *(uint8_t *)(stream + naluStream->naluOffset); - naluType &= NALU_TYPE_MASK; - // first update nalu header length based on nalu type - if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) { - // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes - naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE); - } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) { - //sps, pps, sei, etc, return the entire NAL unit in clear - naluHeaderLen = naluStream->naluLen; + if (attrib[1].value & VA_DEC_SLICE_MODE_BASE) + { + ITRACE("AVC short format used"); + attrib[1].value = VA_DEC_SLICE_MODE_BASE; + } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) { + ITRACE("AVC long format used"); + attrib[1].value = VA_DEC_SLICE_MODE_NORMAL; + } else { - return DECODE_FRAME_DROPPED; + ETRACE("Unsupported Decode Slice Mode!"); + return DECODE_FAIL; } - memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t)); - naluStream->streamPos += 4; - - memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t)); - naluStream->streamPos += 4; - - memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t)); - naluStream->streamPos += 4; - - if (naluHeaderLen) { - memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen); - naluStream->streamPos += naluHeaderLen; - } - return DECODE_SUCCESS; -} - - -// parse start-code prefixed stream, also knowns as Annex B byte stream, commonly used in AVI, ES, MPEG2 TS container -Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) { - int32_t naluOffset, offset, left; - NaluInfo *info; - uint32_t ret = DECODE_SUCCESS; - - naluOffset = 0; - offset = 0; - left = length; - - // leave 4 bytes to copy nalu count - naluStream->streamPos = 4; - naluStream->naluCount = 0; - memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER); - - for (; ;) { - naluOffset = findNalUnitOffset(stream, offset, left); - if (naluOffset == -1) { - break; - } - - if (naluStream->naluCount == 0) { - naluStream->naluOffset = naluOffset; - } else { - naluStream->naluLen = naluOffset - naluStream->naluOffset -
STARTCODE_PREFIX_LEN; - ret = copyNaluHeader(stream, naluStream); - if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) { - LOGW("copyNaluHeader returned %d", ret); - return ret; - } - // starting position for next NALU - naluStream->naluOffset = naluOffset; - } - - if (ret == DECODE_SUCCESS) { - naluStream->naluCount++; - } - - // update next lookup position and length - offset = naluOffset + 1; // skip one byte of NAL unit type - left = length - offset; - } - - if (naluStream->naluCount > 0) { - naluStream->naluLen = length - naluStream->naluOffset; - memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t)); - // ignore return value, either DECODE_SUCCESS or DECODE_FRAME_DROPPED - copyNaluHeader(stream, naluStream); - return DECODE_SUCCESS; - } + vaStatus = vaCreateConfig( + mVADisplay, + profile, + VAEntrypointVLD, + &attrib[0], + 2, + config); + CHECK_VA_STATUS("vaCreateConfig"); - LOGW("number of valid NALU is 0!"); return DECODE_SUCCESS; } - diff --git a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h index af5ae44..2b2e489 100644 --- a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h +++ b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h @@ -26,58 +26,26 @@ #define VIDEO_DECODER_AVC_SECURE_H_ #include "VideoDecoderAVC.h" - +#include "secvideoparser.h" class VideoDecoderAVCSecure : public VideoDecoderAVC { public: VideoDecoderAVCSecure(const char *mimeType); virtual ~VideoDecoderAVCSecure(); - virtual Decode_Status start(VideoConfigBuffer *buffer); virtual void stop(void); - - // data in the decoded buffer is all encrypted. virtual Decode_Status decode(VideoDecodeBuffer *buffer); -private: - enum { - MAX_SLICE_HEADER_SIZE = 30, - MAX_NALU_HEADER_BUFFER = 8192, - MAX_NALU_NUMBER = 400, // > 4096/12 - }; - - // Information of Network Abstraction Layer Unit - struct NaluInfo { - int32_t naluOffset; // offset of NAL unit in the firewalled buffer - int32_t naluLen; // length of NAL unit - int32_t naluHeaderLen; // length of NAL unit header - }; - - struct NaluMetadata { - NaluInfo *naluInfo; - int32_t naluNumber; // number of NAL units - }; - - struct NaluByteStream { - int32_t naluOffset; - int32_t naluLen; - int32_t streamPos; - uint8_t *byteStream; // 4 bytes of naluCount, 4 bytes of naluOffset, 4 bytes of naulLen, 4 bytes of naluHeaderLen, followed by naluHeaderData - int32_t naluCount; - }; +protected: + virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID*config); +private: virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); - int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length); - Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream); - Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream); - private: - NaluMetadata mMetadata; - NaluByteStream mByteStream; + pavp_info_t mEncParam; uint8_t *mNaluHeaderBuffer; - uint8_t *mInputBuffer; + uint8_t *mSliceHeaderBuffer; + uint32_t mIsEncryptData; }; - - #endif /* VIDEO_DECODER_AVC_SECURE_H_ */ diff --git a/videodecoder/securevideo/baytrail/secvideoparser.h b/videodecoder/securevideo/baytrail/secvideoparser.h new file mode 100644 index 0000000..18f487d --- /dev/null +++ b/videodecoder/securevideo/baytrail/secvideoparser.h @@ -0,0 +1,157 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2013 Intel Corporation. All rights reserved. 
+* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +*/ + +#ifndef SEC_VIDEO_PARSER_H_ +#define SEC_VIDEO_PARSER_H_ + +#include <stdint.h> + +/* H264 start code values */ +typedef enum _h264_nal_unit_type +{ + h264_NAL_UNIT_TYPE_unspecified = 0, + h264_NAL_UNIT_TYPE_SLICE, + h264_NAL_UNIT_TYPE_DPA, + h264_NAL_UNIT_TYPE_DPB, + h264_NAL_UNIT_TYPE_DPC, + h264_NAL_UNIT_TYPE_IDR, + h264_NAL_UNIT_TYPE_SEI, + h264_NAL_UNIT_TYPE_SPS, + h264_NAL_UNIT_TYPE_PPS, + h264_NAL_UNIT_TYPE_Acc_unit_delimiter, + h264_NAL_UNIT_TYPE_EOSeq, + h264_NAL_UNIT_TYPE_EOstream, + h264_NAL_UNIT_TYPE_filler_data, + h264_NAL_UNIT_TYPE_SPS_extension, + h264_NAL_UNIT_TYPE_ACP = 19, + h264_NAL_UNIT_TYPE_Slice_extension = 20 +} h264_nal_unit_type_t; + +#define MAX_OP 16 + +enum dec_ref_pic_marking_flags { + IDR_PIC_FLAG = 0, + NO_OUTPUT_OF_PRIOR_PICS_FLAG, + LONG_TERM_REFERENCE_FLAG, + ADAPTIVE_REF_PIC_MARKING_MODE_FLAG +}; + +typedef struct _dec_ref_pic_marking_t { + union { + uint8_t flags; + struct { + uint8_t idr_pic_flag:1; + uint8_t no_output_of_prior_pics_flag:1; + uint8_t long_term_reference_flag:1; + uint8_t adaptive_ref_pic_marking_mode_flag:1; + }; + }; + struct { + uint8_t memory_management_control_operation; + union { + struct { + uint8_t difference_of_pic_nums_minus1; + } op1; + struct { + uint8_t long_term_pic_num; + } op2; + struct { + uint8_t difference_of_pic_nums_minus1; + uint8_t long_term_frame_idx; + } op3; + struct { + uint8_t max_long_term_frame_idx_plus1; + } op4; + struct { + uint8_t long_term_frame_idx; + } op6; + }; + } op[MAX_OP]; +} dec_ref_pic_marking_t; + +enum slice_header_flags { + FIELD_PIC_FLAG = 0, + BOTTOM_FIELD_FLAG +}; + +typedef struct _slice_header_t { + uint8_t nal_unit_type; + uint8_t pps_id; + uint8_t padding; // TODO: padding needed because flags in secfw impl. 
is a big-endian uint16_t + union { + uint8_t flags; + struct { + uint8_t field_pic_flag:1; + uint8_t bottom_field_flag:1; + }; + }; + uint32_t first_mb_in_slice; + uint32_t frame_num; + uint16_t idr_pic_id; + uint16_t pic_order_cnt_lsb; + int32_t delta_pic_order_cnt[2]; + int32_t delta_pic_order_cnt_bottom; +} slice_header_t; + +typedef struct { + uint8_t type; + uint32_t offset; + uint8_t* data; + uint32_t length; + slice_header_t* slice_header; +} nalu_info_t; + +typedef struct { + uint32_t iv[4]; + uint32_t mode; + uint32_t app_id; +} pavp_info_t; + +#define MAX_NUM_NALUS 20 + +typedef struct { + uint8_t* data; + uint32_t length; + pavp_info_t* pavp; + dec_ref_pic_marking_t* dec_ref_pic_marking; + uint32_t num_nalus; + nalu_info_t nalus[MAX_NUM_NALUS]; +} frame_info_t; + +int parser_init(void); +int parse_frame(uint8_t* frame, uint32_t frame_size, uint8_t* nalu_data, uint32_t* nalu_data_size); + +// DEBUG PRINTING +void print_slice_header(slice_header_t* slice_header); +void print_dec_ref_pic_marking(dec_ref_pic_marking_t* dec_ref_pic_marking); +void print_data_bytes(uint8_t* data, uint32_t count); +void print_nalu_data(uint8_t* nalu_data, uint32_t nalu_data_size); + +// BYTESWAPPING +uint16_t byteswap_16(uint16_t word); +uint32_t byteswap_32(uint32_t dword); +void byteswap_slice_header(slice_header_t* slice_header); +void byteswap_dec_ref_pic_marking(dec_ref_pic_marking_t* dec_ref_pic_marking); +void byteswap_nalu_data(uint8_t* nalu_data, uint32_t nalu_data_size); + +#endif /* SEC_VIDEO_PARSER_H_ */ diff --git a/videodecoder/securevideo/baytrail/va_private.h b/videodecoder/securevideo/baytrail/va_private.h new file mode 100644 index 0000000..067e334 --- /dev/null +++ b/videodecoder/securevideo/baytrail/va_private.h @@ -0,0 +1,77 @@ +/*===================== begin_copyright_notice ================================== + +INTEL CONFIDENTIAL +Copyright 2009-2012 +Intel Corporation All Rights Reserved. + +The source code contained or described herein and all documents related to the +source code ("Material") are owned by Intel Corporation or its suppliers or +licensors. Title to the Material remains with Intel Corporation or its suppliers +and licensors. The Material contains trade secrets and proprietary and confidential +information of Intel or its suppliers and licensors. The Material is protected by +worldwide copyright and trade secret laws and treaty provisions. No part of the +Material may be used, copied, reproduced, modified, published, uploaded, posted, +transmitted, distributed, or disclosed in any way without Intel's prior express +written permission. + +No license under any patent, copyright, trade secret or other intellectual +property right is granted to or conferred upon you by disclosure or delivery +of the Materials, either expressly, by implication, inducement, estoppel +or otherwise. Any license under such intellectual property rights must be +express and approved by Intel in writing. + +File Name: va_private.h +Abstract: libva private API header file + +Environment: Linux/Android + +Notes: + +======================= end_copyright_notice ==================================*/ +#ifndef __VA_PRIVATE_H__ +#define __VA_PRIVATE_H__ +#include <va/va.h> +#define ENABLE_PAVP_LINUX 1 +// Misc parameter for encoder +#define VAEncMiscParameterTypePrivate -2 +// encryption parameters for PAVP +#define VAEncryptionParameterBufferType -3 + +typedef struct _VAEncMiscParameterPrivate +{ + unsigned int target_usage; // Valid values 1-7 for AVC & MPEG2. 
+ unsigned int reserved[7]; // Reserved for future use. +} VAEncMiscParameterPrivate; + +/*VAEncryptionParameterBuffer*/ +typedef struct _VAEncryptionParameterBuffer +{ + //Not used currently + unsigned int encryptionSupport; + //Not used currently + unsigned int hostEncryptMode; + // For IV, Counter input + unsigned int pavpAesCounter[2][4]; + // not used currently + unsigned int pavpIndex; + // PAVP mode, CTR, CBC, DEDE etc + unsigned int pavpCounterMode; + unsigned int pavpEncryptionType; + // not used currently + unsigned int pavpInputSize[2]; + // not used currently + unsigned int pavpBufferSize[2]; + // not used currently + VABufferID pvap_buf; + // set to TRUE if protected media + unsigned int pavpHasBeenEnabled; + // not used currently + unsigned int IntermmediatedBufReq; + // not used currently + unsigned int uiCounterIncrement; + // AppId: PAVP session Index from application + unsigned int app_id; + +} VAEncryptionParameterBuffer; + +#endif -- cgit v1.2.3 From 0c800c0a9e616635cc660b27e54394832ff3c514 Mon Sep 17 00:00:00 2001 From: Liu Bolun Date: Thu, 18 Jul 2013 16:27:36 +0800 Subject: Enhance mix_encode2 test tool to add VP8 encoder option BZ: 124301 Add VP8 encode option and IVF writer class into mix_encode2. Change-Id: I2f8faba86a26af61b845ddd9f342a200f3c020f7 Signed-off-by: Liu Bolun Reviewed-on: http://android.intel.com:8080/121789 Reviewed-by: Shi, PingX Reviewed-by: buildbot Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- test/mix_encoder2.cpp | 174 +++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 164 insertions(+), 10 deletions(-) diff --git a/test/mix_encoder2.cpp b/test/mix_encoder2.cpp index 2fd2b1c..1d06e37 100755 --- a/test/mix_encoder2.cpp +++ b/test/mix_encoder2.cpp @@ -51,6 +51,8 @@ enum { static const int BOX_WIDTH = 64; static const int PRELOAD_FRAME_NUM = 16; uint32_t gNumFramesOutput = 0; +static uint64_t pts; // IVF frame headers carry a 64-bit timestamp + #define CHECK_ENC_STATUS(FUNC)\ if (ret < ENCODE_SUCCESS) { \ @@ -710,6 +712,8 @@ private: static const char *AVC_MIME_TYPE = "video/h264"; static const char *MPEG4_MIME_TYPE = "video/mpeg4"; static const char *H263_MIME_TYPE = "video/h263"; +static const char *VP8_MIME_TYPE = "video/x-webm"; + class MixEncoder : public MediaSource { @@ -730,6 +734,8 @@ public: mMixCodec = (char*) MPEG4_MIME_TYPE; } else if (strcmp(mime, MEDIA_MIMETYPE_VIDEO_H263) == 0) { mMixCodec = (char*) H263_MIME_TYPE; + } else if (strcmp(mime, MEDIA_MIMETYPE_VIDEO_VPX) == 0) { + mMixCodec = (char*) VP8_MIME_TYPE; } else { mMixCodec = (char*) AVC_MIME_TYPE; } @@ -939,10 +945,11 @@ public: CHECK_STATUS(err); VideoOutputFormat format; - if (mEncodeFrameCount == 2 && (strcasecmp(mMixCodec, H263_MIME_TYPE) != 0)) { + if ((mEncodeFrameCount == 2 && (strcasecmp(mMixCodec, H263_MIME_TYPE) != 0))&& + (mEncodeFrameCount == 2 && (strcasecmp(mMixCodec, VP8_MIME_TYPE) != 0))){ format = OUTPUT_CODEC_DATA; mFirstFrame = true; - }else + }else format = OUTPUT_EVERYTHING; err = getoutput(*buffer, format); @@ -978,7 +985,9 @@ private: mEncoderParams.profile = (VAProfile)VAProfileMPEG4Simple; } else if (strcmp(mMixCodec, H263_MIME_TYPE) == 0) { mEncoderParams.profile = (VAProfile)VAProfileH263Baseline; - } else { + } else if (strcmp(mMixCodec, VP8_MIME_TYPE) == 0) { + mEncoderParams.profile = (VAProfile)VAProfileVP8Version0_3; + } else { mEncoderParams.profile = (VAProfile)VAProfileH264Baseline; } @@ -1045,6 +1054,146 @@ private: }; +class IVFWriter : public MediaWriter { + +public: + const char* mFile; + FILE* mFilehandle; + sp<MediaSource> mSource; + pthread_t mThread; + 
bool mRunning; + bool mEOS; + char vp8_file_header[32]; + +public: + IVFWriter(char* file) { + mFile = file; + mRunning = false; + } + + status_t addSource(const sp<MediaSource> &source) { + mSource = source; + return OK; + } + + bool reachedEOS() { + return mEOS; + } + + status_t start(MetaData *params = NULL) { + + mSource->start(); + + mRunning = true; + mEOS = false; + + mFilehandle = fopen(mFile, "w+"); + if (mFilehandle == NULL) + return errno; + + write_ivf_file_header(params); + + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + + pthread_create(&mThread, &attr, IVFWriter::ThreadFunc, this); + pthread_attr_destroy(&attr); + + return OK; + } + + status_t stop() { + mRunning = false; + void *dummy; + pthread_join(mThread, &dummy); + fclose(mFilehandle); + return OK; + } + + status_t pause() { + return OK; + } + + + static void mem_put_le16(char *mem, unsigned int val) + { + mem[0] = val; + mem[1] = val>>8; + } + + static void mem_put_le32(char *mem, unsigned int val) + { + mem[0] = val; + mem[1] = val>>8; + mem[2] = val>>16; + mem[3] = val>>24; + } + + void write_ivf_file_header(MetaData *params) { + + int width,height,framerate; + params->findInt32(kKeyWidth, &width); + params->findInt32(kKeyHeight, &height); + params->findInt32(kKeyFrameRate, &framerate); + /* write ivf header */ + vp8_file_header[0] = 'D'; + vp8_file_header[1] = 'K'; + vp8_file_header[2] = 'I'; + vp8_file_header[3] = 'F'; + mem_put_le16(vp8_file_header+4, 0); /* version */ + mem_put_le16(vp8_file_header+6, 32); /* headersize */ + mem_put_le32(vp8_file_header+8, 0x30385056); /* fourcc 'VP80' */ + mem_put_le16(vp8_file_header+12, width); /* width */ + mem_put_le16(vp8_file_header+14, height); /* height */ + mem_put_le32(vp8_file_header+16, framerate); /* rate default at 30 */ + mem_put_le32(vp8_file_header+20, 1); /* scale */ + mem_put_le32(vp8_file_header+24, 50); /* frame count, hardcoded to 50 */ + mem_put_le32(vp8_file_header+28, 0); /* unused */ + fwrite(vp8_file_header, 1, 32, mFilehandle); + + } + +private: + + static void *ThreadFunc(void *me) { + IVFWriter *writer = static_cast<IVFWriter *>(me); + + status_t err = OK; + char vp8_frame_header[12]; + unsigned int vp8_frame_length; + + + while (writer->mRunning) { + MediaBuffer* buffer; + err = writer->mSource->read(&buffer, NULL); + + if (err == OK) { + vp8_frame_length = buffer->range_length(); + mem_put_le32(vp8_frame_header, vp8_frame_length); + mem_put_le32(vp8_frame_header+4, pts&0xFFFFFFFF); + mem_put_le32(vp8_frame_header+8, pts >> 32); + fwrite(vp8_frame_header, 1, 12, writer->mFilehandle); + pts++; + fwrite(buffer->data()+buffer->range_offset(), 1, buffer->range_length(), writer->mFilehandle); + buffer->release(); + continue; + }else { + if (err != ERROR_END_OF_STREAM) + LOG("IVFWriter::threadfunc err=%d\n", err); + writer->mEOS = true; + writer->mRunning = false; + fflush(writer->mFilehandle); + return NULL; + } + } + + return NULL; + } + +}; + + class RawWriter : public MediaWriter { public: RawWriter(char* file) { @@ -1138,7 +1287,7 @@ void usage() { printf("Usage: mix_encoder2 [options]\n\n"); printf(" -a/--initQP set initQP, default 0\n"); printf(" -b/--bitrate set bitrate bps, default 10M\n"); - printf(" -c/--codec select codec, like H264(default), MPEG4, H263\n"); + printf(" -c/--codec select codec, like H264(default), MPEG4, H263, VP8\n"); printf(" -d/--intraPeriod set IntraPeriod, default 30\n"); printf(" -e/--encoder select encoder, like MIX(default), OMXCODEC\n"); printf(" -f set output file name\n"); @@ 
-1148,7 +1297,7 @@ void usage() { printf(" -l/--idrInterval set IdrInterval, default 1\n"); printf(" -m/--disableMetadata disable Metadata Mode(default enabled)\n"); printf(" -n/--count set source frame number, default 30\n"); - printf(" -o/--outputformat set output file format, like MP4(default), RAW\n"); + printf(" -o/--outputformat set output file format, like MP4(default), RAW, IVF(only for VP8)\n"); printf(" -p/--fps set frame rate, default 30\n"); printf(" -q/--minQP set minQP, default 0\n"); printf(" -r/--rcMode set rc mode, like VBR(default), CBR, VCM, NO_RC\n"); @@ -1220,9 +1369,9 @@ int main(int argc, char* argv[]) const char *SRCTYPE[] = {"MALLOC", "VASURFACE", "GFX", "GRALLOC", "CAMERASOURCE", "SURFACEMEDIASOURCE", "MEMHEAP", NULL}; const char *ENCTYPE[] = {"MIX", "OMXCODEC", NULL}; - const char *CODEC[] = {"H264", "MPEG4", "H263", NULL}; + const char *CODEC[] = {"H264", "MPEG4", "H263", "VP8", NULL}; const char *RCMODE[] = {"VBR", "CBR", "VCM", "NO_RC", NULL}; - const char *OUTFORMAT[] = {"MP4", "RAW", NULL}; + const char *OUTFORMAT[] = {"MP4", "RAW", "IVF", NULL}; while ((c = getopt_long(argc, argv, short_opts, long_opts, NULL) ) != EOF) { switch (c) { @@ -1401,7 +1550,7 @@ int main(int argc, char* argv[]) SrcFps, MetadataMode, Yuvfile); } - printf("Setup Encoder\n"); + printf("Setup Encoder EncCodec is %d\n",EncCodec); //setup encoder sp<MetaData> enc_meta = new MetaData; switch (EncCodec) { @@ -1411,6 +1560,9 @@ int main(int argc, char* argv[]) case 2: enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263); break; + case 3: + enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VPX); + break; default: enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC); break; @@ -1456,15 +1608,17 @@ int main(int argc, char* argv[]) if (OutFormat == 0) writer = new MPEG4Writer(OutFileName); - else + else if (OutFormat == 1) writer = new RawWriter(OutFileName); + else + writer = new IVFWriter(OutFileName); writer->addSource(encoder); printf("Start encoding\n"); int64_t start = systemTime(); - CHECK_EQ((status_t)OK, writer->start()); + CHECK_EQ((status_t)OK, writer->start(enc_meta.get())); while (!writer->reachedEOS()) { usleep(100000); } -- cgit v1.2.3 From 55ce5b67035b77294d449cd7db17a2a9bc972706 Mon Sep 17 00:00:00 2001 From: Liu Bolun Date: Wed, 31 Jul 2013 10:44:17 +0800 Subject: Rebase VP8 encode middleware due to new libva interface. BZ: 125303 Add init_qp for VP8 encode in libmix layer; re-construct SPS/PPS data according to the new libva interface; add rendering HRD/RC Param/FrameRate functions. 
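For orientation before the diff: the new renderRCParams/renderFrameRateParams/renderHRDParams functions below all follow the same libva misc-parameter idiom — allocate one buffer holding the generic VAEncMiscParameterBuffer header plus a typed payload, fill it through vaMapBuffer, and queue it with vaRenderPicture. What follows is a minimal sketch of that idiom for the rate-control case only, assuming valid display/context handles; the helper name is hypothetical and buffer cleanup is trimmed, so treat it as a reading aid, not code from this patch:

#include <string.h>
#include <va/va.h>

// Hypothetical helper, not part of this patch: demonstrates the
// misc-parameter sequence the VP8 encoder hunks below repeat for
// rate control, frame rate, and HRD.
static VAStatus renderRateControlSketch(VADisplay dpy, VAContextID ctx,
        unsigned int bitsPerSecond, unsigned int initQP, unsigned int minQP) {
    VABufferID buf = VA_INVALID_ID;
    // One allocation carries the header and the typed payload back to back.
    VAStatus st = vaCreateBuffer(dpy, ctx, VAEncMiscParameterBufferType,
            sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl),
            1, NULL, &buf);
    if (st != VA_STATUS_SUCCESS)
        return st;

    VAEncMiscParameterBuffer *misc = NULL;
    st = vaMapBuffer(dpy, buf, (void **)&misc);
    if (st != VA_STATUS_SUCCESS)
        return st;

    misc->type = VAEncMiscParameterTypeRateControl; // tag selects the payload layout
    VAEncMiscParameterRateControl *rc = (VAEncMiscParameterRateControl *)misc->data;
    memset(rc, 0, sizeof(*rc));
    rc->bits_per_second = bitsPerSecond;
    rc->initial_qp = initQP;
    rc->min_qp = minQP;
    vaUnmapBuffer(dpy, buf);

    // Queued against the current frame like any other parameter buffer.
    return vaRenderPicture(dpy, ctx, &buf, 1);
}

The frame-rate and HRD variants in the diff differ only in the type tag (VAEncMiscParameterTypeFrameRate, VAEncMiscParameterTypeHRD) and the payload struct.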
Change-Id: I7eaea105c1be582a665b052e226da47eba4a9dc6 Signed-off-by: Liu Bolun Signed-off-by: pingshix Signed-off-by: pingshix Reviewed-on: http://android.intel.com:8080/122111 Reviewed-by: buildbot Reviewed-by: cactus Tested-by: cactus --- videoencoder/VideoEncoderDef.h | 1 + videoencoder/VideoEncoderVP8.cpp | 141 +++++++++++++++++++++++++-------------- videoencoder/VideoEncoderVP8.h | 5 +- 3 files changed, 96 insertions(+), 51 deletions(-) diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 08f9e7a..426db02 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -594,6 +594,7 @@ struct VideoParamsVP8 : VideoParamConfigSet { uint32_t kf_max_dist; uint32_t min_qp; uint32_t max_qp; + uint32_t init_qp; uint32_t rc_undershoot; uint32_t rc_overshoot; uint32_t hrd_buf_size; diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index 84a0746..e8a05e1 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -24,6 +24,7 @@ VideoEncoderVP8::VideoEncoderVP8() mVideoParamsVP8.kf_max_dist = 30; mVideoParamsVP8.min_qp = 4; mVideoParamsVP8.max_qp = 63; + mVideoParamsVP8.init_qp = 26; mVideoParamsVP8.rc_undershoot = 100; mVideoParamsVP8.rc_overshoot = 100; mVideoParamsVP8.hrd_buf_size = 6000; @@ -51,26 +52,16 @@ Encode_Status VideoEncoderVP8::renderSequenceParams() { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncSequenceParameterBufferVP8 vp8SeqParam; - uint32_t frameRateNum = mComParams.frameRate.frameRateNum; - uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom; + LOG_V( "Begin\n"); vp8SeqParam.frame_width = mComParams.resolution.width; vp8SeqParam.frame_height = mComParams.resolution.height; - vp8SeqParam.frame_rate = (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom; vp8SeqParam.error_resilient = mVideoParamsVP8.error_resilient; vp8SeqParam.kf_auto = mVideoParamsVP8.kf_auto; vp8SeqParam.kf_min_dist = mVideoParamsVP8.kf_min_dist; vp8SeqParam.kf_max_dist = mVideoParamsVP8.kf_max_dist; vp8SeqParam.bits_per_second = mComParams.rcParams.bitRate; - vp8SeqParam.min_qp = mVideoParamsVP8.min_qp; - vp8SeqParam.max_qp = mVideoParamsVP8.max_qp; - vp8SeqParam.rc_undershoot = mVideoParamsVP8.rc_undershoot; - vp8SeqParam.rc_overshoot = mVideoParamsVP8.rc_overshoot; - vp8SeqParam.hrd_buf_size = mVideoParamsVP8.hrd_buf_size; - vp8SeqParam.hrd_buf_initial_fullness = mVideoParamsVP8.hrd_buf_initial_fullness; - vp8SeqParam.hrd_buf_optimal_fullness = mVideoParamsVP8.hrd_buf_optimal_fullness; -// memcpy(vp8SeqParam.reference_frames, mVP8InternalFrames, sizeof(mVP8InternalFrames)); memcpy(vp8SeqParam.reference_frames, mAutoRefSurfaces, sizeof(mAutoRefSurfaces) * mAutoReferenceSurfaceNum); vaStatus = vaCreateBuffer( @@ -95,17 +86,19 @@ Encode_Status VideoEncoderVP8::renderPictureParams(EncodeTask *task) { LOG_V( "Begin\n"); vp8PicParam.coded_buf = task->coded_buffer; - vp8PicParam.pic_flags.bits.force_kf = mVideoConfigVP8.force_kf; - vp8PicParam.pic_flags.bits.no_ref_last = mVideoConfigVP8.no_ref_last; - vp8PicParam.pic_flags.bits.no_ref_gf = mVideoConfigVP8.no_ref_gf; - vp8PicParam.pic_flags.bits.no_ref_arf = mVideoConfigVP8.no_ref_arf; + vp8PicParam.pic_flags.value = 0; + vp8PicParam.ref_flags.bits.force_kf = mVideoConfigVP8.force_kf; //0; + if(!vp8PicParam.ref_flags.bits.force_kf) { + vp8PicParam.ref_flags.bits.no_ref_last = mVideoConfigVP8.no_ref_last; + vp8PicParam.ref_flags.bits.no_ref_arf = mVideoConfigVP8.no_ref_arf; + 
vp8PicParam.ref_flags.bits.no_ref_gf = mVideoConfigVP8.no_ref_gf; + } + vp8PicParam.pic_flags.bits.refresh_entropy_probs = 0; + vp8PicParam.sharpness_level = 2; + vp8PicParam.pic_flags.bits.num_token_partitions = 2; vp8PicParam.pic_flags.bits.refresh_last = mVideoConfigVP8.refresh_last; vp8PicParam.pic_flags.bits.refresh_golden_frame = mVideoConfigVP8.refresh_golden_frame; vp8PicParam.pic_flags.bits.refresh_alternate_frame = mVideoConfigVP8.refresh_alternate_frame; - vp8PicParam.pic_flags.bits.refresh_entropy_probs = mVideoConfigVP8.refresh_entropy_probs; - vp8PicParam.pic_flags.bits.num_token_partitions = 2; -// vp8PicParam.pic_flags.value = mVideoConfigVP8.value; - vp8PicParam.sharpness_level = mVideoConfigVP8.sharpness_level; vaStatus = vaCreateBuffer( mVADisplay, mVAContext, @@ -122,44 +115,92 @@ Encode_Status VideoEncoderVP8::renderPictureParams(EncodeTask *task) { return ret; } -Encode_Status VideoEncoderVP8::renderSliceParams(EncodeTask *task) { +Encode_Status VideoEncoderVP8::renderRCParams(void) +{ + VABufferID rc_param_buf; + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncMiscParameterBuffer *misc_param, *misc_param_tmp; + VAEncMiscParameterRateControl *misc_rate_ctrl; + + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncMiscParameterBufferType, + sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl), + 1,NULL,&rc_param_buf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); - VAStatus vaStatus = VA_STATUS_SUCCESS; - uint32_t sliceHeight; - uint32_t sliceHeightInMB; + vaMapBuffer(mVADisplay, rc_param_buf,(void **)&misc_param); - VAEncSliceParameterBuffer sliceParams; + misc_param->type = VAEncMiscParameterTypeRateControl; + misc_rate_ctrl = (VAEncMiscParameterRateControl *)misc_param->data; + memset(misc_rate_ctrl, 0, sizeof(*misc_rate_ctrl)); + misc_rate_ctrl->bits_per_second = mComParams.rcParams.bitRate; + misc_rate_ctrl->target_percentage = 100; + misc_rate_ctrl->window_size = 1000; + misc_rate_ctrl->initial_qp = mVideoParamsVP8.init_qp; + misc_rate_ctrl->min_qp = mVideoParamsVP8.min_qp; + misc_rate_ctrl->basic_unit_size = 0; + misc_rate_ctrl->max_qp = mVideoParamsVP8.max_qp; - LOG_V( "Begin\n\n"); + vaUnmapBuffer(mVADisplay, rc_param_buf); - sliceHeight = mComParams.resolution.height; - sliceHeight += 15; - sliceHeight &= (~15); - sliceHeightInMB = sliceHeight / 16; + vaStatus = vaRenderPicture(mVADisplay,mVAContext, &rc_param_buf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture");; + return 0; +} - sliceParams.start_row_number = 0; - sliceParams.slice_height = sliceHeightInMB; - sliceParams.slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0; - sliceParams.slice_flags.bits.disable_deblocking_filter_idc = 0; +Encode_Status VideoEncoderVP8::renderFrameRateParams(void) +{ + VABufferID framerate_param_buf; + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncMiscParameterBuffer *misc_param, *misc_param_tmp; + VAEncMiscParameterFrameRate * misc_framerate; + uint32_t frameRateNum = mComParams.frameRate.frameRateNum; + uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom; - LOG_V("======VP8 slice params======\n"); - LOG_I( "start_row_number = %d\n", (int) sliceParams.start_row_number); - LOG_I( "sliceHeightInMB = %d\n", (int) sliceParams.slice_height); - LOG_I( "is_intra = %d\n", (int) sliceParams.slice_flags.bits.is_intra); + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncMiscParameterBufferType, + sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterFrameRate), + 1,NULL,&framerate_param_buf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); 
- vaStatus = vaCreateBuffer( - mVADisplay, mVAContext, - VAEncSliceParameterBufferType, - sizeof(VAEncSliceParameterBuffer), - 1, &sliceParams, - &mSliceParamBuf); + vaMapBuffer(mVADisplay, framerate_param_buf,(void **)&misc_param); + misc_param->type = VAEncMiscParameterTypeFrameRate; + misc_framerate = (VAEncMiscParameterFrameRate *)misc_param->data; + memset(misc_framerate, 0, sizeof(*misc_framerate)); + misc_framerate->framerate = (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom; + vaUnmapBuffer(mVADisplay, framerate_param_buf); + + vaStatus = vaRenderPicture(mVADisplay,mVAContext, &framerate_param_buf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture");; + + return 0; +} + +Encode_Status VideoEncoderVP8::renderHRDParams(void) +{ + VABufferID hrd_param_buf; + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncMiscParameterBuffer *misc_param, *misc_param_tmp; + VAEncMiscParameterHRD * misc_hrd; //*misc_rate_ctrl; + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncMiscParameterBufferType, + sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterHRD), + 1,NULL,&hrd_param_buf); CHECK_VA_STATUS_RETURN("vaCreateBuffer"); - vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1); - CHECK_VA_STATUS_RETURN("vaRenderPicture"); + vaMapBuffer(mVADisplay, hrd_param_buf,(void **)&misc_param); + misc_param->type = VAEncMiscParameterTypeHRD; + misc_hrd = (VAEncMiscParameterHRD *)misc_param->data; + memset(misc_hrd, 0, sizeof(*misc_hrd)); + misc_hrd->buffer_size = 6000; + misc_hrd->initial_buffer_fullness = 4000; + misc_hrd->optimal_buffer_fullness = 5000; + vaUnmapBuffer(mVADisplay, hrd_param_buf); - LOG_V( "end\n"); - return ENCODE_SUCCESS; + vaStatus = vaRenderPicture(mVADisplay,mVAContext, &hrd_param_buf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture");; + + return 0; } @@ -169,6 +210,9 @@ Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) { LOG_V( "Begin\n"); if (mFrameNum == 0) { + ret = renderFrameRateParams(); + ret = renderRCParams(); + ret = renderHRDParams(); ret = renderSequenceParams(); CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); } @@ -176,9 +220,6 @@ Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) { ret = renderPictureParams(task); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); - ret = renderSliceParams(task); - CHECK_ENCODE_STATUS_RETURN("renderSliceParams"); - LOG_V( "End\n"); return ret; } diff --git a/videoencoder/VideoEncoderVP8.h b/videoencoder/VideoEncoderVP8.h index ebf2df9..efc3199 100644 --- a/videoencoder/VideoEncoderVP8.h +++ b/videoencoder/VideoEncoderVP8.h @@ -35,7 +35,10 @@ protected: private: Encode_Status renderSequenceParams(); Encode_Status renderPictureParams(EncodeTask *task); - Encode_Status renderSliceParams(EncodeTask *task); + Encode_Status renderRCParams(void); + Encode_Status renderHRDParams(void); + Encode_Status renderFrameRateParams(void); + VideoConfigVP8 mVideoConfigVP8; VideoParamsVP8 mVideoParamsVP8; -- cgit v1.2.3 From ad1d56b1cf2a38c2db408b634e9fa079aa1f4cfd Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Mon, 29 Jul 2013 11:57:41 +0800 Subject: Use new vaCreateSurfaces API to map external buffers for video encoder BZ: 127566 Use new vaCreateSurfaces API to map external buffers for video encoder Change-Id: I8518f79fd1d07473721ef090bc00910a057f763b Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/121486 Reviewed-by: buildbot Reviewed-by: Yuan, Shengquan Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- 
test/mix_encoder2.cpp | 30 ++-- videoencoder/VideoEncoderBase.cpp | 317 +++++++++++++++----------------------- videoencoder/VideoEncoderBase.h | 2 +- 3 files changed, 139 insertions(+), 210 deletions(-) diff --git a/test/mix_encoder2.cpp b/test/mix_encoder2.cpp index 1d06e37..5682d33 100755 --- a/test/mix_encoder2.cpp +++ b/test/mix_encoder2.cpp @@ -266,8 +266,6 @@ protected: DummySource(const DummySource &); DummySource &operator=(const DummySource &); -// int mMode = 0; //0:Camera malloc , 1: WiDi clone, 2: WiDi ext, 3: WiDi user, 4: Raw, 5: SurfaceMediaSource - //for uploading src pictures, also for Camera malloc, WiDi clone, raw mode usrptr storage uint8_t* mUsrptr[PRELOAD_FRAME_NUM]; @@ -341,6 +339,7 @@ public: status_t createResource() { uint32_t size = mStride * mHeight * 3 /2; + size += 0x0FFF; ValueInfo vinfo; vinfo.mode = MEM_MODE_MALLOC; @@ -359,7 +358,7 @@ public: { mBuffers[i] = new MemoryBase(mHeap, i * size, size); - mUsrptr[i] = (uint8_t*) mBuffers[i]->pointer(); + mUsrptr[i] = (uint8_t*) ((int) (mBuffers[i]->pointer() + 0x0FFF) & ~0x0FFF); mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t) mUsrptr[i]); mIMB[i]->SetValueInfo(&vinfo); @@ -396,9 +395,9 @@ extern "C" { class VASurfaceSource : public DummySource { public: - VASurfaceSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv) : + VASurfaceSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv, int mode) : DummySource (width, height, stride, nFrames, fps, mdata, yuv) { - mMode = 1; + mMode = mode; } virtual ~VASurfaceSource() { @@ -1005,7 +1004,7 @@ private: ret = mVideoEncoder->setParameters(&mStoreMetaDataInBuffers); CHECK_ENC_STATUS("MIX::setParameters StoreMetaDataInBuffers"); - if (strcmp(mMixCodec, MPEG4_MIME_TYPE) == 0) { + if (strcmp(mMixCodec, AVC_MIME_TYPE) == 0) { VideoParamsAVC AVCParam; mVideoEncoder->getParameters(&AVCParam); AVCParam.idrInterval = mIdrInt; @@ -1301,7 +1300,7 @@ void usage() { printf(" -p/--fps set frame rate, default 30\n"); printf(" -q/--minQP set minQP, default 0\n"); printf(" -r/--rcMode set rc mode, like VBR(default), CBR, VCM, NO_RC\n"); - printf(" -s/--src select source, like MALLOC(default), VASURFACE, GFX, GRALLOC, CAMERASOURCE, MEMHEAP\n"); + printf(" -s/--src select source, like MALLOC(default), VASURFACE, KBUFHANDLE, GFX, GRALLOC, MEMHEAP (CAMERASOURCE, not supported yet) \n"); printf(" -w -h set source width /height, default 1280*720\n"); printf(" -t/--disableFrameSkip disable frame skip, default is false\n"); printf("\n"); @@ -1366,8 +1365,7 @@ int main(int argc, char* argv[]) char c; - const char *SRCTYPE[] = {"MALLOC", "VASURFACE", "GFX", "GRALLOC", - "CAMERASOURCE", "SURFACEMEDIASOURCE", "MEMHEAP", NULL}; + const char *SRCTYPE[] = {"MALLOC", "VASURFACE", "KBUFHANDLE", "GFX", "GRALLOC", "MEMHEAP", "CAMERASOURCE", "SURFACEMEDIASOURCE", NULL}; const char *ENCTYPE[] = {"MIX", "OMXCODEC", NULL}; const char *CODEC[] = {"H264", "MPEG4", "H263", "VP8", NULL}; const char *RCMODE[] = {"VBR", "CBR", "VCM", "NO_RC", NULL}; const char *OUTFORMAT[] = {"MP4", "RAW", "IVF", NULL}; @@ -1534,20 +1532,22 @@ int main(int argc, char* argv[]) SrcFps, MetadataMode, Yuvfile); } else if (SrcType == 1) { source = new VASurfaceSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, - SrcFps, MetadataMode, Yuvfile); + SrcFps, MetadataMode, Yuvfile, 0); } else if (SrcType == 2) { + source = new VASurfaceSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, + SrcFps, MetadataMode, Yuvfile, 1); + } else if (SrcType == 3) { source = new GfxSource(SrcWidth, SrcHeight, 
SrcStride, SrcFrameNum, SrcFps, MetadataMode, Yuvfile); - } else if (SrcType == 3) { + } else if (SrcType == 4) { source = new GrallocSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, SrcFps, MetadataMode, Yuvfile); - } else if (SrcType == 4) { -// source = new CameraSource(); } else if (SrcType == 5) { - // source = new SurfaceMediaSource(); - } else if (SrcType == 6) { source = new MemHeapSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, SrcFps, MetadataMode, Yuvfile); + } else{ + printf("Source Type is not supported\n"); + return 0; } printf("Setup Encoder EncCodec is %d\n",EncCodec); diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 935db86..7a04da2 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -11,6 +11,7 @@ #include "IntelMetadataBuffer.h" #include #include +#include #ifdef IMG_GFX #include #endif @@ -169,7 +170,6 @@ Encode_Status VideoEncoderBase::start() { LOG_V( "======VA Create Surfaces for Rec/Ref frames ======\n"); VASurfaceID surfaces[2]; - VASurfaceAttributeTPI attribute_tpi; uint32_t stride_aligned, height_aligned; if(mAutoReference == false){ stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; @@ -187,32 +187,24 @@ Encode_Status VideoEncoderBase::start() { } } +#if 0 if(mComParams.profile == VAProfileVP8Version0_3) attribute_tpi.size = stride_aligned * height_aligned + stride_aligned * ((((mComParams.resolution.height + 1) / 2 + 32)+63)/64) *64;// FW need w*h + w*chrom_height else attribute_tpi.size = stride_aligned * height_aligned * 3 / 2; - - attribute_tpi.luma_stride = stride_aligned; - attribute_tpi.chroma_u_stride = stride_aligned; - attribute_tpi.chroma_v_stride = stride_aligned; - attribute_tpi.luma_offset = 0; - attribute_tpi.chroma_u_offset = stride_aligned * height_aligned; - attribute_tpi.chroma_v_offset = stride_aligned * height_aligned; - attribute_tpi.pixel_format = VA_FOURCC_NV12; - attribute_tpi.type = VAExternalMemoryNULL; +#endif if(mAutoReference == false){ - vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned, - VA_RT_FORMAT_YUV420, 2, surfaces, &attribute_tpi); - CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); + vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, + stride_aligned, height_aligned, surfaces, 2, NULL, 0); mRefSurface = surfaces[0]; mRecSurface = surfaces[1]; }else { mAutoRefSurfaces = new VASurfaceID [mAutoReferenceSurfaceNum]; - vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned, - VA_RT_FORMAT_YUV420, mAutoReferenceSurfaceNum, mAutoRefSurfaces, &attribute_tpi); - CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); + vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, + stride_aligned, height_aligned, mAutoRefSurfaces, mAutoReferenceSurfaceNum, NULL, 0); } + CHECK_VA_STATUS_RETURN("vaCreateSurfaces"); //Prepare all Surfaces to be added into Context uint32_t contextSurfaceCnt; @@ -682,8 +674,6 @@ Encode_Status VideoEncoderBase::prepareForOutput( mOffsetInSeg = ii; } #endif - - if (!mSliceSizeOverflow) { mSliceSizeOverflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK; } @@ -1362,27 +1352,21 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( return ENCODE_NOT_SUPPORTED; } - VASurfaceAttributeTPI attribute_tpi; - - attribute_tpi.size = expectedSize; - attribute_tpi.luma_stride = width; - attribute_tpi.chroma_u_stride = width; - attribute_tpi.chroma_v_stride = width; - attribute_tpi.luma_offset = 0; - attribute_tpi.chroma_u_offset = width*height; - 
attribute_tpi.chroma_v_offset = width*height; - attribute_tpi.pixel_format = VA_FOURCC_NV12; - attribute_tpi.type = VAExternalMemoryNULL; - - vaStatus = vaCreateSurfacesWithAttribute(mVADisplay, width, height, VA_RT_FORMAT_YUV420, - 1, &surface, &attribute_tpi); - CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); + ValueInfo vinfo; + vinfo.mode = MEM_MODE_SURFACE; + vinfo.width = width; + vinfo.height = height; + vinfo.lumaStride = width; + vinfo.size = expectedSize; + vinfo.format = format; + + surface = CreateSurfaceFromExternalBuf(0, vinfo); + if (surface == VA_INVALID_SURFACE) + return ENCODE_DRIVER_FAIL; vaStatus = vaDeriveImage(mVADisplay, surface, &image); CHECK_VA_STATUS_RETURN("vaDeriveImage"); - LOG_V( "vaDeriveImage Done\n"); - vaStatus = vaMapBuffer(mVADisplay, image.buf, (void **) usrptr); CHECK_VA_STATUS_RETURN("vaMapBuffer"); @@ -1427,15 +1411,12 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( LOG_I("image->num_planes = %d\n", image.num_planes); LOG_I("image->width = %d\n", image.width); LOG_I("image->height = %d\n", image.height); - LOG_I ("data_size = %d\n", image.data_size); LOG_I ("usrptr = 0x%p\n", *usrptr); LOG_I ("map->value = 0x%p\n ", (void *)map->value); vaStatus = vaUnmapBuffer(mVADisplay, image.buf); CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); - - vaStatus = vaDestroyImage(mVADisplay, image.image_id); CHECK_VA_STATUS_RETURN("vaDestroyImage"); @@ -1505,6 +1486,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurface(SurfaceMap *map) { Encode_Status ret = ENCODE_SUCCESS; VASurfaceID surface; + //try to get kbufhandle from SurfaceID uint32_t fourCC = 0; uint32_t lumaStride = 0; uint32_t chromaUStride = 0; @@ -1514,11 +1496,6 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurface(SurfaceMap *map) { uint32_t chromaVOffset = 0; uint32_t kBufHandle = 0; - VASurfaceAttributeTPI vaSurfaceAttrib; - uint32_t buf; - - vaSurfaceAttrib.buffers = &buf; - vaStatus = vaLockSurface( (VADisplay)map->vinfo.handle, (VASurfaceID)map->value, &fourCC, &lumaStride, &chromaUStride, &chromaVStride, @@ -1538,23 +1515,13 @@ Encode_Status VideoEncoderBase::surfaceMappingForSurface(SurfaceMap *map) { vaStatus = vaUnlockSurface((VADisplay)map->vinfo.handle, (VASurfaceID)map->value); CHECK_VA_STATUS_RETURN("vaUnlockSurface"); - vaSurfaceAttrib.count = 1; - vaSurfaceAttrib.size = map->vinfo.width * map->vinfo.height * 3 / 2; - vaSurfaceAttrib.luma_stride = lumaStride; - vaSurfaceAttrib.chroma_u_stride = chromaUStride; - vaSurfaceAttrib.chroma_v_stride = chromaVStride; - vaSurfaceAttrib.luma_offset = lumaOffset; - vaSurfaceAttrib.chroma_u_offset = chromaUOffset; - vaSurfaceAttrib.chroma_v_offset = chromaVOffset; - vaSurfaceAttrib.buffers[0] = kBufHandle; - vaSurfaceAttrib.pixel_format = fourCC; - vaSurfaceAttrib.type = VAExternalMemoryKernelDRMBufffer; - - vaStatus = vaCreateSurfacesWithAttribute( - mVADisplay, map->vinfo.width, map->vinfo.height, VA_RT_FORMAT_YUV420, - 1, &surface, &vaSurfaceAttrib); + ValueInfo vinfo; + memcpy(&vinfo, &(map->vinfo), sizeof(ValueInfo)); + vinfo.mode = MEM_MODE_KBUFHANDLE; - CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf"); + surface = CreateSurfaceFromExternalBuf(kBufHandle, vinfo); + if (surface == VA_INVALID_SURFACE) + return ENCODE_DRIVER_FAIL; LOG_I("Surface ID created from Kbuf = 0x%08x", surface); @@ -1572,11 +1539,6 @@ Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle(SurfaceMap *map) { Encode_Status ret = ENCODE_SUCCESS; VASurfaceID surface; - VASurfaceAttributeTPI vaSurfaceAttrib; - uint32_t buf; - - 
vaSurfaceAttrib.buffers = &buf; - LOG_I("surfaceMappingForGfxHandle ......\n"); LOG_I("lumaStride = %d\n", map->vinfo.lumaStride); LOG_I("format = 0x%08x\n", map->vinfo.format); @@ -1584,41 +1546,22 @@ Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle(SurfaceMap *map) { LOG_I("height = %d\n", mComParams.resolution.height); LOG_I("gfxhandle = %d\n", map->value); - vaSurfaceAttrib.count = 1; + ValueInfo vinfo; + memcpy(&vinfo, &(map->vinfo), sizeof(ValueInfo)); + #ifdef IMG_GFX // color format may be OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar or HAL_PIXEL_FORMAT_NV12 IMG_native_handle_t* h = (IMG_native_handle_t*) map->value; LOG_I("IMG_native_handle_t h->iWidth=%d, h->iHeight=%d, h->iFormat=%x\n", h->iWidth, h->iHeight, h->iFormat); - vaSurfaceAttrib.luma_stride = h->iWidth; - vaSurfaceAttrib.pixel_format = h->iFormat; - vaSurfaceAttrib.width = h->iWidth; - vaSurfaceAttrib.height = h->iHeight; - -#else - vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride; - vaSurfaceAttrib.pixel_format = map->vinfo.format; - vaSurfaceAttrib.width = map->vinfo.width; - vaSurfaceAttrib.height = map->vinfo.height; + vinfo.lumaStride = h->iWidth; + vinfo.format = h->iFormat; + vinfo.width = h->iWidth; + vinfo.height = h->iHeight; #endif - vaSurfaceAttrib.type = VAExternalMemoryAndroidGrallocBuffer; - vaSurfaceAttrib.buffers[0] = (uint32_t) map->value; - vaStatus = vaCreateSurfacesWithAttribute( - mVADisplay, -#ifdef IMG_GFX - h->iWidth, - h->iHeight, -#else - map->vinfo.width, - map->vinfo.height, -#endif - VA_RT_FORMAT_YUV420, - 1, - &surface, - &vaSurfaceAttrib); - - CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); - LOG_V("Successfully create surfaces from native handle"); + surface = CreateSurfaceFromExternalBuf(map->value, vinfo); + if (surface == VA_INVALID_SURFACE) + return ENCODE_DRIVER_FAIL; map->surface = surface; @@ -1636,33 +1579,10 @@ Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle(SurfaceMap *map) { Encode_Status ret = ENCODE_SUCCESS; VASurfaceID surface; - uint32_t lumaOffset = 0; - uint32_t chromaUOffset = map->vinfo.height * map->vinfo.lumaStride; - uint32_t chromaVOffset = chromaUOffset + 1; - - VASurfaceAttributeTPI vaSurfaceAttrib; - uint32_t buf; - - vaSurfaceAttrib.buffers = &buf; - - vaSurfaceAttrib.count = 1; - vaSurfaceAttrib.size = map->vinfo.lumaStride * map->vinfo.height * 3 / 2; - vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride; - vaSurfaceAttrib.chroma_u_stride = map->vinfo.chromStride; - vaSurfaceAttrib.chroma_v_stride = map->vinfo.chromStride; - vaSurfaceAttrib.luma_offset = lumaOffset; - vaSurfaceAttrib.chroma_u_offset = chromaUOffset; - vaSurfaceAttrib.chroma_v_offset = chromaVOffset; - vaSurfaceAttrib.buffers[0] = map->value; - vaSurfaceAttrib.pixel_format = map->vinfo.format; - vaSurfaceAttrib.type = VAExternalMemoryKernelDRMBufffer; - - vaStatus = vaCreateSurfacesWithAttribute( - mVADisplay, map->vinfo.width, map->vinfo.height, VA_RT_FORMAT_YUV420, - 1, &surface, &vaSurfaceAttrib); - - CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf"); - + map->vinfo.size = map->vinfo.lumaStride * map->vinfo.height * 1.5; + surface = CreateSurfaceFromExternalBuf(map->value, map->vinfo); + if (surface == VA_INVALID_SURFACE) + return ENCODE_DRIVER_FAIL; LOG_I("Surface ID created from Kbuf = 0x%08x", map->value); map->surface = surface; @@ -1670,38 +1590,6 @@ ... return ret; } -Encode_Status VideoEncoderBase::surfaceMappingForCI(SurfaceMap *map) { - - if (!map) - return ENCODE_NULL_PTR; 
- - VAStatus vaStatus = VA_STATUS_SUCCESS; - Encode_Status ret = ENCODE_SUCCESS; - VASurfaceID surface; - - VASurfaceAttributeTPI vaSurfaceAttrib; - uint32_t buf; - - vaSurfaceAttrib.buffers = &buf; - - vaSurfaceAttrib.count = 1; - vaSurfaceAttrib.type = VAExternalMemoryCIFrame; - vaSurfaceAttrib.buffers[0] = (uint32_t)map->value; - vaStatus = vaCreateSurfacesWithAttribute( - mVADisplay, - map->vinfo.width, - map->vinfo.height, - VA_RT_FORMAT_YUV420, - 1, - &surface, - &vaSurfaceAttrib); - CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); - - map->surface = surface; - - return ret; -} - #if NO_BUFFER_SHARE static VAStatus upload_yuv_to_surface(VADisplay va_dpy, SurfaceMap *map, VASurfaceID surface_id, int picture_width, @@ -1767,27 +1655,9 @@ Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { CHECK_ENCODE_STATUS_RETURN("upload_yuv_to_surface"); #else - VASurfaceAttributeTPI vaSurfaceAttrib; - uint32_t buf; - - vaSurfaceAttrib.buffers = &buf; - - vaSurfaceAttrib.count = 1; - vaSurfaceAttrib.width = map->vinfo.width; - vaSurfaceAttrib.height = map->vinfo.height; - vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride; - vaSurfaceAttrib.buffers[0] = map->value; - vaSurfaceAttrib.pixel_format = map->vinfo.format; - if (map->vinfo.mode == MEM_MODE_NONECACHE_USRPTR) - vaSurfaceAttrib.type = VAExternalMemoryNoneCacheUserPointer; - else - vaSurfaceAttrib.type = VAExternalMemoryUserPointer; - - vaStatus = vaCreateSurfacesWithAttribute( - mVADisplay, map->vinfo.width, map->vinfo.height, VA_RT_FORMAT_YUV420, - 1, &surface, &vaSurfaceAttrib); - - CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromMalloc"); + surface = CreateSurfaceFromExternalBuf(map->value, map->vinfo); + if (surface == VA_INVALID_SURFACE) + return ENCODE_DRIVER_FAIL; LOG_I("Surface ID created from Malloc = 0x%08x\n", map->value); @@ -1797,27 +1667,16 @@ Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { else { map->surface_backup = surface; + //TODO: need optimization for both width/height not aligned case VASurfaceID surfaceId; - VASurfaceAttributeTPI attribute_tpi; unsigned int stride_aligned; if(mComParams.profile == VAProfileVP8Version0_3) - stride_aligned = ((mComParams.resolution.width + 31) / 32 ) * 32; + stride_aligned = ((mComParams.resolution.width + 31) / 32 ) * 32; else - stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; - - attribute_tpi.size = stride_aligned * mComParams.resolution.height * 3 / 2; - attribute_tpi.luma_stride = stride_aligned; - attribute_tpi.chroma_u_stride = stride_aligned; - attribute_tpi.chroma_v_stride = stride_aligned; - attribute_tpi.luma_offset = 0; - attribute_tpi.chroma_u_offset = stride_aligned * mComParams.resolution.height; - attribute_tpi.chroma_v_offset = stride_aligned * mComParams.resolution.height; - attribute_tpi.pixel_format = VA_FOURCC_NV12; - attribute_tpi.type = VAExternalMemoryNULL; - - vaCreateSurfacesWithAttribute(mVADisplay, mComParams.resolution.width, mComParams.resolution.height, - VA_RT_FORMAT_YUV420, 1, &surfaceId, &attribute_tpi); - CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute"); + stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; + + vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, + stride_aligned, map->vinfo.height, &surfaceId, 1, NULL, 0); map->surface = surfaceId; LOG_E("Due to 64 alignment, an alternative Surface ID 0x%08x created\n", surfaceId); @@ -1834,11 +1693,8 @@ Encode_Status VideoEncoderBase::surfaceMapping(SurfaceMap *map) { Encode_Status status; 
-LOG_I("surfaceMapping mode=%d, format=%d, lumaStride=%d, width=%d, height=%d, value=%x\n", map->vinfo.mode, map->vinfo.format, map->vinfo.lumaStride, map->vinfo.width, map->vinfo.height, map->value); + LOG_I("surfaceMapping mode=%d, format=%d, lumaStride=%d, width=%d, height=%d, value=%x\n", map->vinfo.mode, map->vinfo.format, map->vinfo.lumaStride, map->vinfo.width, map->vinfo.height, map->value); switch (map->vinfo.mode) { - case MEM_MODE_CI: - status = surfaceMappingForCI(map); - break; case MEM_MODE_SURFACE: status = surfaceMappingForSurface(map); break; @@ -1855,6 +1711,7 @@ LOG_I("surfaceMapping mode=%d, format=%d, lumaStride=%d, width=%d, height=%d, va case MEM_MODE_ION: case MEM_MODE_V4L2: case MEM_MODE_USRPTR: + case MEM_MODE_CI: default: status = ENCODE_NOT_SUPPORTED; break; @@ -2252,3 +2109,75 @@ Encode_Status VideoEncoderBase::copySurfaces(VASurfaceID srcId, VASurfaceID dest return ENCODE_SUCCESS; } +VASurfaceID VideoEncoderBase::CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vinfo) { + VAStatus vaStatus; + VASurfaceAttribExternalBuffers extbuf; + VASurfaceAttrib attribs[2]; + VASurfaceID surface = VA_INVALID_SURFACE; + int type; + unsigned long data = value; + + extbuf.pixel_format = VA_FOURCC_NV12; + extbuf.width = vinfo.width; + extbuf.height = vinfo.height; + extbuf.data_size = vinfo.size; + if (extbuf.data_size == 0) + extbuf.data_size = vinfo.lumaStride * vinfo.height * 1.5; + extbuf.num_buffers = 1; + extbuf.num_planes = 3; + extbuf.pitches[0] = vinfo.lumaStride; + extbuf.pitches[1] = vinfo.lumaStride; + extbuf.pitches[2] = vinfo.lumaStride; + extbuf.pitches[3] = 0; + extbuf.offsets[0] = 0; + extbuf.offsets[1] = vinfo.lumaStride * vinfo.height; + extbuf.offsets[2] = extbuf.offsets[1]; + extbuf.offsets[3] = 0; + extbuf.buffers = &data; + extbuf.flags = 0; + extbuf.private_data = NULL; + + switch(vinfo.mode) { + case MEM_MODE_GFXHANDLE: + type = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC; + break; + case MEM_MODE_KBUFHANDLE: + type = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM; + break; + case MEM_MODE_MALLOC: + type = VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR; + break; + case MEM_MODE_NONECACHE_USRPTR: + type = VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR; + extbuf.flags |= VA_SURFACE_EXTBUF_DESC_UNCACHED; + break; + case MEM_MODE_SURFACE: + type = VA_SURFACE_ATTRIB_MEM_TYPE_VA; + break; + case MEM_MODE_ION: + case MEM_MODE_V4L2: + case MEM_MODE_USRPTR: + case MEM_MODE_CI: + default: + //not support + return VA_INVALID_SURFACE; + } + + attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType; + attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE; + attribs[0].value.type = VAGenericValueTypeInteger; + attribs[0].value.value.i = type; + + attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor; + attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE; + attribs[1].value.type = VAGenericValueTypePointer; + attribs[1].value.value.p = (void *)&extbuf; + + vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, vinfo.width, + vinfo.height, &surface, 1, attribs, 2); + if (vaStatus != VA_STATUS_SUCCESS) + LOG_E("vaCreateSurfaces failed. 
vaStatus = %d\n", vaStatus); + + return surface; +} + diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 53b078b..4354e16 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -86,7 +86,6 @@ private: uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr); Encode_Status surfaceMappingForSurface(SurfaceMap *map); Encode_Status surfaceMappingForGfxHandle(SurfaceMap *map); - Encode_Status surfaceMappingForCI(SurfaceMap *map); Encode_Status surfaceMappingForKbufHandle(SurfaceMap *map); Encode_Status surfaceMappingForMalloc(SurfaceMap *map); Encode_Status surfaceMapping(SurfaceMap *map); @@ -99,6 +98,7 @@ private: Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer); Encode_Status queryAutoReferenceConfig(VAProfile profile); Encode_Status copySurfaces(VASurfaceID srcId, VASurfaceID destId); + VASurfaceID CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vinfo); protected: -- cgit v1.2.3 From d384ba5cdd182643350b38f6ca23ac72c7bb6f19 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Mon, 29 Jul 2013 14:04:43 +0800 Subject: libmix: disable buffer reallocation when protected content is being played BZ: 125789 Protected content video playback will fail if the output buffer needs to bereallocated. Signed-off-by: Dan Liang Change-Id: I0be4e9115a55219f6b957fd70da007f58489f871 Reviewed-on: http://android.intel.com:8080/120693 Reviewed-by: Feng, Wei Reviewed-by: Guo, Nana N Reviewed-by: Poornachandran, Rajesh Tested-by: Sun, Hang L Reviewed-by: cactus Tested-by: cactus --- videodecoder/VideoDecoderAVC.cpp | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index e3d67dd..c075e8a 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -701,21 +701,19 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { int diff = data->codec_data->num_ref_frames + 1 - mOutputWindowSize; -#ifndef USE_AVC_SHORT_FORMAT - // The number of actual buffer needed is - // outputQueue + nativewindow_owned + (diff > 0 ? diff : 1) + widi_need_max + 1(available buffer) - // while outputQueue = DPB < 8? DPB :8 - // and diff = Reference + 1 - ouputQueue - mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + 4 /* Owned by native window */ - + (diff > 0 ? diff : 1) - + 6 /* WiDi maximum needs */ - + 1; -#else - // This is for protected video playback on Baytrail - mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + 2 /* Owned by native window */ - + (diff > 0 ? diff : 1) - + 1; -#endif + if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) { + mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber; + } else { + // The number of actual buffer needed is + // outputQueue + nativewindow_owned + (diff > 0 ? diff : 1) + widi_need_max + 1(available buffer) + // while outputQueue = DPB < 8? DPB :8 + // and diff = Reference + 1 - ouputQueue + mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + 4 /* Owned by native window */ + + (diff > 0 ? diff : 1) + + 6 /* WiDi maximum needs */ + + 1; + } + ITRACE("actualBufferNeeded =%d", mVideoFormatInfo.actualBufferNeeded); mVideoFormatInfo.valid = true; -- cgit v1.2.3 From 23427acb70a50e0b91bac64e35b54935e98e45eb Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Fri, 9 Aug 2013 18:51:43 +0800 Subject: To fix two critical Klocwork issues in libmix BZ: 129623 Two critical issues in libmix: 1. 
Null pointer 'data' that comes from line 160 may be dereferenced at line 182. Also there is one similar error on line 190. File: libmix/videodecoder/securevideo/ baytrail/VideoDecoderAVCSecure.cpp:182 | decode() 2. Array 'op' of size 16 may use index value(s) 16. Also there are 6 similar errors on line(s) 542, 547, 552, 556, 557, 562. File: libmix/mix_vbp/viddec_fw/fw/codecs/ h264/parser/viddec_h264secure_parse.c:538 | h264secure_Parse_Dec_Ref_Pic_Marking() Fix: 1. Check the 'data' pointer before dereferencing it 2. Limit the idx value to be smaller than 16 Change-Id: Iba48665147822a23873ca8ef3c0fc5b0f3de03ee Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/124651 Reviewed-by: Shi, PingX Reviewed-by: Guo, Nana N Tested-by: Ding, Haitao Reviewed-by: cactus Tested-by: cactus --- mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264secure_parse.c | 4 ++-- videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp | 5 +++++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264secure_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264secure_parse.c index 55225ed..753618a 100644 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264secure_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264secure_parse.c @@ -533,7 +533,7 @@ h264_Status h264secure_Parse_Dec_Ref_Pic_Marking(h264_Info* pInfo, void *newdata { do { - if (i < NUM_MMCO_OPERATIONS) + if (i < MAX_OP) { code = sliceheader_p->ref_pic_marking.op[i].memory_management_control_operation; SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = code; @@ -568,7 +568,7 @@ h264_Status h264secure_Parse_Dec_Ref_Pic_Marking(h264_Info* pInfo, void *newdata } } - if (i >= NUM_MMCO_OPERATIONS) { + if (i >= MAX_OP) { return H264_STATUS_ERROR; } } while (SliceHeader->sh_dec_refpic.memory_management_control_operation[i++] != 0); diff --git a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp index 4ded53f..675b37a 100644 --- a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp @@ -178,6 +178,11 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { CHECK_STATUS("VideoDecoderBase::updateBuffer"); } + if (data == NULL) { + ETRACE("Invalid data returned by parser!"); + return DECODE_MEMORY_FAIL; + } + if (!mVAStarted) { if (data->has_sps && data->has_pps) { status = startVA(data); -- cgit v1.2.3 From ebc1b467fa6649903c0c00ac6ad829b153b4bcdd Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Thu, 1 Aug 2013 11:49:46 +0800 Subject: Add a convenient library to export vpp capability BZ: 128069 Add a convenient library to export video postprocessing capability. So intended users won't need to touch the low-level and complex VA-API. 
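Before the diff, a usage sketch inferred from the VideoVPPBase.h header this patch adds: a caller wraps source and destination buffers in RenderTarget descriptors, starts a VA context once, and calls perform() per frame. The field assignments and handle values are illustrative assumptions (only the width/height/stride/handle/type members are visible in the code below), so treat this as a sketch under those assumptions, not sample code shipped with the library:

#include "VideoVPPBase.h"

// Hypothetical caller: 1280x720 NV12 frames identified by kernel DRM
// handles, the one RenderTarget::type value the patch names explicitly.
int convertOneFrame(unsigned int srcHandle, unsigned int dstHandle) {
    VideoVPPBase vpp;
    if (vpp.start() != VA_STATUS_SUCCESS)
        return -1;

    RenderTarget src, dst;
    src.width = 1280;
    src.height = 720;
    src.stride = 1280;
    src.handle = srcHandle;
    src.type = RenderTarget::KERNEL_DRM;
    dst = src;                 // same geometry, different buffer
    dst.handle = dstHandle;

    // VPParameters::create() returns NULL when the filter query fails;
    // perform() is assumed to tolerate a NULL filter set.
    VPParameters *filters = VPParameters::create(&vpp);

    // no_wait = false: block on vaSyncSurface until the output is ready.
    VAStatus st = vpp.perform(src, dst, filters, false);

    vpp.stop();
    return (st == VA_STATUS_SUCCESS) ? 0 : -1;
}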
Change-Id: Idff180d657eba6f89e25a1c7ade431a3b3bdcbb6 Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/124020 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- Android.mk | 3 + videovpp/Android.mk | 56 ++++++++ videovpp/VideoVPPBase.cpp | 348 ++++++++++++++++++++++++++++++++++++++++++++++ videovpp/VideoVPPBase.h | 133 ++++++++++++++++++ videovpp/test/main.cpp | 196 ++++++++++++++++++++++++++ 5 files changed, 736 insertions(+) create mode 100755 videovpp/Android.mk create mode 100644 videovpp/VideoVPPBase.cpp create mode 100644 videovpp/VideoVPPBase.h create mode 100644 videovpp/test/main.cpp diff --git a/Android.mk b/Android.mk index 17d0811..1b7383d 100644 --- a/Android.mk +++ b/Android.mk @@ -9,4 +9,7 @@ include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/imagedecoder/Android.mk +ifeq ($(ENABLE_IMG_GRAPHICS),) +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videovpp/Android.mk +endif endif diff --git a/videovpp/Android.mk b/videovpp/Android.mk new file mode 100755 index 0000000..bc53873 --- /dev/null +++ b/videovpp/Android.mk @@ -0,0 +1,56 @@ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + VideoVPPBase.cpp + + +LOCAL_C_INCLUDES += \ + $(TARGET_OUT_HEADERS)/libwsbm \ + $(TARGET_OUT_HEADERS)/libpsb_drm \ + $(TARGET_OUT_HEADERS)/libva \ + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + libutils \ + libui \ + liblog \ + libhardware \ + libdrm \ + libdrm_intel \ + libwsbm \ + libva \ + libva-android \ + libva-tpi + +LOCAL_COPY_HEADERS_TO := libmix_videovpp + +LOCAL_COPY_HEADERS := \ + VideoVPPBase.h + +LOCAL_MODULE := libmix_videovpp + +LOCAL_MODULE_TAGS := eng + +include $(BUILD_SHARED_LIBRARY) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + test/main.cpp + +LOCAL_C_INCLUDES += \ + $(TARGET_OUT_HEADERS)/libva \ + $(TARGET_OUT_HEADERS)/libmix_videovpp + +LOCAL_SHARED_LIBRARIES := \ + libhardware \ + libmix_videovpp + +LOCAL_MODULE := csc_vpp + +LOCAL_MODULE_TAGS := eng + + +include $(BUILD_EXECUTABLE) diff --git a/videovpp/VideoVPPBase.cpp b/videovpp/VideoVPPBase.cpp new file mode 100644 index 0000000..1564938 --- /dev/null +++ b/videovpp/VideoVPPBase.cpp @@ -0,0 +1,348 @@ +#include "VideoVPPBase.h" + +enum { + HAL_PIXEL_FORMAT_NV12_TILED_INTEL = 0x100, + HAL_PIXEL_FORMAT_NV12_LINEAR_INTEL = 0x101, + +// deprecated use HAL_PIXEL_FORMAT_NV12_TILED_INTEL +#if HAL_PIXEL_FORMAT_NV12_DEFINED + HAL_PIXEL_FORMAT_INTEL_NV12 = HAL_PIXEL_FORMAT_NV12, +#else + HAL_PIXEL_FORMAT_INTEL_NV12 = HAL_PIXEL_FORMAT_NV12_TILED_INTEL, +#endif + +// deprecated use HAL_PIXEL_FORMAT_NV12_LINEAR_INTEL + HAL_PIXEL_FORMAT_YUV420PackedSemiPlanar_INTEL = 0x7FA00E00, + +// deprecated use HAL_PIXEL_FORMAT_NV12_TILED_INTEL + HAL_PIXEL_FORMAT_YUV420PackedSemiPlanar_Tiled_INTEL = 0x7FA00F00, + +}; + +struct mfx_gralloc_drm_handle_t { + native_handle_t base; + int magic; + + int width; + int height; + int format; + int usage; + + int name; + int stride; + + int data_owner; + int data; +}; + +#define VPWRAPPER_NATIVE_DISPLAY 0x18c34078 + +#define CHECK_VA_STATUS(FUNC) \ + if (vret != VA_STATUS_SUCCESS) {\ + printf("[%d] " FUNC" failed with 0x%x\n", __LINE__, vret);\ + return vret;\ + } + +VPParameters::VPParameters(VideoVPPBase *vvb) { + va_display = vvb->va_display; + va_context = vvb->va_context; + + vret = init(); + + if (vret != VA_STATUS_SUCCESS) + mInitialized 
= false; + else + mInitialized = true; +} + +VPParameters* VPParameters::create(VideoVPPBase *vvb) { + VPParameters* v = new VPParameters(vvb); + + if (v->mInitialized) + return v; + else + return NULL; +} + +VAStatus VPParameters::init() { + num_supported_filters = VAProcFilterCount; + vret = vaQueryVideoProcFilters(va_display, + va_context, supported_filters, + &num_supported_filters); + CHECK_VA_STATUS("vaQueryVideoProcFilters"); + + for (size_t i = 0; i < num_supported_filters; i++) { + switch(supported_filters[i]) { + case VAProcFilterNoiseReduction: + { + num_denoise_caps = 1; + vret = vaQueryVideoProcFilterCaps(va_display, va_context, + VAProcFilterNoiseReduction, &denoise_caps, &num_denoise_caps); + CHECK_VA_STATUS("vaQueryVideoProcFilters"); + + nr.valid = true; + nr.min = denoise_caps.range.min_value; + nr.max = denoise_caps.range.max_value; + nr.def = denoise_caps.range.default_value; + nr.step = denoise_caps.range.step; + nr.cur = 0.0; + printf("VAProcFilterNoiseReduction"); + break; + } + default: + break; + } + } + + return vret; +} + +VAStatus VPParameters::buildfilters(VABufferID *filters, unsigned int *num_filter) { + for (int i = 0; i < num_filter_bufs; i++) { + switch (supported_filters[i]) { + case VAProcFilterNoiseReduction: + { + if (nr.cur != 0) { + denoise_buf.type = VAProcFilterNoiseReduction; + denoise_buf.value = nr.cur; + vret = vaCreateBuffer(va_display, va_context, + VAProcFilterParameterBufferType, + sizeof(denoise_buf), 1, &denoise_buf, &denoise_buf_id); + CHECK_VA_STATUS("vaCreateBuffer"); + filter_bufs[num_filter_bufs] = denoise_buf_id; + num_filter_bufs++; + } + break; + } + default: + break; + } + } + + memcpy(filters, filter_bufs, sizeof(VABufferID) * num_filter_bufs); + *num_filter = num_filter_bufs; + + return vret; +} + +VideoVPPBase::VideoVPPBase() + : mInitialized(false), + width(1280), + height(720), + va_display(NULL), + va_config(VA_INVALID_ID), + va_context(VA_INVALID_ID), + vpp_pipeline_buf(VA_INVALID_ID), + SrcSurf(VA_INVALID_SURFACE), + DstSurf(VA_INVALID_SURFACE) { + +} + +VAStatus VideoVPPBase::start() { + if (mInitialized) + return VA_STATUS_SUCCESS; + + int va_major_version, va_minor_version; + unsigned int nativeDisplay = VPWRAPPER_NATIVE_DISPLAY; + VAConfigAttrib vaAttrib; + + va_display = vaGetDisplay(&nativeDisplay); + + vret = vaInitialize(va_display, &va_major_version, &va_minor_version); + CHECK_VA_STATUS("vaInitialize"); + + vaAttrib.type = VAConfigAttribRTFormat; + vaAttrib.value = VA_RT_FORMAT_YUV420; + vret = vaCreateConfig(va_display, VAProfileNone, + VAEntrypointVideoProc, &vaAttrib, 1, &va_config); + CHECK_VA_STATUS("vaCreateConfig"); + + vret = vaCreateContext(va_display, va_config, width, + height, 0, NULL, 0, &va_context); + CHECK_VA_STATUS("vaCreateContext"); + + num_supported_filters = VAProcFilterCount; + vret = vaQueryVideoProcFilters(va_display, + va_context, supported_filters, + &num_supported_filters); + CHECK_VA_STATUS("vaQueryVideoProcFilters"); + + for (size_t i = 0; i < num_supported_filters; i++) { + switch(supported_filters[i]) { + case VAProcFilterDeblocking: + { + break; + } + default: + break; + } + } + + mInitialized = true; + + return vret; +} + +VAStatus VideoVPPBase::stop() { + if (!mInitialized) + return VA_STATUS_SUCCESS; + + int c = SrcSurfHandleMap.size(); + for (int i = 0; i < c; i++) { + SrcSurf = SrcSurfHandleMap.valueAt(i); + if (SrcSurf != VA_INVALID_SURFACE) { + vret = vaDestroySurfaces(va_display, &SrcSurf, 1); + CHECK_VA_STATUS("vaDestroySurfaces"); + } + printf("remove src surf %x\n", 
SrcSurf); + SrcSurfHandleMap.removeItemsAt(i); + } + + c = DstSurfHandleMap.size(); + for (int i = 0; i < c; i++) { + DstSurf = DstSurfHandleMap.valueAt(i); + if (DstSurf != VA_INVALID_SURFACE) { + vret = vaDestroySurfaces(va_display, &DstSurf, 1); + CHECK_VA_STATUS("vaDestroySurfaces"); + } + printf("remove dst surf %x\n", DstSurf); + DstSurfHandleMap.removeItemsAt(i); + } + + if (vpp_pipeline_buf != VA_INVALID_ID) { + vret = vaDestroyBuffer(va_display, vpp_pipeline_buf); + CHECK_VA_STATUS("vaDestroyBuffer"); + vpp_pipeline_buf = VA_INVALID_ID; + } + + if (va_context != VA_INVALID_ID) { + vret = vaDestroyContext(va_display, va_context); + CHECK_VA_STATUS("vaDestroyContext"); + va_context = VA_INVALID_ID; + } + + if (va_config != VA_INVALID_ID) { + vret = vaDestroyConfig(va_display, va_config); + CHECK_VA_STATUS("vaDestroyConfig"); + va_config = VA_INVALID_ID; + } + + if (va_display != NULL) { + vret = vaTerminate(va_display); + CHECK_VA_STATUS("vaTerminate"); + va_display = NULL; + } + + mInitialized = false; + + return vret; +} + +VAStatus VideoVPPBase::_CreateSurfaceFromGrallocHandle(RenderTarget rt, VASurfaceID *surf) { + unsigned int buffer; + VASurfaceAttrib SurfAttrib; + VASurfaceAttribExternalBuffers SurfExtBuf; + + SurfExtBuf.pixel_format = VA_FOURCC_NV12; + SurfExtBuf.width = rt.width; + SurfExtBuf.height = rt.height; + SurfExtBuf.pitches[0] = rt.stride; + buffer = rt.handle; + SurfExtBuf.buffers = (unsigned long*)&buffer; + SurfExtBuf.num_buffers = 1; + if (rt.type == RenderTarget::KERNEL_DRM) + SurfExtBuf.flags = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM; + else + SurfExtBuf.flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC; + + SurfAttrib.type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor; + SurfAttrib.flags = VA_SURFACE_ATTRIB_SETTABLE; + SurfAttrib.value.type = VAGenericValueTypePointer; + SurfAttrib.value.value.p = &SurfExtBuf; + + vret = vaCreateSurfaces(va_display, VA_RT_FORMAT_YUV420, + rt.width, rt.height, surf, 1, + &SurfAttrib, 1); + CHECK_VA_STATUS("vaCreateSurfaces"); + + return vret; +} + +VAStatus VideoVPPBase::_perform(VASurfaceID SrcSurf, VARectangle SrcRect, + VASurfaceID DstSurf, VARectangle DstRect, bool no_wait) { + vpp_param.surface = SrcSurf; + vpp_param.output_region = &DstRect; + vpp_param.surface_region = &SrcRect; + vpp_param.surface_color_standard = VAProcColorStandardBT601; + vpp_param.output_background_color = 0; + vpp_param.output_color_standard = VAProcColorStandardBT601; + vpp_param.filter_flags = VA_FRAME_PICTURE; + vpp_param.filters = NULL; + vpp_param.num_filters = 0; + vpp_param.forward_references = NULL; + vpp_param.num_forward_references = 0; + vpp_param.backward_references = NULL; + vpp_param.num_backward_references = 0; + vpp_param.blend_state = NULL; + vpp_param.rotation_state = VA_ROTATION_NONE; + + vret = vaCreateBuffer(va_display, va_context, + VAProcPipelineParameterBufferType, + sizeof(VAProcPipelineParameterBuffer), + 1, &vpp_param, &vpp_pipeline_buf); + CHECK_VA_STATUS("vaCreateBuffer"); + + vret = vaBeginPicture(va_display, va_context, DstSurf); + CHECK_VA_STATUS("vaBeginPicture"); + + vret = vaRenderPicture(va_display, va_context, &vpp_pipeline_buf, 1); + CHECK_VA_STATUS("vaRenderPicture"); + + vret = vaEndPicture(va_display, va_context); + CHECK_VA_STATUS("vaEndPicture"); + + if (!no_wait) { + vret = vaSyncSurface(va_display, DstSurf); + CHECK_VA_STATUS("vaSyncSurface"); + } + + return vret; +} + +VAStatus VideoVPPBase::perform(RenderTarget Src, RenderTarget Dst, VPParameters *vpp, bool no_wait) { + if 
(!mInitialized) { + vret = start(); + CHECK_VA_STATUS("start"); + } + + ssize_t i = SrcSurfHandleMap.indexOfKey(Src.handle); + if (i >= 0) { + SrcSurf = SrcSurfHandleMap.valueAt(i); + } else { + vret = _CreateSurfaceFromGrallocHandle(Src, &SrcSurf); + CHECK_VA_STATUS("_CreateSurfaceFromGrallocHandle"); + SrcSurfHandleMap.add(Src.handle, SrcSurf); + printf("add src surface %x\n", SrcSurf); + } + + i = DstSurfHandleMap.indexOfKey(Dst.handle); + if (i >= 0) { + DstSurf = DstSurfHandleMap.valueAt(i); + } else { + vret = _CreateSurfaceFromGrallocHandle(Dst, &DstSurf); + CHECK_VA_STATUS("_CreateSurfaceFromGrallocHandle"); + DstSurfHandleMap.add(Dst.handle, DstSurf); + printf("add dst surface %x\n", DstSurf); + } + + vret = vpp->buildfilters(filter_bufs, &num_filter_bufs); + CHECK_VA_STATUS("buildfilters"); + vret = _perform(SrcSurf, Src.rect, DstSurf, Dst.rect, no_wait); + CHECK_VA_STATUS("_perform"); + + return vret; +} + +VideoVPPBase::~VideoVPPBase() { + stop(); +} diff --git a/videovpp/VideoVPPBase.h b/videovpp/VideoVPPBase.h new file mode 100644 index 0000000..a3bdce2 --- /dev/null +++ b/videovpp/VideoVPPBase.h @@ -0,0 +1,133 @@ +#ifndef __VIDEO_VPP_BASE_H__ +#define __VIDEO_VPP_BASE_H__ +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include + +class VideoVPPBase; + +struct FilterConfig { + bool valid; + int type; + float min, max, step, def; + float cur; +}; + +class VPParameters { +public: + static VPParameters* create(VideoVPPBase *); + ~VPParameters(); + VAStatus buildfilters(VABufferID *filter_bufs, unsigned int *num_filter_bufs); + void getNR(FilterConfig& NR) { memcpy(&NR, &nr, sizeof(FilterConfig)); } + void setNR(FilterConfig NR) { nr.cur = NR.cur; } + +private: + bool mInitialized; + VADisplay va_display; + VAContextID va_context; + VAStatus vret; + + VAProcFilterType supported_filters[VAProcFilterCount]; + unsigned int num_supported_filters; + + VAProcFilterCap denoise_caps, sharpen_caps, deblock_caps; + VAProcFilterCapColorBalance color_balance_caps[VAProcColorBalanceCount]; + unsigned int num_denoise_caps, num_color_balance_caps, num_sharpen_caps, num_deblock_caps; + + VAProcFilterParameterBuffer denoise_buf, sharpen_buf, deblock_buf; + VAProcFilterParameterBufferColorBalance balance_buf[VAProcColorBalanceCount]; + VABufferID sharpen_buf_id, denoise_buf_id, deblock_buf_id, balance_buf_id; + + VABufferID filter_bufs[VAProcFilterCount]; + unsigned int num_filter_bufs; + + FilterConfig nr; + FilterConfig deblock; + FilterConfig sharpen; + FilterConfig colorbalance[VAProcColorBalanceCount]; + + VPParameters(VideoVPPBase *); + VPParameters(const VPParameters&); + VPParameters &operator=(const VPParameters&); + + VAStatus init(); +}; + +struct RenderTarget { + enum bufType{ + KERNEL_DRM, + ANDROID_GRALLOC, + }; + + int width; + int height; + int stride; + bufType type; + int pixel_format; + int handle; + VARectangle rect; +}; + +class VideoVPPBase { +public: + VideoVPPBase(); + ~VideoVPPBase(); + VAStatus start(); + VAStatus stop(); + VAStatus perform(RenderTarget Src, RenderTarget Dst, VPParameters *vpp, bool no_wait); + +private: + bool mInitialized; + unsigned width, height; + VAStatus vret; + VADisplay va_display; + VAConfigID va_config; + VAContextID va_context; + VABufferID vpp_pipeline_buf; + VAProcPipelineParameterBuffer vpp_param; + VASurfaceAttrib SrcSurfAttrib, DstSurfAttrib; + VASurfaceAttribExternalBuffers SrcSurfExtBuf, DstSurfExtBuf; + VASurfaceID SrcSurf, DstSurf; + VASurfaceAttributeTPI attribs; + + 
VAProcFilterType supported_filters[VAProcFilterCount]; + unsigned int num_supported_filters; + + VAProcFilterCap denoise_caps, sharpen_caps, deblock_caps; + VAProcFilterCapColorBalance color_balance_caps[VAProcColorBalanceCount]; + unsigned int num_denoise_caps, num_color_balance_caps, num_sharpen_caps, num_deblock_caps; + + VAProcFilterParameterBuffer denoise_buf, sharpen_buf, deblock_buf; + VAProcFilterParameterBufferColorBalance balance_buf[VAProcColorBalanceCount]; + + VABufferID sharpen_buf_id, denoise_buf_id, deblock_buf_id, balance_buf_id; + + VABufferID filter_bufs[VAProcFilterCount]; + unsigned int num_filter_bufs; + + KeyedVector SrcSurfHandleMap; + KeyedVector DstSurfHandleMap; + + VideoVPPBase(const VideoVPPBase &); + VideoVPPBase &operator=(const VideoVPPBase &); + + VAStatus _perform(VASurfaceID SrcSurf, VARectangle SrcRect, + VASurfaceID DstSurf, VARectangle DstRect, bool no_wait); + + VAStatus _CreateSurfaceFromGrallocHandle(RenderTarget rt, VASurfaceID *surf); + + friend class VPParameters; + +}; + +#endif diff --git a/videovpp/test/main.cpp b/videovpp/test/main.cpp new file mode 100644 index 0000000..9c6e095 --- /dev/null +++ b/videovpp/test/main.cpp @@ -0,0 +1,196 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "VideoVPPBase.h" + +enum { + HAL_PIXEL_FORMAT_NV12_TILED_INTEL = 0x100, + HAL_PIXEL_FORMAT_NV12_LINEAR_INTEL = 0x101, + +// deprecated use HAL_PIXEL_FORMAT_NV12_TILED_INTEL +#if HAL_PIXEL_FORMAT_NV12_DEFINED + HAL_PIXEL_FORMAT_INTEL_NV12 = HAL_PIXEL_FORMAT_NV12, +#else + HAL_PIXEL_FORMAT_INTEL_NV12 = HAL_PIXEL_FORMAT_NV12_TILED_INTEL, +#endif + +// deprecated use HAL_PIXEL_FORMAT_NV12_LINEAR_INTEL + HAL_PIXEL_FORMAT_YUV420PackedSemiPlanar_INTEL = 0x7FA00E00, + +// deprecated use HAL_PIXEL_FORMAT_NV12_TILED_INTEL + HAL_PIXEL_FORMAT_YUV420PackedSemiPlanar_Tiled_INTEL = 0x7FA00F00, + +}; + +struct mfx_gralloc_drm_handle_t { + native_handle_t base; + int magic; + + int width; + int height; + int format; + int usage; + + int name; + int stride; + + int data_owner; + int data; +}; + +static void usage(const char *me) { + fprintf(stderr, "color space conversion\n" + "\t\tusage: %s -i input -o output\n" + "\t\t-w width -h height\n", + me); + + exit(1); +} + +#define VPWRAPPER_NATIVE_DISPLAY 0x18c34078 + +#define CHECK_VA_STATUS(FUNC) \ + if (vret != VA_STATUS_SUCCESS) {\ + printf("[%d] " FUNC" failed with 0x%x\n", __LINE__, vret);\ + return vret;\ + } + + +static inline unsigned long GetTickCount() +{ + struct timeval tv; + if (gettimeofday(&tv, NULL)) + return 0; + return tv.tv_usec / 1000 + tv.tv_sec * 1000; +} + +int main(int argc, char *argv[]) +{ + int width = 1280, height = 720; + int i, j, res; + const char *me = argv[0]; + char input[128], output[128]; + int has_input = 0; + int has_output = 0; + int has_width = 0; + int has_height = 0; + + while ((res = getopt(argc, argv, "i:o:w:h:")) >= 0) { + switch (res) { + case 'i': + { + strcpy(input, optarg); + has_input = 1; + break; + } + case 'o': + { + strcpy(output, optarg); + has_output = 1; + break; + } + case 'w': + { + width = atoi(optarg); + has_width = 1; + break; + } + case 'h': + { + height = atoi(optarg); + has_height = 1; + break; + } + default: + { + usage(me); + } + } + } + + if (!has_input || !has_output || !has_width || !has_height) + usage(me); + + hw_module_t const* module; + alloc_device_t *mAllocDev; + int32_t stride_YUY2, stride_NV12; + buffer_handle_t handle_YUY2, handle_NV12; + struct gralloc_module_t *gralloc_module; + struct 
mfx_gralloc_drm_handle_t *pGrallocHandle; + RenderTarget Src, Dst; + + res = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); + gralloc_module = (struct gralloc_module_t*)module; + res = gralloc_open(module, &mAllocDev); + res = mAllocDev->alloc(mAllocDev, width, height, + HAL_PIXEL_FORMAT_YCbCr_422_I, + GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE, + &handle_YUY2, &stride_YUY2); + if (res != 0) + printf("%d: alloc()\n", __LINE__); + else { + pGrallocHandle = (struct mfx_gralloc_drm_handle_t *)handle_YUY2; + printf("YUY2 %d %d %d\n", pGrallocHandle->width, + pGrallocHandle->height, stride_YUY2); + Src.width = pGrallocHandle->width; + Src.height = pGrallocHandle->height; + Src.stride = stride_YUY2; + Src.type = RenderTarget::KERNEL_DRM; + Src.handle = pGrallocHandle->name; + Src.rect.x = Src.rect.y = 0; + Src.rect.width = Src.width; + Src.rect.height = Src.height; + } + res = mAllocDev->alloc(mAllocDev, width, height, + HAL_PIXEL_FORMAT_NV12_TILED_INTEL, + GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE, + &handle_NV12, &stride_NV12); + if (res != 0) + printf("%d: alloc()\n", __LINE__); + else { + pGrallocHandle = (struct mfx_gralloc_drm_handle_t *)handle_NV12; + printf("NV12 %d %d %d\n", pGrallocHandle->width, + pGrallocHandle->height, stride_NV12); + Dst.width = pGrallocHandle->width; + Dst.height = pGrallocHandle->height; + Dst.stride = stride_NV12; + Dst.type = RenderTarget::KERNEL_DRM; + Dst.handle = pGrallocHandle->name; + Dst.rect.x = 0; + Dst.rect.y = 0; + Dst.rect.width = Dst.width; + Dst.rect.height = Dst.height; + } + + VAStatus vret; + + VideoVPPBase * p = new VideoVPPBase(); + + p->start(); + + VPParameters *vpp = VPParameters::create(p); + + vret = p->perform(Src, Dst, vpp, false); + CHECK_VA_STATUS("doVp"); + + vret = p->perform(Src, Dst, vpp, false); + CHECK_VA_STATUS("doVp"); + + p->stop(); + + mAllocDev->free(mAllocDev, handle_YUY2); + mAllocDev->free(mAllocDev, handle_NV12); + + gralloc_close(mAllocDev); + + return 0; +} -- cgit v1.2.3 From 6289d22e26dd0c1b2db141d41b5bf6ddd86f8aa2 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Thu, 8 Aug 2013 14:38:26 +0800 Subject: Support vaQuerySurfaceAttributes API in libmix BZ: 129308 Change-Id: Icaa30baea7e13ce250d629f5f43b56881b8efa79 Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/124674 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videoencoder/VideoEncoderBase.cpp | 43 ++++++++++++++++++++++++++++++++++++++- videoencoder/VideoEncoderBase.h | 4 ++++ 2 files changed, 46 insertions(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 7a04da2..ca66b70 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -67,7 +67,8 @@ VideoEncoderBase::VideoEncoderBase() ,mOffsetInSeg(0) ,mTotalSize(0) ,mTotalSizeCopied(0) - ,mFrameSkipped(false){ + ,mFrameSkipped(false) + ,mSupportedSurfaceMemType(0){ VAStatus vaStatus = VA_STATUS_SUCCESS; // here the display can be any value, use following one @@ -160,6 +161,8 @@ Encode_Status VideoEncoderBase::start() { // &vaAttrib[0], 3, &(mVAConfig)); //uncomment this after psb_video supports CHECK_VA_STATUS_RETURN("vaCreateConfig"); + querySupportedSurfaceMemTypes(); + if (mComParams.rcMode == VA_RC_VCM) { // Following three features are only enabled in VCM mode mRenderMaxSliceSize = true; @@ -627,6 +630,7 @@ CLEAN_UP: mTotalSize = 0; mTotalSizeCopied = 0; mFrameSkipped = false; + mSupportedSurfaceMemType = 0; LOG_V( "end\n"); return ret; @@ 
-792,6 +796,40 @@ Encode_Status VideoEncoderBase::queryAutoReferenceConfig(VAProfile profile) { return ENCODE_SUCCESS; } +Encode_Status VideoEncoderBase::querySupportedSurfaceMemTypes() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + + unsigned int num = 0; + + VASurfaceAttrib* attribs = NULL; + + //get attribs number + vaStatus = vaQuerySurfaceAttributes(mVADisplay, mVAConfig, attribs, &num); + CHECK_VA_STATUS_RETURN("vaGetSurfaceAttributes"); + + if (num == 0) + return ENCODE_SUCCESS; + + attribs = new VASurfaceAttrib[num]; + + vaStatus = vaQuerySurfaceAttributes(mVADisplay, mVAConfig, attribs, &num); + CHECK_VA_STATUS_RETURN("vaGetSurfaceAttributes"); + + for(int i = 0; i < num; i ++) { + if (attribs[i].type == VASurfaceAttribMemoryType) { + mSupportedSurfaceMemType = attribs[i].value.value.i; + break; + } + else + continue; + } + + delete attribs; + + return ENCODE_SUCCESS; +} + Encode_Status VideoEncoderBase::outputAllData(VideoEncOutputBuffer *outBuffer) { // Data size been copied for every single call @@ -2163,6 +2201,9 @@ VASurfaceID VideoEncoderBase::CreateSurfaceFromExternalBuf(int32_t value, ValueI return VA_INVALID_SURFACE; } + if (mSupportedSurfaceMemType & type == 0) + return VA_INVALID_SURFACE; + attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType; attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE; attribs[0].value.type = VAGenericValueTypeInteger; diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 4354e16..d43878b 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -97,6 +97,7 @@ private: Encode_Status cleanupForOutput(); Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer); Encode_Status queryAutoReferenceConfig(VAProfile profile); + Encode_Status querySupportedSurfaceMemTypes(); Encode_Status copySurfaces(VASurfaceID srcId, VASurfaceID destId); VASurfaceID CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vinfo); @@ -159,5 +160,8 @@ protected: android::Condition mCodedBuffer_Cond, mEncodeTask_Cond; bool mFrameSkipped; + + //supported surface memory types + int mSupportedSurfaceMemType; }; #endif /* __VIDEO_ENCODER_BASE_H__ */ -- cgit v1.2.3 From 96437b96516bd1579a3846af91f76d54093e4331 Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Fri, 9 Aug 2013 14:35:13 +0800 Subject: Clip bitrate settings for sw h263 encoder. BZ: 129368 sw h263 encoder supports maximum bitrate of 2mbps. Change-Id: Ie4ac6f31f04947c807f3ebc2165cd4cd021f451e Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/124675 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videoencoder/PVSoftMPEG4Encoder.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/videoencoder/PVSoftMPEG4Encoder.cpp b/videoencoder/PVSoftMPEG4Encoder.cpp index 5e5bfee..7103ea2 100644 --- a/videoencoder/PVSoftMPEG4Encoder.cpp +++ b/videoencoder/PVSoftMPEG4Encoder.cpp @@ -194,7 +194,7 @@ Encode_Status PVSoftMPEG4Encoder::initEncParams() { mEncParams->timeIncRes = 1000; mEncParams->tickPerSrc = mEncParams->timeIncRes / mVideoFrameRate; - mEncParams->bitRate[0] = mVideoBitRate; + mEncParams->bitRate[0] = mVideoBitRate <= 2000000 ? mVideoBitRate : 2000000; mEncParams->iQuant[0] = 15; mEncParams->pQuant[0] = 12; mEncParams->quantType[0] = 0; -- cgit v1.2.3 From 8258681327ed950cf39ca0f1c39ffe5c12dca18f Mon Sep 17 00:00:00 2001 From: Liu Bolun Date: Thu, 8 Aug 2013 14:04:41 +0800 Subject: Using the new vaCreateSurface for VP8 encode. 
BZ: 129319 Replace vaCreateSurfacewithAttribe with vaCreateSurface in libmix layer; VP8 encode had special requirement about attribute list. For YUV file to IVF file test tool, it didn't need copy surface. So, just walk around it. Change-Id: I41d8b2bb165ddb62c4398424f7f2081001717384 Signed-off-by: Liu Bolun Reviewed-on: http://android.intel.com:8080/124632 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videoencoder/VideoEncoderBase.cpp | 50 ++++++++++++++++++++++++++++++++++++--- videoencoder/VideoEncoderBase.h | 5 ++++ 2 files changed, 52 insertions(+), 3 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index ca66b70..e31ab8c 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -173,6 +173,8 @@ Encode_Status VideoEncoderBase::start() { LOG_V( "======VA Create Surfaces for Rec/Ref frames ======\n"); VASurfaceID surfaces[2]; + VASurfaceAttrib attrib_list[2]; + VASurfaceAttribExternalBuffers external_refbuf; uint32_t stride_aligned, height_aligned; if(mAutoReference == false){ stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; @@ -204,8 +206,14 @@ Encode_Status VideoEncoderBase::start() { mRecSurface = surfaces[1]; }else { mAutoRefSurfaces = new VASurfaceID [mAutoReferenceSurfaceNum]; - vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, - stride_aligned, height_aligned, mAutoRefSurfaces, mAutoReferenceSurfaceNum, NULL, 0); + if(mComParams.profile == VAProfileVP8Version0_3) + { + setupVP8RefExternalBuf(stride_aligned,height_aligned,&external_refbuf,&attrib_list[0]); + vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420,stride_aligned, height_aligned, mAutoRefSurfaces, mAutoReferenceSurfaceNum, NULL, 0); + } + else + vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, + stride_aligned, height_aligned, mAutoRefSurfaces, mAutoReferenceSurfaceNum, NULL, 0); } CHECK_VA_STATUS_RETURN("vaCreateSurfaces"); @@ -1700,7 +1708,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { LOG_I("Surface ID created from Malloc = 0x%08x\n", map->value); //Merrifield limitation, should use mAutoReference to check if on Merr - if ( (mAutoReference == false) || (map->vinfo.lumaStride % 64 == 0) ) + if ((mComParams.profile == VAProfileVP8Version0_3)||((mAutoReference == false) || (map->vinfo.lumaStride % 64 == 0))) map->surface = surface; else { map->surface_backup = surface; @@ -2222,3 +2230,39 @@ VASurfaceID VideoEncoderBase::CreateSurfaceFromExternalBuf(int32_t value, ValueI return surface; } +Encode_Status VideoEncoderBase::setupVP8RefExternalBuf(uint32_t stride_aligned, + uint32_t height_aligned, + VASurfaceAttribExternalBuffers *buf, + VASurfaceAttrib *attrib_list) +{ + int ref_height_uv = (mComParams.resolution.height/ 2 + 32 + 63) & (~63); + buf->pixel_format = VA_FOURCC_NV12; + buf->width = stride_aligned; + buf->height = height_aligned; + buf->data_size = stride_aligned * height_aligned + stride_aligned * ref_height_uv; + buf->num_buffers = mAutoReferenceSurfaceNum; + buf->num_planes = 3; + buf->pitches[0] = stride_aligned; + buf->pitches[1] = stride_aligned; + buf->pitches[2] = stride_aligned; + buf->pitches[3] = 0; + buf->offsets[0] = 0; + buf->offsets[1] = stride_aligned*height_aligned; + buf->offsets[2] = buf->offsets[1]; + buf->offsets[3] = 0; + buf->buffers = (unsigned long *)calloc(buf->num_buffers, sizeof(unsigned long)); + buf->flags = 0; + buf->private_data = NULL; + + attrib_list[0].type = 
(VASurfaceAttribType)VASurfaceAttribMemoryType; + attrib_list[0].flags = VA_SURFACE_ATTRIB_SETTABLE; + attrib_list[0].value.type = VAGenericValueTypeInteger; + attrib_list[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_VA; + + attrib_list[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor; + attrib_list[1].flags = VA_SURFACE_ATTRIB_SETTABLE; + attrib_list[1].value.type = VAGenericValueTypePointer; + attrib_list[1].value.value.p = (void *)buf; + + return 0; +} diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index d43878b..82ac1f1 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -100,6 +100,11 @@ private: Encode_Status querySupportedSurfaceMemTypes(); Encode_Status copySurfaces(VASurfaceID srcId, VASurfaceID destId); VASurfaceID CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vinfo); + Encode_Status setupVP8RefExternalBuf(uint32_t stride_aligned, + uint32_t height_aligned, + VASurfaceAttribExternalBuffers *buf, + VASurfaceAttrib *attrib_list); + protected: -- cgit v1.2.3 From be4df6d358be567fe09bafbf45d692d06b03f2e9 Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Thu, 8 Aug 2013 10:45:57 +0800 Subject: vpp: implementation improvement BZ: 128069 impl improvement. Change-Id: Ibb797f5e51823941f6d6ca3c4bd2f0817df92248 Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/124649 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: Guo, Nana N Reviewed-by: cactus Tested-by: cactus --- videovpp/Android.mk | 5 +- videovpp/VideoVPPBase.cpp | 240 +++++++++++++++++++++++++++++----------------- videovpp/VideoVPPBase.h | 29 +++--- videovpp/test/main.cpp | 96 ++++++++++++++++--- 4 files changed, 249 insertions(+), 121 deletions(-) diff --git a/videovpp/Android.mk b/videovpp/Android.mk index bc53873..12004d1 100755 --- a/videovpp/Android.mk +++ b/videovpp/Android.mk @@ -31,7 +31,7 @@ LOCAL_COPY_HEADERS := \ LOCAL_MODULE := libmix_videovpp -LOCAL_MODULE_TAGS := eng +LOCAL_MODULE_TAGS := optional include $(BUILD_SHARED_LIBRARY) @@ -41,6 +41,7 @@ LOCAL_SRC_FILES:= \ test/main.cpp LOCAL_C_INCLUDES += \ + $(TARGET_OUT_HEADERS)/libdrm \ $(TARGET_OUT_HEADERS)/libva \ $(TARGET_OUT_HEADERS)/libmix_videovpp @@ -50,7 +51,7 @@ LOCAL_SHARED_LIBRARIES := \ LOCAL_MODULE := csc_vpp -LOCAL_MODULE_TAGS := eng +LOCAL_MODULE_TAGS := optional include $(BUILD_EXECUTABLE) diff --git a/videovpp/VideoVPPBase.cpp b/videovpp/VideoVPPBase.cpp index 1564938..a57e0e6 100644 --- a/videovpp/VideoVPPBase.cpp +++ b/videovpp/VideoVPPBase.cpp @@ -1,41 +1,6 @@ #include "VideoVPPBase.h" -enum { - HAL_PIXEL_FORMAT_NV12_TILED_INTEL = 0x100, - HAL_PIXEL_FORMAT_NV12_LINEAR_INTEL = 0x101, - -// deprecated use HAL_PIXEL_FORMAT_NV12_TILED_INTEL -#if HAL_PIXEL_FORMAT_NV12_DEFINED - HAL_PIXEL_FORMAT_INTEL_NV12 = HAL_PIXEL_FORMAT_NV12, -#else - HAL_PIXEL_FORMAT_INTEL_NV12 = HAL_PIXEL_FORMAT_NV12_TILED_INTEL, -#endif - -// deprecated use HAL_PIXEL_FORMAT_NV12_LINEAR_INTEL - HAL_PIXEL_FORMAT_YUV420PackedSemiPlanar_INTEL = 0x7FA00E00, - -// deprecated use HAL_PIXEL_FORMAT_NV12_TILED_INTEL - HAL_PIXEL_FORMAT_YUV420PackedSemiPlanar_Tiled_INTEL = 0x7FA00F00, - -}; - -struct mfx_gralloc_drm_handle_t { - native_handle_t base; - int magic; - - int width; - int height; - int format; - int usage; - - int name; - int stride; - - int data_owner; - int data; -}; - -#define VPWRAPPER_NATIVE_DISPLAY 0x18c34078 +#define NATIVE_DISPLAY 0x18c34078 #define CHECK_VA_STATUS(FUNC) \ if (vret != VA_STATUS_SUCCESS) {\ @@ -74,57 +39,160 @@ VAStatus 
VPParameters::init() { for (size_t i = 0; i < num_supported_filters; i++) { switch(supported_filters[i]) { case VAProcFilterNoiseReduction: - { - num_denoise_caps = 1; - vret = vaQueryVideoProcFilterCaps(va_display, va_context, - VAProcFilterNoiseReduction, &denoise_caps, &num_denoise_caps); - CHECK_VA_STATUS("vaQueryVideoProcFilters"); - - nr.valid = true; - nr.min = denoise_caps.range.min_value; - nr.max = denoise_caps.range.max_value; - nr.def = denoise_caps.range.default_value; - nr.step = denoise_caps.range.step; - nr.cur = 0.0; - printf("VAProcFilterNoiseReduction"); - break; + num_denoise_caps = 1; + vret = vaQueryVideoProcFilterCaps(va_display, va_context, + VAProcFilterNoiseReduction, &denoise_caps, &num_denoise_caps); + CHECK_VA_STATUS("vaQueryVideoProcFilterCaps"); + + nr.valid = true; + nr.min = denoise_caps.range.min_value; + nr.max = denoise_caps.range.max_value; + nr.def = denoise_caps.range.default_value; + nr.step = denoise_caps.range.step; + nr.cur = 0.0; + break; + + case VAProcFilterSharpening: + num_sharpen_caps = 1; + vret = vaQueryVideoProcFilterCaps(va_display, va_context, + VAProcFilterSharpening, &sharpen_caps, &num_sharpen_caps); + CHECK_VA_STATUS("vaQueryVideoProcFilterCaps"); + + sharpen.valid = true; + sharpen.min = sharpen_caps.range.min_value; + sharpen.max = sharpen_caps.range.max_value; + sharpen.def = sharpen_caps.range.default_value; + sharpen.step = sharpen_caps.range.step; + sharpen.cur = 0.0; + break; + + case VAProcFilterDeblocking: + num_denoise_caps = 1; + vret = vaQueryVideoProcFilterCaps(va_display, va_context, + VAProcFilterDeblocking, &deblock_caps, &num_deblock_caps); + CHECK_VA_STATUS("vaQueryVideoProcFilterCaps"); + + deblock.valid = true; + deblock.min = deblock_caps.range.min_value; + deblock.max = deblock_caps.range.max_value; + deblock.def = deblock_caps.range.default_value; + deblock.step = deblock_caps.range.step; + deblock.cur = 0.0; + break; + + case VAProcFilterColorBalance: + num_color_balance_caps = VAProcColorBalanceCount; + vret = vaQueryVideoProcFilterCaps(va_display, va_context, + VAProcFilterColorBalance, &color_balance_caps, &num_color_balance_caps); + CHECK_VA_STATUS("vaQueryVideoProcFilterCaps"); + + for (size_t i = 0; i< num_color_balance_caps; i++) { + colorbalance[i].type = color_balance_caps[i].type; + colorbalance[i].valid = true; + colorbalance[i].min = color_balance_caps[i].range.min_value; + colorbalance[i].max = color_balance_caps[i].range.max_value; + colorbalance[i].def = color_balance_caps[i].range.default_value; + colorbalance[i].step = color_balance_caps[i].range.step; + colorbalance[i].cur = 0.0; } + break; + default: break; } } + vret = reset(true); + CHECK_VA_STATUS("reset"); + return vret; } -VAStatus VPParameters::buildfilters(VABufferID *filters, unsigned int *num_filter) { - for (int i = 0; i < num_filter_bufs; i++) { +VAStatus VPParameters::buildfilters() { + for (size_t i = 0; i < num_supported_filters; i++) { switch (supported_filters[i]) { case VAProcFilterNoiseReduction: - { - if (nr.cur != 0) { - denoise_buf.type = VAProcFilterNoiseReduction; - denoise_buf.value = nr.cur; - vret = vaCreateBuffer(va_display, va_context, - VAProcFilterParameterBufferType, - sizeof(denoise_buf), 1, &denoise_buf, &denoise_buf_id); - CHECK_VA_STATUS("vaCreateBuffer"); - filter_bufs[num_filter_bufs] = denoise_buf_id; - num_filter_bufs++; - } - break; + if (nr.cur != -1) { + denoise_buf.type = VAProcFilterNoiseReduction; + denoise_buf.value = nr.min + nr.cur * nr.step; + vret = vaCreateBuffer(va_display, va_context, + 
VAProcFilterParameterBufferType, + sizeof(denoise_buf), 1, &denoise_buf, &denoise_buf_id); + CHECK_VA_STATUS("vaCreateBuffer"); + filter_bufs[num_filter_bufs] = denoise_buf_id; + num_filter_bufs++; } + break; + + case VAProcFilterDeblocking: + if (deblock.cur != -1) { + deblock_buf.type = VAProcFilterDeblocking; + deblock_buf.value = deblock.min + deblock.cur * deblock.step; + vret = vaCreateBuffer(va_display, va_context, + VAProcFilterParameterBufferType, + sizeof(deblock_buf), 1, &deblock_buf, &deblock_buf_id); + CHECK_VA_STATUS("vaCreateBuffer"); + filter_bufs[num_filter_bufs] = deblock_buf_id; + num_filter_bufs++; + } + break; + + case VAProcFilterSharpening: + if (sharpen.cur != -1) { + sharpen_buf.type = VAProcFilterSharpening; + sharpen_buf.value = sharpen.cur; + vret = vaCreateBuffer(va_display, va_context, + VAProcFilterParameterBufferType, + sizeof(sharpen_buf), 1, &sharpen_buf, &sharpen_buf_id); + CHECK_VA_STATUS("vaCreateBuffer"); + filter_bufs[num_filter_bufs] = sharpen_buf_id; + num_filter_bufs++; + } + break; + + case VAProcFilterColorBalance: + break; + default: break; } } - memcpy(filters, filter_bufs, sizeof(VABufferID) * num_filter_bufs); - *num_filter = num_filter_bufs; + return vret; +} + +VAStatus VPParameters::reset(bool start) { + for (size_t i = 0; i < VAProcFilterCount; i++) { + if (start) { + filter_bufs[i] = VA_INVALID_ID; + } else { + if (filter_bufs[i] != VA_INVALID_ID) { + vret = vaDestroyBuffer(va_display, filter_bufs[i]); + CHECK_VA_STATUS("vaDestroyBuffer"); + filter_bufs[i] = VA_INVALID_ID; + } + } + } + num_filter_bufs = 0; + + sharpen_buf_id = denoise_buf_id = deblock_buf_id = balance_buf_id = VA_INVALID_ID; + memset(&deblock_buf, 0, sizeof(VAProcFilterParameterBuffer)); + memset(&sharpen_buf, 0, sizeof(VAProcFilterParameterBuffer)); + memset(&denoise_buf, 0, sizeof(VAProcFilterParameterBuffer)); + memset(&balance_buf, 0, + sizeof(VAProcFilterParameterBufferColorBalance)* VAProcColorBalanceCount); + + nr.cur = deblock.cur = sharpen.cur = -1; + for (size_t i = 0; i < VAProcColorBalanceCount; i++) + colorbalance[i].cur = -1; return vret; } +VPParameters::~VPParameters() { + reset(false); +} + VideoVPPBase::VideoVPPBase() : mInitialized(false), width(1280), @@ -143,7 +211,7 @@ VAStatus VideoVPPBase::start() { return VA_STATUS_SUCCESS; int va_major_version, va_minor_version; - unsigned int nativeDisplay = VPWRAPPER_NATIVE_DISPLAY; + unsigned int nativeDisplay = NATIVE_DISPLAY; VAConfigAttrib vaAttrib; va_display = vaGetDisplay(&nativeDisplay); @@ -161,23 +229,6 @@ VAStatus VideoVPPBase::start() { height, 0, NULL, 0, &va_context); CHECK_VA_STATUS("vaCreateContext"); - num_supported_filters = VAProcFilterCount; - vret = vaQueryVideoProcFilters(va_display, - va_context, supported_filters, - &num_supported_filters); - CHECK_VA_STATUS("vaQueryVideoProcFilters"); - - for (size_t i = 0; i < num_supported_filters; i++) { - switch(supported_filters[i]) { - case VAProcFilterDeblocking: - { - break; - } - default: - break; - } - } - mInitialized = true; return vret; @@ -243,7 +294,7 @@ VAStatus VideoVPPBase::_CreateSurfaceFromGrallocHandle(RenderTarget rt, VASurfac VASurfaceAttrib SurfAttrib; VASurfaceAttribExternalBuffers SurfExtBuf; - SurfExtBuf.pixel_format = VA_FOURCC_NV12; + SurfExtBuf.pixel_format = rt.pixel_format; SurfExtBuf.width = rt.width; SurfExtBuf.height = rt.height; SurfExtBuf.pitches[0] = rt.stride; @@ -260,7 +311,7 @@ VAStatus VideoVPPBase::_CreateSurfaceFromGrallocHandle(RenderTarget rt, VASurfac SurfAttrib.value.type = VAGenericValueTypePointer; 
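// Note (illustrative, not part of the original patch): the external-buffer
// descriptor is handed to libva through a generic pointer value, so
// SurfExtBuf and the local handle it wraps only need to outlive the
// synchronous vaCreateSurfaces() call below; the driver is expected to
// copy whatever it needs during that call.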
SurfAttrib.value.value.p = &SurfExtBuf; - vret = vaCreateSurfaces(va_display, VA_RT_FORMAT_YUV420, + vret = vaCreateSurfaces(va_display, rt.format, rt.width, rt.height, surf, 1, &SurfAttrib, 1); CHECK_VA_STATUS("vaCreateSurfaces"); @@ -277,8 +328,8 @@ VAStatus VideoVPPBase::_perform(VASurfaceID SrcSurf, VARectangle SrcRect, vpp_param.output_background_color = 0; vpp_param.output_color_standard = VAProcColorStandardBT601; vpp_param.filter_flags = VA_FRAME_PICTURE; - vpp_param.filters = NULL; - vpp_param.num_filters = 0; + vpp_param.filters = filter_bufs; + vpp_param.num_filters = num_filter_bufs; vpp_param.forward_references = NULL; vpp_param.num_forward_references = 0; vpp_param.backward_references = NULL; @@ -306,6 +357,10 @@ VAStatus VideoVPPBase::_perform(VASurfaceID SrcSurf, VARectangle SrcRect, CHECK_VA_STATUS("vaSyncSurface"); } + vret = vaDestroyBuffer(va_display, vpp_pipeline_buf); + CHECK_VA_STATUS("vaDestroyBuffer"); + vpp_pipeline_buf = VA_INVALID_ID; + return vret; } @@ -335,8 +390,13 @@ VAStatus VideoVPPBase::perform(RenderTarget Src, RenderTarget Dst, VPParameters printf("add dst surface %x\n", DstSurf); } - vret = vpp->buildfilters(filter_bufs, &num_filter_bufs); - CHECK_VA_STATUS("buildfilters"); + if (vpp != NULL && (vpp->num_filter_bufs > 0 && vpp->num_filter_bufs < VAProcFilterCount)) { + memcpy(filter_bufs, vpp->filter_bufs, sizeof(VABufferID) * vpp->num_filter_bufs); + num_filter_bufs = vpp->num_filter_bufs; + } else { + num_filter_bufs = 0; + } + vret = _perform(SrcSurf, Src.rect, DstSurf, Dst.rect, no_wait); CHECK_VA_STATUS("_perform"); diff --git a/videovpp/VideoVPPBase.h b/videovpp/VideoVPPBase.h index a3bdce2..98449c8 100644 --- a/videovpp/VideoVPPBase.h +++ b/videovpp/VideoVPPBase.h @@ -17,6 +17,11 @@ class VideoVPPBase; struct FilterConfig { + enum strength { + LOW, + MEDIUM, + HIGH, + }; bool valid; int type; float min, max, step, def; @@ -27,9 +32,14 @@ class VPParameters { public: static VPParameters* create(VideoVPPBase *); ~VPParameters(); - VAStatus buildfilters(VABufferID *filter_bufs, unsigned int *num_filter_bufs); + VAStatus buildfilters(); + VAStatus reset(bool start); void getNR(FilterConfig& NR) { memcpy(&NR, &nr, sizeof(FilterConfig)); } - void setNR(FilterConfig NR) { nr.cur = NR.cur; } + void setNR(FilterConfig::strength str) { nr.cur = str; } + void getDeblock(FilterConfig &DBK) { memcpy(&DBK, &deblock, sizeof(FilterConfig)); } + void SetDeblock(FilterConfig::strength str) { deblock.cur = str; } + void getSharpen(FilterConfig &SHP) { memcpy(&SHP, &sharpen, sizeof(FilterConfig)); } + void setSharpen(FilterConfig SHP) { sharpen.cur = SHP.cur; } private: bool mInitialized; @@ -61,6 +71,8 @@ private: VPParameters &operator=(const VPParameters&); VAStatus init(); + + friend class VideoVPPBase; }; struct RenderTarget { @@ -73,6 +85,7 @@ struct RenderTarget { int height; int stride; bufType type; + int format; int pixel_format; int handle; VARectangle rect; @@ -100,18 +113,6 @@ private: VASurfaceID SrcSurf, DstSurf; VASurfaceAttributeTPI attribs; - VAProcFilterType supported_filters[VAProcFilterCount]; - unsigned int num_supported_filters; - - VAProcFilterCap denoise_caps, sharpen_caps, deblock_caps; - VAProcFilterCapColorBalance color_balance_caps[VAProcColorBalanceCount]; - unsigned int num_denoise_caps, num_color_balance_caps, num_sharpen_caps, num_deblock_caps; - - VAProcFilterParameterBuffer denoise_buf, sharpen_buf, deblock_buf; - VAProcFilterParameterBufferColorBalance balance_buf[VAProcColorBalanceCount]; - - VABufferID sharpen_buf_id, 
denoise_buf_id, deblock_buf_id, balance_buf_id; - VABufferID filter_bufs[VAProcFilterCount]; unsigned int num_filter_bufs; diff --git a/videovpp/test/main.cpp b/videovpp/test/main.cpp index 9c6e095..0900797 100644 --- a/videovpp/test/main.cpp +++ b/videovpp/test/main.cpp @@ -7,10 +7,16 @@ #include #include #include +#include #include #include "VideoVPPBase.h" +#include +#include + + +#define ALIGN(x, align) (((x) + (align) - 1) & (~((align) - 1))) enum { HAL_PIXEL_FORMAT_NV12_TILED_INTEL = 0x100, @@ -41,10 +47,14 @@ struct mfx_gralloc_drm_handle_t { int usage; int name; - int stride; - - int data_owner; - int data; + int pid; // creator + + mutable int other; // registered owner (pid) + mutable union { int data1; mutable drm_intel_bo *bo; }; // drm buffer object + union { int data2; uint32_t fb; }; // framebuffer id + int pitch; // buffer pitch (in bytes) + int allocWidth; // Allocated buffer width in pixels. + int allocHeight; // Allocated buffer height in lines. }; static void usage(const char *me) { @@ -83,6 +93,7 @@ int main(int argc, char *argv[]) int has_output = 0; int has_width = 0; int has_height = 0; + FILE *fIn, *fOut; while ((res = getopt(argc, argv, "i:o:w:h:")) >= 0) { switch (res) { @@ -90,12 +101,14 @@ int main(int argc, char *argv[]) { strcpy(input, optarg); has_input = 1; + fIn = fopen(input, "r"); break; } case 'o': { strcpy(output, optarg); has_output = 1; + fOut = fopen(output, "w+"); break; } case 'w': @@ -117,7 +130,7 @@ int main(int argc, char *argv[]) } } - if (!has_input || !has_output || !has_width || !has_height) + if (!has_input || !has_output || !has_width || !has_height || !fIn || !fOut) usage(me); hw_module_t const* module; @@ -127,23 +140,37 @@ int main(int argc, char *argv[]) struct gralloc_module_t *gralloc_module; struct mfx_gralloc_drm_handle_t *pGrallocHandle; RenderTarget Src, Dst; + void *vaddr[3]; res = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); gralloc_module = (struct gralloc_module_t*)module; res = gralloc_open(module, &mAllocDev); res = mAllocDev->alloc(mAllocDev, width, height, HAL_PIXEL_FORMAT_YCbCr_422_I, - GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE, + GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE | + GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK, &handle_YUY2, &stride_YUY2); if (res != 0) printf("%d: alloc()\n", __LINE__); else { pGrallocHandle = (struct mfx_gralloc_drm_handle_t *)handle_YUY2; printf("YUY2 %d %d %d\n", pGrallocHandle->width, - pGrallocHandle->height, stride_YUY2); + pGrallocHandle->height, pGrallocHandle->pitch); + res = gralloc_module->lock(gralloc_module, handle_YUY2, + GRALLOC_USAGE_SW_WRITE_MASK, + 0, 0, width, height, (void**)&vaddr); + if (res != 0) { + printf("lock error\n"); + } else { + res = fread(vaddr[0], 1, width * height * 2, fIn); + printf("fread %d\n", res); + gralloc_module->unlock(gralloc_module, handle_YUY2); + } Src.width = pGrallocHandle->width; Src.height = pGrallocHandle->height; - Src.stride = stride_YUY2; + Src.stride = pGrallocHandle->pitch; + Src.format = VA_RT_FORMAT_YUV422; + Src.pixel_format = VA_FOURCC_YUY2; Src.type = RenderTarget::KERNEL_DRM; Src.handle = pGrallocHandle->name; Src.rect.x = Src.rect.y = 0; @@ -152,17 +179,20 @@ int main(int argc, char *argv[]) } res = mAllocDev->alloc(mAllocDev, width, height, HAL_PIXEL_FORMAT_NV12_TILED_INTEL, - GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE, + GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE | + GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK, &handle_NV12, &stride_NV12); if (res != 0) 
printf("%d: alloc()\n", __LINE__); else { pGrallocHandle = (struct mfx_gralloc_drm_handle_t *)handle_NV12; printf("NV12 %d %d %d\n", pGrallocHandle->width, - pGrallocHandle->height, stride_NV12); + pGrallocHandle->height, pGrallocHandle->pitch); Dst.width = pGrallocHandle->width; Dst.height = pGrallocHandle->height; - Dst.stride = stride_NV12; + Dst.stride = pGrallocHandle->pitch; + Dst.format = VA_RT_FORMAT_YUV420; + Dst.pixel_format = VA_FOURCC_NV12; Dst.type = RenderTarget::KERNEL_DRM; Dst.handle = pGrallocHandle->name; Dst.rect.x = 0; @@ -179,14 +209,50 @@ int main(int argc, char *argv[]) VPParameters *vpp = VPParameters::create(p); - vret = p->perform(Src, Dst, vpp, false); - CHECK_VA_STATUS("doVp"); + if (vpp) { + FilterConfig filter; + vpp->getNR(filter); + printf("valid %d def %f step %f\n", filter.valid, filter.def, filter.step); + filter.cur = 0.5; + vpp->setNR(FilterConfig::LOW); + + vpp->buildfilters(); + + vret = p->perform(Src, Dst, vpp, false); + CHECK_VA_STATUS("doVp"); - vret = p->perform(Src, Dst, vpp, false); - CHECK_VA_STATUS("doVp"); + vret = p->perform(Src, Dst, vpp, false); + CHECK_VA_STATUS("doVp"); + + vpp->reset(false); + } p->stop(); + { + res = gralloc_module->lock(gralloc_module, handle_NV12, + GRALLOC_USAGE_SW_READ_MASK, + 0, 0, width, height, (void**)&vaddr); + if (res != 0) { + printf("lock error\n"); + } else { + unsigned char *pY = (unsigned char*)vaddr[0]; + unsigned char *pUV = pY + stride_NV12 * ALIGN(height, 32); + //unsigned char *pUV = pY + stride_NV12 * height; + for (res =0, i = 0; i < height; i++) { + res += fwrite(pY, 1, width, fOut); + pY += stride_NV12; + } + printf("fwrite %d\n", res); + for (res =0, i = 0; i < height / 2; i++) { + res += fwrite(pUV, 1, width, fOut); + pUV += stride_NV12; + } + printf("fwrite %d\n", res); + gralloc_module->unlock(gralloc_module, handle_NV12); + } + } + mAllocDev->free(mAllocDev, handle_YUY2); mAllocDev->free(mAllocDev, handle_NV12); -- cgit v1.2.3 From 45a007f28bd006caaa002b45ab4da4afb9a99e29 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Tue, 13 Aug 2013 17:29:30 +0800 Subject: support different color format in gralloc buffer mapping in libmix BZ: 130945 support different color format in gralloc buffer mapping in libmix, and update mix_encoder2 to fix some bugs and add support for surfacemediasource as well Change-Id: I3026aab9dae3fea45d109d9945f50bf2415e4f78 Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/125866 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- test/mix_encoder2.cpp | 255 ++++++++++++++++++++++++++------------ videoencoder/VideoEncoderBase.cpp | 1 + 2 files changed, 178 insertions(+), 78 deletions(-) diff --git a/test/mix_encoder2.cpp b/test/mix_encoder2.cpp index 5682d33..3bdd07f 100755 --- a/test/mix_encoder2.cpp +++ b/test/mix_encoder2.cpp @@ -7,7 +7,8 @@ #include #include #include -//#include +#include + //libmix #include @@ -21,6 +22,7 @@ #include #include #include +#include #include #include @@ -66,6 +68,61 @@ static unsigned int pts; return err; \ } +static int YUV_generator_planar(int width, int height, + unsigned char *Y_start, int Y_pitch, + unsigned char *U_start, int U_pitch, + unsigned char *V_start, int V_pitch, + int UV_interleave) +{ + static int row_shift = 0; + int row; + + /* copy Y plane */ + for (row=0;row composer(ComposerService::getComposerService()); mGraphicBufferAlloc = composer->createGraphicBufferAlloc(); - uint32_t usage = GraphicBuffer::USAGE_HW_TEXTURE;// | GraphicBuffer::USAGE_HW_COMPOSER; + uint32_t 
usage = GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_WRITE_OFTEN;// | GraphicBuffer::USAGE_HW_COMPOSER; int format = HAL_PIXEL_FORMAT_NV12; if (mColor == 1) format = 0x7FA00E00; // = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar in OMX_IVCommon.h @@ -567,11 +565,16 @@ public: return UNKNOWN_ERROR; } mGraphicBuffer[i] = graphicBuffer; - graphicBuffer->lock(usage | GraphicBuffer::USAGE_SW_WRITE_OFTEN |GraphicBuffer::USAGE_SW_READ_OFTEN, (void**)(&mUsrptr[i])); + + void* vaddr[3]; + if (graphicBuffer->lock(usage, &vaddr[0]) != OK) + return UNKNOWN_ERROR; mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)mGraphicBuffer[i]->handle); graphicBuffer->unlock(); + mUsrptr[i] = (uint8_t*)vaddr[0]; + IMG_native_handle_t* h = (IMG_native_handle_t*) mGraphicBuffer[i]->handle; mStride = h->iWidth; mHeight = h->iHeight; @@ -595,6 +598,7 @@ class GrallocSource : public DummySource { public: GrallocSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv) : DummySource (width, height, stride, nFrames, fps, mdata, yuv) { + mColor = 0; } virtual ~GrallocSource () { @@ -604,7 +608,7 @@ public: status_t createResource() { - int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER; + int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_WRITE_OFTEN; int format = HAL_PIXEL_FORMAT_NV12; if (mColor == 1) format = 0x7FA00E00; // = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar in OMX_IVCommon.h @@ -615,11 +619,15 @@ public: { if (gfx_alloc(mWidth, mHeight, format, usage, &mHandle[i], (int32_t*)&mStride) != 0) return UNKNOWN_ERROR; - if (gfx_lock(mHandle[i], usage | GRALLOC_USAGE_SW_WRITE_OFTEN, 0, 0, mWidth, mHeight, (void**)(&mUsrptr[i])) != 0) + void* vaddr[3]; + + if (gfx_lock(mHandle[i], usage, 0, 0, mWidth, mHeight, &vaddr[0]) != 0) return UNKNOWN_ERROR; mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)mHandle[i]); gfx_unlock(mHandle[i]); + mUsrptr[i] = (uint8_t*)vaddr[0]; IMG_native_handle_t* h = (IMG_native_handle_t*) mHandle[i]; + mStride = h->iWidth; mHeight = h->iHeight; } @@ -708,6 +716,86 @@ private: int mColor; }; +class MixSurfaceMediaSource : public SurfaceMediaSource { + +public: + MixSurfaceMediaSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv) + :SurfaceMediaSource(width, height){ + mMaxNumFrames = nFrames; + mFPS = fps; + } + + virtual ~MixSurfaceMediaSource() { + } + + status_t start(MetaData *params) { + mSTC = new SurfaceTextureClient(static_cast >(getBufferQueue())); + mANW = mSTC; + mRunning = true; + + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + + pthread_create(&mThread, &attr, MixSurfaceMediaSource::ThreadFunc, this); + pthread_attr_destroy(&attr); + + SurfaceMediaSource::start(params); + + return OK; + } + + status_t stop() { + mRunning = false; +// void *dummy; +// pthread_join(mThread, &dummy); + SurfaceMediaSource::stop(); + + return OK; + } + +public: + int mMaxNumFrames; + int mFPS; + +private: + sp mSTC; + sp mANW; + pthread_t mThread; + bool mRunning; + + static void *ThreadFunc(void *me) { + MixSurfaceMediaSource *source = static_cast(me); + + // get buffer directly from member + sp buf; + + for ( gNumFramesOutput = 0; gNumFramesOutput < source->mMaxNumFrames; gNumFramesOutput++) { + + ANativeWindowBuffer* anb; + native_window_set_buffers_format(source->mANW.get(), HAL_PIXEL_FORMAT_NV12); + native_window_dequeue_buffer_and_wait(source->mANW.get(), &anb); + // We 
do not fill the buffer in. Just queue it back. + sp buf(new GraphicBuffer(anb, false)); +#if 1 + uint8_t* img[3]; + buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img[0])); + IMG_native_handle_t* h = (IMG_native_handle_t*) buf->handle; + YUV_generator_planar(h->iWidth, h->iHeight, img[0], h->iWidth, img[0] + h->iWidth*h->iHeight, h->iWidth, 0, 0, 1); + buf->unlock(); +#endif + if (NO_ERROR != source->mANW->queueBuffer(source->mANW.get(), buf->getNativeBuffer(), -1)) + return NULL; + else + usleep(1000000 / source->mFPS); + } + + source->stop(); + + return NULL; + } +}; + static const char *AVC_MIME_TYPE = "video/h264"; static const char *MPEG4_MIME_TYPE = "video/mpeg4"; static const char *H263_MIME_TYPE = "video/h263"; @@ -769,6 +857,9 @@ public: success = meta->findInt32('difs', &mDisableFrameSkip); CHECK(success); + success = meta->findInt32('sync', &mSyncMode); + CHECK(success); + // const char *RCMODE[] = {"VBR", "CBR", "VCM", "NO_RC", NULL}; VideoRateControl RC_MODES[] = {RATE_CONTROL_VBR, RATE_CONTROL_CBR, @@ -921,19 +1012,20 @@ public: //input buffers int loop=1; - if (mEncodeFrameCount == 0) + if (mSyncMode == 0 && mEncodeFrameCount == 0) loop = 2; for(int i=0; iread (&src); + if (err == ERROR_END_OF_STREAM) { LOG ("\nReach Resource EOS, still need to get final frame encoded data\n"); mSrcEOS = true; + if (mSyncMode) + return ERROR_END_OF_STREAM; }else { CHECK_STATUS(err); - err = encode(src); CHECK_STATUS(err); } @@ -944,16 +1036,18 @@ public: CHECK_STATUS(err); VideoOutputFormat format; - if ((mEncodeFrameCount == 2 && (strcasecmp(mMixCodec, H263_MIME_TYPE) != 0))&& - (mEncodeFrameCount == 2 && (strcasecmp(mMixCodec, VP8_MIME_TYPE) != 0))){ + int n = 2; + if (mSyncMode) + n = 1; + + if ((mEncodeFrameCount == n && (strcasecmp(mMixCodec, H263_MIME_TYPE) != 0))&& + (mEncodeFrameCount == n && (strcasecmp(mMixCodec, VP8_MIME_TYPE) != 0))){ format = OUTPUT_CODEC_DATA; mFirstFrame = true; }else - format = OUTPUT_EVERYTHING; - + format = OUTPUT_EVERYTHING;; err = getoutput(*buffer, format); CHECK_STATUS(err); - return OK; } @@ -1037,8 +1131,8 @@ private: int mWinSize; int mIdrInt; int mDisableFrameSkip; + int mSyncMode; -// int mSyncMode; bool mFirstFrame; bool mSrcEOS; @@ -1330,7 +1424,7 @@ int main(int argc, char* argv[]) int IntraPeriod = 30; int DisableFrameSkip = 0; int OutFormat = 0; -// bool SyncMode = false; + int SyncMode = 0; char* OutFileName = "out.264"; const char* Yuvfile = NULL; @@ -1545,7 +1639,11 @@ int main(int argc, char* argv[]) } else if (SrcType == 5) { source = new MemHeapSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, SrcFps, MetadataMode, Yuvfile); - } else{ + } else if (SrcType == 7) { + source = new MixSurfaceMediaSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, + SrcFps, MetadataMode, Yuvfile); + SyncMode = 1; + }else{ printf("Source Type is not supported\n"); return 0; } @@ -1584,6 +1682,7 @@ int main(int argc, char* argv[]) enc_meta->setInt32('wsiz', WinSize); enc_meta->setInt32('idri', WinSize); enc_meta->setInt32('difs', DisableFrameSkip); + enc_meta->setInt32('sync', SyncMode); uint32_t encoder_flags = 0; if (MetadataMode) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index e31ab8c..f0d634e 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -2186,6 +2186,7 @@ VASurfaceID VideoEncoderBase::CreateSurfaceFromExternalBuf(int32_t value, ValueI switch(vinfo.mode) { case MEM_MODE_GFXHANDLE: type = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC; + extbuf.pixel_format = vinfo.format; 
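// Descriptive note (added commentary): forwarding the gralloc buffer's own
// format here, instead of hardcoding NV12, is what lets differently
// formatted gralloc sources map onto a VA surface with the matching
// fourcc, which is the stated intent of this patch.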
break; case MEM_MODE_KBUFHANDLE: type = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM; -- cgit v1.2.3 From 3470aa32803a03134629064a37308c3ac40fd7cd Mon Sep 17 00:00:00 2001 From: gji2 Date: Fri, 2 Aug 2013 13:19:45 +0800 Subject: Dynamic IDR frame setting BZ: 122760 Setting the nth frame to be IDR frame, this frame must not be B frame, and assign the picture parameter idr_pic_flag to 1 when force the frame to be IDR frame. Change-Id: I051d357869f77bf8468e4cbd07ad85c0a1f1f75a Signed-off-by: gji2 Reviewed-on: http://android.intel.com:8080/125426 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videoencoder/VideoEncoderAVC.cpp | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 665cc7e..6deeac6 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -149,8 +149,10 @@ Encode_Status VideoEncoderAVC::derivedSetConfig(VideoParamConfigSet *videoEncCon break; } case VideoConfigTypeIDRRequest: { - - mNewHeader = true; + if(mVideoParamsAVC.ipPeriod >1) + return ENCODE_FAIL; + else + mNewHeader = true; break; } case VideoConfigTypeSliceNum: { @@ -645,13 +647,13 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) { LOG_V( "Begin\n"); if (mFrameNum == 0 || mNewHeader) { - if (mRenderHrd) { ret = renderHrd(); mRenderHrd = false; CHECK_ENCODE_STATUS_RETURN("renderHrd"); } + mFrameNum = 0; ret = renderSequenceParams(task); CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); mNewHeader = false; //Set to require new header filed to false @@ -1038,7 +1040,6 @@ Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) { avcPicParams.pic_fields.bits.deblocking_filter_control_present_flag = 1; avcPicParams.frame_num = mFrameNum; - avcPicParams.pic_fields.bits.idr_pic_flag = (mFrameNum == 0); avcPicParams.pic_fields.bits.reference_pic_flag = 1; // Not sure whether these settings work for all drivers }else { @@ -1047,6 +1048,7 @@ Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) { avcPicParams.ReferenceFrames[i].picture_id = mAutoRefSurfaces[i]; } + avcPicParams.pic_fields.bits.idr_pic_flag = (mFrameNum == 0); avcPicParams.pic_fields.bits.entropy_coding_mode_flag = mVideoParamsAVC.bEntropyCodingCABAC; avcPicParams.coded_buf = task->coded_buffer; avcPicParams.last_picture = 0; -- cgit v1.2.3 From adf53c53c0fcf04754bcd7e1f462daeac3508254 Mon Sep 17 00:00:00 2001 From: Liu Bolun Date: Tue, 30 Jul 2013 09:19:28 +0800 Subject: Refine VP8 encode middlerware to fix messy blocks issue. BZ: 124706 Refine VP8 picture parameters;hardcode coded buffer size as 2M; remove render slice parameters for VP8 encode. 
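For reference, the sizing rule this change introduces reduces to a fixed
2MB coded buffer; a minimal sketch of that branch (the helper name is
illustrative and not part of the patch):

    // Mirrors the VP8 branch of getMaxOutSize(): 2MB rounded up to a
    // 32-byte boundary. 2MB is already 32-aligned, so this returns
    // exactly 2 * 1024 * 1024.
    static uint32_t vp8CodedBufSize() {
        return (2 * 1024 * 1024 + 31) & ~31u;
    }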
Change-Id: If0d0c485f5382618cd53df3f757f0ab5b9f57002 Signed-off-by: Liu Bolun Signed-off-by: pingshix Reviewed-on: http://android.intel.com:8080/121814 Reviewed-by: cactus Tested-by: cactus --- videoencoder/VideoEncoderBase.cpp | 41 ++++++++++++++++++++++++++------------- videoencoder/VideoEncoderVP8.cpp | 4 +++- 2 files changed, 31 insertions(+), 14 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index f0d634e..2a84924 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1347,21 +1347,36 @@ Encode_Status VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) { return ENCODE_SUCCESS; } - // base on the rate control mode to calculate the defaule encoded buffer size - if (mComParams.rcMode == VA_RC_NONE) { - mCodedBufSize = (size * 400) / (16 * 16); - // set to value according to QP - } else { - mCodedBufSize = mComParams.rcParams.bitRate / 4; - } + // here, VP8 is different from AVC/H263 + if(mComParams.profile == VAProfileVP8Version0_3) // for VP8 encode + { + // According to VIED suggestions, in CBR mode, coded buffer should be the size of 3 bytes per luma pixel + // in CBR_HRD mode, coded buffer size should be 5 * rc_buf_sz * rc_target_bitrate; + // now we just hardcode mCodedBufSize as 2M to walk round coded buffer size issue; + /* + if(mComParams.rcMode == VA_RC_CBR) // CBR_HRD mode + mCodedBufSize = 5 * mComParams.rcParams.bitRate * 6000; + else // CBR mode + mCodedBufSize = 3 * mComParams.resolution.width * mComParams.resolution.height; + */ + mCodedBufSize = (2 * 1024 * 1024 + 31) & (~31); + } + else // for AVC/H263/MPEG4 encode + { + // base on the rate control mode to calculate the defaule encoded buffer size + if (mComParams.rcMode == VA_RC_NONE) { + mCodedBufSize = (size * 400) / (16 * 16); + // set to value according to QP + } else { + mCodedBufSize = mComParams.rcParams.bitRate / 4; + } - mCodedBufSize = - max (mCodedBufSize , (size * 400) / (16 * 16)); + mCodedBufSize = max (mCodedBufSize , (size * 400) / (16 * 16)); - // in case got a very large user input bit rate value - mCodedBufSize = - min(mCodedBufSize, (size * 1.5 * 8)); - mCodedBufSize = (mCodedBufSize + 15) &(~15); + // in case got a very large user input bit rate value + mCodedBufSize = min(mCodedBufSize, (size * 1.5 * 8)); + mCodedBufSize = (mCodedBufSize + 15) &(~15); + } *maxSize = mCodedBufSize; return ENCODE_SUCCESS; diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index e8a05e1..10edc2b 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -40,7 +40,7 @@ VideoEncoderVP8::VideoEncoderVP8() mVideoConfigVP8.refresh_alternate_frame = 1; mVideoConfigVP8.refresh_entropy_probs = 0; mVideoConfigVP8.value = 0; - mVideoConfigVP8.sharpness_level = 0; + mVideoConfigVP8.sharpness_level = 2; mComParams.profile = VAProfileVP8Version0_3; } @@ -55,6 +55,7 @@ Encode_Status VideoEncoderVP8::renderSequenceParams() { LOG_V( "Begin\n"); + memset(&(vp8SeqParam),0x00, sizeof(VAEncSequenceParameterBufferVP8)); vp8SeqParam.frame_width = mComParams.resolution.width; vp8SeqParam.frame_height = mComParams.resolution.height; vp8SeqParam.error_resilient = mVideoParamsVP8.error_resilient; @@ -85,6 +86,7 @@ Encode_Status VideoEncoderVP8::renderPictureParams(EncodeTask *task) { VAEncPictureParameterBufferVP8 vp8PicParam; LOG_V( "Begin\n"); + memset(&(vp8PicParam),0x00, sizeof(VAEncPictureParameterBufferVP8)); vp8PicParam.coded_buf = task->coded_buffer; vp8PicParam.pic_flags.value = 0; 
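// Descriptive note (added commentary): the memset added above is the core
// of this fix. Stale stack bytes in unset VAEncPictureParameterBufferVP8
// fields could reach the driver and produce the messy macroblocks this
// patch addresses; zero the whole struct first, then set only the fields
// that are intentionally configured.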
vp8PicParam.ref_flags.bits.force_kf = mVideoConfigVP8.force_kf; //0;
-- cgit v1.2.3


From 908c059361f7a1b0d5bb8c57559ff696fd3b8653 Mon Sep 17 00:00:00 2001
From: Nana GUo
Date: Thu, 29 Aug 2013 06:20:34 -0400
Subject: libmix-vbp: fix VC-1 clips that play back with green frames

BZ: 130073

The post_processing flag is not set correctly in the parser.

Change-Id: I70c1314975a7d6a28a50b6c994b96d5d138fe67b
Signed-off-by: Nana GUo
Reviewed-on: http://android.intel.com:8080/129155
Reviewed-by: Shi, PingX
Tested-by: Shi, PingX
Reviewed-by: cactus
Tested-by: cactus
---
 mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c | 1 +
 1 file changed, 1 insertion(+)

diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
index 4a8d6d3..13a5a89 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
@@ -863,6 +863,7 @@ static void vbp_pack_picture_params_vc1(
     pic_parms->range_reduction_frame = picLayerHeader->RANGEREDFRM;
     pic_parms->rounding_control = picLayerHeader->RNDCTRL;
     pic_parms->post_processing = picLayerHeader->POSTPROC;
+    pic_parms->post_processing = seqLayerHeader->POSTPROCFLAG;
     /* fix this. Add RESPIC to parser. */
     pic_parms->picture_resolution_index = 0;
     pic_parms->luma_scale = picLayerHeader->LUMSCALE;
-- cgit v1.2.3


From 4a7548c9a5a854aed713bb0d558fb3f406b37a85 Mon Sep 17 00:00:00 2001
From: Cheng Yao
Date: Fri, 6 Sep 2013 15:41:15 +0800
Subject: imagedecoder: standalone JPEG decode lib

BZ: 135392

JPEG decode is now used not only by skia but also by other modules such
as the usb camera. A standalone lib wrapping VAAPI JPEG decoding is
implemented for their use.

Change-Id: I7fe07faacc810bd237f367b6149ca1cd35c9a773
Signed-off-by: Cheng Yao
Reviewed-on: http://android.intel.com:8080/130405
Reviewed-by: Shi, PingX
Tested-by: Shi, PingX
Reviewed-by: cactus
Tested-by: cactus
---
 imagedecoder/Android.mk | 89 +-
 imagedecoder/ImageDecoderTrace.c | 45 -
 imagedecoder/ImageDecoderTrace.cpp | 45 +
 imagedecoder/ImageDecoderTrace.h | 11 +-
 imagedecoder/JPEGBlitter.cpp | 88 ++
 imagedecoder/JPEGBlitter.h | 53 ++
 imagedecoder/JPEGBlitter_gen.cpp | 441 ++++++++++
 imagedecoder/JPEGBlitter_img.cpp | 35 +
 imagedecoder/JPEGCommon.h | 186 ++++
 imagedecoder/JPEGCommon_Gen.h | 41 +
 imagedecoder/JPEGCommon_Img.h | 34 +
 imagedecoder/JPEGDecoder.c | 1167 --------------------
 imagedecoder/JPEGDecoder.cpp | 849 +++++++++++++++
 imagedecoder/JPEGDecoder.h | 140 ++-
 imagedecoder/JPEGDecoder_gen.cpp | 210 +++++
 imagedecoder/JPEGDecoder_img.cpp | 99 +++
 imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp | 499 +++++++++
 imagedecoder/JPEGDecoder_libjpeg_wrapper.h | 92 ++
 imagedecoder/JPEGParser.c | 122 ---
 imagedecoder/JPEGParser.cpp | 124 +++
 imagedecoder/JPEGParser.h | 21 +-
 imagedecoder/test/testdecode.cpp | 428 ++++++++++
 22 files changed, 3371 insertions(+), 1448 deletions(-)
 delete mode 100644 imagedecoder/ImageDecoderTrace.c
 create mode 100644 imagedecoder/ImageDecoderTrace.cpp
 create mode 100644 imagedecoder/JPEGBlitter.cpp
 create mode 100644 imagedecoder/JPEGBlitter.h
 create mode 100644 imagedecoder/JPEGBlitter_gen.cpp
 create mode 100644 imagedecoder/JPEGBlitter_img.cpp
 create mode 100644 imagedecoder/JPEGCommon.h
 create mode 100644 imagedecoder/JPEGCommon_Gen.h
 create mode 100644 imagedecoder/JPEGCommon_Img.h
 delete mode 100644 imagedecoder/JPEGDecoder.c
 create mode 100644 imagedecoder/JPEGDecoder.cpp
 create mode 100644 imagedecoder/JPEGDecoder_gen.cpp
 create mode 100644 imagedecoder/JPEGDecoder_img.cpp
 create mode 100644
imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp create mode 100644 imagedecoder/JPEGDecoder_libjpeg_wrapper.h delete mode 100644 imagedecoder/JPEGParser.c create mode 100644 imagedecoder/JPEGParser.cpp create mode 100644 imagedecoder/test/testdecode.cpp diff --git a/imagedecoder/Android.mk b/imagedecoder/Android.mk index 6795a06..a7a59b3 100644 --- a/imagedecoder/Android.mk +++ b/imagedecoder/Android.mk @@ -1,44 +1,105 @@ -#ifeq ($(strip $(USE_INTEL_JPEGDEC)),true) LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES += \ - JPEGDecoder.c \ - JPEGParser.c \ - ImageDecoderTrace.c + JPEGDecoder.cpp \ + JPEGBlitter.cpp \ + JPEGParser.cpp \ + ImageDecoderTrace.cpp LOCAL_C_INCLUDES += \ $(LOCAL_PATH) \ - $(TOP)/external/jpeg \ $(TARGET_OUT_HEADERS)/libva -LOCAL_COPY_HEADERS_TO := libjpeg_hw +LOCAL_COPY_HEADERS_TO := libjpegdec LOCAL_COPY_HEADERS := \ JPEGDecoder.h \ - JPEGParser.h \ + JPEGCommon.h \ ImageDecoderTrace.h LOCAL_SHARED_LIBRARIES += \ libcutils \ + libutils \ libva-android \ libva \ - libva-tpi + libva-tpi \ + libhardware LOCAL_LDLIBS += -lpthread LOCAL_CFLAGS += -Wno-multichar -LOCAL_CFLAGS += -DUSE_INTEL_JPEGDEC -ifeq ($(JPEGDEC_USES_GEN),true) -LOCAL_C_INCLUDES += $(TARGET_OUT_HEADERS) -LOCAL_CFLAGS += -DJPEGDEC_USES_GEN +ifeq ($(TARGET_BOARD_PLATFORM),baytrail) +LOCAL_SRC_FILES += JPEGBlitter_gen.cpp +LOCAL_SRC_FILES += JPEGDecoder_gen.cpp +else +LOCAL_SRC_FILES += JPEGBlitter_img.cpp +LOCAL_SRC_FILES += JPEGDecoder_img.cpp endif -LOCAL_MODULE:= libjpeg_hw +LOCAL_MODULE:= libjpegdec LOCAL_MODULE_TAGS := optional include $(BUILD_SHARED_LIBRARY) -#endif +ifeq ($(TARGET_BOARD_PLATFORM),baytrail) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES += \ + test/testdecode.cpp + +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH) \ + $(TARGET_OUT_HEADERS)/libva + +LOCAL_SHARED_LIBRARIES += \ + libcutils \ + libutils \ + libva-android \ + libva \ + libva-tpi \ + libjpegdec \ + libhardware + +LOCAL_LDLIBS += -lpthread +LOCAL_CFLAGS += -Wno-multichar + +LOCAL_MODULE:= testjpegdec +LOCAL_MODULE_TAGS := optional + +include $(BUILD_EXECUTABLE) +endif + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES += \ + JPEGDecoder_libjpeg_wrapper.cpp + +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH) \ + $(TOP)/external/jpeg \ + $(TARGET_OUT_HEADERS)/libva \ + $(TARGET_OUT_HEADERS)/libjpegdec + +LOCAL_COPY_HEADERS_TO := libjpeg_hw + +LOCAL_COPY_HEADERS := \ + JPEGDecoder_libjpeg_wrapper.h + +LOCAL_SHARED_LIBRARIES += \ + libcutils \ + libutils \ + liblog \ + libjpegdec \ + libhardware + +LOCAL_LDLIBS += -lpthread +LOCAL_CFLAGS += -Wno-multichar +LOCAL_CFLAGS += -DUSE_INTEL_JPEGDEC + +LOCAL_MODULE:= libjpeg_hw +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) diff --git a/imagedecoder/ImageDecoderTrace.c b/imagedecoder/ImageDecoderTrace.c deleted file mode 100644 index 3336b85..0000000 --- a/imagedecoder/ImageDecoderTrace.c +++ /dev/null @@ -1,45 +0,0 @@ -/* INTEL CONFIDENTIAL -* Copyright (c) 2012 Intel Corporation. All rights reserved. -* -* The source code contained or described herein and all documents -* related to the source code ("Material") are owned by Intel -* Corporation or its suppliers or licensors. Title to the -* Material remains with Intel Corporation or its suppliers and -* licensors. The Material contains trade secrets and proprietary -* and confidential information of Intel or its suppliers and -* licensors. The Material is protected by worldwide copyright and -* trade secret laws and treaty provisions. 
No part of the Material -* may be used, copied, reproduced, modified, published, uploaded, -* posted, transmitted, distributed, or disclosed in any way without -* Intel's prior express written permission. -* -* No license under any patent, copyright, trade secret or other -* intellectual property right is granted to or conferred upon you -* by disclosure or delivery of the Materials, either expressly, by -* implication, inducement, estoppel or otherwise. Any license -* under such intellectual property rights must be express and -* approved by Intel in writing. -* -*/ - - - -#include "ImageDecoderTrace.h" - -#ifdef ENABLE_IMAGE_DECODER_TRACE - -void TraceImageDecoder(const char* cat, const char* fun, int line, const char* format, ...) -{ - if (NULL == cat || NULL == fun || NULL == format) - return; - - printf("%s %s(#%d): ", cat, fun, line); - va_list args; - va_start(args, format); - vprintf(format, args); - va_end(args); - printf("\n"); -} - -#endif - diff --git a/imagedecoder/ImageDecoderTrace.cpp b/imagedecoder/ImageDecoderTrace.cpp new file mode 100644 index 0000000..3336b85 --- /dev/null +++ b/imagedecoder/ImageDecoderTrace.cpp @@ -0,0 +1,45 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + + +#include "ImageDecoderTrace.h" + +#ifdef ENABLE_IMAGE_DECODER_TRACE + +void TraceImageDecoder(const char* cat, const char* fun, int line, const char* format, ...) +{ + if (NULL == cat || NULL == fun || NULL == format) + return; + + printf("%s %s(#%d): ", cat, fun, line); + va_list args; + va_start(args, format); + vprintf(format, args); + va_end(args); + printf("\n"); +} + +#endif + diff --git a/imagedecoder/ImageDecoderTrace.h b/imagedecoder/ImageDecoderTrace.h index 1f67415..466b606 100644 --- a/imagedecoder/ImageDecoderTrace.h +++ b/imagedecoder/ImageDecoderTrace.h @@ -50,10 +50,17 @@ TraceImageDecoder(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) #else // for Android OS -//#define LOG_NDEBUG 0 - +#ifdef LOG_TAG +#undef LOG_TAG +#endif #define LOG_TAG "ImageDecoder" +#ifdef LOG_NDEBUG +#undef LOG_NDEBUG +#endif +#define LOG_NDEBUG 0 + + #include #define ETRACE(...) ALOGE(__VA_ARGS__) #define WTRACE(...) ALOGW(__VA_ARGS__) diff --git a/imagedecoder/JPEGBlitter.cpp b/imagedecoder/JPEGBlitter.cpp new file mode 100644 index 0000000..cb1e917 --- /dev/null +++ b/imagedecoder/JPEGBlitter.cpp @@ -0,0 +1,88 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2013 Intel Corporation. All rights reserved. 
+* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +* Authors: +* Yao Cheng +* +*/ +//#define LOG_NDEBUG 0 + +#include +#include +#include "JPEGBlitter.h" +#include "JPEGDecoder.h" + +#ifdef NDEBUG +#undef NDEBUG +#endif +#include +//#define LOG_TAG "JPEGBlitter" + +JpegBlitter::JpegBlitter() + :mDecoder(NULL), + mConfigId(VA_INVALID_ID), + mContextId(VA_INVALID_ID) +{ + // empty +} + +JpegBlitter::~JpegBlitter() +{ + if (mDecoder) { + destroyContext(); + } +} + +void JpegBlitter::destroyContext() +{ + if (mDecoder == NULL) + return; + + Mutex::Autolock autoLock(mLock); + if (mDecoder) { + vaDestroyContext(mDecoder->mDisplay, mContextId); + mContextId = VA_INVALID_ID; + vaDestroyConfig(mDecoder->mDisplay, mConfigId); + mConfigId = VA_INVALID_ID; + mDecoder = NULL; + } +} + +void JpegBlitter::setDecoder(JpegDecoder &decoder) +{ + destroyContext(); + Mutex::Autolock autoLock(mLock); + mDecoder = &decoder; + VAConfigAttrib vpp_attrib; + VAStatus st; + vpp_attrib.type = VAConfigAttribRTFormat; + vpp_attrib.value = VA_RT_FORMAT_YUV420; + st = vaCreateConfig(mDecoder->mDisplay, VAProfileNone, + VAEntrypointVideoProc, + &vpp_attrib, + 1, &mConfigId); + assert(st == VA_STATUS_SUCCESS); + st = vaCreateContext(mDecoder->mDisplay, mConfigId, 1920, 1080, 0, NULL, 0, &mContextId); + assert(st == VA_STATUS_SUCCESS); +} + diff --git a/imagedecoder/JPEGBlitter.h b/imagedecoder/JPEGBlitter.h new file mode 100644 index 0000000..9514b25 --- /dev/null +++ b/imagedecoder/JPEGBlitter.h @@ -0,0 +1,53 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2013 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. 
+* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +* Authors: +* Yao Cheng +* +*/ + +#ifndef JPEG_BLITTER_H +#define JPEG_BLITTER_H + +#include "../videovpp/VideoVPPBase.h" +#include "JPEGCommon.h" +#include + +class JpegDecoder; + +class JpegBlitter +{ +public: + JpegBlitter(); + virtual ~JpegBlitter(); + virtual void setDecoder(JpegDecoder &decoder); + virtual JpegDecodeStatus blit(RenderTarget &src, RenderTarget &dst); +private: + mutable Mutex mLock; + virtual void destroyContext(); + JpegDecoder *mDecoder; + VAConfigID mConfigId; + VAContextID mContextId; +}; + +#endif diff --git a/imagedecoder/JPEGBlitter_gen.cpp b/imagedecoder/JPEGBlitter_gen.cpp new file mode 100644 index 0000000..b1167d3 --- /dev/null +++ b/imagedecoder/JPEGBlitter_gen.cpp @@ -0,0 +1,441 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2013 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +* Authors: +* Yao Cheng +* +*/ +//#define LOG_NDEBUG 0 + +#include "JPEGBlitter.h" +#include "JPEGCommon_Gen.h" +#include "JPEGDecoder.h" + +#include +#include +#include "ImageDecoderTrace.h" + +#ifdef NDEBUG +#undef NDEBUG +#endif + +#include + +#define JD_CHECK(err, label) \ + if (err) { \ + ETRACE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ + goto label; \ + } + +#define JD_CHECK_RET(err, label, retcode) \ + if (err) { \ + status = retcode; \ + ETRACE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ + goto label; \ + } + +const VAProcColorStandardType fourcc2ColorStandard(uint32_t fourcc) +{ + switch(fourcc) { + case VA_FOURCC_NV12: + case VA_FOURCC_YUY2: + case VA_FOURCC_422H: + case VA_FOURCC_422V: + case VA_FOURCC_411P: + case VA_FOURCC_411R: + case VA_FOURCC_IMC3: + case VA_FOURCC_444P: + case VA_FOURCC_YV12: + return VAProcColorStandardBT601; + default: + return VAProcColorStandardNone; + } +} + +void write_to_file(const char *file, const VAImage *pImg, const uint8_t *pSrc) +{ + FILE *fp = fopen(file, "wb"); + if (!fp) { + return; + } + const uint8_t *pY, *pU, *pV, *pYUYV, *pRGBA, *pUV; + float h_samp_factor, v_samp_factor; + int row, col; + char fourccstr[5]; + VTRACE("Dumping %s buffer to %s", fourcc2str(fourccstr, pImg->format.fourcc), file); + switch (pImg->format.fourcc) { + case VA_FOURCC_IMC3: + h_samp_factor = 1; + v_samp_factor = 0.5; + break; + case VA_FOURCC_422H: + h_samp_factor = 0.5; + v_samp_factor = 1; + break; + case VA_FOURCC_444P: + h_samp_factor = 1; + v_samp_factor = 1; + break; + case VA_FOURCC_YUY2: + { + pYUYV = pSrc + pImg->offsets[0]; + VTRACE("YUY2 output width %u stride %u", pImg->width, pImg->pitches[0]); + for (row = 0; row < pImg->height; ++row) { + fwrite(pYUYV, 2, pImg->width, fp); + pYUYV += pImg->pitches[0]; + } + } + fclose(fp); + return; + case VA_FOURCC_NV12: + { + pY = pSrc + pImg->offsets[0]; + pUV = pSrc + pImg->offsets[1]; + VTRACE("NV12 output width %u stride %u, %u", pImg->width, pImg->pitches[0], pImg->pitches[1]); + for (row = 0; row < pImg->height; ++row) { + fwrite(pY, 1, pImg->width, fp); + pY += pImg->pitches[0]; + } + for (row = 0; row < pImg->height/2; ++row) { + fwrite(pUV, 1, pImg->width, fp); + pUV += pImg->pitches[1]; + } + } + fclose(fp); + return; + case VA_FOURCC_RGBA: + case VA_FOURCC_BGRA: + case VA_FOURCC_ARGB: + case VA_FOURCC('A', 'B', 'G', 'R'): + { + pRGBA = pSrc + pImg->offsets[0]; + VTRACE("RGBA output width %u stride %u", pImg->width, pImg->pitches[0]); + for (row = 0; row < pImg->height; ++row) { + fwrite(pRGBA, 4, pImg->width, fp); + pRGBA += pImg->pitches[0]; + } + } + fclose(fp); + return; + default: + // non-supported + { + char fourccstr[5]; + ETRACE("%s: Not-supported input YUV format", fourcc2str(fourccstr, pImg->format.fourcc)); + } + return; + } + pY = pSrc + pImg->offsets[0]; + pU = pSrc + pImg->offsets[1]; + pV = pSrc + pImg->offsets[2]; + // Y + for (row = 0; row < pImg->height; ++row) { + fwrite(pY, 1, pImg->width, fp); + pY += pImg->pitches[0]; + } + // U + for (row = 0; row < pImg->height * v_samp_factor; ++row) { + fwrite(pU, 1, pImg->width * h_samp_factor, fp); + pU += pImg->pitches[1]; + } + // V + for (row = 0; row < pImg->height * v_samp_factor; ++row) { + fwrite(pV, 1, pImg->width * h_samp_factor, fp); + pV += pImg->pitches[2]; + } + fclose(fp); +} + +static void write_to_YUY2(uint8_t *pDst, + uint32_t dst_w, + uint32_t dst_h, + uint32_t dst_stride, + const VAImage *pImg, + const uint8_t *pSrc) +{ + const uint8_t *pY, *pU, *pV; + float h_samp_factor, 
v_samp_factor; + int row, col; + char fourccstr[5]; + uint32_t copy_w = (dst_w < pImg->width)? dst_w: pImg->width; + uint32_t copy_h = (dst_h < pImg->height)? dst_h: pImg->height; + switch (pImg->format.fourcc) { + case VA_FOURCC_IMC3: + h_samp_factor = 0.5; + v_samp_factor = 0.5; + break; + case VA_FOURCC_422H: + h_samp_factor = 0.5; + v_samp_factor = 1; + break; + case VA_FOURCC_444P: + h_samp_factor = 1; + v_samp_factor = 1; + break; + default: + // non-supported + ETRACE("%s to YUY2: Not-supported input YUV format", fourcc2str(fourccstr, pImg->format.fourcc)); + return; + } + pY = pSrc + pImg->offsets[0]; + pU = pSrc + pImg->offsets[1]; + pV = pSrc + pImg->offsets[2]; + for (row = 0; row < copy_h; ++row) { + for (col = 0; col < copy_w; ++col) { + // Y + *(pDst + 2 * col) = *(pY + col); + uint32_t actual_col = h_samp_factor * col; + if (col % 2 == 1) { + // U + *(pDst + 2 * col + 1) = *(pU + actual_col); + } + else { + // V + *(pDst + 2 * col + 1) = *(pV + actual_col); + } + } + pDst += dst_stride; + pY += pImg->pitches[0]; + uint32_t actual_row = row * v_samp_factor; + pU = pSrc + pImg->offsets[1] + actual_row * pImg->pitches[1]; + pV = pSrc + pImg->offsets[2] + actual_row * pImg->pitches[2]; + } +} + +static void dumpSurface(const char* filename, VADisplay display, VASurfaceID surface) +{ + VAStatus st; + VAImage img; + uint8_t *buf; + st = vaDeriveImage(display, surface, &img); + if (st) { + ETRACE("vaDeriveImage failed with %d", st); + return; + } + uint32_t in_fourcc = img.format.fourcc; + VTRACE("Start dumping %s surface to %s", fourcc2str(NULL, in_fourcc), filename); + st = vaMapBuffer(display, img.buf, (void **)&buf); + if (st) { + ETRACE("vaMapBuffer failed with %d", st); + vaDestroyImage(display, img.image_id); + return; + } + VTRACE("start write_to_file"); + write_to_file(filename, &img, buf); + vaUnmapBuffer(display, img.buf); + vaDestroyImage(display, img.image_id); +} + +static void dumpGallocBuffer(const char* filename, + buffer_handle_t handle, + int width, + int height, + uint32_t fourcc) +{ + // NOT IMPLEMENTED +} + + +static JpegDecodeStatus swBlit(VADisplay display, VAContextID context, + VASurfaceID in_surf, VARectangle *in_rect, uint32_t in_fourcc, + VASurfaceID out_surf, VARectangle *out_rect, uint32_t out_fourcc) +{ + assert(out_fourcc == VA_FOURCC_YUY2); + assert((in_fourcc == VA_FOURCC_IMC3) || (in_fourcc == VA_FOURCC_422H) || (in_fourcc == VA_FOURCC_444P)); + VAStatus st; + char str[10]; + JpegDecodeStatus status; + VAImage in_img, out_img; + in_img.image_id = VA_INVALID_ID; + in_img.buf = VA_INVALID_ID; + out_img.image_id = VA_INVALID_ID; + out_img.buf = VA_INVALID_ID; + uint8_t *in_buf, *out_buf; + in_buf = out_buf = NULL; + st = vaDeriveImage(display, in_surf, &in_img); + JD_CHECK_RET(st, cleanup, JD_BLIT_FAILURE); + st = vaDeriveImage(display, out_surf, &out_img); + JD_CHECK_RET(st, cleanup, JD_BLIT_FAILURE); + st = vaMapBuffer(display, in_img.buf, (void **)&in_buf); + JD_CHECK_RET(st, cleanup, JD_BLIT_FAILURE); + st = vaMapBuffer(display, out_img.buf, (void **)&out_buf); + JD_CHECK_RET(st, cleanup, JD_BLIT_FAILURE); + VTRACE("%s in: %s, %ux%u, size %u, offset=%u,%u,%u, pitch=%u,%u,%u", __FUNCTION__, + fourcc2str(NULL, in_fourcc), + in_img.width, + in_img.height, + in_img.data_size, + in_img.offsets[0], in_img.offsets[1], in_img.offsets[2], + in_img.pitches[0], in_img.pitches[1], in_img.pitches[2]); + VTRACE("%s out: %s, %ux%u, size %u, offset=%u,%u,%u, pitch=%u,%u,%u", __FUNCTION__, + fourcc2str(NULL, out_fourcc), + out_img.width, + out_img.height, + 
out_img.data_size, + out_img.offsets[0], out_img.offsets[1], out_img.offsets[2], + out_img.pitches[0], out_img.pitches[1], out_img.pitches[2]); + write_to_YUY2(out_buf, out_img.width, out_img.height, out_img.pitches[0], &in_img, in_buf); + vaUnmapBuffer(display, in_img.buf); + vaUnmapBuffer(display, out_img.buf); + vaDestroyImage(display, in_img.image_id); + vaDestroyImage(display, out_img.image_id); + VTRACE("%s Finished SW CSC %s=>%s", __FUNCTION__, fourcc2str(str, in_fourcc), fourcc2str(str + 5, out_fourcc)); + return JD_SUCCESS; + +cleanup: + ETRACE("%s failed to do swBlit %s=>%s", __FUNCTION__, fourcc2str(str, in_fourcc), fourcc2str(str + 5, out_fourcc)); + if (in_buf != NULL) vaUnmapBuffer(display, in_img.buf); + if (out_buf != NULL) vaUnmapBuffer(display, out_img.buf); + if (in_img.image_id != VA_INVALID_ID) vaDestroyImage(display, in_img.image_id); + if (out_img.image_id != VA_INVALID_ID) vaDestroyImage(display, out_img.image_id); + return status; +} + +static JpegDecodeStatus hwBlit(VADisplay display, VAContextID context, + VASurfaceID in_surf, VARectangle *in_rect, uint32_t in_fourcc, + VASurfaceID out_surf, VARectangle *out_rect, uint32_t out_fourcc) +{ + VAProcPipelineCaps vpp_pipeline_cap ; + VABufferID vpp_pipeline_buf = VA_INVALID_ID; + VAProcPipelineParameterBuffer vpp_param; + VAStatus vpp_status; + JpegDecodeStatus status = JD_SUCCESS; + char str[10]; + nsecs_t t1, t2; + + memset(&vpp_param, 0, sizeof(VAProcPipelineParameterBuffer)); +#if PRE_TOUCH_SURFACE + //zeroSurfaces(display, &out_surf, 1); +#endif + t1 = systemTime(); + vpp_param.surface = in_surf; + vpp_param.output_region = out_rect; + vpp_param.surface_region = in_rect; + vpp_param.surface_color_standard = fourcc2ColorStandard(in_fourcc); + vpp_param.output_background_color = 0; + vpp_param.output_color_standard = fourcc2ColorStandard(out_fourcc); + vpp_param.filter_flags = VA_FRAME_PICTURE; + vpp_param.filters = NULL; + vpp_param.num_filters = 0; + vpp_param.forward_references = 0; + vpp_param.num_forward_references = 0; + vpp_param.backward_references = 0; + vpp_param.num_backward_references = 0; + vpp_param.blend_state = NULL; + vpp_param.rotation_state = VA_ROTATION_NONE; + vpp_status = vaCreateBuffer(display, + context, + VAProcPipelineParameterBufferType, + sizeof(VAProcPipelineParameterBuffer), + 1, + &vpp_param, + &vpp_pipeline_buf); + JD_CHECK_RET(vpp_status, cleanup, JD_RESOURCE_FAILURE); + + vpp_status = vaBeginPicture(display, + context, + out_surf); + JD_CHECK_RET(vpp_status, cleanup, JD_BLIT_FAILURE); + + //Render the picture + vpp_status = vaRenderPicture(display, + context, + &vpp_pipeline_buf, + 1); + JD_CHECK_RET(vpp_status, cleanup, JD_BLIT_FAILURE); + + vpp_status = vaEndPicture(display, context); + JD_CHECK_RET(vpp_status, cleanup, JD_BLIT_FAILURE); + + vaDestroyBuffer(display, vpp_pipeline_buf); + JD_CHECK_RET(vpp_status, cleanup, JD_BLIT_FAILURE); + t2 = systemTime(); + VTRACE("Finished HW CSC %s(%d,%d,%u,%u)=>%s(%d,%d,%u,%u) for %f ms", + fourcc2str(str, in_fourcc), + in_rect->x, in_rect->y, in_rect->width, in_rect->height, + fourcc2str(str + 5, out_fourcc), + out_rect->x, out_rect->y, out_rect->width, out_rect->height, + ns2us(t2 - t1)/1000.0); + + return JD_SUCCESS; +cleanup: + if (vpp_pipeline_buf != VA_INVALID_ID) + vaDestroyBuffer(display, vpp_pipeline_buf); + return status; +} + +static JpegDecodeStatus vaBlit(VADisplay display, VAContextID context, + VASurfaceID in_surf, VARectangle *in_rect, uint32_t in_fourcc, + VASurfaceID out_surf, VARectangle *out_rect, uint32_t out_fourcc) +{ 
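+    // Route to the hardware VPP path only for fourcc pairs the driver is
+    // known to handle (422H/NV12/YUY2/YV12/RGBA on either end); anything
+    // else falls back to swBlit(), the CPU conversion above, which only
+    // produces YUY2 from IMC3/422H/444P input.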
+ if (((in_fourcc == VA_FOURCC_422H) || + (in_fourcc == VA_FOURCC_NV12) || + (in_fourcc == VA_FOURCC_YUY2) || + (in_fourcc == VA_FOURCC_YV12) || + (in_fourcc == VA_FOURCC_RGBA)) + && + ((out_fourcc == VA_FOURCC_422H) || + (out_fourcc == VA_FOURCC_NV12) || + (out_fourcc == VA_FOURCC_YV12) || + (out_fourcc == VA_FOURCC_YUY2) || + (out_fourcc == VA_FOURCC_RGBA))) { + return hwBlit(display, context, in_surf, in_rect, in_fourcc, + out_surf, out_rect, out_fourcc); + } + else { + return swBlit(display, context, in_surf, in_rect, in_fourcc, + out_surf, out_rect, out_fourcc); + } +} + +JpegDecodeStatus JpegBlitter::blit(RenderTarget &src, RenderTarget &dst) +{ + if (mDecoder == NULL) + return JD_UNINITIALIZED; + JpegDecodeStatus st; + uint32_t src_fourcc, dst_fourcc; + char tmp[10]; + src_fourcc = pixelFormat2Fourcc(src.pixel_format); + dst_fourcc = pixelFormat2Fourcc(dst.pixel_format); + VASurfaceID src_surf = mDecoder->getSurfaceID(src); + if (src_surf == VA_INVALID_ID) { + ETRACE("%s invalid src %s target", __FUNCTION__, fourcc2str(NULL, src_fourcc)); + return JD_INVALID_RENDER_TARGET; + } + VASurfaceID dst_surf = mDecoder->getSurfaceID(dst); + if (dst_surf == VA_INVALID_ID) { + WTRACE("%s foreign dst target for JpegDecoder, create surface for it, not guaranteed to free it!!!", __FUNCTION__); + st = mDecoder->createSurfaceFromRenderTarget(dst, &dst_surf); + if (st != JD_SUCCESS || dst_surf == VA_INVALID_ID) { + ETRACE("%s failed to create surface for dst target", __FUNCTION__); + return JD_RESOURCE_FAILURE; + } + } + + VTRACE("%s blitting from %s to %s", __FUNCTION__, fourcc2str(tmp, src_fourcc), fourcc2str(tmp + 5, dst_fourcc)); + st = vaBlit(mDecoder->mDisplay, mContextId, src_surf, &src.rect, src_fourcc, + dst_surf, &dst.rect, dst_fourcc); + + return st; +} + diff --git a/imagedecoder/JPEGBlitter_img.cpp b/imagedecoder/JPEGBlitter_img.cpp new file mode 100644 index 0000000..d56ba98 --- /dev/null +++ b/imagedecoder/JPEGBlitter_img.cpp @@ -0,0 +1,35 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2013 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +* Authors: +* Yao Cheng +* +*/ + +#include "JPEGBlitter.h" + +JpegDecodeStatus JpegBlitter::blit(RenderTarget &src, RenderTarget &dst) +{ + return JD_OUTPUT_FORMAT_UNSUPPORTED; +} + diff --git a/imagedecoder/JPEGCommon.h b/imagedecoder/JPEGCommon.h new file mode 100644 index 0000000..6df6fcd --- /dev/null +++ b/imagedecoder/JPEGCommon.h @@ -0,0 +1,186 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2013 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +* Authors: +* Yao Cheng +* +*/ + +#ifndef JPEGCOMMON_H +#define JPEGCOMMON_H + +#include +#include +#include +#include + +#define JPEG_MAX_COMPONENTS 4 +#define JPEG_MAX_QUANT_TABLES 4 + + +#define RENDERTARGET_INTERNAL_BUFFER (RenderTarget::ANDROID_GRALLOC + 1) + +struct JpegInfo +{ + // in + uint8_t *buf; + size_t bufsize; + // out + uint32_t image_width; + uint32_t image_height; + uint32_t image_color_fourcc; + int image_pixel_format; + VAPictureParameterBufferJPEGBaseline picture_param_buf; + VASliceParameterBufferJPEGBaseline slice_param_buf[JPEG_MAX_COMPONENTS]; + VAIQMatrixBufferJPEGBaseline qmatrix_buf; + VAHuffmanTableBufferJPEGBaseline hufman_table_buf; + uint32_t dht_byte_offset[4]; + uint32_t dqt_byte_offset[4]; + uint32_t huffman_tables_num; + uint32_t quant_tables_num; + uint32_t soi_offset; + uint32_t eoi_offset; + uint32_t scan_ctrl_count; +}; + +enum JpegDecodeStatus +{ + JD_SUCCESS, + JD_UNINITIALIZED, + JD_ALREADY_INITIALIZED, + JD_RENDER_TARGET_TYPE_UNSUPPORTED, + JD_INPUT_FORMAT_UNSUPPORTED, + JD_OUTPUT_FORMAT_UNSUPPORTED, + JD_INVALID_RENDER_TARGET, + JD_RENDER_TARGET_NOT_INITIALIZED, + JD_CODEC_UNSUPPORTED, + JD_INITIALIZATION_ERROR, + JD_RESOURCE_FAILURE, + JD_DECODE_FAILURE, + JD_BLIT_FAILURE, + JD_ERROR_BITSTREAM, + JD_RENDER_TARGET_BUSY, +}; + + +inline char * fourcc2str(char * str, uint32_t fourcc) +{ + static char tmp[5]; + if (str == NULL) { + str = tmp; + memset(str, 0, sizeof str); + } + str[0] = fourcc & 0xff; + str[1] = (fourcc >> 8 )& 0xff; + str[2] = (fourcc >> 16) & 0xff; + str[3] = (fourcc >> 24)& 0xff; + str[4] = '\0'; + return str; +} + +inline int fourcc2VaFormat(uint32_t fourcc) +{ + switch(fourcc) { + case VA_FOURCC_422H: + case VA_FOURCC_422V: + case VA_FOURCC_YUY2: + return VA_RT_FORMAT_YUV422; + case VA_FOURCC_IMC3: + case VA_FOURCC_YV12: + case VA_FOURCC_NV12: + return VA_RT_FORMAT_YUV420; + case VA_FOURCC_444P: + return VA_RT_FORMAT_YUV444; + case VA_FOURCC_411P: + return VA_RT_FORMAT_YUV411; + case 
VA_FOURCC_BGRA: + case VA_FOURCC_ARGB: + case VA_FOURCC_RGBA: + return VA_RT_FORMAT_RGB32; + default: + return -1; + } +} + +inline uint32_t sampFactor2Fourcc(int h1, int h2, int h3, int v1, int v2, int v3) +{ + if (h1 == 2 && h2 == 1 && h3 == 1 && + v1 == 2 && v2 == 1 && v3 == 1) { + return VA_FOURCC_IMC3; + } + else if (h1 == 2 && h2 == 1 && h3 == 1 && + v1 == 1 && v2 == 1 && v3 == 1) { + return VA_FOURCC_422H; + } + else if (h1 == 1 && h2 == 1 && h3 == 1 && + v1 == 1 && v2 == 1 && v3 == 1) { + return VA_FOURCC_444P; + } + else if (h1 == 4 && h2 == 1 && h3 == 1 && + v1 == 1 && v2 == 1 && v3 == 1) { + return VA_FOURCC_411P; + } + else if (h1 == 1 && h2 == 1 && h3 == 1 && + v1 == 2 && v2 == 1 && v3 == 1) { + return VA_FOURCC_422V; + } + else if (h1 == 2 && h2 == 1 && h3 == 1 && + v1 == 2 && v2 == 2 && v3 == 2) { + return VA_FOURCC_422H; + } + else if (h2 == 2 && h2 == 2 && h3 == 2 && + v1 == 2 && v2 == 1 && v3 == 1) { + return VA_FOURCC_422V; + } + else + { + return VA_FOURCC('4','0','0','P'); + } +} + +inline int fourcc2LumaBitsPerPixel(uint32_t fourcc) +{ + switch(fourcc) { + case VA_FOURCC_422H: + case VA_FOURCC_422V: + case VA_FOURCC_IMC3: + case VA_FOURCC_YV12: + case VA_FOURCC_NV12: + case VA_FOURCC_444P: + case VA_FOURCC_411P: + return 1; + case VA_FOURCC_YUY2: + return 2; + case VA_FOURCC_BGRA: + case VA_FOURCC_ARGB: + case VA_FOURCC_RGBA: + return 4; + default: + return 1; + } +} + +extern int fourcc2PixelFormat(uint32_t fourcc); +extern uint32_t pixelFormat2Fourcc(int pixel_format); + +#endif diff --git a/imagedecoder/JPEGCommon_Gen.h b/imagedecoder/JPEGCommon_Gen.h new file mode 100644 index 0000000..2cc90ae --- /dev/null +++ b/imagedecoder/JPEGCommon_Gen.h @@ -0,0 +1,41 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2013 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +* Authors: +* Yao Cheng +* +*/ + +#ifndef JPEGCOMMON_GEN_H +#define JPEGCOMMON_GEN_H + +#include +#include +#include "JPEGCommon.h" +// temp workaround +#define HAL_PIXEL_FORMAT_YCbCr_422_H_INTEL HAL_PIXEL_FORMAT_YCrCb_422_H_INTEL // 422H (YU16) +#define HAL_PIXEL_FORMAT_IMC3 0x103 // IMC3 +#define HAL_PIXEL_FORMAT_444P 0x104 // 444P + +#endif + diff --git a/imagedecoder/JPEGCommon_Img.h b/imagedecoder/JPEGCommon_Img.h new file mode 100644 index 0000000..3473d20 --- /dev/null +++ b/imagedecoder/JPEGCommon_Img.h @@ -0,0 +1,34 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2013 Intel Corporation. All rights reserved. 
+* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +* Authors: +* Yao Cheng +* +*/ + +#ifndef JPEGCOMMON_IMG_H +#define JPEGCOMMON_IMG_H + +#include "JPEGCommon.h" + +#endif diff --git a/imagedecoder/JPEGDecoder.c b/imagedecoder/JPEGDecoder.c deleted file mode 100644 index 9dfe0a1..0000000 --- a/imagedecoder/JPEGDecoder.c +++ /dev/null @@ -1,1167 +0,0 @@ -/* INTEL CONFIDENTIAL -* Copyright (c) 2012 Intel Corporation. All rights reserved. -* Copyright (c) Imagination Technologies Limited, UK -* -* The source code contained or described herein and all documents -* related to the source code ("Material") are owned by Intel -* Corporation or its suppliers or licensors. Title to the -* Material remains with Intel Corporation or its suppliers and -* licensors. The Material contains trade secrets and proprietary -* and confidential information of Intel or its suppliers and -* licensors. The Material is protected by worldwide copyright and -* trade secret laws and treaty provisions. No part of the Material -* may be used, copied, reproduced, modified, published, uploaded, -* posted, transmitted, distributed, or disclosed in any way without -* Intel's prior express written permission. -* -* No license under any patent, copyright, trade secret or other -* intellectual property right is granted to or conferred upon you -* by disclosure or delivery of the Materials, either expressly, by -* implication, inducement, estoppel or otherwise. Any license -* under such intellectual property rights must be express and -* approved by Intel in writing. 
-* -* Authors: -* Nana Guo -* Yao Cheng -* -*/ - -#include "va/va_tpi.h" -#include "va/va_vpp.h" -#include "va/va_drmcommon.h" -#include "JPEGDecoder.h" -#include "ImageDecoderTrace.h" -#include "JPEGParser.h" -#include -#include "jerror.h" - -#define JPEG_MAX_SETS_HUFFMAN_TABLES 2 - -#define TABLE_CLASS_DC 0 -#define TABLE_CLASS_AC 1 -#define TABLE_CLASS_NUM 2 - -// for config -#define HW_DECODE_MIN_WIDTH 100 // for JPEG smaller than this, use SW decode -#define HW_DECODE_MIN_HEIGHT 100 // for JPEG smaller than this, use SW decode - -// for debug -#define DECODE_DUMP_FILE "" // no dump by default -#define YUY2_DUMP_FILE "" // no dump by default -#define RGBA_DUMP_FILE "" // no dump by default - -#define JD_CHECK(err, label) \ - if (err) { \ - ETRACE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ - goto label; \ - } - -#define JD_CHECK_RET(err, label, retcode) \ - if (err) { \ - status = retcode; \ - ETRACE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ - goto label; \ - } - -const char * fourcc2str(uint32_t fourcc) -{ - static char str[5]; - memset(str, 0, sizeof str); - str[0] = fourcc & 0xff; - str[1] = (fourcc >> 8 )& 0xff; - str[2] = (fourcc >> 16) & 0xff; - str[3] = (fourcc >> 24)& 0xff; - str[4] = '\0'; - return str; -} - -// VPG supports only YUY2->RGBA, YUY2->NV12_TILED now -// needs to convert IMC3/YV16/444P to YUY2 before HW CSC -static void write_to_YUY2(uint8_t *pDst, - uint32_t dst_stride, - VAImage *pImg, - uint8_t *pSrc) -{ - uint8_t *pY, *pU, *pV; - float h_samp_factor, v_samp_factor; - int row, col; - switch (pImg->format.fourcc) { - case VA_FOURCC_IMC3: - h_samp_factor = 0.5; - v_samp_factor = 0.5; - break; - case VA_FOURCC_422H: - h_samp_factor = 0.5; - v_samp_factor = 1; - break; - case VA_FOURCC_444P: - h_samp_factor = 1; - v_samp_factor = 1; - break; - default: - // non-supported - ETRACE("%s to YUY2: Not-supported input YUV format", fourcc2str(pImg->format.fourcc)); - return; - } - pY = pSrc + pImg->offsets[0]; - pU = pSrc + pImg->offsets[1]; - pV = pSrc + pImg->offsets[2]; - for (row = 0; row < pImg->height; ++row) { - for (col = 0; col < pImg->width; ++col) { - // Y - *(pDst + 2 * col) = *(pY + col); - uint32_t actual_col = h_samp_factor * col; - if (col % 2 == 1) { - // U - *(pDst + 2 * col + 1) = *(pU + actual_col); - } - else { - // V - *(pDst + 2 * col + 1) = *(pV + actual_col); - } - } - pDst += dst_stride; - pY += pImg->pitches[0]; - uint32_t actual_row = row * v_samp_factor; - pU = pSrc + pImg->offsets[1] + actual_row * pImg->pitches[1]; - pV = pSrc + pImg->offsets[2] + actual_row * pImg->pitches[2]; - } -} - -static void write_to_file(char *file, VAImage *pImg, uint8_t *pSrc) -{ - FILE *fp = fopen(file, "wb"); - if (!fp) { - return; - } - uint8_t *pY, *pU, *pV; - float h_samp_factor, v_samp_factor; - int row, col; - ITRACE("Dumping decoded YUV to %s", file); - switch (pImg->format.fourcc) { - case VA_FOURCC_IMC3: - h_samp_factor = 0.5; - v_samp_factor = 0.5; - break; - case VA_FOURCC_422H: - h_samp_factor = 0.5; - v_samp_factor = 1; - break; - case VA_FOURCC_444P: - h_samp_factor = 1; - v_samp_factor = 1; - break; - default: - // non-supported - ETRACE("%s to YUY2: Not-supported input YUV format", fourcc2str(pImg->format.fourcc)); - return; - } - pY = pSrc + pImg->offsets[0]; - pU = pSrc + pImg->offsets[1]; - pV = pSrc + pImg->offsets[2]; - // Y - for (row = 0; row < pImg->height; ++row) { - fwrite(pY, 1, pImg->width, fp); - pY += pImg->pitches[0]; - } - // U - for (row = 0; row < pImg->height * v_samp_factor; ++row) { - 
fwrite(pU, 1, pImg->width * h_samp_factor, fp); - pU += pImg->pitches[1]; - } - // V - for (row = 0; row < pImg->height * v_samp_factor; ++row) { - fwrite(pV, 1, pImg->width * h_samp_factor, fp); - pV += pImg->pitches[2]; - } - fclose(fp); -} - -/* - * Initialize VA API related stuff - * - * We will check the return value of jva_initialize - * to determine which path will be use (SW or HW) - * - */ -Decode_Status jdva_initialize (jd_libva_struct * jd_libva_ptr) { - /* - * Please note that we won't check the input parameters to follow the - * convention of libjpeg duo to we need these parameters to do error handling, - * and if these parameters are invalid, means the whole stack is crashed, so check - * them here and return false is meaningless, same situation for all internal methods - * related to VA API - */ - uint32_t va_major_version = 0; - uint32_t va_minor_version = 0; - VAStatus va_status = VA_STATUS_SUCCESS; - Decode_Status status = DECODE_SUCCESS; - uint32_t index; - - if (jd_libva_ptr->initialized) - return DECODE_NOT_STARTED; - - jd_libva_ptr->android_display = (Display*)malloc(sizeof(Display)); - if (jd_libva_ptr->android_display == NULL) { - return DECODE_MEMORY_FAIL; - } - jd_libva_ptr->va_display = vaGetDisplay (jd_libva_ptr->android_display); - - if (jd_libva_ptr->va_display == NULL) { - ETRACE("vaGetDisplay failed."); - free (jd_libva_ptr->android_display); - return DECODE_DRIVER_FAIL; - } - va_status = vaInitialize(jd_libva_ptr->va_display, &va_major_version, &va_minor_version); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaInitialize failed. va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - goto cleanup; - } - - /*if ((VA_RT_FORMAT_YUV444 & attrib.value) == 0) { - WTRACE("Format not surportted\n"); - status = DECODE_FAIL; - goto cleanup; - }*/ - - jd_libva_ptr->initialized = TRUE; - status = DECODE_SUCCESS; - -cleanup: -#if 0 - /*free profiles and entrypoints*/ - if (va_profiles) - free(va_profiles); - - if (va_entrypoints) - free (va_entrypoints); -#endif - if (status) { - jd_libva_ptr->initialized = TRUE; // make sure we can call into jva_deinitialize() - jdva_deinitialize (jd_libva_ptr); - return status; - } - - return status; -} - -void jdva_deinitialize (jd_libva_struct * jd_libva_ptr) { - if (!(jd_libva_ptr->initialized)) { - return; - } - - if (jd_libva_ptr->JPEGParser) { - free(jd_libva_ptr->JPEGParser); - jd_libva_ptr->JPEGParser = NULL; - } - - if (jd_libva_ptr->va_display) { - vaTerminate(jd_libva_ptr->va_display); - jd_libva_ptr->va_display = NULL; - } - - if (jd_libva_ptr->android_display) { - free(jd_libva_ptr->android_display); - jd_libva_ptr->android_display = NULL; - } - - jd_libva_ptr->initialized = FALSE; - ITRACE("jdva_deinitialize finished"); - return; -} - -static Decode_Status doColorConversion(jd_libva_struct *jd_libva_ptr, VASurfaceID surface, char ** buf, uint32_t rows) -{ -#ifdef JPEGDEC_USES_GEN - VAImage decoded_img; - uint8_t *decoded_buf = NULL; - VAImage yuy2_img; - uint8_t *yuy2_buf = NULL; - VAImage rgba_img; - uint8_t *rgba_buf = NULL; - int row, col; - VAStatus vpp_status; - uint8_t *pSrc, *pDst; - VADisplay display = NULL; - VAContextID context = VA_INVALID_ID; - VAConfigID config = VA_INVALID_ID; - VAConfigAttrib vpp_attrib; - VAProcPipelineParameterBuffer vpp_param; - VABufferID vpp_pipeline_buf = VA_INVALID_ID; - int major_version, minor_version; - VAProcPipelineCaps vpp_pipeline_cap ; - VARectangle src_rect, dst_rect; - int err; - Display vppdpy; - FILE *fp; - VASurfaceAttrib in_fourcc, out_fourcc; - 
VASurfaceID in_surf, out_surf; - Decode_Status status = DECODE_SUCCESS; - VASurfaceAttribExternalBuffers vaSurfaceExternBufIn, vaSurfaceExternBufOut; - decoded_img.image_id = VA_INVALID_ID; - yuy2_img.image_id = VA_INVALID_ID; - rgba_img.image_id = VA_INVALID_ID; - display = jd_libva_ptr->va_display; - - vpp_status = vaDeriveImage(display, surface, &decoded_img); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - vpp_status = vaMapBuffer(display, decoded_img.buf, (void **)&decoded_buf); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - write_to_file(DECODE_DUMP_FILE, &decoded_img, decoded_buf); - - ITRACE("Start HW CSC: color %s=>RGBA8888", fourcc2str(jd_libva_ptr->fourcc)); - - vpp_attrib.type = VAConfigAttribRTFormat; - vpp_attrib.value = VA_RT_FORMAT_YUV420; - vpp_status = vaCreateConfig(display, - VAProfileNone, - VAEntrypointVideoProc, - &vpp_attrib, - 1, - &config); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - vpp_status = vaCreateContext(display, - config, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - 0, - NULL, - 0, - &context); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - in_surf = out_surf = VA_INVALID_ID; - in_fourcc.type = VASurfaceAttribPixelFormat; - in_fourcc.flags = VA_SURFACE_ATTRIB_SETTABLE; - in_fourcc.value.type = VAGenericValueTypeInteger; - in_fourcc.value.value.i = VA_FOURCC_YUY2; - vpp_status = vaCreateSurfaces(display, - VA_RT_FORMAT_YUV422, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - &in_surf, - 1, - &in_fourcc, - 1); - vpp_status = vaDeriveImage(display, in_surf, &yuy2_img); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - vpp_status = vaMapBuffer(display, yuy2_img.buf, (void **)&yuy2_buf); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - write_to_YUY2(yuy2_buf, yuy2_img.pitches[0], &decoded_img, decoded_buf); - fp = fopen(YUY2_DUMP_FILE, "wb"); - if (fp) { - ITRACE("DUMP YUY2 to " YUY2_DUMP_FILE); - unsigned char *pYUV = yuy2_buf; - uint32_t loop; - for(loop=0;loopimage_height;loop++) - { - fwrite(pYUV, 2, jd_libva_ptr->image_width, fp); - pYUV += yuy2_img.pitches[0]; - } - fclose(fp); - } - vaUnmapBuffer(display, yuy2_img.buf); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - yuy2_buf = NULL; - vaDestroyImage(display, yuy2_img.image_id); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - yuy2_img.image_id = VA_INVALID_ID; - vaUnmapBuffer(display, decoded_img.buf); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - decoded_buf = NULL; - vaDestroyImage(display, decoded_img.image_id); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - decoded_img.image_id = VA_INVALID_ID; - - out_fourcc.type = VASurfaceAttribPixelFormat; - out_fourcc.flags = VA_SURFACE_ATTRIB_SETTABLE; - out_fourcc.value.type = VAGenericValueTypeInteger; - out_fourcc.value.value.i = VA_FOURCC_RGBA; - vpp_status = vaCreateSurfaces(display, - VA_RT_FORMAT_RGB32, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - &out_surf, - 1, - &out_fourcc, - 1); - - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - ITRACE("vaCreateSurfaces got surface %u=>%u", in_surf, out_surf); - //query caps for pipeline - vpp_status = vaQueryVideoProcPipelineCaps(display, - context, - NULL, - 0, - &vpp_pipeline_cap); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - src_rect.x = dst_rect.x = 0; - src_rect.y = dst_rect.y = 0; - src_rect.width = dst_rect.width = jd_libva_ptr->image_width; - src_rect.height = dst_rect.height = jd_libva_ptr->image_height; - 
ITRACE("from (%d, %d, %u, %u) to (%d, %d, %u, %u)", - src_rect.x, src_rect.y, src_rect.width, src_rect.height, - dst_rect.x, dst_rect.y, dst_rect.width, dst_rect.height); - vpp_param.surface = in_surf; - vpp_param.output_region = &dst_rect; - vpp_param.surface_region = &src_rect; - vpp_param.surface_color_standard = VAProcColorStandardBT601; //csc - vpp_param.output_background_color = 0x8000; //colorfill - vpp_param.output_color_standard = VAProcColorStandardNone; - vpp_param.filter_flags = VA_FRAME_PICTURE; - vpp_param.filters = NULL; - vpp_param.num_filters = 0; - vpp_param.forward_references = 0; - vpp_param.num_forward_references = 0; - vpp_param.backward_references = 0; - vpp_param.num_backward_references = 0; - vpp_param.blend_state = NULL; - vpp_param.rotation_state = VA_ROTATION_NONE; - vpp_status = vaCreateBuffer(display, - context, - VAProcPipelineParameterBufferType, - sizeof(VAProcPipelineParameterBuffer), - 1, - &vpp_param, - &vpp_pipeline_buf); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - vpp_status = vaBeginPicture(display, - context, - out_surf); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - //Render the picture - vpp_status = vaRenderPicture(display, - context, - &vpp_pipeline_buf, - 1); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - vpp_status = vaEndPicture(display, context); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - - vpp_status = vaSyncSurface(display, out_surf); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - ITRACE("Finished HW CSC YUY2=>RGBA8888"); - - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - ITRACE("Copy RGBA8888 buffer (%ux%u) to skia buffer (%ux%u)", - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - buf[1] - buf[0], - rows); - - vpp_status = vaDeriveImage(display, out_surf, &rgba_img); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - vpp_status = vaMapBuffer(display, rgba_img.buf, (void **)&rgba_buf); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - fp = fopen(RGBA_DUMP_FILE, "wb"); - if (fp) - ITRACE("DUMP RGBA to " RGBA_DUMP_FILE); - unsigned char *prgba = rgba_buf; - uint32_t loop; - for(loop=0;loopimage_height && loop < rows;loop++) - { - memcpy(buf[loop], prgba, 4 * jd_libva_ptr->image_width); - if (fp) - fwrite(prgba, 4, jd_libva_ptr->image_width, fp); - prgba += rgba_img.pitches[0]; - } - if (fp) - fclose(fp); - vaUnmapBuffer(display, rgba_img.buf); - JD_CHECK_RET(vpp_status, cleanup, DECODE_DRIVER_FAIL); - rgba_buf = NULL; - vaDestroyImage(display, rgba_img.image_id); - rgba_img.image_id = VA_INVALID_ID; - -cleanup: - if (vpp_pipeline_buf != VA_INVALID_ID) - vaDestroyBuffer(display, vpp_pipeline_buf); - if (in_surf != VA_INVALID_ID) - vaDestroySurfaces(display, &in_surf, 1); - if (out_surf != VA_INVALID_ID) - vaDestroySurfaces(display, &out_surf, 1); - if (rgba_buf) - vaUnmapBuffer(display, rgba_img.buf); - if (rgba_img.image_id != VA_INVALID_ID) - vaDestroyImage(display, rgba_img.image_id); - if (yuy2_buf) - vaUnmapBuffer(display, yuy2_img.buf); - if (yuy2_img.image_id != VA_INVALID_ID) - vaDestroyImage(display, yuy2_img.image_id); - if (decoded_buf) - vaUnmapBuffer(display, decoded_img.buf); - if (decoded_img.image_id != VA_INVALID_ID) - vaDestroyImage(display, decoded_img.image_id); - if (context != VA_INVALID_ID) - vaDestroyContext(display, context); - if (config != VA_INVALID_ID) - vaDestroyConfig(display, config); - return status; -#else - return DECODE_SUCCESS; -#endif -} - -static unsigned int getSurfaceFormat(jd_libva_struct * 
jd_libva_ptr, VASurfaceAttrib * fourcc) { - int h1, h2, h3, v1, v2, v3; - h1 = jd_libva_ptr->picture_param_buf.components[0].h_sampling_factor; - h2 = jd_libva_ptr->picture_param_buf.components[1].h_sampling_factor; - h3 = jd_libva_ptr->picture_param_buf.components[2].h_sampling_factor; - v1 = jd_libva_ptr->picture_param_buf.components[0].v_sampling_factor; - v2 = jd_libva_ptr->picture_param_buf.components[1].v_sampling_factor; - v3 = jd_libva_ptr->picture_param_buf.components[2].v_sampling_factor; - - fourcc->type = VASurfaceAttribPixelFormat; - fourcc->flags = VA_SURFACE_ATTRIB_SETTABLE; - fourcc->value.type = VAGenericValueTypeInteger; - - if (h1 == 2 && h2 == 1 && h3 == 1 && - v1 == 2 && v2 == 1 && v3 == 1) { - fourcc->value.value.i = VA_FOURCC_IMC3; - return VA_RT_FORMAT_YUV420; - } - else if (h1 == 2 && h2 == 1 && h3 == 1 && - v1 == 1 && v2 == 1 && v3 == 1) { - fourcc->value.value.i = VA_FOURCC_422H; - return VA_RT_FORMAT_YUV422; - } - else if (h1 == 1 && h2 == 1 && h3 == 1 && - v1 == 1 && v2 == 1 && v3 == 1) { - fourcc->value.value.i = VA_FOURCC_444P; - return VA_RT_FORMAT_YUV444; - } - else if (h1 == 4 && h2 == 1 && h3 == 1 && - v1 == 1 && v2 == 1 && v3 == 1) { - fourcc->value.value.i = VA_FOURCC_411P; - ITRACE("SurfaceFormat: 411P"); - return VA_RT_FORMAT_YUV411; - } - else if (h1 == 1 && h2 == 1 && h3 == 1 && - v1 == 2 && v2 == 1 && v3 == 1) { - fourcc->value.value.i = VA_FOURCC_422V; - return VA_RT_FORMAT_YUV422; - } - else if (h1 == 2 && h2 == 1 && h3 == 1 && - v1 == 2 && v2 == 2 && v3 == 2) { - fourcc->value.value.i = VA_FOURCC_422H; - return VA_RT_FORMAT_YUV422; - } - else if (h2 == 2 && h2 == 2 && h3 == 2 && - v1 == 2 && v2 == 1 && v3 == 1) { - fourcc->value.value.i = VA_FOURCC_422V; - return VA_RT_FORMAT_YUV422; - } - else - { - fourcc->value.value.i = VA_FOURCC('4','0','0','P'); - return VA_RT_FORMAT_YUV400; - } - -} - -Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr) { - VAStatus va_status = VA_STATUS_SUCCESS; - Decode_Status status = DECODE_SUCCESS; - jd_libva_ptr->image_width = jd_libva_ptr->picture_param_buf.picture_width; - jd_libva_ptr->image_height = jd_libva_ptr->picture_param_buf.picture_height; - jd_libva_ptr->surface_count = 1; - jd_libva_ptr->va_surfaces = (VASurfaceID *) malloc(sizeof(VASurfaceID)*jd_libva_ptr->surface_count); - if (jd_libva_ptr->va_surfaces == NULL) { - return DECODE_MEMORY_FAIL; - } - - - jd_libva_ptr->resource_allocated = TRUE; - return status; -cleanup: - jd_libva_ptr->resource_allocated = FALSE; - - if (jd_libva_ptr->va_surfaces) { - free (jd_libva_ptr->va_surfaces); - jd_libva_ptr->va_surfaces = NULL; - } - jdva_deinitialize (jd_libva_ptr); - - return DECODE_DRIVER_FAIL; -} - -Decode_Status jdva_release_resource (jd_libva_struct * jd_libva_ptr) { - Decode_Status status = DECODE_SUCCESS; - VAStatus va_status = VA_STATUS_SUCCESS; - - if (!(jd_libva_ptr->resource_allocated)) { - return status; - } - - if (!(jd_libva_ptr->va_display)) { - return status; //most likely the resource are already released and HW jpeg is deinitialize, return directly - } - - /* - * It is safe to destroy Surface/Config/Context severl times - * and it is also safe even their value is NULL - */ - -cleanup: - jd_libva_ptr->va_config = VA_INVALID_ID; - - jd_libva_ptr->resource_allocated = FALSE; - - return va_status; -} - -Decode_Status jdva_decode (j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr) { - Decode_Status status = DECODE_SUCCESS; - VAStatus va_status = VA_STATUS_SUCCESS; - VABufferID desc_buf[5]; - uint32_t bitstream_buffer_size = 0; 
- uint32_t scan_idx = 0; - uint32_t buf_idx = 0; - char **buf = jd_libva_ptr->output_image; - uint32_t lines = jd_libva_ptr->output_lines; - uint32_t chopping = VA_SLICE_DATA_FLAG_ALL; - uint32_t bytes_remaining; - VAConfigAttrib attrib; - attrib.type = VAConfigAttribRTFormat; - va_status = vaGetConfigAttributes(jd_libva_ptr->va_display, VAProfileJPEGBaseline, VAEntrypointVLD, &attrib, 1); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaGetConfigAttributes failed. va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - goto cleanup; - } - va_status = vaCreateConfig(jd_libva_ptr->va_display, VAProfileJPEGBaseline, VAEntrypointVLD, &attrib, 1, &(jd_libva_ptr->va_config)); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateConfig failed. va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - goto cleanup; - } - VASurfaceAttrib fourcc; - unsigned int surface_format = getSurfaceFormat(jd_libva_ptr, &fourcc); - jd_libva_ptr->fourcc = fourcc.value.value.i; -#ifdef JPEGDEC_USES_GEN - va_status = vaCreateSurfaces(jd_libva_ptr->va_display, surface_format, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - jd_libva_ptr->va_surfaces, - jd_libva_ptr->surface_count, &fourcc, 1); -#else - va_status = vaCreateSurfaces(jd_libva_ptr->va_display, VA_RT_FORMAT_YUV444, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - jd_libva_ptr->va_surfaces, - jd_libva_ptr->surface_count, NULL, 0); -#endif - JD_CHECK(va_status, cleanup); - va_status = vaCreateContext(jd_libva_ptr->va_display, jd_libva_ptr->va_config, - jd_libva_ptr->image_width, - jd_libva_ptr->image_height, - 0, //VA_PROGRESSIVE - jd_libva_ptr->va_surfaces, - jd_libva_ptr->surface_count, &(jd_libva_ptr->va_context)); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateContext failed. va_status = 0x%x", va_status); - return DECODE_DRIVER_FAIL; - } - - if (jd_libva_ptr->eoi_offset) - bytes_remaining = jd_libva_ptr->eoi_offset - jd_libva_ptr->soi_offset; - else - bytes_remaining = jd_libva_ptr->file_size - jd_libva_ptr->soi_offset; - uint32_t src_offset = jd_libva_ptr->soi_offset; - uint32_t cpy_row; - bitstream_buffer_size = cinfo->src->bytes_in_buffer;//1024*1024*5; - - va_status = vaBeginPicture(jd_libva_ptr->va_display, jd_libva_ptr->va_context, jd_libva_ptr->va_surfaces[0]); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaBeginPicture failed. va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - return status; - } - va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAPictureParameterBufferType, sizeof(VAPictureParameterBufferJPEGBaseline), 1, &jd_libva_ptr->picture_param_buf, &desc_buf[buf_idx]); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateBuffer VAPictureParameterBufferType failed. va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - return status; - } - buf_idx++; - va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAIQMatrixBufferType, sizeof(VAIQMatrixBufferJPEGBaseline), 1, &jd_libva_ptr->qmatrix_buf, &desc_buf[buf_idx]); - - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateBuffer VAIQMatrixBufferType failed. 
va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - return status; - } - buf_idx++; - va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VAHuffmanTableBufferType, sizeof(VAHuffmanTableBufferJPEGBaseline), 1, &jd_libva_ptr->hufman_table_buf, &desc_buf[buf_idx]); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateBuffer VAHuffmanTableBufferType failed. va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - return status; - } - buf_idx++; - do { - /* Get Bitstream Buffer */ - uint32_t bytes = ( bytes_remaining < bitstream_buffer_size ) ? bytes_remaining : bitstream_buffer_size; - bytes_remaining -= bytes; - /* Get Slice Control Buffer */ - VASliceParameterBufferJPEGBaseline dest_scan_ctrl[JPEG_MAX_COMPONENTS]; - uint32_t src_idx = 0; - uint32_t dest_idx = 0; - memset(dest_scan_ctrl, 0, sizeof(dest_scan_ctrl)); - for (src_idx = scan_idx; src_idx < jd_libva_ptr->scan_ctrl_count ; src_idx++) { - if (jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset) { - /* new scan, reset state machine */ - chopping = VA_SLICE_DATA_FLAG_ALL; - fprintf(stderr,"Scan:%i FileOffset:%x Bytes:%x \n", src_idx, - jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset, - jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_size ); - /* does the slice end in the buffer */ - if (jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset + jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_size > bytes + src_offset) { - chopping = VA_SLICE_DATA_FLAG_BEGIN; - } - } else { - if (jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_size > bytes) { - chopping = VA_SLICE_DATA_FLAG_MIDDLE; - } else { - if ((chopping == VA_SLICE_DATA_FLAG_BEGIN) || (chopping == VA_SLICE_DATA_FLAG_MIDDLE)) { - chopping = VA_SLICE_DATA_FLAG_END; - } - } - } - dest_scan_ctrl[dest_idx].slice_data_flag = chopping; - dest_scan_ctrl[dest_idx].slice_data_offset = ((chopping == VA_SLICE_DATA_FLAG_ALL) || (chopping == VA_SLICE_DATA_FLAG_BEGIN) )? -jd_libva_ptr->slice_param_buf[ src_idx ].slice_data_offset : 0; - - const int32_t bytes_in_seg = bytes - dest_scan_ctrl[dest_idx].slice_data_offset; - const uint32_t scan_data = (bytes_in_seg < jd_libva_ptr->slice_param_buf[src_idx].slice_data_size) ? bytes_in_seg : jd_libva_ptr->slice_param_buf[src_idx].slice_data_size ; - jd_libva_ptr->slice_param_buf[src_idx].slice_data_offset = 0; - jd_libva_ptr->slice_param_buf[src_idx].slice_data_size -= scan_data; - dest_scan_ctrl[dest_idx].slice_data_size = scan_data; - dest_scan_ctrl[dest_idx].num_components = jd_libva_ptr->slice_param_buf[src_idx].num_components; - dest_scan_ctrl[dest_idx].restart_interval = jd_libva_ptr->slice_param_buf[src_idx].restart_interval; - memcpy(&dest_scan_ctrl[dest_idx].components, & jd_libva_ptr->slice_param_buf[ src_idx ].components, - sizeof(jd_libva_ptr->slice_param_buf[ src_idx ].components) ); - dest_idx++; - if ((chopping == VA_SLICE_DATA_FLAG_ALL) || (chopping == VA_SLICE_DATA_FLAG_END)) { /* all good good */ - } else { - break; - } - } - scan_idx = src_idx; - /* Get Slice Control Buffer */ - va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VASliceParameterBufferType, sizeof(VASliceParameterBufferJPEGBaseline) * dest_idx, 1, dest_scan_ctrl, &desc_buf[buf_idx]); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateBuffer VASliceParameterBufferType failed. 
va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - return status; - } - buf_idx++; - va_status = vaCreateBuffer(jd_libva_ptr->va_display, jd_libva_ptr->va_context, VASliceDataBufferType, bytes, 1, &jd_libva_ptr->bitstream_buf[ src_offset ], &desc_buf[buf_idx]); - buf_idx++; - if (va_status != VA_STATUS_SUCCESS) { - status = DECODE_DRIVER_FAIL; - return status; - } - va_status = vaRenderPicture( jd_libva_ptr->va_display, jd_libva_ptr->va_context, desc_buf, buf_idx); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaRenderPicture failed. va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - return status; - } - buf_idx = 0; - - src_offset += bytes; - } while (bytes_remaining); - - va_status = vaEndPicture(jd_libva_ptr->va_display, jd_libva_ptr->va_context); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaRenderPicture failed. va_status = 0x%x", va_status); - status = DECODE_DRIVER_FAIL; - return status; - } - - va_status = vaSyncSurface(jd_libva_ptr->va_display, jd_libva_ptr->va_surfaces[0]); - if (va_status != VA_STATUS_SUCCESS) { - WTRACE("vaSyncSurface failed. va_status = 0x%x", va_status); - } - - va_status = vaDestroyContext(jd_libva_ptr->va_display, jd_libva_ptr->va_context); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaDestroyContext failed. va_status = 0x%x", va_status); - return DECODE_DRIVER_FAIL; - } - jd_libva_ptr->va_context = VA_INVALID_ID; - - - - va_status = vaDestroyConfig(jd_libva_ptr->va_display, jd_libva_ptr->va_config); - if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaDestroyConfig failed. va_status = 0x%x", va_status); - return DECODE_DRIVER_FAIL; - } - status = doColorConversion(jd_libva_ptr, - jd_libva_ptr->va_surfaces[0], - buf, lines); - va_status = vaDestroySurfaces(jd_libva_ptr->va_display, jd_libva_ptr->va_surfaces, jd_libva_ptr->surface_count); - ITRACE("Successfully decoded picture"); - - if (jd_libva_ptr->va_surfaces) { - free (jd_libva_ptr->va_surfaces); - jd_libva_ptr->va_surfaces = NULL; - } - - - return status; -cleanup: - return DECODE_DRIVER_FAIL; -} - -Decode_Status parseBitstream(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr) { - uint32_t component_order = 0 ; - uint32_t dqt_ind = 0; - uint32_t dht_ind = 0; - uint32_t scan_ind = 0; - boolean frame_marker_found = FALSE; - int i; - - uint8_t marker = jd_libva_ptr->JPEGParser->getNextMarker(jd_libva_ptr->JPEGParser); - - while (marker != CODE_EOI &&( !jd_libva_ptr->JPEGParser->endOfBuffer(jd_libva_ptr->JPEGParser))) { - switch (marker) { - case CODE_SOI: { - jd_libva_ptr->soi_offset = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser) - 2; - break; - } - // If the marker is an APP marker skip over the data - case CODE_APP0: - case CODE_APP1: - case CODE_APP2: - case CODE_APP3: - case CODE_APP4: - case CODE_APP5: - case CODE_APP6: - case CODE_APP7: - case CODE_APP8: - case CODE_APP9: - case CODE_APP10: - case CODE_APP11: - case CODE_APP12: - case CODE_APP13: - case CODE_APP14: - case CODE_APP15: { - - uint32_t bytes_to_burn = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2) - 2; - jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, bytes_to_burn); - break; - } - // Store offset to DQT data to avoid parsing bitstream in user mode - case CODE_DQT: { - if (dqt_ind < 4) { - jd_libva_ptr->dqt_byte_offset[dqt_ind] = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser) - jd_libva_ptr->soi_offset; - dqt_ind++; - uint32_t bytes_to_burn = jd_libva_ptr->JPEGParser->readBytes( jd_libva_ptr->JPEGParser, 2 ) - 2; - 
jd_libva_ptr->JPEGParser->burnBytes( jd_libva_ptr->JPEGParser, bytes_to_burn ); - } else { - ETRACE("ERROR: Decoder does not support more than 4 Quant Tables\n"); - return DECODE_PARSER_FAIL; - } - break; - } - // Throw exception for all SOF marker other than SOF0 - case CODE_SOF1: - case CODE_SOF2: - case CODE_SOF3: - case CODE_SOF5: - case CODE_SOF6: - case CODE_SOF7: - case CODE_SOF8: - case CODE_SOF9: - case CODE_SOF10: - case CODE_SOF11: - case CODE_SOF13: - case CODE_SOF14: - case CODE_SOF15: { - ETRACE("ERROR: unsupport SOF\n"); - break; - } - // Parse component information in SOF marker - case CODE_SOF_BASELINE: { - frame_marker_found = TRUE; - - jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, 2); // Throw away frame header length - uint8_t sample_precision = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); - if (sample_precision != 8) { - ETRACE("sample_precision is not supported\n"); - return DECODE_PARSER_FAIL; - } - // Extract pic width and height - jd_libva_ptr->picture_param_buf.picture_height = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2); - jd_libva_ptr->picture_param_buf.picture_width = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2); - jd_libva_ptr->picture_param_buf.num_components = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); - - if (jd_libva_ptr->picture_param_buf.num_components > JPEG_MAX_COMPONENTS) { - ETRACE("ERROR: reached max components\n"); - return DECODE_PARSER_FAIL; - } - if (jd_libva_ptr->picture_param_buf.picture_height < HW_DECODE_MIN_HEIGHT - || jd_libva_ptr->picture_param_buf.picture_width < HW_DECODE_MIN_WIDTH) { - ITRACE("PERFORMANCE: %ux%u JPEG will decode faster with SW\n", - jd_libva_ptr->picture_param_buf.picture_width, - jd_libva_ptr->picture_param_buf.picture_height); - return DECODE_PARSER_FAIL; - } - uint8_t comp_ind = 0; - for (comp_ind = 0; comp_ind < jd_libva_ptr->picture_param_buf.num_components; comp_ind++) { - jd_libva_ptr->picture_param_buf.components[comp_ind].component_id = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); - - uint8_t hv_sampling = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); - jd_libva_ptr->picture_param_buf.components[comp_ind].h_sampling_factor = hv_sampling >> 4; - jd_libva_ptr->picture_param_buf.components[comp_ind].v_sampling_factor = hv_sampling & 0xf; - jd_libva_ptr->picture_param_buf.components[comp_ind].quantiser_table_selector = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); - } - - - break; - } - // Store offset to DHT data to avoid parsing bitstream in user mode - case CODE_DHT: { - if (dht_ind < 4) { - jd_libva_ptr->dht_byte_offset[dht_ind] = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser) - jd_libva_ptr->soi_offset; - dht_ind++; - uint32_t bytes_to_burn = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2) - 2; - jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, bytes_to_burn ); - } else { - ETRACE("ERROR: Decoder does not support more than 4 Huff Tables\n"); - return DECODE_PARSER_FAIL; - } - break; - } - // Parse component information in SOS marker - case CODE_SOS: { - jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, 2); - uint32_t component_in_scan = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); - uint8_t comp_ind = 0; - - for (comp_ind = 0; comp_ind < component_in_scan; comp_ind++) { - uint8_t comp_id = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); - uint8_t 
comp_data_ind; - for (comp_data_ind = 0; comp_data_ind < jd_libva_ptr->picture_param_buf.num_components; comp_data_ind++) { - if (comp_id == jd_libva_ptr->picture_param_buf.components[comp_data_ind].component_id) { - jd_libva_ptr->slice_param_buf[scan_ind].components[comp_ind].component_selector = comp_data_ind + 1; - break; - } - } - uint8_t huffman_tables = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); - jd_libva_ptr->slice_param_buf[scan_ind].components[comp_ind].dc_table_selector = huffman_tables >> 4; - jd_libva_ptr->slice_param_buf[scan_ind].components[comp_ind].ac_table_selector = huffman_tables & 0xf; - } - uint32_t curr_byte = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); // Ss - if (curr_byte != 0) { - ETRACE("ERROR: curr_byte 0x%08x != 0\n", curr_byte); - return DECODE_PARSER_FAIL; - } - curr_byte = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); // Se - if (curr_byte != 0x3f) { - ETRACE("ERROR: curr_byte 0x%08x != 0x3f\n", curr_byte); - return DECODE_PARSER_FAIL; - } - curr_byte = jd_libva_ptr->JPEGParser->readNextByte(jd_libva_ptr->JPEGParser); // Ah, Al - if (curr_byte != 0) { - ETRACE("ERROR: curr_byte 0x%08x != 0\n", curr_byte); - return DECODE_PARSER_FAIL; - } - // Set slice control variables needed - jd_libva_ptr->slice_param_buf[scan_ind].slice_data_offset = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser) - jd_libva_ptr->soi_offset; - jd_libva_ptr->slice_param_buf[scan_ind].num_components = component_in_scan; - if (scan_ind) { - /* If there is more than one scan, the slice for all but the final scan should only run up to the beginning of the next scan */ - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_size = - (jd_libva_ptr->slice_param_buf[scan_ind].slice_data_offset - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_offset );; - } - scan_ind++; - jd_libva_ptr->scan_ctrl_count++; // gsDXVA2Globals.uiScanCtrlCount - break; - } - case CODE_DRI: { - uint32_t size = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2); - jd_libva_ptr->slice_param_buf[scan_ind].restart_interval = jd_libva_ptr->JPEGParser->readBytes(jd_libva_ptr->JPEGParser, 2); - jd_libva_ptr->JPEGParser->burnBytes(jd_libva_ptr->JPEGParser, (size - 4)); - break; - } - default: - break; - } - - marker = jd_libva_ptr->JPEGParser->getNextMarker(jd_libva_ptr->JPEGParser); - // If the EOI code is found, store the byte offset before the parsing finishes - if( marker == CODE_EOI ) { - jd_libva_ptr->eoi_offset = jd_libva_ptr->JPEGParser->getByteOffset(jd_libva_ptr->JPEGParser); - } - - } - - jd_libva_ptr->quant_tables_num = dqt_ind; - jd_libva_ptr->huffman_tables_num = dht_ind; - - /* The slice for the last scan should run up to the end of the picture */ - if (jd_libva_ptr->eoi_offset) { - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_size = (jd_libva_ptr->eoi_offset - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_offset); - } - else { - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_size = (jd_libva_ptr->file_size - jd_libva_ptr->slice_param_buf[scan_ind - 1].slice_data_offset); - } - // throw AppException if SOF0 isn't found - if (!frame_marker_found) { - ETRACE("EEORR: Reached end of bitstream while trying to parse headers\n"); - return DECODE_PARSER_FAIL; - } - - Decode_Status status = parseTableData(cinfo, jd_libva_ptr); - if (status != DECODE_SUCCESS) { - ETRACE("ERROR: Parsing table data returns %d", status); - } - cinfo->original_image_width = 
jd_libva_ptr->picture_param_buf.picture_width; /* nominal image width (from SOF marker) */ - cinfo->image_width = jd_libva_ptr->picture_param_buf.picture_width; /* nominal image width (from SOF marker) */ - cinfo->image_height = jd_libva_ptr->picture_param_buf.picture_height; /* nominal image height */ - cinfo->num_components = jd_libva_ptr->picture_param_buf.num_components; /* # of color components in JPEG image */ - cinfo->jpeg_color_space = JCS_YCbCr; /* colorspace of JPEG image */ - cinfo->out_color_space = JCS_RGB; /* colorspace for output */ - cinfo->src->bytes_in_buffer = jd_libva_ptr->file_size; - - ITRACE("Successfully parsed table"); - return status; - -} - -Decode_Status parseTableData(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr) { - CJPEGParse* parser = (CJPEGParse*)malloc(sizeof(CJPEGParse)); - if (parser == NULL) { - ETRACE("%s ERROR: Parsing table data returns %d", __FUNCTION__, DECODE_MEMORY_FAIL); - return DECODE_MEMORY_FAIL; - } - - parserInitialize(parser, jd_libva_ptr->bitstream_buf, jd_libva_ptr->file_size); - - // Parse Quant tables - memset(&jd_libva_ptr->qmatrix_buf, 0, sizeof(jd_libva_ptr->qmatrix_buf)); - uint32_t dqt_ind = 0; - for (dqt_ind = 0; dqt_ind < jd_libva_ptr->quant_tables_num; dqt_ind++) { - if (parser->setByteOffset(parser, jd_libva_ptr->dqt_byte_offset[dqt_ind])) { - // uint32_t uiTableBytes = parser->readBytes( 2 ) - 2; - uint32_t table_bytes = parser->readBytes( parser, 2 ) - 2; - do { - uint32_t table_info = parser->readNextByte(parser); - table_bytes--; - uint32_t table_length = table_bytes > 64 ? 64 : table_bytes; - uint32_t table_precision = table_info >> 4; - if (table_precision != 0) { - ETRACE("%s ERROR: Parsing table data returns %d", __FUNCTION__, DECODE_PARSER_FAIL); - return DECODE_PARSER_FAIL; - } - uint32_t table_id = table_info & 0xf; - - jd_libva_ptr->qmatrix_buf.load_quantiser_table[table_id] = 1; - - if (table_id < JPEG_MAX_QUANT_TABLES) { - // Pull Quant table data from bitstream - uint32_t byte_ind; - for (byte_ind = 0; byte_ind < table_length; byte_ind++) { - jd_libva_ptr->qmatrix_buf.quantiser_table[table_id][byte_ind] = parser->readNextByte(parser); - } - } else { - ETRACE("%s DQT table ID is not supported", __FUNCTION__); - parser->burnBytes(parser, table_length); - } - table_bytes -= table_length; - } while (table_bytes); - } - } - - // Parse Huffman tables - memset(&jd_libva_ptr->hufman_table_buf, 0, sizeof(jd_libva_ptr->hufman_table_buf)); - uint32_t dht_ind = 0; - for (dht_ind = 0; dht_ind < jd_libva_ptr->huffman_tables_num; dht_ind++) { - if (parser->setByteOffset(parser, jd_libva_ptr->dht_byte_offset[dht_ind])) { - uint32_t table_bytes = parser->readBytes( parser, 2 ) - 2; - do { - uint32_t table_info = parser->readNextByte(parser); - table_bytes--; - uint32_t table_class = table_info >> 4; // Identifies whether the table is for AC or DC - uint32_t table_id = table_info & 0xf; - jd_libva_ptr->hufman_table_buf.load_huffman_table[table_id] = 1; - - if ((table_class < TABLE_CLASS_NUM) && (table_id < JPEG_MAX_SETS_HUFFMAN_TABLES)) { - if (table_class == 0) { - uint8_t* bits = parser->getCurrentIndex(parser); - // Find out the number of entries in the table - uint32_t table_entries = 0; - uint32_t bit_ind; - for (bit_ind = 0; bit_ind < 16; bit_ind++) { - jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind] = bits[bit_ind]; - table_entries += jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind]; - } - - // Create table of code values - parser->burnBytes(parser, 16); - 
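The table loops above (and their duplicates in the new JpegDecoder::parseTableData()) re-walk each DHT segment located during the marker scan. The segment layout they rely on, restated as a compact sketch against the same CJPEGParse interface used throughout this patch; the helper name and fixed-size output arrays are illustrative:

    // One DHT table: a class/id byte, sixteen code-length counts, then as
    // many symbol values as the counts add up to (shortest codes first).
    struct HuffTableSketch {
        uint32_t table_class;   // 0 = DC, 1 = AC
        uint32_t table_id;
        uint8_t  counts[16];    // counts[i] = number of codes of length i+1
        uint8_t  values[256];
    };

    static void read_one_dht_table(CJPEGParse *p, HuffTableSketch *t)
    {
        uint32_t info = p->readNextByte(p);
        t->table_class = info >> 4;
        t->table_id = info & 0xf;
        uint32_t entries = 0;
        for (int i = 0; i < 16; i++) {
            t->counts[i] = p->readNextByte(p);
            entries += t->counts[i];
        }
        for (uint32_t i = 0; i < entries && i < 256; i++)
            t->values[i] = p->readNextByte(p);
    }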
table_bytes -= 16; - uint32_t tbl_ind; - for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { - jd_libva_ptr->hufman_table_buf.huffman_table[table_id].dc_values[tbl_ind] = parser->readNextByte(parser); - table_bytes--; - } - - } else { // for AC class - uint8_t* bits = parser->getCurrentIndex(parser); - // Find out the number of entries in the table - uint32_t table_entries = 0; - uint32_t bit_ind = 0; - for (bit_ind = 0; bit_ind < 16; bit_ind++) { - jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind] = bits[bit_ind]; - table_entries += jd_libva_ptr->hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind]; - } - - // Create table of code values - parser->burnBytes(parser, 16); - table_bytes -= 16; - uint32_t tbl_ind = 0; - for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { - jd_libva_ptr->hufman_table_buf.huffman_table[table_id].ac_values[tbl_ind] = parser->readNextByte(parser); - table_bytes--; - } - }//end of else - } else { - // Find out the number of entries in the table - ETRACE("%s DHT table ID is not supported", __FUNCTION__); - uint32_t table_entries = 0; - uint32_t bit_ind = 0; - for(bit_ind = 0; bit_ind < 16; bit_ind++) { - table_entries += parser->readNextByte(parser); - table_bytes--; - } - parser->burnBytes(parser, table_entries); - table_bytes -= table_entries; - } - - } while (table_bytes); - } - } - - if (parser) { - free(parser); - parser = NULL; - } - return DECODE_SUCCESS; -} - diff --git a/imagedecoder/JPEGDecoder.cpp b/imagedecoder/JPEGDecoder.cpp new file mode 100644 index 0000000..5e05464 --- /dev/null +++ b/imagedecoder/JPEGDecoder.cpp @@ -0,0 +1,849 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012, 2013 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +* Authors: +* Nana Guo +* Yao Cheng +* +*/ +//#define LOG_NDEBUG 0 + +#include +#include +#include "JPEGDecoder.h" +#include "JPEGParser.h" +#include "JPEGBlitter.h" +#include "ImageDecoderTrace.h" + +#ifdef NDEBUG +#undef NDEBUG +#endif +#include + +//#define LOG_TAG "ImageDecoder" + +#define JPEG_MAX_SETS_HUFFMAN_TABLES 2 + +#define TABLE_CLASS_DC 0 +#define TABLE_CLASS_AC 1 +#define TABLE_CLASS_NUM 2 + +// for config +#define HW_DECODE_MIN_WIDTH 100 // for JPEG smaller than this, use SW decode +#define HW_DECODE_MIN_HEIGHT 100 // for JPEG smaller than this, use SW decode + +typedef uint32_t Display; + +#define JD_CHECK(err, label) \ + if (err) { \ + ETRACE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ + goto label; \ + } + +#define JD_CHECK_RET(err, label, retcode) \ + if (err) { \ + status = retcode; \ + ETRACE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ + goto label; \ + } + +JpegDecoder::JpegDecoder() + :mInitialized(false), + mDisplay(0), + mConfigId(VA_INVALID_ID), + mContextId(VA_INVALID_ID), + mParser(NULL), + mBlitter(NULL) +{ + mParser = new CJPEGParse; + mBlitter = new JpegBlitter; + Display dpy; + int va_major_version, va_minor_version; + mDisplay = vaGetDisplay(&dpy); + vaInitialize(mDisplay, &va_major_version, &va_minor_version); +} +JpegDecoder::~JpegDecoder() +{ + if (mInitialized) { + WTRACE("Freeing JpegDecoder: not destroyed yet. Force destroy resource"); + deinit(); + } + delete mBlitter; + vaTerminate(mDisplay); + delete mParser; +} + +JpegDecoder::MapHandle JpegDecoder::mapData(RenderTarget &target, void ** data, uint32_t * offsets, uint32_t * pitches) +{ + JpegDecoder::MapHandle handle; + handle.img = NULL; + handle.valid = false; + VASurfaceID surf_id = getSurfaceID(target); + if (surf_id != VA_INVALID_ID) { + handle.img = new VAImage(); + if (handle.img == NULL) { + ETRACE("%s: create VAImage fail", __FUNCTION__); + return handle; + } + VAStatus st; + st = vaDeriveImage(mDisplay, surf_id, handle.img); + if (st != VA_STATUS_SUCCESS) { + delete handle.img; + handle.img = NULL; + ETRACE("%s: vaDeriveImage fail %d", __FUNCTION__, st); + return handle; + } + st = vaMapBuffer(mDisplay, handle.img->buf, data); + if (st != VA_STATUS_SUCCESS) { + vaDestroyImage(mDisplay, handle.img->image_id); + delete handle.img; + handle.img = NULL; + ETRACE("%s: vaMapBuffer fail %d", __FUNCTION__, st); + return handle; + } + handle.valid = true; + offsets[0] = handle.img->offsets[0]; + offsets[1] = handle.img->offsets[1]; + offsets[2] = handle.img->offsets[2]; + pitches[0] = handle.img->pitches[0]; + pitches[1] = handle.img->pitches[1]; + pitches[2] = handle.img->pitches[2]; + return handle; + } + ETRACE("%s: get Surface ID fail", __FUNCTION__); + return handle; +} + +void JpegDecoder::unmapData(RenderTarget &target, JpegDecoder::MapHandle maphandle) +{ + if (maphandle.valid == false) + return; + if (maphandle.img != NULL) { + vaUnmapBuffer(mDisplay, maphandle.img->buf); + vaDestroyImage(mDisplay, maphandle.img->image_id); + delete maphandle.img; + } +} + +JpegDecodeStatus JpegDecoder::init(int w, int h, RenderTarget **targets, int num) +{ + if (mInitialized) + return JD_ALREADY_INITIALIZED; + Mutex::Autolock autoLock(mLock); + mBlitter->setDecoder(*this); + if (!mInitialized) { + mGrallocSurfaceMap.clear(); + mDrmSurfaceMap.clear(); + mNormalSurfaceMap.clear(); + VAStatus st; + VASurfaceID surfid; + for (int i = 0; i < num; ++i) { + JpegDecodeStatus st = createSurfaceFromRenderTarget(*targets[i], &surfid); + if (st != JD_SUCCESS || surfid == 
VA_INVALID_ID) { + ETRACE("%s failed to create surface from RenderTarget handle 0x%x", + __FUNCTION__, targets[i]->handle); + return JD_RESOURCE_FAILURE; + } + } + VAConfigAttrib attrib; + + attrib.type = VAConfigAttribRTFormat; + st = vaGetConfigAttributes(mDisplay, VAProfileJPEGBaseline, VAEntrypointVLD, &attrib, 1); + if (st != VA_STATUS_SUCCESS) { + ETRACE("vaGetConfigAttributes failed. va_status = 0x%x", st); + return JD_INITIALIZATION_ERROR; + } + st = vaCreateConfig(mDisplay, VAProfileJPEGBaseline, VAEntrypointVLD, &attrib, 1, &mConfigId); + if (st != VA_STATUS_SUCCESS) { + ETRACE("vaCreateConfig failed. va_status = 0x%x", st); + return JD_INITIALIZATION_ERROR; + } + mContextId = VA_INVALID_ID; + size_t gmsize = mGrallocSurfaceMap.size(); + size_t dmsize = mDrmSurfaceMap.size(); + size_t nmsize = mNormalSurfaceMap.size(); + VASurfaceID *surfaces = new VASurfaceID[gmsize + dmsize + nmsize]; + for (size_t i = 0; i < gmsize + dmsize + nmsize; ++i) { + if (i < gmsize) + surfaces[i] = mGrallocSurfaceMap.valueAt(i); + else if (i < gmsize + dmsize) + surfaces[i] = mDrmSurfaceMap.valueAt(i - gmsize); + else + surfaces[i] = mNormalSurfaceMap.valueAt(i - gmsize - dmsize); + } + st = vaCreateContext(mDisplay, mConfigId, + w, h, + 0, + surfaces, gmsize + dmsize + nmsize, + &mContextId); + delete[] surfaces; + if (st != VA_STATUS_SUCCESS) { + ETRACE("vaCreateContext failed. va_status = 0x%x", st); + return JD_INITIALIZATION_ERROR; + } + + VTRACE("vaconfig = %u, vacontext = %u", mConfigId, mContextId); + mInitialized = true; + } + return JD_SUCCESS; +} + +JpegDecodeStatus JpegDecoder::blit(RenderTarget &src, RenderTarget &dst) +{ + return mBlitter->blit(src, dst); +} + +JpegDecodeStatus JpegDecoder::parse(JpegInfo &jpginfo) +{ + uint32_t component_order = 0 ; + uint32_t dqt_ind = 0; + uint32_t dht_ind = 0; + uint32_t scan_ind = 0; + bool frame_marker_found = false; + int i; + + parserInitialize(mParser, jpginfo.buf, jpginfo.bufsize); + + uint8_t marker = mParser->getNextMarker(mParser); + + while (marker != CODE_EOI &&( !mParser->endOfBuffer(mParser))) { + switch (marker) { + case CODE_SOI: { + jpginfo.soi_offset = mParser->getByteOffset(mParser) - 2; + break; + } + // If the marker is an APP marker skip over the data + case CODE_APP0: + case CODE_APP1: + case CODE_APP2: + case CODE_APP3: + case CODE_APP4: + case CODE_APP5: + case CODE_APP6: + case CODE_APP7: + case CODE_APP8: + case CODE_APP9: + case CODE_APP10: + case CODE_APP11: + case CODE_APP12: + case CODE_APP13: + case CODE_APP14: + case CODE_APP15: { + + uint32_t bytes_to_burn = mParser->readBytes(mParser, 2) - 2; + mParser->burnBytes(mParser, bytes_to_burn); + break; + } + // Store offset to DQT data to avoid parsing bitstream in user mode + case CODE_DQT: { + if (dqt_ind < 4) { + jpginfo.dqt_byte_offset[dqt_ind] = mParser->getByteOffset(mParser) - jpginfo.soi_offset; + dqt_ind++; + uint32_t bytes_to_burn = mParser->readBytes(mParser, 2 ) - 2; + mParser->burnBytes( mParser, bytes_to_burn ); + } else { + ETRACE("ERROR: Decoder does not support more than 4 Quant Tables\n"); + return JD_ERROR_BITSTREAM; + } + break; + } + // Throw exception for all SOF marker other than SOF0 + case CODE_SOF1: + case CODE_SOF2: + case CODE_SOF3: + case CODE_SOF5: + case CODE_SOF6: + case CODE_SOF7: + case CODE_SOF8: + case CODE_SOF9: + case CODE_SOF10: + case CODE_SOF11: + case CODE_SOF13: + case CODE_SOF14: + case CODE_SOF15: { + ETRACE("ERROR: unsupport SOF\n"); + break; + } + // Parse component information in SOF marker + case CODE_SOF_BASELINE: { + 
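The case body that follows walks the baseline SOF0 payload field by field. Its layout, restated as a sketch against the same CJPEGParse interface (an illustrative helper, not part of the patch):

    // SOF0 payload: 2-byte length, 1-byte sample precision (must be 8 for
    // baseline), 2-byte height, 2-byte width, 1-byte component count, then
    // per component: id, packed H/V sampling factors, quant table selector.
    struct Sof0Component { uint8_t id, h, v, tq; };

    static void read_sof0(CJPEGParse *p, uint16_t *height, uint16_t *width,
                          uint8_t *ncomp, Sof0Component out[4])
    {
        p->burnBytes(p, 2);                        // frame header length
        uint8_t precision = p->readNextByte(p);    // 8 for baseline
        (void)precision;
        *height = (uint16_t)p->readBytes(p, 2);
        *width  = (uint16_t)p->readBytes(p, 2);
        *ncomp  = p->readNextByte(p);              // 3 for YCbCr
        for (uint8_t i = 0; i < *ncomp && i < 4; i++) {
            out[i].id = p->readNextByte(p);
            uint8_t hv = p->readNextByte(p);
            out[i].h  = hv >> 4;                   // horizontal sampling factor
            out[i].v  = hv & 0xf;                  // vertical sampling factor
            out[i].tq = p->readNextByte(p);        // quantiser table selector
        }
    }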
frame_marker_found = true; + + mParser->burnBytes(mParser, 2); // Throw away frame header length + uint8_t sample_precision = mParser->readNextByte(mParser); + if (sample_precision != 8) { + ETRACE("sample_precision is not supported\n"); + return JD_ERROR_BITSTREAM; + } + // Extract pic width and height + jpginfo.picture_param_buf.picture_height = mParser->readBytes(mParser, 2); + jpginfo.picture_param_buf.picture_width = mParser->readBytes(mParser, 2); + jpginfo.picture_param_buf.num_components = mParser->readNextByte(mParser); + + if (jpginfo.picture_param_buf.num_components > JPEG_MAX_COMPONENTS) { + ETRACE("ERROR: reached max components\n"); + return JD_ERROR_BITSTREAM; + } + if (jpginfo.picture_param_buf.picture_height < HW_DECODE_MIN_HEIGHT + || jpginfo.picture_param_buf.picture_width < HW_DECODE_MIN_WIDTH) { + VTRACE("PERFORMANCE: %ux%u JPEG will decode faster with SW\n", + jpginfo.picture_param_buf.picture_width, + jpginfo.picture_param_buf.picture_height); + return JD_ERROR_BITSTREAM; + } + uint8_t comp_ind = 0; + for (comp_ind = 0; comp_ind < jpginfo.picture_param_buf.num_components; comp_ind++) { + jpginfo.picture_param_buf.components[comp_ind].component_id = mParser->readNextByte(mParser); + + uint8_t hv_sampling = mParser->readNextByte(mParser); + jpginfo.picture_param_buf.components[comp_ind].h_sampling_factor = hv_sampling >> 4; + jpginfo.picture_param_buf.components[comp_ind].v_sampling_factor = hv_sampling & 0xf; + jpginfo.picture_param_buf.components[comp_ind].quantiser_table_selector = mParser->readNextByte(mParser); + } + + + break; + } + // Store offset to DHT data to avoid parsing bitstream in user mode + case CODE_DHT: { + if (dht_ind < 4) { + jpginfo.dht_byte_offset[dht_ind] = mParser->getByteOffset(mParser) - jpginfo.soi_offset; + dht_ind++; + uint32_t bytes_to_burn = mParser->readBytes(mParser, 2) - 2; + mParser->burnBytes(mParser, bytes_to_burn ); + } else { + ETRACE("ERROR: Decoder does not support more than 4 Huff Tables\n"); + return JD_ERROR_BITSTREAM; + } + break; + } + // Parse component information in SOS marker + case CODE_SOS: { + mParser->burnBytes(mParser, 2); + uint32_t component_in_scan = mParser->readNextByte(mParser); + uint8_t comp_ind = 0; + + for (comp_ind = 0; comp_ind < component_in_scan; comp_ind++) { + uint8_t comp_id = mParser->readNextByte(mParser); + uint8_t comp_data_ind; + for (comp_data_ind = 0; comp_data_ind < jpginfo.picture_param_buf.num_components; comp_data_ind++) { + if (comp_id == jpginfo.picture_param_buf.components[comp_data_ind].component_id) { + jpginfo.slice_param_buf[scan_ind].components[comp_ind].component_selector = comp_data_ind + 1; + break; + } + } + uint8_t huffman_tables = mParser->readNextByte(mParser); + jpginfo.slice_param_buf[scan_ind].components[comp_ind].dc_table_selector = huffman_tables >> 4; + jpginfo.slice_param_buf[scan_ind].components[comp_ind].ac_table_selector = huffman_tables & 0xf; + } + uint32_t curr_byte = mParser->readNextByte(mParser); // Ss + if (curr_byte != 0) { + ETRACE("ERROR: curr_byte 0x%08x != 0\n", curr_byte); + return JD_ERROR_BITSTREAM; + } + curr_byte = mParser->readNextByte(mParser); // Se + if (curr_byte != 0x3f) { + ETRACE("ERROR: curr_byte 0x%08x != 0x3f\n", curr_byte); + return JD_ERROR_BITSTREAM; + } + curr_byte = mParser->readNextByte(mParser); // Ah, Al + if (curr_byte != 0) { + ETRACE("ERROR: curr_byte 0x%08x != 0\n", curr_byte); + return JD_ERROR_BITSTREAM; + } + // Set slice control variables needed + jpginfo.slice_param_buf[scan_ind].slice_data_offset = 
mParser->getByteOffset(mParser) - jpginfo.soi_offset; + jpginfo.slice_param_buf[scan_ind].num_components = component_in_scan; + if (scan_ind) { + /* If there is more than one scan, the slice for all but the final scan should only run up to the beginning of the next scan */ + jpginfo.slice_param_buf[scan_ind - 1].slice_data_size = + (jpginfo.slice_param_buf[scan_ind].slice_data_offset - jpginfo.slice_param_buf[scan_ind - 1].slice_data_offset ); + } + scan_ind++; + jpginfo.scan_ctrl_count++; // gsDXVA2Globals.uiScanCtrlCount + break; + } + case CODE_DRI: { + uint32_t size = mParser->readBytes(mParser, 2); + jpginfo.slice_param_buf[scan_ind].restart_interval = mParser->readBytes(mParser, 2); + mParser->burnBytes(mParser, (size - 4)); + break; + } + default: + break; + } + + marker = mParser->getNextMarker(mParser); + // If the EOI code is found, store the byte offset before the parsing finishes + if( marker == CODE_EOI ) { + jpginfo.eoi_offset = mParser->getByteOffset(mParser); + } + + } + + jpginfo.quant_tables_num = dqt_ind; + jpginfo.huffman_tables_num = dht_ind; + + /* The slice for the last scan should run up to the end of the picture */ + if (jpginfo.eoi_offset) { + jpginfo.slice_param_buf[scan_ind - 1].slice_data_size = (jpginfo.eoi_offset - jpginfo.slice_param_buf[scan_ind - 1].slice_data_offset); + } + else { + jpginfo.slice_param_buf[scan_ind - 1].slice_data_size = (jpginfo.bufsize - jpginfo.slice_param_buf[scan_ind - 1].slice_data_offset); + } + // fail if SOF0 isn't found + if (!frame_marker_found) { + ETRACE("ERROR: Reached end of bitstream while trying to parse headers\n"); + return JD_ERROR_BITSTREAM; + } + + JpegDecodeStatus status = parseTableData(jpginfo); + if (status != JD_SUCCESS) { + ETRACE("ERROR: Parsing table data returns %d", status); + return JD_ERROR_BITSTREAM; + } + + jpginfo.image_width = jpginfo.picture_param_buf.picture_width; + jpginfo.image_height = jpginfo.picture_param_buf.picture_height; + jpginfo.image_color_fourcc = sampFactor2Fourcc(jpginfo.picture_param_buf.components[0].h_sampling_factor, + jpginfo.picture_param_buf.components[1].h_sampling_factor, + jpginfo.picture_param_buf.components[2].h_sampling_factor, + jpginfo.picture_param_buf.components[0].v_sampling_factor, + jpginfo.picture_param_buf.components[1].v_sampling_factor, + jpginfo.picture_param_buf.components[2].v_sampling_factor); + jpginfo.image_pixel_format = fourcc2PixelFormat(jpginfo.image_color_fourcc); + + VTRACE("%s jpg %ux%u, fourcc=%s, pixelformat=0x%x", + __FUNCTION__, jpginfo.image_width, jpginfo.image_height, fourcc2str(NULL, jpginfo.image_color_fourcc), + jpginfo.image_pixel_format); + + if (!jpegColorFormatSupported(jpginfo)) + return JD_INPUT_FORMAT_UNSUPPORTED; + return JD_SUCCESS; +} + +JpegDecodeStatus JpegDecoder::createSurfaceFromRenderTarget(RenderTarget &target, VASurfaceID *surfid) +{ + if (target.type == RENDERTARGET_INTERNAL_BUFFER) { + JpegDecodeStatus st = createSurfaceInternal(target.width, + target.height, + target.pixel_format, + target.handle, + surfid); + if (st != JD_SUCCESS) + return st; + mNormalSurfaceMap.add(target.handle, *surfid); + VTRACE("%s added surface %u (internal buffer id %d) to SurfaceList", + __PRETTY_FUNCTION__, *surfid, target.handle); + } + else { + switch (target.type) { + case RenderTarget::KERNEL_DRM: + { + JpegDecodeStatus st = createSurfaceDrm(target.width, + target.height, + target.pixel_format, + (unsigned long)target.handle, + target.stride, + surfid); + if (st != JD_SUCCESS) + return st; + mDrmSurfaceMap.add((unsigned 
long)target.handle, *surfid); + VTRACE("%s added surface %u (Drm handle %d) to DrmSurfaceMap", + __PRETTY_FUNCTION__, *surfid, target.handle); + } + break; + case RenderTarget::ANDROID_GRALLOC: + { + JpegDecodeStatus st = createSurfaceGralloc(target.width, + target.height, + target.pixel_format, + (buffer_handle_t)target.handle, + target.stride, + surfid); + if (st != JD_SUCCESS) + return st; + mGrallocSurfaceMap.add((buffer_handle_t)target.handle, *surfid); + VTRACE("%s added surface %u (Gralloc handle %d) to DrmSurfaceMap", + __PRETTY_FUNCTION__, *surfid, target.handle); + } + break; + default: + return JD_RENDER_TARGET_TYPE_UNSUPPORTED; + } + } + return JD_SUCCESS; +} + +JpegDecodeStatus JpegDecoder::createSurfaceInternal(int width, int height, int pixel_format, int handle, VASurfaceID *surf_id) +{ + VAStatus va_status; + VASurfaceAttrib attrib; + attrib.type = VASurfaceAttribPixelFormat; + attrib.flags = VA_SURFACE_ATTRIB_SETTABLE; + attrib.value.type = VAGenericValueTypeInteger; + uint32_t fourcc = pixelFormat2Fourcc(pixel_format); + uint32_t vaformat = fourcc2VaFormat(fourcc); + attrib.value.value.i = fourcc; + VTRACE("enter %s, pixel_format 0x%x, fourcc %s", __FUNCTION__, pixel_format, fourcc2str(NULL, fourcc)); + va_status = vaCreateSurfaces(mDisplay, + vaformat, + width, + height, + surf_id, + 1, + &attrib, + 1); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("%s: createSurface (format %u, fourcc %s) returns %d", __PRETTY_FUNCTION__, vaformat, fourcc2str(NULL, fourcc), va_status); + return JD_RESOURCE_FAILURE; + } + return JD_SUCCESS; +} + +VASurfaceID JpegDecoder::getSurfaceID(RenderTarget &target) const +{ + int index; + if (target.type == RENDERTARGET_INTERNAL_BUFFER) { + index = mNormalSurfaceMap.indexOfKey(target.handle); + if (index < 0) + return VA_INVALID_ID; + else + return mNormalSurfaceMap.valueAt(index); + } + switch (target.type) { + case RenderTarget::KERNEL_DRM: + index = mDrmSurfaceMap.indexOfKey((unsigned long)target.handle); + if (index < 0) + return VA_INVALID_ID; + else + return mDrmSurfaceMap.valueAt(index); + case RenderTarget::ANDROID_GRALLOC: + index = mGrallocSurfaceMap.indexOfKey((buffer_handle_t)target.handle); + if (index < 0) + return VA_INVALID_ID; + else + return mGrallocSurfaceMap.valueAt(index); + default: + assert(false); + } + return VA_INVALID_ID; +} + +JpegDecodeStatus JpegDecoder::sync(RenderTarget &target) +{ + VASurfaceID surf_id = getSurfaceID(target); + if (surf_id == VA_INVALID_ID) + return JD_INVALID_RENDER_TARGET; + vaSyncSurface(mDisplay, surf_id); + return JD_SUCCESS; +} +bool JpegDecoder::busy(RenderTarget &target) const +{ + VASurfaceStatus surf_st; + VASurfaceID surf_id = getSurfaceID(target); + if (surf_id == VA_INVALID_ID) + return false; + VAStatus st = vaQuerySurfaceStatus(mDisplay, surf_id, &surf_st); + if (st != VA_STATUS_SUCCESS) + return false; + return surf_st != VASurfaceReady; +} + + +JpegDecodeStatus JpegDecoder::decode(JpegInfo &jpginfo, RenderTarget &target) +{ + VAStatus va_status = VA_STATUS_SUCCESS; + VASurfaceStatus surf_status; + VABufferID desc_buf[5]; + uint32_t bitstream_buffer_size = 0; + uint32_t scan_idx = 0; + uint32_t buf_idx = 0; + uint32_t chopping = VA_SLICE_DATA_FLAG_ALL; + uint32_t bytes_remaining; + VASurfaceID surf_id = getSurfaceID(target); + if (surf_id == VA_INVALID_ID) + return JD_RENDER_TARGET_NOT_INITIALIZED; + va_status = vaQuerySurfaceStatus(mDisplay, surf_id, &surf_status); + if (surf_status != VASurfaceReady) + return JD_RENDER_TARGET_BUSY; + + if (jpginfo.eoi_offset) + bytes_remaining 
= jpginfo.eoi_offset - jpginfo.soi_offset; + else + bytes_remaining = jpginfo.bufsize - jpginfo.soi_offset; + uint32_t src_offset = jpginfo.soi_offset; + uint32_t cpy_row; + bitstream_buffer_size = jpginfo.bufsize;//cinfo->src->bytes_in_buffer;//1024*1024*5; + + Vector buf_list; + va_status = vaBeginPicture(mDisplay, mContextId, surf_id); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaBeginPicture failed. va_status = 0x%x", va_status); + return JD_DECODE_FAILURE; + } + va_status = vaCreateBuffer(mDisplay, mContextId, VAPictureParameterBufferType, sizeof(VAPictureParameterBufferJPEGBaseline), 1, &jpginfo.picture_param_buf, &desc_buf[buf_idx]); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateBuffer VAPictureParameterBufferType failed. va_status = 0x%x", va_status); + return JD_RESOURCE_FAILURE; + } + buf_list.add(desc_buf[buf_idx++]); + va_status = vaCreateBuffer(mDisplay, mContextId, VAIQMatrixBufferType, sizeof(VAIQMatrixBufferJPEGBaseline), 1, &jpginfo.qmatrix_buf, &desc_buf[buf_idx]); + + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateBuffer VAIQMatrixBufferType failed. va_status = 0x%x", va_status); + return JD_RESOURCE_FAILURE; + } + buf_list.add(desc_buf[buf_idx++]); + va_status = vaCreateBuffer(mDisplay, mContextId, VAHuffmanTableBufferType, sizeof(VAHuffmanTableBufferJPEGBaseline), 1, &jpginfo.hufman_table_buf, &desc_buf[buf_idx]); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateBuffer VAHuffmanTableBufferType failed. va_status = 0x%x", va_status); + return JD_RESOURCE_FAILURE; + } + buf_list.add(desc_buf[buf_idx++]); + + do { + /* Get Bitstream Buffer */ + uint32_t bytes = ( bytes_remaining < bitstream_buffer_size ) ? bytes_remaining : bitstream_buffer_size; + bytes_remaining -= bytes; + /* Get Slice Control Buffer */ + VASliceParameterBufferJPEGBaseline dest_scan_ctrl[JPEG_MAX_COMPONENTS]; + uint32_t src_idx = 0; + uint32_t dest_idx = 0; + memset(dest_scan_ctrl, 0, sizeof(dest_scan_ctrl)); + for (src_idx = scan_idx; src_idx < jpginfo.scan_ctrl_count ; src_idx++) { + if (jpginfo.slice_param_buf[ src_idx ].slice_data_offset) { + /* new scan, reset state machine */ + chopping = VA_SLICE_DATA_FLAG_ALL; + VTRACE("Scan:%i FileOffset:%x Bytes:%x \n", src_idx, + jpginfo.slice_param_buf[ src_idx ].slice_data_offset, + jpginfo.slice_param_buf[ src_idx ].slice_data_size ); + /* does the slice end in the buffer */ + if (jpginfo.slice_param_buf[ src_idx ].slice_data_offset + jpginfo.slice_param_buf[ src_idx ].slice_data_size > bytes + src_offset) { + chopping = VA_SLICE_DATA_FLAG_BEGIN; + } + } else { + if (jpginfo.slice_param_buf[ src_idx ].slice_data_size > bytes) { + chopping = VA_SLICE_DATA_FLAG_MIDDLE; + } else { + if ((chopping == VA_SLICE_DATA_FLAG_BEGIN) || (chopping == VA_SLICE_DATA_FLAG_MIDDLE)) { + chopping = VA_SLICE_DATA_FLAG_END; + } + } + } + dest_scan_ctrl[dest_idx].slice_data_flag = chopping; + + if ((chopping == VA_SLICE_DATA_FLAG_ALL) || (chopping == VA_SLICE_DATA_FLAG_BEGIN)) + dest_scan_ctrl[dest_idx].slice_data_offset = jpginfo.slice_param_buf[ src_idx ].slice_data_offset; + else + dest_scan_ctrl[dest_idx].slice_data_offset = 0; + + const int32_t bytes_in_seg = bytes - dest_scan_ctrl[dest_idx].slice_data_offset; + const uint32_t scan_data = (bytes_in_seg < jpginfo.slice_param_buf[src_idx].slice_data_size) ? 
bytes_in_seg : jpginfo.slice_param_buf[src_idx].slice_data_size ; + jpginfo.slice_param_buf[src_idx].slice_data_offset = 0; + jpginfo.slice_param_buf[src_idx].slice_data_size -= scan_data; + dest_scan_ctrl[dest_idx].slice_data_size = scan_data; + dest_scan_ctrl[dest_idx].num_components = jpginfo.slice_param_buf[src_idx].num_components; + dest_scan_ctrl[dest_idx].restart_interval = jpginfo.slice_param_buf[src_idx].restart_interval; + memcpy(&dest_scan_ctrl[dest_idx].components, & jpginfo.slice_param_buf[ src_idx ].components, + sizeof(jpginfo.slice_param_buf[ src_idx ].components) ); + dest_idx++; + if ((chopping == VA_SLICE_DATA_FLAG_ALL) || (chopping == VA_SLICE_DATA_FLAG_END)) { /* all good good */ + } else { + break; + } + } + scan_idx = src_idx; + /* Get Slice Control Buffer */ + va_status = vaCreateBuffer(mDisplay, mContextId, VASliceParameterBufferType, sizeof(VASliceParameterBufferJPEGBaseline) * dest_idx, 1, dest_scan_ctrl, &desc_buf[buf_idx]); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateBuffer VASliceParameterBufferType failed. va_status = 0x%x", va_status); + return JD_RESOURCE_FAILURE; + } + buf_list.add(desc_buf[buf_idx++]); + va_status = vaCreateBuffer(mDisplay, mContextId, VASliceDataBufferType, bytes, 1, &jpginfo.buf[ src_offset ], &desc_buf[buf_idx]); + buf_list.add(desc_buf[buf_idx++]); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaCreateBuffer VASliceDataBufferType (%u bytes) failed. va_status = 0x%x", bytes, va_status); + return JD_RESOURCE_FAILURE; + } + va_status = vaRenderPicture( mDisplay, mContextId, desc_buf, buf_idx); + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaRenderPicture failed. va_status = 0x%x", va_status); + return JD_DECODE_FAILURE; + } + buf_idx = 0; + + src_offset += bytes; + } while (bytes_remaining); + + va_status = vaEndPicture(mDisplay, mContextId); + + while(buf_list.size() > 0) { + vaDestroyBuffer(mDisplay, buf_list.top()); + buf_list.pop(); + } + if (va_status != VA_STATUS_SUCCESS) { + ETRACE("vaEndPicture failed. 
va_status = 0x%x", va_status); + return JD_DECODE_FAILURE; + } + return JD_SUCCESS; +} +void JpegDecoder::deinit() +{ + if (mInitialized) { + Mutex::Autolock autoLock(mLock); + if (mInitialized) { + vaDestroyContext(mDisplay, mContextId); + vaDestroyConfig(mDisplay, mConfigId); + mInitialized = false; + size_t gralloc_size = mGrallocSurfaceMap.size(); + size_t drm_size = mDrmSurfaceMap.size(); + size_t internal_surf_size = mNormalSurfaceMap.size(); + for (size_t i = 0; i < gralloc_size; ++i) { + VASurfaceID surf_id = mGrallocSurfaceMap.valueAt(i); + vaDestroySurfaces(mDisplay, &surf_id, 1); + } + for (size_t i = 0; i < drm_size; ++i) { + VASurfaceID surf_id = mDrmSurfaceMap.valueAt(i); + vaDestroySurfaces(mDisplay, &surf_id, 1); + } + for (size_t i = 0; i < internal_surf_size; ++i) { + VASurfaceID surf_id = mNormalSurfaceMap.valueAt(i); + vaDestroySurfaces(mDisplay, &surf_id, 1); + } + mGrallocSurfaceMap.clear(); + mDrmSurfaceMap.clear(); + mNormalSurfaceMap.clear(); + } + } +} + +JpegDecodeStatus JpegDecoder::parseTableData(JpegInfo &jpginfo) { + parserInitialize(mParser, jpginfo.buf, jpginfo.bufsize); + // Parse Quant tables + memset(&jpginfo.qmatrix_buf, 0, sizeof(jpginfo.qmatrix_buf)); + uint32_t dqt_ind = 0; + for (dqt_ind = 0; dqt_ind < jpginfo.quant_tables_num; dqt_ind++) { + if (mParser->setByteOffset(mParser, jpginfo.dqt_byte_offset[dqt_ind])) { + // uint32_t uiTableBytes = mParser->readBytes( 2 ) - 2; + uint32_t table_bytes = mParser->readBytes( mParser, 2 ) - 2; + do { + uint32_t table_info = mParser->readNextByte(mParser); + table_bytes--; + uint32_t table_length = table_bytes > 64 ? 64 : table_bytes; + uint32_t table_precision = table_info >> 4; + if (table_precision != 0) { + ETRACE("%s ERROR: Parsing table data returns %d", __FUNCTION__, JD_ERROR_BITSTREAM); + return JD_ERROR_BITSTREAM; + } + uint32_t table_id = table_info & 0xf; + + jpginfo.qmatrix_buf.load_quantiser_table[table_id] = 1; + + if (table_id < JPEG_MAX_QUANT_TABLES) { + // Pull Quant table data from bitstream + uint32_t byte_ind; + for (byte_ind = 0; byte_ind < table_length; byte_ind++) { + jpginfo.qmatrix_buf.quantiser_table[table_id][byte_ind] = mParser->readNextByte(mParser); + } + } else { + ETRACE("%s DQT table ID is not supported", __FUNCTION__); + mParser->burnBytes(mParser, table_length); + } + table_bytes -= table_length; + } while (table_bytes); + } + } + + // Parse Huffman tables + memset(&jpginfo.hufman_table_buf, 0, sizeof(jpginfo.hufman_table_buf)); + uint32_t dht_ind = 0; + for (dht_ind = 0; dht_ind < jpginfo.huffman_tables_num; dht_ind++) { + if (mParser->setByteOffset(mParser, jpginfo.dht_byte_offset[dht_ind])) { + uint32_t table_bytes = mParser->readBytes( mParser, 2 ) - 2; + do { + uint32_t table_info = mParser->readNextByte(mParser); + table_bytes--; + uint32_t table_class = table_info >> 4; // Identifies whether the table is for AC or DC + uint32_t table_id = table_info & 0xf; + jpginfo.hufman_table_buf.load_huffman_table[table_id] = 1; + + if ((table_class < TABLE_CLASS_NUM) && (table_id < JPEG_MAX_SETS_HUFFMAN_TABLES)) { + if (table_class == 0) { + uint8_t* bits = mParser->getCurrentIndex(mParser); + // Find out the number of entries in the table + uint32_t table_entries = 0; + uint32_t bit_ind; + for (bit_ind = 0; bit_ind < 16; bit_ind++) { + jpginfo.hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind] = bits[bit_ind]; + table_entries += jpginfo.hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind]; + } + + // Create table of code values + mParser->burnBytes(mParser, 
16); + table_bytes -= 16; + uint32_t tbl_ind; + for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { + jpginfo.hufman_table_buf.huffman_table[table_id].dc_values[tbl_ind] = mParser->readNextByte(mParser); + table_bytes--; + } + + } else { // for AC class + uint8_t* bits = mParser->getCurrentIndex(mParser); + // Find out the number of entries in the table + uint32_t table_entries = 0; + uint32_t bit_ind = 0; + for (bit_ind = 0; bit_ind < 16; bit_ind++) { + jpginfo.hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind] = bits[bit_ind]; + table_entries += jpginfo.hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind]; + } + + // Create table of code values + mParser->burnBytes(mParser, 16); + table_bytes -= 16; + uint32_t tbl_ind = 0; + for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { + jpginfo.hufman_table_buf.huffman_table[table_id].ac_values[tbl_ind] = mParser->readNextByte(mParser); + table_bytes--; + } + }//end of else + } else { + // Find out the number of entries in the table + ETRACE("%s DHT table ID is not supported", __FUNCTION__); + uint32_t table_entries = 0; + uint32_t bit_ind = 0; + for(bit_ind = 0; bit_ind < 16; bit_ind++) { + table_entries += mParser->readNextByte(mParser); + table_bytes--; + } + mParser->burnBytes(mParser, table_entries); + table_bytes -= table_entries; + } + + } while (table_bytes); + } + } + + return JD_SUCCESS; +} + diff --git a/imagedecoder/JPEGDecoder.h b/imagedecoder/JPEGDecoder.h index a77db7a..f46611f 100644 --- a/imagedecoder/JPEGDecoder.h +++ b/imagedecoder/JPEGDecoder.h @@ -1,5 +1,5 @@ /* INTEL CONFIDENTIAL -* Copyright (c) 2012 Intel Corporation. All rights reserved. +* Copyright (c) 2012, 2013 Intel Corporation. All rights reserved. * Copyright (c) Imagination Technologies Limited, UK * * The source code contained or described herein and all documents @@ -23,91 +23,67 @@ * * Authors: * Nana Guo +* Yao Cheng * */ -#ifndef JDLIBVA_H -#define JDLIBVA_H -#include "JPEGParser.h" -#include -#include -//#include -#include "va/va_dec_jpeg.h" -#include -#define HAVE_BOOLEAN -#include "jpeglib.h" -#include +#ifndef JPEGDEC_H +#define JPEGDEC_H + +#include "../videovpp/VideoVPPBase.h" +#include +#include +#include "JPEGCommon.h" +using namespace android; + +struct CJPEGParse; +class JpegBlitter; + +// Non thread-safe +class JpegDecoder +{ +friend class JpegBlitter; +public: + struct MapHandle + { + friend class JpegDecoder; + public: + bool valid; + private: + VAImage *img; + }; + JpegDecoder(); + virtual ~JpegDecoder(); + virtual JpegDecodeStatus init(int width, int height, RenderTarget **targets, int num); + virtual void deinit(); + virtual JpegDecodeStatus parse(JpegInfo &jpginfo); + virtual JpegDecodeStatus decode(JpegInfo &jpginfo, RenderTarget &target); + virtual JpegDecodeStatus sync(RenderTarget &target); + virtual bool busy(RenderTarget &target) const; + virtual JpegDecodeStatus blit(RenderTarget &src, RenderTarget &dst); + virtual MapHandle mapData(RenderTarget &target, void ** data, uint32_t * offsets, uint32_t * pitches); + virtual void unmapData(RenderTarget &target, MapHandle maphandle); +private: + bool mInitialized; + mutable Mutex mLock; + VADisplay mDisplay; + VAConfigID mConfigId; + VAContextID mContextId; + CJPEGParse *mParser; + JpegBlitter *mBlitter; + KeyedVector mGrallocSurfaceMap; + KeyedVector mDrmSurfaceMap; + KeyedVector mNormalSurfaceMap; + virtual VASurfaceID getSurfaceID(RenderTarget &target) const; + virtual JpegDecodeStatus parseTableData(JpegInfo &jpginfo); + virtual bool 
jpegColorFormatSupported(JpegInfo &jpginfo) const; + virtual JpegDecodeStatus createSurfaceFromRenderTarget(RenderTarget &target, VASurfaceID *surf_id); + virtual JpegDecodeStatus createSurfaceInternal(int width, int height, int pixel_format, int handle, VASurfaceID *surf_id); + virtual JpegDecodeStatus createSurfaceDrm(int width, int height, int pixel_format, unsigned long boname, int stride, VASurfaceID *surf_id); + virtual JpegDecodeStatus createSurfaceGralloc(int width, int height, int pixel_format, buffer_handle_t handle, int stride, VASurfaceID *surf_id); +}; -#define Display unsigned int -#define BOOL int - -#define JPEG_MAX_COMPONENTS 4 -#define JPEG_MAX_QUANT_TABLES 4 - -typedef struct { - Display * android_display; - uint32_t surface_count; - VADisplay va_display; - VAContextID va_context; - VASurfaceID* va_surfaces; - VAConfigID va_config; - - VAPictureParameterBufferJPEGBaseline picture_param_buf; - VASliceParameterBufferJPEGBaseline slice_param_buf[JPEG_MAX_COMPONENTS]; - VAIQMatrixBufferJPEGBaseline qmatrix_buf; - VAHuffmanTableBufferJPEGBaseline hufman_table_buf; - - uint32_t dht_byte_offset[4]; - uint32_t dqt_byte_offset[4]; - uint32_t huffman_tables_num; - uint32_t quant_tables_num; - uint32_t soi_offset; - uint32_t eoi_offset; - - uint8_t* bitstream_buf; - uint32_t image_width; - uint32_t image_height; - uint32_t scan_ctrl_count; - - uint8_t * image_buf; - VAImage surface_image; - boolean hw_state_ready; - boolean hw_caps_ready; - boolean hw_path; - boolean initialized; - boolean resource_allocated; - - uint32_t file_size; - uint32_t rotation; - CJPEGParse* JPEGParser; - - char ** output_image; - uint32_t output_lines; - uint32_t fourcc; -} jd_libva_struct; - -typedef enum { - DECODE_NOT_STARTED = -6, - DECODE_INVALID_DATA = -5, - DECODE_DRIVER_FAIL = -4, - DECODE_PARSER_FAIL = -3, - DECODE_MEMORY_FAIL = -2, - DECODE_FAIL = -1, - DECODE_SUCCESS = 0, - -} IMAGE_DECODE_STATUS; - -typedef int32_t Decode_Status; - -extern jd_libva_struct jd_libva; - -Decode_Status jdva_initialize (jd_libva_struct * jd_libva_ptr); -void jdva_deinitialize (jd_libva_struct * jd_libva_ptr); -Decode_Status jdva_decode (j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr); -Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr); -Decode_Status jdva_release_resource (jd_libva_struct * jd_libva_ptr); -Decode_Status parseBitstream(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr); -Decode_Status parseTableData(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr); #endif + diff --git a/imagedecoder/JPEGDecoder_gen.cpp b/imagedecoder/JPEGDecoder_gen.cpp new file mode 100644 index 0000000..9b5f242 --- /dev/null +++ b/imagedecoder/JPEGDecoder_gen.cpp @@ -0,0 +1,210 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2013 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. 
No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +* Authors: +* Yao Cheng +* +*/ +//#define LOG_NDEBUG 0 + +#include "va/va.h" +#include "va/va_vpp.h" +#include "va/va_drmcommon.h" +#include "JPEGDecoder.h" +#include "ImageDecoderTrace.h" +#include +#include +#include +#include "JPEGCommon_Gen.h" + +int fourcc2PixelFormat(uint32_t fourcc) +{ + switch(fourcc) { + case VA_FOURCC_YV12: + return HAL_PIXEL_FORMAT_YV12; + case VA_FOURCC_422H: + return HAL_PIXEL_FORMAT_YCbCr_422_H_INTEL; + case VA_FOURCC_YUY2: + return HAL_PIXEL_FORMAT_YCbCr_422_I; + case VA_FOURCC_NV12: + return HAL_PIXEL_FORMAT_NV12_TILED_INTEL; + case VA_FOURCC_RGBA: + return HAL_PIXEL_FORMAT_RGBA_8888; + case VA_FOURCC_IMC3: + return HAL_PIXEL_FORMAT_IMC3; + case VA_FOURCC_444P: + return HAL_PIXEL_FORMAT_444P; + case VA_FOURCC_422V: + case VA_FOURCC_411P: + default: + return -1; + } +} +uint32_t pixelFormat2Fourcc(int pixel_format) +{ + switch(pixel_format) { + case HAL_PIXEL_FORMAT_YV12: + return VA_FOURCC_YV12; + case HAL_PIXEL_FORMAT_YCbCr_422_H_INTEL: + return VA_FOURCC_422H; + case HAL_PIXEL_FORMAT_YCbCr_422_I: + return VA_FOURCC_YUY2; + case HAL_PIXEL_FORMAT_NV12_TILED_INTEL: + return VA_FOURCC_NV12; + case HAL_PIXEL_FORMAT_RGBA_8888: + return VA_FOURCC_RGBA; + case HAL_PIXEL_FORMAT_444P: + return VA_FOURCC_444P; + case HAL_PIXEL_FORMAT_IMC3: + return VA_FOURCC_IMC3; + default: + return 0; + } +} + +//#define LOG_TAG "ImageDecoder" + +#define JD_CHECK(err, label) \ + if (err) { \ + ETRACE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ + goto label; \ + } + +#define JD_CHECK_RET(err, label, retcode) \ + if (err) { \ + status = retcode; \ + ETRACE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ + goto label; \ + } + +bool JpegDecoder::jpegColorFormatSupported(JpegInfo &jpginfo) const +{ + return (jpginfo.image_color_fourcc == VA_FOURCC_IMC3) || + (jpginfo.image_color_fourcc == VA_FOURCC_422H) || + (jpginfo.image_color_fourcc == VA_FOURCC_444P); +} + +JpegDecodeStatus JpegDecoder::createSurfaceDrm(int width, int height, int pixel_format, unsigned long boname, int stride, VASurfaceID *surf_id) +{ + VAStatus st; + VASurfaceAttrib attrib_list; + VASurfaceAttribExternalBuffers vaSurfaceExternBuf; + uint32_t fourcc = pixelFormat2Fourcc(pixel_format); + vaSurfaceExternBuf.pixel_format = fourcc; + VTRACE("%s extBuf.pixel_format is %s", __FUNCTION__, fourcc2str(NULL, fourcc)); + vaSurfaceExternBuf.width = width; + vaSurfaceExternBuf.height = height; + vaSurfaceExternBuf.pitches[0] = stride; + vaSurfaceExternBuf.buffers = &boname; + vaSurfaceExternBuf.num_buffers = 1; + vaSurfaceExternBuf.flags = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM; + attrib_list.type = VASurfaceAttribExternalBufferDescriptor; + attrib_list.flags = VA_SURFACE_ATTRIB_SETTABLE; + attrib_list.value.type = VAGenericValueTypePointer; + attrib_list.value.value.p = (void *)&vaSurfaceExternBuf; + + st = vaCreateSurfaces(mDisplay, + fourcc2VaFormat(fourcc), + width, + height, + surf_id, + 1, + &attrib_list, + 1); + VTRACE("%s 
createSurface DRM for vaformat %u, fourcc %s", __FUNCTION__, fourcc2VaFormat(fourcc), fourcc2str(NULL, fourcc)); + if (st != VA_STATUS_SUCCESS) { + ETRACE("%s: vaCreateSurfaces returns %d", __PRETTY_FUNCTION__, st); + return JD_RESOURCE_FAILURE; + } + return JD_SUCCESS; +} + +JpegDecodeStatus JpegDecoder::createSurfaceGralloc(int width, int height, int pixel_format, buffer_handle_t handle, int stride, VASurfaceID *surf_id) +{ + unsigned long boname; + hw_module_t const* module = NULL; + alloc_device_t *allocdev = NULL; + struct gralloc_module_t *gralloc_module = NULL; + JpegDecodeStatus st; + + uint32_t fourcc = pixelFormat2Fourcc(pixel_format); + VTRACE("enter %s, pixel_format 0x%x, fourcc %s", __FUNCTION__, pixel_format, fourcc2str(NULL, fourcc)); + if ((fourcc != VA_FOURCC_422H) && + (fourcc != VA_FOURCC_YUY2) && + (fourcc != VA_FOURCC_RGBA)){ + VASurfaceAttrib attrib; + attrib.type = VASurfaceAttribPixelFormat; + attrib.flags = VA_SURFACE_ATTRIB_SETTABLE; + attrib.value.type = VAGenericValueTypeInteger; + attrib.value.value.i = fourcc; + VAStatus va_status = vaCreateSurfaces(mDisplay, + fourcc2VaFormat(fourcc), + width, + height, + surf_id, + 1, + &attrib, + 1); + VTRACE("%s createSurface for %s", __FUNCTION__, fourcc2str(NULL, fourcc)); + if (va_status != VA_STATUS_SUCCESS) + return JD_RESOURCE_FAILURE; + return JD_SUCCESS; + } + + int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); + if (err) { + ETRACE("%s failed to get gralloc module", __PRETTY_FUNCTION__); + st = JD_RESOURCE_FAILURE; + } + JD_CHECK(err, cleanup); + gralloc_module = (struct gralloc_module_t *)module; + err = gralloc_open(module, &allocdev); + if (err) { + ETRACE("%s failed to open alloc device", __PRETTY_FUNCTION__); + st = JD_RESOURCE_FAILURE; + } + JD_CHECK(err, cleanup); + err = gralloc_module->perform(gralloc_module, + INTEL_UFO_GRALLOC_MODULE_PERFORM_GET_BO_NAME, + handle, + &boname); + if (err) { + ETRACE("%s failed to get boname via gralloc->perform", __PRETTY_FUNCTION__); + st = JD_RESOURCE_FAILURE; + } + JD_CHECK(err, cleanup); + VTRACE("%s fourcc %s luma_stride is %d", __FUNCTION__, + fourcc2str(NULL, fourcc), stride); + + gralloc_close(allocdev); + return createSurfaceDrm(width, height, pixel_format, boname, stride, surf_id); +cleanup: + if (allocdev) + gralloc_close(allocdev); + return st; +} + + + + + diff --git a/imagedecoder/JPEGDecoder_img.cpp b/imagedecoder/JPEGDecoder_img.cpp new file mode 100644 index 0000000..d90559d --- /dev/null +++ b/imagedecoder/JPEGDecoder_img.cpp @@ -0,0 +1,99 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012, 2013 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. 
+* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +* Authors: +* Nana Guo +* Yao Cheng +* +*/ + +#include "JPEGCommon_Img.h" +#include "JPEGDecoder.h" + +int fourcc2PixelFormat(uint32_t fourcc) +{ + switch(fourcc) { + case VA_FOURCC_YV12: + return HAL_PIXEL_FORMAT_YV12; + case VA_FOURCC_YUY2: + return HAL_PIXEL_FORMAT_YCbCr_422_I; + case VA_FOURCC_RGBA: + return HAL_PIXEL_FORMAT_RGBA_8888; + default: + return -1; + } +} +uint32_t pixelFormat2Fourcc(int pixel_format) +{ + switch(pixel_format) { + case HAL_PIXEL_FORMAT_YV12: + return VA_FOURCC_YV12; + case HAL_PIXEL_FORMAT_YCbCr_422_I: + return VA_FOURCC_YUY2; + case HAL_PIXEL_FORMAT_RGBA_8888: + return VA_FOURCC_RGBA; + default: + return 0; + } +} + + +bool JpegDecoder::jpegColorFormatSupported(JpegInfo &jpginfo) const +{ + return (jpginfo.image_color_fourcc == VA_FOURCC_IMC3) || + (jpginfo.image_color_fourcc == VA_FOURCC_422H) || + (jpginfo.image_color_fourcc == VA_FOURCC_444P); +} + +JpegDecodeStatus JpegDecoder::createSurfaceDrm(int width, int height, int pixel_format, unsigned long boname, int stride, VASurfaceID *surf_id) +{ + return JD_RENDER_TARGET_TYPE_UNSUPPORTED; +} + +JpegDecodeStatus JpegDecoder::createSurfaceGralloc(int width, int height, int pixel_format, buffer_handle_t handle, int stride, VASurfaceID *surf_id) +{ + VAStatus st; + VASurfaceAttributeTPI attrib_tpi; + uint32_t va_format = VA_RT_FORMAT_YUV444; + attrib_tpi.count = 1; + attrib_tpi.luma_stride = stride; + attrib_tpi.pixel_format = pixel_format; + attrib_tpi.width = width; + attrib_tpi.height = height; + attrib_tpi.type = VAExternalMemoryAndroidGrallocBuffer; + attrib_tpi.buffers = (uint32_t*)&handle; + + st = vaCreateSurfacesWithAttribute( + mDisplay, + width, + height, + va_format, + 1, + surf_id, + &attrib_tpi); + if (st != VA_STATUS_SUCCESS) + return JD_RESOURCE_FAILURE; + return JD_SUCCESS; +} + + diff --git a/imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp b/imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp new file mode 100644 index 0000000..edfaac6 --- /dev/null +++ b/imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp @@ -0,0 +1,499 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012, 2013 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. 
Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +* Authors: +* Nana Guo +* Yao Cheng +* +*/ + +/* + * Initialize VA API related stuff + * + * We will check the return value of jdva_initialize + * to determine which path will be used (SW or HW) + * + */ +//#define LOG_NDEBUG 0 +#define LOG_TAG "ImageDecoder" + +#include +#include "JPEGDecoder_libjpeg_wrapper.h" +#include +#include +#include "JPEGDecoder.h" +#include +#include "va/va_dec_jpeg.h" + +#ifdef NDEBUG +#undef NDEBUG +#endif + +#include + +static Mutex jdlock; + +struct jdva_private +{ + JpegInfo jpg_info; + JpegDecoder decoder; + RenderTarget dec_buffer; + RenderTarget yuy2_buffer; + RenderTarget rgba_buffer; +}; + +static int internal_buffer_handle = 0; + +#define JD_CHECK(err, label) \ + if (err) { \ + ALOGE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ + goto label; \ + } + +#define JD_CHECK_RET(err, label, retcode) \ + if (err) { \ + status = retcode; \ + ALOGE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ + goto label; \ + } + +Decode_Status jdva_initialize (jd_libva_struct * jd_libva_ptr) +{ + /* + * Note: following the libjpeg convention, we do not validate the input + * parameters here. They are needed for error handling, and if they are + * invalid the whole stack is already broken, so checking them and + * returning false would be meaningless. The same holds for all internal + * methods related to the VA API. + */ + uint32_t va_major_version = 0; + uint32_t va_minor_version = 0; + VAStatus va_status = VA_STATUS_SUCCESS; + Decode_Status status = DECODE_SUCCESS; + + if (jd_libva_ptr->initialized) { + ALOGW("%s HW decode already initialized", __FUNCTION__); + return DECODE_NOT_STARTED; + } + + { + Mutex::Autolock autoLock(jdlock); + if (!(jd_libva_ptr->initialized)) { + jdva_private *priv = new jdva_private; + memset(&priv->jpg_info, 0, sizeof(JpegInfo)); + memset(&priv->dec_buffer, 0, sizeof(RenderTarget)); + memset(&priv->yuy2_buffer, 0, sizeof(RenderTarget)); + memset(&priv->rgba_buffer, 0, sizeof(RenderTarget)); + jd_libva_ptr->initialized = TRUE; + jd_libva_ptr->priv = (uint32_t)priv; + status = DECODE_SUCCESS; + } + } +cleanup: + if (status) { + jd_libva_ptr->initialized = TRUE; // make sure we can call into jdva_deinitialize() + jdva_deinitialize (jd_libva_ptr); + return status; + } + + return status; +} +void jdva_deinitialize (jd_libva_struct * jd_libva_ptr) +{ + if (!(jd_libva_ptr->initialized)) { + return; + } + { + Mutex::Autolock autoLock(jdlock); + if (jd_libva_ptr->initialized) { + jdva_private *p = (jdva_private*)jd_libva_ptr->priv; + delete p; + jd_libva_ptr->initialized = FALSE; + } + } + ALOGV("jdva_deinitialize finished"); + return; +} + +RenderTarget * create_render_target(RenderTarget* target, int width, int height, int pixel_format) +{ + hw_module_t const* module = NULL; + alloc_device_t *allocdev = NULL; + struct gralloc_module_t *gralloc_module = NULL; + buffer_handle_t handle; + uint32_t fourcc; + int stride, bpp, err; + fourcc = pixelFormat2Fourcc(pixel_format); + bpp = fourcc2LumaBitsPerPixel(fourcc); + if (target == NULL) { + ALOGE("%s malloc new RenderTarget failed", __FUNCTION__); + return NULL; + } + ALOGV("%s created %s target %p", __FUNCTION__, fourcc2str(NULL, fourcc), target);
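+ /* Editor's illustrative note (an assumption, not part of the original
+ * change): only the formats that downstream consumers appear to read
+ * directly -- the 422H decode output, YUY2 for overlay, RGBA for the
+ * libjpeg output rows -- are backed by a real gralloc buffer; any other
+ * format only needs a unique handle, which the decoder uses as a key into
+ * its internal surface maps. For example, requesting an RGBA target would
+ * take the gralloc branch below:
+ *
+ *     RenderTarget rgba;
+ *     create_render_target(&rgba, width, height, HAL_PIXEL_FORMAT_RGBA_8888);
+ *     // on success: rgba.type == RenderTarget::ANDROID_GRALLOC and
+ *     // rgba.stride is in bytes (gralloc stride scaled by luma pixel size)
+ */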
get gralloc module", __FUNCTION__); + return NULL; + } + gralloc_module = (struct gralloc_module_t *)module; + err = gralloc_open(module, &allocdev); + if (err || !allocdev) { + ALOGE("%s failed to open alloc device", __FUNCTION__); + return NULL; + } + err = allocdev->alloc(allocdev, + width, height, pixel_format, + GRALLOC_USAGE_HW_RENDER, + &handle, &stride); + if (err) { + gralloc_close(allocdev); + ALOGE("%s failed to allocate surface", __FUNCTION__); + return NULL; + } + target->type = RenderTarget::ANDROID_GRALLOC; + target->handle = (int)handle; + target->stride = stride * bpp; + } + else { + *((int*)(&target->type)) = RENDERTARGET_INTERNAL_BUFFER; + target->handle = internal_buffer_handle++; + } + target->width = width; + target->height = height; + target->pixel_format = pixel_format; + target->rect.x = target->rect.y = 0; + target->rect.width = target->width; + target->rect.height = target->height; + return target; +} + +void free_render_target(RenderTarget *target) +{ + if (target == NULL) + return; + uint32_t fourcc = pixelFormat2Fourcc(target->pixel_format); + if (target->type == RenderTarget::ANDROID_GRALLOC) { + buffer_handle_t handle = (buffer_handle_t)target->handle; + hw_module_t const* module = NULL; + alloc_device_t *allocdev = NULL; + struct gralloc_module_t *gralloc_module = NULL; + int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); + if (err || !module) { + ALOGE("%s failed to get gralloc module", __FUNCTION__); + return; + } + gralloc_module = (struct gralloc_module_t *)module; + err = gralloc_open(module, &allocdev); + if (err || !allocdev) { + ALOGE("%s failed to get gralloc module", __FUNCTION__); + return; + } + allocdev->free(allocdev, handle); + gralloc_close(allocdev); + } + ALOGV("%s deleting %s target %p", __FUNCTION__, fourcc2str(NULL, fourcc), target); +} + +void dump_yuy2_target(RenderTarget *target, JpegDecoder *decoder, const char *filename) +{ + uint32_t fourcc = pixelFormat2Fourcc(target->pixel_format); + assert(fourcc == VA_FOURCC_YUY2); + uint8_t *data; + uint32_t offsets[3]; + uint32_t pitches[3]; + JpegDecoder::MapHandle maphandle = decoder->mapData(*target, (void**) &data, offsets, pitches); + assert (maphandle.valid); + FILE* fpdump = fopen(filename, "wb"); + if (fpdump) { + // YUYV + for (int i = 0; i < target->height; ++i) { + fwrite(data + offsets[0] + i * pitches[0], 1, target->width * 2, fpdump); + } + fclose(fpdump); + } + else { + ALOGW("%s failed to create %s", __FUNCTION__, filename); + } + decoder->unmapData(*target, maphandle); +} + +void dump_dec_target(RenderTarget *target, JpegDecoder *decoder, const char *filename) +{ + uint32_t fourcc = pixelFormat2Fourcc(target->pixel_format); + assert((fourcc == VA_FOURCC_IMC3) || + (fourcc == VA_FOURCC_411P) || + (fourcc == VA_FOURCC('4','0','0','P')) || + (fourcc == VA_FOURCC_422H) || + (fourcc == VA_FOURCC_422V) || + (fourcc == VA_FOURCC_444P)); + uint8_t *data; + uint32_t offsets[3]; + uint32_t pitches[3]; + JpegDecoder::MapHandle maphandle = decoder->mapData(*target, (void**) &data, offsets, pitches); + assert (maphandle.valid); + FILE* fpdump = fopen(filename, "wb"); + if(fpdump) { + float hfactor, vfactor; + switch (fourcc) { + case VA_FOURCC_IMC3: + hfactor = 1; + vfactor = 0.5; + break; + case VA_FOURCC_444P: + hfactor = vfactor = 1; + break; + case VA_FOURCC_422H: + hfactor = 0.5; + vfactor = 1; + break; + case VA_FOURCC('4','0','0','P'): + hfactor = vfactor = 0; + break; + case VA_FOURCC_411P: + hfactor = 0.25; + vfactor = 1; + break; + case VA_FOURCC_422V: + hfactor = 
+ switch (fourcc) { + case VA_FOURCC_IMC3: + hfactor = 1; + vfactor = 0.5; + break; + case VA_FOURCC_444P: + hfactor = vfactor = 1; + break; + case VA_FOURCC_422H: + hfactor = 0.5; + vfactor = 1; + break; + case VA_FOURCC('4','0','0','P'): + hfactor = vfactor = 0; + break; + case VA_FOURCC_411P: + hfactor = 0.25; + vfactor = 1; + break; + case VA_FOURCC_422V: + hfactor = 1; + vfactor = 0.5; + break; + default: + hfactor = vfactor = 1; + break; + } + // Y + for (int i = 0; i < target->height; ++i) { + fwrite(data + offsets[0] + i * pitches[0], 1, target->width, fpdump); + } + // U + for (int i = 0; i < target->height * vfactor; ++i) { + fwrite(data + offsets[1] + i * pitches[1], 1, target->width * hfactor, fpdump); + } + // V + for (int i = 0; i < target->height * vfactor; ++i) { + fwrite(data + offsets[2] + i * pitches[2], 1, target->width * hfactor, fpdump); + } + fclose(fpdump); + } + else { + ALOGW("%s failed to create %s", __FUNCTION__, filename); + } + decoder->unmapData(*target, maphandle); +} + + +Decode_Status jdva_decode (j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr) +{ + JpegDecodeStatus st; + char **outbuf = jd_libva_ptr->output_image; + uint32_t lines = jd_libva_ptr->output_lines; + jdva_private * priv = (jdva_private*)jd_libva_ptr->priv; + if (!priv) + return DECODE_DRIVER_FAIL; + + JpegInfo& jpginfo = priv->jpg_info; + + st = priv->decoder.decode(jpginfo, priv->dec_buffer); + if (st != JD_SUCCESS) { + ALOGE("%s: error decoding %s image", __FUNCTION__, fourcc2str(NULL, jpginfo.image_color_fourcc)); + return DECODE_DRIVER_FAIL; + } + ALOGI("%s successfully decoded JPEG with VAAPI", __FUNCTION__); + RenderTarget *src_target = &priv->dec_buffer; + //dump_dec_target(src_target, decoder,"/sdcard/dec_dump.yuv"); + + bool yuy2_csc = false; + hw_module_t const* module = NULL; + alloc_device_t *allocdev = NULL; + struct gralloc_module_t *gralloc_module = NULL; + buffer_handle_t handle; + int err; + uint8_t *data = NULL; + uint32_t offsets[3]; + uint32_t pitches[3]; + JpegDecoder::MapHandle maphandle; + FILE *rgbafile = NULL; + if (jpginfo.image_color_fourcc != VA_FOURCC_422H) + yuy2_csc = true; + + // CSC to YUY2 if needed + if (yuy2_csc) { + st = priv->decoder.blit(*src_target, priv->yuy2_buffer); + if (st != JD_SUCCESS) { + ALOGE("%s: error blitting to YUY2 buffer", __FUNCTION__); + goto cleanup; + } + //dump_yuy2_target(src_target, decoder,"/sdcard/yuy2_dump.yuv"); + src_target = &priv->yuy2_buffer; + } + + st = priv->decoder.blit(*src_target, priv->rgba_buffer); + if (st != JD_SUCCESS) { + ALOGE("%s: error blitting to RGBA buffer", __FUNCTION__); + goto cleanup; + } + maphandle = priv->decoder.mapData(priv->rgba_buffer, (void**) &data, offsets, pitches); + + //rgbafile = fopen("/sdcard/rgba_dump", "wb"); + + for (uint32_t i = 0; i < lines; ++i) { + if (outbuf[i] != NULL) { + //memcpy(outbuf[i], data + offsets[0] + i * pitches[0], 4 * jpginfo.image_width); + for (int j = 0; j < priv->rgba_buffer.width; ++j) { + // BGRA -> RGBA + // R + memcpy(outbuf[i] + 4 * j, data + offsets[0] + i * pitches[0] + 4 * j + 2, 1); + // G + memcpy(outbuf[i] + 4 * j + 1, data + offsets[0] + i * pitches[0] + 4 * j + 1, 1); + // B + memcpy(outbuf[i] + 4 * j + 2, data + offsets[0] + i * pitches[0] + 4 * j, 1); + // A + memcpy(outbuf[i] + 4 * j + 3, data + offsets[0] + i * pitches[0] + 4 * j + 3, 1); + } + } + else { + ALOGE("%s outbuf line %u is NULL", __FUNCTION__, i); + } + //if (rgbafile) { + // fwrite(data + offsets[0] + i * pitches[0], 1, 4 * rgba_target->width, rgbafile); + //} + } + //if (rgbafile) + // fclose(rgbafile); + ALOGI("%s successfully blitted RGBA from JPEG %s data", __FUNCTION__, fourcc2str(NULL, priv->jpg_info.image_color_fourcc)); + priv->decoder.unmapData(priv->rgba_buffer, maphandle); + return DECODE_SUCCESS; + +cleanup: + return DECODE_DRIVER_FAIL; +}
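+ /* Editor's sketch (commented out; assumes the same BGRA pixel layout
+ * handled by the per-byte memcpy loop in jdva_decode above, and is not
+ * part of the original change): the four single-byte memcpy calls per
+ * pixel can be collapsed into plain byte assignments, which reads more
+ * clearly and avoids the per-byte call overhead:
+ *
+ *     const uint8_t *src = data + offsets[0] + i * pitches[0];
+ *     uint8_t *dst = (uint8_t *)outbuf[i];
+ *     for (int j = 0; j < priv->rgba_buffer.width; ++j) {
+ *         dst[4*j+0] = src[4*j+2]; // R <- source B position
+ *         dst[4*j+1] = src[4*j+1]; // G
+ *         dst[4*j+2] = src[4*j+0]; // B <- source R position
+ *         dst[4*j+3] = src[4*j+3]; // A
+ *     }
+ */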
+ +Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr) +{ + VAStatus va_status = VA_STATUS_SUCCESS; + Decode_Status status = DECODE_SUCCESS; + RenderTarget *dec_target, *yuy2_target, *rgba_target; + dec_target = yuy2_target = rgba_target = NULL; + JpegDecodeStatus st; + Mutex::Autolock autoLock(jdlock); + jdva_private *priv = (jdva_private*)jd_libva_ptr->priv; + jd_libva_ptr->image_width = priv->jpg_info.picture_param_buf.picture_width; + jd_libva_ptr->image_height = priv->jpg_info.picture_param_buf.picture_height; + dec_target = create_render_target(&priv->dec_buffer, jd_libva_ptr->image_width,jd_libva_ptr->image_height,fourcc2PixelFormat(priv->jpg_info.image_color_fourcc)); + if (dec_target == NULL) { + ALOGE("%s failed to create decode render target", __FUNCTION__); + return DECODE_MEMORY_FAIL; + } + rgba_target = create_render_target(&priv->rgba_buffer, jd_libva_ptr->image_width,jd_libva_ptr->image_height, HAL_PIXEL_FORMAT_RGBA_8888); + if (rgba_target == NULL) { + ALOGE("%s failed to create RGBA csc buffer", __FUNCTION__); + free_render_target(dec_target); + return DECODE_MEMORY_FAIL; + } + yuy2_target = create_render_target(&priv->yuy2_buffer, jd_libva_ptr->image_width,jd_libva_ptr->image_height, HAL_PIXEL_FORMAT_YCbCr_422_I); + if (yuy2_target == NULL) { + ALOGE("%s failed to create YUY2 csc buffer", __FUNCTION__); + free_render_target(dec_target); + free_render_target(rgba_target); + return DECODE_MEMORY_FAIL; + } + RenderTarget *targetlist[3] = { dec_target, yuy2_target, rgba_target }; + st = priv->decoder.init(jd_libva_ptr->image_width, jd_libva_ptr->image_height, targetlist, 3); + if (st != JD_SUCCESS) { + free_render_target(dec_target); + free_render_target(rgba_target); + free_render_target(yuy2_target); + ALOGE("%s failed to initialize resources for decoder: %d", __FUNCTION__, st); + return DECODE_DRIVER_FAIL; + } + + jd_libva_ptr->resource_allocated = TRUE; + ALOGV("%s successfully set up HW decode resource", __FUNCTION__); + return status; +cleanup: + jd_libva_ptr->resource_allocated = FALSE; + + jdva_deinitialize (jd_libva_ptr); + + return DECODE_DRIVER_FAIL; +} +Decode_Status jdva_release_resource (jd_libva_struct * jd_libva_ptr) +{ + Decode_Status status = DECODE_SUCCESS; + VAStatus va_status = VA_STATUS_SUCCESS; + int i; + + if (!(jd_libva_ptr->resource_allocated)) { + ALOGW("%s decoder resource not yet allocated", __FUNCTION__); + return status; + } + Mutex::Autolock autoLock(jdlock); + + ALOGV("%s deiniting priv 0x%x", __FUNCTION__, jd_libva_ptr->priv); + jdva_private *priv = (jdva_private*)jd_libva_ptr->priv; + if (priv) { + priv->decoder.deinit(); + free_render_target(&priv->dec_buffer); + free_render_target(&priv->yuy2_buffer); + free_render_target(&priv->rgba_buffer); + } + /* + * It is safe to destroy Surface/Config/Context several times + * and it is also safe even if their value is NULL + */ + +cleanup: + + jd_libva_ptr->resource_allocated = FALSE; + + return status; +} +Decode_Status jdva_parse_bitstream(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr) +{ + jdva_private * priv = (jdva_private*)jd_libva_ptr->priv; + if (!priv) + return DECODE_DRIVER_FAIL; + JpegInfo& jpginfo = priv->jpg_info; + jpginfo.buf = jd_libva_ptr->bitstream_buf; + jpginfo.bufsize = jd_libva_ptr->file_size; + JpegDecodeStatus st = priv->decoder.parse(jpginfo); + if (st != JD_SUCCESS) { + ALOGE("%s parser for HW decode failed: %d", __FUNCTION__, st); + return DECODE_PARSER_FAIL; + } + + jd_libva_ptr->image_width = jpginfo.image_width; + jd_libva_ptr->image_height = jpginfo.image_height; + cinfo->original_image_width = 
jpginfo.picture_param_buf.picture_width; /* nominal image width (from SOF marker) */ + cinfo->image_width = jpginfo.picture_param_buf.picture_width; /* nominal image width (from SOF marker) */ + cinfo->image_height = jpginfo.picture_param_buf.picture_height; /* nominal image height */ + cinfo->num_components = jpginfo.picture_param_buf.num_components; /* # of color components in JPEG image */ + cinfo->jpeg_color_space = JCS_YCbCr; /* colorspace of JPEG image */ + cinfo->out_color_space = JCS_RGB; /* colorspace for output */ + cinfo->src->bytes_in_buffer = jd_libva_ptr->file_size; + return DECODE_SUCCESS; +} + diff --git a/imagedecoder/JPEGDecoder_libjpeg_wrapper.h b/imagedecoder/JPEGDecoder_libjpeg_wrapper.h new file mode 100644 index 0000000..c9d060b --- /dev/null +++ b/imagedecoder/JPEGDecoder_libjpeg_wrapper.h @@ -0,0 +1,92 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012, 2013 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +* Authors: +* Nana Guo +* Yao Cheng +* +*/ + +#ifndef JDLIBVA_H +#define JDLIBVA_H + +#include +#include +#include "jpeglib.h" + +#define Display unsigned int +#define BOOL int + +#define JPEG_MAX_COMPONENTS 4 +#define JPEG_MAX_QUANT_TABLES 4 + +typedef struct { + uint8_t* bitstream_buf; + uint32_t image_width; + uint32_t image_height; + + boolean hw_state_ready; + boolean hw_caps_ready; + boolean hw_path; + boolean initialized; + boolean resource_allocated; + + uint32_t file_size; + uint32_t rotation; + + char ** output_image; + uint32_t output_lines; + + uint32_t priv; +} jd_libva_struct; + +typedef enum { + DECODE_NOT_STARTED = -6, + DECODE_INVALID_DATA = -5, + DECODE_DRIVER_FAIL = -4, + DECODE_PARSER_FAIL = -3, + DECODE_MEMORY_FAIL = -2, + DECODE_FAIL = -1, + DECODE_SUCCESS = 0, + +} IMAGE_DECODE_STATUS; + +/*********************** for libjpeg ****************************/ +typedef int32_t Decode_Status; +extern jd_libva_struct jd_libva; +#ifdef __cplusplus +extern "C" { +#endif +Decode_Status jdva_initialize (jd_libva_struct * jd_libva_ptr); +void jdva_deinitialize (jd_libva_struct * jd_libva_ptr); +Decode_Status jdva_decode (j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr); +Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr); +Decode_Status jdva_release_resource (jd_libva_struct * jd_libva_ptr); +Decode_Status jdva_parse_bitstream(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr); +#ifdef __cplusplus +} +#endif + + +#endif + diff --git a/imagedecoder/JPEGParser.c b/imagedecoder/JPEGParser.c deleted file mode 100644 index 4ad13b8..0000000 --- a/imagedecoder/JPEGParser.c +++ /dev/null @@ -1,122 +0,0 @@ -/* INTEL CONFIDENTIAL -* Copyright (c) 2012 Intel Corporation. All rights reserved. -* Copyright (c) Imagination Technologies Limited, UK -* -* The source code contained or described herein and all documents -* related to the source code ("Material") are owned by Intel -* Corporation or its suppliers or licensors. Title to the -* Material remains with Intel Corporation or its suppliers and -* licensors. The Material contains trade secrets and proprietary -* and confidential information of Intel or its suppliers and -* licensors. The Material is protected by worldwide copyright and -* trade secret laws and treaty provisions. No part of the Material -* may be used, copied, reproduced, modified, published, uploaded, -* posted, transmitted, distributed, or disclosed in any way without -* Intel's prior express written permission. -* -* No license under any patent, copyright, trade secret or other -* intellectual property right is granted to or conferred upon you -* by disclosure or delivery of the Materials, either expressly, by -* implication, inducement, estoppel or otherwise. Any license -* under such intellectual property rights must be express and -* approved by Intel in writing. 
-* -* Authors: -* Nana Guo -* -*/ - -#include "JPEGParser.h" - -#include -#include -#include -#include - -uint8_t readNextByte(CJPEGParse* parser) { - uint8_t byte = 0; - - if (parser->parse_index < parser->buff_size) { - byte = *( parser->stream_buff + parser->parse_index ); - parser->parse_index++; - } - - if (parser->parse_index == parser->buff_size) { - parser->end_of_buff = TRUE; - } - - return byte; -} - -uint32_t readBytes( CJPEGParse* parser, uint32_t bytes_to_read ) { - uint32_t bytes = 0; - - while (bytes_to_read-- && !endOfBuffer(parser)) { - bytes |= ( (uint32_t)readNextByte(parser) << ( bytes_to_read * 8 ) ); - } - - return bytes; -} - -void burnBytes( CJPEGParse* parser, uint32_t bytes_to_burn ) { - parser->parse_index += bytes_to_burn; - - if (parser->parse_index >= parser->buff_size) { - parser->parse_index = parser->buff_size - 1; - parser->end_of_buff = TRUE; - } -} - -uint8_t getNextMarker(CJPEGParse* parser) { - while (!endOfBuffer(parser)) { - if (readNextByte(parser) == 0xff) { - break; - } - } - /* check the next byte to make sure we don't miss the real marker*/ - uint8_t tempNextByte = readNextByte(parser); - if (tempNextByte == 0xff) - return readNextByte(parser); - else - return tempNextByte; -} - -boolean setByteOffset(CJPEGParse* parser, uint32_t byte_offset) -{ - boolean offset_found = FALSE; - - if (byte_offset < parser->buff_size) { - parser->parse_index = byte_offset; - offset_found = TRUE; -// end_of_buff = FALSE; - } - - return offset_found; -} - -uint32_t getByteOffset(CJPEGParse* parser) { - return parser->parse_index; -} - -boolean endOfBuffer(CJPEGParse* parser) { - return parser->end_of_buff; -} - -uint8_t* getCurrentIndex(CJPEGParse* parser) { - return parser->stream_buff + parser->parse_index; -} - -void parserInitialize(CJPEGParse* parser, uint8_t* stream_buff, uint32_t buff_size) { - parser->parse_index = 0; - parser->buff_size = buff_size; - parser->stream_buff = stream_buff; - parser->end_of_buff = FALSE; - parser->readNextByte = readNextByte; - parser->readBytes = readBytes; - parser->burnBytes = burnBytes; - parser->getNextMarker = getNextMarker; - parser->getByteOffset = getByteOffset; - parser->endOfBuffer = endOfBuffer; - parser->getCurrentIndex = getCurrentIndex; - parser->setByteOffset= setByteOffset; -} diff --git a/imagedecoder/JPEGParser.cpp b/imagedecoder/JPEGParser.cpp new file mode 100644 index 0000000..1d6ab26 --- /dev/null +++ b/imagedecoder/JPEGParser.cpp @@ -0,0 +1,124 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. 
+* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +* Authors: +* Nana Guo +* +*/ + +#include "JPEGParser.h" + +#include +#include +#include +#include + +bool endOfBuffer(CJPEGParse* parser); + +uint8_t readNextByte(CJPEGParse* parser) { + uint8_t byte = 0; + + if (parser->parse_index < parser->buff_size) { + byte = *( parser->stream_buff + parser->parse_index ); + parser->parse_index++; + } + + if (parser->parse_index == parser->buff_size) { + parser->end_of_buff = true; + } + + return byte; +} + +uint32_t readBytes( CJPEGParse* parser, uint32_t bytes_to_read ) { + uint32_t bytes = 0; + + while (bytes_to_read-- && !endOfBuffer(parser)) { + bytes |= ( (uint32_t)readNextByte(parser) << ( bytes_to_read * 8 ) ); + } + + return bytes; +} + +void burnBytes( CJPEGParse* parser, uint32_t bytes_to_burn ) { + parser->parse_index += bytes_to_burn; + + if (parser->parse_index >= parser->buff_size) { + parser->parse_index = parser->buff_size - 1; + parser->end_of_buff = true; + } +} + +uint8_t getNextMarker(CJPEGParse* parser) { + while (!endOfBuffer(parser)) { + if (readNextByte(parser) == 0xff) { + break; + } + } + /* check the next byte to make sure we don't miss the real marker*/ + uint8_t tempNextByte = readNextByte(parser); + if (tempNextByte == 0xff) + return readNextByte(parser); + else + return tempNextByte; +} + +bool setByteOffset(CJPEGParse* parser, uint32_t byte_offset) +{ + bool offset_found = false; + + if (byte_offset < parser->buff_size) { + parser->parse_index = byte_offset; + offset_found = true; +// end_of_buff = false; + } + + return offset_found; +} + +uint32_t getByteOffset(CJPEGParse* parser) { + return parser->parse_index; +} + +bool endOfBuffer(CJPEGParse* parser) { + return parser->end_of_buff; +} + +uint8_t* getCurrentIndex(CJPEGParse* parser) { + return parser->stream_buff + parser->parse_index; +} + +void parserInitialize(CJPEGParse* parser, uint8_t* stream_buff, uint32_t buff_size) { + parser->parse_index = 0; + parser->buff_size = buff_size; + parser->stream_buff = stream_buff; + parser->end_of_buff = false; + parser->readNextByte = readNextByte; + parser->readBytes = readBytes; + parser->burnBytes = burnBytes; + parser->getNextMarker = getNextMarker; + parser->getByteOffset = getByteOffset; + parser->endOfBuffer = endOfBuffer; + parser->getCurrentIndex = getCurrentIndex; + parser->setByteOffset= setByteOffset; +} diff --git a/imagedecoder/JPEGParser.h b/imagedecoder/JPEGParser.h index 9e8ebd1..be6ac4d 100644 --- a/imagedecoder/JPEGParser.h +++ b/imagedecoder/JPEGParser.h @@ -31,17 +31,6 @@ #include -#ifndef boolean -#define boolean int -#endif - -#ifndef TRUE -#define TRUE 1 -#endif -#ifndef FALSE -#define FALSE 0 -#endif - // Marker Codes #define CODE_SOF_BASELINE 0xC0 #define CODE_SOF1 0xC1 @@ -87,20 +76,20 @@ #define CODE_APP13 0xED #define CODE_APP14 0xEE #define CODE_APP15 0xEF -typedef struct _CJPEGParse CJPEGParse; -struct _CJPEGParse { + +struct CJPEGParse { uint8_t* stream_buff; uint32_t parse_index; uint32_t buff_size; - boolean end_of_buff; + bool end_of_buff; uint8_t (*readNextByte)(CJPEGParse* parser); uint32_t (*readBytes)( CJPEGParse* parser, uint32_t bytes_to_read ); void (*burnBytes)( CJPEGParse* parser, uint32_t bytes_to_burn ); uint8_t 
(*getNextMarker)(CJPEGParse* parser); uint32_t (*getByteOffset)(CJPEGParse* parser); - boolean (*endOfBuffer)(CJPEGParse* parser); + bool (*endOfBuffer)(CJPEGParse* parser); uint8_t* (*getCurrentIndex)(CJPEGParse* parser); - boolean (*setByteOffset)( CJPEGParse* parser, uint32_t byte_offset ); + bool (*setByteOffset)( CJPEGParse* parser, uint32_t byte_offset ); }; void parserInitialize(CJPEGParse* parser, uint8_t* stream_buff, uint32_t buff_size); diff --git a/imagedecoder/test/testdecode.cpp b/imagedecoder/test/testdecode.cpp new file mode 100644 index 0000000..6823b85 --- /dev/null +++ b/imagedecoder/test/testdecode.cpp @@ -0,0 +1,428 @@ +#include "JPEGDecoder.h" +#include "JPEGBlitter.h" +#include "JPEGCommon_Gen.h" +#include +#include +#include +#undef NDEBUG +#include +#include + +#define JPGFILE "/sdcard/1280x720xYUV422H.jpg" + +RenderTarget& init_render_target(RenderTarget &target, int width, int height, int pixel_format) +{ + hw_module_t const* module = NULL; + alloc_device_t *allocdev = NULL; + struct gralloc_module_t *gralloc_module = NULL; + buffer_handle_t handle; + uint32_t fourcc; + int stride, bpp, err; + fourcc = pixelFormat2Fourcc(pixel_format); + bpp = fourcc2LumaBitsPerPixel(fourcc); + err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); + if (err || !module) { + printf("%s failed to get gralloc module\n", __PRETTY_FUNCTION__); + assert(false); + } + gralloc_module = (struct gralloc_module_t *)module; + err = gralloc_open(module, &allocdev); + if (err || !allocdev) { + printf("%s failed to open alloc device\n", __PRETTY_FUNCTION__); + assert(false); + } + err = allocdev->alloc(allocdev, + width, + height, + pixel_format, + GRALLOC_USAGE_HW_RENDER, + &handle, + &stride); + if (err) { + gralloc_close(allocdev); + printf("%s failed to allocate surface %d, %dx%d, pixelformat %x\n", __PRETTY_FUNCTION__, err, + width, height, pixel_format); + assert(false); + } + target.type = RenderTarget::ANDROID_GRALLOC; + target.handle = (int)handle; + target.width = width; + target.height = height; + target.pixel_format = pixel_format; + target.rect.x = target.rect.y = 0; + target.rect.width = target.width; + target.rect.height = target.height; + target.stride = stride * bpp; + return target; +} + +void deinit_render_target(RenderTarget &target) +{ + buffer_handle_t handle = (buffer_handle_t)target.handle; + hw_module_t const* module = NULL; + alloc_device_t *allocdev = NULL; + struct gralloc_module_t *gralloc_module = NULL; + int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); + if (err || !module) { + printf("%s failed to get gralloc module\n", __PRETTY_FUNCTION__); + return; + } + gralloc_module = (struct gralloc_module_t *)module; + err = gralloc_open(module, &allocdev); + if (err || !allocdev) { + printf("%s failed to get gralloc module\n", __PRETTY_FUNCTION__); + return; + } + allocdev->free(allocdev, handle); + gralloc_close(allocdev); +} + +void decode_blit_functionality_test() +{ + JpegDecodeStatus st; + JpegInfo jpginfo; + hw_module_t const* module = NULL; + alloc_device_t *allocdev = NULL; + struct gralloc_module_t *gralloc_module = NULL; + buffer_handle_t handle; + JpegDecoder decoder; + JpegBlitter blitter; + blitter.setDecoder(decoder); + RenderTarget targets[5]; + RenderTarget *dec_target, *blit_nv12_target, *blit_rgba_target, *blit_yuy2_target, *blit_yv12_target; + FILE* fp = fopen(JPGFILE, "rb"); + assert(fp); + fseek(fp, 0, SEEK_END); + jpginfo.bufsize = ftell(fp); + fseek(fp, 0, SEEK_SET); + jpginfo.buf = new uint8_t[jpginfo.bufsize]; + fread(jpginfo.buf, 
1, jpginfo.bufsize, fp); + fclose(fp); + + printf("finished loading src file: size %u\n", jpginfo.bufsize); + st = decoder.parse(jpginfo); + assert(st == JD_SUCCESS); + + init_render_target(targets[0], jpginfo.image_width, jpginfo.image_height, jpginfo.image_pixel_format); + init_render_target(targets[1], jpginfo.image_width, jpginfo.image_height, HAL_PIXEL_FORMAT_NV12_TILED_INTEL); + init_render_target(targets[2], jpginfo.image_width, jpginfo.image_height, HAL_PIXEL_FORMAT_RGBA_8888); + init_render_target(targets[3], jpginfo.image_width, jpginfo.image_height, HAL_PIXEL_FORMAT_YCbCr_422_I); + init_render_target(targets[4], jpginfo.image_width, jpginfo.image_height, HAL_PIXEL_FORMAT_YV12); + dec_target = &targets[0]; + blit_nv12_target = &targets[1]; + blit_rgba_target = &targets[2]; + blit_yuy2_target = &targets[3]; + blit_yv12_target = &targets[4]; + dec_target->rect.x = blit_nv12_target->rect.x = blit_yuy2_target->rect.x = blit_rgba_target->rect.x = blit_yv12_target->rect.x = 0; + dec_target->rect.y = blit_nv12_target->rect.y = blit_yuy2_target->rect.y = blit_rgba_target->rect.y = blit_yv12_target->rect.y = 0; + dec_target->rect.width = blit_nv12_target->rect.width = blit_yuy2_target->rect.width = blit_rgba_target->rect.width = blit_yv12_target->rect.width = jpginfo.image_width; + dec_target->rect.height = blit_nv12_target->rect.height = blit_yuy2_target->rect.height = blit_rgba_target->rect.height = blit_yv12_target->rect.height = jpginfo.image_height; + RenderTarget* targetlist[5] = {dec_target, blit_nv12_target, blit_rgba_target, blit_yuy2_target, blit_yv12_target }; + //st = decoder.init(jpginfo.image_width, jpginfo.image_height, targetlist, 5); + st = decoder.init(jpginfo.image_width, jpginfo.image_height, &dec_target, 1); + assert(st == JD_SUCCESS); + + //jpginfo.render_target = dec_target; + st = decoder.decode(jpginfo, *dec_target); + printf("decode returns %d\n", st); + assert(st == JD_SUCCESS); + + uint8_t *data; + uint32_t offsets[3]; + uint32_t pitches[3]; + JpegDecoder::MapHandle maphandle = decoder.mapData(*dec_target, (void**) &data, offsets, pitches); + assert (maphandle.valid); + FILE* fpdump = fopen("/sdcard/dec_dump.yuv", "wb"); + assert(fpdump); + // Y + for (int i = 0; i < dec_target->height; ++i) { + fwrite(data + offsets[0] + i * pitches[0], 1, dec_target->width, fpdump); + } + // U + for (int i = 0; i < dec_target->height; ++i) { + fwrite(data + offsets[1] + i * pitches[1], 1, dec_target->width/2, fpdump); + } + // V + for (int i = 0; i < dec_target->height; ++i) { + fwrite(data + offsets[2] + i * pitches[2], 1, dec_target->width/2, fpdump); + } + fclose(fpdump); + printf("Dumped decoded YUV to /sdcard/dec_dump.yuv\n"); + decoder.unmapData(*dec_target, maphandle); + + st = decoder.blit(*dec_target, *blit_nv12_target); + assert(st == JD_SUCCESS); + + maphandle = decoder.mapData(*blit_nv12_target, (void**) &data, offsets, pitches); + assert (maphandle.valid); + fpdump = fopen("/sdcard/nv12_dump.yuv", "wb"); + assert(fpdump); + // Y + for (int i = 0; i < blit_nv12_target->height; ++i) { + fwrite(data + offsets[0] + i * pitches[0], 1, blit_nv12_target->width, fpdump); + } + // UV + for (int i = 0; i < blit_nv12_target->height/2; ++i) { + fwrite(data + offsets[1] + i * pitches[1], 1, blit_nv12_target->width, fpdump); + } + fclose(fpdump); + printf("Dumped converted NV12 to /sdcard/nv12_dump.yuv\n"); + decoder.unmapData(*blit_nv12_target, maphandle); + + st = decoder.blit(*dec_target, *blit_yuy2_target); + assert(st == JD_SUCCESS); + maphandle = 
decoder.mapData(*blit_yuy2_target, (void**) &data, offsets, pitches); + assert (maphandle.valid); + fpdump = fopen("/sdcard/yuy2_dump.yuv", "wb"); + assert(fpdump); + // YUYV + for (int i = 0; i < blit_yuy2_target->height; ++i) { + fwrite(data + offsets[0] + i * pitches[0], 2, blit_yuy2_target->width, fpdump); + } + fclose(fpdump); + printf("Dumped converted YUY2 to /sdcard/yuy2_dump.yuv\n"); + decoder.unmapData(*blit_yuy2_target, maphandle); + + st = decoder.blit(*dec_target, *blit_rgba_target); + assert(st == JD_SUCCESS); + maphandle = decoder.mapData(*blit_rgba_target, (void**) &data, offsets, pitches); + assert (maphandle.valid); + fpdump = fopen("/sdcard/rgba_dump.yuv", "wb"); + assert(fpdump); + // RGBA + for (int i = 0; i < blit_rgba_target->height; ++i) { + fwrite(data + offsets[0] + i * pitches[0], 4, blit_rgba_target->width, fpdump); + } + fclose(fpdump); + printf("Dumped converted RGBA to /sdcard/rgba_dump.yuv\n"); + decoder.unmapData(*blit_rgba_target, maphandle); + + st = decoder.blit(*dec_target, *blit_yv12_target); + assert(st == JD_SUCCESS); + maphandle = decoder.mapData(*blit_yv12_target, (void**) &data, offsets, pitches); + assert (maphandle.valid); + fpdump = fopen("/sdcard/yv12_dump.yuv", "wb"); + assert(fpdump); + // YV12 + for (int i = 0; i < blit_yv12_target->height; ++i) { + fwrite(data + offsets[0] + i * pitches[0], 1, blit_yv12_target->width, fpdump); + } + for (int i = 0; i < blit_yv12_target->height/2; ++i) { + fwrite(data + offsets[1] + i * pitches[1], 1, blit_yv12_target->width/2, fpdump); + } + for (int i = 0; i < blit_yv12_target->height/2; ++i) { + fwrite(data + offsets[2] + i * pitches[2], 1, blit_yv12_target->width/2, fpdump); + } + fclose(fpdump); + printf("Dumped converted YV12 to /sdcard/yv12_dump.yuv\n"); + decoder.unmapData(*blit_yv12_target, maphandle); + + + decoder.deinit(); + + deinit_render_target(*dec_target); + deinit_render_target(*blit_nv12_target); + deinit_render_target(*blit_yuy2_target); + deinit_render_target(*blit_rgba_target); + deinit_render_target(*blit_yv12_target); + delete[] jpginfo.buf; + +} + +enum target_state +{ + TARGET_FREE, + TARGET_DECODE, + TARGET_BLIT, +}; + +struct thread_param +{ + JpegDecoder *decoder; + RenderTarget *targets; + RenderTarget *nv12_targets; + RenderTarget *yuy2_targets; + RenderTarget *imc3_targets; + size_t target_count; + target_state *states; +}; + +static Mutex state_lock; + +void read_new_frame(JpegInfo &jpginfo) +{ + memset(&jpginfo, 0, sizeof(JpegInfo)); + FILE* fp = fopen(JPGFILE, "rb"); + assert(fp); + fseek(fp, 0, SEEK_END); + jpginfo.bufsize = ftell(fp); + fseek(fp, 0, SEEK_SET); + jpginfo.buf = new uint8_t[jpginfo.bufsize]; + fread(jpginfo.buf, 1, jpginfo.bufsize, fp); + fclose(fp); +} + +static bool exit_thread = false; + +#define VPP_DECODE_BATCH + +void* decode_frame_threadproc(void* data) +{ + thread_param *param = (thread_param*) data; + JpegInfo *jpginfos = new JpegInfo[param->target_count]; + int surface_id = 0; + int blit_surface_id = (surface_id + param->target_count - 1) % param->target_count; + while(!exit_thread) { + printf("%s blit %d and decode %d\n", __FUNCTION__, blit_surface_id, surface_id); + RenderTarget& cur_target = param->targets[surface_id]; +#ifdef VPP_DECODE_BATCH + RenderTarget& blit_target = param->targets[blit_surface_id]; + RenderTarget& blit_nv12_target = param->nv12_targets[blit_surface_id]; + RenderTarget& blit_yuy2_target = param->yuy2_targets[blit_surface_id]; + if (param->states[blit_surface_id] == TARGET_BLIT) { + printf("%s blit with surface %d\n", 
__FUNCTION__, blit_surface_id); + nsecs_t t1 = systemTime(); + if (param->decoder->busy(blit_target)) { + param->decoder->sync(blit_target); + nsecs_t t2 = systemTime(); + printf("%s wait surface %d decode took %f ms\n", __FUNCTION__, blit_surface_id, ns2us(t2 - t1)/1000.0); + param->states[blit_surface_id] = TARGET_FREE; + } + t1 = systemTime(); + param->decoder->blit(blit_target, blit_nv12_target); + nsecs_t t2 = systemTime(); + param->decoder->blit(blit_target, blit_yuy2_target); + nsecs_t t3 = systemTime(); + printf("%s blit %d NV12 took %f ms, YUY2 took %f ms\n", + __FUNCTION__, + blit_surface_id, ns2us(t2 - t1)/1000.0, + ns2us(t3 - t2)/1000.0); + param->states[blit_surface_id] = TARGET_FREE; + } +#endif + if (param->states[surface_id] != TARGET_FREE) { + printf("%s wait surface %d blit finish\n", __FUNCTION__, surface_id); + nsecs_t t1 = systemTime(); + while (param->states[surface_id] != TARGET_FREE) { + usleep(1000); + } + nsecs_t t2 = systemTime(); + printf("%s wait surface %d for decode/blit finish took %f ms\n", __FUNCTION__, surface_id, ns2us(t2 - t1)/1000.0); + } + JpegInfo &jpginfo = jpginfos[surface_id]; + read_new_frame(jpginfo); + nsecs_t t3 = systemTime(); + param->decoder->parse(jpginfo); + nsecs_t t4 = systemTime(); + printf("%s parse surface %d took %f ms\n", __FUNCTION__, surface_id, ns2us(t4 - t3)/1000.0); + param->states[surface_id] = TARGET_DECODE; + param->decoder->decode(jpginfo, cur_target); + nsecs_t t5 = systemTime(); + printf("%s decode surface %d took %f ms\n", __FUNCTION__, surface_id, ns2us(t5 - t4)/1000.0); + param->states[surface_id] = TARGET_BLIT; + surface_id = (surface_id + 1) % param->target_count; + blit_surface_id = (blit_surface_id + 1) % param->target_count; + } + delete[] jpginfos; + return NULL; +} + +void* blit_frame_threadproc(void* data) +{ + thread_param *param = (thread_param*) data; + int surface_id = 0; + while(!exit_thread) { + printf("%s blit %d->%d\n", __FUNCTION__, surface_id, surface_id); + RenderTarget& dec_target = param->targets[surface_id]; + RenderTarget& blit_target = param->nv12_targets[surface_id]; + if (param->states[surface_id] != TARGET_BLIT) { + printf("%s wait surface %d decoding finish\n", __FUNCTION__, surface_id); + nsecs_t t1 = systemTime(); + while (param->states[surface_id] != TARGET_BLIT) { + usleep(100); + } + nsecs_t t2 = systemTime(); + printf("%s wait surface %d for decode finish took %f ms\n", __FUNCTION__, surface_id, ns2us(t2 - t1)/1000.0); + } + nsecs_t t3 = systemTime(); + param->decoder->blit(dec_target, blit_target); + nsecs_t t4 = systemTime(); + printf("%s blit surface %d took %f ms\n", __FUNCTION__, surface_id, ns2us(t4 - t3)/1000.0); + param->states[surface_id] = TARGET_FREE; + surface_id = (surface_id + 1) % param->target_count; + } + return NULL; +} + +void parallel_decode_blit_test() +{ + RenderTarget **all_targets = new RenderTarget*[12]; + RenderTarget dec_targets[12]; + RenderTarget nv12_targets[12]; + RenderTarget yuy2_targets[12]; + RenderTarget imc3_targets[12]; + JpegInfo jpginfos[12]; + target_state states[12]; + for (int i = 0; i < 12; ++i) { + init_render_target(dec_targets[i], 1280, 720, fourcc2PixelFormat(VA_FOURCC_422H)); // 422H + init_render_target(nv12_targets[i], 1280, 720, fourcc2PixelFormat(VA_FOURCC_NV12)); // NV12 for video encode + init_render_target(yuy2_targets[i], 1280, 720, fourcc2PixelFormat(VA_FOURCC_YUY2)); // YUY2 for overlay + //init_render_target(imc3_targets[i], 1280, 720, HAL_PIXEL_FORMAT_IMC3); // IMC3 for libjpeg encode + jpginfos[i].buf = new uint8_t[2 * 1024 
* 1024]; + all_targets[i] = &dec_targets[i]; + //all_targets[i + 12] = &nv12_targets[i]; + //all_targets[i + 24] = &yuy2_targets[i]; + //all_targets[i + 36] = &imc3_targets[i]; + states[i] = TARGET_FREE; + } + + exit_thread = false; + + pthread_attr_t dec_attr, blit_attr; + pthread_attr_init(&dec_attr); + pthread_attr_init(&blit_attr); + pthread_attr_setdetachstate(&dec_attr, PTHREAD_CREATE_JOINABLE); + pthread_attr_setdetachstate(&blit_attr, PTHREAD_CREATE_JOINABLE); + pthread_t dec_thread, blit_thread; + thread_param param; + param.nv12_targets = nv12_targets; + param.yuy2_targets = yuy2_targets; + param.imc3_targets = imc3_targets; + param.targets = dec_targets; + param.target_count = 12; + param.decoder = new JpegDecoder(); + //param.decoder->init(1280, 720, all_targets, 36); + param.decoder->init(1280, 720, all_targets, 12); + param.states = states; + pthread_create(&dec_thread, &dec_attr, decode_frame_threadproc, (void*)&param); +#ifndef VPP_DECODE_BATCH + pthread_create(&blit_thread, &blit_attr, blit_frame_threadproc, (void*)&param); +#endif + pthread_attr_destroy(&blit_attr); + pthread_attr_destroy(&dec_attr); + + // test for 1 minute + usleep(60 * 1000 * 1000); + exit_thread = true; + void *dummy; + pthread_join(dec_thread, &dummy); +#ifndef VPP_DECODE_BATCH + pthread_join(blit_thread, &dummy); +#endif + + for (int i = 0; i < 12; ++i) { + delete[] jpginfos[i].buf; + deinit_render_target(dec_targets[i]); + deinit_render_target(nv12_targets[i]); + deinit_render_target(yuy2_targets[i]); + //deinit_render_target(imc3_targets[i]); + } + delete[] all_targets; +} + +int main(int argc, char ** argv) +{ + //decode_blit_functionality_test(); + parallel_decode_blit_test(); + return 0; +} -- cgit v1.2.3 From 5b0a2f8c19d4fbfdbdf2baca5d1201aa06c61b40 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Mon, 26 Aug 2013 21:15:41 +0800 Subject: LibMIX vbp parser code re-structure BZ: 131068 The LibMIX vbp parser code is re-structured and legacy code is removed. 
Change-Id: I8216a21f39f29bce7ac5f6aaa25e164806e8f012 Signed-off-by: wfeng6 Signed-off-by: Gu, Wangyi Reviewed-on: http://android.intel.com:8080/130377 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- mixvbp/Android.mk | 13 + mixvbp/include/vbp_trace.h | 66 + mixvbp/include/viddec_debug.h | 31 + mixvbp/include/viddec_fw_common_defs.h | 223 ++ mixvbp/include/viddec_fw_decoder_host.h | 242 ++ mixvbp/include/viddec_fw_frame_attr.h | 294 ++ mixvbp/include/viddec_fw_item_types.h | 784 ++++ mixvbp/include/viddec_fw_parser_host.h | 237 ++ mixvbp/include/viddec_fw_workload.h | 152 + mixvbp/vbp_manager/Android.mk | 62 + mixvbp/vbp_manager/include/vbp_common.h | 9 + mixvbp/vbp_manager/include/viddec_parser_ops.h | 121 + mixvbp/vbp_manager/include/viddec_pm.h | 93 + mixvbp/vbp_manager/include/viddec_pm_parse.h | 24 + .../vbp_manager/include/viddec_pm_utils_bstream.h | 88 + mixvbp/vbp_manager/include/viddec_pm_utils_list.h | 50 + .../secvideo/baytrail/vbp_h264secure_parser.c | 1830 +++++++++ .../secvideo/baytrail/vbp_h264secure_parser.h | 70 + mixvbp/vbp_manager/vbp_h264_parser.c | 1751 ++++++++ mixvbp/vbp_manager/vbp_h264_parser.h | 67 + mixvbp/vbp_manager/vbp_loader.c | 205 + mixvbp/vbp_manager/vbp_loader.h | 476 +++ mixvbp/vbp_manager/vbp_mp42_parser.c | 1483 +++++++ mixvbp/vbp_manager/vbp_mp42_parser.h | 66 + mixvbp/vbp_manager/vbp_utils.c | 618 +++ mixvbp/vbp_manager/vbp_utils.h | 140 + mixvbp/vbp_manager/vbp_vc1_parser.c | 1126 ++++++ mixvbp/vbp_manager/vbp_vc1_parser.h | 70 + mixvbp/vbp_manager/vbp_vp8_parser.c | 532 +++ mixvbp/vbp_manager/vbp_vp8_parser.h | 67 + mixvbp/vbp_manager/viddec_parse_sc.c | 218 + mixvbp/vbp_manager/viddec_pm_parser_ops.c | 97 + mixvbp/vbp_manager/viddec_pm_utils_bstream.c | 500 +++ mixvbp/vbp_plugin/common/README | 1 + mixvbp/vbp_plugin/h264/Android.mk | 66 + mixvbp/vbp_plugin/h264/h264parse.c | 795 ++++ mixvbp/vbp_plugin/h264/h264parse_bsd.c | 228 ++ mixvbp/vbp_plugin/h264/h264parse_dpb.c | 4222 ++++++++++++++++++++ mixvbp/vbp_plugin/h264/h264parse_math.c | 84 + mixvbp/vbp_plugin/h264/h264parse_mem.c | 198 + mixvbp/vbp_plugin/h264/h264parse_pps.c | 194 + mixvbp/vbp_plugin/h264/h264parse_sei.c | 1138 ++++++ mixvbp/vbp_plugin/h264/h264parse_sh.c | 837 ++++ mixvbp/vbp_plugin/h264/h264parse_sps.c | 691 ++++ mixvbp/vbp_plugin/h264/include/h264.h | 1118 ++++++ mixvbp/vbp_plugin/h264/include/h264parse.h | 179 + mixvbp/vbp_plugin/h264/include/h264parse_dpb.h | 109 + mixvbp/vbp_plugin/h264/include/h264parse_sei.h | 314 ++ mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c | 574 +++ .../secvideo/baytrail/viddec_h264secure_parse.c | 802 ++++ mixvbp/vbp_plugin/h264/viddec_h264_parse.c | 607 +++ mixvbp/vbp_plugin/h264/viddec_h264_workload.c | 1195 ++++++ mixvbp/vbp_plugin/mp2/include/mpeg2.h | 195 + mixvbp/vbp_plugin/mp2/include/viddec_mpeg2.h | 231 ++ mixvbp/vbp_plugin/mp2/mix_vbp_mpeg2_stubs.c | 32 + mixvbp/vbp_plugin/mp2/viddec_mpeg2_frame_attr.c | 121 + mixvbp/vbp_plugin/mp2/viddec_mpeg2_metadata.c | 1039 +++++ mixvbp/vbp_plugin/mp2/viddec_mpeg2_parse.c | 380 ++ mixvbp/vbp_plugin/mp2/viddec_mpeg2_workload.c | 461 +++ mixvbp/vbp_plugin/mp4/Android.mk | 28 + mixvbp/vbp_plugin/mp4/include/viddec_fw_mp4.h | 231 ++ mixvbp/vbp_plugin/mp4/viddec_fw_mp4_workload.c | 377 ++ .../mp4/viddec_mp4_decodevideoobjectplane.c | 98 + .../mp4/viddec_mp4_decodevideoobjectplane.h | 10 + mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c | 191 + mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h | 529 +++ mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c | 367 ++ 
mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.h | 10 + .../vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c | 629 +++ .../vbp_plugin/mp4/viddec_mp4_videoobjectlayer.h | 16 + .../vbp_plugin/mp4/viddec_mp4_videoobjectplane.c | 423 ++ .../vbp_plugin/mp4/viddec_mp4_videoobjectplane.h | 10 + mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c | 290 ++ mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.h | 12 + mixvbp/vbp_plugin/mp4/viddec_parse_sc_mp4.c | 151 + mixvbp/vbp_plugin/vc1/Android.mk | 36 + mixvbp/vbp_plugin/vc1/include/vc1common.h | 143 + mixvbp/vbp_plugin/vc1/mix_vbp_vc1_stubs.c | 30 + mixvbp/vbp_plugin/vc1/vc1.h | 236 ++ mixvbp/vbp_plugin/vc1/vc1parse.c | 604 +++ mixvbp/vbp_plugin/vc1/vc1parse.h | 140 + mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c | 716 ++++ mixvbp/vbp_plugin/vc1/vc1parse_bpic.c | 99 + mixvbp/vbp_plugin/vc1/vc1parse_bpic_adv.c | 256 ++ mixvbp/vbp_plugin/vc1/vc1parse_common_defs.h | 645 +++ mixvbp/vbp_plugin/vc1/vc1parse_common_tables.c | 198 + mixvbp/vbp_plugin/vc1/vc1parse_huffman.c | 97 + mixvbp/vbp_plugin/vc1/vc1parse_ipic.c | 101 + mixvbp/vbp_plugin/vc1/vc1parse_ipic_adv.c | 256 ++ mixvbp/vbp_plugin/vc1/vc1parse_mv_com.c | 82 + mixvbp/vbp_plugin/vc1/vc1parse_pic_com.c | 101 + mixvbp/vbp_plugin/vc1/vc1parse_pic_com_adv.c | 404 ++ mixvbp/vbp_plugin/vc1/vc1parse_ppic.c | 148 + mixvbp/vbp_plugin/vc1/vc1parse_ppic_adv.c | 367 ++ mixvbp/vbp_plugin/vc1/vc1parse_vopdq.c | 130 + mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c | 401 ++ mixvbp/vbp_plugin/vc1/viddec_vc1_workload.c | 960 +++++ mixvbp/vbp_plugin/vp8/Android.mk | 24 + mixvbp/vbp_plugin/vp8/bool_coder.c | 95 + mixvbp/vbp_plugin/vp8/include/bool_coder.h | 54 + mixvbp/vbp_plugin/vp8/include/vp8.h | 356 ++ mixvbp/vbp_plugin/vp8/include/vp8_tables.h | 538 +++ mixvbp/vbp_plugin/vp8/include/vp8parse.h | 72 + mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c | 119 + mixvbp/vbp_plugin/vp8/vp8parse.c | 605 +++ 105 files changed, 39001 insertions(+) create mode 100644 mixvbp/Android.mk create mode 100755 mixvbp/include/vbp_trace.h create mode 100755 mixvbp/include/viddec_debug.h create mode 100644 mixvbp/include/viddec_fw_common_defs.h create mode 100644 mixvbp/include/viddec_fw_decoder_host.h create mode 100644 mixvbp/include/viddec_fw_frame_attr.h create mode 100644 mixvbp/include/viddec_fw_item_types.h create mode 100644 mixvbp/include/viddec_fw_parser_host.h create mode 100644 mixvbp/include/viddec_fw_workload.h create mode 100755 mixvbp/vbp_manager/Android.mk create mode 100755 mixvbp/vbp_manager/include/vbp_common.h create mode 100755 mixvbp/vbp_manager/include/viddec_parser_ops.h create mode 100755 mixvbp/vbp_manager/include/viddec_pm.h create mode 100755 mixvbp/vbp_manager/include/viddec_pm_parse.h create mode 100755 mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h create mode 100755 mixvbp/vbp_manager/include/viddec_pm_utils_list.h create mode 100644 mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.c create mode 100644 mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.h create mode 100755 mixvbp/vbp_manager/vbp_h264_parser.c create mode 100755 mixvbp/vbp_manager/vbp_h264_parser.h create mode 100755 mixvbp/vbp_manager/vbp_loader.c create mode 100755 mixvbp/vbp_manager/vbp_loader.h create mode 100755 mixvbp/vbp_manager/vbp_mp42_parser.c create mode 100755 mixvbp/vbp_manager/vbp_mp42_parser.h create mode 100755 mixvbp/vbp_manager/vbp_utils.c create mode 100755 mixvbp/vbp_manager/vbp_utils.h create mode 100755 mixvbp/vbp_manager/vbp_vc1_parser.c create mode 100755 mixvbp/vbp_manager/vbp_vc1_parser.h create mode 100755 
 create mode 100755 mixvbp/vbp_manager/vbp_vp8_parser.c
 create mode 100755 mixvbp/vbp_manager/vbp_vp8_parser.h
 create mode 100755 mixvbp/vbp_manager/viddec_parse_sc.c
 create mode 100755 mixvbp/vbp_manager/viddec_pm_parser_ops.c
 create mode 100755 mixvbp/vbp_manager/viddec_pm_utils_bstream.c
 create mode 100644 mixvbp/vbp_plugin/common/README
 create mode 100755 mixvbp/vbp_plugin/h264/Android.mk
 create mode 100755 mixvbp/vbp_plugin/h264/h264parse.c
 create mode 100755 mixvbp/vbp_plugin/h264/h264parse_bsd.c
 create mode 100755 mixvbp/vbp_plugin/h264/h264parse_dpb.c
 create mode 100755 mixvbp/vbp_plugin/h264/h264parse_math.c
 create mode 100755 mixvbp/vbp_plugin/h264/h264parse_mem.c
 create mode 100755 mixvbp/vbp_plugin/h264/h264parse_pps.c
 create mode 100755 mixvbp/vbp_plugin/h264/h264parse_sei.c
 create mode 100755 mixvbp/vbp_plugin/h264/h264parse_sh.c
 create mode 100755 mixvbp/vbp_plugin/h264/h264parse_sps.c
 create mode 100755 mixvbp/vbp_plugin/h264/include/h264.h
 create mode 100755 mixvbp/vbp_plugin/h264/include/h264parse.h
 create mode 100755 mixvbp/vbp_plugin/h264/include/h264parse_dpb.h
 create mode 100755 mixvbp/vbp_plugin/h264/include/h264parse_sei.h
 create mode 100755 mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c
 create mode 100755 mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c
 create mode 100755 mixvbp/vbp_plugin/h264/viddec_h264_parse.c
 create mode 100755 mixvbp/vbp_plugin/h264/viddec_h264_workload.c
 create mode 100755 mixvbp/vbp_plugin/mp2/include/mpeg2.h
 create mode 100755 mixvbp/vbp_plugin/mp2/include/viddec_mpeg2.h
 create mode 100755 mixvbp/vbp_plugin/mp2/mix_vbp_mpeg2_stubs.c
 create mode 100755 mixvbp/vbp_plugin/mp2/viddec_mpeg2_frame_attr.c
 create mode 100755 mixvbp/vbp_plugin/mp2/viddec_mpeg2_metadata.c
 create mode 100755 mixvbp/vbp_plugin/mp2/viddec_mpeg2_parse.c
 create mode 100755 mixvbp/vbp_plugin/mp2/viddec_mpeg2_workload.c
 create mode 100755 mixvbp/vbp_plugin/mp4/Android.mk
 create mode 100755 mixvbp/vbp_plugin/mp4/include/viddec_fw_mp4.h
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_fw_mp4_workload.c
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.c
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.h
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.h
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.h
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.h
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.h
 create mode 100755 mixvbp/vbp_plugin/mp4/viddec_parse_sc_mp4.c
 create mode 100755 mixvbp/vbp_plugin/vc1/Android.mk
 create mode 100755 mixvbp/vbp_plugin/vc1/include/vc1common.h
 create mode 100755 mixvbp/vbp_plugin/vc1/mix_vbp_vc1_stubs.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1.h
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse.h
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_bpic.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_bpic_adv.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_common_defs.h
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_common_tables.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_huffman.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_ipic.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_ipic_adv.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_mv_com.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_pic_com.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_pic_com_adv.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_ppic.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_ppic_adv.c
 create mode 100755 mixvbp/vbp_plugin/vc1/vc1parse_vopdq.c
 create mode 100755 mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c
 create mode 100755 mixvbp/vbp_plugin/vc1/viddec_vc1_workload.c
 create mode 100755 mixvbp/vbp_plugin/vp8/Android.mk
 create mode 100755 mixvbp/vbp_plugin/vp8/bool_coder.c
 create mode 100755 mixvbp/vbp_plugin/vp8/include/bool_coder.h
 create mode 100755 mixvbp/vbp_plugin/vp8/include/vp8.h
 create mode 100755 mixvbp/vbp_plugin/vp8/include/vp8_tables.h
 create mode 100755 mixvbp/vbp_plugin/vp8/include/vp8parse.h
 create mode 100755 mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c
 create mode 100755 mixvbp/vbp_plugin/vp8/vp8parse.c

diff --git a/mixvbp/Android.mk b/mixvbp/Android.mk
new file mode 100644
index 0000000..01ddde2
--- /dev/null
+++ b/mixvbp/Android.mk
@@ -0,0 +1,13 @@
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+MIXVBP_DIR := $(LOCAL_PATH)
+
+include $(MIXVBP_DIR)/vbp_manager/Android.mk
+include $(MIXVBP_DIR)/vbp_plugin/h264/Android.mk
+include $(MIXVBP_DIR)/vbp_plugin/mp4/Android.mk
+include $(MIXVBP_DIR)/vbp_plugin/vc1/Android.mk
+
+ifeq ($(USE_HW_VP8),true)
+include $(MIXVBP_DIR)/vbp_plugin/vp8/Android.mk
+endif
diff --git a/mixvbp/include/vbp_trace.h b/mixvbp/include/vbp_trace.h
new file mode 100755
index 0000000..fde232c
--- /dev/null
+++ b/mixvbp/include/vbp_trace.h
@@ -0,0 +1,66 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#ifndef VBP_TRACE_H_
+#define VBP_TRACE_H_
+
+
+
+#define VBP_TRACE
+
+
+#ifdef VBP_TRACE /* if VBP_TRACE is defined */
+
+#ifndef ANDROID
+
+#include <stdio.h>
+#include <stdarg.h>
+
+extern void vbp_trace_util(const char* cat, const char* fun, int line, const char* format, ...);
+#define VBP_TRACE_UTIL(cat, format, ...) \
+vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__)
+
+
+#define ETRACE(format, ...) VBP_TRACE_UTIL("ERROR: ", format, ##__VA_ARGS__)
+#define WTRACE(format, ...) VBP_TRACE_UTIL("WARNING: ", format, ##__VA_ARGS__)
VBP_TRACE_UTIL("INFO: ", format, ##__VA_ARGS__) +#define VTRACE(format, ...) VBP_TRACE_UTIL("VERBOSE: ", format, ##__VA_ARGS__) + + +#else + +// For Android OS + +#define LOG_NDEBUG 0 + +#define LOG_TAG "MixVBP" + +#include +#define ETRACE(...) LOGE(__VA_ARGS__) +#define WTRACE(...) LOGW(__VA_ARGS__) +#define ITRACE(...) LOGI(__VA_ARGS__) +#define VTRACE(...) LOGV(__VA_ARGS__) + +#endif + + +#else /* if VBP_TRACE is not defined */ + +#define ETRACE(format, ...) +#define WTRACE(format, ...) +#define ITRACE(format, ...) +#define VTRACE(format, ...) + + +#endif /* VBP_TRACE*/ + + +#endif /*VBP_TRACE_H_*/ + + diff --git a/mixvbp/include/viddec_debug.h b/mixvbp/include/viddec_debug.h new file mode 100755 index 0000000..fcae102 --- /dev/null +++ b/mixvbp/include/viddec_debug.h @@ -0,0 +1,31 @@ +#ifndef VIDDEC_DEBUG_H +#define VIDDEC_DEBUG_H + +#ifndef VBP + +#ifdef HOST_ONLY +#include +#include +#define DEB OS_PRINT +#define FWTRACE OS_PRINT("trace:%s %d\n", __FUNCTION__, __LINE__ ); +// #define DEB(format, args...) +// #define FWTRACE +#define DEB_FNAME(format, args...) OS_PRINT("%s: %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ , ## args ) +#define CDEB(a, format, args...) if(a != 0) {DEB(format, ##args);} +#else +#define DEB(format, args...) +#define FWTRACE +#define CDEB(a, format, args...) +#define DEB_FNAME(format, args...) +#endif + +#else // VBP is defined + +#define DEB(format, args...) +#define FWTRACE +#define CDEB(a, format, args...) +#define DEB_FNAME(format, args...) + +#endif // end of VBP + +#endif diff --git a/mixvbp/include/viddec_fw_common_defs.h b/mixvbp/include/viddec_fw_common_defs.h new file mode 100644 index 0000000..2cc32b7 --- /dev/null +++ b/mixvbp/include/viddec_fw_common_defs.h @@ -0,0 +1,223 @@ +/* + This file is provided under a dual BSD/GPLv2 license. When using or + redistributing this file, you may do so under either license. + + GPL LICENSE SUMMARY + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + + This program is free software; you can redistribute it and/or modify + it under the terms of version 2 of the GNU General Public License as + published by the Free Software Foundation. + + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. + The full GNU General Public License is included in this distribution + in the file called LICENSE.GPL. + + Contact Information: + + BSD LICENSE + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. 
diff --git a/mixvbp/include/viddec_fw_common_defs.h b/mixvbp/include/viddec_fw_common_defs.h
new file mode 100644
index 0000000..2cc32b7
--- /dev/null
+++ b/mixvbp/include/viddec_fw_common_defs.h
@@ -0,0 +1,223 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_COMMON_DEFS_H
+#define VIDDEC_FW_COMMON_DEFS_H
+
+#define VIDDEC_FW_PARSER_IPC_HOST_INT 0x87654321
+#define EMITTER_WORKLOAD_ENTRIES 2048
+
+/* Maximum supported dependent views for H264 MVC. Based on the spec this can be 1023. */
+#define MVC_MAX_SUPPORTED_VIEWS 1
+
+/* This enum defines the priority level for opening a stream */
+enum viddec_stream_priority
+{
+    viddec_stream_priority_BACKGROUND, /* Lowest priority stream */
+    viddec_stream_priority_REALTIME, /* Real time highest priority stream */
+    viddec_stream_priority_INVALID,
+};
+
+/* This enum defines supported flush types */
+enum viddec_stream_flushtype
+{
+    VIDDEC_STREAM_FLUSH_DISCARD, /* Reinitialise to start state */
+    VIDDEC_STREAM_FLUSH_PRESERVE, /* Reinitialise to start state while preserving sequence info */
+};
+
+enum viddec_stream_inband_flags
+{
+    VIDDEC_STREAM_DEFAULT_FLAG=0, /* Default value for flags */
+    VIDDEC_STREAM_EOS, /* End of stream message */
+    VIDDEC_STREAM_DISCONTINUITY, /* New segment which forces flush and preserve */
+};
+
+/* Message descriptor for the Parser's input and output queues. Needs to be 8 byte aligned. */
+typedef struct viddec_input_buffer
+{
+    unsigned int flags; /* Flags for inband messages like EOS; valid range defined in viddec_stream_inband_flags */
+    unsigned int phys;  /* DDR addr of where ES/WKLD is at. */
+    unsigned int len;   /* size of buffer at phys_addr */
+    unsigned int id;    /* An id for the buffer which is not used or modified by the FW. */
+#ifdef HOST_ONLY
+    unsigned char *buf; /* virt pointer to buffer. This is a don't care for FW */
+#endif
+} ipc_msg_data;
+
+typedef ipc_msg_data viddec_input_buffer_t;
+typedef ipc_msg_data viddec_ipc_msg_data;
+
+/* Return types for interface functions */
+typedef enum
+{
+    VIDDEC_FW_SUCCESS, /* Successful with the current operation */
+    VIDDEC_FW_NORESOURCES, /* No resources to execute the requested functionality */
+    VIDDEC_FW_FAILURE, /* Failed for an unknown reason */
+    VIDDEC_FW_INVALID_PARAM, /* The parameters that were passed are invalid */
+    VIDDEC_FW_PORT_FULL, /* The operation failed since the queue is full */
+    VIDDEC_FW_PORT_EMPTY, /* The operation failed since the queue is empty */
+    VIDDEC_FW_NEED_FREE_WKLD, /* The operation failed since a free wkld is not available */
+} viddec_fw_return_types_t;
+
+/* Defines for interrupt mask and status */
+typedef enum
+{
+    VIDDEC_FW_WKLD_DATA_AVAIL=1, /* A processed workload is available */
+    VIDDEC_FW_INPUT_WATERMARK_REACHED=2, /* The input path is below the set watermark for the current stream */
+} viddec_fw_parser_int_status_t;
+
+/* Defines for attributes on a stream. If not set explicitly, default values are used. */
+typedef enum
+{
+    VIDDEC_FW_INPUT_Q_WATERMARK, /* Define for setting input queue watermarks */
+    VIDDEC_FW_STREAM_PRIORITY, /* Define for setting stream priority */
+} viddec_fw_stream_attributes_t;
+
+typedef struct
+{
+    unsigned int input_q_space; /* Num of messages that can be written to the input queue */
+    unsigned int output_q_data; /* Num of messages in the output queue */
+    unsigned int workload_q_status; /* Number of free wklds available to the parser */
+} viddec_fw_q_status_t;
+
+typedef struct
+{
+    unsigned int to_fw_q_space; /* Num of messages that can be written to the input queue */
+    unsigned int from_fw_q_data; /* Num of messages in the output queue */
+} viddec_fw_decoder_q_status_t;
+
+enum viddec_fw_decoder_int_status
+{
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_0 = (1<< 0), /* Decoder Stream 0 Requires Service */
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_1 = (1<< 1), /* Decoder Stream 1 Requires Service */
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_2 = (1<< 2), /* Decoder Stream 2 Requires Service */
+
+
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_HIGH = (1<<30), /* Any Decoder Stream >= 30 Requires Service */
+    VIDDEC_FW_DECODER_INT_STATUS_AUTO_API = (1<<31) /* An Auto-API Function has completed */
+};
+
+/** Hardware Accelerated stream formats */
+typedef enum viddec_stream_format
+{
+    MFD_STREAM_FORMAT_MPEG=1,
+    MFD_STREAM_FORMAT_H264,
+    MFD_STREAM_FORMAT_VC1,
+    MFD_STREAM_FORMAT_MPEG42,
+
+    MFD_STREAM_FORMAT_MAX, /* must be last */
+    MFD_STREAM_FORMAT_INVALID
+} viddec_stream_format;
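The ipc_msg_data descriptor above is the unit of exchange on the parser/decoder queues. A sketch of how a host might fill one for an ES buffer before queuing it (fill_es_msg() and the address values are illustrative, not part of this patch):

    #include <string.h>

    /* Illustrative host-side helper: describe one ES buffer for the FW. */
    static void fill_es_msg(ipc_msg_data *msg, unsigned int phys_addr,
                            unsigned int size, unsigned int buf_id, int eos)
    {
        memset(msg, 0, sizeof(*msg));
        msg->phys  = phys_addr; /* DDR address of the ES data */
        msg->len   = size;      /* bytes available at phys */
        msg->id    = buf_id;    /* opaque to the FW, echoed back to the host */
        msg->flags = eos ? VIDDEC_STREAM_EOS : VIDDEC_STREAM_DEFAULT_FLAG;
    }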
+/* Workload specific error codes */
+enum viddec_fw_workload_error_codes
+{
+    VIDDEC_FW_WORKLOAD_SUCCESS = 0,
+    VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE = (1 << 0), /* Parser/Decoder detected a non-decodable error with this workload */
+    VIDDEC_FW_WORKLOAD_ERR_BUFFERS_OVERFLOW = (1 << 1), /* Parser detected more than 64 buffers between two start codes */
+    VIDDEC_FW_WORKLOAD_ERR_ITEMS_OVERFLOW = (1 << 2), /* Parser detected overflow of the currently allocated workload memory */
+    VIDDEC_FW_WORKLOAD_ERR_FLUSHED_FRAME = (1 << 3), /* This is a partial or empty frame which was flushed by Parser/Decoder */
+    VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM = (1 << 4), /* This is a partial or empty frame from Parser/Decoder */
+    VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED = (1 << 5), /* Parser detected an unsupported feature in the stream */
+    /* First 8 bits reserved for non-decodable errors */
+    VIDDEC_FW_WORKLOAD_ERR_CONCEALED = (1 << 9), /* The decoder concealed some errors in this frame */
+    VIDDEC_FW_WORKLOAD_ERR_MISSING_REFERENCE = (1 << 10), /* Decoder/parser detected that at least one of the required reference frames is missing */
+    VIDDEC_FW_WORKLOAD_ERR_IN_REFERENCE = (1 << 11), /* Decoder/parser detected that at least one of the reference frames has errors in it */
+    VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD = (1 << 12), /* Parser detected that at least one of the fields is missing */
+    VIDDEC_FW_WORKLOAD_ERR_PARTIAL_SLICE = (1 << 13), /* Decoder detected that at least one of the fields is missing */
+    VIDDEC_FW_WORKLOAD_ERR_MACROBLOCK = (1 << 14), /* Decoder detected macroblock errors */
+    VIDDEC_FW_WORKLOAD_ERR_MISSING_SEQ_INFO = (1 << 16), /* Parser detected that sequence information is missing */
+
+    VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17), /* Decoder/Parser detected errors in "top field" or "frame" */
+    VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18), /* Decoder/Parser detected errors in "bottom field" or "frame" */
+    VIDDEC_FW_WORKLOAD_ERR_BITSTREAM_ERROR = (1 << 19), /* Parser detected bitstream errors */
+
+};
+
+enum viddec_fw_mpeg2_error_codes
+{
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_HDR = (1 << 24), /* Parser detected corruption in the sequence header. Will use the previous good sequence info, if found. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_EXT = (1 << 25), /* Parser detected corruption in the sequence extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_DISP_EXT = (1 << 26), /* Parser detected corruption in the sequence display extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_GOP_HDR = (1 << 27), /* Parser detected corruption in the GOP header. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_HDR = (1 << 26), /* Parser detected corruption in the picture header. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_COD_EXT = (1 << 27), /* Parser detected corruption in the picture coding extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_DISP_EXT = (1 << 28), /* Parser detected corruption in the picture display extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT = (1 << 29), /* Parser detected corruption in the quantization matrix extension. */
+};
+
+#ifdef VBP
+
+#ifndef NULL
+#define NULL (void*)0x0
+#endif
+
+#ifndef true
+#define true 1
+#define false 0
+#endif
+
+#ifndef __cplusplus
+#ifndef bool
+typedef int bool;
+#endif
+#endif
+
+#endif
+/* end of #ifdef VBP */
+
+#endif
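Because the workload error codes above are bit flags, with the low byte reserved for non-decodable conditions, a host can classify a returned error word with simple masking. A sketch under that assumption (the helper names are illustrative):

    /* Bits 0..7 flag non-decodable conditions, so any bit set in the
       low byte means the frame cannot be decoded at all. */
    static int workload_is_decodable(unsigned int error)
    {
        return (error & 0xFF) == 0;
    }

    /* Non-fatal example: the decoder concealed errors but produced a frame. */
    static int workload_has_concealment(unsigned int error)
    {
        return (error & VIDDEC_FW_WORKLOAD_ERR_CONCEALED) != 0;
    }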
diff --git a/mixvbp/include/viddec_fw_decoder_host.h b/mixvbp/include/viddec_fw_decoder_host.h
new file mode 100644
index 0000000..d902520
--- /dev/null
+++ b/mixvbp/include/viddec_fw_decoder_host.h
@@ -0,0 +1,242 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+
+#ifndef VIDDEC_FW_DECODER_HOST_H
+#define VIDDEC_FW_DECODER_HOST_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "viddec_fw_common_defs.h"
+
+    /** @weakgroup viddec Fw Decoder interface Functions */
+    /** @ingroup viddec_fw_decoder */
+    /*@{*/
+
+    /**
+       This function returns the size required for loading fw.
+       @retval size : Required size.
+    */
+    uint32_t viddec_fw_decoder_query_fwsize(void);
+
+    /**
+       This function loads the Decoder Firmware and initialises the necessary state information.
+       @param[in] phys : Physical address where the firmware should be loaded.
+       @param[in] len : Length of data allocated at phys.
+       @retval VIDDEC_FW_SUCCESS : Successfully loaded firmware.
+       @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+       @retval VIDDEC_FW_NORESOURCES : Failed to allocate resources for loading firmware.
+       @retval VIDDEC_FW_INVALID_PARAM : The input parameters are not valid.
+    */
+    uint32_t viddec_fw_decoder_loadfw(uint32_t phys, uint32_t len);
+
+    /**
+       This function returns the size required for global memory for all supported decoders. This is a synchronous message to FW.
+       @param[out] size : returns the size required.
+       @retval VIDDEC_FW_SUCCESS : Successfully got the required information from FW.
+       @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+    */
+    uint32_t viddec_fw_decoder_query_fwsize_scratchmem(uint32_t *size);
+
+    /**
+       This function sets the global memory for the firmware to use. This is a synchronous message to FW.
+       @param[in] phys : Physical address where the global memory starts.
+       @param[in] len : Length of data allocated at phys.
+       @retval VIDDEC_FW_SUCCESS : Successfully set up global memory.
+       @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+    */
+    uint32_t viddec_fw_decoder_set_fw_scratchmem(uint32_t phys, uint32_t len);
+
+    /**
+       This function returns the size required for opening a stream. This is a synchronous message to FW.
+       @param[in] codec_type : Type of codec that we want information about.
+       @param[out] size : Size of memory required for opening a stream.
+       @retval VIDDEC_FW_SUCCESS : Successfully talked to FW and got the required size.
+       @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+    */
+    uint32_t viddec_fw_decoder_query_streamsize(uint32_t codec_type, uint32_t *size);
+
+    /**
+       This function opens the requested codec. This is a synchronous message to FW.
+       @param[in] codec_type : Type of codec that we want to open.
+       @param[in] phys : Physical address of allocated memory for this codec.
+       @param[in] priority : Priority of the stream. 1 for realtime and 0 for background.
+       @param[out] strm_handle : Handle of the opened stream.
+       @retval VIDDEC_FW_SUCCESS : Successfully opened the stream.
+       @retval VIDDEC_FW_FAILURE : Failed to open a stream.
+    */
+    uint32_t viddec_fw_decoder_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority);
+
+
+    /**
+       This function closes a stream. This is a synchronous message to FW.
+       @param[in] strm_handle : Handle of the stream to close.
+    */
+    void viddec_fw_decoder_closestream(uint32_t strm_handle);
+
+    /**
+       This function allows the host to get the current status of the decoder workload queues. If the current stream is active, we return
+       the number of input messages that can be written to the input queue and the number of messages in the output queue of the stream.
+
+       Normally this is called when the host receives an interrupt from the decoder, in which case, before releasing the INT,
+       the host will try its best to keep the FW busy. When an interrupt is received it means at least one workload has been
+       written into the output queue of a stream.
+       @param[in] strm_handle : The handle of the stream whose queue status we want to get.
+       @param[out] status : The status of each queue gets updated in here.
+       @retval VIDDEC_FW_SUCCESS : Successfully got the status information.
+       @retval VIDDEC_FW_INVALID_PARAM : Invalid parameter, in this case an inactive stream.
+    */
+    uint32_t viddec_fw_decoder_get_queue_status(uint32_t strm_handle, viddec_fw_decoder_q_status_t *status);
+
+    /**
+       This function flushes the current stream. This is a synchronous message to FW.
+       Before calling this function the host has to make sure the output queue of the firmware
+       is empty. After this function is executed the FW will read all entries in the input
+       wkld buffer queue into the output queue. After this operation the host has to read all entries
+       in the output queue again to finish the flush operation.
+       @param[in] flush_type : Type of flush we want to perform, e.g. flush and discard.
+       @param[in] strm_handle : Handle of the stream we want to flush.
+       @retval VIDDEC_FW_SUCCESS : Successfully flushed the stream.
+       @retval VIDDEC_FW_FAILURE : Failed to flush the stream.
+    */
+    uint32_t viddec_fw_decoder_flushstream(uint32_t strm_handle, uint32_t flush_type);
+
+    /**
+       This function sends an input workload buffer. The host should provide the required frame buffers in this workload before
+       sending it to fw.
+       @param[in] strm_handle : The handle of the stream that we want to send the workload buffer to.
+       @param[in] cur_wkld : The workload buffer we want to send.
+       @retval VIDDEC_FW_SUCCESS : Successfully sent the message.
+       @retval VIDDEC_FW_PORT_FULL : Port to fw is full; unsuccessful in sending the message.
+    */
+    uint32_t viddec_fw_decoder_send(uint32_t strm_handle, ipc_msg_data *cur_wkld);
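Together with viddec_fw_decoder_recv() (documented next), send() forms a simple queue pair: push workloads until the input port reports full, then drain completed ones. A hedged sketch of such a pump loop, assuming application-side helpers next_workload() and handle_workload() that are not part of this interface:

    /* Illustrative host loop; next_workload() and handle_workload() are
       application placeholders. */
    static void pump_stream(uint32_t strm)
    {
        ipc_msg_data msg;

        while (next_workload(&msg)) {
            if (viddec_fw_decoder_send(strm, &msg) == VIDDEC_FW_PORT_FULL)
                break; /* input queue full: drain output first, then retry */
        }
        while (viddec_fw_decoder_recv(strm, &msg) == VIDDEC_FW_SUCCESS)
            handle_workload(&msg); /* consume one decoded workload */
    }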
+
+    /**
+       This function gets a decoded workload from fw.
+       @param[in] strm_handle : The handle of the stream that we want to read a workload from.
+       @param[out] cur_wkld : The workload descriptor.
+       @retval VIDDEC_FW_SUCCESS : Successfully received a message.
+       @retval VIDDEC_FW_PORT_EMPTY : Workload port is empty; unsuccessful in reading a wkld.
+    */
+    uint32_t viddec_fw_decoder_recv(uint32_t strm_handle, ipc_msg_data *cur_wkld);
+
+    /**
+       This function unloads the Decoder Firmware and frees the resources allocated in loadfw.
+       If this function is called before loadfw it will crash with a segmentation fault.
+    */
+    void viddec_fw_decoder_deinit(void);
+
+    /**
+       This function gets the major and minor revision numbers of the loaded firmware.
+       @param[out] major : The major revision number.
+       @param[out] minor : The minor revision number.
+       @param[out] build : The internal build number.
+    */
+    void viddec_fw_decoder_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build);
+
+    /**
+       This function returns the interrupt status of all streams which need to be processed. A value of zero
+       means there are no active streams which generated this interrupt.
+    */
+    uint32_t viddec_fw_decoder_active_pending_interrupts(void);
+
+    /**
+       This function clears the interrupts for all active streams represented by the status input parameter.
+       The status should always be a value that was returned by viddec_fw_decoder_active_pending_interrupts().
+       @param[in] status : The status value that was returned by viddec_fw_decoder_active_pending_interrupts().
+    */
+    void viddec_fw_decoder_clear_all_pending_interrupts(uint32_t status);
+
+    /**
+       This function enables/disables the interrupt for the stream specified.
+       @param[in] strm_handle : The handle of the stream that we want to enable or disable interrupts for.
+       @param[in] enable : Boolean value; if ==0 disable interrupts, else enable.
+       @retval VIDDEC_FW_SUCCESS : Successfully sent the message.
+       @retval VIDDEC_FW_INVALID_PARAM : An invalid stream handle was passed.
+    */
+    uint32_t viddec_fw_decoder_set_stream_interrupt_mask(uint32_t stream_handle, uint32_t enable);
+
+    /**
+       This function returns which stream interrupted in the past based on status, which is a snapshot of
+       the interrupt status that was cleared in the past. The host has to call clear with the status information
+       before calling this function again with the status value. The host should repeat this operation until this function
+       returns 0, which means all the streams that generated interrupts have been processed.
+       @param[out] strm_handle : The handle of a stream that generated an interrupt.
+       @param[in] status : Snapshot of the interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts().
+       @retval 1 : A valid stream handle was found.
+       @retval 0 : No more streams from the status which caused the interrupt.
+    */
+    uint32_t viddec_fw_decoder_get_next_stream_from_interrupt_status(uint32_t status, uint32_t *stream_handle);
+
+    /**
+       This function clears the stream_handle from the status snapshot that we got from viddec_fw_decoder_active_pending_interrupts().
+       This should be called after the host performs all necessary actions for the stream.
+       @param[in] strm_handle : The handle of a stream that we want to clear to indicate we handled it.
+       @param[in] status : Snapshot of the interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts().
+       @retval 1 : Operation was successful.
+       @retval 0 : An invalid stream handle was passed.
+    */
+    uint32_t viddec_fw_decoder_clear_stream_from_interrupt_status(uint32_t *status, uint32_t stream_handle);
+
+    /*@}*/
+#ifdef __cplusplus
+}
+#endif
+
+#endif//#ifndef VIDDEC_FW_DECODER_HOST_H
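The interrupt-status functions above are meant to be used as a snapshot-walk-clear sequence. A sketch of the loop the comments describe (service_stream() is a placeholder for draining that stream's output queue):

    static void service_decoder_interrupts(void)
    {
        uint32_t strm;
        uint32_t status = viddec_fw_decoder_active_pending_interrupts();
        uint32_t snapshot = status;

        /* Walk every stream flagged in the snapshot, clearing each one
           from the snapshot once it has been serviced. */
        while (viddec_fw_decoder_get_next_stream_from_interrupt_status(snapshot, &strm)) {
            service_stream(strm);
            viddec_fw_decoder_clear_stream_from_interrupt_status(&snapshot, strm);
        }
        viddec_fw_decoder_clear_all_pending_interrupts(status);
    }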
diff --git a/mixvbp/include/viddec_fw_frame_attr.h b/mixvbp/include/viddec_fw_frame_attr.h
new file mode 100644
index 0000000..4f4b479
--- /dev/null
+++ b/mixvbp/include/viddec_fw_frame_attr.h
@@ -0,0 +1,294 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_FRAME_ATTR_H
+#define VIDDEC_FW_FRAME_ATTR_H
+
+#include "viddec_fw_item_types.h"
+
+#define VIDDEC_PANSCAN_MAX_OFFSETS 4
+#define VIDDEC_MAX_CPB_CNT 32
+
+/**
+This enumeration lists all the frame types defined by the MPEG, VC1 and H264 specifications.
+Frame types applicable to a single codec are specified in the comments.
+*/ +typedef enum +{ + VIDDEC_FRAME_TYPE_INVALID=0, /** Unknown type - default value */ + VIDDEC_FRAME_TYPE_IDR=0x1, /** IDR frame - h264 only */ + VIDDEC_FRAME_TYPE_I=0x2, /** I frame */ + VIDDEC_FRAME_TYPE_P=0x3, /** P frame */ + VIDDEC_FRAME_TYPE_B=0x4, /** B frame */ + VIDDEC_FRAME_TYPE_BI=0x5, /** BI frame - Intracoded B frame - vc1 only */ + VIDDEC_FRAME_TYPE_SKIP=0x6, /** Skipped frame - vc1 only */ + VIDDEC_FRAME_TYPE_D=0x7, /** D frame - mpeg1 only */ + VIDDEC_FRAME_TYPE_S=0x8, /** SVOP frame - mpeg4 only - sprite encoded frame - treat as P */ + VIDDEC_FRAME_TYPE_MAX, +} viddec_frame_type_t; + +/** +This structure contains the content size info extracted from the stream. +*/ +typedef struct viddec_rect_size +{ + unsigned int width; + unsigned int height; +} viddec_rect_size_t; + +/** +This structure contains MPEG2 specific pan scan offsets extracted from the stream. +*/ +typedef struct viddec_mpeg2_frame_center_offset +{ + int horz; + int vert; +} viddec_mpeg2_frame_center_offset_t; + +/** +This structure contains the MPEG2 specific frame attributes. +*/ +typedef struct viddec_mpeg2_frame_attributes +{ + /** + 10 bit unsigned integer corresponding to the display order of each coded picture + in the stream (or gop if gop header is present). + Refer to "temporal_reference" of the picture header in ITU-T H.262 Specification. + */ + unsigned int temporal_ref; + + /** + Pan/Scan rectangle info + Refer to the picture display extension in ITU-T H.262 Specification. + */ + viddec_mpeg2_frame_center_offset_t frame_center_offset[VIDDEC_PANSCAN_MAX_OFFSETS]; + unsigned int number_of_frame_center_offsets; + + /** + Top-Field first flag + Refer to "top_field_first" of the picture coding extension in ITU-T H.262 Specification. + */ + unsigned int top_field_first; + + /** + Progressive frame flag - Indicates if current frame is progressive or not. + Refer to "progressive_frame" of the picture coding extension in ITU-T H.262 Specification. + */ + unsigned int progressive_frame; + + /** + Frame/field polarity for each coded picture. + Refer to Table 6-14 in ITU-T H.262 Specification. + */ + unsigned int picture_struct; + + /** + Repeat field/frame flag. + Refer to "repeat_first_field" of the picture coding extension in ITU-T H.262 Specification. + */ + unsigned int repeat_first_field; + + +} viddec_mpeg2_frame_attributes_t; + +/** +This structure contains MPEG2 specific pan scan offsets extracted from the stream. +*/ +typedef struct viddec_vc1_pan_scan_window +{ + unsigned int hoffset; + unsigned int voffset; + unsigned int width; + unsigned int height; +} viddec_vc1_pan_scan_window_t; + +/** +This structure contains the VC1 specific frame attributes. +*/ +typedef struct viddec_vc1_frame_attributes +{ + /** + Temporal Reference of frame/field. + Refer to "TFCNTR" in the picture layer of the SMPTE VC1 Specification. + */ + unsigned int tfcntr; + + /** + Frame/field repeat information in the bitstream. + Refer to "RPTFRM", "TFF", "BFF" in the picture layer + of the SMPTE VC1 Specification. + */ + unsigned int rptfrm; + unsigned int tff; + unsigned int rff; + + /** + Pan-scan information in the bitstream. + Refer to "PANSCAN_FLAG" in the entrypoint layer, "PS_PRESENT", "PS_HOFFSET", "PS_VOFFSET", + "PS_WIDTH" and "PS_HEIGHT" in the picture layer of the SMPTE VC1 Specification. 
+    */
+    unsigned int panscan_flag;
+    unsigned int ps_present;
+    unsigned int num_of_pan_scan_windows;
+    viddec_vc1_pan_scan_window_t pan_scan_window[VIDDEC_PANSCAN_MAX_OFFSETS];
+
+} viddec_vc1_frame_attributes_t;
+
+/**
+This structure contains the H264 specific frame attributes.
+*/
+typedef struct viddec_h264_frame_attributes
+{
+    /**
+       used_for_reference: 1 means this frame is used as a reference frame by others; 0 means no frame refers to this frame.
+    */
+    unsigned int used_for_reference;
+    /**
+       Picture Order Count for the current frame/field.
+       This value is computed using information from the bitstream.
+       Refer to Section 8.2.1, function 8-1 of the ITU-T H.264 Specification.
+       These fields will be supported in the future.
+    */
+    int top_field_poc;
+    int bottom_field_poc;
+
+    /**
+       Display size, which is cropped from the content size.
+       Currently cont_size is already cropped, so this parameter is redundant, but in the future cont_size may change.
+    */
+    viddec_rect_size_t cropped_size;
+
+    /**
+       top_field_first: 0 means bottom_field_POC is smaller than top_field_POC, else 1
+    */
+    unsigned int top_field_first;
+
+    /**
+       field_pic_flag: 0 means all slices of this frame are frame-based encoded, else 1
+    */
+    unsigned int field_pic_flag;
+
+    /**
+       This data type holds view specific information of the current frame.
+       The following information is packed into this data type:
+       view_id (bits 0-9): Assigned 10 bit value in the encoded stream.
+       priority_id (bits 10-15): Assigned 6 bit priority id.
+       is_base_view (bit 16): Flag; when true the current frame belongs to the base view, else to a dependent view.
+    */
+#define viddec_fw_h264_mvc_get_view_id(x) viddec_fw_bitfields_extract( (x)->view_spcific_info, 0, 0x3FF)
+#define viddec_fw_h264_mvc_set_view_id(x, val) viddec_fw_bitfields_insert( (x)->view_spcific_info, val, 0, 0x3FF)
+#define viddec_fw_h264_mvc_get_priority_id(x) viddec_fw_bitfields_extract( (x)->view_spcific_info, 10, 0x3F)
+#define viddec_fw_h264_mvc_set_priority_id(x, val) viddec_fw_bitfields_insert( (x)->view_spcific_info, val, 10, 0x3F)
+#define viddec_fw_h264_mvc_get_is_base_view(x) viddec_fw_bitfields_extract( (x)->view_spcific_info, 16, 0x1)
+#define viddec_fw_h264_mvc_set_is_base_view(x, val) viddec_fw_bitfields_insert( (x)->view_spcific_info, val, 16, 0x1)
+    unsigned int view_spcific_info;
+} viddec_h264_frame_attributes_t;
+
+/**
+This structure contains the MPEG4 specific frame attributes.
+*/
+typedef struct viddec_mpeg4_frame_attributes
+{
+    /**
+       Top-Field first flag
+       Refer to "top_field_first" of the Video Object Plane of the MPEG4 Spec.
+    */
+    unsigned int top_field_first;
+
+} viddec_mpeg4_frame_attributes_t;
+
+/**
+This structure groups all the frame attributes that are exported by the firmware.
+The frame attributes are split into attributes that are common to all codecs and
+attributes that are specific to a codec type.
+*/
+typedef struct viddec_frame_attributes
+{
+    /**
+       Content size specified in the stream.
+       For MPEG2, refer to "horizontal_size_value, vertical_size_value" of the sequence header and
+       "horizontal_size_extension, vertical_size_extension" of the sequence extension in ITU-T H.262 Specification.
+       For H264, refer to "pic_width_in_mbs_minus1" and "pic_height_in_map_units_minus1" of the
+       sequence parameter set in ITU-T H.264 Specification.
+       For VC1, refer to "MAX_CODED_WIDTH" and "MAX_CODED_HEIGHT" in the sequence layer,
+       "CODED_SIZE_FLAG", "CODED_WIDTH" and "CODED_HEIGHT" in the entrypoint layer of the SMPTE VC1 Specification.
+ */ + viddec_rect_size_t cont_size; + + /** + Type of frame populated in the workload. + frame_type contains the frame type for progressive frame and the field type for the top field for interlaced frames. + bottom_field_type contains the field type for the bottom field for interlaced frames. + For MPEG2, refer to "picture_coding_type" in picture header (Table 6-12) in ITU-T H.262 Specification. + For H264, refer to "slice_type" in slice header (Table 7-6) in ITU-T H.264 Specification. + For VC1, refer to "PTYPE" and FPTYPE in the picture layer (Tables 33, 34, 35, 105) in SMPTE VC1 Specification. + */ + viddec_frame_type_t frame_type; + viddec_frame_type_t bottom_field_type; + + /** Codec specific attributes */ + union + { + viddec_mpeg2_frame_attributes_t mpeg2; + viddec_vc1_frame_attributes_t vc1; + viddec_h264_frame_attributes_t h264; + viddec_mpeg4_frame_attributes_t mpeg4; + }; + +} viddec_frame_attributes_t; + +#endif /* VIDDEC_FRAME_ATTR_H */ diff --git a/mixvbp/include/viddec_fw_item_types.h b/mixvbp/include/viddec_fw_item_types.h new file mode 100644 index 0000000..472dff2 --- /dev/null +++ b/mixvbp/include/viddec_fw_item_types.h @@ -0,0 +1,784 @@ +/* + This file is provided under a dual BSD/GPLv2 license. When using or + redistributing this file, you may do so under either license. + + GPL LICENSE SUMMARY + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + + This program is free software; you can redistribute it and/or modify + it under the terms of version 2 of the GNU General Public License as + published by the Free Software Foundation. + + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. + The full GNU General Public License is included in this distribution + in the file called LICENSE.GPL. + + Contact Information: + + BSD LICENSE + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_ITEM_TYPES_H
+#define VIDDEC_FW_ITEM_TYPES_H
+
+
+/* The following macros are defined to pack data into 32 bit words.
+   mask: A 32 bit value of N 1 bits starting from the lsb, where N is the length of the data we are packing.
+   start: Bit start position of the data we want.
+   Example: to pack height (16 bits) and width (16 bits), with width in bits 1:16 and height in bits 17:32, the
+   start and mask values for width and height are:
+       width:  start = 0,  mask = 0xFFFF
+       height: start = 16, mask = 0xFFFF
+
+   extract: extracts data of unsigned integer type from x_32 at bit offset start (0 to 31), of length given by mask, and returns it as an
+   unsigned integer.
+   insert: inserts N bits from val_32, where N is the length given by mask, into x_32 at the bit offset given by start. val_32 is expected to
+   be an unsigned int of N bits starting with the lsb.
+*/
+
+#define viddec_fw_bitfields_extract(x_32, start, mask) (((x_32) >> (start)) & (mask) )
+#define viddec_fw_bitfields_insert(x_32, val_32, start, mask) ((x_32) = (((x_32) & ~( (mask) << (start))) | (((val_32) & (mask)) << (start))))
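A worked example of the two macros, using the width/height layout from the comment above (width in bits 0..15, height in bits 16..31):

    /* Pack a 1920x1080 dimension pair into one 32 bit word and read it back. */
    static void pack_dims_example(void)
    {
        unsigned int dim = 0;
        viddec_fw_bitfields_insert(dim, 1920, 0, 0xFFFF);  /* width,  bits 0..15  */
        viddec_fw_bitfields_insert(dim, 1080, 16, 0xFFFF); /* height, bits 16..31 */
        /* viddec_fw_bitfields_extract(dim, 0, 0xFFFF)  now yields 1920, and
           viddec_fw_bitfields_extract(dim, 16, 0xFFFF) yields 1080. */
    }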
+
+
+/* Workload item types. Each item here represents data that the Parser detected, e.g. slice data, which
+   is used either by the host or the decoder. */
+typedef enum workload_item_type
+{
+    VIDDEC_WORKLOAD_INVALID =0x0,/* Unknown type */
+    VIDDEC_WORKLOAD_PIXEL_ES =0x100,/* Slice data tag */
+    VIDDEC_WORKLOAD_TAG =0x200,/* Frame association tag */
+    VIDDEC_WORKLOAD_USERDATA =0x300,/* user data tag */
+
+    VIDDEC_WORKLOAD_IBUF_DONE =0x500,/* ES buffer completely used tag */
+    VIDDEC_WORKLOAD_IBUF_CONTINUED =0x600,/* ES buffer partially used tag */
+    VIDDEC_WORKLOAD_IBUF_DISCONTINUITY =0x700,/* Discontinuity tag on first workload after discontinuity */
+    VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER =0x800, /* Reorder frames in DPB tag */
+    VIDDEC_WORKLOAD_IBUF_EOS =0x900,/* EOS tag on last workload used for current stream */
+    VIDDEC_WORKLOAD_SEQUENCE_INFO =0xa00,/* MPEG2 Seq Hdr, H264 SPS, VC1 SeqLayer */
+    VIDDEC_WORKLOAD_DISPLAY_INFO =0xb00,/* MPEG2 Seq Disp Ext, H264 VUI */
+    VIDDEC_WORKLOAD_GOP_INFO =0xc00,/* MPEG2 GOP, VC1 Entrypoint */
+    VIDDEC_WORKLOAD_SEQ_USER_DATA =0xd00,/* MPEG2, VC1 Sequence Level User data */
+    VIDDEC_WORKLOAD_GOP_USER_DATA =0xe00,/* MPEG2, VC1 Gop Level User data */
+    VIDDEC_WORKLOAD_FRM_USER_DATA =0xf00,/* MPEG2 Picture User data, VC1 Frame User data */
+
+    VIDDEC_WORKLOAD_FLD_USER_DATA =0x1000,/* MPEG2, VC1 Field User data */
+    VIDDEC_WORKLOAD_SLC_USER_DATA =0x1100,/* VC1 Slice User data */
+    VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA =0x1200,/* MPEG4 Visual Object User data */
+    VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C =0x1200,/* VC1 Only */
+    VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA =0x1300,/* MPEG4 Video Object Layer User data */
+    VIDDEC_WORKLOAD_H264_CROPPING =0x1400,/* H264 only */
+    VIDDEC_WORKLOAD_H264_PAN_SCAN =0x1500,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_PIC_TIMING =0x1600,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT =0x1700,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED =0x1800,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED =0x1900,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_RECOVERY_POINT =0x1a00,/* H264 only */
+    VIDDEC_WORKLOAD_MPEG2_SEQ_EXT =0x1b00,/* MPEG2 Only - Sequence Extension */
+    VIDDEC_WORKLOAD_H264_MVC_SPS_VIEW_IDS =0x1c00,/* H264 only */
+    VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ =0x1d00,/* MPEG4 Only - Visual Sequence */
+    VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ =0x1e00,/* MPEG4 Only - Video Object Layer */
+    VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ =0x1f00,/* MPEG4 Only - Group of Video Object Planes */
+
+    VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT =0x2000,/* MPEG4 Only - Video Plane with Short Header */
+    VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO =0x2100,/* H264 only */
+
+    VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0 =0x10000,/* required reference frames tag, last eight bits indicate index in dpb */
+    VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 =0x20000,/* release frames tag, last eight bits indicate index in dpb */
+    VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 =0x30000,/* Display order in DPB tag, for H264 */
+    VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 =0x40000,/* Release frames but do not display, for H264 */
+    VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 =0x50000,/* Release list while EOS, last eight bits indicate index in dpb */
+    VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 =0x60000,/* Display list while EOS, last eight bits indicate index in dpb */
+    VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0 =0x70000,/* required for H264 as it needs the whole DPB for each frame */
+    VIDDEC_WORKLOAD_H264_REFR_LIST_0 =0x80000,/* ref list 0 for H264 */
+    VIDDEC_WORKLOAD_H264_REFR_LIST_1 =0x90000,/* ref list 1 for H264 */
+    VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY =0xa0000,/* eos items begin after this */
+
+    VIDDEC_WORKLOAD_DECODER_SPECIFIC =0x100000,/* pvt info for decoder tags */
+    VIDDEC_WORKLOAD_MAX,
+} workload_item_type;
+
+struct h264_witem_sps_mvc_id
+{
+    /*
+      0-9:   num_views_minus1
+      10-19: start index of views in the current item.
+      20-23: Number of valid items.
+    */
+#define viddec_fw_h264_sps_mvc_id_get_num_views_minus1(x) viddec_fw_bitfields_extract( (x)->num_views, 0, 0x3FF)
+#define viddec_fw_h264_sps_mvc_id_set_num_views_minus1(x, val) viddec_fw_bitfields_insert( (x)->num_views, val, 0, 0x3FF)
+#define viddec_fw_h264_sps_mvc_id_get_cur_start_index(x) viddec_fw_bitfields_extract( (x)->num_views, 10, 0x3FF)
+#define viddec_fw_h264_sps_mvc_id_set_cur_start_index(x, val) viddec_fw_bitfields_insert( (x)->num_views, val, 10, 0x3FF)
+#define viddec_fw_h264_sps_mvc_id_get_num_cur_valid_items(x) viddec_fw_bitfields_extract( (x)->num_views, 20, 0x7)
+#define viddec_fw_h264_sps_mvc_id_set_num_cur_valid_items(x, val) viddec_fw_bitfields_insert( (x)->num_views, val, 20, 0x7)
+    unsigned int num_views;
+
+    /* We pack six ids into two integers. Each packed_view (integer) contains three 10 bit ids at bits 0-9, 10-19 and 20-29.
+       These values can be extracted/set using the viddec_fw_h264_sps_mvc_id_get_data_frm_index()
+       and viddec_fw_h264_sps_mvc_id_set_data_frm_index() functions.
+    */
+#define viddec_fw_h264_sps_mvc_id_max_packed_ids 6 /* Max number of packed ids in a workload item */
+    unsigned int packed_view[2];
+};
+
+/* This function extracts the 10 bit view id at index (< 6) that was packed into the h264_witem_sps_mvc_id structure */
+static inline unsigned int viddec_fw_h264_sps_mvc_id_get_data_frm_index(struct h264_witem_sps_mvc_id *data, unsigned int index)
+{
+    unsigned int start=0, *word;
+
+    start = ((index > 2) ? (index - 3) : index) * 10;
+    word = &(data->packed_view[(index > 2) ?
1:0]); + return viddec_fw_bitfields_extract(*word, start, 0x3FF); +} + +/* This function packs a 10 bit view id(val) at index( <6) in h264_witem_sps_mvc_id structure */ +static inline void viddec_fw_h264_sps_mvc_id_set_data_frm_index(struct h264_witem_sps_mvc_id *data, unsigned int index, unsigned int val) +{ + unsigned int start=0, *word; + + start = ((index > 2) ?(index - 3) : index) *10; + word = &(data->packed_view[(index > 2) ? 1:0]); + viddec_fw_bitfields_insert(*word, val, start, 0x3FF); +} + +/* 16-byte workload */ +typedef struct viddec_workload_item +{ + enum workload_item_type vwi_type; + union + { + struct + { + unsigned int es_phys_addr; + unsigned int es_phys_len; + unsigned int es_flags; + } es; + struct + { + unsigned int tag_phys_addr; + unsigned int tag_phys_len; + unsigned int tag_value; + } tag; + struct + { + unsigned int data_offset; + unsigned int data_payload[2]; + } data; + struct + { + signed int reference_id; /* Assigned by parser */ + unsigned int luma_phys_addr; /* assigned by host, for DM */ + unsigned int chroma_phys_addr; /* assigned by host, for DM */ + } ref_frame; + struct /* when vwi_type == VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER */ + { + signed int ref_table_offset; /* Index of first "reordered" */ + /* index from Current[] for Next[offset+0], Ref[offset+1], Ref[offset+2], Ref[offset+3] */ + unsigned int ref_reorder_00010203; + /* index from Current[] for Next[offset+4], Ref[offset+5], Ref[offset+6], Ref[offset+7] */ + unsigned int ref_reorder_04050607; + } ref_reorder; + struct + { + /* we pack a maximum of 11 bytes of user data and 1 byte for size */ + /* TODO: we can pack 12 bytes and use bottom 8 bits of type to indicate size */ +#define viddec_fw_get_user_data_size(x) ((x)->user_data.size) +#define viddec_fw_get_user_data(x) (unsigned char *)&((x)->user_data.data_payload[0]) + unsigned char size; + unsigned char data_payload[11]; + /* + ITEM TYPES WHICH use this: + VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED, VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED, + VIDDEC_WORKLOAD_SEQ_USER_DATA, VIDDEC_WORKLOAD_GOP_USER_DATA, + VIDDEC_WORKLOAD_FRM_USER_DATA, VIDDEC_WORKLOAD_FLD_USER_DATA, VIDDEC_WORKLOAD_SLC_USER_DATA, + */ + } user_data; + struct + { + // Sequence Header Item I (From LSB): + // - horizontal_size_value - 12 bits + // - vertical_size_value - 12 bits + // - aspect_ratio_information - 4 bits + // - frame_rate_code - 4 bits +#define viddec_fw_mp2_sh_get_horizontal_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 0, 0xFFF) +#define viddec_fw_mp2_sh_get_vertical_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 12, 0xFFF) +#define viddec_fw_mp2_sh_get_aspect_ratio_information(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 24, 0xF) +#define viddec_fw_mp2_sh_get_frame_rate_code(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 28, 0xF) +#define viddec_fw_mp2_sh_set_horizontal_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 0, 0xFFF) +#define viddec_fw_mp2_sh_set_vertical_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 12, 0xFFF) +#define viddec_fw_mp2_sh_set_aspect_ratio_information(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 24, 0xF) +#define viddec_fw_mp2_sh_set_frame_rate_code(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 28, 0xF) + unsigned int seq_hdr_item_1; + + // Sequence Header Item II (From LSB): + // - bit_rate_value - 18 bits + // - vbv_buffer_size_value - 10 bits + // - remaining pad bits +#define 
viddec_fw_mp2_sh_get_bit_rate_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 0, 0x3FFFF) +#define viddec_fw_mp2_sh_get_vbv_buffer_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 18, 0x3FF) +#define viddec_fw_mp2_sh_set_bit_rate_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 0, 0x3FFFF) +#define viddec_fw_mp2_sh_set_vbv_buffer_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 18, 0x3FF) + unsigned int seq_hdr_item_2; + + unsigned int pad; + } mp2_sh; // mp2 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO + struct + { + // Sequence Extension Item I (From LSB): + // - profile_and_level_indication - 8 bits + // - progressive_sequence - 1 bit + // - chroma_format - 2 bits + // - horizontal_size_extension - 2 bits + // - vertical_size_extension - 2 bits + // - bit_rate_extension - 12 bits + // - remaining pad bits +#define viddec_fw_mp2_se_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 0, 0xFF) +#define viddec_fw_mp2_se_get_progressive_sequence(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 8, 0x1) +#define viddec_fw_mp2_se_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 9, 0x3) +#define viddec_fw_mp2_se_get_horizontal_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 11, 0x3) +#define viddec_fw_mp2_se_get_vertical_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 13, 0x3) +#define viddec_fw_mp2_se_get_bit_rate_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 15, 0xFFF) +#define viddec_fw_mp2_se_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 0, 0xFF) +#define viddec_fw_mp2_se_set_progressive_sequence(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 8, 0x1) +#define viddec_fw_mp2_se_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 9, 0x3) +#define viddec_fw_mp2_se_set_horizontal_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 11, 0x3) +#define viddec_fw_mp2_se_set_vertical_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 13, 0x3) +#define viddec_fw_mp2_se_set_bit_rate_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 15, 0xFFF) + unsigned int seq_ext_item_1; + + // Sequence Extension Item II (From LSB): + // - vbv_buffer_size_extension - 8 bits + // - frame_rate_extension_n - 2 bits + // - frame_rate_extension_d - 5 bits + // - remaining pad bits +#define viddec_fw_mp2_se_get_vbv_buffer_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 0, 0xFF) +#define viddec_fw_mp2_se_get_frame_rate_extension_n(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 8, 0x3) +#define viddec_fw_mp2_se_get_frame_rate_extension_d(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 10, 0x1F) +#define viddec_fw_mp2_se_set_vbv_buffer_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 0, 0xFF) +#define viddec_fw_mp2_se_set_frame_rate_extension_n(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 8, 0x3) +#define viddec_fw_mp2_se_set_frame_rate_extension_d(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 10, 0x1F) + unsigned int seq_ext_item_2; + + unsigned int pad; + } mp2_se; // mp2 item of type VIDDEC_WORKLOAD_MPEG2_SEQ_EXT + struct + { + // Sequence Display Extension Item I (From LSB): + // - display_horizontal_size - 14 bits + // - display_vertical_size - 14 bits + // - video_format - 3 
bits + // - color_description - 1 bit +#define viddec_fw_mp2_sde_get_display_horizontal_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 0, 0x3FFF) +#define viddec_fw_mp2_sde_get_display_vertical_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 14, 0x3FFF) +#define viddec_fw_mp2_sde_get_video_format(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 28, 0x7) +#define viddec_fw_mp2_sde_get_color_description(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 31, 0x1) +#define viddec_fw_mp2_sde_set_display_horizontal_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 0, 0x3FFF) +#define viddec_fw_mp2_sde_set_display_vertical_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 14, 0x3FFF) +#define viddec_fw_mp2_sde_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 28, 0x7) +#define viddec_fw_mp2_sde_set_color_description(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 31, 0x1) + unsigned int seq_disp_ext_item_1; + + // Sequence Display Extension II (From LSB): + // - color_primaries - 8 bits + // - transfer_characteristics - 8 bits + // - remaining pad bits +#define viddec_fw_mp2_sde_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 0, 0xFF) +#define viddec_fw_mp2_sde_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 8, 0xFF) +#define viddec_fw_mp2_sde_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 0, 0xFF) +#define viddec_fw_mp2_sde_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 8, 0xFF) + unsigned int seq_disp_ext_item_2; + + unsigned int pad; + } mp2_sde; // mp2 item of type VIDDEC_WORKLOAD_DISPLAY_INFO + struct + { + // Group of Pictures Header Item I (From LSB): + // - closed_gop - 1 bit + // - broken_link - 1 bit + // - remaining pad bits +#define viddec_fw_mp2_gop_get_closed_gop(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 0, 0x1) +#define viddec_fw_mp2_gop_get_broken_link(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 1, 0x1) +#define viddec_fw_mp2_gop_set_closed_gop(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 0, 0x1) +#define viddec_fw_mp2_gop_set_broken_link(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 1, 0x1) + unsigned int gop_hdr_item_1; + + unsigned int pad1; + unsigned int pad2; + } mp2_gop; // mp2 item of type VIDDEC_WORKLOAD_GOP_INFO + struct + { +#define viddec_fw_vc1_get_profile(x) viddec_fw_bitfields_extract((x)->size, 30, 0x3) +#define viddec_fw_vc1_set_profile(x, val) viddec_fw_bitfields_insert((x)->size, val, 30, 0x3) + +#define viddec_fw_vc1_get_level(x) viddec_fw_bitfields_extract((x)->size, 27, 0x7) +#define viddec_fw_vc1_set_level(x, val) viddec_fw_bitfields_insert((x)->size, val, 27, 0x7) + +#define viddec_fw_vc1_get_colordiff_format(x) viddec_fw_bitfields_extract((x)->size, 25, 0x3) +#define viddec_fw_vc1_set_colordiff_format(x, val) viddec_fw_bitfields_insert((x)->size, val, 25, 0x3) + +#define viddec_fw_vc1_get_pulldown(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1) +#define viddec_fw_vc1_set_pulldown(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1) + +#define viddec_fw_vc1_get_max_coded_width(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF) +#define viddec_fw_vc1_set_max_coded_width(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF) + +#define viddec_fw_vc1_get_max_coded_height(x) 
viddec_fw_bitfields_extract((x)->size, 0, 0xFFF) +#define viddec_fw_vc1_set_max_coded_height(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF) + +#define viddec_fw_vc1_get_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1F) +#define viddec_fw_vc1_set_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1F) + +#define viddec_fw_vc1_get_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x7) +#define viddec_fw_vc1_set_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x7) + +#define viddec_fw_vc1_get_interlace(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1) +#define viddec_fw_vc1_set_interlace(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1) + +#define viddec_fw_vc1_get_tfcntrflag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1) +#define viddec_fw_vc1_set_tfcntrflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1) + +#define viddec_fw_vc1_get_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 2, 0x1) +#define viddec_fw_vc1_set_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 2, 0x1) + +#define viddec_fw_vc1_get_psf(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x1) +#define viddec_fw_vc1_set_psf(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x1) + +#define viddec_fw_vc1_get_display_ext(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1) +#define viddec_fw_vc1_set_display_ext(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1) + + uint32_t size; // profile:2, level:3, colordiff_format:2, pulldown:1, max_coded_width:12, max_coded_height:12 + uint32_t flags; // bitrtq_postproc:5, frmrtq_postproc:3, interlace:1, tfcntrflag:1, finterpflag:1, psf:1, display_ext:1 + uint32_t pad; + } vc1_sl; // vc1 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO + struct + { + // This item is populated when display_ext flag is set in the sequence layer + // therefore, no need to provide this flag +#define viddec_fw_vc1_get_disp_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 18, 0x3FFF) +#define viddec_fw_vc1_set_disp_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 18, 0x3FFF) + +#define viddec_fw_vc1_get_disp_vert_size(x) viddec_fw_bitfields_extract((x)->size, 4, 0x3FFF) +#define viddec_fw_vc1_set_disp_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 4, 0x3FFF) + +#define viddec_fw_vc1_get_disp_aspect_ratio_flag(x) viddec_fw_bitfields_extract((x)->size, 3, 0x1) +#define viddec_fw_vc1_set_disp_aspect_ratio_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 3, 0x1) + +#define viddec_fw_vc1_get_disp_color_format_flag(x) viddec_fw_bitfields_extract((x)->size, 2, 0x1) +#define viddec_fw_vc1_set_disp_color_format_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 2, 0x1) + +#define viddec_fw_vc1_get_disp_framerate_flag(x) viddec_fw_bitfields_extract((x)->size, 1, 0x1) +#define viddec_fw_vc1_set_disp_framerate_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 1, 0x1) + +#define viddec_fw_vc1_get_disp_framerateind(x) viddec_fw_bitfields_extract((x)->size, 0, 0x1) +#define viddec_fw_vc1_set_disp_framerateind(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0x1) + +#define viddec_fw_vc1_get_disp_aspect_ratio(x) viddec_fw_bitfields_extract((x)->framerate, 28, 0xF) +#define viddec_fw_vc1_set_disp_aspect_ratio(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 28, 0xF) + +#define viddec_fw_vc1_get_disp_frameratenr(x) viddec_fw_bitfields_extract((x)->framerate, 20, 0xFF) +#define viddec_fw_vc1_set_disp_frameratenr(x, val) 
viddec_fw_bitfields_insert((x)->framerate, val, 20, 0xFF) + +#define viddec_fw_vc1_get_disp_frameratedr(x) viddec_fw_bitfields_extract((x)->framerate, 16, 0xF) +#define viddec_fw_vc1_set_disp_frameratedr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 16, 0xF) + +#define viddec_fw_vc1_get_disp_framerateexp(x) viddec_fw_bitfields_extract((x)->framerate, 0, 0xFFFF) +#define viddec_fw_vc1_set_disp_framerateexp(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 0, 0xFFFF) + +#define viddec_fw_vc1_get_disp_aspect_ratio_horiz_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 24, 0xFF) +#define viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 24, 0xFF) + +#define viddec_fw_vc1_get_disp_aspect_ratio_vert_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 16, 0xFF) +#define viddec_fw_vc1_set_disp_aspect_ratio_vert_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 16, 0xFF) + +#define viddec_fw_vc1_get_disp_color_prim(x) viddec_fw_bitfields_extract((x)->aspectsize, 8, 0xFF) +#define viddec_fw_vc1_set_disp_color_prim(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 8, 0xFF) + +#define viddec_fw_vc1_get_disp_transfer_char(x) viddec_fw_bitfields_extract((x)->aspectsize, 0, 0xFF) +#define viddec_fw_vc1_set_disp_transfer_char(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 0, 0xFF) + + uint32_t size; // disp_horiz_size:14, disp_vert_size:14, aspect_ratio_flag:1, color_format_flag:1, framerate_flag:1, framerateind:1 + uint32_t framerate; // aspect_ratio:4, frameratenr:8, frameratedr:4, framerateexp:16 + uint32_t aspectsize; // aspect_ratio_horiz_size:8, aspect_ratio_vert_size:8, color_prim:8, transfer_char:8 + } vc1_sl_de; // vc1 item of type VIDDEC_WORKLOAD_DISPLAY_INFO + struct + { +#define viddec_fw_vc1_get_rcv_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 16, 0xFFFF) +#define viddec_fw_vc1_set_rcv_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 16, 0xFFFF) + +#define viddec_fw_vc1_get_rcv_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFFF) +#define viddec_fw_vc1_set_rcv_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFFF) + +#define viddec_fw_vc1_get_rcv_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 16, 0x1F) +#define viddec_fw_vc1_set_rcv_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 16, 0x1F) + +#define viddec_fw_vc1_get_rcv_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 13, 0x7) +#define viddec_fw_vc1_set_rcv_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 13, 0x7) + +#define viddec_fw_vc1_get_rcv_profile(x) viddec_fw_bitfields_extract((x)->flags, 9, 0xF) +#define viddec_fw_vc1_set_rcv_profile(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0xF) + +#define viddec_fw_vc1_get_rcv_level(x) viddec_fw_bitfields_extract((x)->flags, 6, 0x7) +#define viddec_fw_vc1_set_rcv_level(x, val) viddec_fw_bitfields_insert((x)->flags, val, 6, 0x7) + +#define viddec_fw_vc1_get_rcv_cbr(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x1) +#define viddec_fw_vc1_set_rcv_cbr(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x1) + +#define viddec_fw_vc1_get_rcv_rangered(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1) +#define viddec_fw_vc1_set_rcv_rangered(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1) + +#define viddec_fw_vc1_get_rcv_maxbframes(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x7) +#define viddec_fw_vc1_set_rcv_maxbframes(x, val) 
viddec_fw_bitfields_insert((x)->flags, val, 1, 0x7) + +#define viddec_fw_vc1_get_rcv_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1) +#define viddec_fw_vc1_set_rcv_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1) + + uint32_t size; // horiz_size:16, vert_size:16 + uint32_t flags; // bitrtq_postproc:5, frmrtq_postproc:3, profile:4, level:3, cbr:1, rangered:1, maxbframes:3, finterpflag:1 + uint32_t pad; + } vc1_sh_struct_a_c; // vc1 item of type VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C + struct + { +#define viddec_fw_vc1_get_ep_size_flag(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1) +#define viddec_fw_vc1_set_ep_size_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1) + +#define viddec_fw_vc1_get_ep_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF) +#define viddec_fw_vc1_set_ep_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF) + +#define viddec_fw_vc1_get_ep_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF) +#define viddec_fw_vc1_set_ep_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF) + +#define viddec_fw_vc1_get_ep_broken_link(x) viddec_fw_bitfields_extract((x)->flags, 10, 0x1) +#define viddec_fw_vc1_set_ep_broken_link(x, val) viddec_fw_bitfields_insert((x)->flags, val, 10, 0x1) + +#define viddec_fw_vc1_get_ep_closed_entry(x) viddec_fw_bitfields_extract((x)->flags, 9, 0x1) +#define viddec_fw_vc1_set_ep_closed_entry(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0x1) + +#define viddec_fw_vc1_get_ep_panscan_flag(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1) +#define viddec_fw_vc1_set_ep_panscan_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1) + +#define viddec_fw_vc1_get_ep_range_mapy_flag(x) viddec_fw_bitfields_extract((x)->flags, 7, 0x1) +#define viddec_fw_vc1_set_ep_range_mapy_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 7, 0x1) + +#define viddec_fw_vc1_get_ep_range_mapy(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x7) +#define viddec_fw_vc1_set_ep_range_mapy(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x7) + +#define viddec_fw_vc1_get_ep_range_mapuv_flag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1) +#define viddec_fw_vc1_set_ep_range_mapuv_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1) + +#define viddec_fw_vc1_get_ep_range_mapuv(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x7) +#define viddec_fw_vc1_set_ep_range_mapuv(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x7) + + uint32_t size; // coded_size_flag:1, coded_width:12, coded_height:12 + uint32_t flags; // broken_link:1, closed_entry:1, panscan_flag:1, range_mapy_flag:1, range_mapy:3, range_mapuv_flag:1, range_mapuv:3 + uint32_t pad; + } vc1_ep; // vc1 item of type VIDDEC_WORKLOAD_GOP_INFO + struct + { + /* + 0-7 bits for profile_idc. + 8-15 bits for level_idc. + 16-17 bits for chroma_format_idc. + 18-22 bits for num_ref_frames. + 23 for gaps_in_frame_num_value_allowed_flag. + 24 for frame_mbs_only_flag. + 25 for frame_cropping_flag. + 26 for vui_parameters_present_flag. 
+ */ +#define viddec_fw_h264_sps_get_profile_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 0, 0xFF) +#define viddec_fw_h264_sps_set_profile_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 0, 0xFF) +#define viddec_fw_h264_sps_get_level_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 8, 0xFF) +#define viddec_fw_h264_sps_set_level_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 8, 0xFF) +#define viddec_fw_h264_sps_get_chroma_format_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 16, 0x3) +#define viddec_fw_h264_sps_set_chroma_format_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 16, 0x3) +#define viddec_fw_h264_sps_get_num_ref_frames(x) viddec_fw_bitfields_extract( (x)->sps_messages, 18, 0x1F) +#define viddec_fw_h264_sps_set_num_ref_frames(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 18, 0x1F) +#define viddec_fw_h264_sps_get_gaps_in_frame_num_value_allowed_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 23, 0x1) +#define viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 23, 0x1) +#define viddec_fw_h264_sps_get_frame_mbs_only_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 24, 0x1) +#define viddec_fw_h264_sps_set_frame_mbs_only_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 24, 0x1) +#define viddec_fw_h264_sps_get_frame_cropping_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 25, 0x1) +#define viddec_fw_h264_sps_set_frame_cropping_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 25, 0x1) +#define viddec_fw_h264_sps_get_vui_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 26, 0x1) +#define viddec_fw_h264_sps_set_vui_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 26, 0x1) + unsigned int sps_messages; + unsigned int pic_width_in_mbs_minus1; + unsigned int pic_height_in_map_units_minus1; + } h264_sps; // h264 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO + + struct h264_witem_sps_mvc_id h264_sps_mvc_id; + + struct + { +#define viddec_fw_h264_cropping_get_left(x) viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF) +#define viddec_fw_h264_cropping_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF) +#define viddec_fw_h264_cropping_set_left(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF) +#define viddec_fw_h264_cropping_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF) + unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */ +#define viddec_fw_h264_cropping_get_top(x) viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF) +#define viddec_fw_h264_cropping_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF) +#define viddec_fw_h264_cropping_set_top(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF) +#define viddec_fw_h264_cropping_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF) + unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */ + unsigned int pad; + } h264_cropping; // h264 item of type VIDDEC_WORKLOAD_H264_CROPPING + + struct + { + /* 0 bit for aspect_ratio_info_present_flag + 1 st bit for video_signal_type_present_flag + 2 nd bit for colour_description_present_flag + 3 rd bit for timing_info_present_flag + 4 th bit for nal_hrd_parameters_present_flag + 5 th bit for vcl_hrd_parameters_present_flag + 6 th bit for 
fixed_frame_rate_flag + 7 th bit for pic_struct_present_flag + 8 th bit for low_delay_hrd_flag + 9,10,11 bits for video_format + */ +#define viddec_fw_h264_vui_get_aspect_ratio_info_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 0, 0x1) +#define viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 0, 0x1) +#define viddec_fw_h264_vui_get_video_signal_type_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 1, 0x1) +#define viddec_fw_h264_vui_set_video_signal_type_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 1, 0x1) +#define viddec_fw_h264_vui_get_colour_description_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 2, 0x1) +#define viddec_fw_h264_vui_set_colour_description_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 2, 0x1) +#define viddec_fw_h264_vui_get_timing_info_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 3, 0x1) +#define viddec_fw_h264_vui_set_timing_info_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 3, 0x1) +#define viddec_fw_h264_vui_get_nal_hrd_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 4, 0x1) +#define viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 4, 0x1) +#define viddec_fw_h264_vui_get_vcl_hrd_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 5, 0x1) +#define viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 5, 0x1) +#define viddec_fw_h264_vui_get_fixed_frame_rate_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 6, 0x1) +#define viddec_fw_h264_vui_set_fixed_frame_rate_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 6, 0x1) +#define viddec_fw_h264_vui_get_pic_struct_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 7, 0x1) +#define viddec_fw_h264_vui_set_pic_struct_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 7, 0x1) +#define viddec_fw_h264_vui_get_low_delay_hrd_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 8, 0x1) +#define viddec_fw_h264_vui_set_low_delay_hrd_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 8, 0x1) +#define viddec_fw_h264_vui_get_video_format(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 9, 0x7) +#define viddec_fw_h264_vui_set_video_format(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 9, 0x7) + unsigned int vui_flags_and_format; + +#define viddec_fw_h264_vui_get_aspect_ratio_idc(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 0, 0xFF) +#define viddec_fw_h264_vui_set_aspect_ratio_idc(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 0, 0xFF) +#define viddec_fw_h264_vui_get_colour_primaries(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 8, 0xFF) +#define viddec_fw_h264_vui_set_colour_primaries(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 8, 0xFF) +#define viddec_fw_h264_vui_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 16, 0xFF) +#define viddec_fw_h264_vui_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 16, 0xFF) + /* lower 8 bits for aspect_ratio, next 8bits for 
color primaries and next 8 bits for transfer characteristics */ + unsigned int aspc_color_transfer; + +#define viddec_fw_h264_vui_get_sar_width(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 16, 0xFFFF) +#define viddec_fw_h264_vui_get_sar_height(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 0, 0xFFFF) +#define viddec_fw_h264_vui_set_sar_width(x, val) viddec_fw_bitfields_insert( (x)->sar_width_height, val, 16, 0xFFFF) +#define viddec_fw_h264_vui_set_sar_height(x, val) viddec_fw_bitfields_insert( (x)->sar_width_height, val, 0, 0xFFFF) + unsigned int sar_width_height; /* Lower 16 for height upper 16 for width */ + } h264_vui; // h264 item of type VIDDEC_WORKLOAD_DISPLAY_INFO + struct + { +#define viddec_fw_h264_vui_get_num_units_in_tick_flag(x) viddec_fw_bitfields_extract( (x)->num_units_in_tick, 0, 0xFFFFFFFF) +#define viddec_fw_h264_vui_set_num_units_in_tick_flag(x, val) viddec_fw_bitfields_insert( (x)->num_units_in_tick, val, 0, 0xFFFFFFFF) +#define viddec_fw_h264_vui_get_time_scale_flag(x) viddec_fw_bitfields_extract( (x)->time_scale, 0, 0xFFFFFFFF) +#define viddec_fw_h264_vui_set_time_scale_flag(x, val) viddec_fw_bitfields_insert( (x)->time_scale, val, 0, 0xFFFFFFFF) + unsigned int num_units_in_tick; + unsigned int time_scale; + unsigned int pad1; + } h264_vui_time_info; // VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO + struct + { + unsigned int pic_struct; /* 4 bit length */ + unsigned int pad1; + unsigned int pad2; + } h264_sei_pic_timing; // h264 item of type VIDDEC_WORKLOAD_SEI_PIC_TIMING + struct + { + unsigned int pan_scan_rect_id; + +#define viddec_fw_h264_sei_pan_scan_get_cancel_flag(x) viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 0, 0x1) +#define viddec_fw_h264_sei_pan_scan_get_cnt_minus1(x) viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 1, 0x3) +#define viddec_fw_h264_sei_pan_scan_set_cancel_flag(x, val) viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 0, 0x1) +#define viddec_fw_h264_sei_pan_scan_set_cnt_minus1(x, val) viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 1, 0x3) + unsigned int pan_scan_cancel_and_cnt; /* 0 bit for cancel flag and 2 bits for cnt_minus1 */ + unsigned int pan_scan_rect_repetition_period; + } h264_sei_pan_scan; // h264 item of type VIDDEC_WORKLOAD_H264_PAN_SCAN + + struct + { + +#define viddec_fw_h264_pan_scan_get_left(x) viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF) +#define viddec_fw_h264_pan_scan_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF) +#define viddec_fw_h264_pan_scan_set_left(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF) +#define viddec_fw_h264_pan_scan_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF) + unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */ + +#define viddec_fw_h264_pan_scan_get_top(x) viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF) +#define viddec_fw_h264_pan_scan_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF) +#define viddec_fw_h264_pan_scan_set_top(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF) +#define viddec_fw_h264_pan_scan_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF) + unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */ + + unsigned int pad; + } h264_pan_scan_rect; // h264 item of type VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT + struct + { + unsigned int recovery_frame_cnt; +#define 
viddec_fw_h264_h264_sei_recovery_get_exact_match_flag(x) viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 0, 0x1) +#define viddec_fw_h264_h264_sei_recovery_get_broken_link_flag(x) viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 1, 0x1) +#define viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(x, val) viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 0, 0x1) +#define viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(x, val) viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 1, 0x1) + unsigned int broken_and_exctmatch_flags; /* bit 0 for exact match, bit 1 for broken link */ + + unsigned int changing_slice_group_idc; /* 2-bit value for slice_group idc */ + + } h264_sei_recovery_point; // h264 item of type VIDDEC_WORKLOAD_SEI_RECOVERY_POINT + + + struct + { + // Visual Sequence (From LSB): + // - profile_and_level_indication - 8 bits +#define viddec_fw_mp4_vs_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->vs_item, 0, 0xFF) +#define viddec_fw_mp4_vs_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->vs_item, val, 0, 0xFF) + unsigned int vs_item; + + // Visual Object - video_signal_type + // - video_signal_type - 1b + // - video_format - 3b + // - video_range - 1b + // - colour_description - 1b +#define viddec_fw_mp4_vo_get_colour_description(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 5, 0x1) +#define viddec_fw_mp4_vo_set_colour_description(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 5, 0x1) +#define viddec_fw_mp4_vo_get_video_range(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 4, 0x1) +#define viddec_fw_mp4_vo_set_video_range(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 4, 0x1) +#define viddec_fw_mp4_vo_get_video_format(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 1, 0x7) +#define viddec_fw_mp4_vo_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 1, 0x7) +#define viddec_fw_mp4_vo_get_video_signal_type(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 0, 0x1) +#define viddec_fw_mp4_vo_set_video_signal_type(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 0, 0x1) + unsigned int video_signal_type; + + // Visual Object - colour_description + // - color_primaries - 8 bits + // - transfer_characteristics - 8 bits +#define viddec_fw_mp4_vo_get_transfer_char(x) viddec_fw_bitfields_extract( (x)->color_desc, 8, 0xFF) +#define viddec_fw_mp4_vo_set_transfer_char(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 8, 0xFF) +#define viddec_fw_mp4_vo_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->color_desc, 0, 0xFF) +#define viddec_fw_mp4_vo_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 0, 0xFF) + unsigned int color_desc; + } mp4_vs_vo; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ
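+
+ /*
+ Usage sketch (illustrative only): the get/set pairs above wrap
+ viddec_fw_bitfields_insert/extract, so a parser fills a visual-object
+ item roughly like this ('wi' is a local variable; treat the exact
+ vwi_type assignment as an assumption based on the comment above):
+
+ viddec_workload_item_t wi;
+ wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ;
+ viddec_fw_mp4_vs_set_profile_and_level_indication(&wi.mp4_vs_vo, 0xF3);
+ viddec_fw_mp4_vo_set_video_format(&wi.mp4_vs_vo, 5);
+ */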
+ + struct + { + // Video Object Layer(From LSB): + // - aspect_ratio_info - 4b + // - par_width - 8b + // - par_height - 8b + // - vol_control_param - 1b + // - chroma_format - 2b + // - interlaced - 1b + // - fixed_vop_rate - 1b +#define viddec_fw_mp4_vol_get_fixed_vop_rate(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 24, 0x1) +#define viddec_fw_mp4_vol_set_fixed_vop_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 24, 0x1) +#define viddec_fw_mp4_vol_get_interlaced(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 23, 0x1) +#define viddec_fw_mp4_vol_set_interlaced(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 23, 0x1) +#define viddec_fw_mp4_vol_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 21, 0x3) +#define viddec_fw_mp4_vol_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 21, 0x3) +#define viddec_fw_mp4_vol_get_control_param(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 20, 0x1) +#define viddec_fw_mp4_vol_set_control_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 20, 0x1) +#define viddec_fw_mp4_vol_get_par_height(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 12, 0xFF) +#define viddec_fw_mp4_vol_set_par_height(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 12, 0xFF) +#define viddec_fw_mp4_vol_get_par_width(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 4, 0xFF) +#define viddec_fw_mp4_vol_set_par_width(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 4, 0xFF) +#define viddec_fw_mp4_vol_get_aspect_ratio_info(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 0, 0xF) +#define viddec_fw_mp4_vol_set_aspect_ratio_info(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 0, 0xF) + unsigned int vol_aspect_ratio; + + // Video Object Layer(From LSB): + // - vbv_parameters - 1b + // - bit_rate - 30b +#define viddec_fw_mp4_vol_get_bit_rate(x) viddec_fw_bitfields_extract( (x)->vol_bit_rate, 1, 0x3FFFFFFF) +#define viddec_fw_mp4_vol_set_bit_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 1, 0x3FFFFFFF) +#define viddec_fw_mp4_vol_get_vbv_param(x) viddec_fw_bitfields_extract( (x)->vol_bit_rate, 0, 0x1) +#define viddec_fw_mp4_vol_set_vbv_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 0, 0x1) + unsigned int vol_bit_rate; + + // Video Object Layer(From LSB): + // - fixed_vop_time_increment - 16b + // - vop_time_increment_resolution - 16b +#define viddec_fw_mp4_vol_get_vop_time_increment_resolution(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 16, 0xFFFF) +#define viddec_fw_mp4_vol_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 16, 0xFFFF) +#define viddec_fw_mp4_vol_get_fixed_vop_time_increment(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 1, 0xFFFF) +#define viddec_fw_mp4_vol_set_fixed_vop_time_increment(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 1, 0xFFFF) + unsigned int vol_frame_rate; + } mp4_vol; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ + + struct + { + // Group of Video Object Planes(From LSB): + // - time_code - 18b + // - closed_gov - 1b + // - broken_link - 1b +#define viddec_fw_mp4_gvop_get_broken_link(x) viddec_fw_bitfields_extract((x)->gvop_info, 19, 0x1) +#define viddec_fw_mp4_gvop_set_broken_link(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 19, 0x1) +#define viddec_fw_mp4_gvop_get_closed_gov(x) viddec_fw_bitfields_extract((x)->gvop_info, 18, 0x1) +#define viddec_fw_mp4_gvop_set_closed_gov(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 18, 0x1) +#define viddec_fw_mp4_gvop_get_time_code(x) viddec_fw_bitfields_extract((x)->gvop_info, 0, 0x3FFFF) +#define viddec_fw_mp4_gvop_set_time_code(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 0, 0x3FFFF) + unsigned int gvop_info; + + unsigned int pad1; + unsigned int pad2; + } mp4_gvop; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ + + struct + { + // Video Plane with Short Header (From LSB): + // - source_format - 3b +#define viddec_fw_mp4_vpsh_get_source_format(x) 
viddec_fw_bitfields_extract((x)->info, 0, 0x7) +#define viddec_fw_mp4_vpsh_set_source_format(x, val) viddec_fw_bitfields_insert((x)->info, val, 0, 0x7) + unsigned int info; + + unsigned int pad1; + unsigned int pad2; + } mp4_vpsh; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT + + unsigned int vwi_payload[3]; + }; +} viddec_workload_item_t; + + + +#endif /* VIDDEC_ITEM_TYPES_H */ diff --git a/mixvbp/include/viddec_fw_parser_host.h b/mixvbp/include/viddec_fw_parser_host.h new file mode 100644 index 0000000..550cf0a --- /dev/null +++ b/mixvbp/include/viddec_fw_parser_host.h @@ -0,0 +1,237 @@ +/* + This file is provided under a dual BSD/GPLv2 license. When using or + redistributing this file, you may do so under either license. + + GPL LICENSE SUMMARY + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + + This program is free software; you can redistribute it and/or modify + it under the terms of version 2 of the GNU General Public License as + published by the Free Software Foundation. + + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. + The full GNU General Public License is included in this distribution + in the file called LICENSE.GPL. + + Contact Information: + + BSD LICENSE + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +*/ + +#ifndef VIDDEC_FW_PARSER_HOST_H +#define VIDDEC_FW_PARSER_HOST_H + +#ifdef __cplusplus +extern "C" { +#endif +#include "viddec_fw_common_defs.h" + + /** @weakgroup viddec Fw Parser interface Functions */ + /** @ingroup viddec_fw_parser */ + /*@{*/ + + /** + This function returns the size required for loading fw. + @retval size : Required size. 
+ */ + uint32_t viddec_fw_parser_query_fwsize(void); + + /** + This function loads Parser Firmware and initialises necessary state information. This is a synchronous message to FW. + @param[in] phys : Physical address where firmware should be loaded. + @param[in] len : Length of data allocated at phys. + @retval VIDDEC_FW_SUCCESS : Successfully loaded firmware. + @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. + @retval VIDDEC_FW_NORESOURCES : Failed to allocate resources for Loading firmware. + @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. + */ + uint32_t viddec_fw_parser_loadfw(uint32_t phys, uint32_t len); + + /** + This function returns the size required for opening a stream. This is a synchronous message to FW. + @param[in] codec_type : Type of codec that we want information about. + @param[out] num_wklds : Number of wklds required for initialisation. + @param[out] size : Size of memory required for opening a stream. + */ + void viddec_fw_parser_query_streamsize(uint32_t codec_type, uint32_t *num_wklds, uint32_t *size); + + /** + This function opens the requested codec. This is a synchronous message to FW. + @param[in] codec_type : Type of codec that we want to open. + @param[in] phys : Physical address of allocated memory for this codec. + @param[in] priority : Priority of stream. 1 for realtime and 0 for background. + @param[out] strm_handle : Handle of the opened stream. + @retval VIDDEC_FW_SUCCESS : Successfully Opened the stream. + @retval VIDDEC_FW_FAILURE : Failed to Open a stream. + @retval VIDDEC_FW_NORESOURCES : Failed to Open a stream as we are out of resources. + */ + uint32_t viddec_fw_parser_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority); + + /** + This function closes a stream. This is a synchronous message to FW. + For the close stream to be effective, the host has to do a flush with discard first and then close the stream. + @param[in] strm_handle : Handle of the stream to close. + */ + void viddec_fw_parser_closestream(uint32_t strm_handle); + + /** + This function flushes the current stream. This is a synchronous message to FW. + Before calling this function the host has to make sure the output queue of the firmware + is empty. After this function is executed the FW will read all entries in the input + es buffer queue into a free or partial workload and push it into the output queue. + After this operation the host has to read all entries in the output queue again to + finish the flush operation. + @param[in] flush_type : Type of flush we want to perform, e.g. flush and discard. + @param[in] strm_handle : Handle of the stream we want to flush. + @retval VIDDEC_FW_SUCCESS : Successfully flushed the stream. + @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. + @retval VIDDEC_FW_NEED_FREE_WKLD : Failed to flush since a free wkld was not available. + */ + uint32_t viddec_fw_parser_flushstream(uint32_t strm_handle, uint32_t flush_type); + + /** + This function sends an input es buffer. + @param[in] strm_handle : The handle of stream that we want to send es buffer to. + @param[in] message : The es buffer we want to send. + @retval VIDDEC_FW_SUCCESS : Successfully Sent the message. + @retval VIDDEC_FW_PORT_FULL : Port to FW is full; unsuccessful in sending message. + @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. + */ + uint32_t viddec_fw_parser_send(uint32_t strm_handle, ipc_msg_data *message);
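+
+ /*
+ Illustrative host-side bring-up (a sketch only; error handling elided and
+ not part of this header's contract). fw_phys and strm_phys stand in for
+ physical addresses from a platform allocator, es_msg for a filled
+ ipc_msg_data, and MFD_STREAM_FORMAT_H264 is assumed to be one of the
+ codec types defined in viddec_fw_common_defs.h:
+
+ uint32_t fw_size = viddec_fw_parser_query_fwsize();
+ uint32_t num_wklds, strm_size, strm_handle;
+ viddec_fw_parser_loadfw(fw_phys, fw_size);
+ viddec_fw_parser_query_streamsize(MFD_STREAM_FORMAT_H264, &num_wklds, &strm_size);
+ viddec_fw_parser_openstream(MFD_STREAM_FORMAT_H264, &strm_handle, strm_phys, 1);
+ viddec_fw_parser_send(strm_handle, &es_msg);
+ */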
+ + /** + This function gets the next processed workload. The host is required to add free workloads + to keep the parser busy. The FW will stall when it doesn't have enough workloads (2) to continue. + @param[in] strm_handle : The handle of stream that we want to read workload from. + @param[out] message : The workload descriptor. + @retval VIDDEC_FW_SUCCESS : Successfully Sent the message. + @retval VIDDEC_FW_PORT_EMPTY : Workload port is empty; unsuccessful in reading wkld. + @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. + */ + uint32_t viddec_fw_parser_recv(uint32_t strm_handle, ipc_msg_data *message); + + /** + This function adds a free workload to the current stream. + @param[in] strm_handle : The handle of stream that we want to write workload to. + @param[out] message : The workload descriptor. + @retval VIDDEC_FW_SUCCESS : Successfully Sent the message. + @retval VIDDEC_FW_PORT_FULL : Workload port is full; unsuccessful in writing wkld. + @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. + */ + uint32_t viddec_fw_parser_addwkld(uint32_t strm_handle, ipc_msg_data *message); + + /** + This function enables or disables Interrupts for a stream. By default the FW will always enable interrupts. + The driver can disable/enable Interrupts if it needs to for this particular stream. + + @param[in] strm_handle : The handle of stream that we want to set the mask for + @param[in] mask : This is read as a boolean variable, true to enable, false to disable. + @retval VIDDEC_FW_SUCCESS : Successfully set mask. + @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. + */ + uint32_t viddec_fw_parser_set_interruptmask(uint32_t strm_handle, uint32_t mask); + /** + This function gets the interrupt status for the current stream. + Since this is a global interrupt, when the host gets interrupted it is expected to check all active streams + by calling this function. The status is what the FW thinks the current state of the stream is. The status information that + the FW provides is complete information on all possible events that are defined. The host should only access this information + in its ISR, during which the FW does not modify it. + + @param[in] strm_handle : The handle of stream that we want to get the status for + @param[out] status : The status of the stream based on viddec_fw_parser_int_status_t enum. + @retval VIDDEC_FW_SUCCESS : Successfully read the status. + @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. + */ + uint32_t viddec_fw_parser_getstatus(uint32_t strm_handle, uint32_t *status); + + /** + This function sets stream attributes that are supported. + @param[in] strm_handle : The handle of stream that we want to set attribute on. + @param[in] type : The type of attribute we want to set, this should be one of items in viddec_fw_stream_attributes_t. + @param[in] value : The value of the type that we want to set. + @retval VIDDEC_FW_SUCCESS : Successfully Set the attribute. + @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. + */ + uint32_t viddec_fw_parser_set_stream_attributes(uint32_t strm_handle, uint32_t type, uint32_t value);
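+
+ /*
+ Illustrative ISR flow (a sketch, not part of this API; active_streams and
+ num_active_streams stand in for host-side bookkeeping that this header
+ does not define):
+
+ void host_parser_isr(void)
+ {
+     uint32_t i, status;
+     for (i = 0; i < num_active_streams; i++)
+     {
+         if (viddec_fw_parser_getstatus(active_streams[i], &status) == VIDDEC_FW_SUCCESS)
+         {
+             // record status; drain queues in a bottom half using
+             // viddec_fw_parser_get_queue_status() declared below
+         }
+     }
+     viddec_fw_parser_clear_global_interrupt();
+ }
+ */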
+ + /** + This function gets the current status of all the parser queues. If the current stream is active we return + the number of input messages that can be written to the input queue, the number of messages in the output queue, and the number of + free workloads the stream has available. + Normally this is called when the host receives an interrupt from the parser, in which case, before releasing the INT, + the host will try its best to keep the FW busy. We always get an interrupt if we passed the watermark on input or + a workload was pushed into output and the INT line is free. If the host holds onto the INT when the firmware tries to send one, + the FW will send the interrupt after the host releases the INT. Since we have edge-triggered interrupts we cannot guarantee + one interrupt per frame. For example, if three frames are generated and after the first frame the FW was able to deliver an INT + to the host, but the host held on to the INT while the FW finished the next two frames, then after the host releases the INT the FW will + give only one INT and the host should try to empty the output queue. + @param[in] strm_handle : The handle of the stream whose queue status we want. + @param[out] status : The status of each queue gets updated in here. + @retval VIDDEC_FW_SUCCESS : Successfully Got the status information. + @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter; in this case, an inactive stream. + */ + uint32_t viddec_fw_parser_get_queue_status(uint32_t strm_handle, viddec_fw_q_status_t *status); + + /** + This function unloads Parser Firmware and frees the resources allocated in load fw. + If this function is called before load fw it will crash with a segmentation fault. + */ + void viddec_fw_parser_deinit(void); + + /** + This function gets the major and minor revision numbers of the loaded firmware. + @param[out] major : The major revision number. + @param[out] minor : The minor revision number. + @param[out] build : The Internal Build number. + */ + void viddec_fw_parser_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build); + + /** + This function clears the global interrupt. This is the last thing the host calls before exiting its ISR. + */ + void viddec_fw_parser_clear_global_interrupt(void); + + /*@}*/ +#ifdef __cplusplus +} +#endif + +#endif//#ifndef VIDDEC_FW_PARSER_HOST_H diff --git a/mixvbp/include/viddec_fw_workload.h b/mixvbp/include/viddec_fw_workload.h new file mode 100644 index 0000000..3b86270 --- /dev/null +++ b/mixvbp/include/viddec_fw_workload.h @@ -0,0 +1,152 @@ +/* + This file is provided under a dual BSD/GPLv2 license. When using or + redistributing this file, you may do so under either license. + + GPL LICENSE SUMMARY + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + + This program is free software; you can redistribute it and/or modify + it under the terms of version 2 of the GNU General Public License as + published by the Free Software Foundation. + + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. + The full GNU General Public License is included in this distribution + in the file called LICENSE.GPL. + + Contact Information: + + BSD LICENSE + + Copyright(c) 2007-2009 Intel Corporation. All rights reserved. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + * Neither the name of Intel Corporation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +*/ +#ifndef VIDDEC_FW_WORKLOAD_H +#define VIDDEC_FW_WORKLOAD_H + +#include <stdint.h> +#include "viddec_fw_item_types.h" +#include "viddec_fw_frame_attr.h" +#include "viddec_fw_common_defs.h" + +#define VIDDEC_WORKLOAD_FLAGS_ES_START_FRAME (1 << 0) +#define VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE (1 << 1) +#define VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE (1 << 2) +#define VIDDEC_WORKLOAD_FLAGS_ES_END_FRAME (1 << 3) + +#define VIDDEC_FRAME_REFERENCE_IS_VALID (0x1<<1) +// PIP Output Frame request bits +#define BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE 24 +#define BMSK_VIDDEC_FRAME_REFERENCE_PIP_MODE (0x3<<BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE) +#define SWAP_BYTE(x,y,z) ((((x) >> ((y) << 3)) & 0xFF) << ((z) << 3)) +#define SWAP_WORD(x) ( SWAP_BYTE((x),0,3) | SWAP_BYTE((x),1,2) | SWAP_BYTE((x),2,1) | SWAP_BYTE((x),3,0)) + +#define DEB + +#endif diff --git a/mixvbp/vbp_manager/include/viddec_parser_ops.h b/mixvbp/vbp_manager/include/viddec_parser_ops.h new file mode 100755 index 0000000..b7e9984 --- /dev/null +++ b/mixvbp/vbp_manager/include/viddec_parser_ops.h @@ -0,0 +1,121 @@ +#ifndef VIDDEC_PARSER_OPS_H +#define VIDDEC_PARSER_OPS_H + +#include "viddec_fw_workload.h" +#include <stdint.h> + +#define VIDDEC_PARSE_INVALID_POS 0xFFFFFFFF + +typedef enum +{ + VIDDEC_PARSE_EOS = 0x0FFF, /* Dummy start code to force EOS */ + VIDDEC_PARSE_DISCONTINUITY, /* Dummy start code to force completion and flush */ +} viddec_parser_inband_messages_t; + +typedef struct +{ + uint32_t context_size; + uint32_t persist_size; +} viddec_parser_memory_sizes_t; + +typedef void (*fn_init)(void *ctxt, uint32_t *persist, uint32_t preserve); +typedef uint32_t (*fn_parse_sc) (void *ctxt, void *pcxt, void *sc_state); +typedef uint32_t (*fn_parse_syntax) (void *parent, void *ctxt); +typedef void (*fn_get_cxt_size) (viddec_parser_memory_sizes_t *size); +typedef uint32_t (*fn_is_wkld_done)(void *parent, void *ctxt, uint32_t next_sc, uint32_t *codec_specific_errors); +typedef uint32_t (*fn_is_frame_start)(void *ctxt); +typedef uint32_t (*fn_gen_contrib_tags)(void *parent, uint32_t ignore_partial); +typedef uint32_t (*fn_gen_assoc_tags)(void *parent); +typedef void (*fn_flush_parser) (void *parent, void *ctxt); +#ifdef USE_AVC_SHORT_FORMAT +typedef uint32_t (*fn_update_data)(void *parent, void *data, uint32_t size); +#endif
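+
+/*
+ Wiring sketch (illustrative): a codec parser exports entry points matching
+ the typedefs above, and the manager fills the ops table defined below with
+ them. The viddec_h264_* names are assumptions for illustration, modeled on
+ the h264 parser elsewhere in this tree:
+
+ extern void viddec_h264_init(void *ctxt, uint32_t *persist, uint32_t preserve);
+ extern uint32_t viddec_h264_parse(void *parent, void *ctxt);
+
+ void wire_h264_ops(viddec_parser_ops_t *ops)
+ {
+     ops->init = viddec_h264_init;
+     ops->parse_syntax = viddec_h264_parse;
+ }
+*/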
+ + +typedef struct +{ + fn_init init; + fn_parse_sc parse_sc; + fn_parse_syntax parse_syntax; + fn_get_cxt_size get_cxt_size; + fn_is_wkld_done is_wkld_done; + fn_is_frame_start is_frame_start; + fn_gen_contrib_tags gen_contrib_tags; + fn_gen_assoc_tags gen_assoc_tags; + fn_flush_parser flush; +#ifdef USE_AVC_SHORT_FORMAT + fn_update_data update_data; +#endif +} viddec_parser_ops_t; + + +typedef enum +{ + VIDDEC_PARSE_ERROR = 0xF0, + VIDDEC_PARSE_SUCESS = 0xF1, + VIDDEC_PARSE_FRMDONE = 0xF2, +} viddec_parser_error_t; + +/* + * + *Functions used by Parsers + * + */ + +/* This function returns the requested number of bits (<=32) and increments the au byte position. + */ +int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits); + +/* This function returns the requested number of bits (<=32) without incrementing the au byte position + */ +int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits); + +/* This function skips the requested number of bits (<=32) by incrementing the au byte position. + */ +int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits); + +/* This function appends a work item to the current/next workload. + */ +int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item, uint32_t next); + +/* This function gets current byte and bit positions and information on whether an emulation byte is present after the +current byte. + */ +int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, unsigned char *is_emul); + +/* This function appends a Pixel tag to the current workload starting from the current position to the end of the au. + */ +int32_t viddec_pm_append_pixeldata(void *parent); + +/* This function appends a Pixel tag to the next workload starting from the current position to the end of the au. + */ +int32_t viddec_pm_append_pixeldata_next(void *parent); + +/* This function provides the workload header for parsers to fill in attribute values + */ +viddec_workload_t* viddec_pm_get_header(void *parent); + +/* This function provides the next workload header for parsers to fill in attribute values + */ +viddec_workload_t* viddec_pm_get_next_header(void *parent); + +/* Returns the value of the byte the current offset is on */ +uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte); + +/* Tells us if there is more data that needs to be parsed */ +int32_t viddec_pm_is_nomoredata(void *parent); + +/* This function appends a misc tag to the workload starting from the start position to the end position of the au */ +int32_t viddec_pm_append_misc_tags(void *parent, uint32_t start, uint32_t end, viddec_workload_item_t *wi, uint32_t using_next); + +void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error); + +void viddec_pm_set_late_frame_detect(void *parent); + +static inline void viddec_fw_reset_workload_item(viddec_workload_item_t *wi) +{ + wi->vwi_payload[0] = wi->vwi_payload[1] = wi->vwi_payload[2] = 0; +} + +void viddec_pm_setup_userdata(viddec_workload_item_t *wi);
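+
+/*
+ Illustrative parser-side read (a sketch): inside a fn_parse_syntax
+ implementation, 'parent' is the opaque handle the manager passes in.
+ Assumes a negative return from viddec_pm_get_bits() signals bitstream
+ underflow:
+
+ uint32_t val = 0;
+ if (viddec_pm_get_bits(parent, &val, 8) < 0)
+     return VIDDEC_PARSE_ERROR;
+ // val now holds the next 8 bits of the access unit
+*/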
+#endif diff --git a/mixvbp/vbp_manager/include/viddec_pm.h b/mixvbp/vbp_manager/include/viddec_pm.h new file mode 100755 index 0000000..45b884b --- /dev/null +++ b/mixvbp/vbp_manager/include/viddec_pm.h @@ -0,0 +1,93 @@ +#ifndef VIDDEC_PM_H +#define VIDDEC_PM_H + +#include <stdint.h> +#include "viddec_pm_utils_bstream.h" +#include "viddec_pm_parse.h" +#include "viddec_parser_ops.h" + +#define SC_DETECT_BUF_SIZE 1024 +#define MAX_CODEC_CXT_SIZE 4096 + +typedef enum +{ + PM_SUCCESS = 0, + /* Messages to indicate more ES data */ + PM_NO_DATA = 0x100, + /* Messages to indicate SC found */ + PM_SC_FOUND = 0x200, + PM_FIRST_SC_FOUND = 0x201, + /* Messages to indicate Frame done */ + PM_WKLD_DONE = 0x300, + /* Messages to indicate Error conditions */ + PM_OVERFLOW = 0x400, + /* Messages to indicate inband conditions */ + PM_INBAND_MESSAGES = 0x500, + PM_EOS = 0x501, + PM_DISCONTINUITY = 0x502, +} pm_parse_state_t; + +/* This is a temporary structure for first-pass sc parsing. list_index tells us where we are in the list of es buffers; + cur_es points to the current es buffer we are parsing. */ +typedef struct +{ + int32_t list_index; /* current index of list */ + uint32_t cur_offset; + uint32_t cur_size; + viddec_input_buffer_t *cur_es; +} viddec_pm_sc_cur_buf_t; + +typedef struct +{ + uint32_t pending_tags[MAX_IBUFS_PER_SC]; + uint8_t dummy; + uint8_t frame_done; + uint8_t first_buf_aligned; + uint8_t using_next; +} vidded_pm_pending_tags_t; + +/* This structure holds all necessary data required by the parser manager for stream parsing. + */ +typedef struct +{ + /* Actual buffer where data gets DMA'd. 8 padding bytes for alignment */ + uint8_t scbuf[SC_DETECT_BUF_SIZE + 8]; + viddec_sc_parse_cubby_cxt_t parse_cubby; + viddec_pm_utils_list_t list; + /* Place to store tags to be added to next to next workload */ + viddec_pm_sc_cur_buf_t cur_buf; + //viddec_emitter emitter; + viddec_pm_utils_bstream_cxt_t getbits; + viddec_sc_prefix_state_t sc_prefix_info; + vidded_pm_pending_tags_t pending_tags; + uint8_t word_align_dummy; + uint8_t late_frame_detect; + uint8_t frame_start_found; + uint8_t found_fm_st_in_current_au; + uint32_t next_workload_error_eos; + uint32_t pending_inband_tags; +#ifdef VBP + uint32_t codec_data[MAX_CODEC_CXT_SIZE<<3]; +#else + uint32_t codec_data[MAX_CODEC_CXT_SIZE>>2]; +#endif +} viddec_pm_cxt_t; + +/* + * + * Functions used by Parser kernel + * + */ + +/* This is for initialising the parser manager context to default values */ +void viddec_pm_init_context(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t *persist_mem, uint32_t clean); + +/* This is the main parse function which returns state information that the parser kernel can understand. */ +uint32_t viddec_pm_parse_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf); + +void viddec_pm_init_ops(); + +void viddec_pm_update_time(viddec_pm_cxt_t *cxt, uint32_t time); + +uint32_t viddec_pm_get_parser_sizes(uint32_t codec_type, viddec_parser_memory_sizes_t *size);
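+
+/*
+ Illustrative parse loop (a sketch): the manager feeds one es buffer at a
+ time and dispatches on the pm_parse_state_t codes above. 'cxt' and
+ 'es_buf' are assumed to be set up already (see viddec_pm_init_context()),
+ and MFD_STREAM_FORMAT_H264 is assumed to be a codec type from
+ viddec_fw_common_defs.h:
+
+ uint32_t state = viddec_pm_parse_es_buffer(cxt, MFD_STREAM_FORMAT_H264, es_buf);
+ switch (state)
+ {
+     case PM_NO_DATA:   break; // wait for more es data
+     case PM_WKLD_DONE: break; // a frame's workload is complete
+     case PM_EOS:       break; // inband end of stream
+     default:           break; // PM_SC_FOUND etc.
+ }
+*/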
+#endif diff --git a/mixvbp/vbp_manager/include/viddec_pm_parse.h b/mixvbp/vbp_manager/include/viddec_pm_parse.h new file mode 100755 index 0000000..beca8d7 --- /dev/null +++ b/mixvbp/vbp_manager/include/viddec_pm_parse.h @@ -0,0 +1,24 @@ +#ifndef VIDDEC_PM_PARSE_H +#define VIDDEC_PM_PARSE_H + +#include <stdint.h> +/* This structure is used by first-pass parsing (sc detect); the pm passes in the number of bytes + that need to be parsed, and if a start code is found then sc_end_pos contains the index of the last sc code byte + in the current buffer */ +typedef struct +{ + uint32_t size; /* size pointed to by buf */ + uint8_t *buf; /* ptr to data */ + int32_t sc_end_pos; /* return value end position of sc */ + uint32_t phase; /* phase information(state) for sc */ +} viddec_sc_parse_cubby_cxt_t; + +typedef struct +{ + uint16_t next_sc; + uint8_t second_scprfx_length; + uint8_t first_sc_detect; +} viddec_sc_prefix_state_t; + +uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state); +#endif diff --git a/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h new file mode 100755 index 0000000..999a067 --- /dev/null +++ b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h @@ -0,0 +1,88 @@ +#ifndef VIDDEC_PM_UTILS_BSTREAM_H +#define VIDDEC_PM_UTILS_BSTREAM_H + +#include "viddec_pm_utils_list.h" + +#define CUBBY_SIZE 1024 +#define SCRATCH_SIZE 20 +#define MIN_DATA 8 + +typedef struct +{ +#ifdef VBP + uint8_t *buf; +#else + uint8_t buf[CUBBY_SIZE + 8 + MIN_DATA];/* extra 8 bytes for alignment, extra 8 bytes for old data */ +#endif + uint32_t buf_st; /* start pos in buf */ + uint32_t buf_end; /* first invalid byte in buf */ + uint32_t buf_index; /* current index in buf */ + uint32_t buf_bitoff; /* bit offset in current index position */ +} viddec_pm_utils_bstream_buf_cxt_t; + +typedef struct +{ + uint8_t buf_scratch[SCRATCH_SIZE];/* scratch for boundary reads*/ + uint32_t st; /* start index of valid byte */ + uint32_t size;/* Total number of bytes in current buffer */ + uint32_t bitoff; /* bit offset in first valid byte */ +} viddec_pm_utils_bstream_scratch_cxt_t; + +typedef struct +{ +#ifdef VBP + /* counter of emulation prevention bytes */ + uint32_t emulation_byte_counter; +#endif + /* After the first pass of the scan we figure out how many bytes are in the current access unit (N bytes). We store + the bstream buffer's first valid byte index with respect to the access unit in this variable */ + uint32_t au_pos; + /* This is for keeping track of which list item was used to load data last */ + uint32_t list_off; + /* This is for tracking emulation prevention bytes */ + uint32_t phase; + /* This flag tells us whether to look for emulation prevention or not */ + uint32_t is_emul_reqd; + /* A pointer to the list of es buffers which contribute to the current access unit */ + viddec_pm_utils_list_t *list; + /* scratch buffer to stage data on boundaries and reloads */ + viddec_pm_utils_bstream_scratch_cxt_t scratch; + /* Actual context which has valid data for the get bits functionality */ + viddec_pm_utils_bstream_buf_cxt_t bstrm_buf; +} viddec_pm_utils_bstream_cxt_t; + +void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul); + +int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits); + +int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip); + +int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t *cxt, uint8_t *byte); + +uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt); + +uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cxt); + +void viddec_pm_utils_skip_if_current_is_emulation(viddec_pm_utils_bstream_cxt_t *cxt);
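+
+/*
+ Illustrative read pattern (a sketch): peek without consuming, then commit.
+ 'cxt' is assumed to be a context already prepared with
+ viddec_pm_utils_bstream_init(); the 0xB3 value is just an example byte:
+
+ uint32_t code = 0;
+ viddec_pm_utils_bstream_peekbits(cxt, &code, 8, 0); // skip=0: look ahead only
+ if (code == 0xB3)
+     viddec_pm_utils_bstream_skipbits(cxt, 8);       // consume the byte
+*/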
+    /* Assumption: we will never be parked on the 0x3 byte of an emulation prevention sequence */
+    *is_emul = (cxt->is_emul_reqd) && (phase > 0) &&
+               (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index] == 0) &&
+               (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x3);
+}
+#endif
diff --git a/mixvbp/vbp_manager/include/viddec_pm_utils_list.h b/mixvbp/vbp_manager/include/viddec_pm_utils_list.h
new file mode 100755
index 0000000..0e650d5
--- /dev/null
+++ b/mixvbp/vbp_manager/include/viddec_pm_utils_list.h
@@ -0,0 +1,50 @@
+#ifndef VIDDEC_PM_COMMON_LIST_H
+#define VIDDEC_PM_COMMON_LIST_H
+
+/* Limitation: this is the maximum number of es buffers between start codes. Needs to change if we encounter
+   a case where this is not sufficient */
+#ifdef VBP
+#define MAX_IBUFS_PER_SC 512
+#else
+#define MAX_IBUFS_PER_SC 64
+#endif
+
+/* This structure is for storing information on byte position in the current access unit.
+   stpos is the au byte index of the first byte in the current es buffer. edpos is the au byte index+1 of the last
+   valid byte in the current es buffer. */
+typedef struct
+{
+    uint32_t stpos;
+    uint32_t edpos;
+} viddec_pm_utils_au_bytepos_t;
+
+/* this structure is for storing all necessary information for list handling */
+typedef struct
+{
+    uint16_t num_items; /* Number of buffers in List */
+    uint16_t first_scprfx_length; /* Length of first sc prefix in this list */
+    int32_t start_offset; /* starting offset of unused data including sc prefix in first buffer */
+    int32_t end_offset; /* Offset of unused data in last buffer including 2nd sc prefix */
+    //viddec_input_buffer_t sc_ibuf[MAX_IBUFS_PER_SC]; /* Place to store buffer descriptors */
+    viddec_pm_utils_au_bytepos_t data[MAX_IBUFS_PER_SC]; /* place to store au byte positions */
+    int32_t total_bytes; /* total bytes for current access unit including first sc prefix */
+} viddec_pm_utils_list_t;
+
+/* This function initialises the list to default values */
+void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt);
+#ifndef VBP
+/* This function adds a new entry to the list and will emit tags if needed */
+uint32_t viddec_pm_utils_list_addbuf(viddec_pm_utils_list_t *list, viddec_input_buffer_t *es_buf);
+
+/* This function updates the au byte position of the current list. This should be called after sc codes are detected and before
+   syntax parsing, as get bits requires this to be initialized. */
+void viddec_pm_utils_list_updatebytepos(viddec_pm_utils_list_t *list, uint8_t sc_prefix_length);
+
+/* This function walks through the list and removes consumed buffers based on total bytes. It then moves
+   unused entries to the top of the list. */
+void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint32_t length);
+
+/* this function returns 1 if the requested byte is not found. If found, returns the list index and offset into the list */
+uint32_t viddec_pm_utils_list_getbyte_position(viddec_pm_utils_list_t *list, uint32_t byte, uint32_t *list_index, uint32_t *offset);
+#endif
+#endif
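The [stpos, edpos) ranges recorded per buffer are what let the getbits layer address a scattered access unit as one contiguous byte stream. An illustrative re-implementation of the lookup that viddec_pm_utils_list_getbyte_position() is documented to perform (a sketch under that reading, not the library's code):

    #include <stdint.h>
    #include "viddec_pm_utils_list.h"

    /* Map an access-unit byte index to (buffer, offset) using the
       per-buffer [stpos, edpos) ranges. Returns 0 on success, 1 if
       the byte lies beyond the data currently in the list. */
    static uint32_t demo_getbyte_position(viddec_pm_utils_list_t *list,
                                          uint32_t byte,
                                          uint32_t *list_index,
                                          uint32_t *offset)
    {
        uint32_t i;
        for (i = 0; i < list->num_items; i++)
        {
            if ((byte >= list->data[i].stpos) && (byte < list->data[i].edpos))
            {
                *list_index = i;
                *offset = byte - list->data[i].stpos;
                return 0;
            }
        }
        return 1; /* requested byte is not in any buffer */
    }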
diff --git a/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.c b/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.c
new file mode 100644
index 0000000..498cbc4
--- /dev/null
+++ b/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.c
@@ -0,0 +1,1830 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#include <dlfcn.h>
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_h264secure_parser.h"
+
+#define TERMINATE_KEY 0xFFFFFFFF
+
+typedef struct vbp_h264secure_parser_private vbp_h264secure_parser_private;
+
+typedef enum
+{
+    H264_BS_LENGTH_PREFIXED,
+    H264_BS_SC_PREFIXED,
+    H264_BS_SINGLE_NAL
+} H264_BS_PATTERN;
+
+struct vbp_h264secure_parser_private
+{
+    /* number of bytes used to encode the length of the NAL payload. If the parser does not receive configuration data,
+       and NAL_length_size is equal to zero when bitstream parsing begins, we assume the bitstream is in Annex B
+       byte stream format. */
+    int NAL_length_size;
+
+    /* indicates if the stream is length prefixed */
+    int length_prefix_verified;
+
+    H264_BS_PATTERN bitstream_pattern;
+
+    uint8_t* start;
+    int32_t offset;
+    int32_t size;
+};
+
+/* default scaling list table */
+static unsigned char Default_4x4_Intra[16] =
+{
+    6,13,20,28,
+    13,20,28,32,
+    20,28,32,37,
+    28,32,37,42
+};
+
+static unsigned char Default_4x4_Inter[16] =
+{
+    10,14,20,24,
+    14,20,24,27,
+    20,24,27,30,
+    24,27,30,34
+};
+
+static unsigned char Default_8x8_Intra[64] =
+{
+    6,10,13,16,18,23,25,27,
+    10,11,16,18,23,25,27,29,
+    13,16,18,23,25,27,29,31,
+    16,18,23,25,27,29,31,33,
+    18,23,25,27,29,31,33,36,
+    23,25,27,29,31,33,36,38,
+    25,27,29,31,33,36,38,40,
+    27,29,31,33,36,38,40,42
+};
+
+static unsigned char Default_8x8_Inter[64] =
+{
+    9,13,15,17,19,21,22,24,
+    13,13,17,19,21,22,24,25,
+    15,17,19,21,22,24,25,27,
+    17,19,21,22,24,25,27,28,
+    19,21,22,24,25,27,28,30,
+    21,22,24,25,27,28,30,32,
+    22,24,25,27,28,30,32,33,
+    24,25,27,28,30,32,33,35
+};
+
+static unsigned char quant_flat[16] =
+{
+    16,16,16,16,
+    16,16,16,16,
+    16,16,16,16,
+    16,16,16,16
+};
+
+static unsigned char quant8_flat[64] =
+{
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16
+};
+
+static unsigned char* UseDefaultList[8] =
+{
+    Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Intra,
+    Default_4x4_Inter, Default_4x4_Inter, Default_4x4_Inter,
+    Default_8x8_Intra,
+    Default_8x8_Inter
+};
+
+static uint8 h264_aspect_ratio_table[][2] =
+{
+    {0, 0},
+    {1, 1},
+    {12, 11},
+    {10, 11},
+    {16, 11},
+    {40, 33},
+    {24, 11},
+    {20, 11},
+    {32, 11},
+    {80, 33},
+    {18, 11},
+    {15, 11},
+    {64, 33},
+    {160, 99},
+    {4, 3},
+    {3, 2},
+    {2, 1},
+    // reserved
+    {0, 0}
+};
+
+
+
+/**
+ *
+ */
+uint32 vbp_init_parser_entries_h264secure(vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        return VBP_PARM;
+    }
+    pcontext->parser_ops->init =
dlsym(pcontext->fd_parser, "viddec_h264secure_init"); + if (NULL == pcontext->parser_ops->init) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->parse_sc = viddec_parse_sc; + + pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264secure_parse"); + if (NULL == pcontext->parser_ops->parse_syntax) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264secure_get_context_size"); + if (NULL == pcontext->parser_ops->get_cxt_size) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->update_data = dlsym(pcontext->fd_parser, "viddec_h264secure_update"); + if (NULL == pcontext->parser_ops->update_data) + { + ETRACE ("Failed to set entry point."); + return VBP_LOAD; + } + + /* entry point not needed */ + pcontext->parser_ops->is_wkld_done = NULL; + pcontext->parser_ops->flush = NULL; + pcontext->parser_ops->is_frame_start = NULL; + return VBP_OK; +} + + +/** + * + */ +uint32 vbp_allocate_query_data_h264secure(vbp_context *pcontext) +{ + if (NULL != pcontext->query_data) + { + return VBP_PARM; + } + + pcontext->query_data = NULL; + vbp_data_h264 *query_data = NULL; + + query_data = vbp_malloc_set0(vbp_data_h264, 1); + if (NULL == query_data) + { + goto cleanup; + } + + /* assign the pointer */ + pcontext->query_data = (void *)query_data; + + query_data->pic_data = vbp_malloc_set0(vbp_picture_data_h264, MAX_NUM_PICTURES); + if (NULL == query_data->pic_data) + { + goto cleanup; + } + + int i; + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferH264, 1); + if (NULL == query_data->pic_data[i].pic_parms) + { + goto cleanup; + } + query_data->pic_data[i].num_slices = 0; + query_data->pic_data[i].slc_data = vbp_malloc_set0(vbp_slice_data_h264, MAX_NUM_SLICES); + if (NULL == query_data->pic_data[i].slc_data) + { + goto cleanup; + } + } + + + query_data->IQ_matrix_buf = vbp_malloc_set0(VAIQMatrixBufferH264, 1); + if (NULL == query_data->IQ_matrix_buf) + { + goto cleanup; + } + + query_data->codec_data = vbp_malloc_set0(vbp_codec_data_h264, 1); + if (NULL == query_data->codec_data) + { + goto cleanup; + } + + pcontext->parser_private = NULL; + vbp_h264secure_parser_private *parser_private = NULL; + + parser_private = vbp_malloc_set0(vbp_h264secure_parser_private, 1); + if (NULL == parser_private) + { + goto cleanup; + } + + /* assign the pointer */ + pcontext->parser_private = (void *)parser_private; + + /* init the pointer */ + parser_private->start = 0; + parser_private->offset = 0; + parser_private->size = 0; + parser_private->NAL_length_size = 0; + parser_private->length_prefix_verified = 0; + parser_private->bitstream_pattern = H264_BS_SC_PREFIXED; + + return VBP_OK; + +cleanup: + vbp_free_query_data_h264secure(pcontext); + + return VBP_MEM; +} + +uint32 vbp_free_query_data_h264secure(vbp_context *pcontext) +{ + if (NULL != pcontext->parser_private) + { + free(pcontext->parser_private); + pcontext->parser_private = NULL; + } + + if (NULL == pcontext->query_data) + { + return VBP_OK; + } + + int i; + vbp_data_h264 *query_data; + query_data = (vbp_data_h264 *)pcontext->query_data; + + if (query_data->pic_data) + { + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + free(query_data->pic_data[i].slc_data); + free(query_data->pic_data[i].pic_parms); + } + free(query_data->pic_data); + } + + free(query_data->IQ_matrix_buf); + 
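The teardown in progress here frees every sub-allocation before the structure that owns it, and it stays safe on the partial-allocation path because vbp_malloc_set0 (by its name, and as this cleanup requires) is assumed to zero what it returns, leaving unreached pointers NULL for free(). The same goto-cleanup idiom in miniature, with invented demo_* names and plain calloc standing in for the allocator:

    #include <stdlib.h>

    typedef struct { int *parms; int *slices; } demo_pic;

    static demo_pic *demo_alloc(size_t n)
    {
        /* zero-initialized, so pointers we never reach stay NULL */
        demo_pic *p = calloc(1, sizeof(*p));
        if (NULL == p) goto cleanup;

        p->parms = calloc(n, sizeof(*p->parms));
        if (NULL == p->parms) goto cleanup;

        p->slices = calloc(n, sizeof(*p->slices));
        if (NULL == p->slices) goto cleanup;

        return p;

    cleanup:
        if (p) {
            free(p->parms);  /* free(NULL) is a harmless no-op */
            free(p->slices);
            free(p);
        }
        return NULL;
    }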
free(query_data->codec_data); + free(query_data); + + pcontext->query_data = NULL; + + return VBP_OK; +} + + +static inline uint16_t vbp_utils_ntohs(uint8_t* p) +{ + uint16_t i = ((*p) << 8) + ((*(p+1))); + return i; +} + +static inline uint32_t vbp_utils_ntohl(uint8_t* p) +{ + uint32_t i = ((*p) << 24) + ((*(p+1)) << 16) + ((*(p+2)) << 8) + ((*(p+3))); + return i; +} + + +static inline void vbp_set_VAPicture_h264secure( + int curr_picture_structure, + int bottom_field, + frame_store* store, + VAPictureH264* pic) +{ + if (FRAME == curr_picture_structure) + { + if (FRAME != viddec_h264_get_dec_structure(store)) + { + WTRACE("Reference picture structure is not frame for current frame picture!"); + } + pic->flags = 0; + pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; + } + else + { + if (FRAME == viddec_h264_get_dec_structure(store)) + { + WTRACE("reference picture structure is frame for current field picture!"); + } + if (bottom_field) + { + pic->flags = VA_PICTURE_H264_BOTTOM_FIELD; + pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; + } + else + { + pic->flags = VA_PICTURE_H264_TOP_FIELD; + pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; + } + } +} + +static inline void vbp_set_slice_ref_list_h264secure( + struct h264_viddec_parser* h264_parser, + VASliceParameterBufferH264 *slc_parms) +{ + int i, j; + int num_ref_idx_active = 0; + h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader); + uint8_t* p_list = NULL; + VAPictureH264* refPicListX = NULL; + frame_store* fs = NULL; + + /* initialize ref picutre list, set picture id and flags to invalid. */ + + for (i = 0; i < 2; i++) + { + refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]); + for (j = 0; j < 32; j++) + { + refPicListX->picture_id = VA_INVALID_SURFACE; + refPicListX->frame_idx = 0; + refPicListX->flags = VA_PICTURE_H264_INVALID; + refPicListX->TopFieldOrderCnt = 0; + refPicListX->BottomFieldOrderCnt = 0; + refPicListX++; + } + } + + for (i = 0; i < 2; i++) + { + refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]); + + if ((i == 0) && + ((h264_PtypeB == slice_header->slice_type) || + (h264_PtypeP == slice_header->slice_type))) + { + num_ref_idx_active = slice_header->num_ref_idx_l0_active; + if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag) + { + p_list = h264_parser->info.slice_ref_list0; + } + else + { + p_list = h264_parser->info.dpb.listX_0; + } + } + else if ((i == 1) && (h264_PtypeB == slice_header->slice_type)) + { + num_ref_idx_active = slice_header->num_ref_idx_l1_active; + if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag) + { + p_list = h264_parser->info.slice_ref_list1; + } + else + { + p_list = h264_parser->info.dpb.listX_1; + } + } + else + { + num_ref_idx_active = 0; + p_list = NULL; + } + + + for (j = 0; j < num_ref_idx_active; j++) + { + fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]); + + /* bit 5 indicates if reference picture is bottom field */ + vbp_set_VAPicture_h264secure( + h264_parser->info.img.structure, + (p_list[j] & 0x20) >> 5, + fs, + refPicListX); + + refPicListX->frame_idx = fs->frame_num; + refPicListX->flags |= viddec_h264_get_is_long_term(fs) ? 
VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE; + refPicListX++; + } + } +} + +static inline void vbp_set_pre_weight_table_h264secure( + struct h264_viddec_parser* h264_parser, + VASliceParameterBufferH264 *slc_parms) +{ + h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader); + int i, j; + + if ((((h264_PtypeP == slice_header->slice_type) || + (h264_PtypeB == slice_header->slice_type)) && + h264_parser->info.active_PPS.weighted_pred_flag) || + ((h264_PtypeB == slice_header->slice_type) && + (1 == h264_parser->info.active_PPS.weighted_bipred_idc))) + { + slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom; + slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom; + slc_parms->luma_weight_l0_flag = slice_header->sh_predwttbl.luma_weight_l0_flag; + slc_parms->chroma_weight_l0_flag = slice_header->sh_predwttbl.chroma_weight_l0_flag; + slc_parms->luma_weight_l1_flag = slice_header->sh_predwttbl.luma_weight_l1_flag; + slc_parms->chroma_weight_l1_flag = slice_header->sh_predwttbl.chroma_weight_l1_flag; + + for (i = 0; i < 32; i++) + { + slc_parms->luma_weight_l0[i] = slice_header->sh_predwttbl.luma_weight_l0[i]; + slc_parms->luma_offset_l0[i] = slice_header->sh_predwttbl.luma_offset_l0[i]; + slc_parms->luma_weight_l1[i] = slice_header->sh_predwttbl.luma_weight_l1[i]; + slc_parms->luma_offset_l1[i] = slice_header->sh_predwttbl.luma_offset_l1[i]; + + for (j = 0; j < 2; j++) + { + slc_parms->chroma_weight_l0[i][j] = slice_header->sh_predwttbl.chroma_weight_l0[i][j]; + slc_parms->chroma_offset_l0[i][j] = slice_header->sh_predwttbl.chroma_offset_l0[i][j]; + slc_parms->chroma_weight_l1[i][j] = slice_header->sh_predwttbl.chroma_weight_l1[i][j]; + slc_parms->chroma_offset_l1[i][j] = slice_header->sh_predwttbl.chroma_offset_l1[i][j]; + } + } + } + else + { + /* default weight table */ + slc_parms->luma_log2_weight_denom = 5; + slc_parms->chroma_log2_weight_denom = 5; + slc_parms->luma_weight_l0_flag = 0; + slc_parms->luma_weight_l1_flag = 0; + slc_parms->chroma_weight_l0_flag = 0; + slc_parms->chroma_weight_l1_flag = 0; + for (i = 0; i < 32; i++) + { + slc_parms->luma_weight_l0[i] = 0; + slc_parms->luma_offset_l0[i] = 0; + slc_parms->luma_weight_l1[i] = 0; + slc_parms->luma_offset_l1[i] = 0; + + for (j = 0; j < 2; j++) + { + slc_parms->chroma_weight_l0[i][j] = 0; + slc_parms->chroma_offset_l0[i][j] = 0; + slc_parms->chroma_weight_l1[i][j] = 0; + slc_parms->chroma_offset_l1[i][j] = 0; + } + } + } +} + + +static inline void vbp_set_reference_frames_h264secure( + struct h264_viddec_parser *parser, + VAPictureParameterBufferH264* pic_parms) +{ + int buffer_idx; + int frame_idx; + frame_store* store = NULL; + h264_DecodedPictureBuffer* dpb = &(parser->info.dpb); + /* initialize reference frames */ + for (frame_idx = 0; frame_idx < 16; frame_idx++) + { + pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE; + pic_parms->ReferenceFrames[frame_idx].frame_idx = 0; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID; + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0; + } + pic_parms->num_ref_frames = 0; + + frame_idx = 0; + + /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer); */ + /* set short term reference frames */ + for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++) + { + if (frame_idx >= 16 || buffer_idx >= 16) + { + WTRACE("Frame index is out of 
bound."); + break; + } + + store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]]; + /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */ + if (viddec_h264_get_is_used(store)) + { + pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; + if (FRAME == parser->info.img.structure) + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + } + else + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) + { + /* if both fields are used for reference, just set flag to be frame (0) */ + } + else + { + if (store->top_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; + if (store->bottom_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; + } + } + } + frame_idx++; + } + + /* set long term reference frames */ + for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++) + { + if (frame_idx >= 16 || buffer_idx >= 16) + { + WTRACE("Frame index is out of bound."); + break; + } + store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]]; + if (!viddec_h264_get_is_long_term(store)) + { + WTRACE("long term frame is not marked as long term."); + } + /*if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */ + if (viddec_h264_get_is_used(store)) + { + pic_parms->ReferenceFrames[frame_idx].frame_idx = store->long_term_frame_idx; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE; + if (FRAME == parser->info.img.structure) + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc; + } + else + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) + { + /* if both fields are used for reference, just set flag to be frame (0)*/ + } + else + { + if (store->top_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; + if (store->bottom_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; + } + } + } + frame_idx++; + } + + pic_parms->num_ref_frames = parser->info.active_SPS.num_ref_frames; + + if (frame_idx > parser->info.active_SPS.num_ref_frames) + { + WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).", + frame_idx, parser->info.active_SPS.num_ref_frames); + } +} + + +static inline void vbp_set_scaling_list_h264secure( + struct h264_viddec_parser *parser, + VAIQMatrixBufferH264* IQ_matrix_buf) +{ + int i; + int lists_to_set = 6 + 2 * (parser->info.active_PPS.transform_8x8_mode_flag ? 
1 : 0); + + if (parser->info.active_PPS.pic_scaling_matrix_present_flag) + { + for (i = 0; i < lists_to_set; i++) + { + if (parser->info.active_PPS.pic_scaling_list_present_flag[i]) + { + if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) || + ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6])) + { + /* use default scaling list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + } + } + else + { + /* use PPS list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64); + } + } + } + else /* pic_scaling_list not present */ + { + if (parser->info.active_SPS.seq_scaling_matrix_present_flag) + { + /* SPS matrix present - use fallback rule B */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i], + 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], + parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i], + 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + //g_warning("invalid scaling list index."); + break; + } + } + else /* seq_scaling_matrix not present */ + { + /* SPS matrix not present - use fallback rule A */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + WTRACE("invalid scaling list index."); + break; + } + } /* end of seq_scaling_matrix not present */ + } /* end of pic_scaling_list not present */ + } /* for loop for each index from 0 to 7 */ + } /* end of pic_scaling_matrix present */ + else + { + /* PPS matrix not present, use SPS information */ + if (parser->info.active_SPS.seq_scaling_matrix_present_flag) + { + for (i = 0; i < lists_to_set; i++) + { + if (parser->info.active_SPS.seq_scaling_list_present_flag[i]) + { + if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) || + ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6])) + { + /* use default scaling list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + } + } + else + { + /* use SPS list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64); + } + } + } + else + { + /* SPS list not present - use fallback rule A */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + break; + + case 1: + case 2: + case 4: + case 5: + 
memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + WTRACE("invalid scaling list index."); + break; + } + } + } + } + else + { + /* SPS matrix not present - use flat lists */ + for (i = 0; i < 6; i++) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16); + } + for (i = 0; i < 2; i++) + { + memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64); + } + } + } + + if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) && + (parser->info.active_PPS.pic_scaling_matrix_present_flag || + parser->info.active_SPS.seq_scaling_matrix_present_flag)) + { + for (i = 0; i < 2; i++) + { + memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64); + } + } +} + +static void vbp_set_codec_data_h264secure( + struct h264_viddec_parser *parser, + vbp_data_h264 *query_data) +{ + vbp_codec_data_h264* codec_data = query_data->codec_data; + + /* The following variables are used to detect if there is new SPS or PPS */ + uint8 seq_parameter_set_id = codec_data->seq_parameter_set_id; + uint8 pic_parameter_set_id = codec_data->pic_parameter_set_id; + int frame_width = codec_data->frame_width; + int frame_height = codec_data->frame_height; + + /* parameter id */ + codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id; + codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id; + + /* profile and level */ + codec_data->profile_idc = parser->info.active_SPS.profile_idc; + codec_data->level_idc = parser->info.active_SPS.level_idc; + + + /*constraint flag sets (h.264 Spec v2009)*/ + codec_data->constraint_set0_flag = (parser->info.active_SPS.constraint_set_flags & 0x10) >> 4; + codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x8) >> 3; + codec_data->constraint_set2_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2; + codec_data->constraint_set3_flag = (parser->info.active_SPS.constraint_set_flags & 0x2) >> 1; + codec_data->constraint_set4_flag = parser->info.active_SPS.constraint_set_flags & 0x1; + + /* reference frames */ + codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames; + + if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag && + !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag) + { + /* no longer necessary: two fields share the same interlaced surface */ + /* codec_data->num_ref_frames *= 2; */ + } + + codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; + + /* frame coding */ + codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; + + /* frame dimension */ + codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1 ) * 16; + + codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16; + + /* cropping information */ + codec_data->crop_left = 0; + codec_data->crop_right = 0; + codec_data->crop_top = 0; + codec_data->crop_bottom = 0; + if(parser->info.active_SPS.sps_disp.frame_cropping_flag) { + int CropUnitX = 0, CropUnitY = 0, SubWidthC = 0, SubHeightC = 0; + int ChromaArrayType = 0; + if(parser->info.active_SPS.sps_disp.separate_colour_plane_flag == 0) { + if(parser->info.active_SPS.sps_disp.chroma_format_idc == 1) { + SubWidthC = 2; + SubHeightC = 2; + } else if( 
parser->info.active_SPS.sps_disp.chroma_format_idc == 2) { + SubWidthC = 2; + SubHeightC = 1; + } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 3) { + SubWidthC = 1; + SubHeightC = 1; + } + ChromaArrayType = parser->info.active_SPS.sps_disp.chroma_format_idc; + } + + if(ChromaArrayType == 0) { + CropUnitX = 1; + CropUnitY = 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + } else { + CropUnitX = SubWidthC; + CropUnitY = SubHeightC * ( 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag); + } + + codec_data->crop_left = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset; + codec_data->crop_right = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset; // + 1; + codec_data->crop_top = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset; + codec_data->crop_bottom = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset; // + 1; + } + + /* aspect ratio */ + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag) + { + codec_data->aspect_ratio_idc = + parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc; + + if (codec_data->aspect_ratio_idc < 17) + { + codec_data->sar_width = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][0]; + codec_data->sar_height = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][1]; + } + else if (codec_data->aspect_ratio_idc == 255) + { + codec_data->sar_width = + parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width; + + codec_data->sar_height = + parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height; + } + else + { + codec_data->sar_width = 0; + codec_data->sar_height = 0; + } + } + else + { + // unspecified + codec_data->aspect_ratio_idc = 0; + codec_data->sar_width = 0; + codec_data->sar_height = 0; + } + + /* video format */ + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag) + { + codec_data->video_format = + parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format; + } + else + { + // Unspecified video format + codec_data->video_format = 5; + } + + codec_data->video_full_range_flag = + parser->info.active_SPS.sps_disp.vui_seq_parameters.video_full_range_flag; + + + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag) + { + codec_data->matrix_coefficients = + parser->info.active_SPS.sps_disp.vui_seq_parameters.matrix_coefficients; + } + else + { + // Unspecified + codec_data->matrix_coefficients = 2; + } + + codec_data->bit_rate = parser->info.active_SPS.sps_disp.vui_seq_parameters.bit_rate_value; + + /* picture order type and count */ + codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; + codec_data->pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type; + + + /* udpate sps and pps status */ + query_data->new_sps = (seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0; + query_data->new_pps = (pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 
1 : 0; + query_data->has_sps = parser->info.active_SPS.seq_parameter_set_id != 0xff; + query_data->has_pps = parser->info.active_PPS.seq_parameter_set_id != 0xff; + if ( frame_width != codec_data->frame_width || frame_height != codec_data->frame_height) + { + query_data->new_sps = 1; + query_data->new_pps = 1; + } +} + + +static uint32_t vbp_add_pic_data_h264secure(vbp_context *pcontext, int list_index) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + + vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; + struct h264_viddec_parser* parser = NULL; + vbp_picture_data_h264* pic_data = NULL; + VAPictureParameterBufferH264* pic_parms = NULL; + + parser = (struct h264_viddec_parser *)cxt->codec_data; + + if (0 == parser->info.SliceHeader.first_mb_in_slice) + { + /* a new picture is parsed */ + query_data->num_pictures++; + } + + if (query_data->num_pictures == 0) + { + /* partial frame */ + query_data->num_pictures = 1; + } + + if (query_data->num_pictures > MAX_NUM_PICTURES) + { + ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES); + return VBP_DATA; + } + + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + WTRACE("MB address does not start from 0!"); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + pic_parms = pic_data->pic_parms; + + // relax this condition to support partial frame parsing + + //if (parser->info.SliceHeader.first_mb_in_slice == 0) + { + /** + * picture parameter only needs to be set once, + * even multiple slices may be encoded + */ + + /* VAPictureParameterBufferH264 */ + pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE; + pic_parms->CurrPic.frame_idx = 0; + if (parser->info.img.field_pic_flag == 1) + { + if (parser->info.img.bottom_field_flag) + { + pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD; + } + else + { + /* also OK set to 0 (from test suite) */ + pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD; + } + } + else + { + pic_parms->CurrPic.flags = 0; /* frame picture */ + } + pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc; + pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc; + pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num; + /* don't care if current frame is used as long term reference */ + if (parser->info.SliceHeader.nal_ref_idc != 0) + { + pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE; + } + + pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1; + + /* frame height in MBS */ + pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1; + + pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8; + pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8; + + + pic_parms->seq_fields.value = 0; + pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc; + pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag; + pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; + pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag; + + /* new fields in 
libva 0.31 */ + pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; + pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4; + pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type; + pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; + pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag =parser->info.active_SPS.delta_pic_order_always_zero_flag; + + + /* referened from UMG_Moorstown_TestSuites */ + pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 1 : 0; + + pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1; + pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type; + pic_parms->slice_group_change_rate_minus1 = 0; + pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26; + pic_parms->pic_init_qs_minus26 = 0; + pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset; + pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset; + + pic_parms->pic_fields.value = 0; + pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag; + pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag; + pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc; + pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag; + + /* new LibVA fields in v0.31*/ + pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag; + pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag; + pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag; + pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0; + + /* all slices in the pciture have the same field_pic_flag */ + pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag; + pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag; + + pic_parms->frame_num = parser->info.SliceHeader.frame_num; + + pic_parms->num_ref_idx_l0_default_active_minus1 = parser->info.active_PPS.num_ref_idx_l0_active-1; + pic_parms->num_ref_idx_l1_default_active_minus1 = parser->info.active_PPS.num_ref_idx_l1_active-1; + } + + + /* set reference frames, and num_ref_frames */ + vbp_set_reference_frames_h264secure(parser, pic_parms); + if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + int frame_idx; + for (frame_idx = 0; frame_idx < 16; frame_idx++) + { + pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE; + pic_parms->ReferenceFrames[frame_idx].frame_idx = 0; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID; + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0; + } + } + + return VBP_OK; +} + +static uint32_t vbp_add_slice_data_h264secure(vbp_context *pcontext, int index) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint32 bit, byte; + uint8 is_emul; + + vbp_data_h264 *query_data = (vbp_data_h264 
*)pcontext->query_data; + VASliceParameterBufferH264 *slc_parms = NULL; + vbp_slice_data_h264 *slc_data = NULL; + struct h264_viddec_parser* h264_parser = NULL; + h264_Slice_Header_t* slice_header = NULL; + vbp_picture_data_h264* pic_data = NULL; + + + h264_parser = (struct h264_viddec_parser *)cxt->codec_data; + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + ETRACE("invalid picture data index."); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + + slc_data = &(pic_data->slc_data[pic_data->num_slices]); + slc_data->buffer_addr = cxt->parse_cubby.buf; + slc_parms = &(slc_data->slc_parms); + + /* byte: how many bytes have been parsed */ + /* bit: bits parsed within the current parsing position */ + viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul); + + slc_data->nal_unit_type = h264_parser->info.nal_unit_type; + + slc_parms->slice_data_size = slc_data->slice_size = + pcontext->parser_cxt->list.data[index].edpos - + pcontext->parser_cxt->list.data[index].stpos; + + slc_parms->slice_data_offset = 0; + + /* whole slice is in this buffer */ + slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + + /* the offset to the NAL start code for this slice */ + slc_data->slice_offset = cxt->list.data[index].stpos; + + slice_header = &(h264_parser->info.SliceHeader); + slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice; + + if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag & + (!(h264_parser->info.SliceHeader.field_pic_flag))) + { + slc_parms->first_mb_in_slice /= 2; + } + + pic_data->num_slices++; + + //vbp_update_reference_frames_h264_methodB(pic_data); + if (pic_data->num_slices > MAX_NUM_SLICES) + { + ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES); + return VBP_DATA; + } + + return VBP_OK; +} + + +static uint32_t vbp_update_slice_data_h264secure(vbp_context *pcontext, int index) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint32 bit, byte; + uint8 is_emul; + vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *) pcontext->parser_private; + vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; + VASliceParameterBufferH264 *slc_parms = NULL; + vbp_slice_data_h264 *slc_data = NULL; + struct h264_viddec_parser* h264_parser = NULL; + h264_Slice_Header_t* slice_header = NULL; + vbp_picture_data_h264* pic_data = NULL; + + h264_parser = (struct h264_viddec_parser *)cxt->codec_data; + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + ETRACE("invalid picture data index."); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + slc_data = &(pic_data->slc_data[pic_data->num_slices]); + slc_parms = &(slc_data->slc_parms); + + slc_parms->slice_data_size = parser_private->size; + slc_parms->slice_data_offset = parser_private->offset; + + /* whole slice is in this buffer */ + slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + + /* the offset to the NAL start code for this slice */ + slc_data->slice_offset = 0; + slc_data->buffer_addr = parser_private->start; + slc_data->slice_size = parser_private->size + parser_private->offset; + + slice_header = &(h264_parser->info.SliceHeader); + slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice; + + if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag & + (!(h264_parser->info.SliceHeader.field_pic_flag))) + { + slc_parms->first_mb_in_slice /= 2; + } + + pic_data->num_slices++; + + if (pic_data->num_slices > 
MAX_NUM_SLICES) + { + ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES); + return VBP_DATA; + } + + return VBP_OK; +} + + + +/** +* parse decoder configuration data +*/ +uint32 vbp_parse_init_data_h264secure(vbp_context* pcontext) +{ + /* parsing AVCDecoderConfigurationRecord structure (see MPEG-4 part 15 spec) */ + + uint8 configuration_version = 0; + uint8 AVC_profile_indication = 0; + uint8 profile_compatibility = 0; + uint8 AVC_level_indication = 0; + uint8 length_size_minus_one = 0; + uint8 num_of_sequence_parameter_sets = 0; + uint8 num_of_picture_parameter_sets = 0; + uint16 sequence_parameter_set_length = 0; + uint16 picture_parameter_set_length = 0; + + int i = 0; + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + + vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *)pcontext->parser_private; + //Enable emulation prevention + cxt->getbits.is_emul_reqd = 1; + + /* check if configuration data is start code prefix */ + viddec_sc_parse_cubby_cxt_t cubby = cxt->parse_cubby; + viddec_parser_ops_t *ops = pcontext->parser_ops; + int ret = ops->parse_sc((void *)&cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + if (ret == 1) + { + WTRACE("configuration data is start-code prefixed.\n"); + parser_private->bitstream_pattern = H264_BS_SC_PREFIXED; + return vbp_parse_start_code_h264secure(pcontext); + } + + + uint8* cur_data = cxt->parse_cubby.buf; + + + if (cxt->parse_cubby.size < 6) + { + /* need at least 6 bytes to start parsing the structure, see spec 15 */ + return VBP_DATA; + } + + configuration_version = *cur_data++; + AVC_profile_indication = *cur_data++; + + /*ITRACE("Profile indication: %d", AVC_profile_indication); */ + + profile_compatibility = *cur_data++; + AVC_level_indication = *cur_data++; + + /* ITRACE("Level indication: %d", AVC_level_indication);*/ + /* 2 bits of length_size_minus_one, 6 bits of reserved (11111) */ + length_size_minus_one = (*cur_data) & 0x3; + + if (length_size_minus_one != 3) + { + WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1); + } + + parser_private->NAL_length_size = length_size_minus_one + 1; + + cur_data++; + + /* 3 bits of reserved (111) and 5 bits of num_of_sequence_parameter_sets */ + num_of_sequence_parameter_sets = (*cur_data) & 0x1f; + if (num_of_sequence_parameter_sets > 1) + { + WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets); + } + if (num_of_sequence_parameter_sets > MAX_NUM_SPS) + { + /* this would never happen as MAX_NUM_SPS = 32 */ + WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS); + } + cur_data++; + + cxt->list.num_items = 0; + for (i = 0; i < num_of_sequence_parameter_sets; i++) + { + if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size) + { + /* need at least 2 bytes to parse sequence_parameter_set_length */ + ETRACE("Not enough data to parse SPS length."); + return VBP_DATA; + } + + /* 16 bits */ + sequence_parameter_set_length = vbp_utils_ntohs(cur_data); + + + cur_data += 2; + + if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size) + { + /* need at least sequence_parameter_set_length bytes for SPS */ + ETRACE("Not enough data to parse SPS."); + return VBP_DATA; + } + + cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf; + + /* end pos is exclusive */ + cxt->list.data[cxt->list.num_items].edpos = + cxt->list.data[cxt->list.num_items].stpos + 
sequence_parameter_set_length; + + cxt->list.num_items++; + + cur_data += sequence_parameter_set_length; + } + + if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size) + { + /* need at least one more byte to parse num_of_picture_parameter_sets */ + ETRACE("Not enough data to parse number of PPS."); + return VBP_DATA; + } + + num_of_picture_parameter_sets = *cur_data++; + if (num_of_picture_parameter_sets > 1) + { + /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */ + } + + for (i = 0; i < num_of_picture_parameter_sets; i++) + { + if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size) + { + /* need at least 2 bytes to parse picture_parameter_set_length */ + ETRACE("Not enough data to parse PPS length."); + return VBP_DATA; + } + + /* 16 bits */ + picture_parameter_set_length = vbp_utils_ntohs(cur_data); + + cur_data += 2; + + if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size) + { + /* need at least picture_parameter_set_length bytes for PPS */ + ETRACE("Not enough data to parse PPS."); + return VBP_DATA; + } + + cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf; + + /* end pos is exclusive */ + cxt->list.data[cxt->list.num_items].edpos = + cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length; + + cxt->list.num_items++; + + cur_data += picture_parameter_set_length; + } + + if ((cur_data - cxt->parse_cubby.buf) != cxt->parse_cubby.size) + { + WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.", + cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf)); + } + + parser_private->bitstream_pattern = H264_BS_LENGTH_PREFIXED; + return VBP_OK; +} + +static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p, int *NAL_length_size) +{ + switch (*NAL_length_size) + { + case 4: + return vbp_utils_ntohl(p); + + case 3: + { + uint32_t i = ((*p) << 16) + ((*(p+1)) << 8) + ((*(p+2))); + return i; + } + + case 2: + return vbp_utils_ntohs(p); + + case 1: + return *p; + + default: + WTRACE("invalid NAL_length_size: %d.", NAL_length_size); + /* default to 4 bytes for length */ + *NAL_length_size = 4; + return vbp_utils_ntohl(p); + } +} + +/** +** H.264 elementary stream does not have start code. +* instead, it is comprised of size of NAL unit and payload +* of NAL unit. See spec 15 (Sample format) +*/ + +/* Start code prefix is 001 which is 3 bytes. 
*/ +#define H264_SC_SIZE 3 +uint32 vbp_parse_start_code_h264secure(vbp_context *pcontext) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *)pcontext->parser_private; + + /* reset query data for the new sample buffer */ + vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data; + int i; + + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].num_slices = 0; + } + query_data->num_pictures = 0; + + cxt->list.num_items = 0; + + /* reset start position of first item to 0 in case there is only one item */ + cxt->list.data[0].stpos = 0; + + /* start code emulation prevention byte is present in NAL */ + cxt->getbits.is_emul_reqd = 1; + + if (parser_private->bitstream_pattern == H264_BS_LENGTH_PREFIXED) + { + viddec_sc_parse_cubby_cxt_t* cubby = NULL; + int32_t size_left = 0; + int32_t size_parsed = 0; + int32_t NAL_length = 0; + + cubby = &(cxt->parse_cubby); + + size_left = cubby->size; + + while (size_left >= parser_private->NAL_length_size) + { + NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed, &parser_private->NAL_length_size); + if (NAL_length <= 0 || NAL_length > size_left - parser_private->NAL_length_size) + { + ETRACE("Invalid NAL_length parsed."); + break; + } + + size_parsed += parser_private->NAL_length_size; + cxt->list.data[cxt->list.num_items].stpos = size_parsed; + size_parsed += NAL_length; /* skip NAL bytes */ + /* end position is exclusive */ + cxt->list.data[cxt->list.num_items].edpos = size_parsed; + cxt->list.num_items++; + if (cxt->list.num_items >= MAX_IBUFS_PER_SC) + { + ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC); + break; + } + + size_left = cubby->size - size_parsed; + } + + if (size_left != 0 && parser_private->length_prefix_verified == 0) + { + WTRACE("Elementary stream is not aligned (%d).", size_left); + + /* attempt to correct length prefix to start-code prefix only once, if it succeeds, we will + * alway treat bit stream as start-code prefixed; otherwise, treat bit stream as length prefixed + */ + parser_private->length_prefix_verified = 1; + viddec_sc_parse_cubby_cxt_t temp_cubby = cxt->parse_cubby; + + viddec_parser_ops_t *ops = pcontext->parser_ops; + int ret = ops->parse_sc((void *)&temp_cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + + /* found start code */ + if (ret == 1) + { + WTRACE("Stream was supposed to be length prefixed, but actually is start-code prefixed."); + parser_private->NAL_length_size = 0; + parser_private->bitstream_pattern = H264_BS_SC_PREFIXED; + /* reset parsing data */ + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].num_slices = 0; + } + query_data->num_pictures = 0; + cxt->list.num_items = 0; + } + } + } + + + if (parser_private->bitstream_pattern == H264_BS_SC_PREFIXED) + { + viddec_sc_parse_cubby_cxt_t cubby; + /* memory copy without updating cxt->parse_cubby */ + cubby = cxt->parse_cubby; + viddec_parser_ops_t *ops = pcontext->parser_ops; + int ret = 0; + + while (1) + { + ret = ops->parse_sc((void *)&cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + if (ret == 1) + { + if (cxt->list.num_items == 0) + { + cxt->list.data[0].stpos = cubby.sc_end_pos; + } + else + { + cxt->list.data[cxt->list.num_items].stpos = + cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos; + cxt->list.data[cxt->list.num_items - 1].edpos = cxt->list.data[cxt->list.num_items].stpos - H264_SC_SIZE; + } + + cubby.phase = 0; + cubby.buf = 
cxt->parse_cubby.buf + + cxt->list.data[cxt->list.num_items].stpos; + + cubby.size = cxt->parse_cubby.size - + cxt->list.data[cxt->list.num_items].stpos; + + cxt->list.num_items++; + if (cxt->list.num_items >= MAX_IBUFS_PER_SC) + { + WTRACE("Num items exceeds the limit!"); + /* not fatal, just stop parsing */ + break; + } + } + else + { + if (cxt->list.num_items == 0) + { + cxt->list.num_items = 1; + parser_private->bitstream_pattern = H264_BS_SINGLE_NAL; + WTRACE("Stream was supposed to be SC prefixed, but actually contains a single NAL."); + } + cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size; + break; + } + } + + } + + if (parser_private->bitstream_pattern == H264_BS_SINGLE_NAL) + { + cxt->list.num_items = 1; + cxt->list.data[0].stpos = 0; + cxt->list.data[0].edpos = cxt->parse_cubby.size; + } + + return VBP_OK; +} + +/** +* +* process parsing result after a NAL unit is parsed +* +*/ +uint32 vbp_process_parsing_result_h264secure( vbp_context *pcontext, int i) +{ + if (i >= MAX_NUM_SLICES) + { + return VBP_PARM; + } + + uint32 error = VBP_OK; + + struct h264_viddec_parser* parser = NULL; + parser = (struct h264_viddec_parser *)&( pcontext->parser_cxt->codec_data[0]); + vbp_data_h264* query_data = (vbp_data_h264 *)pcontext->query_data; + switch (parser->info.nal_unit_type) + { + case h264_NAL_UNIT_TYPE_SLICE: + VTRACE("slice header is parsed."); + error = vbp_add_pic_data_h264secure(pcontext, i); + if (VBP_OK == error) + { + error = vbp_add_slice_data_h264secure(pcontext, i); + } + break; + + case h264_NAL_UNIT_TYPE_IDR: + VTRACE("IDR header is parsed."); + error = vbp_add_pic_data_h264secure(pcontext, i); + if (VBP_OK == error) + { + error = vbp_add_slice_data_h264secure(pcontext, i); + } + break; + case h264_NAL_UNIT_TYPE_SEI: + //ITRACE("SEI header is parsed."); + break; + + case h264_NAL_UNIT_TYPE_SPS: + VTRACE("SPS header is parsed."); + break; + + case h264_NAL_UNIT_TYPE_PPS: + VTRACE("PPS header is parsed."); + break; + + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + VTRACE("ACC unit delimiter is parsed."); + break; + + case h264_NAL_UNIT_TYPE_EOSeq: + ITRACE("EOSeq is parsed."); + break; + + case h264_NAL_UNIT_TYPE_EOstream: + ITRACE("EOStream is parsed"); + break; + + default: + WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type); + break; + } + + if (query_data->num_pictures == MAX_NUM_PICTURES && parser->info.img.field_pic_flag != 1) + { + WTRACE("more than one frame in the buffer is found(%d)", query_data->num_pictures); + return (error == VBP_OK ? VBP_MULTI : error); + } + return error; +} + +/* +* +* fill query data structure after sample buffer is parsed +* +*/ +uint32 vbp_populate_query_data_h264secure(vbp_context *pcontext) +{ + vbp_data_h264 *query_data = NULL; + struct h264_viddec_parser *parser = NULL; + struct vbp_h264_parser_private_t* private = NULL; + + parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data; + query_data = (vbp_data_h264 *)pcontext->query_data; + private = (struct vbp_h264_parser_private_t *)pcontext->parser_private; + + vbp_set_codec_data_h264secure(parser, query_data); + + /* buffer number */ + query_data->buf_number = buffer_counter; + + /* VQIAMatrixBufferH264 */ + vbp_set_scaling_list_h264secure(parser, query_data->IQ_matrix_buf); + + if (query_data->num_pictures > 0) + { + /* + * picture parameter buffer and slice parameter buffer have been populated + */ + } + else + { + /** + * add a dummy picture that contains picture parameters parsed + from SPS and PPS. 
+     */
+        vbp_add_pic_data_h264secure(pcontext, 0);
+    }
+
+    return VBP_OK;
+}
+
+uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 size)
+{
+    uint32 error = VBP_OK;
+    uint32 offset = 0;
+    uint32 key = 0;
+    uint32 i,j;
+
+    vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
+
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].num_slices = 0;
+    }
+    query_data->num_pictures = 0;
+
+    vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *) pcontext->parser_private;
+
+    int32_t sliceheadersize;
+    uint32_t slice_num = 0;
+    while (offset < size) {
+        memcpy(&key, (uint8_t *)newdata+offset, sizeof(uint32_t));
+        if (key == TERMINATE_KEY) {
+            break;
+        }
+        slice_num++;
+        offset += sizeof(uint32_t);
+
+        memcpy(&parser_private->start, (uint8_t *)newdata+offset, 4);
+        offset += 4;
+
+        memcpy(&parser_private->offset, (uint8_t *)newdata+offset, sizeof(int32_t));
+        offset += 4;
+
+        memcpy(&parser_private->size, (uint8_t *)newdata+offset, sizeof(int32_t));
+        offset += 4;
+
+        sliceheadersize = sizeof(slice_header_t) + sizeof(dec_ref_pic_marking_t);
+        error = pcontext->parser_ops->update_data(pcontext->parser_cxt,
+                                                  newdata+offset, sliceheadersize);
+        offset += sliceheadersize;
+        if (error != VBP_OK)
+        {
+            ETRACE("update_data error = 0x%x",error);
+            return error;
+        }
+
+        error = vbp_add_pic_data_h264secure(pcontext, slice_num);
+        if (error != VBP_OK)
+        {
+            ETRACE("vbp_add_pic_data_h264secure error = 0x%x",error);
+            return error;
+        }
+
+        error = vbp_update_slice_data_h264secure(pcontext, slice_num);
+        if (error != VBP_OK)
+        {
+            ETRACE("vbp_update_slice_data_h264secure error = 0x%x",error);
+            return error;
+        }
+    }
+    if (key != TERMINATE_KEY)
+    {
+        ETRACE("Did not find the termination key 0xFFFFFFFF!");
+        return VBP_DATA;
+    } else {
+        if (slice_num < 1) {
+            ETRACE("Did not find a valid slice header!");
+            return VBP_DATA;
+        }
+    }
+    error = vbp_populate_query_data_h264secure(pcontext);
+
+    if (error != VBP_OK)
+    {
+        ETRACE("vbp_populate_query_data_h264secure error = 0x%x",error);
+        return error;
+    }
+    return error;
+}
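vbp_update_data_h264secure() above walks newdata as a sequence of fixed-size records, so the loop implies a packed wire layout even though none is declared. Reconstructed purely from the memcpy offsets in that loop (the record and field names are invented; note that copying exactly 4 bytes into parser_private->start assumes a 32-bit pointer):

    #include <stdint.h>

    /* One per-slice record, as implied by the parsing loop above.
       Each record is followed immediately by
       sizeof(slice_header_t) + sizeof(dec_ref_pic_marking_t) bytes
       of pre-parsed slice-header state that is handed to
       parser_ops->update_data(). */
    typedef struct
    {
        uint32_t key;    /* any value other than TERMINATE_KEY          */
        uint32_t start;  /* buffer address, memcpy'd into ->start       */
        int32_t  offset; /* byte offset of the slice data in the buffer */
        int32_t  size;   /* byte size of the slice data                 */
    } demo_h264secure_slice_record;

    /* The record stream terminates with a bare 4-byte TERMINATE_KEY
       (0xFFFFFFFF) where the next record's key would be. */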
+*
+*/
+
+
+#ifndef VBP_H264SECURE_PARSER_H
+#define VBP_H264SECURE_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+uint32 vbp_init_parser_entries_h264secure(vbp_context *pcontext);
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_h264secure(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_h264secure(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_h264secure(vbp_context *pcontext);
+
+/*
+ * parse start code. Only length-prefixed mode is supported; start-code
+ * prefixed streams are not.
+ */
+uint32 vbp_parse_start_code_h264secure(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_h264secure(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_h264secure(vbp_context *pcontext);
+
+/*
+ * update the parsing result with extra data
+ */
+uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 size);
+
+#endif /*VBP_H264SECURE_PARSER_H*/
diff --git a/mixvbp/vbp_manager/vbp_h264_parser.c b/mixvbp/vbp_manager/vbp_h264_parser.c
new file mode 100755
index 0000000..3f6400d
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_h264_parser.c
@@ -0,0 +1,1751 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#include <dlfcn.h>
+
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_h264_parser.h"
+
+typedef struct vbp_h264_parser_private_t vbp_h264_parser_private;
+
+typedef enum
+{
+    H264_BS_LENGTH_PREFIXED,
+    H264_BS_SC_PREFIXED,
+    H264_BS_SINGLE_NAL
+} H264_BS_PATTERN;
+
+struct vbp_h264_parser_private_t
+{
+    /* Number of bytes used to encode the length of a NAL payload. If the
+       parser does not receive configuration data and NAL_length_size is
+       zero when bitstream parsing begins, we assume the bitstream is in
+       Annex B byte stream format.
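+
+       (Hedged reference from H.264 Annex B / ISO/IEC 14496-15 rather than
+       from this patch: a length-prefixed stream looks like
+       [1-4 byte big-endian size][NAL][size][NAL]..., an Annex B stream
+       looks like 00 00 01 [NAL] 00 00 01 [NAL]..., and a "single NAL"
+       buffer carries one bare NAL unit with no prefix at all.)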
*/ + int NAL_length_size; + + /* indicate if stream is length prefixed */ + int length_prefix_verified; + + H264_BS_PATTERN bitstream_pattern; +}; + +/* default scaling list table */ +unsigned char Default_4x4_Intra[16] = +{ + 6,13,20,28, + 13,20,28,32, + 20,28,32,37, + 28,32,37,42 +}; + +unsigned char Default_4x4_Inter[16] = +{ + 10,14,20,24, + 14,20,24,27, + 20,24,27,30, + 24,27,30,34 +}; + +unsigned char Default_8x8_Intra[64] = +{ + 6,10,13,16,18,23,25,27, + 10,11,16,18,23,25,27,29, + 13,16,18,23,25,27,29,31, + 16,18,23,25,27,29,31,33, + 18,23,25,27,29,31,33,36, + 23,25,27,29,31,33,36,38, + 25,27,29,31,33,36,38,40, + 27,29,31,33,36,38,40,42 +}; + +unsigned char Default_8x8_Inter[64] = +{ + 9,13,15,17,19,21,22,24, + 13,13,17,19,21,22,24,25, + 15,17,19,21,22,24,25,27, + 17,19,21,22,24,25,27,28, + 19,21,22,24,25,27,28,30, + 21,22,24,25,27,28,30,32, + 22,24,25,27,28,30,32,33, + 24,25,27,28,30,32,33,35 +}; + +unsigned char quant_flat[16] = +{ + 16,16,16,16, + 16,16,16,16, + 16,16,16,16, + 16,16,16,16 +}; + +unsigned char quant8_flat[64] = +{ + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16 +}; + +unsigned char* UseDefaultList[8] = +{ + Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Intra, + Default_4x4_Inter, Default_4x4_Inter, Default_4x4_Inter, + Default_8x8_Intra, + Default_8x8_Inter +}; + +static uint8 h264_aspect_ratio_table[][2] = +{ + {0, 0}, + {1, 1}, + {12, 11}, + {10, 11}, + {16, 11}, + {40, 33}, + {24, 11}, + {20, 11}, + {32, 11}, + {80, 33}, + {18, 11}, + {15, 11}, + {64, 33}, + {160, 99}, + {4, 3}, + {3, 2}, + {2, 1}, + // reserved + {0, 0} +}; + + + +/** + * + */ +uint32 vbp_init_parser_entries_h264(vbp_context *pcontext) +{ + if (NULL == pcontext->parser_ops) + { + return VBP_PARM; + } + pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_h264_init"); + if (NULL == pcontext->parser_ops->init) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->parse_sc = viddec_parse_sc; + + pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264_parse"); + if (NULL == pcontext->parser_ops->parse_syntax) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } + + pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264_get_context_size"); + if (NULL == pcontext->parser_ops->get_cxt_size) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } +#ifdef VBP + pcontext->parser_ops->is_wkld_done = NULL; +#else + pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_h264_wkld_done"); + if (NULL == pcontext->parser_ops->is_wkld_done) + { + ETRACE ("Failed to set entry point." ); + return VBP_LOAD; + } +#endif + + pcontext->parser_ops->flush = dlsym(pcontext->fd_parser, "viddec_h264_flush");; + if (NULL == pcontext->parser_ops->flush) + { + ETRACE ("Failed to set entry point." 
); + return VBP_LOAD; + } + + /* entry point not needed */ + pcontext->parser_ops->is_frame_start = NULL; + return VBP_OK; +} + + +/** + * + */ +uint32 vbp_allocate_query_data_h264(vbp_context *pcontext) +{ + if (NULL != pcontext->query_data) + { + return VBP_PARM; + } + + pcontext->query_data = NULL; + vbp_data_h264 *query_data = NULL; + + query_data = vbp_malloc_set0(vbp_data_h264, 1); + if (NULL == query_data) + { + goto cleanup; + } + + /* assign the pointer */ + pcontext->query_data = (void *)query_data; + + query_data->pic_data = vbp_malloc_set0(vbp_picture_data_h264, MAX_NUM_PICTURES); + if (NULL == query_data->pic_data) + { + goto cleanup; + } + + int i; + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferH264, 1); + if (NULL == query_data->pic_data[i].pic_parms) + { + goto cleanup; + } + query_data->pic_data[i].num_slices = 0; + query_data->pic_data[i].slc_data = vbp_malloc_set0(vbp_slice_data_h264, MAX_NUM_SLICES); + if (NULL == query_data->pic_data[i].slc_data) + { + goto cleanup; + } + } + + + query_data->IQ_matrix_buf = vbp_malloc_set0(VAIQMatrixBufferH264, 1); + if (NULL == query_data->IQ_matrix_buf) + { + goto cleanup; + } + + query_data->codec_data = vbp_malloc_set0(vbp_codec_data_h264, 1); + if (NULL == query_data->codec_data) + { + goto cleanup; + } + + pcontext->parser_private = NULL; + vbp_h264_parser_private *parser_private = NULL; + + parser_private = vbp_malloc_set0(vbp_h264_parser_private, 1); + if (NULL == parser_private) + { + goto cleanup; + } + + /* assign the pointer */ + pcontext->parser_private = (void *)parser_private; + + /* init the pointer */ + parser_private->NAL_length_size = 0; + + parser_private->length_prefix_verified = 0; + + parser_private->bitstream_pattern = H264_BS_SC_PREFIXED; + + return VBP_OK; + +cleanup: + vbp_free_query_data_h264(pcontext); + + return VBP_MEM; +} + +uint32 vbp_free_query_data_h264(vbp_context *pcontext) +{ + if (NULL != pcontext->parser_private) + { + free(pcontext->parser_private); + pcontext->parser_private = NULL; + } + + if (NULL == pcontext->query_data) + { + return VBP_OK; + } + + int i; + vbp_data_h264 *query_data; + query_data = (vbp_data_h264 *)pcontext->query_data; + + if (query_data->pic_data) + { + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + free(query_data->pic_data[i].slc_data); + free(query_data->pic_data[i].pic_parms); + } + free(query_data->pic_data); + } + + free(query_data->IQ_matrix_buf); + free(query_data->codec_data); + free(query_data); + + pcontext->query_data = NULL; + + return VBP_OK; +} + + +static inline uint16_t vbp_utils_ntohs(uint8_t* p) +{ + uint16_t i = ((*p) << 8) + ((*(p+1))); + return i; +} + +static inline uint32_t vbp_utils_ntohl(uint8_t* p) +{ + uint32_t i = ((*p) << 24) + ((*(p+1)) << 16) + ((*(p+2)) << 8) + ((*(p+3))); + return i; +} + + +static inline void vbp_set_VAPicture_h264( + int curr_picture_structure, + int bottom_field, + frame_store* store, + VAPictureH264* pic) +{ + if (FRAME == curr_picture_structure) + { + if (FRAME != viddec_h264_get_dec_structure(store)) + { + WTRACE("Reference picture structure is not frame for current frame picture!"); + } + pic->flags = 0; + pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; + } + else + { + if (FRAME == viddec_h264_get_dec_structure(store)) + { + WTRACE("reference picture structure is frame for current field picture!"); + } + if (bottom_field) + { + pic->flags = VA_PICTURE_H264_BOTTOM_FIELD; + 
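+            /* (Hedged worked case, not from the original patch: for a
+             * complementary field pair with top_field.poc == 4 and
+             * bottom_field.poc == 5, a bottom-field reference carries
+             * flags = VA_PICTURE_H264_BOTTOM_FIELD while both order
+             * counts below are still filled in from the frame store.) */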
pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; + } + else + { + pic->flags = VA_PICTURE_H264_TOP_FIELD; + pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; + } + } +} + +static inline void vbp_set_slice_ref_list_h264( + struct h264_viddec_parser* h264_parser, + VASliceParameterBufferH264 *slc_parms) +{ + int i, j; + int num_ref_idx_active = 0; + h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader); + uint8_t* p_list = NULL; + VAPictureH264* refPicListX = NULL; + frame_store* fs = NULL; + + /* initialize ref picutre list, set picture id and flags to invalid. */ + + for (i = 0; i < 2; i++) + { + refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]); + for (j = 0; j < 32; j++) + { + refPicListX->picture_id = VA_INVALID_SURFACE; + refPicListX->frame_idx = 0; + refPicListX->flags = VA_PICTURE_H264_INVALID; + refPicListX->TopFieldOrderCnt = 0; + refPicListX->BottomFieldOrderCnt = 0; + refPicListX++; + } + } + + for (i = 0; i < 2; i++) + { + refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]); + + if ((i == 0) && + ((h264_PtypeB == slice_header->slice_type) || + (h264_PtypeP == slice_header->slice_type))) + { + num_ref_idx_active = slice_header->num_ref_idx_l0_active; + if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag) + { + p_list = h264_parser->info.slice_ref_list0; + } + else + { + p_list = h264_parser->info.dpb.listX_0; + } + } + else if ((i == 1) && (h264_PtypeB == slice_header->slice_type)) + { + num_ref_idx_active = slice_header->num_ref_idx_l1_active; + if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag) + { + p_list = h264_parser->info.slice_ref_list1; + } + else + { + p_list = h264_parser->info.dpb.listX_1; + } + } + else + { + num_ref_idx_active = 0; + p_list = NULL; + } + + + for (j = 0; j < num_ref_idx_active; j++) + { + fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]); + + /* bit 5 indicates if reference picture is bottom field */ + vbp_set_VAPicture_h264( + h264_parser->info.img.structure, + (p_list[j] & 0x20) >> 5, + fs, + refPicListX); + + refPicListX->frame_idx = fs->frame_num; + refPicListX->flags |= viddec_h264_get_is_long_term(fs) ? 
VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE; + refPicListX++; + } + } +} + +static inline void vbp_set_pre_weight_table_h264( + struct h264_viddec_parser* h264_parser, + VASliceParameterBufferH264 *slc_parms) +{ + h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader); + int i, j; + + if ((((h264_PtypeP == slice_header->slice_type) || + (h264_PtypeB == slice_header->slice_type)) && + h264_parser->info.active_PPS.weighted_pred_flag) || + ((h264_PtypeB == slice_header->slice_type) && + (1 == h264_parser->info.active_PPS.weighted_bipred_idc))) + { + slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom; + slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom; + slc_parms->luma_weight_l0_flag = slice_header->sh_predwttbl.luma_weight_l0_flag; + slc_parms->chroma_weight_l0_flag = slice_header->sh_predwttbl.chroma_weight_l0_flag; + slc_parms->luma_weight_l1_flag = slice_header->sh_predwttbl.luma_weight_l1_flag; + slc_parms->chroma_weight_l1_flag = slice_header->sh_predwttbl.chroma_weight_l1_flag; + + for (i = 0; i < 32; i++) + { + slc_parms->luma_weight_l0[i] = slice_header->sh_predwttbl.luma_weight_l0[i]; + slc_parms->luma_offset_l0[i] = slice_header->sh_predwttbl.luma_offset_l0[i]; + slc_parms->luma_weight_l1[i] = slice_header->sh_predwttbl.luma_weight_l1[i]; + slc_parms->luma_offset_l1[i] = slice_header->sh_predwttbl.luma_offset_l1[i]; + + for (j = 0; j < 2; j++) + { + slc_parms->chroma_weight_l0[i][j] = slice_header->sh_predwttbl.chroma_weight_l0[i][j]; + slc_parms->chroma_offset_l0[i][j] = slice_header->sh_predwttbl.chroma_offset_l0[i][j]; + slc_parms->chroma_weight_l1[i][j] = slice_header->sh_predwttbl.chroma_weight_l1[i][j]; + slc_parms->chroma_offset_l1[i][j] = slice_header->sh_predwttbl.chroma_offset_l1[i][j]; + } + } + } + else + { + /* default weight table */ + slc_parms->luma_log2_weight_denom = 5; + slc_parms->chroma_log2_weight_denom = 5; + slc_parms->luma_weight_l0_flag = 0; + slc_parms->luma_weight_l1_flag = 0; + slc_parms->chroma_weight_l0_flag = 0; + slc_parms->chroma_weight_l1_flag = 0; + for (i = 0; i < 32; i++) + { + slc_parms->luma_weight_l0[i] = 0; + slc_parms->luma_offset_l0[i] = 0; + slc_parms->luma_weight_l1[i] = 0; + slc_parms->luma_offset_l1[i] = 0; + + for (j = 0; j < 2; j++) + { + slc_parms->chroma_weight_l0[i][j] = 0; + slc_parms->chroma_offset_l0[i][j] = 0; + slc_parms->chroma_weight_l1[i][j] = 0; + slc_parms->chroma_offset_l1[i][j] = 0; + } + } + } +} + + +static inline void vbp_set_reference_frames_h264( + struct h264_viddec_parser *parser, + VAPictureParameterBufferH264* pic_parms) +{ + int buffer_idx; + int frame_idx; + frame_store* store = NULL; + h264_DecodedPictureBuffer* dpb = &(parser->info.dpb); + /* initialize reference frames */ + for (frame_idx = 0; frame_idx < 16; frame_idx++) + { + pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE; + pic_parms->ReferenceFrames[frame_idx].frame_idx = 0; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID; + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0; + } + pic_parms->num_ref_frames = 0; + + frame_idx = 0; + + /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer); */ + /* set short term reference frames */ + for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++) + { + if (frame_idx >= 16 || buffer_idx >= 16) + { + WTRACE("Frame index is out of bound."); + 
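+            /* (Hedged aside, not from the original patch: dpb->fs_ref_idc[]
+             * holds DPB slot indices in reference order, e.g.
+             * fs_ref_idc = {3, 0, 7} walks DPB slots 3, 0 and 7; VA's
+             * ReferenceFrames[] has 16 entries, hence the bound check
+             * above.) */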
break; + } + + store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]]; + /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */ + if (viddec_h264_get_is_used(store)) + { + pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; + if (FRAME == parser->info.img.structure) + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + } + else + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) + { + /* if both fields are used for reference, just set flag to be frame (0) */ + } + else + { + if (store->top_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; + if (store->bottom_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; + } + } + } + frame_idx++; + } + + /* set long term reference frames */ + for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++) + { + if (frame_idx >= 16 || buffer_idx >= 16) + { + WTRACE("Frame index is out of bound."); + break; + } + store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]]; + if (!viddec_h264_get_is_long_term(store)) + { + WTRACE("long term frame is not marked as long term."); + } + /*if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */ + if (viddec_h264_get_is_used(store)) + { + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE; + if (FRAME == parser->info.img.structure) + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc; + } + else + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) + { + /* if both fields are used for reference, just set flag to be frame (0)*/ + } + else + { + if (store->top_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; + if (store->bottom_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; + } + } + } + frame_idx++; + } + + pic_parms->num_ref_frames = frame_idx; + + if (frame_idx > parser->info.active_SPS.num_ref_frames) + { + WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).", + frame_idx, parser->info.active_SPS.num_ref_frames); + } +} + + +static inline void vbp_set_scaling_list_h264( + struct h264_viddec_parser *parser, + VAIQMatrixBufferH264* IQ_matrix_buf) +{ + int i; + int lists_to_set = 6 + 2 * (parser->info.active_PPS.transform_8x8_mode_flag ? 
1 : 0); + + if (parser->info.active_PPS.pic_scaling_matrix_present_flag) + { + for (i = 0; i < lists_to_set; i++) + { + if (parser->info.active_PPS.pic_scaling_list_present_flag[i]) + { + if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) || + ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6])) + { + /* use default scaling list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + } + } + else + { + /* use PPS list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64); + } + } + } + else /* pic_scaling_list not present */ + { + if (parser->info.active_SPS.seq_scaling_matrix_present_flag) + { + /* SPS matrix present - use fallback rule B */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i], + 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], + parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i], + 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + //g_warning("invalid scaling list index."); + break; + } + } + else /* seq_scaling_matrix not present */ + { + /* SPS matrix not present - use fallback rule A */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + WTRACE("invalid scaling list index."); + break; + } + } /* end of seq_scaling_matrix not present */ + } /* end of pic_scaling_list not present */ + } /* for loop for each index from 0 to 7 */ + } /* end of pic_scaling_matrix present */ + else + { + /* PPS matrix not present, use SPS information */ + if (parser->info.active_SPS.seq_scaling_matrix_present_flag) + { + for (i = 0; i < lists_to_set; i++) + { + if (parser->info.active_SPS.seq_scaling_list_present_flag[i]) + { + if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) || + ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6])) + { + /* use default scaling list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + } + } + else + { + /* use SPS list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64); + } + } + } + else + { + /* SPS list not present - use fallback rule A */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + break; + + case 1: + case 2: + case 4: + case 5: + 
memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + WTRACE("invalid scaling list index."); + break; + } + } + } + } + else + { + /* SPS matrix not present - use flat lists */ + for (i = 0; i < 6; i++) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16); + } + for (i = 0; i < 2; i++) + { + memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64); + } + } + } + + if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) && + (parser->info.active_PPS.pic_scaling_matrix_present_flag || + parser->info.active_SPS.seq_scaling_matrix_present_flag)) + { + for (i = 0; i < 2; i++) + { + memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64); + } + } +} + +static void vbp_set_codec_data_h264( + struct h264_viddec_parser *parser, + vbp_data_h264 *query_data) +{ + vbp_codec_data_h264* codec_data = query_data->codec_data; + + /* The following variables are used to detect if there is new SPS or PPS */ + uint8 seq_parameter_set_id = codec_data->seq_parameter_set_id; + uint8 pic_parameter_set_id = codec_data->pic_parameter_set_id; + int frame_width = codec_data->frame_width; + int frame_height = codec_data->frame_height; + + /* parameter id */ + codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id; + codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id; + + /* profile and level */ + codec_data->profile_idc = parser->info.active_SPS.profile_idc; + codec_data->level_idc = parser->info.active_SPS.level_idc; + + + /*constraint flag sets (h.264 Spec v2009)*/ + codec_data->constraint_set0_flag = (parser->info.active_SPS.constraint_set_flags & 0x10) >> 4; + codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x8) >> 3; + codec_data->constraint_set2_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2; + codec_data->constraint_set3_flag = (parser->info.active_SPS.constraint_set_flags & 0x2) >> 1; + codec_data->constraint_set4_flag = parser->info.active_SPS.constraint_set_flags & 0x1; + + /* reference frames */ + codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames; + + if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag && + !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag) + { + /* no longer necessary: two fields share the same interlaced surface */ + /* codec_data->num_ref_frames *= 2; */ + } + + codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; + + /* frame coding */ + codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; + + /* frame dimension */ + codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1 ) * 16; + + codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16; + + /* cropping information */ + codec_data->crop_left = 0; + codec_data->crop_right = 0; + codec_data->crop_top = 0; + codec_data->crop_bottom = 0; + if(parser->info.active_SPS.sps_disp.frame_cropping_flag) { + int CropUnitX = 0, CropUnitY = 0, SubWidthC = 0, SubHeightC = 0; + int ChromaArrayType = 0; + if(parser->info.active_SPS.sps_disp.separate_colour_plane_flag == 0) { + if(parser->info.active_SPS.sps_disp.chroma_format_idc == 1) { + SubWidthC = 2; + SubHeightC = 2; + } else if( 
parser->info.active_SPS.sps_disp.chroma_format_idc == 2) { + SubWidthC = 2; + SubHeightC = 1; + } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 3) { + SubWidthC = 1; + SubHeightC = 1; + } + ChromaArrayType = parser->info.active_SPS.sps_disp.chroma_format_idc; + } + + if(ChromaArrayType == 0) { + CropUnitX = 1; + CropUnitY = 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + } else { + CropUnitX = SubWidthC; + CropUnitY = SubHeightC * ( 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag); + } + + codec_data->crop_left = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset; + codec_data->crop_right = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset; // + 1; + codec_data->crop_top = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset; + codec_data->crop_bottom = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset; // + 1; + } + + /* aspect ratio */ + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag) + { + codec_data->aspect_ratio_idc = + parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc; + + if (codec_data->aspect_ratio_idc < 17) + { + codec_data->sar_width = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][0]; + codec_data->sar_height = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][1]; + } + else if (codec_data->aspect_ratio_idc == 255) + { + codec_data->sar_width = + parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width; + + codec_data->sar_height = + parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height; + } + else + { + codec_data->sar_width = 0; + codec_data->sar_height = 0; + } + } + else + { + // unspecified + codec_data->aspect_ratio_idc = 0; + codec_data->sar_width = 0; + codec_data->sar_height = 0; + } + + /* video format */ + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag) + { + codec_data->video_format = + parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format; + } + else + { + // Unspecified video format + codec_data->video_format = 5; + } + + codec_data->video_full_range_flag = + parser->info.active_SPS.sps_disp.vui_seq_parameters.video_full_range_flag; + + + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag) + { + codec_data->matrix_coefficients = + parser->info.active_SPS.sps_disp.vui_seq_parameters.matrix_coefficients; + } + else + { + // Unspecified + codec_data->matrix_coefficients = 2; + } + + codec_data->bit_rate = parser->info.active_SPS.sps_disp.vui_seq_parameters.bit_rate_value; + + /* picture order type and count */ + codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; + codec_data->pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type; + + + /* udpate sps and pps status */ + query_data->new_sps = (seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0; + query_data->new_pps = (pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 
1 : 0; + query_data->has_sps = parser->info.active_SPS.seq_parameter_set_id != 0xff; + query_data->has_pps = parser->info.active_PPS.seq_parameter_set_id != 0xff; + if ( frame_width != codec_data->frame_width || frame_height != codec_data->frame_height) + { + query_data->new_sps = 1; + query_data->new_pps = 1; + } +} + + +static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + + vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; + struct h264_viddec_parser* parser = NULL; + vbp_picture_data_h264* pic_data = NULL; + VAPictureParameterBufferH264* pic_parms = NULL; + + parser = (struct h264_viddec_parser *)cxt->codec_data; + + if (0 == parser->info.SliceHeader.first_mb_in_slice) + { + /* a new picture is parsed */ + query_data->num_pictures++; + } + + if (query_data->num_pictures == 0) + { + /* partial frame */ + query_data->num_pictures = 1; + } + + if (query_data->num_pictures > MAX_NUM_PICTURES) + { + ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES); + return VBP_DATA; + } + + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + WTRACE("MB address does not start from 0!"); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + pic_parms = pic_data->pic_parms; + + // relax this condition to support partial frame parsing + + //if (parser->info.SliceHeader.first_mb_in_slice == 0) + { + /** + * picture parameter only needs to be set once, + * even multiple slices may be encoded + */ + + /* VAPictureParameterBufferH264 */ + pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE; + pic_parms->CurrPic.frame_idx = 0; + if (parser->info.img.field_pic_flag == 1) + { + if (parser->info.img.bottom_field_flag) + { + pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD; + } + else + { + /* also OK set to 0 (from test suite) */ + pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD; + } + } + else + { + pic_parms->CurrPic.flags = 0; /* frame picture */ + } + pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc; + pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc; + pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num; + + /* don't care if current frame is used as long term reference */ + if (parser->info.SliceHeader.nal_ref_idc != 0) + { + pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE; + } + + pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1; + + /* frame height in MBS */ + pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1; + + pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8; + pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8; + + + pic_parms->seq_fields.value = 0; + pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc; + pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag; + pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; + pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag; + + /* new fields in libva 
0.31 */
+        pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;
+        pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4;
+        pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;
+        pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
+        pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag = parser->info.active_SPS.delta_pic_order_always_zero_flag;
+
+
+        /* referenced from UMG_Moorstown_TestSuites */
+        pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 1 : 0;
+
+        pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1;
+        pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type;
+        pic_parms->slice_group_change_rate_minus1 = 0;
+        pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26;
+        pic_parms->pic_init_qs_minus26 = 0;
+        pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset;
+        pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset;
+
+        pic_parms->pic_fields.value = 0;
+        pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag;
+        pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag;
+        pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc;
+        pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag;
+
+        /* new libva fields in v0.31 */
+        pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag;
+        pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag;
+        pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag;
+        pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0;
+
+        /* all slices in the picture have the same field_pic_flag */
+        pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag;
+        pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag;
+
+        pic_parms->frame_num = parser->info.SliceHeader.frame_num;
+    }
+
+
+    /* set reference frames, and num_ref_frames */
+    vbp_set_reference_frames_h264(parser, pic_parms);
+    if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+    {
+        int frame_idx;
+        for (frame_idx = 0; frame_idx < 16; frame_idx++)
+        {
+            pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE;
+            pic_parms->ReferenceFrames[frame_idx].frame_idx = 0;
+            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID;
+            pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0;
+            pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0;
+        }
+        /* num_ref_frames is 0 if the current picture is an IDR */
+        pic_parms->num_ref_frames = 0;
+    }
+    else
+    {
+        /* actual num_ref_frames is set in vbp_set_reference_frames_h264 */
+    }
+
+    return VBP_OK;
+}
+
+static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 bit, byte;
+    uint8 is_emul;
+
+    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
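+    /* (Hedged worked example for the offset math further down, not part
+     * of the original patch: if the parser stopped 3 bytes + 5 bits past
+     * the start of the NAL, slice_data_bit_offset = 5 + 3 * 8 = 29; each
+     * emulation-prevention byte seen so far then subtracts another 8.) */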
VASliceParameterBufferH264 *slc_parms = NULL; + vbp_slice_data_h264 *slc_data = NULL; + struct h264_viddec_parser* h264_parser = NULL; + h264_Slice_Header_t* slice_header = NULL; + vbp_picture_data_h264* pic_data = NULL; + + + h264_parser = (struct h264_viddec_parser *)cxt->codec_data; + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + ETRACE("invalid picture data index."); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + + slc_data = &(pic_data->slc_data[pic_data->num_slices]); + slc_data->buffer_addr = cxt->parse_cubby.buf; + slc_parms = &(slc_data->slc_parms); + + /* byte: how many bytes have been parsed */ + /* bit: bits parsed within the current parsing position */ + viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul); + + slc_data->nal_unit_type = h264_parser->info.nal_unit_type; + + slc_parms->slice_data_size = slc_data->slice_size = + pcontext->parser_cxt->list.data[index].edpos - + pcontext->parser_cxt->list.data[index].stpos; + + /* the offset to the NAL start code for this slice */ + slc_data->slice_offset = cxt->list.data[index].stpos; + slc_parms->slice_data_offset = 0; + + /* whole slice is in this buffer */ + slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + + /* bit offset from NAL start code to the beginning of slice data */ + slc_parms->slice_data_bit_offset = bit + byte * 8; + + if (is_emul) + { + WTRACE("next byte is emulation prevention byte."); + /*slc_parms->slice_data_bit_offset += 8; */ + } + + if (cxt->getbits.emulation_byte_counter != 0) + { + slc_parms->slice_data_bit_offset -= cxt->getbits.emulation_byte_counter * 8; + } + + slice_header = &(h264_parser->info.SliceHeader); + slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice; + + if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag & + (!(h264_parser->info.SliceHeader.field_pic_flag))) + { + slc_parms->first_mb_in_slice /= 2; + } + + slc_parms->slice_type = slice_header->slice_type; + + slc_parms->direct_spatial_mv_pred_flag = slice_header->direct_spatial_mv_pred_flag; + + slc_parms->num_ref_idx_l0_active_minus1 = 0; + slc_parms->num_ref_idx_l1_active_minus1 = 0; + if (slice_header->slice_type == h264_PtypeI) + { + } + else if (slice_header->slice_type == h264_PtypeP) + { + slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; + } + else if (slice_header->slice_type == h264_PtypeB) + { + slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; + slc_parms->num_ref_idx_l1_active_minus1 = slice_header->num_ref_idx_l1_active - 1; + } + else + { + WTRACE("slice type %d is not supported.", slice_header->slice_type); + } + + slc_parms->cabac_init_idc = slice_header->cabac_init_idc; + slc_parms->slice_qp_delta = slice_header->slice_qp_delta; + slc_parms->disable_deblocking_filter_idc = slice_header->disable_deblocking_filter_idc; + slc_parms->slice_alpha_c0_offset_div2 = slice_header->slice_alpha_c0_offset_div2; + slc_parms->slice_beta_offset_div2 = slice_header->slice_beta_offset_div2; + + + vbp_set_pre_weight_table_h264(h264_parser, slc_parms); + vbp_set_slice_ref_list_h264(h264_parser, slc_parms); + + + pic_data->num_slices++; + + //vbp_update_reference_frames_h264_methodB(pic_data); + if (pic_data->num_slices > MAX_NUM_SLICES) + { + ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES); + return VBP_DATA; + } + + /*if (pic_data->num_slices > 1) + { + ITRACE("number of slices per picture is %d.", pic_data->num_slices); + }*/ + return 
VBP_OK;
+}
+
+/**
+* parse decoder configuration data
+*/
+uint32 vbp_parse_init_data_h264(vbp_context* pcontext)
+{
+    /* parsing the AVCDecoderConfigurationRecord structure (see ISO/IEC 14496-15) */
+
+    uint8 configuration_version = 0;
+    uint8 AVC_profile_indication = 0;
+    uint8 profile_compatibility = 0;
+    uint8 AVC_level_indication = 0;
+    uint8 length_size_minus_one = 0;
+    uint8 num_of_sequence_parameter_sets = 0;
+    uint8 num_of_picture_parameter_sets = 0;
+    uint16 sequence_parameter_set_length = 0;
+    uint16 picture_parameter_set_length = 0;
+
+    int i = 0;
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+
+    vbp_h264_parser_private *parser_private = (vbp_h264_parser_private *)pcontext->parser_private;
+    // enable emulation prevention
+    cxt->getbits.is_emul_reqd = 1;
+
+    /* check if the configuration data is start-code prefixed */
+    viddec_sc_parse_cubby_cxt_t cubby = cxt->parse_cubby;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    int ret = ops->parse_sc((void *)&cubby,
+                            NULL, /* context, not used */
+                            &(cxt->sc_prefix_info));
+    if (ret == 1)
+    {
+        WTRACE("configuration data is start-code prefixed.");
+        parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
+        return vbp_parse_start_code_h264(pcontext);
+    }
+
+
+    uint8* cur_data = cxt->parse_cubby.buf;
+
+
+    if (cxt->parse_cubby.size < 6)
+    {
+        /* need at least 6 bytes to start parsing the structure, see ISO/IEC 14496-15 */
+        return VBP_DATA;
+    }
+
+    configuration_version = *cur_data++;
+    AVC_profile_indication = *cur_data++;
+
+    /*ITRACE("Profile indication: %d", AVC_profile_indication); */
+
+    profile_compatibility = *cur_data++;
+    AVC_level_indication = *cur_data++;
+
+    /* ITRACE("Level indication: %d", AVC_level_indication);*/
+    /* 6 bits of reserved ('111111') and 2 bits of length_size_minus_one */
+    length_size_minus_one = (*cur_data) & 0x3;
+
+    if (length_size_minus_one != 3)
+    {
+        WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1);
+    }
+
+    parser_private->NAL_length_size = length_size_minus_one + 1;
+
+    cur_data++;
+
+    /* 3 bits of reserved ('111') and 5 bits of num_of_sequence_parameter_sets */
+    num_of_sequence_parameter_sets = (*cur_data) & 0x1f;
+    if (num_of_sequence_parameter_sets > 1)
+    {
+        WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets);
+    }
+    if (num_of_sequence_parameter_sets > MAX_NUM_SPS)
+    {
+        /* this can never happen, as MAX_NUM_SPS == 32 and the field is only 5 bits wide */
+        WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS);
+    }
+    cur_data++;
+
+    cxt->list.num_items = 0;
+    for (i = 0; i < num_of_sequence_parameter_sets; i++)
+    {
+        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+        {
+            /* need at least 2 bytes to parse sequence_parameter_set_length */
+            ETRACE("Not enough data to parse SPS length.");
+            return VBP_DATA;
+        }
+
+        /* 16 bits */
+        sequence_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+
+        cur_data += 2;
+
+        if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size)
+        {
+            /* need at least sequence_parameter_set_length bytes for the SPS */
+            ETRACE("Not enough data to parse SPS.");
+            return VBP_DATA;
+        }
+
+        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+        /* end pos is exclusive */
+        cxt->list.data[cxt->list.num_items].edpos =
+            cxt->list.data[cxt->list.num_items].stpos + sequence_parameter_set_length;
+
+        cxt->list.num_items++;
+
+        cur_data += sequence_parameter_set_length;
+    }
+
+    if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size)
+    {
+        /* need at least one more byte to parse num_of_picture_parameter_sets */
+        ETRACE("Not enough data to parse number of PPS.");
+        return VBP_DATA;
+    }
+
+    num_of_picture_parameter_sets = *cur_data++;
+    if (num_of_picture_parameter_sets > 1)
+    {
+        /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */
+    }
+
+    for (i = 0; i < num_of_picture_parameter_sets; i++)
+    {
+        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+        {
+            /* need at least 2 bytes to parse picture_parameter_set_length */
+            ETRACE("Not enough data to parse PPS length.");
+            return VBP_DATA;
+        }
+
+        /* 16 bits */
+        picture_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+        cur_data += 2;
+
+        if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size)
+        {
+            /* need at least picture_parameter_set_length bytes for the PPS */
+            ETRACE("Not enough data to parse PPS.");
+            return VBP_DATA;
+        }
+
+        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+        /* end pos is exclusive */
+        cxt->list.data[cxt->list.num_items].edpos =
+            cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length;
+
+        cxt->list.num_items++;
+
+        cur_data += picture_parameter_set_length;
+    }
+
+    if ((cur_data - cxt->parse_cubby.buf) != cxt->parse_cubby.size)
+    {
+        WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.",
+               cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf));
+    }
+
+    parser_private->bitstream_pattern = H264_BS_LENGTH_PREFIXED;
+    return VBP_OK;
+}
+
+static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p, int *NAL_length_size)
+{
+    switch (*NAL_length_size)
+    {
+    case 4:
+        return vbp_utils_ntohl(p);
+
+    case 3:
+    {
+        uint32_t i = ((*p) << 16) + ((*(p+1)) << 8) + ((*(p+2)));
+        return i;
+    }
+
+    case 2:
+        return vbp_utils_ntohs(p);
+
+    case 1:
+        return *p;
+
+    default:
+        WTRACE("invalid NAL_length_size: %d.", *NAL_length_size);
+        /* default to 4 bytes for the length */
+        *NAL_length_size = 4;
+        return vbp_utils_ntohl(p);
+    }
+}
+
+/**
+* An H.264 elementary stream in the ISO/IEC 14496-15 sample format does not
+* contain start codes; instead, each NAL unit is preceded by its payload size.
+*/
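+/* A hedged, self-contained illustration of the avcC layout parsed above in
+ * vbp_parse_init_data_h264 (ISO/IEC 14496-15); not part of the original
+ * patch, all names below are hypothetical, and the block is compiled out. */
+#if 0
+#include <stdint.h>
+static void avcc_layout_example(void)
+{
+    /* 0x01 configurationVersion, 0x64 AVCProfileIndication (High),
+     * 0x00 profile_compatibility, 0x28 AVCLevelIndication (level 4.0),
+     * 0xFF = '111111' reserved + lengthSizeMinusOne == 3,
+     * 0xE1 = '111' reserved + numOfSequenceParameterSets == 1;
+     * a 16-bit SPS length would follow next. */
+    const uint8_t avcc[6] = { 0x01, 0x64, 0x00, 0x28, 0xFF, 0xE1 };
+    int nal_length_size = (avcc[4] & 0x3) + 1;   /* == 4 */
+    int num_sps         = avcc[5] & 0x1f;        /* == 1 */
+    (void)nal_length_size;
+    (void)num_sps;
+}
+#endif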
+/* The Annex B start-code prefix is 0x000001, which is 3 bytes. */
+#define H264_SC_SIZE 3
+uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    vbp_h264_parser_private *parser_private = (vbp_h264_parser_private *)pcontext->parser_private;
+
+    /* reset query data for the new sample buffer */
+    vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
+    int i;
+
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].num_slices = 0;
+    }
+    query_data->num_pictures = 0;
+
+    cxt->list.num_items = 0;
+
+    /* reset the start position of the first item to 0 in case there is only one item */
+    cxt->list.data[0].stpos = 0;
+
+    /* start-code emulation prevention bytes are present in the NAL */
+    cxt->getbits.is_emul_reqd = 1;
+
+    if (parser_private->bitstream_pattern == H264_BS_LENGTH_PREFIXED)
+    {
+        viddec_sc_parse_cubby_cxt_t* cubby = NULL;
+        int32_t size_left = 0;
+        int32_t size_parsed = 0;
+        int32_t NAL_length = 0;
+
+        cubby = &(cxt->parse_cubby);
+
+        size_left = cubby->size;
+
+        while (size_left >= parser_private->NAL_length_size)
+        {
+            NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed, &parser_private->NAL_length_size);
+            if (NAL_length <= 0 || NAL_length > size_left - parser_private->NAL_length_size)
+            {
+                ETRACE("Invalid NAL_length parsed.");
+                break;
+            }
+
+            size_parsed += parser_private->NAL_length_size;
+            cxt->list.data[cxt->list.num_items].stpos = size_parsed;
+            size_parsed += NAL_length; /* skip NAL bytes */
+            /* end position is exclusive */
+            cxt->list.data[cxt->list.num_items].edpos = size_parsed;
+            cxt->list.num_items++;
+            if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+            {
+                ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC);
+                break;
+            }
+
+            size_left = cubby->size - size_parsed;
+        }
+
+        if (size_left != 0 && parser_private->length_prefix_verified == 0)
+        {
+            WTRACE("Elementary stream is not aligned (%d).", size_left);
+
+            /* attempt to reinterpret the length prefix as a start-code prefix, only once;
+             * if that succeeds we always treat the bitstream as start-code prefixed,
+             * otherwise we keep treating it as length prefixed
+             */
+            parser_private->length_prefix_verified = 1;
+            viddec_sc_parse_cubby_cxt_t temp_cubby = cxt->parse_cubby;
+
+            viddec_parser_ops_t *ops = pcontext->parser_ops;
+            int ret = ops->parse_sc((void *)&temp_cubby,
+                                    NULL, /* context, not used */
+                                    &(cxt->sc_prefix_info));
+
+            /* found a start code */
+            if (ret == 1)
+            {
+                WTRACE("Stream was supposed to be length prefixed, but actually is start-code prefixed.");
+                parser_private->NAL_length_size = 0;
+                parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
+                /* reset parsing data */
+                for (i = 0; i < MAX_NUM_PICTURES; i++)
+                {
+                    query_data->pic_data[i].num_slices = 0;
+                }
+                query_data->num_pictures = 0;
+                cxt->list.num_items = 0;
+            }
+        }
+    }
+
+
+    if (parser_private->bitstream_pattern == H264_BS_SC_PREFIXED)
+    {
+        viddec_sc_parse_cubby_cxt_t cubby;
+        /* memory copy without updating cxt->parse_cubby */
+        cubby = cxt->parse_cubby;
+        viddec_parser_ops_t *ops = pcontext->parser_ops;
+        int ret = 0;
+
+        while (1)
+        {
+            ret = ops->parse_sc((void *)&cubby,
+                                NULL, /* context, not used */
+                                &(cxt->sc_prefix_info));
+            if (ret == 1)
+            {
+                if (cxt->list.num_items == 0)
+                {
+                    cxt->list.data[0].stpos = cubby.sc_end_pos;
+                }
+                else
+                {
+                    cxt->list.data[cxt->list.num_items].stpos =
+                        cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos;
+                    cxt->list.data[cxt->list.num_items - 1].edpos = cxt->list.data[cxt->list.num_items].stpos - H264_SC_SIZE;
+                }
+
+                cubby.phase = 0;
+                cubby.buf = cxt->parse_cubby.buf +
cxt->list.data[cxt->list.num_items].stpos;
+
+                cubby.size = cxt->parse_cubby.size -
+                             cxt->list.data[cxt->list.num_items].stpos;
+
+                cxt->list.num_items++;
+                if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+                {
+                    WTRACE("Num items exceeds the limit!");
+                    /* not fatal, just stop parsing */
+                    break;
+                }
+            }
+            else
+            {
+                if (cxt->list.num_items == 0)
+                {
+                    cxt->list.num_items = 1;
+                    parser_private->bitstream_pattern = H264_BS_SINGLE_NAL;
+                    WTRACE("Stream was supposed to be SC prefixed, but actually contains a single NAL.");
+                }
+                cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
+                break;
+            }
+        }
+
+    }
+
+    if (parser_private->bitstream_pattern == H264_BS_SINGLE_NAL)
+    {
+        cxt->list.num_items = 1;
+        cxt->list.data[0].stpos = 0;
+        cxt->list.data[0].edpos = cxt->parse_cubby.size;
+    }
+
+
+    return VBP_OK;
+}
+
+/**
+* process parsing result after a NAL unit is parsed
+*/
+uint32 vbp_process_parsing_result_h264(vbp_context *pcontext, int i)
+{
+    if (i >= MAX_NUM_SLICES)
+    {
+        return VBP_PARM;
+    }
+
+    uint32 error = VBP_OK;
+
+    struct h264_viddec_parser* parser = NULL;
+    parser = (struct h264_viddec_parser *)&(pcontext->parser_cxt->codec_data[0]);
+    vbp_data_h264* query_data = (vbp_data_h264 *)pcontext->query_data;
+    switch (parser->info.nal_unit_type)
+    {
+    case h264_NAL_UNIT_TYPE_SLICE:
+        //ITRACE("slice header is parsed.");
+        error = vbp_add_pic_data_h264(pcontext, i);
+        if (VBP_OK == error)
+        {
+            error = vbp_add_slice_data_h264(pcontext, i);
+        }
+        break;
+
+    case h264_NAL_UNIT_TYPE_IDR:
+        //ITRACE("IDR header is parsed.");
+        error = vbp_add_pic_data_h264(pcontext, i);
+        if (VBP_OK == error)
+        {
+            error = vbp_add_slice_data_h264(pcontext, i);
+        }
+        break;
+
+    case h264_NAL_UNIT_TYPE_SEI:
+        //ITRACE("SEI header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_SPS:
+        ITRACE("SPS header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_PPS:
+        ITRACE("PPS header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+        //ITRACE("access unit delimiter is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_EOSeq:
+        ITRACE("EOSeq is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_EOstream:
+        ITRACE("EOStream is parsed.");
+        break;
+
+    default:
+        WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type);
+        break;
+    }
+
+    if (query_data->num_pictures == MAX_NUM_PICTURES && parser->info.img.field_pic_flag != 1)
+    {
+        WTRACE("more than one frame found in the buffer (%d).", query_data->num_pictures);
+        return (error == VBP_OK ? VBP_MULTI : error);
+    }
+    return error;
+}
+
+/*
+* fill query data structure after the sample buffer is parsed
+*/
+uint32 vbp_populate_query_data_h264(vbp_context *pcontext)
+{
+    vbp_data_h264 *query_data = NULL;
+    struct h264_viddec_parser *parser = NULL;
+    struct vbp_h264_parser_private_t* private = NULL;
+
+    parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data;
+    query_data = (vbp_data_h264 *)pcontext->query_data;
+    private = (struct vbp_h264_parser_private_t *)pcontext->parser_private;
+
+    vbp_set_codec_data_h264(parser, query_data);
+
+    /* buffer number */
+    query_data->buf_number = buffer_counter;
+
+    /* VAIQMatrixBufferH264 */
+    vbp_set_scaling_list_h264(parser, query_data->IQ_matrix_buf);
+
+    if (query_data->num_pictures > 0)
+    {
+        /*
+         * picture parameter buffer and slice parameter buffer have been populated
+         */
+    }
+    else
+    {
+        /**
+         * add a dummy picture that contains picture parameters parsed
+         * from SPS and PPS.
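+         */
+#if 0
+        /* A hedged sketch, not part of the original patch and compiled out:
+         * how the length-prefixed branch of vbp_parse_start_code_h264 above
+         * segments a buffer, assuming 4-byte NAL lengths; cubby_buf and
+         * cubby_size stand in for the parse cubby's buffer and size.
+         * A buffer laid out as
+         *   00 00 00 05 | 5 NAL bytes | 00 00 00 03 | 3 NAL bytes
+         * yields two list items with [stpos, edpos) = [4, 9) and [13, 16). */
+        uint32_t pos = 0;
+        while (pos + 4 <= cubby_size)
+        {
+            uint32_t nal_len = vbp_utils_ntohl(cubby_buf + pos);  /* big-endian size */
+            if (nal_len == 0 || nal_len > cubby_size - pos - 4)
+                break;                  /* same sanity check as the real loop */
+            pos += 4 + nal_len;         /* [pos - nal_len, pos) is one NAL unit */
+        }
+#endif
+        /* now add that dummy picture: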
+ */ + vbp_add_pic_data_h264(pcontext, 0); + } + + return VBP_OK; +} + + + diff --git a/mixvbp/vbp_manager/vbp_h264_parser.h b/mixvbp/vbp_manager/vbp_h264_parser.h new file mode 100755 index 0000000..0094edb --- /dev/null +++ b/mixvbp/vbp_manager/vbp_h264_parser.h @@ -0,0 +1,67 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + +#ifndef VBP_H264_PARSER_H +#define VBP_H264_PARSER_H + +/* + * setup parser's entry points + */ +uint32 vbp_init_parser_entries_h264(vbp_context *pcontext); + +/* + * allocate query data + */ +uint32 vbp_allocate_query_data_h264(vbp_context *pcontext); + +/* + * free query data + */ +uint32 vbp_free_query_data_h264(vbp_context *pcontext); + +/* + * parse initialization data + */ +uint32 vbp_parse_init_data_h264(vbp_context *pcontext); + +/* + * parse start code. Only support lenght prefixed mode. Start + * code prefixed is not supported. + */ +uint32 vbp_parse_start_code_h264(vbp_context *pcontext); + +/* + * process parsing result + */ +uint32 vbp_process_parsing_result_h264(vbp_context *pcontext, int list_index); + +/* + * query parsing result + */ +uint32 vbp_populate_query_data_h264(vbp_context *pcontext); + + + +#endif /*VBP_H264_PARSER_H*/ diff --git a/mixvbp/vbp_manager/vbp_loader.c b/mixvbp/vbp_manager/vbp_loader.c new file mode 100755 index 0000000..972ab2d --- /dev/null +++ b/mixvbp/vbp_manager/vbp_loader.c @@ -0,0 +1,205 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. 
Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + +#include "vbp_loader.h" +#include "vbp_utils.h" + +/** + * + */ +uint32 vbp_open(uint32 parser_type, Handle *hcontext) +{ + vbp_context **ppcontext; + uint32 error; + + if (NULL == hcontext) + { + return VBP_PARM; + } + + *hcontext = NULL; /* prepare for failure. */ + + ppcontext = (vbp_context **)hcontext; + + // TODO: check if vbp context has been created. + + + error = vbp_utils_create_context(parser_type, ppcontext); + if (VBP_OK != error) + { + ETRACE("Failed to create context: %d.", error); + } + + return error; +} + +/** + * + */ +uint32 vbp_close(Handle hcontext) +{ + uint32 error; + + if (NULL == hcontext) + { + return VBP_PARM; + } + + vbp_context *pcontext = (vbp_context *)hcontext; + + if (MAGIC_NUMBER != pcontext->identifier) + { + /* not a valid vbp context. */ + ETRACE("context is not initialized"); + return VBP_INIT; + } + error = vbp_utils_destroy_context(pcontext); + if (VBP_OK != error) + { + ETRACE("Failed to destroy context: %d.", error); + } + + return error; +} + + +/** + * + */ +uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag) +{ + vbp_context *pcontext; + uint32 error = VBP_OK; + + if ((NULL == hcontext) || (NULL == data) || (0 == size)) + { + ETRACE("Invalid input parameters."); + return VBP_PARM; + } + + pcontext = (vbp_context *)hcontext; + + if (MAGIC_NUMBER != pcontext->identifier) + { + ETRACE("context is not initialized"); + return VBP_INIT; + } + + error = vbp_utils_parse_buffer(pcontext, data, size, init_data_flag); + + if (VBP_OK != error) + { + ETRACE("Failed to parse buffer: %d.", error); + } + return error; +} + +/** + * + */ +uint32 vbp_query(Handle hcontext, void **data) +{ + vbp_context *pcontext; + uint32 error = VBP_OK; + + if ((NULL == hcontext) || (NULL == data)) + { + ETRACE("Invalid input parameters."); + return VBP_PARM; + } + + pcontext = (vbp_context *)hcontext; + + if (MAGIC_NUMBER != pcontext->identifier) + { + ETRACE("context is not initialized"); + return VBP_INIT; + } + + error = vbp_utils_query(pcontext, data); + + if (VBP_OK != error) + { + ETRACE("Failed to query parsing result: %d.", error); + } + return error; +} + +/** + * + */ +uint32 vbp_flush(Handle hcontext) +{ + vbp_context *pcontext; + uint32 error = VBP_OK; + + if (NULL == hcontext) + { + ETRACE("Invalid input parameters."); + return VBP_PARM; + } + + pcontext = (vbp_context *)hcontext; + + if (MAGIC_NUMBER != pcontext->identifier) + { + ETRACE("context is not initialized"); + return VBP_INIT; + } + + error = vbp_utils_flush(pcontext); + + return error; +} + +#ifdef USE_AVC_SHORT_FORMAT +uint32 vbp_update(Handle hcontext, void *newdata, uint32 size, void **data) +{ + vbp_context *pcontext; + uint32 error = VBP_OK; + + if ((NULL == hcontext) || (NULL == newdata) || (0 == size) || (NULL == data)) + { + ETRACE("Invalid input parameters."); + return VBP_PARM; + } + + pcontext = (vbp_context *)hcontext; + + if (MAGIC_NUMBER != pcontext->identifier) + { + ETRACE("context is not initialized"); + return VBP_INIT; + } + + error = vbp_utils_update(pcontext, newdata, size, data); + + if (VBP_OK != error) + { + ETRACE("Failed to query parsing result: %d.", error); + } + return error; +} +#endif diff --git a/mixvbp/vbp_manager/vbp_loader.h b/mixvbp/vbp_manager/vbp_loader.h new file mode 100755 index 0000000..ad4b106 --- /dev/null +++ b/mixvbp/vbp_manager/vbp_loader.h @@ -0,0 +1,476 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel 
+* Copyright (c) 2009 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_LOADER_H
+#define VBP_LOADER_H
+
+#include <va/va.h>
+
+#ifdef USE_HW_VP8
+#include <va/va_dec_vp8.h>
+#endif
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+
+#ifndef uint8
+typedef unsigned char uint8;
+#endif
+#ifndef uint16
+typedef unsigned short uint16;
+#endif
+#ifndef uint32
+typedef unsigned int uint32;
+#endif
+
+typedef void *Handle;
+
+/*
+ * MPEG-4 Part 2 data structure
+ */
+
+typedef struct _vbp_codec_data_mp42
+{
+    uint8 profile_and_level_indication;
+    uint32 video_object_layer_width;
+    uint32 video_object_layer_height;
+
+    // 0 for unspecified, PAL/NTSC/SECAM
+    uint8 video_format;
+
+    // 0 short range, 1 full range
+    uint8 video_range;
+
+    // default 2 (unspecified), 1 for BT709.
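+    // (6 indicates SMPTE 170M; see vbp_fill_codec_data in
+    // vbp_mp42_parser.c for how the default is chosen when the
+    // stream carries no colour description)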
+    uint8 matrix_coefficients;
+
+    uint8 short_video_header;
+
+    // always exists for MPEG-4
+    uint8 aspect_ratio_info;
+    uint8 par_width;
+    uint8 par_height;
+
+    // bit rate
+    int bit_rate;
+} vbp_codec_data_mp42;
+
+typedef struct _vbp_slice_data_mp42
+{
+    uint8* buffer_addr;
+    uint32 slice_offset;
+    uint32 slice_size;
+    VASliceParameterBufferMPEG4 slice_param;
+} vbp_slice_data_mp42;
+
+typedef struct _vbp_picture_data_mp42 vbp_picture_data_mp42;
+
+struct _vbp_picture_data_mp42
+{
+    uint8 vop_coded;
+    uint16 vop_time_increment;
+    /* indicates if current buffer contains parameters for the first slice of the picture */
+    uint8 new_picture_flag;
+    VAPictureParameterBufferMPEG4 picture_param;
+    vbp_slice_data_mp42 slice_data;
+
+    vbp_picture_data_mp42* next_picture_data;
+};
+
+typedef struct _vbp_data_mp42
+{
+    vbp_codec_data_mp42 codec_data;
+    VAIQMatrixBufferMPEG4 iq_matrix_buffer;
+
+    uint32 number_picture_data;
+    uint32 number_pictures;
+
+    vbp_picture_data_mp42 *picture_data;
+
+} vbp_data_mp42;
+
+/*
+ * H.264 data structure
+ */
+
+typedef struct _vbp_codec_data_h264
+{
+    uint8 pic_parameter_set_id;
+    uint8 seq_parameter_set_id;
+
+    uint8 profile_idc;
+    uint8 level_idc;
+    /* constraint flag sets (h.264 spec v2009) */
+    uint8 constraint_set0_flag;
+    uint8 constraint_set1_flag;
+    uint8 constraint_set2_flag;
+    uint8 constraint_set3_flag;
+    uint8 constraint_set4_flag;
+
+    uint8 num_ref_frames;
+    uint8 gaps_in_frame_num_value_allowed_flag;
+
+    uint8 frame_mbs_only_flag;
+    uint8 mb_adaptive_frame_field_flag;
+
+    int frame_width;
+    int frame_height;
+
+    uint8 vui_parameters_present_flag;
+
+    /* aspect ratio */
+    uint8 aspect_ratio_idc;
+    uint16 sar_width;
+    uint16 sar_height;
+
+    /* cropping information */
+    int crop_top;
+    int crop_bottom;
+    int crop_left;
+    int crop_right;
+
+    /* video format */
+
+    // default 5 unspecified
+    uint8 video_format;
+    uint8 video_full_range_flag;
+
+    // default 2 unspecified
+    uint8 matrix_coefficients;
+
+    uint8 pic_order_cnt_type;
+    int log2_max_pic_order_cnt_lsb_minus4;
+
+    int bit_rate;
+
+} vbp_codec_data_h264;
+
+typedef struct _vbp_slice_data_h264
+{
+    uint8* buffer_addr;
+
+    uint32 slice_offset; /* slice data offset */
+
+    uint32 slice_size; /* slice data size */
+
+    uint8 nal_unit_type;
+
+    VASliceParameterBufferH264 slc_parms;
+
+} vbp_slice_data_h264;
+
+
+typedef struct _vbp_picture_data_h264
+{
+    VAPictureParameterBufferH264* pic_parms;
+
+    uint32 num_slices;
+
+    vbp_slice_data_h264* slc_data;
+
+} vbp_picture_data_h264;
+
+
+typedef struct _vbp_data_h264
+{
+    /* rolling counter of buffers sent by vbp_parse */
+    uint32 buf_number;
+
+    uint32 num_pictures;
+
+    /* if SPS has been received */
+    uint8 has_sps;
+
+    /* if PPS has been received */
+    uint8 has_pps;
+
+    uint8 new_sps;
+
+    uint8 new_pps;
+
+    vbp_picture_data_h264* pic_data;
+
+    /**
+     * do we need to send the matrix to VA for each picture? If not, we need
+     * a flag indicating whether it is updated.
+     */
+    VAIQMatrixBufferH264* IQ_matrix_buf;
+
+    vbp_codec_data_h264* codec_data;
+
+} vbp_data_h264;
+
+/*
+ * VC-1 data structure
+ */
+typedef struct _vbp_codec_data_vc1
+{
+    /* Sequence layer. */
+    uint8 PROFILE;
+    uint8 LEVEL;
+    uint8 POSTPROCFLAG;
+    uint8 PULLDOWN;
+    uint8 INTERLACE;
+    uint8 TFCNTRFLAG;
+    uint8 FINTERPFLAG;
+    uint8 PSF;
+
+    // default 2: unspecified
+    uint8 MATRIX_COEF;
+
+    /* Entry point layer. */
+    uint8 BROKEN_LINK;
+    uint8 CLOSED_ENTRY;
+    uint8 PANSCAN_FLAG;
+    uint8 REFDIST_FLAG;
+    uint8 LOOPFILTER;
+    uint8 FASTUVMC;
+    uint8 EXTENDED_MV;
+    uint8 DQUANT;
+    uint8 VSTRANSFORM;
+    uint8 OVERLAP;
+    uint8 QUANTIZER;
+    uint16 CODED_WIDTH;
+    uint16 CODED_HEIGHT;
+    uint8 EXTENDED_DMV;
+    uint8 RANGE_MAPY_FLAG;
+    uint8 RANGE_MAPY;
+    uint8 RANGE_MAPUV_FLAG;
+    uint8 RANGE_MAPUV;
+
+    /* Others. */
+    uint8 RANGERED;
+    uint8 MAXBFRAMES;
+    uint8 MULTIRES;
+    uint8 SYNCMARKER;
+    uint8 RNDCTRL;
+    uint8 REFDIST;
+    uint16 widthMB;
+    uint16 heightMB;
+
+    uint8 INTCOMPFIELD;
+    uint8 LUMSCALE2;
+    uint8 LUMSHIFT2;
+
+    // aspect ratio
+
+    // default unspecified
+    uint8 ASPECT_RATIO;
+
+    uint8 ASPECT_HORIZ_SIZE;
+    uint8 ASPECT_VERT_SIZE;
+    // bit rate
+    int bit_rate;
+} vbp_codec_data_vc1;
+
+typedef struct _vbp_slice_data_vc1
+{
+    uint8 *buffer_addr;
+    uint32 slice_offset;
+    uint32 slice_size;
+    VASliceParameterBufferVC1 slc_parms; /* slice parameters for libVA */
+} vbp_slice_data_vc1;
+
+
+typedef struct _vbp_picture_data_vc1
+{
+    uint32 picture_is_skipped; /* VC1_PTYPE_SKIPPED if PTYPE is skipped. */
+    VAPictureParameterBufferVC1 *pic_parms; /* current parsed picture header */
+    uint32 size_bitplanes; /* based on number of MBs */
+    uint8 *packed_bitplanes; /* contains up to three bitplanes packed for libVA */
+    uint32 num_slices; /* number of slices. always at least one */
+    vbp_slice_data_vc1 *slc_data; /* pointer to array of slice data */
+} vbp_picture_data_vc1;
+
+typedef struct _vbp_data_vc1
+{
+    uint32 buf_number; /* rolling counter of buffers sent by vbp_parse */
+    vbp_codec_data_vc1 *se_data; /* parsed SH/EPs */
+
+    uint32 num_pictures;
+
+    vbp_picture_data_vc1* pic_data;
+} vbp_data_vc1;
+
+#ifdef USE_HW_VP8
+typedef struct _vbp_codec_data_vp8
+{
+    uint8 frame_type;
+    uint8 version_num;
+    int show_frame;
+
+    uint32 frame_width;
+    uint32 frame_height;
+
+    int refresh_alt_frame;
+    int refresh_golden_frame;
+    int refresh_last_frame;
+
+    /* cropping information */
+    int crop_top;
+    int crop_bottom;
+    int crop_left;
+    int crop_right;
+
+    int golden_copied;
+    int altref_copied;
+} vbp_codec_data_vp8;
+
+typedef struct _vbp_slice_data_vp8
+{
+    uint8 *buffer_addr;
+    uint32 slice_offset;
+    uint32 slice_size;
+    VASliceParameterBufferVP8 slc_parms; /* slice parameters for libVA */
+} vbp_slice_data_vp8;
+
+typedef struct _vbp_picture_data_vp8
+{
+    VAPictureParameterBufferVP8* pic_parms; /* current parsed picture header */
+
+    uint32 num_slices; /* number of slices. always one for VP8 */
+    vbp_slice_data_vp8 *slc_data; /* pointer to array of slice data */
+} vbp_picture_data_vp8;
+
+typedef struct _vbp_data_vp8
+{
+    uint32 buf_number; /* rolling counter of buffers sent by vbp_parse */
+    vbp_codec_data_vp8 *codec_data;
+
+    uint32 num_pictures;
+
+    vbp_picture_data_vp8* pic_data;
+
+    VAProbabilityDataBufferVP8* prob_data;
+    VAIQMatrixBufferVP8* IQ_matrix_buf;
+} vbp_data_vp8;
+#endif
+
+enum _picture_type
+{
+    VC1_PTYPE_I,
+    VC1_PTYPE_P,
+    VC1_PTYPE_B,
+    VC1_PTYPE_BI,
+    VC1_PTYPE_SKIPPED
+};
+
+enum _vbp_parser_error
+{
+    VBP_OK,
+    VBP_TYPE,
+    VBP_LOAD,
+    VBP_INIT,
+    VBP_DATA,
+    VBP_DONE,
+    VBP_MEM,
+    VBP_PARM,
+    VBP_PARTIAL,
+    VBP_MULTI
+};
+
+enum _vbp_parser_type
+{
+    VBP_VC1,
+    VBP_MPEG2,
+    VBP_MPEG4,
+    VBP_H264,
+#ifdef USE_HW_VP8
+    VBP_VP8,
+#endif
+#ifdef USE_AVC_SHORT_FORMAT
+    VBP_H264SECURE,
+#endif
+};
+
+
+/*
+ * open video bitstream parser to parse a specific media type.
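+ * A typical call sequence is sketched below for illustration only;
+ * error handling is omitted, and buf/size are placeholders rather
+ * than part of this header:
+ *
+ *   Handle ctx = NULL;
+ *   if (vbp_open(VBP_MPEG4, &ctx) == VBP_OK)
+ *   {
+ *       vbp_parse(ctx, buf, size, 0);
+ *       void *result = NULL;
+ *       vbp_query(ctx, &result);
+ *       vbp_close(ctx);
+ *   }
+ *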
+ * @param parser_type: one of the types defined in #vbp_parser_type
+ * @param hcontext: pointer to hold returned VBP context handle.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_open(uint32 parser_type, Handle *hcontext);
+
+/*
+ * close video bitstream parser.
+ * @param hcontext: VBP context handle.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_close(Handle hcontext);
+
+/*
+ * parse bitstream.
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to bitstream buffer.
+ * @param size: size of bitstream buffer.
+ * @param init_data_flag: 1 if buffer contains bitstream configuration data, 0 otherwise.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag);
+
+/*
+ * query parsing result.
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to hold a data blob that contains parsing result.
+ * Structure of data blob is determined by the media type.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_query(Handle hcontext, void **data);
+
+
+/*
+ * flush any unparsed bitstream.
+ * @param hcontext: handle to VBP context.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_flush(Handle hcontext);
+
+
+#ifdef USE_AVC_SHORT_FORMAT
+/*
+ * update the vbp context using the new data.
+ * @param hcontext: handle to VBP context.
+ * @param newdata: pointer to the new data buffer.
+ * @param size: size of new data buffer.
+ * @param data: pointer to hold a data blob that contains parsing result.
+ * @return VBP_OK on success, anything else on failure.
+ *
+*/
+uint32 vbp_update(Handle hcontext, void *newdata, uint32 size, void **data);
+#endif
+
+#endif /* VBP_LOADER_H */
diff --git a/mixvbp/vbp_manager/vbp_mp42_parser.c b/mixvbp/vbp_manager/vbp_mp42_parser.c
new file mode 100755
index 0000000..6eff5a0
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_mp42_parser.c
@@ -0,0 +1,1483 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+
+#include <dlfcn.h>
+
+#include <string.h>
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_mp42_parser.h"
+#include "vbp_common.h"
+#include "viddec_mp4_parse.h"
+
+
+
+typedef struct vbp_mp42_parser_private_t vbp_mp42_parser_private;
+
+struct vbp_mp42_parser_private_t
+{
+    bool short_video_header;
+};
+
+static uint8 mp4_aspect_ratio_table[][2] =
+{
+    // forbidden
+    {0, 0},
+    {1, 1},
+    {12, 11},
+    {10, 11},
+    {16, 11},
+    {40, 33},
+
+    // reserved
+    {0, 0}
+};
+
+
+/*
+ * Some DivX AVI files contain two frames in one gstbuffer.
+ */
+
+
+uint32 vbp_get_sc_pos_mp42(
+    uint8 *buf,
+    uint32 length,
+    uint32 *sc_end_pos,
+    uint8 *is_normal_sc,
+    uint8* resync_marker,
+    const bool svh_search);
+
+void vbp_on_vop_mp42(vbp_context *pcontext, int list_index);
+void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index);
+void vbp_fill_codec_data(vbp_context *pcontext);
+vbp_picture_data_mp42* vbp_get_mp42_picture_data(vbp_data_mp42 * query_data);
+uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index);
+uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index);
+uint32 vbp_process_video_packet_mp42(vbp_context *pcontext);
+
+static inline uint32 vbp_sprite_trajectory_mp42(
+    void *parent,
+    mp4_VideoObjectLayer_t *vidObjLay,
+    mp4_VideoObjectPlane_t *vidObjPlane);
+
+
+static inline uint32 vbp_sprite_dmv_length_mp42(
+    void * parent,
+    int32_t *dmv_length);
+
+
+/**
+ *
+ */
+uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        // absolutely impossible, just a sanity check
+        return VBP_PARM;
+    }
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_mp4_init");
+    if (pcontext->parser_ops->init == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#ifdef VBP
+    pcontext->parser_ops->parse_sc = NULL;
+#else
+    pcontext->parser_ops->parse_sc = dlsym(pcontext->fd_parser, "viddec_parse_sc_mp4");
+    if (pcontext->parser_ops->parse_sc == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#endif
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_mp4_parse");
+    if (pcontext->parser_ops->parse_syntax == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_mp4_get_context_size");
+    if (pcontext->parser_ops->get_cxt_size == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#ifdef VBP
+    pcontext->parser_ops->is_wkld_done = NULL;
+#else
+    pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_mp4_wkld_done");
+    if (pcontext->parser_ops->is_wkld_done == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#endif
+
+    /* entry point not needed */
+    pcontext->parser_ops->flush = NULL;
+
+    return VBP_OK;
+}
+
+
+/*
+ * For the codec_data passed by gstreamer
+ */
+uint32 vbp_parse_init_data_mp42(vbp_context *pcontext)
+{
+    uint32 ret = VBP_OK;
+    ret = vbp_parse_start_code_mp42(pcontext);
+    return ret;
+}
+
+uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index)
+{
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    viddec_mp4_parser_t *parser =
+        (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+    vbp_mp42_parser_private *parser_private = (vbp_mp42_parser_private *)pcontext->parser_private;
+
+    uint8 is_svh = 0;
+    uint32 current_sc = parser->current_sc;
+    is_svh = parser->cur_sc_prefix ? false : true;
+
+    if (!is_svh)
+    {
+        // remove prefix from current_sc
+        current_sc &= 0x0FF;
+        switch (current_sc)
+        {
+        case MP4_SC_VISUAL_OBJECT_SEQUENCE:
+            VTRACE ("Visual Object Sequence is parsed.\n");
+            query_data->codec_data.profile_and_level_indication
+                = parser->info.profile_and_level_indication;
+            VTRACE ("profile_and_level_indication = 0x%x\n", parser->info.profile_and_level_indication);
+            break;
+
+        case MP4_SC_VIDEO_OBJECT_PLANE:
+            //VTRACE ("Video Object Plane is parsed.\n");
+            vbp_on_vop_mp42(pcontext, list_index);
+            break;
+
+        default:
+            if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) &&
+                (current_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX))
+            {
+                VTRACE ("Video Object Layer is parsed\n");
+                parser_private->short_video_header = FALSE;
+                vbp_fill_codec_data(pcontext);
+            }
+            else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX &&
+                     current_sc >= MP4_SC_VIDEO_OBJECT_MIN)
+            {
+                if (parser->sc_seen == MP4_SC_SEEN_SVH)
+                {
+                    // this should never happen!!!!
+                    WTRACE ("Short video header is parsed.\n");
+                    // vbp_on_vop_svh_mp42(pcontext, list_index);
+                    return VBP_TYPE;
+                }
+            }
+            break;
+        }
+    }
+    else
+    {
+        if (parser->sc_seen == MP4_SC_SEEN_SVH)
+        {
+            //VTRACE ("Short video header is parsed.\n");
+            vbp_on_vop_svh_mp42(pcontext, list_index);
+        }
+    }
+
+    return VBP_OK;
+}
+
+
+
+/*
+* partial frame handling:
+*
+* h.263: the picture header is lost if the first GOB is discarded; a redundant pic header must be
+* conveyed in the packet (RFC 4629) for each following GOB, otherwise the
+* picture can't be decoded.
+*
+* MPEG4: the VideoObjectPlane header is lost if the first slice is discarded. However, the picture
+* is still decodable as long as the header_extension_code is 1 in video_packet_header.
+*
+* MPEG-4 with short header: video_plane_with_short_header is lost if the first GOB
+* is discarded. As this header is not duplicated (RFC 3016), the picture is not decodable.
+*
+* In sum:
+* If buffer contains the 32-bit start code (0x000001xx), proceed as normal.
+*
+* If buffer contains 22 bits of "0000 0000 0000 0000 1000 00", which indicates an h.263
+* picture start code or short_video_start_marker, proceed as normal.
+*
+* If buffer contains 22 bits of "0000 0000 0000 0000 1XXX XX" (where XXX XX starts from 000 01), which
+* indicates an h.263 Group Start code or gob_resync_marker of gob_layer in MPEG-4 with
+* short header, we should report the packet as a partial frame - no more parsing is needed.
+*
+* If buffer contains a string of 0s between 16 and 22 bits long, followed by a '1' bit, which indicates a resync marker,
+* the buffer will be immediately parsed and num_items is set to 0.
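+*
+* For illustration, the prefixes described above look like this on the wire:
+*   0x000001xx                  - 32-bit start code, parsed as normal
+*   0000 0000 0000 0000 1000 00 - h.263 picture start code / short_video_start_marker
+*   0000 0000 0000 0000 1000 01 - h.263 Group Start code / gob_resync_marker (first GOB)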
+*/
+uint32 vbp_parse_start_code_mp42(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint8 *buf = NULL;
+    uint32 size = 0;
+    uint32 sc_end_pos = -1;
+    uint32 bytes_parsed = 0;
+    viddec_mp4_parser_t *pinfo = NULL;
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    vbp_mp42_parser_private *parser_private = (vbp_mp42_parser_private *)pcontext->parser_private;
+
+
+    // reset query data for the new sample buffer
+    query_data->number_picture_data = 0;
+    query_data->number_pictures = 0;
+
+    // emulation prevention byte is not needed
+    cxt->getbits.is_emul_reqd = 0;
+
+    cxt->list.num_items = 0;
+    cxt->list.data[0].stpos = 0;
+    cxt->list.data[0].edpos = cxt->parse_cubby.size;
+
+    buf = cxt->parse_cubby.buf;
+    size = cxt->parse_cubby.size;
+
+    pinfo = (viddec_mp4_parser_t *) &(cxt->codec_data[0]);
+
+    uint8 is_normal_sc = 0;
+    uint8 resync_marker = 0;
+    uint32 found_sc = 0;
+    uint32 ret = VBP_OK;
+
+    while (1)
+    {
+        found_sc = vbp_get_sc_pos_mp42(
+            buf + bytes_parsed,
+            size - bytes_parsed,
+            &sc_end_pos,
+            &is_normal_sc,
+            &resync_marker,
+            parser_private->short_video_header);
+
+        if (found_sc)
+        {
+            cxt->list.data[cxt->list.num_items].stpos = bytes_parsed + sc_end_pos - 3;
+            if (cxt->list.num_items != 0)
+            {
+                cxt->list.data[cxt->list.num_items - 1].edpos = bytes_parsed + sc_end_pos - 3;
+            }
+            bytes_parsed += sc_end_pos;
+
+            cxt->list.num_items++;
+            pinfo->cur_sc_prefix = is_normal_sc;
+        }
+        else
+        {
+            if (cxt->list.num_items != 0)
+            {
+                cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
+                break;
+            }
+            else
+            {
+                WTRACE ("No start-code is found in cubby buffer! The size of cubby is %d\n", size);
+                cxt->list.num_items = 1;
+                cxt->list.data[0].stpos = 0;
+                cxt->list.data[0].edpos = cxt->parse_cubby.size;
+
+                if (resync_marker)
+                {
+                    // either the first slice (GOB) is lost or the parser receives a single slice (GOB)
+                    if (parser_private->short_video_header)
+                    {
+                        // TODO: revisit if HW supports GOB layer decoding for h.263
+                        WTRACE("Partial frame: GOB buffer.\n");
+                        ret = VBP_PARTIAL;
+                    }
+                    else
+                    {
+                        WTRACE("Partial frame: video packet header buffer.\n");
+                        ret = vbp_process_video_packet_mp42(pcontext);
+                    }
+
+                    // set num_items to 0 so the buffer will not be parsed again
+                    cxt->list.num_items = 0;
+                }
+                else
+                {
+                    ETRACE("Invalid data received.\n");
+                    cxt->list.num_items = 0;
+                    return VBP_DATA;
+                }
+
+                break;
+            }
+        }
+    }
+
+    return ret;
+}
+
+uint32 vbp_populate_query_data_mp42(vbp_context *pcontext)
+{
+#if 0
+    vbp_dump_query_data(pcontext);
+#endif
+    return VBP_OK;
+}
+
+vbp_picture_data_mp42* vbp_get_mp42_picture_data(vbp_data_mp42 * query_data)
+{
+    vbp_picture_data_mp42 *picture_data = query_data->picture_data;
+    int num_pictures = query_data->number_picture_data;
+    while (num_pictures > 1)
+    {
+        picture_data = picture_data->next_picture_data;
+        num_pictures--;
+    }
+
+    return picture_data;
+}
+
+void vbp_fill_codec_data(vbp_context *pcontext)
+{
+    viddec_mp4_parser_t *parser =
+        (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    vbp_codec_data_mp42* codec_data = &(query_data->codec_data);
+    vbp_mp42_parser_private *parser_private = (vbp_mp42_parser_private *)pcontext->parser_private;
+
+    codec_data->bit_rate = parser->info.VisualObject.VideoObject.VOLControlParameters.bit_rate;
+
+    codec_data->profile_and_level_indication
+        = parser->info.profile_and_level_indication;
+
+    codec_data->video_object_layer_width = +
parser->info.VisualObject.VideoObject.video_object_layer_width; + + codec_data->video_object_layer_height = + parser->info.VisualObject.VideoObject.video_object_layer_height; + + if (parser->info.VisualObject.VideoSignalType.is_video_signal_type) + { + codec_data->video_format = + parser->info.VisualObject.VideoSignalType.video_format; + } + else + { + // Unspecified video format + codec_data->video_format = 5; + } + + codec_data->video_range = + parser->info.VisualObject.VideoSignalType.video_range; + + if (parser->info.VisualObject.VideoSignalType.is_colour_description) + { + codec_data->matrix_coefficients = + parser->info.VisualObject.VideoSignalType.matrix_coefficients; + } + else if (parser_private->short_video_header) + { + // SMPTE 170M + codec_data->matrix_coefficients = 6; + } + else + { + // ITU-R Recommendation BT.709 + codec_data->matrix_coefficients = 1; + } + + codec_data->short_video_header = parser_private->short_video_header; + + // aspect ratio + codec_data->aspect_ratio_info = parser->info.VisualObject.VideoObject.aspect_ratio_info; + if (codec_data->aspect_ratio_info < 6) + { + codec_data->par_width = mp4_aspect_ratio_table[codec_data->aspect_ratio_info][0]; + codec_data->par_height = mp4_aspect_ratio_table[codec_data->aspect_ratio_info][1]; + } + else if (codec_data->aspect_ratio_info == 15) + { + codec_data->par_width = parser->info.VisualObject.VideoObject.aspect_ratio_info_par_width; + codec_data->par_height = parser->info.VisualObject.VideoObject.aspect_ratio_info_par_height; + } + else + { + codec_data->par_width = 0; + codec_data->par_height = 0; + } +} + +void vbp_fill_slice_data(vbp_context *pcontext, int list_index) +{ + viddec_mp4_parser_t *parser = + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + + if (!parser->info.VisualObject.VideoObject.short_video_header) + { + vbp_process_slices_mp42(pcontext, list_index); + } + else + { + vbp_process_slices_svh_mp42(pcontext, list_index); + } +} + +void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) +{ + viddec_mp4_parser_t *parser = + (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]); + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + + vbp_picture_data_mp42 *picture_data = NULL; + VAPictureParameterBufferMPEG4 *picture_param = NULL; + + if (new_picture_flag) + { + query_data->number_pictures++; + } + + picture_data = query_data->picture_data; + if (picture_data == NULL || query_data->number_picture_data == 0) + { + // first entry + if (picture_data == NULL) + { + picture_data = vbp_malloc_set0(vbp_picture_data_mp42, 1); + query_data->picture_data = picture_data; + if (picture_data == NULL) { + query_data->number_picture_data = 0; + return; + } + } + query_data->number_picture_data = 1; + } + else + { + // find the last active one + int i = query_data->number_picture_data; + while (i > 1) + { + picture_data = picture_data->next_picture_data; + i--; + } + if (picture_data->next_picture_data == NULL) + { + picture_data->next_picture_data = vbp_malloc_set0(vbp_picture_data_mp42, 1); + if (picture_data->next_picture_data == NULL) { + return; + } + } + + query_data->number_picture_data++; + + picture_data = picture_data->next_picture_data; + } + + picture_param = &(picture_data->picture_param); + + uint8 idx = 0; + + picture_data->new_picture_flag = new_picture_flag; + + picture_data->vop_coded + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded; + + + + picture_data->vop_time_increment = + 
parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_time_increment; + + // fill picture_param + + + /* + * NOTE: for short video header, the parser saves vop_width and vop_height + * to VOL->video_object_layer_width and VOL->video_object_layer_height + */ + picture_param->vop_width + = parser->info.VisualObject.VideoObject.video_object_layer_width; + picture_param->vop_height + = parser->info.VisualObject.VideoObject.video_object_layer_height; + + picture_param->forward_reference_picture = VA_INVALID_SURFACE; + picture_param->backward_reference_picture = VA_INVALID_SURFACE; + + // Fill VAPictureParameterBufferMPEG4::vol_fields + + picture_param->vol_fields.bits.short_video_header + = parser->info.VisualObject.VideoObject.short_video_header; + picture_param->vol_fields.bits.chroma_format + = parser->info.VisualObject.VideoObject.VOLControlParameters.chroma_format; + + // TODO: find out why testsuite always set this value to be 0 + picture_param->vol_fields.bits.chroma_format = 0; + + picture_param->vol_fields.bits.interlaced + = parser->info.VisualObject.VideoObject.interlaced; + picture_param->vol_fields.bits.obmc_disable + = parser->info.VisualObject.VideoObject.obmc_disable; + picture_param->vol_fields.bits.sprite_enable + = parser->info.VisualObject.VideoObject.sprite_enable; + picture_param->vol_fields.bits.sprite_warping_accuracy + = parser->info.VisualObject.VideoObject.sprite_info.sprite_warping_accuracy; + picture_param->vol_fields.bits.quant_type + = parser->info.VisualObject.VideoObject.quant_type; + picture_param->vol_fields.bits.quarter_sample + = parser->info.VisualObject.VideoObject.quarter_sample; + picture_param->vol_fields.bits.data_partitioned + = parser->info.VisualObject.VideoObject.data_partitioned; + picture_param->vol_fields.bits.reversible_vlc + = parser->info.VisualObject.VideoObject.reversible_vlc; + picture_param->vol_fields.bits.resync_marker_disable + = parser->info.VisualObject.VideoObject.resync_marker_disable; + picture_param->no_of_sprite_warping_points + = parser->info.VisualObject.VideoObject.sprite_info.no_of_sprite_warping_points; + + for (idx = 0; idx < 3; idx++) + { + picture_param->sprite_trajectory_du[idx] + = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_du[idx]; + picture_param->sprite_trajectory_dv[idx] + = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_dv[idx]; + } + + picture_param->quant_precision + = parser->info.VisualObject.VideoObject.quant_precision; + + // fill VAPictureParameterBufferMPEG4::vop_fields + + + if (!parser->info.VisualObject.VideoObject.short_video_header) + { + picture_param->vop_fields.bits.vop_coding_type + = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type; + } + else + { + picture_param->vop_fields.bits.vop_coding_type + = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.picture_coding_type; + } + + // TODO: fill picture_param->vop_fields.bits.backward_reference_vop_coding_type + // This shall be done in mixvideoformat_mp42. 
See M42 spec 7.6.7
+
+    if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B)
+    {
+        picture_param->vop_fields.bits.backward_reference_vop_coding_type
+            = picture_param->vop_fields.bits.vop_coding_type;
+    }
+
+    picture_param->vop_fields.bits.vop_rounding_type
+        = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_rounding_type;
+    picture_param->vop_fields.bits.intra_dc_vlc_thr
+        = parser->info.VisualObject.VideoObject.VideoObjectPlane.intra_dc_vlc_thr;
+    picture_param->vop_fields.bits.top_field_first
+        = parser->info.VisualObject.VideoObject.VideoObjectPlane.top_field_first;
+    picture_param->vop_fields.bits.alternate_vertical_scan_flag
+        = parser->info.VisualObject.VideoObject.VideoObjectPlane.alternate_vertical_scan_flag;
+
+    picture_param->vop_fcode_forward
+        = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_forward;
+    picture_param->vop_fcode_backward
+        = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_backward;
+    picture_param->vop_time_increment_resolution
+        = parser->info.VisualObject.VideoObject.vop_time_increment_resolution;
+
+    // short header related
+    picture_param->num_gobs_in_vop
+        = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_gobs_in_vop;
+    picture_param->num_macroblocks_in_gob
+        = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_macroblocks_in_gob;
+
+    // for direct mode prediction
+    picture_param->TRB = parser->info.VisualObject.VideoObject.TRB;
+    picture_param->TRD = parser->info.VisualObject.VideoObject.TRD;
+}
+
+void vbp_fill_iq_matrix_buffer(vbp_context *pcontext)
+{
+    viddec_mp4_parser_t *parser =
+        (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+
+    mp4_VOLQuant_mat_t *quant_mat_info =
+        &(parser->info.VisualObject.VideoObject.quant_mat_info);
+
+    VAIQMatrixBufferMPEG4 *iq_matrix = NULL;
+
+    iq_matrix = &(query_data->iq_matrix_buffer);
+
+    iq_matrix->load_intra_quant_mat = 1; //quant_mat_info->load_intra_quant_mat;
+    iq_matrix->load_non_intra_quant_mat = 1; // = quant_mat_info->load_nonintra_quant_mat;
+    memcpy(iq_matrix->intra_quant_mat, quant_mat_info->intra_quant_mat, 64);
+    memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat, 64);
+}
+
+
+void vbp_on_vop_mp42(vbp_context *pcontext, int list_index)
+{
+    vbp_fill_codec_data(pcontext);
+    vbp_fill_picture_param(pcontext, 1);
+    vbp_fill_iq_matrix_buffer(pcontext);
+    vbp_fill_slice_data(pcontext, list_index);
+}
+
+void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index)
+{
+    vbp_fill_codec_data(pcontext);
+    vbp_fill_picture_param(pcontext, 1);
+    vbp_fill_iq_matrix_buffer(pcontext);
+    vbp_fill_slice_data(pcontext, list_index);
+}
+
+uint32 vbp_get_sc_pos_mp42(
+    uint8 *buf,
+    uint32 length,
+    uint32 *sc_end_pos,
+    uint8 *is_normal_sc,
+    uint8 *resync_marker,
+    const bool svh_search)
+{
+    uint8 *ptr = buf;
+    uint32 size;
+    uint32 data_left = 0, phase = 0, ret = 0;
+    size = 0;
+
+    data_left = length;
+    *sc_end_pos = -1;
+
+    /* parse while there is more data and the start code has not been found */
+    while ((data_left > 0) && (phase < 3))
+    {
+        /* Check if we are byte aligned & phase=0; if that's the case we can
+           work a word at a time instead of a byte */
+        if (((((uint32) ptr) & 0x3) == 0) && (phase == 0))
+        {
+            while (data_left > 3)
+            {
+                uint32 data;
+                char mask1 = 0, mask2 = 0;
+
+                data = *((uint32 *) ptr);
+#ifndef MFDBIGENDIAN
+                data = SWAP_WORD(data);
+#endif
+                mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+                mask2 =
(FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1)); + /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need + two consecutive zero bytes for a start code pattern */ + if (mask1 && mask2) + { + /* Success so skip 4 bytes and start over */ + ptr += 4; + size += 4; + data_left -= 4; + continue; + } + else + { + break; + } + } + } + + /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected + two zero bytes in the word so we look one byte at a time*/ + if (data_left > 0) + { + if (*ptr == FIRST_STARTCODE_BYTE) + { + /* Phase can be 3 only if third start code byte is found */ + phase++; + ptr++; + size++; + data_left--; + if (phase > 2) + { + phase = 2; + + if ((((uint32) ptr) & 0x3) == 0) + { + while (data_left > 3) + { + if (*((uint32 *) ptr) != 0) + { + break; + } + ptr += 4; + size += 4; + data_left -= 4; + } + } + } + } + else + { + uint8 normal_sc = 0, short_sc = 0; + if (phase == 2) + { + normal_sc = (*ptr == THIRD_STARTCODE_BYTE); + if (svh_search) + { + short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC)); + } + *is_normal_sc = normal_sc; + + // at least 16-bit 0, may be GOB start code or + // resync marker. + *resync_marker = 1; + } + + if (!(normal_sc | short_sc)) + { + phase = 0; + } + else + { + /* Match for start code so update context with byte position */ + *sc_end_pos = size; + phase = 3; + } + ptr++; + size++; + data_left--; + } + } + } + if ((data_left > 0) && (phase == 3)) + { + (*sc_end_pos)++; + phase++; + ret = 1; + } + + // Return 1 only if phase is 4, else always return 0 + return ret; +} + + +uint32 vbp_macroblock_number_length_mp42(uint32 numOfMbs) +{ + uint32 length = 0; + numOfMbs--; + do + { + numOfMbs >>= 1; + length++; + } + while (numOfMbs); + return length; +} + +uint32 vbp_parse_video_packet_header_mp42( + void *parent, + viddec_mp4_parser_t *parser_cxt, + uint16_t *quant_scale, + uint32 *macroblock_number) +{ + uint32 ret = VBP_DATA; + mp4_Info_t *pInfo = &(parser_cxt->info); + mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject); + mp4_VideoObjectPlane_t *vidObjPlane = + &(pInfo->VisualObject.VideoObject.VideoObjectPlane); + + uint32 code = 0; + int32_t getbits = 0; + + uint16_t _quant_scale = 0; + uint32 _macroblock_number = 0; + uint32 header_extension_codes = 0; + uint8 vop_coding_type = vidObjPlane->vop_coding_type; + + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) + { + return VBP_DATA; + } + + do + { + // get macroblock_number + uint16_t mbs_x = (vidObjLay->video_object_layer_width + 15) >> 4; + uint16_t mbs_y = (vidObjLay->video_object_layer_height + 15) >> 4; + uint32 length = vbp_macroblock_number_length_mp42(mbs_x * mbs_y); + + getbits = viddec_pm_get_bits(parent, &code, length); + BREAK_GETBITS_FAIL(getbits, ret); + + _macroblock_number = code; + + // quant_scale + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) + { + getbits = viddec_pm_get_bits(parent, &code, vidObjLay->quant_precision); + BREAK_GETBITS_FAIL(getbits, ret); + _quant_scale = code; + } + + // header_extension_codes + if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) + { + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_FAIL(getbits, ret); + header_extension_codes = code; + } + + if (header_extension_codes) + { + // modulo time base + do + { + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_FAIL(getbits, ret); + } while (code); + + // marker_bit + getbits = 
viddec_pm_get_bits(parent, &code, 1);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            // vop_time_increment
+            uint32 numbits = 0;
+            numbits = vidObjLay->vop_time_increment_resolution_bits;
+            if (numbits == 0)
+            {
+                // the resolution bits should never be 0; read at least one bit
+                numbits = 1;
+            }
+            getbits = viddec_pm_get_bits(parent, &code, numbits);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            vidObjPlane->vop_time_increment = code;
+
+
+            // marker_bit
+            getbits = viddec_pm_get_bits(parent, &code, 1);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            // vop_coding_type
+            getbits = viddec_pm_get_bits(parent, &code, 2);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            vop_coding_type = code & 0x3;
+            vidObjPlane->vop_coding_type = vop_coding_type;
+
+
+            if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
+            {
+                // intra_dc_vlc_thr
+                getbits = viddec_pm_get_bits(parent, &code, 3);
+                BREAK_GETBITS_FAIL(getbits, ret);
+
+                vidObjPlane->intra_dc_vlc_thr = code;
+                if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) &&
+                    (vop_coding_type == MP4_VOP_TYPE_S) &&
+                    (vidObjLay->sprite_info.no_of_sprite_warping_points > 0))
+                {
+                    if (vbp_sprite_trajectory_mp42(parent, vidObjLay, vidObjPlane) != VBP_OK)
+                    {
+                        break;
+                    }
+                }
+
+                if (vidObjLay->reduced_resolution_vop_enable &&
+                    (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) &&
+                    ((vop_coding_type == MP4_VOP_TYPE_I) ||
+                     (vop_coding_type == MP4_VOP_TYPE_P)))
+                {
+                    // vop_reduced_resolution
+                    getbits = viddec_pm_get_bits(parent, &code, 1);
+                    BREAK_GETBITS_FAIL(getbits, ret);
+                }
+
+                if (vop_coding_type != MP4_VOP_TYPE_I)
+                {
+                    // vop_fcode_forward
+                    getbits = viddec_pm_get_bits(parent, &code, 3);
+                    BREAK_GETBITS_FAIL(getbits, ret);
+                    vidObjPlane->vop_fcode_forward = code;
+                }
+
+                if (vop_coding_type == MP4_VOP_TYPE_B)
+                {
+                    // vop_fcode_backward
+                    getbits = viddec_pm_get_bits(parent, &code, 3);
+                    BREAK_GETBITS_FAIL(getbits, ret);
+                    vidObjPlane->vop_fcode_backward = code;
+                }
+            }
+        }
+
+        if (vidObjLay->newpred_enable)
+        {
+            // New pred mode not supported in HW; but does libva support this?
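+            // (either way, reject the buffer below so the caller can decide
+            // how to handle the unsupported stream)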
+ ret = VBP_DATA; + break; + } + + *quant_scale = _quant_scale; + *macroblock_number = _macroblock_number; + + ret = VBP_OK; + } + while (0); + return ret; +} + +uint32 vbp_resync_marker_Length_mp42(viddec_mp4_parser_t *parser_cxt) +{ + mp4_Info_t *pInfo = &(parser_cxt->info); + mp4_VideoObjectPlane_t *vidObjPlane = + &(pInfo->VisualObject.VideoObject.VideoObjectPlane); + + uint32 resync_marker_length = 0; + if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) + { + resync_marker_length = 17; + } + else if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B) + { + uint8 fcode_max = vidObjPlane->vop_fcode_forward; + if (fcode_max < vidObjPlane->vop_fcode_backward) + { + fcode_max = vidObjPlane->vop_fcode_backward; + } + resync_marker_length = 16 + fcode_max; + + // resync_marker is max(15+fcode,17) zeros followed by a one + if (resync_marker_length < 18) + resync_marker_length = 18; + } + else + { + resync_marker_length = 16 + vidObjPlane->vop_fcode_forward; + } + return resync_marker_length; +} + +uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index) +{ + uint32 ret = VBP_OK; + + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + viddec_pm_cxt_t *parent = pcontext->parser_cxt; + viddec_mp4_parser_t *parser_cxt = + (viddec_mp4_parser_t *) &(parent->codec_data[0]); + + vbp_picture_data_mp42 *picture_data = vbp_get_mp42_picture_data(query_data); + vbp_slice_data_mp42 *slice_data = &(picture_data->slice_data); + VASliceParameterBufferMPEG4* slice_param = &(slice_data->slice_param); + + uint8 is_emul = 0; + uint32 bit_offset = 0; + uint32 byte_offset = 0; + + // The offsets are relative to parent->parse_cubby.buf + viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); + + slice_data->buffer_addr = parent->parse_cubby.buf; + + slice_data->slice_offset = + byte_offset + parent->list.data[list_index].stpos; + slice_data->slice_size = + parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset; + + slice_param->slice_data_size = slice_data->slice_size; + slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + slice_param->slice_data_offset = 0; + slice_param->macroblock_offset = bit_offset; + slice_param->macroblock_number = 0; + slice_param->quant_scale + = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant; + + return ret; +} +#define SEARCH_SYNC_OPT +uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) +{ + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + viddec_pm_cxt_t *parent = pcontext->parser_cxt; + viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]); + + vbp_picture_data_mp42 *picture_data = NULL; + vbp_slice_data_mp42 *slice_data = NULL; + VASliceParameterBufferMPEG4* slice_param = NULL; + + uint32 ret = VBP_OK; + + uint8 is_emul = 0; + uint32 bit_offset = 0; + uint32 byte_offset = 0; + + uint32 code = 0; + int32_t getbits = 0; + uint32 resync_marker_length = 0; + + /* The offsets are relative to parent->parse_cubby.buf */ + viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul); + + picture_data = vbp_get_mp42_picture_data(query_data); + slice_data = &(picture_data->slice_data); + slice_param = &(slice_data->slice_param); + + slice_data->buffer_addr = parent->parse_cubby.buf; + + slice_data->slice_offset = byte_offset + parent->list.data[list_index].stpos; + slice_data->slice_size = + parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset; + + slice_param->slice_data_size = 
slice_data->slice_size;
+    slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+    slice_param->slice_data_offset = 0;
+    slice_param->macroblock_offset = bit_offset;
+    slice_param->macroblock_number = 0;
+    slice_param->quant_scale
+        = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane.vop_quant;
+
+    if (parser_cxt->info.VisualObject.VideoObject.resync_marker_disable)
+    {
+        // no resync_marker
+        return VBP_OK;
+    }
+
+    // scan for resync_marker
+    viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+    if (bit_offset)
+    {
+        // not byte-aligned; read up to the next byte boundary
+        getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset);
+        if (getbits == -1)
+        {
+            return VBP_DATA;
+        }
+    }
+
+    // get resync_marker_length
+    resync_marker_length = vbp_resync_marker_Length_mp42(parser_cxt);
+
+    uint16_t quant_scale = 0;
+    uint32 macroblock_number = 0;
+
+    while (1)
+    {
+#ifndef SEARCH_SYNC_OPT
+        getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length);
+
+        // return VBP_OK as resync_marker may not be present
+        BREAK_GETBITS_FAIL(getbits, ret);
+
+        if (code != 1)
+        {
+            getbits = viddec_pm_get_bits(parent, &code, 8);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            continue;
+        }
+#else
+
+        // read 3 bytes since resync_marker_length is between 17 bits and 23 bits
+        if (parent->getbits.bstrm_buf.buf_index + 3 > parent->getbits.bstrm_buf.buf_end)
+        {
+            break;
+        }
+
+        code = parent->getbits.bstrm_buf.buf[parent->getbits.bstrm_buf.buf_index] << 16 |
+               parent->getbits.bstrm_buf.buf[parent->getbits.bstrm_buf.buf_index+1] << 8 |
+               parent->getbits.bstrm_buf.buf[parent->getbits.bstrm_buf.buf_index+2];
+
+        if (code >> (24-resync_marker_length) != 1)
+        {
+            int byte0 = code & 0xff;
+            int byte1 = (code >> 8) & 0xff;
+            if (byte0 != 0)
+            {
+                parent->getbits.bstrm_buf.buf_index += 3;
+            }
+            else if (byte1 != 0)
+            {
+                parent->getbits.bstrm_buf.buf_index += 2;
+            }
+            else
+            {
+                parent->getbits.bstrm_buf.buf_index += 1;
+            }
+            continue;
+        }
+#endif
+        // We found the resync_marker
+        viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+        // update slice data as we found the resync_marker
+        slice_data->slice_size -=
+            (parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset);
+        slice_param->slice_data_size = slice_data->slice_size;
+
+        // skip the resync marker
+        getbits = viddec_pm_get_bits(parent, &code, resync_marker_length);
+
+        // return VBP_DATA on failure; this should never happen!
+        BREAK_GETBITS_FAIL(getbits, ret);
+
+        // parse video_packet_header
+        ret = vbp_parse_video_packet_header_mp42(parent, parser_cxt,
+                                                 &quant_scale, &macroblock_number);
+
+        if (ret != VBP_OK)
+        {
+            ETRACE("Failed to parse video packet header.\n");
+            return ret;
+        }
+
+        // new_picture_flag = 0, this is not the first slice of a picture
+        vbp_fill_picture_param(pcontext, 0);
+
+        picture_data = vbp_get_mp42_picture_data(query_data);
+        slice_data = &(picture_data->slice_data);
+        slice_param = &(slice_data->slice_param);
+
+
+        viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+        slice_data->buffer_addr = parent->parse_cubby.buf;
+
+        slice_data->slice_offset =
+            byte_offset + parent->list.data[list_index].stpos;
+        slice_data->slice_size =
+            parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset;
+
+        slice_param->slice_data_size = slice_data->slice_size;
+        slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+        slice_param->slice_data_offset = 0;
+        slice_param->macroblock_offset = bit_offset;
+        slice_param->macroblock_number = macroblock_number;
+        slice_param->quant_scale = quant_scale;
+
+        if (bit_offset)
+        {
+            // byte-align the parsing position
+            getbits = viddec_pm_skip_bits(parent, 8 - bit_offset);
+            if (getbits == -1)
+            {
+                ETRACE("Failed to align parser to byte position.\n");
+                return VBP_DATA;
+            }
+        }
+
+    }
+
+    return VBP_OK;
+}
+
+uint32 vbp_process_video_packet_mp42(vbp_context *pcontext)
+{
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    viddec_pm_cxt_t *parent = pcontext->parser_cxt;
+    viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]);
+    uint32 code = 0;
+    int32_t getbits = 0;
+
+    uint32 ret = VBP_DATA;
+
+
+    // set up the bitstream parser
+    parent->getbits.list = &(parent->list);
+
+    parent->getbits.bstrm_buf.buf = parent->parse_cubby.buf;
+    parent->getbits.bstrm_buf.buf_index = 0;
+    parent->getbits.bstrm_buf.buf_st = 0;
+    parent->getbits.bstrm_buf.buf_end = parent->parse_cubby.size;
+    parent->getbits.bstrm_buf.buf_bitoff = 0;
+
+    parent->getbits.au_pos = 0;
+    parent->getbits.list_off = 0;
+    parent->getbits.phase = 0;
+    parent->getbits.emulation_byte_counter = 0;
+
+    parent->list.start_offset = 0;
+    parent->list.end_offset = parent->parse_cubby.size;
+    parent->list.total_bytes = parent->parse_cubby.size;
+
+
+    // skip leading zero bytes
+    while (code == 0)
+    {
+        getbits = viddec_pm_get_bits(parent, &code, 8);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        getbits = viddec_pm_peek_bits(parent, &code, 8);
+        BREAK_GETBITS_FAIL(getbits, ret);
+    }
+
+    if (getbits != 0)
+    {
+        return VBP_DATA;
+    }
+
+    // the resync-marker is represented as 17-23 bits (16-22 bits of 0);
+    // as 16 bits of '0' have been skipped, we try to parse the buffer bit by bit
+    // until a 1 bit is encountered or up to 7 bits are parsed.
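+    // (for example, an I-VOP resync marker is 17 bits in total: sixteen '0'
+    // bits followed by a single '1' bit; see vbp_resync_marker_Length_mp42)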
+    code = 0;
+    uint8 count = 0;
+    while (code == 0 && count < 7)
+    {
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        count++;
+    }
+
+    if (code == 0 || getbits != 0)
+    {
+        ETRACE("no resync-marker in the buffer.\n");
+        return ret;
+    }
+
+    // the resync marker is skipped
+    uint16_t quant_scale = 0;
+    uint32 macroblock_number = 0;
+
+    // parse video_packet_header
+    vbp_parse_video_packet_header_mp42(parent, parser_cxt, &quant_scale, &macroblock_number);
+
+    // new_picture_flag = 0, this is not the first slice of a picture
+    vbp_fill_picture_param(pcontext, 0);
+
+    vbp_picture_data_mp42 *picture_data = NULL;
+    vbp_slice_data_mp42 *slice_data = NULL;
+    VASliceParameterBufferMPEG4* slice_param = NULL;
+
+    picture_data = vbp_get_mp42_picture_data(query_data);
+    slice_data = &(picture_data->slice_data);
+    slice_param = &(slice_data->slice_param);
+
+    ret = vbp_process_slices_mp42(pcontext, 0);
+
+    // update the slice's QP and macroblock number, as they are set to 0 by default.
+    slice_param->macroblock_number = macroblock_number;
+    slice_param->quant_scale = quant_scale;
+
+    // the VOP must be coded!
+    picture_data->vop_coded = 1;
+    return ret;
+
+}
+
+
+static inline uint32 vbp_sprite_dmv_length_mp42(
+    void * parent,
+    int32_t *dmv_length)
+{
+    uint32 code, skip;
+    int32_t getbits = 0;
+    uint32 ret = VBP_DATA;
+    *dmv_length = 0;
+    skip = 3;
+    do
+    {
+        getbits = viddec_pm_peek_bits(parent, &code, skip);
+        BREAK_GETBITS_FAIL(getbits, ret);
+
+        if (code == 7)
+        {
+            viddec_pm_skip_bits(parent, skip);
+            getbits = viddec_pm_peek_bits(parent, &code, 9);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            skip = 1;
+            while ((code & 256) != 0)
+            {
+                // count the number of 1 bits
+                code <<= 1;
+                skip++;
+            }
+            *dmv_length = 5 + skip;
+        }
+        else
+        {
+            skip = (code <= 1) ?
2 : 3; + *dmv_length = code - 1; + } + viddec_pm_skip_bits(parent, skip); + ret = VBP_OK; + + } + while (0); + return ret; +} + + +static inline uint32 vbp_sprite_trajectory_mp42( + void *parent, + mp4_VideoObjectLayer_t *vidObjLay, + mp4_VideoObjectPlane_t *vidObjPlane) +{ + uint32 code, i; + int32_t dmv_length = 0, dmv_code = 0, getbits = 0; + uint32 ret = VBP_OK; + for (i = 0; i < (uint32) vidObjLay->sprite_info.no_of_sprite_warping_points; i++) + { + ret = VBP_DATA; + ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length); + if (ret != VBP_OK) + { + break; + } + if (dmv_length <= 0) + { + dmv_code = 0; + } + else + { + getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length); + BREAK_GETBITS_FAIL(getbits, ret); + dmv_code = (int32_t) code; + if ((dmv_code & (1 << (dmv_length - 1))) == 0) + { + dmv_code -= (1 << dmv_length) - 1; + } + } + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_FAIL(getbits, ret); + if (code != 1) + { + ret = VBP_DATA; + break; + } + vidObjPlane->warping_mv_code_du[i] = dmv_code; + // TODO: create another inline function to avoid code duplication + ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length); + if (ret != VBP_OK) + { + break; + } + // reset return value in case early break + ret = VBP_DATA; + if (dmv_length <= 0) + { + dmv_code = 0; + } + else + { + getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length); + BREAK_GETBITS_FAIL(getbits, ret); + dmv_code = (int32_t) code; + if ((dmv_code & (1 << (dmv_length - 1))) == 0) + { + dmv_code -= (1 << dmv_length) - 1; + } + } + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_FAIL(getbits, ret); + if (code != 1) + { + break; + } + vidObjPlane->warping_mv_code_dv[i] = dmv_code; + + // set to VBP_OK + ret = VBP_OK; + + } + return ret; +} + + +/* + * free memory of vbp_data_mp42 structure and its members + */ +uint32 vbp_free_query_data_mp42(vbp_context *pcontext) +{ + vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data; + vbp_picture_data_mp42* current = NULL; + vbp_picture_data_mp42* next = NULL; + + if (pcontext->parser_private) + { + free(pcontext->parser_private); + pcontext->parser_private = NULL; + } + if (query_data) + { + current = query_data->picture_data; + while (current != NULL) + { + next = current->next_picture_data; + free(current); + current = next; + } + + free(query_data); + } + + pcontext->query_data = NULL; + return VBP_OK; +} + +/* + * Allocate memory for vbp_data_mp42 structure and all its members. 
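+ * The parser-private state (the short_video_header flag) is allocated
+ * here as well and is released together with the query data by
+ * vbp_free_query_data_mp42.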
+ */
+uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext)
+{
+    vbp_data_mp42 *query_data;
+    pcontext->query_data = NULL;
+
+    query_data = vbp_malloc_set0(vbp_data_mp42, 1);
+    if (query_data == NULL)
+    {
+        goto cleanup;
+    }
+
+    pcontext->query_data = (void *) query_data;
+    query_data->picture_data = NULL;
+    query_data->number_picture_data = 0;
+    query_data->number_pictures = 0;
+
+    pcontext->parser_private = NULL;
+    vbp_mp42_parser_private *parser_private = NULL;
+
+    parser_private = vbp_malloc_set0(vbp_mp42_parser_private, 1);
+    if (NULL == parser_private)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer */
+    pcontext->parser_private = (void *)parser_private;
+
+    /* init the flag */
+    parser_private->short_video_header = TRUE;
+    return VBP_OK;
+
+cleanup:
+
+    vbp_free_query_data_mp42(pcontext);
+
+    return VBP_MEM;
+}
diff --git a/mixvbp/vbp_manager/vbp_mp42_parser.h b/mixvbp/vbp_manager/vbp_mp42_parser.h
new file mode 100755
index 0000000..93416b7
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_mp42_parser.h
@@ -0,0 +1,66 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_MP42_PARSER_H
+#define VBP_MP42_PARSER_H
+
+/*
+ * set up the parser's entry points
+ */
+
+uint32 vbp_init_parser_entries_mp42(vbp_context *pcontext);
+
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_mp42(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_mp42(vbp_context *pcontext);
+
+/*
+ * parse start code.
+ */
+uint32 vbp_parse_start_code_mp42(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_mp42(vbp_context *pcontext);
+
+#endif /*VBP_MP42_PARSER_H*/
diff --git a/mixvbp/vbp_manager/vbp_utils.c b/mixvbp/vbp_manager/vbp_utils.c
new file mode 100755
index 0000000..72548f0
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_utils.c
@@ -0,0 +1,618 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#include <dlfcn.h>
+
+#include "vc1.h"
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vc1_parser.h"
+#include "vbp_h264_parser.h"
+#include "vbp_mp42_parser.h"
+#ifdef USE_HW_VP8
+#include "vbp_vp8_parser.h"
+#endif
+#ifdef USE_AVC_SHORT_FORMAT
+#include "vbp_h264secure_parser.h"
+#endif
+
+
+/* buffer counter */
+uint32 buffer_counter = 0;
+
+
+void* vbp_try_malloc0(uint32 size) {
+    void* pMem = malloc(size);
+    if (pMem)
+        memset(pMem, 0, size);
+    return pMem;
+}
+
+/**
+ *
+ * uninitialize parser context
+ *
+ */
+static uint32 vbp_utils_uninitialize_context(vbp_context *pcontext)
+{
+    uint32 error = VBP_OK;
+
+    if (NULL == pcontext)
+    {
+        return error;
+    }
+
+    /* no need to reset parser entry points. */
+
+    free(pcontext->parser_ops);
+    pcontext->parser_ops = NULL;
+
+
+    if (pcontext->fd_parser)
+    {
+        dlclose(pcontext->fd_parser);
+        pcontext->fd_parser = NULL;
+    }
+
+    return error;
+}
+
+/**
+ *
+ * initialize parser context
+ *
+ */
+static uint32 vbp_utils_initialize_context(vbp_context *pcontext)
+{
+    uint32 error = VBP_OK;
+    char *parser_name;
+
+    switch (pcontext->parser_type)
+    {
+    case VBP_VC1:
+#ifndef ANDROID
+        parser_name = "libmixvbp_vc1.so.0";
+#else
+        parser_name = "libmixvbp_vc1.so";
+#endif
+        break;
+
+    /* MPEG-2 parser is not supported.
*/ + + /* case VBP_MPEG2: + parser_name = "libmixvbp_mpeg2.so.0"; + break;*/ + + case VBP_MPEG4: +#ifndef ANDROID + parser_name = "libmixvbp_mpeg4.so.0"; +#else + parser_name = "libmixvbp_mpeg4.so"; +#endif + break; + + case VBP_H264: +#ifndef ANDROID + parser_name = "libmixvbp_h264.so.0"; +#else + parser_name = "libmixvbp_h264.so"; +#endif + break; +#ifdef USE_HW_VP8 + case VBP_VP8: +#ifndef ANDROID + parser_name = "libmixvbp_vp8.so.0"; +#else + parser_name = "libmixvbp_vp8.so"; +#endif + break; +#endif + +#ifdef USE_AVC_SHORT_FORMAT + case VBP_H264SECURE: + parser_name = "libmixvbp_h264secure.so"; + break; +#endif + + default: + WTRACE("Unsupported parser type!"); + return VBP_TYPE; + } + + pcontext->fd_parser = dlopen(parser_name, RTLD_LAZY); + if (NULL == pcontext->fd_parser) + { + ETRACE("Failed to load parser %s.", parser_name); + error = VBP_LOAD; + goto cleanup; + } + + pcontext->parser_ops = vbp_malloc(viddec_parser_ops_t, 1); + if (NULL == pcontext->parser_ops) + { + ETRACE("Failed to allocate memory"); + error = VBP_MEM; + goto cleanup; + } + +#define SET_FUNC_POINTER(X, Y)\ + case X:\ + pcontext->func_init_parser_entries = vbp_init_parser_entries_##Y;\ + pcontext->func_allocate_query_data = vbp_allocate_query_data_##Y;\ + pcontext->func_free_query_data = vbp_free_query_data_##Y;\ + pcontext->func_parse_init_data = vbp_parse_init_data_##Y;\ + pcontext->func_parse_start_code = vbp_parse_start_code_##Y;\ + pcontext->func_process_parsing_result = vbp_process_parsing_result_##Y;\ + pcontext->func_populate_query_data = vbp_populate_query_data_##Y;\ + break; + + switch (pcontext->parser_type) + { + SET_FUNC_POINTER(VBP_VC1, vc1); + SET_FUNC_POINTER(VBP_MPEG4, mp42); + SET_FUNC_POINTER(VBP_H264, h264); +#ifdef USE_HW_VP8 + SET_FUNC_POINTER(VBP_VP8, vp8); +#endif +#ifdef USE_AVC_SHORT_FORMAT + SET_FUNC_POINTER(VBP_H264SECURE, h264secure); +#endif + } +#ifdef USE_AVC_SHORT_FORMAT + if (pcontext->parser_type == VBP_H264SECURE) { + pcontext->func_update_data = vbp_update_data_h264secure; + } +#endif + + /* set entry points for parser operations: + init + parse_sc + parse_syntax + get_cxt_size + is_wkld_done + is_frame_start + */ + error = pcontext->func_init_parser_entries(pcontext); + +cleanup: + + if (VBP_OK != error) + { + /* no need to log error. the loader would have done so already. */ + vbp_utils_uninitialize_context(pcontext); + } + + return error; +} + +/** +* +* free allocated memory. +* +*/ +static uint32 vbp_utils_free_parser_memory(vbp_context *pcontext) +{ + if (NULL == pcontext) + { + return VBP_OK; + } + + if (pcontext->func_free_query_data) + { + pcontext->func_free_query_data(pcontext); + } + + free(pcontext->workload2); + pcontext->workload2 = NULL; + + free(pcontext->workload1); + pcontext->workload1 = NULL; + + free(pcontext->persist_mem); + pcontext->persist_mem = NULL; + + free(pcontext->parser_cxt); + pcontext->parser_cxt = NULL; + + return VBP_OK; +} + + +/** + * + * allocate memory + * + */ +static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext) +{ + /* pcontext is guaranteed to be valid input. */ + uint32 error = VBP_OK; + viddec_parser_memory_sizes_t sizes; + + pcontext->parser_cxt = vbp_malloc(viddec_pm_cxt_t, 1); + if (NULL == pcontext->parser_cxt) + { + ETRACE("Failed to allocate memory"); + error = VBP_MEM; + goto cleanup; + } + + /* invoke parser entry to get context size */ + /* no return value, should always succeed. 
+
+/**
+ *
+ * allocate memory
+ *
+ */
+static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext)
+{
+    /* pcontext is guaranteed to be valid input. */
+    uint32 error = VBP_OK;
+    viddec_parser_memory_sizes_t sizes;
+
+    pcontext->parser_cxt = vbp_malloc(viddec_pm_cxt_t, 1);
+    if (NULL == pcontext->parser_cxt)
+    {
+        ETRACE("Failed to allocate memory");
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    /* invoke parser entry to get context size */
+    /* no return value; should always succeed. */
+    pcontext->parser_ops->get_cxt_size(&sizes);
+
+    /* allocate persistent memory for parser */
+    if (sizes.persist_size)
+    {
+        pcontext->persist_mem = malloc(sizes.persist_size);
+        if (NULL == pcontext->persist_mem)
+        {
+            ETRACE("Failed to allocate memory");
+            error = VBP_MEM;
+            goto cleanup;
+        }
+    }
+    else
+    {
+        /* OK for VC-1, MPEG2 and MPEG4. */
+        if ((VBP_VC1 == pcontext->parser_type) ||
+            (VBP_MPEG2 == pcontext->parser_type) ||
+            (VBP_MPEG4 == pcontext->parser_type)
+#ifdef USE_HW_VP8
+            || (VBP_VP8 == pcontext->parser_type)
+#endif
+)
+        {
+            pcontext->persist_mem = NULL;
+        }
+        else
+        {
+            /* mandatory for H.264 */
+            ETRACE("Failed to allocate memory");
+            error = VBP_TYPE;
+            goto cleanup;
+        }
+    }
+
+    /* allocate a new workload with MAX_WORKLOAD_ITEMS (1000) items. */
+    pcontext->workload1 = malloc(sizeof(viddec_workload_t) +
+        (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t)));
+    if (NULL == pcontext->workload1)
+    {
+        ETRACE("Failed to allocate memory");
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    /* allocate a second workload with MAX_WORKLOAD_ITEMS (1000) items. */
+    pcontext->workload2 = malloc(sizeof(viddec_workload_t) +
+        (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t)));
+    if (NULL == pcontext->workload2)
+    {
+        ETRACE("Failed to allocate memory");
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    /* allocate format-specific query data */
+    error = pcontext->func_allocate_query_data(pcontext);
+
+cleanup:
+    if (error != VBP_OK)
+    {
+        vbp_utils_free_parser_memory(pcontext);
+    }
+    return error;
+}
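+
+/* Editor's note -- illustration only, not part of the original patch. The workload
+ * allocations above use the usual header-plus-trailing-array idiom: one malloc()
+ * covers the fixed header and a fixed-capacity item array behind it:
+ *
+ *     viddec_workload_t *wl = malloc(sizeof(viddec_workload_t) +
+ *         MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t));
+ *     // items are then addressable behind the header, assuming
+ *     // viddec_workload_t ends in a trailing item array.
+ */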
+
+
+
+/**
+ *
+ * parse the elementary sample buffer or codec configuration data
+ *
+ */
+static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_flag)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    uint32 error = VBP_OK;
+    int i;
+
+    /* reset the list count. func_parse_init_data or func_parse_start_code will
+     * set it to the number of sequence headers, picture headers or slice headers
+     * found in the sample buffer.
+     */
+    cxt->list.num_items = 0;
+
+    /**
+     * READ THIS NOTE: cxt->getbits.is_emul_reqd must be set to 1
+     * for H.264, MPEG-4 and VC-1 advanced profile, and set to 0
+     * for VC-1 simple or main profile when parsing the frame
+     * buffer. When parsing the sequence header, it must always be
+     * set to 1.
+     *
+     * PARSER IMPLEMENTOR: set this flag in the parser.
+     */
+
+    /*
+    if ((codec_type == VBP_H264) || (codec_type == VBP_MPEG4))
+    {
+        cxt->getbits.is_emul_reqd = 1;
+    }
+    */
+
+
+    /* populate the list. */
+    if (init_data_flag)
+    {
+        error = pcontext->func_parse_init_data(pcontext);
+    }
+    else
+    {
+        error = pcontext->func_parse_start_code(pcontext);
+    }
+
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to parse the start code!");
+        return error;
+    }
+
+    /* set up bitstream buffer */
+    cxt->getbits.list = &(cxt->list);
+
+    /* setup buffer pointer */
+    cxt->getbits.bstrm_buf.buf = cxt->parse_cubby.buf;
+
+    // TODO: check if cxt->getbits.is_emul_reqd is set properly
+
+    for (i = 0; i < cxt->list.num_items; i++)
+    {
+        /* setup bitstream parser */
+        cxt->getbits.bstrm_buf.buf_index = cxt->list.data[i].stpos;
+        cxt->getbits.bstrm_buf.buf_st = cxt->list.data[i].stpos;
+        cxt->getbits.bstrm_buf.buf_end = cxt->list.data[i].edpos;
+
+        /* It is possible to end up with buf_offset not equal to zero. */
+        cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+        cxt->getbits.au_pos = 0;
+        cxt->getbits.list_off = 0;
+        cxt->getbits.phase = 0;
+        cxt->getbits.emulation_byte_counter = 0;
+
+        cxt->list.start_offset = cxt->list.data[i].stpos;
+        cxt->list.end_offset = cxt->list.data[i].edpos;
+        cxt->list.total_bytes = cxt->list.data[i].edpos - cxt->list.data[i].stpos;
+
+        /* invoke parse entry point to parse the buffer */
+        error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
+
+        /* can't return error for now. Needs further investigation */
+#if 0
+        if (0 != error)
+        {
+            ETRACE("failed to parse the syntax: %d!", error);
+            return error;
+        }
+#endif
+
+        /* process parsing result */
+        error = pcontext->func_process_parsing_result(pcontext, i);
+
+        if (VBP_MULTI == error) {
+            return VBP_OK;
+        }
+        else if (0 != error)
+        {
+            ETRACE("Failed to process parsing result.");
+            return error;
+        }
+    }
+
+    return VBP_OK;
+}
+
+
+/**
+ *
+ * create the parser context
+ *
+ */
+uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext)
+{
+    uint32 error = VBP_OK;
+    vbp_context *pcontext = NULL;
+
+    /* guard against failure: never return an uninitialized pointer */
+    *ppcontext = NULL;
+
+    pcontext = vbp_malloc_set0(vbp_context, 1);
+    if (NULL == pcontext)
+    {
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    pcontext->parser_type = parser_type;
+
+    /* load parser, initialize parser operators and entry points */
+    error = vbp_utils_initialize_context(pcontext);
+    if (VBP_OK != error)
+    {
+        goto cleanup;
+    }
+
+    /* allocate parser context, persistent memory, query data and workload */
+    error = vbp_utils_allocate_parser_memory(pcontext);
+    if (VBP_OK != error)
+    {
+        goto cleanup;
+    }
+
+    viddec_pm_utils_bstream_init(&(pcontext->parser_cxt->getbits), NULL, 0);
+    pcontext->parser_cxt->cur_buf.list_index = -1;
+    pcontext->parser_cxt->parse_cubby.phase = 0;
+
+    /* invoke the entry point to initialize the parser. */
+    pcontext->parser_ops->init(
+        (uint32_t *)pcontext->parser_cxt->codec_data,
+        (uint32_t *)pcontext->persist_mem,
+        FALSE);
+
+    /* set up to find the first start code. */
+    pcontext->parser_cxt->sc_prefix_info.first_sc_detect = 1;
+
+    /* indicates initialized OK. */
+    pcontext->identifier = MAGIC_NUMBER;
+    *ppcontext = pcontext;
+    error = VBP_OK;
+
+cleanup:
+
+    if (VBP_OK != error)
+    {
+        vbp_utils_free_parser_memory(pcontext);
+        vbp_utils_uninitialize_context(pcontext);
+        free(pcontext);
+        pcontext = NULL;
+    }
+
+    return error;
+}
+
+/**
+ *
+ * destroy the context.
+ *
+ */
+uint32 vbp_utils_destroy_context(vbp_context *pcontext)
+{
+    /* entry point; no need to validate input parameters. */
+    vbp_utils_free_parser_memory(pcontext);
+    vbp_utils_uninitialize_context(pcontext);
+    free(pcontext);
+    pcontext = NULL;
+
+    return VBP_OK;
+}
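+
+/* Editor's sketch -- hypothetical caller, not part of the original patch. The
+ * typical lifecycle of these utilities (vbp_data_h264 stands in for whichever
+ * format-specific query type applies):
+ *
+ *     vbp_context *ctx = NULL;
+ *     vbp_data_h264 *out = NULL;
+ *
+ *     if (VBP_OK != vbp_utils_create_context(VBP_H264, &ctx))
+ *         return;
+ *     vbp_utils_parse_buffer(ctx, buf, size, 0);   // 0: frame data, 1: codec config
+ *     vbp_utils_query(ctx, (void **)&out);         // out points at ctx-owned memory
+ *     vbp_utils_destroy_context(ctx);              // also frees the query data
+ */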
+
+
+/**
+ *
+ * parse the sample buffer or parser configuration data.
+ *
+ */
+uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, uint8 init_data_flag)
+{
+    /* entry point; no need to validate input parameters. */
+
+    uint32 error = VBP_OK;
+
+    //ITRACE("buffer counter: %d",buffer_counter);
+
+    /* reset bit offset */
+    pcontext->parser_cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+
+    /* set up cubby. */
+    pcontext->parser_cxt->parse_cubby.buf = data;
+    pcontext->parser_cxt->parse_cubby.size = size;
+    pcontext->parser_cxt->parse_cubby.phase = 0;
+
+    error = vbp_utils_parse_es_buffer(pcontext, init_data_flag);
+
+    /* rolling count of buffers. */
+    if (0 == init_data_flag)
+    {
+        buffer_counter++;
+    }
+    return error;
+}
+
+/**
+ *
+ * provide query data back to the consumer
+ *
+ */
+uint32 vbp_utils_query(vbp_context *pcontext, void **data)
+{
+    /* entry point; no need to validate input parameters. */
+    uint32 error = VBP_OK;
+
+    error = pcontext->func_populate_query_data(pcontext);
+    if (VBP_OK == error)
+    {
+        *data = pcontext->query_data;
+    }
+    else
+    {
+        *data = NULL;
+    }
+    return error;
+}
+
+/**
+ *
+ * flush parsing buffer. Currently always succeeds.
+ *
+ */
+uint32 vbp_utils_flush(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    if (ops->flush != NULL) {
+        ops->flush((void *)cxt, (void *)&(cxt->codec_data[0]));
+    }
+    return VBP_OK;
+}
+
+
+#ifdef USE_AVC_SHORT_FORMAT
+/**
+ *
+ * update parser data and provide query data back to the consumer
+ *
+ */
+uint32 vbp_utils_update(vbp_context *pcontext, void *newdata, uint32 size, void **data)
+{
+    /* entry point; no need to validate input parameters. */
+    uint32 error = VBP_OK;
+
+    error = pcontext->func_update_data(pcontext,newdata,size);
+
+    if (VBP_OK == error)
+    {
+        *data = pcontext->query_data;
+    }
+    else
+    {
+        *data = NULL;
+    }
+    return error;
+}
+#endif
diff --git a/mixvbp/vbp_manager/vbp_utils.h b/mixvbp/vbp_manager/vbp_utils.h
new file mode 100755
index 0000000..7761c26
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_utils.h
@@ -0,0 +1,140 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_UTILS_H
+#define VBP_UTILS_H
+
+#include "viddec_parser_ops.h"
+#include "viddec_pm_parse.h"
+#include "viddec_pm.h"
+#include "vbp_trace.h"
+#include <stdlib.h>
+
+#define MAGIC_NUMBER 0x0DEADBEEF
+#define MAX_WORKLOAD_ITEMS 1000
+
+/* maximum 256 slices per sample buffer */
+#define MAX_NUM_SLICES 256
+
+/* maximum two pictures per sample buffer */
+#define MAX_NUM_PICTURES 2
+
+
+#define vbp_malloc(struct_type, n_structs) \
+    ((struct_type *) malloc(sizeof(struct_type) * n_structs))
+
+#define vbp_malloc_set0(struct_type, n_structs) \
+    ((struct_type *) vbp_try_malloc0(sizeof(struct_type) * n_structs))
+
+
+
+extern uint32 viddec_parse_sc(void *in, void *pcxt, void *sc_state);
+
+/* rolling counter of sample buffers */
+extern uint32 buffer_counter;
+
+typedef struct vbp_context_t vbp_context;
+
+typedef uint32 (*function_init_parser_entries)(vbp_context* cxt);
+typedef uint32 (*function_allocate_query_data)(vbp_context* cxt);
+typedef uint32 (*function_free_query_data)(vbp_context* cxt);
+typedef uint32 (*function_parse_init_data)(vbp_context* cxt);
+typedef uint32 (*function_parse_start_code)(vbp_context* cxt);
+typedef uint32 (*function_process_parsing_result)(vbp_context* cxt, int i);
+typedef uint32 (*function_populate_query_data)(vbp_context* cxt);
+#ifdef USE_AVC_SHORT_FORMAT
+typedef uint32 (*function_update_data)(vbp_context* cxt, void *newdata, uint32 size);
+#endif
+
+struct vbp_context_t
+{
+    /* magic number */
+    uint32 identifier;
+
+    /* parser type, e.g. MPEG-2, MPEG-4, H.264, VC-1 */
+    uint32 parser_type;
+
+    /* handle to parser (shared object) */
+    void *fd_parser;
+
+    /* parser (shared object) entry points */
+    viddec_parser_ops_t *parser_ops;
+
+    /* parser context */
+    viddec_pm_cxt_t *parser_cxt;
+
+    /* workloads */
+    viddec_workload_t *workload1, *workload2;
+
+    /* persistent memory for parser */
+    uint32 *persist_mem;
+
+    /* format-specific query data */
+    void *query_data;
+
+    /* parser type specific data */
+    void *parser_private;
+
+    function_init_parser_entries func_init_parser_entries;
+    function_allocate_query_data func_allocate_query_data;
+    function_free_query_data func_free_query_data;
+    function_parse_init_data func_parse_init_data;
+    function_parse_start_code func_parse_start_code;
+    function_process_parsing_result func_process_parsing_result;
+    function_populate_query_data func_populate_query_data;
+#ifdef USE_AVC_SHORT_FORMAT
+    function_update_data func_update_data;
+#endif
+};
+
+
+void* vbp_try_malloc0(uint32 size);
+
+/**
+ * create VBP context
+ */
+uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext);
+
+/*
+ * destroy VBP context
+ */
+uint32 vbp_utils_destroy_context(vbp_context *pcontext);
+
+/*
+ * parse bitstream
+ */
+uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, uint8 init_data_flag);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_utils_query(vbp_context *pcontext, void **data);
+
+/*
+ * flush un-parsed bitstream
+ */
+uint32 vbp_utils_flush(vbp_context *pcontext);
+
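+/* Editor's note -- illustration only, not part of the original patch. The typed
+ * allocation macros above are used throughout the manager; a minimal sketch of
+ * the intended pattern:
+ *
+ *     viddec_pm_cxt_t *pm  = vbp_malloc(viddec_pm_cxt_t, 1);   // uninitialized
+ *     vbp_context *ctx = vbp_malloc_set0(vbp_context, 1);      // zero-filled
+ *     if (NULL == pm || NULL == ctx)
+ *         return VBP_MEM;
+ */
+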
+#endif /* VBP_UTILS_H */
diff --git a/mixvbp/vbp_manager/vbp_vc1_parser.c b/mixvbp/vbp_manager/vbp_vc1_parser.c
new file mode 100755
index 0000000..4a8d6d3
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_vc1_parser.c
@@ -0,0 +1,1126 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#include <string.h>
+#include <dlfcn.h>
+
+#include "vc1.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vc1_parser.h"
+
+/* maximum number of macroblocks divided by 2; see va.h */
+#define MAX_BITPLANE_SIZE 16384
+
+/* Start code prefix is 001, which is 3 bytes. */
+#define PREFIX_SIZE 3
+
+static uint32 b_fraction_table[][9] = {
+    /* num 0  1  2  3  4  5   6   7   8  den */
+    /* 0 */ { 0, 0, 0, 0, 0, 0,  0,  0,  0 },
+    /* 1 */ { 0, 0, 0, 1, 3, 5,  9, 11, 17 },
+    /* 2 */ { 0, 0, 0, 2, 0, 6,  0, 12,  0 },
+    /* 3 */ { 0, 0, 0, 0, 4, 7,  0, 13, 18 },
+    /* 4 */ { 0, 0, 0, 0, 0, 8,  0, 14,  0 },
+    /* 5 */ { 0, 0, 0, 0, 0, 0, 10, 15, 19 },
+    /* 6 */ { 0, 0, 0, 0, 0, 0,  0, 16,  0 },
+    /* 7 */ { 0, 0, 0, 0, 0, 0,  0,  0, 20 }
+};
+
+
+static uint8 vc1_aspect_ratio_table[][2] =
+{
+    {0, 0},
+    {1, 1},
+    {12, 11},
+    {10, 11},
+    {16, 11},
+    {40, 33},
+    {24, 11},
+    {20, 11},
+    {32, 11},
+    {80, 33},
+    {18, 11},
+    {15, 11},
+    {64, 33},
+    {160, 99},
+
+    // reserved
+    {0, 0}
+};
+
+
+
+/**
+ * set parser entry points
+ */
+uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        /* impossible, just sanity check */
+        return VBP_PARM;
+    }
+
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_vc1_init");
+    if (NULL == pcontext->parser_ops->init)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->parse_sc = viddec_parse_sc;
+
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_vc1_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_vc1_get_context_size");
+    if (NULL == pcontext->parser_ops->get_cxt_size)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_vc1_wkld_done");
+    if (NULL == pcontext->parser_ops->is_wkld_done)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->is_frame_start = dlsym(pcontext->fd_parser, "viddec_vc1_is_start_frame");
+    if (NULL == pcontext->parser_ops->is_frame_start)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    /* entry point not needed */
+    pcontext->parser_ops->flush = NULL;
+
+    return VBP_OK;
+}
+
+/**
+ * allocate query data structure
+ */
+uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext)
+{
+    if (NULL != pcontext->query_data)
+    {
+        /* impossible, just sanity check */
+        return VBP_PARM;
+    }
+
+    pcontext->query_data = NULL;
+
+    vbp_data_vc1 *query_data = NULL;
+    query_data = vbp_malloc_set0(vbp_data_vc1, 1);
+    if (NULL == query_data)
+    {
+        return VBP_MEM;
+    }
+
+    /* assign the pointer */
+    pcontext->query_data = (void *)query_data;
+
+    query_data->se_data = vbp_malloc_set0(vbp_codec_data_vc1, 1);
+    if (NULL == query_data->se_data)
+    {
+        goto cleanup;
+    }
+    query_data->pic_data = vbp_malloc_set0(vbp_picture_data_vc1, MAX_NUM_PICTURES);
+    if (NULL == query_data->pic_data)
+    {
+        goto cleanup;
+    }
+
+    int i;
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferVC1, 1);
+        if (NULL == query_data->pic_data[i].pic_parms)
+        {
+            goto cleanup;
+        }
+
+        query_data->pic_data[i].packed_bitplanes = vbp_try_malloc0(MAX_BITPLANE_SIZE);
+        if (NULL == query_data->pic_data[i].packed_bitplanes)
+        {
+            goto cleanup;
+        }
+
+        query_data->pic_data[i].slc_data = vbp_try_malloc0(MAX_NUM_SLICES * sizeof(vbp_slice_data_vc1));
+        if (NULL == query_data->pic_data[i].slc_data)
+        {
+            goto cleanup;
+        }
+    }
+
+    return VBP_OK;
+
+cleanup:
+    vbp_free_query_data_vc1(pcontext);
+
+    return VBP_MEM;
+}
+
+
+/**
+ * free query data structure
+ */
+uint32 vbp_free_query_data_vc1(vbp_context *pcontext)
+{
+    vbp_data_vc1 *query_data = NULL;
+
+    if (NULL == pcontext->query_data)
+    {
+        return VBP_OK;
+    }
+
+    query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+    if (query_data->pic_data)
+    {
+        int i = 0;
+        for (i = 0; i < MAX_NUM_PICTURES; i++)
+        {
+            free(query_data->pic_data[i].slc_data);
+            free(query_data->pic_data[i].packed_bitplanes);
+            free(query_data->pic_data[i].pic_parms);
+        }
+    }
+
+    free(query_data->pic_data);
+
+    free(query_data->se_data);
+
+    free(query_data);
+
+    pcontext->query_data = NULL;
+
+    return VBP_OK;
+}
+
+
+/**
+ * We want to create a list of buffer segments where each segment is a start
+ * code followed by all the data up to the next start code or to the end of
+ * the buffer. In VC-1, it is common to get buffers with no start codes. The
+ * parser proper doesn't really handle the situation where there are no SCs.
+ * In this case, I will bypass the stripping of the SC code and assume a frame.
+ */
+static uint32 vbp_parse_start_code_helper_vc1(
+    viddec_pm_cxt_t *cxt,
+    viddec_parser_ops_t *ops,
+    int init_data_flag)
+{
+    uint32_t ret = VBP_OK;
+    viddec_sc_parse_cubby_cxt_t cubby;
+
+    /* make a copy of the cubby */
+    /* this doesn't copy the buffer, merely the structure that holds the buffer */
+    /* pointer. Below, where we call parse_sc() the code starts the search for */
+    /* SCs at the beginning of the buffer pointed to by the cubby, so in our */
+    /* cubby copy we increment the pointer as we move through the buffer. If */
+    /* you think of each start code followed either by another start code or the */
+    /* end of the buffer, then parse_sc() is returning information relative to */
+    /* the current segment. */
+
+    cubby = cxt->parse_cubby;
+
+    cxt->list.num_items = 0;
+    cxt->list.data[0].stpos = 0;
+    cxt->getbits.is_emul_reqd = 1;
+
+    /* codec initialization data is always start code prefixed (but may not start at position 0).
+     * a sample buffer for AP has three start code patterns here:
+     * pattern 0: no start code at all, the whole buffer is a single segment item
+     * pattern 1: start codes for all segment items
+     * pattern 2: no start code for the first segment item, start codes for the remaining segment items
+     */
+
+    bool is_pattern_two = FALSE;
+
+    unsigned char start_code = 0;
+
+    while (1)
+    {
+        /* parse the created buffer for sc */
+        ret = ops->parse_sc((void *)&cubby, (void *)&(cxt->codec_data[0]), &(cxt->sc_prefix_info));
+        if (ret == 1)
+        {
+            cubby.phase = 0;
+            start_code = *(unsigned char*)(cubby.buf + cubby.sc_end_pos);
+#if 1
+            if (0 == init_data_flag &&
+                PREFIX_SIZE != cubby.sc_end_pos &&
+                0 == cxt->list.num_items)
+            {
+                /* buffer does not have a start code at the beginning */
+                vc1_viddec_parser_t *parser = NULL;
+                vc1_metadata_t *seqLayerHeader = NULL;
+
+                parser = (vc1_viddec_parser_t *)cxt->codec_data;
+                seqLayerHeader = &(parser->info.metadata);
+                if (1 == seqLayerHeader->INTERLACE)
+                {
+                    /* this is a hack for interlaced field coding */
+                    /* handle field interlace coding. One sample contains two fields, where:
+                     * the first field does not have a start code prefix,
+                     * the second field has a start code prefix.
+                     */
+                    cxt->list.num_items = 1;
+                    cxt->list.data[0].stpos = 0;
+                    is_pattern_two = TRUE;
+                }
+            }
+#endif
+            if (cxt->list.num_items == 0)  /* found first SC. */
+            {
+                /* sc_end_pos gets us to the SC type. We need to back up to the first zero */
+                cxt->list.data[0].stpos = cubby.sc_end_pos - PREFIX_SIZE;
+            }
+            else
+            {
+                /* First we set the end position of the last segment. */
+                /* Since the SC parser searches from SC type to SC type and the */
+                /* sc_end_pos is relative to this segment only, we merely add */
+                /* sc_end_pos to the start to find the end. */
+                cxt->list.data[cxt->list.num_items - 1].edpos =
+                    cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos;
+
+                /* Then we set the start position of the current segment. */
+                /* So I need to subtract 1 ??? */
+                cxt->list.data[cxt->list.num_items].stpos =
+                    cxt->list.data[cxt->list.num_items - 1].edpos;
+
+                if (is_pattern_two)
+                {
+                    cxt->list.data[cxt->list.num_items].stpos -= PREFIX_SIZE;
+                    /* restore the normal pattern */
+                    is_pattern_two = FALSE;
+                }
+            }
+            /* We need to set up the cubby buffer for the next time through parse_sc(). */
+            /* But even though we want the list to contain a segment as described */
+            /* above, we want the cubby buffer to start just past the prefix, or it will */
+            /* find the same SC again. So I bump the cubby buffer past the prefix. */
+            cubby.buf = /*cubby.buf +*/
+                cxt->parse_cubby.buf +
+                cxt->list.data[cxt->list.num_items].stpos +
+                PREFIX_SIZE;
+
+            cubby.size = cxt->parse_cubby.size -
+                cxt->list.data[cxt->list.num_items].stpos -
+                PREFIX_SIZE;
+
+            if (start_code >= 0x0A && start_code <= 0x0F)
+            {
+                /* only put known start codes in the list:
+                 * 0x0A: end of sequence
+                 * 0x0B: slice header
+                 * 0x0C: frame header
+                 * 0x0D: field header
+                 * 0x0E: entry point header
+                 * 0x0F: sequence header
+                 */
+                cxt->list.num_items++;
+            }
+            else
+            {
+                ITRACE("skipping unknown start code: %d", start_code);
+            }
+
+            if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+            {
+                WTRACE("Num items exceeds the limit!");
+                /* not fatal, just stop parsing */
+                break;
+            }
+        }
+        else
+        {
+            /* we get here if we reach the end of the buffer while looking for a SC. */
+            /* If we never found a SC, then num_items will never get incremented. */
+            if (cxt->list.num_items == 0)
+            {
+                /* If we don't find a SC we probably still have a frame of data. */
+                /* So let's bump the num_items or else later we will not parse the */
+                /* frame. */
+                cxt->list.num_items = 1;
+            }
+            /* now we can set the end position of the last segment. */
+            cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
+            break;
+        }
+    }
+    return VBP_OK;
+}
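+
+/* Editor's sketch -- illustration only, not part of the original patch. On return,
+ * cxt->list describes the start-code-delimited segments; a consumer walks it as:
+ *
+ *     int i;
+ *     for (i = 0; i < cxt->list.num_items; i++)
+ *     {
+ *         uint32 st = cxt->list.data[i].stpos;   // offset of the 00 00 01 prefix
+ *         uint32 ed = cxt->list.data[i].edpos;   // one past the segment's last byte
+ *         // segment i spans parse_cubby.buf[st] .. parse_cubby.buf[ed - 1]
+ *     }
+ */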
+
+/*
+* parse initialization data (decoder configuration data).
+* for VC-1 advanced profile, the data is a sequence header and an
+* entry point header.
+* for VC-1 main/simple profile, the data format
+* is defined in the VC-1 spec: Annex J (Decoder initialization metadata
+* structures 1 and 3)
+*/
+uint32 vbp_parse_init_data_vc1(vbp_context *pcontext)
+{
+    /**
+     * init data (aka decoder configuration data) must
+     * be start-code prefixed
+     */
+
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    return vbp_parse_start_code_helper_vc1(cxt, ops, 1);
+}
+
+
+
+/**
+* Parse start codes. VC-1 main/simple profile does not have start codes;
+* VC-1 advanced may not have start codes either.
+*/
+uint32_t vbp_parse_start_code_vc1(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+
+    vc1_viddec_parser_t *parser = NULL;
+    vc1_metadata_t *seqLayerHeader = NULL;
+
+    vbp_data_vc1 *query_data = (vbp_data_vc1 *) pcontext->query_data;
+
+    /* Reset query data for the new sample buffer */
+    int i = 0;
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->num_pictures = 0;
+        query_data->pic_data[i].num_slices = 0;
+        query_data->pic_data[i].picture_is_skipped = 0;
+    }
+
+    parser = (vc1_viddec_parser_t *)cxt->codec_data;
+    seqLayerHeader = &(parser->info.metadata);
+
+
+    /* WMV codec data will have a start code, but the WMV picture data won't. */
+    if (VC1_PROFILE_ADVANCED == seqLayerHeader->PROFILE)
+    {
+        return vbp_parse_start_code_helper_vc1(cxt, ops, 0);
+    }
+    else
+    {
+        /* WMV: VC-1 simple or main profile. No start code present. */
+
+        /* must set is_emul_reqd to 0! */
+        cxt->getbits.is_emul_reqd = 0;
+        cxt->list.num_items = 1;
+        cxt->list.data[0].stpos = 0;
+        cxt->list.data[0].edpos = cxt->parse_cubby.size;
+    }
+
+    return VBP_OK;
+}
+
+
+/**
+ *
+ */
+static inline uint8 vbp_get_bit_vc1(uint32 *data, uint32 *current_word, uint32 *current_bit)
+{
+    uint8 value;
+
+    value = (data[*current_word] >> *current_bit) & 1;
+
+    /* Fix up bit/byte offsets. endianness?? */
+    if (*current_bit < 31)
+    {
+        ++(*current_bit);
+    }
+    else
+    {
+        ++(*current_word);
+        *current_bit = 0;
+    }
+
+    return value;
+}
+
+
+/**
+ *
+ */
+static uint32 vbp_pack_bitplane_vc1(
+    uint32 *from_plane,
+    uint8 *to_plane,
+    uint32 width,
+    uint32 height,
+    uint32 nibble_shift)
+{
+    uint32 error = VBP_OK;
+    uint32 current_word = 0;
+    uint32 current_bit = 0;  /* must agree with number in vbp_get_bit_vc1 */
+    uint32 i, j, n;
+    uint8 value;
+    uint32 stride = 0;
+
+    stride = 32 * ((width + 31) / 32);
+
+    for (i = 0, n = 0; i < height; i++)
+    {
+        for (j = 0; j < stride; j++)
+        {
+            if (j < width)
+            {
+                value = vbp_get_bit_vc1(
+                    from_plane,
+                    &current_word,
+                    &current_bit);
+
+                to_plane[n / 2] |= value << (nibble_shift + ((n % 2) ?
0 : 4)); + n++; + } + else + { + break; + } + } + if (stride > width) + { + current_word++; + current_bit = 0; + } + } + + return error; +} + + +/** + * + */ +static inline uint32 vbp_map_bfraction(uint32 numerator, uint32 denominator) +{ + uint32 b_fraction = 0; + + if ((numerator < 8) && (denominator < 9)) + { + b_fraction = b_fraction_table[numerator][denominator]; + } + + return b_fraction; +} + +/** + * + */ +static uint32 vbp_pack_bitplanes_vc1( + vbp_context *pcontext, + int index, + vbp_picture_data_vc1* pic_data) +{ + uint32 error = VBP_OK; + if (0 == pic_data->pic_parms->bitplane_present.value) + { + /* return if bitplane is not present */ + pic_data->size_bitplanes = 0; + memset(pic_data->packed_bitplanes, 0, MAX_BITPLANE_SIZE); + return error; + } + + vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data; + vc1_metadata_t *seqLayerHeader = &(parser->info.metadata); + vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader); + + + /* set bit plane size */ + pic_data->size_bitplanes = ((seqLayerHeader->widthMB * seqLayerHeader->heightMB) + 1) / 2; + + + memset(pic_data->packed_bitplanes, 0, pic_data->size_bitplanes); + + /* see libva library va.h for nibble bit */ + switch (picLayerHeader->PTYPE) + { + case VC1_I_FRAME: + case VC1_BI_FRAME: + if (picLayerHeader->OVERFLAGS.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->OVERFLAGS.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 2); + } + if (picLayerHeader->ACPRED.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->ACPRED.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 1); + } + if (picLayerHeader->FIELDTX.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->FIELDTX.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 0); + } + /* sanity check */ + if (picLayerHeader->MVTYPEMB.imode || + picLayerHeader->DIRECTMB.imode || + picLayerHeader->SKIPMB.imode || + picLayerHeader->FORWARDMB.imode) + { + ETRACE("Unexpected bit-plane type."); + error = VBP_TYPE; + } + break; + + case VC1_P_FRAME: + if (picLayerHeader->MVTYPEMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->MVTYPEMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 2); + } + if (picLayerHeader->SKIPMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->SKIPMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 1); + } + if (picLayerHeader->DIRECTMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->DIRECTMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 0); + } + /* sanity check */ + if (picLayerHeader->FIELDTX.imode || + picLayerHeader->FORWARDMB.imode || + picLayerHeader->ACPRED.imode || + picLayerHeader->OVERFLAGS.imode ) + { + ETRACE("Unexpected bit-plane type."); + error = VBP_TYPE; + } + break; + + case VC1_B_FRAME: + if (picLayerHeader->FORWARDMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->FORWARDMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 2); + } + if (picLayerHeader->SKIPMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->SKIPMB.databits, + pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 1); + } + if (picLayerHeader->DIRECTMB.imode) + { + vbp_pack_bitplane_vc1( + picLayerHeader->DIRECTMB.databits, + 
pic_data->packed_bitplanes, + seqLayerHeader->widthMB, + seqLayerHeader->heightMB, + 0); + } + /* sanity check */ + if (picLayerHeader->MVTYPEMB.imode || + picLayerHeader->FIELDTX.imode || + picLayerHeader->ACPRED.imode || + picLayerHeader->OVERFLAGS.imode) + { + ETRACE("Unexpected bit-plane type."); + error = VBP_TYPE; + } + break; + } + return error; +} + + +/** + * fill the query data structure after sequence header, entry point header + * or a complete frame is parsed. + * NOTE: currently partial frame is not handled properly + */ +uint32 vbp_populate_query_data_vc1(vbp_context *pcontext) +{ + uint32 error = VBP_OK; + + vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data; + vc1_metadata_t *seqLayerHeader = &(parser->info.metadata); + + vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data; + + /* first we get the SH/EP data. Can we cut down on this? */ + vbp_codec_data_vc1 *se_data = query_data->se_data; + + + uint32_t curHrdNum = seqLayerHeader->HRD_NUM_LEAKY_BUCKETS; + + se_data->bit_rate = curHrdNum ? + seqLayerHeader->hrd_initial_state.sLeakyBucket[curHrdNum -1].HRD_RATE : + seqLayerHeader->hrd_initial_state.sLeakyBucket[0].HRD_RATE; + + se_data->PROFILE = seqLayerHeader->PROFILE; + se_data->LEVEL = seqLayerHeader->LEVEL; + se_data->POSTPROCFLAG = seqLayerHeader->POSTPROCFLAG; + se_data->PULLDOWN = seqLayerHeader->PULLDOWN; + se_data->INTERLACE = seqLayerHeader->INTERLACE; + se_data->TFCNTRFLAG = seqLayerHeader->TFCNTRFLAG; + se_data->FINTERPFLAG = seqLayerHeader->FINTERPFLAG; + se_data->PSF = seqLayerHeader->PSF; + + // color matrix + if (seqLayerHeader->COLOR_FORMAT_FLAG) + { + se_data->MATRIX_COEF = seqLayerHeader->MATRIX_COEF; + } + else + { + //ITU-R BT. 601-5. + se_data->MATRIX_COEF = 6; + } + + // aspect ratio + if (seqLayerHeader->ASPECT_RATIO_FLAG == 1) + { + se_data->ASPECT_RATIO = seqLayerHeader->ASPECT_RATIO; + if (se_data->ASPECT_RATIO < 14) + { + se_data->ASPECT_HORIZ_SIZE = vc1_aspect_ratio_table[se_data->ASPECT_RATIO][0]; + se_data->ASPECT_VERT_SIZE = vc1_aspect_ratio_table[se_data->ASPECT_RATIO][1]; + } + else if (se_data->ASPECT_RATIO == 15) + { + se_data->ASPECT_HORIZ_SIZE = seqLayerHeader->ASPECT_HORIZ_SIZE; + se_data->ASPECT_VERT_SIZE = seqLayerHeader->ASPECT_VERT_SIZE; + } + else // se_data->ASPECT_RATIO == 14 + { + se_data->ASPECT_HORIZ_SIZE = 0; + se_data->ASPECT_VERT_SIZE = 0; + } + } + else + { + // unspecified + se_data->ASPECT_RATIO = 0; + se_data->ASPECT_HORIZ_SIZE = 0; + se_data->ASPECT_VERT_SIZE = 0; + } + + se_data->BROKEN_LINK = seqLayerHeader->BROKEN_LINK; + se_data->CLOSED_ENTRY = seqLayerHeader->CLOSED_ENTRY; + se_data->PANSCAN_FLAG = seqLayerHeader->PANSCAN_FLAG; + se_data->REFDIST_FLAG = seqLayerHeader->REFDIST_FLAG; + se_data->LOOPFILTER = seqLayerHeader->LOOPFILTER; + se_data->FASTUVMC = seqLayerHeader->FASTUVMC; + se_data->EXTENDED_MV = seqLayerHeader->EXTENDED_MV; + se_data->DQUANT = seqLayerHeader->DQUANT; + se_data->VSTRANSFORM = seqLayerHeader->VSTRANSFORM; + se_data->OVERLAP = seqLayerHeader->OVERLAP; + se_data->QUANTIZER = seqLayerHeader->QUANTIZER; + se_data->CODED_WIDTH = (seqLayerHeader->width + 1) << 1; + se_data->CODED_HEIGHT = (seqLayerHeader->height + 1) << 1; + se_data->EXTENDED_DMV = seqLayerHeader->EXTENDED_DMV; + se_data->RANGE_MAPY_FLAG = seqLayerHeader->RANGE_MAPY_FLAG; + se_data->RANGE_MAPY = seqLayerHeader->RANGE_MAPY; + se_data->RANGE_MAPUV_FLAG = seqLayerHeader->RANGE_MAPUV_FLAG; + se_data->RANGE_MAPUV = seqLayerHeader->RANGE_MAPUV; + se_data->RANGERED = 
seqLayerHeader->RANGERED;
+    se_data->MAXBFRAMES = seqLayerHeader->MAXBFRAMES;
+    se_data->MULTIRES = seqLayerHeader->MULTIRES;
+    se_data->SYNCMARKER = seqLayerHeader->SYNCMARKER;
+    se_data->RNDCTRL = seqLayerHeader->RNDCTRL;
+    se_data->REFDIST = seqLayerHeader->REFDIST;
+    se_data->widthMB = seqLayerHeader->widthMB;
+    se_data->heightMB = seqLayerHeader->heightMB;
+    se_data->INTCOMPFIELD = seqLayerHeader->INTCOMPFIELD;
+    se_data->LUMSCALE2 = seqLayerHeader->LUMSCALE2;
+    se_data->LUMSHIFT2 = seqLayerHeader->LUMSHIFT2;
+
+    /* update buffer number */
+    query_data->buf_number = buffer_counter;
+
+    if (query_data->num_pictures > 2)
+    {
+        WTRACE("sample buffer contains %d pictures", query_data->num_pictures);
+    }
+    return error;
+}
+
+
+
+static void vbp_pack_picture_params_vc1(
+    vbp_context *pcontext,
+    int index,
+    vbp_picture_data_vc1* pic_data)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
+    vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
+    vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader);
+
+
+    VAPictureParameterBufferVC1 *pic_parms = pic_data->pic_parms;
+
+    /* Then we get the picture header data. Picture type needs translation. */
+    pic_parms->forward_reference_picture = VA_INVALID_SURFACE;
+    pic_parms->backward_reference_picture = VA_INVALID_SURFACE;
+    pic_parms->inloop_decoded_picture = VA_INVALID_SURFACE;
+
+    pic_parms->sequence_fields.value = 0;
+    pic_parms->sequence_fields.bits.pulldown = seqLayerHeader->PULLDOWN;
+    pic_parms->sequence_fields.bits.interlace = seqLayerHeader->INTERLACE;
+    pic_parms->sequence_fields.bits.tfcntrflag = seqLayerHeader->TFCNTRFLAG;
+    pic_parms->sequence_fields.bits.finterpflag = seqLayerHeader->FINTERPFLAG;
+    pic_parms->sequence_fields.bits.psf = seqLayerHeader->PSF;
+    pic_parms->sequence_fields.bits.multires = seqLayerHeader->MULTIRES;
+    pic_parms->sequence_fields.bits.overlap = seqLayerHeader->OVERLAP;
+    pic_parms->sequence_fields.bits.syncmarker = seqLayerHeader->SYNCMARKER;
+    pic_parms->sequence_fields.bits.rangered = seqLayerHeader->RANGERED;
+    pic_parms->sequence_fields.bits.max_b_frames = seqLayerHeader->MAXBFRAMES;
+
+    pic_parms->coded_width = (seqLayerHeader->width + 1) << 1;
+    pic_parms->coded_height = (seqLayerHeader->height + 1) << 1;
+
+    pic_parms->entrypoint_fields.value = 0;
+    pic_parms->entrypoint_fields.bits.closed_entry = seqLayerHeader->CLOSED_ENTRY;
+    pic_parms->entrypoint_fields.bits.broken_link = seqLayerHeader->BROKEN_LINK;
+    pic_parms->entrypoint_fields.bits.loopfilter = seqLayerHeader->LOOPFILTER;
+    pic_parms->entrypoint_fields.bits.panscan_flag = seqLayerHeader->PANSCAN_FLAG;
+
+    pic_parms->conditional_overlap_flag = picLayerHeader->CONDOVER;
+    pic_parms->fast_uvmc_flag = seqLayerHeader->FASTUVMC;
+
+    pic_parms->range_mapping_fields.value = 0;
+    pic_parms->range_mapping_fields.bits.luma_flag = seqLayerHeader->RANGE_MAPY_FLAG;
+    pic_parms->range_mapping_fields.bits.luma = seqLayerHeader->RANGE_MAPY;
+    pic_parms->range_mapping_fields.bits.chroma_flag = seqLayerHeader->RANGE_MAPUV_FLAG;
+    pic_parms->range_mapping_fields.bits.chroma = seqLayerHeader->RANGE_MAPUV;
+
+    pic_parms->b_picture_fraction =
+        vbp_map_bfraction(picLayerHeader->BFRACTION_NUM, picLayerHeader->BFRACTION_DEN);
+
+    pic_parms->cbp_table = picLayerHeader->CBPTAB;
+    pic_parms->mb_mode_table = picLayerHeader->MBMODETAB;
+    pic_parms->range_reduction_frame = picLayerHeader->RANGEREDFRM;
+    pic_parms->rounding_control = picLayerHeader->RNDCTRL;
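+
+    /* Editor's note -- worked example, not in the original patch: the
+     * vbp_map_bfraction() call above converts BFRACTION numerator/denominator
+     * pairs into the VLC table index expected by libva; from b_fraction_table:
+     *
+     *     vbp_map_bfraction(1, 2) == 0
+     *     vbp_map_bfraction(2, 3) == 2
+     *     vbp_map_bfraction(4, 5) == 8
+     *
+     * out-of-range inputs (num > 7 or den > 8) map to 0. */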
pic_parms->post_processing = picLayerHeader->POSTPROC; + /* fix this. Add RESPIC to parser. */ + pic_parms->picture_resolution_index = 0; + pic_parms->luma_scale = picLayerHeader->LUMSCALE; + pic_parms->luma_shift = picLayerHeader->LUMSHIFT; + + pic_parms->picture_fields.value = 0; + switch (picLayerHeader->PTYPE) + { + case VC1_I_FRAME: + pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_I; + break; + + case VC1_P_FRAME: + pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_P; + break; + + case VC1_B_FRAME: + pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_B; + break; + + case VC1_BI_FRAME: + pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_BI; + break; + + case VC1_SKIPPED_FRAME: + pic_data->picture_is_skipped = VC1_PTYPE_SKIPPED; + break; + + default: + // TODO: handle this case + break; + } + pic_parms->picture_fields.bits.frame_coding_mode = picLayerHeader->FCM; + if (0 == seqLayerHeader->PROFILE || 1 == seqLayerHeader->PROFILE) + { + /* simple or main profile, top field flag is not present, default to 1.*/ + pic_parms->picture_fields.bits.top_field_first = 1; + } + else + { + pic_parms->picture_fields.bits.top_field_first = picLayerHeader->TFF; + } + + pic_parms->picture_fields.bits.is_first_field = !(picLayerHeader->CurrField); + /* This seems to be set based on the MVMODE and MVMODE2 syntax. */ + /* This is a hack. Probably will need refining. */ + if ((VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE) || + (VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE2)) + { + pic_parms->picture_fields.bits.intensity_compensation = 1; + } + else + { + pic_parms->picture_fields.bits.intensity_compensation = picLayerHeader->INTCOMP; + } + + /* Lets store the raw-mode BP bits. */ + pic_parms->raw_coding.value = 0; + pic_parms->raw_coding.flags.mv_type_mb = picLayerHeader->raw_MVTYPEMB; + pic_parms->raw_coding.flags.direct_mb = picLayerHeader->raw_DIRECTMB; + pic_parms->raw_coding.flags.skip_mb = picLayerHeader->raw_SKIPMB; + pic_parms->raw_coding.flags.field_tx = picLayerHeader->raw_FIELDTX; + pic_parms->raw_coding.flags.forward_mb = picLayerHeader->raw_FORWARDMB; + pic_parms->raw_coding.flags.ac_pred = picLayerHeader->raw_ACPRED; + pic_parms->raw_coding.flags.overflags = picLayerHeader->raw_OVERFLAGS; + + /* imode 1/0 indicates bitmap presence in Pic Hdr. */ + pic_parms->bitplane_present.value = 0; + + pic_parms->bitplane_present.flags.bp_mv_type_mb = + pic_parms->raw_coding.flags.mv_type_mb ? 1 : + (picLayerHeader->MVTYPEMB.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_direct_mb = + pic_parms->raw_coding.flags.direct_mb ? 1 : + (picLayerHeader->DIRECTMB.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_skip_mb = + pic_parms->raw_coding.flags.skip_mb ? 1 : + (picLayerHeader->SKIPMB.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_field_tx = + pic_parms->raw_coding.flags.field_tx ? 1 : + (picLayerHeader->FIELDTX.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_forward_mb = + pic_parms->raw_coding.flags.forward_mb ? 1 : + (picLayerHeader->FORWARDMB.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_ac_pred = + pic_parms->raw_coding.flags.ac_pred ? 1 : + (picLayerHeader->ACPRED.imode ? 1: 0); + + pic_parms->bitplane_present.flags.bp_overflags = + pic_parms->raw_coding.flags.overflags ? 1 : + (picLayerHeader->OVERFLAGS.imode ? 
1: 0); + + pic_parms->reference_fields.value = 0; + pic_parms->reference_fields.bits.reference_distance_flag = + seqLayerHeader->REFDIST_FLAG; + + pic_parms->reference_fields.bits.reference_distance = + seqLayerHeader->REFDIST; + + pic_parms->reference_fields.bits.num_reference_pictures = + picLayerHeader->NUMREF; + + pic_parms->reference_fields.bits.reference_field_pic_indicator = + picLayerHeader->REFFIELD; + + pic_parms->mv_fields.value = 0; + pic_parms->mv_fields.bits.mv_mode = picLayerHeader->MVMODE; + pic_parms->mv_fields.bits.mv_mode2 = picLayerHeader->MVMODE2; + + pic_parms->mv_fields.bits.mv_table = picLayerHeader->MVTAB; + pic_parms->mv_fields.bits.two_mv_block_pattern_table = picLayerHeader->MV2BPTAB; + pic_parms->mv_fields.bits.four_mv_switch = picLayerHeader->MV4SWITCH; + pic_parms->mv_fields.bits.four_mv_block_pattern_table = picLayerHeader->MV4BPTAB; + pic_parms->mv_fields.bits.extended_mv_flag = seqLayerHeader->EXTENDED_MV; + pic_parms->mv_fields.bits.extended_mv_range = picLayerHeader->MVRANGE; + pic_parms->mv_fields.bits.extended_dmv_flag = seqLayerHeader->EXTENDED_DMV; + pic_parms->mv_fields.bits.extended_dmv_range = picLayerHeader->DMVRANGE; + + pic_parms->pic_quantizer_fields.value = 0; + pic_parms->pic_quantizer_fields.bits.dquant = seqLayerHeader->DQUANT; + pic_parms->pic_quantizer_fields.bits.quantizer = seqLayerHeader->QUANTIZER; + pic_parms->pic_quantizer_fields.bits.half_qp = picLayerHeader->HALFQP; + pic_parms->pic_quantizer_fields.bits.pic_quantizer_scale = picLayerHeader->PQUANT; + pic_parms->pic_quantizer_fields.bits.pic_quantizer_type = picLayerHeader->UniformQuant; + pic_parms->pic_quantizer_fields.bits.dq_frame = picLayerHeader->DQUANTFRM; + pic_parms->pic_quantizer_fields.bits.dq_profile = picLayerHeader->DQPROFILE; + pic_parms->pic_quantizer_fields.bits.dq_sb_edge = picLayerHeader->DQSBEDGE; + pic_parms->pic_quantizer_fields.bits.dq_db_edge = picLayerHeader->DQDBEDGE; + pic_parms->pic_quantizer_fields.bits.dq_binary_level = picLayerHeader->DQBILEVEL; + pic_parms->pic_quantizer_fields.bits.alt_pic_quantizer = picLayerHeader->ALTPQUANT; + + pic_parms->transform_fields.value = 0; + pic_parms->transform_fields.bits.variable_sized_transform_flag = + seqLayerHeader->VSTRANSFORM; + + pic_parms->transform_fields.bits.mb_level_transform_type_flag = picLayerHeader->TTMBF; + pic_parms->transform_fields.bits.frame_level_transform_type = picLayerHeader->TTFRM; + + pic_parms->transform_fields.bits.transform_ac_codingset_idx1 = + (picLayerHeader->TRANSACFRM > 0) ? picLayerHeader->TRANSACFRM - 1 : 0; + + pic_parms->transform_fields.bits.transform_ac_codingset_idx2 = + (picLayerHeader->TRANSACFRM2 > 0) ? 
picLayerHeader->TRANSACFRM2 - 1 : 0;
+
+    pic_parms->transform_fields.bits.intra_transform_dc_table = picLayerHeader->TRANSDCTAB;
+    pic_parms->sequence_fields.bits.profile = seqLayerHeader->PROFILE;
+}
+
+
+static void vbp_pack_slice_data_vc1(
+    vbp_context *pcontext,
+    int index,
+    vbp_picture_data_vc1* pic_data)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 slice_size = cxt->list.data[index].edpos - cxt->list.data[index].stpos;
+    uint32 bit;
+    uint32 byte;
+    uint8 is_emul;
+    viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul);
+
+    vbp_slice_data_vc1 *slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+    VASliceParameterBufferVC1 *slc_parms = &(slc_data->slc_parms);
+
+    /*uint32 data_offset = byte - cxt->list.data[index].stpos;*/
+
+    slc_data->buffer_addr = cxt->parse_cubby.buf + cxt->list.data[index].stpos;
+    slc_data->slice_size = slice_size;
+    slc_data->slice_offset = 0;
+
+    slc_parms->slice_data_size = slc_data->slice_size;
+    slc_parms->slice_data_offset = 0;
+
+    /* fix this. we need to be able to handle partial slices. */
+    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+    slc_parms->macroblock_offset = bit + byte * 8;
+
+    /* fix this. we need to get the slice_vertical_position from the code */
+    slc_parms->slice_vertical_position = pic_data->num_slices;
+
+    pic_data->num_slices++;
+}
+
+/**
+ * process parsing result
+ */
+uint32_t vbp_process_parsing_result_vc1(vbp_context *pcontext, int index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 error = VBP_OK;
+
+    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
+    if (parser->start_code != VC1_SC_FRM &&
+        parser->start_code != VC1_SC_FLD &&
+        parser->start_code != VC1_SC_SLC)
+    {
+        /* only handle frame data, field data and slice data here
+         */
+        return VBP_OK;
+    }
+    vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+    if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
+    {
+        query_data->num_pictures++;
+    }
+
+    if (query_data->num_pictures > MAX_NUM_PICTURES)
+    {
+        ETRACE("Num of pictures per sample buffer exceeds the limit (%d).", MAX_NUM_PICTURES);
+        return VBP_DATA;
+    }
+
+    if (query_data->num_pictures == 0)
+    {
+        ETRACE("Unexpected num of pictures.");
+        return VBP_DATA;
+    }
+
+    /* start packing data */
+    int picture_index = query_data->num_pictures - 1;
+    vbp_picture_data_vc1* pic_data = &(query_data->pic_data[picture_index]);
+
+    if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
+    {
+        /* set up the picture parameters first */
+        vbp_pack_picture_params_vc1(pcontext, index, pic_data);
+
+        /* set up the bitplane after the picture parameters (so that bitplane_present is updated) */
+        error = vbp_pack_bitplanes_vc1(pcontext, index, pic_data);
+        if (VBP_OK != error)
+        {
+            ETRACE("Failed to pack bitplane.");
+            return error;
+        }
+
+    }
+
+    /* Always pack the slice parameters. The first macroblock in the picture CANNOT
+     * be preceded by a slice header, so the first slice is always parsed.
+     */
+
+    if (pic_data->num_slices >= MAX_NUM_SLICES)
+    {
+        ETRACE("Num of slices exceeds the limit (%d).", MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+
+    /* set up slice parameters */
+    vbp_pack_slice_data_vc1(pcontext, index, pic_data);
+
+
+    return VBP_OK;
+}
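+
+/* Editor's sketch -- hypothetical consumer, not part of the original patch. After
+ * vbp_utils_query(), the VC-1 results packed above are walked like this (types
+ * from vbp_loader.h):
+ *
+ *     vbp_data_vc1 *d = NULL;
+ *     int p, s;
+ *     vbp_utils_query(ctx, (void **)&d);
+ *     for (p = 0; p < d->num_pictures; p++)        // up to 2 for field pairs
+ *     {
+ *         vbp_picture_data_vc1 *pic = &d->pic_data[p];
+ *         for (s = 0; s < pic->num_slices; s++)
+ *         {
+ *             vbp_slice_data_vc1 *slc = &pic->slc_data[s];
+ *             // slc->buffer_addr + slc->slice_offset, slc->slice_size ...
+ *         }
+ *     }
+ */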
diff --git a/mixvbp/vbp_manager/vbp_vc1_parser.h b/mixvbp/vbp_manager/vbp_vc1_parser.h
new file mode 100755
index 0000000..aec7a56
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_vc1_parser.h
@@ -0,0 +1,70 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#ifndef VBP_VC1_PARSER_H
+#define VBP_VC1_PARSER_H
+
+
+/*
+ * set up the parser's entry points
+ */
+uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext);
+
+/*
+ * allocate query data structure - vbp_vc1_data
+ */
+uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext);
+
+/*
+ * free query data structure
+ */
+uint32 vbp_free_query_data_vc1(vbp_context *pcontext);
+
+/*
+ * parse bitstream configuration data
+ */
+uint32 vbp_parse_init_data_vc1(vbp_context *pcontext);
+
+/*
+ * parse bitstream start codes and fill the viddec_input_buffer_t list.
+ * WMV has no start codes, so the whole buffer will be treated as a single frame.
+ * For VC-1 progressive, if no start code is found, the whole buffer will be treated
+ * as a single frame as well.
+ * For VC-1 interlace, the first field is not start code prefixed, but the second field
+ * is always start code prefixed.
+ */
+uint32 vbp_parse_start_code_vc1(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_vc1(vbp_context *pcontext, int list_index);
+
+/*
+ * populate query data structure
+ */
+uint32 vbp_populate_query_data_vc1(vbp_context *pcontext);
+
+
+#endif /*VBP_VC1_PARSER_H*/
diff --git a/mixvbp/vbp_manager/vbp_vp8_parser.c b/mixvbp/vbp_manager/vbp_vp8_parser.c
new file mode 100755
index 0000000..72dcfa9
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_vp8_parser.c
@@ -0,0 +1,532 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise.
Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#include <dlfcn.h>
+
+#include "vp8.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vp8_parser.h"
+
+uint32 vbp_init_parser_entries_vp8(vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        return VBP_PARM;
+    }
+
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_vp8_init");
+    if (NULL == pcontext->parser_ops->init)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->parse_sc = NULL;
+
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_vp8_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_vp8_get_context_size");
+    if (NULL == pcontext->parser_ops->get_cxt_size)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->is_wkld_done = NULL;
+
+    /* entry point not needed */
+    pcontext->parser_ops->is_frame_start = NULL;
+
+    pcontext->parser_ops->flush = NULL;
+
+    return VBP_OK;
+}
+
+uint32 vbp_allocate_query_data_vp8(vbp_context *pcontext)
+{
+    if (NULL != pcontext->query_data)
+    {
+        return VBP_PARM;
+    }
+
+    vbp_data_vp8 *query_data = vbp_malloc_set0(vbp_data_vp8, 1);
+    if (NULL == query_data)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer */
+    pcontext->query_data = (void *)query_data;
+
+    query_data->pic_data = vbp_malloc_set0(vbp_picture_data_vp8, VP8_MAX_NUM_PICTURES);
+    if (NULL == query_data->pic_data)
+    {
+        goto cleanup;
+    }
+
+    int i = 0;
+    for (i = 0; i < VP8_MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferVP8, 1);
+        if (NULL == query_data->pic_data[i].pic_parms)
+        {
+            goto cleanup;
+        }
+        query_data->pic_data[i].num_slices = 0;
+        query_data->pic_data[i].slc_data = vbp_malloc_set0(vbp_slice_data_vp8, VP8_MAX_NUM_SLICES);
+        if (NULL == query_data->pic_data[i].slc_data)
+        {
+            goto cleanup;
+        }
+    }
+
+    query_data->codec_data = vbp_malloc_set0(vbp_codec_data_vp8, 1);
+    if (NULL == query_data->codec_data)
+    {
+        goto cleanup;
+    }
+
+    query_data->prob_data = vbp_malloc_set0(VAProbabilityDataBufferVP8, 1);
+    if (NULL == query_data->prob_data)
+    {
+        goto cleanup;
+    }
+
+    query_data->IQ_matrix_buf = vbp_malloc_set0(VAIQMatrixBufferVP8, 1);
+    if (NULL == query_data->IQ_matrix_buf)
+    {
+        goto cleanup;
+    }
+
+    pcontext->parser_private = NULL;
+
+    return VBP_OK;
+
+cleanup:
+    vbp_free_query_data_vp8(pcontext);
+
+    return VBP_MEM;
+}
+
+uint32 vbp_free_query_data_vp8(vbp_context *pcontext)
+{
+    if (NULL == pcontext->query_data)
+    {
+        return VBP_OK;
+    }
+
+    vbp_data_vp8 *query_data = (vbp_data_vp8 *)pcontext->query_data;
+    if (query_data->pic_data)
+    {
+        int i = 0;
+        for (i = 0; i < VP8_MAX_NUM_PICTURES; i++)
+        {
+            if (query_data->pic_data[i].pic_parms)
+            {
+                free(query_data->pic_data[i].pic_parms);
+                query_data->pic_data[i].pic_parms = NULL;
+            }
+            if (query_data->pic_data[i].slc_data)
+            {
+                free(query_data->pic_data[i].slc_data);
+                query_data->pic_data[i].slc_data = NULL;
+            }
+        }
+        free(query_data->pic_data);
+        query_data->pic_data = NULL;
+    }
+
+    if (query_data->codec_data)
+    {
+        free(query_data->codec_data);
+        query_data->codec_data = NULL;
+    }
+
+    if (query_data->prob_data)
+    {
+        free(query_data->prob_data);
+        query_data->prob_data = NULL;
+    }
+
+    if (query_data->IQ_matrix_buf)
+    {
+        free(query_data->IQ_matrix_buf);
+        query_data->IQ_matrix_buf = NULL;
+    }
+
+    free(query_data);
+    pcontext->query_data = NULL;
+
+    return VBP_OK;
+}
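+
+/* Editor's sketch -- illustration only, mirroring the checks done in
+ * vbp_parse_start_code_vp8() below: bit 0 of byte 0 of a VP8 frame tag is the
+ * key-frame flag (0 = key frame), and a key frame carries the fixed start code
+ * 0x9d 0x01 0x2a at bytes 3..5:
+ *
+ *     int is_key_frame   = !(buf[0] & 0x01);
+ *     int has_start_code = (buf[3] == 0x9d) && (buf[4] == 0x01) && (buf[5] == 0x2a);
+ */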
+
+
+/**
+* parse decoder configuration data
+*/
+uint32 vbp_parse_init_data_vp8(vbp_context* pcontext)
+{
+    // never used: VP8 has no out-of-band decoder configuration data
+    return VBP_OK;
+}
+
+uint32 vbp_parse_start_code_vp8(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint8 *buf = cxt->parse_cubby.buf;
+    uint32 length = cxt->parse_cubby.size;
+    if (length < 3)
+    {
+        return VBP_DATA;
+    }
+
+    // check whether it is a key frame
+    if ((length >= 10) && !(buf[0] & 0x01))
+    {
+        uint8 *c = buf + 3;
+
+        // check start code
+        if ((c[0] != 0x9d) || (c[1] != 0x01) || (c[2] != 0x2a))
+        {
+            return VBP_PARM;
+        }
+    }
+
+    // ugly behavior: treat the whole buffer as a single list item
+    cxt->list.num_items = 1;
+
+    vbp_data_vp8 *query_data = (vbp_data_vp8*)pcontext->query_data;
+    query_data->num_pictures = 0;
+
+    return VBP_OK;
+}
+
+/**
+*
+* process parsing result after a frame is parsed
+*
+*/
+uint32 vbp_process_parsing_result_vp8( vbp_context *pcontext, int i)
+{
+    vp8_viddec_parser *parser = (vp8_viddec_parser *)pcontext->parser_cxt->codec_data;
+    switch (parser->info.frame_tag.frame_type)
+    {
+    case KEY_FRAME:
+        //ITRACE("This is a key frame.");
+        parser->info.decoded_frame_number++;
+        break;
+    case INTER_FRAME:
+        //ITRACE("This is an inter frame.");
+        parser->info.decoded_frame_number++;
+        break;
+    case SKIPPED_FRAME:
+        WTRACE("This is a skipped frame; nothing to do.");
+        break;
+    default:
+        ETRACE("Unknown frame type %d", parser->info.frame_tag.frame_type);
+        break;
+    }
+
+    //ITRACE("Decoded frame ID = %d", parser->info.decoded_frame_number);
+
+    return VBP_OK;
+}
+
+static void vbp_add_quantization_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data)
+{
+    vp8_Info *pi = &(parser->info);
+    VAIQMatrixBufferVP8 *IQ_buf = query_data->IQ_matrix_buf;
+
+    int i = 0;
+    if (pi->Segmentation.Enabled)
+    {
+        for (i = 0; i < MAX_MB_SEGMENTS; i++)
+        {
+            if (SEGMENT_ABSDATA == pi->Segmentation.AbsDelta)
+            {
+                IQ_buf->quantization_index[i][0] = pi->Segmentation.FeatureData[MB_LVL_ALT_Q][i];
+            }
+            else
+            {
+                int temp = pi->Quantization.Y1_AC + pi->Segmentation.FeatureData[MB_LVL_ALT_Q][i];
+                IQ_buf->quantization_index[i][0] = (temp >= 0) ? ((temp <= MAX_QINDEX) ?
temp : MAX_QINDEX) : 0; + } + } + } + else + { + for (i = 0; i < MAX_MB_SEGMENTS; i++) + { + IQ_buf->quantization_index[i][0] = pi->Quantization.Y1_AC; + } + } + + for (i = 0; i < MAX_MB_SEGMENTS; i++) + { + IQ_buf->quantization_index[i][1] = IQ_buf->quantization_index[i][0] + pi->Quantization.Y1_DC_Delta; + IQ_buf->quantization_index[i][2] = IQ_buf->quantization_index[i][0] + pi->Quantization.Y2_DC_Delta; + IQ_buf->quantization_index[i][3] = IQ_buf->quantization_index[i][0] + pi->Quantization.Y2_AC_Delta; + IQ_buf->quantization_index[i][4] = IQ_buf->quantization_index[i][0] + pi->Quantization.UV_DC_Delta; + IQ_buf->quantization_index[i][5] = IQ_buf->quantization_index[i][0] + pi->Quantization.UV_AC_Delta; + } +} + +static void vbp_add_probs_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data) +{ + FrameContextData *fc = &(parser->info.FrameContext); + VAProbabilityDataBufferVP8 *prob_data = query_data->prob_data; + + /* DCT coefficients probability */ + memcpy(prob_data->dct_coeff_probs, fc->DCT_Coefficients, 4*8*3*11*sizeof(uint8_t)); +} + +static void vbp_set_codec_data_vp8(vp8_viddec_parser *parser, vbp_codec_data_vp8* codec_data) +{ + vp8_Info *pi = &(parser->info); + + codec_data->frame_type = pi->frame_tag.frame_type; + codec_data->version_num = pi->frame_tag.version; + codec_data->show_frame = pi->frame_tag.show_frame; + + codec_data->frame_width = ((pi->width + 15) / 16) * 16; + codec_data->frame_height = ((pi->height + 15) / 16) * 16; + + codec_data->crop_top = 0; + codec_data->crop_bottom = codec_data->frame_height - pi->height; + codec_data->crop_left = 0; + codec_data->crop_right = codec_data->frame_width - pi->width; + + codec_data->refresh_alt_frame = pi->refresh_af; + codec_data->refresh_golden_frame = pi->refresh_gf; + codec_data->refresh_last_frame = pi->refresh_lf; + + codec_data->golden_copied = pi->golden_copied; + codec_data->altref_copied = pi->altref_copied; +} + +static uint32_t vbp_add_pic_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data) +{ + vp8_Info *pi = &(parser->info); + query_data->num_pictures++; + + if (query_data->num_pictures > 1) + { + ETRACE("Num of pictures (%d) per sample buffer exceeds the limit %d.", query_data->num_pictures, VP8_MAX_NUM_PICTURES); + return VBP_DATA; + } + + int i = 0; + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + ETRACE("MB address does not start from 0!"); + return VBP_DATA; + } + + vbp_picture_data_vp8 *pic_data = &(query_data->pic_data[pic_data_index]); + VAPictureParameterBufferVP8 *pic_parms = pic_data->pic_parms; + + pic_parms->frame_width = pi->width; + pic_parms->frame_height = pi->height; + + pic_parms->pic_fields.value = 0; + pic_parms->pic_fields.bits.key_frame = pi->frame_tag.frame_type; + pic_parms->pic_fields.bits.version = pi->frame_tag.version; + + /* Segmentation */ + pic_parms->pic_fields.bits.segmentation_enabled = pi->Segmentation.Enabled; + pic_parms->pic_fields.bits.update_mb_segmentation_map = pi->Segmentation.UpdateMap; + pic_parms->pic_fields.bits.update_segment_feature_data = pi->Segmentation.UpdateData; + memcpy(pic_parms->mb_segment_tree_probs, pi->Segmentation.TreeProbs, sizeof(unsigned char) * MB_FEATURE_TREE_PROBS); + + /* Loop filter data */ + pic_parms->pic_fields.bits.filter_type = pi->LoopFilter.Type; + pic_parms->pic_fields.bits.sharpness_level = pi->LoopFilter.Sharpness; + pic_parms->pic_fields.bits.loop_filter_adj_enable = pi->LoopFilter.DeltaEnabled; + pic_parms->pic_fields.bits.mode_ref_lf_delta_update = pi->LoopFilter.DeltaUpdate; 
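+
+    /* Editor's note -- not in the original patch: the loop below derives a
+     * per-segment baseline filter level. With absolute segment data
+     * (SEGMENT_ABSDATA) the feature value is used directly; otherwise it is a
+     * delta, i.e. clamp(LoopFilter.Level + FeatureData, 0, MAX_LOOP_FILTER). */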
+ + int baseline_filter_level[MAX_MB_SEGMENTS]; + if (pi->Segmentation.Enabled) + { + for (i = 0; i < MAX_MB_SEGMENTS; i++) + { + if (SEGMENT_ABSDATA == pi->Segmentation.AbsDelta) + { + baseline_filter_level[i] = pi->Segmentation.FeatureData[MB_LVL_ALT_LF][i]; + } + else + { + baseline_filter_level[i] = pi->LoopFilter.Level + pi->Segmentation.FeatureData[MB_LVL_ALT_LF][i]; + baseline_filter_level[i] = (baseline_filter_level[i] >= 0) ? ((baseline_filter_level[i] <= MAX_LOOP_FILTER) ? baseline_filter_level[i] : MAX_LOOP_FILTER) : 0; /* Clamp to valid range */ + } + } + } + else + { + for (i = 0; i < MAX_MB_SEGMENTS; i++) + { + baseline_filter_level[i] = pi->LoopFilter.Level; + } + } + for (i = 0; i < MAX_MB_SEGMENTS; i++) + { + pic_parms->loop_filter_level[i] = baseline_filter_level[i]; + } + if ((pic_parms->pic_fields.bits.version == 0) || (pic_parms->pic_fields.bits.version == 1)) + { + pic_parms->pic_fields.bits.loop_filter_disable = pic_parms->loop_filter_level[0] > 0 ? true : false; + } + memcpy(pic_parms->loop_filter_deltas_ref_frame, pi->LoopFilter.DeltasRef, sizeof(char) * MAX_REF_LF_DELTAS); + memcpy(pic_parms->loop_filter_deltas_mode, pi->LoopFilter.DeltasMode, sizeof(char) * MAX_MODE_LF_DELTAS); + + pic_parms->pic_fields.bits.sign_bias_golden = pi->sign_bias_golden; + pic_parms->pic_fields.bits.sign_bias_alternate = pi->sign_bias_alternate; + + pic_parms->pic_fields.bits.mb_no_coeff_skip = pi->mb_no_coeff_skip; + pic_parms->pic_fields.bits.mb_skip_coeff = pi->mb_skip_coeff; + + pic_parms->prob_skip_false = pi->prob_skip_false; + pic_parms->prob_intra = pi->prob_intra; + pic_parms->prob_last = pi->prob_lf; + pic_parms->prob_gf = pi->prob_gf; + + FrameContextData *fc = &(parser->info.FrameContext); + memcpy(pic_parms->y_mode_probs, fc->Y_Mode_Prob, sizeof(unsigned char) * 4); + memcpy(pic_parms->uv_mode_probs, fc->UV_Mode_Prob, sizeof(unsigned char) * 3); + /* Motion vector context */ + for (i = 0; i < 2; i++) + { + memcpy(pic_parms->mv_probs[i], fc->MVContext[i], sizeof(unsigned char) * 19); + } + + /* Bool coder */ + pic_parms->bool_coder_ctx.range = pi->bool_coder.range; + pic_parms->bool_coder_ctx.value = (pi->bool_coder.value >> 24) & 0xFF; + pic_parms->bool_coder_ctx.count = pi->bool_coder.count; + + //pic_parms->current_picture = VA_INVALID_SURFACE; + pic_parms->last_ref_frame = VA_INVALID_SURFACE; + pic_parms->golden_ref_frame = VA_INVALID_SURFACE; + pic_parms->alt_ref_frame = VA_INVALID_SURFACE; + pic_parms->out_of_loop_frame = VA_INVALID_SURFACE; //Reserved for future use + + /* specify the slice number */ + pic_data->num_slices = 0; + + return VBP_OK; +} + +static uint32_t vbp_add_slice_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data) +{ + vp8_Info *pi = &(parser->info); + uint32_t pic_index = query_data->num_pictures - 1; + uint32_t part_index = 0; + if (pic_index < 0) + { + ETRACE("Invalid picture data index."); + return VBP_DATA; + } + + vbp_picture_data_vp8 *pic_data = &(query_data->pic_data[pic_index]); + vbp_slice_data_vp8 *slc_data = &(pic_data->slc_data[pic_data->num_slices]); + + slc_data->buffer_addr = pi->source; + slc_data->slice_offset = 0; + slc_data->slice_size = pi->source_sz; + + VASliceParameterBufferVP8 *slc_parms = &(slc_data->slc_parms); + /* number of bytes in the slice data buffer for this slice */ + slc_parms->slice_data_size = slc_data->slice_size; + + /* the offset to the first byte of slice data */ + slc_parms->slice_data_offset = 0; + + /* see VA_SLICE_DATA_FLAG_XXX definitions */ + slc_parms->slice_data_flag = 
VA_SLICE_DATA_FLAG_ALL;
+
+    /* the offset to the first bit of MB from the first byte of slice data */
+    slc_parms->macroblock_offset = pi->header_bits;
+
+    /* Token Partitions */
+    slc_parms->num_of_partitions = pi->partition_count;
+    slc_parms->partition_size[0] = pi->frame_tag.first_part_size;
+    for (part_index = 1; part_index < 9; part_index++)
+    {
+        slc_parms->partition_size[part_index] = pi->partition_size[part_index - 1];
+    }
+
+    pic_data->num_slices++;
+    if (pic_data->num_slices > VP8_MAX_NUM_SLICES) {
+        ETRACE("Number of slices (%d) per picture exceeds the limit (%d).", pic_data->num_slices, VP8_MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+    return VBP_OK;
+}
+
+/*
+*
+* fill the query data structure after the sample buffer is parsed
+*
+*/
+uint32 vbp_populate_query_data_vp8(vbp_context *pcontext)
+{
+    int32_t error = VBP_OK;
+
+    vbp_data_vp8 *query_data = NULL;
+    vp8_viddec_parser *parser = NULL;
+
+    parser = (vp8_viddec_parser *)pcontext->parser_cxt->codec_data;
+    query_data = (vbp_data_vp8 *)pcontext->query_data;
+
+    /* buffer number */
+    query_data->buf_number = buffer_counter;
+
+    /* Populate picture data */
+    error = vbp_add_pic_data_vp8(parser, query_data);
+
+    /* Populate slice data */
+    if (error == VBP_OK)
+    {
+        error = vbp_add_slice_data_vp8(parser, query_data);
+        if (error != VBP_OK)
+            return error;
+    }
+
+    /* Populate codec data */
+    vbp_set_codec_data_vp8(parser, query_data->codec_data);
+
+    /* Populate probability table */
+    vbp_add_probs_data_vp8(parser, query_data);
+
+    /* Populate quantization */
+    vbp_add_quantization_data_vp8(parser, query_data);
+
+    /* propagate a picture-data error instead of silently reporting success */
+    return error;
+}
diff --git a/mixvbp/vbp_manager/vbp_vp8_parser.h b/mixvbp/vbp_manager/vbp_vp8_parser.h
new file mode 100755
index 0000000..3b6407e
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_vp8_parser.h
@@ -0,0 +1,67 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_VP8_PARSER_H
+#define VBP_VP8_PARSER_H
+
+/*
+ * set up the parser's entry points
+ */
+uint32 vbp_init_parser_entries_vp8(vbp_context *pcontext);
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_vp8(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_vp8(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_vp8(vbp_context *pcontext);
+
+/*
+ * parse start code. Only length-prefixed mode is supported; start-code-prefixed input is not.
+ */ +uint32 vbp_parse_start_code_vp8(vbp_context *pcontext); + +/* + * process parsing result + */ +uint32 vbp_process_parsing_result_vp8(vbp_context *pcontext, int list_index); + +/* + * query parsing result + */ +uint32 vbp_populate_query_data_vp8(vbp_context *pcontext); + + + +#endif /*VBP_VP8_PARSER_H*/ diff --git a/mixvbp/vbp_manager/viddec_parse_sc.c b/mixvbp/vbp_manager/viddec_parse_sc.c new file mode 100755 index 0000000..b3f8d0b --- /dev/null +++ b/mixvbp/vbp_manager/viddec_parse_sc.c @@ -0,0 +1,218 @@ +#include "viddec_pm_parse.h" + +#ifndef MFDBIGENDIAN +uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state) +{ + uint8_t *ptr; + uint32_t data_left=0, phase = 0, ret = 0; + uint32_t single_byte_table[3][2] = {{1, 0}, {2, 0}, {2, 3}}; + viddec_sc_parse_cubby_cxt_t *cxt; + /* What is phase?: phase is a value between [0-4], we keep track of consecutive '0's with this. + Any time a '0' is found its incremented by 1(uptp 2) and reset to '0' if a zero not found. + if 0xXX code is found and current phase is 2, its changed to 3 which means we found the pattern + we are looking for. Its incremented to 4 once we see a byte after this pattern */ + cxt = ( viddec_sc_parse_cubby_cxt_t *)in; + data_left = cxt->size; + ptr = cxt->buf; + phase = cxt->phase; + cxt->sc_end_pos = -1; + pcxt=pcxt; + + /* parse until there is more data and start code not found */ + while ((data_left > 0) && (phase < 3)) + { + /* Check if we are 16 bytes aligned & phase=0 & more than 16 bytes left, + if thats the case we can check work at a time instead of byte */ + + if (((((uint32_t)ptr) & 0xF) == 0) && (phase == 0) && (data_left > 0xF)) + { + // 15 14 13 12 11 10 09 08 07 06 05 04 03 02 01 00 -- check 16 bytes at one time + // 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? -- if no 00 at byte position: 15,13,11,09,07,05,03,01 + // it is impossible to have 0x010000 at these 16 bytes. + // so we cound drop 16 bytes one time (increase ptr, decrease data_left and keep phase = 0) + __asm__( + //Data input + "movl %1, %%ecx\n\t" //ptr-->ecx + "movl %0, %%eax\n\t" //data_left-->eax + + //Main compare loop + "MATCH_8_ZERO:\n\t" + "pxor %%xmm0,%%xmm0\n\t" //0 --> xmm0 + "pcmpeqb (%%ecx),%%xmm0\n\t" //uint128_data[ptr] eq xmm0 --> xmm0 , For each byte do calculation, (byte == 0x00)?0xFF:0x00 + "pmovmskb %%xmm0, %%edx\n\t" //xmm0(128)-->edx(32), edx[0]=xmm0[7], edx[1]=xmm0[15], ... , edx[15]=xmm0[127], edx[31-16]=0x0000 + "test $0xAAAA, %%edx\n\t" //edx& 1010 1010 1010 1010b + "jnz DATA_RET\n\t" //Not equal to zero means that at least one byte is 0x00. + + "PREPARE_NEXT_MATCH:\n\t" + "add $0x10, %%ecx\n\t" //16 + ecx --> ecx + "sub $0x10, %%eax\n\t" //eax-16 --> eax + "cmp $0x10, %%eax\n\t" //eax >= 16? 
+ "jge MATCH_8_ZERO\n\t" //search next 16 bytes + + "DATA_RET:\n\t" + "movl %%ecx, %1\n\t" //ecx --> ptr + "movl %%eax, %0\n\t" //eax --> data_left + : "+m"(data_left), "+m"(ptr) //data_left --> eax, ptr -> ecx + : + :"eax", "ecx", "edx", "xmm0" + ); + + if (data_left <= 0) + { + break; + } + } + + //check byte one by one + // (*ptr) 0 1 >=2 + // phase=0 1 0 0 + // phase=1 2 0 0 + // phase=2 2 3 0 + if (*ptr >= 2) + { + phase = 0; + } + else + { + phase = single_byte_table[phase][*ptr]; + } + ptr ++; + data_left --; + } + if ((data_left > 0) && (phase == 3)) + { + viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state; + cxt->sc_end_pos = cxt->size - data_left; + state->next_sc = cxt->buf[cxt->sc_end_pos]; + state->second_scprfx_length = 3; + phase++; + ret = 1; + } + cxt->phase = phase; + /* Return SC found only if phase is 4, else always success */ + return ret; +} + +#else +#define FIRST_STARTCODE_BYTE 0x00 +#define SECOND_STARTCODE_BYTE 0x00 +#define THIRD_STARTCODE_BYTE 0x01 + +/* BIG ENDIAN: Must be the second and fourth byte of the bytestream for this to work */ +/* LITTLE ENDIAN: Must be the second and fourth byte of the bytestream for this to work */ +/* these are little-endian defines */ +#define SC_BYTE_MASK0 0x00ff0000 /* little-endian */ +#define SC_BYTE_MASK1 0x000000ff /* little-endian */ + +/* Parse for Sc code of pattern 0x00 0x00 0xXX in the current buffer. Returns either sc found or success. + The conext is updated with current phase and sc_code position in the buffer. +*/ +uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state) +{ + uint8_t *ptr; + uint32_t size; + uint32_t data_left=0, phase = 0, ret = 0; + viddec_sc_parse_cubby_cxt_t *cxt; + /* What is phase?: phase is a value between [0-4], we keep track of consecutive '0's with this. + Any time a '0' is found its incremented by 1(uptp 2) and reset to '0' if a zero not found. + if 0xXX code is found and current phase is 2, its changed to 3 which means we found the pattern + we are looking for. 
It's incremented to 4 once we see a byte after this pattern. */
+    cxt = ( viddec_sc_parse_cubby_cxt_t *)in;
+    size = 0;
+    data_left = cxt->size;
+    ptr = cxt->buf;
+    phase = cxt->phase;
+    cxt->sc_end_pos = -1;
+    pcxt = pcxt;
+
+    /* parse while there is more data and the start code has not been found */
+    while ((data_left > 0) && (phase < 3))
+    {
+        /* Check if we are word aligned & phase=0; in that case we can check
+           a word at a time instead of a byte */
+        if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0))
+        {
+            while (data_left > 3)
+            {
+                uint32_t data;
+                char mask1 = 0, mask2 = 0;
+
+                data = *((uint32_t *)ptr);
+                mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+                mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
+                /* If the second byte and fourth byte are not zeros then we cannot have a start code here, as we need
+                   two consecutive zero bytes for a start code pattern */
+                if (mask1 && mask2)
+                {   /* No zero bytes, so skip 4 bytes and start over */
+                    ptr += 4;
+                    size += 4;
+                    data_left -= 4;
+                    continue;
+                }
+                else
+                {
+                    break;
+                }
+            }
+        }
+
+        /* At this point either the data is not on a word boundary, or phase > 0, or we are on a word boundary but
+           detected two zero bytes in the word, so we look one byte at a time */
+        if (data_left > 0)
+        {
+            if (*ptr == FIRST_STARTCODE_BYTE)
+            {   /* Phase can become 3 only when the third start code byte is found */
+                phase++;
+                ptr++;
+                size++;
+                data_left--;
+                if (phase > 2)
+                {
+                    phase = 2;
+
+                    if ( (((uint32_t)ptr) & 0x3) == 0 )
+                    {
+                        while ( data_left > 3 )
+                        {
+                            if (*((uint32_t *)ptr) != 0)
+                            {
+                                break;
+                            }
+                            ptr += 4;
+                            size += 4;
+                            data_left -= 4;
+                        }
+                    }
+                }
+            }
+            else
+            {
+                if ((*ptr == THIRD_STARTCODE_BYTE) && (phase == 2))
+                {   /* Match for a start code, so update the context with the byte position */
+                    phase = 3;
+                    cxt->sc_end_pos = size;
+                }
+                else
+                {
+                    phase = 0;
+                }
+                ptr++;
+                size++;
+                data_left--;
+            }
+        }
+    }
+    if ((data_left > 0) && (phase == 3))
+    {
+        viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state;
+        cxt->sc_end_pos++;
+        state->next_sc = cxt->buf[cxt->sc_end_pos];
+        state->second_scprfx_length = 3;
+        phase++;
+        ret = 1;
+    }
+    cxt->phase = phase;
+    /* Return 1 (start code found) only if phase reached 4; otherwise 0 */
+    return ret;
+}
+#endif
diff --git a/mixvbp/vbp_manager/viddec_pm_parser_ops.c b/mixvbp/vbp_manager/viddec_pm_parser_ops.c
new file mode 100755
index 0000000..6879a6a
--- /dev/null
+++ b/mixvbp/vbp_manager/viddec_pm_parser_ops.c
@@ -0,0 +1,97 @@
+#include <stdint.h>  /* assumed headers; the original angle-bracket names are unreadable in this patch */
+#include <stdio.h>
+#include "viddec_pm.h"
+#include "viddec_parser_ops.h"
+#include "viddec_pm_utils_bstream.h"
+
+int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits)
+{
+    int32_t ret = 1;
+    viddec_pm_cxt_t *cxt;
+
+    cxt = (viddec_pm_cxt_t *)parent;
+    ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits, 1);
+    if (ret == -1)
+    {
+        DEB("FAILURE!!!! getbits returned %d\n", ret);
+    }
+
+    return ret;
+}
+
+int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits)
+{
+    int32_t ret = 1;
+    viddec_pm_cxt_t *cxt;
+
+    cxt = (viddec_pm_cxt_t *)parent;
+    ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits, 0);
+    return ret;
+}
+
+int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits)
+{
+    int32_t ret = 1;
+    viddec_pm_cxt_t *cxt;
+
+    cxt = (viddec_pm_cxt_t *)parent;
+    ret = viddec_pm_utils_bstream_skipbits(&(cxt->getbits), num_bits);
+    return ret;
+}
+
+int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, uint8_t *is_emul)
+{
+    int32_t ret = 1;
+    viddec_pm_cxt_t *cxt;
+
+    cxt = (viddec_pm_cxt_t *)parent;
+    viddec_pm_utils_skip_if_current_is_emulation(&(cxt->getbits));
+    viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), bit, byte, is_emul);
+
+    return ret;
+}
+
+int32_t viddec_pm_is_nomoredata(void *parent)
+{
+    int32_t ret = 0;
+    viddec_pm_cxt_t *cxt;
+
+    cxt = (viddec_pm_cxt_t *)parent;
+    ret = viddec_pm_utils_bstream_nomorerbspdata(&(cxt->getbits));
+    return ret;
+}
+
+uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte)
+{
+    int32_t ret = -1;
+    viddec_pm_cxt_t *cxt;
+
+    cxt = (viddec_pm_cxt_t *)parent;
+    ret = viddec_pm_utils_bstream_get_current_byte(&(cxt->getbits), byte);
+    return ret;
+}
+
+void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error)
+{
+    viddec_pm_cxt_t *cxt;
+    cxt = (viddec_pm_cxt_t *)parent;
+    cxt->next_workload_error_eos = error;
+}
+
+void viddec_pm_set_late_frame_detect(void *parent)
+{
+    viddec_pm_cxt_t *cxt;
+    cxt = (viddec_pm_cxt_t *)parent;
+    cxt->late_frame_detect = true;
+}
+
+int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item, uint32_t next)
+{
+    return 1;  /* workload items are not used in the VBP build */
+}
+
+void viddec_pm_setup_userdata(viddec_workload_item_t *wi)
+{
+    wi = wi;  /* unused in the VBP build; touch to silence the warning */
+}
diff --git a/mixvbp/vbp_manager/viddec_pm_utils_bstream.c b/mixvbp/vbp_manager/viddec_pm_utils_bstream.c
new file mode 100755
index 0000000..6939fef
--- /dev/null
+++ b/mixvbp/vbp_manager/viddec_pm_utils_bstream.c
@@ -0,0 +1,500 @@
+#include <stdint.h>  /* assumed headers; the original angle-bracket names are unreadable in this patch */
+#include <string.h>
+#include "viddec_pm_utils_bstream.h"
+
+/* Internal data structure for calculating required bits. */
+typedef union
+{
+    uint8_t byte[8];
+    uint32_t word[2];
+} viddec_pm_utils_getbits_t;
+
+void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt);
+uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index);
+extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
+/* Bytes left in the cubby buffer which have not been consumed yet */
+static inline uint32_t viddec_pm_utils_bstream_bytesincubby(viddec_pm_utils_bstream_buf_cxt_t *cxt)
+{
+    return (cxt->buf_end - cxt->buf_index);
+}
+
+/*
+  This function checks to see if we are at the last valid byte for the current access unit.
+*/
+uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    uint32_t data_remaining = 0;
+    uint8_t ret = 0;
+
+    /* How much data is remaining, including the current byte to be processed. */
+    data_remaining = cxt->list->total_bytes - (cxt->au_pos + (cxt->bstrm_buf.buf_index - cxt->bstrm_buf.buf_st));
+
+    /* A start code prefix can be 00 00 01 or 00 00 00 01. We always check only for 00 00 01.
+       data_remaining should be 1 for 00 00 01, as we don't count the sc prefix and the 1 is the current byte.
+       data_remaining should be 2 for 00 00 00 01, as we don't count the sc prefix: the current byte plus the
+       extra 00 remain, since we only matched 00 00 01.
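+       Illustration: with the 4-byte form 00 00 00 01 following, the extra
+       leading 00 is counted with this access unit; so when two bytes remain
+       and the last one is 0x00, the current byte is effectively the final
+       RBSP byte (the case 2 test below).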
+       NOTE: This is used for H264 only.
+    */
+    switch (data_remaining)
+    {
+    case 2:
+        /* if the next byte is 0 and it is the last byte in the access unit */
+        ret = (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x0);
+        break;
+    case 1:
+        /* if the current byte is the last byte */
+        ret = 1;
+        break;
+    default:
+        break;
+    }
+    return ret;
+}
+
+#ifndef VBP
+/*
+  This function returns true if the cubby buffer has the last byte of the access unit.
+*/
+uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    uint32_t last_byte_offset_plus_one = 0;
+    uint8_t ret = 0;
+    /* Check to see if the last access unit byte offset is the last byte for the current access unit.
+       End represents the first invalid byte, so (end - st) gives the number of bytes. */
+    last_byte_offset_plus_one = cxt->au_pos + (cxt->bstrm_buf.buf_end - cxt->bstrm_buf.buf_st);
+    if ((int32_t)last_byte_offset_plus_one >= cxt->list->total_bytes)
+    {
+        ret = 1;
+    }
+    return ret;
+}
+#endif
+
+/* This function initializes the scratch buffer, which is used for staging already read data, due to DMA limitations */
+static inline void viddec_pm_utils_bstream_scratch_init(viddec_pm_utils_bstream_scratch_cxt_t *cxt)
+{
+    cxt->st = cxt->size = cxt->bitoff = 0;
+}
+#ifndef VBP
+/* This function tells us how much more data is in the current es buffer from the current position. It is used to figure out if
+   we need to go to the next es buffer
+*/
+static inline uint32_t viddec_pm_utils_bstream_datafromindex(viddec_pm_utils_list_t *list, uint32_t index, uint32_t offset)
+{
+    uint32_t ret = 0;
+    int32_t val = 0;
+    val = (list->data[index].edpos <= (uint32_t)list->total_bytes) ? list->data[index].edpos : (uint32_t)list->total_bytes;
+    val = val - (int32_t)offset;
+    if (val > 0) ret = (uint32_t)val;
+    return ret;
+}
+
+/* This function seeks to the byte offset position starting from lst_index; if more data is present in the current ES buffer pointed to by
+   lst_index, it returns the remaining data in the current buffer along with the physical address of the byte offset. On return, the
+   lst_index parameter holds the index of the ES buffer in the list which has the byte_offset
+*/
+static inline uint32_t viddec_pm_utils_bstream_maxbytes_from_index(viddec_pm_utils_bstream_cxt_t *cxt,
+        uint32_t *lst_index,
+        uint32_t byte_offset,
+        uint32_t *physaddr)
+{
+    viddec_pm_utils_list_t *list;
+    uint32_t last_byte_offst = 0, bytes_left = 0; /* default return value is 0 bytes */
+
+    list = cxt->list;
+    while (*lst_index < list->num_items)
+    {
+        /* Check to see if we reached the buffer with the last valid byte of the current access unit; the list can have data beyond the current access unit */
+        last_byte_offst = (list->data[*lst_index].edpos <= (uint32_t)list->total_bytes) ?
list->data[*lst_index].edpos : (uint32_t)list->total_bytes;
+        if (byte_offset < last_byte_offst)
+        {   /* Found a match, so return with the data remaining */
+            bytes_left = viddec_pm_utils_bstream_datafromindex(list, *lst_index, byte_offset);
+            *physaddr = viddec_pm_utils_bstream_getphys(cxt, byte_offset, *lst_index);
+            break;
+        }
+        *lst_index += 1;
+    }
+    return bytes_left;
+}
+
+/* This function copies trailing bytes of the cubby bitstream buffer to the scratch buffer */
+static inline void viddec_pm_utils_bstream_scratch_copyto(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data, uint32_t num_bytes)
+{
+    uint32_t i = 0;
+    for (i = 0; i < num_bytes; i++)
+    {
+        cxt->buf_scratch[i] = *data;
+        data++;
+        cxt->size++;
+    }
+}
+
+/* This function copies trailing bytes from the scratch buffer back to the bitstream buffer */
+static inline void viddec_pm_utils_bstream_scratch_copyfrom(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data)
+{
+    uint32_t i = 0;
+    for (i = 0; i < cxt->size; i++)
+    {
+        *data = cxt->buf_scratch[i];
+        data++;
+    }
+}
+#endif
+
+/* This function populates the requested number of bytes into the data parameter, skipping emulation prevention bytes if needed */
+static inline int32_t viddec_pm_utils_getbytes(viddec_pm_utils_bstream_buf_cxt_t *bstream,
+        viddec_pm_utils_getbits_t *data,   /* gets populated with read bytes */
+        uint32_t *act_bytes,               /* actual number of bytes read; can be more due to emulation prevention bytes */
+        uint32_t *phase,                   /* phase for emulation */
+        uint32_t num_bytes,                /* requested number of bytes */
+        uint32_t emul_reqd,                /* if true we look for emulation prevention */
+        uint8_t is_offset_zero             /* are we on an aligned byte position for the first byte? */
+                                              )
+{
+    int32_t ret = 1;
+    uint8_t cur_byte = 0, valid_bytes_read = 0;
+    *act_bytes = 0;
+
+    while (valid_bytes_read < num_bytes)
+    {
+        cur_byte = bstream->buf[bstream->buf_index + *act_bytes];
+        if (emul_reqd && (cur_byte == 0x3) && (*phase == 2))
+        {   /* skip the emulation byte; we update the phase only if emulation prevention is enabled */
+            *phase = 0;
+        }
+        else
+        {
+            data->byte[valid_bytes_read] = cur_byte;
+            /*
+              We only update the phase for the first byte if the bit offset is 0. If it is not 0 then it was already accounted for in the past.
+              From the second byte onwards we always look to update the phase.
+            */
+            if ((*act_bytes != 0) || (is_offset_zero))
+            {
+                if (cur_byte == 0)
+                {
+                    /* Update the phase only if emulation prevention is required */
+                    *phase += ( ((*phase < 2) && emul_reqd ) ? 1 : 0 );
+                }
+                else
+                {
+                    *phase = 0;
+                }
+            }
+            valid_bytes_read++;
+        }
+        *act_bytes += 1;
+    }
+    /* Check to see if we reached the end during the above operation. We might be out of range, but it is safe since our array
+       has at least MIN_DATA extra bytes and the maximum out of bounds we will go is 5 bytes */
+    if ((bstream->buf_index + *act_bytes - 1) >= bstream->buf_end)
+    {
+        ret = -1;
+    }
+    return ret;
+}
+
+/*
+  This function checks to see if we have the minimum amount of data, else it tries to reload as much as it can.
+  It always returns the data left in the current buffer through the parameter.
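+  (In VBP builds this path never reloads: the whole access unit is already in
+  the cubby, so only the byte count is reported; the DMA reload loop below is
+  for the firmware build.)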
+*/
+static inline void viddec_pm_utils_check_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *data_left)
+{
+#ifdef VBP
+    *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+#else
+    uint8_t isReload = 0;
+
+    *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+    /* If we have the minimum data we should continue, else try to read more data */
+    if (*data_left < MIN_DATA)
+    {
+        isReload = 1;
+        while (isReload)
+        {
+            viddec_pm_utils_bstream_reload(cxt);
+            *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+            /* Break out of the loop if we reached the last byte or we have enough data */
+            isReload = !((*data_left >= MIN_DATA) || (viddec_pm_utils_bstream_nomoredata(cxt) == 1));
+        }
+    }
+#endif
+}
+
+/*
+  This function moves the stream position by N bits (the bits parameter). The bytes parameter tells us how many bytes were
+  read for these N bits (the count can differ because of emulation bytes).
+*/
+static inline void viddec_pm_utils_update_skipoffsets(viddec_pm_utils_bstream_buf_cxt_t *bstream, uint32_t bits, uint32_t bytes)
+{
+    if ((bits & 0x7) == 0)
+    {
+        bstream->buf_bitoff = 0;
+        bstream->buf_index += bytes;
+    }
+    else
+    {
+        bstream->buf_bitoff = bits & 0x7;
+        bstream->buf_index += (bytes - 1);
+    }
+}
+
+/*
+  This function skips the emulation byte if necessary.
+  During normal flow we skip the emulation byte only if we read at least one bit after the two zero bytes.
+  However in some cases we might send data to HW without reading the next bit, in which case we are sitting on an
+  emulation byte. To avoid sending invalid data, this function has to be called first to skip it.
+*/
+
+void viddec_pm_utils_skip_if_current_is_emulation(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    viddec_pm_utils_bstream_buf_cxt_t *bstream = &(cxt->bstrm_buf);
+
+    if (cxt->is_emul_reqd &&
+            (cxt->phase >= 2) &&
+            (bstream->buf_bitoff == 0) &&
+            (bstream->buf[bstream->buf_index] == 0x3) )
+    {
+        bstream->buf_index += 1;
+        cxt->phase = 0;
+    }
+}
+
+#ifndef VBP
+/*
+  This function gets the physical address of the requested au offset (pos).
+*/
+
+uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index)
+{
+    uint32_t ret = 0, last_byte_offst = 0;
+    viddec_pm_utils_list_t *list;
+
+    list = cxt->list;
+    while (lst_index < list->num_items)
+    {
+        last_byte_offst = (list->data[lst_index].edpos <= (uint32_t)list->total_bytes) ? list->data[lst_index].edpos : (uint32_t)list->total_bytes;
+        if (pos < last_byte_offst)
+        {
+#ifndef MFDBIGENDIAN
+            ret = (uint32_t)list->sc_ibuf[lst_index].buf;
+#else
+            ret = list->sc_ibuf[lst_index].phys;
+#endif
+            ret += (pos - list->data[lst_index].stpos);
+            if (lst_index == 0) ret += list->start_offset;
+            break;
+        }
+        lst_index++;
+    }
+    return ret;
+}
+
+/*
+  Actual reload function which uses DMA to refill the bitstream buffer.
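+  Steps: (1) stash the unconsumed trailing bytes in the scratch buffer,
+  (2) DMA up to CUBBY_SIZE fresh bytes from the ES buffer list at the current
+  access-unit offset, (3) copy the scratch bytes back in front of the new data
+  and rebase buf_st/buf_index/buf_end/buf_bitoff.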
+*/ +void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt) +{ + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + + /* Update current offset positions */ + cxt->au_pos += (bstream->buf_index - bstream->buf_st); + bstream->buf_st = bstream->buf_index; + /* copy leftover bytes into scratch */ + { + int32_t cur_bytes=0; + viddec_pm_utils_bstream_scratch_init(&(cxt->scratch)); + cur_bytes = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf)); + if (cur_bytes > 0) + { + viddec_pm_utils_bstream_scratch_copyto(&(cxt->scratch), &(bstream->buf[bstream->buf_index]), cur_bytes); + cxt->scratch.bitoff = bstream->buf_bitoff; + } + } + /* Initiate DMA and copyback scratch data */ + { + uint32_t data_left = 0, ddr_mask=0; + /* calculate necesary aligmnets and copy data */ + { + uint32_t ddr_addr=0, data_wrote=0; + uint32_t byte_pos; + /* byte pos points to the position from where we want to read data.*/ + byte_pos = cxt->au_pos + cxt->scratch.size; + data_left = viddec_pm_utils_bstream_maxbytes_from_index(cxt, &(cxt->list_off), byte_pos, &ddr_addr); + if (data_left > CUBBY_SIZE) + { + data_left = CUBBY_SIZE; + } + if (data_left != 0) + { + ddr_mask = ddr_addr & 0x3; + ddr_addr = ddr_addr & ~0x3; + data_wrote = cp_using_dma(ddr_addr, (uint32_t)&(bstream->buf[MIN_DATA]), (data_left + ddr_mask), 0, 1); + } + } + /* copy scratch data back to buffer and update offsets */ + { + uint32_t index=0; + index = MIN_DATA + ddr_mask; + index -= cxt->scratch.size; + viddec_pm_utils_bstream_scratch_copyfrom(&(cxt->scratch), &(bstream->buf[index])); + bstream->buf_st = bstream->buf_index = index; + bstream->buf_end = data_left + cxt->scratch.size + bstream->buf_st; + bstream->buf_bitoff = cxt->scratch.bitoff; + } + } +} +#endif + +/* + Init function called by parser manager after sc code detected. +*/ +void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul) +{ +#ifdef VBP + cxt->emulation_byte_counter = 0; +#endif + + cxt->au_pos = 0; + cxt->list = list; + cxt->list_off = 0; + cxt->phase = 0; + cxt->is_emul_reqd = is_emul; + cxt->bstrm_buf.buf_st = cxt->bstrm_buf.buf_end = cxt->bstrm_buf.buf_index = cxt->bstrm_buf.buf_bitoff = 0; +} + +/* Get the requested byte position. If the byte is already present in cubby its returned + else we seek forward and get the requested byte. + Limitation:Once we seek forward we can't return back. +*/ +int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t *cxt, uint8_t *byte) +{ + int32_t ret = -1; + uint32_t data_left=0; + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + viddec_pm_utils_check_bstream_reload(cxt, &data_left); + if (data_left != 0) + { + *byte = bstream->buf[bstream->buf_index]; + ret = 1; + } + return ret; +} + +/* + Function to skip N bits ( N<= 32). 
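+  Emulation prevention bytes can make the bytes actually consumed exceed what
+  the bit count implies; in VBP builds the surplus is recorded in
+  emulation_byte_counter for the caller.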
+*/ +int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits) +{ + int32_t ret = -1; + uint32_t data_left=0; + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + viddec_pm_utils_check_bstream_reload(cxt, &data_left); + if ((num_bits <= 32) && (num_bits > 0) && (data_left != 0)) + { + uint8_t bytes_required=0; + + bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3; + if (bytes_required <= data_left) + { + viddec_pm_utils_getbits_t data; + uint32_t act_bytes =0; + if (viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &(cxt->phase), bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1) + { + uint32_t total_bits=0; + total_bits=num_bits+bstream->buf_bitoff; + viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes); + ret=1; + +#ifdef VBP + if (act_bytes > bytes_required) + { + cxt->emulation_byte_counter = act_bytes - bytes_required; + } +#endif + } + } + } + return ret; +} + +/* + Function to get N bits ( N<= 32). +*/ +int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip) +{ + uint32_t data_left=0; + int32_t ret = -1; + /* STEP 1: Make sure that we have at least minimum data before we calculate bits */ + viddec_pm_utils_check_bstream_reload(cxt, &data_left); + + if ((num_bits <= 32) && (num_bits > 0) && (data_left != 0)) + { + uint32_t bytes_required=0; + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3; + + /* Step 2: Make sure we have bytes for requested bits */ + if (bytes_required <= data_left) + { + uint32_t act_bytes, phase; + viddec_pm_utils_getbits_t data; + phase = cxt->phase; + /* Step 3: Due to emualtion prevention bytes sometimes the bytes_required > actual_required bytes */ + if (viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &phase, bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1) + { + uint32_t total_bits=0; + uint32_t shift_by=0; + /* zero out upper bits */ + /* LIMITATION:For some reason compiler is optimizing it to NOP if i do both shifts + in single statement */ + data.byte[0] <<= bstream->buf_bitoff; + data.byte[0] >>= bstream->buf_bitoff; + +#ifndef MFDBIGENDIAN + data.word[0] = SWAP_WORD(data.word[0]); + data.word[1] = SWAP_WORD(data.word[1]); +#endif + total_bits = num_bits+bstream->buf_bitoff; + if (total_bits > 32) + { + /* We have to use both the words to get required data */ + shift_by = total_bits - 32; + data.word[0] = (data.word[0] << shift_by) | ( data.word[1] >> (32 - shift_by)); + } + else + { + shift_by = 32 - total_bits; + data.word[0] = data.word[0] >> shift_by; + } + *out = data.word[0]; + if (skip) + { + /* update au byte position if needed */ + viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes); + cxt->phase = phase; + +#ifdef VBP + if (act_bytes > bytes_required) + { + cxt->emulation_byte_counter += act_bytes - bytes_required; + } +#endif + } + + ret =1; + } + } + } + return ret; +} diff --git a/mixvbp/vbp_plugin/common/README b/mixvbp/vbp_plugin/common/README new file mode 100644 index 0000000..938f24a --- /dev/null +++ b/mixvbp/vbp_plugin/common/README @@ -0,0 +1 @@ +Some common codes will be added here. 
diff --git a/mixvbp/vbp_plugin/h264/Android.mk b/mixvbp/vbp_plugin/h264/Android.mk new file mode 100755 index 0000000..f015988 --- /dev/null +++ b/mixvbp/vbp_plugin/h264/Android.mk @@ -0,0 +1,66 @@ +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + h264parse.c \ + h264parse_bsd.c \ + h264parse_math.c \ + h264parse_mem.c \ + h264parse_sei.c \ + h264parse_sh.c \ + h264parse_pps.c \ + h264parse_sps.c \ + h264parse_dpb.c \ + viddec_h264_parse.c \ + mix_vbp_h264_stubs.c + +LOCAL_CFLAGS := -DVBP -DHOST_ONLY + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/include \ + $(MIXVBP_DIR)/include \ + $(MIXVBP_DIR)/vbp_manager/include \ + $(MIXVBP_DIR)/vbp_manager/h264/include + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libmixvbp_h264 + +LOCAL_SHARED_LIBRARIES := \ + libmixvbp + +include $(BUILD_SHARED_LIBRARY) + +include $(CLEAR_VARS) +PLATFORM_SUPPORT_AVC_SHORT_FORMAT := baytrail + +ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),) +LOCAL_SRC_FILES := \ + h264parse.c \ + h264parse_bsd.c \ + h264parse_math.c \ + h264parse_mem.c \ + h264parse_sei.c \ + h264parse_pps.c \ + h264parse_sps.c \ + h264parse_dpb.c \ + h264parse_sh.c \ + secvideo/baytrail/viddec_h264secure_parse.c \ + mix_vbp_h264_stubs.c + +LOCAL_CFLAGS := -DVBP -DHOST_ONLY -DUSE_AVC_SHORT_FORMAT + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/include \ + $(MIXVBP_DIR)/include \ + $(MIXVBP_DIR)/vbp_manager/include \ + $(MIXVBP_DIR)/vbp_manager/h264/include + + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libmixvbp_h264secure +LOCAL_SHARED_LIBRARIES := libmixvbp + +include $(BUILD_SHARED_LIBRARY) + +endif diff --git a/mixvbp/vbp_plugin/h264/h264parse.c b/mixvbp/vbp_plugin/h264/h264parse.c new file mode 100755 index 0000000..9e36b7c --- /dev/null +++ b/mixvbp/vbp_plugin/h264/h264parse.c @@ -0,0 +1,795 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved. 
+// +// Description: h264 parser +// +///////////////////////////////////////////////////////////////////////*/ + + +#include "h264.h" +#include "h264parse.h" +#include "h264parse_dpb.h" + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + + + +h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t *UseDefaultScalingMatrix, h264_Info* pInfo) +{ + int32_t j, scanj; + int32_t delta_scale, lastScale, nextScale; + + const uint8_t ZZ_SCAN[16] = + { 0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15 + }; + + const uint8_t ZZ_SCAN8[64] = + { 0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5, + 12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28, + 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51, + 58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63 + }; + + lastScale = 8; + nextScale = 8; + scanj = 0; + + for (j=0; jSliceHeader; + + /////////////////////////////////////////////////// + // Reload SPS/PPS while + // 1) Start of Frame (in case of context switch) + // 2) PPS id changed + /////////////////////////////////////////////////// + if ((SliceHeader->first_mb_in_slice == 0) || (SliceHeader->pic_parameter_id != pInfo->active_PPS.pic_parameter_set_id)) + { +#ifndef WIN32 + h264_Parse_Copy_Pps_From_DDR(pInfo, &pInfo->active_PPS, SliceHeader->pic_parameter_id); + + if (pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS) + { + return H264_PPS_INVALID_PIC_ID; /// Invalid PPS detected + } + + if (pInfo->active_PPS.seq_parameter_set_id != pInfo->active_SPS.seq_parameter_set_id) + { + pInfo->Is_SPS_updated =1; + h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id); + h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id); + } + else + { + if (h264_Parse_Check_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id)) + { + pInfo->Is_SPS_updated =1; + h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id); + h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id); + } + } + +#else + pInfo->active_PPS = PPS_GL[SliceHeader->pic_parameter_id]; + pInfo->active_SPS = SPS_GL[pInfo->active_PPS.seq_parameter_set_id]; +#endif + + if (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS) + { + return H264_PPS_INVALID_PIC_ID; //// Invalid SPS detected + } + } + else { + if ((pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS) || (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS)) + { + return H264_PPS_INVALID_PIC_ID; /// Invalid PPS detected + } + } + + + pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); + //pInfo->img.PicHeightInMapUnits = (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1); + pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag? 
\ + (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1): \ + ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1)<<1); + + + return H264_STATUS_OK; +}; //// End of h264_active_par_set + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +////////////////////////////////////////////////// +// Parse slice header info +////////////////////////////////////////////////// +h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +{ + h264_Status retStatus = H264_STATUS_ERROR; + + //////////////////////////////////////////////////// + //// Parse slice header info + //// Part1: not depend on the active PPS/SPS + //// Part2/3: depend on the active parset + ////////////////////////////////////////////////// + + //retStatus = h264_Parse_Slice_Header_1(pInfo); + + SliceHeader->sh_error = 0; + + if (h264_Parse_Slice_Header_1(parent, pInfo, SliceHeader) == H264_STATUS_OK) + { + ////////////////////////////////////////// + //// Active parameter set for this slice + ////////////////////////////////////////// + retStatus = h264_active_par_set(pInfo, SliceHeader); + } + + if (retStatus == H264_STATUS_OK) { + switch (pInfo->active_SPS.profile_idc) + { + case h264_ProfileBaseline: + case h264_ProfileMain: + case h264_ProfileExtended: + pInfo->active_PPS.transform_8x8_mode_flag=0; + pInfo->active_PPS.pic_scaling_matrix_present_flag =0; + pInfo->active_PPS.second_chroma_qp_index_offset = pInfo->active_PPS.chroma_qp_index_offset; + + default: + break; + } + + if ( h264_Parse_Slice_Header_2(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + SliceHeader->sh_error |= 2; + } + else if ( h264_Parse_Slice_Header_3(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + SliceHeader->sh_error |= 4; + } + + } else { + SliceHeader->sh_error |= 1; + } + + + //if(SliceHeader->sh_error) { + //pInfo->wl_err_flag |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + //} + + + + ////////////////////////////////// + //// Parse slice data (MB loop) + ////////////////////////////////// + //retStatus = h264_Parse_Slice_Data(pInfo); + { + //uint32_t data = 0; + //if( viddec_pm_peek_bits(parent, &data, 32) == -1) + //retStatus = H264_STATUS_ERROR; + } + //h264_Parse_rbsp_trailing_bits(pInfo); + + return retStatus; +} + + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc) +{ + h264_Status ret = H264_STATUS_ERROR; + + //h264_NAL_Unit_t* NAL = &pInfo->NAL; + uint32_t code; +#if 0 + viddec_pm_get_bits(void * parent,uint32_t * data,uint32_t num_bits)(parent, &code, 24); + viddec_pm_get_bits(parent, &code, 1); //forbidden_zero_bit + + viddec_pm_get_bits(parent, &code, 2); + SliceHeader->nal_ref_idc = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 5); + pInfo->nal_unit_type = (uint8_t)code; +#else +#ifdef VBP + if ( viddec_pm_get_bits(parent, &code, 8) != -1) +#else + //// 24bit SC, 1 bit: forbidden_zero_bit, 2 bitrs: nal_ref_idc, 5 bits: nal_unit_type + if ( viddec_pm_get_bits(parent, 
&code, 32) != -1) +#endif + { + *nal_ref_idc = (uint8_t)((code>>5)&0x3); + pInfo->nal_unit_type = (uint8_t)((code>>0)&0x1f); + ret = H264_STATUS_OK; + } +#endif + + return ret; +} + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +/*! + ************************************************************************ + * \brief + * set defaults for old_slice + * NAL unit of a picture" + ************************************************************************ + */ +#ifndef INT_MAX +#define INT_MAX 0xFFFFFFFF +#endif + +#ifndef UINT_MAX +#define UINT_MAX 0x7FFFFFFF +#endif + +void h264_init_old_slice(h264_Info* pInfo) +{ + pInfo->SliceHeader.field_pic_flag = 0; + + pInfo->SliceHeader.pic_parameter_id = 0xFF; + + pInfo->SliceHeader.frame_num = INT_MAX; + + pInfo->SliceHeader.nal_ref_idc = 0xFF; + + pInfo->SliceHeader.idr_flag = 0; + + pInfo->SliceHeader.pic_order_cnt_lsb = UINT_MAX; + pInfo->SliceHeader.delta_pic_order_cnt_bottom = INT_MAX; + + pInfo->SliceHeader.delta_pic_order_cnt[0] = INT_MAX; + pInfo->SliceHeader.delta_pic_order_cnt[1] = INT_MAX; + + return; +} + + +void h264_init_img(h264_Info* pInfo) +{ + h264_memset(&(pInfo->img), 0x0, sizeof(h264_img_par) ); + + + return; +} + + +void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem) +{ + int32_t i; + + h264_Info * pInfo = &(parser->info); + + parser->sps_pps_ddr_paddr = (uint32_t)persist_mem; + + pInfo->SPS_PADDR_GL = parser->sps_pps_ddr_paddr; + pInfo->PPS_PADDR_GL = pInfo->SPS_PADDR_GL + MAX_NUM_SPS * sizeof(seq_param_set_all); + pInfo->OFFSET_REF_FRAME_PADDR_GL = pInfo->PPS_PADDR_GL + MAX_NUM_PPS * sizeof(pic_param_set); + pInfo->TMP_OFFSET_REFFRM_PADDR_GL = pInfo->OFFSET_REF_FRAME_PADDR_GL + + MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; + + h264_memset( &(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used) ); + h264_memset( &(pInfo->active_PPS), 0x0, sizeof(pic_param_set) ); + + /* Global for SPS & PPS */ + for (i=0; iactive_SPS.seq_parameter_set_id = 0xff; + h264_Parse_Copy_Sps_To_DDR (pInfo, &(pInfo->active_SPS), i); + } + for (i=0; iactive_PPS.seq_parameter_set_id = 0xff; + h264_Parse_Copy_Pps_To_DDR (pInfo, &(pInfo->active_PPS), i); + } + + pInfo->active_SPS.seq_parameter_set_id = 0xff; + pInfo->sps_valid = 0; + pInfo->got_start = 0; + + return; +} + + +void h264_init_Info_under_sps_pps_level(h264_Info* pInfo) +{ + int32_t i=0; + + h264_memset( &(pInfo->dpb), 0x0, sizeof(h264_DecodedPictureBuffer) ); + h264_memset( &(pInfo->SliceHeader), 0x0, sizeof(h264_Slice_Header_t) ); + h264_memset( &(pInfo->old_slice), 0x0, sizeof(OldSliceParams) ); + h264_memset( &(pInfo->sei_information), 0x0, sizeof(sei_info) ); + h264_memset( &(pInfo->img), 0x0, sizeof(h264_img_par) ); + + pInfo->h264_list_replacement = 0; + + pInfo->h264_pwt_start_byte_offset = 0; + pInfo->h264_pwt_start_bit_offset = 0; + pInfo->h264_pwt_end_byte_offset = 0; + pInfo->h264_pwt_end_bit_offset = 0; + pInfo->h264_pwt_enabled = 0; + + for (i=0; i<32; i++) + { + pInfo->slice_ref_list0[i] = 0; + pInfo->slice_ref_list1[i] = 0; + } + + pInfo->qm_present_list = 0; + + pInfo->nal_unit_type = 0; + pInfo->old_nal_unit_type = 0xff; + + pInfo->push_to_cur = 0; + pInfo->Is_first_frame_in_stream = 1; + pInfo->Is_SPS_updated = 0; + pInfo->number_of_first_au_info_nal_before_first_slice = 0; 
+ + pInfo->is_frame_boundary_detected_by_non_slice_nal = 0; + pInfo->is_frame_boundary_detected_by_slice_nal = 0; + pInfo->is_current_workload_done = 0; + + pInfo->sei_rp_received = 0; + pInfo->last_I_frame_idc = 255; + pInfo->wl_err_curr = 0; + pInfo->wl_err_next = 0; + + pInfo->primary_pic_type_plus_one = 0; + pInfo->sei_b_state_ready = 0; + + /* Init old slice structure */ + h264_init_old_slice(pInfo); + + /* init_dpb */ + h264_init_dpb(&(pInfo->dpb)); + + /* init_sei */ + h264_sei_stream_initialise(pInfo); + +} + +void h264_init_Info(h264_Info* pInfo) +{ + h264_memset(pInfo, 0x0, sizeof(h264_Info)); + + pInfo->old_nal_unit_type = 0xff; + + pInfo->Is_first_frame_in_stream =1; + pInfo->img.frame_count = 0; + pInfo->last_I_frame_idc = 255; + + return; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +///////////////////////////////////////////////////// +// +// Judge whether it is the first VCL of a new picture +// +///////////////////////////////////////////////////// +int32_t h264_is_second_field(h264_Info * pInfo) +{ + h264_Slice_Header_t cur_slice = pInfo->SliceHeader; + OldSliceParams old_slice = pInfo->old_slice; + + int result = 0; + + //pInfo->img.second_field = 0; + + /// is it second field? + + //OS_INFO( "xxx is_used = %d\n", pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].is_used); + + if (cur_slice.structure != FRAME) + { + if ( ( MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)&&(3 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) ) + &&(0 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) )) + { + if ((cur_slice.frame_num == old_slice.frame_num)||(cur_slice.idr_flag)) + { + + if (old_slice.structure != cur_slice.structure) + { + + if (((cur_slice.structure == TOP_FIELD &&old_slice.structure == BOTTOM_FIELD) || // Condition 1: + (old_slice.structure == TOP_FIELD && cur_slice.structure == BOTTOM_FIELD)) && \ + ((old_slice.nal_ref_idc ==0 && cur_slice.nal_ref_idc == 0) || // Condition 2: + (old_slice.nal_ref_idc !=0 &&cur_slice.nal_ref_idc != 0))) + { + //pInfo->img.second_field = 1; + result = 1; + } + } + } + + + } + + + } + + + + return result; + +} //// End of h264_is_second_field + + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +int32_t h264_is_new_picture_start(h264_Info * pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice) +{ + int result = 0; + + if (pInfo->number_of_first_au_info_nal_before_first_slice) + { + pInfo->number_of_first_au_info_nal_before_first_slice = 0; + return 1; + } + + + + result |= (old_slice.pic_parameter_id != cur_slice.pic_parameter_id); + result |= (old_slice.frame_num != cur_slice.frame_num); + result |= (old_slice.field_pic_flag != cur_slice.field_pic_flag); + if (cur_slice.field_pic_flag && old_slice.field_pic_flag) + { + result |= (old_slice.bottom_field_flag != cur_slice.bottom_field_flag); + } + + result |= (old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \ + ((old_slice.nal_ref_idc == 0) || (cur_slice.nal_ref_idc == 0)); + result |= ( old_slice.idr_flag != cur_slice.idr_flag); + + if (cur_slice.idr_flag 
&& old_slice.idr_flag) + { + result |= (old_slice.idr_pic_id != cur_slice.idr_pic_id); + } + + if (pInfo->active_SPS.pic_order_cnt_type == 0) + { + result |= (old_slice.pic_order_cnt_lsb != cur_slice.pic_order_cnt_lsb); + result |= (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom); + } + + if (pInfo->active_SPS.pic_order_cnt_type == 1) + { + result |= (old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]); + result |= (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1]); + } + + return result; +} + + +int32_t h264_check_previous_frame_end(h264_Info * pInfo) +{ + int result = 0; + + if ( (h264_NAL_UNIT_TYPE_SLICE==pInfo->old_nal_unit_type)||(h264_NAL_UNIT_TYPE_IDR==pInfo->old_nal_unit_type) ) + { + + switch ( pInfo->nal_unit_type ) + { + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + case h264_NAL_UNIT_TYPE_SPS: + case h264_NAL_UNIT_TYPE_PPS: + case h264_NAL_UNIT_TYPE_SEI: + case h264_NAL_UNIT_TYPE_EOSeq: + case h264_NAL_UNIT_TYPE_EOstream: + case h264_NAL_UNIT_TYPE_Reserved1: + case h264_NAL_UNIT_TYPE_Reserved2: + case h264_NAL_UNIT_TYPE_Reserved3: + case h264_NAL_UNIT_TYPE_Reserved4: + case h264_NAL_UNIT_TYPE_Reserved5: + { + pInfo->img.current_slice_num = 0; + + if ((pInfo->img.structure == FRAME) || (pInfo->img.second_field)) { + pInfo->is_frame_boundary_detected_by_non_slice_nal =1; + pInfo->is_current_workload_done=1; + result=1; + } + break; + } + default: + break; + } + + } + + return result; + +} + + + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +////////////////////////////////////////////////////////////// +// 1) Update old slice structure for frame boundary detection +////////////////////////////////////////////////////////////// +void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader) +{ + pInfo->old_slice.pic_parameter_id = pInfo->SliceHeader.pic_parameter_id; + + pInfo->old_slice.frame_num = pInfo->SliceHeader.frame_num; + + pInfo->old_slice.field_pic_flag = pInfo->SliceHeader.field_pic_flag; + + if (pInfo->SliceHeader.field_pic_flag) + { + pInfo->old_slice.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag; + } + + pInfo->old_slice.nal_ref_idc = pInfo->SliceHeader.nal_ref_idc; + + pInfo->old_slice.structure = pInfo->SliceHeader.structure; + + pInfo->old_slice.idr_flag = pInfo->SliceHeader.idr_flag; + if (pInfo->SliceHeader.idr_flag) + { + pInfo->old_slice.idr_pic_id = pInfo->SliceHeader.idr_pic_id; + } + + if (pInfo->active_SPS.pic_order_cnt_type == 0) + { + pInfo->old_slice.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb; + pInfo->old_slice.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom; + } + + if (pInfo->active_SPS.pic_order_cnt_type == 1) + { + pInfo->old_slice.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0]; + pInfo->old_slice.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1]; + } + + ////////////////////////////// Next to current + memcpy(&pInfo->SliceHeader, &next_SliceHeader, sizeof(h264_Slice_Header_t)); + + return; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* 
------------------------------------------------------------------------------------------ */ + +////////////////////////////////////////////////////////////////////////////// +// Initialization for new picture +////////////////////////////////////////////////////////////////////////////// +void h264_update_img_info(h264_Info * pInfo ) +{ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + pInfo->img.frame_num = pInfo->SliceHeader.frame_num; + pInfo->img.structure = pInfo->SliceHeader.structure; + + pInfo->img.field_pic_flag = pInfo->SliceHeader.field_pic_flag; + pInfo->img.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag; + + pInfo->img.MbaffFrameFlag = pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag)); + pInfo->img.pic_order_cnt_type = pInfo->active_SPS.pic_order_cnt_type; + + if (pInfo->img.pic_order_cnt_type == 1) { + pInfo->img.num_ref_frames_in_pic_order_cnt_cycle = pInfo->active_SPS.num_ref_frames_in_pic_order_cnt_cycle; + pInfo->img.delta_pic_order_always_zero_flag = pInfo->active_SPS.delta_pic_order_always_zero_flag; + pInfo->img.offset_for_non_ref_pic = pInfo->active_SPS.offset_for_non_ref_pic; + pInfo->img.offset_for_top_to_bottom_field = pInfo->active_SPS.offset_for_top_to_bottom_field; + } + + pInfo->img.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb; + //pInfo->img.pic_order_cnt_msb = pInfo->SliceHeader.pic_order_cnt_msb; + pInfo->img.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom; + pInfo->img.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0]; + pInfo->img.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1]; + + + pInfo->img.PreviousFrameNum = pInfo->old_slice.frame_num; + + pInfo->img.no_output_of_prior_pics_flag = pInfo->SliceHeader.sh_dec_refpic.no_output_of_prior_pics_flag; + + ////////////////////////////////////////////////// Check SEI recovery point + if (pInfo->sei_information.recovery_point) { + int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); + pInfo->sei_information.recovery_frame_num = (pInfo->img.frame_num + pInfo->sei_information.recovery_frame_cnt) % MaxFrameNum; + } + + if (pInfo->SliceHeader.idr_flag) + pInfo->sei_information.recovery_frame_num = pInfo->img.frame_num; + + + + /////////////////////////////////////////////////Resolution Change + pInfo->img.curr_has_mmco_5 = 0; + + if ( (pInfo->img.PicWidthInMbs != p_dpb->PicWidthInMbs)|| + (pInfo->img.FrameHeightInMbs != p_dpb->FrameHeightInMbs) ) + { + int32_t no_output_old_pics = (pInfo->SliceHeader.idr_flag)? 
pInfo->img.no_output_of_prior_pics_flag : 0; + + // If resolution changed, reset the soft DPB here + h264_dpb_reset_dpb(pInfo, pInfo->img.PicWidthInMbs, pInfo->img.FrameHeightInMbs, 1, no_output_old_pics); + } + + return; + +} ///// End of init new frame + + +void h264_update_frame_type(h264_Info * pInfo ) +{ + +//update frame type + if (pInfo->img.structure == FRAME) + { + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + if (pInfo->dpb.fs_dec_idc < NUM_DPB_FRAME_STORES) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_IDR << FRAME_TYPE_FRAME_OFFSET); + //pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = 0xff; + //pInfo->dpb.fs[0].pic_type = pInfo->dpb.fs_dec_idc; + } + + } + else + { +#if 1 + switch (pInfo->SliceHeader.slice_type) + { + case h264_PtypeB: + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_B << FRAME_TYPE_FRAME_OFFSET); + break; + case h264_PtypeSP: + case h264_PtypeP: + if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET != FRAME_TYPE_B) + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_P << FRAME_TYPE_FRAME_OFFSET); + break; + case h264_PtypeI: + case h264_PtypeSI: + if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET == FRAME_TYPE_INVALID) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_I << FRAME_TYPE_FRAME_OFFSET); + } + pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc; + + break; + default: + break; + + } +#endif + + } + + } + else if (pInfo->img.structure == TOP_FIELD) + { + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));; + } + else + { + switch (pInfo->SliceHeader.slice_type) + { + case h264_PtypeB: + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); + break; + case h264_PtypeSP: + case h264_PtypeP: + if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET != FRAME_TYPE_B) + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); + break; + case h264_PtypeI: + case h264_PtypeSI: + if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET == FRAME_TYPE_INVALID) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); + } + if (pInfo->sei_rp_received) + pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc; + else + pInfo->last_I_frame_idc = 255; + break; + default: + break; + + } + + } + + + } else if (pInfo->img.structure == BOTTOM_FIELD) + { + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));; + } + else + { + switch (pInfo->SliceHeader.slice_type) + { + case h264_PtypeB: + 
pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));
+                break;
+            case h264_PtypeSP:
+            case h264_PtypeP:
+                if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET != FRAME_TYPE_B)
+                    pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));
+                break;
+            case h264_PtypeI:
+            case h264_PtypeSI:
+                if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET == FRAME_TYPE_INVALID)
+                {
+                    pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));
+                }
+                if (pInfo->sei_rp_received)
+                    pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc + PUT_LIST_INDEX_FIELD_BIT(1);
+                else
+                    pInfo->last_I_frame_idc = 255;
+
+                break;
+            default:
+                break;
+
+            }
+
+        }
+
+    }
+    return;
+
+}
+
+
+//////#endif ///////////// IFDEF H264_PARSE_C///////////////////
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_bsd.c b/mixvbp/vbp_plugin/h264/h264parse_bsd.c
new file mode 100755
index 0000000..40c7559
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_bsd.c
@@ -0,0 +1,228 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//  Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+//  Description: h264 bitstream decoding
+//
+///////////////////////////////////////////////////////////////////////*/
+
+
+#include "h264.h"
+#include "h264parse.h"
+#include "viddec_parser_ops.h"
+
+
+
+
+
+/**
+   get_codeNum : Get codeNum based on sec 9.1 of the H264 spec.
+   @param      cxt : Buffer address & size are part inputs; the cxt is updated
+                     with codeNum & sign on success.
+               Assumption: codeNum is a max of 32 bits
+
+   @retval     1 : Successfully found a code num; cxt is updated with codeNum, sign, and size of code.
+   @retval     0 : Couldn't find a code in the current buffer.
+*/
+
+uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo)
+{
+    int32_t leadingZeroBits= 0;
+    uint32_t temp = 0, match = 0, noOfBits = 0, count = 0;
+    uint32_t codeNum =0;
+    uint32_t bits_offset =0, byte_offset =0;
+    uint8_t is_emul =0;
+    uint8_t is_first_byte = 1;
+    uint32_t length =0;
+    uint32_t bits_need_add_in_first_byte =0;
+    int32_t bits_operation_result=0;
+
+    //remove warning
+    pInfo = pInfo;
+
+    ////// Step 1: parse through zero bits until we find a bit with value 1.
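+    // (Per H.264 sec 9.1, an Exp-Golomb codeword is M zero bits, a '1' marker,
+    // then M suffix bits. The scan below runs a byte at a time, honoring the
+    // current bit offset within the first byte; leadingZeroBits ends up
+    // counting the marker bit too, which step 2 compensates for by decrementing.)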
+ viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + + while (!match) + { + if ((bits_offset != 0) && ( is_first_byte == 1)) + { + //we handle byte at a time, if we have offset then for first + // byte handle only 8 - offset bits + noOfBits = (uint8_t)(8 - bits_offset); + bits_operation_result = viddec_pm_peek_bits(parent, &temp, noOfBits); + + + temp = (temp << bits_offset); + if (temp!=0) + { + bits_need_add_in_first_byte = bits_offset; + } + is_first_byte =0; + } + else + { + noOfBits = 8;/* always 8 bits as we read a byte at a time */ + bits_operation_result = viddec_pm_peek_bits(parent, &temp, 8); + + } + + if (-1==bits_operation_result) + { + return MAX_INT32_VALUE; + } + + if (temp != 0) + { + // if byte!=0 we have at least one bit with value 1. + count=1; + while (((temp & 0x80) != 0x80) && (count <= noOfBits)) + { + count++; + temp = temp <<1; + } + //At this point we get the bit position of 1 in current byte(count). + + match = 1; + leadingZeroBits += count; + } + else + { + // we don't have a 1 in current byte + leadingZeroBits += noOfBits; + } + + if (!match) + { + //actually move the bitoff by viddec_pm_get_bits + viddec_pm_get_bits(parent, &temp, noOfBits); + } + else + { + //actually move the bitoff by viddec_pm_get_bits + viddec_pm_get_bits(parent, &temp, count); + } + + } + ////// step 2: Now read the next (leadingZeroBits-1) bits to get the encoded value. + + + if (match) + { + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + /* bit position in current byte */ + //count = (uint8_t)((leadingZeroBits + bits_offset)& 0x7); + count = ((count + bits_need_add_in_first_byte)& 0x7); + + leadingZeroBits --; + length = leadingZeroBits; + codeNum = 0; + noOfBits = 8 - count; + + + while (leadingZeroBits > 0) + { + if (noOfBits < (uint32_t)leadingZeroBits) + { + viddec_pm_get_bits(parent, &temp, noOfBits); + + + codeNum = (codeNum << noOfBits) | temp; + leadingZeroBits -= noOfBits; + } + else + { + viddec_pm_get_bits(parent, &temp, leadingZeroBits); + + codeNum = (codeNum << leadingZeroBits) | temp; + leadingZeroBits = 0; + } + + + noOfBits = 8; + } + // update codeNum = 2 ** (leadingZeroBits) -1 + read_bits(leadingZeroBits). 
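+    // Worked example: for the bit string 00101, M = 2, the suffix "01" reads
+    // as 1, so length = 2 and codeNum = 1 + (1 << 2) - 1 = 4.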
+ codeNum = codeNum + (1 << length) -1; + + } + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + if (bits_offset!=0) + { + viddec_pm_peek_bits(parent, &temp, 8-bits_offset); + } + + return codeNum; +} + + +/*---------------------------------------*/ +/*---------------------------------------*/ +int32_t h264_GetVLCElement(void *parent, h264_Info* pInfo, uint8_t bIsSigned) +{ + int32_t sval = 0; + signed char sign; + + sval = h264_get_codeNum(parent , pInfo); + + if (bIsSigned) //get signed integer golomb code else the value is unsigned + { + sign = (sval & 0x1)?1:-1; + sval = (sval +1) >> 1; + sval = sval * sign; + } + + return sval; +} // Ipp32s H264Bitstream::GetVLCElement(bool bIsSigned) + +/// +/// Check whether more RBSP data left in current NAL +/// +uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo) +{ + uint8_t cnt = 0; + + uint8_t is_emul =0; + uint8_t cur_byte = 0; + int32_t shift_bits =0; + uint32_t ctr_bit = 0; + uint32_t bits_offset =0, byte_offset =0; + + //remove warning + pInfo = pInfo; + + if (!viddec_pm_is_nomoredata(parent)) + return 1; + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + shift_bits = 7-bits_offset; + + // read one byte + viddec_pm_get_cur_byte(parent, &cur_byte); + + ctr_bit = ((cur_byte)>> (shift_bits--)) & 0x01; + + // a stop bit has to be one + if (ctr_bit==0) + return 1; + + while (shift_bits>=0 && !cnt) + { + cnt |= (((cur_byte)>> (shift_bits--)) & 0x01); // set up control bit + } + + return (cnt); +} + + + +///////////// EOF///////////////////// + diff --git a/mixvbp/vbp_plugin/h264/h264parse_dpb.c b/mixvbp/vbp_plugin/h264/h264parse_dpb.c new file mode 100755 index 0000000..13adb1b --- /dev/null +++ b/mixvbp/vbp_plugin/h264/h264parse_dpb.c @@ -0,0 +1,4222 @@ +/*! 
+ ***********************************************************************
+ *  \file: h264_dpb_ctl.c
+ *
+ ***********************************************************************
+ */
+
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+
+
+//#include
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+//#include "h264_debug.h"
+
+#ifndef NULL
+#define NULL 0
+#endif
+//#ifndef USER_MODE
+//#define NULL 0
+//#endif
+
+///////////////////////// DPB init //////////////////////////////////////////
+//////////////////////////////////////////////////////////////////////////////
+// Init DPB
+// Description: init dpb, which should be called while open
+//
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb)
+{
+    int32_t i;
+
+    //// Init DPB to zero
+    //h264_memset(p_dpb, 0x0, sizeof(h264_DecodedPictureBuffer) );
+
+
+    for (i=0; i<NUM_DPB_FRAME_STORES; i++)
+    {
+        p_dpb->fs[i].fs_idc = MPD_DPB_FS_NULL_IDC;
+        p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+    }
+    p_dpb->used_size = 0;
+    p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+    p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC;
+
+    return;
+}
+
+
+///////////////////////// Reference list management //////////////////////////
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_add_ref_list ()
+//
+// Adds an idc to the short term reference list
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_add_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+    p_dpb->fs_ref_idc[p_dpb->ref_frames_in_buffer] = ref_idc;
+    p_dpb->ref_frames_in_buffer++;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_add_ltref_list ()
+//
+// Adds an idc to the long term reference list
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_add_ltref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+    p_dpb->fs_ltref_idc[p_dpb->ltref_frames_in_buffer] = ref_idc;
+    p_dpb->ltref_frames_in_buffer++;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_insert_ref_lists (h264_DecodedPictureBuffer * p_dpb, int32_t NonExisting)
+//
+// Decide whether the current picture needs to be added to the reference lists
+// active_fs should be set-up prior to calling this function
+//
+// Check if we need to search the lists here
+// or can we go straight to adding to ref lists..
+////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb, int32_t NonExisting) +{ + if (NonExisting) + h264_dpb_set_active_fs(p_dpb,p_dpb->fs_non_exist_idc); + else + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + //if(active_fs->is_reference) + if (p_dpb->active_fs->frame.used_for_reference) + { + if (viddec_h264_get_is_long_term(p_dpb->active_fs)) + { + if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME) + h264_dpb_add_ltref_list(p_dpb, p_dpb->active_fs->fs_idc); + else + { + uint32_t found_in_list = 0, i = 0; + for (i = 0; (i < p_dpb->ltref_frames_in_buffer) && (found_in_list == 0); i++) { + if (p_dpb->fs_ltref_idc[i] == p_dpb->active_fs->fs_idc) found_in_list = 1; + } + + if (found_in_list == 0) h264_dpb_add_ltref_list(p_dpb, p_dpb->active_fs->fs_idc); + } + } + else + { + if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME) { + h264_dpb_add_ref_list(p_dpb, p_dpb->active_fs->fs_idc); + } else + { + uint32_t found_in_list = 0, i = 0; + + for (i = 0; (i < p_dpb->ref_frames_in_buffer) && (found_in_list == 0); i++) + { + if (p_dpb->fs_ref_idc[i] == p_dpb->active_fs->fs_idc) found_in_list = 1; + } + + if (found_in_list == 0) h264_dpb_add_ref_list(p_dpb, p_dpb->active_fs->fs_idc); + } + } + } + + return; + +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// Set active fs +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb, int32_t index) +{ + p_dpb->active_fs = &p_dpb->fs[index]; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// Sort reference list +////////////////////////////////////////////////////////////////////////////// + +void h264_list_sort(uint8_t *list, int32_t *sort_indices, int32_t size, int32_t desc) +{ + int32_t j, k, temp, idc; + + // Dodgy looking for embedded code here... 
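+    // (Simple bubble sort. The bitwise (desc & a<b) | (~desc & a>b) test picks
+    // descending order when desc == 1 and ascending when desc == 0; it is
+    // branchless, but only valid because desc is exactly 0 or 1.)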
+ if (size > 1) + { + for (j = 0; j < size-1; j = j + 1) { + for (k = j + 1; k < size; k = k + 1) { + if ((desc & (sort_indices[j] < sort_indices[k]))| + (~desc & (sort_indices[j] > sort_indices[k])) ) + { + temp = sort_indices[k]; + sort_indices[k] = sort_indices[j]; + sort_indices[j] = temp; + idc = list[k]; + list[k] = list[j]; + list[j] = idc; + } + } + } + } +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_pic_is_bottom_field_ref () +// +// Used to sort a list based on a corresponding sort indices +////////////////////////////////////////////////////////////////////////////// + +int32_t h264_dpb_pic_is_bottom_field_ref(h264_DecodedPictureBuffer * p_dpb, int32_t long_term) +{ + int32_t temp; + if (long_term) temp = ((p_dpb->active_fs->bottom_field.used_for_reference) && (p_dpb->active_fs->bottom_field.is_long_term)) ? 1 : 0; + else temp = ((p_dpb->active_fs->bottom_field.used_for_reference) && !(p_dpb->active_fs->bottom_field.is_long_term)) ? 1 : 0; + + return temp; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_pic_is_top_field_ref () +// +// Used to sort a list based on a corresponding sort indices +////////////////////////////////////////////////////////////////////////////// + +int32_t h264_dpb_pic_is_top_field_ref(h264_DecodedPictureBuffer * p_dpb, int32_t long_term) +{ + int32_t temp; + if (long_term) + temp = ((p_dpb->active_fs->top_field.used_for_reference) && (p_dpb->active_fs->top_field.is_long_term)) ? 1 : 0; + else + temp = ((p_dpb->active_fs->top_field.used_for_reference) && !(p_dpb->active_fs->top_field.is_long_term)) ? 1 : 0; + + return temp; +} + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_gen_pic_list_from_frame_list () +// +// Used to sort a list based on a corresponding sort indices +////////////////////////////////////////////////////////////////////////////// + +int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, uint8_t *pic_list, uint8_t *frame_list, int32_t currPicStructure, int32_t list_size, int32_t long_term) +{ + int32_t top_idx, bot_idx, got_pic, list_idx; + int32_t lterm; + + list_idx = 0; + lterm = (long_term)? 
1:0; + + if (list_size) { + + + top_idx = 0; + bot_idx = 0; + + if (currPicStructure == TOP_FIELD) { + while ((top_idx < list_size)||(bot_idx < list_size)) + { + /////////////////////////////////////////// ref Top Field + got_pic = 0; + while ((top_idx < list_size) & ~got_pic) + { + h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]); + if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x1) + { + if (h264_dpb_pic_is_top_field_ref(p_dpb, long_term)) + { + pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field + list_idx++; + got_pic = 1; + } + } + top_idx++; + } + + /////////////////////////////////////////// ref Bottom Field + got_pic = 0; + while ((bot_idx < list_size) & ~got_pic) + { + h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]); + if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x2) + { + if (h264_dpb_pic_is_bottom_field_ref(p_dpb, long_term)) + { + pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field + list_idx++; + got_pic = 1; + } + } + bot_idx++; + } + } + } + + /////////////////////////////////////////////// current Bottom Field + if (currPicStructure == BOTTOM_FIELD) { + while ((top_idx < list_size)||(bot_idx < list_size)) + { + /////////////////////////////////////////// ref Top Field + got_pic = 0; + while ((bot_idx < list_size) && (!(got_pic))) + { + h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]); + if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x2) { + if (h264_dpb_pic_is_bottom_field_ref(p_dpb, long_term)) { + // short term ref pic + pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field + list_idx++; + got_pic = 1; + } + } + bot_idx++; + } + + /////////////////////////////////////////// ref Bottom Field + got_pic = 0; + while ((top_idx < list_size) && (!(got_pic))) + { + h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]); + if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x1) { + if (h264_dpb_pic_is_top_field_ref(p_dpb, long_term)) { + // short term ref pic + pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field + list_idx++; + got_pic = 1; + } + } + top_idx++; + } + } + } + } + + return list_idx; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_remove_ref_list () +// +// Removes an idc from the refernce list and updates list after +// + +void h264_dpb_remove_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc) +{ + uint8_t idx = 0; + int32_t Found = 0; + + while ((idx < p_dpb->ref_frames_in_buffer) && (!(Found))) + { + if (p_dpb->fs_ref_idc[idx] == ref_idc) + Found = 1; + else + idx++; + } + + if (Found) + { + // Move the remainder of the list up one + while (idx < p_dpb->ref_frames_in_buffer - 1) { + p_dpb->fs_ref_idc[idx] = p_dpb->fs_ref_idc[idx + 1]; + idx ++; + } + + p_dpb->fs_ref_idc[idx] = MPD_DPB_FS_NULL_IDC; // Clear the last one + p_dpb->ref_frames_in_buffer--; + } + + return; +} +/* ------------------------------------------------------------------------------------------ */ +/* 
------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_remove_ltref_list () +// +// Removes an idc from the long term reference list and updates list after +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_remove_ltref_list(h264_DecodedPictureBuffer * p_dpb,int32_t ref_idc) +{ + uint8_t idx = 0; + int32_t Found = 0; + + while ((idx < p_dpb->ltref_frames_in_buffer) && (!(Found))) + { + if (p_dpb->fs_ltref_idc[idx] == ref_idc) Found = 1; + else idx++; + } + + if (Found) + { + // Move the remainder of the list up one + while (idx <(uint8_t)(p_dpb->ltref_frames_in_buffer - 1)) + { + p_dpb->fs_ltref_idc[idx] = p_dpb->fs_ltref_idc[idx + 1]; + idx ++; + } + p_dpb->fs_ltref_idc[idx] = MPD_DPB_FS_NULL_IDC; // Clear the last one + + p_dpb->ltref_frames_in_buffer--; + } + + return; +} + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_init_lists () +// +// Used to initialise the reference lists +// Also assigns picture numbers and long term picture numbers if P OR B slice +////////////////////////////////////////////////////////////////////////////// +void h264_dpb_update_ref_lists(h264_Info * pInfo) +{ + h264_DecodedPictureBuffer * p_dpb = &pInfo->dpb; + + int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); + + uint8_t list0idx, list0idx_1, listltidx; + uint8_t idx; + + uint8_t add_top, add_bottom, diff; + uint8_t list_idc; + uint8_t check_non_existing, skip_picture; + + + uint8_t gen_pic_fs_list0[16]; + uint8_t gen_pic_fs_list1[16]; + uint8_t gen_pic_fs_listlt[16]; + uint8_t gen_pic_pic_list[32]; // check out these sizes... 
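+    // (The 16-entry arrays presumably match the 16 frame stores a DPB can
+    // hold; the 32-entry pic list leaves room for a top and a bottom field
+    // entry per frame store.)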
+ + uint8_t sort_fs_idc[16]; + int32_t list_sort_number[16]; + +#ifdef DUMP_HEADER_INFO + static int cc1 = 0; + //OS_INFO("-------------cc1= %d\n",cc1); /////// DEBUG info + if (cc1 == 255) + idx = 0; +#endif + + list0idx = list0idx_1 = listltidx = 0; + + if (pInfo->SliceHeader.structure == FRAME) + { + ////////////////////////////////////////////////// short term handling + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3)&&(p_dpb->active_fs->frame.used_for_reference == 3)) + { + if (p_dpb->active_fs->frame_num > pInfo->img.frame_num) + p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num - MaxFrameNum; + else + p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num; + + p_dpb->active_fs->frame.pic_num = p_dpb->active_fs->frame_num_wrap; + + // Use this opportunity to sort list for a p-frame + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx] = p_dpb->active_fs->frame.pic_num; + list0idx++; + } + } + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); + for (idx = 0; idx < list0idx; idx++) + p_dpb->listX_0[idx] = (sort_fs_idc[idx]); // frame + + p_dpb->listXsize[0] = list0idx; + } + + ////////////////////////////////////////////////// long term handling + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3) && (viddec_h264_get_is_long_term(p_dpb->active_fs) == 3) && (p_dpb->active_fs->frame.used_for_reference == 3)) + { + p_dpb->active_fs->frame.long_term_pic_num = p_dpb->active_fs->frame.long_term_frame_idx; + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + sort_fs_idc[list0idx-p_dpb->listXsize[0]] = p_dpb->fs_ltref_idc[idx]; + list_sort_number[list0idx-p_dpb->listXsize[0]] = p_dpb->active_fs->frame.long_term_pic_num; + list0idx++; + } + } + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + h264_list_sort(sort_fs_idc, list_sort_number, list0idx-p_dpb->listXsize[0], 0); + for (idx = p_dpb->listXsize[0]; idx < list0idx; idx++) { + p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; + } + p_dpb->listXsize[0] = list0idx; + } + } + else /// Field base + { + if (pInfo->SliceHeader.structure == TOP_FIELD) + { + add_top = 1; + add_bottom = 0; + } + else + { + add_top = 0; + add_bottom = 1; + } + + ////////////////////////////////////////////P0: Short term handling + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + if (p_dpb->active_fs->frame.used_for_reference) + { + if (p_dpb->active_fs->frame_num > pInfo->SliceHeader.frame_num) { + p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num - MaxFrameNum; + } else { + p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num; + } + + if ((p_dpb->active_fs->frame.used_for_reference)&0x1) { + p_dpb->active_fs->top_field.pic_num = (p_dpb->active_fs->frame_num_wrap << 1) + add_top; + } + + if ((p_dpb->active_fs->frame.used_for_reference)&0x2) { + p_dpb->active_fs->bottom_field.pic_num = (p_dpb->active_fs->frame_num_wrap << 1) + add_bottom; + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) { + sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx] = p_dpb->active_fs->frame_num_wrap; + 
list0idx++; + } + } + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); + for (idx = 0; idx < list0idx; idx++) { + gen_pic_fs_list0[idx] = sort_fs_idc[idx]; + } + + p_dpb->listXsize[0] = 0; + p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0); + + for (idx = 0; idx < p_dpb->listXsize[0]; idx++) + { + p_dpb->listX_0[idx] = gen_pic_pic_list[idx]; + } + } + + ////////////////////////////////////////////P0: long term handling + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + + if (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x1) { + p_dpb->active_fs->top_field.long_term_pic_num = (p_dpb->active_fs->top_field.long_term_frame_idx << 1) + add_top; + } + + if (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2) { + p_dpb->active_fs->bottom_field.long_term_pic_num = (p_dpb->active_fs->bottom_field.long_term_frame_idx << 1) + add_bottom; + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx]; + list_sort_number[listltidx] = p_dpb->active_fs->long_term_frame_idx; + listltidx++; + } + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0); + for (idx = 0; idx < listltidx; idx++) { + gen_pic_fs_listlt[idx] = sort_fs_idc[idx]; + } + list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1); + + for (idx = 0; idx < list0idx_1; idx++) { + p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx]; + } + p_dpb->listXsize[0] += list0idx_1; + } + } + + + if (pInfo->SliceHeader.slice_type == h264_PtypeI) + { + p_dpb->listXsize[0] = 0; + p_dpb->listXsize[1] = 0; + return; + } + + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { + //// Forward done above + p_dpb->listXsize[1] = 0; + } + + + // B-Slice + // Do not include non-existing frames for B-pictures when cnt_type is zero + + if (pInfo->SliceHeader.slice_type == h264_PtypeB) + { + list0idx = list0idx_1 = listltidx = 0; + skip_picture = 0; + + if (pInfo->active_SPS.pic_order_cnt_type == 0) + check_non_existing = 1; + else + check_non_existing = 0; + + if (pInfo->SliceHeader.structure == FRAME) + { + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) + { + if (check_non_existing) + { + if (viddec_h264_get_is_non_existent(p_dpb->active_fs)) skip_picture = 1; + else skip_picture = 0; + } + + if (skip_picture == 0) + { + if ((p_dpb->active_fs->frame.used_for_reference==3) && (!(p_dpb->active_fs->frame.is_long_term))) + { + if (pInfo->img.framepoc >= p_dpb->active_fs->frame.poc) + { + sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx] = p_dpb->active_fs->frame.poc; + list0idx++; + } + } + } + } + } + + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); + for (idx = 0; idx < list0idx; idx++) { + p_dpb->listX_0[idx] = sort_fs_idc[idx]; + } + + list0idx_1 = list0idx; + + /////////////////////////////////////////B0: Short term handling + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) + { + if (check_non_existing) + { + if 
(viddec_h264_get_is_non_existent(p_dpb->active_fs)) skip_picture = 1; + else skip_picture = 0; + } + + if (skip_picture == 0) + { + if ((p_dpb->active_fs->frame.used_for_reference) && (!(p_dpb->active_fs->frame.is_long_term))) + { + if (pInfo->img.framepoc < p_dpb->active_fs->frame.poc) + { + sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx-list0idx_1] = p_dpb->active_fs->frame.poc; + list0idx++; + } + } + } + } + } + + h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0); + for (idx = list0idx_1; idx < list0idx; idx++) { + p_dpb->listX_0[idx] = sort_fs_idc[idx-list0idx_1]; + } + + for (idx = 0; idx < list0idx_1; idx++) { + p_dpb->listX_1[list0idx-list0idx_1+idx] = p_dpb->listX_0[idx]; + } + + for (idx = list0idx_1; idx < list0idx; idx++) { + p_dpb->listX_1[idx-list0idx_1] = p_dpb->listX_0[idx]; + } + + p_dpb->listXsize[0] = list0idx; + p_dpb->listXsize[1] = list0idx; + + /////////////////////////////////////////B0: long term handling + list0idx = 0; + + // Can non-existent pics be set as long term?? + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + + if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3) && (viddec_h264_get_is_long_term(p_dpb->active_fs) == 3)) + { + // if we have two fields, both must be long-term + sort_fs_idc[list0idx] = p_dpb->fs_ltref_idc[idx]; + list_sort_number[list0idx] = p_dpb->active_fs->frame.long_term_pic_num; + list0idx++; + } + } + + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 0); + for (idx = p_dpb->listXsize[0]; idx < (p_dpb->listXsize[0]+list0idx); idx = idx + 1) + { + p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; + p_dpb->listX_1[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; + } + + p_dpb->listXsize[0] += list0idx; + p_dpb->listXsize[1] += list0idx; + } + else // Field + { + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if (viddec_h264_get_is_used(p_dpb->active_fs)) { + if (check_non_existing) { + if (viddec_h264_get_is_non_existent(p_dpb->active_fs)) + skip_picture = 1; + else + skip_picture = 0; + } + + if (skip_picture == 0) { + if (pInfo->img.ThisPOC >= p_dpb->active_fs->frame.poc) { + sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx] = p_dpb->active_fs->frame.poc; + list0idx++; + } + } + } + } + + h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); + for (idx = 0; idx < list0idx; idx = idx + 1) { + gen_pic_fs_list0[idx] = sort_fs_idc[idx]; + } + + list0idx_1 = list0idx; + + ///////////////////////////////////////////// B1: Short term handling + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + if (viddec_h264_get_is_used(p_dpb->active_fs)) + { + if (check_non_existing) { + if (viddec_h264_get_is_non_existent(p_dpb->active_fs)) + skip_picture = 1; + else + skip_picture = 0; + } + + if (skip_picture == 0) { + if (pInfo->img.ThisPOC < p_dpb->active_fs->frame.poc) { + sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx]; + list_sort_number[list0idx-list0idx_1] = p_dpb->active_fs->frame.poc; + list0idx++; + } + } + } + } + + ///// Generate frame list from sorted fs + ///// + h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0); + for (idx = list0idx_1; idx < list0idx; idx++) + gen_pic_fs_list0[idx] = sort_fs_idc[idx-list0idx_1]; + + for (idx = 0; idx < list0idx_1; idx++) + 
gen_pic_fs_list1[list0idx-list0idx_1+idx] = gen_pic_fs_list0[idx]; + + for (idx = list0idx_1; idx < list0idx; idx++) + gen_pic_fs_list1[idx-list0idx_1] = gen_pic_fs_list0[idx]; + + ///// Generate List_X0 + ///// + p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0); + + for (idx = 0; idx < p_dpb->listXsize[0]; idx++) + p_dpb->listX_0[idx] = gen_pic_pic_list[idx]; + + //// Generate List X1 + //// + p_dpb->listXsize[1] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list1, pInfo->img.structure, list0idx, 0); + + for (idx = 0; idx < p_dpb->listXsize[1]; idx++) + p_dpb->listX_1[idx] = gen_pic_pic_list[idx]; + + ///////////////////////////////////////////// B1: long term handling + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx]; + list_sort_number[listltidx] = p_dpb->active_fs->long_term_frame_idx; + listltidx++; + } + + h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0); + for (idx = 0; idx < listltidx; idx++) + gen_pic_fs_listlt[idx] = sort_fs_idc[idx]; + + list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1); + + for (idx = 0; idx < list0idx_1; idx++) + { + p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx]; + p_dpb->listX_1[p_dpb->listXsize[1]+idx] = gen_pic_pic_list[idx]; + } + + p_dpb->listXsize[0] += list0idx_1; + p_dpb->listXsize[1] += list0idx_1; + } + } + + // Setup initial list sizes at this point + p_dpb->nInitListSize[0] = p_dpb->listXsize[0]; + p_dpb->nInitListSize[1] = p_dpb->listXsize[1]; + if (pInfo->SliceHeader.slice_type != h264_PtypeI) + { + if ((p_dpb->listXsize[0]==p_dpb->listXsize[1]) && (p_dpb->listXsize[0] > 1)) + { + // check if lists are identical, if yes swap first two elements of listX[1] + diff = 0; + for (idx = 0; idx < p_dpb->listXsize[0]; idx = idx + 1) + { + if (p_dpb->listX_0[idx] != p_dpb->listX_1[idx]) diff = 1; + } + + + if (!(diff)) + { + list_idc = p_dpb->listX_1[0]; + p_dpb->listX_1[0] = p_dpb->listX_1[1]; + p_dpb->listX_1[1] = list_idc; + } + } + + // set max size + if (p_dpb->listXsize[0] > pInfo->SliceHeader.num_ref_idx_l0_active) + { + p_dpb->listXsize[0] = pInfo->SliceHeader.num_ref_idx_l0_active; + } + + + if (p_dpb->listXsize[1] > pInfo->SliceHeader.num_ref_idx_l1_active) + { + p_dpb->listXsize[1] = pInfo->SliceHeader.num_ref_idx_l1_active; + } + + + + } + + + + /// DPB reorder list + h264_dpb_reorder_lists(pInfo); + + return; +} //// End of init_dpb_list + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_get_short_term_pic () +// +// Sets active_fs to point to frame store containing picture with given picNum +// Sets field_flag, bottom_field and err_flag based on the picture and whether +// it is available or not... 
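+// (Lookup follows the picture numbering of the slice: frame mode matches
+// frame.pic_num on fully referenced frame stores; field mode checks the top
+// and bottom fields separately and reports a bottom-field hit through
+// *bottom_field_bit.)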
+// +static frame_param_ptr h264_dpb_get_short_term_pic(h264_Info * pInfo,int32_t pic_num, int32_t *bottom_field_bit) +{ + register uint32_t idx; + register frame_param_ptr temp_fs; + + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + *bottom_field_bit = 0; + for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++) + { + temp_fs = &p_dpb->fs[p_dpb->fs_ref_idc[idx]]; + if (pInfo->SliceHeader.structure == FRAME) + { + if (temp_fs->frame.used_for_reference == 3) + if (!(temp_fs->frame.is_long_term)) + if (temp_fs->frame.pic_num == pic_num) return temp_fs; + } + else // current picture is a field + { + if (temp_fs->frame.used_for_reference&0x1) + if (!(temp_fs->top_field.is_long_term)) + if (temp_fs->top_field.pic_num == pic_num) + { + return temp_fs; + } + + if (temp_fs->frame.used_for_reference&0x2) + if (!(temp_fs->bottom_field.is_long_term)) + if (temp_fs->bottom_field.pic_num == pic_num) + { + *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1); + return temp_fs; + } + } + } + return NULL; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_get_long_term_pic () +// +// Sets active_fs to point to frame store containing picture with given picNum +// + +static frame_param_ptr h264_dpb_get_long_term_pic(h264_Info * pInfo,int32_t long_term_pic_num, int32_t *bottom_field_bit) +{ + register uint32_t idx; + register frame_param_ptr temp_fs; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + *bottom_field_bit = 0; + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + temp_fs = &p_dpb->fs[p_dpb->fs_ltref_idc[idx]]; + if (pInfo->SliceHeader.structure == FRAME) + { + if (temp_fs->frame.used_for_reference == 3) + if (temp_fs->frame.is_long_term) + if (temp_fs->frame.long_term_pic_num == long_term_pic_num) + return temp_fs; + } + else + { + if (temp_fs->frame.used_for_reference&0x1) + if (temp_fs->top_field.is_long_term) + if (temp_fs->top_field.long_term_pic_num == long_term_pic_num) + return temp_fs; + + if (temp_fs->frame.used_for_reference&0x2) + if (temp_fs->bottom_field.is_long_term) + if (temp_fs->bottom_field.long_term_pic_num == long_term_pic_num) + { + *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1); + return temp_fs; + } + } + } + return NULL; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_reorder_ref_pic_list () +// +// Used to sort a list based on a corresponding sort indices +// + +struct list_value_t +{ + int32_t value; + struct list_value_t *next; +}; + +struct linked_list_t +{ + struct list_value_t *begin; + struct list_value_t *end; + struct list_value_t *entry; + struct list_value_t *prev_entry; + struct list_value_t list[32]; +}; + +static void linked_list_initialize (struct linked_list_t *lp, uint8_t *vp, int32_t size) +{ + struct list_value_t *lvp; + + lvp = lp->list; + lp->begin = lvp; + lp->entry = lvp; + lp->end = lvp + (size-1); + lp->prev_entry = NULL; + + while (lvp <= lp->end) + { + 
lvp->value = *(vp++);
+        lvp->next = lvp + 1;
+        lvp++;
+    }
+    lp->end->next = NULL;
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+static void linked_list_reorder (struct linked_list_t *lp, int32_t list_value)
+{
+    register struct list_value_t *lvp = lp->entry;
+    register struct list_value_t *lvp_prev;
+
+    if (lvp == NULL) {
+        lp->end->value = list_value;   // replace the end entry
+    } else if ((lp->begin==lp->end)||(lvp==lp->end))   // replace the begin/end entry and set the entry to NULL
+    {
+        lp->entry->value = list_value;
+        lp->prev_entry = lp->entry;
+        lp->entry = NULL;
+    }
+    else if (lvp->value==list_value)   // the entry point matches
+    {
+        lp->prev_entry = lvp;
+        lp->entry = lvp->next;
+    }
+    else if (lvp->next == lp->end)   // the entry is just before the end
+    {
+        // replace the end and swap the end and entry points
+        //  lvp
+        //  prev_entry => entry => old_end
+        //  old_end & new_prev_entry => new_end & entry
+        lp->end->value = list_value;
+
+        if (lp->prev_entry)
+            lp->prev_entry->next = lp->end;
+        else
+            lp->begin = lp->end;
+
+        lp->prev_entry = lp->end;
+        lp->end->next = lvp;
+        lp->end = lvp;
+        lvp->next = NULL;
+    }
+    else
+    {
+        lvp_prev = NULL;
+        while (lvp->next)   // do not check the end but we'll be in the loop at least once
+        {
+            if (lvp->value == list_value) break;
+            lvp_prev = lvp;
+            lvp = lvp->next;
+        }
+        lvp->value = list_value;   // force end matches
+        if (lvp_prev != NULL)
+        {
+            // remove lvp from the list
+            lvp_prev->next = lvp->next;
+        }
+        if (lvp==lp->end) lp->end = lvp_prev;
+
+        // insert lvp in front of lp->entry
+        if (lp->entry==lp->begin)
+        {
+            lvp->next = lp->begin;
+            lp->begin = lvp;
+        }
+        else
+        {
+            lvp->next = lp->entry;
+            lp->prev_entry->next = lvp;
+        }
+        lp->prev_entry = lvp;
+    }
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+static void linked_list_output (struct linked_list_t *lp, int32_t *vp)
+{
+    register int32_t *ip1;
+    register struct list_value_t *lvp;
+
+    lvp = lp->begin;
+    ip1 = vp;
+    while (lvp)
+    {
+        *(ip1++) = lvp->value;
+        lvp = lvp->next;
+    }
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+int32_t h264_dpb_reorder_ref_pic_list(h264_Info * pInfo,int32_t list_num, int32_t num_ref_idx_active)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    uint8_t *remapping_of_pic_nums_idc;
+    list_reordering_num_t *list_reordering_num;
+    int32_t bottom_field_bit;
+
+    int32_t maxPicNum, currPicNum, picNumLXNoWrap, picNumLXPred, pic_num;
+    int32_t refIdxLX;
+    int32_t i;
+
+    int32_t PicList[32] = {0};
+    struct linked_list_t ll;
+    struct linked_list_t *lp = &ll;     // should consider using the scratch space
+
+    // declaring these below as registers gave me 23 cy/MB for the worst frames in Allegro_Combined_CABAC_07_HD, YHu
+    register frame_param_ptr temp_fs;
+    register int32_t temp;
+    register uint8_t *ip1;
+
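+    // (This is the slice-header ref pic list modification loop: idc 0/1 derive
+    // picNumLX by a wrapped subtraction/addition of abs_diff_pic_num_minus1 + 1,
+    // idc 2 selects a picture by long_term_pic_num, and idc 3 ends the loop.)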
+    maxPicNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+
+
+    if (list_num == 0) // i.e. list 0
+    {
+        ip1 = p_dpb->listX_0;
+        remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l0.reordering_of_pic_nums_idc;
+        list_reordering_num = pInfo->SliceHeader.sh_refpic_l0.list_reordering_num;
+    }
+    else
+    {
+        ip1 = p_dpb->listX_1;
+        remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l1.reordering_of_pic_nums_idc;
+        list_reordering_num = pInfo->SliceHeader.sh_refpic_l1.list_reordering_num;
+    }
+
+
+    linked_list_initialize (lp, ip1, num_ref_idx_active);
+
+    currPicNum = pInfo->SliceHeader.frame_num;
+    if (pInfo->SliceHeader.structure != FRAME)
+    {
+
+        /* The reason it is + 1, I think, is because the list is based on polarity
+           expand later...
+        */
+        maxPicNum  <<= 1;
+        currPicNum <<= 1;
+        currPicNum++;
+    }
+
+    picNumLXPred = currPicNum;
+    refIdxLX = 0;
+
+    for (i = 0; remapping_of_pic_nums_idc[i] != 3; i++)
+    {
+        if (i > MAX_NUM_REF_FRAMES)
+        {
+            break;
+        }
+
+        if (remapping_of_pic_nums_idc[i] < 2) // - short-term re-ordering
+        {
+            temp = (list_reordering_num[i].abs_diff_pic_num_minus1 + 1);
+            if (remapping_of_pic_nums_idc[i] == 0)
+            {
+                temp = picNumLXPred - temp;
+                if (temp < 0 ) picNumLXNoWrap = temp + maxPicNum;
+                else           picNumLXNoWrap = temp;
+            }
+            else // (remapping_of_pic_nums_idc[i] == 1)
+            {
+                temp += picNumLXPred;
+                if (temp >= maxPicNum) picNumLXNoWrap = temp - maxPicNum;
+                else                   picNumLXNoWrap = temp;
+            }
+
+            // Updates for next iteration of the loop
+            picNumLXPred = picNumLXNoWrap;
+
+            if (picNumLXNoWrap > currPicNum ) pic_num = picNumLXNoWrap - maxPicNum;
+            else                              pic_num = picNumLXNoWrap;
+
+            temp_fs = h264_dpb_get_short_term_pic(pInfo, pic_num, &bottom_field_bit);
+            if (temp_fs)
+            {
+                temp = bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc);
+                linked_list_reorder (lp, temp);
+            }
+        }
+        else //(remapping_of_pic_nums_idc[i] == 2) long-term re-ordering
+        {
+            pic_num = list_reordering_num[i].long_term_pic_num;
+
+            temp_fs = h264_dpb_get_long_term_pic(pInfo, pic_num, &bottom_field_bit);
+            if (temp_fs)
+            {
+                temp = PUT_LIST_LONG_TERM_BITS(1) + bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc);
+                linked_list_reorder (lp, temp);
+            }
+        }
+    }
+
+    linked_list_output (lp, PicList);
+
+    if (0 == list_num )
+    {
+        for (i=0; i<num_ref_idx_active; i++)
+        {
+            pInfo->slice_ref_list0[i]=(uint8_t)PicList[i];
+        }
+    }
+    else
+    {
+        for (i=0; i<num_ref_idx_active; i++)
+        {
+            pInfo->slice_ref_list1[i]=(uint8_t)PicList[i];
+        }
+    }
+
+
+    // Instead of updating the now reordered list here, just write it down...
+    // This way, we can continue to hold the initialised list in p_dpb->listX_0
+    // and therefore not need to update it every slice
+
+    //h264_dpb_write_list(list_num, PicList, num_ref_idx_active);
+
+    return num_ref_idx_active;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+void h264_dpb_RP_check_list (h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    uint8_t *p_list = pInfo->slice_ref_list0;
+
+    //
+    // If decoding starts from a recovery point without an exact point, all B frames belonging to the previous GOP should be thrown away!
+ // + + if ((pInfo->SliceHeader.slice_type == h264_PtypeB)&&(pInfo->sei_b_state_ready ==0) && pInfo->sei_rp_received) { + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + } + + + // + // Repare Ref list if it damaged with RP recovery only + // + if ((pInfo->SliceHeader.slice_type == h264_PtypeP) && pInfo->sei_rp_received) + { + + int32_t idx, rp_found = 0; + + if ( ((pInfo->SliceHeader.num_ref_idx_l0_active == 1)&&(pInfo->SliceHeader.structure == FRAME)) || + ((pInfo->SliceHeader.num_ref_idx_l0_active == 2)&&(pInfo->SliceHeader.structure != FRAME)) ) + { + if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) + { + p_list = pInfo->slice_ref_list0; + } + else + { + p_list = pInfo->dpb.listX_0; + //pInfo->sei_rp_received = 0; + //return; + } + + + for (idx = 0; idx < p_dpb->used_size; idx++) { + if (p_dpb->fs_dpb_idc[idx] == pInfo->last_I_frame_idc) { + rp_found = 1; + break; + } + } + if (rp_found) { +#if 0 + int32_t poc; + + ///// Clear long-term ref list + for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++) + { + h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ltref_idc[0]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]); + } + + ///// Clear short-term ref list + //while(p_dpb->used_size>1) + for (idx = 0; idx < p_dpb->used_size; idx++) + { + int32_t idx_pos; + //// find smallest non-output POC + h264_dpb_get_smallest_poc(p_dpb, &poc, &idx_pos); + + //// Remove all frames in previous GOP + if ((idx_pos != MPD_DPB_FS_NULL_IDC) && (p_dpb->fs_dpb_idc[idx_pos] != pInfo->last_I_frame_idc)) + { + // Remove from ref-list + h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx_pos]); + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx_pos]); + + // Output from DPB + //h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + //if((active_fs->is_output == 0) && (active_fs->is_non_existent == 0)) + { + //int32_t existing; + //h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[idx], 0, &existing); + //p_dpb->last_output_poc = poc; + } + //h264_dpb_remove_frame_from_dpb(p_dpb, idx); // Remove dpb.fs_dpb_idc[pos] + + } + } +#endif + + ///// Set the reference to last I frame + if ( (pInfo->last_I_frame_idc!=255)&&(pInfo->last_I_frame_idc!=p_list[0])) + { + /// Repaire the reference list now + h264_dpb_unmark_for_reference(p_dpb, p_list[0]); + h264_dpb_remove_ref_list(p_dpb, p_list[0]); + p_list[0] = pInfo->last_I_frame_idc; + if (pInfo->SliceHeader.structure != FRAME) + p_list[1] = (pInfo->last_I_frame_idc ^ 0x20); + } + } + } + + pInfo->sei_rp_received = 0; + pInfo->sei_b_state_ready = 1; + + } + + + return; +} + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_reorder_lists () +// +// Used to sort a list based on a corresponding sort indices +// + +void h264_dpb_reorder_lists(h264_Info * pInfo) +{ + int32_t currSliceType = pInfo->SliceHeader.slice_type; + + if (currSliceType == h264_PtypeP ) + { + /////////////////////////////////////////////// Reordering reference list for P slice + /// Forward reordering + if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) + h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active); + else + { + 
+ } + pInfo->dpb.listXsize[0]=pInfo->SliceHeader.num_ref_idx_l0_active; + } else if (currSliceType == h264_PtypeB) + { + /////////////////////////////////////////////// Reordering reference list for B slice + /// Forward reordering + if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) + h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active); + else + { + + } + pInfo->dpb.listXsize[0]=pInfo->SliceHeader.num_ref_idx_l0_active; + + /// Backward reordering + if (pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag) + h264_dpb_reorder_ref_pic_list(pInfo, 1, pInfo->SliceHeader.num_ref_idx_l1_active); + else + { + + } + pInfo->dpb.listXsize[1]=pInfo->SliceHeader.num_ref_idx_l1_active; + } + + //// Check if need recover reference list with previous recovery point + if (!pInfo->img.second_field) + { + h264_dpb_RP_check_list(pInfo); + } + + + return; +} + +////////////////////////////////////////// DPB management ////////////////////// + +////////////////////////////////////////////////////////////////////////////// +// avc_dpb_get_non_output_frame_number () +// +// get total non output frame number in the DPB. +// +static int32_t avc_dpb_get_non_output_frame_number(h264_Info * pInfo) +{ + int32_t idx; + int32_t number=0; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + for (idx = 0; idx < p_dpb->used_size; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + + if (viddec_h264_get_is_output(p_dpb->active_fs) == 0) + { + (number)++; + } + } + + return number; +} + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +//// Store previous picture in DPB, and then update DPB queue, remove unused frames from DPB + +void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExisting, int32_t use_old) +{ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + int32_t used_for_reference; + int32_t is_direct_output; + int32_t second_field_stored = 0; + int32_t poc; + int32_t pos; + int32_t flag; + int32_t first_field_non_ref = 0; + int32_t idr_flag; + + if (NonExisting) { + if (p_dpb->fs_non_exist_idc == MPD_DPB_FS_NULL_IDC) + return; + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); + } else { + if (p_dpb->fs_dec_idc == MPD_DPB_FS_NULL_IDC) + return; + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + } + + if (NonExisting == 0) + { + //active_fs->sps_disp_index = (next_sps_disp_entry == 0)? 7 : next_sps_disp_entry - 1; + pInfo->img.last_has_mmco_5 = 0; + pInfo->img.last_pic_bottom_field = pInfo->img.bottom_field_flag; + + //used_for_reference = (use_old) ? !(old_pInfo->img.old_disposable_flag) : !(pInfo->img.disposable_flag); + used_for_reference = (use_old) ? 
!(pInfo->old_slice.nal_ref_idc==0) : !(pInfo->SliceHeader.nal_ref_idc==0); + + switch (viddec_h264_get_dec_structure(p_dpb->active_fs)) + { + case(TOP_FIELD) : { + p_dpb->active_fs->top_field.used_for_reference = used_for_reference; + viddec_h264_set_is_top_used(p_dpb->active_fs, 1); + //p_dpb->active_fs->crc_field_coded = 1; + } + break; + case(BOTTOM_FIELD): { + p_dpb->active_fs->bottom_field.used_for_reference = used_for_reference << 1; + viddec_h264_set_is_bottom_used(p_dpb->active_fs, 1); + //p_dpb->active_fs->crc_field_coded = 1; + } + break; + default: { + p_dpb->active_fs->frame.used_for_reference = used_for_reference?3:0; + viddec_h264_set_is_frame_used(p_dpb->active_fs, 3); + //if(pInfo->img.MbaffFrameFlag) p_dpb->active_fs->crc_field_coded = 1; + + } + break; + } + + //freeze_assert = use_old ? old_pInfo->img.sei_freeze_this_image : pInfo->img.sei_freeze_this_image; + //if (freeze_assert) sei_information.disp_frozen = 1; + + idr_flag = use_old ? pInfo->old_slice.idr_flag : pInfo->SliceHeader.idr_flag; + if (idr_flag) { + h264_dpb_idr_memory_management (pInfo, &pInfo->active_SPS, pInfo->img.no_output_of_prior_pics_flag); + } else { + // adaptive memory management + if (used_for_reference & pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) { + h264_dpb_adaptive_memory_management(pInfo); + } + } + // Reset the active frame store - could have changed in mem management ftns + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + if ((viddec_h264_get_dec_structure(p_dpb->active_fs) == TOP_FIELD)||(viddec_h264_get_dec_structure(p_dpb->active_fs) == BOTTOM_FIELD)) + { + // check for frame store with same pic_number -- always true in my case, YH + // when we allocate frame store for the second field, we make sure the frame store for the second + // field is the one that contains the first field of the frame- see h264_dpb_init_frame_store() + // This is different from JM model. + // In this way we don't need to move image data around and can reduce memory bandwidth. 
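+            // (A complementary field pair therefore shares a single frame store:
+            // the second field lands in the same fs entry and the two fields are
+            // combined in place, so no image data needs to be copied.)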
+ // simply check if the check if the other field has been decoded or not + + if (viddec_h264_get_is_used(p_dpb->active_fs) != 0) + { + if (pInfo->img.second_field) + { + h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 0, NonExisting, use_old); + second_field_stored = 1; + } + } + } + } + else + { // Set up locals for non-existing frames + used_for_reference = 1; + + p_dpb->active_fs->frame.used_for_reference = used_for_reference?3:0; + viddec_h264_set_is_frame_used(p_dpb->active_fs, 3); + viddec_h264_set_dec_structure(p_dpb->active_fs, FRAME); + pInfo->img.structure = FRAME; + } + + is_direct_output = 0; + if (NonExisting == 0) + { + if (p_dpb->used_size >= p_dpb->BumpLevel) + { + // non-reference frames may be output directly + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + if ((used_for_reference == 0) && (viddec_h264_get_is_used(p_dpb->active_fs) == 3)) + { + h264_dpb_get_smallest_poc (p_dpb, &poc, &pos); + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + if ((pos == MPD_DPB_FS_NULL_IDC) || (pInfo->img.ThisPOC < poc)) + { + is_direct_output = 1; + } + } + } + } + + if (NonExisting) { + h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames); + } else if (pInfo->SliceHeader.idr_flag == 0) { + if (used_for_reference) { + if (pInfo->img.second_field == 0) { + if (pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag == 0) { + h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames); + } + } + } + } + + h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + + //if (is_direct_output == 0) + { + if ((pInfo->img.second_field == 0) || (NonExisting)) + { + h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 1, NonExisting, use_old); + } + + // In an errored stream we saw a condition where + // p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer > p_dpb->BumpLevel, + // which in itself is an error, but this means first_field_non_ref will + // not get set and causes problems for h264_dpb_queue_update() + if ((pInfo->img.structure != FRAME) && (pInfo->img.second_field == 0)) { + if (used_for_reference == 0) + if (p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer == p_dpb->BumpLevel) + first_field_non_ref = 1; + } + + } + + if (NonExisting) + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); + else + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + + if (NonExisting == 0) + { + if ((pInfo->img.second_field == 1) || (pInfo->img.structure == FRAME)) + { + //h264_send_new_decoded_frame(); + if ((p_dpb->OutputCtrl) && (is_direct_output == 0)) + h264_dpb_output_one_frame_from_dpb(pInfo, 0, 0,pInfo->active_SPS.num_ref_frames); + + // Pictures inserted by this point - check if we have reached the specified output + // level (if one has been specified) so we can begin on next call + + /* + Fixed HSD 212625---------------should compare OutputLevel with non-output frame number in dpb, not the used number in dpb + if((p_dpb->OutputLevelValid)&&(p_dpb->OutputCtrl == 0)) + { + if(p_dpb->used_size == p_dpb->OutputLevel) + p_dpb->OutputCtrl = 1; + } + */ + + if (p_dpb->OutputLevelValid) + { + int32_t non_output_frame_number=0; + non_output_frame_number = avc_dpb_get_non_output_frame_number(pInfo); + + if (non_output_frame_number == p_dpb->OutputLevel) + p_dpb->OutputCtrl = 1; + else + p_dpb->OutputCtrl = 0; + } + else { + p_dpb->OutputCtrl = 0; + } + } + } + + while (p_dpb->used_size > (p_dpb->BumpLevel + first_field_non_ref)) + //while(p_dpb->used_size > 
+ {
+ h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
+ //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+ }
+
+ //
+ // Do not output "direct output" pictures until the semaphore has been set that the pic is
+ // decoded!!
+ //
+ if (is_direct_output) {
+ h264_dpb_queue_update(pInfo, 1, 1, 0,pInfo->active_SPS.num_ref_frames);
+ //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+ }
+
+ //
+ // Add reference pictures into Reference list
+ //
+ if (used_for_reference) {
+ h264_dpb_insert_ref_lists(&pInfo->dpb, NonExisting);
+ }
+
+ h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+
+
+ return;
+} ////////////// End of DPB store pic
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_insert_picture_in_dpb ()
+//
+// Insert the decoded picture into the DPB. A free DPB position is necessary
+// for frames.
+// This ftn tends to fill out the framestore's top level parameters from the
+// storable picture's parameters within it. It is called from h264_dpb_store_picture_in_dpb()
+//
+// This function finishes by updating the reference lists - this means it must be called after
+// h264_dpb_sliding_window_memory_management()
+//
+// In the case of a frame it will call h264_dpb_split_field()
+// In the case of the second field of a complementary field pair it calls h264_dpb_combine_field()
+//
+
+void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference, int32_t add2dpb, int32_t NonExisting, int32_t use_old)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ if (NonExisting == 0) {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+ p_dpb->active_fs->frame_num = (use_old) ? 
pInfo->old_slice.frame_num : pInfo->SliceHeader.frame_num; + } + else { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); + p_dpb->active_fs->frame_num = p_dpb->active_fs->frame.pic_num; + } + + if (add2dpb) { + p_dpb->fs_dpb_idc[p_dpb->used_size] = p_dpb->active_fs->fs_idc; + p_dpb->used_size++; + } + + + switch (viddec_h264_get_dec_structure(p_dpb->active_fs)) + { + case FRAME : { + viddec_h264_set_is_frame_used(p_dpb->active_fs, 3); + p_dpb->active_fs->frame.used_for_reference = used_for_reference?3:0; + if (used_for_reference) + { + p_dpb->active_fs->frame.used_for_reference = 3; + if (p_dpb->active_fs->frame.is_long_term) + viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 3); + } + // Split frame to 2 fields for prediction + h264_dpb_split_field(p_dpb, pInfo); + + } + break; + case TOP_FIELD : { + viddec_h264_set_is_top_used(p_dpb->active_fs, 1); + + p_dpb->active_fs->top_field.used_for_reference = used_for_reference; + if (used_for_reference) + { + p_dpb->active_fs->frame.used_for_reference |= 0x1; + if (p_dpb->active_fs->top_field.is_long_term) + { + viddec_h264_set_is_top_long_term(p_dpb->active_fs, 1); + p_dpb->active_fs->long_term_frame_idx = p_dpb->active_fs->top_field.long_term_frame_idx; + } + } + if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) { + h264_dpb_combine_field(p_dpb, use_old); // generate frame view + } + else + { + p_dpb->active_fs->frame.poc = p_dpb->active_fs->top_field.poc; + } + + } + break; + case BOTTOM_FIELD : { + viddec_h264_set_is_bottom_used(p_dpb->active_fs, 1); + + p_dpb->active_fs->bottom_field.used_for_reference = (used_for_reference<<1); + if (used_for_reference) + { + p_dpb->active_fs->frame.used_for_reference |= 0x2; + if (p_dpb->active_fs->bottom_field.is_long_term) + { + viddec_h264_set_is_bottom_long_term(p_dpb->active_fs, 1); + p_dpb->active_fs->long_term_frame_idx = p_dpb->active_fs->bottom_field.long_term_frame_idx; + } + } + if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) { + h264_dpb_combine_field(p_dpb, use_old); // generate frame view + } + else + { + p_dpb->active_fs->frame.poc = p_dpb->active_fs->bottom_field.poc; + } + + } + break; + } + /* + if ( gRestartMode.LastRestartType == RESTART_SEI ) + { + if ( p_dpb->active_fs->open_gop_entry ) dpb.WaitSeiRecovery = 1; + } + + gRestartMode.LastRestartType = 0xFFFF; + */ + + return; +} ////// End of insert picture in DPB + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_mm_unmark_short_term_for_reference () +// +// Adaptive Memory Management: Mark short term picture unused +// + +void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t difference_of_pic_nums_minus1) +{ + int32_t picNumX; + int32_t currPicNum; + uint32_t idx; + int32_t unmark_done; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + if (pInfo->img.structure == FRAME) + currPicNum = pInfo->img.frame_num; + else + currPicNum = (pInfo->img.frame_num << 1) + 1; + + picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1); + + unmark_done = 0; + + for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(unmark_done)); idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if (pInfo->img.structure == FRAME) + { + /* If all pic numbers in the 
list are different (and they should be) + we should terminate the for loop the moment we match pic numbers, + no need to continue to check - hence set unmark_done + */ + + if ((p_dpb->active_fs->frame.used_for_reference == 3) && (viddec_h264_get_is_long_term(p_dpb->active_fs) == 0) && + (p_dpb->active_fs->frame.pic_num == picNumX)) + { + h264_dpb_unmark_for_reference(p_dpb, p_dpb->active_fs->fs_idc); + h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc); + unmark_done = 1; + } + } + else + { + /* + If we wish to unmark a short-term picture by picture number when the current picture + is a field, we have to unmark the corresponding field as unused for reference, + and also if it was part of a frame or complementary reference field pair, the + frame is to be marked as unused. However the opposite field may still be used as a + reference for future fields + + How will this affect the reference list update ftn coming after?? + + */ + if ((p_dpb->active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x01))&& + (p_dpb->active_fs->top_field.pic_num == picNumX) ) + { + p_dpb->active_fs->top_field.used_for_reference = 0; + p_dpb->active_fs->frame.used_for_reference &= 2; + + unmark_done = 1; + + //Check if other field is used for short-term reference, if not remove from list... + if (p_dpb->active_fs->bottom_field.used_for_reference == 0) + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); + } + if ((p_dpb->active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2)) && + (p_dpb->active_fs->bottom_field.pic_num == picNumX) ) + { + p_dpb->active_fs->bottom_field.used_for_reference = 0; + p_dpb->active_fs->frame.used_for_reference &= 1; + + unmark_done = 1; + + //Check if other field is used for reference, if not remove from list... + if (p_dpb->active_fs->top_field.used_for_reference == 0) + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); + } + } + } + + return; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +//////////////////////////////////////////////////////////////////////////////////// +// h264_dpb_mm_unmark_long_term_for_reference () +// +// Adaptive Memory Management: Mark long term picture unused +// +// In a frame situation the long_term_pic_num will refer to another frame. 
+// Thus we can call h264_dpb_unmark_for_long_term_reference() and then remove the picture +// from the list +// +// If the current picture is a field, long_term_pic_num will refer to another field +// It is also the case that each individual field should have a unique picture number +// 8.2.5.4.2 suggests that when curr pic is a field, an mmco == 2 operation +// should be accompanied by a second op to unmark the other field as being unused +/////////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_mm_unmark_long_term_for_reference (h264_Info * pInfo, int32_t long_term_pic_num) +{ + uint32_t idx; + int32_t unmark_done; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + unmark_done = 0; + for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (!(unmark_done)); idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); + + if (pInfo->img.structure == FRAME) + { + if ((p_dpb->active_fs->frame.used_for_reference==3) && (viddec_h264_get_is_long_term(p_dpb->active_fs)==3) && + (p_dpb->active_fs->frame.long_term_pic_num == long_term_pic_num)) + { + h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); + unmark_done = 1; + } + } + else + { + /// Check top field + if ((p_dpb->active_fs->frame.used_for_reference&0x1) && (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x1) && + (p_dpb->active_fs->top_field.long_term_pic_num == long_term_pic_num) ) + { + p_dpb->active_fs->top_field.used_for_reference = 0; + p_dpb->active_fs->top_field.is_long_term = 0; + p_dpb->active_fs->frame.used_for_reference &= 2; + viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 2); + + unmark_done = 1; + + //Check if other field is used for long term reference, if not remove from list... + if ((p_dpb->active_fs->bottom_field.used_for_reference == 0) || (p_dpb->active_fs->bottom_field.is_long_term == 0)) + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); + } + + /// Check Bottom field + if ((p_dpb->active_fs->frame.used_for_reference&0x2) && (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2) && + (p_dpb->active_fs->bottom_field.long_term_pic_num == long_term_pic_num) ) + { + p_dpb->active_fs->bottom_field.used_for_reference = 0; + p_dpb->active_fs->bottom_field.is_long_term = 0; + p_dpb->active_fs->frame.used_for_reference &= 1; + viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 1); + + unmark_done = 1; + //Check if other field is used for long term reference, if not remove from list... 
+ if ((p_dpb->active_fs->top_field.used_for_reference == 0) || (p_dpb->active_fs->top_field.is_long_term == 0)) + { + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]); + } + } + } // field structure + } //for(idx) + + return; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_get_pic_struct_by_pic_num +// +// Searches the fields appearing in short term reference list +// Returns the polarity of the field with pic_num = picNumX +////////////////////////////////////////////////////////////////////////////// + +int32_t h264_dpb_get_pic_struct_by_pic_num(h264_DecodedPictureBuffer *p_dpb, int32_t picNumX) +{ + uint32_t idx; + int32_t pic_struct = INVALID; + int32_t found = 0; + + for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(found)); idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if ((p_dpb->active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x01))&& + (p_dpb->active_fs->top_field.pic_num == picNumX) ) + { + found = 1; + pic_struct = TOP_FIELD; + + } + if ((p_dpb->active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2)) && + (p_dpb->active_fs->bottom_field.pic_num == picNumX) ) + { + found = 1; + pic_struct = BOTTOM_FIELD; + + } + } + + return pic_struct; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_mm_assign_long_term_frame_idx () +// +// Assign a long term frame index to a short term picture +// Both lists must be updated as part of this process... 
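+// A worked example of the pic num arithmetic below (values illustrative): for a
+// field picture with frame_num = 10, currPicNum = 2*10 + 1 = 21; an MMCO with
+// difference_of_pic_nums_minus1 = 4 then gives picNumX = 21 - 5 = 16. Per the
+// spec's PicNum derivation (8.2.4.2), an even picNumX names a field of opposite
+// parity to the current field, an odd one a field of the same parity.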
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_mm_assign_long_term_frame_idx(h264_Info * pInfo, int32_t difference_of_pic_nums_minus1, int32_t long_term_frame_idx)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ int32_t picNumX;
+ int32_t currPicNum;
+ int32_t polarity = 0;
+
+ if (pInfo->img.structure == FRAME) {
+ currPicNum = pInfo->img.frame_num;
+ } else {
+ currPicNum = (pInfo->img.frame_num << 1) + 1;
+ }
+
+ picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1);
+
+ // remove frames / fields with same long_term_frame_idx
+ if (pInfo->img.structure == FRAME) {
+ h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx);
+ } else {
+ polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX);
+
+ if (polarity != INVALID)
+ h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, p_dpb->active_fs->fs_idc, polarity);
+ }
+
+ h264_dpb_mark_pic_long_term(pInfo, long_term_frame_idx, picNumX);
+
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_update_max_long_term_frame_idx ()
+//
+// Set new max long_term_frame_idx
+//
+
+void h264_dpb_mm_update_max_long_term_frame_idx(h264_DecodedPictureBuffer *p_dpb,int32_t max_long_term_frame_idx_plus1)
+{
+ //h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ int32_t idx;
+ int32_t temp;
+ int32_t removed_count;
+ int32_t idx2 = 0;
+
+ p_dpb->max_long_term_pic_idx = max_long_term_frame_idx_plus1 - 1;
+
+ temp = p_dpb->ltref_frames_in_buffer;
+ removed_count = 0;
+
+ // check for invalid frames
+ for (idx = 0; idx < temp; idx++)
+ {
+ idx2 = idx - removed_count;
+ if (idx2 < 16 && idx2 > 0)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx2]);
+
+ if (p_dpb->active_fs->long_term_frame_idx > p_dpb->max_long_term_pic_idx)
+ {
+ removed_count++;
+ h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx2]);
+ h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx2]);
+ }
+ }
+ }
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_unmark_all_short_term_for_reference ()
+//
+// Unmark all short term reference pictures
+//
+
+void h264_dpb_mm_unmark_all_short_term_for_reference (h264_DecodedPictureBuffer *p_dpb)
+{
+ int32_t idx;
+ int32_t temp = p_dpb->ref_frames_in_buffer;
+
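+ // Index 0 is used on every iteration deliberately: h264_dpb_remove_ref_list()
+ // compacts the short-term list (much as h264_dpb_remove_frame_from_dpb() below
+ // shifts fs_dpb_idc), so the next candidate always slides down to fs_ref_idc[0].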
+ for (idx = 0; idx < temp; idx++)
+ {
+ h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+ }
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_mark_current_picture_long_term ()
+//
+// Marks the current picture as long term after unmarking any long term picture
+// already assigned with the same long term frame index
+//
+
+void h264_dpb_mm_mark_current_picture_long_term(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx)
+{
+ int32_t picNumX;
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+ if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME)
+ {
+ h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx);
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+ p_dpb->active_fs->frame.is_long_term = 1;
+ p_dpb->active_fs->frame.long_term_frame_idx = long_term_frame_idx;
+ p_dpb->active_fs->frame.long_term_pic_num = long_term_frame_idx;
+ }
+ else
+ {
+ if (viddec_h264_get_dec_structure(p_dpb->active_fs) == TOP_FIELD)
+ {
+ picNumX = (p_dpb->active_fs->top_field.pic_num << 1) + 1;
+ p_dpb->active_fs->top_field.is_long_term = 1;
+ p_dpb->active_fs->top_field.long_term_frame_idx = long_term_frame_idx;
+
+ // Assign long-term pic num
+ p_dpb->active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + 1;
+ }
+ else
+ {
+ picNumX = (p_dpb->active_fs->bottom_field.pic_num << 1) + 1;
+ p_dpb->active_fs->bottom_field.is_long_term = 1;
+ p_dpb->active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx;
+
+ // Assign long-term pic num
+ p_dpb->active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + 1;
+
+ }
+ h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, p_dpb->fs_dec_idc, viddec_h264_get_dec_structure(p_dpb->active_fs));
+ }
+ // Add to long term list
+ //h264_dpb_add_ltref_list(p_dpb->fs_dec_idc);
+
+ return;
+}
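+// (On the long_term_pic_num assignment above: the current field is by definition
+// the same parity as itself, so it always takes the odd form
+// 2*long_term_frame_idx + 1; e.g. long_term_frame_idx = 3 gives 7.)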
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx ()
+//
+// Mark a long-term reference frame or complementary field pair unused for reference
+// NOTE: Obviously this ftn cannot be used to unmark individual fields...
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx)
+{
+ uint32_t idx;
+ for (idx =0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+
+ if (p_dpb->active_fs->long_term_frame_idx == long_term_frame_idx)
+ {
+ h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ }
+ }
+ return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_long_term_field_for_reference_by_frame_idx ()
+//
+// Mark a long-term reference field unused for reference. However if it is the
+// complementary field (opposite polarity) of the picture stored in fs_idc,
+// we do not unmark it
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx, int32_t fs_idc, int32_t polarity)
+{
+ uint32_t idx;
+ int32_t found = 0;
+ int32_t is_complement = 0;
+
+ for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (found == 0); idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ if (p_dpb->active_fs->long_term_frame_idx == long_term_frame_idx)
+ {
+ if (p_dpb->active_fs->fs_idc == fs_idc)
+ {
+ // Again these seem like redundant checks but for safety until JM is updated
+ if (polarity == TOP_FIELD)
+ is_complement = (p_dpb->active_fs->bottom_field.is_long_term)? 1:0;
+ else if (polarity == BOTTOM_FIELD)
+ is_complement = (p_dpb->active_fs->top_field.is_long_term) ? 1:0;
+ }
+ found = 1;
+ }
+ }
+
+ if (found) {
+ if (is_complement == 0)
+ {
+ h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
+ h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
+ }
+ }
+
+ return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mark_pic_long_term ()
+//
+// This is used on a picture already in the dpb - i.e. not for the current picture
+// dpb_split / dpb_combine field will perform the equivalent functionality in that case
+//
+// Marks a picture as used for long-term reference. Adds it to the long-term
+// reference list. Also removes it from the short term reference list if required
+//
+// Note: if the current picture is a frame, the picture to be marked will be a
+// short-term reference frame or short-term complementary reference field pair
+// We use the pic_num assigned to the frame part of the structure to locate it
+// Both its fields will have their long_term_frame_idx and long_term_pic_num
+// assigned to be equal to long_term_frame_idx
+//
+// If the current picture is a field, the picture to be marked will be a
+// short-term reference field. We use the pic_nums assigned to the field parts of
+// the structure to identify the appropriate field. We assign the long_term_frame_idx
+// of the field equal to long_term_frame_idx.
+//
+// We also check to see if this marking has resulted in both fields of the frame
+// becoming long_term. 
If it has, we update the frame part of the structure by +// setting its long_term_frame_idx +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, int32_t picNumX) +{ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + uint32_t idx; + int32_t mark_done; + int32_t polarity = 0; + + mark_done = 0; + + if (pInfo->img.structure == FRAME) + { + for (idx = 0; (idx < p_dpb->ref_frames_in_buffer) && (!(mark_done)); idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); + + if (p_dpb->active_fs->frame.used_for_reference == 3) + { + if ((!(p_dpb->active_fs->frame.is_long_term))&&(p_dpb->active_fs->frame.pic_num == picNumX)) + { + p_dpb->active_fs->long_term_frame_idx = long_term_frame_idx; + p_dpb->active_fs->frame.long_term_frame_idx = long_term_frame_idx; + p_dpb->active_fs->top_field.long_term_frame_idx = long_term_frame_idx; + p_dpb->active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; + + p_dpb->active_fs->frame.is_long_term = 1; + p_dpb->active_fs->top_field.is_long_term = 1; + p_dpb->active_fs->bottom_field.is_long_term = 1; + + viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 3); + mark_done = 1; + + // Assign long-term pic num + p_dpb->active_fs->frame.long_term_pic_num = long_term_frame_idx; + p_dpb->active_fs->top_field.long_term_pic_num = long_term_frame_idx; + p_dpb->active_fs->bottom_field.long_term_pic_num = long_term_frame_idx; + // Add to long term list + h264_dpb_add_ltref_list(p_dpb, p_dpb->fs_ref_idc[idx]); + // Remove from short-term list + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); + } + } + } + } + else + { + polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX); + p_dpb->active_fs->long_term_frame_idx = long_term_frame_idx; /////BUG + + if (polarity == TOP_FIELD) + { + p_dpb->active_fs->top_field.long_term_frame_idx = long_term_frame_idx; + p_dpb->active_fs->top_field.is_long_term = 1; + viddec_h264_set_is_top_long_term(p_dpb->active_fs, 1); + + // Assign long-term pic num + p_dpb->active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == TOP_FIELD) ? 1 : 0); + + } + else if (polarity == BOTTOM_FIELD) + { + p_dpb->active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx; + p_dpb->active_fs->bottom_field.is_long_term = 1; + viddec_h264_set_is_bottom_long_term(p_dpb->active_fs, 1); + + // Assign long-term pic num + p_dpb->active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == BOTTOM_FIELD) ? 1 : 0); + } + + if (viddec_h264_get_is_long_term(p_dpb->active_fs) == 3) + { + p_dpb->active_fs->frame.is_long_term = 1; + p_dpb->active_fs->frame.long_term_frame_idx = long_term_frame_idx; + h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc); + } + else + { + // We need to add this idc to the long term ref list... + h264_dpb_add_ltref_list(p_dpb, p_dpb->active_fs->fs_idc); + + // If the opposite field is not a short term reference, remove it from the + // short term list. Since we know top field is a reference but both are not long term + // we can simply check that both fields are not references... 
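+ // (For instance, used_for_reference == 1 - only one field still a reference,
+ // the one just marked long-term - satisfies the != 3 test below, so the
+ // store leaves the short-term list.)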
+ if (p_dpb->active_fs->frame.used_for_reference != 3)
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc);
+ }
+ }
+ return;
+} ///// End of mark pic long term
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_adaptive_memory_management ()
+//
+// Perform Adaptive memory control decoded reference picture marking process
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_adaptive_memory_management (h264_Info * pInfo)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ int32_t idx;
+
+ idx = 0;
+
+ while (idx < pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count)
+ {
+ switch (pInfo->SliceHeader.sh_dec_refpic.memory_management_control_operation[idx])
+ {
+ case 1: { //Mark a short-term reference picture as "unused for reference"
+ h264_dpb_mm_unmark_short_term_for_reference(pInfo,
+ pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx]);
+ }
+ break;
+ case 2: { //Mark a long-term reference picture as "unused for reference"
+ h264_dpb_mm_unmark_long_term_for_reference(pInfo,
+ pInfo->SliceHeader.sh_dec_refpic.long_term_pic_num[idx]);
+ }
+ break;
+ case 3: { //Mark a short-term reference picture as "used for long-term reference" and assign a long-term frame index to it
+ h264_dpb_mm_assign_long_term_frame_idx(pInfo,
+ pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx],
+ pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
+ }
+ break;
+ case 4: { //Specify the maximum long-term frame index and
+ //mark all long-term reference pictures having long-term frame indices greater than
+ //the maximum value as "unused for reference"
+ h264_dpb_mm_update_max_long_term_frame_idx (&pInfo->dpb,
+ pInfo->SliceHeader.sh_dec_refpic.max_long_term_frame_idx_plus1[idx]);
+ }
+ break;
+ case 5: { //Mark all reference pictures as "unused for reference" and set the MaxLongTermFrameIdx variable to
+ // "no long-term frame indices"
+ h264_dpb_mm_unmark_all_short_term_for_reference(&pInfo->dpb);
+ h264_dpb_mm_update_max_long_term_frame_idx(&pInfo->dpb, 0);
+ pInfo->img.last_has_mmco_5 = 1;
+ }
+ break;
+ case 6: { //Mark the current picture as "used for long-term reference" and assign a long-term frame index to it
+ h264_dpb_mm_mark_current_picture_long_term(&pInfo->dpb,
+ pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
+ }
+ break;
+ }
+ idx++;
+ }
+
+
+ if (pInfo->img.last_has_mmco_5)
+ {
+ pInfo->img.frame_num = 0;
+ pInfo->SliceHeader.frame_num=0;
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+ if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME)
+ {
+ pInfo->img.bottompoc -= p_dpb->active_fs->frame.poc;
+ pInfo->img.toppoc -= p_dpb->active_fs->frame.poc;
+
+
+ p_dpb->active_fs->frame.poc = 0;
+ p_dpb->active_fs->frame.pic_num = 0;
+ p_dpb->active_fs->frame_num = 0;
+ }
+
+ else if (viddec_h264_get_dec_structure(p_dpb->active_fs) == TOP_FIELD)
+ {
+ p_dpb->active_fs->top_field.poc = p_dpb->active_fs->top_field.pic_num = 0;
+ pInfo->img.toppoc = p_dpb->active_fs->top_field.poc;
+ }
+ else if (viddec_h264_get_dec_structure(p_dpb->active_fs) == BOTTOM_FIELD)
+ {
+ p_dpb->active_fs->bottom_field.poc = p_dpb->active_fs->bottom_field.pic_num = 0;
+
pInfo->img.bottompoc = 0; + } + + h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field,pInfo->active_SPS.num_ref_frames); + } + // Reset the marking count operations for the current picture... + pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count = 0; + + return; +} ////// End of adaptive memory management + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_gaps_in_frame_num_mem_management () +// +// Produces a set of frame_nums pertaining to "non-existing" pictures +// Calls h264_dpb_store_picture_in_dpb +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) +{ + int32_t temp_frame_num = 0; + int32_t idx, prev_idc; + int32_t prev_frame_num_plus1_wrap; + uint32_t temp = 0; + int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); + seq_param_set_used_ptr active_sps = &pInfo->active_SPS; + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + + pInfo->img.gaps_in_frame_num = 0; + + // pInfo->img.last_has_mmco_5 set thru store_picture_in_dpb + if (pInfo->img.last_has_mmco_5) + { + // If the previous picture was an unpaired field, mark it as a dangler + if (p_dpb->used_size) + { + idx = p_dpb->used_size-1; + prev_idc = p_dpb->fs_dpb_idc[idx]; + if (prev_idc != MPD_DPB_FS_NULL_IDC) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + p_dpb->active_fs->frame_num =0; + } + } + pInfo->img.PreviousFrameNumOffset = 0; + //CONFORMANCE_ISSUE + pInfo->img.PreviousFrameNum = 0; + + } + + // Check for gaps in frame_num + if (pInfo->SliceHeader.idr_flag) { + pInfo->img.PreviousFrameNum = pInfo->img.frame_num; + } + // Have we re-started following a recovery point message? + /* + else if(got_sei_recovery || aud_got_restart){ + pInfo->img.PreviousFrameNum = pInfo->img.frame_num; + //got_sei_recovery = 0; + //aud_got_restart = 0; + } + */ + else if (pInfo->img.frame_num != pInfo->img.PreviousFrameNum) + { + if (MaxFrameNum) { + ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp); + } else { + temp = (uint32_t)pInfo->img.PreviousFrameNum + 1; + } + prev_frame_num_plus1_wrap = temp; + if (pInfo->img.frame_num != prev_frame_num_plus1_wrap) + { + pInfo->img.gaps_in_frame_num = (pInfo->img.frame_num < pInfo->img.PreviousFrameNum)? 
((MaxFrameNum + pInfo->img.frame_num -1) - pInfo->img.PreviousFrameNum): (pInfo->img.frame_num - pInfo->img.PreviousFrameNum - 1); + // We should test for an error here - should infer an unintentional loss of pictures + } + } + + + //if(active_sps->gaps_in_frame_num_value_allowed_flag == 0) { + if (pInfo->img.gaps_in_frame_num && (active_sps->gaps_in_frame_num_value_allowed_flag == 0)) { + // infer an unintentional loss of pictures + // only invoke following process for a conforming bitstream + // when gaps_in_frame_num_value_allowed_flag is equal to 1 + pInfo->img.gaps_in_frame_num = 0; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif + //mfd_printf("ERROR STREAM??\n"); + ////// Error handling here---- + } + + /////// Removed following OLO source (Sodaville H.D) + //else if (pInfo->img.gaps_in_frame_num > active_sps->num_ref_frames) { + // // No need to produce any more non-existent frames than the amount required to flush the dpb + // pInfo->img.gaps_in_frame_num = active_sps->num_ref_frames; + //mfd_printf("gaps in frame: %d\n", gaps_in_frame_num); + //} + + // If the previous picture was an unpaired field, mark it as a dangler + if (p_dpb->used_size) + { + idx = p_dpb->used_size-1; + prev_idc = p_dpb->fs_dpb_idc[idx]; + if (prev_idc != MPD_DPB_FS_NULL_IDC) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + if (viddec_h264_get_is_used(p_dpb->active_fs) != 3) { + h264_dpb_mark_dangling_field(p_dpb, p_dpb->active_fs->fs_idc); //, DANGLING_TYPE_GAP_IN_FRAME + } + } + } + + while (temp_frame_num < pInfo->img.gaps_in_frame_num) + { + h264_dpb_assign_frame_store(pInfo, 1); + + // Set up initial markings - not sure if all are needed + viddec_h264_set_dec_structure(p_dpb->active_fs, FRAME); + + if (MaxFrameNum) + ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp); + + p_dpb->active_fs->frame.pic_num = temp; + p_dpb->active_fs->long_term_frame_idx = 0; + p_dpb->active_fs->frame.long_term_pic_num = 0; + viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 0); + + // Note the call below will overwrite some aspects of the img structure with info relating to the + // non-existent picture + // However, since this is called before h264_hdr_decoding_poc() for the current existing picture + // it should be o.k. + if (pInfo->img.pic_order_cnt_type) + h264_hdr_decoding_poc(pInfo, 1, temp); + + pInfo->img.structure = FRAME; + p_dpb->active_fs->frame.poc = pInfo->img.framepoc; + + // call store_picture_in_dpb + + h264_dpb_store_previous_picture_in_dpb(pInfo, 1, 0); + + h264_hdr_post_poc(pInfo, 1, temp, 0); + + temp_frame_num++; + } +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_unmark_for_reference () +// +// Mark FrameStore unused for reference. 
Removes it from the short term reference list
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+ h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+ if (viddec_h264_get_is_used(p_dpb->active_fs)&0x1) p_dpb->active_fs->top_field.used_for_reference = 0;
+ if (viddec_h264_get_is_used(p_dpb->active_fs)&0x2) p_dpb->active_fs->bottom_field.used_for_reference = 0;
+ if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) p_dpb->active_fs->frame.used_for_reference = 0;
+
+ p_dpb->active_fs->frame.used_for_reference = 0;
+ return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_for_long_term_reference ()
+//
+// mark FrameStore unused for reference and reset long term flags
+// This function does not remove it from the long term list
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+ h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+ if (viddec_h264_get_is_used(p_dpb->active_fs)&0x1)
+ {
+ p_dpb->active_fs->top_field.used_for_reference = 0;
+ p_dpb->active_fs->top_field.is_long_term = 0;
+ }
+
+ if (viddec_h264_get_is_used(p_dpb->active_fs)&0x2)
+ {
+ p_dpb->active_fs->bottom_field.used_for_reference = 0;
+ p_dpb->active_fs->bottom_field.is_long_term = 0;
+ }
+ if (viddec_h264_get_is_used(p_dpb->active_fs) == 3)
+ {
+ p_dpb->active_fs->frame.used_for_reference = 0;
+ p_dpb->active_fs->frame.is_long_term = 0;
+ }
+
+ p_dpb->active_fs->frame.used_for_reference = 0;
+ viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 0);
+
+ return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mark_dangling_field
+//
+// Tells HW the previous field was dangling
+// Marks it in SW as such
+// Takes appropriate actions - sys_data needs thinking through...
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+
+ h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+ //PRINTF(MFD_NONE, " fs_idc = %d DANGLING_TYPE = %d \n", fs_idc, reason);
+ /*
+ Make the check that it has not already been marked
+ This covers the situation of a dangling field followed by a
+ frame which is direct output (i.e. never entered into the dpb). 
+ In this case we could attempt to mark the prev unpaired field + as a dangler twice which would upset the HW dpb_disp_q count + */ + + if (viddec_h264_get_is_dangling(p_dpb->active_fs) == 0) + { + switch (viddec_h264_get_dec_structure(p_dpb->active_fs)) + { + case TOP_FIELD: + viddec_h264_set_is_dangling(p_dpb->active_fs, 1); + //PRINTF(MFD_NONE, "FN:%d fs_idc=%d FRAME_FLAG_DANGLING_TOP_FIELD\n ", (h264_frame_number+1), p_dpb->active_fs->fs_idc); + break; + case BOTTOM_FIELD: + //PRINTF(MFD_NONE, " FN:%d fs_idc=%d FRAME_FLAG_DANGLING_BOTTOM_FIELD \n ", (h264_frame_number+1), p_dpb->active_fs->fs_idc); + viddec_h264_set_is_dangling(p_dpb->active_fs, 1); + break; + default: + //PRINTF(MFD_NONE, "FN:%d fs_idc=%d DANGLING: FATAL_ERROR\n ", (h264_frame_number+1), p_dpb->active_fs->fs_idc); + break; + } + + //h264_send_new_decoded_frame(); + } + return; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + + +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_is_used_for_reference () +// +// Check if one of the frames/fields in active_fs is used for reference +// +void h264_dpb_is_used_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t * flag) +{ + + /* Check out below for embedded */ + *flag = 0; + if (p_dpb->active_fs->frame.used_for_reference) + *flag = 1; + else if (viddec_h264_get_is_used(p_dpb->active_fs) ==3) // frame + *flag = p_dpb->active_fs->frame.used_for_reference; + else + { + if (viddec_h264_get_is_used(p_dpb->active_fs)&0x1) // top field + *flag = p_dpb->active_fs->top_field.used_for_reference; + if (viddec_h264_get_is_used(p_dpb->active_fs)&0x2) // bottom field + *flag = *flag || p_dpb->active_fs->bottom_field.used_for_reference; + } +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_idr_memory_management () +// +// Perform Memory management for idr pictures +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_idr_memory_management (h264_Info * pInfo,seq_param_set_used_ptr active_sps, int32_t no_output_of_prior_pics_flag) +{ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + uint32_t idx; + uint32_t i; + int32_t DPB_size; + int32_t FrameSizeInBytes, FrameSizeInMbs; + uint32_t data; + int32_t num_ref_frames = active_sps->num_ref_frames; + int32_t level_idc = active_sps->level_idc; + uint32_t temp_bump_level=0; + + + /// H.D----- + /// There are 2 kinds of dpb flush defined, one is with display, the other is without display + /// The function name dpb_flush actually is just the first, and the 2nd one is for error case or no_prior_output + /// We will rewrite the code below to make it clean and clear + /// + if (no_output_of_prior_pics_flag) + { + + // free all stored pictures + for (idx = 0; idx < p_dpb->used_size; idx = idx + 1) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + + //mfd_printf(" directly freeing fs_idc = %d DSN = 0x%x \n",p_dpb->active_fs->fs_idc, p_dpb->active_fs->first_dsn); + 
viddec_h264_set_is_frame_used(p_dpb->active_fs, 0); + //if( (p_dpb->active_fs->frame_sent == 0x01) && (p_dpb->active_fs->is_output == 0x0)) + { + //DECODED_FRAME sent but not DISPLAY_FRAME + h264_dpb_unmark_for_reference(p_dpb, p_dpb->active_fs->fs_idc); + h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc); + //h264_send_new_display_frame(0x01); //send ignore_frame signal to Host +#ifndef USE_AVC_SHORT_FORMAT + /// Add into drop-out list for all frms in dpb without display + if (!(viddec_h264_get_is_non_existent(p_dpb->active_fs))) { + if ( viddec_h264_get_is_output(&(p_dpb->fs[p_dpb->fs_dpb_idc[idx]])) ) { //// This frame has been displayed but not released + p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs_dpb_idc[idx]; + p_dpb->frame_numbers_need_to_be_removed ++; + } else { //// This frame will be removed without display + p_dpb->frame_id_need_to_be_dropped[p_dpb->frame_numbers_need_to_be_dropped] = p_dpb->fs_dpb_idc[idx]; + p_dpb->frame_numbers_need_to_be_dropped ++; + } + } +#endif + } + + } + + ////////////////////////////////////////// Reset Reference list + for (i = 0; i < p_dpb->ref_frames_in_buffer; i++) + p_dpb->fs_ref_idc[i] = MPD_DPB_FS_NULL_IDC; + + for (i = 0; i < p_dpb->ltref_frames_in_buffer; i++) + p_dpb->fs_ltref_idc[i] = MPD_DPB_FS_NULL_IDC; + + ////////////////////////////////////////// Reset DPB and dpb list + for (i = 0; i < p_dpb->used_size; i++) { + p_dpb->fs[p_dpb->fs_dpb_idc[i]].fs_idc = MPD_DPB_FS_NULL_IDC; + p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC; + } + + p_dpb->used_size = 0; + p_dpb->ref_frames_in_buffer = 0; + p_dpb->ltref_frames_in_buffer = 0; + + p_dpb->last_output_poc = 0x80000000; + } + else { + h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field, num_ref_frames); + } + + if (p_dpb->fs_dec_idc != MPD_DPB_FS_NULL_IDC) // added condition for use of DPB initialization + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); + if (pInfo->img.long_term_reference_flag) + { + p_dpb->max_long_term_pic_idx = 0; + switch (viddec_h264_get_dec_structure(p_dpb->active_fs)) + { + case FRAME : + p_dpb->active_fs->frame.is_long_term = 1; + case TOP_FIELD : + p_dpb->active_fs->top_field.is_long_term = 1; + case BOTTOM_FIELD : + p_dpb->active_fs->bottom_field.is_long_term = 1; + } + p_dpb->active_fs->long_term_frame_idx = 0; + } + else + { + p_dpb->max_long_term_pic_idx = MPD_DPB_FS_NULL_IDC; + viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 0); + } + } + + p_dpb->OutputLevel = 0; + p_dpb->OutputLevelValid = 0; + p_dpb->OutputCtrl = 0; + + + // Set up bumping level - do this every time a parameters set is activated... + if (active_sps->sps_disp.vui_parameters_present_flag) + { + if (active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag) + { + //p_dpb->OutputLevel = active_sps->sps_disp.vui_seq_parameters.num_reorder_frames; + //p_dpb->OutputLevelValid = 1; + } + } + + // Set up bumping level - do this every time a parameters set is activated... 
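+ // The DPB_size constants in the switch below track MaxDPB from Table A-1 of
+ // the AVC spec, in units of 1024 bytes rounded up (148.5 -> 149, 3037.5 -> 3038,
+ // and so on). With FrameSizeInBytes = 384 * FrameSizeInMbs (256 luma plus 128
+ // chroma bytes per macroblock at 4:2:0), a 1920x1088 Level 4.1 stream, for
+ // example, has 8160 MBs = 3133440 bytes per frame, so the bump level computed
+ // further down works out to (12288 << 10) / 3133440 = 4 frames.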
+ switch (level_idc)
+ {
+ case h264_Level1b:
+ case h264_Level1:
+ {
+ if ((active_sps->profile_idc < 100) && ((active_sps->constraint_set_flags & 0x1) == 0)) {
+ DPB_size = 338;
+ }
+ else {
+ DPB_size = 149;
+ }
+
+ break;
+ }
+ case h264_Level11:
+ {
+ DPB_size = 338;
+ break;
+ }
+ case h264_Level12:
+ case h264_Level13:
+ case h264_Level2:
+ {
+ DPB_size = 891;
+ break;
+ }
+ case h264_Level21:
+ {
+ DPB_size = 1782;
+ break;
+ }
+ case h264_Level22:
+ case h264_Level3:
+ {
+ DPB_size = 3038;
+ break;
+ }
+ case h264_Level31:
+ {
+ DPB_size = 6750;
+ break;
+ }
+ case h264_Level32:
+ {
+ DPB_size = 7680;
+ break;
+ }
+ case h264_Level4:
+ case h264_Level41:
+ {
+ DPB_size = 12288;
+ break;
+ }
+ case h264_Level42:
+ {
+ DPB_size = 13056;
+ break;
+ }
+ case h264_Level5:
+ {
+ DPB_size = 41400;
+ break;
+ }
+ case h264_Level51:
+ {
+ DPB_size = 69120;
+ break;
+ }
+ default :
+ DPB_size = 69120;
+ break;
+ }
+
+ FrameSizeInMbs = pInfo->img.PicWidthInMbs * pInfo->img.FrameHeightInMbs;
+ FrameSizeInBytes = (FrameSizeInMbs << 8) + (FrameSizeInMbs << 7);
+
+ if (FrameSizeInBytes)
+ {
+
+ temp_bump_level = ldiv_mod_u((DPB_size << 10), FrameSizeInBytes, &data);
+
+ if (temp_bump_level > 255)
+ {
+ p_dpb->BumpLevel = 255;
+ }
+ else
+ {
+ p_dpb->BumpLevel = (uint8_t)temp_bump_level;
+ }
+ }
+
+ if (p_dpb->BumpLevel == 0)
+ p_dpb->BumpLevel = active_sps->num_ref_frames + 1;
+
+ if (p_dpb->BumpLevel > 16)
+ p_dpb->BumpLevel = 16;
+
+
+ if (active_sps->sps_disp.vui_parameters_present_flag && active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag) {
+
+ if (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > p_dpb->BumpLevel) {
+ //MFD_PARSER_DEBUG(ERROR_H264_DPB);
+ //// err handling here
+
+ //// For some illegal clips, the max dpb length described in vui might exceed the sps's value
+ //// To guarantee normal playback, just select the vui value to override
+ p_dpb->BumpLevel = active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering;
+ }
+ else {
+ p_dpb->BumpLevel = (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > 1) ?
+ (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering) : 1;
+ }
+ }
+
+
+ // A new sequence means automatic frame release
+ //sei_information.disp_frozen = 0;
+
+ return;
+} //// End --- dpb_idr_memory_management
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_frame_from_dpb ()
+//
+// remove one frame from DPB
+// The parameter index is the location of the frame to be removed in the
+// fs_dpb_idc list. The used size is decremented by one
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx)
+{
+ int32_t fs_idc;
+ uint32_t i;
+
+ fs_idc = p_dpb->fs_dpb_idc[idx];
+
+ h264_dpb_set_active_fs(p_dpb, fs_idc);
+ viddec_h264_set_is_frame_used(p_dpb->active_fs, 0);
+
+#ifndef USE_AVC_SHORT_FORMAT
+ //add to support frame relocation interface to host
+ if (!(viddec_h264_get_is_non_existent(p_dpb->active_fs)))
+ {
+ p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs[fs_idc].fs_idc;
+ p_dpb->frame_numbers_need_to_be_removed ++;
+ }
+#endif
+ ///////////////////////////////////////// Reset FS
+ p_dpb->fs[fs_idc].fs_idc = MPD_DPB_FS_NULL_IDC;
+
+ /////Remove unused frame from dpb-list
+ i = idx;
+ while ( (i + 1)< p_dpb->used_size)
+ {
+ p_dpb->fs_dpb_idc[i] = p_dpb->fs_dpb_idc[i + 1];
+ i ++;
+ }
+ p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+
+ ////////////////////////////
+ p_dpb->used_size--;
+
+ return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_unused_frame_from_dpb ()
+//
+// Remove a picture from DPB which is no longer needed.
+// Search for a frame which is not used for reference and has previously been placed
+// in the output queue - if we find one, call h264_dpb_remove_frame_from_dpb() and
+// set flag to 1
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t * flag)
+{
+ uint32_t idx;
+ int32_t first_non_exist_valid, non_exist_idx;
+ int32_t used_for_reference = 0;
+
+ *flag = 0;
+ first_non_exist_valid = 0x0;
+ non_exist_idx = 0x0;
+
+ for (idx = 0; (idx < p_dpb->used_size) && (*flag == 0); idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+ h264_dpb_is_used_for_reference(p_dpb, &used_for_reference);
+
+ //if( (used_for_reference == 0x0 ) && active_fs->is_output && active_fs->is_non_existent == 0x0)
+ //{
+ //PRINTF(MFD_NONE, " requesting to send FREE: fs_idc = %d fb_id = %d \n", active_fs->fs_idc, active_fs->fb_id);
+ //dpb_release_fb(&h264_dpb, active_fs->fb_id, 1);
+ //}
+
+ if (viddec_h264_get_is_output(p_dpb->active_fs) && (used_for_reference == 0))
+ {
+ h264_dpb_remove_frame_from_dpb(p_dpb, idx);
+ *flag = 1;
+ }
+ /*
+ /////// Removed following OLO source (Sodaville H.D)
+ else if ( (first_non_exist_valid == 0x0) && p_dpb->active_fs->is_non_existent )
+ {
+ first_non_exist_valid = 0x01;
+ non_exist_idx = idx;
+ }
+ */
+ }
+ /*
+ /////// Removed following OLO source (Sodaville H.D)
+ if ( *flag == 0x0 && first_non_exist_valid) {
+ h264_dpb_remove_frame_from_dpb(p_dpb,non_exist_idx);
+ *flag = 1;
+ }
+ */
+ return;
+} //// End of h264_dpb_remove_unused_frame_from_dpb
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// 
h264_dpb_get_smallest_poc () +// +// find smallest POC in the DPB which has not as yet been output +// This function only checks for frames and dangling fields... +// unless the dpb used size is one, in which case it will accept an unpaired field +////////////////////////////////////////////////////////////////////////////// +void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, int32_t *pos) +{ + int32_t poc_int; + uint32_t idx; + int32_t first_non_output = 1; + + *pos = MPD_DPB_FS_NULL_IDC; + + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[0]); + poc_int = p_dpb->active_fs->frame.poc; + + for (idx = 0; idx < p_dpb->used_size; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + + if (viddec_h264_get_is_output(p_dpb->active_fs) == 0) + { + //PRINTF(MFD_NONE, " active_fs->fs_idc = %d active_fs->is_used = %d, active_fs->is_dangling = %d , active_fs->poc = %d \n", active_fs->fs_idc, active_fs->is_used, active_fs->is_dangling, active_fs->poc); + if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3) || (viddec_h264_get_is_dangling(p_dpb->active_fs))) + { + if (first_non_output) + { + *pos = idx; + first_non_output = 0; + poc_int = p_dpb->active_fs->frame.poc; + } + else if (poc_int > p_dpb->active_fs->frame.poc) + { + poc_int = p_dpb->active_fs->frame.poc; + *pos = idx; + } + } + else if (p_dpb->used_size == 1) + { + poc_int = p_dpb->active_fs->frame.poc; + *pos = idx; + } + } + } + + *poc = poc_int; + + return; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_split_field () +// +// Extract field information from a frame +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_split_field (h264_DecodedPictureBuffer *p_dpb, h264_Info * pInfo) +{ + + //p_dpb->active_fs->frame.poc = p_dpb->active_fs->frame.poc; + // p_dpb->active_fs->top_field.poc = p_dpb->active_fs->frame.poc; + // This line changed on 11/05/05 KMc + p_dpb->active_fs->top_field.poc = pInfo->img.toppoc; + p_dpb->active_fs->bottom_field.poc = pInfo->img.bottompoc; + + p_dpb->active_fs->top_field.used_for_reference = p_dpb->active_fs->frame.used_for_reference & 1; + p_dpb->active_fs->bottom_field.used_for_reference = p_dpb->active_fs->frame.used_for_reference >> 1; + + p_dpb->active_fs->top_field.is_long_term = p_dpb->active_fs->frame.is_long_term; + p_dpb->active_fs->bottom_field.is_long_term = p_dpb->active_fs->frame.is_long_term; + + p_dpb->active_fs->long_term_frame_idx = p_dpb->active_fs->frame.long_term_frame_idx; + p_dpb->active_fs->top_field.long_term_frame_idx = p_dpb->active_fs->frame.long_term_frame_idx; + p_dpb->active_fs->bottom_field.long_term_frame_idx = p_dpb->active_fs->frame.long_term_frame_idx; + + + // Assign field mvs attached to MB-Frame buffer to the proper buffer + //! Generate field MVs from Frame MVs + // ... 
+ // these will be done in RTL through using proper memory mapping + return; +} + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_combine_field (int32_t use_old) +// +// Generate a frame from top and bottom fields +////////////////////////////////////////////////////////////////////////////// + +void h264_dpb_combine_field(h264_DecodedPictureBuffer *p_dpb, int32_t use_old) +{ + + //remove warning + use_old = use_old; + + p_dpb->active_fs->frame.poc = (p_dpb->active_fs->top_field.poc < p_dpb->active_fs->bottom_field.poc)? + p_dpb->active_fs->top_field.poc: p_dpb->active_fs->bottom_field.poc; + + //p_dpb->active_fs->frame.poc = p_dpb->active_fs->poc; + + + p_dpb->active_fs->frame.used_for_reference = p_dpb->active_fs->top_field.used_for_reference |(p_dpb->active_fs->bottom_field.used_for_reference); + + p_dpb->active_fs->frame.is_long_term = p_dpb->active_fs->top_field.is_long_term |(p_dpb->active_fs->bottom_field.is_long_term <<1); + + if (p_dpb->active_fs->frame.is_long_term) + p_dpb->active_fs->frame.long_term_frame_idx = p_dpb->active_fs->long_term_frame_idx; + + return; + +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_sliding_window_memory_management () +// +// Perform Sliding window decoded reference picture marking process +// It must be the reference frame, complementary reference field pair +// or non-paired reference field that has the smallest value of +// FrameNumWrap which is marked as unused for reference. Note : We CANNOT +// simply use frame_num!!!! 
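+// As a concrete reading of the condition below: with num_ref_frames = 4 and one
+// long-term store, short-term stores are capped at 4 - 1 = 3; once
+// ref_frames_in_buffer reaches 3, fs_ref_idc[0] - the oldest entry, i.e. the
+// one with the smallest FrameNumWrap - is unmarked and removed.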
+//
+// Although we hold frame_num_wrap in SW, currently, this is not
+// being updated for every picture (the b-picture parameter non-update
+// phenomenon of the reference software)
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb, int32_t NonExisting, int32_t num_ref_frames)
+{
+ // if this is a reference pic with sliding window, unmark first ref frame
+ // should this be (p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer)
+ // Rem: adaptive marking can be on a slice by slice basis so we
+ // could have pictures marked as long term reference in adaptive marking and then
+ // the marking mode changed back to sliding_window_memory_management
+ if (p_dpb->ref_frames_in_buffer >= (num_ref_frames - p_dpb->ltref_frames_in_buffer))
+ {
+ h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+
+ if (NonExisting == 0)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+ viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 0);
+ }
+ }
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_store_picture_in_dpb ()
+//
+// First we run the marking procedure.
+// Then, before we add the current frame_store to the list of reference stores we run some checks
+// These include checking the number of existing reference frames
+// in DPB and if necessary, flushing frames.
+//
+// \param NonExisting
+// If non-zero this is called to store a non-existing frame resulting from gaps_in_frame_num
+//////////////////////////////////////////////////////////////////////////////
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_frame_output ()
+//
+// If direct == 1, directly output a frame without storing it in the dpb.
+// Therefore we must set is_used to 0, which I guess means it will not appear
+// in the fs_dpb_idc list and is_output to 1 which means it should be in the
+// fs_output_idc list. 
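+// (A direct-output example: in h264_dpb_store_picture_in_dpb(), a non-reference
+// frame arriving with the dpb already at BumpLevel and with ThisPOC smaller than
+// the smallest not-yet-output POC sets is_direct_output, and comes through here
+// with direct == 1 without ever occupying a dpb slot.)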
+//
+// If it is a non-existing picture we do not actually place it in the output queue
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_frame_output(h264_Info * pInfo, int32_t fs_idc, int32_t direct, int32_t * existing)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+    //h264_dpb_push_output_queue();
+    if (pInfo->sei_information.disp_frozen)
+    {
+        // check pocs
+        if (p_dpb->active_fs->top_field.poc >= pInfo->sei_information.freeze_POC)
+        {
+            if (p_dpb->active_fs->top_field.poc < pInfo->sei_information.release_POC)
+            {
+                viddec_h264_set_is_top_skipped(p_dpb->active_fs, 1);
+            }
+            else
+            {
+                pInfo->sei_information.disp_frozen = 0;
+            }
+        }
+
+        if (p_dpb->active_fs->bottom_field.poc >= pInfo->sei_information.freeze_POC)
+        {
+            if (p_dpb->active_fs->bottom_field.poc < pInfo->sei_information.release_POC)
+            {
+                viddec_h264_set_is_bottom_skipped(p_dpb->active_fs, 1);
+            }
+            else
+            {
+                pInfo->sei_information.disp_frozen = 0;
+            }
+        }
+    }
+
+    if ( viddec_h264_get_broken_link_picture(p_dpb->active_fs) )
+        pInfo->sei_information.broken_link = 1;
+
+    if ( pInfo->sei_information.broken_link)
+    {
+        // Check if this was the recovery point picture - going to have recovery point on
+        // a frame basis
+        if (viddec_h264_get_recovery_pt_picture(p_dpb->active_fs))
+        {
+            pInfo->sei_information.broken_link = 0;
+            // Also reset wait on sei recovery point picture
+            p_dpb->WaitSeiRecovery = 0;
+        }
+        else
+        {
+            viddec_h264_set_is_frame_skipped(p_dpb->active_fs, 3);
+        }
+    }
+    else
+    {
+        // even if this is not a broken link, we need to follow SEI recovery point rules
+        // Did we use an SEI recovery point for the last restart?
+        if ( p_dpb->WaitSeiRecovery )
+        {
+            if ( viddec_h264_get_recovery_pt_picture(p_dpb->active_fs) ) {
+                p_dpb->WaitSeiRecovery = 0;
+            } else {
+                viddec_h264_set_is_frame_skipped(p_dpb->active_fs, 3);
+            }
+        }
+    }
+
+    if ( p_dpb->SuspendOutput )
+    {
+        if ( viddec_h264_get_open_gop_entry(p_dpb->active_fs) ) {
+            p_dpb->SuspendOutput = 0;
+        } else {
+            viddec_h264_set_is_frame_skipped(p_dpb->active_fs, 3);
+        }
+    }
+
+    //h264_send_new_display_frame(0x0);
+    viddec_h264_set_is_output(p_dpb->active_fs, 1);
+
+    if (viddec_h264_get_is_non_existent(p_dpb->active_fs) == 0)
+    {
+        *existing = 1;
+#ifndef USE_AVC_SHORT_FORMAT
+        p_dpb->frame_id_need_to_be_displayed[p_dpb->frame_numbers_need_to_be_displayed] = p_dpb->active_fs->fs_idc;
+        p_dpb->frame_numbers_need_to_be_displayed++;
+#endif
+        //if(direct)
+        //h264_dpb_remove_frame_from_dpb(p_dpb, p_dpb->active_fs->fs_idc);        // Remove dpb.fs_dpb_idc[pos]
+    }
+    else
+    {
+        *existing = 0;
+    }
+
+    if (direct) {
+        viddec_h264_set_is_frame_used(p_dpb->active_fs, 0);
+        p_dpb->active_fs->frame.used_for_reference = 0;
+        p_dpb->active_fs->top_field.used_for_reference = 0;
+        p_dpb->active_fs->bottom_field.used_for_reference = 0;
+        p_dpb->active_fs->fs_idc = MPD_DPB_FS_NULL_IDC;
+    }
+    return;
+} ///////// End of dpb frame output
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_output_one_frame_from_dpb ()
+//
+// Output one frame stored in the DPB. Basically this results in its placement
+// in the fs_output_idc list.
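+// Because the candidate is always chosen via h264_dpb_get_smallest_poc(),
+// repeated calls drain the stores in increasing POC (display) order.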
+// Placement in the output queue should cause an automatic removal from the dpb
+// if the frame store is not being used as a reference.
+// This may need another param for a frame request so that it definitely outputs one non-existing frame
+//////////////////////////////////////////////////////////////////////////////
+int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo, int32_t direct, int32_t request, int32_t num_ref_frames)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    int32_t poc;
+    int32_t pos;
+    int32_t used_for_reference;
+
+    int32_t existing = 0;
+    int32_t is_refused = 0;
+    int32_t is_pushed = 0;
+
+    //remove warning
+    request = request;
+
+    if (direct)
+    {
+        h264_dpb_frame_output(pInfo, p_dpb->fs_dec_idc, 1, &existing);
+    }
+    else
+    {
+        if (p_dpb->used_size != 0)
+        {
+            // Should this be dpb.not_as_yet_output_num > 0 ??
+            // There should maybe be an is_refused == 0 condition here instead...
+            while ((p_dpb->used_size > 0) && (existing == 0) && (is_refused == 0))
+            {
+                // find smallest non-output POC
+                h264_dpb_get_smallest_poc(p_dpb, &poc, &pos);
+                if (pos != MPD_DPB_FS_NULL_IDC)
+                {
+                    // put it into the output queue
+                    h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing);
+
+                    p_dpb->last_output_poc = poc;
+                    if (existing) is_pushed = 1;
+                    // If non-reference, free the frame store and move the empty store to the end of the buffer
+
+                    h264_dpb_is_used_for_reference(p_dpb, &used_for_reference);
+                    if (!(used_for_reference))
+                        h264_dpb_remove_frame_from_dpb(p_dpb, pos);        // Remove dpb.fs_dpb_idc[pos]
+                }
+                else
+                {
+                    int32_t flag;
+                    uint32_t idx;
+
+                    // This is basically an error condition caused by too many reference frames in the DPB.
+                    // It should only happen in errored streams, and can happen if this picture had an MMCO,
+                    // thus disabling h264_dpb_sliding_window_memory_management(), which would normally have
+                    // unmarked the oldest reference frame.
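+                    // The recovery below is two-stage: retry the sliding-window
+                    // unmark, and if that frees nothing (flag == 0), scan
+                    // fs_dpb_idc[] for any store still marked as a reference
+                    // and forcibly unmark and remove it.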
+ h264_dpb_sliding_window_memory_management(p_dpb, 0,num_ref_frames); + h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + + if (flag == 0) { + for (idx = 0; idx < p_dpb->used_size; idx++) + { + h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); + h264_dpb_is_used_for_reference(p_dpb, &used_for_reference); + + if (used_for_reference) { + break; + } + } + + if (idx < p_dpb->used_size) { + // Short term + h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx]); + h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx]); + + // Long term + h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_dpb_idc[idx]); + h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_dpb_idc[idx]); + + // Remove from DPB + h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); + } + } + return 1; + } + } + } + } + + return is_pushed; +} + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_queue_update +// +// This should be called anytime the output queue might be changed +////////////////////////////////////////////////////////////////////////////// + +int32_t h264_dpb_queue_update(h264_Info* pInfo,int32_t push, int32_t direct, int32_t frame_request, int32_t num_ref_frames) +{ + + int32_t frame_output = 0; + + if (push) + { + frame_output = h264_dpb_output_one_frame_from_dpb(pInfo, direct, 0, num_ref_frames); + } + else if (frame_request) + { + frame_output = h264_dpb_output_one_frame_from_dpb(pInfo, 0, 1,num_ref_frames); + } + + + return frame_output; + +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +////////////////////////////////////////////////////////////////////////////// +// h264_dpb_flush_dpb () +// +// Unmarks all reference pictures in the short-term and long term lists and +// in doing so resets the lists. 
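+// (For example, the size-change path in h264_dpb_reset_dpb() below calls
+// h264_dpb_flush_dpb(pInfo, 1, 0, num_ref_frames) to output everything and
+// empty the buffer.)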
+//
+// Flushing the dpb adds all the frames currently in the dpb that are not already
+// on the output list to the output list, and removes them from the dpb (they will
+// all be marked as unused for reference first)
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_flush_dpb (h264_Info* pInfo, int32_t output_all, int32_t keep_complement, int32_t num_ref_frames)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    int32_t idx, flag;
+    int32_t ref_frames_in_buffer;
+
+    ref_frames_in_buffer = p_dpb->ref_frames_in_buffer;
+
+    for (idx = 0; idx < ref_frames_in_buffer; idx++) {
+        h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+        h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+    }
+
+    ref_frames_in_buffer = p_dpb->ltref_frames_in_buffer;
+
+    for (idx = 0; idx < ref_frames_in_buffer; idx++)
+    {
+        h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[0]);
+        h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]);
+    }
+
+    // output frames in POC order
+    if (output_all) {
+        while ((p_dpb->used_size > 0) && (p_dpb->used_size - keep_complement)) {
+            h264_dpb_queue_update(pInfo, 1, 0, 0, num_ref_frames);
+        }
+    }
+
+    flag = 1;
+    while (flag) {
+        h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+    }
+
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_reset_dpb ()
+//
+// Used to reset the contents of the dpb.
+// Must calculate memory (aligned) pointers for each of the possible frame stores
+//
+// Also want to calculate the possible max dpb size in terms of frames
+// We should have an active SPS when we call this function to calculate the bumping level
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_reset_dpb(h264_Info * pInfo, int32_t PicWidthInMbs, int32_t FrameHeightInMbs, int32_t SizeChange, int32_t no_output_of_prior_pics_flag)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    int32_t num_ref_frames = pInfo->active_SPS.num_ref_frames;
+
+
+    // If halfway through a frame, then frame-in-progress will still be high,
+    // so mark the previous field as a dangling field. This is also needed to
+    // keep cs7050_sif_dpb_disp_numb_ptr correct. Better to reset instead?
+    if (p_dpb->used_size)
+    {
+        int32_t idx;
+        idx = p_dpb->used_size - 1;
+        if (p_dpb->fs_dpb_idc[idx] != MPD_DPB_FS_NULL_IDC)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+            if (viddec_h264_get_is_used(p_dpb->active_fs) != 3)
+                h264_dpb_mark_dangling_field(p_dpb, p_dpb->active_fs->fs_idc);  //, DANGLING_TYPE_DPB_RESET
+        }
+    }
+
+    // initialize software DPB
+    if (p_dpb->active_fs) {
+        viddec_h264_set_dec_structure(p_dpb->active_fs, INVALID);
+    }
+    h264_dpb_idr_memory_management(pInfo, &pInfo->active_SPS, no_output_of_prior_pics_flag);  // implied no_output_of_prior_pics_flag==1
+
+
+    // May always be a size change which calls this function now...
+    // could eliminate the branch below
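+    // On a size change the branch resets the stored dimensions, drops
+    // fs_dec_idc and flushes the whole DPB (output_all = 1, keep_complement = 0).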
+    if (SizeChange)
+    {
+
+        /***
+        Note : 21/03/2005 14:16
+        Danger associated with resetting curr_alloc_mem, as it would allow the FW to reallocate
+        frame stores from 0 -> NUM_FRAME_STORES again - this could lead to queue overflow and corruption
+
+        Placed in the size change condition in the hope that this will only ensure the dpb is empty,
+        and thus this behaviour is valid, before continuing again
+        ***/
+
+
+        p_dpb->PicWidthInMbs = PicWidthInMbs;
+        p_dpb->FrameHeightInMbs = FrameHeightInMbs;
+
+        p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+        //Flush the current DPB.
+        h264_dpb_flush_dpb(pInfo, 1, 0, num_ref_frames);
+    }
+
+    return;
+} ///// End of reset DPB
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// ---------------------------------------------------------------------------
+// Note that if a 'missing_pip_fb' condition exists, the message will be
+// sent to the host each time setup_free_fb is called. However, since this
+// condition is not expected to happen if the pre-defined steps are followed, we let
+// it be for now and will change it if required. Basically, as long as the host
+// enables PiP after adding PiP buffers and disables PiP before removing buffers,
+// and matches PiP fb_id's with normal decode fb_id's, this condition should
+// not occur.
+// ---------------------------------------------------------------------------
+int32_t dpb_setup_free_fb( h264_DecodedPictureBuffer *p_dpb, uint8_t* fb_id, pip_setting_t* pip_setting )
+{
+    uint8_t idx;
+
+    //remove warning
+    pip_setting = pip_setting;
+
+
+    for (idx = 0; idx < NUM_DPB_FRAME_STORES; idx++)
+    {
+        if (p_dpb->fs[idx].fs_idc == MPD_DPB_FS_NULL_IDC)
+        {
+            *fb_id = idx;
+            break;
+        }
+    }
+
+    if (idx == NUM_DPB_FRAME_STORES)
+        return 1;
+
+    p_dpb->fs[idx].fs_idc = idx;
+
+    return 0;
+
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_assign_frame_store ()
+//
+// may need a non-existing option parameter
+//
+
+int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting)
+{
+    uint8_t idc = MPD_DPB_FS_NULL_IDC;
+    pip_setting_t pip_setting;
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+
+    while ( dpb_setup_free_fb(p_dpb, &idc, &pip_setting) != 0 ) {
+        ///
+        /// Generally this is triggered in an error case: no more frame buffers are available
+        /// for the next picture. What we do here is just remove the one with the min POC before getting more info
+        ///
+
+        int32_t pos = 0, poc = 0, existing = 1;
+
+        // find smallest non-output POC
+        h264_dpb_get_smallest_poc(p_dpb, &poc, &pos);
+        if (pos != MPD_DPB_FS_NULL_IDC)
+        {
+            // put it into the output queue
+            h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing);
+            p_dpb->last_output_poc = poc;
+            h264_dpb_remove_frame_from_dpb(p_dpb, pos);        // Remove dpb.fs_dpb_idc[pos]
+        }
+    }
+
+
+    if (NonExisting) {
+        p_dpb->fs_non_exist_idc = idc;
+    } else {
+        p_dpb->fs_dec_idc = idc;
+    }
+
+    //add to support frame relocation interface to host
+    if (!NonExisting)
+    {
+        p_dpb->frame_numbers_need_to_be_allocated = 1;
+        p_dpb->frame_id_need_to_be_allocated = p_dpb->fs_dec_idc;
+    }
+
+
+    ///////////////////////////////h264_dpb_reset_fs();
+    h264_dpb_set_active_fs(p_dpb, idc);
+    p_dpb->active_fs->fs_flag_1 = 0;
+    p_dpb->active_fs->fs_flag_2 = 0;
+    viddec_h264_set_is_non_existent(p_dpb->active_fs, NonExisting);
+    viddec_h264_set_is_output(p_dpb->active_fs, (NonExisting ? 1 : 0));
+
+    p_dpb->active_fs->pic_type = ((FRAME_TYPE_INVALID << FRAME_TYPE_TOP_OFFSET) | FRAME_TYPE_INVALID);
+
+    // Rem: fields not reset here:
+    // ->is_used is reset on removal from dpb, no need for it here
+    // ->poc would only be changed when we overwrite on insert_Picture_in_dpb(),
+    //   but would be used by get_smallest_poc()
+    // ->top.poc would also not be overwritten until a new valid value comes along,
+    //   but I don't think it is used before then so no need to reset
+    //p_dpb->active_fs->is_long_term = 0;
+    p_dpb->active_fs->frame.used_for_reference = 0;
+    p_dpb->active_fs->frame.poc = 0;
+
+    return 1;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_update_queue_dangling_field (h264_Info * pInfo)
+//
+// Update the DPB for the dangling field special case
+//
+void h264_dpb_update_queue_dangling_field(h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb;
+    int32_t prev_pic_unpaired_field = 0;
+
+    if (dpb_ptr->used_size > dpb_ptr->BumpLevel)
+    {
+        if (dpb_ptr->fs_dpb_idc[dpb_ptr->used_size - 1] != MPD_DPB_FS_NULL_IDC)
+        {
+            h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size - 1]);
+            if (viddec_h264_get_is_used(dpb_ptr->active_fs) != 3)
+            {
+                prev_pic_unpaired_field = 1;
+            }
+        }
+
+        if (pInfo->img.structure != FRAME)
+        {
+            // To prove this is the second field,
+            // 1) The previous picture is an (as yet) unpaired field
+            if (prev_pic_unpaired_field)
+            {
+                // If we establish the previous pic was an unpaired field and this picture is not
+                // its complement, the previous picture was a dangling field
+                if (pInfo->img.second_field == 0) {
+                    while (dpb_ptr->used_size > dpb_ptr->BumpLevel)
+                        h264_dpb_queue_update(pInfo, 1, 0, 0, pInfo->active_SPS.num_ref_frames);  // flush a frame
+                }
+            }
+        }
+        else if (prev_pic_unpaired_field) {
+            while (dpb_ptr->used_size > dpb_ptr->BumpLevel)
+                h264_dpb_queue_update(pInfo, 1, 0, 0, pInfo->active_SPS.num_ref_frames);  // flush a frame
+        }
+    }
+
+
+    return;
+} ///// End of update queue dangling field
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_init_frame_store (h264_Info * pInfo)
+//
+// Set the frame store to be used in decoding the picture
+//
+
+void h264_dpb_init_frame_store(h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb;
+
+    int32_t free_fs_found;
+    int32_t idx = 0;
+    int32_t prev_pic_unpaired_field = 0;
+    int32_t prev_idc = MPD_DPB_FS_NULL_IDC;
+    int32_t structure = pInfo->img.structure;
+
+    if (dpb_ptr->used_size)
+    {
+        idx = dpb_ptr->used_size - 1;
+        prev_idc = dpb_ptr->fs_dpb_idc[idx];
+    }
+
+    if (prev_idc != MPD_DPB_FS_NULL_IDC)
+    {
+        h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size - 1]);
+        if (viddec_h264_get_is_used(dpb_ptr->active_fs) != 3)
+        {
+            //PRINTF(MFD_NONE, " FN: %d p_dpb->active_fs->is_used = %d \n", (h264_frame_number+1), p_dpb->active_fs->is_used);
+            prev_pic_unpaired_field = 1;
+        }
+    }
+
+    //if ((pInfo->img.curr_has_mmco_5) || (pInfo->img.idr_flag)) curr_fld_not_prev_comp = 1;
+
+    if (structure != FRAME)
+    {
+
+        // To prove this is the second field,
+        // 1) The previous picture is an (as yet) unpaired field
+        if (prev_pic_unpaired_field)
+        {
+            // If we establish the previous pic was an unpaired field and this picture is not
+            // its complement, the previous picture was a dangling field
+            if (pInfo->img.second_field == 0)
+                h264_dpb_mark_dangling_field(dpb_ptr, dpb_ptr->active_fs->fs_idc);  //, DANGLING_TYPE_FIELD
+        }
+    }
+    else if (prev_pic_unpaired_field) {
+        h264_dpb_mark_dangling_field(dpb_ptr, dpb_ptr->active_fs->fs_idc);  //, DANGLING_TYPE_FRAME
+    }
+
+    free_fs_found = 0;
+
+    // If this is not a second field, we must find a free space for the current picture
+    if (!(pInfo->img.second_field))
+    {
+        dpb_ptr->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+        free_fs_found = h264_dpb_assign_frame_store(pInfo, 0);
+        //h264_frame_number++;
+        //PRINTF(MFD_NONE, " FN: %d (inc) fs_idc = %d \n", (h264_frame_number+1), dpb.fs_dec_idc);
+    }
+
+    h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dec_idc);
+
+    ////////////// TODO: The following init
+#if 1
+    if ( pInfo->img.second_field) {
+        //p_dpb->active_fs->second_dsn = pInfo->img.dsn;
+        //p_dpb->active_fs->prev_dsn = pInfo->img.prev_dsn;
+        if (dpb_ptr->active_fs->pic_type == FRAME_TYPE_IDR ||
+                dpb_ptr->active_fs->pic_type == FRAME_TYPE_I) {
+
+            viddec_h264_set_first_field_intra(dpb_ptr->active_fs, 1);
+        } else {
+            viddec_h264_set_first_field_intra(dpb_ptr->active_fs, 0);
+        }
+
+    }
+    else {
+        //p_dpb->active_fs->first_dsn = pInfo->img.dsn;
+        //p_dpb->active_fs->prev_dsn = pInfo->img.prev_dsn;
+        viddec_h264_set_first_field_intra(dpb_ptr->active_fs, 0);
+    }
+
+    if (pInfo->img.structure == FRAME) {
+        //dpb_ptr->active_fs->second_dsn = 0x0;
+    }
+
+    if ( pInfo->sei_information.broken_link_pic )
+    {
+        viddec_h264_set_broken_link_picture(dpb_ptr->active_fs, 1);
+        pInfo->sei_information.broken_link_pic = 0;
+    }
+
+    if ((pInfo->img.frame_num == pInfo->sei_information.recovery_frame_num) && (pInfo->SliceHeader.nal_ref_idc != 0))
+        viddec_h264_set_recovery_pt_picture(dpb_ptr->active_fs, 1);
+
+    //if ((( gRestartMode.aud ) || ( gRestartMode.sei )) && ( !gRestartMode.idr))
+    if (pInfo->img.recovery_point_found == 6)
+    {
+        viddec_h264_set_open_gop_entry(dpb_ptr->active_fs, 1);
+        pInfo->dpb.SuspendOutput = 1;
+    }
+#endif
+
+    if ((pInfo->img.second_field) || (free_fs_found))
+    {
+        viddec_h264_set_dec_structure(dpb_ptr->active_fs, pInfo->img.structure);
+        viddec_h264_set_is_output(dpb_ptr->active_fs, 0);
+
+        switch (pInfo->img.structure)
+        {
+        case (FRAME)     : {
+            dpb_ptr->active_fs->frame.pic_num = pInfo->img.frame_num;
+            dpb_ptr->active_fs->frame.long_term_frame_idx = 0;
+            dpb_ptr->active_fs->frame.long_term_pic_num = 0;
+            dpb_ptr->active_fs->frame.used_for_reference = 0;
+            dpb_ptr->active_fs->frame.is_long_term = 0;
+            //dpb_ptr->active_fs->frame.structure = pInfo->img.structure;
+            dpb_ptr->active_fs->frame.poc = pInfo->img.framepoc;
+        }
+        break;
+        case (TOP_FIELD) : {
+            dpb_ptr->active_fs->top_field.pic_num = pInfo->img.frame_num;
+            dpb_ptr->active_fs->top_field.long_term_frame_idx = 0;
+            dpb_ptr->active_fs->top_field.long_term_pic_num = 0;
dpb_ptr->active_fs->top_field.used_for_reference = 0; + dpb_ptr->active_fs->top_field.is_long_term = 0; + //dpb_ptr->active_fs->top_field.structure = pInfo->img.structure; + dpb_ptr->active_fs->top_field.poc = pInfo->img.toppoc; + } + break; + case(BOTTOM_FIELD) : { + dpb_ptr->active_fs->bottom_field.pic_num = pInfo->img.frame_num; + dpb_ptr->active_fs->bottom_field.long_term_frame_idx = 0; + dpb_ptr->active_fs->bottom_field.long_term_pic_num = 0; + dpb_ptr->active_fs->bottom_field.used_for_reference = 0; + dpb_ptr->active_fs->bottom_field.is_long_term = 0; + //dpb_ptr->active_fs->bottom_field.structure = pInfo->img.structure; + dpb_ptr->active_fs->bottom_field.poc = pInfo->img.bottompoc; + } + break; + } + } + else + { + // Need to drop a frame or something here + } + + return; +} ///// End of init Frame Store + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +////////////////////////////////////////////////////////////////////////////// +// Decoding POC for current Picture +// 1) pic_order_cnt_type (0, 1, 2) +// +////////////////////////////////////////////////////////////////////////////// + +void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num) +{ + int32_t MaxPicOrderCntLsb = (1<<(pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4)); + int32_t delta_pic_order_count[2]; + int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); + + int32_t AbsFrameNum =0; + int32_t ExpectedDeltaPerPicOrderCntCycle =0; + int32_t PicOrderCntCycleCnt = 0; + int32_t FrameNumInPicOrderCntCycle =0; + int32_t ExpectedPicOrderCnt =0; + + int32_t actual_frame_num =0; + + + + if (NonExisting) actual_frame_num = frame_num; + else actual_frame_num = pInfo->img.frame_num; + + switch (pInfo->active_SPS.pic_order_cnt_type) + { + case 0: + if (NonExisting != 0) break; + + if (pInfo->SliceHeader.idr_flag) + { + pInfo->img.PicOrderCntMsb = 0; + pInfo->img.PrevPicOrderCntLsb = 0; + } + else if (pInfo->img.last_has_mmco_5) + { + if (pInfo->img.last_pic_bottom_field) + { + pInfo->img.PicOrderCntMsb = 0; + pInfo->img.PrevPicOrderCntLsb = 0; + } + else + { + pInfo->img.PicOrderCntMsb = 0; + pInfo->img.PrevPicOrderCntLsb = pInfo->img.toppoc; + } + } + + // Calculate the MSBs of current picture + if ((pInfo->img.pic_order_cnt_lsb < pInfo->img.PrevPicOrderCntLsb) && + ((pInfo->img.PrevPicOrderCntLsb - pInfo->img.pic_order_cnt_lsb )>=(MaxPicOrderCntLsb>>1)) ) + { + pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb + MaxPicOrderCntLsb; + } else if ((pInfo->img.pic_order_cnt_lsb > pInfo->img.PrevPicOrderCntLsb) && + ((pInfo->img.pic_order_cnt_lsb - pInfo->img.PrevPicOrderCntLsb ) > (MaxPicOrderCntLsb>>1)) ) + { + pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb - MaxPicOrderCntLsb; + } else + { + pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb; + } + + // 2nd + + if (pInfo->img.field_pic_flag==0) + { + //frame pix + pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb; + pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.delta_pic_order_cnt_bottom; + pInfo->img.ThisPOC = pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? 
pInfo->img.toppoc : pInfo->img.bottompoc; // POC200301 + } + else if (pInfo->img.bottom_field_flag==0) + { //top field + pInfo->img.ThisPOC= pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb; + } + else + { //bottom field + pInfo->img.ThisPOC= pInfo->img.bottompoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb; + } + pInfo->img.framepoc=pInfo->img.ThisPOC; + + if ( pInfo->img.frame_num != pInfo->old_slice.frame_num) + pInfo->img.PreviousFrameNum = pInfo->img.frame_num; + + if (pInfo->SliceHeader.nal_ref_idc) + { + pInfo->img.PrevPicOrderCntLsb = pInfo->img.pic_order_cnt_lsb; + pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb; + } + + break; + case 1: { + if (NonExisting) + { + delta_pic_order_count[0] = 0; + delta_pic_order_count[1] = 0; + } + else + { + delta_pic_order_count[0] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 0 : pInfo->img.delta_pic_order_cnt[0]; + delta_pic_order_count[1] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 0 : + ( (!pInfo->active_PPS.pic_order_present_flag) && (!(pInfo->img.field_pic_flag))) ? 0 : + pInfo->img.delta_pic_order_cnt[1]; + } + + // this if branch should not be taken during processing of a gap_in_frame_num pic since + // an IDR picture cannot produce non-existent frames... + if (pInfo->SliceHeader.idr_flag) + { + pInfo->img.FrameNumOffset = 0; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (pInfo->img.frame_num) + { + pInfo->sw_bail = 1; + } +#endif +#endif + } + else + { + + if (actual_frame_num < pInfo->img.PreviousFrameNum) + { + pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum; + } + else + { + pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset; + } + } + + // pInfo->img.num_ref_frames_in_pic_order_cnt_cycle set from SPS + // so constant between existent and non-existent frames + if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle) + AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num; + else + AbsFrameNum = 0; + + // pInfo->img.disposable_flag should never be true for a non-existent frame since these are always + // references... 
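+        // Sketch of the spec computation implemented below (clause 8.2.4.2.2),
+        // written with spec names rather than the locals here:
+        //   picOrderCntCycleCnt        = (absFrameNum - 1) / num_ref_frames_in_pic_order_cnt_cycle
+        //   frameNumInPicOrderCntCycle = (absFrameNum - 1) % num_ref_frames_in_pic_order_cnt_cycle
+        //   expectedPicOrderCnt        = picOrderCntCycleCnt * ExpectedDeltaPerPicOrderCntCycle
+        //                                + sum(offset_for_ref_frame[0..frameNumInPicOrderCntCycle])
+        // ldiv_mod_u()/mult_u() below supply the divide/modulo and multiply
+        // without a hardware divider (see h264parse_math.c in this patch).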
+        if ((pInfo->SliceHeader.nal_ref_idc == 0) && (AbsFrameNum > 0)) AbsFrameNum = AbsFrameNum - 1;
+
+        // 3rd
+        ExpectedDeltaPerPicOrderCntCycle = pInfo->active_SPS.expectedDeltaPerPOCCycle;
+
+        if (AbsFrameNum)
+        {
+            // Rem: pInfo->img.num_ref_frames_in_pic_order_cnt_cycle takes a max value of 255 (8 bit)
+            // Frame num may be 2^16 (17 bits)
+            // I guess we really have to treat AbsFrameNum as a 32 bit number
+            uint32_t temp = 0;
+            int32_t i = 0;
+            int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE];
+
+            if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle)
+                PicOrderCntCycleCnt = ldiv_mod_u((uint32_t)(AbsFrameNum - 1), (uint32_t)pInfo->img.num_ref_frames_in_pic_order_cnt_cycle, &temp);
+
+            ExpectedPicOrderCnt = mult_u((uint32_t)PicOrderCntCycleCnt, (uint32_t)ExpectedDeltaPerPicOrderCntCycle);
+
+            FrameNumInPicOrderCntCycle = temp;
+
+            //ExpectedPicOrderCnt +=pInfo->active_SPS.expectedDeltaPerPOCCycle;
+#ifndef USER_MODE
+            h264_Parse_Copy_Offset_Ref_Frames_From_DDR(pInfo, offset_for_ref_frame, pInfo->active_SPS.seq_parameter_set_id);
+            for (i = 0; i <= FrameNumInPicOrderCntCycle; i++)
+                ExpectedPicOrderCnt += offset_for_ref_frame[i];
+#else
+            for (i = 0; i <= FrameNumInPicOrderCntCycle; i++)
+                ExpectedPicOrderCnt += pInfo->active_SPS.offset_for_ref_frame[i];
+#endif
+        }
+        else {
+            ExpectedPicOrderCnt = 0;
+        }
+
+        if (pInfo->SliceHeader.nal_ref_idc == 0)
+            ExpectedPicOrderCnt += pInfo->img.offset_for_non_ref_pic;
+
+        if (!(pInfo->img.field_pic_flag))
+        {
+            pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0];
+            pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[1];
+            pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc) ? pInfo->img.toppoc : pInfo->img.bottompoc;
+            pInfo->img.ThisPOC = pInfo->img.framepoc;
+        }
+        else if (!(pInfo->img.bottom_field_flag))
+        {
+            //top field
+            pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0];
+            pInfo->img.ThisPOC = pInfo->img.toppoc;
+            pInfo->img.bottompoc = 0;
+        }
+        else
+        {
+            //bottom field
+            pInfo->img.toppoc = 0;
+            pInfo->img.bottompoc = ExpectedPicOrderCnt + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[0];
+            pInfo->img.ThisPOC = pInfo->img.bottompoc;
+        }
+
+        //CONFORMANCE_ISSUE
+        pInfo->img.framepoc = pInfo->img.ThisPOC;
+
+        //CONFORMANCE_ISSUE
+        pInfo->img.PreviousFrameNum = pInfo->img.frame_num;
+        pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+
+    }
+    break;
+    case 2: {    // POC MODE 2
+        if (pInfo->SliceHeader.idr_flag)
+        {
+            pInfo->img.FrameNumOffset = 0;
+            pInfo->img.framepoc = 0;
+            pInfo->img.toppoc = 0;
+            pInfo->img.bottompoc = 0;
+            pInfo->img.ThisPOC = 0;
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            if (pInfo->img.frame_num)
+            {
+                pInfo->sw_bail = 1;
+            }
+#endif
+#endif
+        }
+        else
+        {
+            if (pInfo->img.last_has_mmco_5)
+            {
+                pInfo->img.PreviousFrameNum = 0;
+                pInfo->img.PreviousFrameNumOffset = 0;
+            }
+            if (actual_frame_num < pInfo->img.PreviousFrameNum)
+                pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum;
+            else
+                pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset;
+
+            AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num;
+            if (pInfo->SliceHeader.nal_ref_idc == 0) pInfo->img.ThisPOC = (AbsFrameNum << 1) - 1;
+            else                                     pInfo->img.ThisPOC = (AbsFrameNum << 1);
+
+            if (!(pInfo->img.field_pic_flag))
+            {
+                pInfo->img.toppoc = pInfo->img.ThisPOC;
+                pInfo->img.bottompoc = pInfo->img.ThisPOC;
+                pInfo->img.framepoc = pInfo->img.ThisPOC;
+            }
+            else if (!(pInfo->img.bottom_field_flag))
+            {
+                pInfo->img.toppoc = pInfo->img.ThisPOC;
+                pInfo->img.framepoc = pInfo->img.ThisPOC;
+            }
+            else
+            {
+                pInfo->img.bottompoc = pInfo->img.ThisPOC;
+                pInfo->img.framepoc = pInfo->img.ThisPOC;
+            }
+        }
+
+        //CONFORMANCE_ISSUE
+        pInfo->img.PreviousFrameNum = pInfo->img.frame_num;
+        pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+    }
+    break;
+    default:
+        break;
+    }
+
+    return;
+} //// End of decoding_POC
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_hdr_post_poc ()
+//
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_hdr_post_poc(h264_Info* pInfo, int32_t NonExisting, int32_t frame_num, int32_t use_old)
+{
+    int32_t actual_frame_num = (NonExisting) ? frame_num :
+                               (use_old)     ? pInfo->old_slice.frame_num :
+                               pInfo->img.frame_num;
+
+    int32_t disposable_flag = (use_old) ? (pInfo->old_slice.nal_ref_idc == 0) :
+                              (pInfo->SliceHeader.nal_ref_idc == 0);
+
+    switch (pInfo->img.pic_order_cnt_type)
+    {
+    case 0: {
+        pInfo->img.PreviousFrameNum = actual_frame_num;
+        if ((disposable_flag == 0) && (NonExisting == 0))
+        {
+            pInfo->img.PrevPicOrderCntLsb = (use_old) ? pInfo->old_slice.pic_order_cnt_lsb :
+                                            pInfo->SliceHeader.pic_order_cnt_lsb;
+            pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb;
+        }
+    }
+    break;
+    case 1: {
+        pInfo->img.PreviousFrameNum = actual_frame_num;
+        pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+    }
+    break;
+    case 2: {
+        pInfo->img.PreviousFrameNum = actual_frame_num;
+        pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+
+    }
+    break;
+
+    default: {
+    } break;
+    }
+
+    return;
+} ///// End of h264_hdr_post_poc
+
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_math.c b/mixvbp/vbp_plugin/h264/h264parse_math.c
new file mode 100755
index 0000000..ec48cc8
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_math.c
@@ -0,0 +1,84 @@
+//#include "math.h"
+// Arithmetic functions using add & subtract
+
+#include "h264parse.h"
+
+// Shift-and-add multiply: accumulate var1 for every set bit of var2
+uint32_t mult_u(register uint32_t var1, register uint32_t var2)
+{
+
+    register unsigned long var_out = 0;
+
+    while (var2 > 0)
+    {
+
+        if (var2 & 0x01)
+        {
+            var_out += var1;
+        }
+        var2 >>= 1;
+        var1 <<= 1;
+    }
+    return var_out;
+
+}// mult_u
+
+// Restoring long division: returns a / b and leaves a % b in *mod
+uint32_t ldiv_mod_u(uint32_t a, uint32_t b, uint32_t * mod)
+{
+    register unsigned long div = b;
+    register unsigned long res = 0;
+    register unsigned long bit = 0x1;
+
+    if (!div)
+    {
+        *mod = 0;
+        return 0xffffffff;    // Div by 0
+    }
+
+    if (a < b)
+    {
+        *mod = a;
+        return 0;    // It won't even go once
+    }
+
+    // align the divisor's MSB with bit 31, tracking the shift in 'bit'
+    while (!(div & 0x80000000))
+    {
+        div <<= 1;
+        bit <<= 1;
+    }
+
+    while (bit)
+    {
+        if (div <= a)
+        {
+            res |= bit;
+            a -= div;
+        }
+        div >>= 1;
+        bit >>= 1;
+    }
+    *mod = a;
+    return res;
+}// ldiv_mod_u
+
+
+// Divide producing a 16-bit quotient; note the strict '<' here,
+// unlike the '<=' used in ldiv_mod_u() above
+unsigned ldiv_u(register unsigned a, register unsigned b)
+{
+    register unsigned div = b << 16;
+    register unsigned res = 0;
+    register unsigned bit = 0x10000;
+
+    while (bit)
+    {
+        div >>= 1;
+        bit >>= 1;
+        if (div < a)
+        {
+            res |= bit;
+            a -= div;
+        }
+    }
+
+    return res;
+}
+
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_mem.c b/mixvbp/vbp_plugin/h264/h264parse_mem.c
new file mode 100755
index 0000000..b5a0145
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_mem.c
@@ -0,0 +1,198 @@
+
+/*!
+ ***********************************************************************
+ *  \file: h264parse_mem.c
+ *
+ ***********************************************************************
+ */
+
+//#include
+
+#include "h264parse.h"
+
+
+// ---------------------------------------------------------------------------
+// IMPORTANT: note that in this implementation the fill value c is an int,
+// not a char, and only whole 32-bit words are written (num is in bytes;
+// any trailing num % 4 bytes are left untouched)
+// ---------------------------------------------------------------------------
+void* h264_memset( void* buf, uint32_t c, uint32_t num )
+{
+    uint32_t* buf32 = (uint32_t*)buf;
+    uint32_t size32 = ( num >> 2 );
+    uint32_t i;
+
+    for ( i = 0; i < size32; i++ )
+    {
+        *buf32++ = c;
+    }
+
+    return buf;
+}
+
+
+// Word-wise copy; like h264_memset() above it ignores any trailing bytes
+void* h264_memcpy( void* dest, void* src, uint32_t num )
+{
+    int32_t* dest32 = (int32_t*)dest;
+    int32_t* src32 = (int32_t*)src;
+    uint32_t size32 = ( num >> 2 );
+    uint32_t i;
+
+    for ( i = 0; i < size32; i++ )
+    {
+        *dest32++ = *src32++;
+    }
+
+    return dest;
+}
+
+
+#ifndef USER_MODE
+
+//h264_Parse_Copy_Pps_To_DDR () copy local pps to ddr mem
+void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId)
+{
+    uint32_t copy_size = sizeof(pic_param_set);
+    uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL + nPPSId * copy_size;
+
+    if (nPPSId < MAX_NUM_PPS)
+    {
+        cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 1, 0);
+    }
+
+    return;
+
+}
+//end of h264_Parse_Copy_Pps_To_DDR
+
+
+// h264_Parse_Copy_Pps_From_DDR copy a pps with nPPSId from ddr mem to local PPS
+void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId)
+{
+
+    uint32_t copy_size = sizeof(pic_param_set);
+    uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL + nPPSId * copy_size;
+
+    if ( nPPSId < MAX_NUM_PPS)
+    {
+        cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 0, 0);
+    }
+
+    return;
+}
+//end of h264_Parse_Copy_Pps_From_DDR
+
+
+//h264_Parse_Copy_Sps_To_DDR () copy local sps to ddr mem with nSPSId
+void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId)
+{
+    uint32_t copy_size = sizeof(seq_param_set_used);
+    uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL + nSPSId * sizeof(seq_param_set_all);
+
+    if (nSPSId < MAX_NUM_SPS)
+    {
+        cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 1, 0);
+    }
+
+    //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id);
+
+
+    return;
+}
+
+//end of h264_Parse_Copy_Sps_To_DDR
+
+
+// h264_Parse_Copy_Sps_From_DDR copy a sps with nSPSId from ddr mem to local SPS
+void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId)
+{
+    uint32_t copy_size = sizeof(seq_param_set_used);
+    uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL + nSPSId * sizeof(seq_param_set_all);
+
+    if (nSPSId < MAX_NUM_SPS)
+    {
+        cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 0, 0);
+    }
+
+    return;
+
+}
+//end of h264_Parse_Copy_Sps_From_DDR
+
+//h264_Parse_Copy_Offset_Ref_Frames_To_DDR () copy local offset_ref_frames to ddr mem with nSPSId
+void h264_Parse_Copy_Offset_Ref_Frames_To_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId)
+{
+    uint32_t copy_size = sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+    uint32_t offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL + nSPSId * copy_size;
+
+    if (nSPSId < MAX_NUM_SPS)
+    {
+        //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 1, 0);
+        h264_memcpy((int32_t *)offset_ref_frames_entry_ptr, pOffset_ref_frames, copy_size);
+    }
+
+    return;
+}
+
+//end of h264_Parse_Copy_Offset_Ref_Frames_To_DDR
+
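+// Rem: each SPS slot in DDR holds MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE
+// int32_t entries (sized for the spec's 8-bit num_ref_frames_in_pic_order_cnt_cycle),
+// based at OFFSET_REF_FRAME_PADDR_GL + nSPSId * copy_size in the functions
+// above and below.
+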
+// h264_Parse_Copy_Offset_Ref_Frames_From_DDR copy an offset_ref_frames array with nSPSId from ddr mem to the local offset_ref_frames
+void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId)
+{
+    uint32_t copy_size = sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+    uint32_t offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL + nSPSId * copy_size;
+
+    if (nSPSId < MAX_NUM_SPS)
+    {
+        //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 0, 0);
+        h264_memcpy(pOffset_ref_frames, (int32_t *)offset_ref_frames_entry_ptr, copy_size);
+    }
+
+    return;
+
+}
+//end of h264_Parse_Copy_Offset_Ref_Frames_From_DDR
+
+
+//h264_Parse_Check_Sps_Updated_Flag () check whether the sps with nSPSId has been updated (flag kept in ddr mem)
+uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId)
+{
+    uint32_t is_updated = 0;
+    uint32_t copy_size = sizeof(uint32_t);
+    uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL + nSPSId * copy_size;
+
+
+    if (nSPSId < MAX_NUM_SPS)
+    {
+        cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 1, 0);
+    }
+
+    //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id);
+
+
+    return is_updated;
+}
+
+//end of h264_Parse_Check_Sps_Updated_Flag
+
+
+// h264_Parse_Clear_Sps_Updated_Flag clear the updated flag of the sps with nSPSId in ddr mem
+void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId)
+{
+    uint32_t is_updated = 0;
+    uint32_t copy_size = sizeof(uint32_t);
+    uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL + nSPSId * copy_size;
+
+    if (nSPSId < MAX_NUM_SPS)
+    {
+        cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 0, 0);
+    }
+
+    return;
+
+}
+//end of h264_Parse_Clear_Sps_Updated_Flag
+
+
+#endif
+
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_pps.c b/mixvbp/vbp_plugin/h264/h264parse_pps.c
new file mode 100755
index 0000000..17f0930
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_pps.c
@@ -0,0 +1,194 @@
+
+
+#include "h264.h"
+#include "h264parse.h"
+
+/*---------------------------------------------*/
+/*---------------------------------------------*/
+/*---------------------------------------------*/
+h264_Status h264_Parse_PicParameterSet(void *parent, h264_Info * pInfo, h264_PicParameterSet_t* PictureParameterSet)
+{
+    h264_Status ret = H264_PPS_ERROR;
+
+    //h264_PicParameterSet_t* PictureParameterSet = &pInfo->PictureParameterSet;
+    uint32_t code = 0, i = 0;
+
+    do {
+        ///// PPS par1: pic_parameter_set_id & seq_parameter_set_id
+        code = h264_GetVLCElement(parent, pInfo, false);
+        if (code > MAX_PIC_PARAMS) {
+            break;
+        }
+        PictureParameterSet->pic_parameter_set_id = (uint8_t)code;
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if (code > 255)
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+
+        code = h264_GetVLCElement(parent, pInfo, false);
+        if (code > MAX_NUM_SPS - 1) {
+            break;
+        }
+        PictureParameterSet->seq_parameter_set_id = (uint8_t)code;
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if (code > 31)
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+        ///// entropy_coding_mode_flag
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->entropy_coding_mode_flag = (uint8_t)code;
+        ///// pic_order_present_flag
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->pic_order_present_flag = (uint8_t)code;
+
+        PictureParameterSet->num_slice_groups_minus1 = h264_GetVLCElement(parent, pInfo, false);
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if (PictureParameterSet->num_slice_groups_minus1 > 8)
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+        //
+        // In main profile, FMO is excluded and num_slice_groups_minus1 should be 0
+        //
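+        // (So a PPS signalling FMO slice groups is rejected below: the break
+        // leaves ret at H264_PPS_ERROR, and slice_group_map_type etc. are
+        // never parsed.)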
+        if (PictureParameterSet->num_slice_groups_minus1 > 0)   //MAX_NUM_SLICE_GRPS)
+            break;
+
+        PictureParameterSet->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false) + 1;
+        PictureParameterSet->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false) + 1;
+
+        //// PPS->num_ref_idx_l0_active --- [0,32]
+        if (((PictureParameterSet->num_ref_idx_l0_active) > MAX_NUM_REF_FRAMES) || ((PictureParameterSet->num_ref_idx_l1_active) > MAX_NUM_REF_FRAMES))
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+
+        //// weighted prediction
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->weighted_pred_flag = (uint8_t)code;
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if (code > 2)
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+        viddec_pm_get_bits(parent, &code, 2);
+        PictureParameterSet->weighted_bipred_idc = (uint8_t)code;
+
+        //// QP
+        PictureParameterSet->pic_init_qp_minus26 = h264_GetVLCElement(parent, pInfo, true);
+        PictureParameterSet->pic_init_qs_minus26 = h264_GetVLCElement(parent, pInfo, true);
+        if (((PictureParameterSet->pic_init_qp_minus26 + 26) > MAX_QP) || ((PictureParameterSet->pic_init_qs_minus26 + 26) > MAX_QP))
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+        PictureParameterSet->chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true);
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if ((12 < PictureParameterSet->chroma_qp_index_offset) || (-12 > PictureParameterSet->chroma_qp_index_offset) )
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+        //// Deblocking ctl parameters
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->deblocking_filter_control_present_flag = (uint8_t)code;
+
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->constrained_intra_pred_flag = (uint8_t)code;
+
+        if ( viddec_pm_get_bits(parent, &code, 1) == -1)
+            break;
+        PictureParameterSet->redundant_pic_cnt_present_flag = (uint8_t)code;
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if (code && (pInfo->active_SPS.profile_idc != h264_ProfileBaseline))
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+        //// Check if we have more RBSP data for the additional parameters
+        if (h264_More_RBSP_Data(parent, pInfo))
+        {
+            viddec_pm_get_bits(parent, &code, 1);
+            PictureParameterSet->transform_8x8_mode_flag = (uint8_t)code;
+
+            if ( viddec_pm_get_bits(parent, &code, 1) == -1)
+                break;
+            PictureParameterSet->pic_scaling_matrix_present_flag = (uint8_t)code;
+
+            if (PictureParameterSet->pic_scaling_matrix_present_flag)
+            {
+                uint32_t n_ScalingList = 6 + (PictureParameterSet->transform_8x8_mode_flag << 1);
+                for (i = 0; i < n_ScalingList; i++)
+                {
+                    viddec_pm_get_bits(parent, &code, 1);
+                    PictureParameterSet->pic_scaling_list_present_flag[i] = (uint8_t)code;
+
+                    if (PictureParameterSet->pic_scaling_list_present_flag[i])
+                    {
+                        if (i < 6)
+                            h264_Scaling_List(parent, PictureParameterSet->ScalingList4x4[i], 16, &PictureParameterSet->UseDefaultScalingMatrix4x4Flag[i], pInfo);
+                        else
+                            h264_Scaling_List(parent, PictureParameterSet->ScalingList8x8[i - 6], 64, &PictureParameterSet->UseDefaultScalingMatrix8x8Flag[i - 6], pInfo);
+                    }
+                }
+            }
+
+            PictureParameterSet->second_chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true);   //fix
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            if ((PictureParameterSet->second_chroma_qp_index_offset > 12) || (PictureParameterSet->second_chroma_qp_index_offset < -12))
+            {
+                pInfo->sw_bail = 1;
+            }
+#endif
+#endif
+        }
+        else
+        {
+            PictureParameterSet->transform_8x8_mode_flag = 0;
+            PictureParameterSet->pic_scaling_matrix_present_flag = 0;
+            PictureParameterSet->second_chroma_qp_index_offset = PictureParameterSet->chroma_qp_index_offset;
+        }
+
+        ret = H264_STATUS_OK;
+    } while (0);
+
+    //h264_Parse_rbsp_trailing_bits(pInfo);
+    return ret;
+}
+
+////////// EOF///////////////
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_sei.c b/mixvbp/vbp_plugin/h264/h264parse_sei.c
new file mode 100755
index 0000000..f70e64c
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_sei.c
@@ -0,0 +1,1138 @@
+#define H264_PARSE_SEI_C
+
+#ifdef H264_PARSE_SEI_C
+
+#include "h264.h"
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_item_types.h"
+#include "viddec_fw_workload.h"
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_sei_stream_initialise ()
+//
+//
+
+void h264_sei_stream_initialise (h264_Info* pInfo)
+{
+    pInfo->sei_information.capture_POC     = 0;
+    pInfo->sei_information.disp_frozen     = 0;
+    pInfo->sei_information.release_POC     = 0;
+    pInfo->sei_information.capture_fn      = 0;
+    pInfo->sei_information.recovery_fn     = 0xFFFFFFFF;
+    pInfo->sei_information.scan_format     = 0;
+    pInfo->sei_information.broken_link_pic = 0;
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+h264_Status h264_sei_buffering_period(void *parent, h264_Info* pInfo)
+{
+    h264_Status ret = H264_STATUS_SEI_ERROR;
+
+    h264_SEI_buffering_period_t* sei_msg_ptr;
+    h264_SEI_buffering_period_t  sei_buffering_period;
+    int32_t SchedSelIdx;
+    int num_bits = 0;
+
+    sei_msg_ptr = (h264_SEI_buffering_period_t *)(&sei_buffering_period);
+
+    do {
+        if (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+        {
+            num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1;
+        }
+        else if (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag)
+        {
+            num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1;
+        }
+
+        sei_msg_ptr->seq_param_set_id = h264_GetVLCElement(parent, pInfo, false);
+        if (sei_msg_ptr->seq_param_set_id >= NUM_SPS)
+            break;
+
+        //check if this id is the same as the id of the current SPS  //fix
+
+        if (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+        {
+            if (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+                break;
+
+            for (SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; SchedSelIdx++)
+            {
+                viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_nal, num_bits);
+                viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_nal, num_bits);
+            }
+        }
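+        // Note: in both HRD loops each SchedSelIdx iteration overwrites the
+        // same two fields of sei_buffering_period, so only the last
+        // schedule's values survive; the loops mainly consume the bits.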
+        if (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)
+        {
+            if (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+                break;
+
+            for (SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; SchedSelIdx++)
+            {
+                viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_vcl, num_bits);
+                viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_vcl, num_bits);
+            }
+        }
+
+        ret = H264_STATUS_OK;
+    } while (0);
+
+    return ret;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+h264_Status h264_sei_pic_timing(void *parent, h264_Info* pInfo)
+{
+    int32_t CpbDpbDelaysPresentFlag = 0;
+    h264_SEI_pic_timing_t* sei_msg_ptr;
+    h264_SEI_pic_timing_t  sei_pic_timing;
+    int32_t num_bits_cpb = 0, num_bits_dpb = 0, time_offset_length = 0;
+    uint32_t code;
+    uint32_t clock_timestamp_flag = 0;
+    uint32_t full_timestamp_flag = 0;
+    uint32_t seconds_flag = 0;
+    uint32_t minutes_flag = 0;
+    uint32_t hours_flag = 0;
+    uint32_t time_offset = 0;
+
+
+
+
+    sei_msg_ptr = (h264_SEI_pic_timing_t *)(&sei_pic_timing);
+
+    if (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag)
+    {
+        num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 + 1;
+        num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 + 1;
+        time_offset_length = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_time_offset_length;
+    }
+    else if (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag)
+    {
+        num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 + 1;
+        num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 + 1;
+    }
+
+
+    CpbDpbDelaysPresentFlag = 1;   // as per amphion code
+    if (CpbDpbDelaysPresentFlag)
+    {
+        viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->cpb_removal_delay, num_bits_cpb);
+        viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->dpb_output_delay, num_bits_dpb);
+    }
+
+    if (pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag)
+    {
+        int32_t i = 0, NumClockTS = 0;
+
+        viddec_workload_item_t wi;
+
+        wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0;
+        viddec_pm_get_bits(parent, &code, 4);
+        sei_msg_ptr->pic_struct = (uint8_t)code;
+
+
+        if ((sei_msg_ptr->pic_struct == 0) || (sei_msg_ptr->pic_struct == 7) || (sei_msg_ptr->pic_struct == 8)) {
+            pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_PROGRESSIVE;
+        } else {
+            pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_INTERLACED;
+        }
+
+        wi.vwi_type = VIDDEC_WORKLOAD_SEI_PIC_TIMING;
+        wi.h264_sei_pic_timing.pic_struct = sei_msg_ptr->pic_struct;
+
+#ifndef VBP
+        //Push to current if we are in the first frame, or we do not detect the previous frame end
+        viddec_pm_append_workitem( parent, &wi, !(pInfo->Is_first_frame_in_stream || (!pInfo->is_current_workload_done)));
+#endif
+
+        if (sei_msg_ptr->pic_struct < 3) {
+            NumClockTS = 1;
+        } else if ((sei_msg_ptr->pic_struct < 5) || (sei_msg_ptr->pic_struct == 7)) {
+            NumClockTS = 2;
+        } else {
+            NumClockTS = 3;
+        }
+
+        for (i = 0; i < NumClockTS; i++)
+        {
+            viddec_pm_get_bits(parent, &code, 1);
+            clock_timestamp_flag = code;
+            //sei_msg_ptr->clock_timestamp_flag[i] = (uint8_t)code;
+
+            if (clock_timestamp_flag)
+            {
+                viddec_pm_get_bits(parent, &code, 2);
+                //sei_msg_ptr->ct_type[i] = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code, 1);
+                //sei_msg_ptr->nuit_field_based_flag[i] = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code, 5);
+                //sei_msg_ptr->counting_type[i] = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code, 1);
//sei_msg_ptr->full_timestamp_flag[i] = (uint8_t)code; + full_timestamp_flag = code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->discontinuity_flag[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->cnt_dropped_flag[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 8); + //sei_msg_ptr->n_frames[i] = (uint8_t)code; + + + if (full_timestamp_flag) + { + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->seconds_value[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->minutes_value[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 5); + //sei_msg_ptr->hours_value[i] = (uint8_t)code; + } + else + { + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->seconds_flag[i] = (uint8_t)code; + seconds_flag = code; + + if (seconds_flag) + { + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->seconds_value[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->minutes_flag[i] = (uint8_t)code; + minutes_flag = code; + + if (minutes_flag) + { + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->minutes_value[i] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + //sei_msg_ptr->hours_flag[i] = (uint8_t)code; + hours_flag = code; + + if (hours_flag) { + viddec_pm_get_bits(parent, &code , 6); + //sei_msg_ptr->hours_value[i] = (uint8_t)code; + } + } + } + } + + if (time_offset_length > 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&time_offset, time_offset_length); + } + } + } + } + + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_pan_scan(void *parent,h264_Info* pInfo) +{ + h264_SEI_pan_scan_rectangle_t* sei_msg_ptr; + h264_SEI_pan_scan_rectangle_t sei_pan_scan; + uint32_t code; + + viddec_workload_item_t wi; + + h264_memset( &(sei_pan_scan), 0x0, sizeof(h264_SEI_pan_scan_rectangle_t) ); + + viddec_fw_reset_workload_item(&wi); + wi.vwi_type = VIDDEC_WORKLOAD_H264_PAN_SCAN; + + sei_msg_ptr = (h264_SEI_pan_scan_rectangle_t *)(&sei_pan_scan); + + sei_msg_ptr->pan_scan_rect_id = h264_GetVLCElement(parent, pInfo, false); + + wi.h264_sei_pan_scan.pan_scan_rect_id = sei_msg_ptr->pan_scan_rect_id; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->pan_scan_rect_cancel_flag = (uint8_t)code; + viddec_fw_h264_sei_pan_scan_set_cancel_flag(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_rect_cancel_flag); + + if (!sei_msg_ptr->pan_scan_rect_cancel_flag) + { + int32_t i; + sei_msg_ptr->pan_scan_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false); + + viddec_fw_h264_sei_pan_scan_set_cnt_minus1(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_cnt_minus1); + if (sei_msg_ptr->pan_scan_cnt_minus1 > MAX_PAN_SCAN_CNT -1) + { + return H264_STATUS_SEI_ERROR; + } + for (i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++) + { + sei_msg_ptr->pan_scan_rect_left_offset[i] = h264_GetVLCElement(parent, pInfo, true); + sei_msg_ptr->pan_scan_rect_right_offset[i] = h264_GetVLCElement(parent, pInfo, true); + sei_msg_ptr->pan_scan_rect_top_offset[i] = h264_GetVLCElement(parent, pInfo, true); + sei_msg_ptr->pan_scan_rect_bottom_offset[i] = h264_GetVLCElement(parent, pInfo, true); + } + sei_msg_ptr->pan_scan_rect_repetition_period = h264_GetVLCElement(parent, pInfo, false); + 
wi.h264_sei_pan_scan.pan_scan_rect_repetition_period = sei_msg_ptr->pan_scan_rect_repetition_period; + } +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); +#endif + + if (!sei_msg_ptr->pan_scan_rect_cancel_flag) + { + int32_t i; + + viddec_fw_reset_workload_item(&wi); + wi.vwi_type = VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT; + + for (i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++) + { + viddec_fw_h264_pan_scan_set_left(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_left_offset[i]); + viddec_fw_h264_pan_scan_set_right(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_right_offset[i]); + viddec_fw_h264_pan_scan_set_top(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_top_offset[i]); + viddec_fw_h264_pan_scan_set_bottom(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_bottom_offset[i]); +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); +#endif + } + } + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_filler_payload(void *parent,h264_Info* pInfo, uint32_t payload_size) +{ + + h264_SEI_filler_payload_t* sei_msg_ptr; + h264_SEI_filler_payload_t sei_filler_payload; + uint32_t k; + uint32_t code; + + //remove warning + pInfo = pInfo; + + sei_msg_ptr = (h264_SEI_filler_payload_t *)(&sei_filler_payload); + for (k=0; k < payload_size; k++) + { + viddec_pm_get_bits(parent, &code , 8); + sei_msg_ptr->ff_byte = (uint8_t)code; + } + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_userdata_reg(void *parent,h264_Info* pInfo, uint32_t payload_size) +{ + + h264_SEI_userdata_registered_t* sei_msg_ptr; + h264_SEI_userdata_registered_t sei_userdata_registered; + uint32_t i; + int32_t byte = 0; + uint32_t code = 0; + viddec_workload_item_t wi; + + wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED; + wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0; + //remove warning + pInfo = pInfo; + + sei_msg_ptr = (h264_SEI_userdata_registered_t *)(&sei_userdata_registered); + + viddec_pm_get_bits(parent, &code , 8); + sei_msg_ptr->itu_t_t35_country_code = (uint8_t)code; + + if (sei_msg_ptr->itu_t_t35_country_code != 0xff) { + i = 1; + } else { + viddec_pm_get_bits(parent, &code , 8); + sei_msg_ptr->itu_t_t35_country_code_extension_byte = (uint8_t)code; + i = 2; + } + + + wi.user_data.size =0; + do + { + + viddec_pm_get_bits(parent, (uint32_t *)&byte, 8); + if (wi.user_data.size < 11) + { + wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; + } + wi.user_data.size++; + + if (11 == wi.user_data.size) + { + viddec_pm_setup_userdata(&wi); +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); +#endif + wi.user_data.size =0; + } + + i++; + } while (i < payload_size); + + if (0!=wi.user_data.size) + { + viddec_pm_setup_userdata(&wi); +#ifndef VBP + 
//cur is first frame + viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); +#endif + } + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_userdata_unreg(void *parent, h264_Info* pInfo, uint32_t payload_size) +{ + + h264_SEI_userdata_unregistered_t* sei_msg_ptr; + h264_SEI_userdata_unregistered_t sei_userdata_unregistered; + uint32_t i; + int32_t byte = 0; + uint32_t code; + + viddec_workload_item_t wi; + + wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED; + + //remove warning + pInfo = pInfo; + + sei_msg_ptr = (h264_SEI_userdata_unregistered_t *)(&sei_userdata_unregistered); + + for (i = 0; i < 4; i++) + { + viddec_pm_get_bits(parent, &code , 32); + sei_msg_ptr->uuid_iso_iec_11578[i] = (uint8_t)code; + } + + wi.user_data.size =0; + for (i = 16; i < payload_size; i++) + { + + viddec_pm_get_bits(parent, (uint32_t *)&byte, 8); + if (wi.user_data.size < 11) + { + wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; + } + wi.user_data.size++; + + if (11 == wi.user_data.size) + { + viddec_pm_setup_userdata(&wi); +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); +#endif + wi.user_data.size =0; + } + } + + if (0!=wi.user_data.size) + { + viddec_pm_setup_userdata(&wi); +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); +#endif + } + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_recovery_point(void *parent, h264_Info* pInfo) +{ + + h264_SEI_recovery_point_t* sei_msg_ptr; + h264_SEI_recovery_point_t sei_recovery_point; + uint32_t code; + viddec_workload_item_t wi; + + + sei_msg_ptr = (h264_SEI_recovery_point_t *)(&sei_recovery_point); + + sei_msg_ptr->recovery_frame_cnt = h264_GetVLCElement(parent, pInfo, false); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->exact_match_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->broken_link_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 2); + sei_msg_ptr->changing_slice_group_idc = (uint8_t)code; + + pInfo->sei_information.recovery_point = 1; + pInfo->sei_information.recovery_frame_cnt = (int32_t) sei_msg_ptr->recovery_frame_cnt; + pInfo->sei_information.capture_fn = 1; + pInfo->sei_information.broken_link_pic = sei_msg_ptr->broken_link_flag; + + if (pInfo->got_start) { + pInfo->img.recovery_point_found |= 2; + + //// Enable the RP recovery if no IDR ---Cisco + if ((pInfo->img.recovery_point_found & 1)==0) + pInfo->sei_rp_received = 1; + } + + // + /// Append workload for SEI + // + viddec_fw_reset_workload_item(&wi); + wi.vwi_type = VIDDEC_WORKLOAD_SEI_RECOVERY_POINT; + wi.h264_sei_recovery_point.recovery_frame_cnt = sei_msg_ptr->recovery_frame_cnt; + viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->exact_match_flag); + 
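#if 0
/* [Editorial aside, not part of the original patch.] recovery_frame_cnt,
 * parsed above, tells a decoder that joins the stream at this SEI (after a
 * seek or channel change) that output is usable once decoding reaches the
 * frame whose frame_num equals (entry frame_num + recovery_frame_cnt) modulo
 * MaxFrameNum, where MaxFrameNum = 2^(log2_max_frame_num_minus4 + 4). A
 * hedged sketch of that check:
 */
static int h264_recovery_point_reached(uint32_t entry_frame_num,
                                       uint32_t recovery_frame_cnt,
                                       uint32_t cur_frame_num,
                                       uint32_t max_frame_num)
{
    /* frame_num wraps modulo MaxFrameNum, so compare modularly */
    return cur_frame_num == (entry_frame_num + recovery_frame_cnt) % max_frame_num;
}
#endif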
viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->broken_link_flag); + wi.h264_sei_recovery_point.changing_slice_group_idc = sei_msg_ptr->changing_slice_group_idc; +#ifndef VBP + //cur is first frame + viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); +#endif + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_dec_ref_pic_marking_rep(void *parent,h264_Info* pInfo) +{ + + h264_SEI_decoded_ref_pic_marking_repetition_t* sei_msg_ptr; + h264_SEI_decoded_ref_pic_marking_repetition_t sei_ref_pic; + uint32_t code; + + sei_msg_ptr = (h264_SEI_decoded_ref_pic_marking_repetition_t *)(&sei_ref_pic); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->original_idr_flag = (uint8_t)code; + + sei_msg_ptr->original_frame_num = h264_GetVLCElement(parent, pInfo, false); + + if (!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag)) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->orignal_field_pic_flag = (uint8_t)code; + + if (sei_msg_ptr->orignal_field_pic_flag) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->original_bottom_field_pic_flag = (uint8_t)code; + } + } + h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, &pInfo->SliceHeader); + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_spare_pic(void *parent,h264_Info* pInfo) +{ + + //h264_SEI_spare_picture_t* sei_msg_ptr; + + //remove warning + pInfo = pInfo; + parent = parent; + + //sei_msg_ptr = (h264_SEI_spare_picture_t *)(&user_data->user_data[0]); + + //OS_INFO("Not supported SEI\n"); + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_scene_info(void *parent,h264_Info* pInfo) +{ + + h264_SEI_scene_info_t* sei_msg_ptr; + h264_SEI_scene_info_t sei_scene_info; + uint32_t code; + + sei_msg_ptr = (h264_SEI_scene_info_t*)(&sei_scene_info); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->scene_info_present_flag = (uint8_t)code; + + if (sei_msg_ptr->scene_info_present_flag) + { + sei_msg_ptr->scene_id = h264_GetVLCElement(parent, pInfo, false); + sei_msg_ptr->scene_transitioning_type= h264_GetVLCElement(parent, pInfo, false); + if (sei_msg_ptr->scene_transitioning_type > 3) + { + sei_msg_ptr->second_scene_id = h264_GetVLCElement(parent, pInfo, false); + } + } + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_sub_seq_info(void *parent,h264_Info* pInfo) +{ + + 
h264_SEI_sub_sequence_info_t* sei_msg_ptr; + h264_SEI_sub_sequence_info_t sei_sub_sequence_info; + uint32_t code; + + sei_msg_ptr = (h264_SEI_sub_sequence_info_t *)(&sei_sub_sequence_info); + + sei_msg_ptr->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo,false); + sei_msg_ptr->sub_seq_id= h264_GetVLCElement(parent, pInfo,false); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->first_ref_pic_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->leading_non_ref_pic_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->last_pic_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->sub_seq_frame_num_flag = (uint8_t)code; + + + if (sei_msg_ptr->sub_seq_frame_num_flag) + { + sei_msg_ptr->sub_seq_frame_num = h264_GetVLCElement(parent, pInfo,false); + } + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_sub_seq_layer(void *parent,h264_Info* pInfo) +{ + + h264_SEI_sub_sequence_layer_t* sei_msg_ptr; + h264_SEI_sub_sequence_layer_t sei_sub_sequence_layer; + int32_t layer; + uint32_t code; + + sei_msg_ptr = (h264_SEI_sub_sequence_layer_t *)(&sei_sub_sequence_layer); + sei_msg_ptr->num_sub_seq_layers_minus1 = h264_GetVLCElement(parent, pInfo,false); + + if (sei_msg_ptr->num_sub_seq_layers_minus1 >= MAX_SUB_SEQ_LAYERS) + { + return H264_STATUS_SEI_ERROR; + } + + for (layer = 0; layer <= sei_msg_ptr->num_sub_seq_layers_minus1; layer++) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->accurate_statistics_flag[layer] = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 16); + sei_msg_ptr->average_bit_rate[layer] = (uint16_t)code; + + viddec_pm_get_bits(parent, &code , 16); + sei_msg_ptr->average_frame_rate[layer] = (uint16_t)code; + + } + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_sub_seq(void *parent,h264_Info* pInfo) +{ + int32_t n; + uint32_t code; + + h264_SEI_sub_sequence_t* sei_msg_ptr; + h264_SEI_sub_sequence_t sei_sub_sequence; + + sei_msg_ptr = (h264_SEI_sub_sequence_t *)(&sei_sub_sequence); + + sei_msg_ptr->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo, false); + sei_msg_ptr->sub_seq_id= h264_GetVLCElement(parent, pInfo, false); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->duration_flag = (uint8_t)code; + + if (sei_msg_ptr->duration_flag) + { + viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->sub_seq_duration, 32); + } + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->average_rate_flag = (uint8_t)code; + + if (sei_msg_ptr->average_rate_flag) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->average_statistics_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 16); + sei_msg_ptr->average_bit_rate = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 16); + sei_msg_ptr->average_frame_rate = (uint8_t)code; + + } + sei_msg_ptr->num_referenced_subseqs = h264_GetVLCElement(parent, pInfo, false); + if (sei_msg_ptr->num_referenced_subseqs >= MAX_NUM_REF_SUBSEQS) + 
{ + return H264_STATUS_SEI_ERROR; + } + + for (n = 0; n < sei_msg_ptr->num_referenced_subseqs; n++) + { + sei_msg_ptr->ref_sub_seq_layer_num= h264_GetVLCElement(parent, pInfo, false); + sei_msg_ptr->ref_sub_seq_id= h264_GetVLCElement(parent, pInfo, false); + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->ref_sub_seq_direction = (uint8_t)code; + } + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_full_frame_freeze(void *parent,h264_Info* pInfo) +{ + + h264_SEI_full_frame_freeze_t* sei_msg_ptr; + h264_SEI_full_frame_freeze_t sei_full_frame_freeze; + + sei_msg_ptr = (h264_SEI_full_frame_freeze_t *)(&sei_full_frame_freeze); + + sei_msg_ptr->full_frame_freeze_repetition_period= h264_GetVLCElement(parent, pInfo, false); + + pInfo->sei_information.capture_POC = 1; + pInfo->sei_information.freeze_rep_period = sei_msg_ptr->full_frame_freeze_repetition_period; + //pInfo->img.sei_freeze_this_image = 1; + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_full_frame_freeze_release(void *parent,h264_Info* pInfo) +{ + //remove warning + parent = parent; + pInfo = pInfo; + + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_full_frame_snapshot(void *parent,h264_Info* pInfo) +{ + + h264_SEI_full_frame_snapshot_t* sei_msg_ptr; + h264_SEI_full_frame_snapshot_t sei_full_frame_snapshot; + + sei_msg_ptr = (h264_SEI_full_frame_snapshot_t *)(&sei_full_frame_snapshot); + + sei_msg_ptr->snapshot_id = h264_GetVLCElement(parent, pInfo, false); + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_progressive_segement_start(void *parent,h264_Info* pInfo) +{ + + h264_SEI_progressive_segment_start_t* sei_msg_ptr; + h264_SEI_progressive_segment_start_t sei_progressive_segment_start; + + sei_msg_ptr = (h264_SEI_progressive_segment_start_t *)(&sei_progressive_segment_start); + + sei_msg_ptr->progressive_refinement_id= h264_GetVLCElement(parent, pInfo, false); + sei_msg_ptr->num_refinement_steps_minus1= h264_GetVLCElement(parent, pInfo, false); + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_progressive_segment_end(void *parent,h264_Info* pInfo) +{ + + 
h264_SEI_progressive_segment_end_t* sei_msg_ptr; + h264_SEI_progressive_segment_end_t sei_progressive_segment_end; + + sei_msg_ptr = (h264_SEI_progressive_segment_end_t *)(&sei_progressive_segment_end); + + sei_msg_ptr->progressive_refinement_id = h264_GetVLCElement(parent, pInfo, false); + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_motion_constrained_slice_grp_set(void *parent, h264_Info* pInfo) +{ + int32_t i; + uint32_t code; + h264_SEI_motion_constrained_slice_group_t* sei_msg_ptr; + h264_SEI_motion_constrained_slice_group_t sei_motion_constrained_slice_group; + + sei_msg_ptr = (h264_SEI_motion_constrained_slice_group_t *)(&sei_motion_constrained_slice_group); + + sei_msg_ptr->num_slice_groups_in_set_minus1= h264_GetVLCElement(parent, pInfo, false); + if (sei_msg_ptr->num_slice_groups_in_set_minus1 >= MAX_NUM_SLICE_GRPS) + { + return H264_STATUS_SEI_ERROR; + } + + for (i=0; i<= sei_msg_ptr->num_slice_groups_in_set_minus1; i++) + { + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->slice_group_id[i] = (uint8_t)code; + } + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->exact_sample_value_match_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code , 1); + sei_msg_ptr->pan_scan_rect_flag = (uint8_t)code; + + + if (sei_msg_ptr->pan_scan_rect_flag) + { + sei_msg_ptr->pan_scan_rect_id= h264_GetVLCElement(parent, pInfo, false); + } + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_film_grain_characteristics(void *parent,h264_Info* pInfo) +{ + //OS_INFO("Not supported SEI\n"); + + //remove warning + parent = parent; + pInfo = pInfo; + + + + + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_deblocking_filter_display_preferences(void *parent,h264_Info* pInfo) +{ + + //h264_SEI_deblocking_filter_display_pref_t* sei_msg_ptr; + + //remove warning + parent = parent; + pInfo = pInfo; + + //sei_msg_ptr = (h264_SEI_deblocking_filter_display_pref_t *)(&user_data->user_data[0]); + + //OS_INFO("Not supported SEI\n"); + return H264_STATUS_OK; +} +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_sei_stereo_video_info(void *parent,h264_Info* pInfo) +{ + + //h264_SEI_stereo_video_info_t* sei_msg_ptr; + + //remove warning + parent = parent; + pInfo = pInfo; + + + //sei_msg_ptr = (h264_SEI_stereo_video_info_t *)(&user_data->user_data[0]); + + //OS_INFO("Not supported SEI\n"); + return H264_STATUS_OK; +} +/* 
------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +uint32_t h264_sei_reserved_sei_message(void *parent, h264_Info* pInfo, uint32_t payload_size) +{ + int32_t k, byte_index, user_data_byte_index; + uint32_t i; + int32_t word, bits; + uint32_t user_data; + //h264_SEI_reserved_t* sei_msg_ptr; + //h264_SEI_reserved_t sei_reserved; + + //remove warning + pInfo = pInfo; + + //sei_msg_ptr = (h264_SEI_reserved_t *)(&sei_reserved); + + byte_index = 0; + word = 0; + user_data_byte_index = 0x0; + + for (i = 0, k = 0; i < payload_size; i++) + { + if (byte_index == 0) word = 0; + viddec_pm_get_bits(parent, (uint32_t *)&bits, 8); + + switch (byte_index) + { + case 1: + word = (bits << 8) | word; + break; + case 2: + word = (bits << 16) | word; + break; + case 3: + word = (bits << 24) | word; + break; + default : + word = bits; + break; + } + + if (byte_index == 3) + { + byte_index = 0; + user_data = word; + k++; + } + else + { + byte_index++; + } + + user_data_byte_index++; + if ( user_data_byte_index == MAX_USER_DATA_SIZE) + { + //user_data->user_data_size = user_data_byte_index; + //sei_msg_ptr = (h264_SEI_reserved_t *)(&user_data->user_data[0]); + byte_index = 0; + word = 0; + user_data_byte_index = 0x0; + } + } + + if (byte_index) + user_data = word; + + //user_data->user_data_size = user_data_byte_index; + + return user_data_byte_index; + +// return H264_STATUS_OK; +} + +////// TODO +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtype payloadType, int32_t payloadSize) +{ + //int32_t bit_equal_to_zero; + h264_Status status = H264_STATUS_OK; + + //removing warning + payloadSize = payloadSize; + + switch (payloadType) + { + case SEI_BUF_PERIOD: + status = h264_sei_buffering_period(parent, pInfo); + break; + case SEI_PIC_TIMING: + status = h264_sei_pic_timing(parent, pInfo); + break; + case SEI_PAN_SCAN: + status = h264_sei_pan_scan(parent, pInfo); + break; + case SEI_FILLER_PAYLOAD: + status = h264_sei_filler_payload(parent, pInfo, payloadSize); + break; + case SEI_REG_USERDATA: + status = h264_sei_userdata_reg(parent, pInfo, payloadSize); + break; + case SEI_UNREG_USERDATA: + status = h264_sei_userdata_unreg(parent, pInfo, payloadSize); + break; + case SEI_RECOVERY_POINT: + h264_sei_recovery_point(parent, pInfo); + break; + case SEI_DEC_REF_PIC_MARKING_REP: + status = h264_sei_dec_ref_pic_marking_rep(parent, pInfo); + break; + case SEI_SPARE_PIC: + status = h264_sei_spare_pic(parent, pInfo); + break; + case SEI_SCENE_INFO: + status = h264_sei_scene_info(parent, pInfo); + break; + case SEI_SUB_SEQ_INFO: + status = h264_sei_sub_seq_info(parent, pInfo); + break; + case SEI_SUB_SEQ_LAYER: + status = h264_sei_sub_seq_layer(parent, pInfo); + break; + case SEI_SUB_SEQ: + status = h264_sei_sub_seq(parent, pInfo); + break; + case SEI_FULL_FRAME_FREEZE: + status = h264_sei_full_frame_freeze(parent, pInfo); + break; + case SEI_FULL_FRAME_FREEZE_RELEASE: + h264_sei_full_frame_freeze_release(parent, pInfo); + break; + case SEI_FULL_FRAME_SNAPSHOT: + status = 
h264_sei_full_frame_snapshot(parent, pInfo); + break; + case SEI_PROGRESSIVE_SEGMENT_START: + status = h264_sei_progressive_segement_start(parent, pInfo); + break; + case SEI_PROGRESSIVE_SEGMENT_END: + status = h264_sei_progressive_segment_end(parent, pInfo); + break; + case SEI_MOTION_CONSTRAINED_SLICE_GRP_SET: + status = h264_sei_motion_constrained_slice_grp_set(parent, pInfo); + break; + case SEI_FILM_GRAIN_CHARACTERISTICS: + status = h264_sei_film_grain_characteristics(parent, pInfo); + break; + case SEI_DEBLK_FILTER_DISPLAY_PREFERENCE: + status = h264_sei_deblocking_filter_display_preferences(parent, pInfo); + break; + case SEI_STEREO_VIDEO_INFO: + status = h264_sei_stereo_video_info(parent, pInfo); + break; + default: + status = (h264_Status)h264_sei_reserved_sei_message(parent, pInfo, payloadSize); + break; + } + + /* + viddec_pm_get_bits(parent, (uint32_t *)&tmp, 1); + + if(tmp == 0x1) // if byte is not aligned + { + while(pInfo->bitoff != 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&bit_equal_to_zero, 1); + } + } + */ + return status; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void *parent, h264_Info* pInfo) +{ + h264_Status status = H264_STATUS_OK; + int32_t payload_type, payload_size; + uint32_t next_8_bits = 0,bits_offset=0,byte_offset = 0; + uint8_t is_emul = 0; + int32_t bits_operation_result = 0; + + do { + //// payload_type + payload_type = 0; + viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + while (next_8_bits == 0xFF) + { + bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + if (-1 == bits_operation_result) + { + status = H264_STATUS_SEI_ERROR; + return status; + } + payload_type += 255; + + } + //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + payload_type += next_8_bits; + + //// payload_size + payload_size = 0; + viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + while (next_8_bits == 0xFF) + { + payload_size += 255; + bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + if (-1 == bits_operation_result) + { + status = H264_STATUS_SEI_ERROR; + return status; + } + } + //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8); + payload_size += next_8_bits; + + //PRINTF(MFD_NONE, " SEI: payload type = %d, payload size = %d \n", payload_type, payload_size); + + + ///////////////////////////////// + // Parse SEI payloads + ///////////////////////////////// + status = h264_SEI_payload(parent, pInfo, (h264_sei_payloadtype)payload_type, payload_size); + if (status != H264_STATUS_OK) + break; + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + // OS_INFO("SEI byte_offset 3= %d, bits_offset=%d\n", byte_offset, bits_offset); + + if (bits_offset!=0) + { + viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8-bits_offset); + } + + bits_operation_result = viddec_pm_peek_bits(parent, (uint32_t *)&next_8_bits, 8); + if (-1 == bits_operation_result) + { + status = H264_STATUS_SEI_ERROR; + return status; + } + + // OS_INFO("next_8_bits = %08x\n", next_8_bits); + + } while (next_8_bits != 0x80); + + //} while (h264_More_RBSP_Data(parent, pInfo) && status == H264_STATUS_OK); + + return status; +} + +#endif + diff --git 
a/mixvbp/vbp_plugin/h264/h264parse_sh.c b/mixvbp/vbp_plugin/h264/h264parse_sh.c new file mode 100755 index 0000000..9db8cee --- /dev/null +++ b/mixvbp/vbp_plugin/h264/h264parse_sh.c @@ -0,0 +1,837 @@ +//#define H264_PARSE_SLICE_HDR +//#ifdef H264_PARSE_SLICE_HDR + +#include "h264.h" +#include "h264parse.h" + +extern int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, unsigned char *is_emul); + + +/*-----------------------------------------------------------------------------------------*/ +// Slice header 1---- +// 1) first_mb_in_slice, slice_type, pic_parameter_id +/*-----------------------------------------------------------------------------------------*/ +h264_Status h264_Parse_Slice_Header_1(void *parent,h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +{ + h264_Status ret = H264_STATUS_ERROR; + + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + int32_t slice_type =0; + uint32_t data =0; + + do { + ///// first_mb_in_slice + SliceHeader->first_mb_in_slice = h264_GetVLCElement(parent, pInfo, false); + + ///// slice_type + slice_type = h264_GetVLCElement(parent, pInfo, false); +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (slice_type > 9) + { + pInfo->sw_bail = 1; + } +#endif +#endif + SliceHeader->slice_type = (slice_type%5); + + if (SliceHeader->slice_type > h264_PtypeI) { + ret = H264_STATUS_NOTSUPPORT; + break; + } + + + ////// pic_parameter_id + data = h264_GetVLCElement(parent, pInfo, false); + if (data > MAX_PIC_PARAMS) { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif + ret = H264_PPS_INVALID_PIC_ID; + break; + } + SliceHeader->pic_parameter_id = (uint8_t)data; + ret = H264_STATUS_OK; + } while (0); + + return ret; +} + +/*-----------------------------------------------------------------------------------------*/ +// slice header 2 +// frame_num +// field_pic_flag, structure +// idr_pic_id +// pic_order_cnt_lsb, delta_pic_order_cnt_bottom +/*-----------------------------------------------------------------------------------------*/ + +h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +{ + h264_Status ret = H264_SliceHeader_ERROR; + + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + uint32_t code; + int32_t max_mb_num=0; + + do { + //////////////////////////////////// Slice header part 2////////////////// + + /// Frame_num + viddec_pm_get_bits(parent, &code, pInfo->active_SPS.log2_max_frame_num_minus4+4); + SliceHeader->frame_num = (int32_t)code; + + /// Picture structure + SliceHeader->structure = FRAME; + SliceHeader->field_pic_flag = 0; + SliceHeader->bottom_field_flag = 0; + + if (!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag)) + { + /// field_pic_flag + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->field_pic_flag = (uint8_t)code; + + if (SliceHeader->field_pic_flag) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->bottom_field_flag = (uint8_t)code; + + SliceHeader->structure = SliceHeader->bottom_field_flag? 
BOTTOM_FIELD: TOP_FIELD;
+            }
+        }
+
+        ////// Check that first_mb_in_slice is valid
+        if (SliceHeader->structure == FRAME) {
+            max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs;
+        } else {
+            max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs / 2;
+        }
+
+        ///if(pInfo->img.MbaffFrameFlag)
+        if (pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) {
+            SliceHeader->first_mb_in_slice <<= 1;
+        }
+
+        if (SliceHeader->first_mb_in_slice >= max_mb_num)
+            break;
+
+        if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+        {
+            SliceHeader->idr_pic_id = h264_GetVLCElement(parent, pInfo, false);
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            if (SliceHeader->idr_pic_id > 65535)
+            {
+                pInfo->sw_bail = 1;
+            }
+#endif
+#endif
+        }
+
+        if (pInfo->active_SPS.pic_order_cnt_type == 0)
+        {
+            viddec_pm_get_bits(parent, &code, pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4 + 4);
+            SliceHeader->pic_order_cnt_lsb = (uint32_t)code;
+
+            if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag))
+            {
+                SliceHeader->delta_pic_order_cnt_bottom = h264_GetVLCElement(parent, pInfo, true);
+            }
+            else
+            {
+                SliceHeader->delta_pic_order_cnt_bottom = 0;
+            }
+        }
+
+        if ((pInfo->active_SPS.pic_order_cnt_type == 1) && !(pInfo->active_SPS.delta_pic_order_always_zero_flag))
+        {
+            SliceHeader->delta_pic_order_cnt[0] = h264_GetVLCElement(parent, pInfo, true);
+            if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag))
+            {
+                SliceHeader->delta_pic_order_cnt[1] = h264_GetVLCElement(parent, pInfo, true);
+            }
+        }
+
+        if (pInfo->active_PPS.redundant_pic_cnt_present_flag)
+        {
+            SliceHeader->redundant_pic_cnt = h264_GetVLCElement(parent, pInfo, false);
+            if (SliceHeader->redundant_pic_cnt > 127)
+                break;
+        } else {
+            SliceHeader->redundant_pic_cnt = 0;
+        }
+
+        ret = H264_STATUS_OK;
+    } while (0);
+
+    //////////// FMO is not supported currently, so comment out the following code
+    //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) )
+    //{
+    //    SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile
+    //}
+
+    return ret;
+}
+
+/*-----------------------------------------------------------------------------------------*/
+// slice header 3
+// (direct_spatial_mv_pred_flag, num_ref_idx, pic_list_reorder, PWT, ref_pic_remark, alpha, beta, etc)
+/*-----------------------------------------------------------------------------------------*/
+
+h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
+{
+    h264_Status ret = H264_SliceHeader_ERROR;
+
+    //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+    int32_t slice_alpha_c0_offset, slice_beta_offset;
+    uint32_t code;
+    uint32_t bits_offset = 0, byte_offset = 0;
+    uint8_t is_emul = 0;
+
+    do {
+        /// direct_spatial_mv_pred_flag
+        if (SliceHeader->slice_type == h264_PtypeB)
+        {
+            viddec_pm_get_bits(parent, &code, 1);
+            SliceHeader->direct_spatial_mv_pred_flag = (uint8_t)code;
+        }
+        else
+        {
+            SliceHeader->direct_spatial_mv_pred_flag = 0;
+        }
+
+        //
+        // Reset ref_idx to the PPS defaults and override it if present
+        //
+        SliceHeader->num_ref_idx_l0_active = pInfo->active_PPS.num_ref_idx_l0_active;
+        SliceHeader->num_ref_idx_l1_active = pInfo->active_PPS.num_ref_idx_l1_active;
+
+        if ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP) || (SliceHeader->slice_type == h264_PtypeB))
{ + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->num_ref_idx_active_override_flag = (uint8_t)code; + + if (SliceHeader->num_ref_idx_active_override_flag) + { + SliceHeader->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false) + 1; + if (SliceHeader->slice_type == h264_PtypeB) + { + SliceHeader->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1; + } + } + } + + if (SliceHeader->slice_type != h264_PtypeB) { + SliceHeader->num_ref_idx_l1_active = 0; + } + + if ((SliceHeader->num_ref_idx_l0_active > MAX_NUM_REF_FRAMES) || (SliceHeader->num_ref_idx_l1_active > MAX_NUM_REF_FRAMES)) + { + break; + } + + if (h264_Parse_Ref_Pic_List_Reordering(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + break; + } + + + //// + //// Parse Pred_weight_table but not store it becasue it will be reparsed in HW + //// + if (((pInfo->active_PPS.weighted_pred_flag) && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) + { + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + pInfo->h264_pwt_enabled = 1; + pInfo->h264_pwt_start_byte_offset = byte_offset; + pInfo->h264_pwt_start_bit_offset = bits_offset; + + if (h264_Parse_Pred_Weight_Table(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + break; + } + + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + if (0 == bits_offset) + { + pInfo->h264_pwt_end_byte_offset = byte_offset-1; + pInfo->h264_pwt_end_bit_offset = 8; + } + else + { + pInfo->h264_pwt_end_byte_offset = byte_offset; + pInfo->h264_pwt_end_bit_offset = bits_offset; + } + + } + + + + //// + //// Parse Ref_pic marking if there + //// + if (SliceHeader->nal_ref_idc != 0) + { + if (h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + break; + } + } + + if ((pInfo->active_PPS.entropy_coding_mode_flag) && (SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) + { + SliceHeader->cabac_init_idc = h264_GetVLCElement(parent, pInfo, false); + } + else + { + SliceHeader->cabac_init_idc = 0; + } + + if (SliceHeader->cabac_init_idc > 2) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif + break; + } + + SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true); + if ( (SliceHeader->slice_qp_delta > (25-pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26+pInfo->active_PPS.pic_init_qp_minus26))) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif + break; + } + + if ((SliceHeader->slice_type == h264_PtypeSP)|| (SliceHeader->slice_type == h264_PtypeSI) ) + { + if (SliceHeader->slice_type == h264_PtypeSP) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->sp_for_switch_flag = (uint8_t)code; + + } + SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true); + + if ( (SliceHeader->slice_qs_delta > (25-pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif + break; + } + } + if (pInfo->active_PPS.deblocking_filter_control_present_flag) + { + SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false); + if (SliceHeader->disable_deblocking_filter_idc != 1) + { + SliceHeader->slice_alpha_c0_offset_div2 = h264_GetVLCElement(parent, pInfo, true); + 
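#if 0
/* [Editorial aside, not part of the original patch.] The range checks below
 * encode the spec limits on the deblocking offsets: slice_alpha_c0_offset_div2
 * and slice_beta_offset_div2 must lie in [-6, +6], so the doubled filter
 * offsets lie in [-12, +12]. The same bound, factored into a helper:
 */
static int deblock_offset_div2_valid(int32_t div2)
{
    int32_t offset = div2 << 1;               /* the actual filter offset */
    return (offset >= -12) && (offset <= 12); /* i.e. div2 in [-6, +6] */
}
#endif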
slice_alpha_c0_offset = SliceHeader->slice_alpha_c0_offset_div2 << 1; + if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif + break; + } + + SliceHeader->slice_beta_offset_div2 = h264_GetVLCElement(parent, pInfo, true); + slice_beta_offset = SliceHeader->slice_beta_offset_div2 << 1; + if (slice_beta_offset < -12 || slice_beta_offset > 12) + { +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 1; +#endif +#endif + break; + } + } + else + { + SliceHeader->slice_alpha_c0_offset_div2 = 0; + SliceHeader->slice_beta_offset_div2 = 0; + } + } + + ret = H264_STATUS_OK; + } while (0); + + //////////// FMO is not supported curently, so comment out the following code + //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) ) + //{ + // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile + //} + + return ret; +} + + +/*--------------------------------------------------------------------------------------------------*/ +// +// The syntax elements reordering_of_pic_nums_idc, abs_diff_pic_num_minus1, and long_term_pic_num +// specify the change from the initial reference picture lists to the reference picture lists to be used +// for decoding the slice + +// reordering_of_pic_nums_idc: +// 0: abs_diff_pic_num_minus1 is present and corresponds to a difference to subtract from a picture number prediction value +// 1: abs_diff_pic_num_minus1 is present and corresponds to a difference to add to a picture number prediction value +// 2: long_term_pic_num is present and specifies the long-term picture number for a reference picture +// 3: End loop for reordering of the initial reference picture list +// +/*--------------------------------------------------------------------------------------------------*/ + +h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) +{ + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + int32_t reorder= -1; + uint32_t code; + + + if ((SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag = (uint8_t)code; + + if (SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag) + { + + reorder= -1; + do + { + reorder++; + + if (reorder > MAX_NUM_REF_FRAMES) + { + return H264_SliceHeader_ERROR; + } + + SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); + if ((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1)) + { + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); + } + else if (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 2) + { + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); + } + + } while (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] != 3); + } + } + + if (SliceHeader->slice_type == h264_PtypeB) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag = (uint8_t)code; + + if (SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag) + { + + reorder = -1; + do + { + reorder++; + if (reorder > 
MAX_NUM_REF_FRAMES) + { + return H264_SliceHeader_ERROR; + } + SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); + if ((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1)) + { + SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); + } + else if (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 2) + { + SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); + } + } while (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] != 3); + } + } + + //currently just two reference frames but in case mroe than two, then should use an array for the above structures that is why reorder + return H264_STATUS_OK; + +} + +#ifdef VBP +h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) +{ + uint32_t i =0, j=0; + uint32_t flag; + + SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(parent, pInfo, false); + +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (SliceHeader->sh_predwttbl.luma_log2_weight_denom > 7) + { + pInfo->sw_bail = 1; + } +#endif +#endif + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(parent,pInfo, false); + } +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if (SliceHeader->sh_predwttbl.chroma_log2_weight_denom > 7) + { + pInfo->sw_bail = 1; + } +#endif +#endif + for (i=0; i< SliceHeader->num_ref_idx_l0_active; i++) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.luma_weight_l0_flag = flag; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if ((-128 > flag) || (127 < flag)) + { + pInfo->sw_bail = 1; + } +#endif +#endif + if (SliceHeader->sh_predwttbl.luma_weight_l0_flag) + { + SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(parent, pInfo, true); + } + else + { + SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0; + } + + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.chroma_weight_l0_flag = flag; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if ((-128 > flag) || (127 < flag)) + { + pInfo->sw_bail = 1; + } +#endif +#endif + if (SliceHeader->sh_predwttbl.chroma_weight_l0_flag) + { + for (j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for (j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; + } + } + } + + } + + if (SliceHeader->slice_type == h264_PtypeB) + { + for (i=0; i< SliceHeader->num_ref_idx_l1_active; i++) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.luma_weight_l1_flag = flag; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if ((-128 > flag) || (127 < flag)) + { + pInfo->sw_bail = 1; + } +#endif +#endif + if (SliceHeader->sh_predwttbl.luma_weight_l1_flag) + { + 
SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(parent, pInfo, true); + } + else + { + SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; + } + + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.chroma_weight_l1_flag = flag; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if ((-128 > flag) || (127 < flag)) + { + pInfo->sw_bail = 1; + } +#endif +#endif + if (SliceHeader->sh_predwttbl.chroma_weight_l1_flag) + { + for (j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for (j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; + } + } + } + + } + } + + return H264_STATUS_OK; +} ///// End of h264_Parse_Pred_Weight_Table + +#else + +/*--------------------------------------------------------------------------------------------------*/ +// +// Parse Prediction weight table +// Note: This table will be reparsed in HW Accelerator, so needn't keep it in parser +// +/*--------------------------------------------------------------------------------------------------*/ + + +h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) +{ + uint32_t i =0, j=0; + uint32_t flag, val; + //h264_Slice_Header_t* SliceHeader = &pInfo->SPS.SliceHeader; + + //SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "luma_log2_weight_denom"); + val = h264_GetVLCElement(parent, pInfo, false); + + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + //SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "chroma_log2_weight_denom"); + val = h264_GetVLCElement(parent,pInfo, false); + } + + for (i=0; i< SliceHeader->num_ref_idx_l0_active; i++) + { + //SliceHeader->sh_predwttbl.luma_weight_l0_flag = h264_GetBits(pInfo, 1, "luma_weight_l0_flag"); + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + + //if(SliceHeader->sh_predwttbl.luma_weight_l0_flag) + if (flag) + { + //SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l0"); + val = h264_GetVLCElement(parent, pInfo, true); + //SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l0"); + val = h264_GetVLCElement(parent, pInfo, true); + } + else + { + //SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + //SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0; + } + + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + //SliceHeader->sh_predwttbl.chroma_weight_l0_flag = h264_GetBits(pInfo, 1, "chroma_weight_l0_flag"); + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + if (flag) + { + for (j=0; j <2; j++) + { + //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l0"); + val = h264_GetVLCElement(parent, pInfo, true); + //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l0"); + val = h264_GetVLCElement(parent, pInfo, true); + } + } + else 
+ { + for (j=0; j <2; j++) + { + //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; + } + } + } + + } + + if (SliceHeader->slice_type == h264_PtypeB) + { + for (i=0; i< SliceHeader->num_ref_idx_l1_active; i++) + { + //SliceHeader->sh_predwttbl.luma_weight_l1_flag = h264_GetBits(pInfo, 1, "luma_weight_l1_flag"); + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + if (flag) + { + //SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l1"); + val = h264_GetVLCElement(parent, pInfo, true); + //SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l1"); + val = h264_GetVLCElement(parent, pInfo, true); + } + else + { + //SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + //SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; + } + + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + //SliceHeader->sh_predwttbl.chroma_weight_l1_flag = h264_GetBits(pInfo, 1, "chroma_weight_l1_flag"); + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + if (flag) + { + for (j=0; j <2; j++) + { + //SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l1"); + val = h264_GetVLCElement(parent, pInfo, true); + //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l1"); + val = h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for (j=0; j <2; j++) + { + //SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; + } + } + } + + } + } + + return H264_STATUS_OK; +} ///// End of h264_Parse_Pred_Weight_Table + +#endif + +/*--------------------------------------------------------------------------------------------------*/ +// The syntax elements specify marking of the reference pictures. +// 1)IDR: no_output_of_prior_pics_flag, +// long_term_reference_flag, +// 2)NonIDR: adaptive_ref_pic_marking_mode_flag, +// memory_management_control_operation, +// difference_of_pic_nums_minus1, +// long_term_frame_idx, +// long_term_pic_num, and +// max_long_term_frame_idx_plus1 +// +//The marking of a reference picture can be "unused for reference", "used for short-term reference", or "used for longterm +// reference", but only one among these three. 
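#if 0
/* [Editorial aside, not part of the original patch.] The adaptive marking
 * syntax parsed below is a list of memory_management_control_operation (MMCO)
 * codes, terminated by MMCO 0; the comment block above names the operands each
 * code carries. A summary of the semantics (H.264 spec, clause 8.2.5.4):
 */
static const char *mmco_meaning(uint32_t mmco)
{
    switch (mmco) {
    case 0: return "end of MMCO list";
    case 1: return "mark a short-term picture unused (difference_of_pic_num_minus1)";
    case 2: return "mark a long-term picture unused (long_term_pic_num)";
    case 3: return "turn a short-term picture into long-term (difference_of_pic_num_minus1 + long_term_frame_idx)";
    case 4: return "set the max long-term index (max_long_term_frame_idx_plus1)";
    case 5: return "mark all reference pictures unused";
    case 6: return "mark the current picture long-term (long_term_frame_idx)";
    default: return "reserved";
    }
}
#endif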
+/*--------------------------------------------------------------------------------------------------*/
+
+
+h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
+{
+    //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+    uint8_t i = 0;
+    uint32_t code = 0;
+
+    if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+    {
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_dec_refpic.no_output_of_prior_pics_flag = (uint8_t)code;
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_dec_refpic.long_term_reference_flag = (uint8_t)code;
+        pInfo->img.long_term_reference_flag = (uint8_t)code;
+    }
+    else
+    {
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag = (uint8_t)code;
+
+        ///////////////////////////////////////////////////////////////////////////////////////
+        //adaptive_ref_pic_marking_mode_flag   Reference picture marking mode specified
+        //   0   Sliding window reference picture marking mode: A marking mode
+        //       providing a first-in first-out mechanism for short-term reference pictures.
+        //   1   Adaptive reference picture marking mode: A reference picture
+        //       marking mode providing syntax elements to specify marking of
+        //       reference pictures as "unused for reference" and to assign long-term
+        //       frame indices.
+        ///////////////////////////////////////////////////////////////////////////////////////
+
+        if (SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag)
+        {
+            do
+            {
+                if (i < NUM_MMCO_OPERATIONS)
+                {
+                    SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = h264_GetVLCElement(parent, pInfo, false);
+                    if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3))
+                    {
+                        SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+                    }
+
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2)
+                    {
+                        SliceHeader->sh_dec_refpic.long_term_pic_num[i] = h264_GetVLCElement(parent, pInfo, false);
+                    }
+
+                    if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6))
+                    {
+                        SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = h264_GetVLCElement(parent, pInfo, false);
+                    }
+
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4)
+                    {
+                        SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = h264_GetVLCElement(parent, pInfo, false);
+                    }
+
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5)
+                    {
+                        pInfo->img.curr_has_mmco_5 = 1;
+                    }
+                }
+
+                if (i >= NUM_MMCO_OPERATIONS) {
+                    return H264_STATUS_ERROR;
+                }
+
+            } while (SliceHeader->sh_dec_refpic.memory_management_control_operation[i++] != 0);
+        }
+    }
+
+    SliceHeader->sh_dec_refpic.dec_ref_pic_marking_count = i;
+
+    return H264_STATUS_OK;
+}
+
+
+
+//#endif
diff --git a/mixvbp/vbp_plugin/h264/h264parse_sps.c b/mixvbp/vbp_plugin/h264/h264parse_sps.c
new file mode 100755
index 0000000..431892b
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_sps.c
@@ -0,0 +1,691 @@
+//#define H264_PARSE_SPS_C
+//#ifdef H264_PARSE_SPS_C
+
+#include "h264.h"
+#include "h264parse.h"
+#ifdef VBP
+#include
+#endif
+
+
+/// SPS extension unit (unit_type = 13)
+///
+#if 0
+h264_Status h264_Parse_SeqParameterSet_Extension(void *parent, h264_Info * pInfo)
+{
+    /*h264_SPS_Extension_RBSP_t* SPS_ext = pInfo->p_active_SPS_ext;
+
SPS_ext->seq_parameter_set_id = h264_GetVLCElement(pInfo, false); + if(SPS_ext->seq_parameter_set_id > MAX_SEQ_PARAMS-1) + { + return H264_SPS_ERROR; + } + SPS_ext->aux_format_idc = h264_GetVLCElement(pInfo, false); + if(SPS_ext->aux_format_idc > 3) + { + return H264_SPS_ERROR; + } + if(SPS_ext->aux_format_idc != 0) + { + SPS_ext->bit_depth_aux_minus8 = h264_GetVLCElement(pInfo, false); + if(SPS_ext->bit_depth_aux_minus8 + 8 > 12) + { + return H264_SPS_ERROR; + } + + SPS_ext->alpha_incr_flag = h264_GetBits(pInfo, 1, "alpha_incr_flag"); + if(SPS_ext->alpha_incr_flag > 1) + { + return H264_SPS_ERROR; + } + + SPS_ext->alpha_opaque_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_opaque_value"); //+8 to get the bit_depth value + SPS_ext->alpha_transparent_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_transparent_value"); //+8 to get the bit_depth value + } + SPS_ext->additional_extension_flag = h264_GetBits(pInfo, 1, "additional_extension_flag"); + */ + return H264_STATUS_OK; +} +#endif + + +h264_Status h264_Parse_HRD_Parameters(void *parent, h264_Info* pInfo, int nal_hrd,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used) +{ + //seq_param_set_ptr SPS = pInfo->p_active_SPS; + int32_t i = 0; + uint32_t code; + + + if (nal_hrd) + { + SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false); + + if (SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) + { + return H264_SPS_ERROR; + } + + viddec_pm_get_bits(parent, &code, 8); + pVUI_Seq_Not_Used->nal_hrd_bit_rate_scale = (uint8_t)(code>>4); + pVUI_Seq_Not_Used->nal_hrd_cpb_size_scale = (uint8_t)(code & 0xf); + + for (i=0; i<=SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; i++) + { + pVUI_Seq_Not_Used->nal_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->nal_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->nal_hrd_parameters.cbr_flag[i] = (uint8_t)code; + } + + if ( viddec_pm_get_bits(parent, &code, 20) == -1) + return H264_SPS_ERROR; + + SPS->sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f); + SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);; + SPS->sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);; + SPS->sps_disp.vui_seq_parameters.nal_hrd_time_offset_length = (uint8_t)(code&0x1f);; + + } + else + { + SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false); + + if (SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT) + { + return H264_SPS_ERROR; + } + + viddec_pm_get_bits(parent, &code, 8); + pVUI_Seq_Not_Used->vcl_hrd_bit_rate_scale = (uint8_t)(code>>4); + pVUI_Seq_Not_Used->vcl_hrd_cpb_size_scale = (uint8_t)(code&0xf); + + for (i=0; i<=SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; i++) + { + pVUI_Seq_Not_Used->vcl_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->vcl_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->vcl_hrd_parameters.cbr_flag[i] = (uint8_t)code; + } + + if ( viddec_pm_get_bits(parent, &code, 20) == -1) + return H264_SPS_ERROR; + + 
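#if 0
/* [Editorial aside, not part of the original patch.] The 20-bit read above
 * packs four consecutive 5-bit HRD length fields into one word, which the
 * assignments below unpack MSB-first:
 *
 *   bits 19..15  initial_cpb_removal_delay_length_minus1
 *   bits 14..10  cpb_removal_delay_length_minus1
 *   bits  9..5   dpb_output_delay_length_minus1
 *   bits  4..0   time_offset_length
 *
 * The same unpacking, written as a loop:
 */
static void unpack_hrd_lengths(uint32_t code, uint8_t out[4])
{
    int i;
    for (i = 0; i < 4; i++)
        out[i] = (uint8_t)((code >> (15 - 5 * i)) & 0x1f);  /* MSB-first */
}
#endif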
SPS->sps_disp.vui_seq_parameters.vcl_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f); + SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);; + SPS->sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);; + SPS->sps_disp.vui_seq_parameters.vcl_hrd_time_offset_length = (uint8_t)(code&0x1f);; + } + + return H264_STATUS_OK; +} + + + +h264_Status h264_Parse_Vui_Parameters(void *parent, h264_Info* pInfo, seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used) +{ + h264_Status ret = H264_STATUS_OK; + //seq_param_set_ptr SPS = pInfo->p_active_SPS; + int32_t nal_hrd = 0; + uint32_t code; + + do { + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag = (uint8_t)code; + + + if (SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag) + { + viddec_pm_get_bits(parent, &code, 8); + SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc = (uint8_t)code; + + if (SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc == h264_AR_Extended_SAR) + { + viddec_pm_get_bits(parent, &code, 16); + SPS->sps_disp.vui_seq_parameters.sar_width = (uint16_t)code; + + viddec_pm_get_bits(parent, &code, 16); + SPS->sps_disp.vui_seq_parameters.sar_height = (uint16_t)code; + + } + } + + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->overscan_info_present_flag = (uint8_t)code; + + if (pVUI_Seq_Not_Used->overscan_info_present_flag) + { + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->overscan_appropriate_flag = (uint8_t)code; + } + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag = (uint8_t)code; + + if (SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag) + { + viddec_pm_get_bits(parent, &code, 3); + SPS->sps_disp.vui_seq_parameters.video_format = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->video_full_range_flag = (uint8_t)code; +#ifdef VBP + SPS->sps_disp.vui_seq_parameters.video_full_range_flag = (uint8_t)code; +#endif + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.colour_description_present_flag = (uint8_t)code; + + if (SPS->sps_disp.vui_seq_parameters.colour_description_present_flag) + { + viddec_pm_get_bits(parent, &code, 8); + SPS->sps_disp.vui_seq_parameters.colour_primaries = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 8); + SPS->sps_disp.vui_seq_parameters.transfer_characteristics = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 8); + pVUI_Seq_Not_Used->matrix_coefficients = (uint8_t)code; +#ifdef VBP + SPS->sps_disp.vui_seq_parameters.matrix_coefficients = (uint8_t)code; +#endif + } + } + + viddec_pm_get_bits(parent, &code, 1); + pVUI_Seq_Not_Used->chroma_location_info_present_flag = (uint8_t)code; + + if (pVUI_Seq_Not_Used->chroma_location_info_present_flag) + { + pVUI_Seq_Not_Used->chroma_sample_loc_type_top_field = h264_GetVLCElement(parent, pInfo, false); + pVUI_Seq_Not_Used->chroma_sample_loc_type_bottom_field = h264_GetVLCElement(parent, pInfo, false); + } + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.vui_seq_parameters.timing_info_present_flag = (uint8_t)code; + + if (SPS->sps_disp.vui_seq_parameters.timing_info_present_flag == 1) + { + viddec_pm_get_bits(parent, &code, 32); + SPS->sps_disp.vui_seq_parameters.num_units_in_tick = (uint32_t)code; + + viddec_pm_get_bits(parent, &code, 32); + SPS->sps_disp.vui_seq_parameters.time_scale = 
+
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->sps_disp.vui_seq_parameters.fixed_frame_rate_flag = (uint8_t)code;
+        }
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag = (uint8_t)code;
+
+        if (SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+        {
+            nal_hrd = 1;
+            ret = h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used);
+        }
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag = (uint8_t)code;
+
+        if (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)
+        {
+            nal_hrd = 0;
+            ret = (h264_Status)h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used);
+        }
+
+        if ((SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) || (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1))
+        {
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->sps_disp.vui_seq_parameters.low_delay_hrd_flag = (uint8_t)code;
+        }
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.vui_seq_parameters.pic_struct_present_flag = (uint8_t)code;
+
+        if (viddec_pm_get_bits(parent, &code, 1) == -1) {
+            ret = H264_STATUS_ERROR;
+            break;
+        }
+        SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag = (uint8_t)code;
+
+        if (SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag)
+        {
+            viddec_pm_get_bits(parent, &code, 1);
+            pVUI_Seq_Not_Used->motion_vectors_over_pic_boundaries_flag = (uint8_t)code;
+
+            pVUI_Seq_Not_Used->max_bytes_per_pic_denom = h264_GetVLCElement(parent, pInfo, false);
+            pVUI_Seq_Not_Used->max_bits_per_mb_denom = h264_GetVLCElement(parent, pInfo, false);
+            pVUI_Seq_Not_Used->log2_max_mv_length_horizontal = h264_GetVLCElement(parent, pInfo, false);
+            pVUI_Seq_Not_Used->log2_max_mv_length_vertical = h264_GetVLCElement(parent, pInfo, false);
+            SPS->sps_disp.vui_seq_parameters.num_reorder_frames = h264_GetVLCElement(parent, pInfo, false);
+            SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering = h264_GetVLCElement(parent, pInfo, false);
+
+            if (SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering == MAX_INT32_VALUE)
+                ret = H264_STATUS_ERROR;
+        }
+    } while (0);
+
+    return ret;
+}
+
+
+h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame)
+{
+    h264_Status ret = H264_SPS_ERROR;
+
+    int32_t i = 0, tmp = 0;
+    int32_t PicWidthInMbs, PicHeightInMapUnits, FrameHeightInMbs;
+    uint32_t code = 0;
+    uint32_t data = 0;
+
+    //SPS->profile_idc = h264_GetBits(pInfo, 8, "Profile");
+    viddec_pm_get_bits(parent, &code, 8);
+    SPS->profile_idc = (uint8_t)code;
+
+    switch (SPS->profile_idc)
+    {
+    case h264_ProfileBaseline:
+    case h264_ProfileMain:
+    case h264_ProfileExtended:
+    case h264_ProfileHigh10:
+    case h264_ProfileHigh422:
+    case h264_ProfileHigh444:
+    case h264_ProfileHigh:
+        break;
+    default:
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        pInfo->sw_bail = 1;
+#endif
+#endif
+        return H264_SPS_INVALID_PROFILE;
+        break;
+    }
+
+    //SPS->constraint_set0_flag = h264_GetBits(pInfo, 1, "constraint_set0_flag");
+    //SPS->constraint_set1_flag = h264_GetBits(pInfo, 1, "constraint_set1_flag"); //should be 1
+    //SPS->constraint_set2_flag = h264_GetBits(pInfo, 1, "constraint_set2_flag");
+    //SPS->constraint_set3_flag = h264_GetBits(pInfo, 1, "constraint_set3_flag");
+
+#ifdef VBP
+    viddec_pm_get_bits(parent, &code, 5); //constraint flag set0...set4 (h.264 Spec v2009)
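+    // Note: this VBP build reads five constraint_set flags (set0..set4, per
+    // the 2009 edition of the spec) plus 3 reserved bits; the non-VBP branch
+    // below reads the original four flags plus 4 reserved bits. Either way a
+    // full byte is consumed.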
+    SPS->constraint_set_flags = (uint8_t)code;
+
+    //// reserved_zero_3bits
+    viddec_pm_get_bits(parent, (uint32_t *)&code, 3); //3bits zero reserved (h.264 Spec v2009)
+#else
+
+    viddec_pm_get_bits(parent, &code, 4);
+    SPS->constraint_set_flags = (uint8_t)code;
+
+    //// reserved_zero_4bits
+    viddec_pm_get_bits(parent, (uint32_t *)&code, 4);
+#endif
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+    if (code != 0)
+    {
+        pInfo->sw_bail = 1;
+    }
+#endif
+#endif
+    viddec_pm_get_bits(parent, &code, 8);
+    SPS->level_idc = (uint8_t)code;
+
+    switch (SPS->level_idc)
+    {
+    case h264_Level1b:
+    case h264_Level1:
+    case h264_Level11:
+    case h264_Level12:
+    case h264_Level13:
+    case h264_Level2:
+    case h264_Level21:
+    case h264_Level22:
+    case h264_Level3:
+    case h264_Level31:
+    case h264_Level32:
+    case h264_Level4:
+    case h264_Level41:
+    case h264_Level42:
+    case h264_Level5:
+    case h264_Level51:
+        break;
+    default:
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        pInfo->sw_bail = 1;
+#endif
+#endif
+        return H264_SPS_INVALID_LEVEL;
+    }
+
+    do {
+        SPS->seq_parameter_set_id = h264_GetVLCElement(parent, pInfo, false);
+
+        //// seq_parameter_set_id ---[0,31]
+        if (SPS->seq_parameter_set_id > MAX_NUM_SPS -1)
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+#ifdef VBP
+        SPS->sps_disp.separate_colour_plane_flag = 0;
+#endif
+
+        if ((SPS->profile_idc == h264_ProfileHigh) || (SPS->profile_idc == h264_ProfileHigh10) ||
+            (SPS->profile_idc == h264_ProfileHigh422) || (SPS->profile_idc == h264_ProfileHigh444) )
+        {
+            //// chroma_format_idc ---[0,3], currently we don't support 444, so [0,2]
+            data = h264_GetVLCElement(parent, pInfo, false);
+            if ( data > H264_CHROMA_422)
+                break;
+            SPS->sps_disp.chroma_format_idc = (uint8_t)data;
+            //if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) {}
+
+#ifdef VBP
+            if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) {
+                viddec_pm_get_bits(parent, &code, 1);
+                SPS->sps_disp.separate_colour_plane_flag = (uint8_t)code;
+            }
+#endif
+            //// bit_depth_luma_minus8 ---[0,4], -----only support 8-bit pixel
+            data = h264_GetVLCElement(parent, pInfo, false);
+            if ( data)
+                break;
+            SPS->bit_depth_luma_minus8 = (uint8_t)data;
+
+            //// bit_depth_chroma_minus8 ---[0,4]
+            data = h264_GetVLCElement(parent, pInfo, false);
+            if ( data )
+                break;
+            SPS->bit_depth_chroma_minus8 = (uint8_t)data;
+
+
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->lossless_qpprime_y_zero_flag = (uint8_t)code;
+
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->seq_scaling_matrix_present_flag = (uint8_t)code;
+
+            if (SPS->seq_scaling_matrix_present_flag == 1)
+            {
+                //int n_ScalingList = (SPS->sps_disp.chroma_format_idc != H264_CHROMA_444) ? 8 : 12;
+                int n_ScalingList = 8; /// We do not support 444 currently
+
+                for (i=0; i<n_ScalingList; i++)
+                {
+                    viddec_pm_get_bits(parent, &code, 1);
+                    SPS->seq_scaling_list_present_flag[i] = (uint8_t)code;
+
+                    if (SPS->seq_scaling_list_present_flag[i])
+                    {
+                        if (i<6)
+                            h264_Scaling_List(parent, SPS->ScalingList4x4[i], 16, &SPS->UseDefaultScalingMatrix4x4Flag[i], pInfo);
+                        else
+                            h264_Scaling_List(parent, SPS->ScalingList8x8[i-6], 64, &SPS->UseDefaultScalingMatrix8x8Flag[i-6], pInfo);
+                    }
+                }
+            }
+        }
+        else
+        {
+            SPS->sps_disp.chroma_format_idc = 1;
+            SPS->seq_scaling_matrix_present_flag = 0;
+
+            SPS->bit_depth_luma_minus8 = 0;
+            SPS->bit_depth_chroma_minus8 = 0;
+            //h264_SetDefaultScalingLists(pInfo);
+        }
+
+        //// log2_max_frame_num_minus4 ---[0,12]
+        data = (h264_GetVLCElement(parent, pInfo, false));
+        if ( data > 12)
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+        SPS->log2_max_frame_num_minus4 = (uint8_t)data;
+
+        //// pic_order_cnt_type ---- [0,2]
+        data = h264_GetVLCElement(parent, pInfo, false);
+        if ( data > 2)
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+        SPS->pic_order_cnt_type = (uint8_t)data;
+
+
+        SPS->expectedDeltaPerPOCCycle = 0;
+        if (SPS->pic_order_cnt_type == 0) {
+            SPS->log2_max_pic_order_cnt_lsb_minus4 = h264_GetVLCElement(parent, pInfo, false);
+        } else if (SPS->pic_order_cnt_type == 1) {
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->delta_pic_order_always_zero_flag = (uint8_t)code;
+
+            SPS->offset_for_non_ref_pic = h264_GetVLCElement(parent, pInfo, true);
+            SPS->offset_for_top_to_bottom_field = h264_GetVLCElement(parent, pInfo, true);
+
+            //// num_ref_frames_in_pic_order_cnt_cycle ---- [0,255]
+            data = h264_GetVLCElement(parent, pInfo, false);
+            if ( data > 255)
+            {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+                pInfo->sw_bail = 1;
+#endif
+#endif
+                break;
+            }
+            SPS->num_ref_frames_in_pic_order_cnt_cycle = (uint8_t)data;
+
+
+            //Alloc memory for frame offset -- FIXME
+            for (i=0; i< SPS->num_ref_frames_in_pic_order_cnt_cycle; i++)
+            {
+                /////SPS->offset_for_ref_frame[i] could be removed from SPS
+#ifndef USER_MODE
+                tmp = h264_GetVLCElement(parent, pInfo, true);
+                pOffset_ref_frame[i]=tmp;
+                SPS->expectedDeltaPerPOCCycle += tmp;
+#else
+                tmp = h264_GetVLCElement(parent, pInfo, true);
+                SPS->offset_for_ref_frame[i]=tmp;
+                SPS->expectedDeltaPerPOCCycle += tmp;
+#endif
+            }
+        }
+
+        //// num_ref_frames ---[0,16]
+        data = h264_GetVLCElement(parent, pInfo, false);
+        if ( data > 16)
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+        SPS->num_ref_frames = (uint8_t)data;
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->gaps_in_frame_num_value_allowed_flag = (uint8_t)code;
+
+
+        SPS->sps_disp.pic_width_in_mbs_minus1 = h264_GetVLCElement(parent, pInfo, false);
+        SPS->sps_disp.pic_height_in_map_units_minus1 = h264_GetVLCElement(parent, pInfo, false);
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.frame_mbs_only_flag = (uint8_t)code;
+
+        /// err check for size
+        PicWidthInMbs = (SPS->sps_disp.pic_width_in_mbs_minus1 + 1);
+        PicHeightInMapUnits = (SPS->sps_disp.pic_height_in_map_units_minus1 + 1);
+        FrameHeightInMbs = SPS->sps_disp.frame_mbs_only_flag?
PicHeightInMapUnits: (PicHeightInMapUnits<<1); + if ((PicWidthInMbs < 2) || (PicWidthInMbs > 128) || (FrameHeightInMbs < 2) || (FrameHeightInMbs>128)) + break; + + if (!SPS->sps_disp.frame_mbs_only_flag) + { + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.mb_adaptive_frame_field_flag = (uint8_t)code; + } + + //SPS->frame_height_in_mbs = (2-SPS->sps_disp.frame_mbs_only_flag)*(SPS->sps_disp.pic_height_in_map_units_minus1+1); + //SPS->pic_size_in_map_units = (SPS->sps_disp.pic_width_in_mbs_minus1+1)*SPS->sps_disp.frame_height_in_mbs; + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.direct_8x8_inference_flag = (uint8_t)code; + + viddec_pm_get_bits(parent, &code, 1); + SPS->sps_disp.frame_cropping_flag = (uint8_t)code; + + if (SPS->sps_disp.frame_cropping_flag) + { + SPS->sps_disp.frame_crop_rect_left_offset = h264_GetVLCElement(parent, pInfo, false); + SPS->sps_disp.frame_crop_rect_right_offset = h264_GetVLCElement(parent, pInfo, false); + SPS->sps_disp.frame_crop_rect_top_offset = h264_GetVLCElement(parent, pInfo, false); + SPS->sps_disp.frame_crop_rect_bottom_offset = h264_GetVLCElement(parent, pInfo, false); + } + + //// when frame_mbs_only_flag is equal to 0, direct_8x8_inference_flag shall be equal to 1 + if (SPS->sps_disp.frame_mbs_only_flag == 0 && SPS->sps_disp.direct_8x8_inference_flag == 0) { + break; + } + + ////// vui_parameters + if (viddec_pm_get_bits(parent, &code, 1) == -1) + break; + SPS->sps_disp.vui_parameters_present_flag = (uint8_t)code; + ret = H264_STATUS_OK; + + if (SPS->sps_disp.vui_parameters_present_flag) + { +#ifndef VBP + ret = h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used); +#else + // Ignore VUI parsing result + h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used); + if (SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag) + { + i = SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; + uint32_t bit_rate_value = 0; + bit_rate_value = pVUI_Seq_Not_Used->nal_hrd_parameters.bit_rate_value_minus1[i] + 1; + bit_rate_value *= pow(2, 6 + pVUI_Seq_Not_Used->nal_hrd_bit_rate_scale); + SPS->sps_disp.vui_seq_parameters.bit_rate_value = bit_rate_value; + } + /* + else if (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag) + { + i = SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; + uint32_t bit_rate_value = 0; + bit_rate_value = pVUI_Seq_Not_Used->vcl_hrd_parameters.bit_rate_value_minus1[i] + 1; + bit_rate_value *= pow(2, 6 + pVUI_Seq_Not_Used->vcl_hrd_bit_rate_scale); + SPS->sps_disp.vui_seq_parameters.bit_rate_value = bit_rate_value; + }*/ + +#endif + } + } while (0); +#ifdef VBP + if (SPS->sps_disp.vui_seq_parameters.bit_rate_value == 0) + { + int maxBR = 0; + switch(SPS->level_idc) + { + case h264_Level1: + maxBR = 64; + break; + + case h264_Level1b: + maxBR = 128; + break; + + case h264_Level11: + maxBR = 192; + break; + + case h264_Level12: + maxBR = 384; + break; + + case h264_Level13: + maxBR = 768; + break; + + case h264_Level2: + maxBR = 2000; + break; + + case h264_Level21: + case h264_Level22: + maxBR = 4000; + break; + + case h264_Level3: + maxBR = 10000; + break; + + case h264_Level31: + maxBR = 14000; + break; + + case h264_Level32: + case h264_Level4: + maxBR = 20000; + break; + + case h264_Level41: + case h264_Level42: + maxBR = 50000; + break; + + case h264_Level5: + maxBR = 135000; + break; + + case h264_Level51: + maxBR = 240000; + break; + } + + uint32_t cpbBrVclFactor = 1200; + if (SPS->profile_idc == 100) + { + cpbBrVclFactor = 1500; // HIGH + } + else if 
(SPS->profile_idc == 110)
+    {
+        cpbBrVclFactor = 3600; // HIGH 10
+    }
+    else if (SPS->profile_idc == 122 ||
+             SPS->profile_idc == 144)
+    {
+        cpbBrVclFactor = 4800; // HIGH 4:2:2 and HIGH 4:4:4
+    }
+
+    SPS->sps_disp.vui_seq_parameters.bit_rate_value = maxBR * cpbBrVclFactor;
+    }
+#endif
+
+    //h264_Parse_rbsp_trailing_bits(pInfo);
+
+    return ret;
+}
+
+//#endif
+
diff --git a/mixvbp/vbp_plugin/h264/include/h264.h b/mixvbp/vbp_plugin/h264/include/h264.h
new file mode 100755
index 0000000..eac5541
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/include/h264.h
@@ -0,0 +1,1118 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: H.264 header.
+//
+*/
+
+
+#ifndef _H264_H_
+#define _H264_H_
+
+#ifdef HOST_ONLY
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#endif
+
+#include "stdint.h"
+#include "viddec_debug.h"
+
+#include "viddec_fw_workload.h"
+#include "h264parse_sei.h"
+
+#ifdef VBP
+//#define SW_ERROR_CONCEALEMNT
+#endif
+
+#ifdef WIN32
+#define mfd_printf OS_INFO
+#endif
+
+#ifdef H264_VERBOSE
+#define PRINTF(format, args...) OS_INFO("%s: %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ , ## args )
+#else
+//#define PRINTF(args...)
+#endif
+
+//#pragma warning(disable : 4710) // function not inlined
+//#pragma warning(disable : 4514) // unreferenced inline function has been removed CL
+//#pragma warning(disable : 4100) // unreferenced formal parameter CL
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define MAX_INT32_VALUE 0x7fffffff
+
+#define MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE 256
+#define MAX_CPB_CNT 32
+#define MAX_NUM_SLICE_GRPS 1 //As per Annex A for high profile, the num_slice_groups_minus1 is 0
+#define MAX_PIC_LIST_NUM 8
+
+//#define MAX_PIC_SIZE_IN_MAP_UNITS 1024 //0 ???????? Henry
+#define MAX_NUM_REF_IDX_L0_ACTIVE 32
+//#define STARTCODE_BUF_SIZE 2048+1024
+
+#define NUM_MMCO_OPERATIONS 17
+
+// Used to check whether the SEI RP is the only way for recovery (cisco contents)
+// This threshold decides the recovery interval: recovery is forced even with no
+// detected error if no IDR has been seen during this time.
+#define SEI_REC_CHECK_TH 8
+
+//SPS
+#define MAX_NUM_SPS 32
+#define SCL_DEFAULT 1
+
+//PPS
+#define MAX_PIC_PARAMS 255
+#define MAX_NUM_REF_FRAMES 32
+#define MAX_QP 51
+#define MAX_NUM_PPS 256
+
+#define PUT_FS_IDC_BITS(w) (w&0x1F)
+#define PUT_LIST_INDEX_FIELD_BIT(w) ((w&0x1)<<5)
+#define PUT_LIST_LONG_TERM_BITS(w) ((w&0x1)<<6)
+#define PUT_LIST_PTR_LIST_ID_BIT(id) (id<<5)
+
+
+// DPB
+#define FRAME_FLAG_DANGLING_TOP_FIELD ( 0x1 << 3 )
+#define FRAME_FLAG_DANGLING_BOTTOM_FIELD ( 0x1 << 4 )
+
+#define MPD_DPB_FS_NULL_IDC 31 // May need to be changed if we alter gaps_in_frame_num to use
+
+#define MFD_H264_MAX_FRAME_BUFFERS 17
+#define NUM_DPB_FRAME_STORES (MFD_H264_MAX_FRAME_BUFFERS + 1) // 1 extra for storing non-existent pictures.
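+// Sizing note: with MFD_H264_MAX_FRAME_BUFFERS = 17 this yields 18 frame
+// stores -- presumably 16 reference frames plus the current decode target,
+// with the extra slot holding the "non-existent" frames synthesized when
+// gaps_in_frame_num is signalled.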
+
+//Scaling Matrix Type
+#define PPS_QM 0
+#define SPS_QM 1
+#define FB_QM 2
+#define DEFAULT_QM 3
+
+//Frame Type
+#define FRAME_TYPE_IDR 0x00
+#define FRAME_TYPE_I 0x01
+#define FRAME_TYPE_P 0x02
+#define FRAME_TYPE_B 0x03
+#define FRAME_TYPE_INVALID 0x04
+
+
+#define FRAME_TYPE_FRAME_OFFSET 3
+#define FRAME_TYPE_TOP_OFFSET 3
+#define FRAME_TYPE_BOTTOM_OFFSET 0
+#define FRAME_TYPE_STRUCTRUE_OFFSET 6
+
+//// Error handling
+#define FIELD_ERR_OFFSET 17 //offset for Field error flag ----refer to the structure definition viddec_fw_workload_error_codes in viddec_fw_common_defs.h
+
+////Bits Handling
+#define h264_bitfields_extract(x_32, start, mask) (((x_32) >> (start)) & (mask) )
+#define h264_bitfields_insert(x_32, val_32, start, mask) ((x_32) = (((x_32) & ~( (mask) << (start))) | (((val_32) & (mask)) << (start))))
+
+
+//// PIP
+    typedef enum _pip_setting_t
+    {
+        PIP_SCALER_DISABLED,
+        PIP_SCALE_FACTOR_1_BY_4,
+        PIP_SCALE_FACTOR_1_BY_2,
+        PIP_SCALER_INVALID,
+
+    } pip_setting_t;
+
+
+#ifdef VERBOSE
+#define DEBUGGETBITS(args...) OS_INFO( args )
+#else
+//#define DEBUGGETBITS(args...)
+#endif
+
+    /* status codes */
+    typedef enum _h264_Status
+    {
+        H264_STATUS_EOF = 1, // end of file
+        H264_STATUS_OK = 0, // no error
+        H264_STATUS_NO_MEM = 2, // out of memory
+        H264_STATUS_FILE_ERROR = 3, // file error
+        H264_STATUS_NOTSUPPORT = 4, // not supported mode
+        H264_STATUS_PARSE_ERROR = 5, // failure parsing the H.264 stream
+        H264_STATUS_ERROR = 6, // unknown/unspecified error
+        H264_NAL_ERROR,
+        H264_SPS_INVALID_PROFILE,
+        H264_SPS_INVALID_LEVEL,
+        H264_SPS_INVALID_SEQ_PARAM_ID,
+        H264_SPS_ERROR,
+        H264_PPS_INVALID_PIC_ID,
+        H264_PPS_INVALID_SEQ_ID,
+        H264_PPS_ERROR,
+        H264_SliceHeader_INVALID_MB,
+        H264_SliceHeader_ERROR,
+        H264_FRAME_DONE,
+        H264_SLICE_DONE,
+        H264_STATUS_POLL_ONCE_ERROR,
+        H264_STATUS_DEC_MEMINIT_ERROR,
+        H264_STATUS_NAL_UNIT_TYPE_ERROR,
+        H264_STATUS_SEI_ERROR,
+        H264_STATUS_SEI_DONE,
+    } h264_Status;
+
+
+
+    typedef enum _picture_structure_t
+    {
+        TOP_FIELD = 1,
+        BOTTOM_FIELD = 2,
+        FRAME = 3,
+        INVALID = 4
+    } picture_structure_t;
+
+///// Chroma format
+
+    typedef enum _h264_chroma_format_t
+    {
+        H264_CHROMA_MONOCHROME,
+        H264_CHROMA_420,
+        H264_CHROMA_422,
+        H264_CHROMA_444,
+    } h264_chroma_format_t;
+
+    /* H264 start code values */
+    typedef enum _h264_nal_unit_type
+    {
+        h264_NAL_UNIT_TYPE_unspecified = 0,
+        h264_NAL_UNIT_TYPE_SLICE,
+        h264_NAL_UNIT_TYPE_DPA,
+        h264_NAL_UNIT_TYPE_DPB,
+        h264_NAL_UNIT_TYPE_DPC,
+        h264_NAL_UNIT_TYPE_IDR,
+        h264_NAL_UNIT_TYPE_SEI,
+        h264_NAL_UNIT_TYPE_SPS,
+        h264_NAL_UNIT_TYPE_PPS,
+        h264_NAL_UNIT_TYPE_Acc_unit_delimiter,
+        h264_NAL_UNIT_TYPE_EOSeq,
+        h264_NAL_UNIT_TYPE_EOstream,
+        h264_NAL_UNIT_TYPE_filler_data,
+        h264_NAL_UNIT_TYPE_SPS_extension,
+        h264_NAL_UNIT_TYPE_Reserved1 =14, /*14-18*/
+        h264_NAL_UNIT_TYPE_Reserved2 =15, /*14-18*/
+        h264_NAL_UNIT_TYPE_Reserved3 =16, /*14-18*/
+        h264_NAL_UNIT_TYPE_Reserved4 =17, /*14-18*/
+        h264_NAL_UNIT_TYPE_Reserved5 =18, /*14-18*/
+        h264_NAL_UNIT_TYPE_ACP =19,
+        h264_NAL_UNIT_TYPE_Reserved6 =20, /*20-23*/
+        h264_NAL_UNIT_TYPE_unspecified2 =24, /*24-31*/
+    } h264_nal_unit_type;
+
+#define h264_NAL_PRIORITY_HIGHEST 3
+#define h264_NAL_PRIORITY_HIGH 2
+#define h264_NAL_PRIRITY_LOW 1
+#define h264_NAL_PRIORITY_DISPOSABLE 0
+
+
+    typedef enum _h264_Profile
+    {
+        h264_ProfileBaseline = 66, /** Baseline profile */
+        h264_ProfileMain = 77, /** Main profile */
+        h264_ProfileExtended = 88, /** Extended profile */
+        h264_ProfileHigh = 100 , /** High profile */
+        h264_ProfileHigh10 = 110, /** High 10 profile */
+        h264_ProfileHigh422 = 122, /** High profile 4:2:2 */
+        h264_ProfileHigh444 = 144, /** High profile 4:4:4 */
+    } h264_Profile;
+
+
+    typedef enum _h264_Level
+    {
+        h264_Level1b = 9, /** Level 1b */
+        h264_Level1 = 10, /** Level 1 */
+        h264_Level11 = 11, /** Level 1.1 */
+        h264_Level12 = 12, /** Level 1.2 */
+        h264_Level13 = 13, /** Level 1.3 */
+        h264_Level2 = 20, /** Level 2 */
+        h264_Level21 = 21, /** Level 2.1 */
+        h264_Level22 = 22, /** Level 2.2 */
+        h264_Level3 = 30, /** Level 3 */
+        h264_Level31 = 31, /** Level 3.1 */
+        h264_Level32 = 32, /** Level 3.2 */
+        h264_Level4 = 40, /** Level 4 */
+        h264_Level41 = 41, /** Level 4.1 */
+        h264_Level42 = 42, /** Level 4.2 */
+        h264_Level5 = 50, /** Level 5 */
+        h264_Level51 = 51, /** Level 5.1 */
+        h264_LevelReserved = 255 /** Unknown level */
+    } h264_Level;
+
+
+    typedef enum _h264_video_format
+    {
+        h264_Component =0,
+        h264_PAL,
+        h264_NTSC,
+        h264_SECAM,
+        h264_MAC,
+        h264_unspecified,
+        h264_Reserved6,
+        h264_Reserved7
+    } h264_video_format;
+
+
+    typedef enum _h264_fcm
+    {
+        h264_ProgressiveFrame = 0,
+        h264_InterlacedFrame = 1,
+        h264_InterlacedField = 3,
+        h264_PictureFormatNone
+    } h264_fcm;
+
+
+///// Define the picture types []
+    typedef enum _h264_ptype_t
+    {
+        h264_PtypeP = 0,
+        h264_PtypeB = 1,
+        h264_PtypeI = 2,
+        h264_PtypeSP = 3,
+        h264_PtypeSI = 4,
+        h264_Ptype_unspecified,
+    } h264_ptype_t;
+
+
+///// Aspect ratio
+    typedef enum _h264_aspect_ratio
+    {
+        h264_AR_Unspecified = 0,
+        h264_AR_1_1 = 1,
+        h264_AR_12_11 = 2,
+        h264_AR_10_11 = 3,
+        h264_AR_16_11 = 4,
+        h264_AR_40_33 = 5,
+        h264_AR_24_11 = 6,
+        h264_AR_20_11 = 7,
+        h264_AR_32_11 = 8,
+        h264_AR_80_33 = 9,
+        h264_AR_18_11 = 10,
+        h264_AR_15_11 = 11,
+        h264_AR_64_33 = 12,
+        h264_AR_160_99 = 13,
+        h264_AR_4_3 = 14,
+        h264_AR_3_2 = 15,
+        h264_AR_2_1 = 16,
+        h264_AR_RESERVED = 17,
+        h264_AR_Extended_SAR = 255,
+    } h264_aspect_ratio;
+
+
+//////////////////////////////////////////////
+
+//////////////////////////////////////////////
+// storable_picture
+
+    /* Structure details
+       If all members remain ints
+       Size = 11 ints, i.e. 44 bytes
+    */
+
+    typedef struct
+    {
+        int32_t poc;
+        int32_t pic_num;
+
+        int32_t long_term_pic_num;
+
+        uint8_t long_term_frame_idx;
+        uint8_t is_long_term;
+        uint8_t used_for_reference;
+        uint8_t pad_flag; // Used to indicate the status
+
+    } storable_picture, *storable_picture_ptr;
+
+//////////////////////////////////////////////
+// frame store
+
+    /* Structure details
+       If all members remain ints
+       Size = 46 ints, i.e.
184 bytes + */ + + typedef struct _frame_store + { + storable_picture frame; + storable_picture top_field; + storable_picture bottom_field; + + int32_t frame_num; + + int32_t frame_num_wrap; + + + uint8_t fs_idc; + uint8_t pic_type; //bit7 structure: 1 frame , 0 field; + //bit4,5,6 top field (frame) pic type, 00 IDR 01 I 10 P 11 B 100 INVALID + //bit1,2,3 bottom pic type, 00 IDR 01 I 10 P 11 B 100 INVALID + uint8_t long_term_frame_idx; // No two frame stores may have the same long-term frame index + +#define viddec_h264_get_dec_structure(x) h264_bitfields_extract( (x)->fs_flag_1, 0, 0x03) +#define viddec_h264_set_dec_structure(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 0, 0x03) +#define viddec_h264_get_is_used(x) h264_bitfields_extract( (x)->fs_flag_1, 2, 0x03) +#define viddec_h264_set_is_frame_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x03) +#define viddec_h264_set_is_top_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x01) +#define viddec_h264_set_is_bottom_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 3, 0x01) +#define viddec_h264_get_is_skipped(x) h264_bitfields_extract( (x)->fs_flag_1, 4, 0x03) +#define viddec_h264_set_is_frame_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x03) +#define viddec_h264_set_is_top_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x01) +#define viddec_h264_set_is_bottom_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 5, 0x01) +#define viddec_h264_get_is_long_term(x) h264_bitfields_extract( (x)->fs_flag_1, 6, 0x03) +#define viddec_h264_set_is_frame_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x03) +#define viddec_h264_set_is_top_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x01) +#define viddec_h264_set_is_bottom_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 7, 0x01) + uint8_t fs_flag_1; + + +#define viddec_h264_get_is_non_existent(x) h264_bitfields_extract( (x)->fs_flag_2, 0, 0x01) +#define viddec_h264_set_is_non_existent(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 0, 0x01) +#define viddec_h264_get_is_output(x) h264_bitfields_extract( (x)->fs_flag_2, 1, 0x01) +#define viddec_h264_set_is_output(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 1, 0x01) +#define viddec_h264_get_is_dangling(x) h264_bitfields_extract( (x)->fs_flag_2, 2, 0x01) +#define viddec_h264_set_is_dangling(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 2, 0x01) +#define viddec_h264_get_recovery_pt_picture(x) h264_bitfields_extract( (x)->fs_flag_2, 3, 0x01) +#define viddec_h264_set_recovery_pt_picture(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 3, 0x01) +#define viddec_h264_get_broken_link_picture(x) h264_bitfields_extract( (x)->fs_flag_2, 4, 0x01) +#define viddec_h264_set_broken_link_picture(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 4, 0x01) +#define viddec_h264_get_open_gop_entry(x) h264_bitfields_extract( (x)->fs_flag_2, 5, 0x01) +#define viddec_h264_set_open_gop_entry(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 5, 0x01) +#define viddec_h264_get_first_field_intra(x) h264_bitfields_extract( (x)->fs_flag_2, 6, 0x01) +#define viddec_h264_set_first_field_intra(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 6, 0x01) + uint8_t fs_flag_2; + + uint8_t fs_flag_reserve_1; + uint8_t fs_flag_reserve_2; + uint8_t fs_flag_reserve_3; + + // If non-reference, may have skipped pixel decode + //uint8_t non_ref_skipped; + } frame_store, *frame_param_ptr; + +//! 
Decoded Picture Buffer
+    typedef struct _h264_decoded_picture_buffer
+    {
+        ///
+        int32_t last_output_poc;
+        int32_t max_long_term_pic_idx;
+
+        //// Resolutions
+        int32_t PicWidthInMbs;
+        int32_t FrameHeightInMbs;
+
+        frame_store fs[NUM_DPB_FRAME_STORES];
+        frame_store* active_fs;
+
+        uint8_t fs_ref_idc[16];
+        uint8_t fs_ltref_idc[16];
+
+        uint8_t fs_dpb_idc[NUM_DPB_FRAME_STORES+2];
+
+        uint8_t listX_0[33+3]; // [bit5]:field_flag:0 for top, 1 for bottom, [bit4~0]:fs_idc
+        uint8_t listX_1[33+3];
+
+        uint8_t listXsize[2]; // 1 to 32
+        uint8_t nInitListSize[2];
+
+        //uint32_t size;
+        uint8_t fs_dec_idc;
+        uint8_t fs_non_exist_idc;
+        uint8_t BumpLevel;
+        uint8_t used_size;
+
+        uint8_t OutputLevel;
+        uint8_t OutputLevelValid;
+        uint8_t OutputCtrl;
+        uint8_t num_ref_frames;
+
+        uint8_t ref_frames_in_buffer;
+        uint8_t ltref_frames_in_buffer;
+        uint8_t SuspendOutput;
+        uint8_t WaitSeiRecovery;
+
+
+        uint8_t frame_numbers_need_to_be_allocated;
+        uint8_t frame_id_need_to_be_allocated;
+
+        //// frame list to release from dpb, still needs to be displayed
+        uint8_t frame_numbers_need_to_be_removed;
+        uint8_t frame_id_need_to_be_removed[17];
+
+        //// frame list to remove from dpb but not display
+        uint8_t frame_numbers_need_to_be_dropped;
+        uint8_t frame_id_need_to_be_dropped[17];
+
+        //// frame list to display (in display order)
+        uint8_t frame_numbers_need_to_be_displayed;
+        uint8_t frame_id_need_to_be_displayed[17];
+
+
+    } h264_DecodedPictureBuffer;
+
+
+//////////////////////////////////////////////
+// qm_matrix_set
+    typedef struct _qm_matrix_set
+    {
+//        uint8_t scaling_default_vector;
+        uint8_t scaling_list[56]; // 0 to 23 for qm 0 to 5 (4x4), 24 to 55 for qm 6 & 7 (8x8)
+
+    } qm_matrix_set, *qm_matrix_set_ptr;
+
+    /*
+    ///////// Currently not enabled in parser fw///////////////////
+    typedef struct _h264_SPS_Extension_RBSP {
+        int32_t seq_parameter_set_id; //UE
+        int32_t aux_format_idc; //UE
+        int32_t bit_depth_aux_minus8; //UE
+        int32_t alpha_incr_flag;
+        int32_t alpha_opaque_value;
+        int32_t alpha_transparent_value;
+        int32_t additional_extension_flag;
+        // h264_rbsp_trail_set* rbsp_trail_ptr;
+    }h264_SPS_Extension_RBSP_t;
+    */
+
+    typedef struct _h264_hrd_param_set {
+        int32_t bit_rate_value_minus1[MAX_CPB_CNT]; // ue(v), 0 to (2^32)-2
+        int32_t cpb_size_value_minus1[MAX_CPB_CNT]; // ue(v), 0 to (2^32)-2
+
+        uint8_t cbr_flag[MAX_CPB_CNT]; // u(1) * 32
+
+    } h264_hrd_param_set, *h264_hrd_param_set_ptr;
+
+    typedef struct _vui_seq_parameters_t_used
+    {
+        uint32_t num_units_in_tick; // u(32)
+        uint32_t time_scale; // u(32)
+
+        int32_t num_reorder_frames; // ue(v), 0 to max_dec_frame_buffering
+        int32_t max_dec_frame_buffering; // ue(v), 0 to MaxDpbSize, specified in subclause A.3
+
+        uint16_t sar_width; // u(16)
+        uint16_t sar_height; // u(16)
+
+        uint8_t aspect_ratio_info_present_flag; // u(1)
+        uint8_t aspect_ratio_idc; // u(8)
+        uint8_t video_signal_type_present_flag; // u(1)
+        uint8_t video_format; // u(3)
+#ifdef VBP
+        uint8_t video_full_range_flag; // u(1)
+        uint8_t matrix_coefficients; // u(8)
+        uint32_t bit_rate_value;
+#endif
+
+        uint8_t colour_description_present_flag; // u(1)
+        uint8_t colour_primaries; // u(8)
+        uint8_t transfer_characteristics; // u(8)
+        uint8_t timing_info_present_flag; // u(1)
+
+        uint8_t fixed_frame_rate_flag; // u(1)
+        uint8_t low_delay_hrd_flag; // u(1)
+        uint8_t bitstream_restriction_flag; // u(1)
+        uint8_t pic_struct_present_flag;
+
+        uint8_t nal_hrd_parameters_present_flag; // u(1)
+        uint8_t nal_hrd_cpb_removal_delay_length_minus1; // u(5)
+        uint8_t
nal_hrd_dpb_output_delay_length_minus1; // u(5) + uint8_t nal_hrd_time_offset_length; // u(5) + + uint8_t nal_hrd_cpb_cnt_minus1; // ue(v), 0 to 31 + uint8_t nal_hrd_initial_cpb_removal_delay_length_minus1; // u(5) + uint8_t vcl_hrd_parameters_present_flag; // u(1) + uint8_t vcl_hrd_cpb_removal_delay_length_minus1; // u(5) + + uint8_t vcl_hrd_dpb_output_delay_length_minus1; // u(5) + uint8_t vcl_hrd_time_offset_length; // u(5) + uint8_t vcl_hrd_cpb_cnt_minus1; // ue(v), 0 to 31 + uint8_t vcl_hrd_initial_cpb_removal_delay_length_minus1; // u(5) + + /////// Here should be kept as 32-bits aligned for next structures + /// 2 structures for NAL&VCL HRD + + + } vui_seq_parameters_t_used; + + + typedef struct _vui_seq_parameters_t_not_used + { + int16_t chroma_sample_loc_type_top_field; // ue(v) + int16_t chroma_sample_loc_type_bottom_field; // ue(v) + + uint8_t overscan_info_present_flag; // u(1) + uint8_t overscan_appropriate_flag; // u(1) + + uint8_t video_full_range_flag; // u(1) + uint8_t matrix_coefficients; // u(8) + + uint8_t chroma_location_info_present_flag; // u(1) + uint8_t max_bytes_per_pic_denom; // ue(v), 0 to 16 + uint8_t max_bits_per_mb_denom; // ue(v), 0 to 16 + uint8_t log2_max_mv_length_vertical; // ue(v), 0 to 16, default to 16 + uint8_t log2_max_mv_length_horizontal; // ue(v), 0 to 16, default to 16 + + uint8_t motion_vectors_over_pic_boundaries_flag; // u(1) + + uint8_t nal_hrd_bit_rate_scale; // u(4) + uint8_t nal_hrd_cpb_size_scale; // u(4) + + uint8_t vcl_hrd_bit_rate_scale; // u(4) + uint8_t vcl_hrd_cpb_size_scale; // u(4) + + h264_hrd_param_set nal_hrd_parameters; + h264_hrd_param_set vcl_hrd_parameters; + + + } vui_seq_parameters_t_not_used, *vui_seq_parameters_t_not_used_ptr; + + +////////////////////////////////////////////// +// picture parameter set + + typedef struct _PPS_PAR + { + //int32_t DOUBLE_ALIGN valid; // indicates the parameter set is valid + + int32_t pic_init_qp_minus26; // se(v), -26 to +25 + int32_t pic_init_qs_minus26; // se(v), -26 to +25 + int32_t chroma_qp_index_offset; // se(v), -12 to +12 + int32_t second_chroma_qp_index_offset; + + uint8_t pic_parameter_set_id; // ue(v), 0 to 255, restricted to 0 to 127 by MPD_CTRL_MAXPPS = 128 + uint8_t seq_parameter_set_id; // ue(v), 0 to 31 + uint8_t entropy_coding_mode_flag; // u(1) + uint8_t pic_order_present_flag; // u(1) + + uint8_t num_slice_groups_minus1; // ue(v), shall be 0 for MP + // Below are not relevant for main profile... 
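+        // (slice-group / FMO fields; FMO is not available in Main or High profiles)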
+ uint8_t slice_group_map_type; // ue(v), 0 to 6 + uint8_t num_ref_idx_l0_active; // ue(v), 0 to 31 + uint8_t num_ref_idx_l1_active; // ue(v), 0 to 31 + + uint8_t weighted_pred_flag; // u(1) + uint8_t weighted_bipred_idc; // u(2) + uint8_t deblocking_filter_control_present_flag; // u(1) + uint8_t constrained_intra_pred_flag; // u(1) + + uint8_t redundant_pic_cnt_present_flag; // u(1) + uint8_t transform_8x8_mode_flag; + uint8_t pic_scaling_matrix_present_flag; + uint8_t pps_status_flag; + + //// Keep here with 32-bits aligned + uint8_t pic_scaling_list_present_flag[MAX_PIC_LIST_NUM]; + + qm_matrix_set pps_qm; + + uint8_t ScalingList4x4[6][16]; + uint8_t ScalingList8x8[2][64]; + uint8_t UseDefaultScalingMatrix4x4Flag[6+2]; + uint8_t UseDefaultScalingMatrix8x8Flag[6+2]; + + } pic_param_set, *pic_param_set_ptr, h264_PicParameterSet_t; + + typedef union _list_reordering_num_t + { + int32_t abs_diff_pic_num_minus1; + int32_t long_term_pic_num; + } list_reordering_num_t; + + typedef struct _h264_Ref_Pic_List_Reordering ////size = 8*33+ 1 + 33 + { + list_reordering_num_t list_reordering_num[MAX_NUM_REF_FRAMES+1]; + + uint8_t ref_pic_list_reordering_flag; + uint8_t reordering_of_pic_nums_idc[MAX_NUM_REF_FRAMES+1]; //UE + + } h264_Ref_Pic_List_Reordering_t; + + typedef enum _H264_DANGLING_TYPE + { + DANGLING_TYPE_LAST_FIELD, + DANGLING_TYPE_DPB_RESET, + DANGLING_TYPE_FIELD, + DANGLING_TYPE_FRAME, + DANGLING_TYPE_GAP_IN_FRAME + + } H264_DANGLING_TYPE; + + + typedef struct _h264_Dec_Ref_Pic_Marking //size = 17*4*2 + 17*3 + 4 + 1 + { + int32_t difference_of_pic_num_minus1[NUM_MMCO_OPERATIONS]; + int32_t long_term_pic_num[NUM_MMCO_OPERATIONS]; + + /// MMCO + uint8_t memory_management_control_operation[NUM_MMCO_OPERATIONS]; + uint8_t max_long_term_frame_idx_plus1[NUM_MMCO_OPERATIONS]; + uint8_t long_term_frame_idx[NUM_MMCO_OPERATIONS]; + uint8_t long_term_reference_flag; + + uint8_t adaptive_ref_pic_marking_mode_flag; + uint8_t dec_ref_pic_marking_count; + uint8_t no_output_of_prior_pics_flag; + + uint8_t pad; + } h264_Dec_Ref_Pic_Marking_t; + + + + typedef struct old_slice_par + { + int32_t frame_num; + int32_t pic_order_cnt_lsb; + int32_t delta_pic_order_cnt_bottom; + int32_t delta_pic_order_cnt[2]; + + uint8_t field_pic_flag; + uint8_t bottom_field_flag; + uint8_t nal_ref_idc; + uint8_t structure; + + uint8_t idr_flag; + uint8_t idr_pic_id; + uint8_t pic_parameter_id; + uint8_t status; + } OldSliceParams; + +#ifdef VBP + typedef struct _h264__pred_weight_table + { + uint8_t luma_log2_weight_denom; + uint8_t chroma_log2_weight_denom; + uint8_t luma_weight_l0_flag; + int16_t luma_weight_l0[32]; + int8_t luma_offset_l0[32]; + uint8_t chroma_weight_l0_flag; + int16_t chroma_weight_l0[32][2]; + int8_t chroma_offset_l0[32][2]; + + uint8_t luma_weight_l1_flag; + int16_t luma_weight_l1[32]; + int8_t luma_offset_l1[32]; + uint8_t chroma_weight_l1_flag; + int16_t chroma_weight_l1[32][2]; + int8_t chroma_offset_l1[32][2]; + } h264_pred_weight_table; +#endif + + typedef struct _h264_Slice_Header + { + int32_t first_mb_in_slice; //UE + int32_t frame_num; //UV + int32_t pic_order_cnt_lsb; //UV + int32_t delta_pic_order_cnt_bottom; //SE + int32_t delta_pic_order_cnt[2]; //SE + int32_t redundant_pic_cnt; //UE + + uint32_t num_ref_idx_l0_active; //UE + uint32_t num_ref_idx_l1_active; //UE + + int32_t slice_qp_delta; //SE + int32_t slice_qs_delta; //SE + int32_t slice_alpha_c0_offset_div2; //SE + int32_t slice_beta_offset_div2; //SE + int32_t slice_group_change_cycle; //UV + +#ifdef VBP + h264_pred_weight_table 
sh_predwttbl;
+#endif
+
+        ///// Flags or IDs
+        //h264_ptype_t slice_type; //UE
+        uint8_t slice_type;
+        uint8_t nal_ref_idc;
+        uint8_t structure;
+        uint8_t pic_parameter_id; //UE
+
+        uint8_t field_pic_flag;
+        uint8_t bottom_field_flag;
+        uint8_t idr_flag; //UE
+        uint8_t idr_pic_id; //UE
+
+        uint8_t sh_error;
+        uint8_t cabac_init_idc; //UE
+        uint8_t sp_for_switch_flag;
+        uint8_t disable_deblocking_filter_idc; //UE
+
+        uint8_t direct_spatial_mv_pred_flag;
+        uint8_t num_ref_idx_active_override_flag;
+        int16_t current_slice_nr;
+
+        //// For Ref list reordering
+        h264_Dec_Ref_Pic_Marking_t sh_dec_refpic;
+        h264_Ref_Pic_List_Reordering_t sh_refpic_l0;
+        h264_Ref_Pic_List_Reordering_t sh_refpic_l1;
+
+    } h264_Slice_Header_t;
+
+
+#define MAX_USER_DATA_SIZE 1024
+    typedef struct _h264_user_data_t
+    {
+        h264_sei_payloadtype user_data_type;
+
+        int32_t user_data_id;
+        int32_t dsn;
+        int32_t user_data_size;
+        int32_t user_data[MAX_USER_DATA_SIZE>>2];
+    } h264_user_data_t;
+
+// SPS DISPLAY parameters: seq_param_set_disp, *seq_param_set_disp_ptr;
+    typedef struct _SPS_DISP
+    {
+        ///// VUI info
+        vui_seq_parameters_t_used vui_seq_parameters; //size =
+
+        ///// Resolution
+        int16_t pic_width_in_mbs_minus1;
+        int16_t pic_height_in_map_units_minus1;
+
+        ///// Cropping
+        int16_t frame_crop_rect_left_offset;
+        int16_t frame_crop_rect_right_offset;
+
+        int16_t frame_crop_rect_top_offset;
+        int16_t frame_crop_rect_bottom_offset;
+
+        ///// Flags
+        uint8_t frame_mbs_only_flag;
+        uint8_t mb_adaptive_frame_field_flag;
+        uint8_t direct_8x8_inference_flag;
+        uint8_t frame_cropping_flag;
+#ifdef VBP
+        uint8_t separate_colour_plane_flag;
+#endif
+
+        uint16_t vui_parameters_present_flag;
+        uint16_t chroma_format_idc;
+    } seq_param_set_disp, *seq_param_set_disp_ptr;
+
+
+////SPS: seq_param_set, *seq_param_set_ptr;
+
+    typedef struct _SPS_PAR_USED
+    {
+        uint32_t is_updated;
+
+        /////////// Required for display section //////////////////////////
+        seq_param_set_disp sps_disp;
+
+        int32_t expectedDeltaPerPOCCycle;
+        int32_t offset_for_non_ref_pic; // se(v), -2^31 to (2^31)-1, 32-bit integer
+        int32_t offset_for_top_to_bottom_field; // se(v), -2^31 to (2^31)-1, 32-bit integer
+
+        /////////// IDC
+        uint8_t profile_idc; // u(8), 77 for MP
+        uint8_t constraint_set_flags; // bit 0 to 3 for set0 to set3
+        uint8_t level_idc; // u(8)
+        uint8_t seq_parameter_set_id; // ue(v), 0 to 31
+
+
+        uint8_t pic_order_cnt_type; // ue(v), 0 to 2
+        uint8_t log2_max_frame_num_minus4; // ue(v), 0 to 12
+        uint8_t log2_max_pic_order_cnt_lsb_minus4; // ue(v), 0 to 12
+        uint8_t num_ref_frames_in_pic_order_cnt_cycle; // ue(v), 0 to 255
+
+        //int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE]; // se(v), -2^31 to (2^31)-1, 32-bit integer
+        uint8_t num_ref_frames; // ue(v), 0 to 16,
+        uint8_t gaps_in_frame_num_value_allowed_flag; // u(1)
+        // This is my addition, we should calculate this once and leave it with the sps
+        // as opposed to calculating it each time in h264_hdr_decoding_POC()
+
+        uint8_t delta_pic_order_always_zero_flag; // u(1)
+        uint8_t residual_colour_transform_flag;
+
+        uint8_t bit_depth_luma_minus8;
+        uint8_t bit_depth_chroma_minus8;
+        uint8_t lossless_qpprime_y_zero_flag;
+        uint8_t seq_scaling_matrix_present_flag;
+
+        uint8_t seq_scaling_list_present_flag[MAX_PIC_LIST_NUM]; //0-7
+
+        //// Combine the scaling matrix to word ( 24 + 32)
+        uint8_t ScalingList4x4[6][16];
+        uint8_t ScalingList8x8[2][64];
+        uint8_t UseDefaultScalingMatrix4x4Flag[6];
+        uint8_t UseDefaultScalingMatrix8x8Flag[6];
+
+    } seq_param_set_used, *seq_param_set_used_ptr;
+
+
+    typedef struct _SPS_PAR_ALL
+    {
+
+        seq_param_set_used sps_par_used;
+        vui_seq_parameters_t_not_used sps_vui_par_not_used;
+
+    } seq_param_set_all, *seq_param_set_all_ptr;
+
+
+///// Image control parameter////////////
+    typedef struct _h264_img_par
+    {
+        int32_t frame_num; // decoding num of current frame
+        int32_t frame_count; // count of decoded frames
+        int32_t current_slice_num;
+        int32_t gaps_in_frame_num;
+
+        // POC decoding
+        int32_t num_ref_frames_in_pic_order_cnt_cycle;
+        int32_t delta_pic_order_always_zero_flag;
+        int32_t offset_for_non_ref_pic;
+        int32_t offset_for_top_to_bottom_field;
+
+        int32_t pic_order_cnt_lsb;
+        int32_t pic_order_cnt_msb;
+        int32_t delta_pic_order_cnt_bottom;
+        int32_t delta_pic_order_cnt[2];
+
+        int32_t PicOrderCntMsb;
+        int32_t CurrPicOrderCntMsb;
+        int32_t PrevPicOrderCntLsb;
+
+        int32_t FrameNumOffset;
+
+        int32_t PreviousFrameNum;
+        int32_t PreviousFrameNumOffset;
+
+        int32_t toppoc;
+        int32_t bottompoc;
+        int32_t framepoc;
+        int32_t ThisPOC;
+
+        //int32_t sei_freeze_this_image;
+
+        ///////////////////// Resolutions
+        int32_t PicWidthInMbs;
+        int32_t FrameHeightInMbs;
+
+        ///////////////////// MMCO
+        uint8_t last_has_mmco_5;
+        uint8_t curr_has_mmco_5;
+
+        /////////////////// Flags
+        uint8_t g_new_frame;
+        uint8_t g_new_pic;
+
+        uint8_t structure;
+        uint8_t second_field; // Set to one if this is the second field of a set of paired fields...
+        uint8_t field_pic_flag;
+        uint8_t last_pic_bottom_field;
+
+        uint8_t bottom_field_flag;
+        uint8_t MbaffFrameFlag;
+        uint8_t no_output_of_prior_pics_flag;
+        uint8_t long_term_reference_flag;
+
+        uint8_t skip_this_pic;
+        uint8_t pic_order_cnt_type;
+        // Recovery
+        uint8_t recovery_point_found;
+        uint8_t used_for_reference;
+    } h264_img_par;
+
+
+    typedef struct _h264_slice_reg_data
+    {
+        uint32_t h264_bsd_slice_p1; // 0x150
+        //uint32_t h264_mpr_list0[8]; // from 0x380 to 0x3BC
+        uint32_t h264_bsd_slice_p2; // 0x154
+        uint32_t h264_bsd_slice_start; // 0x158
+
+    } h264_slice_data;
+
+
+    typedef struct _h264_pic_data
+    {
+        uint32_t h264_dpb_init; // 0x40
+        //info For current pic
+        uint32_t h264_cur_bsd_img_init; // 0x140
+        uint32_t h264_cur_mpr_tf_poc; // 0x300
+        uint32_t h264_cur_mpr_bf_poc; // 0x304
+
+        //info For frames in DPB
+        //uint32_t h264_dpb_bsd_img_init[16]; //0x140
+        //uint32_t h264_dpb_mpr_tf_poc[16]; // 0x300
+        //uint32_t h264_dpb_mpr_bf_poc[16]; // 0x304
+    } h264_pic_data;
+
+    enum h264_workload_item_type
+    {
+        VIDDEC_WORKLOAD_H264_SLICE_REG = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
+        VIDDEC_WORKLOAD_H264_PIC_REG,
+        VIDDEC_WORKLOAD_H264_DPB_FRAME_POC,
+        VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET,
+        VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET,
+        VIDDEC_WORKLOAD_H264_PWT_ES_BYTES,
+        VIDDEC_WORKLOAD_H264_SCALING_MATRIX,
+        VIDDEC_WORKLOAD_H264_DEBUG
+    };
+
+
+
+////////////////////////////////////////////
+    /* Full Info set*/
+////////////////////////////////////////////
+    typedef struct _h264_Info
+    {
+
+        h264_DecodedPictureBuffer dpb;
+
+        //// Structures
+        //// need to guarantee active_SPS and active_PPS start at 4-byte-aligned addresses
+        seq_param_set_used active_SPS;
+        pic_param_set active_PPS;
+
+
+        h264_Slice_Header_t SliceHeader;
+        OldSliceParams old_slice;
+        sei_info sei_information;
+
+        h264_img_par img;
+
+        uint32_t SPS_PADDR_GL;
+        uint32_t PPS_PADDR_GL;
+        uint32_t OFFSET_REF_FRAME_PADDR_GL;
+        uint32_t TMP_OFFSET_REFFRM_PADDR_GL;
+
+        uint32_t h264_list_replacement;
+
+        uint32_t h264_pwt_start_byte_offset;
+        uint32_t h264_pwt_start_bit_offset;
+        uint32_t h264_pwt_end_byte_offset;
uint32_t h264_pwt_end_bit_offset; + uint32_t h264_pwt_enabled; + + uint32_t sps_valid; + + uint8_t slice_ref_list0[32]; + uint8_t slice_ref_list1[32]; + + + uint8_t qm_present_list; + //h264_NAL_Unit_t + uint8_t nal_unit_type; + uint8_t old_nal_unit_type; + uint8_t got_start; + + //workload + uint8_t push_to_cur; + uint8_t Is_first_frame_in_stream; + uint8_t Is_SPS_updated; + uint8_t number_of_first_au_info_nal_before_first_slice; + + uint8_t is_frame_boundary_detected_by_non_slice_nal; + uint8_t is_frame_boundary_detected_by_slice_nal; + uint8_t is_current_workload_done; + uint8_t primary_pic_type_plus_one; //AUD---[0,7] + + //Error handling + uint8_t sei_rp_received; + uint8_t last_I_frame_idc; + uint8_t sei_b_state_ready; + uint8_t gop_err_flag; + + + uint32_t wl_err_curr; + uint32_t wl_err_next; +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + uint32_t sw_bail; +#endif +#endif + } h264_Info; + + + + struct h264_viddec_parser + { + uint32_t sps_pps_ddr_paddr; + h264_Info info; + }; + +#ifdef __cplusplus +} +#endif + +#ifdef USE_AVC_SHORT_FORMAT +#define MAX_OP 16 + +typedef struct _dec_ref_pic_marking_t { + union { + uint8_t flags; + struct { + uint8_t idr_pic_flag:1; + uint8_t no_output_of_prior_pics_flag:1; + uint8_t long_term_reference_flag:1; + uint8_t adaptive_ref_pic_marking_mode_flag:1; + }; + }; + struct { + uint8_t memory_management_control_operation; + union { + struct { + uint8_t difference_of_pic_nums_minus1; + } op1; + struct { + uint8_t long_term_pic_num; + } op2; + struct { + uint8_t difference_of_pic_nums_minus1; + uint8_t long_term_frame_idx; + } op3; + struct { + uint8_t max_long_term_frame_idx_plus1; + } op4; + struct { + uint8_t long_term_frame_idx; + } op6; + }; + } op[MAX_OP]; +} dec_ref_pic_marking_t; + + +typedef struct _slice_header_t { + uint8_t nal_unit_type; + uint8_t pps_id; + uint8_t padding; + union { + uint8_t flags; + struct { + uint8_t field_pic_flag:1; + uint8_t bottom_field_flag:1; + }; + }; + uint32_t first_mb_in_slice; + uint32_t frame_num; + uint16_t idr_pic_id; + uint16_t pic_order_cnt_lsb; + int32_t delta_pic_order_cnt[2]; + int32_t delta_pic_order_cnt_bottom; +} slice_header_t; + + + + +typedef struct _vbp_h264_sliceheader { + slice_header_t slice_header; + dec_ref_pic_marking_t ref_pic_marking; +} vbp_h264_sliceheader; + +#endif + + + +#endif //_H264_H_ + + diff --git a/mixvbp/vbp_plugin/h264/include/h264parse.h b/mixvbp/vbp_plugin/h264/include/h264parse.h new file mode 100755 index 0000000..2e7b817 --- /dev/null +++ b/mixvbp/vbp_plugin/h264/include/h264parse.h @@ -0,0 +1,179 @@ +#ifndef __H264PARSE_H_ +#define __H264PARSE_H_ + +#include "h264.h" + +#ifndef MFD_FIRMWARE +#define true 1 +#define false 0 +#endif + +//////////////////////////////////////////////////////////////////// +// The following part is only for Parser Debug +/////////////////////////////////////////////////////////////////// + +#ifdef __cplusplus +extern "C" { +#endif + + + enum h264_debug_point_id + { + WARNING_H264_GENERAL = 0xff000000, + WARNING_H264_DPB, + WARNING_H264_REFLIST, + WARNING_H264_SPS, + WARNING_H264_PPS, + WARNING_H264_SEI, + WARNING_H264_VCL, + + ERROR_H264_GENERAL = 0xffff0000, + ERROR_H264_DPB, + ERROR_H264_REFLIST, + ERROR_H264_SPS, + ERROR_H264_PPS, + ERROR_H264_SEI, + ERROR_H264_VCL + }; + + static inline void MFD_PARSER_DEBUG(int debug_point_id) + { +#ifdef H264_MFD_DEBUG + + int p1,p2,p3,p4,p5,p6; + + p1 = 0x0BAD; + p2 = 0xC0DE; + p3 = debug_point_id; + p4=p5=p6 = 0; + + DEBUG_WRITE(p1,p2,p3,p4,p5,p6); +#endif + + debug_point_id = debug_point_id; + + 
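+        // (the self-assignment above only marks the parameter as used, so
+        // builds without H264_MFD_DEBUG do not emit unused-parameter warnings)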
return; + } + + + + +//////////////////////////////////////////////////////////////////// +///////////////////////////// Init functions +//////////////////////////////////////////////////////////////////// + extern void h264_init_old_slice(h264_Info* pInfo); + extern void h264_init_img(h264_Info* pInfo); + extern void h264_init_Info(h264_Info* pInfo); + extern void h264_init_Info_under_sps_pps_level(h264_Info* pInfo); + extern void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem); + + extern void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader); + extern void h264_sei_stream_initialise (h264_Info* pInfo); + extern void h264_update_img_info(h264_Info * pInfo ); + extern void h264_update_frame_type(h264_Info * pInfo ); + + extern int32_t h264_check_previous_frame_end(h264_Info * pInfo); + + +//////////////////////////////////////////////////////////////////// +///////////////////////////// bsd functions +//////////////////////////////////////////////////////////////////// + extern uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo); +////// VLE and bit operation + extern uint32_t h264_get_codeNum(void *parent,h264_Info* pInfo); + extern int32_t h264_GetVLCElement(void *parent,h264_Info* pInfo, uint8_t bIsSigned); + + + +//////////////////////////////////////////////////////////////////// +///////////////////////////// parse functions +//////////////////////////////////////////////////////////////////// + +//NAL + extern h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc); + +////// Slice header + extern h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Slice_Header_1(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + + +////// SPS + extern h264_Status h264_Parse_SeqParameterSet(void *parent, h264_Info * pInfo,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame); +//extern h264_Status h264_Parse_SeqParameterSet_Extension(void *parent, h264_Info * pInfo); + extern h264_Status h264_Parse_PicParameterSet(void *parent, h264_Info * pInfo,h264_PicParameterSet_t* PictureParameterSet); + +////// SEI functions + h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void *parent,h264_Info* pInfo); + h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtype payloadType, int32_t payloadSize); + +////// + extern h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t *UseDefaultScalingMatrix, h264_Info* pInfo); + extern h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Pred_Weight_Table(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); + + + +///// Mem functions + extern void* h264_memset( void* buf, uint32_t c, uint32_t num ); + extern void* h264_memcpy( void* dest, void* src, uint32_t num ); + + extern void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId); + 
extern void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId);
+
+    extern void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId);
+    extern void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId);
+
+    extern void h264_Parse_Copy_Offset_Ref_Frames_To_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId);
+    extern void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId);
+    extern uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId);
+    extern void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId);
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// workload functions
+////////////////////////////////////////////////////////////////////
+
+    extern void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo );
+
+    extern void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo );
+
+    extern void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo );
+    extern void h264_parse_emit_eos( void *parent, h264_Info *pInfo );
+#ifdef __cplusplus
+}
+#endif
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// utils functions
+////////////////////////////////////////////////////////////////////
+extern int32_t h264_is_new_picture_start(h264_Info* pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice);
+extern int32_t h264_is_second_field(h264_Info * pInfo);
+///// Math functions
+uint32_t ldiv_mod_u(uint32_t a, uint32_t b, uint32_t * mod);
+uint32_t mult_u(uint32_t var1, uint32_t var2);
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// utils functions outside h264
+////////////////////////////////////////////////////////////////////
+
+extern void *memset(void *s, int32_t c, uint32_t n);
+extern void *memcpy(void *dest, const void *src, uint32_t n);
+extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+extern int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits);
+extern int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// Second level parse functions
+////////////////////////////////////////////////////////////////////
+
+#endif ////__H264PARSE_H_
+
+
diff --git a/mixvbp/vbp_plugin/h264/include/h264parse_dpb.h b/mixvbp/vbp_plugin/h264/include/h264parse_dpb.h
new file mode 100755
index 0000000..f7935a4
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/include/h264parse_dpb.h
@@ -0,0 +1,109 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: H.264 DPB control header.
+//
+*/
+
+
+#ifndef _H264_DPB_CTL_H_
+#define _H264_DPB_CTL_H_
+
+
+#include "h264.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// Parser control functions
+////////////////////////////////////////////////////////////////////
+
+///// Reference list
+    extern void h264_dpb_update_ref_lists(h264_Info * pInfo);
+    extern void h264_dpb_reorder_lists(h264_Info * pInfo);
+
+    extern void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb,int32_t NonExisting);
+
+///// POC
+    extern void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num);
+    extern void h264_hdr_post_poc(h264_Info* pInfo,int32_t NonExisting, int32_t frame_num, int32_t use_old);
+
+///// DPB buffer management
+    extern void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb);
+
+    extern void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc);
+    extern void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc);
+    extern void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx);
+    extern void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx, int32_t fs_idc, int32_t polarity);
+    extern void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, int32_t picNumX);
+    extern void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc);
+
+    extern void h264_dpb_update_queue_dangling_field(h264_Info * pInfo);
+    extern void h264_dpb_is_used_for_reference(h264_DecodedPictureBuffer * p_dpb, int32_t * flag);
+
+
+    extern void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb,int32_t index);
+    extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames);
+
+    extern void h264_dpb_idr_memory_management (h264_Info * pInfo,
+                                                seq_param_set_used_ptr active_sps,
+                                                int32_t no_output_of_prior_pics_flag);
+
+    extern void h264_dpb_init_frame_store(h264_Info * pInfo);
+    extern void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHeightInMbs,
+                                   int32_t SizeChange, int32_t no_output_of_prior_pics_flag);
+
+    extern void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo);
+
+    extern int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting);
+
+    extern void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, int32_t *pos);
+    extern void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t * flag);
+
+    extern void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb,
+                                                          int32_t NonExisting,
+                                                          int32_t num_ref_frames);
+    extern int32_t h264_dpb_queue_update(h264_Info * pInfo,
+                                         int32_t push,
+                                         int32_t direct,
+                                         int32_t frame_request,
+                                         int32_t num_ref_frames);
+
+    extern void h264_dpb_split_field (h264_DecodedPictureBuffer *p_dpb, h264_Info * pInfo);
+    extern void h264_dpb_combine_field(h264_DecodedPictureBuffer *p_dpb, int32_t use_old);
+
+    extern void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,
+                                               int32_t used_for_reference,
+                                               int32_t add2dpb,
+                                               int32_t NonExisting,
+                                               int32_t use_old);
+
+    extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,
+                                                       int32_t NonExisting,
+                                                       int32_t use_old);
+
+    extern void h264_dpb_adaptive_memory_management (h264_Info * pInfo);
+
+    extern int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo,
+                                                      int32_t direct, int32_t request, int32_t num_ref_frames);
+
+    extern void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx);
+    extern void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int32_t * existing);
+
+//////////////////////////////////////////////////////////// Globals
+
+#ifdef __cplusplus
+}
+#endif
+
+
+#endif //_H264_DPB_CTL_H_
+
+
diff --git a/mixvbp/vbp_plugin/h264/include/h264parse_sei.h b/mixvbp/vbp_plugin/h264/include/h264parse_sei.h
new file mode 100755
index 0000000..f0a591d
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/include/h264parse_sei.h
@@ -0,0 +1,314 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: H.264 SEI header.
+//
+*/
+
+
+#ifndef _H264_SEI_H_
+#define _H264_SEI_H_
+
+#include "h264.h"
+
+
+//defines for SEI
+#define MAX_CPB_CNT 32
+#define MAX_NUM_CLOCK_TS 3
+#define MAX_PAN_SCAN_CNT 3
+#define MAX_NUM_SPARE_PICS 16
+#define MAX_SUB_SEQ_LAYERS 256
+#define MAX_SLICE_GRPS 1 // for high profile
+#define NUM_SPS 32
+#define MAX_NUM_REF_SUBSEQS 256
+
+
+#define SEI_SCAN_FORMAT_INTERLACED 0x1
+#define SEI_SCAN_FORMAT_PROGRESSIVE 0x3
+#define SEI_SCAN_FORMAT_VALID(r) (r&0x1)
+#define SEI_SCAN_FORMAT(r) ((r&0x2)>>1)
+
+typedef enum
+{
+    SEI_BUF_PERIOD = 0,
+    SEI_PIC_TIMING,
+    SEI_PAN_SCAN,
+    SEI_FILLER_PAYLOAD,
+    SEI_REG_USERDATA,
+    SEI_UNREG_USERDATA,
+    SEI_RECOVERY_POINT,
+    SEI_DEC_REF_PIC_MARKING_REP,
+    SEI_SPARE_PIC,
+    SEI_SCENE_INFO,
+    SEI_SUB_SEQ_INFO,
+    SEI_SUB_SEQ_LAYER,
+    SEI_SUB_SEQ,
+    SEI_FULL_FRAME_FREEZE,
+    SEI_FULL_FRAME_FREEZE_RELEASE,
+    SEI_FULL_FRAME_SNAPSHOT,
+    SEI_PROGRESSIVE_SEGMENT_START,
+    SEI_PROGRESSIVE_SEGMENT_END,
+    SEI_MOTION_CONSTRAINED_SLICE_GRP_SET,
+    SEI_FILM_GRAIN_CHARACTERISTICS,
+    SEI_DEBLK_FILTER_DISPLAY_PREFERENCE,
+    SEI_STEREO_VIDEO_INFO,
+    SEI_RESERVED,
+} h264_sei_payloadtype;
+
+
+
+typedef struct _h264_SEI_buffering_period
+{
+    int32_t seq_param_set_id;
+    int32_t initial_cpb_removal_delay_nal;
+    int32_t initial_cpb_removal_delay_offset_nal;
+    int32_t initial_cpb_removal_delay_vcl;
+    int32_t initial_cpb_removal_delay_offset_vcl;
+
+} h264_SEI_buffering_period_t;
+
+typedef struct _h264_SEI_pic_timing
+{
+    int32_t cpb_removal_delay;
+    int32_t dpb_output_delay;
+    int32_t pic_struct;
+} h264_SEI_pic_timing_t;
+
+#if 0
+int32_t clock_timestamp_flag[MAX_NUM_CLOCK_TS];
+int32_t ct_type[MAX_NUM_CLOCK_TS];
+int32_t nuit_field_based_flag[MAX_NUM_CLOCK_TS];
+int32_t counting_type[MAX_NUM_CLOCK_TS];
+int32_t full_timestamp_flag[MAX_NUM_CLOCK_TS];
+int32_t discontinuity_flag[MAX_NUM_CLOCK_TS];
+int32_t cnt_dropped_flag[MAX_NUM_CLOCK_TS];
+int32_t n_frames[MAX_NUM_CLOCK_TS];
+int32_t seconds_value[MAX_NUM_CLOCK_TS];
+int32_t minutes_value[MAX_NUM_CLOCK_TS];
+int32_t hours_value[MAX_NUM_CLOCK_TS];
+int32_t seconds_flag[MAX_NUM_CLOCK_TS];
+int32_t minutes_flag[MAX_NUM_CLOCK_TS];
+int32_t hours_flag[MAX_NUM_CLOCK_TS];
+int32_t time_offset[MAX_NUM_CLOCK_TS];
+
+#endif
+
+typedef struct _h264_SEI_pan_scan_rectangle
+{
+    int32_t pan_scan_rect_id;
+    int32_t pan_scan_rect_cancel_flag;
+    int32_t pan_scan_cnt_minus1;
+    int32_t
pan_scan_rect_left_offset[MAX_PAN_SCAN_CNT]; + int32_t pan_scan_rect_right_offset[MAX_PAN_SCAN_CNT]; + int32_t pan_scan_rect_top_offset[MAX_PAN_SCAN_CNT]; + int32_t pan_scan_rect_bottom_offset[MAX_PAN_SCAN_CNT]; + int32_t pan_scan_rect_repetition_period; +} h264_SEI_pan_scan_rectangle_t; + +typedef struct _h264_SEI_filler_payload +{ + int32_t ff_byte; +} h264_SEI_filler_payload_t; + +typedef struct _h264_SEI_userdata_registered +{ + int32_t itu_t_t35_country_code; + int32_t itu_t_t35_country_code_extension_byte; + int32_t itu_t_t35_payload_byte; +} h264_SEI_userdata_registered_t; + +typedef struct _h264_SEI_userdata_unregistered +{ + int32_t uuid_iso_iec_11578[4]; + int32_t user_data_payload_byte; +} h264_SEI_userdata_unregistered_t; + +typedef struct _h264_SEI_recovery_point +{ + int32_t recovery_frame_cnt; + int32_t exact_match_flag; + int32_t broken_link_flag; + int32_t changing_slice_group_idc; +} h264_SEI_recovery_point_t; + +typedef struct _h264_SEI_decoded_ref_pic_marking_repetition +{ + int32_t original_idr_flag; + int32_t original_frame_num; + int32_t orignal_field_pic_flag; + int32_t original_bottom_field_pic_flag; + int32_t no_output_of_prior_pics_flag; + int32_t long_term_reference_flag; + int32_t adaptive_ref_pic_marking_mode_flag; + int32_t memory_management_control_operation; //UE + int32_t difference_of_pics_num_minus1; //UE + int32_t long_term_pic_num; //UE + int32_t long_term_frame_idx; //UE + int32_t max_long_term_frame_idx_plus1; //UE +} h264_SEI_decoded_ref_pic_marking_repetition_t; + +typedef struct _h264_SEI_spare_picture +{ + int32_t target_frame_num; + int32_t spare_field_flag; + int32_t target_bottom_field_flag; + int32_t num_spare_pics_minus1; + int32_t delta_spare_frame_num[MAX_NUM_SPARE_PICS]; + int32_t spare_bottom_field_flag[MAX_NUM_SPARE_PICS]; + int32_t spare_area_idc[MAX_NUM_SPARE_PICS]; // not complete +} h264_SEI_spare_picture_t; + +typedef struct _h264_SEI_scene_info +{ + int32_t scene_info_present_flag; + int32_t scene_id; + int32_t scene_transitioning_type; + int32_t second_scene_id; +} h264_SEI_scene_info_t; + +typedef struct _h264_SEI_sub_sequence_info +{ + int32_t sub_seq_layer_num; + int32_t sub_seq_id; + int32_t first_ref_pic_flag; + int32_t leading_non_ref_pic_flag; + int32_t last_pic_flag; + int32_t sub_seq_frame_num_flag; + int32_t sub_seq_frame_num; +} h264_SEI_sub_sequence_info_t; + +typedef struct _h264_SEI_sub_sequence_layer +{ + int32_t num_sub_seq_layers_minus1; + int32_t accurate_statistics_flag[MAX_SUB_SEQ_LAYERS]; + int32_t average_bit_rate[MAX_SUB_SEQ_LAYERS]; + int32_t average_frame_rate[MAX_SUB_SEQ_LAYERS]; +} h264_SEI_sub_sequence_layer_t; + +typedef struct _h264_SEI_sub_sequence +{ + int32_t sub_seq_layer_num; + int32_t sub_seq_id; + int32_t duration_flag; + int32_t sub_seq_duration; + int32_t average_rate_flag; + int32_t average_statistics_flag; + int32_t average_bit_rate; + int32_t average_frame_rate; + int32_t num_referenced_subseqs; + int32_t ref_sub_seq_layer_num; + int32_t ref_sub_seq_id; + int32_t ref_sub_seq_direction; +} h264_SEI_sub_sequence_t; + +typedef struct _h264_SEI_full_frame_freeze +{ + int32_t full_frame_freeze_repetition_period; +} h264_SEI_full_frame_freeze_t; + +typedef struct _h264_SEI_full_frame_snapshot +{ + int32_t snapshot_id; +} h264_SEI_full_frame_snapshot_t; + +typedef struct _h264_SEI_progressive_segment_start +{ + int32_t progressive_refinement_id; + int32_t num_refinement_steps_minus1; +} h264_SEI_progressive_segment_start_t; + +typedef struct _h264_SEI_progressive_segment_end +{ + int32_t 
progressive_refinement_id; +} h264_SEI_progressive_segment_end_t; + +typedef struct _h264_SEI_motion_constrained_slice_group +{ + int32_t num_slice_groups_in_set_minus1; + int32_t slice_group_id[MAX_SLICE_GRPS]; + int32_t exact_sample_value_match_flag; + int32_t pan_scan_rect_flag; + int32_t pan_scan_rect_id; +} h264_SEI_motion_constrained_slice_group_t; + +typedef struct _h264_SEI_deblocking_filter_display_pref +{ + int32_t devlocking_display_preference_cancel_flag; + int32_t display_prior_to_deblocking_preferred_flag; + int32_t dec_frame_buffering_constraint_flag; + int32_t deblocking_display_preference_repetition_period; +} h264_SEI_deblocking_filter_display_pref_t; + +typedef struct _h264_SEI_stereo_video_info +{ + int32_t field_views_flag; + int32_t top_field_is_left_view_flag; + int32_t curent_frame_is_left_view_flag; + int32_t next_frame_is_second_view_flag; + int32_t left_view_self_contained_flag; + int32_t right_view_self_contained_flag; +} h264_SEI_stereo_video_info_t; + +typedef struct _h264_SEI_reserved +{ + int32_t reserved_sei_message_payload_byte; +} h264_SEI_reserved_t; + + +//////////////////////////// +// SEI Info +///////////////////////////// + +typedef struct sei_info +{ + int32_t recovery_point; + int32_t recovery_frame_num; + + int32_t capture_POC; + int32_t freeze_POC; + int32_t release_POC; // The POC which when reached will allow display update to re-commence + int32_t disp_frozen; // Indicates display is currently frozen + int32_t freeze_rep_period; + int32_t recovery_frame_cnt; + int32_t capture_fn; + int32_t recovery_fn; + int32_t broken_link; + int32_t scan_format; + int32_t broken_link_pic; +} sei_info, *sei_info_ptr; + +/*typedef struct _h264_SEI +{ + h264_SEI_buffering_period_t buf_period; + h264_SEI_pic_timing_t pic_timing; + h264_SEI_pan_scan_rectangle_t pan_scan_timing; + h264_SEI_filler_payload_t filler_payload; + h264_SEI_userdata_registered_t userdata_reg; + h264_SEI_userdata_unregistered_t userdata_unreg; + h264_SEI_recovery_point_t recovery_point; + h264_SEI_decoded_ref_pic_marking_repetition_t dec_ref_pic_marking_rep; + h264_SEI_spare_picture_t spare_pic; + h264_SEI_scene_info_t scene_info; + h264_SEI_sub_sequence_info_t sub_sequence_info; + h264_SEI_sub_sequence_layer_t sub_sequence_layer; + h264_SEI_sub_sequence_t sub_sequence; + h264_SEI_full_frame_snapshot_t full_frame_snapshot; + h264_SEI_full_frame_t full_frame; + h264_SEI_progressive_segment_start_t progressive_segment_start; + h264_SEI_progressive_segment_end_t progressive_segment_end; + h264_SEI_motion_constrained_slice_group_t motion_constrained_slice_grp; + h264_SEI_deblocking_filter_display_pref_t deblk_filter_display_pref; + h264_SEI_stereo_video_info_t stereo_video_info; + h264_SEI_reserved_t reserved; +}h264_SEI_t; +*/ + + +#endif //_H264_SEI_H_ + + diff --git a/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c new file mode 100755 index 0000000..b564d8b --- /dev/null +++ b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c @@ -0,0 +1,574 @@ +#include "viddec_parser_ops.h" +#include "h264.h" +#include "h264parse.h" +#include "viddec_fw_item_types.h" +#include "h264parse_dpb.h" + + +extern void* h264_memcpy( void* dest, void* src, uint32_t num ); + +uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap) +{ + if (swap != 0) + { + //g_warning("swap copying is not implemented."); + } + + if (to_ddr) + { + memcpy((void*)ddr_addr, (void*)local_addr, size); + } + else + { + memcpy((void*)local_addr, 
(void*)ddr_addr, size);
+    }
+
+    return (0);
+}
+
+#if 0
+void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo )
+{
+
+    if (pInfo->Is_first_frame_in_stream) //new stream, fill new frame in cur
+    {
+
+        pInfo->img.g_new_frame = 0;
+        pInfo->Is_first_frame_in_stream =0;
+        pInfo->push_to_cur = 1;
+
+    }
+    else // move to next for new frame
+    {
+        pInfo->push_to_cur = 0;
+    }
+
+
+
+    //fill dpb management info
+
+
+
+
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+    pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+
+}
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
+{
+    ////
+    //// Now we can flush out all frames in DPB for display
+    if (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].is_used != 3)
+    {
+        h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc); //, DANGLING_TYPE_GAP_IN_FRAME
+    }
+
+    h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+    h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+}
+
+void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo )
+{
+    pInfo->qm_present_list=0;
+}
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{
+#if 1
+    uint32_t i, nitems=0;
+
+
+    if ( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+    {
+        if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+            for (i=0; i<nitems; i++)
+            {
+                if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+        else
+        {
+            nitems = pInfo->dpb.listXsize[0];
+
+            for (i=0; i<nitems; i++)
+            {
+                if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+
+    }
+    else
+    {
+        nitems =0;
+    }
+#endif
+}
+#else
+
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t wi;
+    h264_slice_data slice_data = {};
+
+    uint32_t i=0, nitems=0, data=0;
+    uint32_t bits_offset =0, byte_offset =0;
+    uint8_t is_emul =0;
+
+
+    ////////////////////// Update Reference list //////////////////
+    if ( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+    {
+        if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+            for (i=0; i<nitems; i++)
+            {
+                if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+        else
+        {
+            nitems = pInfo->dpb.listXsize[0];
+
+            for (i=0; i<nitems; i++)
+            {
+                if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+
+    }
+    else
+    {
+        nitems =0;
+    }
+    /////fill ref list 0
+    // h264_parse_emit_ref_list(parent, pInfo, 0);
+
+    /////fill ref list 1
+    //h264_parse_emit_ref_list(parent, pInfo, 1);
+
+    ///////////////////////////////////// Slice Data ////////////////////////////////
+    // h264_fill_slice_data(pInfo, &slice_data);
+
+    wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_H264_SLICE_REG);
+
+    wi.data.data_offset = slice_data.h264_bsd_slice_start;
+    wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1;
+    wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2;
+
+    if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+    {
+        // viddec_pm_append_workitem( parent , &wi);
+    }
+    else
+    {
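+        // Routing note: once the new-frame boundary has been queued to the
+        // *next* workload (push_to_cur == 0), every item for this picture
+        // must use the _next append variant so it stays with its frame.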
// viddec_pm_append_workitem_next( parent , &wi); + } + + + ///////////////////////////predict weight table item and data if have/////////////////////////// + if (pInfo->h264_pwt_enabled) + { + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET; + wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1; + wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset; + wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset; + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + // viddec_pm_append_workitem( parent , &wi); + + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; + wi.es.es_flags = 0; + // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1); + } + else + { + // viddec_pm_append_workitem_next( parent , &wi); + + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; + wi.es.es_flags = 0; + // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0); + } + } + + + ////////////////////////////////// Update ES Buffer for Slice /////////////////////// + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset); + + if (pInfo->active_PPS.entropy_coding_mode_flag) + { + if (0!=bits_offset) { + data = data; // fix compilation warning + // don't skip byte-aligned bits as those bits are actually + // part of slice_data + //viddec_pm_get_bits(parent, &data, 8-bits_offset); + } + } + else + { + if (0!=bits_offset) { + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET; + wi.data.data_offset = bits_offset; + wi.data.data_payload[0]=0; + wi.data.data_payload[1]=0; + + if (pInfo->push_to_cur) { //cur is empty, fill new frame in cur + // viddec_pm_append_workitem( parent , &wi); + } + else { + //viddec_pm_append_workitem_next( parent , &wi); + } + } + } + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + //viddec_pm_append_pixeldata( parent ); + } + else + { + //viddec_pm_append_pixeldata_next( parent); + } + + return; +} + + +void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) +{ + + viddec_workload_item_t wi; + + const uint32_t *pl; + uint32_t i=0,nitems=0; + + h264_pic_data pic_data; + + pInfo->qm_present_list=0; + + //h264_parse_emit_4X4_scaling_matrix(parent, pInfo); + // h264_parse_emit_8X8_scaling_matrix(parent, pInfo); + + // h264_fill_pic_data(pInfo, &pic_data); + + // How many payloads must be generated + nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up + + pl = (const uint32_t *) &pic_data; + + // Dump slice data to an array of workitems, to do pl access non valid mem + for ( i = 0; i < nitems; i++ ) + { + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PIC_REG; + wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct + wi.data.data_payload[0] = pl[0]; + wi.data.data_payload[1] = pl[1]; + pl += 2; + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + + // viddec_pm_append_workitem( parent, &wi ); + } + else + { + //viddec_pm_append_workitem_next( parent, &wi ); + } + } + + return; +} + +void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) +{ + + viddec_workload_item_t wi; + uint32_t i=0,nitems=0; + + ///////////////////////// Frame attributes////////////////////////// + + //Push data into 
current workload if first frame or frame_boundary already detected by non slice nal
+    if ( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal))
+    {
+        //viddec_workload_t *wl_cur = viddec_pm_get_header( parent );
+        //pInfo->img.g_new_frame = 0;
+        pInfo->Is_first_frame_in_stream =0;
+        pInfo->is_frame_boundary_detected_by_non_slice_nal=0;
+        pInfo->push_to_cur = 1;
+        //h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo);
+    }
+    else // move to cur if frame boundary detected by previous non slice nal, or move to next if not
+    {
+        //viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+
+        pInfo->push_to_cur = 0;
+        //h264_translate_parser_info_to_frame_attributes(wl_next, pInfo);
+
+        pInfo->is_current_workload_done=1;
+    }
+
+    ///////////////////// SPS/////////////////////
+    // h264_parse_emit_sps(parent, pInfo);
+
+    /////////////////////display frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 + pInfo->dpb.frame_id_need_to_be_displayed[i]);
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            // viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            // viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 + pInfo->dpb.frame_id_need_to_be_removed[i]);
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            //viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            // viddec_pm_append_workitem_next( parent, &wi );
+        }
+
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    /////////////////////flush frames (do not display)/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_dropped;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 + pInfo->dpb.frame_id_need_to_be_dropped[i]);
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            //viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            // viddec_pm_append_workitem_next( parent, &wi );
+        }
+
+    }
+    pInfo->dpb.frame_numbers_need_to_be_dropped =0;
+
+    /////////////////////update DPB frames/////////////////////
+    nitems = pInfo->dpb.used_size;
+    for (i=0; i<nitems; i++)
+    {
+        int32_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id);
+            wi.ref_frame.reference_id = fs_id;
+            wi.ref_frame.luma_phys_addr = 0;
+            wi.ref_frame.chroma_phys_addr = 0;
+
+            if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+            {
+                // viddec_pm_append_workitem( parent, &wi );
+            }
+            else
+            {
+                //viddec_pm_append_workitem_next( parent, &wi );
+            }
+        }
+    }
+
+
+    /////////////////////update dpb frames info (poc)/////////////////////
+    nitems = pInfo->dpb.used_size;
+    for (i=0; i<nitems; i++)
+    {
+        int32_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_DPB_FRAME_POC;
+            wi.data.data_offset = fs_id;
+            //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc);
+
+            switch (viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id])))
+            {
+            case (FRAME): {
+                wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                break;
+            };
+
+            case (TOP_FIELD): {
+                wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                wi.data.data_payload[1] = 0;
+                break;
+            };
+
+            case (BOTTOM_FIELD): {
+                wi.data.data_payload[0] = 0;
+                wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                break;
+            };
+
+            default : {
+                wi.data.data_payload[0] = 0;
+                wi.data.data_payload[1] = 0;
+                break;
+            };
+            }
+
+
+            if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+            {
+                // viddec_pm_append_workitem( parent, &wi );
+            }
+            else
+            {
+                //viddec_pm_append_workitem_next( parent, &wi );
+            }
+
+        }
+    }
+
+    /////////////////////Alloc buffer for current Existing frame/////////////////////
+    if (0!=pInfo->dpb.frame_numbers_need_to_be_allocated)
+    {
+        if (pInfo->push_to_cur)
+        {
+            // viddec_workload_t *wl_cur = viddec_pm_get_header (parent);
+            // wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+        else
+        {
+            // viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+            //wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+    return;
+}
+
+
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
+{
+
+    uint32_t nitems=0, i=0;
+    viddec_workload_item_t wi;
+
+    ////
+    //// Now we can flush out all frames in DPB for display
+    if (viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3)
+    {
+        h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc); //, DANGLING_TYPE_GAP_IN_FRAME
+    }
+
+    h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+    h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+    /////////////////////display frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 + pInfo->dpb.frame_id_need_to_be_displayed[i]);
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            //viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            //viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_removed[i]);
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            // viddec_pm_append_workitem( parent, &wi );
+            viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+        }
+        else
+        {
+            // viddec_pm_append_workitem_next( parent, &wi );
+            viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    return;
+}
+#endif
diff --git a/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c
new file mode 100755
index 0000000..103841e
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c
@@ -0,0 +1,802 @@
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+
+#include "h264.h"
+#include "h264parse.h"
+
+#include "h264parse_dpb.h"
+
+/* Init function which can be called to initialize local context on open, flush, and preserve */
+void viddec_h264secure_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+{
+    struct h264_viddec_parser* parser = ctxt;
+    h264_Info * pInfo = &(parser->info);
+
+    if (!preserve)
+    {
+        /* we don't initialize this data if we want to preserve
+           sequence and gop information */
+        h264_init_sps_pps(parser,persist_mem);
+    }
+    /* picture level info which will always be initialized */
+    h264_init_Info_under_sps_pps_level(pInfo);
+#ifdef SW_ERROR_CONCEALEMNT
+    pInfo->sw_bail = 0;
+#endif
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+uint32_t viddec_h264secure_parse(void *parent, void *ctxt)
+{
+    struct h264_viddec_parser* parser = ctxt;
+
+    h264_Info * pInfo = &(parser->info);
+
+    h264_Status status = H264_STATUS_ERROR;
+
+
+    uint8_t nal_ref_idc = 0;
+
+    ///// Parse NAL Unit header
+    pInfo->img.g_new_frame = 0;
+    pInfo->push_to_cur = 1;
+    pInfo->is_current_workload_done =0;
+    pInfo->nal_unit_type = 0;
+
+    h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc);
+
+    ///// Check frame boundary for non-VCL delimiter
+    h264_check_previous_frame_end(pInfo);
+
+    //////// Parse valid NAL unit
+    switch ( pInfo->nal_unit_type )
+    {
+    case h264_NAL_UNIT_TYPE_IDR:
+        if (pInfo->got_start) {
+            pInfo->img.recovery_point_found |= 1;
+        }
+
+        pInfo->sei_rp_received = 0;
+
+    case h264_NAL_UNIT_TYPE_SLICE:
+        ////////////////////////////////////////////////////////////////////////////
+        // Step 1: Check start point
+        ////////////////////////////////////////////////////////////////////////////
+        //
+        /// Slice parsing must start from the valid start point( SPS, PPS, IDR or recovery point or primary_I)
+        /// 1) No start point reached, append current ES buffer to workload and release it
+        /// 2) else, start parsing
+        //
+        //if(pInfo->got_start && ((pInfo->sei_information.recovery_point) || (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)))
+        //{
+        //pInfo->img.recovery_point_found = 1;
+        //}
+    {
+
+        h264_Slice_Header_t next_SliceHeader;
+
+        /// Reset next slice header
+        h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t));
+        next_SliceHeader.nal_ref_idc = nal_ref_idc;
+
+        if ( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start))
+        {
+            pInfo->img.recovery_point_found |=4;
+        }
+        pInfo->primary_pic_type_plus_one = 0;
+
+
+
+#ifndef VBP
+        if (pInfo->img.recovery_point_found == 0) {
+            pInfo->img.structure = FRAME;
+            pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+            pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+            break;
+        }
+#endif
+
+        ////////////////////////////////////////////////////////////////////////////
+        // Step 2: Parsing slice header
+        ////////////////////////////////////////////////////////////////////////////
+        /// PWT
+        pInfo->h264_pwt_start_byte_offset=0;
+        pInfo->h264_pwt_start_bit_offset=0;
+        pInfo->h264_pwt_end_byte_offset=0;
+        pInfo->h264_pwt_end_bit_offset=0;
+        pInfo->h264_pwt_enabled =0;
+        /// IDR flag
+        next_SliceHeader.idr_flag = (pInfo->nal_unit_type ==
h264_NAL_UNIT_TYPE_IDR); + + + /// Pass slice header + status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP(parent, pInfo, &next_SliceHeader); + + pInfo->sei_information.recovery_point = 0; + + if (next_SliceHeader.sh_error & 3) { + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + + // Error type definition, refer to viddec_fw_common_defs.h + // if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17) + // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18) + // if this is frame based, both 2 bits should be set + pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + + break; + } + pInfo->img.current_slice_num++; + + +#ifdef DUMP_HEADER_INFO + dump_slice_header(pInfo, &next_SliceHeader); +////h264_print_decoder_values(pInfo); +#endif + + + //////////////////////////////////////////////////////////////////////////// + // Step 3: Processing if new picture coming + // 1) if it's the second field + // 2) if it's a new frame + //////////////////////////////////////////////////////////////////////////// + //AssignQuantParam(pInfo); + if (h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader)) + { + // + ///----------------- New Picture.boundary detected-------------------- + // + pInfo->img.g_new_pic++; + + // + // Complete previous picture + h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old + //h264_hdr_post_poc(0, 0, use_old); + + // + // Update slice structures: + h264_update_old_slice(pInfo, next_SliceHeader); //cur->old; next->cur; + + // + // 1) if resolution change: reset dpb + // 2) else: init frame store + h264_update_img_info(pInfo); //img, dpb + + // + ///----------------- New frame.boundary detected-------------------- + // + pInfo->img.second_field = h264_is_second_field(pInfo); + if (pInfo->img.second_field == 0) + { + pInfo->img.g_new_frame = 1; + h264_dpb_update_queue_dangling_field(pInfo); + + // + /// DPB management + /// 1) check the gaps + /// 2) assign fs for non-exist frames + /// 3) fill the gaps + /// 4) store frame into DPB if ... 
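+                    // Illustration: with num_ref_frames = 4, receiving frame_num
+                    // 0,1,2,5 leaves gaps at 3 and 4; the gap-management call
+                    // below creates "non-existing" frames for them so that
+                    // sliding-window marking stays consistent (spec 8.2.5.2).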
+ // + //if(pInfo->SliceHeader.redundant_pic_cnt) + { + h264_dpb_gaps_in_frame_num_mem_management(pInfo); + } + +#ifdef DUMP_HEADER_INFO + dump_new_picture_attr(pInfo, pInfo->SliceHeader.frame_num); +#endif + } + // + /// Decoding POC + h264_hdr_decoding_poc (pInfo, 0, 0); + + // + /// Init Frame Store for next frame + h264_dpb_init_frame_store (pInfo); + pInfo->img.current_slice_num = 1; + + if (pInfo->SliceHeader.first_mb_in_slice != 0) + { + ////Come here means we have slice lost at the beginning, since no FMO support + pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17); + } + + // + /// Emit out the New Frame + if (pInfo->img.g_new_frame) + { + h264_parse_emit_start_new_frame(parent, pInfo); + } + + h264_parse_emit_current_pic(parent, pInfo); + } + else ///////////////////////////////////////////////////// If Not a picture start + { + // + /// Update slice structures: cur->old; next->cur; + h264_update_old_slice(pInfo, next_SliceHeader); + + // + /// 1) if resolution change: reset dpb + /// 2) else: update img info + h264_update_img_info(pInfo); + } + + + ////////////////////////////////////////////////////////////// + // Step 4: DPB reference list init and reordering + ////////////////////////////////////////////////////////////// + + //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field + h264_update_frame_type(pInfo); + + + h264_dpb_update_ref_lists( pInfo); + +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if ((pInfo->dpb.ltref_frames_in_buffer + pInfo->dpb.ref_frames_in_buffer ) > pInfo->active_SPS.num_ref_frames) + { + pInfo->sw_bail = 1; + } +#endif +#endif +#ifdef DUMP_HEADER_INFO + dump_ref_list(pInfo); +#endif + /// Emit out the current "good" slice + h264_parse_emit_current_slice(parent, pInfo); + + } + break; + + ///// * Main profile doesn't support Data Partition, skipped.... *//// + case h264_NAL_UNIT_TYPE_DPA: + case h264_NAL_UNIT_TYPE_DPB: + case h264_NAL_UNIT_TYPE_DPC: + //OS_INFO("***********************DP feature, not supported currently*******************\n"); + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + status = H264_STATUS_NOTSUPPORT; + break; + + //// * Parsing SEI info *//// + case h264_NAL_UNIT_TYPE_SEI: + status = H264_STATUS_OK; + + //OS_INFO("*****************************SEI**************************************\n"); + if (pInfo->sps_valid) { + //h264_user_data_t user_data; /// Replace with tmp buffer while porting to FW + pInfo->number_of_first_au_info_nal_before_first_slice++; + /// parsing the SEI info + status = h264_Parse_Supplemental_Enhancement_Information_Message(parent, pInfo); + } + + //h264_rbsp_trailing_bits(pInfo); + break; + case h264_NAL_UNIT_TYPE_SPS: + { + //OS_INFO("*****************************SPS**************************************\n"); + /// + /// Can not define local SPS since the Current local stack size limitation! 
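+    /// The pattern used below: parse into the single active_SPS, persist it
+    /// to DDR keyed by seq_parameter_set_id, and restore the previously
+    /// active SPS from DDR when the incoming id differs.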
+    /// Could be changed after the limitation gone
+    ///
+        uint8_t old_sps_id=0;
+        vui_seq_parameters_t_not_used vui_seq_not_used;
+
+        old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+        h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+
+
+        status = h264_Parse_SeqParameterSet(parent, pInfo, &(pInfo->active_SPS), &vui_seq_not_used, (int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL);
+        if (status == H264_STATUS_OK) {
+            h264_Parse_Copy_Sps_To_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_SPS.seq_parameter_set_id);
+            pInfo->sps_valid = 1;
+
+            if (1==pInfo->active_SPS.pic_order_cnt_type) {
+                h264_Parse_Copy_Offset_Ref_Frames_To_DDR(pInfo,(int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL,pInfo->active_SPS.seq_parameter_set_id);
+            }
+
+#ifdef DUMP_HEADER_INFO
+            dump_sps(&(pInfo->active_SPS));
+#endif
+
+        }
+        ///// Restore the active SPS if new arrival's id changed
+        if (old_sps_id>=MAX_NUM_SPS) {
+            h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+            pInfo->active_SPS.seq_parameter_set_id = 0xff;
+        }
+        else {
+            if (old_sps_id!=pInfo->active_SPS.seq_parameter_set_id) {
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            }
+            else {
+                //h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set));
+                pInfo->active_SPS.seq_parameter_set_id = 0xff;
+            }
+        }
+
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+    }
+    break;
+    case h264_NAL_UNIT_TYPE_PPS:
+    {
+        //OS_INFO("*****************************PPS**************************************\n");
+
+        uint32_t old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+        uint32_t old_pps_id = pInfo->active_PPS.pic_parameter_set_id;
+
+        h264_memset(&pInfo->active_PPS, 0x0, sizeof(pic_param_set));
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+
+        if (h264_Parse_PicParameterSet(parent, pInfo, &pInfo->active_PPS)== H264_STATUS_OK)
+        {
+            h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_PPS.seq_parameter_set_id);
+            if (old_sps_id != pInfo->active_SPS.seq_parameter_set_id)
+            {
+                pInfo->Is_SPS_updated = 1;
+            }
+            if (pInfo->active_SPS.seq_parameter_set_id != 0xff) {
+                h264_Parse_Copy_Pps_To_DDR(pInfo, &pInfo->active_PPS, pInfo->active_PPS.pic_parameter_set_id);
+                pInfo->got_start = 1;
+                if (pInfo->sei_information.recovery_point)
+                {
+                    pInfo->img.recovery_point_found |= 2;
+
+                    //// Enable the RP recovery if no IDR ---Cisco
+                    if ((pInfo->img.recovery_point_found & 1)==0)
+                        pInfo->sei_rp_received = 1;
+                }
+            }
+            else
+            {
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            }
+#ifdef DUMP_HEADER_INFO
+            dump_pps(&(pInfo->active_PPS));
+#endif
+        } else {
+            if (old_sps_id<MAX_NUM_SPS)
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            if (old_pps_id<MAX_NUM_PPS)
+                h264_Parse_Copy_Pps_From_DDR(pInfo, &(pInfo->active_PPS), old_pps_id);
+        }
+
+    } //// End of PPS parsing
+    break;
+
+
+    case h264_NAL_UNIT_TYPE_EOSeq:
+    case h264_NAL_UNIT_TYPE_EOstream:
+
+        h264_parse_emit_eos(parent, pInfo);
+        h264_init_dpb(&(pInfo->dpb));
+
+        pInfo->is_current_workload_done=1;
+
+        /* picture level info which will always be initialized */
+        //h264_init_Info_under_sps_pps_level(pInfo);
+
+        ////reset the pInfo here
+        //viddec_h264_init(ctxt, (uint32_t *)parser->sps_pps_ddr_paddr, false);
+
+
+        status = H264_STATUS_OK;
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+        break;
+
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+#if 1
+        ///// primary_pic_type
+    {
+        uint32_t code = 0xff;
+        int32_t ret = 0;
+        ret = viddec_pm_get_bits(parent, (uint32_t *)&(code), 3);
+
+        if (ret != -1) {
+            //if(pInfo->got_start && (code == 0))
+            //{
+            //pInfo->img.recovery_point_found |= 4;
+            //}
+
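+            // Stored as primary_pic_type + 1 so that the value 0 can mean
+            // "no access-unit delimiter seen yet".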
pInfo->primary_pic_type_plus_one = (uint8_t)(code)+1; + status = H264_STATUS_OK; + } + pInfo->number_of_first_au_info_nal_before_first_slice++; + break; + } +#endif + + case h264_NAL_UNIT_TYPE_Reserved1: + case h264_NAL_UNIT_TYPE_Reserved2: + case h264_NAL_UNIT_TYPE_Reserved3: + case h264_NAL_UNIT_TYPE_Reserved4: + case h264_NAL_UNIT_TYPE_Reserved5: + status = H264_STATUS_OK; + pInfo->number_of_first_au_info_nal_before_first_slice++; + break; + + case h264_NAL_UNIT_TYPE_filler_data: + status = H264_STATUS_OK; + break; + case h264_NAL_UNIT_TYPE_ACP: + break; + case h264_NAL_UNIT_TYPE_SPS_extension: + case h264_NAL_UNIT_TYPE_unspecified: + case h264_NAL_UNIT_TYPE_unspecified2: + status = H264_STATUS_OK; + //nothing + break; + default: + status = H264_STATUS_OK; + break; + } + + //pInfo->old_nal_unit_type = pInfo->nal_unit_type; + switch ( pInfo->nal_unit_type ) + { + case h264_NAL_UNIT_TYPE_IDR: + case h264_NAL_UNIT_TYPE_SLICE: + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + case h264_NAL_UNIT_TYPE_SPS: + case h264_NAL_UNIT_TYPE_PPS: + case h264_NAL_UNIT_TYPE_SEI: + case h264_NAL_UNIT_TYPE_EOSeq: + case h264_NAL_UNIT_TYPE_EOstream: + case h264_NAL_UNIT_TYPE_Reserved1: + case h264_NAL_UNIT_TYPE_Reserved2: + case h264_NAL_UNIT_TYPE_Reserved3: + case h264_NAL_UNIT_TYPE_Reserved4: + case h264_NAL_UNIT_TYPE_Reserved5: + { + pInfo->old_nal_unit_type = pInfo->nal_unit_type; + break; + } + default: + break; + } + + return status; +} + +void viddec_h264secure_get_context_size(viddec_parser_memory_sizes_t *size) +{ + /* Should return size of my structure */ + size->context_size = sizeof(struct h264_viddec_parser); + size->persist_size = MAX_NUM_SPS * sizeof(seq_param_set_all) + + MAX_NUM_PPS * sizeof(pic_param_set) + + MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE + + sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +void viddec_h264secure_flush(void *parent, void *ctxt) +{ + int i; + struct h264_viddec_parser* parser = ctxt; + h264_Info * pInfo = &(parser->info); + + /* just flush dpb and disable output */ + h264_dpb_flush_dpb(pInfo, 0, pInfo->img.second_field, pInfo->active_SPS.num_ref_frames); + + /* reset the dpb to the initial state, avoid parser store + wrong data to dpb in next slice parsing */ + h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; + for (i = 0; i < NUM_DPB_FRAME_STORES; i++) + { + p_dpb->fs[i].fs_idc = MPD_DPB_FS_NULL_IDC; + p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC; + } + p_dpb->used_size = 0; + p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC; + p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC; + + return; +} + +h264_Status h264secure_Parse_Dec_Ref_Pic_Marking(h264_Info* pInfo, void *newdata, h264_Slice_Header_t*SliceHeader) +{ + vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) newdata; + + uint8_t i = 0; + uint32_t code; + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + SliceHeader->sh_dec_refpic.no_output_of_prior_pics_flag = (uint8_t)sliceheader_p->ref_pic_marking.no_output_of_prior_pics_flag; + SliceHeader->sh_dec_refpic.long_term_reference_flag = (uint8_t)sliceheader_p->ref_pic_marking.long_term_reference_flag; + pInfo->img.long_term_reference_flag = SliceHeader->sh_dec_refpic.long_term_reference_flag; + } + else + { + 
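+        // memory_management_control_operation values (H.264 Table 7-9):
+        //   1: mark a short-term picture unused (difference_of_pic_nums_minus1)
+        //   2: mark a long-term picture unused (long_term_pic_num)
+        //   3: convert a short-term picture to long-term
+        //   4: set max_long_term_frame_idx_plus1
+        //   5: mark all pictures unused and reset frame numbering
+        //   6: mark the current picture long-term
+        //   0: end of the MMCO list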
SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag = sliceheader_p->ref_pic_marking.adaptive_ref_pic_marking_mode_flag; + + /////////////////////////////////////////////////////////////////////////////////////// + //adaptive_ref_pic_marking_mode_flag Reference picture marking mode specified + // Sliding window reference picture marking mode: A marking mode + // providing a first-in first-out mechanism for short-term reference pictures. + // Adaptive reference picture marking mode: A reference picture + // marking mode providing syntax elements to specify marking of + // reference pictures as unused for reference?and to assign long-term + // frame indices. + /////////////////////////////////////////////////////////////////////////////////////// + + if (SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) + { + do + { + if (i < MAX_OP) + { + code = sliceheader_p->ref_pic_marking.op[i].memory_management_control_operation; + SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = code; + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) + { + SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = sliceheader_p->ref_pic_marking.op[i].op1.difference_of_pic_nums_minus1; + } + + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2) + { + SliceHeader->sh_dec_refpic.long_term_pic_num[i] = sliceheader_p->ref_pic_marking.op[i].op2.long_term_pic_num; + } + + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6) + { + SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = sliceheader_p->ref_pic_marking.op[i].op6.long_term_frame_idx; + } + + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) { + SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = sliceheader_p->ref_pic_marking.op[i].op3.difference_of_pic_nums_minus1; + SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = sliceheader_p->ref_pic_marking.op[i].op3.long_term_frame_idx; + } + + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4) + { + SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = sliceheader_p->ref_pic_marking.op[i].op4.max_long_term_frame_idx_plus1; + } + + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5) + { + pInfo->img.curr_has_mmco_5 = 1; + } + } + + if (i >= MAX_OP) { + return H264_STATUS_ERROR; + } + } while (SliceHeader->sh_dec_refpic.memory_management_control_operation[i++] != 0); + } + } + + SliceHeader->sh_dec_refpic.dec_ref_pic_marking_count = i; + + return H264_STATUS_OK; +} + +uint32_t h264secure_Update_Slice_Header(h264_Info* pInfo, void *newdata, h264_Slice_Header_t *SliceHeader) +{ + h264_Status retStatus = H264_STATUS_OK; + uint8_t data; + vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) newdata; + ///// first_mb_in_slice + SliceHeader->first_mb_in_slice = sliceheader_p->slice_header.first_mb_in_slice; + + SliceHeader->pic_parameter_id = (uint8_t)sliceheader_p->slice_header.pps_id; + retStatus = h264_active_par_set(pInfo, SliceHeader); + + switch (pInfo->active_SPS.profile_idc) + { + case h264_ProfileBaseline: + case h264_ProfileMain: + case h264_ProfileExtended: + pInfo->active_PPS.transform_8x8_mode_flag=0; + pInfo->active_PPS.pic_scaling_matrix_present_flag =0; + pInfo->active_PPS.second_chroma_qp_index_offset = pInfo->active_PPS.chroma_qp_index_offset; + default: + break; + } + + uint32_t code; + int32_t max_mb_num=0; + + SliceHeader->frame_num = (int32_t)sliceheader_p->slice_header.frame_num; + + /// 
Picture structure + SliceHeader->structure = FRAME; + SliceHeader->field_pic_flag = 0; + SliceHeader->bottom_field_flag = 0; + + if (!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag)) + { + /// field_pic_flag + SliceHeader->field_pic_flag = (uint8_t)sliceheader_p->slice_header.field_pic_flag; + + if (SliceHeader->field_pic_flag) + { + SliceHeader->bottom_field_flag = (uint8_t)sliceheader_p->slice_header.bottom_field_flag; + SliceHeader->structure = SliceHeader->bottom_field_flag? BOTTOM_FIELD: TOP_FIELD; + } + } + + ////// Check valid or not of first_mb_in_slice + if (SliceHeader->structure == FRAME) { + max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs; + } else { + max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs/2; + } + + + if (pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) { + SliceHeader->first_mb_in_slice <<=1; + } + + if (SliceHeader->first_mb_in_slice >= max_mb_num) { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + + + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + SliceHeader->idr_pic_id = sliceheader_p->slice_header.idr_pic_id; + } + + if (pInfo->active_SPS.pic_order_cnt_type == 0) + { + SliceHeader->pic_order_cnt_lsb = (uint32_t)sliceheader_p->slice_header.pic_order_cnt_lsb; + + + if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt_bottom = sliceheader_p->slice_header.delta_pic_order_cnt_bottom; + } + else + { + SliceHeader->delta_pic_order_cnt_bottom = 0; + } + } + + if ((pInfo->active_SPS.pic_order_cnt_type == 1) && !(pInfo->active_SPS.delta_pic_order_always_zero_flag)) + { + SliceHeader->delta_pic_order_cnt[0] = sliceheader_p->slice_header.delta_pic_order_cnt[0]; + if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt[1] = sliceheader_p->slice_header.delta_pic_order_cnt[1]; + } + } +/* + if (pInfo->active_PPS.redundant_pic_cnt_present_flag) + { + SliceHeader->redundant_pic_cnt = sliceheader_p->slice_header.redundant_pic_cnt; + if (SliceHeader->redundant_pic_cnt > 127) { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + } else { + SliceHeader->redundant_pic_cnt = 0; + } +*/ + //// + //// Parse Ref_pic marking if there + //// + if (SliceHeader->nal_ref_idc != 0) + { + if (h264secure_Parse_Dec_Ref_Pic_Marking(pInfo, newdata, SliceHeader) != H264_STATUS_OK) + { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + } + retStatus = H264_STATUS_OK; + return retStatus; +} +uint32_t viddec_h264secure_update(void *parent, void *data, uint32_t size) +{ + viddec_pm_cxt_t * parser_cxt = (viddec_pm_cxt_t *)parent; + struct h264_viddec_parser* parser = (struct h264_viddec_parser*) &parser_cxt->codec_data[0]; + h264_Info * pInfo = &(parser->info); + + h264_Status status = H264_STATUS_ERROR; + vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) data; + + pInfo->img.g_new_frame = 0; + pInfo->push_to_cur = 1; + pInfo->is_current_workload_done =0; + pInfo->nal_unit_type = 0; + pInfo->nal_unit_type = sliceheader_p->slice_header.nal_unit_type & 0x1F; + + h264_Slice_Header_t next_SliceHeader; + + /// Reset next slice header + h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t)); + next_SliceHeader.nal_ref_idc = (sliceheader_p->slice_header.nal_unit_type & 0x60) >> 5; + + if ( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start)) + { + pInfo->img.recovery_point_found |=4; + } + 
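+    // recovery_point_found is a bit set: 1 = IDR seen, 2 = SEI recovery
+    // point (latched when the PPS arrives), 4 = AUD announcing an I-only AU.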
pInfo->primary_pic_type_plus_one = 0; + + //////////////////////////////////////////////////////////////////////////// + // Step 2: Parsing slice header + //////////////////////////////////////////////////////////////////////////// + /// PWT + pInfo->h264_pwt_start_byte_offset=0; + pInfo->h264_pwt_start_bit_offset=0; + pInfo->h264_pwt_end_byte_offset=0; + pInfo->h264_pwt_end_bit_offset=0; + pInfo->h264_pwt_enabled =0; + /// IDR flag + next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); + + /// Pass slice header + status = h264secure_Update_Slice_Header(pInfo, sliceheader_p, &next_SliceHeader); + + pInfo->sei_information.recovery_point = 0; + pInfo->img.current_slice_num++; + + + //////////////////////////////////////////////////////////////////////////// + // Step 3: Processing if new picture coming + // 1) if it's the second field + // 2) if it's a new frame + //////////////////////////////////////////////////////////////////////////// + //AssignQuantParam(pInfo); + if (h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader)) + { + // + ///----------------- New Picture.boundary detected-------------------- + // + pInfo->img.g_new_pic++; + + // + // Complete previous picture + h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old + + // + // Update slice structures: + h264_update_old_slice(pInfo, next_SliceHeader); //cur->old; next->cur; + + // + // 1) if resolution change: reset dpb + // 2) else: init frame store + h264_update_img_info(pInfo); //img, dpb + + // + ///----------------- New frame.boundary detected-------------------- + // + pInfo->img.second_field = h264_is_second_field(pInfo); + if (pInfo->img.second_field == 0) + { + pInfo->img.g_new_frame = 1; + h264_dpb_update_queue_dangling_field(pInfo); + h264_dpb_gaps_in_frame_num_mem_management(pInfo); + } + /// Decoding POC + h264_hdr_decoding_poc (pInfo, 0, 0); + // + /// Init Frame Store for next frame + h264_dpb_init_frame_store (pInfo); + pInfo->img.current_slice_num = 1; + + if (pInfo->SliceHeader.first_mb_in_slice != 0) + { + ////Come here means we have slice lost at the beginning, since no FMO support + pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17); + } + } + else ///////////////////////////////////////////////////// If Not a picture start + { + /// Update slice structures: cur->old; next->cur; + h264_update_old_slice(pInfo, next_SliceHeader); + /// 1) if resolution change: reset dpb + /// 2) else: update img info + h264_update_img_info(pInfo); + } + return status; +} diff --git a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c new file mode 100755 index 0000000..c55db6b --- /dev/null +++ b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c @@ -0,0 +1,607 @@ +#include "viddec_parser_ops.h" + +#include "viddec_fw_workload.h" +#include "viddec_pm.h" + +#include "h264.h" +#include "h264parse.h" + +#include "h264parse_dpb.h" + +/* Init function which can be called to intialized local context on open and flush and preserve*/ +#ifdef VBP +void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) +#else +static void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) +#endif +{ + struct h264_viddec_parser* parser = ctxt; + h264_Info * pInfo = &(parser->info); + + if (!preserve) + { + /* we don't initialize this data if we want to preserve + sequence and gop information */ + h264_init_sps_pps(parser,persist_mem); + } + /* picture level info which will always be initialized */ + 
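+    // Two-level state: the SPS/PPS tables live in persist_mem and survive a
+    // flush when preserve is set; per-picture state is always reset below.
+    //
+    // Typical host sequence (sketch): get_cxt_size() -> allocate context and
+    // persist memory -> init() -> parse() per NAL -> flush() on seek.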
h264_init_Info_under_sps_pps_level(pInfo); +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + pInfo->sw_bail = 0; +#endif +#endif + return; +} + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +#ifdef VBP +uint32_t viddec_h264_parse(void *parent, void *ctxt) +#else +static uint32_t viddec_h264_parse(void *parent, void *ctxt) +#endif +{ + struct h264_viddec_parser* parser = ctxt; + + h264_Info * pInfo = &(parser->info); + + h264_Status status = H264_STATUS_ERROR; + + + uint8_t nal_ref_idc = 0; + + ///// Parse NAL Unit header + pInfo->img.g_new_frame = 0; + pInfo->push_to_cur = 1; + pInfo->is_current_workload_done =0; + pInfo->nal_unit_type = 0; + + h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); + + ///// Check frame bounday for non-vcl elimitter + h264_check_previous_frame_end(pInfo); + + //OS_INFO("========================nal_type: %d=================\n", pInfo->nal_unit_type); + //DEBUG_WRITE(pInfo->nal_unit_type, pInfo->got_start, pInfo->wl_err_flag, pInfo->is_current_workload_done, 0, 0); +#if 0 + devh_SVEN_WriteModuleEvent( NULL, + SVEN_MODULE_EVENT_GV_FW_PARSER_DEBUG_P0, + pInfo->got_start,pInfo->nal_unit_type, pInfo->wl_err_curr, pInfo->is_current_workload_done, 0, pInfo->img.frame_num); +#endif + + //////// Parse valid NAL unit + switch ( pInfo->nal_unit_type ) + { + case h264_NAL_UNIT_TYPE_IDR: + if (pInfo->got_start) { + pInfo->img.recovery_point_found |= 1; + } + + pInfo->sei_rp_received = 0; + + case h264_NAL_UNIT_TYPE_SLICE: + //////////////////////////////////////////////////////////////////////////// + // Step 1: Check start point + //////////////////////////////////////////////////////////////////////////// + // + /// Slice parsing must start from the valid start point( SPS, PPS, IDR or recovery point or primary_I) + /// 1) No start point reached, append current ES buffer to workload and release it + /// 2) else, start parsing + // + //if(pInfo->got_start && ((pInfo->sei_information.recovery_point) || (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR))) + //{ + //pInfo->img.recovery_point_found = 1; + //} + { + + h264_Slice_Header_t next_SliceHeader; + + /// Reset next slice header + h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t)); + next_SliceHeader.nal_ref_idc = nal_ref_idc; + + if ( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start)) + { + pInfo->img.recovery_point_found |=4; + } + pInfo->primary_pic_type_plus_one = 0; + + + +#ifndef VBP + if (pInfo->img.recovery_point_found == 0) { + pInfo->img.structure = FRAME; + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + break; + } +#endif + + //////////////////////////////////////////////////////////////////////////// + // Step 2: Parsing slice header + //////////////////////////////////////////////////////////////////////////// + /// PWT + pInfo->h264_pwt_start_byte_offset=0; + pInfo->h264_pwt_start_bit_offset=0; + pInfo->h264_pwt_end_byte_offset=0; + pInfo->h264_pwt_end_bit_offset=0; + pInfo->h264_pwt_enabled =0; + /// IDR flag + next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); + + + /// Pass slice header + status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP(parent, pInfo, &next_SliceHeader); + + pInfo->sei_information.recovery_point = 0; + + if 
(next_SliceHeader.sh_error & 3) { + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + + // Error type definition, refer to viddec_fw_common_defs.h + // if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17) + // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18) + // if this is frame based, both 2 bits should be set + pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + + break; + } + pInfo->img.current_slice_num++; + + +#ifdef DUMP_HEADER_INFO + dump_slice_header(pInfo, &next_SliceHeader); +////h264_print_decoder_values(pInfo); +#endif + + + //////////////////////////////////////////////////////////////////////////// + // Step 3: Processing if new picture coming + // 1) if it's the second field + // 2) if it's a new frame + //////////////////////////////////////////////////////////////////////////// + //AssignQuantParam(pInfo); + if (h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader)) + { + // + ///----------------- New Picture.boundary detected-------------------- + // + pInfo->img.g_new_pic++; + + // + // Complete previous picture + h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old + //h264_hdr_post_poc(0, 0, use_old); + + // + // Update slice structures: + h264_update_old_slice(pInfo, next_SliceHeader); //cur->old; next->cur; + + // + // 1) if resolution change: reset dpb + // 2) else: init frame store + h264_update_img_info(pInfo); //img, dpb + + // + ///----------------- New frame.boundary detected-------------------- + // + pInfo->img.second_field = h264_is_second_field(pInfo); + if (pInfo->img.second_field == 0) + { + pInfo->img.g_new_frame = 1; + h264_dpb_update_queue_dangling_field(pInfo); + + // + /// DPB management + /// 1) check the gaps + /// 2) assign fs for non-exist frames + /// 3) fill the gaps + /// 4) store frame into DPB if ... 
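+                    // A "dangling" field is a first field whose complement was
+                    // never received; the update_queue_dangling_field() call
+                    // above closes it out before the new frame is queued.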
+ // + //if(pInfo->SliceHeader.redundant_pic_cnt) + { + h264_dpb_gaps_in_frame_num_mem_management(pInfo); + } + +#ifdef DUMP_HEADER_INFO + dump_new_picture_attr(pInfo, pInfo->SliceHeader.frame_num); +#endif + } + // + /// Decoding POC + h264_hdr_decoding_poc (pInfo, 0, 0); + + // + /// Init Frame Store for next frame + h264_dpb_init_frame_store (pInfo); + pInfo->img.current_slice_num = 1; + + if (pInfo->SliceHeader.first_mb_in_slice != 0) + { + ////Come here means we have slice lost at the beginning, since no FMO support + pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17); + } + + // + /// Emit out the New Frame + if (pInfo->img.g_new_frame) + { + h264_parse_emit_start_new_frame(parent, pInfo); + } + + h264_parse_emit_current_pic(parent, pInfo); + } + else ///////////////////////////////////////////////////// If Not a picture start + { + // + /// Update slice structures: cur->old; next->cur; + h264_update_old_slice(pInfo, next_SliceHeader); + + // + /// 1) if resolution change: reset dpb + /// 2) else: update img info + h264_update_img_info(pInfo); + } + + + ////////////////////////////////////////////////////////////// + // Step 4: DPB reference list init and reordering + ////////////////////////////////////////////////////////////// + + //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field + h264_update_frame_type(pInfo); + + + h264_dpb_update_ref_lists( pInfo); + +#ifdef VBP +#ifdef SW_ERROR_CONCEALEMNT + if ((pInfo->dpb.ltref_frames_in_buffer + pInfo->dpb.ref_frames_in_buffer ) > pInfo->active_SPS.num_ref_frames) + { + pInfo->sw_bail = 1; + } +#endif +#endif +#ifdef DUMP_HEADER_INFO + dump_ref_list(pInfo); +#endif + /// Emit out the current "good" slice + h264_parse_emit_current_slice(parent, pInfo); + + } + break; + + ///// * Main profile doesn't support Data Partition, skipped.... *//// + case h264_NAL_UNIT_TYPE_DPA: + case h264_NAL_UNIT_TYPE_DPB: + case h264_NAL_UNIT_TYPE_DPC: + //OS_INFO("***********************DP feature, not supported currently*******************\n"); + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + status = H264_STATUS_NOTSUPPORT; + break; + + //// * Parsing SEI info *//// + case h264_NAL_UNIT_TYPE_SEI: + status = H264_STATUS_OK; + + //OS_INFO("*****************************SEI**************************************\n"); + if (pInfo->sps_valid) { + //h264_user_data_t user_data; /// Replace with tmp buffer while porting to FW + pInfo->number_of_first_au_info_nal_before_first_slice++; + /// parsing the SEI info + status = h264_Parse_Supplemental_Enhancement_Information_Message(parent, pInfo); + } + + //h264_rbsp_trailing_bits(pInfo); + break; + case h264_NAL_UNIT_TYPE_SPS: + { + //OS_INFO("*****************************SPS**************************************\n"); + /// + /// Can not define local SPS since the Current local stack size limitation! 
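+    /// old_sps_id is saved first because slices reference an SPS indirectly
+    /// (slice -> PPS -> SPS): if the incoming SPS carries a different id,
+    /// the previously active one is restored from DDR afterwards.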
+    /// Could be changed after the limitation gone
+    ///
+        uint8_t old_sps_id=0;
+        vui_seq_parameters_t_not_used vui_seq_not_used;
+
+        old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+        h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+
+
+        status = h264_Parse_SeqParameterSet(parent, pInfo, &(pInfo->active_SPS), &vui_seq_not_used, (int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL);
+        if (status == H264_STATUS_OK) {
+            h264_Parse_Copy_Sps_To_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_SPS.seq_parameter_set_id);
+            pInfo->sps_valid = 1;
+
+            if (1==pInfo->active_SPS.pic_order_cnt_type) {
+                h264_Parse_Copy_Offset_Ref_Frames_To_DDR(pInfo,(int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL,pInfo->active_SPS.seq_parameter_set_id);
+            }
+
+#ifdef DUMP_HEADER_INFO
+            dump_sps(&(pInfo->active_SPS));
+#endif
+
+        }
+        ///// Restore the active SPS if new arrival's id changed
+        if (old_sps_id>=MAX_NUM_SPS) {
+            h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+            pInfo->active_SPS.seq_parameter_set_id = 0xff;
+        }
+        else {
+            if (old_sps_id!=pInfo->active_SPS.seq_parameter_set_id) {
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            }
+            else {
+                //h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set));
+                pInfo->active_SPS.seq_parameter_set_id = 0xff;
+            }
+        }
+
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+    }
+    break;
+    case h264_NAL_UNIT_TYPE_PPS:
+    {
+        //OS_INFO("*****************************PPS**************************************\n");
+
+        uint32_t old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+        uint32_t old_pps_id = pInfo->active_PPS.pic_parameter_set_id;
+
+        h264_memset(&pInfo->active_PPS, 0x0, sizeof(pic_param_set));
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+
+        if (h264_Parse_PicParameterSet(parent, pInfo, &pInfo->active_PPS)== H264_STATUS_OK)
+        {
+            h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_PPS.seq_parameter_set_id);
+            if (old_sps_id != pInfo->active_SPS.seq_parameter_set_id)
+            {
+                pInfo->Is_SPS_updated = 1;
+            }
+            if (pInfo->active_SPS.seq_parameter_set_id != 0xff) {
+                h264_Parse_Copy_Pps_To_DDR(pInfo, &pInfo->active_PPS, pInfo->active_PPS.pic_parameter_set_id);
+                pInfo->got_start = 1;
+                if (pInfo->sei_information.recovery_point)
+                {
+                    pInfo->img.recovery_point_found |= 2;
+
+                    //// Enable the RP recovery if no IDR ---Cisco
+                    if ((pInfo->img.recovery_point_found & 1)==0)
+                        pInfo->sei_rp_received = 1;
+                }
+            }
+            else
+            {
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            }
+#ifdef DUMP_HEADER_INFO
+            dump_pps(&(pInfo->active_PPS));
+#endif
+        } else {
+            if (old_sps_id<MAX_NUM_SPS)
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            if (old_pps_id<MAX_NUM_PPS)
+                h264_Parse_Copy_Pps_From_DDR(pInfo, &(pInfo->active_PPS), old_pps_id);
+        }
+
+    } //// End of PPS parsing
+    break;
+
+
+    case h264_NAL_UNIT_TYPE_EOSeq:
+    case h264_NAL_UNIT_TYPE_EOstream:
+
+        h264_parse_emit_eos(parent, pInfo);
+        h264_init_dpb(&(pInfo->dpb));
+
+        pInfo->is_current_workload_done=1;
+
+        /* picture level info which will always be initialized */
+        //h264_init_Info_under_sps_pps_level(pInfo);
+
+        ////reset the pInfo here
+        //viddec_h264_init(ctxt, (uint32_t *)parser->sps_pps_ddr_paddr, false);
+
+
+        status = H264_STATUS_OK;
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+        break;
+
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+#if 1
+        ///// primary_pic_type
+    {
+        uint32_t code = 0xff;
+        int32_t ret = 0;
+        ret = viddec_pm_get_bits(parent, (uint32_t *)&(code), 3);
+
+        if (ret != -1) {
+            //if(pInfo->got_start && (code == 0))
+            //{
+            //pInfo->img.recovery_point_found |= 4;
+            //}
+
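+            // primary_pic_type (3 bits, H.264 Table 7-5): 0 = I slices only,
+            // 1 = I/P, 2 = I/P/B, 3..7 = SI/SP combinations; type 0 marks an
+            // access unit that can serve as a decode start point.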
pInfo->primary_pic_type_plus_one = (uint8_t)(code)+1; + status = H264_STATUS_OK; + } + pInfo->number_of_first_au_info_nal_before_first_slice++; + break; + } +#endif + + case h264_NAL_UNIT_TYPE_Reserved1: + case h264_NAL_UNIT_TYPE_Reserved2: + case h264_NAL_UNIT_TYPE_Reserved3: + case h264_NAL_UNIT_TYPE_Reserved4: + case h264_NAL_UNIT_TYPE_Reserved5: + status = H264_STATUS_OK; + pInfo->number_of_first_au_info_nal_before_first_slice++; + break; + + case h264_NAL_UNIT_TYPE_filler_data: + status = H264_STATUS_OK; + break; + case h264_NAL_UNIT_TYPE_ACP: + break; + case h264_NAL_UNIT_TYPE_SPS_extension: + case h264_NAL_UNIT_TYPE_unspecified: + case h264_NAL_UNIT_TYPE_unspecified2: + status = H264_STATUS_OK; + //nothing + break; + default: + status = H264_STATUS_OK; + break; + } + + //pInfo->old_nal_unit_type = pInfo->nal_unit_type; + switch ( pInfo->nal_unit_type ) + { + case h264_NAL_UNIT_TYPE_IDR: + case h264_NAL_UNIT_TYPE_SLICE: + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + case h264_NAL_UNIT_TYPE_SPS: + case h264_NAL_UNIT_TYPE_PPS: + case h264_NAL_UNIT_TYPE_SEI: + case h264_NAL_UNIT_TYPE_EOSeq: + case h264_NAL_UNIT_TYPE_EOstream: + case h264_NAL_UNIT_TYPE_Reserved1: + case h264_NAL_UNIT_TYPE_Reserved2: + case h264_NAL_UNIT_TYPE_Reserved3: + case h264_NAL_UNIT_TYPE_Reserved4: + case h264_NAL_UNIT_TYPE_Reserved5: + { + pInfo->old_nal_unit_type = pInfo->nal_unit_type; + break; + } + default: + break; + } + + return status; +} + + + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +#ifndef VBP +static uint32_t viddec_h264_is_frame_start(void *ctxt) +{ + struct h264_viddec_parser* parser = ctxt; + uint32_t ret = 0; + + h264_Info * pInfo = &(parser->info); + + if (pInfo->img.g_new_frame) { + ret = 1; + } + + return ret; +} +#endif + +#ifndef VBP +uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int next_sc, + uint32_t *codec_specific_errors) +{ + struct h264_viddec_parser* parser = ctxt; + uint32_t ret = VIDDEC_PARSE_SUCESS; + h264_Info * pInfo = &(parser->info); + uint8_t is_stream_forced_to_complete=false; + + is_stream_forced_to_complete = (VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc); + + if (is_stream_forced_to_complete || (pInfo->is_current_workload_done)) + { + viddec_workload_t *wl; + viddec_frame_attributes_t *attrs; + + wl = viddec_pm_get_header( parent ); + attrs = &wl->attrs; + + if ((attrs->cont_size.width < 32) || (attrs->cont_size.width > 2048) || (attrs->cont_size.height < 32) || (attrs->cont_size.height>2048)) + { + attrs->cont_size.width = 32; + attrs->cont_size.height = 32; + pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + } + + *codec_specific_errors = pInfo->wl_err_curr; + pInfo->wl_err_curr = pInfo->wl_err_next; + pInfo->wl_err_next = 0; + + if (is_stream_forced_to_complete) + { + h264_parse_emit_eos(parent, pInfo); + } + ret = VIDDEC_PARSE_FRMDONE; + } + + return ret; +} +#endif + +#ifdef VBP +void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) +#else +static void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) +#endif +{ + /* Should return size of my structure */ + size->context_size = sizeof(struct h264_viddec_parser); + size->persist_size = MAX_NUM_SPS * sizeof(seq_param_set_all) + + 
MAX_NUM_PPS * sizeof(pic_param_set) +
+        MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE +
+        sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+#ifdef VBP
+void viddec_h264_flush(void *parent, void *ctxt)
+#else
+static void viddec_h264_flush(void *parent, void *ctxt)
+#endif
+{
+    int i;
+    struct h264_viddec_parser* parser = ctxt;
+    h264_Info * pInfo = &(parser->info);
+
+    /* just flush dpb and disable output */
+    h264_dpb_flush_dpb(pInfo, 0, pInfo->img.second_field, pInfo->active_SPS.num_ref_frames);
+
+    /* reset the dpb to its initial state, so the parser cannot store
+       stale data into the dpb while parsing the next slice */
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    for (i = 0; i < NUM_DPB_FRAME_STORES; i++)
+    {
+        p_dpb->fs[i].fs_idc = MPD_DPB_FS_NULL_IDC;
+        p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+    }
+    p_dpb->used_size = 0;
+    p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+    p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC;
+
+    return;
+}
+
+#ifndef VBP
+void viddec_h264_get_ops(viddec_parser_ops_t *ops)
+{
+    ops->init = viddec_h264_init;
+
+    ops->parse_syntax = viddec_h264_parse;
+    ops->get_cxt_size = viddec_h264_get_context_size;
+    ops->is_wkld_done = viddec_h264_wkld_done;
+    ops->is_frame_start = viddec_h264_is_frame_start;
+    ops->flush = viddec_h264_flush;
+    return;
+}
+#endif
+
diff --git a/mixvbp/vbp_plugin/h264/viddec_h264_workload.c b/mixvbp/vbp_plugin/h264/viddec_h264_workload.c
new file mode 100755
index 0000000..54c96db
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/viddec_h264_workload.c
@@ -0,0 +1,1195 @@
+/* Any workload management goes in this file */
+
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "h264.h"
+#include "h264parse.h"
+#include "viddec_fw_item_types.h"
+#include "h264parse_dpb.h"
+
+
+#include "viddec_fw_workload.h"
+#include
+#include "viddec_pm_utils_bstream.h"
+
+// picture parameter 1
+#define PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT(w)    (((uint32_t)w)&0x1)
+#define PUT_BSD_PP1_SLICE_TYPE_BITS(w)            ((((uint32_t)w)&0x7)<<1)
+#define PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(w)   ((((uint32_t)w)&0x3)<<4)
+#define PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(w)     ((((uint32_t)w)&0x1)<<6)
+#define PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(w)        ((((uint32_t)w)&0x3F)<<8)
+#define PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(w)        ((((uint32_t)w)&0x3F)<<16)
+
+// picture parameter 2
+#define PUT_BSD_PP2_CABAC_INIT_IDC_BITS(w)        (((uint32_t)w)&0x3)
+#define PUT_BSD_PP2_QP_BITS(w)                    ((((uint32_t)w)&0x3F)<<2)
+#define PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(w)       ((((uint32_t)w)&0x3)<<8)
+#define PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(w)  ((((uint32_t)w)&0xF)<<10)
+#define PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(w)      ((((uint32_t)w)&0xF)<<14)
+#define PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(w)        ((((uint32_t)w)&0x1)<<18)
+#define PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(w)      ((((uint32_t)w)&0x1F)<<19)
+#define PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(w)    ((((uint32_t)w)&0x1F)<<24)
+
+
+// slice start parameter
+#define PUT_BSD_SS_START_ADDR_BITS(w)    (((uint32_t)w)&0x7fff)      // 14:0 current slice start address
+#define PUT_BSD_SS_SKIP_FS_IDC_BITS(w)   ((((uint32_t)w)&0x3f)<<16)  // [5:0], [4:0] frame store idc, [5] - 0: top-field, 1: bottom field
+#define PUT_BSD_SS_SKIP_TYPE_BIT(w)      ((((uint32_t)w)&0x1)<<24)   // 0: P-skip, 1: I-skip
+#define PUT_BSD_SS_SKIP_REWIND_BITS(w)   ((((uint32_t)w)&0xf)<<28)   // number of MB or MBAFF pairs to rewind before skip
+
+//h264_dpb_init
+#define PUT_FRAME_WIDTH_MB_BITS(w)       (((uint32_t)w)&0x7F)
+#define PUT_FRAME_HEIGHT_MB_BITS(w)      ((((uint32_t)w)&0x7F)<<16)
+
+//dpb lut table init
+//#define PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(w)   ((((uint32_t)w)&0x1F)<<8)
+
+//h264 img init
+#define PUT_BSD_IMAGE_STRUCTURE_BITS(w)                    (((uint32_t)w)&0x3)
+#define PUT_BSD_IMAGE_IDR_BIT(w)                           ((((uint32_t)w)&0x1)<<2)
+#define PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(w)              ((((uint32_t)w)&0x1)<<3)
+#define PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(w)      ((((uint32_t)w)&0x1)<<4)
+#define PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(w)   ((((uint32_t)w)&0x1)<<5)
+#define PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(w)             ((((uint32_t)w)&0x1)<<6)
+#define PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(w)           ((((uint32_t)w)&0x1)<<7)
+#define PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(w)              ((((uint32_t)w)&0x1F)<<8)
+
+#define PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(w)     ((((uint32_t)w)&0x1)<<13)
+#define PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(w)             ((((uint32_t)w)&0x1)<<14)
+#define PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(w)       ((((uint32_t)w)&0x1)<<15)
+#define PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT(w)             ((((uint32_t)w)&0x1)<<16)
+#define PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(w)              ((((uint32_t)w)&0xFF)<<17)
+#define PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(w)         ((((uint32_t)w)&0x1)<<25)
+
+
+extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,
+                                                   int32_t NonExisting,
+                                                   int32_t use_old);
+
+extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames);
+
+
+
+void h264_translate_parser_info_to_frame_attributes(viddec_workload_t *wl, h264_Info *pInfo)
+{
+
+    viddec_frame_attributes_t *attrs = &wl->attrs;
+
+
+
+    //// Cont_size
+    attrs->cont_size.height = pInfo->img.FrameHeightInMbs*16;
+    attrs->cont_size.width = pInfo->img.PicWidthInMbs*16;
+
+    //// The following attributes will be updated at slice level
+    attrs->h264.used_for_reference = 0;
+    attrs->h264.top_field_first = 0;
+    attrs->h264.top_field_poc = 0;
+    attrs->h264.bottom_field_poc = 0;
+    attrs->h264.field_pic_flag = 0;
+
+#if 1
+/// Double-check the size later!!!!!
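+    // Crop units per chroma_format_idc (H.264 7.4.2.1.1), which the bit
+    // arithmetic below reproduces:
+    //   idc 1 (4:2:0): SubWidthC=2, SubHeightC=2
+    //   idc 2 (4:2:2): SubWidthC=2, SubHeightC=1
+    //   idc 3 (4:4:4): SubWidthC=1, SubHeightC=1
+    // CropUnitX = SubWidthC, CropUnitY = SubHeightC * (2 - frame_mbs_only_flag);
+    // for monochrome (idc 0), CropUnitX = 1 and CropUnitY = 2 - frame_mbs_only_flag.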
+ //attrs->h264.cropped_size.width = pInfo->img.PicWidthInMbs*16; + //attrs->h264.cropped_size.height = pInfo->img.PicWidthInMbs*16; + + if ( (pInfo->active_SPS.sps_disp.frame_cropping_flag) && + (pInfo->active_SPS.sps_disp.chroma_format_idc < 4)) + { + int32_t CropUnitX, CropUnitY; + int32_t SubWidthC, SubHeightC; + + if (pInfo->active_SPS.sps_disp.chroma_format_idc == 0) + { + CropUnitX = 1; + CropUnitY = 2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag; + } + else + { + SubWidthC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >> 1); + SubHeightC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >>1) + - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) & 0x1); + CropUnitX = SubWidthC; + CropUnitY = SubHeightC * (2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag); + } + + if ((int32_t)attrs->cont_size.height >(pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY)) + { + attrs->cont_size.height -= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY); + //attrs->h264.cropped_size.height-= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY); + } + } +/// Pan-Scan Info + +#endif + +} + + +static void h264_parse_update_frame_attributes(void *parent, h264_Info *pInfo) +{ + viddec_workload_t *wl_cur, *wl_next; + viddec_frame_attributes_t *attrs; + uint8_t frame_type=0; + + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + wl_cur = viddec_pm_get_header( parent ); + attrs = &wl_cur->attrs; + } + else + { + wl_next = viddec_pm_get_next_header (parent); + attrs = &wl_next->attrs; + } + + /////////update frame type + if ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&(0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)) + { + frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_FRAME_OFFSET)) )>> FRAME_TYPE_FRAME_OFFSET; + switch (frame_type) + { + case FRAME_TYPE_IDR: + attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; + break; + case FRAME_TYPE_I: + attrs->frame_type = VIDDEC_FRAME_TYPE_I; + break; + case FRAME_TYPE_P: + attrs->frame_type = VIDDEC_FRAME_TYPE_P; + break; + case FRAME_TYPE_B: + attrs->frame_type = VIDDEC_FRAME_TYPE_B; + break; + default: + attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; + break; + } + + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; + } + else + { + frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_TOP_OFFSET)) )>> FRAME_TYPE_TOP_OFFSET; + switch (frame_type) + { + case FRAME_TYPE_IDR: + attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; + break; + case FRAME_TYPE_I: + attrs->frame_type = VIDDEC_FRAME_TYPE_I; + break; + case FRAME_TYPE_P: + attrs->frame_type = VIDDEC_FRAME_TYPE_P; + break; + case FRAME_TYPE_B: + attrs->frame_type = VIDDEC_FRAME_TYPE_B; + break; + default: + attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; + break; + + } + + frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_BOTTOM_OFFSET)) )>> FRAME_TYPE_BOTTOM_OFFSET; + switch (frame_type) + { + case FRAME_TYPE_IDR: + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_IDR; + break; + case FRAME_TYPE_I: + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_I; + break; + case FRAME_TYPE_P: + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_P; + break; + case FRAME_TYPE_B: + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_B; + break; + default: + attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; + break; + + } + } + + /////////update is_referece flag + attrs->h264.used_for_reference |= (pInfo->SliceHeader.nal_ref_idc == 0)? 
0: 1; + + /////////update POC + attrs->h264.top_field_poc = pInfo->img.toppoc; + attrs->h264.bottom_field_poc = pInfo->img.bottompoc; + + //////// update TFF + if (attrs->h264.top_field_poc <= attrs->h264.bottom_field_poc) { + attrs->h264.top_field_first = 1; + } else { + attrs->h264.top_field_first = 0; + } + + /////// update field_pic_flag + //attrs->h264.field_pic_flag |= (pInfo->SliceHeader.field_pic_flag << pInfo->SliceHeader.bottom_field_flag); + attrs->h264.field_pic_flag |= pInfo->SliceHeader.field_pic_flag; + + return; +} + + +static void h264_fill_slice_data(h264_Info *pInfo, h264_slice_data * p_slice_data) +{ + uint32_t data=0; + uint32_t first_mb_in_slice =0; + + + + ////////////fill pic parameters 1 + data = PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT( (pInfo->SliceHeader.nal_ref_idc == 0) ) + + PUT_BSD_PP1_SLICE_TYPE_BITS(pInfo->SliceHeader.slice_type) + + PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(pInfo->active_PPS.weighted_bipred_idc) + + PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(pInfo->active_PPS.weighted_pred_flag) + + PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(pInfo->SliceHeader.num_ref_idx_l0_active) + + PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(pInfo->SliceHeader.num_ref_idx_l1_active); + p_slice_data->h264_bsd_slice_p1 = data; + + + ///////////fill pic parameters 2 + data = PUT_BSD_PP2_CABAC_INIT_IDC_BITS(pInfo->SliceHeader.cabac_init_idc) + + PUT_BSD_PP2_QP_BITS( (pInfo->SliceHeader.slice_qp_delta + pInfo->active_PPS.pic_init_qp_minus26+26) ) + + PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(pInfo->SliceHeader.disable_deblocking_filter_idc) + + PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_alpha_c0_offset_div2) + + PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_beta_offset_div2) + + PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(pInfo->SliceHeader.direct_spatial_mv_pred_flag) + + PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(pInfo->active_PPS.chroma_qp_index_offset) + + PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(pInfo->active_PPS.second_chroma_qp_index_offset); + + p_slice_data->h264_bsd_slice_p2 = data; + + /////////fill slice start + first_mb_in_slice = pInfo->SliceHeader.first_mb_in_slice; + + data = PUT_BSD_SS_START_ADDR_BITS(first_mb_in_slice); + data |= PUT_BSD_SS_SKIP_FS_IDC_BITS( pInfo->h264_list_replacement) | + PUT_BSD_SS_SKIP_TYPE_BIT(0) | + PUT_BSD_SS_SKIP_REWIND_BITS((pInfo->img.MbaffFrameFlag? 
2: 3));
+
+    p_slice_data->h264_bsd_slice_start = data;
+
+}
+
+
+static void h264_parse_emit_4X4_scaling_matrix( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t wi;
+
+    uint32_t i=0, n_items=0;
+    uint32_t qm_type=0;
+
+
+    for ( i = 0; i < 6; i++ )
+    {
+        qm_type = FB_QM;
+        if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first
+        {
+            if (pInfo->active_SPS.seq_scaling_list_present_flag[i])
+            {
+                pInfo->qm_present_list |= ((0x1)<<i);
+
+                if (pInfo->active_SPS.UseDefaultScalingMatrix4x4Flag[i]) {
+                    qm_type = DEFAULT_QM;
+                } else {
+                    qm_type = SPS_QM;
+                }
+            }
+        }
+
+        if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps
+        {
+            if (pInfo->active_PPS.pic_scaling_list_present_flag[i])
+            {
+                pInfo->qm_present_list |= ((0x1)<<i);
+
+                if (pInfo->active_PPS.UseDefaultScalingMatrix4x4Flag[i]) {
+                    qm_type = DEFAULT_QM;
+                } else {
+                    qm_type = PPS_QM;
+                }
+            }
+            else
+            {
+                if ((i != 0) && (i != 3) && (i < 6)) {
+                    pInfo->qm_present_list &= ~((0x1)<<i);
+                    qm_type = FB_QM;
+                }
+            }
+        }
+
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_SCALING_MATRIX;
+
+        //    data_offset 0x aa bb cc dd
+        //      bb is the workload item offset
+        //      cc is the qm_type
+        //      dd is the matrix number
+        //
+        switch (qm_type)
+        {
+        case (SPS_QM):
+        {
+            for (n_items =0; n_items<2; n_items++)
+            {
+                wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8);
+                wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+0]))+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+1]))<<8)+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+2]))<<16)+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+3]))<<24);
+                wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+4]))+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+5]))<<8)+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+6]))<<16)+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+7]))<<24);
+                //cur is empty, fill new frame in cur
+                viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+            }
+
+            break;
+        }
+        case (PPS_QM): {
+
+            for (n_items =0; n_items<2; n_items++)
+            {
+                wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8);
+                wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+0]))+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+1]))<<8)+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+2]))<<16)+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+3]))<<24);
+                wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+4]))+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+5]))<<8)+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+6]))<<16)+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+7]))<<24);
+                //cur is empty, fill new frame in cur
+                viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+            }
+
+            break;
+        }
+        case (DEFAULT_QM):
+        {
+
+            wi.data.data_offset = i + (DEFAULT_QM << 4);
+            wi.data.data_payload[0] = 0;
+            wi.data.data_payload[1] = 0;
+            //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+            break;
+        }
+        default:
+        {
+            break;
+        }
+        }
+    }
+
+}
+
+static void h264_parse_emit_8X8_scaling_matrix( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t wi;
+
+    uint32_t i=0, n_items=0;
+    uint32_t qm_type=0;
+
+    for ( i = 6; i < 8; i++ )
+    {
+        qm_type = FB_QM;
+        if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first
+        {
+            if (pInfo->active_SPS.seq_scaling_list_present_flag[i])
+            {
+                pInfo->qm_present_list |= ((0x1)<<i);
+
+                if (pInfo->active_SPS.UseDefaultScalingMatrix8x8Flag[i-6])
+                {
+                    qm_type = DEFAULT_QM;
+                }
+                else
+                {
+                    qm_type = SPS_QM;
+                }
+            }
+        }
+
+        if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps
+        {
+            if (pInfo->active_PPS.pic_scaling_list_present_flag[i])
+            {
+                pInfo->qm_present_list |= ((0x1)<<i);
+
+                if (pInfo->active_PPS.UseDefaultScalingMatrix8x8Flag[i-6])
{ + qm_type = DEFAULT_QM; + } + else + { + qm_type = PPS_QM; + } + } + } + wi.vwi_type = VIDDEC_WORKLOAD_H264_SCALING_MATRIX; + + // data_offset 0x aa bb cc dd + // bb is the workload item offset + // cc is the qm_type + // dd is the matrix number + // + switch (qm_type) + { + case (SPS_QM): + { + for (n_items =0; n_items<8; n_items++) + { + wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8); + wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+0]))+ + (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+ + (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+ + (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+3]))<<24); + wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+4]))+ + (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+ + (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+ + (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+7]))<<24); + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } + break; + } + case (PPS_QM): + { + for (n_items =0; n_items<8; n_items++) + { + wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8); + wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+0]))+ + (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+ + (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+ + (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+3]))<<24); + wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+4]))+ + (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+ + (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+ + (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+7]))<<24); + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } + break; + } + case (DEFAULT_QM): + { + wi.data.data_offset = i + (DEFAULT_QM << 4); + wi.data.data_payload[0] = 0; + wi.data.data_payload[1] = 0; + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + break; + } + default: { + break; + } + } + } + +} + + + +static void h264_fill_pic_data(h264_Info *pInfo, h264_pic_data * p_pic_data) +{ + uint32_t data=0; + uint32_t dec_idc =0; + uint32_t frame_structure =0; + + //fill h264_dpb_init + data = PUT_FRAME_WIDTH_MB_BITS(pInfo->dpb.PicWidthInMbs) + + PUT_FRAME_HEIGHT_MB_BITS(pInfo->dpb.FrameHeightInMbs); + + p_pic_data->h264_dpb_init = data; + + ////////////////////////////////file current pic info + data = 0; + dec_idc = pInfo->dpb.fs_dec_idc; + frame_structure = pInfo->img.structure; + if (frame_structure == FRAME) + frame_structure=0; + //data = PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc); + + //p_pic_data->h264_cur_bsd_img_init= data; + + data = PUT_BSD_IMAGE_STRUCTURE_BITS(frame_structure) + + PUT_BSD_IMAGE_IDR_BIT(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + + PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(pInfo->img.MbaffFrameFlag) + + PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(pInfo->active_PPS.entropy_coding_mode_flag) + + PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(pInfo->active_PPS.constrained_intra_pred_flag) + + PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(pInfo->active_SPS.sps_disp.frame_mbs_only_flag) + + 
PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(pInfo->active_SPS.sps_disp.direct_8x8_inference_flag) + + PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(pInfo->active_PPS.transform_8x8_mode_flag) + + PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(((pInfo->active_SPS.sps_disp.chroma_format_idc==0)? 0x1: 0x0)) + + PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(0x0) + + PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT((pInfo->active_PPS.pic_scaling_matrix_present_flag||pInfo->active_SPS.seq_scaling_matrix_present_flag)) + + PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->qm_present_list) + + PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(0x1) + + PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc); + + p_pic_data->h264_cur_bsd_img_init= data; + + //to do: add qm list + //PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->img.q .qm_present_list) + + //printf("structure = %d, tpoc = %d, bpoc = %d\n", pInfo->img.structure, pInfo->img.toppoc, pInfo->img.bottompoc); + + if (pInfo->img.structure == FRAME) + { + // Write down POC + p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc; + p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc; + } else if (pInfo->img.structure == TOP_FIELD) + { + // Write down POC + p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc; + p_pic_data->h264_cur_mpr_bf_poc = 0; + } + else if (pInfo->img.structure == BOTTOM_FIELD) + { + // Write down POC + p_pic_data->h264_cur_mpr_tf_poc = 0; + p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc; + } + else + { + // Write down POC + p_pic_data->h264_cur_mpr_tf_poc = 0; + p_pic_data->h264_cur_mpr_bf_poc = 0; + } + + return; +} + +static void h264_parse_emit_sps(void *parent, h264_Info *pInfo) +{ + viddec_workload_item_t wi; + + if (pInfo->Is_SPS_updated) + { + viddec_fw_reset_workload_item(&wi); + wi.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO; + + viddec_fw_h264_sps_set_profile_idc(&(wi.h264_sps), pInfo->active_SPS.profile_idc); + viddec_fw_h264_sps_set_level_idc(&(wi.h264_sps), pInfo->active_SPS.level_idc); + viddec_fw_h264_sps_set_chroma_format_idc(&(wi.h264_sps), pInfo->active_SPS.sps_disp.chroma_format_idc); + viddec_fw_h264_sps_set_num_ref_frames(&(wi.h264_sps), pInfo->active_SPS.num_ref_frames); + viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(&(wi.h264_sps), pInfo->active_SPS.gaps_in_frame_num_value_allowed_flag); + viddec_fw_h264_sps_set_frame_mbs_only_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_mbs_only_flag); + viddec_fw_h264_sps_set_frame_cropping_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_cropping_flag); + viddec_fw_h264_sps_set_vui_parameters_present_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.vui_parameters_present_flag); + wi.h264_sps.pic_width_in_mbs_minus1 = pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1; + wi.h264_sps.pic_height_in_map_units_minus1 = pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1; + + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + + viddec_fw_reset_workload_item(&wi); + if (pInfo->active_SPS.sps_disp.frame_cropping_flag) + { + wi.vwi_type = VIDDEC_WORKLOAD_H264_CROPPING; + viddec_fw_h264_cropping_set_left(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_left_offset); + viddec_fw_h264_cropping_set_right(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_right_offset); + viddec_fw_h264_cropping_set_top(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_top_offset); + viddec_fw_h264_cropping_set_bottom(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset); + //cur is 
empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } + viddec_fw_reset_workload_item(&wi); + if (pInfo->active_SPS.sps_disp.vui_parameters_present_flag == 1) + { + wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO; + viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag); + viddec_fw_h264_vui_set_video_signal_type_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag); + viddec_fw_h264_vui_set_pic_struct_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag); + viddec_fw_h264_vui_set_timing_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag); + viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag); + viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag); + + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag == 1) + { + viddec_fw_h264_vui_set_aspect_ratio_idc(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc); + if (h264_AR_Extended_SAR == pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc) + { + viddec_fw_h264_vui_set_sar_width(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_width); + viddec_fw_h264_vui_set_sar_height(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_height); + } + } + + + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag) + { + viddec_fw_h264_vui_set_colour_description_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag); + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag) + { + viddec_fw_h264_vui_set_colour_primaries(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_primaries); + viddec_fw_h264_vui_set_transfer_characteristics(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.transfer_characteristics); + } + viddec_fw_h264_vui_set_video_format(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_format); + } + + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1) + { + viddec_fw_h264_vui_set_fixed_frame_rate_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.fixed_frame_rate_flag); + } + + if ( (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) + || (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)) + { + viddec_fw_h264_vui_set_low_delay_hrd_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.low_delay_hrd_flag); + } + + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } + + viddec_fw_reset_workload_item(&wi); + + if (pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1) + { + wi.vwi_type = VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO; + + wi.h264_vui_time_info.num_units_in_tick = pInfo->active_SPS.sps_disp.vui_seq_parameters.num_units_in_tick; + wi.h264_vui_time_info.time_scale = pInfo->active_SPS.sps_disp.vui_seq_parameters.time_scale; + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } 
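+        // From the num_units_in_tick/time_scale pair emitted above, the
+        // nominal rate follows directly (H.264 Annex E): field rate =
+        // time_scale / num_units_in_tick, so frame rate =
+        // time_scale / (2 * num_units_in_tick) for frame-coded content,
+        // e.g. 60000 / (2 * 1001) = 29.97 fps.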
+        pInfo->Is_SPS_updated =0;
+
+    }
+
+    return;
+}
+
+
+
+
+static void h264_parse_emit_ref_list( void *parent, h264_Info *pInfo, uint32_t list_id)
+{
+    uint32_t i=0, nitems=0, byte_index=0, data=0, data_writed=0;
+    uint8_t *p_list;
+    viddec_workload_item_t wi;
+
+    if (0 == list_id)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_0;
+
+        if ( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+            if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+            {
+                p_list = pInfo->slice_ref_list0;
+            }
+            else
+            {
+                p_list = pInfo->dpb.listX_0;
+            }
+        }
+        else
+        {
+            nitems =0;
+            p_list = pInfo->dpb.listX_0;
+        }
+    }
+    else
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_1;
+
+        if ( h264_PtypeB==pInfo->SliceHeader.slice_type)
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l1_active;
+            if (pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag)
+            {
+                p_list = pInfo->slice_ref_list1;
+            }
+            else
+            {
+                p_list = pInfo->dpb.listX_1;
+            }
+        }
+        else
+        {
+            nitems = 0;
+            p_list = pInfo->dpb.listX_1;
+        }
+
+    }
+
+    if (0 == nitems)
+    {
+        return;
+    }
+
+    byte_index =0;
+    data_writed=0;
+
+
+    for (i=0; i < 32; i++)
+    {
+        if (byte_index == 0) data = 0;
+
+        if (i<nitems)
+        {
+            if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[ (p_list[i]&0x1f) ])))
+            {
+                data |= (pInfo->h264_list_replacement) << byte_index;
+            }
+            else
+            {
+                data |= (p_list[i] & 0x7f) << byte_index;
+            }
+        }
+        else
+        {
+            data |= (0x80) << byte_index;
+        }
+
+
+        if (byte_index == 24)
+        {
+            byte_index = 0;
+            wi.data.data_offset = data_writed&(~0x1);
+            wi.data.data_payload[data_writed&0x1]=data;
+
+            data =0;
+
+            if (data_writed&0x1)
+            {
+                //cur is empty, fill new frame in cur
+                viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+            }
+            data_writed ++;
+        }
+        else
+        {
+            byte_index += 8;
+        }
+    }
+
+}
+
+
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t wi;
+    h264_slice_data slice_data;
+
+    uint32_t i=0, nitems=0, data=0;
+    uint32_t bits_offset =0, byte_offset =0;
+    uint8_t is_emul =0;
+
+    ////////////////////// Update frame attributes/////////////////
+    h264_parse_update_frame_attributes(parent,pInfo);
+
+
+    if (pInfo->SliceHeader.sh_error) {
+        // Error type definition, refer to viddec_fw_common_defs.h
+        //     if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17)
+        //     if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18)
+        //     if this is frame based, both bits should be set
+
+        if (pInfo->push_to_cur) {
+            pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+            pInfo->wl_err_curr |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET);
+        } else {
+            pInfo->wl_err_next |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+            pInfo->wl_err_next |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET);
+        }
+    }
+
+
+    ////////////////////// Update Reference list //////////////////
+    if ( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+    {
+        if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+            for (i=0; i<nitems; i++)
+            {
+                if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+        else
+        {
+            nitems = pInfo->dpb.listXsize[0];
+
+            for (i=0; i<nitems; i++)
+            {
+                if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+
+    }
+    else
+    {
+        nitems =0;
+    }
+    /////file ref list 0
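+    // Packing note for the two calls below: each list entry occupies one
+    // byte of the workitem payload (four entries per 32-bit word); 0x80
+    // marks an unused slot, and non-existent frame stores are swapped for
+    // h264_list_replacement, computed above from the first valid entry.
+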
h264_parse_emit_ref_list(parent, pInfo, 0); + + /////file ref list 1 + h264_parse_emit_ref_list(parent, pInfo, 1); + + ///////////////////////////////////// Slice Data //////////////////////////////// + h264_fill_slice_data(pInfo, &slice_data); + + wi.vwi_type = VIDDEC_WORKLOAD_H264_SLICE_REG; + + wi.data.data_offset = slice_data.h264_bsd_slice_start; + wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1; + wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2; + + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + + ///////////////////////////predict weight table item and data if have/////////////////////////// + if (pInfo->h264_pwt_enabled) + { + wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET; + wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1; + wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset; + wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset; + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + viddec_pm_append_workitem( parent , &wi, false); + + wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; + wi.es.es_flags = 0; + viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1); + } + else + { + viddec_pm_append_workitem( parent , &wi, true); + + wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; + wi.es.es_flags = 0; + viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0); + } + } + + + ////////////////////////////////// Update ES Buffer for Slice /////////////////////// + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset); + + if (pInfo->active_PPS.entropy_coding_mode_flag) + { + if (0!=bits_offset) { + viddec_pm_get_bits(parent, &data, 8-bits_offset); + } + } + else + { + if (0!=bits_offset) { + wi.vwi_type = VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET; + wi.data.data_offset = bits_offset; + wi.data.data_payload[0]=0; + wi.data.data_payload[1]=0; + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } + } + + if (pInfo->push_to_cur) //cur is empty, fill new frame in cur + { + viddec_pm_append_pixeldata( parent ); + } + else + { + viddec_pm_append_pixeldata_next( parent); + } + + return; +} + + +void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) +{ + + viddec_workload_item_t wi; + + const uint32_t *pl; + uint32_t i=0,nitems=0; + + h264_pic_data pic_data; + + pInfo->qm_present_list=0; + + h264_parse_emit_4X4_scaling_matrix(parent, pInfo); + h264_parse_emit_8X8_scaling_matrix(parent, pInfo); + + h264_fill_pic_data(pInfo, &pic_data); + + // How many payloads must be generated + nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up + + pl = (const uint32_t *) &pic_data; + + // Dump slice data to an array of workitems, to do pl access non valid mem + for ( i = 0; i < nitems; i++ ) + { + wi.vwi_type = VIDDEC_WORKLOAD_H264_PIC_REG; + wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct + wi.data.data_payload[0] = pl[0]; + wi.data.data_payload[1] = pl[1]; + pl += 2; + //cur is empty, fill new frame in cur + viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); + } + + return; +} + +void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) +{ + + viddec_workload_item_t wi; + 
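+    // Routing note: items for the frame already under way go into the
+    // "current" workload (push_to_cur = 1); after a frame boundary is seen,
+    // items go into the "next" workload instead. The boolean passed as the
+    // last argument of viddec_pm_append_workitem() selects between the two.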
+    uint32_t i=0,nitems=0;
+
+    ///////////////////////// Frame attributes//////////////////////////
+
+    //Push data into current workload if first frame or frame boundary already detected by non slice nal
+    if ( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal))
+    {
+        viddec_workload_t *wl_cur = viddec_pm_get_header( parent );
+        //pInfo->img.g_new_frame = 0;
+        pInfo->Is_first_frame_in_stream =0;
+        pInfo->is_frame_boundary_detected_by_non_slice_nal=0;
+        pInfo->push_to_cur = 1;
+        h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo);
+    }
+    else // move to cur if frame boundary detected by previous non slice nal, or move to next if not
+    {
+        viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+
+        pInfo->push_to_cur = 0;
+        h264_translate_parser_info_to_frame_attributes(wl_next, pInfo);
+
+        pInfo->is_current_workload_done=1;
+    }
+
+    ///////////////////// SPS/////////////////////
+    h264_parse_emit_sps(parent, pInfo);
+
+    /////////////////////display frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    /////////////////////flush frames (do not display)/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_dropped;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 + pInfo->dpb.frame_id_need_to_be_dropped[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+    }
+    pInfo->dpb.frame_numbers_need_to_be_dropped =0;
+
+    /////////////////////update DPB frames/////////////////////
+    nitems = pInfo->dpb.used_size;
+    for (i=0; i<nitems; i++)
+    {
+        uint32_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id;
+            wi.ref_frame.reference_id = fs_id;
+            wi.ref_frame.luma_phys_addr = 0;
+            wi.ref_frame.chroma_phys_addr = 0;
+            //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+        }
+    }
+
+    /////////////////////update dpb frames info (poc)/////////////////////
+    nitems = pInfo->dpb.used_size;
+    for (i=0; i<nitems; i++)
+    {
+        uint32_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_DPB_FRAME_POC;
+            wi.data.data_offset = fs_id;
+            //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc);
+
+            switch (viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id])))
+            {
+            case (FRAME): {
+                wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                break;
+            };
+
+            case (TOP_FIELD): {
+                wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                wi.data.data_payload[1] = 0;
+                break;
+            };
+
+            case (BOTTOM_FIELD): {
+                wi.data.data_payload[0] = 0;
+                wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                break;
+            };
+
+            default : {
+                wi.data.data_payload[0] = 0;
+                wi.data.data_payload[1] = 0;
+                break;
+            };
+            }
+            //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+        }
+    }
+
+    /////////////////////Alloc buffer for current Existing frame/////////////////////
+    if (0!=pInfo->dpb.frame_numbers_need_to_be_allocated)
+    {
+        if (pInfo->push_to_cur)
+        {
+            viddec_workload_t *wl_cur = viddec_pm_get_header (parent);
+            wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+        else
+        {
+            viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+            wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+    return;
+}
+
+
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
+{
+
+    uint32_t nitems=0, i=0;
+    viddec_workload_item_t wi;
+
+
+    wi.vwi_type = VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY;
+    wi.ref_frame.reference_id = 0;
+    wi.ref_frame.luma_phys_addr = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+
+    //cur is empty, fill new frame in cur
+    viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+
+    //// Now we can flush out all frames in the DPB for display
+
+    if (MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)
+    {
+        if (viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3)
+        {
+            h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc);  //, DANGLING_TYPE_GAP_IN_FRAME
+        }
+    }
+
+
+    h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+    h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+    /////////////////////display frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            viddec_pm_append_workitem( parent, &wi , false);
+            viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+        }
+        else
+        {
+            viddec_pm_append_workitem( parent, &wi , true);
+            viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    return;
+}
+
+
+
+
+
+
diff --git a/mixvbp/vbp_plugin/mp2/include/mpeg2.h b/mixvbp/vbp_plugin/mp2/include/mpeg2.h
new file mode 100755
index 0000000..4600f39
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp2/include/mpeg2.h
@@ -0,0 +1,195 @@
+#ifndef _MPEG2_H
+#define _MPEG2_H
+
+/**
+ * mpeg2.h
+ * -------
+ * This file contains all the necessary enumerations and structures needed
from + * the MPEG-2 Specification. + */ + +/* Max Pan-Scan offsets */ +#define MPEG2_MAX_VID_OFFSETS 3 + +/* Quantization matrix size */ +#define MPEG2_QUANT_MAT_SIZE 64 + +/* MPEG2 Start Code Values */ +typedef enum { + MPEG2_SC_PICTURE = 0x00, + MPEG2_SC_SLICE_HDR = 0x01, + MPEG2_SC_SLICE_MIN = 0x01, + MPEG2_SC_SLICE_MAX = 0xAF, + MPEG2_SC_USER_DATA = 0xB2, + MPEG2_SC_SEQ_HDR = 0xB3, + MPEG2_SC_SEQ_ERR = 0xB4, + MPEG2_SC_EXT = 0xB5, + MPEG2_SC_SEQ_END = 0xB7, + MPEG2_SC_GROUP = 0xB8, + MPEG2_SC_SYS_MIN = 0xB9, + MPEG2_SC_SYS_MAX = 0xFF, + MPEG2_SC_ALL = 0xFF +} mpeg2_start_codes; + +/* MPEG2 Extension Start Code ID */ +typedef enum { + MPEG2_EXT_SEQ = 1, + MPEG2_EXT_SEQ_DISP = 2, + MPEG2_EXT_QUANT_MAT = 3, + MPEG2_EXT_COPYRIGHT = 4, + MPEG2_EXT_SEQ_SCAL = 5, + MPEG2_EXT_PIC_DISP = 7, + MPEG2_EXT_PIC_CODING = 8, + MPEG2_EXT_PIC_SPA_SCAL = 9, + MPEG2_EXT_PIC_TEMP_SCAL = 10, + MPEG2_EXT_ALL = 11 +} mpeg2_ext_start_codes; + +/* MPEG2 Picture Coding Type Values */ +typedef enum { + MPEG2_PC_TYPE_FORBIDDEN = 0, + MPEG2_PC_TYPE_I = 1, + MPEG2_PC_TYPE_P = 2, + MPEG2_PC_TYPE_B = 3 +} mpeg2_picture_type; + +/* MPEG2 Picture Structure Type Values */ +typedef enum { + MPEG2_PIC_STRUCT_RESERVED = 0, + MPEG2_PIC_STRUCT_TOP = 1, + MPEG2_PIC_STRUCT_BOTTOM = 2, + MPEG2_PIC_STRUCT_FRAME = 3 +} mpeg2_picture_structure; + +/* MPEG2 Chroma Format Values */ +typedef enum { + MPEG2_CF_RESERVED = 0, + MPEG2_CF_420 = 1, + MPEG2_CF_422 = 2, + MPEG2_CF_444 = 3 +} mpeg2_chroma_format; + +/* MPEG2 Parser Structures */ +/* Sequence Header Info */ +struct mpeg2_sequence_hdr_info +{ + uint32_t horizontal_size_value; + uint32_t vertical_size_value; + uint32_t aspect_ratio_information; + uint32_t frame_rate_code; + uint32_t bit_rate_value; + uint32_t vbv_buffer_size_value; + uint32_t constrained_parameters_flag; +}; + +/* Group of Pictures Header Info */ +struct mpeg2_gop_hdr_info +{ + uint32_t closed_gop; + uint32_t broken_link; +}; + +/* Picture Header */ +struct mpeg2_picture_hdr_info +{ + uint32_t temporal_reference; + uint32_t picture_coding_type; + uint32_t full_pel_forward_vect; + uint32_t forward_f_code; + uint32_t full_pel_backward_vect; + uint32_t backward_f_code; +}; + +/* Sequence Extension Info */ +struct mpeg2_sequence_ext_info +{ + uint32_t profile_and_level_indication; + uint32_t progressive_sequence; + uint32_t chroma_format; + uint32_t horizontal_size_extension; + uint32_t vertical_size_extension; + uint32_t bit_rate_extension; + uint32_t vbv_buffer_size_extension; + uint32_t frame_rate_extension_n; + uint32_t frame_rate_extension_d; +}; + +/* Sequence Display Extension Info */ +struct mpeg2_sequence_disp_ext_info +{ + uint32_t video_format; + uint32_t colour_description; + uint32_t colour_primaries; + uint32_t transfer_characteristics; + uint32_t display_horizontal_size; + uint32_t display_vertical_size; +}; + +/* Sequence scalable extension Info */ +struct mpeg2_sequence_scal_ext_info +{ + uint32_t scalable_mode; +}; + +/* Picture Coding Extension */ +struct mpeg2_picture_coding_ext_info +{ + uint32_t fcode00; + uint32_t fcode01; + uint32_t fcode10; + uint32_t fcode11; + uint32_t intra_dc_precision; + uint32_t picture_structure; + uint32_t top_field_first; + uint32_t frame_pred_frame_dct; + uint32_t concealment_motion_vectors; + uint32_t q_scale_type; + uint32_t intra_vlc_format; + uint32_t alternate_scan; + uint32_t repeat_first_field; + uint32_t chroma_420_type; + uint32_t progressive_frame; + uint32_t composite_display_flag; +}; + +/* Picture Display Extension */ +struct mpeg2_picture_disp_ext_info 
+{ + uint32_t frame_center_horizontal_offset[MPEG2_MAX_VID_OFFSETS]; + uint32_t frame_center_vertical_offset[MPEG2_MAX_VID_OFFSETS]; +}; + +/* Quantization Matrix Extension */ +struct mpeg2_quant_ext_info +{ + uint32_t load_intra_quantiser_matrix; + uint32_t load_non_intra_quantiser_matrix; + uint32_t load_chroma_intra_quantiser_matrix; + uint32_t load_chroma_non_intra_quantiser_matrix; +}; + +/* Quantization Matrices */ +struct mpeg2_quant_matrices +{ + uint8_t intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE]; + uint8_t non_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE]; + uint8_t chroma_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE]; + uint8_t chroma_non_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE]; +}; + +/* MPEG2 Info */ +struct mpeg2_info +{ + struct mpeg2_sequence_hdr_info seq_hdr; + struct mpeg2_gop_hdr_info gop_hdr; + struct mpeg2_picture_hdr_info pic_hdr; + struct mpeg2_sequence_ext_info seq_ext; + struct mpeg2_sequence_disp_ext_info seq_disp_ext; + struct mpeg2_sequence_scal_ext_info seq_scal_ext; + struct mpeg2_picture_coding_ext_info pic_cod_ext; + struct mpeg2_picture_disp_ext_info pic_disp_ext; + struct mpeg2_quant_ext_info qnt_ext; + struct mpeg2_quant_matrices qnt_mat; +}; + +#endif diff --git a/mixvbp/vbp_plugin/mp2/include/viddec_mpeg2.h b/mixvbp/vbp_plugin/mp2/include/viddec_mpeg2.h new file mode 100755 index 0000000..22d6236 --- /dev/null +++ b/mixvbp/vbp_plugin/mp2/include/viddec_mpeg2.h @@ -0,0 +1,231 @@ +#ifndef _VIDDEC_MPEG2_H +#define _VIDDEC_MPEG2_H + +/** + * viddec_mpeg2.h + * -------------- + * This header file contains all the necessary state information and function + * prototypes for the MPEG2 parser. This header also defines the debug macros + * used by the MPEG2 parser to emit debug messages in host mode. + */ + +#include "viddec_fw_debug.h" +#include "viddec_parser_ops.h" +#include "mpeg2.h" + +/* Debug Print Macros */ +#define MPEG2_DEB(x...) DEB("MPEG2_Parser: "x) +#define MPEG2_FA_DEB(x...) 
DEB("MPEG2_Frame_attribute: "x) + +/* Bit masks */ +#define MPEG2_BIT_MASK_11 0x7ff /* Used for masking Height and Width */ +#define MPEG2_BIT_MASK_8 0xff /* Used fro masking start code byte */ +#define MPEG2_BIT_MASK_4 0xf /* Used for masking Level */ +#define MPEG2_BIT_MASK_3 0x7 /* Used for masking Profile */ + +/* MPEG2 Start code and prefix size */ +#define MPEG2_SC_AND_PREFIX_SIZE 32 + +/* Number of DMEM Workload Items */ +#define MPEG2_NUM_DMEM_WL_ITEMS 2 + +/* Number of Quantization Matrix Workload Items */ +#define MPEG2_NUM_QMAT_WL_ITEMS 32 + +/* Maximum supported content size */ +#define MPEG2_MAX_CONTENT_WIDTH 2048 +#define MPEG2_MAX_CONTENT_HEIGHT 2048 + +/* Others */ +#define MPEG2_BITS_EIGHT 8 + + +/* MPEG2 Stream Levels */ +typedef enum { + MPEG2_LEVEL_SEQ = 0, + MPEG2_LEVEL_GOP, + MPEG2_LEVEL_PIC +} mpeg2_stream_levels; + +/* MPEG2 Headers and Extensions */ +typedef enum { + MPEG2_HEADER_NONE = 0, + MPEG2_HEADER_SEQ = 1 << 0, + MPEG2_HEADER_SEQ_EXT = 1 << 1, + MPEG2_HEADER_SEQ_DISP_EXT = 1 << 2, + MPEG2_HEADER_GOP = 1 << 3, + MPEG2_HEADER_PIC = 1 << 4, + MPEG2_HEADER_PIC_COD_EXT = 1 << 5, + MPEG2_HEADER_PIC_DISP_EXT = 1 << 6, + MPEG2_HEADER_SEQ_SCAL_EXT = 1 << 7 +} mpeg2_headers; + +/* MPEG2 Parser Status Codes */ +typedef enum { + MPEG2_SUCCESS = 0, /* No error */ + MPEG2_FRAME_COMPLETE = 1, /* Frame parsing complete found */ + MPEG2_PARSE_ERROR = 2, /* Failure in parsing */ +} mpeg2_status; + +/* MPEG2 Current Workload Status Codes */ +typedef enum { + MPEG2_WL_EMPTY = 0, + MPEG2_WL_DMEM_DATA = (1 << 0), + MPEG2_WL_REF_INFO = (1 << 1), + MPEG2_WL_PARTIAL_SLICE = (1 << 2), + MPEG2_WL_DANGLING_FIELD = (1 << 3), + MPEG2_WL_COMPLETE = (1 << 4), + MPEG2_WL_MISSING_TF = (1 << 5), + MPEG2_WL_MISSING_BF = (1 << 6), + MPEG2_WL_UNSUPPORTED = (1 << 7), + /* Error codes */ + MPEG2_WL_CORRUPTED_SEQ_HDR = (1 << 8), + MPEG2_WL_CORRUPTED_SEQ_EXT = (1 << 9), + MPEG2_WL_CORRUPTED_SEQ_DISP_EXT = (1 << 10), + MPEG2_WL_CORRUPTED_GOP_HDR = (1 << 11), + MPEG2_WL_CORRUPTED_PIC_HDR = (1 << 12), + MPEG2_WL_CORRUPTED_PIC_COD_EXT = (1 << 13), + MPEG2_WL_CORRUPTED_PIC_DISP_EXT = (1 << 14), + MPEG2_WL_CORRUPTED_QMAT_EXT = (1 << 15), + /* Error concealment codes */ + MPEG2_WL_CONCEALED_PIC_COD_TYPE = (1 << 16), + MPEG2_WL_CONCEALED_PIC_STRUCT = (1 << 17), + MPEG2_WL_CONCEALED_CHROMA_FMT = (1 << 18), + /* Type of dangling field */ + MPEG2_WL_DANGLING_FIELD_TOP = (1 << 24), + MPEG2_WL_DANGLING_FIELD_BOTTOM = (1 << 25), + MPEG2_WL_REPEAT_FIELD = (1 << 26), +} mpeg2_wl_status_codes; + +/* MPEG2 Parser Workload types */ +typedef enum +{ + /* MPEG2 Decoder Specific data */ + VIDDEC_WORKLOAD_MPEG2_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC, + + /* MPEG2 Quantization Matrix data */ + VIDDEC_WORKLOAD_MPEG2_QMAT, + + /* Past reference frame */ + VIDDEC_WORKLOAD_MPEG2_REF_PAST = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0, + + /* Future reference frame */ + VIDDEC_WORKLOAD_MPEG2_REF_FUTURE, + + /* Use current frame as reference */ + VIDDEC_WORKLOAD_MPEG2_REF_CURRENT_FRAME, + + /* User Data */ + VIDDEC_WORKLOAD_MPEG2_USERDATA = VIDDEC_WORKLOAD_USERDATA +} viddec_mpeg2_workloads; + +/* MPEG2 Decoder Specific Workitems */ +struct mpeg2_workitems +{ + /* Core Sequence Info 1 */ + uint32_t csi1; + + /* Core Sequence Info 2 */ + uint32_t csi2; + + /* Core Picture Info 1 */ + uint32_t cpi1; + + /* Core Picture Coding Extension Info 1 */ + uint32_t cpce1; + + /* Quantization Matrices */ + /* 0-15: Intra Quantization Matrix */ + /* 16-31: Non-Intra Quantization Matrix */ + /* 32-47: Chroma Intra Quantization Matrix */ + /* 48-63: 
Chroma Non-Intra Quantization Matrix */ + uint32_t qmat[MPEG2_QUANT_MAT_SIZE]; +}; + +/* MPEG2 Video Parser Context */ +struct viddec_mpeg2_parser +{ + /* MPEG2 Metadata Structure */ + struct mpeg2_info info; + + /* MPEG2 Workitems */ + struct mpeg2_workitems wi; + + /* Workload Status */ + uint32_t mpeg2_wl_status; + + /* Last parsed start code */ + int32_t mpeg2_last_parsed_sc; + + /* Last parsed slice start code. Used to start emitting workload items. */ + int32_t mpeg2_last_parsed_slice_sc; + + /* Current sequence headers parsed */ + uint8_t mpeg2_curr_seq_headers; + + /* Current frame headers parsed */ + uint8_t mpeg2_curr_frame_headers; + + /* Flag to indicate a valid sequence header was successfully parsed for */ + /* the current stream. */ + uint8_t mpeg2_valid_seq_hdr_parsed; + + /* Flag to indicate if quantization matrices are updated */ + uint8_t mpeg2_custom_qmat_parsed; + + /* Flag to indicate if reference table is updated with an entry */ + uint8_t mpeg2_ref_table_updated; + + /* Flag to indicate if the stream is MPEG2 */ + uint8_t mpeg2_stream; + + /* Flag to indicate if the previous picture metadata is parsed */ + uint8_t mpeg2_pic_metadata_complete; + + /* Number of active pan scan offsets */ + uint8_t mpeg2_num_pan_scan_offsets; + + /* Indicates the current stream level (Sequence/GOP/Picture) */ + /* Used for identifying the level for User Data */ + uint8_t mpeg2_stream_level; + + /* Flag to indicate if the current picture is interlaced or not */ + uint8_t mpeg2_picture_interlaced; + + /* Flag to indicate if the current field for interlaced picture is first */ + /* field or not. This flag is used only when mpeg2_picture_interlaced is */ + /* set to 1. */ + uint8_t mpeg2_first_field; + + /* Flag to indicate if the current parsed data has start of a frame */ + uint8_t mpeg2_frame_start; + + /* Temporal reference of the previous picture - Used to detect dangling fields */ + uint32_t mpeg2_prev_temp_ref; + + /* Previous picture structure - Used to identify the type of missing field */ + uint8_t mpeg2_prev_picture_structure; + + /* Flag to decide whether to use the current or next workload to dump workitems */ + uint8_t mpeg2_use_next_workload; + uint8_t mpeg2_first_slice_flag; +}; + +/* External Function Declarations */ +extern void *memset(void *s, int32_t c, uint32_t n); + +/* MPEG2 Parser Function Prototypes */ +void viddec_mpeg2_translate_attr (void *parent, void *ctxt); +void viddec_mpeg2_emit_workload (void *parent, void *ctxt); +void viddec_mpeg2_parse_seq_hdr (void *parent, void *ctxt); +void viddec_mpeg2_parse_gop_hdr (void *parent, void *ctxt); +void viddec_mpeg2_parse_pic_hdr (void *parent, void *ctxt); +void viddec_mpeg2_parse_and_append_user_data(void *parent, void *ctxt); +void viddec_mpeg2_parse_and_append_slice_data(void *parent, void *ctxt); +void viddec_mpeg2_parse_ext (void *parent, void *ctxt); + +/* MPEG2 wrapper functions for workload operations */ +void viddec_mpeg2_append_workitem (void *parent, viddec_workload_item_t *wi, uint8_t flag); +void viddec_mpeg2_append_pixeldata (void *parent, uint8_t flag); +viddec_workload_t* viddec_mpeg2_get_header (void *parent, uint8_t flag); +#endif diff --git a/mixvbp/vbp_plugin/mp2/mix_vbp_mpeg2_stubs.c b/mixvbp/vbp_plugin/mp2/mix_vbp_mpeg2_stubs.c new file mode 100755 index 0000000..0394ec8 --- /dev/null +++ b/mixvbp/vbp_plugin/mp2/mix_vbp_mpeg2_stubs.c @@ -0,0 +1,32 @@ +#include "viddec_mpeg2.h" +#include "viddec_fw_item_types.h" + + +void viddec_mpeg2_append_workitem(void *parent, viddec_workload_item_t *wi, 
uint8_t flag) +{ + return; +} + +void viddec_mpeg2_emit_workload(void *parent, void *ctxt) +{ + return; +} + +void viddec_mpeg2_append_pixeldata(void *parent, uint8_t flag) +{ + return; +} + +viddec_workload_t* viddec_mpeg2_get_header (void *parent, uint8_t flag) +{ + viddec_workload_t *ret; + if (flag) + { + ret = viddec_pm_get_next_header(parent); + } + else + { + ret = viddec_pm_get_header(parent); + } + return ret; +} diff --git a/mixvbp/vbp_plugin/mp2/viddec_mpeg2_frame_attr.c b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_frame_attr.c new file mode 100755 index 0000000..310f986 --- /dev/null +++ b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_frame_attr.c @@ -0,0 +1,121 @@ +/** + * viddec_mpeg2_frame_attr.c + * ------------------------- + * This is a helper file for viddec_mpeg2_workload.c to translate the data + * stored in the parser context into frame attributes in the workload. + */ + +#include "viddec_mpeg2.h" + +/* viddec_mpeg2_print_attr() - Prints collected frame attributes */ +static inline void viddec_mpeg2_print_attr(viddec_frame_attributes_t *attr) +{ + unsigned int index = 0; + + MPEG2_FA_DEB("Content_Size=%dx%d\n", attr->cont_size.width, + attr->cont_size.height); + MPEG2_FA_DEB("Repeat=%d\n", attr->mpeg2.repeat_first_field); + MPEG2_FA_DEB("Frame_Type=%d\n", attr->frame_type); + MPEG2_FA_DEB("Temporal_Reference=%d\n", attr->mpeg2.temporal_ref); + MPEG2_FA_DEB("Top_Field_First=%d\n", attr->mpeg2.top_field_first); + MPEG2_FA_DEB("Progressive_Frame=%d\n", attr->mpeg2.progressive_frame); + MPEG2_FA_DEB("Picture_Struct=%d\n", attr->mpeg2.picture_struct); + MPEG2_FA_DEB("Pan_Scan_Offsets=%d\n", attr->mpeg2.number_of_frame_center_offsets); + + for (index = 0; index < attr->mpeg2.number_of_frame_center_offsets; index++) + { + MPEG2_FA_DEB("\tPan_Scan_Offset_%d= %dx%d\n", index, + attr->mpeg2.frame_center_offset[index].horz, + attr->mpeg2.frame_center_offset[index].vert); + } + + return; +} + +/* viddec_mpeg2_set_default_values() - Resets attributes that are optional */ +/* in the bitstream to their default values. 
+static inline void viddec_mpeg2_set_default_values(viddec_frame_attributes_t *attrs)
+{
+    unsigned int index = 0;
+
+    attrs->mpeg2.number_of_frame_center_offsets = 0;
+    for (index = 0; index < MPEG2_MAX_VID_OFFSETS ; index++)
+    {
+        attrs->mpeg2.frame_center_offset[index].horz = 0;
+        attrs->mpeg2.frame_center_offset[index].vert = 0;
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_translate_attr() - Translates metadata parsed into frame */
+/* attributes in the workload                                            */
+void viddec_mpeg2_translate_attr(void *parent, void *ctxt)
+{
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get workload */
+    viddec_workload_t *wl = viddec_pm_get_header( parent );
+
+    /* Get attributes in workload */
+    viddec_frame_attributes_t *attrs = &wl->attrs;
+
+    /* Get the default values for optional attributes */
+    viddec_mpeg2_set_default_values(attrs);
+
+    /* Populate attributes from parser context */
+    /* Content Size */
+    attrs->cont_size.height = ((parser->info.seq_ext.vertical_size_extension << 12)
+                               | parser->info.seq_hdr.vertical_size_value);
+    attrs->cont_size.width  = ((parser->info.seq_ext.horizontal_size_extension << 12)
+                               | parser->info.seq_hdr.horizontal_size_value);
+
+    /* Repeat field */
+    attrs->mpeg2.repeat_first_field = parser->info.pic_cod_ext.repeat_first_field;
+
+    /* Temporal Reference */
+    attrs->mpeg2.temporal_ref = parser->info.pic_hdr.temporal_reference;
+
+    /* Top field first */
+    attrs->mpeg2.top_field_first = parser->info.pic_cod_ext.top_field_first;
+
+    /* Progressive frame */
+    attrs->mpeg2.progressive_frame = parser->info.pic_cod_ext.progressive_frame;
+
+    /* Picture Structure */
+    attrs->mpeg2.picture_struct = parser->info.pic_cod_ext.picture_structure;
+
+    /* Populate the frame type */
+    switch (parser->info.pic_hdr.picture_coding_type)
+    {
+    case MPEG2_PC_TYPE_I:
+        attrs->frame_type = VIDDEC_FRAME_TYPE_I;
+        break;
+    case MPEG2_PC_TYPE_P:
+        attrs->frame_type = VIDDEC_FRAME_TYPE_P;
+        break;
+    case MPEG2_PC_TYPE_B:
+        attrs->frame_type = VIDDEC_FRAME_TYPE_B;
+        break;
+    default:
+        attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID;
+    }
+
+    /* Update PanScan data */
+    if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_PIC_DISP_EXT)
+    {
+        unsigned int index = 0;
+        attrs->mpeg2.number_of_frame_center_offsets = parser->mpeg2_num_pan_scan_offsets;
+        for (index = 0; index < parser->mpeg2_num_pan_scan_offsets; index++)
+        {
+            attrs->mpeg2.frame_center_offset[index].horz = parser->info.pic_disp_ext.frame_center_horizontal_offset[index];
+            attrs->mpeg2.frame_center_offset[index].vert = parser->info.pic_disp_ext.frame_center_vertical_offset[index];
+        }
+    }
+
+    /* Print frame attributes */
+    viddec_mpeg2_print_attr(attrs);
+
+    return;
+}
diff --git a/mixvbp/vbp_plugin/mp2/viddec_mpeg2_metadata.c b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_metadata.c
new file mode 100755
index 0000000..037d6de
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_metadata.c
@@ -0,0 +1,1039 @@
+/**
+ * viddec_mpeg2_metadata.c
+ * -----------------------
+ * This file contains all the routines to parse the information from the
+ * MPEG2 elementary stream and store it in the parser context. Based on the
+ * data parsed, the state information in the context is updated.
+ *
+ * Headers currently parsed from the MPEG2 stream include:
+ * - Sequence Header
+ * - Sequence Extension
+ * - Sequence Display Extension
+ * - GOP Header
+ * - Picture Header
+ * - Picture Coding Extension
+ * - Quantization Matrix Extension
+ * - Picture Display Extension
+ *
+ * The slice data is parsed and appended into workload in viddec_mpeg2_parse.c
+ */
+
+#include "viddec_mpeg2.h"
+
+/* Default quantization matrix values */
+const uint8_t mpeg2_default_intra_quant_matrix[MPEG2_QUANT_MAT_SIZE] = {
+     8, 16, 19, 22, 26, 27, 29, 34,
+    16, 16, 22, 24, 27, 29, 34, 37,
+    19, 22, 26, 27, 29, 34, 34, 38,
+    22, 22, 26, 27, 29, 34, 37, 40,
+    22, 26, 27, 29, 32, 35, 40, 48,
+    26, 27, 29, 32, 35, 40, 48, 58,
+    26, 27, 29, 34, 38, 46, 56, 69,
+    27, 29, 35, 38, 46, 56, 69, 83
+};
+const uint8_t mpeg2_default_non_intra_quant_matrix[MPEG2_QUANT_MAT_SIZE] = {
+    16, 16, 16, 16, 16, 16, 16, 16,
+    16, 16, 16, 16, 16, 16, 16, 16,
+    16, 16, 16, 16, 16, 16, 16, 16,
+    16, 16, 16, 16, 16, 16, 16, 16,
+    16, 16, 16, 16, 16, 16, 16, 16,
+    16, 16, 16, 16, 16, 16, 16, 16,
+    16, 16, 16, 16, 16, 16, 16, 16,
+    16, 16, 16, 16, 16, 16, 16, 16
+};
+
+/* Matrix for converting scan order */
+const uint8_t mpeg2_classic_scan[MPEG2_QUANT_MAT_SIZE] = {
+     0,  1,  8, 16,  9,  2,  3, 10,
+    17, 24, 32, 25, 18, 11,  4,  5,
+    12, 19, 26, 33, 40, 48, 41, 34,
+    27, 20, 13,  6,  7, 14, 21, 28,
+    35, 42, 49, 56, 57, 50, 43, 36,
+    29, 22, 15, 23, 30, 37, 44, 51,
+    58, 59, 52, 45, 38, 31, 39, 46,
+    53, 60, 61, 54, 47, 55, 62, 63
+};
+const uint8_t mpeg2_alternate_scan[MPEG2_QUANT_MAT_SIZE] = {
+     0,  8, 16, 24,  1,  9,  2, 10,
+    17, 25, 32, 40, 48, 56, 57, 49,
+    41, 33, 26, 18,  3, 11,  4, 12,
+    19, 27, 34, 42, 50, 58, 35, 43,
+    51, 59, 20, 28,  5, 13,  6, 14,
+    21, 29, 36, 44, 52, 60, 37, 45,
+    53, 61, 22, 30,  7, 15, 23, 31,
+    38, 46, 54, 62, 39, 47, 55, 63
+};
+
+/* Look-up tables for macro block address increment VLC */
+const uint8_t mb_addr_inc_tab1[16] = {
+    0, 0, 7, 6, 5, 5, 4, 4,
+    3, 3, 3, 3, 2, 2, 2, 2
+};
+const uint8_t mb_addr_inc_tab2[8] = {
+    13, 12, 11, 10, 9, 9, 8, 8
+};
+const uint8_t mb_addr_inc_tab3[40] = {
+    33, 32, 31, 30, 29, 28, 27, 26,
+    25, 24, 23, 22, 21, 21, 20, 20,
+    19, 19, 18, 18, 17, 17, 16, 16,
+    15, 15, 15, 15, 15, 15, 15, 15,
+    14, 14, 14, 14, 14, 14, 14, 14
+};
+
+/* mpeg2_copy_matrix() - Copies quantization matrix from src to dst */
+static inline void mpeg2_copy_matrix(const uint8_t *src, uint8_t *dst)
+{
+    register uint32_t index = 0;
+    for (index=0; index < MPEG2_QUANT_MAT_SIZE; index++)
+        dst[index] = src[index];
+}
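+
+/* For illustration, the scan tables above map stream (zigzag) order to   */
+/* raster order. With mpeg2_classic_scan, the first four coefficients     */
+/* read from the stream are stored at raster positions 0, 1, 8 and 16:    */
+/*                                                                        */
+/*     matrix[mpeg2_classic_scan[0]] = c0;   --> matrix[0]                */
+/*     matrix[mpeg2_classic_scan[1]] = c1;   --> matrix[1]                */
+/*     matrix[mpeg2_classic_scan[2]] = c2;   --> matrix[8]                */
+/*     matrix[mpeg2_classic_scan[3]] = c3;   --> matrix[16]               */
+/* mpeg2_get_quant_matrix() below performs exactly this inverse walk.     */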
+
+/* mpeg2_get_quant_matrix() - Copies the next 64 bytes from the stream into */
+/* the given matrix                                                         */
+static inline int32_t mpeg2_get_quant_matrix(void *parent, uint8_t *matrix, uint32_t alternate_scan)
+{
+    int32_t ret = 1;
+    uint32_t index = 0, code = 0;
+    const uint8_t *zigzag_scan = (const uint8_t *) mpeg2_classic_scan;
+
+    if (alternate_scan)
+    {
+        zigzag_scan = (const uint8_t *) mpeg2_alternate_scan;
+    }
+
+    /* Start extracting matrix co-efficients and copy them in */
+    /* inverse zigzag scan order */
+    for (index = 0; index < MPEG2_QUANT_MAT_SIZE; index++)
+    {
+        ret = viddec_pm_get_bits(parent, &code, MPEG2_BITS_EIGHT);
+        /* Quantization values cannot be zero. If a zero value is found, */
+        /* further parsing is stopped and the existing values are used.  */
+        if ((ret != 1) || (code == 0))
+        {
+            ret = -1;
+            break;
+        }
+        matrix[zigzag_scan[index]] = (uint8_t)(code & 0xFF);
+    }
+
+    return ret;
+}
+
+/* viddec_mpeg2_parse_seq_hdr() - Parse sequence header metadata and store */
+/* in parser context                                                       */
+void viddec_mpeg2_parse_seq_hdr(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get Horizontal Frame Size */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.horizontal_size_value, 12);
+
+    /* Get Vertical Frame Size */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.vertical_size_value, 12);
+
+    /* Get Frame Aspect Ratio */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.aspect_ratio_information, 4);
+
+    /* Get Frame Rate */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.frame_rate_code, 4);
+
+    /* Get Bit Rate */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.bit_rate_value, 18);
+
+    /* Skip Marker bit */
+    ret_code |= viddec_pm_skip_bits(parent, 1);
+
+    /* Get VBV Buffer Size Value */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.vbv_buffer_size_value, 10);
+
+    /* Get Constrained Parameters Flag */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.constrained_parameters_flag, 1);
+
+    /* Quantization Matrix Support */
+    /* Get Intra Quantizer matrix, if available or use default values */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_intra_quantiser_matrix, 1);
+    if (parser->info.qnt_ext.load_intra_quantiser_matrix)
+    {
+        ret_code |= mpeg2_get_quant_matrix(parent, parser->info.qnt_mat.intra_quantiser_matrix, 0);
+        mpeg2_copy_matrix(parser->info.qnt_mat.intra_quantiser_matrix, parser->info.qnt_mat.chroma_intra_quantiser_matrix);
+    }
+    else
+    {
+        if (!parser->mpeg2_custom_qmat_parsed)
+        {
+            mpeg2_copy_matrix(mpeg2_default_intra_quant_matrix, parser->info.qnt_mat.intra_quantiser_matrix);
+            mpeg2_copy_matrix(mpeg2_default_intra_quant_matrix, parser->info.qnt_mat.chroma_intra_quantiser_matrix);
+        }
+    }
+
+    /* Get Non-Intra Quantizer matrix, if available or use default values */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_non_intra_quantiser_matrix, 1);
+    if (parser->info.qnt_ext.load_non_intra_quantiser_matrix)
+    {
+        ret_code |= mpeg2_get_quant_matrix(parent, parser->info.qnt_mat.non_intra_quantiser_matrix, 0);
+        mpeg2_copy_matrix(parser->info.qnt_mat.non_intra_quantiser_matrix, parser->info.qnt_mat.chroma_non_intra_quantiser_matrix);
+    }
+    else
+    {
+        if (!parser->mpeg2_custom_qmat_parsed)
+        {
+            mpeg2_copy_matrix(mpeg2_default_non_intra_quant_matrix, parser->info.qnt_mat.non_intra_quantiser_matrix);
+            mpeg2_copy_matrix(mpeg2_default_non_intra_quant_matrix, parser->info.qnt_mat.chroma_non_intra_quantiser_matrix);
+        }
+    }
+
+    /* Error handling */
+    /* The return value from get_bits() function is accumulated. If the return value is not 1, */
+    /* then there was an error getting the required information from the stream and the status */
+    /* is updated for the current workload.                                                     */
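+    /* (viddec_pm_get_bits() is expected to return 1 on success and -1 on */
+    /* failure; because -1 has all bits set, OR-ing the return values     */
+    /* leaves ret_code equal to 1 only if every read above succeeded.)    */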
+    if (ret_code == 1)
+    {
+        /* This flag indicates a valid sequence header has been parsed and so even if */
+        /* a sequence header is corrupted in the future, this valid sequence header   */
+        /* could be reused. */
+        parser->mpeg2_valid_seq_hdr_parsed = true;
+        /* This flag indicates a valid custom quantization matrix has been parsed.  */
+        /* So, if in the future, there is an error parsing quantization matrix, the */
+        /* parser will use the previously parsed custom values. */
+        if ((parser->info.qnt_ext.load_intra_quantiser_matrix)
+                || (parser->info.qnt_ext.load_non_intra_quantiser_matrix))
+        {
+            parser->mpeg2_custom_qmat_parsed = true;
+        }
+        MPEG2_DEB("Sequence header parsed successfully.\n");
+    }
+    else
+    {
+        /* Setting status to mark parser error while emitting the current workload. */
+        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_HDR;
+        MPEG2_DEB("Sequence header corrupted.\n");
+    }
+
+    parser->mpeg2_stream = false;
+    parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ;
+    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ;
+    parser->mpeg2_stream_level = MPEG2_LEVEL_SEQ;
+
+    return;
+}
+
+/* viddec_mpeg2_parse_gop_hdr() - Parse group of pictures header info and */
+/* store it in parser context                                             */
+void viddec_mpeg2_parse_gop_hdr(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Skip first 25 bits */
+    /* Skip time_code */
+    ret_code |= viddec_pm_skip_bits(parent, 25);
+
+    /* Get closed gop info */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.gop_hdr.closed_gop, 1);
+
+    /* Get broken link info */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.gop_hdr.broken_link, 1);
+
+    if (ret_code == 1)
+    {
+        MPEG2_DEB("GOP Header parsed successfully.\n");
+    }
+    else
+    {
+        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_GOP_HDR;
+        MPEG2_DEB("GOP header corrupted.\n");
+    }
+
+    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_GOP;
+    parser->mpeg2_stream_level = MPEG2_LEVEL_GOP;
+
+    return;
+}
+
+/* viddec_mpeg2_parse_pic_hdr() - Parse picture header info and store it in */
+/* parser context                                                           */
+void viddec_mpeg2_parse_pic_hdr(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0, found_error = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get Temporal Reference info */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.temporal_reference, 10);
+
+    /* Get Picture Coding type and skip the following byte */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.picture_coding_type, 3);
+
+    /* Error Handling and Concealment */
+    /* Picture coding type should be one of I, P or B */
+    if ((parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_I) &&
+            (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_P) &&
+            (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_B))
+    {
+        found_error = 1;
+    }
+    /* The first frame after a gop header should be a coded I picture as per */
+    /* section 6.3.1 in MPEG2 Specification.                                 */
+    else if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_GOP)
+    {
+        if (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_I)
+        {
+            found_error = 1;
+        }
+    }
+    /* The first frame after a sequence header cannot be a coded B picture as per */
+    /* section 6.1.1.6 in MPEG2 Specification.                                    */
+    else if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ)
+    {
+        if (parser->info.pic_hdr.picture_coding_type == MPEG2_PC_TYPE_B)
+        {
+            found_error = 1;
+        }
+    }
+
+    /* If there is an error parsing picture coding type, do error concealment and continue. */
+    if ((ret_code != 1) || (found_error))
+    {
+        if (found_error)
+        {
+            /* Setting status to mark parser error while emitting the current workload. */
+            parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_HDR;
+            MPEG2_DEB("Picture header corrupted.\n");
+        }
+
+        /* Error concealment for picture coding type - Default to I picture. */
+        parser->info.pic_hdr.picture_coding_type = MPEG2_PC_TYPE_I;
+        parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_COD_TYPE;
+        MPEG2_DEB("Picture Coding Type corrupted. Concealing to I type.\n");
+    }
+
+    /* Skip next 16 bits */
+    /* Skip vbv_delay */
+    ret_code |= viddec_pm_skip_bits(parent, 16);
+
+    /* If Picture Coding type is either P or B then */
+    /* Get forward vector code */
+    if ((MPEG2_PC_TYPE_P == parser->info.pic_hdr.picture_coding_type) ||
+            (MPEG2_PC_TYPE_B == parser->info.pic_hdr.picture_coding_type))
+    {
+        ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.full_pel_forward_vect, 1);
+        ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.forward_f_code, 3);
+    }
+    else
+    {
+        parser->info.pic_hdr.full_pel_forward_vect = 0;
+        parser->info.pic_hdr.forward_f_code = 0;
+    }
+
+    /* If Picture coding type is B then */
+    /* Get backward vector code */
+    if (MPEG2_PC_TYPE_B == parser->info.pic_hdr.picture_coding_type)
+    {
+        ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.full_pel_backward_vect, 1);
+        ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.backward_f_code, 3);
+    }
+    else
+    {
+        parser->info.pic_hdr.full_pel_backward_vect = 0;
+        parser->info.pic_hdr.backward_f_code = 0;
+    }
+
+    if (ret_code == 1)
+    {
+        MPEG2_DEB("Picture header parsed successfully.\n");
+    }
+    else
+    {
+        /* Setting status to mark parser error while emitting the current workload. */
+        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_HDR;
+        MPEG2_DEB("Picture header corrupted.\n");
+    }
+
+    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC;
+    parser->mpeg2_stream_level = MPEG2_LEVEL_PIC;
+
+    return;
+}
+
+/* viddec_mpeg2_parse_ext_seq() - Parse Sequence extension metadata and */
+/* store in parser context                                              */
+void viddec_mpeg2_parse_ext_seq(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get Profile and Level info */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.profile_and_level_indication, 8);
+
+    /* Get Progressive Sequence Flag */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.progressive_sequence, 1);
+
+    /* Get Chroma Format */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.chroma_format, 2);
+
+    /* Error Concealment */
+    /* If there is an error parsing chroma format, do error concealment and continue. */
+    if ((ret_code != 1) || (parser->info.seq_ext.chroma_format == MPEG2_CF_RESERVED))
+    {
+        if (parser->info.seq_ext.chroma_format == MPEG2_CF_RESERVED)
+        {
+            /* Setting status to mark parser error while emitting the current workload. */
+            parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_EXT;
+            MPEG2_DEB("Sequence extension corrupted.\n");
+        }
+
+        /* Error concealment for chroma format - Default to 4:2:0 */
+        parser->info.seq_ext.chroma_format = MPEG2_CF_420;
+        parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_CHROMA_FMT;
+        MPEG2_DEB("Chroma Format corrupted. Concealing to 4:2:0.\n");
+    }
+
+    /* Get Content Size Extension Data */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.horizontal_size_extension, 2);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.vertical_size_extension, 2);
+
+    /* Get Bit Rate Extension */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.bit_rate_extension, 12);
+
+    /* Skip Marker bit */
+    ret_code |= viddec_pm_skip_bits(parent, 1);
+
+    /* Get VBV Buffer Size Extension Data */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.vbv_buffer_size_extension, 8);
+
+    /* Skip 1 bit */
+    /* Skip low_delay */
+    ret_code |= viddec_pm_skip_bits(parent, 1);
+
+    /* Get Frame Rate extension data */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.frame_rate_extension_n, 2);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.frame_rate_extension_d, 5);
+
+    if (ret_code == 1)
+    {
+        MPEG2_DEB("Sequence extension header parsed successfully.\n");
+    }
+    else
+    {
+        /* Setting status to mark parser error while emitting the current workload. */
+        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_EXT;
+        MPEG2_DEB("Sequence extension corrupted.\n");
+    }
+
+    /* Check if the last parsed start code was that of sequence header. */
+    /* If true, seq extension followed seq header => MPEG2 Stream       */
+    parser->mpeg2_stream = (parser->mpeg2_last_parsed_sc == MPEG2_SC_SEQ_HDR) ? true : false;
+    parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_EXT;
+    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_EXT;
+
+    return;
+}
+
+/* viddec_mpeg2_parse_ext_seq_disp() - Parse Sequence Display extension */
+/* metadata and store in parser context                                 */
+void viddec_mpeg2_parse_ext_seq_disp(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get video format */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.video_format, 3);
+
+    /* Check if color description info is present */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.colour_description, 1);
+
+    /* If color description is found, get color primaries info */
+    /* and transfer characteristics                            */
+    if (parser->info.seq_disp_ext.colour_description)
+    {
+        ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.colour_primaries, 8);
+        ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.transfer_characteristics, 8);
+        ret_code |= viddec_pm_skip_bits(parent, 8);
+    }
+
+    /* Get Display Horizontal Size */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.display_horizontal_size, 14);
+    ret_code |= viddec_pm_skip_bits(parent, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.display_vertical_size, 14);
+
+    if (ret_code == 1)
+    {
+        MPEG2_DEB("Sequence display extension parsed successfully.\n");
+    }
+    else
+    {
+        /* Setting status to mark parser error while emitting the current workload. */
+        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_DISP_EXT;
+        MPEG2_DEB("Sequence display extension corrupted.\n");
+    }
+
+    /* Set flag to indicate Sequence Display Extension is present */
+    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_DISP_EXT;
+    parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_DISP_EXT;
+
+    return;
+}
+
+/* viddec_mpeg2_parse_ext_seq_scal() - Parse Sequence Scalable extension */
+/* metadata and store in parser context                                  */
+void viddec_mpeg2_parse_ext_seq_scal(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get scalable mode */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_scal_ext.scalable_mode, 2);
+
+    if (ret_code == 1)
+    {
+        MPEG2_DEB("Sequence scalable extension parsed successfully.\n");
+    }
+
+    /* Set flags to indicate Sequence Scalable Extension is present */
+    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_SCAL_EXT;
+    parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_SCAL_EXT;
+
+    return;
+}
+
+/* viddec_mpeg2_parse_ext_pic() - Parse Picture Coding extension */
+/* metadata and store in parser context                          */
+void viddec_mpeg2_parse_ext_pic(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0, found_error = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get Forward/Backward, Horizontal/Vertical codes */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode00, 4);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode01, 4);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode10, 4);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode11, 4);
+
+    /* Get Intra DC Precision */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.intra_dc_precision, 2);
+
+    /* Get Picture Structure */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.picture_structure, 2);
+
+    /* Error Handling and Concealment */
+    /* Picture structure should be frame, top field or bottom field */
+    if (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_RESERVED)
+    {
+        found_error = 1;
+    }
+    /* All pictures in progressive sequence should be frame picture */
+    else if (parser->info.seq_ext.progressive_sequence)
+    {
+        if (parser->info.pic_cod_ext.picture_structure != MPEG2_PIC_STRUCT_FRAME)
+        {
+            found_error = 1;
+        }
+    }
+
+    /* If there is an error parsing picture structure, do error concealment and continue. */
+    if ((ret_code != 1) || (found_error))
+    {
+        if (found_error)
+        {
+            /* Setting status to mark parser error while emitting the current workload. */
+            parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_COD_EXT;
+            MPEG2_DEB("Picture coding extension corrupted.\n");
+        }
+
+        /* Error concealment for picture structure - Default to frame picture. */
+        parser->info.pic_cod_ext.picture_structure = MPEG2_PIC_STRUCT_FRAME;
+        parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_STRUCT;
+        MPEG2_DEB("Picture Structure corrupted. Concealing to Frame picture.\n");
+    }
+
+    /* Get flags */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.top_field_first, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.frame_pred_frame_dct, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.concealment_motion_vectors, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.q_scale_type, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.intra_vlc_format, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.alternate_scan, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.repeat_first_field, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.chroma_420_type, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.progressive_frame, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.composite_display_flag, 1);
+
+    /* Error concealment for frame picture */
+    if ((parser->info.pic_cod_ext.top_field_first)
+            || (parser->info.pic_cod_ext.frame_pred_frame_dct)
+            || (parser->info.pic_cod_ext.repeat_first_field)
+            || (parser->info.pic_cod_ext.progressive_frame))
+    {
+        if (parser->info.pic_cod_ext.picture_structure != MPEG2_PIC_STRUCT_FRAME)
+        {
+            parser->info.pic_cod_ext.picture_structure = MPEG2_PIC_STRUCT_FRAME;
+            parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_STRUCT;
+            MPEG2_DEB("Picture Structure corrupted. Concealing to Frame picture.\n");
+        }
+    }
+
+    if (ret_code == 1)
+    {
+        MPEG2_DEB("Picture coding extension parsed successfully.\n");
+    }
+    else
+    {
+        /* Setting status to mark parser error while emitting the current workload. */
+        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_COD_EXT;
+        MPEG2_DEB("Picture coding extension corrupted.\n");
+    }
+
+    /* Dangling field detection */
+    /* If the previous picture is the first field, then the temporal reference number   */
+    /* should match with the second field. Otherwise, one of the fields in the previous */
+    /* picture is missing and dangling field error is marked. The workload containing   */
+    /* the previous picture is emitted out and current picture data is added to the     */
+    /* next workload. The mpeg2_use_next_workload variable is used as a flag to direct  */
+    /* the items into the current/next workload.                                        */
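+    /* Worked example: if the top field of frame N arrived first (temporal */
+    /* reference N) and the next parsed field carries temporal reference   */
+    /* N+1, the bottom field of frame N never arrived. The previous        */
+    /* picture structure (top) then identifies the bottom field as the     */
+    /* missing one, frame N is emitted as a dangling-field workload, and   */
+    /* the new field starts the next workload.                             */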
+    if ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field))
+    {
+        if (parser->mpeg2_prev_temp_ref != parser->info.pic_hdr.temporal_reference)
+        {
+            /* Mark dangling field info in workload status */
+            parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD;
+            if (parser->mpeg2_prev_picture_structure == MPEG2_PIC_STRUCT_BOTTOM)
+            {
+                parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD_TOP;
+            }
+            else
+            {
+                parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD_BOTTOM;
+            }
+            /* Set flag stating current workload is done */
+            parser->mpeg2_pic_metadata_complete = true;
+            /* Set flag to use the next workload for adding workitems for */
+            /* the current frame                                          */
+            parser->mpeg2_use_next_workload = true;
+            /* Toggle first field flag to compensate for missing field */
+            parser->mpeg2_first_field = (parser->mpeg2_first_field) ? false : true;
+        }
+        else
+        {
+            /* Same field repeated */
+            if (parser->mpeg2_prev_picture_structure == parser->info.pic_cod_ext.picture_structure)
+            {
+                /* Mark unsupported in workload status */
+                parser->mpeg2_wl_status |= MPEG2_WL_REPEAT_FIELD;
+            }
+        }
+    }
+
+    /* Set context variables for interlaced picture handling */
+    if (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_FRAME)
+    {
+        /* Frame picture found. Reset variables used for interlaced fields picture. */
+        parser->mpeg2_picture_interlaced = false;
+        parser->mpeg2_first_field = false;
+        parser->mpeg2_use_next_workload = false;
+    }
+    else
+    {
+        /* Interlaced fields picture found. */
+        parser->mpeg2_picture_interlaced = true;
+        parser->mpeg2_first_field = (parser->mpeg2_first_field) ? false : true;
+    }
+
+    /* Set flags */
+    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_COD_EXT;
+    parser->mpeg2_prev_temp_ref = parser->info.pic_hdr.temporal_reference;
+    parser->mpeg2_prev_picture_structure = parser->info.pic_cod_ext.picture_structure;
+    if ((!parser->mpeg2_picture_interlaced)
+            || ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field)))
+    {
+        parser->mpeg2_frame_start = true;
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_parse_ext_pic_disp() - Parse Picture Display extension */
+/* metadata and store in parser context                                */
+void viddec_mpeg2_parse_ext_pic_disp(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0;
+    uint32_t index = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Determine number of offsets */
+    if (parser->info.seq_ext.progressive_sequence)
+    {
+        if (parser->info.pic_cod_ext.repeat_first_field)
+        {
+            parser->mpeg2_num_pan_scan_offsets =
+                (parser->info.pic_cod_ext.top_field_first) ? 3 : 2;
+        }
+        else /* Not repeat field */
+            parser->mpeg2_num_pan_scan_offsets = 1;
+    }
+    else /* Not progressive sequence */
+    {
+        /* Check if picture structure is a field */
+        if ((parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_TOP) ||
+                (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_BOTTOM))
+        {
+            parser->mpeg2_num_pan_scan_offsets = 1;
+        }
+        else
+        {
+            parser->mpeg2_num_pan_scan_offsets =
+                (parser->info.pic_cod_ext.repeat_first_field) ? 3 : 2;
+        }
+    }
+
+    /* Get the offsets */
+    for (index = 0; index < parser->mpeg2_num_pan_scan_offsets; index++)
+    {
+        ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_disp_ext.frame_center_horizontal_offset[index], 16);
+        ret_code |= viddec_pm_skip_bits(parent, 1);
+        ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_disp_ext.frame_center_vertical_offset[index], 16);
+        ret_code |= viddec_pm_skip_bits(parent, 1);
+    }
+
+    if (ret_code == 1)
+    {
+        MPEG2_DEB("Picture display extension parsed successfully.\n");
+    }
+    else
+    {
+        /* Setting status to mark parser error while emitting the current workload. */
+        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_DISP_EXT;
+        MPEG2_DEB("Picture display extension corrupted.\n");
+    }
+
+    /* Set flag to indicate picture display extension is found */
+    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_DISP_EXT;
+    return;
+}
+
+/* viddec_mpeg2_parse_ext_quant() - Parse Quantization Matrix extension */
+/* metadata and store in parser context                                 */
+void viddec_mpeg2_parse_ext_quant(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Quantization Matrix Support */
+    /* Get Intra Quantizer matrix, if available or use default values */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_intra_quantiser_matrix, 1);
+    if (parser->info.qnt_ext.load_intra_quantiser_matrix)
+    {
+        ret_code |= mpeg2_get_quant_matrix(parent,
+                                           parser->info.qnt_mat.intra_quantiser_matrix,
+                                           parser->info.pic_cod_ext.alternate_scan);
+        mpeg2_copy_matrix(parser->info.qnt_mat.intra_quantiser_matrix,
+                          parser->info.qnt_mat.chroma_intra_quantiser_matrix);
+    }
+
+    /* Get Non-Intra Quantizer matrix, if available */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_non_intra_quantiser_matrix, 1);
+    if (parser->info.qnt_ext.load_non_intra_quantiser_matrix)
+    {
+        ret_code |= mpeg2_get_quant_matrix(parent,
+                                           parser->info.qnt_mat.non_intra_quantiser_matrix,
+                                           parser->info.pic_cod_ext.alternate_scan);
+        mpeg2_copy_matrix(parser->info.qnt_mat.non_intra_quantiser_matrix,
+                          parser->info.qnt_mat.chroma_non_intra_quantiser_matrix);
+    }
+
+    /* Get Chroma Intra Quantizer matrix, if available */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_chroma_intra_quantiser_matrix, 1);
+    if (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix)
+    {
+        ret_code |= mpeg2_get_quant_matrix(parent,
+                                           parser->info.qnt_mat.chroma_intra_quantiser_matrix,
+                                           parser->info.pic_cod_ext.alternate_scan);
+    }
+
+    /* Get Chroma Non-Intra Quantizer matrix, if available */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix, 1);
+    if (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix)
+    {
+        ret_code |= mpeg2_get_quant_matrix(parent,
+                                           parser->info.qnt_mat.chroma_non_intra_quantiser_matrix,
+                                           parser->info.pic_cod_ext.alternate_scan);
+    }
+
+    if (ret_code == 1)
+    {
+        MPEG2_DEB("Quantization matrix extension parsed successfully.\n");
+    }
+    else
+    {
+        /* Setting status to mark parser error while emitting the current workload. */
+        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_QMAT_EXT;
+        MPEG2_DEB("Quantization matrix extension corrupted.\n");
+    }
+
+    /* Log if custom quantization matrices were found */
+    if ( (parser->info.qnt_ext.load_intra_quantiser_matrix) ||
+            (parser->info.qnt_ext.load_non_intra_quantiser_matrix) ||
+            (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) ||
+            (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) )
+    {
+        MPEG2_DEB("Custom quantization matrix found.\n");
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_parse_ext() - Parse extension metadata and store in parser */
+/* context                                                                 */
+void viddec_mpeg2_parse_ext(void *parent, void *ctxt)
+{
+    uint32_t ext_code = 0;
+
+    /* Get extension start code */
+    viddec_pm_get_bits(parent, &ext_code, 4);
+
+    /* Switch on extension type */
+    switch ( ext_code )
+    {
+        /* Sequence Extension Info */
+    case MPEG2_EXT_SEQ:
+        viddec_mpeg2_parse_ext_seq(parent, ctxt);
+        break;
+
+        /* Sequence Display Extension info */
+    case MPEG2_EXT_SEQ_DISP:
+        viddec_mpeg2_parse_ext_seq_disp(parent, ctxt);
+        break;
+
+    case MPEG2_EXT_SEQ_SCAL:
+        viddec_mpeg2_parse_ext_seq_scal(parent, ctxt);
+        break;
+
+        /* Picture Coding Extension */
+    case MPEG2_EXT_PIC_CODING:
+        viddec_mpeg2_parse_ext_pic(parent, ctxt);
+        break;
+
+        /* Picture Display Extension */
+    case MPEG2_EXT_PIC_DISP:
+        viddec_mpeg2_parse_ext_pic_disp(parent, ctxt);
+        break;
+
+        /* Quantization Extension */
+    case MPEG2_EXT_QUANT_MAT:
+        viddec_mpeg2_parse_ext_quant(parent, ctxt);
+        break;
+
+    default:
+        break;
+    } /* Switch, on extension type */
+
+    return;
+}
+
+/* viddec_mpeg2_parse_and_append_user_data() - Parse user data and append */
+/* to workload.                                                           */
+void viddec_mpeg2_parse_and_append_user_data(void *parent, void *ctxt)
+{
+    uint32_t user_data = 0;
+    viddec_workload_item_t wi;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Set the user data level (SEQ/GOP/PIC) in the workitem type. */
+    switch (parser->mpeg2_stream_level)
+    {
+    case MPEG2_LEVEL_SEQ:
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA;
+        break;
+    }
+    case MPEG2_LEVEL_GOP:
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA;
+        break;
+    }
+    case MPEG2_LEVEL_PIC:
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA;
+        break;
+    }
+    default:
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_INVALID;
+        break;
+    }
+    }
+
+    /* Read 1 byte of user data and store it in workitem for the current   */
+    /* stream level (SEQ/GOP/PIC). Keep adding data payloads till it       */
+    /* reaches size 11. When it is 11, the maximum user data payload size, */
+    /* append the workitem. This loop is repeated till all user data is    */
+    /* extracted and appended.                                             */
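+    /* (For example, 30 bytes of user data are emitted as two full 11-byte */
+    /* workitems followed by one final workitem carrying the remaining     */
+    /* 8 bytes.)                                                           */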
+    wi.user_data.size = 0;
+    memset(&(wi.user_data), 0, sizeof(wi.user_data));
+    while (viddec_pm_get_bits(parent, &user_data, MPEG2_BITS_EIGHT) != -1)
+    {
+        /* Store the valid byte in data payload */
+        wi.user_data.data_payload[wi.user_data.size] = user_data;
+        wi.user_data.size++;
+
+        /* When size exceeds payload size, append workitem and continue */
+        if (wi.user_data.size >= 11)
+        {
+            viddec_pm_setup_userdata(&wi);
+            viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+            viddec_fw_reset_workload_item(&wi);
+            wi.user_data.size = 0;
+        }
+    }
+    /* If size is not 0, append remaining user data. */
+    if (wi.user_data.size > 0)
+    {
+        viddec_pm_setup_userdata(&wi);
+        viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+        wi.user_data.size = 0;
+    }
+
+    MPEG2_DEB("User data @ Level %d found.\n", parser->mpeg2_stream_level);
+    return;
+}
+
+static inline uint32_t get_mb_addr_increment(uint32_t *data)
+{
+    if (*data >= 1024)
+    {
+        return 1;
+    }
+    else if (*data >= 128)
+    {
+        *data >>= 6;
+        return mb_addr_inc_tab1[*data];
+    }
+    else if (*data >= 64)
+    {
+        *data >>= 3;
+        *data -= 8;
+        return mb_addr_inc_tab2[*data];
+    }
+    else
+    {
+        *data -= 24;
+        return mb_addr_inc_tab3[*data];
+    }
+}
+
+static void viddec_mpeg2_get_first_mb_number(void *parent, void *ctxt, uint32_t *first_mb)
+{
+    uint32_t mb_row = 0, mb_width = 0, prev_mb_addr = 0;
+    uint32_t temp = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+    *first_mb = 0;
+    mb_row = ((parser->mpeg2_last_parsed_slice_sc & 0xFF) - 1);
+    mb_width = parser->info.seq_hdr.horizontal_size_value >> 4;
+    prev_mb_addr = (mb_row * mb_width) - 1;
+
+    /* Skip slice start code */
+    viddec_pm_skip_bits(parent, 32);
+
+    if (parser->info.seq_hdr.vertical_size_value > 2800)
+    {
+        /* Get 3 bits of slice_vertical_position_extension */
+        viddec_pm_get_bits(parent, &temp, 3);
+        mb_row += (temp << 7);
+    }
+
+    /* Skip priority_breakpoint if sequence scalable extension is present */
+    if (parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ_SCAL_EXT)
+    {
+        /* Skip 7 bits if scalable mode is 00 (Data partition) */
+        if (parser->info.seq_scal_ext.scalable_mode == 0)
+        {
+            viddec_pm_skip_bits(parent, 7);
+        }
+    }
+
+    /* Skip quantizer_scale */
+    viddec_pm_skip_bits(parent, 5);
+
+    /* Skip a few bits with slice information */
+    temp = 0;
+    viddec_pm_peek_bits(parent, &temp, 1);
+    if (temp == 0x1)
+    {
+        /* Skip intra_slice_flag(1), intra_slice(1) and reserved_bits(7) */
+        viddec_pm_skip_bits(parent, 9);
+        temp=0;
+        viddec_pm_peek_bits(parent, &temp, 1);
+        while (temp == 0x1)
+        {
+            /* Skip extra_bit_slice(1) and extra_information_slice(8) */
+            viddec_pm_skip_bits(parent, 9);
+            temp=0;
+            viddec_pm_peek_bits(parent, &temp, 1);
+        }
+    }
+
+    /* Skip extra_bit_slice flag */
+    viddec_pm_skip_bits(parent, 1);
+
+    /* Increment prev_mb_addr by 33 for every 11 bits of macroblock_escape string */
+    temp=0;
+    viddec_pm_peek_bits(parent, &temp, 11);
+    while (temp == 0x8)
+    {
+        viddec_pm_skip_bits(parent, 11);
+        prev_mb_addr += 33;
+        temp=0;
+        viddec_pm_peek_bits(parent, &temp, 11);
+    }
+
+    /* Get the mb_addr_increment and add it to prev_mb_addr to get the current mb number. */
+    *first_mb = prev_mb_addr + get_mb_addr_increment(&temp);
+    MPEG2_DEB("First MB number in slice is 0x%08X.\n", *first_mb);
+
+    return;
+}
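+
+/* Worked example: for a 720-pixel-wide sequence, mb_width is 720 >> 4 =   */
+/* 45 macroblocks. Slice start code 0x05 gives mb_row = (0x05 & 0xFF) - 1  */
+/* = 4, so prev_mb_addr starts at (4 * 45) - 1 = 179, and the macroblock   */
+/* address increment decoded from the slice supplies the offset to the     */
+/* first coded macroblock.                                                 */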
+
+/* Parse slice data to get the number of macroblocks in the current slice */
+/* and then append as pixel data.                                         */
+void viddec_mpeg2_parse_and_append_slice_data(void *parent, void *ctxt)
+{
+    uint32_t bit_off=0, start_byte=0, first_mb = 0;
+    uint8_t is_emul=0;
+    viddec_workload_item_t wi;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get current byte position */
+    viddec_pm_get_au_pos(parent, &bit_off, &start_byte, &is_emul);
+
+    /* Populate wi type */
+    viddec_mpeg2_get_first_mb_number(parent, ctxt, &first_mb);
+    wi.vwi_type = VIDDEC_WORKLOAD_PIXEL_ES;
+    wi.es.es_flags = (first_mb << 16);
+
+    /* Append data from given byte position as pixel data */
+    viddec_pm_append_misc_tags(parent, start_byte, (unsigned int) -1, &wi, !parser->mpeg2_use_next_workload);
+    return;
+}
diff --git a/mixvbp/vbp_plugin/mp2/viddec_mpeg2_parse.c b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_parse.c
new file mode 100755
index 0000000..83d5340
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_parse.c
@@ -0,0 +1,380 @@
+/**
+ * viddec_mpeg2_parse.c
+ * --------------------
+ * This file acts as the main interface between the parser manager and the
+ * MPEG2 parser. All the operations done by the MPEG2 parser are defined
+ * here, and function pointers for each operation are returned to the parser
+ * manager.
+ */
+
+#include "viddec_mpeg2.h"
+
+/* viddec_mpeg2_parser_init() - Initializes parser context. */
+static void viddec_mpeg2_parser_init
+(
+    void        *ctxt,
+    uint32_t    *persist_mem,
+    uint32_t     preserve
+)
+{
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Avoid compiler warning */
+    persist_mem = persist_mem;
+
+    /* Initialize state variables */
+    parser->mpeg2_pic_metadata_complete  = false;
+    parser->mpeg2_picture_interlaced     = false;
+    parser->mpeg2_first_field            = false;
+    parser->mpeg2_frame_start            = false;
+    parser->mpeg2_ref_table_updated      = false;
+    parser->mpeg2_use_next_workload      = false;
+    parser->mpeg2_first_slice_flag       = false;
+    parser->mpeg2_curr_frame_headers     = MPEG2_HEADER_NONE;
+    parser->mpeg2_last_parsed_sc         = MPEG2_SC_ALL;
+    parser->mpeg2_last_parsed_slice_sc   = MPEG2_SC_SLICE_MAX;
+    parser->mpeg2_wl_status              = MPEG2_WL_EMPTY;
+    parser->mpeg2_prev_picture_structure = MPEG2_PIC_STRUCT_FRAME;
+    parser->mpeg2_prev_temp_ref          = 0;
+    parser->mpeg2_num_pan_scan_offsets   = 0;
+
+    if (preserve)
+    {
+        /* Init all picture level header info */
+        memset(&parser->info.pic_hdr, 0, sizeof(struct mpeg2_picture_hdr_info));
+        memset(&parser->info.pic_cod_ext, 0, sizeof(struct mpeg2_picture_coding_ext_info));
+        memset(&parser->info.pic_disp_ext, 0, sizeof(struct mpeg2_picture_disp_ext_info));
+    }
+    else
+    {
+        /* Init all header info */
+        memset(&parser->info, 0, sizeof(struct mpeg2_info));
+
+        parser->mpeg2_stream               = false;
+        parser->mpeg2_custom_qmat_parsed   = false;
+        parser->mpeg2_valid_seq_hdr_parsed = false;
+        parser->mpeg2_curr_seq_headers     = MPEG2_HEADER_NONE;
+    }
+
+    MPEG2_DEB("MPEG2 Parser: Context Initialized.\n");
+
+    return;
+}
+
+/* viddec_mpeg2_get_context_size() - Returns the memory size required by the */
+/* MPEG2 parser.                                                             */
+static void viddec_mpeg2_get_context_size
+(
+    viddec_parser_memory_sizes_t    *size
+)
+{
+    /* Should return size of my structure */
+    size->context_size = sizeof(struct viddec_mpeg2_parser);
+    size->persist_size = 0;
+}
+
+/* viddec_mpeg2_get_error_code() - Returns the error code for the current */
+/* workload.                                                              */
+static void viddec_mpeg2_get_error_code
+(
+    struct viddec_mpeg2_parser  *parser,
+    viddec_workload_t           *wl,
+    uint32_t                    *error_code
+)
+{
+    *error_code = 0;
+
+    /* Dangling field error */
+    if (parser->mpeg2_wl_status & MPEG2_WL_DANGLING_FIELD)
+    {
+        *error_code |= VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD;
+        if (parser->mpeg2_wl_status & MPEG2_WL_DANGLING_FIELD_TOP)
+        {
+            *error_code |= VIDDEC_FW_WORKLOAD_ERR_TOPFIELD;
+        }
+        else
+        {
+            *error_code |= VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD;
+        }
+    }
+
+    /* Repeated same field */
+    if (parser->mpeg2_wl_status & MPEG2_WL_REPEAT_FIELD)
+    {
+        *error_code |= (VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD
+                        | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+    }
+
+    /* If workload is not complete, set non-decodable flag */
+    if (!(parser->mpeg2_wl_status & MPEG2_WL_COMPLETE))
+    {
+        *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+    }
+
+    /* If reference info is not updated, set missing reference flag */
+    if (!(parser->mpeg2_wl_status & MPEG2_WL_REF_INFO))
+    {
+        *error_code |= VIDDEC_FW_WORKLOAD_ERR_MISSING_REFERENCE;
+    }
+
+    /* Missing DMEM data: missing-DMEM and irrecoverable flags are set */
+    if (!(parser->mpeg2_wl_status & MPEG2_WL_DMEM_DATA))
+    {
+        *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM
+                         | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ;
+    }
+
+    /* Missing sequence header: missing-sequence-info and irrecoverable flags are set */
+    if ((!(parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ))
+            && (!parser->mpeg2_valid_seq_hdr_parsed))
+    {
+        *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_MISSING_SEQ_INFO
+                         | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ;
+    }
+
+    /* Unsupported features found in stream */
+    if (parser->mpeg2_wl_status & MPEG2_WL_UNSUPPORTED)
+    {
+        *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED
+                         | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ;
+    }
+
+    /* If frame type is unknown, default to I frame. */
+    if ((wl->attrs.frame_type != VIDDEC_FRAME_TYPE_I)
+            && (wl->attrs.frame_type != VIDDEC_FRAME_TYPE_P)
+            && (wl->attrs.frame_type != VIDDEC_FRAME_TYPE_B))
+    {
+        wl->attrs.frame_type = VIDDEC_FRAME_TYPE_I;
+    }
+
+    /* If there is a mismatch between the frame type and reference information */
+    /* then mark the workload as not decodable                                 */
+    if (wl->attrs.frame_type == VIDDEC_FRAME_TYPE_B)
+    {
+        if (wl->is_reference_frame != 0) *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+    }
+    else
+    {
+        if (wl->is_reference_frame == 0) *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+    }
+
+    /* For non-decodable frames, do not set reference info so that the workload */
+    /* manager does not increment ref count.                                    */
+    if (*error_code & VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE)
+    {
+        wl->is_reference_frame = 0;
+    }
+
+    /* Corrupted header notification */
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_HDR)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_HDR;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_EXT)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_EXT;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_DISP_EXT)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_DISP_EXT;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_GOP_HDR)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_GOP_HDR;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_HDR)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_HDR;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_COD_EXT)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_COD_EXT;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_DISP_EXT)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_DISP_EXT;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_QMAT_EXT)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT;
+
+    MPEG2_DEB("Workload error code: 0x%8X.\n", *error_code);
+    return;
+}
+
+/* viddec_mpeg2_is_start_frame() - Returns whether the current chunk of */
+/* parsed data has the start of a frame.                                */
+static uint32_t viddec_mpeg2_is_start_frame
+(
+    void    *ctxt
+)
+{
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+    return (parser->mpeg2_frame_start);
+}
+
+/* viddec_mpeg2_is_workload_done() - Returns current frame parsing status */
+/* to the parser manager.                                                 */
+static uint32_t viddec_mpeg2_is_workload_done
+(
+    void            *parent,
+    void            *ctxt,
+    unsigned int     next_sc,
+    uint32_t        *codec_specific_errors
+)
+{
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+    viddec_workload_t *wl = viddec_pm_get_header(parent);
+    uint32_t ret = VIDDEC_PARSE_SUCESS;
+    uint32_t frame_boundary = 0;
+    uint8_t force_frame_complete = 0;
+    parent = parent;
+
+    /* Detect Frame Boundary */
+    frame_boundary = ((MPEG2_SC_PICTURE == next_sc) || (MPEG2_SC_SEQ_HDR == next_sc) || (MPEG2_SC_GROUP == next_sc));
+    if (frame_boundary)
+    {
+        parser->mpeg2_first_slice_flag = false;
+    }
+
+    force_frame_complete = ((VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc));
+
+    if (force_frame_complete || (frame_boundary && (parser->mpeg2_pic_metadata_complete)))
+    {
+        if (!force_frame_complete)
+        {
+            parser->mpeg2_wl_status             |= MPEG2_WL_COMPLETE;
+            parser->mpeg2_last_parsed_slice_sc   = MPEG2_SC_SLICE_MAX;
+            parser->mpeg2_pic_metadata_complete  = false;
+            parser->mpeg2_first_slice_flag       = false;
+        }
+
+        viddec_mpeg2_get_error_code(parser, wl, codec_specific_errors);
+        parser->mpeg2_wl_status = MPEG2_WL_EMPTY;
+        parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE;
+        /* Reset mpeg2_use_next_workload flag if it is set */
+        if (parser->mpeg2_use_next_workload)
+        {
+            viddec_pm_set_late_frame_detect(parent);
+            parser->mpeg2_use_next_workload = false;
+        }
+        ret = VIDDEC_PARSE_FRMDONE;
+    }
+    return ret;
+}
+
+/* viddec_mpeg2_parse() - Parse metadata info from the buffer for the prev */
+/* start code found.                                                       */
+static mpeg2_status viddec_mpeg2_parse
+(
+    void    *parent,
+    void    *ctxt
+)
+{
+    uint32_t current_sc = 0, sc_bits = MPEG2_SC_AND_PREFIX_SIZE;
+    int32_t  ret = MPEG2_SUCCESS;
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
For Mpeg1 we want to set frame start after + we parsed pich header, since there is no extension*/ + parser->mpeg2_frame_start = (!parser->mpeg2_stream) && (parser->mpeg2_last_parsed_sc == MPEG2_SC_PICTURE); + + /* Peak current start code - First 32 bits of the stream */ + ret = viddec_pm_peek_bits(parent, ¤t_sc, sc_bits); + if (ret == -1) + { + MPEG2_DEB("Unable to get start code.\n"); + return MPEG2_PARSE_ERROR; + } + current_sc &= MPEG2_BIT_MASK_8; + MPEG2_DEB("Start Code found = 0x%.8X\n", current_sc); + + /* Get rid of the start code prefix for all start codes except slice */ + /* start codes. */ + if ((current_sc < MPEG2_SC_SLICE_MIN) || (current_sc > MPEG2_SC_SLICE_MAX)) + { + viddec_pm_skip_bits(parent, sc_bits); + } + + /* Parse Metadata based on the start code found */ + switch ( current_sc ) + { + /* Sequence Start Code */ + case MPEG2_SC_SEQ_HDR: + { + parser->mpeg2_curr_seq_headers = MPEG2_HEADER_NONE; + viddec_mpeg2_parse_seq_hdr(parent, ctxt); + } + break; + + /* Picture Start Code */ + case MPEG2_SC_PICTURE: + { + viddec_mpeg2_parse_pic_hdr(parent, ctxt); + } + break; + + /* Extension Code */ + case MPEG2_SC_EXT: + { + viddec_mpeg2_parse_ext(parent, ctxt); + } + break; + + /* Group of Pictures Header */ + case MPEG2_SC_GROUP: + { + viddec_mpeg2_parse_gop_hdr(parent, ctxt); + } + break; + + /* Unused Start Code */ + case MPEG2_SC_SEQ_END: + case MPEG2_SC_SEQ_ERR: + break; + + /* User Data */ + case MPEG2_SC_USER_DATA: + { + viddec_mpeg2_parse_and_append_user_data(parent, ctxt); + } + break; + + default: + { + /* Slice Data - Append slice data to the workload */ + if ((current_sc >= MPEG2_SC_SLICE_MIN) && + (current_sc <= MPEG2_SC_SLICE_MAX)) + { + if (!parser->mpeg2_first_slice_flag) + { + /* At this point, all the metadata required by the MPEG2 */ + /* hardware for decoding is extracted and stored. So the */ + /* metadata can be packed into workitems and emitted out.*/ + viddec_mpeg2_emit_workload(parent, ctxt); + + /* If the current picture is progressive or it is the */ + /* second field of interlaced field picture then, set */ + /* the workload done flag. */ + if ((!parser->mpeg2_picture_interlaced) + || ((parser->mpeg2_picture_interlaced) && (!parser->mpeg2_first_field))) + { + parser->mpeg2_pic_metadata_complete = true; + } + else if ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field)) + { + parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE; + } + + parser->mpeg2_first_slice_flag = true; + } + parser->mpeg2_last_parsed_slice_sc = current_sc; + viddec_mpeg2_parse_and_append_slice_data(parent, ctxt); + parser->mpeg2_wl_status |= MPEG2_WL_PARTIAL_SLICE; + } + } + } /* Switch */ + + /* Save last parsed start code */ + parser->mpeg2_last_parsed_sc = current_sc; + return ret; +} + +/* viddec_mpeg2_get_ops() - Register parser ops with the parser manager. */ +void viddec_mpeg2_get_ops +( + viddec_parser_ops_t *ops +) +{ + ops->init = viddec_mpeg2_parser_init; + ops->parse_syntax = viddec_mpeg2_parse; + ops->get_cxt_size = viddec_mpeg2_get_context_size; + ops->is_wkld_done = viddec_mpeg2_is_workload_done; + ops->is_frame_start = viddec_mpeg2_is_start_frame; + return; +} + diff --git a/mixvbp/vbp_plugin/mp2/viddec_mpeg2_workload.c b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_workload.c new file mode 100755 index 0000000..42a42a9 --- /dev/null +++ b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_workload.c @@ -0,0 +1,461 @@ +/** + * viddec_mpeg2_workload.c + * ----------------------- + * This file packs the data parsed and stored in the context into workload and + * emits it out. 
diff --git a/mixvbp/vbp_plugin/mp2/viddec_mpeg2_workload.c b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_workload.c
new file mode 100755
index 0000000..42a42a9
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_workload.c
@@ -0,0 +1,461 @@
+/**
+ * viddec_mpeg2_workload.c
+ * -----------------------
+ * This file packs the data parsed and stored in the context into workload
+ * and emits it out. The current list of workitems emitted into the workload
+ * includes:
+ *
+ * - DMEM - Register Data
+ * - Past and Future picture references
+ * - Quantization matrix data
+ *
+ * Slice data gets appended into the workload in viddec_mpeg2_parse.c
+ *
+ * Also, the frame attributes are updated in the workload.
+ */
+
+#include "viddec_mpeg2.h"
+#include "viddec_fw_item_types.h"
+
+void viddec_mpeg2_append_workitem(void *parent, viddec_workload_item_t *wi, uint8_t next_wl)
+{
+    if (next_wl)
+    {
+        viddec_pm_append_workitem_next(parent, wi);
+    }
+    else
+    {
+        viddec_pm_append_workitem(parent, wi);
+    }
+    return;
+}
+
+viddec_workload_t* viddec_mpeg2_get_header(void *parent, uint8_t next_wl)
+{
+    viddec_workload_t *ret;
+    if (next_wl)
+    {
+        ret = viddec_pm_get_next_header(parent);
+    }
+    else
+    {
+        ret = viddec_pm_get_header(parent);
+    }
+    return ret;
+}
+
+/* viddec_mpeg2_set_seq_ext_defaults() - Sets non-zero default values for */
+/* sequence extension items in case sequence extension is not present.    */
+static void viddec_mpeg2_set_seq_ext_defaults(struct viddec_mpeg2_parser *parser)
+{
+    parser->info.seq_ext.progressive_sequence = true;
+    parser->info.seq_ext.chroma_format        = MPEG2_CF_420;
+}
+
+/* viddec_mpeg2_set_pic_cod_ext_defaults() - Sets non-zero default values for*/
+/* picture coding extension items in case picture coding extension is not    */
+/* present.                                                                  */
+static void viddec_mpeg2_set_pic_cod_ext_defaults(struct viddec_mpeg2_parser *parser)
+{
+    parser->info.pic_cod_ext.picture_structure    = MPEG2_PIC_STRUCT_FRAME;
+    parser->info.pic_cod_ext.frame_pred_frame_dct = true;
+    parser->info.pic_cod_ext.progressive_frame    = true;
+}
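+
+/* Layout note for the packing below: parser->info.qnt_mat holds the four */
+/* 64-byte quantization matrices back to back (256 bytes in all), and     */
+/* parser->wi.qmat holds the same data as 64 DWORDs, so four consecutive  */
+/* 8-bit matrix entries always share one 32-bit workitem word.            */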
+
+/* viddec_mpeg2_pack_qmat() - Packs the 256 bytes of quantization matrix */
+/* data into 64 32-bit values.                                           */
+#ifdef MFDBIGENDIAN
+static void viddec_mpeg2_pack_qmat(struct viddec_mpeg2_parser *parser)
+{
+    /* Quantization Matrix Support */
+    /* Populate Quantization Matrices */
+    uint32_t index = 0;
+    uint32_t *qmat_packed, *qmat_unpacked;
+
+    /* When transferring the quantization matrix data from the parser */
+    /* context into workload items, we are packing four 8 bit         */
+    /* quantization values into one DWORD (32 bits). To do this, the  */
+    /* array of values of type uint8_t, is typecast as uint32 * and   */
+    /* read.                                                          */
+    qmat_packed = (uint32_t *) parser->wi.qmat;
+    qmat_unpacked = (uint32_t *) &parser->info.qnt_mat;
+
+    for (index = 0; index < MPEG2_QUANT_MAT_SIZE; index++)
+    {
+        qmat_packed[index] = qmat_unpacked[index];
+    }
+    return;
+}
+#else
+static void viddec_mpeg2_pack_qmat(struct viddec_mpeg2_parser *parser)
+{
+    /* Quantization Matrix Support */
+    /* Populate Quantization Matrices */
+    uint32_t index = 0;
+    uint32_t *qmat_packed;
+    uint8_t  *qmat_unpacked;
+
+    qmat_packed = (uint32_t *) parser->wi.qmat;
+    qmat_unpacked = (uint8_t *) &parser->info.qnt_mat;
+
+    for (index = 0; index < MPEG2_QUANT_MAT_SIZE; index++)
+    {
+        qmat_packed[index] =
+            (((uint32_t)qmat_unpacked[(index<<2)+0]) << 24) |
+            (((uint32_t)qmat_unpacked[(index<<2)+1]) << 16) |
+            (((uint32_t)qmat_unpacked[(index<<2)+2]) <<  8) |
+            ((uint32_t)qmat_unpacked[(index<<2)+3]);
+    }
+    return;
+}
+#endif
+
+/* viddec_mpeg2_trans_metadata_workitems() - Transfers the metadata stored */
+/* in the parser context into the DMEM workitem registers.                 */
+static void viddec_mpeg2_trans_metadata_workitems(void *ctxt)
+{
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Reset register values */
+    parser->wi.csi1  = 0x0;
+    parser->wi.csi2  = 0x0;
+    parser->wi.cpi1  = 0x0;
+    parser->wi.cpce1 = 0x0;
+
+    /* Set defaults for missing fields */
+    if (!(parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ_EXT))
+    {
+        viddec_mpeg2_set_seq_ext_defaults(parser);
+    }
+    if (!(parser->mpeg2_curr_frame_headers & MPEG2_HEADER_PIC_COD_EXT))
+    {
+        viddec_mpeg2_set_pic_cod_ext_defaults(parser);
+    }
+
+    /* Populate Core Sequence Info 1 */
+    parser->wi.csi1 |= (parser->mpeg2_stream) << 1;
+    parser->wi.csi1 |= (parser->info.seq_hdr.constrained_parameters_flag) << 2;
+    parser->wi.csi1 |= (parser->info.seq_ext.progressive_sequence) << 3;
+    parser->wi.csi1 |= (parser->info.seq_ext.chroma_format) << 16;
+    parser->wi.csi1 |= (parser->info.qnt_ext.load_intra_quantiser_matrix) << 19;
+    parser->wi.csi1 |= (parser->info.qnt_ext.load_non_intra_quantiser_matrix) << 20;
+    parser->wi.csi1 |= (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) << 21;
+    parser->wi.csi1 |= (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) << 22;
+    MPEG2_DEB("Core Sequence Info 1: 0x%.8X\n", parser->wi.csi1);
+
+    /* Populate Core Sequence Info 2 */
+    parser->wi.csi2 |= (parser->info.seq_hdr.horizontal_size_value & MPEG2_BIT_MASK_11);
+    parser->wi.csi2 |= (parser->info.seq_hdr.vertical_size_value & MPEG2_BIT_MASK_11) << 14;
+    MPEG2_DEB("Core Sequence Info 2: 0x%.8X\n", parser->wi.csi2);
+
+    /* Populate Core Picture Info */
+    parser->wi.cpi1 |= (parser->info.pic_hdr.full_pel_forward_vect);
+    parser->wi.cpi1 |= (parser->info.pic_hdr.forward_f_code) << 1;
+    parser->wi.cpi1 |= (parser->info.pic_hdr.full_pel_backward_vect) << 4;
+    parser->wi.cpi1 |= (parser->info.pic_hdr.backward_f_code) << 5;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode00) << 8;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode01) << 12;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode10) << 16;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode11) << 20;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.intra_dc_precision) << 24;
+    parser->wi.cpi1 |= (parser->info.pic_hdr.picture_coding_type-1) << 26;
+    MPEG2_DEB("Core Picture Info 1: 0x%.8X\n", parser->wi.cpi1);
+
+    /* Populate Core Picture Extension Info */
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.composite_display_flag);
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.progressive_frame) << 1;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.chroma_420_type) << 2;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.repeat_first_field) << 3;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.alternate_scan) << 4;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.intra_vlc_format) << 5;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.q_scale_type) << 6;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.concealment_motion_vectors) << 7;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.frame_pred_frame_dct) << 8;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.top_field_first) << 9;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.picture_structure) << 10;
+    MPEG2_DEB("Core Picture Ext Info 1: 0x%.8X\n", parser->wi.cpce1);
+
+    return;
+}
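+
+/* Example of the CSI2 packing above, assuming MPEG2_BIT_MASK_11 is the */
+/* usual 11-bit mask (0x7FF): a 1920x1080 sequence yields               */
+/*     csi2 = (1080 << 14) | 1920 = 0x010E0780                          */
+/* with the width in bits 0..10 and the height starting at bit 14.      */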
+
+/* mpeg2_emit_frameid() - Sends the frame id as a workload item. */
+static inline void mpeg2_emit_frameid(void *parent, int32_t wl_type, uint8_t flag)
+{
+    viddec_workload_item_t wi;
+    wi.vwi_type = wl_type;
+
+    wi.ref_frame.reference_id     = 0;
+    wi.ref_frame.luma_phys_addr   = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    viddec_mpeg2_append_workitem( parent, &wi, flag );
+}
+
+/* mpeg2_send_ref_reorder() - Reorders reference frames */
+static inline void mpeg2_send_ref_reorder(void *parent, uint8_t flag)
+{
+    viddec_workload_item_t wi;
+
+    wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER;
+    wi.ref_reorder.ref_table_offset = 0;
+    /* Reorder index 1 to index 0 only */
+    wi.ref_reorder.ref_reorder_00010203 = 0x01010203;
+    wi.ref_reorder.ref_reorder_04050607 = 0x04050607;
+    viddec_mpeg2_append_workitem( parent, &wi, flag );
+}
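+
+/* The reorder words above appear to encode one reference-table slot per */
+/* byte: an identity mapping would be 0x00010203/0x04050607, so the      */
+/* value 0x01010203 remaps only slot 0 to take the frame at index 1,     */
+/* matching the "index 1 to index 0" comment in the function above.      */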
+
+/* viddec_mpeg2_check_unsupported() - Check for unsupported feature in the stream */
+static void viddec_mpeg2_check_unsupported(void *parent, void *ctxt)
+{
+    unsigned int unsupported_feature_found = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get workload */
+    viddec_workload_t *wl = viddec_mpeg2_get_header( parent, parser->mpeg2_use_next_workload );
+
+    /* Get attributes in workload */
+    viddec_frame_attributes_t *attrs = &wl->attrs;
+
+    /* Check for unsupported content size */
+    unsupported_feature_found |= (attrs->cont_size.height > MPEG2_MAX_CONTENT_HEIGHT);
+    unsupported_feature_found |= (attrs->cont_size.width  > MPEG2_MAX_CONTENT_WIDTH);
+
+    /* Update parser status, if found */
+    if (unsupported_feature_found)
+    {
+        parser->mpeg2_wl_status |= MPEG2_WL_UNSUPPORTED;
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_append_metadata() - Appends meta data from the stream. */
+void viddec_mpeg2_append_metadata(void *parent, void *ctxt)
+{
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    viddec_workload_item_t wi;
+
+    /* Append sequence info, if found with current frame */
+    if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+        wi.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO;
+
+        viddec_fw_mp2_sh_set_horizontal_size_value    ( &(wi.mp2_sh) , parser->info.seq_hdr.horizontal_size_value);
+        viddec_fw_mp2_sh_set_vertical_size_value      ( &(wi.mp2_sh) , parser->info.seq_hdr.vertical_size_value);
+        viddec_fw_mp2_sh_set_aspect_ratio_information ( &(wi.mp2_sh) , parser->info.seq_hdr.aspect_ratio_information);
+        viddec_fw_mp2_sh_set_frame_rate_code          ( &(wi.mp2_sh) , parser->info.seq_hdr.frame_rate_code);
+        viddec_fw_mp2_sh_set_bit_rate_value           ( &(wi.mp2_sh) , parser->info.seq_hdr.bit_rate_value);
+        viddec_fw_mp2_sh_set_vbv_buffer_size_value    ( &(wi.mp2_sh) , parser->info.seq_hdr.vbv_buffer_size_value);
+
+        viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+    }
+
+    /* Append sequence extension info, if found with current frame */
+    if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ_EXT)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+        wi.vwi_type = VIDDEC_WORKLOAD_MPEG2_SEQ_EXT;
+
+        viddec_fw_mp2_se_set_profile_and_level_indication( &(wi.mp2_se) , parser->info.seq_ext.profile_and_level_indication);
+        viddec_fw_mp2_se_set_progressive_sequence        ( &(wi.mp2_se) , parser->info.seq_ext.progressive_sequence);
+        viddec_fw_mp2_se_set_chroma_format               ( &(wi.mp2_se) , parser->info.seq_ext.chroma_format);
+        viddec_fw_mp2_se_set_horizontal_size_extension   ( &(wi.mp2_se) , parser->info.seq_ext.horizontal_size_extension);
+        viddec_fw_mp2_se_set_vertical_size_extension     ( &(wi.mp2_se) , parser->info.seq_ext.vertical_size_extension);
+        viddec_fw_mp2_se_set_bit_rate_extension          ( &(wi.mp2_se) , parser->info.seq_ext.bit_rate_extension);
+        viddec_fw_mp2_se_set_vbv_buffer_size_extension   ( &(wi.mp2_se) , parser->info.seq_ext.vbv_buffer_size_extension);
+        viddec_fw_mp2_se_set_frame_rate_extension_n      ( &(wi.mp2_se) , parser->info.seq_ext.frame_rate_extension_n);
+        viddec_fw_mp2_se_set_frame_rate_extension_d      ( &(wi.mp2_se) , parser->info.seq_ext.frame_rate_extension_d);
+
+        viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+    }
+
+    /* Append Display info, if present */
+    if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ_DISP_EXT)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+        wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO;
+
+        viddec_fw_mp2_sde_set_video_format            ( &(wi.mp2_sde) , parser->info.seq_disp_ext.video_format);
+        viddec_fw_mp2_sde_set_color_description       ( &(wi.mp2_sde) , parser->info.seq_disp_ext.colour_description);
+        viddec_fw_mp2_sde_set_color_primaries         ( &(wi.mp2_sde) , parser->info.seq_disp_ext.colour_primaries);
+        viddec_fw_mp2_sde_set_transfer_characteristics( &(wi.mp2_sde) , parser->info.seq_disp_ext.transfer_characteristics);
+        viddec_fw_mp2_sde_set_display_horizontal_size ( &(wi.mp2_sde) , parser->info.seq_disp_ext.display_horizontal_size);
viddec_fw_mp2_sde_set_display_vertical_size ( &(wi.mp2_sde) , parser->info.seq_disp_ext.display_vertical_size); + + viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload); + } + + /* Append GOP info, if present */ + if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_GOP) + { + memset(&wi, 0, sizeof(viddec_workload_item_t)); + wi.vwi_type = VIDDEC_WORKLOAD_GOP_INFO; + + viddec_fw_mp2_gop_set_closed_gop ( &(wi.mp2_gop) , parser->info.gop_hdr.closed_gop); + viddec_fw_mp2_gop_set_broken_link( &(wi.mp2_gop) , parser->info.gop_hdr.broken_link); + + viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload); + } + + return; +} + +/* viddec_mpeg2_append_workitems() - Appends decoder specific workitems */ +/* to the workload starting at the address and length specified. */ +static void viddec_mpeg2_append_workitems +( + void *parent, + uint32_t* address, + int workitem_type, + int num_items, + uint8_t flag +) +{ + int32_t index=0; + const uint32_t* initial_address = address; + viddec_workload_item_t wi; + + for (index=0; index < num_items; index++) + { + wi.vwi_type = workitem_type; + wi.data.data_offset = (char *) address - (const char *) initial_address; + wi.data.data_payload[0] = address[0]; + wi.data.data_payload[1] = address[1]; + address += 2; + + viddec_mpeg2_append_workitem(parent, &wi, flag); + } + + return; +} + +/* viddec_mpeg2_emit_workload() - Emits MPEG2 parser generated work load */ +/* items. */ +/* Items include: MPEG2 DMEM Data, Quantization Matrices. */ +/* Pixel ES data sent separately whenever parser sees slice data */ +void viddec_mpeg2_emit_workload(void *parent, void *ctxt) +{ + MPEG2_DEB("Emitting workloads.\n"); + + /* Get MPEG2 Parser context */ + struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt; + + /* Append meta data workitems */ + viddec_mpeg2_append_metadata(parent, ctxt); + + /* Transfer metadata into attributes */ + viddec_mpeg2_translate_attr(parent, ctxt); + + /* Check for unsupported features in the stream and update parser status */ + viddec_mpeg2_check_unsupported(parent, ctxt); + + /* Transfer all stored metadata into MPEG2 Hardware Info */ + viddec_mpeg2_trans_metadata_workitems(parser); + + /* Send MPEG2 DMEM workitems */ + viddec_mpeg2_append_workitems(parent, + (uint32_t *) &parser->wi, + VIDDEC_WORKLOAD_MPEG2_DMEM, + MPEG2_NUM_DMEM_WL_ITEMS, + parser->mpeg2_use_next_workload); + parser->mpeg2_wl_status |= MPEG2_WL_DMEM_DATA; + MPEG2_DEB("Adding %d items as DMEM Data.\n", MPEG2_NUM_DMEM_WL_ITEMS); + + /* Send MPEG2 Quantization Matrix workitems, if updated */ + viddec_mpeg2_pack_qmat(parser); + viddec_mpeg2_append_workitems(parent, + (uint32_t *) parser->wi.qmat, + VIDDEC_WORKLOAD_MPEG2_QMAT, + MPEG2_NUM_QMAT_WL_ITEMS, + parser->mpeg2_use_next_workload); + MPEG2_DEB("Adding %d items as QMAT Data.\n", MPEG2_NUM_QMAT_WL_ITEMS); + + /* Manage reference frames */ + viddec_mpeg2_manage_ref(parent, ctxt); + + return; +} + diff --git a/mixvbp/vbp_plugin/mp4/Android.mk b/mixvbp/vbp_plugin/mp4/Android.mk new file mode 100755 index 0000000..da9ed15 --- /dev/null +++ b/mixvbp/vbp_plugin/mp4/Android.mk @@ -0,0 +1,28 @@ +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + viddec_mp4_visualobject.c \ + viddec_mp4_decodevideoobjectplane.c \ + viddec_mp4_parse.c \ + viddec_fw_mp4_workload.c \ + viddec_mp4_videoobjectplane.c \ + viddec_parse_sc_mp4.c \ + viddec_mp4_shortheader.c \ + viddec_mp4_videoobjectlayer.c + +LOCAL_CFLAGS := -DVBP -DHOST_ONLY + +LOCAL_C_INCLUDES := \ + 
$(MIXVBP_DIR)/include \ + $(LOCAL_PATH)/include \ + $(MIXVBP_DIR)/vbp_manager/include + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libmixvbp_mpeg4 + +LOCAL_SHARED_LIBRARIES := \ + libmixvbp + +include $(BUILD_SHARED_LIBRARY) diff --git a/mixvbp/vbp_plugin/mp4/include/viddec_fw_mp4.h b/mixvbp/vbp_plugin/mp4/include/viddec_fw_mp4.h new file mode 100755 index 0000000..bb772d4 --- /dev/null +++ b/mixvbp/vbp_plugin/mp4/include/viddec_fw_mp4.h @@ -0,0 +1,231 @@ +#ifndef VIDDEC_FW_MP4_H +#define VIDDEC_FW_MP4_H + +#include "viddec_fw_workload.h" + +enum viddec_fw_mp4_ref_frame_id +{ + VIDDEC_MP4_FRAME_CURRENT = 0, + VIDDEC_MP4_FRAME_PAST = 1, + VIDDEC_MP4_FRAME_FUTURE = 2, + VIDDEC_MP4_FRAME_MAX = 3, +}; + +enum mp4_workload_item_type +{ + VIDDEC_WORKLOAD_MP4_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0, + VIDDEC_WORKLOAD_MP4_FUTURE_FRAME, + VIDDEC_WORKLOAD_MP4_VOL_INFO = VIDDEC_WORKLOAD_DECODER_SPECIFIC, + VIDDEC_WORKLOAD_MP4_VOP_INFO, + VIDDEC_WORKLOAD_MP4_BVOP_INFO, + VIDDEC_WORKLOAD_MP4_SPRT_TRAJ, + VIDDEC_WORKLOAD_MP4_IQUANT, + VIDDEC_WORKLOAD_MP4_NIQUANT, + VIDDEC_WORKLOAD_MP4_SVH, +}; + +enum viddec_fw_mp4_vop_coding_type_t +{ + VIDDEC_MP4_VOP_TYPE_I = 0, + VIDDEC_MP4_VOP_TYPE_P, + VIDDEC_MP4_VOP_TYPE_B, + VIDDEC_MP4_VOP_TYPE_S +}; + +// This structure contains the information extracted from the Video Object Layer. +// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOL_INFO, using +// the "vwi_payload" array in viddec_workload_item_t. +// TODO: Add default values in the comments for each item +typedef struct +{ + // Flags extracted from the Video Object Layer + // 0:0 - short_video_header + // 1:2 - vol_shape + // 3:3 - interlaced + // 4:4 - obmc_disable + // 5:5 - quarter_sample + // 6:6 - resync_marker_disable + // 7:7 - data_partitioned + // 8:8 - reversible_vlc +#define viddec_fw_mp4_get_reversible_vlc(x) viddec_fw_bitfields_extract((x)->vol_flags, 8, 0x1) +#define viddec_fw_mp4_set_reversible_vlc(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 8, 0x1) +#define viddec_fw_mp4_get_data_partitioned(x) viddec_fw_bitfields_extract((x)->vol_flags, 7, 0x1) +#define viddec_fw_mp4_set_data_partitioned(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 7, 0x1) +#define viddec_fw_mp4_get_resync_marker_disable(x) viddec_fw_bitfields_extract((x)->vol_flags, 6, 0x1) +#define viddec_fw_mp4_set_resync_marker_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 6, 0x1) +#define viddec_fw_mp4_get_quarter_sample(x) viddec_fw_bitfields_extract((x)->vol_flags, 5, 0x1) +#define viddec_fw_mp4_set_quarter_sample(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 5, 0x1) +#define viddec_fw_mp4_get_obmc_disable(x) viddec_fw_bitfields_extract((x)->vol_flags, 4, 0x1) +#define viddec_fw_mp4_set_obmc_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 4, 0x1) +#define viddec_fw_mp4_get_interlaced(x) viddec_fw_bitfields_extract((x)->vol_flags, 3, 0x1) +#define viddec_fw_mp4_set_interlaced(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 3, 0x1) +#define viddec_fw_mp4_get_vol_shape(x) viddec_fw_bitfields_extract((x)->vol_flags, 1, 0x3) +#define viddec_fw_mp4_set_vol_shape(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 1, 0x3) +#define viddec_fw_mp4_get_short_video_header_flag(x) viddec_fw_bitfields_extract((x)->vol_flags, 0, 0x1) +#define viddec_fw_mp4_set_short_video_header_flag(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 0, 0x1) + unsigned int vol_flags; + + // Size extracted from the Video Object Layer + // 
0:12 - height
+    // 13:25 - width
+    // MFD_MPG4VD_MB_PER_ROW can be calculated as (width+15) >> 4
+    // MFD_MPG4VD_MB_ROWS can be calculated as (height+15) >> 4
+    // e.g. 720x480: vol_size = (720 << 13) | 480 = 0x5A01E0
+#define viddec_fw_mp4_get_vol_width(x)       viddec_fw_bitfields_extract((x)->vol_size, 13, 0x1FFF)
+#define viddec_fw_mp4_set_vol_width(x, val)  viddec_fw_bitfields_insert((x)->vol_size, val, 13, 0x1FFF)
+#define viddec_fw_mp4_get_vol_height(x)      viddec_fw_bitfields_extract((x)->vol_size, 0, 0x1FFF)
+#define viddec_fw_mp4_set_vol_height(x, val) viddec_fw_bitfields_insert((x)->vol_size, val, 0, 0x1FFF)
+    unsigned int vol_size;
+
+    // Sprite, time increments and quantization details from the Video Object Layer
+    //  0:15 - vop_time_increment_resolution
+    // 16:17 - sprite_enable
+    // 18:23 - sprite_warping_points
+    // 24:25 - sprite_warping_accuracy
+    // 26:29 - quant_precision
+    // 30:30 - quant_type
+#define viddec_fw_mp4_get_quant_type(x)                          viddec_fw_bitfields_extract((x)->vol_item, 30, 0x1)
+#define viddec_fw_mp4_set_quant_type(x, val)                     viddec_fw_bitfields_insert((x)->vol_item, val, 30, 0x1)
+#define viddec_fw_mp4_get_quant_precision(x)                     viddec_fw_bitfields_extract((x)->vol_item, 26, 0xF)
+#define viddec_fw_mp4_set_quant_precision(x, val)                viddec_fw_bitfields_insert((x)->vol_item, val, 26, 0xF)
+#define viddec_fw_mp4_get_sprite_warping_accuracy(x)             viddec_fw_bitfields_extract((x)->vol_item, 24, 0x3)
+#define viddec_fw_mp4_set_sprite_warping_accuracy(x, val)        viddec_fw_bitfields_insert((x)->vol_item, val, 24, 0x3)
+#define viddec_fw_mp4_get_sprite_warping_points(x)               viddec_fw_bitfields_extract((x)->vol_item, 18, 0x3F)
+#define viddec_fw_mp4_set_sprite_warping_points(x, val)          viddec_fw_bitfields_insert((x)->vol_item, val, 18, 0x3F)
+#define viddec_fw_mp4_get_sprite_enable(x)                       viddec_fw_bitfields_extract((x)->vol_item, 16, 0x3)
+#define viddec_fw_mp4_set_sprite_enable(x, val)                  viddec_fw_bitfields_insert((x)->vol_item, val, 16, 0x3)
+#define viddec_fw_mp4_get_vop_time_increment_resolution(x)       viddec_fw_bitfields_extract((x)->vol_item, 0, 0xFFFF)
+#define viddec_fw_mp4_set_vop_time_increment_resolution(x, val)  viddec_fw_bitfields_insert((x)->vol_item, val, 0, 0xFFFF)
+    unsigned int vol_item;
+
+} viddec_fw_mp4_vol_info_t;
+
+// This structure contains the information extracted from the Video Object Plane.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOP_INFO, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item +typedef struct +{ + // Frame Info - to populate register MFD_MPG4VD_BSP_FRAME_INFO + // 0:4 - current_frame_id + // 5:5 - current_field_frame + // 6:10 - future_frame_id + // 11:11 - future_field_frame + // 12:16 - past_frame_id + // 17:17 - past_field_frame +#define viddec_fw_mp4_get_past_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 17, 0x1) +#define viddec_fw_mp4_set_past_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 17, 0x1) +#define viddec_fw_mp4_get_past_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 12, 0x1F) +#define viddec_fw_mp4_set_past_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 12, 0x1F) +#define viddec_fw_mp4_get_future_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 11, 0x1) +#define viddec_fw_mp4_set_future_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 11, 0x1) +#define viddec_fw_mp4_get_future_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 6, 0x1F) +#define viddec_fw_mp4_set_future_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 6, 0x1F) +#define viddec_fw_mp4_get_current_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 5, 0x1) +#define viddec_fw_mp4_set_current_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 5, 0x1) +#define viddec_fw_mp4_get_current_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 0, 0x1F) +#define viddec_fw_mp4_set_current_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 0, 0x1F) + unsigned int frame_info; + + // Video Object Plane Info + // 0:1 - vop_coding_type + // 2:2 - vop_rounding_type + // 3:5 - intra_dc_vlc_thr + // 6:6 - top_field_first + // 7:7 - alternate_vertical_scan_flag + // 8:16 - vop_quant + // 17:19 - vop_fcode_forward + // 20:22 - vop_fcode_backward + // 23:31 - quant_scale +#define viddec_fw_mp4_get_vop_quant_scale(x) viddec_fw_bitfields_extract((x)->vop_data, 23, 0x1FF) +#define viddec_fw_mp4_set_vop_quant_scale(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 23, 0x1FF) +#define viddec_fw_mp4_get_vop_fcode_backward(x) viddec_fw_bitfields_extract((x)->vop_data, 20, 0x7) +#define viddec_fw_mp4_set_vop_fcode_backward(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 20, 0x7) +#define viddec_fw_mp4_get_vop_fcode_forward(x) viddec_fw_bitfields_extract((x)->vop_data, 17, 0x7) +#define viddec_fw_mp4_set_vop_fcode_forward(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 17, 0x7) +#define viddec_fw_mp4_get_vop_quant(x) viddec_fw_bitfields_extract((x)->vop_data, 8, 0x1FF) +#define viddec_fw_mp4_set_vop_quant(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 8, 0x1FF) +#define viddec_fw_mp4_get_alternate_vertical_scan_flag(x) viddec_fw_bitfields_extract((x)->vop_data, 7, 0x1) +#define viddec_fw_mp4_set_alternate_vertical_scan_flag(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 7, 0x1) +#define viddec_fw_mp4_get_top_field_first(x) viddec_fw_bitfields_extract((x)->vop_data, 6, 0x1) +#define viddec_fw_mp4_set_top_field_first(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 6, 0x1) +#define viddec_fw_mp4_get_intra_dc_vlc_thr(x) viddec_fw_bitfields_extract((x)->vop_data, 3, 0x7) +#define viddec_fw_mp4_set_intra_dc_vlc_thr(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 3, 0x7) +#define viddec_fw_mp4_get_vop_rounding_type(x) viddec_fw_bitfields_extract((x)->vop_data, 2, 0x1) +#define viddec_fw_mp4_set_vop_rounding_type(x, val) 
viddec_fw_bitfields_insert((x)->vop_data, val, 2, 0x1)
+#define viddec_fw_mp4_get_vop_coding_type(x)       viddec_fw_bitfields_extract((x)->vop_data, 0, 0x3)
+#define viddec_fw_mp4_set_vop_coding_type(x, val)  viddec_fw_bitfields_insert((x)->vop_data, val, 0, 0x3)
+    unsigned int vop_data;
+
+    // No of bits used in first byte of MB data
+    unsigned int bit_offset;
+
+} viddec_fw_mp4_vop_info_t;
+
+// This structure contains the B-VOP timing information derived by the parser
+// from the Video Object Layer and Video Object Plane data.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_BVOP_INFO, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Frame period = T(first B-VOP after VOL) - T(past reference of first B-VOP after VOL)
+    unsigned int Tframe;
+
+    // TRD is the difference in temporal reference of the temporally next reference VOP with
+    // temporally previous reference VOP, assuming B-VOPs or skipped VOPs in between.
+    unsigned int TRD;
+
+    // TRB is the difference in temporal reference of the B-VOP and the previous reference VOP.
+    unsigned int TRB;
+
+} viddec_fw_mp4_bvop_info_t;
+
+// This structure contains the information extracted from the sprite trajectory.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SPRT_TRAJ,
+// using the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Sprite Trajectory can have dmv_codes for each warping point.
+    //  0:13 - warping_mv_code_dv
+    // 14:27 - warping_mv_code_du
+    // 28:31 - warping_point_index - identifies which warping point the warping code refers to.
+    //         The default value for index is 0xF which should be treated as invalid.
+#define viddec_fw_mp4_get_warping_point_index(x)       viddec_fw_bitfields_extract((x), 28, 0xF)
+#define viddec_fw_mp4_set_warping_point_index(x, val)  viddec_fw_bitfields_insert((x), val, 28, 0xF)
+#define viddec_fw_mp4_get_warping_mv_code_du(x)        viddec_fw_bitfields_extract((x), 14, 0x3FFF)
+#define viddec_fw_mp4_set_warping_mv_code_du(x, val)   viddec_fw_bitfields_insert((x), val, 14, 0x3FFF)
+#define viddec_fw_mp4_get_warping_mv_code_dv(x)        viddec_fw_bitfields_extract((x), 0, 0x3FFF)
+#define viddec_fw_mp4_set_warping_mv_code_dv(x, val)   viddec_fw_bitfields_insert((x), val, 0, 0x3FFF)
+    unsigned int warping_mv_code[3];
+} viddec_fw_mp4_sprite_trajectory_t;
+
+// IQUANT entries will be populated in the workload using items of type VIDDEC_WORKLOAD_MP4_IQUANT and the
+// vwi_payload array. The entries will be in the order in which they need to be programmed in the registers.
+// There is no need for a separate structure for these values.
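+
+// All of the accessors in this header follow the same pattern: a value is
+// inserted into, or extracted from, a 32-bit payload word at a given bit
+// position with a given mask.  A minimal sketch of the underlying helpers
+// (assuming the usual definitions from viddec_fw_item_types.h -- shown here
+// for illustration only, not redefined by this header):
+//
+//     #define viddec_fw_bitfields_extract(x_32, start, mask) \
+//         (((x_32) >> (start)) & (mask))
+//     #define viddec_fw_bitfields_insert(x_32, val_32, start, mask) \
+//         ((x_32) = ((x_32) & ~((mask) << (start))) | (((val_32) & (mask)) << (start)))
+//
+// For example, the SVH accessors defined below store temporal_reference at
+// bits 0:7 of svh_data: viddec_fw_mp4_set_temporal_reference(&svh, 5) leaves
+// svh_data == 0x05, and the matching get returns 5.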
+
+// This structure contains the information extracted from the Video Plane with Short Header.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SVH, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Video Plane with Short Header
+    //  0:7  - temporal_reference
+    //  8:19 - num_macroblocks_in_gob
+    // 20:24 - num_gobs_in_vop
+    // 25:27 - num_rows_in_gob
+#define viddec_fw_mp4_get_num_rows_in_gob(x)             viddec_fw_bitfields_extract((x)->svh_data, 25, 0x7)
+#define viddec_fw_mp4_set_num_rows_in_gob(x, val)        viddec_fw_bitfields_insert((x)->svh_data, val, 25, 0x7)
+#define viddec_fw_mp4_get_num_gobs_in_vop(x)             viddec_fw_bitfields_extract((x)->svh_data, 20, 0x1F)
+#define viddec_fw_mp4_set_num_gobs_in_vop(x, val)        viddec_fw_bitfields_insert((x)->svh_data, val, 20, 0x1F)
+#define viddec_fw_mp4_get_num_macroblocks_in_gob(x)      viddec_fw_bitfields_extract((x)->svh_data, 8, 0xFFF)
+#define viddec_fw_mp4_set_num_macroblocks_in_gob(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 8, 0xFFF)
+#define viddec_fw_mp4_get_temporal_reference(x)          viddec_fw_bitfields_extract((x)->svh_data, 0, 0xFF)
+#define viddec_fw_mp4_set_temporal_reference(x, val)     viddec_fw_bitfields_insert((x)->svh_data, val, 0, 0xFF)
+    unsigned int svh_data;
+
+    unsigned int pad1;
+    unsigned int pad2;
+} viddec_fw_mp4_svh_t;
+
+#endif
diff --git a/mixvbp/vbp_plugin/mp4/viddec_fw_mp4_workload.c b/mixvbp/vbp_plugin/mp4/viddec_fw_mp4_workload.c
new file mode 100755
index 0000000..c9ec2fb
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_fw_mp4_workload.c
@@ -0,0 +1,377 @@
+#ifndef VBP
+#include <string.h>
+
+#include "viddec_fw_workload.h"
+#include "viddec_parser_ops.h"
+#include "viddec_fw_mp4.h"
+#include "viddec_mp4_parse.h"
+
+uint32_t viddec_fw_mp4_populate_attr(viddec_workload_t *wl, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_frame_attributes_t *attr = &(wl->attrs);
+    mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+
+    memset(attr, 0, sizeof(viddec_frame_attributes_t));
+
+    attr->cont_size.width  = vol->video_object_layer_width;
+    attr->cont_size.height = vol->video_object_layer_height;
+
+    // Translate vop_coding_type
+    switch (vol->VideoObjectPlane.vop_coding_type)
+    {
+    case MP4_VOP_TYPE_B:
+        attr->frame_type = VIDDEC_FRAME_TYPE_B;
+        break;
+    case MP4_VOP_TYPE_P:
+        attr->frame_type = VIDDEC_FRAME_TYPE_P;
+        break;
+    case MP4_VOP_TYPE_S:
+        attr->frame_type = VIDDEC_FRAME_TYPE_S;
+        break;
+    case MP4_VOP_TYPE_I:
+        attr->frame_type = VIDDEC_FRAME_TYPE_I;
+        break;
+    default:
+        break;
+    } // switch on vop_coding_type
+
+    attr->mpeg4.top_field_first = vol->VideoObjectPlane.top_field_first;
+
+    return result;
+} // viddec_fw_mp4_populate_attr
+
+uint32_t viddec_fw_mp4_insert_vol_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    viddec_fw_mp4_vol_info_t vol_info;
+    mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+
+    memset(&vol_info, 0, sizeof(viddec_fw_mp4_vol_info_t));
+
+    // Get vol_flags
+    viddec_fw_mp4_set_reversible_vlc(&vol_info, vol->reversible_vlc);
+    viddec_fw_mp4_set_data_partitioned(&vol_info, vol->data_partitioned);
+    viddec_fw_mp4_set_resync_marker_disable(&vol_info, vol->resync_marker_disable);
+    viddec_fw_mp4_set_quarter_sample(&vol_info, vol->quarter_sample);
+    viddec_fw_mp4_set_obmc_disable(&vol_info, vol->obmc_disable);
+    viddec_fw_mp4_set_interlaced(&vol_info, vol->interlaced);
+    viddec_fw_mp4_set_vol_shape(&vol_info, vol->video_object_layer_shape);
+    viddec_fw_mp4_set_short_video_header_flag(&vol_info, vol->short_video_header);
+
+    // Get vol_size
+    viddec_fw_mp4_set_vol_width(&vol_info,
vol->video_object_layer_width); + viddec_fw_mp4_set_vol_height(&vol_info, vol->video_object_layer_height); + + // Get vol_item + viddec_fw_mp4_set_quant_type(&vol_info, vol->quant_type); + viddec_fw_mp4_set_quant_precision(&vol_info, vol->quant_precision); + viddec_fw_mp4_set_sprite_warping_accuracy(&vol_info, vol->sprite_info.sprite_warping_accuracy); + viddec_fw_mp4_set_sprite_warping_points(&vol_info, vol->sprite_info.no_of_sprite_warping_points); + viddec_fw_mp4_set_sprite_enable(&vol_info, vol->sprite_enable); + viddec_fw_mp4_set_vop_time_increment_resolution(&vol_info, vol->vop_time_increment_resolution); + + + wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_VOL_INFO; + wi.vwi_payload[0] = vol_info.vol_flags; + wi.vwi_payload[1] = vol_info.vol_size; + wi.vwi_payload[2] = vol_info.vol_item; + + result = viddec_pm_append_workitem(parent, &wi, false); + + return result; +} // viddec_fw_mp4_insert_vol_workitem + +uint32_t viddec_fw_mp4_insert_vop_workitem(void *parent, viddec_mp4_parser_t *parser) +{ + uint32_t result = MP4_STATUS_OK; + viddec_workload_item_t wi; + viddec_fw_mp4_vop_info_t vop_info; + mp4_VideoObjectPlane_t *vop = &(parser->info.VisualObject.VideoObject.VideoObjectPlane); + uint32_t byte = 0; + unsigned char is_emul; + + memset(&vop_info, 0, sizeof(viddec_fw_mp4_vop_info_t)); + + // Get frame_info + viddec_fw_mp4_set_past_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_2].is_field); + viddec_fw_mp4_set_past_frame_id(&vop_info, VIDDEC_MP4_FRAME_PAST); + viddec_fw_mp4_set_future_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_1].is_field); + viddec_fw_mp4_set_future_frame_id(&vop_info, VIDDEC_MP4_FRAME_FUTURE); + viddec_fw_mp4_set_current_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_0].is_field); + viddec_fw_mp4_set_current_frame_id(&vop_info, VIDDEC_MP4_FRAME_CURRENT); + + // HW has a limitation that the enums for PAST(1), FUTURE(2) and CURRENT(0) cannot be changed and + // the spec does not support field pictures. Hence the field_frame bits are always zero. + // This gives us the constant 0x10200. 
+    vop_info.frame_info = 0x10200;
+
+    // Get vop_data
+    // Quant scale is in the video_packet_header or the gob_layer - both of which are parsed by the BSP
+    viddec_fw_mp4_set_vop_quant_scale(&vop_info, 0);
+    viddec_fw_mp4_set_vop_fcode_backward(&vop_info, vop->vop_fcode_backward);
+    viddec_fw_mp4_set_vop_fcode_forward(&vop_info, vop->vop_fcode_forward);
+    viddec_fw_mp4_set_vop_quant(&vop_info, vop->vop_quant);
+    viddec_fw_mp4_set_alternate_vertical_scan_flag(&vop_info, vop->alternate_vertical_scan_flag);
+    viddec_fw_mp4_set_top_field_first(&vop_info, vop->top_field_first);
+    viddec_fw_mp4_set_intra_dc_vlc_thr(&vop_info, vop->intra_dc_vlc_thr);
+    viddec_fw_mp4_set_vop_rounding_type(&vop_info, vop->vop_rounding_type);
+    viddec_fw_mp4_set_vop_coding_type(&vop_info, vop->vop_coding_type);
+
+    // Get bit_offset
+    result = viddec_pm_get_au_pos(parent, &vop_info.bit_offset, &byte, &is_emul);
+
+    wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_VOP_INFO;
+    wi.vwi_payload[0] = vop_info.frame_info;
+    wi.vwi_payload[1] = vop_info.vop_data;
+    wi.vwi_payload[2] = vop_info.bit_offset;
+
+    result = viddec_pm_append_workitem(parent, &wi, false);
+
+    return result;
+} // viddec_fw_mp4_insert_vop_workitem
+
+uint32_t viddec_fw_mp4_insert_vpsh_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    viddec_fw_mp4_svh_t svh_info;
+    mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263);
+
+    memset(&svh_info, 0, sizeof(viddec_fw_mp4_svh_t));
+
+    // Get svh_data
+    viddec_fw_mp4_set_temporal_reference(&svh_info, svh->temporal_reference);
+    viddec_fw_mp4_set_num_macroblocks_in_gob(&svh_info, svh->num_macroblocks_in_gob);
+    viddec_fw_mp4_set_num_gobs_in_vop(&svh_info, svh->num_gobs_in_vop);
+    viddec_fw_mp4_set_num_rows_in_gob(&svh_info, svh->num_rows_in_gob);
+
+    wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_SVH;
+    wi.vwi_payload[0] = svh_info.svh_data;
+    wi.vwi_payload[1] = svh_info.pad1;
+    wi.vwi_payload[2] = svh_info.pad2;
+
+    result = viddec_pm_append_workitem(parent, &wi, false);
+
+    return result;
+} // viddec_fw_mp4_insert_vpsh_workitem
+
+uint32_t viddec_fw_mp4_insert_sprite_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    viddec_fw_mp4_sprite_trajectory_t sprite_info;
+    mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+    mp4_VideoObjectPlane_t *vop = &(parser->info.VisualObject.VideoObject.VideoObjectPlane);
+    uint8_t no_of_entries_per_item = 3;
+    uint8_t no_of_sprite_workitems = 0;
+    uint8_t warp_index = 0;
+    int i, j;
+
+    if (!vol->sprite_info.no_of_sprite_warping_points)
+        return result;
+
+    no_of_sprite_workitems = (vol->sprite_info.no_of_sprite_warping_points > 3) ? 2 : 1;
+
+    for (i = 0; i < no_of_sprite_workitems; i++)
+    {
+        memset(&sprite_info, 0, sizeof(viddec_fw_mp4_sprite_trajectory_t));
+
+        for (j = 0; j < no_of_entries_per_item; j++)
+        {
+            if (warp_index < vol->sprite_info.no_of_sprite_warping_points)
+            {
+                if (warp_index < 4)
+                {
+                    viddec_fw_mp4_set_warping_point_index(sprite_info.warping_mv_code[j], warp_index);
+                    viddec_fw_mp4_set_warping_mv_code_du(sprite_info.warping_mv_code[j], vop->warping_mv_code_du[warp_index]);
+                    viddec_fw_mp4_set_warping_mv_code_dv(sprite_info.warping_mv_code[j], vop->warping_mv_code_dv[warp_index]);
+                }
+            }
+            else
+            {
+                sprite_info.warping_mv_code[j] = 0xF << 28;
+            }
+            warp_index++;
+        }
+
+        wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_SPRT_TRAJ;
+        wi.vwi_payload[0] = sprite_info.warping_mv_code[0];
+        wi.vwi_payload[1] = sprite_info.warping_mv_code[1];
+        wi.vwi_payload[2] = sprite_info.warping_mv_code[2];
+
+        result = viddec_pm_append_workitem(parent, &wi, false);
+    }
+
+    return result;
+} // viddec_fw_mp4_insert_sprite_workitem
+
+uint32_t viddec_fw_mp4_insert_bvop_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+
+    wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_BVOP_INFO;
+    wi.vwi_payload[0] = vol->Tframe;
+    wi.vwi_payload[1] = vol->TRD;
+    wi.vwi_payload[2] = vol->TRB;
+
+    result = viddec_pm_append_workitem(parent, &wi, false);
+
+    return result;
+} // viddec_fw_mp4_insert_bvop_workitem
+
+uint32_t viddec_fw_mp4_insert_qmat(void *parent, uint8_t intra_quant_flag, uint32_t *qmat)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    uint8_t i;
+
+    // No of items = (64/4 Dwords / 3 entries per workload item)
+    // 64 8b entries => 64 * 8 / 32 DWORDS => 64/4 DWORDS => 16 DWORDS
+    // Each item can store 3 DWORDS, 16 DWORDS => 16/3 items => 6 items
+    for (i = 0; i < 6; i++)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+
+        if (intra_quant_flag)
+            wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_IQUANT;
+        else
+            wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_NIQUANT;
+
+        if (i == 5)
+        {
+            // The last item carries only the 16th DWORD; pad the rest with zeros.
+            wi.vwi_payload[0] = qmat[0];
+            wi.vwi_payload[1] = 0;
+            wi.vwi_payload[2] = 0;
+        }
+        else
+        {
+            wi.vwi_payload[0] = qmat[0];
+            wi.vwi_payload[1] = qmat[1];
+            wi.vwi_payload[2] = qmat[2];
+        }
+
+        qmat += 3;
+
+        result = viddec_pm_append_workitem(parent, &wi, false);
+    }
+
+    return result;
+} // viddec_fw_mp4_insert_qmat
+
+uint32_t viddec_fw_mp4_insert_inversequant_workitem(void *parent, mp4_VOLQuant_mat_t *qmat)
+{
+    uint32_t result = MP4_STATUS_OK;
+
+    if (qmat->load_intra_quant_mat)
+    {
+        result = viddec_fw_mp4_insert_qmat(parent, true, (uint32_t *) &(qmat->intra_quant_mat));
+    }
+
+    if (qmat->load_nonintra_quant_mat)
+    {
+        result = viddec_fw_mp4_insert_qmat(parent, false, (uint32_t *) &(qmat->nonintra_quant_mat));
+    }
+
+    return result;
+} // viddec_fw_mp4_insert_inversequant_workitem
+
+uint32_t viddec_fw_mp4_insert_past_frame_workitem(void *parent)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+
+    wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_PAST_FRAME;
+    wi.ref_frame.reference_id     = 0;
+    wi.ref_frame.luma_phys_addr   = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    result = viddec_pm_append_workitem(parent, &wi, false);
+
+    return result;
+} // viddec_fw_mp4_insert_past_frame_workitem
+
+uint32_t viddec_fw_mp4_insert_future_frame_workitem(void *parent)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+
+    wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_FUTURE_FRAME;
+    wi.ref_frame.reference_id     = 0;
+    wi.ref_frame.luma_phys_addr   = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    result = viddec_pm_append_workitem(parent, &wi, false);
+
+    return result;
+} // viddec_fw_mp4_insert_future_frame_workitem
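+
+/* A note on the reorder encoding used below (and by the MPEG2 parser): each
+   byte of ref_reorder_00010203 names the source slot for reference-table
+   entries 0..3 -- most-significant byte first, as the identity value
+   0x00010203 implied by the field name suggests -- and ref_reorder_04050607
+   covers entries 4..7 the same way.  Writing 0x01010203 therefore copies the
+   frame at slot 1 into slot 0 and leaves every other slot alone.  A sketch
+   (illustration only) of how a consumer could decode one mapping word:
+
+       uint32_t map = 0x01010203;
+       int src_for_slot0 = (map >> 24) & 0xFF;    // == 1
+*/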
+
+uint32_t viddec_fw_mp4_insert_reorder_workitem(void *parent)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+
+    // Move frame at location 1 of the reference table to location 0
+    wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER;
+    wi.ref_reorder.ref_table_offset = 0;
+    wi.ref_reorder.ref_reorder_00010203 = 0x01010203;
+    wi.ref_reorder.ref_reorder_04050607 = 0x04050607;
+
+    result = viddec_pm_append_workitem(parent, &wi, false);
+
+    return result;
+} // viddec_fw_mp4_insert_reorder_workitem
+
+uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt)
+{
+    uint32_t result = 0;
+    viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt;
+    viddec_workload_t *wl = viddec_pm_get_header(parent);
+
+    result = viddec_fw_mp4_populate_attr(wl, parser);
+    result = viddec_fw_mp4_insert_vol_workitem(parent, parser);
+    result = viddec_fw_mp4_insert_vop_workitem(parent, parser);
+    result = viddec_fw_mp4_insert_sprite_workitem(parent, parser);
+    result = viddec_fw_mp4_insert_inversequant_workitem(parent, &(parser->info.VisualObject.VideoObject.quant_mat_info));
+
+    if (parser->info.VisualObject.VideoObject.short_video_header)
+        result = viddec_fw_mp4_insert_vpsh_workitem(parent, parser);
+
+    if (!parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded)
+        wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME;
+
+    // Send reference re-order tag for all reference frame types
+    if (parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type != MP4_VOP_TYPE_B)
+    {
+        result = viddec_fw_mp4_insert_reorder_workitem(parent);
+    }
+
+    // Handle vop_coding_type based information
+    switch (parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type)
+    {
+    case MP4_VOP_TYPE_B:
+        result = viddec_fw_mp4_insert_bvop_workitem(parent, parser);
+        result = viddec_fw_mp4_insert_past_frame_workitem(parent);
+        result = viddec_fw_mp4_insert_future_frame_workitem(parent);
+        break;
+    case MP4_VOP_TYPE_P:
+    case MP4_VOP_TYPE_S:
+        result = viddec_fw_mp4_insert_past_frame_workitem(parent);
+        // Deliberate fall-thru to type I
+    case MP4_VOP_TYPE_I:
+        wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (1 & WORKLOAD_REFERENCE_FRAME_BMASK);
+        // Swap reference information
+        parser->ref_frame[VIDDEC_MP4_INDX_2] = parser->ref_frame[VIDDEC_MP4_INDX_1];
+        parser->ref_frame[VIDDEC_MP4_INDX_1] = parser->ref_frame[VIDDEC_MP4_INDX_0];
+        break;
+    default:
+        break;
+    } // switch on vop_coding_type
+
+    result = viddec_pm_append_pixeldata(parent);
+
+    return result;
+} // viddec_fw_mp4_emit_workload
+#endif
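+
+/* Work-item budget for viddec_fw_mp4_insert_qmat() above, spelled out:
+   one quantization matrix is 64 one-byte entries = 64 * 8 / 32 = 16 DWORDs,
+   and each work item carries 3 payload DWORDs, so ceil(16 / 3) = 6 items;
+   items 0..4 carry 3 DWORDs each (15 total) and the last item (i == 5)
+   carries the remaining DWORD, zero-padded. */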
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.c
new file mode 100755
index 0000000..021678e
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.c
@@ -0,0 +1,98 @@
+#include "viddec_mp4_decodevideoobjectplane.h"
+
+mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t* pInfo)
+{
+    mp4_Status_t status = MP4_STATUS_OK;
+    uint32_t vop_time = 0;
+//  mp4_VisualObject_t *vo = &(pInfo->VisualObject);
+    mp4_VideoObjectLayer_t *vol = &(pInfo->VisualObject.VideoObject);
+    mp4_GroupOfVideoObjectPlane_t *gvop = &(pInfo->VisualObject.VideoObject.GroupOfVideoObjectPlane);
+    mp4_VideoObjectPlane_t *vop = &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+
+    // set VOP time
+    if (vol->short_video_header)
+    {
+        vop_time = vol->vop_sync_time +
+                   pInfo->VisualObject.VideoObject.VideoObjectPlaneH263.temporal_reference * 1001;
+
+//      if (vo->currentFrame.time > vop_time)
+        {
+            vol->vop_sync_time += 256 * 1001;
+            vop_time += 256 * 1001;
+        }
+    }
+    else
+    {
+        if (vop->vop_coding_type == MP4_VOP_TYPE_B)
+        {
+            vop_time = vol->vop_sync_time_b + vop->modulo_time_base * vol->vop_time_increment_resolution + vop->vop_time_increment;
+        }
+        else
+        {
+            if (gvop->time_base > vol->vop_sync_time)
+                vol->vop_sync_time = gvop->time_base;
+
+            vop_time = vol->vop_sync_time + vop->modulo_time_base * vol->vop_time_increment_resolution + vop->vop_time_increment;
+
+            if (vol->vop_sync_time_b < vol->vop_sync_time)
+                vol->vop_sync_time_b = vol->vop_sync_time;
+
+            if (vop->modulo_time_base != 0)
+                vol->vop_sync_time = vop_time - vop->vop_time_increment;
+        }
+    }
+
+    if (vop->vop_coded)
+    {
+        switch (vop->vop_coding_type)
+        {
+        case MP4_VOP_TYPE_S:
+            if (vol->sprite_enable != MP4_SPRITE_GMC)
+                break;
+            // Deliberate fall-through from this case
+        case MP4_VOP_TYPE_I:
+        case MP4_VOP_TYPE_P:
+            // set past and future time for B-VOP
+            vol->pastFrameTime   = vol->futureFrameTime;
+            vol->futureFrameTime = vop_time;
+            break;
+        default:
+            break;
+        }
+    }
+
+    if (vop->vop_coded)
+//      || (vop_time != vo->currentFrame.time && vop_time != vo->pastFrame.time && vop_time != vo->futureFrame.time) )
+    {
+        if (vop->vop_coding_type == MP4_VOP_TYPE_B)
+        {
+            if (!vol->Tframe)
+                vol->Tframe = (int) (vop_time); // - vo->pastFrame.time);
+
+            if (vop->vop_coded)
+            {
+                vol->TRB = (int) (vop_time - vol->pastFrameTime);
+                vol->TRD = (int) (vol->futureFrameTime - vol->pastFrameTime);
+
+                // defense from bad streams when B-VOPs are before Past and/or Future
+                if (vol->TRB <= 0)
+                    vol->TRB = 1;
+
+                if (vol->TRD <= 0)
+                    vol->TRD = 2;
+
+                if (vol->TRD <= vol->TRB)
+                {
+                    vol->TRB = 1;
+                    vol->TRD = 2;
+                }
+
+                if (vol->Tframe >= vol->TRD)
+                    vol->Tframe = vol->TRB;
+            }
+        }
+    }
+
+    return status;
+} // mp4_DecodeVideoObjectPlane
+
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.h
new file mode 100755
index 0000000..e03bcb0
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.h
@@ -0,0 +1,10 @@
+#ifndef VIDDEC_MP4_DECODEVIDEOOBJECTPLANE_H
+#define VIDDEC_MP4_DECODEVIDEOOBJECTPLANE_H
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+
+mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t *pInfo);
+
+//void mp4_copy_info_to_dmem(mp4_Info_t *pInfo, mp4_MBHWInterface *ptr_parameters);
+
+#endif
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c
new file mode 100755
index 0000000..85eab1a
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c
@@ -0,0 +1,191 @@
+#include <stdint.h>
+#include <string.h>
+
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+#include "viddec_mp4_decodevideoobjectplane.h"
+#include "viddec_mp4_shortheader.h"
+#include "viddec_mp4_videoobjectlayer.h"
+#include "viddec_mp4_videoobjectplane.h"
+#include "viddec_mp4_visualobject.h"
+
+void viddec_mp4_get_context_size(viddec_parser_memory_sizes_t *size)
+{
+    /* Should return size of my structure */
+    size->context_size = sizeof(viddec_mp4_parser_t);
+    size->persist_size = 0;
+    return;
+} // viddec_mp4_get_context_size
+
+void viddec_mp4_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+{
+    viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt;
+
+    persist_mem = persist_mem;
+    parser->is_frame_start = false;
+    parser->prev_sc        = MP4_SC_INVALID;
+    parser->current_sc     =
MP4_SC_INVALID; + parser->cur_sc_prefix = false; + parser->next_sc_prefix = false; + parser->ignore_scs = false; + + if (preserve) + { + // Need to maintain information till VOL + parser->sc_seen &= MP4_SC_SEEN_VOL; + parser->bitstream_error &= MP4_HDR_ERROR_MASK; + + // Reset only frame related data + memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlane), 0, sizeof(mp4_VideoObjectPlane_t)); + memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263), 0, sizeof(mp4_VideoObjectPlaneH263)); + } + else + { + parser->sc_seen = MP4_SC_SEEN_INVALID; + parser->bitstream_error = MP4_BS_ERROR_NONE; + memset(&(parser->info), 0, sizeof(mp4_Info_t)); + } + + return; +} // viddec_mp4_init + +static uint32_t viddec_mp4_decodevop_and_emitwkld(void *parent, void *ctxt) +{ + int status = MP4_STATUS_OK; + viddec_mp4_parser_t *cxt = (viddec_mp4_parser_t *)ctxt; + + status = mp4_DecodeVideoObjectPlane(&(cxt->info)); + + return status; +} // viddec_mp4_decodevop_and_emitwkld + +uint32_t viddec_mp4_parse(void *parent, void *ctxt) +{ + uint32_t sc=0; + viddec_mp4_parser_t *cxt; + uint8_t is_svh=0; + int32_t getbits=0; + int32_t status = 0; + + cxt = (viddec_mp4_parser_t *)ctxt; + is_svh = (cxt->cur_sc_prefix) ? false: true; + if ((getbits = viddec_pm_peek_bits(parent, &sc, 32)) == -1) + { + DEB("Start code not found\n"); + return VIDDEC_PARSE_ERROR; + } + + if (!is_svh) + { + viddec_pm_get_bits(parent, &sc, 32); + sc = sc & 0xFF; + cxt->current_sc = sc; + cxt->current_sc |= 0x100; + DEB("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc); + + switch (sc) + { + case MP4_SC_VISUAL_OBJECT_SEQUENCE: + { + status = mp4_Parse_VisualSequence(parent, cxt); + cxt->prev_sc = MP4_SC_VISUAL_OBJECT_SEQUENCE; + DEB("MP4_VISUAL_OBJECT_SEQUENCE_SC: \n"); + break; + } + case MP4_SC_VISUAL_OBJECT_SEQUENCE_EC: + {/* Not required to do anything */ + break; + } + case MP4_SC_USER_DATA: + { /* Copy userdata to user-visible buffer (EMIT) */ + status = mp4_Parse_UserData(parent, cxt); + DEB("MP4_USER_DATA_SC: \n"); + break; + } + case MP4_SC_GROUP_OF_VOP: + { + status = mp4_Parse_GroupOfVideoObjectPlane(parent, cxt); + cxt->prev_sc = MP4_SC_GROUP_OF_VOP; + DEB("MP4_GROUP_OF_VOP_SC:0x%.8X\n", status); + break; + } + case MP4_SC_VIDEO_SESSION_ERROR: + {/* Not required to do anything?? */ + break; + } + case MP4_SC_VISUAL_OBJECT: + { + status = mp4_Parse_VisualObject(parent, cxt); + cxt->prev_sc = MP4_SC_VISUAL_OBJECT; + DEB("MP4_VISUAL_OBJECT_SC: status=%.8X\n", status); + break; + } + case MP4_SC_VIDEO_OBJECT_PLANE: + { + /* We must decode the VOP Header information, it does not end on a byte boundary, so we need to emit + a starting bit offset after parsing the header. 
 */
+            status = mp4_Parse_VideoObjectPlane(parent, cxt);
+            status = viddec_mp4_decodevop_and_emitwkld(parent, cxt);
+            // TODO: Fix this for interlaced
+            cxt->is_frame_start = true;
+            cxt->sc_seen |= MP4_SC_SEEN_VOP;
+
+            DEB("MP4_VIDEO_OBJECT_PLANE_SC: status=0x%.8X\n", status);
+            break;
+        }
+        case MP4_SC_STUFFING:
+        {
+            break;
+        }
+        default:
+        {
+            if ( (sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX) )
+            {
+                status = mp4_Parse_VideoObjectLayer(parent, cxt);
+                cxt->sc_seen = MP4_SC_SEEN_VOL;
+                cxt->prev_sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN;
+                DEB("MP4_VIDEO_OBJECT_LAYER_MIN_SC:status=0x%.8X\n", status);
+                sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN;
+            }
+            // sc is unsigned and will be >= 0, so no check needed for sc >= MP4_SC_VIDEO_OBJECT_MIN
+            else if (sc <= MP4_SC_VIDEO_OBJECT_MAX)
+            {
+                // If there is more data, it is short video header, else the next start code is expected to be VideoObjectLayer
+                getbits = viddec_pm_get_bits(parent, &sc, 22);
+                if (getbits != -1)
+                {
+                    cxt->current_sc = sc;
+                    status = mp4_Parse_VideoObject_svh(parent, cxt);
+                    status = viddec_mp4_decodevop_and_emitwkld(parent, cxt);
+                    cxt->sc_seen = MP4_SC_SEEN_SVH;
+                    cxt->is_frame_start = true;
+                    DEB("MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc);
+                    DEB("MP4_VIDEO_OBJECT_MIN_SC:status=0x%.8X\n", status);
+                }
+            }
+            else
+            {
+                DEB("UNKNOWN start code: 0x%08X\n", sc);
+            }
+        }
+        break;
+        }
+    }
+    else
+    {
+        viddec_pm_get_bits(parent, &sc, 22);
+        cxt->current_sc = sc;
+        DEB("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc);
+        status = mp4_Parse_VideoObject_svh(parent, cxt);
+        status = viddec_mp4_decodevop_and_emitwkld(parent, cxt);
+        cxt->sc_seen = MP4_SC_SEEN_SVH;
+        cxt->is_frame_start = true;
+        DEB("SVH: MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc);
+    }
+
+    // Current sc becomes the previous sc
+    cxt->prev_sc = sc;
+
+    return VIDDEC_PARSE_SUCESS;
+} // viddec_mp4_parse
+
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h
new file mode 100755
index 0000000..305e09b
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h
@@ -0,0 +1,529 @@
+#ifndef VIDDEC_MP4_PARSE_H
+#define VIDDEC_MP4_PARSE_H
+
+#include "viddec_fw_mp4.h"
+
+/* Macros for MP4 start code detection */
+#define FIRST_STARTCODE_BYTE        0x00
+#define SECOND_STARTCODE_BYTE       0x00
+#define THIRD_STARTCODE_BYTE        0x01
+#define SHORT_THIRD_STARTCODE_BYTE  0x80
+#define SC_BYTE_MASK0               0x00ff0000
+#define SC_BYTE_MASK1               0x000000ff
+
+/* status codes */
+typedef enum
+{
+    MP4_STATUS_OK              = 0,        /* Success */
+    MP4_STATUS_PARSE_ERROR     = (1 << 0), /* Invalid syntax */
+    MP4_STATUS_NOTSUPPORT      = (1 << 1), /* unsupported feature */
+    MP4_STATUS_REQD_DATA_ERROR = (1 << 2), /* supported data either invalid or missing */
+} mp4_Status_t;
+
+/* feature codes */
+typedef enum
+{
+    MP4_VOP_FEATURE_DEFAULT = 0, // Default VOP features, no code image update needed
+    MP4_VOP_FEATURE_SVH     = 1, // VOP has Short Video Header
+    MP4_VOP_FEATURE_DP      = 2  // VOP is Data Partitioned
+} mp4_Vop_feature;
+
+/* MPEG-4 start code values: Table 6-3 */
+typedef enum
+{
+    MP4_SC_VIDEO_OBJECT_MIN          = 0x00,
+    MP4_SC_VIDEO_OBJECT_MAX          = 0x1F,
+    MP4_SC_VIDEO_OBJECT_LAYER_MIN    = 0x20,
+    MP4_SC_VIDEO_OBJECT_LAYER_MAX    = 0x2F,
+    MP4_SC_FGS_BP_MIN                = 0x40, /* Unsupported */
+    MP4_SC_FGS_BP_MAX                = 0x5F, /* Unsupported */
+    MP4_SC_VISUAL_OBJECT_SEQUENCE    = 0xB0,
+    MP4_SC_VISUAL_OBJECT_SEQUENCE_EC = 0xB1,
+    MP4_SC_USER_DATA                 = 0xB2,
+    MP4_SC_GROUP_OF_VOP              = 0xB3,
+    MP4_SC_VIDEO_SESSION_ERROR       = 0xB4, +
MP4_SC_VISUAL_OBJECT = 0xB5, + MP4_SC_VIDEO_OBJECT_PLANE = 0xB6, + MP4_SC_SLICE = 0xB7, /* Unsupported */ + MP4_SC_EXTENSION = 0xB8, /* Unsupported */ + MP4_SC_FGS_VOP = 0xB9, /* Unsupported */ + MP4_SC_FBA_OBJECT = 0xBA, /* Unsupported */ + MP4_SC_FBA_OBJECT_PLANE = 0xBB, /* Unsupported */ + MP4_SC_MESH_OBJECT = 0xBC, /* Unsupported */ + MP4_SC_MESH_OBJECT_PLANE = 0xBD, /* Unsupported */ + MP4_SC_STILL_TEXTURE_OBJECT = 0xBE, /* Unsupported */ + MP4_SC_TEXTURE_SPATIAL_LAYER = 0xBF, /* Unsupported */ + MP4_SC_TEXTURE_SNR_LAYER = 0xC0, /* Unsupported */ + MP4_SC_TEXTURE_TILE = 0xC1, /* Unsupported */ + MP4_SC_TEXTURE_SHAPE_LAYER = 0xC2, /* Unsupported */ + MP4_SC_STUFFING = 0xC3, + MP4_SC_SYTEM_MIN = 0xC6, /* Unsupported */ + MP4_SC_SYTEM_MAX = 0xFF, /* Unsupported */ + MP4_SC_INVALID = 0x100, /* Invalid */ +} mp4_start_code_values_t; + +/* MPEG-4 code values + ISO/IEC 14496-2:2004 table 6-6 */ +enum +{ + MP4_VISUAL_OBJECT_TYPE_VIDEO = 1, + MP4_VISUAL_OBJECT_TYPE_TEXTURE = 2, + MP4_VISUAL_OBJECT_TYPE_MESH = 3, + MP4_VISUAL_OBJECT_TYPE_FBA = 4, + MP4_VISUAL_OBJECT_TYPE_3DMESH = 5 +}; + +/* ISO/IEC 14496-2:2004 table 6-7 */ +enum +{ + MP4_VIDEO_FORMAT_COMPONENT = 0, + MP4_VIDEO_FORMAT_PAL = 1, + MP4_VIDEO_FORMAT_NTSC = 2, + MP4_VIDEO_FORMAT_SECAM = 3, + MP4_VIDEO_FORMAT_MAC = 4, + MP4_VIDEO_FORMAT_UNSPECIFIED = 5 +}; + +/* ISO/IEC 14496-2:2004 table 6-8..10 */ +enum +{ + MP4_VIDEO_COLORS_FORBIDDEN = 0, + MP4_VIDEO_COLORS_ITU_R_BT_709 = 1, + MP4_VIDEO_COLORS_UNSPECIFIED = 2, + MP4_VIDEO_COLORS_RESERVED = 3, + MP4_VIDEO_COLORS_ITU_R_BT_470_2_M = 4, + MP4_VIDEO_COLORS_ITU_R_BT_470_2_BG = 5, + MP4_VIDEO_COLORS_SMPTE_170M = 6, + MP4_VIDEO_COLORS_SMPTE_240M = 7, + MP4_VIDEO_COLORS_GENERIC_FILM = 8 +}; + +/* ISO/IEC 14496-2:2004 table 6-11 */ +enum +{ + MP4_VIDEO_OBJECT_TYPE_SIMPLE = 1, + MP4_VIDEO_OBJECT_TYPE_SIMPLE_SCALABLE = 2, + MP4_VIDEO_OBJECT_TYPE_CORE = 3, + MP4_VIDEO_OBJECT_TYPE_MAIN = 4, + MP4_VIDEO_OBJECT_TYPE_NBIT = 5, + MP4_VIDEO_OBJECT_TYPE_2DTEXTURE = 6, + MP4_VIDEO_OBJECT_TYPE_2DMESH = 7, + MP4_VIDEO_OBJECT_TYPE_SIMPLE_FACE = 8, + MP4_VIDEO_OBJECT_TYPE_STILL_SCALABLE_TEXTURE = 9, + MP4_VIDEO_OBJECT_TYPE_ADVANCED_REAL_TIME_SIMPLE = 10, + MP4_VIDEO_OBJECT_TYPE_CORE_SCALABLE = 11, + MP4_VIDEO_OBJECT_TYPE_ADVANCED_CODING_EFFICIENCY = 12, + MP4_VIDEO_OBJECT_TYPE_ADVANCED_SCALABLE_TEXTURE = 13, + MP4_VIDEO_OBJECT_TYPE_SIMPLE_FBA = 14, + MP4_VIDEO_OBJECT_TYPE_SIMPLE_STUDIO = 15, + MP4_VIDEO_OBJECT_TYPE_CORE_STUDIO = 16, + MP4_VIDEO_OBJECT_TYPE_ADVANCED_SIMPLE = 17, + MP4_VIDEO_OBJECT_TYPE_FINE_GRANULARITY_SCALABLE = 18 +}; + +/* ISO/IEC 14496-2:2004 table 6.17 (maximum defined video_object_layer_shape_extension) */ +#define MP4_SHAPE_EXT_NUM 13 + +/* ISO/IEC 14496-2:2004 table 6-14 */ +enum +{ + MP4_ASPECT_RATIO_FORBIDDEN = 0, + MP4_ASPECT_RATIO_1_1 = 1, + MP4_ASPECT_RATIO_12_11 = 2, + MP4_ASPECT_RATIO_10_11 = 3, + MP4_ASPECT_RATIO_16_11 = 4, + MP4_ASPECT_RATIO_40_33 = 5, + MP4_ASPECT_RATIO_EXTPAR = 15 +}; + +/* ISO/IEC 14496-2:2004 table 6-15 */ +#define MP4_CHROMA_FORMAT_420 1 + +/* ISO/IEC 14496-2:2004 table 6-16 */ +enum +{ + MP4_SHAPE_TYPE_RECTANGULAR = 0, + MP4_SHAPE_TYPE_BINARY = 1, + MP4_SHAPE_TYPE_BINARYONLY = 2, + MP4_SHAPE_TYPE_GRAYSCALE = 3 +}; + +/* ISO/IEC 14496-2:2004 table 6-19 */ +#define MP4_SPRITE_STATIC 1 +#define MP4_SPRITE_GMC 2 + +/* ISO/IEC 14496-2:2004 table 6-24 */ +enum +{ + MP4_VOP_TYPE_I = 0, + MP4_VOP_TYPE_P = 1, + MP4_VOP_TYPE_B = 2, + MP4_VOP_TYPE_S = 3, +}; + +/* ISO/IEC 14496-2:2004 table 6-26 */ +enum +{ + MP4_SPRITE_TRANSMIT_MODE_STOP = 0, + 
MP4_SPRITE_TRANSMIT_MODE_PIECE = 1, + MP4_SPRITE_TRANSMIT_MODE_UPDATE = 2, + MP4_SPRITE_TRANSMIT_MODE_PAUSE = 3 +}; + +/* ISO/IEC 14496-2:2004 table 7-3 */ +enum +{ + MP4_BAB_TYPE_MVDSZ_NOUPDATE = 0, + MP4_BAB_TYPE_MVDSNZ_NOUPDATE = 1, + MP4_BAB_TYPE_TRANSPARENT = 2, + MP4_BAB_TYPE_OPAQUE = 3, + MP4_BAB_TYPE_INTRACAE = 4, + MP4_BAB_TYPE_MVDSZ_INTERCAE = 5, + MP4_BAB_TYPE_MVDSNZ_INTERCAE = 6 +}; + +#define MP4_DC_MARKER 0x6B001 // 110 1011 0000 0000 0001 +#define MP4_MV_MARKER 0x1F001 // 1 1111 0000 0000 0001 + + +/* ISO/IEC 14496-2:2004 table G.1 */ +enum +{ + MP4_SIMPLE_PROFILE_LEVEL_1 = 0x01, + MP4_SIMPLE_PROFILE_LEVEL_2 = 0x02, + MP4_SIMPLE_PROFILE_LEVEL_3 = 0x03, + MP4_SIMPLE_PROFILE_LEVEL_4a = 0x04, + MP4_SIMPLE_PROFILE_LEVEL_5 = 0x05, + MP4_SIMPLE_PROFILE_LEVEL_6 = 0x06, + MP4_SIMPLE_PROFILE_LEVEL_0 = 0x08, + MP4_CORE_PROFILE_LEVEL_1 = 0x21, + MP4_CORE_PROFILE_LEVEL_2 = 0x22, + MP4_MAIN_PROFILE_LEVEL_2 = 0x32, + MP4_MAIN_PROFILE_LEVEL_3 = 0x33, + MP4_MAIN_PROFILE_LEVEL_4 = 0x34, + MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_1 = 0x91, + MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_2 = 0x92, + MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_3 = 0x93, + MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_4 = 0x94, + MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_1 = 0xB1, + MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_2 = 0xB2, + MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_3 = 0xB3, + MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_4 = 0xB4, + MP4_ADVANCED_CORE_PROFILE_LEVEL_1 = 0xC1, + MP4_ADVANCED_CORE_PROFILE_LEVEL_2 = 0xC2, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_0 = 0xF0, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_1 = 0xF1, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_2 = 0xF2, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3 = 0xF3, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_4 = 0xF4, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_5 = 0xF5, + MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3B = 0xF7 +}; + +/* Group Of Video Object Plane Info */ +typedef struct +{ + uint8_t closed_gov; + uint8_t broken_link; + uint8_t time_code_hours; + uint8_t time_code_minutes; + uint8_t time_code_seconds; + uint8_t dummy1; + uint16_t dummy2; + uint32_t time_base; +} mp4_GroupOfVideoObjectPlane_t; + + +/* Video Object Plane Info */ +typedef struct +{ + uint8_t vop_coding_type; + uint32_t modulo_time_base; + uint16_t vop_time_increment; + uint8_t vop_coded; + + uint16_t vop_id; + uint16_t vop_id_for_prediction; + uint8_t is_vop_id_for_prediction_indication; + uint8_t vop_rounding_type; + uint8_t vop_reduced_resolution; + uint8_t align_dummy; + + uint16_t vop_width; + uint16_t vop_height; + uint16_t vop_horizontal_mc_spatial_ref; + uint16_t vop_vertical_mc_spatial_ref; + + uint8_t background_composition; + uint8_t change_conv_ratio_disable; + uint8_t is_vop_constant_alpha; + uint8_t vop_constant_alpha_value; + uint8_t intra_dc_vlc_thr; + uint8_t top_field_first; + uint8_t alternate_vertical_scan_flag; + uint8_t sprite_transmit_mode; + + int32_t brightness_change_factor; + uint16_t vop_quant; + uint8_t vop_fcode_forward; + uint8_t vop_fcode_backward; + + uint16_t warping_mv_code_du[4]; + uint16_t warping_mv_code_dv[4]; + +} mp4_VideoObjectPlane_t; + +/* VOLControlParameters Info */ +typedef struct +{ + uint8_t chroma_format; + uint8_t low_delay; + uint8_t vbv_parameters; + uint8_t align_dummy1; + uint32_t bit_rate; + uint32_t vbv_buffer_size; + uint32_t vbv_occupancy; +} mp4_VOLControlParameters_t; + +/* Video Object Plane with short header Info */ +typedef struct _mp4_VideoObjectPlaneH263 +{ + uint8_t temporal_reference; + uint8_t split_screen_indicator; + uint8_t 
document_camera_indicator; + uint8_t full_picture_freeze_release; + uint8_t source_format; + uint8_t picture_coding_type; + uint8_t vop_quant; + uint16_t num_gobs_in_vop; + uint16_t num_macroblocks_in_gob; + uint8_t num_rows_in_gob; +#if 0 + uint8_t gob_number; + int gob_header_empty; + int gob_frame_id; + int quant_scale; +#endif + uint8_t vop_rounding_type; + //the following are required for PLUSPTYPE + uint8_t ufep; + uint16_t pixel_aspect_ratio_code; + uint16_t picture_width_indication; + uint16_t picture_height_indication; +} mp4_VideoObjectPlaneH263; + +typedef struct +{ + uint16_t sprite_width; + uint16_t sprite_height; + uint16_t sprite_left_coordinate; + uint16_t sprite_top_coordinate; + uint16_t no_of_sprite_warping_points; + uint16_t sprite_warping_accuracy; + uint16_t sprite_brightness_change; + uint16_t low_latency_sprite_enable; +} mp4_VOLSpriteInfo_t; + +typedef struct +{ + uint8_t load_intra_quant_mat; + uint8_t load_nonintra_quant_mat; + uint16_t align_dummy1; + uint8_t intra_quant_mat[64]; + uint8_t nonintra_quant_mat[64]; +} mp4_VOLQuant_mat_t; + +/* Video Object Layer Info */ +typedef struct +{ + uint8_t video_object_layer_id; /* Last 4 bits of start code. */ + uint8_t short_video_header; + uint8_t random_accessible_vol; + uint8_t video_object_type_indication; + + uint8_t is_object_layer_identifier; + uint8_t video_object_layer_verid; + uint8_t video_object_layer_priority; + uint8_t aspect_ratio_info; + + uint8_t aspect_ratio_info_par_width; + uint8_t aspect_ratio_info_par_height; + uint8_t align_dummy1; + uint8_t is_vol_control_parameters; + + mp4_VOLControlParameters_t VOLControlParameters; + + uint8_t video_object_layer_shape; + uint16_t vop_time_increment_resolution; + uint8_t vop_time_increment_resolution_bits; + + uint8_t fixed_vop_rate; + uint16_t fixed_vop_time_increment; + uint16_t video_object_layer_width; + uint16_t video_object_layer_height; + uint8_t interlaced; + + uint8_t obmc_disable; + uint8_t sprite_enable; + mp4_VOLSpriteInfo_t sprite_info; + uint8_t not_8_bit; + uint8_t quant_precision; + + uint8_t bits_per_pixel; + uint8_t quant_type; + mp4_VOLQuant_mat_t quant_mat_info; + uint8_t quarter_sample; + uint8_t complexity_estimation_disable; + + uint8_t resync_marker_disable; + uint8_t data_partitioned; + uint8_t reversible_vlc; + uint8_t newpred_enable; + + uint8_t reduced_resolution_vop_enable; // verid != 1 + uint8_t scalability; + uint8_t low_latency_sprite_enable; + + mp4_GroupOfVideoObjectPlane_t GroupOfVideoObjectPlane; + mp4_VideoObjectPlane_t VideoObjectPlane; + mp4_VideoObjectPlaneH263 VideoObjectPlaneH263; + + // for interlaced B-VOP direct mode + uint32_t Tframe; + // for B-VOP direct mode + uint32_t TRB, TRD; + // time increment of past and future VOP for B-VOP + uint32_t pastFrameTime, futureFrameTime; + // VOP global time + uint32_t vop_sync_time, vop_sync_time_b; + +} mp4_VideoObjectLayer_t; + +/* video_signal_type Info */ +typedef struct +{ + uint8_t is_video_signal_type; + uint8_t video_format; + uint8_t video_range; + uint8_t is_colour_description; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coefficients; +} mp4_VideoSignalType_t; + +typedef struct _mp4_Frame { + long long int time; +} mp4_Frame; + +/* Visual Object Info */ +typedef struct +{ + uint8_t is_visual_object_identifier; + uint8_t visual_object_verid; + uint8_t visual_object_priority; + uint8_t visual_object_type; + mp4_VideoSignalType_t VideoSignalType; + mp4_VideoObjectLayer_t VideoObject; + + mp4_Frame currentFrame; // current + 
mp4_Frame pastFrame;    // reference in past
+    mp4_Frame futureFrame;  // reference in future
+} mp4_VisualObject_t;
+
+/* Full Info */
+typedef struct
+{
+    mp4_VisualObject_t VisualObject;
+    uint8_t profile_and_level_indication;
+} mp4_Info_t;
+
+enum
+{
+    MP4_SC_SEEN_INVALID = 0x0,
+    MP4_SC_SEEN_VOL     = 0x1,
+    MP4_SC_SEEN_VOP     = 0x2,
+    MP4_SC_SEEN_SVH     = 0x4,
+};
+
+enum
+{
+    MP4_BS_ERROR_NONE       = (0 << 0),
+    MP4_BS_ERROR_HDR_PARSE  = (1 << 0),
+    MP4_BS_ERROR_HDR_NONDEC = (1 << 1),
+    MP4_BS_ERROR_HDR_UNSUP  = (1 << 2),
+    MP4_BS_ERROR_FRM_PARSE  = (1 << 3),
+    MP4_BS_ERROR_FRM_NONDEC = (1 << 4),
+    MP4_BS_ERROR_FRM_UNSUP  = (1 << 5),
+};
+
+#define MP4_HDR_ERROR_MASK (MP4_BS_ERROR_HDR_PARSE | MP4_BS_ERROR_HDR_NONDEC | MP4_BS_ERROR_HDR_UNSUP)
+
+typedef enum
+{
+    VIDDEC_MP4_INDX_0   = 0,
+    VIDDEC_MP4_INDX_1   = 1,
+    VIDDEC_MP4_INDX_2   = 2,
+    VIDDEC_MP4_INDX_MAX = 3,
+} viddec_fw_mp4_ref_index_t;
+
+typedef struct
+{
+    uint8_t is_field;
+} viddec_mp4_ref_info_t;
+
+typedef struct
+{
+    // The relevant bitstream data for current stream
+    mp4_Info_t info;
+
+    // The previous start code (without the prefix)
+    uint32_t prev_sc;
+
+    // The current start code (without the prefix)
+    // TODO: Revisit for SVH
+    uint32_t current_sc;
+
+    // Indicates if we look for both short and long video header or just the long video header
+    // If false, sc detection looks for both short and long video headers.
+    // If true, long video header has been seen and sc detection does not look for short video header any more.
+    uint8_t ignore_scs;
+
+    // Indicates if the current start code prefix is long (if true).
+    uint8_t cur_sc_prefix;
+
+    // Indicates if the next start code prefix is long (if true).
+    uint8_t next_sc_prefix;
+
+    // Indicates start of a frame
+    uint8_t is_frame_start;
+
+    // Indicates which start codes were seen for this workload
+    uint8_t sc_seen;
+
+    // Indicates bitstream errors if any
+    uint16_t bitstream_error;
+
+    // Reference frame information
+    viddec_mp4_ref_info_t ref_frame[VIDDEC_MP4_INDX_MAX];
+
+} viddec_mp4_parser_t;
+
+#define BREAK_GETBITS_FAIL(x, ret) {           \
+        if(x == -1){                           \
+            ret = MP4_STATUS_PARSE_ERROR;      \
+            break;}                            \
+    }
+
+#define BREAK_GETBITS_REQD_MISSING(x, ret) {   \
+        if(x == -1){                           \
+            ret = MP4_STATUS_REQD_DATA_ERROR;  \
+            break;}                            \
+    }
+
+extern void *memset(void *s, int32_t c, uint32_t n);
+
+uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt);
+
+void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status);
+
+#endif
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c
new file mode 100755
index 0000000..11c82d4
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c
@@ -0,0 +1,367 @@
+#include <string.h>
+#include "viddec_mp4_shortheader.h"
+
+typedef struct
+{
+    uint16_t vop_width;
+    uint16_t vop_height;
+    uint16_t num_macroblocks_in_gob;
+    uint16_t num_gobs_in_vop;
+    uint8_t  num_rows_in_gob;
+} svh_src_fmt_params_t;
+
+const svh_src_fmt_params_t svh_src_fmt_defaults[5] =
+{
+    {128,   96,   8,  6, 1},
+    {176,  144,  11,  9, 1},
+    {352,  288,  22, 18, 1},
+    {704,  576,  88, 18, 2},
+    {1408, 1152, 352, 18, 4},
+};
+
+mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *parser)
+{
+    mp4_Status_t ret = MP4_STATUS_OK;
+    unsigned int data;
+    mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263);
+    int32_t getbits = 0;
+    uint8_t pei = 0;
+    uint8_t optional_indicators_8bits = 0;
+
+    do
+    {
+        //temporal reference
+        getbits =
viddec_pm_get_bits(parent, &data, 8); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->temporal_reference = (data & 0xff); + //marker bit + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if ( 1 != (data & 0x1)) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + //zero bit + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if ( 0 != (data & 0x1)) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + //split_screen_indicator, document_camera_indicator, full_picture_freeze_release + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + //source format + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->source_format = (data & 0x7); + if (svh->source_format == 0 || svh->source_format == 6) + { + DEB("Error: Bad value for VideoPlaneWithShortHeader.source_format\n"); + ret = MP4_STATUS_NOTSUPPORT; + break; + } + + if (svh->source_format != 7) + { + //picture coding type + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->picture_coding_type = (data & 0x1); + //reserved zero bits + getbits = viddec_pm_get_bits(parent, &data, 4); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if ( 0 != (data & 0xf)) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + //vop quant + getbits = viddec_pm_get_bits(parent, &data, 5); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->vop_quant = (data & 0x1f); + //cpm + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if ( 0 != (data & 0x1)) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + } + else //extended PTYPE (PLUSPTYPE) + { + //ufep + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->ufep = (data & 0x7); //ufep + if (svh->ufep == 1 || svh->ufep == 0) + { + //OPPTYPE + if (svh->ufep == 1) + { + //source format + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->source_format = (data & 0x7); + if (svh->source_format < 1 || svh->source_format > 6) + { + DEB("Error: bad value of source_format\n"); + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //optional indicators + getbits = viddec_pm_get_bits(parent, &data, 8); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + optional_indicators_8bits = data; + //reserved zero bits + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if ( 0 != (data & 0x7)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //marker bit + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if ( 1 != (data & 0x1)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //reserved zero bits + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if ( 0 != (data & 0x7)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + } + + //MPPTYPE + //picture coding type + getbits = viddec_pm_get_bits(parent, &data, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + svh->picture_coding_type = (data & 0x7); + if (svh->picture_coding_type > 1) + { + DEB("Info: only support I and P frames\n"); + ret = MP4_STATUS_NOTSUPPORT; + break; + } + //optional RPR mode + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if ( 0 != (data & 0x1)) + { + ret = MP4_STATUS_PARSE_ERROR; + break; + } + //optional PRU mode + getbits = viddec_pm_get_bits(parent, &data, 
1);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                if ( 0 != (data & 0x1))
+                {
+                    ret = MP4_STATUS_PARSE_ERROR;
+                    break;
+                }
+                //vop rounding type
+                getbits = viddec_pm_get_bits(parent, &data, 1);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                svh->vop_rounding_type = (data & 0x1);
+                //reserved zero bits
+                getbits = viddec_pm_get_bits(parent, &data, 2);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                if ( 0 != (data & 0x3))
+                {
+                    ret = MP4_STATUS_PARSE_ERROR;
+                    break;
+                }
+                //marker bit
+                getbits = viddec_pm_get_bits(parent, &data, 1);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                if ( 1 != (data & 0x1))
+                {
+                    ret = MP4_STATUS_PARSE_ERROR;
+                    break;
+                }
+            }
+            else
+            {
+                DEB("Info: unsupported UFEP value in PLUSPTYPE, only UFEP 0 and 1 are handled\n");
+                ret = MP4_STATUS_NOTSUPPORT;
+                break;
+            }
+
+            //cpm
+            getbits = viddec_pm_get_bits(parent, &data, 1);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            if ( 0 != (data & 0x1))
+            {
+                ret = MP4_STATUS_NOTSUPPORT;
+                break;
+            }
+
+            //CPFMT
+            if (svh->ufep == 1 && svh->source_format == 6)
+            {   //Pixel Aspect Ratio
+                getbits = viddec_pm_get_bits(parent, &data, 4);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                svh->pixel_aspect_ratio_code = (data & 0xf);
+                //Picture Width Indication
+                getbits = viddec_pm_get_bits(parent, &data, 9);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                svh->picture_width_indication = (data & 0x1ff);
+                //marker bit
+                getbits = viddec_pm_get_bits(parent, &data, 1);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                if ( 1 != (data & 0x1))
+                {
+                    ret = MP4_STATUS_PARSE_ERROR;
+                    break;
+                }
+                //Picture Height Indication
+                getbits = viddec_pm_get_bits(parent, &data, 9);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                svh->picture_height_indication = (data & 0x1ff);
+
+                if (svh->pixel_aspect_ratio_code == 0xf)
+                {
+                    //EPAR
+                    viddec_pm_get_bits(parent, &data, 16);
+                }
+            }
+
+            //custom PCF
+            if (optional_indicators_8bits & 0x80) {
+                viddec_pm_get_bits(parent, &data, 8);
+                viddec_pm_get_bits(parent, &data, 2);
+            }
+
+            //vop quant (assign getbits so the error check below tests this read, not a stale value)
+            getbits = viddec_pm_get_bits(parent, &data, 5);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            svh->vop_quant = (data & 0x1f);
+        }
+        //PEI
+        do
+        {
+            getbits = viddec_pm_get_bits(parent, &data, 1); // pei
+            BREAK_GETBITS_FAIL(getbits, ret);
+            pei = (data & 0x1);
+            if (0 != pei)
+            {
+                getbits = viddec_pm_get_bits(parent, &data, 8); // psupp
+                BREAK_GETBITS_FAIL(getbits, ret);
+            }
+        } while ( 1 == pei);
+
+        // Anything after this needs to be fed to the decoder as PIXEL_ES
+    } while (0);
+
+    return ret;
+}
+
+mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser)
+{
+    mp4_Status_t ret=MP4_STATUS_OK;
+    mp4_Info_t *pInfo = &(parser->info);
+    mp4_VideoSignalType_t *vst = &(pInfo->VisualObject.VideoSignalType);
+    mp4_VideoObjectLayer_t *vol = &(pInfo->VisualObject.VideoObject);
+    mp4_VideoObjectPlane_t *vop = &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+    mp4_VideoObjectPlaneH263 *svh = &(pInfo->VisualObject.VideoObject.VideoObjectPlaneH263);
+    uint8_t index = 0;
+    uint8_t k = 0;
+
+    ret = mp4_Parse_VideoObjectPlane_svh(parent, parser);
+    if (ret == MP4_STATUS_OK)
+    {
+        // Populate defaults for the svh
+        vol->short_video_header = 1;
+        vol->video_object_layer_shape = MP4_SHAPE_TYPE_RECTANGULAR;
+        vol->obmc_disable = 1;
+        vol->quant_type = 0;
+        vol->resync_marker_disable = 1;
+        vol->data_partitioned = 0;
+        vol->reversible_vlc = 0;
+        vol->interlaced = 0;
+        vol->complexity_estimation_disable = 1;
+        vol->scalability = 0;
+        vol->not_8_bit = 0;
+        vol->bits_per_pixel = 8;
+        vol->quant_precision = 5;
+        
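// Short video headers carry no VOL timing fields; assume an NTSC-style
+        // default timebase of 30000/1001 ticks per second (~29.97 fps)
+        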
vol->vop_time_increment_resolution = 30000; + vol->fixed_vop_time_increment = 1001; + vol->aspect_ratio_info = MP4_ASPECT_RATIO_12_11; + + vop->vop_rounding_type = svh->vop_rounding_type; + vop->vop_fcode_forward = 1; + vop->vop_coded = 1; + vop->vop_coding_type = svh->picture_coding_type ? MP4_VOP_TYPE_P: MP4_VOP_TYPE_I; + vop->vop_quant = svh->vop_quant; + + vst->colour_primaries = 1; + vst->transfer_characteristics = 1; + vst->matrix_coefficients = 6; + + if (svh->source_format >= 1 && svh->source_format <= 5) + { + index = svh->source_format - 1; + vol->video_object_layer_width = svh_src_fmt_defaults[index].vop_width; + vol->video_object_layer_height = svh_src_fmt_defaults[index].vop_height; + svh->num_macroblocks_in_gob = svh_src_fmt_defaults[index].num_macroblocks_in_gob; + svh->num_gobs_in_vop = svh_src_fmt_defaults[index].num_gobs_in_vop; + svh->num_rows_in_gob = svh_src_fmt_defaults[index].num_rows_in_gob; + } + else if (svh->source_format == 6) //custom format + { + vol->video_object_layer_width = (svh->picture_width_indication + 1)*4; + vol->video_object_layer_height = (svh->picture_height_indication)*4; + if (vol->video_object_layer_height < 404) + { + k = 1; + } + else if (vol->video_object_layer_height < 804) + { + k = 2; + } + else + { + k = 4; + } + svh->num_macroblocks_in_gob = (((vol->video_object_layer_width + 15) & ~15) /16)*k; + svh->num_gobs_in_vop = (((vol->video_object_layer_height + 15) & ~15)/(16*k)); + svh->num_rows_in_gob = k; + } + else + { + DEB("Error: Bad value for VideoPlaneWithShortHeader.source_format\n"); + ret = MP4_STATUS_NOTSUPPORT; + return ret; + } + } + + mp4_set_hdr_bitstream_error(parser, false, ret); + + // POPULATE WORKLOAD ITEM + { + viddec_workload_item_t wi; + + wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT; + + wi.mp4_vpsh.info = 0; + wi.mp4_vpsh.pad1 = 0; + wi.mp4_vpsh.pad2 = 0; + + viddec_fw_mp4_vpsh_set_source_format(&wi.mp4_vpsh, svh->source_format); + + ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); + if (ret == 1) + ret = MP4_STATUS_OK; + } + + return ret; +} diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.h new file mode 100755 index 0000000..0d3181a --- /dev/null +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.h @@ -0,0 +1,10 @@ +#ifndef VIDDEC_MP4_SHORTHEADER_H +#define VIDDEC_MP4_SHORTHEADER_H +#include "viddec_parser_ops.h" +#include "viddec_mp4_parse.h" + +mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *cxt); + +mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *cxt); + +#endif diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c new file mode 100755 index 0000000..c50ef3c --- /dev/null +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c @@ -0,0 +1,629 @@ +#include +#include +#include "viddec_mp4_videoobjectlayer.h" +#ifndef VBP +const unsigned char mp4_DefaultIntraQuantMatrix[64] = { + 8, 17, 18, 19, 21, 23, 25, 27, + 17, 18, 19, 21, 23, 25, 27, 28, + 20, 21, 22, 23, 24, 26, 28, 30, + 21, 22, 23, 24, 26, 28, 30, 32, + 22, 23, 24, 26, 28, 30, 32, 35, + 23, 24, 26, 28, 30, 32, 35, 38, + 25, 26, 28, 30, 32, 35, 38, 41, + 27, 28, 30, 32, 35, 38, 41, 45 +}; +const unsigned char mp4_DefaultNonIntraQuantMatrix[64] = { + 16, 17, 18, 19, 20, 21, 22, 23, + 17, 18, 19, 20, 21, 22, 23, 24, + 18, 19, 20, 21, 22, 23, 24, 25, + 19, 20, 21, 22, 23, 24, 26, 27, + 20, 21, 22, 23, 25, 26, 27, 28, + 21, 22, 23, 24, 26, 27, 28, 30, + 
22, 23, 24, 26, 27, 28, 30, 31,
+    23, 24, 25, 27, 28, 30, 31, 33
+};
+
+#else
+const unsigned char mp4_DefaultIntraQuantMatrix[64] = {
+    8, 17, 17, 20, 18, 18, 19, 19,
+    21, 21, 22, 22, 22, 21, 21, 23,
+    23, 23, 23, 23, 23, 25, 24, 24,
+    24, 24, 25, 25, 27, 27, 26, 26,
+    26, 26, 26, 27, 28, 28, 28, 28,
+    28, 28, 28, 30, 30, 30, 30, 30,
+    30, 32, 32, 32, 32, 32, 35, 35,
+    35, 35, 38, 38, 38, 41, 41, 45
+};
+
+const unsigned char mp4_DefaultNonIntraQuantMatrix[64] = {
+    16, 17, 17, 18, 18, 18, 19, 19,
+    19, 19, 20, 20, 20, 20, 20, 21,
+    21, 21, 21, 21, 21, 22, 22, 22,
+    22, 22, 22, 22, 23, 23, 23, 23,
+    23, 23, 23, 23, 24, 24, 24, 25,
+    24, 24, 24, 25, 26, 26, 26, 26,
+    25, 27, 27, 27, 27, 27, 28, 28,
+    28, 28, 30, 30, 30, 31, 31, 33
+};
+
+#endif
+const unsigned char mp4_ClassicalZigzag[64] = {
+    0,  1,  8, 16,  9,  2,  3, 10, 17, 24, 32, 25, 18, 11,  4,  5,
+    12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13,  6,  7, 14, 21, 28,
+    35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
+    58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63
+};
+
+static inline int mp4_GetMacroBlockNumberSize(int nmb)
+{
+    int nb = 0;
+    nmb --;
+    do {
+        nmb >>= 1;
+        nb ++;
+    } while (nmb);
+    return nb;
+}
+
+static inline void mp4_copy_default_table(const uint8_t *src, uint8_t *dst, uint32_t len)
+{
+    uint32_t i;
+    for (i=0; i< len; i++)
+        dst[i] = src[i];
+}
+
+
+static inline mp4_Status_t mp4_Parse_QuantMatrix(void *parent, uint8_t *pQM)
+{
+    uint32_t i, code=0;
+    uint8_t last=0;
+    int32_t getbits=0;
+    mp4_Status_t ret = MP4_STATUS_OK;
+
+    for (i = 0; i < 64; i ++)
+    {
+        getbits = viddec_pm_get_bits(parent, &code, 8);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        if (code == 0) break;
+        pQM[mp4_ClassicalZigzag[i]] = (uint8_t)(code & 0xFF);
+    }
+    last = pQM[mp4_ClassicalZigzag[i-1]];
+    for (; i < 64; i ++)
+    {
+        pQM[mp4_ClassicalZigzag[i]] = last;
+    }
+    return ret;
+}
+
+static inline uint8_t mp4_pvt_valid_object_type_indication(uint8_t val)
+{
+    // Both bounds must hold; with || this check was always true
+    return ((1 <= val) && (val <= 18));
+}
+
+static inline uint8_t mp4_pvt_valid_object_layer_verid(uint8_t val)
+{
+    uint8_t ret=false;
+    switch (val)
+    {
+    case 1:
+    case 2:
+    case 4:
+    case 5:
+    {
+        ret = true;
+        break;
+    }
+    default:
+    {
+        break;
+    }
+    }
+    return ret;
+}
+
+static mp4_Status_t
+mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser)
+{
+    mp4_VOLControlParameters_t *cxt = &(parser->info.VisualObject.VideoObject.VOLControlParameters);
+    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+    int32_t getbits=0;
+    uint32_t code=0;
+
+    do
+    {
+        getbits = viddec_pm_get_bits(parent, &(code), 4);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        cxt->chroma_format = (code >> 2) & 0x3;
+        cxt->low_delay = ((code & 0x2) > 0);
+        cxt->vbv_parameters = code & 0x1;
+
+        if (cxt->chroma_format != MP4_CHROMA_FORMAT_420)
+        {
+            DEB("Warning: mp4_Parse_VideoObject:vol_control_parameters.chroma_format != 4:2:0\n");
+            cxt->chroma_format = MP4_CHROMA_FORMAT_420;
+            parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
+            ret = MP4_STATUS_NOTSUPPORT;
+        }
+
+        if (cxt->vbv_parameters)
+        {/* TODO: Check for validity of marker bits */
+            getbits = viddec_pm_get_bits(parent, &(code), 32);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            /* 32 bits= firsthalf(15) + M + LatterHalf(15) + M */
+            cxt->bit_rate = (code & 0xFFFE) >> 1; // Get rid of 1 marker bit
+            cxt->bit_rate |= ((code & 0xFFFE0000) >> 2); // Get rid of 2 marker bits
+
+            if (cxt->bit_rate == 0)
+            {
+                DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.bit_rate = 0\n");
+                parser->bitstream_error |= 
MP4_BS_ERROR_HDR_UNSUP; + ret = MP4_STATUS_NOTSUPPORT; + // Do we need to really break here? Why not just set an error and proceed + //break; + } + + getbits = viddec_pm_get_bits(parent, &(code), 19); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + /* 19 bits= firsthalf(15) + M + LatterHalf(3)*/ + cxt->vbv_buffer_size = code & 0x7; + cxt->vbv_buffer_size |= ( (code >> 4) & 0x7FFF); + if (cxt->vbv_buffer_size == 0) + { + DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.vbv_buffer_size = 0\n"); + parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; + ret = MP4_STATUS_NOTSUPPORT; + // Do we need to really break here? Why not just set an error and proceed + //break; + } + + getbits = viddec_pm_get_bits(parent, &(code), 28); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + /* 28 bits= firsthalf(11) + M + LatterHalf(15) + M */ + code = code >>1; + cxt->vbv_occupancy = code & 0x7FFF; + code = code >>16; + cxt->vbv_occupancy |= (code & 0x07FF); + } + ret = MP4_STATUS_OK; + } while (0); + + return ret; +} + +static uint32_t mp4_pvt_count_number_of_bits(uint32_t val) +{ + uint32_t num_bits=0; + do { + val >>= 1; + num_bits++; + } while (val); + return num_bits; +} + +static mp4_Status_t +mp4_Parse_VOL_sprite(void *parent, viddec_mp4_parser_t *parser) +{ + mp4_VideoObjectLayer_t *vidObjLay = (&parser->info.VisualObject.VideoObject); + mp4_VOLSpriteInfo_t *cxt = &(vidObjLay->sprite_info); + uint32_t sprite_enable = vidObjLay->sprite_enable; + uint32_t code; + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + int32_t getbits=0; + + do { + if ((sprite_enable == MP4_SPRITE_STATIC) || + (sprite_enable == MP4_SPRITE_GMC)) + { + if (sprite_enable != MP4_SPRITE_GMC) + { + /* This is not a supported type by HW */ + DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable); + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); + break; + } + + getbits = viddec_pm_get_bits(parent, &(code), 9); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + cxt->sprite_brightness_change = code & 0x1; + cxt->sprite_warping_accuracy = (code >> 1) & 0x3; + cxt->no_of_sprite_warping_points = code >> 3; + if (cxt->no_of_sprite_warping_points > 1) + { + DEB("Warning: mp4_Parse_VideoObject:bad no_of_sprite_warping_points %d\n", + cxt->no_of_sprite_warping_points); + } + + if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && (cxt->sprite_brightness_change)) + { + DEB("Error: mp4_Parse_VideoObject:sprite_brightness_change should be 0 for GMC sprites\n"); + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); + break; + } + + if (vidObjLay->sprite_enable != MP4_SPRITE_GMC) + { + DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable); + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); + break; + } + } + ret = MP4_STATUS_OK; + } while (0); + + return ret; +} + +static mp4_Status_t mp4_Parse_VOL_quant_mat(void *parent, mp4_VideoObjectLayer_t *vidObjLay) +{ + uint32_t code; + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + int32_t getbits=0; + mp4_VOLQuant_mat_t *quant = &(vidObjLay->quant_mat_info); + + do { + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + quant->load_intra_quant_mat = code; + if (quant->load_intra_quant_mat) + { + mp4_Parse_QuantMatrix(parent, &(quant->intra_quant_mat[0])); + } + else + { + mp4_copy_default_table((const uint8_t *)&mp4_DefaultIntraQuantMatrix[0], (uint8_t *)&(quant->intra_quant_mat[0]), 64); + } + + getbits = viddec_pm_get_bits(parent, &(code), 1); + 
BREAK_GETBITS_REQD_MISSING(getbits, ret); + quant->load_nonintra_quant_mat = code; + if (quant->load_nonintra_quant_mat) + { + mp4_Parse_QuantMatrix(parent, &(quant->nonintra_quant_mat[0])); + } + else + { + mp4_copy_default_table((const uint8_t *)&mp4_DefaultNonIntraQuantMatrix[0], (uint8_t *)&(quant->nonintra_quant_mat[0]), 64); + } + ret = MP4_STATUS_OK; + } while (0); + return ret; +} + +static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_t *parser) +{ + uint32_t code; + mp4_Info_t *pInfo = &(parser->info); + mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject); + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + int32_t getbits=0; + + do { + if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) + { + /* TODO: check for validity of marker bits */ + getbits = viddec_pm_get_bits(parent, &(code), 29); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->video_object_layer_height = (code >> 1) & 0x1FFF; + vidObjLay->video_object_layer_width = (code >> 15) & 0x1FFF; + } + + getbits = viddec_pm_get_bits(parent, &(code), 2); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->interlaced = ((code & 0x2) > 0); + vidObjLay->obmc_disable = ((code & 0x1) > 0); + + { + uint32_t num_bits=1; + if (vidObjLay->video_object_layer_verid != 1) num_bits=2; + getbits = viddec_pm_get_bits(parent, &(code), num_bits); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->sprite_enable = code; + } + + ret = mp4_Parse_VOL_sprite(parent, parser); + if (ret != MP4_STATUS_OK) + { + break; + } + + if ((vidObjLay->video_object_layer_verid != 1) && + (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)) + { + /* not supported shape*/ + DEB("Error: mp4_Parse_VideoObject: sadct_disable, not supp\n"); + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); + break; + } + + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_FAIL(getbits, ret); + vidObjLay->not_8_bit = (code > 0 ); + if (vidObjLay->not_8_bit) + { + /* 8 bit is only supported mode*/ + DEB("Error: mp4_Parse_VideoObject: not_8_bit, not supp\n"); + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); + break; + } + else + {/* We use default values since only 8 bit mode is supported */ + vidObjLay->quant_precision = 5; + vidObjLay->bits_per_pixel = 8; + } + + if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE) + { + /* Should not get here as shape is checked earlier */ + DEB("Error: mp4_Parse_VideoObject: GRAYSCALE, not supp\n"); + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); + break; + } + + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->quant_type = code; + if (vidObjLay->quant_type) + { + ret = mp4_Parse_VOL_quant_mat(parent, vidObjLay); + if (ret != MP4_STATUS_OK) + { + break; + } + } + + if (vidObjLay->video_object_layer_verid != 1) + { + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->quarter_sample = code; + } + + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->complexity_estimation_disable = code; + if (!vidObjLay->complexity_estimation_disable) + {/* complexity estimation not supported */ + DEB("Error: mp4_Parse_VideoObject: vidObjLay->complexity_estimation_disable, not supp\n"); + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); + break; + } + + getbits = 
viddec_pm_get_bits(parent, &(code), 2); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->resync_marker_disable = ((code & 0x2) > 0); + vidObjLay->data_partitioned = code & 0x1; + if (vidObjLay->data_partitioned) + { + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->reversible_vlc = code; + } + + if (vidObjLay->video_object_layer_verid != 1) + { + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_FAIL(getbits, ret); + vidObjLay->newpred_enable = code; + if (vidObjLay->newpred_enable) + { + DEB("Error: NEWPRED mode is not supported\n"); + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); + break; + } + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_FAIL(getbits, ret); + vidObjLay->reduced_resolution_vop_enable = code; + } + + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_FAIL(getbits, ret); + vidObjLay->scalability = code; + if (vidObjLay->scalability) + { + DEB("Error: VOL scalability is not supported\n"); + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); + break; + } + + // No need to parse further - none of the fields are interesting to parser/decoder/user + ret = MP4_STATUS_OK; + } while (0); + return ret; +} + +mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parser) +{ + uint32_t code; + mp4_Info_t *pInfo = &(parser->info); + mp4_VisualObject_t *visObj = &(pInfo->VisualObject); + mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject); + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + int32_t getbits=0; + +//DEB("entering mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret); + + // Trying to parse more header data as it is more important than frame data + if (parser->bitstream_error > MP4_HDR_ERROR_MASK) + return ret; + + do { + vidObjLay->VideoObjectPlane.sprite_transmit_mode = MP4_SPRITE_TRANSMIT_MODE_PIECE; + + vidObjLay->short_video_header = 0; + vidObjLay->video_object_layer_id = (parser->current_sc & 0xF); + + getbits = viddec_pm_get_bits(parent, &code, 9); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->video_object_type_indication = code & 0xFF; + vidObjLay->random_accessible_vol = ((code & 0x100) > 0); + + if (!mp4_pvt_valid_object_type_indication(vidObjLay->video_object_type_indication)) + { /* Streams with "unknown" type mismatch with ref */ + DEB("Warning: video_object_type_indication = %d, forcing to 1\n", + vidObjLay->video_object_type_indication); + vidObjLay->video_object_type_indication = 1; + } + + if (vidObjLay->video_object_type_indication == MP4_VIDEO_OBJECT_TYPE_FINE_GRANULARITY_SCALABLE) + {/* This is not a supported type by HW */ + DEB("ERROR: mp4_Parse_VideoObject:video_object_type_indication = %.2X\n", + vidObjLay->video_object_type_indication); + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); + break; + } + else + { + getbits = viddec_pm_get_bits(parent, &(code), 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjLay->is_object_layer_identifier = code; + vidObjLay->video_object_layer_verid = + (mp4_pvt_valid_object_layer_verid(visObj->visual_object_verid)) ? 
visObj->visual_object_verid : 1;
+
+            if (vidObjLay->is_object_layer_identifier)
+            {
+                getbits = viddec_pm_get_bits(parent, &(code), 7);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                vidObjLay->video_object_layer_priority = code & 0x7;
+                vidObjLay->video_object_layer_verid = (code >> 3) & 0xF;
+                if (!mp4_pvt_valid_object_layer_verid(vidObjLay->video_object_layer_verid))
+                {
+                    DEB("Error: mp4_Parse_VideoObject:video_object_layer_verid = %d, expected 1, 2, 4 or 5\n",
+                        vidObjLay->video_object_layer_verid);
+                    ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
+                    break;
+                }
+                /* Video object layer ID supersedes visual object ID */
+                visObj->visual_object_verid = vidObjLay->video_object_layer_verid;
+            }
+
+            getbits = viddec_pm_get_bits(parent, &(code), 4);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            vidObjLay->aspect_ratio_info = code & 0xF;
+            if (vidObjLay->aspect_ratio_info == MP4_ASPECT_RATIO_EXTPAR)
+            {
+                getbits = viddec_pm_get_bits(parent, &(code), 16);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                vidObjLay->aspect_ratio_info_par_width = (code >> 8) & 0xFF;
+                vidObjLay->aspect_ratio_info_par_height = code & 0xFF;
+            }
+
+            getbits = viddec_pm_get_bits(parent, &(code), 1);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            vidObjLay->is_vol_control_parameters = code;
+            if (vidObjLay->is_vol_control_parameters)
+            {
+                ret = mp4_pvt_VOL_volcontrolparameters(parent, parser);
+                if (ret != MP4_STATUS_OK)
+                {
+                    break;
+                }
+            }
+
+            getbits = viddec_pm_get_bits(parent, &(code), 2);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            vidObjLay->video_object_layer_shape = code;
+            /* If the shape is not rectangular, exit early without parsing */
+            if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)
+            {
+                DEB("Error: mp4_Parse_VideoObject: shape not rectangular(%d):%d\n",
+                    MP4_SHAPE_TYPE_RECTANGULAR, vidObjLay->video_object_layer_shape);
+                ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
+                break;
+            }
+
+            if ((vidObjLay->video_object_layer_verid != 1) &&
+                    (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE))
+            {/* Grayscale not supported */
+                DEB("Error: MP4_SHAPE_TYPE_GRAYSCALE not supported\n");
+                ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
+                break;
+            }
+
+            getbits = viddec_pm_get_bits(parent, &(code), 19);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            /* TODO: check validity of marker */
+            vidObjLay->vop_time_increment_resolution = (code >> 2) & 0xFFFF;
+            vidObjLay->fixed_vop_rate = code & 0x1;
+
+            if (vidObjLay->vop_time_increment_resolution == 0)
+            {
+                DEB("Error: 0 value for vop_time_increment_resolution\n");
+                ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
+                break;
+            }
+            /* calculate the number of bits in vop_time_increment_resolution */
+            vidObjLay->vop_time_increment_resolution_bits = (uint8_t)mp4_pvt_count_number_of_bits(
+                        (uint32_t)(vidObjLay->vop_time_increment_resolution -1));
+
+            if (vidObjLay->fixed_vop_rate)
+            {
+                getbits = viddec_pm_get_bits(parent, &(code), vidObjLay->vop_time_increment_resolution_bits);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                vidObjLay->fixed_vop_time_increment = code;
+            }
+
+            if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
+            {
+                ret = mp4_Parse_VOL_notbinaryonly(parent, parser);
+                if (ret != MP4_STATUS_OK)
+                {
+                    break;
+                }
+            }
+            else
+            {
+                DEB("Error: MP4_SHAPE_TYPE_BINARYONLY not supported\n");
+                ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
+                break;
+            }
+        }
+
+        vidObjLay->VideoObjectPlane.sprite_transmit_mode = 
MP4_SPRITE_TRANSMIT_MODE_PIECE; + ret = MP4_STATUS_OK; + } while (0); + + mp4_set_hdr_bitstream_error(parser, true, ret); + if (ret != MP4_STATUS_OK) { + parser->bitstream_error |= MP4_BS_ERROR_HDR_NONDEC; + return ret; + } +//DEB("before wkld mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret); + + // POPULATE WORKLOAD ITEM +/* + { + viddec_workload_item_t wi; + viddec_workload_t *wl = viddec_pm_get_header(parent); + + wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ; + + wi.mp4_vol.vol_aspect_ratio = 0; + wi.mp4_vol.vol_bit_rate = 0; + wi.mp4_vol.vol_frame_rate = 0; + + viddec_fw_mp4_vol_set_aspect_ratio_info(&wi.mp4_vol, vidObjLay->aspect_ratio_info); + viddec_fw_mp4_vol_set_par_width(&wi.mp4_vol, vidObjLay->aspect_ratio_info_par_width); + viddec_fw_mp4_vol_set_par_height(&wi.mp4_vol, vidObjLay->aspect_ratio_info_par_height); + viddec_fw_mp4_vol_set_control_param(&wi.mp4_vol, vidObjLay->is_vol_control_parameters); + viddec_fw_mp4_vol_set_chroma_format(&wi.mp4_vol, vidObjLay->VOLControlParameters.chroma_format); + viddec_fw_mp4_vol_set_interlaced(&wi.mp4_vol, vidObjLay->interlaced); + viddec_fw_mp4_vol_set_fixed_vop_rate(&wi.mp4_vol, vidObjLay->fixed_vop_rate); + + viddec_fw_mp4_vol_set_vbv_param(&wi.mp4_vol, vidObjLay->VOLControlParameters.vbv_parameters); + viddec_fw_mp4_vol_set_bit_rate(&wi.mp4_vol, vidObjLay->VOLControlParameters.bit_rate); + + viddec_fw_mp4_vol_set_fixed_vop_time_increment(&wi.mp4_vol, vidObjLay->fixed_vop_time_increment); + viddec_fw_mp4_vol_set_vop_time_increment_resolution(&wi.mp4_vol, vidObjLay->vop_time_increment_resolution); + + ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); + if (ret == 1) + ret = MP4_STATUS_OK; + + memset(&(wl->attrs), 0, sizeof(viddec_frame_attributes_t)); + + wl->attrs.cont_size.width = vidObjLay->video_object_layer_width; + wl->attrs.cont_size.height = vidObjLay->video_object_layer_height; + } +*/ + return ret; +} diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.h new file mode 100755 index 0000000..f093553 --- /dev/null +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.h @@ -0,0 +1,16 @@ +#ifndef VIDDEC_MP4_VIDEOOBJECTLAYER_H +#define VIDDEC_MP4_VIDEOOBJECTLAYER_H +#include "viddec_parser_ops.h" +#include "viddec_mp4_parse.h" + +void mp4_ResetVOL(mp4_Info_t *pInfo); + +mp4_Status_t mp4_InitVOL(mp4_Info_t *pInfo); + +mp4_Status_t mp4_FreeVOL(mp4_Info_t *pInfo); + +mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *cxt); + + + +#endif diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c new file mode 100755 index 0000000..97f36a3 --- /dev/null +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c @@ -0,0 +1,423 @@ +#include +#include "viddec_mp4_videoobjectplane.h" + +mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t *parser) +{ + mp4_Info_t* pInfo = &(parser->info); + uint32_t code; + int32_t getbits=0; + mp4_Status_t ret = MP4_STATUS_REQD_DATA_ERROR; + mp4_GroupOfVideoObjectPlane_t *data; + uint32_t time_code = 0; + + data = &(pInfo->VisualObject.VideoObject.GroupOfVideoObjectPlane); + + do + { + getbits = viddec_pm_get_bits(parent, &code, 20); + BREAK_GETBITS_FAIL(getbits, ret); + ret = MP4_STATUS_OK; + + data->broken_link = ((code & 0x1) > 0); + data->closed_gov = ((code & 0x2) > 0); + time_code = code = code >> 2; + data->time_code_seconds = code & 0x3F; + code = code >> 6; + if ((code & 1) == 
0) + {/* SGA:Should we ignore marker bit? */ + DEB("Error:mp4_Parse_GroupOfVideoObjectPlane: Invalid marker\n"); + } + code = code >>1; + data->time_code_minutes = code & 0x3F; + code = code >> 6; + data->time_code_hours = code & 0x1F; + + // This is the timebase in full second units + data->time_base = data->time_code_seconds + (60*data->time_code_minutes) + (3600*data->time_code_hours); + // Need to convert this into no. of ticks + data->time_base *= pInfo->VisualObject.VideoObject.vop_time_increment_resolution; + + } while (0); + + mp4_set_hdr_bitstream_error(parser, true, ret); + + // POPULATE WORKLOAD ITEM + { + viddec_workload_item_t wi; + + wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ; + + wi.mp4_gvop.gvop_info = 0; + wi.mp4_gvop.pad1 = 0; + wi.mp4_gvop.pad2 = 0; + + viddec_fw_mp4_gvop_set_broken_link(&wi.mp4_gvop, data->broken_link); + viddec_fw_mp4_gvop_set_closed_gov(&wi.mp4_gvop, data->closed_gov); + viddec_fw_mp4_gvop_set_time_code(&wi.mp4_gvop, time_code); + + ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); + if (ret == 1) + ret = MP4_STATUS_OK; + } + + return ret; +} + +static inline mp4_Status_t mp4_brightness_change(void *parent, int32_t *b_change) +{ + uint32_t code; + int32_t getbits=0; + + *b_change = 0; + getbits = viddec_pm_peek_bits(parent, &code, 4); + if (code == 15) + { + getbits = viddec_pm_skip_bits(parent, 4); + getbits = viddec_pm_get_bits(parent, &code, 10); + *b_change = 625 + code; + } + else if (code == 14) + { + getbits = viddec_pm_skip_bits(parent, 4); + getbits = viddec_pm_get_bits(parent, &code, 9); + *b_change = 113 + code; + } + else if (code >= 12) + { + getbits = viddec_pm_skip_bits(parent, 3); + getbits = viddec_pm_get_bits(parent, &code, 7); + *b_change = (code < 64) ? ((int32_t)code - 112) : ((int32_t)code - 15); + } + else if (code >= 8) + { + getbits = viddec_pm_skip_bits(parent, 2); + getbits = viddec_pm_get_bits(parent, &code, 6); + *b_change = (code < 32) ? ((int32_t)code - 48) : ((int32_t)code - 15); + } + else + { + getbits = viddec_pm_skip_bits(parent, 1); + getbits = viddec_pm_get_bits(parent, &code, 5); + *b_change = (code < 16) ? ((int32_t)code - 16) : ((int32_t)code - 15); + } + + return ( (getbits == -1) ? MP4_STATUS_PARSE_ERROR: MP4_STATUS_OK); +} +static inline int32_t mp4_Sprite_dmv_length(void * parent, int32_t *dmv_length) +{ + uint32_t code, skip; + int32_t getbits=0; + mp4_Status_t ret= MP4_STATUS_PARSE_ERROR; + *dmv_length=0; + skip=3; + do { + getbits = viddec_pm_peek_bits(parent, &code, skip); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + + if (code == 7) + { + viddec_pm_skip_bits(parent, skip); + getbits = viddec_pm_peek_bits(parent, &code, 9); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + + skip=1; + while ((code & 256) != 0) + {/* count number of 1 bits */ + code <<=1; + skip++; + } + *dmv_length = 5 + skip; + } + else + { + skip=(code <= 1) ? 
2 : 3; + *dmv_length = code - 1; + } + viddec_pm_skip_bits(parent, skip); + ret= MP4_STATUS_OK; + + } while (0); + return ret; +} + +static inline mp4_Status_t +mp4_Sprite_Trajectory(void *parent, mp4_VideoObjectLayer_t *vidObjLay, mp4_VideoObjectPlane_t *vidObjPlane) +{ + uint32_t code, i; + int32_t dmv_length=0, dmv_code=0, getbits=0; + mp4_Status_t ret = MP4_STATUS_OK; + for (i=0; i < (uint32_t)vidObjLay->sprite_info.no_of_sprite_warping_points; i++ ) + { + ret = (mp4_Status_t)mp4_Sprite_dmv_length(parent, &dmv_length); + if (ret != MP4_STATUS_OK) + { + break; + } + if (dmv_length <= 0) + { + dmv_code = 0; + } + else + { + getbits = viddec_pm_get_bits(parent, &code, (uint32_t)dmv_length); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + dmv_code = (int32_t)code; + if ((dmv_code & (1 << (dmv_length - 1))) == 0) + { + dmv_code -= (1 << dmv_length) - 1; + } + } + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if (code != 1) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + vidObjPlane->warping_mv_code_du[i] = dmv_code; + /* TODO: create another inline function to avoid code duplication */ + ret = (mp4_Status_t)mp4_Sprite_dmv_length(parent, &dmv_length); + if (ret != MP4_STATUS_OK) + { + break; + } + if (dmv_length <= 0) + { + dmv_code = 0; + } + else + { + getbits = viddec_pm_get_bits(parent, &code, (uint32_t)dmv_length); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + dmv_code = (int32_t)code; + if ((dmv_code & (1 << (dmv_length - 1))) == 0) + { + dmv_code -= (1 << dmv_length) - 1; + } + } + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + if (code != 1) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + vidObjPlane->warping_mv_code_dv[i] = dmv_code; + + } + return ret; +} + +static inline mp4_Status_t mp4_pvt_extract_modulotimebase_from_VideoObjectPlane(void *parent, uint32_t *base) +{ + mp4_Status_t ret= MP4_STATUS_OK; + int32_t getbits=0; + uint32_t code = 0; + + *base = 0; + do + { + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + *base += code; + } while (code != 0); + return ret; +} + +mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parser) +{ + uint32_t code; + mp4_Info_t *pInfo = &(parser->info); + mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject); + mp4_VideoObjectPlane_t *vidObjPlane = &(pInfo->VisualObject.VideoObject.VideoObjectPlane); + int32_t getbits=0; + mp4_Status_t ret= MP4_STATUS_PARSE_ERROR; + + do + { + getbits = viddec_pm_get_bits(parent, &code, 2); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_coding_type = code & 0x3; + if ( mp4_pvt_extract_modulotimebase_from_VideoObjectPlane(parent, + &(vidObjPlane->modulo_time_base)) == MP4_STATUS_REQD_DATA_ERROR) + { + break; + } + + getbits = viddec_pm_get_bits(parent, &code, 1); + /* TODO: check for marker bit validity */ + { + uint32_t numbits=0; + numbits = vidObjLay->vop_time_increment_resolution_bits; + if (numbits == 0) numbits=1; /*TODO:check if its greater than 16 bits ?? 
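(it cannot be: vop_time_increment_resolution is read from a 16-bit field, so the bit count derived from it is at most 16) 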
*/ + getbits = viddec_pm_get_bits(parent, &code, numbits); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_time_increment = code; + } + + getbits = viddec_pm_get_bits(parent, &code, 2); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + + vidObjPlane->vop_coded = code & 0x1; + if (vidObjPlane->vop_coded == 0) + { + ret = MP4_STATUS_OK;/* Exit point 1 */ + break; + } + + if (vidObjLay->newpred_enable) + { + /* New pred mode not supported in HW */ + DEB("Error: mp4_Parse_VideoObjectPlane: New pred in vidObjPlane is not supported\n"); + ret = MP4_STATUS_NOTSUPPORT; + break; + } + + if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) && + ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P) || + ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S) && + (vidObjLay->sprite_enable == MP4_SPRITE_GMC)))) + { + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_rounding_type = code; + } + + if (vidObjLay->reduced_resolution_vop_enable && + (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) && + ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) || + (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P))) + { + getbits = viddec_pm_get_bits(parent, &code, 1); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_reduced_resolution = code; + if (vidObjPlane->vop_reduced_resolution) + { + DEB("Error: mp4_Parse_VideoObjectPlane: Reduced Resolution vidObjPlane is not supported\n"); + ret = MP4_STATUS_NOTSUPPORT; + break; + } + } + + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) + { + /* we support only rectangular shapes so the following logic is not required */ + ret = MP4_STATUS_NOTSUPPORT; + break; + } + + if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) && + (!vidObjLay->complexity_estimation_disable)) + { + /* Not required according to DE team */ + //read_vop_complexity_estimation_header(); + ret = MP4_STATUS_NOTSUPPORT; + break; + } + + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) + { + getbits = viddec_pm_get_bits(parent, &code, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->intra_dc_vlc_thr = code; + if (vidObjLay->interlaced) + { + getbits = viddec_pm_get_bits(parent, &code, 2); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->top_field_first = ((code & 0x2) > 0); + vidObjPlane->alternate_vertical_scan_flag = code & 0x1; + } + } + + if (((vidObjLay->sprite_enable == MP4_SPRITE_STATIC) || (vidObjLay->sprite_enable == MP4_SPRITE_GMC)) && + (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S)) + { + if (vidObjLay->sprite_info.no_of_sprite_warping_points > 0) { + if (mp4_Sprite_Trajectory(parent, vidObjLay, vidObjPlane) != MP4_STATUS_OK) { + break; + } + } + vidObjPlane->brightness_change_factor = 0; + if (vidObjLay->sprite_info.sprite_brightness_change) + { + int32_t change=0; + if (mp4_brightness_change(parent, &change) == MP4_STATUS_PARSE_ERROR) + { + break; + } + vidObjPlane->brightness_change_factor = change; + } + + if (vidObjLay->sprite_enable == MP4_SPRITE_STATIC) + { + /* SGA: IS decode sprite not required. 
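Note: mp4_Parse_VOL_sprite() already rejects MP4_SPRITE_STATIC at the VOL level, so this exit appears to be defensive. 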
Is static even supported */ + ret = MP4_STATUS_OK;/* Exit point 2 */ + break; + } + } + + if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) + { + // Length of vop_quant is specified by quant_precision + getbits = viddec_pm_get_bits(parent, &code, vidObjLay->quant_precision); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_quant = code; + if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + if (vidObjPlane->vop_coding_type != MP4_VOP_TYPE_I) + { + vidObjPlane->vop_fcode_forward = 0; + getbits = viddec_pm_get_bits(parent, &code, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_fcode_forward = code & 0x7; + if (vidObjPlane->vop_fcode_forward == 0) + { + DEB("Error: vop_fcode_forward == 0\n"); + break; + } + } + if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B) + { + vidObjPlane->vop_fcode_backward = 0; + getbits = viddec_pm_get_bits(parent, &code, 3); + BREAK_GETBITS_REQD_MISSING(getbits, ret); + vidObjPlane->vop_fcode_backward = code &0x7; + if (vidObjPlane->vop_fcode_backward == 0) + { + DEB("Error: vop_fcode_backward == 0\n"); + break; + } + } + if (!vidObjLay->scalability) + { + if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) && + (vidObjPlane->vop_coding_type != MP4_VOP_TYPE_I)) + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + // The remaining data contains the macroblock information that is handled by the BSP + // The offsets to be sent to the BSP are obtained in the workload population + } + else + { + ret = MP4_STATUS_NOTSUPPORT; + break; + } + } + else + {/* Binary Not supported */ + ret = MP4_STATUS_NOTSUPPORT; + break; + } + /* Since we made it all the way here it a success condition */ + ret = MP4_STATUS_OK; /* Exit point 3 */ + } while (0); + + mp4_set_hdr_bitstream_error(parser, false, ret); + + return ret; +} // mp4_Parse_VideoObjectPlane diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.h new file mode 100755 index 0000000..0262ab2 --- /dev/null +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.h @@ -0,0 +1,10 @@ +#ifndef VIDDEC_MP4_VIDEOOBJECTPLANE_H +#define VIDDEC_MP4_VIDEOOBJECTPLANE_H +#include "viddec_parser_ops.h" +#include "viddec_mp4_parse.h" + +mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t *parser); + +mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parser); + +#endif diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c new file mode 100755 index 0000000..d1ec032 --- /dev/null +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c @@ -0,0 +1,290 @@ +#include +#include "viddec_mp4_visualobject.h" + +static inline uint8_t mp4_pvt_isValid_verID(uint8_t id) +{ + uint8_t ret=true; + switch (id) + { + case 1: + case 2: + case 4: + case 5: + { + break; + } + default: + { + ret = false; + break; + } + } + return ret; +} // mp4_pvt_isValid_verID + +static mp4_Status_t mp4_Parse_video_signal_type(void *parent, mp4_VideoSignalType_t *vidSignal) +{ + uint32_t data=0; + int32_t getbits=0; + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + + /* Set default values defined in spec first */ + vidSignal->video_format = 5; + vidSignal->video_range = 0; + vidSignal->colour_primaries = 1; + vidSignal->transfer_characteristics = 1; + vidSignal->matrix_coefficients = 1; + do + { + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_FAIL(getbits, 
ret); + vidSignal->is_video_signal_type = (data > 0); + if (vidSignal->is_video_signal_type) + { + getbits = viddec_pm_get_bits(parent, &data, 5); + BREAK_GETBITS_FAIL(getbits, ret); + vidSignal->is_colour_description = data & 0x1; + vidSignal->video_range = ((data & 0x2) > 0); + data = data >> 2; + vidSignal->video_format = data & 0x7; + if (vidSignal->is_colour_description) + { + getbits = viddec_pm_get_bits(parent, &data, 24); + BREAK_GETBITS_FAIL(getbits, ret); + vidSignal->colour_primaries = (data >> 16) & 0xFF; + vidSignal->transfer_characteristics = (data >> 8) & 0xFF; + vidSignal->matrix_coefficients = data & 0xFF; + } + } + ret = MP4_STATUS_OK; + } while (0); + + return ret; +} // mp4_Parse_video_signal_type + +void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status) +{ + //DEB("Entering mp4_set_hdr_bitstream_error: bs_err: 0x%x, hdr: %d, parse_status: %d\n", + // parser->bitstream_error, hdr_flag, parse_status); + + if (hdr_flag) + { + if (parse_status & MP4_STATUS_NOTSUPPORT) + parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; + if (parse_status & MP4_STATUS_PARSE_ERROR) + parser->bitstream_error |= MP4_BS_ERROR_HDR_PARSE; + if (parse_status & MP4_STATUS_REQD_DATA_ERROR) + parser->bitstream_error |= MP4_BS_ERROR_HDR_NONDEC; + parser->bitstream_error &= MP4_HDR_ERROR_MASK; + } + else + { + if (parse_status & MP4_STATUS_NOTSUPPORT) + parser->bitstream_error |= MP4_BS_ERROR_FRM_UNSUP; + if (parse_status & MP4_STATUS_PARSE_ERROR) + parser->bitstream_error |= MP4_BS_ERROR_FRM_PARSE; + if (parse_status & MP4_STATUS_REQD_DATA_ERROR) + parser->bitstream_error |= MP4_BS_ERROR_FRM_NONDEC; + } + + //DEB("Exiting mp4_set_hdr_bitstream_error: bs_err: 0x%x\n", parser->bitstream_error); + + return; +} // mp4_set_hdr_bitstream_error + +mp4_Status_t mp4_Parse_VisualSequence(void *parent, viddec_mp4_parser_t *parser) +{ + uint32_t data=0; + int32_t getbits=0; + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + + getbits = viddec_pm_get_bits(parent, &data, 8); + if (getbits != -1) + { + parser->info.profile_and_level_indication = data & 0xFF; + // If present, check for validity + switch (parser->info.profile_and_level_indication) + { + case MP4_SIMPLE_PROFILE_LEVEL_0: + case MP4_SIMPLE_PROFILE_LEVEL_1: + case MP4_SIMPLE_PROFILE_LEVEL_2: + case MP4_SIMPLE_PROFILE_LEVEL_3: + case MP4_SIMPLE_PROFILE_LEVEL_4a: + case MP4_SIMPLE_PROFILE_LEVEL_5: + case MP4_SIMPLE_PROFILE_LEVEL_6: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_0: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_1: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_2: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_4: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_5: + case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3B: + parser->bitstream_error = MP4_BS_ERROR_NONE; + ret = MP4_STATUS_OK; + break; + default: + parser->bitstream_error = MP4_BS_ERROR_HDR_UNSUP | MP4_BS_ERROR_HDR_NONDEC; + break; + } + } + else + { + parser->bitstream_error = MP4_BS_ERROR_HDR_PARSE | MP4_BS_ERROR_HDR_NONDEC; + } + + return ret; +} // mp4_Parse_VisualSequence + +mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser) +{ + mp4_Info_t *pInfo = &(parser->info); + mp4_VisualObject_t *visObj = &(pInfo->VisualObject); + uint32_t data=0; + int32_t getbits=0; + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + + do + { + getbits = viddec_pm_get_bits(parent, &data, 1); + BREAK_GETBITS_FAIL(getbits, ret); + visObj->is_visual_object_identifier = (data > 0); + + visObj->visual_object_verid = 1; /* Default 
value as per spec */ + if (visObj->is_visual_object_identifier) + { + viddec_pm_get_bits(parent, &data, 7); + visObj->visual_object_priority = data & 0x7; + data = data >> 3; + if (mp4_pvt_isValid_verID(data & 0xF)) + { + visObj->visual_object_verid = data & 0xF; + } + else + { + DEB("Warning: Unsupported visual_object_verid\n"); + parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; + // Continue parsing as it is not a required field for decoder + } + } + + getbits = viddec_pm_get_bits(parent, &data, 4); + BREAK_GETBITS_FAIL(getbits, ret); + visObj->visual_object_type = data; + if (visObj->visual_object_type != MP4_VISUAL_OBJECT_TYPE_VIDEO) + { + /* VIDEO is the only supported type */ + DEB("Error: Unsupported object: visual_object_type != video ID\n"); + parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; + break; + } + + /* Not required to check for visual_object_type as we already handle it above */ + ret = mp4_Parse_video_signal_type(parent, &(visObj->VideoSignalType)); + + // No need to check for user data or visual object layer because they have a different start code + // and will not be part of this header + + } while (0); + + mp4_set_hdr_bitstream_error(parser, true, ret); + + // POPULATE WORKLOAD ITEM + { + viddec_workload_item_t wi; + mp4_VideoSignalType_t *vst = &(visObj->VideoSignalType); + + wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ; + + wi.mp4_vs_vo.vs_item = 0; + wi.mp4_vs_vo.video_signal_type = 0; + wi.mp4_vs_vo.color_desc = 0; + + viddec_fw_mp4_vs_set_profile_and_level_indication(&wi.mp4_vs_vo, pInfo->profile_and_level_indication); + + viddec_fw_mp4_vo_set_video_signal_type(&wi.mp4_vs_vo, vst->is_video_signal_type); + if (vst->is_video_signal_type) + { + viddec_fw_mp4_vo_set_video_range(&wi.mp4_vs_vo, vst->video_range); + viddec_fw_mp4_vo_set_video_format(&wi.mp4_vs_vo, vst->video_format); + viddec_fw_mp4_vo_set_colour_description(&wi.mp4_vs_vo, vst->is_colour_description); + if (vst->is_colour_description) + { + viddec_fw_mp4_vo_set_transfer_char(&wi.mp4_vs_vo, vst->transfer_characteristics); + viddec_fw_mp4_vo_set_color_primaries(&wi.mp4_vs_vo, vst->colour_primaries); + } + } + + int ret_val; + ret_val = viddec_pm_append_workitem(parent, &wi, false); + if (ret_val == 1) + ret = MP4_STATUS_OK; + } + + return ret; +} // mp4_Parse_VisualObject + +mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser) +{ + mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; + uint32_t user_data; + viddec_workload_item_t wi; + + DEB("ParseUser-prev_sc: 0x%x\n", parser->prev_sc); + + /* find the scope based on start code sc */ + switch (parser->prev_sc) { + case MP4_SC_VISUAL_OBJECT_SEQUENCE: + wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA; + break; + case MP4_SC_VISUAL_OBJECT: + wi.vwi_type = VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA; + break; + case MP4_SC_GROUP_OF_VOP: + wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA; + break; + case MP4_SC_VIDEO_OBJECT_LAYER_MIN: + wi.vwi_type = VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA; + break; + default: + wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen + break; + } + + /* Read 1 byte of user data and store it in workitem for the current stream level (VS/VO/VOL/GVOP). + Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size, + append the workitem. This loop is repeated till all user data is extracted and appended. 
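For example, 25 bytes of user data are emitted as two full 11-byte workitems followed by one final workitem carrying 3 data bytes and 8 bytes of zero padding. 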
*/
+    wi.user_data.size = 0;
+    while (viddec_pm_get_bits(parent, &user_data, 8) != -1)
+    {
+        /* Store the valid byte in data payload */
+        wi.user_data.data_payload[wi.user_data.size] = user_data;
+        wi.user_data.size++;
+
+        /* When size exceeds payload size, append workitem and continue */
+        if (wi.user_data.size >= 11)
+        {
+            viddec_pm_setup_userdata(&wi);
+
+            ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false);
+            wi.user_data.size = 0;
+        }
+    }
+    /* If size is not 0, append remaining user data. */
+    if (wi.user_data.size > 0)
+    {
+        int i;
+        for (i=wi.user_data.size; i<11; i++)
+        {
+            wi.user_data.data_payload[i] = 0;
+        }
+        viddec_pm_setup_userdata(&wi);
+        ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false);
+        wi.user_data.size = 0;
+    }
+
+    if (ret == 1)
+        ret = MP4_STATUS_OK;
+
+    return ret;
+} // mp4_Parse_UserData
+
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.h
new file mode 100755
index 0000000..d1f5a23
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.h
@@ -0,0 +1,12 @@
+#ifndef VIDDEC_MP4_VISUALOBJECT_H
+#define VIDDEC_MP4_VISUALOBJECT_H
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+
+mp4_Status_t mp4_Parse_VisualSequence(void *parent, viddec_mp4_parser_t *parser);
+
+mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser);
+
+mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser);
+
+#endif
diff --git a/mixvbp/vbp_plugin/mp4/viddec_parse_sc_mp4.c b/mixvbp/vbp_plugin/mp4/viddec_parse_sc_mp4.c
new file mode 100755
index 0000000..3988169
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_parse_sc_mp4.c
@@ -0,0 +1,151 @@
+#include "viddec_pm_parse.h"
+#include "viddec_mp4_parse.h"
+
+/* Parse for an SC (start code) of pattern 0x00 0x00 0xXX in the current buffer. Returns 1 when a
+   start code is found and 0 otherwise. The context is updated with the current phase and the
+   sc_code position in the buffer.
+
+   What is phase?: phase is a value between [0-4]; we keep track of consecutive '0's with it.
+   Any time a '0' byte is found, phase is incremented by 1 (up to 2) and reset to 0 if a zero is
+   not found. If a 0xXX code is found while the current phase is 2, phase is changed to 3, which
+   means we found the pattern we are looking for. It is incremented to 4 once we see a byte after
+   this pattern.
+
+   For MP4 there are two start code patterns, LVH & SVH. LVH is the same as in other codecs
+   (00 00 01); SVH, a.k.a. H263, is (00 00 8X). So we have to look for both kinds of start codes.
+   The spec doesn't explicitly say whether both of them can exist in the same stream, so the
+   current implementation assumes that only one of them is present in a given stream, to simplify
+   the implementation. The reason it can get complicated is that a resync marker in LVH can
+   potentially be (00 00 8), which will cause a false detection of an SVH start code.
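+   For example, 00 00 01 B6 is a normal (LVH) VOP start code, while 00 00 80 through 00 00 83 all
+   match the SVH pattern, since only the top six bits of the third byte (mask 0xFC) are compared.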
+*/
+#ifndef VBP
+uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state)
+{
+    uint8_t *ptr;
+    uint32_t size;
+    uint32_t data_left=0, phase = 0, ret = 0;
+    viddec_sc_parse_cubby_cxt_t *cxt;
+    viddec_mp4_parser_t *p_info;
+
+    cxt = ( viddec_sc_parse_cubby_cxt_t *)in;
+    viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state;
+    size = 0;
+    data_left = cxt->size;
+    ptr = cxt->buf;
+    phase = cxt->phase;
+    cxt->sc_end_pos = -1;
+    p_info = (viddec_mp4_parser_t *)pcxt;
+
+    /* parse while there is more data and the start code has not been found */
+    while ((data_left > 0) && (phase < 3))
+    {
+        /* Check if we are byte aligned & phase=0; if that's the case we can check
+           a word at a time instead of a byte */
+        if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0))
+        {
+            while (data_left > 3)
+            {
+                uint32_t data;
+                char mask1 = 0, mask2=0;
+
+                data = *((uint32_t *)ptr);
+#ifndef MFDBIGENDIAN
+                data = SWAP_WORD(data);
+#endif
+                mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+                mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
+                /* If the second byte and fourth byte are not zeros then we cannot have a start code
+                   here, as we need two consecutive zero bytes for a start code pattern */
+                if (mask1 && mask2)
+                {/* Success so skip 4 bytes and start over */
+                    ptr+=4;
+                    size+=4;
+                    data_left-=4;
+                    continue;
+                }
+                else
+                {
+                    break;
+                }
+            }
+        }
+
+        /* At this point either the data is not on a word boundary, or phase > 0, or we are on a
+           word boundary but detected two zero bytes in the word, so we look one byte at a time */
+        if (data_left > 0)
+        {
+            if (*ptr == FIRST_STARTCODE_BYTE)
+            {/* Phase can be 3 only if the third start code byte is found */
+                phase++;
+                ptr++;
+                size++;
+                data_left--;
+                if (phase > 2)
+                {
+                    phase = 2;
+
+                    if ( (((uint32_t)ptr) & 0x3) == 0 )
+                    {
+                        while ( data_left > 3 )
+                        {
+                            if (*((uint32_t *)ptr) != 0)
+                            {
+                                break;
+                            }
+                            ptr+=4;
+                            size+=4;
+                            data_left-=4;
+                        }
+                    }
+                }
+            }
+            else
+            {
+                uint8_t normal_sc=0, short_sc=0;
+                if (phase == 2)
+                {
+                    normal_sc = (*ptr == THIRD_STARTCODE_BYTE);
+                    short_sc = (p_info->ignore_scs == 0) && (SHORT_THIRD_STARTCODE_BYTE == ( *ptr & 0xFC));
+                }
+
+                if (!(normal_sc | short_sc))
+                {
+                    phase = 0;
+                }
+                else
+                {/* Match for start code so update context with byte position */
+                    cxt->sc_end_pos = size;
+                    phase = 3;
+                    p_info->cur_sc_prefix = p_info->next_sc_prefix;
+                    p_info->next_sc_prefix = (normal_sc) ? 
1: 0; + if (normal_sc) + { + p_info->ignore_scs=1; + } + else + { + /* For short start code since start code is in one nibble just return at this point */ + phase += 1; + state->next_sc = *ptr; + state->second_scprfx_length = 2; + ret=1; + break; + } + } + ptr++; + size++; + data_left--; + } + } + } + if ((data_left > 0) && (phase == 3)) + { + cxt->sc_end_pos++; + state->next_sc = cxt->buf[cxt->sc_end_pos]; + state->second_scprfx_length = 3; + phase++; + ret = 1; + } + cxt->phase = phase; + /* Return SC found only if phase is 4, else always success */ + return ret; +} +#endif diff --git a/mixvbp/vbp_plugin/vc1/Android.mk b/mixvbp/vbp_plugin/vc1/Android.mk new file mode 100755 index 0000000..b0245fa --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/Android.mk @@ -0,0 +1,36 @@ +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + mix_vbp_vc1_stubs.c \ + vc1parse_bpic_adv.c \ + vc1parse_huffman.c \ + vc1parse_mv_com.c \ + vc1parse_ppic_adv.c \ + viddec_vc1_parse.c \ + vc1parse_bpic.c \ + vc1parse_common_tables.c \ + vc1parse_ipic_adv.c \ + vc1parse_pic_com_adv.c \ + vc1parse_ppic.c \ + vc1parse_bitplane.c \ + vc1parse.c \ + vc1parse_ipic.c \ + vc1parse_pic_com.c \ + vc1parse_vopdq.c + +LOCAL_CFLAGS := -DVBP -DHOST_ONLY + +LOCAL_C_INCLUDES := \ + $(MIXVBP_DIR)/include \ + $(MIXVBP_DIR)/vbp_manager/include \ + $(MIXVBP_DIR)/vbp_plugin/vc1/include + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libmixvbp_vc1 + +LOCAL_SHARED_LIBRARIES := \ + libmixvbp + +include $(BUILD_SHARED_LIBRARY) diff --git a/mixvbp/vbp_plugin/vc1/include/vc1common.h b/mixvbp/vbp_plugin/vc1/include/vc1common.h new file mode 100755 index 0000000..e4f1b3f --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/include/vc1common.h @@ -0,0 +1,143 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved. +// +// Description: VC1 header. 
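Shared workload item types, frame-type flags, and the VC1D_SPR_REGS register layout used by the VC1 parser. 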
+//
+*/
+
+#ifndef _VC1_COMMON_H_
+#define _VC1_COMMON_H_
+
+/* If the pixel data ends up near an emulation prevention sequence, the decoder will be unaware
+   of it unless we also send some of the preceding bytes */
+//#define PADDING_FOR_EMUL 3
+#define PADDING_FOR_EMUL 0
+
+#define GET_BLSB( name, bitf ) BLSB_MFD_##name##_##bitf
+#define GET_BMSK( name, bitf ) BMSK_MFD_##name##_##bitf
+
+#define BF_READ( name, bitf, value ) ((value & GET_BMSK(name, bitf) ) >> GET_BLSB(name, bitf) )
+#define BF_WRITE( name, bitf, value, data ) value = ((value & ~GET_BMSK(name, bitf)) | ((data) << GET_BLSB(name, bitf)))
+
+enum vc1_workload_item_type
+{
+    VIDDEC_WORKLOAD_VC1_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
+    VIDDEC_WORKLOAD_VC1_BITOFFSET,
+    VIDDEC_WORKLOAD_VC1_BITPLANE0,
+    VIDDEC_WORKLOAD_VC1_BITPLANE1,
+    VIDDEC_WORKLOAD_VC1_BITPLANE2,
+    VIDDEC_WORKLOAD_VC1_REGS_SEQ_ENTRY,
+    VIDDEC_WORKLOAD_VC1_REGS_SIZE_AND_AP_RANGEMAP,
+    VIDDEC_WORKLOAD_VC1_REGS_INT_COM_FW,
+    VIDDEC_WORKLOAD_VC1_REGS_INT_COM_BW,
+    VIDDEC_WORKLOAD_VC1_REGS_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO,
+    VIDDEC_WORKLOAD_VC1_REGS_SLICE_FRAME_TYPE_INFO,
+    VIDDEC_WORKLOAD_VC1_REGS_SLICE_CONTROL_INFO,
+    VIDDEC_WORKLOAD_VC1_REGS_SLICE_OTHER_INFO,
+    VIDDEC_WORKLOAD_VC1_REGS_REF_FRAME_TYPE,
+    VIDDEC_WORKLOAD_VC1_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0,
+    VIDDEC_WORKLOAD_VC1_FUTURE_FRAME,
+};
+
+typedef enum
+{
+    vc1_ProgressiveFrame = 0,
+    vc1_InterlacedFrame = 2,
+    vc1_InterlacedField = 3,
+    vc1_PictureFormatNone
+} vc1_fcm;
+
+/** This enumeration defines the various frame types as defined in the PTYPE syntax
+element.
+PTYPE interpretation depends on the bitstream profile. The value that needs to be
+programmed into the frame_type register 0x2218 is this generic enum, obtained
+from the Canmore code.
+Changing this enum to match the spec for each profile caused md5 mismatches.
+TODO: Why are these the values to program - is this the case with the reference decoder?
+*/
+enum
+{
+    VC1_I_FRAME = (1 << 0),
+    VC1_P_FRAME = (1 << 1),
+    VC1_B_FRAME = (1 << 2),
+    VC1_BI_FRAME = VC1_I_FRAME | VC1_B_FRAME,
+    VC1_SKIPPED_FRAME = (1 << 3) | VC1_P_FRAME
+};
+
+enum {
+    vc1_FrameDone = 1 << 0,
+    vc1_FieldDone = 1 << 1,
+    vc1_SliceDone = 1 << 2,
+    vc1_Field1Done = 1 << 3,
+    vc1_Field2Done = 1 << 4,
+    vc1_FrameError = 1 << 8,
+};
+
+typedef struct {
+    /* 0x00 */ uint32_t general;
+    /* 0x04 */ uint32_t stream_format1;
+    /* 0x08 */ uint32_t coded_size;
+    /* 0x0c */ uint32_t stream_format2;
+    /* 0x10 */ uint32_t entrypoint1;
+    /* 0x14 */ uint32_t ap_range_map;
+    /* 0x18 */ uint32_t frame_type;
+    /* 0x1c */ uint32_t recon_control;
+    /* 0x20 */ uint32_t mv_control;
+    /* 0x24 */ uint32_t intcomp_fwd_top;
+    /* 0x28 */ uint32_t ref_bfraction;
+    /* 0x2c */ uint32_t blk_control;
+    /* 0x30 */ uint32_t trans_data;
+    /* 0x34 */ uint32_t vop_dquant;
+#define NUM_REF_ID 4
+    /* 0x38-0x48 */ uint32_t ref_frm_id[NUM_REF_ID];
+    /* 0x48 */ uint32_t fieldref_ctrl_id;
+    /* 0x4c */ uint32_t auxfrmctrl;
+    /* 0x50 */ uint32_t imgstruct;
+    /* 0x54 */ uint32_t alt_frame_type;
+    /* 0x58 */ uint32_t intcomp_fwd_bot;
+    /* 0x5c */ uint32_t intcomp_bwd_top;
+    /* 0x60 */ uint32_t intcomp_bwd_bot;
+    /* 0x64 */ uint32_t smp_range_map;
+} VC1D_SPR_REGS;
+
+/*
+In VC1, the past reference is the forward reference and the future reference is the backward
+reference, i.e. a P frame has only a forward reference and a B frame has both a forward and a
+backward reference.
+*/
+enum {
+    VC1_FRAME_CURRENT_REF = 0,
+    VC1_FRAME_CURRENT_DIS,
+    VC1_FRAME_PAST,
+    VC1_FRAME_FUTURE,
+    VC1_FRAME_ALT
+};
+
+#endif //_VC1_COMMON_H_
+
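The GET_BLSB/GET_BMSK helpers in this header resolve a (register, bitfield) name pair to
token-pasted LSB-position and mask constants, and BF_READ/BF_WRITE use them to extract or
update that field inside a 32-bit register image. A minimal, self-contained sketch of how
they compose follows; the BLSB_MFD_/BMSK_MFD_ constants are hypothetical stand-ins for the
firmware register definitions, which are not part of this patch.

    #include <stdint.h>
    #include <assert.h>

    /* hypothetical field definition for illustration only: a 2-bit field at bit 4 */
    #define BLSB_MFD_FRAME_TYPE_PTYPE 4
    #define BMSK_MFD_FRAME_TYPE_PTYPE (0x3 << 4)

    #define GET_BLSB( name, bitf ) BLSB_MFD_##name##_##bitf
    #define GET_BMSK( name, bitf ) BMSK_MFD_##name##_##bitf
    #define BF_READ( name, bitf, value ) ((value & GET_BMSK(name, bitf) ) >> GET_BLSB(name, bitf) )
    #define BF_WRITE( name, bitf, value, data ) value = ((value & ~GET_BMSK(name, bitf)) | ((data) << GET_BLSB(name, bitf)))

    int main(void)
    {
        uint32_t frame_type = 0;
        BF_WRITE(FRAME_TYPE, PTYPE, frame_type, 2);        /* frame_type becomes 0x20 */
        assert(BF_READ(FRAME_TYPE, PTYPE, frame_type) == 2); /* the field reads back as 2 */
        return 0;
    }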
diff --git a/mixvbp/vbp_plugin/vc1/mix_vbp_vc1_stubs.c b/mixvbp/vbp_plugin/vc1/mix_vbp_vc1_stubs.c
new file mode 100755
index 0000000..cffa7b8
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/mix_vbp_vc1_stubs.c
@@ -0,0 +1,30 @@
+#include "vc1.h"
+
+void vc1_start_new_frame (void *parent, vc1_viddec_parser_t *parser )
+{
+    return;
+}
+
+void vc1_end_frame (vc1_viddec_parser_t *parser)
+{
+    return;
+}
+
+
+int32_t vc1_parse_emit_current_frame( void *parent, vc1_viddec_parser_t *parser )
+{
+    return(0);
+}
+
+
+void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser)
+{
+}
+
+void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser)
+{
+}
+
+void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser)
+{
+}
diff --git a/mixvbp/vbp_plugin/vc1/vc1.h b/mixvbp/vbp_plugin/vc1/vc1.h
new file mode 100755
index 0000000..ca92d17
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1.h
@@ -0,0 +1,236 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: VC1 header.
+//
+*/
+
+#ifndef _VC1_H_
+#define _VC1_H_
+
+#ifdef MFD_FIRMWARE
+typedef unsigned int size_t;
+#define LOG(...)
+#else
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <stdint.h>
+#ifndef VBP
+enum {
+    NONE = 0,
+    CRITICAL,
+    WARNING,
+    INFO,
+    DEBUG,
+} ;
+
+#define vc1_log_level DEBUG
+
+#define LOG( log_lev, format, args ... ) \
+    if (vc1_log_level >= log_lev) { OS_INFO("%s[%d]:: " format "\n", __FUNCTION__ , __LINE__ , ## args ); }
+#endif
+#endif
+
+#include "viddec_fw_workload.h"
+#include "vc1parse_common_defs.h"
+#include "vc1common.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef VBP
+#define LOG_CRIT(format, args ... )  LOG( CRITICAL, format, ## args)
+#define LOG_WARN(format, args ... )  LOG( WARNING, format, ## args)
+#define LOG_INFO(format, args ... )  LOG( INFO, format, ## args)
+#define LOG_DEBUG(format, args ... ) LOG( DEBUG, format, ## args)
+#else
+#define LOG_CRIT(format, args ... )
+#define LOG_WARN(format, args ... )
+#define LOG_INFO(format, args ... )
+#define LOG_DEBUG(format, args ... )
+#endif
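+
+/* Illustrative only (not compiled): with vc1_log_level pinned to DEBUG, the non-VBP
+   build routes every LOG_* call through OS_INFO with function/line context, while the
+   VBP build (this plugin is compiled with -DVBP, see Android.mk) compiles the LOG_*
+   wrappers away entirely: */
+#if 0
+LOG_WARN("unexpected PTYPE %d", picLayerHeader->PTYPE);
+/* expands (non-VBP) to:
+   if (vc1_log_level >= WARNING)
+   { OS_INFO("%s[%d]:: unexpected PTYPE %d\n", __FUNCTION__, __LINE__, picLayerHeader->PTYPE); }
+   and expands to nothing under -DVBP */
+#endif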
+
+// Seems to be a hardware bug: DO NOT TRY TO SWAP BITPLANE0 and BITPLANE2.
+// The Block Control Register at offset 222C documents Bitplane_raw_ID0 as indicating
+// directmb/fieldtx and Bitplane_raw_ID2 as indicating acpred/mvtypemb/forwardmb,
+// but when we send bitplane index 0 for directmb/fieldtx and bitplane index 2 for
+// acpred/mvtypemb/forwardmb, md5 mismatches are seen.
+    typedef enum
+    {
+        BPP_FORWARDMB = VIDDEC_WORKLOAD_VC1_BITPLANE0,
+        BPP_ACPRED    = VIDDEC_WORKLOAD_VC1_BITPLANE0,
+        BPP_MVTYPEMB  = VIDDEC_WORKLOAD_VC1_BITPLANE0,
+        BPP_OVERFLAGS = VIDDEC_WORKLOAD_VC1_BITPLANE1,
+        BPP_SKIPMB    = VIDDEC_WORKLOAD_VC1_BITPLANE1,
+        BPP_DIRECTMB  = VIDDEC_WORKLOAD_VC1_BITPLANE2,
+        BPP_FIELDTX   = VIDDEC_WORKLOAD_VC1_BITPLANE2,
+    } vc1_bpp_type_t;
+
+    /* status codes */
+    typedef enum {
+        VC1_STATUS_EOF         = 1,   // end of file
+        VC1_STATUS_OK          = 0,   // no error
+        VC1_STATUS_NO_MEM      = 2,   // out of memory
+        VC1_STATUS_FILE_ERROR  = 2,   // file error
+        VC1_STATUS_NOTSUPPORT  = 2,   // unsupported mode
+        VC1_STATUS_PARSE_ERROR = 2,   // failed to parse the VC-1 stream
+        VC1_STATUS_ERROR       = 2    // unknown/unspecified error
+    } vc1_Status;
+
+    /* VC1 start code values */
+    typedef enum {
+        vc1_Forbidden          = 0x80,/*0x80-0xFF*/
+        vc1_Reserved1          = 0x09,/*0x00-0x09*/
+        vc1_Reserved2          = 0x10,
+        vc1_Reserved3          = 0x1A,
+        vc1_Reserved4          = 0x20,/*0x20-0x7F*/
+        vc1_SCEndOfSequence    = 0x0A,
+        vc1_SCSlice            = 0x0B,
+        vc1_SCField            = 0x0C,
+        vc1_SCFrameHeader      = 0x0D,
+        vc1_SCEntryPointHeader = 0x0E,
+        vc1_SCSequenceHeader   = 0x0F,
+        vc1_SCSliceUser        = 0x1B,
+        vc1_SCFieldUser        = 0x1C,
+        vc1_SCFrameUser        = 0x1D,
+        vc1_SCEntryPointUser   = 0x1E,
+        vc1_SCSequenceUser     = 0x1F
+    } vc1_sc;
+
+#if 0
+    typedef enum
+    {
+        vc1_ProfileSimple = 0,  /** Simple profile */
+        vc1_ProfileMain,        /** Main profile */
+        vc1_ProfileReserved,    /** Reserved */
+        vc1_ProfileAdvanced     /** Advanced profile */
+    } vc1_Profile;
+#endif
+
+    typedef enum
+    {
+        vc1_PtypeI  = 1,
+        vc1_PtypeP  = 2,
+        vc1_PtypeB  = 4,
+        vc1_PtypeBI = 5,
+        vc1_PtypeSkipped = 8|2,
+    } vc1_ptype;
+
+    typedef enum
+    {
+        vc1_PtypeII = 0,
+        vc1_PtypeIP = 1,
+        vc1_PtypePI = 2,
+        vc1_PtypePP = 3,
+        vc1_PtypeBB = 4,
+        vc1_PtypeBBI = 5,
+        vc1_PtypeBIB = 6,
+        vc1_PtypeBIBI = 7
+    } vc1_fptype;
+
+    typedef enum
+    {
+        vc1_Imode_Raw = 0,  //0x0000
+        vc1_Imode_Norm2,    //0x10
+        vc1_Imode_Diff2,    //0x001
+        vc1_Imode_Norm6,    //0x11
+        vc1_Imode_Diff6,    //0x0001
+        vc1_Imode_Rowskip,  //0x010
+        vc1_Imode_Colskip,  //0x011
+    } vc1_Imode;
+
+    /* calculation of MAX_BITPLANE_SZ: a 2048/16 x 1088/16 pel frame is 128x68 MB,
+     * one bit per MB; as rows are packed in DWORDS,
+     * we need (128/32) * 68 = 272 dwords for bitplane storage
+     */
+#define MAX_BITPLANE_SZ 272
+
+    /* Full Info */
+    typedef struct {
+        unsigned char* bufptr;          /* current frame, points to header or data */
+        int bitoff;                     /* mostly points to the next frame header or PSC */
+        int picture_info_has_changed;
+        vc1_metadata_t metadata;
+        vc1_PictureLayerHeader picLayerHeader;
+        uint32_t bitplane[MAX_BITPLANE_SZ];
+    } vc1_Info;
+
+#ifdef __cplusplus
+}
+#endif
+
+enum {
+    VC1_REF_FRAME_T_MINUS_1 = 0,
+    VC1_REF_FRAME_T_MINUS_2,
+    VC1_REF_FRAME_T_MINUS_0,
+    VC1_NUM_REFERENCE_FRAMES,
+};
+
+enum vc1_sc_seen_flags
+{
+    VC1_SC_INVALID = 0 << 0,
+    VC1_SC_SEQ = 1 << 0,
+    VC1_SC_EP  = 1 << 1,
+    VC1_SC_FRM = 1 << 2,
+    VC1_SC_FLD = 1 << 3,
+    VC1_SC_SLC = 1 << 4,
+    VC1_SC_UD  = 1 << 5,
+};
+#define VC1_SEQ_MASK VC1_SC_SEQ
+#define VC1_EP_MASK  (VC1_SC_SEQ | VC1_SC_EP)
+#define VC1_FRM_MASK (VC1_SC_SEQ | VC1_SC_EP | VC1_SC_FRM)
+#define VC1_FLD_MASK (VC1_SC_SEQ | VC1_SC_EP | VC1_SC_FRM | VC1_SC_FLD)
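+
+/* Illustrative only (not compiled): the masks above appear intended to gate parsing
+   order, e.g. a frame start code is only usable once the sequence and entry-point
+   headers have been seen. A sketch of that kind of check, using the sc_seen field
+   declared in vc1_viddec_parser_t below: */
+#if 0
+if ((parser->sc_seen & VC1_FRM_MASK) == VC1_FRM_MASK) {
+    /* SEQ, EP and FRM have all been seen: the frame can be emitted */
+}
+parser->sc_seen |= VC1_SC_FLD;   /* record that a field start code was seen */
+#endif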
+
+typedef struct {
+    int id;
+    uint32_t intcomp_top;
+    uint32_t intcomp_bot;
+    int fcm;        /* frame coding mode */
+    int type;
+    int anchor[2];  /* one per field */
+    int rr_en;      /* range reduction enable flag at sequence layer */
+    int rr_frm;     /* range reduction flag at picture layer */
+    int tff;
+} ref_frame_t;
+
+typedef struct
+{
+    uint32_t sc_seen_since_last_wkld;
+    uint32_t sc_seen;
+    uint32_t is_frame_start;
+    uint32_t is_second_start;
+    uint32_t is_reference_picture;
+    uint32_t intcomp_last[4]; /* for B frames */
+    uint32_t intcomp_top[2];
+    uint32_t intcomp_bot[2];
+    vc1_Info info;
+    VC1D_SPR_REGS spr;
+    ref_frame_t ref_frame[VC1_NUM_REFERENCE_FRAMES];
+#ifdef VBP
+    /* A storage area is provided for each type of bit plane. Only one of  */
+    /* each type will ever be used for a picture, never more than three    */
+    /* bit-planes per picture, and often only one is used. We never clear  */
+    /* this data; we simply write into it when we need to.                 */
+    /* vc1parse_bitplane.c fills these areas and points the corresponding  */
+    /* bitplane fields in the picture header structure at them. Those      */
+    /* structures are set every time a picture parse begins. */
+    uint32_t bp_forwardmb[4096];
+    uint32_t bp_acpred[4096];
+    uint32_t bp_mvtypemb[4096];
+    uint32_t bp_overflags[4096];
+    uint32_t bp_skipmb[4096];
+    uint32_t bp_directmb[4096];
+    uint32_t bp_fieldtx[4096];
+    uint32_t start_code;
+#endif
+} vc1_viddec_parser_t;
+
+#endif //_VC1_H_
+
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse.c b/mixvbp/vbp_plugin/vc1/vc1parse.c
new file mode 100755
index 0000000..06ac094
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse.c
@@ -0,0 +1,604 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 bitstream layers down to but not including
+// the macroblock layer.
+//
+*/
+
+#include "vc1parse.h"
+
+#define VC1_PIXEL_IN_LUMA 16
+
+/*------------------------------------------------------------------------------
+ * Parse a modified rcv file; start codes are inserted using rcv2vc1.c.
+ * The source is in
+ * http://svn.jf.intel.com/svn/DHG_Src/CESWE_Src/DEV/trunk/sv/mfd/tools/utils.
+ * Assume the rcv file width < 90,112 pixels to differentiate it from a real VC1
+ * advanced profile header.
+ * The original rcv description is in annex L,
+ * Table 263 of SMPTE 421M.
+ */
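+/* Worked example of the conversions performed below (input values assumed for
+   illustration): a 320x240 RCV clip yields widthMB = (320+15)/16 = 20 and
+   heightMB = (240+15)/16 = 15, and the workload resolution fields become
+   320/2 - 1 = 159 and 240/2 - 1 = 119, since the WL takes the resolution in
+   units of 2 pel (sec. 6.2.13.1). */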
+vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t result;
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_RcvSequenceHeader rcv;
+
+    memset(&rcv, 0, sizeof(vc1_RcvSequenceHeader));
+
+    result = viddec_pm_get_bits(ctxt, &rcv.struct_a_rcv, 32);
+    md->width = rcv.struct_a.HORIZ_SIZE;
+    md->height = rcv.struct_a.VERT_SIZE;
+#ifdef VBP
+    // The HRD rate and HRD buffer size may be encoded according to a 64-bit sequence header
+    // data structure B. If there is no data structure B metadata contained in the bitstream,
+    // we will not be able to get the bitrate data, hence we set it to 0 for now.
+    md->HRD_NUM_LEAKY_BUCKETS = 0;
+    md->hrd_initial_state.sLeakyBucket[0].HRD_RATE = 0;
+#endif
+
+    result = viddec_pm_get_bits(ctxt, &rcv.struct_c_rcv, 32);
+    md->PROFILE = rcv.struct_c.PROFILE >> 2;
+    md->LOOPFILTER = rcv.struct_c.LOOPFILTER;
+    md->MULTIRES = rcv.struct_c.MULTIRES;
+    md->FASTUVMC = rcv.struct_c.FASTUVMC;
+    md->EXTENDED_MV = rcv.struct_c.EXTENDED_MV;
+    md->DQUANT = rcv.struct_c.DQUANT;
+    md->VSTRANSFORM = rcv.struct_c.VSTRANSFORM;
+    md->OVERLAP = rcv.struct_c.OVERLAP;
+    md->RANGERED = rcv.struct_c.RANGERED;
+    md->MAXBFRAMES = rcv.struct_c.MAXBFRAMES;
+    md->QUANTIZER = rcv.struct_c.QUANTIZER;
+    md->FINTERPFLAG = rcv.struct_c.FINTERPFLAG;
+#ifdef VBP
+    md->SYNCMARKER = rcv.struct_c.SYNCMARKER;
+#endif
+
+    if ((md->PROFILE == VC1_PROFILE_SIMPLE) ||
+            (md->MULTIRES && md->PROFILE == VC1_PROFILE_MAIN))
+    {
+        md->DQUANT = 0;
+    }
+    // TODO: NEED TO CHECK THAT RESERVED BITS ARE 0
+
+    md->widthMB = (md->width + 15) / VC1_PIXEL_IN_LUMA;
+    md->heightMB = (md->height + 15) / VC1_PIXEL_IN_LUMA;
+
+    DEB("rcv: beforemod: res: %dx%d\n", md->width, md->height);
+
+    /* WL takes the resolution in units of 2 pel - sec. 6.2.13.1 */
+    md->width = md->width/2 - 1;
+    md->height = md->height/2 - 1;
+
+    DEB("rcv: res: %dx%d\n", md->width, md->height);
+
+    // POPULATE WORKLOAD ITEM
+    {
+        viddec_workload_item_t wi;
+
+        wi.vwi_type = VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C;
+
+        wi.vc1_sh_struct_a_c.size = 0;
+        wi.vc1_sh_struct_a_c.flags = 0;
+        wi.vc1_sh_struct_a_c.pad = 0;
+
+        viddec_fw_vc1_set_rcv_horiz_size(&wi.vc1_sh_struct_a_c, rcv.struct_a.HORIZ_SIZE);
+        viddec_fw_vc1_set_rcv_vert_size(&wi.vc1_sh_struct_a_c, rcv.struct_a.VERT_SIZE);
+
+        viddec_fw_vc1_set_rcv_bitrtq_postproc(&wi.vc1_sh_struct_a_c, rcv.struct_c.BITRTQ_POSTPROC);
+        viddec_fw_vc1_set_rcv_frmrtq_postproc(&wi.vc1_sh_struct_a_c, rcv.struct_c.FRMRTQ_POSTPROC);
+        viddec_fw_vc1_set_rcv_profile(&wi.vc1_sh_struct_a_c, rcv.struct_c.PROFILE);
+        viddec_fw_vc1_set_rcv_level(&wi.vc1_sh_struct_a_c, 0);
+        viddec_fw_vc1_set_rcv_cbr(&wi.vc1_sh_struct_a_c, 0);
+        viddec_fw_vc1_set_rcv_rangered(&wi.vc1_sh_struct_a_c, rcv.struct_c.RANGERED);
+        viddec_fw_vc1_set_rcv_maxbframes(&wi.vc1_sh_struct_a_c, rcv.struct_c.MAXBFRAMES);
+        viddec_fw_vc1_set_rcv_finterpflag(&wi.vc1_sh_struct_a_c, rcv.struct_c.FINTERPFLAG);
+
+        result = viddec_pm_append_workitem(ctxt, &wi, false);
+    }
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse the sequence layer. This function is only applicable to advanced profile,
+ * as simple and main profiles use other mechanisms to communicate these
+ * metadata.
+ * Table 3 of SMPTE 421M.
+ * Table 13 of SMPTE 421M for HRD_PARAM().
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) +{ + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_SequenceLayerHeader sh; + uint32_t result; + + memset(&sh, 0, sizeof(vc1_SequenceLayerHeader)); + + // PARSE SEQUENCE HEADER + result = viddec_pm_get_bits(ctxt, &sh.flags, 15); + if (result == 1) + { + md->PROFILE = sh.seq_flags.PROFILE; + md->LEVEL = sh.seq_flags.LEVEL; + md->CHROMAFORMAT = sh.seq_flags.COLORDIFF_FORMAT; + md->FRMRTQ = sh.seq_flags.FRMRTQ_POSTPROC; + md->BITRTQ = sh.seq_flags.BITRTQ_POSTPROC; + } + + result = viddec_pm_get_bits(ctxt, &sh.max_size, 32); + if (result == 1) + { + md->POSTPROCFLAG = sh.seq_max_size.POSTPROCFLAG; + md->width = sh.seq_max_size.MAX_CODED_WIDTH; + md->height = sh.seq_max_size.MAX_CODED_HEIGHT; + md->PULLDOWN = sh.seq_max_size.PULLDOWN; + md->INTERLACE = sh.seq_max_size.INTERLACE; + md->TFCNTRFLAG = sh.seq_max_size.TFCNTRFLAG; + md->FINTERPFLAG = sh.seq_max_size.FINTERPFLAG; + md->PSF = sh.seq_max_size.PSF; + } + + if (sh.seq_max_size.DISPLAY_EXT == 1) + { + result = viddec_pm_get_bits(ctxt, &sh.disp_size, 29); + if (result == 1) + { + if (sh.seq_disp_size.ASPECT_RATIO_FLAG == 1) + { + result = viddec_pm_get_bits(ctxt, &tempValue, 4); + sh.ASPECT_RATIO = tempValue; + if (sh.ASPECT_RATIO == 15) + { + result = viddec_pm_get_bits(ctxt, &sh.aspect_size, 16); + } +#ifdef VBP + md->ASPECT_RATIO_FLAG = 1; + md->ASPECT_RATIO = sh.ASPECT_RATIO; + md->ASPECT_HORIZ_SIZE = sh.seq_aspect_size.ASPECT_HORIZ_SIZE; + md->ASPECT_VERT_SIZE = sh.seq_aspect_size.ASPECT_VERT_SIZE; +#endif + } + + result = viddec_pm_get_bits(ctxt, &tempValue, 1); + sh.FRAMERATE_FLAG = tempValue; + if (sh.FRAMERATE_FLAG == 1) + { + result = viddec_pm_get_bits(ctxt, &tempValue, 1); + sh.FRAMERATEIND = tempValue; + if (sh.FRAMERATEIND == 0) + { + result = viddec_pm_get_bits(ctxt, &sh.framerate_fraction, 12); + } + else + { + result = viddec_pm_get_bits(ctxt, &tempValue, 16); + sh.FRAMERATEEXP = tempValue; + } + } + + result = viddec_pm_get_bits(ctxt, &tempValue, 1); + sh.COLOR_FORMAT_FLAG = tempValue; + if (sh.COLOR_FORMAT_FLAG == 1) + { + result = viddec_pm_get_bits(ctxt, &sh.color_format, 24); + } +#ifdef VBP + md->COLOR_FORMAT_FLAG = sh.COLOR_FORMAT_FLAG; + md->MATRIX_COEF = sh.seq_color_format.MATRIX_COEF; +#endif + } // Successful get of display size + } // DISPLAY_EXT is 1 + + result = viddec_pm_get_bits(ctxt, &tempValue, 1); + sh.HRD_PARAM_FLAG = tempValue; + if (sh.HRD_PARAM_FLAG == 1) + { + /* HRD_PARAM(). 
*/ + result = viddec_pm_get_bits(ctxt, &tempValue, 5); + sh.HRD_NUM_LEAKY_BUCKETS = tempValue; + md->HRD_NUM_LEAKY_BUCKETS = sh.HRD_NUM_LEAKY_BUCKETS; +#ifndef VBP + // Skip the rest of the parsing - hrdinfo is not required for decode or for attributes +#else + { + uint8_t count; + uint8_t bitRateExponent; + uint8_t bufferSizeExponent; + + /* bit_rate_exponent */ + result = viddec_pm_get_bits(ctxt, &tempValue, 4); + bitRateExponent = (uint8_t)(tempValue + 6); + + /* buffer_size_exponent */ + result = viddec_pm_get_bits(ctxt, &tempValue, 4); + bufferSizeExponent = (uint8_t)(tempValue + 4); + md->hrd_initial_state.BUFFER_SIZE_EXPONENT = bufferSizeExponent; + + for(count = 0; count < sh.HRD_NUM_LEAKY_BUCKETS; count++) + { + /* hrd_rate */ + result = viddec_pm_get_bits(ctxt, &tempValue, 16); + md->hrd_initial_state.sLeakyBucket[count].HRD_RATE = + (uint32_t)(tempValue + 1) << bitRateExponent; + + /* hrd_buffer */ + result = viddec_pm_get_bits(ctxt, &tempValue, 16); + md->hrd_initial_state.sLeakyBucket[count].HRD_BUFFER = + (uint32_t)(tempValue + 1) << bufferSizeExponent; + } + } +#endif + } + else + { + md->HRD_NUM_LEAKY_BUCKETS = 0; +#ifdef VBP + md->hrd_initial_state.sLeakyBucket[0].HRD_RATE = 0; +#endif + } + + md->widthMB = (((md->width + 1) * 2) + 15) / VC1_PIXEL_IN_LUMA; + md->heightMB = (((md->height + 1) * 2) + 15) / VC1_PIXEL_IN_LUMA; + + DEB("md: res: %dx%d\n", md->width, md->height); + DEB("sh: dispres: %dx%d\n", sh.seq_disp_size.DISP_HORIZ_SIZE, sh.seq_disp_size.DISP_VERT_SIZE); + + // POPULATE WORKLOAD ITEM + { + viddec_workload_item_t wi_sl, wi_de; + + wi_sl.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO; + + wi_sl.vc1_sl.size = 0; + wi_sl.vc1_sl.flags = 0; + wi_sl.vc1_sl.pad = 0; + + viddec_fw_vc1_set_profile(&wi_sl.vc1_sl, sh.seq_flags.PROFILE); + viddec_fw_vc1_set_level(&wi_sl.vc1_sl, sh.seq_flags.LEVEL); + viddec_fw_vc1_set_colordiff_format(&wi_sl.vc1_sl, sh.seq_flags.COLORDIFF_FORMAT); + viddec_fw_vc1_set_pulldown(&wi_sl.vc1_sl, sh.seq_max_size.PULLDOWN); + viddec_fw_vc1_set_max_coded_width(&wi_sl.vc1_sl, sh.seq_max_size.MAX_CODED_WIDTH); + viddec_fw_vc1_set_max_coded_height(&wi_sl.vc1_sl, sh.seq_max_size.MAX_CODED_HEIGHT); + + viddec_fw_vc1_set_bitrtq_postproc(&wi_sl.vc1_sl, sh.seq_flags.BITRTQ_POSTPROC); + viddec_fw_vc1_set_frmrtq_postproc(&wi_sl.vc1_sl, sh.seq_flags.FRMRTQ_POSTPROC); + viddec_fw_vc1_set_interlace(&wi_sl.vc1_sl, sh.seq_max_size.INTERLACE); + viddec_fw_vc1_set_tfcntrflag(&wi_sl.vc1_sl, sh.seq_max_size.TFCNTRFLAG); + viddec_fw_vc1_set_finterpflag(&wi_sl.vc1_sl, sh.seq_max_size.FINTERPFLAG); + viddec_fw_vc1_set_psf(&wi_sl.vc1_sl, sh.seq_max_size.PSF); + viddec_fw_vc1_set_display_ext(&wi_sl.vc1_sl, sh.seq_max_size.DISPLAY_EXT); + + result = viddec_pm_append_workitem(ctxt, &wi_sl, false); + + // send DISPLAY EXTENSION metadata if present + if (sh.seq_max_size.DISPLAY_EXT) + { + wi_de.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO; + + wi_de.vc1_sl_de.size = 0; + wi_de.vc1_sl_de.framerate = 0; + wi_de.vc1_sl_de.aspectsize = 0; + + viddec_fw_vc1_set_disp_horiz_size(&wi_de.vc1_sl_de, sh.seq_disp_size.DISP_HORIZ_SIZE); + viddec_fw_vc1_set_disp_vert_size(&wi_de.vc1_sl_de, sh.seq_disp_size.DISP_VERT_SIZE); + viddec_fw_vc1_set_disp_aspect_ratio_flag(&wi_de.vc1_sl_de, sh.seq_disp_size.ASPECT_RATIO_FLAG); + viddec_fw_vc1_set_disp_color_format_flag(&wi_de.vc1_sl_de, sh.COLOR_FORMAT_FLAG); + viddec_fw_vc1_set_disp_framerate_flag(&wi_de.vc1_sl_de, sh.FRAMERATE_FLAG); + viddec_fw_vc1_set_disp_framerateind(&wi_de.vc1_sl_de, sh.FRAMERATEIND); + + 
viddec_fw_vc1_set_disp_aspect_ratio(&wi_de.vc1_sl_de, sh.ASPECT_RATIO); + viddec_fw_vc1_set_disp_frameratenr(&wi_de.vc1_sl_de, sh.seq_framerate_fraction.FRAMERATENR); + viddec_fw_vc1_set_disp_frameratedr(&wi_de.vc1_sl_de, sh.seq_framerate_fraction.FRAMERATEDR); + viddec_fw_vc1_set_disp_framerateexp(&wi_de.vc1_sl_de, sh.FRAMERATEEXP); + + viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(&wi_de.vc1_sl_de, sh.seq_aspect_size.ASPECT_HORIZ_SIZE); + viddec_fw_vc1_set_disp_aspect_ratio_vert_size(&wi_de.vc1_sl_de, sh.seq_aspect_size.ASPECT_VERT_SIZE); + viddec_fw_vc1_set_disp_color_prim(&wi_de.vc1_sl_de, sh.seq_color_format.COLOR_PRIM); + viddec_fw_vc1_set_disp_transfer_char(&wi_de.vc1_sl_de, sh.seq_color_format.TRANSFER_CHAR); + + result = viddec_pm_append_workitem(ctxt, &wi_de, false); + } + } + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse entry point layer. This function is only applicable for advanced + * profile and is used to signal a random access point and changes in coding + * control parameters. + * Table 14 of SMPTE 421M. + * Table 15 of SMPTE 421M for HRD_FULLNESS(). + *------------------------------------------------------------------------------ + */ +vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_EntryPointHeader ep; + uint32_t result; + uint32_t temp; + + memset(&ep, 0, sizeof(vc1_EntryPointHeader)); + + // PARSE ENTRYPOINT HEADER + result = viddec_pm_get_bits(ctxt, &ep.flags, 13); + if (result == 1) + { + // Skip the flags already peeked at (13) and the unneeded hrd_full data + // NOTE: HRD_NUM_LEAKY_BUCKETS is initialized to 0 when HRD_PARAM_FLAG is not present + int hrd_bits = md->HRD_NUM_LEAKY_BUCKETS * 8; + while (hrd_bits >= 32) + { + result = viddec_pm_skip_bits(ctxt, 32); + hrd_bits -= 32; + } + result = viddec_pm_skip_bits(ctxt, hrd_bits); + + md->REFDIST = 0; + md->BROKEN_LINK = ep.ep_flags.BROKEN_LINK; + md->CLOSED_ENTRY = ep.ep_flags.CLOSED_ENTRY; + md->PANSCAN_FLAG = ep.ep_flags.PANSCAN_FLAG; + md->REFDIST_FLAG = ep.ep_flags.REFDIST_FLAG; + md->LOOPFILTER = ep.ep_flags.LOOPFILTER; + md->FASTUVMC = ep.ep_flags.FASTUVMC; + md->EXTENDED_MV = ep.ep_flags.EXTENDED_MV; + md->DQUANT = ep.ep_flags.DQUANT; + md->VSTRANSFORM = ep.ep_flags.VSTRANSFORM; + md->OVERLAP = ep.ep_flags.OVERLAP; + md->QUANTIZER = ep.ep_flags.QUANTIZER; + + result = viddec_pm_get_bits(ctxt, &temp, 1); + if (result == 1) + { + ep.CODED_SIZE_FLAG = temp; + if (ep.CODED_SIZE_FLAG) + { + result = viddec_pm_get_bits(ctxt, &ep.size, 24); + md->width = ep.ep_size.CODED_WIDTH; + md->height = ep.ep_size.CODED_HEIGHT; + } + } + if (ep.ep_flags.EXTENDED_MV) + { + result = viddec_pm_get_bits(ctxt, &temp, 1); + md->EXTENDED_DMV = ep.EXTENDED_DMV = temp; + } + + result = viddec_pm_get_bits(ctxt, &temp, 1); + if (result == 1) + { + md->RANGE_MAPY_FLAG = ep.RANGE_MAPY_FLAG = temp; + if (ep.RANGE_MAPY_FLAG) + { + result = viddec_pm_get_bits(ctxt, &temp, 3); + md->RANGE_MAPY = ep.RANGE_MAPY = temp; + } + } + + result = viddec_pm_get_bits(ctxt, &temp, 1); + if (result == 1) + { + md->RANGE_MAPUV_FLAG = ep.RANGE_MAPUV_FLAG = temp; + if (ep.RANGE_MAPUV_FLAG) + { + result = viddec_pm_get_bits(ctxt, &temp, 3); + md->RANGE_MAPUV = ep.RANGE_MAPUV = temp; + } + } + } + + // POPULATE WORKLOAD ITEM + { + viddec_workload_item_t wi; + + wi.vwi_type = VIDDEC_WORKLOAD_GOP_INFO; + + wi.vc1_ep.size = 0; + wi.vc1_ep.flags = 0; + wi.vc1_ep.pad = 0; + + 
viddec_fw_vc1_set_ep_size_flag(&wi.vc1_ep, ep.CODED_SIZE_FLAG);
+        viddec_fw_vc1_set_ep_horiz_size(&wi.vc1_ep, ep.ep_size.CODED_WIDTH);
+        viddec_fw_vc1_set_ep_vert_size(&wi.vc1_ep, ep.ep_size.CODED_HEIGHT);
+
+        viddec_fw_vc1_set_ep_broken_link(&wi.vc1_ep, ep.ep_flags.BROKEN_LINK);
+        viddec_fw_vc1_set_ep_closed_entry(&wi.vc1_ep, ep.ep_flags.CLOSED_ENTRY);
+        viddec_fw_vc1_set_ep_panscan_flag(&wi.vc1_ep, ep.ep_flags.PANSCAN_FLAG);
+        viddec_fw_vc1_set_ep_range_mapy_flag(&wi.vc1_ep, ep.RANGE_MAPY_FLAG);
+        viddec_fw_vc1_set_ep_range_mapy(&wi.vc1_ep, ep.RANGE_MAPY);
+        viddec_fw_vc1_set_ep_range_mapuv_flag(&wi.vc1_ep, ep.RANGE_MAPUV_FLAG);
+        viddec_fw_vc1_set_ep_range_mapuv(&wi.vc1_ep, ep.RANGE_MAPUV);
+
+        result = viddec_pm_append_workitem(ctxt, &wi, false);
+    }
+
+    DEB("ep: res: %dx%d\n", ep.ep_size.CODED_WIDTH, ep.ep_size.CODED_HEIGHT);
+    DEB("md: after ep: res: %dx%d\n", md->width, md->height);
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse the picture layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureLayer(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    uint32_t temp;
+    int i;
+
+    /* default all three bitplane slots (VIDDEC_WORKLOAD_VC1_BITPLANE0..2) to raw mode */
+    for (i = 0; i < 3; i++)
+    {
+        pInfo->metadata.bp_raw[i] = true;
+    }
+
+    if (pInfo->metadata.PROFILE == VC1_PROFILE_ADVANCED)
+    {
+        VC1_PEEK_BITS(2, temp); /* fcm */
+        if ((pInfo->metadata.INTERLACE == 1) && (temp == VC1_FCM_FIELD_INTERLACE))
+        {
+            status = vc1_ParseFieldHeader_Adv(ctxt, pInfo);
+        }
+        else
+        {
+            status = vc1_ParsePictureHeader_Adv(ctxt, pInfo);
+        }
+    }
+    else
+    {
+        status = vc1_ParsePictureHeader(ctxt, pInfo);
+    }
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse the field picture layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldLayer(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_PARSE_ERROR;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    if (pInfo->metadata.PROFILE == VC1_PROFILE_ADVANCED) {
+        if (picLayerHeader->CurrField == 0)
+        {
+            picLayerHeader->PTYPE = picLayerHeader->PTypeField1;
+            picLayerHeader->BottomField = (uint8_t) (1 - picLayerHeader->TFF);
+        }
+        else
+        {
+            picLayerHeader->BottomField = (uint8_t) (picLayerHeader->TFF);
+            picLayerHeader->PTYPE = picLayerHeader->PTypeField2;
+        }
+        status = vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo);
+    }
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse the slice layer, which is only supported by advanced profile.
+ * Table 26 of SMPTE 421M, but skipping parsing of the macroblock layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t tempValue;
+    uint32_t SLICE_ADDR;
+    vc1_Status status = VC1_STATUS_OK;
+
+    VC1_GET_BITS9(9, SLICE_ADDR);
+    VC1_GET_BITS9(1, tempValue); /* PIC_HEADER_FLAG. */
+    if (tempValue == 1) {
+        uint8_t *last_bufptr = pInfo->bufptr;
+        uint32_t last_bitoff = pInfo->bitoff;
+        status = vc1_ParsePictureLayer(ctxt, pInfo);
+        pInfo->picture_info_has_changed = 1;
+        if ( status ) {
+            /* FIXME - is this a good way of handling this?
Failed, see if it's for fields */ + pInfo->bufptr = last_bufptr; + pInfo->bitoff = last_bitoff; + status = vc1_ParseFieldHeader_Adv(ctxt, pInfo); + } + } else + pInfo->picture_info_has_changed = 0; + + pInfo->picLayerHeader.SLICE_ADDR = SLICE_ADDR; + + return status; +} + +/*------------------------------------------------------------------------------ + * This function parses the user data information as defined in SMPTE 421M annex F. + * It then appends that data to the workload. + * Assume the flush byte 0x80 is within the 3 bytes before next start code. + * let's put 1 byte per item first + *------------------------------------------------------------------------------ + */ +vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc) +{ + vc1_Status status = VC1_STATUS_OK; + uint32_t user_data; + viddec_workload_item_t wi; + uint32_t ud_id; + + /* find the scope based on start code sc */ + switch (sc) { + case vc1_SCSequenceUser: + wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA; + break; + case vc1_SCEntryPointUser: + wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA; + break; + case vc1_SCFrameUser: + wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA; + break; + case vc1_SCFieldUser: + wi.vwi_type = VIDDEC_WORKLOAD_FLD_USER_DATA; + break; + case vc1_SCSliceUser: + wi.vwi_type = VIDDEC_WORKLOAD_SLC_USER_DATA; + break; + default: + wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen + break; + } + + /* get identifier - 4 bytes*/ + // Extract this information but discard it for now + VC1_GET_BITS(32, ud_id); + + /* Read 1 byte of user data and store it in workitem for the current stream level (SEQ/GOP/PIC). + Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size, + append the workitem. This loop is repeated till all user data is extracted and appended. */ + wi.user_data.size = 0; + while (viddec_pm_get_bits(ctxt, &user_data, 8) != -1) + { + /* Store the valid byte in data payload */ + wi.user_data.data_payload[wi.user_data.size] = user_data; + wi.user_data.size++; + + /* When size exceeds payload size, append workitem and continue */ + if (wi.user_data.size >= 11) + { + viddec_pm_setup_userdata(&wi); + viddec_pm_append_workitem(ctxt, &wi,false); + wi.user_data.size = 0; + } + if (user_data == 0x80) // flushing byte + break; + } + /* If size is not 0, append remaining user data. */ + if (wi.user_data.size > 0) + { + int i; + for (i=wi.user_data.size; i<11; i++) + { + wi.user_data.data_payload[i] = 0; + } + viddec_pm_setup_userdata(&wi); + viddec_pm_append_workitem(ctxt, &wi,false); + wi.user_data.size = 0; + } + + return(status); +} // vc1_ParseAndAppendUserData diff --git a/mixvbp/vbp_plugin/vc1/vc1parse.h b/mixvbp/vbp_plugin/vc1/vc1parse.h new file mode 100755 index 0000000..e190fe1 --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/vc1parse.h @@ -0,0 +1,140 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Common functions for parsing VC-1 bitstreams. 
+//
+*/
+
+#ifndef _VC1PARSE_H_
+#define _VC1PARSE_H_
+#include <stdint.h>
+#include "viddec_parser_ops.h"
+#include "vc1.h"
+
+/** @weakgroup vc1parse_defs VC-1 Parse Definitions */
+/** @ingroup vc1parse_defs */
+/*@{*/
+
+#ifdef VC1_VERBOSE
+#include <stdio.h>
+#define AUTO_TRACE OS_INFO("trace: %s\n", __FUNCTION__)
+#define DEBUGBITS(arg1, args ...) OS_INFO( arg1, ## args)
+#else
+#define AUTO_TRACE
+#define DEBUGBITS(...)
+#endif
+
+extern void *memset(void *s, int32_t c, uint32_t n);
+
+/* This macro gets the next numBits from the bitstream; it is an alias for
+   VC1_GET_BITS9, which fetches fewer than nine bits at a time (numBits is
+   assumed to be less than ten). */
+#define VC1_GET_BITS VC1_GET_BITS9
+#define VC1_GET_BITS9(numBits, value) \
+{ uint32_t __tmp__; \
+    viddec_pm_get_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \
+    value = __tmp__;\
+    DEBUGBITS("BIT:%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \
+}
+
+#define VC1_PEEK_BITS(numBits, value) \
+{ uint32_t __tmp__; \
+    viddec_pm_peek_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \
+    value = __tmp__;\
+    DEBUGBITS("PEEK%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \
+}
+
+/* This macro asserts if the condition is not true. */
+#ifdef VC1_VERBOSE
+#define VC1_ASSERT(condition) \
+{ \
+    if (! (condition)) \
+        OS_INFO("Failed " #condition "!\n"); \
+}
+#else
+#define VC1_ASSERT(condition)
+#endif
+
+/*@}*/
+
+/** @weakgroup vc1parse VC-1 Parse Functions */
+/** @ingroup vc1parse */
+/*@{*/
+
+extern const uint8_t VC1_MVMODE_LOW_TBL[];
+extern const uint8_t VC1_MVMODE_HIGH_TBL[];
+extern const int32_t VC1_BITPLANE_IMODE_TBL[];
+extern const int32_t VC1_BITPLANE_K_TBL[];
+extern const int32_t VC1_BFRACTION_TBL[];
+extern const int32_t VC1_REFDIST_TBL[];
+
+void vc1_end_frame(vc1_viddec_parser_t *parser);
+
+/* Top-level function to parse bitstream layers for the rcv format. */
+vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse bitstream layers for the various profiles. */
+vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldLayer(void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse the headers of the various picture layers for the
+simple and main profiles. */
+vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse the common part of the headers of the various picture
+layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldHeader_Adv (void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse the remainder of the headers of the various progressive
+picture layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse the remainder of the headers of the various interlace frame
+layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse the remainder of the headers of the various interlace field
+layers for the advanced profile. */
+vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse syntax elements in the bitstream. */
+vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_CalculatePQuant(vc1_Info *pInfo);
+vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, uint32_t width, uint32_t height, vc1_bpp_type_t bptype);
+vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable);
+vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable, int8_t *pFirst, int16_t *pSecond);
+
+void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser);
+void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser);
+void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser);
+
+/* Function to handle user data. */
+vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc);
+
+/*@}*/
+
+#endif /* _VC1PARSE_H_. */
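The VC1_GET_BITS9/VC1_PEEK_BITS macros above wrap viddec_pm_get_bits/viddec_pm_peek_bits
and expect a parser-manager handle named ctxt to be in scope. A minimal sketch of the
parsing idiom they enable, mirroring the PQINDEX/HALFQP reads in
vc1_ParsePictureHeader_ProgressiveBpicture() later in this patch (the function name and
its use here are illustrative only, not part of the library):

    #include "vc1parse.h"

    /* sketch only: reads a fixed 5-bit field, then a conditional 1-bit field */
    static void example_read_pquant(void* ctxt)
    {
        uint32_t PQINDEX, HALFQP = 0;

        VC1_GET_BITS9(5, PQINDEX);      /* fixed-length 5-bit syntax element */
        if (PQINDEX <= 8)
        {
            VC1_GET_BITS9(1, HALFQP);   /* present only when PQINDEX <= 8 */
        }
        (void)HALFQP;
    }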
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c b/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c
new file mode 100755
index 0000000..4996e28
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c
@@ -0,0 +1,716 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 bitstreams.
+//
+*/
+
+#include "vc1parse.h"
+
+#ifdef VBP
+#include "viddec_pm.h"
+#endif
+
+/*----------------------------------------------------------------------------*/
+
+
+/* put one bit into a buffer
+ * used for bitplane decoding; each bit corresponds to an MB
+ * HW requires each row to start at a DW (32-bit) boundary
+ * input:  value - bit value
+ *         mbx - image width in MB
+ *         mby - image height in MB
+ *         x - x location (column) of the MB in MB units
+ *         y - y location (row) of the MB in MB units
+ * output: outp - buffer to fill
+ */
+//#define put_bit(value,x,y,mbx,mby,invert,outp)
+static inline void put_bit( uint32_t value, int x, int y, int mbx, int mby, uint8_t invert, uint32_t* outp)
+{
+    int bit;
+    uint32_t *out;
+
+    bit = mby; /* mby is otherwise unused here */
+
+    value ^= invert;
+    if (!value) return; /* assume the buffer is initialized with zeros */
+
+    out = outp;
+    /* go to the corresponding row location in DW units */
+    out += (( mbx + 31 ) >> 5) * y;
+    out += x >> 5; /* go to the corresponding column location in DW units */
+    bit = x & 0x1f; /* compute the remaining bit offset */
+    *out |= 1 << bit; /* put the bit */
+}
+
+/* if b is the bit at location (x,y), set b = b^invert and return it
+ * used for bitplane decoding; each bit corresponds to an MB
+ * HW requires each row to start at a DW (32-bit) boundary
+ * input:  x - x location (column) of the MB in MB units
+ *         y - y location (row) of the MB in MB units
+ *         mbx - image width in MB
+ *         invert - XOR value
+ * output: outp - buffer to fill
+ * returns the bit value
+ */
+static inline int xor_bit( int x, int y, int mbx, uint32_t invert, uint32_t* outp)
+{
+    int bit;
+    uint32_t *out;
+    uint8_t value;
+    //if (invert == 0) return; /* do nothing if XOR with 0 */
+
+    out = outp;
+    out += (( mbx + 31 ) >> 5) * y; /* go to the corresponding row location in DW units */
+    out += x >> 5; /* go to the corresponding column location in DW units */
+    bit = x & 0x1f; /* compute the remaining bit offset */
+
+    if (invert == 1)
+        *out ^= (1 << bit); /* XOR the bit */
+    value = (*out & (1 << bit)) >> bit; /* return the bit value */
+
+    return(value);
+
+}
+
+/* get the bit at location (x,y)
+ * used for bitplane decoding; each bit corresponds to an MB
+ * HW requires each row to start at a DW (32-bit) boundary
+ * input:  x - x location (column) of the MB in MB units
+ *         y - y location (row) of the MB in MB units
+ *         mbx - image width in MB
+ *         outp - bit buffer in dwords
+ * returns the bit value
+ */
+static inline int get_bit( int x, int y, int mbx, uint32_t* outp)
+{
+    int bit;
+    uint32_t *out;
+    uint8_t value;
+
+    out = outp;
+    out += (( mbx + 31 ) >> 5) * y; /* go to the corresponding row location in DW units */
+    out += x >> 5; /* go to the corresponding column location in DW units */
+    bit = x & 0x1f; /* compute the remaining bit offset */
+    value = (*out & (1 << bit)) >> bit; /* return the bit value */
+
+    return(value);
+
+}
+
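+/* Worked example of the packing used by put_bit()/xor_bit()/get_bit() above, with
+   sizes chosen for illustration: for a 33-MB-wide plane, each row occupies
+   (33 + 31) >> 5 = 2 dwords, so the bit for MB (x=32, y=1) lands in dword
+   2*1 + (32 >> 5) = 3 at bit position 32 & 0x1f = 0. */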
+static void vc1_InverseDiff(vc1_Bitplane *pBitplane, int32_t widthMB, int32_t heightMB)
+{
+    int32_t i, j, previousBit=0, temp;
+
+    for (i = 0; i < heightMB; i++)
+    {
+        for (j = 0; j < widthMB; j++)
+        {
+            if ((i == 0 && j == 0))
+            {
+                previousBit=xor_bit(j, i, widthMB, pBitplane->invert,
+                                    pBitplane->databits);
+            }
+            else if (j == 0) /* XOR with TOP */
+            {
+                previousBit = get_bit(0, i-1, widthMB, pBitplane->databits);
+                temp=xor_bit(j, i, widthMB, previousBit,
+                             pBitplane->databits);
+                previousBit = temp;
+            }
+            //TODO isSameAsTop can be optimized
+            else if (((i > 0) && (previousBit !=
+                                  get_bit(j, i-1, widthMB, pBitplane->databits))))
+            {
+                temp=xor_bit(j, i, widthMB, pBitplane->invert,
+                             pBitplane->databits);
+                previousBit = temp;
+            }
+            else
+            {
+                temp=xor_bit(j, i, widthMB, previousBit,
+                             pBitplane->databits);
+                previousBit = temp;
+            }
+        }
+    }
+}
+
+
+/*----------------------------------------------------------------------------*/
+/* implements normal-2 mode bitplane decoding, SMPTE 421M 8.7.3.2
+ * width, height are in MB units.
+ */
+static void vc1_Norm2ModeDecode(void* ctxt, vc1_Bitplane *pBitplane,
+                                int32_t width, int32_t height)
+{
+    int32_t i;
+    int32_t tmp_databits = 0;
+
+    int32_t row[2], col[2];
+    int8_t tmp=0;
+
+    /* disable pBitplane->invert during the Norm2 decode stage of
+       VC1_BITPLANE_DIFF2_MODE */
+    if (pBitplane->imode == VC1_BITPLANE_DIFF2_MODE)
+    {
+        tmp = pBitplane->invert;
+        pBitplane->invert=0;
+    }
+
+    // By default, initialize the values for the even case
+    col[0] = 0; /* i%width; */
+    row[0] = 0; /* i/width; */
+    col[1] = 1; /* (i+1)%width; */
+    row[1] = 0; /* (i+1)/width; */
+
+    // If width*height is odd, the first bit is the value of the bitplane
+    // for the first macroblock
+    if ((width*height) & 1) /* first bit if the size is odd */
+    {
+        VC1_GET_BITS(1, tmp_databits);
+        put_bit(tmp_databits, 0, 0, width, height, pBitplane->invert,
+                pBitplane->databits);
+
+        // Modify the initialization for odd sizes
+        col[0] = 1; /* i%width; */
+        col[1] = 2; /* (i+1)%width; */
+
+        // Consider the special case where width is 1
+        if (width == 1)
+        {
+            col[0] = 0; /* i%width; */
+            row[0] = 1; /* i/width; */
+            col[1] = 0; /* (i+1)%width; */
+            row[1] = 2; /* (i+1)/width; */
+        }
+    }
+
+    /* decode every pair of bits in natural scan order */
+    for (i = (width*height) & 1; i < (width*height/2)*2; i += 2)
+    {
+        int32_t tmp = 0;
+
+        //col[0]=i%width;
+        //row[0]=i/width;
+        //col[1]=(i+1)%width;
+        //row[1]=(i+1)/width;
+
+        VC1_GET_BITS(1, tmp);
+        if (tmp == 0)
+        {
+            put_bit(0, col[0],row[0], width, height, pBitplane->invert,
+                    pBitplane->databits);
+            put_bit(0, col[1],row[1], width, height, pBitplane->invert,
+                    pBitplane->databits);
+        }
+        else
+        {
+            VC1_GET_BITS(1, tmp);
+            if (tmp == 1)
+            {
+                put_bit(1, col[0],row[0], width, height, pBitplane->invert,
+                        pBitplane->databits);
+                put_bit(1, col[1],row[1], width, height, pBitplane->invert,
+                        pBitplane->databits);
+            }
+            else
+            {
+                VC1_GET_BITS(1, tmp);
+                if (tmp == 0)
+                {
+                    put_bit(1, col[0],row[0], width, height, pBitplane->invert,
+                            pBitplane->databits);
+                    put_bit(0, col[1],row[1], width, height, pBitplane->invert,
+                            pBitplane->databits);
+                }
+                else
+                {
+                    put_bit(0, col[0],row[0], width, height, pBitplane->invert,
+                            pBitplane->databits);
+                    put_bit(1, col[1],row[1], width, height, pBitplane->invert,
+                            pBitplane->databits);
+                }
+            }
+        }
+
+        // Consider the special case where width is 1
+        if (width == 1)
+        {
+            row[0] += 2;
+            row[1] += 2;
+        }
+        else
+        {
+            col[0] += 2; /* i%width; */
+            if ( col[0] >= width )
+            {
+                // For odd sizes, col[0] can alternately start at 0 and 1
+                col[0] -= width;
+                row[0]++;
+            }
+
+            col[1] += 2; /* (i+1)%width; */
+            if ( col[1] >= width )
+            {
+                // For odd sizes, col[1] can alternately start at 0 and 1
+                col[1] -= width;
+                row[1]++;
+            }
+        }
+    }
+
+    /* restore the inversion flag */
+    pBitplane->invert=tmp;
+}
+
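+/* Norm-2 pair codewords consumed by the loop above (derived from the branch
+   structure; see SMPTE 421M 8.7.3.2):
+ *   0   -> both bits of the pair are 0
+ *   11  -> both bits are 1
+ *   100 -> first bit 1, second bit 0
+ *   101 -> first bit 0, second bit 1
+ */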
+/*----------------------------------------------------------------------------*/
+/* compute normal-6 mode bitplane decoding
+ * the algorithm is described in SMPTE 421M 8.7.3.4
+ * width, height are in MB units.
+ */
+static void vc1_Norm6ModeDecode(void* ctxt, vc1_Bitplane *pBitplane,
+                                int32_t width, int32_t height)
+{
+    vc1_Status status;
+    int32_t i, j, k;
+    int32_t ResidualX = 0;
+    int32_t ResidualY = 0;
+    uint8_t _2x3tiled = (((width%3)!=0)&&((height%3)==0));
+
+    int32_t row, col;
+    int8_t tmp=0;
+
+    /* disable pBitplane->invert during the Norm6 decode stage of
+       VC1_BITPLANE_DIFF6_MODE */
+    if (pBitplane->imode == VC1_BITPLANE_DIFF6_MODE)
+    {
+        tmp = pBitplane->invert;
+        pBitplane->invert=0;
+    }
+
+    if (_2x3tiled)
+    {
+        int32_t sizeW = width/2;
+        int32_t sizeH = height/3;
+
+        for (i = 0; i < sizeH; i++)
+        {
+            row = 3*i; /* compute the row location for the tile */
+
+            for (j = 0; j < sizeW; j++)
+            {
+                col = 2*j + (width & 1); /* compute the column location for the tile */
+
+                /* get k=sum(bi*2^i) where bi is the ith bit of the tile */
+                status = vc1_DecodeHuffmanOne(ctxt, &k, VC1_BITPLANE_K_TBL);
+                VC1_ASSERT(status == VC1_STATUS_OK);
+
+                /* put the bits in the tile */
+                put_bit(k&1, col, row, width, height, pBitplane->invert,
+                        pBitplane->databits);
+                put_bit(((k&2)>>1), col+1, row, width, height,
+                        pBitplane->invert,pBitplane->databits);
+
+                put_bit(((k&4)>>2), col, row+1, width, height,
+                        pBitplane->invert,pBitplane->databits);
+                put_bit(((k&8)>>3), col+1, row+1, width, height,
+                        pBitplane->invert,pBitplane->databits);
+
+                put_bit(((k&16)>>4), col, row+2, width, height,
+                        pBitplane->invert,pBitplane->databits);
+                put_bit(((k&32)>>5), col+1, row+2, width,
+                        height,pBitplane->invert, pBitplane->databits);
+            }
+        }
+        ResidualX = width & 1;
+        ResidualY = 0;
+    }
+    else /* 3x2 tiles */
+    {
+        int32_t sizeW = width/3;
+        int32_t sizeH = height/2;
+
+        for (i = 0; i < sizeH; i++)
+        {
+            row = 2*i + (height&1) ; /* compute the row location for the tile */
+
+            for (j = 0; j < sizeW; j++)
+            {
+                col = 3*j + (width%3); /* compute the column location for the tile */
+
+                /* get k=sum(bi*2^i) where bi is the ith bit of the tile */
+                status = vc1_DecodeHuffmanOne(ctxt, &k, VC1_BITPLANE_K_TBL);
+                VC1_ASSERT(status == VC1_STATUS_OK);
+
+                put_bit(k&1, col, row, width, height,pBitplane->invert,
+                        pBitplane->databits);
+                put_bit((k&2)>>1, col+1, row, width, height, pBitplane->invert,
+                        pBitplane->databits);
+                put_bit((k&4)>>2, col+2, row, width, height, pBitplane->invert,
+                        pBitplane->databits);
+
+                put_bit((k&8)>>3, col, row+1, width, height,pBitplane->invert,
+                        pBitplane->databits);
+                put_bit((k&16)>>4, col+1, row+1, width,
+                        height,pBitplane->invert, pBitplane->databits);
+                put_bit((k&32)>>5, col+2, row+1, width,
+                        height,pBitplane->invert, pBitplane->databits);
+            }
+        }
+        ResidualX = width % 3;
+        ResidualY = height & 1;
+    }
+
+    for (i = 0; i < ResidualX; i++)
+    {
+        int32_t ColSkip;
+        VC1_GET_BITS(1, ColSkip);
+
+        //if (1 == ColSkip)
+        {
+            for (j = 0; j < height; j++)
+            {
+                int32_t Value = 0;
+                if (1 == ColSkip) VC1_GET_BITS(1, Value);
+
+                put_bit(Value, i, j, width, height,pBitplane->invert,pBitplane->databits);
+            }
+        }
+    }
+
+    for (j = 0; j < ResidualY; j++)
+    {
+        int32_t RowSkip;
+        VC1_GET_BITS(1, RowSkip);
+        //if (1 == RowSkip)
+        {
+            for (i = ResidualX; i < width; i++)
+            {
+                int32_t Value = 0;
+                if (1 == RowSkip) VC1_GET_BITS(1, Value);
+
+                put_bit(Value, i, j, width, height,pBitplane->invert,pBitplane->databits);
+            }
+        }
+    }
+
+    /* restore the inversion flag */
+    pBitplane->invert=tmp;
+
+}
+
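+/* Worked example of the tile selection above (sizes chosen for illustration):
+   an 11x9 MB plane has width % 3 != 0 and height % 3 == 0, so it is coded in
+   2x3 tiles: 5 tile columns starting at column 11 & 1 = 1 (covering columns
+   1..10), 3 tile rows covering rows 0..8, and the single leftover column 0
+   (ResidualX = 1) is sent with the ColSkip syntax. */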
+/*----------------------------------------------------------------------------*/
+/* initialize a bitplane to an array of zeros
+ * each row begins on a dword boundary
+ * input:
+ *     width: width in MB units
+ *     height: height in MB units
+ * returns the (even) bitplane size in dwords
+ */
+int initBitplane(vc1_Bitplane *pBitplane,uint32_t width, uint32_t height)
+{
+    int i;
+    int numDword = 0;
+
+    numDword = ((width + 31)>>5) * height;
+    numDword += numDword & 1; /* add 1 in case numDword is odd */
+
+    for (i = 0; i < numDword; i++)
+        pBitplane->databits[i] = 0;
+
+    return(numDword);
+}
+
+/*----------------------------------------------------------------------------*/
+/* modified IPP code for bitplane decoding
+ *     width: width in MB units
+ *     height: height in MB units
+ */
+vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo,
+                              uint32_t width, uint32_t height, vc1_bpp_type_t bpnum)
+{
+    uint32_t i, j;
+    uint32_t tempValue;
+    vc1_Status status = VC1_STATUS_OK;
+    uint32_t biplaneSz; /* bitplane size in dwords */
+    vc1_Bitplane bp;
+    vc1_Bitplane *bpp = &bp;
+
+    // By default, set imode to raw
+    pInfo->metadata.bp_raw[bpnum - VIDDEC_WORKLOAD_VC1_BITPLANE0] = true;
+
+    // bitplane data is temporarily stored in the vc1 context
+    bpp->databits = pInfo->bitplane;
+
+    /* init the bitplane to zero; the function returns the bitplane buffer size in dwords */
+    biplaneSz = initBitplane(bpp, width, height);
+
+    VC1_GET_BITS(1, tempValue);
+    bpp->invert = (uint8_t) tempValue;
+
+    bpp->imode = -1;
+
+    if ((status = vc1_DecodeHuffmanOne(ctxt, &bpp->imode,VC1_BITPLANE_IMODE_TBL)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    // If the imode is VC1_BITPLANE_RAW_MODE, the bitplane information is in the MB layer and
+    // there is no need to parse for bitplane information in the picture layer; only bits need
+    // to be appropriately set in the block control register.
+    // In all other modes, bitplane information follows and needs to be parsed and sent to the decoder.
+
+    if (bpp->imode == VC1_BITPLANE_NORM2_MODE)
+    {
+        vc1_Norm2ModeDecode(ctxt, bpp, width, height);
+    }
+    else if (bpp->imode == VC1_BITPLANE_DIFF2_MODE)
+    {
+        vc1_Norm2ModeDecode(ctxt, bpp, width, height);
+        vc1_InverseDiff(bpp, width, height);
+    }
+    else if (bpp->imode == VC1_BITPLANE_NORM6_MODE)
+    {
+        vc1_Norm6ModeDecode(ctxt, bpp, width, height);
+    }
+    else if (bpp->imode == VC1_BITPLANE_DIFF6_MODE)
+    {
+        vc1_Norm6ModeDecode(ctxt, bpp, width, height);
+        vc1_InverseDiff(bpp, width, height);
+    }
+    else if (bpp->imode == VC1_BITPLANE_ROWSKIP_MODE)
+    {
+        for (i = 0; i < height; i++)
+        {
+            VC1_GET_BITS(1, tempValue);
+            /* if tempValue == 0, the row stays all zeros (the buffer is pre-zeroed) */
+            if (tempValue == 1)
+            {
+                for (j = 0; j < width; j++)
+                {
+                    VC1_GET_BITS(1, tempValue);
+                    put_bit( tempValue, j, i, width, height, bpp->invert,bpp->databits);
+                }
+            }
+            else if (bpp->invert) { //TO TEST
+                for (j = 0; j < width; j++) {
+                    put_bit( 0, j, i, width, height, bpp->invert, bpp->databits);
+                }
+            }
+        }
+
+    }
+    else if (bpp->imode == VC1_BITPLANE_COLSKIP_MODE)
+    {
+        for (i = 0; i < width; i++)
+        {
+            VC1_GET_BITS(1, tempValue);
+            /* if tempValue == 0 and invert == 0, the column stays all zeros */
+            if (tempValue == 1)
+            {
+                for (j = 0; j < height; j++)
+                {
+                    VC1_GET_BITS(1, tempValue);
+                    put_bit( tempValue, i, j, width, height, bpp->invert, bpp->databits);
+                }
+            }
+            else if (bpp->invert) { // fill the column with ones
+                for (j = 0; j < height; j++) {
+                    put_bit( 0, i, j, width, height, bpp->invert, bpp->databits);
+                }
+            }//end for else
+        }
+    }
+
+    if (bpp->imode != VC1_BITPLANE_RAW_MODE)
+    {
+        uint32_t* pl;
+        int sizeinbytes,nitems,i;
+        viddec_workload_item_t wi;
+        uint32_t *bit_dw;
+
+        pInfo->metadata.bp_raw[bpnum - VIDDEC_WORKLOAD_VC1_BITPLANE0] = false;
+
+        sizeinbytes = ((( width + 31 ) / 32)) * (height) * 4;
+
+        pl = bpp->databits;
+        bit_dw = bpp->databits;
+
+        // How many payloads must be generated
+        nitems = 
(sizeinbytes + (sizeof(wi.data.data_payload) - 1)) / + sizeof(wi.data.data_payload); + + // Dump DMEM to an array of workitems + for ( i = 0; i < nitems; i++ ) + { + wi.vwi_type = bpnum; + wi.data.data_offset = (char *)pl - (char *)bit_dw; // offset within struct + + wi.data.data_payload[0] = pl[0]; + wi.data.data_payload[1] = pl[1]; + pl += 2; + + viddec_pm_append_workitem( ctxt, &wi, false); + } + } + +#ifdef VBP + { + viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)ctxt; + vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)(cxt->codec_data); + + if (biplaneSz > 4096) + { + /* bigger than we got, so let's bail with a non meaningful error. */ + return VC1_STATUS_ERROR; + } + + /* At this point bp contains the information we need for the bit-plane */ + /* bpnum is the enumeration that tells us which bitplane this is for. */ + /* pInfo->picLayerHeader.ACPRED is one of the bitplanes I need to fill.*/ + switch (bpnum) + { + case VIDDEC_WORKLOAD_VC1_BITPLANE0: + if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) + { + if (bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.FORWARDMB.invert = bp.invert; + pInfo->picLayerHeader.FORWARDMB.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_forwardmb[i] = bp.databits[i]; + } + pInfo->picLayerHeader.FORWARDMB.databits = parser->bp_forwardmb; + } + else + { + pInfo->picLayerHeader.raw_FORWARDMB = 1; + } + } + if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) ) + { + if (bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.ACPRED.invert = bp.invert; + pInfo->picLayerHeader.ACPRED.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_acpred[i] = bp.databits[i]; + } + pInfo->picLayerHeader.ACPRED.databits = parser->bp_acpred; + } + else + { + pInfo->picLayerHeader.raw_ACPRED = 1; + } + } + if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + { + if (bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.MVTYPEMB.invert = bp.invert; + pInfo->picLayerHeader.MVTYPEMB.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_mvtypemb[i] = bp.databits[i]; + } + pInfo->picLayerHeader.MVTYPEMB.databits = parser->bp_mvtypemb; + } + else + { + pInfo->picLayerHeader.raw_MVTYPEMB = 1; + } + } + break; + case VIDDEC_WORKLOAD_VC1_BITPLANE1: + if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) ) + { + if (bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.OVERFLAGS.invert = bp.invert; + pInfo->picLayerHeader.OVERFLAGS.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_overflags[i] = bp.databits[i]; + } + pInfo->picLayerHeader.OVERFLAGS.databits = parser->bp_overflags; + } + else + { + pInfo->picLayerHeader.raw_OVERFLAGS = 1; + } + } + if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) ) + { + if (bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.SKIPMB.invert = bp.invert; + pInfo->picLayerHeader.SKIPMB.imode = bp.imode; + for (i = 0; i < biplaneSz; i++) + { + parser->bp_skipmb[i] = bp.databits[i]; + } + pInfo->picLayerHeader.SKIPMB.databits = parser->bp_skipmb; + } + else + { + pInfo->picLayerHeader.raw_SKIPMB = 1; + } + } + break; + case VIDDEC_WORKLOAD_VC1_BITPLANE2: + if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) ) + { + if (bp.imode != VC1_BITPLANE_RAW_MODE) + { + pInfo->picLayerHeader.DIRECTMB.invert = bp.invert; + pInfo->picLayerHeader.DIRECTMB.imode = 
bp.imode;
+                    for (i = 0; i < biplaneSz; i++)
+                    {
+                        parser->bp_directmb[i] = bp.databits[i];
+                    }
+                    pInfo->picLayerHeader.DIRECTMB.databits = parser->bp_directmb;
+                }
+                else
+                {
+                    pInfo->picLayerHeader.raw_DIRECTMB = 1;
+                }
+            }
+            if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME)
+                    || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) )
+            {
+                if (bp.imode != VC1_BITPLANE_RAW_MODE)
+                {
+                    pInfo->picLayerHeader.FIELDTX.invert = bp.invert;
+                    pInfo->picLayerHeader.FIELDTX.imode = bp.imode;
+                    for (i = 0; i < biplaneSz; i++)
+                    {
+                        parser->bp_fieldtx[i] = bp.databits[i];
+                    }
+                    pInfo->picLayerHeader.FIELDTX.databits = parser->bp_fieldtx;
+                }
+                else
+                {
+                    pInfo->picLayerHeader.raw_FIELDTX = 1;
+                }
+            }
+            break;
+        }
+    }
+#endif
+
+    return status;
+}
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_bpic.c b/mixvbp/vbp_plugin/vc1/vc1parse_bpic.c
new file mode 100755
index 0000000..15c9f53
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_bpic.c
@@ -0,0 +1,99 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses the VC-1 picture layer for progressive B pictures in simple
+// or main profile bitstreams.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse the picture layer. This function parses a progressive B picture for main
+ * profile bitstreams. The parser starts after PTYPE has been parsed but stops
+ * before parsing of the macroblock layer.
+ * Table 21 of SMPTE 421M after processing up to PTYPE for a B picture.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+                                        &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) !=
+            VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+    if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (picLayerHeader->PQINDEX <= 8)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+    }
+    else picLayerHeader->HALFQP=0;
+
+    if (md->QUANTIZER == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+    }
+
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+    picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ?
+ VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) + { + return status; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(2, picLayerHeader->MVTAB); + VC1_GET_BITS9(2, picLayerHeader->CBPTAB); + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_bpic_adv.c b/mixvbp/vbp_plugin/vc1/vc1parse_bpic_adv.c new file mode 100755 index 0000000..d7a5ccd --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/vc1parse_bpic_adv.c @@ -0,0 +1,256 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for progressive B picture in advanced +// profile bitstream. +// +*/ + +#include "vc1parse.h" + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses progressive B picture for advanced + * profile bitstream. + * Table 22 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ? + VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) + { + return status; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(2, picLayerHeader->MVTAB); + VC1_GET_BITS9(2, picLayerHeader->CBPTAB); + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. 
*/ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace B frame for advanced + * profile bitstream. + * Table 84 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != + VC1_STATUS_OK) + { + return status; + } + + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + VC1_GET_BITS9(1, picLayerHeader->INTCOMP); + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) + { + return status; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + // EPC picLayerHeader->MVMODE = VC1_MVMODE_1MV; + VC1_GET_BITS9(2, picLayerHeader->MBMODETAB); + VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */ + VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ + VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */ + VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace B field for advanced + * profile bitstream. + * Table 89 of SMPTE 421M after processing up to BFRACTION by + * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock + * layer. 
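 * MVMODE below is a one- to three-bit VLC (codes 1, 01, 001 and 000) that indexes the low- or high-rate MVMODE table selected by PQUANT.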
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint8_t bit_count; + const uint8_t *table; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader* picLayerHeader = &pInfo->picLayerHeader; + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else + picLayerHeader->HALFQP = 0; + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + if (md->POSTPROCFLAG == 1) + { + VC1_GET_BITS9(2, picLayerHeader->POSTPROC); + } + + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQUANT > 12) + table = VC1_MVMODE_LOW_TBL; + else + table = VC1_MVMODE_HIGH_TBL; + + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + while ((picLayerHeader->MVMODE == 0) && (bit_count < 2)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + bit_count++; + } + if ((bit_count == 2) && (picLayerHeader->MVMODE == 0)) + bit_count++; + picLayerHeader->MVMODE = table[bit_count]; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, (md->heightMB+1)/2, BPP_FORWARDMB)) != + VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(3, picLayerHeader->MBMODETAB); + VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. */ + VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ + + if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) + { + VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ + } + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_common_defs.h b/mixvbp/vbp_plugin/vc1/vc1parse_common_defs.h new file mode 100755 index 0000000..e474800 --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/vc1parse_common_defs.h @@ -0,0 +1,645 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Common definitions for parsing VC-1 bitstreams. +// +*/ + +#ifndef _VC1PARSE_COMMON_DEFS_H_ +#define _VC1PARSE_COMMON_DEFS_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +#include <stdint.h> + + /** @weakgroup vc1parse_common_defs VC-1 Common Definitions */ + /** @ingroup vc1parse_common_defs */ + /*@{*/ + + /** This defines the maximum number of horizontal macroblocks in a picture. 
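For 1920-pixel-wide content this ceiling division gives (1920+15)/16 = 120 macroblocks; the cap below corresponds to a 2048-pixel width.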
*/ +#define VC1_WIDTH_MB_MAX ((2048+15)/16) + + /** This defines the maximum number of vertical macroblocks in a picture. */ +#define VC1_HEIGHT_MB_MAX ((1088+15)/16) + + /** This defines the maximum number of bitplane storage chunks per picture. */ +#define VC1_MAX_BITPLANE_CHUNKS 3 + + /** This defines the value for an invalid BFRACTION syntax element. */ +#define VC1_BFRACTION_INVALID 0 + + /** This defines the value for the BFRACTION syntax element that indicates a BI + picture. */ +#define VC1_BFRACTION_BI 9 + + /** This enumeration defines the various supported profiles as defined in + the PROFILE syntax element. */ + enum + { + VC1_PROFILE_SIMPLE, + VC1_PROFILE_MAIN, + VC1_PROFILE_RESERVED, + VC1_PROFILE_ADVANCED + }; + + /** This enumeration defines the frame coding mode as defined in the FCM syntax + element. */ + enum + { + VC1_FCM_PROGRESSIVE, + VC1_FCM_FRAME_INTERLACE = 2, + VC1_FCM_FIELD_INTERLACE = 3 + }; + + /** This enumeration defines the various bitplane types as defined in the IMODE + syntax element. */ + enum + { + VC1_BITPLANE_RAW_MODE, + VC1_BITPLANE_NORM2_MODE, + VC1_BITPLANE_DIFF2_MODE, + VC1_BITPLANE_NORM6_MODE, + VC1_BITPLANE_DIFF6_MODE, + VC1_BITPLANE_ROWSKIP_MODE, + VC1_BITPLANE_COLSKIP_MODE + }; + + /** This enumeration defines the various motion vector modes as defined in + the MVMODE or MVMODE2 syntax element. */ + enum + { + VC1_MVMODE_1MV, +#ifdef VBP + VC1_MVMODE_HPELBI_1MV, + VC1_MVMODE_HPEL_1MV, +#else + VC1_MVMODE_HPEL_1MV, + VC1_MVMODE_HPELBI_1MV, +#endif + VC1_MVMODE_MIXED_MV, + VC1_MVMODE_INTENSCOMP + }; + + /** This enumeration defines the extended differential motion vector range flag + as defined in the DMVRANGE syntax element. */ + enum + { + VC1_DMVRANGE_NONE, + VC1_DMVRANGE_HORIZONTAL_RANGE, + VC1_DMVRANGE_VERTICAL_RANGE, + VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE + }; + + /** This enumeration defines the intensity compensation field as defined in + the INTCOMPFIELD syntax element. */ + enum + { + VC1_INTCOMP_TOP_FIELD = 1, + VC1_INTCOMP_BOTTOM_FIELD = 2, + VC1_INTCOMP_BOTH_FIELD = 3 + }; + + /** This enumeration defines the differential quantizer profiles as defined in + the DQPROFILE syntax element. */ + enum + { + VC1_DQPROFILE_ALL4EDGES, + VC1_DQPROFILE_DBLEDGES, + VC1_DQPROFILE_SNGLEDGES, + VC1_DQPROFILE_ALLMBLKS + }; + + /** This enumeration defines the conditional overlap flag as defined in the CONDOVER + syntax element. */ + enum + { + VC1_CONDOVER_FLAG_NONE = 0, + VC1_CONDOVER_FLAG_ALL = 2, + VC1_CONDOVER_FLAG_SOME = 3 + }; + + /** This enumeration defines the type of quantizer to be used and is derived + from bitstream syntax. */ + enum + { + VC1_QUANTIZER_NONUNIFORM, + VC1_QUANTIZER_UNIFORM + }; + + /** This structure represents the various bitplanes within a VC-1 bitstream. 
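The invert flag and imode mirror the INVERT and IMODE syntax elements, while databits points to the decoded plane, packed one bit per macroblock.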
*/ + typedef struct + { + uint8_t invert; + int32_t imode; + uint32_t *databits; + } vc1_Bitplane; + +#ifdef VBP +#define VC1_MAX_HRD_NUM_LEAKY_BUCKETS 32 + + typedef struct + { + uint32_t HRD_RATE; /** Maximum bit rate in bits per second */ + uint32_t HRD_BUFFER; /** Buffer size in bits */ + uint32_t HRD_FULLNESS; /** Buffer fullness in complete bits */ + uint32_t HRD_FULLFRACTION; /** Numerator of fractional bit buffer fullness count */ + uint32_t HRD_FULLDENOMINATOR; /** Denominator of fractional bit buffer fullness count */ + } vc1_leaky_bucket; + + typedef struct _vc1_hrd_state + { + uint8_t BIT_RATE_EXPONENT; /** Buckets + (0 if none specified) */ + uint8_t BUFFER_SIZE_EXPONENT; + vc1_leaky_bucket sLeakyBucket[VC1_MAX_HRD_NUM_LEAKY_BUCKETS]; /** Per-bucket information */ + } vc1_hrd_state, *vc1_hrd_state_ptr; +#endif + + /** This structure represents all bitstream metadata needed for register programming. */ + typedef struct + { + // From Sequence Layer for Advanced Profile + uint8_t PROFILE; /** 2 bit(s). */ + uint8_t LEVEL; /** 3 bit(s). */ + uint8_t CHROMAFORMAT; /** 2 bit(s). */ + uint8_t FRMRTQ; /** 3 bit(s). */ + + uint8_t BITRTQ; /** 5 bit(s). */ + uint8_t POSTPROCFLAG; /** 1 bit(s). */ + uint8_t PULLDOWN; /** 1 bit(s). */ + uint8_t INTERLACE; /** 1 bit(s). */ + + uint8_t TFCNTRFLAG; /** 1 bit(s). */ + uint8_t FINTERPFLAG; /** 1 bit(s). */ + uint8_t PSF; /** 1 bit(s). */ + uint8_t HRD_NUM_LEAKY_BUCKETS; /** 5 bit(s). */ + + // From STRUCT_C + uint8_t MAXBFRAMES; /** 3 bit(s). */ + uint8_t MULTIRES; /** 1 bit(s). */ + + // From EntryPoint Layer for Advanced Profile + uint8_t BROKEN_LINK; + uint8_t CLOSED_ENTRY; + + uint8_t PANSCAN_FLAG; + uint8_t REFDIST_FLAG; + uint8_t LOOPFILTER; + uint8_t FASTUVMC; + + uint8_t EXTENDED_MV; + uint8_t DQUANT; + uint8_t VSTRANSFORM; + uint8_t OVERLAP; + + uint8_t QUANTIZER; + uint8_t EXTENDED_DMV; + uint8_t RANGE_MAPY_FLAG; + uint8_t RANGE_MAPY; + + uint8_t RANGE_MAPUV_FLAG; + uint8_t RANGE_MAPUV; + + // From Picture Header + uint8_t RANGERED; /** 1 bit(s). */ + uint8_t RNDCTRL; /** 1 bit(s), rcv specific. */ + + // REFDIST is present only in field-interlaced mode on I/I, I/P, P/I, P/P frames + // From Canmore, looks like this needs to be propagated to following B frames + uint8_t REFDIST; + uint8_t INTCOMPFIELD; /** ? bit(s)? */ + uint8_t LUMSCALE2; /** 6 bit(s). */ + uint8_t LUMSHIFT2; /** 6 bit(s). */ + + uint8_t bp_raw[VC1_MAX_BITPLANE_CHUNKS]; + uint8_t res_1; + + // From SequenceLayerHeader, EntryPointHeader or Struct_A + uint16_t width; + uint16_t height; + uint16_t widthMB; + uint16_t heightMB; + +#ifdef VBP + uint8_t COLOR_FORMAT_FLAG; + uint8_t MATRIX_COEF; + uint8_t SYNCMARKER; + uint8_t ASPECT_RATIO_FLAG; + uint8_t ASPECT_RATIO; + uint8_t ASPECT_HORIZ_SIZE; + uint8_t ASPECT_VERT_SIZE; + vc1_hrd_state hrd_initial_state; +#endif + + } vc1_metadata_t; + + /** This structure represents the sequence header for advanced profile. 
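Each 32-bit word is exposed both as a packed uint32_t and as bitfields; the field order flips under MFDBIGENDIAN so the packed layout is identical on big-endian builds.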
*/ + typedef struct + { + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned BITRTQ_POSTPROC:5; + unsigned FRMRTQ_POSTPROC:3; + unsigned COLORDIFF_FORMAT:2; + unsigned LEVEL:3; + unsigned PROFILE:2; + unsigned pad:17; + } seq_flags; +#else + struct + { + unsigned pad:17; + unsigned PROFILE:2; + unsigned LEVEL:3; + unsigned COLORDIFF_FORMAT:2; + unsigned FRMRTQ_POSTPROC:3; + unsigned BITRTQ_POSTPROC:5; + } seq_flags; +#endif + uint32_t flags; + }; + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned DISPLAY_EXT:1; + unsigned PSF:1; + unsigned RESERVED:1; + unsigned FINTERPFLAG:1; + unsigned TFCNTRFLAG:1; + unsigned INTERLACE:1; + unsigned PULLDOWN:1; + unsigned MAX_CODED_HEIGHT:12; + unsigned MAX_CODED_WIDTH:12; + unsigned POSTPROCFLAG:1; + } seq_max_size; +#else + struct + { + unsigned POSTPROCFLAG:1; + unsigned MAX_CODED_WIDTH:12; + unsigned MAX_CODED_HEIGHT:12; + unsigned PULLDOWN:1; + unsigned INTERLACE:1; + unsigned TFCNTRFLAG:1; + unsigned FINTERPFLAG:1; + unsigned RESERVED:1; + unsigned PSF:1; + unsigned DISPLAY_EXT:1; + } seq_max_size; +#endif + uint32_t max_size; + }; + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned ASPECT_RATIO_FLAG:1; + unsigned DISP_VERT_SIZE:14; + unsigned DISP_HORIZ_SIZE:14; + unsigned pad:3; + } seq_disp_size; +#else + struct + { + unsigned pad:3; + unsigned DISP_HORIZ_SIZE:14; + unsigned DISP_VERT_SIZE:14; + unsigned ASPECT_RATIO_FLAG:1; + } seq_disp_size; +#endif + uint32_t disp_size; + }; + + uint8_t ASPECT_RATIO; // 4 bits + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned ASPECT_VERT_SIZE:8; + unsigned ASPECT_HORIZ_SIZE:8; + unsigned pad:16; + } seq_aspect_size; +#else + struct + { + unsigned pad:16; + unsigned ASPECT_HORIZ_SIZE:8; + unsigned ASPECT_VERT_SIZE:8; + } seq_aspect_size; +#endif + uint32_t aspect_size; + }; + + uint8_t FRAMERATE_FLAG; // 1b + uint8_t FRAMERATEIND; // 1b + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned FRAMERATEDR:4; + unsigned FRAMERATENR:8; + unsigned pad:20; + } seq_framerate_fraction; +#else + struct + { + unsigned pad:20; + unsigned FRAMERATENR:8; + unsigned FRAMERATEDR:4; + } seq_framerate_fraction; +#endif + uint32_t framerate_fraction; + }; + + uint16_t FRAMERATEEXP; // 16b + uint8_t COLOR_FORMAT_FLAG; // 1b + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned MATRIX_COEF:8; + unsigned TRANSFER_CHAR:8; + unsigned COLOR_PRIM:8; + unsigned pad:8; + } seq_color_format; +#else + struct + { + unsigned pad:8; + unsigned COLOR_PRIM:8; + unsigned TRANSFER_CHAR:8; + unsigned MATRIX_COEF:8; + } seq_color_format; +#endif + uint32_t color_format; + }; + + uint8_t HRD_PARAM_FLAG; // 1b + uint8_t HRD_NUM_LEAKY_BUCKETS; // 5b + // No need to parse remaining items - not needed so far + } vc1_SequenceLayerHeader; + + /** This structure represents metadata for struct c. 
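STRUCT_C and STRUCT_A hold the packed sequence header words carried by the RCV container for simple and main profile streams.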
*/ + typedef struct + { + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned res6:1; + unsigned FINTERPFLAG:1; + unsigned QUANTIZER:2; + unsigned MAXBFRAMES:3; + unsigned RANGERED:1; + unsigned SYNCMARKER:1; + unsigned OVERLAP:1; + unsigned res5:1; + unsigned VSTRANSFORM:1; + unsigned DQUANT:2; + unsigned EXTENDED_MV:1; + unsigned FASTUVMC:1; + unsigned res4:1; + unsigned MULTIRES:1; + unsigned res3:1; + unsigned LOOPFILTER:1; + unsigned BITRTQ_POSTPROC:5; + unsigned FRMRTQ_POSTPROC:3; + unsigned PROFILE:4; + } struct_c; +#else + struct + { + unsigned PROFILE:4; + unsigned FRMRTQ_POSTPROC:3; + unsigned BITRTQ_POSTPROC:5; + unsigned LOOPFILTER:1; + unsigned res3:1; + unsigned MULTIRES:1; + unsigned res4:1; + unsigned FASTUVMC:1; + unsigned EXTENDED_MV:1; + unsigned DQUANT:2; + unsigned VSTRANSFORM:1; + unsigned res5:1; + unsigned OVERLAP:1; + unsigned SYNCMARKER:1; + unsigned RANGERED:1; + unsigned MAXBFRAMES:3; + unsigned QUANTIZER:2; + unsigned FINTERPFLAG:1; + unsigned res6:1; + } struct_c; +#endif + uint32_t struct_c_rcv; + }; + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned VERT_SIZE:16; + unsigned HORIZ_SIZE:16; + } struct_a; +#else + struct + { + unsigned HORIZ_SIZE:16; + unsigned VERT_SIZE:16; + } struct_a; +#endif + uint32_t struct_a_rcv; + }; + + } vc1_RcvSequenceHeader; + + /** This structure represents metadata for entry point layers. */ + typedef struct + { + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned QUANTIZER:2; + unsigned OVERLAP:1; + unsigned VSTRANSFORM:1; + unsigned DQUANT:2; + unsigned EXTENDED_MV:1; + unsigned FASTUVMC:1; + unsigned LOOPFILTER:1; + unsigned REFDIST_FLAG:1; + unsigned PANSCAN_FLAG:1; + unsigned CLOSED_ENTRY:1; + unsigned BROKEN_LINK:1; + unsigned pad1:19; + } ep_flags; +#else + struct + { + unsigned pad1:19; + unsigned BROKEN_LINK:1; + unsigned CLOSED_ENTRY:1; + unsigned PANSCAN_FLAG:1; + unsigned REFDIST_FLAG:1; + unsigned LOOPFILTER:1; + unsigned FASTUVMC:1; + unsigned EXTENDED_MV:1; + unsigned DQUANT:2; + unsigned VSTRANSFORM:1; + unsigned OVERLAP:1; + unsigned QUANTIZER:2; + } ep_flags; +#endif + uint32_t flags; + }; + + // Skipping HRD data because it is not needed for our processing + + union + { +#ifndef MFDBIGENDIAN + struct + { + unsigned CODED_HEIGHT:12; + unsigned CODED_WIDTH:12; + unsigned pad2:8; + } ep_size; +#else + struct + { + unsigned pad2:8; + unsigned CODED_WIDTH:12; + unsigned CODED_HEIGHT:12; + } ep_size; +#endif + uint32_t size; + }; + + uint8_t CODED_SIZE_FLAG; /** 1 bit(s). */ + uint8_t EXTENDED_DMV; /** 1 bit(s). */ + uint8_t RANGE_MAPY_FLAG; /** 1 bit(s). */ + uint8_t RANGE_MAPY; /** 3 bit(s). */ + uint8_t RANGE_MAPUV_FLAG; /** 1 bit(s). */ + uint8_t RANGE_MAPUV; /** 3 bit(s). */ + } vc1_EntryPointHeader; + + /** This structure represents metadata for slice and picture layers. */ + typedef struct + { + /* Slice layer. */ + uint16_t SLICE_ADDR; /** 9 bit(s). */ + + /* Picture layer for simple or main profile. */ + uint8_t RANGEREDFRM; /** 1 bit(s). */ + uint8_t PTYPE; /** 4 bit(s)? */ + int8_t BFRACTION_NUM; /** ? bit(s). */ + int16_t BFRACTION_DEN; /** ? bit(s). */ + uint8_t PQINDEX; /** 5 bit(s). */ + uint8_t HALFQP; /** 1 bit(s). */ + uint8_t PQUANTIZER; /** 1 bit(s). */ + uint8_t MVRANGE; /** 3 bit(s)? */ + uint8_t MVMODE; /** 4 bit(s)? */ + uint8_t MVMODE2; /** 3 bit(s)? */ + uint8_t LUMSCALE; /** 6 bit(s). */ + uint8_t LUMSHIFT; /** 6 bit(s). */ + uint8_t MVTAB; /** 2 bit(s). */ + uint8_t CBPTAB; /** 2 bit(s). */ + uint8_t TTMBF; /** 1 bit(s). */ + uint8_t TTFRM; /** 2 bit(s). 
*/ + uint8_t TRANSACFRM; /** 2 bit(s)? */ + uint8_t TRANSACFRM2; /** 2 bit(s)? */ + uint8_t TRANSDCTAB; /** 1 bit(s). */ + + /* Picture layer for advanced profile. */ + uint8_t FCM; /** 2 bit(s)? */ + uint8_t FPTYPE; /** 3 bit(s). */ + uint8_t TFCNTR; /** 8 bit(s) */ + uint8_t RPTFRM; /** 2 bit(s) */ + uint8_t TFF; /** 1 bit(s). */ + uint8_t RFF; /** 1 bit(s) */ + uint8_t RNDCTRL; /** 1 bit(s). */ + uint8_t UVSAMP; /** 1 bit(s). */ + uint8_t POSTPROC; /** 2 bit(s). */ + uint8_t CONDOVER; /** 2 bit(s)? */ + uint8_t DMVRANGE; /** ? bit(s)? */ + uint8_t MV4SWITCH; /** 1 bit(s). */ + uint8_t INTCOMP; /** 1 bit(s). */ + uint8_t MBMODETAB; /** 2 bit(s). */ + uint8_t MV2BPTAB; /** 2 bit(s). */ + uint8_t MV4BPTAB; /** 2 bit(s). */ + uint8_t NUMREF; /** 1 bit(s). */ + uint8_t REFFIELD; /** 1 bit(s). */ + + /* PAN SCAN */ + uint8_t PS_PRESENT; /** 1 bit(s). */ + uint8_t number_of_pan_scan_window; /** 4 max. */ + viddec_vc1_pan_scan_window_t PAN_SCAN_WINDOW[VIDDEC_PANSCAN_MAX_OFFSETS]; + + /* VOPDQUANT. */ + uint8_t PQDIFF; /** 3 bit(s). */ + uint8_t ABSPQ; /** 5 bit(s). */ + uint8_t DQUANTFRM; /** 1 bit(s). */ + uint8_t DQPROFILE; /** 2 bit(s). */ + uint8_t DQSBEDGE; /** 2 bit(s). */ + uint8_t DQBILEVEL; /** 1 bit(s). */ + + /* Others. */ + uint8_t PTypeField1; + uint8_t PTypeField2; + uint32_t PQUANT; + uint8_t CurrField; + uint8_t BottomField; + uint32_t UniformQuant; + +#ifdef VBP + uint8_t raw_MVTYPEMB; + uint8_t raw_DIRECTMB; + uint8_t raw_SKIPMB; + uint8_t raw_ACPRED; + uint8_t raw_FIELDTX; + uint8_t raw_OVERFLAGS; + uint8_t raw_FORWARDMB; + + vc1_Bitplane MVTYPEMB; + vc1_Bitplane DIRECTMB; + vc1_Bitplane SKIPMB; + vc1_Bitplane ACPRED; + vc1_Bitplane FIELDTX; + vc1_Bitplane OVERFLAGS; + vc1_Bitplane FORWARDMB; + uint32_t ALTPQUANT; + uint8_t DQDBEDGE; +#endif + + } vc1_PictureLayerHeader; + + /*@}*/ + +#ifdef __cplusplus +} +#endif /* __cplusplus. */ + +#endif /* _VC1PARSE_COMMON_DEFS_H_. */ diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_common_tables.c b/mixvbp/vbp_plugin/vc1/vc1parse_common_tables.c new file mode 100755 index 0000000..896e18c --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/vc1parse_common_tables.c @@ -0,0 +1,198 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Contains tables for VLC decoding of syntax elements in simple +// or main profile of VC-1 bitstream. +// +*/ + +#include "vc1parse.h" + +const uint8_t VC1_MVMODE_LOW_TBL[] = +{ + VC1_MVMODE_HPELBI_1MV, + VC1_MVMODE_1MV, + VC1_MVMODE_HPEL_1MV, + VC1_MVMODE_MIXED_MV, + VC1_MVMODE_INTENSCOMP +}; + +const uint8_t VC1_MVMODE_HIGH_TBL[] = +{ + VC1_MVMODE_1MV, + VC1_MVMODE_MIXED_MV, + VC1_MVMODE_HPEL_1MV, + VC1_MVMODE_HPELBI_1MV, + VC1_MVMODE_INTENSCOMP +}; + +const int32_t VC1_BITPLANE_IMODE_TBL[] = +{ + 4, /* max bits */ + 1, /* total subtables */ + 4, /* subtable sizes */ + + 0, /* 1-bit codes */ + 2, /* 2-bit codes */ + 2, VC1_BITPLANE_NORM2_MODE, + 3, VC1_BITPLANE_NORM6_MODE, + 3, /* 3-bit codes */ + 1, VC1_BITPLANE_DIFF2_MODE, + 2, VC1_BITPLANE_ROWSKIP_MODE, + 3, VC1_BITPLANE_COLSKIP_MODE, + 2, /* 4-bit codes */ + 0, VC1_BITPLANE_RAW_MODE, + 1, VC1_BITPLANE_DIFF6_MODE, + -1 +}; + +/* This VLC table is used for decoding of k in bitplane. 
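Like the IMODE table above, it uses the layout consumed by vc1_DecodeHuffmanOne(): maximum code length, sub-table count and sizes, then for each code length a count followed by (codeword, value) pairs, with -1 terminating the table.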
*/ +const int32_t VC1_BITPLANE_K_TBL[] = +{ + 13, /* max bits */ + 2, /* total subtables */ + 6,7,/* subtable sizes */ + + 1, /* 1-bit codes */ + 1, 0 , + 0, /* 2-bit codes */ + 0, /* 3-bit codes */ + 6, /* 4-bit codes */ + 2, 1, 3, 2, 4, 4, 5, 8, + 6, 16, 7, 32, + 0, /* 5-bit codes */ + 1, /* 6-bit codes */ + (3 << 1)| 1, 63, + 0, /* 7-bit codes */ + 15, /* 8-bit codes */ + 0, 3, 1, 5, 2, 6, 3, 9, + 4, 10, 5, 12, 6, 17, 7, 18, + 8, 20, 9, 24, 10, 33, 11, 34, + 12, 36, 13, 40, 14, 48, + 6, /* 9-bit codes */ + (3 << 4)| 7, 31, + (3 << 4)| 6, 47, + (3 << 4)| 5, 55, + (3 << 4)| 4, 59, + + (3 << 4)| 3, 61, + (3 << 4)| 2, 62, + 20, /* 10-bit codes */ + (1 << 6)| 11, 11, + (1 << 6)| 7, 7 , + (1 << 6)| 13, 13, + (1 << 6)| 14, 14, + + (1 << 6)| 19, 19, + (1 << 6)| 21, 21, + (1 << 6)| 22, 22, + (1 << 6)| 25, 25, + + (1 << 6)| 26, 26, + (1 << 6)| 28, 28, + (1 << 6)| 3, 35, + (1 << 6)| 5, 37, + + (1 << 6)| 6, 38, + (1 << 6)| 9, 41, + (1 << 6)| 10, 42, + (1 << 6)| 12, 44, + + (1 << 6)| 17, 49, + (1 << 6)| 18, 50, + (1 << 6)| 20, 52, + (1 << 6)| 24, 56, + 0, /* 11-bit codes */ + 0, /* 12-bit codes */ + 15, /* 13-bit codes */ + (3 << 8)| 14, 15, + (3 << 8)| 13, 23, + (3 << 8)| 12, 27, + (3 << 8)| 11, 29, + + (3 << 8)| 10, 30, + (3 << 8)| 9, 39, + (3 << 8)| 8, 43, + (3 << 8)| 7, 45, + + (3 << 8)| 6, 46, + (3 << 8)| 5, 51, + (3 << 8)| 4, 53, + (3 << 8)| 3, 54, + + (3 << 8)| 2, 57, + (3 << 8)| 1, 58, + (3 << 8)| 0, 60, + -1 +}; + +/* This VLC table is used for decoding of BFRACTION. */ +const int32_t VC1_BFRACTION_TBL[] = +{ + 7, /* max bits */ + 2, /* total subtables */ + 3,4, /* subtable sizes */ + 0, /* 1-bit codes */ + 0, /* 2-bit codes */ + 7, /* 3-bit codes */ + 0x00,1,2, 0x01,1,3, 0x02,2,3, 0x03,1,4, + 0x04,3,4, 0x05,1,5, 0x06,2,5, + 0, /* 4-bit codes */ + 0, /* 5-bit codes */ + 0, /* 6-bit codes */ + 16, /* 7-bit codes */ + 0x70, 3,5, 0x71, 4,5, 0x72, 1,6, 0x73, 5,6, + 0x74, 1,7, 0x75, 2,7, 0x76, 3,7, 0x77, 4,7, + 0x78, 5,7, 0x79, 6,7, 0x7A, 1,8, 0x7B, 3,8, + 0x7C, 5,8, 0x7D, 7,8, + 0x7E, VC1_BFRACTION_INVALID,VC1_BFRACTION_INVALID, + 0x7F, VC1_BFRACTION_BI, VC1_BFRACTION_BI, + + -1 +}; + +/* This table is used for VLC decoding of REFDIST. */ +const int32_t VC1_REFDIST_TBL[] = +{ + 16, /* Max bits. */ + 3, /* Total sub-tables. */ + 5, 6, 5, /* Sub-table sizes. */ + + 0, /* 1-bit codes. */ + 3, /* 2-bit codes. */ + 0, 0, 1, 1, 2, 2, + 1, /* 3-bit codes. */ + 6, 3, + 1, /* 4-bit codes. */ + 14, 4, + 1, /* 5-bit codes. */ + 30, 5, + 1, /* 6-bit codes. */ + 62, 6, + 1, /* 7-bit codes. */ + 126, 7, + 1, /* 8-bit codes. */ + 254, 8, + 1, /* 9-bit codes. */ + 510, 9, + 1, /* 10-bit codes. */ + 1022, 10, + 1, /* 11-bit codes. */ + 2046, 11, + 1, /* 12-bit codes. */ + 4094, 12, + 1, /* 13-bit codes. */ + 8190, 13, + 1, /* 14-bit codes. */ + 16382, 14, + 1, /* 15-bit codes. */ + 32766, 15, + 1, /* 16-bit codes. */ + 65534, 16, + -1 /* end of table. */ +}; diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_huffman.c b/mixvbp/vbp_plugin/vc1/vc1parse_huffman.c new file mode 100755 index 0000000..5dc9b4d --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/vc1parse_huffman.c @@ -0,0 +1,97 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. 
+// +// Description: Parses VLC syntax elements within VC-1 bitstream. +// +*/ + +#include "vc1parse.h" + +/*----------------------------------------------------------------------------*/ + +vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable) +{ + uint32_t tempValue; + const int32_t *pTable = pDecodeTable; + vc1_Status status = VC1_STATUS_OK; + int32_t i, j, maxBits, loopCount, totalBits, value; + + maxBits = *pTable++; + loopCount = *pTable++; + totalBits = 0; + for (i = 0; i < loopCount; i++) + totalBits += *pTable++; + + if (totalBits != maxBits) + return VC1_STATUS_PARSE_ERROR; + + value = 0; + for (i = 0; i < maxBits; i++) + { + VC1_GET_BITS9(1, tempValue); + value = (value << 1) | tempValue; + loopCount = *pTable++; + if (loopCount == -1) + break; + for (j = 0; j < loopCount; j++) + { + if (value == *pTable++) + { + *pDst = *pTable; + return status; + } + else + pTable++; + } + } + + return status; +} + +/*----------------------------------------------------------------------------*/ + +vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable, + int8_t *pFirst, int16_t *pSecond) +{ + uint32_t tempValue; + const int32_t *pTable = pDecodeTable; + vc1_Status status = VC1_STATUS_OK; + int32_t i, j, maxBits, loopCount, totalBits, value; + + maxBits = *pTable++; + loopCount = *pTable++; + totalBits = 0; + for (i = 0; i < loopCount; i++) + totalBits += *pTable++; + + if (totalBits != maxBits) + return VC1_STATUS_PARSE_ERROR; + + value = 0; + for (i = 0; i < maxBits; i++) + { + VC1_GET_BITS9(1, tempValue); + value = (value << 1) | tempValue; + loopCount = *pTable++; + if (loopCount == -1) + break; + for (j = 0; j < loopCount; j++) + { + if (value == *pTable++) + { + *pFirst = *pTable++; + *pSecond = *pTable; + return status; + } + else + pTable += 2; + } + } + + return status; +} diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_ipic.c b/mixvbp/vbp_plugin/vc1/vc1parse_ipic.c new file mode 100755 index 0000000..3d8192a --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/vc1parse_ipic.c @@ -0,0 +1,101 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for progressive I picture in simple +// or main profile bitstream or progressive BI picture in main profile +// bitstream. +// +*/ + +#include "vc1parse.h" + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses progressive I picture for simple + * or main profile bitstream or progressive BI picture in main profile + * bitstream. This parser starts after PTYPE was parsed but stops before + * parsing of macroblock layer. + * Table 16 of SMPTE 421M after processing up to PTYPE for I picture. + * Table 17 of SMPTE 421M after processing up to PTYPE for BI picture. 
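 * For a BI picture, BFRACTION must decode to the dedicated BI codeword; any other value is rejected as a parse error.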
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture(void* ctxt, vc1_Info *pInfo) +{ + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + /* rounding control is implied for simple and main profile, SMPTE 421M 8.3.7. + For each I or BI frame, RND shall be set to 1 */ + if (md->PROFILE != VC1_PROFILE_ADVANCED) + { + picLayerHeader->RNDCTRL = md->RNDCTRL | 1 ; + md->RNDCTRL = picLayerHeader->RNDCTRL; + } + + + if (picLayerHeader->PTYPE == VC1_BI_FRAME) + { + if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) + != VC1_STATUS_OK) + { + return status; + } + if (picLayerHeader->BFRACTION_DEN != VC1_BFRACTION_BI) + return VC1_STATUS_PARSE_ERROR; + } + + VC1_GET_BITS9(7, tempValue); /* BF. */ + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else picLayerHeader->HALFQP=0; + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + /* MVRANGE but only for main profile. */ + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->MULTIRES == 1 && picLayerHeader->PTYPE != VC1_BI_FRAME) + { + VC1_GET_BITS9(2, tempValue); /* RESPIC. */ + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + if (picLayerHeader->TRANSACFRM2) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + picLayerHeader->TRANSACFRM2 += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_ipic_adv.c b/mixvbp/vbp_plugin/vc1/vc1parse_ipic_adv.c new file mode 100755 index 0000000..4e98d96 --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/vc1parse_ipic_adv.c @@ -0,0 +1,256 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for progressive I or BI picture in +// advanced profile bitstream. +// +*/ + +#include "vc1parse.h" +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses progressive I or BI picture for + * advanced profile bitstream. + * Table 18 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. 
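 * CONDOVER below is coded as 0 (no conditional overlap), 10 (overlap all macroblocks) or 11 (overlap signalled per macroblock by the OVERFLAGS bitplane).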
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK) + { + return status; + } + + if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8)) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (picLayerHeader->CONDOVER) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (! picLayerHeader->CONDOVER) + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL; + else + { + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, + md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK) + { + return status; + } + } + } + else + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + if (picLayerHeader->TRANSACFRM2) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + picLayerHeader->TRANSACFRM2 += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + status = vc1_VOPDQuant(ctxt, pInfo); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace I or BI frame for + * advanced profile bitstream. + * Table 82 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_FIELDTX)) != VC1_STATUS_OK) + { + return status; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK) + { + return status; + } + + if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8)) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (picLayerHeader->CONDOVER) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (! picLayerHeader->CONDOVER) + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL; + else + { + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, + md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK) + { + return status; + } + } + } + else + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + if (picLayerHeader->TRANSACFRM2) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + picLayerHeader->TRANSACFRM2 += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + status = vc1_VOPDQuant(ctxt, pInfo); + + /* Skip parsing of macroblock layer. 
*/ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace I or BI field for + * advanced profile bitstream. + * Table 87 of SMPTE 421M after processing up to BFRACTION by + * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + // Reset MVMODE when the second field is an I picture + // to avoid carrying forward the mvmode values from previous field + // especially the intensity compensation value + picLayerHeader->MVMODE = 0; + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) { + DEB("Error parsing I field \n"); + return status; + } + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else + picLayerHeader->HALFQP = 0; + + if (md->QUANTIZER == 1) { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + if (md->POSTPROCFLAG == 1) + VC1_GET_BITS9(2, tempValue); /* POSTPROC. */ + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, (md->heightMB+1)/2, BPP_ACPRED)) != + VC1_STATUS_OK) + { + DEB("Error parsing I field \n"); + return status; + } + + if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8)) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (picLayerHeader->CONDOVER) + { + VC1_GET_BITS9(1, picLayerHeader->CONDOVER); + if (! picLayerHeader->CONDOVER) + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL; + else + { + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME; + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, + (md->heightMB+1)/2, BPP_OVERFLAGS)) != + VC1_STATUS_OK) + { + DEB("Error parsing I field \n"); + return status; + } + } + } + else + picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + if (picLayerHeader->TRANSACFRM2) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); + picLayerHeader->TRANSACFRM2 += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + status = vc1_VOPDQuant(ctxt, pInfo); + if (status != VC1_STATUS_OK) { + DEB("Error parsing I field \n"); + return status; + } + + /* Skip parsing of macroblock layer. */ + + return status; +} diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_mv_com.c b/mixvbp/vbp_plugin/vc1/vc1parse_mv_com.c new file mode 100755 index 0000000..55373b4 --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/vc1parse_mv_com.c @@ -0,0 +1,82 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 syntax elements MVRANGE and DMVRANGE. 
+// +*/ + +#include "vc1parse.h" + +/*------------------------------------------------------------------------------ + * Parse syntax element MVRANGE, which exists for main and advanced profiles. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if (md->EXTENDED_MV == 1) + { + VC1_GET_BITS9(1, picLayerHeader->MVRANGE); + if (picLayerHeader->MVRANGE) + { + VC1_GET_BITS9(1, picLayerHeader->MVRANGE); + if (picLayerHeader->MVRANGE) + { + VC1_GET_BITS9(1, picLayerHeader->MVRANGE); + picLayerHeader->MVRANGE += 1; + } + picLayerHeader->MVRANGE += 1; + } + } + else + picLayerHeader->MVRANGE = 0; + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse syntax element DMVRANGE. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if (md->EXTENDED_DMV == 1) + { + VC1_GET_BITS9(1, picLayerHeader->DMVRANGE); + if (picLayerHeader->DMVRANGE == 0) + picLayerHeader->DMVRANGE = VC1_DMVRANGE_NONE; + else + { + VC1_GET_BITS9(1, picLayerHeader->DMVRANGE); + if (picLayerHeader->DMVRANGE == 0) + picLayerHeader->DMVRANGE = VC1_DMVRANGE_HORIZONTAL_RANGE; + else + { + VC1_GET_BITS9(1, picLayerHeader->DMVRANGE); + if (picLayerHeader->DMVRANGE == 0) + picLayerHeader->DMVRANGE = VC1_DMVRANGE_VERTICAL_RANGE; + else + { + picLayerHeader->DMVRANGE = + VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE; + } + } + } + } + + return status; +} diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_pic_com.c b/mixvbp/vbp_plugin/vc1/vc1parse_pic_com.c new file mode 100755 index 0000000..87f8426 --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/vc1parse_pic_com.c @@ -0,0 +1,101 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for simple and main profiles. +// +*/ + +#include "vc1parse.h" + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses the picture header for simple or + * main profile down to macroblock layer. + * Table 16 of SMPTE 421M after processing up to PTYPE for I picture. + * Table 17 of SMPTE 421M after processing up to PTYPE for BI picture. + * Table 19 of SMPTE 421M after processing up to PTYPE for P picture. + * Table 21 of SMPTE 421M after processing up to PTYPE for B picture. 
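 * PTYPE is decoded below as a VLC: with MAXBFRAMES == 0 a single bit selects I (0) or P (1); otherwise 1 = P, 01 = I and 00 = B, where a B whose next seven bits are all ones is reclassified as BI.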
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo) +{ + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + int32_t result; + + if (md->PROFILE != VC1_PROFILE_ADVANCED) + { + // As per spec, for main/simple profile, if the size of the coded picture is <= 1B, + // it shall be treated as a skipped frame. + // In content with skipped frames, the data is "00". + // rcv to vc1 conversion process adds an additional byte (0x80) to the picture, hence + // the data looks like "00 80" + // Hence if data is <= 2B, we will consider it skipped (check for 16+1b, if it fails, the frame is skipped). + result = viddec_pm_peek_bits(ctxt, &tempValue, 17); + if (result == -1) + { + picLayerHeader->PTYPE = VC1_SKIPPED_FRAME; + return status; + } + } + + if (md->FINTERPFLAG == 1) + { + VC1_GET_BITS9(1, tempValue); /* INTERPFRM. */ + } + + VC1_GET_BITS9(2, tempValue); /* FRMCNT. */ + + if (md->RANGERED == 1) + { + VC1_GET_BITS9(1, picLayerHeader->RANGEREDFRM); + } + + if (md->MAXBFRAMES == 0) + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE == 0) + picLayerHeader->PTYPE = VC1_I_FRAME; + else + picLayerHeader->PTYPE = VC1_P_FRAME; + } + else + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE == 0) + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE == 0) { + picLayerHeader->PTYPE = VC1_B_FRAME; /* Or VC1_BI_FRAME. */ + /* if peek(7) = 0b1111111 then ptype = bi */ + VC1_PEEK_BITS( 7, tempValue ); + if ( tempValue == 0x7f ) + picLayerHeader->PTYPE = VC1_BI_FRAME; + } else + picLayerHeader->PTYPE = VC1_I_FRAME; + } + else + picLayerHeader->PTYPE = VC1_P_FRAME; + } + + if (picLayerHeader->PTYPE == VC1_I_FRAME || + picLayerHeader->PTYPE == VC1_BI_FRAME) + { + status = vc1_ParsePictureHeader_ProgressiveIpicture(ctxt, pInfo); + } + else if (picLayerHeader->PTYPE == VC1_P_FRAME) + status = vc1_ParsePictureHeader_ProgressivePpicture(ctxt, pInfo); + else if (picLayerHeader->PTYPE == VC1_B_FRAME) + status = vc1_ParsePictureHeader_ProgressiveBpicture(ctxt, pInfo); + else + status = VC1_STATUS_PARSE_ERROR; + + return status; +} diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_pic_com_adv.c b/mixvbp/vbp_plugin/vc1/vc1parse_pic_com_adv.c new file mode 100755 index 0000000..dcfdb26 --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/vc1parse_pic_com_adv.c @@ -0,0 +1,404 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for advanced profile. +// +*/ + +#include "vc1parse.h" + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses the picture header for advanced + * profile down to POSTPROC syntax element. + * Table 18 of SMPTE 421M for progressive I or BI picture. + * Table 20 of SMPTE 421M for progressive P picture. + * Table 22 of SMPTE 421M for progressive B picture. + * Table 23 of SMPTE 421M for skipped picture. + * Table 82 of SMPTE 421M for interlace I or BI frame. 
+ * Table 83 of SMPTE 421M for interlace P frame. + * Table 84 of SMPTE 421M for interlace B frame. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint32_t i = 0; + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + uint32_t number_of_pan_scan_window; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if (md->INTERLACE == 1) + { + VC1_GET_BITS9(1, picLayerHeader->FCM); + if (picLayerHeader->FCM) + { + VC1_GET_BITS9(1, picLayerHeader->FCM); + if (picLayerHeader->FCM) + { + picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE; + return VC1_STATUS_PARSE_ERROR; + } + else + picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE; + } + else + picLayerHeader->FCM = VC1_FCM_PROGRESSIVE; + } + else + picLayerHeader->FCM = VC1_FCM_PROGRESSIVE; + + + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE) + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE) + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE) + { + VC1_GET_BITS9(1, picLayerHeader->PTYPE); + if (picLayerHeader->PTYPE) + picLayerHeader->PTYPE = VC1_SKIPPED_FRAME; + else + picLayerHeader->PTYPE = VC1_BI_FRAME; + } + else + picLayerHeader->PTYPE = VC1_I_FRAME; + } + else + picLayerHeader->PTYPE = VC1_B_FRAME; + } + else + picLayerHeader->PTYPE = VC1_P_FRAME; + + if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME) + { + if (md->TFCNTRFLAG) + { + VC1_GET_BITS9(8, picLayerHeader->TFCNTR); /* TFCNTR. */ + } + } + + if (md->PULLDOWN) + { + if ((md->INTERLACE == 0) || (md->PSF == 1)) + { + VC1_GET_BITS9(2, picLayerHeader->RPTFRM); + } + else + { + VC1_GET_BITS9(1, picLayerHeader->TFF); + VC1_GET_BITS9(1, picLayerHeader->RFF); + } + } + + if (md->PANSCAN_FLAG == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT); /* PS_PRESENT. */ + if (picLayerHeader->PS_PRESENT == 1) + { + if ((md->INTERLACE == 1) && + (md->PSF == 0)) + { + if (md->PULLDOWN == 1) + number_of_pan_scan_window = 2 + picLayerHeader->RFF; + else + number_of_pan_scan_window = 2; + } + else + { + if (md->PULLDOWN == 1) + number_of_pan_scan_window = 1 + picLayerHeader->RPTFRM; + else + number_of_pan_scan_window = 1; + } + picLayerHeader->number_of_pan_scan_window = number_of_pan_scan_window; + + for (i = 0; i < number_of_pan_scan_window; i++) + { + VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */ + VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */ + VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */ + VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. */ + } + } + } + + if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME) + { + VC1_GET_BITS9(1, picLayerHeader->RNDCTRL); + md->RNDCTRL = picLayerHeader->RNDCTRL; + + if ((md->INTERLACE == 1) || + (picLayerHeader->FCM != VC1_FCM_PROGRESSIVE)) + { + VC1_GET_BITS9(1, picLayerHeader->UVSAMP); + } + + if ((md->FINTERPFLAG == 1) && + (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE)) + { + VC1_GET_BITS9(1, tempValue); /* INTERPFRM. 
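Display interpolation hint; read and discarded.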
*/ + } + + if ((picLayerHeader->PTYPE == VC1_B_FRAME) && + (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE)) + { + if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) + != VC1_STATUS_OK) + { + return status; + } + } + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else + picLayerHeader->HALFQP = 0; + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + if (md->POSTPROCFLAG == 1) + { + VC1_GET_BITS9(2, picLayerHeader->POSTPROC); + } + } + + return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo); +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses the picture header for advanced + * profile down to BFRACTION syntax element. + * Table 85 of SMPTE 421M. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParseFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint32_t i = 0; + vc1_Status status = VC1_STATUS_OK; + uint32_t number_of_pan_scan_window; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + VC1_GET_BITS9(1, picLayerHeader->FCM); + if (picLayerHeader->FCM) + { + VC1_GET_BITS9(1, picLayerHeader->FCM); + if (picLayerHeader->FCM) + picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE; + else + picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE; + } + else + picLayerHeader->FCM = VC1_FCM_PROGRESSIVE; + if (picLayerHeader->FCM != VC1_FCM_FIELD_INTERLACE) + return VC1_STATUS_PARSE_ERROR; + + VC1_GET_BITS9(3, picLayerHeader->FPTYPE); + if (picLayerHeader->FPTYPE == 0) + { + picLayerHeader->PTypeField1 = VC1_I_FRAME; + picLayerHeader->PTypeField2 = VC1_I_FRAME; + } + else if (picLayerHeader->FPTYPE == 1) + { + picLayerHeader->PTypeField1 = VC1_I_FRAME; + picLayerHeader->PTypeField2 = VC1_P_FRAME; + } + else if (picLayerHeader->FPTYPE == 2) + { + picLayerHeader->PTypeField1 = VC1_P_FRAME; + picLayerHeader->PTypeField2 = VC1_I_FRAME; + } + else if (picLayerHeader->FPTYPE == 3) + { + picLayerHeader->PTypeField1 = VC1_P_FRAME; + picLayerHeader->PTypeField2 = VC1_P_FRAME; + } + else if (picLayerHeader->FPTYPE == 4) + { + picLayerHeader->PTypeField1 = VC1_B_FRAME; + picLayerHeader->PTypeField2 = VC1_B_FRAME; + } + else if (picLayerHeader->FPTYPE == 5) + { + picLayerHeader->PTypeField1 = VC1_B_FRAME; + picLayerHeader->PTypeField2 = VC1_BI_FRAME; + } + else if (picLayerHeader->FPTYPE == 6) + { + picLayerHeader->PTypeField1 = VC1_BI_FRAME; + picLayerHeader->PTypeField2 = VC1_B_FRAME; + } + else if (picLayerHeader->FPTYPE == 7) + { + picLayerHeader->PTypeField1 = VC1_BI_FRAME; + picLayerHeader->PTypeField2 = VC1_BI_FRAME; + } + + if (md->TFCNTRFLAG) + { + VC1_GET_BITS9(8, picLayerHeader->TFCNTR); + } + + if (md->PULLDOWN == 1) + { + if (md->PSF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->RPTFRM); + } + else + { + VC1_GET_BITS9(1, picLayerHeader->TFF); + VC1_GET_BITS9(1, picLayerHeader->RFF); + } + } else + picLayerHeader->TFF = 1; + + if (md->PANSCAN_FLAG == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT); + if (picLayerHeader->PS_PRESENT) + { + if (md->PULLDOWN) + number_of_pan_scan_window = 2 + picLayerHeader->RFF; + else + number_of_pan_scan_window = 2; + 
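/* Each pan scan window below carries 64 bits: 18-bit horizontal and vertical offsets plus 14-bit width and height. */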
picLayerHeader->number_of_pan_scan_window = number_of_pan_scan_window; + + for (i = 0; i < number_of_pan_scan_window; i++) + { + VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */ + VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */ + VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */ + VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. */ + } + } + } + VC1_GET_BITS9(1, md->RNDCTRL); + +#ifdef VBP + picLayerHeader->RNDCTRL = md->RNDCTRL; +#endif + + VC1_GET_BITS9(1, picLayerHeader->UVSAMP); + + if ((md->REFDIST_FLAG == 1) && (picLayerHeader->FPTYPE <= 3)) + { + int32_t tmp; + if ((status = vc1_DecodeHuffmanOne(ctxt, &tmp, + VC1_REFDIST_TBL)) != VC1_STATUS_OK) + { + return status; + } + md->REFDIST = tmp; + } else if (md->REFDIST_FLAG == 0) { + md->REFDIST = 0; + } + + if ((picLayerHeader->FPTYPE >= 4) && (picLayerHeader->FPTYPE <= 7)) + { + if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL, + &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) != + VC1_STATUS_OK) + { + return status; + } + } + + if (picLayerHeader->CurrField == 0) + { + picLayerHeader->PTYPE = picLayerHeader->PTypeField1; + picLayerHeader->BottomField = (uint8_t) (1 - picLayerHeader->TFF); + } + else + { + picLayerHeader->BottomField = (uint8_t) (picLayerHeader->TFF); + picLayerHeader->PTYPE = picLayerHeader->PTypeField2; + } + + return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo); +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function calls the appropriate function to further + * parse the picture header for advanced profile down to macroblock layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_PARSE_ERROR; + + if (pInfo->picLayerHeader.FCM == VC1_FCM_PROGRESSIVE) + { + if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) || + (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME)) + { + status = vc1_ParsePictureHeader_ProgressiveIpicture_Adv(ctxt, pInfo); + } + else if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + status = vc1_ParsePictureHeader_ProgressivePpicture_Adv(ctxt, pInfo); + else if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) + status = vc1_ParsePictureHeader_ProgressiveBpicture_Adv(ctxt, pInfo); + else if (pInfo->picLayerHeader.PTYPE == VC1_SKIPPED_FRAME) + status = VC1_STATUS_OK; + } + else if (pInfo->picLayerHeader.FCM == VC1_FCM_FRAME_INTERLACE) + { + if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) || + (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME)) + { + status = vc1_ParsePictureHeader_InterlaceIpicture_Adv(ctxt, pInfo); + } + else if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME) + status = vc1_ParsePictureHeader_InterlacePpicture_Adv(ctxt, pInfo); + else if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) + status = vc1_ParsePictureHeader_InterlaceBpicture_Adv(ctxt, pInfo); + else if (pInfo->picLayerHeader.PTYPE == VC1_SKIPPED_FRAME) + status = VC1_STATUS_OK; + } + else if (pInfo->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) + { + int ptype; + if ( pInfo->picLayerHeader.CurrField == 0) + ptype = pInfo->picLayerHeader.PTypeField1; + else + ptype = pInfo->picLayerHeader.PTypeField2; + + if ((ptype == VC1_I_FRAME) || + (ptype == VC1_BI_FRAME)) + { + status = vc1_ParseFieldHeader_InterlaceIpicture_Adv(ctxt, pInfo); + } + else if (ptype == VC1_P_FRAME) + status =
vc1_ParseFieldHeader_InterlacePpicture_Adv(ctxt, pInfo); + else if (ptype == VC1_B_FRAME) + status = vc1_ParseFieldHeader_InterlaceBpicture_Adv(ctxt, pInfo); + else if (ptype == VC1_SKIPPED_FRAME) + status = VC1_STATUS_OK; + } + + return status; +} diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_ppic.c b/mixvbp/vbp_plugin/vc1/vc1parse_ppic.c new file mode 100755 index 0000000..b921af0 --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/vc1parse_ppic.c @@ -0,0 +1,148 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for progressive P picture in simple +// or main profile bitstream. +// +*/ + +#include "vc1parse.h" + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses progressive P picture for simple + * or main profile bitstream. This parser starts after PTYPE was parsed but + * stops before parsing of macroblock layer. + * Table 19 of SMPTE 421M after processing up to PTYPE for P picture. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInfo) +{ + uint8_t bit_count; + const uint8_t *table; + uint32_t tempValue; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + /* rounding control is implied for simple and main profile, SMPTE 421M 8.3.7. + It toggles back and forth between 0 and 1 for P frames */ + if (md->PROFILE != VC1_PROFILE_ADVANCED) + { + picLayerHeader->RNDCTRL = md->RNDCTRL ^ 1 ; + md->RNDCTRL = picLayerHeader->RNDCTRL; + } + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else picLayerHeader->HALFQP=0; + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + /* MVRANGE. */ + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->MULTIRES == 1) + VC1_GET_BITS9(2, tempValue); /* RESPIC. 
*/ + + if (picLayerHeader->PQUANT > 12) + table = VC1_MVMODE_LOW_TBL; + else + table = VC1_MVMODE_HIGH_TBL; + + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + while ((picLayerHeader->MVMODE == 0) && (bit_count < 3)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + bit_count++; + } + if (bit_count == 3) + bit_count += picLayerHeader->MVMODE; + picLayerHeader->MVMODE = table[bit_count]; + + if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) + { + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + bit_count++; + } + if (bit_count == 2 && picLayerHeader->MVMODE2 == 0) + bit_count++; + picLayerHeader->MVMODE2 = table[bit_count]; + VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); + VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); + } + else +#ifdef VBP + picLayerHeader->MVMODE2 = 0; +#else + picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; +#endif + + if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) || + ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) && + (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV))) + { + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_MVTYPEMB)) + != VC1_STATUS_OK) + { + return status; + } + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(2, picLayerHeader->MVTAB); + VC1_GET_BITS9(2, picLayerHeader->CBPTAB); + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_ppic_adv.c b/mixvbp/vbp_plugin/vc1/vc1parse_ppic_adv.c new file mode 100755 index 0000000..99edf6f --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/vc1parse_ppic_adv.c @@ -0,0 +1,367 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 picture layer for progressive P picture in advanced +// profile bitstream. +// +*/ +#include +#include "vc1parse.h" +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses progressive P picture for advanced + * profile bitstream. + * Table 20 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. 
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint8_t bit_count; + const uint8_t *table; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + /* MVRANGE. */ + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQUANT > 12) + table = VC1_MVMODE_LOW_TBL; + else + table = VC1_MVMODE_HIGH_TBL; + + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + while ((picLayerHeader->MVMODE == 0) && (bit_count < 3)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + bit_count++; + } + if (bit_count == 3) + bit_count += picLayerHeader->MVMODE; + picLayerHeader->MVMODE = table[bit_count]; + + if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) + { + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + bit_count++; + } + if (bit_count == 2 && picLayerHeader->MVMODE2 == 0) + bit_count++; + picLayerHeader->MVMODE2 = table[bit_count]; + VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); + VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); + md->LUMSCALE2 = picLayerHeader->LUMSCALE; + md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; + } + else +#ifdef VBP + picLayerHeader->MVMODE2 = 0; +#else + picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; +#endif + + if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) || + ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) && + (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV))) + { + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_MVTYPEMB)) != + VC1_STATUS_OK) + { + return status; + } + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(2, picLayerHeader->MVTAB); + VC1_GET_BITS9(2, picLayerHeader->CBPTAB); + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace P frame for advanced + * profile bitstream. + * Table 83 of SMPTE 421M after processing up to POSTPROC by + * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + /* MVRANGE. */ + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + /* DMVRANGE. 
*/ + if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + VC1_GET_BITS9(1, picLayerHeader->MV4SWITCH); + + VC1_GET_BITS9(1, picLayerHeader->INTCOMP); + if (picLayerHeader->INTCOMP) + { + VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); + VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); + md->LUMSCALE2 = picLayerHeader->LUMSCALE; + md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; + } + + if ((status = vc1_DecodeBitplane(ctxt, pInfo, + md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) + { + return status; + } + + VC1_GET_BITS9(2, picLayerHeader->MBMODETAB); + VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */ + VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ + VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */ + + if (picLayerHeader->MV4SWITCH == 1) + { + VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ + } + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} + +/*------------------------------------------------------------------------------ + * Parse picture layer. This function parses interlace P field for advanced + * profile bitstream. + * Table 88 of SMPTE 421M after processing up to BFRACTION by + * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock + * layer. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo) +{ + uint8_t bit_count; + const uint8_t *table; + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + + VC1_GET_BITS9(5, picLayerHeader->PQINDEX); + if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQINDEX <= 8) + { + VC1_GET_BITS9(1, picLayerHeader->HALFQP); + } + else + picLayerHeader->HALFQP = 0; + + + if (md->QUANTIZER == 1) + { + VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER); + picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER; + } + + if (md->POSTPROCFLAG == 1) + { + VC1_GET_BITS9(2, picLayerHeader->POSTPROC); + } + + VC1_GET_BITS9(1, picLayerHeader->NUMREF); + + if (picLayerHeader->NUMREF == 0) + { + VC1_GET_BITS9(1, picLayerHeader->REFFIELD); + } + + if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) { + DEB("Error in vc1_MVRangeDecode \n"); + return status; + } + + if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (picLayerHeader->PQUANT > 12) + table = VC1_MVMODE_LOW_TBL; + else + table = VC1_MVMODE_HIGH_TBL; + + bit_count = 0; + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + while ((picLayerHeader->MVMODE == 0) && (bit_count < 2)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + bit_count++; + } + if (bit_count == 2 && picLayerHeader->MVMODE == 0) { + VC1_GET_BITS9(1, picLayerHeader->MVMODE); + + if ( picLayerHeader->MVMODE == 1) + bit_count ++; + + bit_count++; + } + picLayerHeader->MVMODE = table[bit_count]; + + if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) + { + bit_count = 0; + 
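
Both the MVMODE read a few lines up and the MVMODE2 read that follows decode the same short unary-prefix code: bits are consumed until a 1 (or the maximum code length) is reached, and the resulting count indexes VC1_MVMODE_LOW_TBL or VC1_MVMODE_HIGH_TBL, chosen by PQUANT. A standalone sketch of the frame-header variant; the bit-reader callback is hypothetical, the real code reads through VC1_GET_BITS9:

    typedef uint32_t (*get_bit_fn)(void *ctxt);   /* hypothetical bit reader */

    static uint8_t decode_mvmode(get_bit_fn get_bit, void *ctxt,
                                 const uint8_t *table /* VC1_MVMODE_*_TBL */)
    {
        uint32_t bit = get_bit(ctxt);
        int bit_count = 0;

        /* consume up to three prefix bits */
        while ((bit == 0) && (bit_count < 3))
        {
            bit = get_bit(ctxt);
            bit_count++;
        }
        /* the two longest codewords differ only in their final bit */
        if (bit_count == 3)
            bit_count += bit;
        return table[bit_count];
    }
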
VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2)) + { + VC1_GET_BITS9(1, picLayerHeader->MVMODE2); + bit_count++; + } + if (bit_count == 2 && picLayerHeader->MVMODE2 == 0) + bit_count++; + picLayerHeader->MVMODE2 = table[bit_count]; + + VC1_GET_BITS9(1, md->INTCOMPFIELD); + if (md->INTCOMPFIELD == 1) + md->INTCOMPFIELD = VC1_INTCOMP_BOTH_FIELD; + else + { + VC1_GET_BITS9(1, md->INTCOMPFIELD); + if (md->INTCOMPFIELD == 1) + md->INTCOMPFIELD = VC1_INTCOMP_BOTTOM_FIELD; + else + md->INTCOMPFIELD = VC1_INTCOMP_TOP_FIELD; + } + VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); /* LUMSCALE1. */ + VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); /* LUMSHIFT1. */ + if ( md->INTCOMPFIELD == VC1_INTCOMP_BOTTOM_FIELD ) { + md->LUMSCALE2 = picLayerHeader->LUMSCALE; + md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; + } + if (md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD) + { + VC1_GET_BITS9(6, md->LUMSCALE2); + VC1_GET_BITS9(6, md->LUMSHIFT2); + } + } + else +#ifdef VBP + picLayerHeader->MVMODE2 = 0; +#else + picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; +#endif + + VC1_GET_BITS9(3, picLayerHeader->MBMODETAB); + + if (picLayerHeader->NUMREF) + { + VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. */ + } + else + { + VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */ + } + + VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ + +#ifdef VBP + if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) +#else + if (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV) +#endif + { + VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ + } + + if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK) + return status; + + if (md->VSTRANSFORM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TTMBF); + if (picLayerHeader->TTMBF == 1) + { + VC1_GET_BITS9(2, picLayerHeader->TTFRM); + } + } + + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + if (picLayerHeader->TRANSACFRM == 1) + { + VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); + picLayerHeader->TRANSACFRM += 2; + } + picLayerHeader->TRANSACFRM2 = 0; + + VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB); + + /* Skip parsing of macroblock layer. */ + + return status; +} diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_vopdq.c b/mixvbp/vbp_plugin/vc1/vc1parse_vopdq.c new file mode 100755 index 0000000..95b556c --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/vc1parse_vopdq.c @@ -0,0 +1,130 @@ +/* /////////////////////////////////////////////////////////////////////// +// +// INTEL CORPORATION PROPRIETARY INFORMATION +// This software is supplied under the terms of a license agreement or +// nondisclosure agreement with Intel Corporation and may not be copied +// or disclosed except in accordance with the terms of that agreement. +// Copyright (c) 2008 Intel Corporation. All Rights Reserved. +// +// Description: Parses VC-1 syntax elements VOPDQUANT and DQUANT. +// +*/ + +#include "vc1parse.h" + +#define VC1_UNDEF_PQUANT 0 + +static const uint8_t MapPQIndToQuant_Impl[] = +{ + VC1_UNDEF_PQUANT, + 1, 2, 3, 4, 5, 6, 7, 8, + 6, 7, 8, 9, 10, 11, 12, 13, + 14, 15, 16, 17, 18, 19, 20, 21, + 22, 23, 24, 25, 27, 29, 31 +}; + +/*------------------------------------------------------------------------------ + * Parse syntax element VOPDQuant as defined in Table 24 of SMPTE 421M. 
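 *
 * For illustration, a sketch of the alternate-quantizer rule this element
 * feeds (it is what the VBP block at the end of the function computes):
 *
 *     ALTPQUANT = (PQDIFF == 7) ? ABSPQ : PQUANT + PQDIFF + 1;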
+ *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + if (md->DQUANT == 0) + return status; + + if (md->DQUANT == 2) + { + VC1_GET_BITS9(3, picLayerHeader->PQDIFF); + if (picLayerHeader->PQDIFF == 7) + { + VC1_GET_BITS9(5, picLayerHeader->ABSPQ); + } + } + else + { + VC1_GET_BITS9(1, picLayerHeader->DQUANTFRM); + if (picLayerHeader->DQUANTFRM == 1) + { + VC1_GET_BITS9(2, picLayerHeader->DQPROFILE); + if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_SNGLEDGES) + { + VC1_GET_BITS9(2, picLayerHeader->DQSBEDGE); + } + else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_DBLEDGES) + { +#ifdef VBP + VC1_GET_BITS9(2, picLayerHeader->DQDBEDGE); +#else + VC1_GET_BITS9(2, picLayerHeader->DQSBEDGE); /* DQDBEDGE. */ +#endif + } + else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_ALLMBLKS) + { + VC1_GET_BITS9(1, picLayerHeader->DQBILEVEL); + } + if (! (picLayerHeader->DQPROFILE == VC1_DQPROFILE_ALLMBLKS && + picLayerHeader->DQBILEVEL == 0)) + { + VC1_GET_BITS9(3, picLayerHeader->PQDIFF); + if (picLayerHeader->PQDIFF == 7) + { + VC1_GET_BITS9(5, picLayerHeader->ABSPQ); + } + } + } + } +#ifdef VBP + if ((picLayerHeader->DQUANTFRM == 1 && md->DQUANT == 1) || (md->DQUANT == 2)) + { + if (picLayerHeader->PQDIFF == 7) + { + picLayerHeader->ALTPQUANT = picLayerHeader->ABSPQ; + } + else + { + picLayerHeader->ALTPQUANT = picLayerHeader->PQUANT + picLayerHeader->PQDIFF + 1; + } + } +#endif + return status; +} + +/*------------------------------------------------------------------------------ + * Compute value for PQUANT syntax element that does not exist in bitstreams for + * progressive I and BI pictures. + *------------------------------------------------------------------------------ + */ + +vc1_Status vc1_CalculatePQuant(vc1_Info *pInfo) +{ + vc1_Status status = VC1_STATUS_OK; + vc1_metadata_t *md = &pInfo->metadata; + vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader; + + picLayerHeader->PQUANT = picLayerHeader->PQINDEX; + picLayerHeader->UniformQuant = VC1_QUANTIZER_UNIFORM; + + if (md->QUANTIZER == 0) + { + if (picLayerHeader->PQINDEX < 9) + picLayerHeader->UniformQuant = VC1_QUANTIZER_UNIFORM; + else + { + picLayerHeader->UniformQuant = VC1_QUANTIZER_NONUNIFORM; + picLayerHeader->PQUANT = + MapPQIndToQuant_Impl[picLayerHeader->PQINDEX]; + } + } + else + { + if (md->QUANTIZER == 2) + picLayerHeader->UniformQuant = VC1_QUANTIZER_NONUNIFORM; + } + + return status; +} diff --git a/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c b/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c new file mode 100755 index 0000000..a9644d9 --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c @@ -0,0 +1,401 @@ +#include "viddec_parser_ops.h" // For parser helper functions +#include "vc1.h" // For the parser structure +#include "vc1parse.h" // For vc1 parser helper functions +#ifdef VBP +#include "viddec_pm.h" +#endif +#define vc1_is_frame_start_code( ch ) \ + (( vc1_SCField == ch ||vc1_SCSlice == ch || vc1_SCFrameHeader == ch ) ? 
1 : 0)
+
+/* init function */
+#ifdef VBP
+void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#else
+static void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#endif
+{
+    vc1_viddec_parser_t *parser = ctxt;
+    int i;
+
+    persist_mem = persist_mem;
+
+    for (i = 0; i < VC1_NUM_REFERENCE_FRAMES; i++)
+    {
+        parser->ref_frame[i].id = -1; /* first I frame checks that value */
+        parser->ref_frame[i].anchor[0] = 1;
+        parser->ref_frame[i].anchor[1] = 1;
+        parser->ref_frame[i].intcomp_top = 0;
+        parser->ref_frame[i].intcomp_bot = 0;
+        parser->ref_frame[i].tff = 0;
+    }
+
+    parser->intcomp_top[0] = 0;
+    parser->intcomp_bot[0] = 0;
+    parser->intcomp_top[1] = 0;
+    parser->intcomp_bot[1] = 0;
+    parser->is_reference_picture = false;
+
+    memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader));
+
+    if (preserve)
+    {
+        parser->sc_seen &= VC1_EP_MASK;
+        parser->sc_seen_since_last_wkld &= VC1_EP_MASK;
+    }
+    else
+    {
+        parser->sc_seen = VC1_SC_INVALID;
+        parser->sc_seen_since_last_wkld = VC1_SC_INVALID;
+        memset(&parser->info.metadata, 0, sizeof(parser->info.metadata));
+    }
+
+    return;
+} // viddec_vc1_init
+
+static void vc1_swap_intcomp(vc1_viddec_parser_t *parser)
+{
+    parser->intcomp_top[1] = parser->intcomp_top[0];
+    parser->intcomp_bot[1] = parser->intcomp_bot[0];
+    parser->intcomp_top[0] = 0;
+    parser->intcomp_bot[0] = 0;
+
+    return;
+} // vc1_swap_intcomp
+
+#ifdef VBP
+uint32_t viddec_vc1_parse(void *parent, void *ctxt)
+#else
+static uint32_t viddec_vc1_parse(void *parent, void *ctxt)
+#endif
+{
+    vc1_viddec_parser_t *parser = ctxt;
+    uint32_t sc = 0x0;
+    int32_t ret = 0, status = 0;
+
+#ifdef VBP
+    /* This works only if there is one slice and no start codes */
+    /* A better fix would be to insert start codes if there aren't any. */
+    ret = viddec_pm_peek_bits(parent, &sc, 32);
+    if ((sc > 0x0100) && (sc < 0x0200)) /* a Start code will be in this range. */
+    {
+        ret = viddec_pm_get_bits(parent, &sc, 32);
+    }
+    else
+    {
+        /* In cases where we get a buffer with no start codes, we assume */
+        /* that this is a frame of data. We may have to fix this later.
*/ + sc = vc1_SCFrameHeader; + } +#else + ret = viddec_pm_get_bits(parent, &sc, 32); +#endif + sc = sc & 0xFF; + parser->is_frame_start = 0; + parser->is_second_start = 0; + DEB("START_CODE = %02x\n", sc); + switch ( sc ) + { + case vc1_SCSequenceHeader: + { + uint32_t data; + parser->ref_frame[0].anchor[0] = 1; + parser->ref_frame[0].anchor[1] = 1; + parser->ref_frame[1].anchor[0] = 1; + parser->ref_frame[1].anchor[1] = 1; + memset( &parser->info.metadata, 0, sizeof(parser->info.metadata)); + /* look if we have a rcv header for main or simple profile */ + ret = viddec_pm_peek_bits(parent,&data ,2); + + if (data == 3) + { + status = vc1_ParseSequenceLayer(parent, &parser->info); + } + else + { + status = vc1_ParseRCVSequenceLayer(parent, &parser->info); + } + parser->sc_seen = VC1_SC_SEQ; + parser->sc_seen_since_last_wkld |= VC1_SC_SEQ; +#ifdef VBP + parser->start_code = VC1_SC_SEQ; + if (parser->info.metadata.HRD_NUM_LEAKY_BUCKETS == 0) + { + if (parser->info.metadata.PROFILE == VC1_PROFILE_SIMPLE) + { + switch(parser->info.metadata.LEVEL) + { + case 0: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 96000; + break; + case 1: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 384000; + break; + } + } + else if (parser->info.metadata.PROFILE == VC1_PROFILE_MAIN) + { + switch(parser->info.metadata.LEVEL) + { + case 0: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 2000000; + break; + case 1: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 10000000; + break; + case 2: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 20000000; + break; + } + } + else if (parser->info.metadata.PROFILE == VC1_PROFILE_ADVANCED) + { + switch(parser->info.metadata.LEVEL) + { + case 0: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 2000000; + break; + case 1: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 10000000; + break; + case 2: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 20000000; + break; + case 3: + parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 45000000; + break; + } + } + } + +#endif + break; + } + + case vc1_SCEntryPointHeader: + { + status = vc1_ParseEntryPointLayer(parent, &parser->info); + parser->sc_seen |= VC1_SC_EP; + // Clear all bits indicating data below ep header + parser->sc_seen &= VC1_EP_MASK; + parser->sc_seen_since_last_wkld |= VC1_SC_EP; +#ifdef VBP + parser->start_code = VC1_SC_EP; +#endif + break; + } + + case vc1_SCFrameHeader: + { + memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader)); + status = vc1_ParsePictureLayer(parent, &parser->info); + if ((parser->info.picLayerHeader.PTypeField1 == VC1_I_FRAME) || + (parser->info.picLayerHeader.PTypeField1 == VC1_P_FRAME) || + (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) || + (parser->info.picLayerHeader.PTYPE == VC1_P_FRAME)) + { + vc1_swap_intcomp(parser); + } + parser->sc_seen |= VC1_SC_FRM; + // Clear all bits indicating data below frm header + parser->sc_seen &= VC1_FRM_MASK; + parser->sc_seen_since_last_wkld |= VC1_SC_FRM; + //vc1_start_new_frame ( parent, parser ); + + parser->is_frame_start = 1; + vc1_parse_emit_frame_start( parent, parser ); +#ifdef VBP + parser->start_code = VC1_SC_FRM; +#endif + break; + } + + case vc1_SCSlice: + { + status = vc1_ParseSliceLayer(parent, &parser->info); + parser->sc_seen_since_last_wkld |= VC1_SC_SLC; + + vc1_parse_emit_current_slice( parent, parser ); + +#ifdef VBP + parser->start_code 
= VC1_SC_SLC; +#endif + break; + } + + case vc1_SCField: + { + parser->info.picLayerHeader.SLICE_ADDR = 0; + parser->info.picLayerHeader.CurrField = 1; + parser->info.picLayerHeader.REFFIELD = 0; + parser->info.picLayerHeader.NUMREF = 0; + parser->info.picLayerHeader.MBMODETAB = 0; + parser->info.picLayerHeader.MV4SWITCH = 0; + parser->info.picLayerHeader.DMVRANGE = 0; + parser->info.picLayerHeader.MVTAB = 0; + parser->info.picLayerHeader.MVMODE = 0; + parser->info.picLayerHeader.MVRANGE = 0; +#ifdef VBP + parser->info.picLayerHeader.raw_MVTYPEMB = 0; + parser->info.picLayerHeader.raw_DIRECTMB = 0; + parser->info.picLayerHeader.raw_SKIPMB = 0; + parser->info.picLayerHeader.raw_ACPRED = 0; + parser->info.picLayerHeader.raw_FIELDTX = 0; + parser->info.picLayerHeader.raw_OVERFLAGS = 0; + parser->info.picLayerHeader.raw_FORWARDMB = 0; + + memset(&(parser->info.picLayerHeader.MVTYPEMB), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.DIRECTMB), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.SKIPMB), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.ACPRED), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.FIELDTX), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.OVERFLAGS), 0, sizeof(vc1_Bitplane)); + memset(&(parser->info.picLayerHeader.FORWARDMB), 0, sizeof(vc1_Bitplane)); + + parser->info.picLayerHeader.ALTPQUANT = 0; + parser->info.picLayerHeader.DQDBEDGE = 0; +#endif + + status = vc1_ParseFieldLayer(parent, &parser->info); + if ((parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME) || + (parser->info.picLayerHeader.PTypeField2 == VC1_P_FRAME)) + { + //vc1_swap_intcomp(parser); + } + parser->sc_seen |= VC1_SC_FLD; + parser->sc_seen_since_last_wkld |= VC1_SC_FLD; + + parser->is_second_start = 1; + vc1_parse_emit_second_field_start( parent, parser ); +#ifdef VBP + parser->start_code = VC1_SC_FLD; +#endif + break; + } + + case vc1_SCSequenceUser: + case vc1_SCEntryPointUser: + case vc1_SCFrameUser: + case vc1_SCSliceUser: + case vc1_SCFieldUser: + {/* Handle user data */ + status = vc1_ParseAndAppendUserData(parent, sc); //parse and add items + parser->sc_seen_since_last_wkld |= VC1_SC_UD; +#ifdef VBP + parser->start_code = VC1_SC_UD; +#endif + break; + } + + case vc1_SCEndOfSequence: + { + parser->sc_seen = VC1_SC_INVALID; + parser->sc_seen_since_last_wkld |= VC1_SC_INVALID; +#ifdef VBP + parser->start_code = VC1_SC_INVALID; +#endif + break; + } + default: /* Any other SC that is not handled */ + { + DEB("SC = %02x - unhandled\n", sc ); +#ifdef VBP + parser->start_code = VC1_SC_INVALID; +#endif + break; + } + } + + + + return VIDDEC_PARSE_SUCESS; +} // viddec_vc1_parse + +/** + If a picture header was seen and the next start code is a sequence header, entrypoint header, + end of sequence or another frame header, this api returns frame done. + If a sequence header and a frame header was not seen before this point, all the + information needed for decode is not present and parser errors are reported. 
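
 As a sketch of the decision in the switch below:

     frame done  <=  next_sc in { FRM, EP, SEQ, EOS, DISCONTINUITY },
                     except a FRM that is the first one after a SEQ/EP
                     header, or an EP that is the first one after a SEQ;
     decodable   <=  (sc_seen & VC1_SC_SEQ) && (sc_seen & VC1_SC_FRM).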
+*/ +#ifdef VBP +uint32_t viddec_vc1_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors) +#else +static uint32_t viddec_vc1_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors) +#endif +{ + vc1_viddec_parser_t *parser = ctxt; + int ret = VIDDEC_PARSE_SUCESS; + parent = parent; + switch (next_sc) + { + case vc1_SCFrameHeader: + if (((parser->sc_seen_since_last_wkld & VC1_SC_EP) || + (parser->sc_seen_since_last_wkld & VC1_SC_SEQ)) && + (!(parser->sc_seen_since_last_wkld & VC1_SC_FRM))) + { + break; + } + // Deliberate fall-thru case + case vc1_SCEntryPointHeader: + if ((next_sc == vc1_SCEntryPointHeader) && + (parser->sc_seen_since_last_wkld & VC1_SC_SEQ) && + (!(parser->sc_seen_since_last_wkld & VC1_SC_EP))) + { + break; + } + // Deliberate fall-thru case + case vc1_SCSequenceHeader: + case vc1_SCEndOfSequence: + case VIDDEC_PARSE_EOS: + case VIDDEC_PARSE_DISCONTINUITY: + ret = VIDDEC_PARSE_FRMDONE; + // Set errors for progressive + if ((parser->sc_seen & VC1_SC_SEQ) && (parser->sc_seen & VC1_SC_FRM)) + *codec_specific_errors = 0; + else + *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + vc1_end_frame(parser); + parser->sc_seen_since_last_wkld = VC1_SC_INVALID; + // TODO: Need to check for interlaced + break; + default: + ret = VIDDEC_PARSE_SUCESS; + break; + } //switch + DEB("sc: 0x%x, sc_seen: 0x%x, sc_since_last_wkld:%d, error:%d, ret: %d\n", + next_sc, parser->sc_seen, parser->sc_seen_since_last_wkld, + *codec_specific_errors, ret); + + return ret; +} // viddec_vc1_wkld_done + +#ifdef VBP +void viddec_vc1_get_context_size(viddec_parser_memory_sizes_t *size) +#else +static void viddec_vc1_get_context_size(viddec_parser_memory_sizes_t *size) +#endif +{ + size->context_size = sizeof(vc1_viddec_parser_t); + size->persist_size = 0; + return; +} // viddec_vc1_get_context_size + +#ifdef VBP +uint32_t viddec_vc1_is_start_frame(void *ctxt) +#else +static uint32_t viddec_vc1_is_start_frame(void *ctxt) +#endif +{ + vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *) ctxt; + return parser->is_frame_start; +} // viddec_vc1_is_start_frame + +void viddec_vc1_get_ops(viddec_parser_ops_t *ops) +{ + ops->init = viddec_vc1_init; + ops->parse_syntax = viddec_vc1_parse; + ops->get_cxt_size = viddec_vc1_get_context_size; + ops->is_wkld_done = viddec_vc1_wkld_done; + ops->is_frame_start = viddec_vc1_is_start_frame; + return; +} // viddec_vc1_get_ops + diff --git a/mixvbp/vbp_plugin/vc1/viddec_vc1_workload.c b/mixvbp/vbp_plugin/vc1/viddec_vc1_workload.c new file mode 100755 index 0000000..cf6fa7f --- /dev/null +++ b/mixvbp/vbp_plugin/vc1/viddec_vc1_workload.c @@ -0,0 +1,960 @@ +/* Any workload management goes in this file */ + +#include "viddec_fw_debug.h" +#include "vc1.h" +#include "vc1parse.h" +#include "viddec_fw_workload.h" +#include +#include "viddec_pm_utils_bstream.h" + +/* this function returns workload frame types corresponding to VC1 PTYPES (frame types) + * VC1 frame types: can be found in vc1parse_common_defs.h + * workload frame types are in viddec_workload.h +*/ +static inline uint32_t vc1_populate_frame_type(uint32_t vc1_frame_type) +{ + uint32_t viddec_frame_type; + + switch (vc1_frame_type) + { + case VC1_I_FRAME: + viddec_frame_type = VIDDEC_FRAME_TYPE_I; + break; + case VC1_P_FRAME: + viddec_frame_type = VIDDEC_FRAME_TYPE_P; + break; + case VC1_B_FRAME: + viddec_frame_type = VIDDEC_FRAME_TYPE_B; + break; + case VC1_BI_FRAME: + viddec_frame_type = VIDDEC_FRAME_TYPE_BI; + break; + case 
VC1_SKIPPED_FRAME :
+        viddec_frame_type = VIDDEC_FRAME_TYPE_SKIP;
+        break;
+    default:
+        viddec_frame_type = VIDDEC_FRAME_TYPE_INVALID;
+        break;
+    } // switch on vc1 frame type
+
+    return(viddec_frame_type);
+} // vc1_populate_frame_type
+
+static void translate_parser_info_to_frame_attributes(void *parent, vc1_viddec_parser_t *parser)
+{
+    viddec_workload_t *wl = viddec_pm_get_header( parent );
+    viddec_frame_attributes_t *attrs = &wl->attrs;
+    vc1_Info *info = &parser->info;
+    unsigned i;
+
+    /* typical sequence layer and entry_point data */
+    attrs->cont_size.height = info->metadata.height * 2 + 2;
+    attrs->cont_size.width = info->metadata.width * 2 + 2;
+
+    /* frame type */
+    /* we can have two fields with different types for field interlace coding mode */
+    if (info->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) {
+        attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField1);
+        attrs->bottom_field_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField2);
+    } else {
+        attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTYPE);
+        attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; //unknown
+    }
+
+    /* frame counter */
+    attrs->vc1.tfcntr = info->picLayerHeader.TFCNTR;
+
+    /* TFF, repeat frame, field */
+    attrs->vc1.tff = info->picLayerHeader.TFF;
+    attrs->vc1.rptfrm = info->picLayerHeader.RPTFRM;
+    attrs->vc1.rff = info->picLayerHeader.RFF;
+
+    /* PAN Scan */
+    attrs->vc1.ps_present = info->picLayerHeader.PS_PRESENT;
+    attrs->vc1.num_of_pan_scan_windows = info->picLayerHeader.number_of_pan_scan_window;
+    for (i = 0; i < attrs->vc1.num_of_pan_scan_windows; i++) {
+        attrs->vc1.pan_scan_window[i].hoffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].hoffset;
+        attrs->vc1.pan_scan_window[i].voffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].voffset;
+        attrs->vc1.pan_scan_window[i].width = info->picLayerHeader.PAN_SCAN_WINDOW[i].width;
+        attrs->vc1.pan_scan_window[i].height = info->picLayerHeader.PAN_SCAN_WINDOW[i].height;
+    } //end for i
+
+    return;
+} // translate_parser_info_to_frame_attributes
+
+/* sends VIDDEC_WORKLOAD_VC1_PAST_FRAME item */
+static inline void vc1_send_past_ref_items(void *parent)
+{
+    viddec_workload_item_t wi;
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_PAST_FRAME;
+    wi.ref_frame.reference_id = 0;
+    wi.ref_frame.luma_phys_addr = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    viddec_pm_append_workitem( parent, &wi, false );
+    return;
+}
+
+/* send future frame item */
+static inline void vc1_send_future_ref_items(void *parent)
+{
+    viddec_workload_item_t wi;
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_FUTURE_FRAME;
+    wi.ref_frame.reference_id = 0;
+    wi.ref_frame.luma_phys_addr = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    viddec_pm_append_workitem( parent, &wi, false );
+    return;
+}
+
+/* send reorder frame item to host
+ * future frame gets push to past */
+static inline void send_reorder_ref_items(void *parent)
+{
+    viddec_workload_item_t wi;
+    wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER;
+    wi.ref_reorder.ref_table_offset = 0;
+    wi.ref_reorder.ref_reorder_00010203 = 0x01010203; //put reference frame index 1 as reference index 0
+    wi.ref_reorder.ref_reorder_04050607 = 0x04050607; // index 4,5,6,7 stay the same
+    viddec_pm_append_workitem( parent, &wi, false );
+    return;
+} // send_reorder_ref_items
+
+
+/* sends VIDDEC_WORKLOAD_VC1_REGS_REF_FRAME_TYPE item */
+static inline void vc1_send_ref_fcm_items(void *parent, uint32_t past_fcm, uint32_t future_fcm)
+{
+    viddec_workload_item_t wi;
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_REF_FRAME_TYPE;
+    wi.vwi_payload[0] =
0; + wi.vwi_payload[1]= past_fcm; + wi.vwi_payload[2]= future_fcm; + viddec_pm_append_workitem( parent, &wi, false ); + return; +} + + + +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_SEQ_ENTRY_registers(void *parent, vc1_viddec_parser_t *parser) +{ + uint32_t stream_format1 = 0; + uint32_t stream_format2 = 0; + uint32_t entrypoint1 = 0; + viddec_workload_item_t wi; + + vc1_metadata_t *md = &(parser->info.metadata); + + + + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, PROFILE, stream_format1, md->PROFILE); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, LEVEL, stream_format1, md->LEVEL); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, CHROMAFORMAT, stream_format1, md->CHROMAFORMAT); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, FRMRTQ, stream_format1, md->FRMRTQ); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, BITRTQ, stream_format1, md->BITRTQ); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, POSTPRO, stream_format1, md->POSTPROCFLAG); + + + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, PULLDOWN, stream_format2, md->PULLDOWN); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, INTERLACE, stream_format2, md->INTERLACE); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, TFCNTRFLAG, stream_format2, md->TFCNTRFLAG); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, FINTERPFLAG, stream_format2, md->FINTERPFLAG); + BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, PSF, stream_format2, md->PSF); + + + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, BROKEN_LINK, entrypoint1, md->BROKEN_LINK); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, CLOSED_ENTRY, entrypoint1, md->CLOSED_ENTRY); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, PANSCAN_FLAG, entrypoint1, md->PANSCAN_FLAG); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, REFDIST_FLAG, entrypoint1, md->REFDIST_FLAG); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, LOOPFILTER, entrypoint1, md->LOOPFILTER); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, FASTUVMC, entrypoint1, md->FASTUVMC); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_MV, entrypoint1, md->EXTENDED_MV); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, DQUANT, entrypoint1, md->DQUANT); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, VS_TRANSFORM, entrypoint1, md->VSTRANSFORM); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, OVERLAP, entrypoint1, md->OVERLAP); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, QUANTIZER, entrypoint1, md->QUANTIZER); + BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_DMV, entrypoint1, md->EXTENDED_DMV); + + + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SEQ_ENTRY; + + + wi.vwi_payload[0] = stream_format1; + wi.vwi_payload[1] = stream_format2; + wi.vwi_payload[2] = entrypoint1; + + viddec_pm_append_workitem( parent, &wi, false ); + return; +} // send_reorder_ref_items + + +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_SIZE_AND_AP_RANGEMAP_registers(void *parent, vc1_viddec_parser_t *parser) +{ + uint32_t coded_size = 0; + uint32_t ap_range_map = 0; + + viddec_workload_item_t wi; + + vc1_metadata_t *md = &(parser->info.metadata); + + + BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, WIDTH, coded_size, md->width); + BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, HEIGHT, coded_size, md->height); + + + /* if range reduction is indicated at seq. 
layer, populate range reduction registers for the frame*/ + if (VC1_PROFILE_ADVANCED == md->PROFILE) + { + + + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, ap_range_map, md->RANGE_MAPY_FLAG); + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y, ap_range_map, md->RANGE_MAPY); + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, ap_range_map, md->RANGE_MAPUV_FLAG); + BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV, ap_range_map, md->RANGE_MAPUV); + + + + + } + else + { + ap_range_map = 0; + } + + + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SIZE_AND_AP_RANGEMAP; + + + wi.vwi_payload[0] = 0; + wi.vwi_payload[1] = coded_size; + wi.vwi_payload[2] = ap_range_map; + + viddec_pm_append_workitem( parent, &wi, false ); + return; +} // send_reorder_ref_items + + + +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_SLICE_FRAME_TYPE_INFO_registers(void *parent, vc1_viddec_parser_t *parser) +{ + uint32_t alt_frame_type = 0; + uint32_t frame_type = 0; + + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); + viddec_workload_item_t wi; + + vc1_metadata_t *md = &(parser->info.metadata); + + + BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, FCM, frame_type, pic->FCM); + BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, PTYPE, frame_type, pic->PTYPE); + + alt_frame_type = frame_type; + + if (VC1_PROFILE_ADVANCED == md->PROFILE) + { + if ( (VC1_P_FRAME == pic->PTYPE)||(VC1_B_FRAME == pic->PTYPE) ) + { + BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, pic->PQUANT); + } + } + else + { + if ( VC1_SKIPPED_FRAME== pic->PTYPE) + { + BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, 0); + } else { + BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, pic->PQUANT); + } + } + + + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_FRAME_TYPE_INFO; + + + wi.vwi_payload[0] = 0; + wi.vwi_payload[1] = frame_type; + wi.vwi_payload[2] = alt_frame_type; + + viddec_pm_append_workitem( parent, &wi, false ); + return; +} // send_reorder_ref_items + +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_SLICE_CONTROL_INFO_registers(void *parent, vc1_viddec_parser_t *parser) +{ + uint32_t recon_control = 0; + uint32_t mv_control = 0; + uint32_t blk_control = 0; + + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); + viddec_workload_item_t wi; + + int is_previous_ref_rr=0; + + vc1_metadata_t *md = &(parser->info.metadata); + + + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, RNDCTRL, recon_control, md->RNDCTRL); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UVSAMP, recon_control, pic->UVSAMP); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQUANT, recon_control, pic->PQUANT); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, HALFQP, recon_control, pic->HALFQP); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UNIFORM_QNT, recon_control, pic->UniformQuant); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, POSTPROC, recon_control, pic->POSTPROC); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, CONDOVER, recon_control, pic->CONDOVER); + BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQINDEX_LE8, recon_control, (pic->PQINDEX <= 8)); + + /* Get the range reduced status of the previous frame */ + switch (pic->PTYPE) + { + case VC1_P_FRAME: + { + is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].rr_frm; + break; + } + case VC1_B_FRAME: + { + is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].rr_frm; + break; + } + default: + { + break; + } + } + + if (pic->RANGEREDFRM) + { + + if (!is_previous_ref_rr) + { + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, 
recon_control, 1); + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, recon_control, 1); + } + } + else + { + /* if current frame is not RR but previous was RR, scale up the reference frame ( RANGE_REF_RED_TYPE = 0) */ + if (is_previous_ref_rr) + { + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, recon_control, 1); + BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, recon_control, 0); + } + } // end for RR upscale + + + + + + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVRANGE, mv_control, pic->MVRANGE); + if ( pic->MVMODE == VC1_MVMODE_INTENSCOMP) + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, mv_control, pic->MVMODE2); + else + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, mv_control, pic->MVMODE); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVTAB, mv_control, pic->MVTAB); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, DMVRANGE, mv_control, pic->DMVRANGE); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MV4SWITCH, mv_control, pic->MV4SWITCH); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MBMODETAB, mv_control, pic->MBMODETAB); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, NUMREF, mv_control, + pic->NUMREF || ((pic->PTYPE == VC1_B_FRAME) && ( pic->FCM == VC1_FCM_FIELD_INTERLACE ) )); + BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, REFFIELD, mv_control, pic->REFFIELD); + + + + // BLOCK CONTROL REGISTER Offset 0x2C + BF_WRITE( VC1_0_SEQPIC_BLOCK_CONTROL, CBPTAB, blk_control, pic->CBPTAB); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTMFB, blk_control, pic->TTMBF); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTFRM, blk_control, pic->TTFRM); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV2BPTAB, blk_control, pic->MV2BPTAB); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV4BPTAB, blk_control, pic->MV4BPTAB); + if ((pic->CurrField == 1) && (pic->SLICE_ADDR)) + { + int mby = md->height * 2 + 2; + mby = (mby + 15 ) / 16; + pic->SLICE_ADDR -= (mby/2); + } + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, INITIAL_MV_Y, blk_control, pic->SLICE_ADDR); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID2, blk_control, md->bp_raw[0]); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID1, blk_control, md->bp_raw[1]); + BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID0, blk_control, md->bp_raw[2]); + + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_CONTROL_INFO; + + + wi.vwi_payload[0] = recon_control; + wi.vwi_payload[1] = mv_control; + wi.vwi_payload[2] = blk_control; + + viddec_pm_append_workitem( parent, &wi, false ); + return; +} // send_reorder_ref_items + +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_SLICE_OTHER_INFO_registers(void *parent, vc1_viddec_parser_t *parser) +{ + uint32_t trans_data = 0; + uint32_t vop_dquant = 0; + uint32_t ref_bfraction = 0; + + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); + viddec_workload_item_t wi; + + vc1_metadata_t *md = &(parser->info.metadata); + + BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_DEN, ref_bfraction, pic->BFRACTION_DEN); + BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_NUM, ref_bfraction, pic->BFRACTION_NUM); + BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, REFDIST, ref_bfraction, md->REFDIST); + + if (md->DQUANT) + { + if (pic->PQDIFF == 7) + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, vop_dquant, pic->ABSPQ); + else if (pic->DQUANTFRM == 1) + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, vop_dquant, pic->PQUANT + pic->PQDIFF + 1); + } + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQUANTFRM, vop_dquant, pic->DQUANTFRM); + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, 
DQPROFILE, vop_dquant, pic->DQPROFILE); + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQES, vop_dquant, pic->DQSBEDGE); + BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQBILEVEL, vop_dquant, pic->DQBILEVEL); + + BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM, trans_data, pic->TRANSACFRM); + BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM2, trans_data, pic->TRANSACFRM2); + BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSDCTAB, trans_data, pic->TRANSDCTAB); + + + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_OTHER_INFO; + + + wi.vwi_payload[0] = trans_data; + wi.vwi_payload[1] = vop_dquant; + wi.vwi_payload[2] = ref_bfraction; + + viddec_pm_append_workitem( parent, &wi, false ); + return; +} // send_reorder_ref_items + + + +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(void *parent, vc1_viddec_parser_t *parser) +{ + uint32_t imgstruct = 0; + uint32_t fieldref_ctrl_id = 0; + uint32_t smp_rangemap = 0; + + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); + viddec_workload_item_t wi; + + vc1_metadata_t *md = &(parser->info.metadata); + + if ( pic->FCM == VC1_FCM_FIELD_INTERLACE ) { + BF_WRITE(VC1_0_SEQPIC_IMAGE_STRUCTURE, IMG_STRUC, imgstruct, (pic->BottomField) ? 2 : 1); + } + + BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, TOP_FIELD, fieldref_ctrl_id, pic->BottomField); + BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, SECOND_FIELD, fieldref_ctrl_id, pic->CurrField); + if (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) + { + BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, fieldref_ctrl_id, 1); + } + else + { + BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, fieldref_ctrl_id, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[pic->CurrField]); + } + + if (VC1_PROFILE_ADVANCED != md->PROFILE) + { + if (pic->RANGEREDFRM) + { + //BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, smp_rangemap, md->RANGE_MAPY_FLAG); + //BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, smp_rangemap, md->RANGE_MAPUV_FLAG); + smp_rangemap = 0x11; + } + + } + + wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO; + + + wi.vwi_payload[0] = imgstruct; + wi.vwi_payload[1] = fieldref_ctrl_id; + wi.vwi_payload[2] = smp_rangemap; + + viddec_pm_append_workitem( parent, &wi, false ); + return; +} // send_reorder_ref_items + + +/* send reorder frame item to host + * future frame gets push to past */ +static inline void send_INT_COM_registers(void *parent, vc1_viddec_parser_t *parser) +{ + uint32_t intcomp_fwd_top = 0; + uint32_t intcomp_fwd_bot = 0; + uint32_t intcomp_bwd_top = 0; + uint32_t intcomp_bwd_bot = 0; + uint32_t intcomp_cur = 0; + + uint32_t POS_2nd_INTCOMP = 13; + uint32_t MASK_1st_INTCOMP = 0x1fff; + uint32_t MASK_2nd_INTCOMP = 0x3ffe000; + + vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); + viddec_workload_item_t wi; + + vc1_metadata_t *md = &(parser->info.metadata); + + + + if (VC1_SKIPPED_FRAME == pic->PTYPE) + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top =0; + return; + } + + if ( VC1_FCM_FIELD_INTERLACE != pic->FCM ) + { + + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, intcomp_cur, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp_cur, pic->LUMSCALE); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp_cur, pic->LUMSHIFT); + + if ( !((pic->MVMODE == VC1_MVMODE_INTENSCOMP) || (pic->INTCOMP)) ) + intcomp_cur = 0; + + if ( (VC1_BI_FRAME==pic->PTYPE)||(VC1_B_FRAME==pic->PTYPE) ) + { + 
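        /* B/BI pictures are never references: the branch below clears the
         * current slot's saved intensity-compensation state and sources the
         * forward/backward state from the reference history instead. */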
parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = 0; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = 0; + + intcomp_bwd_top = parser->intcomp_top[0]; + intcomp_bwd_bot = parser->intcomp_bot[0]; + intcomp_fwd_bot = parser->intcomp_bot[1]; + + + if ( parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != (-1) ) + { + if (VC1_SKIPPED_FRAME != parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].type) + intcomp_fwd_top = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].intcomp_top; + } + else + { + if (VC1_SKIPPED_FRAME != parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].type) + intcomp_fwd_top = parser->intcomp_top[1]; + } + } + else + { //I,P TYPE + + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; + + if (VC1_FCM_FIELD_INTERLACE == parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm) + { + intcomp_fwd_top = parser->intcomp_top[1]; + intcomp_fwd_top |= intcomp_cur << POS_2nd_INTCOMP; + + intcomp_fwd_bot = parser->intcomp_bot[1]; + intcomp_fwd_bot |= intcomp_cur << POS_2nd_INTCOMP; + } + else + { + intcomp_fwd_top = intcomp_cur;// << POS_2nd_INTCOMP; + intcomp_fwd_bot = 0; + } + } + } + else + { + //FIELD INTERLACE + //if(0!=md->INTCOMPFIELD) + //No debugging + + if (md->INTCOMPFIELD == VC1_INTCOMP_BOTTOM_FIELD) + { + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, intcomp_cur, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, intcomp_cur, md->LUMSCALE2); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, intcomp_cur, md->LUMSHIFT2); + } + else + { + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, intcomp_cur, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp_cur, pic->LUMSCALE); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp_cur, pic->LUMSHIFT); + } + + if (md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD) + { + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, intcomp_cur, 1); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, intcomp_cur, md->LUMSCALE2); + BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, intcomp_cur, md->LUMSHIFT2); + } + + if (pic->MVMODE != VC1_MVMODE_INTENSCOMP) + { + intcomp_cur = 0; + } + + if (pic->CurrField == 0) + { + if (pic->TFF) + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; + } + else + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp_cur; + } + } + else + { + if (pic->TFF) + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp_cur; + } + else + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; + } + } + + if (pic->CurrField == 1) + { //SECOND FIELD + + if (VC1_B_FRAME != pic->PTYPE) + { + if (pic->TFF) + { + intcomp_bwd_top = intcomp_cur & MASK_1st_INTCOMP; + + intcomp_fwd_bot = (parser->intcomp_bot[1] & MASK_2nd_INTCOMP) >> POS_2nd_INTCOMP; //??????? 
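        /* Layout note (inferred from the masks above): each intensity-
         * compensation record is 13 bits -- INT_COMP enable, 6-bit LUMSCALE,
         * 6-bit LUMSHIFT -- and two records share one register: the first in
         * bits 0..12 (MASK_1st_INTCOMP), the second in bits 13..25
         * (MASK_2nd_INTCOMP), hence the shifts by POS_2nd_INTCOMP. */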
+                    intcomp_fwd_bot |= (intcomp_cur & MASK_2nd_INTCOMP);
+
+                    intcomp_fwd_top = parser->intcomp_top[1];
+                }
+                else
+                {
+                    intcomp_bwd_bot = (intcomp_cur & MASK_2nd_INTCOMP) >> POS_2nd_INTCOMP;
+
+                    intcomp_fwd_top = (parser->intcomp_top[1] & MASK_2nd_INTCOMP) >> POS_2nd_INTCOMP;
+                    intcomp_fwd_top |= (intcomp_cur & MASK_1st_INTCOMP) << POS_2nd_INTCOMP;
+
+                    intcomp_fwd_bot = parser->intcomp_bot[1];
+                }
+            }
+            else
+            { //B TYPE
+                intcomp_fwd_top = parser->intcomp_top[1];
+                intcomp_fwd_bot = parser->intcomp_bot[1];
+
+                intcomp_bwd_top = parser->intcomp_top[0];
+                intcomp_bwd_bot = parser->intcomp_bot[0];
+            }
+        }
+        else
+        { //FIRST FIELD
+
+            if ( (VC1_B_FRAME==pic->PTYPE)||(VC1_BI_FRAME==pic->PTYPE) )
+            {
+                if (VC1_SKIPPED_FRAME!=parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].type)
+                {
+                    intcomp_fwd_top = parser->intcomp_top[1];
+                    intcomp_fwd_bot = parser->intcomp_bot[1];
+                }
+
+                intcomp_bwd_top = parser->intcomp_top[0];
+                intcomp_bwd_bot = parser->intcomp_bot[0];
+
+            }
+            else
+            { //I,P TYPE
+
+                intcomp_fwd_top = parser->intcomp_top[1] & MASK_1st_INTCOMP;
+                intcomp_fwd_top |= (intcomp_cur & MASK_1st_INTCOMP) << POS_2nd_INTCOMP;
+
+                intcomp_fwd_bot = parser->intcomp_bot[1] & MASK_1st_INTCOMP;
+                intcomp_fwd_bot |= (intcomp_cur & MASK_2nd_INTCOMP);
+            } //pic->PTYPE == I,P TYPE
+        } //pic->CurrField == 0
+    } // end field-interlace path (VC1_FCM_FIELD_INTERLACE == pic->FCM)
+
+    if ( (VC1_B_FRAME != pic->PTYPE) && (VC1_BI_FRAME != pic->PTYPE) )
+    {
+        parser->intcomp_top[1] = intcomp_fwd_top;
+        parser->intcomp_bot[1] = intcomp_fwd_bot;
+
+        parser->intcomp_top[0] = intcomp_bwd_top;
+        parser->intcomp_bot[0] = intcomp_bwd_bot;
+    }
+
+    //OS_INFO("intcomp_fwd_top = %d\n", intcomp_fwd_top);
+    //OS_INFO("intcomp_fwd_bot = %d\n", intcomp_fwd_bot);
+
+
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_INT_COM_FW;
+
+    wi.vwi_payload[0] = 0;
+    wi.vwi_payload[1] = intcomp_fwd_top;
+    wi.vwi_payload[2] = intcomp_fwd_bot;
+
+    viddec_pm_append_workitem( parent, &wi, false );
+
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_INT_COM_BW;
+
+    wi.vwi_payload[0] = 0;
+    wi.vwi_payload[1] = intcomp_bwd_top;
+    wi.vwi_payload[2] = intcomp_bwd_bot;
+
+    viddec_pm_append_workitem( parent, &wi, false );
+
+
+    return;
+} // send_INT_COM_registers
+
+
+/** update workload with more workload items for ref and update values to store...
+ */ +void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser) +{ + vc1_metadata_t *md = &(parser->info.metadata); + viddec_workload_t *wl = viddec_pm_get_header(parent); + int frame_type = parser->info.picLayerHeader.PTYPE; + int frame_id = 1; // new reference frame is assigned index 1 + + /* init */ + memset(&parser->spr, 0, sizeof(parser->spr)); + wl->is_reference_frame = 0; + + /* set flag - extra ouput frame needed for range adjustment (range mapping or range reduction */ + if (parser->info.metadata.RANGE_MAPY_FLAG || + parser->info.metadata.RANGE_MAPUV_FLAG || + parser->info.picLayerHeader.RANGEREDFRM) + { + wl->is_reference_frame |= WORKLOAD_FLAGS_RA_FRAME; + } + + LOG_CRIT("vc1_start_new_frame: frame_type=%d \n",frame_type); + + parser->is_reference_picture = ((VC1_B_FRAME != frame_type) && (VC1_BI_FRAME != frame_type)); + + /* reference / anchor frames processing + * we need to send reorder before reference frames */ + if (parser->is_reference_picture) + { + /* one frame has been sent */ + if (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != -1) + { + /* there is a frame in the reference buffer, move it to the past */ + send_reorder_ref_items(parent); + } + } + + /* send workitems for reference frames */ + switch ( frame_type ) + { + case VC1_B_FRAME: + { + vc1_send_past_ref_items(parent); + vc1_send_future_ref_items(parent); + vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].fcm, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm); + break; + } + case VC1_SKIPPED_FRAME: + { + wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME; + vc1_send_past_ref_items(parent); + vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm, vc1_PictureFormatNone); + break; + } + case VC1_P_FRAME: + { + vc1_send_past_ref_items( parent); + vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm, vc1_PictureFormatNone); + break; + } + default: + break; + } + + /* reference / anchor frames from previous code + * we may need it for frame reduction */ + if (parser->is_reference_picture) + { + wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (frame_id & WORKLOAD_REFERENCE_FRAME_BMASK); + + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].id = frame_id; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].fcm = parser->info.picLayerHeader.FCM; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[0] = (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME); + if (parser->info.picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[1] = (parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME); + } + else + { + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[1] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[0]; + } + + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].type = parser->info.picLayerHeader.PTYPE; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_en = md->RANGERED; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_frm = parser->info.picLayerHeader.RANGEREDFRM; + parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].tff = parser->info.picLayerHeader.TFF; + + LOG_CRIT("anchor[0] = %d, anchor[1] = %d", + parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[0], + parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[1] ); + } + + if ( parser->info.picLayerHeader.PTYPE == VC1_SKIPPED_FRAME ) + { + translate_parser_info_to_frame_attributes( parent, parser ); + return; + } + + translate_parser_info_to_frame_attributes( parent, parser ); + + + send_SEQ_ENTRY_registers(parent, parser); + 
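
Each send_*_registers helper in this file follows the same pattern: pack up to three 32-bit register images into a workload item and append it. A condensed sketch of that pattern, using only the types and calls already present in this file; the helper name itself is illustrative and not part of the patch:

    static inline void send_three_regs(void *parent, uint32_t item_type,
                                       uint32_t r0, uint32_t r1, uint32_t r2)
    {
        viddec_workload_item_t wi;

        wi.vwi_type = item_type;          /* a VIDDEC_WORKLOAD_VC1_REGS_* id */
        wi.vwi_payload[0] = r0;
        wi.vwi_payload[1] = r1;
        wi.vwi_payload[2] = r2;
        viddec_pm_append_workitem(parent, &wi, false);
    }
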
+    send_SIZE_AND_AP_RANGEMAP_registers(parent, parser);
+    send_SLICE_FRAME_TYPE_INFO_registers(parent, parser);
+    send_SLICE_CONTROL_INFO_registers(parent, parser);
+    send_SLICE_OTHER_INFO_registers(parent, parser);
+    send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser);
+    send_INT_COM_registers(parent, parser);
+
+    {
+        viddec_workload_item_t wi;
+        uint32_t bit, byte;
+        uint8_t is_emul = 0;
+
+        viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul);
+
+        // Send current bit offset and current slice
+        wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET;
+
+        // If slice data starts in the middle of the emulation prevention sequence -
+        // Special Case 1----[is_emul = 1]:
+        // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data
+        // to the decoder starting at the first byte of 0s so that the decoder can detect the
+        // emulation prevention. But the actual data starts at offset 8 in this bit sequence.
+
+        // Special Case 2----[is_emul = 2]:
+        // If slice data starts in the middle of the emulation prevention sequence -
+        // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need to read out this byte.
+        //
+
+        wi.vwi_payload[0] = bit + (is_emul*8);
+        wi.vwi_payload[1] = 0xdeaddead;
+        wi.vwi_payload[2] = 0xdeaddead;
+        viddec_pm_append_workitem( parent, &wi, false );
+    }
+
+
+    viddec_pm_append_pixeldata( parent );
+
+    return;
+} // vc1_parse_emit_frame_start
+
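To make the is_emul adjustment used by the functions in this file concrete, here is a small illustrative helper (the name is hypothetical; it is not part of the parser):

    #include <stdint.h>

    /* Illustrative only: compute the payload bit offset sent to the decoder.
     * is_emul == 0: the reported bit offset is already exact.
     * is_emul == 1: data is fed from the first 0x00 of a "00 00 03" emulation
     *               prevention sequence, so real slice data begins 8 bits later.
     * is_emul == 2: two extra leading bytes were included, so it begins
     *               16 bits later.                                            */
    static uint32_t slice_bit_offset(uint32_t bit, uint8_t is_emul)
    {
        return bit + (uint32_t)is_emul * 8;
    }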
+void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser)
+{
+
+    send_SLICE_FRAME_TYPE_INFO_registers(parent, parser);
+    send_SLICE_CONTROL_INFO_registers(parent, parser);
+    send_SLICE_OTHER_INFO_registers(parent, parser);
+    send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser);
+    send_INT_COM_registers(parent, parser);
+
+    {
+        viddec_workload_item_t wi;
+        uint32_t bit, byte;
+        uint8_t is_emul = 0;
+
+        viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul);
+
+
+        // Send current bit offset and current slice
+        wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET;
+        // If slice data starts in the middle of the emulation prevention sequence -
+        // Special Case 1----[is_emul = 1]:
+        // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data
+        // to the decoder starting at the first byte of 0s so that the decoder can detect the
+        // emulation prevention. But the actual data starts at offset 8 in this bit sequence.
+
+        // Special Case 2----[is_emul = 2]:
+        // If slice data starts in the middle of the emulation prevention sequence -
+        // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need to read out this byte.
+        //
+
+        wi.vwi_payload[0] = bit + (is_emul*8);
+        wi.vwi_payload[1] = 0xdeaddead;
+        wi.vwi_payload[2] = 0xdeaddead;
+        viddec_pm_append_workitem( parent, &wi, false );
+    }
+
+    viddec_pm_append_pixeldata( parent );
+
+    return;
+
+} // vc1_parse_emit_second_field_start
+
+
+void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser)
+{
+    send_SLICE_FRAME_TYPE_INFO_registers(parent, parser);
+    send_SLICE_CONTROL_INFO_registers(parent, parser);
+    send_SLICE_OTHER_INFO_registers(parent, parser);
+    //send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser);
+    //send_INT_COM_registers(parent, parser);
+
+    {
+        viddec_workload_item_t wi;
+        uint32_t bit, byte;
+        uint8_t is_emul = 0;
+
+        viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul);
+
+        // Send current bit offset and current slice
+        wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET;
+
+        // If slice data starts in the middle of the emulation prevention sequence -
+        // Special Case 1----[is_emul = 1]:
+        // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data
+        // to the decoder starting at the first byte of 0s so that the decoder can detect the
+        // emulation prevention. But the actual data starts at offset 8 in this bit sequence.
+
+        // Special Case 2----[is_emul = 2]:
+        // If slice data starts in the middle of the emulation prevention sequence -
+        // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need to read out this byte.
+        //
+
+        wi.vwi_payload[0] = bit + (is_emul*8);
+        wi.vwi_payload[1] = 0xdeaddead;
+        wi.vwi_payload[2] = 0xdeaddead;
+        viddec_pm_append_workitem( parent, &wi, false );
+    }
+
+    viddec_pm_append_pixeldata( parent );
+
+    return;
+}
+
+
+void vc1_end_frame(vc1_viddec_parser_t *parser)
+{
+    /* update status of reference frames */
+    if (parser->is_reference_picture)
+    {
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_2] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1];
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_1] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_0];
+    }
+
+    return;
+} // vc1_end_frame
+
diff --git a/mixvbp/vbp_plugin/vp8/Android.mk b/mixvbp/vbp_plugin/vp8/Android.mk
new file mode 100755
index 0000000..03de2cf
--- /dev/null
+++ b/mixvbp/vbp_plugin/vp8/Android.mk
@@ -0,0 +1,24 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+    vp8parse.c \
+    bool_coder.c \
+    viddec_vp8_parse.c
+
+LOCAL_CFLAGS := -DVBP -DHOST_ONLY
+
+LOCAL_C_INCLUDES := \
+    $(MIXVBP_DIR)/include \
+    $(MIXVBP_DIR)/vbp_manager/include \
+    $(LOCAL_PATH)/include
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libmixvbp_vp8
+
+LOCAL_SHARED_LIBRARIES := \
+    libmixvbp \
+    liblog
+
+include $(BUILD_SHARED_LIBRARY)
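Before the boolean decoder sources below, a brief orientation (an illustrative sketch, not part of the patch): each vp8_decode_bool() call splits the decoder's range in proportion to an 8-bit probability, so decoding against probability 128 yields one unbiased literal bit, and vp8_read_bits() assembles n such bits most-significant-bit first. A minimal usage sketch against the API declared in the bool_coder.h header added below:

    #include <stdint.h>
    #include "bool_coder.h"

    /* Sketch: read a 4-bit literal from the start of a VP8 partition.
     * The buffer is assumed to hold at least 4 bytes, since
     * vp8_start_decode() primes 32 bits of the value register. */
    static uint32_t read_nibble_example(uint8_t *partition)
    {
        BOOL_CODER bc;
        vp8_start_decode(&bc, partition);

        /* Equivalent to vp8_read_bits(&bc, 4): four unbiased bool decodes,
         * most significant bit first. */
        uint32_t v = 0;
        for (int bit = 3; bit >= 0; bit--)
            v |= (uint32_t)vp8_decode_bool(&bc, 128) << bit;
        return v;
    }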
diff --git a/mixvbp/vbp_plugin/vp8/bool_coder.c b/mixvbp/vbp_plugin/vp8/bool_coder.c
new file mode 100755
index 0000000..746d63e
--- /dev/null
+++ b/mixvbp/vbp_plugin/vp8/bool_coder.c
@@ -0,0 +1,95 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation. All rights reserved.
+* Copyright (c) Imagination Technologies Limited, UK
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#include "bool_coder.h"
+
+uint32_t vp8_read_bits(BOOL_CODER *br, int32_t bits)
+{
+    uint32_t z = 0;
+    int bit;
+    for (bit = bits - 1; bit >= 0; bit--)
+    {
+        z |= (vp8_decode_bool(br, 128) << bit);
+    }
+    return z;
+}
+
+void vp8_start_decode(BOOL_CODER *br, uint8_t *source)
+{
+    br->range  = 255;
+    br->count  = 8;
+    br->buffer = source;
+    br->pos    = 0;
+    br->value  = (br->buffer[0]<<24)+(br->buffer[1]<<16)+(br->buffer[2]<<8)+(br->buffer[3]);
+    br->pos   += 4;
+}
+
+int32_t vp8_decode_bool(BOOL_CODER *br, int32_t probability)
+{
+    uint32_t bit = 0;
+    uint32_t split;
+    uint32_t bigsplit;
+    uint32_t count = br->count;
+    uint32_t range = br->range;
+    uint32_t value = br->value;
+
+    split = 1 + (((range - 1) * probability) >> 8);
+    bigsplit = (split << 24);
+
+    range = split;
+    if (value >= bigsplit)
+    {
+        range = br->range - split;
+        value = value - bigsplit;
+        bit = 1;
+    }
+
+    if (range >= 0x80)
+    {
+        br->value = value;
+        br->range = range;
+        return bit;
+    }
+    else
+    {
+        do
+        {
+            range += range;
+            value += value;
+
+            if (!--count)
+            {
+                count = 8;
+                value |= br->buffer[br->pos];
+                br->pos++;
+            }
+        }
+        while (range < 0x80);
+    }
+    br->count = count;
+    br->value = value;
+    br->range = range;
+    return bit;
+}
diff --git a/mixvbp/vbp_plugin/vp8/include/bool_coder.h b/mixvbp/vbp_plugin/vp8/include/bool_coder.h
new file mode 100755
index 0000000..57660b7
--- /dev/null
+++ b/mixvbp/vbp_plugin/vp8/include/bool_coder.h
@@ -0,0 +1,54 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation. All rights reserved.
+* Copyright (c) Imagination Technologies Limited, UK
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#ifndef _BOOL_CODER_H_
+#define _BOOL_CODER_H_
+
+#include <stdio.h>
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+
+typedef struct _BOOL_CODER
+{
+    uint32_t range;   // always identical to encoder's range
+    uint32_t value;   // contains at least 24 significant bits
+    int32_t  count;   // # of bits shifted out of value, at most 7
+    uint32_t pos;
+    uint8_t *buffer;  // pointer to next compressed data byte to be read
+} BOOL_CODER;
+
+typedef struct _BITREADER
+{
+    int32_t bitsinremainder;  // # of bits still used in remainder
+    uint32_t remainder;       // remaining bits from original long
+    const uint8_t *position;  // character pointer position within data
+} BITREADER;
+
+void vp8_start_decode(BOOL_CODER *br, uint8_t *source);
+int32_t vp8_decode_bool(BOOL_CODER *br, int32_t probability);
+uint32_t vp8_read_bits(BOOL_CODER *br, int32_t bits);
+
+#endif
diff --git a/mixvbp/vbp_plugin/vp8/include/vp8.h b/mixvbp/vbp_plugin/vp8/include/vp8.h
new file mode 100755
index 0000000..06a7e61
--- /dev/null
+++ b/mixvbp/vbp_plugin/vp8/include/vp8.h
@@ -0,0 +1,356 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation. All rights reserved.
+* Copyright (c) Imagination Technologies Limited, UK
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#ifndef _VP8_H_
+#define _VP8_H_
+#include "bool_coder.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* VP8 specifies that only one frame is supported */
+#define VP8_MAX_NUM_PICTURES 1
+/* VP8 has no definition of slice */
+#define VP8_MAX_NUM_SLICES 1
+
+#define MAX_MB_SEGMENTS 4
+#define MB_FEATURE_TREE_PROBS 3
+#define MAX_REF_LF_DELTAS 4
+#define MAX_MODE_LF_DELTAS 4
+#define MAX_PARTITIONS 9
+#define BLOCK_TYPES 4
+#define COEF_BANDS 8
+#define PREV_COEF_CONTEXTS 3
+#define MAX_COEF_TOKENS 12
+#define MAX_ENTROPY_TOKENS 12
+#define SEGMENT_DELTADATA 0
+#define SEGMENT_ABSDATA 1
+#define MAX_LOOP_FILTER 63
+#define MAX_QINDEX 127
+
+    typedef uint8_t vp8_prob;
+
+    typedef enum
+    {
+        /*!\brief Operation completed without error */
+        VP8_NO_ERROR,
+
+        /*!\brief Unspecified error */
+        VP8_UNKNOWN_ERROR,
+
+        /*!\brief Memory operation failed */
+        VP8_MEMORY_ERROR,
+
+        VP8_NO_INITIALIZATION,
+
+        VP8_CORRUPT_FRAME,
+
+        VP8_UNSUPPORTED_BITSTREAM,
+
+        VP8_UNSUPPORTED_VERSION,
+
+        VP8_INVALID_FRAME_SYNC_CODE,
+
+        VP8_UNEXPECTED_END_OF_BITSTREAM,
+
+    } vp8_Status;
+
+    enum
+    {
+        VP8_MV_max = 1023,                  /* max absolute value of a MV component */
+        VP8_MV_vals = (2 * VP8_MV_max) + 1, /* # possible values "" */
+
+        VP8_MV_long_width = 10,             /* Large MVs have 9 bit magnitudes */
+        VP8_MV_num_short = 8,               /* magnitudes 0 through 7 */
+
+        /* probability offsets for coding each MV component */
+        VP8_MV_pis_short = 0,               /* short (<= 7) vs long (>= 8) */
+        VP8_MV_Psign,                       /* sign for non-zero */
+        VP8_MV_Pshort,                      /* 8 short values = 7-position tree */
+
+        VP8_MV_Pbits = VP8_MV_Pshort + VP8_MV_num_short - 1, /* mvlong_width long value bits */
+        VP8_MV_Pcount = VP8_MV_Pbits + VP8_MV_long_width     /* (with independent probabilities) */
+    };
+
+    typedef enum
+    {
+        DC_PRED,    // average of above and left pixels
+        V_PRED,     // vertical prediction
+        H_PRED,     // horizontal prediction
+        TM_PRED,    // Truemotion prediction
+        B_PRED,     // block based prediction, each block has its own prediction mode
+        NEARESTMV,
+        NEARMV,
+        ZEROMV,
+        NEWMV,
+        SPLITMV,
+        MB_MODE_COUNT
+    } VP8_MB_PREDICTION_MODE;
+
+// Segment Feature Masks
+#define VP8_SEGMENT_ALTQ 0x01
+#define VP8_SEGMENT_ALT_LF 0x02
+
+#define VP8_YMODES (B_PRED + 1)
+#define VP8_UV_MODES (TM_PRED + 1)
+
+#define VP8_MVREFS (1 + SPLITMV - NEARESTMV)
+
+    typedef enum
+    {
+        B_DC_PRED,  // average of above and left pixels
+        B_TM_PRED,
+
+        B_VE_PRED,  // vertical prediction
+        B_HE_PRED,  // horizontal prediction
+
+        B_LD_PRED,
+        B_RD_PRED,
+
+        B_VR_PRED,
+        B_VL_PRED,
+        B_HD_PRED,
+        B_HU_PRED,
+
+        LEFT4X4,
+        ABOVE4X4,
+        ZERO4X4,
+        NEW4X4,
+
+        B_MODE_COUNT
+    } VP8_B_PREDICTION_MODE;
+
+#define VP8_BINTRAMODES (B_HU_PRED + 1)  /* 10 */
+#define VP8_SUBMVREFS (1 + NEW4X4 - LEFT4X4)
+
+// frame type
+    typedef enum
+    {
+        KEY_FRAME = 0,
+        INTER_FRAME,
+        SKIPPED_FRAME
+    } FRAME_TYPE;
+
+
+// Color Space
+    typedef enum
+    {
+        REG_YUV = 0, /* Regular yuv */
+        INT_YUV = 1  /* The type of yuv that can be transferred to and from RGB through integer transform */
+    } YUV_TYPE;
+
+// Clamp type
+    typedef enum
+    {
+        RECON_CLAMP_REQUIRED = 0,
+        RECON_CLAMP_NOTREQUIRED = 1
+    } CLAMP_TYPE;
+
+    /* Token partition */
+    typedef enum
+    {
+        ONE_PARTITION = 0,
+        TWO_PARTITION = 1,
+        FOUR_PARTITION = 2,
+        EIGHT_PARTITION = 3
+    } TOKEN_PARTITION;
+
+// Buffer copied
+    typedef enum
+    {
+        BufferCopied_NoneToGolden = 0,
+        BufferCopied_LastToGolden = 1,
+        BufferCopied_AltRefToGolden = 2
+    } GoldenBufferCopiedType;
+
+    typedef enum
+    {
+        BufferCopied_NoneToAltref = 0,
+        BufferCopied_LastToAltRef = 1,
+        BufferCopied_GoldenToAltRef = 2
+    } AltRefBufferCopiedType;
+
+// Macroblock level features
+    typedef enum
+    {
+        MB_LVL_ALT_Q = 0,   /* Use alternate Quantizer .... */
+        MB_LVL_ALT_LF = 1,  /* Use alternate loop filter value... */
+        MB_LVL_MAX = 2      /* Number of MB level features supported */
+    } MB_LVL_FEATURES;
+
+// Loop filter Type
+    typedef enum
+    {
+        NORMAL_LOOPFILTER = 0,
+        SIMPLE_LOOPFILTER = 1
+    } LoopFilterType;
+
+// Segmentation data
+    typedef struct
+    {
+        uint8_t Enabled;
+        uint8_t UpdateMap;
+        uint8_t UpdateData;
+        uint8_t AbsDelta;
+        int8_t FeatureData[MB_LVL_MAX][MAX_MB_SEGMENTS];
+        vp8_prob TreeProbs[MB_FEATURE_TREE_PROBS];
+    } SegmentationData;
+
+// Loop filter data
+    typedef struct
+    {
+        LoopFilterType Type;
+        uint8_t Level;
+        uint8_t Sharpness;
+        uint8_t DeltaEnabled;
+        uint8_t DeltaUpdate;
+        int8_t DeltasRef[MAX_REF_LF_DELTAS];
+        int8_t DeltasMode[MAX_MODE_LF_DELTAS];
+    } LoopFilterData;
+
+// Quantization data
+    typedef struct
+    {
+        int8_t Y1_AC;
+        int8_t Y1_DC_Delta;
+        int8_t Y2_DC_Delta;
+        int8_t Y2_AC_Delta;
+        int8_t UV_DC_Delta;
+        int8_t UV_AC_Delta;
+    } QuantizationData;
+
+// Frame context
+    typedef struct
+    {
+        vp8_prob B_Mode_Prob[VP8_BINTRAMODES][VP8_BINTRAMODES][VP8_BINTRAMODES-1];
+        vp8_prob Y_Mode_Prob [VP8_YMODES-1];  /* interframe intra mode probs */
+        vp8_prob UV_Mode_Prob [VP8_UV_MODES-1];
+        vp8_prob DCT_Coefficients [BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [MAX_COEF_TOKENS-1];
+        vp8_prob MVContext[2][VP8_MV_Pcount];
+        vp8_prob Pre_MVContext[2][VP8_MV_Pcount];  //not to calculate the mvcost for the frame if mvc doesn't change.
+    } FrameContextData;
+
+// Extern to tables
+    extern const vp8_prob VP8_Coefficient_Default_Probabilites[BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [MAX_COEF_TOKENS-1];
+    extern const vp8_prob VP8_Coefficient_Update_Probabilites[BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [MAX_COEF_TOKENS-1];
+    extern const int VP8_MB_FeatureDataBits[MB_LVL_MAX];
+    extern const vp8_prob VP8_BMode_Const[VP8_BINTRAMODES][VP8_BINTRAMODES][VP8_BINTRAMODES-1];
+    extern const vp8_prob VP8_YMode_Const[VP8_YMODES-1];
+    extern const vp8_prob VP8_UVMode_Const[VP8_UV_MODES-1];
+    extern const vp8_prob VP8_MV_UpdateProbs[2][VP8_MV_Pcount], VP8_MV_DefaultMVContext[2][VP8_MV_Pcount];
+
+    typedef struct
+    {
+        FRAME_TYPE frame_type;
+        uint8_t version;
+        uint8_t show_frame;
+        uint32_t first_part_size;
+    } FrameTagHeader;
+
+    typedef struct _vp8_Info
+    {
+        // Frame Tag Header
+        FrameTagHeader frame_tag;
+
+        // Key Frame data
+        uint32_t width;
+        uint32_t height;
+        uint32_t horiz_scale;
+        uint32_t vert_scale;
+        YUV_TYPE clr_type;
+        CLAMP_TYPE clamp_type;
+
+        vp8_prob prob_intra;
+        vp8_prob prob_lf;
+        vp8_prob prob_gf;
+
+        uint8_t y_prob_valid;
+        uint8_t c_prob_valid;
+
+        uint32_t header_bits;
+        uint32_t frame_data_offset;
+
+        uint8_t *source;
+        uint32_t source_sz;
+
+        // Decoded picture number
+        uint32_t decoded_frame_number;
+
+        BOOL_CODER bool_coder;
+
+        // Refresh flags
+        uint8_t refresh_lf;
+
+        uint8_t refresh_gf;
+        uint8_t refresh_af;
+        uint8_t sign_bias_golden;
+        uint8_t sign_bias_alternate;
+
+        GoldenBufferCopiedType golden_copied;
+        AltRefBufferCopiedType altref_copied;
+
+        // Segmentation data
+        SegmentationData Segmentation;
+
+        // Loop filter data
+        LoopFilterData LoopFilter;
+
+        // Partitions
+        uint8_t partition_count;
+        uint8_t partition_number;
+        uint32_t partition_size[1<<EIGHT_PARTITION];
+
+        uint8_t mb_skip_coeff;
+        uint8_t mb_no_coeff_skip;
+        vp8_prob prob_skip_false;
+
+        // Quantization data
+        QuantizationData Quantization;
+
+        // Entropy / probability refresh flags
+        uint8_t refresh_entropy;
+        uint8_t refresh_entropy_lf;
+
+        // Frame context
+        FrameContextData FrameContext;
+        FrameContextData LastFrameContext;
+    } vp8_Info;
+
+    typedef struct _vp8_viddec_parser
+    {
+        uint32_t got_start;
+        vp8_Info info;
+    } vp8_viddec_parser;
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c b/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c
new file mode 100755
--- /dev/null
+++ b/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation. All rights reserved.
+* Copyright (c) Imagination Technologies Limited, UK
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#include <string.h>
+#include "vp8.h"
+#include "vp8parse.h"
+
+/* Init function which can be called to initialize the local context on open, flush and preserve */
+void viddec_vp8_init(void *ctxt, uint32_t *persist_mem,
uint32_t preserve) +{ + vp8_viddec_parser* parser = ctxt; + vp8_Info *pi = &(parser->info); + + /* Avoid compiler warning */ + persist_mem = persist_mem; + + if (!preserve) + { + /* Init frame header information */ + vp8_init_Info(pi); + } + else + { + /* Initialise the parser */ + pi->decoded_frame_number = 0; + pi->refresh_entropy_lf = 1; + } + + parser->got_start = 1; + return; +} + +uint32_t viddec_vp8_parse(void *parent, void *ctxt) +{ + vp8_Status status = VP8_NO_ERROR; + + vp8_viddec_parser *parser = (vp8_viddec_parser*)ctxt; + if (1 != parser->got_start) return VP8_NO_INITIALIZATION; + + vp8_Info *pi = &(parser->info); + viddec_pm_cxt_t *pm_cxt = (viddec_pm_cxt_t *)parent; + pi->source = pm_cxt->parse_cubby.buf; + pi->source_sz = pm_cxt->parse_cubby.size; + + if (pi->source_sz < 0) + { + return VP8_UNEXPECTED_END_OF_BITSTREAM; + } + else if (pi->source_sz == 0) + { + pi->frame_tag.frame_type = SKIPPED_FRAME; + status = VP8_NO_ERROR; + } + else if (pi->source_sz > 0) + { + status = vp8_parse_frame_header(parser); + } + + return status; +} + +uint32_t viddec_vp8_wkld_done(void *parent, void *ctxt, unsigned int next_sc, + uint32_t *codec_specific_errors) +{ + return 0; +} + +void viddec_vp8_get_context_size(viddec_parser_memory_sizes_t *size) +{ + /* Should return size of my structure */ + size->context_size = sizeof(vp8_viddec_parser); + size->persist_size = 0; + return; +} + +uint32_t viddec_vp8_is_frame_start(void *ctxt) +{ + vp8_viddec_parser* parser = ctxt; + + return parser->got_start; +} + +void viddec_vp8_get_ops(viddec_parser_ops_t *ops) +{ + ops->init = viddec_vp8_init; + + ops->parse_syntax = viddec_vp8_parse; + ops->get_cxt_size = viddec_vp8_get_context_size; + ops->is_wkld_done = viddec_vp8_wkld_done; + ops->is_frame_start = viddec_vp8_is_frame_start; + return; +} diff --git a/mixvbp/vbp_plugin/vp8/vp8parse.c b/mixvbp/vbp_plugin/vp8/vp8parse.c new file mode 100755 index 0000000..4f15736 --- /dev/null +++ b/mixvbp/vbp_plugin/vp8/vp8parse.c @@ -0,0 +1,605 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2012 Intel Corporation. All rights reserved. +* Copyright (c) Imagination Technologies Limited, UK +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+*
+*/
+
+#include "vp8_tables.h"
+#include "vp8parse.h"
+
+static const uint8_t kVp8SyncCodeByte[] = {0x9d, 0x01, 0x2a};
+
+void vp8_init_Info(vp8_Info *pi)
+{
+    memset(pi, 0, sizeof(vp8_Info));
+
+    /* Initialise the parser */
+    pi->decoded_frame_number = 0;
+    pi->refresh_entropy_lf = 1;
+}
+
+int32_t vp8_parse_frame_tag(FrameTagHeader *frame_tag, uint8_t *data, uint32_t data_sz)
+{
+    if (data_sz < 3)
+    {
+        return VP8_CORRUPT_FRAME;
+    }
+
+    /* 1-bit frame type */
+    frame_tag->frame_type = (FRAME_TYPE)(data[0] & 1);
+
+    /* 3-bit version number */
+    frame_tag->version = (data[0] >> 1) & 7;
+    if (frame_tag->version > 3)
+    {
+        return VP8_UNSUPPORTED_VERSION;
+    }
+
+    /* 1-bit show frame flag */
+    frame_tag->show_frame = (data[0] >> 4) & 1;
+
+    /* 19-bit field containing the size of the first data partition in bytes */
+    frame_tag->first_part_size = (data[0] | (data[1] << 8) | (data[2] << 16)) >> 5;
+
+    return VP8_NO_ERROR;
+}
+
+void vp8_init_frame(vp8_Info *pi)
+{
+    pi->golden_copied = BufferCopied_NoneToGolden;
+    pi->altref_copied = BufferCopied_NoneToAltref;
+
+    if (pi->frame_tag.frame_type == KEY_FRAME)
+    {
+        /* Various keyframe initializations */
+        /* vp8_prob data initialization */
+        memcpy(pi->FrameContext.B_Mode_Prob, VP8_BMode_Const, sizeof(VP8_BMode_Const));
+        memcpy(pi->FrameContext.Y_Mode_Prob, VP8_YMode_Const, sizeof(VP8_YMode_Const));
+        memcpy(pi->FrameContext.UV_Mode_Prob, VP8_UVMode_Const, sizeof(VP8_UVMode_Const));
+        memcpy(pi->FrameContext.MVContext, VP8_MV_DefaultMVContext, sizeof(VP8_MV_DefaultMVContext));
+        memcpy(pi->FrameContext.DCT_Coefficients, VP8_Coefficient_Default_Probabilites, sizeof(VP8_Coefficient_Default_Probabilites));
+
+        /* reset the segment feature data to 0 with delta coding (Default state)*/
+        memset(pi->Segmentation.FeatureData, 0, sizeof(pi->Segmentation.FeatureData));
+        pi->Segmentation.AbsDelta = SEGMENT_DELTADATA;
+
+        /* reset the mode ref deltas for loop filter */
+        memset(pi->LoopFilter.DeltasRef, 0, sizeof(pi->LoopFilter.DeltasRef));
+        memset(pi->LoopFilter.DeltasMode, 0, sizeof(pi->LoopFilter.DeltasMode));
+
+        /* All buffers are implicitly updated on key frames */
+        pi->refresh_gf = 1;
+        pi->refresh_af = 1;
+
+        pi->sign_bias_golden = 0;
+        pi->sign_bias_alternate = 0;
+    }
+    else if (pi->frame_tag.frame_type == INTER_FRAME)
+    {
+        pi->refresh_gf = 0;
+        pi->refresh_af = 0;
+    }
+}
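A worked example of the frame tag parse above (the byte values are invented for illustration, and the snippet assumes the prototypes from vp8parse.h):

    #include <assert.h>
    #include <stdint.h>
    #include "vp8.h"
    #include "vp8parse.h"

    /* Illustrative bytes only: 0x50 0x2a 0x00 decodes to a key frame,
     * version 0, show_frame = 1, first partition size = 338 bytes.     */
    static void frame_tag_example(void)
    {
        uint8_t data[3] = { 0x50, 0x2a, 0x00 };
        FrameTagHeader tag;

        assert(vp8_parse_frame_tag(&tag, data, 3) == VP8_NO_ERROR);
        assert(tag.frame_type == KEY_FRAME);  /* bit 0 of data[0] is 0  */
        assert(tag.version == 0);             /* bits 1..3              */
        assert(tag.show_frame == 1);          /* bit 4                  */
        assert(tag.first_part_size == 338);   /* 0x002a50 >> 5 == 338   */
    }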
+
+/* This function provides vp8_prob and value information for implementing
+ * segment adaptive adjustments to default decoder behaviors.
+ * The data parsed here applies to the entire frame. The adjustments can be
+ * quantization level or loop filter strength.
+ */
+void vp8_parse_segmentation_adjustments_data(vp8_Info *pi)
+{
+    int i,j;
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    const int *const mb_feature_data_bits = VP8_MB_FeatureDataBits;
+
+    /* Is segmentation enabled */
+    pi->Segmentation.Enabled = (uint8_t)vp8_decode_bool(bc, 128); //chapter 9.2 - macroblock uses segments ? 1 : 0
+
+    if (pi->Segmentation.Enabled)
+    {
+        /* Signal whether or not the segmentation map is being explicitly updated this frame */
+        pi->Segmentation.UpdateMap = (uint8_t)vp8_decode_bool(bc, 128);
+        pi->Segmentation.UpdateData = (uint8_t)vp8_decode_bool(bc, 128);
+
+        if (pi->Segmentation.UpdateData)
+        {
+            pi->Segmentation.AbsDelta = (uint8_t)vp8_decode_bool(bc, 128);
+
+            memset(pi->Segmentation.FeatureData, 0, sizeof(pi->Segmentation.FeatureData));
+
+            /* For each segmentation feature (Quant and loop filter level) */
+            for (i = 0; i < MB_LVL_MAX; ++i)
+            {
+                for (j = 0; j < MAX_MB_SEGMENTS; ++j)
+                {
+                    /* Frame level data */
+                    if (vp8_decode_bool(bc, 128))
+                    {
+                        /* Parse magnitude */
+                        pi->Segmentation.FeatureData[i][j] = (int8_t)vp8_read_bits(bc, mb_feature_data_bits[i]);
+
+                        /* Parse sign data */
+                        if (vp8_decode_bool(bc, 128))
+                        {
+                            pi->Segmentation.FeatureData[i][j] = -pi->Segmentation.FeatureData[i][j];
+                        }
+                    }
+                    else
+                    {
+                        pi->Segmentation.FeatureData[i][j] = 0;
+                    }
+                }
+            }
+
+        }
+
+        if (pi->Segmentation.UpdateMap)
+        {
+            /* Which macro block level features are enabled */
+            memset(pi->Segmentation.TreeProbs, 255, sizeof(pi->Segmentation.TreeProbs));
+
+            /* Read the probs used to decode the segment id for each macro block */
+            for (i = 0; i < MB_FEATURE_TREE_PROBS; ++i)
+            {
+                /* If not explicitly set value is defaulted to 255 by memset above */
+                if (vp8_decode_bool(bc, 128))
+                {
+                    pi->Segmentation.TreeProbs[i] = (uint8_t)vp8_read_bits(bc, 8);
+                }
+            }
+        }
+    }
+}
+
+/* VP8 supports two types of loop filter. The data parsed in the header
+ * supports the selection of the type, strength and sharpness behavior
+ * of the loop filter used for the current frame.
+ */
+void vp8_parse_loop_filter_type_level(vp8_Info *pi)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* Read the loop filter level and type */
+    pi->LoopFilter.Type = (LoopFilterType)vp8_decode_bool(bc, 128);
+    pi->LoopFilter.Level = (uint8_t)vp8_read_bits(bc, 6);
+    pi->LoopFilter.Sharpness = (uint8_t)vp8_read_bits(bc, 3);
+}
+
+/* This function provides flag and value information for implementing
+ * per-macroblock loop filter level adjustments to default decoder
+ * behaviors. Data parsed here applies to the entire frame.
+ */
+void vp8_parse_loop_filter_adjustments_data(vp8_Info *pi)
+{
+    int i;
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* Read in loop filter deltas applied at the MB level based on mode or ref frame */
+    pi->LoopFilter.DeltaUpdate = 0;
+    pi->LoopFilter.DeltaEnabled = (uint8_t)vp8_decode_bool(bc, 128);
+
+    if (pi->LoopFilter.DeltaEnabled)
+    {
+        /* Do the deltas need to be updated */
+        pi->LoopFilter.DeltaUpdate = (uint8_t)vp8_decode_bool(bc, 128);
+
+        if (pi->LoopFilter.DeltaUpdate)
+        {
+            /* Update based on reference */
+            for (i = 0; i < MAX_REF_LF_DELTAS; ++i)
+            {
+                if (vp8_decode_bool(bc, 128))
+                {
+                    pi->LoopFilter.DeltasRef[i] = (int8_t)vp8_read_bits(bc, 6);
+
+                    /* Parse sign */
+                    if (vp8_decode_bool(bc, 128))
+                    {
+                        pi->LoopFilter.DeltasRef[i] = -1 * pi->LoopFilter.DeltasRef[i];
+                    }
+                }
+            }
+
+            /* Update based on macroblock mode */
+            for (i = 0; i < MAX_MODE_LF_DELTAS; ++i)
+            {
+                if (vp8_decode_bool(bc, 128))
+                {
+                    pi->LoopFilter.DeltasMode[i] = (int8_t)vp8_read_bits(bc, 6);
+
+                    /* Parse sign */
+                    if (vp8_decode_bool(bc, 128))
+                    {
+                        pi->LoopFilter.DeltasMode[i] = -1 * pi->LoopFilter.DeltasMode[i];
+                    }
+                }
+            } /* End for (i = 0; i < MAX_MODE_LF_DELTAS; ++i) */
+        } /* End if (pi->LoopFilter.DeltaUpdate) */
+    }
+}
+
+/* Token partition and partition data offsets */
+void vp8_parse_token_partition_data(vp8_Info *pi, uint8_t *cx_size)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+    uint8_t *partition = NULL;
+    uint8_t *source_end = pi->source + pi->source_sz;
+    uint32_t partition_size = 0, i = 0;
+    uint8_t *partition_size_ptr = NULL;
+
+    /* Parse number of token partitions to use */
+    pi->partition_count = 1 << (uint8_t)vp8_read_bits(bc, 2);
+
+    /* Set up pointers to the first partition */
+    partition = cx_size;
+    if (pi->partition_count > 1)
+    {
+        /* Each partition offset is written in 3 bytes */
+        partition += 3 * (pi->partition_count - 1);
+    }
+
+    for (i = 0; i < pi->partition_count; i++)
+    {
+        partition_size_ptr = cx_size + i * 3;
+
+        if (i < pi->partition_count - 1)
+        {
+            pi->partition_size[i] = vp8_read_partition_size(partition_size_ptr);
+        }
+        else
+        {
+            /* Last offset can be calculated implicitly */
+            pi->partition_size[i] = source_end - partition;
+        }
+
+        partition += pi->partition_size[i];
+    }
+}
+
+int32_t vp8_read_partition_size(uint8_t *cx_size)
+{
+    uint32_t size = cx_size[0] + (cx_size[1] << 8) + (cx_size[2] << 16);
+
+    return size;
+}
+
+int read_q_delta(BOOL_CODER *bool_coder)
+{
+    int q_delta = 0;
+
+    /* presence flag */
+    if (vp8_decode_bool(bool_coder, 128))
+    {
+        /* magnitude */
+        q_delta = (uint8_t)vp8_read_bits(bool_coder, 4);
+
+        /* sign */
+        if (vp8_decode_bool(bool_coder, 128))
+        {
+            q_delta = -q_delta;
+        }
+    }
+
+    return q_delta;
+}
+
+/* Read the default quantizers */
+void vp8_parse_dequantization_indices(vp8_Info *pi)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* AC 1st order Q = default as a baseline for other 5 items */
+    pi->Quantization.Y1_AC = (int8_t)vp8_read_bits(bc, 7);
+    pi->Quantization.Y1_DC_Delta = (int8_t)read_q_delta(bc);
+    pi->Quantization.Y2_DC_Delta = (int8_t)read_q_delta(bc);
+    pi->Quantization.Y2_AC_Delta = (int8_t)read_q_delta(bc);
+    pi->Quantization.UV_DC_Delta = (int8_t)read_q_delta(bc);
+    pi->Quantization.UV_AC_Delta = (int8_t)read_q_delta(bc);
+}
+
+
+/* Determine if the golden frame or ARF buffer should be updated and how.
+ * For all non key frames the GF and ARF refresh flags and sign bias
+ * flags must be set explicitly.
+ */
+void vp8_parse_gf_af_refresh_flags(vp8_Info *pi)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* Read Golden and AltRef frame refresh */
+    pi->refresh_gf = (uint8_t)vp8_decode_bool(bc, 128);
+    pi->refresh_af = (uint8_t)vp8_decode_bool(bc, 128);
+
+    /* If not refreshed using the current reconstructed frame */
+    if (0 == pi->refresh_gf)
+    {
+        /* 2 bit indicating which buffer is copied to golden frame */
+        pi->golden_copied = (GoldenBufferCopiedType)(int8_t)vp8_read_bits(bc, 2);
+    }
+    else
+    {
+        /* No buffer is copied */
+        pi->golden_copied = (GoldenBufferCopiedType)0;
+    }
+
+    if (0 == pi->refresh_af)
+    {
+        /* 2 bit indicating which buffer is copied to alternative frame */
+        pi->altref_copied = (AltRefBufferCopiedType)vp8_read_bits(bc, 2);
+    }
+    else
+    {
+        pi->altref_copied = (AltRefBufferCopiedType)0;
+    }
+
+    pi->sign_bias_golden = (uint8_t)vp8_decode_bool(bc, 128);
+    pi->sign_bias_alternate = (uint8_t)vp8_decode_bool(bc, 128);
+
+}
+
+void vp8_parse_coef_probs_tree(vp8_Info *pi)
+{
+    int i, j, k, l;
+
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* DCT coefficient probability tree update */
+    for (i = 0; i < BLOCK_TYPES; i++)
+    {
+        for (j = 0; j < COEF_BANDS; j++)
+        {
+            for (k = 0; k < PREV_COEF_CONTEXTS; k++)
+            {
+                for (l = 0; l < MAX_COEF_TOKENS - 1; l++)
+                {
+                    if (vp8_decode_bool(bc, VP8_Coefficient_Update_Probabilites[i][j][k][l]))
+                    {
+                        pi->FrameContext.DCT_Coefficients[i][j][k][l] = (vp8_prob)vp8_read_bits(bc, 8);
+                    }
+                }
+            }
+        }
+    }
+}
+
+/* Parse remaining non-key-frame only data from frame header */
+void vp8_parse_mb_mv_info(vp8_Info *pi)
+{
+    // read_mvcontexts
+    int i = 0;
+
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    do
+    {
+        const vp8_prob *up = VP8_MV_UpdateProbs[i];
+        vp8_prob *p = pi->FrameContext.MVContext[i];
+        vp8_prob *const pstop = p + VP8_MV_Pcount;
+
+        do
+        {
+            if (vp8_decode_bool(bc, *up++))
+            {
+                const vp8_prob x = (vp8_prob)vp8_read_bits(bc, 7);
+
+                *p = x ? x << 1 : 1;
+            }
+        }
+        while (++p < pstop);
+    }
+    while (++i < 2);
+}
+
+/* Parse remaining non-key-frame only data from frame header */
+void vp8_parse_yuv_probs_update(vp8_Info *pi)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* Read probabilities */
+    pi->prob_intra = (vp8_prob)vp8_read_bits(bc, 8);
+    pi->prob_lf = (vp8_prob)vp8_read_bits(bc, 8);
+    pi->prob_gf = (vp8_prob)vp8_read_bits(bc, 8);
+
+    pi->y_prob_valid = (uint8_t)vp8_decode_bool(bc, 128);
+    if (1 == pi->y_prob_valid)
+    {
+        pi->FrameContext.Y_Mode_Prob[0] = (vp8_prob)vp8_read_bits(bc, 8);
+        pi->FrameContext.Y_Mode_Prob[1] = (vp8_prob)vp8_read_bits(bc, 8);
+        pi->FrameContext.Y_Mode_Prob[2] = (vp8_prob)vp8_read_bits(bc, 8);
+        pi->FrameContext.Y_Mode_Prob[3] = (vp8_prob)vp8_read_bits(bc, 8);
+    }
+
+    pi->c_prob_valid = (uint8_t)vp8_decode_bool(bc, 128);
+    if (1 == pi->c_prob_valid)
+    {
+        pi->FrameContext.UV_Mode_Prob[0] = (vp8_prob)vp8_read_bits(bc, 8);
+        pi->FrameContext.UV_Mode_Prob[1] = (vp8_prob)vp8_read_bits(bc, 8);
+        pi->FrameContext.UV_Mode_Prob[2] = (vp8_prob)vp8_read_bits(bc, 8);
+    }
+}
+
+
+void vp8_parse_remaining_frame_header_data(vp8_Info *pi)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* MB no coefficients skip */
+    pi->mb_no_coeff_skip = (uint8_t)vp8_decode_bool(bc, 128);
+
+    if (1 == pi->mb_no_coeff_skip)
+    {
+        pi->prob_skip_false = (vp8_prob)vp8_read_bits(bc, 8);
+    }
+    else
+    {
+        pi->mb_skip_coeff = 0;
+    }
+
+    if (pi->frame_tag.frame_type == INTER_FRAME)
+    {
+        vp8_parse_yuv_probs_update(pi);
+
+        /* Read motion vector info */
+        vp8_parse_mb_mv_info(pi);
+    }
+
+}
+
+#if 0
+vp8_Status vp8_translate_parse_status(vp8_Status status)
+{
+    switch (status)
+    {
+    case VP8_UNSUPPORTED_VERSION:
+        LOGE("Parser returned VP8_UNSUPPORTED_VERSION");
+        return VP8_UNSUPPORTED_VERSION;
+    case VP8_UNSUPPORTED_BITSTREAM:
+        LOGE("Parser returned VP8_UNSUPPORTED_BITSTREAM");
+        return VP8_UNSUPPORTED_BITSTREAM;
+    case VP8_INVALID_FRAME_SYNC_CODE:
+        LOGE("Parser returned VP8_INVALID_FRAME_SYNC_CODE");
+        return VP8_INVALID_FRAME_SYNC_CODE;
+    case VP8_UNEXPECTED_END_OF_BITSTREAM:
+        LOGE("Parser returned VP8_UNEXPECTED_END_OF_BITSTREAM");
+        return VP8_UNEXPECTED_END_OF_BITSTREAM;
+    default:
+        LOGE("Parser returned VP8_UNKNOWN_ERROR");
+        return VP8_UNKNOWN_ERROR;
+    }
+}
+#endif
+
+/* Parse VP8 frame header */
+int32_t vp8_parse_frame_header(vp8_viddec_parser *parser)
+{
+    vp8_Status ret = VP8_NO_ERROR;
+
+    vp8_Info *pi = &(parser->info);
+
+    uint8_t *data = pi->source;
+    uint32_t data_sz = pi->source_sz;
+
+    if (0 == pi->refresh_entropy_lf)
+    {
+        memcpy(&(pi->FrameContext), &(pi->LastFrameContext), sizeof(FrameContextData));
+    }
+
+    /* Step 1 : parse frame tag containing 3 bytes*/
+    ret = vp8_parse_frame_tag(&(pi->frame_tag), data, data_sz);
+    if (ret != VP8_NO_ERROR)
+    {
+        return ret;
+    }
+
+    /* Pointer advances 3 bytes */
+    data += 3;
+
+    /* Start the frame data offset */
+    pi->frame_data_offset = 3;
+
+    /* Step 2 : parse key frame parameters*/
+    if (pi->frame_tag.frame_type == KEY_FRAME)
+    {
+        /* Check sync code containing 3 bytes*/
+        if ((data[0] != kVp8SyncCodeByte[0]) || (data[1] != kVp8SyncCodeByte[1]) || (data[2] != kVp8SyncCodeByte[2]))
+        {
+            return VP8_INVALID_FRAME_SYNC_CODE;
+        }
+
+        pi->width = (data[3] | (data[4] << 8)) & 0x3fff;
+        pi->horiz_scale = data[4] >> 6;
+        pi->height = (data[5] | (data[6] << 8)) & 0x3fff;
+        pi->vert_scale = data[6] >> 6;
+
+        /* Pointer advances 7 bytes in this case*/
+        data += 7;
+        pi->frame_data_offset += 7;
+    }
+
+    if (0 == pi->width || 0 == pi->height)
+    {
+        return VP8_UNSUPPORTED_BITSTREAM;
+    }
+
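+    /*
+     * Illustrative summary of the layout parsed above (byte offsets are
+     * relative to the start of the frame; values in the example are made up):
+     *
+     *   bytes 0..2 : frame tag  (1-bit frame type, 3-bit version,
+     *                            1-bit show_frame, 19-bit first_part_size)
+     *   bytes 3..5 : sync code  0x9d 0x01 0x2a           (key frames only)
+     *   bytes 6..7 : 14-bit width  plus 2-bit horizontal scale
+     *   bytes 8..9 : 14-bit height plus 2-bit vertical scale
+     *
+     * Example: with data pointing at the sync code, data[3] = 0x80 and
+     * data[4] = 0x02 give width = (0x0280 & 0x3fff) = 640, horiz_scale = 0.
+     */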
+    /* Initialize frame parameters*/
+    vp8_init_frame(pi);
+
+    /* Initialize bool coder */
+    BOOL_CODER *bc = &(pi->bool_coder);
+    vp8_start_decode(bc, (uint8_t*)data);
+
+    /* Parse key frame parameters */
+    if (pi->frame_tag.frame_type == KEY_FRAME)
+    {
+        pi->clr_type = (YUV_TYPE)vp8_decode_bool(bc, 128);
+        pi->clamp_type = (CLAMP_TYPE)vp8_decode_bool(bc, 128);
+    }
+
+    /* Step 3 : parse macroblock-level segmentation flag */
+    vp8_parse_segmentation_adjustments_data(pi);
+
+    /* Step 4 : parse loop filter type and levels */
+    vp8_parse_loop_filter_type_level(pi);
+
+    /* Step 5 : parse macroblock-level loop filter adjustments */
+    vp8_parse_loop_filter_adjustments_data(pi);
+
+    /* Step 6: parse token partition and partition data offsets */
+    vp8_parse_token_partition_data(pi, data + pi->frame_tag.first_part_size);
+
+    /* Step 7: parse dequantization indices */
+    vp8_parse_dequantization_indices(pi);
+
+    /* For key frames, both golden frame and altref frame are refreshed/replaced by the current reconstructed frame, by default */
+    if (pi->frame_tag.frame_type == INTER_FRAME)
+    {
+        /* Step 8: parse golden frame and altref frame refresh flags */
+        vp8_parse_gf_af_refresh_flags(pi);
+    }
+
+    /* Step 9: update probability to decode DCT coef */
+    pi->refresh_entropy = (uint8_t)vp8_decode_bool(bc, 128);
+    if (pi->refresh_entropy == 0)
+    {
+        memcpy(&(pi->LastFrameContext), &(pi->FrameContext), sizeof(FrameContextData));
+    }
+
+    /* Step 10: refresh last frame buffer */
+    pi->refresh_lf = (pi->frame_tag.frame_type == KEY_FRAME) || (uint8_t)(vp8_decode_bool(bc, 128));
+
+    /* Step 11: read coef vp8_prob tree */
+    vp8_parse_coef_probs_tree(pi);
+
+    /* Step 12: read remaining frame header data */
+    vp8_parse_remaining_frame_header_data(pi);
+
+    /* Hold the current offset in the bitstream */
+    pi->frame_data_offset += pi->bool_coder.pos;
+
+    /* Get the frame header bits */
+    pi->header_bits = pi->frame_data_offset * 8 - 16 - pi->bool_coder.count;
+
+    pi->refresh_entropy_lf = pi->refresh_entropy;
+
+    return ret;
+}
-- 
cgit v1.2.3

From 3677c030504e9d6418117d44d66ab8ef177b9f53 Mon Sep 17 00:00:00 2001
From: ywan171
Date: Fri, 6 Sep 2013 09:55:19 +0800
Subject: libmix: reset mSignalBufferSize and mSignalBufferPre array in terminateVA

BZ: 134837

Reset mSignalBufferSize and the mSignalBufferPre array in terminateVA
to fix the failure in the CTS MediaCodec test: in that test case,
MediaCodec calls mediaCodec.stop and then mediaCodec.start to restart
a component without destroying it.

Change-Id: Iea966dbb422c34a09996f347a2b55933e8ca5825
Signed-off-by: ywan171
Reviewed-on: http://android.intel.com:8080/130319
Reviewed-by: Zhou, ChuX
Reviewed-by: Feng, Wei
Reviewed-by: Liang, Dan
Reviewed-by: Shi, PingX
Tested-by: Shi, PingX
Reviewed-by: cactus
Tested-by: cactus
---
 videodecoder/VideoDecoderBase.cpp | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp
index c12ea5d..a050f7b 100755
--- a/videodecoder/VideoDecoderBase.cpp
+++ b/videodecoder/VideoDecoderBase.cpp
@@ -142,7 +142,6 @@ Decode_Status VideoDecoderBase::reset(VideoConfigBuffer *buffer) {
     }
     mLowDelay = buffer->flag & WANT_LOW_DELAY;
     mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
-    mSignalBufferSize = 0;
     if (mRawOutput) {
         WTRACE("Output is raw data.");
     }
@@ -167,7 +166,6 @@ void VideoDecoderBase::stop(void) {
     mNumSurfaces = 0;
     mSurfaceAcquirePos = 0;
    mNextOutputPOC = MINIMUM_POC;
-    mVideoFormatInfo.valid = false;

    if (mParserHandle){
        vbp_close(mParserHandle);
@@ -949,6 +947,10 @@
Decode_Status VideoDecoderBase::terminateVA(void) { mVAStarted = false; mInitialized = false; + mSignalBufferSize = 0; + for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) { + mSignalBufferPre[i] = NULL; + } return DECODE_SUCCESS; } -- cgit v1.2.3 From 02f4c087721c3c1ae0ad702fd01bfaa82f7c31cc Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Fri, 6 Sep 2013 17:12:14 +0800 Subject: Allocate UNCACHED memory for reference frames BZ: 136510 Change-Id: Ie80b0bdd94fa5fafbfe219d6802e77e7fc4d8bfc Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/130655 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videoencoder/VideoEncoderBase.cpp | 32 +++++++++++++++++++------------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 2a84924..a98a56d 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -172,9 +172,6 @@ Encode_Status VideoEncoderBase::start() { LOG_V( "======VA Create Surfaces for Rec/Ref frames ======\n"); - VASurfaceID surfaces[2]; - VASurfaceAttrib attrib_list[2]; - VASurfaceAttribExternalBuffers external_refbuf; uint32_t stride_aligned, height_aligned; if(mAutoReference == false){ stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; @@ -199,21 +196,30 @@ Encode_Status VideoEncoderBase::start() { attribute_tpi.size = stride_aligned * height_aligned * 3 / 2; #endif + ValueInfo vinfo; + vinfo.mode = MEM_MODE_SURFACE; + vinfo.width = stride_aligned; + vinfo.height = height_aligned; + vinfo.lumaStride = stride_aligned; + vinfo.size = stride_aligned * height_aligned * 1.5; + vinfo.format = VA_FOURCC_NV12; + if(mAutoReference == false){ - vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, - stride_aligned, height_aligned, surfaces, 2, NULL, 0); - mRefSurface = surfaces[0]; - mRecSurface = surfaces[1]; + mRefSurface = CreateSurfaceFromExternalBuf(0, vinfo); + mRecSurface = CreateSurfaceFromExternalBuf(0, vinfo); + }else { mAutoRefSurfaces = new VASurfaceID [mAutoReferenceSurfaceNum]; - if(mComParams.profile == VAProfileVP8Version0_3) - { + + if(mComParams.profile == VAProfileVP8Version0_3){ + VASurfaceAttrib attrib_list[2]; + VASurfaceAttribExternalBuffers external_refbuf; setupVP8RefExternalBuf(stride_aligned,height_aligned,&external_refbuf,&attrib_list[0]); - vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420,stride_aligned, height_aligned, mAutoRefSurfaces, mAutoReferenceSurfaceNum, NULL, 0); + vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420,stride_aligned, height_aligned, mAutoRefSurfaces, mAutoReferenceSurfaceNum, attrib_list, 2); + } else { + for(int i = 0; i < mAutoReferenceSurfaceNum; i ++) + mAutoRefSurfaces[i] = CreateSurfaceFromExternalBuf(0, vinfo); } - else - vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, - stride_aligned, height_aligned, mAutoRefSurfaces, mAutoReferenceSurfaceNum, NULL, 0); } CHECK_VA_STATUS_RETURN("vaCreateSurfaces"); -- cgit v1.2.3 From 797a4da477dd182b0c8da59a01ba0d02c19d76f3 Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Sun, 22 Sep 2013 15:26:19 +0800 Subject: Update SW h263 encoder to support gfx buffer BZ: 102512 Update SW h263 encoder to support gfx buffer Change-Id: I34cdeedf40301c91ac2741710c868a5c88a2467d Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/133536 Reviewed-by: Shi, PingX Reviewed-by: Ding, Haitao Tested-by: Ding, Haitao Reviewed-by: cactus Tested-by: cactus --- videoencoder/Android.mk | 2 ++ 
videoencoder/PVSoftMPEG4Encoder.cpp | 33 +++++++++++++++++++------- videovpp/VideoVPPBase.cpp | 1 + videovpp/test/main.cpp | 46 +++++++++++++++++++++++++++---------- 4 files changed, 62 insertions(+), 20 deletions(-) diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk index e156ec2..6b8b3a5 100644 --- a/videoencoder/Android.mk +++ b/videoencoder/Android.mk @@ -50,6 +50,8 @@ LOCAL_SHARED_LIBRARIES := \ libva \ libva-android \ libva-tpi \ + libui \ + libutils \ libintelmetadatabuffer #LOCAL_CFLAGS += -DANDROID diff --git a/videoencoder/PVSoftMPEG4Encoder.cpp b/videoencoder/PVSoftMPEG4Encoder.cpp index 7103ea2..11ef971 100644 --- a/videoencoder/PVSoftMPEG4Encoder.cpp +++ b/videoencoder/PVSoftMPEG4Encoder.cpp @@ -50,9 +50,14 @@ #include #include +#include +#include + #include "PVSoftMPEG4Encoder.h" #include "VideoEncoderLog.h" +#define ALIGN(x, align) (((x) + (align) - 1) & (~((align) - 1))) + inline static void ConvertYUV420SemiPlanarToYUV420Planar( uint8_t *inyuv, uint8_t* outyuv, int32_t width, int32_t height) { @@ -85,13 +90,13 @@ inline static void ConvertYUV420SemiPlanarToYUV420Planar( inline static void trimBuffer(uint8_t *dataIn, uint8_t *dataOut, int32_t width, int32_t height, - int32_t stride) { + int32_t alignedHeight, int32_t stride) { int32_t h; uint8_t *y_start, *uv_start, *_y_start, *_uv_start; y_start = dataOut; uv_start = dataOut + width * height; _y_start = dataIn; - _uv_start = dataIn + stride * height; + _uv_start = dataIn + stride * alignedHeight; for (h = 0; h < height; h++) memcpy(y_start + h * width, _y_start + h * stride, width); @@ -421,19 +426,31 @@ Encode_Status PVSoftMPEG4Encoder::encode(VideoEncRawBuffer *inBuffer, uint32_t t IntelMetadataBuffer imb; int32_t type; int32_t value; + uint8_t *img; + const android::Rect rect(mVideoWidth, mVideoHeight); + android::status_t res; ValueInfo vinfo; ValueInfo *pvinfo = &vinfo; CHECK(IMB_SUCCESS == imb.UnSerialize(inBuffer->data, inBuffer->size)); imb.GetType((::MetadataBufferType&)type); imb.GetValue(value); imb.GetValueInfo(pvinfo); - if (mNumInputFrames == 0) - LOG_I("%d %d %d\n", pvinfo->width, - pvinfo->height, pvinfo->lumaStride); + if(pvinfo == NULL) { + res = android::GraphicBufferMapper::get().lock((buffer_handle_t)value, + GRALLOC_USAGE_SW_READ_MASK, + rect, (void**)&img); + } else { + img = (uint8_t*)value; + } if (pvinfo != NULL) - trimBuffer((uint8_t*)value, mTrimedInputData, pvinfo->width, pvinfo->height, pvinfo->lumaStride); - else - LOG_E("failed to parse metadata info"); + trimBuffer(img, mTrimedInputData, pvinfo->width, pvinfo->height, + pvinfo->height, pvinfo->lumaStride); + else { + //NV12 Y-TILED + trimBuffer(img, mTrimedInputData, mVideoWidth, mVideoHeight, + ALIGN(mVideoHeight, 32), ALIGN(mVideoWidth, 128)); + android::GraphicBufferMapper::get().unlock((buffer_handle_t)value); + } } else { memcpy(mTrimedInputData, inBuffer->data, (mVideoWidth * mVideoHeight * 3 ) >> 1); diff --git a/videovpp/VideoVPPBase.cpp b/videovpp/VideoVPPBase.cpp index a57e0e6..8265723 100644 --- a/videovpp/VideoVPPBase.cpp +++ b/videovpp/VideoVPPBase.cpp @@ -336,6 +336,7 @@ VAStatus VideoVPPBase::_perform(VASurfaceID SrcSurf, VARectangle SrcRect, vpp_param.num_backward_references = 0; vpp_param.blend_state = NULL; vpp_param.rotation_state = VA_ROTATION_NONE; + vpp_param.mirror_state = VA_MIRROR_NONE; vret = vaCreateBuffer(va_display, va_context, VAProcPipelineParameterBufferType, diff --git a/videovpp/test/main.cpp b/videovpp/test/main.cpp index 0900797..ce91027 100644 --- a/videovpp/test/main.cpp +++ 
b/videovpp/test/main.cpp @@ -29,6 +29,8 @@ enum { HAL_PIXEL_FORMAT_INTEL_NV12 = HAL_PIXEL_FORMAT_NV12_TILED_INTEL, #endif + HAL_PIXEL_FORMAT_YCrCb_422_H_INTEL = 0x102, // YV16 + // deprecated use HAL_PIXEL_FORMAT_NV12_LINEAR_INTEL HAL_PIXEL_FORMAT_YUV420PackedSemiPlanar_INTEL = 0x7FA00E00, @@ -146,9 +148,10 @@ int main(int argc, char *argv[]) gralloc_module = (struct gralloc_module_t*)module; res = gralloc_open(module, &mAllocDev); res = mAllocDev->alloc(mAllocDev, width, height, - HAL_PIXEL_FORMAT_YCbCr_422_I, - GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE | - GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK, + //HAL_PIXEL_FORMAT_YCbCr_422_I, + HAL_PIXEL_FORMAT_YCrCb_422_H_INTEL, + GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE,// | + //GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK, &handle_YUY2, &stride_YUY2); if (res != 0) printf("%d: alloc()\n", __LINE__); @@ -162,25 +165,38 @@ int main(int argc, char *argv[]) if (res != 0) { printf("lock error\n"); } else { - res = fread(vaddr[0], 1, width * height * 2, fIn); + //res = fread(vaddr[0], 1, width * height * 2, fIn); + for (i = 0; i < height; i++) + res += fread(vaddr[0] + i * pGrallocHandle->pitch, 1, width, fIn); + for (i = 0; i < height; i++) + res += fread(vaddr[0] + (2 * height + i) * pGrallocHandle->pitch, 1, + width / 2, fIn); + for (i = 0; i < height; i++) + res += fread(vaddr[0] + (height + i) * pGrallocHandle->pitch, 1, + width / 2, fIn); printf("fread %d\n", res); gralloc_module->unlock(gralloc_module, handle_YUY2); } Src.width = pGrallocHandle->width; Src.height = pGrallocHandle->height; Src.stride = pGrallocHandle->pitch; + //Src.format = VA_RT_FORMAT_YUV422; + //Src.pixel_format = VA_FOURCC_YUY2; Src.format = VA_RT_FORMAT_YUV422; - Src.pixel_format = VA_FOURCC_YUY2; - Src.type = RenderTarget::KERNEL_DRM; - Src.handle = pGrallocHandle->name; + Src.pixel_format = VA_FOURCC_422H; + //Src.type = RenderTarget::KERNEL_DRM; + //Src.handle = pGrallocHandle->name; + Src.type = RenderTarget::ANDROID_GRALLOC; + Src.handle = (unsigned int)handle_YUY2; Src.rect.x = Src.rect.y = 0; Src.rect.width = Src.width; Src.rect.height = Src.height; } res = mAllocDev->alloc(mAllocDev, width, height, - HAL_PIXEL_FORMAT_NV12_TILED_INTEL, - GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE | - GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK, + //HAL_PIXEL_FORMAT_NV12_TILED_INTEL, + HAL_PIXEL_FORMAT_YCbCr_422_I, + GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE,// | + //GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK, &handle_NV12, &stride_NV12); if (res != 0) printf("%d: alloc()\n", __LINE__); @@ -191,8 +207,10 @@ int main(int argc, char *argv[]) Dst.width = pGrallocHandle->width; Dst.height = pGrallocHandle->height; Dst.stride = pGrallocHandle->pitch; - Dst.format = VA_RT_FORMAT_YUV420; - Dst.pixel_format = VA_FOURCC_NV12; + //Dst.format = VA_RT_FORMAT_YUV420; + //Dst.pixel_format = VA_FOURCC_NV12; + Dst.format = VA_RT_FORMAT_YUV422; + Dst.pixel_format = VA_FOURCC_YUY2; Dst.type = RenderTarget::KERNEL_DRM; Dst.handle = pGrallocHandle->name; Dst.rect.x = 0; @@ -239,6 +257,7 @@ int main(int argc, char *argv[]) unsigned char *pY = (unsigned char*)vaddr[0]; unsigned char *pUV = pY + stride_NV12 * ALIGN(height, 32); //unsigned char *pUV = pY + stride_NV12 * height; + /* for (res =0, i = 0; i < height; i++) { res += fwrite(pY, 1, width, fOut); pY += stride_NV12; @@ -248,6 +267,9 @@ int main(int argc, char *argv[]) res += fwrite(pUV, 1, width, fOut); pUV += stride_NV12; } + */ + for (i = 0; i < height; i++) 
+            res += fwrite(vaddr[0] + i * Dst.stride, 1, width * 2, fOut);
         printf("fwrite %d\n", res);
         gralloc_module->unlock(gralloc_module, handle_NV12);
     }
-- 
cgit v1.2.3

From 9770cd361f2015751fbffcdf81fdb86b9dc6636c Mon Sep 17 00:00:00 2001
From: ywan171
Date: Sun, 22 Sep 2013 11:23:11 +0800
Subject: libmix: set EOS flag in the last output frame

BZ: 134842

Set the EOS flag in the last output frame if the draining flag is set.

Change-Id: I7ca213833cf94cb495b0f6b029d441d079df11da
Signed-off-by: ywan171
Reviewed-on: http://android.intel.com:8080/133482
Reviewed-by: Shi, PingX
Reviewed-by: Ding, Haitao
Tested-by: Ding, Haitao
Reviewed-by: cactus
Tested-by: cactus
---
 videodecoder/VideoDecoderBase.cpp | 6 ++++++
 videodecoder/VideoDecoderDefs.h   | 3 +++
 2 files changed, 9 insertions(+)

diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp
index a050f7b..f2d72d6 100755
--- a/videodecoder/VideoDecoderBase.cpp
+++ b/videodecoder/VideoDecoderBase.cpp
@@ -234,6 +234,9 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) {
         vaStatus = vaSetTimestampForSurface(mVADisplay, outputByPos->renderBuffer.surface, outputByPos->renderBuffer.timeStamp);
         if (useGraphicBuffer)
             vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface);
+        if (draining && mOutputTail == NULL) {
+            outputByPos->renderBuffer.flag |= IS_EOS;
+        }
         return &(outputByPos->renderBuffer);
     }

@@ -284,6 +287,9 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) {
     if (useGraphicBuffer)
         vaSyncSurface(mVADisplay, output->renderBuffer.surface);

+    if (draining && mOutputTail == NULL) {
+        output->renderBuffer.flag |= IS_EOS;
+    }
     return &(output->renderBuffer);
 }

diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h
index 4f44adf..8f94cde 100644
--- a/videodecoder/VideoDecoderDefs.h
+++ b/videodecoder/VideoDecoderDefs.h
@@ -102,6 +102,9 @@ typedef enum {
     // indicate whether video decoder buffer contains secure data
     IS_SECURE_DATA = 0x8000,

+    // indicate it's the last output frame of the sequence
+    IS_EOS = 0x10000,
+
 } VIDEO_BUFFER_FLAG;

 struct VideoDecodeBuffer {
-- 
cgit v1.2.3

From 8d934c5231df4cdf5da29ef75fd83c8f131fc353 Mon Sep 17 00:00:00 2001
From: Eric Seibel
Date: Fri, 13 Sep 2013 17:55:37 +0200
Subject: moorefield platform to support

BZ: 135371

To solve a compilation issue on the merrplus build target, which is
based on the moorefield platform.

Change-Id: I62b31f0c8b9f3632be7e4a9e63647449491322db
Signed-off-by: Eric Seibel
Reviewed-on: http://android.intel.com:8080/132056
Reviewed-by: Ding, Haitao
Reviewed-by: Yuan, Shengquan
Reviewed-by: cactus
Tested-by: cactus
---
 videodecoder/securevideo/moorefield | 1 +
 1 file changed, 1 insertion(+)
 create mode 120000 videodecoder/securevideo/moorefield

diff --git a/videodecoder/securevideo/moorefield b/videodecoder/securevideo/moorefield
new file mode 120000
index 0000000..278d417
--- /dev/null
+++ b/videodecoder/securevideo/moorefield
@@ -0,0 +1 @@
+merrifield/
\ No newline at end of file
-- 
cgit v1.2.3

From a6536dcd9a4a1a55d142e19bfbd87201cbac4b1e Mon Sep 17 00:00:00 2001
From: liubolun
Date: Tue, 24 Sep 2013 14:20:31 +0800
Subject: Refine VP8 encode middleware.

BZ: 135380

Remove the ref frame alignment calculation, because the driver will
do that instead; add setConfig/getConfig support for VP8 reference
frame selection.
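(A usage sketch for the configuration type this patch introduces; illustrative only, since the encoder handle and error handling below are assumed rather than part of the commit:)

    // Assumed: "encoder" is a started, VP8-configured VideoEncoderBase-derived instance.
    VideoConfigVP8ReferenceFrame refCfg;   // constructor fills in type and size
    refCfg.no_ref_last = 1;                // skip last-frame prediction
    refCfg.no_ref_gf = 0;                  // allow golden-frame prediction
    refCfg.no_ref_arf = 1;                 // skip altref-frame prediction
    refCfg.refresh_last = 1;
    refCfg.refresh_golden_frame = 0;
    refCfg.refresh_alternate_frame = 0;

    Encode_Status ret = encoder->setConfig(&refCfg);
    // getConfig() with the same type reads the current values back.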
Change-Id: Idaa01959e5b47a8b0f9d01a7b4d2eab10cca3bf3 Signed-off-by: Liu Bolun Reviewed-on: http://android.intel.com:8080/134059 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videoencoder/VideoEncoderBase.cpp | 75 ++---------------- videoencoder/VideoEncoderBase.h | 5 -- videoencoder/VideoEncoderDef.h | 79 +++++++++++-------- videoencoder/VideoEncoderVP8.cpp | 159 ++++++++++++++++++++++++++------------ videoencoder/VideoEncoderVP8.h | 1 + 5 files changed, 160 insertions(+), 159 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index a98a56d..41c069d 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -177,25 +177,11 @@ Encode_Status VideoEncoderBase::start() { stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; }else{ - if(mComParams.profile == VAProfileVP8Version0_3) - { - stride_aligned = ((mComParams.resolution.width + 64 + 63) / 64 ) * 64; //for vsp stride - height_aligned = ((mComParams.resolution.height + 64 + 63) / 64 ) * 64; - } - else - { + // this alignment is used for AVC. For vp8 encode, driver will handle the alignment stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; //on Merr, stride must be 64 aligned. height_aligned = ((mComParams.resolution.height + 31) / 32 ) * 32; - } } -#if 0 - if(mComParams.profile == VAProfileVP8Version0_3) - attribute_tpi.size = stride_aligned * height_aligned + stride_aligned * ((((mComParams.resolution.height + 1) / 2 + 32)+63)/64) *64;// FW need w*h + w*chrom_height - else - attribute_tpi.size = stride_aligned * height_aligned * 3 / 2; -#endif - ValueInfo vinfo; vinfo.mode = MEM_MODE_SURFACE; vinfo.width = stride_aligned; @@ -210,16 +196,8 @@ Encode_Status VideoEncoderBase::start() { }else { mAutoRefSurfaces = new VASurfaceID [mAutoReferenceSurfaceNum]; - - if(mComParams.profile == VAProfileVP8Version0_3){ - VASurfaceAttrib attrib_list[2]; - VASurfaceAttribExternalBuffers external_refbuf; - setupVP8RefExternalBuf(stride_aligned,height_aligned,&external_refbuf,&attrib_list[0]); - vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420,stride_aligned, height_aligned, mAutoRefSurfaces, mAutoReferenceSurfaceNum, attrib_list, 2); - } else { - for(int i = 0; i < mAutoReferenceSurfaceNum; i ++) + for(int i = 0; i < mAutoReferenceSurfaceNum; i ++) mAutoRefSurfaces[i] = CreateSurfaceFromExternalBuf(0, vinfo); - } } CHECK_VA_STATUS_RETURN("vaCreateSurfaces"); @@ -1214,8 +1192,8 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { case VideoConfigTypeNALSize: case VideoConfigTypeIDRRequest: case VideoConfigTypeSliceNum: - case VideoConfigTypeVP8: { - + case VideoConfigTypeVP8: + case VideoConfigTypeVP8ReferenceFrame: { ret = derivedSetConfig(videoEncConfig); break; } @@ -1737,12 +1715,8 @@ Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { //TODO: need optimization for both width/height not aligned case VASurfaceID surfaceId; unsigned int stride_aligned; - if(mComParams.profile == VAProfileVP8Version0_3) - stride_aligned = ((mComParams.resolution.width + 31) / 32 ) * 32; - else - stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; - - vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, + stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; + vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, stride_aligned, map->vinfo.height, 
&surfaceId, 1, NULL, 0); map->surface = surfaceId; @@ -2251,40 +2225,3 @@ VASurfaceID VideoEncoderBase::CreateSurfaceFromExternalBuf(int32_t value, ValueI return surface; } - -Encode_Status VideoEncoderBase::setupVP8RefExternalBuf(uint32_t stride_aligned, - uint32_t height_aligned, - VASurfaceAttribExternalBuffers *buf, - VASurfaceAttrib *attrib_list) -{ - int ref_height_uv = (mComParams.resolution.height/ 2 + 32 + 63) & (~63); - buf->pixel_format = VA_FOURCC_NV12; - buf->width = stride_aligned; - buf->height = height_aligned; - buf->data_size = stride_aligned * height_aligned + stride_aligned * ref_height_uv; - buf->num_buffers = mAutoReferenceSurfaceNum; - buf->num_planes = 3; - buf->pitches[0] = stride_aligned; - buf->pitches[1] = stride_aligned; - buf->pitches[2] = stride_aligned; - buf->pitches[3] = 0; - buf->offsets[0] = 0; - buf->offsets[1] = stride_aligned*height_aligned; - buf->offsets[2] = buf->offsets[1]; - buf->offsets[3] = 0; - buf->buffers = (unsigned long *)calloc(buf->num_buffers, sizeof(unsigned long)); - buf->flags = 0; - buf->private_data = NULL; - - attrib_list[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType; - attrib_list[0].flags = VA_SURFACE_ATTRIB_SETTABLE; - attrib_list[0].value.type = VAGenericValueTypeInteger; - attrib_list[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_VA; - - attrib_list[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor; - attrib_list[1].flags = VA_SURFACE_ATTRIB_SETTABLE; - attrib_list[1].value.type = VAGenericValueTypePointer; - attrib_list[1].value.value.p = (void *)buf; - - return 0; -} diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 82ac1f1..d43878b 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -100,11 +100,6 @@ private: Encode_Status querySupportedSurfaceMemTypes(); Encode_Status copySurfaces(VASurfaceID srcId, VASurfaceID destId); VASurfaceID CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vinfo); - Encode_Status setupVP8RefExternalBuf(uint32_t stride_aligned, - uint32_t height_aligned, - VASurfaceAttribExternalBuffers *buf, - VASurfaceAttrib *attrib_list); - protected: diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 426db02..d91ed23 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -303,6 +303,7 @@ enum VideoParamConfigType { VideoConfigTypeIDRRequest, VideoConfigTypeSliceNum, VideoConfigTypeVP8, + VideoConfigTypeVP8ReferenceFrame, VideoParamsConfigExtension }; @@ -586,44 +587,54 @@ struct VideoConfigSliceNum : VideoParamConfigSet { struct VideoParamsVP8 : VideoParamConfigSet { - uint32_t profile; - uint32_t error_resilient; - uint32_t num_token_partitions; - uint32_t kf_auto; - uint32_t kf_min_dist; - uint32_t kf_max_dist; - uint32_t min_qp; - uint32_t max_qp; - uint32_t init_qp; - uint32_t rc_undershoot; - uint32_t rc_overshoot; - uint32_t hrd_buf_size; - uint32_t hrd_buf_initial_fullness; - uint32_t hrd_buf_optimal_fullness; - - VideoParamsVP8() { - type = VideoParamsTypeVP8; - size = sizeof(VideoParamsVP8); - } + uint32_t profile; + uint32_t error_resilient; + uint32_t num_token_partitions; + uint32_t kf_auto; + uint32_t kf_min_dist; + uint32_t kf_max_dist; + uint32_t min_qp; + uint32_t max_qp; + uint32_t init_qp; + uint32_t rc_undershoot; + uint32_t rc_overshoot; + uint32_t hrd_buf_size; + uint32_t hrd_buf_initial_fullness; + uint32_t hrd_buf_optimal_fullness; + + VideoParamsVP8() { + type = VideoParamsTypeVP8; + size = sizeof(VideoParamsVP8); + } }; 
struct VideoConfigVP8 : VideoParamConfigSet { - uint32_t force_kf; - uint32_t no_ref_last; - uint32_t no_ref_gf; - uint32_t no_ref_arf; - uint32_t refresh_last; - uint32_t refresh_golden_frame; - uint32_t refresh_alternate_frame; - uint32_t refresh_entropy_probs; - uint32_t value; - unsigned char sharpness_level; - - VideoConfigVP8 () { - type = VideoConfigTypeVP8; - size = sizeof(VideoConfigVP8); - } + uint32_t force_kf; + uint32_t refresh_entropy_probs; + uint32_t value; + unsigned char sharpness_level; + + VideoConfigVP8 () { + type = VideoConfigTypeVP8; + size = sizeof(VideoConfigVP8); + } +}; + +struct VideoConfigVP8ReferenceFrame : VideoParamConfigSet { + + uint32_t no_ref_last; + uint32_t no_ref_gf; + uint32_t no_ref_arf; + uint32_t refresh_last; + uint32_t refresh_golden_frame; + uint32_t refresh_alternate_frame; + + VideoConfigVP8ReferenceFrame () { + type = VideoConfigTypeVP8ReferenceFrame; + size = sizeof(VideoConfigVP8ReferenceFrame); + } }; + #endif /* __VIDEO_ENCODER_DEF_H__ */ diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index 10edc2b..617813d 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -16,33 +16,34 @@ VideoEncoderVP8::VideoEncoderVP8() :VideoEncoderBase() { - mVideoParamsVP8.profile = 0; - mVideoParamsVP8.error_resilient = 0; - mVideoParamsVP8.num_token_partitions = 4; - mVideoParamsVP8.kf_auto = 1; - mVideoParamsVP8.kf_min_dist = 0; - mVideoParamsVP8.kf_max_dist = 30; - mVideoParamsVP8.min_qp = 4; - mVideoParamsVP8.max_qp = 63; - mVideoParamsVP8.init_qp = 26; - mVideoParamsVP8.rc_undershoot = 100; - mVideoParamsVP8.rc_overshoot = 100; - mVideoParamsVP8.hrd_buf_size = 6000; - mVideoParamsVP8.hrd_buf_initial_fullness = 4000; - mVideoParamsVP8.hrd_buf_optimal_fullness = 5000; - - mVideoConfigVP8.force_kf = 0; - mVideoConfigVP8.no_ref_last = 0; - mVideoConfigVP8.no_ref_gf = 1; - mVideoConfigVP8.no_ref_arf = 1; - mVideoConfigVP8.refresh_last = 1; - mVideoConfigVP8.refresh_golden_frame = 1; - mVideoConfigVP8.refresh_alternate_frame = 1; - mVideoConfigVP8.refresh_entropy_probs = 0; - mVideoConfigVP8.value = 0; - mVideoConfigVP8.sharpness_level = 2; - - mComParams.profile = VAProfileVP8Version0_3; + mVideoParamsVP8.profile = 0; + mVideoParamsVP8.error_resilient = 0; + mVideoParamsVP8.num_token_partitions = 4; + mVideoParamsVP8.kf_auto = 1; + mVideoParamsVP8.kf_min_dist = 0; + mVideoParamsVP8.kf_max_dist = 30; + mVideoParamsVP8.min_qp = 4; + mVideoParamsVP8.max_qp = 63; + mVideoParamsVP8.init_qp = 26; + mVideoParamsVP8.rc_undershoot = 100; + mVideoParamsVP8.rc_overshoot = 100; + mVideoParamsVP8.hrd_buf_size = 6000; + mVideoParamsVP8.hrd_buf_initial_fullness = 4000; + mVideoParamsVP8.hrd_buf_optimal_fullness = 5000; + + mVideoConfigVP8.force_kf = 0; + mVideoConfigVP8.refresh_entropy_probs = 0; + mVideoConfigVP8.value = 0; + mVideoConfigVP8.sharpness_level = 2; + + mVideoConfigVP8ReferenceFrame.no_ref_last = 0; + mVideoConfigVP8ReferenceFrame.no_ref_gf = 0; + mVideoConfigVP8ReferenceFrame.no_ref_arf = 0; + mVideoConfigVP8ReferenceFrame.refresh_last = 1; + mVideoConfigVP8ReferenceFrame.refresh_golden_frame = 1; + mVideoConfigVP8ReferenceFrame.refresh_alternate_frame = 1; + + mComParams.profile = VAProfileVP8Version0_3; } VideoEncoderVP8::~VideoEncoderVP8() { @@ -91,16 +92,16 @@ Encode_Status VideoEncoderVP8::renderPictureParams(EncodeTask *task) { vp8PicParam.pic_flags.value = 0; vp8PicParam.ref_flags.bits.force_kf = mVideoConfigVP8.force_kf; //0; if(!vp8PicParam.ref_flags.bits.force_kf) { - 
vp8PicParam.ref_flags.bits.no_ref_last = mVideoConfigVP8.no_ref_last; - vp8PicParam.ref_flags.bits.no_ref_arf = mVideoConfigVP8.no_ref_arf; - vp8PicParam.ref_flags.bits.no_ref_gf = mVideoConfigVP8.no_ref_gf; + vp8PicParam.ref_flags.bits.no_ref_last = mVideoConfigVP8ReferenceFrame.no_ref_last; + vp8PicParam.ref_flags.bits.no_ref_arf = mVideoConfigVP8ReferenceFrame.no_ref_arf; + vp8PicParam.ref_flags.bits.no_ref_gf = mVideoConfigVP8ReferenceFrame.no_ref_gf; } vp8PicParam.pic_flags.bits.refresh_entropy_probs = 0; vp8PicParam.sharpness_level = 2; vp8PicParam.pic_flags.bits.num_token_partitions = 2; - vp8PicParam.pic_flags.bits.refresh_last = mVideoConfigVP8.refresh_last; - vp8PicParam.pic_flags.bits.refresh_golden_frame = mVideoConfigVP8.refresh_golden_frame; - vp8PicParam.pic_flags.bits.refresh_alternate_frame = mVideoConfigVP8.refresh_alternate_frame; + vp8PicParam.pic_flags.bits.refresh_last = mVideoConfigVP8ReferenceFrame.refresh_last; + vp8PicParam.pic_flags.bits.refresh_golden_frame = mVideoConfigVP8ReferenceFrame.refresh_golden_frame; + vp8PicParam.pic_flags.bits.refresh_alternate_frame = mVideoConfigVP8ReferenceFrame.refresh_alternate_frame; vaStatus = vaCreateBuffer( mVADisplay, mVAContext, @@ -246,35 +247,91 @@ Encode_Status VideoEncoderVP8::derivedGetParams(VideoParamConfigSet *videoEncPar VideoParamsVP8 *encParamsVP8 = reinterpret_cast (videoEncParams); if (encParamsVP8->size != sizeof(VideoParamsVP8)) { - return ENCODE_INVALID_PARAMS; - } + return ENCODE_INVALID_PARAMS; + } - *encParamsVP8 = mVideoParamsVP8; - return ENCODE_SUCCESS; + *encParamsVP8 = mVideoParamsVP8; + return ENCODE_SUCCESS; } Encode_Status VideoEncoderVP8::derivedGetConfig(VideoParamConfigSet *videoEncConfig) { - CHECK_NULL_RETURN_IFFAIL(videoEncConfig); - VideoConfigVP8 *encConfigVP8 = reinterpret_cast (videoEncConfig); + CHECK_NULL_RETURN_IFFAIL(videoEncConfig); - if (encConfigVP8->size != sizeof(VideoConfigVP8)) { - return ENCODE_INVALID_PARAMS; - } + switch (videoEncConfig->type) + { + case VideoConfigTypeVP8:{ + VideoConfigVP8 *encConfigVP8 = + reinterpret_cast (videoEncConfig); - *encConfigVP8 = mVideoConfigVP8; - return ENCODE_SUCCESS; + if (encConfigVP8->size != sizeof(VideoConfigVP8)) { + return ENCODE_INVALID_PARAMS; + } + + *encConfigVP8 = mVideoConfigVP8; + } + break; + + case VideoConfigTypeVP8ReferenceFrame:{ + + VideoConfigVP8ReferenceFrame *encConfigVP8ReferenceFrame = + reinterpret_cast (videoEncConfig); + + if (encConfigVP8ReferenceFrame->size != sizeof(VideoConfigVP8ReferenceFrame)) { + return ENCODE_INVALID_PARAMS; + } + + *encConfigVP8ReferenceFrame = mVideoConfigVP8ReferenceFrame; + + } + break; + + default: { + LOG_E ("Invalid Config Type"); + break; + } + } + + return ENCODE_SUCCESS; } Encode_Status VideoEncoderVP8::derivedSetConfig(VideoParamConfigSet *videoEncConfig) { - CHECK_NULL_RETURN_IFFAIL(videoEncConfig); - VideoConfigVP8 *encConfigVP8 = reinterpret_cast (videoEncConfig); + CHECK_NULL_RETURN_IFFAIL(videoEncConfig); - if (encConfigVP8->size != sizeof(VideoConfigVP8)) { - return ENCODE_INVALID_PARAMS; - } + //LOGE ("%s begin",__func__); - mVideoConfigVP8 = *encConfigVP8; - return ENCODE_SUCCESS; + switch (videoEncConfig->type) + { + case VideoConfigTypeVP8:{ + VideoConfigVP8 *encConfigVP8 = + reinterpret_cast (videoEncConfig); + + if (encConfigVP8->size != sizeof(VideoConfigVP8)) { + return ENCODE_INVALID_PARAMS; + } + + mVideoConfigVP8 = *encConfigVP8; + } + break; + + case VideoConfigTypeVP8ReferenceFrame:{ + VideoConfigVP8ReferenceFrame *encConfigVP8ReferenceFrame = + 
reinterpret_cast (videoEncConfig); + + if (encConfigVP8ReferenceFrame->size != sizeof(VideoConfigVP8ReferenceFrame)) { + return ENCODE_INVALID_PARAMS; + } + + mVideoConfigVP8ReferenceFrame = *encConfigVP8ReferenceFrame; + + } + break; + + default: { + LOG_E ("Invalid Config Type"); + break; + } + } + return ENCODE_SUCCESS; } diff --git a/videoencoder/VideoEncoderVP8.h b/videoencoder/VideoEncoderVP8.h index efc3199..cacb7c1 100644 --- a/videoencoder/VideoEncoderVP8.h +++ b/videoencoder/VideoEncoderVP8.h @@ -42,6 +42,7 @@ private: VideoConfigVP8 mVideoConfigVP8; VideoParamsVP8 mVideoParamsVP8; + VideoConfigVP8ReferenceFrame mVideoConfigVP8ReferenceFrame; }; #endif /* __VIDEO_ENCODER_VP8_H__ */ -- cgit v1.2.3 From 2161f9541bc694120ec51e542ef9e0c1ecb48112 Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Mon, 23 Sep 2013 17:05:56 +0800 Subject: libmix: A fix to enable range mapped feature for VC1 BZ: 129080 In VC1 advanced profile, if one syntax element Range Mapping Luma Flag is present in Entry Point Header and equals 1, scaling should be performed after all other decoding stage (including loop-filter) have been performed. If out-of-loop post-processing is done on the render target, then we need to keep the in-loop decoded picture as a reference picture. However, video decoder failed to keep in-loop decoded picture. Change-Id: I0f163982fc9e8c30071c651670c6154bbf76596e Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/134060 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videodecoder/VideoDecoderBase.cpp | 23 ++++++++++++++++---- videodecoder/VideoDecoderBase.h | 4 +++- videodecoder/VideoDecoderWMV.cpp | 46 ++++++++++++++++++++++++++++++++++----- videodecoder/VideoDecoderWMV.h | 7 ++++++ 4 files changed, 70 insertions(+), 10 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index f2d72d6..6509e01 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -702,7 +702,7 @@ exit: } -Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { +Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, int32_t numExtraSurface) { VAStatus vaStatus = VA_STATUS_SUCCESS; Decode_Status status; VAConfigAttrib attrib; @@ -789,7 +789,9 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { } mNumSurfaces = numSurface; - mSurfaces = new VASurfaceID [mNumSurfaces]; + mNumExtraSurfaces = numExtraSurface; + mSurfaces = new VASurfaceID [mNumSurfaces + mNumExtraSurfaces]; + mExtraSurfaces = mSurfaces + mNumSurfaces; if (mSurfaces == NULL) { return DECODE_MEMORY_FAIL; } @@ -846,6 +848,19 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { } CHECK_VA_STATUS("vaCreateSurfaces"); + if (mNumExtraSurfaces != 0) { + vaStatus = vaCreateSurfaces( + mVADisplay, + format, + mVideoFormatInfo.width, + mVideoFormatInfo.height, + mExtraSurfaces, + mNumExtraSurfaces, + NULL, + 0); + CHECK_VA_STATUS("vaCreateSurfaces"); + } + mVideoFormatInfo.surfaceNumber = mNumSurfaces; mVideoFormatInfo.ctxSurfaces = mSurfaces; @@ -857,7 +872,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) { mVideoFormatInfo.height, 0, mSurfaces, - mNumSurfaces, + mNumSurfaces + mNumExtraSurfaces, &mVAContext); CHECK_VA_STATUS("vaCreateContext"); } @@ -924,7 +939,7 @@ Decode_Status VideoDecoderBase::terminateVA(void) { if (mSurfaces) { - vaDestroySurfaces(mVADisplay, mSurfaces, 
mNumSurfaces); + vaDestroySurfaces(mVADisplay, mSurfaces, mNumSurfaces + mNumExtraSurfaces); delete [] mSurfaces; mSurfaces = NULL; } diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index cb88622..eab4058 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -79,7 +79,7 @@ protected: virtual VideoSurfaceBuffer* findOutputByPoc(bool draining = false); virtual VideoSurfaceBuffer* findOutputByPct(bool draining = false); virtual VideoSurfaceBuffer* findOutputByPts(bool draining = false); - virtual Decode_Status setupVA(int32_t numSurface, VAProfile profile); + virtual Decode_Status setupVA(int32_t numSurface, VAProfile profile, int32_t numExtraSurface = 0); virtual Decode_Status terminateVA(void); virtual Decode_Status parseBuffer(uint8_t *buffer, int32_t size, bool config, void** vbpData); @@ -107,6 +107,8 @@ protected: VADisplay mVADisplay; VAContextID mVAContext; VAConfigID mVAConfig; + VASurfaceID *mExtraSurfaces; // extra surfaces array + int32_t mNumExtraSurfaces; bool mVAStarted; uint64_t mCurrentPTS; // current presentation time stamp (unit is unknown, depend on the framework: GStreamer 100-nanosec, Android: microsecond) // the following three member variables should be set using diff --git a/videodecoder/VideoDecoderWMV.cpp b/videodecoder/VideoDecoderWMV.cpp index 7bb9ece..41b3cda 100644 --- a/videodecoder/VideoDecoderWMV.cpp +++ b/videodecoder/VideoDecoderWMV.cpp @@ -30,7 +30,11 @@ VideoDecoderWMV::VideoDecoderWMV(const char *mimeType) : VideoDecoderBase(mimeType, VBP_VC1), mBufferIDs(NULL), mNumBufferIDs(0), - mConfigDataParsed(false) { + mConfigDataParsed(false), + mRangeMapped(false), + mDeblockedCurrPicIndex(0), + mDeblockedLastPicIndex(1), + mDeblockedForwardPicIndex(2) { } @@ -64,12 +68,22 @@ void VideoDecoderWMV::stop(void) { } mNumBufferIDs = 0; mConfigDataParsed = false; + mRangeMapped = false; + + mDeblockedCurrPicIndex = 0; + mDeblockedLastPicIndex = 1; + mDeblockedForwardPicIndex = 2; VideoDecoderBase::stop(); } void VideoDecoderWMV::flush(void) { VideoDecoderBase::flush(); + + mRangeMapped = false; + mDeblockedCurrPicIndex = 0; + mDeblockedLastPicIndex = 1; + mDeblockedForwardPicIndex = 2; } Decode_Status VideoDecoderWMV::decode(VideoDecodeBuffer *buffer) { @@ -152,6 +166,9 @@ Decode_Status VideoDecoderWMV::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_v } else { mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE; } + + mRangeMapped = (data->se_data->RANGE_MAPY_FLAG || data->se_data->RANGE_MAPUV_FLAG || data->se_data->RANGERED); + int frameType = data->pic_data[0].pic_parms->picture_fields.bits.picture_type; mAcquiredBuffer->referenceFrame = (frameType == VC1_PTYPE_I || frameType == VC1_PTYPE_P); @@ -166,6 +183,10 @@ Decode_Status VideoDecoderWMV::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_v } } + if (mRangeMapped) { + updateDeblockedPicIndexes(frameType); + } + // let outputSurfaceBuffer handle "asReference" for VC1 status = outputSurfaceBuffer(); return status; @@ -189,9 +210,9 @@ Decode_Status VideoDecoderWMV::decodePicture(vbp_data_vc1 *data, int32_t picInde status = setReference(picParams, picIndex, mAcquiredBuffer->renderBuffer.surface); CHECK_STATUS("setReference"); - if (data->se_data->LOOPFILTER) { - //Loop filter handling - picParams->inloop_decoded_picture = mAcquiredBuffer->renderBuffer.surface; + if (mRangeMapped) { + // keep the destination surface for the picture after decoding and in-loop filtering + picParams->inloop_decoded_picture = mExtraSurfaces[mDeblockedCurrPicIndex]; } 
else { picParams->inloop_decoded_picture = VA_INVALID_SURFACE; } @@ -334,6 +355,21 @@ Decode_Status VideoDecoderWMV::setReference( return DECODE_SUCCESS; } +void VideoDecoderWMV::updateDeblockedPicIndexes(int frameType) { + int32_t curPicIndex = mDeblockedCurrPicIndex; + + /* Out Loop (range map) buffers */ + if (frameType != VC1_PTYPE_SKIPPED) { + if ((frameType == VC1_PTYPE_I) || (frameType == VC1_PTYPE_P)) { + mDeblockedCurrPicIndex = mDeblockedLastPicIndex; + mDeblockedLastPicIndex = curPicIndex; + } else { + mDeblockedCurrPicIndex = mDeblockedForwardPicIndex; + mDeblockedForwardPicIndex = curPicIndex; + } + } +} + Decode_Status VideoDecoderWMV::updateConfigData( uint8_t *configData, int32_t configDataLen, @@ -393,7 +429,7 @@ Decode_Status VideoDecoderWMV::startVA(vbp_data_vc1 *data) { break; } - return VideoDecoderBase::setupVA(VC1_SURFACE_NUMBER, vaProfile); + return VideoDecoderBase::setupVA(VC1_SURFACE_NUMBER, vaProfile, VC1_EXTRA_SURFACE_NUMBER); } void VideoDecoderWMV::updateFormatInfo(vbp_data_vc1 *data) { diff --git a/videodecoder/VideoDecoderWMV.h b/videodecoder/VideoDecoderWMV.h index e1b0f99..6a4a6bb 100644 --- a/videodecoder/VideoDecoderWMV.h +++ b/videodecoder/VideoDecoderWMV.h @@ -42,6 +42,7 @@ private: Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_vc1 *data); Decode_Status decodePicture(vbp_data_vc1 *data, int32_t picIndex); Decode_Status setReference(VAPictureParameterBufferVC1 *params, int32_t picIndex, VASurfaceID current); + void updateDeblockedPicIndexes(int frameType); Decode_Status updateConfigData(uint8_t *configData, int32_t configDataLen, uint8_t **newConfigData, int32_t *newConfigDataLen); Decode_Status startVA(vbp_data_vc1 *data); void updateFormatInfo(vbp_data_vc1 *data); @@ -51,11 +52,17 @@ private: private: enum { VC1_SURFACE_NUMBER = 10, + VC1_EXTRA_SURFACE_NUMBER = 3, }; VABufferID *mBufferIDs; int32_t mNumBufferIDs; bool mConfigDataParsed; + bool mRangeMapped; + + int32_t mDeblockedCurrPicIndex; + int32_t mDeblockedLastPicIndex; + int32_t mDeblockedForwardPicIndex; }; -- cgit v1.2.3 From 5f67e25fa41194697c4a2701f00a94d434320a75 Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Wed, 18 Sep 2013 12:12:12 +0800 Subject: libmix: fix a VC1 parser issue BZ: 129080 Two issues have been fixed in VC1 parser. 1. User data should be identified. 2. slice_vertical_position should be parsed from bitstream instead of being assigned slice number. 
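As a sketch of fix 2 (not part of the patch itself; it assumes 16x16 macroblocks, so coded_height / 16 gives the number of macroblock rows), the vertical position now comes from the SLICE_ADDR field parsed out of the picture layer header rather than from the running slice count:

    /* Hypothetical helper mirroring the hunk below: SLICE_ADDR is the
     * macroblock-row address parsed from the slice header; coded_height
     * is taken from the picture parameters. */
    static uint32_t vc1_slice_vertical_position(uint32_t slice_addr,
                                                uint32_t coded_height)
    {
        uint32_t mb_rows = coded_height / 16;  /* macroblock rows per picture */
        return slice_addr % mb_rows;           /* row of the slice's first MB */
    }

Before this change the code assigned pic_data->num_slices, which only matches the true vertical position when each slice happens to start exactly one macroblock row below the previous one.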
Change-Id: Idaa5d40aa473b9f9771b0eb308c13d0553ddf2f9 Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/133550 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c index 13a5a89..b42c812 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c @@ -340,7 +340,7 @@ static uint32 vbp_parse_start_code_helper_vc1( cxt->list.data[cxt->list.num_items].stpos - PREFIX_SIZE; - if (start_code >= 0x0A && start_code <= 0x0F) + if ((start_code >= 0x0A && start_code <= 0x0F) || (start_code >= 0x1B && start_code <= 0x1F)) { /* only put known start code to the list * 0x0A: end of sequence @@ -349,6 +349,7 @@ static uint32 vbp_parse_start_code_helper_vc1( * 0x0D: field header * 0x0E: entry point header * 0x0F: sequence header + * 0x1B ~ 0x1F: user data */ cxt->list.num_items++; } @@ -1030,6 +1031,9 @@ static void vbp_pack_slice_data_vc1( uint8 is_emul; viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul); + vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data; + vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader); + vbp_slice_data_vc1 *slc_data = &(pic_data->slc_data[pic_data->num_slices]); VASliceParameterBufferVC1 *slc_parms = &(slc_data->slc_parms); @@ -1047,8 +1051,8 @@ static void vbp_pack_slice_data_vc1( slc_parms->macroblock_offset = bit + byte * 8; - /* fix this. we need o get the slice_vertical_position from the code */ - slc_parms->slice_vertical_position = pic_data->num_slices; + /* get the slice_vertical_position from the code */ + slc_parms->slice_vertical_position = (picLayerHeader->SLICE_ADDR % (pic_data->pic_parms->coded_height / 16)); pic_data->num_slices++; } -- cgit v1.2.3 From 2932e5fc812228987ecc50a8a771a328f4c502a6 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Mon, 16 Sep 2013 13:44:31 +0800 Subject: delete the dummy workload related code in mix parser for all the codec. 
BZ: 131068 Change-Id: I4fb72f352e163426700ba14d1fa72eb85b19c7fe Signed-off-by: ywan171 Reviewed-on: http://android.intel.com:8080/133559 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- mixvbp/include/viddec_debug.h | 31 - mixvbp/include/viddec_fw_decoder_host.h | 242 ---- mixvbp/include/viddec_fw_frame_attr.h | 2 - mixvbp/include/viddec_fw_item_types.h | 784 ------------- mixvbp/include/viddec_fw_parser_host.h | 237 ---- mixvbp/include/viddec_fw_workload.h | 152 --- mixvbp/vbp_manager/include/viddec_parser_ops.h | 19 - mixvbp/vbp_manager/include/viddec_pm.h | 58 +- mixvbp/vbp_manager/vbp_utils.c | 27 - mixvbp/vbp_manager/vbp_utils.h | 3 - mixvbp/vbp_manager/vbp_vp8_parser.c | 1 + mixvbp/vbp_manager/viddec_pm_parser_ops.c | 10 +- mixvbp/vbp_plugin/h264/h264parse_dpb.c | 1 - mixvbp/vbp_plugin/h264/h264parse_sei.c | 124 -- mixvbp/vbp_plugin/h264/include/h264.h | 14 +- mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c | 298 ----- mixvbp/vbp_plugin/h264/viddec_h264_parse.c | 1 - mixvbp/vbp_plugin/h264/viddec_h264_workload.c | 1195 -------------------- mixvbp/vbp_plugin/mp4/Android.mk | 1 - mixvbp/vbp_plugin/mp4/include/viddec_fw_mp4.h | 16 +- mixvbp/vbp_plugin/mp4/viddec_fw_mp4_workload.c | 377 ------ mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c | 1 - mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c | 17 - .../vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c | 37 - .../vbp_plugin/mp4/viddec_mp4_videoobjectplane.c | 19 - mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c | 96 -- mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.h | 2 - mixvbp/vbp_plugin/vc1/include/vc1common.h | 4 +- mixvbp/vbp_plugin/vc1/vc1.h | 6 +- mixvbp/vbp_plugin/vc1/vc1parse.c | 182 --- mixvbp/vbp_plugin/vc1/vc1parse.h | 5 - mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c | 19 +- mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c | 1 - mixvbp/vbp_plugin/vc1/viddec_vc1_workload.c | 960 ---------------- mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c | 1 - 35 files changed, 13 insertions(+), 4930 deletions(-) delete mode 100755 mixvbp/include/viddec_debug.h delete mode 100644 mixvbp/include/viddec_fw_decoder_host.h delete mode 100644 mixvbp/include/viddec_fw_item_types.h delete mode 100644 mixvbp/include/viddec_fw_parser_host.h delete mode 100644 mixvbp/include/viddec_fw_workload.h delete mode 100755 mixvbp/vbp_plugin/h264/viddec_h264_workload.c delete mode 100755 mixvbp/vbp_plugin/mp4/viddec_fw_mp4_workload.c delete mode 100755 mixvbp/vbp_plugin/vc1/viddec_vc1_workload.c diff --git a/mixvbp/include/viddec_debug.h b/mixvbp/include/viddec_debug.h deleted file mode 100755 index fcae102..0000000 --- a/mixvbp/include/viddec_debug.h +++ /dev/null @@ -1,31 +0,0 @@ -#ifndef VIDDEC_DEBUG_H -#define VIDDEC_DEBUG_H - -#ifndef VBP - -#ifdef HOST_ONLY -#include -#include -#define DEB OS_PRINT -#define FWTRACE OS_PRINT("trace:%s %d\n", __FUNCTION__, __LINE__ ); -// #define DEB(format, args...) -// #define FWTRACE -#define DEB_FNAME(format, args...) OS_PRINT("%s: %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ , ## args ) -#define CDEB(a, format, args...) if(a != 0) {DEB(format, ##args);} -#else -#define DEB(format, args...) -#define FWTRACE -#define CDEB(a, format, args...) -#define DEB_FNAME(format, args...) -#endif - -#else // VBP is defined - -#define DEB(format, args...) -#define FWTRACE -#define CDEB(a, format, args...) -#define DEB_FNAME(format, args...) 
- -#endif // end of VBP - -#endif diff --git a/mixvbp/include/viddec_fw_decoder_host.h b/mixvbp/include/viddec_fw_decoder_host.h deleted file mode 100644 index d902520..0000000 --- a/mixvbp/include/viddec_fw_decoder_host.h +++ /dev/null @@ -1,242 +0,0 @@ -/* - This file is provided under a dual BSD/GPLv2 license. When using or - redistributing this file, you may do so under either license. - - GPL LICENSE SUMMARY - - Copyright(c) 2007-2009 Intel Corporation. All rights reserved. - - This program is free software; you can redistribute it and/or modify - it under the terms of version 2 of the GNU General Public License as - published by the Free Software Foundation. - - This program is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. - The full GNU General Public License is included in this distribution - in the file called LICENSE.GPL. - - Contact Information: - - BSD LICENSE - - Copyright(c) 2007-2009 Intel Corporation. All rights reserved. - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the - distribution. - * Neither the name of Intel Corporation nor the names of its - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -*/ - -#ifndef VIDDEC_FW_DECODER_HOST_H -#define VIDDEC_FW_DECODER_HOST_H - -#ifdef __cplusplus -extern "C" { -#endif - -#include "viddec_fw_common_defs.h" - - /** @weakgroup viddec Fw Decoder interface Functions */ - /** @ingroup viddec_fw_decoder */ - /*@{*/ - - /** - This function returns the size required for loading fw. - @retval size : Required size. - */ - uint32_t viddec_fw_decoder_query_fwsize(void); - - /** - This function loads Decoder Firmware and initialises necessary state information. - @param[in] phys : Physical address on where firmware should be loaded. - @param[in] len : Length of data allocated at phys. - @retval VIDDEC_FW_SUCCESS : Successfully loaded firmware. 
- @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. - @retval VIDDEC_FW_NORESOURCES : Failed to allocate resources for Loading firmware. - @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. - */ - uint32_t viddec_fw_decoder_loadfw(uint32_t phys, uint32_t len); - - /** - This function returns required size for global memory for all supported decoders. This is a synchronous message to FW. - @param[out] size : returns the size required. - @retval VIDDEC_FW_SUCCESS : Successfuly got required information from FW. - @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. - */ - uint32_t viddec_fw_decoder_query_fwsize_scratchmem(uint32_t *size); - - /** - This function sets global memory for the firmware to use.This is a synchronous message to FW. - @param[in] phys : Physical address on where global memory starts. - @param[in] len : Length of data allocated at phys. - @retval VIDDEC_FW_SUCCESS : Successfully setup global memory. - @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. - */ - uint32_t viddec_fw_decoder_set_fw_scratchmem(uint32_t phys, uint32_t len); - - /** - This function returns the size required opening a stream. This a synchronous message to FW. - @param[in] codec_type : Type of codec that we want information about. - @param[out] size : Size of memory required for opening a stream. - @retval VIDDEC_FW_SUCCESS : Successfuly talked to FW and got required size. - @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. - */ - uint32_t viddec_fw_decoder_query_streamsize(uint32_t codec_type, uint32_t *size); - - /** - This function opens requested codec.This a synchronous message to FW. - @param[in] codec_type : Type of codec that we want to open. - @param[in] phys : Physical address of allocated memory for this codec. - @param[in] prority : Priority of stream. 1 for realtime and 0 for background. - @param[out] strm_handle : Handle of the opened stream. - @retval VIDDEC_FW_SUCCESS : Successfully Opened the stream. - @retval VIDDEC_FW_FAILURE : Failed to Open a stream. - */ - uint32_t viddec_fw_decoder_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority); - - - /** - This function closes stream.This a synchronous message to FW. - @param[in] strm_handle : Handle of the stream to close. - */ - void viddec_fw_decoder_closestream(uint32_t strm_handle); - - /** - This function allows to get current status of the decoder workload queues. If the current stream is active we return - number of input messages that can be written to input queue and the number of messages in output queue of the stream. - - Normally this is called when Host receives an interrupt from decoder, In which case before releasing the INT - Host will try its best to keep the FW busy. Normally when a interrupt is received it means at least one workload is - written into output queue of a stream. - @param[in] strm_handle : The handle of stream that we want to get status of queues. - @param[out] status : The status of each queue gets updated in here. - @retval VIDDEC_FW_SUCCESS : Successfully Got the status information. - @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter in this case an inactive stream. - */ - uint32_t viddec_fw_decoder_get_queue_status(uint32_t strm_handle, viddec_fw_decoder_q_status_t *status); - - /** - This function flushes the current stream. This is a synchronous message to FW. - Before calling this function the host has to make sure the output queue of the firmware - is empty. 
After this function is executed the FW will read all entries in input - wkld buffer queue into output queue. After this operation the host has to read all entries - in output queue again to finish the flush operation. - @param[in] flush_type : Type of flush we want to perform.ex:flush and discard. - @param[in] strm_handle : Handle of the stream we want to flush. - @retval VIDDEC_FW_SUCCESS : Successfully flushed the stream. - @retval VIDDEC_FW_FAILURE : Failed to flush a stream. - */ - uint32_t viddec_fw_decoder_flushstream(uint32_t strm_handle, uint32_t flush_type); - - /** - This function sends an input workload buffer. The host should provide required frame buffers in this workload before - sending it to fw. - @param[in] strm_handle : The handle of stream that we want to send workload buffer to. - @param[in] cur_wkld : The workload buffer we want to send. - @retval VIDDEC_FW_SUCCESS : Successfully Sent the message. - @retval VIDDEC_FW_PORT_FULL : Port to fw full unsuccesful in sending message. - */ - uint32_t viddec_fw_decoder_send(uint32_t strm_handle, ipc_msg_data *cur_wkld); - - /** - This function gets the decoded workload from fw. - @param[in] strm_handle : The handle of stream that we want to read workload from. - @param[out] cur_wkld : The workload descriptor. - @retval VIDDEC_FW_SUCCESS : Successfully Sent the message. - @retval VIDDEC_FW_PORT_EMPTY : Workload port is empty,unsuccesful in reading wkld. - */ - uint32_t viddec_fw_decoder_recv(uint32_t strm_handle, ipc_msg_data *cur_wkld); - - /** - This function unloads Decoder Firmware and free's the resources allocated in Load fw. - If this function is called before load fw it will crash with a segmentation fault. - */ - void viddec_fw_decoder_deinit(void); - - /** - This function gets the major and minor revison numbers of the loaded firmware. - @param[out] major : The major revision number. - @param[out] minor : The minor revision number. - @param[out] build : The Internal Build number. - */ - void viddec_fw_decoder_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build); - - /** - This function returns the interrupt status of all streams which need to be processed. A value of zero - means no active streams which generated this interrupt. - */ - uint32_t viddec_fw_decoder_active_pending_interrupts(void); - - /** - This function clears the interrupts for all active streams represented by status input parameter. - The status should always be a value that was returned by viddec_fw_decoder_active_pending_interrupts(). - @param[in] status : The status value that was returned by viddec_fw_decoder_active_pending_interrupts(). - */ - void viddec_fw_decoder_clear_all_pending_interrupts(uint32_t status); - - /** - This function enables/disables interrupt for the stream specified. - @param[in] strm_handle : The handle of stream that we want enable or disable interrupts for. - @param[in] enable : Boolean value if ==0 means disable Interrupts else enable. - @retval VIDDEC_FW_SUCCESS : Successfully Sent the message. - @retval VIDDEC_FW_INVALID_PARAM: Invalid stream handle was passed. - */ - uint32_t viddec_fw_decoder_set_stream_interrupt_mask(uint32_t stream_handle, uint32_t enable); - - /** - This function returns which stream interrupted in the past based on status, which is a snapshot of - interrupt status that was cleared in the past. The host has to call clear with status information - before calling this function again with status value. 
The Host should do this operation until this function - returns 0, which means all the streams that generated interrupt have been processed. - @param[out]strm_handle : The handle of a stream that generated interrupt. - @param[in] status : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts(). - @retval 1 : A valid stream handle was found. - @retval 0 : No more streams from the status which caused interrupt. - */ - uint32_t viddec_fw_decoder_get_next_stream_from_interrupt_status(uint32_t status, uint32_t *stream_handle); - - /** - This function clears the stream_handle from the status snapshot that we got from viddec_fw_decoder_active_pending_interrupts(), - This should be called after host performs all necessary actions for the stream. - @param[in] strm_handle : The handle of a stream that we want to clear to indicate we handled it. - @param[in] status : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts(). - @retval 1 : Operation was sucessful. - @retval 0 : Invalid stream handle was passed. - */ - uint32_t viddec_fw_decoder_clear_stream_from_interrupt_status(uint32_t *status, uint32_t stream_handle); - - /*@}*/ -#ifdef __cplusplus -} -#endif - -#endif//#ifndef VIDDEC_FW_DECODER_HOST_H diff --git a/mixvbp/include/viddec_fw_frame_attr.h b/mixvbp/include/viddec_fw_frame_attr.h index 4f4b479..3cea943 100644 --- a/mixvbp/include/viddec_fw_frame_attr.h +++ b/mixvbp/include/viddec_fw_frame_attr.h @@ -58,8 +58,6 @@ #ifndef VIDDEC_FW_FRAME_ATTR_H #define VIDDEC_FW_FRAME_ATTR_H -#include "viddec_fw_item_types.h" - #define VIDDEC_PANSCAN_MAX_OFFSETS 4 #define VIDDEC_MAX_CPB_CNT 32 diff --git a/mixvbp/include/viddec_fw_item_types.h b/mixvbp/include/viddec_fw_item_types.h deleted file mode 100644 index 472dff2..0000000 --- a/mixvbp/include/viddec_fw_item_types.h +++ /dev/null @@ -1,784 +0,0 @@ -/* - This file is provided under a dual BSD/GPLv2 license. When using or - redistributing this file, you may do so under either license. - - GPL LICENSE SUMMARY - - Copyright(c) 2007-2009 Intel Corporation. All rights reserved. - - This program is free software; you can redistribute it and/or modify - it under the terms of version 2 of the GNU General Public License as - published by the Free Software Foundation. - - This program is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. - The full GNU General Public License is included in this distribution - in the file called LICENSE.GPL. - - Contact Information: - - BSD LICENSE - - Copyright(c) 2007-2009 Intel Corporation. All rights reserved. - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the - distribution. 
- * Neither the name of Intel Corporation nor the names of its - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -*/ -#ifndef VIDDEC_FW_ITEM_TYPES_H -#define VIDDEC_FW_ITEM_TYPES_H - - -/* The following macros are defined to pack data into 32 bit words. - mask: A 32 bit value of N 1 bits starting from lsb where N represents the length of data we are packing. - start: Bit start position of data we want. - ex: If we want to pack Height(16bits), width(16bits) where width is from (1:16) and height is from (17:32), these are - the start and mask values for width and height. - width: start = 0 mask=0xFFFF - Height:start= 16 mask=0xFFFF - - extract: will extract data from x_32 of unsigned integer type from bit start offset(0 to 31) of length based on mask and returns in - unsigned integer type. - insert: Will insert N bits from val_32 ,where N is length based on mask, into x_32 from bit offset based on start. val_32 is expected to - be a unsigned int of N bits starting with lsb. -*/ - -#define viddec_fw_bitfields_extract(x_32, start, mask) (((x_32) >> (start)) & (mask) ) -#define viddec_fw_bitfields_insert(x_32, val_32, start, mask) ((x_32) = (((x_32) & ~( (mask) << (start))) | (((val_32) & (mask)) << (start)))) - - -/* Workload items type. 
Each item here represents data that Parser detected ex:slice data which - is used either by host or decoder.*/ -typedef enum workload_item_type -{ - VIDDEC_WORKLOAD_INVALID =0x0,/* Unknown type */ - VIDDEC_WORKLOAD_PIXEL_ES =0x100,/* Slice data tag */ - VIDDEC_WORKLOAD_TAG =0x200,/* Frame association tag */ - VIDDEC_WORKLOAD_USERDATA =0x300,/* user data tag */ - - VIDDEC_WORKLOAD_IBUF_DONE =0x500,/* Es buffer completely used tag */ - VIDDEC_WORKLOAD_IBUF_CONTINUED =0x600,/* Es buffer partially used tag */ - VIDDEC_WORKLOAD_IBUF_DISCONTINUITY =0x700,/* Discontinuity tag on first workload after discontinuity */ - VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER =0x800, /* Reorder frames in DPB tag */ - VIDDEC_WORKLOAD_IBUF_EOS =0x900,/* EOS tag on last workload used for current stream */ - VIDDEC_WORKLOAD_SEQUENCE_INFO =0xa00,/* MPEG2 Seq Hdr, H264 SPS, VC1 SeqLayer */ - VIDDEC_WORKLOAD_DISPLAY_INFO =0xb00,/* MPEG2 Seq Disp Ext, H264 VUI */ - VIDDEC_WORKLOAD_GOP_INFO =0xc00,/* MPEG2 GOP, VC1 Entrypoint */ - VIDDEC_WORKLOAD_SEQ_USER_DATA =0xd00,/* MPEG2, VC1 Sequence Level User data */ - VIDDEC_WORKLOAD_GOP_USER_DATA =0xe00,/* MPEG2, VC1 Gop Level User data */ - VIDDEC_WORKLOAD_FRM_USER_DATA =0xf00,/* MPEG2 Picture User data, VC1 Frame User data */ - - VIDDEC_WORKLOAD_FLD_USER_DATA =0x1000,/* MPEG2, VC1 Field User data */ - VIDDEC_WORKLOAD_SLC_USER_DATA =0x1100,/* VC1 Slice User data */ - VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA =0x1200,/* MPEG4 Visual Object User data */ - VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C =0x1200,/* VC1 Only */ - VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA =0x1300,/* MPEG4 Video Object Layer User data */ - VIDDEC_WORKLOAD_H264_CROPPING =0x1400,/* H264 only */ - VIDDEC_WORKLOAD_H264_PAN_SCAN =0x1500,/* H264 only */ - VIDDEC_WORKLOAD_SEI_PIC_TIMING =0x1600,/* H264 only */ - VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT =0x1700,/* H264 only */ - VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED =0x1800,/* H264 only */ - VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED =0x1900,/* H264 only */ - VIDDEC_WORKLOAD_SEI_RECOVERY_POINT =0x1a00,/* H264 only */ - VIDDEC_WORKLOAD_MPEG2_SEQ_EXT =0x1b00,/* MPEG2 Only - Sequence Extension */ - VIDDEC_WORKLOAD_H264_MVC_SPS_VIEW_IDS =0x1c00,/* H264 only */ - VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ =0x1d00,/* MPEG4 Only - Visual Sequence */ - VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ =0x1e00,/* MPEG4 Only - Video Object Layer */ - VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ =0x1f00,/* MPEG4 Only - Group of Video Object Planes */ - - VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT =0x2000,/* MPEG4 Only - Video Plane with Short Header */ - VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO =0x2100,/* H264 only */ - - VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0 =0x10000,/* required reference frames tag,last eight bits indicate index in dpb */ - VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 =0x20000,/* release frames tag, last eight bits indicate index in dpb*/ - VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 =0x30000,/* Display order in DPB tag, for H264 */ - VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 =0x40000,/* Release frames but not display, for H264 */ - VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 =0x50000,/* Release list while EOS, last eight bits indicate index in dpb */ - VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 =0x60000,/* Display list while EOS, last eight bits indicate index in dpb */ - VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0 =0x70000,/* required for H264 as it needs whole DPB for each frame */ - VIDDEC_WORKLOAD_H264_REFR_LIST_0 =0x80000,/* ref list 0 for H264 */ - VIDDEC_WORKLOAD_H264_REFR_LIST_1 =0x90000,/* ref list 1 for H264 */ - VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY 
=0xa0000,/* eos items begin after this */ - - VIDDEC_WORKLOAD_DECODER_SPECIFIC =0x100000,/* pvt info for decoder tags */ - VIDDEC_WORKLOAD_MAX, -} workload_item_type; - -struct h264_witem_sps_mvc_id -{ - /* - 0-9: num_views_minus1 - 10-19: start index of views in current item. - 20-23: Number of valid items. - */ -#define viddec_fw_h264_sps_mvc_id_get_num_views_minus1(x) viddec_fw_bitfields_extract( (x)->num_views, 0, 0x3FF) -#define viddec_fw_h264_sps_mvc_id_set_num_views_minus1(x, val) viddec_fw_bitfields_insert( (x)->num_views, val, 0, 0x3FF) -#define viddec_fw_h264_sps_mvc_id_get_cur_start_index(x) viddec_fw_bitfields_extract( (x)->num_views, 10, 0x3FF) -#define viddec_fw_h264_sps_mvc_id_set_cur_start_index(x, val) viddec_fw_bitfields_insert( (x)->num_views, val, 10, 0x3FF) -#define viddec_fw_h264_sps_mvc_id_get_num_cur_valid_items(x) viddec_fw_bitfields_extract( (x)->num_views, 20, 0x7) -#define viddec_fw_h264_sps_mvc_id_set_num_cur_valid_items(x, val) viddec_fw_bitfields_insert( (x)->num_views, val, 20, 0x7) - unsigned int num_views; - - /* We pack six id's into two integers.Each packed_view(integer) contains three 10 bit ids at 0-9, 10-19, 20-29 - These values can be extracted/set using viddec_fw_h264_sps_mvc_id_get_data_frm_index() - and viddec_fw_h264_sps_mvc_id_set_data_frm_index() functions. - */ -#define viddec_fw_h264_sps_mvc_id_max_packed_ids 6 /* Max number of packed ids in a workload item */ - unsigned int packed_view[2]; -}; - -/* This function extracts a 10 bit view id of index( <6) that was packed into h264_witem_sps_mvc_id structure */ -static inline unsigned int viddec_fw_h264_sps_mvc_id_get_data_frm_index(struct h264_witem_sps_mvc_id *data, unsigned int index) -{ - unsigned int start=0, *word; - - start = ((index > 2) ?(index - 3) : index) *10; - word = &(data->packed_view[(index > 2) ? 1:0]); - return viddec_fw_bitfields_extract(*word, start, 0x3FF); -} - -/* This function packs a 10 bit view id(val) at index( <6) in h264_witem_sps_mvc_id structure */ -static inline void viddec_fw_h264_sps_mvc_id_set_data_frm_index(struct h264_witem_sps_mvc_id *data, unsigned int index, unsigned int val) -{ - unsigned int start=0, *word; - - start = ((index > 2) ?(index - 3) : index) *10; - word = &(data->packed_view[(index > 2) ? 
1:0]); - viddec_fw_bitfields_insert(*word, val, start, 0x3FF); -} - -/* 16-byte workload */ -typedef struct viddec_workload_item -{ - enum workload_item_type vwi_type; - union - { - struct - { - unsigned int es_phys_addr; - unsigned int es_phys_len; - unsigned int es_flags; - } es; - struct - { - unsigned int tag_phys_addr; - unsigned int tag_phys_len; - unsigned int tag_value; - } tag; - struct - { - unsigned int data_offset; - unsigned int data_payload[2]; - } data; - struct - { - signed int reference_id; /* Assigned by parser */ - unsigned int luma_phys_addr; /* assigned by host, for DM */ - unsigned int chroma_phys_addr; /* assigned by host, for DM */ - } ref_frame; - struct /* when vwi_type == VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER */ - { - signed int ref_table_offset; /* Index of first "reordered" */ - /* index from Current[] for Next[offset+0], Ref[offset+1], Ref[offset+2], Ref[offset+3] */ - unsigned int ref_reorder_00010203; - /* index from Current[] for Next[offset+4], Ref[offset+5], Ref[offset+6], Ref[offset+7] */ - unsigned int ref_reorder_04050607; - } ref_reorder; - struct - { - /* we pack a maximum of 11 bytes of user data and 1 byte for size */ - /* TODO: we can pack 12 bytes and use bottom 8 bits of type to indicate size */ -#define viddec_fw_get_user_data_size(x) ((x)->user_data.size) -#define viddec_fw_get_user_data(x) (unsigned char *)&((x)->user_data.data_payload[0]) - unsigned char size; - unsigned char data_payload[11]; - /* - ITEM TYPES WHICH use this: - VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED, VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED, - VIDDEC_WORKLOAD_SEQ_USER_DATA, VIDDEC_WORKLOAD_GOP_USER_DATA, - VIDDEC_WORKLOAD_FRM_USER_DATA, VIDDEC_WORKLOAD_FLD_USER_DATA, VIDDEC_WORKLOAD_SLC_USER_DATA, - */ - } user_data; - struct - { - // Sequence Header Item I (From LSB): - // - horizontal_size_value - 12 bits - // - vertical_size_value - 12 bits - // - aspect_ratio_information - 4 bits - // - frame_rate_code - 4 bits -#define viddec_fw_mp2_sh_get_horizontal_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 0, 0xFFF) -#define viddec_fw_mp2_sh_get_vertical_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 12, 0xFFF) -#define viddec_fw_mp2_sh_get_aspect_ratio_information(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 24, 0xF) -#define viddec_fw_mp2_sh_get_frame_rate_code(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 28, 0xF) -#define viddec_fw_mp2_sh_set_horizontal_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 0, 0xFFF) -#define viddec_fw_mp2_sh_set_vertical_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 12, 0xFFF) -#define viddec_fw_mp2_sh_set_aspect_ratio_information(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 24, 0xF) -#define viddec_fw_mp2_sh_set_frame_rate_code(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 28, 0xF) - unsigned int seq_hdr_item_1; - - // Sequence Header Item II (From LSB): - // - bit_rate_value - 18 bits - // - vbv_buffer_size_value - 10 bits - // - remaining pad bits -#define viddec_fw_mp2_sh_get_bit_rate_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 0, 0x3FFFF) -#define viddec_fw_mp2_sh_get_vbv_buffer_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 18, 0x3FF) -#define viddec_fw_mp2_sh_set_bit_rate_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 0, 0x3FFFF) -#define viddec_fw_mp2_sh_set_vbv_buffer_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 18, 
0x3FF) - unsigned int seq_hdr_item_2; - - unsigned int pad; - } mp2_sh; // mp2 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO - struct - { - // Sequence Extension Item I (From LSB): - // - profile_and_level_indication - 8 bits - // - progressive_sequence - 1 bit - // - chroma_format - 2 bits - // - horizontal_size_extension - 2 bits - // - vertical_size_extension - 2 bits - // - bit_rate_extension - 12 bits - // - remaining pad bits -#define viddec_fw_mp2_se_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 0, 0xFF) -#define viddec_fw_mp2_se_get_progressive_sequence(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 8, 0x1) -#define viddec_fw_mp2_se_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 9, 0x3) -#define viddec_fw_mp2_se_get_horizontal_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 11, 0x3) -#define viddec_fw_mp2_se_get_vertical_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 13, 0x3) -#define viddec_fw_mp2_se_get_bit_rate_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 15, 0xFFF) -#define viddec_fw_mp2_se_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 0, 0xFF) -#define viddec_fw_mp2_se_set_progressive_sequence(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 8, 0x1) -#define viddec_fw_mp2_se_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 9, 0x3) -#define viddec_fw_mp2_se_set_horizontal_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 11, 0x3) -#define viddec_fw_mp2_se_set_vertical_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 13, 0x3) -#define viddec_fw_mp2_se_set_bit_rate_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 15, 0xFFF) - unsigned int seq_ext_item_1; - - // Sequence Extension Item II (From LSB): - // - vbv_buffer_size_extension - 8 bits - // - frame_rate_extension_n - 2 bits - // - frame_rate_extension_d - 5 bits - // - remaining pad bits -#define viddec_fw_mp2_se_get_vbv_buffer_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 0, 0xFF) -#define viddec_fw_mp2_se_get_frame_rate_extension_n(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 8, 0x3) -#define viddec_fw_mp2_se_get_frame_rate_extension_d(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 10, 0x1F) -#define viddec_fw_mp2_se_set_vbv_buffer_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 0, 0xFF) -#define viddec_fw_mp2_se_set_frame_rate_extension_n(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 8, 0x3) -#define viddec_fw_mp2_se_set_frame_rate_extension_d(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 10, 0x1F) - unsigned int seq_ext_item_2; - - unsigned int pad; - } mp2_se; // mp2 item of type VIDDEC_WORKLOAD_MPEG2_SEQ_EXT - struct - { - // Sequence Display Extension Item I (From LSB): - // - display_horizontal_size - 14 bits - // - display_vertical_size - 14 bits - // - video_format - 3 bits - // - color_description - 1 bit -#define viddec_fw_mp2_sde_get_display_horizontal_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 0, 0x3FFF) -#define viddec_fw_mp2_sde_get_display_vertical_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 14, 0x3FFF) -#define viddec_fw_mp2_sde_get_video_format(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 28, 0x7) -#define viddec_fw_mp2_sde_get_color_description(x) 
viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 31, 0x1) -#define viddec_fw_mp2_sde_set_display_horizontal_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 0, 0x3FFF) -#define viddec_fw_mp2_sde_set_display_vertical_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 14, 0x3FFF) -#define viddec_fw_mp2_sde_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 28, 0x7) -#define viddec_fw_mp2_sde_set_color_description(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 31, 0x1) - unsigned int seq_disp_ext_item_1; - - // Sequence Display Extension II (From LSB): - // - color_primaries - 8 bits - // - transfer_characteristics - 8 bits - // - remaining pad bits -#define viddec_fw_mp2_sde_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 0, 0xFF) -#define viddec_fw_mp2_sde_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 8, 0xFF) -#define viddec_fw_mp2_sde_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 0, 0xFF) -#define viddec_fw_mp2_sde_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 8, 0xFF) - unsigned int seq_disp_ext_item_2; - - unsigned int pad; - } mp2_sde; // mp2 item of type VIDDEC_WORKLOAD_DISPLAY_INFO - struct - { - // Group of Pictures Header Item I (From LSB): - // - closed_gop - 1 bit - // - broken_link - 1 bit - // - remaining pad bits -#define viddec_fw_mp2_gop_get_closed_gop(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 0, 0x1) -#define viddec_fw_mp2_gop_get_broken_link(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 1, 0x1) -#define viddec_fw_mp2_gop_set_closed_gop(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 0, 0x1) -#define viddec_fw_mp2_gop_set_broken_link(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 1, 0x1) - unsigned int gop_hdr_item_1; - - unsigned int pad1; - unsigned int pad2; - } mp2_gop; // mp2 item of type VIDDEC_WORKLOAD_GOP_INFO - struct - { -#define viddec_fw_vc1_get_profile(x) viddec_fw_bitfields_extract((x)->size, 30, 0x3) -#define viddec_fw_vc1_set_profile(x, val) viddec_fw_bitfields_insert((x)->size, val, 30, 0x3) - -#define viddec_fw_vc1_get_level(x) viddec_fw_bitfields_extract((x)->size, 27, 0x7) -#define viddec_fw_vc1_set_level(x, val) viddec_fw_bitfields_insert((x)->size, val, 27, 0x7) - -#define viddec_fw_vc1_get_colordiff_format(x) viddec_fw_bitfields_extract((x)->size, 25, 0x3) -#define viddec_fw_vc1_set_colordiff_format(x, val) viddec_fw_bitfields_insert((x)->size, val, 25, 0x3) - -#define viddec_fw_vc1_get_pulldown(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1) -#define viddec_fw_vc1_set_pulldown(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1) - -#define viddec_fw_vc1_get_max_coded_width(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF) -#define viddec_fw_vc1_set_max_coded_width(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF) - -#define viddec_fw_vc1_get_max_coded_height(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF) -#define viddec_fw_vc1_set_max_coded_height(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF) - -#define viddec_fw_vc1_get_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1F) -#define viddec_fw_vc1_set_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1F) - -#define viddec_fw_vc1_get_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x7) -#define 
viddec_fw_vc1_set_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x7) - -#define viddec_fw_vc1_get_interlace(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1) -#define viddec_fw_vc1_set_interlace(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1) - -#define viddec_fw_vc1_get_tfcntrflag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1) -#define viddec_fw_vc1_set_tfcntrflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1) - -#define viddec_fw_vc1_get_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 2, 0x1) -#define viddec_fw_vc1_set_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 2, 0x1) - -#define viddec_fw_vc1_get_psf(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x1) -#define viddec_fw_vc1_set_psf(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x1) - -#define viddec_fw_vc1_get_display_ext(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1) -#define viddec_fw_vc1_set_display_ext(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1) - - uint32_t size; // profile:2, level:3, colordiff_format:2, pulldown:1, max_coded_width:12, max_coded_height:12 - uint32_t flags; // bitrtq_postproc:5, frmrtq_postproc:3, interlace:1, tfcntrflag:1, finterpflag:1, psf:1, display_ext:1 - uint32_t pad; - } vc1_sl; // vc1 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO - struct - { - // This item is populated when display_ext flag is set in the sequence layer - // therefore, no need to provide this flag -#define viddec_fw_vc1_get_disp_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 18, 0x3FFF) -#define viddec_fw_vc1_set_disp_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 18, 0x3FFF) - -#define viddec_fw_vc1_get_disp_vert_size(x) viddec_fw_bitfields_extract((x)->size, 4, 0x3FFF) -#define viddec_fw_vc1_set_disp_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 4, 0x3FFF) - -#define viddec_fw_vc1_get_disp_aspect_ratio_flag(x) viddec_fw_bitfields_extract((x)->size, 3, 0x1) -#define viddec_fw_vc1_set_disp_aspect_ratio_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 3, 0x1) - -#define viddec_fw_vc1_get_disp_color_format_flag(x) viddec_fw_bitfields_extract((x)->size, 2, 0x1) -#define viddec_fw_vc1_set_disp_color_format_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 2, 0x1) - -#define viddec_fw_vc1_get_disp_framerate_flag(x) viddec_fw_bitfields_extract((x)->size, 1, 0x1) -#define viddec_fw_vc1_set_disp_framerate_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 1, 0x1) - -#define viddec_fw_vc1_get_disp_framerateind(x) viddec_fw_bitfields_extract((x)->size, 0, 0x1) -#define viddec_fw_vc1_set_disp_framerateind(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0x1) - -#define viddec_fw_vc1_get_disp_aspect_ratio(x) viddec_fw_bitfields_extract((x)->framerate, 28, 0xF) -#define viddec_fw_vc1_set_disp_aspect_ratio(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 28, 0xF) - -#define viddec_fw_vc1_get_disp_frameratenr(x) viddec_fw_bitfields_extract((x)->framerate, 20, 0xFF) -#define viddec_fw_vc1_set_disp_frameratenr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 20, 0xFF) - -#define viddec_fw_vc1_get_disp_frameratedr(x) viddec_fw_bitfields_extract((x)->framerate, 16, 0xF) -#define viddec_fw_vc1_set_disp_frameratedr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 16, 0xF) - -#define viddec_fw_vc1_get_disp_framerateexp(x) viddec_fw_bitfields_extract((x)->framerate, 0, 0xFFFF) -#define viddec_fw_vc1_set_disp_framerateexp(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 
0, 0xFFFF) - -#define viddec_fw_vc1_get_disp_aspect_ratio_horiz_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 24, 0xFF) -#define viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 24, 0xFF) - -#define viddec_fw_vc1_get_disp_aspect_ratio_vert_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 16, 0xFF) -#define viddec_fw_vc1_set_disp_aspect_ratio_vert_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 16, 0xFF) - -#define viddec_fw_vc1_get_disp_color_prim(x) viddec_fw_bitfields_extract((x)->aspectsize, 8, 0xFF) -#define viddec_fw_vc1_set_disp_color_prim(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 8, 0xFF) - -#define viddec_fw_vc1_get_disp_transfer_char(x) viddec_fw_bitfields_extract((x)->aspectsize, 0, 0xFF) -#define viddec_fw_vc1_set_disp_transfer_char(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 0, 0xFF) - - uint32_t size; // disp_horiz_size:14, disp_vert_size:14, aspect_ratio_flag:1, color_format_flag:1, framerate_flag:1, framerateind:1 - uint32_t framerate; // aspect_ratio:4, frameratenr:8, frameratedr:4, framerateexp:16 - uint32_t aspectsize; // aspect_ratio_horiz_size:8, aspect_ratio_vert_size:8, color_prim:8, transfer_char:8 - } vc1_sl_de; // vc1 item of type VIDDEC_WORKLOAD_DISPLAY_INFO - struct - { -#define viddec_fw_vc1_get_rcv_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 16, 0xFFFF) -#define viddec_fw_vc1_set_rcv_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 16, 0xFFFF) - -#define viddec_fw_vc1_get_rcv_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFFF) -#define viddec_fw_vc1_set_rcv_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFFF) - -#define viddec_fw_vc1_get_rcv_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 16, 0x1F) -#define viddec_fw_vc1_set_rcv_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 16, 0x1F) - -#define viddec_fw_vc1_get_rcv_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 13, 0x7) -#define viddec_fw_vc1_set_rcv_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 13, 0x7) - -#define viddec_fw_vc1_get_rcv_profile(x) viddec_fw_bitfields_extract((x)->flags, 9, 0xF) -#define viddec_fw_vc1_set_rcv_profile(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0xF) - -#define viddec_fw_vc1_get_rcv_level(x) viddec_fw_bitfields_extract((x)->flags, 6, 0x7) -#define viddec_fw_vc1_set_rcv_level(x, val) viddec_fw_bitfields_insert((x)->flags, val, 6, 0x7) - -#define viddec_fw_vc1_get_rcv_cbr(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x1) -#define viddec_fw_vc1_set_rcv_cbr(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x1) - -#define viddec_fw_vc1_get_rcv_rangered(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1) -#define viddec_fw_vc1_set_rcv_rangered(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1) - -#define viddec_fw_vc1_get_rcv_maxbframes(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x7) -#define viddec_fw_vc1_set_rcv_maxbframes(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x7) - -#define viddec_fw_vc1_get_rcv_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1) -#define viddec_fw_vc1_set_rcv_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1) - - uint32_t size; // horiz_size:16, vert_size:16 - uint32_t flags; // bitrtq_postproc:5, frmrtq_postproc:3, profile:4, level:3, cbr:1, rangered:1, maxbframes:3, finterpflag:1 - uint32_t pad; - } vc1_sh_struct_a_c; // vc1 item of type 
VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C - struct - { -#define viddec_fw_vc1_get_ep_size_flag(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1) -#define viddec_fw_vc1_set_ep_size_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1) - -#define viddec_fw_vc1_get_ep_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF) -#define viddec_fw_vc1_set_ep_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF) - -#define viddec_fw_vc1_get_ep_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF) -#define viddec_fw_vc1_set_ep_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF) - -#define viddec_fw_vc1_get_ep_broken_link(x) viddec_fw_bitfields_extract((x)->flags, 10, 0x1) -#define viddec_fw_vc1_set_ep_broken_link(x, val) viddec_fw_bitfields_insert((x)->flags, val, 10, 0x1) - -#define viddec_fw_vc1_get_ep_closed_entry(x) viddec_fw_bitfields_extract((x)->flags, 9, 0x1) -#define viddec_fw_vc1_set_ep_closed_entry(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0x1) - -#define viddec_fw_vc1_get_ep_panscan_flag(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1) -#define viddec_fw_vc1_set_ep_panscan_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1) - -#define viddec_fw_vc1_get_ep_range_mapy_flag(x) viddec_fw_bitfields_extract((x)->flags, 7, 0x1) -#define viddec_fw_vc1_set_ep_range_mapy_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 7, 0x1) - -#define viddec_fw_vc1_get_ep_range_mapy(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x7) -#define viddec_fw_vc1_set_ep_range_mapy(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x7) - -#define viddec_fw_vc1_get_ep_range_mapuv_flag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1) -#define viddec_fw_vc1_set_ep_range_mapuv_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1) - -#define viddec_fw_vc1_get_ep_range_mapuv(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x7) -#define viddec_fw_vc1_set_ep_range_mapuv(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x7) - - uint32_t size; // coded_size_flag:1, coded_width:12, coded_height:12 - uint32_t flags; // broken_link:1, closed_entry:1, panscan_flag:1, range_mapy_flag:1, range_mapy:3, range_mapuv_flag:1, range_mapuv:3 - uint32_t pad; - } vc1_ep; // vc1 item of type VIDDEC_WORKLOAD_GOP_INFO - struct - { - /* - 0-7 bits for profile_idc. - 8-15 bits for level_idc. - 16-17 bits for chroma_format_idc. - 18-22 bits for num_ref_frames. - 23 for gaps_in_frame_num_value_allowed_flag. - 24 for frame_mbs_only_flag. - 25 for frame_cropping_flag. - 26 for vui_parameters_present_flag. 
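Editor's note: a minimal sketch of how these packed-word accessors behave, assuming the generic helpers are the usual shift-and-mask pair (their actual definitions live earlier in this header or its includes, not in this excerpt):

    #define viddec_fw_bitfields_extract(x_32, start, mask)    (((x_32) >> (start)) & (mask))
    #define viddec_fw_bitfields_insert(x_32, val_32, start, mask) \
        ((x_32) = (((x_32) & ~((mask) << (start))) | (((val_32) & (mask)) << (start))))

Under that assumption, a round trip through the SPS accessors defined just below looks like:

    struct { unsigned int sps_messages; } sps = { 0 };
    viddec_fw_h264_sps_set_profile_idc(&sps, 100);   // 100 = High profile
    viddec_fw_h264_sps_set_level_idc(&sps, 41);      // 41 = Level 4.1
    // sps.sps_messages == 0x2964, and get_profile_idc(&sps) yields 100 again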
- */ -#define viddec_fw_h264_sps_get_profile_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 0, 0xFF) -#define viddec_fw_h264_sps_set_profile_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 0, 0xFF) -#define viddec_fw_h264_sps_get_level_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 8, 0xFF) -#define viddec_fw_h264_sps_set_level_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 8, 0xFF) -#define viddec_fw_h264_sps_get_chroma_format_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 16, 0x3) -#define viddec_fw_h264_sps_set_chroma_format_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 16, 0x3) -#define viddec_fw_h264_sps_get_num_ref_frames(x) viddec_fw_bitfields_extract( (x)->sps_messages, 18, 0x1F) -#define viddec_fw_h264_sps_set_num_ref_frames(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 18, 0x1F) -#define viddec_fw_h264_sps_get_gaps_in_frame_num_value_allowed_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 23, 0x1) -#define viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 23, 0x1) -#define viddec_fw_h264_sps_get_frame_mbs_only_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 24, 0x1) -#define viddec_fw_h264_sps_set_frame_mbs_only_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 24, 0x1) -#define viddec_fw_h264_sps_get_frame_cropping_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 25, 0x1) -#define viddec_fw_h264_sps_set_frame_cropping_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 25, 0x1) -#define viddec_fw_h264_sps_get_vui_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 26, 0x1) -#define viddec_fw_h264_sps_set_vui_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 26, 0x1) - unsigned int sps_messages; - unsigned int pic_width_in_mbs_minus1; - unsigned int pic_height_in_map_units_minus1; - } h264_sps; // h264 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO - - struct h264_witem_sps_mvc_id h264_sps_mvc_id; - - struct - { -#define viddec_fw_h264_cropping_get_left(x) viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF) -#define viddec_fw_h264_cropping_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF) -#define viddec_fw_h264_cropping_set_left(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF) -#define viddec_fw_h264_cropping_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF) - unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */ -#define viddec_fw_h264_cropping_get_top(x) viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF) -#define viddec_fw_h264_cropping_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF) -#define viddec_fw_h264_cropping_set_top(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF) -#define viddec_fw_h264_cropping_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF) - unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */ - unsigned int pad; - } h264_cropping; // h264 item of type VIDDEC_WORKLOAD_H264_CROPPING - - struct - { - /* 0 bit for aspect_ratio_info_present_flag - 1 st bit for video_signal_type_present_flag - 2 nd bit for colour_description_present_flag - 3 rd bit for timing_info_present_flag - 4 th bit for nal_hrd_parameters_present_flag - 5 th bit for vcl_hrd_parameters_present_flag - 6 th bit for 
fixed_frame_rate_flag - 7 th bit for pic_struct_present_flag - 8 th bit for low_delay_hrd_flag - 9,10,11 bits for video_format - */ -#define viddec_fw_h264_vui_get_aspect_ratio_info_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 0, 0x1) -#define viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 0, 0x1) -#define viddec_fw_h264_vui_get_video_signal_type_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 1, 0x1) -#define viddec_fw_h264_vui_set_video_signal_type_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 1, 0x1) -#define viddec_fw_h264_vui_get_colour_description_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 2, 0x1) -#define viddec_fw_h264_vui_set_colour_description_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 2, 0x1) -#define viddec_fw_h264_vui_get_timing_info_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 3, 0x1) -#define viddec_fw_h264_vui_set_timing_info_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 3, 0x1) -#define viddec_fw_h264_vui_get_nal_hrd_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 4, 0x1) -#define viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 4, 0x1) -#define viddec_fw_h264_vui_get_vcl_hrd_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 5, 0x1) -#define viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 5, 0x1) -#define viddec_fw_h264_vui_get_fixed_frame_rate_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 6, 0x1) -#define viddec_fw_h264_vui_set_fixed_frame_rate_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 6, 0x1) -#define viddec_fw_h264_vui_get_pic_struct_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 7, 0x1) -#define viddec_fw_h264_vui_set_pic_struct_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 7, 0x1) -#define viddec_fw_h264_vui_get_low_delay_hrd_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 8, 0x1) -#define viddec_fw_h264_vui_set_low_delay_hrd_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 8, 0x1) -#define viddec_fw_h264_vui_get_video_format(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 9, 0x7) -#define viddec_fw_h264_vui_set_video_format(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 9, 0x7) - unsigned int vui_flags_and_format; - -#define viddec_fw_h264_vui_get_aspect_ratio_idc(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 0, 0xFF) -#define viddec_fw_h264_vui_set_aspect_ratio_idc(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 0, 0xFF) -#define viddec_fw_h264_vui_get_colour_primaries(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 8, 0xFF) -#define viddec_fw_h264_vui_set_colour_primaries(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 8, 0xFF) -#define viddec_fw_h264_vui_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 16, 0xFF) -#define viddec_fw_h264_vui_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 16, 0xFF) - /* lower 8 bits for aspect_ratio, next 8bits for 
color primaries and next 8 bits for transfer characteristics */ - unsigned int aspc_color_transfer; - -#define viddec_fw_h264_vui_get_sar_width(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 16, 0xFFFF) -#define viddec_fw_h264_vui_get_sar_height(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 0, 0xFFFF) -#define viddec_fw_h264_vui_set_sar_width(x, val) viddec_fw_bitfields_insert( (x)->sar_width_height, val, 16, 0xFFFF) -#define viddec_fw_h264_vui_set_sar_height(x, val) viddec_fw_bitfields_insert( (x)->sar_width_height, val, 0, 0xFFFF) - unsigned int sar_width_height; /* Lower 16 for height upper 16 for width */ - } h264_vui; // h264 item of type VIDDEC_WORKLOAD_DISPLAY_INFO - struct - { -#define viddec_fw_h264_vui_get_num_units_in_tick_flag(x) viddec_fw_bitfields_extract( (x)->num_units_in_tick, 0, 0xFFFFFFFF) -#define viddec_fw_h264_vui_set_num_units_in_tick_flag(x, val) viddec_fw_bitfields_insert( (x)->num_units_in_tick, val, 0, 0xFFFFFFFF) -#define viddec_fw_h264_vui_get_time_scale_flag(x) viddec_fw_bitfields_extract( (x)->time_scale, 0, 0xFFFFFFFF) -#define viddec_fw_h264_vui_set_time_scale_flag(x, val) viddec_fw_bitfields_insert( (x)->time_scale, val, 0, 0xFFFFFFFF) - unsigned int num_units_in_tick; - unsigned int time_scale; - unsigned int pad1; - } h264_vui_time_info; // VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO - struct - { - unsigned int pic_struct; /* 4 bit length */ - unsigned int pad1; - unsigned int pad2; - } h264_sei_pic_timing; // h264 item of type VIDDEC_WORKLOAD_SEI_PIC_TIMING - struct - { - unsigned int pan_scan_rect_id; - -#define viddec_fw_h264_sei_pan_scan_get_cancel_flag(x) viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 0, 0x1) -#define viddec_fw_h264_sei_pan_scan_get_cnt_minus1(x) viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 1, 0x3) -#define viddec_fw_h264_sei_pan_scan_set_cancel_flag(x, val) viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 0, 0x1) -#define viddec_fw_h264_sei_pan_scan_set_cnt_minus1(x, val) viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 1, 0x3) - unsigned int pan_scan_cancel_and_cnt; /* 0 bit for cancel flag and 2 bits for cnt_minus1 */ - unsigned int pan_scan_rect_repetition_period; - } h264_sei_pan_scan; // h264 item of type VIDDEC_WORKLOAD_H264_PAN_SCAN - - struct - { - -#define viddec_fw_h264_pan_scan_get_left(x) viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF) -#define viddec_fw_h264_pan_scan_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF) -#define viddec_fw_h264_pan_scan_set_left(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF) -#define viddec_fw_h264_pan_scan_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF) - unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */ - -#define viddec_fw_h264_pan_scan_get_top(x) viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF) -#define viddec_fw_h264_pan_scan_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF) -#define viddec_fw_h264_pan_scan_set_top(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF) -#define viddec_fw_h264_pan_scan_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF) - unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */ - - unsigned int pad; - } h264_pan_scan_rect; // h264 item of type VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT - struct - { - unsigned int recovery_frame_cnt; -#define 
viddec_fw_h264_h264_sei_recovery_get_exact_match_flag(x) viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 0, 0x1) -#define viddec_fw_h264_h264_sei_recovery_get_broken_link_flag(x) viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 1, 0x1) -#define viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(x, val) viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 0, 0x1) -#define viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(x, val) viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 1, 0x1) - unsigned int broken_and_exctmatch_flags; /* 0 bit for exact match, 1 bit for brokenlink */ - - unsigned int changing_slice_group_idc; /* 2bit value for slice_group idc */ - - } h264_sei_recovery_point; // h264 item of type VIDDEC_WORKLOAD_SEI_RECOVERY_POINT - - - struct - { - // Visual Sequence (From LSB): - // - profile_and_level_indication - 8 bits -#define viddec_fw_mp4_vs_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->vs_item, 0, 0xFF) -#define viddec_fw_mp4_vs_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->vs_item, val, 0, 0xFF) - unsigned int vs_item; - - // Visual Object - video_signal_type - // - video_signal_type - 1b - // - video_format - 3b - // - video_range - 1b - // - colour_description - 1b -#define viddec_fw_mp4_vo_get_colour_description(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 5, 0x1) -#define viddec_fw_mp4_vo_set_colour_description(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 5, 0x1) -#define viddec_fw_mp4_vo_get_video_range(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 4, 0x1) -#define viddec_fw_mp4_vo_set_video_range(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 4, 0x1) -#define viddec_fw_mp4_vo_get_video_format(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 1, 0x7) -#define viddec_fw_mp4_vo_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 1, 0x7) -#define viddec_fw_mp4_vo_get_video_signal_type(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 0, 0x1) -#define viddec_fw_mp4_vo_set_video_signal_type(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 0, 0x1) - unsigned int video_signal_type; - - // Visual Object - video_signal_type - // - color_primaries - 8 bits - // - transfer_characteristics - 8 bits -#define viddec_fw_mp4_vo_get_transfer_char(x) viddec_fw_bitfields_extract( (x)->color_desc, 8, 0xFF) -#define viddec_fw_mp4_vo_set_transfer_char(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 8, 0xFF) -#define viddec_fw_mp4_vo_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->color_desc, 0, 0xFF) -#define viddec_fw_mp4_vo_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 0, 0xFF) - unsigned int color_desc; - } mp4_vs_vo; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ - - struct - { - // Video Object Layer(From LSB): - // - aspect_ratio_info - 4b - // - par_width - 8b - // - par_height - 8b - // - vol_control_param - 1b - // - chroma_format - 2b - // - interlaced - 1b - // - fixed_vop_rate - 1b -#define viddec_fw_mp4_vol_get_fixed_vop_rate(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 24, 0x1) -#define viddec_fw_mp4_vol_set_fixed_vop_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 24, 0x1) -#define viddec_fw_mp4_vol_get_interlaced(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 23, 0x1) -#define viddec_fw_mp4_vol_set_interlaced(x, val) 
viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 23, 0x1) -#define viddec_fw_mp4_vol_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 21, 0x3) -#define viddec_fw_mp4_vol_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 21, 0x3) -#define viddec_fw_mp4_vol_get_control_param(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 20, 0x1) -#define viddec_fw_mp4_vol_set_control_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 20, 0x1) -#define viddec_fw_mp4_vol_get_par_height(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 12, 0xFF) -#define viddec_fw_mp4_vol_set_par_height(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 12, 0xFF) -#define viddec_fw_mp4_vol_get_par_width(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 4, 0xFF) -#define viddec_fw_mp4_vol_set_par_width(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 4, 0xFF) -#define viddec_fw_mp4_vol_get_aspect_ratio_info(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 0, 0xF) -#define viddec_fw_mp4_vol_set_aspect_ratio_info(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 0, 0xF) - unsigned int vol_aspect_ratio; - - // Video Object Layer(From LSB): - // - vbv_parameters - 1b - // - bit_rate - 30b -#define viddec_fw_mp4_vol_get_bit_rate(x) viddec_fw_bitfields_extract( (x)->vol_bit_rate, 1, 0x3FFFFFFF) -#define viddec_fw_mp4_vol_set_bit_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 1, 0x3FFFFFFF) -#define viddec_fw_mp4_vol_get_vbv_param(x) viddec_fw_bitfields_extract( (x)->vol_bit_rate, 0, 0x1) -#define viddec_fw_mp4_vol_set_vbv_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 0, 0x1) - unsigned int vol_bit_rate; - - // Video Object Layer(From LSB): - // - fixed_vop_time_increment - 16b - // - vop_time_increment_resolution - 16b -#define viddec_fw_mp4_vol_get_vop_time_increment_resolution(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 16, 0xFFFF) -#define viddec_fw_mp4_vol_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 16, 0xFFFF) -#define viddec_fw_mp4_vol_get_fixed_vop_time_increment(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 1, 0xFFFF) -#define viddec_fw_mp4_vol_set_fixed_vop_time_increment(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 1, 0xFFFF) - unsigned int vol_frame_rate; - } mp4_vol; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ - - struct - { - // Group of Video Object Planes(From LSB): - // - time_code - 18b - // - closed_gov - 1b - // - broken_link - 1b -#define viddec_fw_mp4_gvop_get_broken_link(x) viddec_fw_bitfields_extract((x)->gvop_info, 19, 0x1) -#define viddec_fw_mp4_gvop_set_broken_link(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 19, 0x1) -#define viddec_fw_mp4_gvop_get_closed_gov(x) viddec_fw_bitfields_extract((x)->gvop_info, 18, 0x1) -#define viddec_fw_mp4_gvop_set_closed_gov(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 18, 0x1) -#define viddec_fw_mp4_gvop_get_time_code(x) viddec_fw_bitfields_extract((x)->gvop_info, 0, 0x3FFFF) -#define viddec_fw_mp4_gvop_set_time_code(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 0, 0x3FFFF) - unsigned int gvop_info; - - unsigned int pad1; - unsigned int pad2; - } mp4_gvop; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ - - struct - { - // Group of Video Object Planes(From LSB): - // - source_format - 3b -#define viddec_fw_mp4_vpsh_get_source_format(x) 
viddec_fw_bitfields_extract((x)->info, 0, 0x7) -#define viddec_fw_mp4_vpsh_set_source_format(x, val) viddec_fw_bitfields_insert((x)->info, val, 0, 0x7) - unsigned int info; - - unsigned int pad1; - unsigned int pad2; - } mp4_vpsh; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT - - unsigned int vwi_payload[3]; - }; -} viddec_workload_item_t; - - - -#endif /* VIDDEC_ITEM_TYPES_H */ diff --git a/mixvbp/include/viddec_fw_parser_host.h b/mixvbp/include/viddec_fw_parser_host.h deleted file mode 100644 index 550cf0a..0000000 --- a/mixvbp/include/viddec_fw_parser_host.h +++ /dev/null @@ -1,237 +0,0 @@ -/* - This file is provided under a dual BSD/GPLv2 license. When using or - redistributing this file, you may do so under either license. - - GPL LICENSE SUMMARY - - Copyright(c) 2007-2009 Intel Corporation. All rights reserved. - - This program is free software; you can redistribute it and/or modify - it under the terms of version 2 of the GNU General Public License as - published by the Free Software Foundation. - - This program is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. - The full GNU General Public License is included in this distribution - in the file called LICENSE.GPL. - - Contact Information: - - BSD LICENSE - - Copyright(c) 2007-2009 Intel Corporation. All rights reserved. - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the - distribution. - * Neither the name of Intel Corporation nor the names of its - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -*/ - -#ifndef VIDDEC_FW_PARSER_HOST_H -#define VIDDEC_FW_PARSER_HOST_H - -#ifdef __cplusplus -extern "C" { -#endif -#include "viddec_fw_common_defs.h" - - /** @weakgroup viddec Fw Parser interface Functions */ - /** @ingroup viddec_fw_parser */ - /*@{*/ - - /** - This function returns the size required for loading fw. - @retval size : Required size. 
- */ - uint32_t viddec_fw_parser_query_fwsize(void); - - /** - This function loads the parser firmware and initialises the necessary state information. This is a synchronous message to the FW. - @param[in] phys : Physical address at which the firmware should be loaded. - @param[in] len : Length of data allocated at phys. - @retval VIDDEC_FW_SUCCESS : Successfully loaded firmware. - @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware. - @retval VIDDEC_FW_NORESOURCES : Failed to allocate resources for loading firmware. - @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. - */ - uint32_t viddec_fw_parser_loadfw(uint32_t phys, uint32_t len); - - /** - This function returns the size required for opening a stream. This is a synchronous message to the FW. - @param[in] codec_type : Type of codec that we want information about. - @param[out] num_wklds : Number of wklds required for initialisation. - @param[out] size : Size of memory required for opening a stream. - */ - void viddec_fw_parser_query_streamsize(uint32_t codec_type, uint32_t *num_wklds, uint32_t *size); - - /** - This function opens the requested codec. This is a synchronous message to the FW. - @param[in] codec_type : Type of codec that we want to open. - @param[in] phys : Physical address of allocated memory for this codec. - @param[in] priority : Priority of stream. 1 for realtime and 0 for background. - @param[out] strm_handle : Handle of the opened stream. - @retval VIDDEC_FW_SUCCESS : Successfully opened the stream. - @retval VIDDEC_FW_FAILURE : Failed to open a stream. - @retval VIDDEC_FW_NORESOURCES : Failed to open a stream as we are out of resources. - */ - uint32_t viddec_fw_parser_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority); - - /** - This function closes a stream. This is a synchronous message to the FW. - For the close to be effective, the host has to do a flush with discard first and then close the stream. - @param[in] strm_handle : Handle of the stream to close. - */ - void viddec_fw_parser_closestream(uint32_t strm_handle); - - /** - This function flushes the current stream. This is a synchronous message to the FW. - Before calling this function the host has to make sure the output queue of the firmware - is empty. After this function is executed the FW will read all entries in the input - es buffer queue into a free or partial workload and push it into the output queue. - After this operation the host has to read all entries in the output queue again to - finish the flush operation. - @param[in] flush_type : Type of flush we want to perform, e.g. flush and discard. - @param[in] strm_handle : Handle of the stream we want to flush. - @retval VIDDEC_FW_SUCCESS : Successfully flushed the stream. - @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. - @retval VIDDEC_FW_NEED_FREE_WKLD : Failed to flush since a free wkld was not available. - */ - uint32_t viddec_fw_parser_flushstream(uint32_t strm_handle, uint32_t flush_type); - - /** - This function sends an input es buffer. - @param[in] strm_handle : The handle of the stream that we want to send the es buffer to. - @param[in] message : The es buffer we want to send. - @retval VIDDEC_FW_SUCCESS : Successfully sent the message. - @retval VIDDEC_FW_PORT_FULL : Port to FW is full; unsuccessful in sending message. - @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. - */ - uint32_t viddec_fw_parser_send(uint32_t strm_handle, ipc_msg_data *message);
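// Editor's note: a hypothetical host-side bring-up sequence assembled from
// the declarations in this header; the physical addresses, codec id and
// message buffers are illustrative placeholders, not code from this tree.

    extern uint32_t fw_phys, stream_phys, codec_id;    // assumed to be set up by the platform
    extern ipc_msg_data es_msg, wkld_msg;

    uint32_t strm = 0, nwklds = 0, sz = 0;
    viddec_fw_parser_loadfw(fw_phys, viddec_fw_parser_query_fwsize());
    viddec_fw_parser_query_streamsize(codec_id, &nwklds, &sz);
    if (viddec_fw_parser_openstream(codec_id, &strm, stream_phys, 1) == VIDDEC_FW_SUCCESS) {
        viddec_fw_parser_send(strm, &es_msg);                 // queue one ES buffer
        while (viddec_fw_parser_recv(strm, &wkld_msg) == VIDDEC_FW_SUCCESS)
            ;                                                 // drain processed workloads here
    }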
- - /** - This function gets the next processed workload. The host is required to add free workloads - to keep the parser busy. The FW will stall when it doesn't have enough workloads (2) to continue. - @param[in] strm_handle : The handle of the stream that we want to read a workload from. - @param[out] message : The workload descriptor. - @retval VIDDEC_FW_SUCCESS : Successfully received a workload. - @retval VIDDEC_FW_PORT_EMPTY : Workload port is empty; unsuccessful in reading wkld. - @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. - */ - uint32_t viddec_fw_parser_recv(uint32_t strm_handle, ipc_msg_data *message); - - /** - This function adds a free workload to the current stream. - @param[in] strm_handle : The handle of the stream that we want to write the workload to. - @param[out] message : The workload descriptor. - @retval VIDDEC_FW_SUCCESS : Successfully added the workload. - @retval VIDDEC_FW_PORT_FULL : Workload port is full; unsuccessful in writing wkld. - @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. - */ - uint32_t viddec_fw_parser_addwkld(uint32_t strm_handle, ipc_msg_data *message); - - /** - This function enables or disables interrupts for a stream. By default the FW will always enable interrupts. - The driver can disable/enable interrupts if it needs to for this particular stream. - - @param[in] strm_handle : The handle of the stream that we want to set the mask for. - @param[in] mask : This is read as a boolean variable, true to enable, false to disable. - @retval VIDDEC_FW_SUCCESS : Successfully set mask. - @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. - */ - uint32_t viddec_fw_parser_set_interruptmask(uint32_t strm_handle, uint32_t mask); - /** - This function gets the interrupt status for the current stream. - When the host gets interrupted, since it is a global interrupt, the host is expected to look at all active streams - by calling this function. The status is what the FW thinks the current state of the stream is. The status information that - the FW provides is complete information on all possible events that are defined. The host should only access this information - in its ISR, during which the FW does not modify it. - - @param[in] strm_handle : The handle of the stream that we want to get the status from. - @param[out] status : The status of the stream based on the viddec_fw_parser_int_status_t enum. - @retval VIDDEC_FW_SUCCESS : Successful in reading status. - @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. - */ - uint32_t viddec_fw_parser_getstatus(uint32_t strm_handle, uint32_t *status); - - /** - This function allows setting the stream attributes that are supported. - @param[in] strm_handle : The handle of the stream that we want to set the attribute on. - @param[in] type : The type of attribute we want to set; this should be one of the items in viddec_fw_stream_attributes_t. - @param[in] value : The value of the type that we want to set. - @retval VIDDEC_FW_SUCCESS : Successfully set the attribute. - @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid. - */ - uint32_t viddec_fw_parser_set_stream_attributes(uint32_t strm_handle, uint32_t type, uint32_t value);
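// Editor's note: a minimal sketch of the ISR shape implied by the
// getstatus/clear_global_interrupt documentation; the active-stream
// bookkeeping and the drain helper are assumptions, not code from this tree.

    extern uint32_t active_streams[];   // assumed host-side stream table
    extern int n_active;
    extern void drain_output_queue(uint32_t strm);   // assumed helper: empty the queue fully

    void parser_isr(void)
    {
        uint32_t status;
        int i;
        for (i = 0; i < n_active; i++)
            if (viddec_fw_parser_getstatus(active_streams[i], &status) == VIDDEC_FW_SUCCESS)
                drain_output_queue(active_streams[i]);   // empty fully: interrupts are edge triggered
        viddec_fw_parser_clear_global_interrupt();       // documented below as the last call in the ISR
    }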
- - /** - This function allows getting the current status of all the parser queues. If the current stream is active we return - the number of input messages that can be written to the input queue, the number of messages in the output queue and the number of - free workloads the stream has available. - Normally this is called when the host receives an interrupt from the parser, in which case, before releasing the INT, - the host will try its best to keep the FW busy. We always get an interrupt if we passed the watermark on input or - a workload was pushed into output and the INT line is free. If the host holds onto the INT when the firmware tries to send an INT, - the FW will send the interrupt after the host releases the INT. Since we have edge-triggered interrupts we cannot guarantee - one interrupt per frame; e.g. if three frames are generated and after the first frame the FW was able to provide an INT - to the host, but the host held on to the INT while the FW finished the next two frames, then after the host releases the INT the FW will - give only one INT, and the host should try to empty the output queue. - @param[in] strm_handle : The handle of the stream that we want to get the queue status of. - @param[out] status : The status of each queue gets updated in here. - @retval VIDDEC_FW_SUCCESS : Successfully got the status information. - @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter, in this case an inactive stream. - */ - uint32_t viddec_fw_parser_get_queue_status(uint32_t strm_handle, viddec_fw_q_status_t *status); - - /** - This function unloads the parser firmware and frees the resources allocated in load fw. - If this function is called before load fw it will crash with a segmentation fault. - */ - void viddec_fw_parser_deinit(void); - - /** - This function gets the major and minor revision numbers of the loaded firmware. - @param[out] major : The major revision number. - @param[out] minor : The minor revision number. - @param[out] build : The internal build number. - */ - void viddec_fw_parser_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build); - - /** - This function clears the global interrupt. This is the last thing the host calls before exiting the ISR. - */ - void viddec_fw_parser_clear_global_interrupt(void); - - /*@}*/ -#ifdef __cplusplus -} -#endif - -#endif//#ifndef VIDDEC_FW_PARSER_HOST_H diff --git a/mixvbp/include/viddec_fw_workload.h b/mixvbp/include/viddec_fw_workload.h deleted file mode 100644 index 3b86270..0000000 --- a/mixvbp/include/viddec_fw_workload.h +++ /dev/null @@ -1,152 +0,0 @@ -/* - This file is provided under a dual BSD/GPLv2 license. When using or - redistributing this file, you may do so under either license. - - GPL LICENSE SUMMARY - - Copyright(c) 2007-2009 Intel Corporation. All rights reserved. - - This program is free software; you can redistribute it and/or modify - it under the terms of version 2 of the GNU General Public License as - published by the Free Software Foundation. - - This program is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. - The full GNU General Public License is included in this distribution - in the file called LICENSE.GPL. - - Contact Information: - - BSD LICENSE - - Copyright(c) 2007-2009 Intel Corporation. All rights reserved. - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. 
- * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the - distribution. - * Neither the name of Intel Corporation nor the names of its - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -*/ -#ifndef VIDDEC_FW_WORKLOAD_H -#define VIDDEC_FW_WORKLOAD_H - -#include -#include "viddec_fw_item_types.h" -#include "viddec_fw_frame_attr.h" -#include "viddec_fw_common_defs.h" - -#define VIDDEC_WORKLOAD_FLAGS_ES_START_FRAME (1 << 0) -#define VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE (1 << 1) -#define VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE (1 << 2) -#define VIDDEC_WORKLOAD_FLAGS_ES_END_FRAME (1 << 3) - -#define VIDDEC_FRAME_REFERENCE_IS_VALID (0x1<<1) -// PIP Output Frame request bits -#define BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE 24 -#define BMSK_VIDDEC_FRAME_REFERENCE_PIP_MODE (0x3<<BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE) #define VIDDEC_PARSE_INVALID_POS 0xFFFFFFFF @@ -74,9 +73,6 @@ int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits); */ int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits); -/* This function appends a work item to current/next workload. - */ -int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item, uint32_t next);
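// Editor's note: a hypothetical codec-parser fragment using the bitstream
// ops declared here; 'parent' is the parser-manager context handed to every
// parser, and the start-code value is just an example.

    uint32_t code = 0;
    viddec_pm_peek_bits(parent, &code, 8);   // look ahead without consuming
    if (code == 0xB3)                        // e.g. the low byte of an MPEG-2 sequence_header_code
        viddec_pm_skip_bits(parent, 8);      // now consume the byte
    viddec_pm_get_bits(parent, &code, 12);   // then read a 12-bit field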
-/* This function gets the current byte and bit positions and information on whether an emulation byte is present after the current byte. @@ -91,13 +87,6 @@ int32_t viddec_pm_append_pixeldata(void *parent); */ int32_t viddec_pm_append_pixeldata_next(void *parent); -/* This function provides the workload header for parsers to fill in attribute values - */ -viddec_workload_t* viddec_pm_get_header(void *parent); - -/* This function provides the next workload header for parsers to fill in attribute values - */ -viddec_workload_t* viddec_pm_get_next_header(void *parent); /* Returns the current byte value where offset is on */ uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte); @@ -105,17 +94,9 @@ uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte); /* Tells us if there is more data that needs to be parsed */ int32_t viddec_pm_is_nomoredata(void *parent); -/* This function appends a misc tag to the workload, covering the AU from the start position to the end position */ -int32_t viddec_pm_append_misc_tags(void *parent, uint32_t start, uint32_t end, viddec_workload_item_t *wi, uint32_t using_next); void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error); void viddec_pm_set_late_frame_detect(void *parent); -static inline void viddec_fw_reset_workload_item(viddec_workload_item_t *wi) -{ - wi->vwi_payload[0] = wi->vwi_payload[1] = wi->vwi_payload[2] = 0; -} - -void viddec_pm_setup_userdata(viddec_workload_item_t *wi); #endif diff --git a/mixvbp/vbp_manager/include/viddec_pm.h b/mixvbp/vbp_manager/include/viddec_pm.h index 45b884b..e5fb679 100755 --- a/mixvbp/vbp_manager/include/viddec_pm.h +++ b/mixvbp/vbp_manager/include/viddec_pm.h @@ -2,6 +2,7 @@ #define VIDDEC_PM_H #include +#include "viddec_pm_utils_list.h" #include "viddec_pm_utils_bstream.h" #include "viddec_pm_parse.h" #include "viddec_parser_ops.h" @@ -9,42 +10,8 @@ #define SC_DETECT_BUF_SIZE 1024 #define MAX_CODEC_CXT_SIZE 4096 -typedef enum -{ - PM_SUCCESS = 0, - /* Messages to indicate more ES data */ - PM_NO_DATA = 0x100, - /* Messages to indicate SC found */ - PM_SC_FOUND = 0x200, - PM_FIRST_SC_FOUND = 0x201, - /* Messages to indicate Frame done */ - PM_WKLD_DONE = 0x300, - /* Messages to indicate Error conditions */ - PM_OVERFLOW = 0x400, - /* Messages to indicate inband conditions */ - PM_INBAND_MESSAGES = 0x500, - PM_EOS = 0x501, - PM_DISCONTINUITY = 0x502, -} pm_parse_state_t; -/* This is a temporary structure for first-pass SC parsing. list_index tells us where we are in the list of ES buffers; - cur_es points to the current ES buffer we are parsing. */ -typedef struct -{ - int32_t list_index; /* current index of list */ - uint32_t cur_offset; - uint32_t cur_size; - viddec_input_buffer_t *cur_es; -} viddec_pm_sc_cur_buf_t; -typedef struct -{ - uint32_t pending_tags[MAX_IBUFS_PER_SC]; - uint8_t dummy; - uint8_t frame_done; - uint8_t first_buf_aligned; - uint8_t using_next; -} vidded_pm_pending_tags_t; /* This structure holds all necessary data required by the parser manager for stream parsing. 
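Editor's note: the deleted pm_parse_state_t values above drove the kernel-side
parse loop, whose entry point viddec_pm_parse_es_buffer is declared just below
among the removed functions; a hypothetical caller, with cxt, codec_type and
es_buf set up elsewhere, would have looked roughly like this:

    uint32_t state = viddec_pm_parse_es_buffer(cxt, codec_type, es_buf);
    switch (state) {
    case PM_NO_DATA:        break;   // feed more ES data
    case PM_FIRST_SC_FOUND: break;   // first start code seen
    case PM_WKLD_DONE:      break;   // a frame's worth of parsing is complete
    case PM_OVERFLOW:       break;   // error condition
    default:                break;   // PM_EOS, PM_DISCONTINUITY, ...
    }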
*/ @@ -54,18 +21,13 @@ typedef struct uint8_t scbuf[SC_DETECT_BUF_SIZE + 8]; viddec_sc_parse_cubby_cxt_t parse_cubby; viddec_pm_utils_list_t list; - /* Place to store tags to be added to next to next workload */ - viddec_pm_sc_cur_buf_t cur_buf; - //viddec_emitter emitter; viddec_pm_utils_bstream_cxt_t getbits; + //viddec_emitter emitter; viddec_sc_prefix_state_t sc_prefix_info; - vidded_pm_pending_tags_t pending_tags; uint8_t word_align_dummy; uint8_t late_frame_detect; uint8_t frame_start_found; - uint8_t found_fm_st_in_current_au; uint32_t next_workload_error_eos; - uint32_t pending_inband_tags; #ifdef VBP uint32_t codec_data[MAX_CODEC_CXT_SIZE<<3]; #else @@ -73,21 +35,5 @@ typedef struct #endif } viddec_pm_cxt_t; -/* - * - * Functions used by Parser kernel - * - */ - -/* This is for initialising parser manager context to default values */ -void viddec_pm_init_context(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t *persist_mem, uint32_t clean); - -/* This is the main parse function which returns state information that parser kernel can understand.*/ -uint32_t viddec_pm_parse_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf); - -void viddec_pm_init_ops(); - -void viddec_pm_update_time(viddec_pm_cxt_t *cxt, uint32_t time); -uint32_t viddec_pm_get_parser_sizes(uint32_t codec_type, viddec_parser_memory_sizes_t *size); #endif diff --git a/mixvbp/vbp_manager/vbp_utils.c b/mixvbp/vbp_manager/vbp_utils.c index 72548f0..78bbb0e 100755 --- a/mixvbp/vbp_manager/vbp_utils.c +++ b/mixvbp/vbp_manager/vbp_utils.c @@ -225,12 +225,6 @@ static uint32 vbp_utils_free_parser_memory(vbp_context *pcontext) pcontext->func_free_query_data(pcontext); } - free(pcontext->workload2); - pcontext->workload2 = NULL; - - free(pcontext->workload1); - pcontext->workload1 = NULL; - free(pcontext->persist_mem); pcontext->persist_mem = NULL; @@ -297,26 +291,6 @@ static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext) } } - /* allocate a new workload with 1000 items. */ - pcontext->workload1 = malloc(sizeof(viddec_workload_t) + - (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t))); - if (NULL == pcontext->workload1) - { - ETRACE("Failed to allocate memory"); - error = VBP_MEM; - goto cleanup; - } - - /* allocate a second workload with 1000 items. */ - pcontext->workload2 = malloc(sizeof(viddec_workload_t) + - (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t))); - if (NULL == pcontext->workload2) - { - ETRACE("Failed to allocate memory"); - error = VBP_MEM; - goto cleanup; - } - /* allocate format-specific query data */ error = pcontext->func_allocate_query_data(pcontext); @@ -475,7 +449,6 @@ uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext) } viddec_pm_utils_bstream_init(&(pcontext->parser_cxt->getbits), NULL, 0); - pcontext->parser_cxt->cur_buf.list_index = -1; pcontext->parser_cxt->parse_cubby.phase = 0; /* invoke the entry point to initialize the parser. 
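Editor's note: the workload1/workload2 allocations deleted above used the
usual header-plus-trailing-item-array sizing idiom; in isolation the pattern
was:

    viddec_workload_t *wl = malloc(sizeof(viddec_workload_t) +
                                   MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t));
    if (wl == NULL)
        return VBP_MEM;   // the same error path the removed code took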
*/ diff --git a/mixvbp/vbp_manager/vbp_utils.h b/mixvbp/vbp_manager/vbp_utils.h index 7761c26..455951c 100755 --- a/mixvbp/vbp_manager/vbp_utils.h +++ b/mixvbp/vbp_manager/vbp_utils.h @@ -85,9 +85,6 @@ struct vbp_context_t /* parser context */ viddec_pm_cxt_t *parser_cxt; - /* work load */ - viddec_workload_t *workload1, *workload2; - /* persistent memory for parser */ uint32 *persist_mem; diff --git a/mixvbp/vbp_manager/vbp_vp8_parser.c b/mixvbp/vbp_manager/vbp_vp8_parser.c index 72dcfa9..203ab75 100755 --- a/mixvbp/vbp_manager/vbp_vp8_parser.c +++ b/mixvbp/vbp_manager/vbp_vp8_parser.c @@ -28,6 +28,7 @@ #include "vbp_loader.h" #include "vbp_utils.h" #include "vbp_vp8_parser.h" +#include "viddec_fw_common_defs.h" uint32 vbp_init_parser_entries_vp8(vbp_context *pcontext) { diff --git a/mixvbp/vbp_manager/viddec_pm_parser_ops.c b/mixvbp/vbp_manager/viddec_pm_parser_ops.c index 6879a6a..1ba8aa8 100755 --- a/mixvbp/vbp_manager/viddec_pm_parser_ops.c +++ b/mixvbp/vbp_manager/viddec_pm_parser_ops.c @@ -3,6 +3,7 @@ #include "viddec_pm.h" #include "viddec_parser_ops.h" #include "viddec_pm_utils_bstream.h" +#include "viddec_fw_common_defs.h" int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits) { @@ -86,12 +87,3 @@ void viddec_pm_set_late_frame_detect(void *parent) cxt->late_frame_detect = true; } -int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item, uint32_t next) -{ - return 1; -} - -void viddec_pm_setup_userdata(viddec_workload_item_t *wi) -{ - wi=wi; -} diff --git a/mixvbp/vbp_plugin/h264/h264parse_dpb.c b/mixvbp/vbp_plugin/h264/h264parse_dpb.c index 13adb1b..5f660de 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_dpb.c +++ b/mixvbp/vbp_plugin/h264/h264parse_dpb.c @@ -7,7 +7,6 @@ #include "viddec_parser_ops.h" -#include "viddec_fw_workload.h" #include "viddec_pm.h" diff --git a/mixvbp/vbp_plugin/h264/h264parse_sei.c b/mixvbp/vbp_plugin/h264/h264parse_sei.c index f70e64c..12f793d 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_sei.c +++ b/mixvbp/vbp_plugin/h264/h264parse_sei.c @@ -8,9 +8,6 @@ #include "viddec_parser_ops.h" -#include "viddec_fw_item_types.h" -#include "viddec_fw_workload.h" - ////////////////////////////////////////////////////////////////////////////// // avc_sei_stream_initialise () // @@ -132,9 +129,6 @@ h264_Status h264_sei_pic_timing(void *parent,h264_Info* pInfo) { int32_t i = 0, NumClockTS = 0; - viddec_workload_item_t wi; - - wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0; viddec_pm_get_bits(parent, &code , 4); sei_msg_ptr->pic_struct = (uint8_t)code; @@ -145,13 +139,6 @@ h264_Status h264_sei_pic_timing(void *parent,h264_Info* pInfo) pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_INTERLACED; } - wi.vwi_type = VIDDEC_WORKLOAD_SEI_PIC_TIMING; - wi.h264_sei_pic_timing.pic_struct = sei_msg_ptr->pic_struct; - -#ifndef VBP - //Push to current if we are in first frame, or we do not detect previous frame end - viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); -#endif if (sei_msg_ptr->pic_struct < 3) { NumClockTS = 1; @@ -255,29 +242,20 @@ h264_Status h264_sei_pan_scan(void *parent,h264_Info* pInfo) h264_SEI_pan_scan_rectangle_t sei_pan_scan; uint32_t code; - viddec_workload_item_t wi; - h264_memset( &(sei_pan_scan), 0x0, sizeof(h264_SEI_pan_scan_rectangle_t) ); - viddec_fw_reset_workload_item(&wi); - wi.vwi_type = VIDDEC_WORKLOAD_H264_PAN_SCAN; - sei_msg_ptr = (h264_SEI_pan_scan_rectangle_t *)(&sei_pan_scan); sei_msg_ptr->pan_scan_rect_id = 
h264_GetVLCElement(parent, pInfo, false); - wi.h264_sei_pan_scan.pan_scan_rect_id = sei_msg_ptr->pan_scan_rect_id; - viddec_pm_get_bits(parent, &code , 1); sei_msg_ptr->pan_scan_rect_cancel_flag = (uint8_t)code; - viddec_fw_h264_sei_pan_scan_set_cancel_flag(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_rect_cancel_flag); if (!sei_msg_ptr->pan_scan_rect_cancel_flag) { int32_t i; sei_msg_ptr->pan_scan_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false); - viddec_fw_h264_sei_pan_scan_set_cnt_minus1(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_cnt_minus1); if (sei_msg_ptr->pan_scan_cnt_minus1 > MAX_PAN_SCAN_CNT -1) { return H264_STATUS_SEI_ERROR; @@ -290,31 +268,6 @@ h264_Status h264_sei_pan_scan(void *parent,h264_Info* pInfo) sei_msg_ptr->pan_scan_rect_bottom_offset[i] = h264_GetVLCElement(parent, pInfo, true); } sei_msg_ptr->pan_scan_rect_repetition_period = h264_GetVLCElement(parent, pInfo, false); - wi.h264_sei_pan_scan.pan_scan_rect_repetition_period = sei_msg_ptr->pan_scan_rect_repetition_period; - } -#ifndef VBP - //cur is first frame - viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); -#endif - - if (!sei_msg_ptr->pan_scan_rect_cancel_flag) - { - int32_t i; - - viddec_fw_reset_workload_item(&wi); - wi.vwi_type = VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT; - - for (i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++) - { - viddec_fw_h264_pan_scan_set_left(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_left_offset[i]); - viddec_fw_h264_pan_scan_set_right(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_right_offset[i]); - viddec_fw_h264_pan_scan_set_top(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_top_offset[i]); - viddec_fw_h264_pan_scan_set_bottom(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_bottom_offset[i]); -#ifndef VBP - //cur is first frame - viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); -#endif - } } return H264_STATUS_OK; @@ -353,11 +306,7 @@ h264_Status h264_sei_userdata_reg(void *parent,h264_Info* pInfo, uint32_t payloa uint32_t i; int32_t byte = 0; uint32_t code = 0; - viddec_workload_item_t wi; - wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED; - wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0; - //remove warning pInfo = pInfo; sei_msg_ptr = (h264_SEI_userdata_registered_t *)(&sei_userdata_registered); @@ -374,39 +323,12 @@ h264_Status h264_sei_userdata_reg(void *parent,h264_Info* pInfo, uint32_t payloa } - wi.user_data.size =0; do { - viddec_pm_get_bits(parent, (uint32_t *)&byte, 8); - if (wi.user_data.size < 11) - { - wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; - } - wi.user_data.size++; - - if (11 == wi.user_data.size) - { - viddec_pm_setup_userdata(&wi); -#ifndef VBP - //cur is first frame - viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); -#endif - wi.user_data.size =0; - } - i++; } while (i < payload_size); - if (0!=wi.user_data.size) - { - viddec_pm_setup_userdata(&wi); -#ifndef VBP - //cur is first frame - viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); -#endif - } - return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ @@ -421,10 +343,6 @@ h264_Status h264_sei_userdata_unreg(void *parent, h264_Info* pInfo, uint32_t pay int32_t byte = 0; uint32_t code; - viddec_workload_item_t wi; - - wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED; - //remove warning 
pInfo = pInfo; @@ -436,35 +354,9 @@ h264_Status h264_sei_userdata_unreg(void *parent, h264_Info* pInfo, uint32_t pay sei_msg_ptr->uuid_iso_iec_11578[i] = (uint8_t)code; } - wi.user_data.size =0; for (i = 16; i < payload_size; i++) { - viddec_pm_get_bits(parent, (uint32_t *)&byte, 8); - if (wi.user_data.size < 11) - { - wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte; - } - wi.user_data.size++; - - if (11 == wi.user_data.size) - { - viddec_pm_setup_userdata(&wi); -#ifndef VBP - //cur is first frame - viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); -#endif - wi.user_data.size =0; - } - } - - if (0!=wi.user_data.size) - { - viddec_pm_setup_userdata(&wi); -#ifndef VBP - //cur is first frame - viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream); -#endif } return H264_STATUS_OK; @@ -478,8 +370,6 @@ h264_Status h264_sei_recovery_point(void *parent, h264_Info* pInfo) h264_SEI_recovery_point_t* sei_msg_ptr; h264_SEI_recovery_point_t sei_recovery_point; uint32_t code; - viddec_workload_item_t wi; - sei_msg_ptr = (h264_SEI_recovery_point_t *)(&sei_recovery_point); @@ -507,20 +397,6 @@ h264_Status h264_sei_recovery_point(void *parent, h264_Info* pInfo) pInfo->sei_rp_received = 1; } - // - /// Append workload for SEI - // - viddec_fw_reset_workload_item(&wi); - wi.vwi_type = VIDDEC_WORKLOAD_SEI_RECOVERY_POINT; - wi.h264_sei_recovery_point.recovery_frame_cnt = sei_msg_ptr->recovery_frame_cnt; - viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->exact_match_flag); - viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->broken_link_flag); - wi.h264_sei_recovery_point.changing_slice_group_idc = sei_msg_ptr->changing_slice_group_idc; -#ifndef VBP - //cur is first frame - viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done))); -#endif - return H264_STATUS_OK; } /* ------------------------------------------------------------------------------------------ */ diff --git a/mixvbp/vbp_plugin/h264/include/h264.h b/mixvbp/vbp_plugin/h264/include/h264.h index eac5541..6171e76 100755 --- a/mixvbp/vbp_plugin/h264/include/h264.h +++ b/mixvbp/vbp_plugin/h264/include/h264.h @@ -21,9 +21,8 @@ #endif #include "stdint.h" -#include "viddec_debug.h" -#include "viddec_fw_workload.h" +#include "viddec_fw_common_defs.h" #include "h264parse_sei.h" #ifdef VBP @@ -947,17 +946,6 @@ extern "C" { //uint32_t h264_dpb_mpr_bf_poc[16]; // 0x304 } h264_pic_data; - enum h264_workload_item_type - { - VIDDEC_WORKLOAD_H264_SLICE_REG = VIDDEC_WORKLOAD_DECODER_SPECIFIC, - VIDDEC_WORKLOAD_H264_PIC_REG, - VIDDEC_WORKLOAD_H264_DPB_FRAME_POC, - VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET, - VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET, - VIDDEC_WORKLOAD_H264_PWT_ES_BYTES, - VIDDEC_WORKLOAD_H264_SCALING_MATRIX, - VIDDEC_WORKLOAD_H264_DEBUG - }; diff --git a/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c index b564d8b..daad16c 100755 --- a/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c +++ b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c @@ -1,7 +1,6 @@ #include "viddec_parser_ops.h" #include "h264.h" #include "h264parse.h" -#include "viddec_fw_item_types.h" #include "h264parse_dpb.h" @@ -128,7 +127,6 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) { - viddec_workload_item_t wi; 
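// Editor's note: every block deleted from this emit function follows one
// pattern, reproduced here for reference from the removed lines below —
// fill a viddec_workload_item_t, then append it to the current or next
// workload depending on push_to_cur:

    wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_SLICE_REG;
    wi.data.data_offset = slice_data.h264_bsd_slice_start;
    wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1;
    wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2;
    if (pInfo->push_to_cur)
        viddec_pm_append_workitem(parent, &wi);        // cur is empty, fill new frame in cur
    else
        viddec_pm_append_workitem_next(parent, &wi);   // queue for the next workload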
h264_slice_data slice_data = {}; uint32_t i=0, nitems=0, data=0; @@ -177,92 +175,11 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) /////file ref list 1 //h264_parse_emit_ref_list(parent, pInfo, 1); - ///////////////////////////////////// Slice Data //////////////////////////////// - // h264_fill_slice_data(pInfo, &slice_data); - - wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_H264_SLICE_REG); - - wi.data.data_offset = slice_data.h264_bsd_slice_start; - wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1; - wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2; - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - // viddec_pm_append_workitem( parent , &wi); - } - else - { - // viddec_pm_append_workitem_next( parent , &wi); - } - - - ///////////////////////////predict weight table item and data if have/////////////////////////// - if (pInfo->h264_pwt_enabled) - { - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET; - wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1; - wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset; - wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset; - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - // viddec_pm_append_workitem( parent , &wi); - - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; - wi.es.es_flags = 0; - // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1); - } - else - { - // viddec_pm_append_workitem_next( parent , &wi); - - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; - wi.es.es_flags = 0; - // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0); - } - } - - ////////////////////////////////// Update ES Buffer for Slice /////////////////////// viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset); - if (pInfo->active_PPS.entropy_coding_mode_flag) - { - if (0!=bits_offset) { - data = data; // fix compilation warning - // don't skip byte-aligned bits as those bits are actually - // part of slice_data - //viddec_pm_get_bits(parent, &data, 8-bits_offset); - } - } - else - { - if (0!=bits_offset) { - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET; - wi.data.data_offset = bits_offset; - wi.data.data_payload[0]=0; - wi.data.data_payload[1]=0; - - if (pInfo->push_to_cur) { //cur is empty, fill new frame in cur - // viddec_pm_append_workitem( parent , &wi); - } - else { - //viddec_pm_append_workitem_next( parent , &wi); - } - } - } - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - //viddec_pm_append_pixeldata( parent ); - } - else - { - //viddec_pm_append_pixeldata_next( parent); - } - return; } @@ -270,52 +187,23 @@ void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) { - viddec_workload_item_t wi; - const uint32_t *pl; uint32_t i=0,nitems=0; h264_pic_data pic_data; pInfo->qm_present_list=0; - - //h264_parse_emit_4X4_scaling_matrix(parent, pInfo); - // h264_parse_emit_8X8_scaling_matrix(parent, pInfo); - - // h264_fill_pic_data(pInfo, &pic_data); - // How many payloads must be generated nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up pl = (const uint32_t *) &pic_data; - // 
Dump slice data to an array of workitems, to do pl access non valid mem - for ( i = 0; i < nitems; i++ ) - { - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PIC_REG; - wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct - wi.data.data_payload[0] = pl[0]; - wi.data.data_payload[1] = pl[1]; - pl += 2; - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - - // viddec_pm_append_workitem( parent, &wi ); - } - else - { - //viddec_pm_append_workitem_next( parent, &wi ); - } - } - return; } void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) { - viddec_workload_item_t wi; uint32_t i=0,nitems=0; ///////////////////////// Frame attributes////////////////////////// @@ -343,165 +231,19 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) ///////////////////// SPS///////////////////// // h264_parse_emit_sps(parent, pInfo); - /////////////////////display frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_displayed; - - for (i=0; i<nitems; i++) - { - wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 + pInfo->dpb.frame_id_need_to_be_displayed[i]); - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - // viddec_pm_append_workitem( parent, &wi ); - } - else - { - // viddec_pm_append_workitem_next( parent, &wi ); - } - } pInfo->dpb.frame_numbers_need_to_be_displayed =0; /////////////////////release frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_removed; - - for (i=0; i<nitems; i++) - { - wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 + pInfo->dpb.frame_id_need_to_be_removed[i]); - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - //viddec_pm_append_workitem( parent, &wi ); - } - else - { - // viddec_pm_append_workitem_next( parent, &wi ); - } - - } pInfo->dpb.frame_numbers_need_to_be_removed =0; /////////////////////flush frames (do not display)///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_dropped; - - for (i=0; i<nitems; i++) - { - wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 + pInfo->dpb.frame_id_need_to_be_dropped[i]); - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - //viddec_pm_append_workitem( parent, &wi ); - } - else - { - // viddec_pm_append_workitem_next( parent, &wi ); - } - - } pInfo->dpb.frame_numbers_need_to_be_dropped =0; /////////////////////update DPB frames///////////////////// - nitems = pInfo->dpb.used_size; - for (i=0; i<nitems; i++) - { - int32_t fs_id = pInfo->dpb.fs_dpb_idc[i]; - - if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0) - { - wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id); - wi.ref_frame.reference_id = fs_id; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - // viddec_pm_append_workitem( parent, &wi ); - } - else - { - //viddec_pm_append_workitem_next( parent, &wi ); - } - } - } - - - /////////////////////update dpb frames info (poc)///////////////////// - nitems = pInfo->dpb.used_size; - for (i=0; i<nitems; i++) - { - int32_t fs_id = pInfo->dpb.fs_dpb_idc[i]; - - if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0) - { - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_DPB_FRAME_POC; - wi.data.data_offset = fs_id; - //printf("is_used = %d, tpoc = %d, bpoc =
%d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc); - - switch (viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id]))) - { - case (FRAME): { - wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc; - wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc; - break; - }; - - case (TOP_FIELD): { - wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc; - wi.data.data_payload[1] = 0; - break; - }; - - case (BOTTOM_FIELD): { - wi.data.data_payload[0] = 0; - wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc; - break; - }; - - default : { - wi.data.data_payload[0] = 0; - wi.data.data_payload[1] = 0; - break; - }; - } - - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - // viddec_pm_append_workitem( parent, &wi ); - } - else - { - //viddec_pm_append_workitem_next( parent, &wi ); - } - - } - } /////////////////////Alloc buffer for current Existing frame///////////////////// - if (0!=pInfo->dpb.frame_numbers_need_to_be_allocated) - { - if (pInfo->push_to_cur) - { - // viddec_workload_t *wl_cur = viddec_pm_get_header (parent); - // wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f); - } - else - { - // viddec_workload_t *wl_next = viddec_pm_get_next_header (parent); - //wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f); - } - } pInfo->dpb.frame_numbers_need_to_be_allocated =0; - return; } @@ -511,8 +253,6 @@ void h264_parse_emit_eos( void *parent, h264_Info *pInfo ) { uint32_t nitems=0, i=0; - viddec_workload_item_t wi; - //// //// Now we can flush out all frames in DPB fro display if (viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3) @@ -525,48 +265,10 @@ void h264_parse_emit_eos( void *parent, h264_Info *pInfo ) /////////////////////display frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_displayed; - - for (i=0; idpb.frame_id_need_to_be_displayed[i]); - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - //viddec_pm_append_workitem( parent, &wi ); - } - else - { - //viddec_pm_append_workitem_next( parent, &wi ); - } - } pInfo->dpb.frame_numbers_need_to_be_displayed =0; /////////////////////release frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_removed; - - for (i=0; idpb.frame_id_need_to_be_removed[i]); - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - // viddec_pm_append_workitem( parent, &wi ); - viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE); - } - else - { - // viddec_pm_append_workitem_next( parent, &wi ); - viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next); - } - } pInfo->dpb.frame_numbers_need_to_be_removed =0; return; diff --git a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c index c55db6b..db36c0b 100755 --- a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c +++ b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c @@ -1,6 +1,5 @@ #include "viddec_parser_ops.h" -#include "viddec_fw_workload.h" #include "viddec_pm.h" #include "h264.h" diff --git 
a/mixvbp/vbp_plugin/h264/viddec_h264_workload.c b/mixvbp/vbp_plugin/h264/viddec_h264_workload.c deleted file mode 100755 index 54c96db..0000000 --- a/mixvbp/vbp_plugin/h264/viddec_h264_workload.c +++ /dev/null @@ -1,1195 +0,0 @@ -/* Any workload management goes in this file */ - -#include "viddec_fw_debug.h" -#include "viddec_parser_ops.h" -#include "h264.h" -#include "h264parse.h" -#include "viddec_fw_item_types.h" -#include "h264parse_dpb.h" - - -#include "viddec_fw_workload.h" -#include -#include "viddec_pm_utils_bstream.h" - -// picture parameter 1 -#define PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT(w) (((uint32_t)w)&0x1) -#define PUT_BSD_PP1_SLICE_TYPE_BITS(w) ((((uint32_t)w)&0x7)<<1) -#define PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(w) ((((uint32_t)w)&0x3)<<4) -#define PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<6) -#define PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(w) ((((uint32_t)w)&0x3F)<<8) -#define PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(w) ((((uint32_t)w)&0x3F)<<16) - -// picture parameter 2 -#define PUT_BSD_PP2_CABAC_INIT_IDC_BITS(w) (((uint32_t)w)&0x3) -#define PUT_BSD_PP2_QP_BITS(w) ((((uint32_t)w)&0x3F)<<2) -#define PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(w) ((((uint32_t)w)&0x3)<<8) -#define PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(w) ((((uint32_t)w)&0xF)<<10) -#define PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(w) ((((uint32_t)w)&0xF)<<14) -#define PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(w) ((((uint32_t)w)&0x1)<<18) -#define PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(w) ((((uint32_t)w)&0x1F)<<19) -#define PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(w) ((((uint32_t)w)&0x1F)<<24) - - -// slice start parameter -#define PUT_BSD_SS_START_ADDR_BITS(w) (((uint32_t)w)&0x7fff) // 14:0 current slice start address -#define PUT_BSD_SS_SKIP_FS_IDC_BITS(w) ((((uint32_t)w)&0x3f)<<16) // [5:0], [4:0] frame store idc, [5] - 0: top-filed, 1: bottom field -#define PUT_BSD_SS_SKIP_TYPE_BIT(w) ((((uint32_t)w)&0x1)<<24) // 0: P-skip, 1: I-skip -#define PUT_BSD_SS_SKIP_REWIND_BITS(w) ((((uint32_t)w)&0xf)<<28) // number of MB or MBAFF pairs to rewind before skip - -//h264_dpb_init -#define PUT_FRAME_WIDTH_MB_BITS(w) (((uint32_t)w)&0x7F) -#define PUT_FRAME_HEIGHT_MB_BITS(w) ((((uint32_t)w)&0x7F)<<16) - -//dpb lut table init -//#define PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(w) ((((uint32_t)w)&0x1F)<<8) - -//h264 img init -#define PUT_BSD_IMAGE_STRUCTURE_BITS(w) (((uint32_t)w)&0x3) -#define PUT_BSD_IMAGE_IDR_BIT(w) ((((uint32_t)w)&0x1)<<2) -#define PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<3) -#define PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<4) -#define PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<5) -#define PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<6) -#define PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<7) -#define PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(w) ((((uint32_t)w)&0x1F)<<8) - -#define PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<13) -#define PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<14) -#define PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<15) -#define PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<16) -#define PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(w) ((((uint32_t)w)&0xFF)<<17) -#define PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<25) - - -extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo, - int32_t NonExisting, - int32_t use_old); - -extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t 
num_ref_frames); - - - -void h264_translate_parser_info_to_frame_attributes(viddec_workload_t *wl, h264_Info *pInfo) -{ - - viddec_frame_attributes_t *attrs = &wl->attrs; - - - - //// Cont_size - attrs->cont_size.height = pInfo->img.FrameHeightInMbs*16; - attrs->cont_size.width = pInfo->img.PicWidthInMbs*16; - - //// The following attributes will be updated in slice level - attrs->h264.used_for_reference = 0; - attrs->h264.top_field_first = 0; - attrs->h264.top_field_poc = 0; - attrs->h264.bottom_field_poc = 0; - attrs->h264.field_pic_flag = 0; - -#if 1 -/// Double check the size late!!!!! - //attrs->h264.cropped_size.width = pInfo->img.PicWidthInMbs*16; - //attrs->h264.cropped_size.height = pInfo->img.PicWidthInMbs*16; - - if ( (pInfo->active_SPS.sps_disp.frame_cropping_flag) && - (pInfo->active_SPS.sps_disp.chroma_format_idc < 4)) - { - int32_t CropUnitX, CropUnitY; - int32_t SubWidthC, SubHeightC; - - if (pInfo->active_SPS.sps_disp.chroma_format_idc == 0) - { - CropUnitX = 1; - CropUnitY = 2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag; - } - else - { - SubWidthC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >> 1); - SubHeightC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >>1) - - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) & 0x1); - CropUnitX = SubWidthC; - CropUnitY = SubHeightC * (2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag); - } - - if ((int32_t)attrs->cont_size.height >(pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY)) - { - attrs->cont_size.height -= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY); - //attrs->h264.cropped_size.height-= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY); - } - } -/// Pan-Scan Info - -#endif - -} - - -static void h264_parse_update_frame_attributes(void *parent, h264_Info *pInfo) -{ - viddec_workload_t *wl_cur, *wl_next; - viddec_frame_attributes_t *attrs; - uint8_t frame_type=0; - - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - wl_cur = viddec_pm_get_header( parent ); - attrs = &wl_cur->attrs; - } - else - { - wl_next = viddec_pm_get_next_header (parent); - attrs = &wl_next->attrs; - } - - /////////update frame type - if ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&(0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)) - { - frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_FRAME_OFFSET)) )>> FRAME_TYPE_FRAME_OFFSET; - switch (frame_type) - { - case FRAME_TYPE_IDR: - attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; - break; - case FRAME_TYPE_I: - attrs->frame_type = VIDDEC_FRAME_TYPE_I; - break; - case FRAME_TYPE_P: - attrs->frame_type = VIDDEC_FRAME_TYPE_P; - break; - case FRAME_TYPE_B: - attrs->frame_type = VIDDEC_FRAME_TYPE_B; - break; - default: - attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; - break; - } - - attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; - } - else - { - frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_TOP_OFFSET)) )>> FRAME_TYPE_TOP_OFFSET; - switch (frame_type) - { - case FRAME_TYPE_IDR: - attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; - break; - case FRAME_TYPE_I: - attrs->frame_type = VIDDEC_FRAME_TYPE_I; - break; - case FRAME_TYPE_P: - attrs->frame_type = VIDDEC_FRAME_TYPE_P; - break; - case FRAME_TYPE_B: - attrs->frame_type = VIDDEC_FRAME_TYPE_B; - break; - default: - attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; - break; - - } - - frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_BOTTOM_OFFSET)) )>> 
FRAME_TYPE_BOTTOM_OFFSET; - switch (frame_type) - { - case FRAME_TYPE_IDR: - attrs->bottom_field_type = VIDDEC_FRAME_TYPE_IDR; - break; - case FRAME_TYPE_I: - attrs->bottom_field_type = VIDDEC_FRAME_TYPE_I; - break; - case FRAME_TYPE_P: - attrs->bottom_field_type = VIDDEC_FRAME_TYPE_P; - break; - case FRAME_TYPE_B: - attrs->bottom_field_type = VIDDEC_FRAME_TYPE_B; - break; - default: - attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; - break; - - } - } - - /////////update is_referece flag - attrs->h264.used_for_reference |= (pInfo->SliceHeader.nal_ref_idc == 0)? 0: 1; - - /////////update POC - attrs->h264.top_field_poc = pInfo->img.toppoc; - attrs->h264.bottom_field_poc = pInfo->img.bottompoc; - - //////// update TFF - if (attrs->h264.top_field_poc <= attrs->h264.bottom_field_poc) { - attrs->h264.top_field_first = 1; - } else { - attrs->h264.top_field_first = 0; - } - - /////// update field_pic_flag - //attrs->h264.field_pic_flag |= (pInfo->SliceHeader.field_pic_flag << pInfo->SliceHeader.bottom_field_flag); - attrs->h264.field_pic_flag |= pInfo->SliceHeader.field_pic_flag; - - return; -} - - -static void h264_fill_slice_data(h264_Info *pInfo, h264_slice_data * p_slice_data) -{ - uint32_t data=0; - uint32_t first_mb_in_slice =0; - - - - ////////////fill pic parameters 1 - data = PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT( (pInfo->SliceHeader.nal_ref_idc == 0) ) + - PUT_BSD_PP1_SLICE_TYPE_BITS(pInfo->SliceHeader.slice_type) + - PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(pInfo->active_PPS.weighted_bipred_idc) + - PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(pInfo->active_PPS.weighted_pred_flag) + - PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(pInfo->SliceHeader.num_ref_idx_l0_active) + - PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(pInfo->SliceHeader.num_ref_idx_l1_active); - p_slice_data->h264_bsd_slice_p1 = data; - - - ///////////fill pic parameters 2 - data = PUT_BSD_PP2_CABAC_INIT_IDC_BITS(pInfo->SliceHeader.cabac_init_idc) + - PUT_BSD_PP2_QP_BITS( (pInfo->SliceHeader.slice_qp_delta + pInfo->active_PPS.pic_init_qp_minus26+26) ) + - PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(pInfo->SliceHeader.disable_deblocking_filter_idc) + - PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_alpha_c0_offset_div2) + - PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_beta_offset_div2) + - PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(pInfo->SliceHeader.direct_spatial_mv_pred_flag) + - PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(pInfo->active_PPS.chroma_qp_index_offset) + - PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(pInfo->active_PPS.second_chroma_qp_index_offset); - - p_slice_data->h264_bsd_slice_p2 = data; - - /////////fill slice start - first_mb_in_slice = pInfo->SliceHeader.first_mb_in_slice; - - data = PUT_BSD_SS_START_ADDR_BITS(first_mb_in_slice); - data |= PUT_BSD_SS_SKIP_FS_IDC_BITS( pInfo->h264_list_replacement) | - PUT_BSD_SS_SKIP_TYPE_BIT(0) | - PUT_BSD_SS_SKIP_REWIND_BITS((pInfo->img.MbaffFrameFlag? 
2: 3)); - - p_slice_data->h264_bsd_slice_start = data; - -} - - -static void h264_parse_emit_4X4_scaling_matrix( void *parent, h264_Info *pInfo ) -{ - - viddec_workload_item_t wi; - - uint32_t i=0, n_items=0; - uint32_t qm_type=0; - - - for ( i = 0; i < 6; i++ ) - { - qm_type = FB_QM; - if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first - { - if (pInfo->active_SPS.seq_scaling_list_present_flag[i]) - { - pInfo->qm_present_list |= ((0x1)<active_SPS.UseDefaultScalingMatrix4x4Flag[i]) { - qm_type = DEFAULT_QM; - } else { - qm_type = SPS_QM; - } - } - } - - if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps - { - if (pInfo->active_PPS.pic_scaling_list_present_flag[i]) - { - pInfo->qm_present_list |= ((0x1)<active_PPS.UseDefaultScalingMatrix4x4Flag[i]) { - qm_type = DEFAULT_QM; - } else { - qm_type = PPS_QM; - } - } - else - { - if ((i != 0) && (i != 3) && (i < 6)) { - pInfo->qm_present_list &= ~((0x1)<active_SPS.ScalingList4x4[i][n_items*8+0]))+ - (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+1]))<<8)+ - (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+2]))<<16)+ - (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+3]))<<24); - wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+4]))+ - (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+5]))<<8)+ - (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+6]))<<16)+ - (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+7]))<<24); - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - - break; - } - case (PPS_QM): { - - for (n_items =0; n_items<2; n_items++) - { - wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8); - wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+0]))+ - (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+1]))<<8)+ - (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+2]))<<16)+ - (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+3]))<<24); - wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+4]))+ - (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+5]))<<8)+ - (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+6]))<<16)+ - (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+7]))<<24); - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - - break; - } - case (DEFAULT_QM): - { - - wi.data.data_offset = i + (DEFAULT_QM << 4); - wi.data.data_payload[0] = 0; - wi.data.data_payload[1] = 0; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - break; - } - default: - { - break; - } - } - } - -} - -static void h264_parse_emit_8X8_scaling_matrix( void *parent, h264_Info *pInfo ) -{ - - viddec_workload_item_t wi; - - uint32_t i=0, n_items=0; - uint32_t qm_type=0; - - for ( i = 6; i < 8; i++ ) - { - qm_type = FB_QM; - if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first - { - if (pInfo->active_SPS.seq_scaling_list_present_flag[i]) - { - pInfo->qm_present_list |= ((0x1)<active_SPS.UseDefaultScalingMatrix8x8Flag[i-6]) - { - qm_type = DEFAULT_QM; - } - else - { - qm_type = SPS_QM; - } - } - } - - if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps - { - if (pInfo->active_PPS.pic_scaling_list_present_flag[i]) - { - pInfo->qm_present_list |= ((0x1)<active_PPS.UseDefaultScalingMatrix8x8Flag[i-6]) - 
{ - qm_type = DEFAULT_QM; - } - else - { - qm_type = PPS_QM; - } - } - } - wi.vwi_type = VIDDEC_WORKLOAD_H264_SCALING_MATRIX; - - // data_offset 0x aa bb cc dd - // bb is the workload item offset - // cc is the qm_type - // dd is the matrix number - // - switch (qm_type) - { - case (SPS_QM): - { - for (n_items =0; n_items<8; n_items++) - { - wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8); - wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+0]))+ - (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+ - (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+ - (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+3]))<<24); - wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+4]))+ - (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+ - (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+ - (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+7]))<<24); - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - break; - } - case (PPS_QM): - { - for (n_items =0; n_items<8; n_items++) - { - wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8); - wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+0]))+ - (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+ - (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+ - (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+3]))<<24); - wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+4]))+ - (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+ - (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+ - (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+7]))<<24); - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - break; - } - case (DEFAULT_QM): - { - wi.data.data_offset = i + (DEFAULT_QM << 4); - wi.data.data_payload[0] = 0; - wi.data.data_payload[1] = 0; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - break; - } - default: { - break; - } - } - } - -} - - - -static void h264_fill_pic_data(h264_Info *pInfo, h264_pic_data * p_pic_data) -{ - uint32_t data=0; - uint32_t dec_idc =0; - uint32_t frame_structure =0; - - //fill h264_dpb_init - data = PUT_FRAME_WIDTH_MB_BITS(pInfo->dpb.PicWidthInMbs) + - PUT_FRAME_HEIGHT_MB_BITS(pInfo->dpb.FrameHeightInMbs); - - p_pic_data->h264_dpb_init = data; - - ////////////////////////////////file current pic info - data = 0; - dec_idc = pInfo->dpb.fs_dec_idc; - frame_structure = pInfo->img.structure; - if (frame_structure == FRAME) - frame_structure=0; - //data = PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc); - - //p_pic_data->h264_cur_bsd_img_init= data; - - data = PUT_BSD_IMAGE_STRUCTURE_BITS(frame_structure) + - PUT_BSD_IMAGE_IDR_BIT(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + - PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(pInfo->img.MbaffFrameFlag) + - PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(pInfo->active_PPS.entropy_coding_mode_flag) + - PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(pInfo->active_PPS.constrained_intra_pred_flag) + - PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(pInfo->active_SPS.sps_disp.frame_mbs_only_flag) + - 
PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(pInfo->active_SPS.sps_disp.direct_8x8_inference_flag) + - PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(pInfo->active_PPS.transform_8x8_mode_flag) + - PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(((pInfo->active_SPS.sps_disp.chroma_format_idc==0)? 0x1: 0x0)) + - PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(0x0) + - PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT((pInfo->active_PPS.pic_scaling_matrix_present_flag||pInfo->active_SPS.seq_scaling_matrix_present_flag)) + - PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->qm_present_list) + - PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(0x1) + - PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc); - - p_pic_data->h264_cur_bsd_img_init= data; - - //to do: add qm list - //PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->img.q .qm_present_list) + - //printf("structure = %d, tpoc = %d, bpoc = %d\n", pInfo->img.structure, pInfo->img.toppoc, pInfo->img.bottompoc); - - if (pInfo->img.structure == FRAME) - { - // Write down POC - p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc; - p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc; - } else if (pInfo->img.structure == TOP_FIELD) - { - // Write down POC - p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc; - p_pic_data->h264_cur_mpr_bf_poc = 0; - } - else if (pInfo->img.structure == BOTTOM_FIELD) - { - // Write down POC - p_pic_data->h264_cur_mpr_tf_poc = 0; - p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc; - } - else - { - // Write down POC - p_pic_data->h264_cur_mpr_tf_poc = 0; - p_pic_data->h264_cur_mpr_bf_poc = 0; - } - - return; -} - -static void h264_parse_emit_sps(void *parent, h264_Info *pInfo) -{ - viddec_workload_item_t wi; - - if (pInfo->Is_SPS_updated) - { - viddec_fw_reset_workload_item(&wi); - wi.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO; - - viddec_fw_h264_sps_set_profile_idc(&(wi.h264_sps), pInfo->active_SPS.profile_idc); - viddec_fw_h264_sps_set_level_idc(&(wi.h264_sps), pInfo->active_SPS.level_idc); - viddec_fw_h264_sps_set_chroma_format_idc(&(wi.h264_sps), pInfo->active_SPS.sps_disp.chroma_format_idc); - viddec_fw_h264_sps_set_num_ref_frames(&(wi.h264_sps), pInfo->active_SPS.num_ref_frames); - viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(&(wi.h264_sps), pInfo->active_SPS.gaps_in_frame_num_value_allowed_flag); - viddec_fw_h264_sps_set_frame_mbs_only_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_mbs_only_flag); - viddec_fw_h264_sps_set_frame_cropping_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_cropping_flag); - viddec_fw_h264_sps_set_vui_parameters_present_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.vui_parameters_present_flag); - wi.h264_sps.pic_width_in_mbs_minus1 = pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1; - wi.h264_sps.pic_height_in_map_units_minus1 = pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1; - - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - - viddec_fw_reset_workload_item(&wi); - if (pInfo->active_SPS.sps_disp.frame_cropping_flag) - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_CROPPING; - viddec_fw_h264_cropping_set_left(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_left_offset); - viddec_fw_h264_cropping_set_right(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_right_offset); - viddec_fw_h264_cropping_set_top(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_top_offset); - viddec_fw_h264_cropping_set_bottom(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset); - //cur is 
empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - viddec_fw_reset_workload_item(&wi); - if (pInfo->active_SPS.sps_disp.vui_parameters_present_flag == 1) - { - wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO; - viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag); - viddec_fw_h264_vui_set_video_signal_type_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag); - viddec_fw_h264_vui_set_pic_struct_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag); - viddec_fw_h264_vui_set_timing_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag); - viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag); - viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag); - - if (pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag == 1) - { - viddec_fw_h264_vui_set_aspect_ratio_idc(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc); - if (h264_AR_Extended_SAR == pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc) - { - viddec_fw_h264_vui_set_sar_width(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_width); - viddec_fw_h264_vui_set_sar_height(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_height); - } - } - - - if (pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag) - { - viddec_fw_h264_vui_set_colour_description_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag); - if (pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag) - { - viddec_fw_h264_vui_set_colour_primaries(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_primaries); - viddec_fw_h264_vui_set_transfer_characteristics(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.transfer_characteristics); - } - viddec_fw_h264_vui_set_video_format(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_format); - } - - if (pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1) - { - viddec_fw_h264_vui_set_fixed_frame_rate_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.fixed_frame_rate_flag); - } - - if ( (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) - || (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)) - { - viddec_fw_h264_vui_set_low_delay_hrd_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.low_delay_hrd_flag); - } - - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - - viddec_fw_reset_workload_item(&wi); - - if (pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1) - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO; - - wi.h264_vui_time_info.num_units_in_tick = pInfo->active_SPS.sps_disp.vui_seq_parameters.num_units_in_tick; - wi.h264_vui_time_info.time_scale = pInfo->active_SPS.sps_disp.vui_seq_parameters.time_scale; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } 
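The sequence-info work item above carries pic_width_in_mbs_minus1 and pic_height_in_map_units_minus1 raw; a consumer derives the luma dimensions from them. A sketch of that derivation (standard H.264 arithmetic; sps_disp stands for pInfo->active_SPS.sps_disp as used above):

    uint32_t width_px  = (sps_disp.pic_width_in_mbs_minus1 + 1) * 16;
    /* map units count field pairs when frame_mbs_only_flag == 0 */
    uint32_t height_px = (2 - sps_disp.frame_mbs_only_flag)
                       * (sps_disp.pic_height_in_map_units_minus1 + 1) * 16;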
- pInfo->Is_SPS_updated =0; - - } - - return; -} - - - - -static void h264_parse_emit_ref_list( void *parent, h264_Info *pInfo, uint32_t list_id) -{ - uint32_t i=0, nitems=0, byte_index=0, data=0, data_writed=0; - uint8_t *p_list; - viddec_workload_item_t wi; - - if (0 == list_id) - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_0; - - if ( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) ) - { - nitems = pInfo->SliceHeader.num_ref_idx_l0_active; - if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) - { - p_list = pInfo->slice_ref_list0; - } - else - { - p_list = pInfo->dpb.listX_0; - } - } - else - { - nitems =0; - p_list = pInfo->dpb.listX_0; - } - } - else - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_1; - - if ( h264_PtypeB==pInfo->SliceHeader.slice_type) - { - nitems = pInfo->SliceHeader.num_ref_idx_l1_active; - if (pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag) - { - p_list = pInfo->slice_ref_list1; - } - else - { - p_list = pInfo->dpb.listX_1; - } - } - else - { - nitems = 0; - p_list = pInfo->dpb.listX_1; - } - - } - - if (0 == nitems) - { - return; - } - - byte_index =0; - data_writed=0; - - - for (i=0; i < 32; i++) - { - if (byte_index == 0) data = 0; - - if (idpb.fs[ (p_list[i]&0x1f) ]))) - { - data |= (pInfo->h264_list_replacement) << byte_index; - } - else - { - data |= (p_list[i] & 0x7f) << byte_index; - } - } - else - { - data |= (0x80) << byte_index; - } - - - if (byte_index == 24) - { - byte_index = 0; - wi.data.data_offset = data_writed&(~0x1); - wi.data.data_payload[data_writed&0x1]=data; - - data =0; - - if (data_writed&0x1) - { - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - data_writed ++; - } - else - { - byte_index += 8; - } - } - -} - - - -void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) -{ - - viddec_workload_item_t wi; - h264_slice_data slice_data; - - uint32_t i=0, nitems=0, data=0; - uint32_t bits_offset =0, byte_offset =0; - uint8_t is_emul =0; - - ////////////////////// Update frame attributes///////////////// - h264_parse_update_frame_attributes(parent,pInfo); - - - if (pInfo->SliceHeader.sh_error) { - // Error type definition, refer to viddec_fw_common_defs.h - // if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17) - // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18) - // if this is frame based, both 2 bits should be set - - if (pInfo->push_to_cur) { - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - pInfo->wl_err_curr |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET); - } else { - pInfo->wl_err_next |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - pInfo->wl_err_next |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET); - } - } - - - ////////////////////// Update Reference list ////////////////// - if ( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) ) - { - if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) - { - nitems = pInfo->SliceHeader.num_ref_idx_l0_active; - - for (i=0; idpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0) - { - pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80; - break; - } - } - } - else - { - nitems = pInfo->dpb.listXsize[0]; - - for (i=0; idpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0) - { - pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80; - break; - } - } - } - - } - else - { - nitems =0; - } - /////file ref list 0 - 
h264_parse_emit_ref_list(parent, pInfo, 0); - - /////file ref list 1 - h264_parse_emit_ref_list(parent, pInfo, 1); - - ///////////////////////////////////// Slice Data //////////////////////////////// - h264_fill_slice_data(pInfo, &slice_data); - - wi.vwi_type = VIDDEC_WORKLOAD_H264_SLICE_REG; - - wi.data.data_offset = slice_data.h264_bsd_slice_start; - wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1; - wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2; - - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - - ///////////////////////////predict weight table item and data if have/////////////////////////// - if (pInfo->h264_pwt_enabled) - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET; - wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1; - wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset; - wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset; - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent , &wi, false); - - wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; - wi.es.es_flags = 0; - viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1); - } - else - { - viddec_pm_append_workitem( parent , &wi, true); - - wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES; - wi.es.es_flags = 0; - viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0); - } - } - - - ////////////////////////////////// Update ES Buffer for Slice /////////////////////// - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - - //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset); - - if (pInfo->active_PPS.entropy_coding_mode_flag) - { - if (0!=bits_offset) { - viddec_pm_get_bits(parent, &data, 8-bits_offset); - } - } - else - { - if (0!=bits_offset) { - wi.vwi_type = VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET; - wi.data.data_offset = bits_offset; - wi.data.data_payload[0]=0; - wi.data.data_payload[1]=0; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - } - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_pixeldata( parent ); - } - else - { - viddec_pm_append_pixeldata_next( parent); - } - - return; -} - - -void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) -{ - - viddec_workload_item_t wi; - - const uint32_t *pl; - uint32_t i=0,nitems=0; - - h264_pic_data pic_data; - - pInfo->qm_present_list=0; - - h264_parse_emit_4X4_scaling_matrix(parent, pInfo); - h264_parse_emit_8X8_scaling_matrix(parent, pInfo); - - h264_fill_pic_data(pInfo, &pic_data); - - // How many payloads must be generated - nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up - - pl = (const uint32_t *) &pic_data; - - // Dump slice data to an array of workitems, to do pl access non valid mem - for ( i = 0; i < nitems; i++ ) - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_PIC_REG; - wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct - wi.data.data_payload[0] = pl[0]; - wi.data.data_payload[1] = pl[1]; - pl += 2; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - - return; -} - -void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) -{ - - viddec_workload_item_t wi; - 
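The reference-list emitter deleted above packs one byte per list entry, four entries per payload word and two words per work item; 0x80 marks an unused slot, and entries whose frame store is non-existent are substituted with pInfo->h264_list_replacement. A condensed sketch of the packing loop (same variables as the function above; the non-existent-frame substitution is omitted for brevity):

    uint32_t i, data = 0, byte_index = 0, words_written = 0;
    for (i = 0; i < 32; i++)
    {
        uint8_t entry = (i < nitems) ? (p_list[i] & 0x7f)   /* list entry */
                                     : 0x80;                /* empty slot */
        data |= (uint32_t)entry << byte_index;
        byte_index += 8;
        if (byte_index == 32)                    /* payload word complete */
        {
            wi.data.data_offset = words_written & ~0x1;
            wi.data.data_payload[words_written & 0x1] = data;
            if (words_written & 0x1)             /* two words: emit item */
                viddec_pm_append_workitem(parent, &wi, !pInfo->push_to_cur);
            words_written++;
            byte_index = 0;
            data = 0;
        }
    }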
uint32_t i=0,nitems=0; - - ///////////////////////// Frame attributes////////////////////////// - - //Push data into current workload if first frame or frame_boundary already detected by non slice nal - if ( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal)) - { - viddec_workload_t *wl_cur = viddec_pm_get_header( parent ); - //pInfo->img.g_new_frame = 0; - pInfo->Is_first_frame_in_stream =0; - pInfo->is_frame_boundary_detected_by_non_slice_nal=0; - pInfo->push_to_cur = 1; - h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo); - } - else // move to cur if frame boundary detected by previous non slice nal, or move to next if not - { - viddec_workload_t *wl_next = viddec_pm_get_next_header (parent); - - pInfo->push_to_cur = 0; - h264_translate_parser_info_to_frame_attributes(wl_next, pInfo); - - pInfo->is_current_workload_done=1; - } - - ///////////////////// SPS///////////////////// - h264_parse_emit_sps(parent, pInfo); - - /////////////////////display frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_displayed; - - for (i=0; idpb.frame_id_need_to_be_displayed[i]; - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - pInfo->dpb.frame_numbers_need_to_be_displayed =0; - - - /////////////////////release frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_removed; - - for (i=0; idpb.frame_id_need_to_be_removed[i]; - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - pInfo->dpb.frame_numbers_need_to_be_removed =0; - - /////////////////////flust frames (do not display)///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_dropped; - - for (i=0; idpb.frame_id_need_to_be_dropped[i]; - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - pInfo->dpb.frame_numbers_need_to_be_dropped =0; - - /////////////////////updata DPB frames///////////////////// - nitems = pInfo->dpb.used_size; - for (i=0; idpb.fs_dpb_idc[i]; - - if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0) - { - wi.vwi_type = VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id; - wi.ref_frame.reference_id = fs_id; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - } - - /////////////////////updata dpb frames info (poc)///////////////////// - nitems = pInfo->dpb.used_size; - for (i=0; idpb.fs_dpb_idc[i]; - - if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0) - { - wi.vwi_type = VIDDEC_WORKLOAD_H264_DPB_FRAME_POC; - wi.data.data_offset = fs_id; - //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc); - - switch (viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id]))) - { - case (FRAME): { - wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc; - wi.data.data_payload[1] = 
pInfo->dpb.fs[fs_id].bottom_field.poc; - break; - }; - - case (TOP_FIELD): { - wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc; - wi.data.data_payload[1] = 0; - break; - }; - - case (BOTTOM_FIELD): { - wi.data.data_payload[0] = 0; - wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc; - break; - }; - - default : { - wi.data.data_payload[0] = 0; - wi.data.data_payload[1] = 0; - break; - }; - } - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - } - - /////////////////////Alloc buffer for current Existing frame///////////////////// - if (0!=pInfo->dpb.frame_numbers_need_to_be_allocated) - { - if (pInfo->push_to_cur) - { - viddec_workload_t *wl_cur = viddec_pm_get_header (parent); - wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f); - } - else - { - viddec_workload_t *wl_next = viddec_pm_get_next_header (parent); - wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f); - } - } - pInfo->dpb.frame_numbers_need_to_be_allocated =0; - - return; -} - - - -void h264_parse_emit_eos( void *parent, h264_Info *pInfo ) -{ - - uint32_t nitems=0, i=0; - viddec_workload_item_t wi; - - - wi.vwi_type = VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY; - wi.ref_frame.reference_id = 0; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - - //// Now we can flush out all frames in DPB fro display - - if (MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc) - { - if (viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3) - { - h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc); //, DANGLING_TYPE_GAP_IN_FRAME - } - } - - - h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0); - h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames); - - - /////////////////////display frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_displayed; - - for (i=0; idpb.frame_id_need_to_be_displayed[i]; - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - //cur is empty, fill new frame in cur - viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur); - } - pInfo->dpb.frame_numbers_need_to_be_displayed =0; - - - /////////////////////release frames///////////////////// - nitems = pInfo->dpb.frame_numbers_need_to_be_removed; - - for (i=0; idpb.frame_id_need_to_be_removed[i]; - wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i]; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - - if (pInfo->push_to_cur) //cur is empty, fill new frame in cur - { - viddec_pm_append_workitem( parent, &wi , false); - viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE); - } - else - { - viddec_pm_append_workitem( parent, &wi , true); - viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next); - } - } - pInfo->dpb.frame_numbers_need_to_be_removed =0; - - return; -} - - - - - - diff --git a/mixvbp/vbp_plugin/mp4/Android.mk b/mixvbp/vbp_plugin/mp4/Android.mk index da9ed15..f1d3577 100755 --- a/mixvbp/vbp_plugin/mp4/Android.mk +++ b/mixvbp/vbp_plugin/mp4/Android.mk @@ -6,7 +6,6 @@ LOCAL_SRC_FILES := \ viddec_mp4_visualobject.c \ viddec_mp4_decodevideoobjectplane.c \ viddec_mp4_parse.c \ - viddec_fw_mp4_workload.c \ 
viddec_mp4_videoobjectplane.c \ viddec_parse_sc_mp4.c \ viddec_mp4_shortheader.c \ diff --git a/mixvbp/vbp_plugin/mp4/include/viddec_fw_mp4.h b/mixvbp/vbp_plugin/mp4/include/viddec_fw_mp4.h index bb772d4..d6502d9 100755 --- a/mixvbp/vbp_plugin/mp4/include/viddec_fw_mp4.h +++ b/mixvbp/vbp_plugin/mp4/include/viddec_fw_mp4.h @@ -1,7 +1,8 @@ #ifndef VIDDEC_FW_MP4_H #define VIDDEC_FW_MP4_H -#include "viddec_fw_workload.h" +#include "viddec_fw_common_defs.h" +#include "viddec_fw_frame_attr.h" enum viddec_fw_mp4_ref_frame_id { @@ -11,19 +12,6 @@ enum viddec_fw_mp4_ref_frame_id VIDDEC_MP4_FRAME_MAX = 3, }; -enum mp4_workload_item_type -{ - VIDDEC_WORKLOAD_MP4_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0, - VIDDEC_WORKLOAD_MP4_FUTURE_FRAME, - VIDDEC_WORKLOAD_MP4_VOL_INFO = VIDDEC_WORKLOAD_DECODER_SPECIFIC, - VIDDEC_WORKLOAD_MP4_VOP_INFO, - VIDDEC_WORKLOAD_MP4_BVOP_INFO, - VIDDEC_WORKLOAD_MP4_SPRT_TRAJ, - VIDDEC_WORKLOAD_MP4_IQUANT, - VIDDEC_WORKLOAD_MP4_NIQUANT, - VIDDEC_WORKLOAD_MP4_SVH, -}; - enum viddec_fw_mp4_vop_coding_type_t { VIDDEC_MP4_VOP_TYPE_I = 0, diff --git a/mixvbp/vbp_plugin/mp4/viddec_fw_mp4_workload.c b/mixvbp/vbp_plugin/mp4/viddec_fw_mp4_workload.c deleted file mode 100755 index c9ec2fb..0000000 --- a/mixvbp/vbp_plugin/mp4/viddec_fw_mp4_workload.c +++ /dev/null @@ -1,377 +0,0 @@ -#ifndef VBP -#include - -#include "viddec_fw_workload.h" -#include "viddec_parser_ops.h" -#include "viddec_fw_mp4.h" -#include "viddec_mp4_parse.h" - -uint32_t viddec_fw_mp4_populate_attr(viddec_workload_t *wl, viddec_mp4_parser_t *parser) -{ - uint32_t result = MP4_STATUS_OK; - viddec_frame_attributes_t *attr = &(wl->attrs); - mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject); - - memset(attr, 0, sizeof(viddec_frame_attributes_t)); - - attr->cont_size.width = vol->video_object_layer_width; - attr->cont_size.height = vol->video_object_layer_height; - - // Translate vop_coding_type - switch (vol->VideoObjectPlane.vop_coding_type) - { - case MP4_VOP_TYPE_B: - attr->frame_type = VIDDEC_FRAME_TYPE_B; - break; - case MP4_VOP_TYPE_P: - attr->frame_type = VIDDEC_FRAME_TYPE_P; - break; - case MP4_VOP_TYPE_S: - attr->frame_type = VIDDEC_FRAME_TYPE_S; - break; - case MP4_VOP_TYPE_I: - attr->frame_type = VIDDEC_FRAME_TYPE_I; - break; - default: - break; - } // switch on vop_coding_type - - attr->mpeg4.top_field_first = vol->VideoObjectPlane.top_field_first; - - return result; -} // viddec_fw_mp4_populate_attr - -uint32_t viddec_fw_mp4_insert_vol_workitem(void *parent, viddec_mp4_parser_t *parser) -{ - uint32_t result = MP4_STATUS_OK; - viddec_workload_item_t wi; - viddec_fw_mp4_vol_info_t vol_info; - mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject); - - memset(&vol_info, 0, sizeof(viddec_fw_mp4_vol_info_t)); - - // Get vol_flags - viddec_fw_mp4_set_reversible_vlc(&vol_info, vol->reversible_vlc); - viddec_fw_mp4_set_data_partitioned(&vol_info, vol->data_partitioned); - viddec_fw_mp4_set_resync_marker_disable(&vol_info, vol->resync_marker_disable); - viddec_fw_mp4_set_quarter_sample(&vol_info, vol->quarter_sample); - viddec_fw_mp4_set_obmc_disable(&vol_info, vol->obmc_disable); - viddec_fw_mp4_set_interlaced(&vol_info, vol->interlaced); - viddec_fw_mp4_set_vol_shape(&vol_info, vol->video_object_layer_shape); - viddec_fw_mp4_set_short_video_header_flag(&vol_info, vol->short_video_header); - - // Get vol_size - viddec_fw_mp4_set_vol_width(&vol_info, vol->video_object_layer_width); - viddec_fw_mp4_set_vol_height(&vol_info, vol->video_object_layer_height); - - // Get vol_item 
- viddec_fw_mp4_set_quant_type(&vol_info, vol->quant_type); - viddec_fw_mp4_set_quant_precision(&vol_info, vol->quant_precision); - viddec_fw_mp4_set_sprite_warping_accuracy(&vol_info, vol->sprite_info.sprite_warping_accuracy); - viddec_fw_mp4_set_sprite_warping_points(&vol_info, vol->sprite_info.no_of_sprite_warping_points); - viddec_fw_mp4_set_sprite_enable(&vol_info, vol->sprite_enable); - viddec_fw_mp4_set_vop_time_increment_resolution(&vol_info, vol->vop_time_increment_resolution); - - - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_VOL_INFO; - wi.vwi_payload[0] = vol_info.vol_flags; - wi.vwi_payload[1] = vol_info.vol_size; - wi.vwi_payload[2] = vol_info.vol_item; - - result = viddec_pm_append_workitem(parent, &wi, false); - - return result; -} // viddec_fw_mp4_insert_vol_workitem - -uint32_t viddec_fw_mp4_insert_vop_workitem(void *parent, viddec_mp4_parser_t *parser) -{ - uint32_t result = MP4_STATUS_OK; - viddec_workload_item_t wi; - viddec_fw_mp4_vop_info_t vop_info; - mp4_VideoObjectPlane_t *vop = &(parser->info.VisualObject.VideoObject.VideoObjectPlane); - uint32_t byte = 0; - unsigned char is_emul; - - memset(&vop_info, 0, sizeof(viddec_fw_mp4_vop_info_t)); - - // Get frame_info - viddec_fw_mp4_set_past_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_2].is_field); - viddec_fw_mp4_set_past_frame_id(&vop_info, VIDDEC_MP4_FRAME_PAST); - viddec_fw_mp4_set_future_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_1].is_field); - viddec_fw_mp4_set_future_frame_id(&vop_info, VIDDEC_MP4_FRAME_FUTURE); - viddec_fw_mp4_set_current_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_0].is_field); - viddec_fw_mp4_set_current_frame_id(&vop_info, VIDDEC_MP4_FRAME_CURRENT); - - // HW has a limitation that the enums for PAST(1), FUTURE(2) and CURRENT(0) cannot be changed and - // the spec does not support field pictures. Hence the field_frame bits are always zero. - // This gives us the constant 0x10200. 
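That constant decomposes cleanly under the assumption that the ids are CURRENT = 0, PAST = 1, FUTURE = 2 (consistent with VIDDEC_MP4_FRAME_MAX = 3 above); the exact bit positions are also an assumption here, since the setter macros live in viddec_fw_mp4.h:

    /* Assumed packing: current id in bits [7:0], future id in bits [15:8],
       past id in bits [23:16]; all field_frame bits zero. */
    uint32_t frame_info = (1u << 16)    /* VIDDEC_MP4_FRAME_PAST    */
                        | (2u <<  8)    /* VIDDEC_MP4_FRAME_FUTURE  */
                        | (0u <<  0);   /* VIDDEC_MP4_FRAME_CURRENT */
    /* frame_info == 0x10200, the hard-coded value below */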
- vop_info.frame_info = 0x10200; - - // Get vop_data - // Quant scale is in the video_packet_header or the gob_layer - both of which are parsed by the BSP - viddec_fw_mp4_set_vop_quant_scale(&vop_info, 0); - viddec_fw_mp4_set_vop_fcode_backward(&vop_info, vop->vop_fcode_backward); - viddec_fw_mp4_set_vop_fcode_forward(&vop_info, vop->vop_fcode_forward); - viddec_fw_mp4_set_vop_quant(&vop_info, vop->vop_quant); - viddec_fw_mp4_set_alternate_vertical_scan_flag(&vop_info, vop->alternate_vertical_scan_flag); - viddec_fw_mp4_set_top_field_first(&vop_info, vop->top_field_first); - viddec_fw_mp4_set_intra_dc_vlc_thr(&vop_info, vop->intra_dc_vlc_thr); - viddec_fw_mp4_set_vop_rounding_type(&vop_info, vop->vop_rounding_type); - viddec_fw_mp4_set_vop_coding_type(&vop_info, vop->vop_coding_type); - - // Get vol_item - result = viddec_pm_get_au_pos(parent, &vop_info.bit_offset, &byte, &is_emul); - - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_VOP_INFO; - wi.vwi_payload[0] = vop_info.frame_info; - wi.vwi_payload[1] = vop_info.vop_data; - wi.vwi_payload[2] = vop_info.bit_offset; - - result = viddec_pm_append_workitem(parent, &wi, false); - - return result; -} // viddec_fw_mp4_insert_vop_workitem - -uint32_t viddec_fw_mp4_insert_vpsh_workitem(void *parent, viddec_mp4_parser_t *parser) -{ - uint32_t result = MP4_STATUS_OK; - viddec_workload_item_t wi; - viddec_fw_mp4_svh_t svh_info; - mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263); - - memset(&svh_info, 0, sizeof(viddec_fw_mp4_svh_t)); - - // Get svh_data - viddec_fw_mp4_set_temporal_reference(&svh_info, svh->temporal_reference); - viddec_fw_mp4_set_num_macroblocks_in_gob(&svh_info, svh->num_macroblocks_in_gob); - viddec_fw_mp4_set_num_gobs_in_vop(&svh_info, svh->num_gobs_in_vop); - viddec_fw_mp4_set_num_rows_in_gob(&svh_info, svh->num_rows_in_gob); - - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_SVH; - wi.vwi_payload[0] = svh_info.svh_data; - wi.vwi_payload[1] = svh_info.pad1; - wi.vwi_payload[2] = svh_info.pad2; - - result = viddec_pm_append_workitem(parent, &wi, false); - - return result; -} // viddec_fw_mp4_insert_vpsh_workitem - -uint32_t viddec_fw_mp4_insert_sprite_workitem(void *parent, viddec_mp4_parser_t *parser) -{ - uint32_t result = MP4_STATUS_OK; - viddec_workload_item_t wi; - viddec_fw_mp4_sprite_trajectory_t sprite_info; - mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject); - mp4_VideoObjectPlane_t *vop = &(parser->info.VisualObject.VideoObject.VideoObjectPlane); - uint8_t no_of_entries_per_item = 3; - uint8_t no_of_sprite_workitems = 0; - uint8_t warp_index = 0; - int i, j; - - if (!vol->sprite_info.no_of_sprite_warping_points) - return result; - - no_of_sprite_workitems = (vol->sprite_info.no_of_sprite_warping_points > 3) ? 
2 : 1; - - for (i=0; isprite_info.no_of_sprite_warping_points) - { - if (warp_index < 4) - { - viddec_fw_mp4_set_warping_point_index(sprite_info.warping_mv_code[j], warp_index); - viddec_fw_mp4_set_warping_mv_code_du(sprite_info.warping_mv_code[j], vop->warping_mv_code_du[warp_index]); - viddec_fw_mp4_set_warping_mv_code_dv(sprite_info.warping_mv_code[j], vop->warping_mv_code_dv[warp_index]); - } - } - else - { - sprite_info.warping_mv_code[j] = 0xF << 28; - } - warp_index++; - } - - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_SPRT_TRAJ; - wi.vwi_payload[0] = sprite_info.warping_mv_code[0]; - wi.vwi_payload[1] = sprite_info.warping_mv_code[1]; - wi.vwi_payload[2] = sprite_info.warping_mv_code[2]; - - result = viddec_pm_append_workitem(parent, &wi, false); - } - - return result; -} // viddec_fw_mp4_insert_sprite_workitem - -uint32_t viddec_fw_mp4_insert_bvop_workitem(void *parent, viddec_mp4_parser_t *parser) -{ - uint32_t result = MP4_STATUS_OK; - viddec_workload_item_t wi; - mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject); - - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_BVOP_INFO; - wi.vwi_payload[0] = vol->Tframe; - wi.vwi_payload[1] = vol->TRD; - wi.vwi_payload[2] = vol->TRB; - - result = viddec_pm_append_workitem(parent, &wi, false); - - return result; -} // viddec_fw_mp4_insert_bvop_workitem - -uint32_t viddec_fw_mp4_insert_qmat(void *parent, uint8_t intra_quant_flag, uint32_t *qmat) -{ - uint32_t result = MP4_STATUS_OK; - viddec_workload_item_t wi; - uint8_t i; - - // No of items = (64/4 Dwords / 3 entries per workload item) - // 64 8b entries => 64 * 8 / 32 DWORDS => 64/4 DWORDS => 16 DWORDS - // Each item can store 3 DWORDS, 16 DWORDS => 16/3 items => 6 items - for (i=0; i<6; i++) - { - memset(&wi, 0, sizeof(viddec_workload_item_t)); - - if (intra_quant_flag) - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_IQUANT; - else - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_NIQUANT; - - if (i == 6) - { - wi.vwi_payload[0] = qmat[0]; - wi.vwi_payload[1] = 0; - wi.vwi_payload[2] = 0; - } - else - { - wi.vwi_payload[0] = qmat[0]; - wi.vwi_payload[1] = qmat[1]; - wi.vwi_payload[2] = qmat[2]; - } - - qmat += 3; - - result = viddec_pm_append_workitem(parent, &wi, false); - } - - return result; -} // viddec_fw_mp4_insert_qmat - -uint32_t viddec_fw_mp4_insert_inversequant_workitem(void *parent, mp4_VOLQuant_mat_t *qmat) -{ - uint32_t result = MP4_STATUS_OK; - - if (qmat->load_intra_quant_mat) - { - result = viddec_fw_mp4_insert_qmat(parent, true, (uint32_t *) &(qmat->intra_quant_mat)); - } - - if (qmat->load_nonintra_quant_mat) - { - result = viddec_fw_mp4_insert_qmat(parent, false, (uint32_t *) &(qmat->nonintra_quant_mat)); - } - - return result; -} // viddec_fw_mp4_insert_inversequant_workitem - -uint32_t viddec_fw_mp4_insert_past_frame_workitem(void *parent) -{ - uint32_t result = MP4_STATUS_OK; - viddec_workload_item_t wi; - - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_PAST_FRAME; - wi.ref_frame.reference_id = 0; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - result = viddec_pm_append_workitem(parent, &wi, false); - - return result; -} // viddec_fw_mp4_insert_past_frame_workitem - -uint32_t viddec_fw_mp4_insert_future_frame_workitem(void *parent) -{ - uint32_t result = MP4_STATUS_OK; - viddec_workload_item_t wi; - - wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_FUTURE_FRAME; - wi.ref_frame.reference_id = 0; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - result = 
viddec_pm_append_workitem(parent, &wi, false); - - return result; -} // viddec_fw_mp4_insert_future_frame_workitem - -uint32_t viddec_fw_mp4_insert_reorder_workitem(void *parent) -{ - uint32_t result = MP4_STATUS_OK; - viddec_workload_item_t wi; - - // Move frame at location 1 of the reference table to location 0 - wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER; - wi.ref_reorder.ref_table_offset = 0; - wi.ref_reorder.ref_reorder_00010203 = 0x01010203; - wi.ref_reorder.ref_reorder_04050607 = 0x04050607; - - result = viddec_pm_append_workitem(parent, &wi, false); - - return result; -} // viddec_fw_mp4_insert_reorder_workitem - -uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt) -{ - uint32_t result = 0; - viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt; - viddec_workload_t *wl = viddec_pm_get_header(parent); - - result = viddec_fw_mp4_populate_attr(wl, parser); - result = viddec_fw_mp4_insert_vol_workitem(parent, parser); - result = viddec_fw_mp4_insert_vop_workitem(parent, parser); - result = viddec_fw_mp4_insert_sprite_workitem(parent, parser); - result = viddec_fw_mp4_insert_inversequant_workitem(parent, &(parser->info.VisualObject.VideoObject.quant_mat_info)); - - if (parser->info.VisualObject.VideoObject.short_video_header) - result = viddec_fw_mp4_insert_vpsh_workitem(parent, parser); - - if (!parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded) - wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME; - - // Send reference re-order tag for all reference frame types - if (parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type != MP4_VOP_TYPE_B) - { - result = viddec_fw_mp4_insert_reorder_workitem(parent); - } - - // Handle vop_coding_type based information - switch (parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type) - { - case MP4_VOP_TYPE_B: - result = viddec_fw_mp4_insert_bvop_workitem(parent, parser); - result = viddec_fw_mp4_insert_past_frame_workitem(parent); - result = viddec_fw_mp4_insert_future_frame_workitem(parent); - break; - case MP4_VOP_TYPE_P: - case MP4_VOP_TYPE_S: - result = viddec_fw_mp4_insert_past_frame_workitem(parent); - // Deliberate fall-thru to type I - case MP4_VOP_TYPE_I: - wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (1 & WORKLOAD_REFERENCE_FRAME_BMASK); - // Swap reference information - parser->ref_frame[VIDDEC_MP4_INDX_2] = parser->ref_frame[VIDDEC_MP4_INDX_1]; - parser->ref_frame[VIDDEC_MP4_INDX_1] = parser->ref_frame[VIDDEC_MP4_INDX_0]; - break; - break; - default: - break; - } // switch on vop_coding_type - - result = viddec_pm_append_pixeldata(parent); - - return result; -} // viddec_fw_mp4_emit_workload -#endif diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c index 85eab1a..ac4fb2e 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c @@ -98,7 +98,6 @@ uint32_t viddec_mp4_parse(void *parent, void *ctxt) } case MP4_SC_USER_DATA: { /* Copy userdata to user-visible buffer (EMIT) */ - status = mp4_Parse_UserData(parent, cxt); DEB("MP4_USER_DATA_SC: \n"); break; } diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c index 11c82d4..2a6a933 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c @@ -346,22 +346,5 @@ mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser mp4_set_hdr_bitstream_error(parser, false, ret); - // POPULATE WORKLOAD ITEM - { 
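Every "POPULATE WORKLOAD ITEM" block removed in this series, including the one below, follows the same pattern: fill a viddec_workload_item_t (a type tag plus three 32-bit payload words) and queue it on the current workload. A minimal standalone sketch of that pattern (assuming only this tree's viddec_fw_workload.h; emit_three_words is an invented helper name, and note that callers here remap the return code, as the "if (ret == 1) ret = MP4_STATUS_OK;" lines show):

#include <string.h>
#include "viddec_fw_workload.h"   /* viddec_workload_item_t, viddec_pm_append_workitem() */

static uint32_t emit_three_words(void *parent, uint32_t type,
                                 uint32_t w0, uint32_t w1, uint32_t w2)
{
    viddec_workload_item_t wi;
    memset(&wi, 0, sizeof(wi));   /* keep unused union members from carrying garbage */
    wi.vwi_type = (workload_item_type)type;
    wi.vwi_payload[0] = w0;
    wi.vwi_payload[1] = w1;
    wi.vwi_payload[2] = w2;
    return viddec_pm_append_workitem(parent, &wi, false);
}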
- viddec_workload_item_t wi; - - wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT; - - wi.mp4_vpsh.info = 0; - wi.mp4_vpsh.pad1 = 0; - wi.mp4_vpsh.pad2 = 0; - - viddec_fw_mp4_vpsh_set_source_format(&wi.mp4_vpsh, svh->source_format); - - ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); - if (ret == 1) - ret = MP4_STATUS_OK; - } - return ret; } diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c index c50ef3c..1ed52f3 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c @@ -587,43 +587,6 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse parser->bitstream_error |= MP4_BS_ERROR_HDR_NONDEC; return ret; } -//DEB("before wkld mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret); - // POPULATE WORKLOAD ITEM -/* - { - viddec_workload_item_t wi; - viddec_workload_t *wl = viddec_pm_get_header(parent); - - wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ; - - wi.mp4_vol.vol_aspect_ratio = 0; - wi.mp4_vol.vol_bit_rate = 0; - wi.mp4_vol.vol_frame_rate = 0; - - viddec_fw_mp4_vol_set_aspect_ratio_info(&wi.mp4_vol, vidObjLay->aspect_ratio_info); - viddec_fw_mp4_vol_set_par_width(&wi.mp4_vol, vidObjLay->aspect_ratio_info_par_width); - viddec_fw_mp4_vol_set_par_height(&wi.mp4_vol, vidObjLay->aspect_ratio_info_par_height); - viddec_fw_mp4_vol_set_control_param(&wi.mp4_vol, vidObjLay->is_vol_control_parameters); - viddec_fw_mp4_vol_set_chroma_format(&wi.mp4_vol, vidObjLay->VOLControlParameters.chroma_format); - viddec_fw_mp4_vol_set_interlaced(&wi.mp4_vol, vidObjLay->interlaced); - viddec_fw_mp4_vol_set_fixed_vop_rate(&wi.mp4_vol, vidObjLay->fixed_vop_rate); - - viddec_fw_mp4_vol_set_vbv_param(&wi.mp4_vol, vidObjLay->VOLControlParameters.vbv_parameters); - viddec_fw_mp4_vol_set_bit_rate(&wi.mp4_vol, vidObjLay->VOLControlParameters.bit_rate); - - viddec_fw_mp4_vol_set_fixed_vop_time_increment(&wi.mp4_vol, vidObjLay->fixed_vop_time_increment); - viddec_fw_mp4_vol_set_vop_time_increment_resolution(&wi.mp4_vol, vidObjLay->vop_time_increment_resolution); - - ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); - if (ret == 1) - ret = MP4_STATUS_OK; - - memset(&(wl->attrs), 0, sizeof(viddec_frame_attributes_t)); - - wl->attrs.cont_size.width = vidObjLay->video_object_layer_width; - wl->attrs.cont_size.height = vidObjLay->video_object_layer_height; - } -*/ return ret; } diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c index 97f36a3..cbe88bd 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c @@ -41,25 +41,6 @@ mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t mp4_set_hdr_bitstream_error(parser, true, ret); - // POPULATE WORKLOAD ITEM - { - viddec_workload_item_t wi; - - wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ; - - wi.mp4_gvop.gvop_info = 0; - wi.mp4_gvop.pad1 = 0; - wi.mp4_gvop.pad2 = 0; - - viddec_fw_mp4_gvop_set_broken_link(&wi.mp4_gvop, data->broken_link); - viddec_fw_mp4_gvop_set_closed_gov(&wi.mp4_gvop, data->closed_gov); - viddec_fw_mp4_gvop_set_time_code(&wi.mp4_gvop, time_code); - - ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); - if (ret == 1) - ret = MP4_STATUS_OK; - } - return ret; } diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c 
b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c index d1ec032..ee54452 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c @@ -188,103 +188,7 @@ mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser) mp4_set_hdr_bitstream_error(parser, true, ret); - // POPULATE WORKLOAD ITEM - { - viddec_workload_item_t wi; - mp4_VideoSignalType_t *vst = &(visObj->VideoSignalType); - - wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ; - - wi.mp4_vs_vo.vs_item = 0; - wi.mp4_vs_vo.video_signal_type = 0; - wi.mp4_vs_vo.color_desc = 0; - - viddec_fw_mp4_vs_set_profile_and_level_indication(&wi.mp4_vs_vo, pInfo->profile_and_level_indication); - - viddec_fw_mp4_vo_set_video_signal_type(&wi.mp4_vs_vo, vst->is_video_signal_type); - if (vst->is_video_signal_type) - { - viddec_fw_mp4_vo_set_video_range(&wi.mp4_vs_vo, vst->video_range); - viddec_fw_mp4_vo_set_video_format(&wi.mp4_vs_vo, vst->video_format); - viddec_fw_mp4_vo_set_colour_description(&wi.mp4_vs_vo, vst->is_colour_description); - if (vst->is_colour_description) - { - viddec_fw_mp4_vo_set_transfer_char(&wi.mp4_vs_vo, vst->transfer_characteristics); - viddec_fw_mp4_vo_set_color_primaries(&wi.mp4_vs_vo, vst->colour_primaries); - } - } - - int ret_val; - ret_val = viddec_pm_append_workitem(parent, &wi, false); - if (ret_val == 1) - ret = MP4_STATUS_OK; - } - return ret; } // mp4_Parse_VisualObject -mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser) -{ - mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; - uint32_t user_data; - viddec_workload_item_t wi; - - DEB("ParseUser-prev_sc: 0x%x\n", parser->prev_sc); - - /* find the scope based on start code sc */ - switch (parser->prev_sc) { - case MP4_SC_VISUAL_OBJECT_SEQUENCE: - wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA; - break; - case MP4_SC_VISUAL_OBJECT: - wi.vwi_type = VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA; - break; - case MP4_SC_GROUP_OF_VOP: - wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA; - break; - case MP4_SC_VIDEO_OBJECT_LAYER_MIN: - wi.vwi_type = VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA; - break; - default: - wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen - break; - } - - /* Read 1 byte of user data and store it in workitem for the current stream level (VS/VO/VOL/GVOP). - Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size, - append the workitem. This loop is repeated till all user data is extracted and appended. */ - wi.user_data.size = 0; - while (viddec_pm_get_bits(parent, &user_data, 8) != -1) - { - /* Store the valid byte in data payload */ - wi.user_data.data_payload[wi.user_data.size] = user_data; - wi.user_data.size++; - - /* When size exceeds payload size, append workitem and continue */ - if (wi.user_data.size >= 11) - { - viddec_pm_setup_userdata(&wi); - - ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); - wi.user_data.size = 0; - } - } - /* If size is not 0, append remaining user data. 
*/ - if (wi.user_data.size > 0) - { - int i; - for (i=wi.user_data.size; i<11; i++) - { - wi.user_data.data_payload[i] = 0; - } - viddec_pm_setup_userdata(&wi); - ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false); - wi.user_data.size = 0; - } - - if (ret == 1) - ret = MP4_STATUS_OK; - - return ret; -} // mp4_Parse_UserData diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.h index d1f5a23..a5afe74 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.h +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.h @@ -7,6 +7,4 @@ mp4_Status_t mp4_Parse_VisualSequence(void *parent, viddec_mp4_parser_t *parser) mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser); -mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser); - #endif diff --git a/mixvbp/vbp_plugin/vc1/include/vc1common.h b/mixvbp/vbp_plugin/vc1/include/vc1common.h index e4f1b3f..8b3a8b8 100755 --- a/mixvbp/vbp_plugin/vc1/include/vc1common.h +++ b/mixvbp/vbp_plugin/vc1/include/vc1common.h @@ -26,7 +26,7 @@ enum vc1_workload_item_type { - VIDDEC_WORKLOAD_VC1_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC, + VIDDEC_WORKLOAD_VC1_DMEM, VIDDEC_WORKLOAD_VC1_BITOFFSET, VIDDEC_WORKLOAD_VC1_BITPLANE0, VIDDEC_WORKLOAD_VC1_BITPLANE1, @@ -40,7 +40,7 @@ enum vc1_workload_item_type VIDDEC_WORKLOAD_VC1_REGS_SLICE_CONTROL_INFO, VIDDEC_WORKLOAD_VC1_REGS_SLICE_OTHER_INFO, VIDDEC_WORKLOAD_VC1_REGS_REF_FRAME_TYPE, - VIDDEC_WORKLOAD_VC1_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0, + VIDDEC_WORKLOAD_VC1_PAST_FRAME, VIDDEC_WORKLOAD_VC1_FUTURE_FRAME, }; diff --git a/mixvbp/vbp_plugin/vc1/vc1.h b/mixvbp/vbp_plugin/vc1/vc1.h index ca92d17..e7d0ac2 100755 --- a/mixvbp/vbp_plugin/vc1/vc1.h +++ b/mixvbp/vbp_plugin/vc1/vc1.h @@ -37,10 +37,10 @@ enum { #endif #endif -#include "viddec_fw_workload.h" -#include "vc1parse_common_defs.h" +#include "viddec_fw_common_defs.h" +#include "viddec_fw_frame_attr.h" #include "vc1common.h" - +#include "vc1parse_common_defs.h" #ifdef __cplusplus extern "C" { #endif diff --git a/mixvbp/vbp_plugin/vc1/vc1parse.c b/mixvbp/vbp_plugin/vc1/vc1parse.c index 06ac094..bce1f57 100755 --- a/mixvbp/vbp_plugin/vc1/vc1parse.c +++ b/mixvbp/vbp_plugin/vc1/vc1parse.c @@ -79,31 +79,6 @@ vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo) DEB("rcv: res: %dx%d\n", md->width, md->height); - // POPULATE WORKLOAD ITEM - { - viddec_workload_item_t wi; - - wi.vwi_type = VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C; - - wi.vc1_sh_struct_a_c.size = 0; - wi.vc1_sh_struct_a_c.flags = 0; - wi.vc1_sh_struct_a_c.pad = 0; - - viddec_fw_vc1_set_rcv_horiz_size(&wi.vc1_sh_struct_a_c, rcv.struct_a.HORIZ_SIZE); - viddec_fw_vc1_set_rcv_vert_size(&wi.vc1_sh_struct_a_c, rcv.struct_a.VERT_SIZE); - - viddec_fw_vc1_set_rcv_bitrtq_postproc(&wi.vc1_sh_struct_a_c, rcv.struct_c.BITRTQ_POSTPROC); - viddec_fw_vc1_set_rcv_frmrtq_postproc(&wi.vc1_sh_struct_a_c, rcv.struct_c.FRMRTQ_POSTPROC); - viddec_fw_vc1_set_rcv_profile(&wi.vc1_sh_struct_a_c, rcv.struct_c.PROFILE); - viddec_fw_vc1_set_rcv_level(&wi.vc1_sh_struct_a_c, 0); - viddec_fw_vc1_set_rcv_cbr(&wi.vc1_sh_struct_a_c, 0); - viddec_fw_vc1_set_rcv_rangered(&wi.vc1_sh_struct_a_c, rcv.struct_c.RANGERED); - viddec_fw_vc1_set_rcv_maxbframes(&wi.vc1_sh_struct_a_c, rcv.struct_c.MAXBFRAMES); - viddec_fw_vc1_set_rcv_finterpflag(&wi.vc1_sh_struct_a_c, rcv.struct_c.FINTERPFLAG); - - result = viddec_pm_append_workitem(ctxt, &wi, false); - } - return status; } @@ -255,63 +230,6 @@ vc1_Status 
vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) DEB("md: res: %dx%d\n", md->width, md->height); DEB("sh: dispres: %dx%d\n", sh.seq_disp_size.DISP_HORIZ_SIZE, sh.seq_disp_size.DISP_VERT_SIZE); - // POPULATE WORKLOAD ITEM - { - viddec_workload_item_t wi_sl, wi_de; - - wi_sl.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO; - - wi_sl.vc1_sl.size = 0; - wi_sl.vc1_sl.flags = 0; - wi_sl.vc1_sl.pad = 0; - - viddec_fw_vc1_set_profile(&wi_sl.vc1_sl, sh.seq_flags.PROFILE); - viddec_fw_vc1_set_level(&wi_sl.vc1_sl, sh.seq_flags.LEVEL); - viddec_fw_vc1_set_colordiff_format(&wi_sl.vc1_sl, sh.seq_flags.COLORDIFF_FORMAT); - viddec_fw_vc1_set_pulldown(&wi_sl.vc1_sl, sh.seq_max_size.PULLDOWN); - viddec_fw_vc1_set_max_coded_width(&wi_sl.vc1_sl, sh.seq_max_size.MAX_CODED_WIDTH); - viddec_fw_vc1_set_max_coded_height(&wi_sl.vc1_sl, sh.seq_max_size.MAX_CODED_HEIGHT); - - viddec_fw_vc1_set_bitrtq_postproc(&wi_sl.vc1_sl, sh.seq_flags.BITRTQ_POSTPROC); - viddec_fw_vc1_set_frmrtq_postproc(&wi_sl.vc1_sl, sh.seq_flags.FRMRTQ_POSTPROC); - viddec_fw_vc1_set_interlace(&wi_sl.vc1_sl, sh.seq_max_size.INTERLACE); - viddec_fw_vc1_set_tfcntrflag(&wi_sl.vc1_sl, sh.seq_max_size.TFCNTRFLAG); - viddec_fw_vc1_set_finterpflag(&wi_sl.vc1_sl, sh.seq_max_size.FINTERPFLAG); - viddec_fw_vc1_set_psf(&wi_sl.vc1_sl, sh.seq_max_size.PSF); - viddec_fw_vc1_set_display_ext(&wi_sl.vc1_sl, sh.seq_max_size.DISPLAY_EXT); - - result = viddec_pm_append_workitem(ctxt, &wi_sl, false); - - // send DISPLAY EXTENSION metadata if present - if (sh.seq_max_size.DISPLAY_EXT) - { - wi_de.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO; - - wi_de.vc1_sl_de.size = 0; - wi_de.vc1_sl_de.framerate = 0; - wi_de.vc1_sl_de.aspectsize = 0; - - viddec_fw_vc1_set_disp_horiz_size(&wi_de.vc1_sl_de, sh.seq_disp_size.DISP_HORIZ_SIZE); - viddec_fw_vc1_set_disp_vert_size(&wi_de.vc1_sl_de, sh.seq_disp_size.DISP_VERT_SIZE); - viddec_fw_vc1_set_disp_aspect_ratio_flag(&wi_de.vc1_sl_de, sh.seq_disp_size.ASPECT_RATIO_FLAG); - viddec_fw_vc1_set_disp_color_format_flag(&wi_de.vc1_sl_de, sh.COLOR_FORMAT_FLAG); - viddec_fw_vc1_set_disp_framerate_flag(&wi_de.vc1_sl_de, sh.FRAMERATE_FLAG); - viddec_fw_vc1_set_disp_framerateind(&wi_de.vc1_sl_de, sh.FRAMERATEIND); - - viddec_fw_vc1_set_disp_aspect_ratio(&wi_de.vc1_sl_de, sh.ASPECT_RATIO); - viddec_fw_vc1_set_disp_frameratenr(&wi_de.vc1_sl_de, sh.seq_framerate_fraction.FRAMERATENR); - viddec_fw_vc1_set_disp_frameratedr(&wi_de.vc1_sl_de, sh.seq_framerate_fraction.FRAMERATEDR); - viddec_fw_vc1_set_disp_framerateexp(&wi_de.vc1_sl_de, sh.FRAMERATEEXP); - - viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(&wi_de.vc1_sl_de, sh.seq_aspect_size.ASPECT_HORIZ_SIZE); - viddec_fw_vc1_set_disp_aspect_ratio_vert_size(&wi_de.vc1_sl_de, sh.seq_aspect_size.ASPECT_VERT_SIZE); - viddec_fw_vc1_set_disp_color_prim(&wi_de.vc1_sl_de, sh.seq_color_format.COLOR_PRIM); - viddec_fw_vc1_set_disp_transfer_char(&wi_de.vc1_sl_de, sh.seq_color_format.TRANSFER_CHAR); - - result = viddec_pm_append_workitem(ctxt, &wi_de, false); - } - } - return status; } @@ -400,31 +318,6 @@ vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo) } } - // POPULATE WORKLOAD ITEM - { - viddec_workload_item_t wi; - - wi.vwi_type = VIDDEC_WORKLOAD_GOP_INFO; - - wi.vc1_ep.size = 0; - wi.vc1_ep.flags = 0; - wi.vc1_ep.pad = 0; - - viddec_fw_vc1_set_ep_size_flag(&wi.vc1_ep, ep.CODED_SIZE_FLAG); - viddec_fw_vc1_set_ep_horiz_size(&wi.vc1_ep, ep.ep_size.CODED_WIDTH); - viddec_fw_vc1_set_ep_vert_size(&wi.vc1_ep, ep.ep_size.CODED_HEIGHT); - - viddec_fw_vc1_set_ep_broken_link(&wi.vc1_ep, 
ep.ep_flags.BROKEN_LINK); - viddec_fw_vc1_set_ep_closed_entry(&wi.vc1_ep, ep.ep_flags.CLOSED_ENTRY); - viddec_fw_vc1_set_ep_panscan_flag(&wi.vc1_ep, ep.ep_flags.PANSCAN_FLAG); - viddec_fw_vc1_set_ep_range_mapy_flag(&wi.vc1_ep, ep.RANGE_MAPY_FLAG); - viddec_fw_vc1_set_ep_range_mapy(&wi.vc1_ep, ep.RANGE_MAPY); - viddec_fw_vc1_set_ep_range_mapuv_flag(&wi.vc1_ep, ep.RANGE_MAPUV_FLAG); - viddec_fw_vc1_set_ep_range_mapuv(&wi.vc1_ep, ep.RANGE_MAPUV); - - result = viddec_pm_append_workitem(ctxt, &wi, false); - } - DEB("ep: res: %dx%d\n", ep.ep_size.CODED_WIDTH, ep.ep_size.CODED_HEIGHT); DEB("md: after ep: res: %dx%d\n", md->width, md->height); return status; @@ -527,78 +420,3 @@ vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo) return status; } -/*------------------------------------------------------------------------------ - * This function parses the user data information as defined in SMPTE 421M annex F. - * It then appends that data to the workload. - * Assume the flush byte 0x80 is within the 3 bytes before next start code. - * let's put 1 byte per item first - *------------------------------------------------------------------------------ - */ -vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc) -{ - vc1_Status status = VC1_STATUS_OK; - uint32_t user_data; - viddec_workload_item_t wi; - uint32_t ud_id; - - /* find the scope based on start code sc */ - switch (sc) { - case vc1_SCSequenceUser: - wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA; - break; - case vc1_SCEntryPointUser: - wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA; - break; - case vc1_SCFrameUser: - wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA; - break; - case vc1_SCFieldUser: - wi.vwi_type = VIDDEC_WORKLOAD_FLD_USER_DATA; - break; - case vc1_SCSliceUser: - wi.vwi_type = VIDDEC_WORKLOAD_SLC_USER_DATA; - break; - default: - wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen - break; - } - - /* get identifier - 4 bytes*/ - // Extract this information but discard it for now - VC1_GET_BITS(32, ud_id); - - /* Read 1 byte of user data and store it in workitem for the current stream level (SEQ/GOP/PIC). - Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size, - append the workitem. This loop is repeated till all user data is extracted and appended. */ - wi.user_data.size = 0; - while (viddec_pm_get_bits(ctxt, &user_data, 8) != -1) - { - /* Store the valid byte in data payload */ - wi.user_data.data_payload[wi.user_data.size] = user_data; - wi.user_data.size++; - - /* When size exceeds payload size, append workitem and continue */ - if (wi.user_data.size >= 11) - { - viddec_pm_setup_userdata(&wi); - viddec_pm_append_workitem(ctxt, &wi,false); - wi.user_data.size = 0; - } - if (user_data == 0x80) // flushing byte - break; - } - /* If size is not 0, append remaining user data. 
*/ - if (wi.user_data.size > 0) - { - int i; - for (i=wi.user_data.size; i<11; i++) - { - wi.user_data.data_payload[i] = 0; - } - viddec_pm_setup_userdata(&wi); - viddec_pm_append_workitem(ctxt, &wi,false); - wi.user_data.size = 0; - } - - return(status); -} // vc1_ParseAndAppendUserData diff --git a/mixvbp/vbp_plugin/vc1/vc1parse.h b/mixvbp/vbp_plugin/vc1/vc1parse.h index e190fe1..e8c5167 100755 --- a/mixvbp/vbp_plugin/vc1/vc1parse.h +++ b/mixvbp/vbp_plugin/vc1/vc1parse.h @@ -130,11 +130,6 @@ void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser); - - -/* function to handle user data */ -vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc); - /*@}*/ #endif /* _VC1PARSE_H_. */ diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c b/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c index 4996e28..94e4d42 100755 --- a/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c +++ b/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c @@ -533,8 +533,7 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, if (bpp->imode != VC1_BITPLANE_RAW_MODE) { uint32_t* pl; - int sizeinbytes,nitems,i; - viddec_workload_item_t wi; + int sizeinbytes,i; uint32_t *bit_dw; pInfo->metadata.bp_raw[bpnum - VIDDEC_WORKLOAD_VC1_BITPLANE0] = false; @@ -544,22 +543,6 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, pl = bpp->databits; bit_dw = bpp->databits; - // How many payloads must be generated - nitems = (sizeinbytes + (sizeof(wi.data.data_payload) - 1)) / - sizeof(wi.data.data_payload); - - // Dump DMEM to an array of workitems - for ( i = 0; i < nitems; i++ ) - { - wi.vwi_type = bpnum; - wi.data.data_offset = (char *)pl - (char *)bit_dw; // offset within struct - - wi.data.data_payload[0] = pl[0]; - wi.data.data_payload[1] = pl[1]; - pl += 2; - - viddec_pm_append_workitem( ctxt, &wi, false); - } } #ifdef VBP diff --git a/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c b/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c index a9644d9..ad83a18 100755 --- a/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c +++ b/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c @@ -278,7 +278,6 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) case vc1_SCSliceUser: case vc1_SCFieldUser: {/* Handle user data */ - status = vc1_ParseAndAppendUserData(parent, sc); //parse and add items parser->sc_seen_since_last_wkld |= VC1_SC_UD; #ifdef VBP parser->start_code = VC1_SC_UD; diff --git a/mixvbp/vbp_plugin/vc1/viddec_vc1_workload.c b/mixvbp/vbp_plugin/vc1/viddec_vc1_workload.c deleted file mode 100755 index cf6fa7f..0000000 --- a/mixvbp/vbp_plugin/vc1/viddec_vc1_workload.c +++ /dev/null @@ -1,960 +0,0 @@ -/* Any workload management goes in this file */ - -#include "viddec_fw_debug.h" -#include "vc1.h" -#include "vc1parse.h" -#include "viddec_fw_workload.h" -#include -#include "viddec_pm_utils_bstream.h" - -/* this function returns workload frame types corresponding to VC1 PTYPES (frame types) - * VC1 frame types: can be found in vc1parse_common_defs.h - * workload frame types are in viddec_workload.h -*/ -static inline uint32_t vc1_populate_frame_type(uint32_t vc1_frame_type) -{ - uint32_t viddec_frame_type; - - switch (vc1_frame_type) - { - case VC1_I_FRAME: - viddec_frame_type = VIDDEC_FRAME_TYPE_I; - break; - case VC1_P_FRAME: - viddec_frame_type = VIDDEC_FRAME_TYPE_P; - break; - case VC1_B_FRAME: - viddec_frame_type = VIDDEC_FRAME_TYPE_B; - break; - case VC1_BI_FRAME: - viddec_frame_type = VIDDEC_FRAME_TYPE_BI; - break; - case VC1_SKIPPED_FRAME : - 
viddec_frame_type = VIDDEC_FRAME_TYPE_SKIP; - break; - default: - viddec_frame_type = VIDDEC_FRAME_TYPE_INVALID; - break; - } // switch on vc1 frame type - - return(viddec_frame_type); -} // vc1_populate_frame_type - -static void translate_parser_info_to_frame_attributes(void *parent, vc1_viddec_parser_t *parser) -{ - viddec_workload_t *wl = viddec_pm_get_header( parent ); - viddec_frame_attributes_t *attrs = &wl->attrs; - vc1_Info *info = &parser->info; - unsigned i; - - /* typical sequence layer and entry_point data */ - attrs->cont_size.height = info->metadata.height * 2 + 2; - attrs->cont_size.width = info->metadata.width * 2 + 2; - - /* frame type */ - /* we can have two fields with different types for field interlace coding mode */ - if (info->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) { - attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField1); - attrs->bottom_field_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField2); - } else { - attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTYPE); - attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; //unknown - } - - /* frame counter */ - attrs->vc1.tfcntr = info->picLayerHeader.TFCNTR; - - /* TFF, repeat frame, field */ - attrs->vc1.tff = info->picLayerHeader.TFF; - attrs->vc1.rptfrm = info->picLayerHeader.RPTFRM; - attrs->vc1.rff = info->picLayerHeader.RFF; - - /* PAN Scan */ - attrs->vc1.ps_present = info->picLayerHeader.PS_PRESENT; - attrs->vc1.num_of_pan_scan_windows = info->picLayerHeader.number_of_pan_scan_window; - for (i=0; i<attrs->vc1.num_of_pan_scan_windows; i++) { - attrs->vc1.pan_scan_window[i].hoffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].hoffset; - attrs->vc1.pan_scan_window[i].voffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].voffset; - attrs->vc1.pan_scan_window[i].width = info->picLayerHeader.PAN_SCAN_WINDOW[i].width; - attrs->vc1.pan_scan_window[i].height = info->picLayerHeader.PAN_SCAN_WINDOW[i].height; - } //end for i - - return; -} // translate_parser_info_to_frame_attributes - -/* sends VIDDEC_WORKLOAD_VC1_PAST_FRAME item */ -static inline void vc1_send_past_ref_items(void *parent) -{ - viddec_workload_item_t wi; - wi.vwi_type = VIDDEC_WORKLOAD_VC1_PAST_FRAME; - wi.ref_frame.reference_id = 0; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - viddec_pm_append_workitem( parent, &wi, false ); - return; -} - -/* send future frame item */ -static inline void vc1_send_future_ref_items(void *parent) -{ - viddec_workload_item_t wi; - wi.vwi_type = VIDDEC_WORKLOAD_VC1_FUTURE_FRAME; - wi.ref_frame.reference_id = 0; - wi.ref_frame.luma_phys_addr = 0; - wi.ref_frame.chroma_phys_addr = 0; - viddec_pm_append_workitem( parent, &wi, false ); - return; -} - -/* send reorder frame item to host - * future frame gets push to past */ -static inline void send_reorder_ref_items(void *parent) -{ - viddec_workload_item_t wi; - wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER; - wi.ref_reorder.ref_table_offset = 0; - wi.ref_reorder.ref_reorder_00010203 = 0x01010203; //put reference frame index 1 as reference index 0 - wi.ref_reorder.ref_reorder_04050607 = 0x04050607; // index 4,5,6,7 stay the same - viddec_pm_append_workitem( parent, &wi, false ); - return; -} // send_reorder_ref_items - - -/* sends VIDDEC_WORKLOAD_VC1_PAST_FRAME item */ -static inline void vc1_send_ref_fcm_items(void *parent, uint32_t past_fcm, uint32_t future_fcm) -{ - viddec_workload_item_t wi; - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_REF_FRAME_TYPE; - wi.vwi_payload[0]= 0; - wi.vwi_payload[1]=
past_fcm; - wi.vwi_payload[2]= future_fcm; - viddec_pm_append_workitem( parent, &wi, false ); - return; -} - - - -/* send reorder frame item to host - * future frame gets push to past */ -static inline void send_SEQ_ENTRY_registers(void *parent, vc1_viddec_parser_t *parser) -{ - uint32_t stream_format1 = 0; - uint32_t stream_format2 = 0; - uint32_t entrypoint1 = 0; - viddec_workload_item_t wi; - - vc1_metadata_t *md = &(parser->info.metadata); - - - - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, PROFILE, stream_format1, md->PROFILE); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, LEVEL, stream_format1, md->LEVEL); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, CHROMAFORMAT, stream_format1, md->CHROMAFORMAT); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, FRMRTQ, stream_format1, md->FRMRTQ); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, BITRTQ, stream_format1, md->BITRTQ); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, POSTPRO, stream_format1, md->POSTPROCFLAG); - - - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, PULLDOWN, stream_format2, md->PULLDOWN); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, INTERLACE, stream_format2, md->INTERLACE); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, TFCNTRFLAG, stream_format2, md->TFCNTRFLAG); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, FINTERPFLAG, stream_format2, md->FINTERPFLAG); - BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, PSF, stream_format2, md->PSF); - - - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, BROKEN_LINK, entrypoint1, md->BROKEN_LINK); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, CLOSED_ENTRY, entrypoint1, md->CLOSED_ENTRY); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, PANSCAN_FLAG, entrypoint1, md->PANSCAN_FLAG); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, REFDIST_FLAG, entrypoint1, md->REFDIST_FLAG); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, LOOPFILTER, entrypoint1, md->LOOPFILTER); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, FASTUVMC, entrypoint1, md->FASTUVMC); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_MV, entrypoint1, md->EXTENDED_MV); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, DQUANT, entrypoint1, md->DQUANT); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, VS_TRANSFORM, entrypoint1, md->VSTRANSFORM); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, OVERLAP, entrypoint1, md->OVERLAP); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, QUANTIZER, entrypoint1, md->QUANTIZER); - BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_DMV, entrypoint1, md->EXTENDED_DMV); - - - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SEQ_ENTRY; - - - wi.vwi_payload[0] = stream_format1; - wi.vwi_payload[1] = stream_format2; - wi.vwi_payload[2] = entrypoint1; - - viddec_pm_append_workitem( parent, &wi, false ); - return; -} // send_reorder_ref_items - - -/* send reorder frame item to host - * future frame gets push to past */ -static inline void send_SIZE_AND_AP_RANGEMAP_registers(void *parent, vc1_viddec_parser_t *parser) -{ - uint32_t coded_size = 0; - uint32_t ap_range_map = 0; - - viddec_workload_item_t wi; - - vc1_metadata_t *md = &(parser->info.metadata); - - - BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, WIDTH, coded_size, md->width); - BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, HEIGHT, coded_size, md->height); - - - /* if range reduction is indicated at seq. 
layer, populate range reduction registers for the frame*/ - if (VC1_PROFILE_ADVANCED == md->PROFILE) - { - - - BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, ap_range_map, md->RANGE_MAPY_FLAG); - BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y, ap_range_map, md->RANGE_MAPY); - BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, ap_range_map, md->RANGE_MAPUV_FLAG); - BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV, ap_range_map, md->RANGE_MAPUV); - - - - - } - else - { - ap_range_map = 0; - } - - - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SIZE_AND_AP_RANGEMAP; - - - wi.vwi_payload[0] = 0; - wi.vwi_payload[1] = coded_size; - wi.vwi_payload[2] = ap_range_map; - - viddec_pm_append_workitem( parent, &wi, false ); - return; -} // send_reorder_ref_items - - - -/* send reorder frame item to host - * future frame gets push to past */ -static inline void send_SLICE_FRAME_TYPE_INFO_registers(void *parent, vc1_viddec_parser_t *parser) -{ - uint32_t alt_frame_type = 0; - uint32_t frame_type = 0; - - vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); - viddec_workload_item_t wi; - - vc1_metadata_t *md = &(parser->info.metadata); - - - BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, FCM, frame_type, pic->FCM); - BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, PTYPE, frame_type, pic->PTYPE); - - alt_frame_type = frame_type; - - if (VC1_PROFILE_ADVANCED == md->PROFILE) - { - if ( (VC1_P_FRAME == pic->PTYPE)||(VC1_B_FRAME == pic->PTYPE) ) - { - BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, pic->PQUANT); - } - } - else - { - if ( VC1_SKIPPED_FRAME== pic->PTYPE) - { - BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, 0); - } else { - BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, pic->PQUANT); - } - } - - - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_FRAME_TYPE_INFO; - - - wi.vwi_payload[0] = 0; - wi.vwi_payload[1] = frame_type; - wi.vwi_payload[2] = alt_frame_type; - - viddec_pm_append_workitem( parent, &wi, false ); - return; -} // send_reorder_ref_items - -/* send reorder frame item to host - * future frame gets push to past */ -static inline void send_SLICE_CONTROL_INFO_registers(void *parent, vc1_viddec_parser_t *parser) -{ - uint32_t recon_control = 0; - uint32_t mv_control = 0; - uint32_t blk_control = 0; - - vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); - viddec_workload_item_t wi; - - int is_previous_ref_rr=0; - - vc1_metadata_t *md = &(parser->info.metadata); - - - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, RNDCTRL, recon_control, md->RNDCTRL); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UVSAMP, recon_control, pic->UVSAMP); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQUANT, recon_control, pic->PQUANT); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, HALFQP, recon_control, pic->HALFQP); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UNIFORM_QNT, recon_control, pic->UniformQuant); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, POSTPROC, recon_control, pic->POSTPROC); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, CONDOVER, recon_control, pic->CONDOVER); - BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQINDEX_LE8, recon_control, (pic->PQINDEX <= 8)); - - /* Get the range reduced status of the previous frame */ - switch (pic->PTYPE) - { - case VC1_P_FRAME: - { - is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].rr_frm; - break; - } - case VC1_B_FRAME: - { - is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].rr_frm; - break; - } - default: - { - break; - } - } - - if (pic->RANGEREDFRM) - { - - if (!is_previous_ref_rr) - { - BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, 
recon_control, 1); - BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, recon_control, 1); - } - } - else - { - /* if current frame is not RR but previous was RR, scale up the reference frame ( RANGE_REF_RED_TYPE = 0) */ - if (is_previous_ref_rr) - { - BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, recon_control, 1); - BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, recon_control, 0); - } - } // end for RR upscale - - - - - - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVRANGE, mv_control, pic->MVRANGE); - if ( pic->MVMODE == VC1_MVMODE_INTENSCOMP) - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, mv_control, pic->MVMODE2); - else - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, mv_control, pic->MVMODE); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVTAB, mv_control, pic->MVTAB); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, DMVRANGE, mv_control, pic->DMVRANGE); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MV4SWITCH, mv_control, pic->MV4SWITCH); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MBMODETAB, mv_control, pic->MBMODETAB); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, NUMREF, mv_control, - pic->NUMREF || ((pic->PTYPE == VC1_B_FRAME) && ( pic->FCM == VC1_FCM_FIELD_INTERLACE ) )); - BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, REFFIELD, mv_control, pic->REFFIELD); - - - - // BLOCK CONTROL REGISTER Offset 0x2C - BF_WRITE( VC1_0_SEQPIC_BLOCK_CONTROL, CBPTAB, blk_control, pic->CBPTAB); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTMFB, blk_control, pic->TTMBF); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTFRM, blk_control, pic->TTFRM); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV2BPTAB, blk_control, pic->MV2BPTAB); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV4BPTAB, blk_control, pic->MV4BPTAB); - if ((pic->CurrField == 1) && (pic->SLICE_ADDR)) - { - int mby = md->height * 2 + 2; - mby = (mby + 15 ) / 16; - pic->SLICE_ADDR -= (mby/2); - } - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, INITIAL_MV_Y, blk_control, pic->SLICE_ADDR); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID2, blk_control, md->bp_raw[0]); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID1, blk_control, md->bp_raw[1]); - BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID0, blk_control, md->bp_raw[2]); - - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_CONTROL_INFO; - - - wi.vwi_payload[0] = recon_control; - wi.vwi_payload[1] = mv_control; - wi.vwi_payload[2] = blk_control; - - viddec_pm_append_workitem( parent, &wi, false ); - return; -} // send_reorder_ref_items - -/* send reorder frame item to host - * future frame gets push to past */ -static inline void send_SLICE_OTHER_INFO_registers(void *parent, vc1_viddec_parser_t *parser) -{ - uint32_t trans_data = 0; - uint32_t vop_dquant = 0; - uint32_t ref_bfraction = 0; - - vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); - viddec_workload_item_t wi; - - vc1_metadata_t *md = &(parser->info.metadata); - - BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_DEN, ref_bfraction, pic->BFRACTION_DEN); - BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_NUM, ref_bfraction, pic->BFRACTION_NUM); - BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, REFDIST, ref_bfraction, md->REFDIST); - - if (md->DQUANT) - { - if (pic->PQDIFF == 7) - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, vop_dquant, pic->ABSPQ); - else if (pic->DQUANTFRM == 1) - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, vop_dquant, pic->PQUANT + pic->PQDIFF + 1); - } - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQUANTFRM, vop_dquant, pic->DQUANTFRM); - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, 
DQPROFILE, vop_dquant, pic->DQPROFILE); - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQES, vop_dquant, pic->DQSBEDGE); - BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQBILEVEL, vop_dquant, pic->DQBILEVEL); - - BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM, trans_data, pic->TRANSACFRM); - BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM2, trans_data, pic->TRANSACFRM2); - BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSDCTAB, trans_data, pic->TRANSDCTAB); - - - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_OTHER_INFO; - - - wi.vwi_payload[0] = trans_data; - wi.vwi_payload[1] = vop_dquant; - wi.vwi_payload[2] = ref_bfraction; - - viddec_pm_append_workitem( parent, &wi, false ); - return; -} // send_reorder_ref_items - - - -/* send reorder frame item to host - * future frame gets push to past */ -static inline void send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(void *parent, vc1_viddec_parser_t *parser) -{ - uint32_t imgstruct = 0; - uint32_t fieldref_ctrl_id = 0; - uint32_t smp_rangemap = 0; - - vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); - viddec_workload_item_t wi; - - vc1_metadata_t *md = &(parser->info.metadata); - - if ( pic->FCM == VC1_FCM_FIELD_INTERLACE ) { - BF_WRITE(VC1_0_SEQPIC_IMAGE_STRUCTURE, IMG_STRUC, imgstruct, (pic->BottomField) ? 2 : 1); - } - - BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, TOP_FIELD, fieldref_ctrl_id, pic->BottomField); - BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, SECOND_FIELD, fieldref_ctrl_id, pic->CurrField); - if (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) - { - BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, fieldref_ctrl_id, 1); - } - else - { - BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, fieldref_ctrl_id, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[pic->CurrField]); - } - - if (VC1_PROFILE_ADVANCED != md->PROFILE) - { - if (pic->RANGEREDFRM) - { - //BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, smp_rangemap, md->RANGE_MAPY_FLAG); - //BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, smp_rangemap, md->RANGE_MAPUV_FLAG); - smp_rangemap = 0x11; - } - - } - - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO; - - - wi.vwi_payload[0] = imgstruct; - wi.vwi_payload[1] = fieldref_ctrl_id; - wi.vwi_payload[2] = smp_rangemap; - - viddec_pm_append_workitem( parent, &wi, false ); - return; -} // send_reorder_ref_items - - -/* send reorder frame item to host - * future frame gets push to past */ -static inline void send_INT_COM_registers(void *parent, vc1_viddec_parser_t *parser) -{ - uint32_t intcomp_fwd_top = 0; - uint32_t intcomp_fwd_bot = 0; - uint32_t intcomp_bwd_top = 0; - uint32_t intcomp_bwd_bot = 0; - uint32_t intcomp_cur = 0; - - uint32_t POS_2nd_INTCOMP = 13; - uint32_t MASK_1st_INTCOMP = 0x1fff; - uint32_t MASK_2nd_INTCOMP = 0x3ffe000; - - vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader); - viddec_workload_item_t wi; - - vc1_metadata_t *md = &(parser->info.metadata); - - - - if (VC1_SKIPPED_FRAME == pic->PTYPE) - { - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top =0; - return; - } - - if ( VC1_FCM_FIELD_INTERLACE != pic->FCM ) - { - - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, intcomp_cur, 1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp_cur, pic->LUMSCALE); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp_cur, pic->LUMSHIFT); - - if ( !((pic->MVMODE == VC1_MVMODE_INTENSCOMP) || (pic->INTCOMP)) ) - intcomp_cur = 0; - - if ( (VC1_BI_FRAME==pic->PTYPE)||(VC1_B_FRAME==pic->PTYPE) ) - { - 
parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = 0; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = 0; - - intcomp_bwd_top = parser->intcomp_top[0]; - intcomp_bwd_bot = parser->intcomp_bot[0]; - intcomp_fwd_bot = parser->intcomp_bot[1]; - - - if ( parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != (-1) ) - { - if (VC1_SKIPPED_FRAME != parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].type) - intcomp_fwd_top = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].intcomp_top; - } - else - { - if (VC1_SKIPPED_FRAME != parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].type) - intcomp_fwd_top = parser->intcomp_top[1]; - } - } - else - { //I,P TYPE - - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; - - if (VC1_FCM_FIELD_INTERLACE == parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm) - { - intcomp_fwd_top = parser->intcomp_top[1]; - intcomp_fwd_top |= intcomp_cur << POS_2nd_INTCOMP; - - intcomp_fwd_bot = parser->intcomp_bot[1]; - intcomp_fwd_bot |= intcomp_cur << POS_2nd_INTCOMP; - } - else - { - intcomp_fwd_top = intcomp_cur;// << POS_2nd_INTCOMP; - intcomp_fwd_bot = 0; - } - } - } - else - { - //FIELD INTERLACE - //if(0!=md->INTCOMPFIELD) - //No debugging - - if (md->INTCOMPFIELD == VC1_INTCOMP_BOTTOM_FIELD) - { - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, intcomp_cur, 1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, intcomp_cur, md->LUMSCALE2); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, intcomp_cur, md->LUMSHIFT2); - } - else - { - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, intcomp_cur, 1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp_cur, pic->LUMSCALE); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp_cur, pic->LUMSHIFT); - } - - if (md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD) - { - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, intcomp_cur, 1); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, intcomp_cur, md->LUMSCALE2); - BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, intcomp_cur, md->LUMSHIFT2); - } - - if (pic->MVMODE != VC1_MVMODE_INTENSCOMP) - { - intcomp_cur = 0; - } - - if (pic->CurrField == 0) - { - if (pic->TFF) - { - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; - } - else - { - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp_cur; - } - } - else - { - if (pic->TFF) - { - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp_cur; - } - else - { - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur; - } - } - - if (pic->CurrField == 1) - { //SECOND FIELD - - if (VC1_B_FRAME != pic->PTYPE) - { - if (pic->TFF) - { - intcomp_bwd_top = intcomp_cur & MASK_1st_INTCOMP; - - intcomp_fwd_bot = (parser->intcomp_bot[1] & MASK_2nd_INTCOMP) >> POS_2nd_INTCOMP; //??????? 
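An aside on the bit layout the masks above assume (a sketch, not code from this tree): an intensity-compensation record is 13 bits wide -- the INT_COMP flag plus the 6-bit LUMSCALE and 6-bit LUMSHIFT fields -- and two records share one 32-bit word, the second starting at bit 13. That is exactly what POS_2nd_INTCOMP = 13, MASK_1st_INTCOMP = 0x1fff and MASK_2nd_INTCOMP = 0x1fff << 13 = 0x3ffe000 encode:

static unsigned intcomp_pack2(unsigned first13, unsigned second13)
{
    /* first record in bits 0..12, second record in bits 13..25 */
    return (first13 & 0x1fff) | ((second13 & 0x1fff) << 13);
}

static unsigned intcomp_second(unsigned packed)
{
    return (packed & 0x3ffe000) >> 13;   /* mirrors the >> POS_2nd_INTCOMP reads above */
}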
- intcomp_fwd_bot |= (intcomp_cur & MASK_2nd_INTCOMP); - - intcomp_fwd_top = parser->intcomp_top[1]; - } - else - { - intcomp_bwd_bot= (intcomp_cur & MASK_2nd_INTCOMP)>>POS_2nd_INTCOMP; - - intcomp_fwd_top = (parser->intcomp_top[1] & MASK_2nd_INTCOMP) >> POS_2nd_INTCOMP; - intcomp_fwd_top |= (intcomp_cur&MASK_1st_INTCOMP) << POS_2nd_INTCOMP; - - intcomp_fwd_bot = parser->intcomp_bot[1]; - } - } - else - { //B TYPE - intcomp_fwd_top = parser->intcomp_top[1]; - intcomp_fwd_bot = parser->intcomp_bot[1]; - - intcomp_bwd_top = parser->intcomp_top[0]; - intcomp_bwd_bot = parser->intcomp_bot[0]; - } - } - else - { //FIRST FIELD - - if ( (VC1_B_FRAME==pic->PTYPE)||(VC1_BI_FRAME==pic->PTYPE) ) - { - if (VC1_SKIPPED_FRAME!=parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].type) - { - intcomp_fwd_top = parser->intcomp_top[1]; - intcomp_fwd_bot = parser->intcomp_bot[1]; - } - - intcomp_bwd_top = parser->intcomp_top[0]; - intcomp_bwd_bot = parser->intcomp_bot[0]; - - } - else - { //I,P TYPE - - intcomp_fwd_top = parser->intcomp_top[1] & MASK_1st_INTCOMP; - intcomp_fwd_top |= (intcomp_cur&MASK_1st_INTCOMP)<<POS_2nd_INTCOMP; - - intcomp_fwd_bot = parser->intcomp_bot[1] & MASK_1st_INTCOMP; - intcomp_fwd_bot |= (intcomp_cur & MASK_2nd_INTCOMP); - } //pic->PTYPE == I,P TYPE - } //pic->CurrField == 0 - } //VC1_FCM_FIELD_INTERLACE != pic->FCM - - if ( (VC1_B_FRAME != pic->PTYPE) && (VC1_BI_FRAME != pic->PTYPE) ) - { - parser->intcomp_top[1] = intcomp_fwd_top; - parser->intcomp_bot[1] = intcomp_fwd_bot; - - parser->intcomp_top[0] = intcomp_bwd_top; - parser->intcomp_bot[0] = intcomp_bwd_bot; - } - - //OS_INFO("intcomp_fwd_top = %d\n", intcomp_fwd_top); - //OS_INFO("intcomp_fwd_bot = %d\n", intcomp_fwd_bot); - - - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_INT_COM_FW; - - wi.vwi_payload[0] = 0; - wi.vwi_payload[1] = intcomp_fwd_top; - wi.vwi_payload[2] = intcomp_fwd_bot; - - viddec_pm_append_workitem( parent, &wi, false ); - - wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_INT_COM_BW; - - wi.vwi_payload[0] = 0; - wi.vwi_payload[1] = intcomp_bwd_top; - wi.vwi_payload[2] = intcomp_bwd_bot; - - viddec_pm_append_workitem( parent, &wi, false ); - - - return; -} // send_reorder_ref_items - - -/** update workload with more workload items for ref and update values to store...
- */ -void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser) -{ - vc1_metadata_t *md = &(parser->info.metadata); - viddec_workload_t *wl = viddec_pm_get_header(parent); - int frame_type = parser->info.picLayerHeader.PTYPE; - int frame_id = 1; // new reference frame is assigned index 1 - - /* init */ - memset(&parser->spr, 0, sizeof(parser->spr)); - wl->is_reference_frame = 0; - - /* set flag - extra output frame needed for range adjustment (range mapping or range reduction) */ - if (parser->info.metadata.RANGE_MAPY_FLAG || - parser->info.metadata.RANGE_MAPUV_FLAG || - parser->info.picLayerHeader.RANGEREDFRM) - { - wl->is_reference_frame |= WORKLOAD_FLAGS_RA_FRAME; - } - - LOG_CRIT("vc1_start_new_frame: frame_type=%d \n",frame_type); - - parser->is_reference_picture = ((VC1_B_FRAME != frame_type) && (VC1_BI_FRAME != frame_type)); - - /* reference / anchor frames processing - * we need to send reorder before reference frames */ - if (parser->is_reference_picture) - { - /* one frame has been sent */ - if (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != -1) - { - /* there is a frame in the reference buffer, move it to the past */ - send_reorder_ref_items(parent); - } - } - - /* send workitems for reference frames */ - switch ( frame_type ) - { - case VC1_B_FRAME: - { - vc1_send_past_ref_items(parent); - vc1_send_future_ref_items(parent); - vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].fcm, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm); - break; - } - case VC1_SKIPPED_FRAME: - { - wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME; - vc1_send_past_ref_items(parent); - vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm, vc1_PictureFormatNone); - break; - } - case VC1_P_FRAME: - { - vc1_send_past_ref_items( parent); - vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm, vc1_PictureFormatNone); - break; - } - default: - break; - } - - /* reference / anchor frames from previous code - * we may need it for frame reduction */ - if (parser->is_reference_picture) - { - wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (frame_id & WORKLOAD_REFERENCE_FRAME_BMASK); - - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].id = frame_id; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].fcm = parser->info.picLayerHeader.FCM; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[0] = (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME); - if (parser->info.picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) - { - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[1] = (parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME); - } - else - { - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[1] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[0]; - } - - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].type = parser->info.picLayerHeader.PTYPE; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_en = md->RANGERED; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_frm = parser->info.picLayerHeader.RANGEREDFRM; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].tff = parser->info.picLayerHeader.TFF; - - LOG_CRIT("anchor[0] = %d, anchor[1] = %d", - parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[0], - parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[1] ); - } - - if ( parser->info.picLayerHeader.PTYPE == VC1_SKIPPED_FRAME ) - { - translate_parser_info_to_frame_attributes( parent, parser ); - return; - } - - translate_parser_info_to_frame_attributes( parent, parser ); - - - send_SEQ_ENTRY_registers(parent, parser); -
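Each send_*_registers() helper in this file composes 32-bit register images with BF_WRITE(REG, FIELD, dst, val); the macro's definition lives outside this diff. A plausible shape for such a bitfield writer, shown purely as an assumption (BF_WRITE_SKETCH and its explicit offset/mask arguments are invented for illustration -- the real macro presumably derives them from the REG/FIELD names):

#define BF_WRITE_SKETCH(offset, mask, dst, val) \
    ((dst) = ((dst) & ~((unsigned)(mask) << (offset))) | \
             (((unsigned)(val) & (mask)) << (offset)))

/* e.g. placing a 5-bit quantizer value 21 at bits 3..7 of a register image: */
/*   unsigned recon = 0;  BF_WRITE_SKETCH(3, 0x1f, recon, 21);   -> recon == 21 << 3 */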
send_SIZE_AND_AP_RANGEMAP_registers(parent, parser); - send_SLICE_FRAME_TYPE_INFO_registers(parent, parser); - send_SLICE_CONTROL_INFO_registers(parent, parser); - send_SLICE_OTHER_INFO_registers(parent, parser); - send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser); - send_INT_COM_registers(parent, parser); - - { - viddec_workload_item_t wi; - uint32_t bit, byte; - uint8_t is_emul = 0; - - viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul); - - // Send current bit offset and current slice - wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET; - - // If slice data starts in the middle of the emulation prevention sequence - - // Special Case 1----[is_emul = 1]: - // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data - // to the decoder starting at the first byte of 0s so that the decoder can detect the - // emulation prevention. But the actual data starts at offset 8 in this bit sequence. - - // Special Case 2----[is_emul = 2]: - // If slice data starts in the middle of the emulation prevention sequence - - // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need to read out this byte. - // - - wi.vwi_payload[0] = bit + (is_emul*8) ; - wi.vwi_payload[1] = 0xdeaddead; - wi.vwi_payload[2] = 0xdeaddead; - viddec_pm_append_workitem( parent, &wi, false ); - } - - - viddec_pm_append_pixeldata( parent ); - - return; -} // vc1_start_new_frame - - -void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser) -{ - - send_SLICE_FRAME_TYPE_INFO_registers(parent, parser); - send_SLICE_CONTROL_INFO_registers(parent, parser); - send_SLICE_OTHER_INFO_registers(parent, parser); - send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser); - send_INT_COM_registers(parent, parser); - - { - viddec_workload_item_t wi; - uint32_t bit, byte; - uint8_t is_emul = 0; - - viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul); - - - // Send current bit offset and current slice - wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET; - // If slice data starts in the middle of the emulation prevention sequence - - // Special Case 1----[is_emul = 1]: - // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data - // to the decoder starting at the first byte of 0s so that the decoder can detect the - // emulation prevention. But the actual data starts at offset 8 in this bit sequence. - - // Special Case 2----[is_emul = 2]: - // If slice data starts in the middle of the emulation prevention sequence - - // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need to read out this byte.
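The escape described above is the 00 00 03 emulation-prevention byte of VC-1 advanced profile (the same device H.264 uses); a slice whose payload begins inside that escape has its reported bit offset bumped by is_emul*8. A minimal unescape loop makes the two cases concrete (a sketch only; it ignores the refinement that the 03 acts as an escape only before certain byte values):

#include <stddef.h>

static size_t strip_emulation_prevention(const unsigned char *in, size_t n,
                                         unsigned char *out)
{
    size_t i, o = 0, zeros = 0;
    for (i = 0; i < n; i++) {
        if (zeros >= 2 && in[i] == 0x03) {   /* 00 00 03 xx -> drop the 03 */
            zeros = 0;
            continue;
        }
        zeros = (in[i] == 0x00) ? zeros + 1 : 0;
        out[o++] = in[i];
    }
    return o;   /* payload length; every dropped 03 shifts later offsets by 8 bits */
}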
- // - - - wi.vwi_payload[0] = bit + (is_emul*8); - wi.vwi_payload[1] = 0xdeaddead; - wi.vwi_payload[2] = 0xdeaddead; - viddec_pm_append_workitem( parent, &wi, false ); - } - - viddec_pm_append_pixeldata( parent ); - - return; - -} - - -void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser) -{ - send_SLICE_FRAME_TYPE_INFO_registers(parent, parser); - send_SLICE_CONTROL_INFO_registers(parent, parser); - send_SLICE_OTHER_INFO_registers(parent, parser); - //send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser); - //send_INT_COM_registers(parent, parser); - - { - viddec_workload_item_t wi; - uint32_t bit, byte; - uint8_t is_emul = 0; - - viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul); - - // Send current bit offset and current slice - wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET; - - // If slice data starts in the middle of the emulation prevention sequence - - // Special Case 1----[is_emul = 1]: - // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data - // to the decoder starting at the first byte of 0s so that the decoder can detect the - // emulation prevention. But the actual data starts at offset 8 in this bit sequence. - - // Special Case 2----[is_emul = 2]: - // If slice data starts in the middle of the emulation prevention sequence - - // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need to read out this byte. - // - - wi.vwi_payload[0] = bit + (is_emul*8); - wi.vwi_payload[1] = 0xdeaddead; - wi.vwi_payload[2] = 0xdeaddead; - viddec_pm_append_workitem( parent, &wi, false ); - } - - viddec_pm_append_pixeldata( parent ); - - return; -} - - -void vc1_end_frame(vc1_viddec_parser_t *parser) -{ - /* update status of reference frames */ - if (parser->is_reference_picture) - { - parser->ref_frame[VC1_REF_FRAME_T_MINUS_2] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1]; - parser->ref_frame[VC1_REF_FRAME_T_MINUS_1] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_0]; - } - - return; -} // vc1_end_frame - diff --git a/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c b/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c index 03726c6..4eb5919 100755 --- a/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c +++ b/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c @@ -25,7 +25,6 @@ #include "viddec_parser_ops.h" -#include "viddec_fw_workload.h" #include "viddec_pm.h" #include -- cgit v1.2.3 From 10f1922a7861d4d16f60716ffe6f1e17f9ebfda5 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Thu, 10 Oct 2013 13:05:13 +0800 Subject: check hardware capability before vaCreateConfig BZ: 143181 check hardware capability before vaCreateConfig; for now it is only implemented for H263 Change-Id: Ia5bd841153e5085f6935b4ab00061a374dbec37d Signed-off-by: ywan171 Reviewed-on: http://android.intel.com:8080/137398 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videodecoder/VideoDecoderBase.cpp | 7 +++++++ videodecoder/VideoDecoderBase.h | 1 + videodecoder/VideoDecoderMPEG4.cpp | 25 +++++++++++++++++++++++++ videodecoder/VideoDecoderMPEG4.h | 4 ++++ 4 files changed, 37 insertions(+) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 6509e01..afd68df 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -769,6 +769,10 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i CHECK_VA_STATUS("vaInitialize"); if ((int32_t)profile != VAProfileSoftwareDecoding) { + + status = isHardwareSupported(profile); + CHECK_STATUS("isHardwareSupported"); + +#ifdef
USE_AVC_SHORT_FORMAT status = getCodecSpecificConfigs(profile, &mVAConfig); CHECK_STATUS("getCodecSpecificAttributes"); @@ -1307,4 +1311,7 @@ Decode_Status VideoDecoderBase::getCodecSpecificConfigs( return DECODE_SUCCESS; } #endif +Decode_Status VideoDecoderBase::isHardwareSupported(VAProfile profile) { + return DECODE_SUCCESS; +} diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index eab4058..1f69cf1 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -92,6 +92,7 @@ protected: Decode_Status setParserType(_vbp_parser_type type); virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID *config); #endif + virtual Decode_Status isHardwareSupported(VAProfile profile); private: Decode_Status mapSurface(void); Decode_Status getRawDataFromSurface(void); diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index 7bd9f22..be3c662 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -30,6 +30,7 @@ VideoDecoderMPEG4::VideoDecoderMPEG4(const char *mimeType) : VideoDecoderBase(mimeType, VBP_MPEG4), mLastVOPTimeIncrement(0), mExpectingNVOP(false), + mIsShortHeader(false), mSendIQMatrixBuf(false), mLastVOPCodingType(MP4_VOP_TYPE_I) { } @@ -522,6 +523,8 @@ Decode_Status VideoDecoderMPEG4::startVA(vbp_data_mp42 *data) { vaProfile = VAProfileMPEG4Simple; } + mIsShortHeader = data->codec_data.short_video_header; + return VideoDecoderBase::setupVA(MP4_SURFACE_NUMBER, vaProfile); } @@ -573,3 +576,25 @@ void VideoDecoderMPEG4::updateFormatInfo(vbp_data_mp42 *data) { //mVideoFormatInfo.bitrate = data->codec_data.bit_rate; mVideoFormatInfo.valid = true; } + +Decode_Status VideoDecoderMPEG4::isHardwareSupported(VAProfile profile) { + if (!mIsShortHeader) { + // TODO: add support for MPEG4 in the future; + return DECODE_SUCCESS; + } + + VAStatus vaStatus; + VAConfigAttrib cfgAttribs[2]; + cfgAttribs[0].type = VAConfigAttribMaxPictureWidth; + cfgAttribs[1].type = VAConfigAttribMaxPictureHeight; + vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileH263Baseline, + VAEntrypointVLD, cfgAttribs, 2); + CHECK_VA_STATUS("vaGetConfigAttributes"); + if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) { + ETRACE("hardware supports resolution %d * %d smaller than the clip resolution %d * %d", + cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height); + return DECODE_DRIVER_FAIL; + } + + return DECODE_SUCCESS; +} diff --git a/videodecoder/VideoDecoderMPEG4.h b/videodecoder/VideoDecoderMPEG4.h index 234eaac..5f641ee 100644 --- a/videodecoder/VideoDecoderMPEG4.h +++ b/videodecoder/VideoDecoderMPEG4.h @@ -38,6 +38,9 @@ public: virtual void flush(void); virtual Decode_Status decode(VideoDecodeBuffer *buffer); +protected: + virtual Decode_Status isHardwareSupported(VAProfile profile); + private: Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mp42 *data); Decode_Status beginDecodingFrame(vbp_data_mp42 *data); @@ -65,6 +68,7 @@ private: bool mSendIQMatrixBuf; // indicate if iq_matrix_buffer is sent to driver int32_t mLastVOPCodingType; bool mIsSyncFrame; // indicate if it is SyncFrame in container + bool mIsShortHeader; // indicate if it is short header format VideoExtensionBuffer mExtensionBuffer; PackedFrameData mPackedFrame; }; -- cgit v1.2.3 From 36a15982811745aed9806da84618cba5730bf7c2 Mon Sep 17 00:00:00 2001 From: "Gu, Wangyi" Date: Thu, 26 Sep 2013 21:35:37 
+0800 Subject: libmix: MPEG4 parser refinement BZ: 131068 This patch makes the following refinements: 1. replace DEB with xTRACE 2. remove legacy workload code 3. tiny optimizations based on the code logic Change-Id: Iecfe68f00ff876a29c7b0307f6925dc6bdea8fe3 Signed-off-by: Gu, Wangyi Reviewed-on: http://android.intel.com:8080/138144 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- mixvbp/vbp_manager/vbp_mp42_parser.c | 39 +++--- mixvbp/vbp_plugin/mp4/Android.mk | 26 ++-- mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c | 43 +++--- mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h | 4 +- mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c | 13 +- .../vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c | 43 +++--- .../vbp_plugin/mp4/viddec_mp4_videoobjectplane.c | 11 +- mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c | 9 +- mixvbp/vbp_plugin/mp4/viddec_parse_sc_mp4.c | 151 --------------------- 9 files changed, 98 insertions(+), 241 deletions(-) delete mode 100755 mixvbp/vbp_plugin/mp4/viddec_parse_sc_mp4.c diff --git a/mixvbp/vbp_manager/vbp_mp42_parser.c b/mixvbp/vbp_manager/vbp_mp42_parser.c index 6eff5a0..c2be912 100755 --- a/mixvbp/vbp_manager/vbp_mp42_parser.c +++ b/mixvbp/vbp_manager/vbp_mp42_parser.c @@ -105,16 +105,8 @@ uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext) ETRACE ("Failed to set entry point." ); return VBP_LOAD; } -#ifdef VBP + pcontext->parser_ops->parse_sc = NULL; -#else - pcontext->parser_ops->parse_sc = dlsym(pcontext->fd_parser, "viddec_parse_sc_mp4"); - if (pcontext->parser_ops->parse_sc == NULL) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } -#endif pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_mp4_parse"); if (pcontext->parser_ops->parse_syntax == NULL) { @@ -128,16 +120,6 @@ uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext) ETRACE ("Failed to set entry point." ); return VBP_LOAD; } -#ifdef VBP - pcontext->parser_ops->is_wkld_done = NULL; -#else - pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_mp4_wkld_done"); - if (pcontext->parser_ops->is_wkld_done == NULL) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } -#endif /* entry point not needed */ pcontext->parser_ops->flush = NULL; @@ -663,6 +645,21 @@ void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index) vbp_fill_slice_data(pcontext, list_index); } +/* Parse for an SC code of pattern 0x00 0x00 0xXX in the current buffer. Returns either sc found or success. + The context is updated with the current phase and the sc_code position in the buffer. + + What is phase? Phase is a value in [0-4]; we keep track of consecutive '0's with it. + Any time a '0' is found it is incremented by 1 (up to 2) and reset to 0 if a zero is not found. + If a 0xXX code is found and the current phase is 2, it is changed to 3, which means we found the pattern + we are looking for. It is incremented to 4 once we see a byte after this pattern. + + For MP4 there are two start-code patterns, LVH & SVH. LVH is the same as in other codecs (00 00 01); SVH, + a.k.a. H263, is (00 00 8X). So we have to look for both kinds of start codes. The spec doesn't + explicitly say whether both of them can exist in a stream, so the current implementation assumes + that only one of them is present in a given stream, to simplify the implementation. The reason it can + get complicated is that the resync marker in LVH can potentially be (00 00 8), which will cause a false detection + of the SVH start code.
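A minimal, self-contained sketch of the phase logic described above (an illustration only, not the scanner the parser actually loads; it assumes the SVH third-byte test is (byte & 0xFC) == 0x80, matching the mask used in the deleted viddec_parse_sc_mp4.c further below):

    #include <stdint.h>
    #include <stddef.h>

    /* Scan buf[0..len) for 00 00 01 (LVH) or 00 00 8X (SVH). Returns the
       offset of the third start-code byte, or -1 if no pattern is found. */
    static int find_mp4_start_code(const uint8_t *buf, size_t len)
    {
        unsigned phase = 0;                           /* consecutive 0x00 bytes seen, capped at 2 */
        size_t i;
        for (i = 0; i < len; i++) {
            if (buf[i] == 0x00) {
                if (phase < 2)
                    phase++;                          /* count zeros toward the two-byte prefix */
            } else if ((phase == 2) &&
                       ((buf[i] == 0x01) ||           /* LVH start code: 00 00 01 */
                        ((buf[i] & 0xFC) == 0x80))) { /* SVH (H263) start code: 00 00 8X */
                return (int)i;                        /* phase 3 in the comment's terms */
            } else {
                phase = 0;                            /* any other byte resets the zero run */
            }
        }
        return -1;
    }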
+*/ uint32 vbp_get_sc_pos_mp42( uint8 *buf, uint32 length, @@ -824,6 +821,7 @@ uint32 vbp_parse_video_packet_header_mp42( if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) { + ETRACE("VOL shape: %d is not supported",vidObjLay->video_object_layer_shape); return VBP_DATA; } @@ -941,6 +939,7 @@ uint32 vbp_parse_video_packet_header_mp42( if (vidObjLay->newpred_enable) { // New pred mode not supported in HW, but, does libva support this? + ETRACE("New pred mode not supported in HW"); ret = VBP_DATA; break; } @@ -1067,6 +1066,7 @@ uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) if (parser_cxt->info.VisualObject.VideoObject.resync_marker_disable) { // no resync_marker + VTRACE("resync marker is disabled"); return VBP_OK; } @@ -1078,6 +1078,7 @@ uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset); if (getbits == -1) { + WTRACE("FAILURE!!!! getbits %s : %d", __FUNCTION__, __LINE__); return VBP_DATA; } } diff --git a/mixvbp/vbp_plugin/mp4/Android.mk b/mixvbp/vbp_plugin/mp4/Android.mk index f1d3577..a8572e5 100755 --- a/mixvbp/vbp_plugin/mp4/Android.mk +++ b/mixvbp/vbp_plugin/mp4/Android.mk @@ -3,25 +3,25 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES := \ - viddec_mp4_visualobject.c \ - viddec_mp4_decodevideoobjectplane.c \ - viddec_mp4_parse.c \ - viddec_mp4_videoobjectplane.c \ - viddec_parse_sc_mp4.c \ - viddec_mp4_shortheader.c \ - viddec_mp4_videoobjectlayer.c + viddec_mp4_visualobject.c \ + viddec_mp4_decodevideoobjectplane.c \ + viddec_mp4_parse.c \ + viddec_mp4_videoobjectplane.c \ + viddec_mp4_shortheader.c \ + viddec_mp4_videoobjectlayer.c LOCAL_CFLAGS := -DVBP -DHOST_ONLY -LOCAL_C_INCLUDES := \ - $(MIXVBP_DIR)/include \ - $(LOCAL_PATH)/include \ - $(MIXVBP_DIR)/vbp_manager/include +LOCAL_C_INCLUDES := \ + $(MIXVBP_DIR)/include \ + $(LOCAL_PATH)/include \ + $(MIXVBP_DIR)/vbp_manager/include LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libmixvbp_mpeg4 -LOCAL_SHARED_LIBRARIES := \ - libmixvbp +LOCAL_SHARED_LIBRARIES := \ + libmixvbp \ + liblog include $(BUILD_SHARED_LIBRARY) diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c index ac4fb2e..f8a95ec 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c @@ -1,5 +1,6 @@ #include #include +#include #include "viddec_parser_ops.h" #include "viddec_mp4_parse.h" @@ -29,7 +30,13 @@ void viddec_mp4_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) parser->next_sc_prefix = false; parser->ignore_scs = false; - if (preserve) + if (!preserve) + { + parser->sc_seen = MP4_SC_SEEN_INVALID; + parser->bitstream_error = MP4_BS_ERROR_NONE; + memset(&(parser->info), 0, sizeof(mp4_Info_t)); + } + else { // Need to maintain information till VOL parser->sc_seen &= MP4_SC_SEEN_VOL; @@ -39,12 +46,6 @@ void viddec_mp4_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlane), 0, sizeof(mp4_VideoObjectPlane_t)); memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263), 0, sizeof(mp4_VideoObjectPlaneH263)); } - else - { - parser->sc_seen = MP4_SC_SEEN_INVALID; - parser->bitstream_error = MP4_BS_ERROR_NONE; - memset(&(parser->info), 0, sizeof(mp4_Info_t)); - } return; } // viddec_mp4_init @@ -71,7 +72,7 @@ uint32_t viddec_mp4_parse(void *parent, void *ctxt) is_svh = (cxt->cur_sc_prefix) ? 
false: true; if ((getbits = viddec_pm_peek_bits(parent, &sc, 32)) == -1) { - DEB("Start code not found\n"); + ETRACE("Start code not found\n"); return VIDDEC_PARSE_ERROR; } @@ -81,7 +82,7 @@ uint32_t viddec_mp4_parse(void *parent, void *ctxt) sc = sc & 0xFF; cxt->current_sc = sc; cxt->current_sc |= 0x100; - DEB("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc); + VTRACE("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc); switch (sc) { @@ -89,34 +90,36 @@ uint32_t viddec_mp4_parse(void *parent, void *ctxt) { status = mp4_Parse_VisualSequence(parent, cxt); cxt->prev_sc = MP4_SC_VISUAL_OBJECT_SEQUENCE; - DEB("MP4_VISUAL_OBJECT_SEQUENCE_SC: \n"); + VTRACE("MP4_VISUAL_OBJECT_SEQUENCE_SC: \n"); break; } case MP4_SC_VISUAL_OBJECT_SEQUENCE_EC: {/* Not required to do anything */ + VTRACE("MP4_SC_VISUAL_OBJECT_SEQUENCE_EC"); break; } case MP4_SC_USER_DATA: { /* Copy userdata to user-visible buffer (EMIT) */ - DEB("MP4_USER_DATA_SC: \n"); + VTRACE("MP4_USER_DATA_SC: \n"); break; } case MP4_SC_GROUP_OF_VOP: { status = mp4_Parse_GroupOfVideoObjectPlane(parent, cxt); cxt->prev_sc = MP4_SC_GROUP_OF_VOP; - DEB("MP4_GROUP_OF_VOP_SC:0x%.8X\n", status); + VTRACE("MP4_GROUP_OF_VOP_SC:0x%.8X\n", status); break; } case MP4_SC_VIDEO_SESSION_ERROR: {/* Not required to do anything?? */ + VTRACE("MP4_SC_VIDEO_SESSION_ERROR"); break; } case MP4_SC_VISUAL_OBJECT: { status = mp4_Parse_VisualObject(parent, cxt); cxt->prev_sc = MP4_SC_VISUAL_OBJECT; - DEB("MP4_VISUAL_OBJECT_SC: status=%.8X\n", status); + VTRACE("MP4_VISUAL_OBJECT_SC: status=%.8X\n", status); break; } case MP4_SC_VIDEO_OBJECT_PLANE: @@ -129,11 +132,12 @@ uint32_t viddec_mp4_parse(void *parent, void *ctxt) cxt->is_frame_start = true; cxt->sc_seen |= MP4_SC_SEEN_VOP; - DEB("MP4_VIDEO_OBJECT_PLANE_SC: status=0x%.8X\n", status); + VTRACE("MP4_VIDEO_OBJECT_PLANE_SC: status=0x%.8X\n", status); break; } case MP4_SC_STUFFING: { + VTRACE("MP4_SC_STUFFING"); break; } default: @@ -143,7 +147,7 @@ uint32_t viddec_mp4_parse(void *parent, void *ctxt) status = mp4_Parse_VideoObjectLayer(parent, cxt); cxt->sc_seen = MP4_SC_SEEN_VOL; cxt->prev_sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN; - DEB("MP4_VIDEO_OBJECT_LAYER_MIN_SC:status=0x%.8X\n", status); + VTRACE("MP4_VIDEO_OBJECT_LAYER_MIN_SC:status=0x%.8X\n", status); sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN; } // sc is unsigned and will be >= 0, so no check needed for sc >= MP4_SC_VIDEO_OBJECT_MIN @@ -158,13 +162,12 @@ uint32_t viddec_mp4_parse(void *parent, void *ctxt) status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); cxt->sc_seen = MP4_SC_SEEN_SVH; cxt->is_frame_start = true; - DEB("MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc); - DEB("MP4_VIDEO_OBJECT_MIN_SC:status=0x%.8X\n", status); + VTRACE("MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc); } } else { - DEB("UNKWON Cod:0x%08X\n", sc); + ETRACE("UNKNOWN code:0x%08X\n", sc); } } break; @@ -174,12 +177,12 @@ uint32_t viddec_mp4_parse(void *parent, void *ctxt) { viddec_pm_get_bits(parent, &sc, 22); cxt->current_sc = sc; - DEB("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc); + VTRACE("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc); status = mp4_Parse_VideoObject_svh(parent, cxt); status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); cxt->sc_seen = MP4_SC_SEEN_SVH; cxt->is_frame_start = true; - DEB("SVH: MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc); + VTRACE("SVH: MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc); } // Current sc becomes the previous sc diff 
--git a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h index 305e09b..2d93812 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h @@ -511,19 +511,19 @@ typedef struct #define BREAK_GETBITS_FAIL(x, ret) { \ if(x == -1){ \ ret = MP4_STATUS_PARSE_ERROR; \ + ETRACE("ERROR in GETBITS (%s : %d)", __FILE__, __LINE__); \ break;} \ } #define BREAK_GETBITS_REQD_MISSING(x, ret) { \ if(x == -1){ \ ret = MP4_STATUS_REQD_DATA_ERROR; \ + ETRACE("ERROR in GETBITS_REQD (%s : %d)", __FILE__, __LINE__); \ break;} \ } extern void *memset(void *s, int32_t c, uint32_t n); -uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt); - void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status); #endif diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c index 2a6a933..daa1cf5 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c @@ -1,4 +1,5 @@ #include +#include #include "viddec_mp4_shortheader.h" typedef struct @@ -59,7 +60,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p svh->source_format = (data & 0x7); if (svh->source_format == 0 || svh->source_format == 6) { - DEB("Error: Bad value for VideoPlaneWithShortHeader.source_format\n"); + ETRACE("Error: Bad value for VideoPlaneWithShortHeader.source_format\n"); ret = MP4_STATUS_NOTSUPPORT; break; } @@ -108,7 +109,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p svh->source_format = (data & 0x7); if (svh->source_format < 1 || svh->source_format > 6) { - DEB("Error: bad value of source_format\n"); + ETRACE("Error: bad value of source_format\n"); ret = MP4_STATUS_PARSE_ERROR; break; } @@ -149,7 +150,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p svh->picture_coding_type = (data & 0x7); if (svh->picture_coding_type > 1) { - DEB("Info: only support I and P frames\n"); + ETRACE("Info: only support I and P frames\n"); ret = MP4_STATUS_NOTSUPPORT; break; } @@ -192,7 +193,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *p } else { - DEB("Info: don't support to handle the other case of Update Full Extended PTYPE\n"); + ETRACE("Info: don't support to handle the other case of Update Full Extended PTYPE\n"); ret = MP4_STATUS_NOTSUPPORT; break; } @@ -332,13 +333,13 @@ mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser { k = 4; } - svh->num_macroblocks_in_gob = (((vol->video_object_layer_width + 15) & ~15) /16)*k; + svh->num_macroblocks_in_gob = (((vol->video_object_layer_width + 15) & ~15) /16)*k; svh->num_gobs_in_vop = (((vol->video_object_layer_height + 15) & ~15)/(16*k)); svh->num_rows_in_gob = k; } else { - DEB("Error: Bad value for VideoPlaneWithShortHeader.source_format\n"); + ETRACE("Error: Bad value for VideoPlaneWithShortHeader.source_format\n"); ret = MP4_STATUS_NOTSUPPORT; return ret; } diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c index 1ed52f3..6ff6b28 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c @@ -1,5 +1,6 @@ #include #include +#include #include "viddec_mp4_videoobjectlayer.h" #ifndef VBP const unsigned char mp4_DefaultIntraQuantMatrix[64] = { @@ -139,7 +140,7 @@ 
mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser) if (cxt->chroma_format != MP4_CHROMA_FORMAT_420) { - DEB("Warning: mp4_Parse_VideoObject:vol_control_parameters.chroma_format != 4:2:0\n"); + ETRACE("Warning: mp4_Parse_VideoObject:vol_control_parameters.chroma_format != 4:2:0\n"); cxt->chroma_format= MP4_CHROMA_FORMAT_420; parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; ret = MP4_STATUS_NOTSUPPORT; @@ -155,7 +156,7 @@ mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser) if (cxt->bit_rate == 0) { - DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.bit_rate = 0\n"); + ETRACE("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.bit_rate = 0\n"); parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; ret = MP4_STATUS_NOTSUPPORT; // Do we need to really break here? Why not just set an error and proceed @@ -169,7 +170,7 @@ mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser) cxt->vbv_buffer_size |= ( (code >> 4) & 0x7FFF); if (cxt->vbv_buffer_size == 0) { - DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.vbv_buffer_size = 0\n"); + ETRACE("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.vbv_buffer_size = 0\n"); parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; ret = MP4_STATUS_NOTSUPPORT; // Do we need to really break here? Why not just set an error and proceed @@ -217,7 +218,7 @@ mp4_Parse_VOL_sprite(void *parent, viddec_mp4_parser_t *parser) if (sprite_enable != MP4_SPRITE_GMC) { /* This is not a supported type by HW */ - DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable); + ETRACE("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -229,20 +230,20 @@ mp4_Parse_VOL_sprite(void *parent, viddec_mp4_parser_t *parser) cxt->no_of_sprite_warping_points = code >> 3; if (cxt->no_of_sprite_warping_points > 1) { - DEB("Warning: mp4_Parse_VideoObject:bad no_of_sprite_warping_points %d\n", + ETRACE("Warning: mp4_Parse_VideoObject:bad no_of_sprite_warping_points %d\n", cxt->no_of_sprite_warping_points); } if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && (cxt->sprite_brightness_change)) { - DEB("Error: mp4_Parse_VideoObject:sprite_brightness_change should be 0 for GMC sprites\n"); + ETRACE("Error: mp4_Parse_VideoObject:sprite_brightness_change should be 0 for GMC sprites\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } if (vidObjLay->sprite_enable != MP4_SPRITE_GMC) { - DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable); + ETRACE("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -330,7 +331,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)) { /* not supported shape*/ - DEB("Error: mp4_Parse_VideoObject: sadct_disable, not supp\n"); + ETRACE("Error: mp4_Parse_VideoObject: sadct_disable, not supp\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -341,7 +342,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ if (vidObjLay->not_8_bit) { /* 8 bit is only supported mode*/ - DEB("Error: mp4_Parse_VideoObject: not_8_bit, not supp\n"); + ETRACE("Error: mp4_Parse_VideoObject: not_8_bit, not supp\n"); ret = 
(mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -354,7 +355,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE) { /* Should not get here as shape is checked earlier */ - DEB("Error: mp4_Parse_VideoObject: GRAYSCALE, not supp\n"); + ETRACE("Error: mp4_Parse_VideoObject: GRAYSCALE, not supp\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -383,7 +384,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ vidObjLay->complexity_estimation_disable = code; if (!vidObjLay->complexity_estimation_disable) {/* complexity estimation not supported */ - DEB("Error: mp4_Parse_VideoObject: vidObjLay->complexity_estimation_disable, not supp\n"); + ETRACE("Error: mp4_Parse_VideoObject: vidObjLay->complexity_estimation_disable, not supp\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -406,7 +407,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ vidObjLay->newpred_enable = code; if (vidObjLay->newpred_enable) { - DEB("Error: NEWPRED mode is not supported\n"); + ETRACE("Error: NEWPRED mode is not supported\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -420,7 +421,7 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ vidObjLay->scalability = code; if (vidObjLay->scalability) { - DEB("Error: VOL scalability is not supported\n"); + ETRACE("Error: VOL scalability is not supported\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -440,7 +441,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse mp4_Status_t ret = MP4_STATUS_PARSE_ERROR; int32_t getbits=0; -//DEB("entering mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret); +//VTRACE("entering mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret); // Trying to parse more header data as it is more important than frame data if (parser->bitstream_error > MP4_HDR_ERROR_MASK) @@ -459,14 +460,14 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse if (!mp4_pvt_valid_object_type_indication(vidObjLay->video_object_type_indication)) { /* Streams with "unknown" type mismatch with ref */ - DEB("Warning: video_object_type_indication = %d, forcing to 1\n", + WTRACE("Warning: video_object_type_indication = %d, forcing to 1\n", vidObjLay->video_object_type_indication); vidObjLay->video_object_type_indication = 1; } if (vidObjLay->video_object_type_indication == MP4_VIDEO_OBJECT_TYPE_FINE_GRANULARITY_SCALABLE) {/* This is not a supported type by HW */ - DEB("ERROR: mp4_Parse_VideoObject:video_object_type_indication = %.2X\n", + ETRACE("ERROR: mp4_Parse_VideoObject:video_object_type_indication = %.2X\n", vidObjLay->video_object_type_indication); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; @@ -487,7 +488,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse vidObjLay->video_object_layer_verid = (code >> 3) & 0xF; if (!mp4_pvt_valid_object_layer_verid(vidObjLay->video_object_layer_verid)) { - DEB("Error: mp4_Parse_VideoObject:is_identifier = %d, expected[1,5]\n", + ETRACE("Error: mp4_Parse_VideoObject:is_identifier = %d, expected[1,5]\n", vidObjLay->video_object_layer_verid); ret = 
(mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; @@ -525,7 +526,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse /* If shape is not rectangluar exit early without parsing */ if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) { - DEB("Error: mp4_Parse_VideoObject: shape not rectangluar(%d):%d\n", + ETRACE("Error: mp4_Parse_VideoObject: shape not rectangluar(%d):%d\n", MP4_SHAPE_TYPE_RECTANGULAR, vidObjLay->video_object_layer_shape); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; @@ -534,7 +535,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse if ((vidObjLay->video_object_layer_verid != 1) && (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE)) {/* Grayscale not supported */ - DEB("Error: MP4_SHAPE_TYPE_GRAYSCALE not supported\n"); + ETRACE("Error: MP4_SHAPE_TYPE_GRAYSCALE not supported\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -547,7 +548,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse if (vidObjLay->vop_time_increment_resolution == 0) { - DEB("Error: 0 value for vop_time_increment_resolution\n"); + ETRACE("Error: 0 value for vop_time_increment_resolution\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } @@ -572,7 +573,7 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse } else { - DEB("Error: MP4_SHAPE_TYPE_BINARYONLY not supported\n"); + ETRACE("Error: MP4_SHAPE_TYPE_BINARYONLY not supported\n"); ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); break; } diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c index cbe88bd..a28ecc2 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c @@ -1,4 +1,5 @@ #include +#include #include "viddec_mp4_videoobjectplane.h" mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t *parser) @@ -25,7 +26,7 @@ mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t code = code >> 6; if ((code & 1) == 0) {/* SGA:Should we ignore marker bit? 
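For reference, the 18-bit GOV time_code that this marker-bit check walks through is laid out as hours(5) | minutes(6) | marker_bit(1) | seconds(6) per ISO/IEC 14496-2. A hedged, self-contained sketch of unpacking such a packed word (the struct and function names here are illustrative, not the parser's own):

    #include <stdint.h>

    typedef struct {
        uint8_t hours;    /* 5 bits */
        uint8_t minutes;  /* 6 bits */
        uint8_t seconds;  /* 6 bits */
    } gov_time_code;

    /* Returns 0 on success, -1 if the mandatory marker_bit is not set. */
    static int unpack_gov_time_code(uint32_t code18, gov_time_code *out)
    {
        out->seconds = (uint8_t)(code18 & 0x3F);  /* low 6 bits */
        code18 >>= 6;
        if ((code18 & 1) == 0)                    /* marker_bit must be 1 */
            return -1;
        code18 >>= 1;
        out->minutes = (uint8_t)(code18 & 0x3F);  /* next 6 bits */
        code18 >>= 6;
        out->hours = (uint8_t)(code18 & 0x1F);    /* top 5 bits */
        return 0;
    }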
*/ - DEB("Error:mp4_Parse_GroupOfVideoObjectPlane: Invalid marker\n"); + ETRACE("Error:mp4_Parse_GroupOfVideoObjectPlane: Invalid marker\n"); } code = code >>1; data->time_code_minutes = code & 0x3F; @@ -249,7 +250,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse if (vidObjLay->newpred_enable) { /* New pred mode not supported in HW */ - DEB("Error: mp4_Parse_VideoObjectPlane: New pred in vidObjPlane is not supported\n"); + ETRACE("Error: mp4_Parse_VideoObjectPlane: New pred in vidObjPlane is not supported\n"); ret = MP4_STATUS_NOTSUPPORT; break; } @@ -274,7 +275,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse vidObjPlane->vop_reduced_resolution = code; if (vidObjPlane->vop_reduced_resolution) { - DEB("Error: mp4_Parse_VideoObjectPlane: Reduced Resolution vidObjPlane is not supported\n"); + ETRACE("Error: mp4_Parse_VideoObjectPlane: Reduced Resolution vidObjPlane is not supported\n"); ret = MP4_STATUS_NOTSUPPORT; break; } @@ -356,7 +357,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse vidObjPlane->vop_fcode_forward = code & 0x7; if (vidObjPlane->vop_fcode_forward == 0) { - DEB("Error: vop_fcode_forward == 0\n"); + ETRACE("Error: vop_fcode_forward == 0\n"); break; } } @@ -368,7 +369,7 @@ mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parse vidObjPlane->vop_fcode_backward = code &0x7; if (vidObjPlane->vop_fcode_backward == 0) { - DEB("Error: vop_fcode_backward == 0\n"); + ETRACE("Error: vop_fcode_backward == 0\n"); break; } } diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c index ee54452..fc4c6cf 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c @@ -1,4 +1,5 @@ #include +#include #include "viddec_mp4_visualobject.h" static inline uint8_t mp4_pvt_isValid_verID(uint8_t id) @@ -64,7 +65,7 @@ static mp4_Status_t mp4_Parse_video_signal_type(void *parent, mp4_VideoSignalTyp void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status) { - //DEB("Entering mp4_set_hdr_bitstream_error: bs_err: 0x%x, hdr: %d, parse_status: %d\n", + //VTRACE("Entering mp4_set_hdr_bitstream_error: bs_err: 0x%x, hdr: %d, parse_status: %d\n", // parser->bitstream_error, hdr_flag, parse_status); if (hdr_flag) @@ -87,7 +88,7 @@ void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, parser->bitstream_error |= MP4_BS_ERROR_FRM_NONDEC; } - //DEB("Exiting mp4_set_hdr_bitstream_error: bs_err: 0x%x\n", parser->bitstream_error); + //VTRACE("Exiting mp4_set_hdr_bitstream_error: bs_err: 0x%x\n", parser->bitstream_error); return; } // mp4_set_hdr_bitstream_error @@ -161,7 +162,7 @@ mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser) } else { - DEB("Warning: Unsupported visual_object_verid\n"); + ETRACE("Warning: Unsupported visual_object_verid\n"); parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; // Continue parsing as it is not a required field for decoder } @@ -173,7 +174,7 @@ mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser) if (visObj->visual_object_type != MP4_VISUAL_OBJECT_TYPE_VIDEO) { /* VIDEO is the only supported type */ - DEB("Error: Unsupported object: visual_object_type != video ID\n"); + ETRACE("Error: Unsupported object: visual_object_type != video ID\n"); parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP; break; } diff --git 
a/mixvbp/vbp_plugin/mp4/viddec_parse_sc_mp4.c b/mixvbp/vbp_plugin/mp4/viddec_parse_sc_mp4.c deleted file mode 100755 index 3988169..0000000 --- a/mixvbp/vbp_plugin/mp4/viddec_parse_sc_mp4.c +++ /dev/null @@ -1,151 +0,0 @@ -#include "viddec_pm_parse.h" -#include "viddec_mp4_parse.h" - -/* Parse for Sc code of pattern 0x00 0x00 0xXX in the current buffer. Returns either sc found or success. - The conext is updated with current phase and sc_code position in the buffer. - - What is phase?: phase is a value between [0-4], we keep track of consecutive '0's with this. - Any time a '0' is found its incremented by 1(uptp 2) and reset to '0' if a zero not found. - if 0xXX code is found and current phase is 2, its changed to 3 which means we found the pattern - we are looking for. Its incremented to 4 once we see a byte after this pattern. - - For MP4 there are two startcode patterns LVH & SVH. LVH is same as other codecs (00 00 01), SVH - A.K.A H263 is (00 00 8X). So we have to look for both kind of start codes. The spec doesn't - explicitly say if both of them can exist in a stream? So current implemenation will assume - that only one of them is present in a given stream to simplify implementation. The reason it can - get complicated is resync marker in LVH can potentially be (00 00 8) which will cause false detect - of SVH start code. -*/ -#ifndef VBP -uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state) -{ - uint8_t *ptr; - uint32_t size; - uint32_t data_left=0, phase = 0, ret = 0; - viddec_sc_parse_cubby_cxt_t *cxt; - viddec_mp4_parser_t *p_info; - - cxt = ( viddec_sc_parse_cubby_cxt_t *)in; - viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state; - size = 0; - data_left = cxt->size; - ptr = cxt->buf; - phase = cxt->phase; - cxt->sc_end_pos = -1; - p_info = (viddec_mp4_parser_t *)pcxt; - - /* parse until there is more data and start code not found */ - while ((data_left > 0) &&(phase < 3)) - { - /* Check if we are byte aligned & phase=0, if thats the case we can check - work at a time instead of byte*/ - if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) - { - while (data_left > 3) - { - uint32_t data; - char mask1 = 0, mask2=0; - - data = *((uint32_t *)ptr); -#ifndef MFDBIGENDIAN - data = SWAP_WORD(data); -#endif - mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0)); - mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1)); - /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need - two consecutive zero bytes for a start code pattern */ - if (mask1 && mask2) - {/* Success so skip 4 bytes and start over */ - ptr+=4; - size+=4; - data_left-=4; - continue; - } - else - { - break; - } - } - } - - /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected - two zero bytes in the word so we look one byte at a time*/ - if (data_left > 0) - { - if (*ptr == FIRST_STARTCODE_BYTE) - {/* Phase can be 3 only if third start code byte is found */ - phase++; - ptr++; - size++; - data_left--; - if (phase > 2) - { - phase = 2; - - if ( (((uint32_t)ptr) & 0x3) == 0 ) - { - while ( data_left > 3 ) - { - if (*((uint32_t *)ptr) != 0) - { - break; - } - ptr+=4; - size+=4; - data_left-=4; - } - } - } - } - else - { - uint8_t normal_sc=0, short_sc=0; - if (phase == 2) - { - normal_sc = (*ptr == THIRD_STARTCODE_BYTE); - short_sc = (p_info->ignore_scs == 0) && (SHORT_THIRD_STARTCODE_BYTE == ( *ptr & 0xFC)); - } - - if (!(normal_sc | short_sc)) - { - phase = 0; - } - else - {/* Match for start 
code so update context with byte position */ - cxt->sc_end_pos = size; - phase = 3; - p_info->cur_sc_prefix = p_info->next_sc_prefix; - p_info->next_sc_prefix = (normal_sc) ? 1: 0; - if (normal_sc) - { - p_info->ignore_scs=1; - } - else - { - /* For short start code since start code is in one nibble just return at this point */ - phase += 1; - state->next_sc = *ptr; - state->second_scprfx_length = 2; - ret=1; - break; - } - } - ptr++; - size++; - data_left--; - } - } - } - if ((data_left > 0) && (phase == 3)) - { - cxt->sc_end_pos++; - state->next_sc = cxt->buf[cxt->sc_end_pos]; - state->second_scprfx_length = 3; - phase++; - ret = 1; - } - cxt->phase = phase; - /* Return SC found only if phase is 4, else always success */ - return ret; -} -#endif -- cgit v1.2.3 From 4699341e579c263fe83f0364126577692a4a52f6 Mon Sep 17 00:00:00 2001 From: Chang Ying Date: Sat, 12 Oct 2013 16:17:34 +0800 Subject: VideoVPP: fix resource leak in destructor BZ: 144487 You can't manipulate Android's KeyedVector when iterate through its items. Change-Id: I057cc0ddc2632bc3621f5ea4804c0a2703bd0148 Signed-off-by: Chang Ying Reviewed-on: http://android.intel.com:8080/138147 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videovpp/VideoVPPBase.cpp | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/videovpp/VideoVPPBase.cpp b/videovpp/VideoVPPBase.cpp index 8265723..09d2f92 100644 --- a/videovpp/VideoVPPBase.cpp +++ b/videovpp/VideoVPPBase.cpp @@ -1,10 +1,13 @@ +//#define LOG_NDEBUG 0 +#define LOG_TAG "VideoVPPBase" + #include "VideoVPPBase.h" #define NATIVE_DISPLAY 0x18c34078 #define CHECK_VA_STATUS(FUNC) \ if (vret != VA_STATUS_SUCCESS) {\ - printf("[%d] " FUNC" failed with 0x%x\n", __LINE__, vret);\ + LOGE("[%d] " FUNC" failed with 0x%x\n", __LINE__, vret);\ return vret;\ } @@ -245,9 +248,9 @@ VAStatus VideoVPPBase::stop() { vret = vaDestroySurfaces(va_display, &SrcSurf, 1); CHECK_VA_STATUS("vaDestroySurfaces"); } - printf("remove src surf %x\n", SrcSurf); - SrcSurfHandleMap.removeItemsAt(i); + LOGV("remove src surf %x\n", SrcSurf); } + SrcSurfHandleMap.clear(); c = DstSurfHandleMap.size(); for (int i = 0; i < c; i++) { @@ -256,9 +259,9 @@ VAStatus VideoVPPBase::stop() { vret = vaDestroySurfaces(va_display, &DstSurf, 1); CHECK_VA_STATUS("vaDestroySurfaces"); } - printf("remove dst surf %x\n", DstSurf); - DstSurfHandleMap.removeItemsAt(i); + LOGV("remove dst surf %x\n", DstSurf); } + DstSurfHandleMap.clear(); if (vpp_pipeline_buf != VA_INVALID_ID) { vret = vaDestroyBuffer(va_display, vpp_pipeline_buf); @@ -378,7 +381,7 @@ VAStatus VideoVPPBase::perform(RenderTarget Src, RenderTarget Dst, VPParameters vret = _CreateSurfaceFromGrallocHandle(Src, &SrcSurf); CHECK_VA_STATUS("_CreateSurfaceFromGrallocHandle"); SrcSurfHandleMap.add(Src.handle, SrcSurf); - printf("add src surface %x\n", SrcSurf); + LOGV("add src surface %x\n", SrcSurf); } i = DstSurfHandleMap.indexOfKey(Dst.handle); @@ -388,7 +391,7 @@ VAStatus VideoVPPBase::perform(RenderTarget Src, RenderTarget Dst, VPParameters vret = _CreateSurfaceFromGrallocHandle(Dst, &DstSurf); CHECK_VA_STATUS("_CreateSurfaceFromGrallocHandle"); DstSurfHandleMap.add(Dst.handle, DstSurf); - printf("add dst surface %x\n", DstSurf); + LOGV("add dst surface %x\n", DstSurf); } if (vpp != NULL && (vpp->num_filter_bufs > 0 && vpp->num_filter_bufs < VAProcFilterCount)) { -- cgit v1.2.3 From fe67cf9b0660bee3653dc89dba026293b7597b32 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Mon, 30 Sep 2013 16:07:16 +0800 Subject: 
mix_vbp: remove the check which allows only width/height between 32 to 2048 BZ: 141542 Parser should not have such limitations. The check is supposed to happen in psb_video. Change-Id: I0837cdea7233bb6e9b2a13711a2d9f41c23d0605 Signed-off-by: Dan Liang Reviewed-on: http://android.intel.com:8080/137743 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c | 2 -- 1 file changed, 2 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c index 431892b..161b0fd 100755 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c @@ -536,8 +536,6 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param PicWidthInMbs = (SPS->sps_disp.pic_width_in_mbs_minus1 + 1); PicHeightInMapUnits = (SPS->sps_disp.pic_height_in_map_units_minus1 + 1); FrameHeightInMbs = SPS->sps_disp.frame_mbs_only_flag? PicHeightInMapUnits: (PicHeightInMapUnits<<1); - if ((PicWidthInMbs < 2) || (PicWidthInMbs > 128) || (FrameHeightInMbs < 2) || (FrameHeightInMbs>128)) - break; if (!SPS->sps_disp.frame_mbs_only_flag) { -- cgit v1.2.3 From 831d763a3a49876a6223c0bcc339e01a247f6383 Mon Sep 17 00:00:00 2001 From: liubolun Date: Thu, 10 Oct 2013 16:56:07 +0800 Subject: Fix qcif/cif Vp8 encode quality issue. BZ: 132033 Copy cacheable source surface to uncacheable for VP8 encode to avoid corruption issue. Signed-off-by: liubolun Change-Id: I27ac41595e0544bbfa9636bc93bdb1bde0e3688d Signed-off-by: liubolun Reviewed-on: http://android.intel.com:8080/137744 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videoencoder/VideoEncoderBase.cpp | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 41c069d..af16152 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -178,8 +178,16 @@ Encode_Status VideoEncoderBase::start() { height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; }else{ // this alignment is used for AVC. For vp8 encode, driver will handle the alignment - stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; //on Merr, stride must be 64 aligned. - height_aligned = ((mComParams.resolution.height + 31) / 32 ) * 32; + if(mComParams.profile == VAProfileVP8Version0_3) + { + stride_aligned = mComParams.resolution.width; + height_aligned = mComParams.resolution.height; + } + else + { + stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; //on Merr, stride must be 64 aligned. 
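    /* A hedged aside, not part of the patch: the ((x + 63) / 64) * 64 form on
       the line above rounds x up to the next multiple of 64; for power-of-two
       alignments it is equivalent to the bitmask helper sketched here. */
    #include <stdint.h>

    static inline uint32_t align_up_pow2(uint32_t x, uint32_t a)
    {
        /* a must be a power of two; e.g. align_up_pow2(720, 64) == 768 */
        return (x + a - 1) & ~(a - 1);
    }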
+ height_aligned = ((mComParams.resolution.height + 31) / 32 ) * 32; + } } ValueInfo vinfo; @@ -1707,7 +1715,7 @@ Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { LOG_I("Surface ID created from Malloc = 0x%08x\n", map->value); //Merrifield limitation, should use mAutoReference to check if on Merr - if ((mComParams.profile == VAProfileVP8Version0_3)||((mAutoReference == false) || (map->vinfo.lumaStride % 64 == 0))) + if ((mAutoReference == false) || (map->vinfo.lumaStride % 64 == 0)) map->surface = surface; else { map->surface_backup = surface; @@ -1715,7 +1723,10 @@ Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { //TODO: need optimization for both width/height not aligned case VASurfaceID surfaceId; unsigned int stride_aligned; - stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; + if(mComParams.profile == VAProfileVP8Version0_3) + stride_aligned = mComParams.resolution.width; + else + stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, stride_aligned, map->vinfo.height, &surfaceId, 1, NULL, 0); -- cgit v1.2.3 From a51e586ef0eeea974a39d389b07c70522cae5588 Mon Sep 17 00:00:00 2001 From: Nana GUo Date: Wed, 21 Aug 2013 09:43:54 -0400 Subject: libmix: use new vaCreateSurfaces API to map external buffer BZ: 132242 use new vaCreateSurfaces API to map external buffer for video decoder Change-Id: I046d86901f37b6c954aac144777756edd033dc74 Signed-off-by: Nana GUo Reviewed-on: http://android.intel.com:8080/126961 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videodecoder/VideoDecoderBase.cpp | 44 +++++++++++++++++++++++++++++---------- videodecoder/VideoDecoderBase.h | 2 +- 2 files changed, 34 insertions(+), 12 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index afd68df..79e33b7 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -808,35 +808,57 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i #endif } if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) { - mVASurfaceAttrib = new VASurfaceAttributeTPI; + VASurfaceAttrib attribs[2]; + mVASurfaceAttrib = new VASurfaceAttribExternalBuffers; if (mVASurfaceAttrib == NULL) { return DECODE_MEMORY_FAIL; } - mVASurfaceAttrib->buffers= (unsigned int *)malloc(sizeof(unsigned int)*mNumSurfaces); + mVASurfaceAttrib->buffers= (unsigned long *)malloc(sizeof(unsigned long)*mNumSurfaces); if (mVASurfaceAttrib->buffers == NULL) { return DECODE_MEMORY_FAIL; } - mVASurfaceAttrib->count = mNumSurfaces; - mVASurfaceAttrib->luma_stride = mConfigBuffer.graphicBufferStride; + mVASurfaceAttrib->num_buffers = mNumSurfaces; mVASurfaceAttrib->pixel_format = mConfigBuffer.graphicBufferColorFormat; mVASurfaceAttrib->width = mVideoFormatInfo.width; mVASurfaceAttrib->height = mVideoFormatInfo.height; - mVASurfaceAttrib->type = VAExternalMemoryAndroidGrallocBuffer; - mVASurfaceAttrib->reserved[0] = (unsigned int)mConfigBuffer.nativeWindow; + mVASurfaceAttrib->data_size = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.height * 1.5; + mVASurfaceAttrib->num_planes = 2; + mVASurfaceAttrib->pitches[0] = mConfigBuffer.graphicBufferStride; + mVASurfaceAttrib->pitches[1] = mConfigBuffer.graphicBufferStride; + mVASurfaceAttrib->pitches[2] = 0; + mVASurfaceAttrib->pitches[3] = 0; + mVASurfaceAttrib->offsets[0] = 0; + mVASurfaceAttrib->offsets[1] = mConfigBuffer.graphicBufferStride * 
mVideoFormatInfo.height; + mVASurfaceAttrib->offsets[2] = 0; + mVASurfaceAttrib->offsets[3] = 0; + mVASurfaceAttrib->private_data = (void *)mConfigBuffer.nativeWindow; + mVASurfaceAttrib->flags = 0; for (int i = 0; i < mNumSurfaces; i++) { mVASurfaceAttrib->buffers[i] = (unsigned int )mConfigBuffer.graphicBufferHandler[i]; } - vaStatus = vaCreateSurfacesWithAttribute( + attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType; + attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE; + attribs[0].value.type = VAGenericValueTypeInteger; + attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC; + + attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor; + attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE; + attribs[1].value.type = VAGenericValueTypePointer; + attribs[1].value.value.p = (void *)mVASurfaceAttrib; + + vaStatus = vaCreateSurfaces( mVADisplay, - mVideoFormatInfo.surfaceWidth, - mVideoFormatInfo.surfaceHeight, format, - mNumSurfaces, + mVideoFormatInfo.width, + mVideoFormatInfo.height, mSurfaces, - mVASurfaceAttrib); + mNumSurfaces, + attribs, + 2); + } else { vaStatus = vaCreateSurfaces( mVADisplay, diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 1f69cf1..292473c 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -151,7 +151,7 @@ private: VideoSurfaceBuffer *mOutputHead; // head of output buffer list VideoSurfaceBuffer *mOutputTail; // tail of output buffer list VASurfaceID *mSurfaces; // surfaces array - VASurfaceAttributeTPI *mVASurfaceAttrib; + VASurfaceAttribExternalBuffers *mVASurfaceAttrib; uint8_t **mSurfaceUserPtr; // mapped user space pointer int32_t mSurfaceAcquirePos; // position of surface to start acquiring int32_t mNextOutputPOC; // Picture order count of next output -- cgit v1.2.3 From c9b2c6bcc7e7f7cdc30cc4008eb5827c0d93bc96 Mon Sep 17 00:00:00 2001 From: jingzhi yu Date: Wed, 9 Oct 2013 17:44:07 +0800 Subject: [PORT FROM R42B] imagedecoder: modify header file including MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit BZ: 129635 Issue: the header include references a PRIVATE folder. Solution: change the header include to a TARGET_OUT_HEADERS subdirectory. Change-Id: I147dfb5f0264400e0f825498c6f42405e83aa633 Orig-Change-Id: I93565acb0722542c18988cecdae7d2d37eab9492 Signed-off-by: jingzhi yu Reviewed-on: http://android.intel.com:8080/137074 Reviewed-by: Cheng, Yao Reviewed-by: Yang, Liang L Reviewed-by: Gong, Shuguang Tested-by: Gong, Shuguang Reviewed-by: cactus Tested-by: cactus Reviewed-on: http://android.intel.com:8080/138502 Reviewed-by: Koski, Anttu Tested-by: Koski, Anttu --- imagedecoder/Android.mk | 9 ++++++--- imagedecoder/JPEGDecoder.h | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/imagedecoder/Android.mk b/imagedecoder/Android.mk index a7a59b3..b5d3c95 100644 --- a/imagedecoder/Android.mk +++ b/imagedecoder/Android.mk @@ -10,7 +10,8 @@ LOCAL_SRC_FILES += \ LOCAL_C_INCLUDES += \ $(LOCAL_PATH) \ - $(TARGET_OUT_HEADERS)/libva + $(TARGET_OUT_HEADERS)/libva \ + $(TARGET_OUT_HEADERS)/libmix_videovpp LOCAL_COPY_HEADERS_TO := libjpegdec @@ -51,7 +52,8 @@ LOCAL_SRC_FILES += \ LOCAL_C_INCLUDES += \ $(LOCAL_PATH) \ - $(TARGET_OUT_HEADERS)/libva + $(TARGET_OUT_HEADERS)/libva \ + $(TARGET_OUT_HEADERS)/libmix_videovpp LOCAL_SHARED_LIBRARIES += \ libcutils \ @@ -80,7 +82,8 @@ LOCAL_C_INCLUDES += \ $(LOCAL_PATH) \ $(TOP)/external/jpeg \ 
$(TARGET_OUT_HEADERS)/libva \ - $(TARGET_OUT_HEADERS)/libjpegdec + $(TARGET_OUT_HEADERS)/libjpegdec \ + $(TARGET_OUT_HEADERS)/libmix_videovpp LOCAL_COPY_HEADERS_TO := libjpeg_hw diff --git a/imagedecoder/JPEGDecoder.h b/imagedecoder/JPEGDecoder.h index f46611f..9c0cd9a 100644 --- a/imagedecoder/JPEGDecoder.h +++ b/imagedecoder/JPEGDecoder.h @@ -31,7 +31,7 @@ #ifndef JPEGDEC_H #define JPEGDEC_H -#include "../videovpp/VideoVPPBase.h" +#include #include #include #include "JPEGCommon.h" -- cgit v1.2.3 From 5cf93daa2da4da7f2b8a2d052a92807533ec1bd4 Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Sun, 29 Sep 2013 17:25:58 +0800 Subject: libmix: remove redundant code for VC1 BZ: 131068 remove redundant code for VC1 Change-Id: Ifa78010f253346fa90842d0275677d223e585afe Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/138432 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- mixvbp/include/viddec_fw_common_defs.h | 5 - mixvbp/vbp_manager/vbp_vc1_parser.c | 15 +-- mixvbp/vbp_plugin/vc1/Android.mk | 8 +- mixvbp/vbp_plugin/vc1/mix_vbp_vc1_stubs.c | 30 ----- mixvbp/vbp_plugin/vc1/vc1.h | 43 +------ mixvbp/vbp_plugin/vc1/vc1parse.c | 28 ++--- mixvbp/vbp_plugin/vc1/vc1parse.h | 30 +---- mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c | 5 - mixvbp/vbp_plugin/vc1/vc1parse_common_defs.h | 166 +++---------------------- mixvbp/vbp_plugin/vc1/vc1parse_ipic_adv.c | 9 +- mixvbp/vbp_plugin/vc1/vc1parse_pic_com_adv.c | 2 - mixvbp/vbp_plugin/vc1/vc1parse_ppic.c | 4 - mixvbp/vbp_plugin/vc1/vc1parse_ppic_adv.c | 16 +-- mixvbp/vbp_plugin/vc1/vc1parse_vopdq.c | 6 - mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c | 178 +-------------------------- 15 files changed, 49 insertions(+), 496 deletions(-) delete mode 100755 mixvbp/vbp_plugin/vc1/mix_vbp_vc1_stubs.c diff --git a/mixvbp/include/viddec_fw_common_defs.h b/mixvbp/include/viddec_fw_common_defs.h index 2cc32b7..270327d 100644 --- a/mixvbp/include/viddec_fw_common_defs.h +++ b/mixvbp/include/viddec_fw_common_defs.h @@ -200,8 +200,6 @@ enum viddec_fw_mpeg2_error_codes VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT = (1 << 29),/* Parser detected corruption in quantization matrix extension. 
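Each of these viddec_fw_mpeg2_error_codes occupies its own bit, so the parser can OR several conditions into one status word and a caller can test each one independently. A small self-contained illustration of that flag pattern (the macro here just mirrors the enum value above so the snippet stands alone):

    #include <stdint.h>
    #include <stdio.h>

    #define ERR_CORRUPTED_QMAT_EXT (1u << 29)  /* mirrors VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT */

    int main(void)
    {
        uint32_t error_status = 0;
        error_status |= ERR_CORRUPTED_QMAT_EXT;      /* parser records the condition */
        if (error_status & ERR_CORRUPTED_QMAT_EXT)   /* caller tests it independently */
            printf("quantization matrix extension corrupted\n");
        return 0;
    }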
*/ }; -#ifdef VBP - #ifndef NULL #define NULL (void*)0x0 #endif @@ -218,6 +216,3 @@ typedef int bool; #endif #endif -/* end of #ifdef VBP */ - -#endif diff --git a/mixvbp/vbp_manager/vbp_vc1_parser.c b/mixvbp/vbp_manager/vbp_vc1_parser.c index 4a8d6d3..3026ec9 100755 --- a/mixvbp/vbp_manager/vbp_vc1_parser.c +++ b/mixvbp/vbp_manager/vbp_vc1_parser.c @@ -106,19 +106,8 @@ uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext) return VBP_LOAD; } - pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_vc1_wkld_done"); - if (NULL == pcontext->parser_ops->is_wkld_done) - { - ETRACE ("Failed to set entry point."); - return VBP_LOAD; - } - - pcontext->parser_ops->is_frame_start = dlsym(pcontext->fd_parser, "viddec_vc1_is_start_frame"); - if (NULL == pcontext->parser_ops->is_frame_start) - { - ETRACE ("Failed to set entry point."); - return VBP_LOAD; - } + pcontext->parser_ops->is_wkld_done = NULL; + pcontext->parser_ops->is_frame_start = NULL; /* entry point not needed */ pcontext->parser_ops->flush = NULL; diff --git a/mixvbp/vbp_plugin/vc1/Android.mk b/mixvbp/vbp_plugin/vc1/Android.mk index b0245fa..229e067 100755 --- a/mixvbp/vbp_plugin/vc1/Android.mk +++ b/mixvbp/vbp_plugin/vc1/Android.mk @@ -3,12 +3,11 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES := \ - mix_vbp_vc1_stubs.c \ + viddec_vc1_parse.c \ vc1parse_bpic_adv.c \ vc1parse_huffman.c \ vc1parse_mv_com.c \ vc1parse_ppic_adv.c \ - viddec_vc1_parse.c \ vc1parse_bpic.c \ vc1parse_common_tables.c \ vc1parse_ipic_adv.c \ @@ -20,8 +19,6 @@ LOCAL_SRC_FILES := \ vc1parse_pic_com.c \ vc1parse_vopdq.c -LOCAL_CFLAGS := -DVBP -DHOST_ONLY - LOCAL_C_INCLUDES := \ $(MIXVBP_DIR)/include \ $(MIXVBP_DIR)/vbp_manager/include \ @@ -31,6 +28,7 @@ LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libmixvbp_vc1 LOCAL_SHARED_LIBRARIES := \ - libmixvbp + libmixvbp \ + liblog include $(BUILD_SHARED_LIBRARY) diff --git a/mixvbp/vbp_plugin/vc1/mix_vbp_vc1_stubs.c b/mixvbp/vbp_plugin/vc1/mix_vbp_vc1_stubs.c deleted file mode 100755 index cffa7b8..0000000 --- a/mixvbp/vbp_plugin/vc1/mix_vbp_vc1_stubs.c +++ /dev/null @@ -1,30 +0,0 @@ -#include "vc1.h" - -void vc1_start_new_frame (void *parent, vc1_viddec_parser_t *parser ) -{ - return; -} - -void vc1_end_frame (vc1_viddec_parser_t *parser) -{ - return; -} - - -int32_t vc1_parse_emit_current_frame( void *parent, vc1_viddec_parser_t *parser ) -{ - return(0); -} - - -void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser) -{ -} - -void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser) -{ -} - -void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser) -{ -} diff --git a/mixvbp/vbp_plugin/vc1/vc1.h b/mixvbp/vbp_plugin/vc1/vc1.h index e7d0ac2..dd522ee 100755 --- a/mixvbp/vbp_plugin/vc1/vc1.h +++ b/mixvbp/vbp_plugin/vc1/vc1.h @@ -13,29 +13,9 @@ #ifndef _VC1_H_ #define _VC1_H_ -#ifdef MFD_FIRMWARE -typedef unsigned int size_t; -#define LOG(...) -#else - #include #include #include -#ifndef VBP -enum { - NONE = 0, - CRITICAL, - WARNING, - INFO, - DEBUG, -} ; - -#define vc1_log_level DEBUG - -#define LOG( log_lev, format, args ... ) \ - if (vc1_log_level >= log_lev) { OS_INFO("%s[%d]:: " format "\n", __FUNCTION__ , __LINE__ , ## args ); } -#endif -#endif #include "viddec_fw_common_defs.h" #include "viddec_fw_frame_attr.h" @@ -45,17 +25,6 @@ enum { extern "C" { #endif -#ifndef VBP -#define LOG_CRIT(format, args ... ) LOG( CRITICAL, format, ## args) -#define LOG_WARN(format, args ... 
) LOG( WARNING, format, ## args) -#define LOG_INFO(format, args ... ) LOG( INFO, format, ## args) -#define LOG_DEBUG(format, args ... ) LOG( DEBUG, format, ## args) -#else -#define LOG_CRIT(format, args ... ) -#define LOG_WARN(format, args ... ) -#define LOG_INFO(format, args ... ) -#define LOG_DEBUG(format, args ... ) -#endif // Seems to be hardware bug: DO NOT TRY TO SWAP BITPLANE0 and BITPLANE2 // Block Control Register at offset 222C uses Bitplane_raw_ID0 to indicate directmb/fieldtx while @@ -190,11 +159,8 @@ enum vc1_sc_seen_flags typedef struct { int id; - uint32_t intcomp_top; - uint32_t intcomp_bot; int fcm; /* frame coding mode */ int type; - int anchor[2]; /* one per field */ int rr_en; /* range reduction enable flag at sequence layer */ int rr_frm; /* range reduction flag at picture layer */ int tff; @@ -202,18 +168,12 @@ typedef struct { typedef struct { - uint32_t sc_seen_since_last_wkld; uint32_t sc_seen; - uint32_t is_frame_start; - uint32_t is_second_start; uint32_t is_reference_picture; - uint32_t intcomp_last[4]; /* for B frames */ - uint32_t intcomp_top[2]; - uint32_t intcomp_bot[2]; vc1_Info info; VC1D_SPR_REGS spr; ref_frame_t ref_frame[VC1_NUM_REFERENCE_FRAMES]; -#ifdef VBP + /* A storage area is provided for each type of bit plane. Only one of */ /* each type will ever be used for a picture and never more than three */ /* bit-planes per picture, and often only one is used. We never clear */ @@ -229,7 +189,6 @@ typedef struct uint32_t bp_directmb[4096]; uint32_t bp_fieldtx[4096]; uint32_t start_code; -#endif } vc1_viddec_parser_t; #endif //_VC1_H_ diff --git a/mixvbp/vbp_plugin/vc1/vc1parse.c b/mixvbp/vbp_plugin/vc1/vc1parse.c index bce1f57..62dfd84 100755 --- a/mixvbp/vbp_plugin/vc1/vc1parse.c +++ b/mixvbp/vbp_plugin/vc1/vc1parse.c @@ -11,6 +11,7 @@ // */ +#include #include "vc1parse.h" #define VC1_PIXEL_IN_LUMA 16 @@ -36,13 +37,11 @@ vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo) result = viddec_pm_get_bits(ctxt, &rcv.struct_a_rcv, 32); md->width = rcv.struct_a.HORIZ_SIZE; md->height = rcv.struct_a.VERT_SIZE; -#ifdef VBP //The HRD rate and HRD buffer size may be encoded according to a 64 bit sequence header data structure B //if there is no data strcuture B metadata contained in the bitstream, we will not be able to get the //bitrate data, hence we set it to 0 for now md->HRD_NUM_LEAKY_BUCKETS = 0; md->hrd_initial_state.sLeakyBucket[0].HRD_RATE = 0; -#endif result = viddec_pm_get_bits(ctxt, &rcv.struct_c_rcv, 32); md->PROFILE = rcv.struct_c.PROFILE >> 2; @@ -57,9 +56,7 @@ vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo) md->MAXBFRAMES = rcv.struct_c.MAXBFRAMES; md->QUANTIZER = rcv.struct_c.QUANTIZER; md->FINTERPFLAG = rcv.struct_c.FINTERPFLAG; -#ifdef VBP md->SYNCMARKER = rcv.struct_c.SYNCMARKER; -#endif if ((md->PROFILE == VC1_PROFILE_SIMPLE) || (md->MULTIRES && md->PROFILE == VC1_PROFILE_MAIN)) @@ -71,13 +68,13 @@ vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo) md->widthMB = (md->width + 15 ) / VC1_PIXEL_IN_LUMA; md->heightMB = (md->height + 15) / VC1_PIXEL_IN_LUMA; - DEB("rcv: beforemod: res: %dx%d\n", md->width, md->height); + VTRACE("rcv: beforemod: res: %dx%d\n", md->width, md->height); /* WL takes resolution in unit of 2 pel - sec. 
6.2.13.1 */ md->width = md->width/2 -1; md->height = md->height/2 -1; - DEB("rcv: res: %dx%d\n", md->width, md->height); + VTRACE("rcv: res: %dx%d\n", md->width, md->height); return status; } @@ -138,12 +135,10 @@ vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) { result = viddec_pm_get_bits(ctxt, &sh.aspect_size, 16); } -#ifdef VBP md->ASPECT_RATIO_FLAG = 1; md->ASPECT_RATIO = sh.ASPECT_RATIO; md->ASPECT_HORIZ_SIZE = sh.seq_aspect_size.ASPECT_HORIZ_SIZE; md->ASPECT_VERT_SIZE = sh.seq_aspect_size.ASPECT_VERT_SIZE; -#endif } result = viddec_pm_get_bits(ctxt, &tempValue, 1); @@ -169,10 +164,8 @@ vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) { result = viddec_pm_get_bits(ctxt, &sh.color_format, 24); } -#ifdef VBP md->COLOR_FORMAT_FLAG = sh.COLOR_FORMAT_FLAG; md->MATRIX_COEF = sh.seq_color_format.MATRIX_COEF; -#endif } // Successful get of display size } // DISPLAY_EXT is 1 @@ -184,9 +177,7 @@ vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) result = viddec_pm_get_bits(ctxt, &tempValue, 5); sh.HRD_NUM_LEAKY_BUCKETS = tempValue; md->HRD_NUM_LEAKY_BUCKETS = sh.HRD_NUM_LEAKY_BUCKETS; -#ifndef VBP - // Skip the rest of the parsing - hrdinfo is not required for decode or for attributes -#else + { uint8_t count; uint8_t bitRateExponent; @@ -214,21 +205,18 @@ vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo) (uint32_t)(tempValue + 1) << bufferSizeExponent; } } -#endif } else { md->HRD_NUM_LEAKY_BUCKETS = 0; -#ifdef VBP md->hrd_initial_state.sLeakyBucket[0].HRD_RATE = 0; -#endif } md->widthMB = (((md->width + 1) * 2) + 15) / VC1_PIXEL_IN_LUMA; md->heightMB = (((md->height + 1) * 2) + 15) / VC1_PIXEL_IN_LUMA; - DEB("md: res: %dx%d\n", md->width, md->height); - DEB("sh: dispres: %dx%d\n", sh.seq_disp_size.DISP_HORIZ_SIZE, sh.seq_disp_size.DISP_VERT_SIZE); + VTRACE("md: res: %dx%d\n", md->width, md->height); + VTRACE("sh: dispres: %dx%d\n", sh.seq_disp_size.DISP_HORIZ_SIZE, sh.seq_disp_size.DISP_VERT_SIZE); return status; } @@ -318,8 +306,8 @@ vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo) } } - DEB("ep: res: %dx%d\n", ep.ep_size.CODED_WIDTH, ep.ep_size.CODED_HEIGHT); - DEB("md: after ep: res: %dx%d\n", md->width, md->height); + VTRACE("ep: res: %dx%d\n", ep.ep_size.CODED_WIDTH, ep.ep_size.CODED_HEIGHT); + VTRACE("md: after ep: res: %dx%d\n", md->width, md->height); return status; } diff --git a/mixvbp/vbp_plugin/vc1/vc1parse.h b/mixvbp/vbp_plugin/vc1/vc1parse.h index e8c5167..619c4fb 100755 --- a/mixvbp/vbp_plugin/vc1/vc1parse.h +++ b/mixvbp/vbp_plugin/vc1/vc1parse.h @@ -13,6 +13,7 @@ #ifndef _VC1PARSE_H_ #define _VC1PARSE_H_ #include +#include #include "viddec_parser_ops.h" #include "vc1.h" @@ -20,17 +21,6 @@ /** @ingroup vc1parse_defs */ /*@{*/ -/* This macro gets the next less-than-nine bits from the bitstream. It is -assumed that numBits is less than ten. */ -#ifdef VC1_VERBOSE -#include -#define AUTO_TRACE OS_INFO("trace: %s\n", __FUNCTION__) -#define DEBUGBITS(arg1, args ...) OS_INFO( arg1, ## args) -#else -#define AUTO_TRACE -#define DEBUGBITS(...) -#endif - extern void *memset(void *s, int32_t c, uint32_t n); /* This macro gets the next numBits from the bitstream. */ @@ -38,27 +28,21 @@ extern void *memset(void *s, int32_t c, uint32_t n); #define VC1_GET_BITS9(numBits, value) \ { uint32_t __tmp__; \ viddec_pm_get_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \ - value = __tmp__;\ - DEBUGBITS("BIT:%40s= 0x%x\tNo. 
of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \ + value = __tmp__; \ } #define VC1_PEEK_BITS(numBits, value) \ { uint32_t __tmp__; \ viddec_pm_peek_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \ - value = __tmp__;\ - DEBUGBITS("PEEK%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \ + value = __tmp__; \ } /* This macro asserts if the condition is not true. */ -#ifdef VC1_VERBOSE #define VC1_ASSERT(condition) \ { \ if (! (condition)) \ - OS_INFO("Failed " #condition "!\n"); \ + ETRACE("Failed " #condition "!\n"); \ } -#else -#define VC1_ASSERT(condition) -#endif /*@}*/ @@ -73,7 +57,6 @@ extern const int32_t VC1_BITPLANE_K_TBL[]; extern const int32_t VC1_BFRACTION_TBL[]; extern const int32_t VC1_REFDIST_TBL[]; -void vc1_end_frame(vc1_viddec_parser_t *parser); /* Top-level functions to parse bitstream layers for rcv format. */ vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo); @@ -125,11 +108,6 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, uint32_t width, uint3 vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable); vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable, int8_t *pFirst, int16_t *pSecond); -void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser); -void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser); -void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser); - - /*@}*/ #endif /* _VC1PARSE_H_. */ diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c b/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c index 94e4d42..d2f2e7c 100755 --- a/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c +++ b/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c @@ -11,10 +11,7 @@ */ #include "vc1parse.h" - -#ifdef VBP #include "viddec_pm.h" -#endif /*----------------------------------------------------------------------------*/ @@ -545,7 +542,6 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, } -#ifdef VBP { viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)ctxt; vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)(cxt->codec_data); @@ -693,7 +689,6 @@ vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, break; } } -#endif return status; } diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_common_defs.h b/mixvbp/vbp_plugin/vc1/vc1parse_common_defs.h index e474800..faed472 100755 --- a/mixvbp/vbp_plugin/vc1/vc1parse_common_defs.h +++ b/mixvbp/vbp_plugin/vc1/vc1parse_common_defs.h @@ -76,13 +76,8 @@ extern "C" { enum { VC1_MVMODE_1MV, -#ifdef VBP VC1_MVMODE_HPELBI_1MV, VC1_MVMODE_HPEL_1MV, -#else - VC1_MVMODE_HPEL_1MV, - VC1_MVMODE_HPELBI_1MV, -#endif VC1_MVMODE_MIXED_MV, VC1_MVMODE_INTENSCOMP }; @@ -141,26 +136,23 @@ extern "C" { uint32_t *databits; } vc1_Bitplane; -#ifdef VBP #define VC1_MAX_HRD_NUM_LEAKY_BUCKETS 32 typedef struct { - uint32_t HRD_RATE; /** Maximum bit rate in bits per second */ - uint32_t HRD_BUFFER; /** Buffer size in bits */ - uint32_t HRD_FULLNESS; /** Buffer fullness in complete bits */ - uint32_t HRD_FULLFRACTION; /** Numerator of fractional bit buffer fullness count */ - uint32_t HRD_FULLDENOMINATOR; /** Denominator of fractional bit buffer fullness count */ + uint32_t HRD_RATE; /** Maximum bit rate in bits per second */ + uint32_t HRD_BUFFER; /** Buffer size in bits */ + uint32_t HRD_FULLNESS; /** Buffer fullness in complete bits */ + uint32_t HRD_FULLFRACTION; /** Numerator of fractional bit buffer fullness count */ + uint32_t HRD_FULLDENOMINATOR; /** 
Denominator of fractional bit buffer fullness count */ } vc1_leaky_bucket; typedef struct _vc1_hrd_state { - uint8_t BIT_RATE_EXPONENT; /** Buckets - (0 if none specified) */ - uint8_t BUFFER_SIZE_EXPONENT; - vc1_leaky_bucket sLeakyBucket[VC1_MAX_HRD_NUM_LEAKY_BUCKETS]; /** Per-bucket information */ + uint8_t BIT_RATE_EXPONENT; /** Buckets (0 if none specified) */ + uint8_t BUFFER_SIZE_EXPONENT; + vc1_leaky_bucket sLeakyBucket[VC1_MAX_HRD_NUM_LEAKY_BUCKETS]; /** Per-bucket information */ } vc1_hrd_state, *vc1_hrd_state_ptr; -#endif /** This structure represents all bitstream metadata needed for register programming. */ typedef struct @@ -227,7 +219,6 @@ extern "C" { uint16_t widthMB; uint16_t heightMB; -#ifdef VBP uint8_t COLOR_FORMAT_FLAG; uint8_t MATRIX_COEF; uint8_t SYNCMARKER; @@ -236,7 +227,6 @@ extern "C" { uint8_t ASPECT_HORIZ_SIZE; uint8_t ASPECT_VERT_SIZE; vc1_hrd_state hrd_initial_state; -#endif } vc1_metadata_t; @@ -245,7 +235,6 @@ extern "C" { { union { -#ifndef MFDBIGENDIAN struct { unsigned BITRTQ_POSTPROC:5; @@ -255,23 +244,12 @@ extern "C" { unsigned PROFILE:2; unsigned pad:17; } seq_flags; -#else - struct - { - unsigned pad:17; - unsigned PROFILE:2; - unsigned LEVEL:3; - unsigned COLORDIFF_FORMAT:2; - unsigned FRMRTQ_POSTPROC:3; - unsigned BITRTQ_POSTPROC:5; - } seq_flags; -#endif + uint32_t flags; }; union { -#ifndef MFDBIGENDIAN struct { unsigned DISPLAY_EXT:1; @@ -285,27 +263,12 @@ extern "C" { unsigned MAX_CODED_WIDTH:12; unsigned POSTPROCFLAG:1; } seq_max_size; -#else - struct - { - unsigned POSTPROCFLAG:1; - unsigned MAX_CODED_WIDTH:12; - unsigned MAX_CODED_HEIGHT:12; - unsigned PULLDOWN:1; - unsigned INTERLACE:1; - unsigned TFCNTRFLAG:1; - unsigned FINTERPFLAG:1; - unsigned RESERVED:1; - unsigned PSF:1; - unsigned DISPLAY_EXT:1; - } seq_max_size; -#endif + uint32_t max_size; }; union { -#ifndef MFDBIGENDIAN struct { unsigned ASPECT_RATIO_FLAG:1; @@ -313,15 +276,7 @@ extern "C" { unsigned DISP_HORIZ_SIZE:14; unsigned pad:3; } seq_disp_size; -#else - struct - { - unsigned pad:3; - unsigned DISP_HORIZ_SIZE:14; - unsigned DISP_VERT_SIZE:14; - unsigned ASPECT_RATIO_FLAG:1; - } seq_disp_size; -#endif + uint32_t disp_size; }; @@ -329,21 +284,13 @@ extern "C" { union { -#ifndef MFDBIGENDIAN struct { unsigned ASPECT_VERT_SIZE:8; unsigned ASPECT_HORIZ_SIZE:8; unsigned pad:16; } seq_aspect_size; -#else - struct - { - unsigned pad:16; - unsigned ASPECT_HORIZ_SIZE:8; - unsigned ASPECT_VERT_SIZE:8; - } seq_aspect_size; -#endif + uint32_t aspect_size; }; @@ -352,21 +299,13 @@ extern "C" { union { -#ifndef MFDBIGENDIAN struct { unsigned FRAMERATEDR:4; unsigned FRAMERATENR:8; unsigned pad:20; } seq_framerate_fraction; -#else - struct - { - unsigned pad:20; - unsigned FRAMERATENR:8; - unsigned FRAMERATEDR:4; - } seq_framerate_fraction; -#endif + uint32_t framerate_fraction; }; @@ -375,7 +314,6 @@ extern "C" { union { -#ifndef MFDBIGENDIAN struct { unsigned MATRIX_COEF:8; @@ -383,15 +321,7 @@ extern "C" { unsigned COLOR_PRIM:8; unsigned pad:8; } seq_color_format; -#else - struct - { - unsigned pad:8; - unsigned COLOR_PRIM:8; - unsigned TRANSFER_CHAR:8; - unsigned MATRIX_COEF:8; - } seq_color_format; -#endif + uint32_t color_format; }; @@ -405,7 +335,6 @@ extern "C" { { union { -#ifndef MFDBIGENDIAN struct { unsigned res6:1; @@ -428,48 +357,18 @@ extern "C" { unsigned FRMRTQ_POSTPROC:3; unsigned PROFILE:4; } struct_c; -#else - struct - { - unsigned PROFILE:4; - unsigned FRMRTQ_POSTPROC:3; - unsigned BITRTQ_POSTPROC:5; - unsigned LOOPFILTER:1; - unsigned res3:1; - unsigned MULTIRES:1; - 
unsigned res4:1; - unsigned FASTUVMC:1; - unsigned EXTENDED_MV:1; - unsigned DQUANT:2; - unsigned VSTRANSFORM:1; - unsigned res5:1; - unsigned OVERLAP:1; - unsigned SYNCMARKER:1; - unsigned RANGERED:1; - unsigned MAXBFRAMES:3; - unsigned QUANTIZER:2; - unsigned FINTERPFLAG:1; - unsigned res6:1; - } struct_c; -#endif + uint32_t struct_c_rcv; }; union { -#ifndef MFDBIGENDIAN struct { unsigned VERT_SIZE:16; unsigned HORIZ_SIZE:16; } struct_a; -#else - struct - { - unsigned HORIZ_SIZE:16; - unsigned VERT_SIZE:16; - } struct_a; -#endif + uint32_t struct_a_rcv; }; @@ -480,7 +379,6 @@ extern "C" { { union { -#ifndef MFDBIGENDIAN struct { unsigned QUANTIZER:2; @@ -496,23 +394,7 @@ extern "C" { unsigned BROKEN_LINK:1; unsigned pad1:19; } ep_flags; -#else - struct - { - unsigned pad1:19; - unsigned BROKEN_LINK:1; - unsigned CLOSED_ENTRY:1; - unsigned PANSCAN_FLAG:1; - unsigned REFDIST_FLAG:1; - unsigned LOOPFILTER:1; - unsigned FASTUVMC:1; - unsigned EXTENDED_MV:1; - unsigned DQUANT:2; - unsigned VSTRANSFORM:1; - unsigned OVERLAP:1; - unsigned QUANTIZER:2; - } ep_flags; -#endif + uint32_t flags; }; @@ -520,21 +402,13 @@ extern "C" { union { -#ifndef MFDBIGENDIAN struct { unsigned CODED_HEIGHT:12; unsigned CODED_WIDTH:12; unsigned pad2:8; } ep_size; -#else - struct - { - unsigned pad2:8; - unsigned CODED_WIDTH:12; - unsigned CODED_HEIGHT:12; - } ep_size; -#endif + uint32_t size; }; @@ -614,7 +488,6 @@ extern "C" { uint8_t BottomField; uint32_t UniformQuant; -#ifdef VBP uint8_t raw_MVTYPEMB; uint8_t raw_DIRECTMB; uint8_t raw_SKIPMB; @@ -631,8 +504,7 @@ extern "C" { vc1_Bitplane OVERFLAGS; vc1_Bitplane FORWARDMB; uint32_t ALTPQUANT; - uint8_t DQDBEDGE; -#endif + uint8_t DQDBEDGE; } vc1_PictureLayerHeader; diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_ipic_adv.c b/mixvbp/vbp_plugin/vc1/vc1parse_ipic_adv.c index 4e98d96..f437624 100755 --- a/mixvbp/vbp_plugin/vc1/vc1parse_ipic_adv.c +++ b/mixvbp/vbp_plugin/vc1/vc1parse_ipic_adv.c @@ -11,6 +11,7 @@ // */ +#include #include "vc1parse.h" /*------------------------------------------------------------------------------ * Parse picture layer. 
This function parses progressive I or BI picture for @@ -175,7 +176,7 @@ vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInf VC1_GET_BITS9(5, picLayerHeader->PQINDEX); if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) { - DEB("Error parsing I field \n"); + ETRACE("Error parsing I field \n"); return status; } @@ -198,7 +199,7 @@ vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInf md->widthMB, (md->heightMB+1)/2, BPP_ACPRED)) != VC1_STATUS_OK) { - DEB("Error parsing I field \n"); + ETRACE("Error parsing I field \n"); return status; } @@ -219,7 +220,7 @@ vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInf (md->heightMB+1)/2, BPP_OVERFLAGS)) != VC1_STATUS_OK) { - DEB("Error parsing I field \n"); + ETRACE("Error parsing I field \n"); return status; } } @@ -246,7 +247,7 @@ vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInf status = vc1_VOPDQuant(ctxt, pInfo); if (status != VC1_STATUS_OK) { - DEB("Error parsing I field \n"); + ETRACE("Error parsing I field \n"); return status; } diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_pic_com_adv.c b/mixvbp/vbp_plugin/vc1/vc1parse_pic_com_adv.c index dcfdb26..f37baf8 100755 --- a/mixvbp/vbp_plugin/vc1/vc1parse_pic_com_adv.c +++ b/mixvbp/vbp_plugin/vc1/vc1parse_pic_com_adv.c @@ -298,9 +298,7 @@ vc1_Status vc1_ParseFieldHeader_Adv(void* ctxt, vc1_Info *pInfo) } VC1_GET_BITS9(1, md->RNDCTRL); -#ifdef VBP picLayerHeader->RNDCTRL = md->RNDCTRL; -#endif VC1_GET_BITS9(1, picLayerHeader->UVSAMP); diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_ppic.c b/mixvbp/vbp_plugin/vc1/vc1parse_ppic.c index b921af0..8fb7118 100755 --- a/mixvbp/vbp_plugin/vc1/vc1parse_ppic.c +++ b/mixvbp/vbp_plugin/vc1/vc1parse_ppic.c @@ -93,11 +93,7 @@ vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInf VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); } else -#ifdef VBP picLayerHeader->MVMODE2 = 0; -#else - picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; -#endif if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) || ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) && diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_ppic_adv.c b/mixvbp/vbp_plugin/vc1/vc1parse_ppic_adv.c index 99edf6f..bab3e9f 100755 --- a/mixvbp/vbp_plugin/vc1/vc1parse_ppic_adv.c +++ b/mixvbp/vbp_plugin/vc1/vc1parse_ppic_adv.c @@ -10,6 +10,8 @@ // profile bitstream. 
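//
// (A worked expansion, for reference: the field reads below use the
// VC1_GET_BITS9() macro from vc1parse.h, so a call such as
//     VC1_GET_BITS9(3, picLayerHeader->MBMODETAB);
// expands to nothing more than
//     { uint32_t __tmp__;
//       viddec_pm_get_bits(ctxt, (uint32_t*)&__tmp__, 3);
//       picLayerHeader->MBMODETAB = __tmp__; }
// i.e. it consumes the next 3 bits through the bitstream-manager context
// "ctxt" in scope and assigns them to the header field, with no other
// side effects.)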
// */ + +#include #include #include "vc1parse.h" /*------------------------------------------------------------------------------ @@ -67,11 +69,7 @@ vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info * md->LUMSHIFT2 = picLayerHeader->LUMSHIFT; } else -#ifdef VBP picLayerHeader->MVMODE2 = 0; -#else - picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; -#endif if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) || ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) && @@ -245,7 +243,7 @@ vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInf } if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) { - DEB("Error in vc1_MVRangeDecode \n"); + ETRACE("Error in vc1_MVRangeDecode \n"); return status; } @@ -311,11 +309,7 @@ vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInf } } else -#ifdef VBP picLayerHeader->MVMODE2 = 0; -#else - picLayerHeader->MVMODE2 = picLayerHeader->MVMODE; -#endif VC1_GET_BITS9(3, picLayerHeader->MBMODETAB); @@ -330,11 +324,7 @@ vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInf VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */ -#ifdef VBP if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) -#else - if (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV) -#endif { VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */ } diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_vopdq.c b/mixvbp/vbp_plugin/vc1/vc1parse_vopdq.c index 95b556c..00d6640 100755 --- a/mixvbp/vbp_plugin/vc1/vc1parse_vopdq.c +++ b/mixvbp/vbp_plugin/vc1/vc1parse_vopdq.c @@ -57,11 +57,7 @@ vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo) } else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_DBLEDGES) { -#ifdef VBP VC1_GET_BITS9(2, picLayerHeader->DQDBEDGE); -#else - VC1_GET_BITS9(2, picLayerHeader->DQSBEDGE); /* DQDBEDGE. */ -#endif } else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_ALLMBLKS) { @@ -78,7 +74,6 @@ vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo) } } } -#ifdef VBP if ((picLayerHeader->DQUANTFRM == 1 && md->DQUANT == 1) || (md->DQUANT == 2)) { if (picLayerHeader->PQDIFF == 7) @@ -90,7 +85,6 @@ vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo) picLayerHeader->ALTPQUANT = picLayerHeader->PQUANT + picLayerHeader->PQDIFF + 1; } } -#endif return status; } diff --git a/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c b/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c index ad83a18..b3023ec 100755 --- a/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c +++ b/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c @@ -1,18 +1,13 @@ -#include "viddec_parser_ops.h" // For parser helper functions +#include #include "vc1.h" // For the parser structure +#include "viddec_parser_ops.h" // For parser helper functions #include "vc1parse.h" // For vc1 parser helper functions -#ifdef VBP #include "viddec_pm.h" -#endif #define vc1_is_frame_start_code( ch ) \ (( vc1_SCField == ch ||vc1_SCSlice == ch || vc1_SCFrameHeader == ch ) ? 
1 : 0) /* init function */ -#ifdef VBP void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) -#else -static void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) -#endif { vc1_viddec_parser_t *parser = ctxt; int i; @@ -22,17 +17,9 @@ static void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve for (i=0; iref_frame[i].id = -1; /* first I frame checks that value */ - parser->ref_frame[i].anchor[0] = 1; - parser->ref_frame[i].anchor[1] = 1; - parser->ref_frame[i].intcomp_top = 0; - parser->ref_frame[i].intcomp_bot = 0; parser->ref_frame[i].tff=0; } - parser->intcomp_top[0] = 0; - parser->intcomp_bot[0] = 0; - parser->intcomp_top[1] = 0; - parser->intcomp_bot[1] = 0; parser->is_reference_picture = false; memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader)); @@ -40,39 +27,22 @@ static void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve if (preserve) { parser->sc_seen &= VC1_EP_MASK; - parser->sc_seen_since_last_wkld &= VC1_EP_MASK; } else { parser->sc_seen = VC1_SC_INVALID; - parser->sc_seen_since_last_wkld = VC1_SC_INVALID; memset(&parser->info.metadata, 0, sizeof(parser->info.metadata)); } return; } // viddec_vc1_init -static void vc1_swap_intcomp(vc1_viddec_parser_t *parser) -{ - parser->intcomp_top[1] = parser->intcomp_top[0]; - parser->intcomp_bot[1] = parser->intcomp_bot[0]; - parser->intcomp_top[0] = 0; - parser->intcomp_bot[0] = 0; - - return; -} // vc1_swap_intcomp - -#ifdef VBP uint32_t viddec_vc1_parse(void *parent, void *ctxt) -#else -static uint32_t viddec_vc1_parse(void *parent, void *ctxt) -#endif { vc1_viddec_parser_t *parser = ctxt; uint32_t sc=0x0; int32_t ret=0, status=0; -#ifdef VBP /* This works only if there is one slice and no start codes */ /* A better fix would be to insert start codes it there aren't any. */ ret = viddec_pm_peek_bits(parent, &sc, 32); @@ -86,22 +56,13 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) /* that this is a frame of data. We may have to fix this later. 
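   For orientation: in the advanced profile each BDU begins with a
   0x000001 prefix followed by a single start-code suffix byte (see the
   start-code table in SMPTE 421M), e.g. 0x0F sequence header, 0x0E entry
   point, 0x0D frame, 0x0C field, 0x0B slice, 0x0A end of sequence. That
   is why the code below masks the peeked word down to its low byte,
       sc = sc & 0xFF;
   before switching on it.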
*/ sc = vc1_SCFrameHeader; } -#else - ret = viddec_pm_get_bits(parent, &sc, 32); -#endif sc = sc & 0xFF; - parser->is_frame_start = 0; - parser->is_second_start = 0; - DEB("START_CODE = %02x\n", sc); + VTRACE("START_CODE = %02x\n", sc); switch ( sc ) { case vc1_SCSequenceHeader: { uint32_t data; - parser->ref_frame[0].anchor[0] = 1; - parser->ref_frame[0].anchor[1] = 1; - parser->ref_frame[1].anchor[0] = 1; - parser->ref_frame[1].anchor[1] = 1; memset( &parser->info.metadata, 0, sizeof(parser->info.metadata)); /* look if we have a rcv header for main or simple profile */ ret = viddec_pm_peek_bits(parent,&data ,2); @@ -115,8 +76,6 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) status = vc1_ParseRCVSequenceLayer(parent, &parser->info); } parser->sc_seen = VC1_SC_SEQ; - parser->sc_seen_since_last_wkld |= VC1_SC_SEQ; -#ifdef VBP parser->start_code = VC1_SC_SEQ; if (parser->info.metadata.HRD_NUM_LEAKY_BUCKETS == 0) { @@ -167,7 +126,6 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) } } -#endif break; } @@ -177,10 +135,7 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) parser->sc_seen |= VC1_SC_EP; // Clear all bits indicating data below ep header parser->sc_seen &= VC1_EP_MASK; - parser->sc_seen_since_last_wkld |= VC1_SC_EP; -#ifdef VBP parser->start_code = VC1_SC_EP; -#endif break; } @@ -188,37 +143,18 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) { memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader)); status = vc1_ParsePictureLayer(parent, &parser->info); - if ((parser->info.picLayerHeader.PTypeField1 == VC1_I_FRAME) || - (parser->info.picLayerHeader.PTypeField1 == VC1_P_FRAME) || - (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) || - (parser->info.picLayerHeader.PTYPE == VC1_P_FRAME)) - { - vc1_swap_intcomp(parser); - } parser->sc_seen |= VC1_SC_FRM; // Clear all bits indicating data below frm header parser->sc_seen &= VC1_FRM_MASK; - parser->sc_seen_since_last_wkld |= VC1_SC_FRM; - //vc1_start_new_frame ( parent, parser ); - - parser->is_frame_start = 1; - vc1_parse_emit_frame_start( parent, parser ); -#ifdef VBP parser->start_code = VC1_SC_FRM; -#endif break; } case vc1_SCSlice: { status = vc1_ParseSliceLayer(parent, &parser->info); - parser->sc_seen_since_last_wkld |= VC1_SC_SLC; - - vc1_parse_emit_current_slice( parent, parser ); -#ifdef VBP parser->start_code = VC1_SC_SLC; -#endif break; } @@ -234,7 +170,6 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) parser->info.picLayerHeader.MVTAB = 0; parser->info.picLayerHeader.MVMODE = 0; parser->info.picLayerHeader.MVRANGE = 0; -#ifdef VBP parser->info.picLayerHeader.raw_MVTYPEMB = 0; parser->info.picLayerHeader.raw_DIRECTMB = 0; parser->info.picLayerHeader.raw_SKIPMB = 0; @@ -253,22 +188,10 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) parser->info.picLayerHeader.ALTPQUANT = 0; parser->info.picLayerHeader.DQDBEDGE = 0; -#endif status = vc1_ParseFieldLayer(parent, &parser->info); - if ((parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME) || - (parser->info.picLayerHeader.PTypeField2 == VC1_P_FRAME)) - { - //vc1_swap_intcomp(parser); - } parser->sc_seen |= VC1_SC_FLD; - parser->sc_seen_since_last_wkld |= VC1_SC_FLD; - - parser->is_second_start = 1; - vc1_parse_emit_second_field_start( parent, parser ); -#ifdef VBP parser->start_code = VC1_SC_FLD; -#endif break; } @@ -278,123 +201,30 @@ static uint32_t viddec_vc1_parse(void *parent, void *ctxt) case vc1_SCSliceUser: case vc1_SCFieldUser: {/* Handle user data */ - 
parser->sc_seen_since_last_wkld |= VC1_SC_UD;
-#ifdef VBP
             parser->start_code = VC1_SC_UD;
-#endif
             break;
         }

         case vc1_SCEndOfSequence:
         {
             parser->sc_seen = VC1_SC_INVALID;
-            parser->sc_seen_since_last_wkld |= VC1_SC_INVALID;
-#ifdef VBP
             parser->start_code = VC1_SC_INVALID;
-#endif
             break;
         }

         default: /* Any other SC that is not handled */
         {
-            DEB("SC = %02x - unhandled\n", sc );
-#ifdef VBP
+            WTRACE("SC = %02x - unhandled\n", sc );
             parser->start_code = VC1_SC_INVALID;
-#endif
             break;
         }
     }
-
-
     return VIDDEC_PARSE_SUCESS;
 } // viddec_vc1_parse

-/**
-   If a picture header was seen and the next start code is a sequence header, entrypoint header,
-   end of sequence or another frame header, this api returns frame done.
-   If a sequence header and a frame header was not seen before this point, all the
-   information needed for decode is not present and parser errors are reported.
-*/
-#ifdef VBP
-uint32_t viddec_vc1_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors)
-#else
-static uint32_t viddec_vc1_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors)
-#endif
-{
-    vc1_viddec_parser_t *parser = ctxt;
-    int ret = VIDDEC_PARSE_SUCESS;
-    parent = parent;
-    switch (next_sc)
-    {
-    case vc1_SCFrameHeader:
-        if (((parser->sc_seen_since_last_wkld & VC1_SC_EP) ||
-             (parser->sc_seen_since_last_wkld & VC1_SC_SEQ)) &&
-            (!(parser->sc_seen_since_last_wkld & VC1_SC_FRM)))
-        {
-            break;
-        }
-        // Deliberate fall-thru case
-    case vc1_SCEntryPointHeader:
-        if ((next_sc == vc1_SCEntryPointHeader) &&
-            (parser->sc_seen_since_last_wkld & VC1_SC_SEQ) &&
-            (!(parser->sc_seen_since_last_wkld & VC1_SC_EP)))
-        {
-            break;
-        }
-        // Deliberate fall-thru case
-    case vc1_SCSequenceHeader:
-    case vc1_SCEndOfSequence:
-    case VIDDEC_PARSE_EOS:
-    case VIDDEC_PARSE_DISCONTINUITY:
-        ret = VIDDEC_PARSE_FRMDONE;
-        // Set errors for progressive
-        if ((parser->sc_seen & VC1_SC_SEQ) && (parser->sc_seen & VC1_SC_FRM))
-            *codec_specific_errors = 0;
-        else
-            *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
-        vc1_end_frame(parser);
-        parser->sc_seen_since_last_wkld = VC1_SC_INVALID;
-        // TODO: Need to check for interlaced
-        break;
-    default:
-        ret = VIDDEC_PARSE_SUCESS;
-        break;
-    } //switch
-    DEB("sc: 0x%x, sc_seen: 0x%x, sc_since_last_wkld:%d, error:%d, ret: %d\n",
-        next_sc, parser->sc_seen, parser->sc_seen_since_last_wkld,
-        *codec_specific_errors, ret);
-
-    return ret;
-} // viddec_vc1_wkld_done
-
-#ifdef VBP
 void viddec_vc1_get_context_size(viddec_parser_memory_sizes_t *size)
-#else
-static void viddec_vc1_get_context_size(viddec_parser_memory_sizes_t *size)
-#endif
 {
     size->context_size = sizeof(vc1_viddec_parser_t);
     size->persist_size = 0;
     return;
 } // viddec_vc1_get_context_size
-
-#ifdef VBP
-uint32_t viddec_vc1_is_start_frame(void *ctxt)
-#else
-static uint32_t viddec_vc1_is_start_frame(void *ctxt)
-#endif
-{
-    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *) ctxt;
-    return parser->is_frame_start;
-} // viddec_vc1_is_start_frame
-
-void viddec_vc1_get_ops(viddec_parser_ops_t *ops)
-{
-    ops->init = viddec_vc1_init;
-    ops->parse_syntax = viddec_vc1_parse;
-    ops->get_cxt_size = viddec_vc1_get_context_size;
-    ops->is_wkld_done = viddec_vc1_wkld_done;
-    ops->is_frame_start = viddec_vc1_is_start_frame;
-    return;
-} // viddec_vc1_get_ops
-
--
cgit v1.2.3

From 6962f253e3a95605b5111bb4788827df796b4d61 Mon Sep 17 00:00:00 2001
From: gji2
Date: Tue, 15 Oct 2013 01:38:15 +0800
Subject: refine the log function in the libmix encoder, dynamically enable the
 log function
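
The switch is a single global gate that the LOG_* macros test at run time.
A minimal sketch of the idea (the helper name initLibmixLogLevel and the
standalone layout are illustrative only, not part of this patch):

    // C++ (Android): gate logging on the "libmix.debug" system property
    #define LOG_TAG "VideoEncoder"
    #include <stdint.h>              // int32_t
    #include <stdlib.h>              // atoi()
    #include <cutils/properties.h>   // property_get(), PROPERTY_VALUE_MAX
    #include <cutils/log.h>          // LOGD, LOGV_IF, LOGE_IF, ...

    int32_t gLogLevel = 0;           // consulted by LOG_V/I/W/E at each call site

    static void initLibmixLogLevel() {
        char prop[PROPERTY_VALUE_MAX];
        // "adb shell setprop libmix.debug 1" enables logging at run time,
        // "adb shell setprop libmix.debug 0" disables it; no rebuild needed.
        if (property_get("libmix.debug", prop, NULL) > 0) {
            gLogLevel = atoi(prop);
            LOGD("Debug level is %d", gLogLevel);
        }
    }

With LOG_V and friends defined as LOGV_IF(gLogLevel, ...), disabled logging
costs one integer test per call site instead of being compiled out, which is
what makes the setprop toggle possible.
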
BZ: 145154

Enable the log function dynamically:
Enable:  adb shell setprop libmix.debug 1
Disable: adb shell setprop libmix.debug 0

Change-Id: I73fcf982123f548fa974810987020bddff148625
Signed-off-by: gji2
Reviewed-on: http://android.intel.com:8080/138728
Reviewed-by: Shi, PingX
Tested-by: Shi, PingX
Reviewed-by: cactus
Tested-by: cactus
---
 videoencoder/Android.mk           |  1 -
 videoencoder/VideoEncoderHost.cpp | 16 +++++++++++++---
 videoencoder/VideoEncoderLog.h    | 35 +++++++++--------------------------
 3 files changed, 22 insertions(+), 30 deletions(-)

diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk
index 6b8b3a5..5821c5c 100644
--- a/videoencoder/Android.mk
+++ b/videoencoder/Android.mk
@@ -1,7 +1,6 @@
 LOCAL_PATH := $(call my-dir)
 include $(CLEAR_VARS)

-#VIDEO_ENC_LOG_ENABLE := true
 #NO_BUFFER_SHARE := true

 ifeq ($(ENABLE_IMG_GRAPHICS),)

diff --git a/videoencoder/VideoEncoderHost.cpp b/videoencoder/VideoEncoderHost.cpp
index f636d9c..00a8e40 100644
--- a/videoencoder/VideoEncoderHost.cpp
+++ b/videoencoder/VideoEncoderHost.cpp
@@ -14,13 +14,23 @@
 #include "PVSoftMPEG4Encoder.h"
 #endif
 #include "VideoEncoderHost.h"
-#include "VideoEncoderLog.h"
 #include
+#include
+#include
+
+int32_t gLogLevel = 0;

 IVideoEncoder *createVideoEncoder(const char *mimeType) {
+    char logLevelProp[PROPERTY_VALUE_MAX];
+
+    if (property_get("libmix.debug", logLevelProp, NULL)) {
+        gLogLevel = atoi(logLevelProp);
+        LOGD("Debug level is %d", gLogLevel);
+    }
+
     if (mimeType == NULL) {
-        LOG_E("NULL mime type");
+        LOGE("NULL mime type");
         return NULL;
     }

@@ -47,7 +57,7 @@ IVideoEncoder *createVideoEncoder(const char *mimeType) {
         VideoEncoderVP8 *p = new VideoEncoderVP8();
         return (IVideoEncoder *)p;
     } else {
-        LOG_E ("Unknown mime type: %s", mimeType);
+        LOGE ("Unknown mime type: %s", mimeType);
     }
     return NULL;
 }

diff --git a/videoencoder/VideoEncoderLog.h b/videoencoder/VideoEncoderLog.h
index 3b8910d..8176489 100644
--- a/videoencoder/VideoEncoderLog.h
+++ b/videoencoder/VideoEncoderLog.h
@@ -10,32 +10,14 @@
 #define __VIDEO_ENCODER_LOG_H__

 // Components
-#define VIDEO_ENC_COMP "VideoEncoder"
-
-#include
-
-#define VIDEO_ENC_LOG_LEVEL_ERROR ANDROID_LOG_ERROR
-#define VIDEO_ENC_LOG_LEVEL_WARNING ANDROID_LOG_WARN
-#define VIDEO_ENC_LOG_LEVEL_INFO ANDROID_LOG_INFO
-#define VIDEO_ENC_LOG_LEVEL_VERBOSE ANDROID_LOG_VERBOSE
-
-#define mix_log(comp, level, format, ...) \
-    __android_log_print(level, comp, "%s():%d: "format, \
-    __FUNCTION__, __LINE__, ##__VA_ARGS__)
-
-//#define VIDEO_ENC_LOG_ENABLE
-#if 1
-#ifdef VIDEO_ENC_LOG_ENABLE
-#define LOG_V(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__)
-#define LOG_I(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_INFO, format, ##__VA_ARGS__)
-#define LOG_W(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_WARNING, format, ##__VA_ARGS__)
-#else
-#define LOG_V(format, ...)
-#define LOG_I(format, ...)
-#define LOG_W(format, ...)
-#endif
-
-#define LOG_E(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_ERROR, format, ##__VA_ARGS__)
+#include
+
+#define LOG_TAG "VideoEncoder"
+#if 1
+#define LOG_V(...) LOGV_IF(gLogLevel, __VA_ARGS__)
+#define LOG_I(...) LOGI_IF(gLogLevel, __VA_ARGS__)
+#define LOG_W(...) LOGW_IF(gLogLevel, __VA_ARGS__)
+#define LOG_E(...) LOGE_IF(gLogLevel, __VA_ARGS__)
 #else
 #define LOG_V printf
 #define LOG_I printf
@@ -43,6 +25,7 @@
 #define LOG_E printf
 #endif

+extern int32_t gLogLevel;
 #define CHECK_VA_STATUS_RETURN(FUNC)\
     if (vaStatus != VA_STATUS_SUCCESS) {\
         LOG_E(FUNC" failed.
vaStatus = %d\n", vaStatus);\ -- cgit v1.2.3 From 42ac6cb290fc52da2785176ae78cb1d3b26aec6a Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Fri, 16 Aug 2013 15:36:51 +0800 Subject: Enable cross process buffer sharing BZ: 140092 Change-Id: Ia53b3a2306d2cc045a55e38b8c26336aa0f9de10 Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/125820 Reviewed-by: Yuan, Shengquan Reviewed-by: Hu, Gang A Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- test/Android.mk | 49 ++++ test/BSClient.cpp | 198 +++++++++++++ test/BSServer.cpp | 21 ++ test/mix_encoder2.cpp | 278 ++++++++++++------ videoencoder/Android.mk | 5 + videoencoder/IntelMetadataBuffer.cpp | 553 ++++++++++++++++++++++++++++++++++- videoencoder/IntelMetadataBuffer.h | 61 ++++ videoencoder/VideoEncoderBase.cpp | 13 +- videoencoder/VideoEncoderBase.h | 4 + 9 files changed, 1084 insertions(+), 98 deletions(-) create mode 100755 test/BSClient.cpp create mode 100755 test/BSServer.cpp diff --git a/test/Android.mk b/test/Android.mk index f443a06..dbd0d06 100644 --- a/test/Android.mk +++ b/test/Android.mk @@ -22,6 +22,55 @@ LOCAL_MODULE := btest include $(BUILD_EXECUTABLE) +# For intelmetadatabuffer cross-process buffersharing test +# ===================================================== + +include $(CLEAR_VARS) + +#VIDEO_ENC_LOG_ENABLE := true + +LOCAL_SRC_FILES := \ + BSServer.cpp + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH) \ + $(TARGET_OUT_HEADERS)/libmix_videoencoder \ + +LOCAL_SHARED_LIBRARIES := \ + libintelmetadatabuffer libutils libbinder + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := bserver + +include $(BUILD_EXECUTABLE) + +# For intelmetadatabuffer cross-process buffersharing test +# ===================================================== + +include $(CLEAR_VARS) + +#VIDEO_ENC_LOG_ENABLE := true + +LOCAL_SRC_FILES := \ + BSClient.cpp + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH) \ + $(TARGET_OUT_HEADERS)/libmix_videoencoder \ + +LOCAL_SHARED_LIBRARIES := \ + libintelmetadatabuffer libutils libbinder \ + libgui \ + libui \ + libutils \ + libcutils \ + libhardware \ + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := bclient + +include $(BUILD_EXECUTABLE) + # For mix_encoder # ===================================================== diff --git a/test/BSClient.cpp b/test/BSClient.cpp new file mode 100755 index 0000000..b368642 --- /dev/null +++ b/test/BSClient.cpp @@ -0,0 +1,198 @@ +#include "IntelMetadataBuffer.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace android; + +int main(int argc, char* argv[]) +{ + uint32_t tokenP = 0x80000000; + uint32_t tokenC = 0x40000000; + uint32_t tokenPC = 0xC0000000; + uint32_t token; +#ifdef INTEL_VIDEO_XPROC_SHARING + token = IntelMetadataBuffer::MakeSessionFlag(IntelMetadataBuffer::WEBRTC_BASE, true, false); +#endif + int memmode = 0; + int clearcontext = 1; + int waittime = 0; + int storefile = 0; + int isProvider = 1; + int service = 0; + + char c; + const char *short_opts = "a:b:c:d:e:f:g:h:i:j:k:l:m:n:o:p:q:r:s:t:q:u:v:w:x:y:z:?"; + + ProcessState::self()->startThreadPool(); + + while ((c = getopt_long(argc, argv, short_opts, NULL, NULL) ) != EOF) { + switch (c) { + case 't': + token = atoi(optarg); + break; + case 'm': + memmode = atoi(optarg); + break; + case 'c': + clearcontext = atoi(optarg); + break; + case 'w': + waittime = atoi(optarg); + break; + case 'f': + storefile = atoi(optarg); + break; + case 'p': + isProvider = atoi(optarg); + break; + case 
's': + service = atoi(optarg); + break; + } + } + + if (service > 0) { + printf("Setup Service ....\n"); +#ifdef INTEL_VIDEO_XPROC_SHARING + IntelBufferSharingService::instantiate(); +#endif + } + + if (isProvider) { + + printf("Working as Provider ...\n"); + + IntelMetadataBuffer *mb1; + IMB_Result ret; + int32_t value; + uint8_t* bytes; + uint32_t size; + ValueInfo vi; + + if (memmode == 0) { + sp Heap = new MemoryHeapBase(10000); + sp Buffer1 = new MemoryBase(Heap, 0, 1000); +// sp Buffer2 = new MemoryBase(Heap, 1000, 1000); + memset(Buffer1->pointer(), 0xAA, 1000); + + mb1 = new IntelMetadataBuffer(); + ret = mb1->SetType(MetadataBufferTypeCameraSource); +#ifdef INTEL_VIDEO_XPROC_SHARING + mb1->SetSessionFlag(token); + if ((ret = mb1->ShareValue(Buffer1)) != IMB_SUCCESS) { + printf("IntelMetadataBuffer shareValue MemBase ret = %d failed\n", ret); + return 1; + } +#else + mb1->SetValue((int32_t)Buffer1->pointer()); +#endif + ret = mb1->SetValueInfo(&vi); + ret = mb1->Serialize(bytes, size); + // mb1->GetValue(value); + printf("original MemBase1 pointer is %x\n", Buffer1->pointer()); + + } else { + sp composer(ComposerService::getComposerService()); + sp GraphicBufferAlloc = composer->createGraphicBufferAlloc(); + + uint32_t usage = GraphicBuffer::USAGE_SW_WRITE_OFTEN | GraphicBuffer::USAGE_HW_TEXTURE; + int format = 0x3231564E; // = HAL_PIXEL_FORMAT_NV12 + // int format = 0x7FA00E00; // = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar + int32_t error; + void* usrptr[3]; + + sp graphicBuffer(GraphicBufferAlloc->createGraphicBuffer( + 1280, 720, format, usage, &error)); + if (graphicBuffer.get() == NULL) + printf("create graphicbuffer failed\n"); + + status_t ret1=graphicBuffer->lock(usage, &usrptr[0]); + memset(usrptr[0], 0xAA, 4); + graphicBuffer->unlock(); + + mb1 = new IntelMetadataBuffer(); + ret = mb1->SetType(MetadataBufferTypeCameraSource); + +#ifdef INTEL_VIDEO_XPROC_SHARING + mb1->SetSessionFlag(token); + if ((ret = mb1->ShareValue(graphicBuffer)) != IMB_SUCCESS) { + printf("IntelMetadataBuffer shareValue graphicbuffer ret = %d failed\n", ret); + return 1; + } +#else + mb1->SetValue((int32_t)graphicBuffer->handle); +#endif + ret = mb1->SetValueInfo(&vi); + ret = mb1->Serialize(bytes, size); + + // mb1->GetValue(value); + printf("original graphicbuffer handle is %x\n", graphicBuffer->handle); + } + + if (storefile > 0) { + FILE* fp = fopen("/data/mdb.data", "wb+"); + if (fp != NULL) { + fwrite(bytes, 1, size, fp); + fclose(fp); + } + } + + delete mb1; + + }else { + + printf("Working as Consumer ...\n"); + + uint8_t bytes[128]; + uint32_t size; + + FILE* fp = fopen("/data/mdb.data", "rb"); + if (fp != NULL) + { + size = fread(bytes, 1, 128, fp); + fclose(fp); + } + + IntelMetadataBuffer mb1; + int32_t value; + IMB_Result res; + res = mb1.UnSerialize(bytes,size); + + if (IMB_SUCCESS == res) { + res = mb1.GetValue(value); + if (res != IMB_SUCCESS) + printf("Consumer GetValue failed, result=%x\n", res); + else + printf("Consumer get value =%x\n", value); + } else + printf("unserialize failed, result=%x\n", res); + + } + + if (waittime > 0) { + printf("waiting %d seconds .... \n", waittime); + sleep(waittime); + } + + if (clearcontext > 0) { + printf("Clearing %s Context ... \n", (isProvider > 0) ? 
"Provider":"Consumer"); +#ifdef INTEL_VIDEO_XPROC_SHARING + IntelMetadataBuffer::ClearContext(token, isProvider > 0); +#endif + } + + printf("Exit\n"); + return 1; +} + diff --git a/test/BSServer.cpp b/test/BSServer.cpp new file mode 100755 index 0000000..f3b822f --- /dev/null +++ b/test/BSServer.cpp @@ -0,0 +1,21 @@ +#include "IntelMetadataBuffer.h" +#include +#include +#include +#include +#include + +using namespace android; + +int main(int argc, char* argv[]) +{ + //start service + ProcessState::self()->startThreadPool(); +#ifdef INTEL_VIDEO_XPROC_SHARING + IntelBufferSharingService::instantiate(); +#endif + IPCThreadState::self()->joinThreadPool(); + return 1; +} + + diff --git a/test/mix_encoder2.cpp b/test/mix_encoder2.cpp index 3bdd07f..0fa9096 100755 --- a/test/mix_encoder2.cpp +++ b/test/mix_encoder2.cpp @@ -22,7 +22,7 @@ #include #include #include -#include +#include #include #include @@ -88,7 +88,6 @@ static int YUV_generator_planar(int width, int height, Y_row[jj] = 0xeb; if ((xpos == 1) && (ypos == 1)) Y_row[jj] = 0xeb; - if ((xpos == 1) && (ypos == 0)) Y_row[jj] = 0x10; if ((xpos == 0) && (ypos == 1)) @@ -109,7 +108,6 @@ static int YUV_generator_planar(int width, int height, memset (V_row,0x80,width/2); } } - row_shift += 8; // if (row_shift==BOX_WIDTH) row_shift = 0; @@ -126,25 +124,47 @@ static int YUV_generator_planar(int width, int height, class DummySource : public MediaSource { public: - DummySource(int width, int height, int stride, int nFrames, int fps, - bool metadata, const char* yuv) - : mWidth(width), - mHeight(height), - mStride(stride), - mMaxNumFrames(nFrames), - mFrameRate(fps), - mMetadata(metadata), - mYuvfile(yuv), - mYuvhandle(NULL){ + DummySource(const sp &meta, uint32_t flag) { + + bool success; + + success = meta->findInt32(kKeyWidth, &mWidth); + CHECK(success); + + success = meta->findInt32(kKeyHeight, &mHeight); + CHECK(success); + + success = meta->findInt32(kKeyStride, &mStride); + CHECK(success); + + success = meta->findInt32(kKeyFrameRate, &mFrameRate); + CHECK(success); + + success = meta->findInt32(kKeyColorFormat, &mColorFormat); + CHECK(success); + + success = meta->findCString('yuvf', &mYuvfile); + CHECK(success); + + success = meta->findInt32('fnum', &mMaxNumFrames); + CHECK(success); + + success = meta->findInt32('sflg', (int32_t*)&mSessionFlag); + CHECK(success); + + mYuvhandle = NULL; + + mMetadata = flag & OMXCodec::kStoreMetaDataInVideoBuffers; if (mMetadata) mSize = 128; else - mSize = mStride * mHeight * 3 /2; + mSize = mStride * mHeight * 3 / 2 ; for(int i=0; i getFormat() { @@ -163,12 +183,37 @@ public: gNumFramesOutput = 0; createResource (); - if (mYuvfile == NULL) { +#if 1 + { + int size= mStride * mHeight * 1.5; + void* tmp = malloc(size); + + int64_t start = systemTime(); + + for(int j=0; j<100; j++) { + for(int i=0; idata(), data, size); - (*buffer)->set_range(0, size); + size_t offset = 0; + if (mMetadata) + memcpy ((*buffer)->data(), data, size); + else { + offset = ((int)((*buffer)->data() + 0x0FFF) & ~0x0FFF) - (int)(*buffer)->data(); + memcpy ((*buffer)->data() + offset, data, size); + } + (*buffer)->set_range(offset, size); (*buffer)->meta_data()->clear(); (*buffer)->meta_data()->setInt64( kKeyTime, (gNumFramesOutput * 1000000) / mFrameRate); @@ -246,6 +297,10 @@ protected: virtual ~DummySource() { for(int i = 0; i < PRELOAD_FRAME_NUM; i ++) delete mIMB[i]; + +#ifdef INTEL_VIDEO_XPROC_SHARING + IntelMetadataBuffer::ClearContext(mSessionFlag, true); +#endif } public: @@ -259,6 +314,9 @@ public: int mFrameRate; int 
mColorFormat; size_t mSize; + unsigned int mSessionFlag; + const char* mTAG; + // int64_t mNumFramesOutput; DummySource(const DummySource &); @@ -275,8 +333,10 @@ public: class MallocSource : public DummySource { public: - MallocSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv) : - DummySource (width, height, stride, nFrames, fps, mdata, yuv) { + MallocSource(const sp &meta, uint32_t flag) : + DummySource (meta, flag) { + + mTAG = "Malloc"; } ~MallocSource() { @@ -306,7 +366,6 @@ public: //keep address 4K aligned mUsrptr[i] = (uint8_t*)((((uint32_t )mMallocPtr[i] + 4095) / 4096 ) * 4096); - mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t) mUsrptr[i]); mIMB[i]->SetValueInfo(&vinfo); // LOG("Malloc address=%x\n", mUsrptr[i]); @@ -324,8 +383,9 @@ private: class MemHeapSource : public DummySource { public: - MemHeapSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv) : - DummySource (width, height, stride, nFrames, fps, mdata, yuv) { + MemHeapSource(const sp &meta, uint32_t flag) : + DummySource (meta, flag) { + mTAG = "MemHeap"; } ~MemHeapSource() { @@ -358,9 +418,16 @@ public: mUsrptr[i] = (uint8_t*) ((int) (mBuffers[i]->pointer() + 0x0FFF) & ~0x0FFF); - mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t) mUsrptr[i]); + mIMB[i] = new IntelMetadataBuffer(); + mIMB[i]->SetType(MetadataBufferTypeCameraSource); +#ifdef INTEL_VIDEO_XPROC_SHARING + mIMB[i]->SetSessionFlag(mSessionFlag); + mIMB[i]->ShareValue(mBuffers[i]); +#else + mIMB[i]->SetValue((int32_t)mUsrptr[i]); +#endif mIMB[i]->SetValueInfo(&vinfo); - LOG("MemHeap address=%x\n", mUsrptr[i]); + LOG("MemHeap local address=%x\n", mUsrptr[i]); } return OK; @@ -393,9 +460,10 @@ extern "C" { class VASurfaceSource : public DummySource { public: - VASurfaceSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv, int mode) : - DummySource (width, height, stride, nFrames, fps, mdata, yuv) { + VASurfaceSource(const sp &meta, uint32_t flag, int mode) : + DummySource (meta, flag) { mMode = mode; + mTAG = "VASurface"; } virtual ~VASurfaceSource() { @@ -529,12 +597,13 @@ private: class GfxSource : public DummySource { public: - GfxSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv) : - DummySource (width, height, stride, nFrames, fps, mdata, yuv) { - mColor = 0; + GfxSource(const sp &meta, uint32_t flag) : + DummySource (meta, flag) { + mWidth = ((mWidth + 15 ) / 16 ) * 16; mHeight = ((mHeight + 15 ) / 16 ) * 16; mStride = mWidth; + mTAG = "Gfx"; } virtual ~GfxSource() { @@ -548,17 +617,24 @@ public: mGraphicBufferAlloc = composer->createGraphicBufferAlloc(); uint32_t usage = GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_WRITE_OFTEN;// | GraphicBuffer::USAGE_HW_COMPOSER; - int format = HAL_PIXEL_FORMAT_NV12; - if (mColor == 1) - format = 0x7FA00E00; // = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar in OMX_IVCommon.h - int32_t error; + ValueInfo vinfo; + memset(&vinfo, 0, sizeof(ValueInfo)); + vinfo.mode = MEM_MODE_GFXHANDLE; + vinfo.size = 0; + vinfo.width = mWidth; + vinfo.height = mHeight; + vinfo.lumaStride = mStride; + vinfo.chromStride = mStride; + vinfo.format = mColorFormat; + vinfo.s3dformat = 0xFFFFFFFF; + for(int i = 0; i < PRELOAD_FRAME_NUM; i ++) { sp graphicBuffer( mGraphicBufferAlloc->createGraphicBuffer( - mWidth, mHeight, format, usage, &error)); + mWidth, mHeight, mColorFormat, usage, &error)); if (graphicBuffer.get() 
== NULL) { printf("GFX createGraphicBuffer failed\n"); @@ -570,7 +646,16 @@ public: if (graphicBuffer->lock(usage, &vaddr[0]) != OK) return UNKNOWN_ERROR; - mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)mGraphicBuffer[i]->handle); + mUsrptr[i] = (uint8_t*)vaddr[0]; + mIMB[i] = new IntelMetadataBuffer(); + mIMB[i]->SetType(MetadataBufferTypeCameraSource); +#ifdef INTEL_VIDEO_XPROC_SHARING + mIMB[i]->SetSessionFlag(mSessionFlag); + mIMB[i]->ShareValue(mGraphicBuffer[i]); +#else + mIMB[i]->SetValue((int32_t)mGraphicBuffer[i]->handle); +#endif + mIMB[i]->SetValueInfo(&vinfo); graphicBuffer->unlock(); mUsrptr[i] = (uint8_t*)vaddr[0]; @@ -589,16 +674,14 @@ private: //for gfxhandle sp mGraphicBufferAlloc; sp mGraphicBuffer[PRELOAD_FRAME_NUM]; - - int mColor; }; class GrallocSource : public DummySource { public: - GrallocSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv) : - DummySource (width, height, stride, nFrames, fps, mdata, yuv) { - mColor = 0; + GrallocSource(const sp &meta, uint32_t flag) : + DummySource (meta, flag) { + mTAG = "Gralloc"; } virtual ~GrallocSource () { @@ -609,15 +692,12 @@ public: status_t createResource() { int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_WRITE_OFTEN; - int format = HAL_PIXEL_FORMAT_NV12; - if (mColor == 1) - format = 0x7FA00E00; // = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar in OMX_IVCommon.h gfx_init(); for(int i = 0; i < PRELOAD_FRAME_NUM; i ++) { - if (gfx_alloc(mWidth, mHeight, format, usage, &mHandle[i], (int32_t*)&mStride) != 0) + if (gfx_alloc(mWidth, mHeight, mColorFormat, usage, &mHandle[i], (int32_t*)&mStride) != 0) return UNKNOWN_ERROR; void* vaddr[3]; @@ -630,7 +710,6 @@ public: mStride = h->iWidth; mHeight = h->iHeight; } - return OK; } @@ -713,13 +792,12 @@ private: alloc_device_t *mAllocDev; /* get by gralloc_open */ buffer_handle_t mHandle[PRELOAD_FRAME_NUM]; - int mColor; }; class MixSurfaceMediaSource : public SurfaceMediaSource { public: - MixSurfaceMediaSource(int width, int height, int stride, int nFrames, int fps, bool mdata, const char* yuv) + MixSurfaceMediaSource(int width, int height, int nFrames, int fps) :SurfaceMediaSource(width, height){ mMaxNumFrames = nFrames; mFPS = fps; @@ -729,7 +807,7 @@ public: } status_t start(MetaData *params) { - mSTC = new SurfaceTextureClient(static_cast >(getBufferQueue())); + mSTC = new Surface(getBufferQueue()); mANW = mSTC; mRunning = true; @@ -759,7 +837,7 @@ public: int mFPS; private: - sp mSTC; + sp mSTC; sp mANW; pthread_t mThread; bool mRunning; @@ -812,6 +890,7 @@ public: mEncodeFrameCount = 0; mSource = source; + mMeta = meta; const char *mime; bool success = meta->findCString(kKeyMIMEType, &mime); CHECK(success); @@ -869,16 +948,18 @@ public: mRCMode = RC_MODES[rcmode]; } - virtual sp getFormat() { - sp meta = new MetaData; - meta->setInt32(kKeyWidth, mWidth); - meta->setInt32(kKeyHeight, mHeight); + sp getFormat() { +#if 0 + mMeta = new MetaData; + mMeta->setInt32(kKeyWidth, mWidth); + mMeta->setInt32(kKeyHeight, mHeight); // meta->setInt32(kKeyColorFormat, mColorFormat); - meta->setCString(kKeyMIMEType, mCodec); - return meta; + mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC); +#endif + return mMeta; } - virtual status_t start(MetaData *params) { + status_t start(MetaData *params) { Encode_Status ret; status_t err; @@ -906,7 +987,7 @@ public: return mSource->start(); } - virtual status_t stop() { + status_t stop() { Encode_Status ret; ret = mVideoEncoder->stop(); @@ -1144,7 +1225,7 @@ 
private: sp mSource; MediaBufferGroup mGroup; - + sp mMeta; }; class IVFWriter : public MediaWriter { @@ -1394,9 +1475,11 @@ void usage() { printf(" -p/--fps set frame rate, default 30\n"); printf(" -q/--minQP set minQP, default 0\n"); printf(" -r/--rcMode set rc mode, like VBR(default), CBR, VCM, NO_RC\n"); - printf(" -s/--src select source, like MALLOC(default), VASURFACE, KBUFHANDLE, GFX, GRALLOC, MEMHEAP (CAMERASOURCE, not support yet) \n"); + printf(" -s/--src select source, like MALLOC(default), VASURFACE, KBUFHANDLE, GFX, GRALLOC, MEMHEAP, SURFACEMEDIASOURCE (CAMERASOURCE, not support yet) \n"); + printf(" -t/--sessionFlag set sessionflag, default is 0\n"); + printf(" -u/--disableFrameSkip disable frame skip, default is false\n"); printf(" -w -h set source width /height, default 1280*720\n"); - printf(" -t/--disableFrameSkip disable frame skip, default is false\n"); + printf("\n"); } @@ -1426,7 +1509,8 @@ int main(int argc, char* argv[]) int OutFormat = 0; int SyncMode = 0; char* OutFileName = "out.264"; - const char* Yuvfile = NULL; + const char* Yuvfile = ""; + unsigned int SessionFlag = 0; android::ProcessState::self()->startThreadPool(); @@ -1453,7 +1537,8 @@ int main(int argc, char* argv[]) {"intraPeriod", required_argument, NULL, 'd'}, {"winSize", required_argument, NULL, 'j'}, {"idrInt", required_argument, NULL, 'l'}, - {"disableFrameSkip", no_argument, NULL, 't'}, + {"disableFrameSkip", no_argument, NULL, 'u'}, + {"sessionFlag", required_argument, NULL, 't'}, {0, 0, 0, 0} }; @@ -1584,6 +1669,10 @@ int main(int argc, char* argv[]) break; case 't': + SessionFlag = atoi(optarg); + break; + + case 'u': DisableFrameSkip = 1; break; @@ -1604,7 +1693,7 @@ int main(int argc, char* argv[]) printf("=========================================\n"); printf("Source:\n"); printf("Type: %s, Width: %d, Height: %d, Stride: %d\n", SRCTYPE[SrcType], SrcWidth, SrcHeight, SrcStride); - printf("FPS: %d, YUV: %s, Metadata: %d\n", SrcFps, Yuvfile, MetadataMode); + printf("FPS: %d, YUV: %s, Metadata: %d, SessionFlag: 0x%08x\n", SrcFps, Yuvfile, MetadataMode, SessionFlag); printf("\nEncoder:\n"); printf("Type: %s, Codec: %s, Width: %d, Height: %d\n", ENCTYPE[EncType], CODEC[EncCodec], EncWidth, EncHeight); @@ -1621,29 +1710,37 @@ int main(int argc, char* argv[]) sp writer; //setup source + sp src_meta = new MetaData; + src_meta->setInt32(kKeyWidth, SrcWidth); + src_meta->setInt32(kKeyHeight, SrcHeight); + src_meta->setInt32(kKeyStride, SrcStride); + src_meta->setInt32(kKeyFrameRate, SrcFps); + src_meta->setInt32(kKeyColorFormat, OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar); //HAL_PIXEL_FORMAT_NV12 OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar = 0x7FA00E00, + + src_meta->setCString('yuvf', Yuvfile); + src_meta->setInt32('fnum', SrcFrameNum); + src_meta->setInt32('sflg', SessionFlag); + + uint32_t src_flags = 0; + if (MetadataMode) + src_flags |= OMXCodec::kStoreMetaDataInVideoBuffers; + if (SrcType == 0) { - source = new MallocSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, - SrcFps, MetadataMode, Yuvfile); + source = new MallocSource(src_meta, src_flags); } else if (SrcType == 1) { - source = new VASurfaceSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, - SrcFps, MetadataMode, Yuvfile, 0); + source = new VASurfaceSource(src_meta, src_flags, 0); } else if (SrcType == 2) { - source = new VASurfaceSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, - SrcFps, MetadataMode, Yuvfile, 1); + source = new VASurfaceSource(src_meta, src_flags, 1); } else if (SrcType == 3) { - source = new 
GfxSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, - SrcFps, MetadataMode, Yuvfile); + source = new GfxSource(src_meta, src_flags); } else if (SrcType == 4) { - source = new GrallocSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, - SrcFps, MetadataMode, Yuvfile); + source = new GrallocSource(src_meta, src_flags); } else if (SrcType == 5) { - source = new MemHeapSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, - SrcFps, MetadataMode, Yuvfile); + source = new MemHeapSource(src_meta, src_flags); } else if (SrcType == 7) { - source = new MixSurfaceMediaSource(SrcWidth, SrcHeight, SrcStride, SrcFrameNum, - SrcFps, MetadataMode, Yuvfile); + source = new MixSurfaceMediaSource(SrcWidth, SrcHeight, SrcFrameNum, SrcFps); SyncMode = 1; - }else{ + }else{ printf("Source Type is not supported\n"); return 0; } @@ -1712,16 +1809,28 @@ int main(int argc, char* argv[]) else writer = new IVFWriter(OutFileName); - writer->addSource(encoder); + status_t err; + err = writer->addSource(encoder); + if (err != OK) { + printf("Writer addSource failed %d\n", err); + return 0; + } + + err = writer->start(enc_meta.get()); + if (err != OK) { + printf("Writer start failed %d\n", err); + return 0; + } printf("Start encoding\n"); int64_t start = systemTime(); - CHECK_EQ((status_t)OK, writer->start(enc_meta.get())); + while (!writer->reachedEOS()) { usleep(100000); } - status_t err = writer->stop(); + + err = writer->stop(); int64_t end = systemTime(); if (EncType == 1) { @@ -1735,6 +1844,7 @@ int main(int argc, char* argv[]) return 1; } + src_meta.clear(); enc_meta.clear(); printf("encoding %d frames in %lld us\n", gNumFramesOutput, (end-start)/1000); diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk index 5821c5c..3ec61de 100644 --- a/videoencoder/Android.mk +++ b/videoencoder/Android.mk @@ -101,6 +101,11 @@ LOCAL_COPY_HEADERS_TO := libmix_videoencoder LOCAL_COPY_HEADERS := \ IntelMetadataBuffer.h +ifeq ($(INTEL_VIDEO_XPROC_SHARING),true) +LOCAL_SHARED_LIBRARIES := liblog libutils libbinder libgui \ + libui libcutils libhardware +endif + LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libintelmetadatabuffer diff --git a/videoencoder/IntelMetadataBuffer.cpp b/videoencoder/IntelMetadataBuffer.cpp index 4a2b42f..b7dcfc0 100644 --- a/videoencoder/IntelMetadataBuffer.cpp +++ b/videoencoder/IntelMetadataBuffer.cpp @@ -22,10 +22,315 @@ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ +//#define LOG_NDEBUG 0 +#define LOG_TAG "IntelMetadataBuffer" +#include + #include "IntelMetadataBuffer.h" #include #include +#ifdef INTEL_VIDEO_XPROC_SHARING +#include +#include +#include +#include +#include +#include + +//#define TEST + +struct ShareMemMap { + uint32_t sessionflag; + int32_t value; + int32_t value_backup; + uint32_t type; + sp membase; + sp gbuffer; +}; + +List gShareMemMapList; +Mutex gShareMemMapListLock; + +enum { + SHARE_MEM = IBinder::FIRST_CALL_TRANSACTION, + GET_MEM, + CLEAR_MEM, +}; + +enum { + ST_MEMBASE = 0, + ST_GFX, + ST_MAX, +}; + +#define REMOTE_PROVIDER 0x80000000 +#define REMOTE_CONSUMER 0x40000000 + +static ShareMemMap* ReadMemObjFromBinder(const Parcel& data, uint32_t sessionflag, uint32_t value) { + + uint32_t type = data.readInt32(); + if (type >= ST_MAX) + return NULL; + + ShareMemMap* map = new ShareMemMap; + map->sessionflag = sessionflag; + map->type = type; + map->value_backup = value; + map->membase = NULL; + map->gbuffer= NULL; + +// LOGI("ReadMemObjFromBinder"); + + if (type == ST_MEMBASE) /*offset, size, heap*/ + { + ssize_t offset = data.readInt32(); + size_t size = data.readInt32(); + + sp heap = interface_cast(data.readStrongBinder()); + + sp mem = new MemoryBase(heap, offset, size); + if (mem == NULL) + { + delete map; + return NULL; + } + + map->value = (int32_t) ((int) ( mem->pointer() + 0x0FFF) & ~0x0FFF); + map->membase = mem; + +#ifdef TEST + ALOGI("membase heapID:%d, pointer:%x data:%x, aligned value:%x", \ + heap->getHeapID(), mem->pointer(), *((int *)(mem->pointer())), map->value); +#endif + + } + else if (type == ST_GFX) /*graphicbuffer*/ + { + sp buffer = new GraphicBuffer(); + if (buffer == NULL) + { + delete map; + return NULL; + } + data.read(*buffer); + + map->value = (uint32_t)buffer->handle; + map->gbuffer = buffer; + +#ifdef TEST + void* usrptr[3]; + buffer->lock(GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_READ_OFTEN, &usrptr[0]); + buffer->unlock(); + ALOGI("gfx handle:%x data:%x", (int32_t)buffer->handle, *((int *)usrptr[0])); +#endif + } + + gShareMemMapListLock.lock(); + gShareMemMapList.push_back(map); + gShareMemMapListLock.unlock(); + return map; +} + +static status_t WriteMemObjToBinder(Parcel& data, ShareMemMap* smem) { + + if (smem->type >= ST_MAX) + return BAD_VALUE; + +// LOGI("WriteMemObjToBinder"); + + data.writeInt32(smem->type); + + if (smem->type == ST_MEMBASE) /*offset, size, heap*/ + { + ssize_t offset; + size_t size; + sp heap = smem->membase->getMemory(&offset, &size); + data.writeInt32(offset); + data.writeInt32(size); + data.writeStrongBinder(heap->asBinder()); +#ifdef TEST + ALOGI("membase heapID:%d pointer:%x data:%x", \ + heap->getHeapID(), smem->membase->pointer(), *((int *)(smem->membase->pointer()))); +#endif + } + else if (smem->type == ST_GFX) /*graphicbuffer*/ + data.write(*(smem->gbuffer)); + + return NO_ERROR; +} + +static void ClearLocalMem(uint32_t sessionflag) +{ + List::iterator node; + + gShareMemMapListLock.lock(); + + for(node = gShareMemMapList.begin(); node != gShareMemMapList.end(); ) + { + if ((*node)->sessionflag == sessionflag) //remove all buffers belong to this session + { + (*node)->membase = NULL; + (*node)->gbuffer = NULL; + delete (*node); + node = gShareMemMapList.erase(node); + } + else + node ++; + } + + gShareMemMapListLock.unlock(); +} + +static ShareMemMap* FindShareMem(uint32_t sessionflag, int32_t value, bool isBackup) +{ + List::iterator node; + + gShareMemMapListLock.lock(); + for(node = gShareMemMapList.begin(); node != 
gShareMemMapList.end(); node++) + { + if (isBackup) + { + if ((*node)->sessionflag == sessionflag && (*node)->value_backup == value) + { + gShareMemMapListLock.unlock(); + return (*node); + } + } + else if ((*node)->sessionflag == sessionflag && (*node)->value == value) + { + gShareMemMapListLock.unlock(); + return (*node); + } + } + gShareMemMapListLock.unlock(); + + return NULL; +} + +static ShareMemMap* PopShareMem(uint32_t sessionflag, int32_t value) +{ + List::iterator node; + + gShareMemMapListLock.lock(); + for(node = gShareMemMapList.begin(); node != gShareMemMapList.end(); node++) + { + if ((*node)->sessionflag == sessionflag && (*node)->value == value) + { + gShareMemMapList.erase(node); + gShareMemMapListLock.unlock(); + return (*node); + } + } + gShareMemMapListLock.unlock(); + + return NULL; +} + +static void PushShareMem(ShareMemMap* &smem) +{ + gShareMemMapListLock.lock(); + gShareMemMapList.push_back(smem); + gShareMemMapListLock.unlock(); +} + +static sp GetIntelBufferSharingService() { + + sp sm = defaultServiceManager(); + sp binder = sm->checkService(String16("media.IntelBufferSharing")); + + if (binder == 0) + LOGE("media.IntelBufferSharing service is not published"); + + return binder; +} + +IntelBufferSharingService* IntelBufferSharingService::gBufferService = NULL; + +status_t IntelBufferSharingService::instantiate(){ + status_t ret = NO_ERROR; + + if (gBufferService == NULL) { + gBufferService = new IntelBufferSharingService(); + ret = defaultServiceManager()->addService(String16("media.IntelBufferSharing"), gBufferService); + LOGI("IntelBufferSharingService::instantiate() ret = %d\n", ret); + } + + return ret; +} + +status_t IntelBufferSharingService::onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { + + pid_t pid = data.readInt32(); + uint32_t sessionflag = data.readInt32(); + + switch(code) + { + case SHARE_MEM: + { + + if (pid == getpid()) //in same process, should not use binder + { + LOGE("onTransact in same process, wrong sessionflag?"); + return UNKNOWN_ERROR; + } + + int32_t value = data.readInt32(); + +// LOGI("onTransact SHARE_MEM value=%x", value); + + //different process + ShareMemMap* map = ReadMemObjFromBinder(data, sessionflag, value); + if (map == NULL) + return UNKNOWN_ERROR; + + reply->writeInt32(map->value); + + return NO_ERROR; + } + case CLEAR_MEM: + { +// LOGI("onTransact CLEAR_MEM sessionflag=%x", sessionflag); + + if (pid == getpid()) //in same process, should not use binder + { + //same process, return same pointer in data + LOGE("onTransact CLEAR_MEM in same process, wrong sessionflag?"); + return UNKNOWN_ERROR; + } + + ClearLocalMem(sessionflag); + return NO_ERROR; + } + case GET_MEM: + { + + if (pid == getpid()) //in same process, should not use binder + { + LOGE("onTransact GET_MEM in same process, wrong sessionflag?"); + return UNKNOWN_ERROR; + } + + int32_t value = data.readInt32(); + +// LOGI("onTransact GET_MEM value=%x", value); + + ShareMemMap* smem = FindShareMem(sessionflag, value, false); + if (smem && (NO_ERROR == WriteMemObjToBinder(*reply, smem))) + return NO_ERROR; + else + LOGE("onTransact GET_MEM: Not find mem"); + + return UNKNOWN_ERROR; + } + default: + return BBinder::onTransact(code, data, reply, flags); + + } + return NO_ERROR; +} +#endif + IntelMetadataBuffer::IntelMetadataBuffer() { mType = MetadataBufferTypeCameraSource; @@ -35,6 +340,9 @@ IntelMetadataBuffer::IntelMetadataBuffer() mExtraValues_Count = 0; mBytes = NULL; mSize = 0; +#ifdef INTEL_VIDEO_XPROC_SHARING + mSessionFlag = 0; 
+#endif } IntelMetadataBuffer::IntelMetadataBuffer(MetadataBufferType type, int32_t value) @@ -46,8 +354,11 @@ IntelMetadataBuffer::IntelMetadataBuffer(MetadataBufferType type, int32_t value) mExtraValues_Count = 0; mBytes = NULL; mSize = 0; +#ifdef INTEL_VIDEO_XPROC_SHARING + mSessionFlag = 0; +#endif } - + IntelMetadataBuffer::~IntelMetadataBuffer() { if (mInfo) @@ -64,6 +375,9 @@ IntelMetadataBuffer::~IntelMetadataBuffer() IntelMetadataBuffer::IntelMetadataBuffer(const IntelMetadataBuffer& imb) :mType(imb.mType), mValue(imb.mValue), mInfo(NULL), mExtraValues(NULL), mExtraValues_Count(imb.mExtraValues_Count), mBytes(NULL), mSize(imb.mSize) +#ifdef INTEL_VIDEO_XPROC_SHARING + ,mSessionFlag(imb.mSessionFlag) +#endif { if (imb.mInfo) mInfo = new ValueInfo(*imb.mInfo); @@ -90,6 +404,9 @@ const IntelMetadataBuffer& IntelMetadataBuffer::operator=(const IntelMetadataBuf mExtraValues_Count = imb.mExtraValues_Count; mBytes = NULL; mSize = imb.mSize; +#ifdef INTEL_VIDEO_XPROC_SHARING + mSessionFlag = imb.mSessionFlag; +#endif if (imb.mInfo) mInfo = new ValueInfo(*imb.mInfo); @@ -130,7 +447,47 @@ IMB_Result IntelMetadataBuffer::GetValue(int32_t& value) { value = mValue; +#ifndef INTEL_VIDEO_XPROC_SHARING + return IMB_SUCCESS; +#else + if ((mSessionFlag & REMOTE_CONSUMER) == 0) //no sharing or is local consumer + return IMB_SUCCESS; + + //try to find if it is already cached. + ShareMemMap* smem = FindShareMem(mSessionFlag, mValue, true); + if(smem) + { + value = smem->value; + return IMB_SUCCESS; + } + + //is remote provider and not find from cache, then pull from service + sp binder = GetIntelBufferSharingService(); + if (binder == 0) + return IMB_NO_SERVICE; + + //Detect IntelBufferSharingService, share mem to service + Parcel data, reply; + + //send pid, sessionflag, and memtype + pid_t pid = getpid(); + data.writeInt32(pid); + data.writeInt32(mSessionFlag); + data.writeInt32(mValue); + + //do transcation + if (binder->transact(GET_MEM, data, &reply) != NO_ERROR) + return IMB_SERVICE_FAIL; + + //get type/Mem OBJ + smem = ReadMemObjFromBinder(reply, mSessionFlag, mValue); + if (smem) + value = smem->value; + else + return IMB_SERVICE_FAIL; + return IMB_SUCCESS; +#endif } IMB_Result IntelMetadataBuffer::SetValue(int32_t value) @@ -157,7 +514,7 @@ IMB_Result IntelMetadataBuffer::SetValueInfo(ValueInfo* info) memcpy(mInfo, info, sizeof(ValueInfo)); } else - return IMB_INVAL_PARAM; + return IMB_INVAL_PARAM; return IMB_SUCCESS; } @@ -166,7 +523,7 @@ IMB_Result IntelMetadataBuffer::GetExtraValues(int32_t* &values, uint32_t& num) { values = mExtraValues; num = mExtraValues_Count; - + return IMB_SUCCESS; } @@ -182,7 +539,7 @@ IMB_Result IntelMetadataBuffer::SetExtraValues(int32_t* values, uint32_t num) if (mExtraValues == NULL) mExtraValues = new int32_t[num]; - + memcpy(mExtraValues, values, sizeof(int32_t) * num); mExtraValues_Count = num; } @@ -208,7 +565,7 @@ IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size) data += 4; memcpy(&value, data, 4); data += 4; - + switch (type) { case MetadataBufferTypeCameraSource: @@ -216,16 +573,16 @@ IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size) case MetadataBufferTypeUser: { if (extrasize >0 && extrasize < sizeof(ValueInfo)) - return IMB_INVAL_BUFFER; - - if (extrasize > sizeof(ValueInfo)) //has extravalues + return IMB_INVAL_BUFFER; + + if (extrasize > sizeof(ValueInfo)) //has extravalues { if ( (extrasize - sizeof(ValueInfo)) % 4 != 0 ) return IMB_INVAL_BUFFER; ExtraValues_Count = (extrasize - sizeof(ValueInfo)) / 
            }
-
-            if (extrasize > 0)
+
+            if (extrasize > 0)
             {
                 info = new ValueInfo;
                 memcpy(info, data, sizeof(ValueInfo));
@@ -259,7 +616,10 @@ IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size)
         delete[] mExtraValues;
     mExtraValues = ExtraValues;
     mExtraValues_Count = ExtraValues_Count;
-
+#ifdef INTEL_VIDEO_XPROC_SHARING
+    if (mInfo != NULL)
+        mSessionFlag = mInfo->sessionFlag;
+#endif
     return IMB_SUCCESS;
 }
@@ -285,9 +645,12 @@ IMB_Result IntelMetadataBuffer::Serialize(uint8_t* &data, uint32_t& size)
     ptr += 4;
     memcpy(ptr, &mValue, 4);
     ptr += 4;
-
+
     if (mInfo)
     {
+        #ifdef INTEL_VIDEO_XPROC_SHARING
+        mInfo->sessionFlag = mSessionFlag;
+        #endif
         memcpy(ptr, mInfo, sizeof(ValueInfo));
         ptr += sizeof(ValueInfo);
@@ -298,7 +661,7 @@ IMB_Result IntelMetadataBuffer::Serialize(uint8_t* &data, uint32_t& size)
     data = mBytes;
     size = mSize;
-
+
     return IMB_SUCCESS;
 }
@@ -306,3 +669,167 @@ uint32_t IntelMetadataBuffer::GetMaxBufferSize()
 {
     return 256;
 }
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+IMB_Result IntelMetadataBuffer::GetSessionFlag(uint32_t& sessionflag)
+{
+    sessionflag = mSessionFlag;
+
+    return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetSessionFlag(uint32_t sessionflag)
+{
+    mSessionFlag = sessionflag;
+
+    return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::ShareValue(sp<MemoryBase> mem)
+{
+    mValue = (int32_t)((int) ( mem->pointer() + 0x0FFF) & ~0x0FFF);
+
+    if (mSessionFlag == 0) //no sharing
+        return IMB_SUCCESS;
+
+    if (mSessionFlag & REMOTE_PROVIDER) //is remote provider
+    {
+        sp<IBinder> binder = GetIntelBufferSharingService();
+        if (binder == 0)
+            return IMB_NO_SERVICE;
+
+        //Detect IntelBufferSharingService, share mem to service
+        Parcel data, reply;
+
+        //send pid, sessionflag, and value
+        pid_t pid = getpid();
+        data.writeInt32(pid);
+        data.writeInt32(mSessionFlag);
+        data.writeInt32(mValue);
+
+        //send type/obj (offset/size/MemHeap)
+        ShareMemMap smem;
+        smem.membase = mem;
+        smem.type = ST_MEMBASE;
+        if (WriteMemObjToBinder(data, &smem) != NO_ERROR)
+            return IMB_SERVICE_FAIL;
+
+        //do transaction
+        if (binder->transact(SHARE_MEM, data, &reply) != NO_ERROR)
+            return IMB_SERVICE_FAIL;
+
+        //set new value gotten from peer
+        mValue = reply.readInt32();
+//        LOGI("ShareValue(membase) Get reply from service, new value:%x\n", mValue);
+    }
+    else //is local provider, direct access list
+    {
+        ShareMemMap* smem = new ShareMemMap;
+        smem->sessionflag = mSessionFlag;
+        smem->value = mValue;
+        smem->value_backup = mValue;
+        smem->type = ST_MEMBASE;
+        smem->membase = mem;
+        smem->gbuffer = NULL;
+        PushShareMem(smem);
+    }
+
+    return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::ShareValue(sp<GraphicBuffer> gbuffer)
+{
+    mValue = (int32_t)gbuffer->handle;
+
+    if (mSessionFlag == 0) //no sharing
+        return IMB_SUCCESS;
+
+    if (mSessionFlag & REMOTE_PROVIDER) //is remote provider
+    {
+        sp<IBinder> binder = GetIntelBufferSharingService();
+        if (binder == 0)
+            return IMB_NO_SERVICE;
+
+        Parcel data, reply;
+
+        //send pid, sessionflag, and memtype
+        pid_t pid = getpid();
+        data.writeInt32(pid);
+        data.writeInt32(mSessionFlag);
+        data.writeInt32(mValue);
+
+        //send value/graphicbuffer obj
+        ShareMemMap smem;
+        smem.gbuffer = gbuffer;
+        smem.type = ST_GFX;
+        if (WriteMemObjToBinder(data, &smem) != NO_ERROR)
+            return IMB_SERVICE_FAIL;
+
+        //do transaction
+        if (binder->transact(SHARE_MEM, data, &reply) != NO_ERROR)
+            return IMB_SERVICE_FAIL;
+
+        //set new value gotten from peer
+        mValue = reply.readInt32();
+//        LOGI("ShareValue(gfx) Get reply from service, new value:%x\n", mValue);
+    }
+    else //is local provider, direct access list
+    {
+        ShareMemMap* smem = new ShareMemMap;
+        smem->sessionflag = mSessionFlag;
+        smem->value = mValue;
+        smem->value_backup = mValue;
+        smem->type = ST_GFX;
+        smem->membase = NULL;
+        smem->gbuffer = gbuffer;
+        PushShareMem(smem);
+    }
+
+    return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::ClearContext(uint32_t sessionflag, bool isProvider)
+{
+    if (sessionflag == 0) //no sharing
+        return IMB_SUCCESS;
+
+    //clear local mem first
+    ClearLocalMem(sessionflag);
+
+    //clear mem on service if it is remote user
+    if ((isProvider && (sessionflag & REMOTE_PROVIDER)) || (!isProvider && (sessionflag & REMOTE_CONSUMER)))
+    {
+//        LOGI("CLEAR_MEM sessionflag=%x", sessionflag);
+
+        sp<IBinder> binder = GetIntelBufferSharingService();
+        if (binder == 0)
+            return IMB_NO_SERVICE;
+
+        //Detect IntelBufferSharingService, unshare mem from service
+        Parcel data, reply;
+
+        //send pid and sessionflag
+        pid_t pid = getpid();
+        data.writeInt32(pid);
+        data.writeInt32(sessionflag);
+
+        if (binder->transact(CLEAR_MEM, data, &reply) != NO_ERROR)
+            return IMB_SERVICE_FAIL;
+    }
+
+    return IMB_SUCCESS;
+}
+
+uint32_t IntelMetadataBuffer::MakeSessionFlag(uint16_t sindex, bool remoteProvider, bool remoteConsumer)
+{
+    uint32_t sessionflag = 0;
+
+    if (remoteProvider)
+        sessionflag |= REMOTE_PROVIDER;
+
+    if (remoteConsumer)
+        sessionflag |= REMOTE_CONSUMER;
+
+    return sessionflag + sindex;
+}
+#endif
diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h
index 6d8f978..00b6649 100644
--- a/videoencoder/IntelMetadataBuffer.h
+++ b/videoencoder/IntelMetadataBuffer.h
@@ -27,12 +27,25 @@
 #include
 
+//#define INTEL_VIDEO_XPROC_SHARING
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+#include
+#include
+
+using namespace android;
+#endif
+
 #define STRING_TO_FOURCC(format) ((uint32_t)(((format)[0])|((format)[1]<<8)|((format)[2]<<16)|((format)[3]<<24)))
 
 typedef enum {
     IMB_SUCCESS = 0,
     IMB_INVAL_PARAM = 1,
     IMB_INVAL_BUFFER = 2,
+#ifdef INTEL_VIDEO_XPROC_SHARING
+    IMB_NO_SERVICE = 3,
+    IMB_SERVICE_FAIL = 4,
+#endif
 }IMB_Result;
 
 typedef enum {
@@ -57,6 +70,9 @@ typedef struct {
     uint32_t chromStride;   //picture chrom stride
     uint32_t format;        //color format
     uint32_t s3dformat;     //S3D format
+#ifdef INTEL_VIDEO_XPROC_SHARING
+    uint32_t sessionFlag;   //for buffer sharing session
+#endif
 }ValueInfo;
 
 typedef enum {
@@ -102,7 +118,52 @@ private:
     uint8_t* mBytes;
     uint32_t mSize;
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+public:
+    IMB_Result ShareValue(sp<MemoryBase> mem);
+    IMB_Result ShareValue(sp<GraphicBuffer> gbuffer);
+
+    IMB_Result GetSessionFlag(uint32_t &sessionflag);
+    IMB_Result SetSessionFlag(uint32_t sessionflag);
+
+    //Static, for clear context
+    static IMB_Result ClearContext(uint32_t sessionflag, bool isProvider = true);
+
+    static const uint16_t CAMERA_BASE    = 0x0000;
+    static const uint16_t WIDI_BASE      = 0x1000;
+    static const uint16_t WEBRTC_BASE    = 0x2000;
+    static const uint16_t VIDEOEDIT_BASE = 0x3000;
+
+    static uint32_t MakeSessionFlag(uint16_t sindex, bool remoteProvider, bool remoteConsumer);
+
+private:
+    uint32_t mSessionFlag;
+#endif
+
 };
 
+#ifdef INTEL_VIDEO_XPROC_SHARING
+
+class IntelBufferSharingService : public BBinder
+{
+private:
+    static IntelBufferSharingService *gBufferService;
+
+public:
+    static status_t instantiate();
+
+    IntelBufferSharingService(){
+        LOGI("IntelBufferSharingService instance is created");
+    }
+
+    ~IntelBufferSharingService(){
+        LOGI("IntelBufferSharingService instance is destroyed");
+    }
+
+    status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags);
+};
+#endif
+
 #endif
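The provider-side calling sequence this API implies looks roughly as follows. This is an illustrative sketch, not code from the tree: ShareOneBuffer is a hypothetical helper, the include paths are assumed, the buffer size and WIDI_BASE session index are arbitrary choices, and it presumes INTEL_VIDEO_XPROC_SHARING is enabled and IntelBufferSharingService::instantiate() has already run in a long-lived media process.

    #include <binder/MemoryHeapBase.h>   // assumed include paths
    #include <binder/MemoryBase.h>
    #include <string.h>
    #include "IntelMetadataBuffer.h"

    // Share one buffer with a consumer in another process and produce the
    // serialized metadata bytes that travel to it.
    static IMB_Result ShareOneBuffer(uint8_t*& bytes, uint32_t& size)
    {
        // Provider and consumer both live outside the service process,
        // so set both remote bits.
        uint32_t flag = IntelMetadataBuffer::MakeSessionFlag(
                IntelMetadataBuffer::WIDI_BASE, true /*remoteProvider*/, true /*remoteConsumer*/);

        sp<MemoryHeapBase> heap = new MemoryHeapBase(8192);
        sp<MemoryBase> mem = new MemoryBase(heap, 0, 8192);

        IntelMetadataBuffer imb(MetadataBufferTypeCameraSource, 0);
        imb.SetSessionFlag(flag);

        // A ValueInfo must be attached: Serialize() embeds the session flag
        // inside ValueInfo, and UnSerialize() on the consumer reads it back.
        ValueInfo info;
        memset(&info, 0, sizeof(info));
        imb.SetValueInfo(&info);

        // SHARE_MEM transaction: mValue is replaced by the service-side token.
        IMB_Result res = imb.ShareValue(mem);
        if (res != IMB_SUCCESS)
            return res;

        // The consumer calls UnSerialize() on these bytes, then GetValue(),
        // which issues GET_MEM to map the same memory in its own process.
        return imb.Serialize(bytes, size);
    }

On the consumer side the matching teardown is IntelMetadataBuffer::ClearContext(flag, false), which drops the locally cached mappings and, because REMOTE_CONSUMER is set, also sends CLEAR_MEM to the service.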
diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index af16152..8e90e9f 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -68,7 +68,11 @@ VideoEncoderBase::VideoEncoderBase() ,mTotalSize(0) ,mTotalSizeCopied(0) ,mFrameSkipped(false) - ,mSupportedSurfaceMemType(0){ + ,mSupportedSurfaceMemType(0) +#ifdef INTEL_VIDEO_XPROC_SHARING + ,mSessionFlag(0) +#endif + { VAStatus vaStatus = VA_STATUS_SUCCESS; // here the display can be any value, use following one @@ -106,6 +110,10 @@ VideoEncoderBase::~VideoEncoderBase() { } else { mVADisplay = NULL; } + +#ifdef INTEL_VIDEO_XPROC_SHARING + IntelMetadataBuffer::ClearContext(mSessionFlag, false); +#endif } Encode_Status VideoEncoderBase::start() { @@ -1806,6 +1814,9 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA value = (int32_t)inBuffer->data; } +#ifdef INTEL_VIDEO_XPROC_SHARING + imb.GetSessionFlag(mSessionFlag); +#endif //find if mapped map = (SurfaceMap*) findSurfaceMapByValue(value); diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index d43878b..75c3874 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -163,5 +163,9 @@ protected: //supported surface memory types int mSupportedSurfaceMemType; + +#ifdef INTEL_VIDEO_XPROC_SHARING + uint32_t mSessionFlag; +#endif }; #endif /* __VIDEO_ENCODER_BASE_H__ */ -- cgit v1.2.3 From d94c02b2166a2fc4b817804ec7aca9180b11421c Mon Sep 17 00:00:00 2001 From: Fei Jiang Date: Fri, 11 Oct 2013 21:38:58 +0800 Subject: libmix: fix GEN vaCreateSurfaces not workable issue on BYT BZ: 145088 set flag for attribute and switch pixel_format to VA_FOURCC_NV12 Need pass VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC as flag. Also need set pixel format as FOURCC, not HAL format. Change-Id: Ib6cd1d7c77cf3483a367b4e6d9ba6312ae181d65 Signed-off-by: Fei Jiang Reviewed-on: http://android.intel.com:8080/138145 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videodecoder/VideoDecoderBase.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 79e33b7..06fa57a 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -819,7 +819,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i return DECODE_MEMORY_FAIL; } mVASurfaceAttrib->num_buffers = mNumSurfaces; - mVASurfaceAttrib->pixel_format = mConfigBuffer.graphicBufferColorFormat; + mVASurfaceAttrib->pixel_format = VA_FOURCC_NV12; mVASurfaceAttrib->width = mVideoFormatInfo.width; mVASurfaceAttrib->height = mVideoFormatInfo.height; mVASurfaceAttrib->data_size = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.height * 1.5; @@ -833,7 +833,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i mVASurfaceAttrib->offsets[2] = 0; mVASurfaceAttrib->offsets[3] = 0; mVASurfaceAttrib->private_data = (void *)mConfigBuffer.nativeWindow; - mVASurfaceAttrib->flags = 0; + mVASurfaceAttrib->flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC; for (int i = 0; i < mNumSurfaces; i++) { mVASurfaceAttrib->buffers[i] = (unsigned int )mConfigBuffer.graphicBufferHandler[i]; -- cgit v1.2.3 From 46cd40f9806d1f82356a6cf2bbb3aac346cce364 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Mon, 14 Oct 2013 19:08:43 +0800 Subject: libmix: VP8 parser refinement BZ: 131068 This patch refined content: 1. 
add trace in parser if meet error 2. remove non-used function Change-Id: Ib5c7520f0e55a4ed1ba59c41c4c131d9f2dd4ebb Reviewed-on: http://android.intel.com:8080/139257 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- mixvbp/vbp_manager/vbp_vp8_parser.c | 8 ++++- mixvbp/vbp_plugin/vp8/include/vp8parse.h | 2 +- mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c | 37 ++++++++--------------- mixvbp/vbp_plugin/vp8/vp8parse.c | 51 +++++++++++++++++++++----------- 4 files changed, 54 insertions(+), 44 deletions(-) diff --git a/mixvbp/vbp_manager/vbp_vp8_parser.c b/mixvbp/vbp_manager/vbp_vp8_parser.c index 203ab75..51f8236 100755 --- a/mixvbp/vbp_manager/vbp_vp8_parser.c +++ b/mixvbp/vbp_manager/vbp_vp8_parser.c @@ -205,6 +205,7 @@ uint32 vbp_parse_start_code_vp8(vbp_context *pcontext) uint32 length = cxt->parse_cubby.size; if (length < 3) { + ETRACE("failure: invalid len %d in parse start code", length); return VBP_DATA; } @@ -216,6 +217,7 @@ uint32 vbp_parse_start_code_vp8(vbp_context *pcontext) // check start code if ((c[0] != 0x9d) || (c[1] != 0x01) || (c[2] != 0x2a)) { + ETRACE("failure: check start code failure"); return VBP_PARM; } } @@ -484,7 +486,8 @@ static uint32_t vbp_add_slice_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 * } pic_data->num_slices++; - if (pic_data->num_slices > VP8_MAX_NUM_SLICES) { + if (pic_data->num_slices > VP8_MAX_NUM_SLICES) + { ETRACE("Number of slices (%d) per picture exceeds the limit (%d).", pic_data->num_slices, VP8_MAX_NUM_SLICES); return VBP_DATA; } @@ -517,7 +520,10 @@ uint32 vbp_populate_query_data_vp8(vbp_context *pcontext) { error = vbp_add_slice_data_vp8(parser, query_data); if (error != VBP_OK) + { + ETRACE("add slice data error %d", error); return error; + } } /* Populate codec data */ diff --git a/mixvbp/vbp_plugin/vp8/include/vp8parse.h b/mixvbp/vbp_plugin/vp8/include/vp8parse.h index a5c9c13..063d635 100755 --- a/mixvbp/vbp_plugin/vp8/include/vp8parse.h +++ b/mixvbp/vbp_plugin/vp8/include/vp8parse.h @@ -34,7 +34,7 @@ extern "C" { int32_t vp8_parse_frame_tag(FrameTagHeader *frame_tag, uint8_t *data, uint32_t data_sz); -//vp8_Status vp8_translate_parse_status(vp8_Status status); +void vp8_translate_parse_status(vp8_Status status); void vp8_init_Info(vp8_Info *pi); diff --git a/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c b/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c index 4eb5919..860adc0 100755 --- a/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c +++ b/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c @@ -61,7 +61,12 @@ uint32_t viddec_vp8_parse(void *parent, void *ctxt) vp8_Status status = VP8_NO_ERROR; vp8_viddec_parser *parser = (vp8_viddec_parser*)ctxt; - if (1 != parser->got_start) return VP8_NO_INITIALIZATION; + if (1 != parser->got_start) + { + status = VP8_NO_INITIALIZATION; + vp8_translate_parse_status(status); + return status; + } vp8_Info *pi = &(parser->info); viddec_pm_cxt_t *pm_cxt = (viddec_pm_cxt_t *)parent; @@ -70,7 +75,7 @@ uint32_t viddec_vp8_parse(void *parent, void *ctxt) if (pi->source_sz < 0) { - return VP8_UNEXPECTED_END_OF_BITSTREAM; + status = VP8_UNEXPECTED_END_OF_BITSTREAM; } else if (pi->source_sz == 0) { @@ -82,13 +87,12 @@ uint32_t viddec_vp8_parse(void *parent, void *ctxt) status = vp8_parse_frame_header(parser); } - return status; -} + if (status != VP8_NO_ERROR) + { + vp8_translate_parse_status(status); + } -uint32_t viddec_vp8_wkld_done(void *parent, void *ctxt, unsigned int next_sc, - uint32_t *codec_specific_errors) -{ - return 0; + return status; } void 
viddec_vp8_get_context_size(viddec_parser_memory_sizes_t *size) @@ -99,20 +103,3 @@ void viddec_vp8_get_context_size(viddec_parser_memory_sizes_t *size) return; } -uint32_t viddec_vp8_is_frame_start(void *ctxt) -{ - vp8_viddec_parser* parser = ctxt; - - return parser->got_start; -} - -void viddec_vp8_get_ops(viddec_parser_ops_t *ops) -{ - ops->init = viddec_vp8_init; - - ops->parse_syntax = viddec_vp8_parse; - ops->get_cxt_size = viddec_vp8_get_context_size; - ops->is_wkld_done = viddec_vp8_wkld_done; - ops->is_frame_start = viddec_vp8_is_frame_start; - return; -} diff --git a/mixvbp/vbp_plugin/vp8/vp8parse.c b/mixvbp/vbp_plugin/vp8/vp8parse.c index 4f15736..f3c789d 100755 --- a/mixvbp/vbp_plugin/vp8/vp8parse.c +++ b/mixvbp/vbp_plugin/vp8/vp8parse.c @@ -25,6 +25,7 @@ #include "vp8_tables.h" #include "vp8parse.h" +#include static const uint8_t kVp8SyncCodeByte[] = {0x9d, 0x01, 0x2a}; @@ -115,7 +116,7 @@ void vp8_parse_segmentation_adjustments_data(vp8_Info *pi) /* Is segmentation enabled */ pi->Segmentation.Enabled = (uint8_t)vp8_decode_bool(bc, 128); //chapter 9.2 - macroblock uses segments ? 1: 0 - if(pi->Segmentation.Enabled ) + if(pi->Segmentation.Enabled) { /* Signal whether or not the segmentation map is being explicitly updated this frame */ pi->Segmentation.UpdateMap = (uint8_t)vp8_decode_bool(bc, 128); @@ -465,29 +466,40 @@ void vp8_parse_remaining_frame_header_data(vp8_Info *pi) } -#if 0 -vp8_Status vp8_translate_parse_status(vp8_Status status) + +void vp8_translate_parse_status(vp8_Status status) { switch (status) { case VP8_UNSUPPORTED_VERSION: - LOGE("Parser returned VP8_UNSUPPORTED_VERSION"); - return VP8_UNSUPPORTED_VERSION; + ETRACE("Parser returns VP8_UNSUPPORTED_VERSION"); + break; case VP8_UNSUPPORTED_BITSTREAM: - LOGE("Parser returned VP8_UNSUPPORTED_BITSTREAM"); - return VP8_UNSUPPORTED_BITSTREAM; + ETRACE("Parser returns VP8_UNSUPPORTED_BITSTREAM"); + break; case VP8_INVALID_FRAME_SYNC_CODE: - LOGE("Parser returned VP8_INVALID_FRAME_SYNC_CODE"); - return VP8_INVALID_FRAME_SYNC_CODE; + ETRACE("Parser returns VP8_INVALID_FRAME_SYNC_CODE"); + break; case VP8_UNEXPECTED_END_OF_BITSTREAM: - LOGE("Parser returned VP8_UNEXPECTED_END_OF_BITSTREAM"); - return VP8_UNEXPECTED_END_OF_BITSTREAM; - default: - LOGE("Parser returned VP8_UNKNOWN_ERROR"); - return VP8_UNKNOWN_ERROR; + ETRACE("Parser returns VP8_UNEXPECTED_END_OF_BITSTREAM"); + break; + case VP8_CORRUPT_FRAME: + ETRACE("Parser returns VP8_CORRUPT_FRAME"); + break; + case VP8_MEMORY_ERROR: + ETRACE("Parser returns MEMORY_ERROR"); + break; + case VP8_NO_INITIALIZATION: + ETRACE("Parser returns VP8_NO_INITIALIZATION"); + break; + case VP8_UNKNOWN_ERROR: + ETRACE("Parser returns VP8_UNKNOWN_ERROR"); + break; } + + return; } -#endif + /* Parse VP8 frame header */ int32_t vp8_parse_frame_header(vp8_viddec_parser *parser) @@ -508,6 +520,7 @@ int32_t vp8_parse_frame_header(vp8_viddec_parser *parser) ret = vp8_parse_frame_tag(&(pi->frame_tag), data, data_sz); if (ret != VP8_NO_ERROR) { + vp8_translate_parse_status(ret); return ret; } @@ -523,7 +536,9 @@ int32_t vp8_parse_frame_header(vp8_viddec_parser *parser) /* Check sync code containg 3 bytes*/ if ((data[0] != kVp8SyncCodeByte[0]) || (data[1] != kVp8SyncCodeByte[1]) || (data[2] != kVp8SyncCodeByte[2])) { - return VP8_INVALID_FRAME_SYNC_CODE; + ret = VP8_INVALID_FRAME_SYNC_CODE; + vp8_translate_parse_status(ret); + return ret; } pi->width = (data[3] | (data[4] << 8)) & 0x3fff; @@ -538,7 +553,9 @@ int32_t vp8_parse_frame_header(vp8_viddec_parser *parser) if (0 == pi->width || 0 
== pi->height) { - return VP8_UNSUPPORTED_BITSTREAM; + ret = VP8_UNSUPPORTED_BITSTREAM; + vp8_translate_parse_status(ret); + return ret; } /* Initialize frame parameters*/ -- cgit v1.2.3 From 405e2cd1c845ec819e0e66fc8dd728a1d7a78355 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Wed, 9 Oct 2013 14:41:52 +0800 Subject: mixvbp: h264 parser refinement BZ: 131068 1) remove redundant code; 2) add debug messages; Change-Id: I1a624a257fcfcd1eb25519912797c41dae7f42a8 Signed-off-by: Dan Liang Reviewed-on: http://android.intel.com:8080/139258 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- mixvbp/vbp_manager/include/viddec_pm.h | 5 +- .../vbp_manager/include/viddec_pm_utils_bstream.h | 12 +- mixvbp/vbp_manager/include/viddec_pm_utils_list.h | 18 - mixvbp/vbp_manager/vbp_h264_parser.c | 65 ++- mixvbp/vbp_manager/vbp_utils.c | 6 +- mixvbp/vbp_manager/viddec_parse_sc.c | 124 ------ mixvbp/vbp_manager/viddec_pm_parser_ops.c | 3 +- mixvbp/vbp_manager/viddec_pm_utils_bstream.c | 243 +---------- mixvbp/vbp_plugin/h264/Android.mk | 51 ++- mixvbp/vbp_plugin/h264/h264parse.c | 174 +++----- mixvbp/vbp_plugin/h264/h264parse_bsd.c | 33 +- mixvbp/vbp_plugin/h264/h264parse_dpb.c | 463 +++++++++++---------- mixvbp/vbp_plugin/h264/h264parse_mem.c | 4 - mixvbp/vbp_plugin/h264/h264parse_pps.c | 103 +---- mixvbp/vbp_plugin/h264/h264parse_sei.c | 25 +- mixvbp/vbp_plugin/h264/h264parse_sh.c | 300 +++---------- mixvbp/vbp_plugin/h264/h264parse_sps.c | 140 ++----- mixvbp/vbp_plugin/h264/include/h264.h | 46 +- mixvbp/vbp_plugin/h264/include/h264parse.h | 2 - .../secvideo/baytrail/viddec_h264secure_parse.c | 61 +-- mixvbp/vbp_plugin/h264/viddec_h264_parse.c | 210 ++-------- 21 files changed, 574 insertions(+), 1514 deletions(-) diff --git a/mixvbp/vbp_manager/include/viddec_pm.h b/mixvbp/vbp_manager/include/viddec_pm.h index e5fb679..31bd0f4 100755 --- a/mixvbp/vbp_manager/include/viddec_pm.h +++ b/mixvbp/vbp_manager/include/viddec_pm.h @@ -28,11 +28,8 @@ typedef struct uint8_t late_frame_detect; uint8_t frame_start_found; uint32_t next_workload_error_eos; -#ifdef VBP + uint32_t codec_data[MAX_CODEC_CXT_SIZE<<3]; -#else - uint32_t codec_data[MAX_CODEC_CXT_SIZE>>2]; -#endif } viddec_pm_cxt_t; diff --git a/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h index 999a067..ab2569f 100755 --- a/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h +++ b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h @@ -9,11 +9,7 @@ typedef struct { -#ifdef VBP uint8_t *buf; -#else - uint8_t buf[CUBBY_SIZE + 8 + MIN_DATA];/* extra 8 bytes for alignmet, extra 8 bytes for old data */ -#endif uint32_t buf_st; /* start pos in buf */ uint32_t buf_end; /* first invalid byte in buf */ uint32_t buf_index; /* current index in buf */ @@ -30,10 +26,8 @@ typedef struct typedef struct { -#ifdef VBP /* counter of emulation prevention byte */ uint32_t emulation_byte_counter; -#endif /* After First pass of scan we figure out how many bytes are in the current access unit(N bytes). 
We store the bstream buffer's first valid byte index wrt to accessunit in this variable */ uint32_t au_pos; @@ -72,17 +66,17 @@ void viddec_pm_utils_skip_if_current_is_emulation(viddec_pm_utils_bstream_cxt_t */ static inline void viddec_pm_utils_bstream_get_au_offsets(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *bit, uint32_t *byte, uint8_t *is_emul) { - uint32_t phase=cxt->phase; + uint32_t phase = cxt->phase; *bit = cxt->bstrm_buf.buf_bitoff; *byte = cxt->au_pos + (cxt->bstrm_buf.buf_index - cxt->bstrm_buf.buf_st); if (cxt->phase > 0) { - phase = phase - ((cxt->bstrm_buf.buf_bitoff != 0)? 1: 0 ); + phase = phase - ((cxt->bstrm_buf.buf_bitoff != 0) ? 1 : 0); } /* Assumption: we will never be parked on 0x3 byte of emulation prevention sequence */ *is_emul = (cxt->is_emul_reqd) && (phase > 0) && (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index] == 0) && - (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x3); + (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index + 1] == 0x3); } #endif diff --git a/mixvbp/vbp_manager/include/viddec_pm_utils_list.h b/mixvbp/vbp_manager/include/viddec_pm_utils_list.h index 0e650d5..ac8a329 100755 --- a/mixvbp/vbp_manager/include/viddec_pm_utils_list.h +++ b/mixvbp/vbp_manager/include/viddec_pm_utils_list.h @@ -3,11 +3,7 @@ /* Limitation:This is the maximum numbers of es buffers between start codes. Needs to change if we encounter a case if this is not sufficent */ -#ifdef VBP #define MAX_IBUFS_PER_SC 512 -#else -#define MAX_IBUFS_PER_SC 64 -#endif /* This structure is for storing information on byte position in the current access unit. stpos is the au byte index of first byte in current es buffer.edpos is the au byte index+1 of last @@ -32,19 +28,5 @@ typedef struct /* This function initialises the list to default values */ void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt); -#ifndef VBP -/* This function adds a new entry to list and will emit tags if needed */ -uint32_t viddec_pm_utils_list_addbuf(viddec_pm_utils_list_t *list, viddec_input_buffer_t *es_buf); - -/* This function updates au byte position of the current list. This should be called after sc codes are detected and before - syntax parsing as get bits requires this to be initialized. */ -void viddec_pm_utils_list_updatebytepos(viddec_pm_utils_list_t *list, uint8_t sc_prefix_length); -/* This function walks through the list and removes consumed buffers based on total bytes. It then moves - unused entires to the top of list. */ -void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint32_t length); - -/* this function returns 1 if the requested byte is not found. If found returns list and offset into list */ -uint32_t viddec_pm_utils_list_getbyte_position(viddec_pm_utils_list_t *list, uint32_t byte, uint32_t *list_index, uint32_t *offset); -#endif #endif diff --git a/mixvbp/vbp_manager/vbp_h264_parser.c b/mixvbp/vbp_manager/vbp_h264_parser.c index 3f6400d..0a8f141 100755 --- a/mixvbp/vbp_manager/vbp_h264_parser.c +++ b/mixvbp/vbp_manager/vbp_h264_parser.c @@ -177,16 +177,6 @@ uint32 vbp_init_parser_entries_h264(vbp_context *pcontext) ETRACE ("Failed to set entry point." ); return VBP_LOAD; } -#ifdef VBP - pcontext->parser_ops->is_wkld_done = NULL; -#else - pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_h264_wkld_done"); - if (NULL == pcontext->parser_ops->is_wkld_done) - { - ETRACE ("Failed to set entry point." 
); - return VBP_LOAD; - } -#endif pcontext->parser_ops->flush = dlsym(pcontext->fd_parser, "viddec_h264_flush");; if (NULL == pcontext->parser_ops->flush) @@ -884,27 +874,37 @@ static void vbp_set_codec_data_h264( codec_data->crop_right = 0; codec_data->crop_top = 0; codec_data->crop_bottom = 0; - if(parser->info.active_SPS.sps_disp.frame_cropping_flag) { + if(parser->info.active_SPS.sps_disp.frame_cropping_flag) + { int CropUnitX = 0, CropUnitY = 0, SubWidthC = 0, SubHeightC = 0; int ChromaArrayType = 0; - if(parser->info.active_SPS.sps_disp.separate_colour_plane_flag == 0) { - if(parser->info.active_SPS.sps_disp.chroma_format_idc == 1) { + if(parser->info.active_SPS.sps_disp.separate_colour_plane_flag == 0) + { + if(parser->info.active_SPS.sps_disp.chroma_format_idc == 1) + { SubWidthC = 2; SubHeightC = 2; - } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 2) { + } + else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 2) + { SubWidthC = 2; SubHeightC = 1; - } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 3) { + } + else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 3) + { SubWidthC = 1; SubHeightC = 1; } ChromaArrayType = parser->info.active_SPS.sps_disp.chroma_format_idc; } - if(ChromaArrayType == 0) { + if(ChromaArrayType == 0) + { CropUnitX = 1; CropUnitY = 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag; - } else { + } + else + { CropUnitX = SubWidthC; CropUnitY = SubHeightC * ( 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag); } @@ -1226,10 +1226,8 @@ static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index) slc_parms->num_ref_idx_l0_active_minus1 = 0; slc_parms->num_ref_idx_l1_active_minus1 = 0; - if (slice_header->slice_type == h264_PtypeI) - { - } - else if (slice_header->slice_type == h264_PtypeP) + + if (slice_header->slice_type == h264_PtypeP) { slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; } @@ -1238,7 +1236,7 @@ static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index) slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; slc_parms->num_ref_idx_l1_active_minus1 = slice_header->num_ref_idx_l1_active - 1; } - else + else if (slice_header->slice_type != h264_PtypeI) { WTRACE("slice type %d is not supported.", slice_header->slice_type); } @@ -1249,24 +1247,17 @@ static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index) slc_parms->slice_alpha_c0_offset_div2 = slice_header->slice_alpha_c0_offset_div2; slc_parms->slice_beta_offset_div2 = slice_header->slice_beta_offset_div2; - vbp_set_pre_weight_table_h264(h264_parser, slc_parms); vbp_set_slice_ref_list_h264(h264_parser, slc_parms); - pic_data->num_slices++; - //vbp_update_reference_frames_h264_methodB(pic_data); if (pic_data->num_slices > MAX_NUM_SLICES) { ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES); return VBP_DATA; } - /*if (pic_data->num_slices > 1) - { - ITRACE("number of slices per picture is %d.", pic_data->num_slices); - }*/ return VBP_OK; } @@ -1395,7 +1386,7 @@ uint32 vbp_parse_init_data_h264(vbp_context* pcontext) num_of_picture_parameter_sets = *cur_data++; if (num_of_picture_parameter_sets > 1) { - /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */ + VTRACE("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); } for (i = 0; i < num_of_picture_parameter_sets; i++) @@ -1460,7 +1451,7 @@ static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p, int 
*NAL_length_size) return *p; default: - WTRACE("invalid NAL_length_size: %d.", NAL_length_size); + WTRACE("invalid NAL_length_size: %d.", *NAL_length_size); /* default to 4 bytes for length */ *NAL_length_size = 4; return vbp_utils_ntohl(p); @@ -1514,7 +1505,7 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed, &parser_private->NAL_length_size); if (NAL_length <= 0 || NAL_length > size_left - parser_private->NAL_length_size) { - ETRACE("Invalid NAL_length parsed."); + ETRACE("Invalid NAL_length parsed: 0x%x", NAL_length); break; } @@ -1565,7 +1556,6 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) } } - if (parser_private->bitstream_pattern == H264_BS_SC_PREFIXED) { viddec_sc_parse_cubby_cxt_t cubby; @@ -1629,7 +1619,6 @@ uint32 vbp_parse_start_code_h264(vbp_context *pcontext) cxt->list.data[0].edpos = cxt->parse_cubby.size; } - return VBP_OK; } @@ -1653,7 +1642,7 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) switch (parser->info.nal_unit_type) { case h264_NAL_UNIT_TYPE_SLICE: - //ITRACE("slice header is parsed."); + VTRACE("slice header is parsed."); error = vbp_add_pic_data_h264(pcontext, i); if (VBP_OK == error) { @@ -1662,7 +1651,7 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) break; case h264_NAL_UNIT_TYPE_IDR: - //ITRACE("IDR header is parsed."); + VTRACE("IDR header is parsed."); error = vbp_add_pic_data_h264(pcontext, i); if (VBP_OK == error) { @@ -1670,7 +1659,7 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) } break; case h264_NAL_UNIT_TYPE_SEI: - //ITRACE("SEI header is parsed."); + VTRACE("SEI header is parsed."); break; case h264_NAL_UNIT_TYPE_SPS: @@ -1682,7 +1671,7 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) break; case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: - //ITRACE("ACC unit delimiter is parsed."); + VTRACE("ACC unit delimiter is parsed."); break; case h264_NAL_UNIT_TYPE_EOSeq: diff --git a/mixvbp/vbp_manager/vbp_utils.c b/mixvbp/vbp_manager/vbp_utils.c index 78bbb0e..0da65b5 100755 --- a/mixvbp/vbp_manager/vbp_utils.c +++ b/mixvbp/vbp_manager/vbp_utils.c @@ -387,18 +387,16 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0])); /* can't return error for now. 
Neet further investigation */ -#if 0 if (0 != error) { - ETRACE("failed to parse the syntax: %d!", error); - return error; + WTRACE("failed to parse the syntax: %d!", error); } -#endif /* process parsing result */ error = pcontext->func_process_parsing_result(pcontext, i); if (VBP_MULTI == error) { + ITRACE("Multiple frames are found in one bufffer."); return VBP_OK; } else if (0 != error) diff --git a/mixvbp/vbp_manager/viddec_parse_sc.c b/mixvbp/vbp_manager/viddec_parse_sc.c index b3f8d0b..d64a229 100755 --- a/mixvbp/vbp_manager/viddec_parse_sc.c +++ b/mixvbp/vbp_manager/viddec_parse_sc.c @@ -1,6 +1,5 @@ #include "viddec_pm_parse.h" -#ifndef MFDBIGENDIAN uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state) { uint8_t *ptr; @@ -93,126 +92,3 @@ uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state) return ret; } -#else -#define FIRST_STARTCODE_BYTE 0x00 -#define SECOND_STARTCODE_BYTE 0x00 -#define THIRD_STARTCODE_BYTE 0x01 - -/* BIG ENDIAN: Must be the second and fourth byte of the bytestream for this to work */ -/* LITTLE ENDIAN: Must be the second and fourth byte of the bytestream for this to work */ -/* these are little-endian defines */ -#define SC_BYTE_MASK0 0x00ff0000 /* little-endian */ -#define SC_BYTE_MASK1 0x000000ff /* little-endian */ - -/* Parse for Sc code of pattern 0x00 0x00 0xXX in the current buffer. Returns either sc found or success. - The conext is updated with current phase and sc_code position in the buffer. -*/ -uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state) -{ - uint8_t *ptr; - uint32_t size; - uint32_t data_left=0, phase = 0, ret = 0; - viddec_sc_parse_cubby_cxt_t *cxt; - /* What is phase?: phase is a value between [0-4], we keep track of consecutive '0's with this. - Any time a '0' is found its incremented by 1(uptp 2) and reset to '0' if a zero not found. - if 0xXX code is found and current phase is 2, its changed to 3 which means we found the pattern - we are looking for. 
Its incremented to 4 once we see a byte after this pattern */ - cxt = ( viddec_sc_parse_cubby_cxt_t *)in; - size = 0; - data_left = cxt->size; - ptr = cxt->buf; - phase = cxt->phase; - cxt->sc_end_pos = -1; - pcxt=pcxt; - - /* parse until there is more data and start code not found */ - while ((data_left > 0) &&(phase < 3)) - { - /* Check if we are byte aligned & phase=0, if thats the case we can check - work at a time instead of byte*/ - if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) - { - while (data_left > 3) - { - uint32_t data; - char mask1 = 0, mask2=0; - - data = *((uint32_t *)ptr); - mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0)); - mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1)); - /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need - two consecutive zero bytes for a start code pattern */ - if (mask1 && mask2) - {/* Success so skip 4 bytes and start over */ - ptr+=4; - size+=4; - data_left-=4; - continue; - } - else - { - break; - } - } - } - - /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected - two zero bytes in the word so we look one byte at a time*/ - if (data_left > 0) - { - if (*ptr == FIRST_STARTCODE_BYTE) - {/* Phase can be 3 only if third start code byte is found */ - phase++; - ptr++; - size++; - data_left--; - if (phase > 2) - { - phase = 2; - - if ( (((uint32_t)ptr) & 0x3) == 0 ) - { - while ( data_left > 3 ) - { - if (*((uint32_t *)ptr) != 0) - { - break; - } - ptr+=4; - size+=4; - data_left-=4; - } - } - } - } - else - { - if ((*ptr == THIRD_STARTCODE_BYTE) && (phase == 2)) - {/* Match for start code so update context with byte position */ - phase = 3; - cxt->sc_end_pos = size; - } - else - { - phase = 0; - } - ptr++; - size++; - data_left--; - } - } - } - if ((data_left > 0) && (phase == 3)) - { - viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state; - cxt->sc_end_pos++; - state->next_sc = cxt->buf[cxt->sc_end_pos]; - state->second_scprfx_length = 3; - phase++; - ret = 1; - } - cxt->phase = phase; - /* Return SC found only if phase is 4, else always success */ - return ret; -} -#endif diff --git a/mixvbp/vbp_manager/viddec_pm_parser_ops.c b/mixvbp/vbp_manager/viddec_pm_parser_ops.c index 1ba8aa8..8c0a1ec 100755 --- a/mixvbp/vbp_manager/viddec_pm_parser_ops.c +++ b/mixvbp/vbp_manager/viddec_pm_parser_ops.c @@ -4,6 +4,7 @@ #include "viddec_parser_ops.h" #include "viddec_pm_utils_bstream.h" #include "viddec_fw_common_defs.h" +#include int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits) { @@ -14,7 +15,7 @@ int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits) ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits, 1); if (ret == -1) { - DEB("FAILURE!!!! getbits returned %d\n", ret); + VTRACE("FAILURE? getbits returned %d", ret); } return ret; diff --git a/mixvbp/vbp_manager/viddec_pm_utils_bstream.c b/mixvbp/vbp_manager/viddec_pm_utils_bstream.c index 6939fef..72d210b 100755 --- a/mixvbp/vbp_manager/viddec_pm_utils_bstream.c +++ b/mixvbp/vbp_manager/viddec_pm_utils_bstream.c @@ -51,95 +51,12 @@ uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cx return ret; } -#ifndef VBP -/* - This function returns true if cubby buffer has the last byte of access unit. 
-*/ -uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt) -{ - uint32_t last_byte_offset_plus_one=0; - uint8_t ret = 0; - /* Check to see if the last byte Acces unit offset is the last byte for current access unit. - End represents the first invalid byte, so (end - st) will give number of bytes.*/ - last_byte_offset_plus_one = cxt->au_pos + (cxt->bstrm_buf.buf_end - cxt->bstrm_buf.buf_st); - if ((int32_t)last_byte_offset_plus_one >= cxt->list->total_bytes) - { - ret = 1; - } - return ret; -} -#endif /* This function initializes scratch buffer, which is used for staging already read data, due to DMA limitations */ static inline void viddec_pm_utils_bstream_scratch_init(viddec_pm_utils_bstream_scratch_cxt_t *cxt) { - cxt->st = cxt->size = cxt->bitoff=0; + cxt->st = cxt->size = cxt->bitoff = 0; } -#ifndef VBP -/* This function tells us how much more data is in the current es buffer from current position. Its used to figure out if - we need to go to next es buffer -*/ -static inline uint32_t viddec_pm_utils_bstream_datafromindex(viddec_pm_utils_list_t *list, uint32_t index, uint32_t offset) -{ - uint32_t ret=0; - int32_t val=0; - val = (list->data[index].edpos <= (uint32_t)list->total_bytes) ? list->data[index].edpos: (uint32_t)list->total_bytes; - val = val - (int32_t)offset; - if (val > 0) ret = (uint32_t)val; - return val; -} - -/* This function seeks to byte offset position starting from lst_index, if more data is present in current ES buffer pointed by - lst_index returns the remaining data in current buffer along with physical address of byte offset. The lst_index parameter - at returns index of ES buffer in list which has byte_offset -*/ -static inline uint32_t viddec_pm_utils_bstream_maxbytes_from_index(viddec_pm_utils_bstream_cxt_t *cxt, - uint32_t *lst_index, - uint32_t byte_offset, - uint32_t *physaddr) -{ - viddec_pm_utils_list_t *list; - uint32_t last_byte_offst=0, bytes_left=0;/* default return value is 0 bytes */ - - list = cxt->list; - while (*lst_index < list->num_items) - { - /* Check to see if we reached the buffer with last valid byte of current access unit, List can have data beyond current access unit */ - last_byte_offst = (list->data[*lst_index].edpos <= (uint32_t)list->total_bytes) ? 
list->data[*lst_index].edpos: (uint32_t)list->total_bytes; - if (byte_offset < last_byte_offst) - {/* Found a match so return with data remaining */ - bytes_left = viddec_pm_utils_bstream_datafromindex(list, *lst_index, byte_offset); - *physaddr = viddec_pm_utils_bstream_getphys(cxt, byte_offset, *lst_index); - break; - } - *lst_index+=1; - } - return bytes_left; -} - -/* This function is for copying trailing bytes of cubby bitstream buffer to scratch buffer */ -static inline void viddec_pm_utils_bstream_scratch_copyto(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data, uint32_t num_bytes) -{ - uint32_t i=0; - for (i=0; ibuf_scratch[i] = *data; - data++; - cxt->size++; - } -} - -/* This function is for copying trailing bytes from scratch buffer to bitstream buffer */ -static inline void viddec_pm_utils_bstream_scratch_copyfrom(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data) -{ - uint32_t i=0; - for (i=0; isize; i++) - { - *data = cxt->buf_scratch[i]; - data++; - } -} -#endif /* This function populates requested number of bytes into data parameter, skips emulation prevention bytes if needed */ static inline int32_t viddec_pm_utils_getbytes(viddec_pm_utils_bstream_buf_cxt_t *bstream, @@ -158,7 +75,7 @@ static inline int32_t viddec_pm_utils_getbytes(viddec_pm_utils_bstream_buf_cxt_t while (valid_bytes_read < num_bytes) { cur_byte = bstream->buf[bstream->buf_index + *act_bytes]; - if (emul_reqd && (cur_byte == 0x3) &&(*phase == 2)) + if (emul_reqd && (cur_byte == 0x3) && (*phase == 2)) {/* skip emulation byte. we update the phase only if emulation prevention is enabled */ *phase = 0; } @@ -174,18 +91,18 @@ static inline int32_t viddec_pm_utils_getbytes(viddec_pm_utils_bstream_buf_cxt_t if (cur_byte == 0) { /* Update phase only if emulation prevention is required */ - *phase +=( ((*phase < 2) && emul_reqd ) ? 1: 0 ); + *phase += (((*phase < 2) && emul_reqd) ? 1: 0); } else { - *phase=0; + *phase = 0; } } valid_bytes_read++; } *act_bytes +=1; } - /* Check to see if we reached end during above operation. We might be out of range buts it safe since our array + /* Check to see if we reached end during above operation. 
We might be out of range buts it's safe since our array has at least MIN_DATA extra bytes and the maximum out of bounds we will go is 5 bytes */ if ((bstream->buf_index + *act_bytes -1) >= bstream->buf_end) { @@ -200,27 +117,7 @@ static inline int32_t viddec_pm_utils_getbytes(viddec_pm_utils_bstream_buf_cxt_t */ static inline void viddec_pm_utils_check_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *data_left) { -#ifdef VBP *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf)); -#else - uint8_t isReload=0; - - *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf)); - /* If we have minimum data we should continue, else try to read more data */ - if (*data_left bstrm_buf)); - /* Break out of loop if we reached last byte or we have enough data */ - isReload = !((*data_left >= MIN_DATA) || (viddec_pm_utils_bstream_nomoredata(cxt) == 1)); - } - } -#endif } /* @@ -232,12 +129,12 @@ static inline void viddec_pm_utils_update_skipoffsets(viddec_pm_utils_bstream_bu if ((bits & 0x7) == 0) { bstream->buf_bitoff = 0; - bstream->buf_index +=bytes; + bstream->buf_index += bytes; } else { bstream->buf_bitoff = bits & 0x7; - bstream->buf_index +=(bytes - 1); + bstream->buf_index += (bytes - 1); } } @@ -255,110 +152,21 @@ void viddec_pm_utils_skip_if_current_is_emulation(viddec_pm_utils_bstream_cxt_t if (cxt->is_emul_reqd && (cxt->phase >= 2) && (bstream->buf_bitoff == 0) && - (bstream->buf[bstream->buf_index] == 0x3) ) + (bstream->buf[bstream->buf_index] == 0x3)) { bstream->buf_index += 1; cxt->phase = 0; } } -#ifndef VBP -/* - This function gets physical address of the requested au offset(pos). -*/ - -uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index) -{ - uint32_t ret = 0, last_byte_offst=0; - viddec_pm_utils_list_t *list; - - list = cxt->list; - while (lst_index < list->num_items) - { - last_byte_offst = (list->data[lst_index].edpos <= (uint32_t)list->total_bytes) ? list->data[lst_index].edpos: (uint32_t)list->total_bytes; - if (pos < last_byte_offst) - { -#ifndef MFDBIGENDIAN - ret = (uint32_t)list->sc_ibuf[lst_index].buf; -#else - ret = list->sc_ibuf[lst_index].phys; -#endif - ret +=(pos - list->data[lst_index].stpos); - if (lst_index == 0) ret+=list->start_offset; - break; - } - lst_index++; - } - return ret; -} - -/* - Actual reload function which uses dma to refill bitstream buffer. 
-*/ -void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt) -{ - viddec_pm_utils_bstream_buf_cxt_t *bstream; - - bstream = &(cxt->bstrm_buf); - - /* Update current offset positions */ - cxt->au_pos += (bstream->buf_index - bstream->buf_st); - bstream->buf_st = bstream->buf_index; - /* copy leftover bytes into scratch */ - { - int32_t cur_bytes=0; - viddec_pm_utils_bstream_scratch_init(&(cxt->scratch)); - cur_bytes = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf)); - if (cur_bytes > 0) - { - viddec_pm_utils_bstream_scratch_copyto(&(cxt->scratch), &(bstream->buf[bstream->buf_index]), cur_bytes); - cxt->scratch.bitoff = bstream->buf_bitoff; - } - } - /* Initiate DMA and copyback scratch data */ - { - uint32_t data_left = 0, ddr_mask=0; - /* calculate necesary aligmnets and copy data */ - { - uint32_t ddr_addr=0, data_wrote=0; - uint32_t byte_pos; - /* byte pos points to the position from where we want to read data.*/ - byte_pos = cxt->au_pos + cxt->scratch.size; - data_left = viddec_pm_utils_bstream_maxbytes_from_index(cxt, &(cxt->list_off), byte_pos, &ddr_addr); - if (data_left > CUBBY_SIZE) - { - data_left = CUBBY_SIZE; - } - if (data_left != 0) - { - ddr_mask = ddr_addr & 0x3; - ddr_addr = ddr_addr & ~0x3; - data_wrote = cp_using_dma(ddr_addr, (uint32_t)&(bstream->buf[MIN_DATA]), (data_left + ddr_mask), 0, 1); - } - } - /* copy scratch data back to buffer and update offsets */ - { - uint32_t index=0; - index = MIN_DATA + ddr_mask; - index -= cxt->scratch.size; - viddec_pm_utils_bstream_scratch_copyfrom(&(cxt->scratch), &(bstream->buf[index])); - bstream->buf_st = bstream->buf_index = index; - bstream->buf_end = data_left + cxt->scratch.size + bstream->buf_st; - bstream->buf_bitoff = cxt->scratch.bitoff; - } - } -} -#endif /* Init function called by parser manager after sc code detected. 
*/ void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul) { -#ifdef VBP - cxt->emulation_byte_counter = 0; -#endif + cxt->emulation_byte_counter = 0; cxt->au_pos = 0; cxt->list = list; cxt->list_off = 0; @@ -393,7 +201,7 @@ int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t * int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits) { int32_t ret = -1; - uint32_t data_left=0; + uint32_t data_left = 0; viddec_pm_utils_bstream_buf_cxt_t *bstream; bstream = &(cxt->bstrm_buf); @@ -402,24 +210,22 @@ int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uin { uint8_t bytes_required=0; - bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3; + bytes_required = (bstream->buf_bitoff + num_bits + 7) >> 3; if (bytes_required <= data_left) { viddec_pm_utils_getbits_t data; uint32_t act_bytes =0; - if (viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &(cxt->phase), bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1) + if (viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &(cxt->phase), bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1) { - uint32_t total_bits=0; - total_bits=num_bits+bstream->buf_bitoff; + uint32_t total_bits = 0; + total_bits = num_bits + bstream->buf_bitoff; viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes); - ret=1; + ret = 1; -#ifdef VBP if (act_bytes > bytes_required) { cxt->emulation_byte_counter = act_bytes - bytes_required; } -#endif } } } @@ -438,11 +244,11 @@ int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uin if ((num_bits <= 32) && (num_bits > 0) && (data_left != 0)) { - uint32_t bytes_required=0; + uint32_t bytes_required = 0; viddec_pm_utils_bstream_buf_cxt_t *bstream; bstream = &(cxt->bstrm_buf); - bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3; + bytes_required = (bstream->buf_bitoff + num_bits + 7) >> 3; /* Step 2: Make sure we have bytes for requested bits */ if (bytes_required <= data_left) @@ -450,22 +256,21 @@ int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uin uint32_t act_bytes, phase; viddec_pm_utils_getbits_t data; phase = cxt->phase; - /* Step 3: Due to emualtion prevention bytes sometimes the bytes_required > actual_required bytes */ + /* Step 3: Due to emualtion prevention bytes sometimes the bytes_required < actual_required bytes */ if (viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &phase, bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1) { - uint32_t total_bits=0; - uint32_t shift_by=0; + uint32_t total_bits = 0; + uint32_t shift_by = 0; /* zero out upper bits */ /* LIMITATION:For some reason compiler is optimizing it to NOP if i do both shifts in single statement */ data.byte[0] <<= bstream->buf_bitoff; data.byte[0] >>= bstream->buf_bitoff; -#ifndef MFDBIGENDIAN data.word[0] = SWAP_WORD(data.word[0]); data.word[1] = SWAP_WORD(data.word[1]); -#endif - total_bits = num_bits+bstream->buf_bitoff; + + total_bits = num_bits + bstream->buf_bitoff; if (total_bits > 32) { /* We have to use both the words to get required data */ @@ -484,15 +289,13 @@ int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uin viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes); cxt->phase = phase; -#ifdef VBP if (act_bytes > bytes_required) { cxt->emulation_byte_counter += act_bytes - bytes_required; } -#endif } - ret =1; + ret = 1; } } } 
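The bitstream utilities above keep a running phase so that viddec_pm_utils_getbytes() can skip H.264 emulation prevention bytes while accumulating their count in emulation_byte_counter: two consecutive zero bytes arm the detector, and a following 0x03 byte is consumed without being returned. The helper below is a minimal self-contained sketch of that same escape-removal rule; strip_emulation_prevention is a hypothetical name, not a mixvbp function.

    #include <stdint.h>
    #include <stddef.h>

    // Copy 'size' bytes from 'in' to 'out' (out must hold at least 'size'
    // bytes), dropping H.264 emulation prevention bytes (00 00 03 -> 00 00).
    // Returns the number of 0x03 bytes skipped, i.e. the quantity the parser
    // tracks in cxt->emulation_byte_counter.
    static size_t strip_emulation_prevention(const uint8_t* in, size_t size,
                                             uint8_t* out, size_t* out_size)
    {
        size_t skipped = 0;
        size_t n = 0;
        uint32_t phase = 0; // consecutive zero bytes seen so far, capped at 2

        for (size_t i = 0; i < size; i++) {
            uint8_t cur_byte = in[i];
            if (cur_byte == 0x3 && phase == 2) {
                phase = 0;  // 00 00 03: drop the escape byte, restart counting
                skipped++;
                continue;
            }
            phase = (cur_byte == 0) ? ((phase < 2) ? phase + 1 : phase) : 0;
            out[n++] = cur_byte;
        }
        *out_size = n;
        return skipped;
    }

Capping the phase at 2 mirrors the parser's *phase += (((*phase < 2) && emul_reqd) ? 1 : 0) update, so a run such as 00 00 00 03 also has its 0x03 removed.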
diff --git a/mixvbp/vbp_plugin/h264/Android.mk b/mixvbp/vbp_plugin/h264/Android.mk index f015988..2045d32 100755 --- a/mixvbp/vbp_plugin/h264/Android.mk +++ b/mixvbp/vbp_plugin/h264/Android.mk @@ -2,32 +2,31 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) -LOCAL_SRC_FILES := \ - h264parse.c \ - h264parse_bsd.c \ - h264parse_math.c \ - h264parse_mem.c \ - h264parse_sei.c \ - h264parse_sh.c \ - h264parse_pps.c \ - h264parse_sps.c \ - h264parse_dpb.c \ - viddec_h264_parse.c \ +LOCAL_SRC_FILES := \ + h264parse.c \ + h264parse_bsd.c \ + h264parse_math.c \ + h264parse_mem.c \ + h264parse_sei.c \ + h264parse_sh.c \ + h264parse_pps.c \ + h264parse_sps.c \ + h264parse_dpb.c \ + viddec_h264_parse.c \ mix_vbp_h264_stubs.c -LOCAL_CFLAGS := -DVBP -DHOST_ONLY - -LOCAL_C_INCLUDES := \ - $(LOCAL_PATH)/include \ - $(MIXVBP_DIR)/include \ - $(MIXVBP_DIR)/vbp_manager/include \ - $(MIXVBP_DIR)/vbp_manager/h264/include +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/include \ + $(MIXVBP_DIR)/include \ + $(MIXVBP_DIR)/vbp_manager/include \ + $(MIXVBP_DIR)/vbp_manager/h264/include LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libmixvbp_h264 -LOCAL_SHARED_LIBRARIES := \ - libmixvbp +LOCAL_SHARED_LIBRARIES := \ + libmixvbp \ + liblog include $(BUILD_SHARED_LIBRARY) @@ -48,18 +47,18 @@ LOCAL_SRC_FILES := \ secvideo/baytrail/viddec_h264secure_parse.c \ mix_vbp_h264_stubs.c -LOCAL_CFLAGS := -DVBP -DHOST_ONLY -DUSE_AVC_SHORT_FORMAT +LOCAL_CFLAGS := -DUSE_AVC_SHORT_FORMAT LOCAL_C_INCLUDES := \ - $(LOCAL_PATH)/include \ - $(MIXVBP_DIR)/include \ - $(MIXVBP_DIR)/vbp_manager/include \ - $(MIXVBP_DIR)/vbp_manager/h264/include + $(LOCAL_PATH)/include \ + $(MIXVBP_DIR)/include \ + $(MIXVBP_DIR)/vbp_manager/include \ + $(MIXVBP_DIR)/vbp_manager/h264/include LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libmixvbp_h264secure -LOCAL_SHARED_LIBRARIES := libmixvbp +LOCAL_SHARED_LIBRARIES := libmixvbp liblog include $(BUILD_SHARED_LIBRARY) diff --git a/mixvbp/vbp_plugin/h264/h264parse.c b/mixvbp/vbp_plugin/h264/h264parse.c index 9e36b7c..d4f9c9e 100755 --- a/mixvbp/vbp_plugin/h264/h264parse.c +++ b/mixvbp/vbp_plugin/h264/h264parse.c @@ -14,6 +14,8 @@ #include "h264.h" #include "h264parse.h" #include "h264parse_dpb.h" +#include + /* ------------------------------------------------------------------------------------------ */ @@ -38,22 +40,22 @@ h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOf 58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63 }; - lastScale = 8; - nextScale = 8; + lastScale = 8; + nextScale = 8; scanj = 0; - for (j=0; jfirst_mb_in_slice == 0) || (SliceHeader->pic_parameter_id != pInfo->active_PPS.pic_parameter_set_id)) { -#ifndef WIN32 h264_Parse_Copy_Pps_From_DDR(pInfo, &pInfo->active_PPS, SliceHeader->pic_parameter_id); if (pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS) { - return H264_PPS_INVALID_PIC_ID; /// Invalid PPS detected + WTRACE("Invalid PPS detected"); + return H264_PPS_INVALID_PIC_ID; } if (pInfo->active_PPS.seq_parameter_set_id != pInfo->active_SPS.seq_parameter_set_id) { - pInfo->Is_SPS_updated =1; + pInfo->Is_SPS_updated = 1; h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id); h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id); } - else + else if (h264_Parse_Check_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id)) { - if (h264_Parse_Check_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id)) - { - pInfo->Is_SPS_updated =1; - 
h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id); - h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id); - } + pInfo->Is_SPS_updated =1; + h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id); + h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id); } -#else - pInfo->active_PPS = PPS_GL[SliceHeader->pic_parameter_id]; - pInfo->active_SPS = SPS_GL[pInfo->active_PPS.seq_parameter_set_id]; -#endif - if (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS) { - return H264_PPS_INVALID_PIC_ID; //// Invalid SPS detected + WTRACE("Invalid SPS detected"); + return H264_PPS_INVALID_PIC_ID; } } - else { - if ((pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS) || (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS)) + else + { + if ((pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS) + || (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS)) { - return H264_PPS_INVALID_PIC_ID; /// Invalid PPS detected + WTRACE("Invalid SPS detected"); + return H264_PPS_INVALID_PIC_ID; } } - pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); - //pInfo->img.PicHeightInMapUnits = (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1); pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag? \ - (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1): \ - ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1)<<1); - + (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) : \ + ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1); return H264_STATUS_OK; }; //// End of h264_active_par_set @@ -144,8 +139,6 @@ h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_ //// Part2/3: depend on the active parset ////////////////////////////////////////////////// - //retStatus = h264_Parse_Slice_Header_1(pInfo); - SliceHeader->sh_error = 0; if (h264_Parse_Slice_Header_1(parent, pInfo, SliceHeader) == H264_STATUS_OK) @@ -156,25 +149,26 @@ h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_ retStatus = h264_active_par_set(pInfo, SliceHeader); } - if (retStatus == H264_STATUS_OK) { + if (retStatus == H264_STATUS_OK) + { switch (pInfo->active_SPS.profile_idc) { case h264_ProfileBaseline: case h264_ProfileMain: case h264_ProfileExtended: - pInfo->active_PPS.transform_8x8_mode_flag=0; - pInfo->active_PPS.pic_scaling_matrix_present_flag =0; + pInfo->active_PPS.transform_8x8_mode_flag = 0; + pInfo->active_PPS.pic_scaling_matrix_present_flag = 0; pInfo->active_PPS.second_chroma_qp_index_offset = pInfo->active_PPS.chroma_qp_index_offset; default: break; } - if ( h264_Parse_Slice_Header_2(parent, pInfo, SliceHeader) != H264_STATUS_OK) + if (h264_Parse_Slice_Header_2(parent, pInfo, SliceHeader) != H264_STATUS_OK) { SliceHeader->sh_error |= 2; } - else if ( h264_Parse_Slice_Header_3(parent, pInfo, SliceHeader) != H264_STATUS_OK) + else if (h264_Parse_Slice_Header_3(parent, pInfo, SliceHeader) != H264_STATUS_OK) { SliceHeader->sh_error |= 4; } @@ -214,30 +208,13 @@ h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref { h264_Status ret = H264_STATUS_ERROR; - //h264_NAL_Unit_t* NAL = &pInfo->NAL; uint32_t code; -#if 0 - viddec_pm_get_bits(void * parent,uint32_t * data,uint32_t num_bits)(parent, &code, 24); - viddec_pm_get_bits(parent, &code, 1); //forbidden_zero_bit - - viddec_pm_get_bits(parent, 
&code, 2); - SliceHeader->nal_ref_idc = (uint8_t)code; - - viddec_pm_get_bits(parent, &code, 5); - pInfo->nal_unit_type = (uint8_t)code; -#else -#ifdef VBP - if ( viddec_pm_get_bits(parent, &code, 8) != -1) -#else - //// 24bit SC, 1 bit: forbidden_zero_bit, 2 bitrs: nal_ref_idc, 5 bits: nal_unit_type - if ( viddec_pm_get_bits(parent, &code, 32) != -1) -#endif + if (viddec_pm_get_bits(parent, &code, 8) != -1) { - *nal_ref_idc = (uint8_t)((code>>5)&0x3); - pInfo->nal_unit_type = (uint8_t)((code>>0)&0x1f); + *nal_ref_idc = (uint8_t)((code >> 5) & 0x3); + pInfo->nal_unit_type = (uint8_t)((code >> 0) & 0x1f); ret = H264_STATUS_OK; } -#endif return ret; } @@ -348,7 +325,7 @@ void h264_init_Info_under_sps_pps_level(h264_Info* pInfo) pInfo->h264_pwt_end_bit_offset = 0; pInfo->h264_pwt_enabled = 0; - for (i=0; i<32; i++) + for (i = 0; i < 32; i++) { pInfo->slice_ref_list0[i] = 0; pInfo->slice_ref_list1[i] = 0; @@ -370,8 +347,6 @@ void h264_init_Info_under_sps_pps_level(h264_Info* pInfo) pInfo->sei_rp_received = 0; pInfo->last_I_frame_idc = 255; - pInfo->wl_err_curr = 0; - pInfo->wl_err_next = 0; pInfo->primary_pic_type_plus_one = 0; pInfo->sei_b_state_ready = 0; @@ -424,34 +399,27 @@ int32_t h264_is_second_field(h264_Info * pInfo) if (cur_slice.structure != FRAME) { - if ( ( MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)&&(3 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) ) - &&(0 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) )) + if (( MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc) + && (3 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc]))) + && (0 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])))) { - if ((cur_slice.frame_num == old_slice.frame_num)||(cur_slice.idr_flag)) + if ((cur_slice.frame_num == old_slice.frame_num) || cur_slice.idr_flag) { - if (old_slice.structure != cur_slice.structure) { - - if (((cur_slice.structure == TOP_FIELD &&old_slice.structure == BOTTOM_FIELD) || // Condition 1: - (old_slice.structure == TOP_FIELD && cur_slice.structure == BOTTOM_FIELD)) && \ - ((old_slice.nal_ref_idc ==0 && cur_slice.nal_ref_idc == 0) || // Condition 2: - (old_slice.nal_ref_idc !=0 &&cur_slice.nal_ref_idc != 0))) + if (((cur_slice.structure == TOP_FIELD && old_slice.structure == BOTTOM_FIELD) || // Condition 1: + (old_slice.structure == TOP_FIELD && cur_slice.structure == BOTTOM_FIELD)) && \ + ((old_slice.nal_ref_idc == 0 && cur_slice.nal_ref_idc == 0) || // Condition 2: + (old_slice.nal_ref_idc != 0 && cur_slice.nal_ref_idc != 0))) { //pInfo->img.second_field = 1; result = 1; } } } - - } - - } - - return result; } //// End of h264_is_second_field @@ -472,8 +440,6 @@ int32_t h264_is_new_picture_start(h264_Info * pInfo, h264_Slice_Header_t cur_sli return 1; } - - result |= (old_slice.pic_parameter_id != cur_slice.pic_parameter_id); result |= (old_slice.frame_num != cur_slice.frame_num); result |= (old_slice.field_pic_flag != cur_slice.field_pic_flag); @@ -511,9 +477,9 @@ int32_t h264_check_previous_frame_end(h264_Info * pInfo) { int result = 0; - if ( (h264_NAL_UNIT_TYPE_SLICE==pInfo->old_nal_unit_type)||(h264_NAL_UNIT_TYPE_IDR==pInfo->old_nal_unit_type) ) + if ((h264_NAL_UNIT_TYPE_SLICE == pInfo->old_nal_unit_type) + || (h264_NAL_UNIT_TYPE_IDR == pInfo->old_nal_unit_type)) { - switch ( pInfo->nal_unit_type ) { case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: @@ -527,29 +493,22 @@ int32_t h264_check_previous_frame_end(h264_Info * pInfo) case h264_NAL_UNIT_TYPE_Reserved3: case h264_NAL_UNIT_TYPE_Reserved4: case 
h264_NAL_UNIT_TYPE_Reserved5: - { pInfo->img.current_slice_num = 0; - if ((pInfo->img.structure == FRAME) || (pInfo->img.second_field)) { + if ((pInfo->img.structure == FRAME) || (pInfo->img.second_field)) + { pInfo->is_frame_boundary_detected_by_non_slice_nal =1; pInfo->is_current_workload_done=1; result=1; } break; - } default: break; } - } - return result; - } - - - /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -570,7 +529,7 @@ void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeade pInfo->old_slice.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag; } - pInfo->old_slice.nal_ref_idc = pInfo->SliceHeader.nal_ref_idc; + pInfo->old_slice.nal_ref_idc = pInfo->SliceHeader.nal_ref_idc; pInfo->old_slice.structure = pInfo->SliceHeader.structure; @@ -618,7 +577,8 @@ void h264_update_img_info(h264_Info * pInfo ) pInfo->img.MbaffFrameFlag = pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag)); pInfo->img.pic_order_cnt_type = pInfo->active_SPS.pic_order_cnt_type; - if (pInfo->img.pic_order_cnt_type == 1) { + if (pInfo->img.pic_order_cnt_type == 1) + { pInfo->img.num_ref_frames_in_pic_order_cnt_cycle = pInfo->active_SPS.num_ref_frames_in_pic_order_cnt_cycle; pInfo->img.delta_pic_order_always_zero_flag = pInfo->active_SPS.delta_pic_order_always_zero_flag; pInfo->img.offset_for_non_ref_pic = pInfo->active_SPS.offset_for_non_ref_pic; @@ -646,27 +606,24 @@ void h264_update_img_info(h264_Info * pInfo ) pInfo->sei_information.recovery_frame_num = pInfo->img.frame_num; - /////////////////////////////////////////////////Resolution Change pInfo->img.curr_has_mmco_5 = 0; - if ( (pInfo->img.PicWidthInMbs != p_dpb->PicWidthInMbs)|| - (pInfo->img.FrameHeightInMbs != p_dpb->FrameHeightInMbs) ) + if ((pInfo->img.PicWidthInMbs != p_dpb->PicWidthInMbs)|| + (pInfo->img.FrameHeightInMbs != p_dpb->FrameHeightInMbs)) { - int32_t no_output_old_pics = (pInfo->SliceHeader.idr_flag)? pInfo->img.no_output_of_prior_pics_flag : 0; + int32_t no_output_old_pics = (pInfo->SliceHeader.idr_flag) ? 
pInfo->img.no_output_of_prior_pics_flag : 0; // If resolution changed, reset the soft DPB here h264_dpb_reset_dpb(pInfo, pInfo->img.PicWidthInMbs, pInfo->img.FrameHeightInMbs, 1, no_output_old_pics); } return; - } ///// End of init new frame void h264_update_frame_type(h264_Info * pInfo ) { - //update frame type if (pInfo->img.structure == FRAME) { @@ -682,7 +639,6 @@ void h264_update_frame_type(h264_Info * pInfo ) } else { -#if 1 switch (pInfo->SliceHeader.slice_type) { case h264_PtypeB: @@ -706,10 +662,7 @@ void h264_update_frame_type(h264_Info * pInfo ) break; } -#endif - } - } else if (pInfo->img.structure == TOP_FIELD) { @@ -726,12 +679,12 @@ void h264_update_frame_type(h264_Info * pInfo ) break; case h264_PtypeSP: case h264_PtypeP: - if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET != FRAME_TYPE_B) - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); + if (((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET)) >> FRAME_TYPE_TOP_OFFSET != FRAME_TYPE_B) + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_TOP_OFFSET) | (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); break; case h264_PtypeI: case h264_PtypeSI: - if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET == FRAME_TYPE_INVALID) + if (((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET)) >> FRAME_TYPE_TOP_OFFSET == FRAME_TYPE_INVALID) { pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)); } @@ -752,25 +705,25 @@ void h264_update_frame_type(h264_Info * pInfo ) { if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) { - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));; + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_BOTTOM_OFFSET) | (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));; } else { switch (pInfo->SliceHeader.slice_type) { case h264_PtypeB: - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_BOTTOM_OFFSET) | (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); break; case h264_PtypeSP: case h264_PtypeP: - if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET != FRAME_TYPE_B) - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); + if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)) >> FRAME_TYPE_BOTTOM_OFFSET != FRAME_TYPE_B) + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_BOTTOM_OFFSET) | (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); break; case h264_PtypeI: case h264_PtypeSI: - if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << 
FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET == FRAME_TYPE_INVALID) + if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)) >> FRAME_TYPE_BOTTOM_OFFSET == FRAME_TYPE_INVALID) { - pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); + pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_BOTTOM_OFFSET) | (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET)); } if (pInfo->sei_rp_received) pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc + PUT_LIST_INDEX_FIELD_BIT(1); @@ -782,7 +735,6 @@ void h264_update_frame_type(h264_Info * pInfo ) break; } - } } diff --git a/mixvbp/vbp_plugin/h264/h264parse_bsd.c b/mixvbp/vbp_plugin/h264/h264parse_bsd.c index 40c7559..84f94b2 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_bsd.c +++ b/mixvbp/vbp_plugin/h264/h264parse_bsd.c @@ -60,20 +60,19 @@ uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo) temp = (temp << bits_offset); - if (temp!=0) + if (temp != 0) { bits_need_add_in_first_byte = bits_offset; } - is_first_byte =0; + is_first_byte = 0; } else { noOfBits = 8;/* always 8 bits as we read a byte at a time */ bits_operation_result = viddec_pm_peek_bits(parent, &temp, 8); - } - if (-1==bits_operation_result) + if (-1 == bits_operation_result) { return MAX_INT32_VALUE; } @@ -81,7 +80,7 @@ uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo) if (temp != 0) { // if byte!=0 we have at least one bit with value 1. - count=1; + count = 1; while (((temp & 0x80) != 0x80) && (count <= noOfBits)) { count++; @@ -122,7 +121,7 @@ uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo) count = ((count + bits_need_add_in_first_byte)& 0x7); leadingZeroBits --; - length = leadingZeroBits; + length = leadingZeroBits; codeNum = 0; noOfBits = 8 - count; @@ -174,8 +173,8 @@ int32_t h264_GetVLCElement(void *parent, h264_Info* pInfo, uint8_t bIsSigned) if (bIsSigned) //get signed integer golomb code else the value is unsigned { - sign = (sval & 0x1)?1:-1; - sval = (sval +1) >> 1; + sign = (sval & 0x1) ? 
1 : -1; + sval = (sval + 1) >> 1; sval = sval * sign; } @@ -189,11 +188,11 @@ uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo) { uint8_t cnt = 0; - uint8_t is_emul =0; - uint8_t cur_byte = 0; - int32_t shift_bits =0; + uint8_t is_emul =0; + uint8_t cur_byte = 0; + int32_t shift_bits = 0; uint32_t ctr_bit = 0; - uint32_t bits_offset =0, byte_offset =0; + uint32_t bits_offset = 0, byte_offset =0; //remove warning pInfo = pInfo; @@ -203,20 +202,20 @@ uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo) viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - shift_bits = 7-bits_offset; + shift_bits = 7 - bits_offset; // read one byte viddec_pm_get_cur_byte(parent, &cur_byte); - ctr_bit = ((cur_byte)>> (shift_bits--)) & 0x01; + ctr_bit = (cur_byte >> (shift_bits--)) & 0x01; // a stop bit has to be one - if (ctr_bit==0) + if (ctr_bit == 0) return 1; - while (shift_bits>=0 && !cnt) + while (shift_bits >= 0 && !cnt) { - cnt |= (((cur_byte)>> (shift_bits--)) & 0x01); // set up control bit + cnt |= (((cur_byte) >> (shift_bits--)) & 0x01); // set up control bit } return (cnt); diff --git a/mixvbp/vbp_plugin/h264/h264parse_dpb.c b/mixvbp/vbp_plugin/h264/h264parse_dpb.c index 5f660de..d238958 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_dpb.c +++ b/mixvbp/vbp_plugin/h264/h264parse_dpb.c @@ -9,18 +9,14 @@ #include "viddec_pm.h" - -//#include #include "h264parse.h" #include "h264parse_dpb.h" -//#include "h264_debug.h" +#include <vbp_trace.h> #ifndef NULL #define NULL 0 #endif -//#ifndef USER_MODE -//#define NULL 0 -//#endif + ///////////////////////// DPB init ////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////// @@ -37,7 +33,7 @@ void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb) //h264_memset(p_dpb, 0x0, sizeof(h264_DecodedPictureBuffer) ); - for (i=0; i<NUM_DPB_FRAME_STORES; i++) + for (i = 0; i < NUM_DPB_FRAME_STORES; i++) { p_dpb->fs[i].fs_idc = MPD_DPB_FS_NULL_IDC; p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC; @@ -108,18 +104,22 @@ void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb, int32_t NonExi else { uint32_t found_in_list = 0, i = 0; - for (i = 0; (i < p_dpb->ltref_frames_in_buffer) && (found_in_list == 0); i++) { + for (i = 0; (i < p_dpb->ltref_frames_in_buffer) && (found_in_list == 0); i++) + { if (p_dpb->fs_ltref_idc[i] == p_dpb->active_fs->fs_idc) found_in_list = 1; } - if (found_in_list == 0) h264_dpb_add_ltref_list(p_dpb, p_dpb->active_fs->fs_idc); + if (found_in_list == 0) + h264_dpb_add_ltref_list(p_dpb, p_dpb->active_fs->fs_idc); } } else { - if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME) { + if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME) + { h264_dpb_add_ref_list(p_dpb, p_dpb->active_fs->fs_idc); - } else + } + else { uint32_t found_in_list = 0, i = 0; @@ -162,7 +162,7 @@ void h264_list_sort(uint8_t *list, int32_t *sort_indices, int32_t size, int32_t // Dodgy looking for embedded code here... if (size > 1) { - for (j = 0; j < size-1; j = j + 1) { + for (j = 0; j < size - 1; j = j + 1) { for (k = j + 1; k < size; k = k + 1) { if ((desc & (sort_indices[j] < sort_indices[k]))| (~desc & (sort_indices[j] > sort_indices[k])) ) @@ -232,23 +232,23 @@ int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, int32_t lterm; list_idx = 0; - lterm = (long_term)?
1 : 0; + if (list_size) + { top_idx = 0; bot_idx = 0; - if (currPicStructure == TOP_FIELD) { - while ((top_idx < list_size)||(bot_idx < list_size)) + if (currPicStructure == TOP_FIELD) + { + while ((top_idx < list_size) || (bot_idx < list_size)) { /////////////////////////////////////////// ref Top Field got_pic = 0; while ((top_idx < list_size) & ~got_pic) { h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]); - if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x1) + if ((viddec_h264_get_is_used(p_dpb->active_fs)) & 0x1) { if (h264_dpb_pic_is_top_field_ref(p_dpb, long_term)) { @@ -265,7 +265,7 @@ int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, while ((bot_idx < list_size) & ~got_pic) { h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]); - if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x2) + if ((viddec_h264_get_is_used(p_dpb->active_fs)) & 0x2) { if (h264_dpb_pic_is_bottom_field_ref(p_dpb, long_term)) { @@ -281,14 +281,14 @@ int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, /////////////////////////////////////////////// current Bottom Field if (currPicStructure == BOTTOM_FIELD) { - while ((top_idx < list_size)||(bot_idx < list_size)) + while ((top_idx < list_size) || (bot_idx < list_size)) { /////////////////////////////////////////// ref Top Field got_pic = 0; while ((bot_idx < list_size) && (!(got_pic))) { h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]); - if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x2) { + if ((viddec_h264_get_is_used(p_dpb->active_fs)) & 0x2) { if (h264_dpb_pic_is_bottom_field_ref(p_dpb, long_term)) { // short term ref pic pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field @@ -304,7 +304,7 @@ int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, while ((top_idx < list_size) && (!(got_pic))) { h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]); - if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x1) { + if ((viddec_h264_get_is_used(p_dpb->active_fs)) & 0x1) { if (h264_dpb_pic_is_top_field_ref(p_dpb, long_term)) { // short term ref pic pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field @@ -489,7 +489,8 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) if (pInfo->SliceHeader.slice_type == h264_PtypeP) { h264_list_sort(sort_fs_idc, list_sort_number, list0idx-p_dpb->listXsize[0], 0); - for (idx = p_dpb->listXsize[0]; idx < list0idx; idx++) { + for (idx = p_dpb->listXsize[0]; idx < list0idx; idx++) + { p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; } p_dpb->listXsize[0] = list0idx; @@ -514,21 +515,27 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); if (p_dpb->active_fs->frame.used_for_reference) { - if (p_dpb->active_fs->frame_num > pInfo->SliceHeader.frame_num) { + if (p_dpb->active_fs->frame_num > pInfo->SliceHeader.frame_num) + { p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num - MaxFrameNum; - } else { + } + else + { p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num; } - if ((p_dpb->active_fs->frame.used_for_reference)&0x1) { + if ((p_dpb->active_fs->frame.used_for_reference) & 0x1) + { p_dpb->active_fs->top_field.pic_num = (p_dpb->active_fs->frame_num_wrap << 1) + add_top; } - if ((p_dpb->active_fs->frame.used_for_reference)&0x2) { + if ((p_dpb->active_fs->frame.used_for_reference) & 0x2) + { p_dpb->active_fs->bottom_field.pic_num 
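/* The reindented block above implements the spec 8.2.4.1 short-term
 * numbering. A minimal restatement of that derivation (illustrative sketch;
 * MaxFrameNum is the spec-level name, add_top/add_bottom are this file's
 * parity offsets relative to the current field):
 *
 *     int32_t wrap = (ref_frame_num > cur_frame_num)
 *                        ? ref_frame_num - MaxFrameNum    // FrameNumWrap
 *                        : ref_frame_num;
 *     top_pic_num    = (wrap << 1) + add_top;             // PicNum, top field
 *     bottom_pic_num = (wrap << 1) + add_bottom;          // PicNum, bottom field
 */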
= (p_dpb->active_fs->frame_num_wrap << 1) + add_bottom; } - if (pInfo->SliceHeader.slice_type == h264_PtypeP) { + if (pInfo->SliceHeader.slice_type == h264_PtypeP) + { sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; list_sort_number[list0idx] = p_dpb->active_fs->frame_num_wrap; list0idx++; @@ -539,7 +546,8 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) if (pInfo->SliceHeader.slice_type == h264_PtypeP) { h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); - for (idx = 0; idx < list0idx; idx++) { + for (idx = 0; idx < list0idx; idx++) + { gen_pic_fs_list0[idx] = sort_fs_idc[idx]; } @@ -557,11 +565,13 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]); - if (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x1) { + if (viddec_h264_get_is_long_term(p_dpb->active_fs) & 0x1) + { p_dpb->active_fs->top_field.long_term_pic_num = (p_dpb->active_fs->top_field.long_term_frame_idx << 1) + add_top; } - if (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2) { + if (viddec_h264_get_is_long_term(p_dpb->active_fs) & 0x2) + { p_dpb->active_fs->bottom_field.long_term_pic_num = (p_dpb->active_fs->bottom_field.long_term_frame_idx << 1) + add_bottom; } @@ -576,19 +586,20 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) if (pInfo->SliceHeader.slice_type == h264_PtypeP) { h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0); - for (idx = 0; idx < listltidx; idx++) { + for (idx = 0; idx < listltidx; idx++) + { gen_pic_fs_listlt[idx] = sort_fs_idc[idx]; } list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1); - for (idx = 0; idx < list0idx_1; idx++) { + for (idx = 0; idx < list0idx_1; idx++) + { p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx]; } p_dpb->listXsize[0] += list0idx_1; } } - if (pInfo->SliceHeader.slice_type == h264_PtypeI) { p_dpb->listXsize[0] = 0; @@ -631,7 +642,7 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) if (skip_picture == 0) { - if ((p_dpb->active_fs->frame.used_for_reference==3) && (!(p_dpb->active_fs->frame.is_long_term))) + if ((p_dpb->active_fs->frame.used_for_reference == 3) && (!(p_dpb->active_fs->frame.is_long_term))) { if (pInfo->img.framepoc >= p_dpb->active_fs->frame.poc) { @@ -645,7 +656,8 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) } h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); - for (idx = 0; idx < list0idx; idx++) { + for (idx = 0; idx < list0idx; idx++) + { p_dpb->listX_0[idx] = sort_fs_idc[idx]; } @@ -680,17 +692,14 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) } h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0); - for (idx = list0idx_1; idx < list0idx; idx++) { + for (idx = list0idx_1; idx < list0idx; idx++) p_dpb->listX_0[idx] = sort_fs_idc[idx-list0idx_1]; - } - for (idx = 0; idx < list0idx_1; idx++) { + for (idx = 0; idx < list0idx_1; idx++) p_dpb->listX_1[list0idx-list0idx_1+idx] = p_dpb->listX_0[idx]; - } - for (idx = list0idx_1; idx < list0idx; idx++) { + for (idx = list0idx_1; idx < list0idx; idx++) p_dpb->listX_1[idx-list0idx_1] = p_dpb->listX_0[idx]; - } p_dpb->listXsize[0] = list0idx; p_dpb->listXsize[1] = list0idx; @@ -715,8 +724,8 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 0); for (idx = p_dpb->listXsize[0]; idx < (p_dpb->listXsize[0]+list0idx); idx = idx + 1) { - p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; - p_dpb->listX_1[idx] = (1<<6) + 
sort_fs_idc[idx-p_dpb->listXsize[0]]; + p_dpb->listX_0[idx] = (1 << 6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; + p_dpb->listX_1[idx] = (1 << 6) + sort_fs_idc[idx-p_dpb->listXsize[0]]; } p_dpb->listXsize[0] += list0idx; @@ -728,16 +737,20 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - if (viddec_h264_get_is_used(p_dpb->active_fs)) { - if (check_non_existing) { + if (viddec_h264_get_is_used(p_dpb->active_fs)) + { + if (check_non_existing) + { if (viddec_h264_get_is_non_existent(p_dpb->active_fs)) skip_picture = 1; else skip_picture = 0; } - if (skip_picture == 0) { - if (pInfo->img.ThisPOC >= p_dpb->active_fs->frame.poc) { + if (skip_picture == 0) + { + if (pInfo->img.ThisPOC >= p_dpb->active_fs->frame.poc) + { sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx]; list_sort_number[list0idx] = p_dpb->active_fs->frame.poc; list0idx++; @@ -747,7 +760,8 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) } h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1); - for (idx = 0; idx < list0idx; idx = idx + 1) { + for (idx = 0; idx < list0idx; idx = idx + 1) + { gen_pic_fs_list0[idx] = sort_fs_idc[idx]; } @@ -759,15 +773,18 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); if (viddec_h264_get_is_used(p_dpb->active_fs)) { - if (check_non_existing) { + if (check_non_existing) + { if (viddec_h264_get_is_non_existent(p_dpb->active_fs)) skip_picture = 1; else skip_picture = 0; } - if (skip_picture == 0) { - if (pInfo->img.ThisPOC < p_dpb->active_fs->frame.poc) { + if (skip_picture == 0) + { + if (pInfo->img.ThisPOC < p_dpb->active_fs->frame.poc) + { sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx]; list_sort_number[list0idx-list0idx_1] = p_dpb->active_fs->frame.poc; list0idx++; @@ -863,12 +880,8 @@ void h264_dpb_update_ref_lists(h264_Info * pInfo) p_dpb->listXsize[1] = pInfo->SliceHeader.num_ref_idx_l1_active; } - - } - - /// DPB reorder list h264_dpb_reorder_lists(pInfo); @@ -905,7 +918,7 @@ static frame_param_ptr h264_dpb_get_short_term_pic(h264_Info * pInfo,int32_t pic } else // current picture is a field { - if (temp_fs->frame.used_for_reference&0x1) + if (temp_fs->frame.used_for_reference & 0x1) if (!(temp_fs->top_field.is_long_term)) if (temp_fs->top_field.pic_num == pic_num) { @@ -952,12 +965,12 @@ static frame_param_ptr h264_dpb_get_long_term_pic(h264_Info * pInfo,int32_t long } else { - if (temp_fs->frame.used_for_reference&0x1) + if (temp_fs->frame.used_for_reference & 0x1) if (temp_fs->top_field.is_long_term) if (temp_fs->top_field.long_term_pic_num == long_term_pic_num) return temp_fs; - if (temp_fs->frame.used_for_reference&0x2) + if (temp_fs->frame.used_for_reference & 0x2) if (temp_fs->bottom_field.is_long_term) if (temp_fs->bottom_field.long_term_pic_num == long_term_pic_num) { @@ -1247,9 +1260,9 @@ void h264_dpb_RP_check_list (h264_Info * pInfo) // If the decoding start from RP and without exact point, all B frames belong to previous GOP should be throw away! 
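// The rewritten check below reduces to one predicate (a sketch using the
// same pInfo fields; the old wl_err_curr workload flags are dropped, so the
// condition now only drives the WTRACE):
//
//     int b_slice_from_pre_rp_gop =
//         (pInfo->SliceHeader.slice_type == h264_PtypeB) &&
//         (pInfo->sei_b_state_ready == 0) &&   // no I slice since the recovery point SEI
//         pInfo->sei_rp_received;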
// - if ((pInfo->SliceHeader.slice_type == h264_PtypeB)&&(pInfo->sei_b_state_ready ==0) && pInfo->sei_rp_received) { - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); + if ((pInfo->SliceHeader.slice_type == h264_PtypeB) && (pInfo->sei_b_state_ready ==0) && pInfo->sei_rp_received) + { + WTRACE("Error happens in %s, further clarification is needed...", __FUNCTION__); } @@ -1261,8 +1274,8 @@ void h264_dpb_RP_check_list (h264_Info * pInfo) int32_t idx, rp_found = 0; - if ( ((pInfo->SliceHeader.num_ref_idx_l0_active == 1)&&(pInfo->SliceHeader.structure == FRAME)) || - ((pInfo->SliceHeader.num_ref_idx_l0_active == 2)&&(pInfo->SliceHeader.structure != FRAME)) ) + if (((pInfo->SliceHeader.num_ref_idx_l0_active == 1) && (pInfo->SliceHeader.structure == FRAME)) || + ((pInfo->SliceHeader.num_ref_idx_l0_active == 2) && (pInfo->SliceHeader.structure != FRAME))) { if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag) { @@ -1368,7 +1381,7 @@ void h264_dpb_reorder_lists(h264_Info * pInfo) { } - pInfo->dpb.listXsize[0]=pInfo->SliceHeader.num_ref_idx_l0_active; + pInfo->dpb.listXsize[0] = pInfo->SliceHeader.num_ref_idx_l0_active; } else if (currSliceType == h264_PtypeB) { /////////////////////////////////////////////// Reordering reference list for B slice @@ -1388,7 +1401,7 @@ void h264_dpb_reorder_lists(h264_Info * pInfo) { } - pInfo->dpb.listXsize[1]=pInfo->SliceHeader.num_ref_idx_l1_active; + pInfo->dpb.listXsize[1] = pInfo->SliceHeader.num_ref_idx_l1_active; } //// Check if need recover reference list with previous recovery point @@ -1397,7 +1410,6 @@ void h264_dpb_reorder_lists(h264_Info * pInfo) { h264_dpb_RP_check_list(pInfo); } - return; } @@ -1446,11 +1458,14 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin int32_t first_field_non_ref = 0; int32_t idr_flag; - if (NonExisting) { + if (NonExisting) + { if (p_dpb->fs_non_exist_idc == MPD_DPB_FS_NULL_IDC) return; h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); - } else { + } + else + { if (p_dpb->fs_dec_idc == MPD_DPB_FS_NULL_IDC) return; h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); @@ -1467,24 +1482,21 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin switch (viddec_h264_get_dec_structure(p_dpb->active_fs)) { - case(TOP_FIELD) : { + case(TOP_FIELD): p_dpb->active_fs->top_field.used_for_reference = used_for_reference; viddec_h264_set_is_top_used(p_dpb->active_fs, 1); //p_dpb->active_fs->crc_field_coded = 1; - } break; - case(BOTTOM_FIELD): { + case(BOTTOM_FIELD): p_dpb->active_fs->bottom_field.used_for_reference = used_for_reference << 1; viddec_h264_set_is_bottom_used(p_dpb->active_fs, 1); //p_dpb->active_fs->crc_field_coded = 1; - } break; - default: { - p_dpb->active_fs->frame.used_for_reference = used_for_reference?3:0; + default: + p_dpb->active_fs->frame.used_for_reference = used_for_reference ? 3 : 0; viddec_h264_set_is_frame_used(p_dpb->active_fs, 3); //if(pInfo->img.MbaffFrameFlag) p_dpb->active_fs->crc_field_coded = 1; - } break; } @@ -1492,18 +1504,22 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin //if (freeze_assert) sei_information.disp_frozen = 1; idr_flag = use_old ?
pInfo->old_slice.idr_flag : pInfo->SliceHeader.idr_flag; - if (idr_flag) { + if (idr_flag) + { h264_dpb_idr_memory_management (pInfo, &pInfo->active_SPS, pInfo->img.no_output_of_prior_pics_flag); - } else { + } + else + { // adaptive memory management - if (used_for_reference & pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) { + if (used_for_reference & pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) + { h264_dpb_adaptive_memory_management(pInfo); } } // Reset the active frame store - could have changed in mem management ftns h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); - if ((viddec_h264_get_dec_structure(p_dpb->active_fs) == TOP_FIELD)||(viddec_h264_get_dec_structure(p_dpb->active_fs) == BOTTOM_FIELD)) + if ((viddec_h264_get_dec_structure(p_dpb->active_fs) == TOP_FIELD) || (viddec_h264_get_dec_structure(p_dpb->active_fs) == BOTTOM_FIELD)) { // check for frame store with same pic_number -- always true in my case, YH // when we allocate frame store for the second field, we make sure the frame store for the second @@ -1526,7 +1542,7 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin { // Set up locals for non-existing frames used_for_reference = 1; - p_dpb->active_fs->frame.used_for_reference = used_for_reference?3:0; + p_dpb->active_fs->frame.used_for_reference = used_for_reference ? 3 : 0; viddec_h264_set_is_frame_used(p_dpb->active_fs, 3); viddec_h264_set_dec_structure(p_dpb->active_fs, FRAME); pInfo->img.structure = FRAME; @@ -1552,12 +1568,18 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin } } - if (NonExisting) { + if (NonExisting) + { h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames); - } else if (pInfo->SliceHeader.idr_flag == 0) { - if (used_for_reference) { - if (pInfo->img.second_field == 0) { - if (pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag == 0) { + } + else if (pInfo->SliceHeader.idr_flag == 0) + { + if (used_for_reference) + { + if (pInfo->img.second_field == 0) + { + if (pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag == 0) + { h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames); } } @@ -1577,7 +1599,8 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin // p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer > p_dpb->BumpLevel, // which in itself is an error, but this means first_field_non_ref will // not get set and causes problems for h264_dpb_queue_update() - if ((pInfo->img.structure != FRAME) && (pInfo->img.second_field == 0)) { + if ((pInfo->img.structure != FRAME) && (pInfo->img.second_field == 0)) + { if (used_for_reference == 0) if (p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer == p_dpb->BumpLevel) first_field_non_ref = 1; @@ -1620,7 +1643,8 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin else p_dpb->OutputCtrl = 0; } - else { + else + { p_dpb->OutputCtrl = 0; } } @@ -1637,7 +1661,8 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin // Do not output "direct output" pictures until the sempahore has been set that the pic is // decoded!! 
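// Condensed view of the marking order in h264_dpb_store_previous_picture_in_dpb
// (illustrative sketch; adaptive_flag stands for
// SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag):
//
//     if (idr_flag)
//         h264_dpb_idr_memory_management(pInfo, &pInfo->active_SPS, ...);
//     else if (used_for_reference & adaptive_flag)
//         h264_dpb_adaptive_memory_management(pInfo);   // explicit MMCOs
//     if (NonExisting || (!idr_flag && used_for_reference &&
//                         !pInfo->img.second_field && !adaptive_flag))
//         h264_dpb_sliding_window_memory_management(p_dpb, NonExisting,
//                                                   pInfo->active_SPS.num_ref_frames);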
// - if (is_direct_output) { + if (is_direct_output) + { h264_dpb_queue_update(pInfo, 1, 1, 0,pInfo->active_SPS.num_ref_frames); //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); } @@ -1645,13 +1670,13 @@ void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExistin // // Add reference pictures into Reference list // - if (used_for_reference) { + if (used_for_reference) + { h264_dpb_insert_ref_lists(&pInfo->dpb, NonExisting); } h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); - return; } ////////////// End of DPB store pic @@ -1678,16 +1703,19 @@ void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference { h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - if (NonExisting == 0) { + if (NonExisting == 0) + { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); p_dpb->active_fs->frame_num = (use_old) ? pInfo->old_slice.frame_num : pInfo->SliceHeader.frame_num; } - else { + else + { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc); p_dpb->active_fs->frame_num = p_dpb->active_fs->frame.pic_num; } - if (add2dpb) { + if (add2dpb) + { p_dpb->fs_dpb_idc[p_dpb->used_size] = p_dpb->active_fs->fs_idc; p_dpb->used_size++; } @@ -1695,9 +1723,10 @@ void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference switch (viddec_h264_get_dec_structure(p_dpb->active_fs)) { - case FRAME : { + case FRAME : + { viddec_h264_set_is_frame_used(p_dpb->active_fs, 3); - p_dpb->active_fs->frame.used_for_reference = used_for_reference?3:0; + p_dpb->active_fs->frame.used_for_reference = used_for_reference ? 3 : 0; if (used_for_reference) { p_dpb->active_fs->frame.used_for_reference = 3; @@ -1709,7 +1738,8 @@ void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference } break; - case TOP_FIELD : { + case TOP_FIELD : + { viddec_h264_set_is_top_used(p_dpb->active_fs, 1); p_dpb->active_fs->top_field.used_for_reference = used_for_reference; @@ -1727,15 +1757,16 @@ void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference } else { - p_dpb->active_fs->frame.poc = p_dpb->active_fs->top_field.poc; + p_dpb->active_fs->frame.poc = p_dpb->active_fs->top_field.poc; } } break; - case BOTTOM_FIELD : { + case BOTTOM_FIELD : + { viddec_h264_set_is_bottom_used(p_dpb->active_fs, 1); - p_dpb->active_fs->bottom_field.used_for_reference = (used_for_reference<<1); + p_dpb->active_fs->bottom_field.used_for_reference = (used_for_reference << 1); if (used_for_reference) { p_dpb->active_fs->frame.used_for_reference |= 0x2; @@ -1745,7 +1776,8 @@ void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference p_dpb->active_fs->long_term_frame_idx = p_dpb->active_fs->bottom_field.long_term_frame_idx; } } - if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) { + if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) + { h264_dpb_combine_field(p_dpb, use_old); // generate frame view } else @@ -1794,7 +1826,7 @@ void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t diff unmark_done = 0; - for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(unmark_done)); idx++) + for (idx = 0; (idx < p_dpb->ref_frames_in_buffer) && (!(unmark_done)); idx++) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); @@ -1825,7 +1857,7 @@ void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t diff How will this affect the reference list update ftn coming after?? 
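// picNumX, which the top/bottom field checks below match against, comes
// from the slice's MMCO argument; the same derivation appears verbatim in
// h264_dpb_mm_assign_long_term_frame_idx later in this file:
//
//     currPicNum = (pInfo->img.structure == FRAME)
//                      ? pInfo->img.frame_num              // frame coding
//                      : (pInfo->img.frame_num << 1) + 1;  // field coding
//     picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1);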
*/ - if ((p_dpb->active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x01))&& + if ((p_dpb->active_fs->frame.used_for_reference & 0x1) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs) & 0x01))&& (p_dpb->active_fs->top_field.pic_num == picNumX) ) { p_dpb->active_fs->top_field.used_for_reference = 0; @@ -1837,7 +1869,7 @@ void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t diff if (p_dpb->active_fs->bottom_field.used_for_reference == 0) h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]); } - if ((p_dpb->active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2)) && + if ((p_dpb->active_fs->frame.used_for_reference & 0x2) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs) & 0x2)) && (p_dpb->active_fs->bottom_field.pic_num == picNumX) ) { p_dpb->active_fs->bottom_field.used_for_reference = 0; @@ -1886,7 +1918,7 @@ void h264_dpb_mm_unmark_long_term_for_reference (h264_Info * pInfo, int32_t long if (pInfo->img.structure == FRAME) { - if ((p_dpb->active_fs->frame.used_for_reference==3) && (viddec_h264_get_is_long_term(p_dpb->active_fs)==3) && + if ((p_dpb->active_fs->frame.used_for_reference == 3) && (viddec_h264_get_is_long_term(p_dpb->active_fs) == 3) && (p_dpb->active_fs->frame.long_term_pic_num == long_term_pic_num)) { h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]); @@ -1897,7 +1929,7 @@ void h264_dpb_mm_unmark_long_term_for_reference (h264_Info * pInfo, int32_t long else { /// Check top field - if ((p_dpb->active_fs->frame.used_for_reference&0x1) && (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x1) && + if ((p_dpb->active_fs->frame.used_for_reference & 0x1) && (viddec_h264_get_is_long_term(p_dpb->active_fs) & 0x1) && (p_dpb->active_fs->top_field.long_term_pic_num == long_term_pic_num) ) { p_dpb->active_fs->top_field.used_for_reference = 0; @@ -1913,7 +1945,7 @@ void h264_dpb_mm_unmark_long_term_for_reference (h264_Info * pInfo, int32_t long } /// Check Bottom field - if ((p_dpb->active_fs->frame.used_for_reference&0x2) && (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2) && + if ((p_dpb->active_fs->frame.used_for_reference & 0x2) && (viddec_h264_get_is_long_term(p_dpb->active_fs) & 0x2) && (p_dpb->active_fs->bottom_field.long_term_pic_num == long_term_pic_num) ) { p_dpb->active_fs->bottom_field.used_for_reference = 0; @@ -1949,18 +1981,18 @@ int32_t h264_dpb_get_pic_struct_by_pic_num(h264_DecodedPictureBuffer *p_dpb, int int32_t pic_struct = INVALID; int32_t found = 0; - for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(found)); idx++) + for (idx = 0; (idx < p_dpb->ref_frames_in_buffer) && (!(found)); idx++) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]); - if ((p_dpb->active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x01))&& + if ((p_dpb->active_fs->frame.used_for_reference & 0x1) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs) & 0x01))&& (p_dpb->active_fs->top_field.pic_num == picNumX) ) { found = 1; pic_struct = TOP_FIELD; } - if ((p_dpb->active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2)) && + if ((p_dpb->active_fs->frame.used_for_reference & 0x2) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs) & 0x2)) && (p_dpb->active_fs->bottom_field.pic_num == picNumX) ) { found = 1; @@ -1988,18 +2020,24 @@ void h264_dpb_mm_assign_long_term_frame_idx(h264_Info * pInfo, int32_t differenc int32_t currPicNum; int32_t polarity = 0; - if 
(pInfo->img.structure == FRAME) { + if (pInfo->img.structure == FRAME) + { currPicNum = pInfo->img.frame_num; - } else { + } + else + { currPicNum = (pInfo->img.frame_num << 1) + 1; } picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1); // remove frames / fields with same long_term_frame_idx - if (pInfo->img.structure == FRAME) { + if (pInfo->img.structure == FRAME) + { h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx); - } else { + } + else + { polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX); if (polarity != INVALID) @@ -2182,7 +2220,8 @@ void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPict } } - if (found) { + if (found) + { if (is_complement == 0) { h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx-1]); @@ -2332,40 +2371,40 @@ void h264_dpb_adaptive_memory_management (h264_Info * pInfo) { switch (pInfo->SliceHeader.sh_dec_refpic.memory_management_control_operation[idx]) { - case 1: { //Mark a short-term reference picture as �unused for reference? + case 1: + //Mark a short-term reference picture as unused for reference? h264_dpb_mm_unmark_short_term_for_reference(pInfo, pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx]); - } break; - case 2: { //Mark a long-term reference picture as �unused for reference? + case 2: + //Mark a long-term reference picture as unused for reference? h264_dpb_mm_unmark_long_term_for_reference(pInfo, pInfo->SliceHeader.sh_dec_refpic.long_term_pic_num[idx]); - } break; - case 3: { //Mark a short-term reference picture as "used for long-term reference" and assign a long-term frame index to it + case 3: + //Mark a short-term reference picture as "used for long-term reference" and assign a long-term frame index to it h264_dpb_mm_assign_long_term_frame_idx(pInfo, pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx], pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]); - } break; - case 4: { //Specify the maximum long-term frame index and + case 4: + //Specify the maximum long-term frame index and //mark all long-term reference pictureshaving long-term frame indices greater than //the maximum value as "unused for reference" h264_dpb_mm_update_max_long_term_frame_idx (&pInfo->dpb, pInfo->SliceHeader.sh_dec_refpic.max_long_term_frame_idx_plus1[idx]); - } break; - case 5: { //Mark all reference pictures as "unused for reference" and set the MaxLongTermFrameIdx variable to + case 5: + //Mark all reference pictures as "unused for reference" and set the MaxLongTermFrameIdx variable to // "no long-term frame indices" h264_dpb_mm_unmark_all_short_term_for_reference(&pInfo->dpb); h264_dpb_mm_update_max_long_term_frame_idx(&pInfo->dpb, 0); pInfo->img.last_has_mmco_5 = 1; - } break; - case 6: { //Mark the current picture as "used for long-term reference" and assign a long-term frame index to it + case 6: + //Mark the current picture as "used for long-term reference" and assign a long-term frame index to it h264_dpb_mm_mark_current_picture_long_term(&pInfo->dpb, pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]); - } break; } idx++; @@ -2375,7 +2414,7 @@ void h264_dpb_adaptive_memory_management (h264_Info * pInfo) if (pInfo->img.last_has_mmco_5) { pInfo->img.frame_num = 0; - pInfo->SliceHeader.frame_num=0; + pInfo->SliceHeader.frame_num = 0; h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc); if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME) @@ -2436,12 +2475,12 @@ void 
h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) // If the previous picture was an unpaired field, mark it as a dangler if (p_dpb->used_size) { - idx = p_dpb->used_size-1; + idx = p_dpb->used_size - 1; prev_idc = p_dpb->fs_dpb_idc[idx]; if (prev_idc != MPD_DPB_FS_NULL_IDC) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - p_dpb->active_fs->frame_num =0; + p_dpb->active_fs->frame_num = 0; } } pInfo->img.PreviousFrameNumOffset = 0; @@ -2451,7 +2490,8 @@ void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) } // Check for gaps in frame_num - if (pInfo->SliceHeader.idr_flag) { + if (pInfo->SliceHeader.idr_flag) + { pInfo->img.PreviousFrameNum = pInfo->img.frame_num; } // Have we re-started following a recovery point message? @@ -2464,9 +2504,12 @@ void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) */ else if (pInfo->img.frame_num != pInfo->img.PreviousFrameNum) { - if (MaxFrameNum) { + if (MaxFrameNum) + { ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp); - } else { + } + else + { temp = (uint32_t)pInfo->img.PreviousFrameNum + 1; } prev_frame_num_plus1_wrap = temp; @@ -2479,16 +2522,13 @@ void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) //if(active_sps->gaps_in_frame_num_value_allowed_flag == 0) { - if (pInfo->img.gaps_in_frame_num && (active_sps->gaps_in_frame_num_value_allowed_flag == 0)) { + if (pInfo->img.gaps_in_frame_num && (active_sps->gaps_in_frame_num_value_allowed_flag == 0)) + { // infer an unintentional loss of pictures // only invoke following process for a conforming bitstream // when gaps_in_frame_num_value_allowed_flag is equal to 1 pInfo->img.gaps_in_frame_num = 0; -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif + //mfd_printf("ERROR STREAM??\n"); ////// Error handling here---- } @@ -2503,12 +2543,13 @@ void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo) // If the previous picture was an unpaired field, mark it as a dangler if (p_dpb->used_size) { - idx = p_dpb->used_size-1; + idx = p_dpb->used_size - 1; prev_idc = p_dpb->fs_dpb_idc[idx]; if (prev_idc != MPD_DPB_FS_NULL_IDC) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); - if (viddec_h264_get_is_used(p_dpb->active_fs) != 3) { + if (viddec_h264_get_is_used(p_dpb->active_fs) != 3) + { h264_dpb_mark_dangling_field(p_dpb, p_dpb->active_fs->fs_idc); //, DANGLING_TYPE_GAP_IN_FRAME } } @@ -2563,8 +2604,8 @@ void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_ { h264_dpb_set_active_fs(p_dpb, fs_idc); - if (viddec_h264_get_is_used(p_dpb->active_fs)&0x1) p_dpb->active_fs->top_field.used_for_reference = 0; - if (viddec_h264_get_is_used(p_dpb->active_fs)&0x2) p_dpb->active_fs->bottom_field.used_for_reference = 0; + if (viddec_h264_get_is_used(p_dpb->active_fs) & 0x1) p_dpb->active_fs->top_field.used_for_reference = 0; + if (viddec_h264_get_is_used(p_dpb->active_fs) & 0x2) p_dpb->active_fs->bottom_field.used_for_reference = 0; if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) p_dpb->active_fs->frame.used_for_reference = 0; p_dpb->active_fs->frame.used_for_reference = 0; @@ -2586,13 +2627,13 @@ void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, i { h264_dpb_set_active_fs(p_dpb, fs_idc); - if (viddec_h264_get_is_used(p_dpb->active_fs)&0x1) + if (viddec_h264_get_is_used(p_dpb->active_fs) & 0x1) { p_dpb->active_fs->top_field.used_for_reference = 0; p_dpb->active_fs->top_field.is_long_term = 0; } - if 
(viddec_h264_get_is_used(p_dpb->active_fs)&0x2) + if (viddec_h264_get_is_used(p_dpb->active_fs) & 0x2) { p_dpb->active_fs->bottom_field.used_for_reference = 0; p_dpb->active_fs->bottom_field.is_long_term = 0; @@ -2674,13 +2715,13 @@ void h264_dpb_is_used_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t * *flag = 0; if (p_dpb->active_fs->frame.used_for_reference) *flag = 1; - else if (viddec_h264_get_is_used(p_dpb->active_fs) ==3) // frame + else if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) // frame *flag = p_dpb->active_fs->frame.used_for_reference; else { - if (viddec_h264_get_is_used(p_dpb->active_fs)&0x1) // top field + if (viddec_h264_get_is_used(p_dpb->active_fs) & 0x1) // top field *flag = p_dpb->active_fs->top_field.used_for_reference; - if (viddec_h264_get_is_used(p_dpb->active_fs)&0x2) // bottom field + if (viddec_h264_get_is_used(p_dpb->active_fs) & 0x2) // bottom field *flag = *flag || p_dpb->active_fs->bottom_field.used_for_reference; } } @@ -2752,7 +2793,8 @@ void h264_dpb_idr_memory_management (h264_Info * pInfo,seq_param_set_used_ptr ac p_dpb->fs_ltref_idc[i] = MPD_DPB_FS_NULL_IDC; ////////////////////////////////////////// Reset DPB and dpb list - for (i = 0; i < p_dpb->used_size; i++) { + for (i = 0; i < p_dpb->used_size; i++) + { p_dpb->fs[p_dpb->fs_dpb_idc[i]].fs_idc = MPD_DPB_FS_NULL_IDC; p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC; } @@ -2962,10 +3004,10 @@ void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t id /////Remove unused frame from dpb-list i = idx; - while ( (i + 1)< p_dpb->used_size) + while ((i + 1)< p_dpb->used_size) { p_dpb->fs_dpb_idc[i] = p_dpb->fs_dpb_idc[i + 1]; - i ++; + i++; } p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC; @@ -3147,9 +3189,9 @@ void h264_dpb_combine_field(h264_DecodedPictureBuffer *p_dpb, int32_t use_old) //p_dpb->active_fs->frame.poc = p_dpb->active_fs->poc; - p_dpb->active_fs->frame.used_for_reference = p_dpb->active_fs->top_field.used_for_reference |(p_dpb->active_fs->bottom_field.used_for_reference); + p_dpb->active_fs->frame.used_for_reference = p_dpb->active_fs->top_field.used_for_reference | (p_dpb->active_fs->bottom_field.used_for_reference); - p_dpb->active_fs->frame.is_long_term = p_dpb->active_fs->top_field.is_long_term |(p_dpb->active_fs->bottom_field.is_long_term <<1); + p_dpb->active_fs->frame.is_long_term = p_dpb->active_fs->top_field.is_long_term | (p_dpb->active_fs->bottom_field.is_long_term << 1); if (p_dpb->active_fs->frame.is_long_term) p_dpb->active_fs->frame.long_term_frame_idx = p_dpb->active_fs->long_term_frame_idx; @@ -3389,7 +3431,7 @@ int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo,int32_t direct, int3 // It should only happen in errored streams, and can happen if this picture had an MMCO, // thus disabling h264_dpb_sliding_window_memory_management(), which would normally have // unmarked the oldest reference frame. 
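// The normal sliding-window rule this fallback re-runs, sketched per spec
// 8.2.5.3 (assumes fs_ref_idc[] stays oldest-first, which is how the
// add/remove ref-list helpers in this file maintain it):
//
//     if (p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer
//             >= ((num_ref_frames > 0) ? num_ref_frames : 1))
//     {
//         h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
//         h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
//     }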
- h264_dpb_sliding_window_memory_management(p_dpb, 0,num_ref_frames); + h264_dpb_sliding_window_memory_management(p_dpb, 0, num_ref_frames); h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); if (flag == 0) { @@ -3493,14 +3535,17 @@ void h264_dpb_flush_dpb (h264_Info* pInfo,int32_t output_all, int32_t keep_compl } // output frames in POC order - if (output_all) { - while ((p_dpb->used_size > 0) && (p_dpb->used_size - keep_complement)) { + if (output_all) + { + while ((p_dpb->used_size > 0) && (p_dpb->used_size - keep_complement)) + { h264_dpb_queue_update(pInfo, 1, 0, 0,num_ref_frames); } } flag = 1; - while (flag) { + while (flag) + { h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag); } @@ -3543,7 +3588,8 @@ void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHe } // initialize software DPB - if (p_dpb->active_fs) { + if (p_dpb->active_fs) + { viddec_h264_set_dec_structure(p_dpb->active_fs, INVALID); } h264_dpb_idr_memory_management(pInfo, &pInfo->active_SPS, no_output_of_prior_pics_flag); // implied no_output_of_prior_pics_flag==1 @@ -3629,7 +3675,8 @@ int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting) h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb; - while ( dpb_setup_free_fb(p_dpb, &idc, &pip_setting) != 0 ) { + while ( dpb_setup_free_fb(p_dpb, &idc, &pip_setting) != 0 ) + { /// /// Generally this is triggered a error case, no more frame buffer avaliable for next /// What we do here is just remove one with min-POC before get more info @@ -3649,9 +3696,12 @@ int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting) } - if (NonExisting) { + if (NonExisting) + { p_dpb->fs_non_exist_idc = idc; - } else { + } + else + { p_dpb->fs_dec_idc = idc; } @@ -3670,7 +3720,7 @@ int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting) viddec_h264_set_is_non_existent(p_dpb->active_fs, NonExisting); viddec_h264_set_is_output(p_dpb->active_fs, (NonExisting?1:0)); - p_dpb->active_fs->pic_type = ((FRAME_TYPE_INVALID<<FRAME_TYPE_TOP_OFFSET)|(FRAME_TYPE_INVALID<<FRAME_TYPE_BOTTOM_OFFSET)); + p_dpb->active_fs->pic_type = ((FRAME_TYPE_INVALID << FRAME_TYPE_TOP_OFFSET) | (FRAME_TYPE_INVALID << FRAME_TYPE_BOTTOM_OFFSET)); // Only put members in here which will not be reset somewhere else // and which could be used before they are overwritten again with @@ -3705,7 +3755,7 @@ void h264_dpb_update_queue_dangling_field(h264_Info * pInfo) { if (dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1] != MPD_DPB_FS_NULL_IDC) { - h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]); + h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size - 1]); if (viddec_h264_get_is_used(dpb_ptr->active_fs) != 3) { prev_pic_unpaired_field = 1; @@ -3720,13 +3770,15 @@ void h264_dpb_update_queue_dangling_field(h264_Info * pInfo) { // If we establish the previous pic was an unpaired field and this picture is not // its complement, the previous picture was a dangling field - if (pInfo->img.second_field == 0) { + if (pInfo->img.second_field == 0) + { while (dpb_ptr->used_size > dpb_ptr->BumpLevel) h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame } } } - else if (prev_pic_unpaired_field) { + else if (prev_pic_unpaired_field) + { while (dpb_ptr->used_size > dpb_ptr->BumpLevel) h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame } @@ -3758,13 +3810,13 @@ void h264_dpb_init_frame_store(h264_Info * pInfo) if (dpb_ptr->used_size) { - idx = dpb_ptr->used_size-1; + idx = dpb_ptr->used_size - 1; prev_idc = dpb_ptr->fs_dpb_idc[idx]; } if
(prev_idc != MPD_DPB_FS_NULL_IDC) { - h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]); + h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size - 1]); if (viddec_h264_get_is_used(dpb_ptr->active_fs) != 3) { //PRINTF(MFD_NONE, " FN: %d p_dpb->active_fs->is_used = %d \n", (h264_frame_number+1), p_dpb->active_fs->is_used); @@ -3787,7 +3839,8 @@ void h264_dpb_init_frame_store(h264_Info * pInfo) h264_dpb_mark_dangling_field(dpb_ptr, dpb_ptr->active_fs->fs_idc); //, DANGLING_TYPE_FIELD } } - else if (prev_pic_unpaired_field) { + else if (prev_pic_unpaired_field) + { h264_dpb_mark_dangling_field(dpb_ptr, dpb_ptr->active_fs->fs_idc); //, DANGLING_TYPE_FRAME } @@ -3806,7 +3859,8 @@ void h264_dpb_init_frame_store(h264_Info * pInfo) ////////////// TODO: THe following init #if 1 - if ( pInfo->img.second_field) { + if ( pInfo->img.second_field) + { //p_dpb->active_fs->second_dsn = pInfo->img.dsn; //p_dpb->active_fs->prev_dsn = pInfo->img.prev_dsn; if (dpb_ptr->active_fs->pic_type == FRAME_TYPE_IDR || @@ -3904,15 +3958,15 @@ void h264_dpb_init_frame_store(h264_Info * pInfo) void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num) { - int32_t MaxPicOrderCntLsb = (1<<(pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4)); + int32_t MaxPicOrderCntLsb = (1<<(pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4 + 4)); int32_t delta_pic_order_count[2]; int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4); int32_t AbsFrameNum =0; - int32_t ExpectedDeltaPerPicOrderCntCycle =0; + int32_t ExpectedDeltaPerPicOrderCntCycle = 0; int32_t PicOrderCntCycleCnt = 0; - int32_t FrameNumInPicOrderCntCycle =0; - int32_t ExpectedPicOrderCnt =0; + int32_t FrameNumInPicOrderCntCycle = 0; + int32_t ExpectedPicOrderCnt = 0; int32_t actual_frame_num =0; @@ -3947,26 +4001,28 @@ void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame // Calculate the MSBs of current picture if ((pInfo->img.pic_order_cnt_lsb < pInfo->img.PrevPicOrderCntLsb) && - ((pInfo->img.PrevPicOrderCntLsb - pInfo->img.pic_order_cnt_lsb )>=(MaxPicOrderCntLsb>>1)) ) + ((pInfo->img.PrevPicOrderCntLsb - pInfo->img.pic_order_cnt_lsb ) >= (MaxPicOrderCntLsb >> 1))) { pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb + MaxPicOrderCntLsb; - } else if ((pInfo->img.pic_order_cnt_lsb > pInfo->img.PrevPicOrderCntLsb) && + } + else if ((pInfo->img.pic_order_cnt_lsb > pInfo->img.PrevPicOrderCntLsb) && ((pInfo->img.pic_order_cnt_lsb - pInfo->img.PrevPicOrderCntLsb ) > (MaxPicOrderCntLsb>>1)) ) { pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb - MaxPicOrderCntLsb; - } else + } + else { pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb; } // 2nd - if (pInfo->img.field_pic_flag==0) + if (pInfo->img.field_pic_flag == 0) { //frame pix pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb; pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.delta_pic_order_cnt_bottom; - pInfo->img.ThisPOC = pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? pInfo->img.toppoc : pInfo->img.bottompoc; // POC200301 + pInfo->img.ThisPOC = pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc) ? 
pInfo->img.toppoc : pInfo->img.bottompoc; // POC200301 } else if (pInfo->img.bottom_field_flag==0) { //top field @@ -3988,7 +4044,7 @@ void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame } break; - case 1: { + case 1: if (NonExisting) { delta_pic_order_count[0] = 0; @@ -4007,14 +4063,6 @@ void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame if (pInfo->SliceHeader.idr_flag) { pInfo->img.FrameNumOffset = 0; -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if (pInfo->img.frame_num) - { - pInfo->sw_bail = 1; - } -#endif -#endif } else { @@ -4060,16 +4108,12 @@ void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame FrameNumInPicOrderCntCycle = temp; //ExpectedPicOrderCnt +=pInfo->active_SPS.expectedDeltaPerPOCCycle; -#ifndef USER_MODE h264_Parse_Copy_Offset_Ref_Frames_From_DDR(pInfo, offset_for_ref_frame, pInfo->active_SPS.seq_parameter_set_id); for (i = 0; i <= FrameNumInPicOrderCntCycle; i++) ExpectedPicOrderCnt += offset_for_ref_frame[i]; -#else - for (i = 0; i <= FrameNumInPicOrderCntCycle; i++) - ExpectedPicOrderCnt += pInfo->active_SPS.offset_for_ref_frame[i]; -#endif } - else { + else + { ExpectedPicOrderCnt = 0; } @@ -4105,9 +4149,9 @@ void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame pInfo->img.PreviousFrameNum=pInfo->img.frame_num; pInfo->img.PreviousFrameNumOffset=pInfo->img.FrameNumOffset; - } break; - case 2: { // POC MODE 2 + case 2: + // POC MODE 2 if (pInfo->SliceHeader.idr_flag) { pInfo->img.FrameNumOffset = 0; @@ -4115,14 +4159,6 @@ void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame pInfo->img.toppoc = 0; pInfo->img.bottompoc = 0; pInfo->img.ThisPOC = 0; -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if (pInfo->img.frame_num) - { - pInfo->sw_bail = 1; - } -#endif -#endif } else { @@ -4137,8 +4173,8 @@ void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset; AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num; - if (pInfo->SliceHeader.nal_ref_idc == 0) pInfo->img.ThisPOC = (AbsFrameNum<<1) - 1; - else pInfo->img.ThisPOC = (AbsFrameNum<<1); + if (pInfo->SliceHeader.nal_ref_idc == 0) pInfo->img.ThisPOC = (AbsFrameNum << 1) - 1; + else pInfo->img.ThisPOC = (AbsFrameNum << 1); if (!(pInfo->img.field_pic_flag)) { @@ -4161,7 +4197,6 @@ void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame //CONFORMANCE_ISSUE pInfo->img.PreviousFrameNum = pInfo->img.frame_num; pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset; - } break; default: break; @@ -4189,7 +4224,7 @@ void h264_hdr_post_poc(h264_Info* pInfo, int32_t NonExisting, int32_t frame_num, switch (pInfo->img.pic_order_cnt_type) { - case 0: { + case 0: pInfo->img.PreviousFrameNum = actual_frame_num; if ((disposable_flag == 0) && (NonExisting == 0)) { @@ -4197,22 +4232,18 @@ void h264_hdr_post_poc(h264_Info* pInfo, int32_t NonExisting, int32_t frame_num, pInfo->SliceHeader.pic_order_cnt_lsb; pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb; } - } break; - case 1: { + case 1: pInfo->img.PreviousFrameNum = actual_frame_num; pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset; - } break; - case 2: { + case 2: pInfo->img.PreviousFrameNum = actual_frame_num; pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset; - - } break; - default: { - } break; + default: + break; } return; diff --git a/mixvbp/vbp_plugin/h264/h264parse_mem.c 
b/mixvbp/vbp_plugin/h264/h264parse_mem.c index b5a0145..0c39b17 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_mem.c +++ b/mixvbp/vbp_plugin/h264/h264parse_mem.c @@ -45,7 +45,6 @@ void* h264_memcpy( void* dest, void* src, uint32_t num ) } -#ifndef USER_MODE //h264_Parse_Copy_Sps_To_DDR () copy local sps to ddr mem void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId) @@ -193,6 +192,3 @@ void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId) //end of h264_Parse_Clear_Sps_Updated_Flag -#endif - - diff --git a/mixvbp/vbp_plugin/h264/h264parse_pps.c b/mixvbp/vbp_plugin/h264/h264parse_pps.c index 17f0930..2c4cc52 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_pps.c +++ b/mixvbp/vbp_plugin/h264/h264parse_pps.c @@ -1,7 +1,6 @@ - - #include "h264.h" #include "h264parse.h" +#include <vbp_trace.h> /*---------------------------------------------*/ /*---------------------------------------------*/ @@ -11,39 +10,26 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa h264_Status ret = H264_PPS_ERROR; //h264_PicParameterSet_t* PictureParameterSet = &pInfo->PictureParameterSet; - uint32_t code=0, i = 0; + uint32_t code = 0, i = 0; do { ///// PPS par1: pic_parameter_set_id & seq_parameter_set_id code = h264_GetVLCElement(parent, pInfo, false); - if (code > MAX_PIC_PARAMS) { + if (code > MAX_PIC_PARAMS) + { + ETRACE("PPS id is out of range: %d", code); break; } PictureParameterSet->pic_parameter_set_id = (uint8_t)code; -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if (code > 255) - { - pInfo->sw_bail = 1; - } -#endif -#endif - code = h264_GetVLCElement(parent, pInfo, false); - if (code > MAX_NUM_SPS-1) { + if (code > MAX_NUM_SPS - 1) + { + ETRACE("SPS id in PPS is bad: %d", code); break; } PictureParameterSet->seq_parameter_set_id = (uint8_t)code; -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if (code > 31) - { - pInfo->sw_bail = 1; - } -#endif -#endif ///// entropy_coding_mode_flag viddec_pm_get_bits(parent, &code, 1); PictureParameterSet->entropy_coding_mode_flag = (uint8_t)code; @@ -53,31 +39,22 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa PictureParameterSet->num_slice_groups_minus1 = h264_GetVLCElement(parent, pInfo, false); -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if (PictureParameterSet->num_slice_groups_minus1 > 8) - { - pInfo->sw_bail = 1; - } -#endif -#endif // // In main profile, FMO is excluded and num_slice_groups_minus1 should be 0 // if (PictureParameterSet->num_slice_groups_minus1 > 0) //MAX_NUM_SLICE_GRPS) + { + ETRACE("FMO is not supported"); break; + } - PictureParameterSet->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false)+1; - PictureParameterSet->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1; + PictureParameterSet->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false) + 1; + PictureParameterSet->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false) + 1; //// PPS->num_ref_idx_l0_active --- [0,32] if (((PictureParameterSet->num_ref_idx_l0_active) > MAX_NUM_REF_FRAMES) || ((PictureParameterSet->num_ref_idx_l1_active) > MAX_NUM_REF_FRAMES)) { -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif + ETRACE("The number in the ref list is greater than 32"); break; } @@ -85,39 +62,19 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa viddec_pm_get_bits(parent, &code, 1); PictureParameterSet->weighted_pred_flag = (uint8_t)code; -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if
(code > 2) - { - pInfo->sw_bail = 1; - } -#endif -#endif viddec_pm_get_bits(parent, &code, 2); PictureParameterSet->weighted_bipred_idc = (uint8_t)code; //// QP PictureParameterSet->pic_init_qp_minus26 = h264_GetVLCElement(parent, pInfo, true); PictureParameterSet->pic_init_qs_minus26 = h264_GetVLCElement(parent, pInfo, true); - if (((PictureParameterSet->pic_init_qp_minus26+26) > MAX_QP) || ((PictureParameterSet->pic_init_qs_minus26+26) > MAX_QP)) + if (((PictureParameterSet->pic_init_qp_minus26 + 26) > MAX_QP) || ((PictureParameterSet->pic_init_qs_minus26 + 26) > MAX_QP)) { -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif + ETRACE("QP is greater than 51"); break; } PictureParameterSet->chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true); -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if ((12 < PictureParameterSet->chroma_qp_index_offset) || (-12 > PictureParameterSet->chroma_qp_index_offset) ) - { - pInfo->sw_bail = 1; - } -#endif -#endif //// Deblocking ctl parameters viddec_pm_get_bits(parent, &code, 1); PictureParameterSet->deblocking_filter_control_present_flag = (uint8_t)code; @@ -129,52 +86,36 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa break; PictureParameterSet->redundant_pic_cnt_present_flag = (uint8_t)code; -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if (code && (pInfo->active_SPS.profile_idc != h264_ProfileBaseline)) - { - pInfo->sw_bail = 1; - } -#endif -#endif //// Check if have more RBSP Data for additional parameters if (h264_More_RBSP_Data(parent, pInfo)) { - viddec_pm_get_bits(parent, &code, 1); + viddec_pm_get_bits(parent, &code, 1); PictureParameterSet->transform_8x8_mode_flag = (uint8_t)code; - if ( viddec_pm_get_bits(parent, &code, 1) == -1) + if (viddec_pm_get_bits(parent, &code, 1) == -1) break; PictureParameterSet->pic_scaling_matrix_present_flag = (uint8_t)code; if (PictureParameterSet->pic_scaling_matrix_present_flag) { uint32_t n_ScalingList = 6 + (PictureParameterSet->transform_8x8_mode_flag << 1); - for (i=0; i<n_ScalingList; i++) + for (i = 0; i < n_ScalingList; i++) { viddec_pm_get_bits(parent, &code, 1); PictureParameterSet->pic_scaling_list_present_flag[i] = (uint8_t)code; if (PictureParameterSet->pic_scaling_list_present_flag[i]) { - if (i<6) h264_Scaling_List(parent, PictureParameterSet->ScalingList4x4[i], 16, &PictureParameterSet->UseDefaultScalingMatrix4x4Flag[i], pInfo); else - h264_Scaling_List(parent, PictureParameterSet->ScalingList8x8[i-6], 64, &PictureParameterSet->UseDefaultScalingMatrix8x8Flag[i-6], pInfo); + h264_Scaling_List(parent, PictureParameterSet->ScalingList8x8[i - 6], 64, &PictureParameterSet->UseDefaultScalingMatrix8x8Flag[i - 6], pInfo); } } } PictureParameterSet->second_chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true); //fix -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if((PictureParameterSet->second_chroma_qp_index_offset>12) || (PictureParameterSet->second_chroma_qp_index_offset < -12)) - { - pInfo->sw_bail = 1; - } -#endif -#endif } else { diff --git a/mixvbp/vbp_plugin/h264/h264parse_sei.c b/mixvbp/vbp_plugin/h264/h264parse_sei.c index 12f793d..df18cc3 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_sei.c +++ b/mixvbp/vbp_plugin/h264/h264parse_sei.c @@ -7,6 +7,7 @@ #include "h264parse_dpb.h" #include "viddec_parser_ops.h" +#include <vbp_trace.h> ////////////////////////////////////////////////////////////////////////////// // avc_sei_stream_initialise () @@ -50,7 +51,10 @@ h264_Status h264_sei_buffering_period(void *parent,h264_Info* pInfo) sei_msg_ptr->seq_param_set_id = h264_GetVLCElement(parent, pInfo, false); if (sei_msg_ptr->seq_param_set_id >= 
NUM_SPS) + { + ETRACE("SEI parsing: SPS id is out of range: %d", sei_msg_ptr->seq_param_set_id); break; + } //check if this id is same as the id of the current SPS //fix @@ -133,18 +137,26 @@ h264_Status h264_sei_pic_timing(void *parent,h264_Info* pInfo) sei_msg_ptr->pic_struct = (uint8_t)code; - if ((sei_msg_ptr->pic_struct == 0) || (sei_msg_ptr->pic_struct == 7) || (sei_msg_ptr->pic_struct == 8)) { + if ((sei_msg_ptr->pic_struct == 0) || (sei_msg_ptr->pic_struct == 7) || (sei_msg_ptr->pic_struct == 8)) + { pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_PROGRESSIVE; - } else { + } + else + { pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_INTERLACED; } - if (sei_msg_ptr->pic_struct < 3) { + if (sei_msg_ptr->pic_struct < 3) + { NumClockTS = 1; - } else if ((sei_msg_ptr->pic_struct < 5) || (sei_msg_ptr->pic_struct == 7)) { + } + else if ((sei_msg_ptr->pic_struct < 5) || (sei_msg_ptr->pic_struct == 7)) + { NumClockTS = 2; - } else { + } + else + { NumClockTS = 3; } @@ -389,7 +401,8 @@ h264_Status h264_sei_recovery_point(void *parent, h264_Info* pInfo) pInfo->sei_information.capture_fn = 1; pInfo->sei_information.broken_link_pic = sei_msg_ptr->broken_link_flag; - if (pInfo->got_start) { + if (pInfo->got_start) + { pInfo->img.recovery_point_found |= 2; //// Enable the RP recovery if no IDR ---Cisco diff --git a/mixvbp/vbp_plugin/h264/h264parse_sh.c b/mixvbp/vbp_plugin/h264/h264parse_sh.c index 9db8cee..0d8dc9d 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_sh.c +++ b/mixvbp/vbp_plugin/h264/h264parse_sh.c @@ -3,6 +3,7 @@ #include "h264.h" #include "h264parse.h" +#include <vbp_trace.h> extern int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, unsigned char *is_emul); @@ -25,30 +26,21 @@ h264_Status h264_Parse_Slice_Header_1(void *parent,h264_Info* pInfo, h264_Slice_ ///// slice_type slice_type = h264_GetVLCElement(parent, pInfo, false); -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if (slice_type > 9) - { - pInfo->sw_bail = 1; - } -#endif -#endif - SliceHeader->slice_type = (slice_type%5); - if (SliceHeader->slice_type > h264_PtypeI) { + SliceHeader->slice_type = (slice_type % 5); + + if (SliceHeader->slice_type > h264_PtypeI) + { + WTRACE("Slice type (%d) is not supported", SliceHeader->slice_type); ret = H264_STATUS_NOTSUPPORT; break; } - ////// pic_parameter_id data = h264_GetVLCElement(parent, pInfo, false); - if (data > MAX_PIC_PARAMS) { -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif + if (data > MAX_PIC_PARAMS) + { + WTRACE("pic_parameter_id is invalid: %d", data); ret = H264_PPS_INVALID_PIC_ID; break; } @@ -79,7 +71,7 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice //////////////////////////////////// Slice header part 2////////////////// /// Frame_num - viddec_pm_get_bits(parent, &code, pInfo->active_SPS.log2_max_frame_num_minus4+4); + viddec_pm_get_bits(parent, &code, pInfo->active_SPS.log2_max_frame_num_minus4 + 4); SliceHeader->frame_num = (int32_t)code; /// Picture structure @@ -98,46 +90,39 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice viddec_pm_get_bits(parent, &code, 1); SliceHeader->bottom_field_flag = (uint8_t)code; - SliceHeader->structure = SliceHeader->bottom_field_flag? BOTTOM_FIELD: TOP_FIELD; + SliceHeader->structure = SliceHeader->bottom_field_flag ? 
BOTTOM_FIELD: TOP_FIELD; } } ////// Check valid or not of first_mb_in_slice - if (SliceHeader->structure == FRAME) { + if (SliceHeader->structure == FRAME) + { max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs; - } else { - max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs/2; } - + else + { + max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs / 2; + } ///if(pInfo->img.MbaffFrameFlag) - if (pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) { - SliceHeader->first_mb_in_slice <<=1; + if (pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) + { + SliceHeader->first_mb_in_slice <<= 1; } if (SliceHeader->first_mb_in_slice >= max_mb_num) break; - if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) { SliceHeader->idr_pic_id = h264_GetVLCElement(parent, pInfo, false); -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if (SliceHeader->idr_pic_id > 65535) - { - pInfo->sw_bail = 1; - } -#endif -#endif } if (pInfo->active_SPS.pic_order_cnt_type == 0) { - viddec_pm_get_bits(parent, &code , pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4); + viddec_pm_get_bits(parent, &code , pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4 + 4); SliceHeader->pic_order_cnt_lsb = (uint32_t)code; - if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag)) { SliceHeader->delta_pic_order_cnt_bottom = h264_GetVLCElement(parent, pInfo, true); @@ -162,7 +147,9 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice SliceHeader->redundant_pic_cnt = h264_GetVLCElement(parent, pInfo, false); if (SliceHeader->redundant_pic_cnt > 127) break; - } else { + } + else + { SliceHeader->redundant_pic_cnt = 0; } @@ -221,22 +208,25 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice SliceHeader->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false) + 1; if (SliceHeader->slice_type == h264_PtypeB) { - SliceHeader->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1; + SliceHeader->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false) + 1; } } } - if (SliceHeader->slice_type != h264_PtypeB) { + if (SliceHeader->slice_type != h264_PtypeB) + { SliceHeader->num_ref_idx_l1_active = 0; } if ((SliceHeader->num_ref_idx_l0_active > MAX_NUM_REF_FRAMES) || (SliceHeader->num_ref_idx_l1_active > MAX_NUM_REF_FRAMES)) { + WTRACE("ref index greater than expected during slice header parsing."); break; } if (h264_Parse_Ref_Pic_List_Reordering(parent, pInfo, SliceHeader) != H264_STATUS_OK) { + WTRACE("ref list reordering failed during slice header parsing."); break; } @@ -282,6 +272,7 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice { if (h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, SliceHeader) != H264_STATUS_OK) { + WTRACE("ref pic marking failed during slice header parsing."); break; } } @@ -297,26 +288,17 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice if (SliceHeader->cabac_init_idc > 2) { -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif break; } SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true); - if ( (SliceHeader->slice_qp_delta > (25-pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26+pInfo->active_PPS.pic_init_qp_minus26))) + if ((SliceHeader->slice_qp_delta > (25 - pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < 
-(26 + pInfo->active_PPS.pic_init_qp_minus26))) { -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif + WTRACE("slice_qp_delta value is invalid."); break; } - if ((SliceHeader->slice_type == h264_PtypeSP)|| (SliceHeader->slice_type == h264_PtypeSI) ) + if ((SliceHeader->slice_type == h264_PtypeSP) || (SliceHeader->slice_type == h264_PtypeSI)) { if (SliceHeader->slice_type == h264_PtypeSP) { @@ -326,13 +308,9 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice } SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true); - if ( (SliceHeader->slice_qs_delta > (25-pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) + if ((SliceHeader->slice_qs_delta > (25 - pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) { -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif + WTRACE("slice_qs_delta value is invalid."); break; } } @@ -345,11 +323,6 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice slice_alpha_c0_offset = SliceHeader->slice_alpha_c0_offset_div2 << 1; if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12) { -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif break; } @@ -357,11 +330,6 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice slice_beta_offset = SliceHeader->slice_beta_offset_div2 << 1; if (slice_beta_offset < -12 || slice_beta_offset > 12) { -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif break; } } @@ -405,7 +373,6 @@ h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h2 int32_t reorder= -1; uint32_t code; - if ((SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) { viddec_pm_get_bits(parent, &code, 1); @@ -413,8 +380,7 @@ h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h2 if (SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag) { - - reorder= -1; + reorder = -1; do { reorder++; @@ -445,7 +411,6 @@ h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h2 if (SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag) { - reorder = -1; do { @@ -472,46 +437,23 @@ h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h2 } -#ifdef VBP h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) { - uint32_t i =0, j=0; + uint32_t i = 0, j = 0; uint32_t flag; SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(parent, pInfo, false); -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if (SliceHeader->sh_predwttbl.luma_log2_weight_denom > 7) - { - pInfo->sw_bail = 1; - } -#endif -#endif if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) { SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(parent,pInfo, false); } -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if (SliceHeader->sh_predwttbl.chroma_log2_weight_denom > 7) - { - pInfo->sw_bail = 1; - } -#endif -#endif - for (i=0; i< SliceHeader->num_ref_idx_l0_active; i++) + + for (i = 0; i < SliceHeader->num_ref_idx_l0_active; i++) { viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); SliceHeader->sh_predwttbl.luma_weight_l0_flag = flag; -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if ((-128 > flag) || (127 < flag)) - { - pInfo->sw_bail = 1; - } -#endif -#endif + if 
(SliceHeader->sh_predwttbl.luma_weight_l0_flag) { SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(parent, pInfo, true); @@ -527,17 +469,10 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli { viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); SliceHeader->sh_predwttbl.chroma_weight_l0_flag = flag; -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if ((-128 > flag) || (127 < flag)) - { - pInfo->sw_bail = 1; - } -#endif -#endif + if (SliceHeader->sh_predwttbl.chroma_weight_l0_flag) { - for (j=0; j <2; j++) + for (j = 0; j < 2; j++) { SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); @@ -545,7 +480,7 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli } else { - for (j=0; j <2; j++) + for (j = 0; j < 2; j++) { SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; @@ -557,18 +492,11 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli if (SliceHeader->slice_type == h264_PtypeB) { - for (i=0; i< SliceHeader->num_ref_idx_l1_active; i++) + for (i = 0; i < SliceHeader->num_ref_idx_l1_active; i++) { viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); SliceHeader->sh_predwttbl.luma_weight_l1_flag = flag; -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if ((-128 > flag) || (127 < flag)) - { - pInfo->sw_bail = 1; - } -#endif -#endif + if (SliceHeader->sh_predwttbl.luma_weight_l1_flag) { SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(parent, pInfo, true); @@ -584,17 +512,10 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli { viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); SliceHeader->sh_predwttbl.chroma_weight_l1_flag = flag; -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if ((-128 > flag) || (127 < flag)) - { - pInfo->sw_bail = 1; - } -#endif -#endif + if (SliceHeader->sh_predwttbl.chroma_weight_l1_flag) { - for (j=0; j <2; j++) + for (j = 0; j < 2; j++) { SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(parent, pInfo, true); SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(parent, pInfo, true); @@ -602,7 +523,7 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli } else { - for (j=0; j <2; j++) + for (j = 0; j < 2; j++) { SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; @@ -616,126 +537,6 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli return H264_STATUS_OK; } ///// End of h264_Parse_Pred_Weight_Table -#else - -/*--------------------------------------------------------------------------------------------------*/ -// -// Parse Prediction weight table -// Note: This table will be reparsed in HW Accelerator, so needn't keep it in parser -// -/*--------------------------------------------------------------------------------------------------*/ - - -h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) -{ - uint32_t i =0, j=0; - uint32_t flag, val; - //h264_Slice_Header_t* SliceHeader = &pInfo->SPS.SliceHeader; - - //SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "luma_log2_weight_denom"); - val = 
h264_GetVLCElement(parent, pInfo, false); - - if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) - { - //SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "chroma_log2_weight_denom"); - val = h264_GetVLCElement(parent,pInfo, false); - } - - for (i=0; i< SliceHeader->num_ref_idx_l0_active; i++) - { - //SliceHeader->sh_predwttbl.luma_weight_l0_flag = h264_GetBits(pInfo, 1, "luma_weight_l0_flag"); - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - - //if(SliceHeader->sh_predwttbl.luma_weight_l0_flag) - if (flag) - { - //SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l0"); - val = h264_GetVLCElement(parent, pInfo, true); - //SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l0"); - val = h264_GetVLCElement(parent, pInfo, true); - } - else - { - //SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); - //SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0; - } - - if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) - { - //SliceHeader->sh_predwttbl.chroma_weight_l0_flag = h264_GetBits(pInfo, 1, "chroma_weight_l0_flag"); - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - if (flag) - { - for (j=0; j <2; j++) - { - //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l0"); - val = h264_GetVLCElement(parent, pInfo, true); - //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l0"); - val = h264_GetVLCElement(parent, pInfo, true); - } - } - else - { - for (j=0; j <2; j++) - { - //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); - //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; - } - } - } - - } - - if (SliceHeader->slice_type == h264_PtypeB) - { - for (i=0; i< SliceHeader->num_ref_idx_l1_active; i++) - { - //SliceHeader->sh_predwttbl.luma_weight_l1_flag = h264_GetBits(pInfo, 1, "luma_weight_l1_flag"); - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - if (flag) - { - //SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l1"); - val = h264_GetVLCElement(parent, pInfo, true); - //SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l1"); - val = h264_GetVLCElement(parent, pInfo, true); - } - else - { - //SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); - //SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; - } - - if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) - { - //SliceHeader->sh_predwttbl.chroma_weight_l1_flag = h264_GetBits(pInfo, 1, "chroma_weight_l1_flag"); - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - if (flag) - { - for (j=0; j <2; j++) - { - //SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l1"); - val = h264_GetVLCElement(parent, pInfo, true); - //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l1"); - val = h264_GetVLCElement(parent, pInfo, true); - } - } - else - { - for (j=0; j <2; j++) - { - //SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); - //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; - } - } - } - - } - } - - return H264_STATUS_OK; -} ///// End of h264_Parse_Pred_Weight_Table - -#endif 
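/* [Editor's note -- illustrative sketch, not part of the patch content above or below.]
 * The slice-header and parameter-set parsing in these diffs reads most syntax
 * elements through h264_GetVLCElement(parent, pInfo, false|true), i.e. the
 * Exp-Golomb ue(v)/se(v) codes of the H.264 spec. A minimal stand-alone decoder
 * of that coding, assuming only a hypothetical read_bit() that returns the next
 * RBSP bit:
 */
extern uint32_t read_bit(void);               /* hypothetical bit reader */

static uint32_t exp_golomb_ue(void)           /* ue(v) -- the 'false' calls */
{
    int zeros = 0;
    while (read_bit() == 0)
        zeros++;                              /* count leading zero bits */
    uint32_t suffix = 0;
    for (int i = 0; i < zeros; i++)
        suffix = (suffix << 1) | read_bit();  /* read the same number of suffix bits */
    return ((uint32_t)1 << zeros) - 1 + suffix;
}

static int32_t exp_golomb_se(void)            /* se(v) -- the 'true' calls */
{
    uint32_t k = exp_golomb_ue();
    /* map codeNum 0,1,2,3,4,... to value 0,1,-1,2,-2,... */
    return (k & 1) ? (int32_t)((k >> 1) + 1) : -(int32_t)(k >> 1);
}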
/*--------------------------------------------------------------------------------------------------*/ // The syntax elements specify marking of the reference pictures. @@ -824,14 +625,9 @@ h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent, h264_Info* pInfo,h264_S } } - - - SliceHeader->sh_dec_refpic.dec_ref_pic_marking_count = i; return H264_STATUS_OK; } - - //#endif diff --git a/mixvbp/vbp_plugin/h264/h264parse_sps.c b/mixvbp/vbp_plugin/h264/h264parse_sps.c index 431892b..909f550 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_sps.c +++ b/mixvbp/vbp_plugin/h264/h264parse_sps.c @@ -3,9 +3,8 @@ #include "h264.h" #include "h264parse.h" -#ifdef VBP -#include <limits.h> -#endif +#include <vbp_trace.h> +#include <limits.h> /// SPS extension unit (unit_type = 13) @@ -169,9 +168,8 @@ h264_Status h264_Parse_Vui_Parameters(void *parent, h264_Info* pInfo, seq_param_ viddec_pm_get_bits(parent, &code, 1); pVUI_Seq_Not_Used->video_full_range_flag = (uint8_t)code; -#ifdef VBP + SPS->sps_disp.vui_seq_parameters.video_full_range_flag = (uint8_t)code; -#endif viddec_pm_get_bits(parent, &code, 1); SPS->sps_disp.vui_seq_parameters.colour_description_present_flag = (uint8_t)code; @@ -186,9 +184,8 @@ h264_Status h264_Parse_Vui_Parameters(void *parent, h264_Info* pInfo, seq_param_ viddec_pm_get_bits(parent, &code, 8); pVUI_Seq_Not_Used->matrix_coefficients = (uint8_t)code; -#ifdef VBP + SPS->sps_disp.vui_seq_parameters.matrix_coefficients = (uint8_t)code; -#endif } } @@ -294,42 +291,16 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param case h264_ProfileHigh: break; default: -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif return H264_SPS_INVALID_PROFILE; break; } - //SPS->constraint_set0_flag = h264_GetBits(pInfo, 1, "constraint_set0_flag"); - //SPS->constraint_set1_flag = h264_GetBits(pInfo, 1, "constraint_set1_flag"); //should be 1 - //SPS->constraint_set2_flag = h264_GetBits(pInfo, 1, "constraint_set2_flag"); - //SPS->constraint_set3_flag = h264_GetBits(pInfo, 1, "constraint_set3_flag"); -#ifdef VBP viddec_pm_get_bits(parent, &code, 5); //constraint flag set0...set4 (h.264 Spec v2009) SPS->constraint_set_flags = (uint8_t)code; //// reserved_zero_3bits viddec_pm_get_bits(parent, (uint32_t *)&code, 3); //3bits zero reserved (h.264 Spec v2009) -#else - viddec_pm_get_bits(parent, &code, 4); - SPS->constraint_set_flags = (uint8_t)code; - //// reserved_zero_4bits - viddec_pm_get_bits(parent, (uint32_t *)&code, 4); -#endif -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if (code != 0) - { - pInfo->sw_bail = 1; - } -#endif -#endif viddec_pm_get_bits(parent, &code, 8); SPS->level_idc = (uint8_t)code; @@ -353,11 +324,6 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param case h264_Level51: break; default: -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif return H264_SPS_INVALID_LEVEL; } @@ -367,46 +333,44 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param //// seq_parameter_set_id ---[0,31] if (SPS->seq_parameter_set_id > MAX_NUM_SPS -1) { -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif + ETRACE("SPS id is out of range: %d", SPS->seq_parameter_set_id); break; } -#ifdef VBP SPS->sps_disp.separate_colour_plane_flag = 0; -#endif if ((SPS->profile_idc == h264_ProfileHigh) || (SPS->profile_idc == h264_ProfileHigh10) || - (SPS->profile_idc == h264_ProfileHigh422) || (SPS->profile_idc == h264_ProfileHigh444) ) + (SPS->profile_idc == h264_ProfileHigh422) || 
(SPS->profile_idc == h264_ProfileHigh444)) { //// chroma_format_idc ---[0,3], currently we don't support 444, so [0,2] data = h264_GetVLCElement(parent, pInfo, false); if ( data > H264_CHROMA_422) break; SPS->sps_disp.chroma_format_idc = (uint8_t)data; - //if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) {} -#ifdef VBP - if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) { + if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) + { viddec_pm_get_bits(parent, &code, 1); SPS->sps_disp.separate_colour_plane_flag = (uint8_t)data; } -#endif + //// bit_depth_luma_minus8 ---[0,4], -----only support 8-bit pixel data = h264_GetVLCElement(parent, pInfo, false); - if ( data) + if (data) + { + ETRACE("A High Profile bitstream must have bit_depth_luma_minus8 equal to 0"); break; + } SPS->bit_depth_luma_minus8 = (uint8_t)data; //// bit_depth_chroma_minus8 ---[0,4] data = h264_GetVLCElement(parent, pInfo, false); - if ( data ) + if (data) + { + ETRACE("A High Profile bitstream must have bit_depth_chroma_minus8 equal to 0"); break; + } SPS->bit_depth_chroma_minus8 = (uint8_t)data; - viddec_pm_get_bits(parent, &code, 1); SPS->lossless_qpprime_y_zero_flag = (uint8_t)code; @@ -418,14 +382,14 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param //int n_ScalingList = (SPS->sps_disp.chroma_format_idc != H264_CHROMA_444) ? 8 : 12; int n_ScalingList = 8; /// We do not support 444 currently - for (i=0; i<n_ScalingList; i++) + for (i = 0; i < n_ScalingList; i++) { viddec_pm_get_bits(parent, &code, 1); SPS->seq_scaling_list_present_flag[i] = (uint8_t)code; if (SPS->seq_scaling_list_present_flag[i]) { - if (i<6) + if (i < 6) h264_Scaling_List(parent, SPS->ScalingList4x4[i], 16, &SPS->UseDefaultScalingMatrix4x4Flag[i], pInfo); else h264_Scaling_List(parent, SPS->ScalingList8x8[i-6], 64, &SPS->UseDefaultScalingMatrix8x8Flag[i-6], pInfo); @@ -447,33 +411,28 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param data = (h264_GetVLCElement(parent, pInfo, false)); if ( data > 12) { -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif + ETRACE("log2_max_frame_num_minus4 is over 12"); break; } SPS->log2_max_frame_num_minus4 = (uint8_t)data; //// pic_order_cnt_type ---- [0,2] data = h264_GetVLCElement(parent, pInfo, false); - if ( data > 2) + if (data > 2) { -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif + ETRACE("pic_order_cnt_type is over 2"); break; } SPS->pic_order_cnt_type = (uint8_t)data; SPS->expectedDeltaPerPOCCycle = 0; - if (SPS->pic_order_cnt_type == 0) { + if (SPS->pic_order_cnt_type == 0) + { SPS->log2_max_pic_order_cnt_lsb_minus4 = h264_GetVLCElement(parent, pInfo, false); - } else if (SPS->pic_order_cnt_type == 1) { + } + else if (SPS->pic_order_cnt_type == 1) + { viddec_pm_get_bits(parent, &code, 1); SPS->delta_pic_order_always_zero_flag = (uint8_t)code; @@ -484,29 +443,18 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param data = h264_GetVLCElement(parent, pInfo, false); if ( data > 255) { -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif + ETRACE("num_ref_frames_in_pic_order_cnt_cycle is out of range: %d", data); break; } SPS->num_ref_frames_in_pic_order_cnt_cycle = (uint8_t)data; - //Alloc memory for frame offset -- FIXME - for (i=0; i< SPS->num_ref_frames_in_pic_order_cnt_cycle; i++) + for (i = 0; i < SPS->num_ref_frames_in_pic_order_cnt_cycle; i++) { /////SPS->offset_for_ref_frame[i] could be removed from SPS -#ifndef USER_MODE - tmp = h264_GetVLCElement(parent, pInfo, true); - pOffset_ref_frame[i]=tmp; - 
SPS->expectedDeltaPerPOCCycle += tmp; -#else tmp = h264_GetVLCElement(parent, pInfo, true); - SPS->offset_for_ref_frame[i]=tmp; + pOffset_ref_frame[i] = tmp; SPS->expectedDeltaPerPOCCycle += tmp; -#endif } } @@ -514,11 +462,7 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param data = h264_GetVLCElement(parent, pInfo, false); if ( data > 16) { -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 1; -#endif -#endif + ETRACE("The number of reference frames should not be over 16, actual: %d", data); break; } SPS->num_ref_frames = (uint8_t)data; @@ -535,9 +479,7 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param /// err check for size PicWidthInMbs = (SPS->sps_disp.pic_width_in_mbs_minus1 + 1); PicHeightInMapUnits = (SPS->sps_disp.pic_height_in_map_units_minus1 + 1); - FrameHeightInMbs = SPS->sps_disp.frame_mbs_only_flag? PicHeightInMapUnits: (PicHeightInMapUnits<<1); - if ((PicWidthInMbs < 2) || (PicWidthInMbs > 128) || (FrameHeightInMbs < 2) || (FrameHeightInMbs>128)) - break; + FrameHeightInMbs = SPS->sps_disp.frame_mbs_only_flag ? PicHeightInMapUnits : (PicHeightInMapUnits << 1); if (!SPS->sps_disp.frame_mbs_only_flag) { @@ -545,9 +487,6 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param SPS->sps_disp.mb_adaptive_frame_field_flag = (uint8_t)code; } - //SPS->frame_height_in_mbs = (2-SPS->sps_disp.frame_mbs_only_flag)*(SPS->sps_disp.pic_height_in_map_units_minus1+1); - //SPS->pic_size_in_map_units = (SPS->sps_disp.pic_width_in_mbs_minus1+1)*SPS->sps_disp.frame_height_in_mbs; - viddec_pm_get_bits(parent, &code, 1); SPS->sps_disp.direct_8x8_inference_flag = (uint8_t)code; @@ -563,21 +502,20 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param } //// when frame_mbs_only_flag is equal to 0, direct_8x8_inference_flag shall be equal to 1 - if (SPS->sps_disp.frame_mbs_only_flag == 0 && SPS->sps_disp.direct_8x8_inference_flag == 0) { + if (SPS->sps_disp.frame_mbs_only_flag == 0 && SPS->sps_disp.direct_8x8_inference_flag == 0) + { + ETRACE("frame_mbs_only_flag is equal to 0 but direct_8x8_inference_flag is not equal to 1"); break; } ////// vui_parameters - if (viddec_pm_get_bits(parent, &code, 1) == -1) - break; + viddec_pm_get_bits(parent, &code, 1); SPS->sps_disp.vui_parameters_present_flag = (uint8_t)code; ret = H264_STATUS_OK; if (SPS->sps_disp.vui_parameters_present_flag) { -#ifndef VBP - ret = h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used); -#else + // Ignore VUI parsing result h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used); if (SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag) @@ -598,10 +536,9 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param SPS->sps_disp.vui_seq_parameters.bit_rate_value = bit_rate_value; }*/ -#endif } } while (0); -#ifdef VBP + if (SPS->sps_disp.vui_seq_parameters.bit_rate_value == 0) { int maxBR = 0; @@ -680,7 +617,6 @@ h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param SPS->sps_disp.vui_seq_parameters.bit_rate_value = maxBR * cpbBrVclFactor; } -#endif //h264_Parse_rbsp_trailing_bits(pInfo); diff --git a/mixvbp/vbp_plugin/h264/include/h264.h b/mixvbp/vbp_plugin/h264/include/h264.h index 6171e76..f0da7ed 100755 --- a/mixvbp/vbp_plugin/h264/include/h264.h +++ b/mixvbp/vbp_plugin/h264/include/h264.h @@ -14,34 +14,15 @@ #ifndef _H264_H_ #define _H264_H_ -#ifdef HOST_ONLY #include <stdio.h> #include <stdlib.h> #include <string.h> -#endif #include "stdint.h" 
#include "viddec_fw_common_defs.h" #include "h264parse_sei.h" -#ifdef VBP -//#define SW_ERROR_CONCEALEMNT -#endif - -#ifdef WIN32 -#define mfd_printf OS_INFO -#endif - -#ifdef H264_VERBOSE -#define PRINTF(format, args...) OS_INFO("%s: %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ , ## args ) -#else -//#define PRINTF(args...) -#endif - -//#pragma warning(disable : 4710) // function not inlined -//#pragma warning(disable : 4514) // unreferenced inline function has been removed CL -//#pragma warning(disable : 4100) // unreferenced formal parameter CL #ifdef __cplusplus extern "C" { @@ -87,7 +68,7 @@ extern "C" { #define MPD_DPB_FS_NULL_IDC 31 // May need to be changed if we alter gaps_in_frame_num to use #define MFD_H264_MAX_FRAME_BUFFERS 17 -#define NUM_DPB_FRAME_STORES (MFD_H264_MAX_FRAME_BUFFERS + 1) // 1 extra for storign non-existent pictures. +#define NUM_DPB_FRAME_STORES (MFD_H264_MAX_FRAME_BUFFERS + 1) // 1 extra for storing non-existent pictures. //Scalling Matrix Type #define PPS_QM 0 @@ -108,8 +89,6 @@ extern "C" { #define FRAME_TYPE_BOTTOM_OFFSET 0 #define FRAME_TYPE_STRUCTRUE_OFFSET 6 -//// Error handling -#define FIELD_ERR_OFFSET 17 //offset for Field error flag ----refer to the structure definition viddec_fw_workload_error_codes in viddec_fw_common_defs.h ////Bits Handling #define h264_bitfields_extract(x_32, start, mask) (((x_32) >> (start)) & (mask) ) @@ -505,11 +484,10 @@ extern "C" { uint8_t aspect_ratio_idc; // u(8) uint8_t video_signal_type_present_flag; // u(1) uint8_t video_format; // u(3) -#ifdef VBP + uint8_t video_full_range_flag; // u(1) uint8_t matrix_coefficients; // u(8) uint32_t bit_rate_value; -#endif uint8_t colour_description_present_flag; // u(1) uint8_t colour_primaries; // u(8) @@ -684,7 +662,6 @@ extern "C" { uint8_t status; } OldSliceParams; -#ifdef VBP typedef struct _h264__pred_weight_table { uint8_t luma_log2_weight_denom; @@ -703,7 +680,6 @@ extern "C" { int16_t chroma_weight_l1[32][2]; int8_t chroma_offset_l1[32][2]; } h264_pred_weight_table; -#endif typedef struct _h264_Slice_Header { @@ -723,9 +699,7 @@ extern "C" { int32_t slice_beta_offset_div2; //SE int32_t slice_group_change_cycle; //UV -#ifdef VBP h264_pred_weight_table sh_predwttbl; -#endif ///// Flags or IDs //h264_ptype_t slice_type; //UE @@ -789,9 +763,7 @@ extern "C" { uint8_t mb_adaptive_frame_field_flag; uint8_t direct_8x8_inference_flag; uint8_t frame_cropping_flag; -#ifdef VBP uint8_t separate_colour_plane_flag; -#endif uint16_t vui_parameters_present_flag; uint16_t chroma_format_idc; @@ -1010,15 +982,6 @@ extern "C" { uint8_t last_I_frame_idc; uint8_t sei_b_state_ready; uint8_t gop_err_flag; - - - uint32_t wl_err_curr; - uint32_t wl_err_next; -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - uint32_t sw_bail; -#endif -#endif } h264_Info; @@ -1090,17 +1053,12 @@ typedef struct _slice_header_t { } slice_header_t; - - typedef struct _vbp_h264_sliceheader { slice_header_t slice_header; dec_ref_pic_marking_t ref_pic_marking; } vbp_h264_sliceheader; #endif - - - #endif //_H264_H_ diff --git a/mixvbp/vbp_plugin/h264/include/h264parse.h b/mixvbp/vbp_plugin/h264/include/h264parse.h index 2e7b817..1467932 100755 --- a/mixvbp/vbp_plugin/h264/include/h264parse.h +++ b/mixvbp/vbp_plugin/h264/include/h264parse.h @@ -3,10 +3,8 @@ #include "h264.h" -#ifndef MFD_FIRMWARE #define true 1 #define false 0 -#endif //////////////////////////////////////////////////////////////////// // The following part is only for Parser Debug diff --git 
a/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c index 103841e..9490ddd 100755 --- a/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c +++ b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c @@ -1,12 +1,13 @@ #include "viddec_parser_ops.h" -#include "viddec_fw_workload.h" #include "viddec_pm.h" #include "h264.h" #include "h264parse.h" #include "h264parse_dpb.h" +#include <vbp_trace.h> + /* Init function which can be called to initialize local context on open and flush and preserve*/ void viddec_h264secure_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) @@ -22,9 +23,6 @@ void viddec_h264secure_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve } /* picture level info which will always be initialized */ h264_init_Info_under_sps_pps_level(pInfo); -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 0; -#endif return; } @@ -92,16 +90,6 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) pInfo->primary_pic_type_plus_one = 0; - -#ifndef VBP - if (pInfo->img.recovery_point_found == 0) { - pInfo->img.structure = FRAME; - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); - break; - } -#endif - //////////////////////////////////////////////////////////////////////////// // Step 2: Parsing slice header //////////////////////////////////////////////////////////////////////////// @@ -120,26 +108,13 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) pInfo->sei_information.recovery_point = 0; - if (next_SliceHeader.sh_error & 3) { - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - - // Error type definition, refer to viddec_fw_common_defs.h - // if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17) - // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18) - // if this is frame based, both 2 bits should be set - pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); - + if (next_SliceHeader.sh_error & 3) + { break; } pInfo->img.current_slice_num++; -#ifdef DUMP_HEADER_INFO - dump_slice_header(pInfo, &next_SliceHeader); ////h264_print_decoder_values(pInfo); -#endif - - //////////////////////////////////////////////////////////////////////////// // Step 3: Processing if new picture coming // 1) if it's the second field @@ -187,10 +162,6 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) { h264_dpb_gaps_in_frame_num_mem_management(pInfo); } - -#ifdef DUMP_HEADER_INFO - dump_new_picture_attr(pInfo, pInfo->SliceHeader.frame_num); -#endif } // /// Decoding POC @@ -239,17 +210,6 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) h264_dpb_update_ref_lists( pInfo); -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if ((pInfo->dpb.ltref_frames_in_buffer + pInfo->dpb.ref_frames_in_buffer ) > pInfo->active_SPS.num_ref_frames) - { - pInfo->sw_bail = 1; - } -#endif -#endif -#ifdef DUMP_HEADER_INFO - dump_ref_list(pInfo); -#endif /// Emit out the current "good" slice h264_parse_emit_current_slice(parent, pInfo); @@ -260,8 +220,7 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) case h264_NAL_UNIT_TYPE_DPA: case h264_NAL_UNIT_TYPE_DPB: case h264_NAL_UNIT_TYPE_DPC: - //OS_INFO("***********************DP feature, not supported currently*******************\n"); - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + ETRACE("Data Partition is not supported currently\n"); status = H264_STATUS_NOTSUPPORT; break; @@ -301,11 +260,6 @@ uint32_t 
viddec_h264secure_parse(void *parent, void *ctxt) if (1==pInfo->active_SPS.pic_order_cnt_type) { h264_Parse_Copy_Offset_Ref_Frames_To_DDR(pInfo,(int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL,pInfo->active_SPS.seq_parameter_set_id); } - -#ifdef DUMP_HEADER_INFO - dump_sps(&(pInfo->active_SPS)); -#endif - } ///// Restore the active SPS if new arrival's id changed if (old_sps_id>=MAX_NUM_SPS) { h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used)); pInfo->active_SPS.seq_parameter_set_id = 0xff; } else { if (old_sps_id<MAX_NUM_SPS) h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); } @@ -358,9 +312,6 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) { h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); } -#ifdef DUMP_HEADER_INFO - dump_pps(&(pInfo->active_PPS)); -#endif } else { if (old_sps_id<MAX_NUM_SPS) h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); @@ -392,7 +343,6 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) break; case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: -#if 1 ///// primary_pic_type { uint32_t code = 0xff; @@ -410,7 +360,6 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) pInfo->number_of_first_au_info_nal_before_first_slice++; break; } -#endif case h264_NAL_UNIT_TYPE_Reserved1: case h264_NAL_UNIT_TYPE_Reserved2: diff --git a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c index db36c0b..2aa80b9 100755 --- a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c +++ b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c @@ -6,13 +6,10 @@ #include "h264parse.h" #include "h264parse_dpb.h" +#include <vbp_trace.h> /* Init function which can be called to initialize local context on open and flush and preserve*/ -#ifdef VBP void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) -#else -static void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) -#endif { struct h264_viddec_parser* parser = ctxt; h264_Info * pInfo = &(parser->info); @@ -20,16 +17,13 @@ static void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserv if (!preserve) { /* we don't initialize this data if we want to preserve - sequence and gop information */ + sequence and gop information. 
+ */ h264_init_sps_pps(parser,persist_mem); } /* picture level info which will always be initialized */ h264_init_Info_under_sps_pps_level(pInfo); -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - pInfo->sw_bail = 0; -#endif -#endif + return; } @@ -37,11 +31,7 @@ static void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserv /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ -#ifdef VBP uint32_t viddec_h264_parse(void *parent, void *ctxt) -#else -static uint32_t viddec_h264_parse(void *parent, void *ctxt) -#endif { struct h264_viddec_parser* parser = ctxt; @@ -49,7 +39,6 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) h264_Status status = H264_STATUS_ERROR; - uint8_t nal_ref_idc = 0; ///// Parse NAL Unit header @@ -59,25 +48,17 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) pInfo->nal_unit_type = 0; h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); + VTRACE("Start parsing NAL unit, type = %d", pInfo->nal_unit_type); ///// Check frame boundary for non-vcl delimiter h264_check_previous_frame_end(pInfo); - //OS_INFO("========================nal_type: %d=================\n", pInfo->nal_unit_type); - //DEBUG_WRITE(pInfo->nal_unit_type, pInfo->got_start, pInfo->wl_err_flag, pInfo->is_current_workload_done, 0, 0); -#if 0 - devh_SVEN_WriteModuleEvent( NULL, - SVEN_MODULE_EVENT_GV_FW_PARSER_DEBUG_P0, - pInfo->got_start,pInfo->nal_unit_type, pInfo->wl_err_curr, pInfo->is_current_workload_done, 0, pInfo->img.frame_num); -#endif - //////// Parse valid NAL unit - switch ( pInfo->nal_unit_type ) + switch (pInfo->nal_unit_type) { case h264_NAL_UNIT_TYPE_IDR: - if (pInfo->got_start) { + if (pInfo->got_start) pInfo->img.recovery_point_found |= 1; - } pInfo->sei_rp_received = 0; @@ -102,32 +83,22 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t)); next_SliceHeader.nal_ref_idc = nal_ref_idc; - if ( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start)) + if ((1 == pInfo->primary_pic_type_plus_one) && (pInfo->got_start)) { - pInfo->img.recovery_point_found |=4; + pInfo->img.recovery_point_found |= 4; } pInfo->primary_pic_type_plus_one = 0; - -#ifndef VBP - if (pInfo->img.recovery_point_found == 0) { - pInfo->img.structure = FRAME; - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); - break; - } -#endif - //////////////////////////////////////////////////////////////////////////// // Step 2: Parsing slice header //////////////////////////////////////////////////////////////////////////// /// PWT - pInfo->h264_pwt_start_byte_offset=0; - pInfo->h264_pwt_start_bit_offset=0; - pInfo->h264_pwt_end_byte_offset=0; - pInfo->h264_pwt_end_bit_offset=0; - pInfo->h264_pwt_enabled =0; + pInfo->h264_pwt_start_byte_offset = 0; + pInfo->h264_pwt_start_bit_offset = 0; + pInfo->h264_pwt_end_byte_offset = 0; + pInfo->h264_pwt_end_bit_offset = 0; + pInfo->h264_pwt_enabled = 0; /// IDR flag next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); @@ -137,26 +108,14 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) pInfo->sei_information.recovery_point = 0; - if (next_SliceHeader.sh_error & 3) { - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - - // Error type 
definition, refer to viddec_fw_common_defs.h - // if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17) - // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18) - // if this is frame based, both 2 bits should be set - pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); - + if (next_SliceHeader.sh_error & 3) + { + ETRACE("Slice Header parsing error.\n"); break; } pInfo->img.current_slice_num++; -#ifdef DUMP_HEADER_INFO - dump_slice_header(pInfo, &next_SliceHeader); -////h264_print_decoder_values(pInfo); -#endif - - //////////////////////////////////////////////////////////////////////////// // Step 3: Processing if new picture coming // 1) if it's the second field @@ -204,10 +163,6 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) { h264_dpb_gaps_in_frame_num_mem_management(pInfo); } - -#ifdef DUMP_HEADER_INFO - dump_new_picture_attr(pInfo, pInfo->SliceHeader.frame_num); -#endif } // /// Decoding POC @@ -224,7 +179,6 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17); } - // /// Emit out the New Frame if (pInfo->img.g_new_frame) { @@ -256,17 +210,6 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) h264_dpb_update_ref_lists( pInfo); -#ifdef VBP -#ifdef SW_ERROR_CONCEALEMNT - if ((pInfo->dpb.ltref_frames_in_buffer + pInfo->dpb.ref_frames_in_buffer ) > pInfo->active_SPS.num_ref_frames) - { - pInfo->sw_bail = 1; - } -#endif -#endif -#ifdef DUMP_HEADER_INFO - dump_ref_list(pInfo); -#endif /// Emit out the current "good" slice h264_parse_emit_current_slice(parent, pInfo); @@ -277,8 +220,7 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) case h264_NAL_UNIT_TYPE_DPA: case h264_NAL_UNIT_TYPE_DPB: case h264_NAL_UNIT_TYPE_DPC: - //OS_INFO("***********************DP feature, not supported currently*******************\n"); - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + ETRACE("Data Partition is not supported currently\n"); status = H264_STATUS_NOTSUPPORT; break; @@ -287,7 +229,8 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) status = H264_STATUS_OK; //OS_INFO("*****************************SEI**************************************\n"); - if (pInfo->sps_valid) { + if (pInfo->sps_valid) + { //h264_user_data_t user_data; /// Replace with tmp buffer while porting to FW pInfo->number_of_first_au_info_nal_before_first_slice++; /// parsing the SEI info @@ -311,26 +254,23 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) status = h264_Parse_SeqParameterSet(parent, pInfo, &(pInfo->active_SPS), &vui_seq_not_used, (int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL); - if (status == H264_STATUS_OK) { + if (status == H264_STATUS_OK) + { h264_Parse_Copy_Sps_To_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_SPS.seq_parameter_set_id); pInfo->sps_valid = 1; - if (1==pInfo->active_SPS.pic_order_cnt_type) { + if (1 == pInfo->active_SPS.pic_order_cnt_type) + { h264_Parse_Copy_Offset_Ref_Frames_To_DDR(pInfo,(int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL,pInfo->active_SPS.seq_parameter_set_id); } - -#ifdef DUMP_HEADER_INFO - dump_sps(&(pInfo->active_SPS)); -#endif - } ///// Restore the active SPS if new arrival's id changed - if (old_sps_id>=MAX_NUM_SPS) { + if (old_sps_id >= MAX_NUM_SPS) { h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used)); pInfo->active_SPS.seq_parameter_set_id = 0xff; } else { - if (old_sps_id!=pInfo->active_SPS.seq_parameter_set_id) { + if (old_sps_id != pInfo->active_SPS.seq_parameter_set_id) { 
h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); } else { @@ -375,13 +315,12 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) { h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); } -#ifdef DUMP_HEADER_INFO - dump_pps(&(pInfo->active_PPS)); -#endif - } else { - if (old_sps_id<MAX_NUM_SPS) h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); - if (old_pps_id<MAX_NUM_PPS) h264_Parse_Copy_Pps_From_DDR(pInfo, &(pInfo->active_PPS), old_pps_id); } @@ -409,7 +348,6 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) break; case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: -#if 1 ///// primary_pic_type { uint32_t code = 0xff; @@ -427,7 +365,6 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) pInfo->number_of_first_au_info_nal_before_first_slice++; break; } -#endif case h264_NAL_UNIT_TYPE_Reserved1: case h264_NAL_UNIT_TYPE_Reserved2: @@ -481,75 +418,7 @@ static uint32_t viddec_h264_parse(void *parent, void *ctxt) return status; } - - - -/* ------------------------------------------------------------------------------------------ */ -/* ------------------------------------------------------------------------------------------ */ -/* ------------------------------------------------------------------------------------------ */ -#ifndef VBP -static uint32_t viddec_h264_is_frame_start(void *ctxt) -{ - struct h264_viddec_parser* parser = ctxt; - uint32_t ret = 0; - - h264_Info * pInfo = &(parser->info); - - if (pInfo->img.g_new_frame) { - ret = 1; - } - - return ret; -} -#endif - -#ifndef VBP -uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int next_sc, - uint32_t *codec_specific_errors) -{ - struct h264_viddec_parser* parser = ctxt; - uint32_t ret = VIDDEC_PARSE_SUCESS; - h264_Info * pInfo = &(parser->info); - uint8_t is_stream_forced_to_complete=false; - - is_stream_forced_to_complete = (VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc); - - if (is_stream_forced_to_complete || (pInfo->is_current_workload_done)) - { - viddec_workload_t *wl; - viddec_frame_attributes_t *attrs; - - wl = viddec_pm_get_header( parent ); - attrs = &wl->attrs; - - if ((attrs->cont_size.width < 32) || (attrs->cont_size.width > 2048) || (attrs->cont_size.height < 32) || (attrs->cont_size.height>2048)) - { - attrs->cont_size.width = 32; - attrs->cont_size.height = 32; - pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET); - } - - *codec_specific_errors = pInfo->wl_err_curr; - pInfo->wl_err_curr = pInfo->wl_err_next; - pInfo->wl_err_next = 0; - - if (is_stream_forced_to_complete) - { - h264_parse_emit_eos(parent, pInfo); - } - ret = VIDDEC_PARSE_FRMDONE; - } - - return ret; -} -#endif - -#ifdef VBP void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) -#else -static void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) -#endif { /* Should return size of my structure */ size->context_size = sizeof(struct h264_viddec_parser); @@ -562,11 +431,7 @@ static void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ -#ifdef VBP void viddec_h264_flush(void *parent, void *ctxt) -#else -static void 
viddec_h264_flush(void *parent, void *ctxt) -#endif { int i; struct h264_viddec_parser* parser = ctxt; @@ -590,17 +455,4 @@ static void viddec_h264_flush(void *parent, void *ctxt) return; } -#ifndef VBP -void viddec_h264_get_ops(viddec_parser_ops_t *ops) -{ - ops->init = viddec_h264_init; - - ops->parse_syntax = viddec_h264_parse; - ops->get_cxt_size = viddec_h264_get_context_size; - ops->is_wkld_done = viddec_h264_wkld_done; - ops->is_frame_start = viddec_h264_is_frame_start; - ops->flush = viddec_h264_flush; - return; -} -#endif -- cgit v1.2.3 From 014d73906cd874387e9e05dbdb608e95d51df8ac Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Wed, 16 Oct 2013 13:12:46 +0800 Subject: libmix: Unify va interface to raw data buffer mode BZ: 144930 Unify va interface to raw data buffer mode Change-Id: I229b8a0238625a4c044efbbc1c007783aa1d2d74 Signed-off-by: Tianmi Chen Reviewed-on: http://android.intel.com:8080/139261 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- videodecoder/VideoDecoderBase.cpp | 125 +++++++++++++++++------------------ videodecoder/VideoDecoderBase.h | 3 +- videodecoder/VideoDecoderInterface.h | 2 +- 3 files changed, 62 insertions(+), 68 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 06fa57a..7f04e3d 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -589,6 +589,7 @@ Decode_Status VideoDecoderBase::outputSurfaceBuffer(void) { status = getRawDataFromSurface(); CHECK_STATUS(); } + // frame is successfully decoded to the current surface, it is ready for output if (mShowFrame) { mAcquiredBuffer->renderBuffer.renderDone = false; @@ -693,7 +694,6 @@ Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) { releaseSurfaceBuffer(); goto exit; } - status = outputSurfaceBuffer(); // fall through exit: @@ -1068,91 +1068,81 @@ Decode_Status VideoDecoderBase::mapSurface(void){ return DECODE_SUCCESS; } -Decode_Status VideoDecoderBase::getRawDataFromSurface(void) { - if (mAcquiredBuffer == NULL) { - return DECODE_FAIL; +Decode_Status VideoDecoderBase::getRawDataFromSurface(VideoRenderBuffer *renderBuffer, uint8_t *pRawData, uint32_t *pSize, bool internal) { + if (internal) { + if (mAcquiredBuffer == NULL) { + return DECODE_FAIL; + } + renderBuffer = &(mAcquiredBuffer->renderBuffer); } VAStatus vaStatus; VAImageFormat imageFormat; VAImage vaImage; - vaStatus = vaSyncSurface(mVADisplay, mAcquiredBuffer->renderBuffer.surface); + vaStatus = vaSyncSurface(renderBuffer->display, renderBuffer->surface); CHECK_VA_STATUS("vaSyncSurface"); - vaImage.image_id = VA_INVALID_ID; - // driver currently only supports NV12 and IYUV format. 
- // byte_order information is from driver and hard-coded here - imageFormat.fourcc = VA_FOURCC_NV12; - imageFormat.byte_order = VA_LSB_FIRST; - imageFormat.bits_per_pixel = 16; - - vaStatus = vaCreateImage( - mVADisplay, - &imageFormat, - mVideoFormatInfo.width, - mVideoFormatInfo.height, - &vaImage); - CHECK_VA_STATUS("vaCreateImage"); - - vaStatus = vaGetImage( - mVADisplay, - mAcquiredBuffer->renderBuffer.surface, - 0, - 0, - vaImage.width, - vaImage.height, - vaImage.image_id); - CHECK_VA_STATUS("vaGetImage"); + vaStatus = vaDeriveImage(renderBuffer->display, renderBuffer->surface, &vaImage); + CHECK_VA_STATUS("vaDeriveImage"); void *pBuf = NULL; - vaStatus = vaMapBuffer(mVADisplay, vaImage.buf, &pBuf); + vaStatus = vaMapBuffer(renderBuffer->display, vaImage.buf, &pBuf); CHECK_VA_STATUS("vaMapBuffer"); - VideoFrameRawData *rawData = NULL; - if (mAcquiredBuffer->renderBuffer.rawData == NULL) { - rawData = new VideoFrameRawData; - if (rawData == NULL) { - return DECODE_MEMORY_FAIL; - } - memset(rawData, 0, sizeof(VideoFrameRawData)); - mAcquiredBuffer->renderBuffer.rawData = rawData; - } else { - rawData = mAcquiredBuffer->renderBuffer.rawData; - } // size in NV12 format uint32_t cropWidth = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight); uint32_t cropHeight = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop); int32_t size = cropWidth * cropHeight * 3 / 2; - if (rawData->data != NULL && rawData->size != size) { - delete [] rawData->data; - rawData->data = NULL; - rawData->size = 0; - } - if (rawData->data == NULL) { - rawData->data = new uint8_t [size]; + if (internal) { + VideoFrameRawData *rawData = NULL; + if (renderBuffer->rawData == NULL) { + rawData = new VideoFrameRawData; + if (rawData == NULL) { + return DECODE_MEMORY_FAIL; + } + memset(rawData, 0, sizeof(VideoFrameRawData)); + renderBuffer->rawData = rawData; + } else { + rawData = renderBuffer->rawData; + } + + if (rawData->data != NULL && rawData->size != size) { + delete [] rawData->data; + rawData->data = NULL; + rawData->size = 0; + } if (rawData->data == NULL) { - return DECODE_MEMORY_FAIL; + rawData->data = new uint8_t [size]; + if (rawData->data == NULL) { + return DECODE_MEMORY_FAIL; + } } + + rawData->own = true; // allocated by this library + rawData->width = cropWidth; + rawData->height = cropHeight; + rawData->pitch[0] = cropWidth; + rawData->pitch[1] = cropWidth; + rawData->pitch[2] = 0; // interleaved U/V, two planes + rawData->offset[0] = 0; + rawData->offset[1] = cropWidth * cropHeight; + rawData->offset[2] = cropWidth * cropHeight * 3 / 2; + rawData->size = size; + rawData->fourcc = 'NV12'; + + pRawData = rawData->data; + } else { + *pSize = size; } - rawData->own = true; // allocated by this library - rawData->width = cropWidth; - rawData->height = cropHeight; - rawData->pitch[0] = cropWidth; - rawData->pitch[1] = cropWidth; - rawData->pitch[2] = 0; // interleaved U/V, two planes - rawData->offset[0] = 0; - rawData->offset[1] = cropWidth * cropHeight; - rawData->offset[2] = cropWidth * cropHeight * 3 / 2; - rawData->size = size; - rawData->fourcc = 'NV12'; + if (size == (int32_t)vaImage.data_size) { - memcpy(rawData->data, pBuf, size); + memcpy(pRawData, pBuf, size); } else { // copy Y data uint8_t *src = (uint8_t*)pBuf; - uint8_t *dst = rawData->data; + uint8_t *dst = pRawData; int32_t row = 0; for (row = 0; row < cropHeight; row++) { memcpy(dst, src, cropWidth); @@ -1167,10 +1157,13 @@ Decode_Status 
VideoDecoderBase::getRawDataFromSurface(void) { src += vaImage.pitches[1]; } } - // TODO: image may not get destroyed if error happens. - if (vaImage.image_id != VA_INVALID_ID) { - vaDestroyImage(mVADisplay, vaImage.image_id); - } + + vaStatus = vaUnmapBuffer(renderBuffer->display, vaImage.buf); + CHECK_VA_STATUS("vaUnmapBuffer"); + + vaStatus = vaDestroyImage(renderBuffer->display, vaImage.image_id); + CHECK_VA_STATUS("vaDestroyImage"); + return DECODE_SUCCESS; } diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 292473c..7db79f5 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -87,6 +87,8 @@ protected: return ((a + 15) & (~15)); } + virtual Decode_Status getRawDataFromSurface(VideoRenderBuffer *renderBuffer = NULL, uint8_t *pRawData = NULL, uint32_t *pSize = NULL, bool internal = true); + #ifdef USE_AVC_SHORT_FORMAT Decode_Status updateBuffer(uint8_t *buffer, int32_t size, void** vbpData); Decode_Status setParserType(_vbp_parser_type type); @@ -95,7 +97,6 @@ protected: virtual Decode_Status isHardwareSupported(VAProfile profile); private: Decode_Status mapSurface(void); - Decode_Status getRawDataFromSurface(void); void initSurfaceBuffer(bool reset); bool mInitialized; diff --git a/videodecoder/VideoDecoderInterface.h b/videodecoder/VideoDecoderInterface.h index 66d62e3..4373ad5 100644 --- a/videodecoder/VideoDecoderInterface.h +++ b/videodecoder/VideoDecoderInterface.h @@ -40,7 +40,7 @@ public: virtual const VideoFormatInfo* getFormatInfo(void) = 0; virtual Decode_Status signalRenderDone(void * graphichandler) = 0; virtual bool checkBufferAvail() = 0; - + virtual Decode_Status getRawDataFromSurface(VideoRenderBuffer *renderBuffer = NULL, uint8_t *pRawData = NULL, uint32_t *pSize = NULL, bool internal = true) = 0; }; #endif /* VIDEO_DECODER_INTERFACE_H_ */ -- cgit v1.2.3 From 3813ec750c8bf95aedf95ade20ef9543c79f0338 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Tue, 10 Sep 2013 14:01:14 +0800 Subject: [port from R42TW] Fix race condition for frame type judging BZ: 137717 When starting to encode a frame, the user may send an IDR request at the same moment, so a race condition on the frame type is possible; the frame type is therefore re-calculated in sendEncodeCommand. Change-Id: Iddf6b6569bc9f85efda8cc3372d2e65c699e3021 Signed-off-by: Zhao Liang Reviewed-on: http://android.intel.com:8080/130948 Tested-by: Ding, Haitao Reviewed-by: cactus Tested-by: cactus Reviewed-on: http://android.intel.com:8080/140121 Reviewed-by: Shi, PingX Tested-by: Shi, PingX --- videoencoder/VideoEncoderAVC.cpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 6deeac6..0c021dc 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -656,7 +656,11 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) { mFrameNum = 0; ret = renderSequenceParams(task); CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); - mNewHeader = false; //Set to require new header filed to false + if (mNewHeader) { + mNewHeader = false; //Set to require new header filed to false + mFrameNum = 0; //reset mFrameNum to 0 + updateFrameInfo(task); //recalculate frame info if mNewHeader is set true after PrepareFrameInfo in encode() + } } if (mRenderMaxSliceSize && mVideoParamsAVC.maxSliceSize != 0) { -- cgit v1.2.3 From 5cbaaedd3eb4a499756e1a7f892ec8582e458835 Mon Sep 17 00:00:00 2001 From: "Gu, Wangyi" Date: Tue, 15 Oct 2013 21:22:30 +0800 Subject: libmix: remove
viddec_pm_peek_bits to optimize the mpeg4 parser BZ: 131068 In viddec_mp4_parse, remove the unnecessary viddec_pm_peek_bits call; this optimization halves the number of viddec_pm_utils_bstream_peekbits calls. Change-Id: I20b950f61eff41291e5baa3cb2823851e26b7bc7 Signed-off-by: Gu, Wangyi Reviewed-on: http://android.intel.com:8080/140123 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c | 201 ++++++++++++++++--------------- 1 file changed, 105 insertions(+), 96 deletions(-) diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c index f8a95ec..33e720d 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c @@ -70,119 +70,128 @@ uint32_t viddec_mp4_parse(void *parent, void *ctxt) cxt = (viddec_mp4_parser_t *)ctxt; is_svh = (cxt->cur_sc_prefix) ? false: true; - if ((getbits = viddec_pm_peek_bits(parent, &sc, 32)) == -1) - { - ETRACE("Start code not found\n"); - return VIDDEC_PARSE_ERROR; - } if (!is_svh) { - viddec_pm_get_bits(parent, &sc, 32); - sc = sc & 0xFF; - cxt->current_sc = sc; - cxt->current_sc |= 0x100; - VTRACE("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc); - - switch (sc) - { - case MP4_SC_VISUAL_OBJECT_SEQUENCE: - { - status = mp4_Parse_VisualSequence(parent, cxt); - cxt->prev_sc = MP4_SC_VISUAL_OBJECT_SEQUENCE; - VTRACE("MP4_VISUAL_OBJECT_SEQUENCE_SC: \n"); - break; - } - case MP4_SC_VISUAL_OBJECT_SEQUENCE_EC: - {/* Not required to do anything */ - VTRACE("MP4_SC_VISUAL_OBJECT_SEQUENCE_EC"); - break; - } - case MP4_SC_USER_DATA: - { /* Copy userdata to user-visible buffer (EMIT) */ - VTRACE("MP4_USER_DATA_SC: \n"); - break; - } - case MP4_SC_GROUP_OF_VOP: - { - status = mp4_Parse_GroupOfVideoObjectPlane(parent, cxt); - cxt->prev_sc = MP4_SC_GROUP_OF_VOP; - VTRACE("MP4_GROUP_OF_VOP_SC:0x%.8X\n", status); - break; - } - case MP4_SC_VIDEO_SESSION_ERROR: - {/* Not required to do anything?? */ - VTRACE("MP4_SC_VIDEO_SESSION_ERROR"); - break; - } - case MP4_SC_VISUAL_OBJECT: + if (viddec_pm_get_bits(parent, &sc, 32) != -1) { - status = mp4_Parse_VisualObject(parent, cxt); - cxt->prev_sc = MP4_SC_VISUAL_OBJECT; - VTRACE("MP4_VISUAL_OBJECT_SC: status=%.8X\n", status); - break; - } - case MP4_SC_VIDEO_OBJECT_PLANE: - { - /* We must decode the VOP Header information, it does not end on a byte boundary, so we need to emit - a starting bit offset after parsing the header.
*/ - status = mp4_Parse_VideoObjectPlane(parent, cxt); - status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); - // TODO: Fix this for interlaced - cxt->is_frame_start = true; - cxt->sc_seen |= MP4_SC_SEEN_VOP; + sc = sc & 0xFF; + cxt->current_sc = sc; + cxt->current_sc |= 0x100; + VTRACE("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc); - VTRACE("MP4_VIDEO_OBJECT_PLANE_SC: status=0x%.8X\n", status); - break; - } - case MP4_SC_STUFFING: - { - VTRACE("MP4_SC_STUFFING"); - break; - } - default: - { - if ( (sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX) ) + switch (sc) + { + case MP4_SC_VISUAL_OBJECT_SEQUENCE: + { + status = mp4_Parse_VisualSequence(parent, cxt); + cxt->prev_sc = MP4_SC_VISUAL_OBJECT_SEQUENCE; + VTRACE("MP4_VISUAL_OBJECT_SEQUENCE_SC: \n"); + break; + } + case MP4_SC_VISUAL_OBJECT_SEQUENCE_EC: + {/* Not required to do anything */ + VTRACE("MP4_SC_VISUAL_OBJECT_SEQUENCE_EC"); + break; + } + case MP4_SC_USER_DATA: + { /* Copy userdata to user-visible buffer (EMIT) */ + VTRACE("MP4_USER_DATA_SC: \n"); + break; + } + case MP4_SC_GROUP_OF_VOP: + { + status = mp4_Parse_GroupOfVideoObjectPlane(parent, cxt); + cxt->prev_sc = MP4_SC_GROUP_OF_VOP; + VTRACE("MP4_GROUP_OF_VOP_SC:0x%.8X\n", status); + break; + } + case MP4_SC_VIDEO_SESSION_ERROR: + {/* Not required to do anything?? */ + VTRACE("MP4_SC_VIDEO_SESSION_ERROR"); + break; + } + case MP4_SC_VISUAL_OBJECT: + { + status = mp4_Parse_VisualObject(parent, cxt); + cxt->prev_sc = MP4_SC_VISUAL_OBJECT; + VTRACE("MP4_VISUAL_OBJECT_SC: status=%.8X\n", status); + break; + } + case MP4_SC_VIDEO_OBJECT_PLANE: + { + /* We must decode the VOP Header information, it does not end on a byte boundary, so we need to emit + a starting bit offset after parsing the header. 
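+ (The offset is presumably emitted as part of the workload by
+ viddec_mp4_decodevop_and_emitwkld(), which is called immediately after the
+ header parse below.)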
*/ + status = mp4_Parse_VideoObjectPlane(parent, cxt); + status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); + // TODO: Fix this for interlaced + cxt->is_frame_start = true; + cxt->sc_seen |= MP4_SC_SEEN_VOP; + + VTRACE("MP4_VIDEO_OBJECT_PLANE_SC: status=0x%.8X\n", status); + break; + } + case MP4_SC_STUFFING: { - status = mp4_Parse_VideoObjectLayer(parent, cxt); - cxt->sc_seen = MP4_SC_SEEN_VOL; - cxt->prev_sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN; - VTRACE("MP4_VIDEO_OBJECT_LAYER_MIN_SC:status=0x%.8X\n", status); - sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN; + VTRACE("MP4_SC_STUFFING"); + break; } - // sc is unsigned and will be >= 0, so no check needed for sc >= MP4_SC_VIDEO_OBJECT_MIN - else if (sc <= MP4_SC_VIDEO_OBJECT_MAX) + default: { - // If there is more data, it is short video header, else the next start code is expected to be VideoObjectLayer - getbits = viddec_pm_get_bits(parent, &sc, 22); - if (getbits != -1) + if ( (sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX) ) { - cxt->current_sc = sc; - status = mp4_Parse_VideoObject_svh(parent, cxt); - status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); - cxt->sc_seen = MP4_SC_SEEN_SVH; - cxt->is_frame_start = true; - VTRACE("MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc); + status = mp4_Parse_VideoObjectLayer(parent, cxt); + cxt->sc_seen = MP4_SC_SEEN_VOL; + cxt->prev_sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN; + VTRACE("MP4_VIDEO_OBJECT_LAYER_MIN_SC:status=0x%.8X\n", status); + sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN; + } + // sc is unsigned and will be >= 0, so no check needed for sc >= MP4_SC_VIDEO_OBJECT_MIN + else if (sc <= MP4_SC_VIDEO_OBJECT_MAX) + { + // If there is more data, it is short video header, else the next start code is expected to be VideoObjectLayer + getbits = viddec_pm_get_bits(parent, &sc, 22); + if (getbits != -1) + { + cxt->current_sc = sc; + status = mp4_Parse_VideoObject_svh(parent, cxt); + status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); + cxt->sc_seen = MP4_SC_SEEN_SVH; + cxt->is_frame_start = true; + VTRACE("MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc); + } + } + else + { + ETRACE("UNKWON Cod:0x%08X\n", sc); } } - else - { - ETRACE("UNKWON Cod:0x%08X\n", sc); + break; } } - break; + else + { + ETRACE("Start code not found\n"); + return VIDDEC_PARSE_ERROR; } } else { - viddec_pm_get_bits(parent, &sc, 22); - cxt->current_sc = sc; - VTRACE("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc); - status = mp4_Parse_VideoObject_svh(parent, cxt); - status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); - cxt->sc_seen = MP4_SC_SEEN_SVH; - cxt->is_frame_start = true; - VTRACE("SVH: MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc); + if (viddec_pm_get_bits(parent, &sc, 22) != -1) + { + cxt->current_sc = sc; + VTRACE("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc); + status = mp4_Parse_VideoObject_svh(parent, cxt); + status = viddec_mp4_decodevop_and_emitwkld(parent, cxt); + cxt->sc_seen = MP4_SC_SEEN_SVH; + cxt->is_frame_start = true; + VTRACE("SVH: MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc); + } + else + { + ETRACE("Start code not found\n"); + return VIDDEC_PARSE_ERROR; + } } // Current sc becomes the previous sc -- cgit v1.2.3 From ed7668b0302985949f30d6cf6017e53bdc63ab5c Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Fri, 18 Oct 2013 04:11:07 +0800 Subject: Optimize the h264_get_codeNum in libmix BZ: 145915 Optimize the h264_get_codeNum in libmix to accelerate the h264 parsing 
process. Change-Id: I4a43da8331422f0d686dcee49545ab99bd7daaff Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/140125 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- mixvbp/include/vbp_trace.h | 2 +- mixvbp/vbp_plugin/h264/h264parse_bsd.c | 160 ++++++++++++++------------------- 2 files changed, 70 insertions(+), 92 deletions(-) mode change 100755 => 100644 mixvbp/vbp_plugin/h264/h264parse_bsd.c diff --git a/mixvbp/include/vbp_trace.h b/mixvbp/include/vbp_trace.h index fde232c..ba916b9 100755 --- a/mixvbp/include/vbp_trace.h +++ b/mixvbp/include/vbp_trace.h @@ -37,7 +37,7 @@ vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) // For Android OS -#define LOG_NDEBUG 0 +//#define LOG_NDEBUG 0 #define LOG_TAG "MixVBP" diff --git a/mixvbp/vbp_plugin/h264/h264parse_bsd.c b/mixvbp/vbp_plugin/h264/h264parse_bsd.c old mode 100755 new mode 100644 index 84f94b2..e5664ce --- a/mixvbp/vbp_plugin/h264/h264parse_bsd.c +++ b/mixvbp/vbp_plugin/h264/h264parse_bsd.c @@ -14,9 +14,9 @@ #include "h264.h" #include "h264parse.h" #include "viddec_parser_ops.h" - - - +#include "viddec_pm_utils_bstream.h" +#include "viddec_pm.h" +#include "vbp_trace.h" /** @@ -33,7 +33,7 @@ uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo) { int32_t leadingZeroBits= 0; - uint32_t temp = 0, match = 0, noOfBits = 0, count = 0; + uint32_t temp = 0, match = 0, noOfBits = 0, count = 0; uint32_t codeNum =0; uint32_t bits_offset =0, byte_offset =0; uint8_t is_emul =0; @@ -42,120 +42,103 @@ uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo) uint32_t bits_need_add_in_first_byte =0; int32_t bits_operation_result=0; - //remove warning - pInfo = pInfo; + viddec_pm_utils_bstream_cxt_t *cxt = &((viddec_pm_cxt_t *)parent)->getbits; + viddec_pm_utils_bstream_buf_cxt_t* bstream = &cxt->bstrm_buf; + uint8_t curr_byte; - ////// Step 1: parse through zero bits until we find a bit with value 1. - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + bits_offset = bstream->buf_bitoff; + + uint32_t total_bits, act_bytes; + uint32_t isemul = 0; + uint8_t *curr_addr = bstream->buf + bstream->buf_index; + uint32_t i = 0; + VTRACE("bstream->buf_bitoff = %d", bstream->buf_bitoff); + VTRACE("bstream->buf_index = %d", bstream->buf_index); while (!match) { - if ((bits_offset != 0) && ( is_first_byte == 1)) + curr_byte = *curr_addr++; + VTRACE("curr_byte = 0x%x", curr_byte); + if (cxt->phase >= 2 && curr_byte == 0x03) { + curr_byte = *curr_addr++; + isemul = 1; + cxt->phase = 0; + } + noOfBits = 8; + if (is_first_byte) { - //we handle byte at a time, if we have offset then for first - // byte handle only 8 - offset bits - noOfBits = (uint8_t)(8 - bits_offset); - bits_operation_result = viddec_pm_peek_bits(parent, &temp, noOfBits); - - - temp = (temp << bits_offset); - if (temp != 0) + is_first_byte = 0; + if (bits_offset != 0) { - bits_need_add_in_first_byte = bits_offset; + noOfBits = 8 - bits_offset; + curr_byte = curr_byte << bits_offset; } - is_first_byte = 0; } else { - noOfBits = 8;/* always 8 bits as we read a byte at a time */ - bits_operation_result = viddec_pm_peek_bits(parent, &temp, 8); - } - - if (-1 == bits_operation_result) - { - return MAX_INT32_VALUE; + cxt->phase = curr_byte? 0: cxt->phase + 1; } - if (temp != 0) + if (curr_byte != 0) { - // if byte!=0 we have at least one bit with value 1. 
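/* Exp-Golomb (ue(v)) structure, which this loop decodes a byte at a time:
   count leadingZeroBits up to and including the first 1 bit, then
   codeNum = 2^leadingZeroBits - 1 + read_bits(leadingZeroBits),
   matching the update near the end of this function. */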
- count = 1; - while (((temp & 0x80) != 0x80) && (count <= noOfBits)) + count=1; + VTRACE("curr_byte & 0x80 = 0x%x", curr_byte & 0x80); + while (((curr_byte & 0x80) != 0x80) && (count <= noOfBits)) { + VTRACE("curr_byte & 0x80 = 0x%x", curr_byte & 0x80); count++; - temp = temp <<1; + curr_byte = curr_byte <<1; } - //At this point we get the bit position of 1 in current byte(count). - match = 1; leadingZeroBits += count; } else { - // we don't have a 1 in current byte leadingZeroBits += noOfBits; } - if (!match) + VTRACE("count = %d", count); + + total_bits = match ? count : noOfBits; + total_bits = noOfBits == 8? total_bits: total_bits + bits_offset; + + VTRACE("total_bits = %d", total_bits); + + act_bytes = 1 + isemul; + cxt->emulation_byte_counter += isemul; + isemul = 0; + if ((total_bits & 0x7) == 0) { - //actually move the bitoff by viddec_pm_get_bits - viddec_pm_get_bits(parent, &temp, noOfBits); + bstream->buf_bitoff = 0; + bstream->buf_index +=act_bytes; } else { - //actually move the bitoff by viddec_pm_get_bits - viddec_pm_get_bits(parent, &temp, count); + bstream->buf_bitoff = total_bits & 0x7; + bstream->buf_index += (act_bytes - 1); } - } - ////// step 2: Now read the next (leadingZeroBits-1) bits to get the encoded value. + VTRACE("leadingZeroBits = %d", leadingZeroBits); + VTRACE("bstream->buf_bitoff = %x", bstream->buf_bitoff); + VTRACE("bstream->buf_index = %x", bstream->buf_index); if (match) { - - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - /* bit position in current byte */ - //count = (uint8_t)((leadingZeroBits + bits_offset)& 0x7); - count = ((count + bits_need_add_in_first_byte)& 0x7); - - leadingZeroBits --; - length = leadingZeroBits; + length = --leadingZeroBits; codeNum = 0; - noOfBits = 8 - count; - - - while (leadingZeroBits > 0) + if (length > 0) { - if (noOfBits < (uint32_t)leadingZeroBits) - { - viddec_pm_get_bits(parent, &temp, noOfBits); - - - codeNum = (codeNum << noOfBits) | temp; - leadingZeroBits -= noOfBits; - } - else + bits_operation_result = viddec_pm_get_bits(parent, &temp, leadingZeroBits); + if (-1 == bits_operation_result) { - viddec_pm_get_bits(parent, &temp, leadingZeroBits); - - codeNum = (codeNum << leadingZeroBits) | temp; - leadingZeroBits = 0; + VTRACE("h264_get_codeNum: viddec_pm_get_bits error!"); + length = 0; } - - - noOfBits = 8; + codeNum = temp; } - // update codeNum = 2 ** (leadingZeroBits) -1 + read_bits(leadingZeroBits). codeNum = codeNum + (1 << length) -1; - - } - - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - if (bits_offset!=0) - { - viddec_pm_peek_bits(parent, &temp, 8-bits_offset); } return codeNum; @@ -173,8 +156,8 @@ int32_t h264_GetVLCElement(void *parent, h264_Info* pInfo, uint8_t bIsSigned) if (bIsSigned) //get signed integer golomb code else the value is unsigned { - sign = (sval & 0x1) ? 
1 : -1; - sval = (sval + 1) >> 1; + sign = (sval & 0x1)?1:-1; + sval = (sval +1) >> 1; sval = sval * sign; } @@ -188,11 +171,11 @@ uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo) { uint8_t cnt = 0; - uint8_t is_emul =0; - uint8_t cur_byte = 0; - int32_t shift_bits = 0; + uint8_t is_emul =0; + uint8_t cur_byte = 0; + int32_t shift_bits =0; uint32_t ctr_bit = 0; - uint32_t bits_offset = 0, byte_offset =0; + uint32_t bits_offset =0, byte_offset =0; //remove warning pInfo = pInfo; @@ -202,12 +185,12 @@ uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo) viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - shift_bits = 7 - bits_offset; + shift_bits = 7-bits_offset; // read one byte viddec_pm_get_cur_byte(parent, &cur_byte); - ctr_bit = (cur_byte >> (shift_bits--)) & 0x01; + ctr_bit = ((cur_byte)>> (shift_bits--)) & 0x01; // a stop bit has to be one if (ctr_bit == 0) @@ -215,13 +198,8 @@ uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo) while (shift_bits >= 0 && !cnt) { - cnt |= (((cur_byte) >> (shift_bits--)) & 0x01); // set up control bit + cnt |= (((cur_byte)>> (shift_bits--)) & 0x01); // set up control bit } return (cnt); } - - - -///////////// EOF///////////////////// - -- cgit v1.2.3 From 0507efa626325499c47020dfd168b8579d279997 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Fri, 18 Oct 2013 15:39:44 +0800 Subject: Optimize getbits and peekbits functions in libmix BZ: 146000 Optimize getbits and peekbits functions in libmix. The getbits and peekbits are divided into emulation prevention and non-emulation prevention versions. Change-Id: I206fa743cd132b3005d096ea9bb8ff57466fb333 Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/140126 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- .../vbp_manager/include/viddec_pm_utils_bstream.h | 3 +- mixvbp/vbp_manager/viddec_pm_parser_ops.c | 10 +- mixvbp/vbp_manager/viddec_pm_utils_bstream.c | 361 +++++++++++++++++++-- 3 files changed, 348 insertions(+), 26 deletions(-) diff --git a/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h index ab2569f..3bf1857 100755 --- a/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h +++ b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h @@ -49,7 +49,8 @@ void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_ int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits); -int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip); +int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits); +int32_t viddec_pm_utils_bstream_getbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits); int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t *cxt, uint8_t *byte); diff --git a/mixvbp/vbp_manager/viddec_pm_parser_ops.c b/mixvbp/vbp_manager/viddec_pm_parser_ops.c index 8c0a1ec..b10eb2d 100755 --- a/mixvbp/vbp_manager/viddec_pm_parser_ops.c +++ b/mixvbp/vbp_manager/viddec_pm_parser_ops.c @@ -12,10 +12,10 @@ int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits) viddec_pm_cxt_t *cxt; cxt = (viddec_pm_cxt_t *)parent; - ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits, 1); + ret = viddec_pm_utils_bstream_getbits(&(cxt->getbits), data, num_bits); if (ret == -1) { - VTRACE("FAILURE?
getbits returned %d", ret); + VTRACE("FAILURE: getbits returned %d", ret); } return ret; @@ -27,7 +27,11 @@ int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits) viddec_pm_cxt_t *cxt; cxt = (viddec_pm_cxt_t *)parent; - ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits, 0); + ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits); + if (ret == -1) + { + VTRACE("FAILURE: peekbits returned %d", ret); + } return ret; } diff --git a/mixvbp/vbp_manager/viddec_pm_utils_bstream.c b/mixvbp/vbp_manager/viddec_pm_utils_bstream.c index 72d210b..edc8061 100755 --- a/mixvbp/vbp_manager/viddec_pm_utils_bstream.c +++ b/mixvbp/vbp_manager/viddec_pm_utils_bstream.c @@ -13,6 +13,12 @@ void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt); uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index); extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap); +static int32_t viddec_pm_utils_bstream_peekbits_noemul(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits); +static int32_t viddec_pm_utils_bstream_peekbits_emul(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits); +static int32_t viddec_pm_utils_bstream_getbits_noemul(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits); +static int32_t viddec_pm_utils_bstream_getbits_emul(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits); + + /* Bytes left in cubby buffer which were not consumed yet */ static inline uint32_t viddec_pm_utils_bstream_bytesincubby(viddec_pm_utils_bstream_buf_cxt_t *cxt) { @@ -233,22 +239,206 @@ int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uin } /* - Function to get N bits ( N<= 32). + Function to get N bits (N<= 32). This function will update the bitstream position. +*/ +int32_t viddec_pm_utils_bstream_getbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits) +{ + if (cxt->is_emul_reqd) { + return viddec_pm_utils_bstream_getbits_emul(cxt, out, num_bits); + } else { + return viddec_pm_utils_bstream_getbits_noemul(cxt, out, num_bits); + } +} + +/* + Function to get N bits (N<= 32).This function will NOT update the bitstream position. */ -int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip) +int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits) +{ + if (cxt->is_emul_reqd) { + return viddec_pm_utils_bstream_peekbits_emul(cxt, out, num_bits); + } else { + return viddec_pm_utils_bstream_peekbits_noemul(cxt, out, num_bits); + } +} + +static inline int32_t getbytes_noemul(viddec_pm_utils_bstream_buf_cxt_t *bstream, + viddec_pm_utils_getbits_t *data,/* gets populated with read bytes*/ + uint32_t *act_bytes, /* actual number of bytes read can be more due to emulation prev bytes*/ + uint32_t *phase, /* Phase for emulation */ + uint32_t num_bytes,/* requested number of bytes*/ + uint8_t is_offset_zero /* Are we on aligned byte position for first byte*/) { + int32_t ret = 1; + uint8_t cur_byte = 0, valid_bytes_read = 0; + *act_bytes = 0; + while (valid_bytes_read < num_bytes) + { + cur_byte = bstream->buf[bstream->buf_index + *act_bytes]; + data->byte[valid_bytes_read] = cur_byte; + valid_bytes_read++; + *act_bytes +=1; + } + /* Check to see if we reached end during above operation. 
We might be out of range buts it safe since our array + has at least MIN_DATA extra bytes and the maximum out of bounds we will go is 5 bytes */ + if ((bstream->buf_index + *act_bytes -1) >= bstream->buf_end) + { + ret = -1; + } + return ret; +} + + +/* This function populates requested number of bytes into data parameter, skips emulation prevention bytes if needed */ +static inline int32_t getbytes_emul(viddec_pm_utils_bstream_buf_cxt_t *bstream, + viddec_pm_utils_getbits_t *data,/* gets populated with read bytes*/ + uint32_t *act_bytes, /* actual number of bytes read can be more due to emulation prev bytes*/ + uint32_t *phase, /* Phase for emulation */ + uint32_t num_bytes,/* requested number of bytes*/ + uint8_t is_offset_zero /* Are we on aligned byte position for first byte*/) +{ + int32_t ret = 1; + uint8_t cur_byte = 0, valid_bytes_read = 0; + uint32_t actual_bytes = 0; + *act_bytes = 0; + + uint8_t *curr_pos = (uint8_t *)(bstream->buf + bstream->buf_index); + + while (valid_bytes_read < num_bytes) + { + cur_byte = *curr_pos++; + // ITRACE("getbytes_emul cur_byte = 0x%x", cur_byte); + if ((cur_byte == 0x3) && (*phase == 2)) + {/* skip emulation byte. we update the phase only if emulation prevention is enabled */ + *phase = 0; + } + else + { + data->byte[valid_bytes_read] = cur_byte; + /* + We only update phase for first byte if bit offset is 0. If its not 0 then it was already accounted for in the past. + From second byte onwards we always look to update phase. + */ + if ((actual_bytes != 0) || (is_offset_zero)) + { + if (cur_byte == 0) + { + /* Update phase only if emulation prevention is required */ + *phase += (*phase < 2 ? 1:0 ); + } + else + { + *phase=0; + } + } + valid_bytes_read++; + } + actual_bytes++; + } + /* + Check to see if we reached end during above operation. 
We might be out of range buts it safe since our array + has at least MIN_DATA extra bytes and the maximum out of bounds we will go is 5 bytes + */ + + if ((bstream->buf_index + actual_bytes -1) >= bstream->buf_end) + { + ret = -1; + } + *act_bytes = actual_bytes; + return ret; +} + +static int32_t viddec_pm_utils_bstream_getbits_emul(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits) +{ + uint32_t data_left=0; int32_t ret = -1; + /* STEP 1: Make sure that we have at least minimum data before we calculate bits */ - viddec_pm_utils_check_bstream_reload(cxt, &data_left); + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + data_left = bstream->buf_end - bstream->buf_index; + + uint32_t bytes_required=0; + uint32_t act_bytes = 0; + uint32_t phase; + viddec_pm_utils_getbits_t data; if ((num_bits <= 32) && (num_bits > 0) && (data_left != 0)) { - uint32_t bytes_required = 0; - viddec_pm_utils_bstream_buf_cxt_t *bstream; + bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3; - bstream = &(cxt->bstrm_buf); - bytes_required = (bstream->buf_bitoff + num_bits + 7) >> 3; + /* Step 2: Make sure we have bytes for requested bits */ + if (bytes_required <= data_left) + { + phase = cxt->phase; + /* Step 3: Due to emualtion prevention bytes sometimes the bytes_required > actual_required bytes */ + if (getbytes_emul(bstream, &data, &act_bytes, &phase, bytes_required, (bstream->buf_bitoff == 0)) != -1) + { + uint32_t total_bits=0; + uint32_t shift_by=0; + /* zero out upper bits */ + /* LIMITATION:For some reason compiler is optimizing it to NOP if i do both shifts + in single statement */ + data.byte[0] <<= bstream->buf_bitoff; + data.byte[0] >>= bstream->buf_bitoff; + data.word[0] = SWAP_WORD(data.word[0]); + data.word[1] = SWAP_WORD(data.word[1]); + total_bits = num_bits+bstream->buf_bitoff; + + if (total_bits > 32) + { + /* We have to use both the words to get required data */ + shift_by = total_bits - 32; + data.word[0] = (data.word[0] << shift_by) | ( data.word[1] >> (32 - shift_by)); + } + else + { + shift_by = 32 - total_bits; + data.word[0] = data.word[0] >> shift_by; + } + *out = data.word[0]; + + /* update au byte position if needed */ + if ((total_bits & 0x7) == 0) + { + bstream->buf_bitoff = 0; + bstream->buf_index +=act_bytes; + } + else + { + bstream->buf_bitoff = total_bits & 0x7; + bstream->buf_index +=(act_bytes - 1); + } + cxt->phase = phase; + if (act_bytes > bytes_required) + { + cxt->emulation_byte_counter += act_bytes - bytes_required; + } + + ret=1; + } + } + } + return ret; + +} + +static int32_t viddec_pm_utils_bstream_getbits_noemul(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits) +{ + uint32_t data_left=0; + int32_t ret = -1; + /* STEP 1: Make sure that we have at least minimum data before we calculate bits */ + + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + data_left = bstream->buf_end - bstream->buf_index; + uint32_t bytes_required=0; + if ((num_bits <= 32) && (num_bits > 0) && (data_left != 0)) + { + bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3; /* Step 2: Make sure we have bytes for requested bits */ if (bytes_required <= data_left) @@ -256,21 +446,98 @@ int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uin uint32_t act_bytes, phase; viddec_pm_utils_getbits_t data; phase = cxt->phase; - /* Step 3: Due to emualtion prevention bytes sometimes the bytes_required < actual_required bytes */ - if (viddec_pm_utils_getbytes(bstream, &data, 
&act_bytes, &phase, bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1) + /* Step 3: Due to emualtion prevention bytes sometimes the bytes_required > actual_required bytes */ + if (getbytes_noemul(bstream, &data, &act_bytes, &phase, bytes_required, (bstream->buf_bitoff == 0)) != -1) { - uint32_t total_bits = 0; - uint32_t shift_by = 0; + uint32_t total_bits=0; + uint32_t shift_by=0; /* zero out upper bits */ /* LIMITATION:For some reason compiler is optimizing it to NOP if i do both shifts in single statement */ data.byte[0] <<= bstream->buf_bitoff; data.byte[0] >>= bstream->buf_bitoff; - data.word[0] = SWAP_WORD(data.word[0]); data.word[1] = SWAP_WORD(data.word[1]); + total_bits = num_bits+bstream->buf_bitoff; + if (total_bits > 32) + { + /* We have to use both the words to get required data */ + shift_by = total_bits - 32; + data.word[0] = (data.word[0] << shift_by) | ( data.word[1] >> (32 - shift_by)); + } + else + { + shift_by = 32 - total_bits; + data.word[0] = data.word[0] >> shift_by; + } + *out = data.word[0]; - total_bits = num_bits + bstream->buf_bitoff; + /* update au byte position if needed */ + if ((total_bits & 0x7) == 0) + { + bstream->buf_bitoff = 0; + bstream->buf_index +=act_bytes; + } + else + { + bstream->buf_bitoff = total_bits & 0x7; + bstream->buf_index +=(act_bytes - 1); + } + cxt->phase = phase; + if (act_bytes > bytes_required) + { + cxt->emulation_byte_counter += act_bytes - bytes_required; + } + + ret =1; + } + } + } + return ret; + +} + +static int32_t viddec_pm_utils_bstream_peekbits_emul(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits) +{ + uint32_t data_left=0; + int32_t ret = -1; + /* STEP 1: Make sure that we have at least minimum data before we calculate bits */ + + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + data_left = bstream->buf_end - bstream->buf_index; + + uint32_t act_bytes = 0, phase; + viddec_pm_utils_getbits_t data; + uint32_t bytes_required=0; + + if ((num_bits <= 32) && (num_bits > 0) && (data_left != 0)) + { + uint32_t bytes_required=0; + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3; + + /* Step 2: Make sure we have bytes for requested bits */ + if (bytes_required <= data_left) + { + phase = cxt->phase; + /* Step 3: Due to emualtion prevention bytes sometimes the bytes_required > actual_required bytes */ + if (getbytes_emul(bstream, &data, &act_bytes, &phase, bytes_required, (bstream->buf_bitoff == 0)) != -1) + { + uint32_t total_bits=0; + uint32_t shift_by=0; + /* zero out upper bits */ + /* LIMITATION:For some reason compiler is optimizing it to NOP if i do both shifts + in single statement */ + data.byte[0] <<= bstream->buf_bitoff; + data.byte[0] >>= bstream->buf_bitoff; + + data.word[0] = SWAP_WORD(data.word[0]); + data.word[1] = SWAP_WORD(data.word[1]); + total_bits = num_bits+bstream->buf_bitoff; if (total_bits > 32) { /* We have to use both the words to get required data */ @@ -283,17 +550,67 @@ int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uin data.word[0] = data.word[0] >> shift_by; } *out = data.word[0]; - if (skip) + + ret =1; + } + } + } + return ret; +} + +static int32_t viddec_pm_utils_bstream_peekbits_noemul(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits) +{ + uint32_t data_left=0; + int32_t ret = -1; + /* STEP 1: Make sure that we have at least minimum data before we calculate bits */ + 
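+ /* Fast path: the stream carries no 0x00 0x00 0x03 emulation-prevention
+ pattern to strip (cxt->is_emul_reqd is false), so getbytes_noemul() below
+ copies bytes directly, without the per-byte phase check that the _emul
+ variants above have to perform. */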
//viddec_pm_utils_check_bstream_reload(cxt, &data_left); + + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + data_left = bstream->buf_end - bstream->buf_index; + uint32_t bytes_required=0; + + if ((num_bits <= 32) && (num_bits > 0) && (data_left != 0)) + { + uint32_t bytes_required=0; + viddec_pm_utils_bstream_buf_cxt_t *bstream; + + bstream = &(cxt->bstrm_buf); + bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3; + + /* Step 2: Make sure we have bytes for requested bits */ + if (bytes_required <= data_left) + { + uint32_t act_bytes, phase; + viddec_pm_utils_getbits_t data; + phase = cxt->phase; + /* Step 3: Due to emualtion prevention bytes sometimes the bytes_required > actual_required bytes */ + if (getbytes_noemul(bstream, &data, &act_bytes, &phase, bytes_required, (bstream->buf_bitoff == 0)) != -1) + { + uint32_t total_bits=0; + uint32_t shift_by=0; + /* zero out upper bits */ + /* LIMITATION:For some reason compiler is optimizing it to NOP if i do both shifts + in single statement */ + data.byte[0] <<= bstream->buf_bitoff; + data.byte[0] >>= bstream->buf_bitoff; + + data.word[0] = SWAP_WORD(data.word[0]); + data.word[1] = SWAP_WORD(data.word[1]); + total_bits = num_bits+bstream->buf_bitoff; if (total_bits > 32) { - /* update au byte position if needed */ - viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes); - cxt->phase = phase; - - if (act_bytes > bytes_required) - { - cxt->emulation_byte_counter += act_bytes - bytes_required; - } + /* We have to use both the words to get required data */ + shift_by = total_bits - 32; + data.word[0] = (data.word[0] << shift_by) | ( data.word[1] >> (32 - shift_by)); } + else + { + shift_by = 32 - total_bits; + data.word[0] = data.word[0] >> shift_by; + } + *out = data.word[0]; ret = 1; } -- cgit v1.2.3 From d4ac7201913a34768390fc815a3bf8fb0b9c2d15 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Fri, 18 Oct 2013 16:50:30 +0800 Subject: Improve the libmix parser efficiency by using short format BZ: 146002 On Baytrail, the GEN hardware supports AVC short format parsing. In multiple-slice situations, the parsing work is a bottleneck for the CPU. If we use the AVC short format, some slice header parsing work can be done by the GEN HW. This will increase the overall decoding performance.
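For reference, the driver capability probe this patch adds in getCodecSpecificConfigs() boils down to the following minimal sketch (error handling omitted; see the full hunk below):

    VAConfigAttrib attrib;
    attrib.type = VAConfigAttribDecSliceMode;
    attrib.value = VA_DEC_SLICE_MODE_NORMAL;
    vaGetConfigAttributes(mVADisplay, profile, VAEntrypointVLD, &attrib, 1);
    if (attrib.value & VA_DEC_SLICE_MODE_BASE)
        attrib.value = VA_DEC_SLICE_MODE_BASE;    /* short format: GEN HW parses slice headers */
    else
        attrib.value = VA_DEC_SLICE_MODE_NORMAL;  /* long format: slice headers parsed in software */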
Change-Id: Ib2bff7f4cca6cde917700e501ee53e3f17c55a81 Signed-off-by: wfeng6 Reviewed-on: http://android.intel.com:8080/140127 Reviewed-by: Shi, PingX Tested-by: Shi, PingX Reviewed-by: cactus Tested-by: cactus --- mixvbp/vbp_manager/vbp_h264_parser.c | 19 ++++++++++ mixvbp/vbp_plugin/h264/Android.mk | 7 +++- mixvbp/vbp_plugin/h264/h264parse_sh.c | 10 ++++++ mixvbp/vbp_plugin/h264/viddec_h264_parse.c | 8 ++--- videodecoder/VideoDecoderAVC.cpp | 58 +++++++++++++++++++++++++++++- videodecoder/VideoDecoderAVC.h | 4 ++- 6 files changed, 99 insertions(+), 7 deletions(-) diff --git a/mixvbp/vbp_manager/vbp_h264_parser.c b/mixvbp/vbp_manager/vbp_h264_parser.c index 0a8f141..dd93ea7 100755 --- a/mixvbp/vbp_manager/vbp_h264_parser.c +++ b/mixvbp/vbp_manager/vbp_h264_parser.c @@ -545,6 +545,7 @@ static inline void vbp_set_reference_frames_h264( { pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num; pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; + if (FRAME == parser->info.img.structure) { pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; @@ -587,6 +588,11 @@ static inline void vbp_set_reference_frames_h264( if (viddec_h264_get_is_used(store)) { pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE; + +#ifdef USE_AVC_SHORT_FORMAT + pic_parms->ReferenceFrames[frame_idx].frame_idx = store->long_term_frame_idx; +#endif + if (FRAME == parser->info.img.structure) { pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc; @@ -1149,6 +1155,13 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index) /* actual num_ref_frames is set in vbp_set_reference_frames_h264 */ } +#ifdef USE_AVC_SHORT_FORMAT + { + pic_parms->num_ref_idx_l0_default_active_minus1 = parser->info.active_PPS.num_ref_idx_l0_active - 1; + pic_parms->num_ref_idx_l1_default_active_minus1 = parser->info.active_PPS.num_ref_idx_l1_active - 1; + } +#endif + return VBP_OK; } @@ -1197,6 +1210,11 @@ static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index) /* whole slice is in this buffer */ slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + slice_header = &(h264_parser->info.SliceHeader); + slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice; + slc_parms->slice_type = slice_header->slice_type; + +#ifndef USE_AVC_SHORT_FORMAT /* bit offset from NAL start code to the beginning of slice data */ slc_parms->slice_data_bit_offset = bit + byte * 8; @@ -1249,6 +1267,7 @@ static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index) vbp_set_pre_weight_table_h264(h264_parser, slc_parms); vbp_set_slice_ref_list_h264(h264_parser, slc_parms); +#endif pic_data->num_slices++; diff --git a/mixvbp/vbp_plugin/h264/Android.mk b/mixvbp/vbp_plugin/h264/Android.mk index 2045d32..f661940 100755 --- a/mixvbp/vbp_plugin/h264/Android.mk +++ b/mixvbp/vbp_plugin/h264/Android.mk @@ -13,7 +13,7 @@ LOCAL_SRC_FILES := \ h264parse_sps.c \ h264parse_dpb.c \ viddec_h264_parse.c \ - mix_vbp_h264_stubs.c + mix_vbp_h264_stubs.c LOCAL_C_INCLUDES := \ $(LOCAL_PATH)/include \ @@ -21,6 +21,11 @@ LOCAL_C_INCLUDES := \ $(MIXVBP_DIR)/vbp_manager/include \ $(MIXVBP_DIR)/vbp_manager/h264/include +PLATFORM_SUPPORT_AVC_SHORT_FORMAT := baytrail +ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),) +LOCAL_CFLAGS += -DUSE_AVC_SHORT_FORMAT +endif + LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libmixvbp_h264 diff --git a/mixvbp/vbp_plugin/h264/h264parse_sh.c 
b/mixvbp/vbp_plugin/h264/h264parse_sh.c index 0d8dc9d..71d16f6 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_sh.c +++ b/mixvbp/vbp_plugin/h264/h264parse_sh.c @@ -224,6 +224,16 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice break; } +#ifdef USE_AVC_SHORT_FORMAT + bool keepParsing = false; + keepParsing = h264_is_new_picture_start(pInfo, *SliceHeader, pInfo->SliceHeader) && (SliceHeader->nal_ref_idc != 0); + if (!keepParsing) + { + ITRACE("short format parsing: no need to go on!"); + ret = H264_STATUS_OK; + break; + } +#endif if (h264_Parse_Ref_Pic_List_Reordering(parent, pInfo, SliceHeader) != H264_STATUS_OK) { WTRACE("ref list reordering failed during slice header parsing."); diff --git a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c index 2aa80b9..28a319a 100755 --- a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c +++ b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c @@ -207,9 +207,9 @@ uint32_t viddec_h264_parse(void *parent, void *ctxt) //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field h264_update_frame_type(pInfo); - - h264_dpb_update_ref_lists( pInfo); - +#ifndef USE_AVC_SHORT_FORMAT + h264_dpb_update_ref_lists(pInfo); +#endif /// Emit out the current "good" slice h264_parse_emit_current_slice(parent, pInfo); @@ -285,7 +285,7 @@ uint32_t viddec_h264_parse(void *parent, void *ctxt) case h264_NAL_UNIT_TYPE_PPS: { //OS_INFO("*****************************PPS**************************************\n"); - + status = H264_STATUS_OK; uint32_t old_sps_id = pInfo->active_SPS.seq_parameter_set_id; uint32_t old_pps_id = pInfo->active_PPS.pic_parameter_set_id; diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index c075e8a..72131aa 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -295,10 +295,11 @@ Decode_Status VideoDecoderAVC::decodeSlice(vbp_data_h264 *data, uint32_t picInde status = updateDPB(picParam); CHECK_STATUS("updateDPB"); +#ifndef USE_AVC_SHORT_FORMAT //We have to provide a hacked DPB rather than complete DPB for libva as workaround status = updateReferenceFrames(picData); CHECK_STATUS("updateReferenceFrames"); - +#endif vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); CHECK_VA_STATUS("vaBeginPicture"); @@ -328,6 +329,8 @@ Decode_Status VideoDecoderAVC::decodeSlice(vbp_data_h264 *data, uint32_t picInde bufferIDCount++; } +#ifndef USE_AVC_SHORT_FORMAT + status = setReference(sliceParam); CHECK_STATUS("setReference"); @@ -339,6 +342,16 @@ Decode_Status VideoDecoderAVC::decodeSlice(vbp_data_h264 *data, uint32_t picInde 1, sliceParam, &bufferIDs[bufferIDCount]); +#else + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264Base), + 1, + sliceParam, + &bufferIDs[bufferIDCount]); +#endif CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); bufferIDCount++; @@ -835,3 +848,46 @@ int32_t VideoDecoderAVC::getDPBSize(vbp_data_h264 *data) { return maxDPBSize; } +#ifdef USE_AVC_SHORT_FORMAT +Decode_Status VideoDecoderAVC::getCodecSpecificConfigs( + VAProfile profile, VAConfigID *config) +{ + VAStatus vaStatus; + VAConfigAttrib attrib[2]; + + if (config == NULL) { + ETRACE("Invalid parameter!"); + return DECODE_FAIL; + } + + attrib[0].type = VAConfigAttribRTFormat; + attrib[0].value = VA_RT_FORMAT_YUV420; + attrib[1].type = VAConfigAttribDecSliceMode; + attrib[1].value = VA_DEC_SLICE_MODE_NORMAL; + + vaStatus = 
vaGetConfigAttributes(mVADisplay,profile,VAEntrypointVLD, &attrib[1], 1); + + if (attrib[1].value & VA_DEC_SLICE_MODE_BASE) + { + ITRACE("AVC short format used"); + attrib[1].value = VA_DEC_SLICE_MODE_BASE; + } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) { + ITRACE("AVC long format used"); + attrib[1].value = VA_DEC_SLICE_MODE_NORMAL; + } else { + ETRACE("Unsupported Decode Slice Mode!"); + return DECODE_FAIL; + } + + vaStatus = vaCreateConfig( + mVADisplay, + profile, + VAEntrypointVLD, + &attrib[0], + 2, + config); + CHECK_VA_STATUS("vaCreateConfig"); + + return DECODE_SUCCESS; +} +#endif diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h index 9c23e6b..6f3855d 100644 --- a/videodecoder/VideoDecoderAVC.h +++ b/videodecoder/VideoDecoderAVC.h @@ -58,7 +58,9 @@ protected: Decode_Status handleNewSequence(vbp_data_h264 *data); bool isNewFrame(vbp_data_h264 *data, bool equalPTS); int32_t getDPBSize(vbp_data_h264 *data); - +#ifdef USE_AVC_SHORT_FORMAT + virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID*config); +#endif private: struct DecodedPictureBuffer { VideoSurfaceBuffer *surfaceBuffer; -- cgit v1.2.3 From 6b06632e96c99f4efa0cb9d3c6183fa81066767f Mon Sep 17 00:00:00 2001 From: "Gu, Wangyi" Date: Thu, 24 Oct 2013 14:58:27 +0800 Subject: libmix: use surfaceWidth/surfaceHeight to reflect the buffer size allocated by gralloc BZ: 145977 720p surface buffers are always allocated in WebRTC; in this case, vaCreateSurfaces needs to accept surfaceWidth/surfaceHeight as the buffer size allocated by gralloc. Change-Id: I50cc6f5026c2f36b7659980955377ec20aa37974 Signed-off-by: Gu, Wangyi --- videodecoder/VideoDecoderBase.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 7f04e3d..c67ed7c 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -852,8 +852,8 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i vaStatus = vaCreateSurfaces( mVADisplay, format, - mVideoFormatInfo.width, - mVideoFormatInfo.height, + mVideoFormatInfo.surfaceWidth, + mVideoFormatInfo.surfaceHeight, mSurfaces, mNumSurfaces, attribs, -- cgit v1.2.3 From f9c91a13d0b81e88adcfacb84508bc82ac2963a1 Mon Sep 17 00:00:00 2001 From: "Gu, Wangyi" Date: Tue, 22 Oct 2013 21:30:30 +0800 Subject: libmix: query decode hardware max resolution capability BZ: 146858 query decode hardware max resolution capability for AVC/MPEG4/VC1/VP8 Change-Id: Iad7636eb02ed05854c62f113932ef4836f6a3f54 Signed-off-by: Gu, Wangyi Signed-off-by: pingshix --- videodecoder/Android.mk | 1 - videodecoder/VideoDecoderAVC.cpp | 52 +++-------- videodecoder/VideoDecoderAVC.h | 5 +- videodecoder/VideoDecoderBase.cpp | 6 +- videodecoder/VideoDecoderBase.h | 2 +- videodecoder/VideoDecoderHost.cpp | 1 - videodecoder/VideoDecoderMPEG4.cpp | 10 +- videodecoder/VideoDecoderMPEG4.h | 2 +- videodecoder/VideoDecoderPAVC.cpp | 186 ------------------------------------- videodecoder/VideoDecoderPAVC.h | 59 ------------ videodecoder/VideoDecoderVP8.cpp | 18 ++++ videodecoder/VideoDecoderVP8.h | 3 + videodecoder/VideoDecoderWMV.cpp | 18 ++++ videodecoder/VideoDecoderWMV.h | 3 + 14 files changed, 65 insertions(+), 301 deletions(-) delete mode 100644 videodecoder/VideoDecoderPAVC.cpp delete mode 100644 videodecoder/VideoDecoderPAVC.h diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 7f3add5..243b49a 100644 --- a/videodecoder/Android.mk +++
b/videodecoder/Android.mk @@ -7,7 +7,6 @@ LOCAL_SRC_FILES := \ VideoDecoderWMV.cpp \ VideoDecoderMPEG4.cpp \ VideoDecoderAVC.cpp \ - VideoDecoderPAVC.cpp \ VideoDecoderTrace.cpp # LOCAL_CFLAGS := diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 72131aa..d34965d 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -848,46 +848,20 @@ int32_t VideoDecoderAVC::getDPBSize(vbp_data_h264 *data) { return maxDPBSize; } -#ifdef USE_AVC_SHORT_FORMAT -Decode_Status VideoDecoderAVC::getCodecSpecificConfigs( - VAProfile profile, VAConfigID *config) -{ +Decode_Status VideoDecoderAVC::checkHardwareCapability(VAProfile profile) { +#ifndef USE_GEN_HW VAStatus vaStatus; - VAConfigAttrib attrib[2]; - - if (config == NULL) { - ETRACE("Invalid parameter!"); - return DECODE_FAIL; + VAConfigAttrib cfgAttribs[2]; + cfgAttribs[0].type = VAConfigAttribMaxPictureWidth; + cfgAttribs[1].type = VAConfigAttribMaxPictureHeight; + vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileH264High, + VAEntrypointVLD, cfgAttribs, 2); + CHECK_VA_STATUS("vaGetConfigAttributes"); + if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) { + ETRACE("hardware supports resolution %d * %d smaller than the clip resolution %d * %d", + cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height); + return DECODE_DRIVER_FAIL; } - - attrib[0].type = VAConfigAttribRTFormat; - attrib[0].value = VA_RT_FORMAT_YUV420; - attrib[1].type = VAConfigAttribDecSliceMode; - attrib[1].value = VA_DEC_SLICE_MODE_NORMAL; - - vaStatus = vaGetConfigAttributes(mVADisplay,profile,VAEntrypointVLD, &attrib[1], 1); - - if (attrib[1].value & VA_DEC_SLICE_MODE_BASE) - { - ITRACE("AVC short format used"); - attrib[1].value = VA_DEC_SLICE_MODE_BASE; - } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) { - ITRACE("AVC long format used"); - attrib[1].value = VA_DEC_SLICE_MODE_NORMAL; - } else { - ETRACE("Unsupported Decode Slice Mode!"); - return DECODE_FAIL; - } - - vaStatus = vaCreateConfig( - mVADisplay, - profile, - VAEntrypointVLD, - &attrib[0], - 2, - config); - CHECK_VA_STATUS("vaCreateConfig"); - +#endif return DECODE_SUCCESS; } -#endif diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h index 6f3855d..1b3280c 100644 --- a/videodecoder/VideoDecoderAVC.h +++ b/videodecoder/VideoDecoderAVC.h @@ -58,9 +58,8 @@ protected: Decode_Status handleNewSequence(vbp_data_h264 *data); bool isNewFrame(vbp_data_h264 *data, bool equalPTS); int32_t getDPBSize(vbp_data_h264 *data); -#ifdef USE_AVC_SHORT_FORMAT - virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID*config); -#endif + virtual Decode_Status checkHardwareCapability(VAProfile profile); + private: struct DecodedPictureBuffer { VideoSurfaceBuffer *surfaceBuffer; diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index c67ed7c..f62c706 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -770,8 +770,8 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i if ((int32_t)profile != VAProfileSoftwareDecoding) { - status = isHardwareSupported(profile); - CHECK_STATUS("isHardwareSupported"); + status = checkHardwareCapability(profile); + CHECK_STATUS("checkHardwareCapability"); #ifdef USE_AVC_SHORT_FORMAT status = getCodecSpecificConfigs(profile, &mVAConfig); @@ -1326,7 +1326,7 @@ Decode_Status 
VideoDecoderBase::getCodecSpecificConfigs( return DECODE_SUCCESS; } #endif -Decode_Status VideoDecoderBase::isHardwareSupported(VAProfile profile) { +Decode_Status VideoDecoderBase::checkHardwareCapability(VAProfile profile) { return DECODE_SUCCESS; } diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 7db79f5..e823b22 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -94,7 +94,7 @@ protected: Decode_Status setParserType(_vbp_parser_type type); virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID *config); #endif - virtual Decode_Status isHardwareSupported(VAProfile profile); + virtual Decode_Status checkHardwareCapability(VAProfile profile); private: Decode_Status mapSurface(void); void initSurfaceBuffer(bool reset); diff --git a/videodecoder/VideoDecoderHost.cpp b/videodecoder/VideoDecoderHost.cpp index 93e86c1..973ab22 100644 --- a/videodecoder/VideoDecoderHost.cpp +++ b/videodecoder/VideoDecoderHost.cpp @@ -25,7 +25,6 @@ #include "VideoDecoderWMV.h" #include "VideoDecoderMPEG4.h" #include "VideoDecoderAVC.h" -#include "VideoDecoderPAVC.h" #ifdef USE_INTEL_SECURE_AVC #include "VideoDecoderAVCSecure.h" #endif diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index be3c662..b99ed2e 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -577,17 +577,13 @@ void VideoDecoderMPEG4::updateFormatInfo(vbp_data_mp42 *data) { mVideoFormatInfo.valid = true; } -Decode_Status VideoDecoderMPEG4::isHardwareSupported(VAProfile profile) { - if (!mIsShortHeader) { - // TODO: add support for MPEG4 in the future; - return DECODE_SUCCESS; - } - +Decode_Status VideoDecoderMPEG4::checkHardwareCapability(VAProfile profile) { VAStatus vaStatus; VAConfigAttrib cfgAttribs[2]; cfgAttribs[0].type = VAConfigAttribMaxPictureWidth; cfgAttribs[1].type = VAConfigAttribMaxPictureHeight; - vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileH263Baseline, + vaStatus = vaGetConfigAttributes(mVADisplay, + mIsShortHeader ? VAProfileH263Baseline : VAProfileMPEG4AdvancedSimple, VAEntrypointVLD, cfgAttribs, 2); CHECK_VA_STATUS("vaGetConfigAttributes"); if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) { diff --git a/videodecoder/VideoDecoderMPEG4.h b/videodecoder/VideoDecoderMPEG4.h index 5f641ee..aa86330 100644 --- a/videodecoder/VideoDecoderMPEG4.h +++ b/videodecoder/VideoDecoderMPEG4.h @@ -39,7 +39,7 @@ public: virtual Decode_Status decode(VideoDecodeBuffer *buffer); protected: - virtual Decode_Status isHardwareSupported(VAProfile profile); + virtual Decode_Status checkHardwareCapability(VAProfile profile); private: Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mp42 *data); diff --git a/videodecoder/VideoDecoderPAVC.cpp b/videodecoder/VideoDecoderPAVC.cpp deleted file mode 100644 index c05330a..0000000 --- a/videodecoder/VideoDecoderPAVC.cpp +++ /dev/null @@ -1,186 +0,0 @@ -/* INTEL CONFIDENTIAL -* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. -* -* The source code contained or described herein and all documents -* related to the source code ("Material") are owned by Intel -* Corporation or its suppliers or licensors. Title to the -* Material remains with Intel Corporation or its suppliers and -* licensors. The Material contains trade secrets and proprietary -* and confidential information of Intel or its suppliers and -* licensors. 
The Material is protected by worldwide copyright and -* trade secret laws and treaty provisions. No part of the Material -* may be used, copied, reproduced, modified, published, uploaded, -* posted, transmitted, distributed, or disclosed in any way without -* Intel's prior express written permission. -* -* No license under any patent, copyright, trade secret or other -* intellectual property right is granted to or conferred upon you -* by disclosure or delivery of the Materials, either expressly, by -* implication, inducement, estoppel or otherwise. Any license -* under such intellectual property rights must be express and -* approved by Intel in writing. -* -*/ - -#include "VideoDecoderPAVC.h" -#include "VideoDecoderTrace.h" -#include - -VideoDecoderPAVC::VideoDecoderPAVC(const char *mimeType) - : VideoDecoderAVC(mimeType), - mMetadata(NULL) { -} - -VideoDecoderPAVC::~VideoDecoderPAVC() { -} - -Decode_Status VideoDecoderPAVC::decode(VideoDecodeBuffer *buffer) { - // TODO: preprocessing protected content here - - mMetadata = NULL; - - if (buffer->flag & HAS_EXTRADATA) { - mMetadata = buffer->data + buffer->size; - } - - return VideoDecoderAVC::decode(buffer); -} - - -Decode_Status VideoDecoderPAVC::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { - if (mMetadata == NULL) { - // non-protected content playback path - return VideoDecoderAVC::decodeSlice(data, picIndex, sliceIndex); - } - - Decode_Status status; - VAStatus vaStatus; - uint32_t bufferIDCount = 0; - // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data - VABufferID bufferIDs[4]; - - vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); - vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); - VAPictureParameterBufferH264 *picParam = picData->pic_parms; - VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); - - if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { - // either condition indicates start of a new frame - if (sliceParam->first_mb_in_slice != 0) { - WTRACE("The first slice is lost."); - // TODO: handle the first slice lost - } - if (mDecodingFrame) { - // interlace content, complete decoding the first field - vaStatus = vaEndPicture(mVADisplay, mVAContext); - CHECK_VA_STATUS("vaEndPicture"); - - // for interlace content, top field may be valid only after the second field is parsed - mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; - } - - // Check there is no reference frame loss before decoding a frame - - // Update the reference frames and surface IDs for DPB and current frame - status = updateDPB(picParam); - CHECK_STATUS("updateDPB"); - - //We have to provide a hacked DPB rather than complete DPB for libva as workaround - status = updateReferenceFrames(picData); - CHECK_STATUS("updateReferenceFrames"); - - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); - CHECK_VA_STATUS("vaBeginPicture"); - - // start decoding a frame - mDecodingFrame = true; - - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VAPictureParameterBufferType, - sizeof(VAPictureParameterBufferH264), - 1, - picParam, - &bufferIDs[bufferIDCount]); - CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); - bufferIDCount++; - - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VAIQMatrixBufferType, - sizeof(VAIQMatrixBufferH264), - 1, - data->IQ_matrix_buf, - &bufferIDs[bufferIDCount]); - CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); - bufferIDCount++; - } - - status = 
setReference(sliceParam); - CHECK_STATUS("setReference"); - - // find which medata is correlated to current slice - PAVCMetadata *pMetadata = (PAVCMetadata*)mMetadata; - uint32_t accumulatedClearNALUSize = 0; - uint32_t clearNALUSize = 0; - do { - clearNALUSize = pMetadata->clearHeaderSize + pMetadata->decryptionDataSize; - if (clearNALUSize == 0) { - LOGE("Could not find meta data for current NAL unit."); - return DECODE_INVALID_DATA; - } - - if (accumulatedClearNALUSize + clearNALUSize > sliceData->slice_offset) { - break; - } - accumulatedClearNALUSize += clearNALUSize; - pMetadata++; - } while (1); - - // add bytes that are encrypted - sliceParam->slice_data_size += pMetadata->encryptionDataSize; - sliceData->slice_size = sliceParam->slice_data_size; - - // no need to update: - // sliceParam->slice_data_offset - 0 always - // sliceParam->slice_data_bit_offset - relative to sliceData->slice_offset - - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VASliceParameterBufferType, - sizeof(VASliceParameterBufferH264), - 1, - sliceParam, - &bufferIDs[bufferIDCount]); - CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); - bufferIDCount++; - - // sliceData->slice_offset - accumulatedClearNALUSize is the absolute offset to start codes of current NAL unit - // offset points to first byte of NAL unit - uint32_t offset = pMetadata->clearHeaderIMROffset + sliceData->slice_offset - accumulatedClearNALUSize; - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - //VASliceDataBufferType, - VAProtectedSliceDataBufferType, - sliceData->slice_size, //size - 1, //num_elements - //sliceData->buffer_addr + sliceData->slice_offset, - (uint8_t*)offset, - &bufferIDs[bufferIDCount]); - CHECK_VA_STATUS("vaCreateSliceDataBuffer"); - bufferIDCount++; - - vaStatus = vaRenderPicture( - mVADisplay, - mVAContext, - bufferIDs, - bufferIDCount); - CHECK_VA_STATUS("vaRenderPicture"); - - return DECODE_SUCCESS; -} - diff --git a/videodecoder/VideoDecoderPAVC.h b/videodecoder/VideoDecoderPAVC.h deleted file mode 100644 index 195c07d..0000000 --- a/videodecoder/VideoDecoderPAVC.h +++ /dev/null @@ -1,59 +0,0 @@ -/* INTEL CONFIDENTIAL -* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. -* -* The source code contained or described herein and all documents -* related to the source code ("Material") are owned by Intel -* Corporation or its suppliers or licensors. Title to the -* Material remains with Intel Corporation or its suppliers and -* licensors. The Material contains trade secrets and proprietary -* and confidential information of Intel or its suppliers and -* licensors. The Material is protected by worldwide copyright and -* trade secret laws and treaty provisions. No part of the Material -* may be used, copied, reproduced, modified, published, uploaded, -* posted, transmitted, distributed, or disclosed in any way without -* Intel's prior express written permission. -* -* No license under any patent, copyright, trade secret or other -* intellectual property right is granted to or conferred upon you -* by disclosure or delivery of the Materials, either expressly, by -* implication, inducement, estoppel or otherwise. Any license -* under such intellectual property rights must be express and -* approved by Intel in writing. 
-* -*/ - -#ifndef VIDEO_DECODER_PAVC_H_ -#define VIDEO_DECODER_PAVC_H_ - -#include "VideoDecoderAVC.h" - - -class VideoDecoderPAVC : public VideoDecoderAVC { -public: - VideoDecoderPAVC(const char *mimeType); - virtual ~VideoDecoderPAVC(); - - // data in the decoded buffer only contains clearHeader and decrypted data. - // encrypted data is not included in the buffer as it may contain start code emulation bytes. - virtual Decode_Status decode(VideoDecodeBuffer *buffer); - -private: - virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); - - // structure PAVCMetadata is appended after the VideodecodeBuffer::data + VideoDecoderBuffer::size - // number of structures is equal to number of nal units in the buffer. - struct PAVCMetadata - { - uint32_t clearHeaderSize; // 0 means no more meta data - uint32_t decryptionDataSize; - uint32_t encryptionDataSize; - uint32_t clearHeaderIMROffset; // always points to clear header in the IMR - }; - -private: - uint8_t *mMetadata; // pointer to metadata appended at end of buffer -}; - - - -#endif /* VIDEO_DECODER_PAVC_H_ */ diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index c1a3545..2e1180d 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -415,3 +415,21 @@ void VideoDecoderVP8::refreshAltReference(vbp_data_vp8 *data) { } } + +Decode_Status VideoDecoderVP8::checkHardwareCapability(VAProfile profile) { + VAStatus vaStatus; + VAConfigAttrib cfgAttribs[2]; + cfgAttribs[0].type = VAConfigAttribMaxPictureWidth; + cfgAttribs[1].type = VAConfigAttribMaxPictureHeight; + vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileVP8Version0_3, + VAEntrypointVLD, cfgAttribs, 2); + CHECK_VA_STATUS("vaGetConfigAttributes"); + if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) { + ETRACE("hardware supports resolution %d * %d smaller than the clip resolution %d * %d", + cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height); + return DECODE_DRIVER_FAIL; + } + + return DECODE_SUCCESS; +} + diff --git a/videodecoder/VideoDecoderVP8.h b/videodecoder/VideoDecoderVP8.h index 61db40d..d9f88fd 100644 --- a/videodecoder/VideoDecoderVP8.h +++ b/videodecoder/VideoDecoderVP8.h @@ -38,6 +38,9 @@ public: virtual void flush(void); virtual Decode_Status decode(VideoDecodeBuffer *buffer); +protected: + virtual Decode_Status checkHardwareCapability(VAProfile profile); + private: Decode_Status decodeFrame(VideoDecodeBuffer* buffer, vbp_data_vp8 *data); Decode_Status decodePicture(vbp_data_vp8 *data, int32_t picIndex); diff --git a/videodecoder/VideoDecoderWMV.cpp b/videodecoder/VideoDecoderWMV.cpp index 41b3cda..6b3f4fb 100644 --- a/videodecoder/VideoDecoderWMV.cpp +++ b/videodecoder/VideoDecoderWMV.cpp @@ -522,3 +522,21 @@ Decode_Status VideoDecoderWMV::parseBuffer(uint8_t *data, int32_t size, vbp_data } +Decode_Status VideoDecoderWMV::checkHardwareCapability(VAProfile profile) { + VAStatus vaStatus; + VAConfigAttrib cfgAttribs[2]; + cfgAttribs[0].type = VAConfigAttribMaxPictureWidth; + cfgAttribs[1].type = VAConfigAttribMaxPictureHeight; + vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileVC1Advanced, + VAEntrypointVLD, cfgAttribs, 2); + CHECK_VA_STATUS("vaGetConfigAttributes"); + if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) { + ETRACE("hardware supports resolution %d * %d smaller than the clip 
resolution %d * %d", + cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height); + return DECODE_DRIVER_FAIL; + } + + return DECODE_SUCCESS; +} + + diff --git a/videodecoder/VideoDecoderWMV.h b/videodecoder/VideoDecoderWMV.h index 6a4a6bb..b201bbf 100644 --- a/videodecoder/VideoDecoderWMV.h +++ b/videodecoder/VideoDecoderWMV.h @@ -38,6 +38,9 @@ public: virtual void flush(void); virtual Decode_Status decode(VideoDecodeBuffer *buffer); +protected: + virtual Decode_Status checkHardwareCapability(VAProfile profile); + private: Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_vc1 *data); Decode_Status decodePicture(vbp_data_vc1 *data, int32_t picIndex); -- cgit v1.2.3 From 704520891a4bc88de7a2eb6634a24378845ce609 Mon Sep 17 00:00:00 2001 From: "Gu, Wangyi" Date: Tue, 22 Oct 2013 21:31:34 +0800 Subject: libmix: Exclude the number of actual buffer needed calculation for AVC in BYT BZ: 146862 On MFLD/CTP/MERR platforms, extra buffers are allocated for the encoder and decoder buffer sharing in the WiDi use cases. Two extra buffers are needed for native window buffer cycling. The buffer numbers have been set as a fixed value in LibMIX. But on Baytrail platforms, these buffers are no longer needed. Change-Id: I5465930eca6ca491b62f4da267f6bcd097dc49f0 Signed-off-by: Gu, Wangyi --- videodecoder/Android.mk | 2 +- videodecoder/VideoDecoderAVC.cpp | 12 ++++++++++-- videodecoder/VideoDecoderBase.cpp | 4 ++-- videodecoder/VideoDecoderBase.h | 2 +- 4 files changed, 14 insertions(+), 6 deletions(-) diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 243b49a..18ccd15 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -30,7 +30,7 @@ ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),) endif ifeq ($(TARGET_BOARD_PLATFORM),baytrail) -LOCAL_CFLAGS += -DLOAD_PVR_DRIVER +LOCAL_CFLAGS += -DUSE_GEN_HW endif #LOCAL_LDLIBS += -lpthread diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index d34965d..221db27 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -26,6 +26,11 @@ #include "VideoDecoderTrace.h" #include +// Macros for actual buffer needed calculation +#define WIDI_CONSUMED 6 +#define HDMI_CONSUMED 2 +#define NW_CONSUMED 2 + VideoDecoderAVC::VideoDecoderAVC(const char *mimeType) : VideoDecoderBase(mimeType, VBP_H264), mToggleDPB(0), @@ -721,9 +726,12 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { // outputQueue + nativewindow_owned + (diff > 0 ? diff : 1) + widi_need_max + 1(available buffer) // while outputQueue = DPB < 8? DPB :8 // and diff = Reference + 1 - ouputQueue - mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + 4 /* Owned by native window */ + mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + NW_CONSUMED /* Owned by native window */ + (diff > 0 ? 
diff : 1) - + 6 /* WiDi maximum needs */ +#ifndef USE_GEN_HW + + HDMI_CONSUMED /* Two extra buffers are needed for native window buffer cycling */ + + WIDI_CONSUMED /* WiDi maximum needs */ +#endif + 1; } diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index f62c706..19b8fee 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -745,7 +745,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i } // Display is defined as "unsigned int" -#ifndef LOAD_PVR_DRIVER +#ifndef USE_GEN_HW mDisplay = new Display; *mDisplay = ANDROID_DISPLAY_HANDLE; #else @@ -986,7 +986,7 @@ Decode_Status VideoDecoderBase::terminateVA(void) { } if (mDisplay) { -#ifndef LOAD_PVR_DRIVER +#ifndef USE_GEN_HW delete mDisplay; #endif mDisplay = NULL; diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index e823b22..f2e5dee 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -37,7 +37,7 @@ extern "C" { } #ifndef Display -#ifdef LOAD_PVR_DRIVER +#ifdef USE_GEN_HW typedef char Display; #else typedef unsigned int Display; -- cgit v1.2.3 From 44869929706f6dcef976f6d9aeed77b9fa8d00d4 Mon Sep 17 00:00:00 2001 From: "Yu, Linda" Date: Thu, 24 Oct 2013 12:33:12 +0800 Subject: vpp buffer number should be omitted in decoder buffer comparison BZ: 145717 The vpp buffer number should be omitted in the decoder buffer comparison. Change-Id: Id0e08fe0ff837b96fc3fa0c0979286eb5d181de3 Signed-off-by: Yu, Linda --- videodecoder/Android.mk | 4 ++++ videodecoder/VideoDecoderBase.cpp | 4 ++++ videodecoder/VideoDecoderDefs.h | 3 +++ 3 files changed, 11 insertions(+) diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 18ccd15..f7b7c80 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -1,6 +1,10 @@ LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) +ifeq ($(TARGET_HAS_VPP),true) +LOCAL_CFLAGS += -DTARGET_HAS_VPP +endif + LOCAL_SRC_FILES := \ VideoDecoderHost.cpp \ VideoDecoderBase.cpp \ diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 19b8fee..ec429a1 100755 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -712,7 +712,11 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i } if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){ +#ifdef TARGET_HAS_VPP + if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber - mConfigBuffer.vppBufferNum) +#else if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber) +#endif return DECODE_FORMAT_CHANGE; numSurface = mConfigBuffer.surfaceNumber; diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 8f94cde..d3deb56 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -135,6 +135,9 @@ struct VideoConfigBuffer { VideoExtensionBuffer *ext; void* nativeWindow; uint32_t rotationDegrees; +#ifdef TARGET_HAS_VPP + uint32_t vppBufferNum; +#endif }; struct VideoRenderBuffer { -- cgit v1.2.3 From c2c51ec018ee99e1b92a58e507c104b28c5d2208 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Fri, 25 Oct 2013 09:50:14 +0800 Subject: Fix typo for xproc buffer sharing BZ: 147558 change parameter order and fix typo Change-Id: I30299c1f92d2b93432ce992cec2fd1bca7d917f8 Signed-off-by: Zhao Liang --- test/BSClient.cpp | 2 +- videoencoder/IntelMetadataBuffer.cpp | 4 ++-- videoencoder/IntelMetadataBuffer.h | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/test/BSClient.cpp 
b/test/BSClient.cpp index b368642..bffb09d 100755 --- a/test/BSClient.cpp +++ b/test/BSClient.cpp @@ -22,7 +22,7 @@ int main(int argc, char* argv[]) uint32_t tokenPC = 0xC0000000; uint32_t token; #ifdef INTEL_VIDEO_XPROC_SHARING - token = IntelMetadataBuffer::MakeSessionFlag(IntelMetadataBuffer::WEBRTC_BASE, true, false); + token = IntelMetadataBuffer::MakeSessionFlag(true, false, IntelMetadataBuffer::WEBRTC_BASE); #endif int memmode = 0; int clearcontext = 1; diff --git a/videoencoder/IntelMetadataBuffer.cpp b/videoencoder/IntelMetadataBuffer.cpp index b7dcfc0..686f223 100644 --- a/videoencoder/IntelMetadataBuffer.cpp +++ b/videoencoder/IntelMetadataBuffer.cpp @@ -820,7 +820,7 @@ IMB_Result IntelMetadataBuffer::ClearContext(uint32_t sessionflag, bool isProvid return IMB_SUCCESS; } -uint32_t IntelMetadataBuffer::MakeSessionFlag(uint16_t sindex, bool romoteProvider, bool remoteConsumer) +uint32_t IntelMetadataBuffer::MakeSessionFlag(bool romoteProvider, bool remoteConsumer, uint16_t sindex) { uint32_t sessionflag = 0; @@ -828,7 +828,7 @@ uint32_t IntelMetadataBuffer::MakeSessionFlag(uint16_t sindex, bool romoteProvid sessionflag |= REMOTE_PROVIDER; if (remoteConsumer) - sessionflag |= REMOTE_PROVIDER; + sessionflag |= REMOTE_CONSUMER; return sessionflag + sindex; } diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h index 00b6649..fb18317 100644 --- a/videoencoder/IntelMetadataBuffer.h +++ b/videoencoder/IntelMetadataBuffer.h @@ -135,7 +135,7 @@ public: static const uint16_t WEBRTC_BASE = 0x2000; static const uint16_t VIDEOEDIT_BASE = 0x3000; - static uint32_t MakeSessionFlag(uint16_t sindex, bool romoteProvider, bool remoteConsumer); + static uint32_t MakeSessionFlag(bool romoteProvider, bool remoteConsumer, uint16_t sindex); private: uint32_t mSessionFlag; -- cgit v1.2.3 From 5b9de5aafeac9ed7f2b2b4ab34a67264cc750f2a Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Fri, 25 Oct 2013 13:17:20 +0800 Subject: libmix: A fix to avoid decoding a frame twice in case of format change BZ: 147579 In case of a format change, the VP8 decoder decodes one frame twice, so the same decoded buffer is output twice. This patch fixes that issue. 
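For context, a minimal sketch (not part of this patch) of how a caller consumes this return value through the IVideoDecoder interface; the helper name reallocateOutputBuffers is illustrative only:

    // Hypothetical caller loop: after this fix, a key frame whose size has
    // changed returns DECODE_FORMAT_CHANGE exactly once instead of the frame
    // being decoded and output twice.
    Decode_Status status = decoder->decode(&inputBuffer);
    if (status == DECODE_FORMAT_CHANGE) {
        reallocateOutputBuffers(decoder->getFormatInfo()); // illustrative helper
        status = decoder->decode(&inputBuffer);            // resubmit; the frame decodes once
    }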
Change-Id: I97e75ec6e717328f127104175f7dd1150408c020 Signed-off-by: Tianmi Chen --- videodecoder/VideoDecoderVP8.cpp | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index 2e1180d..24b7315 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -146,11 +146,7 @@ Decode_Status VideoDecoderVP8::decode(VideoDecodeBuffer *buffer) { } status = decodeFrame(buffer, data); - CHECK_STATUS("decodeFrame"); - if (mSizeChanged) { - mSizeChanged = false; - return DECODE_FORMAT_CHANGE; - } + return status; } @@ -162,11 +158,16 @@ Decode_Status VideoDecoderVP8::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_v return DECODE_SUCCESS; } - if (VP8_KEY_FRAME == data->codec_data->frame_type && !mSizeChanged) { - updateFormatInfo(data); - if (mSizeChanged == true) { + if (VP8_KEY_FRAME == data->codec_data->frame_type) { + if (mSizeChanged) { mSizeChanged = false; return DECODE_FORMAT_CHANGE; + } else { + updateFormatInfo(data); + if (mSizeChanged == true) { + mSizeChanged = false; + return DECODE_FORMAT_CHANGE; + } } } -- cgit v1.2.3 From 3a6635a0a3ef461a6da7232455502a1aef217e3b Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Wed, 23 Oct 2013 17:40:10 +0800 Subject: libmix: add support for Error Reporting needed by WebRTC BZ: 147912 The error will be reported when the frame is removed from output queue for rendering. Change-Id: Ibcf9dc0dcaf31c62b76a8f51acb13157b71403e1 Signed-off-by: Dan Liang --- videodecoder/VideoDecoderAVC.cpp | 4 +++ videodecoder/VideoDecoderBase.cpp | 53 ++++++++++++++++++++++++++++++++---- videodecoder/VideoDecoderBase.h | 7 +++-- videodecoder/VideoDecoderDefs.h | 28 ++++++++++++++++++- videodecoder/VideoDecoderInterface.h | 2 +- videodecoder/VideoDecoderVP8.cpp | 2 ++ 6 files changed, 87 insertions(+), 9 deletions(-) mode change 100755 => 100644 videodecoder/VideoDecoderBase.cpp diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 221db27..e88b150 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -401,6 +401,10 @@ Decode_Status VideoDecoderAVC::setReference(VASliceParameterBufferH264 *slicePar if (!(ref->flags & VA_PICTURE_H264_INVALID)) { ref->picture_id = findSurface(ref); if (ref->picture_id == VA_INVALID_SURFACE) { + // Error DecodeRefMissing is counted once even there're multiple + mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1; + mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing; + if (mLastReference) { WTRACE("Reference frame %d is missing. 
Use last reference", getPOC(ref)); ref->picture_id = mLastReference->renderBuffer.surface; diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp old mode 100755 new mode 100644 index ec429a1..5ec182f --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -207,7 +207,7 @@ const VideoFormatInfo* VideoDecoderBase::getFormatInfo(void) { return &mVideoFormatInfo; } -const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { +const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining, VideoErrorBuffer *outErrBuf) { VAStatus vaStatus; if (mVAStarted == false) { return NULL; @@ -232,11 +232,16 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { mOutputTail = NULL; } vaStatus = vaSetTimestampForSurface(mVADisplay, outputByPos->renderBuffer.surface, outputByPos->renderBuffer.timeStamp); - if (useGraphicBuffer) - vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface); + if (useGraphicBuffer) { + if (vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface) == VA_STATUS_ERROR_DECODING_ERROR) { + fillDecodingErrors(&(outputByPos->renderBuffer)); + } + } if (draining && mOutputTail == NULL) { outputByPos->renderBuffer.flag |= IS_EOS; } + drainDecodingErrors(outErrBuf, &(outputByPos->renderBuffer)); + return &(outputByPos->renderBuffer); } @@ -284,12 +289,18 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) { //VTRACE("Output POC %d for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6); vaStatus = vaSetTimestampForSurface(mVADisplay, output->renderBuffer.surface, output->renderBuffer.timeStamp); - if (useGraphicBuffer) - vaSyncSurface(mVADisplay, output->renderBuffer.surface); + if (useGraphicBuffer) { + if (vaSyncSurface(mVADisplay, output->renderBuffer.surface) == VA_STATUS_ERROR_DECODING_ERROR) { + fillDecodingErrors(&(output->renderBuffer)); + } + } if (draining && mOutputTail == NULL) { output->renderBuffer.flag |= IS_EOS; } + + drainDecodingErrors(outErrBuf, &(output->renderBuffer)); + return &(output->renderBuffer); } @@ -574,6 +585,8 @@ Decode_Status VideoDecoderBase::acquireSurfaceBuffer(void) { mAcquiredBuffer->renderBuffer.flag = 0; mAcquiredBuffer->renderBuffer.renderDone = false; mAcquiredBuffer->asReferernce = false; + mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 0; + mAcquiredBuffer->renderBuffer.errBuf.timeStamp = INVALID_PTS; return DECODE_SUCCESS; } @@ -912,6 +925,10 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i return DECODE_MEMORY_FAIL; } + if (mConfigBuffer.flag & WANT_ERROR_REPORT) { + mErrReportEnabled = true; + } + initSurfaceBuffer(true); if ((int32_t)profile == VAProfileSoftwareDecoding) { @@ -998,6 +1015,7 @@ Decode_Status VideoDecoderBase::terminateVA(void) { mVAStarted = false; mInitialized = false; + mErrReportEnabled = false; mSignalBufferSize = 0; for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) { mSignalBufferPre[i] = NULL; @@ -1334,3 +1352,28 @@ Decode_Status VideoDecoderBase::checkHardwareCapability(VAProfile profile) { return DECODE_SUCCESS; } +void VideoDecoderBase::drainDecodingErrors(VideoErrorBuffer *outErrBuf, VideoRenderBuffer *CurrentSurface) { + if (mErrReportEnabled && outErrBuf && CurrentSurface) { + memcpy(outErrBuf, &(CurrentSurface->errBuf), sizeof(VideoErrorBuffer)); + + CurrentSurface->errBuf.errorNumber = 0; + CurrentSurface->errBuf.timeStamp = INVALID_PTS; + } +} + +void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *CurrentSurface) { + VAStatus 
ret; + + if (mErrReportEnabled) { + CurrentSurface->errBuf.timeStamp = CurrentSurface->timeStamp; + // TODO: is 10 a suitable number? + VASurfaceDecodeMBErrors err_drv_output[MAX_ERR_NUM - 1]; + ret = vaQuerySurfaceError(mVADisplay, CurrentSurface->surface, VA_STATUS_ERROR_DECODING_ERROR, (void **)&err_drv_output); + for (int i = CurrentSurface->errBuf.errorNumber; i < MAX_ERR_NUM - 1; i++) { + if (err_drv_output[i].status != -1) { + CurrentSurface->errBuf.errorNumber++; + CurrentSurface->errBuf.errorArray[i].type = (VideoDecodeErrorType)err_drv_output[i].decode_error_type; + } + } + } +} diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index f2e5dee..6527be6 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -60,7 +60,7 @@ public: virtual void stop(void); //virtual Decode_Status decode(VideoDecodeBuffer *buffer); virtual void flush(void); - virtual const VideoRenderBuffer* getOutput(bool draining = false); + virtual const VideoRenderBuffer* getOutput(bool draining = false, VideoErrorBuffer *output_buf = NULL); virtual Decode_Status signalRenderDone(void * graphichandler); virtual const VideoFormatInfo* getFormatInfo(void); virtual bool checkBufferAvail(); @@ -98,11 +98,12 @@ protected: private: Decode_Status mapSurface(void); void initSurfaceBuffer(bool reset); + void drainDecodingErrors(VideoErrorBuffer *outErrBuf, VideoRenderBuffer *CurrentSurface); + void fillDecodingErrors(VideoRenderBuffer *CurrentSurface); bool mInitialized; pthread_mutex_t mLock; - protected: VideoFormatInfo mVideoFormatInfo; Display *mDisplay; @@ -125,6 +126,8 @@ protected: int32_t mOutputWindowSize; // indicate limit of number of outstanding frames for output + bool mErrReportEnabled; + enum { // TODO: move this to vbp_loader.h VBP_INVALID = 0xFF, diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index d3deb56..7a903c4 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -105,8 +105,20 @@ typedef enum { // indicate it's the last output frame of the sequence IS_EOS = 0x10000, + // indicate whether error reporting is needed + WANT_ERROR_REPORT = 0x20000, + } VIDEO_BUFFER_FLAG; +typedef enum +{ + DecodeSliceMissing = 0, + DecodeMBError = 1, + DecodeRefMissing = 2, +} VideoDecodeErrorType; + +#define MAX_ERR_NUM 10 + struct VideoDecodeBuffer { uint8_t *data; int32_t size; @@ -140,6 +152,19 @@ struct VideoConfigBuffer { #endif }; +struct VideoErrorInfo { + VideoDecodeErrorType type; + union { + typedef struct {uint32_t start_mb; uint32_t end_mb;} mb_pos; + } error_data; +}; + +struct VideoErrorBuffer { + uint32_t errorNumber; // Error number should be no more than MAX_ERR_NUM + int64_t timeStamp; // presentation time stamp + VideoErrorInfo errorArray[MAX_ERR_NUM]; +}; + struct VideoRenderBuffer { VASurfaceID surface; VADisplay display; @@ -152,8 +177,9 @@ struct VideoRenderBuffer { uint32_t flag; mutable volatile bool driverRenderDone; VideoFrameRawData *rawData; -}; + VideoErrorBuffer errBuf; +}; struct VideoSurfaceBuffer { VideoRenderBuffer renderBuffer; diff --git a/videodecoder/VideoDecoderInterface.h b/videodecoder/VideoDecoderInterface.h index 4373ad5..79399c9 100644 --- a/videodecoder/VideoDecoderInterface.h +++ b/videodecoder/VideoDecoderInterface.h @@ -36,7 +36,7 @@ public: virtual void stop(void) = 0; virtual void flush() = 0; virtual Decode_Status decode(VideoDecodeBuffer *buffer) = 0; - virtual const VideoRenderBuffer* getOutput(bool draining = false) = 0; + virtual const 
VideoRenderBuffer* getOutput(bool draining = false, VideoErrorBuffer *output_buf = NULL) = 0; virtual const VideoFormatInfo* getFormatInfo(void) = 0; virtual Decode_Status signalRenderDone(void * graphichandler) = 0; virtual bool checkBufferAvail() = 0; diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index 24b7315..145c7b6 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -311,6 +311,8 @@ Decode_Status VideoDecoderVP8::setReference(VAPictureParameterBufferVP8 *picPara if (mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer == NULL || mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer == NULL || mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer == NULL) { + mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1; + mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing; return DECODE_NO_REFERENCE; } //mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer = mLastReference; -- cgit v1.2.3 From ec7854dd9773c00e24a521152d96f72ca270eae8 Mon Sep 17 00:00:00 2001 From: Cheng Yao Date: Fri, 25 Oct 2013 08:26:49 +0800 Subject: [PORT FROM R42B] use VPG defined YCbCr422H constant BZ: 140087 Use VPG defined YCbCr422H constant instead of internal workaround value Change-Id: I76923da27a6b1aeeeae099ec1b6edab9db07b91a Orig-Change-Id: I779c3293456631f26ca0a8087586ea2015a0a710 Signed-off-by: Cheng Yao --- imagedecoder/JPEGBlitter.h | 2 +- imagedecoder/JPEGCommon_Gen.h | 4 ---- imagedecoder/JPEGDecoder_gen.cpp | 29 ----------------------------- 3 files changed, 1 insertion(+), 34 deletions(-) diff --git a/imagedecoder/JPEGBlitter.h b/imagedecoder/JPEGBlitter.h index 9514b25..b9fcc08 100644 --- a/imagedecoder/JPEGBlitter.h +++ b/imagedecoder/JPEGBlitter.h @@ -29,7 +29,7 @@ #ifndef JPEG_BLITTER_H #define JPEG_BLITTER_H -#include "../videovpp/VideoVPPBase.h" +#include #include "JPEGCommon.h" #include diff --git a/imagedecoder/JPEGCommon_Gen.h b/imagedecoder/JPEGCommon_Gen.h index 2cc90ae..ce3bf08 100644 --- a/imagedecoder/JPEGCommon_Gen.h +++ b/imagedecoder/JPEGCommon_Gen.h @@ -32,10 +32,6 @@ #include #include #include "JPEGCommon.h" -// temp workaround -#define HAL_PIXEL_FORMAT_YCbCr_422_H_INTEL HAL_PIXEL_FORMAT_YCrCb_422_H_INTEL // 422H (YU16) -#define HAL_PIXEL_FORMAT_IMC3 0x103 // IMC3 -#define HAL_PIXEL_FORMAT_444P 0x104 // 444P #endif diff --git a/imagedecoder/JPEGDecoder_gen.cpp b/imagedecoder/JPEGDecoder_gen.cpp index 9b5f242..8fa25a8 100644 --- a/imagedecoder/JPEGDecoder_gen.cpp +++ b/imagedecoder/JPEGDecoder_gen.cpp @@ -50,10 +50,6 @@ int fourcc2PixelFormat(uint32_t fourcc) return HAL_PIXEL_FORMAT_NV12_TILED_INTEL; case VA_FOURCC_RGBA: return HAL_PIXEL_FORMAT_RGBA_8888; - case VA_FOURCC_IMC3: - return HAL_PIXEL_FORMAT_IMC3; - case VA_FOURCC_444P: - return HAL_PIXEL_FORMAT_444P; case VA_FOURCC_422V: case VA_FOURCC_411P: default: @@ -73,10 +69,6 @@ uint32_t pixelFormat2Fourcc(int pixel_format) return VA_FOURCC_NV12; case HAL_PIXEL_FORMAT_RGBA_8888: return VA_FOURCC_RGBA; - case HAL_PIXEL_FORMAT_444P: - return VA_FOURCC_444P; - case HAL_PIXEL_FORMAT_IMC3: - return VA_FOURCC_IMC3; default: return 0; } @@ -149,27 +141,6 @@ JpegDecodeStatus JpegDecoder::createSurfaceGralloc(int width, int height, int pi uint32_t fourcc = pixelFormat2Fourcc(pixel_format); VTRACE("enter %s, pixel_format 0x%x, fourcc %s", __FUNCTION__, pixel_format, fourcc2str(NULL, fourcc)); - if ((fourcc != VA_FOURCC_422H) || - (fourcc != VA_FOURCC_YUY2) || - (fourcc != VA_FOURCC_RGBA)){ - VASurfaceAttrib attrib; - attrib.type = VASurfaceAttribPixelFormat; - attrib.flags = 
VA_SURFACE_ATTRIB_SETTABLE; - attrib.value.type = VAGenericValueTypeInteger; - attrib.value.value.i = fourcc; - VAStatus va_status = vaCreateSurfaces(mDisplay, - fourcc2VaFormat(fourcc), - width, - height, - surf_id, - 1, - &attrib, - 1); - VTRACE("%s createSurface for %s", __FUNCTION__, fourcc2str(NULL, fourcc)); - if (va_status != VA_STATUS_SUCCESS) - return JD_RESOURCE_FAILURE; - return JD_SUCCESS; - } int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); if (err) { -- cgit v1.2.3 From 803b7285d853112ac4b71de3d406f477c032279f Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Thu, 24 Oct 2013 03:30:43 +0800 Subject: [PORT FROM R42B-STABLE] Fix the corruption problem of Widevine playback on BYT BZ: 146611 When Widevine clips have multiple slices in one frame, corruption is observed. This is caused by mismatched parameters between LibMIX and the i965 driver. In this fix, the PAVP-encrypted buffer is passed as the VASliceDataBuffer just once per frame. The slice offset and slice size are passed in the VASliceParameterBuffer for each frame. Change-Id: I1f4fadae60e76ca91d46597cc5738f65d7e4c7dd Signed-off-by: wfeng6 --- .../securevideo/baytrail/VideoDecoderAVCSecure.cpp | 39 +++++++++------------- .../securevideo/baytrail/VideoDecoderAVCSecure.h | 1 + 2 files changed, 17 insertions(+), 23 deletions(-) diff --git a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp index 675b37a..06fb677 100644 --- a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp @@ -92,6 +92,7 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { uint8_t naluType; frame_info_t* pFrameInfo; + mFrameSize = 0; if (buffer->flag & IS_SECURE_DATA) { VTRACE("Decoding protected video ..."); mIsEncryptData = 1; @@ -107,6 +108,9 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { } pFrameInfo = (frame_info_t*) buffer->data; + mFrameSize = pFrameInfo->length; + VTRACE("mFrameSize = %d", mFrameSize); + memcpy(&mEncParam, pFrameInfo->pavp, sizeof(pavp_info_t)); for (int32_t i = 0; i < pFrameInfo->num_nalus; i++) { naluType = pFrameInfo->nalus[i].type & NALU_TYPE_MASK; @@ -277,7 +281,19 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p bufferIDCount++; } + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + mFrameSize, //size + 1, //num_elements + sliceData->buffer_addr + sliceData->slice_offset, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + bufferIDCount++; + } + vaStatus = vaCreateBuffer( mVADisplay, mVAContext, @@ -290,29 +306,6 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); bufferIDCount++; - if (mIsEncryptData) { - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VASliceDataBufferType, - sliceData->slice_size, //size - 1, //num_elements - sliceData->buffer_addr + sliceData->slice_offset, - &bufferIDs[bufferIDCount]); - } else { - // This is for clear video playback - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VASliceDataBufferType, - sliceData->slice_size, //size - 1, //num_elements - sliceData->buffer_addr + sliceData->slice_offset, - &bufferIDs[bufferIDCount]); - } - CHECK_VA_STATUS("vaCreateSliceDataBuffer"); - bufferIDCount++; - vaStatus = vaRenderPicture( mVADisplay, mVAContext, diff --git 
a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h index 2b2e489..18289eb 100644 --- a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h +++ b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h @@ -46,6 +46,7 @@ private: uint8_t *mNaluHeaderBuffer; uint8_t *mSliceHeaderBuffer; uint32_t mIsEncryptData; + uint32_t mFrameSize; }; #endif /* VIDEO_DECODER_AVC_SECURE_H_ */ -- cgit v1.2.3 From 28d3258abce41bfe08ed3948bc722af6b82b443c Mon Sep 17 00:00:00 2001 From: ywan171 Date: Tue, 29 Oct 2013 13:28:24 +0800 Subject: mix_parser: check idx in h264_dpb_get_smallest_poc to fix klocwork issue BZ: 148198 check idx in h264_dpb_get_smallest_poc to fix klocwork issue Change-Id: I96a6ed242f17d82281707822ee37cc9bda836bcc Signed-off-by: ywan171 --- mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c index 4415d54..c459cf7 100755 --- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c +++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c @@ -3060,6 +3060,9 @@ void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, i for (idx = 0; idx < p_dpb->used_size; idx++) { + if (idx >= (NUM_DPB_FRAME_STORES + 2)) { + break; + } h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); if (viddec_h264_get_is_output(p_dpb->active_fs) == 0) -- cgit v1.2.3 From b6e058961dd963f35d7a949fe23762f0fe8e8dc2 Mon Sep 17 00:00:00 2001 From: Andy Qiu Date: Mon, 19 Aug 2013 15:53:59 -0700 Subject: [PORT FROM R42B-STABLE] libmix: Avoid returning unnecessary "format change" error code BZ: 127926 If video format is not really changed, don't return "DECODE_FORMAT_CHANGE" error code to caller. Change-Id: Ib88f5d1f66a704d472bec54ad9e78143cb0fbea5 Signed-off-by: Suneel Kandru --- mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c | 11 ++++++++--- .../viddec_fw/fw/parser/vbp_h264secure_parser.c | 11 ++++++++--- videodecoder/VideoDecoderAVC.cpp | 7 +++++-- videodecoder/VideoDecoderBase.cpp | 22 ++++++++-------------- 4 files changed, 29 insertions(+), 22 deletions(-) diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c index 3f6400d..7ba15a5 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c @@ -983,11 +983,16 @@ static void vbp_set_codec_data_h264( /* udpate sps and pps status */ - query_data->new_sps = (seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0; - query_data->new_pps = (pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 1 : 0; + query_data->new_sps = 0; + query_data->new_pps = 0; + if (query_data->has_sps && query_data->has_pps) + { + query_data->new_sps = (seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0; + query_data->new_pps = (pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 
1 : 0; + } query_data->has_sps = parser->info.active_SPS.seq_parameter_set_id != 0xff; query_data->has_pps = parser->info.active_PPS.seq_parameter_set_id != 0xff; - if ( frame_width != codec_data->frame_width || frame_height != codec_data->frame_height) + if (frame_width != codec_data->frame_width || frame_height != codec_data->frame_height) { query_data->new_sps = 1; query_data->new_pps = 1; diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264secure_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264secure_parser.c index 498cbc4..7b09a9b 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_h264secure_parser.c +++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264secure_parser.c @@ -982,11 +982,16 @@ static void vbp_set_codec_data_h264secure( /* udpate sps and pps status */ - query_data->new_sps = (seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0; - query_data->new_pps = (pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 1 : 0; + query_data->new_sps = 0; + query_data->new_pps = 0; + if (query_data->has_sps && query_data->has_pps) + { + query_data->new_sps = (seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0; + query_data->new_pps = (pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 1 : 0; + } query_data->has_sps = parser->info.active_SPS.seq_parameter_set_id != 0xff; query_data->has_pps = parser->info.active_PPS.seq_parameter_set_id != 0xff; - if ( frame_width != codec_data->frame_width || frame_height != codec_data->frame_height) + if (frame_width != codec_data->frame_width || frame_height != codec_data->frame_height) { query_data->new_sps = 1; query_data->new_pps = 1; diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index e88b150..5b8efef 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -676,10 +676,13 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { if ((mVideoFormatInfo.width != width || mVideoFormatInfo.height != height) && width && height) { + if (VideoDecoderBase::alignMB(mVideoFormatInfo.width) != width || + VideoDecoderBase::alignMB(mVideoFormatInfo.height) != height) { + mSizeChanged = true; + ITRACE("Video size is changed."); + } mVideoFormatInfo.width = width; mVideoFormatInfo.height = height; - mSizeChanged = true; - ITRACE("Video size is changed."); } if (data->new_sps) { diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 5ec182f..b6874da 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -769,8 +769,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i if (profile >= VAProfileH264Baseline && profile <= VAProfileVC1Advanced) { ITRACE("Using GEN driver"); mDisplay = "libva_driver_name=i965"; - } - else { + } else { ITRACE("Using PVR driver"); mDisplay = "libva_driver_name=pvr"; } @@ -886,8 +885,8 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i mNumSurfaces, NULL, 0); - mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width; - mVideoFormatInfo.surfaceHeight = mVideoFormatInfo.height; + mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width; + mVideoFormatInfo.surfaceHeight = mVideoFormatInfo.height; } CHECK_VA_STATUS("vaCreateSurfaces"); @@ -1044,7 +1043,7 @@ Decode_Status VideoDecoderBase::parseBuffer(uint8_t *buffer, int32_t size, bool return DECODE_SUCCESS; } -Decode_Status VideoDecoderBase::mapSurface(void){ +Decode_Status VideoDecoderBase::mapSurface(void) { VAStatus 
vaStatus = VA_STATUS_SUCCESS; VAImage image; uint8_t *userPtr; @@ -1208,7 +1207,7 @@ void VideoDecoderBase::initSurfaceBuffer(bool reset) { mSurfaceBuffers[i].renderBuffer.rawData = NULL; mSurfaceBuffers[i].mappedData = NULL; } - if (useGraphicBuffer){ + if (useGraphicBuffer) { if (reset) { mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = mConfigBuffer.graphicBufferHandler[i]; mSurfaceBuffers[i].renderBuffer.renderDone = false; //default false @@ -1220,11 +1219,9 @@ void VideoDecoderBase::initSurfaceBuffer(bool reset) { break; } } - } - else{ + } else { mSurfaceBuffers[i].renderBuffer.renderDone = false; } - } else { mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = NULL; mSurfaceBuffers[i].renderBuffer.renderDone = true; @@ -1299,8 +1296,7 @@ Decode_Status VideoDecoderBase::setParserType(_vbp_parser_type type) { ITRACE("Parser Type = %d", (int32_t)type); mParserType = type; return DECODE_SUCCESS; - } - else { + } else { ETRACE("Invalid parser type = %d", (int32_t)type); return DECODE_NO_PARSER; } @@ -1322,9 +1318,7 @@ Decode_Status VideoDecoderBase::updateBuffer(uint8_t *buffer, int32_t size, void return DECODE_SUCCESS; } -Decode_Status VideoDecoderBase::getCodecSpecificConfigs( - VAProfile profile, VAConfigID *config) -{ +Decode_Status VideoDecoderBase::getCodecSpecificConfigs(VAProfile profile, VAConfigID *config) { VAStatus vaStatus; VAConfigAttrib attrib; attrib.type = VAConfigAttribRTFormat; -- cgit v1.2.3 From fb0aa18ee8e67ecbfe3e223218493da1899ae47b Mon Sep 17 00:00:00 2001 From: Li Zeng Date: Sat, 2 Nov 2013 05:28:55 +0800 Subject: libmix: revisit error reporting code BZ: 147912 Revisit the error reporting code. Change-Id: Ibdbab27f31d9cbc19a3db8e75a7f2b9d19e05504 Signed-off-by: Li Zeng --- videodecoder/VideoDecoderBase.cpp | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index b6874da..cf20c24 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -233,9 +233,8 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining, VideoErrorBu } vaStatus = vaSetTimestampForSurface(mVADisplay, outputByPos->renderBuffer.surface, outputByPos->renderBuffer.timeStamp); if (useGraphicBuffer) { - if (vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface) == VA_STATUS_ERROR_DECODING_ERROR) { - fillDecodingErrors(&(outputByPos->renderBuffer)); - } + vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface); + fillDecodingErrors(&(outputByPos->renderBuffer)); } if (draining && mOutputTail == NULL) { outputByPos->renderBuffer.flag |= IS_EOS; @@ -290,9 +289,8 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining, VideoErrorBu vaStatus = vaSetTimestampForSurface(mVADisplay, output->renderBuffer.surface, output->renderBuffer.timeStamp); if (useGraphicBuffer) { - if (vaSyncSurface(mVADisplay, output->renderBuffer.surface) == VA_STATUS_ERROR_DECODING_ERROR) { - fillDecodingErrors(&(output->renderBuffer)); - } + vaSyncSurface(mVADisplay, output->renderBuffer.surface); + fillDecodingErrors(&(output->renderBuffer)); } if (draining && mOutputTail == NULL) { @@ -1361,8 +1359,10 @@ void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *CurrentSurface) { if (mErrReportEnabled) { CurrentSurface->errBuf.timeStamp = CurrentSurface->timeStamp; // TODO: is 10 a suitable number? 
- VASurfaceDecodeMBErrors err_drv_output[MAX_ERR_NUM - 1]; + VASurfaceDecodeMBErrors *err_drv_output; ret = vaQuerySurfaceError(mVADisplay, CurrentSurface->surface, VA_STATUS_ERROR_DECODING_ERROR, (void **)&err_drv_output); + if (ret) + return; for (int i = CurrentSurface->errBuf.errorNumber; i < MAX_ERR_NUM - 1; i++) { if (err_drv_output[i].status != -1) { CurrentSurface->errBuf.errorNumber++; -- cgit v1.2.3 From 02f482c626bb64627cd83d106e23e58cf4e2b585 Mon Sep 17 00:00:00 2001 From: Weian Chen Date: Wed, 30 Oct 2013 16:50:58 -0700 Subject: libmix: set dynamic rotation degree to video driver BZ: 148092 When the rotation degree changes, set the new degree to video driver through API vaSetDisplayAttributes at runtime Change-Id: I10dc3480e315a0fc5e1ec4492108d784b9ba9920 Signed-off-by: Weian Chen --- videodecoder/VideoDecoderAVC.cpp | 2 ++ videodecoder/VideoDecoderBase.cpp | 23 +++++++++++++++++++++++ videodecoder/VideoDecoderBase.h | 2 ++ videodecoder/VideoDecoderDefs.h | 1 + videodecoder/VideoDecoderVP8.cpp | 2 ++ 5 files changed, 30 insertions(+) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 5b8efef..6396440 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -117,6 +117,8 @@ Decode_Status VideoDecoderAVC::decode(VideoDecodeBuffer *buffer) { } } + VideoDecoderBase::setRotationDegrees(buffer); + status = decodeFrame(buffer, data); if (status == DECODE_MULTIPLE_FRAME) { buffer->ext = &mExtensionBuffer; diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index cf20c24..eaacfff 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -55,6 +55,7 @@ VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type) mManageReference(true), mOutputMethod(OUTPUT_BY_PCT), mOutputWindowSize(OUTPUT_WINDOW_SIZE), + mRotationDegrees(0), mNumSurfaces(0), mSurfaceBuffers(NULL), mOutputHead(NULL), @@ -1371,3 +1372,25 @@ void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *CurrentSurface) { } } } + +void VideoDecoderBase::setRotationDegrees(VideoDecodeBuffer *buffer) { + + if (mRotationDegrees != buffer->rotationDegrees) { + ITRACE("set new mRotationDegrees = %d", mRotationDegrees); + VADisplayAttribute rotate; + rotate.type = VADisplayAttribRotation; + rotate.value = VA_ROTATION_NONE; + if (buffer->rotationDegrees == 0) + rotate.value = VA_ROTATION_NONE; + else if (buffer->rotationDegrees == 90) + rotate.value = VA_ROTATION_90; + else if (buffer->rotationDegrees == 180) + rotate.value = VA_ROTATION_180; + else if (buffer->rotationDegrees == 270) + rotate.value = VA_ROTATION_270; + + vaSetDisplayAttributes(mVADisplay, &rotate, 1); + mRotationDegrees = buffer->rotationDegrees; + } +} + diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 6527be6..dcc1548 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -125,6 +125,7 @@ protected: bool mShowFrame; // indicate whether the decoded frame is for display int32_t mOutputWindowSize; // indicate limit of number of outstanding frames for output + int32_t mRotationDegrees; bool mErrReportEnabled; @@ -170,6 +171,7 @@ protected: void setOutputWindowSize(int32_t size) {mOutputWindowSize = (size < OUTPUT_WINDOW_SIZE) ? 
size : OUTPUT_WINDOW_SIZE;} void querySurfaceRenderStatus(VideoSurfaceBuffer* surface); void enableLowDelayMode(bool enable) {mLowDelay = enable;} + void setRotationDegrees(VideoDecodeBuffer *buffer); }; diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 7a903c4..ea1c9f9 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -124,6 +124,7 @@ struct VideoDecodeBuffer { int32_t size; int64_t timeStamp; uint32_t flag; + uint32_t rotationDegrees; VideoExtensionBuffer *ext; }; diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index 145c7b6..34fd866 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -145,6 +145,8 @@ Decode_Status VideoDecoderVP8::decode(VideoDecodeBuffer *buffer) { CHECK_STATUS("startVA"); } + VideoDecoderBase::setRotationDegrees(buffer); + status = decodeFrame(buffer, data); return status; -- cgit v1.2.3 From 8090e0029bb33d79fee837cef9449083e8b8dce8 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Mon, 4 Nov 2013 16:49:46 +0800 Subject: libmix: report missing reference frame in AVC decoder BZ: 147912 1) Record the missing reference frame in updateDPB. 2) Change the variable name to keep the coding style consistent. Change-Id: Ib305e3cfe03a2f060e26730c8aab422d2e7afbae Signed-off-by: Dan Liang Signed-off-by: pingshix --- videodecoder/VideoDecoderAVC.cpp | 3 +++ videodecoder/VideoDecoderBase.cpp | 22 +++++++++++----------- videodecoder/VideoDecoderBase.h | 4 ++-- 3 files changed, 16 insertions(+), 13 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 6396440..67ce66f 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -445,6 +445,9 @@ Decode_Status VideoDecoderAVC::updateDPB(VAPictureParameterBufferH264 *picParam) dpb->surfaceBuffer = findRefSurfaceBuffer(ref); if (dpb->surfaceBuffer == NULL) { ETRACE("Reference frame %d is missing for current frame %d", dpb->poc, getPOC(&(picParam->CurrPic))); + // Error DecodeRefMissing is counted once even there're multiple + mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1; + mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing; if (dpb->poc == getPOC(&(picParam->CurrPic))) { WTRACE("updateDPB: Using the current picture for missing reference."); dpb->surfaceBuffer = mAcquiredBuffer; diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index eaacfff..bb3bef5 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -1345,29 +1345,29 @@ Decode_Status VideoDecoderBase::checkHardwareCapability(VAProfile profile) { return DECODE_SUCCESS; } -void VideoDecoderBase::drainDecodingErrors(VideoErrorBuffer *outErrBuf, VideoRenderBuffer *CurrentSurface) { - if (mErrReportEnabled && outErrBuf && CurrentSurface) { - memcpy(outErrBuf, &(CurrentSurface->errBuf), sizeof(VideoErrorBuffer)); +void VideoDecoderBase::drainDecodingErrors(VideoErrorBuffer *outErrBuf, VideoRenderBuffer *currentSurface) { + if (mErrReportEnabled && outErrBuf && currentSurface) { + memcpy(outErrBuf, &(currentSurface->errBuf), sizeof(VideoErrorBuffer)); - CurrentSurface->errBuf.errorNumber = 0; - CurrentSurface->errBuf.timeStamp = INVALID_PTS; + currentSurface->errBuf.errorNumber = 0; + currentSurface->errBuf.timeStamp = INVALID_PTS; } } -void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *CurrentSurface) { +void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *currentSurface) { 
VAStatus ret; if (mErrReportEnabled) { - CurrentSurface->errBuf.timeStamp = CurrentSurface->timeStamp; + currentSurface->errBuf.timeStamp = currentSurface->timeStamp; // TODO: is 10 a suitable number? VASurfaceDecodeMBErrors *err_drv_output; - ret = vaQuerySurfaceError(mVADisplay, CurrentSurface->surface, VA_STATUS_ERROR_DECODING_ERROR, (void **)&err_drv_output); + ret = vaQuerySurfaceError(mVADisplay, currentSurface->surface, VA_STATUS_ERROR_DECODING_ERROR, (void **)&err_drv_output); if (ret) return; - for (int i = CurrentSurface->errBuf.errorNumber; i < MAX_ERR_NUM - 1; i++) { + for (int i = currentSurface->errBuf.errorNumber; i < MAX_ERR_NUM - 1; i++) { if (err_drv_output[i].status != -1) { - CurrentSurface->errBuf.errorNumber++; - CurrentSurface->errBuf.errorArray[i].type = (VideoDecodeErrorType)err_drv_output[i].decode_error_type; + currentSurface->errBuf.errorNumber++; + currentSurface->errBuf.errorArray[i].type = (VideoDecodeErrorType)err_drv_output[i].decode_error_type; } } } diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index dcc1548..6bd0d1b 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -98,8 +98,8 @@ protected: private: Decode_Status mapSurface(void); void initSurfaceBuffer(bool reset); - void drainDecodingErrors(VideoErrorBuffer *outErrBuf, VideoRenderBuffer *CurrentSurface); - void fillDecodingErrors(VideoRenderBuffer *CurrentSurface); + void drainDecodingErrors(VideoErrorBuffer *outErrBuf, VideoRenderBuffer *currentSurface); + void fillDecodingErrors(VideoRenderBuffer *currentSurface); bool mInitialized; pthread_mutex_t mLock; -- cgit v1.2.3 From ed17385741e23e4cc49023e4f83de116bcddac73 Mon Sep 17 00:00:00 2001 From: Andy Qiu Date: Tue, 5 Nov 2013 16:50:27 -0800 Subject: cleanup rotation degree settings. BZ: 149979 mRotationDegrees is not initialized. The setRotationDegrees function is cleaned up so it can be used during VA setup. 
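For reference, a minimal sketch (not part of this patch) of the degree-to-attribute mapping that setRotationDegrees applies; the helper name mapToVaRotation is illustrative, and the VA_ROTATION_* constants come from libva's va.h:

    // Degrees outside {0, 90, 180, 270} fall back to VA_ROTATION_NONE,
    // mirroring the if/else chain in setRotationDegrees().
    static int mapToVaRotation(int32_t degrees) {
        switch (degrees) {
            case 90:  return VA_ROTATION_90;
            case 180: return VA_ROTATION_180;
            case 270: return VA_ROTATION_270;
            default:  return VA_ROTATION_NONE;
        }
    }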
Change-Id: I0f8a62f4694f6048de131d4a170d5b1cf70878e7 Signed-off-by: Andy Qiu Signed-off-by: pingshix --- videodecoder/VideoDecoderAVC.cpp | 2 +- videodecoder/VideoDecoderBase.cpp | 52 +++++++++------------- videodecoder/VideoDecoderBase.h | 2 +- videodecoder/VideoDecoderVP8.cpp | 2 +- .../merrifield/VideoDecoderAVCSecure.cpp | 2 +- 5 files changed, 26 insertions(+), 34 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 67ce66f..45e7e9d 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -117,7 +117,7 @@ Decode_Status VideoDecoderAVC::decode(VideoDecodeBuffer *buffer) { } } - VideoDecoderBase::setRotationDegrees(buffer); + VideoDecoderBase::setRotationDegrees(buffer->rotationDegrees); status = decodeFrame(buffer, data); if (status == DECODE_MULTIPLE_FRAME) { diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index bb3bef5..c4d961a 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -935,19 +935,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i CHECK_STATUS("mapSurface") } - VADisplayAttribute rotate; - rotate.type = VADisplayAttribRotation; - rotate.value = VA_ROTATION_NONE; - if (mConfigBuffer.rotationDegrees == 0) - rotate.value = VA_ROTATION_NONE; - else if (mConfigBuffer.rotationDegrees == 90) - rotate.value = VA_ROTATION_90; - else if (mConfigBuffer.rotationDegrees == 180) - rotate.value = VA_ROTATION_180; - else if (mConfigBuffer.rotationDegrees == 270) - rotate.value = VA_ROTATION_270; - - vaStatus = vaSetDisplayAttributes(mVADisplay, &rotate, 1); + setRotationDegrees(mConfigBuffer.rotationDegrees); mVAStarted = true; return DECODE_SUCCESS; @@ -1373,24 +1361,28 @@ void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *currentSurface) { } } -void VideoDecoderBase::setRotationDegrees(VideoDecodeBuffer *buffer) { +void VideoDecoderBase::setRotationDegrees(int32_t rotationDegrees) { + if (mRotationDegrees == rotationDegrees) { + return; + } - if (mRotationDegrees != buffer->rotationDegrees) { - ITRACE("set new mRotationDegrees = %d", mRotationDegrees); - VADisplayAttribute rotate; - rotate.type = VADisplayAttribRotation; + ITRACE("set new rotation degree: %d", rotationDegrees); + VADisplayAttribute rotate; + rotate.type = VADisplayAttribRotation; + rotate.value = VA_ROTATION_NONE; + if (rotationDegrees == 0) rotate.value = VA_ROTATION_NONE; - if (buffer->rotationDegrees == 0) - rotate.value = VA_ROTATION_NONE; - else if (buffer->rotationDegrees == 90) - rotate.value = VA_ROTATION_90; - else if (buffer->rotationDegrees == 180) - rotate.value = VA_ROTATION_180; - else if (buffer->rotationDegrees == 270) - rotate.value = VA_ROTATION_270; - - vaSetDisplayAttributes(mVADisplay, &rotate, 1); - mRotationDegrees = buffer->rotationDegrees; - } + else if (rotationDegrees == 90) + rotate.value = VA_ROTATION_90; + else if (rotationDegrees == 180) + rotate.value = VA_ROTATION_180; + else if (rotationDegrees == 270) + rotate.value = VA_ROTATION_270; + + VAStatus ret = vaSetDisplayAttributes(mVADisplay, &rotate, 1); + if (ret) { + ETRACE("Failed to set rotation degree."); + } + mRotationDegrees = rotationDegrees; } diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 6bd0d1b..b111bb3 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -171,7 +171,7 @@ protected: void setOutputWindowSize(int32_t size) {mOutputWindowSize = (size < 
OUTPUT_WINDOW_SIZE) ? size : OUTPUT_WINDOW_SIZE;} void querySurfaceRenderStatus(VideoSurfaceBuffer* surface); void enableLowDelayMode(bool enable) {mLowDelay = enable;} - void setRotationDegrees(VideoDecodeBuffer *buffer); + void setRotationDegrees(int32_t rotationDegrees); }; diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index 34fd866..9bfd15b 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -145,7 +145,7 @@ Decode_Status VideoDecoderVP8::decode(VideoDecodeBuffer *buffer) { CHECK_STATUS("startVA"); } - VideoDecoderBase::setRotationDegrees(buffer); + VideoDecoderBase::setRotationDegrees(buffer->rotationDegrees); status = decodeFrame(buffer, data); diff --git a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp index cd8b2ca..ab7bc7e 100644 --- a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp @@ -137,7 +137,7 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { // |...encrypted video bitstream (16 bytes aligned)...| 4 bytes of header size |...NALU headers..| pByteStream = buffer->data + buffer->size + 4; sizeLeft = *(int32_t *)(buffer->data + buffer->size); - ITRACE("%s sizeLeft: %d buffer->size: %#x", __func__, sizeLeft, buffer->size); + VTRACE("%s sizeLeft: %d buffer->size: %#x", __func__, sizeLeft, buffer->size); mInputBuffer = buffer->data; } else { status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream); -- cgit v1.2.3 From da5f5dac4e84d17c9bc7c9238490a82488373ab2 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Tue, 5 Nov 2013 16:21:04 +0800 Subject: libmix: refine error report BZ: 147912 The previous implementation will count one less MB error. Change-Id: I3a7b5a19282928dd0f70ae999bf13433d9ae79f7 Signed-off-by: Dan Liang Signed-off-by: pingshix --- videodecoder/VideoDecoderBase.cpp | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index c4d961a..9fd96f9 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -1348,14 +1348,18 @@ void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *currentSurface) { if (mErrReportEnabled) { currentSurface->errBuf.timeStamp = currentSurface->timeStamp; // TODO: is 10 a suitable number? - VASurfaceDecodeMBErrors *err_drv_output; + VASurfaceDecodeMBErrors *err_drv_output = NULL; ret = vaQuerySurfaceError(mVADisplay, currentSurface->surface, VA_STATUS_ERROR_DECODING_ERROR, (void **)&err_drv_output); - if (ret) + if (ret || !err_drv_output) { + WTRACE("vaQuerySurfaceError failed."); return; - for (int i = currentSurface->errBuf.errorNumber; i < MAX_ERR_NUM - 1; i++) { + } + + int offset = 0x1 & currentSurface->errBuf.errorNumber;// offset is either 0 or 1 + for (int i = 0; i < MAX_ERR_NUM - offset; i++) { if (err_drv_output[i].status != -1) { currentSurface->errBuf.errorNumber++; - currentSurface->errBuf.errorArray[i].type = (VideoDecodeErrorType)err_drv_output[i].decode_error_type; + currentSurface->errBuf.errorArray[i + offset].type = (VideoDecodeErrorType)err_drv_output[i].decode_error_type; } } } -- cgit v1.2.3 From eaa20ed5ea4f42aac369cfc39c23d45fd135fd9a Mon Sep 17 00:00:00 2001 From: "SUN,Jing" Date: Wed, 6 Nov 2013 14:37:59 +0800 Subject: Added a JPEG HW encoder's wrapper into libmix. BZ: 150063 Issue: facilitating LibVA JPEG HW capability's users. 
Solution: added that capability's wrapper into libmix. Change-Id: I6218db6c1459278a0d4b89ef5bafe42288a11de5 Signed-off-by: SUN,Jing --- Android.mk | 3 + imageencoder/Android.mk | 44 ++++ imageencoder/ImageEncoder.cpp | 594 ++++++++++++++++++++++++++++++++++++++++++ imageencoder/ImageEncoder.h | 90 +++++++ imageencoder/test/main.cpp | 559 +++++++++++++++++++++++++++++++++++++++ 5 files changed, 1290 insertions(+) create mode 100644 imageencoder/Android.mk create mode 100644 imageencoder/ImageEncoder.cpp create mode 100644 imageencoder/ImageEncoder.h create mode 100644 imageencoder/test/main.cpp diff --git a/Android.mk b/Android.mk index 1b7383d..2639e2b 100644 --- a/Android.mk +++ b/Android.mk @@ -9,6 +9,9 @@ include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/imagedecoder/Android.mk +ifneq ($(TARGET_BOARD_PLATFORM),baytrail) +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/imageencoder/Android.mk +endif ifeq ($(ENABLE_IMG_GRAPHICS),) include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videovpp/Android.mk endif diff --git a/imageencoder/Android.mk b/imageencoder/Android.mk new file mode 100644 index 0000000..ff5b01b --- /dev/null +++ b/imageencoder/Android.mk @@ -0,0 +1,44 @@ +############################################### +# libmix_imageencoder # +############################################### +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= ImageEncoder.cpp + +LOCAL_C_INCLUDES += \ + $(TARGET_OUT_HEADERS)/libva + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + libutils \ + liblog \ + libva \ + libva-android \ + libva-tpi + +LOCAL_COPY_HEADERS_TO := libmix_imageencoder + +LOCAL_COPY_HEADERS := ImageEncoder.h + +LOCAL_MODULE := libmix_imageencoder +LOCAL_MODULE_TAGS := optional +include $(BUILD_SHARED_LIBRARY) + + +############################################### +# libmix_imageencoder's Test Application # +############################################### +include $(CLEAR_VARS) +LOCAL_SRC_FILES:= test/main.cpp + +LOCAL_C_INCLUDES += \ + $(TARGET_OUT_HEADERS)/libva \ + $(TARGET_OUT_HEADERS)/libmix_imageencoder + +LOCAL_SHARED_LIBRARIES := \ + libmix_imageencoder + +LOCAL_MODULE := libmix_imageencoder_tester +LOCAL_MODULE_TAGS := optional +include $(BUILD_EXECUTABLE) diff --git a/imageencoder/ImageEncoder.cpp b/imageencoder/ImageEncoder.cpp new file mode 100644 index 0000000..ae9232b --- /dev/null +++ b/imageencoder/ImageEncoder.cpp @@ -0,0 +1,594 @@ +/*#define LOG_NDEBUG 0*/ +#define LOG_TAG "IntelImageEncoder" + +#include +#include "ImageEncoder.h" + +IntelImageEncoder::IntelImageEncoder(void) +{ + /* Initialize variables */ + encoder_status = LIBVA_UNINITIALIZED; + quality = INTEL_IMAGE_ENCODER_DEFAULT_QUALITY; + + va_dpy = NULL; + memset((void *)&va_configattrib, 0, sizeof(va_configattrib)); + + images_count = 0; + memset((void *)va_surfaceid, 0x0, sizeof(va_surfaceid)); + memset((void *)surface_width, 0x0, sizeof(surface_width)); + memset((void *)surface_height, 0x0, sizeof(surface_height)); + memset((void *)surface_fourcc, 0x0, sizeof(surface_fourcc)); + + va_configid = 0; + va_contextid = 0; + context_width = 0; + context_height = 0; + context_fourcc = 0; + va_codedbufferid = 0; + coded_buf_size = 0; + + reserved_image_seq = -1; + + LOGV("IntelImageEncoder: done\n"); +} + +int IntelImageEncoder::initializeEncoder(void) +{ + int i =0; + VAStatus va_status; + int display_num = 0; + int major_version 
= -1, minor_version = -1; + const char *driver_version = NULL; + VAEntrypoint va_entrypoints[5]; + int va_entrypoints_count = 0; + + if (encoder_status != LIBVA_UNINITIALIZED) { + LOGE("initializeEncoder: already initialized!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + /* Get display */ + va_dpy = vaGetDisplay(&display_num); + if (NULL == va_dpy) { + LOGE("initializeEncoder: vaGetDisplay failed!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + /* Initialize */ + va_status = vaInitialize(va_dpy, &major_version, &minor_version); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("initializeEncoder: vaInitialize failed (%d)!\n", va_status); + return va_status; + } + LOGV("initializeEncoder: LibVA version: %d.%d\n", major_version, minor_version); + + /* Query driver version */ + driver_version = vaQueryVendorString(va_dpy); + if (NULL == driver_version) { + LOGE("initializeEncoder: vaQueryVendorString failed!\n"); + vaTerminate(va_dpy); + va_dpy = NULL; + return VA_STATUS_ERROR_OPERATION_FAILED; + } + LOGV("initializeEncoder: Driver version: %s\n", driver_version); + + /* Query JPEG baseline encoding's entrypoint */ + va_status = vaQueryConfigEntrypoints(va_dpy, VAProfileJPEGBaseline, va_entrypoints, + &va_entrypoints_count); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("initializeEncoder: vaQueryConfigEntrypoints failed (%d)!\n", va_status); + vaTerminate(va_dpy); + va_dpy = NULL; + return va_status; + } + + for (i=0; i < va_entrypoints_count; ++i) { + if (VAEntrypointEncPicture == va_entrypoints[i]) + break; + } + if (i == va_entrypoints_count) { + LOGE("initializeEncoder: no JPEG Baseline encoding entrypoint was found!\n"); + vaTerminate(va_dpy); + va_dpy = NULL; + return VA_STATUS_ERROR_UNIMPLEMENTED; + } + + /* Get supported configuration attributes */ + va_configattrib.type = VAConfigAttribRTFormat; + va_status = vaGetConfigAttributes(va_dpy, VAProfileJPEGBaseline, VAEntrypointEncPicture, + &va_configattrib, 1); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("initializeEncoder: vaGetConfigAttributes failed (%d)!\n", va_status); + vaTerminate(va_dpy); + va_dpy = NULL; + memset((void *)&va_configattrib, 0x0, sizeof(va_configattrib)); + return va_status; + } + + encoder_status = LIBVA_INITIALIZED; + LOGV("initializeEncoder: done\n"); + return VA_STATUS_SUCCESS; +} + +int IntelImageEncoder::createSourceSurface(int source_type, void *source_buffer, + unsigned int width,unsigned int height, + unsigned int stride, unsigned int fourcc, + int *image_seqp) +{ + int i =0; + VAStatus va_status; + VASurfaceAttribExternalBuffers va_surfacebuf; + VASurfaceAttrib va_surfaceattrib[2]; + unsigned long ptr = 0; + + if (LIBVA_UNINITIALIZED == encoder_status) { + LOGE("createSourceSurface: uninitialized, not ready to create surface!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + if (images_count > INTEL_IMAGE_ENCODER_MAX_BUFFERS) { + LOGE("createSourceSurface: the max supported count was already reached!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + if ((source_type != SURFACE_TYPE_USER_PTR) && + (source_type != SURFACE_TYPE_GRALLOC)) { + LOGE("createSourceSurface: buffer type 0x%x was not supported!\n", source_type); + return VA_STATUS_ERROR_INVALID_PARAMETER; + } + + if (NULL == source_buffer) { + LOGE("createSourceSurface: the input buffer address can't be null!\n"); + return VA_STATUS_ERROR_INVALID_PARAMETER; + } else if ((unsigned int)source_buffer & 0xFFF) { + LOGE("createSourceSurface: the input buffer wasn't aligned to 4096!\n"); + return 
VA_STATUS_ERROR_INVALID_PARAMETER; + } + + if (stride % INTEL_IMAGE_ENCODER_REQUIRED_STRIDE) { + LOGE("createSourceSurface: the stride value %d is not alligned to %d!\n", + stride, INTEL_IMAGE_ENCODER_REQUIRED_STRIDE); + return VA_STATUS_ERROR_INVALID_PARAMETER; + } + + if ((width % 2) || (height % 2)) { + LOGE("createSourceSurface: only even dimensions were supportd!\n"); + return VA_STATUS_ERROR_RESOLUTION_NOT_SUPPORTED; + } + + if (((fourcc != VA_RT_FORMAT_YUV420) && (fourcc != VA_RT_FORMAT_YUV422)) || + !(fourcc & va_configattrib.value)) { + /* Currently supported image formats: + * #define VA_RT_FORMAT_YUV420 0x00000001 + * #define VA_RT_FORMAT_YUV422 0x00000002 + */ + LOGE("createSourceSurface: the image format %d was not supported!\n", fourcc); + return VA_STATUS_ERROR_INVALID_IMAGE_FORMAT; + } + + /* Find the first available image sequential number */ + for (i=0; i= LIBVA_CONTEXT_CREATED) { + LOGE("createContext: there already is an active context!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + if ((first_image_seq < 0) || + (first_image_seq >= INTEL_IMAGE_ENCODER_MAX_BUFFERS) || + (0 == va_surfaceid[first_image_seq])) { + LOGE("createContext: invalid image sequential number!\n"); + return VA_STATUS_ERROR_INVALID_PARAMETER; + } + + context_width = surface_width[first_image_seq]; + context_height = surface_height[first_image_seq]; + context_fourcc = surface_fourcc[first_image_seq]; + + /* Create a config */ + va_cur_configattrib.type = VAConfigAttribRTFormat; + va_cur_configattrib.value = context_fourcc; + va_status = vaCreateConfig(va_dpy, VAProfileJPEGBaseline, VAEntrypointEncPicture, + &va_cur_configattrib, 1, &va_configid); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("createContext: vaCreateConfig failed (%d)!\n", va_status); + va_configid = 0; + return va_status; + } + + /* Create a context */ + va_status = vaCreateContext(va_dpy, va_configid, context_width, context_height, + VA_PROGRESSIVE, &va_surfaceid[first_image_seq], 1, &va_contextid); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("createContext: vaCreateContext failed (%d)!\n", va_status); + va_contextid = 0; + vaDestroyConfig(va_dpy, va_configid); + va_configid = 0; + return va_status; + } + + + /* Create a coded buffer */ + coded_buf_size = (((context_width+15)/16)*((context_height+15)/16)*160) + 640; + coded_buf_size = (coded_buf_size+0xf) & ~0xf; + va_status = vaCreateBuffer(va_dpy, va_contextid, VAEncCodedBufferType, coded_buf_size, + 1, NULL, &va_codedbufferid); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("createContext: vaCreateBuffer VAEncCodedBufferType failed (%d)!\n", va_status); + vaDestroyContext(va_dpy, va_contextid); + va_contextid = 0; + vaDestroyConfig(va_dpy, va_configid); + va_configid = 0; + va_codedbufferid = 0; + return va_status; + } + + *max_coded_sizep = coded_buf_size; + + encoder_status = LIBVA_CONTEXT_CREATED; + LOGV("createContext: done\n"); + return VA_STATUS_SUCCESS; +} + +int IntelImageEncoder::setQuality(unsigned int new_quality) +{ + if (quality == new_quality) { + return VA_STATUS_SUCCESS; + } + + if (LIBVA_ENCODING == encoder_status) { + LOGE("setQuality: can't update quality while encoding!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + if ((new_quality > INTEL_IMAGE_ENCODER_MAX_QUALITY) || + (new_quality < INTEL_IMAGE_ENCODER_MIN_QUALITY)) { + LOGE("setQuality: invalid new quality value, not updated!\n"); + return VA_STATUS_ERROR_INVALID_PARAMETER; + } + + quality = new_quality; + + LOGV("setQuality: quality was updated to %d\n", quality); + return 
VA_STATUS_SUCCESS; +} + +int IntelImageEncoder::encode(int image_seq, unsigned int new_quality) +{ + VAStatus va_status; + VAEncPictureParameterBufferJPEG va_picparabuffer; + VABufferID va_picparabufferid = 0; + + if (encoder_status < LIBVA_CONTEXT_CREATED) { + LOGE("encode: no context created to perform encoding!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } else if (encoder_status > LIBVA_CONTEXT_CREATED) { + LOGE("encode: there already is an active encoding task!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + if ((image_seq < 0) || + (image_seq >= INTEL_IMAGE_ENCODER_MAX_BUFFERS) || + (0 == va_surfaceid[image_seq])) { + LOGE("encode: invalid image sequential number!\n"); + return VA_STATUS_ERROR_INVALID_PARAMETER; + } else if ((context_width != surface_width[image_seq]) || + (context_height != surface_height[image_seq]) || + (context_fourcc != surface_fourcc[image_seq])) { + LOGE("encode: the input image didn't fit in the current context!\n"); + return VA_STATUS_ERROR_INVALID_PARAMETER; + } + + /* Update quality */ + if (setQuality(new_quality) != VA_STATUS_SUCCESS) { + LOGE("encode: the input quality value was invalid, encoding aborted!\n"); + return VA_STATUS_ERROR_INVALID_PARAMETER; + } + + /* Begin picture */ + va_status = vaBeginPicture(va_dpy, va_contextid, va_surfaceid[image_seq]); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("encode: vaBeginPicture failed (%d)!\n", va_status); + return va_status; + } + + /* Create a picture-parameter buffer */ + va_picparabuffer.picture_width = context_width; + va_picparabuffer.picture_height = context_height; + va_picparabuffer.reconstructed_picture= 0; + va_picparabuffer.coded_buf = va_codedbufferid; + va_picparabuffer.pic_flags.bits.profile = 0; /* Baseline */ + va_picparabuffer.pic_flags.bits.progressive = 0; /* Sequential */ + va_picparabuffer.pic_flags.bits.huffman = 1; /* Huffman */ + va_picparabuffer.pic_flags.bits.interleaved = 0; /* Non-interleaved */ + va_picparabuffer.pic_flags.bits.differential = 0; /* Non-differential */ + va_picparabuffer.sample_bit_depth = 8; /* 8-bits */ + va_picparabuffer.num_components = 3; /* 3-components */ + va_picparabuffer.quality = quality; /* JPEG ENC quality */ + va_status = vaCreateBuffer(va_dpy, va_contextid, VAEncPictureParameterBufferType, + sizeof(va_picparabuffer), 1, &va_picparabuffer,&va_picparabufferid); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("encode: vaCreateBuffer VAEncPictureParameterBufferType failed (%d)!\n", va_status); + return va_status; + } + + /* Render picture */ + va_status = vaRenderPicture(va_dpy, va_contextid, &va_picparabufferid, 1); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("encode: vaRenderPicture failed (%d)!\n", va_status); + vaDestroyBuffer(va_dpy, va_picparabufferid); + return va_status; + } + + /* Destroy the used picture-parameter buffer */ + vaDestroyBuffer(va_dpy, va_picparabufferid); + + /* End picture */ + va_status = vaEndPicture(va_dpy, va_contextid); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("encode: vaEndPicture failed (%d)!\n", va_status); + vaDestroyBuffer(va_dpy, va_picparabufferid); + return va_status; + } + + reserved_image_seq = image_seq; + encoder_status = LIBVA_ENCODING; + LOGV("encode: done\n"); + return VA_STATUS_SUCCESS; +} + +int IntelImageEncoder::getCoded(void *user_coded_buf, + unsigned int user_coded_buf_size, + unsigned int *coded_data_sizep) +{ + VAStatus va_status; + VACodedBufferSegment *va_codedbuffersegment = NULL; + + if ((NULL == user_coded_buf) || + (NULL == coded_data_sizep)) { + LOGE("getCoded: 
invalid NULL pointer as input paramter!\n"); + return VA_STATUS_ERROR_INVALID_PARAMETER; + } + + if (user_coded_buf_size < coded_buf_size) { + LOGE("getCoded: the coded buffer was too small!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + if (encoder_status != LIBVA_ENCODING) { + LOGE("getCoded: no encoding active to get coded data!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + if (0 == va_surfaceid[reserved_image_seq]) { + LOGE("getCoded: invalid image, probably already destroyed!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + /* Sync surface */ + va_status = vaSyncSurface(va_dpy, va_surfaceid[reserved_image_seq]); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("getCoded: vaSyncSurface failed (%d)!\n", va_status); + reserved_image_seq = -1; + encoder_status = LIBVA_CONTEXT_CREATED; + return va_status; + } + + /* Map the coded buffer */ + va_status = vaMapBuffer(va_dpy, va_codedbufferid, (void **)&va_codedbuffersegment); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("getCoded: vaMapBuffer failed (%d)!\n", va_status); + reserved_image_seq = -1; + encoder_status = LIBVA_CONTEXT_CREATED; + return va_status; + } + + /* Mark the coded buffer empty */ + *coded_data_sizep = 0; + + /* Get the total size of coded data */ + while (va_codedbuffersegment != NULL) { + memcpy((void *)((unsigned int)user_coded_buf+*coded_data_sizep), + va_codedbuffersegment->buf, + va_codedbuffersegment->size); + *coded_data_sizep += va_codedbuffersegment->size; + va_codedbuffersegment = (VACodedBufferSegment *)va_codedbuffersegment->next; + } + + va_status = vaUnmapBuffer(va_dpy, va_codedbufferid); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("getCoded: vaUnmapBuffer failed (%d)!\n", va_status); + } + + reserved_image_seq = -1; + encoder_status = LIBVA_CONTEXT_CREATED; + + LOGV("getCoded: done\n"); + return va_status; +} + +int IntelImageEncoder::destroySourceSurface(int image_seq) +{ + VAStatus va_status; + + if ((image_seq < 0) || ((unsigned int)image_seq >= images_count) || + (0 == va_surfaceid[image_seq])) { + LOGE("destroySourceSurface: invalid image sequential number!\n"); + return VA_STATUS_ERROR_INVALID_PARAMETER; + } else if (image_seq == reserved_image_seq) { + LOGE("destroySourceSurface: Image %d was under encoding and can't be destroyed!\n", + image_seq); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + if (LIBVA_UNINITIALIZED == encoder_status) { + LOGE("destroySourceSurface: uninitialized, not ready to destroy surface!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + /* Destroy a source surface */ + va_status = vaDestroySurfaces(va_dpy, &va_surfaceid[image_seq], 1); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("destroySourceSurface: vaDestroySurfaces failed (%d)!\n", va_status); + } + + va_surfaceid[image_seq] = 0; + surface_width[image_seq] = 0; + surface_height[image_seq] = 0; + surface_fourcc[image_seq] = 0; + + --images_count; + + return va_status; +} + +int IntelImageEncoder::destroyContext(void) +{ + VAStatus va_status, va_final_status; + + if (0 == va_contextid) { + LOGE("destroyContext: no context to destroy!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + if (LIBVA_ENCODING == encoder_status) { + LOGE("destroyContext: encoding was ongoing, can't destroy context!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + /* Destroy the coded buffer */ + va_status = vaDestroyBuffer(va_dpy, va_codedbufferid); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("createContext: vaDestroyBuffer VAEncCodedBufferType failed (%d)!\n", va_status); + } + 
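+ /* Note: teardown continues even if one step fails; each step's status is merged into va_final_status so that every resource is released and a single combined status is returned. */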
va_final_status = va_status; + va_codedbufferid = 0; + coded_buf_size = 0; + + /* Destroy context */ + va_status = vaDestroyContext(va_dpy, va_contextid); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("destroyContext: vaDestroyContext failed (%d)!\n", va_status); + } + va_final_status |= va_status; + va_contextid = 0; + context_width = 0; + context_height = 0; + context_fourcc = 0; + + /* Destroy config */ + va_status = vaDestroyConfig(va_dpy, va_configid); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("destroyContext: vaDestroyConfig failed (%d)!\n", va_status); + } + va_final_status |= va_status; + va_configid = 0; + + encoder_status = LIBVA_INITIALIZED; + + LOGV("destroyContext: done\n"); + return va_final_status; +} + +int IntelImageEncoder::deinitializeEncoder(void) +{ + int i; + VAStatus va_status; + + if (NULL == va_dpy) { + LOGE("deinitializeEncoder: no LibVA display to deinitialized!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } + + if (LIBVA_ENCODING == encoder_status) { + LOGE("deinitializeEncoder: encoding was ongoing, can't deinitialize LibVA!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } else if (LIBVA_CONTEXT_CREATED == encoder_status) { + /* Destroy context if it exists */ + destroyContext(); + } + + if (images_count > 0) { + for (i=0; i +#include +#include +#include +#include +#include +#include + +#define INTEL_IMAGE_ENCODER_DEFAULT_QUALITY 90 +#define INTEL_IMAGE_ENCODER_MAX_QUALITY 100 +#define INTEL_IMAGE_ENCODER_MIN_QUALITY 1 +#define INTEL_IMAGE_ENCODER_MAX_BUFFERS 64 +#define INTEL_IMAGE_ENCODER_REQUIRED_STRIDE 64 +#ifndef VA_FOURCC_YV16 +#define VA_FOURCC_YV16 0x36315659 +#endif +#define SURFACE_TYPE_USER_PTR 0x00000004 +#define SURFACE_TYPE_GRALLOC 0x00100000 + +class IntelImageEncoder { +public: + IntelImageEncoder(void); + ~IntelImageEncoder(void) {}; + int initializeEncoder(void); + int createSourceSurface(int source_type, void *source_buffer, + unsigned int width,unsigned int height, + unsigned int stride, unsigned int fourcc, + int *image_seqp); + int createContext(int first_image_seq, unsigned int *max_coded_sizep); + int createContext(unsigned int *max_coded_sizep) + { + return this->createContext(0, max_coded_sizep); + } + int setQuality(unsigned int new_quality); + int encode(int image_seq, unsigned int new_quality); + int encode(int image_seq) + { + return this->encode(image_seq, quality); + } + int encode(void) + { + return this->encode(0, quality); + } + int getCoded(void *user_coded_buf, + unsigned int user_coded_buf_size, + unsigned int *coded_data_sizep); + int destroySourceSurface(int image_seq); + int destroyContext(void); + int deinitializeEncoder(void); + +private: + typedef enum { + LIBVA_UNINITIALIZED = 0, + LIBVA_INITIALIZED, + LIBVA_CONTEXT_CREATED, + LIBVA_ENCODING, + }IntelImageEncoderStatus; + + /* Valid since LIBVA_UNINITIALIZED */ + IntelImageEncoderStatus encoder_status; + unsigned int quality; + + /* Valid Since LIBVA_INITIALIZED */ + VADisplay va_dpy; + VAConfigAttrib va_configattrib; + + /* Valid if a surface is created */ + unsigned int images_count; + VASurfaceID va_surfaceid[INTEL_IMAGE_ENCODER_MAX_BUFFERS]; + unsigned int surface_width[INTEL_IMAGE_ENCODER_MAX_BUFFERS]; + unsigned int surface_height[INTEL_IMAGE_ENCODER_MAX_BUFFERS]; + unsigned int surface_fourcc[INTEL_IMAGE_ENCODER_MAX_BUFFERS]; + + /* Valid since LIBVA_CONTEXT_CREATED */ + VAConfigID va_configid; + VAContextID va_contextid; + unsigned int context_width; + unsigned int context_height; + unsigned int context_fourcc; + VABufferID va_codedbufferid; + 
unsigned int coded_buf_size; + + /* Valid since LIBVA_ENCODING */ + int reserved_image_seq; +}; + +#endif /* __LIBMIX_INTEL_IMAGE_ENCODER_H__ */ diff --git a/imageencoder/test/main.cpp b/imageencoder/test/main.cpp new file mode 100644 index 0000000..1433c04 --- /dev/null +++ b/imageencoder/test/main.cpp @@ -0,0 +1,559 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "ImageEncoder.h" + +inline unsigned long long int current_time(/*bool fixed*/) +{ + struct timeval tv; + gettimeofday(&tv, NULL); + return (((unsigned long long)tv.tv_usec+(unsigned long long)tv.tv_sec*1000000) * 1000); +} + +#define PERF_DEF(counter) unsigned long long int COUNTER_##counter=0; +#define PERF_START(counter, fixed) { COUNTER_##counter = current_time(); } +#define PERF_STOP(counter, fixed) { COUNTER_##counter = current_time() - COUNTER_##counter; } +#define PERF_SET(counter, value) { COUNTER_##counter = value; } +#define PERF_GET(counter) (COUNTER_##counter) + +#define MAX_WIDTH 5120 +#define MAX_HEIGHT 5120 +#define DEFAULT_QUALITY 90 +#define DEFAULT_BURST 1 +#define YUV420_SAMPLE_SIZE 1.5 + +static void usage(const char* pname) +{ + fprintf(stderr, + "\n USAGE: %s -source [path] -width [value] -height [value] \n" + " -output [path] -burst [value] -quality [value] -fix\n\n" + " -source: declaring the source's file path.\n" + " -width: declaring the source's raw data width (0, 65536].\n" + " -height: declaring the source's raw data height (0, 65536].\n" + " -output: specifying the output JPEG's file path (.JPG or .jpg).\n" + " -burst (optional): enabling continuous encoding times (0, 50].\n" + " -quality (optional): setting image quality [0, 100].\n" + " -fix (optional): fixing CPU frequency for evaluating performance.\n\n" + ,pname); +} + +static bool match_key (char *arg, const char *keyword, int minchars) +{ + register int ca, ck; + register int nmatched = 0; + + while ((ca = *arg++) != '\0') { + if ((ck = *keyword++) == '\0') + return false; /* arg longer than keyword, mismatch */ + if (isupper(ca)) /* force arg to lcase (assume ck is already) */ + ca = tolower(ca); + if (ca != ck) + return false; /* mismatch */ + nmatched++; /* count matched characters */ + } + + if (nmatched < minchars) + return false; /* arg shorter than keyword, mismatch */ + + return true; /* Match */ +} + +int main(int argc, char** argv) +{ + const char *pname = argv[0]; + int argn; + char *arg; + + /* Parameter variables */ + char *source_name = NULL; + char *output_name = (char *)"./output.jpg"; + int quality = DEFAULT_QUALITY; + int burst = DEFAULT_BURST; + int width = 0, height = 0; + bool fix_cpu_frequency = false; + unsigned int fourcc_format = 0; + + /* Internal variables*/ + int i, j; + int stride = 0; + char final_output_name[128] = "\0"; + int source_fd = -1; + int output_fd = -1; + unsigned int source_size = 0, source_buffer_size = 0; + unsigned int output_size = 0, output_buffer_size = 0; + unsigned int read_size = 0, write_size = 0; + void *source_buffer = NULL, *output_buffer = NULL; + void *aligned_source_buffer = NULL, *current_position = NULL; + void *surface_buffer = NULL, *surface_buffer_ptr = NULL; + int status; + int image_seq = -1; + IntelImageEncoder image_encoder; + + /* For CPU frequency fixing */ + FILE *cpu_online_nr_fd = NULL, *cpu_available_max_fd = NULL, *cpu_available_min_fd = NULL; + FILE *cpu_scaling_max_fd = NULL, *cpu_scaling_min_fd = NULL, *cpu_cur_fd = NULL; + unsigned int cpu_online_nr = 0, 
cpu_available_max = 1000000, cpu_available_min = 0, cpu_cur = 0; + + /* For performance logging */ + PERF_DEF(init_driver_time); + PERF_DEF(create_source_time); + PERF_DEF(create_context_time); + PERF_DEF(prepare_encoding_time); + PERF_DEF(encode_time); + PERF_DEF(term_driver_time); + unsigned long long int total_time = 0; + double compression_rate = 0; + + /* Get the input parameters */ + if (1 >= argc) { + usage(pname); /* No argument */ + return 1; + } + + for (argn = 1; argn < argc; argn++) { + arg = argv[argn]; + if (*arg != '-') { + /* Every argument should begin with a '-' */ + usage(pname); + fprintf(stderr, "Every argument should begin with a '-'!\n"); + return 1; + } + arg++; + + if (match_key(arg, "width", strlen("width"))) { + if (++argn >= argc) { + usage(pname); /* "-width" should be followed by a specified width value*/ + fprintf(stderr, "-width should be followed by a specified width value!\n"); + return 1; + } + + if ((1 != sscanf(argv[argn], "%d", &width)) || (width <= 0)) { + usage(pname); /* Invalid width */ + fprintf(stderr, "Invalid width!\n"); + return 1; + } + + if ((width>MAX_WIDTH) || (width%2)) { + usage(pname); /* Unsupported width */ + fprintf(stderr, "Unsupported width: %d!\n", width); + return 1; + } + } else if (match_key(arg, "height", strlen("height"))) { + if (++argn >= argc) { + usage(pname); /* "-height" should be followed by a specified height value*/ + fprintf(stderr, "-height should be followed by a specified height value!\n"); + return 1; + } + + if ((1 != sscanf(argv[argn], "%d", &height)) || (height <= 0)) { + usage(pname); /* Invalid height */ + fprintf(stderr, "Invalid height!\n"); + return 1; + } + + if ((MAX_HEIGHT= argc) { + usage(pname); /* "-source" should be followed by a specified source path */ + fprintf(stderr, "-source should be followed by a specified source path!\n"); + return 1; + } + source_name = argv[argn]; + } else if (match_key(arg, "output", strlen("output"))) { + if (++argn >= argc) { + usage(pname); /* "-output" should be followed by a specified output file path */ + fprintf(stderr, "-output should be followed by a specified output file path!\n"); + return 1; + } + output_name = argv[argn]; + if ((strlen(output_name) <= 4) || + (strcmp(output_name+strlen(output_name)-4, ".jpg") && + strcmp(output_name+strlen(output_name)-4, ".JPG"))) { + usage(pname); /* Invalid output file name */ + fprintf(stderr, "Invalid output file name: %s!\n", output_name); + return 1; + } + } else if (match_key(arg, "burst", strlen("burst"))) { + if (++argn >= argc) { + usage(pname); /* "burst" should be followed by a quality value */ + fprintf(stderr, "-burst should be followed by a specified encoding times!\n"); + return 1; + } + + if ((1 != sscanf(argv[argn], "%d", &burst)) || (burst < 0) || (burst > 50)) { + usage(pname); /* Invalid burst times */ + fprintf(stderr, "Invalid burst times!\n"); + return 1; + } + } else if (match_key(arg, "quality", strlen("quality"))) { + if (++argn >= argc) { + usage(pname); /* "quality" should be followed by a quality value */ + fprintf(stderr, "-quality should be followed by a specified quality value!\n"); + return 1; + } + + if ((1 != sscanf(argv[argn], "%d", &quality)) || (quality < 0) || (quality > 100)) { + usage(pname); /* Invalid quality value */ + fprintf(stderr, "Invalid quality value!\n"); + return 1; + } + } else if (match_key(arg, "fix", strlen("fix"))) { + fix_cpu_frequency = true; + } else { + usage(pname); /* Unsupported argument */ + fprintf(stderr, "Unsupported argument: %s!\n", arg); + return 1; 
+ } + } + + /* Validate the input parameters */ + if ((0 == width) || (0 == height)) { + usage(pname); + fprintf(stderr, "Width or height unset!\n"); + return 1; + } + + if (NULL == source_name) { + usage(pname); + fprintf(stderr, "Source file path unset!\n"); + return 1; + } + + /* Get the source image data */ + source_fd = open(source_name, O_RDONLY, 0664); + if (-1 == source_fd) { + fprintf(stderr, "Error opening source file: %s (%s)!\n", source_name, strerror(errno)); + return 1; + } + + source_size = width * height * YUV420_SAMPLE_SIZE; + stride = (width+0x3f) & (~0x3f); /* TopazHP requires stride must be an integral multiple of 64. */ + source_buffer_size = stride * height * YUV420_SAMPLE_SIZE; + + source_buffer = malloc(source_buffer_size+4096); + if (NULL == source_buffer) { + fprintf(stderr, "Fail to allocate source buffer: %d(%s)!\n", errno, strerror(errno)); + close(source_fd); + return 1; + } + memset(source_buffer, 0, source_buffer_size+4096); + aligned_source_buffer = (void *)((unsigned int)source_buffer - + ((unsigned int)source_buffer)%4096 + 4096); + + current_position = aligned_source_buffer; + for (i=0; i 0) { + fseek(cpu_available_max_fd, 0-readed, SEEK_CUR); + } + } + } while (1); + + if (0 == j) { + while (one_line[i] != ':') { + ++i; + } + ++i; /* The space bewteen ':' and a freq value */ + while (one_line[i] != '.') { + one_segment[j++] = one_line[i++]; + } + one_segment[j] = '\0'; + cpu_available_max = atoi((const char *)one_segment) * 1000; + } + fclose(cpu_available_max_fd); + + if (0 == cpu_available_max) { + cpu_available_max = 1000000; + fprintf(stderr, "\nCan't find CPU frequecency value and we assume 1.0GHz.\n"); + } + + printf("\n%u CPU(s) online, whose unscalable frequency is: %u.\n", + cpu_online_nr+1, cpu_available_max); + } else { + fscanf(cpu_available_max_fd, "%u", &cpu_available_max); + assert(cpu_available_max != 0); + fclose(cpu_available_max_fd); + + cpu_available_min_fd = fopen("/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_min_freq", + "r"); + assert(cpu_available_min_fd != NULL); + fscanf(cpu_available_min_fd, "%u", &cpu_available_min); + assert(cpu_available_min != 0); + fclose(cpu_available_min_fd); + + printf("\n%u CPU(s) online, whose MAX/MIN available frequency is: %u/%u.\n", + cpu_online_nr+1, cpu_available_max, cpu_available_min); + + for (i=0; i<=(int)cpu_online_nr; ++i) { + char fd_name[64]; + + sprintf(fd_name, "/sys/devices/system/cpu/cpu%u/cpufreq/scaling_max_freq", i); + cpu_scaling_max_fd = fopen(fd_name, "w"); + if (0 == i) { + assert(cpu_scaling_max_fd != NULL); + } else if ((i>0) && (NULL==cpu_scaling_max_fd)) { + fprintf(stderr, "No sysfs attribute to fix cpu%u's frequency!\n", i); + break; + } + fprintf(cpu_scaling_max_fd, "%u", cpu_available_max); + fclose(cpu_scaling_max_fd); + + sprintf(fd_name, "/sys/devices/system/cpu/cpu%u/cpufreq/scaling_min_freq", i); + cpu_scaling_min_fd = fopen(fd_name, "w"); + if (0 == i) { + assert(cpu_scaling_min_fd != NULL); + } else if ((i>0) && (NULL== cpu_scaling_min_fd)) { + fprintf(stderr, "No sysfs attribute to fix cpu%u's frequency!\n", i); + break; + } + fprintf(cpu_scaling_min_fd, "%u", cpu_available_max); + fclose(cpu_scaling_min_fd); + + sprintf(fd_name, "/sys/devices/system/cpu/cpu%u/cpufreq/scaling_cur_freq", i); + cpu_cur_fd = fopen(fd_name, "r"); + assert(cpu_cur_fd != NULL); + fscanf(cpu_cur_fd, "%u", &cpu_cur); + assert(cpu_cur == cpu_available_max); + fclose(cpu_cur_fd); + + printf("cpu%u's frequency is fixed to %u.\n", i, cpu_available_max); + + cpu_scaling_max_fd = 
cpu_scaling_min_fd = cpu_cur_fd = NULL; + cpu_cur = 0; + } + } + } + cpu_available_max = 1000000; + + /* Print encoding settings */ + printf("\n[INPUT]\n"); + printf("Source: %s\n", source_name); + printf("Width: %d\n", width); + printf("Height: %d\n", height); + printf("Output: %s\n", output_name); + printf("Burst: %d times\n", burst); + printf("Quality: %d\n", quality); + if (true == fix_cpu_frequency) + printf("Fix CPU frequency: true\n"); + else + printf("Fix CPU frequency: false\n"); + printf("\n[OUTPUT]\n"); + + /* Initialize encoder */ + PERF_START(init_driver_time, fix_cpu_frequency); + status = image_encoder.initializeEncoder(); + PERF_STOP(init_driver_time, fix_cpu_frequency); + if (status != 0) { + fprintf(stderr, "initializeEncoder failed (%d)!\n", status); + free(source_buffer); + return 1; + } + + /* Create a source surface*/ + PERF_START(create_source_time, fix_cpu_frequency); + status = image_encoder.createSourceSurface(SURFACE_TYPE_USER_PTR, aligned_source_buffer, + width, height, + stride, VA_RT_FORMAT_YUV420, + &image_seq); + PERF_STOP(create_source_time, fix_cpu_frequency); + if (status != 0) { + fprintf(stderr, "createSourceSurface failed (%d)!\n", status); + free(source_buffer); + image_encoder.deinitializeEncoder(); + return 1; + } + + /* Create context*/ + PERF_START(create_context_time, fix_cpu_frequency); + status = image_encoder.createContext(image_seq, &output_buffer_size); + PERF_STOP(create_context_time, fix_cpu_frequency); + if (status != 0) { + fprintf(stderr, "createContext failed (%d)!\n", status); + free(source_buffer); + image_encoder.deinitializeEncoder(); + return 1; + } + + output_buffer = malloc(output_buffer_size); + if (NULL == output_buffer) { + fprintf(stderr, "Fail to allocate output buffer: %d(%s)!\n", errno, strerror(errno)); + free(source_buffer); + image_encoder.deinitializeEncoder(); + return 1; + } + + printf("Init driver: %.3fms\n", (double)PERF_GET(init_driver_time)/cpu_available_max); + printf("Create source: %.3fms\n", (double)PERF_GET(create_source_time)/cpu_available_max); + printf("Create context: %.3fms\n", (double)PERF_GET(create_context_time)/cpu_available_max); + + /* Do the encoding */ + for (i=0; i0) && (NULL== cpu_scaling_min_fd)) { + fprintf(stderr, "No sysfs attribute to restore cpu%u's frequency!\n", i); + break; + } + + fprintf(cpu_scaling_min_fd, "%u", cpu_available_min); + fclose(cpu_scaling_min_fd); + + printf("cpu%u's frequency is restored.\n", i); + + cpu_scaling_min_fd = NULL; + } + } + } + + return 0; +} + -- cgit v1.2.3 From 41868fd267dc3442b3dc9d9051c332339edd562f Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Fri, 8 Nov 2013 15:13:07 +0800 Subject: libmix: lower delay of output one decoded buffer BZ: 146948 1) AVC: the original design will call endDecodingFrame for the previous frame when a new frame is detected; adjust the calling sequence for low delay mode; 2) VP8: enable low delay mode to allow one frame to be outputted at once. 
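In outline, the reordered AVC frame-boundary handling works as sketched below (a simplified view of the decodeFrame() change in the diff that follows; status checks omitted):

    if (isNewFrame(data, lastPTS == mCurrentPTS)) {
        if (mLowDelay)
            beginDecodingFrame(data);   // low delay: start the new frame first, so the
                                        // finished frame can be output immediately
        endDecodingFrame(false);        // finish decoding the previous frame
        if (!mLowDelay)
            beginDecodingFrame(data);   // default path: finish first, then start anew
    } else {
        continueDecodingFrame(data);
    }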
Change-Id: Iac6a941a713d1ed43c21ccc35591f015ad237ab8 Signed-off-by: Dan Liang --- videodecoder/VideoDecoderAVC.cpp | 15 +++++++++++---- videodecoder/VideoDecoderBase.h | 2 +- videodecoder/VideoDecoderVP8.cpp | 1 + 3 files changed, 13 insertions(+), 5 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 45e7e9d..957897b 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -164,16 +164,23 @@ Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h uint64_t lastPTS = mCurrentPTS; mCurrentPTS = buffer->timeStamp; - //if (lastPTS != mCurrentPTS) { if (isNewFrame(data, lastPTS == mCurrentPTS)) { + if (mLowDelay) { + // start decoding a new frame + status = beginDecodingFrame(data); + CHECK_STATUS("beginDecodingFrame"); + } + // finish decoding the last frame status = endDecodingFrame(false); CHECK_STATUS("endDecodingFrame"); - // start decoding a new frame - status = beginDecodingFrame(data); - CHECK_STATUS("beginDecodingFrame"); + if (!mLowDelay) { + // start decoding a new frame + status = beginDecodingFrame(data); + CHECK_STATUS("beginDecodingFrame"); + } } else { status = continueDecodingFrame(data); CHECK_STATUS("continueDecodingFrame"); diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index b111bb3..5c620d0 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -105,6 +105,7 @@ private: pthread_mutex_t mLock; protected: + bool mLowDelay; // when true, decoded frame is immediately output for rendering VideoFormatInfo mVideoFormatInfo; Display *mDisplay; VADisplay mVADisplay; @@ -146,7 +147,6 @@ protected: }; private: - bool mLowDelay; // when true, decoded frame is immediately output for rendering bool mRawOutput; // whether to output NV12 raw data bool mManageReference; // this should stay true for VC1/MP4 decoder, and stay false for AVC decoder. AVC handles reference frame using DPB OUTPUT_METHOD mOutputMethod; diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index 9bfd15b..f71099c 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -105,6 +105,7 @@ Decode_Status VideoDecoderVP8::start(VideoConfigBuffer *buffer) { status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data); CHECK_STATUS("VideoDecoderBase::parseBuffer"); + enableLowDelayMode(true); status = startVA(data); return status; } -- cgit v1.2.3 From dd485f377535beb207f9997c92f436f825addd2c Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Tue, 12 Nov 2013 20:33:29 +0800 Subject: Fix the widevine crash problem BZ: 151018 mErrReportEnabled was not initialized before it was used. In this fix, mErrReportEnabled is set to false in the VideoDecoderBase constructor.
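As a minimal, self-contained illustration of the failure mode (a hypothetical class, not the real VideoDecoderBase):

    class Decoder {
    public:
        Decoder() {}                  // bug: mErrReportEnabled left indeterminate
        void fillDecodingErrors() {
            if (mErrReportEnabled) {  // reading an uninitialized bool is undefined;
                // the error-reporting path may run on garbage state and crash
            }
        }
    private:
        bool mErrReportEnabled;       // the fix below initializes this to false
    };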
Change-Id: I7c1b9d52a73fcc41264b70813c605c7be2a77e38 Signed-off-by: wfeng6 --- videodecoder/VideoDecoderBase.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 9fd96f9..00e9d94 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -68,7 +68,8 @@ VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type) mParserType(type), mParserHandle(NULL), mInitialized(false), - mSignalBufferSize(0) { + mSignalBufferSize(0), + mErrReportEnabled(false){ memset(&mVideoFormatInfo, 0, sizeof(VideoFormatInfo)); memset(&mConfigBuffer, 0, sizeof(mConfigBuffer)); -- cgit v1.2.3 From efe5e1f6f5e31750d2ced333c5486439880723ea Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Fri, 1 Nov 2013 14:41:30 +0800 Subject: New Implementation for SurfaceMap BZ: 151334 Refine the code and support new features: a) Support copying gfx buffers whose stride is not 64-aligned, such as 736x480. b) Refine the code so these features are handled in one central place (64-byte alignment, VP8 uncached memory, no buffer sharing on BYT); separate codec modules use flags to enable the extra features they need. c) Support more color formats for input gfx buffers; libmix will do the color conversion internally. Change-Id: I7af44bc9d31b1a433768e28573c71ea7da722db9 Signed-off-by: Zhao Liang --- test/mix_encoder2.cpp | 106 ++++-- videoencoder/Android.mk | 6 + videoencoder/PVSoftMPEG4Encoder.cpp | 3 + videoencoder/VideoEncoderBase.cpp | 670 +++++----------------------------- videoencoder/VideoEncoderBase.h | 22 +- videoencoder/VideoEncoderDef.h | 2 + videoencoder/VideoEncoderLog.h | 3 +- videoencoder/VideoEncoderUtils.cpp | 692 ++++++++++++++++++++++++++++++++++++ videoencoder/VideoEncoderUtils.h | 70 ++++ 9 files changed, 960 insertions(+), 614 deletions(-) create mode 100644 videoencoder/VideoEncoderUtils.cpp create mode 100644 videoencoder/VideoEncoderUtils.h diff --git a/test/mix_encoder2.cpp b/test/mix_encoder2.cpp index 0fa9096..f17311f 100755 --- a/test/mix_encoder2.cpp +++ b/test/mix_encoder2.cpp @@ -183,7 +183,7 @@ public: gNumFramesOutput = 0; createResource (); -#if 1 +#if 0 { int size= mStride * mHeight * 1.5; void* tmp = malloc(size); @@ -211,7 +211,14 @@ public: //upload src data LOG("Fill src pictures width=%d, Height=%d\n", mStride, mHeight); for(int i=0; i 0) { - ret = fread(mUsrptr[gNumFramesOutput % PRELOAD_FRAME_NUM] + mStride*mHeight*3/2 - readsize, 1, readsize, mYuvhandle); + if (mColorFormat == HAL_PIXEL_FORMAT_RGBA_8888) + ret = fread(mUsrptr[gNumFramesOutput % PRELOAD_FRAME_NUM] + mStride*mHeight*4 - readsize, 1, readsize, mYuvhandle); + else + ret = fread(mUsrptr[gNumFramesOutput % PRELOAD_FRAME_NUM] + mStride*mHeight*3/2 - readsize, 1, readsize, mYuvhandle); if (ret <= 0) { (*buffer)->release(); @@ -283,6 +300,8 @@ public: (*buffer)->meta_data()->setInt64( kKeyTime, (gNumFramesOutput * 1000000) / mFrameRate); + postSourceWriting(gNumFramesOutput % PRELOAD_FRAME_NUM); + ++gNumFramesOutput; if (gNumFramesOutput % 10 ==0) fprintf(stderr, "."); @@ -293,6 +312,8 @@ public: return OK; } + virtual void preSourceWriting(int i) {} + virtual void postSourceWriting(int i) {} protected: virtual ~DummySource() { for(int i = 0; i < PRELOAD_FRAME_NUM; i ++) @@ -642,11 +663,6 @@ public: } mGraphicBuffer[i] = graphicBuffer; - void* vaddr[3]; - if (graphicBuffer->lock(usage, &vaddr[0]) != OK) - return UNKNOWN_ERROR; - - mUsrptr[i] = (uint8_t*)vaddr[0]; mIMB[i] = new IntelMetadataBuffer();
mIMB[i]->SetType(MetadataBufferTypeCameraSource); #ifdef INTEL_VIDEO_XPROC_SHARING @@ -656,10 +672,7 @@ public: mIMB[i]->SetValue((int32_t)mGraphicBuffer[i]->handle); #endif mIMB[i]->SetValueInfo(&vinfo); - graphicBuffer->unlock(); - mUsrptr[i] = (uint8_t*)vaddr[0]; - IMG_native_handle_t* h = (IMG_native_handle_t*) mGraphicBuffer[i]->handle; mStride = h->iWidth; mHeight = h->iHeight; @@ -670,6 +683,17 @@ public: return OK; } + void preSourceWriting(int i) { + uint32_t usage = GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_WRITE_OFTEN;// | GraphicBuffer::USAGE_HW_COMPOSER; + void* vaddr[3]; + if (mGraphicBuffer[i]->lock(usage, &vaddr[0]) != OK) + printf("GfxSource lock failed\n"); + mUsrptr[i] = (uint8_t*)vaddr[0]; + } + void postSourceWriting(int i) { + mGraphicBuffer[i]->unlock(); + } + private: //for gfxhandle sp mGraphicBufferAlloc; @@ -699,13 +723,9 @@ public: { if (gfx_alloc(mWidth, mHeight, mColorFormat, usage, &mHandle[i], (int32_t*)&mStride) != 0) return UNKNOWN_ERROR; - void* vaddr[3]; - if (gfx_lock(mHandle[i], usage, 0, 0, mWidth, mHeight, &vaddr[0]) != 0) - return UNKNOWN_ERROR; mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)mHandle[i]); - gfx_unlock(mHandle[i]); - mUsrptr[i] = (uint8_t*)vaddr[0]; + IMG_native_handle_t* h = (IMG_native_handle_t*) mHandle[i]; mStride = h->iWidth; mHeight = h->iHeight; @@ -713,6 +733,17 @@ public: return OK; } + void preSourceWriting(int i) { + int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_WRITE_OFTEN; + void* vaddr[3]; + if (gfx_lock(mHandle[i], usage, 0, 0, mWidth, mHeight, &vaddr[0]) != 0) + printf("GrallocSource lock failed\n"); + mUsrptr[i] = (uint8_t*)vaddr[0]; + } + void postSourceWriting(int i) { + gfx_unlock(mHandle[i]); + } + private: void gfx_init() { @@ -939,6 +970,9 @@ public: success = meta->findInt32('sync', &mSyncMode); CHECK(success); + success = meta->findInt32('rawc', &mRawColor); + CHECK(success); + // const char *RCMODE[] = {"VBR", "CBR", "VCM", "NO_RC", NULL}; VideoRateControl RC_MODES[] = {RATE_CONTROL_VBR, RATE_CONTROL_CBR, @@ -1154,6 +1188,10 @@ private: mEncoderParams.frameRate.frameRateDenom = 1; mEncoderParams.frameRate.frameRateNum = mFPS; mEncoderParams.rcMode = mRCMode; + if (mRawColor == 2) + mEncoderParams.rawFormat = RAW_FORMAT_OPAQUE; + else + mEncoderParams.rawFormat = RAW_FORMAT_NV12; if (strcmp(mMixCodec, MPEG4_MIME_TYPE) == 0) { mEncoderParams.profile = (VAProfile)VAProfileMPEG4Simple; @@ -1213,6 +1251,7 @@ private: int mIdrInt; int mDisableFrameSkip; int mSyncMode; + int mRawColor; bool mFirstFrame; bool mSrcEOS; @@ -1459,25 +1498,26 @@ public: void usage() { printf("2nd generation Mix_encoder\n"); printf("Usage: mix_encoder2 [options]\n\n"); - printf(" -a/--initQP set initQP, default 0\n"); + printf(" -a/--initQP set initQP, default 0\n"); printf(" -b/--bitrate set bitrate bps, default 10M\n"); printf(" -c/--codec select codec, like H264(default), MPEG4, H263, VP8\n"); printf(" -d/--intraPeriod set IntraPeriod, default 30\n"); printf(" -e/--encoder select encoder, like MIX(default), OMXCODEC\n"); printf(" -f set output file name\n"); printf(" -i/--yuv select yuv generate method, AUTO(default) or from yuv file\n"); - printf(" -j/--winSize set window size, default 1000\n"); + printf(" -j/--winSize set window size, default 1000\n"); printf(" -k/--encWidth -g/--encHeight set encoder width/height, default 1280*720\n"); - printf(" -l/--idrInterval set IdrInterval, default 1\n"); + printf(" -l/--idrInterval set IdrInterval, default 1\n"); printf(" 
-m/--disableMetadata disable Metadata Mode(default enabled)\n"); printf(" -n/--count set source frame number, default 30\n"); printf(" -o/--outputformat set output file format, like MP4(default), RAW, IVF(only for VP8)\n"); printf(" -p/--fps set frame rate, default 30\n"); - printf(" -q/--minQP set minQP, default 0\n"); + printf(" -q/--minQP set minQP, default 0\n"); printf(" -r/--rcMode set rc mode, like VBR(default), CBR, VCM, NO_RC\n"); printf(" -s/--src select source, like MALLOC(default), VASURFACE, KBUFHANDLE, GFX, GRALLOC, MEMHEAP, SURFACEMEDIASOURCE (CAMERASOURCE, not support yet) \n"); - printf(" -t/--sessionFlag set sessionflag, default is 0\n"); + printf(" -t/--sessionFlag set sessionflag, default is 0\n"); printf(" -u/--disableFrameSkip disable frame skip, default is false\n"); + printf(" -v/--gfxColor set gfx color, default is 0(HAL_PIXEL_FORMAT_NV12), 1(OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar), 2(HAL_PIXEL_FORMAT_RGBA_8888)\n"); printf(" -w -h set source width /height, default 1280*720\n"); printf("\n"); @@ -1506,6 +1546,7 @@ int main(int argc, char* argv[]) int IdrInt = 1; int IntraPeriod = 30; int DisableFrameSkip = 0; + int GfxColor = 0; int OutFormat = 0; int SyncMode = 0; char* OutFileName = "out.264"; @@ -1539,6 +1580,7 @@ int main(int argc, char* argv[]) {"idrInt", required_argument, NULL, 'l'}, {"disableFrameSkip", no_argument, NULL, 'u'}, {"sessionFlag", required_argument, NULL, 't'}, + {"gfxColor", required_argument, NULL, 'v'}, {0, 0, 0, 0} }; @@ -1676,6 +1718,10 @@ int main(int argc, char* argv[]) DisableFrameSkip = 1; break; + case 'v': + GfxColor = atoi(optarg); + break; + case 'w': SrcWidth = atoi(optarg); SrcStride = SrcWidth; @@ -1689,6 +1735,10 @@ int main(int argc, char* argv[]) } } + //RGB is only valid in gfx/gralloc source mode + if ( GfxColor > 2 && (SrcType != 3 || SrcType != 4) ) + GfxColor = 0; + //export encoding parameters summary printf("=========================================\n"); printf("Source:\n"); @@ -1699,7 +1749,7 @@ int main(int argc, char* argv[]) printf("Type: %s, Codec: %s, Width: %d, Height: %d\n", ENCTYPE[EncType], CODEC[EncCodec], EncWidth, EncHeight); printf("RC: %s, Bitrate: %d bps, initQP: %d, minQP: %d\n", RCMODE[EncRCMode], EncBitrate, InitQP, MinQP); printf("winSize: %d, IdrInterval: %d, IntraPeriod: %d, FPS: %d \n", WinSize, IdrInt, IntraPeriod, SrcFps); - printf("Frameskip: %d\n", !DisableFrameSkip); + printf("Frameskip: %d, GfxColor: %s\n", !DisableFrameSkip, GfxColor > 0 ? (GfxColor > 1? 
"HAL_PIXEL_FORMAT_RGBA_8888":"OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar"):"HAL_PIXEL_FORMAT_NV12"); printf("\nOut:\n"); printf("Type: %s, File: %s\n", OUTFORMAT[OutFormat], OutFileName); @@ -1715,7 +1765,12 @@ int main(int argc, char* argv[]) src_meta->setInt32(kKeyHeight, SrcHeight); src_meta->setInt32(kKeyStride, SrcStride); src_meta->setInt32(kKeyFrameRate, SrcFps); - src_meta->setInt32(kKeyColorFormat, OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar); //HAL_PIXEL_FORMAT_NV12 OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar = 0x7FA00E00, + if (GfxColor == 1) + src_meta->setInt32(kKeyColorFormat, 0x7FA00E00); //OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar + else if (GfxColor == 2) + src_meta->setInt32(kKeyColorFormat, HAL_PIXEL_FORMAT_RGBA_8888); + else + src_meta->setInt32(kKeyColorFormat, HAL_PIXEL_FORMAT_NV12); src_meta->setCString('yuvf', Yuvfile); src_meta->setInt32('fnum', SrcFrameNum); @@ -1777,9 +1832,10 @@ int main(int argc, char* argv[]) enc_meta->setInt32('mnqp', MinQP); enc_meta->setInt32('iapd', IntraPeriod); enc_meta->setInt32('wsiz', WinSize); - enc_meta->setInt32('idri', WinSize); + enc_meta->setInt32('idri', IdrInt); enc_meta->setInt32('difs', DisableFrameSkip); enc_meta->setInt32('sync', SyncMode); + enc_meta->setInt32('rawc', GfxColor); uint32_t encoder_flags = 0; if (MetadataMode) diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk index 3ec61de..480b5a1 100644 --- a/videoencoder/Android.mk +++ b/videoencoder/Android.mk @@ -17,6 +17,7 @@ LOCAL_SRC_FILES := \ VideoEncoderH263.cpp \ VideoEncoderMP4.cpp \ VideoEncoderVP8.cpp \ + VideoEncoderUtils.cpp \ VideoEncoderHost.cpp ifeq ($(ENABLE_IMG_GRAPHICS),) @@ -51,6 +52,7 @@ LOCAL_SHARED_LIBRARIES := \ libva-tpi \ libui \ libutils \ + libhardware \ libintelmetadatabuffer #LOCAL_CFLAGS += -DANDROID @@ -76,6 +78,10 @@ endif ifeq ($(ENABLE_IMG_GRAPHICS),true) LOCAL_CFLAGS += -DIMG_GFX + + ifeq ($(ENABLE_MRFL_GRAPHICS),true) + LOCAL_CFLAGS += -DMRFLD_GFX + endif endif LOCAL_MODULE_TAGS := optional diff --git a/videoencoder/PVSoftMPEG4Encoder.cpp b/videoencoder/PVSoftMPEG4Encoder.cpp index 11ef971..47ddb12 100644 --- a/videoencoder/PVSoftMPEG4Encoder.cpp +++ b/videoencoder/PVSoftMPEG4Encoder.cpp @@ -132,6 +132,9 @@ PVSoftMPEG4Encoder::PVSoftMPEG4Encoder(const char *name) } setDefaultParams(); +#if NO_BUFFER_SHARE + mVASurfaceMappingAction |= MAPACT_COPY; +#endif LOG_I("Construct PVSoftMPEG4Encoder"); diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 8e90e9f..f2ae139 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -11,30 +11,7 @@ #include "IntelMetadataBuffer.h" #include #include -#include -#ifdef IMG_GFX -#include -#endif -// API declaration -extern "C" { -VAStatus vaLockSurface(VADisplay dpy, - VASurfaceID surface, - unsigned int *fourcc, - unsigned int *luma_stride, - unsigned int *chroma_u_stride, - unsigned int *chroma_v_stride, - unsigned int *luma_offset, - unsigned int *chroma_u_offset, - unsigned int *chroma_v_offset, - unsigned int *buffer_name, - void **buffer -); - -VAStatus vaUnlockSurface(VADisplay dpy, - VASurfaceID surface -); -} VideoEncoderBase::VideoEncoderBase() :mInitialized(true) ,mStarted(false) @@ -69,6 +46,7 @@ VideoEncoderBase::VideoEncoderBase() ,mTotalSizeCopied(0) ,mFrameSkipped(false) ,mSupportedSurfaceMemType(0) + ,mVASurfaceMappingAction(0) #ifdef INTEL_VIDEO_XPROC_SHARING ,mSessionFlag(0) #endif @@ -131,6 +109,13 @@ Encode_Status VideoEncoderBase::start() { return ENCODE_ALREADY_INIT; } + if 
(mComParams.rawFormat != RAW_FORMAT_NV12) +#ifdef IMG_GFX + mVASurfaceMappingAction |= MAP_ACTION_COLORCONVERT; +#else + return ENCODE_NOT_SUPPORTED; +#endif + queryAutoReferenceConfig(mComParams.profile); VAConfigAttrib vaAttrib[5]; @@ -182,38 +167,32 @@ Encode_Status VideoEncoderBase::start() { uint32_t stride_aligned, height_aligned; if(mAutoReference == false){ - stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16; - height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16; + stride_aligned = (mComParams.resolution.width + 15) & ~15; + height_aligned = (mComParams.resolution.height + 15) & ~15; }else{ - // this alignment is used for AVC. For vp8 encode, driver will handle the alignment - if(mComParams.profile == VAProfileVP8Version0_3) - { - stride_aligned = mComParams.resolution.width; - height_aligned = mComParams.resolution.height; - } - else - { - stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; //on Merr, stride must be 64 aligned. - height_aligned = ((mComParams.resolution.height + 31) / 32 ) * 32; - } + // this alignment is used for AVC. For vp8 encode, driver will handle the alignment + if(mComParams.profile == VAProfileVP8Version0_3) + { + stride_aligned = mComParams.resolution.width; + height_aligned = mComParams.resolution.height; + mVASurfaceMappingAction |= MAP_ACTION_COPY; + } + else + { + stride_aligned = (mComParams.resolution.width + 63) & ~63; //on Merr, stride must be 64 aligned. + height_aligned = (mComParams.resolution.height + 31) & ~31; + mVASurfaceMappingAction |= MAP_ACTION_ALIGN64; + } } - ValueInfo vinfo; - vinfo.mode = MEM_MODE_SURFACE; - vinfo.width = stride_aligned; - vinfo.height = height_aligned; - vinfo.lumaStride = stride_aligned; - vinfo.size = stride_aligned * height_aligned * 1.5; - vinfo.format = VA_FOURCC_NV12; - if(mAutoReference == false){ - mRefSurface = CreateSurfaceFromExternalBuf(0, vinfo); - mRecSurface = CreateSurfaceFromExternalBuf(0, vinfo); + mRefSurface = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned); + mRecSurface = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned); }else { mAutoRefSurfaces = new VASurfaceID [mAutoReferenceSurfaceNum]; for(int i = 0; i < mAutoReferenceSurfaceNum; i ++) - mAutoRefSurfaces[i] = CreateSurfaceFromExternalBuf(0, vinfo); + mAutoRefSurfaces[i] = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned); } CHECK_VA_STATUS_RETURN("vaCreateSurfaces"); @@ -226,12 +205,12 @@ Encode_Status VideoEncoderBase::start() { VASurfaceID *contextSurfaces = new VASurfaceID[contextSurfaceCnt]; int32_t index = -1; - android::List::iterator map_node; + android::List::iterator map_node; for(map_node = mSrcSurfaceMapList.begin(); map_node != mSrcSurfaceMapList.end(); map_node++) { - contextSurfaces[++index] = (*map_node)->surface; - (*map_node)->added = true; + contextSurfaces[++index] = (*map_node)->getVASurface(); + (*map_node)->setTracked(); } if(mAutoReference == false){ @@ -604,12 +583,6 @@ Encode_Status VideoEncoderBase::stop() { LOG_V( "Rlease Src Surface Map\n"); while(! mSrcSurfaceMapList.empty()) { - if (! 
(*mSrcSurfaceMapList.begin())->added) { - LOG_V( "Rlease the Src Surface Buffer not added into vaContext\n"); - vaDestroySurfaces(mVADisplay, &((*mSrcSurfaceMapList.begin())->surface), 1); - if ((*mSrcSurfaceMapList.begin())->surface_backup != VA_INVALID_SURFACE) - vaDestroySurfaces(mVADisplay, &((*mSrcSurfaceMapList.begin())->surface_backup), 1); - } delete (*mSrcSurfaceMapList.begin()); mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin()); } @@ -1393,8 +1366,6 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( VAImage image; uint32_t index = 0; - SurfaceMap *map = NULL; - LOG_V( "Begin\n"); // If encode session has been configured, we can not request surface creation anymore if (mStarted) { @@ -1413,15 +1384,7 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( return ENCODE_NOT_SUPPORTED; } - ValueInfo vinfo; - vinfo.mode = MEM_MODE_SURFACE; - vinfo.width = width; - vinfo.height = height; - vinfo.lumaStride = width; - vinfo.size = expectedSize; - vinfo.format = format; - - surface = CreateSurfaceFromExternalBuf(0, vinfo); + surface = CreateNewVASurface(mVADisplay, width, height); if (surface == VA_INVALID_SURFACE) return ENCODE_DRIVER_FAIL; @@ -1441,29 +1404,6 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( *outsize = image.data_size; *stride = image.pitches[0]; - map = new SurfaceMap; - if (map == NULL) { - LOG_E( "new SurfaceMap failed\n"); - return ENCODE_NO_MEMORY; - } - - map->surface = surface; - map->surface_backup = VA_INVALID_SURFACE; - map->type = MetadataBufferTypeEncoder; - map->value = (int32_t)*usrptr; - map->vinfo.mode = (MemMode)MEM_MODE_USRPTR; - map->vinfo.handle = 0; - map->vinfo.size = 0; - map->vinfo.width = width; - map->vinfo.height = height; - map->vinfo.lumaStride = width; - map->vinfo.chromStride = width; - map->vinfo.format = VA_FOURCC_NV12; - map->vinfo.s3dformat = 0xffffffff; - map->added = false; - - mSrcSurfaceMapList.push_back(map); - LOG_I( "surface = 0x%08x\n",(uint32_t)surface); LOG_I("image->pitches[0] = %d\n", image.pitches[0]); LOG_I("image->pitches[1] = %d\n", image.pitches[1]); @@ -1474,7 +1414,6 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( LOG_I("image->height = %d\n", image.height); LOG_I ("data_size = %d\n", image.data_size); LOG_I ("usrptr = 0x%p\n", *usrptr); - LOG_I ("map->value = 0x%p\n ", (void *)map->value); vaStatus = vaUnmapBuffer(mVADisplay, image.buf); CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); @@ -1489,12 +1428,36 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( return ENCODE_FAIL; } + VASurfaceMap *map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType); + if (map == NULL) { + LOG_E( "new VASurfaceMap failed\n"); + return ENCODE_NO_MEMORY; + } + + map->setVASurface(surface); //special case, vasuface is set, so nothing do in doMapping +// map->setType(MetadataBufferTypeEncoder); + map->setValue((int32_t)*usrptr); + ValueInfo vinfo; + memset(&vinfo, 0, sizeof(ValueInfo)); + vinfo.mode = (MemMode)MEM_MODE_USRPTR; + vinfo.handle = 0; + vinfo.size = 0; + vinfo.width = width; + vinfo.height = height; + vinfo.lumaStride = width; + vinfo.chromStride = width; + vinfo.format = VA_FOURCC_NV12; + vinfo.s3dformat = 0xffffffff; + map->setValueInfo(vinfo); + map->doMapping(); + + mSrcSurfaceMapList.push_back(map); + ret = ENCODE_SUCCESS; return ret; } - Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer) { Encode_Status status = ENCODE_SUCCESS; @@ -1510,24 +1473,25 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS 
continue; //wrap upstream buffer into vaSurface - SurfaceMap *map = new SurfaceMap; - - map->surface_backup = VA_INVALID_SURFACE; - map->type = MetadataBufferTypeUser; - map->value = upStreamBuffer->bufList[i]; - map->vinfo.mode = (MemMode)upStreamBuffer->bufferMode; - map->vinfo.handle = (uint32_t)upStreamBuffer->display; - map->vinfo.size = 0; + VASurfaceMap *map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType); + +// map->setType(MetadataBufferTypeUser); + map->setValue(upStreamBuffer->bufList[i]); + ValueInfo vinfo; + memset(&vinfo, 0, sizeof(ValueInfo)); + vinfo.mode = (MemMode)upStreamBuffer->bufferMode; + vinfo.handle = (uint32_t)upStreamBuffer->display; + vinfo.size = 0; if (upStreamBuffer->bufAttrib) { - map->vinfo.width = upStreamBuffer->bufAttrib->realWidth; - map->vinfo.height = upStreamBuffer->bufAttrib->realHeight; - map->vinfo.lumaStride = upStreamBuffer->bufAttrib->lumaStride; - map->vinfo.chromStride = upStreamBuffer->bufAttrib->chromStride; - map->vinfo.format = upStreamBuffer->bufAttrib->format; + vinfo.width = upStreamBuffer->bufAttrib->realWidth; + vinfo.height = upStreamBuffer->bufAttrib->realHeight; + vinfo.lumaStride = upStreamBuffer->bufAttrib->lumaStride; + vinfo.chromStride = upStreamBuffer->bufAttrib->chromStride; + vinfo.format = upStreamBuffer->bufAttrib->format; } - map->vinfo.s3dformat = 0xFFFFFFFF; - map->added = false; - status = surfaceMapping(map); + vinfo.s3dformat = 0xFFFFFFFF; + map->setValueInfo(vinfo); + status = map->doMapping(); if (status == ENCODE_SUCCESS) mSrcSurfaceMapList.push_back(map); @@ -1538,248 +1502,6 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS return status; } -Encode_Status VideoEncoderBase::surfaceMappingForSurface(SurfaceMap *map) { - - if (!map) - return ENCODE_NULL_PTR; - - VAStatus vaStatus = VA_STATUS_SUCCESS; - Encode_Status ret = ENCODE_SUCCESS; - VASurfaceID surface; - - //try to get kbufhandle from SurfaceID - uint32_t fourCC = 0; - uint32_t lumaStride = 0; - uint32_t chromaUStride = 0; - uint32_t chromaVStride = 0; - uint32_t lumaOffset = 0; - uint32_t chromaUOffset = 0; - uint32_t chromaVOffset = 0; - uint32_t kBufHandle = 0; - - vaStatus = vaLockSurface( - (VADisplay)map->vinfo.handle, (VASurfaceID)map->value, - &fourCC, &lumaStride, &chromaUStride, &chromaVStride, - &lumaOffset, &chromaUOffset, &chromaVOffset, &kBufHandle, NULL); - - CHECK_VA_STATUS_RETURN("vaLockSurface"); - LOG_I("Surface incoming = 0x%08x", map->value); - LOG_I("lumaStride = %d", lumaStride); - LOG_I("chromaUStride = %d", chromaUStride); - LOG_I("chromaVStride = %d", chromaVStride); - LOG_I("lumaOffset = %d", lumaOffset); - LOG_I("chromaUOffset = %d", chromaUOffset); - LOG_I("chromaVOffset = %d", chromaVOffset); - LOG_I("kBufHandle = 0x%08x", kBufHandle); - LOG_I("fourCC = %d", fourCC); - - vaStatus = vaUnlockSurface((VADisplay)map->vinfo.handle, (VASurfaceID)map->value); - CHECK_VA_STATUS_RETURN("vaUnlockSurface"); - - ValueInfo vinfo; - memcpy(&vinfo, &(map->vinfo), sizeof(ValueInfo)); - vinfo.mode = MEM_MODE_KBUFHANDLE; - - surface = CreateSurfaceFromExternalBuf(kBufHandle, vinfo); - if (surface == VA_INVALID_SURFACE) - return ENCODE_DRIVER_FAIL; - - LOG_I("Surface ID created from Kbuf = 0x%08x", surface); - - map->surface = surface; - - return ret; -} - -Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle(SurfaceMap *map) { - - if (!map) - return ENCODE_NULL_PTR; - - VAStatus vaStatus = VA_STATUS_SUCCESS; - Encode_Status ret = ENCODE_SUCCESS; - VASurfaceID surface; - - 
LOG_I("surfaceMappingForGfxHandle ......\n"); - LOG_I("lumaStride = %d\n", map->vinfo.lumaStride); - LOG_I("format = 0x%08x\n", map->vinfo.format); - LOG_I("width = %d\n", mComParams.resolution.width); - LOG_I("height = %d\n", mComParams.resolution.height); - LOG_I("gfxhandle = %d\n", map->value); - - ValueInfo vinfo; - memcpy(&vinfo, &(map->vinfo), sizeof(ValueInfo)); - -#ifdef IMG_GFX - // color fmrat may be OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar or HAL_PIXEL_FORMAT_NV12 - IMG_native_handle_t* h = (IMG_native_handle_t*) map->value; - LOG_I("IMG_native_handle_t h->iWidth=%d, h->iHeight=%d, h->iFormat=%x\n", h->iWidth, h->iHeight, h->iFormat); - vinfo.lumaStride = h->iWidth; - vinfo.format = h->iFormat; - vinfo.width = h->iWidth; - vinfo.height = h->iHeight; -#endif - - surface = CreateSurfaceFromExternalBuf(map->value, vinfo); - if (surface == VA_INVALID_SURFACE) - return ENCODE_DRIVER_FAIL; - - map->surface = surface; - - LOG_V("surfaceMappingForGfxHandle: Done"); - return ret; -} - -Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle(SurfaceMap *map) { - - if (!map) - return ENCODE_NULL_PTR; - - LOG_I("surfaceMappingForKbufHandle value=%d\n", map->value); - VAStatus vaStatus = VA_STATUS_SUCCESS; - Encode_Status ret = ENCODE_SUCCESS; - VASurfaceID surface; - - map->vinfo.size = map->vinfo.lumaStride * map->vinfo.height * 1.5; - surface = CreateSurfaceFromExternalBuf(map->value, map->vinfo); - if (surface == VA_INVALID_SURFACE) - return ENCODE_DRIVER_FAIL; - LOG_I("Surface ID created from Kbuf = 0x%08x", map->value); - - map->surface = surface; - - return ret; -} - -#if NO_BUFFER_SHARE -static VAStatus upload_yuv_to_surface(VADisplay va_dpy, - SurfaceMap *map, VASurfaceID surface_id, int picture_width, - int picture_height) -{ - VAImage surface_image; - VAStatus vaStatus; - unsigned char *surface_p = NULL; - unsigned char *y_src, *uv_src; - unsigned char *y_dst, *uv_dst; - int y_size = map->vinfo.height * map->vinfo.lumaStride; - int row, col; - - vaStatus = vaDeriveImage(va_dpy, surface_id, &surface_image); - CHECK_VA_STATUS_RETURN("vaDeriveImage"); - - vaStatus = vaMapBuffer(va_dpy, surface_image.buf, (void**)&surface_p); - CHECK_VA_STATUS_RETURN("vaMapBuffer"); - - y_src = (unsigned char*)map->value; - uv_src = (unsigned char*)map->value + y_size; /* UV offset for NV12 */ - - y_dst = surface_p + surface_image.offsets[0]; - uv_dst = surface_p + surface_image.offsets[1]; /* UV offset for NV12 */ - - /* Y plane */ - for (row = 0; row < picture_height; row++) { - memcpy(y_dst, y_src, picture_width); - y_dst += surface_image.pitches[0]; - y_src += map->vinfo.lumaStride; - } - - for (row = 0; row < (picture_height / 2); row++) { - memcpy(uv_dst, uv_src, picture_width); - uv_dst += surface_image.pitches[1]; - uv_src += map->vinfo.chromStride; - } - - vaUnmapBuffer(va_dpy, surface_image.buf); - vaDestroyImage(va_dpy, surface_image.image_id); - - return vaStatus; -} -#endif - -Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) { - - if (!map) - return ENCODE_NULL_PTR; - - VAStatus vaStatus = VA_STATUS_SUCCESS; - Encode_Status ret = ENCODE_SUCCESS; - VASurfaceID surface; -#if NO_BUFFER_SHARE - vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, - map->vinfo.width, map->vinfo.height, &surface, 1, - NULL, 0); - CHECK_VA_STATUS_RETURN("vaCreateSurfaces"); - map->surface = surface; - - vaStatus = upload_yuv_to_surface(mVADisplay, map, surface, - mComParams.resolution.width, mComParams.resolution.height); - 
CHECK_ENCODE_STATUS_RETURN("upload_yuv_to_surface"); - -#else - surface = CreateSurfaceFromExternalBuf(map->value, map->vinfo); - if (surface == VA_INVALID_SURFACE) - return ENCODE_DRIVER_FAIL; - - LOG_I("Surface ID created from Malloc = 0x%08x\n", map->value); - - //Merrifield limitation, should use mAutoReference to check if on Merr - if ((mAutoReference == false) || (map->vinfo.lumaStride % 64 == 0)) - map->surface = surface; - else { - map->surface_backup = surface; - - //TODO: need optimization for both width/height not aligned case - VASurfaceID surfaceId; - unsigned int stride_aligned; - if(mComParams.profile == VAProfileVP8Version0_3) - stride_aligned = mComParams.resolution.width; - else - stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64; - vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, - stride_aligned, map->vinfo.height, &surfaceId, 1, NULL, 0); - - map->surface = surfaceId; - LOG_E("Due to 64 alignment, an alternative Surface ID 0x%08x created\n", surfaceId); - } -#endif - - return ret; -} - -Encode_Status VideoEncoderBase::surfaceMapping(SurfaceMap *map) { - - if (!map) - return ENCODE_NULL_PTR; - - Encode_Status status; - - LOG_I("surfaceMapping mode=%d, format=%d, lumaStride=%d, width=%d, height=%d, value=%x\n", map->vinfo.mode, map->vinfo.format, map->vinfo.lumaStride, map->vinfo.width, map->vinfo.height, map->value); - switch (map->vinfo.mode) { - case MEM_MODE_SURFACE: - status = surfaceMappingForSurface(map); - break; - case MEM_MODE_GFXHANDLE: - status = surfaceMappingForGfxHandle(map); - break; - case MEM_MODE_KBUFHANDLE: - status = surfaceMappingForKbufHandle(map); - break; - case MEM_MODE_MALLOC: - case MEM_MODE_NONECACHE_USRPTR: - status = surfaceMappingForMalloc(map); - break; - case MEM_MODE_ION: - case MEM_MODE_V4L2: - case MEM_MODE_USRPTR: - case MEM_MODE_CI: - default: - status = ENCODE_NOT_SUPPORTED; - break; - } - - return status; -} - Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid) { Encode_Status ret = ENCODE_SUCCESS; @@ -1791,7 +1513,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA unsigned int extravalues_count = 0; IntelMetadataBuffer imb; - SurfaceMap *map = NULL; + VASurfaceMap *map = NULL; if (mStoreMetaDataInBuffers.isEnabled) { //metadatabuffer mode @@ -1819,17 +1541,13 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA #endif //find if mapped - map = (SurfaceMap*) findSurfaceMapByValue(value); + map = (VASurfaceMap*) findSurfaceMapByValue(value); if (map) { - //has mapped, get surfaceID directly - LOG_I("direct find surface %d from value %x\n", map->surface, value); - *sid = map->surface; - if (map->surface_backup != VA_INVALID_SURFACE) { - //need to copy data - LOG_I("Need copy surfaces from %x to %x\n", map->surface_backup, *sid); - ret = copySurfaces(map->surface_backup, *sid); - } + //has mapped, get surfaceID directly and do all necessary actions + LOG_I("direct find surface %d from value %x\n", map->getVASurface(), value); + *sid = map->getVASurface(); + map->doMapping(); return ret; } @@ -1838,7 +1556,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA if (mStoreMetaDataInBuffers.isEnabled) { - //if type is MetadataBufferTypeGrallocSource, use default parameters + //if type is MetadataBufferTypeGrallocSource, use default parameters since no ValueInfo if (type == MetadataBufferTypeGrallocSource) { vinfo.mode = MEM_MODE_GFXHANDLE; vinfo.handle = 0; @@ -1853,7 
+1571,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA
             //get all info mapping needs
             imb.GetValueInfo(pvinfo);
             imb.GetExtraValues(extravalues, extravalues_count);
-       }
+        }

     } else {

@@ -1874,16 +1592,14 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA
     */
     if (pvinfo){
         //map according info, and add to surfacemap list
-        map = new SurfaceMap;
-        map->surface_backup = VA_INVALID_SURFACE;
-        map->type = type;
-        map->value = value;
-        memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo));
-        map->added = false;
-
-        ret = surfaceMapping(map);
+        map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
+        map->setValue(value);
+        map->setValueInfo(*pvinfo);
+        map->setAction(mVASurfaceMappingAction);
+
+        ret = map->doMapping();
         if (ret == ENCODE_SUCCESS) {
-            LOG_I("surface mapping success, map value %x into surface %d\n", value, map->surface);
+            LOG_I("surface mapping success, map value %x into surface %d\n", value, map->getVASurface());
             mSrcSurfaceMapList.push_back(map);
         } else {
             delete map;
@@ -1891,32 +1607,24 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA
             return ret;
         }

-        *sid = map->surface;
-        if (map->surface_backup != VA_INVALID_SURFACE) {
-            //need to copy data
-            LOG_I("Need copy surfaces from %x to %x\n", map->surface_backup, *sid);
-            ret = copySurfaces(map->surface_backup, *sid);
-        }
+        *sid = map->getVASurface();

     } else {
         //can't map due to no info
-        LOG_E("surface mapping failed, missing information\n");
+        LOG_E("surface mapping failed, missing information\n");
         return ENCODE_NO_REQUEST_DATA;
     }

     if (extravalues) {
         //map more using same ValueInfo
         for(unsigned int i=0; i<extravalues_count; i++) {
-            map = new SurfaceMap;
-            map->surface_backup = VA_INVALID_SURFACE;
-            map->type = type;
-            map->value = extravalues[i];
-            memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo));
-            map->added = false;
-
-            ret = surfaceMapping(map);
+            map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
+            map->setValue(extravalues[i]);
+            map->setValueInfo(vinfo);
+
+            ret = map->doMapping();
             if (ret == ENCODE_SUCCESS) {
-                LOG_I("surface mapping extravalue success, map value %x into surface %d\n", extravalues[i], map->surface);
+                LOG_I("surface mapping extravalue success, map value %x into surface %d\n", extravalues[i], map->getVASurface());
                 mSrcSurfaceMapList.push_back(map);
             } else {
                 delete map;
@@ -1973,7 +1681,6 @@ Encode_Status VideoEncoderBase::renderDynamicBitrate() {
     vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
     CHECK_VA_STATUS_RETURN("vaUnmapBuffer");

-
     vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
     CHECK_VA_STATUS_RETURN("vaRenderPicture");

@@ -2053,12 +1760,12 @@ Encode_Status VideoEncoderBase::renderHrd() {
     return ENCODE_SUCCESS;
 }

-SurfaceMap *VideoEncoderBase::findSurfaceMapByValue(int32_t value) {
-    android::List<SurfaceMap *>::iterator node;
+VASurfaceMap *VideoEncoderBase::findSurfaceMapByValue(int32_t value) {
+    android::List<VASurfaceMap *>::iterator node;

     for(node = mSrcSurfaceMapList.begin(); node != mSrcSurfaceMapList.end(); node++)
     {
-        if ((*node)->value == value)
+        if ((*node)->getValue() == value)
             return *node;
         else
             continue;
@@ -2066,184 +1773,3 @@ SurfaceMap *VideoEncoderBase::findSurfaceMapByValue(int32_t value) {

     return NULL;
 }
-
-Encode_Status VideoEncoderBase::copySurfaces(VASurfaceID srcId, VASurfaceID destId) {
-
-    VAStatus vaStatus = VA_STATUS_SUCCESS;
-
-    uint32_t width = mComParams.resolution.width;
-    uint32_t height = mComParams.resolution.height;
-
-    uint32_t i, j;
-
-    VAImage srcImage, destImage;
-    uint8_t *pSrcBuffer, *pDestBuffer;
-
- uint8_t *srcY, *dstY; - uint8_t *srcU, *srcV; - uint8_t *srcUV, *dstUV; - - LOG_I("src Surface ID = 0x%08x, dest Surface ID = 0x%08x\n", (uint32_t) srcId, (uint32_t) destId); - - vaStatus = vaDeriveImage(mVADisplay, srcId, &srcImage); - CHECK_VA_STATUS_RETURN("vaDeriveImage"); - vaStatus = vaMapBuffer(mVADisplay, srcImage.buf, (void **)&pSrcBuffer); - CHECK_VA_STATUS_RETURN("vaMapBuffer"); - - LOG_V("Src Image information\n"); - LOG_I("srcImage.pitches[0] = %d\n", srcImage.pitches[0]); - LOG_I("srcImage.pitches[1] = %d\n", srcImage.pitches[1]); - LOG_I("srcImage.offsets[0] = %d\n", srcImage.offsets[0]); - LOG_I("srcImage.offsets[1] = %d\n", srcImage.offsets[1]); - LOG_I("srcImage.num_planes = %d\n", srcImage.num_planes); - LOG_I("srcImage.width = %d\n", srcImage.width); - LOG_I("srcImage.height = %d\n", srcImage.height); - - vaStatus = vaDeriveImage(mVADisplay, destId, &destImage); - CHECK_VA_STATUS_RETURN("vaDeriveImage"); - vaStatus = vaMapBuffer(mVADisplay, destImage.buf, (void **)&pDestBuffer); - CHECK_VA_STATUS_RETURN("vaMapBuffer"); - - LOG_V("Dest Image information\n"); - LOG_I("destImage.pitches[0] = %d\n", destImage.pitches[0]); - LOG_I("destImage.pitches[1] = %d\n", destImage.pitches[1]); - LOG_I("destImage.offsets[0] = %d\n", destImage.offsets[0]); - LOG_I("destImage.offsets[1] = %d\n", destImage.offsets[1]); - LOG_I("destImage.num_planes = %d\n", destImage.num_planes); - LOG_I("destImage.width = %d\n", destImage.width); - LOG_I("destImage.height = %d\n", destImage.height); - - if (mComParams.rawFormat == RAW_FORMAT_YUV420) { - - srcY = pSrcBuffer +srcImage.offsets[0]; - srcU = pSrcBuffer + srcImage.offsets[1]; - srcV = pSrcBuffer + srcImage.offsets[2]; - dstY = pDestBuffer + destImage.offsets[0]; - dstUV = pDestBuffer + destImage.offsets[1]; - - for (i = 0; i < height; i ++) { - memcpy(dstY, srcY, width); - srcY += srcImage.pitches[0]; - dstY += destImage.pitches[0]; - } - - for (i = 0; i < height / 2; i ++) { - for (j = 0; j < width; j+=2) { - dstUV [j] = srcU [j / 2]; - dstUV [j + 1] = srcV [j / 2]; - } - srcU += srcImage.pitches[1]; - srcV += srcImage.pitches[2]; - dstUV += destImage.pitches[1]; - } - }else if (mComParams.rawFormat == RAW_FORMAT_NV12) { - - srcY = pSrcBuffer + srcImage.offsets[0]; - dstY = pDestBuffer + destImage.offsets[0]; - srcUV = pSrcBuffer + srcImage.offsets[1]; - dstUV = pDestBuffer + destImage.offsets[1]; - - for (i = 0; i < height; i++) { - memcpy(dstY, srcY, width); - srcY += srcImage.pitches[0]; - dstY += destImage.pitches[0]; - } - - for (i = 0; i < height / 2; i++) { - memcpy(dstUV, srcUV, width); - srcUV += srcImage.pitches[1]; - dstUV += destImage.pitches[1]; - } - } else { - LOG_E("Raw format not supoort\n"); - return ENCODE_FAIL; - } - - vaStatus = vaUnmapBuffer(mVADisplay, srcImage.buf); - CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); - vaStatus = vaDestroyImage(mVADisplay, srcImage.image_id); - CHECK_VA_STATUS_RETURN("vaDestroyImage"); - - vaStatus = vaUnmapBuffer(mVADisplay, destImage.buf); - CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); - vaStatus = vaDestroyImage(mVADisplay, destImage.image_id); - CHECK_VA_STATUS_RETURN("vaDestroyImage"); - - return ENCODE_SUCCESS; -} - -VASurfaceID VideoEncoderBase::CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vinfo) { - VAStatus vaStatus; - VASurfaceAttribExternalBuffers extbuf; - VASurfaceAttrib attribs[2]; - VASurfaceID surface = VA_INVALID_SURFACE; - int type; - unsigned long data = value; - - extbuf.pixel_format = VA_FOURCC_NV12; - extbuf.width = vinfo.width; - extbuf.height = vinfo.height; - 
extbuf.data_size = vinfo.size; - if (extbuf.data_size == 0) - extbuf.data_size = vinfo.lumaStride * vinfo.height * 1.5; - extbuf.num_buffers = 1; - extbuf.num_planes = 3; - extbuf.pitches[0] = vinfo.lumaStride; - extbuf.pitches[1] = vinfo.lumaStride; - extbuf.pitches[2] = vinfo.lumaStride; - extbuf.pitches[3] = 0; - extbuf.offsets[0] = 0; - extbuf.offsets[1] = vinfo.lumaStride * vinfo.height; - extbuf.offsets[2] = extbuf.offsets[1]; - extbuf.offsets[3] = 0; - extbuf.buffers = &data; - extbuf.flags = 0; - extbuf.private_data = NULL; - - switch(vinfo.mode) { - case MEM_MODE_GFXHANDLE: - type = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC; - extbuf.pixel_format = vinfo.format; - break; - case MEM_MODE_KBUFHANDLE: - type = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM; - break; - case MEM_MODE_MALLOC: - type = VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR; - break; - case MEM_MODE_NONECACHE_USRPTR: - type = VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR; - extbuf.flags |= VA_SURFACE_EXTBUF_DESC_UNCACHED; - break; - case MEM_MODE_SURFACE: - type = VA_SURFACE_ATTRIB_MEM_TYPE_VA; - break; - case MEM_MODE_ION: - case MEM_MODE_V4L2: - case MEM_MODE_USRPTR: - case MEM_MODE_CI: - default: - //not support - return VA_INVALID_SURFACE; - } - - if (mSupportedSurfaceMemType & type == 0) - return VA_INVALID_SURFACE; - - attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType; - attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE; - attribs[0].value.type = VAGenericValueTypeInteger; - attribs[0].value.value.i = type; - - attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor; - attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE; - attribs[1].value.type = VAGenericValueTypePointer; - attribs[1].value.value.p = (void *)&extbuf; - - vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, vinfo.width, - vinfo.height, &surface, 1, attribs, 2); - if (vaStatus != VA_STATUS_SUCCESS) - LOG_E("vaCreateSurfaces failed. 
vaStatus = %d\n", vaStatus); - - return surface; -} diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 75c3874..ca71f27 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -16,15 +16,7 @@ #include "IntelMetadataBuffer.h" #include #include - -struct SurfaceMap { - VASurfaceID surface; - VASurfaceID surface_backup; - MetadataBufferType type; - int32_t value; - ValueInfo vinfo; - bool added; -}; +#include "VideoEncoderUtils.h" struct EncodeTask { VASurfaceID enc_surface; @@ -84,12 +76,7 @@ private: Encode_Status setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer); Encode_Status getNewUsrptrFromSurface(uint32_t width, uint32_t height, uint32_t format, uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr); - Encode_Status surfaceMappingForSurface(SurfaceMap *map); - Encode_Status surfaceMappingForGfxHandle(SurfaceMap *map); - Encode_Status surfaceMappingForKbufHandle(SurfaceMap *map); - Encode_Status surfaceMappingForMalloc(SurfaceMap *map); - Encode_Status surfaceMapping(SurfaceMap *map); - SurfaceMap *findSurfaceMapByValue(int32_t value); + VASurfaceMap* findSurfaceMapByValue(int32_t value); Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid); void PrepareFrameInfo(EncodeTask* task); @@ -131,7 +118,7 @@ protected: VABufferID mSliceParamBuf; VASurfaceID* mAutoRefSurfaces; - android::List mSrcSurfaceMapList; //all mapped surface info list from input buffer + android::List mSrcSurfaceMapList; //all mapped surface info list from input buffer android::List mEncodeTaskList; //all encode tasks list android::List mVACodedBufferList; //all available codedbuffer list @@ -164,6 +151,9 @@ protected: //supported surface memory types int mSupportedSurfaceMemType; + //VASurface mapping extra action + int mVASurfaceMappingAction; + #ifdef INTEL_VIDEO_XPROC_SHARING uint32_t mSessionFlag; #endif diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index d91ed23..fa4c2a1 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -54,6 +54,8 @@ typedef enum { RAW_FORMAT_YUV422 = 2, RAW_FORMAT_YUV444 = 4, RAW_FORMAT_NV12 = 8, + RAW_FORMAT_RGBA = 16, + RAW_FORMAT_OPAQUE = 32, RAW_FORMAT_PROTECTED = 0x80000000, RAW_FORMAT_LAST } VideoRawFormat; diff --git a/videoencoder/VideoEncoderLog.h b/videoencoder/VideoEncoderLog.h index 8176489..74760fe 100644 --- a/videoencoder/VideoEncoderLog.h +++ b/videoencoder/VideoEncoderLog.h @@ -9,10 +9,11 @@ #ifndef __VIDEO_ENCODER_LOG_H__ #define __VIDEO_ENCODER_LOG_H__ +#define LOG_TAG "VideoEncoder" + // Components #include -#define LOG_TAG "VideoEncoder" #if 1 #define LOG_V(...) LOGV_IF(gLogLevel, __VA_ARGS__) #define LOG_I(...) 
LOGI_IF(gLogLevel, __VA_ARGS__) diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp new file mode 100644 index 0000000..69d2518 --- /dev/null +++ b/videoencoder/VideoEncoderUtils.cpp @@ -0,0 +1,692 @@ +#include "VideoEncoderLog.h" +#include "VideoEncoderUtils.h" +#include +#include + +#ifdef IMG_GFX +#include +//#include +#include + +static hw_module_t const *gModule = NULL; +static gralloc_module_t *gAllocMod = NULL; /* get by force hw_module_t */ +static alloc_device_t *gAllocDev = NULL; + +static int gfx_init(void) { + + int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &gModule); + if (err) { + LOG_E("FATAL: can't find the %s module", GRALLOC_HARDWARE_MODULE_ID); + return -1; + } else + LOG_I("hw_get_module returned\n"); + gAllocMod = (gralloc_module_t *)gModule; + + return 0; +} + +static int gfx_alloc(uint32_t w, uint32_t h, int format, + int usage, buffer_handle_t* handle, int32_t* stride) { + + int err; + + if (!gAllocDev) { + if (!gModule) { + if (gfx_init()) { + LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID); + return -1; + } + } + + err = gralloc_open(gModule, &gAllocDev); + if (err) { + LOG_E("FATAL: gralloc open failed\n"); + return -1; + } + } + + err = gAllocDev->alloc(gAllocDev, w, h, format, usage, handle, stride); + if (err) { + LOG_E("alloc(%u, %u, %d, %08x, ...) failed %d (%s)\n", + w, h, format, usage, err, strerror(-err)); + } + + return err; +} + +static int gfx_free(buffer_handle_t handle) { + + int err; + + if (!gAllocDev) { + if (!gModule) { + if (gfx_init()) { + LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID); + return -1; + } + } + + err = gralloc_open(gModule, &gAllocDev); + if (err) { + LOG_E("FATAL: gralloc open failed\n"); + return -1; + } + } + + err = gAllocDev->free(gAllocDev, handle); + if (err) { + LOG_E("free(...) failed %d (%s)\n", err, strerror(-err)); + } + + return err; +} + +static int gfx_lock(buffer_handle_t handle, int usage, + int left, int top, int width, int height, void** vaddr) { + + int err; + + if (!gAllocMod) { + if (gfx_init()) { + LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID); + return -1; + } + } + + err = gAllocMod->lock(gAllocMod, handle, usage, + left, top, width, height, vaddr); + LOG_I("gfx_lock: handle is %x, usage is %x, vaddr is %x.\n", (unsigned int)handle, usage, (unsigned int)*vaddr); + + if (err){ + LOG_E("lock(...) failed %d (%s).\n", err, strerror(-err)); + return -1; + } else + LOG_I("lock returned with address %p\n", *vaddr); + + return err; +} + +static int gfx_unlock(buffer_handle_t handle) { + + int err; + + if (!gAllocMod) { + if (gfx_init()) { + LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID); + return -1; + } + } + + err = gAllocMod->unlock(gAllocMod, handle); + if (err) { + LOG_E("unlock(...) failed %d (%s)", err, strerror(-err)); + return -1; + } else + LOG_I("unlock returned\n"); + + return err; +} + +static int gfx_Blit(buffer_handle_t src, buffer_handle_t dest, + int w, int h, int x, int y) +{ + int err; + + if (!gAllocMod) { + if (gfx_init()) { + LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID); + return -1; + } + } + + IMG_gralloc_module_public_t* GrallocMod = (IMG_gralloc_module_public_t*)gModule; + +#ifdef MRFLD_GFX + err = GrallocMod->Blit(GrallocMod, src, dest, w, h, 0, 0, 0); +#else + err = GrallocMod->Blit2(GrallocMod, src, dest, w, h, 0, 0); +#endif + + if (err) { + LOG_E("Blit(...) 
failed %d (%s)", err, strerror(-err)); + return -1; + } else + LOG_I("Blit returned\n"); + + return err; +} +#endif + +extern "C" { +VAStatus vaLockSurface(VADisplay dpy, + VASurfaceID surface, + unsigned int *fourcc, + unsigned int *luma_stride, + unsigned int *chroma_u_stride, + unsigned int *chroma_v_stride, + unsigned int *luma_offset, + unsigned int *chroma_u_offset, + unsigned int *chroma_v_offset, + unsigned int *buffer_name, + void **buffer +); + +VAStatus vaUnlockSurface(VADisplay dpy, + VASurfaceID surface +); +} + +VASurfaceMap::VASurfaceMap(VADisplay display, int hwcap) { + + mVADisplay = display; + mSupportedSurfaceMemType = hwcap; + mValue = 0; + mVASurface = VA_INVALID_SURFACE; + mTracked = false; + mAction = 0; + memset(&mVinfo, 0, sizeof(ValueInfo)); +#ifdef IMG_GFX + mGfxHandleAllocated = false; +#endif +} + +VASurfaceMap::~VASurfaceMap() { + + if (!mTracked && (mVASurface != VA_INVALID_SURFACE)) + vaDestroySurfaces(mVADisplay, &mVASurface, 1); + +#ifdef IMG_GFX + if (mGfxHandleAllocated) + gfx_free(mGfxHandle); +#endif +} + +Encode_Status VASurfaceMap::doMapping() { + + Encode_Status ret = ENCODE_SUCCESS; + + if (mVASurface == VA_INVALID_SURFACE) { + + bool AllocSurface = false; + mVASurfaceWidth = mVinfo.lumaStride; + mVASurfaceHeight = mVinfo.height; + + if (mAction & MAP_ACTION_ALIGN64 && mVASurfaceWidth % 64 != 0) { + //check if source is not 64 aligned, must allocate new 64 aligned vasurface(EXternalMemoryNULL) + mVASurfaceWidth = (mVASurfaceWidth + 63 ) & ~63; + mAction |= MAP_ACTION_COPY; + } + + if (mAction & MAP_ACTION_COPY) //must allocate new vasurface(EXternalMemoryNULL) + AllocSurface = true; + + if (mAction & MAP_ACTION_COLORCONVERT) { + + #ifdef IMG_GFX //only enable on IMG chip + /*only support gfx buffer, need allocate new gfx buffer, then map new one to vasurface */ + if (mVinfo.mode != MEM_MODE_GFXHANDLE) + return ENCODE_NOT_SUPPORTED; + + //do not trust valueinfo, directly get from structure + IMG_native_handle_t* h = (IMG_native_handle_t*) mValue; + //only allocate new buffer if color format is not NV12 + if (HAL_PIXEL_FORMAT_NV12 == h->iFormat || 0x7FA00E00 == h->iFormat) //OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar + mAction &= ~MAP_ACTION_COLORCONVERT; + else + AllocSurface = true; + LOG_I("src gfx buffer iFormat=%x, iWidth=%d, iHeight=%d in handle structure", h->iFormat, h->iWidth, h->iHeight); + #else + return ENCODE_NOT_SUPPORTED; + #endif + } + + if (AllocSurface) { + //allocate new buffer and map to vasurface + ret = doAllocation(); + CHECK_ENCODE_STATUS_RETURN("doAllocation"); + } else { + //direct map mem to vasurface + ret = MappingToVASurface(); + CHECK_ENCODE_STATUS_RETURN("MappingToVASurface"); + } + } + + if (mAction & MAP_ACTION_COLORCONVERT) { + ret = doActionColConv(); + CHECK_ENCODE_STATUS_RETURN("doActionColConv"); + } else if (mAction & MAP_ACTION_COPY) { + //keep src color format is NV12, then do copy + ret = doActionCopy(); + CHECK_ENCODE_STATUS_RETURN("doActionCopy"); + } + + return ENCODE_SUCCESS; +} + +Encode_Status VASurfaceMap::MappingToVASurface() { + + Encode_Status ret = ENCODE_SUCCESS; + + if (mVASurface != VA_INVALID_SURFACE) { + LOG_I("VASurface is already set before, nothing to do here\n"); + return ENCODE_SUCCESS; + } + LOG_I("MappingToVASurface mode=%d, value=%x\n", mVinfo.mode, mValue); + + const char *mode = NULL; + switch (mVinfo.mode) { + case MEM_MODE_SURFACE: + mode = "SURFACE"; + ret = MappingSurfaceID(); + break; + case MEM_MODE_GFXHANDLE: + mode = "GFXHANDLE"; + ret = MappingGfxHandle(); + break; + case 
MEM_MODE_KBUFHANDLE: + mode = "KBUFHANDLE"; + ret = MappingKbufHandle(); + break; + case MEM_MODE_MALLOC: + case MEM_MODE_NONECACHE_USRPTR: + mode = "MALLOC or NONCACHE_USRPTR"; + ret = MappingMallocPTR(); + break; + case MEM_MODE_ION: + case MEM_MODE_V4L2: + case MEM_MODE_USRPTR: + case MEM_MODE_CI: + default: + LOG_I("UnSupported memory mode 0x%08x", mVinfo.mode); + return ENCODE_NOT_SUPPORTED; + } + + LOG_I("%s: Format=%x, lumaStride=%d, width=%d, height=%d\n", mode, mVinfo.format, mVinfo.lumaStride, mVinfo.width, mVinfo.height); + LOG_I("vaSurface 0x%08x is created for value = 0x%08x", mVASurface, mValue); + + return ret; +} + +Encode_Status VASurfaceMap::MappingSurfaceID() { + + VAStatus vaStatus = VA_STATUS_SUCCESS; + VASurfaceID surface; + + //try to get kbufhandle from SurfaceID + uint32_t fourCC = 0; + uint32_t lumaStride = 0; + uint32_t chromaUStride = 0; + uint32_t chromaVStride = 0; + uint32_t lumaOffset = 0; + uint32_t chromaUOffset = 0; + uint32_t chromaVOffset = 0; + uint32_t kBufHandle = 0; + + vaStatus = vaLockSurface( + (VADisplay)mVinfo.handle, (VASurfaceID)mValue, + &fourCC, &lumaStride, &chromaUStride, &chromaVStride, + &lumaOffset, &chromaUOffset, &chromaVOffset, &kBufHandle, NULL); + + CHECK_VA_STATUS_RETURN("vaLockSurface"); + LOG_I("Surface incoming = 0x%08x", mValue); + LOG_I("lumaStride = %d", lumaStride); + LOG_I("chromaUStride = %d", chromaUStride); + LOG_I("chromaVStride = %d", chromaVStride); + LOG_I("lumaOffset = %d", lumaOffset); + LOG_I("chromaUOffset = %d", chromaUOffset); + LOG_I("chromaVOffset = %d", chromaVOffset); + LOG_I("kBufHandle = 0x%08x", kBufHandle); + LOG_I("fourCC = %d", fourCC); + + vaStatus = vaUnlockSurface((VADisplay)mVinfo.handle, (VASurfaceID)mValue); + CHECK_VA_STATUS_RETURN("vaUnlockSurface"); + + mVinfo.mode = MEM_MODE_KBUFHANDLE; + mVinfo.size = mVinfo.lumaStride * mVinfo.height * 1.5; + + mVASurface = CreateSurfaceFromExternalBuf(kBufHandle, mVinfo); + if (mVASurface == VA_INVALID_SURFACE) + return ENCODE_DRIVER_FAIL; + + return ENCODE_SUCCESS; +} + +Encode_Status VASurfaceMap::MappingGfxHandle() { + + LOG_I("MappingGfxHandle %x......\n", mValue); + LOG_I("format = 0x%08x, lumaStride = %d in ValueInfo\n", mVinfo.format, mVinfo.lumaStride); + +#ifdef IMG_GFX + // color format may be OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar or HAL_PIXEL_FORMAT_NV12 + IMG_native_handle_t* h = (IMG_native_handle_t*) mValue; + mVinfo.lumaStride = h->iWidth; + mVinfo.format = h->iFormat; + mVinfo.width = h->iWidth; + mVinfo.height = h->iHeight; + LOG_I("Update ValueInfo with iWidth=%d, iHeight=%d, iFormat=%x in handle structure\n", h->iWidth, h->iHeight, h->iFormat); +#endif + + mVASurface = CreateSurfaceFromExternalBuf(mValue, mVinfo); + if (mVASurface == VA_INVALID_SURFACE) + return ENCODE_DRIVER_FAIL; + + return ENCODE_SUCCESS; +} + +Encode_Status VASurfaceMap::MappingKbufHandle() { + + LOG_I("MappingKbufHandle value=%d\n", mValue); + + mVinfo.size = mVinfo.lumaStride * mVinfo.height * 1.5; + mVASurface = CreateSurfaceFromExternalBuf(mValue, mVinfo); + if (mVASurface == VA_INVALID_SURFACE) + return ENCODE_DRIVER_FAIL; + + return ENCODE_SUCCESS; +} + +Encode_Status VASurfaceMap::MappingMallocPTR() { + + mVASurface = CreateSurfaceFromExternalBuf(mValue, mVinfo); + if (mVASurface == VA_INVALID_SURFACE) + return ENCODE_DRIVER_FAIL; + + return ENCODE_SUCCESS; +} + +Encode_Status VASurfaceMap::doAllocation() { + + if (mAction & MAP_ACTION_COLORCONVERT) { + #ifdef IMG_GFX + //for gfx buffer color convert + int usage = GRALLOC_USAGE_HW_RENDER | 
GRALLOC_USAGE_HW_TEXTURE;
+
+        int32_t stride;
+        if (gfx_alloc(mVASurfaceWidth, mVASurfaceHeight, /*0x7FA00E00*/HAL_PIXEL_FORMAT_NV12, usage, &mGfxHandle, &stride) != 0)
+            return ENCODE_DRIVER_FAIL;
+
+        LOG_I("Create a new gfx buffer handle 0x%08x for color convert, width=%d, height=%d, stride=%d\n",
+               (unsigned int)mGfxHandle, mVASurfaceWidth, mVASurfaceHeight, stride);
+
+        ValueInfo vinfo;
+        memset(&vinfo, 0, sizeof(ValueInfo));
+        vinfo.mode = MEM_MODE_GFXHANDLE;
+        vinfo.width = mVASurfaceWidth;
+        vinfo.height = mVASurfaceHeight;
+        vinfo.lumaStride = stride;
+        mVASurface = CreateSurfaceFromExternalBuf((int32_t)mGfxHandle, vinfo);
+        if (mVASurface == VA_INVALID_SURFACE)
+            return ENCODE_DRIVER_FAIL;
+        mGfxHandleAllocated = true;
+        #else
+        return ENCODE_NOT_SUPPORTED;
+        #endif
+
+    } else {
+        //for 64 align and uncached mem
+        LOG_I("Create a new vaSurface for Action 0x%08x\n", mAction);
+        mVASurface = CreateNewVASurface(mVADisplay, mVASurfaceWidth, mVASurfaceHeight);
+        if (mVASurface == VA_INVALID_SURFACE)
+            return ENCODE_DRIVER_FAIL;
+    }
+
+    return ENCODE_SUCCESS;
+}
+
+//always copy with same color format NV12
+Encode_Status VASurfaceMap::doActionCopy() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    uint32_t width = 0, height = 0, stride = 0;
+    uint8_t *pSrcBuffer, *pDestBuffer;
+
+    LOG_I("Copying Src Buffer data to VASurface\n");
+
+    if (mVinfo.mode != MEM_MODE_MALLOC && mVinfo.mode != MEM_MODE_GFXHANDLE) {
+        LOG_E("Not support copy in mode %d", mVinfo.mode);
+        return ENCODE_NOT_SUPPORTED;
+    }
+
+    LOG_I("Src Buffer information\n");
+    LOG_I("Mode = %d, width = %d, stride = %d, height = %d\n",
+          mVinfo.mode, mVinfo.width, mVinfo.lumaStride, mVinfo.height);
+
+    uint32_t srcY_offset, srcUV_offset;
+    uint32_t srcY_pitch, srcUV_pitch;
+
+    if (mVinfo.mode == MEM_MODE_MALLOC) {
+        width = mVinfo.width;
+        height = mVinfo.height;
+        stride = mVinfo.lumaStride;
+        pSrcBuffer = (uint8_t*) mValue;
+        srcY_offset = 0;
+        srcUV_offset = stride * height;
+        srcY_pitch = stride;
+        srcUV_pitch = stride;
+    } else {
+
+        #ifdef IMG_GFX //only enable on IMG chips
+        int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_OFTEN;
+
+        //do not trust valueinfo, directly get from structure
+        IMG_native_handle_t* h = (IMG_native_handle_t*) mValue;
+
+        //only copy expected to be encoded area
+        width = mVinfo.width;
+        height = mVinfo.height;
+        stride = h->iWidth;
+
+        //only support HAL_PIXEL_FORMAT_NV12 & OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar
+        if (HAL_PIXEL_FORMAT_NV12 != h->iFormat && 0x7FA00E00 != h->iFormat) {
+            LOG_E("Not support gfx buffer format %x", h->iFormat);
+            return ENCODE_NOT_SUPPORTED;
+        }
+
+        srcY_offset = 0;
+        srcUV_offset = stride * height;
+        srcY_pitch = stride;
+        srcUV_pitch = stride;
+
+        //lock gfx handle with buffer real size
+        LOG_I("Width=%d,Height=%d,Format=%x in raw gfx handle\n", h->iWidth, h->iHeight, h->iFormat);
+        void* vaddr[3];
+        if (gfx_lock((buffer_handle_t) mValue, usage, 0, 0, h->iWidth, h->iHeight, &vaddr[0]) != 0)
+            return ENCODE_DRIVER_FAIL;
+        pSrcBuffer = (uint8_t*)vaddr[0];
+        #else
+
+        return ENCODE_NOT_SUPPORTED;
+        #endif
+    }
+
+
+    VAImage destImage;
+    vaStatus = vaDeriveImage(mVADisplay, mVASurface, &destImage);
+    CHECK_VA_STATUS_RETURN("vaDeriveImage");
+    vaStatus = vaMapBuffer(mVADisplay, destImage.buf, (void **)&pDestBuffer);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    LOG_I("\nDest VASurface information\n");
+    LOG_I("pitches[0] = %d\n", destImage.pitches[0]);
+    LOG_I("pitches[1] = %d\n", destImage.pitches[1]);
+    LOG_I("offsets[0] = %d\n", destImage.offsets[0]);
+
LOG_I("offsets[1] = %d\n", destImage.offsets[1]); + LOG_I("num_planes = %d\n", destImage.num_planes); + LOG_I("width = %d\n", destImage.width); + LOG_I("height = %d\n", destImage.height); + + if (width > destImage.width || height > destImage.height) { + LOG_E("src buffer is bigger than destination buffer\n"); + return ENCODE_INVALID_PARAMS; + } + + uint8_t *srcY, *dstY; + uint8_t *srcU, *srcV; + uint8_t *srcUV, *dstUV; + + srcY = pSrcBuffer + srcY_offset; + dstY = pDestBuffer + destImage.offsets[0]; + srcUV = pSrcBuffer + srcUV_offset; + dstUV = pDestBuffer + destImage.offsets[1]; + + for (uint32_t i = 0; i < height; i++) { + memcpy(dstY, srcY, width); + srcY += srcY_pitch; + dstY += destImage.pitches[0]; + } + + for (uint32_t i = 0; i < height / 2; i++) { + memcpy(dstUV, srcUV, width); + srcUV += srcUV_pitch; + dstUV += destImage.pitches[1]; + } + + vaStatus = vaUnmapBuffer(mVADisplay, destImage.buf); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + vaStatus = vaDestroyImage(mVADisplay, destImage.image_id); + CHECK_VA_STATUS_RETURN("vaDestroyImage"); + +#ifdef IMG_GFX + if (mVinfo.mode == MEM_MODE_GFXHANDLE) { + //unlock gfx handle + gfx_unlock((buffer_handle_t) mValue); + } +#endif + LOG_I("Copying Src Buffer data to VASurface Complete\n"); + + return ENCODE_SUCCESS; +} + +Encode_Status VASurfaceMap::doActionColConv() { + +#ifdef IMG_GFX +LOG_I("gfx_Blit width=%d, height=%d\n", mVinfo.width, mVinfo.height); + if (gfx_Blit((buffer_handle_t)mValue, mGfxHandle, +// mVASurfaceWidth, mVASurfaceHeight, 0, 0) != 0) + mVinfo.width, mVinfo.height, 0, 0) != 0) + return ENCODE_DRIVER_FAIL; + return ENCODE_SUCCESS; +#else + return ENCODE_NOT_SUPPORTED; +#endif +} + +VASurfaceID VASurfaceMap::CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vinfo) { + + VAStatus vaStatus; + VASurfaceAttribExternalBuffers extbuf; + VASurfaceAttrib attribs[2]; + VASurfaceID surface = VA_INVALID_SURFACE; + int type; + unsigned long data = value; + + extbuf.pixel_format = VA_FOURCC_NV12; + extbuf.width = vinfo.width; + extbuf.height = vinfo.height; + extbuf.data_size = vinfo.size; + if (extbuf.data_size == 0) + extbuf.data_size = vinfo.lumaStride * vinfo.height * 1.5; + extbuf.num_buffers = 1; + extbuf.num_planes = 3; + extbuf.pitches[0] = vinfo.lumaStride; + extbuf.pitches[1] = vinfo.lumaStride; + extbuf.pitches[2] = vinfo.lumaStride; + extbuf.pitches[3] = 0; + extbuf.offsets[0] = 0; + extbuf.offsets[1] = vinfo.lumaStride * vinfo.height; + extbuf.offsets[2] = extbuf.offsets[1]; + extbuf.offsets[3] = 0; + extbuf.buffers = &data; + extbuf.flags = 0; + extbuf.private_data = NULL; + + switch(vinfo.mode) { + case MEM_MODE_GFXHANDLE: + type = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC; + break; + case MEM_MODE_KBUFHANDLE: + type = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM; + break; + case MEM_MODE_MALLOC: + type = VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR; + break; + case MEM_MODE_NONECACHE_USRPTR: + type = VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR; + extbuf.flags |= VA_SURFACE_EXTBUF_DESC_UNCACHED; + break; + case MEM_MODE_SURFACE: + case MEM_MODE_ION: + case MEM_MODE_V4L2: + case MEM_MODE_USRPTR: + case MEM_MODE_CI: + default: + //not support + return VA_INVALID_SURFACE; + } + + if (!(mSupportedSurfaceMemType & type)) + return VA_INVALID_SURFACE; + + attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType; + attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE; + attribs[0].value.type = VAGenericValueTypeInteger; + attribs[0].value.value.i = type; + + attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor; + 
attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE; + attribs[1].value.type = VAGenericValueTypePointer; + attribs[1].value.value.p = (void *)&extbuf; + + vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, vinfo.width, + vinfo.height, &surface, 1, attribs, 2); + if (vaStatus != VA_STATUS_SUCCESS) + LOG_E("vaCreateSurfaces failed. vaStatus = %d\n", vaStatus); + + return surface; +} + +VASurfaceID CreateNewVASurface(VADisplay display, int32_t width, int32_t height) { + + VAStatus vaStatus; + VASurfaceID surface = VA_INVALID_SURFACE; + VASurfaceAttrib attribs[2]; + VASurfaceAttribExternalBuffers extbuf; + unsigned long data; + + extbuf.pixel_format = VA_FOURCC_NV12; + extbuf.width = width; + extbuf.height = height; + extbuf.data_size = width * height * 3 / 2; + extbuf.num_buffers = 1; + extbuf.num_planes = 3; + extbuf.pitches[0] = width; + extbuf.pitches[1] = width; + extbuf.pitches[2] = width; + extbuf.pitches[3] = 0; + extbuf.offsets[0] = 0; + extbuf.offsets[1] = width * height; + extbuf.offsets[2] = extbuf.offsets[1]; + extbuf.offsets[3] = 0; + extbuf.buffers = &data; + extbuf.flags = 0; + extbuf.private_data = NULL; + + attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType; + attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE; + attribs[0].value.type = VAGenericValueTypeInteger; + attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_VA; + + attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor; + attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE; + attribs[1].value.type = VAGenericValueTypePointer; + attribs[1].value.value.p = (void *)&extbuf; + + vaStatus = vaCreateSurfaces(display, VA_RT_FORMAT_YUV420, width, + height, &surface, 1, attribs, 2); + if (vaStatus != VA_STATUS_SUCCESS) + LOG_E("vaCreateSurfaces failed. 
vaStatus = %d\n", vaStatus); + + return surface; +} + diff --git a/videoencoder/VideoEncoderUtils.h b/videoencoder/VideoEncoderUtils.h new file mode 100644 index 0000000..9f6b9cb --- /dev/null +++ b/videoencoder/VideoEncoderUtils.h @@ -0,0 +1,70 @@ +#ifndef __VIDEO_ENCODER_UTILS_H__ +#define __VIDEO_ENCODER_UTILS_H__ +#include +#include +#include "VideoEncoderDef.h" +#include "IntelMetadataBuffer.h" +#ifdef IMG_GFX +#include +#endif + +#define MAP_ACTION_COPY 0x00000001 //mem copy +#define MAP_ACTION_ALIGN64 0x00000002 //align 64 +#define MAP_ACTION_COLORCONVERT 0x00000004 //color convert +#define MAP_ACTION_RESIZE 0x00000008 //resize + +class VASurfaceMap { +public: + VASurfaceMap(VADisplay display, int hwcap); + ~VASurfaceMap(); + + Encode_Status doMapping(); + VASurfaceID getVASurface() {return mVASurface;} + int32_t getValue() {return mValue;} + ValueInfo* getValueInfo() {return &mVinfo;} + + void setVASurface(VASurfaceID surface) {mVASurface = surface;} + void setValue(int32_t value) {mValue = value;} + void setValueInfo(ValueInfo& vinfo) {memcpy(&mVinfo, &vinfo, sizeof(ValueInfo));} + void setTracked() {mTracked = true;} + void setAction(int32_t action) {mAction = action;} + +private: + Encode_Status doAllocation(); + Encode_Status doActionCopy(); + Encode_Status doActionColConv(); + Encode_Status MappingToVASurface(); + Encode_Status MappingSurfaceID(); + Encode_Status MappingGfxHandle(); + Encode_Status MappingKbufHandle(); + Encode_Status MappingMallocPTR(); + VASurfaceID CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vinfo); + + VADisplay mVADisplay; + + int32_t mValue; + + VASurfaceID mVASurface; + int32_t mVASurfaceWidth; + int32_t mVASurfaceHeight; + +// MetadataBufferType mType; + + ValueInfo mVinfo; + bool mTracked; + + int32_t mAction; + + int32_t mSupportedSurfaceMemType; + +#ifdef IMG_GFX + //special for gfx color format converter + buffer_handle_t mGfxHandle; + bool mGfxHandleAllocated; +#endif +}; + +VASurfaceID CreateNewVASurface(VADisplay display, int32_t width, int32_t height); + +#endif + -- cgit v1.2.3 From 3a162366ed0bc1f04cab7f1ce703ad7a6374d7d7 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Tue, 12 Nov 2013 22:57:55 +0800 Subject: libmix: refine error reporting to allow it to be enabled at any time. BZ: 147912 The old logic only allow error reporting to be enabled in setupVA. 
Change-Id: I7af6a0843a6d493e4830051938bc89e8910349c2 Signed-off-by: Dan Liang --- videodecoder/VideoDecoderBase.cpp | 4 ---- videodecoder/VideoDecoderBase.h | 1 + videodecoder/VideoDecoderDefs.h | 4 ---- videodecoder/VideoDecoderInterface.h | 1 + 4 files changed, 2 insertions(+), 8 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 00e9d94..e231e11 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -924,10 +924,6 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i return DECODE_MEMORY_FAIL; } - if (mConfigBuffer.flag & WANT_ERROR_REPORT) { - mErrReportEnabled = true; - } - initSurfaceBuffer(true); if ((int32_t)profile == VAProfileSoftwareDecoding) { diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index 5c620d0..ae57546 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -64,6 +64,7 @@ public: virtual Decode_Status signalRenderDone(void * graphichandler); virtual const VideoFormatInfo* getFormatInfo(void); virtual bool checkBufferAvail(); + virtual void enableErrorReport(bool enabled = false) {mErrReportEnabled = enabled; }; protected: // each acquireSurfaceBuffer must be followed by a corresponding outputSurfaceBuffer or releaseSurfaceBuffer. diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index ea1c9f9..d62ad15 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -104,10 +104,6 @@ typedef enum { // indicate it's the last output frame of the sequence IS_EOS = 0x10000, - - // indicate whether error reporting is needed - WANT_ERROR_REPORT = 0x20000, - } VIDEO_BUFFER_FLAG; typedef enum diff --git a/videodecoder/VideoDecoderInterface.h b/videodecoder/VideoDecoderInterface.h index 79399c9..4c6a039 100644 --- a/videodecoder/VideoDecoderInterface.h +++ b/videodecoder/VideoDecoderInterface.h @@ -41,6 +41,7 @@ public: virtual Decode_Status signalRenderDone(void * graphichandler) = 0; virtual bool checkBufferAvail() = 0; virtual Decode_Status getRawDataFromSurface(VideoRenderBuffer *renderBuffer = NULL, uint8_t *pRawData = NULL, uint32_t *pSize = NULL, bool internal = true) = 0; + virtual void enableErrorReport(bool enabled) = 0; }; #endif /* VIDEO_DECODER_INTERFACE_H_ */ -- cgit v1.2.3 From cb4340fbd22188f946b0016e5c72a37bcbe024ec Mon Sep 17 00:00:00 2001 From: Li Zeng Date: Wed, 13 Nov 2013 08:35:23 +0800 Subject: Video decode: add a flag for OMX IL to pass tiling information BZ: 151378 add a flag for OMX IL to pass tiling information Change-Id: Ie876b6cf74c83ba0105729b95ce293edb484a381 Signed-off-by: Li Zeng Signed-off-by: pingshix --- videodecoder/VideoDecoderBase.cpp | 2 ++ videodecoder/VideoDecoderDefs.h | 3 +++ 2 files changed, 5 insertions(+) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index e231e11..172604d 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -850,6 +850,8 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i mVASurfaceAttrib->offsets[3] = 0; mVASurfaceAttrib->private_data = (void *)mConfigBuffer.nativeWindow; mVASurfaceAttrib->flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC; + if (mConfigBuffer.flag & USE_TILING_MEMORY) + mVASurfaceAttrib->flags |= VA_SURFACE_EXTBUF_DESC_ENABLE_TILING; for (int i = 0; i < mNumSurfaces; i++) { mVASurfaceAttrib->buffers[i] = (unsigned int )mConfigBuffer.graphicBufferHandler[i]; diff --git 
a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h
index d62ad15..84d90e2 100644
--- a/videodecoder/VideoDecoderDefs.h
+++ b/videodecoder/VideoDecoderDefs.h
@@ -104,6 +104,9 @@ typedef enum {

     // indicate it's the last output frame of the sequence
     IS_EOS = 0x10000,
+
+    // indicate that tiling surfaces should be allocated
+    USE_TILING_MEMORY = 0x20000,
 } VIDEO_BUFFER_FLAG;

 typedef enum
--
cgit v1.2.3


From fc1c75ab293697d543636a3a41bdf5e749edc780 Mon Sep 17 00:00:00 2001
From: wfeng6
Date: Tue, 12 Nov 2013 23:08:35 +0800
Subject: Use short format for AVC playback on BYT

BZ: 151374

Use short format for AVC playback on BYT. It can also be used as a
debug tool for Widevine playback problems.

Change-Id: Ife3ae3b8fa390a776e646c4265459ce6e8cdbe9a
Signed-off-by: wfeng6
---
 videodecoder/VideoDecoderAVC.cpp  | 43 +++++++++++++++++++++++++++++++++++++++
 videodecoder/VideoDecoderAVC.h    |  3 +++
 videodecoder/VideoDecoderBase.cpp |  8 ++++++--
 videodecoder/VideoDecoderBase.h   |  2 +-
 4 files changed, 53 insertions(+), 3 deletions(-)

diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp
index 957897b..4879252 100644
--- a/videodecoder/VideoDecoderAVC.cpp
+++ b/videodecoder/VideoDecoderAVC.cpp
@@ -892,3 +892,46 @@ Decode_Status VideoDecoderAVC::checkHardwareCapability(VAProfile profile) {
 #endif
     return DECODE_SUCCESS;
 }
+
+#ifdef USE_AVC_SHORT_FORMAT
+Decode_Status VideoDecoderAVC::getCodecSpecificConfigs(
+    VAProfile profile, VAConfigID *config)
+{
+    VAStatus vaStatus;
+    VAConfigAttrib attrib[2];
+
+    if (config == NULL) {
+        ETRACE("Invalid parameter!");
+        return DECODE_FAIL;
+    }
+
+    attrib[0].type = VAConfigAttribRTFormat;
+    attrib[0].value = VA_RT_FORMAT_YUV420;
+    attrib[1].type = VAConfigAttribDecSliceMode;
+    attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
+
+    vaStatus = vaGetConfigAttributes(mVADisplay,profile,VAEntrypointVLD, &attrib[1], 1);
+
+    if (attrib[1].value & VA_DEC_SLICE_MODE_BASE) {
+        ITRACE("AVC short format used");
+        attrib[1].value = VA_DEC_SLICE_MODE_BASE;
+    } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) {
+        ITRACE("AVC long format used");
+        attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
+    } else {
+        ETRACE("Unsupported Decode Slice Mode!");
+        return DECODE_FAIL;
+    }
+
+    vaStatus = vaCreateConfig(
+            mVADisplay,
+            profile,
+            VAEntrypointVLD,
+            &attrib[0],
+            2,
+            config);
+    CHECK_VA_STATUS("vaCreateConfig");
+
+    return DECODE_SUCCESS;
+}
+#endif
diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h
index 1b3280c..efc9f97 100644
--- a/videodecoder/VideoDecoderAVC.h
+++ b/videodecoder/VideoDecoderAVC.h
@@ -59,6 +59,9 @@ protected:
     bool isNewFrame(vbp_data_h264 *data, bool equalPTS);
     int32_t getDPBSize(vbp_data_h264 *data);
     virtual Decode_Status checkHardwareCapability(VAProfile profile);
+#ifdef USE_AVC_SHORT_FORMAT
+    virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID *config);
+#endif

 private:
     struct DecodedPictureBuffer {
diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp
index 172604d..12a99fd 100644
--- a/videodecoder/VideoDecoderBase.cpp
+++ b/videodecoder/VideoDecoderBase.cpp
@@ -78,6 +78,7 @@ VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type)
     }
     pthread_mutex_init(&mLock, NULL);
     mVideoFormatInfo.mimeType = strdup(mimeType);
+    mUseGEN = false;
 }

 VideoDecoderBase::~VideoDecoderBase() {
@@ -234,7 +235,7 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining, VideoErrorBu
             mOutputTail = NULL;
         }
         vaStatus = vaSetTimestampForSurface(mVADisplay,
outputByPos->renderBuffer.surface, outputByPos->renderBuffer.timeStamp); - if (useGraphicBuffer) { + if (useGraphicBuffer && !mUseGEN) { vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface); fillDecodingErrors(&(outputByPos->renderBuffer)); } @@ -290,7 +291,7 @@ const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining, VideoErrorBu //VTRACE("Output POC %d for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6); vaStatus = vaSetTimestampForSurface(mVADisplay, output->renderBuffer.surface, output->renderBuffer.timeStamp); - if (useGraphicBuffer) { + if (useGraphicBuffer && !mUseGEN) { vaSyncSurface(mVADisplay, output->renderBuffer.surface); fillDecodingErrors(&(output->renderBuffer)); } @@ -769,10 +770,13 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i if (profile >= VAProfileH264Baseline && profile <= VAProfileVC1Advanced) { ITRACE("Using GEN driver"); mDisplay = "libva_driver_name=i965"; + mUseGEN = true; } else { ITRACE("Using PVR driver"); mDisplay = "libva_driver_name=pvr"; + mUseGEN = false; } + #endif mVADisplay = vaGetDisplay(mDisplay); if (mVADisplay == NULL) { diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index ae57546..80dd518 100644 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -165,7 +165,7 @@ private: void *mParserHandle; void *mSignalBufferPre[MAX_GRAPHIC_BUFFER_NUM]; uint32 mSignalBufferSize; - + bool mUseGEN; protected: void ManageReference(bool enable) {mManageReference = enable;} void setOutputMethod(OUTPUT_METHOD method) {mOutputMethod = method;} -- cgit v1.2.3 From 0f9c82e2290525d6aa3c51b4493dd77f6d5a619e Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Wed, 16 Oct 2013 17:30:26 +0800 Subject: [PDK KK ABSP] libmix: rename MetadataBufferType BZ: 141313 Rename MetadataBufferType as IntelMetadataBufferType to avoid data type redefinition. 
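For illustration, a hedged sketch of how a caller changes under this rename; the constructor and enum values are taken from the diff below, while usrptr and vinfo are placeholders, not names defined by this commit:

    // Before: new IntelMetadataBuffer(MetadataBufferTypeCameraSource, ...);
    IntelMetadataBuffer *imb =
            new IntelMetadataBuffer(IntelMetadataBufferTypeCameraSource, (int32_t)usrptr);
    imb->SetValueInfo(&vinfo); // unchanged; only the enum's type name differs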
Change-Id: I8e74fa9b1c887d8bd6ab51c8667ac91037ff2399 Signed-off-by: Zhao Liang Signed-off-by: Dan Liang --- test/btest.cpp | 8 ++++---- test/mix_encoder.cpp | 16 ++++++++-------- test/mix_encoder2.cpp | 11 +++++------ videoencoder/IntelMetadataBuffer.cpp | 22 +++++++++++----------- videoencoder/IntelMetadataBuffer.h | 22 ++++++++++++---------- videoencoder/PVSoftMPEG4Encoder.cpp | 2 +- videoencoder/VideoEncoderBase.cpp | 8 ++++---- videoencoder/VideoEncoderBase.h | 9 +++++++++ 8 files changed, 54 insertions(+), 44 deletions(-) diff --git a/test/btest.cpp b/test/btest.cpp index 32f34a8..2806d36 100644 --- a/test/btest.cpp +++ b/test/btest.cpp @@ -13,8 +13,8 @@ int main(int argc, char* argv[]) uint32_t size; IMB_Result ret; - MetadataBufferType t1 = MetadataBufferTypeCameraSource; - MetadataBufferType t2; + IntelMetadataBufferType t1 = IntelMetadataBufferTypeCameraSource; + IntelMetadataBufferType t2; int32_t v1 = 0x00000010; int32_t v2 = 0; ValueInfo vi1, *vi2 = NULL; @@ -23,7 +23,7 @@ int main(int argc, char* argv[]) unsigned int count; if (argc > 1) - t1 = (MetadataBufferType) atoi(argv[1]); + t1 = (IntelMetadataBufferType) atoi(argv[1]); if (argc > 2) v1 = atoi(argv[2]); @@ -34,7 +34,7 @@ int main(int argc, char* argv[]) ret = mb1->SetType(t1); ret = mb1->SetValue(v1); mb1->GetMaxBufferSize(); - if (t1 != MetadataBufferTypeGrallocSource) { + if (t1 != IntelMetadataBufferTypeGrallocSource) { ret = mb1->SetValueInfo(&vi1); ret = mb1->SetExtraValues(ev1, 10); ret = mb1->SetExtraValues(ev1, 10); diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index a9c3ebb..75a3f5a 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -370,7 +370,7 @@ void MallocExternalMemoryWithExtraValues() { gUsrptr[i] = (uint8_t*)malloc(size); - gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); gIMB[i]->SetValueInfo(vinfo); } @@ -399,7 +399,7 @@ void MallocExternalMemory() gMallocPtr[i] = (uint8_t*)malloc(size + 4095); gUsrptr[i] = (uint8_t*)((((uint32_t )gMallocPtr[i] + 4095) / 4096 ) * 4096); - gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); gIMB[i]->SetValueInfo(vinfo); } @@ -431,7 +431,7 @@ void GetAllUsrptr() gUsrptr[i] = paramsUsrptrBuffer.usrPtr; gSrcStride = paramsUsrptrBuffer.stride; - gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeEncoder, (int32_t)gUsrptr[i]); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeEncoder, (int32_t)gUsrptr[i]); } } @@ -552,9 +552,9 @@ void CreateUserSurfaces(int mode) vaDestroyImage(gVADisplay, surface_image.image_id); if (mode == 0) - gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeUser, gSurface[i]); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeUser, gSurface[i]); else - gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeUser, gkBufHandle[i]); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeUser, gkBufHandle[i]); } } @@ -577,7 +577,7 @@ void CreateSurfaceMappingForCI() { gUsrptr[i] = (uint8_t*)malloc(size); - gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); gIMB[i]->SetValueInfo(vinfo); } @@ -603,7 +603,7 @@ void CreateGfxhandle(int color) gGraphicBuffer[i] = graphicBuffer; graphicBuffer->lock(usage | 
GraphicBuffer::USAGE_SW_WRITE_OFTEN, (void**)(&gUsrptr[i])); - gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)gGraphicBuffer[i]->handle); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeGrallocSource, (int32_t)gGraphicBuffer[i]->handle); graphicBuffer->unlock(); IMG_native_handle_t* h = (IMG_native_handle_t*) gGraphicBuffer[i]->handle; @@ -627,7 +627,7 @@ void CreateGralloc(int color) { gfx_alloc(gSrcWidth, gSrcHeight, format, usage, &handle, (int32_t*)&gSrcStride); gfx_lock(handle, usage | GRALLOC_USAGE_SW_WRITE_OFTEN, 0, 0, gSrcWidth, gSrcHeight, (void**)(&gUsrptr[i])); - gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)handle); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeGrallocSource, (int32_t)handle); gfx_unlock(handle); IMG_native_handle_t* h = (IMG_native_handle_t*) handle; gSrcHeight = h->iHeight; diff --git a/test/mix_encoder2.cpp b/test/mix_encoder2.cpp index f17311f..5b880f7 100755 --- a/test/mix_encoder2.cpp +++ b/test/mix_encoder2.cpp @@ -387,7 +387,7 @@ public: //keep address 4K aligned mUsrptr[i] = (uint8_t*)((((uint32_t )mMallocPtr[i] + 4095) / 4096 ) * 4096); - mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t) mUsrptr[i]); + mIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeCameraSource, (int32_t) mUsrptr[i]); mIMB[i]->SetValueInfo(&vinfo); // LOG("Malloc address=%x\n", mUsrptr[i]); } @@ -440,7 +440,7 @@ public: mUsrptr[i] = (uint8_t*) ((int) (mBuffers[i]->pointer() + 0x0FFF) & ~0x0FFF); mIMB[i] = new IntelMetadataBuffer(); - mIMB[i]->SetType(MetadataBufferTypeCameraSource); + mIMB[i]->SetType(IntelMetadataBufferTypeCameraSource); #ifdef INTEL_VIDEO_XPROC_SHARING mIMB[i]->SetSessionFlag(mSessionFlag); mIMB[i]->ShareValue(mBuffers[i]); @@ -591,9 +591,9 @@ public: if (mMode == 0) - mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeUser, mSurfaces[i]); + mIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeUser, mSurfaces[i]); else { - mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeUser, mKBufHandle[i]); + mIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeUser, mKBufHandle[i]); vinfo.mode = MEM_MODE_KBUFHANDLE; vinfo.handle = 0; } @@ -724,8 +724,7 @@ public: if (gfx_alloc(mWidth, mHeight, mColorFormat, usage, &mHandle[i], (int32_t*)&mStride) != 0) return UNKNOWN_ERROR; - mIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)mHandle[i]); - + mIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeGrallocSource, (int32_t)mHandle[i]); IMG_native_handle_t* h = (IMG_native_handle_t*) mHandle[i]; mStride = h->iWidth; mHeight = h->iHeight; diff --git a/videoencoder/IntelMetadataBuffer.cpp b/videoencoder/IntelMetadataBuffer.cpp index 686f223..4d58904 100644 --- a/videoencoder/IntelMetadataBuffer.cpp +++ b/videoencoder/IntelMetadataBuffer.cpp @@ -333,7 +333,7 @@ status_t IntelBufferSharingService::onTransact(uint32_t code, const Parcel& data IntelMetadataBuffer::IntelMetadataBuffer() { - mType = MetadataBufferTypeCameraSource; + mType = IntelMetadataBufferTypeCameraSource; mValue = 0; mInfo = NULL; mExtraValues = NULL; @@ -345,7 +345,7 @@ IntelMetadataBuffer::IntelMetadataBuffer() #endif } -IntelMetadataBuffer::IntelMetadataBuffer(MetadataBufferType type, int32_t value) +IntelMetadataBuffer::IntelMetadataBuffer(IntelMetadataBufferType type, int32_t value) { mType = type; mValue = value; @@ -426,16 +426,16 @@ const IntelMetadataBuffer& IntelMetadataBuffer::operator=(const IntelMetadataBuf return *this; } -IMB_Result 
IntelMetadataBuffer::GetType(MetadataBufferType& type) +IMB_Result IntelMetadataBuffer::GetType(IntelMetadataBufferType& type) { type = mType; return IMB_SUCCESS; } -IMB_Result IntelMetadataBuffer::SetType(MetadataBufferType type) +IMB_Result IntelMetadataBuffer::SetType(IntelMetadataBufferType type) { - if (type < MetadataBufferTypeLast) + if (type < IntelMetadataBufferTypeLast) mType = type; else return IMB_INVAL_PARAM; @@ -554,7 +554,7 @@ IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size) if (!data || size == 0) return IMB_INVAL_PARAM; - MetadataBufferType type; + IntelMetadataBufferType type; int32_t value; uint32_t extrasize = size - 8; ValueInfo* info = NULL; @@ -568,9 +568,9 @@ IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size) switch (type) { - case MetadataBufferTypeCameraSource: - case MetadataBufferTypeEncoder: - case MetadataBufferTypeUser: + case IntelMetadataBufferTypeCameraSource: + case IntelMetadataBufferTypeEncoder: + case IntelMetadataBufferTypeUser: { if (extrasize >0 && extrasize < sizeof(ValueInfo)) return IMB_INVAL_BUFFER; @@ -597,7 +597,7 @@ IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size) break; } - case MetadataBufferTypeGrallocSource: + case IntelMetadataBufferTypeGrallocSource: if (extrasize > 0) return IMB_INVAL_BUFFER; @@ -627,7 +627,7 @@ IMB_Result IntelMetadataBuffer::Serialize(uint8_t* &data, uint32_t& size) { if (mBytes == NULL) { - if (mType == MetadataBufferTypeGrallocSource && mInfo) + if (mType == IntelMetadataBufferTypeGrallocSource && mInfo) return IMB_INVAL_PARAM; //assemble bytes according members diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h index fb18317..7518d23 100644 --- a/videoencoder/IntelMetadataBuffer.h +++ b/videoencoder/IntelMetadataBuffer.h @@ -76,24 +76,26 @@ typedef struct { }ValueInfo; typedef enum { - MetadataBufferTypeCameraSource = 0, //for CameraSource - MetadataBufferTypeGrallocSource = 1, //for SurfaceMediaSource - MetadataBufferTypeEncoder = 2, //for WiDi clone mode - MetadataBufferTypeUser = 3, //for WiDi user mode - MetadataBufferTypeLast = 4, //type number -}MetadataBufferType; + IntelMetadataBufferTypeCameraSource = 0, //same with kMetadataBufferTypeCameraSource in framework + IntelMetadataBufferTypeGrallocSource = 1, //same with kMetadataBufferTypeGrallocSource in framework + + IntelMetadataBufferTypeExtension = 0xFF, //intel extended type + IntelMetadataBufferTypeEncoder = IntelMetadataBufferTypeExtension, //for WiDi clone mode + IntelMetadataBufferTypeUser = IntelMetadataBufferTypeExtension + 1, //for WiDi user mode + IntelMetadataBufferTypeLast = IntelMetadataBufferTypeExtension + 2, //type number +}IntelMetadataBufferType; class IntelMetadataBuffer { public: IntelMetadataBuffer(); //for generator - IntelMetadataBuffer(MetadataBufferType type, int32_t value); //for quick generator + IntelMetadataBuffer(IntelMetadataBufferType type, int32_t value); //for quick generator ~IntelMetadataBuffer(); IntelMetadataBuffer(const IntelMetadataBuffer& imb); const IntelMetadataBuffer& operator=(const IntelMetadataBuffer& imb); - IMB_Result GetType(MetadataBufferType &type); - IMB_Result SetType(MetadataBufferType type); + IMB_Result GetType(IntelMetadataBufferType &type); + IMB_Result SetType(IntelMetadataBufferType type); IMB_Result GetValue(int32_t &value); IMB_Result SetValue(int32_t value); IMB_Result GetValueInfo(ValueInfo* &info); @@ -109,7 +111,7 @@ public: static uint32_t GetMaxBufferSize(); private: - 
MetadataBufferType mType; + IntelMetadataBufferType mType; int32_t mValue; ValueInfo* mInfo; diff --git a/videoencoder/PVSoftMPEG4Encoder.cpp b/videoencoder/PVSoftMPEG4Encoder.cpp index 47ddb12..9a8d234 100644 --- a/videoencoder/PVSoftMPEG4Encoder.cpp +++ b/videoencoder/PVSoftMPEG4Encoder.cpp @@ -435,7 +435,7 @@ Encode_Status PVSoftMPEG4Encoder::encode(VideoEncRawBuffer *inBuffer, uint32_t t ValueInfo vinfo; ValueInfo *pvinfo = &vinfo; CHECK(IMB_SUCCESS == imb.UnSerialize(inBuffer->data, inBuffer->size)); - imb.GetType((::MetadataBufferType&)type); + imb.GetType((::IntelMetadataBufferType&)type); imb.GetValue(value); imb.GetValueInfo(pvinfo); if(pvinfo == NULL) { diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index f2ae139..d21f75b 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1505,7 +1505,7 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid) { Encode_Status ret = ENCODE_SUCCESS; - MetadataBufferType type; + IntelMetadataBufferType type; int32_t value; ValueInfo vinfo; ValueInfo *pvinfo = &vinfo; @@ -1532,7 +1532,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA return ENCODE_NULL_PTR; } - type = MetadataBufferTypeUser; + type = IntelMetadataBufferTypeUser; value = (int32_t)inBuffer->data; } @@ -1556,8 +1556,8 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA if (mStoreMetaDataInBuffers.isEnabled) { - //if type is MetadataBufferTypeGrallocSource, use default parameters since no ValueInfo - if (type == MetadataBufferTypeGrallocSource) { + //if type is IntelMetadataBufferTypeGrallocSource, use default parameters since no ValueInfo + if (type == IntelMetadataBufferTypeGrallocSource) { vinfo.mode = MEM_MODE_GFXHANDLE; vinfo.handle = 0; vinfo.size = 0; diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index ca71f27..5ed51ad 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -18,6 +18,15 @@ #include #include "VideoEncoderUtils.h" +struct SurfaceMap { + VASurfaceID surface; + VASurfaceID surface_backup; + IntelMetadataBufferType type; + int32_t value; + ValueInfo vinfo; + bool added; +}; + struct EncodeTask { VASurfaceID enc_surface; VASurfaceID ref_surface; -- cgit v1.2.3 From fc2bd05e2f6d83eef3c951f97ecb6072100c0e68 Mon Sep 17 00:00:00 2001 From: Elaine Wang Date: Mon, 18 Nov 2013 23:37:12 +0800 Subject: [PORT FROM MAIN JB-MR2]Fix Klocwork issue in videoencoder BZ: 152666 It's caused by an uninitialized variable, vinfo.sessionFlag. Set this variable to zero before using it.
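For reference, the broader pattern that avoids this whole class of Klocwork finding is to zero the struct once before the field-by-field fill, rather than adding one more assignment; the patch below takes the narrower route so the diff stays small. A minimal sketch, assuming ValueInfo remains a plain C-style struct (the struct body and function name here are stand-ins, not the real definition from IntelMetadataBuffer.h):

    #include <cstdint>
    #include <cstring>

    // Stand-in for the real ValueInfo; only the flagged field is real.
    struct ValueInfo {
        int      mode;
        uint32_t sessionFlag;   // the field Klocwork reported as uninitialized
        // ... remaining fields elided ...
    };

    void fillDefaultValueInfo(ValueInfo &vinfo) {
        memset(&vinfo, 0, sizeof(vinfo));  // every field starts at zero,
                                           // sessionFlag included, no #ifdef needed
        vinfo.mode = 0;                    // then set only what differs
    }
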
Change-Id: Ic1c8d50b16214853230da86a432247e0c83027aa Orig-Change-Id: I9a8f109b8fd595ad6091b2eebb6c247f915c7526 Signed-off-by: Elaine Wang --- videoencoder/VideoEncoderBase.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index d21f75b..52e68a1 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1567,6 +1567,9 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA vinfo.chromStride = mComParams.resolution.width; vinfo.format = VA_FOURCC_NV12; vinfo.s3dformat = 0xFFFFFFFF; +#ifdef INTEL_VIDEO_XPROC_SHARING + vinfo.sessionFlag = 0; +#endif } else { //get all info mapping needs imb.GetValueInfo(pvinfo); -- cgit v1.2.3 From 2bd99132443d68065922a2464a2101bd7d94cb17 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Fri, 15 Nov 2013 13:32:31 +0800 Subject: [PORT FROM MAIN JB-MR2]refine code to support gfx buffer better BZ: 152953 Support RGB to YUV converting, 64 not aligned gfx buffer on MRFLD, copy due to VP8 uncached memory. Uniformly use HAL_PIXEL_FORMAT_NV12 as final encode gfx buffer color format. Change-Id: I8ede20798fcdbd228bf24504858b2b048d731f29 Orig-Change-Id: I769234279f3426ce2c873aa9cc9a383b606fa189 Signed-off-by: Zhao Liang --- test/Android.mk | 35 +++ test/gfx_test.cpp | 423 +++++++++++++++++++++++++++++++++++++ test/mix_encoder2.cpp | 26 +-- videoencoder/VideoEncoderUtils.cpp | 325 +++++++++++++++++----------- videoencoder/VideoEncoderUtils.h | 11 +- 5 files changed, 682 insertions(+), 138 deletions(-) create mode 100755 test/gfx_test.cpp diff --git a/test/Android.mk b/test/Android.mk index dbd0d06..f387746 100644 --- a/test/Android.mk +++ b/test/Android.mk @@ -144,3 +144,38 @@ LOCAL_MODULE_TAGS := optional LOCAL_MODULE := mix_encoder2 include $(BUILD_EXECUTABLE) + +# For gfx_test +# ===================================================== + +include $(CLEAR_VARS) + +#VIDEO_ENC_LOG_ENABLE := true + +LOCAL_SRC_FILES := \ + gfx_test.cpp + +LOCAL_C_INCLUDES := \ + $(TARGET_OUT_HEADERS)/pvr \ + $(TOP)/frameworks/base/include/display \ + $(TOP)/frameworks/av/media/libstagefright \ + $(LOCAL_PATH) + +LOCAL_SHARED_LIBRARIES := \ + libgui \ + libui \ + libutils \ + libcutils \ + libhardware \ + libbinder \ + libstagefright \ + libstagefright_foundation + +ifeq ($(ENABLE_MRFL_GRAPHICS),true) + LOCAL_CFLAGS += -DMRFLD_GFX +endif + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := gfx_test + +include $(BUILD_EXECUTABLE) diff --git a/test/gfx_test.cpp b/test/gfx_test.cpp new file mode 100755 index 0000000..b68a5f5 --- /dev/null +++ b/test/gfx_test.cpp @@ -0,0 +1,423 @@ +#include + +#include +#include +#include +#include + +#include +#include +#include + +#include + +using namespace android; + +// #define HAL_PIXEL_FORMAT_BGRA_8888 5 +// #define HAL_PIXEL_FORMAT_NV12 0x3231564E +#define OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar 0x7FA00E00 +#define OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar_Tiled 0x7FA00F00 + +static hw_module_t const *gModule = NULL; +static gralloc_module_t *gAllocMod = NULL; /* get by force + * hw_module_t */ +static alloc_device_t *gAllocDev = NULL; + +static int gfx_init(void) +{ + + int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &gModule); + if (err) { + printf("FATAL: can't find the %s module", + GRALLOC_HARDWARE_MODULE_ID); + return -1; + } + + gAllocMod = (gralloc_module_t *) gModule; + + return 0; +} + +static int +gfx_alloc(uint32_t w, uint32_t h, int format, + int usage, buffer_handle_t * handle, int32_t * stride) +{ + + 
int err; + + if (!gAllocDev) { + if (!gModule) { + if (gfx_init()) { + printf("can't find the %s module", + GRALLOC_HARDWARE_MODULE_ID); + return -1; + } + } + + err = gralloc_open(gModule, &gAllocDev); + if (err) { + printf("FATAL: gralloc open failed\n"); + return -1; + } + } + + err = gAllocDev->alloc(gAllocDev, w, h, format, usage, handle, stride); + if (err) { + printf("alloc(%u, %u, %d, %08x, ...) failed %d (%s)\n", + w, h, format, usage, err, strerror(-err)); + } + + return err; +} + +static int gfx_free(buffer_handle_t handle) +{ + + int err; + + if (!gAllocDev) { + if (!gModule) { + if (gfx_init()) { + printf("can't find the %s module", + GRALLOC_HARDWARE_MODULE_ID); + return -1; + } + } + + err = gralloc_open(gModule, &gAllocDev); + if (err) { + printf("FATAL: gralloc open failed\n"); + return -1; + } + } + + err = gAllocDev->free(gAllocDev, handle); + if (err) { + printf("free(...) failed %d (%s)\n", err, strerror(-err)); + } + + return err; +} + +static int +gfx_lock(buffer_handle_t handle, int usage, + int left, int top, int width, int height, void **vaddr) +{ + + int err; + + if (!gAllocMod) { + if (gfx_init()) { + printf("can't find the %s module", + GRALLOC_HARDWARE_MODULE_ID); + return -1; + } + } + + err = gAllocMod->lock(gAllocMod, handle, usage, + left, top, width, height, vaddr); +// printf("gfx_lock: handle is %x, usage is %x, vaddr is %x.\n", +// (unsigned int)handle, usage, (unsigned int)*vaddr); + + if (err) { + printf("lock(...) failed %d (%s).\n", err, strerror(-err)); + return -1; + } + + return err; +} + +static int gfx_unlock(buffer_handle_t handle) +{ + + int err; + + if (!gAllocMod) { + if (gfx_init()) { + printf("can't find the %s module", + GRALLOC_HARDWARE_MODULE_ID); + return -1; + } + } + + err = gAllocMod->unlock(gAllocMod, handle); + if (err) { + printf("unlock(...) failed %d (%s)", err, strerror(-err)); + return -1; + } + + return err; +} + +static int +gfx_Blit(buffer_handle_t src, buffer_handle_t dest, int w, int h, int x, int y) +{ + int err; + + if (!gAllocMod) { + if (gfx_init()) { + printf("can't find the %s module", + GRALLOC_HARDWARE_MODULE_ID); + return -1; + } + } + + IMG_gralloc_module_public_t *GrallocMod = + (IMG_gralloc_module_public_t *) gModule; + +#ifdef MRFLD_GFX + err = GrallocMod->Blit(GrallocMod, src, dest, w, h, 0, 0, 0); +#else + err = GrallocMod->Blit2(GrallocMod, src, dest, w, h, 0, 0); +#endif + + if (err) { + printf("Blit(...) 
failed %d (%s)", err, strerror(-err)); + return -1; + } + + return err; +} + +void help() +{ + printf("gfx-test:\n"); + printf(" -w/--srcW -h/--srcH \n"); + printf(" -k/--dstW -g/--dstH \n"); + printf(" -a/--blitW -b/--blitH \n"); + printf(" -m/--allocM <0: server, 1: native>\n"); +} + +#define ListSize 12 + +int main(int argc, char *argv[]) +{ + //for allocator test + int AllocMethod = 0; + int Width; + int Height; + int Stride; + int ColorFormat; + char *Name; + + //for Blit test + buffer_handle_t SrcHandle, DstHandle; + int SrcWidth = 720, SrcHeight = 1280, SrcStride; + int DstWidth = 720, DstHeight = 1280, DstStride; + int BlitWidth = SrcWidth, BlitHeight = SrcHeight; + void *SrcPtr[3]; + void *DstPtr[3]; + + const char *short_opts = "a:b:c:d:e:f:g:h:i:j:k:l:m:n:o:p:q:r:s:t:q:u:v:w:x:y:z:?"; + const struct option long_opts[] = { + {"help", no_argument, NULL, '?'}, + {"srcW", required_argument, NULL, 'w'}, + {"srcH", required_argument, NULL, 'h'}, + {"dstW", required_argument, NULL, 'k'}, + {"dstH", required_argument, NULL, 'g'}, + {"blitW", required_argument, NULL, 'a'}, + {"blitH", required_argument, NULL, 'b'}, + {"allocM", required_argument, NULL, 'm'}, + {0, 0, 0, 0} + }; + + char c; + while ((c = getopt_long(argc, argv, short_opts, long_opts, NULL) ) != EOF) { + switch (c) { + case 'a': + BlitWidth = atoi(optarg); + break; + + case 'b': + BlitHeight = atoi(optarg); + break; + + case 'w': + SrcWidth = atoi(optarg); + break; + + case 'h': + SrcHeight = atoi(optarg); + break; + + case 'k': + DstWidth = atoi(optarg); + break; + + case 'g': + DstHeight = atoi(optarg); + break; + + case '?': + help(); + return 1; + + case 'm': + AllocMethod = atoi(optarg); + break; + + } + } + + android::ProcessState::self()->startThreadPool(); + + struct Params { + int Width; + int Height; + int ColorFormat; + char *Name; + }; + + Params ParamList[ListSize] = { + {320, 240, HAL_PIXEL_FORMAT_BGRA_8888, + "HAL_PIXEL_FORMAT_BGRA_8888"}, + {320, 240, HAL_PIXEL_FORMAT_NV12, "HAL_PIXEL_FORMAT_NV12"}, + {320, 240, OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar, + "OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar"}, + {720, 480, HAL_PIXEL_FORMAT_BGRA_8888, + "HAL_PIXEL_FORMAT_BGRA_8888"}, + {720, 480, HAL_PIXEL_FORMAT_NV12, "HAL_PIXEL_FORMAT_NV12"}, + {720, 480, OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar, + "OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar"}, + {1280, 720, HAL_PIXEL_FORMAT_BGRA_8888, + "HAL_PIXEL_FORMAT_BGRA_8888"}, + {1280, 720, HAL_PIXEL_FORMAT_NV12, "HAL_PIXEL_FORMAT_NV12"}, + {1280, 720, OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar, + "OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar"}, + {1920, 1080, HAL_PIXEL_FORMAT_BGRA_8888, + "HAL_PIXEL_FORMAT_BGRA_8888"}, + {1920, 1080, HAL_PIXEL_FORMAT_NV12, "HAL_PIXEL_FORMAT_NV12"}, + {1920, 1080, OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar, + "OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar"}, + }; + + buffer_handle_t Handle = NULL; + + // format memory structure test + for (int i = 0; i < ListSize; i++) { + Width = ParamList[i].Width; + Height = ParamList[i].Height; + ColorFormat = ParamList[i].ColorFormat; + Name = ParamList[i].Name; + + sp < GraphicBuffer > graphicBuffer; + + if (AllocMethod == 0) { + uint32_t usage = + GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer:: + USAGE_HW_RENDER; + int32_t error; + + sp < ISurfaceComposer > + composer(ComposerService::getComposerService()); + sp < IGraphicBufferAlloc > GraphicBufferAlloc = + composer->createGraphicBufferAlloc(); + + graphicBuffer = + GraphicBufferAlloc->createGraphicBuffer(Width, + Height, + 
ColorFormat, + usage, + &error); + + if (graphicBuffer.get() == NULL) { + printf("GFX createGraphicBuffer failed\n"); + return 0; + } + + Stride = graphicBuffer->getStride(); + Handle = graphicBuffer->handle; + + } else { + int usage = + GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER; + + if (gfx_alloc + (Width, Height, ColorFormat, usage, &Handle, + &Stride) + != 0) { + printf("Gralloc allocate failed\n"); + return 0; + } + + } + + printf("%s\n", Name); + printf + (" gfx handle 0x%08x, Width=%d, Height=%d, Stride=%d, ColorFormat=0x%08x\n", + Handle, Width, Height, Stride, ColorFormat); + IMG_native_handle_t *h = (IMG_native_handle_t *) Handle; + printf + (" IMG_native_handle_t iWidth=%d, iHeight=%d, iFormat=0x%08x\n\n", + h->iWidth, h->iHeight, h->iFormat); + + if (AllocMethod > 0) + gfx_free(Handle); + } + + // Blit test, from HAL_PIXEL_FORMAT_BGRA_8888 to HAL_PIXEL_FORMAT_NV12 + printf("Start Blit test ............\n"); + int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER; + + // src rgba + ColorFormat = HAL_PIXEL_FORMAT_BGRA_8888; + if (gfx_alloc(SrcWidth, SrcHeight, ColorFormat, usage, &SrcHandle, &SrcStride) + != 0) { + printf + ("Gralloc allocate handle1 HAL_PIXEL_FORMAT_BGRA_8888 failed\n"); + return 0; + } + // Fill data + if (gfx_lock + (SrcHandle, usage | GRALLOC_USAGE_SW_WRITE_OFTEN, 0, 0, SrcWidth, + SrcHeight, &SrcPtr[0]) != 0) + return 0; + int data = 0x00FF0000; // AA RR GG BB + for (int i = 0; i < SrcStride * SrcHeight; i++) + memcpy(SrcPtr[0] + i * 4, &data, 4); + // dump to file + FILE *fp1 = fopen("/data/dump.rgb", "wb"); + fwrite(SrcPtr[0], 1, SrcStride * SrcHeight * 4, fp1); + fclose(fp1); + gfx_unlock(SrcHandle); + //============================================================================ + + // dest nv12 + ColorFormat = HAL_PIXEL_FORMAT_NV12; +// ColorFormat = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar; + if (gfx_alloc(DstWidth, DstHeight, ColorFormat, usage, &DstHandle, &DstStride) + != 0) { + printf + ("Gralloc allocate handle2 HAL_PIXEL_FORMAT_NV12 failed\n"); + return 0; + } + //=========================================================================== + + printf("\nSRC size: %dx%d, stride:%d\n", SrcWidth, SrcHeight, SrcStride); + printf("DST size: %dx%d, stride:%d\n", DstWidth, DstHeight, DstStride); + printf("Blit RECT: %dx%d \n", BlitWidth, BlitHeight); + + // blit start + if (gfx_Blit(SrcHandle, DstHandle, BlitWidth, BlitHeight, 0, 0) != 0) { + printf("Gralloc Blit failed\n"); + return 0; + } + + //============================================================================ + // dump data of dest + if (gfx_lock + (DstHandle, usage | GRALLOC_USAGE_SW_WRITE_OFTEN, 0, 0, DstWidth, + DstHeight, &DstPtr[0]) != 0) + return 0; + FILE *fp2 = fopen("/data/dump.yuv", "wb"); + fwrite(DstPtr[0], 1, DstStride * DstHeight * 1.5, fp2); + fclose(fp2); + gfx_unlock(DstHandle); + + // release handles + gfx_free(SrcHandle); + gfx_free(DstHandle); + + printf("Complete Blit test ............\n"); + return 1; +} diff --git a/test/mix_encoder2.cpp b/test/mix_encoder2.cpp index 5b880f7..7f65649 100755 --- a/test/mix_encoder2.cpp +++ b/test/mix_encoder2.cpp @@ -212,8 +212,8 @@ public: LOG("Fill src pictures width=%d, Height=%d\n", mStride, mHeight); for(int i=0; i 0) { - if (mColorFormat == HAL_PIXEL_FORMAT_RGBA_8888) + if (mColorFormat == HAL_PIXEL_FORMAT_BGRA_8888) ret = fread(mUsrptr[gNumFramesOutput % PRELOAD_FRAME_NUM] + mStride*mHeight*4 - readsize, 1, readsize, mYuvhandle); else ret = fread(mUsrptr[gNumFramesOutput % PRELOAD_FRAME_NUM] + 
mStride*mHeight*3/2 - readsize, 1, readsize, mYuvhandle); @@ -637,7 +637,7 @@ public: sp composer(ComposerService::getComposerService()); mGraphicBufferAlloc = composer->createGraphicBufferAlloc(); - uint32_t usage = GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_WRITE_OFTEN;// | GraphicBuffer::USAGE_HW_COMPOSER; + uint32_t usage = GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_HW_RENDER | GraphicBuffer::USAGE_SW_WRITE_OFTEN;// | GraphicBuffer::USAGE_HW_COMPOSER; int32_t error; ValueInfo vinfo; @@ -674,11 +674,11 @@ public: mIMB[i]->SetValueInfo(&vinfo); IMG_native_handle_t* h = (IMG_native_handle_t*) mGraphicBuffer[i]->handle; - mStride = h->iWidth; + mStride = graphicBuffer->getStride(); mHeight = h->iHeight; - -//printf("mStride=%d, height=%d, format=%x", mStride, mHeight, h->iFormat); + printf("GfxSource handle iWidth=%d, iHeight=%d, stride=%d, color=0x%08x\n", h->iWidth, h->iHeight, mStride, h->iFormat); } + printf("GfxSource1 width=%d, height=%d, stride=%d\n", mWidth, mHeight, mStride); return OK; } @@ -726,9 +726,11 @@ public: mIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeGrallocSource, (int32_t)mHandle[i]); IMG_native_handle_t* h = (IMG_native_handle_t*) mHandle[i]; - mStride = h->iWidth; +// mWidth = h->iWidth; mHeight = h->iHeight; + printf("GrallocSource iWidth=%d, iHeight=%d, stride=%d, Color=0x%08x\n", h->iWidth, h->iHeight, mStride, h->iFormat); } + printf("GrallocSource width=%d, height=%d, stride=%d\n", mWidth, mHeight, mStride); return OK; } @@ -1516,7 +1518,7 @@ void usage() { printf(" -s/--src select source, like MALLOC(default), VASURFACE, KBUFHANDLE, GFX, GRALLOC, MEMHEAP, SURFACEMEDIASOURCE (CAMERASOURCE, not support yet) \n"); printf(" -t/--sessionFlag set sessionflag, default is 0\n"); printf(" -u/--disableFrameSkip disable frame skip, default is false\n"); - printf(" -v/--gfxColor set gfx color, default is 0(HAL_PIXEL_FORMAT_NV12), 1(OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar), 2(HAL_PIXEL_FORMAT_RGBA_8888)\n"); + printf(" -v/--gfxColor set gfx color, default is 0(HAL_PIXEL_FORMAT_NV12), 1(OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar), 2(HAL_PIXEL_FORMAT_BGRA_8888)\n"); printf(" -w -h set source width /height, default 1280*720\n"); printf("\n"); @@ -1748,7 +1750,7 @@ int main(int argc, char* argv[]) printf("Type: %s, Codec: %s, Width: %d, Height: %d\n", ENCTYPE[EncType], CODEC[EncCodec], EncWidth, EncHeight); printf("RC: %s, Bitrate: %d bps, initQP: %d, minQP: %d\n", RCMODE[EncRCMode], EncBitrate, InitQP, MinQP); printf("winSize: %d, IdrInterval: %d, IntraPeriod: %d, FPS: %d \n", WinSize, IdrInt, IntraPeriod, SrcFps); - printf("Frameskip: %d, GfxColor: %s\n", !DisableFrameSkip, GfxColor > 0 ? (GfxColor > 1? "HAL_PIXEL_FORMAT_RGBA_8888":"OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar"):"HAL_PIXEL_FORMAT_NV12"); + printf("Frameskip: %d, GfxColor: %s\n", !DisableFrameSkip, GfxColor > 0 ? (GfxColor > 1? 
"HAL_PIXEL_FORMAT_BGRA_8888":"OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar"):"HAL_PIXEL_FORMAT_NV12"); printf("\nOut:\n"); printf("Type: %s, File: %s\n", OUTFORMAT[OutFormat], OutFileName); @@ -1767,7 +1769,7 @@ int main(int argc, char* argv[]) if (GfxColor == 1) src_meta->setInt32(kKeyColorFormat, 0x7FA00E00); //OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar else if (GfxColor == 2) - src_meta->setInt32(kKeyColorFormat, HAL_PIXEL_FORMAT_RGBA_8888); + src_meta->setInt32(kKeyColorFormat, HAL_PIXEL_FORMAT_BGRA_8888); else src_meta->setInt32(kKeyColorFormat, HAL_PIXEL_FORMAT_NV12); diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp index 69d2518..746959c 100644 --- a/videoencoder/VideoEncoderUtils.cpp +++ b/videoencoder/VideoEncoderUtils.cpp @@ -5,9 +5,12 @@ #ifdef IMG_GFX #include -//#include #include +//#define GFX_DUMP + +#define OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar 0x7FA00E00 + static hw_module_t const *gModule = NULL; static gralloc_module_t *gAllocMod = NULL; /* get by force hw_module_t */ static alloc_device_t *gAllocDev = NULL; @@ -19,7 +22,7 @@ static int gfx_init(void) { LOG_E("FATAL: can't find the %s module", GRALLOC_HARDWARE_MODULE_ID); return -1; } else - LOG_I("hw_get_module returned\n"); + LOG_V("hw_get_module returned\n"); gAllocMod = (gralloc_module_t *)gModule; return 0; @@ -95,13 +98,13 @@ static int gfx_lock(buffer_handle_t handle, int usage, err = gAllocMod->lock(gAllocMod, handle, usage, left, top, width, height, vaddr); - LOG_I("gfx_lock: handle is %x, usage is %x, vaddr is %x.\n", (unsigned int)handle, usage, (unsigned int)*vaddr); + LOG_V("gfx_lock: handle is %x, usage is %x, vaddr is %x.\n", (unsigned int)handle, usage, (unsigned int)*vaddr); if (err){ LOG_E("lock(...) failed %d (%s).\n", err, strerror(-err)); return -1; } else - LOG_I("lock returned with address %p\n", *vaddr); + LOG_V("lock returned with address %p\n", *vaddr); return err; } @@ -122,7 +125,7 @@ static int gfx_unlock(buffer_handle_t handle) { LOG_E("unlock(...) failed %d (%s)", err, strerror(-err)); return -1; } else - LOG_I("unlock returned\n"); + LOG_V("unlock returned\n"); return err; } @@ -151,10 +154,65 @@ static int gfx_Blit(buffer_handle_t src, buffer_handle_t dest, LOG_E("Blit(...) 
failed %d (%s)", err, strerror(-err)); return -1; } else - LOG_I("Blit returned\n"); + LOG_V("Blit returned\n"); return err; } + +Encode_Status GetGfxBufferInfo(int32_t handle, ValueInfo& vinfo){ + + /* only support OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar + HAL_PIXEL_FORMAT_NV12 + HAL_PIXEL_FORMAT_BGRA_8888 */ + IMG_native_handle_t* h = (IMG_native_handle_t*) handle; + + vinfo.width = h->iWidth; + vinfo.height = h->iHeight; + vinfo.lumaStride = h->iWidth; + + if (h->iFormat == HAL_PIXEL_FORMAT_NV12) { + #ifdef MRFLD_GFX + vinfo.lumaStride = (h->iWidth + 31) & ~31; //32 aligned + #else //on CTP + if (h->iWidth > 512) + vinfo.lumaStride = (h->iWidth + 63) & ~63; //64 aligned + else + vinfo.lumaStride = 512; + #endif + } else if (h->iFormat == HAL_PIXEL_FORMAT_BGRA_8888) { + vinfo.lumaStride = (h->iWidth + 31) & ~31; + } else if (h->iFormat == OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar) { + //nothing to do + } else + return ENCODE_NOT_SUPPORTED; + + vinfo.format = h->iFormat; + + LOG_I("GetGfxBufferInfo: gfx iWidth=%d, iHeight=%d, iFormat=%x in handle structure\n", h->iWidth, h->iHeight, h->iFormat); + LOG_I(" Actual Width=%d, Height=%d, Stride=%d\n\n", vinfo.width, vinfo.height, vinfo.lumaStride); + return ENCODE_SUCCESS; +} + +#ifdef GFX_DUMP +void DumpGfx(int32_t handle, char* filename) { + ValueInfo vinfo; + void* vaddr[3]; + FILE* fp; + int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_OFTEN; + + GetGfxBufferInfo(handle, vinfo); + if (gfx_lock((buffer_handle_t)handle, usage, 0, 0, vinfo.width, vinfo.height, &vaddr[0]) != 0) + return ENCODE_DRIVER_FAIL; + fp = fopen(filename, "wb"); + fwrite(vaddr[0], 1, vinfo.lumaStride * vinfo.height * 4, fp); + fclose(fp); + LOG_I("dump %d bytes data to %s\n", vinfo.lumaStride * vinfo.height * 4, filename); + gfx_unlock((buffer_handle_t)handle); + + return; +} +#endif + #endif extern "C" { @@ -186,7 +244,7 @@ VASurfaceMap::VASurfaceMap(VADisplay display, int hwcap) { mAction = 0; memset(&mVinfo, 0, sizeof(ValueInfo)); #ifdef IMG_GFX - mGfxHandleAllocated = false; + mGfxHandle = NULL; #endif } @@ -196,7 +254,7 @@ VASurfaceMap::~VASurfaceMap() { vaDestroySurfaces(mVADisplay, &mVASurface, 1); #ifdef IMG_GFX - if (mGfxHandleAllocated) + if (mGfxHandle) gfx_free(mGfxHandle); #endif } @@ -207,54 +265,83 @@ Encode_Status VASurfaceMap::doMapping() { if (mVASurface == VA_INVALID_SURFACE) { - bool AllocSurface = false; - mVASurfaceWidth = mVinfo.lumaStride; - mVASurfaceHeight = mVinfo.height; - - if (mAction & MAP_ACTION_ALIGN64 && mVASurfaceWidth % 64 != 0) { - //check if source is not 64 aligned, must allocate new 64 aligned vasurface(EXternalMemoryNULL) - mVASurfaceWidth = (mVASurfaceWidth + 63 ) & ~63; - mAction |= MAP_ACTION_COPY; - } - - if (mAction & MAP_ACTION_COPY) //must allocate new vasurface(EXternalMemoryNULL) - AllocSurface = true; + int width = mVASurfaceWidth = mVinfo.width; + int height = mVASurfaceHeight = mVinfo.height; + int stride = mVASurfaceStride = mVinfo.lumaStride; if (mAction & MAP_ACTION_COLORCONVERT) { - #ifdef IMG_GFX //only enable on IMG chip - /*only support gfx buffer, need allocate new gfx buffer, then map new one to vasurface */ + //only support gfx buffer if (mVinfo.mode != MEM_MODE_GFXHANDLE) return ENCODE_NOT_SUPPORTED; - //do not trust valueinfo, directly get from structure - IMG_native_handle_t* h = (IMG_native_handle_t*) mValue; - //only allocate new buffer if color format is not NV12 - if (HAL_PIXEL_FORMAT_NV12 == h->iFormat || 0x7FA00E00 == h->iFormat) //OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar + 
#ifdef IMG_GFX //only enable on IMG chip + + //do not trust valueinfo for gfx case, directly get from structure + ValueInfo tmp; + + ret = GetGfxBufferInfo(mValue, tmp); + CHECK_ENCODE_STATUS_RETURN("GetGfxBufferInfo"); + width = tmp.width; + height = tmp.height; + stride = tmp.lumaStride; + + if (HAL_PIXEL_FORMAT_NV12 == tmp.format || OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar == tmp.format) mAction &= ~MAP_ACTION_COLORCONVERT; - else - AllocSurface = true; - LOG_I("src gfx buffer iFormat=%x, iWidth=%d, iHeight=%d in handle structure", h->iFormat, h->iWidth, h->iHeight); + else { + //allocate new gfx buffer if format is not NV12 + int usage = GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE; + + //use same size with original and HAL_PIXEL_FORMAT_NV12 format + if (gfx_alloc(width, height, HAL_PIXEL_FORMAT_NV12, usage, &mGfxHandle, &stride) != 0) + return ENCODE_DRIVER_FAIL; + + LOG_I("Create an new gfx buffer handle 0x%08x for color convert, width=%d, height=%d, stride=%d\n", + (unsigned int)mGfxHandle, width, height, stride); + } + #else return ENCODE_NOT_SUPPORTED; #endif } - if (AllocSurface) { - //allocate new buffer and map to vasurface - ret = doAllocation(); - CHECK_ENCODE_STATUS_RETURN("doAllocation"); + if (mAction & MAP_ACTION_ALIGN64 && stride % 64 != 0) { + //check if stride is not 64 aligned, must allocate new 64 aligned vasurface + stride = (stride + 63 ) & ~63; + mAction |= MAP_ACTION_COPY; + } + + if (mAction & MAP_ACTION_COPY) { //must allocate new vasurface(EXternalMemoryNULL, uncached) + //allocate new vasurface + mVASurface = CreateNewVASurface(mVADisplay, stride, height); + if (mVASurface == VA_INVALID_SURFACE) + return ENCODE_DRIVER_FAIL; + mVASurfaceWidth = mVASurfaceStride = stride; + mVASurfaceHeight = height; + LOGI("create new vaSurface for MAP_ACTION_COPY\n"); } else { - //direct map mem to vasurface - ret = MappingToVASurface(); - CHECK_ENCODE_STATUS_RETURN("MappingToVASurface"); + #ifdef IMG_GFX + if (mGfxHandle != NULL) { + //map new gfx handle to vasurface + ret = MappingGfxHandle((int32_t)mGfxHandle); + CHECK_ENCODE_STATUS_RETURN("MappingGfxHandle"); + LOGI("map new allocated gfx handle to vaSurface\n"); + } else + #endif + { + //map original value to vasurface + ret = MappingToVASurface(); + CHECK_ENCODE_STATUS_RETURN("MappingToVASurface"); + } } } if (mAction & MAP_ACTION_COLORCONVERT) { ret = doActionColConv(); CHECK_ENCODE_STATUS_RETURN("doActionColConv"); - } else if (mAction & MAP_ACTION_COPY) { + } + + if (mAction & MAP_ACTION_COPY) { //keep src color format is NV12, then do copy ret = doActionCopy(); CHECK_ENCODE_STATUS_RETURN("doActionCopy"); @@ -277,20 +364,20 @@ Encode_Status VASurfaceMap::MappingToVASurface() { switch (mVinfo.mode) { case MEM_MODE_SURFACE: mode = "SURFACE"; - ret = MappingSurfaceID(); + ret = MappingSurfaceID(mValue); break; case MEM_MODE_GFXHANDLE: mode = "GFXHANDLE"; - ret = MappingGfxHandle(); + ret = MappingGfxHandle(mValue); break; case MEM_MODE_KBUFHANDLE: mode = "KBUFHANDLE"; - ret = MappingKbufHandle(); + ret = MappingKbufHandle(mValue); break; case MEM_MODE_MALLOC: case MEM_MODE_NONECACHE_USRPTR: mode = "MALLOC or NONCACHE_USRPTR"; - ret = MappingMallocPTR(); + ret = MappingMallocPTR(mValue); break; case MEM_MODE_ION: case MEM_MODE_V4L2: @@ -307,7 +394,7 @@ Encode_Status VASurfaceMap::MappingToVASurface() { return ret; } -Encode_Status VASurfaceMap::MappingSurfaceID() { +Encode_Status VASurfaceMap::MappingSurfaceID(int32_t value) { VAStatus vaStatus = VA_STATUS_SUCCESS; VASurfaceID surface; @@ -323,22 +410,17 @@ 
Encode_Status VASurfaceMap::MappingSurfaceID() { uint32_t kBufHandle = 0; vaStatus = vaLockSurface( - (VADisplay)mVinfo.handle, (VASurfaceID)mValue, + (VADisplay)mVinfo.handle, (VASurfaceID)value, &fourCC, &lumaStride, &chromaUStride, &chromaVStride, &lumaOffset, &chromaUOffset, &chromaVOffset, &kBufHandle, NULL); CHECK_VA_STATUS_RETURN("vaLockSurface"); - LOG_I("Surface incoming = 0x%08x", mValue); - LOG_I("lumaStride = %d", lumaStride); - LOG_I("chromaUStride = %d", chromaUStride); - LOG_I("chromaVStride = %d", chromaVStride); - LOG_I("lumaOffset = %d", lumaOffset); - LOG_I("chromaUOffset = %d", chromaUOffset); - LOG_I("chromaVOffset = %d", chromaVOffset); - LOG_I("kBufHandle = 0x%08x", kBufHandle); - LOG_I("fourCC = %d", fourCC); - - vaStatus = vaUnlockSurface((VADisplay)mVinfo.handle, (VASurfaceID)mValue); + LOG_I("Surface incoming = 0x%08x\n", value); + LOG_I("lumaStride = %d, chromaUStride = %d, chromaVStride=%d\n", lumaStride, chromaUStride, chromaVStride); + LOG_I("lumaOffset = %d, chromaUOffset = %d, chromaVOffset = %d\n", lumaOffset, chromaUOffset, chromaVOffset); + LOG_I("kBufHandle = 0x%08x, fourCC = %d\n", kBufHandle, fourCC); + + vaStatus = vaUnlockSurface((VADisplay)mVinfo.handle, (VASurfaceID)value); CHECK_VA_STATUS_RETURN("vaUnlockSurface"); mVinfo.mode = MEM_MODE_KBUFHANDLE; @@ -348,87 +430,73 @@ Encode_Status VASurfaceMap::MappingSurfaceID() { if (mVASurface == VA_INVALID_SURFACE) return ENCODE_DRIVER_FAIL; + mVASurfaceWidth = mVinfo.width; + mVASurfaceHeight = mVinfo.height; + mVASurfaceStride = mVinfo.lumaStride; return ENCODE_SUCCESS; } -Encode_Status VASurfaceMap::MappingGfxHandle() { +Encode_Status VASurfaceMap::MappingGfxHandle(int32_t value) { - LOG_I("MappingGfxHandle %x......\n", mValue); + LOG_I("MappingGfxHandle %x......\n", value); LOG_I("format = 0x%08x, lumaStride = %d in ValueInfo\n", mVinfo.format, mVinfo.lumaStride); + //default value for all HW platforms, maybe not accurate + mVASurfaceWidth = mVinfo.width; + mVASurfaceHeight = mVinfo.height; + mVASurfaceStride = mVinfo.lumaStride; + #ifdef IMG_GFX - // color format may be OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar or HAL_PIXEL_FORMAT_NV12 - IMG_native_handle_t* h = (IMG_native_handle_t*) mValue; - mVinfo.lumaStride = h->iWidth; - mVinfo.format = h->iFormat; - mVinfo.width = h->iWidth; - mVinfo.height = h->iHeight; - LOG_I("Update ValueInfo with iWidth=%d, iHeight=%d, iFormat=%x in handle structure\n", h->iWidth, h->iHeight, h->iFormat); + Encode_Status ret; + ValueInfo tmp; + + ret = GetGfxBufferInfo(value, tmp); + CHECK_ENCODE_STATUS_RETURN("GetGfxBufferInfo"); + mVASurfaceWidth = tmp.width; + mVASurfaceHeight = tmp.height; + mVASurfaceStride = tmp.lumaStride; #endif - mVASurface = CreateSurfaceFromExternalBuf(mValue, mVinfo); + LOG_I("Mapping vasurface Width=%d, Height=%d, Stride=%d\n", mVASurfaceWidth, mVASurfaceHeight, mVASurfaceStride); + + ValueInfo vinfo; + memset(&vinfo, 0, sizeof(ValueInfo)); + vinfo.mode = MEM_MODE_GFXHANDLE; + vinfo.width = mVASurfaceWidth; + vinfo.height = mVASurfaceHeight; + vinfo.lumaStride = mVASurfaceStride; + mVASurface = CreateSurfaceFromExternalBuf(value, vinfo); if (mVASurface == VA_INVALID_SURFACE) return ENCODE_DRIVER_FAIL; return ENCODE_SUCCESS; } -Encode_Status VASurfaceMap::MappingKbufHandle() { +Encode_Status VASurfaceMap::MappingKbufHandle(int32_t value) { - LOG_I("MappingKbufHandle value=%d\n", mValue); + LOG_I("MappingKbufHandle value=%d\n", value); mVinfo.size = mVinfo.lumaStride * mVinfo.height * 1.5; - mVASurface = CreateSurfaceFromExternalBuf(mValue, 
mVinfo); + mVASurface = CreateSurfaceFromExternalBuf(value, mVinfo); if (mVASurface == VA_INVALID_SURFACE) return ENCODE_DRIVER_FAIL; + mVASurfaceWidth = mVinfo.width; + mVASurfaceHeight = mVinfo.height; + mVASurfaceStride = mVinfo.lumaStride; + return ENCODE_SUCCESS; } -Encode_Status VASurfaceMap::MappingMallocPTR() { +Encode_Status VASurfaceMap::MappingMallocPTR(int32_t value) { - mVASurface = CreateSurfaceFromExternalBuf(mValue, mVinfo); + mVASurface = CreateSurfaceFromExternalBuf(value, mVinfo); if (mVASurface == VA_INVALID_SURFACE) return ENCODE_DRIVER_FAIL; - return ENCODE_SUCCESS; -} - -Encode_Status VASurfaceMap::doAllocation() { - - if (mAction & MAP_ACTION_COLORCONVERT) { - #ifdef IMG_GFX - //for gfx buffer color convert - int usage = GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE; - - int32_t stride; - if (gfx_alloc(mVASurfaceWidth, mVASurfaceHeight, /*0x7FA00E00*/HAL_PIXEL_FORMAT_NV12, usage, &mGfxHandle, &stride) != 0) - return ENCODE_DRIVER_FAIL; - - LOG_I("Create an new gfx buffer handle 0x%08x for color convert, width=%d, height=%d, stride=%d\n", - (unsigned int)mGfxHandle, mVASurfaceWidth, mVASurfaceHeight, stride); - - ValueInfo vinfo; - memset(&vinfo, 0, sizeof(ValueInfo)); - vinfo.mode = MEM_MODE_GFXHANDLE; - vinfo.width = mVASurfaceWidth; - vinfo.height = mVASurfaceHeight; - vinfo.lumaStride = stride; - mVASurface = CreateSurfaceFromExternalBuf((int32_t)mGfxHandle, vinfo); - if (mVASurface == VA_INVALID_SURFACE) - return ENCODE_DRIVER_FAIL; - mGfxHandleAllocated = true; - #else - return ENCODE_NOT_SUPPORTED; - #endif - - } else { - //for 64 align and uncached mem - LOG_I("Create an new vaSurface for Action 0x%08x\n", mAction); - mVASurface = CreateNewVASurface(mVADisplay, mVASurfaceWidth, mVASurfaceHeight); - if (mVASurface == VA_INVALID_SURFACE) - return ENCODE_DRIVER_FAIL; - } + mVASurfaceWidth = mVinfo.width; + mVASurfaceHeight = mVinfo.height; + mVASurfaceStride = mVinfo.lumaStride; return ENCODE_SUCCESS; } @@ -440,6 +508,7 @@ Encode_Status VASurfaceMap::doActionCopy() { uint32_t width = 0, height = 0, stride = 0; uint8_t *pSrcBuffer, *pDestBuffer; + int32_t handle = 0; LOG_I("Copying Src Buffer data to VASurface\n"); @@ -470,16 +539,23 @@ Encode_Status VASurfaceMap::doActionCopy() { int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_OFTEN; //do not trust valueinfo, directly get from structure - IMG_native_handle_t* h = (IMG_native_handle_t*) mValue; + Encode_Status ret; + ValueInfo tmp; - //only copy expected to be encoded area - width = mVinfo.width; - height = mVinfo.height; - stride = h->iWidth; + if (mGfxHandle) + handle = (int32_t) mGfxHandle; + else + handle = mValue; + + ret = GetGfxBufferInfo(handle, tmp); + CHECK_ENCODE_STATUS_RETURN("GetGfxBufferInfo"); + width = tmp.width; + height = tmp.height; + stride = tmp.lumaStride; //only support HAL_PIXEL_FORMAT_NV12 & OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar - if (HAL_PIXEL_FORMAT_NV12 != h->iFormat && 0x7FA00E00 != h->iFormat) { - LOG_E("Not support gfx buffer format %x", h->iFormat); + if (HAL_PIXEL_FORMAT_NV12 != tmp.format && OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar != tmp.format) { + LOG_E("Not support gfx buffer format %x", tmp.format); return ENCODE_NOT_SUPPORTED; } @@ -489,9 +565,8 @@ Encode_Status VASurfaceMap::doActionCopy() { srcUV_pitch = stride; //lock gfx handle with buffer real size - LOG_I("Width=%d,Height=%d,Format=%x in raw gfx handle\n", h->iWidth, h->iHeight, h->iFormat); void* vaddr[3]; - if (gfx_lock((buffer_handle_t) mValue, usage, 0, 0, h->iWidth, h->iHeight, 
&vaddr[0]) != 0) + if (gfx_lock((buffer_handle_t) handle, usage, 0, 0, width, height, &vaddr[0]) != 0) return ENCODE_DRIVER_FAIL; pSrcBuffer = (uint8_t*)vaddr[0]; #else @@ -550,7 +625,7 @@ Encode_Status VASurfaceMap::doActionCopy() { #ifdef IMG_GFX if (mVinfo.mode == MEM_MODE_GFXHANDLE) { //unlock gfx handle - gfx_unlock((buffer_handle_t) mValue); + gfx_unlock((buffer_handle_t) handle); } #endif LOG_I("Copying Src Buffer data to VASurface Complete\n"); @@ -561,12 +636,23 @@ Encode_Status VASurfaceMap::doActionCopy() { Encode_Status VASurfaceMap::doActionColConv() { #ifdef IMG_GFX -LOG_I("gfx_Blit width=%d, height=%d\n", mVinfo.width, mVinfo.height); + if (mGfxHandle == NULL) { + LOG_E("something wrong, why new gfxhandle is not allocated? \n"); + return ENCODE_FAIL; + } + + LOG_I("doActionColConv gfx_Blit width=%d, height=%d\n", mVinfo.width, mVinfo.height); if (gfx_Blit((buffer_handle_t)mValue, mGfxHandle, -// mVASurfaceWidth, mVASurfaceHeight, 0, 0) != 0) mVinfo.width, mVinfo.height, 0, 0) != 0) return ENCODE_DRIVER_FAIL; + + #ifdef GFX_DUMP + LOG_I("dumpping gfx data.....\n"); + DumpGfx(mValue, "/data/dump.rgb"); + DumpGfx((int32_t)mGfxHandle, "/data/dump.yuv"); + #endif return ENCODE_SUCCESS; + #else return ENCODE_NOT_SUPPORTED; #endif @@ -689,4 +775,3 @@ VASurfaceID CreateNewVASurface(VADisplay display, int32_t width, int32_t height) return surface; } - diff --git a/videoencoder/VideoEncoderUtils.h b/videoencoder/VideoEncoderUtils.h index 9f6b9cb..786a63b 100644 --- a/videoencoder/VideoEncoderUtils.h +++ b/videoencoder/VideoEncoderUtils.h @@ -30,14 +30,13 @@ public: void setAction(int32_t action) {mAction = action;} private: - Encode_Status doAllocation(); Encode_Status doActionCopy(); Encode_Status doActionColConv(); Encode_Status MappingToVASurface(); - Encode_Status MappingSurfaceID(); - Encode_Status MappingGfxHandle(); - Encode_Status MappingKbufHandle(); - Encode_Status MappingMallocPTR(); + Encode_Status MappingSurfaceID(int32_t value); + Encode_Status MappingGfxHandle(int32_t value); + Encode_Status MappingKbufHandle(int32_t value); + Encode_Status MappingMallocPTR(int32_t value); VASurfaceID CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vinfo); VADisplay mVADisplay; @@ -47,6 +46,7 @@ private: VASurfaceID mVASurface; int32_t mVASurfaceWidth; int32_t mVASurfaceHeight; + int32_t mVASurfaceStride; // MetadataBufferType mType; @@ -60,7 +60,6 @@ private: #ifdef IMG_GFX //special for gfx color format converter buffer_handle_t mGfxHandle; - bool mGfxHandleAllocated; #endif }; -- cgit v1.2.3 From 20d05e9ebb200e92a26c4802ec4cddd207b0a05b Mon Sep 17 00:00:00 2001 From: "Gu, Wangyi" Date: Wed, 27 Nov 2013 14:06:39 +0800 Subject: libmix: finish decoding the last frame even when we encounter an error decoding the new frame BZ: 154409 finish decoding the last frame even when we encounter an error decoding the new frame Change-Id: Iaa0c507c3c1ad209c3b8fe2783b104f76b5c1920 Signed-off-by: Gu, Wangyi --- videodecoder/VideoDecoderAVC.cpp | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 4879252..57d3557 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -169,7 +169,14 @@ Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h if (mLowDelay) { // start decoding a new frame status = beginDecodingFrame(data); - CHECK_STATUS("beginDecodingFrame"); + if (status != DECODE_SUCCESS) { + Decode_Status st = status; + // finish decoding the last frame 
if + // encounter error when decode the new frame status = endDecodingFrame(false); + CHECK_STATUS("endDecodingFrame"); + return st; + } } // finish decoding the last frame -- cgit v1.2.3 From d509b0bd24f1b67e043907f7139202255825890b Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Thu, 28 Nov 2013 14:52:43 +0800 Subject: libmix: fix an issue in error reporting BZ: 155091 When the driver reports -1, we should stop filling the error data buffer. Change-Id: I6136deccb0334b73159f3b73716bb7c5c7aebec0 Signed-off-by: Dan Liang --- videodecoder/VideoDecoderBase.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 12a99fd..fed382e 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -1363,7 +1363,7 @@ void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *currentSurface) { if (err_drv_output[i].status != -1) { currentSurface->errBuf.errorNumber++; currentSurface->errBuf.errorArray[i + offset].type = (VideoDecodeErrorType)err_drv_output[i].decode_error_type; - } + } else break; } } } -- cgit v1.2.3 From 70b9f8248b167eb58ecd9ae8b3b8ced86b037f15 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Mon, 4 Nov 2013 16:58:35 +0800 Subject: To enable the optimized libmix parser code BZ: 149341 This patch modifies the top-level Android.mk to enable the optimized libmix parser code. Change-Id: I027f36e55f6b476758203f25fee6d8aea0573c3c Signed-off-by: wfeng6 --- Android.mk | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Android.mk b/Android.mk index 2639e2b..cbc7048 100644 --- a/Android.mk +++ b/Android.mk @@ -4,7 +4,7 @@ ifeq ($(INTEL_VA),true) include $(CLEAR_VARS) VENDORS_INTEL_MRST_LIBMIX_ROOT := $(LOCAL_PATH) -include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mix_vbp/Android.mk +include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/mixvbp/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/asfparser/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk -- cgit v1.2.3 From dc1446dc72fcc8f9c23cab72c9de29bf83e94b8b Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Fri, 29 Nov 2013 13:25:15 +0800 Subject: libmix: A fix to enable low delay mode for VP8 BZ: 154044 VP8 doesn't have config data, so low delay mode should be enabled when calling startVA(). Change-Id: Iaed76040ce9e31052f63f5ba89cd1c57dc8cf3ed Signed-off-by: Tianmi Chen --- videodecoder/VideoDecoderVP8.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index f71099c..e4ef413 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -84,6 +84,8 @@ Decode_Status VideoDecoderVP8::startVA(vbp_data_vp8 *data) { return DECODE_PARSER_FAIL; } + enableLowDelayMode(true); + return VideoDecoderBase::setupVA(VP8_SURFACE_NUMBER + VP8_REF_SIZE, vaProfile); } @@ -105,7 +107,6 @@ Decode_Status VideoDecoderVP8::start(VideoConfigBuffer *buffer) { status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data); CHECK_STATUS("VideoDecoderBase::parseBuffer"); - enableLowDelayMode(true); status = startVA(data); return status; } -- cgit v1.2.3 From 4ed0f434a70358b8845b6bed821e32478854ed8e Mon Sep 17 00:00:00 2001 From: liubolun Date: Fri, 22 Nov 2013 13:44:32 +0800 Subject: Render dynamic bitrate/framerate for VP8 encode. BZ: 153768 Send dynamic bitrate/framerate rendering command for VP8 encode. 
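For context, this is how a client would drive these deferred flags; a sketch assuming the setConfig()/VideoConfigBitRate API from VideoEncoderDef.h, where the rcParams field name and the mVideoEncoder instance are assumptions:

    // Request a new target bitrate mid-stream (assumed field names).
    VideoConfigBitRate configBitRate;
    configBitRate.rcParams.bitRate = 2000000;   // new target, in bps
    mVideoEncoder->setConfig((VideoParamConfigSet *)&configBitRate);
    // setConfig() only latches the request and raises mRenderBitRate;
    // the rate-control misc parameter buffer is rendered on the next
    // encode() pass, in the sendEncodeCommand() hunk below.

Deferring the render into sendEncodeCommand() keeps all VAAPI buffer submissions on the encode path, so a configuration change never has to touch an in-flight frame.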
Change-Id: Ib30756f23254565517fbe51e51a8e42f7cf2c7a8 Signed-off-by: liubolun --- videoencoder/VideoEncoderDef.h | 6 +++--- videoencoder/VideoEncoderVP8.cpp | 14 ++++++++++++++ 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index fa4c2a1..0b11b05 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -537,11 +537,11 @@ struct VideoConfigNALSize : VideoParamConfigSet { uint32_t maxSliceSize; }; -struct VideoConfigResoltuion : VideoParamConfigSet { +struct VideoConfigResolution : VideoParamConfigSet { - VideoConfigResoltuion() { + VideoConfigResolution() { type = VideoConfigTypeResolution; - size = sizeof(VideoConfigResoltuion); + size = sizeof(VideoConfigResolution); } VideoResolution resolution; diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index 617813d..4f0c792 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -220,6 +220,20 @@ Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) { CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); } + if (mRenderBitRate){ + ret = renderRCParams(); + CHECK_ENCODE_STATUS_RETURN("renderRCParams"); + + mRenderBitRate = false; + } + + if (mRenderFrameRate) { + ret = renderFrameRateParams(); + CHECK_ENCODE_STATUS_RETURN("renderFrameRateParams"); + + mRenderFrameRate = false; + } + ret = renderPictureParams(task); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); -- cgit v1.2.3 From 7415649399cfe24f49e0d53c250d1d945cee1fae Mon Sep 17 00:00:00 2001 From: "SUN,Jing" Date: Mon, 2 Dec 2013 14:57:34 +0800 Subject: Corrected an IntelImageEncoder's issue that sometimes a created surface can't be destroyed. BZ: 155655 Issue: Due to a coding bug, sometimes a created surface can't be destroyed. Solution: Fixed that bug. Change-Id: I29b33f8d335eef0222705e502c9b68d533d00def Signed-off-by: SUN,Jing --- imageencoder/ImageEncoder.cpp | 3 +- imageencoder/ImageEncoder.h | 85 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 86 insertions(+), 2 deletions(-) diff --git a/imageencoder/ImageEncoder.cpp b/imageencoder/ImageEncoder.cpp index ae9232b..485d572 100644 --- a/imageencoder/ImageEncoder.cpp +++ b/imageencoder/ImageEncoder.cpp @@ -474,8 +474,7 @@ int IntelImageEncoder::destroySourceSurface(int image_seq) { VAStatus va_status; - if ((image_seq < 0) || ((unsigned int)image_seq >= images_count) || - (0 == va_surfaceid[image_seq])) { + if ((image_seq < 0) || (images_count < 1) || (0 == va_surfaceid[image_seq])) { LOGE("destroySourceSurface: invalid image sequential number!\n"); return VA_STATUS_ERROR_INVALID_PARAMETER; } else if (image_seq == reserved_image_seq) { diff --git a/imageencoder/ImageEncoder.h b/imageencoder/ImageEncoder.h index 0d1aebf..31ce042 100644 --- a/imageencoder/ImageEncoder.h +++ b/imageencoder/ImageEncoder.h @@ -24,17 +24,83 @@ class IntelImageEncoder { public: IntelImageEncoder(void); ~IntelImageEncoder(void) {}; + + /* + * initializeEncoder initializes Intel JPEG HW encoder, + * only needs to be called once at booting unless errors happen. + * Return zero for success and non-zero for failure. + */ int initializeEncoder(void); + + /* + * createSourceSurface creates a surface for encoding. + * based on the input buffer taking source image data. + * This can be called after the encoder is initialized. + * Parameters: + * source_type: the type of source buffer: + * SURFACE_TYPE_USER_PTR for malloced buffer; + * SURFACE_TYPE_GRALLOC for gralloced buffer. 
+ * source_buffer: the input source buffer address, which + * must be aligned to page-size + * width: the width of source image data + * height: the height of source image data + * stride: the stride of source image data + * fourcc: the fourcc value of source, VA_RT_FORMAT_YUV420 + * and VA_RT_FORMAT_YUV422 are supported. + * image_seqp: the pointer to a returned image sequential + * number to identity an image and its surface. + * Return zero for success and non-zero for failure. + */ int createSourceSurface(int source_type, void *source_buffer, unsigned int width,unsigned int height, unsigned int stride, unsigned int fourcc, int *image_seqp); + + /* + * createContext creates the encoding context, + * based on the parameters of a existing surface. + * This can be called after the encoder is initialized + * and at least one surface is created. + * Parameters: + * first_image_seq: the sequential number of image/surface + * to be based to create context, + * the created context will be set to its + * parameters: width, height, stride & fourcc. + * max_coded_sizep: the returned pointer to the max coded buffer. + * size for references, given the current context. + * Return zero for success and non-zero for failure. + */ int createContext(int first_image_seq, unsigned int *max_coded_sizep); int createContext(unsigned int *max_coded_sizep) { return this->createContext(0, max_coded_sizep); } + + /* + * setQuality changes the current quality value, + * whose default value is 90. + * This can be called after the encoder is initialized. + * Parameters: + * new_quality: the new quality value to be set. + * Return zero for success and non-zero for failure. + */ int setQuality(unsigned int new_quality); + + /* + * encode triggers an image/surface's JPEG encoding. + * This can be called after the encoder is initialize + * and context is created. + * Parameters: + * image_seq: the sequential number of an image/surface + * to be encoded. This has not to be the same + * as the one used to create context, but it + * has to be of the same parameters. + * If a surface with different paras needs to + * be encoded, another context based on its + * parameters is needed. + * new_quality: the new quality value to be set. + * Return zero for success and non-zero for failure. + */ int encode(int image_seq, unsigned int new_quality); int encode(int image_seq) { @@ -44,9 +110,28 @@ public: { return this->encode(0, quality); } + + /* + * getCoded waits for the current encoding task's completion. + * Only one encoding task can be triggered under an instance + * of IntelImageEncoder at any minute. + * This has not be called right after encode is called, + * instead, this can be called any minutes after the encoding + * is triggered to support both synced/asynced encoding usage. + * Parameters: + * user_coded_buf: the input buffer to take coded data. + * After getCoded is returned with no errors, + * this buffer will have the coded JPEG in it. + * user_coded_buf_size: the size of input buffer. + * If too small, an error'll be returned. + * coded_data_sizep: the returned pointer to the actual size + * value of coded JPEG. + * Return zero for success and non-zero for failure. 
+ */ int getCoded(void *user_coded_buf, unsigned int user_coded_buf_size, unsigned int *coded_data_sizep); + int destroySourceSurface(int image_seq); int destroyContext(void); int deinitializeEncoder(void); -- cgit v1.2.3 From c50cf970fb6d294f44704c4c50035682b3aea414 Mon Sep 17 00:00:00 2001 From: Liu Bolun Date: Mon, 2 Dec 2013 13:20:04 +0800 Subject: Refine libmix code. BZ: 153777 Fix CTS dequeue buffer crash; fix compile issue for test tool;fix hardcode issue in Android.mk Change-Id: I76c360fef97000b9ceabff2c88bbd3063e93a804 Signed-off-by: Liu Bolun --- test/Android.mk | 15 +++------------ test/BSClient.cpp | 4 ++-- test/mix_encoder2.cpp | 6 +++--- videoencoder/VideoEncoderUtils.cpp | 9 ++++++--- 4 files changed, 14 insertions(+), 20 deletions(-) diff --git a/test/Android.mk b/test/Android.mk index f387746..4886720 100644 --- a/test/Android.mk +++ b/test/Android.mk @@ -11,7 +11,6 @@ LOCAL_SRC_FILES := \ btest.cpp LOCAL_C_INCLUDES := \ - $(LOCAL_PATH) \ $(TARGET_OUT_HEADERS)/libmix_videoencoder \ LOCAL_SHARED_LIBRARIES := \ @@ -33,7 +32,6 @@ LOCAL_SRC_FILES := \ BSServer.cpp LOCAL_C_INCLUDES := \ - $(LOCAL_PATH) \ $(TARGET_OUT_HEADERS)/libmix_videoencoder \ LOCAL_SHARED_LIBRARIES := \ @@ -55,7 +53,6 @@ LOCAL_SRC_FILES := \ BSClient.cpp LOCAL_C_INCLUDES := \ - $(LOCAL_PATH) \ $(TARGET_OUT_HEADERS)/libmix_videoencoder \ LOCAL_SHARED_LIBRARIES := \ @@ -85,8 +82,6 @@ LOCAL_C_INCLUDES := \ $(TARGET_OUT_HEADERS)/libva \ $(TARGET_OUT_HEADERS)/libmix_videoencoder \ $(TARGET_OUT_HEADERS)/pvr \ - $(TOP)/frameworks/base/include/display \ - $(LOCAL_PATH) LOCAL_SHARED_LIBRARIES := \ libintelmetadatabuffer \ @@ -119,10 +114,8 @@ LOCAL_C_INCLUDES := \ $(TARGET_OUT_HEADERS)/libva \ $(TARGET_OUT_HEADERS)/libmix_videoencoder \ $(TARGET_OUT_HEADERS)/pvr \ - $(TOP)/frameworks/base/include/display \ - $(TOP)/frameworks/av/media/libstagefright \ - $(TOP)/frameworks/native/include/media/openmax \ - $(LOCAL_PATH) + $(call include-path-for, libstagefright) \ + $(call include-path-for, frameworks-openmax) \ LOCAL_SHARED_LIBRARIES := \ libintelmetadatabuffer \ @@ -157,9 +150,7 @@ LOCAL_SRC_FILES := \ LOCAL_C_INCLUDES := \ $(TARGET_OUT_HEADERS)/pvr \ - $(TOP)/frameworks/base/include/display \ - $(TOP)/frameworks/av/media/libstagefright \ - $(LOCAL_PATH) + $(call include-path-for, libstagefright) \ LOCAL_SHARED_LIBRARIES := \ libgui \ diff --git a/test/BSClient.cpp b/test/BSClient.cpp index bffb09d..0c3cb77 100755 --- a/test/BSClient.cpp +++ b/test/BSClient.cpp @@ -87,7 +87,7 @@ int main(int argc, char* argv[]) memset(Buffer1->pointer(), 0xAA, 1000); mb1 = new IntelMetadataBuffer(); - ret = mb1->SetType(MetadataBufferTypeCameraSource); + ret = mb1->SetType(IntelMetadataBufferTypeCameraSource); #ifdef INTEL_VIDEO_XPROC_SHARING mb1->SetSessionFlag(token); if ((ret = mb1->ShareValue(Buffer1)) != IMB_SUCCESS) { @@ -122,7 +122,7 @@ int main(int argc, char* argv[]) graphicBuffer->unlock(); mb1 = new IntelMetadataBuffer(); - ret = mb1->SetType(MetadataBufferTypeCameraSource); + ret = mb1->SetType(IntelMetadataBufferTypeCameraSource); #ifdef INTEL_VIDEO_XPROC_SHARING mb1->SetSessionFlag(token); diff --git a/test/mix_encoder2.cpp b/test/mix_encoder2.cpp index 7f65649..ecdee3e 100755 --- a/test/mix_encoder2.cpp +++ b/test/mix_encoder2.cpp @@ -664,7 +664,7 @@ public: mGraphicBuffer[i] = graphicBuffer; mIMB[i] = new IntelMetadataBuffer(); - mIMB[i]->SetType(MetadataBufferTypeCameraSource); + mIMB[i]->SetType(IntelMetadataBufferTypeCameraSource); #ifdef INTEL_VIDEO_XPROC_SHARING mIMB[i]->SetSessionFlag(mSessionFlag); 
mIMB[i]->ShareValue(mGraphicBuffer[i]); @@ -932,7 +932,7 @@ public: mMixCodec = (char*) MPEG4_MIME_TYPE; } else if (strcmp(mime, MEDIA_MIMETYPE_VIDEO_H263) == 0) { mMixCodec = (char*) H263_MIME_TYPE; - } else if (strcmp(mime, MEDIA_MIMETYPE_VIDEO_VPX) == 0) { + } else if (strcmp(mime, MEDIA_MIMETYPE_VIDEO_VP8) == 0) { mMixCodec = (char*) VP8_MIME_TYPE; } else { mMixCodec = (char*) AVC_MIME_TYPE; @@ -1812,7 +1812,7 @@ int main(int argc, char* argv[]) enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263); break; case 3: - enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VPX); + enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VP8); break; default: enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC); diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp index 746959c..917e8da 100644 --- a/videoencoder/VideoEncoderUtils.cpp +++ b/videoencoder/VideoEncoderUtils.cpp @@ -163,13 +163,16 @@ Encode_Status GetGfxBufferInfo(int32_t handle, ValueInfo& vinfo){ /* only support OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar HAL_PIXEL_FORMAT_NV12 - HAL_PIXEL_FORMAT_BGRA_8888 */ + HAL_PIXEL_FORMAT_BGRA_8888 + HAL_PIXEL_FORMAT_BGRX_8888 */ IMG_native_handle_t* h = (IMG_native_handle_t*) handle; vinfo.width = h->iWidth; vinfo.height = h->iHeight; vinfo.lumaStride = h->iWidth; + LOG_I("GetGfxBufferInfo: gfx iWidth=%d, iHeight=%d, iFormat=%x in handle structure\n", h->iWidth, h->iHeight, h->iFormat); + if (h->iFormat == HAL_PIXEL_FORMAT_NV12) { #ifdef MRFLD_GFX vinfo.lumaStride = (h->iWidth + 31) & ~31; //32 aligned @@ -179,7 +182,8 @@ Encode_Status GetGfxBufferInfo(int32_t handle, ValueInfo& vinfo){ else vinfo.lumaStride = 512; #endif - } else if (h->iFormat == HAL_PIXEL_FORMAT_BGRA_8888) { + } else if ((h->iFormat == HAL_PIXEL_FORMAT_BGRA_8888)|| + ((h->iFormat == HAL_PIXEL_FORMAT_BGRX_8888))) { vinfo.lumaStride = (h->iWidth + 31) & ~31; } else if (h->iFormat == OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar) { //nothing to do @@ -188,7 +192,6 @@ Encode_Status GetGfxBufferInfo(int32_t handle, ValueInfo& vinfo){ vinfo.format = h->iFormat; - LOG_I("GetGfxBufferInfo: gfx iWidth=%d, iHeight=%d, iFormat=%x in handle structure\n", h->iWidth, h->iHeight, h->iFormat); LOG_I(" Actual Width=%d, Height=%d, Stride=%d\n\n", vinfo.width, vinfo.height, vinfo.lumaStride); return ENCODE_SUCCESS; } -- cgit v1.2.3 From c20aa0990d14c178ad59577288aaed19104e7f33 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Tue, 3 Dec 2013 13:16:12 +0800 Subject: Add new output format OUTPUT_NALLENGTHS_PREFIXED BZ: 156038 this new format structure is like: [out-band data] + bitstream [out-band data] is fixed 256 bytes, contains: 'nall' + nalnum + nallength[nalnum] each one is 4 bytes aligned, out-band data is marked with offset. VideoEncOutputBuffer structure is also extended to support offset. 
Change-Id: I572c547be606afe3f7e658e2ef08507f97ff2f3f Signed-off-by: Zhao Liang --- videoencoder/VideoEncoderAVC.cpp | 89 ++++++++++++++++++++++++++++++++++++++- videoencoder/VideoEncoderAVC.h | 1 + videoencoder/VideoEncoderBase.cpp | 1 + videoencoder/VideoEncoderDef.h | 4 +- 4 files changed, 92 insertions(+), 3 deletions(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 0c021dc..85c1315 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -326,6 +326,13 @@ Encode_Status VideoEncoderAVC::getExtFormatOutput(VideoEncOutputBuffer *outBuffe break; } + case OUTPUT_NALULENGTHS_PREFIXED: { + // Output nalu lengths ahead of bitstream + ret = outputNaluLengthsPrefixed(outBuffer); + CHECK_ENCODE_STATUS_CLEANUP("outputNaluLengthsPrefixed"); + break; + } + default: LOG_E("Invalid buffer mode\n"); ret = ENCODE_FAIL; @@ -496,8 +503,8 @@ Encode_Status VideoEncoderAVC::outputCodecData( if (headerSize <= outBuffer->bufferSize) { memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg, headerSize); -// mTotalSizeCopied += headerSize; -// mOffsetInSeg += headerSize; + mTotalSizeCopied += headerSize; + mOffsetInSeg += headerSize; outBuffer->dataSize = headerSize; outBuffer->remainingSize = 0; outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; @@ -642,6 +649,84 @@ Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuf return ENCODE_SUCCESS; } +Encode_Status VideoEncoderAVC::outputNaluLengthsPrefixed(VideoEncOutputBuffer *outBuffer) { + + Encode_Status ret = ENCODE_SUCCESS; + uint32_t nalType = 0; + uint32_t nalSize = 0; + uint32_t nalOffset = 0; + uint32_t sizeCopiedHere = 0; + const uint32_t NALUINFO_OFFSET = 256; + uint32_t nalNum = 0; + + CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf); + LOG_I("outputNaluLengthsPrefixed\n"); + + while (1) { + + if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) { + LOG_E("mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere\n"); + return ENCODE_FAIL; + } + + // we need to handle the whole bitstream NAL by NAL + ret = getOneNALUnit( + (uint8_t *)mCurSegment->buf + mOffsetInSeg, + mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset, mCurSegment->status); + CHECK_ENCODE_STATUS_RETURN("getOneNALUnit"); + + if (nalSize + 4 <= outBuffer->bufferSize - NALUINFO_OFFSET - sizeCopiedHere) { + LOG_I("zhaoliang nalSize=%d, nalOffset=%d\n", nalSize, nalOffset); + + memcpy(outBuffer->data + NALUINFO_OFFSET + sizeCopiedHere, + (uint8_t *)mCurSegment->buf + mOffsetInSeg, nalSize + nalOffset); + + sizeCopiedHere += nalSize + nalOffset; + mTotalSizeCopied += nalSize + nalOffset; + mOffsetInSeg += (nalSize + nalOffset); + + } else { + outBuffer->dataSize = sizeCopiedHere; + // The start code may be only 3 bytes while a 4-byte length prefix is assumed, + // so remainingSize may be larger than the actual remaining data size + outBuffer->remainingSize = mTotalSize - mTotalSizeCopied + 100; + outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME; + LOG_E("Buffer size too small\n"); + return ENCODE_BUFFER_TOO_SMALL; + } + + nalNum++; + uint32_t *nalLength = (uint32_t *) (outBuffer->data + (nalNum+1) * 4); + + *nalLength = nalSize + nalOffset; + + LOG_I("nalLength=%d\n", nalSize + nalOffset); + + // check if all data in current segment has been copied out + if (mCurSegment->size == mOffsetInSeg) { + if (mCurSegment->next != NULL) { + mCurSegment = (VACodedBufferSegment *)mCurSegment->next; + mOffsetInSeg = 0; + } else { + LOG_V("End of
stream\n"); + outBuffer->dataSize = sizeCopiedHere; + outBuffer->remainingSize = 0; + outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME; + mCurSegment = NULL; + break; + } + } + } + + outBuffer->offset = NALUINFO_OFFSET; + uint32_t *nalHead = (uint32_t *) outBuffer->data; + LOG_I("zhaoliang nalHead =%x\n", nalHead); + *nalHead = 0x4E414C4C; //'nall' + *(++nalHead) = nalNum; + + return ENCODE_SUCCESS; +} + Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) { Encode_Status ret = ENCODE_SUCCESS; diff --git a/videoencoder/VideoEncoderAVC.h b/videoencoder/VideoEncoderAVC.h index f33755b..c66acc3 100644 --- a/videoencoder/VideoEncoderAVC.h +++ b/videoencoder/VideoEncoderAVC.h @@ -37,6 +37,7 @@ private: Encode_Status outputCodecData(VideoEncOutputBuffer *outBuffer); Encode_Status outputOneNALU(VideoEncOutputBuffer *outBuffer, bool startCode); Encode_Status outputLengthPrefixed(VideoEncOutputBuffer *outBuffer); + Encode_Status outputNaluLengthsPrefixed(VideoEncOutputBuffer *outBuffer); Encode_Status renderMaxSliceSize(); Encode_Status renderAIR(); diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 52e68a1..47b3f19 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -489,6 +489,7 @@ Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint3 CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput"); //copy all flags to outBuffer + outBuffer->offset = 0; outBuffer->flag = mCurOutputTask->flag; outBuffer->type = mCurOutputTask->type; outBuffer->timeStamp = mCurOutputTask->timestamp; diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 0b11b05..2b48104 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -45,6 +45,7 @@ typedef enum { OUTPUT_ONE_NAL_WITHOUT_STARTCODE = 8, OUTPUT_LENGTH_PREFIXED = 16, OUTPUT_CODEDBUFFER = 32, + OUTPUT_NALULENGTHS_PREFIXED = 64, OUTPUT_BUFFER_LAST } VideoOutputFormat; @@ -138,7 +139,8 @@ typedef struct { uint8_t *data; uint32_t bufferSize; //buffer size - uint32_t dataSize; //actuall size + uint32_t dataSize; //actual size + uint32_t offset; //buffer offset uint32_t remainingSize; int flag; //Key frame, Codec Data etc VideoOutputFormat format; //output format -- cgit v1.2.3 From 84b17c750543dbaf44b4eb08d949191be3a56abe Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Wed, 4 Dec 2013 16:12:23 +0800 Subject: Create folders for secure libmix decoder for CHT, Merr+ and Moorefield BZ: 156066 The moorefield and merrplus folders copy the code from merrifield as their starting point. The cherrytrail folder copies the code from baytrail.
The whole secvideo folder structure is shown as below: securevideo/ |-- baytrail |-- cherrytrail |-- clovertrail |-- merrifield |-- merrplus `-- moorefield Change-Id: I95e37ffd9deb312b5423f5e5abb060581625d94c Signed-off-by: wfeng6 --- videodecoder/Android.mk | 12 +- .../cherrytrail/VideoDecoderAVCSecure.cpp | 359 ++++++++++++++ .../cherrytrail/VideoDecoderAVCSecure.h | 52 +++ .../securevideo/cherrytrail/secvideoparser.h | 157 +++++++ videodecoder/securevideo/cherrytrail/va_private.h | 77 +++ .../securevideo/merrplus/VideoDecoderAVCSecure.cpp | 518 +++++++++++++++++++++ .../securevideo/merrplus/VideoDecoderAVCSecure.h | 83 ++++ videodecoder/securevideo/moorefield | 1 - .../moorefield/VideoDecoderAVCSecure.cpp | 518 +++++++++++++++++++++ .../securevideo/moorefield/VideoDecoderAVCSecure.h | 83 ++++ 10 files changed, 1852 insertions(+), 8 deletions(-) create mode 100644 videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.cpp create mode 100644 videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.h create mode 100644 videodecoder/securevideo/cherrytrail/secvideoparser.h create mode 100644 videodecoder/securevideo/cherrytrail/va_private.h create mode 100644 videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.cpp create mode 100644 videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.h delete mode 120000 videodecoder/securevideo/moorefield create mode 100644 videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp create mode 100644 videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index f7b7c80..32a1851 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -27,15 +27,13 @@ LOCAL_C_INCLUDES += $(LOCAL_PATH)/securevideo/$(TARGET_BOARD_PLATFORM) LOCAL_CFLAGS += -DUSE_INTEL_SECURE_AVC endif -PLATFORM_SUPPORT_AVC_SHORT_FORMAT := \ - baytrail -ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),) - LOCAL_CFLAGS += -DUSE_AVC_SHORT_FORMAT +PLATFORM_USE_GEN_HW := \ + baytrail \ + cherrytrail +ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_USE_GEN_HW)),) + LOCAL_CFLAGS += -DUSE_AVC_SHORT_FORMAT -DUSE_GEN_HW endif -ifeq ($(TARGET_BOARD_PLATFORM),baytrail) -LOCAL_CFLAGS += -DUSE_GEN_HW -endif #LOCAL_LDLIBS += -lpthread diff --git a/videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.cpp new file mode 100644 index 0000000..06fb677 --- /dev/null +++ b/videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.cpp @@ -0,0 +1,359 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. 
+* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#include "va_private.h" +#include "VideoDecoderAVCSecure.h" +#include "VideoDecoderTrace.h" +#include + +#define STARTCODE_PREFIX_LEN 3 +#define NALU_TYPE_MASK 0x1F +#define MAX_NALU_HEADER_BUFFER 8192 +static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01}; + +VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) + : VideoDecoderAVC(mimeType), + mNaluHeaderBuffer(NULL), + mSliceHeaderBuffer(NULL) { + setParserType(VBP_H264SECURE); +} + +VideoDecoderAVCSecure::~VideoDecoderAVCSecure() { +} + +Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { + Decode_Status status = VideoDecoderAVC::start(buffer); + if (status != DECODE_SUCCESS) { + return status; + } + + mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; + + if (mNaluHeaderBuffer == NULL) { + ETRACE("Failed to allocate memory for mNaluHeaderBuffer"); + return DECODE_MEMORY_FAIL; + } + + mSliceHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; + if (mSliceHeaderBuffer == NULL) { + ETRACE("Failed to allocate memory for mSliceHeaderBuffer"); + if (mNaluHeaderBuffer) { + delete [] mNaluHeaderBuffer; + mNaluHeaderBuffer = NULL; + } + return DECODE_MEMORY_FAIL; + } + + return status; +} + +void VideoDecoderAVCSecure::stop(void) { + VideoDecoderAVC::stop(); + + if (mNaluHeaderBuffer) { + delete [] mNaluHeaderBuffer; + mNaluHeaderBuffer = NULL; + } + + if (mSliceHeaderBuffer) { + delete [] mSliceHeaderBuffer; + mSliceHeaderBuffer = NULL; + } + +} + +Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { + Decode_Status status; + int32_t sizeAccumulated = 0; + int32_t sliceHeaderSize = 0; + int32_t sizeLeft = 0; + int32_t sliceIdx = 0; + uint8_t naluType; + frame_info_t* pFrameInfo; + + mFrameSize = 0; + if (buffer->flag & IS_SECURE_DATA) { + VTRACE("Decoding protected video ..."); + mIsEncryptData = 1; + } else { + VTRACE("Decoding clear video ..."); + mIsEncryptData = 0; + return VideoDecoderAVC::decode(buffer); + } + + if (buffer->size != sizeof(frame_info_t)) { + ETRACE("Not enough data to read frame_info_t!"); + return DECODE_INVALID_DATA; + } + pFrameInfo = (frame_info_t*) buffer->data; + + mFrameSize = pFrameInfo->length; + VTRACE("mFrameSize = %d", mFrameSize); + + memcpy(&mEncParam, pFrameInfo->pavp, sizeof(pavp_info_t)); + for (int32_t i = 0; i < pFrameInfo->num_nalus; i++) { + naluType = pFrameInfo->nalus[i].type & NALU_TYPE_MASK; + if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) { + memcpy(mSliceHeaderBuffer + sliceHeaderSize, + &sliceIdx, + sizeof(int32_t)); + sliceHeaderSize += 4; + + memcpy(mSliceHeaderBuffer + sliceHeaderSize, + &pFrameInfo->data, + sizeof(uint8_t*)); + sliceHeaderSize += sizeof(uint8_t*); + + memcpy(mSliceHeaderBuffer + sliceHeaderSize, + &pFrameInfo->nalus[i].offset, + sizeof(uint32_t)); + sliceHeaderSize += sizeof(uint32_t); + + memcpy(mSliceHeaderBuffer + sliceHeaderSize, + &pFrameInfo->nalus[i].length, + sizeof(uint32_t)); + sliceHeaderSize += sizeof(uint32_t); + + memcpy(mSliceHeaderBuffer + sliceHeaderSize, + pFrameInfo->nalus[i].slice_header, + sizeof(slice_header_t)); + sliceHeaderSize += sizeof(slice_header_t); + if 
(pFrameInfo->nalus[i].type & 0x60) { + memcpy(mSliceHeaderBuffer+sliceHeaderSize, pFrameInfo->dec_ref_pic_marking, sizeof(dec_ref_pic_marking_t)); + } else { + memset(mSliceHeaderBuffer+sliceHeaderSize, 0, sizeof(dec_ref_pic_marking_t)); + } + sliceHeaderSize += sizeof(dec_ref_pic_marking_t); + sliceIdx++; + } else if (naluType >= h264_NAL_UNIT_TYPE_SEI && naluType <= h264_NAL_UNIT_TYPE_PPS) { + memcpy(mNaluHeaderBuffer + sizeAccumulated, + startcodePrefix, + STARTCODE_PREFIX_LEN); + sizeAccumulated += STARTCODE_PREFIX_LEN; + memcpy(mNaluHeaderBuffer + sizeAccumulated, + pFrameInfo->nalus[i].data, + pFrameInfo->nalus[i].length); + sizeAccumulated += pFrameInfo->nalus[i].length; + } else { + WTRACE("Failure: DECODE_FRAME_DROPPED"); + return DECODE_FRAME_DROPPED; + } + } + + vbp_data_h264 *data = NULL; + + if (sizeAccumulated > 0) { + status = VideoDecoderBase::parseBuffer( + mNaluHeaderBuffer, + sizeAccumulated, + false, + (void**)&data); + CHECK_STATUS("VideoDecoderBase::parseBuffer"); + } + + if (sliceHeaderSize > 0) { + memset(mSliceHeaderBuffer + sliceHeaderSize, 0xFF, 4); + sliceHeaderSize += 4; + status = VideoDecoderBase::updateBuffer( + mSliceHeaderBuffer, + sliceHeaderSize, + (void**)&data); + CHECK_STATUS("VideoDecoderBase::updateBuffer"); + } + + if (data == NULL) { + ETRACE("Invalid data returned by parser!"); + return DECODE_MEMORY_FAIL; + } + + if (!mVAStarted) { + if (data->has_sps && data->has_pps) { + status = startVA(data); + CHECK_STATUS("startVA"); + } else { + WTRACE("Can't start VA as either SPS or PPS is still not available."); + return DECODE_SUCCESS; + } + } + status = decodeFrame(buffer, data); + return status; +} + +Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { + Decode_Status status; + VAStatus vaStatus; + uint32_t bufferIDCount = 0; + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID bufferIDs[5]; + + vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); + vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); + VAPictureParameterBufferH264 *picParam = picData->pic_parms; + VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); + VAEncryptionParameterBuffer encryptParam; + + if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { + // either condition indicates start of a new frame + if (sliceParam->first_mb_in_slice != 0) { + WTRACE("The first slice is lost."); + // TODO: handle the first slice lost + } + if (mDecodingFrame) { + // interlace content, complete decoding the first field + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS("vaEndPicture"); + + // for interlace content, top field may be valid only after the second field is parsed + mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; + } + + // Update the reference frames and surface IDs for DPB and current frame + status = updateDPB(picParam); + CHECK_STATUS("updateDPB"); + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); + + // start decoding a frame + mDecodingFrame = true; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferH264), + 1, + picParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAIQMatrixBufferType, + 
sizeof(VAIQMatrixBufferH264), + 1, + data->IQ_matrix_buf, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); + bufferIDCount++; + + if (mIsEncryptData) { + memset(&encryptParam, 0, sizeof(VAEncryptionParameterBuffer)); + encryptParam.pavpCounterMode = 4; + encryptParam.pavpEncryptionType = 2; + encryptParam.hostEncryptMode = 2; + encryptParam.pavpHasBeenEnabled = 1; + encryptParam.app_id = 0; + memcpy(encryptParam.pavpAesCounter, mEncParam.iv, 16); + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + (VABufferType)VAEncryptionParameterBufferType, + sizeof(VAEncryptionParameterBuffer), + 1, + &encryptParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateEncryptionParameterBuffer"); + bufferIDCount++; + } + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + mFrameSize, //size + 1, //num_elements + sliceData->buffer_addr + sliceData->slice_offset, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + bufferIDCount++; + + } + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264Base), + 1, + sliceParam, + &bufferIDs[bufferIDCount]); + + CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); + bufferIDCount++; + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + bufferIDs, + bufferIDCount); + CHECK_VA_STATUS("vaRenderPicture"); + + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderAVCSecure::getCodecSpecificConfigs( + VAProfile profile, VAConfigID *config) +{ + VAStatus vaStatus; + VAConfigAttrib attrib[2]; + + if (config == NULL) { + ETRACE("Invalid parameter!"); + return DECODE_FAIL; + } + + attrib[0].type = VAConfigAttribRTFormat; + attrib[0].value = VA_RT_FORMAT_YUV420; + attrib[1].type = VAConfigAttribDecSliceMode; + attrib[1].value = VA_DEC_SLICE_MODE_NORMAL; + + vaStatus = vaGetConfigAttributes(mVADisplay,profile,VAEntrypointVLD, &attrib[1], 1); + + if (attrib[1].value & VA_DEC_SLICE_MODE_BASE) + { + ITRACE("AVC short format used"); + attrib[1].value = VA_DEC_SLICE_MODE_BASE; + } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) { + ITRACE("AVC long format ssed"); + attrib[1].value = VA_DEC_SLICE_MODE_NORMAL; + } else { + ETRACE("Unsupported Decode Slice Mode!"); + return DECODE_FAIL; + } + + vaStatus = vaCreateConfig( + mVADisplay, + profile, + VAEntrypointVLD, + &attrib[0], + 2, + config); + CHECK_VA_STATUS("vaCreateConfig"); + + return DECODE_SUCCESS; +} diff --git a/videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.h b/videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.h new file mode 100644 index 0000000..18289eb --- /dev/null +++ b/videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.h @@ -0,0 +1,52 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. 
+* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef VIDEO_DECODER_AVC_SECURE_H_ +#define VIDEO_DECODER_AVC_SECURE_H_ + +#include "VideoDecoderAVC.h" +#include "secvideoparser.h" + +class VideoDecoderAVCSecure : public VideoDecoderAVC { +public: + VideoDecoderAVCSecure(const char *mimeType); + virtual ~VideoDecoderAVCSecure(); + virtual Decode_Status start(VideoConfigBuffer *buffer); + virtual void stop(void); + virtual Decode_Status decode(VideoDecodeBuffer *buffer); + +protected: + virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID*config); + +private: + virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); +private: + pavp_info_t mEncParam; + uint8_t *mNaluHeaderBuffer; + uint8_t *mSliceHeaderBuffer; + uint32_t mIsEncryptData; + uint32_t mFrameSize; +}; + +#endif /* VIDEO_DECODER_AVC_SECURE_H_ */ diff --git a/videodecoder/securevideo/cherrytrail/secvideoparser.h b/videodecoder/securevideo/cherrytrail/secvideoparser.h new file mode 100644 index 0000000..18f487d --- /dev/null +++ b/videodecoder/securevideo/cherrytrail/secvideoparser.h @@ -0,0 +1,157 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2013 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+*/ + +#ifndef SEC_VIDEO_PARSER_H_ +#define SEC_VIDEO_PARSER_H_ + +#include + +/* H264 start code values */ +typedef enum _h264_nal_unit_type +{ + h264_NAL_UNIT_TYPE_unspecified = 0, + h264_NAL_UNIT_TYPE_SLICE, + h264_NAL_UNIT_TYPE_DPA, + h264_NAL_UNIT_TYPE_DPB, + h264_NAL_UNIT_TYPE_DPC, + h264_NAL_UNIT_TYPE_IDR, + h264_NAL_UNIT_TYPE_SEI, + h264_NAL_UNIT_TYPE_SPS, + h264_NAL_UNIT_TYPE_PPS, + h264_NAL_UNIT_TYPE_Acc_unit_delimiter, + h264_NAL_UNIT_TYPE_EOSeq, + h264_NAL_UNIT_TYPE_EOstream, + h264_NAL_UNIT_TYPE_filler_data, + h264_NAL_UNIT_TYPE_SPS_extension, + h264_NAL_UNIT_TYPE_ACP = 19, + h264_NAL_UNIT_TYPE_Slice_extension = 20 +} h264_nal_unit_type_t; + +#define MAX_OP 16 + +enum dec_ref_pic_marking_flags { + IDR_PIC_FLAG = 0, + NO_OUTPUT_OF_PRIOR_PICS_FLAG, + LONG_TERM_REFERENCE_FLAG, + ADAPTIVE_REF_PIC_MARKING_MODE_FLAG +}; + +typedef struct _dec_ref_pic_marking_t { + union { + uint8_t flags; + struct { + uint8_t idr_pic_flag:1; + uint8_t no_output_of_prior_pics_flag:1; + uint8_t long_term_reference_flag:1; + uint8_t adaptive_ref_pic_marking_mode_flag:1; + }; + }; + struct { + uint8_t memory_management_control_operation; + union { + struct { + uint8_t difference_of_pic_nums_minus1; + } op1; + struct { + uint8_t long_term_pic_num; + } op2; + struct { + uint8_t difference_of_pic_nums_minus1; + uint8_t long_term_frame_idx; + } op3; + struct { + uint8_t max_long_term_frame_idx_plus1; + } op4; + struct { + uint8_t long_term_frame_idx; + } op6; + }; + } op[MAX_OP]; +} dec_ref_pic_marking_t; + +enum slice_header_flags { + FIELD_PIC_FLAG = 0, + BOTTOM_FIELD_FLAG +}; + +typedef struct _slice_header_t { + uint8_t nal_unit_type; + uint8_t pps_id; + uint8_t padding; // TODO: padding needed because flags in secfw impl. is a big-endian uint16_t + union { + uint8_t flags; + struct { + uint8_t field_pic_flag:1; + uint8_t bottom_field_flag:1; + }; + }; + uint32_t first_mb_in_slice; + uint32_t frame_num; + uint16_t idr_pic_id; + uint16_t pic_order_cnt_lsb; + int32_t delta_pic_order_cnt[2]; + int32_t delta_pic_order_cnt_bottom; +} slice_header_t; + +typedef struct { + uint8_t type; + uint32_t offset; + uint8_t* data; + uint32_t length; + slice_header_t* slice_header; +} nalu_info_t; + +typedef struct { + uint32_t iv[4]; + uint32_t mode; + uint32_t app_id; +} pavp_info_t; + +#define MAX_NUM_NALUS 20 + +typedef struct { + uint8_t* data; + uint32_t length; + pavp_info_t* pavp; + dec_ref_pic_marking_t* dec_ref_pic_marking; + uint32_t num_nalus; + nalu_info_t nalus[MAX_NUM_NALUS]; +} frame_info_t; + +int parser_init(void); +int parse_frame(uint8_t* frame, uint32_t frame_size, uint8_t* nalu_data, uint32_t* nalu_data_size); + +// DEBUG PRINTING +void print_slice_header(slice_header_t* slice_header); +void print_dec_ref_pic_marking(dec_ref_pic_marking_t* dec_ref_pic_marking); +void print_data_bytes(uint8_t* data, uint32_t count); +void print_nalu_data(uint8_t* nalu_data, uint32_t nalu_data_size); + +// BYTESWAPPING +uint16_t byteswap_16(uint16_t word); +uint32_t byteswap_32(uint32_t dword); +void byteswap_slice_header(slice_header_t* slice_header); +void byteswap_dec_ref_pic_marking(dec_ref_pic_marking_t* dec_ref_pic_marking); +void byteswap_nalu_data(uint8_t* nalu_data, uint32_t nalu_data_size); + +#endif /* SEC_VIDEO_PARSER_H_ */ diff --git a/videodecoder/securevideo/cherrytrail/va_private.h b/videodecoder/securevideo/cherrytrail/va_private.h new file mode 100644 index 0000000..067e334 --- /dev/null +++ b/videodecoder/securevideo/cherrytrail/va_private.h @@ -0,0 +1,77 @@ +/*===================== 
begin_copyright_notice ================================== + +INTEL CONFIDENTIAL +Copyright 2009-2012 +Intel Corporation All Rights Reserved. + +The source code contained or described herein and all documents related to the +source code ("Material") are owned by Intel Corporation or its suppliers or +licensors. Title to the Material remains with Intel Corporation or its suppliers +and licensors. The Material contains trade secrets and proprietary and confidential +information of Intel or its suppliers and licensors. The Material is protected by +worldwide copyright and trade secret laws and treaty provisions. No part of the +Material may be used, copied, reproduced, modified, published, uploaded, posted, +transmitted, distributed, or disclosed in any way without Intels prior express +written permission. + +No license under any patent, copyright, trade secret or other intellectual +property right is granted to or conferred upon you by disclosure or delivery +of the Materials, either expressly, by implication, inducement, estoppel +or otherwise. Any license under such intellectual property rights must be +express and approved by Intel in writing. + +File Name: va_private.h +Abstract: libva private API head file + +Environment: Linux/Android + +Notes: + +======================= end_copyright_notice ==================================*/ +#ifndef __VA_PRIVATE_H__ +#define __VA_PRIVATE_H__ +#include +#define ENABLE_PAVP_LINUX 1 +// Misc parameter for encoder +#define VAEncMiscParameterTypePrivate -2 +// encryption parameters for PAVP +#define VAEncryptionParameterBufferType -3 + +typedef struct _VAEncMiscParameterPrivate +{ + unsigned int target_usage; // Valid values 1-7 for AVC & MPEG2. + unsigned int reserved[7]; // Reserved for future use. +} VAEncMiscParameterPrivate; + +/*VAEncrytpionParameterBuffer*/ +typedef struct _VAEncryptionParameterBuffer +{ + //Not used currently + unsigned int encryptionSupport; + //Not used currently + unsigned int hostEncryptMode; + // For IV, Counter input + unsigned int pavpAesCounter[2][4]; + // not used currently + unsigned int pavpIndex; + // PAVP mode, CTR, CBC, DEDE etc + unsigned int pavpCounterMode; + unsigned int pavpEncryptionType; + // not used currently + unsigned int pavpInputSize[2]; + // not used currently + unsigned int pavpBufferSize[2]; + // not used currently + VABufferID pvap_buf; + // set to TRUE if protected media + unsigned int pavpHasBeenEnabled; + // not used currently + unsigned int IntermmediatedBufReq; + // not used currently + unsigned int uiCounterIncrement; + // AppId: PAVP sessin Index from application + unsigned int app_id; + +} VAEncryptionParameterBuffer; + +#endif diff --git a/videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.cpp new file mode 100644 index 0000000..ab7bc7e --- /dev/null +++ b/videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.cpp @@ -0,0 +1,518 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. 
No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#include "VideoDecoderAVCSecure.h" +#include "VideoDecoderTrace.h" +#include + + +#define STARTCODE_00 0x00 +#define STARTCODE_01 0x01 +#define STARTCODE_PREFIX_LEN 3 +#define NALU_TYPE_MASK 0x1F + + +// mask for little endian, to mast the second and fourth bytes in the byte stream +#define STARTCODE_MASK0 0xFF000000 //0x00FF0000 +#define STARTCODE_MASK1 0x0000FF00 //0x000000FF + + +typedef enum { + NAL_UNIT_TYPE_unspecified0 = 0, + NAL_UNIT_TYPE_SLICE, + NAL_UNIT_TYPE_DPA, + NAL_UNIT_TYPE_DPB, + NAL_UNIT_TYPE_DPC, + NAL_UNIT_TYPE_IDR, + NAL_UNIT_TYPE_SEI, + NAL_UNIT_TYPE_SPS, + NAL_UNIT_TYPE_PPS, + NAL_UNIT_TYPE_Acc_unit_delimiter, + NAL_UNIT_TYPE_EOSeq, + NAL_UNIT_TYPE_EOstream, + NAL_UNIT_TYPE_filler_data, + NAL_UNIT_TYPE_SPS_extension, + NAL_UNIT_TYPE_Reserved14, + NAL_UNIT_TYPE_Reserved15, + NAL_UNIT_TYPE_Reserved16, + NAL_UNIT_TYPE_Reserved17, + NAL_UNIT_TYPE_Reserved18, + NAL_UNIT_TYPE_ACP, + NAL_UNIT_TYPE_Reserved20, + NAL_UNIT_TYPE_Reserved21, + NAL_UNIT_TYPE_Reserved22, + NAL_UNIT_TYPE_Reserved23, + NAL_UNIT_TYPE_unspecified24, +} NAL_UNIT_TYPE; + +#ifndef min +#define min(X, Y) ((X) <(Y) ? (X) : (Y)) +#endif + + +static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01}; + + +VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) + : VideoDecoderAVC(mimeType), + mNaluHeaderBuffer(NULL), + mInputBuffer(NULL) { + + memset(&mMetadata, 0, sizeof(NaluMetadata)); + memset(&mByteStream, 0, sizeof(NaluByteStream)); +} + +VideoDecoderAVCSecure::~VideoDecoderAVCSecure() { +} + +Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { + Decode_Status status = VideoDecoderAVC::start(buffer); + if (status != DECODE_SUCCESS) { + return status; + } + + mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER]; + mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER]; + mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; + + if (mMetadata.naluInfo == NULL || + mByteStream.byteStream == NULL || + mNaluHeaderBuffer == NULL) { + ETRACE("Failed to allocate memory."); + // TODO: release all allocated memory + return DECODE_MEMORY_FAIL; + } + return status; +} + +void VideoDecoderAVCSecure::stop(void) { + VideoDecoderAVC::stop(); + + if (mMetadata.naluInfo) { + delete [] mMetadata.naluInfo; + mMetadata.naluInfo = NULL; + } + + if (mByteStream.byteStream) { + delete [] mByteStream.byteStream; + mByteStream.byteStream = NULL; + } + + if (mNaluHeaderBuffer) { + delete [] mNaluHeaderBuffer; + mNaluHeaderBuffer = NULL; + } +} + +Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { + Decode_Status status; + int32_t sizeAccumulated = 0; + int32_t sizeLeft = 0; + uint8_t *pByteStream = NULL; + NaluInfo *pNaluInfo = mMetadata.naluInfo; + + if (buffer->flag & IS_SECURE_DATA) { + // NALU headers are appended to encrypted video bitstream + // |...encrypted video bitstream (16 bytes aligned)...| 4 bytes of header size |...NALU headers..| + pByteStream = buffer->data + 
buffer->size + 4; + sizeLeft = *(int32_t *)(buffer->data + buffer->size); + VTRACE("%s sizeLeft: %d buffer->size: %#x", __func__, sizeLeft, buffer->size); + mInputBuffer = buffer->data; + } else { + status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream); + CHECK_STATUS("parseAnnexBStream"); + pByteStream = mByteStream.byteStream; + sizeLeft = mByteStream.streamPos; + mInputBuffer = buffer->data; + } + if (sizeLeft < 4) { + ETRACE("Not enough data to read number of NALU."); + return DECODE_INVALID_DATA; + } + + // read number of NALU + memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t)); + pByteStream += 4; + sizeLeft -= 4; + + if (mMetadata.naluNumber == 0) { + WTRACE("Number of NALU is ZERO!"); + return DECODE_SUCCESS; + } + + for (int32_t i = 0; i < mMetadata.naluNumber; i++) { + if (sizeLeft < 12) { + ETRACE("Not enough data to parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft); + return DECODE_INVALID_DATA; + } + sizeLeft -= 12; + // read NALU offset + memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU size + memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU header length + memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + + if (sizeLeft < pNaluInfo->naluHeaderLen) { + ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen); + return DECODE_INVALID_DATA; + } + + sizeLeft -= pNaluInfo->naluHeaderLen; + + if (pNaluInfo->naluHeaderLen) { + // copy start code prefix to buffer + memcpy(mNaluHeaderBuffer + sizeAccumulated, + startcodePrefix, + STARTCODE_PREFIX_LEN); + sizeAccumulated += STARTCODE_PREFIX_LEN; + + // copy NALU header + memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen); + pByteStream += pNaluInfo->naluHeaderLen; + + sizeAccumulated += pNaluInfo->naluHeaderLen; + } else { + WTRACE("header len is zero for NALU %d", i); + } + + // for next NALU + pNaluInfo++; + } + + buffer->data = mNaluHeaderBuffer; + buffer->size = sizeAccumulated; + + return VideoDecoderAVC::decode(buffer); +} + + +Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { + + Decode_Status status; + VAStatus vaStatus; + uint32_t bufferIDCount = 0; + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID bufferIDs[4]; + + vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); + vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); + VAPictureParameterBufferH264 *picParam = picData->pic_parms; + VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); + + if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { + // either condition indicates start of a new frame + if (sliceParam->first_mb_in_slice != 0) { + WTRACE("The first slice is lost."); + // TODO: handle the first slice lost + } + if (mDecodingFrame) { + // interlace content, complete decoding the first field + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS("vaEndPicture"); + + // for interlace content, top field may be valid only after the second field is parsed + mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; + } + + // Check there is no reference frame loss before decoding a frame + + // Update the reference frames and surface IDs for DPB and current frame + status = 
updateDPB(picParam); + CHECK_STATUS("updateDPB"); + + //We have to provide a hacked DPB rather than complete DPB for libva as workaround + status = updateReferenceFrames(picData); + CHECK_STATUS("updateReferenceFrames"); + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); + + // start decoding a frame + mDecodingFrame = true; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferH264), + 1, + picParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferH264), + 1, + data->IQ_matrix_buf, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); + bufferIDCount++; + } + + status = setReference(sliceParam); + CHECK_STATUS("setReference"); + + // find which naluinfo is correlated to current slice + int naluIndex = 0; + uint32_t accumulatedHeaderLen = 0; + uint32_t headerLen = 0; + for (; naluIndex < mMetadata.naluNumber; naluIndex++) { + headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen; + if (headerLen == 0) { + WTRACE("lenght of current NAL unit is 0."); + continue; + } + accumulatedHeaderLen += STARTCODE_PREFIX_LEN; + if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) { + break; + } + accumulatedHeaderLen += headerLen; + } + + if (sliceData->slice_offset != accumulatedHeaderLen) { + WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen); + } + + sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen; + uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset; + uint32_t slice_offset_shift = sliceOffset % 16; + sliceParam->slice_data_offset += slice_offset_shift; + sliceData->slice_size = (sliceParam->slice_data_size + slice_offset_shift + 0xF) & ~0xF; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264), + 1, + sliceParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); + bufferIDCount++; + + // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to start codes of current NAL unit + // offset points to first byte of NAL unit + + if (mInputBuffer != NULL) { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + sliceData->slice_size, //Slice size + 1, // num_elements + mInputBuffer + sliceOffset - slice_offset_shift, + &bufferIDs[bufferIDCount]); + } else { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAProtectedSliceDataBufferType, + sliceData->slice_size, //size + 1, //num_elements + (uint8_t*)sliceOffset, // IMR offset + &bufferIDs[bufferIDCount]); + } + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + bufferIDCount++; + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + bufferIDs, + bufferIDCount); + CHECK_VA_STATUS("vaRenderPicture"); + + return DECODE_SUCCESS; +} + + +// Parse byte string pattern "0x000001" (3 bytes) in the current buffer. +// Returns offset of position following the pattern in the buffer if pattern is found or -1 if not found. 
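+// Note: the returned offset points at the NAL unit header byte, i.e. the first byte
+// after the 3-byte "0x000001" prefix; callers re-scan from offset+1 to find the next unit.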
+int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) { + uint8_t *ptr; + uint32_t left = 0, data = 0, phase = 0; + uint8_t mask1 = 0, mask2 = 0; + + /* Meaning of phase: + 0: initial status, "0x000001" bytes are not found so far; + 1: one "0x00" byte is found; + 2: two or more consecutive "0x00" bytes" are found; + 3: "0x000001" patten is found ; + 4: if there is one more byte after "0x000001"; + */ + + left = length; + ptr = (uint8_t *) (stream + offset); + phase = 0; + + // parse until there is more data and start code not found + while ((left > 0) && (phase < 3)) { + // Check if the address is 32-bit aligned & phase=0, if thats the case we can check 4 bytes instead of one byte at a time. + if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) { + while (left > 3) { + data = *((uint32_t *)ptr); + mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0)); + mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1)); + // If second byte and fourth byte are not zero's then we cannot have a start code here, + // as we need two consecutive zero bytes for a start code pattern. + if (mask1 && mask2) { + // skip 4 bytes and start over + ptr += 4; + left -=4; + continue; + } else { + break; + } + } + } + + // At this point either data is not on a 32-bit boundary or phase > 0 so we look at one byte at a time + if (left > 0) { + if (*ptr == STARTCODE_00) { + phase++; + if (phase > 2) { + // more than 2 consecutive '0x00' bytes is found + phase = 2; + } + } else if ((*ptr == STARTCODE_01) && (phase == 2)) { + // start code is found + phase = 3; + } else { + // reset lookup + phase = 0; + } + ptr++; + left--; + } + } + + if ((left > 0) && (phase == 3)) { + phase = 4; + // return offset of position following the pattern in the buffer which matches "0x000001" byte string + return (int32_t)(ptr - stream); + } + return -1; +} + + +Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) { + uint8_t naluType; + int32_t naluHeaderLen; + + naluType = *(uint8_t *)(stream + naluStream->naluOffset); + naluType &= NALU_TYPE_MASK; + // first update nalu header length based on nalu type + if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) { + // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes + naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE); + } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) { + //sps, pps, sei, etc, return the entire NAL unit in clear + naluHeaderLen = naluStream->naluLen; + } else { + return DECODE_FRAME_DROPPED; + } + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t)); + naluStream->streamPos += 4; + + if (naluHeaderLen) { + memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen); + naluStream->streamPos += naluHeaderLen; + } + return DECODE_SUCCESS; +} + + +// parse start-code prefixed stream, also knowns as Annex B byte stream, commonly used in AVI, ES, MPEG2 TS container +Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) { + int32_t naluOffset, offset, left; + NaluInfo *info; + uint32_t ret = DECODE_SUCCESS; + + naluOffset = 0; + 
offset = 0; + left = length; + + // leave 4 bytes to copy nalu count + naluStream->streamPos = 4; + naluStream->naluCount = 0; + memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER); + + for (; ;) { + naluOffset = findNalUnitOffset(stream, offset, left); + if (naluOffset == -1) { + break; + } + + if (naluStream->naluCount == 0) { + naluStream->naluOffset = naluOffset; + } else { + naluStream->naluLen = naluOffset - naluStream->naluOffset - STARTCODE_PREFIX_LEN; + ret = copyNaluHeader(stream, naluStream); + if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) { + LOGW("copyNaluHeader returned %d", ret); + return ret; + } + // starting position for next NALU + naluStream->naluOffset = naluOffset; + } + + if (ret == DECODE_SUCCESS) { + naluStream->naluCount++; + } + + // update next lookup position and length + offset = naluOffset + 1; // skip one byte of NAL unit type + left = length - offset; + } + + if (naluStream->naluCount > 0) { + naluStream->naluLen = length - naluStream->naluOffset; + memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t)); + // ignore return value, either DECODE_SUCCESS or DECODE_FRAME_DROPPED + copyNaluHeader(stream, naluStream); + return DECODE_SUCCESS; + } + + LOGW("number of valid NALU is 0!"); + return DECODE_SUCCESS; +} + diff --git a/videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.h b/videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.h new file mode 100644 index 0000000..af5ae44 --- /dev/null +++ b/videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.h @@ -0,0 +1,83 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef VIDEO_DECODER_AVC_SECURE_H_ +#define VIDEO_DECODER_AVC_SECURE_H_ + +#include "VideoDecoderAVC.h" + + +class VideoDecoderAVCSecure : public VideoDecoderAVC { +public: + VideoDecoderAVCSecure(const char *mimeType); + virtual ~VideoDecoderAVCSecure(); + + virtual Decode_Status start(VideoConfigBuffer *buffer); + virtual void stop(void); + + // data in the decoded buffer is all encrypted. 
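+    // For secure input (IS_SECURE_DATA) the buffer layout is:
+    // |encrypted bitstream (16-byte aligned)|int32 header size|NALU headers|.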
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer); + +private: + enum { + MAX_SLICE_HEADER_SIZE = 30, + MAX_NALU_HEADER_BUFFER = 8192, + MAX_NALU_NUMBER = 400, // > 4096/12 + }; + + // Information of Network Abstraction Layer Unit + struct NaluInfo { + int32_t naluOffset; // offset of NAL unit in the firewalled buffer + int32_t naluLen; // length of NAL unit + int32_t naluHeaderLen; // length of NAL unit header + }; + + struct NaluMetadata { + NaluInfo *naluInfo; + int32_t naluNumber; // number of NAL units + }; + + struct NaluByteStream { + int32_t naluOffset; + int32_t naluLen; + int32_t streamPos; + uint8_t *byteStream; // 4 bytes of naluCount, 4 bytes of naluOffset, 4 bytes of naulLen, 4 bytes of naluHeaderLen, followed by naluHeaderData + int32_t naluCount; + }; + + virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); + int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length); + Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream); + Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream); + +private: + NaluMetadata mMetadata; + NaluByteStream mByteStream; + uint8_t *mNaluHeaderBuffer; + uint8_t *mInputBuffer; +}; + + + +#endif /* VIDEO_DECODER_AVC_SECURE_H_ */ diff --git a/videodecoder/securevideo/moorefield b/videodecoder/securevideo/moorefield deleted file mode 120000 index 278d417..0000000 --- a/videodecoder/securevideo/moorefield +++ /dev/null @@ -1 +0,0 @@ -merrifield/ \ No newline at end of file diff --git a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp new file mode 100644 index 0000000..ab7bc7e --- /dev/null +++ b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp @@ -0,0 +1,518 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +*/ + +#include "VideoDecoderAVCSecure.h" +#include "VideoDecoderTrace.h" +#include + + +#define STARTCODE_00 0x00 +#define STARTCODE_01 0x01 +#define STARTCODE_PREFIX_LEN 3 +#define NALU_TYPE_MASK 0x1F + + +// mask for little endian, to mast the second and fourth bytes in the byte stream +#define STARTCODE_MASK0 0xFF000000 //0x00FF0000 +#define STARTCODE_MASK1 0x0000FF00 //0x000000FF + + +typedef enum { + NAL_UNIT_TYPE_unspecified0 = 0, + NAL_UNIT_TYPE_SLICE, + NAL_UNIT_TYPE_DPA, + NAL_UNIT_TYPE_DPB, + NAL_UNIT_TYPE_DPC, + NAL_UNIT_TYPE_IDR, + NAL_UNIT_TYPE_SEI, + NAL_UNIT_TYPE_SPS, + NAL_UNIT_TYPE_PPS, + NAL_UNIT_TYPE_Acc_unit_delimiter, + NAL_UNIT_TYPE_EOSeq, + NAL_UNIT_TYPE_EOstream, + NAL_UNIT_TYPE_filler_data, + NAL_UNIT_TYPE_SPS_extension, + NAL_UNIT_TYPE_Reserved14, + NAL_UNIT_TYPE_Reserved15, + NAL_UNIT_TYPE_Reserved16, + NAL_UNIT_TYPE_Reserved17, + NAL_UNIT_TYPE_Reserved18, + NAL_UNIT_TYPE_ACP, + NAL_UNIT_TYPE_Reserved20, + NAL_UNIT_TYPE_Reserved21, + NAL_UNIT_TYPE_Reserved22, + NAL_UNIT_TYPE_Reserved23, + NAL_UNIT_TYPE_unspecified24, +} NAL_UNIT_TYPE; + +#ifndef min +#define min(X, Y) ((X) <(Y) ? (X) : (Y)) +#endif + + +static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01}; + + +VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) + : VideoDecoderAVC(mimeType), + mNaluHeaderBuffer(NULL), + mInputBuffer(NULL) { + + memset(&mMetadata, 0, sizeof(NaluMetadata)); + memset(&mByteStream, 0, sizeof(NaluByteStream)); +} + +VideoDecoderAVCSecure::~VideoDecoderAVCSecure() { +} + +Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { + Decode_Status status = VideoDecoderAVC::start(buffer); + if (status != DECODE_SUCCESS) { + return status; + } + + mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER]; + mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER]; + mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; + + if (mMetadata.naluInfo == NULL || + mByteStream.byteStream == NULL || + mNaluHeaderBuffer == NULL) { + ETRACE("Failed to allocate memory."); + // TODO: release all allocated memory + return DECODE_MEMORY_FAIL; + } + return status; +} + +void VideoDecoderAVCSecure::stop(void) { + VideoDecoderAVC::stop(); + + if (mMetadata.naluInfo) { + delete [] mMetadata.naluInfo; + mMetadata.naluInfo = NULL; + } + + if (mByteStream.byteStream) { + delete [] mByteStream.byteStream; + mByteStream.byteStream = NULL; + } + + if (mNaluHeaderBuffer) { + delete [] mNaluHeaderBuffer; + mNaluHeaderBuffer = NULL; + } +} + +Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { + Decode_Status status; + int32_t sizeAccumulated = 0; + int32_t sizeLeft = 0; + uint8_t *pByteStream = NULL; + NaluInfo *pNaluInfo = mMetadata.naluInfo; + + if (buffer->flag & IS_SECURE_DATA) { + // NALU headers are appended to encrypted video bitstream + // |...encrypted video bitstream (16 bytes aligned)...| 4 bytes of header size |...NALU headers..| + pByteStream = buffer->data + buffer->size + 4; + sizeLeft = *(int32_t *)(buffer->data + buffer->size); + VTRACE("%s sizeLeft: %d buffer->size: %#x", __func__, sizeLeft, buffer->size); + mInputBuffer = buffer->data; + } else { + status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream); + CHECK_STATUS("parseAnnexBStream"); + pByteStream = mByteStream.byteStream; + sizeLeft = mByteStream.streamPos; + mInputBuffer = buffer->data; + } + if (sizeLeft < 4) { + ETRACE("Not enough data to read number of NALU."); + return DECODE_INVALID_DATA; + } + + // read number of NALU + 
memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t)); + pByteStream += 4; + sizeLeft -= 4; + + if (mMetadata.naluNumber == 0) { + WTRACE("Number of NALU is ZERO!"); + return DECODE_SUCCESS; + } + + for (int32_t i = 0; i < mMetadata.naluNumber; i++) { + if (sizeLeft < 12) { + ETRACE("Not enough data to parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft); + return DECODE_INVALID_DATA; + } + sizeLeft -= 12; + // read NALU offset + memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU size + memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + // read NALU header length + memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t)); + pByteStream += 4; + + + if (sizeLeft < pNaluInfo->naluHeaderLen) { + ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen); + return DECODE_INVALID_DATA; + } + + sizeLeft -= pNaluInfo->naluHeaderLen; + + if (pNaluInfo->naluHeaderLen) { + // copy start code prefix to buffer + memcpy(mNaluHeaderBuffer + sizeAccumulated, + startcodePrefix, + STARTCODE_PREFIX_LEN); + sizeAccumulated += STARTCODE_PREFIX_LEN; + + // copy NALU header + memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen); + pByteStream += pNaluInfo->naluHeaderLen; + + sizeAccumulated += pNaluInfo->naluHeaderLen; + } else { + WTRACE("header len is zero for NALU %d", i); + } + + // for next NALU + pNaluInfo++; + } + + buffer->data = mNaluHeaderBuffer; + buffer->size = sizeAccumulated; + + return VideoDecoderAVC::decode(buffer); +} + + +Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { + + Decode_Status status; + VAStatus vaStatus; + uint32_t bufferIDCount = 0; + // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data + VABufferID bufferIDs[4]; + + vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); + vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); + VAPictureParameterBufferH264 *picParam = picData->pic_parms; + VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); + + if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { + // either condition indicates start of a new frame + if (sliceParam->first_mb_in_slice != 0) { + WTRACE("The first slice is lost."); + // TODO: handle the first slice lost + } + if (mDecodingFrame) { + // interlace content, complete decoding the first field + vaStatus = vaEndPicture(mVADisplay, mVAContext); + CHECK_VA_STATUS("vaEndPicture"); + + // for interlace content, top field may be valid only after the second field is parsed + mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; + } + + // Check there is no reference frame loss before decoding a frame + + // Update the reference frames and surface IDs for DPB and current frame + status = updateDPB(picParam); + CHECK_STATUS("updateDPB"); + + //We have to provide a hacked DPB rather than complete DPB for libva as workaround + status = updateReferenceFrames(picData); + CHECK_STATUS("updateReferenceFrames"); + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); + + // start decoding a frame + mDecodingFrame = true; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAPictureParameterBufferType, + sizeof(VAPictureParameterBufferH264), + 1, + picParam, + 
&bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreatePictureParameterBuffer"); + bufferIDCount++; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAIQMatrixBufferType, + sizeof(VAIQMatrixBufferH264), + 1, + data->IQ_matrix_buf, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateIQMatrixBuffer"); + bufferIDCount++; + } + + status = setReference(sliceParam); + CHECK_STATUS("setReference"); + + // find which naluinfo is correlated to current slice + int naluIndex = 0; + uint32_t accumulatedHeaderLen = 0; + uint32_t headerLen = 0; + for (; naluIndex < mMetadata.naluNumber; naluIndex++) { + headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen; + if (headerLen == 0) { + WTRACE("header length of current NAL unit is 0."); + continue; + } + accumulatedHeaderLen += STARTCODE_PREFIX_LEN; + if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) { + break; + } + accumulatedHeaderLen += headerLen; + } + + if (sliceData->slice_offset != accumulatedHeaderLen) { + WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen); + } + + sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen; + uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset; + uint32_t slice_offset_shift = sliceOffset % 16; + sliceParam->slice_data_offset += slice_offset_shift; + sliceData->slice_size = (sliceParam->slice_data_size + slice_offset_shift + 0xF) & ~0xF; + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceParameterBufferType, + sizeof(VASliceParameterBufferH264), + 1, + sliceParam, + &bufferIDs[bufferIDCount]); + CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); + bufferIDCount++; + + // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to start codes of current NAL unit + // offset points to first byte of NAL unit + + if (mInputBuffer != NULL) { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + sliceData->slice_size, //Slice size + 1, // num_elements + mInputBuffer + sliceOffset - slice_offset_shift, + &bufferIDs[bufferIDCount]); + } else { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAProtectedSliceDataBufferType, + sliceData->slice_size, //size + 1, //num_elements + (uint8_t*)sliceOffset, // IMR offset + &bufferIDs[bufferIDCount]); + } + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + bufferIDCount++; + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + bufferIDs, + bufferIDCount); + CHECK_VA_STATUS("vaRenderPicture"); + + return DECODE_SUCCESS; +} + + +// Parse byte string pattern "0x000001" (3 bytes) in the current buffer. +// Returns offset of position following the pattern in the buffer if pattern is found or -1 if not found. +int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) { + uint8_t *ptr; + uint32_t left = 0, data = 0, phase = 0; + uint8_t mask1 = 0, mask2 = 0; + + /* Meaning of phase: + 0: initial status, "0x000001" bytes are not found so far; + 1: one "0x00" byte is found; + 2: two or more consecutive "0x00" bytes are found; + 3: "0x000001" pattern is found; + 4: if there is one more byte after "0x000001"; + */ + + left = length; + ptr = (uint8_t *) (stream + offset); + phase = 0; + + // parse while there is more data and the start code has not been found + while ((left > 0) && (phase < 3)) { + // Check if the address is 32-bit aligned & phase=0, if that's the case we can check 4 bytes instead of one byte at a time.
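+ // In a little-endian 32-bit load the four stream bytes b0..b3 land as (b3<<24)|(b2<<16)|(b1<<8)|b0, so (data & STARTCODE_MASK1) tests the second byte in stream order and (data & STARTCODE_MASK0) the fourth; any two consecutive zero bytes within or straddling this word must include one of those two positions, so when both are non-zero the whole word can be skipped.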
+ if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) { + while (left > 3) { + data = *((uint32_t *)ptr); + mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0)); + mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1)); + // If the second and fourth bytes are not zero then we cannot have a start code here, + // as we need two consecutive zero bytes for a start code pattern. + if (mask1 && mask2) { + // skip 4 bytes and start over + ptr += 4; + left -= 4; + continue; + } else { + break; + } + } + } + + // At this point either data is not on a 32-bit boundary or phase > 0 so we look at one byte at a time + if (left > 0) { + if (*ptr == STARTCODE_00) { + phase++; + if (phase > 2) { + // more than 2 consecutive '0x00' bytes are found + phase = 2; + } + } else if ((*ptr == STARTCODE_01) && (phase == 2)) { + // start code is found + phase = 3; + } else { + // reset lookup + phase = 0; + } + ptr++; + left--; + } + } + + if ((left > 0) && (phase == 3)) { + phase = 4; + // return offset of position following the pattern in the buffer which matches "0x000001" byte string + return (int32_t)(ptr - stream); + } + return -1; +} + + +Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) { + uint8_t naluType; + int32_t naluHeaderLen; + + naluType = *(uint8_t *)(stream + naluStream->naluOffset); + naluType &= NALU_TYPE_MASK; + // first update nalu header length based on nalu type + if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) { + // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes + naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE); + } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) { + // sps, pps, sei, etc.: return the entire NAL unit in the clear + naluHeaderLen = naluStream->naluLen; + } else { + return DECODE_FRAME_DROPPED; + } + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t)); + naluStream->streamPos += 4; + + memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t)); + naluStream->streamPos += 4; + + if (naluHeaderLen) { + memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen); + naluStream->streamPos += naluHeaderLen; + } + return DECODE_SUCCESS; +} + + +// parse start-code prefixed stream, also known as an Annex B byte stream, commonly used in AVI, ES, and MPEG2 TS containers +Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) { + int32_t naluOffset, offset, left; + NaluInfo *info; + uint32_t ret = DECODE_SUCCESS; + + naluOffset = 0; + offset = 0; + left = length; + + // leave 4 bytes to copy nalu count + naluStream->streamPos = 4; + naluStream->naluCount = 0; + memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER); + + for (; ;) { + naluOffset = findNalUnitOffset(stream, offset, left); + if (naluOffset == -1) { + break; + } + + if (naluStream->naluCount == 0) { + naluStream->naluOffset = naluOffset; + } else { + naluStream->naluLen = naluOffset - naluStream->naluOffset - STARTCODE_PREFIX_LEN; + ret = copyNaluHeader(stream, naluStream); + if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) { + LOGW("copyNaluHeader returned %d", ret); + return ret; + } + // starting position for next NALU + naluStream->naluOffset = naluOffset; + } + + if (ret ==
DECODE_SUCCESS) { + naluStream->naluCount++; + } + + // update next lookup position and length + offset = naluOffset + 1; // skip one byte of NAL unit type + left = length - offset; + } + + if (naluStream->naluCount > 0) { + naluStream->naluLen = length - naluStream->naluOffset; + memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t)); + // ignore return value, either DECODE_SUCCESS or DECODE_FRAME_DROPPED + copyNaluHeader(stream, naluStream); + return DECODE_SUCCESS; + } + + LOGW("number of valid NALU is 0!"); + return DECODE_SUCCESS; +} + diff --git a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h new file mode 100644 index 0000000..af5ae44 --- /dev/null +++ b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h @@ -0,0 +1,83 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009-2011 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef VIDEO_DECODER_AVC_SECURE_H_ +#define VIDEO_DECODER_AVC_SECURE_H_ + +#include "VideoDecoderAVC.h" + + +class VideoDecoderAVCSecure : public VideoDecoderAVC { +public: + VideoDecoderAVCSecure(const char *mimeType); + virtual ~VideoDecoderAVCSecure(); + + virtual Decode_Status start(VideoConfigBuffer *buffer); + virtual void stop(void); + + // data in the decoded buffer is all encrypted. 
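+ // When IS_SECURE_DATA is set, the input buffer is laid out as + // |encrypted bitstream (16-byte aligned)|4 bytes of header size|NALU headers|; + // otherwise it is a plain Annex B stream parsed with parseAnnexBStream().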
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer); + +private: + enum { + MAX_SLICE_HEADER_SIZE = 30, + MAX_NALU_HEADER_BUFFER = 8192, + MAX_NALU_NUMBER = 400, // > 4096/12 + }; + + // Information about a Network Abstraction Layer Unit + struct NaluInfo { + int32_t naluOffset; // offset of NAL unit in the firewalled buffer + int32_t naluLen; // length of NAL unit + int32_t naluHeaderLen; // length of NAL unit header + }; + + struct NaluMetadata { + NaluInfo *naluInfo; + int32_t naluNumber; // number of NAL units + }; + + struct NaluByteStream { + int32_t naluOffset; + int32_t naluLen; + int32_t streamPos; + uint8_t *byteStream; // 4 bytes of naluCount, 4 bytes of naluOffset, 4 bytes of naluLen, 4 bytes of naluHeaderLen, followed by naluHeaderData + int32_t naluCount; + }; + + virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); + int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length); + Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream); + Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream); + +private: + NaluMetadata mMetadata; + NaluByteStream mByteStream; + uint8_t *mNaluHeaderBuffer; + uint8_t *mInputBuffer; +}; + + + +#endif /* VIDEO_DECODER_AVC_SECURE_H_ */ -- cgit v1.2.3 From e331663fd992604462a7891ec17f6116877c111a Mon Sep 17 00:00:00 2001 From: Elaine Wang Date: Sat, 7 Dec 2013 01:10:38 +0800 Subject: Fix klocwork issue BZ: 152666 memset vinfo with 0 before it's used, in case a newly added field isn't initialized. Change-Id: Ie49e3c296783206745c7d123b609aba28a163ae0 Signed-off-by: Elaine Wang --- videoencoder/VideoEncoderBase.cpp | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 47b3f19..94fc65a 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1516,6 +1516,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA IntelMetadataBuffer imb; VASurfaceMap *map = NULL; + memset(&vinfo, 0, sizeof(ValueInfo)); if (mStoreMetaDataInBuffers.isEnabled) { //metadatabuffer mode LOG_I("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size); @@ -1568,9 +1569,6 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA vinfo.chromStride = mComParams.resolution.width; vinfo.format = VA_FOURCC_NV12; vinfo.s3dformat = 0xFFFFFFFF; -#ifdef INTEL_VIDEO_XPROC_SHARING - vinfo.sessionFlag = 0; -#endif } else { //get all info mapping needs imb.GetValueInfo(pvinfo); -- cgit v1.2.3 From 384932a819d32fdcdd0a327d66b0326b09748f90 Mon Sep 17 00:00:00 2001 From: gji2 Date: Wed, 4 Dec 2013 21:32:21 +0800 Subject: Support the CIR feature BZ:156747 Support the CIR feature Change-Id: Ida993c8918597287b636f2130da5eec1da98effd Signed-off-by: gji2 --- videoencoder/VideoEncoderAVC.cpp | 50 +++++++++++++++++++++++++++++++++------ videoencoder/VideoEncoderAVC.h | 1 + videoencoder/VideoEncoderBase.cpp | 24 +++++++++++++++++++ videoencoder/VideoEncoderBase.h | 1 + videoencoder/VideoEncoderDef.h | 23 ++++++++++++++++++ 5 files changed, 92 insertions(+), 7 deletions(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 85c1315..eb729b0 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -770,6 +770,16 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) { mRenderAIR = false; } + + if (mRenderCIR && +
(mComParams.refreshType == VIDEO_ENC_CIR || + mComParams.refreshType == VIDEO_ENC_BOTH)) { + + ret = renderCIR(); + CHECK_ENCODE_STATUS_RETURN("renderCIR"); + + mRenderCIR = false; + } if (mRenderFrameRate) { @@ -838,16 +848,42 @@ Encode_Status VideoEncoderAVC::renderMaxSliceSize() { return ENCODE_SUCCESS; } -Encode_Status VideoEncoderAVC::renderAIR() { - +Encode_Status VideoEncoderAVC::renderCIR(){ VAStatus vaStatus = VA_STATUS_SUCCESS; - LOG_V( "Begin\n\n"); + LOG_I( "%s Begin\n", __FUNCTION__); - if (mComParams.rcMode != RATE_CONTROL_VCM) { + VABufferID miscParamBufferCIRid; + VAEncMiscParameterBuffer *misc_param; + VAEncMiscParameterCIR *misc_cir_param; - LOG_W("Not in VCM mode, but call send_AIR\n"); - return ENCODE_SUCCESS; - } + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncMiscParameterBufferType, + sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterCIR), + 1, + NULL, + &miscParamBufferCIRid); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaStatus = vaMapBuffer(mVADisplay, miscParamBufferCIRid, (void **)&misc_param); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + + misc_param->type = VAEncMiscParameterTypeCIR; + misc_cir_param = (VAEncMiscParameterCIR *)misc_param->data; + misc_cir_param->cir_num_mbs = mComParams.cirParams.cir_num_mbs; + LOG_I( "cir_num_mbs %d \n", misc_cir_param->cir_num_mbs); + + vaUnmapBuffer(mVADisplay, miscParamBufferCIRid); + CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferCIRid, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture"); + + return ENCODE_SUCCESS; +} + +Encode_Status VideoEncoderAVC::renderAIR() { + VAStatus vaStatus = VA_STATUS_SUCCESS; + LOG_V( "Begin\n\n"); VAEncMiscParameterBuffer *miscEncParamBuf; VAEncMiscParameterAIR *airParams; diff --git a/videoencoder/VideoEncoderAVC.h b/videoencoder/VideoEncoderAVC.h index c66acc3..2898b1d 100644 --- a/videoencoder/VideoEncoderAVC.h +++ b/videoencoder/VideoEncoderAVC.h @@ -41,6 +41,7 @@ private: Encode_Status renderMaxSliceSize(); Encode_Status renderAIR(); + Encode_Status renderCIR(); Encode_Status renderSequenceParams(EncodeTask *task); Encode_Status renderPictureParams(EncodeTask *task); Encode_Status renderSliceParams(EncodeTask *task); diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 94fc65a..4e49a40 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -24,6 +24,7 @@ VideoEncoderBase::VideoEncoderBase() ,mRenderMaxSliceSize(false) ,mRenderQP (false) ,mRenderAIR(false) + ,mRenderCIR(false) ,mRenderFrameRate(false) ,mRenderBitRate(false) ,mRenderHrd(false) @@ -1178,6 +1179,18 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { mRenderAIR = true; break; } + case VideoConfigTypeCIR: { + + VideoConfigCIR *configCIR = reinterpret_cast (videoEncConfig); + + if (configCIR->size != sizeof (VideoConfigCIR)) { + return ENCODE_INVALID_PARAMS; + } + + mComParams.cirParams = configCIR->cirParams; + mRenderCIR = true; + break; + } case VideoConfigTypeAVCIntraPeriod: case VideoConfigTypeNALSize: case VideoConfigTypeIDRRequest: @@ -1264,6 +1277,17 @@ Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) { configAIR->airParams = mComParams.airParams; break; } + case VideoConfigTypeCIR: { + + VideoConfigCIR *configCIR = reinterpret_cast (videoEncConfig); + + if (configCIR->size != sizeof (VideoConfigCIR)) { + return ENCODE_INVALID_PARAMS; + } + + configCIR->cirParams = mComParams.cirParams; + break; + } case 
VideoConfigTypeAVCIntraPeriod: case VideoConfigTypeNALSize: case VideoConfigTypeIDRRequest: diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 5ed51ad..b9ee8ac 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -116,6 +116,7 @@ protected: bool mRenderMaxSliceSize; //Max Slice Size bool mRenderQP; bool mRenderAIR; + bool mRenderCIR; bool mRenderFrameRate; bool mRenderBitRate; bool mRenderHrd; diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 2b48104..ee40fda 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -167,6 +167,17 @@ struct VideoEncSurfaceBuffer { VideoEncSurfaceBuffer *next; }; +struct CirParams { + uint32_t cir_num_mbs; + + CirParams &operator=(const CirParams &other) { + if (this == &other) return *this; + + this->cir_num_mbs = other.cir_num_mbs; + return *this; + } +}; + struct AirParams { uint32_t airMBs; uint32_t airThreshold; @@ -308,6 +319,7 @@ enum VideoParamConfigType { VideoConfigTypeSliceNum, VideoConfigTypeVP8, VideoConfigTypeVP8ReferenceFrame, + VideoConfigTypeCIR, VideoParamsConfigExtension }; @@ -337,6 +349,7 @@ struct VideoParamsCommon : VideoParamConfigSet { VideoIntraRefreshType refreshType; int32_t cyclicFrameInterval; AirParams airParams; + CirParams cirParams; uint32_t disableDeblocking; bool syncEncMode; //CodedBuffer properties @@ -569,6 +582,16 @@ struct VideoConfigCyclicFrameInterval : VideoParamConfigSet { int32_t cyclicFrameInterval; }; +struct VideoConfigCIR : VideoParamConfigSet { + + VideoConfigCIR() { + type = VideoConfigTypeCIR; + size = sizeof(VideoConfigCIR); + } + + CirParams cirParams; +}; + struct VideoConfigAIR : VideoParamConfigSet { VideoConfigAIR() { -- cgit v1.2.3 From b17fa10721d7a23ed158ee17f2b89ba8e4fe3f10 Mon Sep 17 00:00:00 2001 From: gji2 Date: Mon, 9 Dec 2013 02:50:57 +0800 Subject: Fix the wifi display fail problem. BZ:154696 Solve the gfx color format unsupported problem so that the wifi display use case works.
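For reference, the luma stride rule that this change extends to the RGBA/RGBX formats is a round-up of the width to the next 32-pixel boundary; a minimal sketch of that rounding (the helper name align32 is illustrative, not part of this codebase):

    /* Round a width up to the next multiple of 32 pixels, e.g. 1366 -> 1376.
     * Mirrors (h->iWidth + 31) & ~31 in GetGfxBufferInfo() below. */
    static inline int32_t align32(int32_t width) {
        return (width + 31) & ~31;
    }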
Change-Id: I1fde2496967985ad90a25c16993259020a05e23d Signed-off-by: gji2 --- videoencoder/VideoEncoderUtils.cpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp index 917e8da..210881e 100644 --- a/videoencoder/VideoEncoderUtils.cpp +++ b/videoencoder/VideoEncoderUtils.cpp @@ -164,6 +164,8 @@ Encode_Status GetGfxBufferInfo(int32_t handle, ValueInfo& vinfo){ /* only support OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar HAL_PIXEL_FORMAT_NV12 HAL_PIXEL_FORMAT_BGRA_8888 + HAL_PIXEL_FORMAT_RGBA_8888 + HAL_PIXEL_FORMAT_RGBX_8888 HAL_PIXEL_FORMAT_BGRX_8888 */ IMG_native_handle_t* h = (IMG_native_handle_t*) handle; @@ -183,7 +185,9 @@ Encode_Status GetGfxBufferInfo(int32_t handle, ValueInfo& vinfo){ vinfo.lumaStride = 512; #endif } else if ((h->iFormat == HAL_PIXEL_FORMAT_BGRA_8888)|| - ((h->iFormat == HAL_PIXEL_FORMAT_BGRX_8888))) { + (h->iFormat == HAL_PIXEL_FORMAT_RGBA_8888)|| + (h->iFormat == HAL_PIXEL_FORMAT_RGBX_8888)|| + (h->iFormat == HAL_PIXEL_FORMAT_BGRX_8888)) { vinfo.lumaStride = (h->iWidth + 31) & ~31; } else if (h->iFormat == OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar) { //nothing to do -- cgit v1.2.3 From 7f54274f7802a7e173b09d4aa8e31defc8c89634 Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Fri, 6 Dec 2013 15:53:37 +0800 Subject: libmix: unify VP8 va parameter usage BZ: 152589 unify some VP8 VA parameter usage Change-Id: I242d13dc06e44dfe2e9bec6dbbc64bd3028285e8 Signed-off-by: Tianmi Chen --- mixvbp/vbp_manager/vbp_vp8_parser.c | 39 +++++++++++++++++++----------- mixvbp/vbp_plugin/vp8/include/vp8_tables.h | 4 ++- 2 files changed, 28 insertions(+), 15 deletions(-) diff --git a/mixvbp/vbp_manager/vbp_vp8_parser.c b/mixvbp/vbp_manager/vbp_vp8_parser.c index 51f8236..29ee8ad 100755 --- a/mixvbp/vbp_manager/vbp_vp8_parser.c +++ b/mixvbp/vbp_manager/vbp_vp8_parser.c @@ -25,6 +25,7 @@ #include #include "vp8.h" +#include "vp8_tables.h" #include "vbp_loader.h" #include "vbp_utils.h" #include "vbp_vp8_parser.h" @@ -402,10 +403,10 @@ static uint32_t vbp_add_pic_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *qu { pic_parms->loop_filter_level[i] = baseline_filter_level[i]; } - if ((pic_parms->pic_fields.bits.version == 0) || (pic_parms->pic_fields.bits.version == 1)) - { - pic_parms->pic_fields.bits.loop_filter_disable = pic_parms->loop_filter_level[0] > 0 ? true : false; - } + + int profile = pic_parms->pic_fields.bits.version; + pic_parms->pic_fields.bits.loop_filter_disable = ((profile > 1) || (pic_parms->loop_filter_level[0] == 0)) ? 
true : false; + memcpy(pic_parms->loop_filter_deltas_ref_frame, pi->LoopFilter.DeltasRef, sizeof(char) * MAX_REF_LF_DELTAS); memcpy(pic_parms->loop_filter_deltas_mode, pi->LoopFilter.DeltasMode, sizeof(char) * MAX_MODE_LF_DELTAS); @@ -420,19 +421,28 @@ static uint32_t vbp_add_pic_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *qu pic_parms->prob_last = pi->prob_lf; pic_parms->prob_gf = pi->prob_gf; - FrameContextData *fc = &(parser->info.FrameContext); - memcpy(pic_parms->y_mode_probs, fc->Y_Mode_Prob, sizeof(unsigned char) * 4); - memcpy(pic_parms->uv_mode_probs, fc->UV_Mode_Prob, sizeof(unsigned char) * 3); - /* Motion vector context */ - for (i = 0; i < 2; i++) + if (pic_parms->pic_fields.bits.key_frame == KEY_FRAME) { - memcpy(pic_parms->mv_probs[i], fc->MVContext[i], sizeof(unsigned char) * 19); + memcpy(pic_parms->y_mode_probs, VP8_KF_YMode_Const, sizeof(VP8_KF_YMode_Const)); + memcpy(pic_parms->uv_mode_probs, VP8_KF_UVMode_Const, sizeof(VP8_KF_UVMode_Const)); + memcpy(pic_parms->mv_probs, VP8_MV_DefaultMVContext, sizeof(VP8_MV_DefaultMVContext)); + } + else + { + FrameContextData *fc = &(parser->info.FrameContext); + memcpy(pic_parms->y_mode_probs, fc->Y_Mode_Prob, sizeof(unsigned char) * 4); + memcpy(pic_parms->uv_mode_probs, fc->UV_Mode_Prob, sizeof(unsigned char) * 3); + /* Motion vector context */ + for (i = 0; i < 2; i++) + { + memcpy(pic_parms->mv_probs[i], fc->MVContext[i], sizeof(unsigned char) * 19); + } } /* Bool coder */ pic_parms->bool_coder_ctx.range = pi->bool_coder.range; pic_parms->bool_coder_ctx.value = (pi->bool_coder.value >> 24) & 0xFF; - pic_parms->bool_coder_ctx.count = pi->bool_coder.count; + pic_parms->bool_coder_ctx.count = 8 - (pi->bool_coder.count & 0x07); //pic_parms->current_picture = VA_INVALID_SURFACE; pic_parms->last_ref_frame = VA_INVALID_SURFACE; @@ -460,9 +470,10 @@ static uint32_t vbp_add_slice_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 * vbp_picture_data_vp8 *pic_data = &(query_data->pic_data[pic_index]); vbp_slice_data_vp8 *slc_data = &(pic_data->slc_data[pic_data->num_slices]); - slc_data->buffer_addr = pi->source; + int slice_offset = (pi->frame_tag.frame_type == KEY_FRAME) ? 
10 : 3; + slc_data->buffer_addr = pi->source + slice_offset; slc_data->slice_offset = 0; - slc_data->slice_size = pi->source_sz; + slc_data->slice_size = pi->source_sz - slice_offset; VASliceParameterBufferVP8 *slc_parms = &(slc_data->slc_parms); /* number of bytes in the slice data buffer for this slice */ @@ -475,7 +486,7 @@ static uint32_t vbp_add_slice_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 * slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; /* the offset to the first bit of MB from the first byte of slice data */ - slc_parms->macroblock_offset = pi->header_bits; + slc_parms->macroblock_offset = pi->header_bits - (slice_offset << 3); /* Token Partitions */ slc_parms->num_of_partitions = pi->partition_count; diff --git a/mixvbp/vbp_plugin/vp8/include/vp8_tables.h b/mixvbp/vbp_plugin/vp8/include/vp8_tables.h index 6980834..00c5ad2 100755 --- a/mixvbp/vbp_plugin/vp8/include/vp8_tables.h +++ b/mixvbp/vbp_plugin/vp8/include/vp8_tables.h @@ -532,7 +532,9 @@ const vp8_prob VP8_Coefficient_Default_Probabilites[BLOCK_TYPES] [COEF_BANDS] [P }; const vp8_prob VP8_YMode_Const[VP8_YMODES-1] = {112,86,140,37}; +const vp8_prob VP8_KF_YMode_Const[VP8_YMODES-1] = {145,156,163,128}; -const vp8_prob VP8_UVMode_Const[VP8_UV_MODES-1] = {162, 101,204}; +const vp8_prob VP8_UVMode_Const[VP8_UV_MODES-1] = {162,101,204}; +const vp8_prob VP8_KF_UVMode_Const[VP8_UV_MODES-1] = {142,114,183}; #endif -- cgit v1.2.3 From 414b38d8d07ebcb1e0788b857c17d58cc1b5a339 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Thu, 5 Dec 2013 15:20:46 +0800 Subject: Refine async mode return value BZ: 150664 Fix one async mode defect; use a separate error code to identify an empty work queue Change-Id: I726d86153cff5c59e97ed98c063d8b002dadb0d8 Signed-off-by: Zhao Liang --- videoencoder/VideoEncoderBase.cpp | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 4e49a40..0954fcc 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -426,7 +426,7 @@ Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint3 } } else {//Nonblock mEncodeTask_Lock.unlock(); - return ENCODE_DATA_NOT_READY; + return ENCODE_NO_REQUEST_DATA; } } @@ -476,9 +476,8 @@ Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint3 mCurOutputTask->completed = true; //if need to call SyncSurface again ? - } else {//not ready yet - ret = ENCODE_DATA_NOT_READY; - goto CLEAN_UP; + } else {//encode not complete yet; keep all context and return directly + return ENCODE_DATA_NOT_READY; } } -- cgit v1.2.3 From 64ab3f5e6561c96f5ec1919080fd0117ad6fc5cb Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Tue, 17 Dec 2013 16:53:59 +0800 Subject: videodecoder: Video size change is independent of new sps BZ: 159376 Some malformed clips have more than one PPS, and the second PPS refers to a non-existent SPS, so the parser reports that a new SPS is found. The existing logic sets mSizeChanged to true once data->new_sps is 1, and HandleFormatChange is then called. As the buffer containing the second PPS is retained during the format change, the decoder falls into an infinite loop of format changes.
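Sketched as a call sequence, assuming the pre-patch behavior described above (illustrative, not actual code):

    // decode(buffer with 2nd PPS) -> parser reports data->new_sps = 1
    //   -> mSizeChanged = true -> decode() returns DECODE_FORMAT_CHANGE
    //   -> the retained buffer is re-submitted after the format change
    //   -> data->new_sps = 1 again -> DECODE_FORMAT_CHANGE again -> ...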
Change-Id: I8d5a3e22bf60d6abe2233fbf709605d490ea35f5 Signed-off-by: Dan Liang --- videodecoder/VideoDecoderAVC.cpp | 5 ----- 1 file changed, 5 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 57d3557..5548625 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -704,11 +704,6 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { mVideoFormatInfo.height = height; } - if (data->new_sps) { - mSizeChanged = true; - ITRACE("New sequence is received. Assuming video size is changed."); - } - // video_range has default value of 0. mVideoFormatInfo.videoRange = data->codec_data->video_full_range_flag; -- cgit v1.2.3 From 68b4b30433fc9387f9c95512db3fdb07db135604 Mon Sep 17 00:00:00 2001 From: "SUN,Jing" Date: Tue, 17 Dec 2013 13:17:17 +0800 Subject: Enabled JPEG HW encoding based on gralloc source buffers to Libmix's IntelImageEncoder. BZ: 159383 Issue: Camera may use gralloc buffers as JPEG encoding source in the future. Solution: enabled it and added its test function. Change-Id: Iea0dbdd865690f7d39097526d9c77b254ae8f7de Signed-off-by: SUN,Jing --- imageencoder/Android.mk | 4 +- imageencoder/ImageEncoder.cpp | 7 +++- imageencoder/test/main.cpp | 97 +++++++++++++++++++++++++++++++++---------- 3 files changed, 84 insertions(+), 24 deletions(-) diff --git a/imageencoder/Android.mk b/imageencoder/Android.mk index ff5b01b..0323576 100644 --- a/imageencoder/Android.mk +++ b/imageencoder/Android.mk @@ -37,7 +37,9 @@ LOCAL_C_INCLUDES += \ $(TARGET_OUT_HEADERS)/libmix_imageencoder LOCAL_SHARED_LIBRARIES := \ - libmix_imageencoder + libmix_imageencoder \ + libui \ + libgui LOCAL_MODULE := libmix_imageencoder_tester LOCAL_MODULE_TAGS := optional diff --git a/imageencoder/ImageEncoder.cpp b/imageencoder/ImageEncoder.cpp index 485d572..d63ad5e 100644 --- a/imageencoder/ImageEncoder.cpp +++ b/imageencoder/ImageEncoder.cpp @@ -140,8 +140,11 @@ int IntelImageEncoder::createSourceSurface(int source_type, void *source_buffer, if (NULL == source_buffer) { LOGE("createSourceSurface: the input buffer address can't be null!\n"); return VA_STATUS_ERROR_INVALID_PARAMETER; - } else if ((unsigned int)source_buffer & 0xFFF) { - LOGE("createSourceSurface: the input buffer wasn't aligned to 4096!\n"); + } + + if ((source_type == SURFACE_TYPE_USER_PTR) && + ((unsigned int)source_buffer & 0xFFF)) { + LOGE("createSourceSurface: the user input buffer wasn't aligned to 4096!\n"); return VA_STATUS_ERROR_INVALID_PARAMETER; } diff --git a/imageencoder/test/main.cpp b/imageencoder/test/main.cpp index 1433c04..9aa15a9 100644 --- a/imageencoder/test/main.cpp +++ b/imageencoder/test/main.cpp @@ -11,8 +11,16 @@ #include #include #include +#include +#include +#include +#include +#include +#include #include "ImageEncoder.h" +using namespace android; + inline unsigned long long int current_time(/*bool fixed*/) { struct timeval tv; @@ -41,6 +49,7 @@ static void usage(const char* pname) " -width: declaring the source's raw data width (0, 65536].\n" " -height: declaring the source's raw data height (0, 65536].\n" " -output: specifying the output JPEG's file path (.JPG or .jpg).\n" + " -surface: specifying the source surface type (0:malloc 1:gralloc).\n" " -burst (optional): enabling continuous encoding times (0, 50].\n" " -quality (optional): setting image quality [0, 100].\n" " -fix (optional): fixing CPU frequency for evaluating performance.\n\n" @@ -77,6 +86,7 @@ int main(int argc, char** argv) /* Parameter variables */ char *source_name = NULL; 
char *output_name = (char *)"./output.jpg"; + int surface_type = 0; int quality = DEFAULT_QUALITY; int burst = DEFAULT_BURST; int width = 0, height = 0; @@ -92,6 +102,8 @@ int main(int argc, char** argv) unsigned int source_size = 0, source_buffer_size = 0; unsigned int output_size = 0, output_buffer_size = 0; unsigned int read_size = 0, write_size = 0; + GraphicBuffer *gralloc_buffer = NULL; + void *gralloc_handle = NULL; void *source_buffer = NULL, *output_buffer = NULL; void *aligned_source_buffer = NULL, *current_position = NULL; void *surface_buffer = NULL, *surface_buffer_ptr = NULL; @@ -187,6 +199,19 @@ int main(int argc, char** argv) fprintf(stderr, "Invalid output file name: %s!\n", output_name); return 1; } + } else if (match_key(arg, "surface", strlen("surface"))) { + if (++argn >= argc) { + usage(pname); /* "surface" should be followed by a type */ + fprintf(stderr, "-surface should be followed by a type!\n"); + return 1; + } + + if ((1 != sscanf(argv[argn], "%d", &surface_type)) || + (surface_type < 0) || (surface_type > 1)) { + usage(pname); /* Invalid surface type */ + fprintf(stderr, "Invalid surface type!\n"); + return 1; + } } else if (match_key(arg, "burst", strlen("burst"))) { if (++argn >= argc) { usage(pname); /* "burst" should be followed by a quality value */ @@ -241,18 +266,38 @@ int main(int argc, char** argv) } source_size = width * height * YUV420_SAMPLE_SIZE; - stride = (width+0x3f) & (~0x3f); /* TopazHP requires stride must be an integral multiple of 64. */ - source_buffer_size = stride * height * YUV420_SAMPLE_SIZE; - source_buffer = malloc(source_buffer_size+4096); - if (NULL == source_buffer) { - fprintf(stderr, "Fail to allocate source buffer: %d(%s)!\n", errno, strerror(errno)); - close(source_fd); - return 1; + if (0 == surface_type) { /* malloc */ + /* TopazHP requires stride must be an integral multiple of 64.
*/ + stride = (width+0x3f) & (~0x3f); + source_buffer_size = stride * height * YUV420_SAMPLE_SIZE; + + source_buffer = malloc(source_buffer_size+4096); + if (NULL == source_buffer) { + fprintf(stderr, "Fail to allocate source buffer: %d(%s)!\n", errno, strerror(errno)); + close(source_fd); + return 1; + } + memset(source_buffer, 0, source_buffer_size+4096); + aligned_source_buffer = (void *)((unsigned int)source_buffer - + ((unsigned int)source_buffer)%4096 + 4096); + } else { /* gralloc */ + gralloc_buffer = new GraphicBuffer(width, height, VA_FOURCC_NV12, + GraphicBuffer::USAGE_SW_WRITE_RARELY); + if (NULL == gralloc_buffer) { + fprintf(stderr, "Allocating GraphicBuffer failed!\n"); + close(source_fd); + return 1; + } + stride = (gralloc_buffer->getNativeBuffer())->stride; + gralloc_handle = (void *)((gralloc_buffer->getNativeBuffer())->handle); + + if(gralloc_buffer->lock(GRALLOC_USAGE_SW_WRITE_RARELY, &aligned_source_buffer)) { + fprintf(stderr, "Locking GraphicBuffer failed!\n"); + close(source_fd); + return 1; + } } - memset(source_buffer, 0, source_buffer_size+4096); - aligned_source_buffer = (void *)((unsigned int)source_buffer - - ((unsigned int)source_buffer)%4096 + 4096); current_position = aligned_source_buffer; for (i=0; iunlock(); + } + if (read_size != source_size) { fprintf(stderr, "Incorrect source file size: %d(%s)!\n", read_size, strerror(errno)); fprintf(stderr, "The correct size should be : %d.\n", source_size); - free(source_buffer); + if (source_buffer) free(source_buffer); return 1; } @@ -390,6 +440,10 @@ int main(int argc, char** argv) printf("Width: %d\n", width); printf("Height: %d\n", height); printf("Output: %s\n", output_name); + if (0 == surface_type) + printf("Surface: malloc\n"); + else + printf("Surface: gralloc\n"); printf("Burst: %d times\n", burst); printf("Quality: %d\n", quality); if (true == fix_cpu_frequency) @@ -404,20 +458,21 @@ int main(int argc, char** argv) PERF_STOP(init_driver_time, fix_cpu_frequency); if (status != 0) { fprintf(stderr, "initializeEncoder failed (%d)!\n", status); - free(source_buffer); + if (source_buffer) free(source_buffer); return 1; } /* Create a source surface*/ PERF_START(create_source_time, fix_cpu_frequency); - status = image_encoder.createSourceSurface(SURFACE_TYPE_USER_PTR, aligned_source_buffer, + status = image_encoder.createSourceSurface(surface_type? SURFACE_TYPE_GRALLOC:SURFACE_TYPE_USER_PTR, + surface_type? 
gralloc_handle:aligned_source_buffer, width, height, stride, VA_RT_FORMAT_YUV420, &image_seq); PERF_STOP(create_source_time, fix_cpu_frequency); if (status != 0) { fprintf(stderr, "createSourceSurface failed (%d)!\n", status); - free(source_buffer); + if (source_buffer) free(source_buffer); image_encoder.deinitializeEncoder(); return 1; } @@ -428,7 +483,7 @@ int main(int argc, char** argv) PERF_STOP(create_context_time, fix_cpu_frequency); if (status != 0) { fprintf(stderr, "createContext failed (%d)!\n", status); - free(source_buffer); + if (source_buffer) free(source_buffer); image_encoder.deinitializeEncoder(); return 1; } @@ -436,7 +491,7 @@ int main(int argc, char** argv) output_buffer = malloc(output_buffer_size); if (NULL == output_buffer) { fprintf(stderr, "Fail to allocate output buffer: %d(%s)!\n", errno, strerror(errno)); - free(source_buffer); + if (source_buffer) free(source_buffer); image_encoder.deinitializeEncoder(); return 1; } @@ -452,7 +507,7 @@ int main(int argc, char** argv) PERF_STOP(prepare_encoding_time, fix_cpu_frequency); if (status != 0) { fprintf(stderr, "encode failed (%d)!\n", status); - free(source_buffer); + if (source_buffer) free(source_buffer); free(output_buffer); image_encoder.deinitializeEncoder(); return 1; @@ -463,7 +518,7 @@ int main(int argc, char** argv) PERF_STOP(encode_time, fix_cpu_frequency); if (status != 0) { fprintf(stderr, "getCoded failed (%d)!\n", status); - free(source_buffer); + if (source_buffer) free(source_buffer); free(output_buffer); image_encoder.deinitializeEncoder(); return 1; @@ -482,7 +537,7 @@ int main(int argc, char** argv) if (-1 == output_fd) { fprintf(stderr, "Error opening output file: %s (%s)!\n", final_output_name, strerror(errno)); - free(source_buffer); + if (source_buffer) free(source_buffer); free(output_buffer); image_encoder.deinitializeEncoder(); return 1; @@ -493,7 +548,7 @@ int main(int argc, char** argv) if (write_size != output_size) { fprintf(stderr, "Fail to write coded data to output file: %d(%s)!\n", write_size , strerror(errno)); - free(source_buffer); + if (source_buffer) free(source_buffer); free(output_buffer); image_encoder.deinitializeEncoder(); return 1; @@ -502,7 +557,7 @@ int main(int argc, char** argv) output_fd = -1; } - free(source_buffer); + if (source_buffer) free(source_buffer); free(output_buffer); /* Deinitialize encoder */ -- cgit v1.2.3 From 87199f8cbf30c41458b66fe21cad007e8d05c949 Mon Sep 17 00:00:00 2001 From: Richard Chevalier Date: Tue, 17 Dec 2013 23:04:07 +0100 Subject: [REVERTME] Fix CHT libmix compilation issue BZ: 159728 vendor/intel/hardware/PRIVATE/libmix/videodecoder/VideoDecoderBase.cpp:1305: error: undefined reference to 'vbp_update' Change-Id: If16ab7823aba5ee3cba410d251b3f284a5d3b059 Signed-off-by: Richard Chevalier --- videodecoder/Android.mk | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 32a1851..a9613b9 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -28,8 +28,7 @@ LOCAL_CFLAGS += -DUSE_INTEL_SECURE_AVC endif PLATFORM_USE_GEN_HW := \ - baytrail \ - cherrytrail + baytrail ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_USE_GEN_HW)),) LOCAL_CFLAGS += -DUSE_AVC_SHORT_FORMAT -DUSE_GEN_HW endif -- cgit v1.2.3 From a3e1ca2320c7370923d2cf52c09e82d3b8742ba0 Mon Sep 17 00:00:00 2001 From: Richard Chevalier Date: Mon, 23 Dec 2013 13:21:20 +0100 Subject: Revert "[REVERTME] Fix CHT libmix compilation issue" BZ: 159728 This reverts commit 
b553d058d2e60e59c19f42afbb6d56a7e82478e5. Change-Id: Idb293206e11c3219266d20b7042f4dee376877c8 Signed-off-by: Richard Chevalier --- videodecoder/Android.mk | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index a9613b9..32a1851 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -28,7 +28,8 @@ LOCAL_CFLAGS += -DUSE_INTEL_SECURE_AVC endif PLATFORM_USE_GEN_HW := \ - baytrail + baytrail \ + cherrytrail ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_USE_GEN_HW)),) LOCAL_CFLAGS += -DUSE_AVC_SHORT_FORMAT -DUSE_GEN_HW endif -- cgit v1.2.3 From 42da452242237e5a25261c9c1e307a2d5300b2c2 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Thu, 19 Dec 2013 16:11:59 +0800 Subject: Fix CHT libmix compilation issue BZ: 159728 Fix the cherrytrail libmix build problem Change-Id: I75c75443a8307ed642805b9eebe572d2edec5729 Signed-off-by: wfeng6 --- mixvbp/vbp_manager/Android.mk | 3 ++- mixvbp/vbp_plugin/h264/Android.mk | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/mixvbp/vbp_manager/Android.mk b/mixvbp/vbp_manager/Android.mk index e5f722d..e30a0e0 100755 --- a/mixvbp/vbp_manager/Android.mk +++ b/mixvbp/vbp_manager/Android.mk @@ -51,7 +51,8 @@ LOCAL_CFLAGS += -DUSE_HW_VP8 endif PLATFORM_SUPPORT_AVC_SHORT_FORMAT := \ - baytrail + baytrail \ + cherrytrail ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),) LOCAL_CFLAGS += -DUSE_AVC_SHORT_FORMAT diff --git a/mixvbp/vbp_plugin/h264/Android.mk b/mixvbp/vbp_plugin/h264/Android.mk index f661940..d667227 100755 --- a/mixvbp/vbp_plugin/h264/Android.mk +++ b/mixvbp/vbp_plugin/h264/Android.mk @@ -21,7 +21,7 @@ LOCAL_C_INCLUDES := \ $(MIXVBP_DIR)/vbp_manager/include \ $(MIXVBP_DIR)/vbp_manager/h264/include -PLATFORM_SUPPORT_AVC_SHORT_FORMAT := baytrail +PLATFORM_SUPPORT_AVC_SHORT_FORMAT := baytrail cherrytrail ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),) LOCAL_CFLAGS += -DUSE_AVC_SHORT_FORMAT endif @@ -36,7 +36,7 @@ LOCAL_SHARED_LIBRARIES := \ include $(BUILD_SHARED_LIBRARY) include $(CLEAR_VARS) -PLATFORM_SUPPORT_AVC_SHORT_FORMAT := baytrail +PLATFORM_SUPPORT_AVC_SHORT_FORMAT := baytrail cherrytrail ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),) LOCAL_SRC_FILES := \ -- cgit v1.2.3 From 021fe0497d3fe69a89fb111e1bb45f527d1bac42 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Tue, 28 May 2013 13:22:04 +0800 Subject: refine DRC for WMV BZ: 154291 if the resolution changes in the middle of a WMV stream during playback, update the video format to inform the upper layer of the change Change-Id: I6ad29d96c4d71478ddb7dc1d266c3dcd0ec3a580 Signed-off-by: ywan171 --- videodecoder/VideoDecoderWMV.cpp | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/videodecoder/VideoDecoderWMV.cpp b/videodecoder/VideoDecoderWMV.cpp index 6b3f4fb..1017ad8 100644 --- a/videodecoder/VideoDecoderWMV.cpp +++ b/videodecoder/VideoDecoderWMV.cpp @@ -101,6 +101,18 @@ Decode_Status VideoDecoderWMV::decode(VideoDecodeBuffer *buffer) { CHECK_STATUS("startVA"); } + if ((mVideoFormatInfo.width != data->se_data->CODED_WIDTH || + mVideoFormatInfo.height != data->se_data->CODED_HEIGHT) && + data->se_data->CODED_WIDTH && + data->se_data->CODED_HEIGHT) { + updateFormatInfo(data); + if (mSizeChanged) { + flushSurfaceBuffers(); + mSizeChanged = false; + return DECODE_FORMAT_CHANGE; + } + } + status = decodeFrame(buffer, data); CHECK_STATUS("decodeFrame"); if (mSizeChanged) { -- cgit v1.2.3 From
80be17031d0eea093d730c2fdea405c0e61880a4 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Fri, 13 Dec 2013 13:06:57 +0800 Subject: refine DRC for MPEG4/H263 BZ: 154291 if the resolution changes in the middle of an MPEG4/H263 stream during playback, update the video format to inform the upper layer of the change Change-Id: Ie4e3ee4cf4fa5904c60e2a1d5266bcd3c03d6841 Signed-off-by: ywan171 --- videodecoder/VideoDecoderMPEG4.cpp | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index b99ed2e..0f72095 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -92,14 +92,21 @@ Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) { CHECK_STATUS("startVA"); } - status = decodeFrame(buffer, data); - CHECK_STATUS("decodeFrame"); - if (mSizeChanged) { - mSizeChanged = false; + if ((mVideoFormatInfo.width != (int32_t)data->codec_data.video_object_layer_width || + mVideoFormatInfo.height != (int32_t)data->codec_data.video_object_layer_height) && + data->codec_data.video_object_layer_width && + data->codec_data.video_object_layer_height) { + // update encoded image size + mVideoFormatInfo.width = data->codec_data.video_object_layer_width; + mVideoFormatInfo.height = data->codec_data.video_object_layer_height; flushSurfaceBuffers(); + ITRACE("Video size is changed."); return DECODE_FORMAT_CHANGE; } + status = decodeFrame(buffer, data); + CHECK_STATUS("decodeFrame"); + return status; } -- cgit v1.2.3 From 57a96092435d46885052e42faf0b26e38932e7f0 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Fri, 13 Dec 2013 13:15:46 +0800 Subject: remove HW capability check for VC1 on GEN driver BZ: 158110 remove HW capability check for VC1 on GEN driver Change-Id: I37556e25279a96caf0eb31c83442810c87553698 Signed-off-by: ywan171 --- videodecoder/VideoDecoderWMV.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderWMV.cpp b/videodecoder/VideoDecoderWMV.cpp index 1017ad8..7ae7cb1 100644 --- a/videodecoder/VideoDecoderWMV.cpp +++ b/videodecoder/VideoDecoderWMV.cpp @@ -535,6 +535,7 @@ Decode_Status VideoDecoderWMV::parseBuffer(uint8_t *data, int32_t size, vbp_data Decode_Status VideoDecoderWMV::checkHardwareCapability(VAProfile profile) { +#ifndef USE_GEN_HW VAStatus vaStatus; VAConfigAttrib cfgAttribs[2]; cfgAttribs[0].type = VAConfigAttribMaxPictureWidth; @@ -547,7 +548,7 @@ Decode_Status VideoDecoderWMV::checkHardwareCapability(VAProfile profile) { cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height); return DECODE_DRIVER_FAIL; } - +#endif return DECODE_SUCCESS; } -- cgit v1.2.3 From 1a83d96f0b491b5b53a06a629d5e14e5765ce6db Mon Sep 17 00:00:00 2001 From: ywan171 Date: Wed, 18 Dec 2013 21:11:10 +0800 Subject: modify some VA parameters to integrate libmix with the GEN driver for DRC BZ: 158110 modify some VA parameters to integrate libmix with the GEN driver for DRC Change-Id: I244ccd763602d66bb8219ba391706790df5e576c Signed-off-by: ywan171 --- videodecoder/VideoDecoderBase.cpp | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index fed382e..cb6a663 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -840,16 +840,16 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i } mVASurfaceAttrib->num_buffers = mNumSurfaces; mVASurfaceAttrib->pixel_format =
VA_FOURCC_NV12; - mVASurfaceAttrib->width = mVideoFormatInfo.width; - mVASurfaceAttrib->height = mVideoFormatInfo.height; - mVASurfaceAttrib->data_size = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.height * 1.5; + mVASurfaceAttrib->width = mVideoFormatInfo.surfaceWidth; + mVASurfaceAttrib->height = mVideoFormatInfo.surfaceHeight; + mVASurfaceAttrib->data_size = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight * 1.5; mVASurfaceAttrib->num_planes = 2; mVASurfaceAttrib->pitches[0] = mConfigBuffer.graphicBufferStride; mVASurfaceAttrib->pitches[1] = mConfigBuffer.graphicBufferStride; mVASurfaceAttrib->pitches[2] = 0; mVASurfaceAttrib->pitches[3] = 0; mVASurfaceAttrib->offsets[0] = 0; - mVASurfaceAttrib->offsets[1] = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.height; + mVASurfaceAttrib->offsets[1] = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight; mVASurfaceAttrib->offsets[2] = 0; mVASurfaceAttrib->offsets[3] = 0; mVASurfaceAttrib->private_data = (void *)mConfigBuffer.nativeWindow; @@ -900,8 +900,8 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i vaStatus = vaCreateSurfaces( mVADisplay, format, - mVideoFormatInfo.width, - mVideoFormatInfo.height, + mVideoFormatInfo.surfaceWidth, + mVideoFormatInfo.surfaceHeight, mExtraSurfaces, mNumExtraSurfaces, NULL, @@ -916,8 +916,8 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i vaStatus = vaCreateContext( mVADisplay, mVAConfig, - mVideoFormatInfo.width, - mVideoFormatInfo.height, + mVideoFormatInfo.surfaceWidth, + mVideoFormatInfo.surfaceHeight, 0, mSurfaces, mNumSurfaces + mNumExtraSurfaces, @@ -929,7 +929,6 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i if (mSurfaceBuffers == NULL) { return DECODE_MEMORY_FAIL; } - initSurfaceBuffer(true); if ((int32_t)profile == VAProfileSoftwareDecoding) { -- cgit v1.2.3 From 3e0364feade351c4f06cd3d52b8199d29f09b956 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Mon, 16 Sep 2013 11:24:02 +0800 Subject: Enable I frame only qp setting and max_qp setting BZ: 157407 Change-Id: I06429164a5080ade83b4ff424ade95bc510fc191 Signed-off-by: Zhao Liang --- test/mix_encoder2.cpp | 28 ++++++++++++++++++++++++++-- videoencoder/VideoEncoderAVC.cpp | 18 ++++++++++-------- videoencoder/VideoEncoderBase.cpp | 19 +++++++++++++++++-- videoencoder/VideoEncoderBase.h | 2 +- videoencoder/VideoEncoderDef.h | 9 +++++++++ 5 files changed, 63 insertions(+), 13 deletions(-) diff --git a/test/mix_encoder2.cpp b/test/mix_encoder2.cpp index ecdee3e..c1b0663 100755 --- a/test/mix_encoder2.cpp +++ b/test/mix_encoder2.cpp @@ -1044,6 +1044,25 @@ public: in->meta_data()->findInt64(kKeyTime, &InBuf.timeStamp); InBuf.priv = (void*)in; +#if 0 + if (mEncodeFrameCount == 1) { + VideoConfigBitRate configBitrate; + mVideoEncoder->getConfig(&configBitrate); + configBitrate.rcParams.minQP = 0; + configBitrate.rcParams.maxQP = 0; + mVideoEncoder->setConfig(&configBitrate); + } + + if (mEncodeFrameCount == 40) { + VideoConfigBitRate configBitrate; + mVideoEncoder->getConfig(&configBitrate); + configBitrate.rcParams.minQP = 0; + configBitrate.rcParams.maxQP = 0; + mVideoEncoder->setConfig(&configBitrate); + } + +#endif + #if 0 if (mEncodeFrameCount > 1 && mEncodeFrameCount % 60 == 0){ VideoParamConfigSet configIDRRequest; @@ -1207,7 +1226,12 @@ private: mEncoderParams.rcParams.bitRate = mBitrate; mEncoderParams.rcParams.initQP = mInitQP; mEncoderParams.rcParams.minQP = mMinQP; + mEncoderParams.rcParams.maxQP 
= 0; + mEncoderParams.rcParams.I_minQP = 0; + mEncoderParams.rcParams.I_maxQP = 0; mEncoderParams.rcParams.windowSize = mWinSize; + mEncoderParams.rcParams.disableBitsStuffing = 1; + mEncoderParams.rcParams.enableIntraFrameQPControl = 0; //change to 1 to enable I frame qp control mEncoderParams.intraPeriod = mIntraPeriod; ret = mVideoEncoder->setParameters(&mEncoderParams); @@ -1225,10 +1249,10 @@ private: mVideoEncoder->setParameters(&AVCParam); } -#if 1 +#if 0 VideoConfigBitRate configBitrate; mVideoEncoder->getConfig(&configBitrate); - configBitrate.rcParams.disableBitsStuffing = 0; + configBitrate.rcParams.disableBitsStuffing = 1; configBitrate.rcParams.disableFrameSkip = mDisableFrameSkip; mVideoEncoder->setConfig(&configBitrate); #endif diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index eb729b0..bfa1c78 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -660,7 +660,6 @@ Encode_Status VideoEncoderAVC::outputNaluLengthsPrefixed(VideoEncOutputBuffer *o uint32_t nalNum = 0; CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf); - LOG_I("outputNaluLengthsPrefixed\n"); while (1) { @@ -676,7 +675,6 @@ Encode_Status VideoEncoderAVC::outputNaluLengthsPrefixed(VideoEncOutputBuffer *o CHECK_ENCODE_STATUS_RETURN("getOneNALUnit"); if (nalSize + 4 <= outBuffer->bufferSize - NALUINFO_OFFSET - sizeCopiedHere) { - LOG_I("zhaoliang nalSize=%d, nalOffset=%d\n", nalSize, nalOffset); memcpy(outBuffer->data + NALUINFO_OFFSET + sizeCopiedHere, (uint8_t *)mCurSegment->buf + mOffsetInSeg, nalSize + nalOffset); @@ -700,8 +698,6 @@ Encode_Status VideoEncoderAVC::outputNaluLengthsPrefixed(VideoEncOutputBuffer *o *nalLength = nalSize + nalOffset; - LOG_I("nalLength=%d\n", nalSize + nalOffset); - // check if all data in current segment has been copied out if (mCurSegment->size == mOffsetInSeg) { if (mCurSegment->next != NULL) { @@ -720,7 +716,6 @@ Encode_Status VideoEncoderAVC::outputNaluLengthsPrefixed(VideoEncOutputBuffer *o outBuffer->offset = NALUINFO_OFFSET; uint32_t *nalHead = (uint32_t *) outBuffer->data; - LOG_I("zhaoliang nalHead =%x\n", nalHead); *nalHead = 0x4E414C4C; //'nall' *(++nalHead) = nalNum; @@ -731,6 +726,7 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) { Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n"); + if (mFrameNum == 0 || mNewHeader) { if (mRenderHrd) { ret = renderHrd(); @@ -754,11 +750,12 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) { mRenderMaxSliceSize = false; } + if (mComParams.rcParams.enableIntraFrameQPControl && (task->type == FTYPE_IDR || task->type == FTYPE_I)) + mRenderBitRate = true; + if (mRenderBitRate) { - ret = VideoEncoderBase::renderDynamicBitrate(); + ret = VideoEncoderBase::renderDynamicBitrate(task); CHECK_ENCODE_STATUS_RETURN("renderDynamicBitrate"); - - mRenderBitRate = false; } if (mRenderAIR && @@ -990,6 +987,11 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) { (unsigned int) (frameRateNum + frameRateDenom /2 ) / frameRateDenom; rcMiscParam->initial_qp = mComParams.rcParams.initQP; rcMiscParam->min_qp = mComParams.rcParams.minQP; + rcMiscParam->max_qp = mComParams.rcParams.maxQP; + if (mComParams.rcParams.enableIntraFrameQPControl) { + rcMiscParam->min_qp = mComParams.rcParams.I_minQP; + rcMiscParam->max_qp = mComParams.rcParams.I_maxQP; + } rcMiscParam->window_size = mComParams.rcParams.windowSize; //target bitrate is sent to libva through Sequence Parameter Buffer rcMiscParam->bits_per_second = 0; diff --git 
a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 0954fcc..aab4554 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -877,11 +877,15 @@ void VideoEncoderBase::setDefaultParams() { mComParams.rcMode = RATE_CONTROL_NONE; mComParams.rcParams.initQP = 15; mComParams.rcParams.minQP = 0; + mComParams.rcParams.maxQP = 0; + mComParams.rcParams.I_minQP = 0; + mComParams.rcParams.I_maxQP = 0; mComParams.rcParams.bitRate = 640000; mComParams.rcParams.targetPercentage= 0; mComParams.rcParams.windowSize = 0; mComParams.rcParams.disableFrameSkip = 0; mComParams.rcParams.disableBitsStuffing = 1; + mComParams.rcParams.enableIntraFrameQPControl = 0; mComParams.cyclicFrameInterval = 30; mComParams.refreshType = VIDEO_ENC_NONIR; mComParams.airParams.airMBs = 0; @@ -1662,7 +1666,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA return ret; } -Encode_Status VideoEncoderBase::renderDynamicBitrate() { +Encode_Status VideoEncoderBase::renderDynamicBitrate(EncodeTask* task) { VAStatus vaStatus = VA_STATUS_SUCCESS; LOG_V( "Begin\n\n"); @@ -1688,7 +1692,17 @@ Encode_Status VideoEncoderBase::renderDynamicBitrate() { bitrateControlParam->bits_per_second = mComParams.rcParams.bitRate; bitrateControlParam->initial_qp = mComParams.rcParams.initQP; - bitrateControlParam->min_qp = mComParams.rcParams.minQP; + if(mComParams.rcParams.enableIntraFrameQPControl && (task->type == FTYPE_IDR || task->type == FTYPE_I)) { + bitrateControlParam->min_qp = mComParams.rcParams.I_minQP; + bitrateControlParam->max_qp = mComParams.rcParams.I_maxQP; + mRenderBitRate = true; + LOG_I("apply I min/max qp for IDR or I frame\n"); + } else { + bitrateControlParam->min_qp = mComParams.rcParams.minQP; + bitrateControlParam->max_qp = mComParams.rcParams.maxQP; + mRenderBitRate = false; + LOG_I("revert to original min/max qp after IDR or I frame\n"); + } bitrateControlParam->target_percentage = mComParams.rcParams.targetPercentage; bitrateControlParam->window_size = mComParams.rcParams.windowSize; bitrateControlParam->rc_flags.bits.disable_frame_skip = mComParams.rcParams.disableFrameSkip; @@ -1698,6 +1712,7 @@ Encode_Status VideoEncoderBase::renderDynamicBitrate() { LOG_I("bits_per_second = %d\n", bitrateControlParam->bits_per_second); LOG_I("initial_qp = %d\n", bitrateControlParam->initial_qp); LOG_I("min_qp = %d\n", bitrateControlParam->min_qp); + LOG_I("max_qp = %d\n", bitrateControlParam->max_qp); LOG_I("target_percentage = %d\n", bitrateControlParam->target_percentage); LOG_I("window_size = %d\n", bitrateControlParam->window_size); LOG_I("disable_frame_skip = %d\n", bitrateControlParam->rc_flags.bits.disable_frame_skip); diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index b9ee8ac..10f8141 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -76,7 +76,7 @@ protected: virtual Encode_Status updateFrameInfo(EncodeTask* task) ; Encode_Status renderDynamicFrameRate(); - Encode_Status renderDynamicBitrate(); + Encode_Status renderDynamicBitrate(EncodeTask* task); Encode_Status renderHrd(); Encode_Status queryProfileLevelConfig(VADisplay dpy, VAProfile profile); diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index ee40fda..4ff90fa 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -223,10 +223,14 @@ struct VideoRateControlParams { uint32_t bitRate; uint32_t initQP; uint32_t minQP; + uint32_t maxQP; + uint32_t I_minQP; + 
uint32_t I_maxQP; uint32_t windowSize; uint32_t targetPercentage; uint32_t disableFrameSkip; uint32_t disableBitsStuffing; + uint32_t enableIntraFrameQPControl; VideoRateControlParams &operator=(const VideoRateControlParams &other) { if (this == &other) return *this; @@ -234,10 +238,15 @@ struct VideoRateControlParams { this->bitRate = other.bitRate; this->initQP = other.initQP; this->minQP = other.minQP; + this->maxQP = other.maxQP; + this->I_minQP = other.I_minQP; + this->I_maxQP = other.I_maxQP; this->windowSize = other.windowSize; this->targetPercentage = other.targetPercentage; this->disableFrameSkip = other.disableFrameSkip; this->disableBitsStuffing = other.disableBitsStuffing; + this->enableIntraFrameQPControl = other.enableIntraFrameQPControl; + return *this; } }; -- cgit v1.2.3 From dc05fe98fefcbfed227f91e841655d2f78ccda4c Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Thu, 19 Dec 2013 13:34:42 +0800 Subject: libmix: fix a VC1 parser issue BZ: 129080 Two issues have been fixed in the VC1 parser. 1. User data should be identified. 2. slice_vertical_position should be parsed from the bitstream instead of being assigned the slice number. Change-Id: I2164550d84720c2619cc2df963d59fc2999faf3a Signed-off-by: Tianmi Chen --- mixvbp/vbp_manager/vbp_vc1_parser.c | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/mixvbp/vbp_manager/vbp_vc1_parser.c b/mixvbp/vbp_manager/vbp_vc1_parser.c index 3026ec9..87dadd8 100755 --- a/mixvbp/vbp_manager/vbp_vc1_parser.c +++ b/mixvbp/vbp_manager/vbp_vc1_parser.c @@ -329,7 +329,7 @@ static uint32 vbp_parse_start_code_helper_vc1( cxt->list.data[cxt->list.num_items].stpos - PREFIX_SIZE; - if (start_code >= 0x0A && start_code <= 0x0F) + if ((start_code >= 0x0A && start_code <= 0x0F) || (start_code >= 0x1B && start_code <= 0x1F)) { /* only put known start code to the list * 0x0A: end of sequence @@ -338,6 +338,7 @@ static uint32 vbp_parse_start_code_helper_vc1( * 0x0D: field header * 0x0E: entry point header * 0x0F: sequence header + * 0x1B ~ 0x1F: user data */ cxt->list.num_items++; } @@ -1018,6 +1019,9 @@ static void vbp_pack_slice_data_vc1( uint8 is_emul; viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul); + vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data; + vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader); + vbp_slice_data_vc1 *slc_data = &(pic_data->slc_data[pic_data->num_slices]); VASliceParameterBufferVC1 *slc_parms = &(slc_data->slc_parms); @@ -1035,8 +1039,7 @@ static void vbp_pack_slice_data_vc1( slc_parms->macroblock_offset = bit + byte * 8; - /* fix this. we need o get the slice_vertical_position from the code */ - slc_parms->slice_vertical_position = pic_data->num_slices; + slc_parms->slice_vertical_position = (picLayerHeader->SLICE_ADDR % (pic_data->pic_parms->coded_height / 16)); pic_data->num_slices++; } -- cgit v1.2.3 From 831da9aa5668bc323fba735332192566398f80e0 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Sun, 15 Dec 2013 18:36:04 +0800 Subject: Fix the widevine playback corruption problem on BYT BZ: 140808 In the current LibMIX implementation, there is a defect in handling MMCO commands. When the frame_num of the slice is 0, the short-term reference frames will NOT be unmarked as "unreferenced". This drives the DPB management into an error state and leads to corrupted decoded frames.
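A worked example of the wraparound fix below, with illustrative values, assuming the parser stores unwrapped pic numbers: if log2_max_frame_num_minus4 = 0 then max_frame_num = 1 << (0 + 4) = 16. A slice with frame_num 0 carrying an MMCO with difference_of_pic_nums_minus1 = 0 gives picNumX = 0 - (0 + 1) = -1; adding max_frame_num yields picNumX = 15, which matches the pic number of the most recently decoded short-term reference frame, so that frame can now be unmarked.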
Change-Id: I0907d5b357049d792318ec46cdc441c641257ae2
Signed-off-by: wfeng6
---
 mixvbp/vbp_plugin/h264/h264parse_dpb.c | 9 +++++++++
 1 file changed, 9 insertions(+)
 mode change 100755 => 100644 mixvbp/vbp_plugin/h264/h264parse_dpb.c

diff --git a/mixvbp/vbp_plugin/h264/h264parse_dpb.c b/mixvbp/vbp_plugin/h264/h264parse_dpb.c
old mode 100755
new mode 100644
index d238958..e0b7c8f
--- a/mixvbp/vbp_plugin/h264/h264parse_dpb.c
+++ b/mixvbp/vbp_plugin/h264/h264parse_dpb.c
@@ -1815,6 +1815,8 @@ void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t diff
     int32_t currPicNum;
     uint32_t idx;
     int32_t unmark_done;
+    int32_t max_frame_num;
+
     h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;

     if (pInfo->img.structure == FRAME)
@@ -1824,6 +1826,13 @@ void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t diff
     picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1);

+#ifdef USE_AVC_SHORT_FORMAT
+    if (picNumX < 0) {
+        max_frame_num = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+        picNumX = picNumX + max_frame_num;
+    }
+#endif
+
     unmark_done = 0;

     for (idx = 0; (idx < p_dpb->ref_frames_in_buffer) && (!(unmark_done)); idx++)
-- 
cgit v1.2.3


From 83532fac3e93e9ffba6d14e92d550d809433610b Mon Sep 17 00:00:00 2001
From: Dan Liang
Date: Tue, 24 Dec 2013 14:18:07 +0800
Subject: libmix: fix a Video Editor Green Bar issue

BZ: 159760

Video Editor uses the width/height in the port definition of the decoder
when it exports a movie. Some clips' containers carry the wrong
width/height info, which may cause a green bar issue in the raw data mode.

Change-Id: Ib7650b8149e3b7cb49442bc8ed4734980bfe93c4
Signed-off-by: Dan Liang
---
 videodecoder/VideoDecoderAVC.cpp | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp
index 5548625..e7fe570 100644
--- a/videodecoder/VideoDecoderAVC.cpp
+++ b/videodecoder/VideoDecoderAVC.cpp
@@ -738,6 +738,18 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) {
         data->codec_data->crop_right,
         data->codec_data->crop_bottom);

+    if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)){
+        // BUG 159760: the port definition's height were set according to the infomation in container.
+        // In some cases, this info is not right. Need to correct it by format change notification.
+        uint32_t cropWidth = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight);
+        uint32_t cropHeight = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop);
+
+        if (width != cropWidth || height != cropHeight) {
+            mSizeChanged = true;
+            ITRACE("Raw data mode: video size is changed.");
+        }
+    }
+
     int diff = data->codec_data->num_ref_frames + 1 - mOutputWindowSize;

     if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
-- 
cgit v1.2.3


From 843129f236653951562dd24ab021eb56e0aa1995 Mon Sep 17 00:00:00 2001
From: Chetan Rao
Date: Mon, 23 Dec 2013 14:00:02 +0200
Subject: libmix: Use include-path-for MACRO for AOSP includes

BZ: 160780

Instead of hardcoding the AOSP include paths from the top of the Android
Build Tree, we need to use the include-path-for MACRO defined in
build/core/pathmap.mk. It also defines a mapping of project names to
the path.
New paths can be added in device/intel/common/BoardConfig.mk

Change-Id: I7dc15ee6ad769bd8c260dabe14149f15e77da71c
Signed-off-by: Chetan Rao
---
 imagedecoder/Android.mk | 2 +-
 videoencoder/Android.mk | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/imagedecoder/Android.mk b/imagedecoder/Android.mk
index b5d3c95..70928e7 100644
--- a/imagedecoder/Android.mk
+++ b/imagedecoder/Android.mk
@@ -80,7 +80,7 @@ LOCAL_SRC_FILES += \
 LOCAL_C_INCLUDES += \
     $(LOCAL_PATH) \
-    $(TOP)/external/jpeg \
+    $(call include-path-for, jpeg) \
     $(TARGET_OUT_HEADERS)/libva \
     $(TARGET_OUT_HEADERS)/libjpegdec \
     $(TARGET_OUT_HEADERS)/libmix_videovpp

diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk
index 480b5a1..a78e0e4 100644
--- a/videoencoder/Android.mk
+++ b/videoencoder/Android.mk
@@ -29,7 +29,7 @@ endif
 LOCAL_C_INCLUDES := \
     $(LOCAL_PATH) \
     $(TARGET_OUT_HEADERS)/libva \
-    $(TOPDIR)/frameworks/native/include \
+    $(call include-path-for, frameworks-native) \
     $(TARGET_OUT_HEADERS)/pvr

 ifeq ($(ENABLE_IMG_GRAPHICS),)
-- 
cgit v1.2.3


From 6bbbaf9cffcdbcba6f2c2717e92e7b7634197cf2 Mon Sep 17 00:00:00 2001
From: gji2
Date: Thu, 26 Dec 2013 05:19:01 +0800
Subject: Fix the cir setting fail issue

BZ: 158599

Fix the cir setting fail issue

Change-Id: Ic90f5745f9b22b84d6b5d461206eb73f58b4d175
Signed-off-by: gji2
---
 videoencoder/VideoEncoderAVC.cpp | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp
index bfa1c78..7b234b1 100644
--- a/videoencoder/VideoEncoderAVC.cpp
+++ b/videoencoder/VideoEncoderAVC.cpp
@@ -768,9 +768,7 @@ Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) {
         mRenderAIR = false;
     }

-    if (mRenderCIR &&
-        (mComParams.refreshType == VIDEO_ENC_CIR ||
-         mComParams.refreshType == VIDEO_ENC_BOTH)) {
+    if (mRenderCIR) {

         ret = renderCIR();
         CHECK_ENCODE_STATUS_RETURN("renderCIR");
-- 
cgit v1.2.3


From 6fd0eeb4a56191933455c8f8e9a5cfa06963c9bc Mon Sep 17 00:00:00 2001
From: Tianmi Chen
Date: Wed, 25 Dec 2013 17:03:05 +0800
Subject: libmix: optimize thumbnail generation for VP8 1080p clip

BZ: 161172

optimize thumbnail generation for VP8 1080p clip.

Change-Id: I61ea103b21bdd611f3927e9b0a13aa40ac9d0fcf
Signed-off-by: Tianmi Chen
---
 videodecoder/VideoDecoderVP8.cpp | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp
index e4ef413..03a6fd1 100644
--- a/videodecoder/VideoDecoderVP8.cpp
+++ b/videodecoder/VideoDecoderVP8.cpp
@@ -61,10 +61,13 @@ void VideoDecoderVP8::updateFormatInfo(vbp_data_vp8 *data) {

     if ((mVideoFormatInfo.width != width || mVideoFormatInfo.height != height) &&
         width && height) {
+        if ((VideoDecoderBase::alignMB(mVideoFormatInfo.width) != width) ||
+            (VideoDecoderBase::alignMB(mVideoFormatInfo.height) != height)) {
+            mSizeChanged = true;
+            ITRACE("Video size is changed.");
+        }
         mVideoFormatInfo.width = width;
         mVideoFormatInfo.height = height;
-        mSizeChanged = true;
-        ITRACE("Video size is changed.");
     }

     mVideoFormatInfo.cropLeft = data->codec_data->crop_left;
-- 
cgit v1.2.3


From ba711c4a2bc6fbc12544c7eaef8329cd0e6cc75d Mon Sep 17 00:00:00 2001
From: wfeng6
Date: Fri, 27 Dec 2013 16:01:10 +0800
Subject: Only load i965 driver on the CHT platform

BZ: 161384

The hybrid driver loading is only needed on the BYT platform, which has
both VED and GEN hardware. On the CHT platform, there is only GEN
hardware.
So for VP8 decoding, we need to use the i965 driver instead of the psb
driver.

Change-Id: I6731c7bd7c190ee9bb373ff7be4719320e02dee7
Signed-off-by: wfeng6
---
 videodecoder/Android.mk | 6 ++++++
 videodecoder/VideoDecoderBase.cpp | 4 ++--
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk
index 32a1851..32abd4e 100644
--- a/videodecoder/Android.mk
+++ b/videodecoder/Android.mk
@@ -34,6 +34,12 @@ ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_USE_GEN_HW)),)
     LOCAL_CFLAGS += -DUSE_AVC_SHORT_FORMAT -DUSE_GEN_HW
 endif

+PLATFORM_USE_HYBRID_DRIVER := \
+    baytrail
+
+ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_USE_HYBRID_DRIVER)),)
+    LOCAL_CFLAGS += -DUSE_HYBRID_DRIVER
+endif

 #LOCAL_LDLIBS += -lpthread

diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp
index cb6a663..5424784 100644
--- a/videodecoder/VideoDecoderBase.cpp
+++ b/videodecoder/VideoDecoderBase.cpp
@@ -763,7 +763,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i
     }

     // Display is defined as "unsigned int"
-#ifndef USE_GEN_HW
+#ifndef USE_HYBRID_DRIVER
     mDisplay = new Display;
     *mDisplay = ANDROID_DISPLAY_HANDLE;
 #else
@@ -995,7 +995,7 @@ Decode_Status VideoDecoderBase::terminateVA(void) {
     }

     if (mDisplay) {
-#ifndef USE_GEN_HW
+#ifndef USE_HYBRID_DRIVER
         delete mDisplay;
 #endif
         mDisplay = NULL;
-- 
cgit v1.2.3


From 4c3a66bb72be5c4bbf9f43366e79dc4b4dd3c38f Mon Sep 17 00:00:00 2001
From: Dan Liang
Date: Sat, 28 Dec 2013 13:46:41 +0800
Subject: libmix: fix a regression introduced in DRC changes.

BZ: 161128

The width/height in the container is not reliable. We need to check
whether it matches the info parsed from the stream syntax and send a
format change if necessary.

Change-Id: Ifb5933dd3cf48255f375198746f2810052089f13
Signed-off-by: Dan Liang
---
 videodecoder/VideoDecoderMPEG4.cpp | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp
index 0f72095..d5963ee 100644
--- a/videodecoder/VideoDecoderMPEG4.cpp
+++ b/videodecoder/VideoDecoderMPEG4.cpp
@@ -92,6 +92,14 @@ Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) {
         CHECK_STATUS("startVA");
     }

+    if (mSizeChanged) {
+        // some container has the incorrect width/height.
+        // send the format change to OMX to update the crop info.
+ mSizeChanged = false; + ITRACE("Video size is changed during startVA"); + return DECODE_FORMAT_CHANGE; + } + if ((mVideoFormatInfo.width != (int32_t)data->codec_data.video_object_layer_width || mVideoFormatInfo.height != (int32_t)data->codec_data.video_object_layer_height) && data->codec_data.video_object_layer_width && -- cgit v1.2.3 From e2668f1163df2fd045d73b309f0be84e2ba9a5b5 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Tue, 31 Dec 2013 13:29:38 +0800 Subject: refine mix_encoder2 test tool and fix one return value fault in async mode BZ: 161842 Change-Id: I00216935d621419b69bbeef60821994ec40f6bbb Signed-off-by: Zhao Liang --- test/mix_encoder2.cpp | 155 ++++++++++++++++++++++---------------- videoencoder/VideoEncoderBase.cpp | 2 +- 2 files changed, 92 insertions(+), 65 deletions(-) diff --git a/test/mix_encoder2.cpp b/test/mix_encoder2.cpp index c1b0663..002fc20 100755 --- a/test/mix_encoder2.cpp +++ b/test/mix_encoder2.cpp @@ -237,6 +237,8 @@ public: virtual status_t read(MediaBuffer **buffer, const MediaSource::ReadOptions *options) { + static int64_t lastTS = 0; + if (gNumFramesOutput == mMaxNumFrames) { return ERROR_END_OF_STREAM; } @@ -298,13 +300,21 @@ public: (*buffer)->set_range(offset, size); (*buffer)->meta_data()->clear(); (*buffer)->meta_data()->setInt64( - kKeyTime, (gNumFramesOutput * 1000000) / mFrameRate); + kKeyTime, (gNumFramesOutput*100 / mFrameRate) * 10000); postSourceWriting(gNumFramesOutput % PRELOAD_FRAME_NUM); ++gNumFramesOutput; if (gNumFramesOutput % 10 ==0) fprintf(stderr, "."); + + int64_t currTS = systemTime(); + if (lastTS > 0) { + int32_t delayTS = (1000000 / mFrameRate) - (currTS - lastTS) / 1000; + if (delayTS > 0) + usleep(delayTS); + } + lastTS = systemTime(); return OK; } @@ -881,7 +891,6 @@ private: sp buf; for ( gNumFramesOutput = 0; gNumFramesOutput < source->mMaxNumFrames; gNumFramesOutput++) { - ANativeWindowBuffer* anb; native_window_set_buffers_format(source->mANW.get(), HAL_PIXEL_FORMAT_NV12); native_window_dequeue_buffer_and_wait(source->mANW.get(), &anb); @@ -896,8 +905,8 @@ private: #endif if (NO_ERROR != source->mANW->queueBuffer(source->mANW.get(), buf->getNativeBuffer(), -1)) return NULL; - else - usleep(1000000 / source->mFPS); +// else +// usleep(1000000 / source->mFPS); } source->stop(); @@ -916,8 +925,8 @@ class MixEncoder : public MediaSource { public: MixEncoder(const sp &source, const sp &meta, int rcmode, uint32_t flag) { - mFirstFrame = false; - mSrcEOS = false; + mFirstFrame = true; + mEOS = false; mEncoderFlag = flag; mEncodeFrameCount = 0; mSource = source; @@ -968,9 +977,6 @@ public: success = meta->findInt32('difs', &mDisableFrameSkip); CHECK(success); - success = meta->findInt32('sync', &mSyncMode); - CHECK(success); - success = meta->findInt32('rawc', &mRawColor); CHECK(success); @@ -1019,15 +1025,31 @@ public: mGroup.add_buffer(new MediaBuffer(maxsize)); mGroup.add_buffer(new MediaBuffer(maxsize)); - return mSource->start(); + err = mSource->start(); + if (err != OK) + return err; + + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + + pthread_create(&mThread, &attr, MixEncoder::ThreadFunc, this); + pthread_attr_destroy(&attr); + + return OK; } status_t stop() { Encode_Status ret; + mRunning = false; + void *dummy; + pthread_join(mThread, &dummy); + ret = mVideoEncoder->stop(); CHECK_ENC_STATUS("MIX::stop"); + mEOS = false; return OK; } @@ -1061,9 +1083,6 @@ public: mVideoEncoder->setConfig(&configBitrate); } -#endif - -#if 0 if (mEncodeFrameCount > 1 
&& mEncodeFrameCount % 60 == 0){ VideoParamConfigSet configIDRRequest; configIDRRequest.type = VideoConfigTypeIDRRequest; @@ -1093,17 +1112,28 @@ public: OutBuf.flag = 0; OutBuf.timeStamp = 0; - ret = mVideoEncoder->getOutput(&OutBuf); + ret = mVideoEncoder->getOutput(&OutBuf, 500); if (ret < ENCODE_SUCCESS) { - if ((ret == ENCODE_NO_REQUEST_DATA) && (strcmp(mMixCodec, H263_MIME_TYPE) == 0)) { - printf("H263 FrameSkip happens at Frame #%d\n", mEncodeFrameCount); - OutBuf.dataSize = 0; + if (ret == ENCODE_NO_REQUEST_DATA) { + if (mEOS) { + out->release(); + return ERROR_END_OF_STREAM; + } + + if (strcmp(mMixCodec, H263_MIME_TYPE) == 0) { + printf("H263 FrameSkip happens at Frame #%d\n", mEncodeFrameCount); + OutBuf.dataSize = 0; + }else { + out->release(); + return UNKNOWN_ERROR; + } } else { printf("MIX::getOutput failed, ret=%d\n", ret); out->release(); return UNKNOWN_ERROR; } - } + } else if (ret == ENCODE_DATA_NOT_READY) + ret = mVideoEncoder->getOutput(&OutBuf, FUNC_BLOCK); out->set_range(0, OutBuf.dataSize); out->meta_data()->clear(); @@ -1128,60 +1158,28 @@ public: virtual status_t read(MediaBuffer **buffer, const MediaSource::ReadOptions *options) { status_t err; - Encode_Status ret; - - if (mSrcEOS) - return ERROR_END_OF_STREAM; - //write rest data of first frame after outputting csd, only for H264/MPEG4 - if (mFirstFrame) { + if (mFirstFrame && (strcasecmp(mMixCodec, H263_MIME_TYPE) != 0) + && (strcasecmp(mMixCodec, VP8_MIME_TYPE) != 0)) { err = mGroup.acquire_buffer(buffer); CHECK_STATUS(err); - err = getoutput(*buffer, OUTPUT_EVERYTHING); + err = getoutput(*buffer, OUTPUT_CODEC_DATA); CHECK_STATUS(err); mFirstFrame = false; return OK; } - //input buffers - int loop=1; - if (mSyncMode == 0 && mEncodeFrameCount == 0) - loop = 2; - - for(int i=0; iread (&src); - - if (err == ERROR_END_OF_STREAM) { - LOG ("\nReach Resource EOS, still need to get final frame encoded data\n"); - mSrcEOS = true; - if (mSyncMode) - return ERROR_END_OF_STREAM; - }else { - CHECK_STATUS(err); - err = encode(src); - CHECK_STATUS(err); - } - } + mFirstFrame = false; - //output buffers + //output buffer err = mGroup.acquire_buffer(buffer); CHECK_STATUS(err); - VideoOutputFormat format; - int n = 2; - if (mSyncMode) - n = 1; - - if ((mEncodeFrameCount == n && (strcasecmp(mMixCodec, H263_MIME_TYPE) != 0))&& - (mEncodeFrameCount == n && (strcasecmp(mMixCodec, VP8_MIME_TYPE) != 0))){ - format = OUTPUT_CODEC_DATA; - mFirstFrame = true; - }else - format = OUTPUT_EVERYTHING;; - err = getoutput(*buffer, format); + err = getoutput(*buffer, OUTPUT_EVERYTHING); + if (err == ERROR_END_OF_STREAM) + return err; CHECK_STATUS(err); return OK; } @@ -1259,6 +1257,34 @@ private: return OK; } +private: + //encoding thread + static void *ThreadFunc(void *me) { + MixEncoder *encoder = static_cast(me); + + status_t err = OK; + + while (encoder->mRunning) { + MediaBuffer* src = NULL; + + err = encoder->mSource->read(&src); + + if (err == ERROR_END_OF_STREAM) { + encoder->mEOS = true; + return NULL; + }else { + if (err == OK) + err = encoder->encode(src); + if (err != OK) { + src->release(); + return NULL; + } + } + } + + return NULL; + } + private: const char* mMixCodec; @@ -1275,11 +1301,9 @@ private: int mWinSize; int mIdrInt; int mDisableFrameSkip; - int mSyncMode; int mRawColor; bool mFirstFrame; - bool mSrcEOS; IVideoEncoder *mVideoEncoder; VideoParamsCommon mEncoderParams; @@ -1290,6 +1314,11 @@ private: sp mSource; MediaBufferGroup mGroup; sp mMeta; + +public: + pthread_t mThread; + bool mRunning; + bool mEOS; }; class 
IVFWriter : public MediaWriter {
@@ -1573,7 +1602,6 @@ int main(int argc, char* argv[])
     int DisableFrameSkip = 0;
     int GfxColor = 0;
     int OutFormat = 0;
-    int SyncMode = 0;
     char* OutFileName = "out.264";
     const char* Yuvfile = "";
     unsigned int SessionFlag = 0;
@@ -1819,7 +1847,6 @@ int main(int argc, char* argv[])
         source = new MemHeapSource(src_meta, src_flags);
     } else if (SrcType == 7) {
         source = new MixSurfaceMediaSource(SrcWidth, SrcHeight, SrcFrameNum, SrcFps);
-        SyncMode = 1;
     }else{
         printf("Source Type is not supported\n");
         return 0;
     }
@@ -1859,7 +1886,6 @@ int main(int argc, char* argv[])
     enc_meta->setInt32('wsiz', WinSize);
     enc_meta->setInt32('idri', IdrInt);
     enc_meta->setInt32('difs', DisableFrameSkip);
-    enc_meta->setInt32('sync', SyncMode);
     enc_meta->setInt32('rawc', GfxColor);

     uint32_t encoder_flags = 0;
@@ -1911,8 +1937,9 @@ int main(int argc, char* argv[])
         usleep(100000);
     }

-    err = writer->stop();
     int64_t end = systemTime();
+    err = writer->stop();
+
     if (EncType == 1) {
         client.disconnect();

diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp
index aab4554..c758d3a 100644
--- a/videoencoder/VideoEncoderBase.cpp
+++ b/videoencoder/VideoEncoderBase.cpp
@@ -422,7 +422,7 @@ Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint3
         if(NO_ERROR != mEncodeTask_Cond.waitRelative(mEncodeTask_Lock, 1000000*timeout)) {
             mEncodeTask_Lock.unlock();
             LOG_E("Time out wait for encode task.\n");
-            return ENCODE_DATA_NOT_READY;
+            return ENCODE_NO_REQUEST_DATA;
         }
     } else {//Nonblock
         mEncodeTask_Lock.unlock();
-- 
cgit v1.2.3


From f05c62cbc5e2819a7df252ca3afc9f7d16c1699f Mon Sep 17 00:00:00 2001
From: ywan171
Date: Mon, 30 Dec 2013 23:18:14 +0800
Subject: libmix: fix display incomplete issue when resolution changes from small to large

BZ: 159798

Fix the display incomplete issue when the resolution changes from small
to large.

Change-Id: I794949c652619f0e0e450a71a081b87c2bff39d2
Signed-off-by: ywan171
---
 videodecoder/VideoDecoderWMV.cpp | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/videodecoder/VideoDecoderWMV.cpp b/videodecoder/VideoDecoderWMV.cpp
index 7ae7cb1..6013062 100644
--- a/videodecoder/VideoDecoderWMV.cpp
+++ b/videodecoder/VideoDecoderWMV.cpp
@@ -105,12 +105,12 @@ Decode_Status VideoDecoderWMV::decode(VideoDecodeBuffer *buffer) {
         mVideoFormatInfo.height != data->se_data->CODED_HEIGHT) &&
         data->se_data->CODED_WIDTH &&
         data->se_data->CODED_HEIGHT) {
-        updateFormatInfo(data);
-        if (mSizeChanged) {
-            flushSurfaceBuffers();
-            mSizeChanged = false;
-            return DECODE_FORMAT_CHANGE;
-        }
+        ITRACE("video size is changed from %dx%d to %dx%d", mVideoFormatInfo.width, mVideoFormatInfo.height,
+                data->se_data->CODED_WIDTH, data->se_data->CODED_HEIGHT);
+        mVideoFormatInfo.width = data->se_data->CODED_WIDTH;
+        mVideoFormatInfo.height = data->se_data->CODED_HEIGHT;
+        flushSurfaceBuffers();
+        return DECODE_FORMAT_CHANGE;
     }

     status = decodeFrame(buffer, data);
-- 
cgit v1.2.3


From 0338a6950a1320e336e1571d4e8e803dfc8440f8 Mon Sep 17 00:00:00 2001
From: Liu Bolun
Date: Tue, 31 Dec 2013 13:14:32 +0800
Subject: Enable VP8 encode for screenrecord

BZ: 161797

Change the mime name from "video/x-webm" to "video/x-vnd.on2.vp8" to
enable access to VP8 HW encode via the MediaCodec::CreateByType method.
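After this rename, the factory lookup in VideoEncoderHost.cpp matches the string that MediaCodec hands down for VP8. A caller would reach the VP8 HW encoder roughly as sketched here (illustrative only; the surrounding call sequence is an assumption based on the encoder interface used elsewhere in this log, not something this patch shows):

    // Illustrative sketch: "video/x-webm" no longer matches and
    // would fall through to the unsupported-mime error branch.
    IVideoEncoder *encoder = createVideoEncoder("video/x-vnd.on2.vp8");
    if (encoder != NULL) {
        // ... start() / encode() / getOutput() / stop() as usual ...
    }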
Change-Id: I5fb77be40a651b47cc60e8a5f57019d46a6113ff Signed-off-by: Liu Bolun --- videoencoder/VideoEncoderHost.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderHost.cpp b/videoencoder/VideoEncoderHost.cpp index 00a8e40..39b0e5d 100644 --- a/videoencoder/VideoEncoderHost.cpp +++ b/videoencoder/VideoEncoderHost.cpp @@ -53,7 +53,7 @@ IVideoEncoder *createVideoEncoder(const char *mimeType) { PVSoftMPEG4Encoder *p = new PVSoftMPEG4Encoder("OMX.google.mpeg4.encoder"); #endif return (IVideoEncoder *)p; - } else if (strcasecmp(mimeType, "video/x-webm") == 0) { + } else if (strcasecmp(mimeType, "video/x-vnd.on2.vp8") == 0) { VideoEncoderVP8 *p = new VideoEncoderVP8(); return (IVideoEncoder *)p; } else { -- cgit v1.2.3 From cea8ec2c269587d1dbe5b62ac4d0191d8acd1f55 Mon Sep 17 00:00:00 2001 From: "SUN,Jing" Date: Mon, 30 Dec 2013 11:09:13 +0800 Subject: Fixed KW issue 87954# for Libmix's IntelImageEncoder. BZ: 161637 Issue: KW issue 87954# was reported, which might cause buffer overflow. Solution: Fixed it by adding buffer index's checking. Change-Id: I44f25ab45daa580e1896fc5c8faac8de6b781176 Signed-off-by: SUN,Jing --- imageencoder/ImageEncoder.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/imageencoder/ImageEncoder.cpp b/imageencoder/ImageEncoder.cpp index d63ad5e..7427737 100644 --- a/imageencoder/ImageEncoder.cpp +++ b/imageencoder/ImageEncoder.cpp @@ -175,6 +175,10 @@ int IntelImageEncoder::createSourceSurface(int source_type, void *source_buffer, break; } } + if(INTEL_IMAGE_ENCODER_MAX_BUFFERS == i) { + LOGE("createSourceSurface: failed because the max surface count was reached!\n"); + return VA_STATUS_ERROR_ALLOCATION_FAILED; + } /* Allocate a source surface */ if (VA_RT_FORMAT_YUV420 == fourcc) -- cgit v1.2.3 From e64eb699fdfbf83496737905252aeeb4195dfe34 Mon Sep 17 00:00:00 2001 From: Liu Bolun Date: Mon, 30 Dec 2013 14:38:39 +0800 Subject: Update VP8 HRD parameters in encode middleware layer. 
BZ: 161665

Change the following parameters:
mVideoParamsVP8.hrd_buf_size = 1000;
mVideoParamsVP8.hrd_buf_initial_fullness = 500;
mVideoParamsVP8.hrd_buf_optimal_fullness = 600;

Change-Id: I930048d075b23927b192fea1b81d1642abdd9d48
Signed-off-by: Liu Bolun
---
 videoencoder/VideoEncoderVP8.cpp | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp
index 4f0c792..3539bdd 100644
--- a/videoencoder/VideoEncoderVP8.cpp
+++ b/videoencoder/VideoEncoderVP8.cpp
@@ -27,9 +27,9 @@ VideoEncoderVP8::VideoEncoderVP8()
     mVideoParamsVP8.init_qp = 26;
     mVideoParamsVP8.rc_undershoot = 100;
     mVideoParamsVP8.rc_overshoot = 100;
-    mVideoParamsVP8.hrd_buf_size = 6000;
-    mVideoParamsVP8.hrd_buf_initial_fullness = 4000;
-    mVideoParamsVP8.hrd_buf_optimal_fullness = 5000;
+    mVideoParamsVP8.hrd_buf_size = 1000;
+    mVideoParamsVP8.hrd_buf_initial_fullness = 500;
+    mVideoParamsVP8.hrd_buf_optimal_fullness = 600;

     mVideoConfigVP8.force_kf = 0;
     mVideoConfigVP8.refresh_entropy_probs = 0;
@@ -195,9 +195,9 @@ Encode_Status VideoEncoderVP8::renderHRDParams(void)
     misc_param->type = VAEncMiscParameterTypeHRD;
     misc_hrd = (VAEncMiscParameterHRD *)misc_param->data;
     memset(misc_hrd, 0, sizeof(*misc_hrd));
-    misc_hrd->buffer_size = 6000;
-    misc_hrd->initial_buffer_fullness = 4000;
-    misc_hrd->optimal_buffer_fullness = 5000;
+    misc_hrd->buffer_size = 1000;
+    misc_hrd->initial_buffer_fullness = 500;
+    misc_hrd->optimal_buffer_fullness = 600;

     vaUnmapBuffer(mVADisplay, hrd_param_buf);
     vaStatus = vaRenderPicture(mVADisplay,mVAContext, &hrd_param_buf, 1);
-- 
cgit v1.2.3


From a26cfc905c24481a026babb4e7fceec91bd8e27f Mon Sep 17 00:00:00 2001
From: Liu Bolun
Date: Fri, 3 Jan 2014 15:05:56 +0800
Subject: Enable Max Frame Size setting for VP8 encode

BZ: 162278

Add the related config code in the libmix layer and pass it down to the
driver.
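With the plumbing added below, a client drives the new setting through the generic setConfig() path. A sketch of the intended use (the struct and config type come from the VideoEncoderDef.h hunk; the numeric cap is a placeholder and its unit is whatever the driver defines for VAEncMiscParameterBufferMaxFrameSize):

    // Illustrative sketch: request a per-frame size cap for VP8.
    VideoConfigVP8MaxFrameSize maxSizeConfig;   // constructor fills type and size
    maxSizeConfig.max_frame_size = 40000;       // example cap, unit per driver
    Encode_Status status = encoder->setConfig(&maxSizeConfig);
    // On success, derivedSetConfig() latches the value and raises
    // mRenderMaxFrameSize, so renderMaxFrameSizeParams() runs on the
    // next sendEncodeCommand().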
Change-Id: Idad5ecb0a3bf942f75b7e28c7bf1431265931c96 Signed-off-by: Liu Bolun --- videoencoder/VideoEncoderBase.cpp | 3 +- videoencoder/VideoEncoderBase.h | 1 + videoencoder/VideoEncoderDef.h | 13 +++++++ videoencoder/VideoEncoderVP8.cpp | 71 +++++++++++++++++++++++++++++++++++---- videoencoder/VideoEncoderVP8.h | 1 + 5 files changed, 82 insertions(+), 7 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index c758d3a..d35aca1 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1199,7 +1199,8 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { case VideoConfigTypeIDRRequest: case VideoConfigTypeSliceNum: case VideoConfigTypeVP8: - case VideoConfigTypeVP8ReferenceFrame: { + case VideoConfigTypeVP8ReferenceFrame: + case VideoConfigTypeVP8MaxFrameSize:{ ret = derivedSetConfig(videoEncConfig); break; } diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 10f8141..692162f 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -120,6 +120,7 @@ protected: bool mRenderFrameRate; bool mRenderBitRate; bool mRenderHrd; + bool mRenderMaxFrameSize; VABufferID mSeqParamBuf; VABufferID mRcParamBuf; diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 4ff90fa..d65f50d 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -329,6 +329,7 @@ enum VideoParamConfigType { VideoConfigTypeVP8, VideoConfigTypeVP8ReferenceFrame, VideoConfigTypeCIR, + VideoConfigTypeVP8MaxFrameSize, VideoParamsConfigExtension }; @@ -637,6 +638,7 @@ struct VideoParamsVP8 : VideoParamConfigSet { uint32_t hrd_buf_size; uint32_t hrd_buf_initial_fullness; uint32_t hrd_buf_optimal_fullness; + uint32_t max_frame_size; VideoParamsVP8() { type = VideoParamsTypeVP8; @@ -672,5 +674,16 @@ struct VideoConfigVP8ReferenceFrame : VideoParamConfigSet { } }; +struct VideoConfigVP8MaxFrameSize : VideoParamConfigSet { + + VideoConfigVP8MaxFrameSize() { + type = VideoConfigTypeVP8MaxFrameSize; + size = sizeof(VideoConfigVP8MaxFrameSize); + } + + uint32_t max_frame_size; +}; + + #endif /* __VIDEO_ENCODER_DEF_H__ */ diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index 3539bdd..d9785e4 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -30,6 +30,7 @@ VideoEncoderVP8::VideoEncoderVP8() mVideoParamsVP8.hrd_buf_size = 1000; mVideoParamsVP8.hrd_buf_initial_fullness = 500; mVideoParamsVP8.hrd_buf_optimal_fullness = 600; + mVideoParamsVP8.max_frame_size = 0; mVideoConfigVP8.force_kf = 0; mVideoConfigVP8.refresh_entropy_probs = 0; @@ -122,7 +123,7 @@ Encode_Status VideoEncoderVP8::renderRCParams(void) { VABufferID rc_param_buf; VAStatus vaStatus = VA_STATUS_SUCCESS; - VAEncMiscParameterBuffer *misc_param, *misc_param_tmp; + VAEncMiscParameterBuffer *misc_param; VAEncMiscParameterRateControl *misc_rate_ctrl; vaStatus = vaCreateBuffer(mVADisplay, mVAContext, @@ -155,7 +156,7 @@ Encode_Status VideoEncoderVP8::renderFrameRateParams(void) { VABufferID framerate_param_buf; VAStatus vaStatus = VA_STATUS_SUCCESS; - VAEncMiscParameterBuffer *misc_param, *misc_param_tmp; + VAEncMiscParameterBuffer *misc_param; VAEncMiscParameterFrameRate * misc_framerate; uint32_t frameRateNum = mComParams.frameRate.frameRateNum; uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom; @@ -183,8 +184,8 @@ Encode_Status VideoEncoderVP8::renderHRDParams(void) { VABufferID 
hrd_param_buf; VAStatus vaStatus = VA_STATUS_SUCCESS; - VAEncMiscParameterBuffer *misc_param, *misc_param_tmp; - VAEncMiscParameterHRD * misc_hrd; //*misc_rate_ctrl; + VAEncMiscParameterBuffer *misc_param; + VAEncMiscParameterHRD * misc_hrd; vaStatus = vaCreateBuffer(mVADisplay, mVAContext, VAEncMiscParameterBufferType, sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterHRD), @@ -206,6 +207,30 @@ Encode_Status VideoEncoderVP8::renderHRDParams(void) return 0; } +Encode_Status VideoEncoderVP8::renderMaxFrameSizeParams(void) +{ + VABufferID max_frame_size_param_buf; + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncMiscParameterBuffer *misc_param; + VAEncMiscParameterBufferMaxFrameSize * misc_maxframesize; + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncMiscParameterBufferType, + sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterHRD), + 1,NULL,&max_frame_size_param_buf); + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + + vaMapBuffer(mVADisplay, max_frame_size_param_buf,(void **)&misc_param); + misc_param->type = VAEncMiscParameterTypeMaxFrameSize; + misc_maxframesize = (VAEncMiscParameterBufferMaxFrameSize *)misc_param->data; + memset(misc_maxframesize, 0, sizeof(*misc_maxframesize)); + misc_maxframesize->max_frame_size = mVideoParamsVP8.max_frame_size; + vaUnmapBuffer(mVADisplay, max_frame_size_param_buf); + + vaStatus = vaRenderPicture(mVADisplay,mVAContext, &max_frame_size_param_buf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture");; + + return 0; +} Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) { @@ -217,6 +242,7 @@ Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) { ret = renderRCParams(); ret = renderHRDParams(); ret = renderSequenceParams(); + ret = renderMaxFrameSizeParams(); CHECK_ENCODE_STATUS_RETURN("renderSequenceParams"); } @@ -234,6 +260,13 @@ Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) { mRenderFrameRate = false; } + if (mRenderMaxFrameSize) { + ret = renderMaxFrameSizeParams(); + CHECK_ENCODE_STATUS_RETURN("renderMaxFrameSizeParams"); + + mRenderMaxFrameSize = false; + } + ret = renderPictureParams(task); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); @@ -300,9 +333,22 @@ Encode_Status VideoEncoderVP8::derivedGetConfig(VideoParamConfigSet *videoEncCon } break; + case VideoConfigTypeVP8MaxFrameSize:{ + + VideoConfigVP8MaxFrameSize *encConfigVP8MaxFrameSize = + reinterpret_cast (videoEncConfig); + + if (encConfigVP8MaxFrameSize->size != sizeof(VideoConfigVP8MaxFrameSize)) { + return ENCODE_INVALID_PARAMS; + } + + encConfigVP8MaxFrameSize->max_frame_size = mVideoParamsVP8.max_frame_size; + } + break; + default: { - LOG_E ("Invalid Config Type"); - break; + LOG_E ("Invalid Config Type"); + break; } } @@ -342,6 +388,19 @@ Encode_Status VideoEncoderVP8::derivedSetConfig(VideoParamConfigSet *videoEncCon } break; + case VideoConfigTypeVP8MaxFrameSize:{ + VideoConfigVP8MaxFrameSize *encConfigVP8MaxFrameSize = + reinterpret_cast (videoEncConfig); + + if (encConfigVP8MaxFrameSize->size != sizeof(VideoConfigVP8MaxFrameSize)) { + return ENCODE_INVALID_PARAMS; + } + + mVideoParamsVP8.max_frame_size = encConfigVP8MaxFrameSize->max_frame_size; + mRenderMaxFrameSize = true; + } + break; + default: { LOG_E ("Invalid Config Type"); break; diff --git a/videoencoder/VideoEncoderVP8.h b/videoencoder/VideoEncoderVP8.h index cacb7c1..60ddca8 100644 --- a/videoencoder/VideoEncoderVP8.h +++ b/videoencoder/VideoEncoderVP8.h @@ -38,6 +38,7 @@ private: Encode_Status renderRCParams(void); Encode_Status 
renderHRDParams(void); Encode_Status renderFrameRateParams(void); + Encode_Status renderMaxFrameSizeParams(void); VideoConfigVP8 mVideoConfigVP8; -- cgit v1.2.3 From b1f77635e340cc2e5e37ce1c49279036450886fa Mon Sep 17 00:00:00 2001 From: ywan171 Date: Sun, 5 Jan 2014 21:07:07 +0800 Subject: libmix: performance optimization for dynamic resolution change BZ: 162482 When there is no need of buffer reallocation during dynamic resolution change do not flush output surface queue; just attach a DRC tag to the buffer of difference resolution, and send FormatChange to omx client when the buffer with DRC tag is output. Change-Id: I72e2da15b92357bb0dba8f41eae0339a58e89ff7 Signed-off-by: ywan171 --- videodecoder/VideoDecoderAVC.cpp | 17 +++++++++++------ videodecoder/VideoDecoderBase.cpp | 9 +++++++++ videodecoder/VideoDecoderDefs.h | 3 +++ videodecoder/VideoDecoderMPEG4.cpp | 25 ++++++++++++++++++++----- videodecoder/VideoDecoderVP8.cpp | 16 +++++++++++++--- videodecoder/VideoDecoderWMV.cpp | 37 +++++++++++++++++++++++++++---------- 6 files changed, 83 insertions(+), 24 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index e7fe570..49c2efd 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -201,11 +201,6 @@ Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h CHECK_STATUS("endDecodingFrame"); } #endif - - if (mSizeChanged) { - mSizeChanged = false; - return DECODE_FORMAT_CHANGE; - } return DECODE_SUCCESS; } @@ -233,6 +228,10 @@ Decode_Status VideoDecoderAVC::beginDecodingFrame(vbp_data_h264 *data) { mAcquiredBuffer->renderBuffer.flag = 0; mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS; mAcquiredBuffer->pictureOrder = getPOC(picture); + if (mSizeChanged) { + mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE; + mSizeChanged = false; + } status = continueDecodingFrame(data); // surface buffer is released if decode fails @@ -775,7 +774,13 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) { updateFormatInfo(data); - if (mSizeChanged == false) { + bool noNeedFlush = false; + if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) { + noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth) + && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight); + } + + if (mSizeChanged == false || noNeedFlush) { return DECODE_SUCCESS; } else { mSizeChanged = false; diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 5424784..82eaa4a 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -184,6 +184,15 @@ void VideoDecoderBase::flush(void) { endDecodingFrame(true); + VideoSurfaceBuffer *p = mOutputHead; + // check if there's buffer with DRC flag in the output queue + while (p) { + if (p->renderBuffer.flag & IS_RESOLUTION_CHANGE) { + mSizeChanged = true; + break; + } + p = p->next; + } // avoid setting mSurfaceAcquirePos to 0 as it may cause tearing // (surface is still being rendered) mSurfaceAcquirePos = (mSurfaceAcquirePos + 1) % mNumSurfaces; diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 84d90e2..9e7b414 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -107,6 +107,9 @@ typedef enum { // indicate should allocate tiling surfaces USE_TILING_MEMORY = 0x20000, + + // indicate the frame has resolution change + IS_RESOLUTION_CHANGE = 0x40000, } 
VIDEO_BUFFER_FLAG; typedef enum diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index d5963ee..939e563 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -71,6 +71,7 @@ void VideoDecoderMPEG4::stop(void) { Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) { Decode_Status status; vbp_data_mp42 *data = NULL; + bool useGraphicbuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER; if (buffer == NULL) { return DECODE_INVALID_DATA; } @@ -92,7 +93,7 @@ Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) { CHECK_STATUS("startVA"); } - if (mSizeChanged) { + if (mSizeChanged && !useGraphicbuffer) { // some container has the incorrect width/height. // send the format change to OMX to update the crop info. mSizeChanged = false; @@ -105,11 +106,22 @@ Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) { data->codec_data.video_object_layer_width && data->codec_data.video_object_layer_height) { // update encoded image size + ITRACE("Video size is changed. from %dx%d to %dx%d\n",mVideoFormatInfo.width,mVideoFormatInfo.height, + data->codec_data.video_object_layer_width,data->codec_data.video_object_layer_height); + bool noNeedFlush = false; mVideoFormatInfo.width = data->codec_data.video_object_layer_width; mVideoFormatInfo.height = data->codec_data.video_object_layer_height; - flushSurfaceBuffers(); - ITRACE("Video size is changed."); - return DECODE_FORMAT_CHANGE; + if (useGraphicbuffer) { + noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth) + && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight); + } + if (!noNeedFlush) { + flushSurfaceBuffers(); + mSizeChanged = false; + return DECODE_FORMAT_CHANGE; + } else { + mSizeChanged = true; + } } status = decodeFrame(buffer, data); @@ -380,7 +392,10 @@ Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) { // TODO: set discontinuity flag mAcquiredBuffer->renderBuffer.flag = 0; mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS; - + if (mSizeChanged) { + mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE; + mSizeChanged = false; + } if (codingType != MP4_VOP_TYPE_B) { mLastVOPCodingType = codingType; mLastVOPTimeIncrement = picData->vop_time_increment; diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp index 03a6fd1..6cf584c 100644 --- a/videodecoder/VideoDecoderVP8.cpp +++ b/videodecoder/VideoDecoderVP8.cpp @@ -159,6 +159,7 @@ Decode_Status VideoDecoderVP8::decode(VideoDecodeBuffer *buffer) { Decode_Status VideoDecoderVP8::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_vp8 *data) { Decode_Status status; + bool useGraphicbuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER; mCurrentPTS = buffer->timeStamp; if (0 == data->num_pictures || NULL == data->pic_data) { WTRACE("Number of pictures is 0."); @@ -166,12 +167,18 @@ Decode_Status VideoDecoderVP8::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_v } if (VP8_KEY_FRAME == data->codec_data->frame_type) { - if (mSizeChanged) { + if (mSizeChanged && !useGraphicbuffer){ mSizeChanged = false; return DECODE_FORMAT_CHANGE; } else { updateFormatInfo(data); - if (mSizeChanged == true) { + bool noNeedFlush = false; + if (useGraphicbuffer) { + noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth) + && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight); + } + if (mSizeChanged == true && !noNeedFlush) { + flushSurfaceBuffers(); mSizeChanged = false; return 
DECODE_FORMAT_CHANGE; } @@ -196,7 +203,10 @@ Decode_Status VideoDecoderVP8::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_v if (buffer->flag & WANT_DECODE_ONLY) { mAcquiredBuffer->renderBuffer.flag |= WANT_DECODE_ONLY; } - + if (mSizeChanged) { + mSizeChanged = false; + mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE; + } // Here data->num_pictures is always equal to 1 for (int index = 0; index < data->num_pictures; index++) { diff --git a/videodecoder/VideoDecoderWMV.cpp b/videodecoder/VideoDecoderWMV.cpp index 6013062..de065c7 100644 --- a/videodecoder/VideoDecoderWMV.cpp +++ b/videodecoder/VideoDecoderWMV.cpp @@ -89,6 +89,7 @@ void VideoDecoderWMV::flush(void) { Decode_Status VideoDecoderWMV::decode(VideoDecodeBuffer *buffer) { Decode_Status status; vbp_data_vc1 *data = NULL; + bool useGraphicbuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER; if (buffer == NULL) { return DECODE_INVALID_DATA; } @@ -101,24 +102,36 @@ Decode_Status VideoDecoderWMV::decode(VideoDecodeBuffer *buffer) { CHECK_STATUS("startVA"); } + if (mSizeChanged && !useGraphicbuffer) { + mSizeChanged = false; + return DECODE_FORMAT_CHANGE; + } + if ((mVideoFormatInfo.width != data->se_data->CODED_WIDTH || mVideoFormatInfo.height != data->se_data->CODED_HEIGHT) && data->se_data->CODED_WIDTH && data->se_data->CODED_HEIGHT) { - ITRACE("video size is changed from %dx%d to %dx%d", mVideoFormatInfo.width, mVideoFormatInfo.height, - data->se_data->CODED_WIDTH, data->se_data->CODED_HEIGHT); - mVideoFormatInfo.width = data->se_data->CODED_WIDTH; - mVideoFormatInfo.height = data->se_data->CODED_HEIGHT; - flushSurfaceBuffers(); - return DECODE_FORMAT_CHANGE; + ITRACE("video size is changed from %dx%d to %dx%d", mVideoFormatInfo.width, mVideoFormatInfo.height, + data->se_data->CODED_WIDTH, data->se_data->CODED_HEIGHT); + mVideoFormatInfo.width = data->se_data->CODED_WIDTH; + mVideoFormatInfo.height = data->se_data->CODED_HEIGHT; + bool noNeedFlush = false; + if (useGraphicbuffer) { + noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth) + && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight); + } + + if (noNeedFlush) { + mSizeChanged = true; + } else { + flushSurfaceBuffers(); + mSizeChanged = false; + return DECODE_FORMAT_CHANGE; + } } status = decodeFrame(buffer, data); CHECK_STATUS("decodeFrame"); - if (mSizeChanged) { - mSizeChanged = false; - return DECODE_FORMAT_CHANGE; - } return status; } @@ -168,6 +181,10 @@ Decode_Status VideoDecoderWMV::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_v if (buffer->flag & WANT_DECODE_ONLY) { mAcquiredBuffer->renderBuffer.flag |= WANT_DECODE_ONLY; } + if (mSizeChanged) { + mSizeChanged = false; + mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE; + } if (data->num_pictures > 1) { if (data->pic_data[0].pic_parms->picture_fields.bits.is_first_field) { -- cgit v1.2.3 From d1af2a06581e05baf9959918023b8227436543e7 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Mon, 6 Jan 2014 23:05:50 +0800 Subject: mixvbp : reset the VOL quarter_sample if video_object_layer_verid = 1 BZ: 162218 reset the VOL quarter_sample if video_object_layer_verid = 1 to avoid corruption when transit video_object_layer_verid from !=1 to 1 Change-Id: I7d7e9fdf380bda6efceb927f09c0aa461c75861d Signed-off-by: ywan171 --- mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c index 6ff6b28..9a1a8d5 100755 --- 
a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c
@@ -378,6 +378,10 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_
             BREAK_GETBITS_REQD_MISSING(getbits, ret);
             vidObjLay->quarter_sample = code;
         }
+        else
+        {
+            vidObjLay->quarter_sample = 0;
+        }

         getbits = viddec_pm_get_bits(parent, &(code), 1);
         BREAK_GETBITS_REQD_MISSING(getbits, ret);
-- 
cgit v1.2.3


From 8ae3cea9e98e027930251fdb2d6cdb91548f8031 Mon Sep 17 00:00:00 2001
From: gji2
Date: Mon, 6 Jan 2014 21:48:26 +0800
Subject: Enhance the omx-component encode stack log print function.

BZ: 162650

Enhance the omx-component encode stack log print function, so that the
log can be easily enabled without rebuilding the omx-component stack.
Usage: adb shell setprop omxenc.debug 1

Change-Id: I0ae6f56db39f5c4f2b702718a2fc28d0ac7aa7f6
Signed-off-by: gji2
---
 videoencoder/IntelMetadataBuffer.h | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h
index 7518d23..ba0c9ba 100644
--- a/videoencoder/IntelMetadataBuffer.h
+++ b/videoencoder/IntelMetadataBuffer.h
@@ -35,7 +35,6 @@ using namespace android;
 #endif

-
 #define STRING_TO_FOURCC(format) ((uint32_t)(((format)[0])|((format)[1]<<8)|((format)[2]<<16)|((format)[3]<<24)))

 typedef enum {
@@ -156,11 +155,11 @@ public:
     static status_t instantiate();

     IntelBufferSharingService(){
-        LOGI("IntelBufferSharingService instance is created");
+        ALOGI("IntelBufferSharingService instance is created");
     }

     ~IntelBufferSharingService(){
-        LOGI("IntelBufferSharingService instance is destroyed");
+        ALOGI("IntelBufferSharingService instance is destroyed");
     }

     status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags);
-- 
cgit v1.2.3


From c0e7f486c4bf117dc0d86a0a4479a94e0183a6d7 Mon Sep 17 00:00:00 2001
From: ywan171
Date: Fri, 10 Jan 2014 10:22:42 +0800
Subject: libmix: let the decoder resume instead of stopping if the VOL is corrupted

BZ: 163232

Let the decoder resume instead of stopping if the VOL is corrupted and
causes the VOP height and width to be 0.

Change-Id: I81d11d638855fe91da1301a03a1bc5afbf3fca6a
Signed-off-by: ywan171
---
 videodecoder/VideoDecoderMPEG4.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp
index 939e563..7f2151b 100644
--- a/videodecoder/VideoDecoderMPEG4.cpp
+++ b/videodecoder/VideoDecoderMPEG4.cpp
@@ -150,7 +150,7 @@ Decode_Status VideoDecoderMPEG4::decodeFrame(VideoDecodeBuffer *buffer, vbp_data
     // and return error to OMX to avoid mediaserver crash.
     if (data->picture_data &&
         (data->picture_data->picture_param.vop_width == 0 ||
         data->picture_data->picture_param.vop_height == 0)) {
-        return DECODE_FAIL;
+        return DECODE_PARSER_FAIL;
     }

     uint64_t lastPTS = mCurrentPTS;
-- 
cgit v1.2.3


From 7bd3be630ee076e0c84ecea6477c79f15f1a568e Mon Sep 17 00:00:00 2001
From: Dan Liang
Date: Wed, 8 Jan 2014 13:02:20 +0800
Subject: libmix: add report of decoding error start mb and end mb position.

BZ: 156373

Report the decoding error region to the OMX IL.
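On the receiving side, the region arrives in the same error buffer that drainDecodingErrors() already copies out per frame. A sketch of a consumer (field names follow the VideoDecoderDefs.h hunk below; the loop itself and the integer types are illustrative):

    /* Illustrative sketch: walk the per-frame decode error report. */
    void dumpDecodeErrors(const VideoErrorBuffer &errBuf)
    {
        for (uint32_t i = 0; i < errBuf.errorNumber; i++) {
            const VideoErrorInfo &info = errBuf.errorArray[i];
            ITRACE("error %u: type = %d, mb range [%u, %u]",
                   i, info.type,
                   info.error_data.mb_pos.start_mb,
                   info.error_data.mb_pos.end_mb);
        }
    }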
Change-Id: Ibe453fadd5aa235a6c700086674004b108d788a1 Signed-off-by: Dan Liang --- videodecoder/VideoDecoderBase.cpp | 11 +++++++++++ videodecoder/VideoDecoderDefs.h | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 82eaa4a..0f4c297 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -1351,6 +1351,8 @@ void VideoDecoderBase::drainDecodingErrors(VideoErrorBuffer *outErrBuf, VideoRen currentSurface->errBuf.errorNumber = 0; currentSurface->errBuf.timeStamp = INVALID_PTS; } + if (outErrBuf) + VTRACE("%s: error number is %d", __FUNCTION__, outErrBuf->errorNumber); } void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *currentSurface) { @@ -1371,8 +1373,17 @@ void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *currentSurface) { if (err_drv_output[i].status != -1) { currentSurface->errBuf.errorNumber++; currentSurface->errBuf.errorArray[i + offset].type = (VideoDecodeErrorType)err_drv_output[i].decode_error_type; + currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.start_mb = err_drv_output[i].start_mb; + currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.end_mb = err_drv_output[i].end_mb; + ITRACE("Error Index[%d]: type = %d, start_mb = %d, end_mb = %d", + currentSurface->errBuf.errorNumber - 1, + currentSurface->errBuf.errorArray[i + offset].type, + currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.start_mb, + currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.end_mb); } else break; } + ITRACE("%s: error number of current surface is %d, timestamp @%llu", + __FUNCTION__, currentSurface->errBuf.errorNumber, currentSurface->timeStamp); } } diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 9e7b414..d39bb78 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -158,7 +158,7 @@ struct VideoConfigBuffer { struct VideoErrorInfo { VideoDecodeErrorType type; union { - typedef struct {uint32_t start_mb; uint32_t end_mb;} mb_pos; + struct {uint32_t start_mb; uint32_t end_mb;} mb_pos; } error_data; }; -- cgit v1.2.3 From e1b2c72a5483c225c620c68c45c7ff3962597e7a Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Wed, 8 Jan 2014 18:16:31 +0800 Subject: libmix: A fix to set post-processing flag BZ: 161788 post-processing flag should be parsed from sequence header in advanced profile. Change-Id: I2222dede26bb5344c075afaba7a63c1711df1729 Signed-off-by: Tianmi Chen --- mixvbp/vbp_manager/vbp_vc1_parser.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mixvbp/vbp_manager/vbp_vc1_parser.c b/mixvbp/vbp_manager/vbp_vc1_parser.c index 87dadd8..12e28e9 100755 --- a/mixvbp/vbp_manager/vbp_vc1_parser.c +++ b/mixvbp/vbp_manager/vbp_vc1_parser.c @@ -852,7 +852,7 @@ static void vbp_pack_picture_params_vc1( pic_parms->mb_mode_table = picLayerHeader->MBMODETAB; pic_parms->range_reduction_frame = picLayerHeader->RANGEREDFRM; pic_parms->rounding_control = picLayerHeader->RNDCTRL; - pic_parms->post_processing = picLayerHeader->POSTPROC; + pic_parms->post_processing = seqLayerHeader->POSTPROCFLAG; /* fix this. Add RESPIC to parser. 
*/
     pic_parms->picture_resolution_index = 0;
     pic_parms->luma_scale = picLayerHeader->LUMSCALE;
-- 
cgit v1.2.3


From 709f4581dd885594a3e1e08d3d3487375ef85fc5 Mon Sep 17 00:00:00 2001
From: ywan171
Date: Thu, 9 Jan 2014 14:59:54 +0800
Subject: mixvbp: if the VOL is corrupted, do not update the incorrect param

BZ: 163255

If the VOL is corrupted, do not update the VOL using the incorrect
params; otherwise incorrect VA params will be passed to the driver and
corruption will happen.

Change-Id: If27d599e26125d05bb5eb1272932a9badbc33130
Signed-off-by: ywan171
---
 mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c
index 9a1a8d5..30f9ea3 100755
--- a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c
@@ -526,16 +526,17 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse
         getbits = viddec_pm_get_bits(parent, &(code), 2);
         BREAK_GETBITS_REQD_MISSING(getbits, ret);
-        vidObjLay->video_object_layer_shape = code;

         /* If shape is not rectangluar exit early without parsing */
-        if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)
+        if (code != MP4_SHAPE_TYPE_RECTANGULAR)
         {
             ETRACE("Error: mp4_Parse_VideoObject: shape not rectangluar(%d):%d\n",
-                MP4_SHAPE_TYPE_RECTANGULAR, vidObjLay->video_object_layer_shape);
+                MP4_SHAPE_TYPE_RECTANGULAR, code);
             ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
             break;
         }

+        vidObjLay->video_object_layer_shape = code;
+
         if ((vidObjLay->video_object_layer_verid != 1) &&
             (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE))
         {/* Grayscale not supported */
-- 
cgit v1.2.3


From 7a661210e1299bc68aa141366e2c1d0d31cf32c1 Mon Sep 17 00:00:00 2001
From: ywan171
Date: Fri, 10 Jan 2014 10:16:29 +0800
Subject: mixvbp: let the VOL be parsed if the previous header/frame is corrupted

BZ: 163232

Let a later VOL be parsed even if the previous header/frame is corrupted.
Check width/height to keep a corrupted VOL from crashing the decoder.
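The guard in the hunk below follows a parse-validate-commit pattern: decode both 13-bit dimensions out of the 29-bit read, reject implausible values, and only then store them in the VOL context, so a corrupted header never leaves half-updated state behind. Distilled (the 4096 ceiling is the one the patch picks; everything else is illustrative):

    /* Illustrative sketch of the parse-validate-commit pattern. */
    int height = (code >> 1)  & 0x1FFF;   /* 13-bit video_object_layer_height */
    int width  = (code >> 15) & 0x1FFF;   /* 13-bit video_object_layer_width  */
    if (width > 4096 || height > 4096)
        break;                            /* corrupted VOL: keep the old state */
    vidObjLay->video_object_layer_height = height;
    vidObjLay->video_object_layer_width  = width;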
Change-Id: I6fdf2cc69a1b856e8cae48420d530f4fdf0fcbe3 Signed-off-by: ywan171 --- mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c index 30f9ea3..a538240 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c @@ -304,8 +304,15 @@ static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_ /* TODO: check for validity of marker bits */ getbits = viddec_pm_get_bits(parent, &(code), 29); BREAK_GETBITS_REQD_MISSING(getbits, ret); - vidObjLay->video_object_layer_height = (code >> 1) & 0x1FFF; - vidObjLay->video_object_layer_width = (code >> 15) & 0x1FFF; + int height = (code >> 1) & 0x1FFF; + int width = (code >> 15) & 0x1FFF; + if (width > 4096 || height > 4096) { + ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR); + ETRACE("width or height is invalid: width = %d, height = %d", width, height); + break; + } + vidObjLay->video_object_layer_height = height; + vidObjLay->video_object_layer_width = width; } getbits = viddec_pm_get_bits(parent, &(code), 2); @@ -447,10 +454,6 @@ mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parse //VTRACE("entering mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret); - // Trying to parse more header data as it is more important than frame data - if (parser->bitstream_error > MP4_HDR_ERROR_MASK) - return ret; - do { vidObjLay->VideoObjectPlane.sprite_transmit_mode = MP4_SPRITE_TRANSMIT_MODE_PIECE; -- cgit v1.2.3 From 9c6ea013ef6c8778152093548068b94beae18afd Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Wed, 15 Jan 2014 20:54:59 +0800 Subject: Add unit test for libmix video decoder BZ: 164731 Add unit test program for libmix video decoder Change-Id: Idc66cd31616c5f8acea438149cebb389d6648de8 Signed-off-by: wfeng6 --- test/Android.mk | 18 ++++++ test/mix_decoder.cpp | 158 +++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 176 insertions(+) create mode 100755 test/mix_decoder.cpp diff --git a/test/Android.mk b/test/Android.mk index 4886720..6140f34 100644 --- a/test/Android.mk +++ b/test/Android.mk @@ -1,5 +1,23 @@ LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := mix_decoder.cpp + +LOCAL_C_INCLUDES := \ + $(TARGET_OUT_HEADERS)/libva_videodecoder \ + $(TOP)/vendor/intel/hardware/PRIVATE/libmix/videodecoder \ + $(TARGET_OUT_HEADERS)/libva \ + $(TARGET_OUT_HEADERS)/libmixvbp \ + +LOCAL_SHARED_LIBRARIES := \ + libva_videodecoder liblog libva + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := mix_decoder + +include $(BUILD_EXECUTABLE) + # For intelmetadatabuffer test # ===================================================== diff --git a/test/mix_decoder.cpp b/test/mix_decoder.cpp new file mode 100755 index 0000000..ef4e310 --- /dev/null +++ b/test/mix_decoder.cpp @@ -0,0 +1,158 @@ +#define LOG_NDEBUG 0 +#define LOG_TAG "mix_decoder" +#include +#include +#include +#include +#include + +#define INPUTSIZE (4*1024*1024) +static int gImgWidth; +static int gImgHeight; +static int gCodec; +static int gOutputSize; + +void CheckArgs(int argc, char* argv[]) +{ + char c; + while ((c =getopt(argc, argv,"c:w:h:?") ) != EOF) { + switch (c) { + case 'w': + gImgWidth = atoi(optarg); + break; + case 'h': + gImgHeight = atoi(optarg); + break; + case 'c': + gCodec = atoi(optarg); + break; + case '?': + 
default: + ALOGI("./mix_encode -c Codec -w SrcWidth -h SrcHeight"); + exit(0); + } + } + + ALOGI("gImgWidth = %d, gImgHeight = %d, gCodec = %d", gImgWidth, gImgHeight, gCodec); + +} + + +int main(int argc, char* argv[]) +{ + FILE *fp_in, *fp_out; + char inputfilename[512]; + char outputfilename[512]; + int framenum = 1000; + int frameidx = 0; + + uint8 *inBuf = NULL; + uint8 *outBuf = NULL; + int in_size; + uint32_t out_size; + char *codecname = NULL; + + char codecnamelist[2][32] = {"video/avc", "video/avc-secure"}; + + CheckArgs(argc, argv); + + if (gImgWidth <= 0) { + ALOGE("Err: wrong video width = %d", gImgWidth); + return -1; + } + + if (gImgHeight <= 0) { + ALOGE("Err: wrong video height = %d", gImgHeight); + return -1; + } + + if (gCodec < 0) { + ALOGE("Err: wrong codec type = %d", gCodec); + return -1; + } + + gOutputSize = gImgWidth * gImgHeight * 3/2; + + VideoDecodeBuffer buffer; + int outidx = 0; + + IVideoDecoder *testDecoder = createVideoDecoder(codecnamelist[gCodec]); + if (testDecoder == NULL) { + ALOGE("Fail to create testDecoder!"); + return -1; + } + + ALOGV("Test decoder is starting..."); + VideoConfigBuffer configBuffer; + memset(&configBuffer, 0, sizeof(VideoConfigBuffer)); + + configBuffer.flag |= WANT_RAW_OUTPUT; + + configBuffer.width = gImgWidth; + configBuffer.height = gImgHeight; + + testDecoder->start(&configBuffer); + + inBuf = (uint8 *)malloc(INPUTSIZE); + if (inBuf == NULL) { + ALOGE("Fail to malloc the input buffer!"); + return -1; + } + + outBuf = (uint8 *)malloc(gOutputSize); + if (outBuf == NULL) { + ALOGE("Fail to malloc the output buffer!"); + return -1; + } + + for (frameidx = 0; frameidx < framenum; frameidx++) { + sprintf(inputfilename, "/data/decrypted_frame/decrypted_frame_%d.h264", frameidx); + if((fp_in = fopen(inputfilename,"rb")) == NULL) { + ALOGE("Fail to open inputfilename %s", inputfilename); + return -1; + } + fseek(fp_in, 0, SEEK_END); + in_size = ftell(fp_in); + ALOGV("%d frame input size = %d", frameidx, in_size); + rewind(fp_in); + if (in_size > INPUTSIZE) { + ALOGE("The bitstream size is bigger than the input buffer!"); + return -1; + } + fread(inBuf, 1, in_size, fp_in); + fclose(fp_in); + + memset(&buffer, 0, sizeof(VideoDecodeBuffer)); + buffer.data = inBuf; + buffer.size = in_size; + buffer.rotationDegrees = 0; + buffer.timeStamp = frameidx; + + testDecoder->decode(&buffer); + + const VideoRenderBuffer * renderbuf = testDecoder->getOutput(true); + + if (renderbuf != NULL) { + out_size = 0; + memset(outBuf, 0, gOutputSize); + + renderbuf->renderDone = true; + ALOGV("Output frame %d, out_size = %d", outidx, out_size); + sprintf(outputfilename, "/data/decodedframe/frame_%d.bin", outidx++); + if((fp_out = fopen(outputfilename,"wb")) == NULL) { + ALOGE("Fail to open outputfile: %s", outputfilename); + return -1; + } + + fwrite(renderbuf->rawData->data, 1,renderbuf->rawData->size,fp_out); + fflush(fp_out); + fclose(fp_out); + } + } + + testDecoder->stop(); + releaseVideoDecoder(testDecoder); + free(inBuf); + free(outBuf); + return 0; +} -- cgit v1.2.3 From ced260ce1d3916328f8b9f1cc5e7a310e29867f1 Mon Sep 17 00:00:00 2001 From: Cheng Yao Date: Tue, 7 Jan 2014 15:09:26 +0800 Subject: JPEG decode: new blitter on BYT BZ: 140105 1. Use CM for YUV->RGBA_linear blit on BYT 2. Use FOURCC instead of HAL_PIXEL_FORMAT in RenderTarget.pixel_format 3. Implement scaling output for BYT 4. Implement header-only parsing to optimize boundaryMode decode 5. Let libjpeg enable/disable available capabilities 5. 
Implement 422h->nv12+nv21+yv12+yuy2 blit (for usb-camera) Change-Id: Ie3df2707134533968721e942bbc81440e37efe0a Signed-off-by: Cheng Yao --- imagedecoder/Android.mk | 27 +- imagedecoder/ImageDecoderTrace.h | 5 - imagedecoder/JPEGBlitter.cpp | 50 +- imagedecoder/JPEGBlitter.h | 35 +- imagedecoder/JPEGBlitter_gen.cpp | 1142 ++++++++++++++++++++------ imagedecoder/JPEGBlitter_img.cpp | 35 +- imagedecoder/JPEGCommon.h | 73 +- imagedecoder/JPEGCommon_Gen.h | 1 - imagedecoder/JPEGCommon_Img.h | 1 - imagedecoder/JPEGDecoder.cpp | 898 ++++++++++++++------ imagedecoder/JPEGDecoder.h | 64 +- imagedecoder/JPEGDecoder_gen.cpp | 294 +++++-- imagedecoder/JPEGDecoder_img.cpp | 40 +- imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp | 592 +++++++------ imagedecoder/JPEGDecoder_libjpeg_wrapper.h | 26 +- imagedecoder/JPEGParser.cpp | 104 ++- imagedecoder/JPEGParser.h | 129 ++- imagedecoder/libjpeg_cm_genx.isa | Bin 0 -> 53770 bytes imagedecoder/test/testdecode.cpp | 833 ++++++++++++------- 19 files changed, 3119 insertions(+), 1230 deletions(-) create mode 100644 imagedecoder/libjpeg_cm_genx.isa diff --git a/imagedecoder/Android.mk b/imagedecoder/Android.mk index 70928e7..153d594 100644 --- a/imagedecoder/Android.mk +++ b/imagedecoder/Android.mk @@ -11,7 +11,6 @@ LOCAL_SRC_FILES += \ LOCAL_C_INCLUDES += \ $(LOCAL_PATH) \ $(TARGET_OUT_HEADERS)/libva \ - $(TARGET_OUT_HEADERS)/libmix_videovpp LOCAL_COPY_HEADERS_TO := libjpegdec @@ -29,19 +28,29 @@ LOCAL_SHARED_LIBRARIES += \ libhardware LOCAL_LDLIBS += -lpthread -LOCAL_CFLAGS += -Wno-multichar +LOCAL_CFLAGS += -Wno-multichar -DLOG_TAG=\"ImageDecoder\" +LOCAL_CFLAGS += -DLOG_NDEBUG=0 ifeq ($(TARGET_BOARD_PLATFORM),baytrail) +GPGPU_OBJ_NAME := libjpeg_cm_genx.isa +GPGPU_OBJS += $(PRODUCT_OUT)/system/lib/$(GPGPU_OBJ_NAME) +LOCAL_C_INCLUDES += $(TARGET_OUT_HEADERS)/ufo LOCAL_SRC_FILES += JPEGBlitter_gen.cpp LOCAL_SRC_FILES += JPEGDecoder_gen.cpp +LOCAL_C_INCLUDES += $(TOP)/vendor/intel/hardware/PRIVATE/ufo/inc +LOCAL_CFLAGS += -Wno-non-virtual-dtor -DGFXGEN +LOCAL_LDFLAGS += -L$(INTEL_CM_RUNTIME)/lib/x86/ -l:igfxcmrt32.so +$(GPGPU_OBJS): + cp $(LOCAL_PATH)/$(GPGPU_OBJ_NAME) $@ else LOCAL_SRC_FILES += JPEGBlitter_img.cpp LOCAL_SRC_FILES += JPEGDecoder_img.cpp endif - LOCAL_MODULE:= libjpegdec LOCAL_MODULE_TAGS := optional +$(LOCAL_MODULE): $(GPGPU_OBJS) + include $(BUILD_SHARED_LIBRARY) ifeq ($(TARGET_BOARD_PLATFORM),baytrail) @@ -53,7 +62,6 @@ LOCAL_SRC_FILES += \ LOCAL_C_INCLUDES += \ $(LOCAL_PATH) \ $(TARGET_OUT_HEADERS)/libva \ - $(TARGET_OUT_HEADERS)/libmix_videovpp LOCAL_SHARED_LIBRARIES += \ libcutils \ @@ -66,6 +74,7 @@ LOCAL_SHARED_LIBRARIES += \ LOCAL_LDLIBS += -lpthread LOCAL_CFLAGS += -Wno-multichar +LOCAL_CFLAGS += -DLOG_NDEBUG=0 LOCAL_MODULE:= testjpegdec LOCAL_MODULE_TAGS := optional @@ -78,12 +87,15 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES += \ JPEGDecoder_libjpeg_wrapper.cpp +ifeq ($(TARGET_BOARD_PLATFORM),baytrail) +LOCAL_CFLAGS += -DGFXGEN +endif + LOCAL_C_INCLUDES += \ $(LOCAL_PATH) \ $(call include-path-for, jpeg) \ $(TARGET_OUT_HEADERS)/libva \ $(TARGET_OUT_HEADERS)/libjpegdec \ - $(TARGET_OUT_HEADERS)/libmix_videovpp LOCAL_COPY_HEADERS_TO := libjpeg_hw @@ -94,12 +106,15 @@ LOCAL_SHARED_LIBRARIES += \ libcutils \ libutils \ liblog \ + libva \ + libva-android \ libjpegdec \ libhardware LOCAL_LDLIBS += -lpthread -LOCAL_CFLAGS += -Wno-multichar +LOCAL_CFLAGS += -Wno-multichar -DLOG_TAG=\"ImageDecoder\" LOCAL_CFLAGS += -DUSE_INTEL_JPEGDEC +LOCAL_CFLAGS += -DLOG_NDEBUG=0 LOCAL_MODULE:= libjpeg_hw LOCAL_MODULE_TAGS := optional diff --git 
a/imagedecoder/ImageDecoderTrace.h b/imagedecoder/ImageDecoderTrace.h index 466b606..a3dadc0 100644 --- a/imagedecoder/ImageDecoderTrace.h +++ b/imagedecoder/ImageDecoderTrace.h @@ -50,11 +50,6 @@ TraceImageDecoder(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) #else // for Android OS -#ifdef LOG_TAG -#undef LOG_TAG -#endif -#define LOG_TAG "ImageDecoder" - #ifdef LOG_NDEBUG #undef LOG_NDEBUG #endif diff --git a/imagedecoder/JPEGBlitter.cpp b/imagedecoder/JPEGBlitter.cpp index cb1e917..f6f0f95 100644 --- a/imagedecoder/JPEGBlitter.cpp +++ b/imagedecoder/JPEGBlitter.cpp @@ -1,6 +1,5 @@ /* INTEL CONFIDENTIAL * Copyright (c) 2013 Intel Corporation. All rights reserved. -* Copyright (c) Imagination Technologies Limited, UK * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -25,64 +24,29 @@ * Yao Cheng * */ -//#define LOG_NDEBUG 0 #include #include #include "JPEGBlitter.h" #include "JPEGDecoder.h" - #ifdef NDEBUG #undef NDEBUG #endif #include -//#define LOG_TAG "JPEGBlitter" -JpegBlitter::JpegBlitter() +JpegBlitter::JpegBlitter(VADisplay display, VAConfigID vpCfgId, VAContextID vpCtxId) :mDecoder(NULL), - mConfigId(VA_INVALID_ID), - mContextId(VA_INVALID_ID) + mDisplay(display), + mConfigId(vpCfgId), + mContextId(vpCtxId), + mPrivate(NULL), + mInitialized(false) { - // empty } JpegBlitter::~JpegBlitter() { - if (mDecoder) { - destroyContext(); - } + deinit(); } -void JpegBlitter::destroyContext() -{ - if (mDecoder == NULL) - return; - - Mutex::Autolock autoLock(mLock); - if (mDecoder) { - vaDestroyContext(mDecoder->mDisplay, mContextId); - mContextId = VA_INVALID_ID; - vaDestroyConfig(mDecoder->mDisplay, mConfigId); - mConfigId = VA_INVALID_ID; - mDecoder = NULL; - } -} - -void JpegBlitter::setDecoder(JpegDecoder &decoder) -{ - destroyContext(); - Mutex::Autolock autoLock(mLock); - mDecoder = &decoder; - VAConfigAttrib vpp_attrib; - VAStatus st; - vpp_attrib.type = VAConfigAttribRTFormat; - vpp_attrib.value = VA_RT_FORMAT_YUV420; - st = vaCreateConfig(mDecoder->mDisplay, VAProfileNone, - VAEntrypointVideoProc, - &vpp_attrib, - 1, &mConfigId); - assert(st == VA_STATUS_SUCCESS); - st = vaCreateContext(mDecoder->mDisplay, mConfigId, 1920, 1080, 0, NULL, 0, &mContextId); - assert(st == VA_STATUS_SUCCESS); -} diff --git a/imagedecoder/JPEGBlitter.h b/imagedecoder/JPEGBlitter.h index b9fcc08..9f828fe 100644 --- a/imagedecoder/JPEGBlitter.h +++ b/imagedecoder/JPEGBlitter.h @@ -1,6 +1,5 @@ /* INTEL CONFIDENTIAL * Copyright (c) 2013 Intel Corporation. All rights reserved. 
-* Copyright (c) Imagination Technologies Limited, UK * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -29,25 +28,49 @@ #ifndef JPEG_BLITTER_H #define JPEG_BLITTER_H -#include #include "JPEGCommon.h" #include +#include +#include +#include +#include + +using namespace android; class JpegDecoder; +typedef void* BlitEvent; class JpegBlitter { public: - JpegBlitter(); + JpegBlitter(VADisplay display, VAConfigID vpCfgId, VAContextID vpCtxId); virtual ~JpegBlitter(); - virtual void setDecoder(JpegDecoder &decoder); - virtual JpegDecodeStatus blit(RenderTarget &src, RenderTarget &dst); + virtual void init(JpegDecoder &dec); + virtual void deinit(); + virtual JpegDecodeStatus blit(RenderTarget &src, RenderTarget &dst, int scale_factor); + virtual JpegDecodeStatus getRgbaTile(RenderTarget &src, + uint8_t *sysmem, + int left, int top, int width, int height, int scale_factor); + virtual JpegDecodeStatus blitToLinearRgba(RenderTarget &src, + uint8_t *sysmem, + uint32_t width, uint32_t height, + BlitEvent &event, int scale_factor); + virtual JpegDecodeStatus blitToCameraSurfaces(RenderTarget &src, + buffer_handle_t dst_nv12, + buffer_handle_t dst_yuy2, + uint8_t *dst_nv21, + uint8_t *dst_yv12, + uint32_t width, uint32_t height, + BlitEvent &event); + virtual void syncBlit(BlitEvent &event); private: mutable Mutex mLock; - virtual void destroyContext(); JpegDecoder *mDecoder; + VADisplay mDisplay; VAConfigID mConfigId; VAContextID mContextId; + void *mPrivate; + bool mInitialized; }; #endif diff --git a/imagedecoder/JPEGBlitter_gen.cpp b/imagedecoder/JPEGBlitter_gen.cpp index b1167d3..e819883 100644 --- a/imagedecoder/JPEGBlitter_gen.cpp +++ b/imagedecoder/JPEGBlitter_gen.cpp @@ -1,6 +1,5 @@ /* INTEL CONFIDENTIAL * Copyright (c) 2013 Intel Corporation. All rights reserved. 
-* Copyright (c) Imagination Technologies Limited, UK * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -25,32 +24,45 @@ * Yao Cheng * */ -//#define LOG_NDEBUG 0 #include "JPEGBlitter.h" #include "JPEGCommon_Gen.h" #include "JPEGDecoder.h" - +#include #include #include #include "ImageDecoderTrace.h" +#include #ifdef NDEBUG #undef NDEBUG #endif - #include +#define NV12_INTERMEDIATE 0 +#define PRE_INIT_CM 1 +#define BLIT_METHOD_CM 1 // 0 for VA+GpuCopy method, 1 for pure CM method +#define DUMP_RGBA 0 + +#define CM_KERNEL_FUNC_NAME yuv_tiled_to_rgba_linear + #define JD_CHECK(err, label) \ if (err) { \ - ETRACE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ + ETRACE("%s::%d: failed: %d", __FUNCTION__, __LINE__, err); \ goto label; \ } #define JD_CHECK_RET(err, label, retcode) \ if (err) { \ status = retcode; \ - ETRACE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ + ETRACE("%s::%d: failed: %d", __FUNCTION__, __LINE__, err); \ + goto label; \ + } + +#define JD_CM_CHECK_RET(err, label, retcode) \ + if (err) { \ + status = retcode; \ + ETRACE("CM %s::%d: failed: 0x%08x", __FUNCTION__, __LINE__, err); \ goto label; \ } @@ -59,11 +71,13 @@ const VAProcColorStandardType fourcc2ColorStandard(uint32_t fourcc) switch(fourcc) { case VA_FOURCC_NV12: case VA_FOURCC_YUY2: - case VA_FOURCC_422H: - case VA_FOURCC_422V: + case VA_FOURCC_UYVY: + case VA_FOURCC('4','0','0','P'): case VA_FOURCC_411P: case VA_FOURCC_411R: case VA_FOURCC_IMC3: + case VA_FOURCC_422H: + case VA_FOURCC_422V: case VA_FOURCC_444P: case VA_FOURCC_YV12: return VAProcColorStandardBT601; @@ -72,246 +86,7 @@ const VAProcColorStandardType fourcc2ColorStandard(uint32_t fourcc) } } -void write_to_file(const char *file, const VAImage *pImg, const uint8_t *pSrc) -{ - FILE *fp = fopen(file, "wb"); - if (!fp) { - return; - } - const uint8_t *pY, *pU, *pV, *pYUYV, *pRGBA, *pUV; - float h_samp_factor, v_samp_factor; - int row, col; - char fourccstr[5]; - VTRACE("Dumping %s buffer to %s", fourcc2str(fourccstr, pImg->format.fourcc), file); - switch (pImg->format.fourcc) { - case VA_FOURCC_IMC3: - h_samp_factor = 1; - v_samp_factor = 0.5; - break; - case VA_FOURCC_422H: - h_samp_factor = 0.5; - v_samp_factor = 1; - break; - case VA_FOURCC_444P: - h_samp_factor = 1; - v_samp_factor = 1; - break; - case VA_FOURCC_YUY2: - { - pYUYV = pSrc + pImg->offsets[0]; - VTRACE("YUY2 output width %u stride %u", pImg->width, pImg->pitches[0]); - for (row = 0; row < pImg->height; ++row) { - fwrite(pYUYV, 2, pImg->width, fp); - pYUYV += pImg->pitches[0]; - } - } - fclose(fp); - return; - case VA_FOURCC_NV12: - { - pY = pSrc + pImg->offsets[0]; - pUV = pSrc + pImg->offsets[1]; - VTRACE("NV12 output width %u stride %u, %u", pImg->width, pImg->pitches[0], pImg->pitches[1]); - for (row = 0; row < pImg->height; ++row) { - fwrite(pY, 1, pImg->width, fp); - pY += pImg->pitches[0]; - } - for (row = 0; row < pImg->height/2; ++row) { - fwrite(pUV, 1, pImg->width, fp); - pUV += pImg->pitches[1]; - } - } - fclose(fp); - return; - case VA_FOURCC_RGBA: - case VA_FOURCC_BGRA: - case VA_FOURCC_ARGB: - case VA_FOURCC('A', 'B', 'G', 'R'): - { - pRGBA = pSrc + pImg->offsets[0]; - VTRACE("RGBA output width %u stride %u", pImg->width, pImg->pitches[0]); - for (row = 0; row < pImg->height; ++row) { - fwrite(pRGBA, 4, pImg->width, fp); - pRGBA += pImg->pitches[0]; - } - } - fclose(fp); - return; - default: - // non-supported - { - char fourccstr[5]; - ETRACE("%s: Not-supported 
input YUV format", fourcc2str(fourccstr, pImg->format.fourcc)); - } - return; - } - pY = pSrc + pImg->offsets[0]; - pU = pSrc + pImg->offsets[1]; - pV = pSrc + pImg->offsets[2]; - // Y - for (row = 0; row < pImg->height; ++row) { - fwrite(pY, 1, pImg->width, fp); - pY += pImg->pitches[0]; - } - // U - for (row = 0; row < pImg->height * v_samp_factor; ++row) { - fwrite(pU, 1, pImg->width * h_samp_factor, fp); - pU += pImg->pitches[1]; - } - // V - for (row = 0; row < pImg->height * v_samp_factor; ++row) { - fwrite(pV, 1, pImg->width * h_samp_factor, fp); - pV += pImg->pitches[2]; - } - fclose(fp); -} - -static void write_to_YUY2(uint8_t *pDst, - uint32_t dst_w, - uint32_t dst_h, - uint32_t dst_stride, - const VAImage *pImg, - const uint8_t *pSrc) -{ - const uint8_t *pY, *pU, *pV; - float h_samp_factor, v_samp_factor; - int row, col; - char fourccstr[5]; - uint32_t copy_w = (dst_w < pImg->width)? dst_w: pImg->width; - uint32_t copy_h = (dst_h < pImg->height)? dst_h: pImg->height; - switch (pImg->format.fourcc) { - case VA_FOURCC_IMC3: - h_samp_factor = 0.5; - v_samp_factor = 0.5; - break; - case VA_FOURCC_422H: - h_samp_factor = 0.5; - v_samp_factor = 1; - break; - case VA_FOURCC_444P: - h_samp_factor = 1; - v_samp_factor = 1; - break; - default: - // non-supported - ETRACE("%s to YUY2: Not-supported input YUV format", fourcc2str(fourccstr, pImg->format.fourcc)); - return; - } - pY = pSrc + pImg->offsets[0]; - pU = pSrc + pImg->offsets[1]; - pV = pSrc + pImg->offsets[2]; - for (row = 0; row < copy_h; ++row) { - for (col = 0; col < copy_w; ++col) { - // Y - *(pDst + 2 * col) = *(pY + col); - uint32_t actual_col = h_samp_factor * col; - if (col % 2 == 1) { - // U - *(pDst + 2 * col + 1) = *(pU + actual_col); - } - else { - // V - *(pDst + 2 * col + 1) = *(pV + actual_col); - } - } - pDst += dst_stride; - pY += pImg->pitches[0]; - uint32_t actual_row = row * v_samp_factor; - pU = pSrc + pImg->offsets[1] + actual_row * pImg->pitches[1]; - pV = pSrc + pImg->offsets[2] + actual_row * pImg->pitches[2]; - } -} - -static void dumpSurface(const char* filename, VADisplay display, VASurfaceID surface) -{ - VAStatus st; - VAImage img; - uint8_t *buf; - st = vaDeriveImage(display, surface, &img); - if (st) { - ETRACE("vaDeriveImage failed with %d", st); - return; - } - uint32_t in_fourcc = img.format.fourcc; - VTRACE("Start dumping %s surface to %s", fourcc2str(NULL, in_fourcc), filename); - st = vaMapBuffer(display, img.buf, (void **)&buf); - if (st) { - ETRACE("vaMapBuffer failed with %d", st); - vaDestroyImage(display, img.image_id); - return; - } - VTRACE("start write_to_file"); - write_to_file(filename, &img, buf); - vaUnmapBuffer(display, img.buf); - vaDestroyImage(display, img.image_id); -} - -static void dumpGallocBuffer(const char* filename, - buffer_handle_t handle, - int width, - int height, - uint32_t fourcc) -{ - // NOT IMPLEMENTED -} - - -static JpegDecodeStatus swBlit(VADisplay display, VAContextID context, - VASurfaceID in_surf, VARectangle *in_rect, uint32_t in_fourcc, - VASurfaceID out_surf, VARectangle *out_rect, uint32_t out_fourcc) -{ - assert(out_fourcc == VA_FOURCC_YUY2); - assert((in_fourcc == VA_FOURCC_IMC3) || (in_fourcc == VA_FOURCC_422H) || (in_fourcc == VA_FOURCC_444P)); - VAStatus st; - char str[10]; - JpegDecodeStatus status; - VAImage in_img, out_img; - in_img.image_id = VA_INVALID_ID; - in_img.buf = VA_INVALID_ID; - out_img.image_id = VA_INVALID_ID; - out_img.buf = VA_INVALID_ID; - uint8_t *in_buf, *out_buf; - in_buf = out_buf = NULL; - st = vaDeriveImage(display, in_surf, 
&in_img); - JD_CHECK_RET(st, cleanup, JD_BLIT_FAILURE); - st = vaDeriveImage(display, out_surf, &out_img); - JD_CHECK_RET(st, cleanup, JD_BLIT_FAILURE); - st = vaMapBuffer(display, in_img.buf, (void **)&in_buf); - JD_CHECK_RET(st, cleanup, JD_BLIT_FAILURE); - st = vaMapBuffer(display, out_img.buf, (void **)&out_buf); - JD_CHECK_RET(st, cleanup, JD_BLIT_FAILURE); - VTRACE("%s in: %s, %ux%u, size %u, offset=%u,%u,%u, pitch=%u,%u,%u", __FUNCTION__, - fourcc2str(NULL, in_fourcc), - in_img.width, - in_img.height, - in_img.data_size, - in_img.offsets[0], in_img.offsets[1], in_img.offsets[2], - in_img.pitches[0], in_img.pitches[1], in_img.pitches[2]); - VTRACE("%s out: %s, %ux%u, size %u, offset=%u,%u,%u, pitch=%u,%u,%u", __FUNCTION__, - fourcc2str(NULL, out_fourcc), - out_img.width, - out_img.height, - out_img.data_size, - out_img.offsets[0], out_img.offsets[1], out_img.offsets[2], - out_img.pitches[0], out_img.pitches[1], out_img.pitches[2]); - write_to_YUY2(out_buf, out_img.width, out_img.height, out_img.pitches[0], &in_img, in_buf); - vaUnmapBuffer(display, in_img.buf); - vaUnmapBuffer(display, out_img.buf); - vaDestroyImage(display, in_img.image_id); - vaDestroyImage(display, out_img.image_id); - VTRACE("%s Finished SW CSC %s=>%s", __FUNCTION__, fourcc2str(str, in_fourcc), fourcc2str(str + 5, out_fourcc)); - return JD_SUCCESS; - -cleanup: - ETRACE("%s failed to do swBlit %s=>%s", __FUNCTION__, fourcc2str(str, in_fourcc), fourcc2str(str + 5, out_fourcc)); - if (in_buf != NULL) vaUnmapBuffer(display, in_img.buf); - if (out_buf != NULL) vaUnmapBuffer(display, out_img.buf); - if (in_img.image_id != VA_INVALID_ID) vaDestroyImage(display, in_img.image_id); - if (out_img.image_id != VA_INVALID_ID) vaDestroyImage(display, out_img.image_id); - return status; -} - -static JpegDecodeStatus hwBlit(VADisplay display, VAContextID context, +static JpegDecodeStatus vaVppBlit(VADisplay display, VAContextID context, VASurfaceID in_surf, VARectangle *in_rect, uint32_t in_fourcc, VASurfaceID out_surf, VARectangle *out_rect, uint32_t out_fourcc) { @@ -324,9 +99,6 @@ static JpegDecodeStatus hwBlit(VADisplay display, VAContextID context, nsecs_t t1, t2; memset(&vpp_param, 0, sizeof(VAProcPipelineParameterBuffer)); -#if PRE_TOUCH_SURFACE - //zeroSurfaces(display, &out_surf, 1); -#endif t1 = systemTime(); vpp_param.surface = in_surf; vpp_param.output_region = out_rect; @@ -371,9 +143,9 @@ static JpegDecodeStatus hwBlit(VADisplay display, VAContextID context, JD_CHECK_RET(vpp_status, cleanup, JD_BLIT_FAILURE); t2 = systemTime(); VTRACE("Finished HW CSC %s(%d,%d,%u,%u)=>%s(%d,%d,%u,%u) for %f ms", - fourcc2str(str, in_fourcc), + fourcc2str(in_fourcc, str), in_rect->x, in_rect->y, in_rect->width, in_rect->height, - fourcc2str(str + 5, out_fourcc), + fourcc2str(out_fourcc, str + 5), out_rect->x, out_rect->y, out_rect->width, out_rect->height, ns2us(t2 - t1)/1000.0); @@ -388,38 +160,175 @@ static JpegDecodeStatus vaBlit(VADisplay display, VAContextID context, VASurfaceID in_surf, VARectangle *in_rect, uint32_t in_fourcc, VASurfaceID out_surf, VARectangle *out_rect, uint32_t out_fourcc) { + char fourccstr[10]; + ALOGD("%s, in %s, out %s", __FUNCTION__, fourcc2str(in_fourcc, fourccstr), fourcc2str(out_fourcc, fourccstr + 5)); if (((in_fourcc == VA_FOURCC_422H) || + (in_fourcc == VA_FOURCC_444P) || + (in_fourcc == VA_FOURCC_IMC3) || + (in_fourcc == VA_FOURCC_411P) || + (in_fourcc == VA_FOURCC_422V) || (in_fourcc == VA_FOURCC_NV12) || (in_fourcc == VA_FOURCC_YUY2) || + (in_fourcc == VA_FOURCC_UYVY) || (in_fourcc == 
VA_FOURCC_YV12) || + (in_fourcc == VA_FOURCC_BGRA) || (in_fourcc == VA_FOURCC_RGBA)) && ((out_fourcc == VA_FOURCC_422H) || + (out_fourcc == VA_FOURCC_444P) || + (out_fourcc == VA_FOURCC_IMC3) || + (out_fourcc == VA_FOURCC_411P) || + (out_fourcc == VA_FOURCC_422V) || (out_fourcc == VA_FOURCC_NV12) || (out_fourcc == VA_FOURCC_YV12) || (out_fourcc == VA_FOURCC_YUY2) || + (out_fourcc == VA_FOURCC_UYVY) || + (out_fourcc == VA_FOURCC_BGRA) || (out_fourcc == VA_FOURCC_RGBA))) { - return hwBlit(display, context, in_surf, in_rect, in_fourcc, + return vaVppBlit(display, context, in_surf, in_rect, in_fourcc, out_surf, out_rect, out_fourcc); } else { - return swBlit(display, context, in_surf, in_rect, in_fourcc, - out_surf, out_rect, out_fourcc); + return JD_INPUT_FORMAT_UNSUPPORTED; + } +} + +static CmDevice *pDev = NULL; +static CmProgram *pProgram = NULL; +static CmKernel *pKernel = NULL; +static Mutex cmLock; +void JpegBlitter::init(JpegDecoder &dec) +{ + if (!mInitialized) { + Mutex::Autolock autoLock(mLock); + if (!mInitialized) { + mDecoder = &dec; +#if PRE_INIT_CM + nsecs_t t1, t2; + t1 = t2 = systemTime(); +#if BLIT_METHOD_CM +#define ISA_FILE "/system/lib/libjpeg_cm_genx.isa" + if (!pDev || !pProgram) { + VTRACE("%s waiting for cm lock", __FUNCTION__); + Mutex::Autolock autoCmLock(cmLock); + VTRACE("%s got cm lock", __FUNCTION__); + if (!pDev || !pProgram) { + ITRACE("%s CM is not initialized yet, pre-init it", __FUNCTION__); + UINT ver; + INT result; + FILE* pIsaFile = NULL; + int codeSize; + BYTE* pIsaBytes = NULL; + result = CreateCmDevice(pDev, ver, mDisplay); + if (result != CM_SUCCESS) { + ETRACE("%s CreateCmDevice failed: %d", __FUNCTION__, result); + VTRACE("%s release cm lock", __FUNCTION__); + abort(); + } + + pIsaFile = fopen(ISA_FILE, "rb"); + if (pIsaFile==NULL) { + ETRACE("%s fopen failed", __FUNCTION__); + DestroyCmDevice(pDev); + VTRACE("%s release cm lock", __FUNCTION__); + abort(); + } + fseek (pIsaFile, 0, SEEK_END); + codeSize = ftell (pIsaFile); + rewind(pIsaFile); + if (codeSize==0) { + ETRACE("%s codesize failed", __FUNCTION__); + DestroyCmDevice(pDev); + fclose(pIsaFile); + VTRACE("%s release cm lock", __FUNCTION__); + abort(); + } + pIsaBytes = (BYTE*) malloc(codeSize); + if (pIsaBytes==NULL) { + ETRACE("%s malloc failed", __FUNCTION__); + DestroyCmDevice(pDev); + fclose(pIsaFile); + abort(); + } + if (fread(pIsaBytes, 1, codeSize, pIsaFile) != codeSize) { + ETRACE("%s fread failed", __FUNCTION__); + free(pIsaBytes); + DestroyCmDevice(pDev); + fclose(pIsaFile); + VTRACE("%s release cm lock", __FUNCTION__); + abort(); + } + fclose(pIsaFile); + pIsaFile = NULL; + + result = pDev->LoadProgram(pIsaBytes, codeSize, pProgram); + if (result != CM_SUCCESS) { + ETRACE("%s LoadProgram failed: %d", __FUNCTION__, result); + free(pIsaBytes); + DestroyCmDevice(pDev); + VTRACE("%s release cm lock", __FUNCTION__); + abort(); + } + free(pIsaBytes); + pIsaBytes = NULL; + + t2 = systemTime(); + VTRACE("%s CM pre-init succeded, took %.2f ms", __FUNCTION__, (t2-t1)/1000000.0); + } + VTRACE("%s release cm lock", __FUNCTION__); + } +#else + if (!pDev) { + ITRACE("%s CM is not initialized yet, pre-init it", __FUNCTION__); + UINT ver; + INT result; + result = CreateCmDevice(pDev, ver, mDisplay); + if (result != CM_SUCCESS || !pDev) { + ETRACE("%s CreateCmDevice returns %d", __FUNCTION__, result); + abort(); + } + t2 = systemTime(); + VTRACE("%s CM pre-init succeded, took %.2f ms", __FUNCTION__, (t2-t1)/1000000.0); + } +#endif +#endif + mInitialized = true; + } } } -JpegDecodeStatus 
JpegBlitter::blit(RenderTarget &src, RenderTarget &dst) +void JpegBlitter::deinit() { + if (mInitialized) { + Mutex::Autolock autoLock(mLock); + if (mInitialized) { +#if PRE_INIT_CM +#if BLIT_METHOD_CM + //if (pIsaBytes && pProgram && pDev) { + // free(pIsaBytes); + // pDev->DestroyProgram(pProgram); + // DestroyCmDevice(pDev); + //} +#endif +#endif + mInitialized = false; + } + } +} + +JpegDecodeStatus JpegBlitter::blit(RenderTarget &src, RenderTarget &dst, int scale_factor) +{ + assert(mInitialized); if (mDecoder == NULL) return JD_UNINITIALIZED; JpegDecodeStatus st; uint32_t src_fourcc, dst_fourcc; char tmp[10]; - src_fourcc = pixelFormat2Fourcc(src.pixel_format); - dst_fourcc = pixelFormat2Fourcc(dst.pixel_format); + src_fourcc = src.pixel_format; + dst_fourcc = dst.pixel_format; VASurfaceID src_surf = mDecoder->getSurfaceID(src); if (src_surf == VA_INVALID_ID) { - ETRACE("%s invalid src %s target", __FUNCTION__, fourcc2str(NULL, src_fourcc)); + ETRACE("%s invalid src %s target", __FUNCTION__, fourcc2str(src_fourcc)); return JD_INVALID_RENDER_TARGET; } VASurfaceID dst_surf = mDecoder->getSurfaceID(dst); @@ -432,10 +341,721 @@ JpegDecodeStatus JpegBlitter::blit(RenderTarget &src, RenderTarget &dst) } } - VTRACE("%s blitting from %s to %s", __FUNCTION__, fourcc2str(tmp, src_fourcc), fourcc2str(tmp + 5, dst_fourcc)); + VTRACE("%s blitting from %s to %s", __FUNCTION__, fourcc2str(src_fourcc, tmp), fourcc2str(dst_fourcc, tmp + 5)); st = vaBlit(mDecoder->mDisplay, mContextId, src_surf, &src.rect, src_fourcc, dst_surf, &dst.rect, dst_fourcc); return st; } +static JpegDecodeStatus blitToLinearRgba_va_gpucopy(JpegDecoder *decoder, + VADisplay dp, VAContextID ctx, RenderTarget &src, + uint8_t *sysmem, uint32_t width, uint32_t height, int scale_factor) +{ + CmQueue *pQueue = NULL; + CmSurface2D *pSurf= NULL; + CmEvent *pEvent = NULL; + INT result; + UINT ver; + RenderTarget target; + VASurfaceID surf; + nsecs_t t1, t2, t3, t4; + target.type = RenderTarget::INTERNAL_BUF; + target.pixel_format = VA_FOURCC_RGBA; + target.handle = generateHandle(); + target.width = aligned_width(width, SURF_TILING_Y); + target.height = aligned_height(height, SURF_TILING_Y); + target.stride = aligned_width(width, SURF_TILING_Y); + target.rect.x = target.rect.y = 0; + target.rect.width = width; + target.rect.height = height; + VASurfaceID src_surf = decoder->getSurfaceID(src); + if (src_surf == VA_INVALID_ID) { + ETRACE("%s invalid src %s target", __FUNCTION__, fourcc2str(src.pixel_format)); + return JD_INVALID_RENDER_TARGET; + } + JpegDecodeStatus st = decoder->createSurfaceFromRenderTarget(target, &surf); + if (st != JD_SUCCESS || surf == VA_INVALID_ID) { + ETRACE("%s failed to create surface for RGBA linear target", __FUNCTION__); + return JD_RESOURCE_FAILURE; + } + st = vaBlit(dp, ctx, src_surf, &src.rect, src.pixel_format, + surf, &target.rect, target.pixel_format); + if (st != JD_SUCCESS) { + ETRACE("%s failed to VA blit to RGBA", __FUNCTION__); + return JD_RESOURCE_FAILURE; + } + +#if DUMP_RGBA + uint8_t *data; + uint32_t offsets[3]; + uint32_t pitches[3]; + JpegDecoder::MapHandle hnd = decoder->mapData(target, (void**)&data, offsets, pitches); + assert(hnd); + char fname[128]; + sprintf(fname, "/sdcard/%dx%d.rgba", target.stride, target.height); + FILE *fdump = fopen(fname, "wb"); + assert(fdump); + fwrite(data, 4, target.height * target.stride, fdump); + fclose(fdump); + decoder->unmapData(target, hnd); +#endif + + if (st) { + ETRACE("%s: failed to blit to RGBA linear", __FUNCTION__); + 
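+        // Defensive re-check of the vaBlit status: release the intermediate
+        // RGBA render target created above before reporting the blit failure.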
decoder->destroySurface(target); + return JD_BLIT_FAILURE; + } + + t1 = systemTime(); +#if PRE_INIT_CM +#else + result = CreateCmDevice(pDev, ver, dp); + if (result != CM_SUCCESS || !pDev) { + ETRACE("%s CmCreateSurface2D returns %d", __FUNCTION__, result); + return JD_BLIT_FAILURE; + } +#endif + result = pDev->CreateSurface2D(surf, pSurf); + if (result != CM_SUCCESS || !pSurf) { + ETRACE("%s CmCreateSurface2D returns %d", __FUNCTION__, result); + DestroyCmDevice(pDev ); + return JD_BLIT_FAILURE; + } + result = pDev->CreateQueue( pQueue); + if (result != CM_SUCCESS || !pQueue) { + ETRACE("%s CmCreateQueue returns %d", __FUNCTION__, result); + pDev->DestroySurface(pSurf); + DestroyCmDevice( pDev ); + return JD_BLIT_FAILURE; + } + t2 = systemTime(); + result = pQueue->EnqueueCopyGPUToCPU(pSurf, sysmem, pEvent); + if (result != CM_SUCCESS) { + ETRACE("%s CmEnqueueCopyGPUToCPU returns %d", __FUNCTION__, result); + pDev->DestroySurface(pSurf); + DestroyCmDevice( pDev ); + return JD_BLIT_FAILURE; + } + t3 = systemTime(); + result = pDev->DestroySurface(pSurf); + if (result != CM_SUCCESS) { + WTRACE("%s CmDestroySurface returns %d", __FUNCTION__, result); + } +#if PRE_INIT_CM + assert(pDev); +#else + result = DestroyCmDevice(pDev); + if (result != CM_SUCCESS) { + WTRACE("%s DestroyCmDevice failed %d", __FUNCTION__, result); + } +#endif + t4 = systemTime(); + st = decoder->destroySurface(target); + if (st) { + WTRACE("%s: failed to destroy VA surface", __FUNCTION__); + } + ITRACE("%s: cm GpuCopy took %.2f+%.2f+%.2f ms", __FUNCTION__, + (t2 - t1)/1000000.0, + (t3 - t2)/1000000.0, + (t4 - t3)/1000000.0); + return st; +} + +JpegDecodeStatus JpegBlitter::getRgbaTile(RenderTarget &src, + uint8_t *sysmem, + int left, int top, int width, int height, int scale_factor) +{ +#define ISA_FILE "/system/lib/libjpeg_cm_genx.isa" +#define CM_GPU_TASK_WIDTH 8 +#define CM_GPU_TASK_HEIGHT 8 + VASurfaceID srcVaId; + + srcVaId = mDecoder->getSurfaceID(src); + JpegDecodeStatus status = JD_SUCCESS; + uint32_t aligned_w = width;//aligned_width(width, SURF_TILING_Y); + uint32_t aligned_h = height;//aligned_height(height, SURF_TILING_Y); + + CmThreadSpace *pThreadSpace = NULL; + CmTask *pKernelArray = NULL; + CmQueue *pQueue = NULL; + CmSurface2D *pInSurf= NULL; + SurfaceIndex *pInSurfId = NULL; + CmBufferUP *pOutBuf = NULL; + SurfaceIndex *pOutBufId = NULL; + CmEvent *pEvent = NULL; + UINT ver; + int threadswidth, threadsheight; + INT result; + DWORD dwTimeOutMs = -1; + uint32_t cm_in_fourcc; + threadswidth = aligned_w/CM_GPU_TASK_WIDTH; + threadsheight = aligned_h/CM_GPU_TASK_HEIGHT; + nsecs_t t1, t2, t3, t4, t5, t6, t7; + VTRACE("%s before holding cm lock", __FUNCTION__); + Mutex::Autolock autoLock(cmLock); + VTRACE("%s got cm lock", __FUNCTION__); + t1 = t2 = t3 = t4 = t5 = t6 = t7 = systemTime(); + +#if PRE_INIT_CM + assert(pDev && pProgram); +#else + FILE* pIsaFile = NULL; + int codeSize; + BYTE* pIsaBytes = NULL; + result = CreateCmDevice(pDev, ver, dp); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + + pIsaFile = fopen(ISA_FILE, "rb"); + if (pIsaFile==NULL) { + ETRACE("%s fopen failed", __FUNCTION__); + DestroyCmDevice(pDev); + VTRACE("%s release cm lock", __FUNCTION__); + return JD_BLIT_FAILURE; + } + fseek (pIsaFile, 0, SEEK_END); + codeSize = ftell (pIsaFile); + rewind(pIsaFile); + if (codeSize==0) { + ETRACE("%s codesize failed", __FUNCTION__); + DestroyCmDevice(pDev); + fclose(pIsaFile); + VTRACE("%s release cm lock", __FUNCTION__); + return JD_BLIT_FAILURE; + } + pIsaBytes = (BYTE*) malloc(codeSize); + 
if (pIsaBytes==NULL) { + ETRACE("%s malloc failed", __FUNCTION__); + DestroyCmDevice(pDev); + fclose(pIsaFile); + VTRACE("%s release cm lock", __FUNCTION__); + return JD_BLIT_FAILURE; + } + if (fread(pIsaBytes, 1, codeSize, pIsaFile) != codeSize) { + ETRACE("%s fread failed", __FUNCTION__); + free(pIsaFile); + fclose(pIsaFile); + DestroyCmDevice(pDev); + VTRACE("%s release cm lock", __FUNCTION__); + return JD_BLIT_FAILURE; + } + fclose(pIsaFile); + pIsaFile = NULL; + + result = pDev->LoadProgram(pIsaBytes, codeSize, pProgram); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + free(pIsaBytes); + pIsaBytes = NULL; + VTRACE("%s cm init succeded", __FUNCTION__); +#endif + + t2 = systemTime(); + // create thread space + result = pDev->CreateKernel(pProgram, CM_KERNEL_FUNCTION(yuv_tiled_to_rgba_tile), pKernel); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + + result = pDev->CreateSurface2D(srcVaId, pInSurf); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pInSurf->GetIndex(pInSurfId); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + // create bufferUp from dst ptr + result = pDev->CreateBufferUP(aligned_w * aligned_h * 4, sysmem, pOutBuf); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pOutBuf->GetIndex(pOutBufId); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + + result = pDev->CreateQueue( pQueue); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pDev->CreateThreadSpace(threadswidth, threadsheight, pThreadSpace); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pKernel->SetThreadCount( threadswidth* threadsheight ); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + // enqueue csc + pKernel->SetKernelArg(0,sizeof(SurfaceIndex),pInSurfId); + pKernel->SetKernelArg(1,sizeof(SurfaceIndex),pOutBufId); + pKernel->SetKernelArg(2,sizeof(int),&left); + pKernel->SetKernelArg(3,sizeof(int),&top); + pKernel->SetKernelArg(4,sizeof(int),&aligned_w); + pKernel->SetKernelArg(5,sizeof(int),&aligned_h); + + cm_in_fourcc = src.pixel_format; + + pKernel->SetKernelArg(6,sizeof(uint32_t),&cm_in_fourcc); + result = pDev->CreateTask(pKernelArray); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pKernelArray->AddKernel (pKernel); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pQueue->Enqueue(pKernelArray, pEvent, pThreadSpace); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + // wait kernel finish + t3 = systemTime(); + result = pEvent->WaitForTaskFinished(dwTimeOutMs); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + //event = NULL;//(BlitEvent)pEvent; + t4 = systemTime(); + +cleanup: + // destroy thread space/house cleaning + if (pOutBuf) pDev->DestroyBufferUP(pOutBuf); + t5 = systemTime(); + if (pInSurf) pDev->DestroySurface(pInSurf); + t6 = systemTime(); + if (pKernelArray) pDev->DestroyTask(pKernelArray); + if (pThreadSpace) pDev->DestroyThreadSpace(pThreadSpace); + if (pKernel) pDev->DestroyKernel(pKernel); +#if PRE_INIT_CM +#else + if (pProgram) pDev->DestroyProgram(pProgram); + if (pDev) DestroyCmDevice(pDev); +#endif + t7 = systemTime(); + + VTRACE("%s blit with CM %ux%u took %.2f + %.2f + %.2f + %.2f + %.2f + %.2f ms", __FUNCTION__, + width, height, + (t2 - t1)/1000000.0, + (t3 - t2)/1000000.0, + (t4 - t3)/1000000.0, + (t5 - t4)/1000000.0, + (t6 - t5)/1000000.0, + (t7 - t6)/1000000.0); + VTRACE("%s release cm lock", __FUNCTION__); + return status; +} + + +static JpegDecodeStatus blitToLinearRgba_cm(JpegDecoder *decoder, + VADisplay dp, VAContextID ctx, RenderTarget &src, 
uint8_t *sysmem, uint32_t width, uint32_t height, + BlitEvent &event, int scale_factor) +{ +#define ISA_FILE "/system/lib/libjpeg_cm_genx.isa" +#define CM_GPU_TASK_WIDTH 32 +#define CM_GPU_TASK_HEIGHT 8 + VASurfaceID srcVaId; + Mutex::Autolock autoLock(cmLock); + + srcVaId = decoder->getSurfaceID(src); + JpegDecodeStatus status = JD_SUCCESS; + uint32_t aligned_in_w = aligned_width(width, SURF_TILING_Y); + uint32_t aligned_in_h = aligned_height(height, SURF_TILING_Y); + uint32_t aligned_out_w = aligned_width(width/scale_factor, SURF_TILING_Y); + uint32_t aligned_out_h = aligned_height(height/scale_factor, SURF_TILING_Y); + +#if NV12_INTERMEDIATE + RenderTarget nv12_target; + VASurfaceID nv12_surf_id; + VASurfaceID nv12_surf; + nv12_target.type = RenderTarget::INTERNAL_BUF; + nv12_target.pixel_format = VA_FOURCC_NV12; + nv12_target.handle = generateHandle(); + nv12_target.width = aligned_in_w; + nv12_target.height = aligned_in_h; + nv12_target.stride = aligned_in_w; + nv12_target.rect.x = nv12_target.rect.y = 0; + nv12_target.rect.width = width; + nv12_target.rect.height = height; + status = decoder->createSurfaceFromRenderTarget(nv12_target, &nv12_surf_id); + if (status != JD_SUCCESS || nv12_surf_id == VA_INVALID_ID) { + ETRACE("%s failed to create surface for NV12 target", __FUNCTION__); + return JD_RESOURCE_FAILURE; + } + vaBlit(dp, ctx, srcVaId, &src.rect, src.pixel_format, + nv12_surf_id, &nv12_target.rect, VA_FOURCC_NV12); + srcVaId = nv12_surf_id; +#endif + + CmThreadSpace *pThreadSpace = NULL; + CmTask *pKernelArray = NULL; + CmQueue *pQueue = NULL; + CmSurface2D *pInSurf= NULL; + SurfaceIndex *pInSurfId = NULL; + CmBufferUP *pOutBuf = NULL; + SurfaceIndex *pOutBufId = NULL; + CmEvent *pEvent = NULL; + UINT ver; + int threadswidth, threadsheight; + INT result; + DWORD dwTimeOutMs = -1; + uint32_t cm_in_fourcc; + threadswidth = aligned_in_w/CM_GPU_TASK_WIDTH; + threadsheight = aligned_in_h/CM_GPU_TASK_HEIGHT; + nsecs_t t1, t2, t3, t4, t5, t6, t7, t8, t9, t10; + VTRACE("%s before holding cm lock", __FUNCTION__); + VTRACE("%s got cm lock", __FUNCTION__); + t1 = t2 = t3 = t4 = t5 = t6 = t7 = t8 = t9 = t10 = systemTime(); + +#if PRE_INIT_CM + assert(pDev && pProgram); +#else + FILE* pIsaFile = NULL; + int codeSize; + BYTE* pIsaBytes = NULL; + result = CreateCmDevice(pDev, ver, dp); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + + pIsaFile = fopen(ISA_FILE, "rb"); + if (pIsaFile==NULL) { + ETRACE("%s fopen failed", __FUNCTION__); + DestroyCmDevice(pDev); + VTRACE("%s release cm lock", __FUNCTION__); + return JD_BLIT_FAILURE; + } + fseek (pIsaFile, 0, SEEK_END); + codeSize = ftell (pIsaFile); + rewind(pIsaFile); + if (codeSize==0) { + ETRACE("%s codesize failed", __FUNCTION__); + DestroyCmDevice(pDev); + fclose(pIsaFile); + VTRACE("%s release cm lock", __FUNCTION__); + return JD_BLIT_FAILURE; + } + pIsaBytes = (BYTE*) malloc(codeSize); + if (pIsaBytes==NULL) { + ETRACE("%s malloc failed", __FUNCTION__); + DestroyCmDevice(pDev); + fclose(pIsaFile); + VTRACE("%s release cm lock", __FUNCTION__); + return JD_BLIT_FAILURE; + } + if (fread(pIsaBytes, 1, codeSize, pIsaFile) != codeSize) { + ETRACE("%s fread failed", __FUNCTION__); + free(pIsaFile); + fclose(pIsaFile); + DestroyCmDevice(pDev); + VTRACE("%s release cm lock", __FUNCTION__); + return JD_BLIT_FAILURE; + } + fclose(pIsaFile); + pIsaFile = NULL; + + result = pDev->LoadProgram(pIsaBytes, codeSize, pProgram); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + free(pIsaBytes); + pIsaBytes = NULL; + VTRACE("%s cm init succeded", 
__FUNCTION__); +#endif + + // create thread space + result = pDev->CreateKernel(pProgram, CM_KERNEL_FUNCTION(CM_KERNEL_FUNC_NAME), pKernel); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + + VTRACE("Creating CmSurface from VASurface %d", srcVaId); + t2 = systemTime(); + result = pDev->CreateSurface2D(srcVaId, pInSurf); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + + result = pInSurf->GetIndex(pInSurfId); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + // create bufferUp from dst ptr + VTRACE("CmSurfaceID got"); + t3 = systemTime(); + result = pDev->CreateBufferUP(aligned_out_w * aligned_out_h * 4, sysmem, pOutBuf); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pOutBuf->GetIndex(pOutBufId); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + t4 = systemTime(); + result = pDev->CreateQueue( pQueue); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pDev->CreateThreadSpace(threadswidth, threadsheight, pThreadSpace); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pKernel->SetThreadCount( threadswidth* threadsheight ); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + // enqueue csc + pKernel->SetKernelArg(0,sizeof(SurfaceIndex),pInSurfId); + pKernel->SetKernelArg(1,sizeof(SurfaceIndex),pOutBufId); + pKernel->SetKernelArg(2,sizeof(int),&aligned_out_w); +#if NV12_INTERMEDIATE + cm_in_fourcc = VA_FOURCC_NV12; +#else + cm_in_fourcc = src.pixel_format; +#endif + pKernel->SetKernelArg(3,sizeof(uint32_t),&cm_in_fourcc); + pKernel->SetKernelArg(4,sizeof(int), &scale_factor); + result = pDev->CreateTask(pKernelArray); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pKernelArray->AddKernel (pKernel); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pQueue->Enqueue(pKernelArray, pEvent, pThreadSpace); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + // wait kernel finish + t5 = systemTime(); + result = pEvent->WaitForTaskFinished(dwTimeOutMs); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + event = NULL;//(BlitEvent)pEvent; + t6 = systemTime(); + +cleanup: +#if NV12_INTERMEDIATE + if (nv12_surf_id != VA_INVALID_ID) decoder->destroySurface(nv12_target); +#endif + // destroy thread space/house cleaning + if (pOutBuf) pDev->DestroyBufferUP(pOutBuf); + t7 = systemTime(); + if (pInSurf) pDev->DestroySurface(pInSurf); + t8 = systemTime(); + if (pKernelArray) pDev->DestroyTask(pKernelArray); + if (pThreadSpace) pDev->DestroyThreadSpace(pThreadSpace); + if (pKernel) pDev->DestroyKernel(pKernel); +#if PRE_INIT_CM +#else + if (pProgram) pDev->DestroyProgram(pProgram); + if (pDev) DestroyCmDevice(pDev); +#endif + t9 = systemTime(); + + VTRACE("%s blit with CM %ux%u(%dx) took %.2f + %.2f + %.2f + %.2f + %.2f + %.2f + %.2f + %.2f ms", __FUNCTION__, + width, height, scale_factor, + (t2 - t1)/1000000.0, + (t3 - t2)/1000000.0, + (t4 - t3)/1000000.0, + (t5 - t4)/1000000.0, + (t6 - t5)/1000000.0, + (t7 - t6)/1000000.0, + (t8 - t7)/1000000.0, + (t9 - t8)/1000000.0); + VTRACE("%s release cm lock", __FUNCTION__); + return status; +} + +JpegDecodeStatus JpegBlitter::blitToLinearRgba(RenderTarget &src, + uint8_t *sysmem, + uint32_t width, uint32_t height, + BlitEvent &event, int scale_factor) +{ + Mutex::Autolock autoLock(mDecoder->mLock); +#if BLIT_METHOD_CM + return blitToLinearRgba_cm(mDecoder, mDecoder->mDisplay, mContextId, src, sysmem, width, height, event, scale_factor); +#else + return blitToLinearRgba_va_gpucopy(mDecoder, mDecoder->mDisplay, mContextId, src, sysmem, width, height, scale_factor); 
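+    // Both strategies land the pixels in caller-provided system memory. With
+    // BLIT_METHOD_CM, `sysmem` is bound directly as a CmBufferUP and one GPU
+    // kernel does the tiled-YUV read, 1/scale_factor downscale and linear
+    // RGBA write; the VA+GpuCopy fallback first color-converts into an
+    // intermediate tiled RGBA VASurface with vaBlit() and then detiles it
+    // into `sysmem` via EnqueueCopyGPUToCPU.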
+#endif +} + +JpegDecodeStatus JpegBlitter::blitToCameraSurfaces(RenderTarget &src, + buffer_handle_t dst_nv12, + buffer_handle_t dst_yuy2, + uint8_t *dst_nv21, + uint8_t *dst_yv12, + uint32_t width, uint32_t height, + BlitEvent &event) +{ +#define CM_GPU_TASK_WIDTH 32 +#define CM_GPU_TASK_HEIGHT 8 + VASurfaceID srcVaId, nv12_surf_id, yuy2_surf_id; + srcVaId = nv12_surf_id = yuy2_surf_id = VA_INVALID_ID; + srcVaId = mDecoder->getSurfaceID(src); + JpegDecodeStatus status = JD_SUCCESS; + uint32_t aligned_w = aligned_width(width, SURF_TILING_Y); + uint32_t aligned_h = aligned_height(height, SURF_TILING_Y); + + CmThreadSpace *pThreadSpace = NULL; + CmTask *pKernelArray = NULL; + CmQueue *pQueue = NULL; + CmSurface2D *pInSurf= NULL; + SurfaceIndex *pInSurfId = NULL; + CmSurface2D *pOutNV12Surf= NULL; + SurfaceIndex *pOutNV12SurfId = NULL; + CmSurface2D *pOutYUY2Surf= NULL; + SurfaceIndex *pOutYUY2SurfId = NULL; + CmBufferUP *pOutNV21Surf = NULL; + SurfaceIndex *pOutNV21SurfId = NULL; + CmBufferUP *pOutYV12Surf = NULL; + SurfaceIndex *pOutYV12SurfId = NULL; + uint8_t do_nv21, do_yv12; + do_nv21 = do_yv12 = 0; + CmEvent *pEvent = NULL; + RenderTarget nv12_target, yuy2_target; + UINT ver; + int threadswidth, threadsheight; + INT result; + DWORD dwTimeOutMs = -1; + uint32_t cm_in_fourcc; + threadswidth = aligned_w/CM_GPU_TASK_WIDTH; + threadsheight = aligned_h/CM_GPU_TASK_HEIGHT; + nsecs_t t1, t2, t3, t4, t5; + t1 = t2 = t3 = t4 = t5 = systemTime(); + VTRACE("%s before holding cm lock", __FUNCTION__); + Mutex::Autolock autoLock(cmLock); + +#if PRE_INIT_CM + assert(pDev && pProgram); +#else + result = CreateCmDevice(pDev, ver, dp); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + + pIsaFile = fopen(ISA_FILE, "rb"); + if (pIsaFile==NULL) { + ETRACE("%s fopen failed", __FUNCTION__); + DestroyCmDevice(pDev); + return JD_BLIT_FAILURE; + } + fseek (pIsaFile, 0, SEEK_END); + codeSize = ftell (pIsaFile); + rewind(pIsaFile); + if (codeSize==0) { + ETRACE("%s codesize failed", __FUNCTION__); + DestroyCmDevice(pDev); + fclose(pIsaFile); + return JD_BLIT_FAILURE; + } + pIsaBytes = (BYTE*) malloc(codeSize); + if (pIsaBytes==NULL) { + ETRACE("%s malloc failed", __FUNCTION__); + DestroyCmDevice(pDev); + fclose(pIsaFile); + return JD_BLIT_FAILURE; + } + if (fread(pIsaBytes, 1, codeSize, pIsaFile) != codeSize) { + ETRACE("%s fread failed", __FUNCTION__); + free(pIsaFile); + fclose(pIsaFile); + DestroyCmDevice(pDev); + return JD_BLIT_FAILURE; + } + fclose(pIsaFile); + pIsaFile = NULL; + + result = pDev->LoadProgram(pIsaBytes, codeSize, pProgram); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + free(pIsaBytes); + pIsaBytes = NULL; + VTRACE("%s cm init succeded", __FUNCTION__); +#endif + + t2 = systemTime(); + // create thread space + result = pDev->CreateKernel(pProgram, CM_KERNEL_FUNCTION(yuv422h_tiled_to_camera_surfaces), pKernel); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + + // src surface + result = pDev->CreateSurface2D(srcVaId, pInSurf); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pInSurf->GetIndex(pInSurfId); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + // dst nv12 + yuy2 + nv12_target.handle = (int)dst_nv12; + nv12_target.type = RenderTarget::ANDROID_GRALLOC; + nv12_target.height = aligned_height(height, SURF_TILING_Y); + nv12_target.width = aligned_width(width, SURF_TILING_Y); + nv12_target.pixel_format = VA_FOURCC_NV12; + nv12_target.stride = nv12_target.width; + nv12_target.rect.x = nv12_target.rect.y = 0; + nv12_target.rect.width = nv12_target.width; 
+ nv12_target.rect.height = nv12_target.height; + mDecoder->createSurfaceFromRenderTarget(nv12_target, &nv12_surf_id); + yuy2_target.handle = (int)dst_yuy2; + yuy2_target.type = RenderTarget::ANDROID_GRALLOC; + yuy2_target.height = aligned_height(height, SURF_TILING_Y); + yuy2_target.width = aligned_width(width, SURF_TILING_Y); + yuy2_target.pixel_format = VA_FOURCC_YUY2; + yuy2_target.stride = yuy2_target.width * 2; + yuy2_target.rect.x = yuy2_target.rect.y = 0; + yuy2_target.rect.width = yuy2_target.width; + yuy2_target.rect.height = yuy2_target.height; + mDecoder->createSurfaceFromRenderTarget(yuy2_target, &yuy2_surf_id); + result = pDev->CreateSurface2D(nv12_surf_id, pOutNV12Surf); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pOutNV12Surf->GetIndex(pOutNV12SurfId); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pDev->CreateSurface2D(yuy2_surf_id, pOutYUY2Surf); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pOutYUY2Surf->GetIndex(pOutYUY2SurfId); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + // dst nv21 + if (dst_nv21) { + result = pDev->CreateBufferUP(aligned_w * aligned_h * 3 / 2, dst_nv21, pOutNV21Surf); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pOutNV21Surf->GetIndex(pOutNV21SurfId); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + do_nv21 = 1; + } + else { + pOutNV21SurfId = pInSurfId; + do_nv21 = 0; + } + // dst yv12 + if (dst_yv12) { + result = pDev->CreateBufferUP(aligned_w * aligned_h * 3 / 2, dst_yv12, pOutYV12Surf); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pOutYV12Surf->GetIndex(pOutYV12SurfId); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + do_yv12 = 1; + } + else { + pOutYV12SurfId = pInSurfId; + do_yv12 = 0; + } + result = pDev->CreateQueue( pQueue); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pDev->CreateThreadSpace(threadswidth, threadsheight, pThreadSpace); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pKernel->SetThreadCount( threadswidth* threadsheight ); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + // enqueue csc + pKernel->SetKernelArg(0,sizeof(SurfaceIndex),pInSurfId); + pKernel->SetKernelArg(1,sizeof(SurfaceIndex),pOutNV12SurfId); + pKernel->SetKernelArg(2,sizeof(SurfaceIndex),pOutYUY2SurfId); + pKernel->SetKernelArg(3,sizeof(SurfaceIndex),pOutNV21SurfId); + pKernel->SetKernelArg(4,sizeof(SurfaceIndex),pOutYV12SurfId); + pKernel->SetKernelArg(5,sizeof(int),&aligned_h); + pKernel->SetKernelArg(6,sizeof(int),&aligned_w); + pKernel->SetKernelArg(7,sizeof(uint8_t),&do_nv21); + pKernel->SetKernelArg(8,sizeof(uint8_t),&do_yv12); + result = pDev->CreateTask(pKernelArray); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pKernelArray->AddKernel (pKernel); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + result = pQueue->Enqueue(pKernelArray, pEvent, pThreadSpace); + JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + // wait kernel finish + t3 = systemTime(); + //result = pEvent->WaitForTaskFinished(dwTimeOutMs); + //JD_CM_CHECK_RET(result, cleanup, JD_BLIT_FAILURE); + event = (BlitEvent)pEvent; + t4 = systemTime(); + +cleanup: + // destroy thread space/house cleaning + if (pOutYV12Surf) pDev->DestroyBufferUP(pOutYV12Surf); + if (pOutNV21Surf) pDev->DestroyBufferUP(pOutNV21Surf); + if (pOutYUY2Surf) pDev->DestroySurface(pOutYUY2Surf); + if (pOutNV12Surf) pDev->DestroySurface(pOutNV12Surf); + if (pInSurf) pDev->DestroySurface(pInSurf); + if (nv12_surf_id != VA_INVALID_ID) 
mDecoder->destroySurface(nv12_target); + if (yuy2_surf_id != VA_INVALID_ID) mDecoder->destroySurface(yuy2_target); + if (pKernelArray) pDev->DestroyTask(pKernelArray); + if (pThreadSpace) pDev->DestroyThreadSpace(pThreadSpace); + if (pKernel) pDev->DestroyKernel(pKernel); +#if PRE_INIT_CM +#else + if (pIsaBytes) free(pIsaBytes); + if (pIsaFile) fclose(pIsaFile); + if (pProgram) pDev->DestroyProgram(pProgram); + if (pDev) DestroyCmDevice(pDev); +#endif + t5 = systemTime(); + VTRACE("%s blit with CM took %.2f + %.2f + %.2f + %.2f ms", __FUNCTION__, + (t2 - t1)/1000000.0, + (t3 - t2)/1000000.0, + (t4 - t3)/1000000.0, + (t5 - t4)/1000000.0); + return status; +} + +void JpegBlitter::syncBlit(BlitEvent &event) +{ + nsecs_t now = systemTime(); + DWORD dwTimeOutMs = -1; + CmEvent *pEvent = (CmEvent*)event; + UINT64 executionTime; + if (event == NULL) + return; + INT result = pEvent->WaitForTaskFinished(dwTimeOutMs); + if (result != CM_SUCCESS) { + ETRACE("%s: Failed to sync blit event", __FUNCTION__); + } + else { + event = NULL; + VTRACE("%s: syncBlit took %.2f ms", __FUNCTION__, (systemTime()-now)/1000000.0); + } +} + diff --git a/imagedecoder/JPEGBlitter_img.cpp b/imagedecoder/JPEGBlitter_img.cpp index d56ba98..d714a14 100644 --- a/imagedecoder/JPEGBlitter_img.cpp +++ b/imagedecoder/JPEGBlitter_img.cpp @@ -1,6 +1,5 @@ /* INTEL CONFIDENTIAL * Copyright (c) 2013 Intel Corporation. All rights reserved. -* Copyright (c) Imagination Technologies Limited, UK * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -28,8 +27,40 @@ #include "JPEGBlitter.h" -JpegDecodeStatus JpegBlitter::blit(RenderTarget &src, RenderTarget &dst) +JpegDecodeStatus JpegBlitter::blit(RenderTarget &src, RenderTarget &dst, int scale_factor) { return JD_OUTPUT_FORMAT_UNSUPPORTED; } +JpegDecodeStatus JpegBlitter::blitToLinearRgba(RenderTarget &src, uint8_t *sysmem, uint32_t width, uint32_t height, BlitEvent &event, int scale_factor) +{ + return JD_OUTPUT_FORMAT_UNSUPPORTED; +} +JpegDecodeStatus JpegBlitter::getRgbaTile(RenderTarget &src, + uint8_t *sysmem, + int left, int top, int width, int height, int scale_factor) +{ + return JD_OUTPUT_FORMAT_UNSUPPORTED; +} +void JpegBlitter::init(JpegDecoder& /*dec*/) +{ + // Do nothing +} +void JpegBlitter::deinit() +{ + // Do nothing +} +void JpegBlitter::syncBlit(BlitEvent &event) +{ + // Do nothing +} +JpegDecodeStatus JpegBlitter::blitToCameraSurfaces(RenderTarget &src, + buffer_handle_t dst_nv12, + buffer_handle_t dst_yuy2, + uint8_t *dst_nv21, + uint8_t *dst_yv12, + uint32_t width, uint32_t height, + BlitEvent &event) +{ + return JD_OUTPUT_FORMAT_UNSUPPORTED; +} diff --git a/imagedecoder/JPEGCommon.h b/imagedecoder/JPEGCommon.h index 6df6fcd..790ad35 100644 --- a/imagedecoder/JPEGCommon.h +++ b/imagedecoder/JPEGCommon.h @@ -1,6 +1,5 @@ /* INTEL CONFIDENTIAL * Copyright (c) 2013 Intel Corporation. All rights reserved. 
-* Copyright (c) Imagination Technologies Limited, UK * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -33,23 +32,66 @@ #include #include #include +#include + +using namespace android; #define JPEG_MAX_COMPONENTS 4 #define JPEG_MAX_QUANT_TABLES 4 +#define SURF_TILING_NONE 0 +#define SURF_TILING_X 1 +#define SURF_TILING_Y 2 + +extern uint32_t aligned_width(uint32_t width, int tiling); +extern uint32_t aligned_height(uint32_t height, int tiling); + +struct RenderTarget { + enum bufType{ + KERNEL_DRM, + ANDROID_GRALLOC, + INTERNAL_BUF, + USER_PTR, + }; -#define RENDERTARGET_INTERNAL_BUFFER (RenderTarget::ANDROID_GRALLOC + 1) + int width; + int height; + int stride; + bufType type; + int format; + int pixel_format; + int handle; + VARectangle rect; +}; struct JpegInfo { - // in - uint8_t *buf; - size_t bufsize; + // in: use either buf+bufsize or inputs + union { + struct { + uint8_t *buf; + uint32_t bufsize; + }; + android::Vector *inputs; + }; + bool use_vector_input; + bool need_header_only; + // internal use + uint32_t component_order; + uint32_t dqt_ind; + uint32_t dht_ind; + uint32_t scan_ind; + bool frame_marker_found; + bool soi_parsed; + bool sof_parsed; + bool dqt_parsed; + bool dht_parsed; + bool sos_parsed; + bool dri_parsed; // out uint32_t image_width; uint32_t image_height; uint32_t image_color_fourcc; - int image_pixel_format; VAPictureParameterBufferJPEGBaseline picture_param_buf; VASliceParameterBufferJPEGBaseline slice_param_buf[JPEG_MAX_COMPONENTS]; VAIQMatrixBufferJPEGBaseline qmatrix_buf; @@ -80,10 +122,13 @@ enum JpegDecodeStatus JD_BLIT_FAILURE, JD_ERROR_BITSTREAM, JD_RENDER_TARGET_BUSY, + JD_IMAGE_TOO_SMALL, + JD_INSUFFICIENT_BYTE, + JD_UNIMPLEMENTED, }; -inline char * fourcc2str(char * str, uint32_t fourcc) +inline char * fourcc2str(uint32_t fourcc, char * str = NULL) { static char tmp[5]; if (str == NULL) { @@ -104,6 +149,7 @@ inline int fourcc2VaFormat(uint32_t fourcc) case VA_FOURCC_422H: case VA_FOURCC_422V: case VA_FOURCC_YUY2: + case VA_FOURCC_UYVY: return VA_RT_FORMAT_YUV422; case VA_FOURCC_IMC3: case VA_FOURCC_YV12: @@ -112,12 +158,16 @@ inline int fourcc2VaFormat(uint32_t fourcc) case VA_FOURCC_444P: return VA_RT_FORMAT_YUV444; case VA_FOURCC_411P: + case VA_FOURCC_411R: return VA_RT_FORMAT_YUV411; + case VA_FOURCC('4','0','0','P'): + return VA_RT_FORMAT_YUV400; case VA_FOURCC_BGRA: case VA_FOURCC_ARGB: case VA_FOURCC_RGBA: return VA_RT_FORMAT_RGB32; default: + // Add if needed return -1; } } @@ -140,6 +190,10 @@ inline uint32_t sampFactor2Fourcc(int h1, int h2, int h3, int v1, int v2, int v3 v1 == 1 && v2 == 1 && v3 == 1) { return VA_FOURCC_411P; } + else if (h1 == 1 && h2 == 1 && h3 == 1 && + v1 == 4 && v2 == 1 && v3 == 1) { + return VA_FOURCC_411R; + } else if (h1 == 1 && h2 == 1 && h3 == 1 && v1 == 2 && v2 == 1 && v3 == 1) { return VA_FOURCC_422V; @@ -168,18 +222,23 @@ inline int fourcc2LumaBitsPerPixel(uint32_t fourcc) case VA_FOURCC_NV12: case VA_FOURCC_444P: case VA_FOURCC_411P: + case VA_FOURCC_411R: + case VA_FOURCC('4','0','0','P'): return 1; case VA_FOURCC_YUY2: + case VA_FOURCC_UYVY: return 2; case VA_FOURCC_BGRA: case VA_FOURCC_ARGB: case VA_FOURCC_RGBA: return 4; default: + // Add if needed return 1; } } +// Platform dependent extern int fourcc2PixelFormat(uint32_t fourcc); extern uint32_t pixelFormat2Fourcc(int pixel_format); diff --git a/imagedecoder/JPEGCommon_Gen.h b/imagedecoder/JPEGCommon_Gen.h index ce3bf08..a07098a 100644 --- a/imagedecoder/JPEGCommon_Gen.h 
+++ b/imagedecoder/JPEGCommon_Gen.h @@ -1,6 +1,5 @@ /* INTEL CONFIDENTIAL * Copyright (c) 2013 Intel Corporation. All rights reserved. -* Copyright (c) Imagination Technologies Limited, UK * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel diff --git a/imagedecoder/JPEGCommon_Img.h b/imagedecoder/JPEGCommon_Img.h index 3473d20..1b568f9 100644 --- a/imagedecoder/JPEGCommon_Img.h +++ b/imagedecoder/JPEGCommon_Img.h @@ -1,6 +1,5 @@ /* INTEL CONFIDENTIAL * Copyright (c) 2013 Intel Corporation. All rights reserved. -* Copyright (c) Imagination Technologies Limited, UK * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel diff --git a/imagedecoder/JPEGDecoder.cpp b/imagedecoder/JPEGDecoder.cpp index 5e05464..624d226 100644 --- a/imagedecoder/JPEGDecoder.cpp +++ b/imagedecoder/JPEGDecoder.cpp @@ -26,7 +26,6 @@ * Yao Cheng * */ -//#define LOG_NDEBUG 0 #include #include @@ -40,8 +39,6 @@ #endif #include -//#define LOG_TAG "ImageDecoder" - #define JPEG_MAX_SETS_HUFFMAN_TABLES 2 #define TABLE_CLASS_DC 0 @@ -67,20 +64,41 @@ typedef uint32_t Display; goto label; \ } -JpegDecoder::JpegDecoder() +static int handlectr = 0; +int generateHandle() +{ + return handlectr++; +} + +JpegDecoder::JpegDecoder(VADisplay display, VAConfigID vpCfgId, VAContextID vpCtxId, bool use_blitter) :mInitialized(false), - mDisplay(0), + mDisplay(display), mConfigId(VA_INVALID_ID), mContextId(VA_INVALID_ID), mParser(NULL), - mBlitter(NULL) + mBlitter(NULL), + mParserInitialized(false), + mDispCreated(false) { mParser = new CJPEGParse; - mBlitter = new JpegBlitter; - Display dpy; - int va_major_version, va_minor_version; - mDisplay = vaGetDisplay(&dpy); - vaInitialize(mDisplay, &va_major_version, &va_minor_version); + mBsParser = new JpegBitstreamParser; + if (!display) { + assert(vpCfgId == VA_INVALID_ID); + assert(vpCtxId == VA_INVALID_ID); + assert(use_blitter == false); + Display dpy; + int va_major_version, va_minor_version; + mDisplay = vaGetDisplay(&dpy); + vaInitialize(mDisplay, &va_major_version, &va_minor_version); + mDispCreated = true; + } + if (use_blitter) { + assert(display != NULL); + assert(vpCfgId != VA_INVALID_ID); + assert(vpCtxId != VA_INVALID_ID); + mBlitter = new JpegBlitter(display, vpCfgId,vpCtxId); + } + VTRACE("%s CTOR succeded", __FUNCTION__); } JpegDecoder::~JpegDecoder() { @@ -88,73 +106,82 @@ JpegDecoder::~JpegDecoder() WTRACE("Freeing JpegDecoder: not destroyed yet. 
Force destroy resource"); deinit(); } + if (mBlitter) + mBlitter->deinit(); delete mBlitter; - vaTerminate(mDisplay); + if (mDispCreated) + vaTerminate(mDisplay); delete mParser; + delete mBsParser; + VTRACE("%s DTOR succeded", __FUNCTION__); } JpegDecoder::MapHandle JpegDecoder::mapData(RenderTarget &target, void ** data, uint32_t * offsets, uint32_t * pitches) { - JpegDecoder::MapHandle handle; - handle.img = NULL; - handle.valid = false; + VAImage *img = NULL; VASurfaceID surf_id = getSurfaceID(target); if (surf_id != VA_INVALID_ID) { - handle.img = new VAImage(); - if (handle.img == NULL) { + img = new VAImage(); + if (img == NULL) { ETRACE("%s: create VAImage fail", __FUNCTION__); - return handle; + return 0; } VAStatus st; - st = vaDeriveImage(mDisplay, surf_id, handle.img); + st = vaDeriveImage(mDisplay, surf_id, img); if (st != VA_STATUS_SUCCESS) { - delete handle.img; - handle.img = NULL; + delete img; + img = NULL; ETRACE("%s: vaDeriveImage fail %d", __FUNCTION__, st); - return handle; + return 0; } - st = vaMapBuffer(mDisplay, handle.img->buf, data); + st = vaMapBuffer(mDisplay, img->buf, data); if (st != VA_STATUS_SUCCESS) { - vaDestroyImage(mDisplay, handle.img->image_id); - delete handle.img; - handle.img = NULL; + vaDestroyImage(mDisplay, img->image_id); + delete img; + img = NULL; ETRACE("%s: vaMapBuffer fail %d", __FUNCTION__, st); - return handle; + return 0; } - handle.valid = true; - offsets[0] = handle.img->offsets[0]; - offsets[1] = handle.img->offsets[1]; - offsets[2] = handle.img->offsets[2]; - pitches[0] = handle.img->pitches[0]; - pitches[1] = handle.img->pitches[1]; - pitches[2] = handle.img->pitches[2]; - return handle; + offsets[0] = img->offsets[0]; + offsets[1] = img->offsets[1]; + offsets[2] = img->offsets[2]; + pitches[0] = img->pitches[0]; + pitches[1] = img->pitches[1]; + pitches[2] = img->pitches[2]; + VTRACE("%s: successfully mapped RenderTarget %p, handle %d, data=%p, offsets=[%u,%u,%u], pitches=[%u,%u,%u], size=%u, %ux%u, to handle.img %p", + __FUNCTION__, &target, target.handle, *data, offsets[0], offsets[1], offsets[2], + pitches[0], pitches[1], pitches[2], img->data_size, + img->width, img->height, img); + + return (uint32_t)img; } ETRACE("%s: get Surface ID fail", __FUNCTION__); - return handle; + return 0; } void JpegDecoder::unmapData(RenderTarget &target, JpegDecoder::MapHandle maphandle) { - if (maphandle.valid == false) - return; - if (maphandle.img != NULL) { - vaUnmapBuffer(mDisplay, maphandle.img->buf); - vaDestroyImage(mDisplay, maphandle.img->image_id); - delete maphandle.img; + if (maphandle != 0) { + vaUnmapBuffer(mDisplay, ((VAImage*)maphandle)->buf); + vaDestroyImage(mDisplay, ((VAImage*)maphandle)->image_id); + VTRACE("%s deleting VAImage %p", __FUNCTION__, ((VAImage*)maphandle)); + delete ((VAImage*)maphandle); } } JpegDecodeStatus JpegDecoder::init(int w, int h, RenderTarget **targets, int num) { - if (mInitialized) + if (mInitialized) { + VTRACE("%s already initialized", __FUNCTION__); return JD_ALREADY_INITIALIZED; + } Mutex::Autolock autoLock(mLock); - mBlitter->setDecoder(*this); if (!mInitialized) { + nsecs_t now = systemTime(); mGrallocSurfaceMap.clear(); mDrmSurfaceMap.clear(); mNormalSurfaceMap.clear(); + mUserptrSurfaceMap.clear(); VAStatus st; VASurfaceID surfid; for (int i = 0; i < num; ++i) { @@ -164,6 +191,8 @@ JpegDecodeStatus JpegDecoder::init(int w, int h, RenderTarget **targets, int num __FUNCTION__, targets[i]->handle); return JD_RESOURCE_FAILURE; } + VTRACE("%s successfully created surface %u for renderTarget 
%p, handle %d", + __FUNCTION__, surfid, targets[i], targets[i]->handle); } VAConfigAttrib attrib; @@ -182,14 +211,17 @@ JpegDecodeStatus JpegDecoder::init(int w, int h, RenderTarget **targets, int num size_t gmsize = mGrallocSurfaceMap.size(); size_t dmsize = mDrmSurfaceMap.size(); size_t nmsize = mNormalSurfaceMap.size(); + size_t umsize = mUserptrSurfaceMap.size(); VASurfaceID *surfaces = new VASurfaceID[gmsize + dmsize + nmsize]; - for (size_t i = 0; i < gmsize + dmsize + nmsize; ++i) { + for (size_t i = 0; i < gmsize + dmsize + nmsize + umsize; ++i) { if (i < gmsize) surfaces[i] = mGrallocSurfaceMap.valueAt(i); else if (i < gmsize + dmsize) surfaces[i] = mDrmSurfaceMap.valueAt(i - gmsize); - else + else if (i < gmsize + dmsize + nmsize) surfaces[i] = mNormalSurfaceMap.valueAt(i - gmsize - dmsize); + else + surfaces[i] = mUserptrSurfaceMap.valueAt(i - gmsize - dmsize - nmsize); } st = vaCreateContext(mDisplay, mConfigId, w, h, @@ -202,34 +234,121 @@ JpegDecodeStatus JpegDecoder::init(int w, int h, RenderTarget **targets, int num return JD_INITIALIZATION_ERROR; } - VTRACE("vaconfig = %u, vacontext = %u", mConfigId, mContextId); + VTRACE("JpegDecoder::init took %.2f ms", (systemTime() - now)/1000000.0); mInitialized = true; } return JD_SUCCESS; } -JpegDecodeStatus JpegDecoder::blit(RenderTarget &src, RenderTarget &dst) +JpegDecodeStatus JpegDecoder::blit(RenderTarget &src, RenderTarget &dst, int scale_factor) { - return mBlitter->blit(src, dst); + if (mBlitter) { + mBlitter->init(*this); + return mBlitter->blit(src, dst, scale_factor); + } + else + return JD_BLIT_FAILURE; } -JpegDecodeStatus JpegDecoder::parse(JpegInfo &jpginfo) +JpegDecodeStatus JpegDecoder::getRgbaTile(RenderTarget &src, + uint8_t *sysmem, + int left, int top, int width, int height, int scale_factor) { - uint32_t component_order = 0 ; - uint32_t dqt_ind = 0; - uint32_t dht_ind = 0; - uint32_t scan_ind = 0; - bool frame_marker_found = false; - int i; + if (mBlitter) { + nsecs_t now = systemTime(); + mBlitter->init(*this); + nsecs_t t1 = systemTime(); + JpegDecodeStatus st = mBlitter->getRgbaTile(src, sysmem, left, top, width, height, scale_factor); + VTRACE("Decoder::%s took %.2f + %.2f ms", __FUNCTION__, + (t1-now)/1000000.0, (systemTime()-t1)/1000000.0); + return st; + } + else + return JD_BLIT_FAILURE; + +} + +JpegDecodeStatus JpegDecoder::blitToLinearRgba(RenderTarget &src, uint8_t *sysmem, uint32_t width, uint32_t height, BlitEvent &event, int scale_factor) +{ + if (mBlitter) { + nsecs_t now = systemTime(); + mBlitter->init(*this); + nsecs_t t1 = systemTime(); + JpegDecodeStatus st = mBlitter->blitToLinearRgba(src, sysmem, width, height, event, scale_factor); + VTRACE("Decoder::%s took %.2f + %.2f ms", __FUNCTION__, + (t1-now)/1000000.0, (systemTime()-t1)/1000000.0); + return st; + } + else + return JD_BLIT_FAILURE; +} + +JpegDecodeStatus JpegDecoder::blitToCameraSurfaces(RenderTarget &src, + buffer_handle_t dst_nv12, + buffer_handle_t dst_yuy2, + uint8_t *dst_nv21, + uint8_t *dst_yv12, + uint32_t width, uint32_t height, BlitEvent &event) +{ + if (mBlitter) { + nsecs_t now = systemTime(); + mBlitter->init(*this); + nsecs_t t1 = systemTime(); + JpegDecodeStatus st = mBlitter->blitToCameraSurfaces(src, dst_nv12, dst_yuy2, dst_nv21, dst_yv12, width, height, event); + VTRACE("Decoder::%s took %.2f + %.2f ms", __FUNCTION__, + (t1-now)/1000000.0, (systemTime()-t1)/1000000.0); + return st; + } + else + return JD_BLIT_FAILURE; +} - parserInitialize(mParser, jpginfo.buf, jpginfo.bufsize); +void 
JpegDecoder::syncBlit(BlitEvent &event) +{ + assert(mBlitter); + mBlitter->syncBlit(event); +} - uint8_t marker = mParser->getNextMarker(mParser); +JpegDecodeStatus JpegDecoder::parseHeader(JpegInfo &jpginfo) +{ +#define ROLLBACK_IF_FAIL(stmt) \ + do { \ + if (!(stmt)) { \ + VTRACE("%s::%d, parser failed at offset %u, remaining bytes %u, total bytes %u", \ + __FUNCTION__, __LINE__, mBsParser->getByteOffset(), mBsParser->getRemainingBytes(), \ + bufsize); \ + goto rollback; \ + } \ + } while(0) - while (marker != CODE_EOI &&( !mParser->endOfBuffer(mParser))) { + int i; + uint32_t bufsize; + if (!mParserInitialized) { + Mutex::Autolock autoLock(mLock); + if (!mParserInitialized) { + if (jpginfo.use_vector_input) + mBsParser->set(jpginfo.inputs); + else + mBsParser->set(jpginfo.buf, jpginfo.bufsize); + mParserInitialized = true; + } + } + if (jpginfo.use_vector_input) + bufsize = jpginfo.inputs->size(); + else + bufsize = jpginfo.bufsize; + + uint8_t marker; + uint32_t rollbackoff; + rollbackoff = mBsParser->getByteOffset(); + ROLLBACK_IF_FAIL(mBsParser->tryGetNextMarker(&marker)); + + while (marker != CODE_EOI && (!mBsParser->endOfBuffer())) { switch (marker) { case CODE_SOI: { - jpginfo.soi_offset = mParser->getByteOffset(mParser) - 2; + VTRACE("%s SOI at 0x%08x", __FUNCTION__, mBsParser->getByteOffset()); + jpginfo.soi_offset = mBsParser->getByteOffset() - 2; + jpginfo.soi_parsed = true; break; } // If the marker is an APP marker skip over the data @@ -249,22 +368,28 @@ JpegDecodeStatus JpegDecoder::parse(JpegInfo &jpginfo) case CODE_APP13: case CODE_APP14: case CODE_APP15: { - - uint32_t bytes_to_burn = mParser->readBytes(mParser, 2) - 2; - mParser->burnBytes(mParser, bytes_to_burn); - break; + VTRACE("%s APP %x at 0x%08x", __FUNCTION__, marker, mBsParser->getByteOffset()); + uint32_t bytes_to_burn; + ROLLBACK_IF_FAIL(mBsParser->tryReadBytes(&bytes_to_burn, 2)); + bytes_to_burn -= 2; + ROLLBACK_IF_FAIL(mBsParser->tryBurnBytes(bytes_to_burn)); + break; } // Store offset to DQT data to avoid parsing bitstream in user mode case CODE_DQT: { - if (dqt_ind < 4) { - jpginfo.dqt_byte_offset[dqt_ind] = mParser->getByteOffset(mParser) - jpginfo.soi_offset; - dqt_ind++; - uint32_t bytes_to_burn = mParser->readBytes(mParser, 2 ) - 2; - mParser->burnBytes( mParser, bytes_to_burn ); + VTRACE("%s DQT at 0x%08x", __FUNCTION__, mBsParser->getByteOffset()); + if (jpginfo.dqt_ind < 4) { + jpginfo.dqt_byte_offset[jpginfo.dqt_ind] = mBsParser->getByteOffset() - jpginfo.soi_offset; + jpginfo.dqt_ind++; + uint32_t bytes_to_burn; + ROLLBACK_IF_FAIL(mBsParser->tryReadBytes(&bytes_to_burn, 2)); + bytes_to_burn -= 2; + ROLLBACK_IF_FAIL(mBsParser->tryBurnBytes(bytes_to_burn)); } else { ETRACE("ERROR: Decoder does not support more than 4 Quant Tables\n"); - return JD_ERROR_BITSTREAM; + return JD_CODEC_UNSUPPORTED; } + jpginfo.dqt_parsed = true; break; } // Throw exception for all SOF marker other than SOF0 @@ -284,20 +409,31 @@ JpegDecodeStatus JpegDecoder::parse(JpegInfo &jpginfo) ETRACE("ERROR: unsupport SOF\n"); break; } - // Parse component information in SOF marker case CODE_SOF_BASELINE: { - frame_marker_found = true; - - mParser->burnBytes(mParser, 2); // Throw away frame header length - uint8_t sample_precision = mParser->readNextByte(mParser); + VTRACE("%s SOF_BASELINE at 0x%08x", __FUNCTION__, mBsParser->getByteOffset()); + ROLLBACK_IF_FAIL((mBsParser->getRemainingBytes() >= 10)); + jpginfo.frame_marker_found = true; + ROLLBACK_IF_FAIL(mBsParser->tryBurnBytes(2)); // Throw away frame header length
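+ // Baseline JPEG carries 8-bit samples only; the precision check below rejects anything else as JD_INPUT_FORMAT_UNSUPPORTED.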
+ uint8_t sample_precision; + ROLLBACK_IF_FAIL(mBsParser->tryReadNextByte(&sample_precision)); if (sample_precision != 8) { ETRACE("sample_precision is not supported\n"); - return JD_ERROR_BITSTREAM; + return JD_INPUT_FORMAT_UNSUPPORTED; } // Extract pic width and height - jpginfo.picture_param_buf.picture_height = mParser->readBytes(mParser, 2); - jpginfo.picture_param_buf.picture_width = mParser->readBytes(mParser, 2); - jpginfo.picture_param_buf.num_components = mParser->readNextByte(mParser); + uint32_t w, h; + ROLLBACK_IF_FAIL(mBsParser->tryReadBytes(&h, 2)); + ROLLBACK_IF_FAIL(mBsParser->tryReadBytes(&w, 2)); + ROLLBACK_IF_FAIL(mBsParser->tryReadNextByte(&jpginfo.picture_param_buf.num_components)); + jpginfo.picture_param_buf.picture_width = w; + jpginfo.picture_param_buf.picture_height = h; + VTRACE("%s pic wxh=%ux%u, %u components", __FUNCTION__, + jpginfo.picture_param_buf.picture_width, + jpginfo.picture_param_buf.picture_height, + jpginfo.picture_param_buf.num_components); + + ROLLBACK_IF_FAIL((mBsParser->getRemainingBytes() >= jpginfo.picture_param_buf.num_components * 3)); if (jpginfo.picture_param_buf.num_components > JPEG_MAX_COMPONENTS) { ETRACE("ERROR: reached max components\n"); @@ -308,110 +444,225 @@ JpegDecodeStatus JpegDecoder::parse(JpegInfo &jpginfo) VTRACE("PERFORMANCE: %ux%u JPEG will decode faster with SW\n", jpginfo.picture_param_buf.picture_width, jpginfo.picture_param_buf.picture_height); - return JD_ERROR_BITSTREAM; + return JD_IMAGE_TOO_SMALL; } uint8_t comp_ind = 0; for (comp_ind = 0; comp_ind < jpginfo.picture_param_buf.num_components; comp_ind++) { - jpginfo.picture_param_buf.components[comp_ind].component_id = mParser->readNextByte(mParser); - - uint8_t hv_sampling = mParser->readNextByte(mParser); + ROLLBACK_IF_FAIL(mBsParser->tryReadNextByte(&jpginfo.picture_param_buf.components[comp_ind].component_id)); + uint8_t hv_sampling; + ROLLBACK_IF_FAIL(mBsParser->tryReadNextByte(&hv_sampling)); jpginfo.picture_param_buf.components[comp_ind].h_sampling_factor = hv_sampling >> 4; jpginfo.picture_param_buf.components[comp_ind].v_sampling_factor = hv_sampling & 0xf; - jpginfo.picture_param_buf.components[comp_ind].quantiser_table_selector = mParser->readNextByte(mParser); + ROLLBACK_IF_FAIL(mBsParser->tryReadNextByte(&jpginfo.picture_param_buf.components[comp_ind].quantiser_table_selector)); } + jpginfo.image_width = jpginfo.picture_param_buf.picture_width; + jpginfo.image_height = jpginfo.picture_param_buf.picture_height; + jpginfo.image_color_fourcc = sampFactor2Fourcc(jpginfo.picture_param_buf.components[0].h_sampling_factor, + jpginfo.picture_param_buf.components[1].h_sampling_factor, + jpginfo.picture_param_buf.components[2].h_sampling_factor, + jpginfo.picture_param_buf.components[0].v_sampling_factor, + jpginfo.picture_param_buf.components[1].v_sampling_factor, + jpginfo.picture_param_buf.components[2].v_sampling_factor); + VTRACE("%s jpg %ux%u, fourcc=%s", + __FUNCTION__, jpginfo.image_width, jpginfo.image_height, fourcc2str(jpginfo.image_color_fourcc)); + if (!jpegColorFormatSupported(jpginfo)) { + ETRACE("%s color format not supported", fourcc2str(jpginfo.image_color_fourcc)); + return JD_INPUT_FORMAT_UNSUPPORTED; + } + jpginfo.sof_parsed = true; break; } - // Store offset to DHT data to avoid parsing bitstream in user mode case CODE_DHT: { - if (dht_ind < 4) { - jpginfo.dht_byte_offset[dht_ind] = mParser->getByteOffset(mParser) - jpginfo.soi_offset; - dht_ind++; - uint32_t bytes_to_burn = mParser->readBytes(mParser, 2) - 2; - 
mParser->burnBytes(mParser, bytes_to_burn ); + VTRACE("%s DHT at 0x%08x", __FUNCTION__, mBsParser->getByteOffset()); + if (jpginfo.dht_ind < 4) { + jpginfo.dht_byte_offset[jpginfo.dht_ind] = mBsParser->getByteOffset() - jpginfo.soi_offset; + jpginfo.dht_ind++; + uint32_t bytes_to_burn; + if (!mBsParser->tryReadBytes(&bytes_to_burn, 2)) { + VTRACE("%s failed to read 2 bytes from 0x%08x, remaining 0x%08x, total 0x%08x", + __FUNCTION__, mBsParser->getByteOffset(), + mBsParser->getRemainingBytes(), bufsize); + jpginfo.dht_ind--; + goto rollback; + } + bytes_to_burn -= 2; + if (!mBsParser->tryBurnBytes(bytes_to_burn)) { + VTRACE("%s failed to burn %x bytes from 0x%08x, remaining 0x%08x, total 0x%08x", + __FUNCTION__, bytes_to_burn, mBsParser->getByteOffset(), + mBsParser->getRemainingBytes(), bufsize); + jpginfo.dht_ind--; + goto rollback; + } } else { ETRACE("ERROR: Decoder does not support more than 4 Huff Tables\n"); return JD_ERROR_BITSTREAM; } + jpginfo.dht_parsed = true; break; } // Parse component information in SOS marker case CODE_SOS: { - mParser->burnBytes(mParser, 2); - uint32_t component_in_scan = mParser->readNextByte(mParser); + VTRACE("%s SOS at 0x%08x", __FUNCTION__, mBsParser->getByteOffset()); + ROLLBACK_IF_FAIL(mBsParser->tryBurnBytes(2)); + uint8_t component_in_scan; + ROLLBACK_IF_FAIL(mBsParser->tryReadNextByte(&component_in_scan)); uint8_t comp_ind = 0; - + ROLLBACK_IF_FAIL((mBsParser->getRemainingBytes() >= 2 * component_in_scan + 3)); for (comp_ind = 0; comp_ind < component_in_scan; comp_ind++) { - uint8_t comp_id = mParser->readNextByte(mParser); + uint8_t comp_id; + mBsParser->tryReadNextByte(&comp_id); uint8_t comp_data_ind; for (comp_data_ind = 0; comp_data_ind < jpginfo.picture_param_buf.num_components; comp_data_ind++) { if (comp_id == jpginfo.picture_param_buf.components[comp_data_ind].component_id) { - jpginfo.slice_param_buf[scan_ind].components[comp_ind].component_selector = comp_data_ind + 1; + jpginfo.slice_param_buf[jpginfo.scan_ind].components[comp_ind].component_selector = comp_data_ind + 1; break; } } - uint8_t huffman_tables = mParser->readNextByte(mParser); - jpginfo.slice_param_buf[scan_ind].components[comp_ind].dc_table_selector = huffman_tables >> 4; - jpginfo.slice_param_buf[scan_ind].components[comp_ind].ac_table_selector = huffman_tables & 0xf; + uint8_t huffman_tables; + ROLLBACK_IF_FAIL(mBsParser->tryReadNextByte(&huffman_tables)); + jpginfo.slice_param_buf[jpginfo.scan_ind].components[comp_ind].dc_table_selector = huffman_tables >> 4; + jpginfo.slice_param_buf[jpginfo.scan_ind].components[comp_ind].ac_table_selector = huffman_tables & 0xf; } - uint32_t curr_byte = mParser->readNextByte(mParser); // Ss + uint8_t curr_byte; + ROLLBACK_IF_FAIL(mBsParser->tryReadNextByte(&curr_byte)); // Ss if (curr_byte != 0) { - ETRACE("ERROR: curr_byte 0x%08x != 0\n", curr_byte); + ETRACE("ERROR: curr_byte 0x%08x (position 0x%08x) != 0\n", curr_byte, mBsParser->getByteOffset()); return JD_ERROR_BITSTREAM; } - curr_byte = mParser->readNextByte(mParser); // Se + ROLLBACK_IF_FAIL(mBsParser->tryReadNextByte(&curr_byte)); // Se if (curr_byte != 0x3f) { - ETRACE("ERROR: curr_byte 0x%08x != 0x3f\n", curr_byte); + ETRACE("ERROR: curr_byte 0x%08x (position 0x%08x) != 0x3f\n", curr_byte, mBsParser->getByteOffset()); return JD_ERROR_BITSTREAM; } - curr_byte = mParser->readNextByte(mParser); // Ah, Al + ROLLBACK_IF_FAIL(mBsParser->tryReadNextByte(&curr_byte)); // Ah, Al if (curr_byte != 0) { - ETRACE("ERROR: curr_byte 0x%08x != 0\n", curr_byte); + ETRACE("ERROR: curr_byte 
0x%08x (position 0x%08x) != 0\n", curr_byte, mBsParser->getByteOffset()); return JD_ERROR_BITSTREAM; } // Set slice control variables needed - jpginfo.slice_param_buf[scan_ind].slice_data_offset = mParser->getByteOffset(mParser) - jpginfo.soi_offset; - jpginfo.slice_param_buf[scan_ind].num_components = component_in_scan; - if (scan_ind) { + jpginfo.slice_param_buf[jpginfo.scan_ind].slice_data_offset = mBsParser->getByteOffset() - jpginfo.soi_offset; + jpginfo.slice_param_buf[jpginfo.scan_ind].num_components = component_in_scan; + jpginfo.sos_parsed = true; + if (jpginfo.scan_ind) { /* If there is more than one scan, the slice for all but the final scan should only run up to the beginning of the next scan */ - jpginfo.slice_param_buf[scan_ind - 1].slice_data_size = - (jpginfo.slice_param_buf[scan_ind].slice_data_offset - jpginfo.slice_param_buf[scan_ind - 1].slice_data_offset );; + jpginfo.slice_param_buf[jpginfo.scan_ind - 1].slice_data_size = + (jpginfo.slice_param_buf[jpginfo.scan_ind].slice_data_offset - jpginfo.slice_param_buf[jpginfo.scan_ind - 1].slice_data_offset ); } - scan_ind++; + jpginfo.scan_ind++; jpginfo.scan_ctrl_count++; // gsDXVA2Globals.uiScanCtrlCount break; } case CODE_DRI: { - uint32_t size = mParser->readBytes(mParser, 2); - jpginfo.slice_param_buf[scan_ind].restart_interval = mParser->readBytes(mParser, 2); - mParser->burnBytes(mParser, (size - 4)); + rollbackoff = mBsParser->getByteOffset() - 2; + VTRACE("%s DRI at 0x%08x", __FUNCTION__, mBsParser->getByteOffset()); + uint32_t size; + ROLLBACK_IF_FAIL(mBsParser->tryReadBytes(&size, 2)); + uint32_t ri; + ROLLBACK_IF_FAIL(mBsParser->tryReadBytes(&ri, 2)); + jpginfo.slice_param_buf[jpginfo.scan_ind].restart_interval = ri; + ROLLBACK_IF_FAIL(mBsParser->tryBurnBytes(size - 4)); + jpginfo.dri_parsed = true; break; } default: break; } + if (jpginfo.need_header_only && + jpginfo.soi_parsed && jpginfo.sos_parsed && + jpginfo.sof_parsed && jpginfo.dqt_parsed && + jpginfo.dht_parsed) { + VTRACE("%s: for header_only, we've got everything we need. 
return now", __FUNCTION__); + return JD_SUCCESS; + } + else { + VTRACE("%s: soi %d, sos %d, sof %d, dqt %d, dht %d, dri %d, remaining %u", __FUNCTION__, + jpginfo.soi_parsed, + jpginfo.sos_parsed, + jpginfo.sof_parsed, + jpginfo.dqt_parsed, + jpginfo.dht_parsed, + jpginfo.dri_parsed, + mBsParser->getRemainingBytes()); + } + rollbackoff = mBsParser->getByteOffset(); + if (!mBsParser->tryGetNextMarker(&marker)) { + VTRACE("%s: can't get next marker, offset 0x%08x, need_header_only=%d", + __FUNCTION__, + mBsParser->getByteOffset(), + jpginfo.need_header_only); + if (jpginfo.need_header_only) { + mBsParser->trySetByteOffset(rollbackoff); + return JD_INSUFFICIENT_BYTE; + } + else { + return JD_SUCCESS; + } + } + else if (marker == 0) { + VTRACE("%s: got non-marker %x at offset 0x%08x", __FUNCTION__, marker, mBsParser->getByteOffset()); + return JD_SUCCESS; + } - marker = mParser->getNextMarker(mParser); // If the EOI code is found, store the byte offset before the parsing finishes if( marker == CODE_EOI ) { - jpginfo.eoi_offset = mParser->getByteOffset(mParser); + jpginfo.eoi_offset = mBsParser->getByteOffset(); + VTRACE("%s: got EOI at 0x%08x, stop parsing now", __FUNCTION__, jpginfo.eoi_offset); + return JD_SUCCESS; } - } + return JD_SUCCESS; +rollback: + mBsParser->trySetByteOffset(rollbackoff); + return JD_INSUFFICIENT_BYTE; +} - jpginfo.quant_tables_num = dqt_ind; - jpginfo.huffman_tables_num = dht_ind; +JpegDecodeStatus JpegDecoder::parse(JpegInfo &jpginfo) +{ + if (!mParserInitialized) { + Mutex::Autolock autoLock(mLock); + if (!mParserInitialized) { + if (jpginfo.use_vector_input) + mBsParser->set(jpginfo.inputs); + else + mBsParser->set(jpginfo.buf, jpginfo.bufsize); + mParserInitialized = true; + } + } + JpegDecodeStatus st = parseHeader(jpginfo); + if (st) { + if (st != JD_INSUFFICIENT_BYTE) + ETRACE("%s header parsing failure: %d", __FUNCTION__, st); + return st; + } + if (jpginfo.need_header_only) + return JD_SUCCESS; + uint32_t bufsize; + if (jpginfo.use_vector_input) { + mBsParser->set(jpginfo.inputs); + bufsize = jpginfo.inputs->size(); + } + else { + mBsParser->set(jpginfo.buf, jpginfo.bufsize); + bufsize = jpginfo.bufsize; + } + assert(mParserInitialized); + assert (jpginfo.soi_parsed && jpginfo.sos_parsed && + jpginfo.sof_parsed && jpginfo.dqt_parsed && + jpginfo.dht_parsed); + jpginfo.quant_tables_num = jpginfo.dqt_ind; + jpginfo.huffman_tables_num = jpginfo.dht_ind; /* The slice for the last scan should run up to the end of the picture */ if (jpginfo.eoi_offset) { - jpginfo.slice_param_buf[scan_ind - 1].slice_data_size = (jpginfo.eoi_offset - jpginfo.slice_param_buf[scan_ind - 1].slice_data_offset); + jpginfo.slice_param_buf[jpginfo.scan_ind - 1].slice_data_size = (jpginfo.eoi_offset - jpginfo.slice_param_buf[jpginfo.scan_ind - 1].slice_data_offset); } else { - jpginfo.slice_param_buf[scan_ind - 1].slice_data_size = (jpginfo.bufsize - jpginfo.slice_param_buf[scan_ind - 1].slice_data_offset); + jpginfo.slice_param_buf[jpginfo.scan_ind - 1].slice_data_size = (bufsize - jpginfo.slice_param_buf[jpginfo.scan_ind - 1].slice_data_offset); } // throw AppException if SOF0 isn't found - if (!frame_marker_found) { + if (!jpginfo.frame_marker_found) { ETRACE("EEORR: Reached end of bitstream while trying to parse headers\n"); return JD_ERROR_BITSTREAM; } @@ -419,7 +670,7 @@ JpegDecodeStatus JpegDecoder::parse(JpegInfo &jpginfo) JpegDecodeStatus status = parseTableData(jpginfo); if (status != JD_SUCCESS) { ETRACE("ERROR: Parsing table data returns %d", status); - return 
JD_ERROR_BITSTREAM; + return status; } jpginfo.image_width = jpginfo.picture_param_buf.picture_width; @@ -430,81 +681,94 @@ JpegDecodeStatus JpegDecoder::parse(JpegInfo &jpginfo) jpginfo.picture_param_buf.components[0].v_sampling_factor, jpginfo.picture_param_buf.components[1].v_sampling_factor, jpginfo.picture_param_buf.components[2].v_sampling_factor); - jpginfo.image_pixel_format = fourcc2PixelFormat(jpginfo.image_color_fourcc); - VTRACE("%s jpg %ux%u, fourcc=%s, pixelformat=0x%x", - __FUNCTION__, jpginfo.image_width, jpginfo.image_height, fourcc2str(NULL, jpginfo.image_color_fourcc), - jpginfo.image_pixel_format); + VTRACE("%s jpg %ux%u, fourcc=%s", + __FUNCTION__, jpginfo.image_width, jpginfo.image_height, fourcc2str(jpginfo.image_color_fourcc)); - if (!jpegColorFormatSupported(jpginfo)) + if (!jpegColorFormatSupported(jpginfo)) { + ETRACE("%s color format not supported", fourcc2str(jpginfo.image_color_fourcc)); return JD_INPUT_FORMAT_UNSUPPORTED; + } return JD_SUCCESS; } JpegDecodeStatus JpegDecoder::createSurfaceFromRenderTarget(RenderTarget &target, VASurfaceID *surfid) { - if (target.type == RENDERTARGET_INTERNAL_BUFFER) { - JpegDecodeStatus st = createSurfaceInternal(target.width, - target.height, - target.pixel_format, - target.handle, - surfid); - if (st != JD_SUCCESS) - return st; - mNormalSurfaceMap.add(target.handle, *surfid); - VTRACE("%s added surface %u (internal buffer id %d) to SurfaceList", - __PRETTY_FUNCTION__, *surfid, target.handle); - } - else { - switch (target.type) { - case RenderTarget::KERNEL_DRM: - { - JpegDecodeStatus st = createSurfaceDrm(target.width, - target.height, - target.pixel_format, - (unsigned long)target.handle, - target.stride, - surfid); - if (st != JD_SUCCESS) - return st; - mDrmSurfaceMap.add((unsigned long)target.handle, *surfid); - VTRACE("%s added surface %u (Drm handle %d) to DrmSurfaceMap", - __PRETTY_FUNCTION__, *surfid, target.handle); - } - break; - case RenderTarget::ANDROID_GRALLOC: - { - JpegDecodeStatus st = createSurfaceGralloc(target.width, - target.height, - target.pixel_format, - (buffer_handle_t)target.handle, - target.stride, - surfid); - if (st != JD_SUCCESS) - return st; - mGrallocSurfaceMap.add((buffer_handle_t)target.handle, *surfid); - VTRACE("%s added surface %u (Gralloc handle %d) to DrmSurfaceMap", - __PRETTY_FUNCTION__, *surfid, target.handle); - } - break; - default: - return JD_RENDER_TARGET_TYPE_UNSUPPORTED; + switch (target.type) { + case RenderTarget::KERNEL_DRM: + { + JpegDecodeStatus st = createSurfaceDrm(target.width, + target.height, + target.pixel_format, + (unsigned long)target.handle, + target.stride, + surfid); + if (st != JD_SUCCESS) + return st; + mDrmSurfaceMap.add((unsigned long)target.handle, *surfid); + VTRACE("%s added surface %u (Drm handle %d) to DrmSurfaceMap", + __PRETTY_FUNCTION__, *surfid, target.handle); + } + break; + case RenderTarget::ANDROID_GRALLOC: + { + JpegDecodeStatus st = createSurfaceGralloc(target.width, + target.height, + target.pixel_format, + (buffer_handle_t)target.handle, + target.stride, + surfid); + if (st != JD_SUCCESS) + return st; + mGrallocSurfaceMap.add((buffer_handle_t)target.handle, *surfid); + VTRACE("%s added surface %u (Gralloc handle %d) to GrallocSurfaceMap", + __PRETTY_FUNCTION__, *surfid, target.handle); } + break; + case RenderTarget::INTERNAL_BUF: + { + JpegDecodeStatus st = createSurfaceInternal(target.width, + target.height, + target.pixel_format, + target.handle, + surfid); + if (st != JD_SUCCESS) + return st; + mNormalSurfaceMap.add(target.handle,
*surfid); + VTRACE("%s added surface %u (internal buffer id %d) to SurfaceList", + __PRETTY_FUNCTION__, *surfid, target.handle); + } + break; + case RenderTarget::USER_PTR: + { + JpegDecodeStatus st = createSurfaceUserptr(target.width, + target.height, + target.pixel_format, + (uint8_t*)target.handle, + surfid); + if (st != JD_SUCCESS) + return st; + mUserptrSurfaceMap.add(target.handle, *surfid); + VTRACE("%s added surface %u (userptr %d) to UserptrSurfaceMap", + __PRETTY_FUNCTION__, *surfid, target.handle); + } + break; + default: + return JD_RENDER_TARGET_TYPE_UNSUPPORTED; } return JD_SUCCESS; } -JpegDecodeStatus JpegDecoder::createSurfaceInternal(int width, int height, int pixel_format, int handle, VASurfaceID *surf_id) +JpegDecodeStatus JpegDecoder::createSurfaceInternal(int width, int height, uint32_t fourcc, int handle, VASurfaceID *surf_id) { VAStatus va_status; VASurfaceAttrib attrib; attrib.type = VASurfaceAttribPixelFormat; attrib.flags = VA_SURFACE_ATTRIB_SETTABLE; attrib.value.type = VAGenericValueTypeInteger; - uint32_t fourcc = pixelFormat2Fourcc(pixel_format); uint32_t vaformat = fourcc2VaFormat(fourcc); attrib.value.value.i = fourcc; - VTRACE("enter %s, pixel_format 0x%x, fourcc %s", __FUNCTION__, pixel_format, fourcc2str(NULL, fourcc)); + VTRACE("enter %s, fourcc 0x%x (%s)", __FUNCTION__, fourcc, fourcc2str(fourcc)); va_status = vaCreateSurfaces(mDisplay, vaformat, width, @@ -514,22 +778,48 @@ JpegDecodeStatus JpegDecoder::createSurfaceInternal(int width, int height, int p &attrib, 1); if (va_status != VA_STATUS_SUCCESS) { - ETRACE("%s: createSurface (format %u, fourcc %s) returns %d", __PRETTY_FUNCTION__, vaformat, fourcc2str(NULL, fourcc), va_status); + ETRACE("%s: createSurface (format %u, fourcc %s) returns %d", __PRETTY_FUNCTION__, vaformat, fourcc2str(fourcc), va_status); return JD_RESOURCE_FAILURE; } return JD_SUCCESS; } +JpegDecodeStatus JpegDecoder::destroySurface(RenderTarget &target) +{ + Mutex::Autolock autoLock(mLock); + VASurfaceID surf = getSurfaceID(target); + if (surf == VA_INVALID_ID) { + ETRACE("%s: failed to destroy surface type %d, handle %d", __FUNCTION__, target.type, target.handle); + return JD_INVALID_RENDER_TARGET; + } + switch(target.type) { + case RenderTarget::KERNEL_DRM: + mDrmSurfaceMap.removeItem((unsigned long)target.handle); + break; + case RenderTarget::ANDROID_GRALLOC: + mGrallocSurfaceMap.removeItem((buffer_handle_t)target.handle); + break; + case RenderTarget::INTERNAL_BUF: + mNormalSurfaceMap.removeItem(target.handle); + break; + case RenderTarget::USER_PTR: + mUserptrSurfaceMap.removeItem(target.handle); + break; + default: + break; + } + VTRACE("%s: succeeded destroying surface type %d, handle %d", __FUNCTION__, target.type, target.handle); + return JD_SUCCESS; +} + +JpegDecodeStatus JpegDecoder::destroySurface(VASurfaceID surf) +{ + return JD_UNIMPLEMENTED; +} + VASurfaceID JpegDecoder::getSurfaceID(RenderTarget &target) const { int index; - if (target.type == RENDERTARGET_INTERNAL_BUFFER) { - index = mNormalSurfaceMap.indexOfKey(target.handle); - if (index < 0) - return VA_INVALID_ID; - else - return mNormalSurfaceMap.valueAt(index); - } switch (target.type) { case RenderTarget::KERNEL_DRM: index = mDrmSurfaceMap.indexOfKey((unsigned long)target.handle); @@ -543,6 +833,18 @@ VASurfaceID JpegDecoder::getSurfaceID(RenderTarget &target) const return VA_INVALID_ID; else return mGrallocSurfaceMap.valueAt(index); + case RenderTarget::INTERNAL_BUF: + index = mNormalSurfaceMap.indexOfKey(target.handle); + if (index < 0) +
return VA_INVALID_ID; + else + return mNormalSurfaceMap.valueAt(index); + case RenderTarget::USER_PTR: + index = mUserptrSurfaceMap.indexOfKey(target.handle); + if (index < 0) + return VA_INVALID_ID; + else + return mUserptrSurfaceMap.valueAt(index); default: assert(false); } @@ -576,24 +878,39 @@ JpegDecodeStatus JpegDecoder::decode(JpegInfo &jpginfo, RenderTarget &target) VASurfaceStatus surf_status; VABufferID desc_buf[5]; uint32_t bitstream_buffer_size = 0; + uint8_t* bufaddr = NULL; uint32_t scan_idx = 0; uint32_t buf_idx = 0; uint32_t chopping = VA_SLICE_DATA_FLAG_ALL; uint32_t bytes_remaining; VASurfaceID surf_id = getSurfaceID(target); - if (surf_id == VA_INVALID_ID) + nsecs_t now = systemTime(); + if (surf_id == VA_INVALID_ID) { + ETRACE("%s render_target %p, handle %d is not initialized by JpegDecoder", __FUNCTION__, &target, target.handle); return JD_RENDER_TARGET_NOT_INITIALIZED; + } va_status = vaQuerySurfaceStatus(mDisplay, surf_id, &surf_status); - if (surf_status != VASurfaceReady) + if (surf_status != VASurfaceReady) { + ETRACE("%s render_target %p, handle %d is still busy", __FUNCTION__, &target, target.handle); return JD_RENDER_TARGET_BUSY; + } + + if (jpginfo.use_vector_input) { + bitstream_buffer_size = jpginfo.inputs->size(); + bufaddr = const_cast<uint8_t*>(jpginfo.inputs->array()); + } + else { + bitstream_buffer_size = jpginfo.bufsize; + bufaddr = jpginfo.buf; + } if (jpginfo.eoi_offset) bytes_remaining = jpginfo.eoi_offset - jpginfo.soi_offset; else - bytes_remaining = jpginfo.bufsize - jpginfo.soi_offset; + bytes_remaining = bitstream_buffer_size - jpginfo.soi_offset; + uint32_t src_offset = jpginfo.soi_offset; uint32_t cpy_row; - bitstream_buffer_size = jpginfo.bufsize;//cinfo->src->bytes_in_buffer;//1024*1024*5; Vector<VABufferID> buf_list; va_status = vaBeginPicture(mDisplay, mContextId, surf_id); @@ -601,24 +918,29 @@ JpegDecodeStatus JpegDecoder::decode(JpegInfo &jpginfo, RenderTarget &target) ETRACE("vaBeginPicture failed. va_status = 0x%x", va_status); return JD_DECODE_FAILURE; } + VTRACE("%s begin decode render target %p, handle %d", __FUNCTION__, &target, target.handle); va_status = vaCreateBuffer(mDisplay, mContextId, VAPictureParameterBufferType, sizeof(VAPictureParameterBufferJPEGBaseline), 1, &jpginfo.picture_param_buf, &desc_buf[buf_idx]); if (va_status != VA_STATUS_SUCCESS) { ETRACE("vaCreateBuffer VAPictureParameterBufferType failed. va_status = 0x%x", va_status); return JD_RESOURCE_FAILURE; } + VTRACE("%s successfully created PicParamBuf, id=%u", __FUNCTION__, desc_buf[buf_idx]); buf_list.add(desc_buf[buf_idx++]); - va_status = vaCreateBuffer(mDisplay, mContextId, VAIQMatrixBufferType, sizeof(VAIQMatrixBufferJPEGBaseline), 1, &jpginfo.qmatrix_buf, &desc_buf[buf_idx]); + va_status = vaCreateBuffer(mDisplay, mContextId, VAIQMatrixBufferType, sizeof(VAIQMatrixBufferJPEGBaseline), 1, &jpginfo.qmatrix_buf, &desc_buf[buf_idx]); if (va_status != VA_STATUS_SUCCESS) { ETRACE("vaCreateBuffer VAIQMatrixBufferType failed. va_status = 0x%x", va_status); return JD_RESOURCE_FAILURE; } + VTRACE("%s successfully created IQMatrixBuf, id=%u", __FUNCTION__, desc_buf[buf_idx]); buf_list.add(desc_buf[buf_idx++]); + va_status = vaCreateBuffer(mDisplay, mContextId, VAHuffmanTableBufferType, sizeof(VAHuffmanTableBufferJPEGBaseline), 1, &jpginfo.hufman_table_buf, &desc_buf[buf_idx]); if (va_status != VA_STATUS_SUCCESS) { ETRACE("vaCreateBuffer VAHuffmanTableBufferType failed.
va_status = 0x%x", va_status); return JD_RESOURCE_FAILURE; } + VTRACE("%s successfully created HuffmanTableBuf, id=%u", __FUNCTION__, desc_buf[buf_idx]); buf_list.add(desc_buf[buf_idx++]); do { @@ -676,16 +998,18 @@ JpegDecodeStatus JpegDecoder::decode(JpegInfo &jpginfo, RenderTarget &target) /* Get Slice Control Buffer */ va_status = vaCreateBuffer(mDisplay, mContextId, VASliceParameterBufferType, sizeof(VASliceParameterBufferJPEGBaseline) * dest_idx, 1, dest_scan_ctrl, &desc_buf[buf_idx]); if (va_status != VA_STATUS_SUCCESS) { - ETRACE("vaCreateBuffer VASliceParameterBufferType failed. va_status = 0x%x", va_status); + ETRACE("vaCreateBuffer VASliceParameterBufferType failed. va_status = 0x%x, dest_idx=%d, buf_idx=%d", va_status, dest_idx, buf_idx); return JD_RESOURCE_FAILURE; } + VTRACE("vaCreateBuffer VASliceParameterBufferType succeeded. va_status = 0x%x, dest_idx=%d, buf_idx=%d", va_status, dest_idx, buf_idx); buf_list.add(desc_buf[buf_idx++]); - va_status = vaCreateBuffer(mDisplay, mContextId, VASliceDataBufferType, bytes, 1, &jpginfo.buf[ src_offset ], &desc_buf[buf_idx]); - buf_list.add(desc_buf[buf_idx++]); + va_status = vaCreateBuffer(mDisplay, mContextId, VASliceDataBufferType, bytes, 1, bufaddr + src_offset, &desc_buf[buf_idx]); if (va_status != VA_STATUS_SUCCESS) { ETRACE("vaCreateBuffer VASliceDataBufferType (%u bytes) failed. va_status = 0x%x", bytes, va_status); return JD_RESOURCE_FAILURE; } + VTRACE("%s successfully created SliceDataBuf, id=%u", __FUNCTION__, desc_buf[buf_idx]); + buf_list.add(desc_buf[buf_idx++]); va_status = vaRenderPicture( mDisplay, mContextId, desc_buf, buf_idx); if (va_status != VA_STATUS_SUCCESS) { ETRACE("vaRenderPicture failed. va_status = 0x%x", va_status); @@ -706,6 +1030,9 @@ JpegDecodeStatus JpegDecoder::decode(JpegInfo &jpginfo, RenderTarget &target) ETRACE("vaEndPicture failed. 
va_status = 0x%x", va_status); return JD_DECODE_FAILURE; } + + VTRACE("%s successfully ended picture, rendertarget %p, handle %d", __FUNCTION__, &target, target.handle); + VTRACE("JpegDecoder decode took %.2f ms", (systemTime() - now)/1000000.0); return JD_SUCCESS; } void JpegDecoder::deinit() @@ -719,6 +1046,7 @@ void JpegDecoder::deinit() size_t gralloc_size = mGrallocSurfaceMap.size(); size_t drm_size = mDrmSurfaceMap.size(); size_t internal_surf_size = mNormalSurfaceMap.size(); + size_t up_surf_size = mUserptrSurfaceMap.size(); for (size_t i = 0; i < gralloc_size; ++i) { VASurfaceID surf_id = mGrallocSurfaceMap.valueAt(i); vaDestroySurfaces(mDisplay, &surf_id, 1); @@ -731,117 +1059,141 @@ void JpegDecoder::deinit() VASurfaceID surf_id = mNormalSurfaceMap.valueAt(i); vaDestroySurfaces(mDisplay, &surf_id, 1); } + for (size_t i = 0; i < up_surf_size; ++i) { + VASurfaceID surf_id = mUserptrSurfaceMap.valueAt(i); + vaDestroySurfaces(mDisplay, &surf_id, 1); + } mGrallocSurfaceMap.clear(); mDrmSurfaceMap.clear(); mNormalSurfaceMap.clear(); + mUserptrSurfaceMap.clear(); + mBsParser->reset(); } } } JpegDecodeStatus JpegDecoder::parseTableData(JpegInfo &jpginfo) { - parserInitialize(mParser, jpginfo.buf, jpginfo.bufsize); - // Parse Quant tables +#define REPORT_BS_ERR_IF_FAIL(stmt) \ + do { \ + if (!stmt) { \ + ETRACE("%s::%d, bitstream error at offset %u, remaining bytes %u, total bytes %zu", \ + __FUNCTION__, __LINE__, mBsParser->getByteOffset(), mBsParser->getRemainingBytes(), \ + bufsize); \ + return JD_ERROR_BITSTREAM; \ + } \ + } while(0); + + assert(mParserInitialized); memset(&jpginfo.qmatrix_buf, 0, sizeof(jpginfo.qmatrix_buf)); uint32_t dqt_ind = 0; + uint32_t bufsize; + + if (jpginfo.use_vector_input) + bufsize = jpginfo.inputs->size(); + else + bufsize = jpginfo.bufsize; + for (dqt_ind = 0; dqt_ind < jpginfo.quant_tables_num; dqt_ind++) { - if (mParser->setByteOffset(mParser, jpginfo.dqt_byte_offset[dqt_ind])) { - // uint32_t uiTableBytes = mParser->readBytes( 2 ) - 2; - uint32_t table_bytes = mParser->readBytes( mParser, 2 ) - 2; - do { - uint32_t table_info = mParser->readNextByte(mParser); - table_bytes--; - uint32_t table_length = table_bytes > 64 ? 64 : table_bytes; - uint32_t table_precision = table_info >> 4; - if (table_precision != 0) { - ETRACE("%s ERROR: Parsing table data returns %d", __FUNCTION__, JD_ERROR_BITSTREAM); - return JD_ERROR_BITSTREAM; - } - uint32_t table_id = table_info & 0xf; + REPORT_BS_ERR_IF_FAIL(mBsParser->trySetByteOffset(jpginfo.dqt_byte_offset[dqt_ind])); + uint32_t table_bytes; + REPORT_BS_ERR_IF_FAIL(mBsParser->tryReadBytes(&table_bytes, 2 )); + table_bytes -= 2; + do { + uint8_t table_info; + REPORT_BS_ERR_IF_FAIL(mBsParser->tryReadNextByte(&table_info)); + table_bytes--; + uint32_t table_length = table_bytes > 64 ? 
64 : table_bytes; + uint32_t table_precision = table_info >> 4; + REPORT_BS_ERR_IF_FAIL ((table_precision == 0)); + uint32_t table_id = table_info & 0xf; - jpginfo.qmatrix_buf.load_quantiser_table[table_id] = 1; + jpginfo.qmatrix_buf.load_quantiser_table[table_id] = 1; - if (table_id < JPEG_MAX_QUANT_TABLES) { - // Pull Quant table data from bitstream - uint32_t byte_ind; - for (byte_ind = 0; byte_ind < table_length; byte_ind++) { - jpginfo.qmatrix_buf.quantiser_table[table_id][byte_ind] = mParser->readNextByte(mParser); - } - } else { - ETRACE("%s DQT table ID is not supported", __FUNCTION__); - mParser->burnBytes(mParser, table_length); + if (table_id < JPEG_MAX_QUANT_TABLES) { + // Pull Quant table data from bitstream + uint32_t byte_ind; + for (byte_ind = 0; byte_ind < table_length; byte_ind++) { + REPORT_BS_ERR_IF_FAIL(mBsParser->tryReadNextByte(&jpginfo.qmatrix_buf.quantiser_table[table_id][byte_ind])); } - table_bytes -= table_length; - } while (table_bytes); - } + } else { + ETRACE("%s DQT table ID is not supported", __FUNCTION__); + REPORT_BS_ERR_IF_FAIL(mBsParser->tryBurnBytes(table_length)); + } + table_bytes -= table_length; + } while (table_bytes); } // Parse Huffman tables memset(&jpginfo.hufman_table_buf, 0, sizeof(jpginfo.hufman_table_buf)); uint32_t dht_ind = 0; for (dht_ind = 0; dht_ind < jpginfo.huffman_tables_num; dht_ind++) { - if (mParser->setByteOffset(mParser, jpginfo.dht_byte_offset[dht_ind])) { - uint32_t table_bytes = mParser->readBytes( mParser, 2 ) - 2; - do { - uint32_t table_info = mParser->readNextByte(mParser); - table_bytes--; - uint32_t table_class = table_info >> 4; // Identifies whether the table is for AC or DC - uint32_t table_id = table_info & 0xf; - jpginfo.hufman_table_buf.load_huffman_table[table_id] = 1; - - if ((table_class < TABLE_CLASS_NUM) && (table_id < JPEG_MAX_SETS_HUFFMAN_TABLES)) { - if (table_class == 0) { - uint8_t* bits = mParser->getCurrentIndex(mParser); - // Find out the number of entries in the table - uint32_t table_entries = 0; - uint32_t bit_ind; - for (bit_ind = 0; bit_ind < 16; bit_ind++) { - jpginfo.hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind] = bits[bit_ind]; - table_entries += jpginfo.hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind]; - } + REPORT_BS_ERR_IF_FAIL(mBsParser->trySetByteOffset(jpginfo.dht_byte_offset[dht_ind])); + uint32_t table_bytes; + REPORT_BS_ERR_IF_FAIL(mBsParser->tryReadBytes( &table_bytes, 2 )); + table_bytes -= 2; + do { + uint8_t table_info; + REPORT_BS_ERR_IF_FAIL(mBsParser->tryReadNextByte(&table_info)); + table_bytes--; + uint32_t table_class = table_info >> 4; // Identifies whether the table is for AC or DC + uint32_t table_id = table_info & 0xf; + jpginfo.hufman_table_buf.load_huffman_table[table_id] = 1; - // Create table of code values - mParser->burnBytes(mParser, 16); - table_bytes -= 16; - uint32_t tbl_ind; - for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { - jpginfo.hufman_table_buf.huffman_table[table_id].dc_values[tbl_ind] = mParser->readNextByte(mParser); - table_bytes--; - } + if ((table_class < TABLE_CLASS_NUM) && (table_id < JPEG_MAX_SETS_HUFFMAN_TABLES)) { + if (table_class == 0) { + //const uint8_t* bits = mBsParser->getCurrentIndex(); + // Find out the number of entries in the table + uint32_t table_entries = 0; + uint32_t bit_ind; + for (bit_ind = 0; bit_ind < 16; bit_ind++) { + jpginfo.hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind] = mBsParser->itemAt(mBsParser->getByteOffset() + bit_ind); + table_entries += 
jpginfo.hufman_table_buf.huffman_table[table_id].num_dc_codes[bit_ind]; + } - } else { // for AC class - uint8_t* bits = mParser->getCurrentIndex(mParser); - // Find out the number of entries in the table - uint32_t table_entries = 0; - uint32_t bit_ind = 0; - for (bit_ind = 0; bit_ind < 16; bit_ind++) { - jpginfo.hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind] = bits[bit_ind]; - table_entries += jpginfo.hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind]; - } + // Create table of code values + REPORT_BS_ERR_IF_FAIL(mBsParser->tryBurnBytes(16)); + table_bytes -= 16; + uint32_t tbl_ind; + for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { + REPORT_BS_ERR_IF_FAIL(mBsParser->tryReadNextByte(&jpginfo.hufman_table_buf.huffman_table[table_id].dc_values[tbl_ind])); + table_bytes--; + } - // Create table of code values - mParser->burnBytes(mParser, 16); - table_bytes -= 16; - uint32_t tbl_ind = 0; - for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { - jpginfo.hufman_table_buf.huffman_table[table_id].ac_values[tbl_ind] = mParser->readNextByte(mParser); - table_bytes--; - } - }//end of else - } else { + } else { // for AC class + //const uint8_t* bits = mBsParser->getCurrentIndex(); // Find out the number of entries in the table - ETRACE("%s DHT table ID is not supported", __FUNCTION__); uint32_t table_entries = 0; uint32_t bit_ind = 0; - for(bit_ind = 0; bit_ind < 16; bit_ind++) { - table_entries += mParser->readNextByte(mParser); + for (bit_ind = 0; bit_ind < 16; bit_ind++) { + jpginfo.hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind] = mBsParser->itemAt(mBsParser->getByteOffset() + bit_ind);//bits[bit_ind]; + table_entries += jpginfo.hufman_table_buf.huffman_table[table_id].num_ac_codes[bit_ind]; + } + + // Create table of code values + REPORT_BS_ERR_IF_FAIL(mBsParser->tryBurnBytes(16)); + table_bytes -= 16; + uint32_t tbl_ind = 0; + for (tbl_ind = 0; tbl_ind < table_entries; tbl_ind++) { + REPORT_BS_ERR_IF_FAIL(mBsParser->tryReadNextByte(&jpginfo.hufman_table_buf.huffman_table[table_id].ac_values[tbl_ind])); table_bytes--; } - mParser->burnBytes(mParser, table_entries); - table_bytes -= table_entries; + }//end of else + } else { + // Find out the number of entries in the table + ETRACE("%s DHT table ID is not supported", __FUNCTION__); + uint32_t table_entries = 0; + uint32_t bit_ind = 0; + for(bit_ind = 0; bit_ind < 16; bit_ind++) { + uint8_t tmp; + REPORT_BS_ERR_IF_FAIL(mBsParser->tryReadNextByte(&tmp)); + table_entries += tmp; + table_bytes--; } + REPORT_BS_ERR_IF_FAIL(mBsParser->tryBurnBytes(table_entries)); + table_bytes -= table_entries; + } - } while (table_bytes); - } + } while (table_bytes); } return JD_SUCCESS; diff --git a/imagedecoder/JPEGDecoder.h b/imagedecoder/JPEGDecoder.h index 9c0cd9a..754bf6d 100644 --- a/imagedecoder/JPEGDecoder.h +++ b/imagedecoder/JPEGDecoder.h @@ -1,6 +1,5 @@ /* INTEL CONFIDENTIAL * Copyright (c) 2012, 2013 Intel Corporation. All rights reserved. 
-* Copyright (c) Imagination Technologies Limited, UK * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -31,29 +30,35 @@ #ifndef JPEGDEC_H #define JPEGDEC_H -#include #include #include +#include #include "JPEGCommon.h" +#include +#include +#include +#include +#include +#include +#include +#include + using namespace android; struct CJPEGParse; +class JpegBitstreamParser; class JpegBlitter; +typedef void* BlitEvent; + +extern int generateHandle(); // Non thread-safe class JpegDecoder { friend class JpegBlitter; public: - struct MapHandle - { - friend class JpegDecoder; - public: - bool valid; - private: - VAImage *img; - }; - JpegDecoder(); + typedef uint32_t MapHandle; + JpegDecoder(VADisplay display = NULL, VAConfigID vpCfgId = VA_INVALID_ID, VAContextID vpCtxId = VA_INVALID_ID, bool use_blitter = false); virtual ~JpegDecoder(); virtual JpegDecodeStatus init(int width, int height, RenderTarget **targets, int num); virtual void deinit(); @@ -61,27 +66,50 @@ public: virtual JpegDecodeStatus decode(JpegInfo &jpginfo, RenderTarget &target); virtual JpegDecodeStatus sync(RenderTarget &target); virtual bool busy(RenderTarget &target) const; - virtual JpegDecodeStatus blit(RenderTarget &src, RenderTarget &dst); + virtual JpegDecodeStatus blit(RenderTarget &src, RenderTarget &dst, int scale_factor); + virtual JpegDecodeStatus getRgbaTile(RenderTarget &src, + uint8_t *sysmem, + int left, int top, int width, int height, int scale_factor); + virtual JpegDecodeStatus blitToLinearRgba(RenderTarget &src, + uint8_t *sysmem, + uint32_t width, uint32_t height, + BlitEvent &event, int scale_factor); + virtual JpegDecodeStatus blitToCameraSurfaces(RenderTarget &src, + buffer_handle_t dst_nv12, + buffer_handle_t dst_yuy2, + uint8_t *dst_nv21, + uint8_t *dst_yv12, + uint32_t width, uint32_t height, + BlitEvent &event); + virtual void syncBlit(BlitEvent &event); virtual MapHandle mapData(RenderTarget &target, void ** data, uint32_t * offsets, uint32_t * pitches); virtual void unmapData(RenderTarget &target, MapHandle maphandle); -private: + virtual VASurfaceID getSurfaceID(RenderTarget &target) const; + virtual JpegDecodeStatus createSurfaceFromRenderTarget(RenderTarget &target, VASurfaceID *surf_id); + virtual JpegDecodeStatus destroySurface(RenderTarget &target); + virtual JpegDecodeStatus destroySurface(VASurfaceID surf_id); +protected: bool mInitialized; mutable Mutex mLock; VADisplay mDisplay; VAConfigID mConfigId; VAContextID mContextId; CJPEGParse *mParser; + JpegBitstreamParser *mBsParser; + bool mParserInitialized; JpegBlitter *mBlitter; + bool mDispCreated; KeyedVector mGrallocSurfaceMap; KeyedVector mDrmSurfaceMap; KeyedVector mNormalSurfaceMap; - virtual VASurfaceID getSurfaceID(RenderTarget &target) const; + KeyedVector mUserptrSurfaceMap; + virtual JpegDecodeStatus parseHeader(JpegInfo &jpginfo); virtual JpegDecodeStatus parseTableData(JpegInfo &jpginfo); virtual bool jpegColorFormatSupported(JpegInfo &jpginfo) const; - virtual JpegDecodeStatus createSurfaceFromRenderTarget(RenderTarget &target, VASurfaceID *surf_id); - virtual JpegDecodeStatus createSurfaceInternal(int width, int height, int pixel_format, int handle, VASurfaceID *surf_id); - virtual JpegDecodeStatus createSurfaceDrm(int width, int height, int pixel_format, unsigned long boname, int stride, VASurfaceID *surf_id); - virtual JpegDecodeStatus createSurfaceGralloc(int width, int height, int pixel_format, buffer_handle_t handle, int stride, VASurfaceID 
*surf_id); + virtual JpegDecodeStatus createSurfaceInternal(int width, int height, uint32_t fourcc, int handle, VASurfaceID *surf_id); + virtual JpegDecodeStatus createSurfaceUserptr(int width, int height, uint32_t fourcc, uint8_t* ptr, VASurfaceID *surf_id); + virtual JpegDecodeStatus createSurfaceDrm(int width, int height, uint32_t fourcc, unsigned long boname, int stride, VASurfaceID *surf_id); + virtual JpegDecodeStatus createSurfaceGralloc(int width, int height, uint32_t fourcc, buffer_handle_t handle, int stride, VASurfaceID *surf_id); }; diff --git a/imagedecoder/JPEGDecoder_gen.cpp b/imagedecoder/JPEGDecoder_gen.cpp index 8fa25a8..0f1fb2a 100644 --- a/imagedecoder/JPEGDecoder_gen.cpp +++ b/imagedecoder/JPEGDecoder_gen.cpp @@ -1,6 +1,5 @@ /* INTEL CONFIDENTIAL * Copyright (c) 2013 Intel Corporation. All rights reserved. -* Copyright (c) Imagination Technologies Limited, UK * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -25,11 +24,11 @@ * Yao Cheng * */ -//#define LOG_NDEBUG 0 #include "va/va.h" #include "va/va_vpp.h" #include "va/va_drmcommon.h" +#include "va/va_tpi.h" #include "JPEGDecoder.h" #include "ImageDecoderTrace.h" #include @@ -37,6 +36,35 @@ #include #include "JPEGCommon_Gen.h" +uint32_t aligned_height(uint32_t height, int tiling) +{ + switch(tiling) { + // Y-tile (128 x 32): NV12, 411P, IMC3, 422H, 422V, 444P + case SURF_TILING_Y: + return (height + (32-1)) & ~(32-1); + // X-tile (512 x 8): + case SURF_TILING_X: + return (height + (8-1)) & ~(8-1); + // Linear: other + default: + return height; + } +} +uint32_t aligned_width(uint32_t width, int tiling) +{ + switch(tiling) { + // Y-tile (128 x 32): NV12, 411P, IMC3, 422H, 422V, 444P + case SURF_TILING_Y: + return (width + (128-1)) & ~(128-1); + // X-tile (512 x 8): + case SURF_TILING_X: + return (width + (512-1)) & ~(512-1); + // Linear: other + default: + return width; + } +} + int fourcc2PixelFormat(uint32_t fourcc) { switch(fourcc) { @@ -50,8 +78,6 @@ int fourcc2PixelFormat(uint32_t fourcc) return HAL_PIXEL_FORMAT_NV12_TILED_INTEL; case VA_FOURCC_RGBA: return HAL_PIXEL_FORMAT_RGBA_8888; - case VA_FOURCC_422V: - case VA_FOURCC_411P: default: return -1; } @@ -74,8 +100,6 @@ uint32_t pixelFormat2Fourcc(int pixel_format) } } -//#define LOG_TAG "ImageDecoder" - #define JD_CHECK(err, label) \ if (err) { \ ETRACE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ @@ -93,17 +117,19 @@ bool JpegDecoder::jpegColorFormatSupported(JpegInfo &jpginfo) const { return (jpginfo.image_color_fourcc == VA_FOURCC_IMC3) || (jpginfo.image_color_fourcc == VA_FOURCC_422H) || + (jpginfo.image_color_fourcc == VA_FOURCC_422V) || + (jpginfo.image_color_fourcc == VA_FOURCC_411P) || + (jpginfo.image_color_fourcc == VA_FOURCC('4','0','0','P')) || (jpginfo.image_color_fourcc == VA_FOURCC_444P); } -JpegDecodeStatus JpegDecoder::createSurfaceDrm(int width, int height, int pixel_format, unsigned long boname, int stride, VASurfaceID *surf_id) +JpegDecodeStatus JpegDecoder::createSurfaceDrm(int width, int height, uint32_t fourcc, unsigned long boname, int stride, VASurfaceID *surf_id) { VAStatus st; VASurfaceAttrib attrib_list; VASurfaceAttribExternalBuffers vaSurfaceExternBuf; - uint32_t fourcc = pixelFormat2Fourcc(pixel_format); + memset(&vaSurfaceExternBuf, 0, sizeof (VASurfaceAttribExternalBuffers)); vaSurfaceExternBuf.pixel_format = fourcc; - VTRACE("%s extBuf.pixel_format is %s", __FUNCTION__, fourcc2str(NULL, fourcc)); vaSurfaceExternBuf.width = width; 
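+ // The external-buffer descriptor hands the caller-allocated DRM buffer object to libva; pitches[0] must be the buffer's actual luma stride, which may be wider than the decoded width.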
vaSurfaceExternBuf.height = height; vaSurfaceExternBuf.pitches[0] = stride; @@ -115,6 +141,22 @@ JpegDecodeStatus JpegDecoder::createSurfaceDrm(int width, int height, int pixel_ attrib_list.value.type = VAGenericValueTypePointer; attrib_list.value.value.p = (void *)&vaSurfaceExternBuf; + VTRACE("%s, vaformat=0x%x, width=%d, height=%d, attrib=", __FUNCTION__, fourcc2VaFormat(fourcc), + width, height); + VTRACE(" ext.pixel_format=0x%x", vaSurfaceExternBuf.pixel_format); + VTRACE(" ext.width=%u", vaSurfaceExternBuf.width); + VTRACE(" ext.height=%u", vaSurfaceExternBuf.height); + VTRACE(" ext.data_size=%u", vaSurfaceExternBuf.data_size); + VTRACE(" ext.num_planes=%u", vaSurfaceExternBuf.num_planes); + VTRACE(" ext.pitches=%u,%u,%u,%u", vaSurfaceExternBuf.pitches[0],vaSurfaceExternBuf.pitches[1],vaSurfaceExternBuf.pitches[2],vaSurfaceExternBuf.pitches[3]); + VTRACE(" ext.offsets=%u,%u,%u,%u", vaSurfaceExternBuf.offsets[0],vaSurfaceExternBuf.offsets[1],vaSurfaceExternBuf.offsets[2],vaSurfaceExternBuf.offsets[3]); + VTRACE(" ext.buffers[0]=%lu", vaSurfaceExternBuf.buffers[0]); + VTRACE(" ext.num_buffers=%u", vaSurfaceExternBuf.num_buffers); + VTRACE(" ext.flags=%u", vaSurfaceExternBuf.flags); + VTRACE(" attrib_list.type=%u", attrib_list.type); + VTRACE(" attrib_list.flags=%u", attrib_list.flags); + VTRACE(" attrib_list.value.type=%u", attrib_list.value.type); + st = vaCreateSurfaces(mDisplay, fourcc2VaFormat(fourcc), width, @@ -123,7 +165,7 @@ JpegDecodeStatus JpegDecoder::createSurfaceDrm(int width, int height, int pixel_ 1, &attrib_list, 1); - VTRACE("%s createSurface DRM for vaformat %u, fourcc %s", __FUNCTION__, fourcc2VaFormat(fourcc), fourcc2str(NULL, fourcc)); + VTRACE("%s createSurface DRM for vaformat %u, fourcc %s", __FUNCTION__, fourcc2VaFormat(fourcc), fourcc2str(fourcc)); if (st != VA_STATUS_SUCCESS) { ETRACE("%s: vaCreateSurfaces returns %d", __PRETTY_FUNCTION__, st); return JD_RESOURCE_FAILURE; @@ -131,51 +173,201 @@ JpegDecodeStatus JpegDecoder::createSurfaceDrm(int width, int height, int pixel_ return JD_SUCCESS; } -JpegDecodeStatus JpegDecoder::createSurfaceGralloc(int width, int height, int pixel_format, buffer_handle_t handle, int stride, VASurfaceID *surf_id) +JpegDecodeStatus JpegDecoder::createSurfaceGralloc(int width, int height, uint32_t fourcc, buffer_handle_t handle, int stride, VASurfaceID *surf_id) { - unsigned long boname; - hw_module_t const* module = NULL; - alloc_device_t *allocdev = NULL; - struct gralloc_module_t *gralloc_module = NULL; - JpegDecodeStatus st; - - uint32_t fourcc = pixelFormat2Fourcc(pixel_format); - VTRACE("enter %s, pixel_format 0x%x, fourcc %s", __FUNCTION__, pixel_format, fourcc2str(NULL, fourcc)); - - int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); - if (err) { - ETRACE("%s failed to get gralloc module", __PRETTY_FUNCTION__); - st = JD_RESOURCE_FAILURE; - } - JD_CHECK(err, cleanup); - gralloc_module = (struct gralloc_module_t *)module; - err = gralloc_open(module, &allocdev); - if (err) { - ETRACE("%s failed to open alloc device", __PRETTY_FUNCTION__); - st = JD_RESOURCE_FAILURE; - } - JD_CHECK(err, cleanup); - err = gralloc_module->perform(gralloc_module, - INTEL_UFO_GRALLOC_MODULE_PERFORM_GET_BO_NAME, - handle, - &boname); - if (err) { - ETRACE("%s failed to get boname via gralloc->perform", __PRETTY_FUNCTION__); - st = JD_RESOURCE_FAILURE; - } - JD_CHECK(err, cleanup); - VTRACE("YAO %s fourcc %s luma_stride is %d", __FUNCTION__, - fourcc2str(NULL, fourcc), stride); - - gralloc_close(allocdev); - return createSurfaceDrm(width,
height, pixel_format, boname, stride, surf_id); -cleanup: - if (allocdev) - gralloc_close(allocdev); - return st; + VAStatus st; + VASurfaceAttrib attrib_list; + VASurfaceAttribExternalBuffers vaSurfaceExternBuf; + memset(&vaSurfaceExternBuf, 0, sizeof (VASurfaceAttribExternalBuffers)); + vaSurfaceExternBuf.pixel_format = fourcc; + vaSurfaceExternBuf.width = width; + vaSurfaceExternBuf.height = height; + vaSurfaceExternBuf.pitches[0] = stride; + vaSurfaceExternBuf.buffers = (unsigned long*)&handle; + vaSurfaceExternBuf.num_buffers = 1; + vaSurfaceExternBuf.flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC; + attrib_list.type = VASurfaceAttribExternalBufferDescriptor; + attrib_list.flags = VA_SURFACE_ATTRIB_SETTABLE; + attrib_list.value.type = VAGenericValueTypePointer; + attrib_list.value.value.p = (void *)&vaSurfaceExternBuf; + VTRACE("%s, vaformat=0x%x, width=%d, height=%d, attrib=", __FUNCTION__, fourcc2VaFormat(fourcc), + width, height); + VTRACE(" ext.pixel_format=0x%x", vaSurfaceExternBuf.pixel_format); + VTRACE(" ext.width=%u", vaSurfaceExternBuf.width); + VTRACE(" ext.height=%u", vaSurfaceExternBuf.height); + VTRACE(" ext.data_size=%u", vaSurfaceExternBuf.data_size); + VTRACE(" ext.num_planes=%u", vaSurfaceExternBuf.num_planes); + VTRACE(" ext.pitches=%u,%u,%u,%u", vaSurfaceExternBuf.pitches[0],vaSurfaceExternBuf.pitches[1],vaSurfaceExternBuf.pitches[2],vaSurfaceExternBuf.pitches[3]); + VTRACE(" ext.offsets=%u,%u,%u,%u", vaSurfaceExternBuf.offsets[0],vaSurfaceExternBuf.offsets[1],vaSurfaceExternBuf.offsets[2],vaSurfaceExternBuf.offsets[3]); + VTRACE(" ext.buffers[0]=%lu", vaSurfaceExternBuf.buffers[0]); + VTRACE(" ext.num_buffers=%u", vaSurfaceExternBuf.num_buffers); + VTRACE(" ext.flags=%u", vaSurfaceExternBuf.flags); + VTRACE(" attrib_list.type=%u", attrib_list.type); + VTRACE(" attrib_list.flags=%u", attrib_list.flags); + VTRACE(" attrib_list.value.type=%u", attrib_list.value.type); + + st = vaCreateSurfaces(mDisplay, + fourcc2VaFormat(fourcc), + width, + height, + surf_id, + 1, + &attrib_list, + 1); + VTRACE("%s createSurface GRALLOC for vaformat %u, fourcc %s", __FUNCTION__, fourcc2VaFormat(fourcc), fourcc2str(fourcc)); + if (st != VA_STATUS_SUCCESS) { + ETRACE("%s: vaCreateSurfaces returns %d", __PRETTY_FUNCTION__, st); + return JD_RESOURCE_FAILURE; + } + return JD_SUCCESS; } +JpegDecodeStatus JpegDecoder::createSurfaceUserptr(int width, int height, uint32_t fourcc, uint8_t* ptr, VASurfaceID *surf_id) +{ + VAStatus st; + VASurfaceAttrib attrib_list; + VASurfaceAttribExternalBuffers vaSurfaceExternBuf; + memset(&vaSurfaceExternBuf, 0, sizeof (VASurfaceAttribExternalBuffers)); + vaSurfaceExternBuf.pixel_format = fourcc; + vaSurfaceExternBuf.width = width; + vaSurfaceExternBuf.height = height; + vaSurfaceExternBuf.pitches[0] = width; + vaSurfaceExternBuf.offsets[0] = 0; + switch (fourcc) { + case VA_FOURCC_NV12: + vaSurfaceExternBuf.pitches[1] = width; + vaSurfaceExternBuf.pitches[2] = 0; + vaSurfaceExternBuf.pitches[3] = 0; + vaSurfaceExternBuf.offsets[1] = width * height; + vaSurfaceExternBuf.offsets[2] = 0; + vaSurfaceExternBuf.offsets[3] = 0; + break; + case VA_FOURCC_YUY2: + case VA_FOURCC_UYVY: + vaSurfaceExternBuf.pitches[0] = width * 2; + vaSurfaceExternBuf.pitches[1] = 0; + vaSurfaceExternBuf.pitches[2] = 0; + vaSurfaceExternBuf.pitches[3] = 0; + vaSurfaceExternBuf.offsets[1] = 0; + vaSurfaceExternBuf.offsets[2] = 0; + vaSurfaceExternBuf.offsets[3] = 0; + break; + case VA_FOURCC_YV12: + vaSurfaceExternBuf.pitches[1] = width / 2; + vaSurfaceExternBuf.pitches[2] = width / 2; +
vaSurfaceExternBuf.pitches[3] = 0; + vaSurfaceExternBuf.offsets[1] = width * height; + vaSurfaceExternBuf.offsets[2] = width * height * 5 / 4; + vaSurfaceExternBuf.offsets[3] = 0; + break; + case VA_FOURCC_RGBA: + vaSurfaceExternBuf.pitches[0] = width * 4; + vaSurfaceExternBuf.pitches[1] = 0; + vaSurfaceExternBuf.pitches[2] = 0; + vaSurfaceExternBuf.pitches[3] = 0; + vaSurfaceExternBuf.offsets[1] = 0; + vaSurfaceExternBuf.offsets[2] = 0; + vaSurfaceExternBuf.offsets[3] = 0; + break; + case VA_FOURCC_411P: + vaSurfaceExternBuf.pitches[1] = width; + vaSurfaceExternBuf.pitches[2] = width; + vaSurfaceExternBuf.pitches[3] = 0; + vaSurfaceExternBuf.offsets[1] = width * height; + vaSurfaceExternBuf.offsets[2] = width * height * 2; + vaSurfaceExternBuf.offsets[3] = 0; + break; + case VA_FOURCC_411R: + vaSurfaceExternBuf.pitches[1] = width; + vaSurfaceExternBuf.pitches[2] = width; + vaSurfaceExternBuf.pitches[3] = 0; + vaSurfaceExternBuf.offsets[1] = width * height; + vaSurfaceExternBuf.offsets[2] = width * height * 5 / 4; + vaSurfaceExternBuf.offsets[3] = 0; + break; + case VA_FOURCC_IMC3: + vaSurfaceExternBuf.pitches[1] = width; + vaSurfaceExternBuf.pitches[2] = width; + vaSurfaceExternBuf.pitches[3] = 0; + vaSurfaceExternBuf.offsets[1] = width * height; + vaSurfaceExternBuf.offsets[2] = width * height * 3 / 2; + vaSurfaceExternBuf.offsets[3] = 0; + break; + case VA_FOURCC_422H: + vaSurfaceExternBuf.pitches[1] = width; + vaSurfaceExternBuf.pitches[2] = width; + vaSurfaceExternBuf.pitches[3] = 0; + vaSurfaceExternBuf.offsets[1] = width * height; + vaSurfaceExternBuf.offsets[2] = width * height; + vaSurfaceExternBuf.offsets[3] = 0; + break; + case VA_FOURCC_422V: + vaSurfaceExternBuf.pitches[1] = width; + vaSurfaceExternBuf.pitches[2] = width; + vaSurfaceExternBuf.pitches[3] = 0; + vaSurfaceExternBuf.offsets[1] = width * height; + vaSurfaceExternBuf.offsets[2] = width * height * 3 / 2; + vaSurfaceExternBuf.offsets[3] = 0; + break; + case VA_FOURCC_444P: + vaSurfaceExternBuf.pitches[1] = width; + vaSurfaceExternBuf.pitches[2] = width; + vaSurfaceExternBuf.pitches[3] = 0; + vaSurfaceExternBuf.offsets[1] = width * height; + vaSurfaceExternBuf.offsets[2] = width * height * 2; + vaSurfaceExternBuf.offsets[3] = 0; + break; + case VA_FOURCC('4','0','0','P'): + default: + vaSurfaceExternBuf.pitches[1] = 0; + vaSurfaceExternBuf.pitches[2] = 0; + vaSurfaceExternBuf.pitches[3] = 0; + vaSurfaceExternBuf.offsets[1] = 0; + vaSurfaceExternBuf.offsets[2] = 0; + vaSurfaceExternBuf.offsets[3] = 0; + break; + } + vaSurfaceExternBuf.buffers = (unsigned long*)ptr; + vaSurfaceExternBuf.num_buffers = 1; + vaSurfaceExternBuf.flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC; + attrib_list.type = VASurfaceAttribMemoryType; + attrib_list.flags = VA_SURFACE_ATTRIB_SETTABLE; + attrib_list.value.type = VAGenericValueTypeInteger; + attrib_list.value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR; + + VTRACE("%s, vaformat=0x%x, width=%d, height=%d, attrib=", __FUNCTION__, fourcc2VaFormat(fourcc), + width, height); + VTRACE(" ext.pixel_format=0x%x", vaSurfaceExternBuf.pixel_format); + VTRACE(" ext.width=%u", vaSurfaceExternBuf.width); + VTRACE(" ext.height=%u", vaSurfaceExternBuf.height); + VTRACE(" ext.data_size=%u", vaSurfaceExternBuf.data_size); + VTRACE(" ext.num_planes=%u", vaSurfaceExternBuf.num_planes); + VTRACE(" ext.pitches=%u,%u,%u,%u", vaSurfaceExternBuf.pitches[0],vaSurfaceExternBuf.pitches[1],vaSurfaceExternBuf.pitches[2],vaSurfaceExternBuf.pitches[3]); + VTRACE(" ext.offsets=%u,%u,%u,%u", 
vaSurfaceExternBuf.offsets[0],vaSurfaceExternBuf.offsets[1],vaSurfaceExternBuf.offsets[2],vaSurfaceExternBuf.offsets[3]); + VTRACE(" ext.buffers[0]=%lu", vaSurfaceExternBuf.buffers[0]); + VTRACE(" ext.num_buffers=%u", vaSurfaceExternBuf.num_buffers); + VTRACE(" ext.flags=%u", vaSurfaceExternBuf.flags); + VTRACE(" attrib_list.type=%u", attrib_list.type); + VTRACE(" attrib_list.flags=%u", attrib_list.flags); + VTRACE(" attrib_list.value.type=%u", attrib_list.value.type); + + st = vaCreateSurfaces(mDisplay, + fourcc2VaFormat(fourcc), + width, + height, + surf_id, + 1, + &attrib_list, + 1); + VTRACE("%s createSurface USERPTR for vaformat %u, fourcc %s", __FUNCTION__, fourcc2VaFormat(fourcc), fourcc2str(fourcc)); + if (st != VA_STATUS_SUCCESS) { + ETRACE("%s: vaCreateSurfaces returns %d", __PRETTY_FUNCTION__, st); + return JD_RESOURCE_FAILURE; + } + return JD_SUCCESS; + +} diff --git a/imagedecoder/JPEGDecoder_img.cpp b/imagedecoder/JPEGDecoder_img.cpp index d90559d..165c138 100644 --- a/imagedecoder/JPEGDecoder_img.cpp +++ b/imagedecoder/JPEGDecoder_img.cpp @@ -1,6 +1,5 @@ /* INTEL CONFIDENTIAL * Copyright (c) 2012, 2013 Intel Corporation. All rights reserved. -* Copyright (c) Imagination Technologies Limited, UK * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -30,6 +29,35 @@ #include "JPEGCommon_Img.h" #include "JPEGDecoder.h" +uint32_t aligned_height(uint32_t height, int tiling) +{ + switch(tiling) { + // Y-tile (128 x 32): NV12, 411P, IMC3, 422H, 422V, 444P + case SURF_TILING_Y: + return (height + (32-1)) & ~(32-1); + // X-tile (512 x 8): + case SURF_TILING_X: + return (height + (8-1)) & ~(8-1); + // Linear: other + default: + return height; + } +} +uint32_t aligned_width(uint32_t width, int tiling) +{ + switch(tiling) { + // Y-tile (128 x 32): NV12, 411P, IMC3, 422H, 422V, 444P + case SURF_TILING_Y: + return (width + (128-1)) & ~(128-1); + // X-tile (512 x 8): + case SURF_TILING_X: + return (width + (512-1)) & ~(512-1); + // Linear: other + default: + return width; + } +} + int fourcc2PixelFormat(uint32_t fourcc) { switch(fourcc) { @@ -65,19 +93,19 @@ bool JpegDecoder::jpegColorFormatSupported(JpegInfo &jpginfo) const (jpginfo.image_color_fourcc == VA_FOURCC_444P); } -JpegDecodeStatus JpegDecoder::createSurfaceDrm(int width, int height, int pixel_format, unsigned long boname, int stride, VASurfaceID *surf_id) +JpegDecodeStatus JpegDecoder::createSurfaceDrm(int width, int height, uint32_t fourcc, unsigned long boname, int stride, VASurfaceID *surf_id) { return JD_RENDER_TARGET_TYPE_UNSUPPORTED; } -JpegDecodeStatus JpegDecoder::createSurfaceGralloc(int width, int height, int pixel_format, buffer_handle_t handle, int stride, VASurfaceID *surf_id) +JpegDecodeStatus JpegDecoder::createSurfaceGralloc(int width, int height, uint32_t fourcc, buffer_handle_t handle, int stride, VASurfaceID *surf_id) { VAStatus st; VASurfaceAttributeTPI attrib_tpi; uint32_t va_format = VA_RT_FORMAT_YUV444; attrib_tpi.count = 1; attrib_tpi.luma_stride = stride; - attrib_tpi.pixel_format = pixel_format; + attrib_tpi.pixel_format = VA_FOURCC_YV32; attrib_tpi.width = width; attrib_tpi.height = height; attrib_tpi.type = VAExternalMemoryAndroidGrallocBuffer; @@ -96,4 +124,8 @@ JpegDecodeStatus JpegDecoder::createSurfaceGralloc(int width, int height, int pi return JD_SUCCESS; } +JpegDecodeStatus JpegDecoder::createSurfaceUserptr(int width, int height, uint32_t fourcc, uint8_t* ptr, VASurfaceID *surf_id) +{ + return JD_INVALID_RENDER_TARGET; +}
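The map/unmap rework in the JPEGDecoder changes above replaces the old struct-based MapHandle with a plain integer handle (a casted VAImage pointer, with 0 signalling failure). A minimal caller sketch; the decoder, target, and consumeYuv() helper are assumptions for illustration, not part of this patch:

    // Sketch only: map a decoded RenderTarget, read its planes, then unmap.
    void *data = NULL;
    uint32_t offsets[3], pitches[3];
    JpegDecoder::MapHandle h = decoder->mapData(target, &data, offsets, pitches);
    if (h != 0) { // 0 now signals failure instead of MapHandle::valid
        consumeYuv((uint8_t*)data, offsets, pitches); // hypothetical consumer of the derived VAImage planes
        decoder->unmapData(target, h); // unmaps the buffer and destroys the derived VAImage
    }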
diff --git a/imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp b/imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp index edfaac6..0e123e5 100644 --- a/imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp +++ b/imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp @@ -1,6 +1,5 @@ /* INTEL CONFIDENTIAL * Copyright (c) 2012, 2013 Intel Corporation. All rights reserved. -* Copyright (c) Imagination Technologies Limited, UK * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -34,8 +33,6 @@ * to determine which path will be use (SW or HW) * */ -//#define LOG_NDEBUG 0 -#define LOG_TAG "ImageDecoder" #include #include "JPEGDecoder_libjpeg_wrapper.h" @@ -43,27 +40,42 @@ #include #include "JPEGDecoder.h" #include +#include #include "va/va_dec_jpeg.h" - +#include #ifdef NDEBUG #undef NDEBUG #endif #include - +#include static Mutex jdlock; +static VADisplay display = NULL; +static VAConfigID vpCfgId = VA_INVALID_ID; +static VAContextID vpCtxId = VA_INVALID_ID; + +#define DUMP_DECODE 0 +#define DUMP_RGBA 0 +#define RGBA_DUMP_FILE_PATTERN "/sdcard/jpeg_%dx%d_from_%s.rgba" +#define DECODE_DUMP_FILE_PATTERN "/sdcard/jpeg_%dx%d.%s" + +using namespace android; struct jdva_private { JpegInfo jpg_info; - JpegDecoder decoder; + android::Vector inputs; + JpegDecoder *decoder; RenderTarget dec_buffer; - RenderTarget yuy2_buffer; - RenderTarget rgba_buffer; + uint8_t* rgba_out; + BlitEvent blit_event; + int tile_read_x; + int tile_read_y; + int tile_read_width; + int tile_read_height; + int scale_factor; }; -static int internal_buffer_handle = 0; - #define JD_CHECK(err, label) \ if (err) { \ ALOGE("%s::%d: failed: %d", __PRETTY_FUNCTION__, __LINE__, err); \ @@ -77,6 +89,38 @@ static int internal_buffer_handle = 0; goto label; \ } +static void libva_vp_pre_init_locked() +{ + if (display == NULL && vpCfgId == VA_INVALID_ID && vpCtxId == VA_INVALID_ID) { + Display dpy; + int va_major_version, va_minor_version; + VAConfigAttrib vpp_attrib; + VAStatus st; + display = vaGetDisplay(&dpy); + st = vaInitialize(display, &va_major_version, &va_minor_version); + assert(st == VA_STATUS_SUCCESS); + vpp_attrib.type = VAConfigAttribRTFormat; + vpp_attrib.value = VA_RT_FORMAT_YUV420; + st = vaCreateConfig(display, VAProfileNone, + VAEntrypointVideoProc, + &vpp_attrib, + 1, &vpCfgId); + assert(st == VA_STATUS_SUCCESS); + st = vaCreateContext(display, vpCfgId, 1920, 1080, 0, NULL, 0, &vpCtxId); + assert(st == VA_STATUS_SUCCESS); + } +} + +/* clear the global VA context + * actually it's not needed + * when the process terminates, the drm fd will be closed by kernel and the VA + * context will be automatically released + */ +static void libva_vp_post_deinit_locked() +{ + // DO NOTHING +} + Decode_Status jdva_initialize (jd_libva_struct * jd_libva_ptr) { /* @@ -91,21 +135,38 @@ Decode_Status jdva_initialize (jd_libva_struct * jd_libva_ptr) VAStatus va_status = VA_STATUS_SUCCESS; Decode_Status status = DECODE_SUCCESS; + Mutex::Autolock autoLock(jdlock); + + if (display == NULL || vpCfgId == VA_INVALID_ID || vpCtxId == VA_INVALID_ID) { + libva_vp_pre_init_locked(); + } + if (jd_libva_ptr->initialized) { ALOGW("%s HW decode already initialized", __FUNCTION__); return DECODE_NOT_STARTED; } { - Mutex::Autolock autoLock(jdlock); if (!(jd_libva_ptr->initialized)) { jdva_private *priv = new jdva_private; memset(&priv->jpg_info, 0, sizeof(JpegInfo)); + priv->jpg_info.use_vector_input = true; memset(&priv->dec_buffer, 0, sizeof(RenderTarget)); - memset(&priv->yuy2_buffer, 0, 
sizeof(RenderTarget)); - memset(&priv->rgba_buffer, 0, sizeof(RenderTarget)); + priv->rgba_out = NULL; + priv->inputs.clear(); + priv->jpg_info.inputs = &priv->inputs; jd_libva_ptr->initialized = TRUE; jd_libva_ptr->priv = (uint32_t)priv; + jd_libva_ptr->cap_available= 0x0; + jd_libva_ptr->cap_available |= JPEG_CAPABILITY_DECODE; +#ifdef GFXGEN + jd_libva_ptr->cap_available |= JPEG_CAPABILITY_UPSAMPLE | JPEG_CAPABILITY_DOWNSCALE; +#endif + jd_libva_ptr->cap_enabled = jd_libva_ptr->cap_available; + if (jd_libva_ptr->cap_available & JPEG_CAPABILITY_UPSAMPLE) + priv->decoder = new JpegDecoder(display, vpCfgId, vpCtxId, true); + else + priv->decoder = new JpegDecoder(); status = DECODE_SUCCESS; } } @@ -127,74 +188,127 @@ void jdva_deinitialize (jd_libva_struct * jd_libva_ptr) Mutex::Autolock autoLock(jdlock); if (jd_libva_ptr->initialized) { jdva_private *p = (jdva_private*)jd_libva_ptr->priv; + delete p->decoder; + jd_libva_ptr->bitstream_buf = NULL; + p->inputs.clear(); delete p; jd_libva_ptr->initialized = FALSE; } } - ALOGV("jdva_deinitialize finished"); return; } -RenderTarget * create_render_target(RenderTarget* target, int width, int height, int pixel_format) +Decode_Status jdva_fill_input(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr) +{ + jdva_private *p = (jdva_private*)jd_libva_ptr->priv; + if ((*cinfo->src->fill_input_buffer)(cinfo)) { + assert(cinfo->src->next_input_byte); + assert(cinfo->src->bytes_in_buffer); + p->inputs.appendArray(cinfo->src->next_input_byte, cinfo->src->bytes_in_buffer); + jd_libva_ptr->file_size += cinfo->src->bytes_in_buffer; + ALOGV("%s read %d bytes, file_size %u bytes, vector %u bytes", __FUNCTION__, cinfo->src->bytes_in_buffer, jd_libva_ptr->file_size, p->inputs.size()); + cinfo->src->bytes_in_buffer = 0; + } + else { + return DECODE_DRIVER_FAIL; + } + return DECODE_SUCCESS; +} + +void jdva_drain_input(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr) +{ + nsecs_t now = systemTime(); + jdva_private *p = (jdva_private*)jd_libva_ptr->priv; + do { + if ((*cinfo->src->fill_input_buffer)(cinfo)) { + p->inputs.appendArray(cinfo->src->next_input_byte, cinfo->src->bytes_in_buffer); + jd_libva_ptr->file_size += cinfo->src->bytes_in_buffer; + } + else { + break; + } + } while (cinfo->src->bytes_in_buffer > 0); + jd_libva_ptr->bitstream_buf = p->inputs.array(); + ALOGV("%s drained input %u bytes took %.2f ms", __FUNCTION__, jd_libva_ptr->file_size, + (systemTime() - now)/1000000.0); +} + +RenderTarget * create_render_target(RenderTarget* target, int width, int height, uint32_t fourcc) { hw_module_t const* module = NULL; alloc_device_t *allocdev = NULL; struct gralloc_module_t *gralloc_module = NULL; buffer_handle_t handle; - uint32_t fourcc; int stride, bpp, err; - fourcc = pixelFormat2Fourcc(pixel_format); bpp = fourcc2LumaBitsPerPixel(fourcc); if (target == NULL) { ALOGE("%s malloc new RenderTarget failed", __FUNCTION__); return NULL; } - ALOGV("%s created %s target %p", __FUNCTION__, fourcc2str(NULL, fourcc), target); - if ((fourcc == VA_FOURCC_422H) || - (fourcc == VA_FOURCC_YUY2) || - (fourcc == VA_FOURCC_RGBA)){ - err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); - if (err || !module) { - ALOGE("%s failed to get gralloc module", __FUNCTION__); - return NULL; - } - gralloc_module = (struct gralloc_module_t *)module; - err = gralloc_open(module, &allocdev); - if (err || !allocdev) { - ALOGE("%s failed to open alloc device", __FUNCTION__); - return NULL; - } - err = allocdev->alloc(allocdev, - width, height, pixel_format, - 
GRALLOC_USAGE_HW_RENDER, - &handle, &stride); - if (err) { - gralloc_close(allocdev); - ALOGE("%s failed to allocate surface", __FUNCTION__); - return NULL; - } - target->type = RenderTarget::ANDROID_GRALLOC; - target->handle = (int)handle; - target->stride = stride * bpp; - } - else { - *((int*)(&target->type)) = RENDERTARGET_INTERNAL_BUFFER; - target->handle = internal_buffer_handle++; - } + ALOGV("%s created %s target %p", __FUNCTION__, fourcc2str(fourcc), target); + target->type = RenderTarget::INTERNAL_BUF; + target->handle = generateHandle(); target->width = width; target->height = height; - target->pixel_format = pixel_format; + target->pixel_format = fourcc; target->rect.x = target->rect.y = 0; target->rect.width = target->width; target->rect.height = target->height; return target; } +Decode_Status jdva_init_read_tile_scanline(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr, int *x, int *y, int *w, int *h) +{ + if (jd_libva_ptr->cap_enabled & JPEG_CAPABILITY_UPSAMPLE) { + JpegDecodeStatus st; + jdva_private * priv = (jdva_private*)jd_libva_ptr->priv; + if (priv->scale_factor != cinfo->scale_denom) { + ALOGV("%s scale_denom changed from %d to %d!!!!", __FUNCTION__, priv->scale_factor, cinfo->scale_denom); + } + priv->tile_read_x = (*x < cinfo->image_width)? *x: (cinfo->image_width - 1); + priv->tile_read_y = (*y < cinfo->image_height)? *y: (cinfo->image_height - 1); + priv->tile_read_width = (priv->tile_read_x + *w < cinfo->image_width)? *w: (cinfo->image_width - priv->tile_read_x); + priv->tile_read_width /= priv->scale_factor; + priv->tile_read_height = (priv->tile_read_y + *h < cinfo->image_height)? *h: (cinfo->image_height - priv->tile_read_y); + priv->tile_read_height /= priv->scale_factor; + ALOGV("%s, x=%d->%d, y=%d>%d, w=%d->%d, h=%d->%d", __FUNCTION__, + *x, priv->tile_read_x, + *y, priv->tile_read_y, + *w, priv->tile_read_width, + *h, priv->tile_read_height); + *x = priv->tile_read_x; + *y = priv->tile_read_y; + *w = priv->tile_read_width; + *h = priv->tile_read_height; + return DECODE_SUCCESS; + } + else { + // should not be here + assert(false); + return DECODE_DRIVER_FAIL; + } +} +Decode_Status jdva_read_tile_scanline (j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr, char ** scanlines, unsigned int* row_ctr) +{ + if (jd_libva_ptr->cap_enabled & JPEG_CAPABILITY_UPSAMPLE) { + jdva_private *priv = (jdva_private*)jd_libva_ptr->priv; + *row_ctr = 1; + memcpy(scanlines[0], priv->rgba_out + priv->tile_read_y * cinfo->image_width * 4 + priv->tile_read_x * 4, priv->tile_read_width * 4); + priv->tile_read_y++; + return DECODE_SUCCESS; + } + else { + // should not be here + assert(false); + return DECODE_DRIVER_FAIL; + } +} + void free_render_target(RenderTarget *target) { if (target == NULL) return; - uint32_t fourcc = pixelFormat2Fourcc(target->pixel_format); + uint32_t fourcc = target->pixel_format; if (target->type == RenderTarget::ANDROID_GRALLOC) { buffer_handle_t handle = (buffer_handle_t)target->handle; hw_module_t const* module = NULL; @@ -214,218 +328,190 @@ void free_render_target(RenderTarget *target) allocdev->free(allocdev, handle); gralloc_close(allocdev); } - ALOGV("%s deleting %s target %p", __FUNCTION__, fourcc2str(NULL, fourcc), target); -} - -void dump_yuy2_target(RenderTarget *target, JpegDecoder *decoder, const char *filename) -{ - uint32_t fourcc = pixelFormat2Fourcc(target->pixel_format); - assert(fourcc == VA_FOURCC_YUY2); - uint8_t *data; - uint32_t offsets[3]; - uint32_t pitches[3]; - JpegDecoder::MapHandle maphandle = 
decoder->mapData(*target, (void**) &data, offsets, pitches); - assert (maphandle.valid); - FILE* fpdump = fopen(filename, "wb"); - if (fpdump) { - // YUYV - for (int i = 0; i < target->height; ++i) { - fwrite(data + offsets[0] + i * pitches[0], 1, target->width * 2, fpdump); - } - fclose(fpdump); - } - else { - ALOGW("%s failed to create %s", __FUNCTION__, filename); - } - decoder->unmapData(*target, maphandle); -} - -void dump_dec_target(RenderTarget *target, JpegDecoder *decoder, const char *filename) -{ - uint32_t fourcc = pixelFormat2Fourcc(target->pixel_format); - assert((fourcc == VA_FOURCC_IMC3) || - (fourcc == VA_FOURCC_411P) || - (fourcc == VA_FOURCC('4','0','0','P')) || - (fourcc == VA_FOURCC_422H) || - (fourcc == VA_FOURCC_422V) || - (fourcc == VA_FOURCC_444P)); - uint8_t *data; - uint32_t offsets[3]; - uint32_t pitches[3]; - JpegDecoder::MapHandle maphandle = decoder->mapData(*target, (void**) &data, offsets, pitches); - assert (maphandle.valid); - FILE* fpdump = fopen(filename, "wb"); - if(fpdump) { - float hfactor, vfactor; - switch (fourcc) { - case VA_FOURCC_IMC3: - hfactor = 1; - vfactor = 0.5; - break; - case VA_FOURCC_444P: - hfactor = vfactor = 1; - break; - case VA_FOURCC_422H: - hfactor = 0.5; - vfactor = 1; - break; - case VA_FOURCC('4','0','0','P'): - hfactor = vfactor = 0; - break; - case VA_FOURCC_411P: - hfactor = 0.25; - vfactor = 1; - break; - case VA_FOURCC_422V: - hfactor = 0.5; - vfactor = 1; - break; - default: - hfactor = vfactor = 1; - break; - } - // Y - for (int i = 0; i < target->height; ++i) { - fwrite(data + offsets[0] + i * pitches[0], 1, target->width, fpdump); - } - // U - for (int i = 0; i < target->height * vfactor; ++i) { - fwrite(data + offsets[1] + i * pitches[1], 1, target->width * hfactor, fpdump); - } - // V - for (int i = 0; i < target->height * vfactor; ++i) { - fwrite(data + offsets[2] + i * pitches[2], 1, target->width * hfactor, fpdump); - } - fclose(fpdump); - } - else { - ALOGW("%s failed to create %s", __FUNCTION__, filename); - } - decoder->unmapData(*target, maphandle); + ALOGV("%s deleting %s target %p", __FUNCTION__, fourcc2str(fourcc), target); } +Decode_Status jdva_blit(struct jdva_private * priv); Decode_Status jdva_decode (j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr) { JpegDecodeStatus st; - char **outbuf = jd_libva_ptr->output_image; - uint32_t lines = jd_libva_ptr->output_lines; jdva_private * priv = (jdva_private*)jd_libva_ptr->priv; - if (!priv) - return DECODE_DRIVER_FAIL; - - JpegInfo& jpginfo = priv->jpg_info; - - st = priv->decoder.decode(jpginfo, priv->dec_buffer); - if (st != JD_SUCCESS) { - ALOGE("%s: error decoding %s image", __FUNCTION__, fourcc2str(NULL, jpginfo.image_color_fourcc)); - return DECODE_DRIVER_FAIL; - } - ALOGI("%s successfully decoded JPEG with VAAPI", __FUNCTION__); - RenderTarget *src_target = &priv->dec_buffer; - //dump_dec_target(src_target, decoder,"/sdcard/dec_dump.yuv"); - - bool yuy2_csc = false; hw_module_t const* module = NULL; alloc_device_t *allocdev = NULL; struct gralloc_module_t *gralloc_module = NULL; buffer_handle_t handle; int err; + char fname[256]; + FILE *fdec; uint8_t *data = NULL; uint32_t offsets[3]; uint32_t pitches[3]; + nsecs_t t1, t2, t3; JpegDecoder::MapHandle maphandle; - FILE *rgbafile = NULL; - if (jpginfo.image_color_fourcc != VA_FOURCC_422H) - yuy2_csc = true; - - // CSC to YUY2 if needed - if (yuy2_csc) { - st = priv->decoder.blit(*src_target, priv->yuy2_buffer); - if (st != JD_SUCCESS) { - ALOGE("%s: error blitting to YUY2 buffer", __FUNCTION__); - 
goto cleanup; - } - //dump_yuy2_target(src_target, decoder,"/sdcard/yuy2_dump.yuv"); - src_target = &priv->yuy2_buffer; + if (!priv) + return DECODE_DRIVER_FAIL; + + t1 = systemTime(); + JpegInfo& jpginfo = priv->jpg_info; + + if (jd_libva_ptr->cap_enabled & JPEG_CAPABILITY_DOWNSCALE) { + priv->scale_factor = cinfo->scale_denom; + cinfo->min_DCT_scaled_size = DCTSIZE/priv->scale_factor; + cinfo->output_width = cinfo->image_width/priv->scale_factor; + cinfo->output_height = cinfo->image_height/priv->scale_factor; + } + else { + priv->scale_factor = 1; + cinfo->min_DCT_scaled_size = DCTSIZE; + cinfo->output_width = cinfo->image_width; + cinfo->output_height = cinfo->image_height; } - st = priv->decoder.blit(*src_target, priv->rgba_buffer); + jdva_drain_input(cinfo, jd_libva_ptr); + jpginfo.need_header_only = false; + st = priv->decoder->parse(jpginfo); + switch (st) { + case JD_ERROR_BITSTREAM: + ALOGE("%s: error parsing bitstream", __FUNCTION__); + return DECODE_PARSER_FAIL; + case JD_SUCCESS: + break; + default: + ALOGE("%s: error in driver: parse failed", __FUNCTION__); + return DECODE_DRIVER_FAIL; + } + + st = priv->decoder->decode(jpginfo, priv->dec_buffer); if (st != JD_SUCCESS) { - ALOGE("%s: error blitting to RGBA buffer", __FUNCTION__); - goto cleanup; + ALOGE("%s: error decoding %s image", __FUNCTION__, fourcc2str(jpginfo.image_color_fourcc)); + return DECODE_DRIVER_FAIL; } - maphandle = priv->decoder.mapData(priv->rgba_buffer, (void**) &data, offsets, pitches); - - //rgbafile = fopen("/sdcard/rgba_dump", "wb"); - - for (uint32_t i = 0; i < lines; ++i) { - if (outbuf[i] != NULL) { - //memcpy(outbuf[i], data + offsets[0] + i * pitches[0], 4 * jpginfo.image_width); - for (int j = 0; j < priv->rgba_buffer.width; ++j) { - // BGRA -> RGBA - // R - memcpy(outbuf[i] + 4 * j, data + offsets[0] + i * pitches[0] + 4 * j + 2, 1); - // G - memcpy(outbuf[i] + 4 * j + 1, data + offsets[0] + i * pitches[0] + 4 * j + 1, 1); - // B - memcpy(outbuf[i] + 4 * j + 2, data + offsets[0] + i * pitches[0] + 4 * j, 1); - // A - memcpy(outbuf[i] + 4 * j + 3, data + offsets[0] + i * pitches[0] + 4 * j + 3, 1); - } +#if DUMP_DECODE + sprintf(fname, DECODE_DUMP_FILE_PATTERN, jpginfo.image_width, jpginfo.image_height, fourcc2str(jpginfo.image_color_fourcc)); + fdec = fopen(fname, "wb"); + if (fdec) { + maphandle = priv->decoder->mapData(priv->dec_buffer, (void**)&data, offsets, pitches); + int ss_x, ss_y; + ss_x = ss_y = -1; + switch(jpginfo.image_color_fourcc) { + case VA_FOURCC_411P: + ss_x = 2; + ss_y = 0; + break; + case VA_FOURCC_IMC3: + ss_x = 1; + ss_y = 1; + break; + case VA_FOURCC_422V: + ss_x = 0; + ss_y = 1; + break; + case VA_FOURCC_422H: + ss_x = 1; + ss_y = 0; + break; + case VA_FOURCC_444P: + ss_x = 0; + ss_y = 0; + break; + default: + break; } - else { - ALOGE("%s outbuf line %u is NULL", __FUNCTION__, i); + for (int r = 0; r < jpginfo.image_height; ++r) + fwrite(data + offsets[0] + pitches[0] * r, 1, jpginfo.image_width, fdec); + if (ss_x >=0 && ss_y >=0) { + for (int r = 0; r < jpginfo.image_height >> ss_y; ++r) + fwrite(data + offsets[1] + pitches[1] * r, 1, jpginfo.image_width >> ss_x, fdec); + for (int r = 0; r < jpginfo.image_height >> ss_y; ++r) + fwrite(data + offsets[2] + pitches[2] * r, 1, jpginfo.image_width >> ss_x, fdec); + } + priv->decoder->unmapData(priv->dec_buffer, maphandle); + fclose(fdec); + ALOGV("%s Dumped decode surface into %s", __FUNCTION__, fname); + } +#endif + t2 = systemTime(); + + if (!(jd_libva_ptr->cap_enabled & JPEG_CAPABILITY_UPSAMPLE)) { + ALOGV("%s decoded 
%ux%u %s JPEG for %.2f ms", __FUNCTION__, + priv->jpg_info.image_width, priv->jpg_info.image_height, + fourcc2str(priv->jpg_info.image_color_fourcc), + (t2-t1)/1000000.0); + // TODO: implement + } + else { + priv->rgba_out = (uint8_t*)memalign(0x1000, + aligned_width(cinfo->output_width, SURF_TILING_Y) + * aligned_height(cinfo->output_height, SURF_TILING_Y) * 4); + if (priv->rgba_out == NULL) { + ALOGE("%s failed to create RGBA buffer", __FUNCTION__); + return DECODE_MEMORY_FAIL; } - //if (rgbafile) { - // fwrite(data + offsets[0] + i * pitches[0], 1, 4 * rgba_target->width, rgbafile); - //} - } - //if (rgbafile) - // fclose(rgbafile); - ALOGI("%s successfully blitted RGBA from JPEG %s data", __FUNCTION__, fourcc2str(NULL, priv->jpg_info.image_color_fourcc)); - priv->decoder.unmapData(priv->rgba_buffer, maphandle); - return DECODE_SUCCESS; + Decode_Status ret; + { + Mutex::Autolock autoLock(jdlock); + ret = jdva_blit(priv); + if (ret != DECODE_SUCCESS) { + ALOGE("%s blit %ux%u (%dx scaling) %s failed", __FUNCTION__, + priv->jpg_info.image_width, priv->jpg_info.image_height, + priv->scale_factor, + fourcc2str(priv->jpg_info.image_color_fourcc)); + goto cleanup; + } + } + t3 = systemTime(); + ALOGI("%s decode+blit %ux%u (%dx scaling) %s JPEG for %.2f+%.2f ms", __FUNCTION__, + priv->jpg_info.image_width, priv->jpg_info.image_height, + priv->scale_factor, + fourcc2str(priv->jpg_info.image_color_fourcc), + (t2-t1)/1000000.0, (t3-t2)/1000000.0); + } + return DECODE_SUCCESS; cleanup: + if (priv->rgba_out) { + free(priv->rgba_out); + priv->rgba_out = NULL; + } return DECODE_DRIVER_FAIL; } +Decode_Status jdva_read_scanlines (j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr, char ** scanlines, unsigned int* row_ctr, unsigned int max_lines) +{ + if (jd_libva_ptr->cap_enabled & JPEG_CAPABILITY_UPSAMPLE) { + jdva_private *priv = (jdva_private*)jd_libva_ptr->priv; + uint32_t scanline = cinfo->output_scanline; + for (*row_ctr = 0; *row_ctr + scanline < cinfo->output_height && *row_ctr < max_lines; ++*row_ctr) { + memcpy(scanlines[*row_ctr], priv->rgba_out + (scanline + *row_ctr) * aligned_width(cinfo->output_width, SURF_TILING_Y) * 4, cinfo->output_width * 4); + } + return DECODE_SUCCESS; + } + else { + // should not be here + assert(false); + return DECODE_DRIVER_FAIL; + } +} + Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr) { VAStatus va_status = VA_STATUS_SUCCESS; Decode_Status status = DECODE_SUCCESS; - RenderTarget *dec_target, *yuy2_target, *rgba_target; - dec_target = yuy2_target = rgba_target = NULL; JpegDecodeStatus st; Mutex::Autolock autoLock(jdlock); jdva_private *priv = (jdva_private*)jd_libva_ptr->priv; jd_libva_ptr->image_width = priv->jpg_info.picture_param_buf.picture_width; jd_libva_ptr->image_height = priv->jpg_info.picture_param_buf.picture_height; - dec_target = create_render_target(&priv->dec_buffer, jd_libva_ptr->image_width,jd_libva_ptr->image_height,fourcc2PixelFormat(priv->jpg_info.image_color_fourcc)); - if (dec_target == NULL) { + create_render_target(&priv->dec_buffer, jd_libva_ptr->image_width,jd_libva_ptr->image_height,priv->jpg_info.image_color_fourcc); + if (&priv->dec_buffer == NULL) { ALOGE("%s failed to create decode render target", __FUNCTION__); return DECODE_MEMORY_FAIL; } - rgba_target = create_render_target(&priv->rgba_buffer, jd_libva_ptr->image_width,jd_libva_ptr->image_height, HAL_PIXEL_FORMAT_RGBA_8888); - if (rgba_target == NULL) { - ALOGE("%s failed to create YUY2 csc buffer", __FUNCTION__); - free_render_target(dec_target); - 
return DECODE_MEMORY_FAIL; - } - yuy2_target = create_render_target(&priv->yuy2_buffer, jd_libva_ptr->image_width,jd_libva_ptr->image_height, HAL_PIXEL_FORMAT_YCbCr_422_I); - if (yuy2_target == NULL) { - ALOGE("%s failed to create RGBA csc buffer", __FUNCTION__); - free_render_target(dec_target); - free_render_target(rgba_target); - return DECODE_MEMORY_FAIL; - } - RenderTarget *targetlist[3] = { dec_target, yuy2_target, rgba_target }; - st = priv->decoder.init(jd_libva_ptr->image_width, jd_libva_ptr->image_height, targetlist, 3); + RenderTarget *targets = &priv->dec_buffer; + st = priv->decoder->init(jd_libva_ptr->image_width, jd_libva_ptr->image_height, &targets, 1); if (st != JD_SUCCESS) { - free_render_target(dec_target); - free_render_target(rgba_target); - free_render_target(yuy2_target); + free_render_target(&priv->dec_buffer); ALOGE("%s failed to initialize resources for decoder: %d", __FUNCTION__, st); return DECODE_DRIVER_FAIL; } @@ -455,10 +541,12 @@ Decode_Status jdva_release_resource (jd_libva_struct * jd_libva_ptr) ALOGV("%s deiniting priv 0x%x", __FUNCTION__, jd_libva_ptr->priv); jdva_private *priv = (jdva_private*)jd_libva_ptr->priv; if (priv) { - priv->decoder.deinit(); + priv->decoder->deinit(); free_render_target(&priv->dec_buffer); - free_render_target(&priv->yuy2_buffer); - free_render_target(&priv->rgba_buffer); + if (priv->rgba_out) { + free(priv->rgba_out); + priv->rgba_out = NULL; + } } /* * It is safe to destroy Surface/Config/Context severl times @@ -477,9 +565,17 @@ Decode_Status jdva_parse_bitstream(j_decompress_ptr cinfo, jd_libva_struct * jd_ if (!priv) return DECODE_DRIVER_FAIL; JpegInfo& jpginfo = priv->jpg_info; - jpginfo.buf = jd_libva_ptr->bitstream_buf; - jpginfo.bufsize = jd_libva_ptr->file_size; - JpegDecodeStatus st = priv->decoder.parse(jpginfo); + JpegDecodeStatus st; + + Decode_Status res; + jpginfo.need_header_only = true; + do { + res = jdva_fill_input(cinfo, jd_libva_ptr); + if (res) { + return res; + } + st = priv->decoder->parse(jpginfo); + } while (st == JD_INSUFFICIENT_BYTE); if (st != JD_SUCCESS) { ALOGE("%s parser for HW decode failed: %d", __FUNCTION__, st); return DECODE_PARSER_FAIL; @@ -492,8 +588,44 @@ Decode_Status jdva_parse_bitstream(j_decompress_ptr cinfo, jd_libva_struct * jd_ cinfo->image_height = jpginfo.picture_param_buf.picture_height; /* nominal image height */ cinfo->num_components = jpginfo.picture_param_buf.num_components; /* # of color components in JPEG image */ cinfo->jpeg_color_space = JCS_YCbCr; /* colorspace of JPEG image */ - cinfo->out_color_space = JCS_RGB; /* colorspace for output */ + cinfo->out_color_space = JCS_RGB; /* set default colorspace for output */ cinfo->src->bytes_in_buffer = jd_libva_ptr->file_size; + cinfo->scale_num = cinfo->scale_denom = 1; /* set default value */ return DECODE_SUCCESS; } +Decode_Status jdva_blit(struct jdva_private * priv) +{ + JpegDecodeStatus st; + nsecs_t t1, t2; + + char fname[256]; + FILE *fdec; + t1 = systemTime(); + st = priv->decoder->blitToLinearRgba(priv->dec_buffer, priv->rgba_out, + priv->jpg_info.image_width, + priv->jpg_info.image_height, + priv->blit_event, priv->scale_factor); + if (st != JD_SUCCESS) { + ALOGE("%s: error blitting to RGBA buffer", __FUNCTION__); + goto cleanup; + } + t2 = systemTime(); +#if DUMP_RGBA + sprintf(fname, RGBA_DUMP_FILE_PATTERN, priv->jpg_info.output_width, priv->jpg_info.output_height, fourcc2str(priv->jpg_info.image_color_fourcc)); + fdec = fopen(fname, "wb"); + if (fdec) { + fwrite(priv->rgba_out, 1, priv->jpg_info.output_width * 
priv->jpg_info.output_height * 4, fdec); + fclose(fdec); + ALOGV("%s Dumped RGBA output into %s", __FUNCTION__, fname); + } +#endif + ALOGV("%s blitted %ux%u RGBA from JPEG %s data for %.2f ms", __FUNCTION__, + priv->jpg_info.image_width, priv->jpg_info.image_height, + fourcc2str(priv->jpg_info.image_color_fourcc), + (t2-t1)/1000000.0); + return DECODE_SUCCESS; +cleanup: + return DECODE_DRIVER_FAIL; +} + diff --git a/imagedecoder/JPEGDecoder_libjpeg_wrapper.h b/imagedecoder/JPEGDecoder_libjpeg_wrapper.h index c9d060b..72a216a 100644 --- a/imagedecoder/JPEGDecoder_libjpeg_wrapper.h +++ b/imagedecoder/JPEGDecoder_libjpeg_wrapper.h @@ -1,6 +1,5 @@ /* INTEL CONFIDENTIAL * Copyright (c) 2012, 2013 Intel Corporation. All rights reserved. -* Copyright (c) Imagination Technologies Limited, UK * * The source code contained or described herein and all documents * related to the source code ("Material") are owned by Intel @@ -41,7 +40,7 @@ #define JPEG_MAX_QUANT_TABLES 4 typedef struct { - uint8_t* bitstream_buf; + const uint8_t* bitstream_buf; uint32_t image_width; uint32_t image_height; @@ -53,24 +52,30 @@ typedef struct { uint32_t file_size; uint32_t rotation; + int tile_mode; - char ** output_image; - uint32_t output_lines; + uint32_t cap_available; + uint32_t cap_enabled; uint32_t priv; } jd_libva_struct; typedef enum { - DECODE_NOT_STARTED = -6, - DECODE_INVALID_DATA = -5, - DECODE_DRIVER_FAIL = -4, - DECODE_PARSER_FAIL = -3, + DECODE_NOT_STARTED = -7, + DECODE_INVALID_DATA = -6, + DECODE_DRIVER_FAIL = -5, + DECODE_PARSER_FAIL = -4, + DECODE_PARSER_INSUFFICIENT_BYTES = -3, DECODE_MEMORY_FAIL = -2, DECODE_FAIL = -1, DECODE_SUCCESS = 0, } IMAGE_DECODE_STATUS; +#define JPEG_CAPABILITY_DECODE 0x0 +#define JPEG_CAPABILITY_UPSAMPLE 0x1 +#define JPEG_CAPABILITY_DOWNSCALE 0x2 + /*********************** for libjpeg ****************************/ typedef int32_t Decode_Status; extern jd_libva_struct jd_libva; @@ -79,7 +84,12 @@ extern "C" { #endif Decode_Status jdva_initialize (jd_libva_struct * jd_libva_ptr); void jdva_deinitialize (jd_libva_struct * jd_libva_ptr); +Decode_Status jdva_fill_input(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr); +void jdva_drain_input(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr); Decode_Status jdva_decode (j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr); +Decode_Status jdva_read_scanlines (j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr, char ** scanlines, unsigned int* row_ctr, unsigned int max_lines); +Decode_Status jdva_init_read_tile_scanline(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr, int *x, int *y, int *w, int *h); +Decode_Status jdva_read_tile_scanline (j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr, char ** scanlines, unsigned int* row_ctr); Decode_Status jdva_create_resource (jd_libva_struct * jd_libva_ptr); Decode_Status jdva_release_resource (jd_libva_struct * jd_libva_ptr); Decode_Status jdva_parse_bitstream(j_decompress_ptr cinfo, jd_libva_struct * jd_libva_ptr); diff --git a/imagedecoder/JPEGParser.cpp b/imagedecoder/JPEGParser.cpp index 1d6ab26..c2ca299 100644 --- a/imagedecoder/JPEGParser.cpp +++ b/imagedecoder/JPEGParser.cpp @@ -104,11 +104,94 @@ bool endOfBuffer(CJPEGParse* parser) { return parser->end_of_buff; } -uint8_t* getCurrentIndex(CJPEGParse* parser) { +const uint8_t* getCurrentIndex(CJPEGParse* parser) { return parser->stream_buff + parser->parse_index; } -void parserInitialize(CJPEGParse* parser, uint8_t* stream_buff, uint32_t buff_size) { +uint32_t getRemainingBytes(CJPEGParse* parser) { + 
return parser->buff_size - parser->parse_index - 1; +} + +bool endOfBufferStr(CJPEGParse* parser); + +uint8_t readNextByteStr(CJPEGParse* parser) { + uint8_t byte = 0; + + if (parser->parse_index < parser->inputs->size()) { + byte = parser->inputs->itemAt(parser->parse_index); + parser->parse_index++; + } + + if (parser->parse_index == parser->inputs->size()) { + parser->end_of_buff = true; + } + + return byte; +} + +uint32_t readBytesStr( CJPEGParse* parser, uint32_t bytes_to_read ) { + uint32_t bytes = 0; + + while (bytes_to_read-- && !endOfBufferStr(parser)) { + bytes |= ( (uint32_t)readNextByteStr(parser) << ( bytes_to_read * 8 ) ); + } + + return bytes; +} + +void burnBytesStr( CJPEGParse* parser, uint32_t bytes_to_burn ) { + parser->parse_index += bytes_to_burn; + + if (parser->parse_index >= parser->inputs->size()) { + parser->parse_index = parser->inputs->size() - 1; + parser->end_of_buff = true; + } +} + +uint8_t getNextMarkerStr(CJPEGParse* parser) { + while (!endOfBufferStr(parser)) { + if (readNextByteStr(parser) == 0xff) { + break; + } + } + /* check the next byte to make sure we don't miss the real marker*/ + uint8_t tempNextByte = readNextByteStr(parser); + if (tempNextByte == 0xff) + return readNextByteStr(parser); + else + return tempNextByte; +} + +bool setByteOffsetStr(CJPEGParse* parser, uint32_t byte_offset) +{ + bool offset_found = false; + + if (byte_offset < parser->inputs->size()) { + parser->parse_index = byte_offset; + offset_found = true; +// end_of_buff = false; + } + + return offset_found; +} + +uint32_t getByteOffsetStr(CJPEGParse* parser) { + return parser->parse_index; +} + +bool endOfBufferStr(CJPEGParse* parser) { + return parser->end_of_buff; +} + +const uint8_t* getCurrentIndexStr(CJPEGParse* parser) { + return parser->inputs->array() + parser->parse_index; +} + +uint32_t getRemainingBytesStr(CJPEGParse* parser) { + return parser->inputs->size() - parser->parse_index - 1; +} + +void parserInitialize(CJPEGParse* parser, const uint8_t* stream_buff, uint32_t buff_size) { parser->parse_index = 0; parser->buff_size = buff_size; parser->stream_buff = stream_buff; @@ -121,4 +204,21 @@ void parserInitialize(CJPEGParse* parser, uint8_t* stream_buff, uint32_t buff_s parser->endOfBuffer = endOfBuffer; parser->getCurrentIndex = getCurrentIndex; parser->setByteOffset= setByteOffset; + parser->getRemainingBytes = getRemainingBytes; } + +void parserInitialize(CJPEGParse* parser, android::Vector<uint8_t> *inputs) { + parser->parse_index = 0; + parser->inputs = inputs; + parser->end_of_buff = false; + parser->readNextByte = readNextByteStr; + parser->readBytes = readBytesStr; + parser->burnBytes = burnBytesStr; + parser->getNextMarker = getNextMarkerStr; + parser->getByteOffset = getByteOffsetStr; + parser->endOfBuffer = endOfBufferStr; + parser->getCurrentIndex = getCurrentIndexStr; + parser->setByteOffset= setByteOffsetStr; + parser->getRemainingBytes = getRemainingBytesStr; +} + diff --git a/imagedecoder/JPEGParser.h b/imagedecoder/JPEGParser.h index be6ac4d..1dfc1bd 100644 --- a/imagedecoder/JPEGParser.h +++ b/imagedecoder/JPEGParser.h @@ -30,7 +30,8 @@ #define _JPEG_PARSE_H_ #include <stdint.h> - +#include <utils/Vector.h> +using namespace std; // Marker Codes #define CODE_SOF_BASELINE 0xC0 #define CODE_SOF1 0xC1 @@ -78,9 +79,10 @@ #define CODE_APP15 0xEF struct CJPEGParse { - uint8_t* stream_buff; + const uint8_t* stream_buff; uint32_t parse_index; uint32_t buff_size; + android::Vector<uint8_t> *inputs; bool end_of_buff; uint8_t (*readNextByte)(CJPEGParse* parser); uint32_t (*readBytes)( CJPEGParse* parser, 
uint32_t bytes_to_read ); @@ -88,10 +90,129 @@ struct CJPEGParse { uint8_t (*getNextMarker)(CJPEGParse* parser); uint32_t (*getByteOffset)(CJPEGParse* parser); bool (*endOfBuffer)(CJPEGParse* parser); - uint8_t* (*getCurrentIndex)(CJPEGParse* parser); + const uint8_t* (*getCurrentIndex)(CJPEGParse* parser); bool (*setByteOffset)( CJPEGParse* parser, uint32_t byte_offset ); + uint32_t (*getRemainingBytes)(CJPEGParse* parser); }; -void parserInitialize(CJPEGParse* parser, uint8_t* stream_buff, uint32_t buff_size); +void parserInitialize(CJPEGParse* parser, const uint8_t* stream_buff, uint32_t buff_size); +void parserInitialize(CJPEGParse* parser, android::Vector<uint8_t> *inputs); + +class JpegBitstreamParser +{ +public: + void set(android::Vector<uint8_t>* inputs) + { + parserInitialize(&parser, inputs); + use_vector = true; + } + void set(const uint8_t *buf, uint32_t bufsize) + { + parserInitialize(&parser, buf, bufsize); + use_vector = false; + } + bool tryReadNextByte(uint8_t *byte) + { + if (parser.getRemainingBytes(&parser) >= 1) { + *byte = parser.readNextByte(&parser); + return true; + } + return false; + } + bool tryReadBytes(uint32_t *bytes, uint32_t bytes_to_read) + { + if (parser.getRemainingBytes(&parser) >= bytes_to_read) { + *bytes = parser.readBytes(&parser, bytes_to_read); + return true; + } + return false; + } + bool tryBurnBytes(uint32_t bytes_to_burn) + { + if (parser.getRemainingBytes(&parser) >= bytes_to_burn) { + parser.burnBytes(&parser, bytes_to_burn); + return true; + } + return false; + } + bool tryGetNextMarker(uint8_t *marker) + { + uint32_t rollbackoff = parser.getByteOffset(&parser); + while (!parser.endOfBuffer(&parser)) { + if (tryReadNextByte(marker)) { + if (*marker == 0xff) { + //rollbackoff = parser.parse_index - 1; + break; + } + } else { + goto rollback; + } + } + /* check the next byte to make sure we don't miss the real marker*/ + if (tryReadNextByte(marker)) { + if (*marker == 0xff) { + if (tryReadNextByte(marker)) { + return true; + } + else + goto rollback; + } + else { + return true; + } + } + else goto rollback; +rollback: + parser.parse_index = rollbackoff; + return false; + } + uint32_t getByteOffset() + { + return parser.getByteOffset(&parser); + } + bool endOfBuffer() + { + return parser.endOfBuffer(&parser); + } + const uint8_t* getCurrentIndex() + { + return parser.getCurrentIndex(&parser); + } + bool trySetByteOffset(uint32_t byte_offset) + { + uint32_t bufsize; + if (use_vector) + bufsize = parser.inputs->size(); + else + bufsize= parser.buff_size; + if (bufsize > byte_offset) { + parser.setByteOffset(&parser, byte_offset); + return true; + } + return false; + } + uint32_t getRemainingBytes() + { + return parser.getRemainingBytes(&parser); + } + const uint8_t itemAt(uint32_t index) + { + if (use_vector) + return parser.inputs->itemAt(index); + else + return parser.stream_buff[index]; + } + void reset() + { + parser.parse_index = 0; + parser.inputs = NULL; + parser.stream_buff = NULL; + parser.buff_size = 0; + use_vector = false; + } +private: + CJPEGParse parser; + bool use_vector; +}; #endif // _JPEG_PARSE_H_ diff --git a/imagedecoder/libjpeg_cm_genx.isa b/imagedecoder/libjpeg_cm_genx.isa new file mode 100644 index 0000000..0947267 Binary files /dev/null and b/imagedecoder/libjpeg_cm_genx.isa differ diff --git a/imagedecoder/test/testdecode.cpp b/imagedecoder/test/testdecode.cpp index 6823b85..3dde5b4 100644 --- a/imagedecoder/test/testdecode.cpp +++ b/imagedecoder/test/testdecode.cpp @@ -4,21 +4,86 @@ #include #include #include +#ifdef NDEBUG #undef 
NDEBUG +#endif #include #include -#define JPGFILE "/sdcard/1280x720xYUV422H.jpg" +static char jpgfile[100]; -RenderTarget& init_render_target(RenderTarget &target, int width, int height, int pixel_format) +RenderTarget& init_render_target_drm(RenderTarget &target, int width, int height, uint32_t fourcc, buffer_handle_t *handle) +{ + hw_module_t const* module = NULL; + alloc_device_t *allocdev = NULL; + struct gralloc_module_t *gralloc_module = NULL; + int stride, bpp, err; + bpp = fourcc2LumaBitsPerPixel(fourcc); + err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); + if (err || !module) { + printf("%s failed to get gralloc module\n", __PRETTY_FUNCTION__); + assert(false); + } + gralloc_module = (struct gralloc_module_t *)module; + err = gralloc_open(module, &allocdev); + if (err || !allocdev) { + printf("%s failed to open alloc device\n", __PRETTY_FUNCTION__); + assert(false); + } + err = allocdev->alloc(allocdev, + width, + height, + fourcc2PixelFormat(fourcc), + GRALLOC_USAGE_HW_RENDER, + handle, + &stride); + if (err) { + gralloc_close(allocdev); + printf("%s failed to allocate surface %d, %dx%d, pixelformat %x\n", __PRETTY_FUNCTION__, err, + width, height, fourcc2PixelFormat(fourcc)); + assert(false); + } + unsigned long boname; + err = gralloc_module->perform(gralloc_module, + INTEL_UFO_GRALLOC_MODULE_PERFORM_GET_BO_NAME, + *handle, + &boname); + assert(!err); + target.type = RenderTarget::KERNEL_DRM; + target.handle = (int)boname; + switch(fourcc) { + case VA_FOURCC_NV12: + case VA_FOURCC_422H: + case VA_FOURCC_422V: + case VA_FOURCC_IMC3: + case VA_FOURCC_444P: + case VA_FOURCC_411P: + case VA_FOURCC('4','0','0','P'): + target.width = aligned_width(width, SURF_TILING_Y); + target.height = aligned_height(height, SURF_TILING_Y); + break; + default: + target.width = aligned_width(width, SURF_TILING_NONE); + target.height = aligned_height(height, SURF_TILING_NONE); + break; + } + target.format = fourcc2VaFormat(fourcc); + target.pixel_format = fourcc; + target.rect.x = target.rect.y = 0; + target.rect.width = width; + target.rect.height = height; + target.stride = stride * bpp; + gralloc_close(allocdev); + return target; +} + +RenderTarget& init_render_target_gralloc(RenderTarget &target, int width, int height, uint32_t fourcc) { hw_module_t const* module = NULL; alloc_device_t *allocdev = NULL; struct gralloc_module_t *gralloc_module = NULL; buffer_handle_t handle; - uint32_t fourcc; int stride, bpp, err; - fourcc = pixelFormat2Fourcc(pixel_format); bpp = fourcc2LumaBitsPerPixel(fourcc); err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); if (err || !module) { @@ -34,31 +99,145 @@ RenderTarget& init_render_target(RenderTarget &target, int width, int height, in err = allocdev->alloc(allocdev, width, height, - pixel_format, + fourcc2PixelFormat(fourcc), GRALLOC_USAGE_HW_RENDER, &handle, &stride); if (err) { gralloc_close(allocdev); printf("%s failed to allocate surface %d, %dx%d, pixelformat %x\n", __PRETTY_FUNCTION__, err, - width, height, pixel_format); + width, height, fourcc2PixelFormat(fourcc)); assert(false); } target.type = RenderTarget::ANDROID_GRALLOC; target.handle = (int)handle; + switch(fourcc) { + case VA_FOURCC_NV12: + case VA_FOURCC_YUY2: + case VA_FOURCC_UYVY: + case VA_FOURCC_422H: + case VA_FOURCC_422V: + case VA_FOURCC_IMC3: + case VA_FOURCC_444P: + case VA_FOURCC_411P: + case VA_FOURCC('4','0','0','P'): + target.width = aligned_width(width, SURF_TILING_Y); + target.height = aligned_height(height, SURF_TILING_Y); + break; + default: + target.width = 
aligned_width(width, SURF_TILING_NONE); + target.height = aligned_height(height, SURF_TILING_NONE); + break; + } + + target.format = fourcc2VaFormat(fourcc); + target.pixel_format = fourcc; + target.rect.x = target.rect.y = 0; + target.rect.width = width; + target.rect.height = height; + target.stride = stride * bpp; + gralloc_close(allocdev); + return target; +} + +RenderTarget& init_render_target_userptr(RenderTarget &target, int width, int height, uint32_t fourcc) +{ + hw_module_t const* module = NULL; + alloc_device_t *allocdev = NULL; + static int surf_hnd = 0; + int stride, bpp, err; + void * userptr = NULL; + size_t mallocsize; + bpp = fourcc2LumaBitsPerPixel(fourcc); + target.type = RenderTarget::USER_PTR; + + // all linear, no alignment + switch(fourcc) { + case VA_FOURCC_NV12: + mallocsize = width * height * 3 / 2; + break; + case VA_FOURCC_YUY2: + case VA_FOURCC_UYVY: + mallocsize = width * height * 2; + break; + case VA_FOURCC_422H: + mallocsize = width * height * 3; + break; + case VA_FOURCC_422V: + mallocsize = width * height * 2; + break; + case VA_FOURCC_IMC3: + mallocsize = width * height * 2; + break; + case VA_FOURCC_444P: + mallocsize = width * height * 3; + break; + case VA_FOURCC_411P: + mallocsize = width * height * 3; + break; + case VA_FOURCC_411R: + mallocsize = width * height * 3 / 2; + break; + case VA_FOURCC('4','0','0','P'): + mallocsize = width * height; + break; + case VA_FOURCC_RGBA: + case VA_FOURCC_BGRA: + mallocsize = width * height * 4; + break; + default: + mallocsize = width * height * 3; + break; + } + userptr = memalign(0x1000, mallocsize); target.width = width; target.height = height; - target.pixel_format = pixel_format; + target.pixel_format = fourcc; target.rect.x = target.rect.y = 0; target.rect.width = target.width; target.rect.height = target.height; - target.stride = stride * bpp; + target.handle = (int)userptr; + //target.stride = stride * bpp; return target; } -void deinit_render_target(RenderTarget &target) +RenderTarget& init_render_target(RenderTarget &target, int width, int height, uint32_t fourcc) +{ + hw_module_t const* module = NULL; + alloc_device_t *allocdev = NULL; + static int surf_hnd = 0; + int stride, bpp, err; + bpp = fourcc2LumaBitsPerPixel(fourcc); + target.type = RenderTarget::INTERNAL_BUF; + target.handle = generateHandle(); + switch(fourcc) { + case VA_FOURCC_NV12: + case VA_FOURCC_YUY2: + case VA_FOURCC_UYVY: + case VA_FOURCC_422H: + case VA_FOURCC_422V: + case VA_FOURCC_IMC3: + case VA_FOURCC_444P: + case VA_FOURCC_411P: + case VA_FOURCC('4','0','0','P'): + target.width = aligned_width(width, SURF_TILING_Y); + target.height = aligned_height(height, SURF_TILING_Y); + break; + default: + target.width = aligned_width(width, SURF_TILING_NONE); + target.height = aligned_height(height, SURF_TILING_NONE); + break; + } + target.pixel_format = fourcc; + target.rect.x = target.rect.y = 0; + target.rect.width = target.width; + target.rect.height = target.height; + //target.stride = stride * bpp; + return target; +} + +void deinit_render_target(RenderTarget &target, buffer_handle_t *handle = NULL) { - buffer_handle_t handle = (buffer_handle_t)target.handle; hw_module_t const* module = NULL; alloc_device_t *allocdev = NULL; struct gralloc_module_t *gralloc_module = NULL; @@ -73,24 +252,66 @@ void deinit_render_target(RenderTarget &target) printf("%s failed to get gralloc module\n", __PRETTY_FUNCTION__); return; } - allocdev->free(allocdev, handle); + if (handle && target.type == RenderTarget::KERNEL_DRM) + allocdev->free(allocdev, 
*handle); + else if (target.type == RenderTarget::ANDROID_GRALLOC) + allocdev->free(allocdev, (buffer_handle_t)target.handle); + else if (target.type == RenderTarget::USER_PTR) + free((void*)target.handle); gralloc_close(allocdev); } -void decode_blit_functionality_test() +void decode_blit_functionality_test(RenderTarget::bufType type, uint32_t format, int scale_factor) { JpegDecodeStatus st; + VAStatus vast; JpegInfo jpginfo; hw_module_t const* module = NULL; alloc_device_t *allocdev = NULL; struct gralloc_module_t *gralloc_module = NULL; - buffer_handle_t handle; - JpegDecoder decoder; - JpegBlitter blitter; - blitter.setDecoder(decoder); - RenderTarget targets[5]; - RenderTarget *dec_target, *blit_nv12_target, *blit_rgba_target, *blit_yuy2_target, *blit_yv12_target; - FILE* fp = fopen(JPGFILE, "rb"); + VAStatus vret; + char decdumpfile[100]; + char origdecdumpfile[100]; + char nv12dumpfile[100]; + char nv21dumpfile[100]; + char yuy2dumpfile[100]; + char yv12dumpfile[100]; + char rgbadumpfile[100]; + FILE* fpdump = NULL; + memset(&jpginfo, 0, sizeof(JpegInfo)); + memset(decdumpfile, 0, sizeof(decdumpfile)); + memset(origdecdumpfile, 0, sizeof(origdecdumpfile)); + memset(nv12dumpfile, 0, sizeof(nv12dumpfile)); + memset(nv21dumpfile, 0, sizeof(nv21dumpfile)); + memset(yuy2dumpfile, 0, sizeof(yuy2dumpfile)); + memset(yv12dumpfile, 0, sizeof(yv12dumpfile)); + memset(rgbadumpfile, 0, sizeof(rgbadumpfile)); + VADisplay display = NULL; + VAConfigID vpCfgId = VA_INVALID_ID; + VAContextID vpCtxId = VA_INVALID_ID; + typedef uint32_t Display; + Display dpy; + int va_major_version, va_minor_version; + VAConfigAttrib vpp_attrib; + display = vaGetDisplay(&dpy); + vast = vaInitialize(display, &va_major_version, &va_minor_version); + assert(vast == VA_STATUS_SUCCESS); + vpp_attrib.type = VAConfigAttribRTFormat; + vpp_attrib.value = VA_RT_FORMAT_YUV420; + vast = vaCreateConfig(display, VAProfileNone, + VAEntrypointVideoProc, + &vpp_attrib, + 1, &vpCfgId); + assert(vast == VA_STATUS_SUCCESS); + vast = vaCreateContext(display, vpCfgId, 1920, 1080, 0, NULL, 0, &vpCtxId); + assert(vast == VA_STATUS_SUCCESS); + JpegDecoder decoder(display, vpCfgId, vpCtxId, true); + + RenderTarget dec_target; + buffer_handle_t dec_handle, nv12_handle, yuy2_handle; + uint8_t *nv12_mem, *yuy2_mem, *nv21_mem, *yv12_mem, *rgba_mem; + int stride; + FILE* fp = fopen(jpgfile, "rb"); assert(fp); fseek(fp, 0, SEEK_END); jpginfo.bufsize = ftell(fp); @@ -100,329 +321,325 @@ void decode_blit_functionality_test() fclose(fp); printf("finished loading src file: size %u\n", jpginfo.bufsize); + jpginfo.need_header_only = false; + jpginfo.use_vector_input = false; st = decoder.parse(jpginfo); assert(st == JD_SUCCESS); + printf("parse succeeded: %ux%u\n", jpginfo.image_width, jpginfo.image_height); - init_render_target(targets[0], jpginfo.image_width, jpginfo.image_height, jpginfo.image_pixel_format); - init_render_target(targets[1], jpginfo.image_width, jpginfo.image_height, HAL_PIXEL_FORMAT_NV12_TILED_INTEL); - init_render_target(targets[2], jpginfo.image_width, jpginfo.image_height, HAL_PIXEL_FORMAT_RGBA_8888); - init_render_target(targets[3], jpginfo.image_width, jpginfo.image_height, HAL_PIXEL_FORMAT_YCbCr_422_I); - init_render_target(targets[4], jpginfo.image_width, jpginfo.image_height, HAL_PIXEL_FORMAT_YV12); - dec_target = &targets[0]; - blit_nv12_target = &targets[1]; - blit_rgba_target = &targets[2]; - blit_yuy2_target = &targets[3]; - blit_yv12_target = &targets[4]; - dec_target->rect.x = blit_nv12_target->rect.x = 
blit_yuy2_target->rect.x = blit_rgba_target->rect.x = blit_yv12_target->rect.x = 0; - dec_target->rect.y = blit_nv12_target->rect.y = blit_yuy2_target->rect.y = blit_rgba_target->rect.y = blit_yv12_target->rect.y = 0; - dec_target->rect.width = blit_nv12_target->rect.width = blit_yuy2_target->rect.width = blit_rgba_target->rect.width = blit_yv12_target->rect.width = jpginfo.image_width; - dec_target->rect.height = blit_nv12_target->rect.height = blit_yuy2_target->rect.height = blit_rgba_target->rect.height = blit_yv12_target->rect.height = jpginfo.image_height; - RenderTarget* targetlist[5] = {dec_target, blit_nv12_target, blit_rgba_target, blit_yuy2_target, blit_yv12_target }; - //st = decoder.init(jpginfo.image_width, jpginfo.image_height, targetlist, 5); - st = decoder.init(jpginfo.image_width, jpginfo.image_height, &dec_target, 1); - assert(st == JD_SUCCESS); + if (format == 0) + format = jpginfo.image_color_fourcc; - //jpginfo.render_target = dec_target; - st = decoder.decode(jpginfo, *dec_target); - printf("decode returns %d\n", st); - assert(st == JD_SUCCESS); + char buftypename[100]; + switch(type) { + case RenderTarget::KERNEL_DRM: + sprintf(buftypename, "DRM"); + init_render_target_drm(dec_target, jpginfo.image_width, jpginfo.image_height, format, &dec_handle); + break; + case RenderTarget::ANDROID_GRALLOC: + sprintf(buftypename, "GRALLOC"); + init_render_target_gralloc(dec_target, jpginfo.image_width, jpginfo.image_height, format); + break; + case RenderTarget::INTERNAL_BUF: + sprintf(buftypename, "DRIVER"); + init_render_target(dec_target, jpginfo.image_width, jpginfo.image_height, format); + break; + default: + assert(0); + break; + } - uint8_t *data; - uint32_t offsets[3]; - uint32_t pitches[3]; - JpegDecoder::MapHandle maphandle = decoder.mapData(*dec_target, (void**) &data, offsets, pitches); - assert (maphandle.valid); - FILE* fpdump = fopen("/sdcard/dec_dump.yuv", "wb"); - assert(fpdump); - // Y - for (int i = 0; i < dec_target->height; ++i) { - fwrite(data + offsets[0] + i * pitches[0], 1, dec_target->width, fpdump); + uint32_t aligned_w = aligned_width(jpginfo.image_width, SURF_TILING_Y); + uint32_t aligned_h = aligned_height(jpginfo.image_height, SURF_TILING_Y); + uint32_t aligned_scaled_w = aligned_width(jpginfo.image_width / scale_factor, SURF_TILING_Y); + uint32_t aligned_scaled_h = aligned_width(jpginfo.image_height / scale_factor, SURF_TILING_Y); + int err; + err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module); + if (err || !module) { + printf("%s failed to get gralloc module\n", __PRETTY_FUNCTION__); + assert(false); + } + gralloc_module = (struct gralloc_module_t *)module; + err = gralloc_open(module, &allocdev); + if (err || !allocdev) { + printf("%s failed to open alloc device\n", __PRETTY_FUNCTION__); + assert(false); } - // U - for (int i = 0; i < dec_target->height; ++i) { - fwrite(data + offsets[1] + i * pitches[1], 1, dec_target->width/2, fpdump); + err = allocdev->alloc(allocdev, + aligned_w, + aligned_h, + fourcc2PixelFormat(VA_FOURCC_NV12), + GRALLOC_USAGE_HW_RENDER, + &nv12_handle, + &stride); + if (err) { + printf("%s failed to allocate surface %d, %dx%d, pixelformat %x\n", __PRETTY_FUNCTION__, err, + aligned_w, aligned_h, fourcc2PixelFormat(VA_FOURCC_NV12)); + assert(false); } - // V - for (int i = 0; i < dec_target->height; ++i) { - fwrite(data + offsets[2] + i * pitches[2], 1, dec_target->width/2, fpdump); + err = allocdev->alloc(allocdev, + aligned_w, + aligned_h, + fourcc2PixelFormat(VA_FOURCC_YUY2), + GRALLOC_USAGE_HW_RENDER, + 
&yuy2_handle, + &stride); + if (err) { + printf("%s failed to allocate surface %d, %dx%d, pixelformat %x\n", __PRETTY_FUNCTION__, err, + aligned_w, aligned_h, fourcc2PixelFormat(VA_FOURCC_YUY2)); + assert(false); } - fclose(fpdump); - printf("Dumped decoded YUV to /sdcard/dec_dump.yuv\n"); - decoder.unmapData(*dec_target, maphandle); - st = decoder.blit(*dec_target, *blit_nv12_target); - assert(st == JD_SUCCESS); + nv21_mem = (uint8_t*)memalign(0x1000, aligned_w * aligned_h * 3 / 2); + yv12_mem = (uint8_t*)memalign(0x1000, aligned_w * aligned_h * 3 / 2); + rgba_mem = (uint8_t*)memalign(0x1000, aligned_scaled_w * aligned_scaled_h * 4); + assert(nv21_mem && yv12_mem && rgba_mem); - maphandle = decoder.mapData(*blit_nv12_target, (void**) &data, offsets, pitches); - assert (maphandle.valid); - fpdump = fopen("/sdcard/nv12_dump.yuv", "wb"); - assert(fpdump); - // Y - for (int i = 0; i < blit_nv12_target->height; ++i) { - fwrite(data + offsets[0] + i * pitches[0], 1, blit_nv12_target->width, fpdump); - } - // UV - for (int i = 0; i < blit_nv12_target->height/2; ++i) { - fwrite(data + offsets[1] + i * pitches[1], 1, blit_nv12_target->width, fpdump); - } - fclose(fpdump); - printf("Dumped converted NV12 to /sdcard/nv12_dump.yuv\n"); - decoder.unmapData(*blit_nv12_target, maphandle); + sprintf(decdumpfile, "/sdcard/jpeg_%s_dec_%dx%d.%s", buftypename, jpginfo.image_width, jpginfo.image_height, fourcc2str(format)); + sprintf(origdecdumpfile, "/sdcard/jpeg_%s_dec_orig_%dx%d.yuv", buftypename, aligned_w, aligned_h); + sprintf(nv12dumpfile, "/sdcard/jpeg_%s_out_%dx%d.nv12", buftypename, aligned_w, aligned_h); + sprintf(nv21dumpfile, "/sdcard/jpeg_%s_out_%dx%d.nv21", buftypename, aligned_w, aligned_h); + sprintf(yuy2dumpfile, "/sdcard/jpeg_%s_out_%dx%d.yuy2", buftypename, aligned_w, aligned_h); + sprintf(yv12dumpfile, "/sdcard/jpeg_%s_out_%dx%d.yv12", buftypename, aligned_w, aligned_h); + sprintf(rgbadumpfile, "/sdcard/jpeg_%s_out_%dx%d.rgba", buftypename, aligned_scaled_w, aligned_scaled_h); - st = decoder.blit(*dec_target, *blit_yuy2_target); + RenderTarget* targetlist[1] = {&dec_target}; + st = decoder.init(jpginfo.image_width, jpginfo.image_height, targetlist, 1); assert(st == JD_SUCCESS); - maphandle = decoder.mapData(*blit_yuy2_target, (void**) &data, offsets, pitches); - assert (maphandle.valid); - fpdump = fopen("/sdcard/yuy2_dump.yuv", "wb"); - assert(fpdump); - // YUYV - for (int i = 0; i < blit_yuy2_target->height; ++i) { - fwrite(data + offsets[0] + i * pitches[0], 2, blit_yuy2_target->width, fpdump); - } - fclose(fpdump); - printf("Dumped converted YUY2 to /sdcard/yuy2_dump.yuv\n"); - decoder.unmapData(*blit_yuy2_target, maphandle); - st = decoder.blit(*dec_target, *blit_rgba_target); + st = decoder.decode(jpginfo, dec_target); + printf("decode returns %d\n", st); assert(st == JD_SUCCESS); - maphandle = decoder.mapData(*blit_rgba_target, (void**) &data, offsets, pitches); - assert (maphandle.valid); - fpdump = fopen("/sdcard/rgba_dump.yuv", "wb"); - assert(fpdump); - // RGBA - for (int i = 0; i < blit_rgba_target->height; ++i) { - fwrite(data + offsets[0] + i * pitches[0], 4, blit_rgba_target->width, fpdump); - } - fclose(fpdump); - printf("Dumped converted RGBA to /sdcard/rgba_dump.yuv\n"); - decoder.unmapData(*blit_rgba_target, maphandle); - st = decoder.blit(*dec_target, *blit_yv12_target); - assert(st == JD_SUCCESS); - maphandle = decoder.mapData(*blit_yv12_target, (void**) &data, offsets, pitches); - assert (maphandle.valid); - fpdump = fopen("/sdcard/yv12_dump.yuv", "wb"); + uint8_t 
*data; + uint32_t offsets[3]; + uint32_t pitches[3]; + + JpegDecoder::MapHandle maphandle = decoder.mapData(dec_target, (void**) &data, offsets, pitches); + assert (maphandle); + fpdump = fopen(decdumpfile, "wb"); assert(fpdump); - // YV12 - for (int i = 0; i < blit_yv12_target->height; ++i) { - fwrite(data + offsets[0] + i * pitches[0], 1, blit_yv12_target->width, fpdump); + int hs, vs, nv12, yuy2, uyvy; + hs = vs = nv12 = yuy2 = uyvy = 0; + switch(format) { + case VA_FOURCC_NV12: + nv12 = 1; + break; + case VA_FOURCC_YUY2: + yuy2 = 1; + break; + case VA_FOURCC_UYVY: + uyvy = 1; + break; + case VA_FOURCC('4','0','0','P'): + hs = vs = 0; + break; + case VA_FOURCC_411P: + hs = 4; + vs = 1; + break; + case VA_FOURCC_411R: + hs = 1; + vs = 4; + break; + case VA_FOURCC_IMC3: + hs = 2; + vs = 2; + break; + case VA_FOURCC_422H: + hs = 2; + vs = 1; + break; + case VA_FOURCC_422V: + hs = 1; + vs = 2; + break; + case VA_FOURCC_444P: + hs = vs = 1; + break; + default: + printf("Invalid format %x\n", format); + assert(false); + break; } - for (int i = 0; i < blit_yv12_target->height/2; ++i) { - fwrite(data + offsets[1] + i * pitches[1], 1, blit_yv12_target->width/2, fpdump); + if (nv12) { + for (int i = 0; i < jpginfo.image_height; ++i) { + fwrite(data + offsets[0] + i * pitches[0], 1, jpginfo.image_width, fpdump); + } + for (int i = 0; i < jpginfo.image_height/2; ++i) { + fwrite(data + offsets[1] + i * pitches[1], 1, jpginfo.image_width, fpdump); + } } - for (int i = 0; i < blit_yv12_target->height/2; ++i) { - fwrite(data + offsets[2] + i * pitches[2], 1, blit_yv12_target->width/2, fpdump); + else if (yuy2 || uyvy) { + for (int i = 0; i < jpginfo.image_height; ++i) { + fwrite(data + offsets[0] + i * pitches[0], 2, jpginfo.image_width, fpdump); + } + } + else { // yuv planar + // Y + for (int i = 0; i < jpginfo.image_height; ++i) { + fwrite(data + offsets[0] + i * pitches[0], 1, jpginfo.image_width, fpdump); + } + if (hs != 0 && vs != 0) { + // U + for (int i = 0; i < jpginfo.image_height / vs; ++i) { + fwrite(data + offsets[1] + i * pitches[1], 1, jpginfo.image_width/hs, fpdump); + } + // V + for (int i = 0; i < jpginfo.image_height / vs; ++i) { + fwrite(data + offsets[2] + i * pitches[2], 1, jpginfo.image_width/hs, fpdump); + } + } } fclose(fpdump); - printf("Dumped converted YV12 to /sdcard/yv12_dump.yuv\n"); - decoder.unmapData(*blit_yv12_target, maphandle); + printf("Dumped decoded YUV to %s\n", decdumpfile); + decoder.unmapData(dec_target, maphandle); + BlitEvent ev; - decoder.deinit(); - - deinit_render_target(*dec_target); - deinit_render_target(*blit_nv12_target); - deinit_render_target(*blit_yuy2_target); - deinit_render_target(*blit_rgba_target); - deinit_render_target(*blit_yv12_target); - delete[] jpginfo.buf; - -} - -enum target_state -{ - TARGET_FREE, - TARGET_DECODE, - TARGET_BLIT, -}; + st = decoder.blitToLinearRgba(dec_target, rgba_mem, aligned_w, aligned_h, ev, scale_factor); + assert(st == JD_SUCCESS); -struct thread_param -{ - JpegDecoder *decoder; - RenderTarget *targets; - RenderTarget *nv12_targets; - RenderTarget *yuy2_targets; - RenderTarget *imc3_targets; - size_t target_count; - target_state *states; -}; + decoder.syncBlit(ev); + fpdump = fopen(rgbadumpfile, "wb"); + assert(fpdump); + fwrite(rgba_mem, 4, aligned_scaled_w * aligned_scaled_h, fpdump); + fclose(fpdump); + printf("Dumped RGBA into %s\n", rgbadumpfile); -static Mutex state_lock; + // test blit_to_camera_surfaces + if (format == VA_FOURCC_422H) { -void read_new_frame(JpegInfo &jpginfo) -{ - memset(&jpginfo, 0, 
sizeof(JpegInfo)); - FILE* fp = fopen(JPGFILE, "rb"); - assert(fp); - fseek(fp, 0, SEEK_END); - jpginfo.bufsize = ftell(fp); - fseek(fp, 0, SEEK_SET); - jpginfo.buf = new uint8_t[jpginfo.bufsize]; - fread(jpginfo.buf, 1, jpginfo.bufsize, fp); - fclose(fp); -} + RenderTarget nv12_dst, yuy2_dst; + nsecs_t t1, t2; + init_render_target_gralloc(nv12_dst, aligned_w, aligned_h, VA_FOURCC_NV12); + init_render_target_gralloc(yuy2_dst, aligned_w, aligned_h, VA_FOURCC_YUY2); + t1 = systemTime(); + st = decoder.blit(dec_target, nv12_dst, 1); + st = decoder.blit(dec_target, yuy2_dst, 1); + t2 = systemTime(); + printf("422H->NV12+YUY2 VA took %.2f ms\n", (t2-t1)/1000000.0); + deinit_render_target(nv12_dst); + deinit_render_target(yuy2_dst); + t1 = systemTime(); + st = decoder.blitToCameraSurfaces(dec_target, nv12_handle, yuy2_handle, + NULL, NULL, + aligned_w, aligned_h, + ev); + t2 = systemTime(); + decoder.syncBlit(ev); + printf("422H->NV12+YUY2 CM took %.2f ms\n", (t2-t1)/1000000.0); + t1 = systemTime(); + st = decoder.blitToCameraSurfaces(dec_target, nv12_handle, yuy2_handle, + nv21_mem, yv12_mem, + aligned_w, aligned_h, + ev); + t2 = systemTime(); + decoder.syncBlit(ev); + printf("422H->NV12+YUY2+NV21+YV12 CM took %.2f ms\n", (t2-t1)/1000000.0); + assert(st == JD_SUCCESS); + fpdump = fopen(nv21dumpfile, "wb"); + assert(fpdump); + fwrite(nv21_mem, 1, aligned_w * aligned_h* 3 /2, fpdump); + fclose(fpdump); + printf("Dumped NV21 into %s\n", nv21dumpfile); + fpdump = fopen(yv12dumpfile, "wb"); + assert(fpdump); + fwrite(yv12_mem, 1, aligned_w * aligned_h * 3 / 2, fpdump); + fclose(fpdump); + printf("Dumped YV12 into %s\n", yv12dumpfile); + gralloc_module->lock(gralloc_module, nv12_handle, GRALLOC_USAGE_SW_READ_OFTEN, 0, 0, aligned_w, aligned_h, (void**)&nv12_mem); + fpdump = fopen(nv12dumpfile, "wb"); + assert(fpdump); + fwrite(nv12_mem, 1, aligned_w * aligned_h * 3 / 2, fpdump); + fclose(fpdump); + gralloc_module->unlock(gralloc_module, nv12_handle); + printf("Dumped NV12 into %s\n", nv12dumpfile); + gralloc_module->lock(gralloc_module, yuy2_handle, GRALLOC_USAGE_SW_READ_OFTEN, 0, 0, aligned_w, aligned_h, (void**)&yuy2_mem); + fpdump = fopen(yuy2dumpfile, "wb"); + assert(fpdump); + fwrite(yuy2_mem, 2, aligned_w * aligned_h, fpdump); + fclose(fpdump); + gralloc_module->unlock(gralloc_module, yuy2_handle); + printf("Dumped YUY2 into %s\n", yuy2dumpfile); + } -static bool exit_thread = false; + decoder.deinit(); -#define VPP_DECODE_BATCH + allocdev->free(allocdev, nv12_handle); + allocdev->free(allocdev, yuy2_handle); + free(nv21_mem); + free(yv12_mem); + free(rgba_mem); -void* decode_frame_threadproc(void* data) -{ - thread_param *param = (thread_param*) data; - JpegInfo *jpginfos = new JpegInfo[param->target_count]; - int surface_id = 0; - int blit_surface_id = (surface_id + param->target_count - 1) % param->target_count; - while(!exit_thread) { - printf("%s blit %d and decode %d\n", __FUNCTION__, blit_surface_id, surface_id); - RenderTarget& cur_target = param->targets[surface_id]; -#ifdef VPP_DECODE_BATCH - RenderTarget& blit_target = param->targets[blit_surface_id]; - RenderTarget& blit_nv12_target = param->nv12_targets[blit_surface_id]; - RenderTarget& blit_yuy2_target = param->yuy2_targets[blit_surface_id]; - if (param->states[blit_surface_id] == TARGET_BLIT) { - printf("%s blit with surface %d\n", __FUNCTION__, blit_surface_id); - nsecs_t t1 = systemTime(); - if (param->decoder->busy(blit_target)) { - param->decoder->sync(blit_target); - nsecs_t t2 = systemTime(); - printf("%s wait surface %d 
decode took %f ms\n", __FUNCTION__, blit_surface_id, ns2us(t2 - t1)/1000.0); - param->states[blit_surface_id] = TARGET_FREE; - } - t1 = systemTime(); - param->decoder->blit(blit_target, blit_nv12_target); - nsecs_t t2 = systemTime(); - param->decoder->blit(blit_target, blit_yuy2_target); - nsecs_t t3 = systemTime(); - printf("%s blit %d NV12 took %f ms, YUY2 took %f ms\n", - __FUNCTION__, - blit_surface_id, ns2us(t2 - t1)/1000.0, - ns2us(t3 - t2)/1000.0); - param->states[blit_surface_id] = TARGET_FREE; - } -#endif - if (param->states[surface_id] != TARGET_FREE) { - printf("%s wait surface %d blit finish\n", __FUNCTION__, surface_id); - nsecs_t t1 = systemTime(); - while (param->states[surface_id] != TARGET_FREE) { - usleep(1000); - } - nsecs_t t2 = systemTime(); - printf("%s wait surface %d for decode/blit finish took %f ms\n", __FUNCTION__, surface_id, ns2us(t2 - t1)/1000.0); - } - JpegInfo &jpginfo = jpginfos[surface_id]; - read_new_frame(jpginfo); - nsecs_t t3 = systemTime(); - param->decoder->parse(jpginfo); - nsecs_t t4 = systemTime(); - printf("%s parse surface %d took %f ms\n", __FUNCTION__, surface_id, ns2us(t4 - t3)/1000.0); - param->states[surface_id] = TARGET_DECODE; - param->decoder->decode(jpginfo, cur_target); - nsecs_t t5 = systemTime(); - printf("%s decode surface %d took %f ms\n", __FUNCTION__, surface_id, ns2us(t5 - t4)/1000.0); - param->states[surface_id] = TARGET_BLIT; - surface_id = (surface_id + 1) % param->target_count; - blit_surface_id = (blit_surface_id + 1) % param->target_count; + switch(type) { + case RenderTarget::KERNEL_DRM: + deinit_render_target(dec_target, &dec_handle); + break; + default: + deinit_render_target(dec_target); + break; } - delete[] jpginfos; - return NULL; + delete[] jpginfo.buf; + gralloc_close(allocdev); + vaDestroyContext(display, vpCtxId); + vaDestroyConfig(display, vpCfgId); + vaTerminate(display); } -void* blit_frame_threadproc(void* data) +int main(int argc, char ** argv) { - thread_param *param = (thread_param*) data; - int surface_id = 0; - while(!exit_thread) { - printf("%s blit %d->%d\n", __FUNCTION__, surface_id, surface_id); - RenderTarget& dec_target = param->targets[surface_id]; - RenderTarget& blit_target = param->nv12_targets[surface_id]; - if (param->states[surface_id] != TARGET_BLIT) { - printf("%s wait surface %d decoding finish\n", __FUNCTION__, surface_id); - nsecs_t t1 = systemTime(); - while (param->states[surface_id] != TARGET_BLIT) { - usleep(100); - } - nsecs_t t2 = systemTime(); - printf("%s wait surface %d for decode finish took %f ms\n", __FUNCTION__, surface_id, ns2us(t2 - t1)/1000.0); + int res, scale; + uint32_t format = 0; + scale = 1; + memset(jpgfile, 0, sizeof(jpgfile)); + while ((res = getopt(argc, argv, "i:f:s:")) >= 0) { + switch (res) { + case 'i': + { + strcpy(jpgfile, optarg); + break; + } + case 's': + { + scale = atoi(optarg); + break; + } + case 'f': + { + if (strcmp(optarg, "NV12") == 0) { + format = VA_FOURCC_NV12; + } + else if (strcmp(optarg, "YUY2") == 0) { + format = VA_FOURCC_YUY2; + } + else if (strcmp(optarg, "UYVY") == 0) { + format = VA_FOURCC_UYVY; + } + else { + format = 0; + printf("INVALID output decode format, using YUV planar\n"); + } + break; + } + default: + printf("usage: testjpegdec -i <jpgfile> [-w <width> -h <height>]\n"); + exit(-1); } - nsecs_t t3 = systemTime(); - param->decoder->blit(dec_target, blit_target); - nsecs_t t4 = systemTime(); - printf("%s blit surface %d took %f ms\n", __FUNCTION__, surface_id, ns2us(t4 - t3)/1000.0); - param->states[surface_id] = TARGET_FREE; - surface_id = (surface_id + 1) % param->target_count; } - return NULL; -} - -void parallel_decode_blit_test() -{ - RenderTarget **all_targets = new RenderTarget*[12]; - RenderTarget dec_targets[12]; - RenderTarget nv12_targets[12]; - RenderTarget yuy2_targets[12]; - RenderTarget imc3_targets[12]; - JpegInfo jpginfos[12]; - target_state states[12]; - for (int i = 0; i < 12; ++i) { - init_render_target(dec_targets[i], 1280, 720, fourcc2PixelFormat(VA_FOURCC_422H)); // 422H - init_render_target(nv12_targets[i], 1280, 720, fourcc2PixelFormat(VA_FOURCC_NV12)); // NV12 for video encode - init_render_target(yuy2_targets[i], 1280, 720, fourcc2PixelFormat(VA_FOURCC_YUY2)); // YUY2 for overlay - //init_render_target(imc3_targets[i], 1280, 720, HAL_PIXEL_FORMAT_IMC3); // IMC3 for libjpeg encode - jpginfos[i].buf = new uint8_t[2 * 1024 * 1024]; - all_targets[i] = &dec_targets[i]; - //all_targets[i + 12] = &nv12_targets[i]; - //all_targets[i + 24] = &yuy2_targets[i]; - //all_targets[i + 36] = &imc3_targets[i]; - states[i] = TARGET_FREE; + if (strcmp(jpgfile, "") == 0) { + printf("usage: testjpegdec -i <jpgfile> [-f <fourcc>] [-s <scaling_factor>]\n"); + printf(" available output FOURCC: NV12, YUY2, UYVY, 0. 0 by default (YUV planar)\n"); + printf(" available scaling_factor: 1, 2, 4, 8. 1 by default (no down-scale)\n"); + exit(-1); } - - exit_thread = false; - - pthread_attr_t dec_attr, blit_attr; - pthread_attr_init(&dec_attr); - pthread_attr_init(&blit_attr); - pthread_attr_setdetachstate(&dec_attr, PTHREAD_CREATE_JOINABLE); - pthread_attr_setdetachstate(&blit_attr, PTHREAD_CREATE_JOINABLE); - pthread_t dec_thread, blit_thread; - thread_param param; - param.nv12_targets = nv12_targets; - param.yuy2_targets = yuy2_targets; - param.imc3_targets = imc3_targets; - param.targets = dec_targets; - param.target_count = 12; - param.decoder = new JpegDecoder(); - //param.decoder->init(1280, 720, all_targets, 36); - param.decoder->init(1280, 720, all_targets, 12); - param.states = states; - pthread_create(&dec_thread, &dec_attr, decode_frame_threadproc, (void*)&param); -#ifndef VPP_DECODE_BATCH - pthread_create(&blit_thread, &blit_attr, blit_frame_threadproc, (void*)&param); -#endif - pthread_attr_destroy(&blit_attr); - pthread_attr_destroy(&dec_attr); - - // test for 1 minute - usleep(60 * 1000 * 1000); - exit_thread = true; - void *dummy; - pthread_join(dec_thread, &dummy); -#ifndef VPP_DECODE_BATCH - pthread_join(blit_thread, &dummy); -#endif - - for (int i = 0; i < 12; ++i) { - delete[] jpginfos[i].buf; - deinit_render_target(dec_targets[i]); - deinit_render_target(nv12_targets[i]); - deinit_render_target(yuy2_targets[i]); - //deinit_render_target(imc3_targets[i]); - } - delete[] all_targets; -} - -int main(int argc, char ** argv) -{ - //decode_blit_functionality_test(); - parallel_decode_blit_test(); + printf("----- DRM surface type test -----\n"); + //decode_blit_functionality_test(RenderTarget::KERNEL_DRM, 0); + printf("----- GRALLOC surface type test -----\n"); + //decode_blit_functionality_test(RenderTarget::ANDROID_GRALLOC, 0); + printf("----- Normal surface type test, scale %d-----\n", scale); + decode_blit_functionality_test(RenderTarget::INTERNAL_BUF, format, scale); + printf("----- Userptr surface type test -----\n"); + //decode_blit_functionality_test(RenderTarget::USER_PTR, format); return 0; } -- cgit v1.2.3
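The dump path in the test above derives each chroma plane's size from per-FOURCC subsampling divisors: a U or V row is image_width/hs bytes and there are image_height/vs of them. A minimal standalone sketch of that mapping follows; the helper name factors_for is hypothetical (the switch inside decode_blit_functionality_test is the authoritative version), and only the divisor pairs are taken from the patch:

    #include <stdio.h>
    #include <string.h>

    /* chroma width/height divisors; {0,0} marks grayscale (400P), no chroma planes */
    struct subsample { int hs, vs; };

    static struct subsample factors_for(const char *fourcc)
    {
        struct subsample s = { 0, 0 };
        if      (!strcmp(fourcc, "IMC3")) { s.hs = 2; s.vs = 2; }  /* 4:2:0 */
        else if (!strcmp(fourcc, "422H")) { s.hs = 2; s.vs = 1; }
        else if (!strcmp(fourcc, "422V")) { s.hs = 1; s.vs = 2; }
        else if (!strcmp(fourcc, "444P")) { s.hs = 1; s.vs = 1; }
        else if (!strcmp(fourcc, "411P")) { s.hs = 4; s.vs = 1; }
        else if (!strcmp(fourcc, "411R")) { s.hs = 1; s.vs = 4; }
        return s;
    }

    int main(void)
    {
        struct subsample s = factors_for("422H");
        if (s.hs && s.vs)  /* a 1280x720 4:2:2H picture carries 640x720 U and V planes */
            printf("U/V plane: %dx%d\n", 1280 / s.hs, 720 / s.vs);
        return 0;
    }

The {0,0} result doubles as the 400P marker, which is why the dump loop above only writes the U and V planes when hs and vs are both non-zero.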
From a4fd1d0189190761f88cc60619164188e416d59f Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Mon, 20 Jan 2014 13:59:58 +0800 Subject: libmix: Keep decoding even if no reference frame is available for P/B frames BZ: 163234 Only allow the H.263 decoder to do this.
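The gist of the change, as a self-contained sketch with simplified stand-in types (not the decoder's actual Decode_Status or libva definitions): short-header (H.263) content tolerates a missing reference by handing the driver VA_INVALID_SURFACE and reporting success, while plain MPEG-4 P/B frames still fail with DECODE_NO_REFERENCE:

    #include <stdio.h>

    typedef unsigned int VASurfaceID;
    #define VA_INVALID_SURFACE ((VASurfaceID)-1)

    typedef enum { DECODE_SUCCESS = 0, DECODE_NO_REFERENCE = 1 } Decode_Status;

    /* Pick the forward reference for a P frame. A missing reference is fatal
     * for normal MPEG-4 streams, but short-header streams keep decoding with
     * an invalid surface substituted. */
    static Decode_Status pick_forward_ref(VASurfaceID last_ref,
                                          int is_short_header,
                                          VASurfaceID *fwd)
    {
        if (last_ref == VA_INVALID_SURFACE && !is_short_header)
            return DECODE_NO_REFERENCE;  /* MPEG-4: drop the frame */
        *fwd = last_ref;                 /* H.263: may stay VA_INVALID_SURFACE */
        return DECODE_SUCCESS;
    }

    int main(void)
    {
        VASurfaceID fwd;
        printf("H.263, no ref yet:  %d\n", pick_forward_ref(VA_INVALID_SURFACE, 1, &fwd));
        printf("MPEG-4, no ref yet: %d\n", pick_forward_ref(VA_INVALID_SURFACE, 0, &fwd));
        return 0;
    }

B frames follow the same pattern for both the forward and backward reference, as the setReference() hunk below shows.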
Change-Id: I18916d1a74ced52d1b7438d114f4a84c7d8b0a2b Signed-off-by: Dan Liang --- videodecoder/VideoDecoderMPEG4.cpp | 37 ++++++++++++++++++++++++++++++------- 1 file changed, 30 insertions(+), 7 deletions(-) diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp index 7f2151b..e9b9305 100644 --- a/videodecoder/VideoDecoderMPEG4.cpp +++ b/videodecoder/VideoDecoderMPEG4.cpp @@ -260,11 +260,19 @@ Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) { if (codingType == MP4_VOP_TYPE_B) { if (mForwardReference == NULL || mLastReference == NULL) { - return DECODE_NO_REFERENCE; + if (mIsShortHeader) { + status = DECODE_SUCCESS; + VTRACE("%s: No reference frame but keep decoding", __FUNCTION__); + } else + return DECODE_NO_REFERENCE; } } else if (codingType == MP4_VOP_TYPE_P || codingType == MP4_VOP_TYPE_S) { - if (mLastReference == NULL&& mIsSyncFrame == false) { - return DECODE_NO_REFERENCE; + if (mLastReference == NULL && mIsSyncFrame == false) { + if (mIsShortHeader) { + status = DECODE_SUCCESS; + VTRACE("%s: No reference frame but keep decoding", __FUNCTION__); + } else + return DECODE_NO_REFERENCE; } } // all sanity checks pass, continue decoding through continueDecodingFrame @@ -507,12 +515,13 @@ Decode_Status VideoDecoderMPEG4::setReference(VAPictureParameterBufferMPEG4 *pic picParam->backward_reference_picture = VA_INVALID_SURFACE; break; case MP4_VOP_TYPE_P: - if (mLastReference == NULL&& mIsSyncFrame == false) { + if (mLastReference == NULL && mIsSyncFrame == false && !mIsShortHeader) { return DECODE_NO_REFERENCE; } if (mLastReference != NULL) { picParam->forward_reference_picture = mLastReference->renderBuffer.surface; } else { + VTRACE("%s: no reference frame, but keep decoding", __FUNCTION__); picParam->forward_reference_picture = VA_INVALID_SURFACE; } picParam->backward_reference_picture = VA_INVALID_SURFACE; @@ -520,11 +529,25 @@ Decode_Status VideoDecoderMPEG4::setReference(VAPictureParameterBufferMPEG4 *pic case MP4_VOP_TYPE_B: picParam->vop_fields.bits.backward_reference_vop_coding_type = mLastVOPCodingType; // WEIRD, CHECK AGAIN !!!!!!! - if (mLastReference == NULL || mForwardReference == NULL) { + if (mIsShortHeader) { + if (mLastReference != NULL) { + picParam->forward_reference_picture = mLastReference->renderBuffer.surface; + } else { + VTRACE("%s: no forward reference frame, but keep decoding", __FUNCTION__); + picParam->forward_reference_picture = VA_INVALID_SURFACE; + } + if (mForwardReference != NULL) { + picParam->backward_reference_picture = mForwardReference->renderBuffer.surface; + } else { + VTRACE("%s: no backward reference frame, but keep decoding", __FUNCTION__); + picParam->backward_reference_picture = VA_INVALID_SURFACE; + } + } else if (mLastReference == NULL || mForwardReference == NULL) { return DECODE_NO_REFERENCE; + } else { + picParam->forward_reference_picture = mLastReference->renderBuffer.surface; + picParam->backward_reference_picture = mForwardReference->renderBuffer.surface; } - picParam->forward_reference_picture = mLastReference->renderBuffer.surface; - picParam->backward_reference_picture = mForwardReference->renderBuffer.surface; break; case MP4_VOP_TYPE_S: // WEIRD, CHECK AGAIN!!!!
WAS using mForwardReference -- cgit v1.2.3 From 60e0ed22947d14c5dc575e8126d3acefc030bb5a Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Wed, 8 Jan 2014 04:48:10 +0800 Subject: Implement slice header parsing for widevine playback BZ: 146336 This patch contains the implementation of slice header parsing for widevine playback on merrifield platforms. Change-Id: Ife3dd03fadbd40ecbd4d6070e0dd1de8d811d262 Signed-off-by: wfeng6 --- mixvbp/vbp_manager/Android.mk | 8 + mixvbp/vbp_manager/include/viddec_parser_ops.h | 4 +- .../secvideo/merrifield/vbp_h264secure_parser.c | 1938 ++++++++++++++++++++ .../secvideo/merrifield/vbp_h264secure_parser.h | 73 + mixvbp/vbp_manager/vbp_loader.c | 2 +- mixvbp/vbp_manager/vbp_loader.h | 13 +- mixvbp/vbp_manager/vbp_utils.c | 11 +- mixvbp/vbp_manager/vbp_utils.h | 4 +- mixvbp/vbp_plugin/h264/Android.mk | 34 + mixvbp/vbp_plugin/h264/h264parse_dpb.c | 6 +- mixvbp/vbp_plugin/h264/include/h264.h | 2 + .../secvideo/merrifield/viddec_h264secure_parse.c | 987 ++++++++++ videodecoder/Android.mk | 8 + videodecoder/VideoDecoderAVC.cpp | 3 + videodecoder/VideoDecoderAVC.h | 8 +- videodecoder/VideoDecoderBase.cpp | 17 +- videodecoder/VideoDecoderBase.h | 3 +- videodecoder/VideoDecoderHost.cpp | 3 + videodecoder/VideoDecoderTrace.h | 0 .../merrifield/VideoDecoderAVCSecure.cpp | 735 ++++---- .../securevideo/merrifield/VideoDecoderAVCSecure.h | 60 +- 21 files changed, 3487 insertions(+), 432 deletions(-) create mode 100755 mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.c create mode 100755 mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.h create mode 100755 mixvbp/vbp_plugin/h264/secvideo/merrifield/viddec_h264secure_parse.c mode change 100644 => 100755 videodecoder/VideoDecoderAVC.cpp mode change 100644 => 100755 videodecoder/VideoDecoderAVC.h mode change 100644 => 100755 videodecoder/VideoDecoderBase.cpp mode change 100644 => 100755 videodecoder/VideoDecoderBase.h mode change 100644 => 100755 videodecoder/VideoDecoderTrace.h mode change 100644 => 100755 videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp mode change 100644 => 100755 videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h diff --git a/mixvbp/vbp_manager/Android.mk b/mixvbp/vbp_manager/Android.mk index e30a0e0..d60b7d6 100755 --- a/mixvbp/vbp_manager/Android.mk +++ b/mixvbp/vbp_manager/Android.mk @@ -60,4 +60,12 @@ LOCAL_C_INCLUDES += $(LOCAL_PATH)/secvideo/baytrail/ LOCAL_SRC_FILES += secvideo/baytrail/vbp_h264secure_parser.c endif +PLATFORM_SUPPORT_USE_SLICE_HEADER_PARSING := merrifield + +ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_USE_SLICE_HEADER_PARSING)),) +LOCAL_CFLAGS += -DUSE_SLICE_HEADER_PARSING +LOCAL_C_INCLUDES += $(LOCAL_PATH)/secvideo/merrifield/ +LOCAL_SRC_FILES += secvideo/merrifield/vbp_h264secure_parser.c +endif + include $(BUILD_SHARED_LIBRARY) diff --git a/mixvbp/vbp_manager/include/viddec_parser_ops.h b/mixvbp/vbp_manager/include/viddec_parser_ops.h index f9629d5..77054b5 100755 --- a/mixvbp/vbp_manager/include/viddec_parser_ops.h +++ b/mixvbp/vbp_manager/include/viddec_parser_ops.h @@ -26,7 +26,7 @@ typedef uint32_t (*fn_is_frame_start)(void *ctxt); typedef uint32_t (*fn_gen_contrib_tags)(void *parent, uint32_t ignore_partial); typedef uint32_t (*fn_gen_assoc_tags)(void *parent); typedef void (*fn_flush_parser) (void *parent, void *ctxt); -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) typedef uint32_t (*fn_update_data)(void *parent, void *data, uint32_t size); #endif @@ -42,7 +42,7 
7 @@ typedef struct fn_gen_contrib_tags gen_contrib_tags; fn_gen_assoc_tags gen_assoc_tags; fn_flush_parser flush; -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) fn_update_data update_data; #endif } viddec_parser_ops_t; diff --git a/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.c b/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.c new file mode 100755 index 0000000..3f3eeef --- /dev/null +++ b/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.c @@ -0,0 +1,1938 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009, 2012 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + +#include <dlfcn.h> +#include <string.h> +#include "h264.h" +#include "vbp_loader.h" +#include "vbp_utils.h" +#include "vbp_h264secure_parser.h" + +typedef struct vbp_h264_parser_private_t vbp_h264_parser_private; + +typedef enum +{ + H264_BS_LENGTH_PREFIXED, + H264_BS_SC_PREFIXED, + H264_BS_SINGLE_NAL +} H264_BS_PATTERN; + +#define MAX_PARSED_SLICE_NUM 16 +#define TERMINATE_KEY 0xFFFFFFFF +#define BUF_TOO_SMALL_KEY 0xFFFFFFFE +#define SLICE_TOO_MAY_KEY 0xFFFFFFFD + +typedef struct _vbp_h264_sliceheader { + uint32 sliceHeaderKey; + VAParseSliceHeaderGroupBuffer parsedSliceHeader; + uint32 *reorder_cmd; + int16 *weight; + uint32 *pic_marking; +} vbp_h264_sliceheader; + +typedef struct _vbp_h264_sliceheadergroup { + uint32 sliceHeaderNum; + vbp_h264_sliceheader sliceHeaders[MAX_PARSED_SLICE_NUM]; +} vbp_h264_sliceheadergroup; + +struct vbp_h264_parser_private_t +{ + /* number of bytes used to encode length of NAL payload. If parser does not receive configuration data + and NAL_length_size is equal to zero when bitstream parsing begins, we assume bitstream is in AnnexB + byte stream format.
*/ + int NAL_length_size; + + /* indicate if stream is length prefixed */ + int length_prefix_verified; + + H264_BS_PATTERN bitstream_pattern; +}; + +/* default scaling list table */ +static uint8 Default_4x4_Intra[16] = +{ + 6,13,20,28, + 13,20,28,32, + 20,28,32,37, + 28,32,37,42 +}; + +static uint8 Default_4x4_Inter[16] = +{ + 10,14,20,24, + 14,20,24,27, + 20,24,27,30, + 24,27,30,34 +}; + +static uint8 Default_8x8_Intra[64] = +{ + 6,10,13,16,18,23,25,27, + 10,11,16,18,23,25,27,29, + 13,16,18,23,25,27,29,31, + 16,18,23,25,27,29,31,33, + 18,23,25,27,29,31,33,36, + 23,25,27,29,31,33,36,38, + 25,27,29,31,33,36,38,40, + 27,29,31,33,36,38,40,42 +}; + +static uint8 Default_8x8_Inter[64] = +{ + 9,13,15,17,19,21,22,24, + 13,13,17,19,21,22,24,25, + 15,17,19,21,22,24,25,27, + 17,19,21,22,24,25,27,28, + 19,21,22,24,25,27,28,30, + 21,22,24,25,27,28,30,32, + 22,24,25,27,28,30,32,33, + 24,25,27,28,30,32,33,35 +}; + +static uint8 quant_flat[16] = +{ + 16,16,16,16, + 16,16,16,16, + 16,16,16,16, + 16,16,16,16 +}; + +static uint8 quant8_flat[64] = +{ + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16, + 16,16,16,16,16,16,16,16 +}; + +static uint8* UseDefaultList[8] = +{ + Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Intra, + Default_4x4_Inter, Default_4x4_Inter, Default_4x4_Inter, + Default_8x8_Intra, + Default_8x8_Inter +}; + +static uint8 h264_aspect_ratio_table[][2] = +{ + {0, 0}, + {1, 1}, + {12, 11}, + {10, 11}, + {16, 11}, + {40, 33}, + {24, 11}, + {20, 11}, + {32, 11}, + {80, 33}, + {18, 11}, + {15, 11}, + {64, 33}, + {160, 99}, + {4, 3}, + {3, 2}, + {2, 1}, + {0, 0} +}; + + +/** + * + */ +uint32 vbp_init_parser_entries_h264secure(vbp_context *pcontext) +{ + if (NULL == pcontext->parser_ops) + { + return VBP_PARM; + } + + pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_h264secure_init"); + if (NULL == pcontext->parser_ops->init) + { + ETRACE ("Failed to set entry point."); + return VBP_LOAD; + } + + pcontext->parser_ops->parse_sc = viddec_parse_sc; + + pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264secure_parse"); + if (NULL == pcontext->parser_ops->parse_syntax) + { + ETRACE ("Failed to set entry point."); + return VBP_LOAD; + } + + pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264secure_get_context_size"); + if (NULL == pcontext->parser_ops->get_cxt_size) + { + ETRACE ("Failed to set entry point."); + return VBP_LOAD; + } + + pcontext->parser_ops->is_wkld_done = NULL; + pcontext->parser_ops->flush = NULL; + pcontext->parser_ops->update_data = dlsym(pcontext->fd_parser, "viddec_h264secure_update"); + if (NULL == pcontext->parser_ops->update_data) + { + ETRACE ("Failed to set entry point."); + return VBP_LOAD; + } +/* + pcontext->parser_ops->flush = dlsym(pcontext->fd_parser, "viddec_h264secure_flush"); + if (NULL == pcontext->parser_ops->flush) + { + ETRACE ("Failed to set entry point."); + return VBP_LOAD; + } +*/ + /* entry point not needed */ + pcontext->parser_ops->is_frame_start = NULL; + return VBP_OK; +} + + +/** + * + */ +uint32 vbp_allocate_query_data_h264secure(vbp_context *pcontext) +{ + if (NULL != pcontext->query_data) + { + return VBP_PARM; + } + + pcontext->query_data = NULL; + vbp_data_h264 *query_data = NULL; + + query_data = vbp_malloc_set0(vbp_data_h264, 1); + if (NULL == query_data) + { + goto cleanup; + } + + /* assign the pointer */ + pcontext->query_data = (void 
*)query_data; + + query_data->pic_data = vbp_malloc_set0(vbp_picture_data_h264, MAX_NUM_PICTURES); + if (NULL == query_data->pic_data) + { + goto cleanup; + } + + int i; + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferH264, 1); + if (NULL == query_data->pic_data[i].pic_parms) + { + goto cleanup; + } + query_data->pic_data[i].num_slices = 0; + query_data->pic_data[i].slc_data = vbp_malloc_set0(vbp_slice_data_h264, MAX_NUM_SLICES); + if (NULL == query_data->pic_data[i].slc_data) + { + goto cleanup; + } + } + + query_data->IQ_matrix_buf = vbp_malloc_set0(VAIQMatrixBufferH264, 1); + if (NULL == query_data->IQ_matrix_buf) + { + goto cleanup; + } + + query_data->codec_data = vbp_malloc_set0(vbp_codec_data_h264, 1); + if (NULL == query_data->codec_data) + { + goto cleanup; + } + + pcontext->parser_private = NULL; + vbp_h264_parser_private *parser_private = NULL; + + parser_private = vbp_malloc_set0(vbp_h264_parser_private, 1); + if (NULL == parser_private) + { + goto cleanup; + } + + /* assign the pointer */ + pcontext->parser_private = (void *)parser_private; + + /* init the pointer */ + parser_private->NAL_length_size = 0; + + parser_private->length_prefix_verified = 0; + + parser_private->bitstream_pattern = H264_BS_SC_PREFIXED; + + query_data->pic_parse_buffer = vbp_malloc_set0(VAParsePictureParameterBuffer,1); + if (NULL == query_data->pic_parse_buffer) + { + goto cleanup; + } + + return VBP_OK; + +cleanup: + vbp_free_query_data_h264secure(pcontext); + + return VBP_MEM; +} + +uint32 vbp_free_query_data_h264secure(vbp_context *pcontext) +{ + if (NULL != pcontext->parser_private) + { + free(pcontext->parser_private); + pcontext->parser_private = NULL; + } + + if (NULL == pcontext->query_data) + { + return VBP_OK; + } + + int i; + vbp_data_h264 *query_data; + query_data = (vbp_data_h264 *)pcontext->query_data; + + if (query_data->pic_data) + { + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + if (query_data->pic_data[i].slc_data) + { + free(query_data->pic_data[i].slc_data); + query_data->pic_data[i].slc_data = NULL; + } + if (query_data->pic_data[i].pic_parms) + { + free(query_data->pic_data[i].pic_parms); + query_data->pic_data[i].pic_parms = NULL; + } + } + free(query_data->pic_data); + query_data->pic_data = NULL; + } + if (query_data->IQ_matrix_buf) + { + free(query_data->IQ_matrix_buf); + query_data->IQ_matrix_buf = NULL; + } + if (query_data->codec_data) + { + free(query_data->codec_data); + query_data->codec_data = NULL; + } + if (query_data->pic_parse_buffer) + { + free(query_data->pic_parse_buffer); + query_data->pic_parse_buffer = NULL; + } + free(query_data); + pcontext->query_data = NULL; + + return VBP_OK; +} + + +static inline uint16_t vbp_utils_ntohs(uint8_t* p) +{ + uint16_t i = ((*p) << 8) + ((*(p+1))); + return i; +} + +static inline uint32_t vbp_utils_ntohl(uint8_t* p) +{ + uint32_t i = ((*p) << 24) + ((*(p+1)) << 16) + ((*(p+2)) << 8) + ((*(p+3))); + return i; +} + + +static void vbp_set_VAPicture_h264secure( + int curr_picture_structure, + int bottom_field, + frame_store* store, + VAPictureH264* pic) +{ + if (FRAME == curr_picture_structure) + { + if (FRAME != viddec_h264_get_dec_structure(store)) + { + WTRACE("Reference picture structure is not frame for current frame picture!"); + } + pic->flags = 0; + pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; + } + else + { + if (FRAME == viddec_h264_get_dec_structure(store)) + { + WTRACE("reference picture 
structure is frame for current field picture!"); + } + if (bottom_field) + { + pic->flags = VA_PICTURE_H264_BOTTOM_FIELD; + pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; + } + else + { + pic->flags = VA_PICTURE_H264_TOP_FIELD; + pic->TopFieldOrderCnt = store->top_field.poc; + pic->BottomFieldOrderCnt = store->bottom_field.poc; + } + } +} + +static void vbp_set_slice_ref_list_h264secure( + struct h264_viddec_parser* h264_parser, + VASliceParameterBufferH264 *slc_parms) +{ + VTRACE("vbp_set_slice_ref_list_h264secure +++"); + int i, j; + int num_ref_idx_active = 0; + h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader); + uint8_t* p_list = NULL; + VAPictureH264* refPicListX = NULL; + frame_store* fs = NULL; + + /* initialize ref picutre list, set picture id and flags to invalid. */ + + VTRACE("slice_header->slice_type = %d", slice_header->slice_type); + for (i = 0; i < 2; i++) + { + refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]); + for (j = 0; j < 32; j++) + { + refPicListX->picture_id = VA_INVALID_SURFACE; + refPicListX->frame_idx = 0; + refPicListX->flags = VA_PICTURE_H264_INVALID; + refPicListX->TopFieldOrderCnt = 0; + refPicListX->BottomFieldOrderCnt = 0; + refPicListX++; + } + } + + for (i = 0; i < 2; i++) + { + refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]); + + if ((i == 0) && + ((h264_PtypeB == slice_header->slice_type) || + (h264_PtypeP == slice_header->slice_type))) + { + num_ref_idx_active = slice_header->num_ref_idx_l0_active; + if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag) + { + p_list = h264_parser->info.slice_ref_list0; + } + else + { + p_list = h264_parser->info.dpb.listX_0; + } + } + else if ((i == 1) && (h264_PtypeB == slice_header->slice_type)) + { + VTRACE("num_ref_idx_l1_active = %d", slice_header->num_ref_idx_l1_active); + num_ref_idx_active = slice_header->num_ref_idx_l1_active; + if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag) + { + p_list = h264_parser->info.slice_ref_list1; + } + else + { + p_list = h264_parser->info.dpb.listX_1; + } + } + else + { + num_ref_idx_active = 0; + p_list = NULL; + } + + + for (j = 0; j < num_ref_idx_active; j++) + { + fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]); + + /* bit 5 indicates if reference picture is bottom field */ + vbp_set_VAPicture_h264secure( + h264_parser->info.img.structure, + (p_list[j] & 0x20) >> 5, + fs, + refPicListX); + + refPicListX->frame_idx = fs->frame_num; + refPicListX->flags |= viddec_h264_get_is_long_term(fs) ? 
VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE; + refPicListX++; + } + } + + VTRACE("vbp_set_slice_ref_list_h264secure ---"); +} + +static void vbp_set_pre_weight_table_h264secure( + struct h264_viddec_parser* h264_parser, + VASliceParameterBufferH264 *slc_parms) +{ + h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader); + int i, j; + if ((((h264_PtypeP == slice_header->slice_type) || + (h264_PtypeB == slice_header->slice_type)) && + h264_parser->info.active_PPS.weighted_pred_flag) || + ((h264_PtypeB == slice_header->slice_type) && + (1 == h264_parser->info.active_PPS.weighted_bipred_idc))) + { + slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom; + slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom; + slc_parms->luma_weight_l0_flag = slice_header->sh_predwttbl.luma_weight_l0_flag; + slc_parms->chroma_weight_l0_flag = slice_header->sh_predwttbl.chroma_weight_l0_flag; + slc_parms->luma_weight_l1_flag = slice_header->sh_predwttbl.luma_weight_l1_flag; + slc_parms->chroma_weight_l1_flag = slice_header->sh_predwttbl.chroma_weight_l1_flag; + + for (i = 0; i < 32; i++) + { + slc_parms->luma_weight_l0[i] = slice_header->sh_predwttbl.luma_weight_l0[i]; + slc_parms->luma_offset_l0[i] = slice_header->sh_predwttbl.luma_offset_l0[i]; + + slc_parms->luma_weight_l1[i] = slice_header->sh_predwttbl.luma_weight_l1[i]; + slc_parms->luma_offset_l1[i] = slice_header->sh_predwttbl.luma_offset_l1[i]; + + for (j = 0; j < 2; j++) + { + slc_parms->chroma_weight_l0[i][j] = slice_header->sh_predwttbl.chroma_weight_l0[i][j]; + slc_parms->chroma_offset_l0[i][j] = slice_header->sh_predwttbl.chroma_offset_l0[i][j]; + slc_parms->chroma_weight_l1[i][j] = slice_header->sh_predwttbl.chroma_weight_l1[i][j]; + slc_parms->chroma_offset_l1[i][j] = slice_header->sh_predwttbl.chroma_offset_l1[i][j]; + } + } + } + else + { + /* default weight table */ + slc_parms->luma_log2_weight_denom = 5; + slc_parms->chroma_log2_weight_denom = 5; + slc_parms->luma_weight_l0_flag = 0; + slc_parms->luma_weight_l1_flag = 0; + slc_parms->chroma_weight_l0_flag = 0; + slc_parms->chroma_weight_l1_flag = 0; + for (i = 0; i < 32; i++) + { + slc_parms->luma_weight_l0[i] = 0; + slc_parms->luma_offset_l0[i] = 0; + slc_parms->luma_weight_l1[i] = 0; + slc_parms->luma_offset_l1[i] = 0; + + for (j = 0; j < 2; j++) + { + slc_parms->chroma_weight_l0[i][j] = 0; + slc_parms->chroma_offset_l0[i][j] = 0; + slc_parms->chroma_weight_l1[i][j] = 0; + slc_parms->chroma_offset_l1[i][j] = 0; + } + } + } +} + + +static void vbp_set_reference_frames_h264secure( + struct h264_viddec_parser *parser, + VAPictureParameterBufferH264* pic_parms) +{ + int buffer_idx; + int frame_idx; + frame_store* store = NULL; + h264_DecodedPictureBuffer* dpb = &(parser->info.dpb); + /* initialize reference frames */ + for (frame_idx = 0; frame_idx < 16; frame_idx++) + { + pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE; + pic_parms->ReferenceFrames[frame_idx].frame_idx = 0; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID; + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0; + } + pic_parms->num_ref_frames = 0; + + frame_idx = 0; + + /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer); */ + /* set short term reference frames */ + for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++) + { + if (frame_idx >= 16 || buffer_idx >= 
16) + { + WTRACE("Frame index is out of bound."); + break; + } + + store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]]; + /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */ + if (viddec_h264_get_is_used(store)) + { + pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE; + if (FRAME == parser->info.img.structure) + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + } + else + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) + { + /* if both fields are used for reference, just set flag to be frame (0) */ + } + else + { + if (store->top_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; + if (store->bottom_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; + } + } + } + frame_idx++; + } + + /* set long term reference frames */ + for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++) + { + if (frame_idx >= 16 || buffer_idx >= 16) + { + WTRACE("Frame index is out of bound."); + break; + } + store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]]; + if (!viddec_h264_get_is_long_term(store)) + { + WTRACE("long term frame is not marked as long term."); + } + /*if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */ + if (viddec_h264_get_is_used(store)) + { + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE; + if (FRAME == parser->info.img.structure) + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc; + } + else + { + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc; + if (store->top_field.used_for_reference && store->bottom_field.used_for_reference) + { + /* if both fields are used for reference, just set flag to be frame (0)*/ + } + else + { + if (store->top_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD; + if (store->bottom_field.used_for_reference) + pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD; + } + } + } + frame_idx++; + } + + pic_parms->num_ref_frames = frame_idx; + if (frame_idx > parser->info.active_SPS.num_ref_frames) + { + WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).", + frame_idx, parser->info.active_SPS.num_ref_frames); + } +} + + +static void vbp_set_scaling_list_h264secure( + struct h264_viddec_parser *parser, + VAIQMatrixBufferH264* IQ_matrix_buf) +{ + int i; + int lists_to_set = 6 + 2 * (parser->info.active_PPS.transform_8x8_mode_flag ? 
1 : 0); + + if (parser->info.active_PPS.pic_scaling_matrix_present_flag) + { + for (i = 0; i < lists_to_set; i++) + { + if (parser->info.active_PPS.pic_scaling_list_present_flag[i]) + { + if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) || + ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6])) + { + /* use default scaling list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + } + } + else + { + /* use PPS list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64); + } + } + } + else /* pic_scaling_list not present */ + { + if (parser->info.active_SPS.seq_scaling_matrix_present_flag) + { + /* SPS matrix present - use fallback rule B */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i], + 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], + parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i], + 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + //g_warning("invalid scaling list index."); + break; + } + } + else /* seq_scaling_matrix not present */ + { + /* SPS matrix not present - use fallback rule A */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + break; + + case 1: + case 2: + case 4: + case 5: + memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + WTRACE("invalid scaling list index."); + break; + } + } /* end of seq_scaling_matrix not present */ + } /* end of pic_scaling_list not present */ + } /* for loop for each index from 0 to 7 */ + } /* end of pic_scaling_matrix present */ + else + { + /* PPS matrix not present, use SPS information */ + if (parser->info.active_SPS.seq_scaling_matrix_present_flag) + { + for (i = 0; i < lists_to_set; i++) + { + if (parser->info.active_SPS.seq_scaling_list_present_flag[i]) + { + if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) || + ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6])) + { + /* use default scaling list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + } + } + else + { + /* use SPS list */ + if (i < 6) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16); + } + else + { + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64); + } + } + } + else + { + /* SPS list not present - use fallback rule A */ + switch (i) + { + case 0: + case 3: + memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16); + break; + + case 6: + case 7: + memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64); + break; + + case 1: + case 2: + case 4: + case 5: + 
memcpy(IQ_matrix_buf->ScalingList4x4[i], + IQ_matrix_buf->ScalingList4x4[i - 1], + 16); + break; + + default: + WTRACE("invalid scaling list index."); + break; + } + } + } + } + else + { + /* SPS matrix not present - use flat lists */ + for (i = 0; i < 6; i++) + { + memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16); + } + for (i = 0; i < 2; i++) + { + memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64); + } + } + } + + if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) && + (parser->info.active_PPS.pic_scaling_matrix_present_flag || + parser->info.active_SPS.seq_scaling_matrix_present_flag)) + { + for (i = 0; i < 2; i++) + { + memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64); + } + } +} +static void vbp_set_pic_parse_buffer_h264secure( + struct h264_viddec_parser *parser, + vbp_data_h264 *query_data) +{ + VTRACE("vbp_set_pic_parse_buffer_h264secure starts"); + VAParsePictureParameterBuffer *buf = query_data->pic_parse_buffer; + + buf->flags.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + VTRACE("frame_mbs_only_flag = %d", buf->flags.bits.frame_mbs_only_flag); + + buf->flags.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag; + VTRACE("pic_order_present_flag = %d", buf->flags.bits.pic_order_present_flag); + + buf->flags.bits.delta_pic_order_always_zero_flag = parser->info.active_SPS.delta_pic_order_always_zero_flag; + VTRACE("delta_pic_order_always_zero_flag = %d", buf->flags.bits.delta_pic_order_always_zero_flag); + + buf->flags.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag; + VTRACE("redundant_pic_cnt_present_flag = %d", buf->flags.bits.redundant_pic_cnt_present_flag); + + buf->flags.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag; + VTRACE("weighted_pred_flag = %d", buf->flags.bits.weighted_pred_flag); + + buf->flags.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag; + VTRACE("entropy_coding_mode_flag = %d", buf->flags.bits.entropy_coding_mode_flag); + + buf->flags.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag; + VTRACE("deblocking_filter_control_present_flag = %d", buf->flags.bits.deblocking_filter_control_present_flag); + + buf->flags.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc; + VTRACE("weighted_bipred_idc = %d", buf->flags.bits.weighted_bipred_idc); + + buf->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1; + VTRACE("num_slice_groups_minus1 = %d", buf->num_slice_groups_minus1); + + buf->slice_group_map_type = parser->info.active_PPS.slice_group_map_type; + VTRACE("slice_group_map_type = %d", buf->slice_group_map_type); + + buf->log2_slice_group_change_cycle = 0; + + buf->chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc; + VTRACE("chroma_format_idc = %d", buf->chroma_format_idc); + + buf->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; + VTRACE("log2_max_pic_order_cnt_lsb_minus4 = %d", buf->log2_max_pic_order_cnt_lsb_minus4); + + buf->pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type; + VTRACE("pic_order_cnt_type = %d", buf->pic_order_cnt_type); + + buf->log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4; + VTRACE("log2_max_frame_num_minus4 = %d", buf->log2_max_frame_num_minus4); + + + buf->residual_colour_transform_flag = 
parser->info.active_SPS.residual_colour_transform_flag; + VTRACE("residual_colour_transform_flag = %d", buf->residual_colour_transform_flag); + + buf->num_ref_idc_l0_active_minus1= parser->info.active_PPS.num_ref_idx_l0_active-1; + VTRACE("num_ref_idc_l0_active_minus1 = %d", buf->num_ref_idc_l0_active_minus1); + + buf->num_ref_idc_l1_active_minus1= parser->info.active_PPS.num_ref_idx_l1_active-1; + VTRACE("num_ref_idc_l1_active_minus1 = %d", buf->num_ref_idc_l1_active_minus1); + + VTRACE("vbp_set_pic_parse_buffer_h264secure ends"); +} + +static void vbp_set_codec_data_h264secure( + struct h264_viddec_parser *parser, + vbp_data_h264 *query_data) +{ + VTRACE("vbp_set_codec_data_h264secure +++"); + vbp_codec_data_h264* codec_data = query_data->codec_data; + + /* The following variables are used to detect if there is new SPS or PPS */ + uint8 seq_parameter_set_id = codec_data->seq_parameter_set_id; + uint8 pic_parameter_set_id = codec_data->pic_parameter_set_id; + int frame_width = codec_data->frame_width; + int frame_height = codec_data->frame_height; + + /* parameter id */ + codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id; + codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id; + + /* profile and level */ + codec_data->profile_idc = parser->info.active_SPS.profile_idc; + codec_data->level_idc = parser->info.active_SPS.level_idc; + + + /*constraint flag sets (h.264 Spec v2009)*/ + codec_data->constraint_set0_flag = (parser->info.active_SPS.constraint_set_flags & 0x10) >> 4; + codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x8) >> 3; + codec_data->constraint_set2_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2; + codec_data->constraint_set3_flag = (parser->info.active_SPS.constraint_set_flags & 0x2) >> 1; + codec_data->constraint_set4_flag = parser->info.active_SPS.constraint_set_flags & 0x1; + + /* reference frames */ + codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames; + + if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag && + !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag) + { + /* no longer necessary: two fields share the same interlaced surface */ + /* codec_data->num_ref_frames *= 2; */ + } + + codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; + + /* frame coding */ + codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; + + /* frame dimension */ + codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1 ) * 16; + + codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16; + + /* cropping information */ + codec_data->crop_left = 0; + codec_data->crop_right = 0; + codec_data->crop_top = 0; + codec_data->crop_bottom = 0; + if(parser->info.active_SPS.sps_disp.frame_cropping_flag) { + int CropUnitX = 0, CropUnitY = 0, SubWidthC = 0, SubHeightC = 0; + int ChromaArrayType = 0; + if(parser->info.active_SPS.sps_disp.separate_colour_plane_flag == 0) { + if(parser->info.active_SPS.sps_disp.chroma_format_idc == 1) { + SubWidthC = 2; + SubHeightC = 2; + } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 2) { + SubWidthC = 2; + SubHeightC = 1; + } else if( 
parser->info.active_SPS.sps_disp.chroma_format_idc == 3) { + SubWidthC = 1; + SubHeightC = 1; + } + ChromaArrayType = parser->info.active_SPS.sps_disp.chroma_format_idc; + } + + if(ChromaArrayType == 0) { + CropUnitX = 1; + CropUnitY = 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + } else { + CropUnitX = SubWidthC; + CropUnitY = SubHeightC * ( 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag); + } + + codec_data->crop_left = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset; + codec_data->crop_right = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset; + codec_data->crop_top = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset; + codec_data->crop_bottom = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset; + } + /* aspect ratio */ + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag) + { + codec_data->aspect_ratio_idc = + parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc; + + if (codec_data->aspect_ratio_idc < 17) + { + codec_data->sar_width = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][0]; + codec_data->sar_height = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][1]; + } + else if (codec_data->aspect_ratio_idc == 255) + { + codec_data->sar_width = + parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width; + + codec_data->sar_height = + parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height; + } + else + { + codec_data->sar_width = 0; + codec_data->sar_height = 0; + } + } + else + { + // unspecified + codec_data->aspect_ratio_idc = 0; + codec_data->sar_width = 0; + codec_data->sar_height = 0; + } + + /* video format */ + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag) + { + codec_data->video_format = + parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format; + } + else + { + // Unspecified video format + codec_data->video_format = 5; + } + + codec_data->video_full_range_flag = + parser->info.active_SPS.sps_disp.vui_seq_parameters.video_full_range_flag; + + + if (parser->info.active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag) + { + codec_data->matrix_coefficients = + parser->info.active_SPS.sps_disp.vui_seq_parameters.matrix_coefficients; + } + else + { + // Unspecified + codec_data->matrix_coefficients = 2; + } + + codec_data->bit_rate = parser->info.active_SPS.sps_disp.vui_seq_parameters.bit_rate_value; + + /* picture order type and count */ + codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; + codec_data->pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type; + + + /* udpate sps and pps status */ + query_data->new_sps = (seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0; + query_data->new_pps = (pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 
1 : 0; + + query_data->has_sps = parser->info.active_SPS.seq_parameter_set_id != 0xff; + query_data->has_pps = parser->info.active_PPS.seq_parameter_set_id != 0xff; + VTRACE("parser->info.active_SPS.seq_parameter_set_id = %d", parser->info.active_SPS.seq_parameter_set_id); + VTRACE("parser->info.active_PPS.seq_parameter_set_id = %d", parser->info.active_PPS.seq_parameter_set_id); + VTRACE("has_sps = %d, has_pps %d", query_data->has_sps, query_data->has_pps); + + if ( frame_width != codec_data->frame_width || frame_height != codec_data->frame_height) + { + query_data->new_sps = 1; + query_data->new_pps = 1; + } + VAPictureParameterBufferH264 *pic_parms; + pic_parms = query_data->pic_data[0].pic_parms; + pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1; + /* frame height in MBS */ + pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1; + if (parser->info.has_slice) { + VTRACE("Found slice info in the buffer"); + codec_data->has_slice= 1; + } else { + VTRACE("No slice info in the buffer"); + codec_data->has_slice = 0; + } + + VTRACE("vbp_set_codec_data_h264secure ---"); +} + + +static uint32_t vbp_add_pic_data_h264secure(vbp_context *pcontext) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + VTRACE("vbp_add_pic_data_h264secure +++"); + vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; + struct h264_viddec_parser* parser = NULL; + vbp_picture_data_h264* pic_data = NULL; + VAPictureParameterBufferH264* pic_parms = NULL; + + parser = (struct h264_viddec_parser *)cxt->codec_data; + + if (0 == parser->info.SliceHeader.first_mb_in_slice) + { + /* a new picture is parsed */ + query_data->num_pictures++; + } + + if (query_data->num_pictures == 0) + { + /* partial frame */ + query_data->num_pictures = 1; + } + + if (query_data->num_pictures > MAX_NUM_PICTURES) + { + ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES); + return VBP_DATA; + } + + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + WTRACE("MB address does not start from 0!"); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + pic_parms = pic_data->pic_parms; + + // relax this condition to support partial frame parsing + + //if (parser->info.SliceHeader.first_mb_in_slice == 0) + { + /** + * picture parameter only needs to be set once, + * even multiple slices may be encoded + */ + + /* VAPictureParameterBufferH264 */ + pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE; + pic_parms->CurrPic.frame_idx = 0; + if (parser->info.img.field_pic_flag == 1) + { + if (parser->info.img.bottom_field_flag) + { + pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD; + } + else + { + /* also OK set to 0 (from test suite) */ + pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD; + } + } + else + { + pic_parms->CurrPic.flags = 0; /* frame picture */ + } + pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc; + pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc; + pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num; + + /* don't care if current frame is used as long term reference */ + if (parser->info.SliceHeader.nal_ref_idc != 0) + { + pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE; + } + + pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1; + 
pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1; + pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8; + pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8; + pic_parms->seq_fields.value = 0; + pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc; + pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag; + pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; + pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag; + + /* new fields in libva 0.31 */ + pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; + pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4; + pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type; + pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; + pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag =parser->info.active_SPS.delta_pic_order_always_zero_flag; + + + /* referened from UMG_Moorstown_TestSuites */ + pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 1 : 0; + + pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1; + pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type; + pic_parms->slice_group_change_rate_minus1 = 0; + pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26; + pic_parms->pic_init_qs_minus26 = 0; + pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset; + pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset; + + pic_parms->pic_fields.value = 0; + pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag; + pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag; + pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc; + pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag; + + /* new LibVA fields in v0.31*/ + pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag; + pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag; + pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag; + pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0; + + /* all slices in the pciture have the same field_pic_flag */ + pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag; + pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag; + + pic_parms->frame_num = parser->info.SliceHeader.frame_num; + } + + + /* set reference frames, and num_ref_frames */ + 
vbp_set_reference_frames_h264secure(parser, pic_parms); + if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + int frame_idx; + for (frame_idx = 0; frame_idx < 16; frame_idx++) + { + pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE; + pic_parms->ReferenceFrames[frame_idx].frame_idx = 0; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID; + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0; + } + /* num of reference frame is 0 if current picture is IDR */ + pic_parms->num_ref_frames = 0; + } + else + { + /* actual num_ref_frames is set in vbp_set_reference_frames_h264 */ + } + + VTRACE("vbp_add_pic_data_h264secure ---"); + return VBP_OK; +} + +static uint32_t vbp_add_slice_data_h264secure(vbp_context *pcontext, uint32 key) +{ + VTRACE("vbp_add_slice_data_h264secure +++"); + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint32 bit, byte; + uint8 is_emul; + + vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; + VASliceParameterBufferH264 *slc_parms = NULL; + vbp_slice_data_h264 *slc_data = NULL; + struct h264_viddec_parser* h264_parser = NULL; + h264_Slice_Header_t* slice_header = NULL; + vbp_picture_data_h264* pic_data = NULL; + + + h264_parser = (struct h264_viddec_parser *)cxt->codec_data; + int pic_data_index = query_data->num_pictures - 1; + VTRACE("pic_data_index = %d", pic_data_index); + + if (pic_data_index < 0) + { + ETRACE("invalid picture data index."); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + VTRACE("pic_data->num_slices = %d", pic_data->num_slices); + + slc_data = &(pic_data->slc_data[pic_data->num_slices]); + slc_data->buffer_addr = cxt->parse_cubby.buf; + slc_parms = &(slc_data->slc_parms); + + slc_data->nal_unit_type = h264_parser->info.nal_unit_type; + + + /* No longer used, MSVDX firmware will maintain it */ + slc_data->slice_offset = 0; + slc_parms->slice_data_offset = 0; + slc_parms->slice_data_size = 0; + + /* whole slice is in this buffer */ + slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + + // The slice key is stored in slice_data_bit_offset and passed to MSVDX + slc_parms->slice_data_bit_offset = key; + + slice_header = &(h264_parser->info.SliceHeader); + slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice; + + if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag & + (!(h264_parser->info.SliceHeader.field_pic_flag))) + { + slc_parms->first_mb_in_slice /= 2; + } + + slc_parms->slice_type = slice_header->slice_type; + + slc_parms->direct_spatial_mv_pred_flag = slice_header->direct_spatial_mv_pred_flag; + + slc_parms->num_ref_idx_l0_active_minus1 = 0; + slc_parms->num_ref_idx_l1_active_minus1 = 0; + if (slice_header->slice_type == h264_PtypeI) + { + } + else if (slice_header->slice_type == h264_PtypeP) + { + slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; + } + else if (slice_header->slice_type == h264_PtypeB) + { + slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; + slc_parms->num_ref_idx_l1_active_minus1 = slice_header->num_ref_idx_l1_active - 1; + } + else + { + WTRACE("slice type %d is not supported.", slice_header->slice_type); + } + + slc_parms->cabac_init_idc = slice_header->cabac_init_idc; + slc_parms->slice_qp_delta = slice_header->slice_qp_delta; + slc_parms->disable_deblocking_filter_idc = slice_header->disable_deblocking_filter_idc; + slc_parms->slice_alpha_c0_offset_div2 = 
+ slc_parms->slice_beta_offset_div2 = slice_header->slice_beta_offset_div2; + + + vbp_set_pre_weight_table_h264secure(h264_parser, slc_parms); + vbp_set_slice_ref_list_h264secure(h264_parser, slc_parms); + + + pic_data->num_slices++; + + if (pic_data->num_slices > MAX_NUM_SLICES) + { + ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES); + return VBP_DATA; + } + + if (pic_data->num_slices > 1) + { + ITRACE("number of slices per picture is %d.", pic_data->num_slices); + } + + VTRACE("vbp_add_slice_data_h264secure ---"); + return VBP_OK; +} + +/** +* parse decoder configuration data +*/ +uint32 vbp_parse_init_data_h264secure(vbp_context* pcontext) +{ + /* parsing the AVCDecoderConfigurationRecord structure (see the MPEG-4 Part 15 spec) */ + + uint8 configuration_version = 0; + uint8 AVC_profile_indication = 0; + uint8 profile_compatibility = 0; + uint8 AVC_level_indication = 0; + uint8 length_size_minus_one = 0; + uint8 num_of_sequence_parameter_sets = 0; + uint8 num_of_picture_parameter_sets = 0; + uint16 sequence_parameter_set_length = 0; + uint16 picture_parameter_set_length = 0; + + int i = 0; + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + + vbp_h264_parser_private *parser_private = (vbp_h264_parser_private *)pcontext->parser_private; + //Enable emulation prevention + cxt->getbits.is_emul_reqd = 1; + + /* check whether the configuration data is start-code prefixed */ + viddec_sc_parse_cubby_cxt_t cubby = cxt->parse_cubby; + viddec_parser_ops_t *ops = pcontext->parser_ops; + int ret = ops->parse_sc((void *)&cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + if (ret == 1) + { + WTRACE("configuration data is start-code prefixed."); + parser_private->bitstream_pattern = H264_BS_SC_PREFIXED; + return vbp_parse_start_code_h264secure(pcontext); + } + + + uint8* cur_data = cxt->parse_cubby.buf; + + + if (cxt->parse_cubby.size < 6) + { + /* need at least 6 bytes to start parsing the structure, see MPEG-4 Part 15 */ + return VBP_DATA; + } + + configuration_version = *cur_data++; + AVC_profile_indication = *cur_data++; + + + profile_compatibility = *cur_data++; + AVC_level_indication = *cur_data++; + + /* ITRACE("Level indication: %d", AVC_level_indication);*/ + /* 2 bits of length_size_minus_one, 6 bits of reserved (111111) */ + length_size_minus_one = (*cur_data) & 0x3; + + if (length_size_minus_one != 3) + { + WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1); + } + + parser_private->NAL_length_size = length_size_minus_one + 1; + + cur_data++; + + /* 3 bits of reserved (111) and 5 bits of num_of_sequence_parameter_sets */ + num_of_sequence_parameter_sets = (*cur_data) & 0x1f; + if (num_of_sequence_parameter_sets > 1) + { + WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets); + } + if (num_of_sequence_parameter_sets > MAX_NUM_SPS) + { + /* this would never happen as MAX_NUM_SPS = 32 */ + WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS); + } + cur_data++; + + cxt->list.num_items = 0; + for (i = 0; i < num_of_sequence_parameter_sets; i++) + { + if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size) + { + /* need at least 2 bytes to parse sequence_parameter_set_length */ + ETRACE("Not enough data to parse SPS length."); + return VBP_DATA; + } + + /* 16 bits */ + sequence_parameter_set_length = vbp_utils_ntohs(cur_data); + + + cur_data += 2; + + if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length >
cxt->parse_cubby.size) + { + /* need at least sequence_parameter_set_length bytes for the SPS */ + ETRACE("Not enough data to parse SPS."); + return VBP_DATA; + } + + cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf; + + /* end pos is exclusive */ + cxt->list.data[cxt->list.num_items].edpos = + cxt->list.data[cxt->list.num_items].stpos + sequence_parameter_set_length; + + cxt->list.num_items++; + + cur_data += sequence_parameter_set_length; + } + + if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size) + { + /* need at least one more byte to parse num_of_picture_parameter_sets */ + ETRACE("Not enough data to parse number of PPS."); + return VBP_DATA; + } + + num_of_picture_parameter_sets = *cur_data++; + if (num_of_picture_parameter_sets > 1) + { + /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */ + } + + for (i = 0; i < num_of_picture_parameter_sets; i++) + { + if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size) + { + /* need at least 2 bytes to parse picture_parameter_set_length */ + ETRACE("Not enough data to parse PPS length."); + return VBP_DATA; + } + + /* 16 bits */ + picture_parameter_set_length = vbp_utils_ntohs(cur_data); + + cur_data += 2; + + if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size) + { + /* need at least picture_parameter_set_length bytes for the PPS */ + ETRACE("Not enough data to parse PPS."); + return VBP_DATA; + } + + cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf; + + /* end pos is exclusive */ + cxt->list.data[cxt->list.num_items].edpos = + cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length; + + cxt->list.num_items++; + + cur_data += picture_parameter_set_length; + } + + if ((cur_data - cxt->parse_cubby.buf) != cxt->parse_cubby.size) + { + WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.", + cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf)); + } + + parser_private->bitstream_pattern = H264_BS_LENGTH_PREFIXED; + return VBP_OK; +} + +static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p, int *NAL_length_size) +{ + switch (*NAL_length_size) + { + case 4: + return vbp_utils_ntohl(p); + + case 3: + { + uint32_t i = ((*p) << 16) + ((*(p+1)) << 8) + ((*(p+2))); + return i; + } + + case 2: + return vbp_utils_ntohs(p); + + case 1: + return *p; + + default: + WTRACE("invalid NAL_length_size: %d.", *NAL_length_size); + /* default to 4 bytes for length */ + *NAL_length_size = 4; + return vbp_utils_ntohl(p); + } +}
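vbp_get_NAL_length_h264() above reads the big-endian length prefix at whatever width the avcC header declared. A minimal, self-contained sketch of the walk that vbp_parse_start_code_h264secure() performs below, specialized to a 4-byte length field (hypothetical helper, for illustration only):

#include <stdint.h>
#include <stddef.h>

/* Count NAL units in a 4-byte length-prefixed AVC buffer. */
static size_t count_nal_units(const uint8_t *buf, size_t size)
{
    size_t pos = 0, count = 0;
    while (pos + 4 <= size) {
        uint32_t nal_len = ((uint32_t)buf[pos] << 24) | ((uint32_t)buf[pos + 1] << 16) |
                           ((uint32_t)buf[pos + 2] << 8) | buf[pos + 3];
        if (nal_len == 0 || nal_len > size - pos - 4)
            break;              /* malformed length: stop, as the parser does */
        pos += 4 + nal_len;     /* skip the length field and the NAL payload */
        count++;
    }
    return count;
}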
+/** +* An H.264 elementary stream in this format does not carry start codes; +* instead, it is comprised of the size of each NAL unit followed by the +* NAL unit payload. See MPEG-4 Part 15 (sample format). +*/ + +/* The start code prefix is 00 00 01, which is 3 bytes. +*/ +#define H264_SC_SIZE 3 +uint32 vbp_parse_start_code_h264secure(vbp_context *pcontext) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + vbp_h264_parser_private *parser_private = (vbp_h264_parser_private *)pcontext->parser_private; + + /* reset query data for the new sample buffer */ + vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data; + int i; + + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].num_slices = 0; + } + query_data->num_pictures = 0; + + cxt->list.num_items = 0; + + /* reset the start position of the first item to 0 in case there is only one item */ + cxt->list.data[0].stpos = 0; + + /* start code emulation prevention bytes are present in the NAL */ + cxt->getbits.is_emul_reqd = 1; + + if (parser_private->bitstream_pattern == H264_BS_LENGTH_PREFIXED) + { + viddec_sc_parse_cubby_cxt_t* cubby = NULL; + int32_t size_left = 0; + int32_t size_parsed = 0; + int32_t NAL_length = 0; + + cubby = &(cxt->parse_cubby); + + size_left = cubby->size; + + while (size_left >= parser_private->NAL_length_size) + { + NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed, &parser_private->NAL_length_size); + if (NAL_length <= 0 || NAL_length > size_left - parser_private->NAL_length_size) + { + ETRACE("Invalid NAL_length parsed."); + break; + } + + size_parsed += parser_private->NAL_length_size; + cxt->list.data[cxt->list.num_items].stpos = size_parsed; + size_parsed += NAL_length; /* skip the NAL bytes */ + /* the end position is exclusive */ + cxt->list.data[cxt->list.num_items].edpos = size_parsed; + cxt->list.num_items++; + if (cxt->list.num_items >= MAX_IBUFS_PER_SC) + { + ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC); + break; + } + + size_left = cubby->size - size_parsed; + } + + if (size_left != 0 && parser_private->length_prefix_verified == 0) + { + WTRACE("Elementary stream is not aligned (%d).", size_left); + + /* attempt to correct the length prefix to a start-code prefix only once; if it succeeds, we will + * always treat the bit stream as start-code prefixed; otherwise, treat the bit stream as length prefixed + */ + parser_private->length_prefix_verified = 1; + viddec_sc_parse_cubby_cxt_t temp_cubby = cxt->parse_cubby; + + viddec_parser_ops_t *ops = pcontext->parser_ops; + int ret = ops->parse_sc((void *)&temp_cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + + /* found a start code */ + if (ret == 1) + { + WTRACE("Stream was supposed to be length prefixed, but actually is start-code prefixed."); + parser_private->NAL_length_size = 0; + parser_private->bitstream_pattern = H264_BS_SC_PREFIXED; + /* reset parsing data */ + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].num_slices = 0; + } + query_data->num_pictures = 0; + cxt->list.num_items = 0; + } + } + }
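For the start-code path below, ops->parse_sc scans the buffer for the 3-byte Annex B prefix defined by H264_SC_SIZE. A minimal sketch of that scan (a hypothetical helper; emulation-prevention handling and the parser's phase state are omitted):

#include <stdint.h>
#include <stddef.h>

/* Find the next 00 00 01 start code; on success store the offset of the
 * first byte after the prefix in *pos and return 1, otherwise return 0. */
static int find_start_code(const uint8_t *buf, size_t size, size_t *pos)
{
    size_t i;
    for (i = 0; i + H264_SC_SIZE <= size; i++) {
        if (buf[i] == 0x00 && buf[i + 1] == 0x00 && buf[i + 2] == 0x01) {
            *pos = i + H264_SC_SIZE;
            return 1;
        }
    }
    return 0;
}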
+ + if (parser_private->bitstream_pattern == H264_BS_SC_PREFIXED) + { + viddec_sc_parse_cubby_cxt_t cubby; + /* memory copy without updating cxt->parse_cubby */ + cubby = cxt->parse_cubby; + viddec_parser_ops_t *ops = pcontext->parser_ops; + int ret = 0; + + while (1) + { + ret = ops->parse_sc((void *)&cubby, + NULL, /* context, not used */ + &(cxt->sc_prefix_info)); + if (ret == 1) + { + if (cxt->list.num_items == 0) + { + cxt->list.data[0].stpos = cubby.sc_end_pos; + } + else + { + cxt->list.data[cxt->list.num_items].stpos = + cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos; + cxt->list.data[cxt->list.num_items - 1].edpos = cxt->list.data[cxt->list.num_items].stpos - H264_SC_SIZE; + } + + cubby.phase = 0; + cubby.buf = cxt->parse_cubby.buf + + cxt->list.data[cxt->list.num_items].stpos; + + cubby.size = cxt->parse_cubby.size - + cxt->list.data[cxt->list.num_items].stpos; + + cxt->list.num_items++; + if (cxt->list.num_items >= MAX_IBUFS_PER_SC) + { + WTRACE("Num items exceeds the limit!"); + /* not fatal, just stop parsing */ + break; + } + } + else + { + if (cxt->list.num_items == 0) + { + cxt->list.num_items = 1; + parser_private->bitstream_pattern = H264_BS_SINGLE_NAL; + WTRACE("Stream was supposed to be SC prefixed, but actually contains a single NAL."); + } + cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size; + break; + } + } + + } + + if (parser_private->bitstream_pattern == H264_BS_SINGLE_NAL) + { + cxt->list.num_items = 1; + cxt->list.data[0].stpos = 0; + cxt->list.data[0].edpos = cxt->parse_cubby.size; + } + + + return VBP_OK; +} + +/** +* +* process the parsing result after a NAL unit is parsed +* +*/ +uint32 vbp_process_parsing_result_h264secure( vbp_context *pcontext, int i) +{ + if (i >= MAX_NUM_SLICES) + { + return VBP_PARM; + } + + uint32 error = VBP_OK; + + struct h264_viddec_parser* parser = NULL; + parser = (struct h264_viddec_parser *)&( pcontext->parser_cxt->codec_data[0]); + vbp_data_h264* query_data = (vbp_data_h264 *)pcontext->query_data; + switch (parser->info.nal_unit_type) + { + case h264_NAL_UNIT_TYPE_SLICE: + VTRACE("slice header is parsed."); + break; + + case h264_NAL_UNIT_TYPE_IDR: + VTRACE("IDR header is parsed."); + break; + case h264_NAL_UNIT_TYPE_SEI: + VTRACE("SEI header is parsed."); + break; + + case h264_NAL_UNIT_TYPE_SPS: + VTRACE("SPS header is parsed."); + break; + + case h264_NAL_UNIT_TYPE_PPS: + VTRACE("PPS header is parsed."); + break; + + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + VTRACE("Access unit delimiter is parsed."); + break; + + case h264_NAL_UNIT_TYPE_EOSeq: + VTRACE("EOSeq is parsed."); + break; + + case h264_NAL_UNIT_TYPE_EOstream: + VTRACE("EOStream is parsed."); + break; + + default: + WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type); + break; + } + + if (query_data->num_pictures == MAX_NUM_PICTURES && parser->info.img.field_pic_flag != 1) + { + WTRACE("more than one frame in the buffer is found (%d)", query_data->num_pictures); + return (error == VBP_OK ? VBP_MULTI : error); + } + return error; +}
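The switch above dispatches on nal_unit_type, which the parser extracted from the one-byte NAL header. For reference, the header layout from the H.264 spec (7.3.1), as a standalone sketch with a hypothetical input byte:

/* H.264 NAL unit header (spec 7.3.1):
 *   forbidden_zero_bit (1) | nal_ref_idc (2) | nal_unit_type (5) */
uint8_t nal_first_byte = 0x65;                          /* e.g. an IDR slice */
uint8_t nal_ref_idc    = (nal_first_byte >> 5) & 0x03;  /* 0 means not used for reference */
uint8_t nal_unit_type  = nal_first_byte & 0x1f;         /* 5 == h264_NAL_UNIT_TYPE_IDR */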
+ +/* +* +* fill the query data structure after the sample buffer is parsed +* +*/ +uint32 vbp_populate_query_data_h264secure(vbp_context *pcontext) +{ + vbp_data_h264 *query_data = NULL; + struct h264_viddec_parser *parser = NULL; + struct vbp_h264_parser_private_t* private = NULL; + + parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data; + query_data = (vbp_data_h264 *)pcontext->query_data; + private = (struct vbp_h264_parser_private_t *)pcontext->parser_private; + + vbp_set_codec_data_h264secure(parser, query_data); + + vbp_set_pic_parse_buffer_h264secure(parser, query_data); + + /* buffer number */ + query_data->buf_number = buffer_counter; + + /* VAIQMatrixBufferH264 */ + vbp_set_scaling_list_h264secure(parser, query_data->IQ_matrix_buf); + + return VBP_OK; +} + +uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 size) +{ + uint32 error = VBP_OK; + uint32 offset = 0; + uint32 key = 0; + uint32 i,j; + uint32 weight_pos = 0; + vbp_h264_sliceheader* sliceheader_p; + uint32 reordercmdnum = 0; + vbp_h264_sliceheader sliceheader; + sliceheader_p = &sliceheader; + memset(sliceheader_p, 0, sizeof(vbp_h264_sliceheader)); + uint32 slice_num = 0; + vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data; + + for (i = 0; i < MAX_NUM_PICTURES; i++) + { + query_data->pic_data[i].num_slices = 0; + } + query_data->num_pictures = 0; + + + uint8_t *data_u8 = (uint8_t *) newdata; + + while (offset < size) { + memcpy(&key, (uint8_t *)newdata+offset, sizeof(uint32)); + VTRACE("key = %x", key); + + if ((key == TERMINATE_KEY) || (key == BUF_TOO_SMALL_KEY) || (key == SLICE_TOO_MAY_KEY)) { + break; + } + + slice_num++; + offset += sizeof(uint32); + VTRACE("offset = %d", offset); + sliceheader_p->sliceHeaderKey = key; + memcpy((void*)&sliceheader_p->parsedSliceHeader, + (uint8_t *)newdata+offset, + sizeof(VAParseSliceHeaderGroupBuffer)); + + reordercmdnum = sliceheader_p->parsedSliceHeader.num_reorder_cmds[0] + + sliceheader_p->parsedSliceHeader.num_reorder_cmds[1]; + + for (i = 0; i < 2; i++) { + for (j = 0; j < 32; j++) { + if ((sliceheader_p->parsedSliceHeader.weights_present[i][0] >> j) & 0x01) { + weight_pos += 2; + } + if ((sliceheader_p->parsedSliceHeader.weights_present[i][1] >> j) & 0x01) { + weight_pos += 4; + } + } + } + sliceheader_p->reorder_cmd = (uint32_t *)((uint8_t *)newdata + offset + sizeof(VAParseSliceHeaderGroupBuffer)); + sliceheader_p->weight = (int16_t *)((uint8_t *)sliceheader_p->reorder_cmd + reordercmdnum * sizeof(uint32)); + + sliceheader_p->pic_marking = (uint32_t *)((uint8_t *)sliceheader_p->weight + weight_pos); + offset += sliceheader_p->parsedSliceHeader.size; + error = pcontext->parser_ops->update_data(pcontext->parser_cxt, + sliceheader_p, sizeof(vbp_h264_sliceheader)); + if (error != VBP_OK) + { + ETRACE("update_data error = 0x%x", error); + return error; + } + + error = vbp_add_pic_data_h264secure(pcontext); + if (error != VBP_OK) + { + ETRACE("vbp_add_pic_data_h264secure error = 0x%x", error); + return error; + } + + error = vbp_add_slice_data_h264secure(pcontext, key); + if (error != VBP_OK) + { + ETRACE("vbp_add_slice_data_h264secure error = 0x%x", error); + return error; + } + } + if (key != TERMINATE_KEY) + { + ETRACE("Did not find the terminate key 0xFFFFFF!"); + return VBP_DATA; + } else { + if (slice_num < 1) { + ETRACE("Did not find a valid slice header!"); + return VBP_DATA; + } + } + error = vbp_populate_query_data_h264secure(pcontext); + + if (error != VBP_OK) + { + ETRACE("vbp_populate_query_data_h264secure error = 0x%x", error); + return error; + } + + return error; +}
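Each record in the update buffer walked above is a fixed parsed-header block followed by three variable-length arrays, located purely by pointer arithmetic. A sketch of one record hop, derived from that arithmetic (ParsedSliceHeaderH264Secure is the plugin-side mirror of VAParseSliceHeaderGroupBuffer defined later in this patch; this assumes the two layouts match, as the code above does):

#include <string.h>
#include <stdint.h>

/* Read the key of one slice record and return the size of the whole record.
 * Layout per record: uint32 key | parsed header | reorder cmds | weights |
 * pic-marking words; parsedSliceHeader.size covers everything after the key. */
static uint32_t slice_record_size(const uint8_t *rec, uint32_t *key_out)
{
    ParsedSliceHeaderH264Secure hdr;
    memcpy(key_out, rec, sizeof(uint32_t));
    memcpy(&hdr, rec + sizeof(uint32_t), sizeof(hdr));
    return sizeof(uint32_t) + hdr.size;
}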
ETRACE("vbp_populate_query_data_h264secure error = 0x%x",error); + return error; + } + + return error; +} + diff --git a/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.h b/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.h new file mode 100755 index 0000000..4b08360 --- /dev/null +++ b/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.h @@ -0,0 +1,73 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2009 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + + +#ifndef VBP_H264SECURE_PARSER_H +#define VBP_H264SECURE_PARSER_H + +/* + * setup parser's entry points + */ +uint32 vbp_init_parser_entries_h264secure(vbp_context *pcontext); + +/* + * allocate query data + */ +uint32 vbp_allocate_query_data_h264secure(vbp_context *pcontext); + +/* + * free query data + */ +uint32 vbp_free_query_data_h264secure(vbp_context *pcontext); + +/* + * parse initialization data + */ +uint32 vbp_parse_init_data_h264secure(vbp_context *pcontext); + +/* + * parse start code. Only support lenght prefixed mode. Start + * code prefixed is not supported. 
+/* + * parse the start code. Only length-prefixed mode is supported; start-code + * prefixed input is not supported. + */ +uint32 vbp_parse_start_code_h264secure(vbp_context *pcontext); + +/* + * process parsing result + */ +uint32 vbp_process_parsing_result_h264secure(vbp_context *pcontext, int list_index); + +/* + * query parsing result + */ +uint32 vbp_populate_query_data_h264secure(vbp_context *pcontext); + +/* + * update the parsing result with extra data + */ + +uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 size); + + +#endif /*VBP_H264SECURE_PARSER_H*/ + diff --git a/mixvbp/vbp_manager/vbp_loader.c b/mixvbp/vbp_manager/vbp_loader.c index 972ab2d..ab5914f 100755 --- a/mixvbp/vbp_manager/vbp_loader.c +++ b/mixvbp/vbp_manager/vbp_loader.c @@ -174,7 +174,7 @@ uint32 vbp_flush(Handle hcontext) return error; } -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) uint32 vbp_update(Handle hcontext, void *newdata, uint32 size, void **data) { vbp_context *pcontext; diff --git a/mixvbp/vbp_manager/vbp_loader.h b/mixvbp/vbp_manager/vbp_loader.h index ad4b106..46da5d7 100755 --- a/mixvbp/vbp_manager/vbp_loader.h +++ b/mixvbp/vbp_manager/vbp_loader.h @@ -50,6 +50,9 @@ typedef unsigned short uint16; #ifndef uint32 typedef unsigned int uint32; #endif +#ifndef int16 +typedef short int16; +#endif typedef void *Handle; @@ -171,6 +174,7 @@ typedef struct _vbp_codec_data_h264 int bit_rate; + int has_slice; } vbp_codec_data_h264; typedef struct _vbp_slice_data_h264 @@ -226,6 +230,10 @@ typedef struct _vbp_data_h264 vbp_codec_data_h264* codec_data; +#ifdef USE_SLICE_HEADER_PARSING + VAParsePictureParameterBuffer* pic_parse_buffer; +#endif + } vbp_data_h264; /* @@ -406,7 +414,7 @@ enum _vbp_parser_type #ifdef USE_HW_VP8 VBP_VP8, #endif -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) VBP_H264SECURE, #endif }; @@ -459,8 +467,7 @@ uint32 vbp_query(Handle hcontext, void **data); */ uint32 vbp_flush(Handle hcontent); - -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) /* * update the vbp context using the new data * @param hcontext: handle to VBP context.
diff --git a/mixvbp/vbp_manager/vbp_utils.c b/mixvbp/vbp_manager/vbp_utils.c index 0da65b5..edd94c1 100755 --- a/mixvbp/vbp_manager/vbp_utils.c +++ b/mixvbp/vbp_manager/vbp_utils.c @@ -35,7 +35,7 @@ #ifdef USE_HW_VP8 #include "vbp_vp8_parser.h" #endif -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) #include "vbp_h264secure_parser.h" #endif @@ -131,7 +131,7 @@ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) break; #endif -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) case VBP_H264SECURE: parser_name = "libmixvbp_h264secure.so"; break; @@ -177,11 +177,11 @@ static uint32 vbp_utils_initialize_context(vbp_context *pcontext) #ifdef USE_HW_VP8 SET_FUNC_POINTER(VBP_VP8, vp8); #endif -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) SET_FUNC_POINTER(VBP_H264SECURE, h264secure); #endif } -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) if (pcontext->parser_type == VBP_H264SECURE) { pcontext->func_update_data = vbp_update_data_h264secure; } @@ -562,8 +562,7 @@ uint32 vbp_utils_flush(vbp_context *pcontext) return VBP_OK; } - -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) /** * * provide query data back to the consumer diff --git a/mixvbp/vbp_manager/vbp_utils.h b/mixvbp/vbp_manager/vbp_utils.h index 455951c..1f54ae6 100755 --- a/mixvbp/vbp_manager/vbp_utils.h +++ b/mixvbp/vbp_manager/vbp_utils.h @@ -64,7 +64,7 @@ typedef uint32 (*function_parse_init_data)(vbp_context* cxt); typedef uint32 (*function_parse_start_code)(vbp_context* cxt); typedef uint32 (*function_process_parsing_result)(vbp_context* cxt, int i); typedef uint32 (*function_populate_query_data)(vbp_context* cxt); -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) typedef uint32 (*function_update_data)(vbp_context* cxt, void *newdata, uint32 size); #endif @@ -101,7 +101,7 @@ struct vbp_context_t function_parse_start_code func_parse_start_code; function_process_parsing_result func_process_parsing_result; function_populate_query_data func_populate_query_data; -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) function_update_data func_update_data; #endif }; diff --git a/mixvbp/vbp_plugin/h264/Android.mk b/mixvbp/vbp_plugin/h264/Android.mk index d667227..c27b102 100755 --- a/mixvbp/vbp_plugin/h264/Android.mk +++ b/mixvbp/vbp_plugin/h264/Android.mk @@ -68,3 +68,37 @@ LOCAL_SHARED_LIBRARIES := libmixvbp liblog include $(BUILD_SHARED_LIBRARY) endif + +include $(CLEAR_VARS) +PLATFORM_SUPPORT_SLICE_HEADER_PARSER := merrifield + +ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_SLICE_HEADER_PARSER)),) +LOCAL_SRC_FILES := \ + h264parse.c \ + h264parse_bsd.c \ + h264parse_math.c \ + h264parse_mem.c \ + h264parse_sei.c \ + h264parse_pps.c \ + h264parse_sps.c \ + h264parse_dpb.c \ + h264parse_sh.c \ + secvideo/merrifield/viddec_h264secure_parse.c \ + mix_vbp_h264_stubs.c + +LOCAL_CFLAGS := -DUSE_SLICE_HEADER_PARSING + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/include \ + $(MIXVBP_DIR)/include \ + $(MIXVBP_DIR)/vbp_manager/include \ + $(MIXVBP_DIR)/vbp_manager/h264/include + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libmixvbp_h264secure +LOCAL_SHARED_LIBRARIES := libmixvbp liblog + +include $(BUILD_SHARED_LIBRARY) + +endif + diff --git 
a/mixvbp/vbp_plugin/h264/h264parse_dpb.c b/mixvbp/vbp_plugin/h264/h264parse_dpb.c index e0b7c8f..883057a 100644 --- a/mixvbp/vbp_plugin/h264/h264parse_dpb.c +++ b/mixvbp/vbp_plugin/h264/h264parse_dpb.c @@ -2778,7 +2778,7 @@ void h264_dpb_idr_memory_management (h264_Info * pInfo,seq_param_set_used_ptr ac h264_dpb_unmark_for_reference(p_dpb, p_dpb->active_fs->fs_idc); h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc); //h264_send_new_display_frame(0x01); //send ignore_frame signal to Host -#ifndef USE_AVC_SHORT_FORMAT +#if (!defined USE_AVC_SHORT_FORMAT && !defined USE_SLICE_HEADER_PARSING) /// Add into drop-out list for all frms in dpb without display if (!(viddec_h264_get_is_non_existent(p_dpb->active_fs))) { if ( viddec_h264_get_is_output(&(p_dpb->fs[p_dpb->fs_dpb_idc[idx]])) ) { //// This frame has been displayed but not released @@ -3000,7 +3000,7 @@ void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t id h264_dpb_set_active_fs(p_dpb, fs_idc); viddec_h264_set_is_frame_used(p_dpb->active_fs, 0); -#ifndef USE_AVC_SHORT_FORMAT +#if (!defined USE_AVC_SHORT_FORMAT && !defined USE_SLICE_HEADER_PARSING) //add to support frame relocation interface to host if (!(viddec_h264_get_is_non_existent(p_dpb->active_fs))) { @@ -3355,7 +3355,7 @@ void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int if (viddec_h264_get_is_non_existent(p_dpb->active_fs) == 0) { *existing = 1; -#ifndef USE_AVC_SHORT_FORMAT +#if (!defined USE_AVC_SHORT_FORMAT && !defined USE_SLICE_HEADER_PARSING) p_dpb->frame_id_need_to_be_displayed[p_dpb->frame_numbers_need_to_be_displayed]=p_dpb->active_fs->fs_idc; p_dpb->frame_numbers_need_to_be_displayed++; #endif diff --git a/mixvbp/vbp_plugin/h264/include/h264.h b/mixvbp/vbp_plugin/h264/include/h264.h index f0da7ed..aefd3b7 100755 --- a/mixvbp/vbp_plugin/h264/include/h264.h +++ b/mixvbp/vbp_plugin/h264/include/h264.h @@ -982,6 +982,8 @@ extern "C" { uint8_t last_I_frame_idc; uint8_t sei_b_state_ready; uint8_t gop_err_flag; + + uint8_t has_slice; } h264_Info; diff --git a/mixvbp/vbp_plugin/h264/secvideo/merrifield/viddec_h264secure_parse.c b/mixvbp/vbp_plugin/h264/secvideo/merrifield/viddec_h264secure_parse.c new file mode 100755 index 0000000..2e5ac06 --- /dev/null +++ b/mixvbp/vbp_plugin/h264/secvideo/merrifield/viddec_h264secure_parse.c @@ -0,0 +1,987 @@ +#include "viddec_parser_ops.h" + +#include "viddec_pm.h" + +#include "h264.h" +#include "h264parse.h" +#include "h264parse_dpb.h" +#include <vbp_trace.h> + +typedef struct _ParsedSliceHeaderH264Secure +{ + unsigned int size; + + unsigned char nal_ref_idc; + unsigned char nal_unit_type; + unsigned char slice_type; + unsigned char redundant_pic_cnt; + + unsigned short first_mb_in_slice; + char slice_qp_delta; + char slice_qs_delta; + + unsigned char luma_log2_weight_denom; + unsigned char chroma_log2_weight_denom; + unsigned char cabac_init_idc; + unsigned char pic_order_cnt_lsb; + + unsigned char pic_parameter_set_id; + unsigned short idr_pic_id; + unsigned char colour_plane_id; + + char slice_alpha_c0_offset_div2; + char slice_beta_offset_div2; + unsigned char slice_group_change_cycle; + unsigned char disable_deblocking_filter_idc; + + unsigned int frame_num; + int delta_pic_order_cnt_bottom; + int delta_pic_order_cnt[2]; + + unsigned char num_reorder_cmds[2]; + unsigned char num_ref_active_minus1[2]; + + unsigned int weights_present[2][2]; + + unsigned short num_mem_man_ops; + + union { + struct { + unsigned field_pic_flag : 1; + unsigned bottom_field_flag : 1; + unsigned num_ref_idx_active_override_flag : 1; + unsigned direct_spatial_mv_pred_flag : 1; + unsigned no_output_of_prior_pics_flag : 1; + unsigned long_term_reference_flag : 1; + unsigned idr_flag : 1; + unsigned anchor_pic_flag : 1; + unsigned inter_view_flag : 1; + } bits; + + unsigned short value; + } flags; + unsigned short view_id; + unsigned char priority_id; + unsigned char temporal_id; +} ParsedSliceHeaderH264Secure; + + +typedef struct _vbp_h264_sliceheader { + uint32_t sliceHeaderKey; + ParsedSliceHeaderH264Secure parsedSliceHeader; + uint32_t *reorder_cmd; + int16_t *weight; + uint32_t *pic_marking; +} vbp_h264_sliceheader;
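The flags union above packs all of the slice header's single-bit syntax elements into one 16-bit word, so the host can ship them in a fixed-size field. A short usage sketch with illustrative values:

/* Illustration: one 16-bit word carries every single-bit slice-header flag. */
ParsedSliceHeaderH264Secure sh = {0};
sh.flags.bits.field_pic_flag = 1;      /* the slice codes a field...    */
sh.flags.bits.bottom_field_flag = 0;   /* ...specifically the top field */
if (sh.flags.bits.field_pic_flag && !sh.flags.bits.bottom_field_flag) {
    /* maps to structure == TOP_FIELD in h264secure_Update_Slice_Header() */
}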
+ +/* Init function which can be called to initialize the local context on open, flush, and preserve */ +void viddec_h264secure_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) +{ + struct h264_viddec_parser* parser = ctxt; + h264_Info * pInfo = &(parser->info); + + if (!preserve) + { + /* we don't initialize this data if we want to preserve + sequence and gop information */ + h264_init_sps_pps(parser,persist_mem); + } + /* picture level info which will always be initialized */ + h264_init_Info_under_sps_pps_level(pInfo); + + return; +} + + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +uint32_t viddec_h264secure_parse(void *parent, void *ctxt) +{ + struct h264_viddec_parser* parser = ctxt; + + h264_Info * pInfo = &(parser->info); + + h264_Status status = H264_STATUS_ERROR; + + + uint8_t nal_ref_idc = 0; + + ///// Parse NAL Unit header + pInfo->img.g_new_frame = 0; + pInfo->push_to_cur = 1; + pInfo->is_current_workload_done =0; + pInfo->nal_unit_type = 0; + + h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); + + ///// Check the frame boundary for a non-VCL delimiter + h264_check_previous_frame_end(pInfo); + + pInfo->has_slice = 0; + + //////// Parse valid NAL unit + switch ( pInfo->nal_unit_type ) + { + case h264_NAL_UNIT_TYPE_IDR: + if (pInfo->got_start) { + pInfo->img.recovery_point_found |= 1; + } + + pInfo->sei_rp_received = 0; + /* fall through: an IDR NAL is also a slice */ + + case h264_NAL_UNIT_TYPE_SLICE: + pInfo->has_slice = 1; + break; + + ///// * Main profile doesn't support Data Partition, skipped.... *//// + case h264_NAL_UNIT_TYPE_DPA: + case h264_NAL_UNIT_TYPE_DPB: + case h264_NAL_UNIT_TYPE_DPC: + //OS_INFO("***********************DP feature, not supported currently*******************\n"); + status = H264_STATUS_NOTSUPPORT; + break; + + //// * Parsing SEI info *//// + case h264_NAL_UNIT_TYPE_SEI: + status = H264_STATUS_OK; + + //OS_INFO("*****************************SEI**************************************\n"); + if (pInfo->sps_valid) { + //h264_user_data_t user_data; /// Replace with tmp buffer while porting to FW + pInfo->number_of_first_au_info_nal_before_first_slice++; + /// parsing the SEI info + status = h264_Parse_Supplemental_Enhancement_Information_Message(parent, pInfo); + } + + //h264_rbsp_trailing_bits(pInfo); + break; + case h264_NAL_UNIT_TYPE_SPS: + { + //OS_INFO("*****************************SPS**************************************\n"); + /// + /// Cannot define a local SPS due to the current local stack size limitation!
+ /// Could be changed once the limitation is gone + /// + VTRACE("h264_NAL_UNIT_TYPE_SPS +++"); + uint8_t old_sps_id=0; + vui_seq_parameters_t_not_used vui_seq_not_used; + + old_sps_id = pInfo->active_SPS.seq_parameter_set_id; + h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used)); + + VTRACE("old_sps_id = %d", old_sps_id); + status = h264_Parse_SeqParameterSet(parent, pInfo, &(pInfo->active_SPS), &vui_seq_not_used, (int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL); + if (status == H264_STATUS_OK) { + VTRACE("pInfo->active_SPS.seq_parameter_set_id = %d", pInfo->active_SPS.seq_parameter_set_id); + h264_Parse_Copy_Sps_To_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_SPS.seq_parameter_set_id); + pInfo->sps_valid = 1; + + if (1==pInfo->active_SPS.pic_order_cnt_type) { + h264_Parse_Copy_Offset_Ref_Frames_To_DDR(pInfo,(int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL,pInfo->active_SPS.seq_parameter_set_id); + } + } + ///// Restore the active SPS if the new arrival's id changed + if (old_sps_id>=MAX_NUM_SPS) { + h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used)); + pInfo->active_SPS.seq_parameter_set_id = 0xff; + } + else { + if (old_sps_id!=pInfo->active_SPS.seq_parameter_set_id) { + h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); + } + else { + //h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set)); + // h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); + VTRACE("old_sps_id==pInfo->active_SPS.seq_parameter_set_id"); + // pInfo->active_SPS.seq_parameter_set_id = 0xff; + } + } + + pInfo->number_of_first_au_info_nal_before_first_slice++; + VTRACE("h264_NAL_UNIT_TYPE_SPS ---"); + } + break; + case h264_NAL_UNIT_TYPE_PPS: + { + //OS_INFO("*****************************PPS**************************************\n"); + VTRACE("h264_NAL_UNIT_TYPE_PPS +++"); + uint32_t old_sps_id = pInfo->active_SPS.seq_parameter_set_id; + uint32_t old_pps_id = pInfo->active_PPS.pic_parameter_set_id; + VTRACE("old_sps_id = %d, old_pps_id = %d", old_sps_id, old_pps_id); + + h264_memset(&pInfo->active_PPS, 0x0, sizeof(pic_param_set)); + pInfo->number_of_first_au_info_nal_before_first_slice++; + + if (h264_Parse_PicParameterSet(parent, pInfo, &pInfo->active_PPS)== H264_STATUS_OK) + { + h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_PPS.seq_parameter_set_id); + VTRACE("pInfo->active_PPS.seq_parameter_set_id = %d", pInfo->active_PPS.seq_parameter_set_id); + VTRACE("pInfo->active_SPS.seq_parameter_set_id = %d", pInfo->active_SPS.seq_parameter_set_id); + if (old_sps_id != pInfo->active_SPS.seq_parameter_set_id) + { + pInfo->Is_SPS_updated = 1; + } + if (pInfo->active_SPS.seq_parameter_set_id != 0xff) { + h264_Parse_Copy_Pps_To_DDR(pInfo, &pInfo->active_PPS, pInfo->active_PPS.pic_parameter_set_id); + pInfo->got_start = 1; + if (pInfo->sei_information.recovery_point) + { + pInfo->img.recovery_point_found |= 2; + + //// Enable the RP recovery if no IDR ---Cisco + if ((pInfo->img.recovery_point_found & 1)==0) + pInfo->sei_rp_received = 1; + } + } + else + { + h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); + } + } else { + if (old_sps_id < MAX_NUM_SPS) + h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); + if (old_pps_id < MAX_NUM_PPS) + h264_Parse_Copy_Pps_From_DDR(pInfo, &(pInfo->active_PPS), old_pps_id); + } + VTRACE("pInfo->active_PPS.seq_parameter_set_id = %d", pInfo->active_PPS.seq_parameter_set_id); + VTRACE("pInfo->active_SPS.seq_parameter_set_id = %d", pInfo->active_SPS.seq_parameter_set_id); + VTRACE("h264_NAL_UNIT_TYPE_PPS ---"); + } //// End of PPS parsing + break; + + + case h264_NAL_UNIT_TYPE_EOSeq: + case
h264_NAL_UNIT_TYPE_EOstream: + + h264_init_dpb(&(pInfo->dpb)); + + pInfo->is_current_workload_done=1; + + status = H264_STATUS_OK; + pInfo->number_of_first_au_info_nal_before_first_slice++; + break; + + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + ///// primary_pic_type + { + uint32_t code = 0xff; + int32_t ret = 0; + ret = viddec_pm_get_bits(parent, (uint32_t *)&(code), 3); + + if (ret != -1) { + //if(pInfo->got_start && (code == 0)) + //{ + //pInfo->img.recovery_point_found |= 4; + //} + pInfo->primary_pic_type_plus_one = (uint8_t)(code)+1; + status = H264_STATUS_OK; + } + pInfo->number_of_first_au_info_nal_before_first_slice++; + break; + } + + case h264_NAL_UNIT_TYPE_Reserved1: + case h264_NAL_UNIT_TYPE_Reserved2: + case h264_NAL_UNIT_TYPE_Reserved3: + case h264_NAL_UNIT_TYPE_Reserved4: + case h264_NAL_UNIT_TYPE_Reserved5: + status = H264_STATUS_OK; + pInfo->number_of_first_au_info_nal_before_first_slice++; + break; + + case h264_NAL_UNIT_TYPE_filler_data: + status = H264_STATUS_OK; + break; + case h264_NAL_UNIT_TYPE_ACP: + break; + case h264_NAL_UNIT_TYPE_SPS_extension: + case h264_NAL_UNIT_TYPE_unspecified: + case h264_NAL_UNIT_TYPE_unspecified2: + status = H264_STATUS_OK; + //nothing + break; + default: + status = H264_STATUS_OK; + break; + } + + //pInfo->old_nal_unit_type = pInfo->nal_unit_type; + switch ( pInfo->nal_unit_type ) + { + case h264_NAL_UNIT_TYPE_IDR: + case h264_NAL_UNIT_TYPE_SLICE: + case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: + case h264_NAL_UNIT_TYPE_SPS: + case h264_NAL_UNIT_TYPE_PPS: + case h264_NAL_UNIT_TYPE_SEI: + case h264_NAL_UNIT_TYPE_EOSeq: + case h264_NAL_UNIT_TYPE_EOstream: + case h264_NAL_UNIT_TYPE_Reserved1: + case h264_NAL_UNIT_TYPE_Reserved2: + case h264_NAL_UNIT_TYPE_Reserved3: + case h264_NAL_UNIT_TYPE_Reserved4: + case h264_NAL_UNIT_TYPE_Reserved5: + { + pInfo->old_nal_unit_type = pInfo->nal_unit_type; + break; + } + default: + break; + } + + return status; +} + +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ +/* ------------------------------------------------------------------------------------------ */ + +void viddec_h264secure_get_context_size(viddec_parser_memory_sizes_t *size) +{ + /* Should return size of my structure */ + size->context_size = sizeof(struct h264_viddec_parser); + size->persist_size = MAX_NUM_SPS * sizeof(seq_param_set_all) + + MAX_NUM_PPS * sizeof(pic_param_set) + + MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE + + sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE; +} + + + +/*--------------------------------------------------------------------------------------------------*/ +// +// The syntax elements reordering_of_pic_nums_idc, abs_diff_pic_num_minus1, and long_term_pic_num +// specify the change from the initial reference picture lists to the reference picture lists to be used +// for decoding the slice + +// reordering_of_pic_nums_idc: +// 0: abs_diff_pic_num_minus1 is present and corresponds to a difference to subtract from a picture number prediction value +// 1: abs_diff_pic_num_minus1 is present and corresponds to a difference to add to a picture number prediction value +// 2: long_term_pic_num is present and specifies the long-term picture number for a reference picture +// 3: End loop for reordering of the initial reference picture list +// 
+/*--------------------------------------------------------------------------------------------------*/ + +h264_Status h264secure_Parse_Ref_Pic_List_Reordering(h264_Info* pInfo, void *newdata, h264_Slice_Header_t *SliceHeader) +{ + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + int32_t reorder= -1; + uint32_t code; + vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) newdata; + + if ((SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) + { + SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag = (uint8_t)(sliceheader_p->parsedSliceHeader.num_reorder_cmds[0] > 0); + VTRACE("sliceheader_p->parsedSliceHeader.num_reorder_cmds[0] = %d", + sliceheader_p->parsedSliceHeader.num_reorder_cmds[0]); + if (SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag) + { + if(sliceheader_p->parsedSliceHeader.num_reorder_cmds[0] > MAX_NUM_REF_FRAMES) { + return H264_SliceHeader_ERROR; + } + for (reorder = 0; reorder < sliceheader_p->parsedSliceHeader.num_reorder_cmds[0]; reorder++) { + code = sliceheader_p->reorder_cmd[reorder]; + SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = code >> 24; + VTRACE("SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[%d] = %d", + reorder, + SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] + ); + if ((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1)) + { + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = code & 0xFFFFFF; + VTRACE("abs_diff_pic_num_minus1 = %d", SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1); + } + else if (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 2) + { + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = code & 0xFFFFFF; + VTRACE("long_term_pic_num = %d", SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num); + } + if (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 3) + { + VTRACE("break here"); + break; + } + } + SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = 3; + } + } + + if (SliceHeader->slice_type == h264_PtypeB) + { + SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag = (uint8_t)(sliceheader_p->parsedSliceHeader.num_reorder_cmds[1] > 0); + VTRACE("sliceheader_p->parsedSliceHeader.num_reorder_cmds[1] = %d", + sliceheader_p->parsedSliceHeader.num_reorder_cmds[1]); + if (SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag) + { + if (sliceheader_p->parsedSliceHeader.num_reorder_cmds[1] > MAX_NUM_REF_FRAMES) { + return H264_SliceHeader_ERROR; + } + for (reorder = 0; reorder < sliceheader_p->parsedSliceHeader.num_reorder_cmds[1]; reorder++) { + code = *(sliceheader_p->reorder_cmd + sliceheader_p->parsedSliceHeader.num_reorder_cmds[0] + reorder); + SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] = code >> 24; + if ((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1)) + { + SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = code & 0xFFFFFF; + } + else if (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 2) + { + SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = code & 0xFFFFFF; + } + if (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 3) + { + break; + } + } + SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] = 
3; + } + } + return H264_STATUS_OK; +} + +h264_Status h264secure_Parse_Pred_Weight_Table(h264_Info* pInfo, void *newdata, h264_Slice_Header_t *SliceHeader) +{ + uint32_t i =0, j=0; + uint8_t flag; + uint32_t weightidx = 0; + vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) newdata; + + SliceHeader->sh_predwttbl.luma_log2_weight_denom = sliceheader_p->parsedSliceHeader.luma_log2_weight_denom; + + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + SliceHeader->sh_predwttbl.chroma_log2_weight_denom = sliceheader_p->parsedSliceHeader.chroma_log2_weight_denom; + } + for (i=0; i< SliceHeader->num_ref_idx_l0_active; i++) + { + flag = ((sliceheader_p->parsedSliceHeader.weights_present[0][0] >> i) & 0x01); + SliceHeader->sh_predwttbl.luma_weight_l0_flag = flag; + if (SliceHeader->sh_predwttbl.luma_weight_l0_flag) + { + SliceHeader->sh_predwttbl.luma_weight_l0[i] = sliceheader_p->weight[weightidx++]; + SliceHeader->sh_predwttbl.luma_offset_l0[i] = sliceheader_p->weight[weightidx++]; + } + else + { + SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0; + } + + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + flag = ((sliceheader_p->parsedSliceHeader.weights_present[0][1] >> i) & 0x01); + SliceHeader->sh_predwttbl.chroma_weight_l0_flag = flag; + if (SliceHeader->sh_predwttbl.chroma_weight_l0_flag) + { + for (j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = sliceheader_p->weight[weightidx++]; + SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = sliceheader_p->weight[weightidx++]; + } + } + else + { + for (j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; + } + } + } + + } + + if (SliceHeader->slice_type == h264_PtypeB) + { + for (i=0; i< SliceHeader->num_ref_idx_l1_active; i++) + { + flag = ((sliceheader_p->parsedSliceHeader.weights_present[1][0] >> i) & 0x01); + SliceHeader->sh_predwttbl.luma_weight_l1_flag = flag; + if (SliceHeader->sh_predwttbl.luma_weight_l1_flag) + { + SliceHeader->sh_predwttbl.luma_weight_l1[i] = sliceheader_p->weight[weightidx++]; + SliceHeader->sh_predwttbl.luma_offset_l1[i] = sliceheader_p->weight[weightidx++]; + } + else + { + SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; + } + + if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0) + { + flag = ((sliceheader_p->parsedSliceHeader.weights_present[1][1] >> i) & 0x01); + SliceHeader->sh_predwttbl.chroma_weight_l1_flag = flag; + if (SliceHeader->sh_predwttbl.chroma_weight_l1_flag) + { + for (j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = sliceheader_p->weight[weightidx++]; + SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = sliceheader_p->weight[weightidx++]; + } + } + else + { + for (j=0; j <2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; + } + } + } + + } + } + + return H264_STATUS_OK; +} ///// End of h264_Parse_Pred_Weight_Table + +h264_Status h264secure_Parse_Dec_Ref_Pic_Marking(h264_Info* pInfo, void *newdata,h264_Slice_Header_t *SliceHeader) +{ + vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) newdata; + + uint8_t i = 0; + uint32_t idx = 0; + uint32_t 
code; + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + SliceHeader->sh_dec_refpic.no_output_of_prior_pics_flag = (uint8_t)sliceheader_p->parsedSliceHeader.flags.bits.no_output_of_prior_pics_flag; + SliceHeader->sh_dec_refpic.long_term_reference_flag = (uint8_t)sliceheader_p->parsedSliceHeader.flags.bits.long_term_reference_flag; + pInfo->img.long_term_reference_flag = SliceHeader->sh_dec_refpic.long_term_reference_flag; + } + else + { + SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag = (uint8_t)(sliceheader_p->parsedSliceHeader.num_mem_man_ops > 0); + VTRACE("SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag = %d", SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag); + /////////////////////////////////////////////////////////////////////////////////////// + //adaptive_ref_pic_marking_mode_flag selects the reference picture marking mode: + // Sliding window reference picture marking mode: a marking mode + // providing a first-in first-out mechanism for short-term reference pictures. + // Adaptive reference picture marking mode: a reference picture + // marking mode providing syntax elements to specify marking of + // reference pictures as "unused for reference" and to assign long-term + // frame indices. + /////////////////////////////////////////////////////////////////////////////////////// + + if (SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) + { + do + { + if (i < NUM_MMCO_OPERATIONS) + { + code = sliceheader_p->pic_marking[idx++]; + SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = (uint8_t)(code >> 24); + if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3)) + { + SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = code & 0xFFFFFF; + } + + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2) + { + SliceHeader->sh_dec_refpic.long_term_pic_num[i] = code & 0xFFFFFF; + } + + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6) + { + SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = code & 0xFFFFFF; + } + + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) { + SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = sliceheader_p->pic_marking[idx++]; + } + + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4) + { + SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = code & 0xFFFFFF; + } + + if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5) + { + pInfo->img.curr_has_mmco_5 = 1; + } + } + + if (i >= NUM_MMCO_OPERATIONS) { + return H264_STATUS_ERROR; + } + + } while (SliceHeader->sh_dec_refpic.memory_management_control_operation[i++] != 0); + } + } + + SliceHeader->sh_dec_refpic.dec_ref_pic_marking_count = i; + + return H264_STATUS_OK; +} + + +uint32_t h264secure_Update_Slice_Header(h264_Info* pInfo, void *newdata, h264_Slice_Header_t *SliceHeader) +{ + h264_Status retStatus = H264_STATUS_OK; + uint8_t data; + vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) newdata; + ///// first_mb_in_slice + SliceHeader->first_mb_in_slice = sliceheader_p->parsedSliceHeader.first_mb_in_slice; + + ///// slice_type + data = sliceheader_p->parsedSliceHeader.slice_type; + SliceHeader->slice_type = (data%5); + if (SliceHeader->slice_type > h264_PtypeI) { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + + SliceHeader->pic_parameter_id =
(uint8_t)sliceheader_p->parsedSliceHeader.pic_parameter_set_id; + retStatus = h264_active_par_set(pInfo, SliceHeader); + + switch (pInfo->active_SPS.profile_idc) + { + case h264_ProfileBaseline: + case h264_ProfileMain: + case h264_ProfileExtended: + pInfo->active_PPS.transform_8x8_mode_flag=0; + pInfo->active_PPS.pic_scaling_matrix_present_flag =0; + pInfo->active_PPS.second_chroma_qp_index_offset = pInfo->active_PPS.chroma_qp_index_offset; + default: + break; + } + + uint32_t code; + int32_t max_mb_num=0; + + SliceHeader->frame_num = (int32_t)sliceheader_p->parsedSliceHeader.frame_num; + + /// Picture structure + SliceHeader->structure = FRAME; + SliceHeader->field_pic_flag = 0; + SliceHeader->bottom_field_flag = 0; + + if (!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag)) + { + /// field_pic_flag + SliceHeader->field_pic_flag = (uint8_t)sliceheader_p->parsedSliceHeader.flags.bits.field_pic_flag; + + if (SliceHeader->field_pic_flag) + { + SliceHeader->bottom_field_flag = (uint8_t)sliceheader_p->parsedSliceHeader.flags.bits.bottom_field_flag; + SliceHeader->structure = SliceHeader->bottom_field_flag? BOTTOM_FIELD: TOP_FIELD; + } + } + + ////// Check whether first_mb_in_slice is valid + if (SliceHeader->structure == FRAME) { + max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs; + } else { + max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs/2; + } + + + if (pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag && (!(pInfo->SliceHeader.field_pic_flag))) { + SliceHeader->first_mb_in_slice <<=1; + } + + if (SliceHeader->first_mb_in_slice >= max_mb_num) { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + + + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + SliceHeader->idr_pic_id = sliceheader_p->parsedSliceHeader.idr_pic_id; + } + + if (pInfo->active_SPS.pic_order_cnt_type == 0) + { + SliceHeader->pic_order_cnt_lsb = (uint32_t)sliceheader_p->parsedSliceHeader.pic_order_cnt_lsb; + + if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt_bottom = sliceheader_p->parsedSliceHeader.delta_pic_order_cnt_bottom; + } + else + { + SliceHeader->delta_pic_order_cnt_bottom = 0; + } + } + + if ((pInfo->active_SPS.pic_order_cnt_type == 1) && !(pInfo->active_SPS.delta_pic_order_always_zero_flag)) + { + SliceHeader->delta_pic_order_cnt[0] = sliceheader_p->parsedSliceHeader.delta_pic_order_cnt[0]; + if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt[1] = sliceheader_p->parsedSliceHeader.delta_pic_order_cnt[1]; + } + } + + if (pInfo->active_PPS.redundant_pic_cnt_present_flag) + { + SliceHeader->redundant_pic_cnt = sliceheader_p->parsedSliceHeader.redundant_pic_cnt; + if (SliceHeader->redundant_pic_cnt > 127) { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + } else { + SliceHeader->redundant_pic_cnt = 0; + } + + int32_t slice_alpha_c0_offset, slice_beta_offset; + uint32_t bits_offset =0, byte_offset =0; + uint8_t is_emul =0; + + /// direct_spatial_mv_pred_flag + if (SliceHeader->slice_type == h264_PtypeB) + { + SliceHeader->direct_spatial_mv_pred_flag = (uint8_t)sliceheader_p->parsedSliceHeader.flags.bits.direct_spatial_mv_pred_flag; + } + else + { + SliceHeader->direct_spatial_mv_pred_flag = 0; + } + // + // Reset ref_idx and override it if present + // + SliceHeader->num_ref_idx_l0_active = pInfo->active_PPS.num_ref_idx_l0_active; + SliceHeader->num_ref_idx_l1_active =
pInfo->active_PPS.num_ref_idx_l1_active; + + if ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP) || (SliceHeader->slice_type == h264_PtypeB)) + { + SliceHeader->num_ref_idx_active_override_flag = (uint8_t)sliceheader_p->parsedSliceHeader.flags.bits.num_ref_idx_active_override_flag; + if (SliceHeader->num_ref_idx_active_override_flag) + { + SliceHeader->num_ref_idx_l0_active = sliceheader_p->parsedSliceHeader.num_ref_active_minus1[0]+ 1; + if (SliceHeader->slice_type == h264_PtypeB) + { + SliceHeader->num_ref_idx_l1_active = sliceheader_p->parsedSliceHeader.num_ref_active_minus1[1]+1; + } + } + } + + if (SliceHeader->slice_type != h264_PtypeB) { + SliceHeader->num_ref_idx_l1_active = 0; + } + + if ((SliceHeader->num_ref_idx_l0_active > MAX_NUM_REF_FRAMES) || (SliceHeader->num_ref_idx_l1_active > MAX_NUM_REF_FRAMES)) + { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + + if (h264secure_Parse_Ref_Pic_List_Reordering(pInfo,newdata,SliceHeader) != H264_STATUS_OK) + { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + + + //// + //// Parse the Pred_weight_table but do not store it, because it will be reparsed in HW + //// + if (((pInfo->active_PPS.weighted_pred_flag) && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) + { + if (h264secure_Parse_Pred_Weight_Table(pInfo,newdata, SliceHeader) != H264_STATUS_OK) + { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + } + + + + //// + //// Parse Ref_pic marking if present + //// + if (SliceHeader->nal_ref_idc != 0) + { + if (h264secure_Parse_Dec_Ref_Pic_Marking(pInfo, newdata, SliceHeader) != H264_STATUS_OK) + { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + } + + if ((pInfo->active_PPS.entropy_coding_mode_flag) && (SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) + { + SliceHeader->cabac_init_idc = sliceheader_p->parsedSliceHeader.cabac_init_idc; + } + else + { + SliceHeader->cabac_init_idc = 0; + } + + if (SliceHeader->cabac_init_idc > 2) + { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + + SliceHeader->slice_qp_delta = sliceheader_p->parsedSliceHeader.slice_qp_delta; + + if ( (SliceHeader->slice_qp_delta > (25-pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26+pInfo->active_PPS.pic_init_qp_minus26))) + { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + }
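The slice_qp_delta bounds just checked are the spec constraint 0 <= SliceQPY <= 51 (H.264 7.4.3) solved for the delta, since SliceQPY = 26 + pic_init_qp_minus26 + slice_qp_delta. As a standalone check with hypothetical values:

/* Equivalent range test, written directly in terms of SliceQPY. */
int pic_init_qp_minus26 = 0;     /* hypothetical PPS value   */
int slice_qp_delta = -10;        /* hypothetical slice value */
int slice_qp_y = 26 + pic_init_qp_minus26 + slice_qp_delta;   /* 16 */
int in_range = (slice_qp_y >= 0) && (slice_qp_y <= 51);       /* 1: accepted */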
+ + if ((SliceHeader->slice_type == h264_PtypeSP)|| (SliceHeader->slice_type == h264_PtypeSI) ) + { + if (SliceHeader->slice_type == h264_PtypeSP) + { + SliceHeader->sp_for_switch_flag = 0; + } + SliceHeader->slice_qs_delta = sliceheader_p->parsedSliceHeader.slice_qs_delta; + if ( (SliceHeader->slice_qs_delta > (25-pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) + { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + } + if (pInfo->active_PPS.deblocking_filter_control_present_flag) + { + SliceHeader->disable_deblocking_filter_idc = sliceheader_p->parsedSliceHeader.disable_deblocking_filter_idc; + if (SliceHeader->disable_deblocking_filter_idc != 1) + { + SliceHeader->slice_alpha_c0_offset_div2 = sliceheader_p->parsedSliceHeader.slice_alpha_c0_offset_div2; + slice_alpha_c0_offset = SliceHeader->slice_alpha_c0_offset_div2 << 1; + if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12) + { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + + SliceHeader->slice_beta_offset_div2 = sliceheader_p->parsedSliceHeader.slice_beta_offset_div2; + slice_beta_offset = SliceHeader->slice_beta_offset_div2 << 1; + if (slice_beta_offset < -12 || slice_beta_offset > 12) + { + retStatus = H264_STATUS_NOTSUPPORT; + return retStatus; + } + } + else + { + SliceHeader->slice_alpha_c0_offset_div2 = 0; + SliceHeader->slice_beta_offset_div2 = 0; + } + } + + retStatus = H264_STATUS_OK; + return retStatus; +} +uint32_t viddec_h264secure_update(void *parent, void *data, uint32_t size) +{ + viddec_pm_cxt_t * parser_cxt = (viddec_pm_cxt_t *)parent; + struct h264_viddec_parser* parser = (struct h264_viddec_parser*) &parser_cxt->codec_data[0]; + h264_Info * pInfo = &(parser->info); + + h264_Status status = H264_STATUS_ERROR; + vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) data; + + pInfo->img.g_new_frame = 0; + pInfo->push_to_cur = 1; + pInfo->is_current_workload_done =0; + pInfo->nal_unit_type = 0; + pInfo->nal_unit_type = sliceheader_p->parsedSliceHeader.nal_unit_type; + + h264_Slice_Header_t next_SliceHeader; + + /// Reset the next slice header + h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t)); + next_SliceHeader.nal_ref_idc = sliceheader_p->parsedSliceHeader.nal_ref_idc; + + if ( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start)) + { + pInfo->img.recovery_point_found |=4; + } + pInfo->primary_pic_type_plus_one = 0; + + //////////////////////////////////////////////////////////////////////////// + // Step 2: Parsing the slice header + //////////////////////////////////////////////////////////////////////////// + /// PWT + pInfo->h264_pwt_start_byte_offset=0; + pInfo->h264_pwt_start_bit_offset=0; + pInfo->h264_pwt_end_byte_offset=0; + pInfo->h264_pwt_end_bit_offset=0; + pInfo->h264_pwt_enabled =0; + /// IDR flag + next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); + + /// Pass slice header + status = h264secure_Update_Slice_Header(pInfo, sliceheader_p, &next_SliceHeader); + + pInfo->sei_information.recovery_point = 0; + pInfo->img.current_slice_num++; + + + //////////////////////////////////////////////////////////////////////////// + // Step 3: Processing if a new picture is coming + // 1) if it's the second field + // 2) if it's a new frame + //////////////////////////////////////////////////////////////////////////// + //AssignQuantParam(pInfo); + if (h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader)) + { + // + ///----------------- New picture boundary detected-------------------- + // + pInfo->img.g_new_pic++; + + // + // Complete the previous picture + h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old + + // + // Update slice structures: + h264_update_old_slice(pInfo, next_SliceHeader); //cur->old; next->cur; + + // + // 1) if resolution change: reset dpb + // 2) else: init frame store + h264_update_img_info(pInfo); //img, dpb + + // + ///----------------- New frame boundary detected-------------------- + // + pInfo->img.second_field = h264_is_second_field(pInfo); + if (pInfo->img.second_field == 0) + { + pInfo->img.g_new_frame = 1; + h264_dpb_update_queue_dangling_field(pInfo); + h264_dpb_gaps_in_frame_num_mem_management(pInfo); + } + /// Decoding POC + h264_hdr_decoding_poc (pInfo, 0, 0); + // + /// Init the Frame Store for the next frame + h264_dpb_init_frame_store (pInfo); + pInfo->img.current_slice_num = 1; + + if (pInfo->SliceHeader.first_mb_in_slice != 0) + { + ////Coming here means we lost slices at the
beginning, since no FMO support + pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17); + } + } + else ///////////////////////////////////////////////////// If Not a picture start + { + /// Update slice structures: cur->old; next->cur; + h264_update_old_slice(pInfo, next_SliceHeader); + /// 1) if resolution change: reset dpb + /// 2) else: update img info + h264_update_img_info(pInfo); + } + ////////////////////////////////////////////////////////////// + // Step 4: DPB reference list init and reordering + ////////////////////////////////////////////////////////////// + + //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field + h264_update_frame_type(pInfo); + + h264_dpb_update_ref_lists( pInfo); + + return status; +} + + diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 32abd4e..94d3605 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -34,6 +34,7 @@ ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_USE_GEN_HW)),) LOCAL_CFLAGS += -DUSE_AVC_SHORT_FORMAT -DUSE_GEN_HW endif + PLATFORM_USE_HYBRID_DRIVER := \ baytrail @@ -41,6 +42,13 @@ ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_USE_HYBRID_DRIVER)),) LOCAL_CFLAGS += -DUSE_HYBRID_DRIVER endif +PLATFORM_SUPPORT_SLICE_HEADER_PARSER := \ + merrifield + +ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_SLICE_HEADER_PARSER)),) + LOCAL_CFLAGS += -DUSE_SLICE_HEADER_PARSING +endif + #LOCAL_LDLIBS += -lpthread LOCAL_SHARED_LIBRARIES := \ diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp old mode 100644 new mode 100755 index 49c2efd..c3f3bd4 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -514,6 +514,7 @@ Decode_Status VideoDecoderAVC::updateReferenceFrames(vbp_picture_data_h264 *picD // update DPB from the reference list in each slice. for (uint32_t slice = 0; slice < picData->num_slices; slice++) { sliceParam = &(picData->slc_data[slice].slc_parms); + for (int32_t list = 0; list < 2; list++) { refList = (list == 0) ? sliceParam->RefPicList0 : sliceParam->RefPicList1; @@ -703,6 +704,8 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { mVideoFormatInfo.height = height; } + + // video_range has default value of 0. 
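// (video_full_range_flag comes from the H.264 VUI: 0 = limited 16-235 range,
// 1 = full 0-255 range.)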
mVideoFormatInfo.videoRange = data->codec_data->video_full_range_flag; diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h old mode 100644 new mode 100755 index efc9f97..880b659 --- a/videodecoder/VideoDecoderAVC.h +++ b/videodecoder/VideoDecoderAVC.h @@ -39,15 +39,15 @@ public: virtual Decode_Status decode(VideoDecodeBuffer *buffer); protected: - Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data); - Decode_Status beginDecodingFrame(vbp_data_h264 *data); - Decode_Status continueDecodingFrame(vbp_data_h264 *data); + virtual Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data); + virtual Decode_Status beginDecodingFrame(vbp_data_h264 *data); + virtual Decode_Status continueDecodingFrame(vbp_data_h264 *data); virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); Decode_Status setReference(VASliceParameterBufferH264 *sliceParam); Decode_Status updateDPB(VAPictureParameterBufferH264 *picParam); Decode_Status updateReferenceFrames(vbp_picture_data_h264 *picData); void removeReferenceFromDPB(VAPictureParameterBufferH264 *picParam); - inline uint32_t getPOC(VAPictureH264 *pic); // Picture Order Count + uint32_t getPOC(VAPictureH264 *pic); // Picture Order Count inline VASurfaceID findSurface(VAPictureH264 *pic); inline VideoSurfaceBuffer* findSurfaceBuffer(VAPictureH264 *pic); inline VideoSurfaceBuffer* findRefSurfaceBuffer(VAPictureH264 *pic); diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp old mode 100644 new mode 100755 index 0f4c297..7707fce --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -98,6 +98,7 @@ Decode_Status VideoDecoderBase::start(VideoConfigBuffer *buffer) { } if ((int32_t)mParserType != VBP_INVALID) { + ITRACE("mParserType = %d", mParserType); if (vbp_open(mParserType, &mParserHandle) != VBP_OK) { ETRACE("Failed to open VBP parser."); return DECODE_NO_PARSER; @@ -1041,6 +1042,8 @@ Decode_Status VideoDecoderBase::parseBuffer(uint8_t *buffer, int32_t size, bool return DECODE_SUCCESS; } + + Decode_Status VideoDecoderBase::mapSurface(void) { VAStatus vaStatus = VA_STATUS_SUCCESS; VAImage image; @@ -1288,7 +1291,7 @@ void VideoDecoderBase::querySurfaceRenderStatus(VideoSurfaceBuffer* surface) { } // This function should be called before start() to load different type of parsers -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) Decode_Status VideoDecoderBase::setParserType(_vbp_parser_type type) { if ((int32_t)type != VBP_INVALID) { ITRACE("Parser Type = %d", (int32_t)type); @@ -1316,6 +1319,18 @@ Decode_Status VideoDecoderBase::updateBuffer(uint8_t *buffer, int32_t size, void return DECODE_SUCCESS; } +Decode_Status VideoDecoderBase::queryBuffer(void** vbpData) { + if (mParserHandle == NULL) { + return DECODE_NO_PARSER; + } + + uint32_t vbpStatus; + vbpStatus = vbp_query(mParserHandle, vbpData); + CHECK_VBP_STATUS("vbp_query"); + + return DECODE_SUCCESS; +} + Decode_Status VideoDecoderBase::getCodecSpecificConfigs(VAProfile profile, VAConfigID *config) { VAStatus vaStatus; VAConfigAttrib attrib; diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h old mode 100644 new mode 100755 index 80dd518..ab9193e --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -90,8 +90,9 @@ protected: virtual Decode_Status getRawDataFromSurface(VideoRenderBuffer *renderBuffer = NULL, uint8_t *pRawData = NULL, uint32_t *pSize = 
NULL, bool internal = true); -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT) || (defined USE_SLICE_HEADER_PARSING) Decode_Status updateBuffer(uint8_t *buffer, int32_t size, void** vbpData); + Decode_Status queryBuffer(void **vbpData); Decode_Status setParserType(_vbp_parser_type type); virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID *config); #endif diff --git a/videodecoder/VideoDecoderHost.cpp b/videodecoder/VideoDecoderHost.cpp index 973ab22..1c30fe8 100644 --- a/videodecoder/VideoDecoderHost.cpp +++ b/videodecoder/VideoDecoderHost.cpp @@ -25,9 +25,11 @@ #include "VideoDecoderWMV.h" #include "VideoDecoderMPEG4.h" #include "VideoDecoderAVC.h" + #ifdef USE_INTEL_SECURE_AVC #include "VideoDecoderAVCSecure.h" #endif + #ifdef USE_HW_VP8 #include "VideoDecoderVP8.h" #endif @@ -71,6 +73,7 @@ IVideoDecoder* createVideoDecoder(const char* mimeType) { return (IVideoDecoder *)p; } #endif + else { ETRACE("Unknown mime type: %s", mimeType); } diff --git a/videodecoder/VideoDecoderTrace.h b/videodecoder/VideoDecoderTrace.h old mode 100644 new mode 100755 diff --git a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp old mode 100644 new mode 100755 index ab7bc7e..671e8bd --- a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp @@ -21,206 +21,417 @@ * approved by Intel in writing. * */ - -#include "VideoDecoderAVCSecure.h" +#include +#include "VideoDecoderBase.h" +#include "VideoDecoderAVC.h" #include "VideoDecoderTrace.h" -#include - +#include "vbp_loader.h" +#include "VideoDecoderAVCSecure.h" -#define STARTCODE_00 0x00 -#define STARTCODE_01 0x01 +#define MAX_SLICEHEADER_BUFFER_SIZE 4096 #define STARTCODE_PREFIX_LEN 3 #define NALU_TYPE_MASK 0x1F - - -// mask for little endian, to mast the second and fourth bytes in the byte stream -#define STARTCODE_MASK0 0xFF000000 //0x00FF0000 -#define STARTCODE_MASK1 0x0000FF00 //0x000000FF - - -typedef enum { - NAL_UNIT_TYPE_unspecified0 = 0, - NAL_UNIT_TYPE_SLICE, - NAL_UNIT_TYPE_DPA, - NAL_UNIT_TYPE_DPB, - NAL_UNIT_TYPE_DPC, - NAL_UNIT_TYPE_IDR, - NAL_UNIT_TYPE_SEI, - NAL_UNIT_TYPE_SPS, - NAL_UNIT_TYPE_PPS, - NAL_UNIT_TYPE_Acc_unit_delimiter, - NAL_UNIT_TYPE_EOSeq, - NAL_UNIT_TYPE_EOstream, - NAL_UNIT_TYPE_filler_data, - NAL_UNIT_TYPE_SPS_extension, - NAL_UNIT_TYPE_Reserved14, - NAL_UNIT_TYPE_Reserved15, - NAL_UNIT_TYPE_Reserved16, - NAL_UNIT_TYPE_Reserved17, - NAL_UNIT_TYPE_Reserved18, - NAL_UNIT_TYPE_ACP, - NAL_UNIT_TYPE_Reserved20, - NAL_UNIT_TYPE_Reserved21, - NAL_UNIT_TYPE_Reserved22, - NAL_UNIT_TYPE_Reserved23, - NAL_UNIT_TYPE_unspecified24, -} NAL_UNIT_TYPE; - -#ifndef min -#define min(X, Y) ((X) <(Y) ? 
(X) : (Y)) -#endif - - +#define MAX_NALU_HEADER_BUFFER 8192 static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01}; +/* H264 start code values */ +typedef enum _h264_nal_unit_type +{ + h264_NAL_UNIT_TYPE_unspecified = 0, + h264_NAL_UNIT_TYPE_SLICE, + h264_NAL_UNIT_TYPE_DPA, + h264_NAL_UNIT_TYPE_DPB, + h264_NAL_UNIT_TYPE_DPC, + h264_NAL_UNIT_TYPE_IDR, + h264_NAL_UNIT_TYPE_SEI, + h264_NAL_UNIT_TYPE_SPS, + h264_NAL_UNIT_TYPE_PPS, + h264_NAL_UNIT_TYPE_Acc_unit_delimiter, + h264_NAL_UNIT_TYPE_EOSeq, + h264_NAL_UNIT_TYPE_EOstream, + h264_NAL_UNIT_TYPE_filler_data, + h264_NAL_UNIT_TYPE_SPS_extension, + h264_NAL_UNIT_TYPE_ACP = 19, + h264_NAL_UNIT_TYPE_Slice_extension = 20 +} h264_nal_unit_type_t; VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) - : VideoDecoderAVC(mimeType), - mNaluHeaderBuffer(NULL), - mInputBuffer(NULL) { - - memset(&mMetadata, 0, sizeof(NaluMetadata)); - memset(&mByteStream, 0, sizeof(NaluByteStream)); -} - -VideoDecoderAVCSecure::~VideoDecoderAVCSecure() { + : VideoDecoderAVC(mimeType){ + mFrameSize = 0; + mFrameData = NULL; + mIsEncryptData = 0; + mClearData = NULL; + setParserType(VBP_H264SECURE); + mFrameIdx = 0; } Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { + VTRACE("VideoDecoderAVCSecure::start"); Decode_Status status = VideoDecoderAVC::start(buffer); if (status != DECODE_SUCCESS) { return status; } - mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER]; - mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER]; - mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; - - if (mMetadata.naluInfo == NULL || - mByteStream.byteStream == NULL || - mNaluHeaderBuffer == NULL) { - ETRACE("Failed to allocate memory."); - // TODO: release all allocated memory + mClearData = new uint8_t [MAX_NALU_HEADER_BUFFER]; + if (mClearData == NULL) { + ETRACE("Failed to allocate memory for mClearData"); return DECODE_MEMORY_FAIL; } + return status; } void VideoDecoderAVCSecure::stop(void) { + VTRACE("VideoDecoderAVCSecure::stop"); VideoDecoderAVC::stop(); - if (mMetadata.naluInfo) { - delete [] mMetadata.naluInfo; - mMetadata.naluInfo = NULL; + if (mClearData) { + delete [] mClearData; + mClearData = NULL; } +} + +Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { + VTRACE("VideoDecoderAVCSecure::decode"); + Decode_Status status; + vbp_data_h264 *data = NULL; + if (buffer == NULL) { + return DECODE_INVALID_DATA; + } + + int32_t clear_data_size = 0; + uint8_t *clear_data = NULL; + uint8_t naluType = 0; + + int32_t num_nalus; + int32_t nalu_offset; + int32_t offset; + uint8_t *data_src; + uint8_t *nalu_data; + uint32_t nalu_size; +// uint32_t testsize; +// uint8_t *testdata; + if (buffer->flag & IS_SECURE_DATA) { + VTRACE("Decoding protected video ..."); + mIsEncryptData = 1; + + mFrameData = buffer->data; + mFrameSize = buffer->size; + VTRACE("mFrameData = %p, mFrameSize = %d", mFrameData, mFrameSize); +#if 0 + testsize = *(uint32_t *)(buffer->data + buffer->size); + testdata = (uint8_t *)(buffer->data + buffer->size + sizeof(uint32_t)); + for (int i = 0; i < testsize; i++) { + VTRACE("testdata[%d] = 0x%x", i, testdata[i]); + } +#endif + num_nalus = *(uint32_t *)(buffer->data + buffer->size + sizeof(uint32_t)); + VTRACE("num_nalus = %d", num_nalus); + offset = 4; + for (int32_t i = 0; i < num_nalus; i++) { + VTRACE("%d nalu, offset = %d", i, offset); + data_src = buffer->data + buffer->size + sizeof(uint32_t) + offset; + nalu_size = *(uint32_t *)(data_src + 2 * sizeof(uint32_t)); + nalu_size = 
(nalu_size + 0x03) & (~0x03); + + nalu_data = data_src + 3 *sizeof(uint32_t); + naluType = nalu_data[0] & NALU_TYPE_MASK; + offset += nalu_size + 3 *sizeof(uint32_t); + VTRACE("naluType = 0x%x", naluType); + VTRACE("nalu_size = %d, nalu_data = %p", nalu_size, nalu_data); + + if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) { + ETRACE("Slice NALU received!"); + return DECODE_INVALID_DATA; + } + + else if (naluType >= h264_NAL_UNIT_TYPE_SEI && naluType <= h264_NAL_UNIT_TYPE_PPS) { + memcpy(mClearData + clear_data_size, + startcodePrefix, + STARTCODE_PREFIX_LEN); + clear_data_size += STARTCODE_PREFIX_LEN; + memcpy(mClearData + clear_data_size, + nalu_data, + nalu_size); + clear_data_size += nalu_size; + } else { + ETRACE("Failure: DECODE_FRAME_DROPPED"); + return DECODE_FRAME_DROPPED; + } + } + clear_data = mClearData; - if (mByteStream.byteStream) { - delete [] mByteStream.byteStream; - mByteStream.byteStream = NULL; + } else { + VTRACE("Decoding clear video ..."); + mIsEncryptData = 0; + mFrameSize = buffer->size; + mFrameData = buffer->data; + clear_data = buffer->data; + clear_data_size = buffer->size; + } + if (clear_data_size > 0) { + status = VideoDecoderBase::parseBuffer( + clear_data, + clear_data_size, + false, + (void**)&data); + CHECK_STATUS("VideoDecoderBase::parseBuffer"); + } else { + status = VideoDecoderBase::queryBuffer((void**)&data); + CHECK_STATUS("VideoDecoderBase::queryBuffer"); } - if (mNaluHeaderBuffer) { - delete [] mNaluHeaderBuffer; - mNaluHeaderBuffer = NULL; + if (!mVAStarted) { + if (data->has_sps && data->has_pps) { + status = startVA(data); + CHECK_STATUS("startVA"); + } else { + WTRACE("Can't start VA as either SPS or PPS is still not available."); + return DECODE_SUCCESS; + } } + status = decodeFrame(buffer, data); + + return status; } -Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { +Decode_Status VideoDecoderAVCSecure::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data) { + VTRACE("VideoDecoderAVCSecure::decodeFrame"); Decode_Status status; - int32_t sizeAccumulated = 0; - int32_t sizeLeft = 0; - uint8_t *pByteStream = NULL; - NaluInfo *pNaluInfo = mMetadata.naluInfo; + VTRACE("data->has_sps = %d, data->has_pps = %d", data->has_sps, data->has_pps); + +#if 0 + // Don't remove the following codes, it can be enabled for debugging DPB. 
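// (When enabled, the loop below prints each picture's top/bottom POC,
// its flags, and whether it is held as a short- or long-term reference.)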
+ for (unsigned int i = 0; i < data->num_pictures; i++) { + VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic; + VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d, reference = %d", + i, + buffer->timeStamp/1E6, + pic.TopFieldOrderCnt, + pic.BottomFieldOrderCnt, + pic.flags, + (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || + (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)); + } +#endif - if (buffer->flag & IS_SECURE_DATA) { - // NALU headers are appended to encrypted video bitstream - // |...encrypted video bitstream (16 bytes aligned)...| 4 bytes of header size |...NALU headers..| - pByteStream = buffer->data + buffer->size + 4; - sizeLeft = *(int32_t *)(buffer->data + buffer->size); - VTRACE("%s sizeLeft: %d buffer->size: %#x", __func__, sizeLeft, buffer->size); - mInputBuffer = buffer->data; + if (data->new_sps || data->new_pps) { + status = handleNewSequence(data); + CHECK_STATUS("handleNewSequence"); + } + + uint64_t lastPTS = mCurrentPTS; + mCurrentPTS = buffer->timeStamp; + + // start decoding a new frame + status = acquireSurfaceBuffer(); + CHECK_STATUS("acquireSurfaceBuffer"); + + if (mFrameSize > 0) { + status = parseSliceHeader(buffer, data); + } + if (status != DECODE_SUCCESS) { + endDecodingFrame(true); + return status; + } + + status = beginDecodingFrame(data); + CHECK_STATUS("beginDecodingFrame"); + + // finish decoding the last frame + status = endDecodingFrame(false); + CHECK_STATUS("endDecodingFrame"); + + if (isNewFrame(data, lastPTS == mCurrentPTS) == 0) { + ETRACE("Can't handle interlaced frames yet"); + return DECODE_FAIL; + } + + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderAVCSecure::beginDecodingFrame(vbp_data_h264 *data) { + VTRACE("VideoDecoderAVCSecure::beginDecodingFrame"); + Decode_Status status; + VAPictureH264 *picture = &(data->pic_data[0].pic_parms->CurrPic); + if ((picture->flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || + (picture->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { + mAcquiredBuffer->referenceFrame = true; } else { - status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream); - CHECK_STATUS("parseAnnexBStream"); - pByteStream = mByteStream.byteStream; - sizeLeft = mByteStream.streamPos; - mInputBuffer = buffer->data; + mAcquiredBuffer->referenceFrame = false; } - if (sizeLeft < 4) { - ETRACE("Not enough data to read number of NALU."); - return DECODE_INVALID_DATA; + + if (picture->flags & VA_PICTURE_H264_TOP_FIELD) { + mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD; + } else { + mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE; } - // read number of NALU - memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t)); - pByteStream += 4; - sizeLeft -= 4; + mAcquiredBuffer->renderBuffer.flag = 0; + mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS; + mAcquiredBuffer->pictureOrder = getPOC(picture); + + status = continueDecodingFrame(data); + return status; +} - if (mMetadata.naluNumber == 0) { - WTRACE("Number of NALU is ZERO!"); - return DECODE_SUCCESS; +Decode_Status VideoDecoderAVCSecure::continueDecodingFrame(vbp_data_h264 *data) { + VTRACE("VideoDecoderAVCSecure::continueDecodingFrame"); + Decode_Status status; + vbp_picture_data_h264 *picData = data->pic_data; + + if (mAcquiredBuffer == NULL || mAcquiredBuffer->renderBuffer.surface == VA_INVALID_SURFACE) { + ETRACE("mAcquiredBuffer is NULL. 
Implementation bug."); + return DECODE_FAIL; } + VTRACE("data->num_pictures = %d", data->num_pictures); + for (uint32_t picIndex = 0; picIndex < data->num_pictures; picIndex++, picData++) { + if (picData == NULL || picData->pic_parms == NULL || picData->slc_data == NULL || picData->num_slices == 0) { + return DECODE_PARSER_FAIL; + } - for (int32_t i = 0; i < mMetadata.naluNumber; i++) { - if (sizeLeft < 12) { - ETRACE("Not enough data to parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft); - return DECODE_INVALID_DATA; + if (picIndex > 0 && + (picData->pic_parms->CurrPic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) == 0) { + ETRACE("Packed frame is not supported yet!"); + return DECODE_FAIL; } - sizeLeft -= 12; - // read NALU offset - memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t)); - pByteStream += 4; + VTRACE("picData->num_slices = %d", picData->num_slices); + for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) { + status = decodeSlice(data, picIndex, sliceIndex); + if (status != DECODE_SUCCESS) { + endDecodingFrame(true); + // remove current frame from DPB as it can't be decoded. + removeReferenceFromDPB(picData->pic_parms); + return status; + } + } + } + return DECODE_SUCCESS; +} - // read NALU size - memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t)); - pByteStream += 4; +Decode_Status VideoDecoderAVCSecure::parseSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data) { + Decode_Status status; + VAStatus vaStatus; - // read NALU header length - memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t)); - pByteStream += 4; + VABufferID sliceheaderbufferID; + VABufferID pictureparameterparsingbufferID; + VABufferID mSlicebufferID; + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); - if (sizeLeft < pNaluInfo->naluHeaderLen) { - ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen); - return DECODE_INVALID_DATA; - } + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAParseSliceHeaderGroupBufferType, + MAX_SLICEHEADER_BUFFER_SIZE, + 1, + NULL, + &sliceheaderbufferID); + CHECK_VA_STATUS("vaCreateSliceHeaderGroupBuffer"); + + void *sliceheaderbuf; + vaStatus = vaMapBuffer( + mVADisplay, + sliceheaderbufferID, + &sliceheaderbuf); + CHECK_VA_STATUS("vaMapBuffer"); - sizeLeft -= pNaluInfo->naluHeaderLen; + memset(sliceheaderbuf, 0, MAX_SLICEHEADER_BUFFER_SIZE); - if (pNaluInfo->naluHeaderLen) { - // copy start code prefix to buffer - memcpy(mNaluHeaderBuffer + sizeAccumulated, - startcodePrefix, - STARTCODE_PREFIX_LEN); - sizeAccumulated += STARTCODE_PREFIX_LEN; + vaStatus = vaUnmapBuffer( + mVADisplay, + sliceheaderbufferID); + CHECK_VA_STATUS("vaUnmapBuffer"); - // copy NALU header - memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen); - pByteStream += pNaluInfo->naluHeaderLen; - sizeAccumulated += pNaluInfo->naluHeaderLen; - } else { - WTRACE("header len is zero for NALU %d", i); - } + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + mFrameSize, //size + 1, //num_elements + mFrameData, + &mSlicebufferID); + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); - // for next NALU - pNaluInfo++; - } + data->pic_parse_buffer->frame_buf_id = mSlicebufferID; + data->pic_parse_buffer->slice_headers_buf_id = sliceheaderbufferID; + data->pic_parse_buffer->frame_size = mFrameSize; + 
data->pic_parse_buffer->slice_headers_size = MAX_SLICEHEADER_BUFFER_SIZE; + +#if 0 + + VTRACE("flags.bits.frame_mbs_only_flag = %d", data->pic_parse_buffer->flags.bits.frame_mbs_only_flag); + VTRACE("flags.bits.pic_order_present_flag = %d", data->pic_parse_buffer->flags.bits.pic_order_present_flag); + VTRACE("flags.bits.delta_pic_order_always_zero_flag = %d", data->pic_parse_buffer->flags.bits.delta_pic_order_always_zero_flag); + VTRACE("flags.bits.redundant_pic_cnt_present_flag = %d", data->pic_parse_buffer->flags.bits.redundant_pic_cnt_present_flag); + VTRACE("flags.bits.weighted_pred_flag = %d", data->pic_parse_buffer->flags.bits.weighted_pred_flag); + VTRACE("flags.bits.entropy_coding_mode_flag = %d", data->pic_parse_buffer->flags.bits.entropy_coding_mode_flag); + VTRACE("flags.bits.deblocking_filter_control_present_flag = %d", data->pic_parse_buffer->flags.bits.deblocking_filter_control_present_flag); + VTRACE("flags.bits.weighted_bipred_idc = %d", data->pic_parse_buffer->flags.bits.weighted_bipred_idc); + + VTRACE("pic_parse_buffer->expected_pic_parameter_set_id = %d", data->pic_parse_buffer->expected_pic_parameter_set_id); + VTRACE("pic_parse_buffer->num_slice_groups_minus1 = %d", data->pic_parse_buffer->num_slice_groups_minus1); + VTRACE("pic_parse_buffer->chroma_format_idc = %d", data->pic_parse_buffer->chroma_format_idc); + VTRACE("pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4 = %d", data->pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4); + VTRACE("pic_parse_buffer->pic_order_cnt_type = %d", data->pic_parse_buffer->pic_order_cnt_type); + VTRACE("pic_parse_buffer->residual_colour_transform_flag = %d", data->pic_parse_buffer->residual_colour_transform_flag); + VTRACE("pic_parse_buffer->num_ref_idc_l0_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l0_active_minus1); + VTRACE("pic_parse_buffer->num_ref_idc_l1_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l1_active_minus1); +#endif + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAParsePictureParameterBufferType, + sizeof(VAParsePictureParameterBuffer), + 1, + data->pic_parse_buffer, + &pictureparameterparsingbufferID); + CHECK_VA_STATUS("vaCreatePictureParameterParsingBuffer"); + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + &pictureparameterparsingbufferID, + 1); + CHECK_VA_STATUS("vaRenderPicture"); + + vaStatus = vaMapBuffer( + mVADisplay, + sliceheaderbufferID, + &sliceheaderbuf); + CHECK_VA_STATUS("vaMapBuffer"); + + status = updateSliceParameter(data,sliceheaderbuf); + CHECK_STATUS("processSliceHeader"); - buffer->data = mNaluHeaderBuffer; - buffer->size = sizeAccumulated; + vaStatus = vaUnmapBuffer( + mVADisplay, + sliceheaderbufferID); + CHECK_VA_STATUS("vaUnmapBuffer"); - return VideoDecoderAVC::decode(buffer); + return DECODE_SUCCESS; } +Decode_Status VideoDecoderAVCSecure::updateSliceParameter(vbp_data_h264 *data, void *sliceheaderbuf) { + VTRACE("VideoDecoderAVCSecure::updateSliceParameter"); + Decode_Status status; + status = VideoDecoderBase::updateBuffer( + (uint8_t *)sliceheaderbuf, + MAX_SLICEHEADER_BUFFER_SIZE, + (void**)&data); + CHECK_STATUS("updateBuffer"); + return DECODE_SUCCESS; +} Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { - Decode_Status status; VAStatus vaStatus; uint32_t bufferIDCount = 0; - // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data - VABufferID bufferIDs[4]; + // maximum 3 buffers to render a slice: picture parameter, 
IQMatrix, slice parameter + VABufferID bufferIDs[3]; vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); @@ -231,19 +442,8 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p // either condition indicates start of a new frame if (sliceParam->first_mb_in_slice != 0) { WTRACE("The first slice is lost."); - // TODO: handle the first slice lost } - if (mDecodingFrame) { - // interlace content, complete decoding the first field - vaStatus = vaEndPicture(mVADisplay, mVAContext); - CHECK_VA_STATUS("vaEndPicture"); - - // for interlace content, top field may be valid only after the second field is parsed - mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; - } - - // Check there is no reference frame loss before decoding a frame - + VTRACE("Current frameidx = %d", mFrameIdx++); // Update the reference frames and surface IDs for DPB and current frame status = updateDPB(picParam); CHECK_STATUS("updateDPB"); @@ -252,10 +452,6 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p status = updateReferenceFrames(picData); CHECK_STATUS("updateReferenceFrames"); - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); - CHECK_VA_STATUS("vaBeginPicture"); - - // start decoding a frame mDecodingFrame = true; vaStatus = vaCreateBuffer( @@ -284,33 +480,7 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p status = setReference(sliceParam); CHECK_STATUS("setReference"); - // find which naluinfo is correlated to current slice - int naluIndex = 0; - uint32_t accumulatedHeaderLen = 0; - uint32_t headerLen = 0; - for (; naluIndex < mMetadata.naluNumber; naluIndex++) { - headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen; - if (headerLen == 0) { - WTRACE("lenght of current NAL unit is 0."); - continue; - } - accumulatedHeaderLen += STARTCODE_PREFIX_LEN; - if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) { - break; - } - accumulatedHeaderLen += headerLen; - } - - if (sliceData->slice_offset != accumulatedHeaderLen) { - WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen); - } - - sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen; - uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset; - uint32_t slice_offset_shift = sliceOffset % 16; - sliceParam->slice_data_offset += slice_offset_shift; - sliceData->slice_size = (sliceParam->slice_data_size + slice_offset_shift + 0xF) & ~0xF; - + sliceParam->slice_data_size = mFrameSize; vaStatus = vaCreateBuffer( mVADisplay, mVAContext, @@ -322,31 +492,6 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); bufferIDCount++; - // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to start codes of current NAL unit - // offset points to first byte of NAL unit - - if (mInputBuffer != NULL) { - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VASliceDataBufferType, - sliceData->slice_size, //Slice size - 1, // num_elements - mInputBuffer + sliceOffset - slice_offset_shift, - &bufferIDs[bufferIDCount]); - } else { - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VAProtectedSliceDataBufferType, - sliceData->slice_size, //size - 1, //num_elements - (uint8_t*)sliceOffset, // IMR offset - &bufferIDs[bufferIDCount]); - } - 
CHECK_VA_STATUS("vaCreateSliceDataBuffer"); - bufferIDCount++; - vaStatus = vaRenderPicture( mVADisplay, mVAContext, @@ -354,165 +499,25 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p bufferIDCount); CHECK_VA_STATUS("vaRenderPicture"); - return DECODE_SUCCESS; -} - + VABufferID slicebufferID; -// Parse byte string pattern "0x000001" (3 bytes) in the current buffer. -// Returns offset of position following the pattern in the buffer if pattern is found or -1 if not found. -int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) { - uint8_t *ptr; - uint32_t left = 0, data = 0, phase = 0; - uint8_t mask1 = 0, mask2 = 0; - - /* Meaning of phase: - 0: initial status, "0x000001" bytes are not found so far; - 1: one "0x00" byte is found; - 2: two or more consecutive "0x00" bytes" are found; - 3: "0x000001" patten is found ; - 4: if there is one more byte after "0x000001"; - */ - - left = length; - ptr = (uint8_t *) (stream + offset); - phase = 0; - - // parse until there is more data and start code not found - while ((left > 0) && (phase < 3)) { - // Check if the address is 32-bit aligned & phase=0, if thats the case we can check 4 bytes instead of one byte at a time. - if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) { - while (left > 3) { - data = *((uint32_t *)ptr); - mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0)); - mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1)); - // If second byte and fourth byte are not zero's then we cannot have a start code here, - // as we need two consecutive zero bytes for a start code pattern. - if (mask1 && mask2) { - // skip 4 bytes and start over - ptr += 4; - left -=4; - continue; - } else { - break; - } - } - } - - // At this point either data is not on a 32-bit boundary or phase > 0 so we look at one byte at a time - if (left > 0) { - if (*ptr == STARTCODE_00) { - phase++; - if (phase > 2) { - // more than 2 consecutive '0x00' bytes is found - phase = 2; - } - } else if ((*ptr == STARTCODE_01) && (phase == 2)) { - // start code is found - phase = 3; - } else { - // reset lookup - phase = 0; - } - ptr++; - left--; - } - } - - if ((left > 0) && (phase == 3)) { - phase = 4; - // return offset of position following the pattern in the buffer which matches "0x000001" byte string - return (int32_t)(ptr - stream); - } - return -1; -} - - -Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) { - uint8_t naluType; - int32_t naluHeaderLen; - - naluType = *(uint8_t *)(stream + naluStream->naluOffset); - naluType &= NALU_TYPE_MASK; - // first update nalu header length based on nalu type - if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) { - // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes - naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE); - } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) { - //sps, pps, sei, etc, return the entire NAL unit in clear - naluHeaderLen = naluStream->naluLen; - } else { - return DECODE_FRAME_DROPPED; - } - - memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t)); - naluStream->streamPos += 4; - - memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t)); - naluStream->streamPos += 4; + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + mFrameSize, //size + 1, //num_elements + mFrameData, + &slicebufferID); + 
CHECK_VA_STATUS("vaCreateSliceDataBuffer"); - memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t)); - naluStream->streamPos += 4; + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + &slicebufferID, + 1); + CHECK_VA_STATUS("vaRenderPicture"); - if (naluHeaderLen) { - memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen); - naluStream->streamPos += naluHeaderLen; - } return DECODE_SUCCESS; -} - - -// parse start-code prefixed stream, also knowns as Annex B byte stream, commonly used in AVI, ES, MPEG2 TS container -Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) { - int32_t naluOffset, offset, left; - NaluInfo *info; - uint32_t ret = DECODE_SUCCESS; - - naluOffset = 0; - offset = 0; - left = length; - - // leave 4 bytes to copy nalu count - naluStream->streamPos = 4; - naluStream->naluCount = 0; - memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER); - - for (; ;) { - naluOffset = findNalUnitOffset(stream, offset, left); - if (naluOffset == -1) { - break; - } - - if (naluStream->naluCount == 0) { - naluStream->naluOffset = naluOffset; - } else { - naluStream->naluLen = naluOffset - naluStream->naluOffset - STARTCODE_PREFIX_LEN; - ret = copyNaluHeader(stream, naluStream); - if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) { - LOGW("copyNaluHeader returned %d", ret); - return ret; - } - // starting position for next NALU - naluStream->naluOffset = naluOffset; - } - if (ret == DECODE_SUCCESS) { - naluStream->naluCount++; - } - - // update next lookup position and length - offset = naluOffset + 1; // skip one byte of NAL unit type - left = length - offset; - } - - if (naluStream->naluCount > 0) { - naluStream->naluLen = length - naluStream->naluOffset; - memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t)); - // ignore return value, either DECODE_SUCCESS or DECODE_FRAME_DROPPED - copyNaluHeader(stream, naluStream); - return DECODE_SUCCESS; - } - - LOGW("number of valid NALU is 0!"); - return DECODE_SUCCESS; } - diff --git a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h old mode 100644 new mode 100755 index af5ae44..6378243 --- a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h +++ b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h @@ -22,62 +22,34 @@ * */ -#ifndef VIDEO_DECODER_AVC_SECURE_H_ -#define VIDEO_DECODER_AVC_SECURE_H_ +#ifndef VIDEO_DECODER_AVC_SECURE_H +#define VIDEO_DECODER_AVC_SECURE_H +#include "VideoDecoderBase.h" #include "VideoDecoderAVC.h" - +#include "VideoDecoderDefs.h" class VideoDecoderAVCSecure : public VideoDecoderAVC { public: VideoDecoderAVCSecure(const char *mimeType); - virtual ~VideoDecoderAVCSecure(); - virtual Decode_Status start(VideoConfigBuffer *buffer); virtual void stop(void); // data in the decoded buffer is all encrypted. 
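    // Layout of a secure input buffer as consumed by decode() (a sketch
    // reconstructed from the parsing code in this class; the first two words
    // of each record are not interpreted there):
    //   [0, size)  : encrypted frame data
    //   size + 0   : uint32 (only read by the disabled debug path)
    //   size + 4   : uint32 num_nalus
    //   size + 8   : num_nalus records of
    //                { uint32, uint32, uint32 nalu_size, NALU bytes padded to 4 }
    // SEI/SPS/PPS NALUs are copied out in the clear; slice NALUs are rejected.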
virtual Decode_Status decode(VideoDecodeBuffer *buffer); - -private: - enum { - MAX_SLICE_HEADER_SIZE = 30, - MAX_NALU_HEADER_BUFFER = 8192, - MAX_NALU_NUMBER = 400, // > 4096/12 - }; - - // Information of Network Abstraction Layer Unit - struct NaluInfo { - int32_t naluOffset; // offset of NAL unit in the firewalled buffer - int32_t naluLen; // length of NAL unit - int32_t naluHeaderLen; // length of NAL unit header - }; - - struct NaluMetadata { - NaluInfo *naluInfo; - int32_t naluNumber; // number of NAL units - }; - - struct NaluByteStream { - int32_t naluOffset; - int32_t naluLen; - int32_t streamPos; - uint8_t *byteStream; // 4 bytes of naluCount, 4 bytes of naluOffset, 4 bytes of naulLen, 4 bytes of naluHeaderLen, followed by naluHeaderData - int32_t naluCount; - }; - +protected: + virtual Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data); + virtual Decode_Status continueDecodingFrame(vbp_data_h264 *data); + virtual Decode_Status beginDecodingFrame(vbp_data_h264 *data); + Decode_Status parseSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data); + Decode_Status updateSliceParameter(vbp_data_h264 *data, void *sliceheaderbuf); virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); - int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length); - Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream); - Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream); - private: - NaluMetadata mMetadata; - NaluByteStream mByteStream; - uint8_t *mNaluHeaderBuffer; - uint8_t *mInputBuffer; + int32_t mIsEncryptData; + int32_t mFrameSize; + uint8_t* mFrameData; + uint8_t* mClearData; + int32_t mFrameIdx; }; - - -#endif /* VIDEO_DECODER_AVC_SECURE_H_ */ +#endif -- cgit v1.2.3 From 6c4b6e29d6f3f29eac4d11ef202e23bbb5220491 Mon Sep 17 00:00:00 2001 From: pingshix Date: Sun, 26 Jan 2014 08:19:58 +0800 Subject: mixvbp: mute warning of syntax parsing failure BZ: 164803 WTRACE to VTRACE. This warning is harmless but annoying. Change-Id: I9300de92daa35e8f5c7d0659801580472ad8b7a2 Signed-off-by: pingshix --- mixvbp/vbp_manager/vbp_utils.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mixvbp/vbp_manager/vbp_utils.c b/mixvbp/vbp_manager/vbp_utils.c index edd94c1..1647269 100755 --- a/mixvbp/vbp_manager/vbp_utils.c +++ b/mixvbp/vbp_manager/vbp_utils.c @@ -389,7 +389,7 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f /* can't return error for now. Neet further investigation */ if (0 != error) { - WTRACE("failed to parse the syntax: %d!", error); + VTRACE("failed to parse the syntax: %d!", error); } /* process parsing result */ -- cgit v1.2.3 From a07cd04e9748dbb2080139a6e9ac175b6d149380 Mon Sep 17 00:00:00 2001 From: liubolun Date: Wed, 22 Jan 2014 15:55:06 +0800 Subject: Refine setting config max frame size for VP8 in libMix layer. BZ: 166007 Calculating the max frame size equals to ratio multiples average bitrate. Add passing down to driver layer. 
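As a rough illustration (hypothetical numbers, not part of this change):
with bitRate = 2000000 bps, frameRate = 30 fps and max_frame_size_ratio = 6,
the cap handed to the driver works out to

    max_frame_size = (bitRate / frameRate) * max_frame_size_ratio
                   = (2000000 / 30) * 6
                   = 399996   (integer math; units as defined by
                               VAEncMiscParameterBufferMaxFrameSize)
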
Change-Id: I882dc1fdf14a8411e7c647de0c7532f3b0ac1fb9 Signed-off-by: liubolun --- videoencoder/VideoEncoderBase.cpp | 2 +- videoencoder/VideoEncoderDef.h | 14 +++++++------- videoencoder/VideoEncoderVP8.cpp | 29 +++++++++++++++++------------ 3 files changed, 25 insertions(+), 20 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index d35aca1..284ab52 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1200,7 +1200,7 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { case VideoConfigTypeSliceNum: case VideoConfigTypeVP8: case VideoConfigTypeVP8ReferenceFrame: - case VideoConfigTypeVP8MaxFrameSize:{ + case VideoConfigTypeVP8MaxFrameSizeRatio:{ ret = derivedSetConfig(videoEncConfig); break; } diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index d65f50d..9c8737d 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -329,7 +329,7 @@ enum VideoParamConfigType { VideoConfigTypeVP8, VideoConfigTypeVP8ReferenceFrame, VideoConfigTypeCIR, - VideoConfigTypeVP8MaxFrameSize, + VideoConfigTypeVP8MaxFrameSizeRatio, VideoParamsConfigExtension }; @@ -638,7 +638,7 @@ struct VideoParamsVP8 : VideoParamConfigSet { uint32_t hrd_buf_size; uint32_t hrd_buf_initial_fullness; uint32_t hrd_buf_optimal_fullness; - uint32_t max_frame_size; + uint32_t max_frame_size_ratio; VideoParamsVP8() { type = VideoParamsTypeVP8; @@ -674,14 +674,14 @@ struct VideoConfigVP8ReferenceFrame : VideoParamConfigSet { } }; -struct VideoConfigVP8MaxFrameSize : VideoParamConfigSet { +struct VideoConfigVP8MaxFrameSizeRatio : VideoParamConfigSet { - VideoConfigVP8MaxFrameSize() { - type = VideoConfigTypeVP8MaxFrameSize; - size = sizeof(VideoConfigVP8MaxFrameSize); + VideoConfigVP8MaxFrameSizeRatio() { + type = VideoConfigTypeVP8MaxFrameSizeRatio; + size = sizeof(VideoConfigVP8MaxFrameSizeRatio); } - uint32_t max_frame_size; + uint32_t max_frame_size_ratio; }; diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index d9785e4..45655bd 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -30,7 +30,7 @@ VideoEncoderVP8::VideoEncoderVP8() mVideoParamsVP8.hrd_buf_size = 1000; mVideoParamsVP8.hrd_buf_initial_fullness = 500; mVideoParamsVP8.hrd_buf_optimal_fullness = 600; - mVideoParamsVP8.max_frame_size = 0; + mVideoParamsVP8.max_frame_size_ratio = 0; mVideoConfigVP8.force_kf = 0; mVideoConfigVP8.refresh_entropy_probs = 0; @@ -213,6 +213,11 @@ Encode_Status VideoEncoderVP8::renderMaxFrameSizeParams(void) VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncMiscParameterBuffer *misc_param; VAEncMiscParameterBufferMaxFrameSize * misc_maxframesize; + unsigned int frameRateNum = mComParams.frameRate.frameRateNum; + unsigned int frameRateDenom = mComParams.frameRate.frameRateDenom; + unsigned int frameRate = (unsigned int)(frameRateNum + frameRateDenom /2); + unsigned int bitRate = mComParams.rcParams.bitRate; + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, VAEncMiscParameterBufferType, sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterHRD), @@ -223,7 +228,7 @@ Encode_Status VideoEncoderVP8::renderMaxFrameSizeParams(void) misc_param->type = VAEncMiscParameterTypeMaxFrameSize; misc_maxframesize = (VAEncMiscParameterBufferMaxFrameSize *)misc_param->data; memset(misc_maxframesize, 0, sizeof(*misc_maxframesize)); - misc_maxframesize->max_frame_size = mVideoParamsVP8.max_frame_size; + misc_maxframesize->max_frame_size 
= (unsigned int)((bitRate/frameRate) * mVideoParamsVP8.max_frame_size_ratio); vaUnmapBuffer(mVADisplay, max_frame_size_param_buf); vaStatus = vaRenderPicture(mVADisplay,mVAContext, &max_frame_size_param_buf, 1); @@ -333,16 +338,16 @@ Encode_Status VideoEncoderVP8::derivedGetConfig(VideoParamConfigSet *videoEncCon } break; - case VideoConfigTypeVP8MaxFrameSize:{ + case VideoConfigTypeVP8MaxFrameSizeRatio :{ - VideoConfigVP8MaxFrameSize *encConfigVP8MaxFrameSize = - reinterpret_cast (videoEncConfig); + VideoConfigVP8MaxFrameSizeRatio *encConfigVP8MaxFrameSizeRatio = + reinterpret_cast (videoEncConfig); - if (encConfigVP8MaxFrameSize->size != sizeof(VideoConfigVP8MaxFrameSize)) { + if (encConfigVP8MaxFrameSizeRatio->size != sizeof(VideoConfigVP8MaxFrameSizeRatio)) { return ENCODE_INVALID_PARAMS; } - encConfigVP8MaxFrameSize->max_frame_size = mVideoParamsVP8.max_frame_size; + encConfigVP8MaxFrameSizeRatio->max_frame_size_ratio = mVideoParamsVP8.max_frame_size_ratio; } break; @@ -388,15 +393,15 @@ Encode_Status VideoEncoderVP8::derivedSetConfig(VideoParamConfigSet *videoEncCon } break; - case VideoConfigTypeVP8MaxFrameSize:{ - VideoConfigVP8MaxFrameSize *encConfigVP8MaxFrameSize = - reinterpret_cast (videoEncConfig); + case VideoConfigTypeVP8MaxFrameSizeRatio:{ + VideoConfigVP8MaxFrameSizeRatio *encConfigVP8MaxFrameSizeRatio = + reinterpret_cast (videoEncConfig); - if (encConfigVP8MaxFrameSize->size != sizeof(VideoConfigVP8MaxFrameSize)) { + if (encConfigVP8MaxFrameSizeRatio->size != sizeof(VideoConfigVP8MaxFrameSizeRatio)) { return ENCODE_INVALID_PARAMS; } - mVideoParamsVP8.max_frame_size = encConfigVP8MaxFrameSize->max_frame_size; + mVideoParamsVP8.max_frame_size_ratio = encConfigVP8MaxFrameSizeRatio->max_frame_size_ratio; mRenderMaxFrameSize = true; } break; -- cgit v1.2.3 From 0501efb37069c98d91d5ff2bec8e5cdb56970899 Mon Sep 17 00:00:00 2001 From: Dmitry Shkurko Date: Tue, 17 Dec 2013 13:04:41 +0700 Subject: Misplaced 'mutable' specifier BZ:159390 'mutable' specifier cannot be applied to anonymous members Change-Id: Icf17bfe7c9b631a8792464420810154d150959b2 Category: feature differentiation Domain: Video.Rendering-VPP Origin: internal Upstream-Candidate: no, not-aosp Signed-off-by: Dmitry Shkurko --- videovpp/test/main.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/videovpp/test/main.cpp b/videovpp/test/main.cpp index ce91027..5e7b0ef 100644 --- a/videovpp/test/main.cpp +++ b/videovpp/test/main.cpp @@ -52,7 +52,7 @@ struct mfx_gralloc_drm_handle_t { int pid; // creator mutable int other; // registered owner (pid) - mutable union { int data1; mutable drm_intel_bo *bo; }; // drm buffer object + union { mutable int data1; mutable drm_intel_bo *bo; }; // drm buffer object union { int data2; uint32_t fb; }; // framebuffer id int pitch; // buffer pitch (in bytes) int allocWidth; // Allocated buffer width in pixels. -- cgit v1.2.3 From 06ba8a13f111c8d05e37d05f33ce1a9ade376ff3 Mon Sep 17 00:00:00 2001 From: Jing SUN Date: Fri, 24 Jan 2014 13:30:11 +0800 Subject: Added a getCodedSize interface to IntelImageEncoder. BZ: 166869 Issue: Camera requested a getting JPEG encoding coded data's size before copying them out interface. Solution: Added that interface. 
Change-Id: I24a9986bfb8ade018bff4966c4012b689dc0a100 Signed-off-by: Jing SUN --- imageencoder/ImageEncoder.cpp | 107 +++++++++++++++++++++++++++++++----------- imageencoder/ImageEncoder.h | 23 +++++++-- imageencoder/test/main.cpp | 13 ++++- 3 files changed, 109 insertions(+), 34 deletions(-) diff --git a/imageencoder/ImageEncoder.cpp b/imageencoder/ImageEncoder.cpp index 7427737..c7b522d 100644 --- a/imageencoder/ImageEncoder.cpp +++ b/imageencoder/ImageEncoder.cpp @@ -29,6 +29,9 @@ IntelImageEncoder::IntelImageEncoder(void) reserved_image_seq = -1; + va_codedbuffersegment = NULL; + coded_data_size = 0; + LOGV("IntelImageEncoder: done\n"); } @@ -355,6 +358,10 @@ int IntelImageEncoder::encode(int image_seq, unsigned int new_quality) return VA_STATUS_ERROR_INVALID_PARAMETER; } + /* Reset coded */ + va_codedbuffersegment = NULL; + coded_data_size = 0; + /* Begin picture */ va_status = vaBeginPicture(va_dpy, va_contextid, va_surfaceid[image_seq]); if (va_status != VA_STATUS_SUCCESS) { @@ -407,38 +414,30 @@ int IntelImageEncoder::encode(int image_seq, unsigned int new_quality) return VA_STATUS_SUCCESS; } -int IntelImageEncoder::getCoded(void *user_coded_buf, - unsigned int user_coded_buf_size, - unsigned int *coded_data_sizep) +int IntelImageEncoder::getCodedSize(unsigned int *coded_data_sizep) { VAStatus va_status; - VACodedBufferSegment *va_codedbuffersegment = NULL; - - if ((NULL == user_coded_buf) || - (NULL == coded_data_sizep)) { - LOGE("getCoded: invalid NULL pointer as input paramter!\n"); - return VA_STATUS_ERROR_INVALID_PARAMETER; - } + VACodedBufferSegment *coded_segment = NULL; - if (user_coded_buf_size < coded_buf_size) { - LOGE("getCoded: the coded buffer was too small!\n"); + if (encoder_status != LIBVA_ENCODING) { + LOGE("getCodedSize: no encoding active to get coded data!\n"); return VA_STATUS_ERROR_OPERATION_FAILED; } - if (encoder_status != LIBVA_ENCODING) { - LOGE("getCoded: no encoding active to get coded data!\n"); - return VA_STATUS_ERROR_OPERATION_FAILED; + if (NULL == coded_data_sizep) { + LOGE("getCodedSize: invalid NULL pointer as input paramter!\n"); + return VA_STATUS_ERROR_INVALID_PARAMETER; } if (0 == va_surfaceid[reserved_image_seq]) { - LOGE("getCoded: invalid image, probably already destroyed!\n"); + LOGE("getCodedSize: invalid image, probably already destroyed!\n"); return VA_STATUS_ERROR_OPERATION_FAILED; } /* Sync surface */ va_status = vaSyncSurface(va_dpy, va_surfaceid[reserved_image_seq]); if (va_status != VA_STATUS_SUCCESS) { - LOGE("getCoded: vaSyncSurface failed (%d)!\n", va_status); + LOGE("getCodedSize: vaSyncSurface failed (%d)!\n", va_status); reserved_image_seq = -1; encoder_status = LIBVA_CONTEXT_CREATED; return va_status; @@ -453,26 +452,67 @@ int IntelImageEncoder::getCoded(void *user_coded_buf, return va_status; } - /* Mark the coded buffer empty */ + /* Initialize coded data size */ *coded_data_sizep = 0; + coded_segment = va_codedbuffersegment; + + /* Get the total size of coded data */ + while (coded_segment != NULL) { + *coded_data_sizep += coded_segment->size; + coded_segment = (VACodedBufferSegment *)coded_segment->next; + } + coded_data_size = *coded_data_sizep; + + reserved_image_seq = -1; + encoder_status = LIBVA_PENDING_GET_CODED; + + LOGV("getCodedSize: done\n"); + return va_status; +} + +int IntelImageEncoder::getCoded(void *user_coded_buf, + unsigned int user_coded_buf_size) +{ + VAStatus va_status; + unsigned int copied_size = 0; + + if (encoder_status != LIBVA_PENDING_GET_CODED) { + LOGE("getCoded: no ready coded data!\n"); + 
return VA_STATUS_ERROR_OPERATION_FAILED; + } + + if (NULL == user_coded_buf) { + LOGE("getCoded: invalid NULL pointer as input paramter!\n"); + return VA_STATUS_ERROR_INVALID_PARAMETER; + } + + if (user_coded_buf_size < coded_data_size) { + LOGE("getCoded: coded buffer was smaller than coded data size!\n"); + return VA_STATUS_ERROR_OPERATION_FAILED; + } /* Get the total size of coded data */ while (va_codedbuffersegment != NULL) { - memcpy((void *)((unsigned int)user_coded_buf+*coded_data_sizep), + memcpy((void *)((unsigned int)user_coded_buf+copied_size), va_codedbuffersegment->buf, va_codedbuffersegment->size); - *coded_data_sizep += va_codedbuffersegment->size; + copied_size += va_codedbuffersegment->size; va_codedbuffersegment = (VACodedBufferSegment *)va_codedbuffersegment->next; } + /* Reset coded records */ + va_codedbuffersegment = NULL; + coded_data_size = 0; + + reserved_image_seq = -1; + encoder_status = LIBVA_CONTEXT_CREATED; + va_status = vaUnmapBuffer(va_dpy, va_codedbufferid); if (va_status != VA_STATUS_SUCCESS) { LOGE("getCoded: vaUnmapBuffer failed (%d)!\n", va_status); + return va_status; } - reserved_image_seq = -1; - encoder_status = LIBVA_CONTEXT_CREATED; - LOGV("getCoded: done\n"); return va_status; } @@ -513,7 +553,8 @@ int IntelImageEncoder::destroySourceSurface(int image_seq) int IntelImageEncoder::destroyContext(void) { - VAStatus va_status, va_final_status; + VAStatus va_status = VA_STATUS_SUCCESS; + VAStatus va_final_status = VA_STATUS_SUCCESS; if (0 == va_contextid) { LOGE("destroyContext: no context to destroy!\n"); @@ -525,12 +566,23 @@ int IntelImageEncoder::destroyContext(void) return VA_STATUS_ERROR_OPERATION_FAILED; } + if (LIBVA_PENDING_GET_CODED == encoder_status) { + va_codedbuffersegment = NULL; + coded_data_size = 0; + + va_status = vaUnmapBuffer(va_dpy, va_codedbufferid); + if (va_status != VA_STATUS_SUCCESS) { + LOGE("destroyContext: vaUnmapBuffer failed (%d)!\n", va_status); + } + va_final_status |= va_status; + } + /* Destroy the coded buffer */ va_status = vaDestroyBuffer(va_dpy, va_codedbufferid); if (va_status != VA_STATUS_SUCCESS) { - LOGE("createContext: vaDestroyBuffer VAEncCodedBufferType failed (%d)!\n", va_status); + LOGE("destroyContext: vaDestroyBuffer VAEncCodedBufferType failed (%d)!\n", va_status); } - va_final_status = va_status; + va_final_status |= va_status; va_codedbufferid = 0; coded_buf_size = 0; @@ -572,7 +624,8 @@ int IntelImageEncoder::deinitializeEncoder(void) if (LIBVA_ENCODING == encoder_status) { LOGE("deinitializeEncoder: encoding was ongoing, can't deinitialize LibVA!\n"); return VA_STATUS_ERROR_OPERATION_FAILED; - } else if (LIBVA_CONTEXT_CREATED == encoder_status) { + } else if ((LIBVA_CONTEXT_CREATED == encoder_status) || + (LIBVA_PENDING_GET_CODED == encoder_status)) { /* Destroy context if it exists */ destroyContext(); } diff --git a/imageencoder/ImageEncoder.h b/imageencoder/ImageEncoder.h index 31ce042..e90eb8d 100644 --- a/imageencoder/ImageEncoder.h +++ b/imageencoder/ImageEncoder.h @@ -112,25 +112,33 @@ public: } /* - * getCoded waits for the current encoding task's completion. + * getCodedSize waits for the current encoding task's completion + * and returns the exact coded size. * Only one encoding task can be triggered under an instance * of IntelImageEncoder at any minute. * This has not be called right after encode is called, * instead, this can be called any minutes after the encoding * is triggered to support both synced/asynced encoding usage. 
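 * A typical sequence (based on imageencoder/test/main.cpp; error
 * handling and the encode() call's arguments are abbreviated):
 *   encoder.encode(image_seq, quality);
 *   encoder.getCodedSize(&output_size);      // waits for completion
 *   encoder.getCoded(output_buffer, output_buffer_size);
 * where output_buffer_size must be at least output_size.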
* Parameters: + * coded_data_sizep: the returned pointer to the actual size + * value of coded JPEG. + * Return zero for success and non-zero for failure. + */ + int getCodedSize(unsigned int *coded_data_sizep); + + /* + * getCoded copies coded data out for users. + * This should be called after getCodedSize. + * Parameters: * user_coded_buf: the input buffer to take coded data. * After getCoded is returned with no errors, * this buffer will have the coded JPEG in it. * user_coded_buf_size: the size of input buffer. * If too small, an error'll be returned. - * coded_data_sizep: the returned pointer to the actual size - * value of coded JPEG. * Return zero for success and non-zero for failure. */ int getCoded(void *user_coded_buf, - unsigned int user_coded_buf_size, - unsigned int *coded_data_sizep); + unsigned int user_coded_buf_size); int destroySourceSurface(int image_seq); int destroyContext(void); @@ -142,6 +150,7 @@ private: LIBVA_INITIALIZED, LIBVA_CONTEXT_CREATED, LIBVA_ENCODING, + LIBVA_PENDING_GET_CODED, }IntelImageEncoderStatus; /* Valid since LIBVA_UNINITIALIZED */ @@ -170,6 +179,10 @@ private: /* Valid since LIBVA_ENCODING */ int reserved_image_seq; + + /* Valid since LIBVA_PENDING_GET_CODED */ + VACodedBufferSegment *va_codedbuffersegment; + unsigned int coded_data_size; }; #endif /* __LIBMIX_INTEL_IMAGE_ENCODER_H__ */ diff --git a/imageencoder/test/main.cpp b/imageencoder/test/main.cpp index 9aa15a9..2707680 100644 --- a/imageencoder/test/main.cpp +++ b/imageencoder/test/main.cpp @@ -514,8 +514,16 @@ int main(int argc, char** argv) } PERF_START(encode_time, fix_cpu_frequency); - status = image_encoder.getCoded(output_buffer, output_buffer_size, &output_size); - PERF_STOP(encode_time, fix_cpu_frequency); + status = image_encoder.getCodedSize(&output_size); + if (status != 0) { + fprintf(stderr, "getCodedSize failed (%d)!\n", status); + if (source_buffer) free(source_buffer); + free(output_buffer); + image_encoder.deinitializeEncoder(); + return 1; + } + + status = image_encoder.getCoded(output_buffer, output_buffer_size); if (status != 0) { fprintf(stderr, "getCoded failed (%d)!\n", status); if (source_buffer) free(source_buffer); @@ -523,6 +531,7 @@ int main(int argc, char** argv) image_encoder.deinitializeEncoder(); return 1; } + PERF_STOP(encode_time, fix_cpu_frequency); printf("Prepare encoding: %.3fms\n", (double)PERF_GET(prepare_encoding_time)/cpu_available_max); printf("Encode and stitch coded: %.3fms\n", (double)PERF_GET(encode_time)/cpu_available_max); -- cgit v1.2.3 From 7a86233e85ecfd2aa9fae1286ee1dae924c2a897 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Wed, 29 Jan 2014 16:30:26 +0800 Subject: libmix: refine the Video Error Data Structure BZ: 168230 Add another type: DecodeHeaderError and mb count Change-Id: I5ef769b7a9b99af1e2f84c3da34c0b26fa98cb91 Signed-off-by: Dan Liang --- videodecoder/VideoDecoderBase.cpp | 3 ++- videodecoder/VideoDecoderDefs.h | 6 ++++-- 2 files changed, 6 insertions(+), 3 deletions(-) mode change 100755 => 100644 videodecoder/VideoDecoderBase.cpp diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp old mode 100755 new mode 100644 index 7707fce..afcb8bf --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -1387,9 +1387,10 @@ void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *currentSurface) { for (int i = 0; i < MAX_ERR_NUM - offset; i++) { if (err_drv_output[i].status != -1) { currentSurface->errBuf.errorNumber++; - currentSurface->errBuf.errorArray[i + offset].type = 
(VideoDecodeErrorType)err_drv_output[i].decode_error_type; + currentSurface->errBuf.errorArray[i + offset].type = DecodeMBError; currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.start_mb = err_drv_output[i].start_mb; currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.end_mb = err_drv_output[i].end_mb; + currentSurface->errBuf.errorArray[i + offset].num_mbs = err_drv_output[i].end_mb - err_drv_output[i].start_mb + 1; ITRACE("Error Index[%d]: type = %d, start_mb = %d, end_mb = %d", currentSurface->errBuf.errorNumber - 1, currentSurface->errBuf.errorArray[i + offset].type, diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index d39bb78..fc347ac 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -114,9 +114,10 @@ typedef enum { typedef enum { - DecodeSliceMissing = 0, + DecodeHeaderError = 0, DecodeMBError = 1, - DecodeRefMissing = 2, + DecodeSliceMissing = 2, + DecodeRefMissing = 3, } VideoDecodeErrorType; #define MAX_ERR_NUM 10 @@ -157,6 +158,7 @@ struct VideoConfigBuffer { struct VideoErrorInfo { VideoDecodeErrorType type; + uint32_t num_mbs; union { struct {uint32_t start_mb; uint32_t end_mb;} mb_pos; } error_data; -- cgit v1.2.3 From 63573a909fb408c8194ecb8e5ec3c4fbec166281 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Mon, 10 Feb 2014 17:14:40 +0800 Subject: Fix klocwork issue in the LibMIX parser BZ: 170004 Check the index range of p_dpb->fs_dpb_idc to prevent the buffer overflow. Change-Id: Ie3b22365a23a1a0673f1df4d5d023058a5089fda Signed-off-by: wfeng6 --- mixvbp/vbp_plugin/h264/h264parse_dpb.c | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/mixvbp/vbp_plugin/h264/h264parse_dpb.c b/mixvbp/vbp_plugin/h264/h264parse_dpb.c index 883057a..f193d0f 100644 --- a/mixvbp/vbp_plugin/h264/h264parse_dpb.c +++ b/mixvbp/vbp_plugin/h264/h264parse_dpb.c @@ -3106,6 +3106,12 @@ void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, i h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[0]); poc_int = p_dpb->active_fs->frame.poc; + uint8_t used_size = (uint8_t)(sizeof(p_dpb->fs_dpb_idc)/sizeof(p_dpb->fs_dpb_idc[0])); + if (p_dpb->used_size >= used_size) { + WTRACE("DPB overflow: p_dpb->used_size = %d", p_dpb->used_size); + return; + } + for (idx = 0; idx < p_dpb->used_size; idx++) { h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]); -- cgit v1.2.3 From ae5d67bb721f797937e86d84a85c2191e4e7ba54 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Wed, 29 Jan 2014 16:39:59 +0800 Subject: Add LibMIX slice header parsing support for Moorefield BZ: 168151 Add LibMIX slice header parsing support for Moorefield Change-Id: Ie86065f55059d6c7e2b89aae58347fb0e0bb46a0 Signed-off-by: wfeng6 --- mixvbp/vbp_manager/Android.mk | 2 +- mixvbp/vbp_plugin/h264/Android.mk | 2 +- videodecoder/Android.mk | 3 +- .../moorefield/VideoDecoderAVCSecure.cpp | 735 +++++++++++---------- .../securevideo/moorefield/VideoDecoderAVCSecure.h | 60 +- 5 files changed, 390 insertions(+), 412 deletions(-) diff --git a/mixvbp/vbp_manager/Android.mk b/mixvbp/vbp_manager/Android.mk index d60b7d6..a45fe7e 100755 --- a/mixvbp/vbp_manager/Android.mk +++ b/mixvbp/vbp_manager/Android.mk @@ -60,7 +60,7 @@ LOCAL_C_INCLUDES += $(LOCAL_PATH)/secvideo/baytrail/ LOCAL_SRC_FILES += secvideo/baytrail/vbp_h264secure_parser.c endif -PLATFORM_SUPPORT_USE_SLICE_HEADER_PARSING := merrifield +PLATFORM_SUPPORT_USE_SLICE_HEADER_PARSING := merrifield moorefield ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_USE_SLICE_HEADER_PARSING)),) 
LOCAL_CFLAGS += -DUSE_SLICE_HEADER_PARSING diff --git a/mixvbp/vbp_plugin/h264/Android.mk b/mixvbp/vbp_plugin/h264/Android.mk index c27b102..514435d 100755 --- a/mixvbp/vbp_plugin/h264/Android.mk +++ b/mixvbp/vbp_plugin/h264/Android.mk @@ -70,7 +70,7 @@ include $(BUILD_SHARED_LIBRARY) endif include $(CLEAR_VARS) -PLATFORM_SUPPORT_SLICE_HEADER_PARSER := merrifield +PLATFORM_SUPPORT_SLICE_HEADER_PARSER := merrifield moorefield ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_SLICE_HEADER_PARSER)),) LOCAL_SRC_FILES := \ diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 94d3605..6041456 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -43,7 +43,8 @@ ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_USE_HYBRID_DRIVER)),) endif PLATFORM_SUPPORT_SLICE_HEADER_PARSER := \ - merrifield + merrifield \ + moorefield ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_SLICE_HEADER_PARSER)),) LOCAL_CFLAGS += -DUSE_SLICE_HEADER_PARSING diff --git a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp index ab7bc7e..671e8bd 100644 --- a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp @@ -21,206 +21,417 @@ * approved by Intel in writing. * */ - -#include "VideoDecoderAVCSecure.h" +#include +#include "VideoDecoderBase.h" +#include "VideoDecoderAVC.h" #include "VideoDecoderTrace.h" -#include - +#include "vbp_loader.h" +#include "VideoDecoderAVCSecure.h" -#define STARTCODE_00 0x00 -#define STARTCODE_01 0x01 +#define MAX_SLICEHEADER_BUFFER_SIZE 4096 #define STARTCODE_PREFIX_LEN 3 #define NALU_TYPE_MASK 0x1F - - -// mask for little endian, to mast the second and fourth bytes in the byte stream -#define STARTCODE_MASK0 0xFF000000 //0x00FF0000 -#define STARTCODE_MASK1 0x0000FF00 //0x000000FF - - -typedef enum { - NAL_UNIT_TYPE_unspecified0 = 0, - NAL_UNIT_TYPE_SLICE, - NAL_UNIT_TYPE_DPA, - NAL_UNIT_TYPE_DPB, - NAL_UNIT_TYPE_DPC, - NAL_UNIT_TYPE_IDR, - NAL_UNIT_TYPE_SEI, - NAL_UNIT_TYPE_SPS, - NAL_UNIT_TYPE_PPS, - NAL_UNIT_TYPE_Acc_unit_delimiter, - NAL_UNIT_TYPE_EOSeq, - NAL_UNIT_TYPE_EOstream, - NAL_UNIT_TYPE_filler_data, - NAL_UNIT_TYPE_SPS_extension, - NAL_UNIT_TYPE_Reserved14, - NAL_UNIT_TYPE_Reserved15, - NAL_UNIT_TYPE_Reserved16, - NAL_UNIT_TYPE_Reserved17, - NAL_UNIT_TYPE_Reserved18, - NAL_UNIT_TYPE_ACP, - NAL_UNIT_TYPE_Reserved20, - NAL_UNIT_TYPE_Reserved21, - NAL_UNIT_TYPE_Reserved22, - NAL_UNIT_TYPE_Reserved23, - NAL_UNIT_TYPE_unspecified24, -} NAL_UNIT_TYPE; - -#ifndef min -#define min(X, Y) ((X) <(Y) ? 
(X) : (Y)) -#endif - - +#define MAX_NALU_HEADER_BUFFER 8192 static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01}; +/* H264 start code values */ +typedef enum _h264_nal_unit_type +{ + h264_NAL_UNIT_TYPE_unspecified = 0, + h264_NAL_UNIT_TYPE_SLICE, + h264_NAL_UNIT_TYPE_DPA, + h264_NAL_UNIT_TYPE_DPB, + h264_NAL_UNIT_TYPE_DPC, + h264_NAL_UNIT_TYPE_IDR, + h264_NAL_UNIT_TYPE_SEI, + h264_NAL_UNIT_TYPE_SPS, + h264_NAL_UNIT_TYPE_PPS, + h264_NAL_UNIT_TYPE_Acc_unit_delimiter, + h264_NAL_UNIT_TYPE_EOSeq, + h264_NAL_UNIT_TYPE_EOstream, + h264_NAL_UNIT_TYPE_filler_data, + h264_NAL_UNIT_TYPE_SPS_extension, + h264_NAL_UNIT_TYPE_ACP = 19, + h264_NAL_UNIT_TYPE_Slice_extension = 20 +} h264_nal_unit_type_t; VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) - : VideoDecoderAVC(mimeType), - mNaluHeaderBuffer(NULL), - mInputBuffer(NULL) { - - memset(&mMetadata, 0, sizeof(NaluMetadata)); - memset(&mByteStream, 0, sizeof(NaluByteStream)); -} - -VideoDecoderAVCSecure::~VideoDecoderAVCSecure() { + : VideoDecoderAVC(mimeType){ + mFrameSize = 0; + mFrameData = NULL; + mIsEncryptData = 0; + mClearData = NULL; + setParserType(VBP_H264SECURE); + mFrameIdx = 0; } Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { + VTRACE("VideoDecoderAVCSecure::start"); Decode_Status status = VideoDecoderAVC::start(buffer); if (status != DECODE_SUCCESS) { return status; } - mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER]; - mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER]; - mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER]; - - if (mMetadata.naluInfo == NULL || - mByteStream.byteStream == NULL || - mNaluHeaderBuffer == NULL) { - ETRACE("Failed to allocate memory."); - // TODO: release all allocated memory + mClearData = new uint8_t [MAX_NALU_HEADER_BUFFER]; + if (mClearData == NULL) { + ETRACE("Failed to allocate memory for mClearData"); return DECODE_MEMORY_FAIL; } + return status; } void VideoDecoderAVCSecure::stop(void) { + VTRACE("VideoDecoderAVCSecure::stop"); VideoDecoderAVC::stop(); - if (mMetadata.naluInfo) { - delete [] mMetadata.naluInfo; - mMetadata.naluInfo = NULL; + if (mClearData) { + delete [] mClearData; + mClearData = NULL; } +} + +Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { + VTRACE("VideoDecoderAVCSecure::decode"); + Decode_Status status; + vbp_data_h264 *data = NULL; + if (buffer == NULL) { + return DECODE_INVALID_DATA; + } + + int32_t clear_data_size = 0; + uint8_t *clear_data = NULL; + uint8_t naluType = 0; + + int32_t num_nalus; + int32_t nalu_offset; + int32_t offset; + uint8_t *data_src; + uint8_t *nalu_data; + uint32_t nalu_size; +// uint32_t testsize; +// uint8_t *testdata; + if (buffer->flag & IS_SECURE_DATA) { + VTRACE("Decoding protected video ..."); + mIsEncryptData = 1; + + mFrameData = buffer->data; + mFrameSize = buffer->size; + VTRACE("mFrameData = %p, mFrameSize = %d", mFrameData, mFrameSize); +#if 0 + testsize = *(uint32_t *)(buffer->data + buffer->size); + testdata = (uint8_t *)(buffer->data + buffer->size + sizeof(uint32_t)); + for (int i = 0; i < testsize; i++) { + VTRACE("testdata[%d] = 0x%x", i, testdata[i]); + } +#endif + num_nalus = *(uint32_t *)(buffer->data + buffer->size + sizeof(uint32_t)); + VTRACE("num_nalus = %d", num_nalus); + offset = 4; + for (int32_t i = 0; i < num_nalus; i++) { + VTRACE("%d nalu, offset = %d", i, offset); + data_src = buffer->data + buffer->size + sizeof(uint32_t) + offset; + nalu_size = *(uint32_t *)(data_src + 2 * sizeof(uint32_t)); + nalu_size = 
(nalu_size + 0x03) & (~0x03); + + nalu_data = data_src + 3 *sizeof(uint32_t); + naluType = nalu_data[0] & NALU_TYPE_MASK; + offset += nalu_size + 3 *sizeof(uint32_t); + VTRACE("naluType = 0x%x", naluType); + VTRACE("nalu_size = %d, nalu_data = %p", nalu_size, nalu_data); + + if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) { + ETRACE("Slice NALU received!"); + return DECODE_INVALID_DATA; + } + + else if (naluType >= h264_NAL_UNIT_TYPE_SEI && naluType <= h264_NAL_UNIT_TYPE_PPS) { + memcpy(mClearData + clear_data_size, + startcodePrefix, + STARTCODE_PREFIX_LEN); + clear_data_size += STARTCODE_PREFIX_LEN; + memcpy(mClearData + clear_data_size, + nalu_data, + nalu_size); + clear_data_size += nalu_size; + } else { + ETRACE("Failure: DECODE_FRAME_DROPPED"); + return DECODE_FRAME_DROPPED; + } + } + clear_data = mClearData; - if (mByteStream.byteStream) { - delete [] mByteStream.byteStream; - mByteStream.byteStream = NULL; + } else { + VTRACE("Decoding clear video ..."); + mIsEncryptData = 0; + mFrameSize = buffer->size; + mFrameData = buffer->data; + clear_data = buffer->data; + clear_data_size = buffer->size; + } + if (clear_data_size > 0) { + status = VideoDecoderBase::parseBuffer( + clear_data, + clear_data_size, + false, + (void**)&data); + CHECK_STATUS("VideoDecoderBase::parseBuffer"); + } else { + status = VideoDecoderBase::queryBuffer((void**)&data); + CHECK_STATUS("VideoDecoderBase::queryBuffer"); } - if (mNaluHeaderBuffer) { - delete [] mNaluHeaderBuffer; - mNaluHeaderBuffer = NULL; + if (!mVAStarted) { + if (data->has_sps && data->has_pps) { + status = startVA(data); + CHECK_STATUS("startVA"); + } else { + WTRACE("Can't start VA as either SPS or PPS is still not available."); + return DECODE_SUCCESS; + } } + status = decodeFrame(buffer, data); + + return status; } -Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { +Decode_Status VideoDecoderAVCSecure::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data) { + VTRACE("VideoDecoderAVCSecure::decodeFrame"); Decode_Status status; - int32_t sizeAccumulated = 0; - int32_t sizeLeft = 0; - uint8_t *pByteStream = NULL; - NaluInfo *pNaluInfo = mMetadata.naluInfo; + VTRACE("data->has_sps = %d, data->has_pps = %d", data->has_sps, data->has_pps); + +#if 0 + // Don't remove the following codes, it can be enabled for debugging DPB. 
+ for (unsigned int i = 0; i < data->num_pictures; i++) { + VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic; + VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d, reference = %d", + i, + buffer->timeStamp/1E6, + pic.TopFieldOrderCnt, + pic.BottomFieldOrderCnt, + pic.flags, + (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || + (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)); + } +#endif - if (buffer->flag & IS_SECURE_DATA) { - // NALU headers are appended to encrypted video bitstream - // |...encrypted video bitstream (16 bytes aligned)...| 4 bytes of header size |...NALU headers..| - pByteStream = buffer->data + buffer->size + 4; - sizeLeft = *(int32_t *)(buffer->data + buffer->size); - VTRACE("%s sizeLeft: %d buffer->size: %#x", __func__, sizeLeft, buffer->size); - mInputBuffer = buffer->data; + if (data->new_sps || data->new_pps) { + status = handleNewSequence(data); + CHECK_STATUS("handleNewSequence"); + } + + uint64_t lastPTS = mCurrentPTS; + mCurrentPTS = buffer->timeStamp; + + // start decoding a new frame + status = acquireSurfaceBuffer(); + CHECK_STATUS("acquireSurfaceBuffer"); + + if (mFrameSize > 0) { + status = parseSliceHeader(buffer, data); + } + if (status != DECODE_SUCCESS) { + endDecodingFrame(true); + return status; + } + + status = beginDecodingFrame(data); + CHECK_STATUS("beginDecodingFrame"); + + // finish decoding the last frame + status = endDecodingFrame(false); + CHECK_STATUS("endDecodingFrame"); + + if (isNewFrame(data, lastPTS == mCurrentPTS) == 0) { + ETRACE("Can't handle interlaced frames yet"); + return DECODE_FAIL; + } + + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderAVCSecure::beginDecodingFrame(vbp_data_h264 *data) { + VTRACE("VideoDecoderAVCSecure::beginDecodingFrame"); + Decode_Status status; + VAPictureH264 *picture = &(data->pic_data[0].pic_parms->CurrPic); + if ((picture->flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || + (picture->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { + mAcquiredBuffer->referenceFrame = true; } else { - status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream); - CHECK_STATUS("parseAnnexBStream"); - pByteStream = mByteStream.byteStream; - sizeLeft = mByteStream.streamPos; - mInputBuffer = buffer->data; + mAcquiredBuffer->referenceFrame = false; } - if (sizeLeft < 4) { - ETRACE("Not enough data to read number of NALU."); - return DECODE_INVALID_DATA; + + if (picture->flags & VA_PICTURE_H264_TOP_FIELD) { + mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD; + } else { + mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE; } - // read number of NALU - memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t)); - pByteStream += 4; - sizeLeft -= 4; + mAcquiredBuffer->renderBuffer.flag = 0; + mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS; + mAcquiredBuffer->pictureOrder = getPOC(picture); + + status = continueDecodingFrame(data); + return status; +} - if (mMetadata.naluNumber == 0) { - WTRACE("Number of NALU is ZERO!"); - return DECODE_SUCCESS; +Decode_Status VideoDecoderAVCSecure::continueDecodingFrame(vbp_data_h264 *data) { + VTRACE("VideoDecoderAVCSecure::continueDecodingFrame"); + Decode_Status status; + vbp_picture_data_h264 *picData = data->pic_data; + + if (mAcquiredBuffer == NULL || mAcquiredBuffer->renderBuffer.surface == VA_INVALID_SURFACE) { + ETRACE("mAcquiredBuffer is NULL. 
Implementation bug."); + return DECODE_FAIL; } + VTRACE("data->num_pictures = %d", data->num_pictures); + for (uint32_t picIndex = 0; picIndex < data->num_pictures; picIndex++, picData++) { + if (picData == NULL || picData->pic_parms == NULL || picData->slc_data == NULL || picData->num_slices == 0) { + return DECODE_PARSER_FAIL; + } - for (int32_t i = 0; i < mMetadata.naluNumber; i++) { - if (sizeLeft < 12) { - ETRACE("Not enough data to parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft); - return DECODE_INVALID_DATA; + if (picIndex > 0 && + (picData->pic_parms->CurrPic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) == 0) { + ETRACE("Packed frame is not supported yet!"); + return DECODE_FAIL; } - sizeLeft -= 12; - // read NALU offset - memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t)); - pByteStream += 4; + VTRACE("picData->num_slices = %d", picData->num_slices); + for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) { + status = decodeSlice(data, picIndex, sliceIndex); + if (status != DECODE_SUCCESS) { + endDecodingFrame(true); + // remove current frame from DPB as it can't be decoded. + removeReferenceFromDPB(picData->pic_parms); + return status; + } + } + } + return DECODE_SUCCESS; +} - // read NALU size - memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t)); - pByteStream += 4; +Decode_Status VideoDecoderAVCSecure::parseSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data) { + Decode_Status status; + VAStatus vaStatus; - // read NALU header length - memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t)); - pByteStream += 4; + VABufferID sliceheaderbufferID; + VABufferID pictureparameterparsingbufferID; + VABufferID mSlicebufferID; + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); - if (sizeLeft < pNaluInfo->naluHeaderLen) { - ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen); - return DECODE_INVALID_DATA; - } + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAParseSliceHeaderGroupBufferType, + MAX_SLICEHEADER_BUFFER_SIZE, + 1, + NULL, + &sliceheaderbufferID); + CHECK_VA_STATUS("vaCreateSliceHeaderGroupBuffer"); + + void *sliceheaderbuf; + vaStatus = vaMapBuffer( + mVADisplay, + sliceheaderbufferID, + &sliceheaderbuf); + CHECK_VA_STATUS("vaMapBuffer"); - sizeLeft -= pNaluInfo->naluHeaderLen; + memset(sliceheaderbuf, 0, MAX_SLICEHEADER_BUFFER_SIZE); - if (pNaluInfo->naluHeaderLen) { - // copy start code prefix to buffer - memcpy(mNaluHeaderBuffer + sizeAccumulated, - startcodePrefix, - STARTCODE_PREFIX_LEN); - sizeAccumulated += STARTCODE_PREFIX_LEN; + vaStatus = vaUnmapBuffer( + mVADisplay, + sliceheaderbufferID); + CHECK_VA_STATUS("vaUnmapBuffer"); - // copy NALU header - memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen); - pByteStream += pNaluInfo->naluHeaderLen; - sizeAccumulated += pNaluInfo->naluHeaderLen; - } else { - WTRACE("header len is zero for NALU %d", i); - } + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + mFrameSize, //size + 1, //num_elements + mFrameData, + &mSlicebufferID); + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); - // for next NALU - pNaluInfo++; - } + data->pic_parse_buffer->frame_buf_id = mSlicebufferID; + data->pic_parse_buffer->slice_headers_buf_id = sliceheaderbufferID; + data->pic_parse_buffer->frame_size = mFrameSize; + 
data->pic_parse_buffer->slice_headers_size = MAX_SLICEHEADER_BUFFER_SIZE; + +#if 0 + + VTRACE("flags.bits.frame_mbs_only_flag = %d", data->pic_parse_buffer->flags.bits.frame_mbs_only_flag); + VTRACE("flags.bits.pic_order_present_flag = %d", data->pic_parse_buffer->flags.bits.pic_order_present_flag); + VTRACE("flags.bits.delta_pic_order_always_zero_flag = %d", data->pic_parse_buffer->flags.bits.delta_pic_order_always_zero_flag); + VTRACE("flags.bits.redundant_pic_cnt_present_flag = %d", data->pic_parse_buffer->flags.bits.redundant_pic_cnt_present_flag); + VTRACE("flags.bits.weighted_pred_flag = %d", data->pic_parse_buffer->flags.bits.weighted_pred_flag); + VTRACE("flags.bits.entropy_coding_mode_flag = %d", data->pic_parse_buffer->flags.bits.entropy_coding_mode_flag); + VTRACE("flags.bits.deblocking_filter_control_present_flag = %d", data->pic_parse_buffer->flags.bits.deblocking_filter_control_present_flag); + VTRACE("flags.bits.weighted_bipred_idc = %d", data->pic_parse_buffer->flags.bits.weighted_bipred_idc); + + VTRACE("pic_parse_buffer->expected_pic_parameter_set_id = %d", data->pic_parse_buffer->expected_pic_parameter_set_id); + VTRACE("pic_parse_buffer->num_slice_groups_minus1 = %d", data->pic_parse_buffer->num_slice_groups_minus1); + VTRACE("pic_parse_buffer->chroma_format_idc = %d", data->pic_parse_buffer->chroma_format_idc); + VTRACE("pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4 = %d", data->pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4); + VTRACE("pic_parse_buffer->pic_order_cnt_type = %d", data->pic_parse_buffer->pic_order_cnt_type); + VTRACE("pic_parse_buffer->residual_colour_transform_flag = %d", data->pic_parse_buffer->residual_colour_transform_flag); + VTRACE("pic_parse_buffer->num_ref_idc_l0_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l0_active_minus1); + VTRACE("pic_parse_buffer->num_ref_idc_l1_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l1_active_minus1); +#endif + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAParsePictureParameterBufferType, + sizeof(VAParsePictureParameterBuffer), + 1, + data->pic_parse_buffer, + &pictureparameterparsingbufferID); + CHECK_VA_STATUS("vaCreatePictureParameterParsingBuffer"); + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + &pictureparameterparsingbufferID, + 1); + CHECK_VA_STATUS("vaRenderPicture"); + + vaStatus = vaMapBuffer( + mVADisplay, + sliceheaderbufferID, + &sliceheaderbuf); + CHECK_VA_STATUS("vaMapBuffer"); + + status = updateSliceParameter(data,sliceheaderbuf); + CHECK_STATUS("processSliceHeader"); - buffer->data = mNaluHeaderBuffer; - buffer->size = sizeAccumulated; + vaStatus = vaUnmapBuffer( + mVADisplay, + sliceheaderbufferID); + CHECK_VA_STATUS("vaUnmapBuffer"); - return VideoDecoderAVC::decode(buffer); + return DECODE_SUCCESS; } +Decode_Status VideoDecoderAVCSecure::updateSliceParameter(vbp_data_h264 *data, void *sliceheaderbuf) { + VTRACE("VideoDecoderAVCSecure::updateSliceParameter"); + Decode_Status status; + status = VideoDecoderBase::updateBuffer( + (uint8_t *)sliceheaderbuf, + MAX_SLICEHEADER_BUFFER_SIZE, + (void**)&data); + CHECK_STATUS("updateBuffer"); + return DECODE_SUCCESS; +} Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) { - Decode_Status status; VAStatus vaStatus; uint32_t bufferIDCount = 0; - // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data - VABufferID bufferIDs[4]; + // maximum 3 buffers to render a slice: picture parameter, 
IQMatrix, slice parameter + VABufferID bufferIDs[3]; vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]); vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); @@ -231,19 +442,8 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p // either condition indicates start of a new frame if (sliceParam->first_mb_in_slice != 0) { WTRACE("The first slice is lost."); - // TODO: handle the first slice lost } - if (mDecodingFrame) { - // interlace content, complete decoding the first field - vaStatus = vaEndPicture(mVADisplay, mVAContext); - CHECK_VA_STATUS("vaEndPicture"); - - // for interlace content, top field may be valid only after the second field is parsed - mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt; - } - - // Check there is no reference frame loss before decoding a frame - + VTRACE("Current frameidx = %d", mFrameIdx++); // Update the reference frames and surface IDs for DPB and current frame status = updateDPB(picParam); CHECK_STATUS("updateDPB"); @@ -252,10 +452,6 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p status = updateReferenceFrames(picData); CHECK_STATUS("updateReferenceFrames"); - vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); - CHECK_VA_STATUS("vaBeginPicture"); - - // start decoding a frame mDecodingFrame = true; vaStatus = vaCreateBuffer( @@ -284,33 +480,7 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p status = setReference(sliceParam); CHECK_STATUS("setReference"); - // find which naluinfo is correlated to current slice - int naluIndex = 0; - uint32_t accumulatedHeaderLen = 0; - uint32_t headerLen = 0; - for (; naluIndex < mMetadata.naluNumber; naluIndex++) { - headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen; - if (headerLen == 0) { - WTRACE("lenght of current NAL unit is 0."); - continue; - } - accumulatedHeaderLen += STARTCODE_PREFIX_LEN; - if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) { - break; - } - accumulatedHeaderLen += headerLen; - } - - if (sliceData->slice_offset != accumulatedHeaderLen) { - WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen); - } - - sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen; - uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset; - uint32_t slice_offset_shift = sliceOffset % 16; - sliceParam->slice_data_offset += slice_offset_shift; - sliceData->slice_size = (sliceParam->slice_data_size + slice_offset_shift + 0xF) & ~0xF; - + sliceParam->slice_data_size = mFrameSize; vaStatus = vaCreateBuffer( mVADisplay, mVAContext, @@ -322,31 +492,6 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p CHECK_VA_STATUS("vaCreateSliceParameterBuffer"); bufferIDCount++; - // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to start codes of current NAL unit - // offset points to first byte of NAL unit - - if (mInputBuffer != NULL) { - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VASliceDataBufferType, - sliceData->slice_size, //Slice size - 1, // num_elements - mInputBuffer + sliceOffset - slice_offset_shift, - &bufferIDs[bufferIDCount]); - } else { - vaStatus = vaCreateBuffer( - mVADisplay, - mVAContext, - VAProtectedSliceDataBufferType, - sliceData->slice_size, //size - 1, //num_elements - (uint8_t*)sliceOffset, // IMR offset - &bufferIDs[bufferIDCount]); - } - 
CHECK_VA_STATUS("vaCreateSliceDataBuffer"); - bufferIDCount++; - vaStatus = vaRenderPicture( mVADisplay, mVAContext, @@ -354,165 +499,25 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p bufferIDCount); CHECK_VA_STATUS("vaRenderPicture"); - return DECODE_SUCCESS; -} - + VABufferID slicebufferID; -// Parse byte string pattern "0x000001" (3 bytes) in the current buffer. -// Returns offset of position following the pattern in the buffer if pattern is found or -1 if not found. -int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) { - uint8_t *ptr; - uint32_t left = 0, data = 0, phase = 0; - uint8_t mask1 = 0, mask2 = 0; - - /* Meaning of phase: - 0: initial status, "0x000001" bytes are not found so far; - 1: one "0x00" byte is found; - 2: two or more consecutive "0x00" bytes" are found; - 3: "0x000001" patten is found ; - 4: if there is one more byte after "0x000001"; - */ - - left = length; - ptr = (uint8_t *) (stream + offset); - phase = 0; - - // parse until there is more data and start code not found - while ((left > 0) && (phase < 3)) { - // Check if the address is 32-bit aligned & phase=0, if thats the case we can check 4 bytes instead of one byte at a time. - if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) { - while (left > 3) { - data = *((uint32_t *)ptr); - mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0)); - mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1)); - // If second byte and fourth byte are not zero's then we cannot have a start code here, - // as we need two consecutive zero bytes for a start code pattern. - if (mask1 && mask2) { - // skip 4 bytes and start over - ptr += 4; - left -=4; - continue; - } else { - break; - } - } - } - - // At this point either data is not on a 32-bit boundary or phase > 0 so we look at one byte at a time - if (left > 0) { - if (*ptr == STARTCODE_00) { - phase++; - if (phase > 2) { - // more than 2 consecutive '0x00' bytes is found - phase = 2; - } - } else if ((*ptr == STARTCODE_01) && (phase == 2)) { - // start code is found - phase = 3; - } else { - // reset lookup - phase = 0; - } - ptr++; - left--; - } - } - - if ((left > 0) && (phase == 3)) { - phase = 4; - // return offset of position following the pattern in the buffer which matches "0x000001" byte string - return (int32_t)(ptr - stream); - } - return -1; -} - - -Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) { - uint8_t naluType; - int32_t naluHeaderLen; - - naluType = *(uint8_t *)(stream + naluStream->naluOffset); - naluType &= NALU_TYPE_MASK; - // first update nalu header length based on nalu type - if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) { - // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes - naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE); - } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) { - //sps, pps, sei, etc, return the entire NAL unit in clear - naluHeaderLen = naluStream->naluLen; - } else { - return DECODE_FRAME_DROPPED; - } - - memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t)); - naluStream->streamPos += 4; - - memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t)); - naluStream->streamPos += 4; + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + mFrameSize, //size + 1, //num_elements + mFrameData, + &slicebufferID); + 
CHECK_VA_STATUS("vaCreateSliceDataBuffer"); - memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t)); - naluStream->streamPos += 4; + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + &slicebufferID, + 1); + CHECK_VA_STATUS("vaRenderPicture"); - if (naluHeaderLen) { - memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen); - naluStream->streamPos += naluHeaderLen; - } return DECODE_SUCCESS; -} - - -// parse start-code prefixed stream, also knowns as Annex B byte stream, commonly used in AVI, ES, MPEG2 TS container -Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) { - int32_t naluOffset, offset, left; - NaluInfo *info; - uint32_t ret = DECODE_SUCCESS; - - naluOffset = 0; - offset = 0; - left = length; - - // leave 4 bytes to copy nalu count - naluStream->streamPos = 4; - naluStream->naluCount = 0; - memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER); - - for (; ;) { - naluOffset = findNalUnitOffset(stream, offset, left); - if (naluOffset == -1) { - break; - } - - if (naluStream->naluCount == 0) { - naluStream->naluOffset = naluOffset; - } else { - naluStream->naluLen = naluOffset - naluStream->naluOffset - STARTCODE_PREFIX_LEN; - ret = copyNaluHeader(stream, naluStream); - if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) { - LOGW("copyNaluHeader returned %d", ret); - return ret; - } - // starting position for next NALU - naluStream->naluOffset = naluOffset; - } - if (ret == DECODE_SUCCESS) { - naluStream->naluCount++; - } - - // update next lookup position and length - offset = naluOffset + 1; // skip one byte of NAL unit type - left = length - offset; - } - - if (naluStream->naluCount > 0) { - naluStream->naluLen = length - naluStream->naluOffset; - memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t)); - // ignore return value, either DECODE_SUCCESS or DECODE_FRAME_DROPPED - copyNaluHeader(stream, naluStream); - return DECODE_SUCCESS; - } - - LOGW("number of valid NALU is 0!"); - return DECODE_SUCCESS; } - diff --git a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h index af5ae44..6378243 100644 --- a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h +++ b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h @@ -22,62 +22,34 @@ * */ -#ifndef VIDEO_DECODER_AVC_SECURE_H_ -#define VIDEO_DECODER_AVC_SECURE_H_ +#ifndef VIDEO_DECODER_AVC_SECURE_H +#define VIDEO_DECODER_AVC_SECURE_H +#include "VideoDecoderBase.h" #include "VideoDecoderAVC.h" - +#include "VideoDecoderDefs.h" class VideoDecoderAVCSecure : public VideoDecoderAVC { public: VideoDecoderAVCSecure(const char *mimeType); - virtual ~VideoDecoderAVCSecure(); - virtual Decode_Status start(VideoConfigBuffer *buffer); virtual void stop(void); // data in the decoded buffer is all encrypted. 
    virtual Decode_Status decode(VideoDecodeBuffer *buffer);
-
-private:
-    enum {
-        MAX_SLICE_HEADER_SIZE = 30,
-        MAX_NALU_HEADER_BUFFER = 8192,
-        MAX_NALU_NUMBER = 400, // > 4096/12
-    };
-
-    // Information of Network Abstraction Layer Unit
-    struct NaluInfo {
-        int32_t naluOffset; // offset of NAL unit in the firewalled buffer
-        int32_t naluLen; // length of NAL unit
-        int32_t naluHeaderLen; // length of NAL unit header
-    };
-
-    struct NaluMetadata {
-        NaluInfo *naluInfo;
-        int32_t naluNumber; // number of NAL units
-    };
-
-    struct NaluByteStream {
-        int32_t naluOffset;
-        int32_t naluLen;
-        int32_t streamPos;
-        uint8_t *byteStream; // 4 bytes of naluCount, 4 bytes of naluOffset, 4 bytes of naulLen, 4 bytes of naluHeaderLen, followed by naluHeaderData
-        int32_t naluCount;
-    };
-
+protected:
+    virtual Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data);
+    virtual Decode_Status continueDecodingFrame(vbp_data_h264 *data);
+    virtual Decode_Status beginDecodingFrame(vbp_data_h264 *data);
+    Decode_Status parseSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data);
+    Decode_Status updateSliceParameter(vbp_data_h264 *data, void *sliceheaderbuf);
     virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex);
-    int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length);
-    Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream);
-    Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream);
-
 private:
-    NaluMetadata mMetadata;
-    NaluByteStream mByteStream;
-    uint8_t *mNaluHeaderBuffer;
-    uint8_t *mInputBuffer;
+    int32_t mIsEncryptData;
+    int32_t mFrameSize;
+    uint8_t* mFrameData;
+    uint8_t* mClearData;
+    int32_t mFrameIdx;
 };
-
-
-#endif /* VIDEO_DECODER_AVC_SECURE_H_ */
+#endif
-- cgit v1.2.3

From 0f736e5a3c8a8c5e479358c4a110ad15aebc4b6d Mon Sep 17 00:00:00 2001
From: Jing SUN
Date: Thu, 13 Feb 2014 15:10:45 +0800
Subject: Corrected libmix image encoder's max coded size computing formula for small images on max quality.

BZ: 170493

Issue: the current max JPEG coded size computing formula fit large images, but its output was too small when working on small images (320*240 or 640*480 at quality 100).

Solution: Corrected that formula.

Change-Id: I670506c937090db19190b81803955f085a5ea93a
Signed-off-by: Jing SUN
---
 imageencoder/ImageEncoder.cpp | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/imageencoder/ImageEncoder.cpp b/imageencoder/ImageEncoder.cpp
index c7b522d..741b74c 100644
--- a/imageencoder/ImageEncoder.cpp
+++ b/imageencoder/ImageEncoder.cpp
@@ -284,6 +284,8 @@ int IntelImageEncoder::createContext(int first_image_seq, unsigned int *max_code
 	/* Create a coded buffer */
 	coded_buf_size = (((context_width+15)/16)*((context_height+15)/16)*160) + 640;
 	coded_buf_size = (coded_buf_size+0xf) & ~0xf;
+	coded_buf_size = (coded_buf_size<3145728)? 3145728:coded_buf_size; /* At least 3MB */
+
 	va_status = vaCreateBuffer(va_dpy, va_contextid, VAEncCodedBufferType, coded_buf_size, 1, NULL, &va_codedbufferid);
 	if (va_status != VA_STATUS_SUCCESS) {
-- cgit v1.2.3

From b7009592800d901856f475f3bdb5a48da067b2ec Mon Sep 17 00:00:00 2001
From: Tianmi Chen
Date: Tue, 11 Feb 2014 18:07:12 +0800
Subject: mixvbp: refine VP8 VA parameter usage

BZ: 158760

To follow the proposed change/clarification for dec_va_vp8.h, refine VP8 VA parameter usage. Changes to dec_va_vp8.h are below:
1. Remove mb_skip_coeff from VAPictureParameterBufferVP8
2. num_of_partitions now counts the control partition

Change-Id: I38876a3ab6a37d09f96183a13a7630ffe9708a2f
Signed-off-by: Tianmi Chen
---
 mixvbp/vbp_manager/vbp_vp8_parser.c | 17 +++++++++++------
 mixvbp/vbp_plugin/vp8/include/vp8.h |  1 -
 mixvbp/vbp_plugin/vp8/vp8parse.c    |  4 ----
 3 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/mixvbp/vbp_manager/vbp_vp8_parser.c b/mixvbp/vbp_manager/vbp_vp8_parser.c
index 29ee8ad..73d9281 100755
--- a/mixvbp/vbp_manager/vbp_vp8_parser.c
+++ b/mixvbp/vbp_manager/vbp_vp8_parser.c
@@ -414,7 +414,6 @@ static uint32_t vbp_add_pic_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *qu
     pic_parms->pic_fields.bits.sign_bias_alternate = pi->sign_bias_alternate;
     pic_parms->pic_fields.bits.mb_no_coeff_skip = pi->mb_no_coeff_skip;
-    pic_parms->pic_fields.bits.mb_skip_coeff = pi->mb_skip_coeff;
     pic_parms->prob_skip_false = pi->prob_skip_false;
     pic_parms->prob_intra = pi->prob_intra;
@@ -442,7 +441,7 @@ static uint32_t vbp_add_pic_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *qu
     /* Bool coder */
     pic_parms->bool_coder_ctx.range = pi->bool_coder.range;
     pic_parms->bool_coder_ctx.value = (pi->bool_coder.value >> 24) & 0xFF;
-    pic_parms->bool_coder_ctx.count = 8 - (pi->bool_coder.count & 0x07);
+    pic_parms->bool_coder_ctx.count = pi->bool_coder.count;
     //pic_parms->current_picture = VA_INVALID_SURFACE;
     pic_parms->last_ref_frame = VA_INVALID_SURFACE;
@@ -470,7 +469,8 @@ static uint32_t vbp_add_slice_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *
     vbp_picture_data_vp8 *pic_data = &(query_data->pic_data[pic_index]);
     vbp_slice_data_vp8 *slc_data = &(pic_data->slc_data[pic_data->num_slices]);
-    int slice_offset = (pi->frame_tag.frame_type == KEY_FRAME) ? 10 : 3;
+    int tag_size = (pi->frame_tag.frame_type == KEY_FRAME) ? 10 : 3;
+    int slice_offset = pi->header_bits >> 3;
     slc_data->buffer_addr = pi->source + slice_offset;
     slc_data->slice_offset = 0;
     slc_data->slice_size = pi->source_sz - slice_offset;
@@ -486,16 +486,21 @@ static uint32_t vbp_add_slice_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *
     slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;

     /* the offset to the first bit of MB from the first byte of slice data */
-    slc_parms->macroblock_offset = pi->header_bits - (slice_offset << 3);
+    slc_parms->macroblock_offset = 8 - pi->bool_coder.count;

     /* Token Partitions */
-    slc_parms->num_of_partitions = pi->partition_count;
-    slc_parms->partition_size[0] = pi->frame_tag.first_part_size;
+    slc_parms->num_of_partitions = pi->partition_count + 1;
+    slc_parms->partition_size[0] = pi->frame_tag.first_part_size - (pi->frame_data_offset - tag_size);
     for (part_index = 1; part_index < 9; part_index++)
     {
         slc_parms->partition_size[part_index] = pi->partition_size[part_index - 1];
     }
+    // Do not remove below code, for the purpose of debug
+#if 0
+    ITRACE("header_bits = %d, slice_offset = %d, mb_offset = %d, first_part_size = %d, frame_data_offset = %d, partition_size[0] = %d, pos = %d, count = %d ", pi->header_bits, slice_offset, slc_parms->macroblock_offset, pi->frame_tag.first_part_size, pi->frame_data_offset, slc_parms->partition_size[0], pi->bool_coder.pos, pi->bool_coder.count);
+#endif
+
     pic_data->num_slices++;
     if (pic_data->num_slices > VP8_MAX_NUM_SLICES)
     {
diff --git a/mixvbp/vbp_plugin/vp8/include/vp8.h b/mixvbp/vbp_plugin/vp8/include/vp8.h
index 06a7e61..e6fe5b7 100755
--- a/mixvbp/vbp_plugin/vp8/include/vp8.h
+++ b/mixvbp/vbp_plugin/vp8/include/vp8.h
@@ -334,7 +334,6 @@ extern "C" {
         // Macroblock No Coeff Skip
         uint8_t mb_no_coeff_skip;
         vp8_prob prob_skip_false;
-        vp8_prob mb_skip_coeff;
        // Frame context
        FrameContextData FrameContext;
diff --git a/mixvbp/vbp_plugin/vp8/vp8parse.c b/mixvbp/vbp_plugin/vp8/vp8parse.c
index f3c789d..c2c0565 100755
--- a/mixvbp/vbp_plugin/vp8/vp8parse.c
+++ b/mixvbp/vbp_plugin/vp8/vp8parse.c
@@ -451,10 +451,6 @@ void vp8_parse_remaining_frame_header_data(vp8_Info *pi)
     {
         pi->prob_skip_false = (vp8_prob)vp8_read_bits(bc, 8);
     }
-    else
-    {
-        pi->mb_skip_coeff = 0;
-    }

     if (pi->frame_tag.frame_type == INTER_FRAME)
     {
-- cgit v1.2.3

From 84779b42886126acab379f87253b950e64e34fca Mon Sep 17 00:00:00 2001
From: Dan Liang
Date: Thu, 20 Feb 2014 16:58:07 +0800
Subject: libmix: fix memory leak in decoder

BZ: 172176

Some buffers are allocated in setupVA but are never freed in terminateVA.

Change-Id: I1c880ce38c48679396b9a533a4750395e95eb171
Signed-off-by: Dan Liang
---
 videodecoder/VideoDecoderBase.cpp | 1 +
 1 file changed, 1 insertion(+)

diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp
index afcb8bf..4de762a 100644
--- a/videodecoder/VideoDecoderBase.cpp
+++ b/videodecoder/VideoDecoderBase.cpp
@@ -972,6 +972,7 @@ Decode_Status VideoDecoderBase::terminateVA(void) {
     }

     if (mVASurfaceAttrib) {
+        if (mVASurfaceAttrib->buffers) free(mVASurfaceAttrib->buffers);
         delete mVASurfaceAttrib;
         mVASurfaceAttrib = NULL;
     }
-- cgit v1.2.3

From fab7bc852308deb2da96f1422192fbe5d910ef36 Mon Sep 17 00:00:00 2001
From: Jing SUN
Date: Tue, 25 Feb 2014 15:40:42 +0800
Subject: Increased JPEG encoding's internal coded buffer size.

BZ: 166113

Issue: Found that under certain circumstances the source image data output from the Camera Imaging system could be extremely complex, so that the current JPEG encoding's internal coded buffer became too small to hold the coded output.

Solution: Increased JPEG encoding's internal coded buffer size to make it fit even the extreme cases.

Change-Id: I46fe64d3fa1983427787fb720a9b9d8d52d55604
Signed-off-by: Jing SUN
---
 imageencoder/ImageEncoder.cpp | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/imageencoder/ImageEncoder.cpp b/imageencoder/ImageEncoder.cpp
index 741b74c..ec89fc6 100644
--- a/imageencoder/ImageEncoder.cpp
+++ b/imageencoder/ImageEncoder.cpp
@@ -282,9 +282,8 @@ int IntelImageEncoder::createContext(int first_image_seq, unsigned int *max_code
 	/* Create a coded buffer */
-	coded_buf_size = (((context_width+15)/16)*((context_height+15)/16)*160) + 640;
+	coded_buf_size = (((context_width+15)/16)*((context_height+15)/16)*320) + 640;
 	coded_buf_size = (coded_buf_size+0xf) & ~0xf;
-	coded_buf_size = (coded_buf_size<3145728)? 3145728:coded_buf_size; /* At least 3MB */

 	va_status = vaCreateBuffer(va_dpy, va_contextid, VAEncCodedBufferType, coded_buf_size, 1, NULL, &va_codedbufferid);
-- cgit v1.2.3

From c9f54c6030b309d573b2a8e113c999508761e937 Mon Sep 17 00:00:00 2001
From: wfeng6
Date: Thu, 27 Feb 2014 22:54:27 +0800
Subject: Remove the annoying debug messages in LibMIX

BZ: 174579

Remove the annoying "short format parsing: no need to go on" messages printed by LibMIX.
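For reference, the two macros involved differ only in the trace level they are gated on; a minimal sketch of the assumed shape (trace_print and the TRACE_LEVEL_* names are illustrative stand-ins, not the project's actual mixlog definitions):

    // sketch only: assumed shape of the level-gated trace macros
    #define ITRACE(...) trace_print(TRACE_LEVEL_INFO,    __VA_ARGS__) // emitted by default
    #define VTRACE(...) trace_print(TRACE_LEVEL_VERBOSE, __VA_ARGS__) // silent unless verbose logging is on

Demoting the call from ITRACE to VTRACE therefore keeps the message available for debugging while silencing it in normal runs.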
Change-Id: Ied651076625b26fac9747a97db7e55d751c5aff6
Signed-off-by: wfeng6
---
 mixvbp/vbp_plugin/h264/h264parse_sh.c | 2 +-
 videodecoder/VideoDecoderBase.cpp     | 3 +++
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/mixvbp/vbp_plugin/h264/h264parse_sh.c b/mixvbp/vbp_plugin/h264/h264parse_sh.c
index 71d16f6..625e146 100755
--- a/mixvbp/vbp_plugin/h264/h264parse_sh.c
+++ b/mixvbp/vbp_plugin/h264/h264parse_sh.c
@@ -229,7 +229,7 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice
         keepParsing = h264_is_new_picture_start(pInfo, *SliceHeader, pInfo->SliceHeader) &&
             (SliceHeader->nal_ref_idc != 0);
         if (!keepParsing)
         {
-            ITRACE("short format parsing: no need to go on!");
+            VTRACE("short format parsing: no need to go on!");
             ret = H264_STATUS_OK;
             break;
         }
diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp
index 4de762a..b71323d 100644
--- a/videodecoder/VideoDecoderBase.cpp
+++ b/videodecoder/VideoDecoderBase.cpp
@@ -1279,6 +1279,8 @@ void VideoDecoderBase::querySurfaceRenderStatus(VideoSurfaceBuffer* surface) {
         return;
     }
     surface->renderBuffer.driverRenderDone = true;
+
+#ifndef USE_GEN_HW
     if (surface->renderBuffer.surface != VA_INVALID_SURFACE &&
         (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) {
@@ -1288,6 +1290,7 @@ void VideoDecoderBase::querySurfaceRenderStatus(VideoSurfaceBuffer* surface) {
         surface->renderBuffer.driverRenderDone = false;
     }
+#endif
 }
-- cgit v1.2.3

From e1d9c7401dec79cfa8aff2b2adbbdca968d78cfc Mon Sep 17 00:00:00 2001
From: Tianmi Chen
Date: Thu, 27 Feb 2014 09:47:29 +0800
Subject: libmix: add a flag indicating whether video decoder buffer contains only one field

BZ: 173283

For some interlaced clips, if the video decoder buffer contains only one field, add a flag to recognize it. Currently it is only available for AVC.

Change-Id: I90cc291a29626e7bf6b58a042b77c23430da6ba9
Signed-off-by: Tianmi Chen
---
 videodecoder/VideoDecoderAVC.cpp | 15 ++++++++++++---
 videodecoder/VideoDecoderDefs.h  |  4 ++++
 2 files changed, 16 insertions(+), 3 deletions(-)

diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp
index c3f3bd4..def0649 100755
--- a/videodecoder/VideoDecoderAVC.cpp
+++ b/videodecoder/VideoDecoderAVC.cpp
@@ -135,10 +135,13 @@ Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h
         return DECODE_NO_CONFIG;
     }

-    // Don't remove the following codes, it can be enabled for debugging DPB.
-#if 0
+    mVideoFormatInfo.flags = 0;
+    uint32_t fieldFlags = 0;
     for (unsigned int i = 0; i < data->num_pictures; i++) {
         VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic;
+        fieldFlags |= pic.flags;
+        // Don't remove the following codes, it can be enabled for debugging DPB.
+#if 0 VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d, reference = %d", i, buffer->timeStamp/1E6, @@ -147,8 +150,14 @@ Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h pic.flags, (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)); - } #endif + } + int32_t topField = fieldFlags & VA_PICTURE_H264_TOP_FIELD; + int32_t botField = fieldFlags & VA_PICTURE_H264_BOTTOM_FIELD; + if ((topField == 0 && botField != 0) || (topField != 0 && botField == 0)) { + mVideoFormatInfo.flags |= IS_SINGLE_FIELD; + } + if (data->new_sps || data->new_pps) { status = handleNewSequence(data); CHECK_STATUS("handleNewSequence"); diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index fc347ac..c74acf6 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -110,6 +110,9 @@ typedef enum { // indicate the frame has resolution change IS_RESOLUTION_CHANGE = 0x40000, + + // indicate whether video decoder buffer contains only one field + IS_SINGLE_FIELD = 0x80000, } VIDEO_BUFFER_FLAG; typedef enum @@ -216,6 +219,7 @@ struct VideoFormatInfo { int32_t framerateNom; int32_t framerateDenom; int32_t actualBufferNeeded; + int32_t flags; // indicate whether current picture is field or frame VideoExtensionBuffer *ext; }; -- cgit v1.2.3 From fd005037b68c23a4579aa61ffeefdb3b322142a4 Mon Sep 17 00:00:00 2001 From: Fei Jiang Date: Sun, 2 Mar 2014 23:31:03 +0800 Subject: libmix: correct the type of buffers in structure _VASurfaceAttributeTPI BZ: 172915 correct the type of buffers in structure _VASurfaceAttributeTPI Change-Id: I9ddbfb49478bca766c1d6518d34e18b460e5624a Signed-off-by: Fei Jiang --- imagedecoder/JPEGDecoder_img.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/imagedecoder/JPEGDecoder_img.cpp b/imagedecoder/JPEGDecoder_img.cpp index 165c138..b5d4358 100644 --- a/imagedecoder/JPEGDecoder_img.cpp +++ b/imagedecoder/JPEGDecoder_img.cpp @@ -109,7 +109,7 @@ JpegDecodeStatus JpegDecoder::createSurfaceGralloc(int width, int height, uint32 attrib_tpi.width = width; attrib_tpi.height = height; attrib_tpi.type = VAExternalMemoryAndroidGrallocBuffer; - attrib_tpi.buffers = (uint32_t*)&handle; + attrib_tpi.buffers = (unsigned long *)&handle; st = vaCreateSurfacesWithAttribute( mDisplay, -- cgit v1.2.3 From 1c1c897df062c36b7d7a33ce1080a5cde9314471 Mon Sep 17 00:00:00 2001 From: Austin Yuan Date: Thu, 27 Feb 2014 15:17:11 +0800 Subject: Use vaSyncSurface for sync instead of vaMapBuffer(codedBuffer,XXX) BZ: 175321 Change-Id: I16d2f080ab22e74437fbdafc80dced2b77950df5 Signed-off-by: Austin Yuan --- videoencoder/VideoEncoderBase.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 284ab52..99c2606 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -454,10 +454,10 @@ Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint3 mOutCodedBuffer = mCurOutputTask->coded_buffer; // Check frame skip - // Need map buffer before calling query surface below to get the right skip frame flag for current frame + // Need encoding to be completed before calling query surface below to + // get the right skip frame flag for current frame // It is a requirement of video driver - vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); - vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); + 
vaSyncSurface(mVADisplay, mCurOutputTask->enc_surface); vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus); CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); -- cgit v1.2.3 From 6748d674ec40be0a92bf8711bcf950f83ba0f5c1 Mon Sep 17 00:00:00 2001 From: gji2 Date: Tue, 4 Mar 2014 01:21:53 +0800 Subject: Refine the error return value for create surface function BZ: 165996 Refine the error return value for create surface function. Resolve the widi crash issue. Change-Id: Ieba3fc63e768b416ceeace9a9b4a5d3d56821db1 Signed-off-by: gji2 --- videoencoder/VideoEncoderUtils.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp index 210881e..2657e01 100644 --- a/videoencoder/VideoEncoderUtils.cpp +++ b/videoencoder/VideoEncoderUtils.cpp @@ -733,9 +733,10 @@ VASurfaceID VASurfaceMap::CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, vinfo.width, vinfo.height, &surface, 1, attribs, 2); - if (vaStatus != VA_STATUS_SUCCESS) + if (vaStatus != VA_STATUS_SUCCESS){ LOG_E("vaCreateSurfaces failed. vaStatus = %d\n", vaStatus); - + surface = VA_INVALID_SURFACE; + } return surface; } -- cgit v1.2.3 From f0a62a2b9a168402992775792bb71b9fce813896 Mon Sep 17 00:00:00 2001 From: Dan Liang Date: Thu, 6 Mar 2014 14:41:48 +0800 Subject: libmix: refine decoder for adaptive video playback BZ: 177230 Set the output window size to the max value and disable low delay mode when Adaptive Video Playback is enabled. Change-Id: I13dfdde755a5a6f31f81539d1ba39f271256912b Signed-off-by: Dan Liang --- videodecoder/VideoDecoderAVC.cpp | 16 ++++++++++------ videodecoder/VideoDecoderDefs.h | 3 +++ 2 files changed, 13 insertions(+), 6 deletions(-) mode change 100755 => 100644 videodecoder/VideoDecoderAVC.cpp diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp old mode 100755 new mode 100644 index def0649..a029468 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -237,6 +237,7 @@ Decode_Status VideoDecoderAVC::beginDecodingFrame(vbp_data_h264 *data) { mAcquiredBuffer->renderBuffer.flag = 0; mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS; mAcquiredBuffer->pictureOrder = getPOC(picture); + if (mSizeChanged) { mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE; mSizeChanged = false; @@ -677,7 +678,7 @@ Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) { } } - VideoDecoderBase::setOutputWindowSize(DPBSize); + VideoDecoderBase::setOutputWindowSize(mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK ? OUTPUT_WINDOW_SIZE : DPBSize); updateFormatInfo(data); // for 1080p, limit the total surface to 19, according the hardware limitation @@ -687,10 +688,15 @@ Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) { DPBSize = 19 - AVC_EXTRA_SURFACE_NUMBER; } - // for baseline profile, enable low delay mode automatically - if (data->codec_data->profile_idc == 66) { - enableLowDelayMode(true); + if (mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK) { + // When Adaptive playback is enabled, turn off low delay mode. + // Otherwise there may be a 240ms stuttering if the output mode is changed from LowDelay to Delay. 
+        enableLowDelayMode(false);
+    } else {
+        // for baseline profile, enable low delay mode automatically
+        enableLowDelayMode(data->codec_data->profile_idc == 66);
     }
+
     return VideoDecoderBase::setupVA(DPBSize + AVC_EXTRA_SURFACE_NUMBER, vaProfile);
 }
@@ -713,8 +719,6 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) {
         mVideoFormatInfo.height = height;
     }
-
-
     // video_range has default value of 0.
     mVideoFormatInfo.videoRange = data->codec_data->video_full_range_flag;
diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h
index c74acf6..67139f0 100644
--- a/videodecoder/VideoDecoderDefs.h
+++ b/videodecoder/VideoDecoderDefs.h
@@ -113,6 +113,9 @@ typedef enum {
     // indicate whether video decoder buffer contains only one field
     IS_SINGLE_FIELD = 0x80000,
+
+    // indicate adaptive playback mode
+    WANT_ADAPTIVE_PLAYBACK = 0x100000,
 } VIDEO_BUFFER_FLAG;

 typedef enum
-- cgit v1.2.3

From 7b4dcd1d1e92e7eaf468d15a32126ddb52cd93e2 Mon Sep 17 00:00:00 2001
From: Jing SUN
Date: Tue, 11 Mar 2014 15:29:24 +0800
Subject: Make libmix_imageencoder_tester use uncached source image buffer.

BZ: 175701

Issue: When SLC was enabled on MOFD, an uncached source buffer needed to be used, or any data in a cached source buffer had to be guaranteed to be up-to-date.

Solution: In Camera capturing cases being up-to-date was naturally guaranteed, and this patch made the file2file test cases use an uncached source buffer.

Change-Id: I4c7aa9b717faaefbc35e707bc8c1fd1c9d08f2d1
Signed-off-by: Jing SUN
---
 imageencoder/test/main.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/imageencoder/test/main.cpp b/imageencoder/test/main.cpp
index 2707680..a1eeb02 100644
--- a/imageencoder/test/main.cpp
+++ b/imageencoder/test/main.cpp
@@ -86,7 +86,7 @@ int main(int argc, char** argv)
 	/* Parameter variables */
 	char *source_name = NULL;
 	char *output_name = (char *)"./output.jpg";
-	int surface_type = 0;
+	int surface_type = 1;
 	int quality = DEFAULT_QUALITY;
 	int burst = DEFAULT_BURST;
 	int width = 0, height = 0;
@@ -283,7 +283,7 @@ int main(int argc, char** argv)
 			((unsigned int)source_buffer)%4096 + 4096);
 	} else { /* gralloc */
 		gralloc_buffer = new GraphicBuffer(width, height, VA_FOURCC_NV12,
-			GraphicBuffer::USAGE_SW_WRITE_RARELY);
+			GraphicBuffer::USAGE_HW_RENDER);
 		if (NULL == gralloc_buffer) {
 			fprintf(stderr, "Allocating GraphicBuffer failed!\n");
 			close(source_fd);
-- cgit v1.2.3

From 3354ee6436a126d62c7f18b7c0762d6ca03684e3 Mon Sep 17 00:00:00 2001
From: liubolun
Date: Wed, 26 Feb 2014 16:30:33 +0800
Subject: Implement VP8 temporal layer encode in libmix

BZ: 165682

Implement VP8 temporal layer encoding in libmix. Fix some code style issues; fix a data structure overflow issue.
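The intended usage is roughly the following (a sketch against the structures added below; "encoder" stands for an already-created VP8 encoder instance, and the layer count, bitrates and framerates are illustrative values only):

    // Before start(): declare how many temporal layers the stream carries.
    VideoParamsTemporalLayerNumber layers;
    layers.numberOfLayer = 3;                    // assumed 3-layer setup
    encoder->setParameters(&layers);

    // Per layer: give each temporal layer its own bitrate/framerate target.
    VideoConfigVP8TemporalBitRateFrameRate layerConfig;
    for (uint32_t id = 0; id < 3; id++) {
        layerConfig.layerID = id;
        layerConfig.bitRate = 250000 << id;      // e.g. a 250/500/1000 kbps ladder
        layerConfig.frameRate = 30 >> (2 - id);  // e.g. a 7/15/30 fps ladder
        encoder->setConfig(&layerConfig);
    }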
Change-Id: I14958416381597c4b47b74a6cd99d83880028b29 Signed-off-by: liubolun --- videoencoder/VideoEncoderBase.cpp | 31 +++++++++- videoencoder/VideoEncoderBase.h | 1 + videoencoder/VideoEncoderDef.h | 28 +++++++++ videoencoder/VideoEncoderVP8.cpp | 116 +++++++++++++++++++++++++++++++++++++- videoencoder/VideoEncoderVP8.h | 2 + 5 files changed, 174 insertions(+), 4 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 99c2606..6110343 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -894,6 +894,7 @@ void VideoEncoderBase::setDefaultParams() { mComParams.disableDeblocking = 2; mComParams.syncEncMode = false; mComParams.codedBufNum = 2; + mComParams.numberOfLayer = 1; mHrdParam.bufferSize = 0; mHrdParam.initBufferFullness = 0; @@ -906,7 +907,7 @@ Encode_Status VideoEncoderBase::setParameters( Encode_Status ret = ENCODE_SUCCESS; CHECK_NULL_RETURN_IFFAIL(videoEncParams); - LOG_I("Config type = %d\n", (int)videoEncParams->type); + LOG_I("Config type = %x\n", (int)videoEncParams->type); if (mStarted) { LOG_E("Encoder has been initialized, should use setConfig to change configurations\n"); @@ -975,6 +976,18 @@ Encode_Status VideoEncoderBase::setParameters( break; } + case VideoParamsTypeTemporalLayerNumber:{ + VideoParamsTemporalLayerNumber *numberoflayer = + reinterpret_cast (videoEncParams); + + if (numberoflayer->size != sizeof(VideoParamsTemporalLayerNumber)) { + return ENCODE_INVALID_PARAMS; + } + + mComParams.numberOfLayer = numberoflayer->numberOfLayer; + break; + } + case VideoParamsTypeAVC: case VideoParamsTypeH263: case VideoParamsTypeMP4: @@ -1086,6 +1099,19 @@ Encode_Status VideoEncoderBase::getParameters( } } + case VideoParamsTypeTemporalLayerNumber:{ + VideoParamsTemporalLayerNumber *numberoflayer = + reinterpret_cast (videoEncParams); + + if(numberoflayer->size != sizeof(VideoParamsTemporalLayerNumber)) { + return ENCODE_INVALID_PARAMS; + } + + numberoflayer->numberOfLayer = mComParams.numberOfLayer; + + break; + } + case VideoParamsTypeAVC: case VideoParamsTypeH263: case VideoParamsTypeMP4: @@ -1200,7 +1226,8 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { case VideoConfigTypeSliceNum: case VideoConfigTypeVP8: case VideoConfigTypeVP8ReferenceFrame: - case VideoConfigTypeVP8MaxFrameSizeRatio:{ + case VideoConfigTypeVP8MaxFrameSizeRatio: + case VideoConfigTypeVP8TemporalBitRateFrameRate:{ ret = derivedSetConfig(videoEncConfig); break; } diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 692162f..f4e58e0 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -121,6 +121,7 @@ protected: bool mRenderBitRate; bool mRenderHrd; bool mRenderMaxFrameSize; + bool mRenderMultiTemporal; VABufferID mSeqParamBuf; VABufferID mRcParamBuf; diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 9c8737d..c116412 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -315,6 +315,7 @@ enum VideoParamConfigType { VideoParamsTypeStoreMetaDataInBuffers, VideoParamsTypeProfileLevel, VideoParamsTypeVP8, + VideoParamsTypeTemporalLayerNumber, VideoConfigTypeFrameRate, VideoConfigTypeBitRate, @@ -330,6 +331,7 @@ enum VideoParamConfigType { VideoConfigTypeVP8ReferenceFrame, VideoConfigTypeCIR, VideoConfigTypeVP8MaxFrameSizeRatio, + VideoConfigTypeVP8TemporalBitRateFrameRate, VideoParamsConfigExtension }; @@ -364,6 +366,7 @@ struct VideoParamsCommon : 
VideoParamConfigSet { bool syncEncMode; //CodedBuffer properties uint32_t codedBufNum; + uint32_t numberOfLayer; VideoParamsCommon() { type = VideoParamsTypeCommon; @@ -388,6 +391,7 @@ struct VideoParamsCommon : VideoParamConfigSet { this->disableDeblocking = other.disableDeblocking; this->syncEncMode = other.syncEncMode; this->codedBufNum = other.codedBufNum; + this->numberOfLayer = other.numberOfLayer; return *this; } }; @@ -520,6 +524,17 @@ struct VideoParamsProfileLevel : VideoParamConfigSet { bool isSupported; }; +struct VideoParamsTemporalLayerNumber : VideoParamConfigSet { + + VideoParamsTemporalLayerNumber() { + type = VideoParamsTypeTemporalLayerNumber; + size = sizeof(VideoParamsTemporalLayerNumber); + } + + int32_t numberOfLayer; +}; + + struct VideoConfigFrameRate : VideoParamConfigSet { VideoConfigFrameRate() { @@ -684,6 +699,19 @@ struct VideoConfigVP8MaxFrameSizeRatio : VideoParamConfigSet { uint32_t max_frame_size_ratio; }; +struct VideoConfigVP8TemporalBitRateFrameRate : VideoParamConfigSet { + + VideoConfigVP8TemporalBitRateFrameRate() { + type = VideoConfigTypeVP8TemporalBitRateFrameRate; + size = sizeof(VideoConfigVP8TemporalBitRateFrameRate); + } + + uint32_t layerID; + uint32_t bitRate; + uint32_t frameRate; +}; + + #endif /* __VIDEO_ENCODER_DEF_H__ */ diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index 45655bd..4892cf0 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -44,6 +44,16 @@ VideoEncoderVP8::VideoEncoderVP8() mVideoConfigVP8ReferenceFrame.refresh_golden_frame = 1; mVideoConfigVP8ReferenceFrame.refresh_alternate_frame = 1; + mVideoConfigVP8TemporalBitRateFrameRate[0].bitRate = 0; + mVideoConfigVP8TemporalBitRateFrameRate[0].frameRate = 0; + mVideoConfigVP8TemporalBitRateFrameRate[0].layerID = 0; + mVideoConfigVP8TemporalBitRateFrameRate[1].bitRate = 0; + mVideoConfigVP8TemporalBitRateFrameRate[1].frameRate = 0; + mVideoConfigVP8TemporalBitRateFrameRate[1].layerID = 0; + mVideoConfigVP8TemporalBitRateFrameRate[2].bitRate = 0; + mVideoConfigVP8TemporalBitRateFrameRate[2].frameRate = 0; + mVideoConfigVP8TemporalBitRateFrameRate[2].layerID = 0; + mComParams.profile = VAProfileVP8Version0_3; } @@ -116,13 +126,13 @@ Encode_Status VideoEncoderVP8::renderPictureParams(EncodeTask *task) { CHECK_VA_STATUS_RETURN("vaRenderPicture"); LOG_V( "End\n"); - return ret; + return ret; } Encode_Status VideoEncoderVP8::renderRCParams(void) { VABufferID rc_param_buf; - VAStatus vaStatus = VA_STATUS_SUCCESS; + VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncMiscParameterBuffer *misc_param; VAEncMiscParameterRateControl *misc_rate_ctrl; @@ -172,6 +182,7 @@ Encode_Status VideoEncoderVP8::renderFrameRateParams(void) misc_framerate = (VAEncMiscParameterFrameRate *)misc_param->data; memset(misc_framerate, 0, sizeof(*misc_framerate)); misc_framerate->framerate = (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom; + vaUnmapBuffer(mVADisplay, framerate_param_buf); vaStatus = vaRenderPicture(mVADisplay,mVAContext, &framerate_param_buf, 1); @@ -237,6 +248,69 @@ Encode_Status VideoEncoderVP8::renderMaxFrameSizeParams(void) return 0; } +Encode_Status VideoEncoderVP8::renderMultiTemporalBitRateFrameRate(void) +{ + VABufferID rc_param_buf; + VABufferID framerate_param_buf; + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncMiscParameterBuffer *misc_param; + VAEncMiscParameterRateControl *misc_rate_ctrl; + VAEncMiscParameterFrameRate *misc_framerate; + + int i; + + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + 
VAEncMiscParameterBufferType,
+            sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl),
+            1, NULL, &rc_param_buf);
+
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    for (i = 0; i < 3; i++) {
+        vaStatus = vaMapBuffer(mVADisplay, rc_param_buf, (void **)&misc_param);
+        CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+        misc_param->type = VAEncMiscParameterTypeRateControl;
+        misc_rate_ctrl = (VAEncMiscParameterRateControl *)misc_param->data;
+        memset(misc_rate_ctrl, 0, sizeof(*misc_rate_ctrl));
+        misc_rate_ctrl->bits_per_second = mVideoConfigVP8TemporalBitRateFrameRate[i].bitRate;
+        misc_rate_ctrl->rc_flags.bits.temporal_id = 0;
+        misc_rate_ctrl->target_percentage = 100;
+        misc_rate_ctrl->window_size = 1000;
+        misc_rate_ctrl->initial_qp = mVideoParamsVP8.init_qp;
+        misc_rate_ctrl->min_qp = mVideoParamsVP8.min_qp;
+        misc_rate_ctrl->basic_unit_size = 0;
+        misc_rate_ctrl->max_qp = mVideoParamsVP8.max_qp;
+        vaUnmapBuffer(mVADisplay, rc_param_buf);
+
+        vaStatus = vaRenderPicture(mVADisplay, mVAContext, &rc_param_buf, 1);
+    }
+
+    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+            VAEncMiscParameterBufferType,
+            sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterFrameRate),
+            1, NULL, &framerate_param_buf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    for (i = 0; i < 3; i++) {
+        vaStatus = vaMapBuffer(mVADisplay, framerate_param_buf, (void **)&misc_param);
+        CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+        misc_param->type = VAEncMiscParameterTypeFrameRate;
+        misc_framerate = (VAEncMiscParameterFrameRate *)misc_param->data;
+        memset(misc_framerate, 0, sizeof(*misc_framerate));
+        misc_framerate->framerate_flags.bits.temporal_id = i;
+        misc_framerate->framerate = mVideoConfigVP8TemporalBitRateFrameRate[i].frameRate;
+
+        vaUnmapBuffer(mVADisplay, framerate_param_buf);
+
+        vaStatus = vaRenderPicture(mVADisplay, mVAContext, &framerate_param_buf, 1);
+    }
+
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    return 0;
+}
+
 Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) {
     Encode_Status ret = ENCODE_SUCCESS;
@@ -272,6 +346,13 @@ Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) {
         mRenderMaxFrameSize = false;
     }
 
+    if (mRenderMultiTemporal) {
+        ret = renderMultiTemporalBitRateFrameRate();
+        CHECK_ENCODE_STATUS_RETURN("renderMultiTemporalBitRateFrameRate");
+
+        mRenderMultiTemporal = false;
+    }
+
     ret = renderPictureParams(task);
     CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
@@ -308,6 +389,7 @@ Encode_Status VideoEncoderVP8::derivedGetParams(VideoParamConfigSet *videoEncPar
 
 Encode_Status VideoEncoderVP8::derivedGetConfig(VideoParamConfigSet *videoEncConfig) {
 
+    int layer_id;
     CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
 
     switch (videoEncConfig->type)
@@ -351,6 +433,20 @@ Encode_Status VideoEncoderVP8::derivedGetConfig(VideoParamConfigSet *videoEncCon
         }
         break;
 
+        case VideoConfigTypeVP8TemporalBitRateFrameRate:{
+            VideoConfigVP8TemporalBitRateFrameRate *encConfigVP8TemporalBitRateFrameRate =
+                    reinterpret_cast<VideoConfigVP8TemporalBitRateFrameRate *>(videoEncConfig);
+
+            if (encConfigVP8TemporalBitRateFrameRate->size != sizeof(VideoConfigVP8TemporalBitRateFrameRate)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+            layer_id = encConfigVP8TemporalBitRateFrameRate->layerID % 3;
+            encConfigVP8TemporalBitRateFrameRate->bitRate = mVideoConfigVP8TemporalBitRateFrameRate[layer_id].bitRate;
+            encConfigVP8TemporalBitRateFrameRate->frameRate = mVideoConfigVP8TemporalBitRateFrameRate[layer_id].frameRate;
+        }
+        break;
+
         default: {
             LOG_E ("Invalid Config Type");
             break;
@@ -362,6 +458,7 @@ Encode_Status VideoEncoderVP8::derivedGetConfig(VideoParamConfigSet *videoEncCon
 
 Encode_Status VideoEncoderVP8::derivedSetConfig(VideoParamConfigSet *videoEncConfig) {
 
+    int layer_id;
     CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
 
     //LOGE ("%s begin",__func__);
@@ -406,6 +503,21 @@ Encode_Status VideoEncoderVP8::derivedSetConfig(VideoParamConfigSet *videoEncCon
         }
         break;
 
+        case VideoConfigTypeVP8TemporalBitRateFrameRate:{
+            VideoConfigVP8TemporalBitRateFrameRate *encConfigVP8TemporalBitRateFrameRate =
+                    reinterpret_cast<VideoConfigVP8TemporalBitRateFrameRate *>(videoEncConfig);
+
+            if (encConfigVP8TemporalBitRateFrameRate->size != sizeof(VideoConfigVP8TemporalBitRateFrameRate)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+            layer_id = encConfigVP8TemporalBitRateFrameRate->layerID % 3;
+            mVideoConfigVP8TemporalBitRateFrameRate[layer_id].layerID = layer_id;
+            mVideoConfigVP8TemporalBitRateFrameRate[layer_id].bitRate = encConfigVP8TemporalBitRateFrameRate->bitRate;
+            mVideoConfigVP8TemporalBitRateFrameRate[layer_id].frameRate = encConfigVP8TemporalBitRateFrameRate->frameRate;
+            mRenderMultiTemporal = true;
+        }
+        break;
+
         default: {
             LOG_E ("Invalid Config Type");
             break;
diff --git a/videoencoder/VideoEncoderVP8.h b/videoencoder/VideoEncoderVP8.h
index 60ddca8..6a036f7 100644
--- a/videoencoder/VideoEncoderVP8.h
+++ b/videoencoder/VideoEncoderVP8.h
@@ -39,11 +39,13 @@ private:
     Encode_Status renderHRDParams(void);
     Encode_Status renderFrameRateParams(void);
     Encode_Status renderMaxFrameSizeParams(void);
+    Encode_Status renderMultiTemporalBitRateFrameRate(void);
 
     VideoConfigVP8 mVideoConfigVP8;
     VideoParamsVP8 mVideoParamsVP8;
     VideoConfigVP8ReferenceFrame mVideoConfigVP8ReferenceFrame;
+    VideoConfigVP8TemporalBitRateFrameRate mVideoConfigVP8TemporalBitRateFrameRate[3];
 };
 
 #endif /* __VIDEO_ENCODER_VP8_H__ */
-- cgit v1.2.3
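As a caller-side illustration of the new temporal-layer interface (not part of the patch: the encoder handle and its setParameters()/setConfig() entry points are assumed from the surrounding libmix encoder API, and the per-layer rates are arbitrary examples), programming three VP8 temporal layers could look like this:

    // Hypothetical client code; 'encoder' is an IVideoEncoder-style handle.
    VideoParamsTemporalLayerNumber layers;
    layers.numberOfLayer = 3;                 // advertise three temporal layers
    encoder->setParameters(&layers);

    VideoConfigVP8TemporalBitRateFrameRate cfg;
    for (uint32_t id = 0; id < 3; id++) {
        cfg.layerID   = id;                   // stored internally at layerID % 3
        cfg.bitRate   = 256000u << id;        // e.g. 256/512/1024 kbps
        cfg.frameRate = 30u >> (2 - id);      // e.g. 7/15/30 fps
        encoder->setConfig(&cfg);             // raises mRenderMultiTemporal
    }

On the next sendEncodeCommand() the raised flag makes renderMultiTemporalBitRateFrameRate() above emit one VAEncMiscParameterRateControl and one VAEncMiscParameterFrameRate buffer per layer.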
From 64157ddd4bcdcc188bfb424b34f8305894e6a8e8 Mon Sep 17 00:00:00 2001
From: Li Zeng
Date: Sat, 8 Mar 2014 03:01:48 +0800
Subject: decode: pass video crop size to driver

BZ: 177403

Pass the video crop size to the driver.

Change-Id: Iec6516a4314e628b222072fc416da724daf14886
Signed-off-by: Li Zeng
---
 videodecoder/VideoDecoderAVC.cpp   |  2 ++
 videodecoder/VideoDecoderBase.cpp  | 23 +++++++++++++++++++++++
 videodecoder/VideoDecoderBase.h    |  1 +
 videodecoder/VideoDecoderMPEG4.cpp |  4 ++++
 videodecoder/VideoDecoderVP8.cpp   |  2 ++
 videodecoder/VideoDecoderWMV.cpp   |  4 ++++
 6 files changed, 36 insertions(+)

diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp
index a029468..3ada6e1 100644
--- a/videodecoder/VideoDecoderAVC.cpp
+++ b/videodecoder/VideoDecoderAVC.cpp
@@ -786,6 +786,8 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) {
     ITRACE("actualBufferNeeded =%d", mVideoFormatInfo.actualBufferNeeded);
 
     mVideoFormatInfo.valid = true;
+
+    setRenderRect();
 }
 
 Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) {
diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp
index b71323d..bffb4f0 100644
--- a/videodecoder/VideoDecoderBase.cpp
+++ b/videodecoder/VideoDecoderBase.cpp
@@ -830,6 +830,8 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i
         return DECODE_MEMORY_FAIL;
     }
 
+    setRenderRect();
+
     int32_t format = VA_RT_FORMAT_YUV420;
     if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
 #ifndef USE_AVC_SHORT_FORMAT
@@ -1432,3 +1434,24 @@ void VideoDecoderBase::setRotationDegrees(int32_t rotationDegrees) {
 
     mRotationDegrees = rotationDegrees;
 }
+
+void VideoDecoderBase::setRenderRect() {
+
+    if (!mVADisplay)
+        return;
+
+    VAStatus ret;
+    VARectangle rect;
+    rect.x = mVideoFormatInfo.cropLeft;
+    rect.y = mVideoFormatInfo.cropTop;
+    rect.width = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight);
+    rect.height = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop);
+
+    VADisplayAttribute render_rect;
+    render_rect.type = VADisplayAttribRenderRect;
+    render_rect.value = (long)&rect;
+
+    ret = vaSetDisplayAttributes(mVADisplay, &render_rect, 1);
+    if (ret) {
+        ETRACE("Failed to set render rectangle.");
+    }
+}
diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h
index ab9193e..deaa5da 100755
--- a/videodecoder/VideoDecoderBase.h
+++ b/videodecoder/VideoDecoderBase.h
@@ -174,6 +174,7 @@ protected:
     void querySurfaceRenderStatus(VideoSurfaceBuffer* surface);
     void enableLowDelayMode(bool enable) {mLowDelay = enable;}
     void setRotationDegrees(int32_t rotationDegrees);
+    void setRenderRect(void);
 };
 
diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp
index e9b9305..f4aa164 100644
--- a/videodecoder/VideoDecoderMPEG4.cpp
+++ b/videodecoder/VideoDecoderMPEG4.cpp
@@ -122,6 +122,8 @@ Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) {
         } else {
             mSizeChanged = true;
         }
+
+        setRenderRect();
     }
 
     status = decodeFrame(buffer, data);
@@ -628,6 +630,8 @@ void VideoDecoderMPEG4::updateFormatInfo(vbp_data_mp42 *data) {
     mVideoFormatInfo.aspectY = data->codec_data.par_height;
     //mVideoFormatInfo.bitrate = data->codec_data.bit_rate;
     mVideoFormatInfo.valid = true;
+
+    setRenderRect();
 }
 
 Decode_Status VideoDecoderMPEG4::checkHardwareCapability(VAProfile profile) {
diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp
index 6cf584c..11bd297 100644
--- a/videodecoder/VideoDecoderVP8.cpp
+++ b/videodecoder/VideoDecoderVP8.cpp
@@ -77,6 +77,8 @@ void VideoDecoderVP8::updateFormatInfo(vbp_data_vp8 *data) {
     ITRACE("Cropping: left = %d, top = %d, right = %d, bottom = %d", data->codec_data->crop_left, data->codec_data->crop_top, data->codec_data->crop_right, data->codec_data->crop_bottom);
 
     mVideoFormatInfo.valid = true;
+
+    setRenderRect();
 }
 
 Decode_Status VideoDecoderVP8::startVA(vbp_data_vp8 *data) {
diff --git a/videodecoder/VideoDecoderWMV.cpp b/videodecoder/VideoDecoderWMV.cpp
index de065c7..831fe7f 100644
--- a/videodecoder/VideoDecoderWMV.cpp
+++ b/videodecoder/VideoDecoderWMV.cpp
@@ -121,6 +121,8 @@ Decode_Status VideoDecoderWMV::decode(VideoDecodeBuffer *buffer) {
                 && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight);
         }
 
+        setRenderRect();
+
         if (noNeedFlush) {
             mSizeChanged = true;
         } else {
@@ -503,6 +505,8 @@ void VideoDecoderWMV::updateFormatInfo(vbp_data_vc1 *data) {
     mVideoFormatInfo.aspectY = data->se_data->ASPECT_VERT_SIZE;
     mVideoFormatInfo.bitrate = 0; //data->se_data->bitrate;
     mVideoFormatInfo.valid = true;
+
+    setRenderRect();
 }
 
 Decode_Status VideoDecoderWMV::allocateVABufferIDs(int32_t number) {
-- cgit v1.2.3
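To make the effect of setRenderRect() concrete, here is a worked example with illustrative numbers (not taken from the patch): a 1080p H.264 stream is typically coded at 1920x1088 (macroblock-aligned) with a bottom crop of 8 lines, so the rectangle handed to the driver describes the 1920x1080 visible region rather than the padded coded surface:

    // Values such a stream would produce inside setRenderRect():
    VARectangle rect;
    rect.x      = 0;              // cropLeft
    rect.y      = 0;              // cropTop
    rect.width  = 1920 - (0 + 0); // width  - (cropLeft + cropRight) = 1920
    rect.height = 1088 - (8 + 0); // height - (cropBottom + cropTop) = 1080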
From 675ed92166233c44949b644d98b3c697a90ec098 Mon Sep 17 00:00:00 2001
From: "Gu, Wangyi"
Date: Tue, 18 Feb 2014 16:14:21 +0800
Subject: libmix: fix some compatibility issues in decode to support 64-bit user space

BZ: 178142

Fix some compatibility issues (H264/MPEG4/H263) in decode to support a 64-bit user space.

Change-Id: I2fad23bd25baeb366394214ede9209ac36781c8f
Signed-off-by: Gu, Wangyi
---
 mixvbp/vbp_manager/vbp_mp42_parser.c         |  2 +-
 mixvbp/vbp_manager/vbp_utils.h               |  2 +-
 mixvbp/vbp_manager/viddec_parse_sc.c         |  2 +-
 mixvbp/vbp_manager/viddec_pm_utils_bstream.c |  2 +-
 mixvbp/vbp_plugin/h264/h264parse.c           |  2 +-
 mixvbp/vbp_plugin/h264/h264parse_mem.c       | 26 +++++++++++++-------------
 mixvbp/vbp_plugin/h264/include/h264.h        | 22 +++++++++++-----------
 mixvbp/vbp_plugin/h264/include/h264parse.h   |  6 ++----
 mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c  |  2 +-
 mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h     |  1 -
 videodecoder/VideoDecoderBase.cpp            |  2 +-
 11 files changed, 33 insertions(+), 36 deletions(-)

diff --git a/mixvbp/vbp_manager/vbp_mp42_parser.c b/mixvbp/vbp_manager/vbp_mp42_parser.c
index c2be912..9b4c63f 100755
--- a/mixvbp/vbp_manager/vbp_mp42_parser.c
+++ b/mixvbp/vbp_manager/vbp_mp42_parser.c
@@ -681,7 +681,7 @@ uint32 vbp_get_sc_pos_mp42(
 {
     /* Check if we are byte aligned & phase=0; if that's the case we can check a word at a time instead of a byte. */
-    if (((((uint32) ptr) & 0x3) == 0) && (phase == 0))
+    if (((((uintptr_t) ptr) & 0x3) == 0) && (phase == 0))
     {
         while (data_left > 3)
         {
diff --git a/mixvbp/vbp_manager/vbp_utils.h b/mixvbp/vbp_manager/vbp_utils.h
index 1f54ae6..633159c 100755
--- a/mixvbp/vbp_manager/vbp_utils.h
+++ b/mixvbp/vbp_manager/vbp_utils.h
@@ -86,7 +86,7 @@ struct vbp_context_t
     viddec_pm_cxt_t *parser_cxt;
 
     /* persistent memory for parser */
-    uint32 *persist_mem;
+    uintptr_t *persist_mem;
 
     /* format specific query data */
     void *query_data;
diff --git a/mixvbp/vbp_manager/viddec_parse_sc.c b/mixvbp/vbp_manager/viddec_parse_sc.c
index d64a229..5f76e5d 100755
--- a/mixvbp/vbp_manager/viddec_parse_sc.c
+++ b/mixvbp/vbp_manager/viddec_parse_sc.c
@@ -23,7 +23,7 @@ uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state)
     /* Check if we are 16-byte aligned & phase=0 & more than 16 bytes left; if that's the case we can check a word at a time instead of a byte. */
-    if (((((uint32_t)ptr) & 0xF) == 0) && (phase == 0) && (data_left > 0xF))
+    if (((((uintptr_t)ptr) & 0xF) == 0) && (phase == 0) && (data_left > 0xF))
     {
         // 15 14 13 12 11 10 09 08 07 06 05 04 03 02 01 00 -- check 16 bytes at one time
         // 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? -- if no 00 at byte position: 15,13,11,09,07,05,03,01
diff --git a/mixvbp/vbp_manager/viddec_pm_utils_bstream.c b/mixvbp/vbp_manager/viddec_pm_utils_bstream.c
index edc8061..47030cc 100755
--- a/mixvbp/vbp_manager/viddec_pm_utils_bstream.c
+++ b/mixvbp/vbp_manager/viddec_pm_utils_bstream.c
@@ -11,7 +11,7 @@ typedef union
 void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt);
 uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index);
-extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+extern uint32_t cp_using_dma(uintptr_t ddr_addr, uintptr_t local_addr, uint32_t size, char to_ddr, char swap);
 static int32_t viddec_pm_utils_bstream_peekbits_noemul(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits);
 static int32_t viddec_pm_utils_bstream_peekbits_emul(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits);
diff --git a/mixvbp/vbp_plugin/h264/h264parse.c b/mixvbp/vbp_plugin/h264/h264parse.c
index d4f9c9e..cbb04fe 100755
--- a/mixvbp/vbp_plugin/h264/h264parse.c
+++ b/mixvbp/vbp_plugin/h264/h264parse.c
@@ -276,7 +276,7 @@ void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem)
 
     h264_Info * pInfo = &(parser->info);
 
-    parser->sps_pps_ddr_paddr = (uint32_t)persist_mem;
+    parser->sps_pps_ddr_paddr = (uintptr_t)persist_mem;
 
     pInfo->SPS_PADDR_GL = parser->sps_pps_ddr_paddr;
     pInfo->PPS_PADDR_GL = pInfo->SPS_PADDR_GL + MAX_NUM_SPS * sizeof(seq_param_set_all);
diff --git a/mixvbp/vbp_plugin/h264/h264parse_mem.c b/mixvbp/vbp_plugin/h264/h264parse_mem.c
index 0c39b17..311d611 100755
--- a/mixvbp/vbp_plugin/h264/h264parse_mem.c
+++ b/mixvbp/vbp_plugin/h264/h264parse_mem.c
@@ -50,11 +50,11 @@ void* h264_memcpy( void* dest, void* src, uint32_t num )
 void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId)
 {
     uint32_t
copy_size = sizeof(pic_param_set); - uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size; + uintptr_t pps_entry_ptr = (uintptr_t)(pInfo->PPS_PADDR_GL+nPPSId*copy_size); if (nPPSId < MAX_NUM_PPS) { - cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 1, 0); + cp_using_dma(pps_entry_ptr, (uintptr_t)PPS, copy_size, 1, 0); } return; @@ -68,11 +68,11 @@ void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint3 { uint32_t copy_size= sizeof(pic_param_set); - uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size; + uintptr_t pps_entry_ptr = (uintptr_t)(pInfo->PPS_PADDR_GL+nPPSId*copy_size); if ( nPPSId < MAX_NUM_PPS) { - cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 0, 0); + cp_using_dma(pps_entry_ptr, (uintptr_t)PPS, copy_size, 0, 0); } return; @@ -84,11 +84,11 @@ void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint3 void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId) { uint32_t copy_size = sizeof(seq_param_set_used); - uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all); + uintptr_t sps_entry_ptr = (uintptr_t)(pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all)); if (nSPSId < MAX_NUM_SPS) { - cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 1, 0); + cp_using_dma(sps_entry_ptr, (uintptr_t)SPS, copy_size, 1, 0); } //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id); @@ -104,11 +104,11 @@ void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, ui void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId) { uint32_t copy_size= sizeof(seq_param_set_used); - uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all); + uintptr_t sps_entry_ptr = (uintptr_t)(pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all)); if (nSPSId < MAX_NUM_SPS) { - cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 0, 0); + cp_using_dma(sps_entry_ptr, (uintptr_t)SPS, copy_size, 0, 0); } return; @@ -155,14 +155,14 @@ void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffs //h264_Parse_Check_Sps_Updated_Flag () copy local sps to ddr mem with nSPSId uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId) { - uint32_t is_updated=0; + uintptr_t is_updated=0; uint32_t copy_size = sizeof(uint32_t); - uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size; + uintptr_t sps_entry_ptr = (uintptr_t)(pInfo->SPS_PADDR_GL+nSPSId*copy_size); if (nSPSId < MAX_NUM_SPS) { - cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 1, 0); + cp_using_dma(sps_entry_ptr, &is_updated, copy_size, 1, 0); } //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id); @@ -177,13 +177,13 @@ uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId) // h264_Parse_Clear_Sps_Updated_Flag copy a sps with nSPSId from ddr mem to local SPS void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId) { - uint32_t is_updated=0; + uintptr_t is_updated=0; uint32_t copy_size= sizeof(uint32_t); uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size; if (nSPSId < MAX_NUM_SPS) { - cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 0, 0); + cp_using_dma(sps_entry_ptr, &is_updated, copy_size, 0, 0); } return; diff --git a/mixvbp/vbp_plugin/h264/include/h264.h b/mixvbp/vbp_plugin/h264/include/h264.h index aefd3b7..7015c37 100755 --- a/mixvbp/vbp_plugin/h264/include/h264.h +++ 
b/mixvbp/vbp_plugin/h264/include/h264.h @@ -941,10 +941,10 @@ extern "C" { h264_img_par img; - uint32_t SPS_PADDR_GL; - uint32_t PPS_PADDR_GL; + uintptr_t SPS_PADDR_GL; + uintptr_t PPS_PADDR_GL; uint32_t OFFSET_REF_FRAME_PADDR_GL; - uint32_t TMP_OFFSET_REFFRM_PADDR_GL; + uint32_t TMP_OFFSET_REFFRM_PADDR_GL; uint32_t h264_list_replacement; @@ -964,7 +964,7 @@ extern "C" { //h264_NAL_Unit_t uint8_t nal_unit_type; uint8_t old_nal_unit_type; - uint8_t got_start; + uint8_t got_start; //workload uint8_t push_to_cur; @@ -975,22 +975,22 @@ extern "C" { uint8_t is_frame_boundary_detected_by_non_slice_nal; uint8_t is_frame_boundary_detected_by_slice_nal; uint8_t is_current_workload_done; - uint8_t primary_pic_type_plus_one; //AUD---[0,7] + uint8_t primary_pic_type_plus_one; //AUD---[0,7] //Error handling - uint8_t sei_rp_received; - uint8_t last_I_frame_idc; - uint8_t sei_b_state_ready; - uint8_t gop_err_flag; + uint8_t sei_rp_received; + uint8_t last_I_frame_idc; + uint8_t sei_b_state_ready; + uint8_t gop_err_flag; - uint8_t has_slice; + uint8_t has_slice; } h264_Info; struct h264_viddec_parser { - uint32_t sps_pps_ddr_paddr; + uintptr_t sps_pps_ddr_paddr; h264_Info info; }; diff --git a/mixvbp/vbp_plugin/h264/include/h264parse.h b/mixvbp/vbp_plugin/h264/include/h264parse.h index 1467932..2b1c7a6 100755 --- a/mixvbp/vbp_plugin/h264/include/h264parse.h +++ b/mixvbp/vbp_plugin/h264/include/h264parse.h @@ -2,6 +2,7 @@ #define __H264PARSE_H_ #include "h264.h" +#include #define true 1 #define false 0 @@ -159,14 +160,11 @@ uint32_t mult_u(uint32_t var1, uint32_t var2); ///////////////////////////// utils functions outside h264 //////////////////////////////////////////////////////////////////// -extern void *memset(void *s, int32_t c, uint32_t n); -extern void *memcpy(void *dest, const void *src, uint32_t n); -extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap); +extern uint32_t cp_using_dma(uintptr_t ddr_addr, uintptr_t local_addr, uint32_t size, char to_ddr, char swap); extern int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits); extern int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits); - //////////////////////////////////////////////////////////////////// ///////////////////////////// Second level parse functions //////////////////////////////////////////////////////////////////// diff --git a/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c index daad16c..62e6ab3 100755 --- a/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c +++ b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c @@ -6,7 +6,7 @@ extern void* h264_memcpy( void* dest, void* src, uint32_t num ); -uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap) +uint32_t cp_using_dma(uintptr_t ddr_addr, uintptr_t local_addr, uint32_t size, char to_ddr, char swap) { if (swap != 0) { diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h index 2d93812..3c15ab8 100755 --- a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h +++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h @@ -522,7 +522,6 @@ typedef struct break;} \ } -extern void *memset(void *s, int32_t c, uint32_t n); void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status); diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index bffb4f0..cf78dd0 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ 
b/videodecoder/VideoDecoderBase.cpp
@@ -870,7 +870,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i
         mVASurfaceAttrib->flags |= VA_SURFACE_EXTBUF_DESC_ENABLE_TILING;
 
         for (int i = 0; i < mNumSurfaces; i++) {
-            mVASurfaceAttrib->buffers[i] = (unsigned int )mConfigBuffer.graphicBufferHandler[i];
+            mVASurfaceAttrib->buffers[i] = (unsigned long)mConfigBuffer.graphicBufferHandler[i];
         }
 
         attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
-- cgit v1.2.3

From d56d4b47abfe4734badd43650ea90079687d4794 Mon Sep 17 00:00:00 2001
From: Dan Liang
Date: Fri, 28 Feb 2014 14:55:56 +0800
Subject: libmix: allocate the extra buffer for WiDi dynamically

BZ: 175667

When WiDi is connected, the property "media.widi.enabled" is set and 6 extra buffers are allocated.

Change-Id: Ib9ceb0be85cbc0e806b7413b9f7a6e2a18b3031f
Signed-off-by: Dan Liang
---
 videodecoder/VideoDecoderAVC.cpp  | 53 ++++++++++++++++++++++++++++++++-------
 videodecoder/VideoDecoderAVC.h    |  1 +
 videodecoder/VideoDecoderBase.cpp |  3 ++-
 videodecoder/VideoDecoderBase.h   |  1 +
 4 files changed, 48 insertions(+), 10 deletions(-)

diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp
index 3ada6e1..faec181 100644
--- a/videodecoder/VideoDecoderAVC.cpp
+++ b/videodecoder/VideoDecoderAVC.cpp
@@ -25,6 +25,7 @@
 #include "VideoDecoderAVC.h"
 #include "VideoDecoderTrace.h"
 #include <string.h>
+#include <cutils/properties.h>
 
 // Macros for actual buffer needed calculation
 #define WIDI_CONSUMED 6
@@ -163,6 +164,12 @@ Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h
         CHECK_STATUS("handleNewSequence");
     }
 
+    if (isWiDiStatusChanged()) {
+        mSizeChanged = false;
+        flushSurfaceBuffers();
+        return DECODE_FORMAT_CHANGE;
+    }
+
     // first pic_data always exists, check if any slice is parsed
     if (data->pic_data[0].num_slices == 0) {
         ITRACE("No slice available for decoding.");
@@ -778,7 +785,7 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) {
             + (diff > 0 ? diff : 1)
 #ifndef USE_GEN_HW
             + HDMI_CONSUMED /* Two extra buffers are needed for native window buffer cycling */
-            + WIDI_CONSUMED /* WiDi maximum needs */
+            + (mWiDiOn ?
WIDI_CONSUMED : 0) /* WiDi maximum needs */ #endif + 1; } @@ -790,21 +797,49 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { setRenderRect(); } +bool VideoDecoderAVC::isWiDiStatusChanged() { +#ifndef USE_GEN_HW + if (mWiDiOn) + return false; + + if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) + return false; + + if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) + return false; + + char prop[PROPERTY_VALUE_MAX]; + bool widi_on = (property_get("media.widi.enabled", prop, NULL) > 0) && + (!strcmp(prop, "1") || !strcasecmp(prop, "true")); + if (widi_on) { + mVideoFormatInfo.actualBufferNeeded += WIDI_CONSUMED; + mWiDiOn = true; + ITRACE("WiDi is enabled, actual buffer needed is %d", mVideoFormatInfo.actualBufferNeeded); + return true; + } + return false; +#else + return false; +#endif +} + Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) { updateFormatInfo(data); - bool noNeedFlush = false; - if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) { - noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth) - && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight); + bool needFlush = false; + bool rawDataMode = !(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER); + + if (!rawDataMode) { + needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth) + || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight) + || isWiDiStatusChanged(); } - if (mSizeChanged == false || noNeedFlush) { - return DECODE_SUCCESS; - } else { + if (needFlush || (rawDataMode && mSizeChanged)) { mSizeChanged = false; flushSurfaceBuffers(); return DECODE_FORMAT_CHANGE; - } + } else + return DECODE_SUCCESS; } bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data, bool equalPTS) { diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h index 880b659..c5ce75b 100755 --- a/videodecoder/VideoDecoderAVC.h +++ b/videodecoder/VideoDecoderAVC.h @@ -62,6 +62,7 @@ protected: #ifdef USE_AVC_SHORT_FORMAT virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID*config); #endif + bool isWiDiStatusChanged(); private: struct DecodedPictureBuffer { diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index cf78dd0..31bbac3 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -69,7 +69,8 @@ VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type) mParserHandle(NULL), mInitialized(false), mSignalBufferSize(0), - mErrReportEnabled(false){ + mErrReportEnabled(false), + mWiDiOn(false){ memset(&mVideoFormatInfo, 0, sizeof(VideoFormatInfo)); memset(&mConfigBuffer, 0, sizeof(mConfigBuffer)); diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index deaa5da..e4a033a 100755 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -131,6 +131,7 @@ protected: int32_t mRotationDegrees; bool mErrReportEnabled; + bool mWiDiOn; enum { // TODO: move this to vbp_loader.h -- cgit v1.2.3 From 64b053948578d40069107d84806cdca48a1c8301 Mon Sep 17 00:00:00 2001 From: Jing SUN Date: Thu, 13 Mar 2014 11:21:04 +0800 Subject: Requested 64 bytes stride aligned GrallocBuffer. BZ: 178523 Issue: Currently we let GrallocBuffer self decide the strides of allocated buffers and it was found that those might be always be 64 aligned. Solution: Requested probable larger buffers and we guaranteed allocated ones' alignment. 
Change-Id: I2de3fd94c764fa84fab340bb03e56405f1520993
Signed-off-by: Jing SUN
---
 imageencoder/test/main.cpp | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/imageencoder/test/main.cpp b/imageencoder/test/main.cpp
index a1eeb02..ad53c7b 100644
--- a/imageencoder/test/main.cpp
+++ b/imageencoder/test/main.cpp
@@ -282,7 +282,10 @@ int main(int argc, char** argv)
         aligned_source_buffer = (void *)((unsigned int)source_buffer - ((unsigned int)source_buffer)%4096 + 4096);
     } else { /* gralloc */
-        gralloc_buffer = new GraphicBuffer(width, height, VA_FOURCC_NV12,
+        /* TopazHP requires that the stride be an integral multiple of 64. */
+        stride = (width+0x3f) & (~0x3f);
+
+        gralloc_buffer = new GraphicBuffer(stride, height, VA_FOURCC_NV12,
                                            GraphicBuffer::USAGE_HW_RENDER);
 
         if (NULL == gralloc_buffer) {
            fprintf(stderr, "Allocating GraphicBuffer failed!\n");
-- cgit v1.2.3
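The expression used above is the usual power-of-two round-up: adding 0x3f (63) and then clearing the six low bits yields the smallest multiple of 64 that is greater than or equal to width. A small self-contained check (illustrative only, not part of the test program):

    #include <cassert>
    #include <cstdint>

    int main() {
        // 1080 is not 64-aligned and rounds up to 1088; 1920 already is.
        assert(((uint32_t(1080) + 0x3f) & ~0x3fu) == 1088);
        assert(((uint32_t(1920) + 0x3f) & ~0x3fu) == 1920);
        return 0;
    }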
From 710521e8a174ef35caea5b2a372dbc19ecd3d92c Mon Sep 17 00:00:00 2001
From: Yuanjun Huang
Date: Mon, 18 Nov 2013 14:52:21 +0800
Subject: Multi-thread parsing implementation.

BZ: 158125

1. Refactor the H.264 parser to resolve logical and data-structure dependencies for multithreading and to improve PnP.
2. Two multithreaded parsing schemes were implemented: bundle input and sequential input. The default is the bundle scheme.

Change-Id: I245f11d6c8083a35c3cc13fef25a1e32ded66b29
Signed-off-by: Yuanjun Huang
Signed-off-by: pingshix
---
 mixvbp/include/vbp_trace.h                      |   4 +-
 mixvbp/vbp_manager/Android.mk                   |   8 +-
 mixvbp/vbp_manager/include/viddec_parser_ops.h  |   8 +
 mixvbp/vbp_manager/vbp_h264_parser.c            |  42 +-
 mixvbp/vbp_manager/vbp_mp42_parser.c            |   5 +
 mixvbp/vbp_manager/vbp_thread.c                 | 634 +++++++++++++++++++++
 mixvbp/vbp_manager/vbp_thread.h                 |  51 ++
 mixvbp/vbp_manager/vbp_utils.c                  | 265 +++++++--
 mixvbp/vbp_manager/vbp_utils.h                  |  17 +
 mixvbp/vbp_manager/vbp_vc1_parser.c             |   4 +
 mixvbp/vbp_manager/vbp_vp8_parser.c             |   4 +
 mixvbp/vbp_plugin/h264/h264parse.c              | 273 +++++++--
 mixvbp/vbp_plugin/h264/h264parse_dpb.c          |   0
 mixvbp/vbp_plugin/h264/h264parse_pps.c          |   4 +
 mixvbp/vbp_plugin/h264/h264parse_sh.c           | 485 ++++++++++++++--
 mixvbp/vbp_plugin/h264/include/h264.h           | 118 ++--
 mixvbp/vbp_plugin/h264/include/h264parse.h      |  10 +-
 mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c     |   5 +-
 .../secvideo/baytrail/viddec_h264secure_parse.c |   5 +-
 mixvbp/vbp_plugin/h264/viddec_h264_parse.c      | 185 +++++-
 20 files changed, 1951 insertions(+), 176 deletions(-)
 create mode 100755 mixvbp/vbp_manager/vbp_thread.c
 create mode 100755 mixvbp/vbp_manager/vbp_thread.h
 mode change 100644 => 100755 mixvbp/vbp_plugin/h264/h264parse_dpb.c

diff --git a/mixvbp/include/vbp_trace.h b/mixvbp/include/vbp_trace.h
index ba916b9..7fd891f 100755
--- a/mixvbp/include/vbp_trace.h
+++ b/mixvbp/include/vbp_trace.h
@@ -11,9 +11,9 @@
 #define VBP_TRACE_H_
 
-
+#ifndef VBP_TRACE
 #define VBP_TRACE
-
+#endif
 
 #ifdef VBP_TRACE /* if VBP_TRACE is defined*/
 
diff --git a/mixvbp/vbp_manager/Android.mk b/mixvbp/vbp_manager/Android.mk
index a45fe7e..a0c95a9 100755
--- a/mixvbp/vbp_manager/Android.mk
+++ b/mixvbp/vbp_manager/Android.mk
@@ -2,7 +2,7 @@ LOCAL_PATH:= $(call my-dir)
 
 include $(CLEAR_VARS)
 
-#MIXVBP_LOG_ENABLE := true
+MIXVBP_LOG_ENABLE := true
 
 LOCAL_SRC_FILES := \
     vbp_h264_parser.c \
@@ -13,8 +13,10 @@ LOCAL_SRC_FILES := \
     viddec_parse_sc.c \
     viddec_pm_parser_ops.c \
     viddec_pm_utils_bstream.c \
+    vbp_thread.c \
 
 LOCAL_CFLAGS := -DVBP -DHOST_ONLY
+LOCAL_CFLAGS += -DUSE_MULTI_THREADING
 
 LOCAL_C_INCLUDES += \
     $(LOCAL_PATH)/include \
@@ -39,6 +41,8 @@ LOCAL_SHARED_LIBRARIES := \
     libdl \
     libcutils
 
+LOCAL_LDLIBS += -lpthread
+
 ifeq ($(strip $(MIXVBP_LOG_ENABLE)),true)
 LOCAL_CFLAGS += -DVBP_TRACE
 LOCAL_SHARED_LIBRARIES += liblog
@@ -68,4 +72,6 @@ LOCAL_C_INCLUDES += $(LOCAL_PATH)/secvideo/merrifield/
 LOCAL_SRC_FILES += secvideo/merrifield/vbp_h264secure_parser.c
 endif
 
+LOCAL_LDLIBS += -lpthread
+
 include $(BUILD_SHARED_LIBRARY)
diff --git a/mixvbp/vbp_manager/include/viddec_parser_ops.h b/mixvbp/vbp_manager/include/viddec_parser_ops.h
index 77054b5..533b231 100755
--- a/mixvbp/vbp_manager/include/viddec_parser_ops.h
+++ b/mixvbp/vbp_manager/include/viddec_parser_ops.h
@@ -30,6 +30,10 @@ typedef void (*fn_flush_parser) (void *parent, void *ctxt);
 typedef uint32_t (*fn_update_data)(void *parent, void *data, uint32_t size);
 #endif
 
+typedef uint32_t (*fn_is_payload_start)(void *parent);
+typedef uint32_t (*fn_parse_syntax_threading) (void *parent, void *ctxt, uint32_t thread_index);
+typedef uint32_t (*fn_post_parse_threading) (void *parent, void *ctxt, uint32_t slice_index);
+typedef uint32_t (*fn_query_thread_parsing_cap) (void);
 
 typedef struct
 {
@@ -45,6 +49,10 @@ typedef struct
 #if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
     fn_update_data update_data;
 #endif
+    fn_is_payload_start is_payload_start;
+    fn_parse_syntax_threading parse_syntax_threading;
+    fn_post_parse_threading post_parse_threading;
+    fn_query_thread_parsing_cap query_thread_parsing_cap;
 } viddec_parser_ops_t;
 
diff --git a/mixvbp/vbp_manager/vbp_h264_parser.c b/mixvbp/vbp_manager/vbp_h264_parser.c
index dd93ea7..9c75519 100755
--- a/mixvbp/vbp_manager/vbp_h264_parser.c
+++ b/mixvbp/vbp_manager/vbp_h264_parser.c
@@ -26,6 +26,7 @@
 #include <dlfcn.h>
 
 #include "h264.h"
+#include "h264parse.h"
 #include "vbp_loader.h"
 #include "vbp_utils.h"
 #include "vbp_h264_parser.h"
@@ -185,6 +186,33 @@ uint32 vbp_init_parser_entries_h264(vbp_context *pcontext)
         return VBP_LOAD;
     }
 
+    pcontext->parser_ops->is_payload_start = dlsym(pcontext->fd_parser, "viddec_h264_payload_start");
+    if (NULL == pcontext->parser_ops->is_payload_start)
+    {
+        ETRACE ("Failed to set entry point." );
+    }
+
+    pcontext->parser_ops->parse_syntax_threading = dlsym(pcontext->fd_parser, "viddec_h264_threading_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax_threading)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->post_parse_threading = dlsym(pcontext->fd_parser, "viddec_h264_post_parse");
+    if (NULL == pcontext->parser_ops->post_parse_threading)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->query_thread_parsing_cap = dlsym(pcontext->fd_parser, "viddec_h264_query_thread_parsing_cap");
+    if (NULL == pcontext->parser_ops->query_thread_parsing_cap)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
     /* entry point not needed */
     pcontext->parser_ops->is_frame_start = NULL;
     return VBP_OK;
@@ -1022,6 +1050,7 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index)
     {
         /* partial frame */
         query_data->num_pictures = 1;
+        WTRACE("partial frame found.");
     }
 
     if (query_data->num_pictures > MAX_NUM_PICTURES)
@@ -1041,7 +1070,7 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index)
     pic_parms = pic_data->pic_parms;
 
     // relax this condition to support partial frame parsing
-
+    // TODO: do we need to support partial frames?
//if (parser->info.SliceHeader.first_mb_in_slice == 0) { /** @@ -1131,6 +1160,8 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index) pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag; pic_parms->frame_num = parser->info.SliceHeader.frame_num; + + } @@ -1161,7 +1192,6 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index) pic_parms->num_ref_idx_l1_default_active_minus1 = parser->info.active_PPS.num_ref_idx_l1_active - 1; } #endif - return VBP_OK; } @@ -1682,11 +1712,11 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) break; case h264_NAL_UNIT_TYPE_SPS: - ITRACE("SPS header is parsed."); + VTRACE("SPS header is parsed."); break; case h264_NAL_UNIT_TYPE_PPS: - ITRACE("PPS header is parsed."); + VTRACE("PPS header is parsed."); break; case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: @@ -1694,11 +1724,11 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) break; case h264_NAL_UNIT_TYPE_EOSeq: - ITRACE("EOSeq is parsed."); + VTRACE("EOSeq is parsed."); break; case h264_NAL_UNIT_TYPE_EOstream: - ITRACE("EOStream is parsed"); + VTRACE("EOStream is parsed"); break; default: diff --git a/mixvbp/vbp_manager/vbp_mp42_parser.c b/mixvbp/vbp_manager/vbp_mp42_parser.c index 9b4c63f..b954b38 100755 --- a/mixvbp/vbp_manager/vbp_mp42_parser.c +++ b/mixvbp/vbp_manager/vbp_mp42_parser.c @@ -124,6 +124,11 @@ uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext) /* entry point not needed */ pcontext->parser_ops->flush = NULL; + pcontext->parser_ops->is_payload_start = NULL; + pcontext->parser_ops->parse_syntax_threading = NULL; + pcontext->parser_ops->post_parse_threading = NULL; + pcontext->parser_ops->query_thread_parsing_cap = NULL; + return VBP_OK; } diff --git a/mixvbp/vbp_manager/vbp_thread.c b/mixvbp/vbp_manager/vbp_thread.c new file mode 100755 index 0000000..e1f665b --- /dev/null +++ b/mixvbp/vbp_manager/vbp_thread.c @@ -0,0 +1,634 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2013 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. 
+* +*/ +#include +#include +#include + +#include "vbp_thread.h" +#include "vbp_loader.h" + +/* consider a qual core with hyper thread */ +#define MAX_AUTO_THREADS 8 + +#define THREADING_SCHEME_BUNDLE + +typedef long long int nsecs_t; + +static nsecs_t systemTime() +{ + struct timeval t; + gettimeofday(&t, NULL); + return 1000000 * t.tv_sec + t.tv_usec; +} + + +typedef struct PerThreadContext { + pthread_t thread; + + int32_t index; // thread index referenced by thread itself when needed. + int32_t thread_init; + struct ThreadContext* parent; + + pthread_cond_t input_cond; // Used to wait for a new packet from the main thread. + pthread_cond_t progress_cond; // Used by child threads to wait for progress to change. + pthread_cond_t output_cond; // Used by the main thread to wait for frames to finish. + + pthread_mutex_t mutex; // Mutex used to protect the contents of the PerThreadContext. + pthread_mutex_t progress_mutex; // Mutex used to protect frame progress values and progress_cond. + + vbp_context* vbpctx; + viddec_pm_cxt_t* pmctx; // Working parser context + viddec_pm_cxt_t* input_pmctx; // Input parser context + void* codec_data; // Points to specific codec data that holds output, all threads share + // one instance + uint32_t start_item; // start of parsing item num for bundle parsing + + enum { + STATE_INPUT_WAIT, + STATE_WORKING, + STATE_EXIT + } state; + +} PerThreadContext; + +typedef struct ThreadContext { + PerThreadContext* threads[MAX_AUTO_THREADS]; // The contexts for each thread. + PerThreadContext* prev_thread; // The last thread submit_packet() was called on. + int delaying; // Set for the first N packets, where N is the number of threads. + // While it is set, vbp_thread_parse_syntax won't return any results + + uint32_t next_finished; // The next thread count to return output from. + uint32_t next_parsing; // The next thread count to submit input packet to. + + uint32_t active_thread_count; // num of thread need to be warmed up + + sem_t finish_sem; // semaphore of finish work to synchronize working thread and main thread + uint32_t start_item_to_parse; + uint32_t last_item_to_parse; + +} ThreadContext; + + +int32_t get_cpu_count() +{ + int32_t cpu_num; +#if defined(_SC_NPROC_ONLN) + cpu_num = sysconf(_SC_NPROC_ONLN); +#elif defined(_SC_NPROCESSORS_ONLN) + cpu_num = sysconf(_SC_NPROCESSORS_ONLN); +#endif + return cpu_num; +} + + +void set_thread_affinity_mask(cpu_set_t mask) +{ + int err, syscallres; + pid_t pid = gettid(); + syscallres = syscall(__NR_sched_setaffinity, pid, sizeof(mask), &mask); + if (syscallres) + { + ETRACE("Error in the syscall setaffinity."); + } +} + + +static void vbp_update_parser_for_item(viddec_pm_cxt_t *cxt, + viddec_pm_cxt_t *src_cxt, + uint32 item) +{ + + /* set up bitstream buffer */ + cxt->getbits.list = src_cxt->getbits.list; + + /* setup buffer pointer */ + cxt->getbits.bstrm_buf.buf = src_cxt->getbits.bstrm_buf.buf; + + + /* setup bitstream parser */ + cxt->getbits.bstrm_buf.buf_index = src_cxt->list.data[item].stpos; + cxt->getbits.bstrm_buf.buf_st = src_cxt->list.data[item].stpos; + cxt->getbits.bstrm_buf.buf_end = src_cxt->list.data[item].edpos; + + /* It is possible to end up with buf_offset not equal zero. 
*/ + cxt->getbits.bstrm_buf.buf_bitoff = 0; + cxt->getbits.au_pos = 0; + cxt->getbits.list_off = 0; + cxt->getbits.phase = 0; + cxt->getbits.emulation_byte_counter = 0; + + cxt->list.start_offset = src_cxt->list.data[item].stpos; + cxt->list.end_offset = src_cxt->list.data[item].edpos; + cxt->list.total_bytes = src_cxt->list.data[item].edpos - src_cxt->list.data[item].stpos; + +} + + + +static void* parser_worker_thread(void* arg) +{ + PerThreadContext* p = arg; + ThreadContext* t_cxt = p->parent; + vbp_context* vbpctx = p->vbpctx; + viddec_pm_cxt_t* pm_cxt = p->pmctx; + viddec_parser_ops_t* ops = vbpctx->parser_ops; + + +// probably not to make each parsing thread have affinity to a cpu core +// having cpus fully occupied will even lead to low performance +// current experimental solution: just make main thread have affinity +#if 0 + cpu_set_t mask; + CPU_ZERO(&mask); + CPU_SET(p->index, &mask); // cpu affinity is set to same num as thread index + set_thread_affinity_mask(mask); +#endif + + pthread_mutex_lock(&p->mutex); + + nsecs_t t0; + while (1) { + while (p->state == STATE_INPUT_WAIT) { + pthread_cond_wait(&p->input_cond, &p->mutex); + } + + if (p->state == STATE_WORKING) { + //now we get input data, call actual parse. + //t0 = systemTime(); + sleep(0); + ops->parse_syntax_threading((void *)p->pmctx, p->codec_data, p->index); + + pthread_mutex_lock(&p->progress_mutex); + p->state = STATE_INPUT_WAIT; + + pthread_cond_broadcast(&p->progress_cond); + pthread_cond_signal(&p->output_cond); + pthread_mutex_unlock(&p->progress_mutex); + } else if (p->state == STATE_EXIT) { + break; + } + } + pthread_mutex_unlock(&p->mutex); + pthread_exit(NULL); + return NULL; +} + +static void* parser_worker_thread_bundle(void* arg) +{ + PerThreadContext* p = arg; + ThreadContext* t_cxt = p->parent; + vbp_context* vbpctx = p->vbpctx; + viddec_parser_ops_t* ops = vbpctx->parser_ops; + +// probably not to make each parsing thread have affinity to a cpu core +// having cpus fully occupied will even lead to low performance +// current experimental solution: just make main thread have affinity +#if 1 + cpu_set_t mask; + CPU_ZERO(&mask); + CPU_SET(p->index, &mask); // cpu affinity is set to same num as thread index + set_thread_affinity_mask(mask); +#endif + + pthread_mutex_lock(&p->mutex); + + nsecs_t t0; + while (1) { + while (p->state == STATE_INPUT_WAIT) { + pthread_cond_wait(&p->input_cond, &p->mutex); + } + + if (p->state == STATE_WORKING) { + uint32_t working_item = p->start_item; // start point + uint32_t slice_index = 0 + p->index; // start point + + while (working_item <= t_cxt->last_item_to_parse) { + vbp_update_parser_for_item(p->pmctx, p->input_pmctx, working_item); + ops->parse_syntax_threading((void *)p->pmctx, p->codec_data, slice_index); + + working_item += t_cxt->active_thread_count; + slice_index += t_cxt->active_thread_count; + } + + pthread_mutex_lock(&p->progress_mutex); + p->state = STATE_INPUT_WAIT; + + pthread_cond_broadcast(&p->progress_cond); + pthread_mutex_unlock(&p->progress_mutex); + } else if (p->state == STATE_EXIT) { + break; + } + } + pthread_mutex_unlock(&p->mutex); + pthread_exit(NULL); + return NULL; +} + + +uint32_t update_context_from_input(viddec_pm_cxt_t* dest, + viddec_pm_cxt_t* source) +{ + if ((dest == NULL) || (source == NULL) || (dest == source)) { + ETRACE("%s error", __func__); + return 1; + } + /* set up bitstream buffer */ + dest->getbits.list = source->getbits.list; + + /* buffer pointer */ + dest->getbits.bstrm_buf.buf = source->getbits.bstrm_buf.buf; + + /* 
bitstream parser */ + dest->getbits.bstrm_buf.buf_index = source->getbits.bstrm_buf.buf_index; + dest->getbits.bstrm_buf.buf_st = source->getbits.bstrm_buf.buf_st; + dest->getbits.bstrm_buf.buf_end = source->getbits.bstrm_buf.buf_end; + + /* It is possible to end up with buf_offset not equal zero. */ + dest->getbits.bstrm_buf.buf_bitoff = 0; + dest->getbits.au_pos = 0; + dest->getbits.list_off = 0; + dest->getbits.phase = 0; + dest->getbits.emulation_byte_counter = 0; + + dest->list.start_offset = source->list.start_offset; + dest->list.end_offset = source->list.end_offset; + dest->list.total_bytes = source->list.total_bytes; + return 0; +} + +uint32_t update_context_to_output(viddec_pm_cxt_t* dest, + viddec_pm_cxt_t* source) +{ + if ((dest == NULL) || (source == NULL) || (dest == source)) { + ETRACE("%s error", __func__); + return 1; + } + + /* bitstream parser */ + dest->getbits.bstrm_buf.buf_index = source->getbits.bstrm_buf.buf_index; + dest->getbits.bstrm_buf.buf_st = source->getbits.bstrm_buf.buf_st; + dest->getbits.bstrm_buf.buf_end = source->getbits.bstrm_buf.buf_end; + + /* It is possible to end up with buf_offset not equal zero. */ + dest->getbits.bstrm_buf.buf_bitoff = source->getbits.bstrm_buf.buf_bitoff; + dest->getbits.au_pos = source->getbits.au_pos; + dest->getbits.list_off = source->getbits.list_off; + dest->getbits.phase = source->getbits.phase; + dest->getbits.emulation_byte_counter = source->getbits.emulation_byte_counter; + dest->getbits.is_emul_reqd = source->getbits.is_emul_reqd; + + dest->list.start_offset = source->list.start_offset; + dest->list.end_offset = source->list.end_offset; + dest->list.total_bytes = source->list.total_bytes; + + return 0; +} + + + +uint32_t feed_thread_input(PerThreadContext* p, void* parent) +{ + ThreadContext* t_context = p->parent; + viddec_pm_cxt_t* pm_cxt = (viddec_pm_cxt_t*) parent; + + //nsecs_t t0 = systemTime(); + if (pm_cxt->getbits.bstrm_buf.buf == NULL) { + return 1; + } + + pthread_mutex_lock(&p->mutex); + + if (p->state == STATE_WORKING) { + pthread_mutex_lock(&p->progress_mutex); + while (p->state == STATE_WORKING) { + pthread_cond_wait(&p->progress_cond, &p->progress_mutex); + } + pthread_mutex_unlock(&p->progress_mutex); + } + + /* Now update the input to the working thread*/ + update_context_from_input(p->pmctx, pm_cxt); + p->codec_data = (void*)&(pm_cxt->codec_data[0]); + + p->state = STATE_WORKING; + t_context->next_parsing++; + + //t0 = systemTime(); + pthread_cond_signal(&p->input_cond); + pthread_mutex_unlock(&p->mutex); + + return 0; +} + +void vbp_thread_init(vbp_context* pcontext) +{ + int i; + ThreadContext* t_context = NULL; + int32_t thread_count = pcontext->thread_count; + int32_t err = 0; + +#ifdef THREADING_SCHEME_BUNDLE + ITRACE("%s, threading_parse_scheme set to SCHEME_BUNDLE", __func__); + pcontext->threading_parse_scheme = SCHEME_BUNDLE; +#else + ITRACE("%s, threading_parse_scheme set to SCHEME_SEQUENTIAL", __func__); + pcontext->threading_parse_scheme = SCHEME_SEQUENTIAL; +#endif + + if (thread_count == 0) { + int32_t cpu_num = get_cpu_count(); + if (cpu_num > 1) { + if (pcontext->threading_parse_scheme == SCHEME_BUNDLE) { + thread_count = pcontext->thread_count = cpu_num - 1; + } else { + thread_count = pcontext->thread_count = cpu_num - 1; + } + } + else { + thread_count = pcontext->thread_count = 1; + } + } + + pcontext->thread_opaque = t_context = + (ThreadContext*)malloc(sizeof(ThreadContext)); + if (t_context != NULL) { + t_context->active_thread_count = thread_count; //default active count + + 
t_context->delaying = 1; + t_context->next_parsing = t_context->next_finished = 0; + } + + ITRACE("%s, creating %d parsing thread.", __func__, thread_count); + for (i = 0; i < thread_count; i++) { + t_context->threads[i] = (PerThreadContext*)malloc(sizeof(PerThreadContext)); + assert(t_context->threads[i] != NULL); + PerThreadContext* p = t_context->threads[i]; + + if (p != NULL) { + p->index = i; + p->parent = t_context; + p->vbpctx = pcontext; + p->pmctx = vbp_malloc(viddec_pm_cxt_t, 1); + viddec_pm_utils_bstream_init(&(p->pmctx->getbits), NULL, 0); + + pthread_mutex_init(&p->mutex, NULL); + pthread_mutex_init(&p->progress_mutex, NULL); + pthread_cond_init(&p->input_cond, NULL); + pthread_cond_init(&p->progress_cond, NULL); + pthread_cond_init(&p->output_cond, NULL); + + p->state = STATE_INPUT_WAIT; + + if(pcontext->threading_parse_scheme == SCHEME_SEQUENTIAL) { + err = pthread_create(&p->thread, NULL, parser_worker_thread, p); + } else { + err = pthread_create(&p->thread, NULL, parser_worker_thread_bundle, p); + } + + p->thread_init = !err; + } + } +#if 1 + ITRACE("%s, set_thread_affinity_mask", __func__); + cpu_set_t mask; + CPU_ZERO(&mask); + CPU_SET(3, &mask); // 0~thread_count-1 cpus was set to each sub thread, + // last cpu is set to main thread + set_thread_affinity_mask(mask); +#endif +} + + +void vbp_thread_free(vbp_context* pcontext) +{ + ITRACE("%s", __func__); + ThreadContext* t_context = pcontext->thread_opaque; + int i; + int thread_count = pcontext->thread_count; + + for (i = 0; i < thread_count; i++) { + PerThreadContext *p = t_context->threads[i]; + + pthread_mutex_lock(&p->mutex); + p->state = STATE_EXIT; + pthread_cond_signal(&p->input_cond); + pthread_mutex_unlock(&p->mutex); + + if (p->thread_init) { + pthread_join(p->thread, NULL); + } + p->thread_init = 0; + } + + for (i = 0; i < thread_count; i++) { + PerThreadContext *p = t_context->threads[i]; + + pthread_mutex_destroy(&p->mutex); + pthread_mutex_destroy(&p->progress_mutex); + pthread_cond_destroy(&p->input_cond); + pthread_cond_destroy(&p->progress_cond); + pthread_cond_destroy(&p->output_cond); + + if (p->pmctx != NULL) { + free(p->pmctx); + } + + free(p); + p = NULL; + } + + free(t_context); +} + +/* + * Entry function of multi-thread parsing + * + * parent - A viddec_pm_cxt_t type parser management context, + * which contains input stream. 
+ * ctxt - Codec specific parser context, actually codec_data[] in + * viddec_pm_cxt_t, Used for storing parsed output + * return - 0 indicates no output is gotten, just warm up the threads + * 1 indicates there is output + * + * see viddec_parser_ops.h + * uint32_t (*fn_parse_syntax) (void *parent, void *ctxt); + */ +uint32_t vbp_thread_parse_syntax(void* parent, + void* ctxt, + vbp_context* pcontext) +{ + ThreadContext* t_context = pcontext->thread_opaque; + uint32_t finished = t_context->next_finished; + + if ((parent == NULL) || (ctxt == NULL)) { + return 0; + } + + PerThreadContext* p; + + nsecs_t t0,t1; + //t0 = t1 = systemTime(); + + /* Submit an input packet to the next parser thread*/ + p = t_context->threads[t_context->next_parsing]; + feed_thread_input(p, parent); + + //p->state = STATE_WORKING; + //t_context->next_parsing++; + + //t0 = systemTime(); + //pthread_cond_signal(&p->input_cond); + + //t0 = systemTime(); + + if ((t_context->delaying == 1) && + (t_context->next_parsing > (t_context->active_thread_count - 1))) { + t_context->delaying = 0; + } + + /* If we are still in early stage that warming up each thread, indicate we got no output*/ + if (t_context->delaying == 1) { + return 0; + } + + /* return available parsed frame from the oldest thread + * notice that we start getting output from thread[0] after just submitting input + * to thread[active_count-1] + * */ + p = t_context->threads[finished++]; + + if (p->state != STATE_INPUT_WAIT) { + pthread_mutex_lock(&p->progress_mutex); + while (p->state != STATE_INPUT_WAIT) { + pthread_cond_wait(&p->output_cond, &p->progress_mutex); + } + pthread_mutex_unlock(&p->progress_mutex); + } + + + if (finished > (t_context->active_thread_count - 1)) { + finished = 0; + } + + if (t_context->next_parsing >= t_context->active_thread_count) { + t_context->next_parsing = 0; + } + + t_context->next_finished = finished; + + update_context_to_output((viddec_pm_cxt_t*) parent, p->pmctx); + + return 1; +} + + +/* + * Entry function of multi-thread parsing + * + * parent - A viddec_pm_cxt_t type parser management context, + * which contains input stream. + * ctxt - Codec specific parser context, actually codec_data[] in + * viddec_pm_cxt_t, Used for storing parsed output + * start_item - num of start item passed to trigger multithread parsing + * + */ +uint32_t vbp_thread_parse_syntax_bundle(void* parent, + void* ctxt, + vbp_context* pcontext, + uint32_t start_item) +{ + ThreadContext* t_context = pcontext->thread_opaque; + if ((parent == NULL) || (ctxt == NULL)) { + return 0; + } + + PerThreadContext* p = NULL; + viddec_pm_cxt_t* pm_cxt = (viddec_pm_cxt_t*) parent; + t_context->start_item_to_parse = start_item; + t_context->last_item_to_parse = pm_cxt->list.num_items - 1; + + sem_init(&(t_context->finish_sem),0,0); + + uint32_t i; + for (i = 0; i < t_context->active_thread_count; i++) { + p = t_context->threads[i]; + p->start_item = start_item + i; + + if (p->state == STATE_WORKING) { + pthread_mutex_lock(&p->progress_mutex); + while (p->state == STATE_WORKING) { + pthread_cond_wait(&p->progress_cond, &p->progress_mutex); + } + pthread_mutex_unlock(&p->progress_mutex); + } + + p->codec_data = (void*)&(pm_cxt->codec_data[0]); + p->input_pmctx = pm_cxt; + + p->state = STATE_WORKING; + + pthread_cond_signal(&p->input_cond); + pthread_mutex_unlock(&p->mutex); + + } + return 1; +} + + +/* + * set active threads num since not all threads need to be warmed up + * when a frame has fewer slice num than threads we created. 
+ * + * active_count - threads num to be activated. + */ +uint32_t vbp_thread_set_active(vbp_context* pcontext, + uint32_t active_count) +{ + ThreadContext* t_context = pcontext->thread_opaque; + + if (t_context != NULL) { + if (active_count < pcontext->thread_count) { + t_context->active_thread_count = active_count; + } else { //reset to the default + t_context->active_thread_count = pcontext->thread_count; + } + + //reset to the default + t_context->delaying = 1; + t_context->next_parsing = t_context->next_finished = 0; + } + return 0; +} + +uint32_t vbp_thread_get_active(vbp_context* pcontext) +{ + ThreadContext* t_context = pcontext->thread_opaque; + + if (t_context != NULL) { + return t_context->active_thread_count; + } + return 0; +} + + diff --git a/mixvbp/vbp_manager/vbp_thread.h b/mixvbp/vbp_manager/vbp_thread.h new file mode 100755 index 0000000..e182ac1 --- /dev/null +++ b/mixvbp/vbp_manager/vbp_thread.h @@ -0,0 +1,51 @@ +/* INTEL CONFIDENTIAL +* Copyright (c) 2013 Intel Corporation. All rights reserved. +* +* The source code contained or described herein and all documents +* related to the source code ("Material") are owned by Intel +* Corporation or its suppliers or licensors. Title to the +* Material remains with Intel Corporation or its suppliers and +* licensors. The Material contains trade secrets and proprietary +* and confidential information of Intel or its suppliers and +* licensors. The Material is protected by worldwide copyright and +* trade secret laws and treaty provisions. No part of the Material +* may be used, copied, reproduced, modified, published, uploaded, +* posted, transmitted, distributed, or disclosed in any way without +* Intel's prior express written permission. +* +* No license under any patent, copyright, trade secret or other +* intellectual property right is granted to or conferred upon you +* by disclosure or delivery of the Materials, either expressly, by +* implication, inducement, estoppel or otherwise. Any license +* under such intellectual property rights must be express and +* approved by Intel in writing. +* +*/ + +#ifndef VBP_THREAD_H +#define VBP_THREAD_H + +#define _GNU_SOURCE +#include +#include + +#include +#include "vbp_utils.h" +#include "include/viddec_pm.h" +#include + + +void vbp_thread_init(vbp_context *pcontext); + +void vbp_thread_free(vbp_context *pcontext); + +uint32_t vbp_thread_parse_syntax(void* parent, + void* ctxt, + vbp_context* pcontext); + +uint32_t vbp_thread_set_active(vbp_context* pcontext, + uint32_t active_count); + +uint32_t vbp_thread_get_active(vbp_context* pcontext); + +#endif diff --git a/mixvbp/vbp_manager/vbp_utils.c b/mixvbp/vbp_manager/vbp_utils.c index 1647269..21b1e5d 100755 --- a/mixvbp/vbp_manager/vbp_utils.c +++ b/mixvbp/vbp_manager/vbp_utils.c @@ -39,6 +39,21 @@ #include "vbp_h264secure_parser.h" #endif +#ifdef USE_MULTI_THREADING +#include "vbp_thread.h" +#endif + +#define LEAST_SLICES_MULTI_THREADING 10 + +typedef long long int nsecs_t; + +static nsecs_t systemTime() +{ + struct timeval t; + gettimeofday(&t, NULL); + return 1000000 * t.tv_sec + t.tv_usec; +} + /* buffer counter */ uint32 buffer_counter = 0; @@ -303,12 +318,36 @@ cleanup: } +static void vbp_setup_parser_for_item(viddec_pm_cxt_t *cxt, uint32 item) +{ + /* setup bitstream parser */ + cxt->getbits.bstrm_buf.buf_index = cxt->list.data[item].stpos; + cxt->getbits.bstrm_buf.buf_st = cxt->list.data[item].stpos; + cxt->getbits.bstrm_buf.buf_end = cxt->list.data[item].edpos; + + /* It is possible to end up with buf_offset not equal zero. 
*/
+    cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+    cxt->getbits.au_pos = 0;
+    cxt->getbits.list_off = 0;
+    cxt->getbits.phase = 0;
+    cxt->getbits.emulation_byte_counter = 0;
+
+    cxt->list.start_offset = cxt->list.data[item].stpos;
+    cxt->list.end_offset = cxt->list.data[item].edpos;
+    cxt->list.total_bytes = cxt->list.data[item].edpos - cxt->list.data[item].stpos;
+
+}
+
 /**
  *
  * parse the elementary sample buffer or codec configuration data
  *
  */
+//static uint32 frame_num = 0;
+//static nsecs_t total_time_of_multislice = 0;
+//static uint32 frame_multislice_num = 0;
 static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_flag)
 {
     viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
@@ -339,6 +378,7 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f
     }
     */
 
+    uint32_t multi_parse_done = 0;
 
     /* populate the list.*/
     if (init_data_flag)
@@ -364,48 +404,176 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f
 
     // TODO: check if cxt->getbits.is_emul_reqd is set properly
 
-    for (i = 0; i < cxt->list.num_items; i++)
-    {
-        /* setup bitstream parser */
-        cxt->getbits.bstrm_buf.buf_index = cxt->list.data[i].stpos;
-        cxt->getbits.bstrm_buf.buf_st = cxt->list.data[i].stpos;
-        cxt->getbits.bstrm_buf.buf_end = cxt->list.data[i].edpos;
-
-        /* It is possible to end up with buf_offset not equal zero. */
-        cxt->getbits.bstrm_buf.buf_bitoff = 0;
-
-        cxt->getbits.au_pos = 0;
-        cxt->getbits.list_off = 0;
-        cxt->getbits.phase = 0;
-        cxt->getbits.emulation_byte_counter = 0;
-
-        cxt->list.start_offset = cxt->list.data[i].stpos;
-        cxt->list.end_offset = cxt->list.data[i].edpos;
-        cxt->list.total_bytes = cxt->list.data[i].edpos - cxt->list.data[i].stpos;
-
-        /* invoke parse entry point to parse the buffer */
-        error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
-
-        /* can't return error for now. Need further investigation */
-        if (0 != error)
-        {
-            VTRACE("failed to parse the syntax: %d!", error);
+    //frame_num ++;
+
+    nsecs_t t0, t1, t2, tt0, tt1, tt2;
+    t0 = t1 = t2 = tt0 = tt1 = tt2 = 0;
+    //t0 = systemTime();
+
+    if (0 == pcontext->is_multithread_parsing_enabled) {
+        for (i = 0; i < cxt->list.num_items; i++) {
+            vbp_setup_parser_for_item(cxt, i);
+            /* invoke parse entry point to parse the buffer */
+            //t1 = systemTime();
+            error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
+            //t2 = systemTime();
+            //tt1 += t2 - t1;
+
+            /* can't return error for now. Need further investigation */
Need further investigation */ + if (0 != error) { + WTRACE("failed to parse the syntax: %d!", error); + } + + /* process parsing result */ + //t2 = systemTime(); + error = pcontext->func_process_parsing_result(pcontext, i); + //tt2 += systemTime() - t2; + + if (VBP_MULTI == error) { + ITRACE("Multiple frames are found in one buffer."); + return VBP_OK; + } + else if (0 != error) { + ETRACE("Failed to process parsing result."); + return error; + } } - - /* process parsing result */ - error = pcontext->func_process_parsing_result(pcontext, i); - - if (VBP_MULTI == error) { - ITRACE("Multiple frames are found in one bufffer."); - return VBP_OK; + } + // Multi-threading option is enabled + else if (1 == pcontext->is_multithread_parsing_enabled) { + + int got_output = 0; + int is_payload_start = 0; + int single_parse_count = 0; + int use_thread_parsing = 0; + + for (i = 0; i < cxt->list.num_items; i++) { + + vbp_setup_parser_for_item(cxt, i); + + // we assume no configuration data follows the slice data in a frame's buffer + is_payload_start = ops->is_payload_start((void *)cxt); + + if (is_payload_start == 0) { + //t1 = systemTime(); + error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0])); + //tt1 += systemTime() - t1; + + //t2 = systemTime(); + error = pcontext->func_process_parsing_result(pcontext, i); + single_parse_count ++; + //tt2 += systemTime() - t2; + } else if (((cxt->list.num_items - single_parse_count) < LEAST_SLICES_MULTI_THREADING)) { + //t1 = systemTime(); + error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0])); + //tt1 += systemTime() - t1; + + //t2 = systemTime(); + error = pcontext->func_process_parsing_result(pcontext, i); + //tt2 += systemTime() - t2; + } else { + use_thread_parsing = 1; + break; + } + + if (VBP_MULTI == error) { + ITRACE("Multiple frames are found in one buffer."); + return VBP_OK; + } + else if (0 != error) { + ETRACE("Failed to process parsing result."); + return error; + } } - else if (0 != error) - { - ETRACE("Failed to process parsing result."); - return error; + + if (use_thread_parsing) { + vbp_thread_set_active(pcontext, cxt->list.num_items - single_parse_count); + uint32_t thread_count = vbp_thread_get_active(pcontext); + + //t1 = systemTime(); + if (pcontext->threading_parse_scheme == SCHEME_BUNDLE) { + // Multithread parsing Scheme-Bundle-Input + // This interface pushes threads to parse all slice headers without interruption. + vbp_thread_parse_syntax_bundle((void *)cxt, + (void *)&(cxt->codec_data[0]), + pcontext, + i); //first slice's item num + + uint32_t j; + for (j = i; j < cxt->list.num_items; j++) { + error = ops->post_parse_threading((void *)cxt, + (void *)&(cxt->codec_data[0]), + j-single_parse_count); // slice index + error = pcontext->func_process_parsing_result(pcontext, j); // item num + } + //tt1 += systemTime() - t1; + + } else if (pcontext->threading_parse_scheme == SCHEME_SEQUENTIAL) { + // Multithread parsing Scheme-Sequential-Input. + // This interface pushes threads to parse one slice header at a time. 
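(A sketch of the bookkeeping behind the sequential scheme that follows — illustrative only; submit()/collect() are not names from this patch. With N = the active thread count, each vbp_thread_parse_syntax() call advances a depth-N software pipeline: the result that comes back belongs to the item submitted N-1 calls earlier, so the first N-1 calls yield no output and N-1 results are still in flight when the loop ends.

    // depth-N pipeline model of the loop below
    for (j = first_threaded_item; j < num_items; j++) {
        if (submit(j))               // got_output == 1 once the pipeline is full
            collect(j - (N - 1));    // result of an earlier submission
    }
    for (j = num_items - (N - 1); j < num_items; j++) {
        if (submit(j))               // drain pass: no new payload, just flush results
            collect(j);
    }

The modulo-thread_count index arithmetic in the real code maps each collected result back to its per-thread slice-header slot.)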
+ uint32_t j; + for (j = i; j < cxt->list.num_items; j++) { + vbp_setup_parser_for_item(cxt, j); + + //t1 = systemTime(); + got_output = vbp_thread_parse_syntax((void *)cxt, + (void *)&(cxt->codec_data[0]), + pcontext); + //tt1 += systemTime() - t1; + + if (got_output == 1) { + //t2 = systemTime(); + error = ops->post_parse_threading((void *)cxt, + (void *)&(cxt->codec_data[0]), + //slice count with thread delay + (j-(thread_count-1)-single_parse_count) % thread_count); + + error = pcontext->func_process_parsing_result(pcontext, + // item count with thread delay + j-(thread_count-1)); + + multi_parse_done ++; + //tt2 += systemTime() - t2; + } + } + + // the last (thread_count - 1) submissions are still in flight; drain them below + int need_to_clearance = thread_count - 1; + cxt->getbits.bstrm_buf.buf = NULL; + for (i = cxt->list.num_items - need_to_clearance; i < cxt->list.num_items; i++) { + //t1 = systemTime(); + got_output = vbp_thread_parse_syntax((void *)cxt, + (void *)&(cxt->codec_data[0]), + pcontext); + //&got_output); + //tt1 += systemTime() - t1; + + if (got_output == 1) { + //t2 = systemTime(); + error = ops->post_parse_threading((void *)cxt, + (void *)&(cxt->codec_data[0]), + (i-single_parse_count) % thread_count); + + error = pcontext->func_process_parsing_result(pcontext, i); + multi_parse_done ++; + //tt2 += systemTime() - t2; + } + } + } + } } - +#if 0 + tt0 = systemTime() - t0; + if (cxt->list.num_items > 8) { + total_time_of_multislice += tt0; + frame_multislice_num ++; + ETRACE("### ================== TIME CALCULATION ======================="); + ETRACE("### ------------item num: %d", cxt->list.num_items); + ETRACE("### ------------The frame[%d] cost time: %lld us", frame_num-1, tt0); + ETRACE("### ------------Accumulated multi-slice frames: %d", frame_multislice_num); + ETRACE("### ------------Accumulated average time that multislice frame cost: %lld us", total_time_of_multislice/frame_multislice_num); + ETRACE("### ================== TIME CALCULATION END ==================="); + } +#endif return VBP_OK; } @@ -463,6 +631,21 @@ uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext) *ppcontext = pcontext; error = VBP_OK; + + /* default is not enabled */ + pcontext->is_multithread_parsing_enabled = 0; + +#ifndef USE_AVC_SHORT_FORMAT +#ifdef USE_MULTI_THREADING + if (pcontext->parser_ops->query_thread_parsing_cap != NULL) { + if (pcontext->parser_ops->query_thread_parsing_cap() == 1) { + pcontext->is_multithread_parsing_enabled = 1; + ITRACE("Multi-thread parsing is enabled."); + vbp_thread_init(pcontext); + } + } +#endif +#endif cleanup: if (VBP_OK != error) @@ -483,6 +666,12 @@ cleanup: */ uint32 vbp_utils_destroy_context(vbp_context *pcontext) { +#ifdef USE_MULTI_THREADING + if (1 == pcontext->is_multithread_parsing_enabled) { + vbp_thread_free(pcontext); + } +#endif + /* entry point, no need to validate input parameters. 
*/ vbp_utils_free_parser_memory(pcontext); vbp_utils_uninitialize_context(pcontext); diff --git a/mixvbp/vbp_manager/vbp_utils.h b/mixvbp/vbp_manager/vbp_utils.h index 633159c..7cf9321 100755 --- a/mixvbp/vbp_manager/vbp_utils.h +++ b/mixvbp/vbp_manager/vbp_utils.h @@ -31,6 +31,9 @@ #include "viddec_pm.h" #include "vbp_trace.h" #include +#include "vbp_loader.h" + +#include #define MAGIC_NUMBER 0x0DEADBEEF #define MAX_WORKLOAD_ITEMS 1000 @@ -68,6 +71,13 @@ typedef uint32 (*function_populate_query_data)(vbp_context* cxt); typedef uint32 (*function_update_data)(vbp_context* cxt, void *newdata, uint32 size); #endif +typedef enum +{ + SCHEME_BUNDLE = 0, + SCHEME_SEQUENTIAL, +} threading_parse_scheme_t; + + struct vbp_context_t { /* magic number */ @@ -94,6 +104,13 @@ struct vbp_context_t /* parser type specific data*/ void *parser_private; + /* multithreading */ + uint32 thread_count; + void *thread_opaque; + uint32 is_multithread_parsing_enabled; + + threading_parse_scheme_t threading_parse_scheme; + function_init_parser_entries func_init_parser_entries; function_allocate_query_data func_allocate_query_data; function_free_query_data func_free_query_data; diff --git a/mixvbp/vbp_manager/vbp_vc1_parser.c b/mixvbp/vbp_manager/vbp_vc1_parser.c index 12e28e9..65b6f76 100755 --- a/mixvbp/vbp_manager/vbp_vc1_parser.c +++ b/mixvbp/vbp_manager/vbp_vc1_parser.c @@ -111,6 +111,10 @@ uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext) /* entry point not needed */ pcontext->parser_ops->flush = NULL; + pcontext->parser_ops->is_payload_start = NULL; + pcontext->parser_ops->parse_syntax_threading = NULL; + pcontext->parser_ops->post_parse_threading = NULL; + pcontext->parser_ops->query_thread_parsing_cap = NULL; return VBP_OK; } diff --git a/mixvbp/vbp_manager/vbp_vp8_parser.c b/mixvbp/vbp_manager/vbp_vp8_parser.c index 73d9281..9ac097d 100755 --- a/mixvbp/vbp_manager/vbp_vp8_parser.c +++ b/mixvbp/vbp_manager/vbp_vp8_parser.c @@ -67,6 +67,10 @@ uint32 vbp_init_parser_entries_vp8(vbp_context *pcontext) pcontext->parser_ops->is_frame_start = NULL; pcontext->parser_ops->flush = NULL; + pcontext->parser_ops->is_payload_start = NULL; + pcontext->parser_ops->parse_syntax_threading = NULL; + pcontext->parser_ops->post_parse_threading = NULL; + pcontext->parser_ops->query_thread_parsing_cap = NULL; return VBP_OK; } diff --git a/mixvbp/vbp_plugin/h264/h264parse.c b/mixvbp/vbp_plugin/h264/h264parse.c index cbb04fe..330c5e6 100755 --- a/mixvbp/vbp_plugin/h264/h264parse.c +++ b/mixvbp/vbp_plugin/h264/h264parse.c @@ -65,7 +65,7 @@ h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOf /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ - +// keep for h264 secure parse h264_Status h264_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader) { //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; @@ -114,14 +114,43 @@ h264_Status h264_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader } } - pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); - pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag? 
\ - (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) : \ - ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1); + pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); + pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag ? + (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) : + ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1); return H264_STATUS_OK; }; //// End of h264_active_par_set + + +h264_Status h264_set_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader) +{ + uint32_t pps_addr = pInfo->PPS_PADDR_GL + + SliceHeader->pic_parameter_id * sizeof(pic_param_set); + SliceHeader->active_PPS = (pic_param_set*)pps_addr; + pic_param_set* active_PPS = SliceHeader->active_PPS; + + if (active_PPS->seq_parameter_set_id >= MAX_NUM_SPS) + { + return H264_PPS_INVALID_PIC_ID; /// Invalid PPS detected + } + + uint32_t sps_addr = pInfo->SPS_PADDR_GL + \ + active_PPS->seq_parameter_set_id * sizeof(seq_param_set_all); + SliceHeader->active_SPS = (seq_param_set_used*)sps_addr; + seq_param_set_used* active_SPS = SliceHeader->active_SPS; + + if (active_SPS->seq_parameter_set_id >= MAX_NUM_SPS) + { + return H264_PPS_INVALID_PIC_ID; //// Invalid SPS detected (the PPS error code is reused here) + } + + return H264_STATUS_OK; +}; // End of h264_set_active_par_set + + + /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -173,38 +202,183 @@ h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_ SliceHeader->sh_error |= 4; } - } else { + } else { SliceHeader->sh_error |= 1; } + return retStatus; +} - //if(SliceHeader->sh_error) { - //pInfo->wl_err_flag |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - //} +h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +{ + h264_Status retStatus = H264_STATUS_ERROR; + + //////////////////////////////////////////////////// + //// Parse slice header info + //// Part 1: does not depend on the active PPS/SPS + //// Parts 2/3: depend on the active parameter sets + ////////////////////////////////////////////////// + SliceHeader->sh_error = 0; + if (h264_Parse_Slice_Header_1(parent, pInfo, SliceHeader) == H264_STATUS_OK) + { + retStatus = h264_set_active_par_set(pInfo, SliceHeader); + } - ////////////////////////////////// - //// Parse slice data (MB loop) - ////////////////////////////////// - //retStatus = h264_Parse_Slice_Data(pInfo); + if (retStatus == H264_STATUS_OK) { - //uint32_t data = 0; - //if( viddec_pm_peek_bits(parent, &data, 32) == -1) - //retStatus = H264_STATUS_ERROR; + switch (SliceHeader->active_SPS->profile_idc) + { + case h264_ProfileBaseline: + case h264_ProfileMain: + case h264_ProfileExtended: + SliceHeader->active_PPS->transform_8x8_mode_flag = 0; + SliceHeader->active_PPS->pic_scaling_matrix_present_flag = 0; + SliceHeader->active_PPS->second_chroma_qp_index_offset = + SliceHeader->active_PPS->chroma_qp_index_offset; + + default: + break; + } + + if (h264_Parse_Slice_Header_2_opt(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + SliceHeader->sh_error |= 2; + } + else if (h264_Parse_Slice_Header_3_opt(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + SliceHeader->sh_error |= 4; + } + } else { + SliceHeader->sh_error |= 1; } - 
//h264_Parse_rbsp_trailing_bits(pInfo); return retStatus; } +h264_Status h264_Post_Parsing_Slice_Header(void *parent, h264_Info* pInfo, h264_Slice_Header_t *next_SliceHeader) +{ + + h264_Status retStatus = H264_STATUS_OK; + + memcpy(&pInfo->active_PPS, next_SliceHeader->active_PPS, sizeof(pic_param_set)); + memcpy(&pInfo->active_SPS, next_SliceHeader->active_SPS, sizeof(seq_param_set_used)); + + if ((1 == pInfo->primary_pic_type_plus_one) && (pInfo->got_start)) { + pInfo->img.recovery_point_found |= 4; + } + pInfo->primary_pic_type_plus_one = 0; + + pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); + pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag? \ + (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) : \ + ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1); + + pInfo->sei_information.recovery_point = 0; + + pInfo->img.current_slice_num++; + + + //////////////////////////////////////////////////////////////////////////// + // Processing if new picture coming + // 1) if it's the second field + // 2) if it's a new frame + //////////////////////////////////////////////////////////////////////////// + //AssignQuantParam(pInfo); + if (h264_is_new_picture_start(pInfo, *next_SliceHeader, pInfo->SliceHeader)) + { + // + ///----------------- New Picture.boundary detected-------------------- + // + pInfo->img.g_new_pic++; + + // + // Complete previous picture + h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old + //h264_hdr_post_poc(0, 0, use_old); + // + // Update slice structures: + h264_update_old_slice(pInfo, *next_SliceHeader); //cur->old; next->cur; + // + // 1) if resolution change: reset dpb + // 2) else: init frame store + h264_update_img_info(pInfo); //img, dpb + // + ///----------------- New frame.boundary detected-------------------- + // + pInfo->img.second_field = h264_is_second_field(pInfo); + if (pInfo->img.second_field == 0) + { + pInfo->img.g_new_frame = 1; + h264_dpb_update_queue_dangling_field(pInfo); + // + /// DPB management + /// 1) check the gaps + /// 2) assign fs for non-exist frames + /// 3) fill the gaps + /// 4) store frame into DPB if ... 
+ // + //if(pInfo->SliceHeader.redundant_pic_cnt) + { + h264_dpb_gaps_in_frame_num_mem_management(pInfo); + } + } + // + /// Decoding POC + h264_hdr_decoding_poc (pInfo, 0, 0); + // + /// Init Frame Store for next frame + h264_dpb_init_frame_store (pInfo); + pInfo->img.current_slice_num = 1; + if (pInfo->SliceHeader.first_mb_in_slice != 0) + { + ////Come here means we have slice lost at the beginning, since no FMO support + pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17); + } + /// Emit out the New Frame + if (pInfo->img.g_new_frame) + { + h264_parse_emit_start_new_frame(parent, pInfo); + } + + h264_parse_emit_current_pic(parent, pInfo); + } + else ///////////////////////////////////////////////////// If Not a picture start + { + // + /// Update slice structures: cur->old; next->cur; + h264_update_old_slice(pInfo, *next_SliceHeader); + // + /// 1) if resolution change: reset dpb + /// 2) else: update img info + h264_update_img_info(pInfo); + } + + + ////////////////////////////////////////////////////////////// + // DPB reference list init and reordering + ////////////////////////////////////////////////////////////// + + //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field + h264_update_frame_type(pInfo); + +#ifndef USE_AVC_SHORT_FORMAT + h264_dpb_update_ref_lists(pInfo); +#endif + /// Emit out the current "good" slice + h264_parse_emit_current_slice(parent, pInfo); + + return retStatus; +} + /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ -h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc) +h264_Status h264_Parse_NAL_Unit(void *parent, uint8_t* nal_unit_type, uint8_t *nal_ref_idc) { h264_Status ret = H264_STATUS_ERROR; @@ -212,7 +386,7 @@ h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref if (viddec_pm_get_bits(parent, &code, 8) != -1) { *nal_ref_idc = (uint8_t)((code >> 5) & 0x3); - pInfo->nal_unit_type = (uint8_t)((code >> 0) & 0x1f); + *nal_unit_type = (uint8_t)((code >> 0) & 0x1f); ret = H264_STATUS_OK; } @@ -430,43 +604,58 @@ int32_t h264_is_second_field(h264_Info * pInfo) /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ -int32_t h264_is_new_picture_start(h264_Info * pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice) +int32_t h264_is_new_picture_start(h264_Info * pInfo, + h264_Slice_Header_t cur_slice, + h264_Slice_Header_t old_slice) { int result = 0; - if (pInfo->number_of_first_au_info_nal_before_first_slice) - { + if (pInfo->number_of_first_au_info_nal_before_first_slice) { pInfo->number_of_first_au_info_nal_before_first_slice = 0; return 1; } - result |= (old_slice.pic_parameter_id != cur_slice.pic_parameter_id); - result |= (old_slice.frame_num != cur_slice.frame_num); - result |= (old_slice.field_pic_flag != cur_slice.field_pic_flag); - if (cur_slice.field_pic_flag && old_slice.field_pic_flag) - { - result |= (old_slice.bottom_field_flag != cur_slice.bottom_field_flag); + if (old_slice.pic_parameter_id != cur_slice.pic_parameter_id) { + return 1; + } + if (old_slice.frame_num != cur_slice.frame_num) { + return 1; + } + if (old_slice.field_pic_flag 
!= cur_slice.field_pic_flag) { + return 1; + } + if (cur_slice.field_pic_flag && old_slice.field_pic_flag) { + if (old_slice.bottom_field_flag != cur_slice.bottom_field_flag) { + return 1; + } } - result |= (old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \ - ((old_slice.nal_ref_idc == 0) || (cur_slice.nal_ref_idc == 0)); - result |= ( old_slice.idr_flag != cur_slice.idr_flag); + if ((old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \ + ((old_slice.nal_ref_idc == 0) || (cur_slice.nal_ref_idc == 0))) { + return 1; + } + if (old_slice.idr_flag != cur_slice.idr_flag) { + return 1; + } - if (cur_slice.idr_flag && old_slice.idr_flag) - { - result |= (old_slice.idr_pic_id != cur_slice.idr_pic_id); + if (cur_slice.idr_flag && old_slice.idr_flag) { + if (old_slice.idr_pic_id != cur_slice.idr_pic_id) { + return 1; + } } - if (pInfo->active_SPS.pic_order_cnt_type == 0) - { - result |= (old_slice.pic_order_cnt_lsb != cur_slice.pic_order_cnt_lsb); - result |= (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom); + if (pInfo->active_SPS.pic_order_cnt_type == 0) { + if ((old_slice.pic_order_cnt_lsb != cur_slice.pic_order_cnt_lsb) || \ + (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom)) { + return 1; + } } - if (pInfo->active_SPS.pic_order_cnt_type == 1) - { - result |= (old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]); - result |= (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1]); + if (pInfo->active_SPS.pic_order_cnt_type == 1) { + if ((old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]) || \ + (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1])) { + return 1; + } } return result; diff --git a/mixvbp/vbp_plugin/h264/h264parse_dpb.c b/mixvbp/vbp_plugin/h264/h264parse_dpb.c old mode 100644 new mode 100755 diff --git a/mixvbp/vbp_plugin/h264/h264parse_pps.c b/mixvbp/vbp_plugin/h264/h264parse_pps.c index 2c4cc52..b4098ec 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_pps.c +++ b/mixvbp/vbp_plugin/h264/h264parse_pps.c @@ -22,6 +22,8 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa } PictureParameterSet->pic_parameter_set_id = (uint8_t)code; + VTRACE("parsing PPS: id = %d", PictureParameterSet->pic_parameter_set_id); + code = h264_GetVLCElement(parent, pInfo, false); if (code > MAX_NUM_SPS - 1) { @@ -30,6 +32,8 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa } PictureParameterSet->seq_parameter_set_id = (uint8_t)code; + VTRACE("parsing PPS: referring SPS id = %d", PictureParameterSet->seq_parameter_set_id); + ///// entropy_coding_mode_flag viddec_pm_get_bits(parent, &code, 1); PictureParameterSet->entropy_coding_mode_flag = (uint8_t)code; diff --git a/mixvbp/vbp_plugin/h264/h264parse_sh.c b/mixvbp/vbp_plugin/h264/h264parse_sh.c index 625e146..02bd202 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_sh.c +++ b/mixvbp/vbp_plugin/h264/h264parse_sh.c @@ -95,13 +95,17 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice } ////// Check valid or not of first_mb_in_slice + int32_t PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); + int32_t FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag ? 
+ (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) : + ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1); if (SliceHeader->structure == FRAME) { - max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs; + max_mb_num = FrameHeightInMbs * PicWidthInMbs; } else { - max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs / 2; + max_mb_num = FrameHeightInMbs * PicWidthInMbs / 2; } ///if(pInfo->img.MbaffFrameFlag) @@ -111,7 +115,10 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice } if (SliceHeader->first_mb_in_slice >= max_mb_num) + { + WTRACE("first mb in slice exceeds max mb num."); break; + } if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) { @@ -165,6 +172,121 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice return ret; } +h264_Status h264_Parse_Slice_Header_2_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +{ + h264_Status ret = H264_SliceHeader_ERROR; + + uint32_t code; + int32_t max_mb_num=0; + + do { + //////////////////////////////////// Slice header part 2////////////////// + + /// Frame_num + viddec_pm_get_bits(parent, &code, SliceHeader->active_SPS->log2_max_frame_num_minus4 + 4); + SliceHeader->frame_num = (int32_t)code; + + /// Picture structure + SliceHeader->structure = FRAME; + SliceHeader->field_pic_flag = 0; + SliceHeader->bottom_field_flag = 0; + + if (!(SliceHeader->active_SPS->sps_disp.frame_mbs_only_flag)) + { + /// field_pic_flag + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->field_pic_flag = (uint8_t)code; + + if (SliceHeader->field_pic_flag) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->bottom_field_flag = (uint8_t)code; + + SliceHeader->structure = SliceHeader->bottom_field_flag ? BOTTOM_FIELD: TOP_FIELD; + } + } + + ////// Check valid or not of first_mb_in_slice + int32_t PicWidthInMbs = (SliceHeader->active_SPS->sps_disp.pic_width_in_mbs_minus1 + 1); + int32_t FrameHeightInMbs = SliceHeader->active_SPS->sps_disp.frame_mbs_only_flag ? 
+ (SliceHeader->active_SPS->sps_disp.pic_height_in_map_units_minus1 + 1) : + ((SliceHeader->active_SPS->sps_disp.pic_height_in_map_units_minus1 + 1) << 1); + if (SliceHeader->structure == FRAME) + { + max_mb_num = FrameHeightInMbs * PicWidthInMbs; + } + else + { + max_mb_num = FrameHeightInMbs * PicWidthInMbs / 2; + } + + ///if(pInfo->img.MbaffFrameFlag) + if (SliceHeader->active_SPS->sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) + { + SliceHeader->first_mb_in_slice <<= 1; + } + + if (SliceHeader->first_mb_in_slice >= max_mb_num) + { + WTRACE("first mb in slice exceeds max mb num."); + break; + } + + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + SliceHeader->idr_pic_id = h264_GetVLCElement(parent, pInfo, false); + } + + if (SliceHeader->active_SPS->pic_order_cnt_type == 0) + { + viddec_pm_get_bits(parent, &code , SliceHeader->active_SPS->log2_max_pic_order_cnt_lsb_minus4 + 4); + SliceHeader->pic_order_cnt_lsb = (uint32_t)code; + + if ((SliceHeader->active_PPS->pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt_bottom = h264_GetVLCElement(parent, pInfo, true); + } + else + { + SliceHeader->delta_pic_order_cnt_bottom = 0; + } + } + + if ((SliceHeader->active_SPS->pic_order_cnt_type == 1) && + !(SliceHeader->active_SPS->delta_pic_order_always_zero_flag)) + { + SliceHeader->delta_pic_order_cnt[0] = h264_GetVLCElement(parent, pInfo, true); + if ((SliceHeader->active_PPS->pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt[1] = h264_GetVLCElement(parent, pInfo, true); + } + } + + if (SliceHeader->active_PPS->redundant_pic_cnt_present_flag) + { + SliceHeader->redundant_pic_cnt = h264_GetVLCElement(parent, pInfo, false); + if (SliceHeader->redundant_pic_cnt > 127) + break; + } + else + { + SliceHeader->redundant_pic_cnt = 0; + } + + ret = H264_STATUS_OK; + } while (0); + + //////////// FMO is not supported currently, so comment out the following code + //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) ) + //{ + // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile + //} + + return ret; +} + + + /*-----------------------------------------------------------------------------------------*/ // slice header 3 // (direct_spatial_mv_pred_flag, num_ref_idx, pic_list_reorder, PWT, ref_pic_remark, alpha, beta, etc) @@ -244,14 +366,13 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice //// //// Parse Pred_weight_table but not store it becasue it will be reparsed in HW //// - if (((pInfo->active_PPS.weighted_pred_flag) && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) + if (((pInfo->active_PPS.weighted_pred_flag) + && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) + || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) { viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - pInfo->h264_pwt_enabled = 1; - pInfo->h264_pwt_start_byte_offset = byte_offset; - pInfo->h264_pwt_start_bit_offset = bits_offset; if (h264_Parse_Pred_Weight_Table(parent, pInfo, SliceHeader) != H264_STATUS_OK) { @@ -259,18 +380,190 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, 
h264_Info* pInfo, h264_Slice } viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + } + + - if (0 == bits_offset) + //// + //// Parse Ref_pic_marking if present + //// + if (SliceHeader->nal_ref_idc != 0) + { + if (h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, SliceHeader) != H264_STATUS_OK) { - pInfo->h264_pwt_end_byte_offset = byte_offset-1; - pInfo->h264_pwt_end_bit_offset = 8; + WTRACE("ref pic marking failed during slice header parsing."); + break; + } + } + + if ((pInfo->active_PPS.entropy_coding_mode_flag) && (SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) + { + SliceHeader->cabac_init_idc = h264_GetVLCElement(parent, pInfo, false); + } + else + { + SliceHeader->cabac_init_idc = 0; + } + + if (SliceHeader->cabac_init_idc > 2) + { + break; + } + + SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true); + if ((SliceHeader->slice_qp_delta > (25 - pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26 + pInfo->active_PPS.pic_init_qp_minus26))) + { + WTRACE("slice_qp_delta value is invalid."); + break; + } + + if ((SliceHeader->slice_type == h264_PtypeSP) || (SliceHeader->slice_type == h264_PtypeSI)) + { + if (SliceHeader->slice_type == h264_PtypeSP) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->sp_for_switch_flag = (uint8_t)code; + + } + SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true); + + if ((SliceHeader->slice_qs_delta > (25 - pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) + { + WTRACE("slice_qs_delta value is invalid."); + break; + } + } + if (pInfo->active_PPS.deblocking_filter_control_present_flag) + { + SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false); + if (SliceHeader->disable_deblocking_filter_idc != 1) + { + SliceHeader->slice_alpha_c0_offset_div2 = h264_GetVLCElement(parent, pInfo, true); + slice_alpha_c0_offset = SliceHeader->slice_alpha_c0_offset_div2 << 1; + if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12) + { + break; + } + + SliceHeader->slice_beta_offset_div2 = h264_GetVLCElement(parent, pInfo, true); + slice_beta_offset = SliceHeader->slice_beta_offset_div2 << 1; + if (slice_beta_offset < -12 || slice_beta_offset > 12) + { + break; + } } else { - pInfo->h264_pwt_end_byte_offset = byte_offset; - pInfo->h264_pwt_end_bit_offset = bits_offset; + SliceHeader->slice_alpha_c0_offset_div2 = 0; + SliceHeader->slice_beta_offset_div2 = 0; + } + } + + ret = H264_STATUS_OK; + } while (0); + + //////////// FMO is not supported currently, so comment out the following code + //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) ) + //{ + // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile + //} + + return ret; +} + + +h264_Status h264_Parse_Slice_Header_3_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +{ + h264_Status ret = H264_SliceHeader_ERROR; + + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + int32_t slice_alpha_c0_offset, slice_beta_offset; + uint32_t code; + uint32_t bits_offset =0, byte_offset =0; + uint8_t is_emul =0; + + do { + /// direct_spatial_mv_pred_flag + if (SliceHeader->slice_type == h264_PtypeB) + { + viddec_pm_get_bits(parent, &code , 1); + SliceHeader->direct_spatial_mv_pred_flag = (uint8_t)code; + } + else + { + 
SliceHeader->direct_spatial_mv_pred_flag = 0; + } + + // + // Reset ref_idx and override it if present + // + SliceHeader->num_ref_idx_l0_active = SliceHeader->active_PPS->num_ref_idx_l0_active; + SliceHeader->num_ref_idx_l1_active = SliceHeader->active_PPS->num_ref_idx_l1_active; + + if ((SliceHeader->slice_type == h264_PtypeP) || + (SliceHeader->slice_type == h264_PtypeSP) || + (SliceHeader->slice_type == h264_PtypeB)) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->num_ref_idx_active_override_flag = (uint8_t)code; + + if (SliceHeader->num_ref_idx_active_override_flag) + { + SliceHeader->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false) + 1; + if (SliceHeader->slice_type == h264_PtypeB) + { + SliceHeader->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false) + 1; + } + } + } + + if (SliceHeader->slice_type != h264_PtypeB) + { + SliceHeader->num_ref_idx_l1_active = 0; + } + + if ((SliceHeader->num_ref_idx_l0_active > MAX_NUM_REF_FRAMES) || + (SliceHeader->num_ref_idx_l1_active > MAX_NUM_REF_FRAMES)) + { + WTRACE("ref index greater than expected during slice header parsing."); + break; + } + +#ifdef USE_AVC_SHORT_FORMAT + bool keepParsing = false; + keepParsing = h264_is_new_picture_start(pInfo, *SliceHeader, pInfo->SliceHeader) && + (SliceHeader->nal_ref_idc != 0); + if (!keepParsing) + { + ITRACE("short format parsing: no need to go on!"); + ret = H264_STATUS_OK; + break; + } +#endif + if (h264_Parse_Ref_Pic_List_Reordering(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + WTRACE("ref list reordering failed during slice header parsing."); + break; + } + + + //// + //// Parse Pred_weight_table but not store it because it will be reparsed in HW + //// + if (((SliceHeader->active_PPS->weighted_pred_flag) + && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) + || ((SliceHeader->active_PPS->weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) + { + + //viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + if (h264_Parse_Pred_Weight_Table_opt(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + break; } + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + } @@ -287,7 +580,9 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice } } - if ((pInfo->active_PPS.entropy_coding_mode_flag) && (SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) + if ((SliceHeader->active_PPS->entropy_coding_mode_flag) && + (SliceHeader->slice_type != h264_PtypeI) && + (SliceHeader->slice_type != h264_PtypeSI)) { SliceHeader->cabac_init_idc = h264_GetVLCElement(parent, pInfo, false); } @@ -302,7 +597,8 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice } SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true); - if ((SliceHeader->slice_qp_delta > (25 - pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26 + pInfo->active_PPS.pic_init_qp_minus26))) + if ((SliceHeader->slice_qp_delta > (25 - SliceHeader->active_PPS->pic_init_qp_minus26)) || + (SliceHeader->slice_qp_delta < -(26 + SliceHeader->active_PPS->pic_init_qp_minus26))) { WTRACE("slice_qp_delta value is invalid."); break; @@ -318,13 +614,14 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice } SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true); - if ((SliceHeader->slice_qs_delta > (25 - pInfo->active_PPS.pic_init_qs_minus26)) || 
(SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) + if ((SliceHeader->slice_qs_delta > (25 - SliceHeader->active_PPS->pic_init_qs_minus26)) || + (SliceHeader->slice_qs_delta < -(26 + SliceHeader->active_PPS->pic_init_qs_minus26)) ) { WTRACE("slice_qp_delta value is invalid."); break; } } - if (pInfo->active_PPS.deblocking_filter_control_present_flag) + if (SliceHeader->active_PPS->deblocking_filter_control_present_flag) { SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false); if (SliceHeader->disable_deblocking_filter_idc != 1) @@ -363,6 +660,7 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice } + /*--------------------------------------------------------------------------------------------------*/ // // The syntax elements reordering_of_pic_nums_idc, abs_diff_pic_num_minus1, and long_term_pic_num @@ -377,7 +675,7 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice // /*--------------------------------------------------------------------------------------------------*/ -h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) +h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) { //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; int32_t reorder= -1; @@ -400,14 +698,18 @@ h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h2 return H264_SliceHeader_ERROR; } - SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); - if ((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1)) + SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = + h264_GetVLCElement(parent, pInfo, false); + if ((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || + (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1)) { - SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = + h264_GetVLCElement(parent, pInfo, false); } else if (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 2) { - SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = + h264_GetVLCElement(parent, pInfo, false); } } while (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] != 3); @@ -430,13 +732,16 @@ h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h2 return H264_SliceHeader_ERROR; } SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); - if ((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1)) + if ((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || + (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1)) { - SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = + h264_GetVLCElement(parent, pInfo, false); } else if 
(SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 2) { - SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = + h264_GetVLCElement(parent, pInfo, false); } } while (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] != 3); } @@ -514,7 +819,8 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli } else { - SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_weight_l1[i] = + (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; } @@ -535,7 +841,8 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli { for (j = 0; j < 2; j++) { - SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = + (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; } } @@ -548,6 +855,113 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli } ///// End of h264_Parse_Pred_Weight_Table +h264_Status h264_Parse_Pred_Weight_Table_opt(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) +{ + uint32_t i = 0, j = 0; + uint32_t flag; + + SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(parent, pInfo, false); + + if (SliceHeader->active_SPS->sps_disp.chroma_format_idc != 0) + { + SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(parent,pInfo, false); + } + + for (i = 0; i < SliceHeader->num_ref_idx_l0_active; i++) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.luma_weight_l0_flag = flag; + + if (SliceHeader->sh_predwttbl.luma_weight_l0_flag) + { + SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(parent, pInfo, true); + } + else + { + SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0; + } + + if (SliceHeader->active_SPS->sps_disp.chroma_format_idc != 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.chroma_weight_l0_flag = flag; + + if (SliceHeader->sh_predwttbl.chroma_weight_l0_flag) + { + for (j = 0; j < 2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for (j = 0; j < 2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = + (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; + } + } + } + + } + + if (SliceHeader->slice_type == h264_PtypeB) + { + for (i = 0; i < SliceHeader->num_ref_idx_l1_active; i++) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.luma_weight_l1_flag = flag; + + if (SliceHeader->sh_predwttbl.luma_weight_l1_flag) + { + SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(parent, pInfo, true); + } + else + { + SliceHeader->sh_predwttbl.luma_weight_l1[i] = + 
(1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; + } + + if (SliceHeader->active_SPS->sps_disp.chroma_format_idc != 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.chroma_weight_l1_flag = flag; + + if (SliceHeader->sh_predwttbl.chroma_weight_l1_flag) + { + for (j = 0; j < 2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = + h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = + h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for (j = 0; j < 2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = + (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; + } + } + } + + } + } + + return H264_STATUS_OK; +} + + + /*--------------------------------------------------------------------------------------------------*/ // The syntax elements specify marking of the reference pictures. // 1)IDR: no_output_of_prior_pics_flag, @@ -600,25 +1014,32 @@ h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent, h264_Info* pInfo,h264_S { if (i < NUM_MMCO_OPERATIONS) { - SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = h264_GetVLCElement(parent, pInfo, false); - if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3)) + SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = + h264_GetVLCElement(parent, pInfo, false); + if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || + (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3)) { - SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = + h264_GetVLCElement(parent, pInfo, false); } if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2) { - SliceHeader->sh_dec_refpic.long_term_pic_num[i] = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_dec_refpic.long_term_pic_num[i] = + h264_GetVLCElement(parent, pInfo, false); } - if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6)) + if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || + (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6)) { - SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = + h264_GetVLCElement(parent, pInfo, false); } if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4) { - SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = + h264_GetVLCElement(parent, pInfo, false); } if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5) diff --git a/mixvbp/vbp_plugin/h264/include/h264.h b/mixvbp/vbp_plugin/h264/include/h264.h index 7015c37..acb3a59 100755 --- a/mixvbp/vbp_plugin/h264/include/h264.h +++ b/mixvbp/vbp_plugin/h264/include/h264.h @@ -681,56 +681,8 @@ extern "C" { int8_t chroma_offset_l1[32][2]; } h264_pred_weight_table; - typedef struct _h264_Slice_Header - { - int32_t first_mb_in_slice; //UE - int32_t frame_num; //UV - int32_t pic_order_cnt_lsb; //UV - int32_t 
delta_pic_order_cnt_bottom; //SE - int32_t delta_pic_order_cnt[2]; //SE - int32_t redundant_pic_cnt; //UE - - uint32_t num_ref_idx_l0_active; //UE - uint32_t num_ref_idx_l1_active; //UE - - int32_t slice_qp_delta; //SE - int32_t slice_qs_delta; //SE - int32_t slice_alpha_c0_offset_div2; //SE - int32_t slice_beta_offset_div2; //SE - int32_t slice_group_change_cycle; //UV - - h264_pred_weight_table sh_predwttbl; - - ///// Flags or IDs - //h264_ptype_t slice_type; //UE - uint8_t slice_type; - uint8_t nal_ref_idc; - uint8_t structure; - uint8_t pic_parameter_id; //UE - - uint8_t field_pic_flag; - uint8_t bottom_field_flag; - uint8_t idr_flag; //UE - uint8_t idr_pic_id; //UE - - uint8_t sh_error; - uint8_t cabac_init_idc; //UE - uint8_t sp_for_switch_flag; - uint8_t disable_deblocking_filter_idc; //UE - - uint8_t direct_spatial_mv_pred_flag; - uint8_t num_ref_idx_active_override_flag; - int16_t current_slice_nr; - - //// For Ref list reordering - h264_Dec_Ref_Pic_Marking_t sh_dec_refpic; - h264_Ref_Pic_List_Reordering_t sh_refpic_l0; - h264_Ref_Pic_List_Reordering_t sh_refpic_l1; - - } h264_Slice_Header_t; - +#define MAX_USER_DATA_SIZE 1024 -#define MAX_USER_DATA_SIZE 1024 typedef struct _h264_user_data_t { h264_sei_payloadtype user_data_type; @@ -828,6 +780,71 @@ extern "C" { } seq_param_set_all, *seq_param_set_all_ptr; + typedef struct _h264_Slice_Header + { + int32_t first_mb_in_slice; //UE + int32_t frame_num; //UV + int32_t pic_order_cnt_lsb; //UV + int32_t delta_pic_order_cnt_bottom; //SE + int32_t delta_pic_order_cnt[2]; //SE + int32_t redundant_pic_cnt; //UE + + uint32_t num_ref_idx_l0_active; //UE + uint32_t num_ref_idx_l1_active; //UE + + int32_t slice_qp_delta; //SE + int32_t slice_qs_delta; //SE + int32_t slice_alpha_c0_offset_div2; //SE + int32_t slice_beta_offset_div2; //SE + int32_t slice_group_change_cycle; //UV + + h264_pred_weight_table sh_predwttbl; + + ///// Flags or IDs + //h264_ptype_t slice_type; //UE + uint8_t slice_type; + uint8_t nal_ref_idc; + uint8_t structure; + uint8_t pic_parameter_id; //UE + + uint8_t field_pic_flag; + uint8_t bottom_field_flag; + uint8_t idr_flag; //UE + uint8_t idr_pic_id; //UE + + uint8_t sh_error; + uint8_t cabac_init_idc; //UE + uint8_t sp_for_switch_flag; + uint8_t disable_deblocking_filter_idc; //UE + + uint8_t direct_spatial_mv_pred_flag; + uint8_t num_ref_idx_active_override_flag; + int16_t current_slice_nr; + + //// For Ref list reordering + h264_Dec_Ref_Pic_Marking_t sh_dec_refpic; + h264_Ref_Pic_List_Reordering_t sh_refpic_l0; + h264_Ref_Pic_List_Reordering_t sh_refpic_l1; + + seq_param_set_used* active_SPS; + pic_param_set* active_PPS; + uint32_t parse_done; // flag to indicate parse done + + // temp field for multithread parsing to store bitstream info + uint32_t bstrm_buf_buf_index; + uint32_t bstrm_buf_buf_st; + uint32_t bstrm_buf_buf_end; + uint32_t bstrm_buf_buf_bitoff; + uint32_t au_pos; + uint32_t list_off; + uint32_t phase; + uint32_t emulation_byte_counter; + uint32_t is_emul_reqd; + int32_t list_start_offset; + int32_t list_end_offset; + int32_t list_total_bytes; + + } h264_Slice_Header_t; ///// Image control parameter//////////// typedef struct _h264_img_par @@ -932,13 +949,14 @@ extern "C" { //// Structures //// need to gurantee active_SPS and active_PPS start from 4-bytes alignment address seq_param_set_used active_SPS; - pic_param_set active_PPS; + pic_param_set active_PPS; h264_Slice_Header_t SliceHeader; OldSliceParams old_slice; sei_info sei_information; + h264_Slice_Header_t* working_sh[150]; // working slice header 
for multithreading h264_img_par img; uintptr_t SPS_PADDR_GL; diff --git a/mixvbp/vbp_plugin/h264/include/h264parse.h b/mixvbp/vbp_plugin/h264/include/h264parse.h index 2b1c7a6..6adee42 100755 --- a/mixvbp/vbp_plugin/h264/include/h264parse.h +++ b/mixvbp/vbp_plugin/h264/include/h264parse.h @@ -89,7 +89,7 @@ extern "C" { //////////////////////////////////////////////////////////////////// //NAL - extern h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc); + extern h264_Status h264_Parse_NAL_Unit(void *parent, uint8_t *nal_unit_type, uint8_t *nal_ref_idc); ////// Slice header extern h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); @@ -97,6 +97,12 @@ extern "C" { extern h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); extern h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); +// For multi-thread parsing optimized. + extern h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Slice_Header_2_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Slice_Header_3_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + + extern h264_Status h264_Post_Parsing_Slice_Header(void *parent, h264_Info* pInfo, h264_Slice_Header_t *next_SliceHeader); ////// SPS extern h264_Status h264_Parse_SeqParameterSet(void *parent, h264_Info * pInfo,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame); @@ -113,6 +119,8 @@ extern "C" { extern h264_Status h264_Parse_Pred_Weight_Table(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); extern h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Pred_Weight_Table_opt(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); + ///// Mem functions diff --git a/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c index 62e6ab3..eb85022 100755 --- a/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c +++ b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c @@ -204,10 +204,12 @@ void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) { - uint32_t i=0,nitems=0; + uint32_t i=0,nitems=0; ///////////////////////// Frame attributes////////////////////////// +// Remove workload related stuff +# if 0 //Push data into current workload if first frame or frame_boundary already detected by non slice nal if ( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal)) { @@ -227,6 +229,7 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) pInfo->is_current_workload_done=1; } +#endif ///////////////////// SPS///////////////////// // h264_parse_emit_sps(parent, pInfo); diff --git a/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c index 9490ddd..b9ec1c0 100755 --- a/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c +++ b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c @@ -40,14 +40,15 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) uint8_t nal_ref_idc = 0; + 
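(The recurring pattern in this and the following hunks: h264_Parse_NAL_Unit() now returns the NAL type through an out parameter instead of writing pInfo->nal_unit_type internally, presumably so that the shared pInfo is updated from a single place per call. The calling sequence, condensed from the hunks themselves:

    uint8_t nal_ref_idc = 0, nal_unit_type = 0;
    h264_Parse_NAL_Unit(parent, &nal_unit_type, &nal_ref_idc);
    pInfo->nal_unit_type = nal_unit_type;   /* caller updates the shared state */
)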
uint8_t nal_unit_type = 0; ///// Parse NAL Unit header pInfo->img.g_new_frame = 0; pInfo->push_to_cur = 1; pInfo->is_current_workload_done =0; - pInfo->nal_unit_type = 0; - h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); + h264_Parse_NAL_Unit(parent, &nal_unit_type, &nal_ref_idc); + pInfo->nal_unit_type = nal_unit_type; ///// Check frame bounday for non-vcl elimitter h264_check_previous_frame_end(pInfo); diff --git a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c index 28a319a..fcf5c36 100755 --- a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c +++ b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c @@ -7,6 +7,12 @@ #include "h264parse_dpb.h" #include +#include + +uint32_t viddec_threading_backup_ctx_info(void *parent, h264_Slice_Header_t *next_SliceHeader); +uint32_t viddec_threading_restore_ctx_info(void *parent, h264_Slice_Header_t *next_SliceHeader); + +#define MAX_SLICE_HEADER 150 /* Init function which can be called to intialized local context on open and flush and preserve*/ void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) @@ -24,6 +30,26 @@ void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) /* picture level info which will always be initialized */ h264_init_Info_under_sps_pps_level(pInfo); + uint32_t i; + for(i = 0; i < MAX_SLICE_HEADER; i++) { + pInfo->working_sh[i] = (h264_Slice_Header_t*)malloc(sizeof(h264_Slice_Header_t)); + assert(pInfo->working_sh[i] != NULL); + + pInfo->working_sh[i]->parse_done = 0; + pInfo->working_sh[i]->bstrm_buf_buf_index = 0; + pInfo->working_sh[i]->bstrm_buf_buf_st = 0; + pInfo->working_sh[i]->bstrm_buf_buf_end = 0; + pInfo->working_sh[i]->bstrm_buf_buf_bitoff = 0; + pInfo->working_sh[i]->au_pos = 0; + pInfo->working_sh[i]->list_off = 0; + pInfo->working_sh[i]->phase = 0; + pInfo->working_sh[i]->emulation_byte_counter = 0; + pInfo->working_sh[i]->is_emul_reqd = 0; + pInfo->working_sh[i]->list_start_offset = 0; + pInfo->working_sh[i]->list_end_offset = 0; + pInfo->working_sh[i]->list_total_bytes = 0; + pInfo->working_sh[i]->slice_group_change_cycle = 0; + } return; } @@ -40,6 +66,7 @@ uint32_t viddec_h264_parse(void *parent, void *ctxt) h264_Status status = H264_STATUS_ERROR; uint8_t nal_ref_idc = 0; + uint8_t nal_unit_type = 0; ///// Parse NAL Unit header pInfo->img.g_new_frame = 0; @@ -47,9 +74,10 @@ uint32_t viddec_h264_parse(void *parent, void *ctxt) pInfo->is_current_workload_done =0; pInfo->nal_unit_type = 0; - h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); + h264_Parse_NAL_Unit(parent, &nal_unit_type, &nal_ref_idc); VTRACE("Start parsing NAL unit, type = %d", pInfo->nal_unit_type); + pInfo->nal_unit_type = nal_unit_type; ///// Check frame bounday for non-vcl elimitter h264_check_previous_frame_end(pInfo); @@ -418,6 +446,64 @@ uint32_t viddec_h264_parse(void *parent, void *ctxt) return status; } + +uint32_t viddec_h264_threading_parse(void *parent, void *ctxt, uint32_t slice_index) +{ + struct h264_viddec_parser* parser = ctxt; + + h264_Info * pInfo = &(parser->info); + + h264_Status status = H264_STATUS_ERROR; + + uint8_t nal_ref_idc = 0; + uint8_t nal_unit_type = 0; + + h264_Parse_NAL_Unit(parent, &nal_unit_type, &nal_ref_idc); + + pInfo->nal_unit_type = nal_unit_type; + + + //////// Parse valid NAL unit + if (nal_unit_type == h264_NAL_UNIT_TYPE_SLICE) { + h264_Slice_Header_t* next_SliceHeader = pInfo->working_sh[slice_index]; + memset(next_SliceHeader, 0, sizeof(h264_Slice_Header_t)); + + next_SliceHeader->nal_ref_idc = nal_ref_idc; + + + 
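(The worker/consumer handshake used by the threading path, condensed from this file — every call below is defined in the patch: each worker parses into its private pInfo->working_sh[slice_index] slot, snapshots the bitstream position, and raises parse_done; viddec_h264_post_parse() spins on that flag, restores the snapshot, and performs the in-order post-parsing step.

    /* worker: viddec_h264_threading_parse() */
    h264_Parse_Slice_Layer_Without_Partitioning_RBSP_opt(parent, pInfo, next_SliceHeader);
    viddec_threading_backup_ctx_info(parent, next_SliceHeader);
    next_SliceHeader->parse_done = 1;

    /* in-order consumer: viddec_h264_post_parse() */
    while (next_SliceHeader->parse_done != 1) sleep(0);   /* busy-wait */
    viddec_threading_restore_ctx_info(parent, next_SliceHeader);
    h264_Post_Parsing_Slice_Header(parent, pInfo, next_SliceHeader);
    next_SliceHeader->parse_done = 0;
)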
//////////////////////////////////////////////////////////////////////////// + // Step 2: Parsing slice header + //////////////////////////////////////////////////////////////////////////// + /// IDR flag + next_SliceHeader->idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); + + + /// Pass slice header + status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP_opt(parent, pInfo, next_SliceHeader); + + viddec_threading_backup_ctx_info(parent, next_SliceHeader); + + if (next_SliceHeader->sh_error & 3) + { + ETRACE("Slice Header parsing error."); + status = H264_STATUS_ERROR; + return status; + } + + //h264_Post_Parsing_Slice_Header(parent, pInfo, &next_SliceHeader); + next_SliceHeader->parse_done = 1; + + } else { + ETRACE("Wrong NALU. Multi-thread parsing is only supposed to handle slice NALU types."); + status = H264_STATUS_ERROR; + return status; + } + + return status; +} + + + void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) { /* Should return size of my structure */ @@ -452,7 +538,104 @@ void viddec_h264_flush(void *parent, void *ctxt) p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC; p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC; + for(i = 0; i < MAX_SLICE_HEADER; i++) { + free(pInfo->working_sh[i]); + pInfo->working_sh[i] = NULL; + } return; } +uint32_t viddec_h264_payload_start(void *parent) +{ + + uint32_t code; + uint8_t nal_unit_type = 0; + if ( viddec_pm_peek_bits(parent, &code, 8) != -1) + { + nal_unit_type = (uint8_t)((code >> 0) & 0x1f); + } + // check whether slice data starts here + if (nal_unit_type == h264_NAL_UNIT_TYPE_SLICE) + { + return 1; + } else { + return 0; + } +} + +uint32_t viddec_h264_post_parse(void *parent, void *ctxt, uint32_t slice_index) +{ + struct h264_viddec_parser* parser = ctxt; + h264_Info * pInfo = &(parser->info); + h264_Status status = H264_STATUS_ERROR; + + h264_Slice_Header_t* next_SliceHeader = pInfo->working_sh[slice_index]; + + // busy-wait until the worker thread marks this slice header as parsed + while (next_SliceHeader->parse_done != 1) { + sleep(0); + //WTRACE("slice header[%d] parse not finish, block to wait.", slice_index); + } + + viddec_threading_restore_ctx_info(parent, next_SliceHeader); + status = h264_Post_Parsing_Slice_Header(parent, pInfo, next_SliceHeader); + + next_SliceHeader->parse_done = 0; + + return status; +} + + +uint32_t viddec_h264_query_thread_parsing_cap(void) +{ + // the current h.264 implementation is capable of multi-thread parsing + return 1; +} + +uint32_t viddec_threading_backup_ctx_info(void *parent, h264_Slice_Header_t *next_SliceHeader) +{ + h264_Status retStatus = H264_STATUS_OK; + + viddec_pm_cxt_t* pm_cxt = (viddec_pm_cxt_t*) parent; + + next_SliceHeader->bstrm_buf_buf_index = pm_cxt->getbits.bstrm_buf.buf_index; + next_SliceHeader->bstrm_buf_buf_st = pm_cxt->getbits.bstrm_buf.buf_st; + next_SliceHeader->bstrm_buf_buf_end = pm_cxt->getbits.bstrm_buf.buf_end; + next_SliceHeader->bstrm_buf_buf_bitoff = pm_cxt->getbits.bstrm_buf.buf_bitoff; + + next_SliceHeader->au_pos = pm_cxt->getbits.au_pos; + next_SliceHeader->list_off = pm_cxt->getbits.list_off; + next_SliceHeader->phase = pm_cxt->getbits.phase; + next_SliceHeader->emulation_byte_counter = pm_cxt->getbits.emulation_byte_counter; + next_SliceHeader->is_emul_reqd = pm_cxt->getbits.is_emul_reqd; + + next_SliceHeader->list_start_offset = pm_cxt->list.start_offset; + next_SliceHeader->list_end_offset = pm_cxt->list.end_offset; + next_SliceHeader->list_total_bytes = pm_cxt->list.total_bytes; + + return retStatus; +} + +uint32_t viddec_threading_restore_ctx_info(void *parent, h264_Slice_Header_t 
+{
+    h264_Status retStatus = H264_STATUS_OK;
+
+    viddec_pm_cxt_t* pm_cxt = (viddec_pm_cxt_t*) parent;
+
+    pm_cxt->getbits.bstrm_buf.buf_index = next_SliceHeader->bstrm_buf_buf_index;
+    pm_cxt->getbits.bstrm_buf.buf_st = next_SliceHeader->bstrm_buf_buf_st;
+    pm_cxt->getbits.bstrm_buf.buf_end = next_SliceHeader->bstrm_buf_buf_end;
+    pm_cxt->getbits.bstrm_buf.buf_bitoff = next_SliceHeader->bstrm_buf_buf_bitoff;
+
+    pm_cxt->getbits.au_pos = next_SliceHeader->au_pos;
+    pm_cxt->getbits.list_off = next_SliceHeader->list_off;
+    pm_cxt->getbits.phase = next_SliceHeader->phase;
+    pm_cxt->getbits.emulation_byte_counter = next_SliceHeader->emulation_byte_counter;
+    pm_cxt->getbits.is_emul_reqd = next_SliceHeader->is_emul_reqd;
+
+    pm_cxt->list.start_offset = next_SliceHeader->list_start_offset;
+    pm_cxt->list.end_offset = next_SliceHeader->list_end_offset;
+    pm_cxt->list.total_bytes = next_SliceHeader->list_total_bytes;
+
+    return retStatus;
+}
--
cgit v1.2.3

From 1f6c160a174ad1faa980fb72a7bceb48d9a6b070 Mon Sep 17 00:00:00 2001
From: "Shi, PingX"
Date: Wed, 19 Mar 2014 20:27:04 -0700
Subject: Revert "Multi-thread parsing implementation."

BZ: 158125

This reverts commit e2699e3cabfaa4f8187451f49bd7aff411a41a9f.

Change-Id: Ifc604446a28caaae33f2e586aab390c99ed11618
---
 mixvbp/include/vbp_trace.h                         |   4 +-
 mixvbp/vbp_manager/Android.mk                      |   8 +-
 mixvbp/vbp_manager/include/viddec_parser_ops.h     |   8 -
 mixvbp/vbp_manager/vbp_h264_parser.c               |  42 +-
 mixvbp/vbp_manager/vbp_mp42_parser.c               |   5 -
 mixvbp/vbp_manager/vbp_thread.c                    | 634 ---------------------
 mixvbp/vbp_manager/vbp_thread.h                    |  51 --
 mixvbp/vbp_manager/vbp_utils.c                     | 265 ++-------
 mixvbp/vbp_manager/vbp_utils.h                     |  17 -
 mixvbp/vbp_manager/vbp_vc1_parser.c                |   4 -
 mixvbp/vbp_manager/vbp_vp8_parser.c                |   4 -
 mixvbp/vbp_plugin/h264/h264parse.c                 | 273 ++-------
 mixvbp/vbp_plugin/h264/h264parse_dpb.c             |   0
 mixvbp/vbp_plugin/h264/h264parse_pps.c             |   4 -
 mixvbp/vbp_plugin/h264/h264parse_sh.c              | 485 ++--------
 mixvbp/vbp_plugin/h264/include/h264.h              | 118 ++--
 mixvbp/vbp_plugin/h264/include/h264parse.h         |  10 +-
 mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c        |   5 +-
 .../secvideo/baytrail/viddec_h264secure_parse.c    |   5 +-
 mixvbp/vbp_plugin/h264/viddec_h264_parse.c         | 185 +-----
 20 files changed, 176 insertions(+), 1951 deletions(-)
 delete mode 100755 mixvbp/vbp_manager/vbp_thread.c
 delete mode 100755 mixvbp/vbp_manager/vbp_thread.h
 mode change 100755 => 100644 mixvbp/vbp_plugin/h264/h264parse_dpb.c

diff --git a/mixvbp/include/vbp_trace.h b/mixvbp/include/vbp_trace.h
index 7fd891f..ba916b9 100755
--- a/mixvbp/include/vbp_trace.h
+++ b/mixvbp/include/vbp_trace.h
@@ -11,9 +11,9 @@
 #define VBP_TRACE_H_

-#ifndef VBP_TRACE
+
 #define VBP_TRACE
-#endif
+

 #ifdef VBP_TRACE /* if VBP_TRACE is defined*/

diff --git a/mixvbp/vbp_manager/Android.mk b/mixvbp/vbp_manager/Android.mk
index a0c95a9..a45fe7e 100755
--- a/mixvbp/vbp_manager/Android.mk
+++ b/mixvbp/vbp_manager/Android.mk
@@ -2,7 +2,7 @@ LOCAL_PATH:= $(call my-dir)

 include $(CLEAR_VARS)

-MIXVBP_LOG_ENABLE := true
+#MIXVBP_LOG_ENABLE := true

 LOCAL_SRC_FILES := \
     vbp_h264_parser.c \
@@ -13,10 +13,8 @@ LOCAL_SRC_FILES := \
     viddec_parse_sc.c \
     viddec_pm_parser_ops.c \
     viddec_pm_utils_bstream.c \
-    vbp_thread.c \

 LOCAL_CFLAGS := -DVBP -DHOST_ONLY
-LOCAL_CFLAGS += -DUSE_MULTI_THREADING

 LOCAL_C_INCLUDES += \
     $(LOCAL_PATH)/include \
@@ -41,8 +39,6 @@ LOCAL_SHARED_LIBRARIES := \
     libdl \
     libcutils

-LOCAL_LDLIBS += -lpthread
-
 ifeq ($(strip $(MIXVBP_LOG_ENABLE)),true)
 LOCAL_CFLAGS += -DVBP_TRACE
 LOCAL_SHARED_LIBRARIES += liblog
@@ -72,6
+68,4 @@ LOCAL_C_INCLUDES += $(LOCAL_PATH)/secvideo/merrifield/ LOCAL_SRC_FILES += secvideo/merrifield/vbp_h264secure_parser.c endif -LOCAL_LDLIBS += -lpthread - include $(BUILD_SHARED_LIBRARY) diff --git a/mixvbp/vbp_manager/include/viddec_parser_ops.h b/mixvbp/vbp_manager/include/viddec_parser_ops.h index 533b231..77054b5 100755 --- a/mixvbp/vbp_manager/include/viddec_parser_ops.h +++ b/mixvbp/vbp_manager/include/viddec_parser_ops.h @@ -30,10 +30,6 @@ typedef void (*fn_flush_parser) (void *parent, void *ctxt); typedef uint32_t (*fn_update_data)(void *parent, void *data, uint32_t size); #endif -typedef uint32_t (*fn_is_payload_start)(void *parent); -typedef uint32_t (*fn_parse_syntax_threading) (void *parent, void *ctxt, uint32_t thread_index); -typedef uint32_t (*fn_post_parse_threading) (void *parent, void *ctxt, uint32_t slice_index); -typedef uint32_t (*fn_query_thread_parsing_cap) (void); typedef struct { @@ -49,10 +45,6 @@ typedef struct #if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) fn_update_data update_data; #endif - fn_is_payload_start is_payload_start; - fn_parse_syntax_threading parse_syntax_threading; - fn_post_parse_threading post_parse_threading; - fn_query_thread_parsing_cap query_thread_parsing_cap; } viddec_parser_ops_t; diff --git a/mixvbp/vbp_manager/vbp_h264_parser.c b/mixvbp/vbp_manager/vbp_h264_parser.c index 9c75519..dd93ea7 100755 --- a/mixvbp/vbp_manager/vbp_h264_parser.c +++ b/mixvbp/vbp_manager/vbp_h264_parser.c @@ -26,7 +26,6 @@ #include #include "h264.h" -#include "h264parse.h" #include "vbp_loader.h" #include "vbp_utils.h" #include "vbp_h264_parser.h" @@ -186,33 +185,6 @@ uint32 vbp_init_parser_entries_h264(vbp_context *pcontext) return VBP_LOAD; } - pcontext->parser_ops->is_payload_start = dlsym(pcontext->fd_parser, "viddec_h264_payload_start"); - if (NULL == pcontext->parser_ops->is_payload_start) - { - ETRACE ("Failed to set entry point." ); - } - - pcontext->parser_ops->parse_syntax_threading = dlsym(pcontext->fd_parser, "viddec_h264_threading_parse"); - if (NULL == pcontext->parser_ops->parse_syntax_threading) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } - - pcontext->parser_ops->post_parse_threading = dlsym(pcontext->fd_parser, "viddec_h264_post_parse"); - if (NULL == pcontext->parser_ops->post_parse_threading) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } - - pcontext->parser_ops->query_thread_parsing_cap = dlsym(pcontext->fd_parser, "viddec_h264_query_thread_parsing_cap"); - if (NULL == pcontext->parser_ops->query_thread_parsing_cap) - { - ETRACE ("Failed to set entry point." ); - return VBP_LOAD; - } - /* entry point not needed */ pcontext->parser_ops->is_frame_start = NULL; return VBP_OK; @@ -1050,7 +1022,6 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index) { /* partial frame */ query_data->num_pictures = 1; - WTRACE("partial frame found."); } if (query_data->num_pictures > MAX_NUM_PICTURES) @@ -1070,7 +1041,7 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index) pic_parms = pic_data->pic_parms; // relax this condition to support partial frame parsing - // TODO: Is partial frame needed to support?? 
+ //if (parser->info.SliceHeader.first_mb_in_slice == 0) { /** @@ -1160,8 +1131,6 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index) pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag; pic_parms->frame_num = parser->info.SliceHeader.frame_num; - - } @@ -1192,6 +1161,7 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index) pic_parms->num_ref_idx_l1_default_active_minus1 = parser->info.active_PPS.num_ref_idx_l1_active - 1; } #endif + return VBP_OK; } @@ -1712,11 +1682,11 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) break; case h264_NAL_UNIT_TYPE_SPS: - VTRACE("SPS header is parsed."); + ITRACE("SPS header is parsed."); break; case h264_NAL_UNIT_TYPE_PPS: - VTRACE("PPS header is parsed."); + ITRACE("PPS header is parsed."); break; case h264_NAL_UNIT_TYPE_Acc_unit_delimiter: @@ -1724,11 +1694,11 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i) break; case h264_NAL_UNIT_TYPE_EOSeq: - VTRACE("EOSeq is parsed."); + ITRACE("EOSeq is parsed."); break; case h264_NAL_UNIT_TYPE_EOstream: - VTRACE("EOStream is parsed"); + ITRACE("EOStream is parsed"); break; default: diff --git a/mixvbp/vbp_manager/vbp_mp42_parser.c b/mixvbp/vbp_manager/vbp_mp42_parser.c index b954b38..9b4c63f 100755 --- a/mixvbp/vbp_manager/vbp_mp42_parser.c +++ b/mixvbp/vbp_manager/vbp_mp42_parser.c @@ -124,11 +124,6 @@ uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext) /* entry point not needed */ pcontext->parser_ops->flush = NULL; - pcontext->parser_ops->is_payload_start = NULL; - pcontext->parser_ops->parse_syntax_threading = NULL; - pcontext->parser_ops->post_parse_threading = NULL; - pcontext->parser_ops->query_thread_parsing_cap = NULL; - return VBP_OK; } diff --git a/mixvbp/vbp_manager/vbp_thread.c b/mixvbp/vbp_manager/vbp_thread.c deleted file mode 100755 index e1f665b..0000000 --- a/mixvbp/vbp_manager/vbp_thread.c +++ /dev/null @@ -1,634 +0,0 @@ -/* INTEL CONFIDENTIAL -* Copyright (c) 2013 Intel Corporation. All rights reserved. -* -* The source code contained or described herein and all documents -* related to the source code ("Material") are owned by Intel -* Corporation or its suppliers or licensors. Title to the -* Material remains with Intel Corporation or its suppliers and -* licensors. The Material contains trade secrets and proprietary -* and confidential information of Intel or its suppliers and -* licensors. The Material is protected by worldwide copyright and -* trade secret laws and treaty provisions. No part of the Material -* may be used, copied, reproduced, modified, published, uploaded, -* posted, transmitted, distributed, or disclosed in any way without -* Intel's prior express written permission. -* -* No license under any patent, copyright, trade secret or other -* intellectual property right is granted to or conferred upon you -* by disclosure or delivery of the Materials, either expressly, by -* implication, inducement, estoppel or otherwise. Any license -* under such intellectual property rights must be express and -* approved by Intel in writing. 
-* -*/ -#include -#include -#include - -#include "vbp_thread.h" -#include "vbp_loader.h" - -/* consider a qual core with hyper thread */ -#define MAX_AUTO_THREADS 8 - -#define THREADING_SCHEME_BUNDLE - -typedef long long int nsecs_t; - -static nsecs_t systemTime() -{ - struct timeval t; - gettimeofday(&t, NULL); - return 1000000 * t.tv_sec + t.tv_usec; -} - - -typedef struct PerThreadContext { - pthread_t thread; - - int32_t index; // thread index referenced by thread itself when needed. - int32_t thread_init; - struct ThreadContext* parent; - - pthread_cond_t input_cond; // Used to wait for a new packet from the main thread. - pthread_cond_t progress_cond; // Used by child threads to wait for progress to change. - pthread_cond_t output_cond; // Used by the main thread to wait for frames to finish. - - pthread_mutex_t mutex; // Mutex used to protect the contents of the PerThreadContext. - pthread_mutex_t progress_mutex; // Mutex used to protect frame progress values and progress_cond. - - vbp_context* vbpctx; - viddec_pm_cxt_t* pmctx; // Working parser context - viddec_pm_cxt_t* input_pmctx; // Input parser context - void* codec_data; // Points to specific codec data that holds output, all threads share - // one instance - uint32_t start_item; // start of parsing item num for bundle parsing - - enum { - STATE_INPUT_WAIT, - STATE_WORKING, - STATE_EXIT - } state; - -} PerThreadContext; - -typedef struct ThreadContext { - PerThreadContext* threads[MAX_AUTO_THREADS]; // The contexts for each thread. - PerThreadContext* prev_thread; // The last thread submit_packet() was called on. - int delaying; // Set for the first N packets, where N is the number of threads. - // While it is set, vbp_thread_parse_syntax won't return any results - - uint32_t next_finished; // The next thread count to return output from. - uint32_t next_parsing; // The next thread count to submit input packet to. - - uint32_t active_thread_count; // num of thread need to be warmed up - - sem_t finish_sem; // semaphore of finish work to synchronize working thread and main thread - uint32_t start_item_to_parse; - uint32_t last_item_to_parse; - -} ThreadContext; - - -int32_t get_cpu_count() -{ - int32_t cpu_num; -#if defined(_SC_NPROC_ONLN) - cpu_num = sysconf(_SC_NPROC_ONLN); -#elif defined(_SC_NPROCESSORS_ONLN) - cpu_num = sysconf(_SC_NPROCESSORS_ONLN); -#endif - return cpu_num; -} - - -void set_thread_affinity_mask(cpu_set_t mask) -{ - int err, syscallres; - pid_t pid = gettid(); - syscallres = syscall(__NR_sched_setaffinity, pid, sizeof(mask), &mask); - if (syscallres) - { - ETRACE("Error in the syscall setaffinity."); - } -} - - -static void vbp_update_parser_for_item(viddec_pm_cxt_t *cxt, - viddec_pm_cxt_t *src_cxt, - uint32 item) -{ - - /* set up bitstream buffer */ - cxt->getbits.list = src_cxt->getbits.list; - - /* setup buffer pointer */ - cxt->getbits.bstrm_buf.buf = src_cxt->getbits.bstrm_buf.buf; - - - /* setup bitstream parser */ - cxt->getbits.bstrm_buf.buf_index = src_cxt->list.data[item].stpos; - cxt->getbits.bstrm_buf.buf_st = src_cxt->list.data[item].stpos; - cxt->getbits.bstrm_buf.buf_end = src_cxt->list.data[item].edpos; - - /* It is possible to end up with buf_offset not equal zero. 
*/ - cxt->getbits.bstrm_buf.buf_bitoff = 0; - cxt->getbits.au_pos = 0; - cxt->getbits.list_off = 0; - cxt->getbits.phase = 0; - cxt->getbits.emulation_byte_counter = 0; - - cxt->list.start_offset = src_cxt->list.data[item].stpos; - cxt->list.end_offset = src_cxt->list.data[item].edpos; - cxt->list.total_bytes = src_cxt->list.data[item].edpos - src_cxt->list.data[item].stpos; - -} - - - -static void* parser_worker_thread(void* arg) -{ - PerThreadContext* p = arg; - ThreadContext* t_cxt = p->parent; - vbp_context* vbpctx = p->vbpctx; - viddec_pm_cxt_t* pm_cxt = p->pmctx; - viddec_parser_ops_t* ops = vbpctx->parser_ops; - - -// probably not to make each parsing thread have affinity to a cpu core -// having cpus fully occupied will even lead to low performance -// current experimental solution: just make main thread have affinity -#if 0 - cpu_set_t mask; - CPU_ZERO(&mask); - CPU_SET(p->index, &mask); // cpu affinity is set to same num as thread index - set_thread_affinity_mask(mask); -#endif - - pthread_mutex_lock(&p->mutex); - - nsecs_t t0; - while (1) { - while (p->state == STATE_INPUT_WAIT) { - pthread_cond_wait(&p->input_cond, &p->mutex); - } - - if (p->state == STATE_WORKING) { - //now we get input data, call actual parse. - //t0 = systemTime(); - sleep(0); - ops->parse_syntax_threading((void *)p->pmctx, p->codec_data, p->index); - - pthread_mutex_lock(&p->progress_mutex); - p->state = STATE_INPUT_WAIT; - - pthread_cond_broadcast(&p->progress_cond); - pthread_cond_signal(&p->output_cond); - pthread_mutex_unlock(&p->progress_mutex); - } else if (p->state == STATE_EXIT) { - break; - } - } - pthread_mutex_unlock(&p->mutex); - pthread_exit(NULL); - return NULL; -} - -static void* parser_worker_thread_bundle(void* arg) -{ - PerThreadContext* p = arg; - ThreadContext* t_cxt = p->parent; - vbp_context* vbpctx = p->vbpctx; - viddec_parser_ops_t* ops = vbpctx->parser_ops; - -// probably not to make each parsing thread have affinity to a cpu core -// having cpus fully occupied will even lead to low performance -// current experimental solution: just make main thread have affinity -#if 1 - cpu_set_t mask; - CPU_ZERO(&mask); - CPU_SET(p->index, &mask); // cpu affinity is set to same num as thread index - set_thread_affinity_mask(mask); -#endif - - pthread_mutex_lock(&p->mutex); - - nsecs_t t0; - while (1) { - while (p->state == STATE_INPUT_WAIT) { - pthread_cond_wait(&p->input_cond, &p->mutex); - } - - if (p->state == STATE_WORKING) { - uint32_t working_item = p->start_item; // start point - uint32_t slice_index = 0 + p->index; // start point - - while (working_item <= t_cxt->last_item_to_parse) { - vbp_update_parser_for_item(p->pmctx, p->input_pmctx, working_item); - ops->parse_syntax_threading((void *)p->pmctx, p->codec_data, slice_index); - - working_item += t_cxt->active_thread_count; - slice_index += t_cxt->active_thread_count; - } - - pthread_mutex_lock(&p->progress_mutex); - p->state = STATE_INPUT_WAIT; - - pthread_cond_broadcast(&p->progress_cond); - pthread_mutex_unlock(&p->progress_mutex); - } else if (p->state == STATE_EXIT) { - break; - } - } - pthread_mutex_unlock(&p->mutex); - pthread_exit(NULL); - return NULL; -} - - -uint32_t update_context_from_input(viddec_pm_cxt_t* dest, - viddec_pm_cxt_t* source) -{ - if ((dest == NULL) || (source == NULL) || (dest == source)) { - ETRACE("%s error", __func__); - return 1; - } - /* set up bitstream buffer */ - dest->getbits.list = source->getbits.list; - - /* buffer pointer */ - dest->getbits.bstrm_buf.buf = source->getbits.bstrm_buf.buf; - - /* 
bitstream parser */ - dest->getbits.bstrm_buf.buf_index = source->getbits.bstrm_buf.buf_index; - dest->getbits.bstrm_buf.buf_st = source->getbits.bstrm_buf.buf_st; - dest->getbits.bstrm_buf.buf_end = source->getbits.bstrm_buf.buf_end; - - /* It is possible to end up with buf_offset not equal zero. */ - dest->getbits.bstrm_buf.buf_bitoff = 0; - dest->getbits.au_pos = 0; - dest->getbits.list_off = 0; - dest->getbits.phase = 0; - dest->getbits.emulation_byte_counter = 0; - - dest->list.start_offset = source->list.start_offset; - dest->list.end_offset = source->list.end_offset; - dest->list.total_bytes = source->list.total_bytes; - return 0; -} - -uint32_t update_context_to_output(viddec_pm_cxt_t* dest, - viddec_pm_cxt_t* source) -{ - if ((dest == NULL) || (source == NULL) || (dest == source)) { - ETRACE("%s error", __func__); - return 1; - } - - /* bitstream parser */ - dest->getbits.bstrm_buf.buf_index = source->getbits.bstrm_buf.buf_index; - dest->getbits.bstrm_buf.buf_st = source->getbits.bstrm_buf.buf_st; - dest->getbits.bstrm_buf.buf_end = source->getbits.bstrm_buf.buf_end; - - /* It is possible to end up with buf_offset not equal zero. */ - dest->getbits.bstrm_buf.buf_bitoff = source->getbits.bstrm_buf.buf_bitoff; - dest->getbits.au_pos = source->getbits.au_pos; - dest->getbits.list_off = source->getbits.list_off; - dest->getbits.phase = source->getbits.phase; - dest->getbits.emulation_byte_counter = source->getbits.emulation_byte_counter; - dest->getbits.is_emul_reqd = source->getbits.is_emul_reqd; - - dest->list.start_offset = source->list.start_offset; - dest->list.end_offset = source->list.end_offset; - dest->list.total_bytes = source->list.total_bytes; - - return 0; -} - - - -uint32_t feed_thread_input(PerThreadContext* p, void* parent) -{ - ThreadContext* t_context = p->parent; - viddec_pm_cxt_t* pm_cxt = (viddec_pm_cxt_t*) parent; - - //nsecs_t t0 = systemTime(); - if (pm_cxt->getbits.bstrm_buf.buf == NULL) { - return 1; - } - - pthread_mutex_lock(&p->mutex); - - if (p->state == STATE_WORKING) { - pthread_mutex_lock(&p->progress_mutex); - while (p->state == STATE_WORKING) { - pthread_cond_wait(&p->progress_cond, &p->progress_mutex); - } - pthread_mutex_unlock(&p->progress_mutex); - } - - /* Now update the input to the working thread*/ - update_context_from_input(p->pmctx, pm_cxt); - p->codec_data = (void*)&(pm_cxt->codec_data[0]); - - p->state = STATE_WORKING; - t_context->next_parsing++; - - //t0 = systemTime(); - pthread_cond_signal(&p->input_cond); - pthread_mutex_unlock(&p->mutex); - - return 0; -} - -void vbp_thread_init(vbp_context* pcontext) -{ - int i; - ThreadContext* t_context = NULL; - int32_t thread_count = pcontext->thread_count; - int32_t err = 0; - -#ifdef THREADING_SCHEME_BUNDLE - ITRACE("%s, threading_parse_scheme set to SCHEME_BUNDLE", __func__); - pcontext->threading_parse_scheme = SCHEME_BUNDLE; -#else - ITRACE("%s, threading_parse_scheme set to SCHEME_SEQUENTIAL", __func__); - pcontext->threading_parse_scheme = SCHEME_SEQUENTIAL; -#endif - - if (thread_count == 0) { - int32_t cpu_num = get_cpu_count(); - if (cpu_num > 1) { - if (pcontext->threading_parse_scheme == SCHEME_BUNDLE) { - thread_count = pcontext->thread_count = cpu_num - 1; - } else { - thread_count = pcontext->thread_count = cpu_num - 1; - } - } - else { - thread_count = pcontext->thread_count = 1; - } - } - - pcontext->thread_opaque = t_context = - (ThreadContext*)malloc(sizeof(ThreadContext)); - if (t_context != NULL) { - t_context->active_thread_count = thread_count; //default active count - - 
t_context->delaying = 1; - t_context->next_parsing = t_context->next_finished = 0; - } - - ITRACE("%s, creating %d parsing thread.", __func__, thread_count); - for (i = 0; i < thread_count; i++) { - t_context->threads[i] = (PerThreadContext*)malloc(sizeof(PerThreadContext)); - assert(t_context->threads[i] != NULL); - PerThreadContext* p = t_context->threads[i]; - - if (p != NULL) { - p->index = i; - p->parent = t_context; - p->vbpctx = pcontext; - p->pmctx = vbp_malloc(viddec_pm_cxt_t, 1); - viddec_pm_utils_bstream_init(&(p->pmctx->getbits), NULL, 0); - - pthread_mutex_init(&p->mutex, NULL); - pthread_mutex_init(&p->progress_mutex, NULL); - pthread_cond_init(&p->input_cond, NULL); - pthread_cond_init(&p->progress_cond, NULL); - pthread_cond_init(&p->output_cond, NULL); - - p->state = STATE_INPUT_WAIT; - - if(pcontext->threading_parse_scheme == SCHEME_SEQUENTIAL) { - err = pthread_create(&p->thread, NULL, parser_worker_thread, p); - } else { - err = pthread_create(&p->thread, NULL, parser_worker_thread_bundle, p); - } - - p->thread_init = !err; - } - } -#if 1 - ITRACE("%s, set_thread_affinity_mask", __func__); - cpu_set_t mask; - CPU_ZERO(&mask); - CPU_SET(3, &mask); // 0~thread_count-1 cpus was set to each sub thread, - // last cpu is set to main thread - set_thread_affinity_mask(mask); -#endif -} - - -void vbp_thread_free(vbp_context* pcontext) -{ - ITRACE("%s", __func__); - ThreadContext* t_context = pcontext->thread_opaque; - int i; - int thread_count = pcontext->thread_count; - - for (i = 0; i < thread_count; i++) { - PerThreadContext *p = t_context->threads[i]; - - pthread_mutex_lock(&p->mutex); - p->state = STATE_EXIT; - pthread_cond_signal(&p->input_cond); - pthread_mutex_unlock(&p->mutex); - - if (p->thread_init) { - pthread_join(p->thread, NULL); - } - p->thread_init = 0; - } - - for (i = 0; i < thread_count; i++) { - PerThreadContext *p = t_context->threads[i]; - - pthread_mutex_destroy(&p->mutex); - pthread_mutex_destroy(&p->progress_mutex); - pthread_cond_destroy(&p->input_cond); - pthread_cond_destroy(&p->progress_cond); - pthread_cond_destroy(&p->output_cond); - - if (p->pmctx != NULL) { - free(p->pmctx); - } - - free(p); - p = NULL; - } - - free(t_context); -} - -/* - * Entry function of multi-thread parsing - * - * parent - A viddec_pm_cxt_t type parser management context, - * which contains input stream. 
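- *          [Editor's note] Behaviorally this is a software pipeline: the
- *          first (active_thread_count - 1) calls only feed worker threads
- *          and return 0; each later call feeds the next worker and drains
- *          the oldest finished one (next_finished) round-robin, copying its
- *          context back into parent.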
- * ctxt - Codec specific parser context, actually codec_data[] in - * viddec_pm_cxt_t, Used for storing parsed output - * return - 0 indicates no output is gotten, just warm up the threads - * 1 indicates there is output - * - * see viddec_parser_ops.h - * uint32_t (*fn_parse_syntax) (void *parent, void *ctxt); - */ -uint32_t vbp_thread_parse_syntax(void* parent, - void* ctxt, - vbp_context* pcontext) -{ - ThreadContext* t_context = pcontext->thread_opaque; - uint32_t finished = t_context->next_finished; - - if ((parent == NULL) || (ctxt == NULL)) { - return 0; - } - - PerThreadContext* p; - - nsecs_t t0,t1; - //t0 = t1 = systemTime(); - - /* Submit an input packet to the next parser thread*/ - p = t_context->threads[t_context->next_parsing]; - feed_thread_input(p, parent); - - //p->state = STATE_WORKING; - //t_context->next_parsing++; - - //t0 = systemTime(); - //pthread_cond_signal(&p->input_cond); - - //t0 = systemTime(); - - if ((t_context->delaying == 1) && - (t_context->next_parsing > (t_context->active_thread_count - 1))) { - t_context->delaying = 0; - } - - /* If we are still in early stage that warming up each thread, indicate we got no output*/ - if (t_context->delaying == 1) { - return 0; - } - - /* return available parsed frame from the oldest thread - * notice that we start getting output from thread[0] after just submitting input - * to thread[active_count-1] - * */ - p = t_context->threads[finished++]; - - if (p->state != STATE_INPUT_WAIT) { - pthread_mutex_lock(&p->progress_mutex); - while (p->state != STATE_INPUT_WAIT) { - pthread_cond_wait(&p->output_cond, &p->progress_mutex); - } - pthread_mutex_unlock(&p->progress_mutex); - } - - - if (finished > (t_context->active_thread_count - 1)) { - finished = 0; - } - - if (t_context->next_parsing >= t_context->active_thread_count) { - t_context->next_parsing = 0; - } - - t_context->next_finished = finished; - - update_context_to_output((viddec_pm_cxt_t*) parent, p->pmctx); - - return 1; -} - - -/* - * Entry function of multi-thread parsing - * - * parent - A viddec_pm_cxt_t type parser management context, - * which contains input stream. - * ctxt - Codec specific parser context, actually codec_data[] in - * viddec_pm_cxt_t, Used for storing parsed output - * start_item - num of start item passed to trigger multithread parsing - * - */ -uint32_t vbp_thread_parse_syntax_bundle(void* parent, - void* ctxt, - vbp_context* pcontext, - uint32_t start_item) -{ - ThreadContext* t_context = pcontext->thread_opaque; - if ((parent == NULL) || (ctxt == NULL)) { - return 0; - } - - PerThreadContext* p = NULL; - viddec_pm_cxt_t* pm_cxt = (viddec_pm_cxt_t*) parent; - t_context->start_item_to_parse = start_item; - t_context->last_item_to_parse = pm_cxt->list.num_items - 1; - - sem_init(&(t_context->finish_sem),0,0); - - uint32_t i; - for (i = 0; i < t_context->active_thread_count; i++) { - p = t_context->threads[i]; - p->start_item = start_item + i; - - if (p->state == STATE_WORKING) { - pthread_mutex_lock(&p->progress_mutex); - while (p->state == STATE_WORKING) { - pthread_cond_wait(&p->progress_cond, &p->progress_mutex); - } - pthread_mutex_unlock(&p->progress_mutex); - } - - p->codec_data = (void*)&(pm_cxt->codec_data[0]); - p->input_pmctx = pm_cxt; - - p->state = STATE_WORKING; - - pthread_cond_signal(&p->input_cond); - pthread_mutex_unlock(&p->mutex); - - } - return 1; -} - - -/* - * set active threads num since not all threads need to be warmed up - * when a frame has fewer slice num than threads we created. 
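- * [Editor's note] Besides clamping active_thread_count, the body below also
- * resets the pipeline bookkeeping (delaying, next_parsing, next_finished),
- * so calling it effectively restarts warm-up for the next frame.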
- * - * active_count - threads num to be activated. - */ -uint32_t vbp_thread_set_active(vbp_context* pcontext, - uint32_t active_count) -{ - ThreadContext* t_context = pcontext->thread_opaque; - - if (t_context != NULL) { - if (active_count < pcontext->thread_count) { - t_context->active_thread_count = active_count; - } else { //reset to the default - t_context->active_thread_count = pcontext->thread_count; - } - - //reset to the default - t_context->delaying = 1; - t_context->next_parsing = t_context->next_finished = 0; - } - return 0; -} - -uint32_t vbp_thread_get_active(vbp_context* pcontext) -{ - ThreadContext* t_context = pcontext->thread_opaque; - - if (t_context != NULL) { - return t_context->active_thread_count; - } - return 0; -} - - diff --git a/mixvbp/vbp_manager/vbp_thread.h b/mixvbp/vbp_manager/vbp_thread.h deleted file mode 100755 index e182ac1..0000000 --- a/mixvbp/vbp_manager/vbp_thread.h +++ /dev/null @@ -1,51 +0,0 @@ -/* INTEL CONFIDENTIAL -* Copyright (c) 2013 Intel Corporation. All rights reserved. -* -* The source code contained or described herein and all documents -* related to the source code ("Material") are owned by Intel -* Corporation or its suppliers or licensors. Title to the -* Material remains with Intel Corporation or its suppliers and -* licensors. The Material contains trade secrets and proprietary -* and confidential information of Intel or its suppliers and -* licensors. The Material is protected by worldwide copyright and -* trade secret laws and treaty provisions. No part of the Material -* may be used, copied, reproduced, modified, published, uploaded, -* posted, transmitted, distributed, or disclosed in any way without -* Intel's prior express written permission. -* -* No license under any patent, copyright, trade secret or other -* intellectual property right is granted to or conferred upon you -* by disclosure or delivery of the Materials, either expressly, by -* implication, inducement, estoppel or otherwise. Any license -* under such intellectual property rights must be express and -* approved by Intel in writing. 
-* -*/ - -#ifndef VBP_THREAD_H -#define VBP_THREAD_H - -#define _GNU_SOURCE -#include -#include - -#include -#include "vbp_utils.h" -#include "include/viddec_pm.h" -#include - - -void vbp_thread_init(vbp_context *pcontext); - -void vbp_thread_free(vbp_context *pcontext); - -uint32_t vbp_thread_parse_syntax(void* parent, - void* ctxt, - vbp_context* pcontext); - -uint32_t vbp_thread_set_active(vbp_context* pcontext, - uint32_t active_count); - -uint32_t vbp_thread_get_active(vbp_context* pcontext); - -#endif diff --git a/mixvbp/vbp_manager/vbp_utils.c b/mixvbp/vbp_manager/vbp_utils.c index 21b1e5d..1647269 100755 --- a/mixvbp/vbp_manager/vbp_utils.c +++ b/mixvbp/vbp_manager/vbp_utils.c @@ -39,21 +39,6 @@ #include "vbp_h264secure_parser.h" #endif -#ifdef USE_MULTI_THREADING -#include "vbp_thread.h" -#endif - -#define LEAST_SLICES_MULTI_THREADING 10 - -typedef long long int nsecs_t; - -static nsecs_t systemTime() -{ - struct timeval t; - gettimeofday(&t, NULL); - return 1000000 * t.tv_sec + t.tv_usec; -} - /* buffer counter */ uint32 buffer_counter = 0; @@ -318,36 +303,12 @@ cleanup: } -static void vbp_setup_parser_for_item(viddec_pm_cxt_t *cxt, uint32 item) -{ - /* setup bitstream parser */ - cxt->getbits.bstrm_buf.buf_index = cxt->list.data[item].stpos; - cxt->getbits.bstrm_buf.buf_st = cxt->list.data[item].stpos; - cxt->getbits.bstrm_buf.buf_end = cxt->list.data[item].edpos; - - /* It is possible to end up with buf_offset not equal zero. */ - cxt->getbits.bstrm_buf.buf_bitoff = 0; - - cxt->getbits.au_pos = 0; - cxt->getbits.list_off = 0; - cxt->getbits.phase = 0; - cxt->getbits.emulation_byte_counter = 0; - - cxt->list.start_offset = cxt->list.data[item].stpos; - cxt->list.end_offset = cxt->list.data[item].edpos; - cxt->list.total_bytes = cxt->list.data[item].edpos - cxt->list.data[item].stpos; - -} - /** * * parse the elementary sample buffer or codec configuration data * */ -//static uint32 frame_num = 0; -//static nsecs_t total_time_of_multislice = 0; -//static uint32 frame_multislice_num = 0; static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_flag) { viddec_pm_cxt_t *cxt = pcontext->parser_cxt; @@ -378,7 +339,6 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f } */ - uint32_t multi_parse_done = 0; /* populate the list.*/ if (init_data_flag) @@ -404,176 +364,48 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f // TODO: check if cxt->getbits.is_emul_reqd is set properly - //frame_num ++; - - nsecs_t t0, t1, t2, tt0, tt1, tt2; - t0 = t1 = t2 = tt0 = tt1 = tt2 = 0; - //t0 = systemTime(); - - if (0 == pcontext->is_multithread_parsing_enabled) { - for (i = 0; i < cxt->list.num_items; i++) { - vbp_setup_parser_for_item(cxt, i); - /* invoke parse entry point to parse the buffer */ - //t1 = systemTime(); - error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0])); - //t2 = systemTime(); - //tt1 += t2 - t1; - - /* can't return error for now. 
Neet further investigation */ - if (0 != error) { - WTRACE("failed to parse the syntax: %d!", error); - } - - /* process parsing result */ - //t2 = systemTime(); - error = pcontext->func_process_parsing_result(pcontext, i); - //tt2 += systemTime() - t2; - - if (VBP_MULTI == error) { - ITRACE("Multiple frames are found in one bufffer."); - return VBP_OK; - } - else if (0 != error) { - ETRACE("Failed to process parsing result."); - return error; - } - } - } - // Multi-threading option is enabled - else if (1 == pcontext->is_multithread_parsing_enabled) { - - int got_output = 0; - int is_payload_start = 0; - int single_parse_count = 0; - int use_thread_parsing = 0; - - for (i = 0; i < cxt->list.num_items; i++) { - - vbp_setup_parser_for_item(cxt, i); - - // we assume no configuration data following slice data in a frame's buffer - is_payload_start = ops->is_payload_start((void *)cxt); - - if (is_payload_start == 0) { - //t1 = systemTime(); - error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0])); - //tt1 += systemTime() - t1; - - //t2 = systemTime(); - error = pcontext->func_process_parsing_result(pcontext, i); - single_parse_count ++; - //tt2 += systemTime() - t2; - } else if (((cxt->list.num_items - single_parse_count) < LEAST_SLICES_MULTI_THREADING)) { - //t1 = systemTime(); - error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0])); - //tt1 += systemTime() - t1; - - //t2 = systemTime(); - error = pcontext->func_process_parsing_result(pcontext, i); - //tt2 += systemTime() - t2; - } else { - use_thread_parsing = 1; - break; - } - - if (VBP_MULTI == error) { - ITRACE("Multiple frames are found in one bufffer."); - return VBP_OK; - } - else if (0 != error) { - ETRACE("Failed to process parsing result."); - return error; - } + for (i = 0; i < cxt->list.num_items; i++) + { + /* setup bitstream parser */ + cxt->getbits.bstrm_buf.buf_index = cxt->list.data[i].stpos; + cxt->getbits.bstrm_buf.buf_st = cxt->list.data[i].stpos; + cxt->getbits.bstrm_buf.buf_end = cxt->list.data[i].edpos; + + /* It is possible to end up with buf_offset not equal zero. */ + cxt->getbits.bstrm_buf.buf_bitoff = 0; + + cxt->getbits.au_pos = 0; + cxt->getbits.list_off = 0; + cxt->getbits.phase = 0; + cxt->getbits.emulation_byte_counter = 0; + + cxt->list.start_offset = cxt->list.data[i].stpos; + cxt->list.end_offset = cxt->list.data[i].edpos; + cxt->list.total_bytes = cxt->list.data[i].edpos - cxt->list.data[i].stpos; + + /* invoke parse entry point to parse the buffer */ + error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0])); + + /* can't return error for now. Neet further investigation */ + if (0 != error) + { + VTRACE("failed to parse the syntax: %d!", error); } - if (use_thread_parsing) { - vbp_thread_set_active(pcontext, cxt->list.num_items - single_parse_count); - uint32_t thread_count = vbp_thread_get_active(pcontext); - - //t1 = systemTime(); - if (pcontext->threading_parse_scheme == SCHEME_BUNDLE) { - // Multithread parsing Scheme-Bundle-Input - // This interface push threads to parse all slice header without interrupt. 
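-                // [Editor's note] In this bundle scheme, one call wakes all
-                // active workers; worker k starts at item (i + k) and strides
-                // by active_thread_count, while the loop below replays the
-                // results serially in item order via post_parse_threading.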
- vbp_thread_parse_syntax_bundle((void *)cxt, - (void *)&(cxt->codec_data[0]), - pcontext, - i); //first slice's item num - - uint32_t j; - for (j = i; j < cxt->list.num_items; j++) { - error = ops->post_parse_threading((void *)cxt, - (void *)&(cxt->codec_data[0]), - j-single_parse_count); // slice index - error = pcontext->func_process_parsing_result(pcontext, j); // item num - } - //tt1 += systemTime() - t1; - - } else if (pcontext->threading_parse_scheme == SCHEME_SEQUENTIAL) { - // Multithread parsing Scheme-Sequential-Input. - // This interface push threads to parse one slice header one time. - uint32_t j; - for (j = i; j < cxt->list.num_items; j++) { - vbp_setup_parser_for_item(cxt, j); - - //t1 = systemTime(); - got_output = vbp_thread_parse_syntax((void *)cxt, - (void *)&(cxt->codec_data[0]), - pcontext); - //tt1 += systemTime() - t1; - - if (got_output == 1) { - //t2 = systemTime(); - error = ops->post_parse_threading((void *)cxt, - (void *)&(cxt->codec_data[0]), - //slice count with thread delay - (j-(thread_count-1)-single_parse_count) % thread_count); - - error = pcontext->func_process_parsing_result(pcontext, - // item count with thread delay - j-(thread_count-1)); - - multi_parse_done ++; - //tt2 += systemTime() - t2; - } - } - - int need_to_clearance = thread_count -1; - cxt->getbits.bstrm_buf.buf = NULL; - for (i = cxt->list.num_items - need_to_clearance; i < cxt->list.num_items; i++) { - //t1 = systemTime(); - got_output = vbp_thread_parse_syntax((void *)cxt, - (void *)&(cxt->codec_data[0]), - pcontext); - //&got_output); - //tt1 += systemTime() - t1; - - if (got_output == 1) { - //t2 = systemTime(); - error = ops->post_parse_threading((void *)cxt, - (void *)&(cxt->codec_data[0]), - (i-single_parse_count) % thread_count); - - error = pcontext->func_process_parsing_result(pcontext, i); - multi_parse_done ++; - //tt2 += systemTime() - t2; - } - } - } + /* process parsing result */ + error = pcontext->func_process_parsing_result(pcontext, i); + + if (VBP_MULTI == error) { + ITRACE("Multiple frames are found in one bufffer."); + return VBP_OK; + } + else if (0 != error) + { + ETRACE("Failed to process parsing result."); + return error; } } -#if 0 - tt0 = systemTime() - t0; - if (cxt->list.num_items > 8) { - total_time_of_multislice += tt0; - frame_multislice_num ++; - ETRACE("### ================== TIME CALCULATION ======================="); - ETRACE("### ------------item num: %d", cxt->list.num_items); - ETRACE("### ------------The frame[%d] cost time: %lld us", frame_num-1, tt0); - ETRACE("### ------------Accumulated multi-slice frames: %d", frame_multislice_num); - ETRACE("### ------------Accumulated average time that multislice frame cost: %lld us", total_time_of_multislice/frame_multislice_num); - ETRACE("### ================== TIME CALCULATION END ==================="); - } -#endif + return VBP_OK; } @@ -631,21 +463,6 @@ uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext) *ppcontext = pcontext; error = VBP_OK; - - /* default is not enabled */ - pcontext->is_multithread_parsing_enabled = 0; - -#ifndef USE_AVC_SHORT_FORMAT -#ifdef USE_MULTI_THREADING - if (pcontext->parser_ops->query_thread_parsing_cap != NULL) { - if (pcontext->parser_ops->query_thread_parsing_cap() == 1) { - pcontext->is_multithread_parsing_enabled = 1; - ITRACE("Multi-thead parsing is enabled."); - vbp_thread_init(pcontext); - } - } -#endif -#endif cleanup: if (VBP_OK != error) @@ -666,12 +483,6 @@ cleanup: */ uint32 vbp_utils_destroy_context(vbp_context *pcontext) { -#ifdef 
USE_MULTI_THREADING - if (1 == pcontext->is_multithread_parsing_enabled) { - vbp_thread_free(pcontext); - } -#endif - /* entry point, not need to validate input parameters. */ vbp_utils_free_parser_memory(pcontext); vbp_utils_uninitialize_context(pcontext); diff --git a/mixvbp/vbp_manager/vbp_utils.h b/mixvbp/vbp_manager/vbp_utils.h index 7cf9321..633159c 100755 --- a/mixvbp/vbp_manager/vbp_utils.h +++ b/mixvbp/vbp_manager/vbp_utils.h @@ -31,9 +31,6 @@ #include "viddec_pm.h" #include "vbp_trace.h" #include -#include "vbp_loader.h" - -#include #define MAGIC_NUMBER 0x0DEADBEEF #define MAX_WORKLOAD_ITEMS 1000 @@ -71,13 +68,6 @@ typedef uint32 (*function_populate_query_data)(vbp_context* cxt); typedef uint32 (*function_update_data)(vbp_context* cxt, void *newdata, uint32 size); #endif -typedef enum -{ - SCHEME_BUNDLE = 0, - SCHEME_SEQUENTIAL, -} threading_parse_scheme_t; - - struct vbp_context_t { /* magic number */ @@ -104,13 +94,6 @@ struct vbp_context_t /* parser type specific data*/ void *parser_private; - /* multithreading */ - uint32 thread_count; - void *thread_opaque; - uint32 is_multithread_parsing_enabled; - - threading_parse_scheme_t threading_parse_scheme; - function_init_parser_entries func_init_parser_entries; function_allocate_query_data func_allocate_query_data; function_free_query_data func_free_query_data; diff --git a/mixvbp/vbp_manager/vbp_vc1_parser.c b/mixvbp/vbp_manager/vbp_vc1_parser.c index 65b6f76..12e28e9 100755 --- a/mixvbp/vbp_manager/vbp_vc1_parser.c +++ b/mixvbp/vbp_manager/vbp_vc1_parser.c @@ -111,10 +111,6 @@ uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext) /* entry point not needed */ pcontext->parser_ops->flush = NULL; - pcontext->parser_ops->is_payload_start = NULL; - pcontext->parser_ops->parse_syntax_threading = NULL; - pcontext->parser_ops->post_parse_threading = NULL; - pcontext->parser_ops->query_thread_parsing_cap = NULL; return VBP_OK; } diff --git a/mixvbp/vbp_manager/vbp_vp8_parser.c b/mixvbp/vbp_manager/vbp_vp8_parser.c index 9ac097d..73d9281 100755 --- a/mixvbp/vbp_manager/vbp_vp8_parser.c +++ b/mixvbp/vbp_manager/vbp_vp8_parser.c @@ -67,10 +67,6 @@ uint32 vbp_init_parser_entries_vp8(vbp_context *pcontext) pcontext->parser_ops->is_frame_start = NULL; pcontext->parser_ops->flush = NULL; - pcontext->parser_ops->is_payload_start = NULL; - pcontext->parser_ops->parse_syntax_threading = NULL; - pcontext->parser_ops->post_parse_threading = NULL; - pcontext->parser_ops->query_thread_parsing_cap = NULL; return VBP_OK; } diff --git a/mixvbp/vbp_plugin/h264/h264parse.c b/mixvbp/vbp_plugin/h264/h264parse.c index 330c5e6..cbb04fe 100755 --- a/mixvbp/vbp_plugin/h264/h264parse.c +++ b/mixvbp/vbp_plugin/h264/h264parse.c @@ -65,7 +65,7 @@ h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOf /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ -// keep for h264 secure parse + h264_Status h264_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader) { //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; @@ -114,43 +114,14 @@ h264_Status h264_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader } } - pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); - pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag ? 
- (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) : - ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1); + pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); + pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag? \ + (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) : \ + ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1); return H264_STATUS_OK; }; //// End of h264_active_par_set - - -h264_Status h264_set_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader) -{ - uint32_t pps_addr = pInfo->PPS_PADDR_GL + - SliceHeader->pic_parameter_id * sizeof(pic_param_set); - SliceHeader->active_PPS = (pic_param_set*)pps_addr; - pic_param_set* active_PPS = SliceHeader->active_PPS; - - if (active_PPS->seq_parameter_set_id >= MAX_NUM_SPS) - { - return H264_PPS_INVALID_PIC_ID; /// Invalid PPS detected - } - - uint32_t sps_addr = pInfo->SPS_PADDR_GL + \ - active_PPS->seq_parameter_set_id * sizeof(seq_param_set_all); - SliceHeader->active_SPS = (seq_param_set_used*)sps_addr; - seq_param_set_used* active_SPS = SliceHeader->active_SPS; - - if (active_SPS->seq_parameter_set_id >= MAX_NUM_SPS) - { - return H264_PPS_INVALID_PIC_ID; //// Invalid SPS detected - } - - return H264_STATUS_OK; -}; // End of h264_set_active_par_set - - - /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -202,183 +173,38 @@ h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_ SliceHeader->sh_error |= 4; } - } else { + } else { SliceHeader->sh_error |= 1; } - return retStatus; -} -h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) -{ - h264_Status retStatus = H264_STATUS_ERROR; - - //////////////////////////////////////////////////// - //// Parse slice header info - //// Part1: not depend on the active PPS/SPS - //// Part2/3: depend on the active parset - ////////////////////////////////////////////////// + //if(SliceHeader->sh_error) { + //pInfo->wl_err_flag |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; + //} - SliceHeader->sh_error = 0; - if (h264_Parse_Slice_Header_1(parent, pInfo, SliceHeader) == H264_STATUS_OK) - { - retStatus = h264_set_active_par_set(pInfo, SliceHeader); - } - if (retStatus == H264_STATUS_OK) + ////////////////////////////////// + //// Parse slice data (MB loop) + ////////////////////////////////// + //retStatus = h264_Parse_Slice_Data(pInfo); { - switch (SliceHeader->active_SPS->profile_idc) - { - case h264_ProfileBaseline: - case h264_ProfileMain: - case h264_ProfileExtended: - SliceHeader->active_PPS->transform_8x8_mode_flag = 0; - SliceHeader->active_PPS->pic_scaling_matrix_present_flag = 0; - SliceHeader->active_PPS->second_chroma_qp_index_offset = - SliceHeader->active_PPS->chroma_qp_index_offset; - - default: - break; - } - - if (h264_Parse_Slice_Header_2_opt(parent, pInfo, SliceHeader) != H264_STATUS_OK) - { - SliceHeader->sh_error |= 2; - } - else if (h264_Parse_Slice_Header_3_opt(parent, pInfo, SliceHeader) != H264_STATUS_OK) - { - SliceHeader->sh_error |= 4; - } - } else { - SliceHeader->sh_error |= 1; + //uint32_t data = 0; + //if( viddec_pm_peek_bits(parent, &data, 32) == -1) + //retStatus = H264_STATUS_ERROR; } + 
//h264_Parse_rbsp_trailing_bits(pInfo); return retStatus; } -h264_Status h264_Post_Parsing_Slice_Header(void *parent, h264_Info* pInfo, h264_Slice_Header_t *next_SliceHeader) -{ - - h264_Status retStatus = H264_STATUS_OK; - - memcpy(&pInfo->active_PPS, next_SliceHeader->active_PPS, sizeof(pic_param_set)); - memcpy(&pInfo->active_SPS, next_SliceHeader->active_SPS, sizeof(seq_param_set_used)); - - if ((1 == pInfo->primary_pic_type_plus_one) && (pInfo->got_start)) { - pInfo->img.recovery_point_found |= 4; - } - pInfo->primary_pic_type_plus_one = 0; - - pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); - pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag? \ - (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) : \ - ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1); - - pInfo->sei_information.recovery_point = 0; - - pInfo->img.current_slice_num++; - - - //////////////////////////////////////////////////////////////////////////// - // Processing if new picture coming - // 1) if it's the second field - // 2) if it's a new frame - //////////////////////////////////////////////////////////////////////////// - //AssignQuantParam(pInfo); - if (h264_is_new_picture_start(pInfo, *next_SliceHeader, pInfo->SliceHeader)) - { - // - ///----------------- New Picture.boundary detected-------------------- - // - pInfo->img.g_new_pic++; - - // - // Complete previous picture - h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old - //h264_hdr_post_poc(0, 0, use_old); - // - // Update slice structures: - h264_update_old_slice(pInfo, *next_SliceHeader); //cur->old; next->cur; - // - // 1) if resolution change: reset dpb - // 2) else: init frame store - h264_update_img_info(pInfo); //img, dpb - // - ///----------------- New frame.boundary detected-------------------- - // - pInfo->img.second_field = h264_is_second_field(pInfo); - if (pInfo->img.second_field == 0) - { - pInfo->img.g_new_frame = 1; - h264_dpb_update_queue_dangling_field(pInfo); - // - /// DPB management - /// 1) check the gaps - /// 2) assign fs for non-exist frames - /// 3) fill the gaps - /// 4) store frame into DPB if ... 
- // - //if(pInfo->SliceHeader.redundant_pic_cnt) - { - h264_dpb_gaps_in_frame_num_mem_management(pInfo); - } - } - // - /// Decoding POC - h264_hdr_decoding_poc (pInfo, 0, 0); - // - /// Init Frame Store for next frame - h264_dpb_init_frame_store (pInfo); - pInfo->img.current_slice_num = 1; - if (pInfo->SliceHeader.first_mb_in_slice != 0) - { - ////Come here means we have slice lost at the beginning, since no FMO support - pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17); - } - /// Emit out the New Frame - if (pInfo->img.g_new_frame) - { - h264_parse_emit_start_new_frame(parent, pInfo); - } - - h264_parse_emit_current_pic(parent, pInfo); - } - else ///////////////////////////////////////////////////// If Not a picture start - { - // - /// Update slice structures: cur->old; next->cur; - h264_update_old_slice(pInfo, *next_SliceHeader); - // - /// 1) if resolution change: reset dpb - /// 2) else: update img info - h264_update_img_info(pInfo); - } - - - ////////////////////////////////////////////////////////////// - // DPB reference list init and reordering - ////////////////////////////////////////////////////////////// - - //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field - h264_update_frame_type(pInfo); - -#ifndef USE_AVC_SHORT_FORMAT - h264_dpb_update_ref_lists(pInfo); -#endif - /// Emit out the current "good" slice - h264_parse_emit_current_slice(parent, pInfo); - - return retStatus; -} - /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ -h264_Status h264_Parse_NAL_Unit(void *parent, uint8_t* nal_unit_type, uint8_t *nal_ref_idc) +h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc) { h264_Status ret = H264_STATUS_ERROR; @@ -386,7 +212,7 @@ h264_Status h264_Parse_NAL_Unit(void *parent, uint8_t* nal_unit_type, uint8_t *n if (viddec_pm_get_bits(parent, &code, 8) != -1) { *nal_ref_idc = (uint8_t)((code >> 5) & 0x3); - *nal_unit_type = (uint8_t)((code >> 0) & 0x1f); + pInfo->nal_unit_type = (uint8_t)((code >> 0) & 0x1f); ret = H264_STATUS_OK; } @@ -604,58 +430,43 @@ int32_t h264_is_second_field(h264_Info * pInfo) /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ -int32_t h264_is_new_picture_start(h264_Info * pInfo, - h264_Slice_Header_t cur_slice, - h264_Slice_Header_t old_slice) +int32_t h264_is_new_picture_start(h264_Info * pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice) { int result = 0; - if (pInfo->number_of_first_au_info_nal_before_first_slice) { + if (pInfo->number_of_first_au_info_nal_before_first_slice) + { pInfo->number_of_first_au_info_nal_before_first_slice = 0; return 1; } - if (old_slice.pic_parameter_id != cur_slice.pic_parameter_id) { - return 1; - } - if (old_slice.frame_num != cur_slice.frame_num) { - return 1; - } - if (old_slice.field_pic_flag != cur_slice.field_pic_flag) { - return 1; - } - if (cur_slice.field_pic_flag && old_slice.field_pic_flag) { - if (old_slice.bottom_field_flag != cur_slice.bottom_field_flag) { - return 1; - } + result |= (old_slice.pic_parameter_id != cur_slice.pic_parameter_id); + result |= (old_slice.frame_num != cur_slice.frame_num); + result |= 
(old_slice.field_pic_flag != cur_slice.field_pic_flag); + if (cur_slice.field_pic_flag && old_slice.field_pic_flag) + { + result |= (old_slice.bottom_field_flag != cur_slice.bottom_field_flag); } - if ((old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \ - ((old_slice.nal_ref_idc == 0) || (cur_slice.nal_ref_idc == 0))) { - return 1; - } - if (old_slice.idr_flag != cur_slice.idr_flag) { - return 1; - } + result |= (old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \ + ((old_slice.nal_ref_idc == 0) || (cur_slice.nal_ref_idc == 0)); + result |= ( old_slice.idr_flag != cur_slice.idr_flag); - if (cur_slice.idr_flag && old_slice.idr_flag) { - if (old_slice.idr_pic_id != cur_slice.idr_pic_id) { - return 1; - } + if (cur_slice.idr_flag && old_slice.idr_flag) + { + result |= (old_slice.idr_pic_id != cur_slice.idr_pic_id); } - if (pInfo->active_SPS.pic_order_cnt_type == 0) { - if ((old_slice.pic_order_cnt_lsb != cur_slice.pic_order_cnt_lsb) || \ - (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom)) { - return 1; - } + if (pInfo->active_SPS.pic_order_cnt_type == 0) + { + result |= (old_slice.pic_order_cnt_lsb != cur_slice.pic_order_cnt_lsb); + result |= (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom); } - if (pInfo->active_SPS.pic_order_cnt_type == 1) { - if ((old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]) || \ - (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1])) { - return 1; - } + if (pInfo->active_SPS.pic_order_cnt_type == 1) + { + result |= (old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]); + result |= (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1]); } return result; diff --git a/mixvbp/vbp_plugin/h264/h264parse_dpb.c b/mixvbp/vbp_plugin/h264/h264parse_dpb.c old mode 100755 new mode 100644 diff --git a/mixvbp/vbp_plugin/h264/h264parse_pps.c b/mixvbp/vbp_plugin/h264/h264parse_pps.c index b4098ec..2c4cc52 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_pps.c +++ b/mixvbp/vbp_plugin/h264/h264parse_pps.c @@ -22,8 +22,6 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa } PictureParameterSet->pic_parameter_set_id = (uint8_t)code; - VTRACE("parsing PPS: id = %d", PictureParameterSet->pic_parameter_set_id); - code = h264_GetVLCElement(parent, pInfo, false); if (code > MAX_NUM_SPS - 1) { @@ -32,8 +30,6 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa } PictureParameterSet->seq_parameter_set_id = (uint8_t)code; - VTRACE("parsing PPS: refering SPS id = %d", PictureParameterSet->seq_parameter_set_id); - ///// entropy_coding_mode_flag viddec_pm_get_bits(parent, &code, 1); PictureParameterSet->entropy_coding_mode_flag = (uint8_t)code; diff --git a/mixvbp/vbp_plugin/h264/h264parse_sh.c b/mixvbp/vbp_plugin/h264/h264parse_sh.c index 02bd202..625e146 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_sh.c +++ b/mixvbp/vbp_plugin/h264/h264parse_sh.c @@ -95,17 +95,13 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice } ////// Check valid or not of first_mb_in_slice - int32_t PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); - int32_t FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag ? 
- (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) : - ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1); if (SliceHeader->structure == FRAME) { - max_mb_num = FrameHeightInMbs * PicWidthInMbs; + max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs; } else { - max_mb_num = FrameHeightInMbs * PicWidthInMbs / 2; + max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs / 2; } ///if(pInfo->img.MbaffFrameFlag) @@ -115,10 +111,7 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice } if (SliceHeader->first_mb_in_slice >= max_mb_num) - { - WTRACE("first mb in slice exceed max mb num."); break; - } if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) { @@ -172,121 +165,6 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice return ret; } -h264_Status h264_Parse_Slice_Header_2_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) -{ - h264_Status ret = H264_SliceHeader_ERROR; - - uint32_t code; - int32_t max_mb_num=0; - - do { - //////////////////////////////////// Slice header part 2////////////////// - - /// Frame_num - viddec_pm_get_bits(parent, &code, SliceHeader->active_SPS->log2_max_frame_num_minus4 + 4); - SliceHeader->frame_num = (int32_t)code; - - /// Picture structure - SliceHeader->structure = FRAME; - SliceHeader->field_pic_flag = 0; - SliceHeader->bottom_field_flag = 0; - - if (!(SliceHeader->active_SPS->sps_disp.frame_mbs_only_flag)) - { - /// field_pic_flag - viddec_pm_get_bits(parent, &code, 1); - SliceHeader->field_pic_flag = (uint8_t)code; - - if (SliceHeader->field_pic_flag) - { - viddec_pm_get_bits(parent, &code, 1); - SliceHeader->bottom_field_flag = (uint8_t)code; - - SliceHeader->structure = SliceHeader->bottom_field_flag ? BOTTOM_FIELD: TOP_FIELD; - } - } - - ////// Check valid or not of first_mb_in_slice - int32_t PicWidthInMbs = (SliceHeader->active_SPS->sps_disp.pic_width_in_mbs_minus1 + 1); - int32_t FrameHeightInMbs = SliceHeader->active_SPS->sps_disp.frame_mbs_only_flag ? 
- (SliceHeader->active_SPS->sps_disp.pic_height_in_map_units_minus1 + 1) : - ((SliceHeader->active_SPS->sps_disp.pic_height_in_map_units_minus1 + 1) << 1); - if (SliceHeader->structure == FRAME) - { - max_mb_num = FrameHeightInMbs * PicWidthInMbs; - } - else - { - max_mb_num = FrameHeightInMbs * PicWidthInMbs / 2; - } - - ///if(pInfo->img.MbaffFrameFlag) - if (SliceHeader->active_SPS->sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) - { - SliceHeader->first_mb_in_slice <<= 1; - } - - if (SliceHeader->first_mb_in_slice >= max_mb_num) - { - WTRACE("first mb in slice exceed max mb num."); - break; - } - - if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) - { - SliceHeader->idr_pic_id = h264_GetVLCElement(parent, pInfo, false); - } - - if (SliceHeader->active_SPS->pic_order_cnt_type == 0) - { - viddec_pm_get_bits(parent, &code , SliceHeader->active_SPS->log2_max_pic_order_cnt_lsb_minus4 + 4); - SliceHeader->pic_order_cnt_lsb = (uint32_t)code; - - if ((SliceHeader->active_PPS->pic_order_present_flag) && !(SliceHeader->field_pic_flag)) - { - SliceHeader->delta_pic_order_cnt_bottom = h264_GetVLCElement(parent, pInfo, true); - } - else - { - SliceHeader->delta_pic_order_cnt_bottom = 0; - } - } - - if ((SliceHeader->active_SPS->pic_order_cnt_type == 1) && - !(SliceHeader->active_SPS->delta_pic_order_always_zero_flag)) - { - SliceHeader->delta_pic_order_cnt[0] = h264_GetVLCElement(parent, pInfo, true); - if ((SliceHeader->active_PPS->pic_order_present_flag) && !(SliceHeader->field_pic_flag)) - { - SliceHeader->delta_pic_order_cnt[1] = h264_GetVLCElement(parent, pInfo, true); - } - } - - if (SliceHeader->active_PPS->redundant_pic_cnt_present_flag) - { - SliceHeader->redundant_pic_cnt = h264_GetVLCElement(parent, pInfo, false); - if (SliceHeader->redundant_pic_cnt > 127) - break; - } - else - { - SliceHeader->redundant_pic_cnt = 0; - } - - ret = H264_STATUS_OK; - } while (0); - - //////////// FMO is not supported curently, so comment out the following code - //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) ) - //{ - // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile - //} - - return ret; -} - - - /*-----------------------------------------------------------------------------------------*/ // slice header 3 // (direct_spatial_mv_pred_flag, num_ref_idx, pic_list_reorder, PWT, ref_pic_remark, alpha, beta, etc) @@ -366,13 +244,14 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice //// //// Parse Pred_weight_table but not store it becasue it will be reparsed in HW //// - if (((pInfo->active_PPS.weighted_pred_flag) - && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) - || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) + if (((pInfo->active_PPS.weighted_pred_flag) && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) { viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + pInfo->h264_pwt_enabled = 1; + pInfo->h264_pwt_start_byte_offset = byte_offset; + pInfo->h264_pwt_start_bit_offset = bits_offset; if (h264_Parse_Pred_Weight_Table(parent, pInfo, SliceHeader) != H264_STATUS_OK) { @@ -380,190 +259,18 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, 
h264_Info* pInfo, h264_Slice } viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - } - - - //// - //// Parse Ref_pic marking if there - //// - if (SliceHeader->nal_ref_idc != 0) - { - if (h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, SliceHeader) != H264_STATUS_OK) + if (0 == bits_offset) { - WTRACE("ref pic marking failed during slice header parsing."); - break; - } - } - - if ((pInfo->active_PPS.entropy_coding_mode_flag) && (SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) - { - SliceHeader->cabac_init_idc = h264_GetVLCElement(parent, pInfo, false); - } - else - { - SliceHeader->cabac_init_idc = 0; - } - - if (SliceHeader->cabac_init_idc > 2) - { - break; - } - - SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true); - if ((SliceHeader->slice_qp_delta > (25 - pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26 + pInfo->active_PPS.pic_init_qp_minus26))) - { - WTRACE("slice_qp_delta value is invalid."); - break; - } - - if ((SliceHeader->slice_type == h264_PtypeSP) || (SliceHeader->slice_type == h264_PtypeSI)) - { - if (SliceHeader->slice_type == h264_PtypeSP) - { - viddec_pm_get_bits(parent, &code, 1); - SliceHeader->sp_for_switch_flag = (uint8_t)code; - - } - SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true); - - if ((SliceHeader->slice_qs_delta > (25 - pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) - { - WTRACE("slice_qp_delta value is invalid."); - break; - } - } - if (pInfo->active_PPS.deblocking_filter_control_present_flag) - { - SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false); - if (SliceHeader->disable_deblocking_filter_idc != 1) - { - SliceHeader->slice_alpha_c0_offset_div2 = h264_GetVLCElement(parent, pInfo, true); - slice_alpha_c0_offset = SliceHeader->slice_alpha_c0_offset_div2 << 1; - if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12) - { - break; - } - - SliceHeader->slice_beta_offset_div2 = h264_GetVLCElement(parent, pInfo, true); - slice_beta_offset = SliceHeader->slice_beta_offset_div2 << 1; - if (slice_beta_offset < -12 || slice_beta_offset > 12) - { - break; - } + pInfo->h264_pwt_end_byte_offset = byte_offset-1; + pInfo->h264_pwt_end_bit_offset = 8; } else { - SliceHeader->slice_alpha_c0_offset_div2 = 0; - SliceHeader->slice_beta_offset_div2 = 0; - } - } - - ret = H264_STATUS_OK; - } while (0); - - //////////// FMO is not supported currently, so comment out the following code - //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) ) - //{ - // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile - //} - - return ret; -} - - -h264_Status h264_Parse_Slice_Header_3_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) -{ - h264_Status ret = H264_SliceHeader_ERROR; - - //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; - int32_t slice_alpha_c0_offset, slice_beta_offset; - uint32_t code; - uint32_t bits_offset =0, byte_offset =0; - uint8_t is_emul =0; - - do { - /// direct_spatial_mv_pred_flag - if (SliceHeader->slice_type == h264_PtypeB) - { - viddec_pm_get_bits(parent, &code , 1); - SliceHeader->direct_spatial_mv_pred_flag = (uint8_t)code; - } - else - { - SliceHeader->direct_spatial_mv_pred_flag = 0; - } -
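The new h264_pwt_* bookkeeping above records where the pred weight table sits in the access unit so the hardware can re-parse it later. A small sketch of the end-position normalization it performs, with plain types standing in for the parser state (assumed stand-ins, not the viddec API):

    #include <stdint.h>

    typedef struct {
        uint32_t end_byte_offset;
        uint32_t end_bit_offset;
    } pwt_end_t;

    /* When the bit cursor sits exactly on a byte boundary (bit offset 0 of
     * byte N), the table end is recorded as bit 8 of byte N-1, so the span
     * never names a byte that contains no pred-weight-table bits. */
    static pwt_end_t normalize_pwt_end(uint32_t byte_offset, uint32_t bits_offset)
    {
        pwt_end_t e;
        if (bits_offset == 0) {
            e.end_byte_offset = byte_offset - 1;  /* last fully consumed byte */
            e.end_bit_offset  = 8;
        } else {
            e.end_byte_offset = byte_offset;
            e.end_bit_offset  = bits_offset;
        }
        return e;
    }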
- // - // Reset ref_idx and Override it if it exists - // - SliceHeader->num_ref_idx_l0_active = SliceHeader->active_PPS->num_ref_idx_l0_active; - SliceHeader->num_ref_idx_l1_active = SliceHeader->active_PPS->num_ref_idx_l1_active; - - if ((SliceHeader->slice_type == h264_PtypeP) || - (SliceHeader->slice_type == h264_PtypeSP) || - (SliceHeader->slice_type == h264_PtypeB)) - { - viddec_pm_get_bits(parent, &code, 1); - SliceHeader->num_ref_idx_active_override_flag = (uint8_t)code; - - if (SliceHeader->num_ref_idx_active_override_flag) - { - SliceHeader->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false) + 1; - if (SliceHeader->slice_type == h264_PtypeB) - { - SliceHeader->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false) + 1; - } - } - } - - if (SliceHeader->slice_type != h264_PtypeB) - { - SliceHeader->num_ref_idx_l1_active = 0; - } - - if ((SliceHeader->num_ref_idx_l0_active > MAX_NUM_REF_FRAMES) || - (SliceHeader->num_ref_idx_l1_active > MAX_NUM_REF_FRAMES)) - { - WTRACE("ref index greater than expected during slice header parsing."); - break; - } - -#ifdef USE_AVC_SHORT_FORMAT - bool keepParsing = false; - keepParsing = h264_is_new_picture_start(pInfo, *SliceHeader, pInfo->SliceHeader) && - (SliceHeader->nal_ref_idc != 0); - if (!keepParsing) - { - ITRACE("short format parsing: no need to go on!"); - ret = H264_STATUS_OK; - break; - } -#endif - if (h264_Parse_Ref_Pic_List_Reordering(parent, pInfo, SliceHeader) != H264_STATUS_OK) - { - WTRACE("ref list reordering failed during slice header parsing."); - break; - } - - - //// - //// Parse Pred_weight_table but not store it because it will be reparsed in HW - //// - if (((SliceHeader->active_PPS->weighted_pred_flag) - && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) - || ((SliceHeader->active_PPS->weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) - { - - //viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - - if (h264_Parse_Pred_Weight_Table_opt(parent, pInfo, SliceHeader) != H264_STATUS_OK) - { - break; + pInfo->h264_pwt_end_byte_offset = byte_offset; + pInfo->h264_pwt_end_bit_offset = bits_offset; } - viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - }
(SliceHeader->slice_qs_delta < -(26 + SliceHeader->active_PPS->pic_init_qs_minus26)) ) + if ((SliceHeader->slice_qs_delta > (25 - pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) { WTRACE("slice_qp_delta value is invalid."); break; } } - if (SliceHeader->active_PPS->deblocking_filter_control_present_flag) + if (pInfo->active_PPS.deblocking_filter_control_present_flag) { SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false); if (SliceHeader->disable_deblocking_filter_idc != 1) @@ -660,7 +363,6 @@ h264_Status h264_Parse_Slice_Header_3_opt(void *parent, h264_Info* pInfo, h264_S } - /*--------------------------------------------------------------------------------------------------*/ // // The syntax elements reordering_of_pic_nums_idc, abs_diff_pic_num_minus1, and long_term_pic_num @@ -675,7 +377,7 @@ h264_Status h264_Parse_Slice_Header_3_opt(void *parent, h264_Info* pInfo, h264_S // /*--------------------------------------------------------------------------------------------------*/ -h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) { //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; int32_t reorder= -1; @@ -698,18 +400,14 @@ h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo, h return H264_SliceHeader_ERROR; } - SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = - h264_GetVLCElement(parent, pInfo, false); - if ((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || - (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1)) + SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); + if ((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1)) { - SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = - h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); } else if (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 2) { - SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = - h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); } } while (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] != 3); @@ -732,16 +430,13 @@ h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo, h return H264_SliceHeader_ERROR; } SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); - if ((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || - (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1)) + if ((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1)) { - SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = - h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); } else if 
(SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 2) { - SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = - h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); } } while (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] != 3); } @@ -819,8 +514,7 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli } else { - SliceHeader->sh_predwttbl.luma_weight_l1[i] = - (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; } @@ -841,8 +535,7 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli { for (j = 0; j < 2; j++) { - SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = - (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; } } @@ -855,113 +548,6 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli } ///// End of h264_Parse_Pred_Weight_Table -h264_Status h264_Parse_Pred_Weight_Table_opt(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) -{ - uint32_t i = 0, j = 0; - uint32_t flag; - - SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(parent, pInfo, false); - - if (SliceHeader->active_SPS->sps_disp.chroma_format_idc != 0) - { - SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(parent,pInfo, false); - } - - for (i = 0; i < SliceHeader->num_ref_idx_l0_active; i++) - { - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - SliceHeader->sh_predwttbl.luma_weight_l0_flag = flag; - - if (SliceHeader->sh_predwttbl.luma_weight_l0_flag) - { - SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(parent, pInfo, true); - SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(parent, pInfo, true); - } - else - { - SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); - SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0; - } - - if (SliceHeader->active_SPS->sps_disp.chroma_format_idc != 0) - { - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - SliceHeader->sh_predwttbl.chroma_weight_l0_flag = flag; - - if (SliceHeader->sh_predwttbl.chroma_weight_l0_flag) - { - for (j = 0; j < 2; j++) - { - SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); - SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); - } - } - else - { - for (j = 0; j < 2; j++) - { - SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = - (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); - SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; - } - } - } - - } - - if (SliceHeader->slice_type == h264_PtypeB) - { - for (i = 0; i < SliceHeader->num_ref_idx_l1_active; i++) - { - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - SliceHeader->sh_predwttbl.luma_weight_l1_flag = flag; - - if (SliceHeader->sh_predwttbl.luma_weight_l1_flag) - { - SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(parent, pInfo, true); - SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(parent, pInfo, true); - } - else - { - SliceHeader->sh_predwttbl.luma_weight_l1[i] = - 
(1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); - SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; - } - - if (SliceHeader->active_SPS->sps_disp.chroma_format_idc != 0) - { - viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); - SliceHeader->sh_predwttbl.chroma_weight_l1_flag = flag; - - if (SliceHeader->sh_predwttbl.chroma_weight_l1_flag) - { - for (j = 0; j < 2; j++) - { - SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = - h264_GetVLCElement(parent, pInfo, true); - SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = - h264_GetVLCElement(parent, pInfo, true); - } - } - else - { - for (j = 0; j < 2; j++) - { - SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = - (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); - SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; - } - } - } - - } - } - - return H264_STATUS_OK; -} - - - /*--------------------------------------------------------------------------------------------------*/ // The syntax elements specify marking of the reference pictures. // 1)IDR: no_output_of_prior_pics_flag, @@ -1014,32 +600,25 @@ h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent, h264_Info* pInfo,h264_S { if (i < NUM_MMCO_OPERATIONS) { - SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = - h264_GetVLCElement(parent, pInfo, false); - if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || - (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3)) + SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = h264_GetVLCElement(parent, pInfo, false); + if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3)) { - SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = - h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = h264_GetVLCElement(parent, pInfo, false); } if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2) { - SliceHeader->sh_dec_refpic.long_term_pic_num[i] = - h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_dec_refpic.long_term_pic_num[i] = h264_GetVLCElement(parent, pInfo, false); } - if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || - (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6)) + if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6)) { - SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = - h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = h264_GetVLCElement(parent, pInfo, false); } if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4) { - SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = - h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = h264_GetVLCElement(parent, pInfo, false); } if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5) diff --git a/mixvbp/vbp_plugin/h264/include/h264.h b/mixvbp/vbp_plugin/h264/include/h264.h index acb3a59..7015c37 100755 --- a/mixvbp/vbp_plugin/h264/include/h264.h +++ b/mixvbp/vbp_plugin/h264/include/h264.h @@ -681,8 +681,56 @@ extern "C" { int8_t chroma_offset_l1[32][2]; } h264_pred_weight_table; -#define MAX_USER_DATA_SIZE 1024 + typedef struct _h264_Slice_Header + { + int32_t first_mb_in_slice; //UE + int32_t frame_num; //UV + int32_t 
pic_order_cnt_lsb; //UV + int32_t delta_pic_order_cnt_bottom; //SE + int32_t delta_pic_order_cnt[2]; //SE + int32_t redundant_pic_cnt; //UE + + uint32_t num_ref_idx_l0_active; //UE + uint32_t num_ref_idx_l1_active; //UE + + int32_t slice_qp_delta; //SE + int32_t slice_qs_delta; //SE + int32_t slice_alpha_c0_offset_div2; //SE + int32_t slice_beta_offset_div2; //SE + int32_t slice_group_change_cycle; //UV + + h264_pred_weight_table sh_predwttbl; + + ///// Flags or IDs + //h264_ptype_t slice_type; //UE + uint8_t slice_type; + uint8_t nal_ref_idc; + uint8_t structure; + uint8_t pic_parameter_id; //UE + + uint8_t field_pic_flag; + uint8_t bottom_field_flag; + uint8_t idr_flag; //UE + uint8_t idr_pic_id; //UE + + uint8_t sh_error; + uint8_t cabac_init_idc; //UE + uint8_t sp_for_switch_flag; + uint8_t disable_deblocking_filter_idc; //UE + + uint8_t direct_spatial_mv_pred_flag; + uint8_t num_ref_idx_active_override_flag; + int16_t current_slice_nr; + + //// For Ref list reordering + h264_Dec_Ref_Pic_Marking_t sh_dec_refpic; + h264_Ref_Pic_List_Reordering_t sh_refpic_l0; + h264_Ref_Pic_List_Reordering_t sh_refpic_l1; + + } h264_Slice_Header_t; + +#define MAX_USER_DATA_SIZE 1024 typedef struct _h264_user_data_t { h264_sei_payloadtype user_data_type; @@ -780,71 +828,6 @@ extern "C" { } seq_param_set_all, *seq_param_set_all_ptr; - typedef struct _h264_Slice_Header - { - int32_t first_mb_in_slice; //UE - int32_t frame_num; //UV - int32_t pic_order_cnt_lsb; //UV - int32_t delta_pic_order_cnt_bottom; //SE - int32_t delta_pic_order_cnt[2]; //SE - int32_t redundant_pic_cnt; //UE - - uint32_t num_ref_idx_l0_active; //UE - uint32_t num_ref_idx_l1_active; //UE - - int32_t slice_qp_delta; //SE - int32_t slice_qs_delta; //SE - int32_t slice_alpha_c0_offset_div2; //SE - int32_t slice_beta_offset_div2; //SE - int32_t slice_group_change_cycle; //UV - - h264_pred_weight_table sh_predwttbl; - - ///// Flags or IDs - //h264_ptype_t slice_type; //UE - uint8_t slice_type; - uint8_t nal_ref_idc; - uint8_t structure; - uint8_t pic_parameter_id; //UE - - uint8_t field_pic_flag; - uint8_t bottom_field_flag; - uint8_t idr_flag; //UE - uint8_t idr_pic_id; //UE - - uint8_t sh_error; - uint8_t cabac_init_idc; //UE - uint8_t sp_for_switch_flag; - uint8_t disable_deblocking_filter_idc; //UE - - uint8_t direct_spatial_mv_pred_flag; - uint8_t num_ref_idx_active_override_flag; - int16_t current_slice_nr; - - //// For Ref list reordering - h264_Dec_Ref_Pic_Marking_t sh_dec_refpic; - h264_Ref_Pic_List_Reordering_t sh_refpic_l0; - h264_Ref_Pic_List_Reordering_t sh_refpic_l1; - - seq_param_set_used* active_SPS; - pic_param_set* active_PPS; - uint32_t parse_done; // flag to indicate parse done - - // temp field for multithread parsing to store bitstream info - uint32_t bstrm_buf_buf_index; - uint32_t bstrm_buf_buf_st; - uint32_t bstrm_buf_buf_end; - uint32_t bstrm_buf_buf_bitoff; - uint32_t au_pos; - uint32_t list_off; - uint32_t phase; - uint32_t emulation_byte_counter; - uint32_t is_emul_reqd; - int32_t list_start_offset; - int32_t list_end_offset; - int32_t list_total_bytes; - - } h264_Slice_Header_t; ///// Image control parameter//////////// typedef struct _h264_img_par @@ -949,14 +932,13 @@ extern "C" { //// Structures //// need to gurantee active_SPS and active_PPS start from 4-bytes alignment address seq_param_set_used active_SPS; - pic_param_set active_PPS; + pic_param_set active_PPS; h264_Slice_Header_t SliceHeader; OldSliceParams old_slice; sei_info sei_information; - h264_Slice_Header_t* working_sh[150]; // working slice 
header for multithreading h264_img_par img; uintptr_t SPS_PADDR_GL; diff --git a/mixvbp/vbp_plugin/h264/include/h264parse.h b/mixvbp/vbp_plugin/h264/include/h264parse.h index 6adee42..2b1c7a6 100755 --- a/mixvbp/vbp_plugin/h264/include/h264parse.h +++ b/mixvbp/vbp_plugin/h264/include/h264parse.h @@ -89,7 +89,7 @@ extern "C" { //////////////////////////////////////////////////////////////////// //NAL - extern h264_Status h264_Parse_NAL_Unit(void *parent, uint8_t *nal_unit_type, uint8_t *nal_ref_idc); + extern h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc); ////// Slice header extern h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); @@ -97,12 +97,6 @@ extern "C" { extern h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); extern h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); -// For multi-thread parsing optimized. - extern h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); - extern h264_Status h264_Parse_Slice_Header_2_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); - extern h264_Status h264_Parse_Slice_Header_3_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); - - extern h264_Status h264_Post_Parsing_Slice_Header(void *parent, h264_Info* pInfo, h264_Slice_Header_t *next_SliceHeader); ////// SPS extern h264_Status h264_Parse_SeqParameterSet(void *parent, h264_Info * pInfo,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame); @@ -119,8 +113,6 @@ extern "C" { extern h264_Status h264_Parse_Pred_Weight_Table(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); extern h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); - extern h264_Status h264_Parse_Pred_Weight_Table_opt(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); - ///// Mem functions diff --git a/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c index eb85022..62e6ab3 100755 --- a/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c +++ b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c @@ -204,12 +204,10 @@ void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) { - uint32_t i=0,nitems=0; + uint32_t i=0,nitems=0; ///////////////////////// Frame attributes////////////////////////// -// Remove workload related stuff -# if 0 //Push data into current workload if first frame or frame_boundary already detected by non slice nal if ( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal)) { @@ -229,7 +227,6 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) pInfo->is_current_workload_done=1; } -#endif ///////////////////// SPS///////////////////// // h264_parse_emit_sps(parent, pInfo); diff --git a/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c index b9ec1c0..9490ddd 100755 --- a/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c +++ b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c @@ -40,15 +40,14 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) uint8_t nal_ref_idc 
= 0; - uint8_t nal_unit_type = 0; ///// Parse NAL Unit header pInfo->img.g_new_frame = 0; pInfo->push_to_cur = 1; pInfo->is_current_workload_done =0; + pInfo->nal_unit_type = 0; - h264_Parse_NAL_Unit(parent, &nal_unit_type, &nal_ref_idc); - pInfo->nal_unit_type = nal_unit_type; + h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); ///// Check frame bounday for non-vcl elimitter h264_check_previous_frame_end(pInfo); diff --git a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c index fcf5c36..28a319a 100755 --- a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c +++ b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c @@ -7,12 +7,6 @@ #include "h264parse_dpb.h" #include -#include - -uint32_t viddec_threading_backup_ctx_info(void *parent, h264_Slice_Header_t *next_SliceHeader); -uint32_t viddec_threading_restore_ctx_info(void *parent, h264_Slice_Header_t *next_SliceHeader); - -#define MAX_SLICE_HEADER 150 /* Init function which can be called to intialized local context on open and flush and preserve*/ void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) @@ -30,26 +24,6 @@ void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) /* picture level info which will always be initialized */ h264_init_Info_under_sps_pps_level(pInfo); - uint32_t i; - for(i = 0; i < MAX_SLICE_HEADER; i++) { - pInfo->working_sh[i] = (h264_Slice_Header_t*)malloc(sizeof(h264_Slice_Header_t)); - assert(pInfo->working_sh[i] != NULL); - - pInfo->working_sh[i]->parse_done = 0; - pInfo->working_sh[i]->bstrm_buf_buf_index = 0; - pInfo->working_sh[i]->bstrm_buf_buf_st = 0; - pInfo->working_sh[i]->bstrm_buf_buf_end = 0; - pInfo->working_sh[i]->bstrm_buf_buf_bitoff = 0; - pInfo->working_sh[i]->au_pos = 0; - pInfo->working_sh[i]->list_off = 0; - pInfo->working_sh[i]->phase = 0; - pInfo->working_sh[i]->emulation_byte_counter = 0; - pInfo->working_sh[i]->is_emul_reqd = 0; - pInfo->working_sh[i]->list_start_offset = 0; - pInfo->working_sh[i]->list_end_offset = 0; - pInfo->working_sh[i]->list_total_bytes = 0; - pInfo->working_sh[i]->slice_group_change_cycle = 0; - } return; } @@ -66,7 +40,6 @@ uint32_t viddec_h264_parse(void *parent, void *ctxt) h264_Status status = H264_STATUS_ERROR; uint8_t nal_ref_idc = 0; - uint8_t nal_unit_type = 0; ///// Parse NAL Unit header pInfo->img.g_new_frame = 0; @@ -74,10 +47,9 @@ uint32_t viddec_h264_parse(void *parent, void *ctxt) pInfo->is_current_workload_done =0; pInfo->nal_unit_type = 0; - h264_Parse_NAL_Unit(parent, &nal_unit_type, &nal_ref_idc); + h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); VTRACE("Start parsing NAL unit, type = %d", pInfo->nal_unit_type); - pInfo->nal_unit_type = nal_unit_type; ///// Check frame bounday for non-vcl elimitter h264_check_previous_frame_end(pInfo); @@ -446,64 +418,6 @@ uint32_t viddec_h264_parse(void *parent, void *ctxt) return status; } - -uint32_t viddec_h264_threading_parse(void *parent, void *ctxt, uint32_t slice_index) -{ - struct h264_viddec_parser* parser = ctxt; - - h264_Info * pInfo = &(parser->info); - - h264_Status status = H264_STATUS_ERROR; - - uint8_t nal_ref_idc = 0; - uint8_t nal_unit_type = 0; - - h264_Parse_NAL_Unit(parent, &nal_unit_type, &nal_ref_idc); - - pInfo->nal_unit_type = nal_unit_type; - - - //////// Parse valid NAL unit - if (nal_unit_type == h264_NAL_UNIT_TYPE_SLICE) { - h264_Slice_Header_t* next_SliceHeader = pInfo->working_sh[slice_index]; - memset(next_SliceHeader, 0, sizeof(h264_Slice_Header_t)); - - next_SliceHeader->nal_ref_idc = nal_ref_idc; - - - 
//////////////////////////////////////////////////////////////////////////// - // Step 2: Parsing slice header - //////////////////////////////////////////////////////////////////////////// - /// IDR flag - next_SliceHeader->idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); - - - /// Pass slice header - status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP_opt(parent, pInfo, next_SliceHeader); - - viddec_threading_backup_ctx_info(parent, next_SliceHeader); - - if (next_SliceHeader->sh_error & 3) - { - ETRACE("Slice Header parsing error."); - status = H264_STATUS_ERROR; - return status; - } - - //h264_Post_Parsing_Slice_Header(parent, pInfo, &next_SliceHeader); - next_SliceHeader->parse_done = 1; - - } else { - ETRACE("Wrong NALU. Multi thread is supposed to just parse slice nalu type."); - status = H264_STATUS_ERROR; - return status; - } - - return status; -} - - - void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) { /* Should return size of my structure */ @@ -538,104 +452,7 @@ void viddec_h264_flush(void *parent, void *ctxt) p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC; p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC; - for(i = 0; i < MAX_SLICE_HEADER; i++) { - free(pInfo->working_sh[i]); - pInfo->working_sh[i] = NULL; - } return; } -uint32_t viddec_h264_payload_start(void *parent) -{ - - uint32_t code; - uint8_t nal_unit_type = 0; - if ( viddec_pm_peek_bits(parent, &code, 8) != -1) - { - nal_unit_type = (uint8_t)((code >> 0) & 0x1f); - } - //check that whether slice data starts - if (nal_unit_type == h264_NAL_UNIT_TYPE_SLICE) - { - return 1; - } else { - return 0; - } -} - -uint32_t viddec_h264_post_parse(void *parent, void *ctxt, uint32_t slice_index) -{ - struct h264_viddec_parser* parser = ctxt; - h264_Info * pInfo = &(parser->info); - h264_Status status = H264_STATUS_ERROR; - - h264_Slice_Header_t* next_SliceHeader = pInfo->working_sh[slice_index]; - - while (next_SliceHeader->parse_done != 1) { - sleep(0); - //WTRACE("slice header[%d] parse not finish, block to wait.", slice_index); - } - - viddec_threading_restore_ctx_info(parent, next_SliceHeader); - status = h264_Post_Parsing_Slice_Header(parent, pInfo, next_SliceHeader); - - next_SliceHeader->parse_done = 0; - - return status; -} - - -uint32_t viddec_h264_query_thread_parsing_cap(void) -{ - // current implementation of h.264 is capable to enable multi-thread parsing - return 1; -} - -uint32_t viddec_threading_backup_ctx_info(void *parent, h264_Slice_Header_t *next_SliceHeader) -{ - h264_Status retStatus = H264_STATUS_OK; - - viddec_pm_cxt_t* pm_cxt = (viddec_pm_cxt_t*) parent; - - next_SliceHeader->bstrm_buf_buf_index = pm_cxt->getbits.bstrm_buf.buf_index; - next_SliceHeader->bstrm_buf_buf_st = pm_cxt->getbits.bstrm_buf.buf_st; - next_SliceHeader->bstrm_buf_buf_end = pm_cxt->getbits.bstrm_buf.buf_end; - next_SliceHeader->bstrm_buf_buf_bitoff = pm_cxt->getbits.bstrm_buf.buf_bitoff; - - next_SliceHeader->au_pos = pm_cxt->getbits.au_pos; - next_SliceHeader->list_off = pm_cxt->getbits.list_off; - next_SliceHeader->phase = pm_cxt->getbits.phase; - next_SliceHeader->emulation_byte_counter = pm_cxt->getbits.emulation_byte_counter; - next_SliceHeader->is_emul_reqd = pm_cxt->getbits.is_emul_reqd; - - next_SliceHeader->list_start_offset = pm_cxt->list.start_offset; - next_SliceHeader->list_end_offset = pm_cxt->list.end_offset; - next_SliceHeader->list_total_bytes = pm_cxt->list.total_bytes; - - return retStatus; -} - -uint32_t viddec_threading_restore_ctx_info(void *parent, h264_Slice_Header_t 
*next_SliceHeader) -{ - h264_Status retStatus = H264_STATUS_OK; - - viddec_pm_cxt_t* pm_cxt = (viddec_pm_cxt_t*) parent; - - pm_cxt->getbits.bstrm_buf.buf_index = next_SliceHeader->bstrm_buf_buf_index; - pm_cxt->getbits.bstrm_buf.buf_st = next_SliceHeader->bstrm_buf_buf_st; - pm_cxt->getbits.bstrm_buf.buf_end = next_SliceHeader->bstrm_buf_buf_end; - pm_cxt->getbits.bstrm_buf.buf_bitoff = next_SliceHeader->bstrm_buf_buf_bitoff; - - pm_cxt->getbits.au_pos = next_SliceHeader->au_pos; - pm_cxt->getbits.list_off = next_SliceHeader->list_off; - pm_cxt->getbits.phase = next_SliceHeader->phase; - pm_cxt->getbits.emulation_byte_counter = next_SliceHeader->emulation_byte_counter; - pm_cxt->getbits.is_emul_reqd = next_SliceHeader->is_emul_reqd; - - pm_cxt->list.start_offset = next_SliceHeader->list_start_offset; - pm_cxt->list.end_offset = next_SliceHeader->list_end_offset; - pm_cxt->list.total_bytes = next_SliceHeader->list_total_bytes; - - return retStatus; -} -- cgit v1.2.3 From 2ec3bffb66a4b80e2003ecfd0cb56f6fe1bee6c0 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Wed, 26 Feb 2014 22:44:05 +0800 Subject: Support Modular DRM for the Merrifield platform BZ: 175259 Support Modular DRM for the Merrifield platform Change-Id: Iaa56f19ea44f216183373dd3c45794773ed1b403 Signed-off-by: wfeng6 --- .../secvideo/merrifield/vbp_h264secure_parser.c | 301 +++++++++++++++- .../secvideo/baytrail/viddec_h264secure_parse.c | 5 +- .../secvideo/merrifield/viddec_h264secure_parse.c | 124 ++++++- mixvbp/vbp_plugin/h264/viddec_h264_parse.c | 1 - test/Android.mk | 2 +- test/mix_decoder.cpp | 74 +++- videodecoder/Android.mk | 4 + videodecoder/VideoDecoderBase.cpp | 2 +- videodecoder/VideoDecoderDefs.h | 4 + .../merrifield/VideoDecoderAVCSecure.cpp | 392 +++++++++++++++++++-- .../securevideo/merrifield/VideoDecoderAVCSecure.h | 24 +- .../securevideo/merrifield/VideoFrameInfo.h | 20 ++ 12 files changed, 905 insertions(+), 48 deletions(-) create mode 100755 videodecoder/securevideo/merrifield/VideoFrameInfo.h diff --git a/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.c b/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.c index 3f3eeef..60957a5 100755 --- a/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.c +++ b/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.c @@ -1123,10 +1123,10 @@ static void vbp_set_codec_data_h264secure( } -static uint32_t vbp_add_pic_data_h264secure(vbp_context *pcontext) +static uint32_t vbp_update_pic_data_h264secure(vbp_context *pcontext) { viddec_pm_cxt_t *cxt = pcontext->parser_cxt; - VTRACE("vbp_add_pic_data_h264secure +++"); + VTRACE("vbp_update_pic_data_h264secure +++"); vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; struct h264_viddec_parser* parser = NULL; vbp_picture_data_h264* pic_data = NULL; @@ -1137,7 +1137,160 @@ static uint32_t vbp_add_pic_data_h264secure(vbp_context *pcontext) if (0 == parser->info.SliceHeader.first_mb_in_slice) { /* a new picture is parsed */ - query_data->num_pictures++; + query_data->num_pictures = 1; + } + + if (query_data->num_pictures == 0) + { + /* partial frame */ + query_data->num_pictures = 1; + } + + if (query_data->num_pictures > MAX_NUM_PICTURES) + { + ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES); + return VBP_DATA; + } + + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + WTRACE("MB address does not start from 0!"); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + pic_parms = pic_data->pic_parms; + + 
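A worked version of the picture size fields that the code below derives from the SPS, with assumed example numbers standing in for the parser state:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* assumed example stream: 1920x1088, frame_mbs_only_flag = 1 */
        uint32_t pic_width_in_mbs_minus1 = 119;        /* 120 MBs = 1920 px */
        uint32_t pic_height_in_map_units_minus1 = 67;  /* 68 map units      */
        uint8_t  frame_mbs_only_flag = 1;

        /* same formula as the pic_parms assignment below: a map unit counts
         * once for frame-only coding, twice when field coding is possible */
        uint32_t picture_height_in_mbs_minus1 =
            (2 - frame_mbs_only_flag) * (pic_height_in_map_units_minus1 + 1) - 1;

        printf("width in MBs = %u, height in MBs = %u (%u px)\n",
               pic_width_in_mbs_minus1 + 1,
               picture_height_in_mbs_minus1 + 1,
               (picture_height_in_mbs_minus1 + 1) * 16);  /* 68 MBs = 1088 px */
        return 0;
    }

With frame_mbs_only_flag = 0 the same formula doubles the map-unit count, since each map unit then covers a field macroblock pair.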
// relax this condition to support partial frame parsing + + //if (parser->info.SliceHeader.first_mb_in_slice == 0) + { + /** + * picture parameter only needs to be set once, + * even if multiple slices are encoded + */ + + /* VAPictureParameterBufferH264 */ + pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE; + pic_parms->CurrPic.frame_idx = 0; + if (parser->info.img.field_pic_flag == 1) + { + if (parser->info.img.bottom_field_flag) + { + pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD; + } + else + { + /* also OK set to 0 (from test suite) */ + pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD; + } + } + else + { + pic_parms->CurrPic.flags = 0; /* frame picture */ + } + pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc; + pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc; + pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num; + + /* don't care if current frame is used as long term reference */ + if (parser->info.SliceHeader.nal_ref_idc != 0) + { + pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE; + } + + pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1; + pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * + (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1; + pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8; + pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8; + pic_parms->seq_fields.value = 0; + pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc; + pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag; + pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag; + pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag; + pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag; + + /* new fields in libva 0.31 */ + pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag; + pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4; + pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type; + pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4; + pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag =parser->info.active_SPS.delta_pic_order_always_zero_flag; + + + /* referenced from UMG_Moorstown_TestSuites */ + pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ?
1 : 0; + + pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1; + pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type; + pic_parms->slice_group_change_rate_minus1 = 0; + pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26; + pic_parms->pic_init_qs_minus26 = 0; + pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset; + pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset; + + pic_parms->pic_fields.value = 0; + pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag; + pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag; + pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc; + pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag; + + /* new LibVA fields in v0.31*/ + pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag; + pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag; + pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag; + pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0; + + /* all slices in the picture have the same field_pic_flag */ + pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag; + pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag; + + pic_parms->frame_num = parser->info.SliceHeader.frame_num; + } + + + /* set reference frames, and num_ref_frames */ + vbp_set_reference_frames_h264secure(parser, pic_parms); + if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + int frame_idx; + for (frame_idx = 0; frame_idx < 16; frame_idx++) + { + pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE; + pic_parms->ReferenceFrames[frame_idx].frame_idx = 0; + pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID; + pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0; + pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0; + } + /* num of reference frame is 0 if current picture is IDR */ + pic_parms->num_ref_frames = 0; + } + else + { + /* actual num_ref_frames is set in vbp_set_reference_frames_h264 */ + } + + VTRACE("vbp_update_pic_data_h264secure ---"); + return VBP_OK; +} + +static uint32_t vbp_add_pic_data_h264secure(vbp_context *pcontext, int list_index) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + + vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; + struct h264_viddec_parser* parser = NULL; + vbp_picture_data_h264* pic_data = NULL; + VAPictureParameterBufferH264* pic_parms = NULL; + + parser = (struct h264_viddec_parser *)cxt->codec_data; + + if (0 == parser->info.SliceHeader.first_mb_in_slice) + { + /* a new picture is parsed */ + query_data->num_pictures = 1; } if (query_data->num_pictures == 0) @@ -1201,10 +1354,15 @@ static uint32_t vbp_add_pic_data_h264secure(vbp_context *pcontext) } pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1; + + /* frame height in MBs */ pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
(parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1; + pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8; pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8; + + pic_parms->seq_fields.value = 0; pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc; pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag; @@ -1272,11 +1430,123 @@ static uint32_t vbp_add_pic_data_h264secure(vbp_context *pcontext) /* actual num_ref_frames is set in vbp_set_reference_frames_h264 */ } - VTRACE("vbp_add_pic_data_h264secure ---"); return VBP_OK; } -static uint32_t vbp_add_slice_data_h264secure(vbp_context *pcontext, uint32 key) +static uint32_t vbp_add_slice_data_h264secure(vbp_context *pcontext, int index) +{ + viddec_pm_cxt_t *cxt = pcontext->parser_cxt; + uint32 bit, byte; + uint8 is_emul; + + vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data; + VASliceParameterBufferH264 *slc_parms = NULL; + vbp_slice_data_h264 *slc_data = NULL; + struct h264_viddec_parser* h264_parser = NULL; + h264_Slice_Header_t* slice_header = NULL; + vbp_picture_data_h264* pic_data = NULL; + + h264_parser = (struct h264_viddec_parser *)cxt->codec_data; + int pic_data_index = query_data->num_pictures - 1; + if (pic_data_index < 0) + { + ETRACE("invalid picture data index."); + return VBP_DATA; + } + + pic_data = &(query_data->pic_data[pic_data_index]); + + slc_data = &(pic_data->slc_data[pic_data->num_slices]); + slc_data->buffer_addr = cxt->parse_cubby.buf; + slc_parms = &(slc_data->slc_parms); + + /* byte: how many bytes have been parsed */ + /* bit: bits parsed within the current parsing position */ + viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul); + + slc_data->nal_unit_type = h264_parser->info.nal_unit_type; + + slc_parms->slice_data_size = slc_data->slice_size = + pcontext->parser_cxt->list.data[index].edpos - + pcontext->parser_cxt->list.data[index].stpos; + + /* the offset to the NAL start code for this slice */ + slc_data->slice_offset = cxt->list.data[index].stpos; + slc_parms->slice_data_offset = 0; + + /* whole slice is in this buffer */ + slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL; + + slice_header = &(h264_parser->info.SliceHeader); + slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice; + slc_parms->slice_type = slice_header->slice_type; + + /* bit offset from NAL start code to the beginning of slice data */ + slc_parms->slice_data_bit_offset = bit + byte * 8; + + if (is_emul) + { + WTRACE("next byte is emulation prevention byte."); + /*slc_parms->slice_data_bit_offset += 8; */ + } + + if (cxt->getbits.emulation_byte_counter != 0) + { + slc_parms->slice_data_bit_offset -= cxt->getbits.emulation_byte_counter * 8; + } + + slice_header = &(h264_parser->info.SliceHeader); + slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice; + + if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag & + (!(h264_parser->info.SliceHeader.field_pic_flag))) + { + slc_parms->first_mb_in_slice /= 2; + } + + slc_parms->slice_type = slice_header->slice_type; + + slc_parms->direct_spatial_mv_pred_flag = slice_header->direct_spatial_mv_pred_flag; + + slc_parms->num_ref_idx_l0_active_minus1 = 0; + slc_parms->num_ref_idx_l1_active_minus1 = 0; + + if (slice_header->slice_type == h264_PtypeP) + { + slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; + } + else if 
(slice_header->slice_type == h264_PtypeB) + { + slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1; + slc_parms->num_ref_idx_l1_active_minus1 = slice_header->num_ref_idx_l1_active - 1; + } + else if (slice_header->slice_type != h264_PtypeI) + { + WTRACE("slice type %d is not supported.", slice_header->slice_type); + } + + slc_parms->cabac_init_idc = slice_header->cabac_init_idc; + slc_parms->slice_qp_delta = slice_header->slice_qp_delta; + slc_parms->disable_deblocking_filter_idc = slice_header->disable_deblocking_filter_idc; + slc_parms->slice_alpha_c0_offset_div2 = slice_header->slice_alpha_c0_offset_div2; + slc_parms->slice_beta_offset_div2 = slice_header->slice_beta_offset_div2; + + vbp_set_pre_weight_table_h264secure(h264_parser, slc_parms); + vbp_set_slice_ref_list_h264secure(h264_parser, slc_parms); + + pic_data->num_slices++; + + if (pic_data->num_slices > MAX_NUM_SLICES) + { + ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES); + return VBP_DATA; + } + + return VBP_OK; +} + + +static uint32_t vbp_update_slice_data_h264secure(vbp_context *pcontext, uint32 key) { VTRACE("vbp_add_slice_data_h264secure +++"); viddec_pm_cxt_t *cxt = pcontext->parser_cxt; @@ -1765,10 +2035,20 @@ uint32 vbp_process_parsing_result_h264secure( vbp_context *pcontext, int i) { case h264_NAL_UNIT_TYPE_SLICE: VTRACE("slice header is parsed."); + error = vbp_add_pic_data_h264secure(pcontext, i); + if (VBP_OK == error) + { + error = vbp_add_slice_data_h264secure(pcontext, i); + } break; case h264_NAL_UNIT_TYPE_IDR: VTRACE("IDR header is parsed."); + error = vbp_add_pic_data_h264secure(pcontext, i); + if (VBP_OK == error) + { + error = vbp_add_slice_data_h264secure(pcontext, i); + } break; case h264_NAL_UNIT_TYPE_SEI: VTRACE("SEI header is parsed."); @@ -1892,6 +2172,12 @@ uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 s sliceheader_p->weight = (int16_t *)((uint8_t *)sliceheader_p->reorder_cmd + reordercmdnum * sizeof(uint32)); sliceheader_p->pic_marking = (uint32_t *)((uint8_t *)sliceheader_p->weight + weight_pos); + + if (sliceheader_p->parsedSliceHeader.size <= 0) { + ETRACE("Invalid slice header size %d", sliceheader_p->parsedSliceHeader.size); + return VBP_DATA; + } + offset += sliceheader_p->parsedSliceHeader.size; error = pcontext->parser_ops->update_data(pcontext->parser_cxt, sliceheader_p, sizeof(vbp_h264_sliceheader)); @@ -1901,14 +2187,14 @@ uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 s return error; } - error = vbp_add_pic_data_h264secure(pcontext); + error = vbp_update_pic_data_h264secure(pcontext); if (error != VBP_OK) { ETRACE("vbp_add_pic_data_h264secure error = 0x%x",error); return error; } - error = vbp_add_slice_data_h264secure(pcontext,key); + error = vbp_update_slice_data_h264secure(pcontext,key); if (error != VBP_OK) { ETRACE("vbp_add_slice_data_h264secure error = 0x%x",error); @@ -1935,4 +2221,3 @@ uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 s return error; } - diff --git a/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c index 9490ddd..ef03351 100755 --- a/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c +++ b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c @@ -56,7 +56,7 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) switch ( pInfo->nal_unit_type ) { case h264_NAL_UNIT_TYPE_IDR: - if 
(pInfo->got_start) { + if (pInfo->got_start) { pInfo->img.recovery_point_found |= 1; } @@ -83,7 +83,7 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t)); next_SliceHeader.nal_ref_idc = nal_ref_idc; - if ( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start)) + if ((1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start)) { pInfo->img.recovery_point_found |=4; } @@ -199,7 +199,6 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) h264_update_img_info(pInfo); } - ////////////////////////////////////////////////////////////// // Step 4: DPB reference list init and reordering ////////////////////////////////////////////////////////////// diff --git a/mixvbp/vbp_plugin/h264/secvideo/merrifield/viddec_h264secure_parse.c b/mixvbp/vbp_plugin/h264/secvideo/merrifield/viddec_h264secure_parse.c index 2e5ac06..06efe5f 100755 --- a/mixvbp/vbp_plugin/h264/secvideo/merrifield/viddec_h264secure_parse.c +++ b/mixvbp/vbp_plugin/h264/secvideo/merrifield/viddec_h264secure_parse.c @@ -133,7 +133,127 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) pInfo->sei_rp_received = 0; case h264_NAL_UNIT_TYPE_SLICE: - pInfo->has_slice = 1; + { + pInfo->has_slice = 1; + h264_Slice_Header_t next_SliceHeader; + /// Reset next slice header + h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t)); + next_SliceHeader.nal_ref_idc = nal_ref_idc; + + if ((1 == pInfo->primary_pic_type_plus_one) && (pInfo->got_start)) + { + pInfo->img.recovery_point_found |= 4; + } + pInfo->primary_pic_type_plus_one = 0; + + //////////////////////////////////////////////////////////////////////////// + // Step 2: Parsing slice header + //////////////////////////////////////////////////////////////////////////// + /// PWT + pInfo->h264_pwt_start_byte_offset = 0; + pInfo->h264_pwt_start_bit_offset = 0; + pInfo->h264_pwt_end_byte_offset = 0; + pInfo->h264_pwt_end_bit_offset = 0; + pInfo->h264_pwt_enabled = 0; + /// IDR flag + next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); + + /// Pass slice header + status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP(parent, pInfo, &next_SliceHeader); + pInfo->sei_information.recovery_point = 0; + + if (next_SliceHeader.sh_error & 3) + { + ETRACE("Slice Header parsing error.\n"); + break; + } + pInfo->img.current_slice_num++; + + //////////////////////////////////////////////////////////////////////////// + // Step 3: Processing if new picture coming + // 1) if it's the second field + // 2) if it's a new frame + //////////////////////////////////////////////////////////////////////////// + //AssignQuantParam(pInfo); + if (h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader)) + { + // + ///----------------- New Picture.boundary detected-------------------- + // + pInfo->img.g_new_pic++; + // + // Complete previous picture + h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old + //h264_hdr_post_poc(0, 0, use_old); + + // + // Update slice structures: + h264_update_old_slice(pInfo, next_SliceHeader); //cur->old; next->cur; + + // + // 1) if resolution change: reset dpb + // 2) else: init frame store + h264_update_img_info(pInfo); //img, dpb + + // + ///----------------- New frame.boundary detected-------------------- + // + pInfo->img.second_field = h264_is_second_field(pInfo); + if (pInfo->img.second_field == 0) + { + pInfo->img.g_new_frame = 1; + h264_dpb_update_queue_dangling_field(pInfo); + // + /// DPB management + /// 1) check the gaps + 
/// 2) assign fs for non-exist frames + /// 3) fill the gaps + /// 4) store frame into DPB if ... + // + //if(pInfo->SliceHeader.redundant_pic_cnt) + { + h264_dpb_gaps_in_frame_num_mem_management(pInfo); + } + } + // + /// Decoding POC + h264_hdr_decoding_poc (pInfo, 0, 0); + // + /// Init Frame Store for next frame + h264_dpb_init_frame_store (pInfo); + pInfo->img.current_slice_num = 1; + if (pInfo->SliceHeader.first_mb_in_slice != 0) + { + ////Come here means we have slice lost at the beginning, since no FMO support + pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17); + } + /// Emit out the New Frame + if (pInfo->img.g_new_frame) + { + h264_parse_emit_start_new_frame(parent, pInfo); + } + h264_parse_emit_current_pic(parent, pInfo); + } + else ///////////////////////////////////////////////////// If Not a picture start + { + // + /// Update slice structures: cur->old; next->cur; + h264_update_old_slice(pInfo, next_SliceHeader); + // + /// 1) if resolution change: reset dpb + /// 2) else: update img info + h264_update_img_info(pInfo); + } + ////////////////////////////////////////////////////////////// + // Step 4: DPB reference list init and reordering + ////////////////////////////////////////////////////////////// + //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field + h264_update_frame_type(pInfo); + + h264_dpb_update_ref_lists(pInfo); + /// Emit out the current "good" slice + h264_parse_emit_current_slice(parent, pInfo); + } break; ///// * Main profile doesn't support Data Partition, skipped.... *//// @@ -196,7 +316,7 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) //h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set)); // h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id); VTRACE("old_sps_id==pInfo->active_SPS.seq_parameter_set_id"); - // pInfo->active_SPS.seq_parameter_set_id = 0xff; + pInfo->active_SPS.seq_parameter_set_id = 0xff; } } diff --git a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c index 28a319a..b3639c4 100755 --- a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c +++ b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c @@ -102,7 +102,6 @@ uint32_t viddec_h264_parse(void *parent, void *ctxt) /// IDR flag next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); - /// Pass slice header status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP(parent, pInfo, &next_SliceHeader); diff --git a/test/Android.mk b/test/Android.mk index 6140f34..a231a46 100644 --- a/test/Android.mk +++ b/test/Android.mk @@ -6,9 +6,9 @@ LOCAL_SRC_FILES := mix_decoder.cpp LOCAL_C_INCLUDES := \ $(TARGET_OUT_HEADERS)/libva_videodecoder \ - $(TOP)/vendor/intel/hardware/PRIVATE/libmix/videodecoder \ $(TARGET_OUT_HEADERS)/libva \ $(TARGET_OUT_HEADERS)/libmixvbp \ + $(TARGET_OUT_HEADERS)/libmix_videodecoder LOCAL_SHARED_LIBRARIES := \ libva_videodecoder liblog libva diff --git a/test/mix_decoder.cpp b/test/mix_decoder.cpp index ef4e310..ab82747 100755 --- a/test/mix_decoder.cpp +++ b/test/mix_decoder.cpp @@ -5,17 +5,18 @@ #include #include #include - +#include #define INPUTSIZE (4*1024*1024) static int gImgWidth; static int gImgHeight; static int gCodec; static int gOutputSize; +static int gFrame; void CheckArgs(int argc, char* argv[]) { char c; - while ((c =getopt(argc, argv,"c:w:h:?") ) != EOF) { + while ((c =getopt(argc, argv,"c:w:h:f:?") ) != EOF) { switch (c) { case 'w': gImgWidth = atoi(optarg); @@ -26,6 +27,9 @@ void CheckArgs(int argc, char* 
argv[]) case 'c': gCodec = atoi(optarg); break; + case 'f': + gFrame = atoi(optarg); + break; case '?': default: ALOGI("./mix_encode -c Codec -w SrcWidth -h SrcHeight"); @@ -52,6 +56,8 @@ int main(int argc, char* argv[]) uint32_t out_size; char *codecname = NULL; + uint8_t nalutype; + char codecnamelist[2][32] = {"video/avc", "video/avc-secure"}; CheckArgs(argc, argv); @@ -71,6 +77,13 @@ int main(int argc, char* argv[]) return -1; } + if (gFrame < 0) { + ALOGE("Err: wrong frame number = %d", gFrame); + return -1; + } + + framenum = gFrame; + gOutputSize = gImgWidth * gImgHeight * 3/2; VideoDecodeBuffer buffer; @@ -90,6 +103,7 @@ int main(int argc, char* argv[]) configBuffer.width = gImgWidth; configBuffer.height = gImgHeight; + configBuffer.flag |= IS_SUBSAMPLE_ENCRYPTION; testDecoder->start(&configBuffer); @@ -105,8 +119,12 @@ int main(int argc, char* argv[]) return -1; } + frame_info_t frame_info; + for (frameidx = 0; frameidx < framenum; frameidx++) { - sprintf(inputfilename, "/data/decrypted_frame/decrypted_frame_%d.h264", frameidx); + + memset(inBuf, 0, INPUTSIZE); + sprintf(inputfilename, "/data/bitstream/frame_%04d.bin", frameidx); if((fp_in = fopen(inputfilename,"rb")) == NULL) { ALOGE("Fail to open inputfilename %s", inputfilename); return -1; @@ -121,10 +139,52 @@ int main(int argc, char* argv[]) } fread(inBuf, 1, in_size, fp_in); fclose(fp_in); - memset(&buffer, 0, sizeof(VideoDecodeBuffer)); - buffer.data = inBuf; - buffer.size = in_size; + + nalutype = inBuf[4] & 0x1F; + if (nalutype == 0x07 || nalutype == 0x08) { + ALOGV("Clear SPS/PPS is sent"); + frame_info.data = inBuf; + frame_info.size = in_size; + frame_info.num_nalus = 1; + frame_info.nalus[0].data = inBuf; + frame_info.nalus[0].length = in_size; + frame_info.nalus[0].type = inBuf[4]; + frame_info.nalus[0].offset = 0; + buffer.data = (uint8_t *)&frame_info; + buffer.size = sizeof(frame_info_t); + buffer.flag |= IS_SECURE_DATA; + + // buffer.data = inBuf; + // buffer.size = in_size; + } else { +#if 0 + ALOGV("Encrypted slice data is sent"); + frame_info.data = (uint8_t *) &inBuf[5]; + frame_info.size = in_size - 5; + frame_info.subsamplenum = 1; + frame_info.subsampletable[0].subsample_type = inBuf[4]; + frame_info.subsampletable[0].subsample_size = in_size - 5; +#endif + ALOGV("Encrypted slice data is sent"); + frame_info.data = inBuf; + frame_info.size = in_size; + frame_info.num_nalus = 2; + frame_info.nalus[0].offset = 0; + frame_info.nalus[0].type = 0x06; + frame_info.nalus[0].length = 5; + frame_info.nalus[0].data = NULL; + + frame_info.nalus[1].offset = 5; + frame_info.nalus[1].type = inBuf[4]; + frame_info.nalus[1].length = in_size - 5; + frame_info.nalus[1].data = NULL; + + buffer.data = (uint8_t *)&frame_info; + buffer.size = sizeof(frame_info_t); + buffer.flag |= IS_SECURE_DATA; + } + buffer.rotationDegrees = 0; buffer.timeStamp = frameidx; @@ -138,7 +198,7 @@ int main(int argc, char* argv[]) renderbuf->renderDone = true; ALOGV("Output frame %d, out_size = %d", outidx, out_size); - sprintf(outputfilename, "/data/decodedframe/frame_%d.bin", outidx++); + sprintf(outputfilename, "/data/outputsurface/frame_%04d.bin", outidx++); if((fp_out = fopen(outputfilename,"wb")) == NULL) { ALOGE("Fail to open outputfile: %s", outputfilename); return -1; diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index 6041456..eef5828 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -71,6 +71,10 @@ LOCAL_COPY_HEADERS := \ VideoDecoderInterface.h \ VideoDecoderDefs.h +ifneq ($(filter 
$(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_SLICE_HEADER_PARSER)),) + LOCAL_COPY_HEADERS += securevideo/merrifield/VideoFrameInfo.h +endif + LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libva_videodecoder diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 31bbac3..a1e3866 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -804,7 +804,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i status = checkHardwareCapability(profile); CHECK_STATUS("checkHardwareCapability"); -#ifdef USE_AVC_SHORT_FORMAT +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) status = getCodecSpecificConfigs(profile, &mVAConfig); CHECK_STATUS("getCodecSpecificAttributes"); #else diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h index 67139f0..9ab59dc 100644 --- a/videodecoder/VideoDecoderDefs.h +++ b/videodecoder/VideoDecoderDefs.h @@ -116,6 +116,10 @@ typedef enum { // indicate adaptive playback mode WANT_ADAPTIVE_PLAYBACK = 0x100000, + + // indicate the modular drm type + IS_SUBSAMPLE_ENCRYPTION = 0x200000, + } VIDEO_BUFFER_FLAG; typedef enum diff --git a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp index 671e8bd..08b9ef0 100755 --- a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp @@ -21,12 +21,14 @@ * approved by Intel in writing. * */ + #include #include "VideoDecoderBase.h" #include "VideoDecoderAVC.h" #include "VideoDecoderTrace.h" #include "vbp_loader.h" #include "VideoDecoderAVCSecure.h" +#include "VideoFrameInfo.h" #define MAX_SLICEHEADER_BUFFER_SIZE 4096 #define STARTCODE_PREFIX_LEN 3 @@ -61,12 +63,20 @@ VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) mFrameData = NULL; mIsEncryptData = 0; mClearData = NULL; + mCachedHeader = NULL; setParserType(VBP_H264SECURE); mFrameIdx = 0; + mModularMode = 0; + mSliceNum = 0; } Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { VTRACE("VideoDecoderAVCSecure::start"); + + if (buffer->flag & IS_SUBSAMPLE_ENCRYPTION) { + mModularMode = 1; + } + Decode_Status status = VideoDecoderAVC::start(buffer); if (status != DECODE_SUCCESS) { return status; @@ -78,6 +88,12 @@ Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { return DECODE_MEMORY_FAIL; } + mCachedHeader= new uint8_t [MAX_SLICEHEADER_BUFFER_SIZE]; + if (mCachedHeader == NULL) { + ETRACE("Failed to allocate memory for mCachedHeader"); + return DECODE_MEMORY_FAIL; + } + return status; } @@ -89,16 +105,130 @@ void VideoDecoderAVCSecure::stop(void) { delete [] mClearData; mClearData = NULL; } -} -Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { - VTRACE("VideoDecoderAVCSecure::decode"); + if (mCachedHeader) { + delete [] mCachedHeader; + mCachedHeader = NULL; + } +} +Decode_Status VideoDecoderAVCSecure::processModularInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data) +{ + VTRACE("processModularInputBuffer +++"); Decode_Status status; - vbp_data_h264 *data = NULL; - if (buffer == NULL) { - return DECODE_INVALID_DATA; + int32_t clear_data_size = 0; + uint8_t *clear_data = NULL; + + int32_t nalu_num = 0; + uint8_t nalu_type = 0; + int32_t nalu_offset = 0; + uint32_t nalu_size = 0; + uint8_t naluType = 0; + uint8_t *nalu_data = NULL; + uint32_t sliceidx = 0; + + frame_info_t *pFrameInfo = NULL; + mSliceNum = 0; + 
memset(&mSliceInfo, 0, sizeof(mSliceInfo)); + mIsEncryptData = 0; + + if (buffer->flag & IS_SECURE_DATA) { + VTRACE("Decoding protected video ..."); + pFrameInfo = (frame_info_t *) buffer->data; + if (pFrameInfo == NULL) { + ETRACE("Invalid parameter: pFrameInfo is NULL!"); + return DECODE_MEMORY_FAIL; + } + + mFrameData = pFrameInfo->data; + mFrameSize = pFrameInfo->size; + VTRACE("mFrameData = %p, mFrameSize = %d", mFrameData, mFrameSize); + + nalu_num = pFrameInfo->num_nalus; + VTRACE("nalu_num = %d", nalu_num); + + if (nalu_num <= 0 || nalu_num >= MAX_NUM_NALUS) { + ETRACE("Invalid parameter: nalu_num = %d", nalu_num); + return DECODE_MEMORY_FAIL; + } + + for (int32_t i = 0; i < nalu_num; i++) { + + nalu_size = pFrameInfo->nalus[i].length; + nalu_type = pFrameInfo->nalus[i].type; + nalu_offset = pFrameInfo->nalus[i].offset; + nalu_data = pFrameInfo->nalus[i].data; + naluType = nalu_type & NALU_TYPE_MASK; + + VTRACE("nalu_type = 0x%x, nalu_size = %d, nalu_offset = 0x%x", nalu_type, nalu_size, nalu_offset); + + if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) { + + mIsEncryptData = 1; + VTRACE("slice idx = %d", sliceidx); + mSliceInfo[sliceidx].sliceHeaderByte = nalu_type; + mSliceInfo[sliceidx].sliceStartOffset = (nalu_offset >> 4) << 4; + mSliceInfo[sliceidx].sliceByteOffset = nalu_offset - mSliceInfo[sliceidx].sliceStartOffset; + mSliceInfo[sliceidx].sliceLength = nalu_size; + mSliceInfo[sliceidx].sliceSize = (mSliceInfo[sliceidx].sliceByteOffset + nalu_size + 0xF) & ~0xF; + VTRACE("sliceHeaderByte = 0x%x", mSliceInfo[sliceidx].sliceHeaderByte); + VTRACE("sliceStartOffset = %d", mSliceInfo[sliceidx].sliceStartOffset); + VTRACE("sliceByteOffset = %d", mSliceInfo[sliceidx].sliceByteOffset); + VTRACE("sliceSize = %d", mSliceInfo[sliceidx].sliceSize); + +#if 0 + uint32_t testsize; + uint8_t *testdata; + testsize = mSliceInfo[sliceidx].sliceSize > 64 ? 
64 : mSliceInfo[sliceidx].sliceSize ; + testdata = (uint8_t *)(mFrameData); + for (int i = 0; i < testsize; i++) { + VTRACE("testdata[%d] = 0x%x", i, testdata[i]); + } +#endif + sliceidx++; + + } else if (naluType == h264_NAL_UNIT_TYPE_SPS || naluType == h264_NAL_UNIT_TYPE_PPS) { + if (nalu_data == NULL) { + ETRACE("Invalid parameter: nalu_data = NULL for naluType 0x%x", naluType); + return DECODE_MEMORY_FAIL; + } + memcpy(mClearData + clear_data_size, + nalu_data, + nalu_size); + clear_data_size += nalu_size; + } else { + ITRACE("Nalu type = 0x%x is skipped", naluType); + continue; + } + } + clear_data = mClearData; + mSliceNum = sliceidx; + + } else { + VTRACE("Decoding clear video ..."); + mIsEncryptData = 0; + mFrameSize = buffer->size; + mFrameData = buffer->data; + clear_data = buffer->data; + clear_data_size = buffer->size; } + if (clear_data_size > 0) { + status = VideoDecoderBase::parseBuffer( + clear_data, + clear_data_size, + false, + (void**)data); + CHECK_STATUS("VideoDecoderBase::parseBuffer"); + } else { + status = VideoDecoderBase::queryBuffer((void**)data); + CHECK_STATUS("VideoDecoderBase::queryBuffer"); + } + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderAVCSecure::processClassicInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data) +{ + Decode_Status status; int32_t clear_data_size = 0; uint8_t *clear_data = NULL; uint8_t naluType = 0; @@ -109,8 +239,7 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { uint8_t *data_src; uint8_t *nalu_data; uint32_t nalu_size; -// uint32_t testsize; -// uint8_t *testdata; + if (buffer->flag & IS_SECURE_DATA) { VTRACE("Decoding protected video ..."); mIsEncryptData = 1; @@ -118,13 +247,6 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { mFrameData = buffer->data; mFrameSize = buffer->size; VTRACE("mFrameData = %p, mFrameSize = %d", mFrameData, mFrameSize); -#if 0 - testsize = *(uint32_t *)(buffer->data + buffer->size); - testdata = (uint8_t *)(buffer->data + buffer->size + sizeof(uint32_t)); - for (int i = 0; i < testsize; i++) { - VTRACE("testdata[%d] = 0x%x", i, testdata[i]); - } -#endif num_nalus = *(uint32_t *)(buffer->data + buffer->size + sizeof(uint32_t)); VTRACE("num_nalus = %d", num_nalus); offset = 4; @@ -160,7 +282,6 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { } } clear_data = mClearData; - } else { VTRACE("Decoding clear video ..."); mIsEncryptData = 0; @@ -169,17 +290,47 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { clear_data = buffer->data; clear_data_size = buffer->size; } + if (clear_data_size > 0) { status = VideoDecoderBase::parseBuffer( clear_data, clear_data_size, false, - (void**)&data); + (void**)data); CHECK_STATUS("VideoDecoderBase::parseBuffer"); } else { - status = VideoDecoderBase::queryBuffer((void**)&data); + status = VideoDecoderBase::queryBuffer((void**)data); CHECK_STATUS("VideoDecoderBase::queryBuffer"); } + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { + VTRACE("VideoDecoderAVCSecure::decode"); + Decode_Status status; + vbp_data_h264 *data = NULL; + if (buffer == NULL) { + return DECODE_INVALID_DATA; + } + +#if 0 + uint32_t testsize; + uint8_t *testdata; + testsize = buffer->size > 16 ? 
16:buffer->size ; + testdata = (uint8_t *)(buffer->data); + for (int i = 0; i < 16; i++) { + VTRACE("testdata[%d] = 0x%x", i, testdata[i]); + } +#endif + + if (mModularMode) { + status = processModularInputBuffer(buffer,&data); + CHECK_STATUS("processModularInputBuffer"); + } + else { + status = processClassicInputBuffer(buffer,&data); + CHECK_STATUS("processClassicInputBuffer"); + } if (!mVAStarted) { if (data->has_sps && data->has_pps) { @@ -190,6 +341,7 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { return DECODE_SUCCESS; } } + status = decodeFrame(buffer, data); return status; @@ -220,6 +372,15 @@ Decode_Status VideoDecoderAVCSecure::decodeFrame(VideoDecodeBuffer *buffer, vbp_ CHECK_STATUS("handleNewSequence"); } + if (mModularMode && (!mIsEncryptData)) { + if (data->pic_data[0].num_slices == 0) { + ITRACE("No slice available for decoding."); + status = mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS; + mSizeChanged = false; + return status; + } + } + uint64_t lastPTS = mCurrentPTS; mCurrentPTS = buffer->timeStamp; @@ -227,9 +388,13 @@ Decode_Status VideoDecoderAVCSecure::decodeFrame(VideoDecodeBuffer *buffer, vbp_ status = acquireSurfaceBuffer(); CHECK_STATUS("acquireSurfaceBuffer"); - if (mFrameSize > 0) { - status = parseSliceHeader(buffer, data); + if (mModularMode) { + parseModularSliceHeader(buffer,data); + } + else { + parseClassicSliceHeader(buffer,data); } + if (status != DECODE_SUCCESS) { endDecodingFrame(true); return status; @@ -271,6 +436,11 @@ Decode_Status VideoDecoderAVCSecure::beginDecodingFrame(vbp_data_h264 *data) { mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS; mAcquiredBuffer->pictureOrder = getPOC(picture); + if (mSizeChanged) { + mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE; + mSizeChanged = false; + } + status = continueDecodingFrame(data); return status; } @@ -309,7 +479,7 @@ Decode_Status VideoDecoderAVCSecure::continueDecodingFrame(vbp_data_h264 *data) return DECODE_SUCCESS; } -Decode_Status VideoDecoderAVCSecure::parseSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data) { +Decode_Status VideoDecoderAVCSecure::parseClassicSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data) { Decode_Status status; VAStatus vaStatus; @@ -317,6 +487,9 @@ Decode_Status VideoDecoderAVCSecure::parseSliceHeader(VideoDecodeBuffer *buffer, VABufferID pictureparameterparsingbufferID; VABufferID mSlicebufferID; + if (mFrameSize <= 0) { + return DECODE_SUCCESS; + } vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); CHECK_VA_STATUS("vaBeginPicture"); @@ -415,6 +588,130 @@ Decode_Status VideoDecoderAVCSecure::parseSliceHeader(VideoDecodeBuffer *buffer, return DECODE_SUCCESS; } +Decode_Status VideoDecoderAVCSecure::parseModularSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data) { + Decode_Status status; + VAStatus vaStatus; + + VABufferID sliceheaderbufferID; + VABufferID pictureparameterparsingbufferID; + VABufferID mSlicebufferID; + int32_t sliceIdx; + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); + + if (mFrameSize <= 0 || mSliceNum <=0) { + return DECODE_SUCCESS; + } + void *sliceheaderbuf; + memset(mCachedHeader, 0, MAX_SLICEHEADER_BUFFER_SIZE); + int32_t offset = 0; + int32_t size = 0; + + for (sliceIdx = 0; sliceIdx < mSliceNum; sliceIdx++) { + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAParseSliceHeaderGroupBufferType, + MAX_SLICEHEADER_BUFFER_SIZE, + 1, + NULL, + 
&sliceheaderbufferID); + CHECK_VA_STATUS("vaCreateSliceHeaderGroupBuffer"); + + vaStatus = vaMapBuffer( + mVADisplay, + sliceheaderbufferID, + &sliceheaderbuf); + CHECK_VA_STATUS("vaMapBuffer"); + + memset(sliceheaderbuf, 0, MAX_SLICEHEADER_BUFFER_SIZE); + + vaStatus = vaUnmapBuffer( + mVADisplay, + sliceheaderbufferID); + CHECK_VA_STATUS("vaUnmapBuffer"); + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + mSliceInfo[sliceIdx].sliceSize, //size + 1, //num_elements + mFrameData + mSliceInfo[sliceIdx].sliceStartOffset, + &mSlicebufferID); + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + + data->pic_parse_buffer->frame_buf_id = mSlicebufferID; + data->pic_parse_buffer->slice_headers_buf_id = sliceheaderbufferID; + data->pic_parse_buffer->frame_size = mSliceInfo[sliceIdx].sliceLength; + data->pic_parse_buffer->slice_headers_size = MAX_SLICEHEADER_BUFFER_SIZE; + data->pic_parse_buffer->nalu_header.value = mSliceInfo[sliceIdx].sliceHeaderByte; + data->pic_parse_buffer->slice_offset = mSliceInfo[sliceIdx].sliceByteOffset; + +#if 0 + VTRACE("data->pic_parse_buffer->slice_offset = 0x%x", data->pic_parse_buffer->slice_offset); + VTRACE("pic_parse_buffer->nalu_header.value = %x", data->pic_parse_buffer->nalu_header.value = mSliceInfo[sliceIdx].sliceHeaderByte); + VTRACE("flags.bits.frame_mbs_only_flag = %d", data->pic_parse_buffer->flags.bits.frame_mbs_only_flag); + VTRACE("flags.bits.pic_order_present_flag = %d", data->pic_parse_buffer->flags.bits.pic_order_present_flag); + VTRACE("flags.bits.delta_pic_order_always_zero_flag = %d", data->pic_parse_buffer->flags.bits.delta_pic_order_always_zero_flag); + VTRACE("flags.bits.redundant_pic_cnt_present_flag = %d", data->pic_parse_buffer->flags.bits.redundant_pic_cnt_present_flag); + VTRACE("flags.bits.weighted_pred_flag = %d", data->pic_parse_buffer->flags.bits.weighted_pred_flag); + VTRACE("flags.bits.entropy_coding_mode_flag = %d", data->pic_parse_buffer->flags.bits.entropy_coding_mode_flag); + VTRACE("flags.bits.deblocking_filter_control_present_flag = %d", data->pic_parse_buffer->flags.bits.deblocking_filter_control_present_flag); + VTRACE("flags.bits.weighted_bipred_idc = %d", data->pic_parse_buffer->flags.bits.weighted_bipred_idc); + VTRACE("pic_parse_buffer->expected_pic_parameter_set_id = %d", data->pic_parse_buffer->expected_pic_parameter_set_id); + VTRACE("pic_parse_buffer->num_slice_groups_minus1 = %d", data->pic_parse_buffer->num_slice_groups_minus1); + VTRACE("pic_parse_buffer->chroma_format_idc = %d", data->pic_parse_buffer->chroma_format_idc); + VTRACE("pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4 = %d", data->pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4); + VTRACE("pic_parse_buffer->pic_order_cnt_type = %d", data->pic_parse_buffer->pic_order_cnt_type); + VTRACE("pic_parse_buffer->residual_colour_transform_flag = %d", data->pic_parse_buffer->residual_colour_transform_flag); + VTRACE("pic_parse_buffer->num_ref_idc_l0_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l0_active_minus1); + VTRACE("pic_parse_buffer->num_ref_idc_l1_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l1_active_minus1); +#endif + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAParsePictureParameterBufferType, + sizeof(VAParsePictureParameterBuffer), + 1, + data->pic_parse_buffer, + &pictureparameterparsingbufferID); + CHECK_VA_STATUS("vaCreatePictureParameterParsingBuffer"); + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + &pictureparameterparsingbufferID, + 1); + 
CHECK_VA_STATUS("vaRenderPicture"); + + vaStatus = vaMapBuffer( + mVADisplay, + sliceheaderbufferID, + &sliceheaderbuf); + CHECK_VA_STATUS("vaMapBuffer"); + + size = *(uint32 *)((uint8 *)sliceheaderbuf + 4) + 4; + VTRACE("slice header size = 0x%x, offset = 0x%x", size, offset); + if (offset + size <= MAX_SLICEHEADER_BUFFER_SIZE - 4) { + memcpy(mCachedHeader+offset, sliceheaderbuf, size); + offset += size; + } else { + WTRACE("Cached slice header is not big enough!"); + } + vaStatus = vaUnmapBuffer( + mVADisplay, + sliceheaderbufferID); + CHECK_VA_STATUS("vaUnmapBuffer"); + } + memset(mCachedHeader + offset, 0xFF, 4); + status = updateSliceParameter(data,mCachedHeader); + CHECK_STATUS("processSliceHeader"); + return DECODE_SUCCESS; +} + + Decode_Status VideoDecoderAVCSecure::updateSliceParameter(vbp_data_h264 *data, void *sliceheaderbuf) { VTRACE("VideoDecoderAVCSecure::updateSliceParameter"); Decode_Status status; @@ -437,6 +734,8 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); VAPictureParameterBufferH264 *picParam = picData->pic_parms; VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); + uint32_t slice_data_size = 0; + uint8_t* slice_data_addr = NULL; if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { // either condition indicates start of a new frame @@ -480,7 +779,21 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p status = setReference(sliceParam); CHECK_STATUS("setReference"); - sliceParam->slice_data_size = mFrameSize; + if (mModularMode) { + if (mIsEncryptData) { + sliceParam->slice_data_size = mSliceInfo[sliceIndex].sliceSize; + slice_data_size = mSliceInfo[sliceIndex].sliceSize; + slice_data_addr = mFrameData + mSliceInfo[sliceIndex].sliceStartOffset; + } else { + slice_data_size = sliceData->slice_size; + slice_data_addr = sliceData->buffer_addr + sliceData->slice_offset; + } + } else { + sliceParam->slice_data_size = mFrameSize; + slice_data_size = mFrameSize; + slice_data_addr = mFrameData; + } + vaStatus = vaCreateBuffer( mVADisplay, mVAContext, @@ -505,9 +818,9 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p mVADisplay, mVAContext, VASliceDataBufferType, - mFrameSize, //size + slice_data_size, //size 1, //num_elements - mFrameData, + slice_data_addr, &slicebufferID); CHECK_VA_STATUS("vaCreateSliceDataBuffer"); @@ -521,3 +834,34 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p return DECODE_SUCCESS; } + +Decode_Status VideoDecoderAVCSecure::getCodecSpecificConfigs( + VAProfile profile, VAConfigID *config) +{ + VAStatus vaStatus; + VAConfigAttrib attrib[2]; + + if (config == NULL) { + ETRACE("Invalid parameter!"); + return DECODE_FAIL; + } + + attrib[0].type = VAConfigAttribRTFormat; + attrib[0].value = VA_RT_FORMAT_YUV420; + attrib[1].type = VAConfigAttribDecSliceMode; + attrib[1].value = VA_DEC_SLICE_MODE_NORMAL; + if (mModularMode) { + attrib[1].value = VA_DEC_SLICE_MODE_SUBSAMPLE; + } + + vaStatus = vaCreateConfig( + mVADisplay, + profile, + VAEntrypointVLD, + &attrib[0], + 2, + config); + CHECK_VA_STATUS("vaCreateConfig"); + + return DECODE_SUCCESS; +} diff --git a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h index 6378243..458196e 100755 --- a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h +++ 
b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h @@ -41,15 +41,37 @@ protected: virtual Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data); virtual Decode_Status continueDecodingFrame(vbp_data_h264 *data); virtual Decode_Status beginDecodingFrame(vbp_data_h264 *data); - Decode_Status parseSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data); + virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID*config); + Decode_Status parseClassicSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data); + Decode_Status parseModularSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data); + Decode_Status updateSliceParameter(vbp_data_h264 *data, void *sliceheaderbuf); virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex); private: + Decode_Status processClassicInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data); + Decode_Status processModularInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data); int32_t mIsEncryptData; int32_t mFrameSize; uint8_t* mFrameData; uint8_t* mClearData; + uint8_t* mCachedHeader; int32_t mFrameIdx; + int32_t mModularMode; + + enum { + MAX_SLICE_HEADER_NUM = 256, + }; + int32_t mSliceNum; + // Information of Slices in the Modular DRM Mode + struct SliceInfo { + uint8_t sliceHeaderByte; // first byte of the slice header + uint32_t sliceStartOffset; // offset of Slice unit in the firewalled buffer + uint32_t sliceByteOffset; // extra offset from the blockAligned slice offset + uint32_t sliceSize; // block aligned length of slice unit + uint32_t sliceLength; // actual size of the slice + }; + + SliceInfo mSliceInfo[MAX_SLICE_HEADER_NUM]; }; #endif diff --git a/videodecoder/securevideo/merrifield/VideoFrameInfo.h b/videodecoder/securevideo/merrifield/VideoFrameInfo.h new file mode 100755 index 0000000..426dfca --- /dev/null +++ b/videodecoder/securevideo/merrifield/VideoFrameInfo.h @@ -0,0 +1,20 @@ +#ifndef VIDEO_FRAME_INFO_H_ +#define VIDEO_FRAME_INFO_H_ + +#define MAX_NUM_NALUS 16 + +typedef struct { + uint8_t type; // nalu type + nal_ref_idc + uint32_t offset; // offset to the pointer of the encrypted data + uint8_t* data; // if the nalu is encrypted, this field is useless; if current NALU is SPS/PPS, data is the pointer to clear SPS/PPS data + uint32_t length; // nalu length +} nalu_info_t; + +typedef struct { + uint8_t* data; // pointer to the encrypted data + uint32_t size; // encrypted data size + uint32_t num_nalus; // number of NALU + nalu_info_t nalus[MAX_NUM_NALUS]; +} frame_info_t; + +#endif -- cgit v1.2.3 From 7df4f61cc45389e4f47cf01a0b066cf45e60bbe5 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Mon, 24 Mar 2014 20:05:21 +0800 Subject: Set the Widevine mode when decode() is called in LibMIX BZ: 181151 OMX component needs to support both classic and modular DRM. 1) In ProcessorStart (OMX in executing state), OMX does not know DRM scheme - it can be either classic or modular 2) liboemcrypto is the only component knowing DRM scheme, the info is only passed to OMX when input buffer is filled (ProcessorProcess). So the widevine mode should be set when decode() function is called in LibMIX. 
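For illustration, a minimal producer-side sketch (the OMX side is not part of this patch; the variable names and call flow here are assumptions) of how each input buffer can be tagged once liboemcrypto reports the scheme, so that decode() can select the mode per buffer:

    // Hypothetical OMX-side sketch: tag a buffer that carries a frame_info_t
    // payload; decode() sets mModularMode when it sees IS_SUBSAMPLE_ENCRYPTION.
    VideoDecodeBuffer buf;
    memset(&buf, 0, sizeof(buf));
    buf.data = (uint8_t *)&frameInfo;    // frame_info_t, as defined in VideoFrameInfo.h
    buf.size = sizeof(frame_info_t);
    buf.flag = IS_SECURE_DATA;
    if (schemeIsModular) {               // known only once the input buffer is filled
        buf.flag |= IS_SUBSAMPLE_ENCRYPTION;
    }
    buf.timeStamp = pts;                 // hypothetical presentation timestamp
    decoder->decode(&buf);               // decoder: a VideoDecoderAVCSecure instance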
Change-Id: If9b8c6c731e1f72c46aab6c7f5d42aa21a76622b Signed-off-by: wfeng6 --- videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp index 08b9ef0..4ff7771 100755 --- a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp @@ -73,10 +73,6 @@ VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { VTRACE("VideoDecoderAVCSecure::start"); - if (buffer->flag & IS_SUBSAMPLE_ENCRYPTION) { - mModularMode = 1; - } - Decode_Status status = VideoDecoderAVC::start(buffer); if (status != DECODE_SUCCESS) { return status; @@ -322,6 +318,9 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { VTRACE("testdata[%d] = 0x%x", i, testdata[i]); } #endif + if (buffer->flag & IS_SUBSAMPLE_ENCRYPTION) { + mModularMode = 1; + } if (mModularMode) { status = processModularInputBuffer(buffer,&data); -- cgit v1.2.3 From 7c1a7935f4ef80dc3b6b6c1bc8f0e1f1c5ca2af7 Mon Sep 17 00:00:00 2001 From: bolunliu Date: Thu, 20 Mar 2014 16:23:09 +0800 Subject: Init variable to avoid some issues. BZ: 179330 Initialize the mRenderMultiTemporal flag in the VideoEncoderBase constructor so it is never read uninitialized. Change-Id: I37b1f6be430f7dae15d974d1d475222b469c0a96 Signed-off-by: bolunliu --- videoencoder/VideoEncoderBase.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 6110343..c14ba3d 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -28,6 +28,7 @@ VideoEncoderBase::VideoEncoderBase() ,mRenderFrameRate(false) ,mRenderBitRate(false) ,mRenderHrd(false) + ,mRenderMultiTemporal(false) ,mSeqParamBuf(0) ,mPicParamBuf(0) ,mSliceParamBuf(0) -- cgit v1.2.3 From b6964e427c9eaebf1dbad34e2f59ada15149fe7d Mon Sep 17 00:00:00 2001 From: bolunliu Date: Tue, 25 Mar 2014 16:01:21 +0800 Subject: Remove setTemporalLayerBitrateFramerate in libmix BZ: 179113 Remove setTemporalLayerBitrateFramerate in libmix. It will be replaced by VideoEncodeCommon. Also rename the related variable.
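For illustration only (an assumed usage sketch, not shown in this patch): with the VP8-specific config removed, per-layer settings travel in the two fields this change adds to VideoRateControlParams in the diff below:

    // Hypothetical caller-side sketch: the two fields come from this patch;
    // the path by which they reach the encoder is an assumption.
    VideoRateControlParams rcParams = currentRcParams; // existing settings (assumed)
    rcParams.temporalID = 1;            // temporal layer being configured
    rcParams.temporalFrameRate = 15;    // frame rate (fps) for that layer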
Change-Id: I9679960a873b9d4a11fcf5b1a5c3294338a0520d Signed-off-by: bolunliu --- videoencoder/VideoEncoderBase.cpp | 5 +++-- videoencoder/VideoEncoderDef.h | 20 ++++-------------- videoencoder/VideoEncoderVP8.cpp | 43 ++------------------------------------- videoencoder/VideoEncoderVP8.h | 1 - 4 files changed, 9 insertions(+), 60 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index c14ba3d..718b9c9 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -887,6 +887,8 @@ void VideoEncoderBase::setDefaultParams() { mComParams.rcParams.disableFrameSkip = 0; mComParams.rcParams.disableBitsStuffing = 1; mComParams.rcParams.enableIntraFrameQPControl = 0; + mComParams.rcParams.temporalFrameRate = 0; + mComParams.rcParams.temporalID = 0; mComParams.cyclicFrameInterval = 30; mComParams.refreshType = VIDEO_ENC_NONIR; mComParams.airParams.airMBs = 0; @@ -1227,8 +1229,7 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { case VideoConfigTypeSliceNum: case VideoConfigTypeVP8: case VideoConfigTypeVP8ReferenceFrame: - case VideoConfigTypeVP8MaxFrameSizeRatio: - case VideoConfigTypeVP8TemporalBitRateFrameRate:{ + case VideoConfigTypeVP8MaxFrameSizeRatio:{ ret = derivedSetConfig(videoEncConfig); break; } diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index c116412..6c74b32 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -231,6 +231,8 @@ struct VideoRateControlParams { uint32_t disableFrameSkip; uint32_t disableBitsStuffing; uint32_t enableIntraFrameQPControl; + uint32_t temporalFrameRate; + uint32_t temporalID; VideoRateControlParams &operator=(const VideoRateControlParams &other) { if (this == &other) return *this; @@ -246,6 +248,8 @@ struct VideoRateControlParams { this->disableFrameSkip = other.disableFrameSkip; this->disableBitsStuffing = other.disableBitsStuffing; this->enableIntraFrameQPControl = other.enableIntraFrameQPControl; + this->temporalFrameRate = other.temporalFrameRate; + this->temporalID = other.temporalID; return *this; } @@ -331,7 +335,6 @@ enum VideoParamConfigType { VideoConfigTypeVP8ReferenceFrame, VideoConfigTypeCIR, VideoConfigTypeVP8MaxFrameSizeRatio, - VideoConfigTypeVP8TemporalBitRateFrameRate, VideoParamsConfigExtension }; @@ -699,19 +702,4 @@ struct VideoConfigVP8MaxFrameSizeRatio : VideoParamConfigSet { uint32_t max_frame_size_ratio; }; -struct VideoConfigVP8TemporalBitRateFrameRate : VideoParamConfigSet { - - VideoConfigVP8TemporalBitRateFrameRate() { - type = VideoConfigTypeVP8TemporalBitRateFrameRate; - size = sizeof(VideoConfigVP8TemporalBitRateFrameRate); - } - - uint32_t layerID; - uint32_t bitRate; - uint32_t frameRate; -}; - - - - #endif /* __VIDEO_ENCODER_DEF_H__ */ diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index 4892cf0..3db3435 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -44,16 +44,6 @@ VideoEncoderVP8::VideoEncoderVP8() mVideoConfigVP8ReferenceFrame.refresh_golden_frame = 1; mVideoConfigVP8ReferenceFrame.refresh_alternate_frame = 1; - mVideoConfigVP8TemporalBitRateFrameRate[0].bitRate = 0; - mVideoConfigVP8TemporalBitRateFrameRate[0].frameRate = 0; - mVideoConfigVP8TemporalBitRateFrameRate[0].layerID = 0; - mVideoConfigVP8TemporalBitRateFrameRate[1].bitRate = 0; - mVideoConfigVP8TemporalBitRateFrameRate[1].frameRate = 0; - mVideoConfigVP8TemporalBitRateFrameRate[1].layerID = 0; - 
mVideoConfigVP8TemporalBitRateFrameRate[2].bitRate = 0; - mVideoConfigVP8TemporalBitRateFrameRate[2].frameRate = 0; - mVideoConfigVP8TemporalBitRateFrameRate[2].layerID = 0; - mComParams.profile = VAProfileVP8Version0_3; } @@ -273,7 +263,7 @@ Encode_Status VideoEncoderVP8::renderMultiTemporalBitRateFrameRate(void) misc_param->type = VAEncMiscParameterTypeRateControl; misc_rate_ctrl = (VAEncMiscParameterRateControl *)misc_param->data; memset(misc_rate_ctrl, 0, sizeof(*misc_rate_ctrl)); - misc_rate_ctrl->bits_per_second = mVideoConfigVP8TemporalBitRateFrameRate[i].bitRate; +// misc_rate_ctrl->bits_per_second = mVideoConfigVP8TemporalBitRateFrameRate[i].bitRate; misc_rate_ctrl->rc_flags.bits.temporal_id = 0; misc_rate_ctrl->target_percentage = 100; misc_rate_ctrl->window_size = 1000; @@ -299,7 +289,7 @@ Encode_Status VideoEncoderVP8::renderMultiTemporalBitRateFrameRate(void) misc_framerate = (VAEncMiscParameterFrameRate *)misc_param->data; memset(misc_framerate, 0, sizeof(*misc_framerate)); misc_framerate->framerate_flags.bits.temporal_id = i; - misc_framerate->framerate = mVideoConfigVP8TemporalBitRateFrameRate[i].frameRate; +// misc_framerate->framerate = mVideoConfigVP8TemporalBitRateFrameRate[i].frameRate; vaUnmapBuffer(mVADisplay, framerate_param_buf); @@ -433,20 +423,6 @@ Encode_Status VideoEncoderVP8::derivedGetConfig(VideoParamConfigSet *videoEncCon } break; - case VideoConfigTypeVP8TemporalBitRateFrameRate:{ - VideoConfigVP8TemporalBitRateFrameRate *encConfigVP8TemporalBitRateFrameRate = - reinterpret_cast(videoEncConfig); - - if(encConfigVP8TemporalBitRateFrameRate->size != sizeof(VideoConfigVP8TemporalBitRateFrameRate)) { - return ENCODE_INVALID_PARAMS; - } - layer_id = encConfigVP8TemporalBitRateFrameRate->layerID % 3; - encConfigVP8TemporalBitRateFrameRate->bitRate= mVideoConfigVP8TemporalBitRateFrameRate[layer_id].bitRate; - encConfigVP8TemporalBitRateFrameRate->frameRate = mVideoConfigVP8TemporalBitRateFrameRate[layer_id].frameRate; - } - break; - - default: { LOG_E ("Invalid Config Type"); break; @@ -503,21 +479,6 @@ Encode_Status VideoEncoderVP8::derivedSetConfig(VideoParamConfigSet *videoEncCon } break; - case VideoConfigTypeVP8TemporalBitRateFrameRate:{ - VideoConfigVP8TemporalBitRateFrameRate *encConfigVP8TemporalBitRateFrameRate = - reinterpret_cast(videoEncConfig); - - if (encConfigVP8TemporalBitRateFrameRate->size != sizeof(VideoConfigVP8TemporalBitRateFrameRate)) { - return ENCODE_INVALID_PARAMS; - } - layer_id = encConfigVP8TemporalBitRateFrameRate->layerID % 3; - mVideoConfigVP8TemporalBitRateFrameRate[layer_id].layerID = layer_id; - mVideoConfigVP8TemporalBitRateFrameRate[layer_id].bitRate = encConfigVP8TemporalBitRateFrameRate->bitRate; - mVideoConfigVP8TemporalBitRateFrameRate[layer_id].frameRate = encConfigVP8TemporalBitRateFrameRate->frameRate; - mRenderMultiTemporal = true; - } - break; - default: { LOG_E ("Invalid Config Type"); break; diff --git a/videoencoder/VideoEncoderVP8.h b/videoencoder/VideoEncoderVP8.h index 6a036f7..20cd72c 100644 --- a/videoencoder/VideoEncoderVP8.h +++ b/videoencoder/VideoEncoderVP8.h @@ -45,7 +45,6 @@ private: VideoConfigVP8 mVideoConfigVP8; VideoParamsVP8 mVideoParamsVP8; VideoConfigVP8ReferenceFrame mVideoConfigVP8ReferenceFrame; - VideoConfigVP8TemporalBitRateFrameRate mVideoConfigVP8TemporalBitRateFrameRate[3]; }; #endif /* __VIDEO_ENCODER_VP8_H__ */ -- cgit v1.2.3 From d5abfa5c2822e728d1e2d3c3e88a0574aef6c1a7 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Sun, 26 Jan 2014 10:34:25 +0800 Subject: support gfx format 
HAL_PIXEL_FORMAT_NV12 new alignment rule BZ: 180802 For camera v3, if the gfx buffer usage includes GRALLOC_USAGE_HW_CAMERA_WRITE or GRALLOC_USAGE_HW_CAMERA_READ, the luma stride alignment is 64 on MRFLD. Change-Id: I848b3b140b5227672b16e730156a8959dd5dd998 Signed-off-by: Zhao Liang --- videoencoder/VideoEncoderUtils.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp index 2657e01..0612ce6 100644 --- a/videoencoder/VideoEncoderUtils.cpp +++ b/videoencoder/VideoEncoderUtils.cpp @@ -177,7 +177,10 @@ Encode_Status GetGfxBufferInfo(int32_t handle, ValueInfo& vinfo){ if (h->iFormat == HAL_PIXEL_FORMAT_NV12) { #ifdef MRFLD_GFX - vinfo.lumaStride = (h->iWidth + 31) & ~31; //32 aligned + if((h->usage | GRALLOC_USAGE_HW_CAMERA_READ) || (h->usage | GRALLOC_USAGE_HW_CAMERA_WRITE) ) + vinfo.lumaStride = (h->iWidth + 63) & ~63; //64 aligned + else + vinfo.lumaStride = (h->iWidth + 31) & ~31; //32 aligned #else //on CTP if (h->iWidth > 512) vinfo.lumaStride = (h->iWidth + 63) & ~63; //64 aligned -- cgit v1.2.3 From 837aad51b56aee009b5d76c9f40c84864f224520 Mon Sep 17 00:00:00 2001 From: liubolun Date: Tue, 18 Mar 2014 10:54:58 +0800 Subject: Add query VAConfigAttrib before creating config BZ: 179113 Query the supported VAConfigAttrib values before creating the VA config. Fix code style issues. Initialize mEncPackedHeaders to avoid a regression. Change-Id: I12e573c79269f4b19663834dd89a73d4d08c3805 Signed-off-by: liubolun --- videoencoder/VideoEncoderBase.cpp | 62 +++++++++++++++++++++++++++++++-------- 1 file changed, 49 insertions(+), 13 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 718b9c9..a0f6c64 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -49,6 +49,7 @@ VideoEncoderBase::VideoEncoderBase() ,mFrameSkipped(false) ,mSupportedSurfaceMemType(0) ,mVASurfaceMappingAction(0) + ,mEncPackedHeaders(VA_ATTRIB_NOT_SUPPORTED) #ifdef INTEL_VIDEO_XPROC_SHARING ,mSessionFlag(0) #endif @@ -120,23 +121,56 @@ Encode_Status VideoEncoderBase::start() { queryAutoReferenceConfig(mComParams.profile); - VAConfigAttrib vaAttrib[5]; - vaAttrib[0].type = VAConfigAttribRTFormat; - vaAttrib[1].type = VAConfigAttribRateControl; - vaAttrib[2].type = VAConfigAttribEncAutoReference; - vaAttrib[3].type = VAConfigAttribEncPackedHeaders; - vaAttrib[4].type = VAConfigAttribEncMaxRefFrames; + VAConfigAttrib vaAttrib_tmp[6],vaAttrib[VAConfigAttribTypeMax]; + int vaAttribNumber = 0; + vaAttrib_tmp[0].type = VAConfigAttribRTFormat; + vaAttrib_tmp[1].type = VAConfigAttribRateControl; + vaAttrib_tmp[2].type = VAConfigAttribEncAutoReference; + vaAttrib_tmp[3].type = VAConfigAttribEncPackedHeaders; + vaAttrib_tmp[4].type = VAConfigAttribEncMaxRefFrames; + vaAttrib_tmp[5].type = VAConfigAttribEncRateControlExt; vaStatus = vaGetConfigAttributes(mVADisplay, mComParams.profile, - VAEntrypointEncSlice, &vaAttrib[0], 5); + VAEntrypointEncSlice, &vaAttrib_tmp[0], 6); CHECK_VA_STATUS_RETURN("vaGetConfigAttributes"); - mEncPackedHeaders = vaAttrib[3].value; - mEncMaxRefFrames = vaAttrib[4].value; + if((vaAttrib_tmp[0].value & VA_RT_FORMAT_YUV420) != 0) + { + vaAttrib[vaAttribNumber].type = VAConfigAttribRTFormat; + vaAttrib[vaAttribNumber].value = VA_RT_FORMAT_YUV420; + vaAttribNumber++; + } + + vaAttrib[vaAttribNumber].type = VAConfigAttribRateControl; + vaAttrib[vaAttribNumber].value = mComParams.rcMode; + vaAttribNumber++; + + vaAttrib[vaAttribNumber].type = VAConfigAttribEncAutoReference; +
vaAttrib[vaAttribNumber].value = mAutoReference ? 1 : VA_ATTRIB_NOT_SUPPORTED; + vaAttribNumber++; + + if(vaAttrib_tmp[3].value != VA_ATTRIB_NOT_SUPPORTED) + { + vaAttrib[vaAttribNumber].type = VAConfigAttribEncPackedHeaders; + vaAttrib[vaAttribNumber].value = vaAttrib[3].value; + vaAttribNumber++; + mEncPackedHeaders = vaAttrib[3].value; + } + + if(vaAttrib_tmp[4].value != VA_ATTRIB_NOT_SUPPORTED) + { + vaAttrib[vaAttribNumber].type = VAConfigAttribEncMaxRefFrames; + vaAttrib[vaAttribNumber].value = vaAttrib[4].value; + vaAttribNumber++; + mEncMaxRefFrames = vaAttrib[4].value; + } - vaAttrib[0].value = VA_RT_FORMAT_YUV420; - vaAttrib[1].value = mComParams.rcMode; - vaAttrib[2].value = mAutoReference ? 1 : VA_ATTRIB_NOT_SUPPORTED; + if(vaAttrib_tmp[5].value != VA_ATTRIB_NOT_SUPPORTED) + { + vaAttrib[vaAttribNumber].type = VAConfigAttribEncRateControlExt; + vaAttrib[vaAttribNumber].value = mComParams.numberOfLayer; + vaAttribNumber++; + } LOG_V( "======VA Configuration======\n"); LOG_I( "profile = %d\n", mComParams.profile); @@ -147,12 +181,14 @@ Encode_Status VideoEncoderBase::start() { LOG_I( "vaAttrib[0].value (Format) = %d\n", vaAttrib[0].value); LOG_I( "vaAttrib[1].value (RC mode) = %d\n", vaAttrib[1].value); LOG_I( "vaAttrib[2].value (AutoReference) = %d\n", vaAttrib[2].value); + LOG_I( "vaAttribNumber is %d\n", vaAttribNumber); + LOG_I( "mComParams.numberOfLayer is %d\n", mComParams.numberOfLayer); LOG_V( "vaCreateConfig\n"); vaStatus = vaCreateConfig( mVADisplay, mComParams.profile, mVAEntrypoint, - &vaAttrib[0], 2, &(mVAConfig)); + &vaAttrib[0], vaAttribNumber, &(mVAConfig)); // &vaAttrib[0], 3, &(mVAConfig)); //uncomment this after psb_video supports CHECK_VA_STATUS_RETURN("vaCreateConfig"); -- cgit v1.2.3 From 1bdb841e56d7c6c2162cde2caeb8ff18309359ce Mon Sep 17 00:00:00 2001 From: "Ren, Zhaohan" Date: Thu, 13 Mar 2014 15:30:02 +0800 Subject: Fixed encoder slow issue while rendering the same buffer repeatedly BZ: 176245 This patch uses vaMapBuffer/vaUnmapBuffer instead of vaSyncSurface to guarantee correct ordering. Change-Id: Icf5fa8e1a81a41b6ed8b923aaab914c273c4bcaa Signed-off-by: Ren, Zhaohan --- videoencoder/VideoEncoderBase.cpp | 42 ++++++++++++++++++++------------------- videoencoder/VideoEncoderBase.h | 1 + 2 files changed, 23 insertions(+), 20 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index a0f6c64..00fe25d 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -41,7 +41,7 @@ VideoEncoderBase::VideoEncoderBase() ,mSliceSizeOverflow(false) ,mCurOutputTask(NULL) ,mOutCodedBuffer(0) - ,mCodedBufferMapped(false) + ,mOutCodedBufferPtr(NULL) ,mCurSegment(NULL) ,mOffsetInSeg(0) ,mTotalSize(0) @@ -478,23 +478,25 @@ Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint3 //sync/query/wait task if not completed if (mCurOutputTask->completed == false) { - uint8_t *buf = NULL; VASurfaceStatus vaSurfaceStatus; if (timeout == FUNC_BLOCK) { //block mode, direct sync surface to output data - LOG_I ("block mode, vaSyncSurface ID = 0x%08x\n", mCurOutputTask->enc_surface); - vaStatus = vaSyncSurface(mVADisplay, mCurOutputTask->enc_surface); - CHECK_VA_STATUS_GOTO_CLEANUP("vaSyncSurface"); - mOutCodedBuffer = mCurOutputTask->coded_buffer; // Check frame skip // Need encoding to be completed before calling query surface below to // get the right skip frame flag for current frame // It is a requirement of video driver - vaSyncSurface(mVADisplay, mCurOutputTask->enc_surface); + // vaSyncSurface syncs the
wrong frame when rendering the same surface multiple times, + // so use vaMapbuffer instead + LOG_I ("block mode, vaMapBuffer ID = 0x%08x\n", mOutCodedBuffer); + if (mOutCodedBufferPtr == NULL) { + vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&mOutCodedBufferPtr); + CHECK_VA_STATUS_GOTO_CLEANUP("vaMapBuffer"); + CHECK_NULL_RETURN_IFFAIL(mOutCodedBufferPtr); + } vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus); CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus"); @@ -557,9 +559,9 @@ CLEAN_UP: useLocalBuffer = false; } - if (mCodedBufferMapped) { + if (mOutCodedBufferPtr != NULL) { vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); - mCodedBufferMapped = false; + mOutCodedBufferPtr = NULL; mCurSegment = NULL; } @@ -643,7 +645,6 @@ CLEAN_UP: mSliceSizeOverflow = false; mCurOutputTask= NULL; mOutCodedBuffer = 0; - mCodedBufferMapped = false; mCurSegment = NULL; mOffsetInSeg =0; mTotalSize = 0; @@ -661,23 +662,24 @@ Encode_Status VideoEncoderBase::prepareForOutput( VAStatus vaStatus = VA_STATUS_SUCCESS; VACodedBufferSegment *vaCodedSeg = NULL; uint32_t status = 0; - uint8_t *buf = NULL; LOG_V( "begin\n"); // Won't check parameters here as the caller already checked them // mCurSegment is NULL means it is first time to be here after finishing encoding a frame - if (mCurSegment == NULL && !mCodedBufferMapped) { + if (mCurSegment == NULL) { + if (mOutCodedBufferPtr == NULL) { + vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&mOutCodedBufferPtr); + CHECK_VA_STATUS_RETURN("vaMapBuffer"); + CHECK_NULL_RETURN_IFFAIL(mOutCodedBufferPtr); + } + LOG_I ("Coded Buffer ID been mapped = 0x%08x\n", mOutCodedBuffer); - vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf); - CHECK_VA_STATUS_RETURN("vaMapBuffer"); - CHECK_NULL_RETURN_IFFAIL(buf); - mCodedBufferMapped = true; mTotalSize = 0; mOffsetInSeg = 0; mTotalSizeCopied = 0; - vaCodedSeg = (VACodedBufferSegment *)buf; - mCurSegment = (VACodedBufferSegment *)buf; + vaCodedSeg = (VACodedBufferSegment *)mOutCodedBufferPtr; + mCurSegment = (VACodedBufferSegment *)mOutCodedBufferPtr; while (1) { @@ -759,10 +761,10 @@ Encode_Status VideoEncoderBase::cleanupForOutput() { VAStatus vaStatus = VA_STATUS_SUCCESS; //mCurSegment is NULL means all data has been copied out - if (mCurSegment == NULL && mCodedBufferMapped) { + if (mCurSegment == NULL && mOutCodedBufferPtr) { vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer); CHECK_VA_STATUS_RETURN("vaUnmapBuffer"); - mCodedBufferMapped = false; + mOutCodedBufferPtr = NULL; mTotalSize = 0; mOffsetInSeg = 0; mTotalSizeCopied = 0; diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index f4e58e0..5bd7711 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -151,6 +151,7 @@ protected: //Current outputting CodedBuffer status VABufferID mOutCodedBuffer; bool mCodedBufferMapped; + uint8_t *mOutCodedBufferPtr; VACodedBufferSegment *mCurSegment; uint32_t mOffsetInSeg; uint32_t mTotalSize; -- cgit v1.2.3 From 7ec362e15368734105ae3dac809694838f553f6d Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Thu, 27 Mar 2014 21:25:41 +0800 Subject: To support Modular DRM for the Moorefield platform in LibMIX (2) BZ: 175276 Add missing VideoFrameInfo.h Change-Id: I7290320b758f9503153c65618dabd0f4ef9543a3 Signed-off-by: wfeng6 --- videodecoder/securevideo/moorefield/VideoFrameInfo.h | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100755 
videodecoder/securevideo/moorefield/VideoFrameInfo.h diff --git a/videodecoder/securevideo/moorefield/VideoFrameInfo.h b/videodecoder/securevideo/moorefield/VideoFrameInfo.h new file mode 100755 index 0000000..426dfca --- /dev/null +++ b/videodecoder/securevideo/moorefield/VideoFrameInfo.h @@ -0,0 +1,20 @@ +#ifndef VIDEO_FRAME_INFO_H_ +#define VIDEO_FRAME_INFO_H_ + +#define MAX_NUM_NALUS 16 + +typedef struct { + uint8_t type; // nalu type + nal_ref_idc + uint32_t offset; // offset to the pointer of the encrypted data + uint8_t* data; // if the nalu is encrypted, this field is useless; if current NALU is SPS/PPS, data is the pointer to clear SPS/PPS data + uint32_t length; // nalu length +} nalu_info_t; + +typedef struct { + uint8_t* data; // pointer to the encrypted data + uint32_t size; // encrypted data size + uint32_t num_nalus; // number of NALU + nalu_info_t nalus[MAX_NUM_NALUS]; +} frame_info_t; + +#endif -- cgit v1.2.3 From 8765d3f94f204b629ac1bf69359313d617e36b7c Mon Sep 17 00:00:00 2001 From: Nana GUo Date: Thu, 27 Mar 2014 18:23:35 -0400 Subject: libmix: reset mRotationDegree when setupVA BZ: 180000 Need to reset mRotationDegree when setupVA to make sure to pass latest rotation info to driver part Change-Id: I36ed1d957e8a230f008dcc9033e51ecdecf9571f Signed-off-by: Nana GUo --- videodecoder/VideoDecoderBase.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index a1e3866..bf2a46e 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -736,6 +736,7 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i return DECODE_SUCCESS; } + mRotationDegrees = 0; if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){ #ifdef TARGET_HAS_VPP if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber - mConfigBuffer.vppBufferNum) -- cgit v1.2.3 From 536f756640041acb3f3fbf2df77d5c9cff6e0e0e Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Thu, 27 Mar 2014 15:49:24 +0800 Subject: To support Modular DRM for the Moorefield platform in LibMIX BZ: 175276 Support Modular DRM for the Moorefield platform in LibMIX Change-Id: I4225d9157ea3aec5c5a921d59ef8dc39c569d4ec Signed-off-by: wfeng6 --- videodecoder/Android.mk | 2 +- .../moorefield/VideoDecoderAVCSecure.cpp | 391 +++++++++++++++++++-- .../securevideo/moorefield/VideoDecoderAVCSecure.h | 24 +- 3 files changed, 391 insertions(+), 26 deletions(-) diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk index eef5828..31f9ff5 100644 --- a/videodecoder/Android.mk +++ b/videodecoder/Android.mk @@ -72,7 +72,7 @@ LOCAL_COPY_HEADERS := \ VideoDecoderDefs.h ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_SLICE_HEADER_PARSER)),) - LOCAL_COPY_HEADERS += securevideo/merrifield/VideoFrameInfo.h + LOCAL_COPY_HEADERS += securevideo/$(TARGET_BOARD_PLATFORM)/VideoFrameInfo.h endif LOCAL_MODULE_TAGS := optional diff --git a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp index 671e8bd..4ff7771 100644 --- a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp @@ -21,12 +21,14 @@ * approved by Intel in writing. 
* */ + #include #include "VideoDecoderBase.h" #include "VideoDecoderAVC.h" #include "VideoDecoderTrace.h" #include "vbp_loader.h" #include "VideoDecoderAVCSecure.h" +#include "VideoFrameInfo.h" #define MAX_SLICEHEADER_BUFFER_SIZE 4096 #define STARTCODE_PREFIX_LEN 3 @@ -61,12 +63,16 @@ VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType) mFrameData = NULL; mIsEncryptData = 0; mClearData = NULL; + mCachedHeader = NULL; setParserType(VBP_H264SECURE); mFrameIdx = 0; + mModularMode = 0; + mSliceNum = 0; } Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { VTRACE("VideoDecoderAVCSecure::start"); + Decode_Status status = VideoDecoderAVC::start(buffer); if (status != DECODE_SUCCESS) { return status; @@ -78,6 +84,12 @@ Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) { return DECODE_MEMORY_FAIL; } + mCachedHeader= new uint8_t [MAX_SLICEHEADER_BUFFER_SIZE]; + if (mCachedHeader == NULL) { + ETRACE("Failed to allocate memory for mCachedHeader"); + return DECODE_MEMORY_FAIL; + } + return status; } @@ -89,16 +101,130 @@ void VideoDecoderAVCSecure::stop(void) { delete [] mClearData; mClearData = NULL; } -} -Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { - VTRACE("VideoDecoderAVCSecure::decode"); + if (mCachedHeader) { + delete [] mCachedHeader; + mCachedHeader = NULL; + } +} +Decode_Status VideoDecoderAVCSecure::processModularInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data) +{ + VTRACE("processModularInputBuffer +++"); Decode_Status status; - vbp_data_h264 *data = NULL; - if (buffer == NULL) { - return DECODE_INVALID_DATA; + int32_t clear_data_size = 0; + uint8_t *clear_data = NULL; + + int32_t nalu_num = 0; + uint8_t nalu_type = 0; + int32_t nalu_offset = 0; + uint32_t nalu_size = 0; + uint8_t naluType = 0; + uint8_t *nalu_data = NULL; + uint32_t sliceidx = 0; + + frame_info_t *pFrameInfo = NULL; + mSliceNum = 0; + memset(&mSliceInfo, 0, sizeof(mSliceInfo)); + mIsEncryptData = 0; + + if (buffer->flag & IS_SECURE_DATA) { + VTRACE("Decoding protected video ..."); + pFrameInfo = (frame_info_t *) buffer->data; + if (pFrameInfo == NULL) { + ETRACE("Invalid parameter: pFrameInfo is NULL!"); + return DECODE_MEMORY_FAIL; + } + + mFrameData = pFrameInfo->data; + mFrameSize = pFrameInfo->size; + VTRACE("mFrameData = %p, mFrameSize = %d", mFrameData, mFrameSize); + + nalu_num = pFrameInfo->num_nalus; + VTRACE("nalu_num = %d", nalu_num); + + if (nalu_num <= 0 || nalu_num >= MAX_NUM_NALUS) { + ETRACE("Invalid parameter: nalu_num = %d", nalu_num); + return DECODE_MEMORY_FAIL; + } + + for (int32_t i = 0; i < nalu_num; i++) { + + nalu_size = pFrameInfo->nalus[i].length; + nalu_type = pFrameInfo->nalus[i].type; + nalu_offset = pFrameInfo->nalus[i].offset; + nalu_data = pFrameInfo->nalus[i].data; + naluType = nalu_type & NALU_TYPE_MASK; + + VTRACE("nalu_type = 0x%x, nalu_size = %d, nalu_offset = 0x%x", nalu_type, nalu_size, nalu_offset); + + if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) { + + mIsEncryptData = 1; + VTRACE("slice idx = %d", sliceidx); + mSliceInfo[sliceidx].sliceHeaderByte = nalu_type; + mSliceInfo[sliceidx].sliceStartOffset = (nalu_offset >> 4) << 4; + mSliceInfo[sliceidx].sliceByteOffset = nalu_offset - mSliceInfo[sliceidx].sliceStartOffset; + mSliceInfo[sliceidx].sliceLength = nalu_size; + mSliceInfo[sliceidx].sliceSize = (mSliceInfo[sliceidx].sliceByteOffset + nalu_size + 0xF) & ~0xF; + VTRACE("sliceHeaderByte = 0x%x", mSliceInfo[sliceidx].sliceHeaderByte); + 
VTRACE("sliceStartOffset = %d", mSliceInfo[sliceidx].sliceStartOffset); + VTRACE("sliceByteOffset = %d", mSliceInfo[sliceidx].sliceByteOffset); + VTRACE("sliceSize = %d", mSliceInfo[sliceidx].sliceSize); + +#if 0 + uint32_t testsize; + uint8_t *testdata; + testsize = mSliceInfo[sliceidx].sliceSize > 64 ? 64 : mSliceInfo[sliceidx].sliceSize ; + testdata = (uint8_t *)(mFrameData); + for (int i = 0; i < testsize; i++) { + VTRACE("testdata[%d] = 0x%x", i, testdata[i]); + } +#endif + sliceidx++; + + } else if (naluType == h264_NAL_UNIT_TYPE_SPS || naluType == h264_NAL_UNIT_TYPE_PPS) { + if (nalu_data == NULL) { + ETRACE("Invalid parameter: nalu_data = NULL for naluType 0x%x", naluType); + return DECODE_MEMORY_FAIL; + } + memcpy(mClearData + clear_data_size, + nalu_data, + nalu_size); + clear_data_size += nalu_size; + } else { + ITRACE("Nalu type = 0x%x is skipped", naluType); + continue; + } + } + clear_data = mClearData; + mSliceNum = sliceidx; + + } else { + VTRACE("Decoding clear video ..."); + mIsEncryptData = 0; + mFrameSize = buffer->size; + mFrameData = buffer->data; + clear_data = buffer->data; + clear_data_size = buffer->size; + } + + if (clear_data_size > 0) { + status = VideoDecoderBase::parseBuffer( + clear_data, + clear_data_size, + false, + (void**)data); + CHECK_STATUS("VideoDecoderBase::parseBuffer"); + } else { + status = VideoDecoderBase::queryBuffer((void**)data); + CHECK_STATUS("VideoDecoderBase::queryBuffer"); } + return DECODE_SUCCESS; +} +Decode_Status VideoDecoderAVCSecure::processClassicInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data) +{ + Decode_Status status; int32_t clear_data_size = 0; uint8_t *clear_data = NULL; uint8_t naluType = 0; @@ -109,8 +235,7 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { uint8_t *data_src; uint8_t *nalu_data; uint32_t nalu_size; -// uint32_t testsize; -// uint8_t *testdata; + if (buffer->flag & IS_SECURE_DATA) { VTRACE("Decoding protected video ..."); mIsEncryptData = 1; @@ -118,13 +243,6 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { mFrameData = buffer->data; mFrameSize = buffer->size; VTRACE("mFrameData = %p, mFrameSize = %d", mFrameData, mFrameSize); -#if 0 - testsize = *(uint32_t *)(buffer->data + buffer->size); - testdata = (uint8_t *)(buffer->data + buffer->size + sizeof(uint32_t)); - for (int i = 0; i < testsize; i++) { - VTRACE("testdata[%d] = 0x%x", i, testdata[i]); - } -#endif num_nalus = *(uint32_t *)(buffer->data + buffer->size + sizeof(uint32_t)); VTRACE("num_nalus = %d", num_nalus); offset = 4; @@ -160,7 +278,6 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { } } clear_data = mClearData; - } else { VTRACE("Decoding clear video ..."); mIsEncryptData = 0; @@ -169,17 +286,50 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { clear_data = buffer->data; clear_data_size = buffer->size; } + if (clear_data_size > 0) { status = VideoDecoderBase::parseBuffer( clear_data, clear_data_size, false, - (void**)&data); + (void**)data); CHECK_STATUS("VideoDecoderBase::parseBuffer"); } else { - status = VideoDecoderBase::queryBuffer((void**)&data); + status = VideoDecoderBase::queryBuffer((void**)data); CHECK_STATUS("VideoDecoderBase::queryBuffer"); } + return DECODE_SUCCESS; +} + +Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { + VTRACE("VideoDecoderAVCSecure::decode"); + Decode_Status status; + vbp_data_h264 *data = NULL; + if (buffer == NULL) { + return DECODE_INVALID_DATA; + } + +#if 0 + 
uint32_t testsize; + uint8_t *testdata; + testsize = buffer->size > 16 ? 16:buffer->size ; + testdata = (uint8_t *)(buffer->data); + for (int i = 0; i < 16; i++) { + VTRACE("testdata[%d] = 0x%x", i, testdata[i]); + } +#endif + if (buffer->flag & IS_SUBSAMPLE_ENCRYPTION) { + mModularMode = 1; + } + + if (mModularMode) { + status = processModularInputBuffer(buffer,&data); + CHECK_STATUS("processModularInputBuffer"); + } + else { + status = processClassicInputBuffer(buffer,&data); + CHECK_STATUS("processClassicInputBuffer"); + } if (!mVAStarted) { if (data->has_sps && data->has_pps) { @@ -190,6 +340,7 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { return DECODE_SUCCESS; } } + status = decodeFrame(buffer, data); return status; @@ -220,6 +371,15 @@ Decode_Status VideoDecoderAVCSecure::decodeFrame(VideoDecodeBuffer *buffer, vbp_ CHECK_STATUS("handleNewSequence"); } + if (mModularMode && (!mIsEncryptData)) { + if (data->pic_data[0].num_slices == 0) { + ITRACE("No slice available for decoding."); + status = mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS; + mSizeChanged = false; + return status; + } + } + uint64_t lastPTS = mCurrentPTS; mCurrentPTS = buffer->timeStamp; @@ -227,9 +387,13 @@ Decode_Status VideoDecoderAVCSecure::decodeFrame(VideoDecodeBuffer *buffer, vbp_ status = acquireSurfaceBuffer(); CHECK_STATUS("acquireSurfaceBuffer"); - if (mFrameSize > 0) { - status = parseSliceHeader(buffer, data); + if (mModularMode) { + parseModularSliceHeader(buffer,data); + } + else { + parseClassicSliceHeader(buffer,data); } + if (status != DECODE_SUCCESS) { endDecodingFrame(true); return status; @@ -271,6 +435,11 @@ Decode_Status VideoDecoderAVCSecure::beginDecodingFrame(vbp_data_h264 *data) { mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS; mAcquiredBuffer->pictureOrder = getPOC(picture); + if (mSizeChanged) { + mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE; + mSizeChanged = false; + } + status = continueDecodingFrame(data); return status; } @@ -309,7 +478,7 @@ Decode_Status VideoDecoderAVCSecure::continueDecodingFrame(vbp_data_h264 *data) return DECODE_SUCCESS; } -Decode_Status VideoDecoderAVCSecure::parseSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data) { +Decode_Status VideoDecoderAVCSecure::parseClassicSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data) { Decode_Status status; VAStatus vaStatus; @@ -317,6 +486,9 @@ Decode_Status VideoDecoderAVCSecure::parseSliceHeader(VideoDecodeBuffer *buffer, VABufferID pictureparameterparsingbufferID; VABufferID mSlicebufferID; + if (mFrameSize <= 0) { + return DECODE_SUCCESS; + } vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); CHECK_VA_STATUS("vaBeginPicture"); @@ -415,6 +587,130 @@ Decode_Status VideoDecoderAVCSecure::parseSliceHeader(VideoDecodeBuffer *buffer, return DECODE_SUCCESS; } +Decode_Status VideoDecoderAVCSecure::parseModularSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data) { + Decode_Status status; + VAStatus vaStatus; + + VABufferID sliceheaderbufferID; + VABufferID pictureparameterparsingbufferID; + VABufferID mSlicebufferID; + int32_t sliceIdx; + + vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface); + CHECK_VA_STATUS("vaBeginPicture"); + + if (mFrameSize <= 0 || mSliceNum <=0) { + return DECODE_SUCCESS; + } + void *sliceheaderbuf; + memset(mCachedHeader, 0, MAX_SLICEHEADER_BUFFER_SIZE); + int32_t offset = 0; + int32_t size = 0; + + for (sliceIdx = 0; sliceIdx < mSliceNum; sliceIdx++) { + 
vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAParseSliceHeaderGroupBufferType, + MAX_SLICEHEADER_BUFFER_SIZE, + 1, + NULL, + &sliceheaderbufferID); + CHECK_VA_STATUS("vaCreateSliceHeaderGroupBuffer"); + + vaStatus = vaMapBuffer( + mVADisplay, + sliceheaderbufferID, + &sliceheaderbuf); + CHECK_VA_STATUS("vaMapBuffer"); + + memset(sliceheaderbuf, 0, MAX_SLICEHEADER_BUFFER_SIZE); + + vaStatus = vaUnmapBuffer( + mVADisplay, + sliceheaderbufferID); + CHECK_VA_STATUS("vaUnmapBuffer"); + + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VASliceDataBufferType, + mSliceInfo[sliceIdx].sliceSize, //size + 1, //num_elements + mFrameData + mSliceInfo[sliceIdx].sliceStartOffset, + &mSlicebufferID); + CHECK_VA_STATUS("vaCreateSliceDataBuffer"); + + data->pic_parse_buffer->frame_buf_id = mSlicebufferID; + data->pic_parse_buffer->slice_headers_buf_id = sliceheaderbufferID; + data->pic_parse_buffer->frame_size = mSliceInfo[sliceIdx].sliceLength; + data->pic_parse_buffer->slice_headers_size = MAX_SLICEHEADER_BUFFER_SIZE; + data->pic_parse_buffer->nalu_header.value = mSliceInfo[sliceIdx].sliceHeaderByte; + data->pic_parse_buffer->slice_offset = mSliceInfo[sliceIdx].sliceByteOffset; + +#if 0 + VTRACE("data->pic_parse_buffer->slice_offset = 0x%x", data->pic_parse_buffer->slice_offset); + VTRACE("pic_parse_buffer->nalu_header.value = %x", data->pic_parse_buffer->nalu_header.value = mSliceInfo[sliceIdx].sliceHeaderByte); + VTRACE("flags.bits.frame_mbs_only_flag = %d", data->pic_parse_buffer->flags.bits.frame_mbs_only_flag); + VTRACE("flags.bits.pic_order_present_flag = %d", data->pic_parse_buffer->flags.bits.pic_order_present_flag); + VTRACE("flags.bits.delta_pic_order_always_zero_flag = %d", data->pic_parse_buffer->flags.bits.delta_pic_order_always_zero_flag); + VTRACE("flags.bits.redundant_pic_cnt_present_flag = %d", data->pic_parse_buffer->flags.bits.redundant_pic_cnt_present_flag); + VTRACE("flags.bits.weighted_pred_flag = %d", data->pic_parse_buffer->flags.bits.weighted_pred_flag); + VTRACE("flags.bits.entropy_coding_mode_flag = %d", data->pic_parse_buffer->flags.bits.entropy_coding_mode_flag); + VTRACE("flags.bits.deblocking_filter_control_present_flag = %d", data->pic_parse_buffer->flags.bits.deblocking_filter_control_present_flag); + VTRACE("flags.bits.weighted_bipred_idc = %d", data->pic_parse_buffer->flags.bits.weighted_bipred_idc); + VTRACE("pic_parse_buffer->expected_pic_parameter_set_id = %d", data->pic_parse_buffer->expected_pic_parameter_set_id); + VTRACE("pic_parse_buffer->num_slice_groups_minus1 = %d", data->pic_parse_buffer->num_slice_groups_minus1); + VTRACE("pic_parse_buffer->chroma_format_idc = %d", data->pic_parse_buffer->chroma_format_idc); + VTRACE("pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4 = %d", data->pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4); + VTRACE("pic_parse_buffer->pic_order_cnt_type = %d", data->pic_parse_buffer->pic_order_cnt_type); + VTRACE("pic_parse_buffer->residual_colour_transform_flag = %d", data->pic_parse_buffer->residual_colour_transform_flag); + VTRACE("pic_parse_buffer->num_ref_idc_l0_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l0_active_minus1); + VTRACE("pic_parse_buffer->num_ref_idc_l1_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l1_active_minus1); +#endif + vaStatus = vaCreateBuffer( + mVADisplay, + mVAContext, + VAParsePictureParameterBufferType, + sizeof(VAParsePictureParameterBuffer), + 1, + data->pic_parse_buffer, + &pictureparameterparsingbufferID); + 
CHECK_VA_STATUS("vaCreatePictureParameterParsingBuffer"); + + vaStatus = vaRenderPicture( + mVADisplay, + mVAContext, + &pictureparameterparsingbufferID, + 1); + CHECK_VA_STATUS("vaRenderPicture"); + + vaStatus = vaMapBuffer( + mVADisplay, + sliceheaderbufferID, + &sliceheaderbuf); + CHECK_VA_STATUS("vaMapBuffer"); + + size = *(uint32 *)((uint8 *)sliceheaderbuf + 4) + 4; + VTRACE("slice header size = 0x%x, offset = 0x%x", size, offset); + if (offset + size <= MAX_SLICEHEADER_BUFFER_SIZE - 4) { + memcpy(mCachedHeader+offset, sliceheaderbuf, size); + offset += size; + } else { + WTRACE("Cached slice header is not big enough!"); + } + vaStatus = vaUnmapBuffer( + mVADisplay, + sliceheaderbufferID); + CHECK_VA_STATUS("vaUnmapBuffer"); + } + memset(mCachedHeader + offset, 0xFF, 4); + status = updateSliceParameter(data,mCachedHeader); + CHECK_STATUS("processSliceHeader"); + return DECODE_SUCCESS; +} + + Decode_Status VideoDecoderAVCSecure::updateSliceParameter(vbp_data_h264 *data, void *sliceheaderbuf) { VTRACE("VideoDecoderAVCSecure::updateSliceParameter"); Decode_Status status; @@ -437,6 +733,8 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]); VAPictureParameterBufferH264 *picParam = picData->pic_parms; VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms); + uint32_t slice_data_size = 0; + uint8_t* slice_data_addr = NULL; if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) { // either condition indicates start of a new frame @@ -480,7 +778,21 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p status = setReference(sliceParam); CHECK_STATUS("setReference"); - sliceParam->slice_data_size = mFrameSize; + if (mModularMode) { + if (mIsEncryptData) { + sliceParam->slice_data_size = mSliceInfo[sliceIndex].sliceSize; + slice_data_size = mSliceInfo[sliceIndex].sliceSize; + slice_data_addr = mFrameData + mSliceInfo[sliceIndex].sliceStartOffset; + } else { + slice_data_size = sliceData->slice_size; + slice_data_addr = sliceData->buffer_addr + sliceData->slice_offset; + } + } else { + sliceParam->slice_data_size = mFrameSize; + slice_data_size = mFrameSize; + slice_data_addr = mFrameData; + } + vaStatus = vaCreateBuffer( mVADisplay, mVAContext, @@ -505,9 +817,9 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p mVADisplay, mVAContext, VASliceDataBufferType, - mFrameSize, //size + slice_data_size, //size 1, //num_elements - mFrameData, + slice_data_addr, &slicebufferID); CHECK_VA_STATUS("vaCreateSliceDataBuffer"); @@ -521,3 +833,34 @@ Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t p return DECODE_SUCCESS; } + +Decode_Status VideoDecoderAVCSecure::getCodecSpecificConfigs( + VAProfile profile, VAConfigID *config) +{ + VAStatus vaStatus; + VAConfigAttrib attrib[2]; + + if (config == NULL) { + ETRACE("Invalid parameter!"); + return DECODE_FAIL; + } + + attrib[0].type = VAConfigAttribRTFormat; + attrib[0].value = VA_RT_FORMAT_YUV420; + attrib[1].type = VAConfigAttribDecSliceMode; + attrib[1].value = VA_DEC_SLICE_MODE_NORMAL; + if (mModularMode) { + attrib[1].value = VA_DEC_SLICE_MODE_SUBSAMPLE; + } + + vaStatus = vaCreateConfig( + mVADisplay, + profile, + VAEntrypointVLD, + &attrib[0], + 2, + config); + CHECK_VA_STATUS("vaCreateConfig"); + + return DECODE_SUCCESS; +} diff --git a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h 
index 6378243..458196e 100644
--- a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h
+++ b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h
@@ -41,15 +41,37 @@ protected:
     virtual Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data);
     virtual Decode_Status continueDecodingFrame(vbp_data_h264 *data);
     virtual Decode_Status beginDecodingFrame(vbp_data_h264 *data);
-    Decode_Status parseSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data);
+    virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID *config);
+    Decode_Status parseClassicSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data);
+    Decode_Status parseModularSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data);
+    Decode_Status updateSliceParameter(vbp_data_h264 *data, void *sliceheaderbuf);
     virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex);
 private:
+    Decode_Status processClassicInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data);
+    Decode_Status processModularInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data);
     int32_t mIsEncryptData;
     int32_t mFrameSize;
     uint8_t* mFrameData;
     uint8_t* mClearData;
+    uint8_t* mCachedHeader;
     int32_t mFrameIdx;
+    int32_t mModularMode;
+
+    enum {
+        MAX_SLICE_HEADER_NUM = 256,
+    };
+    int32_t mSliceNum;
+    // Information of slices in the Modular DRM mode
+    struct SliceInfo {
+        uint8_t sliceHeaderByte;    // first byte of the slice header
+        uint32_t sliceStartOffset;  // offset of the slice unit in the firewalled buffer
+        uint32_t sliceByteOffset;   // extra offset from the block-aligned slice offset
+        uint32_t sliceSize;         // block-aligned length of the slice unit
+        uint32_t sliceLength;       // actual size of the slice
+    };
+
+    SliceInfo mSliceInfo[MAX_SLICE_HEADER_NUM];
 };

 #endif
-- 
cgit v1.2.3

From 2be7dd13ac4b446e3cb1644915e06af99d717885 Mon Sep 17 00:00:00 2001
From: bolunliu
Date: Fri, 28 Mar 2014 16:13:58 +0800
Subject: Reset the max kf dist as 32.

BZ: 183561

Reset the max kf dist to 32. It should be a multiple of 4. This fixes a
wrong ref frame issue for Sand Drop.

Change-Id: Ie61a778436eb850d848bcf4ad3a880ac08870cb8
Signed-off-by: bolunliu
---
 videoencoder/VideoEncoderVP8.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp
index 3db3435..168b60e 100644
--- a/videoencoder/VideoEncoderVP8.cpp
+++ b/videoencoder/VideoEncoderVP8.cpp
@@ -21,7 +21,7 @@ VideoEncoderVP8::VideoEncoderVP8()
     mVideoParamsVP8.num_token_partitions = 4;
     mVideoParamsVP8.kf_auto = 1;
     mVideoParamsVP8.kf_min_dist = 0;
-    mVideoParamsVP8.kf_max_dist = 30;
+    mVideoParamsVP8.kf_max_dist = 32;
     mVideoParamsVP8.min_qp = 4;
     mVideoParamsVP8.max_qp = 63;
     mVideoParamsVP8.init_qp = 26;
-- 
cgit v1.2.3

From 3563210d7a68a8e94b16c9e019f339c6bddd1886 Mon Sep 17 00:00:00 2001
From: Yuanjun Huang
Date: Wed, 26 Mar 2014 15:20:26 +0800
Subject: [libmix parser] Multi-thread parsing implementation.

BZ: 183804

1. Refactor the h.264 parser to resolve logical and data structure
   dependencies for multithreading and to improve PnP.
2. Two multi-thread parsing schemes are implemented: bundle input and
   sequential input. The default is the bundle scheme.
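In rough terms the two schemes drive the workers as follows (an illustrative
sketch only; argument lists are simplified relative to the real entry points
added in vbp_thread.c below):

    /* Sequential input: one item is handed to one worker per call,
       round-robin; output starts coming back once all active threads
       have been warmed up. */
    for (i = first_slice; i < num_items; i++)
        got_output = vbp_thread_parse_syntax(cxt, codec_data, pcontext);

    /* Bundle input: all remaining items are handed out in one call;
       worker k parses items first_slice + k, first_slice + k + N, ...
       for N active threads. */
    vbp_thread_parse_syntax_bundle(cxt, codec_data, pcontext, first_slice);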
Change-Id: I851419e079c9e0dd461337fd2be6627c88e0a294 Signed-off-by: Yuanjun Huang --- mixvbp/vbp_manager/Android.mk | 26 +- mixvbp/vbp_manager/include/viddec_parser_ops.h | 8 + mixvbp/vbp_manager/vbp_h264_parser.c | 42 +- mixvbp/vbp_manager/vbp_mp42_parser.c | 5 + mixvbp/vbp_manager/vbp_thread.c | 634 +++++++++++++++++++++ mixvbp/vbp_manager/vbp_thread.h | 51 ++ mixvbp/vbp_manager/vbp_utils.c | 265 +++++++-- mixvbp/vbp_manager/vbp_utils.h | 17 + mixvbp/vbp_manager/vbp_vc1_parser.c | 4 + mixvbp/vbp_manager/vbp_vp8_parser.c | 4 + mixvbp/vbp_plugin/h264/h264parse.c | 273 +++++++-- mixvbp/vbp_plugin/h264/h264parse_pps.c | 4 + mixvbp/vbp_plugin/h264/h264parse_sh.c | 485 ++++++++++++++-- mixvbp/vbp_plugin/h264/include/h264.h | 118 ++-- mixvbp/vbp_plugin/h264/include/h264parse.h | 10 +- mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c | 5 +- .../secvideo/baytrail/viddec_h264secure_parse.c | 5 +- .../secvideo/merrifield/viddec_h264secure_parse.c | 5 +- mixvbp/vbp_plugin/h264/viddec_h264_parse.c | 185 +++++- 19 files changed, 1961 insertions(+), 185 deletions(-) create mode 100644 mixvbp/vbp_manager/vbp_thread.c create mode 100644 mixvbp/vbp_manager/vbp_thread.h diff --git a/mixvbp/vbp_manager/Android.mk b/mixvbp/vbp_manager/Android.mk index a45fe7e..d861197 100755 --- a/mixvbp/vbp_manager/Android.mk +++ b/mixvbp/vbp_manager/Android.mk @@ -2,19 +2,23 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) -#MIXVBP_LOG_ENABLE := true - -LOCAL_SRC_FILES := \ - vbp_h264_parser.c \ - vbp_vc1_parser.c \ - vbp_loader.c \ - vbp_mp42_parser.c \ - vbp_utils.c \ - viddec_parse_sc.c \ - viddec_pm_parser_ops.c \ - viddec_pm_utils_bstream.c \ +ifeq (true,$(strip $(PRODUCT_PACKAGE_DEBUG))) +MIXVBP_LOG_ENABLE := true +endif + +LOCAL_SRC_FILES := \ + vbp_h264_parser.c \ + vbp_vc1_parser.c \ + vbp_loader.c \ + vbp_mp42_parser.c \ + vbp_utils.c \ + viddec_parse_sc.c \ + viddec_pm_parser_ops.c \ + viddec_pm_utils_bstream.c \ + vbp_thread.c LOCAL_CFLAGS := -DVBP -DHOST_ONLY +LOCAL_CFLAGS += -DUSE_MULTI_THREADING LOCAL_C_INCLUDES += \ $(LOCAL_PATH)/include \ diff --git a/mixvbp/vbp_manager/include/viddec_parser_ops.h b/mixvbp/vbp_manager/include/viddec_parser_ops.h index 77054b5..533b231 100755 --- a/mixvbp/vbp_manager/include/viddec_parser_ops.h +++ b/mixvbp/vbp_manager/include/viddec_parser_ops.h @@ -30,6 +30,10 @@ typedef void (*fn_flush_parser) (void *parent, void *ctxt); typedef uint32_t (*fn_update_data)(void *parent, void *data, uint32_t size); #endif +typedef uint32_t (*fn_is_payload_start)(void *parent); +typedef uint32_t (*fn_parse_syntax_threading) (void *parent, void *ctxt, uint32_t thread_index); +typedef uint32_t (*fn_post_parse_threading) (void *parent, void *ctxt, uint32_t slice_index); +typedef uint32_t (*fn_query_thread_parsing_cap) (void); typedef struct { @@ -45,6 +49,10 @@ typedef struct #if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) fn_update_data update_data; #endif + fn_is_payload_start is_payload_start; + fn_parse_syntax_threading parse_syntax_threading; + fn_post_parse_threading post_parse_threading; + fn_query_thread_parsing_cap query_thread_parsing_cap; } viddec_parser_ops_t; diff --git a/mixvbp/vbp_manager/vbp_h264_parser.c b/mixvbp/vbp_manager/vbp_h264_parser.c index dd93ea7..9c75519 100755 --- a/mixvbp/vbp_manager/vbp_h264_parser.c +++ b/mixvbp/vbp_manager/vbp_h264_parser.c @@ -26,6 +26,7 @@ #include #include "h264.h" +#include "h264parse.h" #include "vbp_loader.h" #include "vbp_utils.h" #include "vbp_h264_parser.h" @@ -185,6 +186,33 @@ uint32 
vbp_init_parser_entries_h264(vbp_context *pcontext)
         return VBP_LOAD;
     }

+    pcontext->parser_ops->is_payload_start = dlsym(pcontext->fd_parser, "viddec_h264_payload_start");
+    if (NULL == pcontext->parser_ops->is_payload_start)
+    {
+        ETRACE("Failed to set entry point.");
+    }
+
+    pcontext->parser_ops->parse_syntax_threading = dlsym(pcontext->fd_parser, "viddec_h264_threading_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax_threading)
+    {
+        ETRACE("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->post_parse_threading = dlsym(pcontext->fd_parser, "viddec_h264_post_parse");
+    if (NULL == pcontext->parser_ops->post_parse_threading)
+    {
+        ETRACE("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->query_thread_parsing_cap = dlsym(pcontext->fd_parser, "viddec_h264_query_thread_parsing_cap");
+    if (NULL == pcontext->parser_ops->query_thread_parsing_cap)
+    {
+        ETRACE("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
     /* entry point not needed */
     pcontext->parser_ops->is_frame_start = NULL;
     return VBP_OK;
@@ -1022,6 +1050,7 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index)
     {
         /* partial frame */
         query_data->num_pictures = 1;
+        WTRACE("partial frame found.");
     }

     if (query_data->num_pictures > MAX_NUM_PICTURES)
@@ -1041,7 +1070,7 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index)
     pic_parms = pic_data->pic_parms;

     // relax this condition to support partial frame parsing
-
+    // TODO: do we need to support partial frames?
     //if (parser->info.SliceHeader.first_mb_in_slice == 0)
     {
         /**
@@ -1131,6 +1160,8 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index)
     pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag;

     pic_parms->frame_num = parser->info.SliceHeader.frame_num;
+
+
 }
@@ -1161,7 +1192,6 @@ static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index)
         pic_parms->num_ref_idx_l1_default_active_minus1 = parser->info.active_PPS.num_ref_idx_l1_active - 1;
     }
 #endif
-
     return VBP_OK;
 }
@@ -1682,11 +1712,11 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i)
         break;

     case h264_NAL_UNIT_TYPE_SPS:
-        ITRACE("SPS header is parsed.");
+        VTRACE("SPS header is parsed.");
         break;

     case h264_NAL_UNIT_TYPE_PPS:
-        ITRACE("PPS header is parsed.");
+        VTRACE("PPS header is parsed.");
         break;

     case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
@@ -1694,11 +1724,11 @@ uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i)
         break;

     case h264_NAL_UNIT_TYPE_EOSeq:
-        ITRACE("EOSeq is parsed.");
+        VTRACE("EOSeq is parsed.");
         break;

     case h264_NAL_UNIT_TYPE_EOstream:
-        ITRACE("EOStream is parsed");
+        VTRACE("EOStream is parsed");
         break;

     default:
diff --git a/mixvbp/vbp_manager/vbp_mp42_parser.c b/mixvbp/vbp_manager/vbp_mp42_parser.c
index 9b4c63f..b954b38 100755
--- a/mixvbp/vbp_manager/vbp_mp42_parser.c
+++ b/mixvbp/vbp_manager/vbp_mp42_parser.c
@@ -124,6 +124,11 @@ uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext)
     /* entry point not needed */
     pcontext->parser_ops->flush = NULL;

+    pcontext->parser_ops->is_payload_start = NULL;
+    pcontext->parser_ops->parse_syntax_threading = NULL;
+    pcontext->parser_ops->post_parse_threading = NULL;
+    pcontext->parser_ops->query_thread_parsing_cap = NULL;
+
     return VBP_OK;
 }
diff --git a/mixvbp/vbp_manager/vbp_thread.c b/mixvbp/vbp_manager/vbp_thread.c
new file mode 100644
index 0000000..3a2aa09
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_thread.c
@@ -0,0 +1,634 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2013 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+#include <pthread.h>
+#include <semaphore.h>
+#include <sys/time.h>
+
+#include "vbp_thread.h"
+#include "vbp_loader.h"
+
+/* consider a quad core with hyper-threading */
+#define MAX_AUTO_THREADS 8
+
+#define THREADING_SCHEME_BUNDLE
+
+typedef long long int nsecs_t;
+
+static nsecs_t systemTime()
+{
+    struct timeval t;
+    gettimeofday(&t, NULL);
+    return 1000000 * t.tv_sec + t.tv_usec; // microseconds, despite the nsecs_t name
+}
+
+
+typedef struct PerThreadContext {
+    pthread_t thread;
+
+    int32_t index;                  // thread index referenced by thread itself when needed.
+    int32_t thread_init;
+    struct ThreadContext* parent;
+
+    pthread_cond_t input_cond;      // Used to wait for a new packet from the main thread.
+    pthread_cond_t progress_cond;   // Used by child threads to wait for progress to change.
+    pthread_cond_t output_cond;     // Used by the main thread to wait for frames to finish.
+
+    pthread_mutex_t mutex;          // Mutex used to protect the contents of the PerThreadContext.
+    pthread_mutex_t progress_mutex; // Mutex used to protect frame progress values and progress_cond.
+
+    vbp_context* vbpctx;
+    viddec_pm_cxt_t* pmctx;         // Working parser context
+    viddec_pm_cxt_t* input_pmctx;   // Input parser context
+    void* codec_data;               // Points to specific codec data that holds output, all threads share
+                                    // one instance
+    uint32_t start_item;            // start of parsing item num for bundle parsing
+
+    enum {
+        STATE_INPUT_WAIT,
+        STATE_WORKING,
+        STATE_EXIT
+    } state;
+
+} PerThreadContext;
+
+typedef struct ThreadContext {
+    PerThreadContext* threads[MAX_AUTO_THREADS]; // The contexts for each thread.
+    PerThreadContext* prev_thread;  // The last thread submit_packet() was called on.
+    int delaying;                   // Set for the first N packets, where N is the number of threads.
+                                    // While it is set, vbp_thread_parse_syntax won't return any results
+
+    uint32_t next_finished;         // Index of the next thread to return output from.
+    uint32_t next_parsing;          // Index of the next thread to submit an input packet to.
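+                                    // Together these two indices implement a simple ring: input is
+                                    // submitted to threads[next_parsing] and output is drained from
+                                    // threads[next_finished], both wrapping at active_thread_count
+                                    // (see vbp_thread_parse_syntax below).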
+
+    uint32_t active_thread_count;   // number of threads that need to be warmed up
+
+    sem_t finish_sem;               // semaphore of finish work to synchronize working thread and main thread
+    uint32_t start_item_to_parse;
+    uint32_t last_item_to_parse;
+
+} ThreadContext;
+
+
+int32_t get_cpu_count()
+{
+    int32_t cpu_num;
+#if defined(_SC_NPROC_ONLN)
+    cpu_num = sysconf(_SC_NPROC_ONLN);
+#elif defined(_SC_NPROCESSORS_ONLN)
+    cpu_num = sysconf(_SC_NPROCESSORS_ONLN);
+#endif
+    return cpu_num;
+}
+
+
+void set_thread_affinity_mask(cpu_set_t mask)
+{
+    int err, syscallres;
+    pid_t pid = gettid();
+    syscallres = syscall(__NR_sched_setaffinity, pid, sizeof(mask), &mask);
+    if (syscallres)
+    {
+        ETRACE("Error in the syscall setaffinity.");
+    }
+}
+
+
+static void vbp_update_parser_for_item(viddec_pm_cxt_t *cxt,
+                                       viddec_pm_cxt_t *src_cxt,
+                                       uint32 item)
+{
+
+    /* set up bitstream buffer */
+    cxt->getbits.list = src_cxt->getbits.list;
+
+    /* setup buffer pointer */
+    cxt->getbits.bstrm_buf.buf = src_cxt->getbits.bstrm_buf.buf;
+
+
+    /* setup bitstream parser */
+    cxt->getbits.bstrm_buf.buf_index = src_cxt->list.data[item].stpos;
+    cxt->getbits.bstrm_buf.buf_st = src_cxt->list.data[item].stpos;
+    cxt->getbits.bstrm_buf.buf_end = src_cxt->list.data[item].edpos;
+
+    /* It is possible to end up with buf_offset not equal zero. */
+    cxt->getbits.bstrm_buf.buf_bitoff = 0;
+    cxt->getbits.au_pos = 0;
+    cxt->getbits.list_off = 0;
+    cxt->getbits.phase = 0;
+    cxt->getbits.emulation_byte_counter = 0;
+
+    cxt->list.start_offset = src_cxt->list.data[item].stpos;
+    cxt->list.end_offset = src_cxt->list.data[item].edpos;
+    cxt->list.total_bytes = src_cxt->list.data[item].edpos - src_cxt->list.data[item].stpos;
+
+}
+
+
+
+static void* parser_worker_thread(void* arg)
+{
+    PerThreadContext* p = arg;
+    ThreadContext* t_cxt = p->parent;
+    vbp_context* vbpctx = p->vbpctx;
+    viddec_pm_cxt_t* pm_cxt = p->pmctx;
+    viddec_parser_ops_t* ops = vbpctx->parser_ops;
+
+
+// Probably better not to pin each parsing thread to its own CPU core;
+// keeping all CPUs fully occupied can even hurt performance.
+// Current experimental solution: only the main thread gets CPU affinity.
+#if 0
+    cpu_set_t mask;
+    CPU_ZERO(&mask);
+    CPU_SET(p->index, &mask);  // cpu affinity is set to same num as thread index
+    set_thread_affinity_mask(mask);
+#endif
+
+    pthread_mutex_lock(&p->mutex);
+
+    nsecs_t t0;
+    while (1) {
+        while (p->state == STATE_INPUT_WAIT) {
+            pthread_cond_wait(&p->input_cond, &p->mutex);
+        }
+
+        if (p->state == STATE_WORKING) {
+            // Input is ready; run the actual parse.
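+            // p->index also selects the slice slot inside the shared codec_data
+            // output area; the main thread blocks on output_cond until this
+            // worker returns to STATE_INPUT_WAIT before reading the result.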
+ //t0 = systemTime(); + sleep(0); + ops->parse_syntax_threading((void *)p->pmctx, p->codec_data, p->index); + + pthread_mutex_lock(&p->progress_mutex); + p->state = STATE_INPUT_WAIT; + + pthread_cond_broadcast(&p->progress_cond); + pthread_cond_signal(&p->output_cond); + pthread_mutex_unlock(&p->progress_mutex); + } else if (p->state == STATE_EXIT) { + break; + } + } + pthread_mutex_unlock(&p->mutex); + pthread_exit(NULL); + return NULL; +} + +static void* parser_worker_thread_bundle(void* arg) +{ + PerThreadContext* p = arg; + ThreadContext* t_cxt = p->parent; + vbp_context* vbpctx = p->vbpctx; + viddec_parser_ops_t* ops = vbpctx->parser_ops; + +// probably not to make each parsing thread have affinity to a cpu core +// having cpus fully occupied will even lead to low performance +// current experimental solution: just make main thread have affinity +#if 1 + cpu_set_t mask; + CPU_ZERO(&mask); + CPU_SET(p->index, &mask); // cpu affinity is set to same num as thread index + set_thread_affinity_mask(mask); +#endif + + pthread_mutex_lock(&p->mutex); + + nsecs_t t0; + while (1) { + while (p->state == STATE_INPUT_WAIT) { + pthread_cond_wait(&p->input_cond, &p->mutex); + } + + if (p->state == STATE_WORKING) { + uint32_t working_item = p->start_item; // start point + uint32_t slice_index = 0 + p->index; // start point + + while (working_item <= t_cxt->last_item_to_parse) { + vbp_update_parser_for_item(p->pmctx, p->input_pmctx, working_item); + ops->parse_syntax_threading((void *)p->pmctx, p->codec_data, slice_index); + + working_item += t_cxt->active_thread_count; + slice_index += t_cxt->active_thread_count; + } + + pthread_mutex_lock(&p->progress_mutex); + p->state = STATE_INPUT_WAIT; + + pthread_cond_broadcast(&p->progress_cond); + pthread_mutex_unlock(&p->progress_mutex); + } else if (p->state == STATE_EXIT) { + break; + } + } + pthread_mutex_unlock(&p->mutex); + pthread_exit(NULL); + return NULL; +} + + +uint32_t update_context_from_input(viddec_pm_cxt_t* dest, + viddec_pm_cxt_t* source) +{ + if ((dest == NULL) || (source == NULL) || (dest == source)) { + ETRACE("%s error", __func__); + return 1; + } + /* set up bitstream buffer */ + dest->getbits.list = source->getbits.list; + + /* buffer pointer */ + dest->getbits.bstrm_buf.buf = source->getbits.bstrm_buf.buf; + + /* bitstream parser */ + dest->getbits.bstrm_buf.buf_index = source->getbits.bstrm_buf.buf_index; + dest->getbits.bstrm_buf.buf_st = source->getbits.bstrm_buf.buf_st; + dest->getbits.bstrm_buf.buf_end = source->getbits.bstrm_buf.buf_end; + + /* It is possible to end up with buf_offset not equal zero. */ + dest->getbits.bstrm_buf.buf_bitoff = 0; + dest->getbits.au_pos = 0; + dest->getbits.list_off = 0; + dest->getbits.phase = 0; + dest->getbits.emulation_byte_counter = 0; + + dest->list.start_offset = source->list.start_offset; + dest->list.end_offset = source->list.end_offset; + dest->list.total_bytes = source->list.total_bytes; + return 0; +} + +uint32_t update_context_to_output(viddec_pm_cxt_t* dest, + viddec_pm_cxt_t* source) +{ + if ((dest == NULL) || (source == NULL) || (dest == source)) { + ETRACE("%s error", __func__); + return 1; + } + + /* bitstream parser */ + dest->getbits.bstrm_buf.buf_index = source->getbits.bstrm_buf.buf_index; + dest->getbits.bstrm_buf.buf_st = source->getbits.bstrm_buf.buf_st; + dest->getbits.bstrm_buf.buf_end = source->getbits.bstrm_buf.buf_end; + + /* It is possible to end up with buf_offset not equal zero. 
+    dest->getbits.bstrm_buf.buf_bitoff = source->getbits.bstrm_buf.buf_bitoff;
+    dest->getbits.au_pos = source->getbits.au_pos;
+    dest->getbits.list_off = source->getbits.list_off;
+    dest->getbits.phase = source->getbits.phase;
+    dest->getbits.emulation_byte_counter = source->getbits.emulation_byte_counter;
+    dest->getbits.is_emul_reqd = source->getbits.is_emul_reqd;
+
+    dest->list.start_offset = source->list.start_offset;
+    dest->list.end_offset = source->list.end_offset;
+    dest->list.total_bytes = source->list.total_bytes;
+
+    return 0;
+}
+
+
+
+uint32_t feed_thread_input(PerThreadContext* p, void* parent)
+{
+    ThreadContext* t_context = p->parent;
+    viddec_pm_cxt_t* pm_cxt = (viddec_pm_cxt_t*) parent;
+
+    //nsecs_t t0 = systemTime();
+    if (pm_cxt->getbits.bstrm_buf.buf == NULL) {
+        return 1;
+    }
+
+    pthread_mutex_lock(&p->mutex);
+
+    if (p->state == STATE_WORKING) {
+        pthread_mutex_lock(&p->progress_mutex);
+        while (p->state == STATE_WORKING) {
+            pthread_cond_wait(&p->progress_cond, &p->progress_mutex);
+        }
+        pthread_mutex_unlock(&p->progress_mutex);
+    }
+
+    /* Now update the input to the working thread */
+    update_context_from_input(p->pmctx, pm_cxt);
+    p->codec_data = (void*)&(pm_cxt->codec_data[0]);
+
+    p->state = STATE_WORKING;
+    t_context->next_parsing++;
+
+    //t0 = systemTime();
+    pthread_cond_signal(&p->input_cond);
+    pthread_mutex_unlock(&p->mutex);
+
+    return 0;
+}
+
+void vbp_thread_init(vbp_context* pcontext)
+{
+    int i;
+    ThreadContext* t_context = NULL;
+    int32_t thread_count = pcontext->thread_count;
+    int32_t err = 0;
+
+#ifdef THREADING_SCHEME_BUNDLE
+    ITRACE("%s, threading_parse_scheme set to SCHEME_BUNDLE", __func__);
+    pcontext->threading_parse_scheme = SCHEME_BUNDLE;
+#else
+    ITRACE("%s, threading_parse_scheme set to SCHEME_SEQUENTIAL", __func__);
+    pcontext->threading_parse_scheme = SCHEME_SEQUENTIAL;
+#endif
+
+    if (thread_count == 0) {
+        int32_t cpu_num = get_cpu_count();
+        if (cpu_num > 1) {
+            if (pcontext->threading_parse_scheme == SCHEME_BUNDLE) {
+                thread_count = pcontext->thread_count = cpu_num - 1;
+            } else {
+                thread_count = pcontext->thread_count = cpu_num - 1;
+            }
+        }
+        else {
+            thread_count = pcontext->thread_count = 1;
+        }
+    }
+
+    pcontext->thread_opaque = t_context =
+        (ThreadContext*)malloc(sizeof(ThreadContext));
+    if (t_context != NULL) {
+        t_context->active_thread_count = thread_count; //default active count
+
+        t_context->delaying = 1;
+        t_context->next_parsing = t_context->next_finished = 0;
+
+        ITRACE("%s, creating %d parsing threads.", __func__, thread_count);
+        for (i = 0; i < thread_count; i++) {
+            t_context->threads[i] = (PerThreadContext*)malloc(sizeof(PerThreadContext));
+            assert(t_context->threads[i] != NULL);
+            PerThreadContext* p = t_context->threads[i];
+
+            if (p != NULL) {
+                p->index = i;
+                p->parent = t_context;
+                p->vbpctx = pcontext;
+                p->pmctx = vbp_malloc(viddec_pm_cxt_t, 1);
+                viddec_pm_utils_bstream_init(&(p->pmctx->getbits), NULL, 0);
+
+                pthread_mutex_init(&p->mutex, NULL);
+                pthread_mutex_init(&p->progress_mutex, NULL);
+                pthread_cond_init(&p->input_cond, NULL);
+                pthread_cond_init(&p->progress_cond, NULL);
+                pthread_cond_init(&p->output_cond, NULL);
+
+                p->state = STATE_INPUT_WAIT;
+
+                if (pcontext->threading_parse_scheme == SCHEME_SEQUENTIAL) {
+                    err = pthread_create(&p->thread, NULL, parser_worker_thread, p);
+                } else {
+                    err = pthread_create(&p->thread, NULL, parser_worker_thread_bundle, p);
+                }
+
+                p->thread_init = !err;
+            }
+        }
+    }
+#if 1
+    ITRACE("%s, set_thread_affinity_mask", __func__);
+    cpu_set_t mask;
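+    // The pinning below matches the comment at the top of this file: on the
+    // assumed quad-core part, workers would occupy the lower cores while the
+    // main thread takes the last one (CPU 3).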
+    CPU_ZERO(&mask);
+    CPU_SET(3, &mask);  // CPUs 0..thread_count-1 serve the worker threads;
+                        // the last CPU is reserved for the main thread
+    set_thread_affinity_mask(mask);
+#endif
+}
+
+
+void vbp_thread_free(vbp_context* pcontext)
+{
+    ITRACE("%s", __func__);
+    ThreadContext* t_context = pcontext->thread_opaque;
+    int i;
+    int thread_count = pcontext->thread_count;
+
+    for (i = 0; i < thread_count; i++) {
+        PerThreadContext *p = t_context->threads[i];
+
+        pthread_mutex_lock(&p->mutex);
+        p->state = STATE_EXIT;
+        pthread_cond_signal(&p->input_cond);
+        pthread_mutex_unlock(&p->mutex);
+
+        if (p->thread_init) {
+            pthread_join(p->thread, NULL);
+        }
+        p->thread_init = 0;
+    }
+
+    for (i = 0; i < thread_count; i++) {
+        PerThreadContext *p = t_context->threads[i];
+
+        pthread_mutex_destroy(&p->mutex);
+        pthread_mutex_destroy(&p->progress_mutex);
+        pthread_cond_destroy(&p->input_cond);
+        pthread_cond_destroy(&p->progress_cond);
+        pthread_cond_destroy(&p->output_cond);
+
+        if (p->pmctx != NULL) {
+            free(p->pmctx);
+        }
+
+        free(p);
+        p = NULL;
+    }
+
+    free(t_context);
+}
+
+/*
+ * Entry function of multi-thread parsing
+ *
+ * parent - A viddec_pm_cxt_t type parser management context,
+ *          which contains the input stream.
+ * ctxt   - Codec specific parser context, actually codec_data[] in
+ *          viddec_pm_cxt_t, used for storing parsed output
+ * return - 0 indicates no output was produced (the threads are still
+ *          warming up)
+ *          1 indicates there is output
+ *
+ * see viddec_parser_ops.h
+ * uint32_t (*fn_parse_syntax) (void *parent, void *ctxt);
+ */
+uint32_t vbp_thread_parse_syntax(void* parent,
+                                 void* ctxt,
+                                 vbp_context* pcontext)
+{
+    ThreadContext* t_context = pcontext->thread_opaque;
+    uint32_t finished = t_context->next_finished;
+
+    if ((parent == NULL) || (ctxt == NULL)) {
+        return 0;
+    }
+
+    PerThreadContext* p;
+
+    nsecs_t t0, t1;
+    //t0 = t1 = systemTime();
+
+    /* Submit an input packet to the next parser thread */
+    p = t_context->threads[t_context->next_parsing];
+    feed_thread_input(p, parent);
+
+    //p->state = STATE_WORKING;
+    //t_context->next_parsing++;
+
+    //t0 = systemTime();
+    //pthread_cond_signal(&p->input_cond);
+
+    //t0 = systemTime();
+
+    if ((t_context->delaying == 1) &&
+        (t_context->next_parsing > (t_context->active_thread_count - 1))) {
+        t_context->delaying = 0;
+    }
+
+    /* If we are still in the early stage of warming up each thread, indicate we got no output */
+    if (t_context->delaying == 1) {
+        return 0;
+    }
+
+    /* return an available parsed frame from the oldest thread;
+     * notice that we start getting output from thread[0] right after submitting input
+     * to thread[active_count-1]
+     */
+    p = t_context->threads[finished++];
+
+    if (p->state != STATE_INPUT_WAIT) {
+        pthread_mutex_lock(&p->progress_mutex);
+        while (p->state != STATE_INPUT_WAIT) {
+            pthread_cond_wait(&p->output_cond, &p->progress_mutex);
+        }
+        pthread_mutex_unlock(&p->progress_mutex);
+    }
+
+
+    if (finished > (t_context->active_thread_count - 1)) {
+        finished = 0;
+    }
+
+    if (t_context->next_parsing >= t_context->active_thread_count) {
+        t_context->next_parsing = 0;
+    }
+
+    t_context->next_finished = finished;
+
+    update_context_to_output((viddec_pm_cxt_t*) parent, p->pmctx);
+
+    return 1;
+}
+
+
+/*
+ * Entry function of multi-thread parsing
+ *
+ * parent - A viddec_pm_cxt_t type parser management context,
+ *          which contains the input stream.
+ * ctxt   - Codec specific parser context, actually codec_data[] in
+ *          viddec_pm_cxt_t, used for storing parsed output
+ * start_item - index of the first item to parse; passed in to trigger
+ *          multi-threaded parsing
+ *
+ */
+uint32_t vbp_thread_parse_syntax_bundle(void* parent,
+                                        void* ctxt,
+                                        vbp_context* pcontext,
+                                        uint32_t start_item)
+{
+    ThreadContext* t_context = pcontext->thread_opaque;
+    if ((parent == NULL) || (ctxt == NULL)) {
+        return 0;
+    }
+
+    PerThreadContext* p = NULL;
+    viddec_pm_cxt_t* pm_cxt = (viddec_pm_cxt_t*) parent;
+    t_context->start_item_to_parse = start_item;
+    t_context->last_item_to_parse = pm_cxt->list.num_items - 1;
+
+    sem_init(&(t_context->finish_sem), 0, 0);
+
+    uint32_t i;
+    for (i = 0; i < t_context->active_thread_count; i++) {
+        p = t_context->threads[i];
+        pthread_mutex_lock(&p->mutex); // pair with the unlock below (cf. feed_thread_input)
+        p->start_item = start_item + i;
+
+        if (p->state == STATE_WORKING) {
+            pthread_mutex_lock(&p->progress_mutex);
+            while (p->state == STATE_WORKING) {
+                pthread_cond_wait(&p->progress_cond, &p->progress_mutex);
+            }
+            pthread_mutex_unlock(&p->progress_mutex);
+        }
+
+        p->codec_data = (void*)&(pm_cxt->codec_data[0]);
+        p->input_pmctx = pm_cxt;
+
+        p->state = STATE_WORKING;
+
+        pthread_cond_signal(&p->input_cond);
+        pthread_mutex_unlock(&p->mutex);
+
+    }
+    return 1;
+}
+
+
+/*
+ * Set the number of active threads, since not all threads need to be warmed
+ * up when a frame has fewer slices than the threads we created.
+ *
+ * active_count - number of threads to activate.
+ */
+uint32_t vbp_thread_set_active(vbp_context* pcontext,
+                               uint32_t active_count)
+{
+    ThreadContext* t_context = pcontext->thread_opaque;
+
+    if (t_context != NULL) {
+        if (active_count < pcontext->thread_count) {
+            t_context->active_thread_count = active_count;
+        } else { //reset to the default
+            t_context->active_thread_count = pcontext->thread_count;
+        }
+
+        //reset to the default
+        t_context->delaying = 1;
+        t_context->next_parsing = t_context->next_finished = 0;
+    }
+    return 0;
+}
+
+uint32_t vbp_thread_get_active(vbp_context* pcontext)
+{
+    ThreadContext* t_context = pcontext->thread_opaque;
+
+    if (t_context != NULL) {
+        return t_context->active_thread_count;
+    }
+    return 0;
+}
+
+
diff --git a/mixvbp/vbp_manager/vbp_thread.h b/mixvbp/vbp_manager/vbp_thread.h
new file mode 100644
index 0000000..e182ac1
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_thread.h
@@ -0,0 +1,51 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2013 Intel Corporation. All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors. Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors. The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions. No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+* +*/ + +#ifndef VBP_THREAD_H +#define VBP_THREAD_H + +#define _GNU_SOURCE +#include +#include + +#include +#include "vbp_utils.h" +#include "include/viddec_pm.h" +#include + + +void vbp_thread_init(vbp_context *pcontext); + +void vbp_thread_free(vbp_context *pcontext); + +uint32_t vbp_thread_parse_syntax(void* parent, + void* ctxt, + vbp_context* pcontext); + +uint32_t vbp_thread_set_active(vbp_context* pcontext, + uint32_t active_count); + +uint32_t vbp_thread_get_active(vbp_context* pcontext); + +#endif diff --git a/mixvbp/vbp_manager/vbp_utils.c b/mixvbp/vbp_manager/vbp_utils.c index 1647269..f0cb94b 100755 --- a/mixvbp/vbp_manager/vbp_utils.c +++ b/mixvbp/vbp_manager/vbp_utils.c @@ -39,6 +39,21 @@ #include "vbp_h264secure_parser.h" #endif +#ifdef USE_MULTI_THREADING +#include "vbp_thread.h" +#endif + +#define LEAST_SLICES_MULTI_THREADING 10 + +typedef long long int nsecs_t; + +static nsecs_t systemTime() +{ + struct timeval t; + gettimeofday(&t, NULL); + return 1000000 * t.tv_sec + t.tv_usec; +} + /* buffer counter */ uint32 buffer_counter = 0; @@ -303,12 +318,36 @@ cleanup: } +static void vbp_setup_parser_for_item(viddec_pm_cxt_t *cxt, uint32 item) +{ + /* setup bitstream parser */ + cxt->getbits.bstrm_buf.buf_index = cxt->list.data[item].stpos; + cxt->getbits.bstrm_buf.buf_st = cxt->list.data[item].stpos; + cxt->getbits.bstrm_buf.buf_end = cxt->list.data[item].edpos; + + /* It is possible to end up with buf_offset not equal zero. */ + cxt->getbits.bstrm_buf.buf_bitoff = 0; + + cxt->getbits.au_pos = 0; + cxt->getbits.list_off = 0; + cxt->getbits.phase = 0; + cxt->getbits.emulation_byte_counter = 0; + + cxt->list.start_offset = cxt->list.data[item].stpos; + cxt->list.end_offset = cxt->list.data[item].edpos; + cxt->list.total_bytes = cxt->list.data[item].edpos - cxt->list.data[item].stpos; + +} + /** * * parse the elementary sample buffer or codec configuration data * */ +//static uint32 frame_num = 0; +//static nsecs_t total_time_of_multislice = 0; +//static uint32 frame_multislice_num = 0; static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_flag) { viddec_pm_cxt_t *cxt = pcontext->parser_cxt; @@ -339,6 +378,7 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f } */ + uint32_t multi_parse_done = 0; /* populate the list.*/ if (init_data_flag) @@ -364,48 +404,176 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f // TODO: check if cxt->getbits.is_emul_reqd is set properly - for (i = 0; i < cxt->list.num_items; i++) - { - /* setup bitstream parser */ - cxt->getbits.bstrm_buf.buf_index = cxt->list.data[i].stpos; - cxt->getbits.bstrm_buf.buf_st = cxt->list.data[i].stpos; - cxt->getbits.bstrm_buf.buf_end = cxt->list.data[i].edpos; - - /* It is possible to end up with buf_offset not equal zero. */ - cxt->getbits.bstrm_buf.buf_bitoff = 0; - - cxt->getbits.au_pos = 0; - cxt->getbits.list_off = 0; - cxt->getbits.phase = 0; - cxt->getbits.emulation_byte_counter = 0; - - cxt->list.start_offset = cxt->list.data[i].stpos; - cxt->list.end_offset = cxt->list.data[i].edpos; - cxt->list.total_bytes = cxt->list.data[i].edpos - cxt->list.data[i].stpos; - - /* invoke parse entry point to parse the buffer */ - error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0])); - - /* can't return error for now. 
Neet further investigation */
-        if (0 != error)
-        {
-            VTRACE("failed to parse the syntax: %d!", error);
+    //frame_num ++;
+
+    nsecs_t t0, t1, t2, tt0, tt1, tt2;
+    t0 = t1 = t2 = tt0 = tt1 = tt2 = 0;
+    //t0 = systemTime();
+
+    if (0 == pcontext->is_multithread_parsing_enabled) {
+        for (i = 0; i < cxt->list.num_items; i++) {
+            vbp_setup_parser_for_item(cxt, i);
+            /* invoke parse entry point to parse the buffer */
+            //t1 = systemTime();
+            error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
+            //t2 = systemTime();
+            //tt1 += t2 - t1;
+
+            /* can't return error for now. Need further investigation */
+            if (0 != error) {
+                WTRACE("failed to parse the syntax: %d!", error);
+            }
+
+            /* process parsing result */
+            //t2 = systemTime();
+            error = pcontext->func_process_parsing_result(pcontext, i);
+            //tt2 += systemTime() - t2;
+
+            if (VBP_MULTI == error) {
+                ITRACE("Multiple frames are found in one buffer.");
+                return VBP_OK;
+            }
+            else if (0 != error) {
+                ETRACE("Failed to process parsing result.");
+                return error;
+            }
+        }
-
-        /* process parsing result */
-        error = pcontext->func_process_parsing_result(pcontext, i);
-
-        if (VBP_MULTI == error) {
-            ITRACE("Multiple frames are found in one bufffer.");
-            return VBP_OK;
+    }
+    // Multi-threading option is enabled
+    else if (1 == pcontext->is_multithread_parsing_enabled) {
+
+        int got_output = 0;
+        int is_payload_start = 0;
+        int single_parse_count = 0;
+        int use_thread_parsing = 0;
+
+        for (i = 0; i < cxt->list.num_items; i++) {
+
+            vbp_setup_parser_for_item(cxt, i);
+
+            // we assume no configuration data following slice data in a frame's buffer
+            is_payload_start = ops->is_payload_start((void *)cxt);
+
+            if (is_payload_start == 0) {
+                //t1 = systemTime();
+                error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
+                //tt1 += systemTime() - t1;
+
+                //t2 = systemTime();
+                error = pcontext->func_process_parsing_result(pcontext, i);
+                single_parse_count++;
+                //tt2 += systemTime() - t2;
+            } else if (((cxt->list.num_items - single_parse_count) < LEAST_SLICES_MULTI_THREADING)) {
+                //t1 = systemTime();
+                error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
+                //tt1 += systemTime() - t1;
+
+                //t2 = systemTime();
+                error = pcontext->func_process_parsing_result(pcontext, i);
+                //tt2 += systemTime() - t2;
+            } else {
+                use_thread_parsing = 1;
+                break;
+            }
+
+            if (VBP_MULTI == error) {
+                ITRACE("Multiple frames are found in one buffer.");
+                return VBP_OK;
+            }
+            else if (0 != error) {
+                ETRACE("Failed to process parsing result.");
+                return error;
+            }
+        }
+
+        if (use_thread_parsing) {
+            vbp_thread_set_active(pcontext, cxt->list.num_items - single_parse_count);
+            uint32_t thread_count = vbp_thread_get_active(pcontext);
+
+            //t1 = systemTime();
+            if (pcontext->threading_parse_scheme == SCHEME_BUNDLE) {
+                // Multithread parsing Scheme-Bundle-Input
+                // This interface pushes all remaining slice headers to the worker threads in one call.
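+                // One call fans the remaining items out across all active
+                // threads (worker k takes every active_thread_count-th item),
+                // then the loop below drains the per-slice results in order.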
+                vbp_thread_parse_syntax_bundle((void *)cxt,
+                                               (void *)&(cxt->codec_data[0]),
+                                               pcontext,
+                                               i); // first slice's item num
+
+                uint32_t j;
+                for (j = i; j < cxt->list.num_items; j++) {
+                    error = ops->post_parse_threading((void *)cxt,
+                                                      (void *)&(cxt->codec_data[0]),
+                                                      j - single_parse_count); // slice index
+                    error = pcontext->func_process_parsing_result(pcontext, j); // item num
+                }
+                //tt1 += systemTime() - t1;
+
+            } else if (pcontext->threading_parse_scheme == SCHEME_SEQUENTIAL) {
+                // Multithread parsing Scheme-Sequential-Input.
+                // This interface pushes one slice header to a worker thread at a time.
+                uint32_t j;
+                for (j = i; j < cxt->list.num_items; j++) {
+                    vbp_setup_parser_for_item(cxt, j);
+
+                    //t1 = systemTime();
+                    got_output = vbp_thread_parse_syntax((void *)cxt,
+                                                         (void *)&(cxt->codec_data[0]),
+                                                         pcontext);
+                    //tt1 += systemTime() - t1;
+
+                    if (got_output == 1) {
+                        //t2 = systemTime();
+                        error = ops->post_parse_threading((void *)cxt,
+                                                          (void *)&(cxt->codec_data[0]),
+                                                          //slice count with thread delay
+                                                          (j - (thread_count - 1) - single_parse_count) % thread_count);
+
+                        error = pcontext->func_process_parsing_result(pcontext,
+                                                                      // item count with thread delay
+                                                                      j - (thread_count - 1));
+
+                        multi_parse_done++;
+                        //tt2 += systemTime() - t2;
+                    }
+                }
+
+                int need_to_clearance = thread_count - 1;
+                cxt->getbits.bstrm_buf.buf = NULL;
+                for (i = cxt->list.num_items - need_to_clearance; i < cxt->list.num_items; i++) {
+                    //t1 = systemTime();
+                    got_output = vbp_thread_parse_syntax((void *)cxt,
+                                                         (void *)&(cxt->codec_data[0]),
+                                                         pcontext);
+                    //&got_output);
+                    //tt1 += systemTime() - t1;
+
+                    if (got_output == 1) {
+                        //t2 = systemTime();
+                        error = ops->post_parse_threading((void *)cxt,
+                                                          (void *)&(cxt->codec_data[0]),
+                                                          (i - single_parse_count) % thread_count);
+
+                        error = pcontext->func_process_parsing_result(pcontext, i);
+                        multi_parse_done++;
+                        //tt2 += systemTime() - t2;
+                    }
+                }
+            }
+        }
+    }
+#if 0
+    tt0 = systemTime() - t0;
+    if (cxt->list.num_items > 8) {
+        total_time_of_multislice += tt0;
+        frame_multislice_num++;
+        ETRACE("### ================== TIME CALCULATION =======================");
+        ETRACE("### ------------item num: %d", cxt->list.num_items);
+        ETRACE("### ------------The frame[%d] cost time: %lld us", frame_num-1, tt0);
+        ETRACE("### ------------Accumulated multi-slice frames: %d", frame_multislice_num);
+        ETRACE("### ------------Accumulated average time that multislice frame cost: %lld us", total_time_of_multislice/frame_multislice_num);
+        ETRACE("### ================== TIME CALCULATION END ===================");
+    }
+#endif
     return VBP_OK;
 }
@@ -463,6 +631,21 @@ uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext)
     *ppcontext = pcontext;
     error = VBP_OK;

+
+    /* default is not enabled */
+    pcontext->is_multithread_parsing_enabled = 0;
+
+#if (!defined USE_AVC_SHORT_FORMAT && !defined USE_SLICE_HEADER_PARSING)
+#ifdef USE_MULTI_THREADING
+    if (pcontext->parser_ops->query_thread_parsing_cap != NULL) {
+        if (pcontext->parser_ops->query_thread_parsing_cap() == 1) {
+            pcontext->is_multithread_parsing_enabled = 1;
+            ITRACE("Multi-thread parsing is enabled.");
+            vbp_thread_init(pcontext);
+        }
+    }
+#endif
+#endif

 cleanup:

     if (VBP_OK != error)
@@ -483,6 +666,12 @@ cleanup:
  */
 uint32 vbp_utils_destroy_context(vbp_context *pcontext)
 {
+#ifdef USE_MULTI_THREADING
+    if (1 == pcontext->is_multithread_parsing_enabled) {
+        vbp_thread_free(pcontext);
+    }
+#endif
+
     /* entry point, no need to validate input parameters.
*/ vbp_utils_free_parser_memory(pcontext); vbp_utils_uninitialize_context(pcontext); diff --git a/mixvbp/vbp_manager/vbp_utils.h b/mixvbp/vbp_manager/vbp_utils.h index 633159c..7cf9321 100755 --- a/mixvbp/vbp_manager/vbp_utils.h +++ b/mixvbp/vbp_manager/vbp_utils.h @@ -31,6 +31,9 @@ #include "viddec_pm.h" #include "vbp_trace.h" #include +#include "vbp_loader.h" + +#include #define MAGIC_NUMBER 0x0DEADBEEF #define MAX_WORKLOAD_ITEMS 1000 @@ -68,6 +71,13 @@ typedef uint32 (*function_populate_query_data)(vbp_context* cxt); typedef uint32 (*function_update_data)(vbp_context* cxt, void *newdata, uint32 size); #endif +typedef enum +{ + SCHEME_BUNDLE = 0, + SCHEME_SEQUENTIAL, +} threading_parse_scheme_t; + + struct vbp_context_t { /* magic number */ @@ -94,6 +104,13 @@ struct vbp_context_t /* parser type specific data*/ void *parser_private; + /* multithreading */ + uint32 thread_count; + void *thread_opaque; + uint32 is_multithread_parsing_enabled; + + threading_parse_scheme_t threading_parse_scheme; + function_init_parser_entries func_init_parser_entries; function_allocate_query_data func_allocate_query_data; function_free_query_data func_free_query_data; diff --git a/mixvbp/vbp_manager/vbp_vc1_parser.c b/mixvbp/vbp_manager/vbp_vc1_parser.c index 12e28e9..65b6f76 100755 --- a/mixvbp/vbp_manager/vbp_vc1_parser.c +++ b/mixvbp/vbp_manager/vbp_vc1_parser.c @@ -111,6 +111,10 @@ uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext) /* entry point not needed */ pcontext->parser_ops->flush = NULL; + pcontext->parser_ops->is_payload_start = NULL; + pcontext->parser_ops->parse_syntax_threading = NULL; + pcontext->parser_ops->post_parse_threading = NULL; + pcontext->parser_ops->query_thread_parsing_cap = NULL; return VBP_OK; } diff --git a/mixvbp/vbp_manager/vbp_vp8_parser.c b/mixvbp/vbp_manager/vbp_vp8_parser.c index 73d9281..9ac097d 100755 --- a/mixvbp/vbp_manager/vbp_vp8_parser.c +++ b/mixvbp/vbp_manager/vbp_vp8_parser.c @@ -67,6 +67,10 @@ uint32 vbp_init_parser_entries_vp8(vbp_context *pcontext) pcontext->parser_ops->is_frame_start = NULL; pcontext->parser_ops->flush = NULL; + pcontext->parser_ops->is_payload_start = NULL; + pcontext->parser_ops->parse_syntax_threading = NULL; + pcontext->parser_ops->post_parse_threading = NULL; + pcontext->parser_ops->query_thread_parsing_cap = NULL; return VBP_OK; } diff --git a/mixvbp/vbp_plugin/h264/h264parse.c b/mixvbp/vbp_plugin/h264/h264parse.c index cbb04fe..330c5e6 100755 --- a/mixvbp/vbp_plugin/h264/h264parse.c +++ b/mixvbp/vbp_plugin/h264/h264parse.c @@ -65,7 +65,7 @@ h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOf /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ - +// keep for h264 secure parse h264_Status h264_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader) { //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; @@ -114,14 +114,43 @@ h264_Status h264_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader } } - pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); - pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag? 
\ - (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) : \ - ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1); + pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); + pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag ? + (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) : + ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1); return H264_STATUS_OK; }; //// End of h264_active_par_set + + +h264_Status h264_set_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader) +{ + uint32_t pps_addr = pInfo->PPS_PADDR_GL + + SliceHeader->pic_parameter_id * sizeof(pic_param_set); + SliceHeader->active_PPS = (pic_param_set*)pps_addr; + pic_param_set* active_PPS = SliceHeader->active_PPS; + + if (active_PPS->seq_parameter_set_id >= MAX_NUM_SPS) + { + return H264_PPS_INVALID_PIC_ID; /// Invalid PPS detected + } + + uint32_t sps_addr = pInfo->SPS_PADDR_GL + \ + active_PPS->seq_parameter_set_id * sizeof(seq_param_set_all); + SliceHeader->active_SPS = (seq_param_set_used*)sps_addr; + seq_param_set_used* active_SPS = SliceHeader->active_SPS; + + if (active_SPS->seq_parameter_set_id >= MAX_NUM_SPS) + { + return H264_PPS_INVALID_PIC_ID; //// Invalid SPS detected + } + + return H264_STATUS_OK; +}; // End of h264_set_active_par_set + + + /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ /* ------------------------------------------------------------------------------------------ */ @@ -173,38 +202,183 @@ h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_ SliceHeader->sh_error |= 4; } - } else { + } else { SliceHeader->sh_error |= 1; } + return retStatus; +} - //if(SliceHeader->sh_error) { - //pInfo->wl_err_flag |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE; - //} +h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +{ + h264_Status retStatus = H264_STATUS_ERROR; + + //////////////////////////////////////////////////// + //// Parse slice header info + //// Part1: not depend on the active PPS/SPS + //// Part2/3: depend on the active parset + ////////////////////////////////////////////////// + SliceHeader->sh_error = 0; + if (h264_Parse_Slice_Header_1(parent, pInfo, SliceHeader) == H264_STATUS_OK) + { + retStatus = h264_set_active_par_set(pInfo, SliceHeader); + } - ////////////////////////////////// - //// Parse slice data (MB loop) - ////////////////////////////////// - //retStatus = h264_Parse_Slice_Data(pInfo); + if (retStatus == H264_STATUS_OK) { - //uint32_t data = 0; - //if( viddec_pm_peek_bits(parent, &data, 32) == -1) - //retStatus = H264_STATUS_ERROR; + switch (SliceHeader->active_SPS->profile_idc) + { + case h264_ProfileBaseline: + case h264_ProfileMain: + case h264_ProfileExtended: + SliceHeader->active_PPS->transform_8x8_mode_flag = 0; + SliceHeader->active_PPS->pic_scaling_matrix_present_flag = 0; + SliceHeader->active_PPS->second_chroma_qp_index_offset = + SliceHeader->active_PPS->chroma_qp_index_offset; + + default: + break; + } + + if (h264_Parse_Slice_Header_2_opt(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + SliceHeader->sh_error |= 2; + } + else if (h264_Parse_Slice_Header_3_opt(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + SliceHeader->sh_error |= 4; + } + } else { + SliceHeader->sh_error |= 1; } - 
//h264_Parse_rbsp_trailing_bits(pInfo); return retStatus; } +h264_Status h264_Post_Parsing_Slice_Header(void *parent, h264_Info* pInfo, h264_Slice_Header_t *next_SliceHeader) +{ + + h264_Status retStatus = H264_STATUS_OK; + + memcpy(&pInfo->active_PPS, next_SliceHeader->active_PPS, sizeof(pic_param_set)); + memcpy(&pInfo->active_SPS, next_SliceHeader->active_SPS, sizeof(seq_param_set_used)); + + if ((1 == pInfo->primary_pic_type_plus_one) && (pInfo->got_start)) { + pInfo->img.recovery_point_found |= 4; + } + pInfo->primary_pic_type_plus_one = 0; + + pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1); + pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag? \ + (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) : \ + ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1); + + pInfo->sei_information.recovery_point = 0; + + pInfo->img.current_slice_num++; + + + //////////////////////////////////////////////////////////////////////////// + // Processing if new picture coming + // 1) if it's the second field + // 2) if it's a new frame + //////////////////////////////////////////////////////////////////////////// + //AssignQuantParam(pInfo); + if (h264_is_new_picture_start(pInfo, *next_SliceHeader, pInfo->SliceHeader)) + { + // + ///----------------- New Picture.boundary detected-------------------- + // + pInfo->img.g_new_pic++; + + // + // Complete previous picture + h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old + //h264_hdr_post_poc(0, 0, use_old); + // + // Update slice structures: + h264_update_old_slice(pInfo, *next_SliceHeader); //cur->old; next->cur; + // + // 1) if resolution change: reset dpb + // 2) else: init frame store + h264_update_img_info(pInfo); //img, dpb + // + ///----------------- New frame.boundary detected-------------------- + // + pInfo->img.second_field = h264_is_second_field(pInfo); + if (pInfo->img.second_field == 0) + { + pInfo->img.g_new_frame = 1; + h264_dpb_update_queue_dangling_field(pInfo); + // + /// DPB management + /// 1) check the gaps + /// 2) assign fs for non-exist frames + /// 3) fill the gaps + /// 4) store frame into DPB if ... 
+            //
+            //if(pInfo->SliceHeader.redundant_pic_cnt)
+            {
+                h264_dpb_gaps_in_frame_num_mem_management(pInfo);
+            }
+        }
+        //
+        /// Decoding POC
+        h264_hdr_decoding_poc (pInfo, 0, 0);
+        //
+        /// Init Frame Store for next frame
+        h264_dpb_init_frame_store (pInfo);
+        pInfo->img.current_slice_num = 1;
+        if (pInfo->SliceHeader.first_mb_in_slice != 0)
+        {
+            //// Getting here means we lost slices at the beginning, since there is no FMO support
+            pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17);
+        }
+        /// Emit out the New Frame
+        if (pInfo->img.g_new_frame)
+        {
+            h264_parse_emit_start_new_frame(parent, pInfo);
+        }
+
+        h264_parse_emit_current_pic(parent, pInfo);
+    }
+    else ///////////////////////////////////////////////////// If Not a picture start
+    {
+        //
+        /// Update slice structures: cur->old; next->cur;
+        h264_update_old_slice(pInfo, *next_SliceHeader);
+        //
+        /// 1) if resolution change: reset dpb
+        /// 2) else: update img info
+        h264_update_img_info(pInfo);
+    }
+
+
+    //////////////////////////////////////////////////////////////
+    // DPB reference list init and reordering
+    //////////////////////////////////////////////////////////////
+
+    //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field
+    h264_update_frame_type(pInfo);
+
+#ifndef USE_AVC_SHORT_FORMAT
+    h264_dpb_update_ref_lists(pInfo);
+#endif
+    /// Emit out the current "good" slice
+    h264_parse_emit_current_slice(parent, pInfo);
+
+    return retStatus;
+}
+

 /* ------------------------------------------------------------------------------------------ */
 /* ------------------------------------------------------------------------------------------ */
 /* ------------------------------------------------------------------------------------------ */

-h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc)
+h264_Status h264_Parse_NAL_Unit(void *parent, uint8_t* nal_unit_type, uint8_t *nal_ref_idc)
 {
     h264_Status ret = H264_STATUS_ERROR;
@@ -212,7 +386,7 @@ h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref
     if (viddec_pm_get_bits(parent, &code, 8) != -1)
     {
         *nal_ref_idc = (uint8_t)((code >> 5) & 0x3);
-        pInfo->nal_unit_type = (uint8_t)((code >> 0) & 0x1f);
+        *nal_unit_type = (uint8_t)((code >> 0) & 0x1f);
         ret = H264_STATUS_OK;
     }
@@ -430,43 +604,58 @@ int32_t h264_is_second_field(h264_Info * pInfo)
 /* ------------------------------------------------------------------------------------------ */
 /* ------------------------------------------------------------------------------------------ */

-int32_t h264_is_new_picture_start(h264_Info * pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice)
+int32_t h264_is_new_picture_start(h264_Info * pInfo,
+                                  h264_Slice_Header_t cur_slice,
+                                  h264_Slice_Header_t old_slice)
 {
     int result = 0;

-    if (pInfo->number_of_first_au_info_nal_before_first_slice)
-    {
+    if (pInfo->number_of_first_au_info_nal_before_first_slice) {
         pInfo->number_of_first_au_info_nal_before_first_slice = 0;
         return 1;
     }

-    result |= (old_slice.pic_parameter_id != cur_slice.pic_parameter_id);
-    result |= (old_slice.frame_num != cur_slice.frame_num);
-    result |= (old_slice.field_pic_flag != cur_slice.field_pic_flag);
-    if (cur_slice.field_pic_flag && old_slice.field_pic_flag)
-    {
-        result |= (old_slice.bottom_field_flag != cur_slice.bottom_field_flag);
+    if (old_slice.pic_parameter_id != cur_slice.pic_parameter_id) {
+        return 1;
+    }
+    if (old_slice.frame_num != cur_slice.frame_num) {
+        return 1;
+    }
+    if (old_slice.field_pic_flag != cur_slice.field_pic_flag) {
+        return 1;
+    }
+    if (cur_slice.field_pic_flag && old_slice.field_pic_flag) {
+        if (old_slice.bottom_field_flag != cur_slice.bottom_field_flag) {
+            return 1;
+        }
     }

-    result |= (old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \
-              ((old_slice.nal_ref_idc == 0) || (cur_slice.nal_ref_idc == 0));
-    result |= ( old_slice.idr_flag != cur_slice.idr_flag);
+    if ((old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \
+        ((old_slice.nal_ref_idc == 0) || (cur_slice.nal_ref_idc == 0))) {
+        return 1;
+    }
+    if (old_slice.idr_flag != cur_slice.idr_flag) {
+        return 1;
+    }

-    if (cur_slice.idr_flag && old_slice.idr_flag)
-    {
-        result |= (old_slice.idr_pic_id != cur_slice.idr_pic_id);
+    if (cur_slice.idr_flag && old_slice.idr_flag) {
+        if (old_slice.idr_pic_id != cur_slice.idr_pic_id) {
+            return 1;
+        }
     }

-    if (pInfo->active_SPS.pic_order_cnt_type == 0)
-    {
-        result |= (old_slice.pic_order_cnt_lsb != cur_slice.pic_order_cnt_lsb);
-        result |= (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom);
+    if (pInfo->active_SPS.pic_order_cnt_type == 0) {
+        if ((old_slice.pic_order_cnt_lsb != cur_slice.pic_order_cnt_lsb) || \
+            (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom)) {
+            return 1;
+        }
     }

-    if (pInfo->active_SPS.pic_order_cnt_type == 1)
-    {
-        result |= (old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]);
-        result |= (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1]);
+    if (pInfo->active_SPS.pic_order_cnt_type == 1) {
+        if ((old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]) || \
+            (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1])) {
+            return 1;
+        }
     }

     return result;
diff --git a/mixvbp/vbp_plugin/h264/h264parse_pps.c b/mixvbp/vbp_plugin/h264/h264parse_pps.c
index 2c4cc52..b4098ec 100755
--- a/mixvbp/vbp_plugin/h264/h264parse_pps.c
+++ b/mixvbp/vbp_plugin/h264/h264parse_pps.c
@@ -22,6 +22,8 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa
     }
     PictureParameterSet->pic_parameter_set_id = (uint8_t)code;

+    VTRACE("parsing PPS: id = %d", PictureParameterSet->pic_parameter_set_id);
+
     code = h264_GetVLCElement(parent, pInfo, false);
     if (code > MAX_NUM_SPS - 1)
     {
@@ -30,6 +32,8 @@ h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicPa
     }
     PictureParameterSet->seq_parameter_set_id = (uint8_t)code;

+    VTRACE("parsing PPS: referring SPS id = %d", PictureParameterSet->seq_parameter_set_id);
+
     ///// entropy_coding_mode_flag
     viddec_pm_get_bits(parent, &code, 1);
     PictureParameterSet->entropy_coding_mode_flag = (uint8_t)code;
diff --git a/mixvbp/vbp_plugin/h264/h264parse_sh.c b/mixvbp/vbp_plugin/h264/h264parse_sh.c
index 625e146..33ccbdd 100755
--- a/mixvbp/vbp_plugin/h264/h264parse_sh.c
+++ b/mixvbp/vbp_plugin/h264/h264parse_sh.c
@@ -95,13 +95,17 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice
     }

     ////// Check valid or not of first_mb_in_slice
+    int32_t PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1);
+    int32_t FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag ?
+ (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) : + ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1); if (SliceHeader->structure == FRAME) { - max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs; + max_mb_num = FrameHeightInMbs * PicWidthInMbs; } else { - max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs / 2; + max_mb_num = FrameHeightInMbs * PicWidthInMbs / 2; } ///if(pInfo->img.MbaffFrameFlag) @@ -111,7 +115,10 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice } if (SliceHeader->first_mb_in_slice >= max_mb_num) + { + WTRACE("first mb in slice exceed max mb num."); break; + } if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) { @@ -165,6 +172,121 @@ h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice return ret; } +h264_Status h264_Parse_Slice_Header_2_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +{ + h264_Status ret = H264_SliceHeader_ERROR; + + uint32_t code; + int32_t max_mb_num=0; + + do { + //////////////////////////////////// Slice header part 2////////////////// + + /// Frame_num + viddec_pm_get_bits(parent, &code, SliceHeader->active_SPS->log2_max_frame_num_minus4 + 4); + SliceHeader->frame_num = (int32_t)code; + + /// Picture structure + SliceHeader->structure = FRAME; + SliceHeader->field_pic_flag = 0; + SliceHeader->bottom_field_flag = 0; + + if (!(SliceHeader->active_SPS->sps_disp.frame_mbs_only_flag)) + { + /// field_pic_flag + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->field_pic_flag = (uint8_t)code; + + if (SliceHeader->field_pic_flag) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->bottom_field_flag = (uint8_t)code; + + SliceHeader->structure = SliceHeader->bottom_field_flag ? BOTTOM_FIELD: TOP_FIELD; + } + } + + ////// Check valid or not of first_mb_in_slice + int32_t PicWidthInMbs = (SliceHeader->active_SPS->sps_disp.pic_width_in_mbs_minus1 + 1); + int32_t FrameHeightInMbs = SliceHeader->active_SPS->sps_disp.frame_mbs_only_flag ? 
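
// Note the difference from h264_Parse_Slice_Header_2 above: this _opt
// variant reads every parameter-set field through SliceHeader->active_SPS
// and SliceHeader->active_PPS rather than pInfo->active_SPS/active_PPS, so
// each slice header carries the parameter sets that were active for it and
// a worker thread can parse it without depending on the parser's shared
// "currently active" state; the shared state is reconciled later in
// h264_Post_Parsing_Slice_Header.
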
+ (SliceHeader->active_SPS->sps_disp.pic_height_in_map_units_minus1 + 1) : + ((SliceHeader->active_SPS->sps_disp.pic_height_in_map_units_minus1 + 1) << 1); + if (SliceHeader->structure == FRAME) + { + max_mb_num = FrameHeightInMbs * PicWidthInMbs; + } + else + { + max_mb_num = FrameHeightInMbs * PicWidthInMbs / 2; + } + + ///if(pInfo->img.MbaffFrameFlag) + if (SliceHeader->active_SPS->sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) + { + SliceHeader->first_mb_in_slice <<= 1; + } + + if (SliceHeader->first_mb_in_slice >= max_mb_num) + { + WTRACE("first mb in slice exceed max mb num."); + break; + } + + if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) + { + SliceHeader->idr_pic_id = h264_GetVLCElement(parent, pInfo, false); + } + + if (SliceHeader->active_SPS->pic_order_cnt_type == 0) + { + viddec_pm_get_bits(parent, &code , SliceHeader->active_SPS->log2_max_pic_order_cnt_lsb_minus4 + 4); + SliceHeader->pic_order_cnt_lsb = (uint32_t)code; + + if ((SliceHeader->active_PPS->pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt_bottom = h264_GetVLCElement(parent, pInfo, true); + } + else + { + SliceHeader->delta_pic_order_cnt_bottom = 0; + } + } + + if ((SliceHeader->active_SPS->pic_order_cnt_type == 1) && + !(SliceHeader->active_SPS->delta_pic_order_always_zero_flag)) + { + SliceHeader->delta_pic_order_cnt[0] = h264_GetVLCElement(parent, pInfo, true); + if ((SliceHeader->active_PPS->pic_order_present_flag) && !(SliceHeader->field_pic_flag)) + { + SliceHeader->delta_pic_order_cnt[1] = h264_GetVLCElement(parent, pInfo, true); + } + } + + if (SliceHeader->active_PPS->redundant_pic_cnt_present_flag) + { + SliceHeader->redundant_pic_cnt = h264_GetVLCElement(parent, pInfo, false); + if (SliceHeader->redundant_pic_cnt > 127) + break; + } + else + { + SliceHeader->redundant_pic_cnt = 0; + } + + ret = H264_STATUS_OK; + } while (0); + + //////////// FMO is not supported curently, so comment out the following code + //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) ) + //{ + // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile + //} + + return ret; +} + + + /*-----------------------------------------------------------------------------------------*/ // slice header 3 // (direct_spatial_mv_pred_flag, num_ref_idx, pic_list_reorder, PWT, ref_pic_remark, alpha, beta, etc) @@ -244,14 +366,13 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice //// //// Parse Pred_weight_table but not store it becasue it will be reparsed in HW //// - if (((pInfo->active_PPS.weighted_pred_flag) && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) + if (((pInfo->active_PPS.weighted_pred_flag) + && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) + || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) { viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); - pInfo->h264_pwt_enabled = 1; - pInfo->h264_pwt_start_byte_offset = byte_offset; - pInfo->h264_pwt_start_bit_offset = bits_offset; if (h264_Parse_Pred_Weight_Table(parent, pInfo, SliceHeader) != H264_STATUS_OK) { @@ -259,18 +380,190 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, 
h264_Info* pInfo, h264_Slice } viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + } + + - if (0 == bits_offset) + //// + //// Parse Ref_pic marking if there + //// + if (SliceHeader->nal_ref_idc != 0) + { + if (h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, SliceHeader) != H264_STATUS_OK) { - pInfo->h264_pwt_end_byte_offset = byte_offset-1; - pInfo->h264_pwt_end_bit_offset = 8; + WTRACE("ref pic marking failed during slice header parsing."); + break; + } + } + + if ((pInfo->active_PPS.entropy_coding_mode_flag) && (SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) + { + SliceHeader->cabac_init_idc = h264_GetVLCElement(parent, pInfo, false); + } + else + { + SliceHeader->cabac_init_idc = 0; + } + + if (SliceHeader->cabac_init_idc > 2) + { + break; + } + + SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true); + if ((SliceHeader->slice_qp_delta > (25 - pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26 + pInfo->active_PPS.pic_init_qp_minus26))) + { + WTRACE("slice_qp_delta value is invalid."); + break; + } + + if ((SliceHeader->slice_type == h264_PtypeSP) || (SliceHeader->slice_type == h264_PtypeSI)) + { + if (SliceHeader->slice_type == h264_PtypeSP) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->sp_for_switch_flag = (uint8_t)code; + + } + SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true); + + if ((SliceHeader->slice_qs_delta > (25 - pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) + { + WTRACE("slice_qp_delta value is invalid."); + break; + } + } + if (pInfo->active_PPS.deblocking_filter_control_present_flag) + { + SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false); + if (SliceHeader->disable_deblocking_filter_idc != 1) + { + SliceHeader->slice_alpha_c0_offset_div2 = h264_GetVLCElement(parent, pInfo, true); + slice_alpha_c0_offset = SliceHeader->slice_alpha_c0_offset_div2 << 1; + if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12) + { + break; + } + + SliceHeader->slice_beta_offset_div2 = h264_GetVLCElement(parent, pInfo, true); + slice_beta_offset = SliceHeader->slice_beta_offset_div2 << 1; + if (slice_beta_offset < -12 || slice_beta_offset > 12) + { + break; + } } else { - pInfo->h264_pwt_end_byte_offset = byte_offset; - pInfo->h264_pwt_end_bit_offset = bits_offset; + SliceHeader->slice_alpha_c0_offset_div2 = 0; + SliceHeader->slice_beta_offset_div2 = 0; + } + } + + ret = H264_STATUS_OK; + } while (0); + + //////////// FMO is not supported curently, so comment out the following code + //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) ) + //{ + // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile + //} + + return ret; +} + + +h264_Status h264_Parse_Slice_Header_3_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) +{ + h264_Status ret = H264_SliceHeader_ERROR; + + //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; + int32_t slice_alpha_c0_offset, slice_beta_offset; + uint32_t code; + uint32_t bits_offset =0, byte_offset =0; + uint8_t is_emul =0; + + do { + /// direct_spatial_mv_pred_flag + if (SliceHeader->slice_type == h264_PtypeB) + { + viddec_pm_get_bits(parent, &code , 1); + SliceHeader->direct_spatial_mv_pred_flag = (uint8_t)code; + } + else + { + 
SliceHeader->direct_spatial_mv_pred_flag = 0; + } + + // + // Reset ref_idx and Overide it if exist + // + SliceHeader->num_ref_idx_l0_active = SliceHeader->active_PPS->num_ref_idx_l0_active; + SliceHeader->num_ref_idx_l1_active = SliceHeader->active_PPS->num_ref_idx_l1_active; + + if ((SliceHeader->slice_type == h264_PtypeP) || + (SliceHeader->slice_type == h264_PtypeSP) || + (SliceHeader->slice_type == h264_PtypeB)) + { + viddec_pm_get_bits(parent, &code, 1); + SliceHeader->num_ref_idx_active_override_flag = (uint8_t)code; + + if (SliceHeader->num_ref_idx_active_override_flag) + { + SliceHeader->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false) + 1; + if (SliceHeader->slice_type == h264_PtypeB) + { + SliceHeader->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false) + 1; + } + } + } + + if (SliceHeader->slice_type != h264_PtypeB) + { + SliceHeader->num_ref_idx_l1_active = 0; + } + + if ((SliceHeader->num_ref_idx_l0_active > MAX_NUM_REF_FRAMES) || + (SliceHeader->num_ref_idx_l1_active > MAX_NUM_REF_FRAMES)) + { + WTRACE("ref index greater than expected during slice header parsing."); + break; + } + +#ifdef USE_AVC_SHORT_FORMAT + bool keepParsing = false; + keepParsing = h264_is_new_picture_start(pInfo, *SliceHeader, pInfo->SliceHeader) && + (SliceHeader->nal_ref_idc != 0); + if (!keepParsing) + { + ITRACE("short format parsing: no need to go on!"); + ret = H264_STATUS_OK; + break; + } +#endif + if (h264_Parse_Ref_Pic_List_Reordering(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + WTRACE("ref list reordering failed during slice header parsing."); + break; + } + + + //// + //// Parse Pred_weight_table but not store it becasue it will be reparsed in HW + //// + if (((SliceHeader->active_PPS->weighted_pred_flag) + && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) + || ((SliceHeader->active_PPS->weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB))) + { + + //viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + + if (h264_Parse_Pred_Weight_Table_opt(parent, pInfo, SliceHeader) != H264_STATUS_OK) + { + break; } + viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul); + } @@ -287,7 +580,9 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice } } - if ((pInfo->active_PPS.entropy_coding_mode_flag) && (SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI)) + if ((SliceHeader->active_PPS->entropy_coding_mode_flag) && + (SliceHeader->slice_type != h264_PtypeI) && + (SliceHeader->slice_type != h264_PtypeSI)) { SliceHeader->cabac_init_idc = h264_GetVLCElement(parent, pInfo, false); } @@ -302,7 +597,8 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice } SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true); - if ((SliceHeader->slice_qp_delta > (25 - pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26 + pInfo->active_PPS.pic_init_qp_minus26))) + if ((SliceHeader->slice_qp_delta > (25 - SliceHeader->active_PPS->pic_init_qp_minus26)) || + (SliceHeader->slice_qp_delta < -(26 + SliceHeader->active_PPS->pic_init_qp_minus26))) { WTRACE("slice_qp_delta value is invalid."); break; @@ -318,13 +614,14 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice } SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true); - if ((SliceHeader->slice_qs_delta > (25 - pInfo->active_PPS.pic_init_qs_minus26)) || 
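
// The bounds enforced in these checks follow from SliceQPY =
// 26 + pic_init_qp_minus26 + slice_qp_delta, which must stay in [0, 51]
// for 8-bit content; hence slice_qp_delta must lie in
// [-(26 + pic_init_qp_minus26), 25 - pic_init_qp_minus26]. E.g. with
// pic_init_qp_minus26 = 0 the legal delta range is [-26, 25]. The
// slice_qs_delta check for SP/SI slices applies the same arithmetic with
// pic_init_qs_minus26.
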
(SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) ) + if ((SliceHeader->slice_qs_delta > (25 - SliceHeader->active_PPS->pic_init_qs_minus26)) || + (SliceHeader->slice_qs_delta < -(26 + SliceHeader->active_PPS->pic_init_qs_minus26)) ) { WTRACE("slice_qp_delta value is invalid."); break; } } - if (pInfo->active_PPS.deblocking_filter_control_present_flag) + if (SliceHeader->active_PPS->deblocking_filter_control_present_flag) { SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false); if (SliceHeader->disable_deblocking_filter_idc != 1) @@ -363,6 +660,7 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice } + /*--------------------------------------------------------------------------------------------------*/ // // The syntax elements reordering_of_pic_nums_idc, abs_diff_pic_num_minus1, and long_term_pic_num @@ -377,7 +675,7 @@ h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice // /*--------------------------------------------------------------------------------------------------*/ -h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) +h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader) { //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader; int32_t reorder= -1; @@ -400,14 +698,18 @@ h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h2 return H264_SliceHeader_ERROR; } - SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); - if ((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1)) + SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = + h264_GetVLCElement(parent, pInfo, false); + if ((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || + (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1)) { - SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = + h264_GetVLCElement(parent, pInfo, false); } else if (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 2) { - SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = + h264_GetVLCElement(parent, pInfo, false); } } while (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] != 3); @@ -430,13 +732,16 @@ h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h2 return H264_SliceHeader_ERROR; } SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false); - if ((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1)) + if ((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || + (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1)) { - SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = + h264_GetVLCElement(parent, pInfo, false); } else if 
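
// reordering_of_pic_nums_idc command values consumed by this loop, for
// reference:
//   0 : subtract abs_diff_pic_num_minus1 + 1 from the predicted pic num
//   1 : add      abs_diff_pic_num_minus1 + 1 to  the predicted pic num
//   2 : move long-term picture long_term_pic_num to the current index
//   3 : end of the reordering command list (the loop terminator)
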
(SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 2) { - SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = + h264_GetVLCElement(parent, pInfo, false); } } while (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] != 3); } @@ -514,7 +819,8 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli } else { - SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_weight_l1[i] = + (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; } @@ -535,7 +841,8 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli { for (j = 0; j < 2; j++) { - SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = + (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; } } @@ -548,6 +855,113 @@ h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Sli } ///// End of h264_Parse_Pred_Weight_Table +h264_Status h264_Parse_Pred_Weight_Table_opt(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader) +{ + uint32_t i = 0, j = 0; + uint32_t flag; + + SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(parent, pInfo, false); + + if (SliceHeader->active_SPS->sps_disp.chroma_format_idc != 0) + { + SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(parent,pInfo, false); + } + + for (i = 0; i < SliceHeader->num_ref_idx_l0_active; i++) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.luma_weight_l0_flag = flag; + + if (SliceHeader->sh_predwttbl.luma_weight_l0_flag) + { + SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(parent, pInfo, true); + } + else + { + SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0; + } + + if (SliceHeader->active_SPS->sps_disp.chroma_format_idc != 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.chroma_weight_l0_flag = flag; + + if (SliceHeader->sh_predwttbl.chroma_weight_l0_flag) + { + for (j = 0; j < 2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for (j = 0; j < 2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = + (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0; + } + } + } + + } + + if (SliceHeader->slice_type == h264_PtypeB) + { + for (i = 0; i < SliceHeader->num_ref_idx_l1_active; i++) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.luma_weight_l1_flag = flag; + + if (SliceHeader->sh_predwttbl.luma_weight_l1_flag) + { + SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(parent, pInfo, true); + } + else + { + SliceHeader->sh_predwttbl.luma_weight_l1[i] = + 
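
// The flag == 0 defaults assigned here are the identity under the explicit
// weighted-prediction formula ((pred * w + 2^(denom-1)) >> denom) + offset:
// e.g. luma_log2_weight_denom = 6 gives w = 1 << 6 = 64, and
// ((p * 64 + 32) >> 6) + 0 == p for every sample value p.
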
(1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom); + SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0; + } + + if (SliceHeader->active_SPS->sps_disp.chroma_format_idc != 0) + { + viddec_pm_get_bits(parent, (uint32_t *)&flag, 1); + SliceHeader->sh_predwttbl.chroma_weight_l1_flag = flag; + + if (SliceHeader->sh_predwttbl.chroma_weight_l1_flag) + { + for (j = 0; j < 2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = + h264_GetVLCElement(parent, pInfo, true); + SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = + h264_GetVLCElement(parent, pInfo, true); + } + } + else + { + for (j = 0; j < 2; j++) + { + SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = + (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom); + SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0; + } + } + } + + } + } + + return H264_STATUS_OK; +} + + + /*--------------------------------------------------------------------------------------------------*/ // The syntax elements specify marking of the reference pictures. // 1)IDR: no_output_of_prior_pics_flag, @@ -600,25 +1014,32 @@ h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent, h264_Info* pInfo,h264_S { if (i < NUM_MMCO_OPERATIONS) { - SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = h264_GetVLCElement(parent, pInfo, false); - if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3)) + SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = + h264_GetVLCElement(parent, pInfo, false); + if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || + (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3)) { - SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = + h264_GetVLCElement(parent, pInfo, false); } if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2) { - SliceHeader->sh_dec_refpic.long_term_pic_num[i] = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_dec_refpic.long_term_pic_num[i] = + h264_GetVLCElement(parent, pInfo, false); } - if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6)) + if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || + (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6)) { - SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = + h264_GetVLCElement(parent, pInfo, false); } if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4) { - SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = h264_GetVLCElement(parent, pInfo, false); + SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = + h264_GetVLCElement(parent, pInfo, false); } if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5) diff --git a/mixvbp/vbp_plugin/h264/include/h264.h b/mixvbp/vbp_plugin/h264/include/h264.h index 7015c37..28699f8 100755 --- a/mixvbp/vbp_plugin/h264/include/h264.h +++ b/mixvbp/vbp_plugin/h264/include/h264.h @@ -681,56 +681,8 @@ extern "C" { int8_t chroma_offset_l1[32][2]; } h264_pred_weight_table; - typedef struct _h264_Slice_Header - { - int32_t first_mb_in_slice; //UE - int32_t frame_num; //UV - int32_t pic_order_cnt_lsb; //UV - int32_t 
delta_pic_order_cnt_bottom; //SE - int32_t delta_pic_order_cnt[2]; //SE - int32_t redundant_pic_cnt; //UE - - uint32_t num_ref_idx_l0_active; //UE - uint32_t num_ref_idx_l1_active; //UE - - int32_t slice_qp_delta; //SE - int32_t slice_qs_delta; //SE - int32_t slice_alpha_c0_offset_div2; //SE - int32_t slice_beta_offset_div2; //SE - int32_t slice_group_change_cycle; //UV - - h264_pred_weight_table sh_predwttbl; - - ///// Flags or IDs - //h264_ptype_t slice_type; //UE - uint8_t slice_type; - uint8_t nal_ref_idc; - uint8_t structure; - uint8_t pic_parameter_id; //UE - - uint8_t field_pic_flag; - uint8_t bottom_field_flag; - uint8_t idr_flag; //UE - uint8_t idr_pic_id; //UE - - uint8_t sh_error; - uint8_t cabac_init_idc; //UE - uint8_t sp_for_switch_flag; - uint8_t disable_deblocking_filter_idc; //UE - - uint8_t direct_spatial_mv_pred_flag; - uint8_t num_ref_idx_active_override_flag; - int16_t current_slice_nr; - - //// For Ref list reordering - h264_Dec_Ref_Pic_Marking_t sh_dec_refpic; - h264_Ref_Pic_List_Reordering_t sh_refpic_l0; - h264_Ref_Pic_List_Reordering_t sh_refpic_l1; - - } h264_Slice_Header_t; - +#define MAX_USER_DATA_SIZE 1024 -#define MAX_USER_DATA_SIZE 1024 typedef struct _h264_user_data_t { h264_sei_payloadtype user_data_type; @@ -828,6 +780,71 @@ extern "C" { } seq_param_set_all, *seq_param_set_all_ptr; + typedef struct _h264_Slice_Header + { + int32_t first_mb_in_slice; //UE + int32_t frame_num; //UV + int32_t pic_order_cnt_lsb; //UV + int32_t delta_pic_order_cnt_bottom; //SE + int32_t delta_pic_order_cnt[2]; //SE + int32_t redundant_pic_cnt; //UE + + uint32_t num_ref_idx_l0_active; //UE + uint32_t num_ref_idx_l1_active; //UE + + int32_t slice_qp_delta; //SE + int32_t slice_qs_delta; //SE + int32_t slice_alpha_c0_offset_div2; //SE + int32_t slice_beta_offset_div2; //SE + int32_t slice_group_change_cycle; //UV + + h264_pred_weight_table sh_predwttbl; + + ///// Flags or IDs + //h264_ptype_t slice_type; //UE + uint8_t slice_type; + uint8_t nal_ref_idc; + uint8_t structure; + uint8_t pic_parameter_id; //UE + + uint8_t field_pic_flag; + uint8_t bottom_field_flag; + uint8_t idr_flag; //UE + uint8_t idr_pic_id; //UE + + uint8_t sh_error; + uint8_t cabac_init_idc; //UE + uint8_t sp_for_switch_flag; + uint8_t disable_deblocking_filter_idc; //UE + + uint8_t direct_spatial_mv_pred_flag; + uint8_t num_ref_idx_active_override_flag; + int16_t current_slice_nr; + + //// For Ref list reordering + h264_Dec_Ref_Pic_Marking_t sh_dec_refpic; + h264_Ref_Pic_List_Reordering_t sh_refpic_l0; + h264_Ref_Pic_List_Reordering_t sh_refpic_l1; + + seq_param_set_used* active_SPS; + pic_param_set* active_PPS; + uint32_t parse_done; // flag to indicate parse done + + // temp field for multithread parsing to store bitstream info + uint32_t bstrm_buf_buf_index; + uint32_t bstrm_buf_buf_st; + uint32_t bstrm_buf_buf_end; + uint32_t bstrm_buf_buf_bitoff; + uint32_t au_pos; + uint32_t list_off; + uint32_t phase; + uint32_t emulation_byte_counter; + uint32_t is_emul_reqd; + int32_t list_start_offset; + int32_t list_end_offset; + int32_t list_total_bytes; + + } h264_Slice_Header_t; ///// Image control parameter//////////// typedef struct _h264_img_par @@ -932,12 +949,13 @@ extern "C" { //// Structures //// need to gurantee active_SPS and active_PPS start from 4-bytes alignment address seq_param_set_used active_SPS; - pic_param_set active_PPS; + pic_param_set active_PPS; h264_Slice_Header_t SliceHeader; OldSliceParams old_slice; sei_info sei_information; + h264_Slice_Header_t* working_sh[150]; // working slice header 
for multithreading h264_img_par img; diff --git a/mixvbp/vbp_plugin/h264/include/h264parse.h b/mixvbp/vbp_plugin/h264/include/h264parse.h index 2b1c7a6..6adee42 100755 --- a/mixvbp/vbp_plugin/h264/include/h264parse.h +++ b/mixvbp/vbp_plugin/h264/include/h264parse.h @@ -89,7 +89,7 @@ extern "C" { //////////////////////////////////////////////////////////////////// //NAL - extern h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc); + extern h264_Status h264_Parse_NAL_Unit(void *parent, uint8_t *nal_unit_type, uint8_t *nal_ref_idc); ////// Slice header extern h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); @@ -97,6 +97,12 @@ extern "C" { extern h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); extern h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); +// For multi-thread parsing optimized. + extern h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Slice_Header_2_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Slice_Header_3_opt(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader); + + extern h264_Status h264_Post_Parsing_Slice_Header(void *parent, h264_Info* pInfo, h264_Slice_Header_t *next_SliceHeader); ////// SPS extern h264_Status h264_Parse_SeqParameterSet(void *parent, h264_Info * pInfo,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame); @@ -113,6 +119,8 @@ extern "C" { extern h264_Status h264_Parse_Pred_Weight_Table(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); extern h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); + extern h264_Status h264_Parse_Pred_Weight_Table_opt(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader); + ///// Mem functions diff --git a/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c index 62e6ab3..eb85022 100755 --- a/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c +++ b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c @@ -204,10 +204,12 @@ void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) { - uint32_t i=0,nitems=0; + uint32_t i=0,nitems=0; ///////////////////////// Frame attributes////////////////////////// +// Remove workload related stuff +# if 0 //Push data into current workload if first frame or frame_boundary already detected by non slice nal if ( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal)) { @@ -227,6 +229,7 @@ void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) pInfo->is_current_workload_done=1; } +#endif ///////////////////// SPS///////////////////// // h264_parse_emit_sps(parent, pInfo); diff --git a/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c index ef03351..d8e0835 100755 --- a/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c +++ b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c @@ -40,14 +40,15 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) uint8_t nal_ref_idc = 0; + uint8_t nal_unit_type = 
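
// The NAL unit type is now returned through an out parameter instead of
// h264_Parse_NAL_Unit writing into pInfo directly. Both outputs come from
// the first NAL byte, laid out as forbidden_zero_bit(1) | nal_ref_idc(2) |
// nal_unit_type(5): e.g. byte 0x65 = 01100101b decodes to nal_ref_idc = 3
// and nal_unit_type = 5 (IDR slice).
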
0; ///// Parse NAL Unit header pInfo->img.g_new_frame = 0; pInfo->push_to_cur = 1; pInfo->is_current_workload_done =0; - pInfo->nal_unit_type = 0; - h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); + h264_Parse_NAL_Unit(parent, &nal_unit_type, &nal_ref_idc); + pInfo->nal_unit_type = nal_unit_type; ///// Check frame bounday for non-vcl elimitter h264_check_previous_frame_end(pInfo); diff --git a/mixvbp/vbp_plugin/h264/secvideo/merrifield/viddec_h264secure_parse.c b/mixvbp/vbp_plugin/h264/secvideo/merrifield/viddec_h264secure_parse.c index 06efe5f..acfde74 100755 --- a/mixvbp/vbp_plugin/h264/secvideo/merrifield/viddec_h264secure_parse.c +++ b/mixvbp/vbp_plugin/h264/secvideo/merrifield/viddec_h264secure_parse.c @@ -115,7 +115,10 @@ uint32_t viddec_h264secure_parse(void *parent, void *ctxt) pInfo->is_current_workload_done =0; pInfo->nal_unit_type = 0; - h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); + uint8_t nal_unit_type = 0; + + h264_Parse_NAL_Unit(parent, &nal_unit_type, &nal_ref_idc); + pInfo->nal_unit_type = nal_unit_type; ///// Check frame bounday for non-vcl elimitter h264_check_previous_frame_end(pInfo); diff --git a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c index b3639c4..976efb5 100755 --- a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c +++ b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c @@ -7,6 +7,12 @@ #include "h264parse_dpb.h" #include +#include + +uint32_t viddec_threading_backup_ctx_info(void *parent, h264_Slice_Header_t *next_SliceHeader); +uint32_t viddec_threading_restore_ctx_info(void *parent, h264_Slice_Header_t *next_SliceHeader); + +#define MAX_SLICE_HEADER 150 /* Init function which can be called to intialized local context on open and flush and preserve*/ void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) @@ -24,6 +30,26 @@ void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve) /* picture level info which will always be initialized */ h264_init_Info_under_sps_pps_level(pInfo); + uint32_t i; + for(i = 0; i < MAX_SLICE_HEADER; i++) { + pInfo->working_sh[i] = (h264_Slice_Header_t*)malloc(sizeof(h264_Slice_Header_t)); + assert(pInfo->working_sh[i] != NULL); + + pInfo->working_sh[i]->parse_done = 0; + pInfo->working_sh[i]->bstrm_buf_buf_index = 0; + pInfo->working_sh[i]->bstrm_buf_buf_st = 0; + pInfo->working_sh[i]->bstrm_buf_buf_end = 0; + pInfo->working_sh[i]->bstrm_buf_buf_bitoff = 0; + pInfo->working_sh[i]->au_pos = 0; + pInfo->working_sh[i]->list_off = 0; + pInfo->working_sh[i]->phase = 0; + pInfo->working_sh[i]->emulation_byte_counter = 0; + pInfo->working_sh[i]->is_emul_reqd = 0; + pInfo->working_sh[i]->list_start_offset = 0; + pInfo->working_sh[i]->list_end_offset = 0; + pInfo->working_sh[i]->list_total_bytes = 0; + pInfo->working_sh[i]->slice_group_change_cycle = 0; + } return; } @@ -40,6 +66,7 @@ uint32_t viddec_h264_parse(void *parent, void *ctxt) h264_Status status = H264_STATUS_ERROR; uint8_t nal_ref_idc = 0; + uint8_t nal_unit_type = 0; ///// Parse NAL Unit header pInfo->img.g_new_frame = 0; @@ -47,9 +74,10 @@ uint32_t viddec_h264_parse(void *parent, void *ctxt) pInfo->is_current_workload_done =0; pInfo->nal_unit_type = 0; - h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc); + h264_Parse_NAL_Unit(parent, &nal_unit_type, &nal_ref_idc); VTRACE("Start parsing NAL unit, type = %d", pInfo->nal_unit_type); + pInfo->nal_unit_type = nal_unit_type; ///// Check frame bounday for non-vcl elimitter h264_check_previous_frame_end(pInfo); @@ -417,6 +445,64 @@ uint32_t 
viddec_h264_parse(void *parent, void *ctxt) return status; } + +uint32_t viddec_h264_threading_parse(void *parent, void *ctxt, uint32_t slice_index) +{ + struct h264_viddec_parser* parser = ctxt; + + h264_Info * pInfo = &(parser->info); + + h264_Status status = H264_STATUS_ERROR; + + uint8_t nal_ref_idc = 0; + uint8_t nal_unit_type = 0; + + h264_Parse_NAL_Unit(parent, &nal_unit_type, &nal_ref_idc); + + pInfo->nal_unit_type = nal_unit_type; + + + //////// Parse valid NAL unit + if (nal_unit_type == h264_NAL_UNIT_TYPE_SLICE) { + h264_Slice_Header_t* next_SliceHeader = pInfo->working_sh[slice_index]; + memset(next_SliceHeader, 0, sizeof(h264_Slice_Header_t)); + + next_SliceHeader->nal_ref_idc = nal_ref_idc; + + + //////////////////////////////////////////////////////////////////////////// + // Step 2: Parsing slice header + //////////////////////////////////////////////////////////////////////////// + /// IDR flag + next_SliceHeader->idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR); + + + /// Pass slice header + status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP_opt(parent, pInfo, next_SliceHeader); + + viddec_threading_backup_ctx_info(parent, next_SliceHeader); + + if (next_SliceHeader->sh_error & 3) + { + ETRACE("Slice Header parsing error."); + status = H264_STATUS_ERROR; + return status; + } + + //h264_Post_Parsing_Slice_Header(parent, pInfo, &next_SliceHeader); + next_SliceHeader->parse_done = 1; + + } else { + ETRACE("Wrong NALU. Multi thread is supposed to just parse slice nalu type."); + status = H264_STATUS_ERROR; + return status; + } + + return status; +} + + + void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size) { /* Should return size of my structure */ @@ -451,7 +537,104 @@ void viddec_h264_flush(void *parent, void *ctxt) p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC; p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC; + for(i = 0; i < MAX_SLICE_HEADER; i++) { + free(pInfo->working_sh[i]); + pInfo->working_sh[i] = NULL; + } return; } +uint32_t viddec_h264_payload_start(void *parent) +{ + + uint32_t code; + uint8_t nal_unit_type = 0; + if ( viddec_pm_peek_bits(parent, &code, 8) != -1) + { + nal_unit_type = (uint8_t)((code >> 0) & 0x1f); + } + //check that whether slice data starts + if (nal_unit_type == h264_NAL_UNIT_TYPE_SLICE) + { + return 1; + } else { + return 0; + } +} + +uint32_t viddec_h264_post_parse(void *parent, void *ctxt, uint32_t slice_index) +{ + struct h264_viddec_parser* parser = ctxt; + h264_Info * pInfo = &(parser->info); + h264_Status status = H264_STATUS_ERROR; + + h264_Slice_Header_t* next_SliceHeader = pInfo->working_sh[slice_index]; + + while (next_SliceHeader->parse_done != 1) { + sleep(0); + //WTRACE("slice header[%d] parse not finish, block to wait.", slice_index); + } + + viddec_threading_restore_ctx_info(parent, next_SliceHeader); + status = h264_Post_Parsing_Slice_Header(parent, pInfo, next_SliceHeader); + + next_SliceHeader->parse_done = 0; + + return status; +} + + +uint32_t viddec_h264_query_thread_parsing_cap(void) +{ + // current implementation of h.264 is capable to enable multi-thread parsing + return 1; +} + +uint32_t viddec_threading_backup_ctx_info(void *parent, h264_Slice_Header_t *next_SliceHeader) +{ + h264_Status retStatus = H264_STATUS_OK; + + viddec_pm_cxt_t* pm_cxt = (viddec_pm_cxt_t*) parent; + + next_SliceHeader->bstrm_buf_buf_index = pm_cxt->getbits.bstrm_buf.buf_index; + next_SliceHeader->bstrm_buf_buf_st = pm_cxt->getbits.bstrm_buf.buf_st; + next_SliceHeader->bstrm_buf_buf_end = 
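
// What is saved here is the parser's complete bitstream cursor: the
// byte-buffer window (index/start/end), the bit offset, the emulation-byte
// bookkeeping, and the list byte ranges. viddec_threading_restore_ctx_info
// below writes the same fields back, so a slice header parsed on a worker
// thread can later be resumed by the main thread at the exact position
// recorded for that slice.
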
pm_cxt->getbits.bstrm_buf.buf_end; + next_SliceHeader->bstrm_buf_buf_bitoff = pm_cxt->getbits.bstrm_buf.buf_bitoff; + + next_SliceHeader->au_pos = pm_cxt->getbits.au_pos; + next_SliceHeader->list_off = pm_cxt->getbits.list_off; + next_SliceHeader->phase = pm_cxt->getbits.phase; + next_SliceHeader->emulation_byte_counter = pm_cxt->getbits.emulation_byte_counter; + next_SliceHeader->is_emul_reqd = pm_cxt->getbits.is_emul_reqd; + + next_SliceHeader->list_start_offset = pm_cxt->list.start_offset; + next_SliceHeader->list_end_offset = pm_cxt->list.end_offset; + next_SliceHeader->list_total_bytes = pm_cxt->list.total_bytes; + + return retStatus; +} + +uint32_t viddec_threading_restore_ctx_info(void *parent, h264_Slice_Header_t *next_SliceHeader) +{ + h264_Status retStatus = H264_STATUS_OK; + + viddec_pm_cxt_t* pm_cxt = (viddec_pm_cxt_t*) parent; + + pm_cxt->getbits.bstrm_buf.buf_index = next_SliceHeader->bstrm_buf_buf_index; + pm_cxt->getbits.bstrm_buf.buf_st = next_SliceHeader->bstrm_buf_buf_st; + pm_cxt->getbits.bstrm_buf.buf_end = next_SliceHeader->bstrm_buf_buf_end; + pm_cxt->getbits.bstrm_buf.buf_bitoff = next_SliceHeader->bstrm_buf_buf_bitoff; + + pm_cxt->getbits.au_pos = next_SliceHeader->au_pos; + pm_cxt->getbits.list_off = next_SliceHeader->list_off; + pm_cxt->getbits.phase = next_SliceHeader->phase; + pm_cxt->getbits.emulation_byte_counter = next_SliceHeader->emulation_byte_counter; + pm_cxt->getbits.is_emul_reqd = next_SliceHeader->is_emul_reqd; + + pm_cxt->list.start_offset = next_SliceHeader->list_start_offset; + pm_cxt->list.end_offset = next_SliceHeader->list_end_offset; + pm_cxt->list.total_bytes = next_SliceHeader->list_total_bytes; + + return retStatus; +} -- cgit v1.2.3 From 5c90f3008d007b6cf7c59e1e9917bc60ae33bf22 Mon Sep 17 00:00:00 2001 From: "Sun, Jian" Date: Wed, 26 Mar 2014 11:41:15 +0800 Subject: use stream loading technology to optimize memory copy BZ: 181818 use stream loading technology to optimize memory copy during video editor. 
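
The key instruction is SSE4.1's non-temporal load MOVNTDQA
(_mm_stream_load_si128), which lets the CPU read write-combining (USWC)
graphics memory at streaming speed instead of issuing slow uncached loads.
A minimal sketch of the idea (the function name and alignment guard are
illustrative, not the committed code, which unrolls eight loads per
iteration and fences first):

    #include <smmintrin.h>   /* SSE4.1 intrinsics */
    #include <stdint.h>
    #include <string.h>

    static void wc_copy_sketch(void *dst, const void *src, size_t n)
    {
        /* streaming loads require 16-byte alignment; fall back otherwise */
        if ((((uintptr_t)dst | (uintptr_t)src) & 0xF) || (n & 0xF)) {
            memcpy(dst, src, n);
            return;
        }
        __m128i *d = (__m128i *)dst;
        __m128i *s = (__m128i *)src;
        for (size_t i = 0; i < n / 16; i++) {
            __m128i v = _mm_stream_load_si128(s + i); /* non-temporal load */
            _mm_store_si128(d + i, v);                /* ordinary store    */
        }
    }
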
Change-Id: Idfeca8e985039a0145832d429fb8d3dce7b7a008 Signed-off-by: Sun, Jian --- videodecoder/VideoDecoderBase.cpp | 15 ++++++ videodecoder/use_util_sse4.h | 104 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 119 insertions(+) create mode 100644 videodecoder/use_util_sse4.h diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index bf2a46e..7d1cd13 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -27,6 +27,9 @@ #include #include #include +#ifdef __SSE4_1__ +#include "use_util_sse4.h" +#endif #define INVALID_PTS ((uint64_t)-1) #define MAXIMUM_POC 0x7FFFFFFF @@ -1165,21 +1168,33 @@ Decode_Status VideoDecoderBase::getRawDataFromSurface(VideoRenderBuffer *renderB } if (size == (int32_t)vaImage.data_size) { +#ifdef __SSE4_1__ + stream_memcpy(pRawData, pBuf, size); +#else memcpy(pRawData, pBuf, size); +#endif } else { // copy Y data uint8_t *src = (uint8_t*)pBuf; uint8_t *dst = pRawData; int32_t row = 0; for (row = 0; row < cropHeight; row++) { +#ifdef __SSE4_1__ + stream_memcpy(dst, src, cropWidth); +#else memcpy(dst, src, cropWidth); +#endif dst += cropWidth; src += vaImage.pitches[0]; } // copy interleaved V and U data src = (uint8_t*)pBuf + vaImage.offsets[1]; for (row = 0; row < cropHeight / 2; row++) { +#ifdef __SSE4_1__ + stream_memcpy(dst, src, cropWidth); +#else memcpy(dst, src, cropWidth); +#endif dst += cropWidth; src += vaImage.pitches[1]; } diff --git a/videodecoder/use_util_sse4.h b/videodecoder/use_util_sse4.h new file mode 100644 index 0000000..073ae22 --- /dev/null +++ b/videodecoder/use_util_sse4.h @@ -0,0 +1,104 @@ +/* + * Copyright © 2012 Intel Corporation + * All rights reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice (including the next + * paragraph) shall be included in all copies or substantial portions of the + * Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. 
+ * + * Authors: + * Li Zeng + * Jian Sun + */ + +#include +#include + +inline void stream_memcpy(void* dst_buff, const void* src_buff, size_t size) +{ + bool isAligned = (((size_t)(src_buff) | (size_t)(dst_buff)) & 0xF) == 0; + if (!isAligned) { + memcpy(dst_buff, src_buff, size); + } + + static const size_t regs_count = 8; + + __m128i xmm_data0, xmm_data1, xmm_data2, xmm_data3; + __m128i xmm_data4, xmm_data5, xmm_data6, xmm_data7; + + size_t remain_data = size & (regs_count * sizeof(xmm_data0) - 1); + size_t end_position = 0; + + __m128i* pWb_buff = (__m128i*)dst_buff; + __m128i* pWb_buff_end = pWb_buff + ((size - remain_data) >> 4); + __m128i* pWc_buff = (__m128i*)src_buff; + + /*sync the wc memory data*/ + _mm_mfence(); + + while (pWb_buff < pWb_buff_end) + { + xmm_data0 = _mm_stream_load_si128(pWc_buff); + xmm_data1 = _mm_stream_load_si128(pWc_buff + 1); + xmm_data2 = _mm_stream_load_si128(pWc_buff + 2); + xmm_data3 = _mm_stream_load_si128(pWc_buff + 3); + xmm_data4 = _mm_stream_load_si128(pWc_buff + 4); + xmm_data5 = _mm_stream_load_si128(pWc_buff + 5); + xmm_data6 = _mm_stream_load_si128(pWc_buff + 6); + xmm_data7 = _mm_stream_load_si128(pWc_buff + 7); + + pWc_buff += regs_count; + _mm_store_si128(pWb_buff, xmm_data0); + _mm_store_si128(pWb_buff + 1, xmm_data1); + _mm_store_si128(pWb_buff + 2, xmm_data2); + _mm_store_si128(pWb_buff + 3, xmm_data3); + _mm_store_si128(pWb_buff + 4, xmm_data4); + _mm_store_si128(pWb_buff + 5, xmm_data5); + _mm_store_si128(pWb_buff + 6, xmm_data6); + _mm_store_si128(pWb_buff + 7, xmm_data7); + + pWb_buff += regs_count; + } + + /*copy data by 16 bytes step from the remainder*/ + if (remain_data >= 16) + { + size = remain_data; + remain_data = size & 15; + end_position = size >> 4; + for (size_t i = 0; i < end_position; ++i) + { + pWb_buff[i] = _mm_stream_load_si128(pWc_buff + i); + } + } + + /*copy the remainder data, if it still existed*/ + if (remain_data) + { + __m128i temp_data = _mm_stream_load_si128(pWc_buff + end_position); + + char* psrc_buf = (char*)(&temp_data); + char* pdst_buf = (char*)(pWb_buff + end_position); + + for (size_t i = 0; i < remain_data; ++i) + { + pdst_buf[i] = psrc_buf[i]; + } + } + +} -- cgit v1.2.3 From e894084796b0aa85f7c0aca600d87353cf24a49f Mon Sep 17 00:00:00 2001 From: Yuanjun Huang Date: Thu, 3 Apr 2014 09:41:58 +0800 Subject: Fix the zoom issue when playing protected streaming and DRC happens on FFRD8. BZ: 184151 Change-Id: Ie8fd92236f8f80e15ed828b957d1900f75608f23 Signed-off-by: Yuanjun Huang --- .../securevideo/baytrail/VideoDecoderAVCSecure.cpp | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp index 06fb677..bf964a9 100644 --- a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp @@ -162,6 +162,7 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { } vbp_data_h264 *data = NULL; + int new_sequence_to_handle = 0; if (sizeAccumulated > 0) { status = VideoDecoderBase::parseBuffer( @@ -170,6 +171,14 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { false, (void**)&data); CHECK_STATUS("VideoDecoderBase::parseBuffer"); + + // [FIX DRC zoom issue] if one buffer contains more than one nalu + // for example SPS+PPS+IDR, new_sps/new_pps flags set in parseBuffer + // will be flushed in the following updateBuffer. 
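
// (The fix is a sticky latch, restated with a hypothetical name:
//    saw_new_seq |= data->new_sps || data->new_pps;    // after parseBuffer
//    ...
//    if (saw_new_seq && !(data->new_sps && data->new_pps))
//        data->new_sps = data->new_pps = 1;            // after updateBuffer
//  so the new-sequence signal set while parsing SPS+PPS+IDR survives the
//  intervening updateBuffer call and handleNewSequence still runs.)
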
+ // So that handleNewSequence will not be handled in decodeFrame() + if (data->new_sps || data->new_pps) { + new_sequence_to_handle = 1; + } } if (sliceHeaderSize > 0) { @@ -180,6 +189,13 @@ Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) { sliceHeaderSize, (void**)&data); CHECK_STATUS("VideoDecoderBase::updateBuffer"); + + // in case the flags were flushed but indeed new sequence needed to be handled. + if ((1 == new_sequence_to_handle) && + ((data->new_sps == 0) || (data->new_pps == 0))) { + data->new_sps = 1; + data->new_pps = 1; + } } if (data == NULL) { -- cgit v1.2.3 From 53a6766169faca3e48ba2b618c9353e5aa51e316 Mon Sep 17 00:00:00 2001 From: gji2 Date: Tue, 18 Mar 2014 04:59:46 +0800 Subject: Enable video encode 64bit BZ: 178724 Enable video encode 64bit Change-Id: Ia74c37fed4e6e84ebba294b91cce7c4c8ba95515 Signed-off-by: gji2 --- test/BSClient.cpp | 6 +-- test/btest.cpp | 8 +-- test/mix_encoder.cpp | 18 +++---- test/mix_encoder2.cpp | 18 +++---- videoencoder/IntelMetadataBuffer.cpp | 101 ++++++++++++++++++----------------- videoencoder/IntelMetadataBuffer.h | 16 +++--- videoencoder/VideoEncoderAVC.cpp | 2 +- videoencoder/VideoEncoderBase.cpp | 10 ++-- videoencoder/VideoEncoderUtils.cpp | 36 ++++++------- videoencoder/VideoEncoderUtils.h | 16 +++--- 10 files changed, 118 insertions(+), 113 deletions(-) diff --git a/test/BSClient.cpp b/test/BSClient.cpp index 0c3cb77..334e371 100755 --- a/test/BSClient.cpp +++ b/test/BSClient.cpp @@ -95,7 +95,7 @@ int main(int argc, char* argv[]) return 1; } #else - mb1->SetValue((int32_t)Buffer1->pointer()); + mb1->SetValue((intptr_t)Buffer1->pointer()); #endif ret = mb1->SetValueInfo(&vi); ret = mb1->Serialize(bytes, size); @@ -131,7 +131,7 @@ int main(int argc, char* argv[]) return 1; } #else - mb1->SetValue((int32_t)graphicBuffer->handle); + mb1->SetValue((intptr_t)graphicBuffer->handle); #endif ret = mb1->SetValueInfo(&vi); ret = mb1->Serialize(bytes, size); @@ -165,7 +165,7 @@ int main(int argc, char* argv[]) } IntelMetadataBuffer mb1; - int32_t value; + intptr_t value; IMB_Result res; res = mb1.UnSerialize(bytes,size); diff --git a/test/btest.cpp b/test/btest.cpp index 2806d36..a62664e 100644 --- a/test/btest.cpp +++ b/test/btest.cpp @@ -15,11 +15,11 @@ int main(int argc, char* argv[]) IntelMetadataBufferType t1 = IntelMetadataBufferTypeCameraSource; IntelMetadataBufferType t2; - int32_t v1 = 0x00000010; - int32_t v2 = 0; + intptr_t v1 = 0x00000010; + intptr_t v2 = 0; ValueInfo vi1, *vi2 = NULL; - int32_t ev1[10]; - int32_t *ev2 = NULL; + intptr_t ev1[10]; + intptr_t *ev2 = NULL; unsigned int count; if (argc > 1) diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp index 75a3f5a..be38a27 100644 --- a/test/mix_encoder.cpp +++ b/test/mix_encoder.cpp @@ -76,7 +76,7 @@ static uint32_t gkBufHandle[gSrcFrames]; static sp gGraphicBufferAlloc; static sp gGraphicBuffer[gSrcFrames]; -static int ev1[10]; +static intptr_t ev1[10]; struct VideoConfigTypeIDRReq: VideoParamConfigSet { @@ -370,7 +370,7 @@ void MallocExternalMemoryWithExtraValues() { gUsrptr[i] = (uint8_t*)malloc(size); - gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeCameraSource, (intptr_t)gUsrptr[i]); gIMB[i]->SetValueInfo(vinfo); } @@ -397,9 +397,9 @@ void MallocExternalMemory() for(int i = 0; i < gSrcFrames; i ++) { gMallocPtr[i] = (uint8_t*)malloc(size + 4095); - gUsrptr[i] = (uint8_t*)((((uint32_t )gMallocPtr[i] + 4095) / 4096 ) * 4096); + gUsrptr[i] = 
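
// The round-up below is the usual align-to-4096 idiom written with integer
// division: e.g. a malloc result of 0x1234 becomes
// ((0x1234 + 4095) / 4096) * 4096 = 0x2000. It is equivalent to the mask
// form (p + 0xFFF) & ~(intptr_t)0xFFF, and the possible rounding slack is
// why 'size + 4095' bytes are allocated above.
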
(uint8_t*)((((intptr_t )gMallocPtr[i] + 4095) / 4096 ) * 4096); - gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeCameraSource, (intptr_t)gUsrptr[i]); gIMB[i]->SetValueInfo(vinfo); } @@ -431,7 +431,7 @@ void GetAllUsrptr() gUsrptr[i] = paramsUsrptrBuffer.usrPtr; gSrcStride = paramsUsrptrBuffer.stride; - gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeEncoder, (int32_t)gUsrptr[i]); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeEncoder, (intptr_t)gUsrptr[i]); } } @@ -577,7 +577,7 @@ void CreateSurfaceMappingForCI() { gUsrptr[i] = (uint8_t*)malloc(size); - gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeCameraSource, (intptr_t)gUsrptr[i]); gIMB[i]->SetValueInfo(vinfo); } @@ -603,7 +603,7 @@ void CreateGfxhandle(int color) gGraphicBuffer[i] = graphicBuffer; graphicBuffer->lock(usage | GraphicBuffer::USAGE_SW_WRITE_OFTEN, (void**)(&gUsrptr[i])); - gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeGrallocSource, (int32_t)gGraphicBuffer[i]->handle); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeGrallocSource, (intptr_t)gGraphicBuffer[i]->handle); graphicBuffer->unlock(); IMG_native_handle_t* h = (IMG_native_handle_t*) gGraphicBuffer[i]->handle; @@ -627,7 +627,7 @@ void CreateGralloc(int color) { gfx_alloc(gSrcWidth, gSrcHeight, format, usage, &handle, (int32_t*)&gSrcStride); gfx_lock(handle, usage | GRALLOC_USAGE_SW_WRITE_OFTEN, 0, 0, gSrcWidth, gSrcHeight, (void**)(&gUsrptr[i])); - gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeGrallocSource, (int32_t)handle); + gIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeGrallocSource, (intptr_t)handle); gfx_unlock(handle); IMG_native_handle_t* h = (IMG_native_handle_t*) handle; gSrcHeight = h->iHeight; @@ -958,7 +958,7 @@ for(int i=0; i<1; i++) { if (gIMB[i] != NULL) { - gIMB[i]->GetValue((int32_t&)handle); + gIMB[i]->GetValue((intptr_t&)handle); gfx_free(handle); } } diff --git a/test/mix_encoder2.cpp b/test/mix_encoder2.cpp index 002fc20..f4e407c 100755 --- a/test/mix_encoder2.cpp +++ b/test/mix_encoder2.cpp @@ -294,7 +294,7 @@ public: if (mMetadata) memcpy ((*buffer)->data(), data, size); else { - offset = ((int)((*buffer)->data() + 0x0FFF) & ~0x0FFF) - (int)(*buffer)->data(); + offset = ((intptr_t)((*buffer)->data() + 0x0FFF) & ~0x0FFF) - (intptr_t)(*buffer)->data(); memcpy ((*buffer)->data() + offset, data, size); } (*buffer)->set_range(offset, size); @@ -396,8 +396,8 @@ public: mMallocPtr[i] = (uint8_t*) malloc(size + 4095); //keep address 4K aligned - mUsrptr[i] = (uint8_t*)((((uint32_t )mMallocPtr[i] + 4095) / 4096 ) * 4096); - mIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeCameraSource, (int32_t) mUsrptr[i]); + mUsrptr[i] = (uint8_t*)((((intptr_t)mMallocPtr[i] + 4095) / 4096 ) * 4096); + mIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeCameraSource, (intptr_t) mUsrptr[i]); mIMB[i]->SetValueInfo(&vinfo); // LOG("Malloc address=%x\n", mUsrptr[i]); } @@ -447,7 +447,7 @@ public: { mBuffers[i] = new MemoryBase(mHeap, i * size, size); - mUsrptr[i] = (uint8_t*) ((int) (mBuffers[i]->pointer() + 0x0FFF) & ~0x0FFF); + mUsrptr[i] = (uint8_t*) ((intptr_t) (mBuffers[i]->pointer() + 0x0FFF) & ~0x0FFF); mIMB[i] = new IntelMetadataBuffer(); mIMB[i]->SetType(IntelMetadataBufferTypeCameraSource); @@ -455,10 +455,10 @@ public: mIMB[i]->SetSessionFlag(mSessionFlag); 
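
// The int32_t -> intptr_t conversion in this patch matters exactly at
// points like this, where a pointer is stored as an integer "value". A
// minimal illustration of the 64-bit hazard (hypothetical variables):
//     void *p = ...;               // 8 bytes in a 64-bit build
//     int32_t bad = (int32_t)p;    // silently truncates the upper 32 bits
//     intptr_t ok  = (intptr_t)p;  // (void *)ok == p round-trips safely
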
mIMB[i]->ShareValue(mBuffers[i]); #else - mIMB[i]->SetValue((int32_t)mUsrptr[i]); + mIMB[i]->SetValue((intptr_t)mUsrptr[i]); #endif mIMB[i]->SetValueInfo(&vinfo); - LOG("MemHeap local address=%x\n", mUsrptr[i]); + LOG("MemHeap local address=%p\n", mUsrptr[i]); } return OK; @@ -575,7 +575,7 @@ public: ValueInfo vinfo; memset(&vinfo, 0, sizeof(ValueInfo)); vinfo.mode = MEM_MODE_SURFACE; - vinfo.handle = (uint32_t) mVADisplay; + vinfo.handle = (intptr_t) mVADisplay; vinfo.size = 0; vinfo.width = mWidth; vinfo.height = mHeight; @@ -679,7 +679,7 @@ public: mIMB[i]->SetSessionFlag(mSessionFlag); mIMB[i]->ShareValue(mGraphicBuffer[i]); #else - mIMB[i]->SetValue((int32_t)mGraphicBuffer[i]->handle); + mIMB[i]->SetValue((intptr_t)mGraphicBuffer[i]->handle); #endif mIMB[i]->SetValueInfo(&vinfo); @@ -734,7 +734,7 @@ public: if (gfx_alloc(mWidth, mHeight, mColorFormat, usage, &mHandle[i], (int32_t*)&mStride) != 0) return UNKNOWN_ERROR; - mIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeGrallocSource, (int32_t)mHandle[i]); + mIMB[i] = new IntelMetadataBuffer(IntelMetadataBufferTypeGrallocSource, (intptr_t)mHandle[i]); IMG_native_handle_t* h = (IMG_native_handle_t*) mHandle[i]; // mWidth = h->iWidth; mHeight = h->iHeight; diff --git a/videoencoder/IntelMetadataBuffer.cpp b/videoencoder/IntelMetadataBuffer.cpp index 4d58904..e275637 100644 --- a/videoencoder/IntelMetadataBuffer.cpp +++ b/videoencoder/IntelMetadataBuffer.cpp @@ -42,8 +42,8 @@ struct ShareMemMap { uint32_t sessionflag; - int32_t value; - int32_t value_backup; + intptr_t value; + intptr_t value_backup; uint32_t type; sp membase; sp gbuffer; @@ -67,7 +67,7 @@ enum { #define REMOTE_PROVIDER 0x80000000 #define REMOTE_CONSUMER 0x40000000 -static ShareMemMap* ReadMemObjFromBinder(const Parcel& data, uint32_t sessionflag, uint32_t value) { +static ShareMemMap* ReadMemObjFromBinder(const Parcel& data, uint32_t sessionflag, intptr_t value) { uint32_t type = data.readInt32(); if (type >= ST_MAX) @@ -96,12 +96,12 @@ static ShareMemMap* ReadMemObjFromBinder(const Parcel& data, uint32_t sessionfla return NULL; } - map->value = (int32_t) ((int) ( mem->pointer() + 0x0FFF) & ~0x0FFF); + map->value = (intptr_t)( mem->pointer() + 0x0FFF) & ~0x0FFF; map->membase = mem; #ifdef TEST ALOGI("membase heapID:%d, pointer:%x data:%x, aligned value:%x", \ - heap->getHeapID(), mem->pointer(), *((int *)(mem->pointer())), map->value); + heap->getHeapID(), mem->pointer(), *((intptr_t *)(mem->pointer())), map->value); #endif } @@ -115,14 +115,14 @@ static ShareMemMap* ReadMemObjFromBinder(const Parcel& data, uint32_t sessionfla } data.read(*buffer); - map->value = (uint32_t)buffer->handle; + map->value = (intptr_t)buffer->handle; map->gbuffer = buffer; #ifdef TEST void* usrptr[3]; buffer->lock(GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_READ_OFTEN, &usrptr[0]); buffer->unlock(); - ALOGI("gfx handle:%x data:%x", (int32_t)buffer->handle, *((int *)usrptr[0])); + ALOGI("gfx handle:%p data:%x", (intptr_t)buffer->handle, *((intptr_t *)usrptr[0])); #endif } @@ -182,7 +182,7 @@ static void ClearLocalMem(uint32_t sessionflag) gShareMemMapListLock.unlock(); } -static ShareMemMap* FindShareMem(uint32_t sessionflag, int32_t value, bool isBackup) +static ShareMemMap* FindShareMem(uint32_t sessionflag, intptr_t value, bool isBackup) { List::iterator node; @@ -208,7 +208,7 @@ static ShareMemMap* FindShareMem(uint32_t sessionflag, int32_t value, bool isBac return NULL; } -static ShareMemMap* PopShareMem(uint32_t sessionflag, int32_t value) +static ShareMemMap* 
PopShareMem(uint32_t sessionflag, intptr_t value) { List::iterator node; @@ -261,6 +261,7 @@ status_t IntelBufferSharingService::instantiate(){ status_t IntelBufferSharingService::onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { + //TODO: if pid is int32? pid_t pid = data.readInt32(); uint32_t sessionflag = data.readInt32(); @@ -275,7 +276,7 @@ status_t IntelBufferSharingService::onTransact(uint32_t code, const Parcel& data return UNKNOWN_ERROR; } - int32_t value = data.readInt32(); + intptr_t value = data.readIntPtr(); // LOGI("onTransact SHARE_MEM value=%x", value); @@ -284,7 +285,7 @@ status_t IntelBufferSharingService::onTransact(uint32_t code, const Parcel& data if (map == NULL) return UNKNOWN_ERROR; - reply->writeInt32(map->value); + reply->writeIntPtr(map->value); return NO_ERROR; } @@ -311,7 +312,7 @@ status_t IntelBufferSharingService::onTransact(uint32_t code, const Parcel& data return UNKNOWN_ERROR; } - int32_t value = data.readInt32(); + intptr_t value = data.readIntPtr(); // LOGI("onTransact GET_MEM value=%x", value); @@ -345,7 +346,7 @@ IntelMetadataBuffer::IntelMetadataBuffer() #endif } -IntelMetadataBuffer::IntelMetadataBuffer(IntelMetadataBufferType type, int32_t value) +IntelMetadataBuffer::IntelMetadataBuffer(IntelMetadataBufferType type, intptr_t value) { mType = type; mValue = value; @@ -384,8 +385,8 @@ IntelMetadataBuffer::IntelMetadataBuffer(const IntelMetadataBuffer& imb) if (imb.mExtraValues) { - mExtraValues = new int32_t[mExtraValues_Count]; - memcpy(mExtraValues, imb.mExtraValues, 4 * mExtraValues_Count); + mExtraValues = new intptr_t[mExtraValues_Count]; + memcpy(mExtraValues, imb.mExtraValues, sizeof(mValue) * mExtraValues_Count); } if (imb.mBytes) @@ -413,8 +414,8 @@ const IntelMetadataBuffer& IntelMetadataBuffer::operator=(const IntelMetadataBuf if (imb.mExtraValues) { - mExtraValues = new int32_t[mExtraValues_Count]; - memcpy(mExtraValues, imb.mExtraValues, 4 * mExtraValues_Count); + mExtraValues = new intptr_t[mExtraValues_Count]; + memcpy(mExtraValues, imb.mExtraValues, sizeof(mValue) * mExtraValues_Count); } if (imb.mBytes) @@ -443,7 +444,7 @@ IMB_Result IntelMetadataBuffer::SetType(IntelMetadataBufferType type) return IMB_SUCCESS; } -IMB_Result IntelMetadataBuffer::GetValue(int32_t& value) +IMB_Result IntelMetadataBuffer::GetValue(intptr_t& value) { value = mValue; @@ -471,9 +472,10 @@ IMB_Result IntelMetadataBuffer::GetValue(int32_t& value) //send pid, sessionflag, and memtype pid_t pid = getpid(); + //TODO: if pid is int32? 
data.writeInt32(pid); data.writeInt32(mSessionFlag); - data.writeInt32(mValue); + data.writeIntPtr(mValue); //do transcation if (binder->transact(GET_MEM, data, &reply) != NO_ERROR) @@ -490,7 +492,7 @@ IMB_Result IntelMetadataBuffer::GetValue(int32_t& value) #endif } -IMB_Result IntelMetadataBuffer::SetValue(int32_t value) +IMB_Result IntelMetadataBuffer::SetValue(intptr_t value) { mValue = value; @@ -519,7 +521,7 @@ IMB_Result IntelMetadataBuffer::SetValueInfo(ValueInfo* info) return IMB_SUCCESS; } -IMB_Result IntelMetadataBuffer::GetExtraValues(int32_t* &values, uint32_t& num) +IMB_Result IntelMetadataBuffer::GetExtraValues(intptr_t* &values, uint32_t& num) { values = mExtraValues; num = mExtraValues_Count; @@ -527,7 +529,7 @@ IMB_Result IntelMetadataBuffer::GetExtraValues(int32_t* &values, uint32_t& num) return IMB_SUCCESS; } -IMB_Result IntelMetadataBuffer::SetExtraValues(int32_t* values, uint32_t num) +IMB_Result IntelMetadataBuffer::SetExtraValues(intptr_t* values, uint32_t num) { if (values && num > 0) { @@ -538,9 +540,9 @@ IMB_Result IntelMetadataBuffer::SetExtraValues(int32_t* values, uint32_t num) } if (mExtraValues == NULL) - mExtraValues = new int32_t[num]; + mExtraValues = new intptr_t[num]; - memcpy(mExtraValues, values, sizeof(int32_t) * num); + memcpy(mExtraValues, values, sizeof(intptr_t) * num); mExtraValues_Count = num; } else @@ -555,16 +557,16 @@ IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size) return IMB_INVAL_PARAM; IntelMetadataBufferType type; - int32_t value; - uint32_t extrasize = size - 8; + intptr_t value; + uint32_t extrasize = size - sizeof(type) - sizeof(value); ValueInfo* info = NULL; - int32_t* ExtraValues = NULL; + intptr_t* ExtraValues = NULL; uint32_t ExtraValues_Count = 0; - memcpy(&type, data, 4); - data += 4; - memcpy(&value, data, 4); - data += 4; + memcpy(&type, data, sizeof(type)); + data += sizeof(type); + memcpy(&value, data, sizeof(value)); + data += sizeof(value); switch (type) { @@ -577,9 +579,9 @@ IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size) if (extrasize > sizeof(ValueInfo)) //has extravalues { - if ( (extrasize - sizeof(ValueInfo)) % 4 != 0 ) + if ( (extrasize - sizeof(ValueInfo)) % sizeof(mValue) != 0 ) return IMB_INVAL_BUFFER; - ExtraValues_Count = (extrasize - sizeof(ValueInfo)) / 4; + ExtraValues_Count = (extrasize - sizeof(ValueInfo)) / sizeof(mValue); } if (extrasize > 0) @@ -591,8 +593,8 @@ IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size) if (ExtraValues_Count > 0) { - ExtraValues = new int32_t[ExtraValues_Count]; - memcpy(ExtraValues, data, ExtraValues_Count * 4); + ExtraValues = new intptr_t[ExtraValues_Count]; + memcpy(ExtraValues, data, ExtraValues_Count * sizeof(mValue)); } break; @@ -631,20 +633,20 @@ IMB_Result IntelMetadataBuffer::Serialize(uint8_t* &data, uint32_t& size) return IMB_INVAL_PARAM; //assemble bytes according members - mSize = 8; + mSize = sizeof(mType) + sizeof(mValue); if (mInfo) { mSize += sizeof(ValueInfo); if (mExtraValues) - mSize += 4 * mExtraValues_Count; + mSize += sizeof(mValue) * mExtraValues_Count; } mBytes = new uint8_t[mSize]; uint8_t *ptr = mBytes; - memcpy(ptr, &mType, 4); - ptr += 4; - memcpy(ptr, &mValue, 4); - ptr += 4; + memcpy(ptr, &mType, sizeof(mType)); + ptr += sizeof(mType); + memcpy(ptr, &mValue, sizeof(mValue)); + ptr += sizeof(mValue); if (mInfo) { @@ -655,7 +657,7 @@ IMB_Result IntelMetadataBuffer::Serialize(uint8_t* &data, uint32_t& size) ptr += sizeof(ValueInfo); if (mExtraValues) - memcpy(ptr, 
mExtraValues, mExtraValues_Count * 4); + memcpy(ptr, mExtraValues, mExtraValues_Count * sizeof(mValue)); } } @@ -687,7 +689,7 @@ IMB_Result IntelMetadataBuffer::SetSessionFlag(uint32_t sessionflag) IMB_Result IntelMetadataBuffer::ShareValue(sp mem) { - mValue = (int32_t)((int) ( mem->pointer() + 0x0FFF) & ~0x0FFF); + mValue = (intptr_t)((intptr_t) ( mem->pointer() + 0x0FFF) & ~0x0FFF); if (mSessionFlag == 0) //no sharing return IMB_SUCCESS; @@ -703,9 +705,10 @@ IMB_Result IntelMetadataBuffer::ShareValue(sp mem) //send pid, sessionflag, and value pid_t pid = getpid(); + //TODO: if pid is int32? data.writeInt32(pid); data.writeInt32(mSessionFlag); - data.writeInt32(mValue); + data.writeIntPtr(mValue); //send type/obj (offset/size/MemHeap) ShareMemMap smem; @@ -719,7 +722,7 @@ IMB_Result IntelMetadataBuffer::ShareValue(sp mem) return IMB_SERVICE_FAIL; //set new value gotten from peer - mValue = reply.readInt32(); + mValue = reply.readIntPtr(); // LOGI("ShareValue(membase) Get reply from sevice, new value:%x\n", mValue); } else //is local provider , direct access list @@ -739,7 +742,7 @@ IMB_Result IntelMetadataBuffer::ShareValue(sp mem) IMB_Result IntelMetadataBuffer::ShareValue(sp gbuffer) { - mValue = (int32_t)gbuffer->handle; + mValue = (intptr_t)gbuffer->handle; if (mSessionFlag == 0) //no sharing return IMB_SUCCESS; @@ -754,9 +757,10 @@ IMB_Result IntelMetadataBuffer::ShareValue(sp gbuffer) //send pid, sessionflag, and memtype pid_t pid = getpid(); + //TODO: if pid is int32 ? data.writeInt32(pid); data.writeInt32(mSessionFlag); - data.writeInt32(mValue); + data.writeIntPtr(mValue); //send value/graphicbuffer obj ShareMemMap smem; @@ -770,7 +774,7 @@ IMB_Result IntelMetadataBuffer::ShareValue(sp gbuffer) return IMB_SERVICE_FAIL; //set new value gotten from peer - mValue = reply.readInt32(); + mValue = reply.readIntPtr(); // LOGI("ShareValue(gfx) Get reply from sevice, new value:%x\n", mValue); } else //is local provider, direct access list @@ -810,6 +814,7 @@ IMB_Result IntelMetadataBuffer::ClearContext(uint32_t sessionflag, bool isProvid //send pid and sessionflag pid_t pid = getpid(); + //TODO: if pid is int32? 
data.writeInt32(pid); data.writeInt32(sessionflag); diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h index ba0c9ba..2fbc1fe 100644 --- a/videoencoder/IntelMetadataBuffer.h +++ b/videoencoder/IntelMetadataBuffer.h @@ -61,7 +61,7 @@ typedef enum { typedef struct { MemMode mode; //memory type, vasurface/malloc/gfx/ion/v4l2/ci etc - uint32_t handle; //handle + intptr_t handle; //handle uint32_t size; //memory size uint32_t width; //picture width uint32_t height; //picture height @@ -87,7 +87,7 @@ typedef enum { class IntelMetadataBuffer { public: IntelMetadataBuffer(); //for generator - IntelMetadataBuffer(IntelMetadataBufferType type, int32_t value); //for quick generator + IntelMetadataBuffer(IntelMetadataBufferType type, intptr_t value); //for quick generator ~IntelMetadataBuffer(); IntelMetadataBuffer(const IntelMetadataBuffer& imb); @@ -95,12 +95,12 @@ public: IMB_Result GetType(IntelMetadataBufferType &type); IMB_Result SetType(IntelMetadataBufferType type); - IMB_Result GetValue(int32_t &value); - IMB_Result SetValue(int32_t value); + IMB_Result GetValue(intptr_t &value); + IMB_Result SetValue(intptr_t value); IMB_Result GetValueInfo(ValueInfo* &info); IMB_Result SetValueInfo(ValueInfo *info); - IMB_Result GetExtraValues(int32_t* &values, uint32_t &num); - IMB_Result SetExtraValues(int32_t *values, uint32_t num); + IMB_Result GetExtraValues(intptr_t* &values, uint32_t &num); + IMB_Result SetExtraValues(intptr_t *values, uint32_t num); //New API for bytes input/ouput, UnSerialize=SetBytes, Serialize=GetBytes IMB_Result UnSerialize(uint8_t* data, uint32_t size); @@ -111,10 +111,10 @@ public: private: IntelMetadataBufferType mType; - int32_t mValue; + intptr_t mValue; ValueInfo* mInfo; - int32_t* mExtraValues; + intptr_t* mExtraValues; uint32_t mExtraValues_Count; uint8_t* mBytes; diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 7b234b1..63205c6 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -386,7 +386,7 @@ Encode_Status VideoEncoderAVC::getOneNALUnit( dataRemaining = bufSize - pos + 1; while ((dataRemaining > 0) && (zeroByteCount < 3)) { - if (((((uint32_t)dataPtr) & 0xF ) == 0) && (0 == zeroByteCount) + if (((((intptr_t)dataPtr) & 0xF ) == 0) && (0 == zeroByteCount) && (dataRemaining > 0xF)) { __asm__ ( diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 00fe25d..9ab50e4 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1532,7 +1532,7 @@ Encode_Status VideoEncoderBase::getNewUsrptrFromSurface( map->setVASurface(surface); //special case, vasuface is set, so nothing do in doMapping // map->setType(MetadataBufferTypeEncoder); - map->setValue((int32_t)*usrptr); + map->setValue((intptr_t)*usrptr); ValueInfo vinfo; memset(&vinfo, 0, sizeof(ValueInfo)); vinfo.mode = (MemMode)MEM_MODE_USRPTR; @@ -1576,7 +1576,7 @@ Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upS ValueInfo vinfo; memset(&vinfo, 0, sizeof(ValueInfo)); vinfo.mode = (MemMode)upStreamBuffer->bufferMode; - vinfo.handle = (uint32_t)upStreamBuffer->display; + vinfo.handle = (intptr_t)upStreamBuffer->display; vinfo.size = 0; if (upStreamBuffer->bufAttrib) { vinfo.width = upStreamBuffer->bufAttrib->realWidth; @@ -1602,10 +1602,10 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA Encode_Status ret = ENCODE_SUCCESS; IntelMetadataBufferType type; - int32_t value; + intptr_t value; 
ValueInfo vinfo; ValueInfo *pvinfo = &vinfo; - int32_t *extravalues = NULL; + intptr_t *extravalues = NULL; unsigned int extravalues_count = 0; IntelMetadataBuffer imb; @@ -1630,7 +1630,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA } type = IntelMetadataBufferTypeUser; - value = (int32_t)inBuffer->data; + value = (intptr_t)inBuffer->data; } #ifdef INTEL_VIDEO_XPROC_SHARING diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp index 0612ce6..0ccbb3b 100644 --- a/videoencoder/VideoEncoderUtils.cpp +++ b/videoencoder/VideoEncoderUtils.cpp @@ -159,7 +159,7 @@ static int gfx_Blit(buffer_handle_t src, buffer_handle_t dest, return err; } -Encode_Status GetGfxBufferInfo(int32_t handle, ValueInfo& vinfo){ +Encode_Status GetGfxBufferInfo(intptr_t handle, ValueInfo& vinfo){ /* only support OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar HAL_PIXEL_FORMAT_NV12 @@ -204,7 +204,7 @@ Encode_Status GetGfxBufferInfo(int32_t handle, ValueInfo& vinfo){ } #ifdef GFX_DUMP -void DumpGfx(int32_t handle, char* filename) { +void DumpGfx(intptr_t handle, char* filename) { ValueInfo vinfo; void* vaddr[3]; FILE* fp; @@ -306,8 +306,8 @@ Encode_Status VASurfaceMap::doMapping() { if (gfx_alloc(width, height, HAL_PIXEL_FORMAT_NV12, usage, &mGfxHandle, &stride) != 0) return ENCODE_DRIVER_FAIL; - LOG_I("Create an new gfx buffer handle 0x%08x for color convert, width=%d, height=%d, stride=%d\n", - (unsigned int)mGfxHandle, width, height, stride); + LOG_I("Create an new gfx buffer handle 0x%p for color convert, width=%d, height=%d, stride=%d\n", + mGfxHandle, width, height, stride); } #else @@ -333,7 +333,7 @@ Encode_Status VASurfaceMap::doMapping() { #ifdef IMG_GFX if (mGfxHandle != NULL) { //map new gfx handle to vasurface - ret = MappingGfxHandle((int32_t)mGfxHandle); + ret = MappingGfxHandle((intptr_t)mGfxHandle); CHECK_ENCODE_STATUS_RETURN("MappingGfxHandle"); LOGI("map new allocated gfx handle to vaSurface\n"); } else @@ -368,7 +368,7 @@ Encode_Status VASurfaceMap::MappingToVASurface() { LOG_I("VASurface is already set before, nothing to do here\n"); return ENCODE_SUCCESS; } - LOG_I("MappingToVASurface mode=%d, value=%x\n", mVinfo.mode, mValue); + LOG_I("MappingToVASurface mode=%d, value=%p\n", mVinfo.mode, (void*)mValue); const char *mode = NULL; switch (mVinfo.mode) { @@ -399,12 +399,12 @@ Encode_Status VASurfaceMap::MappingToVASurface() { } LOG_I("%s: Format=%x, lumaStride=%d, width=%d, height=%d\n", mode, mVinfo.format, mVinfo.lumaStride, mVinfo.width, mVinfo.height); - LOG_I("vaSurface 0x%08x is created for value = 0x%08x", mVASurface, mValue); + LOG_I("vaSurface 0x%08x is created for value = 0x%p\n", mVASurface, (void*)mValue); return ret; } -Encode_Status VASurfaceMap::MappingSurfaceID(int32_t value) { +Encode_Status VASurfaceMap::MappingSurfaceID(intptr_t value) { VAStatus vaStatus = VA_STATUS_SUCCESS; VASurfaceID surface; @@ -425,7 +425,7 @@ Encode_Status VASurfaceMap::MappingSurfaceID(int32_t value) { &lumaOffset, &chromaUOffset, &chromaVOffset, &kBufHandle, NULL); CHECK_VA_STATUS_RETURN("vaLockSurface"); - LOG_I("Surface incoming = 0x%08x\n", value); + LOG_I("Surface incoming = 0x%p\n", (void*)value); LOG_I("lumaStride = %d, chromaUStride = %d, chromaVStride=%d\n", lumaStride, chromaUStride, chromaVStride); LOG_I("lumaOffset = %d, chromaUOffset = %d, chromaVOffset = %d\n", lumaOffset, chromaUOffset, chromaVOffset); LOG_I("kBufHandle = 0x%08x, fourCC = %d\n", kBufHandle, fourCC); @@ -446,9 +446,9 @@ Encode_Status 
VASurfaceMap::MappingSurfaceID(int32_t value) { return ENCODE_SUCCESS; } -Encode_Status VASurfaceMap::MappingGfxHandle(int32_t value) { +Encode_Status VASurfaceMap::MappingGfxHandle(intptr_t value) { - LOG_I("MappingGfxHandle %x......\n", value); + LOG_I("MappingGfxHandle %p......\n", (void*)value); LOG_I("format = 0x%08x, lumaStride = %d in ValueInfo\n", mVinfo.format, mVinfo.lumaStride); //default value for all HW platforms, maybe not accurate @@ -482,9 +482,9 @@ Encode_Status VASurfaceMap::MappingGfxHandle(int32_t value) { return ENCODE_SUCCESS; } -Encode_Status VASurfaceMap::MappingKbufHandle(int32_t value) { +Encode_Status VASurfaceMap::MappingKbufHandle(intptr_t value) { - LOG_I("MappingKbufHandle value=%d\n", value); + LOG_I("MappingKbufHandle value=%p\n", (void*)value); mVinfo.size = mVinfo.lumaStride * mVinfo.height * 1.5; mVASurface = CreateSurfaceFromExternalBuf(value, mVinfo); @@ -498,7 +498,7 @@ Encode_Status VASurfaceMap::MappingKbufHandle(int32_t value) { return ENCODE_SUCCESS; } -Encode_Status VASurfaceMap::MappingMallocPTR(int32_t value) { +Encode_Status VASurfaceMap::MappingMallocPTR(intptr_t value) { mVASurface = CreateSurfaceFromExternalBuf(value, mVinfo); if (mVASurface == VA_INVALID_SURFACE) @@ -518,7 +518,7 @@ Encode_Status VASurfaceMap::doActionCopy() { uint32_t width = 0, height = 0, stride = 0; uint8_t *pSrcBuffer, *pDestBuffer; - int32_t handle = 0; + intptr_t handle = 0; LOG_I("Copying Src Buffer data to VASurface\n"); @@ -553,7 +553,7 @@ Encode_Status VASurfaceMap::doActionCopy() { ValueInfo tmp; if (mGfxHandle) - handle = (int32_t) mGfxHandle; + handle = (intptr_t) mGfxHandle; else handle = mValue; @@ -659,7 +659,7 @@ Encode_Status VASurfaceMap::doActionColConv() { #ifdef GFX_DUMP LOG_I("dumpping gfx data.....\n"); DumpGfx(mValue, "/data/dump.rgb"); - DumpGfx((int32_t)mGfxHandle, "/data/dump.yuv"); + DumpGfx((intptr_t)mGfxHandle, "/data/dump.yuv"); #endif return ENCODE_SUCCESS; @@ -668,7 +668,7 @@ Encode_Status VASurfaceMap::doActionColConv() { #endif } -VASurfaceID VASurfaceMap::CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vinfo) { +VASurfaceID VASurfaceMap::CreateSurfaceFromExternalBuf(intptr_t value, ValueInfo& vinfo) { VAStatus vaStatus; VASurfaceAttribExternalBuffers extbuf; diff --git a/videoencoder/VideoEncoderUtils.h b/videoencoder/VideoEncoderUtils.h index 786a63b..f421022 100644 --- a/videoencoder/VideoEncoderUtils.h +++ b/videoencoder/VideoEncoderUtils.h @@ -20,11 +20,11 @@ public: Encode_Status doMapping(); VASurfaceID getVASurface() {return mVASurface;} - int32_t getValue() {return mValue;} + intptr_t getValue() {return mValue;} ValueInfo* getValueInfo() {return &mVinfo;} void setVASurface(VASurfaceID surface) {mVASurface = surface;} - void setValue(int32_t value) {mValue = value;} + void setValue(intptr_t value) {mValue = value;} void setValueInfo(ValueInfo& vinfo) {memcpy(&mVinfo, &vinfo, sizeof(ValueInfo));} void setTracked() {mTracked = true;} void setAction(int32_t action) {mAction = action;} @@ -33,15 +33,15 @@ private: Encode_Status doActionCopy(); Encode_Status doActionColConv(); Encode_Status MappingToVASurface(); - Encode_Status MappingSurfaceID(int32_t value); - Encode_Status MappingGfxHandle(int32_t value); - Encode_Status MappingKbufHandle(int32_t value); - Encode_Status MappingMallocPTR(int32_t value); - VASurfaceID CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vinfo); + Encode_Status MappingSurfaceID(intptr_t value); + Encode_Status MappingGfxHandle(intptr_t value); + Encode_Status MappingKbufHandle(intptr_t 
value); + Encode_Status MappingMallocPTR(intptr_t value); + VASurfaceID CreateSurfaceFromExternalBuf(intptr_t value, ValueInfo& vinfo); VADisplay mVADisplay; - int32_t mValue; + intptr_t mValue; VASurfaceID mVASurface; int32_t mVASurfaceWidth; -- cgit v1.2.3 From 750a80f758aadb85f7f1a01f169c6726c2ceb536 Mon Sep 17 00:00:00 2001 From: wfeng6 Date: Tue, 1 Apr 2014 23:37:15 +0800 Subject: Add the offset to the encrypted slice size BZ: 183508 When the encrypted slice data doesn't start on a block-aligned address, the offset should be added to the sizeLength to reflect the true size of the encrypted slice data. Change-Id: Ib83de78bf3744f56e82635dbb4ffb3d91a8c3767 Signed-off-by: wfeng6 --- videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp index 4ff7771..1d94f47 100755 --- a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp @@ -164,13 +164,13 @@ Decode_Status VideoDecoderAVCSecure::processModularInputBuffer(VideoDecodeBuffer mSliceInfo[sliceidx].sliceHeaderByte = nalu_type; mSliceInfo[sliceidx].sliceStartOffset = (nalu_offset >> 4) << 4; mSliceInfo[sliceidx].sliceByteOffset = nalu_offset - mSliceInfo[sliceidx].sliceStartOffset; - mSliceInfo[sliceidx].sliceLength = nalu_size; + mSliceInfo[sliceidx].sliceLength = mSliceInfo[sliceidx].sliceByteOffset + nalu_size; mSliceInfo[sliceidx].sliceSize = (mSliceInfo[sliceidx].sliceByteOffset + nalu_size + 0xF) & ~0xF; VTRACE("sliceHeaderByte = 0x%x", mSliceInfo[sliceidx].sliceHeaderByte); VTRACE("sliceStartOffset = %d", mSliceInfo[sliceidx].sliceStartOffset); VTRACE("sliceByteOffset = %d", mSliceInfo[sliceidx].sliceByteOffset); VTRACE("sliceSize = %d", mSliceInfo[sliceidx].sliceSize); - + VTRACE("sliceLength = %d", mSliceInfo[sliceidx].sliceLength); #if 0 uint32_t testsize; uint8_t *testdata; -- cgit v1.2.3 From d7a8b48bc6a81cd3d853a56c8e6fb79b0177af2d Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Thu, 3 Apr 2014 13:31:21 +0800 Subject: libmix: update actual buffer count for AVC BZ: 184168 If there is a B frame to be output, it should be excluded when calculating the buffer count. Otherwise, the buffer count is not enough and an ANR happens. Change-Id: Ie3d76d5c7f1cbcb73054b68881e07939847fd9f3 Signed-off-by: Tianmi Chen --- videodecoder/VideoDecoderAVC.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index faec181..b5a4a6b 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -772,7 +772,9 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { } } - int diff = data->codec_data->num_ref_frames + 1 - mOutputWindowSize; + int DPBSize = getDPBSize(data); + // considering there is a B frame to be output, which needs to be exclued in output queue + int diff = DPBSize - (mOutputWindowSize - 1); if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) { mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber; @@ -780,7 +782,7 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { // The number of actual buffer needed is // outputQueue + nativewindow_owned + (diff > 0 ? diff : 1) + widi_need_max + 1(available buffer) // while outputQueue = DPB < 8?
DPB :8 - // and diff = Reference + 1 - ouputQueue + // and diff = DBPSize - (outputQ - 1) mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + NW_CONSUMED /* Owned by native window */ + (diff > 0 ? diff : 1) #ifndef USE_GEN_HW -- cgit v1.2.3 From 172d2575896338519674a6c25fa24e39e9bf9582 Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Mon, 31 Mar 2014 20:34:28 +0800 Subject: libmix: report format change if graphic buffer count is increased BZ: 183245 report format change if graphic buffer count is increased Change-Id: Ic75fbe790c911110808fd2b473d46b1151a32457 Signed-off-by: Tianmi Chen --- videodecoder/VideoDecoderAVC.cpp | 3 ++- videodecoder/VideoDecoderBase.cpp | 22 ++++++++++++++++++++++ videodecoder/VideoDecoderBase.h | 1 + videodecoder/VideoDecoderInterface.h | 1 + 4 files changed, 26 insertions(+), 1 deletion(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index b5a4a6b..bf2df90 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -833,7 +833,8 @@ Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) { if (!rawDataMode) { needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth) || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight) - || isWiDiStatusChanged(); + || isWiDiStatusChanged() + || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber); } if (needFlush || (rawDataMode && mSizeChanged)) { diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index 7d1cd13..a5b73f4 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -148,6 +148,7 @@ Decode_Status VideoDecoderBase::reset(VideoConfigBuffer *buffer) { mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth; mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight; } + mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber; mLowDelay = buffer->flag & WANT_LOW_DELAY; mRawOutput = buffer->flag & WANT_RAW_OUTPUT; if (mRawOutput) { @@ -220,6 +221,22 @@ void VideoDecoderBase::flush(void) { } +void VideoDecoderBase::freeSurfaceBuffers(void) { + if (mVAStarted == false) { + // nothing to free surface buffers at this stage + return; + } + + pthread_mutex_lock(&mLock); + + endDecodingFrame(true); + + // if VA is already started, terminate VA as graphic buffers are reallocated by omxcodec + terminateVA(); + + pthread_mutex_unlock(&mLock); +} + const VideoFormatInfo* VideoDecoderBase::getFormatInfo(void) { return &mVideoFormatInfo; } @@ -961,6 +978,11 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i } Decode_Status VideoDecoderBase::terminateVA(void) { + if (mVAStarted == false) { + // VA hasn't been started yet + return DECODE_SUCCESS; + } + if (mSurfaceBuffers) { for (int32_t i = 0; i < mNumSurfaces; i++) { if (mSurfaceBuffers[i].renderBuffer.rawData) { diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h index e4a033a..893f23a 100755 --- a/videodecoder/VideoDecoderBase.h +++ b/videodecoder/VideoDecoderBase.h @@ -60,6 +60,7 @@ public: virtual void stop(void); //virtual Decode_Status decode(VideoDecodeBuffer *buffer); virtual void flush(void); + virtual void freeSurfaceBuffers(void); virtual const VideoRenderBuffer* getOutput(bool draining = false, VideoErrorBuffer *output_buf = NULL); virtual Decode_Status signalRenderDone(void * graphichandler); virtual const VideoFormatInfo* getFormatInfo(void); diff --git a/videodecoder/VideoDecoderInterface.h 
b/videodecoder/VideoDecoderInterface.h index 4c6a039..acedea3 100644 --- a/videodecoder/VideoDecoderInterface.h +++ b/videodecoder/VideoDecoderInterface.h @@ -36,6 +36,7 @@ public: virtual void stop(void) = 0; virtual void flush() = 0; virtual Decode_Status decode(VideoDecodeBuffer *buffer) = 0; + virtual void freeSurfaceBuffers(void) = 0; virtual const VideoRenderBuffer* getOutput(bool draining = false, VideoErrorBuffer *output_buf = NULL) = 0; virtual const VideoFormatInfo* getFormatInfo(void) = 0; virtual Decode_Status signalRenderDone(void * graphichandler) = 0; -- cgit v1.2.3 From f03e259c3d640ae427df10a8b891776138954ca6 Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Wed, 9 Apr 2014 19:06:15 +0800 Subject: libmix: correct actual buffer count for AVC BZ: 185530 Correct actual buffer count for AVC. Change-Id: Id836390a5be0d60c4de88651d999e482ac49b755 Signed-off-by: Tianmi Chen --- videodecoder/VideoDecoderAVC.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index bf2df90..03abb3b 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -772,9 +772,8 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { } } - int DPBSize = getDPBSize(data); // considering there is a B frame to be output, which needs to be exclued in output queue - int diff = DPBSize - (mOutputWindowSize - 1); + int diff = data->codec_data->num_ref_frames + 1 - (mOutputWindowSize - 1); if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) { mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber; -- cgit v1.2.3 From 08f6276c1fea27d33124fc07a3b06fabe63fc19f Mon Sep 17 00:00:00 2001 From: "Sun, Jian" Date: Wed, 9 Apr 2014 16:32:46 +0800 Subject: fixed a klocwork issue BZ: 185618 In the function stream_memcpy, if the buffers aren't 16-byte aligned, it uses memcpy. The function should return right after calling memcpy. Change-Id: Idae5888ca54692b74cb98da774e6ae984d38c6df Signed-off-by: Sun, Jian --- videodecoder/use_util_sse4.h | 1 + 1 file changed, 1 insertion(+) diff --git a/videodecoder/use_util_sse4.h b/videodecoder/use_util_sse4.h index 073ae22..338622c 100644 --- a/videodecoder/use_util_sse4.h +++ b/videodecoder/use_util_sse4.h @@ -34,6 +34,7 @@ inline void stream_memcpy(void* dst_buff, const void* src_buff, size_t size) bool isAligned = (((size_t)(src_buff) | (size_t)(dst_buff)) & 0xF) == 0; if (!isAligned) { memcpy(dst_buff, src_buff, size); + return; } static const size_t regs_count = 8; -- cgit v1.2.3 From 5939b80110167a66339f1c6e217dfb88c37aeda6 Mon Sep 17 00:00:00 2001 From: bolunliu Date: Wed, 9 Apr 2014 16:31:13 +0800 Subject: Use VA type surface for QVGA encode for Moorefield BZ: 182196 To fix a CTS bug, use a VA type surface for QVGA encode. This is just a workaround method.
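In effect, this workaround routes QVGA-or-smaller frames through the copy path of VASurfaceMap::doMapping(): instead of mapping the gfx handle directly, the encoder allocates a fresh VA surface (the CreateNewVASurface() path) and copies the frame into it. A condensed sketch of the added check, reusing only the MAP_ACTION_* flags and bounds that appear in the hunk below:

    // Sketch of the workaround, not the literal patch: when 64-byte
    // alignment would be required and the frame is QVGA or smaller,
    // force the copy path; doMapping() then allocates a new VA surface
    // via CreateNewVASurface() and copies the source frame into it.
    if ((mAction & MAP_ACTION_ALIGN64) && width <= 320 && height <= 240)
        mAction |= MAP_ACTION_COPY;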
Change-Id: I597f1ead5251aa0cc316097f203467354dc3a7a7 Signed-off-by: bolunliu --- videoencoder/VideoEncoderUtils.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp index 0ccbb3b..8e08688 100644 --- a/videoencoder/VideoEncoderUtils.cpp +++ b/videoencoder/VideoEncoderUtils.cpp @@ -321,6 +321,10 @@ Encode_Status VASurfaceMap::doMapping() { mAction |= MAP_ACTION_COPY; } + if(mAction & MAP_ACTION_ALIGN64 && width <= 320 && height <= 240) { + mAction |= MAP_ACTION_COPY; + } + if (mAction & MAP_ACTION_COPY) { //must allocate new vasurface(EXternalMemoryNULL, uncached) //allocate new vasurface mVASurface = CreateNewVASurface(mVADisplay, stride, height); -- cgit v1.2.3 From e932a35f4f7d610d8ed9d6aa9cb1b2737bc29d1d Mon Sep 17 00:00:00 2001 From: gji2 Date: Mon, 7 Apr 2014 22:45:58 +0800 Subject: High profile feature direct_8x8_inference_flag setting enable in omx-component and libmix BZ: 185130 High profile feature direct_8x8_inference_flag setting enable in omx-component and libmix Change-Id: I483ac4bd00fb84b89f1df39334085d8a3753b9f9 Signed-off-by: gji2 --- videoencoder/VideoEncoderAVC.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp index 63205c6..fb17443 100644 --- a/videoencoder/VideoEncoderAVC.cpp +++ b/videoencoder/VideoEncoderAVC.cpp @@ -1049,11 +1049,12 @@ Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) { LOG_I( "initial_qp = %d\n", rcMiscParam->initial_qp); LOG_I( "min_qp = %d\n", rcMiscParam->min_qp); LOG_I( "basic_unit_size = %d\n", rcMiscParam->basic_unit_size); + LOG_I( "bDirect8x8Inference = %d\n",mVideoParamsAVC.bDirect8x8Inference); // Not sure whether these settings work for all drivers avcSeqParams.seq_fields.bits.frame_mbs_only_flag = 1; avcSeqParams.seq_fields.bits.pic_order_cnt_type = 0; - avcSeqParams.seq_fields.bits.direct_8x8_inference_flag = 0; + avcSeqParams.seq_fields.bits.direct_8x8_inference_flag = mVideoParamsAVC.bDirect8x8Inference; avcSeqParams.seq_fields.bits.log2_max_frame_num_minus4 = 0; avcSeqParams.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = 2; -- cgit v1.2.3 From 782c99156bca9fad679859fb92693285eceee69b Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Thu, 10 Apr 2014 14:03:39 +0800 Subject: Add new error code to indicate invalid input surface error BZ: 183375 Add new error code to indicate invalid input surface error Change-Id: Ie2913afa1121e7b6b38b466c45b3c4cdc7083a24 Signed-off-by: Zhao Liang --- videoencoder/VideoEncoderDef.h | 1 + videoencoder/VideoEncoderUtils.cpp | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 6c74b32..b5b7ae8 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -19,6 +19,7 @@ typedef int32_t Encode_Status; // Video encode error code enum { + ENCODE_INVALID_SURFACE = -11, ENCODE_NO_REQUEST_DATA = -10, ENCODE_WRONG_STATE = -9, ENCODE_NOTIMPL = -8, diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp index 8e08688..aa2c0e2 100644 --- a/videoencoder/VideoEncoderUtils.cpp +++ b/videoencoder/VideoEncoderUtils.cpp @@ -442,7 +442,7 @@ Encode_Status VASurfaceMap::MappingSurfaceID(intptr_t value) { mVASurface = CreateSurfaceFromExternalBuf(kBufHandle, mVinfo); if (mVASurface == VA_INVALID_SURFACE) - return ENCODE_DRIVER_FAIL; + return ENCODE_INVALID_SURFACE; mVASurfaceWidth = mVinfo.width; 
mVASurfaceHeight = mVinfo.height; @@ -481,7 +481,7 @@ Encode_Status VASurfaceMap::MappingGfxHandle(intptr_t value) { vinfo.lumaStride = mVASurfaceStride; mVASurface = CreateSurfaceFromExternalBuf(value, vinfo); if (mVASurface == VA_INVALID_SURFACE) - return ENCODE_DRIVER_FAIL; + return ENCODE_INVALID_SURFACE; return ENCODE_SUCCESS; } @@ -493,7 +493,7 @@ Encode_Status VASurfaceMap::MappingKbufHandle(intptr_t value) { mVinfo.size = mVinfo.lumaStride * mVinfo.height * 1.5; mVASurface = CreateSurfaceFromExternalBuf(value, mVinfo); if (mVASurface == VA_INVALID_SURFACE) - return ENCODE_DRIVER_FAIL; + return ENCODE_INVALID_SURFACE; mVASurfaceWidth = mVinfo.width; mVASurfaceHeight = mVinfo.height; @@ -506,7 +506,7 @@ Encode_Status VASurfaceMap::MappingMallocPTR(intptr_t value) { mVASurface = CreateSurfaceFromExternalBuf(value, mVinfo); if (mVASurface == VA_INVALID_SURFACE) - return ENCODE_DRIVER_FAIL; + return ENCODE_INVALID_SURFACE; mVASurfaceWidth = mVinfo.width; mVASurfaceHeight = mVinfo.height; -- cgit v1.2.3 From cfbdbe69f95af2b45deeebf5ba38e3882f00d026 Mon Sep 17 00:00:00 2001 From: bolunliu Date: Tue, 15 Apr 2014 14:19:35 +0800 Subject: Add render layer structure misc parameter in libmix BZ: 186811 Due to libva interface change, libmix should render layer structure misc parameter before encode. Change-Id: I28c2e5c9c283ff8b7f6f3ef7feec0e8383c0f7c3 Signed-off-by: bolunliu --- videoencoder/VideoEncoderVP8.cpp | 47 ++++++++++++++++++++++++++++++++++++++++ videoencoder/VideoEncoderVP8.h | 1 + 2 files changed, 48 insertions(+) diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index 168b60e..c82194e 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -301,12 +301,59 @@ Encode_Status VideoEncoderVP8::renderMultiTemporalBitRateFrameRate(void) return 0; } +Encode_Status VideoEncoderVP8::renderLayerStructureParam(void) +{ + VABufferID layer_struc_buf; + VAStatus vaStatus = VA_STATUS_SUCCESS; + VAEncMiscParameterBuffer *misc_param; + VAEncMiscParameterTemporalLayerStructure *misc_layer_struc; + + vaStatus = vaCreateBuffer(mVADisplay, mVAContext, + VAEncMiscParameterBufferType, + sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterTemporalLayerStructure), + 1, NULL, &layer_struc_buf); + + CHECK_VA_STATUS_RETURN("vaCreateBuffer"); + vaMapBuffer(mVADisplay, layer_struc_buf, (void **)&misc_param); + misc_param->type = VAEncMiscParameterTypeTemporalLayerStructure; + misc_layer_struc = (VAEncMiscParameterTemporalLayerStructure *)misc_param->data; + memset(misc_layer_struc, 0, sizeof(*misc_layer_struc)); + + misc_layer_struc->number_of_layers = mComParams.numberOfLayer; + + LOGE("renderLayerStructureParam misc_layer_struc->number_of_layers is %d",misc_layer_struc->number_of_layers); + + if (mComParams.numberOfLayer == 2) { + misc_layer_struc->periodicity = 2; + misc_layer_struc->layer_id[0] = 0; + misc_layer_struc->layer_id[1] = 1; + } + if (mComParams.numberOfLayer == 3) { + misc_layer_struc->periodicity = 4; + misc_layer_struc->layer_id[0] = 0; + misc_layer_struc->layer_id[1] = 2; + misc_layer_struc->layer_id[2] = 1; + misc_layer_struc->layer_id[3] = 2; + } + + vaUnmapBuffer(mVADisplay, layer_struc_buf); + + vaStatus = vaRenderPicture(mVADisplay, mVAContext, &layer_struc_buf, 1); + CHECK_VA_STATUS_RETURN("vaRenderPicture");; + + return 0; +} + + Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) { Encode_Status ret = ENCODE_SUCCESS; LOG_V( "Begin\n"); if (mFrameNum == 0) { + 
if((mComParams.numberOfLayer==2)||(mComParams.numberOfLayer==3)) + ret = renderLayerStructureParam(); + ret = renderFrameRateParams(); ret = renderRCParams(); ret = renderHRDParams(); diff --git a/videoencoder/VideoEncoderVP8.h b/videoencoder/VideoEncoderVP8.h index 20cd72c..849d209 100644 --- a/videoencoder/VideoEncoderVP8.h +++ b/videoencoder/VideoEncoderVP8.h @@ -39,6 +39,7 @@ private: Encode_Status renderHRDParams(void); Encode_Status renderFrameRateParams(void); Encode_Status renderMaxFrameSizeParams(void); + Encode_Status renderLayerStructureParam(void); Encode_Status renderMultiTemporalBitRateFrameRate(void); -- cgit v1.2.3 From 6a91593b3ace19fa351b6fd6393e2058cd594e8d Mon Sep 17 00:00:00 2001 From: hding3 Date: Tue, 15 Apr 2014 09:01:50 +0800 Subject: Add the offset to the encrypted slice size BZ: 187050 When the encrypted slice data doesn't start on the block-aligned address. The offset should be added to the sizeLength to reflect the true size of the encrypted slice data. Change-Id: I2d6cc8ac649cd66f74611e307aa389fff55a9205 Signed-off-by: hding3 --- videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp index 4ff7771..4428ac4 100644 --- a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp @@ -164,12 +164,13 @@ Decode_Status VideoDecoderAVCSecure::processModularInputBuffer(VideoDecodeBuffer mSliceInfo[sliceidx].sliceHeaderByte = nalu_type; mSliceInfo[sliceidx].sliceStartOffset = (nalu_offset >> 4) << 4; mSliceInfo[sliceidx].sliceByteOffset = nalu_offset - mSliceInfo[sliceidx].sliceStartOffset; - mSliceInfo[sliceidx].sliceLength = nalu_size; + mSliceInfo[sliceidx].sliceLength = mSliceInfo[sliceidx].sliceByteOffset + nalu_size; mSliceInfo[sliceidx].sliceSize = (mSliceInfo[sliceidx].sliceByteOffset + nalu_size + 0xF) & ~0xF; VTRACE("sliceHeaderByte = 0x%x", mSliceInfo[sliceidx].sliceHeaderByte); VTRACE("sliceStartOffset = %d", mSliceInfo[sliceidx].sliceStartOffset); VTRACE("sliceByteOffset = %d", mSliceInfo[sliceidx].sliceByteOffset); VTRACE("sliceSize = %d", mSliceInfo[sliceidx].sliceSize); + VTRACE("sliceLength = %d", mSliceInfo[sliceidx].sliceLength); #if 0 uint32_t testsize; -- cgit v1.2.3 From c6f7365ea4c5f4fad78ba8c70e282173442a3bd1 Mon Sep 17 00:00:00 2001 From: Randy Xu Date: Wed, 9 Apr 2014 16:45:20 +0800 Subject: GFX: Blit interface change in DDK 1.3@286 BZ: 188023 Change-Id: Iec3c332c47adb545523818f533e509c200353c70 Signed-off-by: Randy Xu --- videoencoder/VideoEncoderUtils.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp index aa2c0e2..97045bb 100644 --- a/videoencoder/VideoEncoderUtils.cpp +++ b/videoencoder/VideoEncoderUtils.cpp @@ -145,7 +145,7 @@ static int gfx_Blit(buffer_handle_t src, buffer_handle_t dest, IMG_gralloc_module_public_t* GrallocMod = (IMG_gralloc_module_public_t*)gModule; #ifdef MRFLD_GFX - err = GrallocMod->Blit(GrallocMod, src, dest, w, h, 0, 0, 0); + err = GrallocMod->Blit(GrallocMod, src, dest, w, h, 0, 0, 0, 0); #else err = GrallocMod->Blit2(GrallocMod, src, dest, w, h, 0, 0); #endif -- cgit v1.2.3 From 926c3526cb484747aac1237c84737d54198f055e Mon Sep 17 00:00:00 2001 From: bolunliu Date: Tue, 15 Apr 2014 09:50:59 +0800 Subject: Refine libmix video encode to fix screen record messy 
issue. BZ: 188859 Change GFX buffer info usage logic. Refine 64bits variable type. Change-Id: I55e50e49c0323d6fb91fc48a48314a326d7ca3b5 Signed-off-by: bolunliu --- videoencoder/VideoEncoderBase.cpp | 12 ++++++------ videoencoder/VideoEncoderBase.h | 2 +- videoencoder/VideoEncoderDef.h | 2 +- videoencoder/VideoEncoderUtils.cpp | 2 +- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 9ab50e4..d9e92bd 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1642,14 +1642,14 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA if (map) { //has mapped, get surfaceID directly and do all necessary actions - LOG_I("direct find surface %d from value %x\n", map->getVASurface(), value); + LOG_I("direct find surface %d from value %p\n", map->getVASurface(), value); *sid = map->getVASurface(); map->doMapping(); return ret; } //if no found from list, then try to map value with parameters - LOG_I("not find surface from cache with value %x, start mapping if enough information\n", value); + LOG_I("not find surface from cache with value %p, start mapping if enough information\n", value); if (mStoreMetaDataInBuffers.isEnabled) { @@ -1696,7 +1696,7 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA ret = map->doMapping(); if (ret == ENCODE_SUCCESS) { - LOG_I("surface mapping success, map value %x into surface %d\n", value, map->getVASurface()); + LOG_I("surface mapping success, map value %p into surface %d\n", value, map->getVASurface()); mSrcSurfaceMapList.push_back(map); } else { delete map; @@ -1721,12 +1721,12 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA ret = map->doMapping(); if (ret == ENCODE_SUCCESS) { - LOG_I("surface mapping extravalue success, map value %x into surface %d\n", extravalues[i], map->getVASurface()); + LOG_I("surface mapping extravalue success, map value %p into surface %d\n", extravalues[i], map->getVASurface()); mSrcSurfaceMapList.push_back(map); } else { delete map; map = NULL; - LOG_E( "surface mapping extravalue failed, extravalue is %x\n", extravalues[i]); + LOG_E( "surface mapping extravalue failed, extravalue is %p\n", extravalues[i]); } } } @@ -1868,7 +1868,7 @@ Encode_Status VideoEncoderBase::renderHrd() { return ENCODE_SUCCESS; } -VASurfaceMap *VideoEncoderBase::findSurfaceMapByValue(int32_t value) { +VASurfaceMap *VideoEncoderBase::findSurfaceMapByValue(intptr_t value) { android::List::iterator node; for(node = mSrcSurfaceMapList.begin(); node != mSrcSurfaceMapList.end(); node++) diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 5bd7711..80f6849 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -85,7 +85,7 @@ private: Encode_Status setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer); Encode_Status getNewUsrptrFromSurface(uint32_t width, uint32_t height, uint32_t format, uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr); - VASurfaceMap* findSurfaceMapByValue(int32_t value); + VASurfaceMap* findSurfaceMapByValue(intptr_t value); Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid); void PrepareFrameInfo(EncodeTask* task); diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index b5b7ae8..c8695dc 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -470,7 +470,7 @@ 
struct VideoParamsUpstreamBuffer : VideoParamConfigSet { } VideoBufferSharingMode bufferMode; - uint32_t *bufList; + intptr_t *bufList; uint32_t bufCnt; ExternalBufferAttrib *bufAttrib; void *display; diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp index 97045bb..53aafbd 100644 --- a/videoencoder/VideoEncoderUtils.cpp +++ b/videoencoder/VideoEncoderUtils.cpp @@ -177,7 +177,7 @@ Encode_Status GetGfxBufferInfo(intptr_t handle, ValueInfo& vinfo){ if (h->iFormat == HAL_PIXEL_FORMAT_NV12) { #ifdef MRFLD_GFX - if((h->usage | GRALLOC_USAGE_HW_CAMERA_READ) || (h->usage | GRALLOC_USAGE_HW_CAMERA_WRITE) ) + if((h->usage & GRALLOC_USAGE_HW_CAMERA_READ) || (h->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) ) vinfo.lumaStride = (h->iWidth + 63) & ~63; //64 aligned else vinfo.lumaStride = (h->iWidth + 31) & ~31; //32 aligned -- cgit v1.2.3 From 0516a5ae1156b29cf21df0d23db0f0af31e51ee7 Mon Sep 17 00:00:00 2001 From: Zhao Liang Date: Mon, 21 Apr 2014 14:02:31 +0800 Subject: Flush surface map cache and buffer sharing cache BZ: 189363 Flush surface map cache and buffer sharing cache if new session id is detected Change-Id: Iea670ba2ea1977e6c3b9b5e856bd0039c80ad2ec Signed-off-by: Zhao Liang --- test/mix_encoder2.cpp | 1 + videoencoder/IntelMetadataBuffer.cpp | 6 +++--- videoencoder/VideoEncoderBase.cpp | 12 ++++++++++++ 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/test/mix_encoder2.cpp b/test/mix_encoder2.cpp index f4e407c..ae81ea1 100755 --- a/test/mix_encoder2.cpp +++ b/test/mix_encoder2.cpp @@ -1033,6 +1033,7 @@ public: pthread_attr_init(&attr); pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + mRunning = true; pthread_create(&mThread, &attr, MixEncoder::ThreadFunc, this); pthread_attr_destroy(&attr); diff --git a/videoencoder/IntelMetadataBuffer.cpp b/videoencoder/IntelMetadataBuffer.cpp index e275637..1cb2e39 100644 --- a/videoencoder/IntelMetadataBuffer.cpp +++ b/videoencoder/IntelMetadataBuffer.cpp @@ -691,7 +691,7 @@ IMB_Result IntelMetadataBuffer::ShareValue(sp mem) { mValue = (intptr_t)((intptr_t) ( mem->pointer() + 0x0FFF) & ~0x0FFF); - if (mSessionFlag == 0) //no sharing + if ( !(mSessionFlag & REMOTE_PROVIDER) && !(mSessionFlag & REMOTE_CONSUMER)) //no sharing return IMB_SUCCESS; if (mSessionFlag & REMOTE_PROVIDER) //is remote provider @@ -744,7 +744,7 @@ IMB_Result IntelMetadataBuffer::ShareValue(sp gbuffer) { mValue = (intptr_t)gbuffer->handle; - if (mSessionFlag == 0) //no sharing + if ( !(mSessionFlag & REMOTE_PROVIDER) && !(mSessionFlag & REMOTE_CONSUMER)) //no sharing return IMB_SUCCESS; if (mSessionFlag & REMOTE_PROVIDER == 0) //is remote provider @@ -794,7 +794,7 @@ IMB_Result IntelMetadataBuffer::ShareValue(sp gbuffer) IMB_Result IntelMetadataBuffer::ClearContext(uint32_t sessionflag, bool isProvider) { - if (sessionflag == 0) //no sharing + if ( !(sessionflag & REMOTE_PROVIDER) && !(sessionflag & REMOTE_CONSUMER)) //no sharing return IMB_SUCCESS; //clear local firstly diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index d9e92bd..4a0f376 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1634,7 +1634,19 @@ Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VA } #ifdef INTEL_VIDEO_XPROC_SHARING + uint32_t sflag = mSessionFlag; imb.GetSessionFlag(mSessionFlag); + if (mSessionFlag != sflag) { + //new sharing session, flush buffer sharing cache + IntelMetadataBuffer::ClearContext(sflag, false); + //flush surfacemap cache + LOG_V( 
"Flush Src Surface Map\n"); + while(! mSrcSurfaceMapList.empty()) + { + delete (*mSrcSurfaceMapList.begin()); + mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin()); + } + } #endif //find if mapped -- cgit v1.2.3 From ae766882631f15e79d454cc02ff97c052fb89af3 Mon Sep 17 00:00:00 2001 From: bolunliu Date: Tue, 22 Apr 2014 16:33:26 +0800 Subject: Remove walk round code from libmix BZ: 189353 Remove walk round code from libmix layer. This kind of issue can be fixed in driver layer. Change-Id: I49616278c3cbfe258d34e8e1d2fd99e3870f8d61 Signed-off-by: bolunliu --- videoencoder/VideoEncoderUtils.cpp | 4 ---- 1 file changed, 4 deletions(-) diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp index 53aafbd..e2f76d5 100644 --- a/videoencoder/VideoEncoderUtils.cpp +++ b/videoencoder/VideoEncoderUtils.cpp @@ -321,10 +321,6 @@ Encode_Status VASurfaceMap::doMapping() { mAction |= MAP_ACTION_COPY; } - if(mAction & MAP_ACTION_ALIGN64 && width <= 320 && height <= 240) { - mAction |= MAP_ACTION_COPY; - } - if (mAction & MAP_ACTION_COPY) { //must allocate new vasurface(EXternalMemoryNULL, uncached) //allocate new vasurface mVASurface = CreateNewVASurface(mVADisplay, stride, height); -- cgit v1.2.3 From 586e013321a70498beb08e11d1c2fe9c014898bb Mon Sep 17 00:00:00 2001 From: ywan171 Date: Thu, 10 Apr 2014 11:30:10 +0800 Subject: libmix: optimize search start code function in h264 BZ: 189364 optimize search start code ASM function in h264, if 0 occurs 15, 13, 12, 11 ....3, 1 byte, do not jump out from the loop immediately, check 15,14,12,....2, 1 byte, if no 0 occurs in these position, it's impossible to have 0x010000 at these 16 byte. Change-Id: Ie58776c164d499ec3860e190786a11a9191ec7df Signed-off-by: ywan171 --- mixvbp/vbp_manager/viddec_parse_sc.c | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mixvbp/vbp_manager/viddec_parse_sc.c b/mixvbp/vbp_manager/viddec_parse_sc.c index 5f76e5d..80e1266 100755 --- a/mixvbp/vbp_manager/viddec_parse_sc.c +++ b/mixvbp/vbp_manager/viddec_parse_sc.c @@ -47,8 +47,13 @@ uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state) "sub $0x10, %%eax\n\t" //eax-16 --> eax "cmp $0x10, %%eax\n\t" //eax >= 16? 
"jge MATCH_8_ZERO\n\t" //search next 16 bytes + "jmp END\n\t" "DATA_RET:\n\t" + "test $0xd555, %%edx\n\t" + "jz PREPARE_NEXT_MATCH\n\t" + + "END:\n\t" "movl %%ecx, %1\n\t" //ecx --> ptr "movl %%eax, %0\n\t" //eax --> data_left : "+m"(data_left), "+m"(ptr) //data_left --> eax, ptr -> ecx -- cgit v1.2.3 From 2e93d2bb721494c61972cb80174087ffc8c1cd26 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Tue, 22 Apr 2014 15:03:36 +0800 Subject: libmix: optimize getbits, peekbits, skipbits BZ: 189364 use 32 bits cache in getbits,peekbits function to avoid multiple loading byte operation Change-Id: I0bf1758d6f6b0eca9d03075778fc550d87af206a Signed-off-by: ywan171 --- mixvbp/vbp_manager/include/viddec_pm.h | 5 +- .../vbp_manager/include/viddec_pm_utils_bstream.h | 2 + mixvbp/vbp_manager/vbp_mp42_parser.c | 7 ++ mixvbp/vbp_manager/vbp_utils.c | 5 ++ mixvbp/vbp_manager/viddec_pm_parser_ops.c | 89 +++++++++++++++++++++- mixvbp/vbp_plugin/h264/h264parse_bsd.c | 29 ++++++- 6 files changed, 131 insertions(+), 6 deletions(-) diff --git a/mixvbp/vbp_manager/include/viddec_pm.h b/mixvbp/vbp_manager/include/viddec_pm.h index 31bd0f4..d273392 100755 --- a/mixvbp/vbp_manager/include/viddec_pm.h +++ b/mixvbp/vbp_manager/include/viddec_pm.h @@ -28,7 +28,10 @@ typedef struct uint8_t late_frame_detect; uint8_t frame_start_found; uint32_t next_workload_error_eos; - +#ifdef PARSER_OPT + uint32_t cached_word; + uint32_t left_bnt; +#endif uint32_t codec_data[MAX_CODEC_CXT_SIZE<<3]; } viddec_pm_cxt_t; diff --git a/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h index 3bf1857..37e90cd 100755 --- a/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h +++ b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h @@ -7,6 +7,8 @@ #define SCRATCH_SIZE 20 #define MIN_DATA 8 +#define PARSER_OPT /* parser optimization*/ + typedef struct { uint8_t *buf; diff --git a/mixvbp/vbp_manager/vbp_mp42_parser.c b/mixvbp/vbp_manager/vbp_mp42_parser.c index b954b38..b961a65 100755 --- a/mixvbp/vbp_manager/vbp_mp42_parser.c +++ b/mixvbp/vbp_manager/vbp_mp42_parser.c @@ -1110,7 +1110,14 @@ uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) } #else +#ifdef PARSER_OPT // read 3 bytes since resync_marker_length is between 17 bits and 23 bits + int bit_index = parent->getbits.bstrm_buf.buf_index * 8 + parent->getbits.bstrm_buf.buf_bitoff - parent->left_bnt; + parent->getbits.bstrm_buf.buf_index = bit_index >> 3; + parent->getbits.bstrm_buf.buf_bitoff = bit_index & 0x7; + parent->cached_word = 0; + parent->left_bnt = 0; +#endif if (parent->getbits.bstrm_buf.buf_index + 3 > parent->getbits.bstrm_buf.buf_end) { break; diff --git a/mixvbp/vbp_manager/vbp_utils.c b/mixvbp/vbp_manager/vbp_utils.c index f0cb94b..f62f9ba 100755 --- a/mixvbp/vbp_manager/vbp_utils.c +++ b/mixvbp/vbp_manager/vbp_utils.c @@ -333,6 +333,11 @@ static void vbp_setup_parser_for_item(viddec_pm_cxt_t *cxt, uint32 item) cxt->getbits.phase = 0; cxt->getbits.emulation_byte_counter = 0; +#ifdef PARSER_OPT + cxt->cached_word = 0; + cxt->left_bnt = 0; +#endif + cxt->list.start_offset = cxt->list.data[item].stpos; cxt->list.end_offset = cxt->list.data[item].edpos; cxt->list.total_bytes = cxt->list.data[item].edpos - cxt->list.data[item].stpos; diff --git a/mixvbp/vbp_manager/viddec_pm_parser_ops.c b/mixvbp/vbp_manager/viddec_pm_parser_ops.c index b10eb2d..18a6b3c 100755 --- a/mixvbp/vbp_manager/viddec_pm_parser_ops.c +++ b/mixvbp/vbp_manager/viddec_pm_parser_ops.c @@ -12,12 +12,34 @@ int32_t viddec_pm_get_bits(void 
*parent, uint32_t *data, uint32_t num_bits) viddec_pm_cxt_t *cxt; cxt = (viddec_pm_cxt_t *)parent; +#ifdef PARSER_OPT + if (cxt->left_bnt < num_bits) { + uint32_t load_word = 0; + uint32_t load_bits = 32-cxt->left_bnt; + uint32_t bits_left = (cxt->getbits.bstrm_buf.buf_end-cxt->getbits.bstrm_buf.buf_index) * 8 - cxt->getbits.bstrm_buf.buf_bitoff; + load_bits = bits_left > load_bits ? load_bits:bits_left; + ret = viddec_pm_utils_bstream_getbits(&(cxt->getbits), &load_word, load_bits); + if (ret == -1) { + VTRACE("FAILURE: getbits returned %d", ret); + return ret; + } + cxt->cached_word |= (load_word << (32-cxt->left_bnt-load_bits)); + cxt->left_bnt += load_bits; + } + *data = (cxt->cached_word >> (32 - num_bits)); + if (num_bits == 32) { + cxt->cached_word = 0; + } else { + cxt->cached_word <<= num_bits; + } + cxt->left_bnt -= num_bits; +#else ret = viddec_pm_utils_bstream_getbits(&(cxt->getbits), data, num_bits); if (ret == -1) { VTRACE("FAILURE: getbits returned %d", ret); } - +#endif return ret; } @@ -27,11 +49,29 @@ int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits) viddec_pm_cxt_t *cxt; cxt = (viddec_pm_cxt_t *)parent; +#ifdef PARSER_OPT + if (cxt->left_bnt < num_bits) { + uint32_t load_word = 0; + uint32_t load_bits = 32 - cxt->left_bnt; + uint32_t bits_left = (cxt->getbits.bstrm_buf.buf_end-cxt->getbits.bstrm_buf.buf_index) * 8 + - cxt->getbits.bstrm_buf.buf_bitoff; + load_bits = bits_left > load_bits ? load_bits:bits_left; + ret = viddec_pm_utils_bstream_getbits(&(cxt->getbits), &load_word, load_bits); + if (ret == -1) { + VTRACE("FAILURE: peekbits returned %d, %d, %d", ret, cxt->left_bnt, num_bits); + return ret; + } + cxt->cached_word |= (load_word << (32-cxt->left_bnt-load_bits)); + cxt->left_bnt += load_bits; + } + *data = (cxt->cached_word >> (32 - num_bits)); +#else ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits); if (ret == -1) { VTRACE("FAILURE: peekbits returned %d", ret); } +#endif return ret; } @@ -41,7 +81,22 @@ int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits) viddec_pm_cxt_t *cxt; cxt = (viddec_pm_cxt_t *)parent; +#ifdef PARSER_OPT + if (num_bits <= cxt->left_bnt) { + cxt->left_bnt -= num_bits; + if (num_bits < 32) { + cxt->cached_word <<= num_bits; + } else { + cxt->cached_word = 0; + } + } else { + ret = viddec_pm_utils_bstream_skipbits(&(cxt->getbits), num_bits-cxt->left_bnt); + cxt->left_bnt = 0; + cxt->cached_word = 0; + } +#else ret = viddec_pm_utils_bstream_skipbits(&(cxt->getbits), num_bits); +#endif return ret; } @@ -51,9 +106,20 @@ int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, uint8_ viddec_pm_cxt_t *cxt; cxt = (viddec_pm_cxt_t *)parent; +#ifdef PARSER_OPT + if (!cxt->left_bnt) { + viddec_pm_utils_skip_if_current_is_emulation(&(cxt->getbits)); + viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), bit, byte, is_emul); + } else { + viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), bit, byte, is_emul); + uint32_t offset = *bit + *byte * 8 - cxt->left_bnt; + *bit = offset & 7; + *byte = offset >> 3; + } +#else viddec_pm_utils_skip_if_current_is_emulation(&(cxt->getbits)); viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), bit, byte, is_emul); - +#endif return ret; } @@ -62,9 +128,25 @@ int32_t viddec_pm_is_nomoredata(void *parent) { int32_t ret=0; viddec_pm_cxt_t *cxt; - cxt = (viddec_pm_cxt_t *)parent; +#ifdef PARSER_OPT + uint32_t bits_left = (cxt->getbits.bstrm_buf.buf_end-cxt->getbits.bstrm_buf.buf_index) * 8 + - cxt->getbits.bstrm_buf.buf_bitoff + 
cxt->left_bnt; + uint32_t byte_left = (bits_left+7) >> 3; + switch (byte_left) + { + case 2: + ret = (cxt->getbits.bstrm_buf.buf[cxt->getbits.bstrm_buf.buf_end-1] == 0x0); + break; + case 1: + ret = 1; + break; + default: + break; + } +#else ret = viddec_pm_utils_bstream_nomorerbspdata(&(cxt->getbits)); +#endif return ret; } @@ -72,7 +154,6 @@ uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte) { int32_t ret=-1; viddec_pm_cxt_t *cxt; - cxt = (viddec_pm_cxt_t *)parent; ret = viddec_pm_utils_bstream_get_current_byte(&(cxt->getbits), byte); return ret; diff --git a/mixvbp/vbp_plugin/h264/h264parse_bsd.c b/mixvbp/vbp_plugin/h264/h264parse_bsd.c index e5664ce..73d99c5 100644 --- a/mixvbp/vbp_plugin/h264/h264parse_bsd.c +++ b/mixvbp/vbp_plugin/h264/h264parse_bsd.c @@ -55,7 +55,30 @@ uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo) uint32_t i = 0; VTRACE("bstream->buf_bitoff = %d", bstream->buf_bitoff); VTRACE("bstream->buf_index = %d", bstream->buf_index); - +#ifdef PARSER_OPT + viddec_pm_cxt_t *pm_cxt = (viddec_pm_cxt_t *)parent; + int32_t cached_word = pm_cxt->cached_word; + int32_t left_bnt = pm_cxt->left_bnt; + if (left_bnt != 0) { + if (cached_word == 0) { + leadingZeroBits += left_bnt; + left_bnt = 0; + } else { + match = 1; + count = 1; + left_bnt --; + while (((cached_word & 0x80000000) != 0x80000000) && (left_bnt > 0)) { + cached_word <<= 1; + count ++; + left_bnt --; + } + cached_word <<= 1; + leadingZeroBits += count; + } + pm_cxt->cached_word = cached_word; + pm_cxt->left_bnt = left_bnt; + } +#endif while (!match) { curr_byte = *curr_addr++; @@ -187,8 +210,12 @@ uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo) shift_bits = 7-bits_offset; +#ifndef PARSER_OPT // read one byte viddec_pm_get_cur_byte(parent, &cur_byte); +#else + viddec_pm_peek_bits(parent, &cur_byte, 8-bits_offset); +#endif ctr_bit = ((cur_byte)>> (shift_bits--)) & 0x01; -- cgit v1.2.3 From c38d1f16d60b574e9d8835c94e3953375323f52f Mon Sep 17 00:00:00 2001 From: bolunliu Date: Mon, 21 Apr 2014 10:48:21 +0800 Subject: Remove Temporal layer hardcode in libmix. BZ: 189356 Remove the temporal layer hardcode in libmix. The reference frame structure will be configured from the OMX/APP layer.
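With the hardcoded pattern removed, the caller now supplies the temporal layer structure through VideoParamsTemporalLayerNumber, which the diff below extends with nPeriodicity and nLayerID. A minimal sketch of how a client might pass the same 3-layer pattern the old renderLayerStructureParam() hardcoded (periodicity 4, layer pattern 0,2,1,2); the encoder handle and error checking are assumed:

    // Sketch: configure from the OMX/APP layer what used to be hardcoded.
    VideoParamsTemporalLayerNumber layers;   // ctor fills in type and size
    layers.numberOfLayer = 3;
    layers.nPeriodicity = 4;
    uint32_t ids[4] = {0, 2, 1, 2};          // layer id per frame in one period
    for (uint32_t i = 0; i < layers.nPeriodicity; i++)
        layers.nLayerID[i] = ids[i];
    encoder->setParameters(&layers);         // 'encoder' is an assumed handle to
                                             // a VideoEncoderBase-derived encoder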
Change-Id: I51974e63e2c45ca1ffa5d5f500cc7dae99c3d436 Signed-off-by: bolunliu --- videoencoder/VideoEncoderBase.cpp | 6 ++++++ videoencoder/VideoEncoderDef.h | 6 +++++- videoencoder/VideoEncoderVP8.cpp | 28 ++++++++++------------------ 3 files changed, 21 insertions(+), 19 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 4a0f376..e1c62ff 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -936,6 +936,8 @@ void VideoEncoderBase::setDefaultParams() { mComParams.syncEncMode = false; mComParams.codedBufNum = 2; mComParams.numberOfLayer = 1; + mComParams.nPeriodicity = 0; + memset(mComParams.nLayerID,0,32*sizeof(uint32_t)); mHrdParam.bufferSize = 0; mHrdParam.initBufferFullness = 0; @@ -1026,6 +1028,10 @@ Encode_Status VideoEncoderBase::setParameters( } mComParams.numberOfLayer = numberoflayer->numberOfLayer; + mComParams.nPeriodicity = numberoflayer->nPeriodicity; + for(int i=0;inPeriodicity;i++) + mComParams.nLayerID[i] = numberoflayer->nLayerID[i]; + mRenderMultiTemporal = true; break; } diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index c8695dc..0c14809 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -371,6 +371,8 @@ struct VideoParamsCommon : VideoParamConfigSet { //CodedBuffer properties uint32_t codedBufNum; uint32_t numberOfLayer; + uint32_t nPeriodicity; + uint32_t nLayerID[32]; VideoParamsCommon() { type = VideoParamsTypeCommon; @@ -535,7 +537,9 @@ struct VideoParamsTemporalLayerNumber : VideoParamConfigSet { size = sizeof(VideoParamsTemporalLayerNumber); } - int32_t numberOfLayer; + uint32_t numberOfLayer; + uint32_t nPeriodicity; + uint32_t nLayerID[32]; }; diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index c82194e..4e19f93 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -307,6 +307,7 @@ Encode_Status VideoEncoderVP8::renderLayerStructureParam(void) VAStatus vaStatus = VA_STATUS_SUCCESS; VAEncMiscParameterBuffer *misc_param; VAEncMiscParameterTemporalLayerStructure *misc_layer_struc; + uint32_t i; vaStatus = vaCreateBuffer(mVADisplay, mVAContext, VAEncMiscParameterBufferType, @@ -320,20 +321,12 @@ Encode_Status VideoEncoderVP8::renderLayerStructureParam(void) memset(misc_layer_struc, 0, sizeof(*misc_layer_struc)); misc_layer_struc->number_of_layers = mComParams.numberOfLayer; - + misc_layer_struc->periodicity = mComParams.nPeriodicity; LOGE("renderLayerStructureParam misc_layer_struc->number_of_layers is %d",misc_layer_struc->number_of_layers); - if (mComParams.numberOfLayer == 2) { - misc_layer_struc->periodicity = 2; - misc_layer_struc->layer_id[0] = 0; - misc_layer_struc->layer_id[1] = 1; - } - if (mComParams.numberOfLayer == 3) { - misc_layer_struc->periodicity = 4; - misc_layer_struc->layer_id[0] = 0; - misc_layer_struc->layer_id[1] = 2; - misc_layer_struc->layer_id[2] = 1; - misc_layer_struc->layer_id[3] = 2; + for(i=0;ilayer_id[i] = mComParams.nLayerID[i]; } vaUnmapBuffer(mVADisplay, layer_struc_buf); @@ -348,12 +341,13 @@ Encode_Status VideoEncoderVP8::renderLayerStructureParam(void) Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) { Encode_Status ret = ENCODE_SUCCESS; - LOG_V( "Begin\n"); if (mFrameNum == 0) { - if((mComParams.numberOfLayer==2)||(mComParams.numberOfLayer==3)) - ret = renderLayerStructureParam(); - + if(mRenderMultiTemporal) + { + ret = renderLayerStructureParam(); + mRenderMultiTemporal = false; + } ret = 
renderFrameRateParams(); ret = renderRCParams(); ret = renderHRDParams(); @@ -484,8 +478,6 @@ Encode_Status VideoEncoderVP8::derivedSetConfig(VideoParamConfigSet *videoEncCon int layer_id; CHECK_NULL_RETURN_IFFAIL(videoEncConfig); - //LOGE ("%s begin",__func__); - switch (videoEncConfig->type) { case VideoConfigTypeVP8:{ -- cgit v1.2.3 From 48cc82afe5432fd182aa9ec288ee31640f07fc46 Mon Sep 17 00:00:00 2001 From: bolunliu Date: Mon, 21 Apr 2014 18:54:57 +0800 Subject: Enable APP/OMXENC to set framerate/bitrate for layers separately BZ: 189358 Enable APP/OMXENC to set framerate/bitrate for layers separately Change-Id: I84bcf1fadba494a489aa6c12829ff944f241ac83 Signed-off-by: bolunliu --- videoencoder/VideoEncoderBase.cpp | 15 ++++- videoencoder/VideoEncoderBase.h | 3 + videoencoder/VideoEncoderDef.h | 13 +++++ videoencoder/VideoEncoderVP8.cpp | 119 ++++++++++++-------------------------- videoencoder/VideoEncoderVP8.h | 24 ++++---- 5 files changed, 78 insertions(+), 96 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index e1c62ff..aa556e3 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -1211,10 +1211,21 @@ Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) { if (configBitRate->size != sizeof (VideoConfigBitRate)) { return ENCODE_INVALID_PARAMS; } - mComParams.rcParams = configBitRate->rcParams; - mRenderBitRate = true; + + if(mComParams.numberOfLayer == 1) + { + mComParams.rcParams = configBitRate->rcParams; + mRenderBitRate = true; + } + else + { + mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].nLayerID = configBitRate->rcParams.temporalID; + mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].bitRate = configBitRate->rcParams.bitRate; + mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].frameRate = configBitRate->rcParams.temporalFrameRate; + } break; } + case VideoConfigTypeResolution: { // Not Implemented diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 80f6849..80532b9 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -167,6 +167,9 @@ protected: //VASurface mapping extra action int mVASurfaceMappingAction; + // For Temporal Layer Bitrate FrameRate settings + VideoConfigTemperalLayerBitrateFramerate mTemporalLayerBitrateFramerate[3]; + #ifdef INTEL_VIDEO_XPROC_SHARING uint32_t mSessionFlag; #endif diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index 0c14809..c57be9f 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -336,6 +336,7 @@ enum VideoParamConfigType { VideoConfigTypeVP8ReferenceFrame, VideoConfigTypeCIR, VideoConfigTypeVP8MaxFrameSizeRatio, + VideoConfigTypeTemperalLayerBitrateFramerate, VideoParamsConfigExtension }; @@ -707,4 +708,16 @@ struct VideoConfigVP8MaxFrameSizeRatio : VideoParamConfigSet { uint32_t max_frame_size_ratio; }; +struct VideoConfigTemperalLayerBitrateFramerate : VideoParamConfigSet { + + VideoConfigTemperalLayerBitrateFramerate() { + type = VideoConfigTypeTemperalLayerBitrateFramerate; + size = sizeof(VideoConfigTemperalLayerBitrateFramerate); + } + + uint32_t nLayerID; + uint32_t bitRate; + uint32_t frameRate; +}; + #endif /* __VIDEO_ENCODER_DEF_H__ */ diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index 4e19f93..936fc6b 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -119,7 +119,7 @@
Encode_Status VideoEncoderVP8::renderPictureParams(EncodeTask *task) { return ret; } -Encode_Status VideoEncoderVP8::renderRCParams(void) +Encode_Status VideoEncoderVP8::renderRCParams(uint32_t layer_id, bool total_bitrate) { VABufferID rc_param_buf; VAStatus vaStatus = VA_STATUS_SUCCESS; @@ -137,7 +137,16 @@ Encode_Status VideoEncoderVP8::renderRCParams(void) misc_param->type = VAEncMiscParameterTypeRateControl; misc_rate_ctrl = (VAEncMiscParameterRateControl *)misc_param->data; memset(misc_rate_ctrl, 0, sizeof(*misc_rate_ctrl)); - misc_rate_ctrl->bits_per_second = mComParams.rcParams.bitRate; + + if(total_bitrate) + misc_rate_ctrl->bits_per_second = mComParams.rcParams.bitRate; + else + { + misc_rate_ctrl->rc_flags.bits.temporal_id = layer_id; + if(mTemporalLayerBitrateFramerate[layer_id].bitRate != 0) + misc_rate_ctrl->bits_per_second = mTemporalLayerBitrateFramerate[layer_id].bitRate; + } + misc_rate_ctrl->target_percentage = 100; misc_rate_ctrl->window_size = 1000; misc_rate_ctrl->initial_qp = mVideoParamsVP8.init_qp; @@ -152,7 +161,7 @@ Encode_Status VideoEncoderVP8::renderRCParams(void) return 0; } -Encode_Status VideoEncoderVP8::renderFrameRateParams(void) +Encode_Status VideoEncoderVP8::renderFrameRateParams(uint32_t layer_id, bool total_framerate) { VABufferID framerate_param_buf; VAStatus vaStatus = VA_STATUS_SUCCESS; @@ -171,7 +180,15 @@ Encode_Status VideoEncoderVP8::renderFrameRateParams(void) misc_param->type = VAEncMiscParameterTypeFrameRate; misc_framerate = (VAEncMiscParameterFrameRate *)misc_param->data; memset(misc_framerate, 0, sizeof(*misc_framerate)); - misc_framerate->framerate = (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom; + + if(total_framerate) + misc_framerate->framerate = (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom; + else + { + misc_framerate->framerate_flags.bits.temporal_id = layer_id; + if(mTemporalLayerBitrateFramerate[layer_id].frameRate != 0) + misc_framerate->framerate = mTemporalLayerBitrateFramerate[layer_id].frameRate; + } vaUnmapBuffer(mVADisplay, framerate_param_buf); @@ -238,69 +255,6 @@ Encode_Status VideoEncoderVP8::renderMaxFrameSizeParams(void) return 0; } -Encode_Status VideoEncoderVP8::renderMultiTemporalBitRateFrameRate(void) -{ - VABufferID rc_param_buf; - VABufferID framerate_param_buf; - VAStatus vaStatus = VA_STATUS_SUCCESS; - VAEncMiscParameterBuffer *misc_param; - VAEncMiscParameterRateControl *misc_rate_ctrl; - VAEncMiscParameterFrameRate *misc_framerate; - - int i; - - vaStatus = vaCreateBuffer(mVADisplay, mVAContext, - VAEncMiscParameterBufferType, - sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl), - 1,NULL,&rc_param_buf); - - CHECK_VA_STATUS_RETURN("vaCreateBuffer"); - - for(i=0;itype = VAEncMiscParameterTypeRateControl; - misc_rate_ctrl = (VAEncMiscParameterRateControl *)misc_param->data; - memset(misc_rate_ctrl, 0, sizeof(*misc_rate_ctrl)); -// misc_rate_ctrl->bits_per_second = mVideoConfigVP8TemporalBitRateFrameRate[i].bitRate; - misc_rate_ctrl->rc_flags.bits.temporal_id = 0; - misc_rate_ctrl->target_percentage = 100; - misc_rate_ctrl->window_size = 1000; - misc_rate_ctrl->initial_qp = mVideoParamsVP8.init_qp; - misc_rate_ctrl->min_qp = mVideoParamsVP8.min_qp; - misc_rate_ctrl->basic_unit_size = 0; - misc_rate_ctrl->max_qp = mVideoParamsVP8.max_qp; - vaUnmapBuffer(mVADisplay, rc_param_buf); - - vaStatus = vaRenderPicture(mVADisplay,mVAContext, &rc_param_buf, 1); - } - - vaStatus = vaCreateBuffer(mVADisplay, mVAContext, - VAEncMiscParameterBufferType, - 
sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterFrameRate), - 1,NULL,&framerate_param_buf); - CHECK_VA_STATUS_RETURN("vaCreateBuffer"); - - for(i=0;itype = VAEncMiscParameterTypeFrameRate; - misc_framerate = (VAEncMiscParameterFrameRate *)misc_param->data; - memset(misc_framerate, 0, sizeof(*misc_framerate)); - misc_framerate->framerate_flags.bits.temporal_id = i; -// misc_framerate->framerate = mVideoConfigVP8TemporalBitRateFrameRate[i].frameRate; - - vaUnmapBuffer(mVADisplay, framerate_param_buf); - - vaStatus = vaRenderPicture(mVADisplay,mVAContext, &framerate_param_buf, 1); - } - - CHECK_VA_STATUS_RETURN("vaRenderPicture");; - - return 0; -} - Encode_Status VideoEncoderVP8::renderLayerStructureParam(void) { VABufferID layer_struc_buf; @@ -341,30 +295,40 @@ Encode_Status VideoEncoderVP8::renderLayerStructureParam(void) Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) { Encode_Status ret = ENCODE_SUCCESS; + uint32_t i; if (mFrameNum == 0) { + ret = renderSequenceParams(); + ret = renderFrameRateParams(0,true); + ret = renderRCParams(0,true); + ret = renderHRDParams(); + ret = renderMaxFrameSizeParams(); if(mRenderMultiTemporal) { ret = renderLayerStructureParam(); mRenderMultiTemporal = false; + } - ret = renderFrameRateParams(); - ret = renderRCParams(); - ret = renderHRDParams(); - ret = renderSequenceParams(); - ret = renderMaxFrameSizeParams(); + + if(mComParams.numberOfLayer > 1) + for(i=0;i Date: Mon, 16 Dec 2013 11:38:53 -0800 Subject: [DRM-PR-ASFPARSER]: ASF Parser changes to support Playready Protected Playback BZ: 190106 (1) Added handling to play playready protected audio/video contents (2) Modified to extract drm scheme UUID and wmdrm header to be sent to application level (3) Extract IV from Extended Stream Properties Change-Id: Ice63d2d5b812499914c4c004701b39d5d5082553 Signed-off-by: RupaliX --- asfparser/Android.mk | 12 ++- asfparser/AsfDataParser.cpp | 71 ++++++++++++++-- asfparser/AsfDataParser.h | 3 +- asfparser/AsfGuids.h | 3 +- asfparser/AsfHeaderParser.cpp | 187 +++++++++++++++++++++++++++++++++++++++++- asfparser/AsfHeaderParser.h | 15 +++- asfparser/AsfObjects.h | 23 +++++- asfparser/AsfParserDefs.h | 5 ++ asfparser/AsfStreamParser.cpp | 26 +++++- asfparser/AsfStreamParser.h | 5 +- 10 files changed, 328 insertions(+), 22 deletions(-) diff --git a/asfparser/Android.mk b/asfparser/Android.mk index 3fecfbf..69fbe01 100644 --- a/asfparser/Android.mk +++ b/asfparser/Android.mk @@ -11,13 +11,21 @@ LOCAL_SRC_FILES := \ LOCAL_C_INCLUDES := \ - $(LOCAL_PATH) + $(LOCAL_PATH) \ + bionic \ + $(call include-path-for, stlport) \ + + +LOCAL_SHARED_LIBRARIES := \ + libutils libcutils liblog libstlport LOCAL_COPY_HEADERS_TO := libmix_asfparser LOCAL_COPY_HEADERS := \ AsfParserDefs.h \ - AsfStreamParser.h + AsfStreamParser.h \ + AsfObjects.h \ + AsfGuids.h \ LOCAL_MODULE_TAGS := optional LOCAL_MODULE := libasfparser diff --git a/asfparser/AsfDataParser.cpp b/asfparser/AsfDataParser.cpp index 1982179..94e8c93 100644 --- a/asfparser/AsfDataParser.cpp +++ b/asfparser/AsfDataParser.cpp @@ -28,8 +28,12 @@ #include "AsfGuids.h" #include "AsfObjects.h" #include +#include +#include "AsfHeaderParser.h" +AsfHeaderParser *AsfDataParser::mHeaderParser = NULL; +using namespace std; // Helper fucctions static inline uint8_t lengthType2Bytes(uint8_t lengthType) { @@ -183,6 +187,7 @@ int AsfPayloadParsingInformation::parse(uint8_t *buffer, uint32_t size) { int AsfSinglePayloadUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out) { // initialize 
output + int retVal = 0; *out = NULL; streamNumber.value = *buffer; blockSize = 1; @@ -216,12 +221,43 @@ int AsfSinglePayloadUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPaylo return ASF_PARSER_NO_MEMORY; } + ALOGD("replicatedDataLength = %d", replicatedDataLength); + + // point to replicated data into object's buffer. Yet to be interpreted. + obj->replicatedDataLength = replicatedDataLength; + obj->replicatedData = buffer + blockSize; + // Replicated data, at least 8 bytes obj->mediaObjectLength = *(uint32_t*)(buffer + blockSize); obj->presentationTime = *(uint32_t*)(buffer + blockSize + 4); blockSize += replicatedDataLength; + // defined for temporary use as arg for getPayloadExtensionSystems + uint8_t streamnumber = streamNumber.bits.streamNumber; + vector *extSystems; + retVal = AsfDataParser::mHeaderParser->getPayloadExtensionSystems(streamnumber, &extSystems); + if (retVal != ASF_PARSER_SUCCESS) { + ALOGD("Error while parsing Payload Extension Systems"); + } else { + ALOGD("Extension System count = %d", extSystems->size()); + // find IV in replicated data + int rep_data_offset = 0; + + for (int i = 0; i < extSystems->size(); i++) { + // ptr to ext system's data in replicated data buffer. + if ((extSystems->at(i)->extensionSystemId) == ASF_Payload_Extension_System_Encryption_Sample_ID) { + obj->sampleID = obj->replicatedData + 8 + rep_data_offset; + } + if (extSystems->at(i)->extensionDataSize == 0xFFFF) { + uint16_t nSize = *((uint16_t*) (obj->replicatedData + 8 + rep_data_offset)); + rep_data_offset += (sizeof(uint16_t) + nSize); + } else { + rep_data_offset += extSystems->at(i)->extensionDataSize; + } + } + } + obj->payloadData = buffer + blockSize; // size = packet length - packet header length @@ -310,7 +346,6 @@ int AsfSinglePayloadCompressed::parse(uint8_t *buffer, uint32_t size, AsfPayload payloadLenRemaining -= subPayloadDataLength; } - if (payloadLenRemaining != 0) { // TODO: freePayloadDataInfo(first); @@ -340,6 +375,7 @@ int AsfMultiplePayloadsHeader::parse(uint8_t *buffer, uint32_t size) { int AsfMultiplePayloadsUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out) { // initialize output + int retVal = 0; *out = NULL; streamNumber.value = *buffer; blockSize = 1; @@ -373,12 +409,36 @@ int AsfMultiplePayloadsUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPa return ASF_PARSER_NO_MEMORY; } + ALOGD("replicatedDataLength = %d", replicatedDataLength); + + // point to replicated data into object's buffer. Yet to be interpreted. + obj->replicatedDataLength = replicatedDataLength; + obj->replicatedData = buffer + blockSize; + // at least 8 bytes replicated data obj->mediaObjectLength = *(uint32_t *)(buffer + blockSize); obj->presentationTime = *(uint32_t *)(buffer + blockSize + 4); blockSize += replicatedDataLength; + // defined for temporary use as arg for getPayloadExtensionSystems + uint8_t streamnumber = streamNumber.bits.streamNumber; + vector *extSystems; + retVal = AsfDataParser::mHeaderParser->getPayloadExtensionSystems(streamnumber, &extSystems); + if (retVal != ASF_PARSER_SUCCESS) { + ALOGD("Error while parsing Payload Extension Systems"); + } else { + // find IV in replicated data + int rep_data_offset = 0; + for (int i = 0; i < extSystems->size(); i++) { + // ptr to ext system's data in replicated data buffer. 
+ if ((extSystems->at(i)->extensionSystemId) == ASF_Payload_Extension_System_Encryption_Sample_ID) { + obj->sampleID = obj->replicatedData + 8 + rep_data_offset; + } + rep_data_offset = rep_data_offset + extSystems->at(i)->extensionDataSize; + } + } + // payload length must not be 0 payloadLength = getFieldValue(buffer + blockSize, mpHeader->payloadFlags.bits.payloadLengthType); blockSize += lengthType2Bytes(mpHeader->payloadFlags.bits.payloadLengthType); @@ -473,7 +533,6 @@ int AsfMultiplePayloadsCompressed::parse(uint8_t *buffer, uint32_t size, AsfPayl payloadLenRemaining -= subPayloadDataLength; } - if (payloadLenRemaining < 0) { // TODO: freePayloadDataInfo(first); @@ -486,14 +545,13 @@ int AsfMultiplePayloadsCompressed::parse(uint8_t *buffer, uint32_t size, AsfPayl } -AsfDataParser::AsfDataParser(void) +AsfDataParser::AsfDataParser(AsfHeaderParser *hdrparser) : mTotalDataPackets(0) { mSPUncompressed.ppi = &mPPI; mSPCompressed.ppi = &mPPI; mMPHeader.ppi = &mPPI; mMPUncompressed.ppi = &mPPI; mMPCompressed.ppi = &mPPI; - mMPUncompressed.mpHeader = &mMPHeader; mMPCompressed.mpHeader = &mMPHeader; @@ -501,6 +559,9 @@ AsfDataParser::AsfDataParser(void) mSPCompressed.pool = &mPool; mMPUncompressed.pool = &mPool; mMPCompressed.pool = &mPool; + if (hdrparser) { + AsfDataParser::mHeaderParser = hdrparser; + } } @@ -604,5 +665,3 @@ int AsfDataParser::parsePacket(uint8_t *buffer, uint32_t size, AsfPayloadDataInf void AsfDataParser::releasePayloadDataInfo(AsfPayloadDataInfo *info) { mPool.releasePayloadDataInfo(info); } - - diff --git a/asfparser/AsfDataParser.h b/asfparser/AsfDataParser.h index a32924a..bc4c748 100644 --- a/asfparser/AsfDataParser.h +++ b/asfparser/AsfDataParser.h @@ -223,7 +223,7 @@ struct AsfMultiplePayloadsCompressed { class AsfDataParser { public: - AsfDataParser(void); + AsfDataParser(class AsfHeaderParser *mhdrparser); ~AsfDataParser(void); public: @@ -234,6 +234,7 @@ public: int parsePacket(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out); // put payload data info to internal queue for reuse. 
void releasePayloadDataInfo(AsfPayloadDataInfo *info); + static class AsfHeaderParser *mHeaderParser; private: inline AsfPayloadDataInfo* getPayloadDataInfo(); diff --git a/asfparser/AsfGuids.h b/asfparser/AsfGuids.h index 5ab07ae..0ef74cd 100644 --- a/asfparser/AsfGuids.h +++ b/asfparser/AsfGuids.h @@ -186,7 +186,8 @@ DEFINE_GUID(ASF_Payload_Extension_System_Sample_Duration, 0xC6BD9450, 0x867F, 0x4907, 0x83, 0xA3, 0xC7, 0x79, 0x21, 0xB7, 0x33, 0xAD); DEFINE_GUID(ASF_Payload_Extension_System_Encryption_Sample_ID, 0x6698B84E, 0x0AFA, 0x4330, 0xAE, 0xB2, 0x1C, 0x0A, 0x98, 0xD7, 0xA4, 0x4D); - +DEFINE_GUID(ASF_Protection_System_Identifier_Object, + 0x9A04F079, 0x9840, 0x4286, 0xAB, 0x92, 0xE6, 0x5B, 0xE0, 0x88, 0x5F, 0x95); #endif diff --git a/asfparser/AsfHeaderParser.cpp b/asfparser/AsfHeaderParser.cpp index 795e7ea..bc867f3 100644 --- a/asfparser/AsfHeaderParser.cpp +++ b/asfparser/AsfHeaderParser.cpp @@ -24,16 +24,21 @@ +#define LOG_NDEBUG 0 +#define LOG_TAG "AsfHeaderParser" +#include #include "AsfHeaderParser.h" #include +#include AsfHeaderParser::AsfHeaderParser(void) : mAudioInfo(NULL), mVideoInfo(NULL), mFileInfo(NULL), mNumObjectParsed(0), + mIsProtected(false), mNumberofHeaderObjects(0) { mFileInfo = new AsfFileMediaInfo; memset(mFileInfo, 0, sizeof(AsfFileMediaInfo)); @@ -41,6 +46,26 @@ AsfHeaderParser::AsfHeaderParser(void) AsfHeaderParser::~AsfHeaderParser(void) { delete mFileInfo; + if (mPlayreadyHeader) { + delete mPlayreadyHeader; + mPlayreadyHeader = NULL; + } + + // Deleting memory from mExtendedStreamPropertiesObj recursively + for (vector::iterator it = mExtendedStreamPropertiesObj.begin(); it != mExtendedStreamPropertiesObj.end(); ++it) { + for (int i = 0; i < (*it)->extensionSystems.size(); i++) { + if ((*it)->extensionSystems[i]->extensionSystemInfo != NULL) { + delete (*it)->extensionSystems[i]->extensionSystemInfo; + (*it)->extensionSystems[i]->extensionSystemInfo = NULL; + } + delete (*it)->extensionSystems[i]; + (*it)->extensionSystems[i] = NULL; + } + (*it)->extensionSystems.clear(); + delete (*it); + (*it) = NULL; + } + mExtendedStreamPropertiesObj.clear(); resetStreamInfo(); } @@ -95,7 +120,7 @@ bool AsfHeaderParser::isSeekable() { return mFileInfo->seekable; } -int AsfHeaderParser::parse(uint8_t *buffer, uint32_t size) { +int AsfHeaderParser::parse(uint8_t *buffer, uint64_t size) { int status = ASF_PARSER_SUCCESS; // reset parser's status @@ -147,7 +172,35 @@ int AsfHeaderParser::parse(uint8_t *buffer, uint32_t size) { } else if(obj->objectID == ASF_Extended_Content_Encryption_Object) { } else if(obj->objectID == ASF_Digital_Signature_Object) { } else if(obj->objectID == ASF_Padding_Object) { - } else { + } else if(obj->objectID == ASF_Protection_System_Identifier_Object) { + mIsProtected = true; + LOGV("ASF_Protection_System_Identifier_Object"); + uint64_t playreadyObjSize = obj->objectSize; + PlayreadyHeaderObj *plrdyHdrObj = (PlayreadyHeaderObj*)buffer; + + uint8_t* playreadyObjBuf = NULL; + GUID *pldyUuid = (GUID*)plrdyHdrObj->sysId; + memcpy(mPlayreadyUuid, (uint8_t*)pldyUuid, UUIDSIZE); + + // Rights Management Header - Record Type = 0x0001 + // Traverse till field containing number of records + playreadyObjBuf = buffer + sizeof(PlayreadyHeaderObj); + for (int i = 0; i < plrdyHdrObj->countRecords; i++) { + uint16_t* recordType = (uint16_t*)playreadyObjBuf; + if (*recordType == 0x01) {// Rights management Header + playreadyObjBuf += sizeof(uint16_t); + uint16_t* recordLen = (uint16_t*)playreadyObjBuf; + mPlayreadyHeaderLen = *recordLen; + + 
mPlayreadyHeader = new uint8_t [mPlayreadyHeaderLen]; + if (mPlayreadyHeader == NULL) { + return ASF_PARSER_NO_MEMORY; + } + playreadyObjBuf += sizeof(uint16_t); + memcpy(mPlayreadyHeader, playreadyObjBuf, mPlayreadyHeaderLen); + break; + } + } } if (status != ASF_PARSER_SUCCESS) { return status; @@ -165,6 +218,25 @@ int AsfHeaderParser::parse(uint8_t *buffer, uint32_t size) { return status; } +int AsfHeaderParser::getPlayreadyUuid(uint8_t *playreadyUuid) { + + if (playreadyUuid == NULL || (!mIsProtected)) + return ASF_PARSER_FAILED; + memcpy(playreadyUuid, mPlayreadyUuid, UUIDSIZE); + return ASF_PARSER_SUCCESS; +} + +int AsfHeaderParser::getPlayreadyHeaderXml(uint8_t *playreadyHeader, uint32_t *playreadyHeaderLen) { + + if (playreadyHeader == NULL) { + *playreadyHeaderLen = mPlayreadyHeaderLen; + return ASF_PARSER_NULL_POINTER; + } + memcpy(playreadyHeader, mPlayreadyHeader, mPlayreadyHeaderLen); + *playreadyHeaderLen = mPlayreadyHeaderLen; + + return ASF_PARSER_SUCCESS; +} int AsfHeaderParser::onFilePropertiesObject(uint8_t *buffer, uint32_t size) { if (size < sizeof(AsfFilePropertiesObject)) { @@ -199,6 +271,7 @@ int AsfHeaderParser::onStreamPropertiesObject(uint8_t *buffer, uint32_t size) { size - sizeof(AsfStreamPropertiesObject)) { return ASF_PARSER_BAD_VALUE; } + uint8_t *typeSpecificData = buffer + sizeof(AsfStreamPropertiesObject); if (obj->streamType == ASF_Video_Media) { status = onVideoSpecificData(obj, typeSpecificData); @@ -240,7 +313,16 @@ int AsfHeaderParser::onVideoSpecificData(AsfStreamPropertiesObject *obj, uint8_t videoInfo->timeOffset = obj->timeOffset; videoInfo->width = info->encodedImageWidth; videoInfo->height = info->encodedImageHeight; - videoInfo->fourCC = bmp->compressionID; + + if (bmp->compressionID == FOURCC('Y', 'D', 'R', 'P')) { + // That means PYV content + uint32_t* ptrActCompId = (uint32_t*)((data + sizeof(AsfVideoInfoHeader) + bmp->formatDataSize - sizeof(uint32_t))); + bmp->actualCompressionID = *ptrActCompId; + videoInfo->fourCC = bmp->actualCompressionID; + LOGV("onVideoSpecificData() with bmp->actualCompressionID = %x", bmp->actualCompressionID); + } else { + videoInfo->fourCC = bmp->compressionID; + } // TODO: get aspect ratio from video meta data videoInfo->aspectX = 1; @@ -292,7 +374,16 @@ int AsfHeaderParser::onAudioSpecificData(AsfStreamPropertiesObject *obj, uint8_t audioInfo->streamNumber = obj->flags.bits.streamNumber; audioInfo->encryptedContentFlag = obj->flags.bits.encryptedContentFlag; audioInfo->timeOffset = obj->timeOffset; - audioInfo->codecID = format->codecIDFormatTag; + LOGV("onAudioSpecificData => format->codecIDFormatTag = %x",format->codecIDFormatTag); + + if (format->codecIDFormatTag == 0x5052) { + uint32_t* ptrActCodecId = (uint32_t*)((data + sizeof(AsfWaveFormatEx) + format->codecSpecificDataSize - sizeof(format->codecIDFormatTag))); + format->codecIDFormatTag = *ptrActCodecId; + audioInfo->codecID = format->codecIDFormatTag; + } else { + audioInfo->codecID = format->codecIDFormatTag; + } + audioInfo->numChannels = format->numberOfChannels; audioInfo->sampleRate= format->samplesPerSecond; audioInfo->avgByteRate = format->averageNumberOfBytesPerSecond; @@ -329,11 +420,89 @@ int AsfHeaderParser::onAudioSpecificData(AsfStreamPropertiesObject *obj, uint8_t int AsfHeaderParser::onExtendedStreamPropertiesObject(uint8_t *buffer, uint32_t size) { + int status = ASF_PARSER_SUCCESS; + + if (size < sizeof(AsfObject)) { + return ASF_PARSER_BAD_DATA; + } + + AsfExtendedStreamPropertiesObject *extStrObj = new 
AsfExtendedStreamPropertiesObject; + if (extStrObj == NULL) { + return ASF_PARSER_NO_MEMORY; + } + + AsfExtendedStreamPropertiesObject *obj = (AsfExtendedStreamPropertiesObject *)buffer; + if (obj->objectSize > size) { + ALOGE("Invalid ASF Extended Stream Prop Object size"); + delete extStrObj; + return ASF_PARSER_BAD_VALUE; + } + + extStrObj->objectID = obj->objectID; + extStrObj->objectSize = obj->objectSize; + extStrObj->startTime = obj->startTime; + extStrObj->endTime = obj->endTime; + extStrObj->dataBitrate = obj->dataBitrate; + extStrObj->bufferSize = obj->bufferSize; + extStrObj->initialBufferFullness = obj->initialBufferFullness; + extStrObj->alternateDataBitrate = obj->alternateDataBitrate; + extStrObj->alternateBufferSize = obj->alternateBufferSize; + extStrObj->alternateInitialBufferFullness = obj->alternateInitialBufferFullness; + extStrObj->maximumObjectSize = obj->maximumObjectSize; + extStrObj->flags = obj->flags; + extStrObj->streamNumber = obj->streamNumber; + extStrObj->streamLanguageIDIndex = obj->streamLanguageIDIndex; + extStrObj->averageTimePerFrame = obj->averageTimePerFrame; + extStrObj->streamNameCount = obj->streamNameCount; + extStrObj->payloadExtensionSystemCount = obj->payloadExtensionSystemCount; + + ALOGD("stream number = 0x%08X", obj->streamNumber); + ALOGD("payloadExtensionSystemCount = 0x%08X", obj->payloadExtensionSystemCount); + + // Get pointer to buffer where first extension system object starts + buffer = (uint8_t *)&(obj->extensionSystems); + int extSysSize = 0; + for (int i = 0; i < obj->payloadExtensionSystemCount; i++ ) { + extSysSize = 0; + PayloadExtensionSystem *extensionObj = new PayloadExtensionSystem; + PayloadExtensionSystem *extObjData = (PayloadExtensionSystem *)buffer; + // populate the extension object from the buffer + extensionObj->extensionSystemId = extObjData->extensionSystemId; + extensionObj->extensionDataSize = extObjData->extensionDataSize; + extensionObj->extensionSystemInfoLength = extObjData->extensionSystemInfoLength; + + // Allocate space to store extensionSystemInfo + if (extensionObj->extensionSystemInfoLength > 0) { + // TODO: make sure this memory is freed when not reuired. + extensionObj->extensionSystemInfo = new uint8_t [extObjData->extensionSystemInfoLength]; + if (extensionObj->extensionSystemInfo == NULL) { + delete extensionObj; + delete extStrObj; + return ASF_PARSER_NO_MEMORY; + } + memcpy(extensionObj->extensionSystemInfo, extObjData->extensionSystemInfo, extObjData->extensionSystemInfoLength); + } else { + // no extension system info + extensionObj->extensionSystemInfo = NULL; + } + + // calculate the length of current extension system. 
+ // if there are multiple extension systems then increment buffer by extSysSize + // to point to next extension object + extSysSize += sizeof(GUID) + sizeof(uint16_t) + sizeof(uint32_t) + extensionObj->extensionSystemInfoLength; + buffer += extSysSize; + + // add the extension object to the extended stream object + extStrObj->extensionSystems.push_back(extensionObj); + } + + mExtendedStreamPropertiesObj.push_back(extStrObj); return ASF_PARSER_SUCCESS; } int AsfHeaderParser::parseHeaderExtensionObject(uint8_t* buffer, uint32_t size) { // No empty space, padding, leading, or trailing bytes are allowed in the extention data + int status = ASF_PARSER_SUCCESS; do { if (size < sizeof(AsfObject)) { @@ -378,6 +547,16 @@ int AsfHeaderParser::parseHeaderExtensionObject(uint8_t* buffer, uint32_t size) return status; } +int AsfHeaderParser::getPayloadExtensionSystems(uint8_t streamNumber, vector **extSystems ) { + for (unsigned int i = 0; i < mExtendedStreamPropertiesObj.size(); i++) { + if (streamNumber == mExtendedStreamPropertiesObj[i]->streamNumber) { + *extSystems = &(mExtendedStreamPropertiesObj[i]->extensionSystems); + return ASF_PARSER_SUCCESS; + } + } + return ASF_PARSER_FAILED; +} + void AsfHeaderParser::resetStreamInfo() { while (mAudioInfo) { AsfAudioStreamInfo *next = mAudioInfo->next; diff --git a/asfparser/AsfHeaderParser.h b/asfparser/AsfHeaderParser.h index 1e1f821..02b8af5 100644 --- a/asfparser/AsfHeaderParser.h +++ b/asfparser/AsfHeaderParser.h @@ -31,6 +31,9 @@ #include "AsfParserDefs.h" #include "AsfObjects.h" #include "AsfGuids.h" +#include + +using namespace std; class AsfHeaderParser { public: @@ -39,7 +42,7 @@ public: public: // buffer must contain a complete header object - int parse(uint8_t* buffer, uint32_t size); + int parse(uint8_t* buffer, uint64_t size); AsfAudioStreamInfo* getAudioInfo() const; AsfVideoStreamInfo* getVideoInfo() const; @@ -56,7 +59,9 @@ public: bool hasVideo(); bool hasAudio(); bool isSeekable(); - + int getPlayreadyUuid(uint8_t* playreadyUuid); + int getPlayreadyHeaderXml(uint8_t* playreadyHeader, uint32_t* playreadyHeaderLen); + int getPayloadExtensionSystems(uint8_t streamNumber, vector **extSystems ); private: int onFilePropertiesObject(uint8_t *buffer, uint32_t size); int onStreamPropertiesObject(uint8_t *buffer, uint32_t size); @@ -72,6 +77,12 @@ private: AsfFileMediaInfo *mFileInfo; uint32_t mNumObjectParsed; uint32_t mNumberofHeaderObjects; + uint8_t mPlayreadyUuid[UUIDSIZE]; + bool mIsProtected; + + uint8_t *mPlayreadyHeader; + uint32_t mPlayreadyHeaderLen; + vector mExtendedStreamPropertiesObj; }; #endif diff --git a/asfparser/AsfObjects.h b/asfparser/AsfObjects.h index 1942c8d..7cf1c37 100644 --- a/asfparser/AsfObjects.h +++ b/asfparser/AsfObjects.h @@ -30,9 +30,13 @@ #include "AsfParserDefs.h" #include "AsfGuids.h" +#include + +#define UUIDSIZE 16 #pragma pack(push, 1) +using namespace std; struct AsfObject { GUID objectID; uint64_t objectSize; @@ -44,6 +48,14 @@ struct AsfHeaderObject : AsfObject { uint8_t reserved2; }; +struct PlayreadyHeaderObj : AsfObject { + uint8_t sysId[UUIDSIZE]; + uint32_t sysVer; + uint32_t dataSize; + uint32_t lenRecords; + uint16_t countRecords; + // records +}; struct AsfFilePropertiesObject : AsfObject { GUID fileID; @@ -146,6 +158,14 @@ struct AsfPaddingObject : AsfObject { }; // objects in the ASF Header Extension object +struct PayloadExtensionSystem { + GUID extensionSystemId; + uint16_t extensionDataSize; + uint32_t extensionSystemInfoLength; + uint8_t *extensionSystemInfo; +}; + +// class 
AsfHeaderParser; struct AsfExtendedStreamPropertiesObject : AsfObject { uint64_t startTime; uint64_t endTime; @@ -171,6 +191,7 @@ struct AsfExtendedStreamPropertiesObject : AsfObject { uint64_t averageTimePerFrame; uint16_t streamNameCount; uint16_t payloadExtensionSystemCount; + vector extensionSystems; //Stream Names - variable length //Payload Extension Systems - variable length //Stream Properties Object - variable length @@ -297,7 +318,7 @@ struct AsfBitmapInfoHeader { int32_t verticalPixelsPerMeter; uint32_t colorsUsedCount; uint32_t importantColorsCount; - //uint8_t codecSpecificData[]; + uint32_t actualCompressionID; }; #pragma pack(pop) diff --git a/asfparser/AsfParserDefs.h b/asfparser/AsfParserDefs.h index 27ff623..4263311 100644 --- a/asfparser/AsfParserDefs.h +++ b/asfparser/AsfParserDefs.h @@ -27,7 +27,9 @@ #ifndef ASF_PARSER_DEFS_H_ #define ASF_PARSER_DEFS_H_ +#include "AsfObjects.h" #include +#include #ifndef NULL //#define NULL (void*) 0 @@ -103,6 +105,9 @@ struct AsfSimpleIndexInfo { struct AsfPayloadDataInfo { const uint8_t *payloadData; uint32_t payloadSize; + uint8_t replicatedDataLength; + uint8_t *replicatedData; + uint8_t *sampleID; // Sample Id is always 8 bytes to be used by playready uint32_t presentationTime; // in milliseconds uint32_t offsetIntoMediaObject; uint32_t mediaObjectLength; diff --git a/asfparser/AsfStreamParser.cpp b/asfparser/AsfStreamParser.cpp index f2b45f4..dee3c3a 100644 --- a/asfparser/AsfStreamParser.cpp +++ b/asfparser/AsfStreamParser.cpp @@ -23,6 +23,9 @@ */ +#define LOG_NDEBUG 0 +#define LOG_TAG "AsfStreamParser" +#include #include "AsfHeaderParser.h" #include "AsfDataParser.h" @@ -36,8 +39,9 @@ AsfStreamParser::AsfStreamParser(void) mTimeOffsetMs(0), mHeaderParsed(false) { mHeaderParser = new AsfHeaderParser; - mDataParser = new AsfDataParser; + mDataParser = new AsfDataParser(mHeaderParser); mSimpleIndexParser = NULL; + memset(mPlayreadyUuid, 0, UUIDSIZE); } AsfStreamParser::~AsfStreamParser(void) { @@ -56,7 +60,7 @@ bool AsfStreamParser::isHeaderObject(uint8_t *guid) { return (*id == ASF_Header_Object); } -int AsfStreamParser::parseHeaderObject(uint8_t *buffer, uint32_t size) { +int AsfStreamParser::parseHeaderObject(uint8_t *buffer, uint64_t size) { int status = mHeaderParser->parse(buffer, size); if (status != ASF_PARSER_SUCCESS) { return status; @@ -73,6 +77,21 @@ int AsfStreamParser::parseHeaderObject(uint8_t *buffer, uint32_t size) { return ASF_PARSER_SUCCESS; } +int AsfStreamParser::getDrmUuid(uint8_t *playreadyUuid) { + int status = ASF_PARSER_SUCCESS; + + status = mHeaderParser->getPlayreadyUuid(playreadyUuid); + LOGV("AsfStreamParser::getDrmUuid() returns %x", status); + return status; +} + +int AsfStreamParser::getDrmHeaderXml(uint8_t *playreadyHeader, uint32_t *playreadyHeaderLen) { + int status = ASF_PARSER_SUCCESS; + + status = mHeaderParser->getPlayreadyHeaderXml(playreadyHeader, playreadyHeaderLen); + LOGV("AsfStreamParser::getDrmHeaderXml() returns %x", status); + return status; +} AsfAudioStreamInfo* AsfStreamParser::getAudioInfo() const { return mHeaderParser->getAudioInfo(); } @@ -139,7 +158,7 @@ int AsfStreamParser::parseDataPacket(uint8_t *buffer, uint32_t size, AsfPayloadD else { // TODO: next->presentationTime = 0; - //return ASF_PARSER_BAD_VALUE; + // return ASF_PARSER_BAD_VALUE; } next = next->next; } @@ -238,4 +257,3 @@ uint32_t AsfStreamParser::getMaxObjectSize() { if (!mSimpleIndexParser) return NULL; return mSimpleIndexParser->getMaximumPacketCount() * mDataPacketSize; } - diff --git 
a/asfparser/AsfStreamParser.h b/asfparser/AsfStreamParser.h index 91d21ef..e5fd0c4 100644 --- a/asfparser/AsfStreamParser.h +++ b/asfparser/AsfStreamParser.h @@ -39,10 +39,12 @@ public: static bool isHeaderObject(uint8_t *guid); // buffer must contain a complete header object - int parseHeaderObject(uint8_t *buffer, uint32_t size); + int parseHeaderObject(uint8_t *buffer, uint64_t size); AsfAudioStreamInfo* getAudioInfo() const; AsfVideoStreamInfo* getVideoInfo() const; AsfFileMediaInfo* getFileInfo() const; + int getDrmUuid(uint8_t* playreadyUuid); + int getDrmHeaderXml(uint8_t* playreadyHeader, uint32_t* playreadyHeaderLen); // return duration in 100-nanosecond unit , readable when header object is parsed uint64_t getDuration(); @@ -79,6 +81,7 @@ private: // Preroll value is used for data buffering. uint32_t mTimeOffsetMs; bool mHeaderParsed; + uint8_t mPlayreadyUuid[UUIDSIZE]; class AsfHeaderParser *mHeaderParser; class AsfDataParser *mDataParser; class AsfSimpleIndexParser *mSimpleIndexParser; -- cgit v1.2.3 From 5cb3c11083280fc612a9133938c7ed2631fc444d Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Fri, 25 Apr 2014 14:49:53 +0800 Subject: libmix: error handling for unsupported profile in AVC BZ: 189357 error handling for unsupported profile in AVC. Change-Id: I7bc37fdcb064de89635974991a1761d0662de18a Signed-off-by: Tianmi Chen --- mixvbp/vbp_manager/vbp_loader.h | 3 ++- mixvbp/vbp_manager/vbp_utils.c | 9 +++++++++ videodecoder/VideoDecoderTrace.h | 3 +++ 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/mixvbp/vbp_manager/vbp_loader.h b/mixvbp/vbp_manager/vbp_loader.h index 46da5d7..3017149 100755 --- a/mixvbp/vbp_manager/vbp_loader.h +++ b/mixvbp/vbp_manager/vbp_loader.h @@ -402,7 +402,8 @@ enum _vbp_parser_error VBP_MEM, VBP_PARM, VBP_PARTIAL, - VBP_MULTI + VBP_MULTI, + VBP_ERROR }; enum _vbp_parser_type diff --git a/mixvbp/vbp_manager/vbp_utils.c b/mixvbp/vbp_manager/vbp_utils.c index f62f9ba..ab9a3ec 100755 --- a/mixvbp/vbp_manager/vbp_utils.c +++ b/mixvbp/vbp_manager/vbp_utils.c @@ -427,6 +427,15 @@ static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_f /* can't return error for now. Neet further investigation */ if (0 != error) { WTRACE("failed to parse the syntax: %d!", error); + if (pcontext->parser_type == VBP_H264 +#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) + || pcontext->parser_type == VBP_H264SECURE +#endif +) { + if (error == H264_SPS_INVALID_PROFILE) { + return VBP_ERROR; + } + } } /* process parsing result */ diff --git a/videodecoder/VideoDecoderTrace.h b/videodecoder/VideoDecoderTrace.h index 8de40e0..c8b7dce 100755 --- a/videodecoder/VideoDecoderTrace.h +++ b/videodecoder/VideoDecoderTrace.h @@ -93,6 +93,9 @@ TraceVideoDecoder(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) #define CHECK_VBP_STATUS(FUNC)\ if (vbpStatus != VBP_OK) {\ ETRACE(FUNC" failed. 
vbpStatus = %d", (int)vbpStatus);\ + if (vbpStatus == VBP_ERROR) {\ + return DECODE_FAIL;\ + }\ return DECODE_PARSER_FAIL;\ } -- cgit v1.2.3 From 9defc2a202dfb5b268d3eff5b7daf8cec698fc4a Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Tue, 22 Apr 2014 18:01:24 +0800 Subject: libmix: adjust video buffer count for AVC BZ: 188633 adjust video buffer count for AVC Change-Id: I4bd3d1108ff6e8dd996f46dcfbcb0d06bf7aba22 Signed-off-by: Tianmi Chen --- videodecoder/VideoDecoderAVC.cpp | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 03abb3b..13dd5db 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -772,18 +772,14 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { } } - // considering there is a B frame to be output, which needs to be exclued in output queue - int diff = data->codec_data->num_ref_frames + 1 - (mOutputWindowSize - 1); - if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) { mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber; } else { // The number of actual buffer needed is - // outputQueue + nativewindow_owned + (diff > 0 ? diff : 1) + widi_need_max + 1(available buffer) + // outputQueue + nativewindow_owned + num_ref_frames + widi_need_max + 1(available buffer) // while outputQueue = DPB < 8? DPB :8 - // and diff = DBPSize - (outputQ - 1) mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + NW_CONSUMED /* Owned by native window */ - + (diff > 0 ? diff : 1) + + data->codec_data->num_ref_frames #ifndef USE_GEN_HW + HDMI_CONSUMED /* Two extra buffers are needed for native window buffer cycling */ + (mWiDiOn ? WIDI_CONSUMED : 0) /* WiDi maximum needs */ -- cgit v1.2.3 From 7ed83eaf49b80fe996cac0720788fd458b516b12 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Tue, 29 Apr 2014 22:21:03 +0800 Subject: disable mixvbp optimization since it cannot be applied to the h264 decoder BZ: 191348 Disable the mixvbp optimization, since it cannot be applied to the h264 decoder: h264 streams carry emulation-prevention data, so the prefetch optimization cannot be applied to the AVC parser; it may lead to wrong calculation of the slice byte offset and bit offset. Change-Id: I88ede7a16a00b755170c90801173c1074e5cef86 Signed-off-by: ywan171 --- mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h index 37e90cd..7796798 100755 --- a/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h +++ b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h @@ -7,7 +7,7 @@ #define SCRATCH_SIZE 20 #define MIN_DATA 8 -#define PARSER_OPT /* parser optimization*/ +//#define PARSER_OPT /* parser optimization*/ typedef struct { -- cgit v1.2.3 From c6b0b722e2caf081f2c2d3b91439036a738d7b60 Mon Sep 17 00:00:00 2001 From: Sudheer Mogilappagari Date: Thu, 1 May 2014 15:01:15 -0700 Subject: [R44c] Extension System Data size handling in ASF Parser. BZ: 191925 Initialization of mPlayreadyHeader (fixes BZ 191925). Handling of ExtensionSystemDataSize when the value is 0xFFFF. Explicit usage of SampleIDLength instead of assuming it is always 8.
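In outline, the replicated-data walk this change arrives at is sketched below (a simplified rendering of the patch, not a drop-in function: obj is the AsfPayloadDataInfo being filled and extSystems is the stream's payload extension system list, both as named in the diffs that follow).

    // The first 8 bytes of replicated data hold the media object length and
    // presentation time; extension system data follows in declaration order.
    // A declared extensionDataSize of 0xFFFF marks a variable-size entry that
    // carries its own 16-bit size prefix.
    int offset = 0;
    for (size_t i = 0; i < extSystems->size(); i++) {
        PayloadExtensionSystem *sys = extSystems->at(i);
        if (sys->extensionSystemId == ASF_Payload_Extension_System_Encryption_Sample_ID) {
            obj->sampleID = obj->replicatedData + 8 + offset;  // IV for PlayReady
            obj->sampleIDLen = sys->extensionDataSize;
        }
        if (sys->extensionDataSize == 0xFFFF) {                // variable-size entry
            uint16_t nSize = *((uint16_t *)(obj->replicatedData + 8 + offset));
            offset += sizeof(uint16_t) + nSize;
        } else {
            offset += sys->extensionDataSize;
        }
    }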
Change-Id: I57878da4f5364af708bf8055fcc80dfabb6365e3 Signed-off-by: Sudheer Mogilappagari --- asfparser/AsfDataParser.cpp | 23 +++++++++++++++-------- asfparser/AsfDataParser.h | 2 +- asfparser/AsfHeaderParser.cpp | 2 ++ asfparser/AsfParserDefs.h | 1 + 4 files changed, 19 insertions(+), 9 deletions(-) diff --git a/asfparser/AsfDataParser.cpp b/asfparser/AsfDataParser.cpp index 94e8c93..3bd4d32 100644 --- a/asfparser/AsfDataParser.cpp +++ b/asfparser/AsfDataParser.cpp @@ -242,18 +242,19 @@ int AsfSinglePayloadUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPaylo } else { ALOGD("Extension System count = %d", extSystems->size()); // find IV in replicated data - int rep_data_offset = 0; + int replicatedDataOffset = 0; for (int i = 0; i < extSystems->size(); i++) { // ptr to ext system's data in replicated data buffer. if ((extSystems->at(i)->extensionSystemId) == ASF_Payload_Extension_System_Encryption_Sample_ID) { - obj->sampleID = obj->replicatedData + 8 + rep_data_offset; + obj->sampleID = obj->replicatedData + 8 + replicatedDataOffset; + obj->sampleIDLen = extSystems->at(i)->extensionDataSize; } if (extSystems->at(i)->extensionDataSize == 0xFFFF) { - uint16_t nSize = *((uint16_t*) (obj->replicatedData + 8 + rep_data_offset)); - rep_data_offset += (sizeof(uint16_t) + nSize); + uint16_t nSize = *((uint16_t*) (obj->replicatedData + 8 + replicatedDataOffset)); + replicatedDataOffset += (sizeof(uint16_t) + nSize); } else { - rep_data_offset += extSystems->at(i)->extensionDataSize; + replicatedDataOffset += extSystems->at(i)->extensionDataSize; } } } @@ -429,13 +430,19 @@ int AsfMultiplePayloadsUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPa ALOGD("Error while parsing Payload Extension Systems"); } else { // find IV in replicated data - int rep_data_offset = 0; + int replicatedDataOffset = 0; for (int i = 0; i < extSystems->size(); i++) { // ptr to ext system's data in replicated data buffer. 
if ((extSystems->at(i)->extensionSystemId) == ASF_Payload_Extension_System_Encryption_Sample_ID) { - obj->sampleID = obj->replicatedData + 8 + rep_data_offset; + obj->sampleID = obj->replicatedData + 8 + replicatedDataOffset; + obj->sampleIDLen = extSystems->at(i)->extensionDataSize; + } + if (extSystems->at(i)->extensionDataSize == 0xFFFF) { + uint16_t nSize = *((uint16_t*) (obj->replicatedData + 8 + replicatedDataOffset)); + replicatedDataOffset += (sizeof(uint16_t) + nSize); + } else { + replicatedDataOffset += extSystems->at(i)->extensionDataSize; } - rep_data_offset = rep_data_offset + extSystems->at(i)->extensionDataSize; } } diff --git a/asfparser/AsfDataParser.h b/asfparser/AsfDataParser.h index bc4c748..28712d4 100644 --- a/asfparser/AsfDataParser.h +++ b/asfparser/AsfDataParser.h @@ -223,7 +223,7 @@ class AsfDataParser { public: - AsfDataParser(class AsfHeaderParser *mhdrparser); + AsfDataParser(class AsfHeaderParser *hdrparser); ~AsfDataParser(void); public: diff --git a/asfparser/AsfHeaderParser.cpp b/asfparser/AsfHeaderParser.cpp index bc867f3..1abb4d6 100644 --- a/asfparser/AsfHeaderParser.cpp +++ b/asfparser/AsfHeaderParser.cpp @@ -39,6 +39,8 @@ AsfHeaderParser::AsfHeaderParser(void) mFileInfo(NULL), mNumObjectParsed(0), mIsProtected(false), + mPlayreadyHeader(NULL), + mPlayreadyHeaderLen(0), mNumberofHeaderObjects(0) { mFileInfo = new AsfFileMediaInfo; memset(mFileInfo, 0, sizeof(AsfFileMediaInfo)); diff --git a/asfparser/AsfParserDefs.h b/asfparser/AsfParserDefs.h index 4263311..f505a09 100644 --- a/asfparser/AsfParserDefs.h +++ b/asfparser/AsfParserDefs.h @@ -108,6 +108,7 @@ struct AsfPayloadDataInfo { uint8_t replicatedDataLength; uint8_t *replicatedData; uint8_t *sampleID; // Sample Id is always 8 bytes to be used by playready + uint32_t sampleIDLen; uint32_t presentationTime; // in milliseconds uint32_t offsetIntoMediaObject; uint32_t mediaObjectLength; -- cgit v1.2.3 From e8b9239738aff472ffebb154190ae0abad12beb1 Mon Sep 17 00:00:00 2001 From: ywan171 Date: Thu, 8 May 2014 10:59:09 +0800 Subject: libmix: when terminating VA, reset mSignalBufferPre[] even if VA is not initialized BZ: 191657 When terminating VA, reset mSignalBufferPre[] even if VA is not initialized; otherwise, after an output reconfiguration, the un-reset mSignalBufferPre[] will cause a render-done flag mismatch and buffer status failures.
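Condensed, the reordered teardown looks roughly like this (a sketch of the resulting shape of the function in the diff below; the elided VA teardown body is unchanged):

    Decode_Status VideoDecoderBase::terminateVA(void) {
        // Clear the graphic-buffer bookkeeping unconditionally, before the
        // early return, so a later reconfiguration starts from a clean slate
        // even when VA was never started.
        mSignalBufferSize = 0;
        for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
            mSignalBufferPre[i] = NULL;
        }
        if (mVAStarted == false) {
            return DECODE_SUCCESS;  // VA hasn't been started yet
        }
        // ... existing surface/context teardown ...
        return DECODE_SUCCESS;
    }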
Change-Id: I8cffbbfc104882900718f7f85555f6cd6fc5a6b2 Signed-off-by: ywan171 --- videodecoder/VideoDecoderBase.cpp | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp index a5b73f4..30b92af 100644 --- a/videodecoder/VideoDecoderBase.cpp +++ b/videodecoder/VideoDecoderBase.cpp @@ -978,6 +978,11 @@ Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile, i } Decode_Status VideoDecoderBase::terminateVA(void) { + mSignalBufferSize = 0; + for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) { + mSignalBufferPre[i] = NULL; + } + if (mVAStarted == false) { // VA hasn't been started yet return DECODE_SUCCESS; @@ -1044,10 +1049,6 @@ Decode_Status VideoDecoderBase::terminateVA(void) { mVAStarted = false; mInitialized = false; mErrReportEnabled = false; - mSignalBufferSize = 0; - for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) { - mSignalBufferPre[i] = NULL; - } return DECODE_SUCCESS; } -- cgit v1.2.3 From 7b6e72c6e267f10a80d5ff6b795e06905552c717 Mon Sep 17 00:00:00 2001 From: RupaliX Date: Wed, 7 May 2014 11:28:39 -0700 Subject: [R44C] Fix to broken clear WMV playback BZ: 192703 A newly added member to AsfBitmapInfoHeader required for playready was causing it. Fixed using an alternative. Change-Id: I156395c859400e6eccbd0f84ee291b8819d9786f Signed-off-by: RupaliX --- asfparser/AsfHeaderParser.cpp | 10 ++++------ asfparser/AsfObjects.h | 2 +- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/asfparser/AsfHeaderParser.cpp b/asfparser/AsfHeaderParser.cpp index 1abb4d6..0d9b45e 100644 --- a/asfparser/AsfHeaderParser.cpp +++ b/asfparser/AsfHeaderParser.cpp @@ -319,12 +319,11 @@ int AsfHeaderParser::onVideoSpecificData(AsfStreamPropertiesObject *obj, uint8_t if (bmp->compressionID == FOURCC('Y', 'D', 'R', 'P')) { // That means PYV content uint32_t* ptrActCompId = (uint32_t*)((data + sizeof(AsfVideoInfoHeader) + bmp->formatDataSize - sizeof(uint32_t))); - bmp->actualCompressionID = *ptrActCompId; - videoInfo->fourCC = bmp->actualCompressionID; - LOGV("onVideoSpecificData() with bmp->actualCompressionID = %x", bmp->actualCompressionID); + videoInfo->fourCC = (*ptrActCompId); } else { videoInfo->fourCC = bmp->compressionID; } + LOGV("onVideoSpecificData() with videoInfo->fourCC = %x", videoInfo->fourCC); // TODO: get aspect ratio from video meta data videoInfo->aspectX = 1; @@ -376,15 +375,14 @@ int AsfHeaderParser::onAudioSpecificData(AsfStreamPropertiesObject *obj, uint8_t audioInfo->streamNumber = obj->flags.bits.streamNumber; audioInfo->encryptedContentFlag = obj->flags.bits.encryptedContentFlag; audioInfo->timeOffset = obj->timeOffset; - LOGV("onAudioSpecificData => format->codecIDFormatTag = %x",format->codecIDFormatTag); if (format->codecIDFormatTag == 0x5052) { uint32_t* ptrActCodecId = (uint32_t*)((data + sizeof(AsfWaveFormatEx) + format->codecSpecificDataSize - sizeof(format->codecIDFormatTag))); - format->codecIDFormatTag = *ptrActCodecId; - audioInfo->codecID = format->codecIDFormatTag; + audioInfo->codecID = (*ptrActCodecId); } else { audioInfo->codecID = format->codecIDFormatTag; } + LOGV("onAudioSpecificData => format->codecIDFormatTag = %x",format->codecIDFormatTag); audioInfo->numChannels = format->numberOfChannels; audioInfo->sampleRate= format->samplesPerSecond; diff --git a/asfparser/AsfObjects.h b/asfparser/AsfObjects.h index 7cf1c37..9d92dbb 100644 --- a/asfparser/AsfObjects.h +++ b/asfparser/AsfObjects.h @@ -318,7 +318,7 @@ struct AsfBitmapInfoHeader { int32_t 
verticalPixelsPerMeter; uint32_t colorsUsedCount; uint32_t importantColorsCount; - uint32_t actualCompressionID; + //uint8_t codecSpecificData[]; }; #pragma pack(pop) -- cgit v1.2.3 From 2cd4533e58df3575647b988b865e714d7ab61ff7 Mon Sep 17 00:00:00 2001 From: bolunliu Date: Fri, 9 May 2014 14:02:03 +0800 Subject: [PORT FROM MAIN]Port VP8 encode middleware to R44c branch BZ: 193452 Port VP8 encode libmix to R44c branch Signed-off-by: bolunliu Change-Id: I36f9a91b41c7d329d04574d089b41c49722f0f22 --- videoencoder/VideoEncoderBase.cpp | 27 ++++++++++++++------------- videoencoder/VideoEncoderBase.h | 1 + videoencoder/VideoEncoderDef.h | 10 +++++----- videoencoder/VideoEncoderVP8.cpp | 31 +++++++++++++++++++++++++++++++ videoencoder/VideoEncoderVP8.h | 1 + 5 files changed, 52 insertions(+), 18 deletions(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index aa556e3..813fc98 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -29,6 +29,7 @@ VideoEncoderBase::VideoEncoderBase() ,mRenderBitRate(false) ,mRenderHrd(false) ,mRenderMultiTemporal(false) + ,mForceKFrame(false) ,mSeqParamBuf(0) ,mPicParamBuf(0) ,mSliceParamBuf(0) @@ -1019,18 +1020,18 @@ Encode_Status VideoEncoderBase::setParameters( break; } - case VideoParamsTypeTemporalLayerNumber:{ - VideoParamsTemporalLayerNumber *numberoflayer = - reinterpret_cast (videoEncParams); + case VideoParamsTypeTemporalLayer:{ + VideoParamsTemporalLayer *temporallayer = + reinterpret_cast (videoEncParams); - if (numberoflayer->size != sizeof(VideoParamsTemporalLayerNumber)) { + if (temporallayer->size != sizeof(VideoParamsTemporalLayer)) { return ENCODE_INVALID_PARAMS; } - mComParams.numberOfLayer = numberoflayer->numberOfLayer; - mComParams.nPeriodicity = numberoflayer->nPeriodicity; - for(int i=0;inPeriodicity;i++) - mComParams.nLayerID[i] = numberoflayer->nLayerID[i]; + mComParams.numberOfLayer = temporallayer->numberOfLayer; + mComParams.nPeriodicity = temporallayer->nPeriodicity; + for(int i=0;inPeriodicity;i++) + mComParams.nLayerID[i] = temporallayer->nLayerID[i]; mRenderMultiTemporal = true; break; } @@ -1146,15 +1147,15 @@ Encode_Status VideoEncoderBase::getParameters( } } - case VideoParamsTypeTemporalLayerNumber:{ - VideoParamsTemporalLayerNumber *numberoflayer = - reinterpret_cast (videoEncParams); + case VideoParamsTypeTemporalLayer:{ + VideoParamsTemporalLayer *temporallayer = + reinterpret_cast (videoEncParams); - if(numberoflayer->size != sizeof(VideoParamsTemporalLayerNumber)) { + if(temporallayer->size != sizeof(VideoParamsTemporalLayer)) { return ENCODE_INVALID_PARAMS; } - numberoflayer->numberOfLayer = mComParams.numberOfLayer; + temporallayer->numberOfLayer = mComParams.numberOfLayer; break; } diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h index 80532b9..7b1fb18 100644 --- a/videoencoder/VideoEncoderBase.h +++ b/videoencoder/VideoEncoderBase.h @@ -122,6 +122,7 @@ protected: bool mRenderHrd; bool mRenderMaxFrameSize; bool mRenderMultiTemporal; + bool mForceKFrame; VABufferID mSeqParamBuf; VABufferID mRcParamBuf; diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h index c57be9f..d1bf81e 100644 --- a/videoencoder/VideoEncoderDef.h +++ b/videoencoder/VideoEncoderDef.h @@ -320,7 +320,7 @@ enum VideoParamConfigType { VideoParamsTypeStoreMetaDataInBuffers, VideoParamsTypeProfileLevel, VideoParamsTypeVP8, - VideoParamsTypeTemporalLayerNumber, + VideoParamsTypeTemporalLayer, VideoConfigTypeFrameRate, 
VideoConfigTypeBitRate, @@ -531,11 +531,11 @@ struct VideoParamsProfileLevel : VideoParamConfigSet { bool isSupported; }; -struct VideoParamsTemporalLayerNumber : VideoParamConfigSet { +struct VideoParamsTemporalLayer : VideoParamConfigSet { - VideoParamsTemporalLayerNumber() { - type = VideoParamsTypeTemporalLayerNumber; - size = sizeof(VideoParamsTemporalLayerNumber); + VideoParamsTemporalLayer() { + type = VideoParamsTypeTemporalLayer; + size = sizeof(VideoParamsTemporalLayer); } uint32_t numberOfLayer; diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index 936fc6b..486c5ba 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -50,6 +50,23 @@ VideoEncoderVP8::VideoEncoderVP8() VideoEncoderVP8::~VideoEncoderVP8() { } +Encode_Status VideoEncoderVP8::start() { + + Encode_Status ret = ENCODE_SUCCESS; + LOG_V( "Begin\n"); + + ret = VideoEncoderBase::start (); + CHECK_ENCODE_STATUS_RETURN("VideoEncoderBase::start"); + + if (mComParams.rcMode == VA_RC_VCM) { + mRenderBitRate = false; + } + + LOG_V( "end\n"); + return ret; +} + + Encode_Status VideoEncoderVP8::renderSequenceParams() { Encode_Status ret = ENCODE_SUCCESS; VAStatus vaStatus = VA_STATUS_SUCCESS; @@ -344,6 +361,11 @@ Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) { ret = renderPictureParams(task); CHECK_ENCODE_STATUS_RETURN("renderPictureParams"); + if(mForceKFrame) { + mVideoConfigVP8.force_kf = 0;//rest it as default value + mForceKFrame = false; + } + LOG_V( "End\n"); return ret; } @@ -475,6 +497,15 @@ Encode_Status VideoEncoderVP8::derivedSetConfig(VideoParamConfigSet *videoEncCon } break; + case VideoConfigTypeIDRRequest:{ + VideoParamConfigSet *encConfigVP8KFrameRequest = + reinterpret_cast (videoEncConfig); + + mVideoConfigVP8.force_kf = 1; + mForceKFrame = true; + } + break; + default: { LOG_E ("Invalid Config Type"); break; diff --git a/videoencoder/VideoEncoderVP8.h b/videoencoder/VideoEncoderVP8.h index c8335be..9456625 100644 --- a/videoencoder/VideoEncoderVP8.h +++ b/videoencoder/VideoEncoderVP8.h @@ -18,6 +18,7 @@ class VideoEncoderVP8: public VideoEncoderBase { public: VideoEncoderVP8(); virtual ~VideoEncoderVP8(); + virtual Encode_Status start(); -- cgit v1.2.3 From 5aa95032a5c03781d25f7bb24c7905888c0fe639 Mon Sep 17 00:00:00 2001 From: RupaliX Date: Tue, 13 May 2014 10:15:38 -0700 Subject: [R44c][ASF Parser] BZ193214 fix and review comments for ASF parser BZ: 193214 This patch covers the fix for BZ193214 and incorporates all review comments from main branch patch 189154 (patchset 7).
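Among the review fixes is a bounds-checked walk over the Protection System Identifier Object records; in outline it behaves like the sketch below (field names follow the patch in this commit; the copy of the header XML into mPlayreadyHeader is elided).

    // Each record is a 16-bit type followed by a 16-bit length; type 0x0001 is
    // the Rights Management Header that carries the PlayReady header XML.
    // sizeLeft tracks the bytes remaining in the object.
    uint8_t *p = buffer + sizeof(AsfProtectionSystemIdObj);
    uint32_t sizeLeft = obj->objectSize - sizeof(AsfProtectionSystemIdObj);
    for (int i = 0; i < protectionSysObj->countRecords; i++) {
        if (sizeLeft <= 4) return ASF_PARSER_BAD_DATA;        // need type + length fields
        uint16_t recordType = *((uint16_t *)p); p += 2; sizeLeft -= 2;
        uint16_t recordLen = *((uint16_t *)p); p += 2; sizeLeft -= 2;
        if (sizeLeft < recordLen) return ASF_PARSER_BAD_DATA; // record overruns the object
        if (recordType == 0x0001) {                           // Rights Management Header
            mPlayreadyHeaderLen = recordLen;                  // header XML is copied out here
            break;
        }
        p += recordLen; sizeLeft -= recordLen;                // skip other record types
    }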
Change-Id: Ic94fd907d924e7cc16483e93f649054d34a0d457 Signed-off-by: RupaliX Signed-off-by: Sudheer Mogilappagari --- asfparser/AsfDataParser.cpp | 73 ++++--------- asfparser/AsfHeaderParser.cpp | 241 +++++++++++++++++++++++++++++------------- asfparser/AsfHeaderParser.h | 13 ++- asfparser/AsfObjects.h | 25 ++++- asfparser/AsfStreamParser.cpp | 15 +-- asfparser/AsfStreamParser.h | 3 +- 6 files changed, 221 insertions(+), 149 deletions(-) diff --git a/asfparser/AsfDataParser.cpp b/asfparser/AsfDataParser.cpp index 3bd4d32..03e474d 100644 --- a/asfparser/AsfDataParser.cpp +++ b/asfparser/AsfDataParser.cpp @@ -24,6 +24,8 @@ +//#define LOG_NDEBUG 0 +#define LOG_TAG "AsfDataParser" #include "AsfDataParser.h" #include "AsfGuids.h" #include "AsfObjects.h" @@ -187,7 +189,6 @@ int AsfPayloadParsingInformation::parse(uint8_t *buffer, uint32_t size) { int AsfSinglePayloadUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out) { // initialize output - int retVal = 0; *out = NULL; streamNumber.value = *buffer; blockSize = 1; @@ -221,8 +222,6 @@ int AsfSinglePayloadUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPaylo return ASF_PARSER_NO_MEMORY; } - ALOGD("replicatedDataLength = %d", replicatedDataLength); - // point to replicated data into object's buffer. Yet to be interpreted. obj->replicatedDataLength = replicatedDataLength; obj->replicatedData = buffer + blockSize; @@ -233,29 +232,16 @@ int AsfSinglePayloadUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPaylo blockSize += replicatedDataLength; - // defined for temporary use as arg for getPayloadExtensionSystems - uint8_t streamnumber = streamNumber.bits.streamNumber; - vector *extSystems; - retVal = AsfDataParser::mHeaderParser->getPayloadExtensionSystems(streamnumber, &extSystems); - if (retVal != ASF_PARSER_SUCCESS) { - ALOGD("Error while parsing Payload Extension Systems"); - } else { - ALOGD("Extension System count = %d", extSystems->size()); - // find IV in replicated data - int replicatedDataOffset = 0; - - for (int i = 0; i < extSystems->size(); i++) { - // ptr to ext system's data in replicated data buffer. - if ((extSystems->at(i)->extensionSystemId) == ASF_Payload_Extension_System_Encryption_Sample_ID) { - obj->sampleID = obj->replicatedData + 8 + replicatedDataOffset; - obj->sampleIDLen = extSystems->at(i)->extensionDataSize; - } - if (extSystems->at(i)->extensionDataSize == 0xFFFF) { - uint16_t nSize = *((uint16_t*) (obj->replicatedData + 8 + replicatedDataOffset)); - replicatedDataOffset += (sizeof(uint16_t) + nSize); - } else { - replicatedDataOffset += extSystems->at(i)->extensionDataSize; - } + uint8_t streamNum = streamNumber.bits.streamNumber; + + // Extension Systems are required if replicatedDataLength is greater than 8. + if (replicatedDataLength > 8) { + // For protected content, get SampleID which is used as IV. 
+ int returnStatus = AsfDataParser::mHeaderParser->parseSampleIDFromReplicatedData(obj,streamNum); + if (returnStatus == ASF_PARSER_UNEXPECTED_VALUE) { + ALOGD("Only Encryption_Sample_ID extension system is supported"); + } else if (returnStatus != ASF_PARSER_SUCCESS) { + return ASF_PARSER_FAILED; } } @@ -376,7 +362,6 @@ int AsfMultiplePayloadsHeader::parse(uint8_t *buffer, uint32_t size) { int AsfMultiplePayloadsUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPayloadDataInfo **out) { // initialize output - int retVal = 0; *out = NULL; streamNumber.value = *buffer; blockSize = 1; @@ -410,8 +395,6 @@ int AsfMultiplePayloadsUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPa return ASF_PARSER_NO_MEMORY; } - ALOGD("replicatedDataLength = %d", replicatedDataLength); - // point to replicated data into object's buffer. Yet to be interpreted. obj->replicatedDataLength = replicatedDataLength; obj->replicatedData = buffer + blockSize; @@ -422,27 +405,17 @@ int AsfMultiplePayloadsUncompressed::parse(uint8_t *buffer, uint32_t size, AsfPa blockSize += replicatedDataLength; - // defined for temporary use as arg for getPayloadExtensionSystems - uint8_t streamnumber = streamNumber.bits.streamNumber; - vector *extSystems; - retVal = AsfDataParser::mHeaderParser->getPayloadExtensionSystems(streamnumber, &extSystems); - if (retVal != ASF_PARSER_SUCCESS) { - ALOGD("Error while parsing Payload Extension Systems"); - } else { - // find IV in replicated data - int replicatedDataOffset = 0; - for (int i = 0; i < extSystems->size(); i++) { - // ptr to ext system's data in replicated data buffer. - if ((extSystems->at(i)->extensionSystemId) == ASF_Payload_Extension_System_Encryption_Sample_ID) { - obj->sampleID = obj->replicatedData + 8 + replicatedDataOffset; - obj->sampleIDLen = extSystems->at(i)->extensionDataSize; - } - if (extSystems->at(i)->extensionDataSize == 0xFFFF) { - uint16_t nSize = *((uint16_t*) (obj->replicatedData + 8 + replicatedDataOffset)); - replicatedDataOffset += (sizeof(uint16_t) + nSize); - } else { - replicatedDataOffset += extSystems->at(i)->extensionDataSize; - } + uint8_t streamNum = streamNumber.bits.streamNumber; + + // Extension Systems are required if replicatedDataLength is greater than 8. + if (replicatedDataLength > 8) { + // For protected content, get SampleID which is used as IV. 
+ int returnStatus = AsfDataParser::mHeaderParser->parseSampleIDFromReplicatedData(obj,streamNum); + + if (returnStatus == ASF_PARSER_UNEXPECTED_VALUE) { + ALOGD("Only Encryption_Sample_ID extension system is supported"); + } else if (returnStatus != ASF_PARSER_SUCCESS) { + return ASF_PARSER_FAILED; } } diff --git a/asfparser/AsfHeaderParser.cpp b/asfparser/AsfHeaderParser.cpp index 0d9b45e..4887a53 100644 --- a/asfparser/AsfHeaderParser.cpp +++ b/asfparser/AsfHeaderParser.cpp @@ -24,7 +24,7 @@ -#define LOG_NDEBUG 0 +//#define LOG_NDEBUG 0 #define LOG_TAG "AsfHeaderParser" #include @@ -33,6 +33,7 @@ #include +#define PRFORMATTAG 0x5052 AsfHeaderParser::AsfHeaderParser(void) : mAudioInfo(NULL), mVideoInfo(NULL), @@ -54,18 +55,24 @@ AsfHeaderParser::~AsfHeaderParser(void) { } // Deleting memory from mExtendedStreamPropertiesObj recursively - for (vector<AsfExtendedStreamPropertiesObject*>::iterator it = mExtendedStreamPropertiesObj.begin(); it != mExtendedStreamPropertiesObj.end(); ++it) { + for (vector<AsfExtendedStreamPropertiesExObject*>::iterator it = mExtendedStreamPropertiesObj.begin(); it != mExtendedStreamPropertiesObj.end(); ++it) { + + for (int i = 0; i < (*it)->streamNames.size(); i++) { + if ((*it)->streamNames[i]->pStreamName != NULL) { + delete (*it)->streamNames[i]->pStreamName; + } + delete (*it)->streamNames[i]; + } + (*it)->streamNames.clear(); + for (int i = 0; i < (*it)->extensionSystems.size(); i++) { if ((*it)->extensionSystems[i]->extensionSystemInfo != NULL) { delete (*it)->extensionSystems[i]->extensionSystemInfo; - (*it)->extensionSystems[i]->extensionSystemInfo = NULL; } delete (*it)->extensionSystems[i]; - (*it)->extensionSystems[i] = NULL; } (*it)->extensionSystems.clear(); delete (*it); - (*it) = NULL; } mExtendedStreamPropertiesObj.clear(); @@ -177,30 +184,55 @@ int AsfHeaderParser::parse(uint8_t *buffer, uint64_t size) { } else if(obj->objectID == ASF_Protection_System_Identifier_Object) { mIsProtected = true; LOGV("ASF_Protection_System_Identifier_Object"); - uint64_t playreadyObjSize = obj->objectSize; - PlayreadyHeaderObj *plrdyHdrObj = (PlayreadyHeaderObj*)buffer; + if (obj->objectSize < sizeof(AsfProtectionSystemIdObj)) { + LOGE("Unsupported Protection System Object"); + return ASF_PARSER_BAD_DATA; + } + AsfProtectionSystemIdObj *protectionSysObj = (AsfProtectionSystemIdObj*)buffer; uint8_t* playreadyObjBuf = NULL; - GUID *pldyUuid = (GUID*)plrdyHdrObj->sysId; - memcpy(mPlayreadyUuid, (uint8_t*)pldyUuid, UUIDSIZE); + memcpy(mPlayreadyUuid, protectionSysObj->sysId, UUIDSIZE); // Rights Management Header - Record Type = 0x0001 // Traverse till field containing number of records - playreadyObjBuf = buffer + sizeof(PlayreadyHeaderObj); - for (int i = 0; i < plrdyHdrObj->countRecords; i++) { - uint16_t* recordType = (uint16_t*)playreadyObjBuf; - if (*recordType == 0x01) {// Rights management Header + playreadyObjBuf = buffer + sizeof(AsfProtectionSystemIdObj); + uint32_t sizeLeft = obj->objectSize - sizeof(AsfProtectionSystemIdObj); // For tracking and checking the bytes left in the object + + for (int i = 0; i < protectionSysObj->countRecords; i++) { + if (sizeLeft > 4) { + uint16_t recordType = *((uint16_t*)playreadyObjBuf); playreadyObjBuf += sizeof(uint16_t); - uint16_t* recordLen = (uint16_t*)playreadyObjBuf; - mPlayreadyHeaderLen = *recordLen; + sizeLeft -= sizeof (recordType); - mPlayreadyHeader = new uint8_t [mPlayreadyHeaderLen]; - if (mPlayreadyHeader == NULL) { - return ASF_PARSER_NO_MEMORY; - } + uint16_t recordLen = *((uint16_t*)playreadyObjBuf); playreadyObjBuf += sizeof(uint16_t); - memcpy(mPlayreadyHeader,
playreadyObjBuf, mPlayreadyHeaderLen); - break; + sizeLeft -= sizeof (recordLen); + if (sizeLeft < recordLen) { + LOGE("Invalid record in Protection System Identifier Object"); + status = ASF_PARSER_BAD_DATA; + break; + } + + if (recordType == 0x01) { // Rights Management Header + mPlayreadyHeaderLen = recordLen; + + if (mPlayreadyHeaderLen == 0) { + LOGE("Invalid Protection System Record Length Value"); + return ASF_PARSER_BAD_DATA; + } + mPlayreadyHeader = new uint8_t [mPlayreadyHeaderLen]; + if (mPlayreadyHeader == NULL) { + return ASF_PARSER_NO_MEMORY; + } + + memcpy(mPlayreadyHeader, playreadyObjBuf, mPlayreadyHeaderLen); + break; + } + playreadyObjBuf += recordLen; + sizeLeft -= recordLen; + } else { + LOGE("Invalid sizeLeft"); + return ASF_PARSER_BAD_DATA; } } } @@ -220,22 +252,26 @@ int AsfHeaderParser::parse(uint8_t *buffer, uint64_t size) { return status; } -int AsfHeaderParser::getPlayreadyUuid(uint8_t *playreadyUuid) { - +int AsfHeaderParser::getPlayreadyUuid(uint8_t playreadyUuid[], uint16_t len) { if (playreadyUuid == NULL || (!mIsProtected)) return ASF_PARSER_FAILED; + if (len < UUIDSIZE) { + LOGE("Invalid length"); + return ASF_PARSER_FAILED; + } + memcpy(playreadyUuid, mPlayreadyUuid, UUIDSIZE); return ASF_PARSER_SUCCESS; } -int AsfHeaderParser::getPlayreadyHeaderXml(uint8_t *playreadyHeader, uint32_t *playreadyHeaderLen) { +int AsfHeaderParser::getPlayreadyHeaderXml(uint8_t *header, uint32_t *len) { - if (playreadyHeader == NULL) { - *playreadyHeaderLen = mPlayreadyHeaderLen; + if (header == NULL) { + *len = mPlayreadyHeaderLen; return ASF_PARSER_NULL_POINTER; } - memcpy(playreadyHeader, mPlayreadyHeader, mPlayreadyHeaderLen); - *playreadyHeaderLen = mPlayreadyHeaderLen; + memcpy(header, mPlayreadyHeader, mPlayreadyHeaderLen); + *len = mPlayreadyHeaderLen; return ASF_PARSER_SUCCESS; } @@ -316,8 +352,10 @@ int AsfHeaderParser::onVideoSpecificData(AsfStreamPropertiesObject *obj, uint8_t videoInfo->width = info->encodedImageWidth; videoInfo->height = info->encodedImageHeight; + // Following condition taken from Section 2.4.2.2 (Video Media Type) of the PlayReady documentation if (bmp->compressionID == FOURCC('Y', 'D', 'R', 'P')) { - // That means PYV content + // That means PYV content, for which Compression Id is + // the last 4 bytes of the codec specific data following the Video format data uint32_t* ptrActCompId = (uint32_t*)((data + sizeof(AsfVideoInfoHeader) + bmp->formatDataSize - sizeof(uint32_t))); videoInfo->fourCC = (*ptrActCompId); } else { @@ -376,7 +414,11 @@ int AsfHeaderParser::onAudioSpecificData(AsfStreamPropertiesObject *obj, uint8_t audioInfo->encryptedContentFlag = obj->flags.bits.encryptedContentFlag; audioInfo->timeOffset = obj->timeOffset; - if (format->codecIDFormatTag == 0x5052) { + // Codec Id is 0x5052 i.e.
ASCII value of 'P', 'R' for PlayReady - + // [Refer Section 2.4.2.1 (Audio Media Type) of the PlayReady documentation] + if (format->codecIDFormatTag == PRFORMATTAG) { + // That means protected content, for which Codec Id is + // the last 2 bytes of the codec specific data following the Audio format data uint32_t* ptrActCodecId = (uint32_t*)((data + sizeof(AsfWaveFormatEx) + format->codecSpecificDataSize - sizeof(format->codecIDFormatTag))); audioInfo->codecID = (*ptrActCodecId); } else { @@ -426,46 +468,65 @@ int AsfHeaderParser::onExtendedStreamPropertiesObject(uint8_t *buffer, uint32_t return ASF_PARSER_BAD_DATA; } - AsfExtendedStreamPropertiesObject *extStrObj = new AsfExtendedStreamPropertiesObject; + AsfExtendedStreamPropertiesObject *fixedLenExtStrPropsObj = (AsfExtendedStreamPropertiesObject *)buffer; + if (fixedLenExtStrPropsObj->objectSize > size) { + ALOGE("Invalid ASF Extended Stream Prop Object size"); + return ASF_PARSER_BAD_VALUE; + } + + AsfExtendedStreamPropertiesExObject *extStrObj = new AsfExtendedStreamPropertiesExObject; if (extStrObj == NULL) { return ASF_PARSER_NO_MEMORY; } - AsfExtendedStreamPropertiesObject *obj = (AsfExtendedStreamPropertiesObject *)buffer; - if (obj->objectSize > size) { - ALOGE("Invalid ASF Extended Stream Prop Object size"); - delete extStrObj; - return ASF_PARSER_BAD_VALUE; + //copy all fixed length fields first. + memcpy(&(extStrObj->propObj), fixedLenExtStrPropsObj, sizeof(AsfExtendedStreamPropertiesObject)); + + ALOGD("Stream number = 0x%08X", fixedLenExtStrPropsObj->streamNumber); + ALOGD("PayloadExtensionSystemCount = 0x%08X", fixedLenExtStrPropsObj->payloadExtensionSystemCount); + + // Get pointer to buffer where variable length fields might start + buffer += sizeof(AsfExtendedStreamPropertiesObject); + + uint32_t streamNameOffset = sizeof(uint16_t) * 2; // languageIDIndex + streamNameLength + //StreamNames might start here, depending on streamNameCount. + for (int i = 0; i < fixedLenExtStrPropsObj->streamNameCount; i++) { + AsfStreamName *streamNameObj = new AsfStreamName; + AsfStreamName *StreamNameObjBuffer = (AsfStreamName *)buffer; + + // populate the StreamName object from the buffer + streamNameObj->languageIDIndex = StreamNameObjBuffer->languageIDIndex; + streamNameObj->streamNameLength = StreamNameObjBuffer->streamNameLength; + + // Allocate space to store StreamName (3rd field) of the StreamName structure + if (streamNameObj->streamNameLength > 0) { + streamNameObj->pStreamName = new uint8_t [StreamNameObjBuffer->streamNameLength]; + if (streamNameObj->pStreamName == NULL) { + delete streamNameObj; + delete extStrObj; + return ASF_PARSER_NO_MEMORY; + } + + memcpy(streamNameObj->pStreamName, buffer + streamNameOffset, StreamNameObjBuffer->streamNameLength); + } else { + // no streamName + streamNameObj->pStreamName = NULL; + } + + // calculate the length of current StreamName entry.
+ // if there are multiple StreamNames then increment + // buffer by 4 + streamNameLength to point to next StreamName entry + buffer += streamNameOffset + StreamNameObjBuffer->streamNameLength; + + extStrObj->streamNames.push_back(streamNameObj); } - extStrObj->objectID = obj->objectID; - extStrObj->objectSize = obj->objectSize; - extStrObj->startTime = obj->startTime; - extStrObj->endTime = obj->endTime; - extStrObj->dataBitrate = obj->dataBitrate; - extStrObj->bufferSize = obj->bufferSize; - extStrObj->initialBufferFullness = obj->initialBufferFullness; - extStrObj->alternateDataBitrate = obj->alternateDataBitrate; - extStrObj->alternateBufferSize = obj->alternateBufferSize; - extStrObj->alternateInitialBufferFullness = obj->alternateInitialBufferFullness; - extStrObj->maximumObjectSize = obj->maximumObjectSize; - extStrObj->flags = obj->flags; - extStrObj->streamNumber = obj->streamNumber; - extStrObj->streamLanguageIDIndex = obj->streamLanguageIDIndex; - extStrObj->averageTimePerFrame = obj->averageTimePerFrame; - extStrObj->streamNameCount = obj->streamNameCount; - extStrObj->payloadExtensionSystemCount = obj->payloadExtensionSystemCount; - - ALOGD("stream number = 0x%08X", obj->streamNumber); - ALOGD("payloadExtensionSystemCount = 0x%08X", obj->payloadExtensionSystemCount); - - // Get pointer to buffer where first extension system object starts - buffer = (uint8_t *)&(obj->extensionSystems); - int extSysSize = 0; - for (int i = 0; i < obj->payloadExtensionSystemCount; i++ ) { - extSysSize = 0; - PayloadExtensionSystem *extensionObj = new PayloadExtensionSystem; - PayloadExtensionSystem *extObjData = (PayloadExtensionSystem *)buffer; + uint32_t systemInfoOffset = sizeof(GUID) + sizeof(uint16_t) + sizeof(uint32_t); + //buffer now points to the extension systems; their count is given by payloadExtensionSystemCount in the preceding fixed-length fields. + for (int i = 0; i < fixedLenExtStrPropsObj->payloadExtensionSystemCount; i++) { + AsfPayloadExtensionSystem *extensionObj = new AsfPayloadExtensionSystem; + AsfPayloadExtensionSystem *extObjData = (AsfPayloadExtensionSystem *)buffer; + // populate the extension object from the buffer extensionObj->extensionSystemId = extObjData->extensionSystemId; extensionObj->extensionDataSize = extObjData->extensionDataSize; @@ -473,24 +534,23 @@ int AsfHeaderParser::onExtendedStreamPropertiesObject(uint8_t *buffer, uint32_t // Allocate space to store extensionSystemInfo if (extensionObj->extensionSystemInfoLength > 0) { - // TODO: make sure this memory is freed when not reuired. extensionObj->extensionSystemInfo = new uint8_t [extObjData->extensionSystemInfoLength]; if (extensionObj->extensionSystemInfo == NULL) { delete extensionObj; delete extStrObj; return ASF_PARSER_NO_MEMORY; } - memcpy(extensionObj->extensionSystemInfo, extObjData->extensionSystemInfo, extObjData->extensionSystemInfoLength); + memcpy(extensionObj->extensionSystemInfo, buffer + systemInfoOffset, extObjData->extensionSystemInfoLength); } else { // no extension system info extensionObj->extensionSystemInfo = NULL; } // calculate the length of current extension system.
- // if there are multiple extension systems then increment buffer by extSysSize - // to point to next extension object - extSysSize += sizeof(GUID) + sizeof(uint16_t) + sizeof(uint32_t) + extensionObj->extensionSystemInfoLength; - buffer += extSysSize; + // if there are multiple extension systems then increment + // buffer by 22 + extensionSystemInfoLength to point to + // next extension object + buffer += systemInfoOffset + extensionObj->extensionSystemInfoLength; // add the extension object to the extended stream object extStrObj->extensionSystems.push_back(extensionObj); @@ -547,14 +607,47 @@ int AsfHeaderParser::parseHeaderExtensionObject(uint8_t* buffer, uint32_t size) return status; } -int AsfHeaderParser::getPayloadExtensionSystems(uint8_t streamNumber, vector<PayloadExtensionSystem*> **extSystems ) { + +int AsfHeaderParser::parseSampleIDFromReplicatedData(AsfPayloadDataInfo *obj, uint8_t streamNumber) { + + vector<AsfPayloadExtensionSystem*> *extSystems = NULL; + + // Get handle to extension systems in that stream's Extended Stream Properties object. for (unsigned int i = 0; i < mExtendedStreamPropertiesObj.size(); i++) { - if (streamNumber == mExtendedStreamPropertiesObj[i]->streamNumber) { - *extSystems = &(mExtendedStreamPropertiesObj[i]->extensionSystems); + if (streamNumber == mExtendedStreamPropertiesObj[i]->propObj.streamNumber) { + extSystems = &(mExtendedStreamPropertiesObj[i]->extensionSystems); + break; + } + } + + if (extSystems == NULL) { + return ASF_PARSER_FAILED; + } + + int replicatedDataOffset = 0; + // Find data corresponding to ASF_Payload_Extension_System_Encryption_Sample_ID, which is the SampleID + for (int i = 0; i < extSystems->size(); i++) { + // Point to ext system's data in replicated data buffer. + if ((extSystems->at(i)->extensionSystemId) == ASF_Payload_Extension_System_Encryption_Sample_ID) { + if (extSystems->at(i)->extensionDataSize == 0xFFFF) { + uint16_t extensionDataSize = *((uint16_t*) (obj->replicatedData + 8 + replicatedDataOffset)); + obj->sampleIDLen = extensionDataSize; + obj->sampleID = obj->replicatedData + 8 + replicatedDataOffset + sizeof(uint16_t); // 2 bytes denote size of this extSystem + } else { + obj->sampleID = obj->replicatedData + 8 + replicatedDataOffset; + obj->sampleIDLen = extSystems->at(i)->extensionDataSize; + } return ASF_PARSER_SUCCESS; } + // Some other extension system. Modify the replicatedData offset accordingly to point to next extension system. + if (extSystems->at(i)->extensionDataSize == 0xFFFF) { + uint16_t nSize = *((uint16_t*) (obj->replicatedData + 8 + replicatedDataOffset)); + replicatedDataOffset += (sizeof(uint16_t) + nSize); + } else { + replicatedDataOffset += extSystems->at(i)->extensionDataSize; + } } - return ASF_PARSER_FAILED; + return ASF_PARSER_UNEXPECTED_VALUE; // Other extension systems.
} void AsfHeaderParser::resetStreamInfo() { diff --git a/asfparser/AsfHeaderParser.h b/asfparser/AsfHeaderParser.h index 02b8af5..285efc4 100644 --- a/asfparser/AsfHeaderParser.h +++ b/asfparser/AsfHeaderParser.h @@ -59,9 +59,9 @@ public: bool hasVideo(); bool hasAudio(); bool isSeekable(); - int getPlayreadyUuid(uint8_t* playreadyUuid); - int getPlayreadyHeaderXml(uint8_t* playreadyHeader, uint32_t* playreadyHeaderLen); - int getPayloadExtensionSystems(uint8_t streamNumber, vector<PayloadExtensionSystem*> **extSystems ); + int getPlayreadyUuid(uint8_t playreadyUuid[], uint16_t len); + int getPlayreadyHeaderXml(uint8_t* header, uint32_t* len); + int parseSampleIDFromReplicatedData(AsfPayloadDataInfo *obj, uint8_t streamNumber); private: int onFilePropertiesObject(uint8_t *buffer, uint32_t size); int onStreamPropertiesObject(uint8_t *buffer, uint32_t size); @@ -76,13 +76,12 @@ private: AsfVideoStreamInfo *mVideoInfo; AsfFileMediaInfo *mFileInfo; uint32_t mNumObjectParsed; - uint32_t mNumberofHeaderObjects; uint8_t mPlayreadyUuid[UUIDSIZE]; bool mIsProtected; - uint8_t *mPlayreadyHeader; - uint32_t mPlayreadyHeaderLen; - vector<AsfExtendedStreamPropertiesObject*> mExtendedStreamPropertiesObj; + uint16_t mPlayreadyHeaderLen; + uint32_t mNumberofHeaderObjects; + vector<AsfExtendedStreamPropertiesExObject*> mExtendedStreamPropertiesObj; }; #endif diff --git a/asfparser/AsfObjects.h b/asfparser/AsfObjects.h index 9d92dbb..f5e3487 100644 --- a/asfparser/AsfObjects.h +++ b/asfparser/AsfObjects.h @@ -48,7 +48,7 @@ struct AsfHeaderObject : AsfObject { uint8_t reserved2; }; -struct PlayreadyHeaderObj : AsfObject { +struct AsfProtectionSystemIdObj : AsfObject { uint8_t sysId[UUIDSIZE]; uint32_t sysVer; uint32_t dataSize; @@ -157,8 +157,16 @@ struct AsfPaddingObject : AsfObject { // TODO: }; +struct AsfStreamName { + uint16_t languageIDIndex; + uint16_t streamNameLength; + uint8_t *pStreamName; +}; + + + // objects in the ASF Header Extension object -struct PayloadExtensionSystem { +struct AsfPayloadExtensionSystem { GUID extensionSystemId; uint16_t extensionDataSize; uint32_t extensionSystemInfoLength; @@ -166,7 +174,8 @@ }; // class AsfHeaderParser; -struct AsfExtendedStreamPropertiesObject : AsfObject { +// Fixed Length fields of AsfExtendedStreamPropertiesObject +struct AsfExtendedStreamPropertiesObject : AsfObject { uint64_t startTime; uint64_t endTime; uint32_t dataBitrate; @@ -191,12 +200,20 @@ struct AsfExtendedStreamPropertiesObject : AsfObject { uint64_t averageTimePerFrame; uint16_t streamNameCount; uint16_t payloadExtensionSystemCount; - vector<PayloadExtensionSystem*> extensionSystems; //Stream Names - variable length //Payload Extension Systems - variable length //Stream Properties Object - variable length }; +// AsfExtendedStreamPropertiesObject (fixed-length fields) + variable length stream names and extension systems.
+struct AsfExtendedStreamPropertiesExObject { + struct AsfExtendedStreamPropertiesObject propObj; + vector<AsfStreamName*> streamNames; + vector<AsfPayloadExtensionSystem*> extensionSystems; + //Stream Properties Object - variable length +}; + + struct AsfAdvancedMutualExclusionObject : AsfObject { // TODO: diff --git a/asfparser/AsfStreamParser.cpp b/asfparser/AsfStreamParser.cpp index dee3c3a..b19f080 100644 --- a/asfparser/AsfStreamParser.cpp +++ b/asfparser/AsfStreamParser.cpp @@ -41,7 +41,6 @@ AsfStreamParser::AsfStreamParser(void) mHeaderParser = new AsfHeaderParser; mDataParser = new AsfDataParser(mHeaderParser); mSimpleIndexParser = NULL; - memset(mPlayreadyUuid, 0, UUIDSIZE); } AsfStreamParser::~AsfStreamParser(void) { @@ -77,20 +76,12 @@ int AsfStreamParser::parseHeaderObject(uint8_t *buffer, uint64_t size) { return ASF_PARSER_SUCCESS; } -int AsfStreamParser::getDrmUuid(uint8_t *playreadyUuid) { - int status = ASF_PARSER_SUCCESS; - - status = mHeaderParser->getPlayreadyUuid(playreadyUuid); - LOGV("AsfStreamParser::getDrmUuid() returns %x", status); - return status; +int AsfStreamParser::getDrmUuid(uint8_t playreadyUuid[], uint16_t len) { + return mHeaderParser->getPlayreadyUuid(playreadyUuid, len); } int AsfStreamParser::getDrmHeaderXml(uint8_t *playreadyHeader, uint32_t *playreadyHeaderLen) { - int status = ASF_PARSER_SUCCESS; - - status = mHeaderParser->getPlayreadyHeaderXml(playreadyHeader, playreadyHeaderLen); - LOGV("AsfStreamParser::getDrmHeaderXml() returns %x", status); - return status; + return mHeaderParser->getPlayreadyHeaderXml(playreadyHeader, playreadyHeaderLen); } AsfAudioStreamInfo* AsfStreamParser::getAudioInfo() const { return mHeaderParser->getAudioInfo(); diff --git a/asfparser/AsfStreamParser.h b/asfparser/AsfStreamParser.h index e5fd0c4..5fb8acf 100644 --- a/asfparser/AsfStreamParser.h +++ b/asfparser/AsfStreamParser.h @@ -43,7 +43,7 @@ public: AsfAudioStreamInfo* getAudioInfo() const; AsfVideoStreamInfo* getVideoInfo() const; AsfFileMediaInfo* getFileInfo() const; - int getDrmUuid(uint8_t* playreadyUuid); + int getDrmUuid(uint8_t playreadyUuid[], uint16_t len); int getDrmHeaderXml(uint8_t* playreadyHeader, uint32_t* playreadyHeaderLen); // return duration in 100-nanosecond units, readable when the header object is parsed @@ -81,7 +81,6 @@ private: // Preroll value is used for data buffering. uint32_t mTimeOffsetMs; bool mHeaderParsed; - uint8_t mPlayreadyUuid[UUIDSIZE]; class AsfHeaderParser *mHeaderParser; class AsfDataParser *mDataParser; class AsfSimpleIndexParser *mSimpleIndexParser; -- cgit v1.2.3 From 1e9009a0ef5b062cc19aa86a48c45f3b25edcb91 Mon Sep 17 00:00:00 2001 From: bolunliu Date: Thu, 15 May 2014 18:44:16 +0800 Subject: Fix some VP8-related GTS failed cases. BZ: 194934 Keep the encode parameters in agreement with the GTS test cases: set min_qp to 0, kf_auto to 0, and kf_min_dist/kf_max_dist to 128. These match the GTS test case settings, and the previously failing cases now pass.
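For illustration only (not part of the patch): with kf_auto set to 0 and kf_min_dist equal to kf_max_dist, keyframe placement is pinned to a fixed cadence. A minimal C++ sketch of the behavior these values imply, with hypothetical names that merely mirror mVideoParamsVP8:

#include <cstdint>

// Sketch only: keyframe cadence implied by kf_auto = 0 and
// kf_min_dist == kf_max_dist == 128; not the encoder's actual logic.
struct VP8KeyframeParams {
    uint32_t kf_auto;     // 0: fixed cadence; non-zero: encoder decides
    uint32_t kf_min_dist; // 128 after this patch
    uint32_t kf_max_dist; // 128 after this patch
};

static bool isKeyframe(const VP8KeyframeParams &p, uint32_t frameIndex) {
    if (p.kf_auto != 0)
        return frameIndex == 0; // later placement is encoder-driven
    // Fixed cadence: frames 0, 128, 256, ... are keyframes.
    return (frameIndex % p.kf_max_dist) == 0;
}

Pinning the minimum and maximum keyframe distance to the same value removes the encoder's freedom to move keyframes, which plausibly makes the output deterministic enough for the GTS checks.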
Change-Id: Ida8c7d945285cf911eeceb7d22a84254c093a513 Signed-off-by: bolunliu --- videoencoder/VideoEncoderVP8.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp index 486c5ba..1a0871d 100644 --- a/videoencoder/VideoEncoderVP8.cpp +++ b/videoencoder/VideoEncoderVP8.cpp @@ -19,10 +19,10 @@ VideoEncoderVP8::VideoEncoderVP8() mVideoParamsVP8.profile = 0; mVideoParamsVP8.error_resilient = 0; mVideoParamsVP8.num_token_partitions = 4; - mVideoParamsVP8.kf_auto = 1; - mVideoParamsVP8.kf_min_dist = 0; - mVideoParamsVP8.kf_max_dist = 32; - mVideoParamsVP8.min_qp = 4; + mVideoParamsVP8.kf_auto = 0; + mVideoParamsVP8.kf_min_dist = 128; + mVideoParamsVP8.kf_max_dist = 128; + mVideoParamsVP8.min_qp = 0; mVideoParamsVP8.max_qp = 63; mVideoParamsVP8.init_qp = 26; mVideoParamsVP8.rc_undershoot = 100; -- cgit v1.2.3 From 049b61c6db8d499cde1549781c1944053da58f2c Mon Sep 17 00:00:00 2001 From: Tianmi Chen Date: Wed, 30 Apr 2014 17:48:53 +0800 Subject: libmix: remove the workaround for format change in the video editor BZ: 196532 This workaround causes one 1080i video clip to display no thumbnail. Remove it. Change-Id: I26307bb31abfc5985d9feddeab046f5e8939f5c3 Signed-off-by: Tianmi Chen Signed-off-by: Gu, Wangyi --- videodecoder/VideoDecoderAVC.cpp | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 13dd5db..296a0ca 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -760,18 +760,6 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) { data->codec_data->crop_right, data->codec_data->crop_bottom); - if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)){ - // BUG 159760: the port definition's height were set according to the infomation in container. - // In some cases, this info is not right. Need to correct it by format change notification. - uint32_t cropWidth = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight); - uint32_t cropHeight = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop); - - if (width != cropWidth || height != cropHeight) { - mSizeChanged = true; - ITRACE("Raw data mode: video size is changed."); - } - } - if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) { mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber; } else { -- cgit v1.2.3 From c47de3a8251ccc1828274a67222def14cc7d13c0 Mon Sep 17 00:00:00 2001 From: Yuanjun Huang Date: Wed, 28 May 2014 13:55:06 +0800 Subject: LibMIX: workaround to fix the GTS failure that occurs on MOFD with 1080p Widevine playback. BZ: 197962 This issue is caused by a defect on Google's side. After Google fixes the defect, this patch will be reverted.
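As a sketch of the pattern this patch applies (hypothetical names, not the actual vbp API): a recoverable parse condition is downgraded to success so the caller keeps feeding data instead of failing the frame.

// Hypothetical illustration of the error-code relaxation; the enum stands
// in for the real VBP_OK / VBP_DATA status codes.
enum ParseStatus { PARSE_OK, PARSE_NEED_DATA, PARSE_ERROR };

static ParseStatus checkSliceCount(int sliceNum, bool gtsWorkaround) {
    if (sliceNum < 1) {
        // Normally "no valid slice header" asks for more data; the
        // workaround reports success until the upstream defect is fixed.
        return gtsWorkaround ? PARSE_OK : PARSE_NEED_DATA;
    }
    return PARSE_OK;
}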
Change-Id: Ia080e638c5891bf68268e72ba6328ef51a93f91a Signed-off-by: Yuanjun Huang --- mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.c | 4 +++- videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp | 2 ++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.c b/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.c index 60957a5..57f852c 100755 --- a/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.c +++ b/mixvbp/vbp_manager/secvideo/merrifield/vbp_h264secure_parser.c @@ -2208,7 +2208,9 @@ uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 s } else { if (slice_num < 1) { ETRACE("Don't find a valid slice header!"); - return VBP_DATA; + // workaround to fix a GTS issue due to a Google defect. [revert later] + return VBP_OK; + //return VBP_DATA; } } error = vbp_populate_query_data_h264secure(pcontext); diff --git a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp index 4428ac4..db9908f 100644 --- a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp +++ b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp @@ -476,6 +476,8 @@ Decode_Status VideoDecoderAVCSecure::continueDecodingFrame(vbp_data_h264 *data) } } } + mDecodingFrame = true; + return DECODE_SUCCESS; } -- cgit v1.2.3 From 1f14f4750092f375c4261586556f94cbd0e60c1d Mon Sep 17 00:00:00 2001 From: "Gu, Wangyi" Date: Sat, 7 Jun 2014 14:22:57 +0800 Subject: fix the distortion issue in some 1080i interlaced clips BZ: 199433 1. ReferenceFrames have VA_PICTURE_H264_BOTTOM_FIELD set, which confuses the VED into addressing the top field. 2. The H264 DPB is wrongly updated when libmix handles SEI recovery_point. 3. Set the default POC to the maximum of int32 to avoid missing reference frames when the frame/field POC is -1. Change-Id: Ib22c3fa8b2e8064b19b5f133a6dfa0b0c0ca3d3b Signed-off-by: Gu, Wangyi --- mixvbp/vbp_plugin/h264/h264parse_sei.c | 4 ++++ videodecoder/VideoDecoderAVC.cpp | 13 +++++++++---- videodecoder/VideoDecoderAVC.h | 4 ++-- 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/mixvbp/vbp_plugin/h264/h264parse_sei.c b/mixvbp/vbp_plugin/h264/h264parse_sei.c index df18cc3..4769ef0 100755 --- a/mixvbp/vbp_plugin/h264/h264parse_sei.c +++ b/mixvbp/vbp_plugin/h264/h264parse_sei.c @@ -406,8 +406,12 @@ h264_Status h264_sei_recovery_point(void *parent, h264_Info* pInfo) pInfo->img.recovery_point_found |= 2; //// Enable the RP recovery if no IDR ---Cisco +#if 0 + // don't set this flag; in some corner cases the + // dpb is wrongly updated when a recovery point is received.
if ((pInfo->img.recovery_point_found & 1)==0) pInfo->sei_rp_received = 1; +#endif } return H264_STATUS_OK; diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp index 296a0ca..912ebdf 100644 --- a/videodecoder/VideoDecoderAVC.cpp +++ b/videodecoder/VideoDecoderAVC.cpp @@ -31,6 +31,7 @@ #define WIDI_CONSUMED 6 #define HDMI_CONSUMED 2 #define NW_CONSUMED 2 +#define POC_DEFAULT 0x7FFFFFFF VideoDecoderAVC::VideoDecoderAVC(const char *mimeType) : VideoDecoderBase(mimeType, VBP_H264), @@ -571,6 +572,10 @@ Decode_Status VideoDecoderAVC::updateReferenceFrames(vbp_picture_data_h264 *picD } } dpb->flags = refList->flags; + // if a bottom field is in the DPB, a top field must be there too, + // so clear the bottom flag, or it will confuse the VED into addressing the top field + if (dpb->flags & VA_PICTURE_H264_BOTTOM_FIELD) + dpb->flags &= (~VA_PICTURE_H264_BOTTOM_FIELD); dpb->frame_idx = refList->frame_idx; dpb->TopFieldOrderCnt = refList->TopFieldOrderCnt; dpb->BottomFieldOrderCnt = refList->BottomFieldOrderCnt; @@ -588,10 +593,10 @@ void VideoDecoderAVC::removeReferenceFromDPB(VAPictureParameterBufferH264 *picPa if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) { DecodedPictureBuffer *dpb = mDPBs[mToggleDPB]; - uint32_t poc = getPOC(&(picParam->CurrPic)); + int32_t poc = getPOC(&(picParam->CurrPic)); for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) { if (poc == dpb->poc) { - dpb->poc = (uint32_t)-1; + dpb->poc = (int32_t)POC_DEFAULT; if (dpb->surfaceBuffer) { dpb->surfaceBuffer->asReferernce = false; } @@ -602,7 +607,7 @@ } } -uint32_t VideoDecoderAVC::getPOC(VAPictureH264 *pic) { +int32_t VideoDecoderAVC::getPOC(VAPictureH264 *pic) { if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) { return pic->BottomFieldOrderCnt; } @@ -655,7 +660,7 @@ VideoSurfaceBuffer* VideoDecoderAVC::findRefSurfaceBuffer(VAPictureH264 *pic) { void VideoDecoderAVC::invalidateDPB(int toggle) { DecodedPictureBuffer* p = mDPBs[toggle]; for (int i = 0; i < DPB_SIZE; i++) { - p->poc = (uint32_t) -1; + p->poc = (int32_t) POC_DEFAULT; p->surfaceBuffer = NULL; p++; } diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h index c5ce75b..47f250d 100755 --- a/videodecoder/VideoDecoderAVC.h +++ b/videodecoder/VideoDecoderAVC.h @@ -47,7 +47,7 @@ protected: Decode_Status updateDPB(VAPictureParameterBufferH264 *picParam); Decode_Status updateReferenceFrames(vbp_picture_data_h264 *picData); void removeReferenceFromDPB(VAPictureParameterBufferH264 *picParam); - uint32_t getPOC(VAPictureH264 *pic); // Picture Order Count + int32_t getPOC(VAPictureH264 *pic); // Picture Order Count inline VASurfaceID findSurface(VAPictureH264 *pic); inline VideoSurfaceBuffer* findSurfaceBuffer(VAPictureH264 *pic); inline VideoSurfaceBuffer* findRefSurfaceBuffer(VAPictureH264 *pic); @@ -67,7 +67,7 @@ protected: private: struct DecodedPictureBuffer { VideoSurfaceBuffer *surfaceBuffer; - uint32_t poc; // Picture Order Count + int32_t poc; // Picture Order Count }; enum { -- cgit v1.2.3 From ab2fa6cb77935b173e0e7377a80cf5dedbb431bb Mon Sep 17 00:00:00 2001 From: gji2 Date: Fri, 6 Jun 2014 04:36:54 +0800 Subject: Reject unsupported resolution 2560x1600 for encoding BZ: 200960 Reject unsupported resolution 2560x1600 for encoding Change-Id: Id905a53b279571b7e1584c6d8c41798858f8dd68 Signed-off-by: gji2 --- videoencoder/VideoEncoderBase.cpp | 5 +++++ 1 file changed, 5 insertions(+) diff --git
a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 813fc98..80278c9 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -120,6 +120,11 @@ Encode_Status VideoEncoderBase::start() { return ENCODE_NOT_SUPPORTED; #endif + if (mComParams.resolution.width > 1920 || mComParams.resolution.height > 1088){ + LOGE("Unsupported resolution width %d, height %d\n", + mComParams.resolution.width, mComParams.resolution.height); + return ENCODE_NOT_SUPPORTED; + } queryAutoReferenceConfig(mComParams.profile); VAConfigAttrib vaAttrib_tmp[6],vaAttrib[VAConfigAttribTypeMax]; -- cgit v1.2.3 From f25436d6101c07bf8328950ce26b550502b593c9 Mon Sep 17 00:00:00 2001 From: gji2 Date: Sun, 15 Jun 2014 23:17:16 +0800 Subject: refine the resolution limit to 2Kx2K BZ: 203614 Refine the resolution limit to 2Kx2K. Change-Id: I7e30be6c2c40313151bc6603ead06f3bfbf29b38 Signed-off-by: gji2 --- videoencoder/VideoEncoderBase.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp index 80278c9..62183f2 100644 --- a/videoencoder/VideoEncoderBase.cpp +++ b/videoencoder/VideoEncoderBase.cpp @@ -120,7 +120,7 @@ Encode_Status VideoEncoderBase::start() { return ENCODE_NOT_SUPPORTED; #endif - if (mComParams.resolution.width > 1920 || mComParams.resolution.height > 1088){ + if (mComParams.resolution.width > 2048 || mComParams.resolution.height > 2048){ LOGE("Unsupported resolution width %d, height %d\n", mComParams.resolution.width, mComParams.resolution.height); return ENCODE_NOT_SUPPORTED; -- cgit v1.2.3 From 2060c3087569ab655dba4fcc93e6ee9d12093887 Mon Sep 17 00:00:00 2001 From: bolunliu Date: Thu, 19 Jun 2014 14:40:46 +0800 Subject: Fix failing CTS cases BZ: 205039 Add a workaround in libmix to fix the failing CTS cases.
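The patch below adds one more case to the surface-mapping decision. A self-contained sketch of the bit-flag logic (the flag values here are hypothetical; only the names mirror VideoEncoderUtils.cpp):

#include <cstdint>

// Hypothetical flag values; the real ones live in VideoEncoderUtils.
enum : uint32_t {
    MAP_ACTION_ALIGN64 = 1u << 0, // surface needs 64-byte realignment
    MAP_ACTION_COPY    = 1u << 1, // copy into a freshly allocated VA surface
};

static uint32_t planMapping(uint32_t action, int width, int height) {
    // Small frames (<= 320x240) that need realignment are also copied,
    // mirroring the workaround added for the failing CTS cases.
    if ((action & MAP_ACTION_ALIGN64) && width <= 320 && height <= 240)
        action |= MAP_ACTION_COPY;
    return action;
}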
Change-Id: I3b4d158cd4cc7e1a4bb1b0760aa71d454394a786 Signed-off-by: bolunliu --- videoencoder/VideoEncoderUtils.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp index e2f76d5..53aafbd 100644 --- a/videoencoder/VideoEncoderUtils.cpp +++ b/videoencoder/VideoEncoderUtils.cpp @@ -321,6 +321,10 @@ Encode_Status VASurfaceMap::doMapping() { mAction |= MAP_ACTION_COPY; } + if(mAction & MAP_ACTION_ALIGN64 && width <= 320 && height <= 240) { + mAction |= MAP_ACTION_COPY; + } + if (mAction & MAP_ACTION_COPY) { //must allocate new vasurface(EXternalMemoryNULL, uncached) //allocate new vasurface mVASurface = CreateNewVASurface(mVADisplay, stride, height); -- cgit v1.2.3 From d5da0965d64441d109d5af97e29ac37a810d0812 Mon Sep 17 00:00:00 2001 From: Thierry Strudel Date: Tue, 15 Jul 2014 22:11:03 -0700 Subject: Fix build for AOSP/L BZ: 209178 Change-Id: Ie613d65660cf3c403f6eb31120ac21f27efdb63e Signed-off-by: Thierry Strudel Reviewed-on: https://android.intel.com/218248 Reviewed-by: Imberton, Guilhem Tested-by: Imberton, Guilhem --- asfparser/AsfDataParser.cpp | 2 +- asfparser/AsfHeaderParser.cpp | 2 +- imagedecoder/ImageDecoderTrace.h | 2 +- imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp | 2 +- imageencoder/ImageEncoder.cpp | 2 +- .../fw/codecs/vp8/parser/viddec_vp8_parse.c | 2 +- mix_vbp/viddec_fw/fw/parser/vbp_trace.h | 2 +- mixvbp/include/vbp_trace.h | 2 +- mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c | 2 +- test/mix_decoder.cpp | 2 +- videodecoder/VideoDecoderTrace.h | 2 +- videoencoder/IntelMetadataBuffer.cpp | 12 ++++++------ videoencoder/PVSoftMPEG4Encoder.cpp | 2 +- videoencoder/VideoEncoderHost.cpp | 2 +- videoencoder/VideoEncoderLog.h | 21 +++++++-------------- 15 files changed, 26 insertions(+), 33 deletions(-) diff --git a/asfparser/AsfDataParser.cpp b/asfparser/AsfDataParser.cpp index 03e474d..b9c07bc 100644 --- a/asfparser/AsfDataParser.cpp +++ b/asfparser/AsfDataParser.cpp @@ -30,7 +30,7 @@ #include "AsfGuids.h" #include "AsfObjects.h" #include -#include +#include #include "AsfHeaderParser.h" AsfHeaderParser *AsfDataParser::mHeaderParser = NULL; diff --git a/asfparser/AsfHeaderParser.cpp b/asfparser/AsfHeaderParser.cpp index 4887a53..e10b79e 100644 --- a/asfparser/AsfHeaderParser.cpp +++ b/asfparser/AsfHeaderParser.cpp @@ -26,7 +26,7 @@ //#define LOG_NDEBUG 0 #define LOG_TAG "AsfHeaderParser" -#include +#include #include "AsfHeaderParser.h" #include diff --git a/imagedecoder/ImageDecoderTrace.h b/imagedecoder/ImageDecoderTrace.h index a3dadc0..68fdbfe 100644 --- a/imagedecoder/ImageDecoderTrace.h +++ b/imagedecoder/ImageDecoderTrace.h @@ -56,7 +56,7 @@ TraceImageDecoder(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) #define LOG_NDEBUG 0 -#include +#include #define ETRACE(...) ALOGE(__VA_ARGS__) #define WTRACE(...) ALOGW(__VA_ARGS__) #define ITRACE(...) 
ALOGI(__VA_ARGS__) diff --git a/imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp b/imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp index 0e123e5..a982c15 100644 --- a/imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp +++ b/imagedecoder/JPEGDecoder_libjpeg_wrapper.cpp @@ -34,7 +34,7 @@ * */ -#include +#include #include "JPEGDecoder_libjpeg_wrapper.h" #include #include diff --git a/imageencoder/ImageEncoder.cpp b/imageencoder/ImageEncoder.cpp index ec89fc6..5605019 100644 --- a/imageencoder/ImageEncoder.cpp +++ b/imageencoder/ImageEncoder.cpp @@ -1,7 +1,7 @@ /*#define LOG_NDEBUG 0*/ #define LOG_TAG "IntelImageEncoder" -#include +#include #include "ImageEncoder.h" IntelImageEncoder::IntelImageEncoder(void) diff --git a/mix_vbp/viddec_fw/fw/codecs/vp8/parser/viddec_vp8_parse.c b/mix_vbp/viddec_fw/fw/codecs/vp8/parser/viddec_vp8_parse.c index b5dfbe6..6f490f6 100644 --- a/mix_vbp/viddec_fw/fw/codecs/vp8/parser/viddec_vp8_parse.c +++ b/mix_vbp/viddec_fw/fw/codecs/vp8/parser/viddec_vp8_parse.c @@ -29,7 +29,7 @@ #include "viddec_fw_workload.h" #include "viddec_pm.h" -#include +#include #include "vp8.h" #include "vp8parse.h" #include "viddec_vp8_parse.h" diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h index ba916b9..dd06659 100644 --- a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h +++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h @@ -41,7 +41,7 @@ vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) #define LOG_TAG "MixVBP" -#include +#include #define ETRACE(...) LOGE(__VA_ARGS__) #define WTRACE(...) LOGW(__VA_ARGS__) #define ITRACE(...) LOGI(__VA_ARGS__) diff --git a/mixvbp/include/vbp_trace.h b/mixvbp/include/vbp_trace.h index ba916b9..dd06659 100755 --- a/mixvbp/include/vbp_trace.h +++ b/mixvbp/include/vbp_trace.h @@ -41,7 +41,7 @@ vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) #define LOG_TAG "MixVBP" -#include +#include #define ETRACE(...) LOGE(__VA_ARGS__) #define WTRACE(...) LOGW(__VA_ARGS__) #define ITRACE(...) LOGI(__VA_ARGS__) diff --git a/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c b/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c index 860adc0..8226732 100755 --- a/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c +++ b/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c @@ -27,7 +27,7 @@ #include "viddec_pm.h" -#include +#include #include "vp8.h" #include "vp8parse.h" diff --git a/test/mix_decoder.cpp b/test/mix_decoder.cpp index ab82747..c686d56 100755 --- a/test/mix_decoder.cpp +++ b/test/mix_decoder.cpp @@ -1,7 +1,7 @@ #define LOG_NDEBUG 0 #define LOG_TAG "mix_decoder" #include -#include +#include #include #include #include diff --git a/videodecoder/VideoDecoderTrace.h b/videodecoder/VideoDecoderTrace.h index c8b7dce..1ca7d12 100755 --- a/videodecoder/VideoDecoderTrace.h +++ b/videodecoder/VideoDecoderTrace.h @@ -54,7 +54,7 @@ TraceVideoDecoder(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__) #define LOG_TAG "VideoDecoder" -#include +#include #define ETRACE(...) LOGE(__VA_ARGS__) #define WTRACE(...) LOGW(__VA_ARGS__) #define ITRACE(...) 
LOGI(__VA_ARGS__) diff --git a/videoencoder/IntelMetadataBuffer.cpp b/videoencoder/IntelMetadataBuffer.cpp index 1cb2e39..79c3801 100644 --- a/videoencoder/IntelMetadataBuffer.cpp +++ b/videoencoder/IntelMetadataBuffer.cpp @@ -24,7 +24,7 @@ //#define LOG_NDEBUG 0 #define LOG_TAG "IntelMetadataBuffer" -#include +#include #include "IntelMetadataBuffer.h" #include @@ -240,7 +240,7 @@ static sp GetIntelBufferSharingService() { sp binder = sm->checkService(String16("media.IntelBufferSharing")); if (binder == 0) - LOGE("media.IntelBufferSharing service is not published"); + ALOGE("media.IntelBufferSharing service is not published"); return binder; } @@ -272,7 +272,7 @@ status_t IntelBufferSharingService::onTransact(uint32_t code, const Parcel& data if (pid == getpid()) //in same process, should not use binder { - LOGE("onTransact in same process, wrong sessionflag?"); + ALOGE("onTransact in same process, wrong sessionflag?"); return UNKNOWN_ERROR; } @@ -296,7 +296,7 @@ status_t IntelBufferSharingService::onTransact(uint32_t code, const Parcel& data if (pid == getpid()) //in same process, should not use binder { //same process, return same pointer in data - LOGE("onTransact CLEAR_MEM in same process, wrong sessionflag?"); + ALOGE("onTransact CLEAR_MEM in same process, wrong sessionflag?"); return UNKNOWN_ERROR; } @@ -308,7 +308,7 @@ status_t IntelBufferSharingService::onTransact(uint32_t code, const Parcel& data if (pid == getpid()) //in same process, should not use binder { - LOGE("onTransact GET_MEM in same process, wrong sessionflag?"); + ALOGE("onTransact GET_MEM in same process, wrong sessionflag?"); return UNKNOWN_ERROR; } @@ -320,7 +320,7 @@ status_t IntelBufferSharingService::onTransact(uint32_t code, const Parcel& data if (smem && (NO_ERROR == WriteMemObjToBinder(*reply, smem))) return NO_ERROR; else - LOGE("onTransact GET_MEM: Not find mem"); + ALOGE("onTransact GET_MEM: Not find mem"); return UNKNOWN_ERROR; } diff --git a/videoencoder/PVSoftMPEG4Encoder.cpp b/videoencoder/PVSoftMPEG4Encoder.cpp index 9a8d234..b599847 100644 --- a/videoencoder/PVSoftMPEG4Encoder.cpp +++ b/videoencoder/PVSoftMPEG4Encoder.cpp @@ -39,7 +39,7 @@ //#define LOG_NDEBUG 0 #define LOG_TAG "PVSoftMPEG4Encoder" -#include +#include #include "mp4enc_api.h" #include "OMX_Video.h" diff --git a/videoencoder/VideoEncoderHost.cpp b/videoencoder/VideoEncoderHost.cpp index 39b0e5d..9483eb6 100644 --- a/videoencoder/VideoEncoderHost.cpp +++ b/videoencoder/VideoEncoderHost.cpp @@ -16,7 +16,7 @@ #include "VideoEncoderHost.h" #include #include -#include +#include int32_t gLogLevel = 0; diff --git a/videoencoder/VideoEncoderLog.h b/videoencoder/VideoEncoderLog.h index 74760fe..320e495 100644 --- a/videoencoder/VideoEncoderLog.h +++ b/videoencoder/VideoEncoderLog.h @@ -11,20 +11,13 @@ #define LOG_TAG "VideoEncoder" -// Components -#include - -#if 1 -#define LOG_V(...) LOGV_IF(gLogLevel, __VA_ARGS__) -#define LOG_I(...) LOGI_IF(gLogLevel, __VA_ARGS__) -#define LOG_W(...) LOGW_IF(gLogLevel, __VA_ARGS__) -#define LOG_E(...) LOGE_IF(gLogLevel, __VA_ARGS__) -#else -#define LOG_V printf -#define LOG_I printf -#define LOG_W printf -#define LOG_E printf -#endif +#include + +#define LOG_V ALOGV +#define LOG_D ALOGD +#define LOG_I ALOGI +#define LOG_W ALOGW +#define LOG_E ALOGE extern int32_t gLogLevel; #define CHECK_VA_STATUS_RETURN(FUNC)\ -- cgit v1.2.3
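A closing note on the renames in this last commit: AOSP L dropped the legacy LOG* aliases from liblog, leaving only the ALOG* forms, which is why the patch rewrites LOGE/LOGV call sites and the trace macros. A tree that must build against both old and new liblog could bridge the gap with a small shim like this (a sketch under that assumption, not part of the patch):

// Compatibility shim: map the removed LOG* aliases onto the ALOG* macros.
// Assumes the platform's liblog header (e.g. log/log.h) defines ALOG*.
#ifndef LOGE
#define LOGE(...) ALOGE(__VA_ARGS__)
#define LOGW(...) ALOGW(__VA_ARGS__)
#define LOGI(...) ALOGI(__VA_ARGS__)
#define LOGV(...) ALOGV(__VA_ARGS__)
#endif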